[vtk6] 03/10: Imported Upstream version 6.1.0+dfsg

Anton Gladky gladk at moszumanska.debian.org
Sun May 18 12:24:07 UTC 2014


This is an automated email from the git hooks/post-receive script.

gladk pushed a commit to branch master
in repository vtk6.

commit a3c60e14b737c279d1644de4bf22164e1877ac7a
Author: Anton Gladky <gladk at debian.org>
Date:   Sat May 17 20:31:24 2014 +0200

    Imported Upstream version 6.1.0+dfsg
---
 .ExternalData/README.rst                           |     6 +
 Accelerators/Dax/CMakeLists.txt                    |   156 +
 Accelerators/Dax/LICENSE.txt                       |    54 +
 Accelerators/Dax/Testing/Cxx/CMakeLists.txt        |     8 +
 .../Dax/Testing/Cxx/TestDaxMarchingCubes.cxx       |    92 +
 Accelerators/Dax/Testing/Cxx/TestDaxThreshold.cxx  |   107 +
 Accelerators/Dax/Testing/Cxx/TestDaxThreshold2.cxx |    76 +
 .../Data/Baseline/TestDaxMarchingCubes.png.md5     |     1 +
 .../Testing/Data/Baseline/TestDaxThreshold.png.md5 |     1 +
 .../Data/Baseline/TestDaxThreshold2.png.md5        |     1 +
 .../Data/Baseline/TestDaxThreshold2_1.png.md5      |     1 +
 Accelerators/Dax/daxToVtk/CellTypeToType.h         |    81 +
 Accelerators/Dax/daxToVtk/DataSetConverters.h      |   260 +
 Accelerators/Dax/module.cmake                      |    17 +
 Accelerators/Dax/vtkDaxConfig.h.in                 |    41 +
 Accelerators/Dax/vtkDaxDetailCommon.h              |    52 +
 Accelerators/Dax/vtkDaxMarchingCubes.cxx           |    81 +
 Accelerators/Dax/vtkDaxMarchingCubes.h             |    43 +
 Accelerators/Dax/vtkDaxMarchingCubesImpl.cu        |    17 +
 Accelerators/Dax/vtkDaxMarchingCubesImpl.cxx       |    17 +
 Accelerators/Dax/vtkDaxMarchingCubesImpl.h         |   136 +
 Accelerators/Dax/vtkDaxObjectFactory.h             |    72 +
 Accelerators/Dax/vtkDaxThreshold.cxx               |    76 +
 Accelerators/Dax/vtkDaxThreshold.h                 |    44 +
 Accelerators/Dax/vtkDaxThresholdImpl.cu            |    17 +
 Accelerators/Dax/vtkDaxThresholdImpl.cxx           |    17 +
 Accelerators/Dax/vtkDaxThresholdImpl.h             |   172 +
 Accelerators/Dax/vtkToDax/Allocators.h             |   132 +
 Accelerators/Dax/vtkToDax/CellTypeAndDataType.h    |    49 +
 Accelerators/Dax/vtkToDax/CellTypeToType.h         |    98 +
 Accelerators/Dax/vtkToDax/Containers.h             |   373 +
 Accelerators/Dax/vtkToDax/DataSetConverters.h      |   130 +
 Accelerators/Dax/vtkToDax/DataSetTypeToType.h      |    78 +
 Accelerators/Dax/vtkToDax/FieldTypeToType.h        |   100 +
 Accelerators/Dax/vtkToDax/MarchingCubes.h          |   199 +
 Accelerators/Dax/vtkToDax/Portals.h                |   368 +
 Accelerators/Dax/vtkToDax/Threshold.h              |   205 +
 Accelerators/Piston/CMakeLists.txt                 |     4 +-
 Accelerators/Piston/Testing/Cxx/CMakeLists.txt     |    53 +-
 .../Piston/Testing/Cxx/TestCompositeRender.cxx     |     1 -
 .../Data/Baseline/TestCompositeRender.png.md5      |     1 +
 .../Testing/Data/Baseline/TestContour.png.md5      |     1 +
 .../Testing/Data/Baseline/TestDMPFiltering.png.md5 |     1 +
 .../Testing/Data/Baseline/TestRendering.png.md5    |     1 +
 .../Piston/Testing/Data/Baseline/TestSlice.png.md5 |     1 +
 .../Testing/Data/Baseline/TestSlice_1.png.md5      |     1 +
 .../Piston/Testing/Data/Baseline/TestSort.png.md5  |     1 +
 .../Testing/Data/Baseline/TestThreshold.png.md5    |     1 +
 .../Testing/Data/Baseline/TestThreshold_1.png.md5  |     1 +
 .../Testing/Data/Baseline/TestUsePiston.png.md5    |     1 +
 Accelerators/Piston/Testing/Python/CMakeLists.txt  |    24 +-
 Accelerators/Piston/vtkPistonDataObject.cxx        |     4 +-
 Accelerators/Piston/vtkPistonDataObject.h          |     4 +-
 Accelerators/Piston/vtkPistonMapper.cxx            |    20 +
 Accelerators/Piston/vtkPistonMapper.h              |     2 +-
 Accelerators/Piston/vtkPistonSort.h                |     2 +-
 CMake/CheckCXXExpressionCompiles.cmake             |    40 +
 CMake/ExternalData.cmake                           |   789 ++
 CMake/ExternalData_config.cmake.in                 |     4 +
 CMake/FindCg.cmake                                 |     2 +-
 CMake/FindFFMPEG.cmake                             |   206 +-
 CMake/FindHDF5.cmake                               |    16 +
 CMake/FindJsonCpp.cmake                            |    26 +
 CMake/FindNetCDF.cmake                             |   102 +
 CMake/FindOGGTHEORA.cmake                          |     2 +-
 CMake/FindPythonModules.cmake                      |    27 +
 CMake/FindTBB.cmake                                |   283 +
 CMake/FindThrust.cmake                             |    65 +
 CMake/FindXKaapi.cmake                             |    55 +
 CMake/GenerateExportHeader.cmake                   |    10 +-
 CMake/SharedLibraryPathInfo.cxx                    |     2 +-
 CMake/VTKConfig.cmake.in                           |     1 +
 CMake/VTKParallelCMakeTests.cmake                  |     6 +-
 CMake/VTKValgrindSuppressions.supp                 |   100 +
 CMake/vtkCompilerExtras.cmake                      |     6 +-
 CMake/vtkExternalData.cmake                        |    71 +
 CMake/vtkFFMPEGTestAvAlloc.cxx                     |    14 -
 CMake/vtkFFMPEGTestImgConvert.cxx                  |    15 -
 CMake/vtkFFMPEGTestURLFClose.cxx                   |    14 -
 CMake/vtkForwardingExecutable.cmake                |     2 +-
 CMake/vtkGhostscript.cmake                         |     3 +
 CMake/vtkJavaWrapping.cmake                        |    15 +-
 CMake/vtkLegacyData.cmake                          |    36 +
 CMake/vtkMPI.cmake                                 |    15 +
 CMake/vtkMakeInstantiator.cmake                    |     2 -
 CMake/vtkModuleAPI.cmake                           |    49 +-
 CMake/vtkModuleInfo.cmake.in                       |     1 +
 CMake/vtkModuleMacros.cmake                        |   104 +-
 CMake/vtkModuleTop.cmake                           |     1 +
 CMake/vtkPythonPackages.cmake                      |   128 +
 CMake/vtkPythonWrapping.cmake                      |     7 +-
 CMake/vtkQt.cmake                                  |     8 +
 CMake/vtkRequireLargeFilesSupport.cxx              |     2 +-
 CMake/vtkTclWrapping.cmake                         |     2 +-
 CMake/vtkTestFFMPEG.cmake                          |   170 +-
 CMake/vtkTestStreamsLibrary.cmake                  |     4 +-
 CMake/vtkTestingMacros.cmake                       |   440 +-
 CMake/vtkTestingObjectFactory.cmake                |    87 -
 CMake/vtkTestingRenderingDriver.cmake              |    74 +
 CMake/vtkVersion.cmake                             |     2 +-
 CMake/vtkWrapHierarchy.cmake                       |     1 -
 CMake/vtkWrapJava.cmake                            |     2 +-
 CMake/vtkWrapPython.cmake                          |    10 +-
 CMake/vtkWrapPythonSIP.cmake                       |    12 +-
 CMake/vtkWrapTcl.cmake                             |     1 -
 CMakeLists.txt                                     |    94 +-
 CONTRIBUTING.md                                    |    16 +
 Charts/Core/CMakeLists.txt                         |     3 +
 Charts/Core/Testing/Cxx/CMakeLists.txt             |   143 +-
 Charts/Core/Testing/Cxx/TestBagPlot.cxx            |    87 +
 Charts/Core/Testing/Cxx/TestCategoryLegend.cxx     |    75 +
 Charts/Core/Testing/Cxx/TestChartDouble.cxx        |    85 +
 Charts/Core/Testing/Cxx/TestChartUnicode.cxx       |    91 +
 Charts/Core/Testing/Cxx/TestContextUnicode.cxx     |    17 +-
 .../Testing/Cxx/TestControlPointsHandleItem.cxx    |  1244 +-
 .../Testing/Cxx/TestControlPointsItemEvents.cxx    |     4 +-
 Charts/Core/Testing/Cxx/TestFunctionalBagPlot.cxx  |   132 +
 Charts/Core/Testing/Cxx/TestLinePlot3D.cxx         |     2 +-
 Charts/Core/Testing/Cxx/TestLinePlotDouble.cxx     |    85 +
 Charts/Core/Testing/Cxx/TestLinePlotDouble2.cxx    |    62 +
 Charts/Core/Testing/Cxx/TestMultipleRenderers.cxx  |     1 -
 .../Core/Testing/Cxx/TestParallelCoordinates.cxx   |     3 +-
 .../Testing/Cxx/TestParallelCoordinatesDouble.cxx  |    68 +
 Charts/Core/Testing/Cxx/TestZoomAxis.cxx           |    91 +
 Charts/Core/Testing/Data/Baseline/TestAxes.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/TestBagPlot.png.md5 |     1 +
 .../Testing/Data/Baseline/TestBarGraph.png.md5     |     1 +
 .../Data/Baseline/TestBarGraphHorizontal.png.md5   |     1 +
 .../Data/Baseline/TestCategoryLegend.png.md5       |     1 +
 .../Testing/Data/Baseline/TestChartDouble.png.md5  |     1 +
 .../Data/Baseline/TestChartDouble_1.png.md5        |     1 +
 .../Data/Baseline/TestChartDouble_2.png.md5        |     1 +
 .../Testing/Data/Baseline/TestChartMatrix.png.md5  |     1 +
 .../Testing/Data/Baseline/TestChartUnicode.png.md5 |     1 +
 .../Testing/Data/Baseline/TestChartXYZ.png.md5     |     1 +
 .../Testing/Data/Baseline/TestChartsOn3D.png.md5   |     1 +
 .../Baseline/TestColorTransferFunction.png.md5     |     1 +
 .../Baseline/TestColorTransferFunction_1.png.md5   |     1 +
 .../Baseline/TestColorTransferFunction_2.png.md5   |     1 +
 .../Baseline/TestColorTransferFunction_3.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/TestContext.png.md5 |     1 +
 .../Testing/Data/Baseline/TestContextImage.png.md5 |     1 +
 .../Data/Baseline/TestContextUnicode.png.md5       |     1 +
 .../Baseline/TestControlPointsHandleItem.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/TestDiagram.png.md5 |     1 +
 .../Testing/Data/Baseline/TestDiagram_1.png.md5    |     1 +
 .../Data/Baseline/TestFunctionalBagPlot.png.md5    |     1 +
 .../Testing/Data/Baseline/TestHistogram2D.png.md5  |     1 +
 .../Data/Baseline/TestInteractiveChartXYZ.png.md5  |     1 +
 .../Data/Baseline/TestLegendHiddenPlots.png.md5    |     1 +
 .../Testing/Data/Baseline/TestLinePlot.png.md5     |     1 +
 .../Testing/Data/Baseline/TestLinePlot2.png.md5    |     1 +
 .../Testing/Data/Baseline/TestLinePlot3D.png.md5   |     1 +
 .../Data/Baseline/TestLinePlotAxisFonts.png.md5    |     1 +
 .../Data/Baseline/TestLinePlotAxisFonts_1.png.md5  |     1 +
 .../Data/Baseline/TestLinePlotAxisFonts_2.png.md5  |     1 +
 .../Data/Baseline/TestLinePlotColors.png.md5       |     1 +
 .../Data/Baseline/TestLinePlotDouble.png.md5       |     1 +
 .../Data/Baseline/TestLinePlotDouble2.png.md5      |     1 +
 .../Data/Baseline/TestLinePlotDouble_1.png.md5     |     1 +
 .../Data/Baseline/TestLinePlotDouble_2.png.md5     |     1 +
 .../Data/Baseline/TestLinePlotInteraction.png.md5  |     1 +
 .../Baseline/TestLinePlotInteraction_1.png.md5     |     1 +
 .../Data/Baseline/TestLinePlotSelection.png.md5    |     1 +
 .../Data/Baseline/TestLinePlotSelection2.png.md5   |     1 +
 .../Data/Baseline/TestLinePlotSelection2_1.png.md5 |     1 +
 .../Data/Baseline/TestLinePlotSelection_1.png.md5  |     1 +
 .../Baseline/TestMultipleChartRenderers.png.md5    |     1 +
 .../Baseline/TestMultipleChartRenderers_1.png.md5  |     1 +
 .../Data/Baseline/TestMultipleRenderers.png.md5    |     1 +
 .../Data/Baseline/TestMultipleRenderers_1.png.md5  |     1 +
 .../Data/Baseline/TestMultipleRenderers_2.png.md5  |     1 +
 .../Baseline/TestMultipleScalarsToColors.png.md5   |     1 +
 .../Data/Baseline/TestParallelCoordinates.png.md5  |     1 +
 .../Baseline/TestParallelCoordinatesColors.png.md5 |     1 +
 .../Baseline/TestParallelCoordinatesDouble.png.md5 |     1 +
 .../Testing/Data/Baseline/TestPieChart.png.md5     |     1 +
 .../Testing/Data/Baseline/TestPieChart_1.png.md5   |     1 +
 .../Testing/Data/Baseline/TestPlotMatrix.png.md5   |     1 +
 .../Data/Baseline/TestScalarsToColors.png.md5      |     1 +
 .../Testing/Data/Baseline/TestScatterPlot.png.md5  |     1 +
 .../Data/Baseline/TestScatterPlotColors.png.md5    |     1 +
 .../Data/Baseline/TestScatterPlotMatrix.png.md5    |     1 +
 .../Baseline/TestScatterPlotMatrixVehicles.png.md5 |     1 +
 .../Baseline/TestScatterPlotMatrixVisible.png.md5  |     1 +
 .../Data/Baseline/TestScatterPlotMatrix_1.png.md5  |     1 +
 .../Data/Baseline/TestScatterPlotMatrix_2.png.md5  |     1 +
 .../Data/Baseline/TestScatterPlotMatrix_3.png.md5  |     1 +
 .../Data/Baseline/TestScientificPlot.png.md5       |     1 +
 .../Data/Baseline/TestStackedBarGraph.png.md5      |     1 +
 .../Testing/Data/Baseline/TestStackedPlot.png.md5  |     1 +
 .../Testing/Data/Baseline/TestSurfacePlot.png.md5  |     1 +
 .../Testing/Data/Baseline/TestZoomAxis.png.md5     |     1 +
 .../Testing/Data/Baseline/TestZoomAxis_1.png.md5   |     1 +
 Charts/Core/Testing/Data/Fonts/DejaVuSans.ttf.md5  |     1 +
 Charts/Core/Testing/Python/CMakeLists.txt          |    39 +-
 Charts/Core/Testing/Python/TestBarGraph.py         |     4 -
 Charts/Core/Testing/Python/TestLinePlot.py         |     4 -
 Charts/Core/Testing/Python/TestLinePlotColors.py   |     4 -
 .../Python/TestParallelCoordinatesColors.py        |     4 -
 .../Core/Testing/Python/TestScatterPlotColors.py   |     4 -
 Charts/Core/Testing/Python/TestStackedPlot.py      |     4 -
 Charts/Core/module.cmake                           |     4 +-
 Charts/Core/vtkAxis.cxx                            |    10 +-
 Charts/Core/vtkAxis.h                              |    15 +
 Charts/Core/vtkCategoryLegend.cxx                  |   288 +
 Charts/Core/vtkCategoryLegend.h                    |   102 +
 Charts/Core/vtkChart.cxx                           |    83 +-
 Charts/Core/vtkChart.h                             |    27 +-
 Charts/Core/vtkChartLegend.cxx                     |    20 +-
 Charts/Core/vtkChartLegend.h                       |    16 +
 Charts/Core/vtkChartPie.cxx                        |     2 +-
 Charts/Core/vtkChartSelectionHelper.h              |    22 +-
 Charts/Core/vtkChartXY.cxx                         |   140 +-
 Charts/Core/vtkChartXY.h                           |     2 +-
 Charts/Core/vtkColorLegend.cxx                     |   213 +-
 Charts/Core/vtkColorLegend.h                       |    57 +
 Charts/Core/vtkColorTransferControlPointsItem.cxx  |    50 +-
 Charts/Core/vtkColorTransferControlPointsItem.h    |    11 +
 Charts/Core/vtkColorTransferFunctionItem.cxx       |    26 +-
 Charts/Core/vtkColorTransferFunctionItem.h         |     5 +
 Charts/Core/vtkCompositeControlPointsItem.cxx      |    13 +
 Charts/Core/vtkCompositeControlPointsItem.h        |     5 +
 Charts/Core/vtkCompositeTransferFunctionItem.cxx   |     1 +
 Charts/Core/vtkCompositeTransferFunctionItem.h     |     6 +
 Charts/Core/vtkContextPolygon.cxx                  |     1 +
 Charts/Core/vtkControlPointsItem.cxx               |   191 +-
 Charts/Core/vtkControlPointsItem.h                 |    46 +-
 Charts/Core/vtkPiecewiseControlPointsItem.cxx      |    22 +
 Charts/Core/vtkPiecewiseFunctionItem.cxx           |     1 +
 Charts/Core/vtkPiecewisePointHandleItem.cxx        |     2 +
 Charts/Core/vtkPlot.cxx                            |    28 +-
 Charts/Core/vtkPlot.h                              |    17 +-
 Charts/Core/vtkPlotBag.cxx                         |   439 +
 Charts/Core/vtkPlotBag.h                           |   107 +
 Charts/Core/vtkPlotBar.cxx                         |   124 +-
 Charts/Core/vtkPlotBar.h                           |     2 +-
 Charts/Core/vtkPlotFunctionalBag.cxx               |   349 +
 Charts/Core/vtkPlotFunctionalBag.h                 |   134 +
 Charts/Core/vtkPlotHistogram2D.cxx                 |     2 +-
 Charts/Core/vtkPlotHistogram2D.h                   |     2 +-
 Charts/Core/vtkPlotLine3D.cxx                      |     2 +-
 Charts/Core/vtkPlotParallelCoordinates.cxx         |    18 +-
 Charts/Core/vtkPlotPie.cxx                         |     7 +-
 Charts/Core/vtkPlotPoints.cxx                      |   368 +-
 Charts/Core/vtkPlotPoints.h                        |     8 +
 Charts/Core/vtkScalarsToColorsItem.cxx             |     4 +-
 Common/Color/Testing/Cxx/CMakeLists.txt            |    36 +-
 Common/Color/Testing/Cxx/TestNamedColors.cxx       |   204 +-
 .../Testing/Data/Baseline/TestColorSeries.png.md5  |     1 +
 .../Testing/Data/Baseline/CSpline.png.md5          |     1 +
 .../Testing/Data/Baseline/KSpline.png.md5          |     1 +
 .../Data/Baseline/TestParametricFunctions.png.md5  |     1 +
 .../Baseline/TestParametricFunctions_1.png.md5     |     1 +
 .../Baseline/TestParametricFunctions_2.png.md5     |     1 +
 .../Testing/Data/Baseline/closedSplines.png.md5    |     1 +
 .../Testing/Python/CMakeLists.txt                  |    10 +-
 .../Testing/Python/TestParametricFunctions.py      |     4 -
 .../Testing/Tcl/CMakeLists.txt                     |    10 +-
 Common/ComputationalGeometry/vtkCardinalSpline.cxx |    17 +-
 Common/ComputationalGeometry/vtkKochanekSpline.cxx |    20 +-
 Common/Core/CMakeLists.txt                         |    96 +-
 Common/Core/SMP/Kaapi/vtkSMPThreadLocal.h.in       |   239 +
 Common/Core/SMP/Kaapi/vtkSMPTools.cxx              |    53 +
 Common/Core/SMP/Kaapi/vtkSMPToolsInternal.h.in     |    56 +
 Common/Core/SMP/Sequential/vtkAtomicInt.cxx        |   255 +
 Common/Core/SMP/Sequential/vtkAtomicInt.h.in       |   379 +
 Common/Core/SMP/Sequential/vtkSMPThreadLocal.h.in  |   195 +
 Common/Core/SMP/Sequential/vtkSMPTools.cxx         |    23 +
 .../Core/SMP/Sequential/vtkSMPToolsInternal.h.in   |    53 +
 Common/Core/SMP/Simple/vtkSMPThreadLocal.h.in      |   237 +
 Common/Core/SMP/Simple/vtkSMPTools.cxx             |    83 +
 Common/Core/SMP/Simple/vtkSMPToolsInternal.h.in    |   139 +
 Common/Core/SMP/TBB/vtkAtomicInt.h.in              |    94 +
 Common/Core/SMP/TBB/vtkSMPThreadLocal.h.in         |   168 +
 Common/Core/SMP/TBB/vtkSMPTools.cxx                |    49 +
 Common/Core/SMP/TBB/vtkSMPToolsInternal.h.in       |    64 +
 Common/Core/Testing/Cxx/CMakeLists.txt             |    33 +-
 Common/Core/Testing/Cxx/TestAtomic.cxx             |   189 +
 Common/Core/Testing/Cxx/TestCollection.cxx         |     2 +-
 Common/Core/Testing/Cxx/TestDataArray.cxx          |   234 +
 Common/Core/Testing/Cxx/TestDataArrayIterators.cxx |   133 +
 Common/Core/Testing/Cxx/TestGarbageCollector.cxx   |     2 +-
 Common/Core/Testing/Cxx/TestNew.cxx                |     2 +-
 Common/Core/Testing/Cxx/TestSMP.cxx                |   139 +
 Common/Core/Testing/Cxx/TestScalarsToColors.cxx    |    70 +
 Common/Core/Testing/Cxx/TestWeakPointer.cxx        |     2 +-
 .../Core/Testing/Cxx/TestXMLFileOutputWindow.cxx   |    32 +-
 Common/Core/Testing/Python/CMakeLists.txt          |     5 +-
 Common/Core/Testing/Python/TestPointers.py         |    29 +
 Common/Core/Testing/Tcl/CMakeLists.txt             |     5 +-
 Common/Core/Testing/Tcl/TestSetGet.tcl             |     3 +
 Common/Core/module.cmake                           |     2 +-
 Common/Core/vtkAbstractArray.cxx                   |    44 +-
 Common/Core/vtkAbstractArray.h                     |    52 +-
 Common/Core/vtkArrayIteratorTemplate.h             |     4 +-
 Common/Core/vtkArrayPrint.h                        |     6 +-
 Common/Core/vtkArrayWeights.cxx                    |    11 +
 Common/Core/vtkArrayWeights.h                      |     8 +
 Common/Core/vtkAutoInit.h                          |    24 +
 Common/Core/vtkBitArray.cxx                        |    45 +-
 Common/Core/vtkBitArray.h                          |    13 +-
 Common/Core/vtkBitArrayIterator.cxx                |     5 +-
 Common/Core/vtkCharArray.cxx                       |     2 +-
 Common/Core/vtkCharArray.h                         |   108 +-
 Common/Core/vtkCollection.h                        |     2 +-
 Common/Core/vtkConditionVariable.h                 |     4 +
 Common/Core/vtkConfigure.h.in                      |     6 +
 Common/Core/vtkCriticalSection.cxx                 |    74 -
 Common/Core/vtkCriticalSection.h                   |    70 +-
 Common/Core/vtkDataArray.cxx                       |   269 +-
 Common/Core/vtkDataArray.h                         |    28 +-
 Common/Core/vtkDataArrayCollection.h               |     4 +-
 Common/Core/vtkDataArrayIteratorMacro.h            |   136 +
 Common/Core/vtkDataArraySelection.h                |     2 +-
 Common/Core/vtkDataArrayTemplate.h                 |   109 +-
 Common/Core/vtkDataArrayTemplate.txx               |   278 +-
 Common/Core/vtkDebugLeaks.cxx                      |     9 +-
 Common/Core/vtkDebugLeaks.h                        |     6 +-
 Common/Core/vtkDoubleArray.cxx                     |     2 +-
 Common/Core/vtkDoubleArray.h                       |   110 +-
 Common/Core/vtkDynamicLoader.h                     |     4 +-
 Common/Core/vtkEventForwarderCommand.h             |     2 +-
 Common/Core/vtkFloatArray.cxx                      |     2 +-
 Common/Core/vtkFloatArray.h                        |   109 +-
 Common/Core/vtkGarbageCollector.cxx                |     2 +-
 Common/Core/vtkIOStream.cxx                        |     1 +
 Common/Core/vtkIdList.cxx                          |    35 +-
 Common/Core/vtkIdList.h                            |    20 +-
 Common/Core/vtkIdListCollection.h                  |     4 +-
 Common/Core/vtkIdTypeArray.cxx                     |     2 +-
 Common/Core/vtkIdTypeArray.h                       |   117 +-
 Common/Core/vtkInformationKey.h                    |    24 +-
 Common/Core/vtkInstantiator.h                      |     3 +
 Common/Core/vtkIntArray.cxx                        |     2 +-
 Common/Core/vtkIntArray.h                          |   110 +-
 Common/Core/vtkLargeInteger.cxx                    |    23 +-
 Common/Core/vtkLongArray.cxx                       |     2 +-
 Common/Core/vtkLongArray.h                         |   110 +-
 Common/Core/vtkLongLongArray.cxx                   |     2 +-
 Common/Core/vtkLongLongArray.h                     |   111 +-
 Common/Core/vtkLookupTable.cxx                     |     2 +-
 Common/Core/vtkMappedDataArray.h                   |   158 +
 Common/Core/vtkMappedDataArray.txx                 |   169 +
 Common/Core/vtkMath.cxx                            |    16 +-
 Common/Core/vtkMath.h                              |    49 +-
 Common/Core/vtkMathConfigure.h.in                  |    23 +
 Common/Core/vtkMathUtilities.h                     |    39 +
 Common/Core/vtkMutexLock.h                         |     6 +-
 Common/Core/vtkNew.h                               |     4 +
 Common/Core/vtkObjectBase.cxx                      |     2 +-
 Common/Core/vtkObjectBase.h                        |     7 +-
 Common/Core/vtkObjectFactory.h                     |     5 +-
 Common/Core/vtkObjectFactoryCollection.h           |     4 +-
 Common/Core/vtkOutputWindow.cxx                    |     4 +-
 Common/Core/vtkOutputWindow.h                      |     4 +
 Common/Core/vtkOverrideInformationCollection.h     |     4 +-
 Common/Core/vtkPoints.cxx                          |    88 +-
 Common/Core/vtkPoints.h                            |    11 +
 Common/Core/vtkPoints2D.cxx                        |    19 +-
 Common/Core/vtkPriorityQueue.cxx                   |    10 +-
 Common/Core/vtkSMPThreadLocalObject.h              |   166 +
 Common/Core/vtkSMPTools.h                          |   212 +
 Common/Core/vtkScalarsToColors.cxx                 |    68 +-
 Common/Core/vtkScalarsToColors.h                   |     8 +-
 Common/Core/vtkSetGet.h                            |    33 +-
 Common/Core/vtkShortArray.cxx                      |     2 +-
 Common/Core/vtkShortArray.h                        |   110 +-
 Common/Core/vtkSignedCharArray.cxx                 |     2 +-
 Common/Core/vtkSignedCharArray.h                   |   110 +-
 Common/Core/vtkSimpleCriticalSection.cxx           |    83 +
 Common/Core/vtkSimpleCriticalSection.h             |   105 +
 Common/Core/vtkSmartPointer.h                      |     4 +
 Common/Core/vtkStdString.h                         |     2 +-
 Common/Core/vtkStringArray.cxx                     |    46 +-
 Common/Core/vtkStringArray.h                       |    11 +-
 Common/Core/vtkSystemIncludes.h                    |     1 +
 Common/Core/vtkTimePointUtility.h                  |     4 +-
 Common/Core/vtkTimeStamp.cxx                       |    47 +-
 Common/Core/vtkToolkits.h.in                       |     4 -
 Common/Core/vtkTypeTemplate.h                      |    21 +-
 Common/Core/vtkTypeTraits.h                        |     1 +
 Common/Core/vtkTypedArray.cxx.in                   |     2 +-
 Common/Core/vtkTypedArray.h.in                     |     2 +-
 Common/Core/vtkTypedDataArray.h                    |   168 +
 Common/Core/vtkTypedDataArray.txx                  |    76 +
 Common/Core/vtkTypedDataArrayIterator.h            |   169 +
 Common/Core/vtkUnicodeString.cxx                   |     3 +-
 Common/Core/vtkUnicodeStringArray.cxx              |    43 +-
 Common/Core/vtkUnicodeStringArray.h                |     4 +-
 Common/Core/vtkUnsignedCharArray.cxx               |     2 +-
 Common/Core/vtkUnsignedCharArray.h                 |   110 +-
 Common/Core/vtkUnsignedIntArray.cxx                |     2 +-
 Common/Core/vtkUnsignedIntArray.h                  |   110 +-
 Common/Core/vtkUnsignedLongArray.cxx               |     2 +-
 Common/Core/vtkUnsignedLongArray.h                 |   110 +-
 Common/Core/vtkUnsignedLongLongArray.cxx           |     3 +-
 Common/Core/vtkUnsignedLongLongArray.h             |   110 +-
 Common/Core/vtkUnsignedShortArray.cxx              |     2 +-
 Common/Core/vtkUnsignedShortArray.h                |   110 +-
 Common/Core/vtkUnsigned__Int64Array.h              |   108 +-
 Common/Core/vtkVariantArray.cxx                    |    72 +-
 Common/Core/vtkVariantArray.h                      |     9 +-
 Common/Core/vtkVersion.h                           |     4 +-
 Common/Core/vtkVoidArray.cxx                       |    12 +-
 Common/Core/vtkWeakPointer.h                       |     4 +
 Common/Core/vtkWeakPointerBase.h                   |     2 +-
 Common/Core/vtkWindow.h                            |     2 +-
 Common/Core/vtkXMLFileOutputWindow.h               |     4 +-
 Common/Core/vtk__Int64Array.h                      |   108 +-
 Common/DataModel/CMakeLists.txt                    |    16 +
 Common/DataModel/Testing/Cxx/CMakeLists.txt        |    23 +-
 Common/DataModel/Testing/Cxx/TestAMRBox.cxx        |     7 +-
 Common/DataModel/Testing/Cxx/TestCellIterators.cxx |   812 ++
 Common/DataModel/Testing/Cxx/TestPixelExtent.cxx   |   146 +
 .../DataModel/Testing/Cxx/TestQuadraticPolygon.cxx |   408 +
 .../Testing/Cxx/TestQuadraticPolygonFilters.cxx    |   271 +
 .../Testing/Cxx/TestSmoothErrorMetric.cxx          |     2 +-
 .../Testing/Data/Baseline/ImplicitSum.png.md5      |     1 +
 .../Baseline/LineIntersectQuadraticCells.png.md5   |     1 +
 .../Testing/Data/Baseline/PerlinNoise.png.md5      |     1 +
 .../Testing/Data/Baseline/SelectionLoop.png.md5    |     1 +
 .../Data/Baseline/TestConvexPointSet.png.md5       |     1 +
 .../Testing/Data/Baseline/TestICPTransform.png.md5 |     1 +
 .../Data/Baseline/TestICPTransform_1.png.md5       |     1 +
 .../Baseline/TestQuadraticPolygonFilters.png.md5   |     1 +
 .../Data/Baseline/TestQuadricClustering.png.md5    |     1 +
 .../Data/Baseline/TestQuadricClustering_1.png.md5  |     1 +
 .../Data/Baseline/TestStructuredGrid.png.md5       |     1 +
 .../Testing/Data/Baseline/headBone.png.md5         |     1 +
 .../Testing/Data/Baseline/quadricCut.png.md5       |     1 +
 .../Testing/Data/Baseline/scalarColors.png.md5     |     1 +
 .../Testing/Data/Baseline/scalarConn.png.md5       |     1 +
 .../Testing/Data/Baseline/scalarConn_1.png.md5     |     1 +
 .../Testing/Data/Baseline/scalarConn_2.png.md5     |     1 +
 Common/DataModel/Testing/Python/CMakeLists.txt     |    32 +-
 .../DataModel/Testing/Python/TestICPTransform.py   |     4 -
 .../Testing/Python/TestNumericArrayImageData.py    |     4 -
 Common/DataModel/Testing/Python/quadricCut.py      |     4 -
 Common/DataModel/Testing/Tcl/CMakeLists.txt        |    34 +-
 Common/DataModel/module.cmake                      |     3 +
 Common/DataModel/vtkAMRDataInternals.cxx           |     2 +-
 Common/DataModel/vtkAMRInformation.cxx             |     4 +-
 Common/DataModel/vtkAbstractCellLocator.cxx        |     7 +-
 Common/DataModel/vtkAtom.cxx                       |     7 +-
 Common/DataModel/vtkAtom.h                         |     2 -
 Common/DataModel/vtkAttributesErrorMetric.cxx      |     2 +-
 Common/DataModel/vtkBSPCuts.cxx                    |    55 +-
 Common/DataModel/vtkBSPIntersections.cxx           |    12 +-
 .../vtkBiQuadraticQuadraticHexahedron.cxx          |     4 +-
 Common/DataModel/vtkBiQuadraticQuadraticWedge.cxx  |     2 +-
 Common/DataModel/vtkBond.cxx                       |     7 +-
 Common/DataModel/vtkBond.h                         |     1 -
 Common/DataModel/vtkBoundingBox.cxx                |     2 +-
 Common/DataModel/vtkBoundingBox.h                  |     4 +-
 Common/DataModel/vtkBox.cxx                        |     6 +-
 Common/DataModel/vtkBox.h                          |     4 +-
 Common/DataModel/vtkCellData.h                     |     4 +-
 Common/DataModel/vtkCellIterator.cxx               |   115 +
 Common/DataModel/vtkCellIterator.h                 |   284 +
 Common/DataModel/vtkCellLinks.cxx                  |    10 +-
 Common/DataModel/vtkCellLinks.h                    |     2 +-
 Common/DataModel/vtkCellLocator.cxx                |    27 +-
 Common/DataModel/vtkCellType.h                     |     1 +
 Common/DataModel/vtkCone.h                         |     2 +-
 Common/DataModel/vtkCubicLine.cxx                  |     4 -
 Common/DataModel/vtkCylinder.h                     |     2 +-
 Common/DataModel/vtkDataObjectCollection.h         |     4 +-
 Common/DataModel/vtkDataObjectTypes.h              |     4 +-
 Common/DataModel/vtkDataSet.cxx                    |     9 +
 Common/DataModel/vtkDataSet.h                      |     5 +
 Common/DataModel/vtkDataSetAttributes.cxx          |   131 +-
 Common/DataModel/vtkDataSetAttributes.h            |     5 +-
 Common/DataModel/vtkDataSetCellIterator.cxx        |   107 +
 Common/DataModel/vtkDataSetCellIterator.h          |    58 +
 Common/DataModel/vtkDataSetCollection.h            |     4 +-
 Common/DataModel/vtkDirectedAcyclicGraph.cxx       |     2 +-
 Common/DataModel/vtkDispatcher_Private.h           |    24 +-
 Common/DataModel/vtkDistributedGraphHelper.cxx     |     2 +-
 Common/DataModel/vtkEdgeTable.cxx                  |     9 +-
 Common/DataModel/vtkEmptyCell.h                    |     4 +-
 Common/DataModel/vtkExtentTranslator.h             |    22 +-
 Common/DataModel/vtkGenericAdaptorCell.cxx         |     2 +-
 Common/DataModel/vtkGenericAttribute.cxx           |     2 +-
 Common/DataModel/vtkGenericAttributeCollection.cxx |     2 +-
 Common/DataModel/vtkGenericCell.cxx                |    32 +
 Common/DataModel/vtkGenericCell.h                  |    11 +
 Common/DataModel/vtkGenericCellTessellator.cxx     |     2 +-
 Common/DataModel/vtkGenericDataSet.cxx             |     2 +-
 Common/DataModel/vtkGenericEdgeTable.cxx           |     2 +-
 .../DataModel/vtkGenericSubdivisionErrorMetric.cxx |     2 +-
 Common/DataModel/vtkGeometricErrorMetric.cxx       |     2 +-
 Common/DataModel/vtkGraph.cxx                      |    15 +-
 Common/DataModel/vtkHyperOctree.cxx                |    22 +-
 Common/DataModel/vtkHyperOctree.h                  |     2 -
 Common/DataModel/vtkHyperOctreePointsGrabber.cxx   |     2 +-
 Common/DataModel/vtkHyperTree.cxx                  |     3 +-
 Common/DataModel/vtkHyperTreeGrid.cxx              |     9 +-
 Common/DataModel/vtkHyperTreeGrid.h                |     1 -
 Common/DataModel/vtkImageData.cxx                  |     2 +-
 Common/DataModel/vtkImplicitBoolean.cxx            |     6 +-
 Common/DataModel/vtkImplicitDataSet.cxx            |    20 +-
 Common/DataModel/vtkImplicitFunctionCollection.h   |     4 +-
 Common/DataModel/vtkImplicitHalo.cxx               |     2 +-
 Common/DataModel/vtkIncrementalOctreeNode.cxx      |    16 +-
 Common/DataModel/vtkKdNode.cxx                     |     8 +-
 Common/DataModel/vtkKdNode.h                       |    10 +-
 Common/DataModel/vtkKdTree.cxx                     |    68 +-
 Common/DataModel/vtkLine.h                         |     2 +-
 Common/DataModel/vtkMappedUnstructuredGrid.h       |   294 +
 Common/DataModel/vtkMappedUnstructuredGrid.txx     |   233 +
 .../vtkMappedUnstructuredGridCellIterator.h        |    75 +
 .../vtkMappedUnstructuredGridCellIterator.txx      |   142 +
 Common/DataModel/vtkMergePoints.h                  |     4 +-
 Common/DataModel/vtkMolecule.cxx                   |     2 +-
 Common/DataModel/vtkOctreePointLocator.cxx         |    45 +-
 Common/DataModel/vtkOctreePointLocator.h           |    16 +-
 Common/DataModel/vtkOctreePointLocatorNode.h       |     4 +-
 Common/DataModel/vtkOrderedTriangulator.cxx        |     2 +-
 Common/DataModel/vtkPiecewiseFunction.cxx          |    19 +-
 Common/DataModel/vtkPiecewiseFunction.h            |     3 +-
 Common/DataModel/vtkPixelExtent.cxx                |   347 +
 Common/DataModel/vtkPixelExtent.h                  |   661 +
 Common/DataModel/vtkPlane.cxx                      |    27 +-
 Common/DataModel/vtkPlane.h                        |     2 +-
 Common/DataModel/vtkPlaneCollection.h              |     4 +-
 Common/DataModel/vtkPlanes.cxx                     |     2 +-
 Common/DataModel/vtkPlanes.h                       |     2 +-
 Common/DataModel/vtkPlanesIntersection.cxx         |     9 +-
 Common/DataModel/vtkPointData.h                    |     4 +-
 Common/DataModel/vtkPointSet.cxx                   |    20 +-
 Common/DataModel/vtkPointSet.h                     |     4 +
 Common/DataModel/vtkPointSetCellIterator.cxx       |    96 +
 Common/DataModel/vtkPointSetCellIterator.h         |    60 +
 Common/DataModel/vtkPointsProjectedHull.cxx        |    25 +-
 Common/DataModel/vtkPolyData.cxx                   |    97 +-
 Common/DataModel/vtkPolyData.h                     |     3 +-
 Common/DataModel/vtkPolyDataCollection.h           |     4 +-
 Common/DataModel/vtkPolygon.cxx                    |     2 +-
 Common/DataModel/vtkPolyhedron.cxx                 |     7 +-
 Common/DataModel/vtkQuadraticHexahedron.cxx        |     2 +
 Common/DataModel/vtkQuadraticPolygon.cxx           |   493 +
 Common/DataModel/vtkQuadraticPolygon.h             |   148 +
 Common/DataModel/vtkQuadraticPyramid.cxx           |     2 +
 Common/DataModel/vtkQuadraticQuad.cxx              |     2 +
 Common/DataModel/vtkQuadraticWedge.cxx             |     2 +
 Common/DataModel/vtkQuadratureSchemeDefinition.cxx |    31 +-
 Common/DataModel/vtkQuadratureSchemeDefinition.h   |     2 +-
 Common/DataModel/vtkQuadric.h                      |     2 +-
 Common/DataModel/vtkReebGraph.cxx                  |     8 +-
 Common/DataModel/vtkSelectionNode.cxx              |     1 +
 Common/DataModel/vtkSimpleCellTessellator.cxx      |     4 +-
 Common/DataModel/vtkSmoothErrorMetric.cxx          |     2 +-
 Common/DataModel/vtkSphere.h                       |     2 +-
 Common/DataModel/vtkSpline.cxx                     |    10 +-
 Common/DataModel/vtkStructuredData.cxx             |     2 +-
 Common/DataModel/vtkStructuredData.h               |     6 +-
 Common/DataModel/vtkStructuredGrid.cxx             |    83 +-
 Common/DataModel/vtkStructuredGrid.h               |     7 +-
 Common/DataModel/vtkStructuredPoints.h             |     2 +-
 Common/DataModel/vtkStructuredPointsCollection.h   |     4 +-
 Common/DataModel/vtkSuperquadric.h                 |     2 +-
 Common/DataModel/vtkTensor.h                       |     2 +-
 Common/DataModel/vtkUniformGridAMRDataIterator.cxx |     2 +-
 Common/DataModel/vtkUnstructuredGrid.cxx           |   204 +-
 Common/DataModel/vtkUnstructuredGrid.h             |    10 +-
 Common/DataModel/vtkUnstructuredGridBase.cxx       |    65 +
 Common/DataModel/vtkUnstructuredGridBase.h         |   112 +
 .../DataModel/vtkUnstructuredGridCellIterator.cxx  |   242 +
 Common/DataModel/vtkUnstructuredGridCellIterator.h |    78 +
 Common/DataModel/vtkVertex.h                       |     2 +-
 Common/DataModel/vtkXMLDataElement.cxx             |     2 +-
 Common/ExecutionModel/CMakeLists.txt               |     9 +-
 Common/ExecutionModel/Testing/Cxx/CMakeLists.txt   |    29 +-
 .../Testing/Cxx/TestTemporalSupport.cxx            |     2 +-
 .../ExecutionModel/Testing/Python/CMakeLists.txt   |     8 +-
 Common/ExecutionModel/vtkAlgorithm.cxx             |    40 +-
 Common/ExecutionModel/vtkAlgorithm.h               |    22 +-
 .../vtkCachedStreamingDemandDrivenPipeline.cxx     |    14 +-
 Common/ExecutionModel/vtkCastToConcrete.h          |     4 +-
 Common/ExecutionModel/vtkCompositeDataPipeline.cxx |   131 +-
 Common/ExecutionModel/vtkCompositeDataPipeline.h   |    20 +-
 .../ExecutionModel/vtkCompositeDataSetAlgorithm.h  |     2 +-
 Common/ExecutionModel/vtkComputingResources.cxx    |   280 -
 Common/ExecutionModel/vtkComputingResources.h      |   157 -
 Common/ExecutionModel/vtkDataSetAlgorithm.h        |     2 +-
 Common/ExecutionModel/vtkDemandDrivenPipeline.cxx  |     2 +-
 Common/ExecutionModel/vtkExecutionScheduler.cxx    |   852 --
 Common/ExecutionModel/vtkExecutionScheduler.h      |   179 -
 .../vtkExecutionSchedulerManager.cxx               |    35 -
 .../ExecutionModel/vtkExecutionSchedulerManager.h  |    49 -
 Common/ExecutionModel/vtkExecutiveCollection.cxx   |    40 -
 Common/ExecutionModel/vtkExecutiveCollection.h     |    94 -
 Common/ExecutionModel/vtkImageInPlaceFilter.cxx    |     2 +-
 .../ExecutionModel/vtkImageToStructuredPoints.cxx  |     2 +-
 .../ExecutionModel/vtkMultiBlockDataSetAlgorithm.h |     2 +-
 Common/ExecutionModel/vtkPassInputTypeAlgorithm.h  |     2 +-
 Common/ExecutionModel/vtkPointSetAlgorithm.h       |     2 +-
 Common/ExecutionModel/vtkProgressObserver.cxx      |    39 +
 Common/ExecutionModel/vtkProgressObserver.h        |    60 +
 Common/ExecutionModel/vtkSMPProgressObserver.cxx   |    39 +
 Common/ExecutionModel/vtkSMPProgressObserver.h     |    62 +
 Common/ExecutionModel/vtkSimpleScalarTree.cxx      |    10 +-
 .../vtkStreamingDemandDrivenPipeline.cxx           |    10 +-
 Common/ExecutionModel/vtkTableExtentTranslator.cxx |    14 +-
 .../vtkThreadedCompositeDataPipeline.cxx           |   323 +
 .../vtkThreadedCompositeDataPipeline.h             |    65 +
 .../ExecutionModel/vtkThreadedImageAlgorithm.cxx   |    18 +-
 .../vtkThreadedStreamingPipeline.cxx               |   405 -
 .../ExecutionModel/vtkThreadedStreamingPipeline.h  |   177 -
 Common/Math/Testing/Cxx/CMakeLists.txt             |    22 +-
 Common/Math/Testing/Tcl/CMakeLists.txt             |     5 +-
 Common/Math/vtkAmoebaMinimizer.cxx                 |    52 +-
 Common/Math/vtkFastNumericConversion.h             |     2 +-
 Common/Math/vtkFunctionSet.h                       |     2 +-
 Common/Math/vtkMatrix4x4.cxx                       |    35 -
 Common/Math/vtkMatrix4x4.h                         |    44 +-
 Common/Math/vtkPolynomialSolversUnivariate.h       |     4 +-
 Common/Math/vtkQuaternion.h                        |     4 -
 Common/Math/vtkQuaternion.txx                      |    10 -
 Common/Misc/Testing/Cxx/CMakeLists.txt             |    18 +-
 Common/Misc/vtkFunctionParser.cxx                  |   105 +-
 Common/System/Testing/Cxx/CMakeLists.txt           |    17 +-
 Common/System/module.cmake                         |     1 +
 Common/System/vtkTimerLog.cxx                      |     5 +-
 Common/Transforms/Testing/Cxx/CMakeLists.txt       |     5 +
 Common/Transforms/Testing/Cxx/TestTransform.cxx    |    76 +
 .../Data/Baseline/MatrixToTransform.png.md5        |     1 +
 .../Data/Baseline/MatrixToTransform_1.png.md5      |     1 +
 .../Data/Baseline/TestThinPlateWarp.png.md5        |     1 +
 .../Data/Baseline/TestThinPlateWarp3D.png.md5      |     1 +
 .../Testing/Data/Baseline/cylindrical.png.md5      |     1 +
 .../Testing/Data/Baseline/spherical.png.md5        |     1 +
 .../Testing/Data/Baseline/spherical_1.png.md5      |     1 +
 Common/Transforms/Testing/Python/CMakeLists.txt    |    10 +-
 Common/Transforms/Testing/Tcl/CMakeLists.txt       |    13 +-
 Common/Transforms/module.cmake                     |     2 +
 Common/Transforms/vtkAbstractTransform.cxx         |    48 +-
 Common/Transforms/vtkAbstractTransform.h           |     6 +-
 Common/Transforms/vtkIdentityTransform.h           |     2 +-
 Common/Transforms/vtkLinearTransform.cxx           |   185 +-
 Common/Transforms/vtkLinearTransform.h             |     4 +-
 Common/Transforms/vtkThinPlateSplineTransform.cxx  |     8 +-
 Common/Transforms/vtkTransformCollection.h         |     4 +-
 Domains/Chemistry/CMakeLists.txt                   |     5 +-
 Domains/Chemistry/Testing/Cxx/CMakeLists.txt       |    48 +-
 Domains/Chemistry/Testing/Cxx/TestBallAndStick.cxx |    26 +-
 .../Cxx/TestBondColorModeDiscreteByAtom.cxx        |    26 +-
 .../Testing/Cxx/TestBondColorModeSingleColor.cxx   |    26 +-
 .../Chemistry/Testing/Cxx/TestCompositeRender.cxx  |    26 +-
 Domains/Chemistry/Testing/Cxx/TestFastRender.cxx   |    26 +-
 .../Chemistry/Testing/Cxx/TestLiquoriceSticks.cxx  |    26 +-
 Domains/Chemistry/Testing/Cxx/TestMolecule.cxx     |     8 +-
 .../Cxx/TestMoleculeMapperPropertyUpdate.cxx       |    26 +-
 .../Testing/Cxx/TestMoleculeSelection.cxx          |    48 +-
 .../Chemistry/Testing/Cxx/TestMultiCylinderOff.cxx |    26 +-
 .../Chemistry/Testing/Cxx/TestMultiCylinderOn.cxx  |    26 +-
 Domains/Chemistry/Testing/Cxx/TestVDWSpheres.cxx   |    26 +-
 .../Testing/Data/Baseline/TestBallAndStick.png.md5 |     1 +
 .../TestBondColorModeDiscreteByAtom.png.md5        |     1 +
 .../Baseline/TestBondColorModeSingleColor.png.md5  |     1 +
 .../Data/Baseline/TestCMLMoleculeReader.png.md5    |     1 +
 .../Data/Baseline/TestCompositeRender.png.md5      |     1 +
 .../Testing/Data/Baseline/TestFastRender.png.md5   |     1 +
 .../Data/Baseline/TestLiquoriceSticks.png.md5      |     1 +
 .../TestMoleculeMapperPropertyUpdate.png.md5       |     1 +
 .../Data/Baseline/TestMultiCylinderOff.png.md5     |     1 +
 .../Data/Baseline/TestMultiCylinderOn.png.md5      |     1 +
 .../Data/Baseline/TestPDBBallAndStick.png.md5      |     1 +
 .../Data/Baseline/TestProteinRibbon.png.md5        |     1 +
 .../Testing/Data/Baseline/TestVDWSpheres.png.md5   |     1 +
 Domains/Chemistry/module.cmake                     |     4 +-
 Domains/Chemistry/vtkChemistryConfigure.h.in       |     2 +-
 Domains/Chemistry/vtkPeriodicTable.cxx             |     2 +-
 Examples/AMR/Cxx/CMakeLists.txt                    |    29 +-
 Examples/AMR/Cxx/Generate3DAMRDataSetWithPulse.cxx |     2 +-
 Examples/Annotation/Cxx/LabeledMesh/CMakeLists.txt |    27 +-
 Examples/Build/vtkLocal/CMakeLists.txt             |     3 +-
 Examples/Build/vtkMy/Common/vtkBar.h               |     4 +-
 Examples/Build/vtkMy/Imaging/vtkImageFoo.cxx       |     2 +-
 Examples/Build/vtkMy/Unsorted/vtkBar2.h            |     4 +-
 Examples/CMakeLists.txt                            |    20 +-
 Examples/DataManipulation/Cxx/CMakeLists.txt       |    16 +-
 Examples/GUI/Cocoa/CMakeLists.txt                  |     3 +-
 Examples/GUI/Qt/CMakeLists.txt                     |    18 +-
 Examples/GUI/Qt/Events/CMakeLists.txt              |    93 +-
 Examples/GUI/Qt/FourPaneViewer/CMakeLists.txt      |    78 +-
 .../GUI/Qt/FourPaneViewer/QtVTKRenderWindows.cxx   |     6 +-
 .../GUI/Qt/FourPaneViewer/QtVTKRenderWindows.h     |     2 +-
 Examples/GUI/Qt/GraphicsView/CMakeLists.txt        |    92 +-
 Examples/GUI/Qt/GraphicsView/OpenGLScene.cpp       |    10 +-
 Examples/GUI/Qt/ImageViewer/CMakeLists.txt         |    67 +-
 Examples/GUI/Qt/SimpleView/CMakeLists.txt          |   145 +-
 Examples/GUI/Qt/SimpleView/main.cxx                |    11 +-
 Examples/GUI/Win32/SampleMFC/vtkMFCView.h          |     4 +-
 .../vtkBorland/Package/vtkBorlandRenderWindow.h    |     2 +-
 Examples/Hybrid/Cxx/CMakeLists.txt                 |    31 -
 Examples/Hybrid/Cxx/ImageDataLIC2DDemo.cxx         |    22 -
 Examples/Hybrid/Cxx/StructuredGridLIC2DDemo.cxx    |    23 -
 Examples/HyperTree/Cxx/Sphere/Cell.h               |     2 +-
 Examples/HyperTree/Cxx/Sphere/Mesh.cxx             |     2 +-
 Examples/IO/Cxx/CMakeLists.txt                     |    45 +-
 Examples/IO/Cxx/ParticleReader.cxx                 |     2 +-
 .../Cxx/Baseline/TestImageSlicing.png.md5          |     1 +
 Examples/ImageProcessing/Cxx/CMakeLists.txt        |    39 +-
 .../Infovis/Cxx/CustomLinkView/CustomLinkView.cxx  |     2 +-
 Examples/Infovis/Cxx/EasyView/EasyView.cxx         |     2 +-
 Examples/Infovis/Cxx/StatsView/StatsView.cxx       |    10 +-
 Examples/LIC/Cxx/CMakeLists.txt                    |    37 +
 Examples/LIC/Cxx/ImageDataLIC2DDemo.cxx            |    20 +
 Examples/LIC/Cxx/StructuredGridLIC2DDemo.cxx       |    20 +
 Examples/LIC/Cxx/SurfaceLICDemo.cxx                |    21 +
 .../{Hybrid => LIC}/Python/CylinderAndPolarAxes.py |     0
 Examples/Medical/Cxx/Baseline/TestMedical1.png.md5 |     1 +
 Examples/Medical/Cxx/Baseline/TestMedical2.png.md5 |     1 +
 Examples/Medical/Cxx/Baseline/TestMedical3.png.md5 |     1 +
 Examples/Medical/Cxx/Baseline/TestMedical4.png.md5 |     1 +
 Examples/Medical/Cxx/CMakeLists.txt                |    63 +-
 .../Modelling/Cxx/Baseline/TestFinance.png.md5     |     1 +
 Examples/Modelling/Cxx/CMakeLists.txt              |    61 +-
 Examples/MultiBlock/Cxx/CMakeLists.txt             |    25 +-
 Examples/Rendering/Cxx/CMakeLists.txt              |    29 +-
 Examples/Rendering/Cxx/MaterialObjects.cxx         |     1 -
 Examples/Rendering/Cxx/SurfaceLICDemo.cxx          |    23 -
 Examples/Tutorial/Step1/Cxx/CMakeLists.txt         |    16 +-
 Examples/Tutorial/Step2/Cxx/CMakeLists.txt         |    19 +-
 Examples/Tutorial/Step3/Cxx/CMakeLists.txt         |    16 +-
 Examples/Tutorial/Step4/Cxx/CMakeLists.txt         |    14 +-
 Examples/Tutorial/Step5/Cxx/CMakeLists.txt         |    17 +-
 Examples/Tutorial/Step6/Cxx/CMakeLists.txt         |    18 +-
 .../Cxx/Baseline/TestBandedContours.png.md5        |     1 +
 .../Cxx/Baseline/TestFilledContours.png.md5        |     1 +
 .../TestTubesWithVaryingRadiusAndColors.png.md5    |     1 +
 .../VisualizationAlgorithms/Cxx/CMakeLists.txt     |    60 +-
 Examples/VolumeRendering/Cxx/CMakeLists.txt        |    24 +-
 .../Widgets/Cxx/Baseline/TestBalloonWidget.png.md5 |     1 +
 Examples/Widgets/Cxx/Baseline/TestSlider.png.md5   |     1 +
 Examples/Widgets/Cxx/Baseline/TestSlider2D.png.md5 |     1 +
 .../Widgets/Cxx/Baseline/TestSlider2D_1.png.md5    |     1 +
 .../Widgets/Cxx/Baseline/TestSlider2D_2.png.md5    |     1 +
 Examples/Widgets/Cxx/CMakeLists.txt                |    43 +-
 Filters/AMR/Testing/Cxx/CMakeLists.txt             |    34 +-
 Filters/AMR/Testing/Python/CMakeLists.txt          |     8 +-
 Filters/AMR/vtkAMRResampleFilter.cxx               |     2 +-
 Filters/AMR/vtkAMRUtilities.h                      |     4 +-
 Filters/Core/CMakeLists.txt                        |     1 +
 Filters/Core/Testing/Cxx/CMakeLists.txt            |    71 +-
 Filters/Core/Testing/Cxx/TestAppendPolyData.cxx    |   230 +-
 Filters/Core/Testing/Cxx/TestCleanPolyData.cxx     |   133 +
 Filters/Core/Testing/Cxx/TestClipPolyData.cxx      |   137 +
 .../Core/Testing/Cxx/TestConnectivityFilter.cxx    |   145 +
 Filters/Core/Testing/Cxx/TestCutter.cxx            |     2 +-
 .../Testing/Cxx/TestDecimatePolylineFilter.cxx     |   147 +-
 Filters/Core/Testing/Cxx/TestDecimatePro.cxx       |   168 +
 Filters/Core/Testing/Cxx/TestDelaunay3D.cxx        |   134 +
 Filters/Core/Testing/Cxx/TestFeatureEdges.cxx      |   168 +
 Filters/Core/Testing/Cxx/TestHedgeHog.cxx          |   154 +
 Filters/Core/Testing/Cxx/TestMaskPoints.cxx        |   136 +
 .../Testing/Cxx/TestPolyDataConnectivityFilter.cxx |   144 +
 .../Core/Testing/Cxx/TestSMPPipelineContour.cxx    |   154 +
 .../Core/Testing/Cxx/TestSmoothPolyDataFilter.cxx  |   133 +
 Filters/Core/Testing/Cxx/TestThresholdPoints.cxx   |   143 +
 Filters/Core/Testing/Cxx/TestTransposeTable.cxx    |   138 +
 Filters/Core/Testing/Cxx/TestTubeFilter.cxx        |   141 +
 .../Data/Baseline/CellDataToPointData.png.md5      |     1 +
 .../Data/Baseline/CellDataToPointData_1.png.md5    |     1 +
 .../Core/Testing/Data/Baseline/Delaunay2D.png.md5  |     1 +
 .../Testing/Data/Baseline/Delaunay2DAlpha.png.md5  |     1 +
 .../Data/Baseline/Delaunay2DTransform.png.md5      |     1 +
 .../Core/Testing/Data/Baseline/Delaunay3D.png.md5  |     1 +
 .../Testing/Data/Baseline/MassProperties.png.md5   |     1 +
 .../Testing/Data/Baseline/MassProperties_1.png.md5 |     1 +
 .../Data/Baseline/QuadricDecimation.png.md5        |     1 +
 .../Data/Baseline/QuadricDecimation_1.png.md5      |     1 +
 .../Data/Baseline/QuadricDecimation_2.png.md5      |     1 +
 .../Data/Baseline/QuadricDecimation_3.png.md5      |     1 +
 .../Data/Baseline/QuadricDecimation_4.png.md5      |     1 +
 .../Data/Baseline/QuadricDecimation_5.png.md5      |     1 +
 .../Testing/Data/Baseline/StreamPolyData.png.md5   |     1 +
 .../Testing/Data/Baseline/StreamPolyData_1.png.md5 |     1 +
 .../Testing/Data/Baseline/TestDelaunay2D.png.md5   |     1 +
 .../Testing/Data/Baseline/TestDelaunay2D_1.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/TestGlyph3D.png.md5 |     1 +
 .../TestGridSynchronizedTemplates3D.png.md5        |     1 +
 .../Data/Baseline/TestMarchingSquares.png.md5      |     1 +
 .../TestRectilinearSynchronizedTemplates.png.md5   |     1 +
 .../Baseline/TestSynchronizedTemplates2D.png.md5   |     1 +
 .../Baseline/TestSynchronizedTemplates3D.png.md5   |     1 +
 .../Testing/Data/Baseline/TestTensorGlyph.png.md5  |     1 +
 .../Data/Baseline/TestTensorGlyph_1.png.md5        |     1 +
 .../Testing/Data/Baseline/TestTextureGlyph.png.md5 |     1 +
 .../Data/Baseline/TestTriangleFilter.png.md5       |     1 +
 Filters/Core/Testing/Data/Baseline/capCow.png.md5  |     1 +
 .../Core/Testing/Data/Baseline/capCow_1.png.md5    |     1 +
 .../Core/Testing/Data/Baseline/capSphere.png.md5   |     1 +
 Filters/Core/Testing/Data/Baseline/clipArt.png.md5 |     1 +
 .../Testing/Data/Baseline/combStreamers.png.md5    |     1 +
 .../Testing/Data/Baseline/combStreamers2.png.md5   |     1 +
 .../Testing/Data/Baseline/combStreamers2_1.png.md5 |     1 +
 .../Data/Baseline/constrainedDelaunay.png.md5      |     1 +
 .../Data/Baseline/constrainedDelaunay_1.png.md5    |     1 +
 .../Data/Baseline/constrainedDelaunay_2.png.md5    |     1 +
 .../Testing/Data/Baseline/contourCells.png.md5     |     1 +
 .../Testing/Data/Baseline/contourCells_1.png.md5   |     1 +
 .../Data/Baseline/contourQuadraticCells.png.md5    |     1 +
 .../Core/Testing/Data/Baseline/createBFont.png.md5 |     1 +
 .../Testing/Data/Baseline/createBFont_1.png.md5    |     1 +
 Filters/Core/Testing/Data/Baseline/cutLoop.png.md5 |     1 +
 .../Testing/Data/Baseline/deciFranFace.png.md5     |     1 +
 .../Testing/Data/Baseline/deciFranFace_1.png.md5   |     1 +
 .../Testing/Data/Baseline/deciFranFace_2.png.md5   |     1 +
 .../Testing/Data/Baseline/deciFranFace_3.png.md5   |     1 +
 .../Testing/Data/Baseline/deciFranFace_4.png.md5   |     1 +
 .../Testing/Data/Baseline/deciFranFace_5.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/deciPlane.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/deciPlane_1.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/dispPlot.png.md5    |     1 +
 .../Testing/Data/Baseline/fieldToPolyData.png.md5  |     1 +
 .../Data/Baseline/fieldToPolyData_1.png.md5        |     1 +
 .../Testing/Data/Baseline/fieldToRGrid.png.md5     |     1 +
 .../Testing/Data/Baseline/fieldToRGrid_1.png.md5   |     1 +
 .../Testing/Data/Baseline/fieldToRGrid_2.png.md5   |     1 +
 .../Testing/Data/Baseline/fieldToRGrid_3.png.md5   |     1 +
 .../Testing/Data/Baseline/fieldToRGrid_4.png.md5   |     1 +
 .../Testing/Data/Baseline/fieldToSGrid.png.md5     |     1 +
 .../Testing/Data/Baseline/fieldToUGrid.png.md5     |     1 +
 .../Testing/Data/Baseline/financialField.png.md5   |     1 +
 .../Testing/Data/Baseline/financialField2.png.md5  |     1 +
 .../Testing/Data/Baseline/financialField3.png.md5  |     1 +
 .../Core/Testing/Data/Baseline/glyphComb.png.md5   |     1 +
 Filters/Core/Testing/Data/Baseline/hull.png.md5    |     1 +
 .../Core/Testing/Data/Baseline/mergeFilter.png.md5 |     1 +
 .../Data/Baseline/multipleComponentContour.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/multipleIso.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/polyConn.png.md5    |     1 +
 Filters/Core/Testing/Data/Baseline/probe.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/probeComb.png.md5   |     1 +
 .../Testing/Data/Baseline/reverseNormals.png.md5   |     1 +
 .../Testing/Data/Baseline/reverseNormals_1.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/skinOrder.png.md5   |     1 +
 .../Testing/Data/Baseline/smoothMeshOnMesh.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/streamComb.png.md5  |     1 +
 .../Testing/Data/Baseline/streamSurface.png.md5    |     1 +
 .../Testing/Data/Baseline/streamSurface2.png.md5   |     1 +
 Filters/Core/Testing/Data/Baseline/stripF.png.md5  |     1 +
 .../Core/Testing/Data/Baseline/teapotHulls.png.md5 |     1 +
 .../Testing/Data/Baseline/teapotHulls_1.png.md5    |     1 +
 .../Testing/Data/Baseline/teapotHulls_2.png.md5    |     1 +
 .../Testing/Data/Baseline/teapotHulls_3.png.md5    |     1 +
 .../Testing/Data/Baseline/teapotHulls_4.png.md5    |     1 +
 .../Core/Testing/Data/Baseline/tubeComb.png.md5    |     1 +
 Filters/Core/Testing/Python/CMakeLists.txt         |   115 +-
 Filters/Core/Testing/Python/MassProperties.py      |     4 -
 Filters/Core/Testing/Python/TestTensorGlyph.py     |     4 -
 Filters/Core/Testing/Python/TestTextureGlyph.py    |     4 -
 Filters/Core/Testing/Python/deciFranFace.py        |     4 -
 Filters/Core/Testing/Python/deciPlane.py           |     4 -
 Filters/Core/Testing/Python/financialField.py      |     4 -
 Filters/Core/Testing/Python/financialField2.py     |     4 -
 Filters/Core/Testing/Python/financialField3.py     |     4 -
 Filters/Core/Testing/Python/skinOrder.py           |     4 -
 Filters/Core/Testing/Tcl/CMakeLists.txt            |    97 +-
 Filters/Core/vtkAppendFilter.cxx                   |    11 +-
 Filters/Core/vtkAppendPolyData.cxx                 |    20 +-
 Filters/Core/vtkAppendPolyData.h                   |     8 +
 Filters/Core/vtkArrayCalculator.cxx                |   130 +-
 Filters/Core/vtkArrayCalculator.h                  |    18 +
 Filters/Core/vtkAttributeDataToFieldDataFilter.h   |     2 +-
 Filters/Core/vtkCellDataToPointData.cxx            |    13 +-
 Filters/Core/vtkCellDataToPointData.h              |     2 +-
 Filters/Core/vtkCleanPolyData.cxx                  |    21 +-
 Filters/Core/vtkCleanPolyData.h                    |     8 +
 Filters/Core/vtkClipPolyData.cxx                   |    20 +-
 Filters/Core/vtkClipPolyData.h                     |    10 +-
 Filters/Core/vtkCompositeCutter.cxx                |     2 +-
 Filters/Core/vtkConnectivityFilter.cxx             |    27 +
 Filters/Core/vtkConnectivityFilter.h               |     8 +
 Filters/Core/vtkContourFilter.cxx                  |    63 +-
 Filters/Core/vtkContourFilter.h                    |    11 +
 Filters/Core/vtkContourGrid.cxx                    |   121 +-
 Filters/Core/vtkContourGrid.h                      |     2 +-
 Filters/Core/vtkCutter.cxx                         |   123 +-
 Filters/Core/vtkDecimatePolylineFilter.cxx         |    18 +
 Filters/Core/vtkDecimatePolylineFilter.h           |     8 +
 Filters/Core/vtkDecimatePro.cxx                    |    20 +-
 Filters/Core/vtkDecimatePro.h                      |     8 +
 Filters/Core/vtkDelaunay3D.cxx                     |    35 +-
 Filters/Core/vtkDelaunay3D.h                       |     8 +
 Filters/Core/vtkFeatureEdges.cxx                   |    18 +
 Filters/Core/vtkFeatureEdges.h                     |     8 +
 Filters/Core/vtkGlyph2D.h                          |     4 +-
 Filters/Core/vtkGlyph3D.cxx                        |    35 +-
 Filters/Core/vtkGridSynchronizedTemplates3D.cxx    |     4 +
 Filters/Core/vtkGridSynchronizedTemplates3D.h      |     1 -
 Filters/Core/vtkHedgeHog.cxx                       |    28 +-
 Filters/Core/vtkHedgeHog.h                         |    10 +-
 Filters/Core/vtkImageAppend.cxx                    |     4 +-
 Filters/Core/vtkMaskPoints.cxx                     |    78 +-
 Filters/Core/vtkMaskPoints.h                       |    16 +-
 Filters/Core/vtkMaskPolyData.h                     |     2 +-
 Filters/Core/vtkPointDataToCellData.h              |     2 +-
 Filters/Core/vtkPolyDataConnectivityFilter.cxx     |    19 +
 Filters/Core/vtkPolyDataConnectivityFilter.h       |     8 +
 Filters/Core/vtkPolyDataNormals.cxx                |     4 +
 Filters/Core/vtkPolyDataNormals.h                  |     2 +-
 Filters/Core/vtkRectilinearSynchronizedTemplates.h |     1 -
 Filters/Core/vtkReverseSense.h                     |     2 +-
 Filters/Core/vtkSimpleElevationFilter.h            |     2 +-
 Filters/Core/vtkSmoothPolyDataFilter.cxx           |    18 +
 Filters/Core/vtkSmoothPolyDataFilter.h             |    10 +-
 Filters/Core/vtkStructuredGridOutlineFilter.h      |     4 +-
 Filters/Core/vtkSynchronizedTemplates3D.cxx        |    28 +-
 Filters/Core/vtkSynchronizedTemplates3D.h          |    10 +-
 Filters/Core/vtkSynchronizedTemplatesCutter3D.cxx  |    13 +-
 Filters/Core/vtkSynchronizedTemplatesCutter3D.h    |     3 +-
 Filters/Core/vtkThreshold.cxx                      |     8 +-
 Filters/Core/vtkThresholdPoints.cxx                |    32 +
 Filters/Core/vtkThresholdPoints.h                  |    10 +-
 Filters/Core/vtkTransposeTable.cxx                 |   313 +
 Filters/Core/vtkTransposeTable.h                   |    79 +
 Filters/Core/vtkTriangleFilter.h                   |     4 +-
 Filters/Core/vtkTubeFilter.cxx                     |    25 +-
 Filters/Core/vtkTubeFilter.h                       |     8 +
 Filters/Core/vtkVectorDot.h                        |     2 +-
 Filters/Core/vtkVectorNorm.h                       |     2 +-
 Filters/Core/vtkWindowedSincPolyDataFilter.h       |     2 +-
 Filters/Cosmo/CMakeLists.txt                       |    14 -
 Filters/Cosmo/module.cmake                         |     6 -
 Filters/Cosmo/vtkPCosmoHaloFinder.cxx              |  1119 --
 Filters/Cosmo/vtkPCosmoHaloFinder.h                |   231 -
 Filters/Cosmo/vtkPCosmoReader.cxx                  |   403 -
 Filters/Cosmo/vtkPCosmoReader.h                    |   155 -
 Filters/Extraction/Testing/Cxx/CMakeLists.txt      |    28 +-
 Filters/Extraction/Testing/Cxx/TestExtraction.cxx  |     6 +-
 .../Baseline/ExtractEdgesQuadraticCells.png.md5    |     1 +
 .../Baseline/ExtractEdgesQuadraticCells_1.png.md5  |     1 +
 .../Testing/Data/Baseline/ExtractTensors.png.md5   |     1 +
 .../Data/Baseline/TestExtractSelection.png.md5     |     1 +
 .../Testing/Data/Baseline/TestExtraction.png.md5   |     1 +
 .../Testing/Data/Baseline/TestExtraction_1.png.md5 |     1 +
 .../Testing/Data/Baseline/TestExtraction_2.png.md5 |     1 +
 .../Testing/Data/Baseline/TestExtraction_3.png.md5 |     1 +
 .../Testing/Data/Baseline/extractPolyData.png.md5  |     1 +
 .../Testing/Data/Baseline/extractRectGrid.png.md5  |     1 +
 .../Testing/Data/Baseline/extractUGrid.png.md5     |     1 +
 .../Testing/Data/Baseline/extractUGrid_1.png.md5   |     1 +
 .../Testing/Data/Baseline/extractVectors.png.md5   |     1 +
 Filters/Extraction/Testing/Python/CMakeLists.txt   |    15 +-
 Filters/Extraction/Testing/Tcl/CMakeLists.txt      |    14 +-
 Filters/Extraction/vtkExtractArraysOverTime.cxx    |     2 +-
 Filters/Extraction/vtkExtractDataOverTime.h        |     2 +-
 Filters/Extraction/vtkExtractGeometry.cxx          |    38 +-
 Filters/Extraction/vtkExtractGrid.h                |     2 +-
 Filters/Extraction/vtkExtractRectilinearGrid.h     |     2 +-
 Filters/Extraction/vtkExtractTensorComponents.h    |     2 +-
 Filters/Extraction/vtkExtractUnstructuredGrid.h    |     2 +-
 Filters/FlowPaths/Testing/Cxx/CMakeLists.txt       |    33 +-
 .../Cxx/TestAMRInterpolatedVelocityField.cxx       |     2 +-
 .../FlowPaths/Testing/Cxx/TestParticleTracers.cxx  |     2 +-
 Filters/FlowPaths/Testing/Cxx/TestStreamTracer.cxx |     2 +-
 .../Testing/Data/Baseline/TestBSPTree.png.md5      |     1 +
 .../FlowPaths/vtkAMRInterpolatedVelocityField.cxx  |     2 +-
 .../vtkCachingInterpolatedVelocityField.cxx        |    13 +
 .../vtkCachingInterpolatedVelocityField.h          |     2 +
 Filters/FlowPaths/vtkDashedStreamLine.h            |     2 +-
 Filters/FlowPaths/vtkParticlePathFilter.h          |     2 +-
 Filters/FlowPaths/vtkParticleTracerBase.cxx        |   288 +-
 Filters/FlowPaths/vtkParticleTracerBase.h          |    36 +-
 Filters/FlowPaths/vtkStreaklineFilter.cxx          |     2 +-
 Filters/FlowPaths/vtkStreamLine.h                  |     2 +-
 Filters/FlowPaths/vtkStreamPoints.h                |     2 +-
 Filters/FlowPaths/vtkStreamTracer.cxx              |    35 +-
 .../vtkTemporalInterpolatedVelocityField.cxx       |   229 +-
 .../vtkTemporalInterpolatedVelocityField.h         |     2 +-
 Filters/FlowPaths/vtkTemporalStreamTracer.cxx      |     2 +-
 Filters/FlowPaths/vtkTemporalStreamTracer.h        |     2 +-
 Filters/General/CMakeLists.txt                     |     1 -
 Filters/General/Testing/Cxx/CMakeLists.txt         |    76 +-
 Filters/General/Testing/Cxx/TestAppendPoints.cxx   |   168 +
 .../General/Testing/Cxx/TestReflectionFilter.cxx   |     2 +-
 .../Testing/Cxx/TestTableSplitColumnComponents.cxx |    41 +-
 .../General/Testing/Cxx/TestTransformFilter.cxx    |   151 +
 .../Testing/Cxx/TestTransformPolyDataFilter.cxx    |   156 +
 .../Testing/Data/Baseline/BoxClipPolyData.png.md5  |     1 +
 .../Data/Baseline/BoxClipTetrahedra.png.md5        |     1 +
 .../BoxClipTriangulateAndInterpolate.png.md5       |     1 +
 .../General/Testing/Data/Baseline/Canny.png.md5    |     1 +
 .../General/Testing/Data/Baseline/Canny_1.png.md5  |     1 +
 .../General/Testing/Data/Baseline/Canny_2.png.md5  |     1 +
 .../Testing/Data/Baseline/OBBCylinder.png.md5      |     1 +
 .../TestBooleanOperationPolyDataFilter.png.md5     |     1 +
 .../TestBooleanOperationPolyDataFilter2.png.md5    |     1 +
 .../Testing/Data/Baseline/TestCellDerivs.png.md5   |     1 +
 .../Testing/Data/Baseline/TestCellDerivs_1.png.md5 |     1 +
 .../Data/Baseline/TestClipClosedSurface.png.md5    |     1 +
 .../Testing/Data/Baseline/TestClipOutline.png.md5  |     1 +
 .../Data/Baseline/TestContourTriangulator.png.md5  |     1 +
 .../Baseline/TestContourTriangulatorCutter.png.md5 |     1 +
 .../TestContourTriangulatorMarching.png.md5        |     1 +
 .../Testing/Data/Baseline/TestCurvatures.png.md5   |     1 +
 .../Data/Baseline/TestDeformPointSet.png.md5       |     1 +
 .../Data/Baseline/TestDensifyPolyData.png.md5      |     1 +
 .../Data/Baseline/TestDensifyPolyData_1.png.md5    |     1 +
 .../Data/Baseline/TestDensifyPolyData_2.png.md5    |     1 +
 .../Data/Baseline/TestDensifyPolyData_3.png.md5    |     1 +
 .../Baseline/TestDiscreteMarchingCubes.png.md5     |     1 +
 .../Baseline/TestDistancePolyDataFilter.png.md5    |     1 +
 .../Data/Baseline/TestGraphLayoutFilter.png.md5    |     1 +
 .../Baseline/TestIconGlyphFilterGravity.png.md5    |     1 +
 .../TestIntersectionPolyDataFilter.png.md5         |     1 +
 .../Data/Baseline/TestMultiBlockStreamer.png.md5   |     1 +
 .../Data/Baseline/TestQuadraturePoints.png.md5     |     1 +
 .../Baseline/TestRandomAttributeGenerator.png.md5  |     1 +
 .../TestRandomAttributeGeneratorScalar.png.md5     |     1 +
 .../TestRectilinearGridToTetrahedra.png.md5        |     1 +
 .../TestRectilinearGridToTetrahedra_1.png.md5      |     1 +
 .../Testing/Data/Baseline/TestSplineFilter.png.md5 |     1 +
 .../Baseline/TestUncertaintyTubeFilter.png.md5     |     1 +
 .../Baseline/TestYoungsMaterialInterface.png.md5   |     1 +
 .../Testing/Data/Baseline/WarpScalarImage.png.md5  |     1 +
 .../Testing/Data/Baseline/WarpToImage.png.md5      |     1 +
 .../Testing/Data/Baseline/WarpVectorImage.png.md5  |     1 +
 .../Testing/Data/Baseline/blankGrid.png.md5        |     1 +
 .../General/Testing/Data/Baseline/clipComb.png.md5 |     1 +
 .../General/Testing/Data/Baseline/clipHex.png.md5  |     1 +
 .../Testing/Data/Baseline/clipImage.png.md5        |     1 +
 .../Testing/Data/Baseline/clipPyramid.png.md5      |     1 +
 .../Testing/Data/Baseline/clipPyramid_1.png.md5    |     1 +
 .../Data/Baseline/clipQuadraticCells.png.md5       |     1 +
 .../General/Testing/Data/Baseline/clipTet.png.md5  |     1 +
 .../Testing/Data/Baseline/clipVolume.png.md5       |     1 +
 .../Testing/Data/Baseline/clipVolume2.png.md5      |     1 +
 .../Testing/Data/Baseline/clipVolume3.png.md5      |     1 +
 .../Testing/Data/Baseline/clipWedge.png.md5        |     1 +
 .../Data/Baseline/contoursToSurface.png.md5        |     1 +
 .../General/Testing/Data/Baseline/cursor2D.png.md5 |     1 +
 .../General/Testing/Data/Baseline/cursor3D.png.md5 |     1 +
 .../General/Testing/Data/Baseline/dicer.png.md5    |     1 +
 .../Testing/Data/Baseline/edgePoints.png.md5       |     1 +
 .../General/Testing/Data/Baseline/mcubes.png.md5   |     1 +
 .../Testing/Data/Baseline/recursiveDC.png.md5      |     1 +
 .../Testing/Data/Baseline/recursiveDC_1.png.md5    |     1 +
 .../Testing/Data/Baseline/spatialRepAll.png.md5    |     1 +
 .../Testing/Data/Baseline/splitVectors.png.md5     |     1 +
 .../Testing/Data/Baseline/streamTracer.png.md5     |     1 +
 .../Testing/Data/Baseline/streamTracer_1.png.md5   |     1 +
 .../Data/Baseline/subPixelPositionMin.png.md5      |     1 +
 .../Baseline/testDataSetTriangleFilter.png.md5     |     1 +
 .../Baseline/testDataSetTriangleFilter2.png.md5    |     1 +
 .../Baseline/testDataSetTriangleFilter_1.png.md5   |     1 +
 .../Testing/Data/Baseline/testReflect.png.md5      |     1 +
 .../General/Testing/Data/Baseline/warplens.png.md5 |     1 +
 Filters/General/Testing/Python/CMakeLists.txt      |    90 +-
 .../Testing/Python/TestRandomAttributeGenerator.py |    10 -
 .../Python/TestRandomAttributeGeneratorScalar.py   |    69 +
 Filters/General/Testing/Python/spatialRepAll.py    |     4 -
 Filters/General/Testing/Tcl/CMakeLists.txt         |    85 +-
 Filters/General/vtkAppendPoints.cxx                |    33 +-
 Filters/General/vtkAppendPoints.h                  |    13 +-
 .../General/vtkApproximatingSubdivisionFilter.h    |     2 +-
 Filters/General/vtkAxes.h                          |     2 +-
 Filters/General/vtkBoxClipDataSet.cxx              |     5 +-
 Filters/General/vtkBrownianPoints.h                |     5 +-
 Filters/General/vtkCellCenters.h                   |     2 +-
 Filters/General/vtkCellDerivatives.h               |     2 +-
 Filters/General/vtkClipClosedSurface.cxx           |     9 +-
 Filters/General/vtkClipDataSet.cxx                 |     4 +
 Filters/General/vtkCoincidentPoints.cxx            |     1 -
 Filters/General/vtkCursor2D.cxx                    |     2 +-
 Filters/General/vtkCursor2D.h                      |     2 +-
 Filters/General/vtkCursor3D.cxx                    |     2 +-
 Filters/General/vtkCursor3D.h                      |     2 +-
 Filters/General/vtkCurvatures.cxx                  |     6 +-
 Filters/General/vtkDicer.h                         |     2 +-
 Filters/General/vtkHyperStreamline.cxx             |     7 +-
 .../General/vtkInterpolatingSubdivisionFilter.h    |     2 +-
 Filters/General/vtkLinkEdgels.h                    |     2 +-
 Filters/General/vtkMultiBlockMergeFilter.cxx       |     2 +-
 Filters/General/vtkNormalizeMatrixVectors.cxx      |     2 +
 Filters/General/vtkOBBDicer.h                      |     4 +-
 Filters/General/vtkOBBTree.h                       |     4 +
 Filters/General/vtkPassArrays.cxx                  |    18 +
 Filters/General/vtkPassArrays.h                    |     5 +
 Filters/General/vtkQuantizePolyDataPoints.h        |     2 +-
 Filters/General/vtkRandomAttributeGenerator.cxx    |   299 +-
 Filters/General/vtkRandomAttributeGenerator.h      |    43 +-
 Filters/General/vtkRectilinearGridClip.h           |     2 +-
 Filters/General/vtkRectilinearGridToTetrahedra.h   |     2 +-
 Filters/General/vtkSCurveSpline.cxx                |   258 -
 Filters/General/vtkSCurveSpline.h                  |    69 -
 Filters/General/vtkShrinkPolyData.h                |     2 +-
 Filters/General/vtkSplitColumnComponents.cxx       |   122 +-
 Filters/General/vtkSplitColumnComponents.h         |    39 +-
 Filters/General/vtkStructuredGridClip.h            |     2 +-
 Filters/General/vtkTableBasedClipDataSet.cxx       |     6 +-
 Filters/General/vtkTableToStructuredGrid.cxx       |    15 +
 Filters/General/vtkTransformFilter.cxx             |    18 +
 Filters/General/vtkTransformFilter.h               |     9 +
 Filters/General/vtkTransformPolyDataFilter.cxx     |    18 +
 Filters/General/vtkTransformPolyDataFilter.h       |     9 +
 Filters/General/vtkWarpLens.h                      |     2 +-
 Filters/General/vtkWarpTo.h                        |     2 +-
 Filters/General/vtkWarpVector.cxx                  |    54 +-
 Filters/General/vtkYoungsMaterialInterface.cxx     |   262 +-
 Filters/Generic/Testing/Cxx/CMakeLists.txt         |    12 +-
 Filters/Generic/Testing/Cxx/TestGenericClip.cxx    |     2 +-
 .../Testing/Cxx/TestGenericContourFilter.cxx       |     2 +-
 Filters/Generic/Testing/Cxx/TestGenericCutter.cxx  |     2 +-
 .../Testing/Cxx/TestGenericDataSetTessellator.cxx  |     2 +-
 .../Testing/Cxx/TestGenericGeometryFilter.cxx      |     2 +-
 .../Testing/Cxx/TestGenericGlyph3DFilter.cxx       |     2 +-
 .../Generic/Testing/Cxx/TestGenericProbeFilter.cxx |     2 +-
 .../Testing/Cxx/TestGenericStreamTracer.cxx        |     2 +-
 Filters/Generic/Testing/Cxx/otherCreation.cxx      |     2 +-
 .../Testing/Data/Baseline/TestGenericClip.png.md5  |     1 +
 .../Data/Baseline/TestGenericContourFilter.png.md5 |     1 +
 .../Data/Baseline/TestGenericCutter.png.md5        |     1 +
 .../Baseline/TestGenericDataSetTessellator.png.md5 |     1 +
 .../Baseline/TestGenericGeometryFilter.png.md5     |     1 +
 .../Data/Baseline/TestGenericGlyph3DFilter.png.md5 |     1 +
 .../Data/Baseline/TestGenericProbeFilter.png.md5   |     1 +
 .../Data/Baseline/TestGenericStreamTracer.png.md5  |     1 +
 Filters/Generic/vtkGenericStreamTracer.cxx         |     2 +-
 Filters/Geometry/Testing/Cxx/CMakeLists.txt        |    35 +-
 .../Cxx/TestStructuredAMRGridConnectivity.cxx      |     2 +-
 .../Cxx/TestUnstructuredGridGeometryFilter.cxx     |     2 +-
 .../TestExtractSurfaceNonLinearSubdivision.png.md5 |     1 +
 .../TestUnstructuredGridGeometryFilter.png.md5     |     1 +
 .../Data/Baseline/dataSetSurfaceFilter.png.md5     |     1 +
 .../Testing/Data/Baseline/geomFilter.png.md5       |     1 +
 .../Data/Baseline/officeStreamPoints.png.md5       |     1 +
 .../Testing/Data/Baseline/rectGrid.png.md5         |     1 +
 .../Testing/Data/Baseline/rectGrid_1.png.md5       |     1 +
 .../Testing/Data/Baseline/rectGrid_2.png.md5       |     1 +
 .../Testing/Data/Baseline/rectGrid_3.png.md5       |     1 +
 .../Data/Baseline/structuredGridBlanking.png.md5   |     1 +
 Filters/Geometry/Testing/Python/CMakeLists.txt     |    11 +-
 .../Testing/Python/structuredGridBlanking.py       |    83 +
 Filters/Geometry/Testing/Tcl/CMakeLists.txt        |    10 +-
 Filters/Geometry/vtkDataSetSurfaceFilter.cxx       |   543 +-
 Filters/Geometry/vtkDataSetSurfaceFilter.h         |     2 +-
 Filters/Geometry/vtkGeometryFilter.cxx             |     1 +
 Filters/Geometry/vtkImageDataGeometryFilter.h      |     2 +-
 .../Geometry/vtkRectilinearGridGeometryFilter.h    |     2 +-
 Filters/Geometry/vtkStructuredAMRNeighbor.cxx      |     7 -
 Filters/Geometry/vtkStructuredAMRNeighbor.h        |     7 +-
 .../Geometry/vtkStructuredPointsGeometryFilter.h   |     2 +-
 .../Geometry/vtkUnstructuredGridGeometryFilter.cxx |    35 +-
 Filters/Hybrid/Testing/Cxx/CMakeLists.txt          |    58 +-
 .../Testing/Cxx/TestProcrustesAlignmentFilter.cxx  |   280 +
 .../Hybrid/Testing/Data/Baseline/3DMorph.png.md5   |     1 +
 .../Data/Baseline/TemporalStatistics.png.md5       |     1 +
 .../Testing/Data/Baseline/TestFacetReader.png.md5  |     1 +
 .../Baseline/TestGreedyTerrainDecimation.png.md5   |     1 +
 .../Baseline/TestGreedyTerrainDecimation_1.png.md5 |     1 +
 .../Testing/Data/Baseline/TestGridWarp3D.png.md5   |     1 +
 .../Data/Baseline/TestGridWarpCubic.png.md5        |     1 +
 .../Data/Baseline/TestGridWarpLinear.png.md5       |     1 +
 .../Data/Baseline/TestGridWarpNearest.png.md5      |     1 +
 .../Baseline/TestImageStencilWithPolydata.png.md5  |     1 +
 .../Hybrid/Testing/Data/Baseline/TestPCA.png.md5   |     1 +
 .../Data/Baseline/TestPolyDataSilhouette.png.md5   |     1 +
 .../Testing/Data/Baseline/TestProcrustes.png.md5   |     1 +
 .../Baseline/TestProcrustesRigidCentroid.png.md5   |     1 +
 .../Data/Baseline/TestProjectedTerrainPath.png.md5 |     1 +
 .../Data/Baseline/TestTemporalFractal.png.md5      |     1 +
 .../Testing/Data/Baseline/WarpPolyData.png.md5     |     1 +
 .../Data/Baseline/appendImplicitModel.png.md5      |     1 +
 .../Baseline/appendImplicitModelNoBounds.png.md5   |     1 +
 .../Hybrid/Testing/Data/Baseline/banana.png.md5    |     1 +
 .../Hybrid/Testing/Data/Baseline/depthSort.png.md5 |     1 +
 Filters/Hybrid/Testing/Data/Baseline/earth.png.md5 |     1 +
 .../Hybrid/Testing/Data/Baseline/earth_1.png.md5   |     1 +
 .../Testing/Data/Baseline/imageToPolyData.png.md5  |     1 +
 .../Testing/Data/Baseline/largeImageOffset.png.md5 |     1 +
 .../Data/Baseline/largeImageParallel.png.md5       |     1 +
 Filters/Hybrid/Testing/Python/CMakeLists.txt       |    47 +-
 Filters/Hybrid/Testing/Python/largeImageOffset.py  |    16 +-
 .../Hybrid/Testing/Python/largeImageParallel.py    |    16 +-
 Filters/Hybrid/Testing/Tcl/CMakeLists.txt          |    45 +-
 Filters/Hybrid/Testing/Tcl/largeImageOffset.tcl    |    18 +-
 Filters/Hybrid/Testing/Tcl/largeImageParallel.tcl  |    18 +-
 Filters/Hybrid/vtkDepthSortPolyData.cxx            |     4 +-
 Filters/Hybrid/vtkEarthSource.h                    |     2 +-
 Filters/Hybrid/vtkImplicitModeller.cxx             |     3 +-
 Filters/Hybrid/vtkProcrustesAlignmentFilter.cxx    |    66 +-
 Filters/Hybrid/vtkProcrustesAlignmentFilter.h      |    12 +
 Filters/Hybrid/vtkTemporalFractal.cxx              |     2 +-
 Filters/Hybrid/vtkTemporalShiftScale.cxx           |     2 +-
 Filters/Hybrid/vtkTransformToGrid.cxx              |     4 +-
 Filters/HyperTree/Testing/Cxx/CMakeLists.txt       |    27 +-
 .../HyperTree/Testing/Cxx/TestClipHyperOctree.cxx  |     2 +-
 .../Testing/Cxx/TestHyperOctreeContourFilter.cxx   |     2 +-
 .../Testing/Cxx/TestHyperOctreeCutter.cxx          |     2 +-
 .../HyperTree/Testing/Cxx/TestHyperOctreeDual.cxx  |     2 +-
 .../Testing/Cxx/TestHyperOctreeSurfaceFilter.cxx   |     2 +-
 .../Testing/Cxx/TestHyperOctreeToUniformGrid.cxx   |     2 +-
 .../Data/Baseline/TestClipHyperOctree.png.md5      |     1 +
 .../Baseline/TestHyperOctreeContourFilter.png.md5  |     1 +
 .../Data/Baseline/TestHyperOctreeCutter.png.md5    |     1 +
 .../Data/Baseline/TestHyperOctreeDual.png.md5      |     1 +
 .../Data/Baseline/TestHyperOctreeDual_1.png.md5    |     1 +
 .../Baseline/TestHyperOctreeSurfaceFilter.png.md5  |     1 +
 .../Baseline/TestHyperOctreeToUniformGrid.png.md5  |     1 +
 .../Baseline/TestHyperTreeGridBinary2D.png.md5     |     1 +
 .../Baseline/TestHyperTreeGridBinary2DIJK.png.md5  |     1 +
 .../TestHyperTreeGridBinary2DMaterial.png.md5      |     1 +
 .../TestHyperTreeGridBinary2DMaterialIJK.png.md5   |     1 +
 .../TestHyperTreeGridBinaryEllipseMaterial.png.md5 |     1 +
 ...eGridBinaryHyperbolicParaboloidMaterial.png.md5 |     1 +
 .../Baseline/TestHyperTreeGridTernary2D.png.md5    |     1 +
 .../TestHyperTreeGridTernary2DBiMaterial.png.md5   |     1 +
 ...tHyperTreeGridTernary2DFullMaterialBits.png.md5 |     1 +
 .../TestHyperTreeGridTernary2DMaterial.png.md5     |     1 +
 .../TestHyperTreeGridTernary2DMaterialBits.png.md5 |     1 +
 .../TestHyperTreeGridTernary3DAxisCut.png.md5      |     1 +
 ...stHyperTreeGridTernary3DAxisCutMaterial.png.md5 |     1 +
 ...HyperTreeGridTernary3DAxisCutMaterial_1.png.md5 |     1 +
 .../TestHyperTreeGridTernary3DAxisCut_1.png.md5    |     1 +
 .../TestHyperTreeGridTernary3DClip.png.md5         |     1 +
 .../TestHyperTreeGridTernary3DClip_1.png.md5       |     1 +
 .../TestHyperTreeGridTernary3DClip_2.png.md5       |     1 +
 .../TestHyperTreeGridTernary3DContour.png.md5      |     1 +
 ...stHyperTreeGridTernary3DContourMaterial.png.md5 |     1 +
 ...HyperTreeGridTernary3DContourMaterial_1.png.md5 |     1 +
 .../TestHyperTreeGridTernary3DContour_1.png.md5    |     1 +
 .../Baseline/TestHyperTreeGridTernary3DCut.png.md5 |     1 +
 .../TestHyperTreeGridTernary3DCut_1.png.md5        |     1 +
 .../TestHyperTreeGridTernary3DCut_2.png.md5        |     1 +
 .../TestHyperTreeGridTernary3DGeometry.png.md5     |     1 +
 ...eGridTernary3DGeometryLargeMaterialBits.png.md5 |     1 +
 ...tHyperTreeGridTernary3DGeometryMaterial.png.md5 |     1 +
 ...erTreeGridTernary3DGeometryMaterialBits.png.md5 |     1 +
 ...TreeGridTernary3DGeometryMaterialBits_1.png.md5 |     1 +
 ...yperTreeGridTernary3DGeometryMaterial_1.png.md5 |     1 +
 .../TestHyperTreeGridTernary3DGeometry_1.png.md5   |     1 +
 .../TestHyperTreeGridTernary3DUnstructured.png.md5 |     1 +
 ...erTreeGridTernary3DUnstructuredMaterial.png.md5 |     1 +
 ...TreeGridTernary3DUnstructuredMaterial_1.png.md5 |     1 +
 ...estHyperTreeGridTernary3DUnstructured_1.png.md5 |     1 +
 .../TestHyperTreeGridTernaryHyperbola.png.md5      |     1 +
 .../TestHyperTreeGridTernarySphereMaterial.png.md5 |     1 +
 ...estHyperTreeGridTernarySphereMaterial_1.png.md5 |     1 +
 Filters/HyperTree/vtkClipHyperOctree.cxx           |     2 +-
 .../vtkHyperOctreeClipCutPointsGrabber.cxx         |     2 +-
 Filters/HyperTree/vtkHyperOctreeContourFilter.cxx  |     2 +-
 Filters/HyperTree/vtkHyperOctreeCutter.cxx         |     2 +-
 .../vtkHyperOctreeDualGridContourFilter.cxx        |     2 +-
 Filters/HyperTree/vtkHyperOctreeSampleFunction.cxx |     2 +-
 Filters/HyperTree/vtkHyperOctreeSurfaceFilter.cxx  |     2 +-
 .../vtkHyperOctreeToUniformGridFilter.cxx          |     2 +-
 Filters/Imaging/vtkExtractHistogram2D.h            |     6 +-
 Filters/Imaging/vtkPairwiseExtractHistogram2D.cxx  |     1 +
 Filters/Imaging/vtkPairwiseExtractHistogram2D.h    |     8 +-
 .../Testing/Cxx/TestMatlabEngineInterface.cxx      |     2 +-
 Filters/Matlab/vtkMatlabMexAdapter.cxx             |     2 +-
 Filters/Modeling/Testing/Cxx/CMakeLists.txt        |    36 +-
 .../Modeling/Testing/Data/Baseline/Hyper.png.md5   |     1 +
 .../Testing/Data/Baseline/HyperScalarBar.png.md5   |     1 +
 .../Testing/Data/Baseline/KlineBottle.png.md5      |     1 +
 .../Testing/Data/Baseline/TenEllip.png.md5         |     1 +
 .../Data/Baseline/TestBandedContourFilter.png.md5  |     1 +
 .../Testing/Data/Baseline/TestBoxFunction.png.md5  |     1 +
 .../Data/Baseline/TestButterflyScalars.png.md5     |     1 +
 .../Data/Baseline/TestFillHolesFilter.png.md5      |     1 +
 .../Data/Baseline/TestImageMarchingCubes.png.md5   |     1 +
 .../Baseline/TestNamedColorsIntegration.png.md5    |     1 +
 .../Data/Baseline/TestPolyDataPointSampler.png.md5 |     1 +
 .../Baseline/TestQuadRotationalExtrusion.png.md5   |     1 +
 .../TestQuadRotationalExtrusionMultiBlock.png.md5  |     1 +
 ...TestQuadRotationalExtrusionMultiBlock_1.png.md5 |     1 +
 .../Data/Baseline/TestRibbonAndTube.png.md5        |     1 +
 .../Data/Baseline/TestRibbonAndTube_1.png.md5      |     1 +
 .../Data/Baseline/TestRibbonAndTube_2.png.md5      |     1 +
 .../Data/Baseline/TestRibbonAndTube_3.png.md5      |     1 +
 .../Data/Baseline/TestRibbonAndTube_4.png.md5      |     1 +
 .../Data/Baseline/TestRotationalExtrusion.png.md5  |     1 +
 .../Testing/Data/Baseline/TestRuledSurface.png.md5 |     1 +
 .../Data/Baseline/TestRuledSurface2.png.md5        |     1 +
 .../Data/Baseline/TestSelectEnclosedPoints.png.md5 |     1 +
 .../Testing/Data/Baseline/TestSpherePuzzle.png.md5 |     1 +
 .../Data/Baseline/TestSpherePuzzleArrows.png.md5   |     1 +
 .../Modeling/Testing/Data/Baseline/camera.png.md5  |     1 +
 .../Testing/Data/Baseline/contour2DAll.png.md5     |     1 +
 .../Testing/Data/Baseline/contour3DAll.png.md5     |     1 +
 .../Testing/Data/Baseline/eleState.png.md5         |     1 +
 .../Testing/Data/Baseline/extrudeCopyCD.png.md5    |     1 +
 .../Modeling/Testing/Data/Baseline/schwarz.png.md5 |     1 +
 .../Testing/Data/Baseline/smoothCyl.png.md5        |     1 +
 .../Testing/Data/Baseline/smoothCyl2.png.md5       |     1 +
 .../Testing/Data/Baseline/subDivideTetra.png.md5   |     1 +
 .../Data/Baseline/subdividePointData.png.md5       |     1 +
 .../Testing/Data/Baseline/sync3dAll.png.md5        |     1 +
 Filters/Modeling/Testing/Python/CMakeLists.txt     |    53 +-
 .../Testing/Python/TestNamedColorsIntegration.py   |     4 -
 Filters/Modeling/Testing/Python/contour2DAll.py    |     4 -
 Filters/Modeling/Testing/Python/contour3DAll.py    |     4 -
 Filters/Modeling/Testing/Python/smoothCyl.py       |     2 +-
 Filters/Modeling/Testing/Python/sync3dAll.py       |     6 +-
 Filters/Modeling/Testing/Tcl/CMakeLists.txt        |    51 +-
 .../Modeling/vtkBandedPolyDataContourFilter.cxx    |     2 +-
 Filters/Modeling/vtkButterflySubdivisionFilter.h   |     4 +-
 Filters/Modeling/vtkDijkstraGraphGeodesicPath.h    |     4 -
 Filters/Modeling/vtkGeodesicPath.h                 |     2 -
 Filters/Modeling/vtkLinearExtrusionFilter.h        |     2 +-
 Filters/Modeling/vtkLinearSubdivisionFilter.h      |     4 +-
 Filters/Modeling/vtkLoopSubdivisionFilter.h        |     4 +-
 Filters/Modeling/vtkProjectedTexture.h             |     2 +-
 .../Modeling/vtkQuadRotationalExtrusionFilter.h    |     2 +-
 Filters/Modeling/vtkRotationalExtrusionFilter.h    |     2 +-
 Filters/Modeling/vtkSectorSource.h                 |     2 +-
 Filters/Modeling/vtkSelectPolyData.cxx             |    32 +-
 Filters/Modeling/vtkSelectPolyData.h               |     5 +
 Filters/Modeling/vtkSubdivideTetra.h               |     2 +-
 Filters/Parallel/CMakeLists.txt                    |     1 -
 Filters/Parallel/Testing/Cxx/CMakeLists.txt        |    19 +-
 .../Testing/Cxx/DistributedDataRenderPass.cxx      |     1 -
 .../Testing/Data/Baseline/DistributedData.png.md5  |     1 +
 .../Baseline/DistributedDataRenderPass.png.md5     |     1 +
 .../Testing/Data/Baseline/RectOutline.png.md5      |     1 +
 .../Testing/Data/Baseline/TestCutMaterial.png.md5  |     1 +
 .../Data/Baseline/TestExtractCTHPart.png.md5       |     1 +
 .../Testing/Data/Baseline/TestExtrudePiece.png.md5 |     1 +
 .../Data/Baseline/TestImageStreamer.png.md5        |     1 +
 .../Data/Baseline/TransmitImageData.png.md5        |     1 +
 .../Baseline/TransmitImageDataRenderPass.png.md5   |     1 +
 .../Baseline/TransmitImageDataRenderPass_1.png.md5 |     1 +
 .../Data/Baseline/TransmitRectilinearGrid.png.md5  |     1 +
 .../Data/Baseline/TransmitStructuredGrid.png.md5   |     1 +
 Filters/Parallel/Testing/Python/CMakeLists.txt     |    10 +-
 Filters/Parallel/Testing/Tcl/CMakeLists.txt        |    13 +-
 Filters/Parallel/module.cmake                      |     1 +
 Filters/Parallel/vtkExtractCTHPart.cxx             |  1566 +--
 Filters/Parallel/vtkExtractCTHPart.h               |   249 +-
 Filters/Parallel/vtkExtractPolyDataPiece.h         |     2 +-
 Filters/Parallel/vtkExtractUnstructuredGridPiece.h |     2 +-
 Filters/Parallel/vtkExtractUserDefinedPiece.cxx    |     6 +-
 Filters/Parallel/vtkPCellDataToPointData.h         |     2 +-
 Filters/Parallel/vtkPExtractArraysOverTime.cxx     |     2 +-
 Filters/Parallel/vtkPKdTree.cxx                    |     2 +-
 Filters/Parallel/vtkPLinearExtrusionFilter.h       |     2 +-
 Filters/Parallel/vtkPPolyDataNormals.h             |     2 +-
 Filters/Parallel/vtkPSphereSource.h                |     4 +-
 Filters/Parallel/vtkPTableToStructuredGrid.cxx     |   177 -
 Filters/Parallel/vtkPTableToStructuredGrid.h       |    62 -
 Filters/Parallel/vtkPassThroughFilter.h            |     4 +-
 Filters/Parallel/vtkPieceRequestFilter.h           |     2 +-
 Filters/Parallel/vtkPipelineSize.h                 |     2 +-
 Filters/Parallel/vtkRectilinearGridOutlineFilter.h |     4 +-
 .../ParallelFlowPaths/Testing/Cxx/CMakeLists.txt   |     9 +-
 .../Testing/Cxx/TestPParticleTracers.cxx           |   179 +-
 .../Testing/Data/Baseline/TestPStream.cxx.png.md5  |     1 +
 .../ParallelFlowPaths/vtkPParticlePathFilter.cxx   |     5 +-
 .../ParallelFlowPaths/vtkPParticleTracerBase.cxx   |   165 +-
 Filters/ParallelFlowPaths/vtkPParticleTracerBase.h |   118 +-
 Filters/ParallelFlowPaths/vtkPStreaklineFilter.cxx |     9 +-
 Filters/ParallelFlowPaths/vtkPStreamTracer.cxx     |     2 +-
 .../ParallelFlowPaths/vtkPTemporalStreamTracer.cxx |     2 +-
 Filters/ParallelGeometry/CMakeLists.txt            |     2 +
 .../ParallelGeometry/Testing/Cxx/CMakeLists.txt    |    16 +-
 .../Cxx/TestPUnstructuredGridConnectivity.cxx      |   141 +
 .../TestPUnstructuredGridGhostDataGenerator.cxx    |   125 +
 .../Testing/Cxx/UnstructuredGhostZonesCommon.h     |   389 +
 .../vtkPUnstructuredGridConnectivity.cxx           |  2059 +++
 .../vtkPUnstructuredGridConnectivity.h             |   294 +
 .../vtkPUnstructuredGridGhostDataGenerator.cxx     |   120 +
 .../vtkPUnstructuredGridGhostDataGenerator.h       |    75 +
 .../vtkMemoryLimitImageDataStreamer.h              |     2 +-
 .../ParallelStatistics/Testing/Cxx/CMakeLists.txt  |    17 +-
 Filters/Programmable/Testing/Cxx/CMakeLists.txt    |    27 +-
 .../Data/Baseline/MultidimensionalSolution.png.md5 |     1 +
 .../Baseline/MultidimensionalSolution_1.png.md5    |     1 +
 .../Baseline/MultidimensionalSolution_2.png.md5    |     1 +
 .../Data/Baseline/TestProgrammableGlyph.png.md5    |     1 +
 .../Testing/Data/Baseline/progGlyphs.png.md5       |     1 +
 .../Data/Baseline/progGlyphsBySource.png.md5       |     1 +
 .../Data/Baseline/progGlyphsBySource_1.png.md5     |     1 +
 .../Data/Baseline/progGlyphsBySource_2.png.md5     |     1 +
 .../Testing/Data/Baseline/progGlyphs_1.png.md5     |     1 +
 Filters/Programmable/Testing/Python/CMakeLists.txt |     8 +-
 Filters/Programmable/Testing/Tcl/CMakeLists.txt    |     9 +-
 Filters/ReebGraph/Testing/Cxx/CMakeLists.txt       |     4 +-
 Filters/SMP/CMakeLists.txt                         |    15 +
 Filters/SMP/Testing/Cxx/CMakeLists.txt             |     6 +
 Filters/SMP/Testing/Cxx/TestSMPContour.cxx         |   220 +
 Filters/SMP/Testing/Cxx/TestSMPTransform.cxx       |   136 +
 Filters/SMP/Testing/Cxx/TestSMPWarp.cxx            |   272 +
 Filters/SMP/module.cmake                           |    11 +
 Filters/SMP/vtkSMPContourGrid.cxx                  |   526 +
 Filters/SMP/vtkSMPContourGrid.h                    |    70 +
 Filters/SMP/vtkSMPContourGridManyPieces.cxx        |   346 +
 Filters/SMP/vtkSMPContourGridManyPieces.h          |    54 +
 Filters/SMP/vtkSMPMergePoints.cxx                  |   175 +
 Filters/SMP/vtkSMPMergePoints.h                    |   109 +
 Filters/SMP/vtkSMPMergePolyDataHelper.cxx          |   436 +
 Filters/SMP/vtkSMPMergePolyDataHelper.h            |    83 +
 Filters/SMP/vtkSMPTransform.cxx                    |   279 +
 Filters/SMP/vtkSMPTransform.h                      |    71 +
 Filters/SMP/vtkSMPWarpVector.cxx                   |   203 +
 Filters/SMP/vtkSMPWarpVector.h                     |    52 +
 Filters/Selection/Testing/Cxx/CMakeLists.txt       |    43 +-
 .../Testing/Cxx/TestCellDistanceSelector3D.cxx     |     2 +-
 .../Selection/Testing/Cxx/TestLinearSelector2D.cxx |     2 +-
 .../Selection/Testing/Cxx/TestLinearSelector3D.cxx |     2 +-
 Filters/Sources/Testing/Cxx/CMakeLists.txt         |    58 +-
 Filters/Sources/Testing/Cxx/TestArcSource.cxx      |    88 +
 Filters/Sources/Testing/Cxx/TestConeSource.cxx     |   102 +
 Filters/Sources/Testing/Cxx/TestCubeSource.cxx     |    93 +
 Filters/Sources/Testing/Cxx/TestCylinderSource.cxx |    87 +
 Filters/Sources/Testing/Cxx/TestDiskSource.cxx     |    84 +
 .../Testing/Cxx/TestEllipticalButtonSource.cxx     |   101 +
 Filters/Sources/Testing/Cxx/TestFrustumSource.cxx  |   105 +
 Filters/Sources/Testing/Cxx/TestGlyphSource2D.cxx  |    90 +
 Filters/Sources/Testing/Cxx/TestLineSource.cxx     |   157 +-
 .../Testing/Cxx/TestOutlineCornerSource.cxx        |   103 +
 Filters/Sources/Testing/Cxx/TestOutlineSource.cxx  |    95 +
 .../Testing/Cxx/TestParametricFunctionSource.cxx   |    91 +
 Filters/Sources/Testing/Cxx/TestPlaneSource.cxx    |    86 +
 .../Testing/Cxx/TestPlatonicSolidSource.cxx        |   139 +
 Filters/Sources/Testing/Cxx/TestPointSource.cxx    |    79 +
 .../Testing/Cxx/TestRectangularButtonSource.cxx    |   100 +
 .../Testing/Cxx/TestRegularPolygonSource.cxx       |    97 +
 Filters/Sources/Testing/Cxx/TestSphereSource.cxx   |    84 +
 .../Sources/Testing/Cxx/TestSuperquadricSource.cxx |    90 +
 Filters/Sources/Testing/Cxx/TestTextSource.cxx     |    54 +
 .../Testing/Cxx/TestTexturedSphereSource.cxx       |    66 +
 Filters/Sources/Testing/Data/Baseline/Disk.png.md5 |     1 +
 .../Sources/Testing/Data/Baseline/OSCone.png.md5   |     1 +
 .../Testing/Data/Baseline/TestButtonSource.png.md5 |     1 +
 .../Testing/Data/Baseline/TestMultiBlock.png.md5   |     1 +
 .../Data/Baseline/TestPlatonicSolids.png.md5       |     1 +
 .../Data/Baseline/TestRegularPolygonSource.png.md5 |     1 +
 .../Testing/Data/Baseline/coneResolution.png.md5   |     1 +
 .../Sources/Testing/Data/Baseline/glyph2D.png.md5  |     1 +
 .../Testing/Data/Baseline/squadViewer.png.md5      |     1 +
 Filters/Sources/Testing/Python/CMakeLists.txt      |    17 +-
 .../Sources/Testing/Python/TestPlatonicSolids.py   |     4 -
 Filters/Sources/Testing/Tcl/CMakeLists.txt         |     8 +-
 Filters/Sources/vtkArcSource.cxx                   |    17 +-
 Filters/Sources/vtkArcSource.h                     |    18 +-
 Filters/Sources/vtkArrowSource.h                   |     2 +-
 Filters/Sources/vtkConeSource.cxx                  |    15 +-
 Filters/Sources/vtkConeSource.h                    |     8 +
 Filters/Sources/vtkCubeSource.cxx                  |    16 +-
 Filters/Sources/vtkCubeSource.h                    |    12 +-
 Filters/Sources/vtkCylinderSource.cxx              |    13 +
 Filters/Sources/vtkCylinderSource.h                |    10 +-
 Filters/Sources/vtkDiskSource.cxx                  |    13 +
 Filters/Sources/vtkDiskSource.h                    |    10 +-
 Filters/Sources/vtkEllipticalButtonSource.cxx      |    14 +
 Filters/Sources/vtkEllipticalButtonSource.h        |    14 +-
 Filters/Sources/vtkFrustumSource.cxx               |    16 +-
 Filters/Sources/vtkFrustumSource.h                 |     8 +
 Filters/Sources/vtkGlyphSource2D.cxx               |    14 +
 Filters/Sources/vtkGlyphSource2D.h                 |    18 +-
 Filters/Sources/vtkHyperOctreeFractalSource.cxx    |     2 +-
 Filters/Sources/vtkHyperTreeGridSource.cxx         |     2 +-
 Filters/Sources/vtkLineSource.cxx                  |    14 +
 Filters/Sources/vtkLineSource.h                    |     8 +
 Filters/Sources/vtkOutlineCornerSource.cxx         |    14 +
 Filters/Sources/vtkOutlineCornerSource.h           |    10 +-
 Filters/Sources/vtkOutlineSource.cxx               |    16 +
 Filters/Sources/vtkOutlineSource.h                 |    12 +-
 Filters/Sources/vtkParametricFunctionSource.cxx    |    26 +-
 Filters/Sources/vtkParametricFunctionSource.h      |     8 +
 Filters/Sources/vtkPlaneSource.cxx                 |    14 +
 Filters/Sources/vtkPlaneSource.h                   |    10 +-
 Filters/Sources/vtkPlatonicSolidSource.cxx         |    16 +-
 Filters/Sources/vtkPlatonicSolidSource.h           |     8 +
 Filters/Sources/vtkPointSource.cxx                 |    13 +
 Filters/Sources/vtkPointSource.h                   |    10 +-
 Filters/Sources/vtkRectangularButtonSource.cxx     |    15 +
 Filters/Sources/vtkRectangularButtonSource.h       |     9 +
 Filters/Sources/vtkRegularPolygonSource.cxx        |    13 +
 Filters/Sources/vtkRegularPolygonSource.h          |     8 +
 Filters/Sources/vtkSphereSource.cxx                |    15 +
 Filters/Sources/vtkSphereSource.h                  |     8 +
 Filters/Sources/vtkSuperquadricSource.cxx          |    16 +-
 Filters/Sources/vtkSuperquadricSource.h            |    10 +-
 Filters/Sources/vtkTessellatedBoxSource.cxx        |    22 +-
 Filters/Sources/vtkTessellatedBoxSource.h          |     8 +
 Filters/Sources/vtkTextSource.cxx                  |    14 +
 Filters/Sources/vtkTextSource.h                    |     8 +
 Filters/Sources/vtkTexturedSphereSource.cxx        |    14 +
 Filters/Sources/vtkTexturedSphereSource.h          |    10 +-
 Filters/Statistics/CMakeLists.txt                  |     2 +
 Filters/Statistics/Testing/Cxx/CMakeLists.txt      |    20 +-
 .../Testing/Cxx/TestExtractFunctionalBagPlot.cxx   |   141 +
 .../Cxx/TestHighestDensityRegionsStatistics.cxx    |   119 +
 Filters/Statistics/Testing/Python/CMakeLists.txt   |     8 +-
 Filters/Statistics/vtkExtractFunctionalBagPlot.cxx |   207 +
 Filters/Statistics/vtkExtractFunctionalBagPlot.h   |    55 +
 .../vtkHighestDensityRegionsStatistics.cxx         |   314 +
 .../vtkHighestDensityRegionsStatistics.h           |   144 +
 .../Statistics/vtkMultiCorrelativeStatistics.cxx   |     6 +-
 Filters/StatisticsGnuR/CMakeLists.txt              |     2 +-
 .../Testing/Cxx/TestRCalculatorFilter.cxx          |     2 +-
 .../StatisticsGnuR/Testing/Cxx/TestRInterface.cxx  |    13 +-
 .../StatisticsGnuR/Testing/Python/CMakeLists.txt   |    10 +
 .../Testing/Python/TestRCalculatorFilter.py        |   245 +
 Filters/StatisticsGnuR/module.cmake                |     2 +
 Filters/StatisticsGnuR/vtkRAdapter.cxx             |    62 +-
 Filters/StatisticsGnuR/vtkRCalculatorFilter.cxx    |   296 +-
 Filters/StatisticsGnuR/vtkRCalculatorFilter.h      |    32 +
 .../Data/Baseline/AutomaticPlaneGeneration.png.md5 |     1 +
 .../Texture/Testing/Data/Baseline/cylMap.png.md5   |     1 +
 .../Texture/Testing/Data/Baseline/socbal.png.md5   |     1 +
 .../Texture/Testing/Data/Baseline/socbal_1.png.md5 |     1 +
 .../Testing/Data/Baseline/textureThreshold.png.md5 |     1 +
 .../Data/Baseline/textureThreshold_1.png.md5       |     1 +
 .../Data/Baseline/triangularTCoords.png.md5        |     1 +
 Filters/Texture/Testing/Python/CMakeLists.txt      |    13 +-
 Filters/Texture/Testing/Tcl/CMakeLists.txt         |    13 +-
 Filters/Texture/vtkTextureMapToCylinder.h          |     2 +-
 Filters/Texture/vtkTextureMapToPlane.h             |     2 +-
 Filters/Texture/vtkTextureMapToSphere.h            |     2 +-
 Filters/Texture/vtkThresholdTextureCoords.h        |     2 +-
 Filters/Texture/vtkTransformTextureCoords.h        |     2 +-
 Filters/Texture/vtkTriangularTCoords.h             |     4 +-
 Filters/Verdict/Testing/Cxx/CMakeLists.txt         |    25 +-
 Filters/Verdict/Testing/Cxx/MeshQuality.cxx        |     2 +-
 Filters/Verdict/Testing/Python/CMakeLists.txt      |     4 +-
 GUISupport/MFC/CMakeLists.txt                      |    10 +-
 GUISupport/Qt/CMakeLists.txt                       |    58 +-
 GUISupport/Qt/Q4VTKWidgetPlugin.cxx                |     2 +
 GUISupport/Qt/Q4VTKWidgetPlugin.h                  |    10 +-
 GUISupport/Qt/QVTKWidget.cxx                       |     4 +-
 GUISupport/Qt/QVTKWidget.h                         |     4 +-
 GUISupport/Qt/Testing/Cxx/CMakeLists.txt           |    40 +-
 GUISupport/Qt/Testing/Cxx/QTestApp.cxx             |    18 +
 GUISupport/Qt/Testing/Cxx/QTestApp.h               |     6 +
 GUISupport/Qt/module.cmake                         |     2 +
 GUISupport/Qt/vtkQtAbstractModelAdapter.h          |    10 +-
 GUISupport/Qt/vtkQtConnection.cxx                  |     4 +-
 GUISupport/Qt/vtkQtDebugLeaksView.cxx              |     1 +
 GUISupport/Qt/vtkQtTableModelAdapter.cxx           |     2 +-
 GUISupport/QtOpenGL/CMakeLists.txt                 |    23 +-
 GUISupport/QtOpenGL/QVTKGraphicsItem.cxx           |    11 +-
 GUISupport/QtOpenGL/QVTKGraphicsItem.h             |     4 +-
 GUISupport/QtSQL/CMakeLists.txt                    |    27 +-
 GUISupport/QtSQL/Testing/Cxx/TestQtSQLDatabase.cxx |    20 +-
 GUISupport/QtSQL/module.cmake                      |     2 +
 GUISupport/QtSQL/vtkQtSQLDatabase.cxx              |     8 +-
 GUISupport/QtSQL/vtkQtSQLQuery.cxx                 |    14 +-
 GUISupport/QtSQL/vtkQtTimePointUtility.h           |     4 +-
 GUISupport/QtWebkit/CMakeLists.txt                 |    40 +-
 GUISupport/QtWebkit/vtkQtRichTextView.cxx          |     2 -
 Geovis/Core/Testing/Cxx/CMakeLists.txt             |    25 +-
 .../TestCoincidentGeoGraphRepresentation2D.png.md5 |     1 +
 .../Data/Baseline/TestGeoAssignCoordinates.png.md5 |     1 +
 .../Testing/Data/Baseline/TestGeoGraticule.png.md5 |     1 +
 .../Testing/Data/Baseline/TestGlobeSource.png.md5  |     1 +
 .../Data/Baseline/TestLabeledGeoView2D.png.md5     |     1 +
 .../Data/Baseline/TestLabeledGeoView2D_1.png.md5   |     1 +
 .../Data/Baseline/TestLabeledGeoView2D_2.png.md5   |     1 +
 .../Data/Baseline/TestLabeledGeoView2D_3.png.md5   |     1 +
 .../Data/Baseline/TestLabeledGeoView2D_4.png.md5   |     1 +
 .../Data/Baseline/TestLabeledGeoView2D_5.png.md5   |     1 +
 Geovis/Core/vtkGeoAlignedImageSource.cxx           |     2 +-
 IO/AMR/Testing/Cxx/CMakeLists.txt                  |    29 +-
 IO/AMR/module.cmake                                |     6 +-
 IO/AMR/vtkAMREnzoReaderInternal.cxx                |    46 +
 IO/AMR/vtkAMREnzoReaderInternal.h                  |     5 +
 IO/Core/Testing/Cxx/CMakeLists.txt                 |    26 +-
 IO/Core/Testing/Python/CMakeLists.txt              |     6 +-
 IO/Core/Testing/Tcl/CMakeLists.txt                 |     7 +-
 IO/Core/module.cmake                               |     1 +
 IO/Core/vtkBase64Utilities.cxx                     |     2 +-
 IO/Core/vtkBase64Utilities.h                       |     4 +-
 IO/Core/vtkSortFileNames.cxx                       |    12 +-
 IO/Core/vtkTextCodec.cxx                           |     1 -
 IO/Core/vtkTextCodecFactory.cxx                    |     6 +-
 IO/Core/vtkWriter.cxx                              |     5 -
 .../Testing/Data/Baseline/EnSight6Elements.png.md5 |     1 +
 .../Data/Baseline/EnSight6OfficeBin.png.md5        |     1 +
 .../Data/Baseline/EnSightBlow1ASCII.png.md5        |     1 +
 .../Testing/Data/Baseline/EnSightBlow1Bin.png.md5  |     1 +
 .../Data/Baseline/EnSightBlow2ASCII.png.md5        |     1 +
 .../Testing/Data/Baseline/EnSightBlow2Bin.png.md5  |     1 +
 .../Testing/Data/Baseline/EnSightBlow3Bin.png.md5  |     1 +
 .../Testing/Data/Baseline/EnSightBlow4Bin.png.md5  |     1 +
 .../Data/Baseline/EnSightBlow5ASCII.png.md5        |     1 +
 .../Testing/Data/Baseline/EnSightCompound.png.md5  |     1 +
 .../Data/Baseline/EnSightGoldElements.png.md5      |     1 +
 .../Data/Baseline/EnSightIronProtASCII.png.md5     |     1 +
 .../Data/Baseline/EnSightIronProtBin.png.md5       |     1 +
 .../Data/Baseline/EnSightMandelbrot.png.md5        |     1 +
 .../Data/Baseline/EnSightNfacedASCII.png.md5       |     1 +
 .../Testing/Data/Baseline/EnSightNfacedBin.png.md5 |     1 +
 .../Data/Baseline/EnSightOfficeASCII.png.md5       |     1 +
 .../Testing/Data/Baseline/EnSightOfficeBin.png.md5 |     1 +
 .../Data/Baseline/EnSightRectGridASCII.png.md5     |     1 +
 .../Data/Baseline/EnSightRectGridASCII_1.png.md5   |     1 +
 .../Data/Baseline/EnSightRectGridBin.png.md5       |     1 +
 .../Data/Baseline/EnSightRectGridBin_1.png.md5     |     1 +
 .../Data/Baseline/EnSightSelectArrays.png.md5      |     1 +
 .../Testing/Data/Baseline/nacaBinary.png.md5       |     1 +
 IO/EnSight/Testing/Python/CMakeLists.txt           |    46 +-
 IO/EnSight/Testing/Tcl/CMakeLists.txt              |    58 +-
 IO/EnSight/vtkEnSight6Reader.cxx                   |     2 +-
 IO/EnSight/vtkEnSightReader.cxx                    |     1 -
 IO/EnSight/vtkGenericEnSightReader.cxx             |     2 +-
 IO/Exodus/CMakeLists.txt                           |    13 +
 IO/Exodus/Testing/Cxx/CMakeLists.txt               |    28 +-
 IO/Exodus/Testing/Cxx/TestExodusAttributes.cxx     |    75 +
 IO/Exodus/Testing/Cxx/TestExodusSideSets.cxx       |    94 +
 IO/Exodus/Testing/Cxx/TestInSituExodus.cxx         |  1010 ++
 .../Baseline/TestMultiBlockExodusWrite.png.md5     |     1 +
 IO/Exodus/module.cmake                             |     2 +
 IO/Exodus/vtkCPExodusIIElementBlock.cxx            |   222 +
 IO/Exodus/vtkCPExodusIIElementBlock.h              |   113 +
 .../vtkCPExodusIIElementBlockCellIterator.cxx      |   113 +
 IO/Exodus/vtkCPExodusIIElementBlockCellIterator.h  |    64 +
 IO/Exodus/vtkCPExodusIIInSituReader.cxx            |   442 +
 IO/Exodus/vtkCPExodusIIInSituReader.h              |   112 +
 IO/Exodus/vtkCPExodusIINodalCoordinatesTemplate.h  |   124 +
 .../vtkCPExodusIINodalCoordinatesTemplate.txx      |   509 +
 IO/Exodus/vtkCPExodusIIResultsArrayTemplate.h      |   114 +
 IO/Exodus/vtkCPExodusIIResultsArrayTemplate.txx    |   501 +
 IO/Exodus/vtkExodusIICache.h                       |     8 +
 IO/Exodus/vtkExodusIIReader.cxx                    |   221 +-
 IO/Exodus/vtkExodusIIReader.h                      |    24 +
 IO/Exodus/vtkExodusIIReaderPrivate.h               |     1 +
 IO/Export/Testing/Cxx/CMakeLists.txt               |    60 +-
 IO/Export/Testing/Cxx/TestContextGL2PS.cxx         |     2 +-
 .../Testing/Cxx/TestGL2PSLabeledDataMapper.cxx     |   156 +
 IO/Export/Testing/Cxx/TestLinePlotGL2PS.cxx        |   124 +
 IO/Export/Testing/Cxx/TestStackedPlotGL2PS.cxx     |     2 +-
 .../Baseline/TestContextGL2PS-rasterRef.png.md5    |     1 +
 .../Baseline/TestContextGL2PS-rasterRef_1.png.md5  |     1 +
 ...L2PSExporterMultipleRenderers-rasterRef.png.md5 |     1 +
 ...PSExporterMultipleRenderers-rasterRef_1.png.md5 |     1 +
 .../TestGL2PSExporterMultipleRenderers.png.md5     |     1 +
 .../TestGL2PSExporterRaster-rasterRef.png.md5      |     1 +
 .../TestGL2PSExporterRaster-rasterRef_1.png.md5    |     1 +
 .../TestGL2PSExporterRaster-rasterRef_2.png.md5    |     1 +
 .../TestGL2PSExporterRaster-rasterRef_3.png.md5    |     1 +
 ...tGL2PSExporterRasterExclusion-rasterRef.png.md5 |     1 +
 ...L2PSExporterRasterExclusion-rasterRef_1.png.md5 |     1 +
 ...L2PSExporterRasterExclusion-rasterRef_2.png.md5 |     1 +
 .../TestGL2PSExporterRasterExclusion.png.md5       |     1 +
 .../TestGL2PSExporterVector-rasterRef.png.md5      |     1 +
 .../TestGL2PSExporterVector-rasterRef_1.png.md5    |     1 +
 .../TestGL2PSExporterVector-rasterRef_2.png.md5    |     1 +
 .../TestGL2PSExporterVector-rasterRef_3.png.md5    |     1 +
 ...TestGL2PSExporterVolumeRaster-rasterRef.png.md5 |     1 +
 ...stGL2PSExporterVolumeRaster-rasterRef_1.png.md5 |     1 +
 ...stGL2PSExporterVolumeRaster-rasterRef_2.png.md5 |     1 +
 ...stGL2PSExporterVolumeRaster-rasterRef_3.png.md5 |     1 +
 ...stGL2PSExporterVolumeRaster-rasterRef_4.png.md5 |     1 +
 .../TestGL2PSLabeledDataMapper-rasterRef.png.md5   |     1 +
 .../Baseline/TestGL2PSLabeledDataMapper.png.md5    |     1 +
 .../TestGL2PSTextActor3D-rasterRef.png.md5         |     1 +
 .../Data/Baseline/TestGL2PSTextActor3D.png.md5     |     1 +
 .../Baseline/TestLinePlotGL2PS-rasterRef.png.md5   |     1 +
 .../Data/Baseline/TestLinePlotGL2PS.png.md5        |     1 +
 .../Data/Baseline/TestLinePlotGL2PS_1.png.md5      |     1 +
 .../TestStackedPlotGL2PS-rasterRef.png.md5         |     1 +
 .../TestStackedPlotGL2PS-rasterRef_1.png.md5       |     1 +
 .../Data/Baseline/TestStackedPlotGL2PS.png.md5     |     1 +
 IO/Export/module.cmake                             |     7 +-
 IO/Export/vtkGL2PSExporter.cxx                     |    93 +-
 IO/Export/vtkGL2PSExporter.h                       |    16 +
 IO/Export/vtkX3DExporterWriter.h                   |     2 +-
 IO/FFMPEG/CMakeLists.txt                           |     2 +-
 IO/FFMPEG/Testing/Cxx/CMakeLists.txt               |    30 +-
 IO/FFMPEG/vtkFFMPEGConfig.h.in                     |     2 +-
 IO/FFMPEG/vtkFFMPEGWriter.cxx                      |    24 +-
 IO/GDAL/CMakeLists.txt                             |     2 +-
 IO/GDAL/Testing/Cxx/CMakeLists.txt                 |    27 +-
 IO/GDAL/Testing/Cxx/TestGDALVectorReader.cxx       |    31 +
 .../Data/Baseline/TestGDALVectorReader.png.md5     |     1 +
 IO/GDAL/vtkGDALVectorReader.cxx                    |    44 +-
 IO/GDAL/vtkGDALVectorReader.h                      |    23 +-
 IO/GeoJSON/CMakeLists.txt                          |     5 +
 IO/GeoJSON/Testing/Python/CMakeLists.txt           |     1 +
 IO/GeoJSON/Testing/Python/TestGeoJSONWriter.py     |    54 +
 IO/GeoJSON/module.cmake                            |     7 +
 IO/GeoJSON/vtkGeoJSONWriter.cxx                    |   433 +
 IO/GeoJSON/vtkGeoJSONWriter.h                      |   113 +
 IO/Geometry/CMakeLists.txt                         |     1 -
 IO/Geometry/Testing/Cxx/CMakeLists.txt             |    82 +-
 IO/Geometry/Testing/Cxx/TestMFIXReader.cxx         |   183 +
 IO/Geometry/Testing/Cxx/TestOpenFOAMReader.cxx     |   138 +
 IO/Geometry/Testing/Cxx/TestWindBladeReader.cxx    |     2 +-
 .../Testing/Data/Baseline/ParticleReader.png.md5   |     1 +
 .../Testing/Data/Baseline/Plot3DScalars.png.md5    |     1 +
 .../Testing/Data/Baseline/Plot3DScalars_1.png.md5  |     1 +
 .../Testing/Data/Baseline/Plot3DScalars_2.png.md5  |     1 +
 .../Testing/Data/Baseline/Plot3DScalars_3.png.md5  |     1 +
 .../Testing/Data/Baseline/Plot3DVectors.png.md5    |     1 +
 .../Testing/Data/Baseline/TestAVSucdReader.png.md5 |     1 +
 .../Testing/Data/Baseline/TestChacoReader.png.md5  |     1 +
 .../Testing/Data/Baseline/TestGAMBITReader.png.md5 |     1 +
 .../Data/Baseline/TestOpenFOAMReader.png.md5       |     1 +
 .../Testing/Data/Baseline/TestPlot3DMeta.png.md5   |     1 +
 .../Data/Baseline/TestPolygonWriters.png.md5       |     1 +
 .../Data/Baseline/TestProStarReader.png.md5        |     1 +
 .../Data/Baseline/TestSimplePointsReader.png.md5   |     1 +
 .../Data/Baseline/TestTecplotReader.png.md5        |     1 +
 .../Data/Baseline/TestTecplotReader_1.png.md5      |     1 +
 .../Data/Baseline/TestWindBladeReader.png.md5      |     1 +
 .../Data/Baseline/TestWindBladeReader_1.png.md5    |     1 +
 .../Testing/Data/Baseline/TestXYZMol.png.md5       |     1 +
 IO/Geometry/Testing/Data/Baseline/caffeine.png.md5 |     1 +
 IO/Geometry/Testing/Data/Baseline/motor.png.md5    |     1 +
 .../Testing/Data/Baseline/testHexaPenta.png.md5    |     1 +
 .../Testing/Data/Baseline/testHexaPenta_1.png.md5  |     1 +
 .../Testing/Data/Baseline/testHexaPenta_2.png.md5  |     1 +
 IO/Geometry/Testing/Python/CMakeLists.txt          |    30 +-
 IO/Geometry/Testing/Tcl/CMakeLists.txt             |    26 +-
 IO/Geometry/module.cmake                           |     2 +
 IO/Geometry/vtkChacoReader.cxx                     |    16 +-
 IO/Geometry/vtkFLUENTReader.cxx                    |     2 +-
 IO/Geometry/vtkFLUENTReader.h                      |     2 +-
 IO/Geometry/vtkMFIXReader.cxx                      |    41 +-
 IO/Geometry/vtkMoleculeReaderBase.cxx              |     7 +-
 IO/Geometry/vtkMultiBlockPLOT3DReader.cxx          |    12 +-
 IO/Geometry/vtkMultiBlockPLOT3DReader.h            |     5 +
 IO/Geometry/vtkOpenFOAMReader.cxx                  |    11 -
 IO/Geometry/vtkPlot3DMetaReader.cxx                |     8 +-
 IO/Geometry/vtkSESAMEReader.cxx                    |   656 -
 IO/Geometry/vtkSESAMEReader.h                      |   120 -
 IO/Geometry/vtkWindBladeReader.cxx                 |     4 +-
 IO/Geometry/vtkWindBladeReader.h                   |     4 +-
 IO/Image/Testing/Cxx/CMakeLists.txt                |    45 +-
 .../Testing/Data/Baseline/TestBMPReader.png.md5    |     1 +
 .../Testing/Data/Baseline/TestImageWriters.png.md5 |     1 +
 .../Testing/Data/Baseline/TestJPEGReader.png.md5   |     1 +
 IO/Image/Testing/Data/Baseline/TestMHD.png.md5     |     1 +
 .../Testing/Data/Baseline/TestMetaImage2D.png.md5  |     1 +
 .../Testing/Data/Baseline/TestNrrdReader.png.md5   |     1 +
 .../Testing/Data/Baseline/TestSetFileNames.png.md5 |     1 +
 .../Testing/Data/Baseline/TestTIFFReader.png.md5   |     1 +
 IO/Image/Testing/Data/Baseline/dem.png.md5         |     1 +
 IO/Image/Testing/Python/CMakeLists.txt             |    18 +-
 IO/Image/Testing/Python/TestSetFileNames.py        |     2 +-
 IO/Image/Testing/Tcl/CMakeLists.txt                |    18 +-
 IO/Image/Testing/Tcl/TestSetFileNames.tcl          |     2 +-
 IO/Image/module.cmake                              |     2 +
 IO/Image/vtkBMPWriter.h                            |     2 +-
 IO/Image/vtkGESignaReader.h                        |     4 +-
 IO/Image/vtkImageImport.cxx                        |    17 +
 IO/Image/vtkImageImport.h                          |     8 +
 IO/Image/vtkImageImportExecutive.h                 |     4 +-
 IO/Image/vtkImageReader2Collection.h               |     4 +-
 IO/Image/vtkImageWriter.h                          |     4 +-
 IO/Image/vtkJPEGReader.h                           |     4 +-
 IO/Image/vtkJPEGWriter.cxx                         |    21 +-
 IO/Image/vtkMedicalImageProperties.cxx             |     9 +-
 IO/Image/vtkPNGReader.h                            |     4 +-
 IO/Image/vtkPNGWriter.cxx                          |     3 +
 IO/Image/vtkPNGWriter.h                            |    10 +
 IO/Image/vtkPNMReader.cxx                          |     4 +-
 IO/Image/vtkPNMReader.h                            |     4 +-
 IO/Image/vtkPNMWriter.h                            |     4 +-
 IO/Image/vtkPostScriptWriter.h                     |     4 +-
 IO/Image/vtkTIFFReader.cxx                         |    55 +-
 IO/Image/vtkTIFFReader.h                           |     4 +-
 IO/Import/Testing/Cxx/CMakeLists.txt               |    29 +-
 IO/Import/Testing/Cxx/TestVRMLNormals.cxx          |     6 +-
 .../Testing/Data/Baseline/TestVRMLNormals.png.md5  |     1 +
 .../Testing/Data/Baseline/VRMLImporter.png.md5     |     1 +
 IO/Import/Testing/Python/CMakeLists.txt            |     2 +-
 IO/Import/Testing/Tcl/CMakeLists.txt               |     4 +-
 IO/Import/module.cmake                             |     2 +
 IO/Import/vtk3DSImporter.cxx                       |    24 +-
 IO/Import/vtkImporter.h                            |    10 +-
 IO/Import/vtkVRMLImporter.cxx                      |   142 +-
 IO/Import/vtkVRMLImporter.h                        |     8 +-
 IO/Infovis/CMakeLists.txt                          |     1 +
 IO/Infovis/Testing/Cxx/CMakeLists.txt              |    45 +-
 .../Testing/Cxx/TestDelimitedTextReader2.cxx       |    82 +
 IO/Infovis/Testing/Cxx/TestNewickTreeReader.cxx    |    64 +-
 IO/Infovis/Testing/Cxx/TestNewickTreeWriter.cxx    |   124 +
 IO/Infovis/module.cmake                            |     2 +
 IO/Infovis/vtkDIMACSGraphWriter.h                  |     4 +-
 IO/Infovis/vtkDelimitedTextReader.cxx              |   104 +-
 IO/Infovis/vtkDelimitedTextReader.h                |    22 +
 IO/Infovis/vtkNewickTreeReader.cxx                 |   139 +-
 IO/Infovis/vtkNewickTreeReader.h                   |     4 +-
 IO/Infovis/vtkNewickTreeWriter.cxx                 |   145 +
 IO/Infovis/vtkNewickTreeWriter.h                   |    83 +
 IO/Infovis/vtkTulipReader.cxx                      |     2 +-
 IO/Infovis/vtkXGMLReader.cxx                       |     2 +-
 IO/Infovis/vtkXMLTreeReader.cxx                    |     2 +-
 IO/LSDyna/Testing/Cxx/CMakeLists.txt               |    40 +-
 IO/LSDyna/Testing/Cxx/TestLSDynaReader.cxx         |     2 +-
 IO/LSDyna/Testing/Cxx/TestLSDynaReaderSPH.cxx      |     2 +-
 .../Testing/Data/Baseline/TestLSDynaReader.png.md5 |     1 +
 .../Data/Baseline/TestLSDynaReaderSPH.png.md5      |     1 +
 IO/LSDyna/module.cmake                             |     2 +
 IO/LSDyna/private/LSDynaFamily.cxx                 |     6 +-
 IO/LSDyna/vtkLSDynaReader.cxx                      |     2 +-
 IO/Legacy/CMakeLists.txt                           |     6 +
 IO/Legacy/Testing/Cxx/CMakeLists.txt               |    25 +-
 IO/Legacy/module.cmake                             |     2 +
 IO/Legacy/vtkCompositeDataReader.cxx               |    17 +-
 IO/Legacy/vtkCompositeDataWriter.cxx               |    21 +-
 IO/Legacy/vtkDataReader.cxx                        |     2 +-
 IO/Legacy/vtkDataSetWriter.h                       |     4 +-
 IO/Legacy/vtkDataWriter.cxx                        |     4 +-
 IO/Legacy/vtkGraphWriter.h                         |     4 +-
 IO/Legacy/vtkPixelExtentIO.cxx                     |   200 +
 IO/Legacy/vtkPixelExtentIO.h                       |    75 +
 IO/Legacy/vtkPolyDataWriter.h                      |     4 +-
 IO/Legacy/vtkRectilinearGridWriter.h               |     4 +-
 IO/Legacy/vtkStructuredGridWriter.h                |     4 +-
 IO/Legacy/vtkStructuredPointsWriter.h              |     4 +-
 IO/Legacy/vtkTableWriter.h                         |     4 +-
 IO/Legacy/vtkTreeReader.cxx                        |     2 +-
 IO/Legacy/vtkTreeWriter.cxx                        |    16 +-
 IO/Legacy/vtkTreeWriter.h                          |     6 +-
 IO/Legacy/vtkUnstructuredGridWriter.h              |     4 +-
 .../Data/Baseline/TestMINCImageReader.png.md5      |     1 +
 .../Data/Baseline/TestMINCImageWriter.png.md5      |     1 +
 .../Testing/Data/Baseline/TestMNIObjects.png.md5   |     1 +
 .../Testing/Data/Baseline/TestMNITagPoints.png.md5 |     1 +
 .../Data/Baseline/TestMNITransforms.png.md5        |     1 +
 IO/MINC/Testing/Python/CMakeLists.txt              |    10 +-
 IO/MINC/Testing/Tcl/CMakeLists.txt                 |    12 +-
 IO/MINC/module.cmake                               |     3 +
 IO/MINC/vtkMNITagPointWriter.h                     |     2 +-
 IO/MPIImage/CMakeLists.txt                         |     5 +-
 IO/MPIImage/Testing/Cxx/CMakeLists.txt             |    26 +-
 .../Testing/Data/Baseline/ParallelIso.cxx.png.md5  |     1 +
 IO/MPIImage/module.cmake                           |     2 +
 IO/MPIParallel/CMakeLists.txt                      |     8 +-
 IO/MPIParallel/Testing/Cxx/CMakeLists.txt          |    17 +-
 .../Testing/Cxx/TestPWindBladeReader.cxx           |     2 +-
 .../Data/Baseline/TestWindBladeReader.png.md5      |     1 +
 .../Data/Baseline/TestWindBladeReader_1.png.md5    |     1 +
 IO/MPIParallel/module.cmake                        |     2 +
 IO/Movie/CMakeLists.txt                            |     2 +-
 IO/Movie/Testing/Cxx/CMakeLists.txt                |    26 +-
 IO/Movie/vtkOggTheoraWriter.cxx                    |     3 +-
 IO/MySQL/CMakeLists.txt                            |     2 +-
 IO/MySQL/Testing/Cxx/CMakeLists.txt                |    33 +-
 IO/MySQL/Testing/Data/Input/simple_table.vtk.md5   |     1 +
 .../Testing/Data/Input/simple_table_win.vtk.md5    |     1 +
 IO/MySQL/module.cmake                              |     2 +
 IO/MySQL/vtkMySQLDatabase.cxx                      |     2 +-
 IO/MySQL/vtkMySQLQuery.cxx                         |    13 +-
 IO/NetCDF/CMakeLists.txt                           |     3 -
 IO/NetCDF/Testing/Cxx/CMakeLists.txt               |    37 +-
 IO/NetCDF/Testing/Cxx/TestMPASReader.cxx           |     2 +-
 IO/NetCDF/Testing/Cxx/TestNetCDFCAMReader.cxx      |     2 +-
 IO/NetCDF/Testing/Cxx/TestNetCDFPOPReader.cxx      |     2 +-
 .../Testing/Data/Baseline/NetCDFCF2DBounds.png.md5 |     1 +
 .../Data/Baseline/NetCDFCFSetOutputType.png.md5    |     1 +
 .../Data/Baseline/NetCDFCFSphericalCoords.png.md5  |     1 +
 .../Data/Baseline/NetCDFCFUnstructured.png.md5     |     1 +
 .../Testing/Data/Baseline/NetCDFReader.png.md5     |     1 +
 .../Data/Baseline/SLACMultipleModes.png.md5        |     1 +
 .../Data/Baseline/SLACParticleReader.png.md5       |     1 +
 .../Testing/Data/Baseline/SLACReaderLinear.png.md5 |     1 +
 .../Data/Baseline/SLACReaderQuadratic.png.md5      |     1 +
 .../Testing/Data/Baseline/TestMPASReader.png.md5   |     1 +
 .../Data/Baseline/TestNetCDFCAMReader.png.md5      |     1 +
 .../Data/Baseline/TestNetCDFPOPReader.png.md5      |     1 +
 IO/NetCDF/Testing/Python/CMakeLists.txt            |    12 +-
 IO/NetCDF/Testing/Tcl/CMakeLists.txt               |    12 +-
 IO/NetCDF/module.cmake                             |     3 +-
 IO/NetCDF/vtkMPASReader.cxx                        |   184 +-
 IO/NetCDF/vtkMPASReader.h                          |     1 +
 IO/NetCDF/vtkNetCDFCFReader.cxx                    |    73 +-
 IO/NetCDF/vtkNetCDFCFReader.h                      |     2 +
 IO/NetCDF/vtkNetCDFReader.cxx                      |    97 +
 IO/NetCDF/vtkNetCDFReader.h                        |    17 +-
 IO/ODBC/CMakeLists.txt                             |     2 +-
 IO/ODBC/Testing/Cxx/CMakeLists.txt                 |    25 +-
 IO/ODBC/module.cmake                               |     2 +
 IO/ODBC/vtkODBCDatabase.cxx                        |     2 +-
 IO/ODBC/vtkODBCQuery.cxx                           |     2 +-
 IO/PLY/Testing/Cxx/CMakeLists.txt                  |    33 +-
 .../Testing/Data/Baseline/TestPLYReadWrite.png.md5 |     1 +
 IO/PLY/Testing/Data/Baseline/TestPLYReader.png.md5 |     1 +
 IO/PLY/Testing/Python/CMakeLists.txt               |     2 +-
 IO/PLY/Testing/Tcl/CMakeLists.txt                  |     4 +-
 IO/PLY/vtkPLY.cxx                                  |     4 +-
 IO/PLY/vtkPLYWriter.cxx                            |     4 +-
 IO/Parallel/Testing/Cxx/CMakeLists.txt             |    33 +-
 IO/Parallel/Testing/Cxx/TestPOpenFOAMReader.cxx    |   139 +
 .../Data/Baseline/PSLACReaderLinear.png.md5        |     1 +
 .../Data/Baseline/PSLACReaderQuadratic.png.md5     |     1 +
 .../Data/Baseline/TestPDataSetReaderGrid.png.md5   |     1 +
 .../Testing/Data/Baseline/TestPImageWriter.png.md5 |     1 +
 .../Data/Baseline/TestPOpenFOAMReader.png.md5      |     1 +
 IO/Parallel/Testing/Python/CMakeLists.txt          |     4 +-
 IO/Parallel/Testing/Tcl/CMakeLists.txt             |     6 +-
 IO/Parallel/module.cmake                           |     3 +
 IO/Parallel/vtkXMLPUniformGridAMRWriter.cxx        |     2 +-
 IO/ParallelExodus/Testing/Cxx/CMakeLists.txt       |     3 +-
 IO/ParallelExodus/module.cmake                     |     2 +
 IO/ParallelExodus/vtkPExodusIIReader.cxx           |    21 +-
 IO/ParallelLSDyna/Testing/Cxx/CMakeLists.txt       |     9 +-
 .../Testing/Data/Baseline/PLSDynaReader.png.md5    |     1 +
 IO/ParallelNetCDF/CMakeLists.txt                   |     7 +-
 IO/ParallelNetCDF/Testing/Cxx/CMakeLists.txt       |    23 +-
 .../Testing/Cxx/TestPNetCDFPOPReader.cxx           |     2 +-
 .../Data/Baseline/TestPNetCDFPOPReader.png.md5     |     1 +
 IO/ParallelNetCDF/module.cmake                     |     2 +-
 IO/PostgreSQL/CMakeLists.txt                       |     2 +-
 IO/PostgreSQL/Testing/Cxx/CMakeLists.txt           |    34 +-
 .../Testing/Data/Input/simple_table.vtk.md5        |     1 +
 .../Testing/Data/Input/simple_table_win.vtk.md5    |     1 +
 IO/PostgreSQL/module.cmake                         |     2 +
 IO/PostgreSQL/vtkPostgreSQLDatabase.cxx            |     2 +-
 IO/SQL/Testing/Cxx/CMakeLists.txt                  |    35 +-
 IO/SQL/Testing/Data/Input/simple_table.vtk.md5     |     1 +
 IO/SQL/module.cmake                                |     2 +
 IO/SQL/vtkSQLiteQuery.cxx                          |     2 +-
 IO/VPIC/vtkVPICReader.cxx                          |    18 +-
 IO/Video/CMakeLists.txt                            |     2 +-
 IO/Video/module.cmake                              |     2 +
 IO/XML/Testing/Cxx/CMakeLists.txt                  |    49 +-
 .../Data/Baseline/TestHyperOctreeIO.png.md5        |     1 +
 .../Data/Baseline/TestXMLImageDataIO.png.md5       |     1 +
 .../Data/Baseline/TestXMLPolyDataIO.png.md5        |     1 +
 .../Data/Baseline/TestXMLRectilinearGridIO.png.md5 |     1 +
 .../Data/Baseline/TestXMLStructuredGridIO.png.md5  |     1 +
 .../Baseline/TestXMLUnstructuredGridIO.png.md5     |     1 +
 .../Baseline/TestXMLUnstructuredGridReader.png.md5 |     1 +
 IO/XML/Testing/Python/CMakeLists.txt               |    14 +-
 IO/XML/Testing/Tcl/CMakeLists.txt                  |    10 +-
 IO/XML/module.cmake                                |     3 +
 IO/XML/vtkXMLDataReader.cxx                        |     2 +-
 IO/XML/vtkXMLGenericDataObjectReader.cxx           |     6 +-
 IO/XML/vtkXMLOffsetsManager.h                      |     5 +-
 IO/XML/vtkXMLPDataWriter.cxx                       |    11 +-
 IO/XML/vtkXMLPStructuredGridWriter.h               |     5 +-
 IO/XML/vtkXMLPUnstructuredGridWriter.cxx           |     6 +-
 IO/XML/vtkXMLPUnstructuredGridWriter.h             |     4 +-
 IO/XML/vtkXMLPolyDataReader.cxx                    |     2 +-
 IO/XML/vtkXMLReader.cxx                            |    10 +-
 IO/XML/vtkXMLReader.h                              |     4 +-
 IO/XML/vtkXMLUniformGridAMRWriter.cxx              |     2 +-
 IO/XML/vtkXMLUnstructuredDataReader.cxx            |     2 +-
 IO/XML/vtkXMLUnstructuredDataWriter.cxx            |   102 +-
 IO/XML/vtkXMLUnstructuredDataWriter.h              |    22 +
 IO/XML/vtkXMLUnstructuredGridReader.cxx            |     2 +-
 IO/XML/vtkXMLUnstructuredGridWriter.cxx            |    95 +-
 IO/XML/vtkXMLUnstructuredGridWriter.h              |     4 +-
 IO/XML/vtkXMLWriter.cxx                            |   135 +-
 IO/XML/vtkXMLWriter.h                              |     6 +-
 IO/XMLParser/module.cmake                          |     1 +
 IO/XMLParser/vtkXMLDataParser.cxx                  |     4 +-
 IO/XMLParser/vtkXMLUtilities.h                     |     4 +-
 IO/Xdmf2/module.cmake                              |     2 +
 IO/Xdmf2/vtkXdmfHeavyData.cxx                      |     2 +-
 IO/Xdmf2/vtkXdmfReaderInternal.h                   |     2 +-
 Imaging/Color/vtkImageHSIToRGB.cxx                 |     2 +-
 Imaging/Color/vtkImageHSIToRGB.h                   |     2 +-
 Imaging/Color/vtkImageHSVToRGB.cxx                 |     2 +-
 Imaging/Color/vtkImageHSVToRGB.h                   |     2 +-
 Imaging/Color/vtkImageLuminance.cxx                |     2 +-
 Imaging/Color/vtkImageLuminance.h                  |     2 +-
 Imaging/Color/vtkImageMapToRGBA.h                  |     4 +-
 Imaging/Color/vtkImageQuantizeRGBToIndex.cxx       |    10 +-
 Imaging/Color/vtkImageRGBToHSI.cxx                 |     2 +-
 Imaging/Color/vtkImageRGBToHSI.h                   |     2 +-
 Imaging/Color/vtkImageRGBToHSV.cxx                 |     2 +-
 Imaging/Color/vtkImageRGBToHSV.h                   |     2 +-
 Imaging/Core/Testing/Cxx/CMakeLists.txt            |    88 +-
 Imaging/Core/Testing/Cxx/FastSplatter.cxx          |    16 +-
 Imaging/Core/Testing/Cxx/ImageAccumulate.cxx       |     1 -
 Imaging/Core/Testing/Cxx/ImageAutoRange.cxx        |    45 +-
 .../Core/Testing/Cxx/ImageBSplineCoefficients.cxx  |    59 +-
 Imaging/Core/Testing/Cxx/ImageHistogram.cxx        |    48 +-
 Imaging/Core/Testing/Cxx/ImageResize.cxx           |    44 +-
 Imaging/Core/Testing/Cxx/ImageResize3D.cxx         |    49 +-
 Imaging/Core/Testing/Cxx/ImageResizeCropping.cxx   |    57 +-
 Imaging/Core/Testing/Cxx/ImageWeightedSum.cxx      |    33 +-
 Imaging/Core/Testing/Cxx/ImportExport.cxx          |    23 +-
 .../Data/Baseline/ContinuousClose2D.png.md5        |     1 +
 .../Testing/Data/Baseline/FastSplatter.png.md5     |     1 +
 .../Testing/Data/Baseline/IdealHighPass.png.md5    |     1 +
 .../Testing/Data/Baseline/ImageAutoRange.png.md5   |     1 +
 .../Data/Baseline/ImageBSplineCoefficients.png.md5 |     1 +
 .../Testing/Data/Baseline/ImageHistogram.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/ImageResize.png.md5 |     1 +
 .../Testing/Data/Baseline/ImageResize3D.png.md5    |     1 +
 .../Data/Baseline/ImageResizeCropping.png.md5      |     1 +
 .../Data/Baseline/LaplacianEdgeEnhance.png.md5     |     1 +
 .../Data/Baseline/MultipleUpdateExtents.png.md5    |     1 +
 .../Testing/Data/Baseline/ResliceBSpline.png.md5   |     1 +
 .../Testing/Data/Baseline/ResliceColorWrap.png.md5 |     1 +
 .../Data/Baseline/ResliceInformationInput.png.md5  |     1 +
 .../Baseline/ResliceInterpolationModes.png.md5     |     1 +
 .../Baseline/ResliceInterpolationOblique.png.md5   |     1 +
 .../Data/Baseline/ResliceLanczosKaiser.png.md5     |     1 +
 .../Data/Baseline/ResliceMirrorOblique.png.md5     |     1 +
 .../Testing/Data/Baseline/ResliceMirrorPad.png.md5 |     1 +
 .../Data/Baseline/ResliceOptimizationOff.png.md5   |     1 +
 .../Data/Baseline/ReslicePermutations.png.md5      |     1 +
 .../Data/Baseline/ReslicePermuteResample.png.md5   |     1 +
 .../Data/Baseline/ReslicePermuteSlab.png.md5       |     1 +
 .../Testing/Data/Baseline/ResliceSlabModes.png.md5 |     1 +
 .../Testing/Data/Baseline/ResliceToColors.png.md5  |     1 +
 .../Data/Baseline/ResliceWrapOblique.png.md5       |     1 +
 .../Testing/Data/Baseline/ResliceWrapPad.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/Spectrum.png.md5    |     1 +
 .../Testing/Data/Baseline/TestAccumulate.png.md5   |     1 +
 .../Data/Baseline/TestAddStencilData.png.md5       |     1 +
 .../Testing/Data/Baseline/TestAllBlends.png.md5    |     1 +
 .../Data/Baseline/TestAllBlendsFloat.png.md5       |     1 +
 .../Testing/Data/Baseline/TestAllFlips.png.md5     |     1 +
 .../Testing/Data/Baseline/TestAllLogic.png.md5     |     1 +
 .../Testing/Data/Baseline/TestAllMaskBits.png.md5  |     1 +
 .../Data/Baseline/TestAllMathematics.png.md5       |     1 +
 .../Testing/Data/Baseline/TestAllShrinks.png.md5   |     1 +
 .../Baseline/TestAnisotropicDiffusion2D.png.md5    |     1 +
 .../Baseline/TestAnisotropicDiffusion3D.png.md5    |     1 +
 .../Testing/Data/Baseline/TestBlendStencil.png.md5 |     1 +
 .../Data/Baseline/TestButterworthHighPass.png.md5  |     1 +
 .../Data/Baseline/TestButterworthLowPass.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/TestCache.png.md5   |     1 +
 .../Data/Baseline/TestChangeInformation.png.md5    |     1 +
 .../Testing/Data/Baseline/TestCheckerboard.png.md5 |     1 +
 .../Data/Baseline/TestCityBlockDistance.png.md5    |     1 +
 .../Data/Baseline/TestClipStencilData.png.md5      |     1 +
 .../Testing/Data/Baseline/TestConvolve.png.md5     |     1 +
 .../Testing/Data/Baseline/TestCorrelation.png.md5  |     1 +
 .../Testing/Data/Baseline/TestDivergence.png.md5   |     1 +
 .../Testing/Data/Baseline/TestDotProduct.png.md5   |     1 +
 .../Data/Baseline/TestEuclideanDistance.png.md5    |     1 +
 .../Baseline/TestEuclideanDistanceCached.png.md5   |     1 +
 .../Data/Baseline/TestEuclideanToPolar.png.md5     |     1 +
 .../Testing/Data/Baseline/TestExtractVOI.png.md5   |     1 +
 .../Data/Baseline/TestFFTCorrelation.png.md5       |     1 +
 .../Data/Baseline/TestGradientMagnitude.png.md5    |     1 +
 .../Data/Baseline/TestGradientMagnitude2.png.md5   |     1 +
 .../Testing/Data/Baseline/TestHSIToRGB.png.md5     |     1 +
 .../Testing/Data/Baseline/TestHSVToRGB.png.md5     |     1 +
 .../Data/Baseline/TestHybridMedian2D.png.md5       |     1 +
 .../Testing/Data/Baseline/TestIdealLowPass.png.md5 |     1 +
 .../Testing/Data/Baseline/TestImageCanvas.png.md5  |     1 +
 .../Data/Baseline/TestImageProjection.png.md5      |     1 +
 .../TestImageThresholdConnectivity.png.md5         |     1 +
 .../Data/Baseline/TestImageWeightedSum.png.md5     |     1 +
 .../Data/Baseline/TestInPlaceFilter.png.md5        |     1 +
 .../Data/Baseline/TestIslandRemoval2D.png.md5      |     1 +
 .../Testing/Data/Baseline/TestLassoStencil.png.md5 |     1 +
 .../Baseline/TestMapToRGBABlockStreaming.png.md5   |     1 +
 .../Baseline/TestMapToWindowLevelColors.png.md5    |     1 +
 .../Baseline/TestMapToWindowLevelColors2.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/TestMask2.png.md5   |     1 +
 .../Testing/Data/Baseline/TestMedian3D.png.md5     |     1 +
 .../Testing/Data/Baseline/TestNormalize.png.md5    |     1 +
 .../Testing/Data/Baseline/TestOpenClose3D.png.md5  |     1 +
 .../Core/Testing/Data/Baseline/TestPermute.png.md5 |     1 +
 .../Data/Baseline/TestQuantizeTo16Colors.png.md5   |     1 +
 .../Testing/Data/Baseline/TestROIStencil.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/TestRange3D.png.md5 |     1 +
 .../Testing/Data/Baseline/TestResample.png.md5     |     1 +
 .../Data/Baseline/TestSeparableFilter.png.md5      |     1 +
 .../Testing/Data/Baseline/TestShiftScale.png.md5   |     1 +
 .../Testing/Data/Baseline/TestShiftScale2.png.md5  |     1 +
 .../Data/Baseline/TestSimpleImageExample.png.md5   |     1 +
 .../Testing/Data/Baseline/TestSkeleton2D.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/TestSobel2D.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/TestSobel3D.png.md5 |     1 +
 .../Data/Baseline/TestStencilToImage.png.md5       |     1 +
 .../Data/Baseline/TestStencilWithFunction.png.md5  |     1 +
 .../Data/Baseline/TestStencilWithImage.png.md5     |     1 +
 .../Data/Baseline/TestSubtractStencilData.png.md5  |     1 +
 .../Testing/Data/Baseline/TestThreshold.png.md5    |     1 +
 .../Testing/Data/Baseline/TestVariance3D.png.md5   |     1 +
 .../Core/Testing/Data/Baseline/TestWipe.png.md5    |     1 +
 .../Core/Testing/Data/Baseline/TestWrapPad.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/imageMCAll.png.md5  |     1 +
 .../Data/Baseline/reconstructSurface.png.md5       |     1 +
 .../Testing/Data/Baseline/resampledTexture.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/voxelModel.png.md5  |     1 +
 Imaging/Core/Testing/Python/CMakeLists.txt         |   185 +-
 Imaging/Core/Testing/Python/TestAllBlends.py       |     4 -
 Imaging/Core/Testing/Python/TestAllBlendsFloat.py  |     4 -
 Imaging/Core/Testing/Python/TestAllLogic.py        |     4 -
 Imaging/Core/Testing/Python/TestAllMaskBits.py     |     4 -
 Imaging/Core/Testing/Python/TestAllMathematics.py  |     4 -
 Imaging/Core/Testing/Python/TestAllShrinks.py      |     4 -
 .../Python/TestImageThresholdConnectivity.py       |     4 -
 Imaging/Core/Testing/Python/TestThreshold.py       |     4 -
 Imaging/Core/Testing/Python/TestWipe.py            |     4 -
 Imaging/Core/Testing/Python/imageMCAll.py          |     4 -
 Imaging/Core/Testing/Python/reconstructSurface.py  |     4 -
 Imaging/Core/Testing/Tcl/CMakeLists.txt            |   183 +-
 Imaging/Core/vtkExtractVOI.h                       |     2 +-
 Imaging/Core/vtkImageAppendComponents.cxx          |     4 +-
 Imaging/Core/vtkImageAppendComponents.h            |     4 +-
 Imaging/Core/vtkImageBSplineInternals.h            |     4 +-
 Imaging/Core/vtkImageBlend.cxx                     |     4 +-
 Imaging/Core/vtkImageCast.cxx                      |     2 +-
 Imaging/Core/vtkImageCast.h                        |     2 +-
 Imaging/Core/vtkImageClip.h                        |     2 +-
 Imaging/Core/vtkImageConstantPad.cxx               |     2 +-
 Imaging/Core/vtkImageConstantPad.h                 |     2 +-
 Imaging/Core/vtkImageDecomposeFilter.h             |     2 +-
 Imaging/Core/vtkImageDifference.h                  |     2 +-
 Imaging/Core/vtkImageExtractComponents.h           |     2 +-
 Imaging/Core/vtkImageFlip.h                        |     2 +-
 Imaging/Core/vtkImageMagnify.cxx                   |     2 +-
 Imaging/Core/vtkImageMagnify.h                     |     2 +-
 Imaging/Core/vtkImageMask.cxx                      |     2 +-
 Imaging/Core/vtkImageMirrorPad.cxx                 |     2 +-
 Imaging/Core/vtkImageMirrorPad.h                   |     4 +-
 Imaging/Core/vtkImagePadFilter.h                   |     2 +-
 Imaging/Core/vtkImagePermute.h                     |     2 +-
 Imaging/Core/vtkImageResample.h                    |     2 +-
 Imaging/Core/vtkImageShrink3D.h                    |     2 +-
 Imaging/Core/vtkImageThreshold.h                   |     2 +-
 Imaging/Core/vtkImageTranslateExtent.h             |     2 +-
 Imaging/Core/vtkImageWrapPad.h                     |     4 +-
 Imaging/Core/vtkRTAnalyticSource.cxx               |     2 +-
 Imaging/Fourier/module.cmake                       |     2 +
 Imaging/Fourier/vtkImageButterworthHighPass.h      |     2 +-
 Imaging/Fourier/vtkImageButterworthLowPass.h       |     2 +-
 Imaging/Fourier/vtkImageFFT.h                      |     4 +-
 Imaging/Fourier/vtkImageFourierCenter.h            |     2 +-
 Imaging/Fourier/vtkImageFourierFilter.h            |     4 +-
 Imaging/Fourier/vtkImageIdealHighPass.h            |     2 +-
 Imaging/Fourier/vtkImageIdealLowPass.h             |     2 +-
 Imaging/Fourier/vtkImageRFFT.cxx                   |     7 +-
 Imaging/Fourier/vtkImageRFFT.h                     |     4 +-
 Imaging/General/vtkImageAnisotropicDiffusion2D.h   |     2 +-
 Imaging/General/vtkImageAnisotropicDiffusion3D.h   |     2 +-
 Imaging/General/vtkImageCheckerboard.h             |     2 +-
 Imaging/General/vtkImageCityBlockDistance.h        |     2 +-
 Imaging/General/vtkImageCorrelation.cxx            |     2 +-
 Imaging/General/vtkImageCorrelation.h              |     2 +-
 Imaging/General/vtkImageEuclideanToPolar.cxx       |     2 +-
 Imaging/General/vtkImageEuclideanToPolar.h         |     2 +-
 Imaging/General/vtkImageGradient.cxx               |     2 +-
 Imaging/General/vtkImageGradient.h                 |     2 +-
 Imaging/General/vtkImageGradientMagnitude.cxx      |     2 +-
 Imaging/General/vtkImageGradientMagnitude.h        |     2 +-
 Imaging/General/vtkImageHybridMedian2D.h           |     2 +-
 Imaging/General/vtkImageLaplacian.cxx              |     2 +-
 Imaging/General/vtkImageLaplacian.h                |     2 +-
 Imaging/General/vtkImageNormalize.cxx              |     2 +-
 Imaging/General/vtkImageNormalize.h                |     2 +-
 Imaging/General/vtkImageSlabReslice.h              |     2 +
 Imaging/General/vtkImageSobel2D.h                  |     2 +-
 Imaging/General/vtkImageSobel3D.h                  |     2 +-
 Imaging/General/vtkImageSpatialAlgorithm.h         |     2 +-
 Imaging/General/vtkSimpleImageFilterExample.h      |     4 +-
 Imaging/Hybrid/Testing/Cxx/CMakeLists.txt          |    18 +-
 .../Hybrid/Testing/Data/Baseline/genHead.png.md5   |     1 +
 .../Hybrid/Testing/Data/Baseline/iceCream.png.md5  |     1 +
 .../Hybrid/Testing/Data/Baseline/shepards.png.md5  |     1 +
 .../Data/Baseline/triangularTexture.png.md5        |     1 +
 Imaging/Hybrid/Testing/Python/CMakeLists.txt       |    11 +-
 Imaging/Hybrid/Testing/Tcl/CMakeLists.txt          |    12 +-
 Imaging/Hybrid/vtkBooleanTexture.h                 |     2 +-
 Imaging/Hybrid/vtkGaussianSplatter.h               |     2 +-
 Imaging/Hybrid/vtkImageCursor3D.h                  |     2 +-
 Imaging/Hybrid/vtkImageRectilinearWipe.cxx         |     2 +-
 Imaging/Hybrid/vtkImageRectilinearWipe.h           |     2 +-
 Imaging/Hybrid/vtkPointLoad.h                      |     8 +-
 Imaging/Hybrid/vtkSampleFunction.cxx               |     2 +-
 Imaging/Hybrid/vtkSampleFunction.h                 |     2 +-
 Imaging/Hybrid/vtkShepardMethod.h                  |     2 +-
 Imaging/Hybrid/vtkSurfaceReconstructionFilter.h    |     2 +-
 Imaging/Hybrid/vtkTriangularTexture.h              |     2 +-
 Imaging/Hybrid/vtkVoxelModeller.cxx                |     2 +-
 Imaging/Hybrid/vtkVoxelModeller.h                  |     4 +-
 Imaging/Math/vtkImageDivergence.cxx                |     2 +-
 Imaging/Math/vtkImageDivergence.h                  |     2 +-
 Imaging/Math/vtkImageDotProduct.cxx                |     2 +-
 Imaging/Math/vtkImageDotProduct.h                  |     2 +-
 Imaging/Math/vtkImageLogarithmicScale.cxx          |     2 +-
 Imaging/Math/vtkImageLogarithmicScale.h            |     2 +-
 Imaging/Math/vtkImageLogic.cxx                     |     4 +-
 Imaging/Math/vtkImageLogic.h                       |     2 +-
 Imaging/Math/vtkImageMagnitude.cxx                 |     2 +-
 Imaging/Math/vtkImageMagnitude.h                   |     2 +-
 Imaging/Math/vtkImageMaskBits.cxx                  |     2 +-
 Imaging/Math/vtkImageMaskBits.h                    |     2 +-
 Imaging/Math/vtkImageMathematics.cxx               |     2 +-
 Imaging/Math/vtkImageMathematics.h                 |     2 +-
 Imaging/Math/vtkImageWeightedSum.cxx               |     2 +-
 Imaging/Morphological/vtkImageIslandRemoval2D.h    |     2 +-
 .../vtkImageNonMaximumSuppression.cxx              |     2 +-
 .../Morphological/vtkImageNonMaximumSuppression.h  |     2 +-
 Imaging/Morphological/vtkImageSkeleton2D.h         |     2 +-
 .../vtkImageThresholdConnectivity.cxx              |     2 +-
 Imaging/Sources/vtkImageGaussianSource.cxx         |     2 +-
 Imaging/Sources/vtkImageGaussianSource.h           |     2 +-
 Imaging/Sources/vtkImageGridSource.cxx             |     2 +-
 Imaging/Sources/vtkImageGridSource.h               |     2 +-
 Imaging/Sources/vtkImageNoiseSource.cxx            |     2 +-
 Imaging/Sources/vtkImageNoiseSource.h              |     2 +-
 Imaging/Sources/vtkImageSinusoidSource.cxx         |     2 +-
 Imaging/Sources/vtkImageSinusoidSource.h           |     2 +-
 Imaging/Stencil/vtkImageStencilToImage.cxx         |     2 +-
 Infovis/Boost/Testing/Cxx/CMakeLists.txt           |     2 +-
 .../Testing/Cxx/CMakeLists.txt                     |    36 +-
 .../Testing/Cxx/TestBoostAdapter.cxx               |    24 +-
 .../Data/Baseline/TestBoostAlgorithms.png.md5      |     1 +
 .../Data/Baseline/TestBoostAlgorithms_1.png.md5    |     1 +
 .../Baseline/TestBoostDividedEdgeBundling.png.md5  |     1 +
 .../vtkBoostBetweennessClustering.cxx              |     2 -
 .../BoostGraphAlgorithms/vtkBoostGraphAdapter.h    |     6 +
 Infovis/Core/CMakeLists.txt                        |     1 +
 Infovis/Core/Testing/Cxx/CMakeLists.txt            |    64 +-
 Infovis/Core/Testing/Cxx/TestMergeGraphs.cxx       |    12 +-
 Infovis/Core/Testing/Cxx/TestReduceTable.cxx       |   127 +
 .../Data/Baseline/TestGraphAlgorithms.png.md5      |     1 +
 .../Data/Baseline/TestPruneTreeFilter.png.md5      |     1 +
 Infovis/Core/vtkAdjacencyMatrixToEdgeTable.cxx     |     1 +
 Infovis/Core/vtkCollapseVerticesByArray.cxx        |     2 -
 Infovis/Core/vtkEdgeCenters.h                      |     2 +-
 Infovis/Core/vtkPruneTreeFilter.cxx                |     3 +
 Infovis/Core/vtkReduceTable.cxx                    |   334 +
 Infovis/Core/vtkReduceTable.h                      |   156 +
 Infovis/Core/vtkTreeDifferenceFilter.cxx           |     1 +
 Infovis/Layout/Testing/Cxx/CMakeLists.txt          |    32 +-
 .../Data/Baseline/TestChacoGraphReader.png.md5     |     1 +
 .../Baseline/TestCirclePackLayoutStrategy.png.md5  |     1 +
 .../TestCirclePackLayoutStrategy_1.png.md5         |     1 +
 .../Baseline/TestTreeMapLayoutStrategy.png.md5     |     1 +
 .../vtkAttributeClustering2DLayoutStrategy.cxx     |     2 +-
 Infovis/Layout/vtkEdgeLayoutStrategy.h             |     2 +-
 Infovis/Layout/vtkForceDirectedLayoutStrategy.cxx  |     4 +-
 Infovis/Layout/vtkGraphLayoutStrategy.cxx          |     2 +-
 Infovis/Layout/vtkGraphLayoutStrategy.h            |     2 +-
 Infovis/Layout/vtkIncrementalForceLayout.cxx       |     1 +
 Infovis/Layout/vtkPerturbCoincidentVertices.cxx    |     1 -
 Infovis/Parallel/CMakeLists.txt                    |     2 +-
 Infovis/Parallel/Testing/Cxx/CMakeLists.txt        |    24 +-
 .../Parallel/Testing/Cxx/TestPBGLAlgorithms.cxx    |     2 +-
 .../Testing/Cxx/TestPBGLGraphSQLReaderFile.cxx     |     2 +-
 Infovis/Parallel/vtkPBGLDistributedGraphHelper.cxx |     2 +-
 .../Style/Testing/Data/Baseline/TestFlyTo.png.md5  |     1 +
 .../Baseline/TestInteractorStyleTerrain.png.md5    |     1 +
 .../Baseline/TestInteractorStyleTerrain_1.png.md5  |     1 +
 .../Data/Baseline/TestStyleBaseSpike.png.md5       |     1 +
 .../Data/Baseline/TestStyleJoystickActor.png.md5   |     1 +
 .../Data/Baseline/TestStyleJoystickCamera.png.md5  |     1 +
 .../Data/Baseline/TestStyleRubberBandZoom.png.md5  |     1 +
 .../Testing/Data/Baseline/TestStyleTerrain.png.md5 |     1 +
 .../Data/Baseline/TestStyleTrackballActor.png.md5  |     1 +
 .../Data/Baseline/TestStyleTrackballCamera.png.md5 |     1 +
 Interaction/Style/Testing/Python/CMakeLists.txt    |    21 +-
 .../Style/Testing/Python/TestStyleBaseSpike.py     |     4 -
 .../Style/Testing/Python/TestStyleJoystickActor.py |     4 -
 .../Testing/Python/TestStyleJoystickCamera.py      |     4 -
 .../Style/Testing/Python/TestStyleTerrain.py       |     4 -
 .../Testing/Python/TestStyleTrackballActor.py      |     4 -
 .../Testing/Python/TestStyleTrackballCamera.py     |     4 -
 Interaction/Style/Testing/Tcl/CMakeLists.txt       |    22 +-
 Interaction/Style/module.cmake                     |     3 +
 Interaction/Style/vtkInteractorStyleImage.cxx      |    11 +-
 Interaction/Widgets/Testing/Cxx/BoxWidget.cxx      |     2 +-
 Interaction/Widgets/Testing/Cxx/BoxWidget2.cxx     |     2 +-
 Interaction/Widgets/Testing/Cxx/CMakeLists.txt     |    85 +-
 .../Widgets/Testing/Cxx/TerrainPolylineEditor.cxx  |     2 +-
 .../Widgets/Testing/Cxx/TestAngleWidget2D.cxx      |     2 +-
 .../Widgets/Testing/Cxx/TestAngleWidget3D.cxx      |     2 +-
 .../Testing/Cxx/TestCellCentersPointPlacer.cxx     |     2 +-
 .../Testing/Cxx/TestDijkstraImageGeodesicPath.cxx  |     2 +-
 .../Widgets/Testing/Cxx/TestDistanceWidget.cxx     |     2 +-
 .../Widgets/Testing/Cxx/TestDistanceWidget3D.cxx   |     2 +-
 .../Cxx/TestFixedSizeHandleRepresentation3D.cxx    |     2 +-
 .../Widgets/Testing/Cxx/TestImageTracerWidget.cxx  |     2 +-
 .../Testing/Cxx/TestImplicitPlaneWidget.cxx        |     2 +-
 .../Testing/Cxx/TestImplicitPlaneWidget2.cxx       |     2 +-
 .../TestImplicitPlaneWidget2LockNormalToCamera.cxx |     2 +-
 Interaction/Widgets/Testing/Cxx/TestLineWidget.cxx |     2 +-
 .../Widgets/Testing/Cxx/TestLineWidget2.cxx        |     2 +-
 .../Testing/Cxx/TestLogoWidgetAlphaBlending.cxx    |     2 +-
 .../Testing/Cxx/TestLogoWidgetDepthPeeling.cxx     |     2 +-
 .../Testing/Cxx/TestOrientationMarkerWidget.cxx    |     2 +-
 .../Testing/Cxx/TestPickingManagerSeedWidget.cxx   |     2 +-
 .../Widgets/Testing/Cxx/TestPlaneWidget.cxx        |     2 +-
 .../Widgets/Testing/Cxx/TestPointWidget.cxx        |     2 +-
 ...stPolygonalSurfaceConstrainedDistanceWidget.cxx |     2 +-
 .../Widgets/Testing/Cxx/TestScalarBarWidget.cxx    |     2 +-
 Interaction/Widgets/Testing/Cxx/TestSeedWidget.cxx |     2 +-
 .../Widgets/Testing/Cxx/TestSliderWidget.cxx       |     2 +-
 .../Widgets/Testing/Cxx/TestSliderWidget2D.cxx     |     2 +-
 .../Widgets/Testing/Cxx/TestSphereHandleWidget.cxx |     2 +-
 .../Testing/Data/Baseline/BoxWidget.png.md5        |     1 +
 .../Testing/Data/Baseline/BoxWidget2.png.md5       |     1 +
 .../Testing/Data/Baseline/BoxWidget_1.png.md5      |     1 +
 .../Data/Baseline/TerrainPolylineEditor.png.md5    |     1 +
 .../Testing/Data/Baseline/TestAffineWidget.png.md5 |     1 +
 .../Data/Baseline/TestAngleWidget2D.png.md5        |     1 +
 .../Data/Baseline/TestAngleWidget2D_1.png.md5      |     1 +
 .../Data/Baseline/TestAngleWidget3D.png.md5        |     1 +
 .../Data/Baseline/TestBalloonWidget.png.md5        |     1 +
 .../Data/Baseline/TestBiDimensionalWidget.png.md5  |     1 +
 .../Testing/Data/Baseline/TestBorderWidget.png.md5 |     1 +
 .../Data/Baseline/TestBorderWidget_1.png.md5       |     1 +
 .../Testing/Data/Baseline/TestBoxWidget.png.md5    |     1 +
 .../Testing/Data/Baseline/TestCameraWidget.png.md5 |     1 +
 .../Baseline/TestCellCentersPointPlacer.png.md5    |     1 +
 .../Baseline/TestCenteredSliderWidget2D.png.md5    |     1 +
 .../Data/Baseline/TestCheckerboardWidget.png.md5   |     1 +
 .../Baseline/TestConstrainedHandleWidget.png.md5   |     1 +
 .../Data/Baseline/TestContourWidget2.png.md5       |     1 +
 .../Baseline/TestDijkstraGraphGeodesicPath.png.md5 |     1 +
 .../TestDijkstraGraphGeodesicPath_1.png.md5        |     1 +
 .../Baseline/TestDijkstraImageGeodesicPath.png.md5 |     1 +
 .../Data/Baseline/TestDistanceWidget.png.md5       |     1 +
 .../Data/Baseline/TestDistanceWidget3D.png.md5     |     1 +
 .../Data/Baseline/TestDistanceWidget_1.png.md5     |     1 +
 .../TestFixedSizeHandleRepresentation3D.png.md5    |     1 +
 .../Data/Baseline/TestFocalPlaneContour.png.md5    |     1 +
 .../Testing/Data/Baseline/TestHandleWidget.png.md5 |     1 +
 .../Data/Baseline/TestHandleWidget2D.png.md5       |     1 +
 .../Data/Baseline/TestHandleWidget_1.png.md5       |     1 +
 .../Data/Baseline/TestHandleWidget_2.png.md5       |     1 +
 .../Data/Baseline/TestImagePlaneWidget.png.md5     |     1 +
 .../Data/Baseline/TestImageTracerWidget.png.md5    |     1 +
 .../Data/Baseline/TestImageTracerWidget_1.png.md5  |     1 +
 .../Data/Baseline/TestImageTracerWidget_2.png.md5  |     1 +
 .../Data/Baseline/TestImplicitPlaneWidget.png.md5  |     1 +
 .../Data/Baseline/TestImplicitPlaneWidget2.png.md5 |     1 +
 ...tImplicitPlaneWidget2LockNormalToCamera.png.md5 |     1 +
 ...mplicitPlaneWidget2LockNormalToCamera_1.png.md5 |     1 +
 .../Baseline/TestImplicitPlaneWidget2_1.png.md5    |     1 +
 .../Baseline/TestImplicitPlaneWidget2_2.png.md5    |     1 +
 .../Baseline/TestImplicitPlaneWidget2b.png.md5     |     1 +
 .../Baseline/TestImplicitPlaneWidget2b_1.png.md5   |     1 +
 .../Baseline/TestImplicitPlaneWidget2b_2.png.md5   |     1 +
 .../Baseline/TestInteractorEventRecorder.png.md5   |     1 +
 .../Testing/Data/Baseline/TestLineWidget.png.md5   |     1 +
 .../Testing/Data/Baseline/TestLineWidget2.png.md5  |     1 +
 .../Testing/Data/Baseline/TestLogoWidget.png.md5   |     1 +
 .../Baseline/TestLogoWidgetAlphaBlending.png.md5   |     1 +
 .../Baseline/TestLogoWidgetDepthPeeling.png.md5    |     1 +
 .../Baseline/TestOrientationMarkerWidget.png.md5   |     1 +
 .../Baseline/TestOrientationMarkerWidget_1.png.md5 |     1 +
 .../Baseline/TestOrientationMarkerWidget_2.png.md5 |     1 +
 .../Data/Baseline/TestParallelopipedWidget.png.md5 |     1 +
 .../Baseline/TestPickingManagerSeedWidget.png.md5  |     1 +
 .../Baseline/TestPickingManagerWidgets.png.md5     |     1 +
 .../Testing/Data/Baseline/TestPlaneWidget.png.md5  |     1 +
 .../Data/Baseline/TestPlaybackWidget.png.md5       |     1 +
 .../TestPointHandleRepresentation3D.png.md5        |     1 +
 .../Testing/Data/Baseline/TestPointWidget.png.md5  |     1 +
 .../Data/Baseline/TestPointWidget_1.png.md5        |     1 +
 .../TestPolygonalHandleRepresentations.png.md5     |     1 +
 ...TestPolygonalRepresentationHandleWidget.png.md5 |     1 +
 ...lygonalSurfaceConstrainedDistanceWidget.png.md5 |     1 +
 .../Baseline/TestProgrammaticPlacement.png.md5     |     1 +
 .../Baseline/TestRectilinearWipeWidget.png.md5     |     1 +
 .../Baseline/TestRectilinearWipeWidget_1.png.md5   |     1 +
 .../Data/Baseline/TestScalarBarWidget.png.md5      |     1 +
 .../Testing/Data/Baseline/TestSeedWidget.png.md5   |     1 +
 .../Testing/Data/Baseline/TestSeedWidget2.png.md5  |     1 +
 ...TestSeedWidgetNonUniformRepresentations.png.md5 |     1 +
 ...stSeedWidgetNonUniformRepresentations_1.png.md5 |     1 +
 ...stSeedWidgetNonUniformRepresentations_2.png.md5 |     1 +
 .../Testing/Data/Baseline/TestSliderWidget.png.md5 |     1 +
 .../Data/Baseline/TestSliderWidget2D.png.md5       |     1 +
 .../Data/Baseline/TestSliderWidget2D_1.png.md5     |     1 +
 .../Data/Baseline/TestSphereHandleWidget.png.md5   |     1 +
 .../Testing/Data/Baseline/TestSphereWidget.png.md5 |     1 +
 .../Data/Baseline/TestSphereWidget_1.png.md5       |     1 +
 .../Data/Baseline/TestSphereWidget_2.png.md5       |     1 +
 .../Data/Baseline/TestSphereWidget_3.png.md5       |     1 +
 .../TestSurfaceConstrainedHandleWidget.png.md5     |     1 +
 .../TestSurfaceConstrainedHandleWidget_1.png.md5   |     1 +
 .../Data/Baseline/TestTensorProbeWidget.png.md5    |     1 +
 .../Testing/Data/Baseline/TestTextWidget.png.md5   |     1 +
 Interaction/Widgets/Testing/Python/CMakeLists.txt  |    12 +-
 .../Widgets/Testing/Python/TestBoxWidget.py        |     4 -
 .../Widgets/Testing/Python/TestImagePlaneWidget.py |     4 -
 .../Testing/Python/TestInteractorEventRecorder.py  |     4 -
 .../Widgets/Testing/Python/TestSphereWidget.py     |     4 -
 .../Widgets/Testing/Python/TestTextWidget.py       |     4 -
 Interaction/Widgets/Testing/Tcl/CMakeLists.txt     |    11 +-
 Interaction/Widgets/vtkAffineRepresentation2D.h    |     2 +-
 Interaction/Widgets/vtkBalloonWidget.cxx           |     8 +
 Interaction/Widgets/vtkBorderRepresentation.cxx    |   173 +-
 Interaction/Widgets/vtkBorderRepresentation.h      |    30 +-
 Interaction/Widgets/vtkBorderWidget.cxx            |     9 +-
 Interaction/Widgets/vtkCameraRepresentation.cxx    |     2 +-
 Interaction/Widgets/vtkCaptionRepresentation.cxx   |     2 +-
 .../vtkContinuousValueWidgetRepresentation.h       |     2 +-
 Interaction/Widgets/vtkLogoRepresentation.cxx      |     2 +-
 Interaction/Widgets/vtkPlaybackRepresentation.cxx  |     2 +-
 .../Widgets/vtkPolygonalHandleRepresentation3D.h   |     2 +-
 Interaction/Widgets/vtkScalarBarRepresentation.cxx |     8 +-
 Interaction/Widgets/vtkTextRepresentation.cxx      |     2 +-
 Parallel/Core/Testing/Cxx/CMakeLists.txt           |     3 +-
 .../Data/Baseline/TestPolyDataPieces.png.md5       |     1 +
 .../Data/Baseline/TestUnstructuredPieces.png.md5   |     1 +
 Parallel/Core/Testing/Python/CMakeLists.txt        |     6 +-
 Parallel/Core/Testing/Tcl/CMakeLists.txt           |     6 +-
 Parallel/Core/module.cmake                         |     2 +
 Parallel/Core/vtkCommunicator.cxx                  |    66 +-
 Parallel/Core/vtkCommunicator.h                    |     1 -
 Parallel/Core/vtkFieldDataSerializer.cxx           |    42 +-
 Parallel/Core/vtkMultiProcessStream.cxx            |   211 +-
 Parallel/Core/vtkMultiProcessStream.h              |    11 +-
 Parallel/Core/vtkSocketCommunicator.cxx            |     2 +-
 Parallel/Core/vtkSocketController.h                |    10 +-
 Parallel/MPI/CMakeLists.txt                        |    10 +-
 Parallel/MPI/Testing/Cxx/CMakeLists.txt            |    22 +-
 .../Cxx/TestDistributedDataShadowMapPass.cxx       |     3 +-
 .../MPI/Testing/Data/Baseline/TestPProbe.png.md5   |     1 +
 Parallel/MPI/Testing/Tcl/CMakeLists.txt            |    15 +-
 Parallel/MPI/vtkMPICommunicator.cxx                |     2 +-
 Parallel/MPI/vtkMPIUtilities.cxx                   |   115 +
 Parallel/MPI/vtkMPIUtilities.h                     |    46 +
 Rendering/Annotation/CMakeLists.txt                |     1 +
 Rendering/Annotation/Testing/Cxx/CMakeLists.txt    |    28 +-
 .../Annotation/Testing/Cxx/TestAxisActor3D.cxx     |    68 +-
 .../Testing/Data/Baseline/TestAxisActor3D.png.md5  |     1 +
 .../Data/Baseline/TestBarChartActor.png.md5        |     1 +
 .../Testing/Data/Baseline/TestCaptionActor.png.md5 |     1 +
 .../Data/Baseline/TestCaptionActor2D.png.md5       |     1 +
 .../Data/Baseline/TestCaptionActor_1.png.md5       |     1 +
 .../Data/Baseline/TestCubeAxes2DMode.png.md5       |     1 +
 .../Testing/Data/Baseline/TestCubeAxes3.png.md5    |     1 +
 .../Data/Baseline/TestCubeAxesInnerGridAll.png.md5 |     1 +
 .../Baseline/TestCubeAxesInnerGridClosest.png.md5  |     1 +
 .../Baseline/TestCubeAxesInnerGridFurthest.png.md5 |     1 +
 .../Baseline/TestCubeAxesIntersectionPoint.png.md5 |     1 +
 .../TestCubeAxesOrientedBoundingBox.png.md5        |     1 +
 .../Baseline/TestCubeAxesWithGridLines.png.md5     |     1 +
 .../Baseline/TestCubeAxesWithXInnerGrids.png.md5   |     1 +
 .../Baseline/TestCubeAxesWithXInnerPolys.png.md5   |     1 +
 .../Baseline/TestCubeAxesWithXInnerPolys_1.png.md5 |     1 +
 .../Data/Baseline/TestCubeAxesWithXLines.png.md5   |     1 +
 .../Data/Baseline/TestCubeAxesWithXLines_1.png.md5 |     1 +
 .../Baseline/TestCubeAxesWithYInnerGrids.png.md5   |     1 +
 .../Baseline/TestCubeAxesWithYInnerPolys.png.md5   |     1 +
 .../Baseline/TestCubeAxesWithYInnerPolys_1.png.md5 |     1 +
 .../Data/Baseline/TestCubeAxesWithYLines.png.md5   |     1 +
 .../Data/Baseline/TestCubeAxesWithYLines_1.png.md5 |     1 +
 .../Baseline/TestCubeAxesWithZInnerGrids.png.md5   |     1 +
 .../Baseline/TestCubeAxesWithZInnerPolys.png.md5   |     1 +
 .../Data/Baseline/TestCubeAxesWithZLines.png.md5   |     1 +
 .../Data/Baseline/TestLegendBoxActor.png.md5       |     1 +
 .../Data/Baseline/TestLegendScaleActor.png.md5     |     1 +
 .../Data/Baseline/TestPieChartActor.png.md5        |     1 +
 .../Testing/Data/Baseline/TestPolarAxes.png.md5    |     1 +
 .../Data/Baseline/TestSpiderPlotActor.png.md5      |     1 +
 .../Testing/Data/Baseline/TestXYPlotActor.png.md5  |     1 +
 .../Annotation/Testing/Data/Baseline/bore.png.md5  |     1 +
 .../Testing/Data/Baseline/cubeAxes.png.md5         |     1 +
 .../Testing/Data/Baseline/cubeAxes2.png.md5        |     1 +
 .../Testing/Data/Baseline/cubeAxes3.png.md5        |     1 +
 .../Testing/Data/Baseline/xyPlot.png.md5           |     1 +
 .../Testing/Data/Baseline/xyPlot2.png.md5          |     1 +
 .../Testing/Data/Baseline/xyPlot3.png.md5          |     1 +
 .../Testing/Data/Baseline/xyPlot4.png.md5          |     1 +
 Rendering/Annotation/Testing/Python/CMakeLists.txt |    21 +-
 Rendering/Annotation/Testing/Tcl/CMakeLists.txt    |    21 +-
 Rendering/Annotation/module.cmake                  |     2 +
 Rendering/Annotation/vtkAxisActor.cxx              |   272 +-
 Rendering/Annotation/vtkAxisActor.h                |    36 +-
 Rendering/Annotation/vtkAxisActor2D.cxx            |    16 +-
 Rendering/Annotation/vtkAxisFollower.cxx           |     5 +-
 Rendering/Annotation/vtkAxisFollower.h             |     2 +-
 Rendering/Annotation/vtkBarChartActor.cxx          |    19 +-
 Rendering/Annotation/vtkCornerAnnotation.cxx       |    32 +-
 Rendering/Annotation/vtkCubeAxesActor.cxx          |   159 +-
 Rendering/Annotation/vtkCubeAxesActor.h            |     7 +
 Rendering/Annotation/vtkCubeAxesActor2D.cxx        |    22 +-
 Rendering/Annotation/vtkLeaderActor2D.cxx          |    15 +-
 .../Annotation/vtkParallelCoordinatesActor.cxx     |    14 +-
 Rendering/Annotation/vtkPieChartActor.cxx          |    12 +-
 Rendering/Annotation/vtkPolarAxesActor.cxx         |    14 +-
 Rendering/Annotation/vtkProp3DAxisFollower.cxx     |   593 +
 Rendering/Annotation/vtkProp3DAxisFollower.h       |   165 +
 Rendering/Annotation/vtkScalarBarActor.cxx         |    77 +-
 Rendering/Annotation/vtkScalarBarActor.h           |    21 +
 Rendering/Annotation/vtkSpiderPlotActor.cxx        |     7 +-
 Rendering/Annotation/vtkXYPlotActor.cxx            |    10 +-
 Rendering/Cg/vtkCgShader.cxx                       |   511 -
 Rendering/Cg/vtkCgShader.h                         |   172 -
 Rendering/Cg/vtkCgShaderDeviceAdapter.cxx          |   114 -
 Rendering/Cg/vtkCgShaderDeviceAdapter.h            |    75 -
 Rendering/Cg/vtkCgShaderProgram.cxx                |    81 -
 Rendering/Cg/vtkCgShaderProgram.h                  |    77 -
 Rendering/Context2D/module.cmake                   |     3 +-
 Rendering/Context2D/vtkAbstractContextItem.h       |     2 +-
 Rendering/Context2D/vtkContext2D.cxx               |    44 +
 Rendering/Context2D/vtkContext2D.h                 |     9 +-
 Rendering/Context2D/vtkContextBufferId.cxx         |     5 +
 Rendering/Context2D/vtkContextDevice2D.cxx         |     2 +-
 Rendering/Context2D/vtkContextMouseEvent.h         |     4 -
 Rendering/Context2D/vtkContextScene.cxx            |     2 +-
 Rendering/Context2D/vtkMarkerUtilities.cxx         |     2 +
 Rendering/Context2D/vtkOpenGL2ContextDevice2D.cxx  |    41 +-
 Rendering/Context2D/vtkOpenGLContextBufferId.cxx   |     6 +
 Rendering/Context2D/vtkOpenGLContextDevice2D.cxx   |   134 +-
 .../Context2D/vtkOpenGLContextDevice2DPrivate.h    |    21 +-
 Rendering/Context2D/vtkOpenGLContextDevice3D.cxx   |    19 +-
 Rendering/Context2D/vtkTooltipItem.cxx             |    14 +
 Rendering/Core/CMakeLists.txt                      |    13 +-
 Rendering/Core/Testing/Cxx/CMakeLists.txt          |    27 +-
 .../Testing/Cxx/TestViewDependentErrorMetric.cxx   |     2 +-
 .../Core/Testing/Data/Baseline/CamBlur.png.md5     |     1 +
 .../Core/Testing/Data/Baseline/ImageActor.png.md5  |     1 +
 .../Data/Baseline/ImageActorStressed.png.md5       |     1 +
 .../Data/Baseline/NoLightGeneration.png.md5        |     1 +
 .../Data/Baseline/PickerWithLocator.png.md5        |     1 +
 .../Baseline/PolyDataMapperAllPolygons.png.md5     |     1 +
 .../Baseline/PolyDataMapperAllPolygons_1.png.md5   |     1 +
 .../Baseline/PolyDataMapperAllWireframe.png.md5    |     1 +
 .../Baseline/PolyDataMapperAllWireframe_1.png.md5  |     1 +
 .../Core/Testing/Data/Baseline/ScalarBar.png.md5   |     1 +
 .../Data/Baseline/SurfacePickerWithTexture.png.md5 |     1 +
 .../Baseline/SurfacePickerWithTexture_1.png.md5    |     1 +
 .../Data/Baseline/TestCameraInterpolator.png.md5   |     1 +
 .../TestGenericVertexAttributesGLSL.png.md5        |     1 +
 .../TestGenericVertexAttributesGLSL_1.png.md5      |     1 +
 .../Data/Baseline/TestLeaderActor2D.png.md5        |     1 +
 .../Testing/Data/Baseline/TestOnePoint.png.md5     |     1 +
 .../Testing/Data/Baseline/TestOpacity2.png.md5     |     1 +
 .../Testing/Data/Baseline/TestOpacity2_1.png.md5   |     1 +
 .../Testing/Data/Baseline/TestOpacity2_2.png.md5   |     1 +
 .../Testing/Data/Baseline/TestOpacity2_3.png.md5   |     1 +
 .../Data/Baseline/TestOpacityVectors.png.md5       |     1 +
 .../Data/Baseline/TestOpacityVectors_1.png.md5     |     1 +
 .../Data/Baseline/TestOpacityVectors_2.png.md5     |     1 +
 .../Core/Testing/Data/Baseline/TestRotate.png.md5  |     1 +
 .../TestSplitViewportStereoHorizontal.png.md5      |     1 +
 .../TestTransformCoordinateSystems.png.md5         |     1 +
 .../Baseline/TestTransformInterpolator.png.md5     |     1 +
 .../Baseline/TestTransformInterpolator_1.png.md5   |     1 +
 .../Baseline/TestTransformInterpolator_2.png.md5   |     1 +
 .../Baseline/TestTriangleStripCellColor.png.md5    |     1 +
 .../Baseline/TestWindowToImageTransparency.png.md5 |     1 +
 .../Testing/Data/Baseline/TexturedSphere.png.md5   |     1 +
 .../Testing/Data/Baseline/TexturedSphere_1.png.md5 |     1 +
 .../Data/Baseline/TransformConcatenation.png.md5   |     1 +
 .../Data/Baseline/TransformCoverage.png.md5        |     1 +
 .../Data/Baseline/TransformCoverage_1.png.md5      |     1 +
 .../Data/Baseline/TransformImageData.png.md5       |     1 +
 .../Data/Baseline/TransformPolyData.png.md5        |     1 +
 .../Core/Testing/Data/Baseline/assembly.png.md5    |     1 +
 .../Testing/Data/Baseline/cameraWarpedCone.png.md5 |     1 +
 Rendering/Core/Testing/Data/Baseline/cells.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/cowHair.png.md5     |     1 +
 .../Core/Testing/Data/Baseline/cowHair2.png.md5    |     1 +
 .../Core/Testing/Data/Baseline/pickCells.png.md5   |     1 +
 .../Testing/Data/Baseline/propAssembly.png.md5     |     1 +
 .../Testing/Data/Baseline/rendererSource.png.md5   |     1 +
 .../Testing/Data/Baseline/rendererSource_1.png.md5 |     1 +
 .../Core/Testing/Data/Baseline/rotations.png.md5   |     1 +
 .../Data/Baseline/stereoDresdenMace.png.md5        |     1 +
 Rendering/Core/Testing/Python/CMakeLists.txt       |    84 +-
 Rendering/Core/Testing/Python/TestCgShader.py      |    29 -
 .../Python/TestGenericVertexAttributesCg.py        |    84 -
 .../Core/Testing/Python/TestOpacityVectors.py      |    71 +
 Rendering/Core/Testing/Python/cells.py             |     4 -
 Rendering/Core/Testing/Tcl/CMakeLists.txt          |    66 +-
 Rendering/Core/module.cmake                        |     9 +-
 Rendering/Core/vtkAbstractPropPicker.cxx           |    10 -
 Rendering/Core/vtkAbstractPropPicker.h             |    17 -
 Rendering/Core/vtkAbstractVolumeMapper.h           |     2 +-
 Rendering/Core/vtkActor2D.cxx                      |     3 +-
 Rendering/Core/vtkActor2DCollection.h              |     2 +-
 Rendering/Core/vtkActorCollection.h                |     4 +-
 Rendering/Core/vtkAssemblyPaths.h                  |     4 +-
 Rendering/Core/vtkBackgroundColorMonitor.cxx       |    95 +
 Rendering/Core/vtkBackgroundColorMonitor.h         |    73 +
 Rendering/Core/vtkCellPicker.cxx                   |     2 +-
 Rendering/Core/vtkChooserPainter.cxx               |     2 +-
 Rendering/Core/vtkColorTransferFunction.cxx        |    14 +-
 Rendering/Core/vtkColorTransferFunction.h          |     2 +-
 Rendering/Core/vtkCompositePainter.cxx             |     2 +-
 .../Core/vtkDiscretizableColorTransferFunction.cxx |   334 +-
 .../Core/vtkDiscretizableColorTransferFunction.h   |    88 +-
 Rendering/Core/vtkFollower.h                       |     2 +-
 Rendering/Core/vtkFrustumCoverageCuller.h          |     2 +-
 Rendering/Core/vtkGPUInfoList.cxx                  |     2 +-
 .../Core/vtkGenericVertexAttributeMapping.cxx      |     2 +-
 Rendering/Core/vtkGlyph3DMapper.cxx                |     2 +-
 Rendering/Core/vtkHardwareSelector.cxx             |    40 +-
 Rendering/Core/vtkHardwareSelector.h               |    19 +-
 Rendering/Core/vtkImageActor.cxx                   |    12 +-
 Rendering/Core/vtkInteractorEventRecorder.cxx      |    19 +-
 Rendering/Core/vtkInteractorObserver.h             |     2 +-
 Rendering/Core/vtkInteractorStyle.cxx              |     7 +-
 Rendering/Core/vtkInteractorStyle.h                |    46 +-
 Rendering/Core/vtkLight.h                          |    10 +-
 Rendering/Core/vtkLightActor.cxx                   |    10 +-
 Rendering/Core/vtkLookupTableWithEnabling.cxx      |     2 +-
 Rendering/Core/vtkMaterialLibrary.cxx              |    74 -
 Rendering/Core/vtkMaterialLibrary.h                |    61 -
 Rendering/Core/vtkPainterDeviceAdapter.h           |    14 +
 Rendering/Core/vtkPainterPolyDataMapper.cxx        |     6 +
 Rendering/Core/vtkPointPicker.h                    |     2 +-
 Rendering/Core/vtkPrimitivePainter.cxx             |    12 -
 Rendering/Core/vtkProp.cxx                         |     8 +-
 Rendering/Core/vtkProp3D.h                         |     2 +-
 Rendering/Core/vtkProp3DCollection.h               |     4 +-
 Rendering/Core/vtkProp3DFollower.cxx               |    32 +-
 Rendering/Core/vtkProp3DFollower.h                 |     8 +-
 Rendering/Core/vtkPropCollection.h                 |     4 +-
 Rendering/Core/vtkProperty.cxx                     |   592 +-
 Rendering/Core/vtkProperty.h                       |    45 -
 Rendering/Core/vtkRenderWindow.cxx                 |    83 +-
 Rendering/Core/vtkRenderWindow.h                   |    82 +-
 Rendering/Core/vtkRenderWindowInteractor.cxx       |     5 +-
 Rendering/Core/vtkRenderer.cxx                     |    21 +-
 Rendering/Core/vtkRenderer.h                       |     2 +-
 Rendering/Core/vtkShader.cxx                       |  1235 --
 Rendering/Core/vtkShader.h                         |   224 -
 Rendering/Core/vtkShaderCodeLibrary.cxx            |   115 -
 Rendering/Core/vtkShaderCodeLibrary.h              |    80 -
 Rendering/Core/vtkShaderDeviceAdapter.cxx          |    38 -
 Rendering/Core/vtkShaderDeviceAdapter.h            |    81 -
 Rendering/Core/vtkShaderDeviceAdapter2.cxx         |     1 -
 Rendering/Core/vtkShaderProgram.cxx                |   260 -
 Rendering/Core/vtkShaderProgram.h                  |   168 -
 Rendering/Core/vtkStandardPolyDataPainter.cxx      |    82 +-
 Rendering/Core/vtkStandardPolyDataPainter.h        |     5 +-
 Rendering/Core/vtkTDxInteractorStyleCamera.cxx     |     2 +-
 Rendering/Core/vtkTextMapper.cxx                   |   392 +-
 Rendering/Core/vtkTextMapper.h                     |    83 +-
 Rendering/Core/vtkTextProperty.cxx                 |     5 +
 Rendering/Core/vtkTextProperty.h                   |    19 +
 Rendering/Core/vtkTextRenderer.h                   |     4 +
 Rendering/Core/vtkViewDependentErrorMetric.cxx     |     2 +-
 Rendering/Core/vtkViewport.h                       |     4 +-
 Rendering/Core/vtkVolume.cxx                       |    70 +-
 Rendering/Core/vtkWindowLevelLookupTable.h         |     2 +-
 Rendering/Core/vtkWorldPointPicker.h               |     2 +-
 Rendering/Core/vtkXMLMaterial.cxx                  |   313 -
 Rendering/Core/vtkXMLMaterial.h                    |   109 -
 Rendering/Core/vtkXMLMaterialParser.cxx            |   152 -
 Rendering/Core/vtkXMLMaterialParser.h              |   123 -
 Rendering/Core/vtkXMLMaterialReader.cxx            |   112 -
 Rendering/Core/vtkXMLMaterialReader.h              |    92 -
 Rendering/Core/vtkXMLShader.cxx                    |   447 -
 Rendering/Core/vtkXMLShader.h                      |   137 -
 Rendering/FreeType/Testing/Cxx/CMakeLists.txt      |    69 +-
 .../Testing/Cxx/TestFreeTypeTextMapper.cxx         |   265 +
 .../Testing/Cxx/TestFreeTypeTextMapperNoMath.cxx   |   211 +
 .../Cxx/TestMathTextFreeTypeTextRenderer.cxx       |    23 +-
 .../Cxx/TestMathTextFreeTypeTextRendererNoMath.cxx |    23 +-
 .../Data/Baseline/OverlayTextOnImage.png.md5       |     1 +
 .../Data/Baseline/TestFTStringToPath.png.md5       |     1 +
 .../Data/Baseline/TestFreeTypeTextMapper.png.md5   |     1 +
 .../Baseline/TestFreeTypeTextMapperNoMath.png.md5  |     1 +
 .../TestMathTextFreeTypeTextRenderer.png.md5       |     1 +
 .../TestMathTextFreeTypeTextRendererNoMath.png.md5 |     1 +
 .../Testing/Data/Baseline/multiLineText.png.md5    |     1 +
 .../FreeType/Testing/Data/Baseline/text.png.md5    |     1 +
 .../FreeType/Testing/Data/Fonts/DejaVuSans.ttf.md5 |     1 +
 Rendering/FreeType/Testing/Python/CMakeLists.txt   |     6 +-
 Rendering/FreeType/Testing/Tcl/CMakeLists.txt      |     6 +-
 Rendering/FreeType/vtkFreeTypeTools.cxx            |    24 +-
 .../FreeType/vtkMathTextFreeTypeTextRenderer.cxx   |     9 +-
 Rendering/FreeType/vtkMathTextUtilities.cxx        |     2 +
 Rendering/FreeType/vtkMathTextUtilities.h          |     6 +-
 Rendering/FreeType/vtkTextActor.cxx                |     2 +
 Rendering/FreeType/vtkTextActor.h                  |     2 +-
 Rendering/FreeType/vtkTextActor3D.cxx              |    15 +-
 Rendering/FreeType/vtkTextActor3D.h                |     2 +-
 Rendering/FreeTypeFontConfig/CMakeLists.txt        |     2 +-
 .../FreeTypeFontConfig/Testing/Cxx/CMakeLists.txt  |    34 +-
 .../Testing/Cxx/TestSystemFontRendering.cxx        |    29 +-
 .../Data/Baseline/TestSystemFontRendering.png.md5  |     1 +
 .../Baseline/TestSystemFontRendering_1.png.md5     |     1 +
 .../Baseline/TestSystemFontRendering_2.png.md5     |     1 +
 .../vtkFontConfigFreeTypeTools.cxx                 |    46 +-
 .../FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.cxx |   201 +-
 .../FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.h   |    12 +
 Rendering/GL2PS/Testing/Cxx/CMakeLists.txt         |     7 +-
 Rendering/GL2PS/module.cmake                       |     2 +
 Rendering/GL2PS/vtkGL2PSContextDevice2D.cxx        |   139 +-
 Rendering/GL2PS/vtkGL2PSContextDevice2D.h          |     9 +-
 Rendering/GL2PS/vtkGL2PSUtilities.cxx              |    34 +-
 Rendering/GL2PS/vtkGL2PSUtilities.h                |    17 +-
 Rendering/HybridOpenGL/CMakeLists.txt              |    34 -
 Rendering/HybridOpenGL/Testing/Cxx/CMakeLists.txt  |    43 -
 .../Testing/Cxx/TestImageDataLIC2D.cxx             |    48 -
 .../HybridOpenGL/Testing/Cxx/TestImageDataLIC2D.h  |   297 -
 .../Testing/Cxx/TestStructuredGridLIC2DSlice.h     |   304 -
 .../Testing/Cxx/TestStructuredGridLIC2DXSlice.cxx  |    56 -
 .../Testing/Cxx/TestStructuredGridLIC2DYSlice.cxx  |    55 -
 .../Testing/Cxx/TestStructuredGridLIC2DZSlice.cxx  |    55 -
 .../HybridOpenGL/Testing/Cxx/TestSurfaceLIC.cxx    |    46 -
 .../HybridOpenGL/Testing/Cxx/TestSurfaceLIC.h      |   216 -
 Rendering/HybridOpenGL/module.cmake                |    15 -
 Rendering/HybridOpenGL/vtkImageDataLIC2D.cxx       |   613 -
 Rendering/HybridOpenGL/vtkImageDataLIC2D.h         |   155 -
 .../vtkImageDataLIC2DExtentTranslator.cxx          |    93 -
 .../vtkImageDataLIC2DExtentTranslator.h            |    66 -
 Rendering/HybridOpenGL/vtkStructuredGridLIC2D.cxx  |   817 --
 Rendering/HybridOpenGL/vtkStructuredGridLIC2D.h    |   166 -
 .../HybridOpenGL/vtkStructuredGridLIC2D_fs.glsl    |   179 -
 .../HybridOpenGL/vtkSurfaceLICDefaultPainter.cxx   |    60 -
 .../HybridOpenGL/vtkSurfaceLICDefaultPainter.h     |    68 -
 Rendering/HybridOpenGL/vtkSurfaceLICPainter.cxx    |   937 --
 Rendering/HybridOpenGL/vtkSurfaceLICPainter.h      |   167 -
 .../HybridOpenGL/vtkSurfaceLICPainter_fs1.glsl     |    30 -
 .../HybridOpenGL/vtkSurfaceLICPainter_fs2.glsl     |    58 -
 .../HybridOpenGL/vtkSurfaceLICPainter_vs1.glsl     |    51 -
 Rendering/Image/vtkImageSliceCollection.h          |     2 +-
 Rendering/LIC/CMakeLists.txt                       |    66 +
 Rendering/LIC/Testing/Cxx/CMakeLists.txt           |   375 +
 Rendering/LIC/Testing/Cxx/TestImageDataLIC2D.cxx   |   401 +
 Rendering/LIC/Testing/Cxx/TestImageDataLIC2D.h     |    24 +
 .../Testing/Cxx/TestStructuredGridLIC2DXSlice.cxx  |    55 +
 .../Testing/Cxx/TestStructuredGridLIC2DYSlice.cxx  |    54 +
 .../Testing/Cxx/TestStructuredGridLIC2DZSlice.cxx  |    54 +
 Rendering/LIC/Testing/Cxx/TestSurfaceLIC.cxx       |   252 +
 Rendering/LIC/Testing/Cxx/TestSurfaceLIC.h         |    20 +
 .../Cxx/vtkStructuredGridLIC2DTestDriver.cxx       |   335 +
 .../Testing/Cxx/vtkStructuredGridLIC2DTestDriver.h |    26 +
 .../LIC/Testing/Cxx/vtkSurfaceLICTestDriver.cxx    |   406 +
 .../LIC/Testing/Cxx/vtkSurfaceLICTestDriver.h      |    61 +
 .../Data/Baseline/TestImageDataLIC2D.png.md5       |     1 +
 .../Data/Baseline/TestImageDataLIC2D_1.png.md5     |     1 +
 .../Baseline/TestStructuredGridLIC2DXSlice.png.md5 |     1 +
 .../TestStructuredGridLIC2DXSlice_1.png.md5        |     1 +
 .../Baseline/TestStructuredGridLIC2DYSlice.png.md5 |     1 +
 .../TestStructuredGridLIC2DYSlice_1.png.md5        |     1 +
 .../Baseline/TestStructuredGridLIC2DZSlice.png.md5 |     1 +
 .../TestStructuredGridLIC2DZSlice_1.png.md5        |     1 +
 .../Testing/Data/Baseline/TestSurfaceLIC.png.md5   |     1 +
 ...SurfaceLICCurvedContrastEnhancedBlended.png.md5 |     1 +
 ...CurvedContrastEnhancedBlendedSmallGrain.png.md5 |     1 +
 ...dContrastEnhancedColorBlendedSmallGrain.png.md5 |     1 +
 ...trastEnhancedColorBlendedSmallGrainMask.png.md5 |     1 +
 ...edContrastEnhancedColorMappedSmallGrain.png.md5 |     1 +
 ...ntrastEnhancedColorMappedSmallGrainMask.png.md5 |     1 +
 ...tSurfaceLICCurvedContrastEnhancedMapped.png.md5 |     1 +
 ...CCurvedContrastEnhancedMappedSmallGrain.png.md5 |     1 +
 ...stEnhancedMappedSmallVectorNormalizeOff.png.md5 |     1 +
 ...LICCurvedContrastEnhancedSmallGrainMask.png.md5 |     1 +
 .../Baseline/TestSurfaceLICCurvedDefaults.png.md5  |     1 +
 .../TestSurfaceLICCurvedDefaultsColor.png.md5      |     1 +
 ...faceLICCurvedEnhancedVectorNormalizeOff.png.md5 |     1 +
 ...faceLICMultiBlockContrastEnhancedPerlin.png.md5 |     1 +
 .../TestSurfaceLICPlanarContrastEnhanced.png.md5   |     1 +
 .../Baseline/TestSurfaceLICPlanarDefaults.png.md5  |     1 +
 .../TestSurfaceLICPlanarVectorNormalizeOff.png.md5 |     1 +
 ...anarVectorNormalizeOffMediumGrainPerlin.png.md5 |     1 +
 ...narVectorNormalizeOffMediumGrainUniform.png.md5 |     1 +
 Rendering/LIC/module.cmake                         |    15 +
 Rendering/LIC/vtkImageDataLIC2D.cxx                |   722 ++
 Rendering/LIC/vtkImageDataLIC2D.h                  |   141 +
 .../LIC/vtkImageDataLIC2DExtentTranslator.cxx      |   121 +
 Rendering/LIC/vtkImageDataLIC2DExtentTranslator.h  |    74 +
 Rendering/LIC/vtkLineIntegralConvolution2D.cxx     |  2156 ++++
 Rendering/LIC/vtkLineIntegralConvolution2D.h       |   396 +
 Rendering/LIC/vtkLineIntegralConvolution2D_AA.glsl |    88 +
 .../LIC/vtkLineIntegralConvolution2D_AAH.glsl      |    56 +
 .../LIC/vtkLineIntegralConvolution2D_AAV.glsl      |    56 +
 Rendering/LIC/vtkLineIntegralConvolution2D_CE.glsl |    38 +
 Rendering/LIC/vtkLineIntegralConvolution2D_EE.glsl |    73 +
 .../LIC/vtkLineIntegralConvolution2D_LIC0.glsl     |    89 +
 .../LIC/vtkLineIntegralConvolution2D_LICI.glsl     |   124 +
 .../LIC/vtkLineIntegralConvolution2D_LICN.glsl     |    32 +
 Rendering/LIC/vtkLineIntegralConvolution2D_VT.glsl |    33 +
 Rendering/LIC/vtkPainterCommunicator.h             |    72 +
 Rendering/LIC/vtkPixelTransfer.cxx                 |    32 +
 Rendering/LIC/vtkPixelTransfer.h                   |   233 +
 Rendering/LIC/vtkStructuredGridLIC2D.cxx           |   851 ++
 Rendering/LIC/vtkStructuredGridLIC2D.h             |   166 +
 Rendering/LIC/vtkStructuredGridLIC2D_fs.glsl       |   158 +
 Rendering/LIC/vtkSurfaceLICComposite.cxx           |   425 +
 Rendering/LIC/vtkSurfaceLICComposite.h             |   270 +
 Rendering/LIC/vtkSurfaceLICDefaultPainter.cxx      |   114 +
 Rendering/LIC/vtkSurfaceLICDefaultPainter.h        |    74 +
 Rendering/LIC/vtkSurfaceLICPainter.cxx             |  3442 +++++
 Rendering/LIC/vtkSurfaceLICPainter.h               |   532 +
 Rendering/LIC/vtkSurfaceLICPainter_CE.glsl         |   105 +
 Rendering/LIC/vtkSurfaceLICPainter_DCpy.glsl       |    38 +
 Rendering/LIC/vtkSurfaceLICPainter_GeomFs.glsl     |    32 +
 Rendering/LIC/vtkSurfaceLICPainter_GeomVs.glsl     |    78 +
 Rendering/LIC/vtkSurfaceLICPainter_SC.glsl         |   202 +
 Rendering/LIC/vtkTextureIO.cxx                     |   172 +
 Rendering/LIC/vtkTextureIO.h                       |    86 +
 .../Data/Baseline/TestQuadricLODActor.png.md5      |     1 +
 Rendering/LOD/Testing/Python/CMakeLists.txt        |     2 +-
 Rendering/LOD/Testing/Tcl/CMakeLists.txt           |     2 +-
 Rendering/LOD/vtkLODActor.cxx                      |    20 +-
 .../Testing/Data/Baseline/labeledContours.png.md5  |     1 +
 .../Data/Baseline/labeledContours_1.png.md5        |     1 +
 .../Data/Baseline/labeledContours_2.png.md5        |     1 +
 .../Data/Baseline/labeledContours_3.png.md5        |     1 +
 .../Testing/Data/Baseline/labeledMesh.png.md5      |     1 +
 .../Testing/Data/Baseline/labeledMesh_1.png.md5    |     1 +
 Rendering/Label/Testing/Python/CMakeLists.txt      |     4 +-
 Rendering/Label/Testing/Tcl/CMakeLists.txt         |     7 +-
 Rendering/Label/module.cmake                       |     2 +
 Rendering/Label/vtkLabeledDataMapper.cxx           |     9 +-
 Rendering/Label/vtkLabeledDataMapper.h             |    20 +
 Rendering/Matplotlib/Testing/Cxx/CMakeLists.txt    |    75 +-
 .../Testing/Cxx/TestScalarBarCombinatorics.cxx     |    18 +-
 .../Data/Baseline/TestContextMathTextImage.png.md5 |     1 +
 .../Baseline/TestContextMathTextImage_1.png.md5    |     1 +
 .../TestGL2PSMathTextActor-rasterRef.png.md5       |     1 +
 .../Data/Baseline/TestGL2PSMathTextActor.png.md5   |     1 +
 .../TestGL2PSMathTextActor3D-rasterRef.png.md5     |     1 +
 .../Data/Baseline/TestGL2PSMathTextActor3D.png.md5 |     1 +
 .../Baseline/TestGL2PSMathTextActor3D_1.png.md5    |     1 +
 .../Baseline/TestGL2PSMathTextActor3D_2.png.md5    |     1 +
 .../Baseline/TestGL2PSMathTextActor3D_3.png.md5    |     1 +
 .../Data/Baseline/TestGL2PSMathTextActor_1.png.md5 |     1 +
 .../TestGL2PSMathTextOutput-rasterRef.png.md5      |     1 +
 .../TestGL2PSMathTextOutput-rasterRef_1.png.md5    |     1 +
 .../Data/Baseline/TestGL2PSMathTextOutput.png.md5  |     1 +
 .../Baseline/TestGL2PSMathTextOutput_1.png.md5     |     1 +
 .../TestGL2PSMathTextScaling-rasterRef.png.md5     |     1 +
 .../Data/Baseline/TestGL2PSMathTextScaling.png.md5 |     1 +
 .../Baseline/TestGL2PSMathTextScaling_1.png.md5    |     1 +
 .../Baseline/TestIndexedLookupScalarBar.png.md5    |     1 +
 .../Baseline/TestIndexedLookupScalarBar_1.png.md5  |     1 +
 .../Baseline/TestIndexedLookupScalarBar_2.png.md5  |     1 +
 .../Data/Baseline/TestMathTextActor.png.md5        |     1 +
 .../Data/Baseline/TestMathTextActor3D.png.md5      |     1 +
 .../Data/Baseline/TestMathTextActor3D_1.png.md5    |     1 +
 .../Data/Baseline/TestMathTextActor3D_2.png.md5    |     1 +
 .../Data/Baseline/TestMathTextActor_1.png.md5      |     1 +
 .../Testing/Data/Baseline/TestRenderString.png.md5 |     1 +
 .../Data/Baseline/TestRenderString_1.png.md5       |     1 +
 .../Data/Baseline/TestRenderString_2.png.md5       |     1 +
 .../Baseline/TestScalarBarCombinatorics.png.md5    |     1 +
 .../Testing/Data/Baseline/TestStringToPath.png.md5 |     1 +
 .../Data/Baseline/TestStringToPath_1.png.md5       |     1 +
 Rendering/OpenGL/CMakeLists.txt                    |    58 +-
 Rendering/OpenGL/Testing/Cxx/CMakeLists.txt        |   158 +-
 .../OpenGL/Testing/Cxx/LoadOpenGLExtension.cxx     |    26 +
 .../OpenGL/Testing/Cxx/TestFBOImplementation.cxx   |   292 +-
 .../OpenGL/Testing/Cxx/TestFollowerPicking.cxx     |     2 +-
 .../OpenGL/Testing/Cxx/TestGaussianBlurPass.cxx    |     4 +-
 ...estGenericVertexAttributesGLSLAlphaBlending.cxx |     4 +-
 ...GenericVertexAttributesGLSLDepthPeelingPass.cxx |     4 +-
 .../Testing/Cxx/TestGlyph3DMapperPicking.cxx       |     2 +-
 Rendering/OpenGL/Testing/Cxx/TestLightActor.cxx    |     2 +-
 Rendering/OpenGL/Testing/Cxx/TestMonitors.cxx      |   261 +
 Rendering/OpenGL/Testing/Cxx/TestShadowMapPass.cxx |     2 +-
 .../Cxx/TestTranslucentLUTDepthPeelingPass.cxx     |    34 +-
 .../Data/Baseline/LoadOpenGLExtension.png.md5      |     1 +
 .../Data/Baseline/LoadOpenGLExtension_1.png.md5    |     1 +
 .../Testing/Data/Baseline/RenderNonFinite.png.md5  |     1 +
 .../Testing/Data/Baseline/SurfacePlusEdges.png.md5 |     1 +
 .../Data/Baseline/TestActorLightingFlag.png.md5    |     1 +
 .../Data/Baseline/TestAnimationScene.png.md5       |     1 +
 .../Data/Baseline/TestAreaSelections.png.md5       |     1 +
 .../Data/Baseline/TestAreaSelections_1.png.md5     |     1 +
 .../Data/Baseline/TestBackfaceCulling.png.md5      |     1 +
 .../Data/Baseline/TestBlurAndSobelPasses.png.md5   |     1 +
 .../Data/Baseline/TestBlurAndSobelPasses_1.png.md5 |     1 +
 .../Data/Baseline/TestDynamic2DLabelMapper.png.md5 |     1 +
 .../Data/Baseline/TestFollowerPicking.png.md5      |     1 +
 .../Data/Baseline/TestFreetypeTextMapper.png.md5   |     1 +
 .../Baseline/TestFreetypeTextMapperBigger.png.md5  |     1 +
 .../Data/Baseline/TestFreetypeTextOverlay.png.md5  |     1 +
 .../Data/Baseline/TestGaussianBlurPass.png.md5     |     1 +
 .../Data/Baseline/TestGaussianBlurPass_1.png.md5   |     1 +
 .../Data/Baseline/TestGaussianBlurPass_2.png.md5   |     1 +
 ...enericVertexAttributesGLSLAlphaBlending.png.md5 |     1 +
 ...ericVertexAttributesGLSLAlphaBlending_1.png.md5 |     1 +
 .../TestGenericVertexAttributesGLSLCxx.png.md5     |     1 +
 .../TestGenericVertexAttributesGLSLCxx_1.png.md5   |     1 +
 ...ricVertexAttributesGLSLDepthPeelingPass.png.md5 |     1 +
 ...cVertexAttributesGLSLDepthPeelingPass_1.png.md5 |     1 +
 ...cVertexAttributesGLSLDepthPeelingPass_2.png.md5 |     1 +
 .../Data/Baseline/TestGlyph3DMapper.png.md5        |     1 +
 .../Data/Baseline/TestGlyph3DMapperArrow.png.md5   |     1 +
 .../Data/Baseline/TestGlyph3DMapperMasking.png.md5 |     1 +
 .../Baseline/TestGlyph3DMapperMasking_1.png.md5    |     1 +
 .../TestGlyph3DMapperOrientationArray.png.md5      |     1 +
 .../TestGlyph3DMapperOrientationArray_1.png.md5    |     1 +
 .../Data/Baseline/TestGlyph3DMapperPicking.png.md5 |     1 +
 .../Data/Baseline/TestGradientBackground.png.md5   |     1 +
 .../TestHomogeneousTransformOfActor.png.md5        |     1 +
 .../Baseline/TestImageResliceMapperAlpha.png.md5   |     1 +
 .../TestImageResliceMapperBackground.png.md5       |     1 +
 .../Baseline/TestImageResliceMapperBorder.png.md5  |     1 +
 .../TestImageResliceMapperInterpolation.png.md5    |     1 +
 .../Baseline/TestImageResliceMapperOffAxis.png.md5 |     1 +
 .../TestImageResliceMapperOrient3D.png.md5         |     1 +
 .../Baseline/TestImageResliceMapperSlab.png.md5    |     1 +
 .../Baseline/TestImageSliceMapperAlpha.png.md5     |     1 +
 .../TestImageSliceMapperBackground.png.md5         |     1 +
 .../TestImageSliceMapperBackground_1.png.md5       |     1 +
 .../Baseline/TestImageSliceMapperBorder.png.md5    |     1 +
 .../Baseline/TestImageSliceMapperBorder_1.png.md5  |     1 +
 .../TestImageSliceMapperInterpolation.png.md5      |     1 +
 .../Baseline/TestImageSliceMapperOrient2D.png.md5  |     1 +
 .../Baseline/TestImageSliceMapperOrient3D.png.md5  |     1 +
 .../Testing/Data/Baseline/TestImageStack.png.md5   |     1 +
 .../Testing/Data/Baseline/TestImageStack_1.png.md5 |     1 +
 .../Data/Baseline/TestLabelPlacementMapper.png.md5 |     1 +
 .../Baseline/TestLabelPlacementMapper2D.png.md5    |     1 +
 .../Baseline/TestLabelPlacementMapper2D_1.png.md5  |     1 +
 .../Baseline/TestLabelPlacementMapper2D_2.png.md5  |     1 +
 ...estLabelPlacementMapperCoincidentPoints.png.md5 |     1 +
 .../Testing/Data/Baseline/TestLabelPlacer.png.md5  |     1 +
 .../Data/Baseline/TestLabelPlacer2D.png.md5        |     1 +
 .../Data/Baseline/TestLabelPlacer2D_1.png.md5      |     1 +
 .../Data/Baseline/TestLabelPlacer2D_2.png.md5      |     1 +
 .../Data/Baseline/TestLabelPlacer2D_3.png.md5      |     1 +
 .../TestLabelPlacerCoincidentPoints.png.md5        |     1 +
 .../Testing/Data/Baseline/TestLightActor.png.md5   |     1 +
 .../Data/Baseline/TestMultiTexturing.png.md5       |     1 +
 .../Baseline/TestMultiTexturingTransform.png.md5   |     1 +
 .../Baseline/TestMultiTexturingTransform_1.png.md5 |     1 +
 .../Data/Baseline/TestMultiTexturing_1.png.md5     |     1 +
 .../TestMultiblockDisplayProperties.png.md5        |     1 +
 .../TestMultiblockDisplayProperties_1.png.md5      |     1 +
 .../Testing/Data/Baseline/TestOSConeCxx.png.md5    |     1 +
 .../Data/Baseline/TestOffAxisStereo.png.md5        |     1 +
 .../Testing/Data/Baseline/TestOpacity.png.md5      |     1 +
 .../Testing/Data/Baseline/TestOpacity_1.png.md5    |     1 +
 .../Data/Baseline/TestOpenGLPolyDataMapper.png.md5 |     1 +
 .../Data/Baseline/TestOrderedTriangulator.png.md5  |     1 +
 .../Data/Baseline/TestPolygonSelection.png.md5     |     1 +
 .../TestResetCameraVerticalAspectRatio.png.md5     |     1 +
 ...tResetCameraVerticalAspectRatioParallel.png.md5 |     1 +
 .../Testing/Data/Baseline/TestScalarBar.png.md5    |     1 +
 .../Testing/Data/Baseline/TestScalarBar_1.png.md5  |     1 +
 .../Testing/Data/Baseline/TestScalarBar_2.png.md5  |     1 +
 .../Testing/Data/Baseline/TestScalarBar_3.png.md5  |     1 +
 .../Data/Baseline/TestSetImageOrientation.png.md5  |     1 +
 .../Data/Baseline/TestShadowMapPass.png.md5        |     1 +
 .../Data/Baseline/TestShadowMapPass_1.png.md5      |     1 +
 .../TestSobelGradientMagnitudePass.png.md5         |     1 +
 .../TestSobelGradientMagnitudePass_1.png.md5       |     1 +
 .../OpenGL/Testing/Data/Baseline/TestTDx.png.md5   |     1 +
 .../Data/Baseline/TestTStripsColorsTCoords.png.md5 |     1 +
 .../TestTStripsNormalsColorsTCoords.png.md5        |     1 +
 .../Baseline/TestTStripsNormalsTCoords.png.md5     |     1 +
 .../Data/Baseline/TestTStripsTCoords.png.md5       |     1 +
 .../Baseline/TestTextActor3DAlphaBlending.png.md5  |     1 +
 .../Baseline/TestTextActor3DDepthPeeling.png.md5   |     1 +
 .../Baseline/TestTextActorAlphaBlending.png.md5    |     1 +
 .../Baseline/TestTextActorDepthPeeling.png.md5     |     1 +
 .../Testing/Data/Baseline/TestTextureRGBA.png.md5  |     1 +
 .../Baseline/TestTextureRGBADepthPeeling.png.md5   |     1 +
 .../Data/Baseline/TestTexturedBackground.png.md5   |     1 +
 .../Testing/Data/Baseline/TestTilingCxx.png.md5    |     1 +
 .../TestTransformCoordinateUseDouble.png.md5       |     1 +
 .../TestTranslucentImageActorAlphaBlending.png.md5 |     1 +
 .../TestTranslucentImageActorDepthPeeling.png.md5  |     1 +
 .../TestTranslucentLUTAlphaBlending.png.md5        |     1 +
 .../TestTranslucentLUTDepthPeeling.png.md5         |     1 +
 .../TestTranslucentLUTDepthPeelingPass.png.md5     |     1 +
 .../TestTranslucentLUTDepthPeelingPass_1.png.md5   |     1 +
 .../TestTranslucentLUTDepthPeeling_1.png.md5       |     1 +
 .../TestTranslucentLUTTextureAlphaBlending.png.md5 |     1 +
 .../TestTranslucentLUTTextureDepthPeeling.png.md5  |     1 +
 ...TestTranslucentLUTTextureDepthPeeling_1.png.md5 |     1 +
 Rendering/OpenGL/Testing/Python/CMakeLists.txt     |     9 +-
 .../Testing/Python/TestFreetypeTextMapper.py       |     4 -
 .../Testing/Python/TestFreetypeTextMapperBigger.py |     4 -
 Rendering/OpenGL/Testing/Tcl/CMakeLists.txt        |     6 +-
 Rendering/OpenGL/module.cmake                      |     4 +-
 Rendering/OpenGL/vtkCameraPass.cxx                 |     9 +-
 Rendering/OpenGL/vtkCarbonRenderWindow.cxx         |     1 -
 Rendering/OpenGL/vtkClearZPass.cxx                 |     2 +-
 Rendering/OpenGL/vtkCocoaRenderWindow.h            |     7 +
 Rendering/OpenGL/vtkCocoaRenderWindow.mm           |    80 +-
 Rendering/OpenGL/vtkCocoaRenderWindowInteractor.mm |    58 +-
 Rendering/OpenGL/vtkColorMaterialHelper.cxx        |    16 +
 Rendering/OpenGL/vtkColorMaterialHelper.h          |    15 +-
 Rendering/OpenGL/vtkColorMaterialHelper_vs.glsl    |     6 -
 Rendering/OpenGL/vtkCoreGraphicsGPUInfoList.cxx    |     2 +-
 Rendering/OpenGL/vtkDataTransferHelper.cxx         |     2 +-
 Rendering/OpenGL/vtkDefaultPass.cxx                |     2 +-
 Rendering/OpenGL/vtkDepthPeelingPass.cxx           |   285 +-
 Rendering/OpenGL/vtkDirectXGPUInfoList.cxx         |     2 +-
 Rendering/OpenGL/vtkDummyGPUInfoList.cxx           |     2 +-
 Rendering/OpenGL/vtkFrameBufferObject.cxx          |   582 +-
 Rendering/OpenGL/vtkFrameBufferObject.h            |   122 +-
 Rendering/OpenGL/vtkFrameBufferObject2.cxx         |   728 ++
 Rendering/OpenGL/vtkFrameBufferObject2.h           |   316 +
 Rendering/OpenGL/vtkGLSLShader.cxx                 |   521 -
 Rendering/OpenGL/vtkGLSLShader.h                   |   151 -
 Rendering/OpenGL/vtkGLSLShaderDeviceAdapter.cxx    |   294 -
 Rendering/OpenGL/vtkGLSLShaderDeviceAdapter.h      |    83 -
 Rendering/OpenGL/vtkGLSLShaderDeviceAdapter2.cxx   |     5 +
 Rendering/OpenGL/vtkGLSLShaderProgram.cxx          |   403 -
 Rendering/OpenGL/vtkGLSLShaderProgram.h            |   101 -
 Rendering/OpenGL/vtkGaussianBlurPass.cxx           |     7 +-
 Rendering/OpenGL/vtkGenericOpenGLRenderWindow.cxx  |    43 +-
 Rendering/OpenGL/vtkGenericOpenGLRenderWindow.h    |    10 +
 Rendering/OpenGL/vtkImageProcessingPass.cxx        |     2 +-
 Rendering/OpenGL/vtkLightingHelper.cxx             |    14 +-
 Rendering/OpenGL/vtkLightingHelper.h               |    11 +-
 Rendering/OpenGL/vtkLightsPass.cxx                 |     2 +-
 Rendering/OpenGL/vtkLineIntegralConvolution2D.cxx  |  1002 --
 Rendering/OpenGL/vtkLineIntegralConvolution2D.h    |   225 -
 .../OpenGL/vtkLineIntegralConvolution2D_fs.glsl    |   149 -
 .../OpenGL/vtkLineIntegralConvolution2D_fs1.glsl   |   151 -
 .../OpenGL/vtkLineIntegralConvolution2D_fs2.glsl   |   111 -
 Rendering/OpenGL/vtkOSOpenGLRenderWindow.cxx       |     4 +
 Rendering/OpenGL/vtkOpaquePass.cxx                 |     2 +-
 Rendering/OpenGL/vtkOpenGLActor.cxx                |     5 +
 Rendering/OpenGL/vtkOpenGLCamera.cxx               |     9 +
 Rendering/OpenGL/vtkOpenGLClipPlanesPainter.cxx    |     7 +-
 ...tkOpenGLCoincidentTopologyResolutionPainter.cxx |     8 +
 Rendering/OpenGL/vtkOpenGLDisplayListPainter.cxx   |    10 +-
 Rendering/OpenGL/vtkOpenGLError.h.in               |   252 +
 Rendering/OpenGL/vtkOpenGLExtensionManager.cxx     |   522 +-
 Rendering/OpenGL/vtkOpenGLExtensionManager.h       |   106 +-
 Rendering/OpenGL/vtkOpenGLGlyph3DMapper.cxx        |    12 +-
 Rendering/OpenGL/vtkOpenGLHardwareSelector.cxx     |   217 +
 Rendering/OpenGL/vtkOpenGLHardwareSelector.h       |    64 +
 Rendering/OpenGL/vtkOpenGLImageMapper.cxx          |    60 +-
 Rendering/OpenGL/vtkOpenGLImageSliceMapper.cxx     |    26 +
 Rendering/OpenGL/vtkOpenGLLight.cxx                |     4 +
 Rendering/OpenGL/vtkOpenGLLightMonitor.cxx         |   199 +
 Rendering/OpenGL/vtkOpenGLLightMonitor.h           |   104 +
 .../OpenGL/vtkOpenGLModelViewProjectionMonitor.cxx |    88 +
 .../OpenGL/vtkOpenGLModelViewProjectionMonitor.h   |    79 +
 Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.cxx |    32 +-
 Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.h   |    24 +-
 Rendering/OpenGL/vtkOpenGLPolyDataMapper.cxx       |    18 +-
 Rendering/OpenGL/vtkOpenGLPolyDataMapper2D.cxx     |    28 +-
 Rendering/OpenGL/vtkOpenGLProperty.cxx             |    99 +-
 Rendering/OpenGL/vtkOpenGLProperty.h               |     4 -
 Rendering/OpenGL/vtkOpenGLRenderWindow.cxx         |    50 +-
 Rendering/OpenGL/vtkOpenGLRenderWindow.h           |    25 +-
 Rendering/OpenGL/vtkOpenGLRenderer.cxx             |   315 +-
 .../OpenGL/vtkOpenGLRepresentationPainter.cxx      |     4 +
 .../OpenGL/vtkOpenGLScalarsToColorsPainter.cxx     |    10 +
 Rendering/OpenGL/vtkOpenGLState.cxx                |     1 +
 Rendering/OpenGL/vtkOpenGLTexture.cxx              |    99 +-
 Rendering/OpenGL/vtkOverlayPass.cxx                |     2 +-
 Rendering/OpenGL/vtkPixelBufferObject.cxx          |   463 +-
 Rendering/OpenGL/vtkPixelBufferObject.h            |   110 +-
 Rendering/OpenGL/vtkRenderPass.cxx                 |     2 +-
 Rendering/OpenGL/vtkRenderState.cxx                |     2 +-
 Rendering/OpenGL/vtkRenderbuffer.cxx               |   215 +
 Rendering/OpenGL/vtkRenderbuffer.h                 |    94 +
 Rendering/OpenGL/vtkRenderingOpenGLConfigure.h.in  |     4 -
 Rendering/OpenGL/vtkSequencePass.cxx               |     2 +-
 Rendering/OpenGL/vtkShader2.cxx                    |   142 +-
 Rendering/OpenGL/vtkShader2.h                      |    18 +-
 Rendering/OpenGL/vtkShader2Collection.cxx          |     2 +-
 Rendering/OpenGL/vtkShaderProgram2.cxx             |   230 +-
 Rendering/OpenGL/vtkShaderProgram2.h               |   162 +-
 Rendering/OpenGL/vtkShadowMapBakerPass.cxx         |     5 +
 Rendering/OpenGL/vtkShadowMapPass.cxx              |     7 +-
 Rendering/OpenGL/vtkSobelGradientMagnitudePass.cxx |    82 +-
 .../vtkSobelGradientMagnitudePassShader1_fs.glsl   |     4 +-
 .../vtkSobelGradientMagnitudePassShader2_fs.glsl   |     4 +-
 Rendering/OpenGL/vtkTDxDevice.cxx                  |     2 +-
 Rendering/OpenGL/vtkTDxInteractorStyleGeo.cxx      |     2 +-
 Rendering/OpenGL/vtkTDxMacDevice.cxx               |     2 +-
 Rendering/OpenGL/vtkTDxUnixDevice.cxx              |     2 +-
 Rendering/OpenGL/vtkTDxWinDevice.cxx               |     2 +-
 Rendering/OpenGL/vtkTextureObject.cxx              |   589 +-
 Rendering/OpenGL/vtkTextureObject.h                |    92 +-
 Rendering/OpenGL/vtkTextureUnitManager.cxx         |     2 +-
 Rendering/OpenGL/vtkTranslucentPass.cxx            |     2 +-
 Rendering/OpenGL/vtkUniformVariables.cxx           |     8 +-
 Rendering/OpenGL/vtkUniformVariables.h             |    53 +
 Rendering/OpenGL/vtkVolumetricPass.cxx             |     2 +-
 Rendering/OpenGL/vtkWin32OpenGLRenderWindow.cxx    |    30 +-
 Rendering/OpenGL/vtkXGPUInfoList.cxx               |     2 +-
 Rendering/OpenGL/vtkXOpenGLRenderWindow.cxx        |    19 +-
 Rendering/Parallel/Testing/Cxx/CMakeLists.txt      |    18 +-
 .../Cxx/TestDistributedDataCompositeZPass.cxx      |     3 +-
 .../Parallel/Testing/Cxx/TestPCompositeZPass.cxx   |     3 +-
 .../Parallel/Testing/Cxx/TestPShadowMapPass.cxx    |     3 +-
 .../Testing/Data/Baseline/PrmMagnify.png.md5       |     1 +
 .../TestDistributedDataCompositeZPass.png.md5      |     1 +
 .../Data/Baseline/TestPCompositeZPass.png.md5      |     1 +
 .../Data/Baseline/TestPShadowMapPass.png.md5       |     1 +
 .../Data/Baseline/TestPShadowMapPass_1.png.md5     |     1 +
 .../Data/Baseline/TestPShadowMapPass_2.png.md5     |     1 +
 Rendering/Parallel/module.cmake                    |     2 +
 .../vtkClientServerSynchronizedRenderers.cxx       |     2 +-
 Rendering/Parallel/vtkCompositeRGBAPass.cxx        |    64 +-
 Rendering/Parallel/vtkCompositeZPass.cxx           |    26 +-
 Rendering/Parallel/vtkPHardwareSelector.cxx        |     3 +-
 Rendering/Parallel/vtkPHardwareSelector.h          |     6 +-
 Rendering/Parallel/vtkParallelRenderManager.h      |     4 +-
 Rendering/Parallel/vtkSynchronizedRenderers.cxx    |     7 +-
 Rendering/ParallelLIC/CMakeLists.txt               |   102 +
 Rendering/ParallelLIC/module.cmake                 |     8 +
 Rendering/ParallelLIC/vtkMPIPixelTT.cxx            |    43 +
 Rendering/ParallelLIC/vtkMPIPixelTT.h              |    61 +
 Rendering/ParallelLIC/vtkMPIPixelView.h            |   108 +
 .../ParallelLIC/vtkPLineIntegralConvolution2D.cxx  |   128 +
 .../ParallelLIC/vtkPLineIntegralConvolution2D.h    |    88 +
 Rendering/ParallelLIC/vtkPPainterCommunicator.cxx  |   343 +
 Rendering/ParallelLIC/vtkPPainterCommunicator.h    |   101 +
 Rendering/ParallelLIC/vtkPPixelTransfer.cxx        |    84 +
 Rendering/ParallelLIC/vtkPPixelTransfer.h          |   524 +
 Rendering/ParallelLIC/vtkPSurfaceLICComposite.cxx  |  1857 +++
 Rendering/ParallelLIC/vtkPSurfaceLICComposite.h    |   198 +
 .../ParallelLIC/vtkPSurfaceLICComposite_Comp.glsl  |    34 +
 Rendering/ParallelLIC/vtkPSurfaceLICPainter.cxx    |   172 +
 Rendering/ParallelLIC/vtkPSurfaceLICPainter.h      |    87 +
 Rendering/ParallelLIC/vtkParallelTimer.cxx         |   673 +
 Rendering/ParallelLIC/vtkParallelTimer.h           |   262 +
 Rendering/Qt/CMakeLists.txt                        |    17 +-
 Rendering/Qt/module.cmake                          |     4 +-
 Rendering/Qt/vtkQImageToImageSource.h              |     2 +-
 .../Data/Baseline/TestTkRenderWidget.png.md5       |     1 +
 .../Baseline/TestTkRenderWindowInteractor.png.md5  |     1 +
 .../Tk/Testing/Data/Baseline/cursor3D.png.md5      |     1 +
 Rendering/Tk/Testing/Python/CMakeLists.txt         |    16 +-
 Rendering/Tk/Testing/Python/TestTkRenderWidget.py  |     3 -
 .../Testing/Python/TestTkRenderWindowInteractor.py |     3 -
 Rendering/Tk/Testing/Tcl/CMakeLists.txt            |     4 +-
 Rendering/Tk/vtkCocoaTkUtilities.h                 |     4 +-
 Rendering/Tk/vtkTkImageViewerWidget.cxx            |     4 +-
 Rendering/Tk/vtkTkRenderWidget.cxx                 |     4 +-
 Rendering/Volume/Testing/Cxx/CMakeLists.txt        |   100 +-
 .../Testing/Cxx/ProjectedTetrahedraZoomIn.cxx      |    18 +-
 .../Cxx/TestGPURayCastCompositeShadeMask.cxx       |    18 +-
 .../Volume/Testing/Cxx/TestProjectedTetrahedra.cxx |    21 +-
 .../Baseline/HomogeneousRayIntegration.png.md5     |     1 +
 .../Data/Baseline/LinearRayIntegration.png.md5     |     1 +
 .../Data/Baseline/PartialPreIntegration.png.md5    |     1 +
 .../Baseline/PreIntegrationIncremental.png.md5     |     1 +
 .../Baseline/PreIntegrationNonIncremental.png.md5  |     1 +
 .../Data/Baseline/TestBunykRayCastFunction.png.md5 |     1 +
 .../Baseline/TestFinalColorWindowLevel.png.md5     |     1 +
 .../TestFixedPointRayCastLightComponents.png.md5   |     1 +
 .../Baseline/TestFixedPointRayCasterLinear.png.md5 |     1 +
 .../TestFixedPointRayCasterLinearCropped.png.md5   |     1 +
 .../TestFixedPointRayCasterLinear_1.png.md5        |     1 +
 .../TestFixedPointRayCasterNearest.png.md5         |     1 +
 .../TestFixedPointRayCasterNearestCropped.png.md5  |     1 +
 .../TestFixedPointRayCasterNearest_1.png.md5       |     1 +
 .../Data/Baseline/TestGPURayCastAdditive.png.md5   |     1 +
 .../TestGPURayCastCompositeBinaryMask.png.md5      |     1 +
 .../Baseline/TestGPURayCastCompositeMask.png.md5   |     1 +
 .../TestGPURayCastCompositeMaskBlend.png.md5       |     1 +
 .../Baseline/TestGPURayCastCompositeMask_1.png.md5 |     1 +
 .../TestGPURayCastCompositeShadeMask.png.md5       |     1 +
 .../TestGPURayCastCompositeShadeMask_1.png.md5     |     1 +
 .../Baseline/TestGPURayCastCompositeToMIP.png.md5  |     1 +
 .../Data/Baseline/TestGPURayCastCropping.png.md5   |     1 +
 .../Baseline/TestGPURayCastDataTypesMIP.png.md5    |     1 +
 .../Baseline/TestGPURayCastDataTypesMinIP.png.md5  |     1 +
 .../TestGPURayCastFourComponentsComposite.png.md5  |     1 +
 ...RayCastFourComponentsCompositeStreaming.png.md5 |     1 +
 .../TestGPURayCastFourComponentsMIP.png.md5        |     1 +
 .../TestGPURayCastFourComponentsMinIP.png.md5      |     1 +
 .../Baseline/TestGPURayCastMIPBinaryMask.png.md5   |     1 +
 .../Baseline/TestGPURayCastMIPToComposite.png.md5  |     1 +
 .../TestGPURayCastNearestDataTypesMIP.png.md5      |     1 +
 .../TestGPURayCastPerspectiveParallel.png.md5      |     1 +
 .../Data/Baseline/TestHAVSVolumeMapper.png.md5     |     1 +
 .../Data/Baseline/TestHAVSVolumeMapper_1.png.md5   |     1 +
 .../Testing/Data/Baseline/TestLODProp3D.png.md5    |     1 +
 .../Baseline/TestMinIntensityRendering.png.md5     |     1 +
 .../Testing/Data/Baseline/TestPTZSweep.png.md5     |     1 +
 .../Data/Baseline/TestProjectedHexahedra.png.md5   |     1 +
 .../Data/Baseline/TestProjectedTetrahedra.png.md5  |     1 +
 .../Baseline/TestProjectedTetrahedra_1.png.md5     |     1 +
 .../Baseline/TestProjectedTetrahedra_2.png.md5     |     1 +
 .../Baseline/TestProjectedTetrahedra_3.png.md5     |     1 +
 .../Data/Baseline/TestProp3DFollower.png.md5       |     1 +
 .../Data/Baseline/TestSmartVolumeMapper.png.md5    |     1 +
 .../TestSmartVolumeMapperWindowLevel.png.md5       |     1 +
 .../Data/Baseline/TestTM3DLightComponents.png.md5  |     1 +
 .../Data/Baseline/VolumeOutlineSource.png.md5      |     1 +
 .../Baseline/VolumeOutlineSourceClipped.png.md5    |     1 +
 .../Testing/Data/Baseline/VolumePicker.png.md5     |     1 +
 .../Testing/Data/Baseline/VolumePickerCrop.png.md5 |     1 +
 .../Data/Baseline/ZsweepConcavities.png.md5        |     1 +
 .../Volume/Testing/Data/Baseline/cursor3D.png.md5  |     1 +
 .../Volume/Testing/Data/Baseline/gaussian.png.md5  |     1 +
 .../Volume/Testing/Data/Baseline/volProt.png.md5   |     1 +
 .../Volume/Testing/Data/Baseline/volProt_1.png.md5 |     1 +
 .../Testing/Data/Baseline/volRCClipPlanes.png.md5  |     1 +
 .../Testing/Data/Baseline/volRCCropRegions.png.md5 |     1 +
 .../Testing/Data/Baseline/volRCRotateClip.png.md5  |     1 +
 .../Data/Baseline/volTM2DCropRegions.png.md5       |     1 +
 .../Data/Baseline/volTM2DRotateClip.png.md5        |     1 +
 .../Baseline/volTM3DCompressedCropRegions.png.md5  |     1 +
 .../volTM3DCompressedCropRegions_1.png.md5         |     1 +
 .../volTM3DCompressedCropRegions_2.png.md5         |     1 +
 .../volTM3DCompressedCropRegions_3.png.md5         |     1 +
 .../Data/Baseline/volTM3DCropRegions.png.md5       |     1 +
 .../Data/Baseline/volTM3DCropRegions_1.png.md5     |     1 +
 .../Data/Baseline/volTM3DCropRegions_2.png.md5     |     1 +
 .../Data/Baseline/volTM3DCropRegions_3.png.md5     |     1 +
 .../Data/Baseline/volTM3DCropRegions_4.png.md5     |     1 +
 .../Data/Baseline/volTM3DCropRegions_5.png.md5     |     1 +
 .../Data/Baseline/volTM3DRotateClip.png.md5        |     1 +
 .../Data/Baseline/volTM3DRotateClip_1.png.md5      |     1 +
 .../Data/Baseline/volTM3DRotateClip_2.png.md5      |     1 +
 .../Data/Baseline/volTM3DRotateClip_3.png.md5      |     1 +
 Rendering/Volume/Testing/Python/CMakeLists.txt     |    45 +-
 .../Python/TestFixedPointRayCasterLinear.py        |     4 -
 .../Python/TestFixedPointRayCasterLinearCropped.py |     4 -
 .../Python/TestFixedPointRayCasterNearest.py       |     4 -
 .../TestFixedPointRayCasterNearestCropped.py       |     4 -
 Rendering/Volume/Testing/Python/VolumePicker.py    |     4 -
 .../Volume/Testing/Python/volTM3DRotateClip.py     |     1 -
 Rendering/Volume/Testing/Tcl/CMakeLists.txt        |    43 +-
 Rendering/Volume/Testing/Tcl/volTM3DRotateClip.tcl |     1 -
 Rendering/Volume/vtkDirectionEncoder.h             |     4 +-
 Rendering/Volume/vtkGPUVolumeRayCastMapper.cxx     |     2 +-
 Rendering/Volume/vtkGPUVolumeRayCastMapper.h       |     2 +-
 Rendering/Volume/vtkHAVSVolumeMapper.cxx           |    68 +-
 Rendering/Volume/vtkProjectedTetrahedraMapper.cxx  |    61 +-
 Rendering/Volume/vtkProjectedTetrahedraMapper.h    |     7 +
 Rendering/Volume/vtkSmartVolumeMapper.cxx          |     8 +-
 .../vtkUnstructuredGridBunykRayCastFunction.cxx    |    50 +-
 .../vtkUnstructuredGridBunykRayCastFunction.h      |     4 +-
 .../vtkUnstructuredGridLinearRayIntegrator.cxx     |    12 +-
 .../Volume/vtkUnstructuredGridVolumeMapper.cxx     |    16 +-
 Rendering/Volume/vtkUnstructuredGridVolumeMapper.h |     8 +-
 .../vtkUnstructuredGridVolumeRayCastFunction.h     |     4 +-
 .../vtkUnstructuredGridVolumeZSweepMapper.cxx      |    66 +-
 Rendering/Volume/vtkVolumeMapper.h                 |     2 +-
 Rendering/Volume/vtkVolumeRayCastFunction.h        |     4 +-
 Rendering/Volume/vtkVolumeTextureMapper2D.h        |     4 +-
 Rendering/Volume/vtkVolumeTextureMapper3D.h        |     2 +-
 Rendering/VolumeOpenGL/module.cmake                |     4 +
 .../vtkOpenGLGPUVolumeRayCastMapper.cxx            |   187 +-
 .../VolumeOpenGL/vtkOpenGLHAVSVolumeMapper.cxx     |    75 +-
 .../vtkOpenGLProjectedAAHexahedraMapper.cxx        |    49 +-
 .../vtkOpenGLProjectedTetrahedraMapper.cxx         |   528 +-
 .../vtkOpenGLProjectedTetrahedraMapper.h           |    10 +-
 .../vtkOpenGLRayCastImageDisplayHelper.cxx         |     8 +
 .../vtkOpenGLVolumeTextureMapper2D.cxx             |     9 +
 .../vtkOpenGLVolumeTextureMapper3D.cxx             |   139 +-
 Testing/Core/ConfigSummary.txt.in                  |     6 -
 Testing/Core/HeaderTesting.py                      |    18 +-
 Testing/Core/vtkTestDriver.h                       |     1 +
 Testing/Core/vtk_site_history.py                   |   151 +
 Testing/Core/vtk_submitter_summary.py              |    17 +-
 Testing/Data/2LYZ.pdb.md5                          |     1 +
 Testing/Data/2h2o.aux.md5                          |     1 +
 Testing/Data/3GQP.pdb.md5                          |     1 +
 Testing/Data/42400-IDGH.stl.md5                    |     1 +
 .../AMR/Enzo/DD0010/moving7_0010.boundary.hdf.md5  |     1 +
 .../Data/AMR/Enzo/DD0010/moving7_0010.boundary.md5 |     1 +
 .../Data/AMR/Enzo/DD0010/moving7_0010.cpu0000.md5  |     1 +
 .../Data/AMR/Enzo/DD0010/moving7_0010.harrays.md5  |     1 +
 .../AMR/Enzo/DD0010/moving7_0010.hierarchy.md5     |     1 +
 Testing/Data/AMR/Enzo/DD0010/moving7_0010.md5      |     1 +
 .../Data/AMR/Enzo/DD0010/moving7_0010.procmap.md5  |     1 +
 .../Data/AMR/HierarchicalBoxDataset.v1.0.vthb.md5  |     1 +
 .../HierarchicalBoxDataset.v1.0_0.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_1.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_10.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_11.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_12.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_13.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_14.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_15.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_16.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_17.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_18.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_19.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_2.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_20.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_21.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_22.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_23.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_24.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_25.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_26.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_27.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_28.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_29.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_3.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_30.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_31.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_32.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_33.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_34.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_35.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_36.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_37.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_38.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_39.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_4.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_40.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_41.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_42.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_43.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_44.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_45.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_46.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_47.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_48.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_49.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_5.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_50.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_51.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_52.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_53.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_54.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_55.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_56.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_57.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_58.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_59.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_6.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_60.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_61.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_62.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_63.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_64.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_65.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_66.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_67.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_68.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_69.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_7.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_70.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_71.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_72.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_73.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_74.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_75.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_76.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_77.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_78.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_79.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_8.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.0_80.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.0_9.vti.md5          |     1 +
 .../Data/AMR/HierarchicalBoxDataset.v1.1.vthb.md5  |     1 +
 .../HierarchicalBoxDataset.v1.1_0.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_1.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_10.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_11.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_12.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_13.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_14.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_15.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_16.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_17.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_18.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_19.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_2.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_20.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_21.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_22.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_23.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_24.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_25.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_26.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_27.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_28.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_29.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_3.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_30.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_31.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_32.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_33.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_34.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_35.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_36.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_37.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_38.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_39.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_4.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_40.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_41.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_42.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_43.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_44.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_45.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_46.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_47.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_48.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_49.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_5.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_50.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_51.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_52.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_53.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_54.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_55.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_56.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_57.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_58.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_59.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_6.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_60.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_61.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_62.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_63.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_64.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_65.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_66.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_67.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_68.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_69.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_7.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_70.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_71.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_72.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_73.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_74.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_75.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_76.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_77.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_78.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_79.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_8.vti.md5          |     1 +
 .../HierarchicalBoxDataset.v1.1_80.vti.md5         |     1 +
 .../HierarchicalBoxDataset.v1.1_9.vti.md5          |     1 +
 Testing/Data/AngularSector.vtk.md5                 |     1 +
 Testing/Data/B.pgm.md5                             |     1 +
 Testing/Data/BlueCircle.png.md5                    |     1 +
 Testing/Data/CityPopulationsUTF8.txt.md5           |     1 +
 .../sample.spcth-timeseries.md5                    |     1 +
 Testing/Data/Dave_Karelitz_Small/spcth.0.md5       |     1 +
 Testing/Data/Dave_Karelitz_Small/spcth.1.md5       |     1 +
 Testing/Data/Dave_Karelitz_Small/spcth.2.md5       |     1 +
 Testing/Data/Dave_Karelitz_Small/spcth.3.md5       |     1 +
 Testing/Data/Dave_Karelitz_Small/spcth_a.0.md5     |     1 +
 Testing/Data/Dave_Karelitz_Small/spcth_a.1.md5     |     1 +
 Testing/Data/Dave_Karelitz_Small/spcth_a.2.md5     |     1 +
 Testing/Data/Dave_Karelitz_Small/spcth_a.3.md5     |     1 +
 Testing/Data/E07733S002I009.MR.md5                 |     1 +
 Testing/Data/EnSight/RectGrid_ascii.case.md5       |     1 +
 Testing/Data/EnSight/RectGrid_ascii.geo.md5        |     1 +
 Testing/Data/EnSight/RectGrid_ascii_pd_scalars.md5 |     1 +
 Testing/Data/EnSight/RectGrid_ascii_pd_vectors.md5 |     1 +
 Testing/Data/EnSight/RectGrid_bin.case.md5         |     1 +
 Testing/Data/EnSight/RectGrid_bin.geo.md5          |     1 +
 Testing/Data/EnSight/RectGrid_bin_pd_scalars.md5   |     1 +
 Testing/Data/EnSight/RectGrid_bin_pd_vectors.md5   |     1 +
 Testing/Data/EnSight/TEST.case.md5                 |     1 +
 Testing/Data/EnSight/TEST_bin.case.md5             |     1 +
 Testing/Data/EnSight/blow1_ascii.case.md5          |     1 +
 Testing/Data/EnSight/blow1_ascii.geo.md5           |     1 +
 .../Data/EnSight/blow1_ascii_cd_displacement.md5   |     1 +
 Testing/Data/EnSight/blow1_ascii_cd_thickness.md5  |     1 +
 .../Data/EnSight/blow1_ascii_pd_displacement.md5   |     1 +
 .../Data/EnSight/blow1_ascii_pd_displacement1.md5  |     1 +
 Testing/Data/EnSight/blow1_ascii_pd_thickness.md5  |     1 +
 Testing/Data/EnSight/blow1_ascii_pd_thickness1.md5 |     1 +
 Testing/Data/EnSight/blow1_bin.case.md5            |     1 +
 Testing/Data/EnSight/blow1_bin.geo.md5             |     1 +
 Testing/Data/EnSight/blow1_bin_cd_displacement.md5 |     1 +
 Testing/Data/EnSight/blow1_bin_cd_thickness.md5    |     1 +
 Testing/Data/EnSight/blow1_bin_pd_displacement.md5 |     1 +
 .../Data/EnSight/blow1_bin_pd_displacement1.md5    |     1 +
 Testing/Data/EnSight/blow1_bin_pd_thickness.md5    |     1 +
 Testing/Data/EnSight/blow1_bin_pd_thickness1.md5   |     1 +
 Testing/Data/EnSight/blow2_ascii.case.md5          |     1 +
 Testing/Data/EnSight/blow2_ascii.geo000.md5        |     1 +
 Testing/Data/EnSight/blow2_ascii.geo001.md5        |     1 +
 .../EnSight/blow2_ascii_cd_displacement000.md5     |     1 +
 .../EnSight/blow2_ascii_cd_displacement001.md5     |     1 +
 .../Data/EnSight/blow2_ascii_cd_thickness000.md5   |     1 +
 .../Data/EnSight/blow2_ascii_cd_thickness001.md5   |     1 +
 .../EnSight/blow2_ascii_pd_displacement000.md5     |     1 +
 .../EnSight/blow2_ascii_pd_displacement001.md5     |     1 +
 .../EnSight/blow2_ascii_pd_displacement1000.md5    |     1 +
 .../EnSight/blow2_ascii_pd_displacement1001.md5    |     1 +
 .../Data/EnSight/blow2_ascii_pd_thickness000.md5   |     1 +
 .../Data/EnSight/blow2_ascii_pd_thickness001.md5   |     1 +
 .../Data/EnSight/blow2_ascii_pd_thickness1000.md5  |     1 +
 .../Data/EnSight/blow2_ascii_pd_thickness1001.md5  |     1 +
 Testing/Data/EnSight/blow2_bin.case.md5            |     1 +
 Testing/Data/EnSight/blow2_bin.geo000.md5          |     1 +
 Testing/Data/EnSight/blow2_bin.geo001.md5          |     1 +
 .../Data/EnSight/blow2_bin_cd_displacement000.md5  |     1 +
 .../Data/EnSight/blow2_bin_cd_displacement001.md5  |     1 +
 Testing/Data/EnSight/blow2_bin_cd_thickness000.md5 |     1 +
 Testing/Data/EnSight/blow2_bin_cd_thickness001.md5 |     1 +
 .../Data/EnSight/blow2_bin_pd_displacement000.md5  |     1 +
 .../Data/EnSight/blow2_bin_pd_displacement001.md5  |     1 +
 Testing/Data/EnSight/blow2_bin_pd_thickness000.md5 |     1 +
 Testing/Data/EnSight/blow2_bin_pd_thickness001.md5 |     1 +
 Testing/Data/EnSight/blow3_bin.case.md5            |     1 +
 Testing/Data/EnSight/blow3_bin.geo.md5             |     1 +
 Testing/Data/EnSight/blow3_bin_cd_displacement.md5 |     1 +
 Testing/Data/EnSight/blow3_bin_cd_thickness.md5    |     1 +
 Testing/Data/EnSight/blow3_bin_pd_displacement.md5 |     1 +
 Testing/Data/EnSight/blow3_bin_pd_thickness.md5    |     1 +
 Testing/Data/EnSight/blow4_bin.case.md5            |     1 +
 Testing/Data/EnSight/blow4_bin.geo000.md5          |     1 +
 Testing/Data/EnSight/blow4_bin.geo001.md5          |     1 +
 .../Data/EnSight/blow4_bin_cd_displacement000.md5  |     1 +
 .../Data/EnSight/blow4_bin_cd_displacement001.md5  |     1 +
 Testing/Data/EnSight/blow4_bin_cd_thickness000.md5 |     1 +
 Testing/Data/EnSight/blow4_bin_cd_thickness001.md5 |     1 +
 .../Data/EnSight/blow4_bin_pd_displacement000.md5  |     1 +
 .../Data/EnSight/blow4_bin_pd_displacement001.md5  |     1 +
 Testing/Data/EnSight/blow4_bin_pd_thickness000.md5 |     1 +
 Testing/Data/EnSight/blow4_bin_pd_thickness001.md5 |     1 +
 Testing/Data/EnSight/blow5_ascii.case.md5          |     1 +
 Testing/Data/EnSight/blow5_ascii.geo.md5           |     1 +
 .../Data/EnSight/blow5_ascii_cd_displacement.md5   |     1 +
 Testing/Data/EnSight/blow5_ascii_cd_thickness.md5  |     1 +
 .../Data/EnSight/blow5_ascii_pd_displacement.md5   |     1 +
 Testing/Data/EnSight/blow5_ascii_pd_thickness.md5  |     1 +
 Testing/Data/EnSight/elements.ECsca_i.md5          |     1 +
 Testing/Data/EnSight/elements.ECsca_r.md5          |     1 +
 Testing/Data/EnSight/elements.ECvec_i.md5          |     1 +
 Testing/Data/EnSight/elements.ECvec_r.md5          |     1 +
 Testing/Data/EnSight/elements.Esca.md5             |     1 +
 Testing/Data/EnSight/elements.Eten.md5             |     1 +
 Testing/Data/EnSight/elements.Evec.md5             |     1 +
 Testing/Data/EnSight/elements.NCsca_i.md5          |     1 +
 Testing/Data/EnSight/elements.NCsca_r.md5          |     1 +
 Testing/Data/EnSight/elements.NCvec_i.md5          |     1 +
 Testing/Data/EnSight/elements.NCvec_r.md5          |     1 +
 Testing/Data/EnSight/elements.Nsca.md5             |     1 +
 Testing/Data/EnSight/elements.Nten.md5             |     1 +
 Testing/Data/EnSight/elements.Nvec.md5             |     1 +
 Testing/Data/EnSight/elements.case.md5             |     1 +
 Testing/Data/EnSight/elements.geo.md5              |     1 +
 Testing/Data/EnSight/elements.sos.md5              |     1 +
 Testing/Data/EnSight/elements6.ECsca_i.md5         |     1 +
 Testing/Data/EnSight/elements6.ECsca_r.md5         |     1 +
 Testing/Data/EnSight/elements6.ECvec_i.md5         |     1 +
 Testing/Data/EnSight/elements6.ECvec_r.md5         |     1 +
 Testing/Data/EnSight/elements6.Esca.md5            |     1 +
 Testing/Data/EnSight/elements6.Eten.md5            |     1 +
 Testing/Data/EnSight/elements6.Evec.md5            |     1 +
 Testing/Data/EnSight/elements6.NCsca_i.md5         |     1 +
 Testing/Data/EnSight/elements6.NCsca_r.md5         |     1 +
 Testing/Data/EnSight/elements6.NCvec_i.md5         |     1 +
 Testing/Data/EnSight/elements6.NCvec_r.md5         |     1 +
 Testing/Data/EnSight/elements6.Nsca.md5            |     1 +
 Testing/Data/EnSight/elements6.Nten.md5            |     1 +
 Testing/Data/EnSight/elements6.Nvec.md5            |     1 +
 Testing/Data/EnSight/elements6.case.md5            |     1 +
 Testing/Data/EnSight/elements6.geo.md5             |     1 +
 Testing/Data/EnSight/ironProt_ascii.case.md5       |     1 +
 Testing/Data/EnSight/ironProt_ascii.geo.md5        |     1 +
 Testing/Data/EnSight/ironProt_ascii_pd_scalars.md5 |     1 +
 Testing/Data/EnSight/ironProt_bin.case.md5         |     1 +
 Testing/Data/EnSight/ironProt_bin.geo.md5          |     1 +
 Testing/Data/EnSight/ironProt_bin_pd_scalars.md5   |     1 +
 Testing/Data/EnSight/mandelbrot.sos.md5            |     1 +
 Testing/Data/EnSight/mandelbrot1.case.md5          |     1 +
 Testing/Data/EnSight/mandelbrot1.geo.md5           |     1 +
 Testing/Data/EnSight/mandelbrot1_pd_Iterations.md5 |     1 +
 Testing/Data/EnSight/mandelbrot2.case.md5          |     1 +
 Testing/Data/EnSight/mandelbrot2.geo.md5           |     1 +
 Testing/Data/EnSight/mandelbrot2_pd_Iterations.md5 |     1 +
 Testing/Data/EnSight/naca.bin.case.md5             |     1 +
 Testing/Data/EnSight/naca.gold.bin.DENS_1.md5      |     1 +
 Testing/Data/EnSight/naca.gold.bin.DENS_3.md5      |     1 +
 Testing/Data/EnSight/naca.gold.bin.geo.md5         |     1 +
 Testing/Data/EnSight/office6_bin.case.md5          |     1 +
 Testing/Data/EnSight/office6_bin.geo.md5           |     1 +
 Testing/Data/EnSight/office6_bin_pd_scalars.md5    |     1 +
 Testing/Data/EnSight/office6_bin_pd_vectors.md5    |     1 +
 Testing/Data/EnSight/office_ascii.case.md5         |     1 +
 Testing/Data/EnSight/office_ascii.geo.md5          |     1 +
 Testing/Data/EnSight/office_ascii_pd_scalars.md5   |     1 +
 Testing/Data/EnSight/office_ascii_pd_vectors.md5   |     1 +
 Testing/Data/EnSight/office_bin.case.md5           |     1 +
 Testing/Data/EnSight/office_bin.geo.md5            |     1 +
 Testing/Data/EnSight/office_bin_pd_scalars.md5     |     1 +
 Testing/Data/EnSight/office_bin_pd_vectors.md5     |     1 +
 Testing/Data/EnSight/test.extr_pressure.0001.md5   |     1 +
 Testing/Data/EnSight/test.extr_velocity.0001.md5   |     1 +
 Testing/Data/EnSight/test.geo.md5                  |     1 +
 Testing/Data/EnSight/test.pressure.0001.md5        |     1 +
 Testing/Data/EnSight/test.velocity.0001.md5        |     1 +
 .../Data/EnSight/test_bin.extr_pressure.0001.md5   |     1 +
 .../Data/EnSight/test_bin.extr_velocity.0001.md5   |     1 +
 Testing/Data/EnSight/test_bin.geo.md5              |     1 +
 Testing/Data/EnSight/test_bin.pressure.0001.md5    |     1 +
 Testing/Data/EnSight/test_bin.velocity.0001.md5    |     1 +
 Testing/Data/EventRecording.log.md5                |     1 +
 Testing/Data/GIS/countries.dbf.md5                 |     1 +
 Testing/Data/GIS/countries.prj.md5                 |     1 +
 Testing/Data/GIS/countries.shp.md5                 |     1 +
 Testing/Data/GIS/countries.shx.md5                 |     1 +
 Testing/Data/GreenCircle.png.md5                   |     1 +
 Testing/Data/HeadMRVolume.mhd.md5                  |     1 +
 Testing/Data/HeadMRVolume.raw.md5                  |     1 +
 Testing/Data/IncOctPntLocData.dat.md5              |     1 +
 Testing/Data/IncOctPntLocResult.dat.md5            |     1 +
 Testing/Data/Infovis/DaveDS_-_Sketchy.ttf.md5      |     1 +
 .../Data/Infovis/DimacsGraphs/iso_pattern.gr.md5   |     1 +
 .../Data/Infovis/DimacsGraphs/iso_target.gr.md5    |     1 +
 Testing/Data/Infovis/DimacsGraphs/maxflow.max.md5  |     1 +
 Testing/Data/Infovis/Images/NE2_ps_bath.jpg.md5    |     1 +
 Testing/Data/Infovis/Images/NE2_ps_bath.png.md5    |     1 +
 .../Infovis/Images/NE2_ps_bath_transparent.png.md5 |     1 +
 .../PintassilgoPrints_-_Talvez_Assim.ttf.md5       |     1 +
 Testing/Data/Infovis/SQLite/SmallEmailTest.db.md5  |     1 +
 Testing/Data/Infovis/SQLite/SmallTestGraphs.db.md5 |     1 +
 Testing/Data/Infovis/SQLite/ports_protocols.db.md5 |     1 +
 Testing/Data/Infovis/SQLite/temperatures.db.md5    |     1 +
 Testing/Data/Infovis/XML/smalltest.xml.md5         |     1 +
 Testing/Data/Infovis/XML/vtkclasses.xml.md5        |     1 +
 Testing/Data/Infovis/XML/vtkfiles.xml.md5          |     1 +
 Testing/Data/Infovis/XML/vtklibrary.xml.md5        |     1 +
 .../Data/Infovis/authors-tabletographtest.csv.md5  |     1 +
 Testing/Data/Infovis/authors.csv.md5               |     1 +
 Testing/Data/Infovis/classes.csv.md5               |     1 +
 Testing/Data/Infovis/clustered-graph.tlp.md5       |     1 +
 Testing/Data/Infovis/document-term.csv.md5         |     1 +
 Testing/Data/Infovis/eg1.ris.md5                   |     1 +
 Testing/Data/Infovis/eg2.isi.md5                   |     1 +
 Testing/Data/Infovis/fruit.csv.md5                 |     1 +
 Testing/Data/Infovis/fsm.gml.md5                   |     1 +
 Testing/Data/Infovis/kcore_edges.csv.md5           |     1 +
 Testing/Data/Infovis/kcore_verts.csv.md5           |     1 +
 Testing/Data/Infovis/martyb_-_Ridiculous.ttf.md5   |     1 +
 Testing/Data/Infovis/matrix.csv.md5                |     1 +
 Testing/Data/Infovis/merge1.csv.md5                |     1 +
 Testing/Data/Infovis/merge2.csv.md5                |     1 +
 Testing/Data/Infovis/multi_tree.tre.md5            |     1 +
 Testing/Data/Infovis/otu_table.biom.md5            |     1 +
 Testing/Data/Infovis/person-document.csv.md5       |     1 +
 Testing/Data/Infovis/publications.csv.md5          |     1 +
 Testing/Data/Infovis/rep_set.tre.md5               |     1 +
 Testing/Data/Infovis/small.graph.md5               |     1 +
 Testing/Data/Infovis/small.tlp.md5                 |     1 +
 Testing/Data/Infovis/term-concept.csv.md5          |     1 +
 Testing/Data/LSDyna/foam/foam.d3plot.md5           |     1 +
 Testing/Data/LSDyna/foam/foam.d3plot01.md5         |     1 +
 Testing/Data/LSDyna/hemi.draw/hemi_draw.d3plot.md5 |     1 +
 .../Data/LSDyna/hemi.draw/hemi_draw.d3plot01.md5   |     1 +
 Testing/Data/LSDyna/hemi.draw/hemi_draw.d3thdt.md5 |     1 +
 Testing/Data/LSDyna/hemi.draw/hemi_draw.glstat.md5 |     1 +
 Testing/Data/LSDyna/hemi.draw/hemi_draw.k.md5      |     1 +
 Testing/Data/MFIXReader/BUB01.RES.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP1.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP2.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP3.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP4.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP5.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP6.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP7.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP8.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SP9.md5              |     1 +
 Testing/Data/MFIXReader/BUB01.SPA.md5              |     1 +
 Testing/Data/MetaIO/ChestCT-SHORT.mha.md5          |     1 +
 Testing/Data/NE2_ps_bath_small.jpg.md5             |     1 +
 Testing/Data/NetCDF/CAMReaderConnectivity.nc.md5   |     1 +
 Testing/Data/NetCDF/CAMReaderPoints.nc.md5         |     1 +
 Testing/Data/NetCDF/MPASReader.nc.md5              |     1 +
 Testing/Data/NetCDF/test.pop.nc.md5                |     1 +
 Testing/Data/OpenFOAM/cavity/0.5/U.md5             |     1 +
 Testing/Data/OpenFOAM/cavity/0.5/p.md5             |     1 +
 Testing/Data/OpenFOAM/cavity/0.5/phi.md5           |     1 +
 Testing/Data/OpenFOAM/cavity/0.5/uniform/time.md5  |     1 +
 Testing/Data/OpenFOAM/cavity/0/U.md5               |     1 +
 Testing/Data/OpenFOAM/cavity/0/p.md5               |     1 +
 Testing/Data/OpenFOAM/cavity/1.5/U.md5             |     1 +
 Testing/Data/OpenFOAM/cavity/1.5/p.md5             |     1 +
 Testing/Data/OpenFOAM/cavity/1.5/phi.md5           |     1 +
 Testing/Data/OpenFOAM/cavity/1.5/uniform/time.md5  |     1 +
 Testing/Data/OpenFOAM/cavity/1/U.md5               |     1 +
 Testing/Data/OpenFOAM/cavity/1/p.md5               |     1 +
 Testing/Data/OpenFOAM/cavity/1/phi.md5             |     1 +
 Testing/Data/OpenFOAM/cavity/1/uniform/time.md5    |     1 +
 Testing/Data/OpenFOAM/cavity/2.5/U.md5             |     1 +
 Testing/Data/OpenFOAM/cavity/2.5/p.md5             |     1 +
 Testing/Data/OpenFOAM/cavity/2.5/phi.md5           |     1 +
 Testing/Data/OpenFOAM/cavity/2.5/uniform/time.md5  |     1 +
 Testing/Data/OpenFOAM/cavity/2/U.md5               |     1 +
 Testing/Data/OpenFOAM/cavity/2/p.md5               |     1 +
 Testing/Data/OpenFOAM/cavity/2/phi.md5             |     1 +
 Testing/Data/OpenFOAM/cavity/2/uniform/time.md5    |     1 +
 Testing/Data/OpenFOAM/cavity/cavity.foam.md5       |     1 +
 .../cavity/constant/polyMesh/blockMeshDict.md5     |     1 +
 .../OpenFOAM/cavity/constant/polyMesh/boundary.md5 |     1 +
 .../OpenFOAM/cavity/constant/polyMesh/faces.md5    |     1 +
 .../cavity/constant/polyMesh/neighbour.md5         |     1 +
 .../OpenFOAM/cavity/constant/polyMesh/owner.md5    |     1 +
 .../OpenFOAM/cavity/constant/polyMesh/points.md5   |     1 +
 .../cavity/constant/transportProperties.md5        |     1 +
 .../Data/OpenFOAM/cavity/system/controlDict.md5    |     1 +
 Testing/Data/OpenFOAM/cavity/system/fvSchemes.md5  |     1 +
 Testing/Data/OpenFOAM/cavity/system/fvSolution.md5 |     1 +
 Testing/Data/Particles.raw.md5                     |     1 +
 Testing/Data/PentaHexa.vtk.md5                     |     1 +
 Testing/Data/Quadratic/CylinderLinear.vtk.md5      |     1 +
 Testing/Data/Quadratic/CylinderQuadratic.vtk.md5   |     1 +
 Testing/Data/RectGrid2.vtk.md5                     |     1 +
 Testing/Data/RedCircle.png.md5                     |     1 +
 Testing/Data/SLAC/ll-9cell-f523/README.md5         |     1 +
 .../Data/SLAC/ll-9cell-f523/ll-9cell-f523.ncdf.md5 |     1 +
 .../mode0.l0.R2.457036E+09I2.778314E+04.m3.md5     |     1 +
 Testing/Data/SLAC/pic-example/README.md5           |     1 +
 Testing/Data/SLAC/pic-example/fields_0.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/fields_1.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/fields_2.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/fields_3.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/fields_4.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/fields_5.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/fields_6.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/fields_7.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/fields_8.mod.md5     |     1 +
 Testing/Data/SLAC/pic-example/mesh.ncdf.md5        |     1 +
 Testing/Data/SLAC/pic-example/particles_0.ncdf.md5 |     1 +
 Testing/Data/SLAC/pic-example/particles_1.ncdf.md5 |     1 +
 Testing/Data/SLAC/pic-example/particles_2.ncdf.md5 |     1 +
 Testing/Data/SLAC/pic-example/particles_3.ncdf.md5 |     1 +
 Testing/Data/SLAC/pic-example/particles_4.ncdf.md5 |     1 +
 Testing/Data/SLAC/pic-example/particles_5.ncdf.md5 |     1 +
 Testing/Data/SLAC/pic-example/particles_6.ncdf.md5 |     1 +
 Testing/Data/SLAC/pic-example/particles_7.ncdf.md5 |     1 +
 Testing/Data/SLAC/pic-example/particles_8.ncdf.md5 |     1 +
 .../Data/SLAC/pillbox/Pillbox3TenDSlice.ncdf.md5   |     1 +
 .../pillbox/omega3p.l0.m0000.1.3138186e+09.mod.md5 |     1 +
 .../pillbox/omega3p.l0.m0001.1.3138187e+09.mod.md5 |     1 +
 .../pillbox/omega3p.l0.m0002.1.3138189e+09.mod.md5 |     1 +
 Testing/Data/SainteHelens.dem.md5                  |     1 +
 Testing/Data/SampleStructGrid.vtk.md5              |     1 +
 Testing/Data/SemiDisk/SemiDisk-0.vtp.md5           |     1 +
 Testing/Data/SemiDisk/SemiDisk-1.vtp.md5           |     1 +
 Testing/Data/SemiDisk/SemiDisk.vtk.md5             |     1 +
 Testing/Data/SemiDisk/SemiDisk.xml.md5             |     1 +
 Testing/Data/SurfaceVectors.vtk.md5                |     1 +
 Testing/Data/SyntheticPolyline.vtp.md5             |     1 +
 Testing/Data/Tango/README.VTK.txt.md5              |     1 +
 Testing/Data/Tango/TangoIcons.png.md5              |     1 +
 Testing/Data/UCD2D/UCD_00000.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00001.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00002.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00003.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00004.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00005.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00006.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00007.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00008.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00009.inp.md5               |     1 +
 Testing/Data/UCD2D/UCD_00010.inp.md5               |     1 +
 Testing/Data/Viewpoint/README.md5                  |     1 +
 Testing/Data/Viewpoint/cow.g.md5                   |     1 +
 Testing/Data/Viewpoint/cow.obj.md5                 |     1 +
 Testing/Data/Viewpoint/iflamigm.3ds.md5            |     1 +
 Testing/Data/WindBladeReader/WT_topo1.dat.md5      |     1 +
 Testing/Data/WindBladeReader/field/comp.out.10.md5 |     1 +
 .../Data/WindBladeReader/field/comp.out.5500.md5   |     1 +
 Testing/Data/WindBladeReader/test1_topo.wind.md5   |     1 +
 Testing/Data/WindBladeReader/turbine/WT_list.md5   |     1 +
 Testing/Data/WindBladeReader/turbine/WT_rist.md5   |     1 +
 Testing/Data/WindBladeReader/turbine/wtbl.10.md5   |     1 +
 Testing/Data/WindBladeReader/turbine/wtbl.5500.md5 |     1 +
 Testing/Data/WineGlass.wrl.md5                     |     1 +
 Testing/Data/alphachannel.png.md5                  |     1 +
 Testing/Data/authors.csv.md5                       |     1 +
 Testing/Data/avg152T1_RL_nifti.nii.gz.md5          |     1 +
 Testing/Data/beach.ascii.md5                       |     1 +
 Testing/Data/beach.ascii.nhdr.md5                  |     1 +
 Testing/Data/beach.jpg.md5                         |     1 +
 Testing/Data/beach.nrrd.md5                        |     1 +
 Testing/Data/beach.tif.md5                         |     1 +
 Testing/Data/billBoard.pgm.md5                     |     1 +
 Testing/Data/blow.vtk.md5                          |     1 +
 Testing/Data/blowAttr.vtk.md5                      |     1 +
 Testing/Data/blowGeom.vtk.md5                      |     1 +
 Testing/Data/bluntfinq.bin.md5                     |     1 +
 Testing/Data/bluntfinxyz.bin.md5                   |     1 +
 Testing/Data/bolt.fac.md5                          |     1 +
 Testing/Data/bolt.slc.md5                          |     1 +
 Testing/Data/bore.vtk.md5                          |     1 +
 Testing/Data/bot2.wrl.md5                          |     1 +
 Testing/Data/box-noglom.ex2.md5                    |     1 +
 Testing/Data/bpa.mol.md5                           |     1 +
 Testing/Data/brainImageSmooth.vtk.md5              |     1 +
 Testing/Data/bunny.ply.md5                         |     1 +
 Testing/Data/cactus.3337.pts.md5                   |     1 +
 Testing/Data/caffeine.pdb.md5                      |     1 +
 Testing/Data/camscene.png.md5                      |     1 +
 Testing/Data/cellcentered.tec.md5                  |     1 +
 Testing/Data/cellsnd.ascii.inp.md5                 |     1 +
 Testing/Data/cellsnd.bin.inp.md5                   |     1 +
 .../chi_field/chi_chunk_0_700_0_50x50x50.bov.md5   |     1 +
 Testing/Data/chombo3d/chombo3d.vtm.md5             |     1 +
 Testing/Data/chombo3d/chombo3d_0.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_1.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_10.vti.md5          |     1 +
 Testing/Data/chombo3d/chombo3d_11.vti.md5          |     1 +
 Testing/Data/chombo3d/chombo3d_12.vti.md5          |     1 +
 Testing/Data/chombo3d/chombo3d_13.vti.md5          |     1 +
 Testing/Data/chombo3d/chombo3d_14.vti.md5          |     1 +
 Testing/Data/chombo3d/chombo3d_15.vti.md5          |     1 +
 Testing/Data/chombo3d/chombo3d_2.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_3.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_4.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_5.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_6.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_7.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_8.vti.md5           |     1 +
 Testing/Data/chombo3d/chombo3d_9.vti.md5           |     1 +
 Testing/Data/clouds.jpeg.md5                       |     1 +
 Testing/Data/clown.facet.md5                       |     1 +
 Testing/Data/combq.bin.md5                         |     1 +
 Testing/Data/combxyz.bin.md5                       |     1 +
 Testing/Data/cow.vtp.md5                           |     1 +
 Testing/Data/cth.vtr.md5                           |     1 +
 Testing/Data/delimited.txt.md5                     |     1 +
 Testing/Data/delimited2.txt.md5                    |     1 +
 Testing/Data/delimited2UTF16.txt.md5               |     1 +
 Testing/Data/delimited2UTF16BE.txt.md5             |     1 +
 Testing/Data/delimited2UTF16LE.txt.md5             |     1 +
 Testing/Data/delimitedUTF16.txt.md5                |     1 +
 Testing/Data/delimitedUTF16BE.txt.md5              |     1 +
 Testing/Data/delimitedUTF16LE.txt.md5              |     1 +
 Testing/Data/disk_out_ref.ex2.md5                  |     1 +
 Testing/Data/disk_out_ref_surface.vtp.md5          |     1 +
 Testing/Data/earth.ppm.md5                         |     1 +
 Testing/Data/edgeFaceElem.exii.md5                 |     1 +
 Testing/Data/ex-blow_5.vtm.md5                     |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_0_0.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_0_1.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_0_2.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_0_3.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_0_4.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_0_5.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_0_6.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_0_7.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_1_0.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_1_1.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_1_2.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_1_3.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_1_4.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_1_5.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_1_6.vtu.md5       |     1 +
 Testing/Data/ex-blow_5/ex-blow_5_1_7.vtu.md5       |     1 +
 Testing/Data/faults.vtk.md5                        |     1 +
 Testing/Data/fieldfile.vtk.md5                     |     1 +
 Testing/Data/filledContours.vtp.md5                |     1 +
 Testing/Data/financial.txt.md5                     |     1 +
 Testing/Data/financial.vtk.md5                     |     1 +
 Testing/Data/fixedwidth.txt.md5                    |     1 +
 Testing/Data/flow.tec.gz.md5                       |     1 +
 Testing/Data/flow.tec.md5                          |     1 +
 Testing/Data/foot/foot.mha.md5                     |     1 +
 Testing/Data/foot/foot.raw.md5                     |     1 +
 Testing/Data/fran_cut.png.md5                      |     1 +
 Testing/Data/fran_cut.vtk.md5                      |     1 +
 Testing/Data/fullhead15.png.md5                    |     1 +
 Testing/Data/headsq/quarter.1.md5                  |     1 +
 Testing/Data/headsq/quarter.10.md5                 |     1 +
 Testing/Data/headsq/quarter.11.md5                 |     1 +
 Testing/Data/headsq/quarter.12.md5                 |     1 +
 Testing/Data/headsq/quarter.13.md5                 |     1 +
 Testing/Data/headsq/quarter.14.md5                 |     1 +
 Testing/Data/headsq/quarter.15.md5                 |     1 +
 Testing/Data/headsq/quarter.16.md5                 |     1 +
 Testing/Data/headsq/quarter.17.md5                 |     1 +
 Testing/Data/headsq/quarter.18.md5                 |     1 +
 Testing/Data/headsq/quarter.19.md5                 |     1 +
 Testing/Data/headsq/quarter.2.md5                  |     1 +
 Testing/Data/headsq/quarter.20.md5                 |     1 +
 Testing/Data/headsq/quarter.21.md5                 |     1 +
 Testing/Data/headsq/quarter.22.md5                 |     1 +
 Testing/Data/headsq/quarter.23.md5                 |     1 +
 Testing/Data/headsq/quarter.24.md5                 |     1 +
 Testing/Data/headsq/quarter.25.md5                 |     1 +
 Testing/Data/headsq/quarter.26.md5                 |     1 +
 Testing/Data/headsq/quarter.27.md5                 |     1 +
 Testing/Data/headsq/quarter.28.md5                 |     1 +
 Testing/Data/headsq/quarter.29.md5                 |     1 +
 Testing/Data/headsq/quarter.3.md5                  |     1 +
 Testing/Data/headsq/quarter.30.md5                 |     1 +
 Testing/Data/headsq/quarter.31.md5                 |     1 +
 Testing/Data/headsq/quarter.32.md5                 |     1 +
 Testing/Data/headsq/quarter.33.md5                 |     1 +
 Testing/Data/headsq/quarter.34.md5                 |     1 +
 Testing/Data/headsq/quarter.35.md5                 |     1 +
 Testing/Data/headsq/quarter.36.md5                 |     1 +
 Testing/Data/headsq/quarter.37.md5                 |     1 +
 Testing/Data/headsq/quarter.38.md5                 |     1 +
 Testing/Data/headsq/quarter.39.md5                 |     1 +
 Testing/Data/headsq/quarter.4.md5                  |     1 +
 Testing/Data/headsq/quarter.40.md5                 |     1 +
 Testing/Data/headsq/quarter.41.md5                 |     1 +
 Testing/Data/headsq/quarter.42.md5                 |     1 +
 Testing/Data/headsq/quarter.43.md5                 |     1 +
 Testing/Data/headsq/quarter.44.md5                 |     1 +
 Testing/Data/headsq/quarter.45.md5                 |     1 +
 Testing/Data/headsq/quarter.46.md5                 |     1 +
 Testing/Data/headsq/quarter.47.md5                 |     1 +
 Testing/Data/headsq/quarter.48.md5                 |     1 +
 Testing/Data/headsq/quarter.49.md5                 |     1 +
 Testing/Data/headsq/quarter.5.md5                  |     1 +
 Testing/Data/headsq/quarter.50.md5                 |     1 +
 Testing/Data/headsq/quarter.51.md5                 |     1 +
 Testing/Data/headsq/quarter.52.md5                 |     1 +
 Testing/Data/headsq/quarter.53.md5                 |     1 +
 Testing/Data/headsq/quarter.54.md5                 |     1 +
 Testing/Data/headsq/quarter.55.md5                 |     1 +
 Testing/Data/headsq/quarter.56.md5                 |     1 +
 Testing/Data/headsq/quarter.57.md5                 |     1 +
 Testing/Data/headsq/quarter.58.md5                 |     1 +
 Testing/Data/headsq/quarter.59.md5                 |     1 +
 Testing/Data/headsq/quarter.6.md5                  |     1 +
 Testing/Data/headsq/quarter.60.md5                 |     1 +
 Testing/Data/headsq/quarter.61.md5                 |     1 +
 Testing/Data/headsq/quarter.62.md5                 |     1 +
 Testing/Data/headsq/quarter.63.md5                 |     1 +
 Testing/Data/headsq/quarter.64.md5                 |     1 +
 Testing/Data/headsq/quarter.65.md5                 |     1 +
 Testing/Data/headsq/quarter.66.md5                 |     1 +
 Testing/Data/headsq/quarter.67.md5                 |     1 +
 Testing/Data/headsq/quarter.68.md5                 |     1 +
 Testing/Data/headsq/quarter.69.md5                 |     1 +
 Testing/Data/headsq/quarter.7.md5                  |     1 +
 Testing/Data/headsq/quarter.70.md5                 |     1 +
 Testing/Data/headsq/quarter.71.md5                 |     1 +
 Testing/Data/headsq/quarter.72.md5                 |     1 +
 Testing/Data/headsq/quarter.73.md5                 |     1 +
 Testing/Data/headsq/quarter.74.md5                 |     1 +
 Testing/Data/headsq/quarter.75.md5                 |     1 +
 Testing/Data/headsq/quarter.76.md5                 |     1 +
 Testing/Data/headsq/quarter.77.md5                 |     1 +
 Testing/Data/headsq/quarter.78.md5                 |     1 +
 Testing/Data/headsq/quarter.79.md5                 |     1 +
 Testing/Data/headsq/quarter.8.md5                  |     1 +
 Testing/Data/headsq/quarter.80.md5                 |     1 +
 Testing/Data/headsq/quarter.81.md5                 |     1 +
 Testing/Data/headsq/quarter.82.md5                 |     1 +
 Testing/Data/headsq/quarter.83.md5                 |     1 +
 Testing/Data/headsq/quarter.84.md5                 |     1 +
 Testing/Data/headsq/quarter.85.md5                 |     1 +
 Testing/Data/headsq/quarter.86.md5                 |     1 +
 Testing/Data/headsq/quarter.87.md5                 |     1 +
 Testing/Data/headsq/quarter.88.md5                 |     1 +
 Testing/Data/headsq/quarter.89.md5                 |     1 +
 Testing/Data/headsq/quarter.9.md5                  |     1 +
 Testing/Data/headsq/quarter.90.md5                 |     1 +
 Testing/Data/headsq/quarter.91.md5                 |     1 +
 Testing/Data/headsq/quarter.92.md5                 |     1 +
 Testing/Data/headsq/quarter.93.md5                 |     1 +
 Testing/Data/headsq/quarter.nhdr.md5               |     1 +
 Testing/Data/hello.vtk.md5                         |     1 +
 Testing/Data/hexa.vtk.md5                          |     1 +
 Testing/Data/iflamigm.3ds.md5                      |     1 +
 Testing/Data/ironProt.vtk.md5                      |     1 +
 Testing/Data/libtiff/test.tif.md5                  |     1 +
 Testing/Data/m4_TotalDensity.cube.md5              |     1 +
 Testing/Data/many_blocks/many_blocks.vtm.md5       |     1 +
 .../many_blocks/many_blocks_0_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_10_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_11_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_12_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_13_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_14_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_15_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_16_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_17_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_18_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_19_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_1_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_20_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_21_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_22_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_23_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_24_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_25_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_26_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_27_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_28_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_29_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_2_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_30_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_31_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_32_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_33_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_34_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_35_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_36_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_37_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_38_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_39_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_3_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_40_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_41_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_42_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_43_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_44_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_45_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_46_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_47_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_48_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_49_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_4_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_50_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_51_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_52_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_53_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_54_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_55_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_56_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_57_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_58_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_59_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_5_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_60_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_61_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_62_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_63_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_64_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_65_0.vtp.md5           |     1 +
 .../many_blocks/many_blocks_6_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_7_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_8_0.vtp.md5            |     1 +
 .../many_blocks/many_blocks_9_0.vtp.md5            |     1 +
 Testing/Data/masonry-wide.jpg.md5                  |     1 +
 Testing/Data/masonry.bmp.md5                       |     1 +
 Testing/Data/matrix.vtk.md5                        |     1 +
 Testing/Data/mbwavelet_ascii.q.md5                 |     1 +
 Testing/Data/mbwavelet_ascii.xyz.md5               |     1 +
 Testing/Data/minimal.hdr.gz.md5                    |     1 +
 Testing/Data/minimal.img.gz.md5                    |     1 +
 Testing/Data/minimal.nii.gz.md5                    |     1 +
 Testing/Data/mni-surface-mesh.obj.md5              |     1 +
 Testing/Data/motor.g.md5                           |     1 +
 Testing/Data/mr.001.md5                            |     1 +
 Testing/Data/multi-ascii.q.md5                     |     1 +
 Testing/Data/multi-ascii.xyz.md5                   |     1 +
 Testing/Data/multi-bin-2D.q.md5                    |     1 +
 Testing/Data/multi-bin-2D.xyz.md5                  |     1 +
 Testing/Data/multi-bin-C.q.md5                     |     1 +
 Testing/Data/multi-bin-C.xyz.md5                   |     1 +
 Testing/Data/multi-bin-oflow.q.md5                 |     1 +
 Testing/Data/multi-bin.f.md5                       |     1 +
 Testing/Data/multi-bin.q.md5                       |     1 +
 Testing/Data/multi-bin.xyz.md5                     |     1 +
 Testing/Data/multi.p3d.md5                         |     1 +
 Testing/Data/multicomb_0.vts.md5                   |     1 +
 Testing/Data/multicomb_1.vts.md5                   |     1 +
 Testing/Data/multicomb_2.vts.md5                   |     1 +
 Testing/Data/neghip.slc.md5                        |     1 +
 Testing/Data/noise.png.md5                         |     1 +
 Testing/Data/nut.slc.md5                           |     1 +
 Testing/Data/office.binary.vtk.md5                 |     1 +
 Testing/Data/plate.vtk.md5                         |     1 +
 Testing/Data/points.txt.md5                        |     1 +
 Testing/Data/political.vtp.md5                     |     1 +
 Testing/Data/polyEx.vtk.md5                        |     1 +
 Testing/Data/polyhedron2pieces.vtu.md5             |     1 +
 Testing/Data/porphyrin.cml.md5                     |     1 +
 Testing/Data/post.vtk.md5                          |     1 +
 Testing/Data/prism.neu.md5                         |     1 +
 Testing/Data/prostar.cel.md5                       |     1 +
 Testing/Data/prostar.vrt.md5                       |     1 +
 Testing/Data/quadraticTetra01.vtu.md5              |     1 +
 Testing/Data/qualityEx.vtk.md5                     |     1 +
 Testing/Data/ruler.png.md5                         |     1 +
 Testing/Data/sample.xml.md5                        |     1 +
 Testing/Data/sampleCurveGrid4.nc.md5               |     1 +
 Testing/Data/sampleGenGrid3.nc.md5                 |     1 +
 Testing/Data/sphere.slc.md5                        |     1 +
 Testing/Data/t3_grid_0.mnc.md5                     |     1 +
 Testing/Data/teapot.g.md5                          |     1 +
 Testing/Data/tensors.vtk.md5                       |     1 +
 Testing/Data/test.p3d.md5                          |     1 +
 Testing/Data/tetraMesh.vtk.md5                     |     1 +
 Testing/Data/texThres2.vtk.md5                     |     1 +
 Testing/Data/textureRGBA.png.md5                   |     1 +
 Testing/Data/thio3xx.xyz.md5                       |     1 +
 Testing/Data/timestep_0_15.vts.md5                 |     1 +
 Testing/Data/tos_O1_2001-2002.nc.md5               |     1 +
 Testing/Data/track1.binary.vtk.md5                 |     1 +
 Testing/Data/track2.binary.vtk.md5                 |     1 +
 Testing/Data/track3.binary.vtk.md5                 |     1 +
 Testing/Data/treetest.xml.md5                      |     1 +
 Testing/Data/uGridEx.vtk.md5                       |     1 +
 Testing/Data/uniform-001371-5x5x5.vtp.md5          |     1 +
 Testing/Data/usa.vtk.md5                           |     1 +
 Testing/Data/usa_image.jpg.md5                     |     1 +
 Testing/Data/vase_1comp.vti.md5                    |     1 +
 Testing/Data/vase_4comp.vti.md5                    |     1 +
 Testing/Data/vehicle_data.csv.md5                  |     1 +
 Testing/Data/vtk.png.md5                           |     1 +
 Testing/Data/vtk.vtk.md5                           |     1 +
 Testing/Data/vwgt.coords.md5                       |     1 +
 Testing/Data/vwgt.graph.md5                        |     1 +
 Testing/External/CMakeLists.txt                    |    18 +-
 Testing/GenericBridge/vtkBridgeAttribute.cxx       |     2 +-
 Testing/GenericBridge/vtkBridgeCell.cxx            |     2 +-
 Testing/GenericBridge/vtkBridgeCellIterator.cxx    |     2 +-
 .../vtkBridgeCellIteratorOnCellBoundaries.cxx      |     2 +-
 .../vtkBridgeCellIteratorOnCellList.cxx            |     2 +-
 .../vtkBridgeCellIteratorOnDataSet.cxx             |     2 +-
 Testing/GenericBridge/vtkBridgeCellIteratorOne.cxx |     2 +-
 .../vtkBridgeCellIteratorStrategy.cxx              |     2 +-
 Testing/GenericBridge/vtkBridgeDataSet.cxx         |     2 +-
 Testing/GenericBridge/vtkBridgePointIterator.cxx   |     4 +-
 .../GenericBridge/vtkBridgePointIteratorOnCell.cxx |     2 +-
 .../vtkBridgePointIteratorOnDataSet.cxx            |     2 +-
 .../GenericBridge/vtkBridgePointIteratorOne.cxx    |     4 +-
 Testing/Install/VIT/CMakeLists.txt                 |     2 +-
 Testing/Rendering/module.cmake                     |     3 +
 Testing/Rendering/vtkRegressionTestImage.h         |     4 +-
 Testing/Rendering/vtkTesting.cxx                   |   399 +-
 Testing/Rendering/vtkTesting.h                     |    11 +-
 ThirdParty/AutobahnPython/CMakeLists.txt           |    21 +
 ThirdParty/AutobahnPython/PKG-INFO                 |    38 +
 ThirdParty/AutobahnPython/autobahn/__init__.py     |    31 +
 ThirdParty/AutobahnPython/autobahn/_version.py     |    19 +
 ThirdParty/AutobahnPython/autobahn/flashpolicy.py  |   106 +
 ThirdParty/AutobahnPython/autobahn/httpstatus.py   |   271 +
 ThirdParty/AutobahnPython/autobahn/pbkdf2.py       |   134 +
 ThirdParty/AutobahnPython/autobahn/prefixmap.py    |   144 +
 ThirdParty/AutobahnPython/autobahn/resource.py     |   172 +
 ThirdParty/AutobahnPython/autobahn/useragent.py    |   312 +
 .../AutobahnPython/autobahn/utf8validator.py       |   120 +
 ThirdParty/AutobahnPython/autobahn/util.py         |   154 +
 ThirdParty/AutobahnPython/autobahn/wamp.py         |  2344 ++++
 ThirdParty/AutobahnPython/autobahn/websocket.py    |  3847 ++++++
 ThirdParty/AutobahnPython/autobahn/xormasker.py    |   100 +
 ThirdParty/AutobahnPython/module.cmake             |     5 +
 ThirdParty/Cosmo/BasicDefinition.h                 |   244 -
 ThirdParty/Cosmo/CMakeLists.txt                    |    66 -
 ThirdParty/Cosmo/ChainingMesh.cxx                  |   306 -
 ThirdParty/Cosmo/ChainingMesh.h                    |   143 -
 ThirdParty/Cosmo/CosmoDefinition.h.in              |    70 -
 ThirdParty/Cosmo/CosmoHalo.h                       |   157 -
 ThirdParty/Cosmo/CosmoHaloFinder.cxx               |   581 -
 ThirdParty/Cosmo/CosmoHaloFinder.h                 |   236 -
 ThirdParty/Cosmo/CosmoHaloFinderP.cxx              |  1119 --
 ThirdParty/Cosmo/CosmoHaloFinderP.h                |   218 -
 ThirdParty/Cosmo/FOFHaloProperties.cxx             |   702 -
 ThirdParty/Cosmo/FOFHaloProperties.h               |   223 -
 ThirdParty/Cosmo/HaloCenterFinder.cxx              |  1325 --
 ThirdParty/Cosmo/HaloCenterFinder.h                |   185 -
 ThirdParty/Cosmo/Message.cxx                       |   242 -
 ThirdParty/Cosmo/Message.h                         |   126 -
 ThirdParty/Cosmo/ParticleDistribute.cxx            |  1460 ---
 ThirdParty/Cosmo/ParticleDistribute.h              |   233 -
 ThirdParty/Cosmo/ParticleExchange.cxx              |   756 --
 ThirdParty/Cosmo/ParticleExchange.h                |   183 -
 ThirdParty/Cosmo/Partition.cxx                     |   276 -
 ThirdParty/Cosmo/Partition.h                       |   117 -
 ThirdParty/Cosmo/SODHalo.cxx                       |   849 --
 ThirdParty/Cosmo/SODHalo.h                         |   265 -
 ThirdParty/Cosmo/module.cmake                      |     6 -
 ThirdParty/Cosmo/winDirent.h                       |   232 -
 .../TclTk/resources/tk8.3/win/rc/CMakeLists.txt    |     2 +-
 .../TclTk/resources/tk8.4/win/rc/CMakeLists.txt    |     2 +-
 .../TclTk/resources/tk8.5/win/rc/CMakeLists.txt    |     2 +-
 .../TclTk/resources/tk8.6/win/rc/CMakeLists.txt    |     2 +-
 ThirdParty/Twisted/CMakeLists.txt                  |    20 +
 ThirdParty/Twisted/LICENSE                         |    57 +
 ThirdParty/Twisted/README                          |   117 +
 ThirdParty/Twisted/module.cmake                    |     5 +
 ThirdParty/Twisted/twisted/__init__.py             |    62 +
 ThirdParty/Twisted/twisted/_version.py             |     3 +
 ThirdParty/Twisted/twisted/application/__init__.py |     7 +
 ThirdParty/Twisted/twisted/application/app.py      |   674 +
 ThirdParty/Twisted/twisted/application/internet.py |   365 +
 ThirdParty/Twisted/twisted/application/reactors.py |    83 +
 ThirdParty/Twisted/twisted/application/service.py  |   413 +
 ThirdParty/Twisted/twisted/application/strports.py |   103 +
 .../Twisted/twisted/application/test/__init__.py   |     6 +
 .../twisted/application/test/test_internet.py      |   252 +
 ThirdParty/Twisted/twisted/conch/__init__.py       |    18 +
 ThirdParty/Twisted/twisted/conch/_version.py       |     3 +
 ThirdParty/Twisted/twisted/conch/avatar.py         |    37 +
 ThirdParty/Twisted/twisted/conch/checkers.py       |   308 +
 .../Twisted/twisted/conch/client/__init__.py       |     9 +
 ThirdParty/Twisted/twisted/conch/client/agent.py   |    73 +
 ThirdParty/Twisted/twisted/conch/client/connect.py |    21 +
 ThirdParty/Twisted/twisted/conch/client/default.py |   256 +
 ThirdParty/Twisted/twisted/conch/client/direct.py  |   107 +
 .../Twisted/twisted/conch/client/knownhosts.py     |   478 +
 ThirdParty/Twisted/twisted/conch/client/options.py |    96 +
 ThirdParty/Twisted/twisted/conch/error.py          |   102 +
 .../Twisted/twisted/conch/insults/__init__.py      |    16 +
 ThirdParty/Twisted/twisted/conch/insults/client.py |   138 +
 ThirdParty/Twisted/twisted/conch/insults/colors.py |    29 +
 ThirdParty/Twisted/twisted/conch/insults/helper.py |   450 +
 .../Twisted/twisted/conch/insults/insults.py       |  1087 ++
 ThirdParty/Twisted/twisted/conch/insults/text.py   |   186 +
 ThirdParty/Twisted/twisted/conch/insults/window.py |   868 ++
 ThirdParty/Twisted/twisted/conch/interfaces.py     |   402 +
 ThirdParty/Twisted/twisted/conch/ls.py             |    75 +
 ThirdParty/Twisted/twisted/conch/manhole.py        |   340 +
 ThirdParty/Twisted/twisted/conch/manhole_ssh.py    |   146 +
 ThirdParty/Twisted/twisted/conch/manhole_tap.py    |   124 +
 ThirdParty/Twisted/twisted/conch/mixin.py          |    49 +
 .../twisted/conch/openssh_compat/__init__.py       |    11 +
 .../twisted/conch/openssh_compat/factory.py        |    73 +
 .../Twisted/twisted/conch/openssh_compat/primes.py |    26 +
 ThirdParty/Twisted/twisted/conch/recvline.py       |   329 +
 .../Twisted/twisted/conch/scripts/__init__.py      |     1 +
 ThirdParty/Twisted/twisted/conch/scripts/cftp.py   |   832 ++
 .../Twisted/twisted/conch/scripts/ckeygen.py       |   201 +
 ThirdParty/Twisted/twisted/conch/scripts/conch.py  |   512 +
 .../Twisted/twisted/conch/scripts/tkconch.py       |   572 +
 ThirdParty/Twisted/twisted/conch/ssh/__init__.py   |    10 +
 ThirdParty/Twisted/twisted/conch/ssh/address.py    |    38 +
 ThirdParty/Twisted/twisted/conch/ssh/agent.py      |   294 +
 ThirdParty/Twisted/twisted/conch/ssh/channel.py    |   281 +
 ThirdParty/Twisted/twisted/conch/ssh/common.py     |   117 +
 ThirdParty/Twisted/twisted/conch/ssh/connection.py |   637 +
 ThirdParty/Twisted/twisted/conch/ssh/factory.py    |   141 +
 .../Twisted/twisted/conch/ssh/filetransfer.py      |   934 ++
 ThirdParty/Twisted/twisted/conch/ssh/forwarding.py |   181 +
 ThirdParty/Twisted/twisted/conch/ssh/keys.py       |   844 ++
 ThirdParty/Twisted/twisted/conch/ssh/service.py    |    48 +
 ThirdParty/Twisted/twisted/conch/ssh/session.py    |   348 +
 ThirdParty/Twisted/twisted/conch/ssh/sexpy.py      |    42 +
 ThirdParty/Twisted/twisted/conch/ssh/transport.py  |  1617 +++
 ThirdParty/Twisted/twisted/conch/ssh/userauth.py   |   848 ++
 ThirdParty/Twisted/twisted/conch/stdio.py          |    95 +
 ThirdParty/Twisted/twisted/conch/tap.py            |    92 +
 ThirdParty/Twisted/twisted/conch/telnet.py         |  1086 ++
 ThirdParty/Twisted/twisted/conch/test/__init__.py  |     1 +
 ThirdParty/Twisted/twisted/conch/test/keydata.py   |   208 +
 .../Twisted/twisted/conch/test/test_address.py     |    49 +
 .../Twisted/twisted/conch/test/test_agent.py       |   399 +
 ThirdParty/Twisted/twisted/conch/test/test_cftp.py |   975 ++
 .../Twisted/twisted/conch/test/test_channel.py     |   279 +
 .../Twisted/twisted/conch/test/test_checkers.py    |   609 +
 .../Twisted/twisted/conch/test/test_ckeygen.py     |   137 +
 .../Twisted/twisted/conch/test/test_conch.py       |   552 +
 .../Twisted/twisted/conch/test/test_connection.py  |   730 ++
 .../Twisted/twisted/conch/test/test_default.py     |   171 +
 .../twisted/conch/test/test_filetransfer.py        |   765 ++
 .../Twisted/twisted/conch/test/test_helper.py      |   560 +
 .../Twisted/twisted/conch/test/test_insults.py     |   496 +
 ThirdParty/Twisted/twisted/conch/test/test_keys.py |   644 +
 .../Twisted/twisted/conch/test/test_knownhosts.py  |  1037 ++
 .../Twisted/twisted/conch/test/test_manhole.py     |   372 +
 .../Twisted/twisted/conch/test/test_mixin.py       |    47 +
 .../twisted/conch/test/test_openssh_compat.py      |   102 +
 .../Twisted/twisted/conch/test/test_recvline.py    |   706 +
 .../Twisted/twisted/conch/test/test_scripts.py     |    82 +
 .../Twisted/twisted/conch/test/test_session.py     |  1256 ++
 ThirdParty/Twisted/twisted/conch/test/test_ssh.py  |   995 ++
 ThirdParty/Twisted/twisted/conch/test/test_tap.py  |   184 +
 .../Twisted/twisted/conch/test/test_telnet.py      |   767 ++
 ThirdParty/Twisted/twisted/conch/test/test_text.py |   101 +
 .../Twisted/twisted/conch/test/test_transport.py   |  2225 ++++
 .../Twisted/twisted/conch/test/test_userauth.py    |  1077 ++
 .../Twisted/twisted/conch/test/test_window.py      |    67 +
 ThirdParty/Twisted/twisted/conch/topfiles/NEWS     |   432 +
 ThirdParty/Twisted/twisted/conch/topfiles/README   |    11 +
 ThirdParty/Twisted/twisted/conch/topfiles/setup.py |    48 +
 ThirdParty/Twisted/twisted/conch/ttymodes.py       |   121 +
 ThirdParty/Twisted/twisted/conch/ui/__init__.py    |    11 +
 ThirdParty/Twisted/twisted/conch/ui/ansi.py        |   240 +
 ThirdParty/Twisted/twisted/conch/ui/tkvt100.py     |   197 +
 ThirdParty/Twisted/twisted/conch/unix.py           |   457 +
 ThirdParty/Twisted/twisted/copyright.py            |    41 +
 ThirdParty/Twisted/twisted/cred/__init__.py        |    13 +
 ThirdParty/Twisted/twisted/cred/_digest.py         |   129 +
 ThirdParty/Twisted/twisted/cred/checkers.py        |   268 +
 ThirdParty/Twisted/twisted/cred/credentials.py     |   483 +
 ThirdParty/Twisted/twisted/cred/error.py           |    41 +
 ThirdParty/Twisted/twisted/cred/pamauth.py         |    79 +
 ThirdParty/Twisted/twisted/cred/portal.py          |   121 +
 ThirdParty/Twisted/twisted/cred/strcred.py         |   270 +
 ThirdParty/Twisted/twisted/enterprise/__init__.py  |     9 +
 ThirdParty/Twisted/twisted/enterprise/adbapi.py    |   483 +
 ThirdParty/Twisted/twisted/internet/__init__.py    |    12 +
 .../Twisted/twisted/internet/_baseprocess.py       |    62 +
 .../Twisted/twisted/internet/_dumbwin32proc.py     |   388 +
 .../Twisted/twisted/internet/_endpointspy3.py      |   483 +
 ThirdParty/Twisted/twisted/internet/_glibbase.py   |   391 +
 ThirdParty/Twisted/twisted/internet/_newtls.py     |   271 +
 ThirdParty/Twisted/twisted/internet/_oldtls.py     |   381 +
 .../Twisted/twisted/internet/_pollingfile.py       |   300 +
 .../Twisted/twisted/internet/_posixserialport.py   |    74 +
 ThirdParty/Twisted/twisted/internet/_posixstdio.py |   175 +
 ThirdParty/Twisted/twisted/internet/_signals.py    |    68 +
 ThirdParty/Twisted/twisted/internet/_ssl.py        |    32 +
 ThirdParty/Twisted/twisted/internet/_sslverify.py  |   786 ++
 .../Twisted/twisted/internet/_threadedselect.py    |   361 +
 ThirdParty/Twisted/twisted/internet/_utilspy3.py   |    59 +
 .../Twisted/twisted/internet/_win32serialport.py   |   126 +
 ThirdParty/Twisted/twisted/internet/_win32stdio.py |   124 +
 ThirdParty/Twisted/twisted/internet/abstract.py    |   530 +
 ThirdParty/Twisted/twisted/internet/address.py     |   146 +
 ThirdParty/Twisted/twisted/internet/base.py        |  1194 ++
 ThirdParty/Twisted/twisted/internet/cfreactor.py   |   501 +
 ThirdParty/Twisted/twisted/internet/default.py     |    56 +
 ThirdParty/Twisted/twisted/internet/defer.py       |  1592 +++
 ThirdParty/Twisted/twisted/internet/endpoints.py   |   884 ++
 .../Twisted/twisted/internet/epollreactor.py       |   396 +
 ThirdParty/Twisted/twisted/internet/error.py       |   455 +
 ThirdParty/Twisted/twisted/internet/fdesc.py       |   118 +
 ThirdParty/Twisted/twisted/internet/gireactor.py   |   188 +
 .../Twisted/twisted/internet/glib2reactor.py       |    44 +
 ThirdParty/Twisted/twisted/internet/gtk2reactor.py |   119 +
 ThirdParty/Twisted/twisted/internet/gtk3reactor.py |    80 +
 ThirdParty/Twisted/twisted/internet/gtkreactor.py  |   250 +
 ThirdParty/Twisted/twisted/internet/inotify.py     |   405 +
 ThirdParty/Twisted/twisted/internet/interfaces.py  |  2015 +++
 .../twisted/internet/iocpreactor/__init__.py       |    10 +
 .../twisted/internet/iocpreactor/abstract.py       |   400 +
 .../Twisted/twisted/internet/iocpreactor/build.bat |     4 +
 .../Twisted/twisted/internet/iocpreactor/const.py  |    26 +
 .../twisted/internet/iocpreactor/interfaces.py     |    47 +
 .../internet/iocpreactor/iocpsupport/acceptex.pxi  |    46 +
 .../internet/iocpreactor/iocpsupport/connectex.pxi |    47 +
 .../internet/iocpreactor/iocpsupport/iocpsupport.c |  6376 ++++++++++
 .../iocpreactor/iocpsupport/iocpsupport.pyx        |   312 +
 .../iocpreactor/iocpsupport/winsock_pointers.c     |    62 +
 .../iocpreactor/iocpsupport/winsock_pointers.h     |    51 +
 .../internet/iocpreactor/iocpsupport/wsarecv.pxi   |    76 +
 .../internet/iocpreactor/iocpsupport/wsasend.pxi   |    30 +
 .../Twisted/twisted/internet/iocpreactor/notes.txt |    24 +
 .../twisted/internet/iocpreactor/reactor.py        |   275 +
 .../Twisted/twisted/internet/iocpreactor/setup.py  |    23 +
 .../Twisted/twisted/internet/iocpreactor/tcp.py    |   578 +
 .../Twisted/twisted/internet/iocpreactor/udp.py    |   382 +
 ThirdParty/Twisted/twisted/internet/kqreactor.py   |   305 +
 ThirdParty/Twisted/twisted/internet/main.py        |    37 +
 ThirdParty/Twisted/twisted/internet/pollreactor.py |   189 +
 ThirdParty/Twisted/twisted/internet/posixbase.py   |   640 +
 ThirdParty/Twisted/twisted/internet/process.py     |  1084 ++
 ThirdParty/Twisted/twisted/internet/protocol.py    |   827 ++
 ThirdParty/Twisted/twisted/internet/pyuisupport.py |    37 +
 ThirdParty/Twisted/twisted/internet/qtreactor.py   |    19 +
 ThirdParty/Twisted/twisted/internet/reactor.py     |    39 +
 .../Twisted/twisted/internet/selectreactor.py      |   204 +
 ThirdParty/Twisted/twisted/internet/serialport.py  |    87 +
 ThirdParty/Twisted/twisted/internet/ssl.py         |   205 +
 ThirdParty/Twisted/twisted/internet/stdio.py       |    35 +
 ThirdParty/Twisted/twisted/internet/task.py        |   857 ++
 ThirdParty/Twisted/twisted/internet/tcp.py         |  1183 ++
 .../Twisted/twisted/internet/test/__init__.py      |     6 +
 .../Twisted/twisted/internet/test/_posixifaces.py  |   148 +
 .../Twisted/twisted/internet/test/_win32ifaces.py  |   119 +
 .../twisted/internet/test/connectionmixins.py      |   776 ++
 .../internet/test/fake_CAs/not-a-certificate       |     1 +
 .../twisted/internet/test/fake_CAs/thing1.pem      |    26 +
 .../internet/test/fake_CAs/thing2-duplicate.pem    |    26 +
 .../twisted/internet/test/fake_CAs/thing2.pem      |    26 +
 .../Twisted/twisted/internet/test/fakeendpoint.py  |    66 +
 .../Twisted/twisted/internet/test/modulehelpers.py |    43 +
 .../internet/test/process_gireactornocompat.py     |    22 +
 .../twisted/internet/test/process_helper.py        |    33 +
 .../Twisted/twisted/internet/test/reactormixins.py |   315 +
 .../Twisted/twisted/internet/test/test_abstract.py |    58 +
 .../Twisted/twisted/internet/test/test_address.py  |   318 +
 .../Twisted/twisted/internet/test/test_base.py     |   279 +
 .../twisted/internet/test/test_baseprocess.py      |    73 +
 .../Twisted/twisted/internet/test/test_core.py     |   333 +
 .../Twisted/twisted/internet/test/test_default.py  |   120 +
 .../twisted/internet/test/test_endpoints.py        |  1029 ++
 .../twisted/internet/test/test_endpointspy3.py     |  1036 ++
 .../twisted/internet/test/test_epollreactor.py     |   248 +
 .../Twisted/twisted/internet/test/test_fdset.py    |   426 +
 .../twisted/internet/test/test_filedescriptor.py   |    99 +
 .../twisted/internet/test/test_gireactor.py        |   251 +
 .../Twisted/twisted/internet/test/test_glibbase.py |    68 +
 .../twisted/internet/test/test_gtkreactor.py       |    95 +
 .../Twisted/twisted/internet/test/test_inlinecb.py |    90 +
 .../Twisted/twisted/internet/test/test_inotify.py  |   504 +
 .../Twisted/twisted/internet/test/test_iocp.py     |   150 +
 .../Twisted/twisted/internet/test/test_main.py     |    50 +
 .../Twisted/twisted/internet/test/test_newtls.py   |   197 +
 .../twisted/internet/test/test_pollingfile.py      |    46 +
 .../twisted/internet/test/test_posixbase.py        |   320 +
 .../twisted/internet/test/test_posixprocess.py     |   340 +
 .../Twisted/twisted/internet/test/test_process.py  |   695 +
 .../Twisted/twisted/internet/test/test_protocol.py |   457 +
 .../twisted/internet/test/test_qtreactor.py        |    35 +
 .../twisted/internet/test/test_serialport.py       |    72 +
 .../Twisted/twisted/internet/test/test_sigchld.py  |   125 +
 .../Twisted/twisted/internet/test/test_socket.py   |   128 +
 .../Twisted/twisted/internet/test/test_stdio.py    |   195 +
 .../Twisted/twisted/internet/test/test_tcp.py      |  2092 +++
 .../Twisted/twisted/internet/test/test_threads.py  |   220 +
 .../Twisted/twisted/internet/test/test_time.py     |    66 +
 .../Twisted/twisted/internet/test/test_tls.py      |   438 +
 .../Twisted/twisted/internet/test/test_udp.py      |   218 +
 .../twisted/internet/test/test_udp_internals.py    |   167 +
 .../Twisted/twisted/internet/test/test_unix.py     |   559 +
 .../Twisted/twisted/internet/test/test_utilspy3.py |    92 +
 .../twisted/internet/test/test_win32events.py      |   200 +
 ThirdParty/Twisted/twisted/internet/threads.py     |   127 +
 ThirdParty/Twisted/twisted/internet/tksupport.py   |    75 +
 ThirdParty/Twisted/twisted/internet/udp.py         |   348 +
 ThirdParty/Twisted/twisted/internet/unix.py        |   518 +
 ThirdParty/Twisted/twisted/internet/utils.py       |   178 +
 .../Twisted/twisted/internet/win32eventreactor.py  |   430 +
 ThirdParty/Twisted/twisted/internet/wxreactor.py   |   184 +
 ThirdParty/Twisted/twisted/internet/wxsupport.py   |    61 +
 ThirdParty/Twisted/twisted/lore/__init__.py        |    21 +
 ThirdParty/Twisted/twisted/lore/_version.py        |     3 +
 ThirdParty/Twisted/twisted/lore/default.py         |    56 +
 ThirdParty/Twisted/twisted/lore/docbook.py         |    68 +
 ThirdParty/Twisted/twisted/lore/htmlbook.py        |    49 +
 ThirdParty/Twisted/twisted/lore/indexer.py         |    50 +
 ThirdParty/Twisted/twisted/lore/latex.py           |   463 +
 ThirdParty/Twisted/twisted/lore/lint.py            |   204 +
 ThirdParty/Twisted/twisted/lore/lmath.py           |    85 +
 ThirdParty/Twisted/twisted/lore/man2lore.py        |   295 +
 ThirdParty/Twisted/twisted/lore/numberer.py        |    33 +
 ThirdParty/Twisted/twisted/lore/process.py         |   120 +
 .../Twisted/twisted/lore/scripts/__init__.py       |     1 +
 ThirdParty/Twisted/twisted/lore/scripts/lore.py    |   155 +
 ThirdParty/Twisted/twisted/lore/slides.py          |   359 +
 ThirdParty/Twisted/twisted/lore/template.mgp       |    24 +
 ThirdParty/Twisted/twisted/lore/test/__init__.py   |     1 +
 .../twisted/lore/test/lore_index_file_out.html     |     2 +
 .../lore/test/lore_index_file_out_multiple.html    |     5 +
 .../lore/test/lore_index_file_unnumbered_out.html  |     2 +
 .../twisted/lore/test/lore_index_test.xhtml        |    21 +
 .../twisted/lore/test/lore_index_test2.xhtml       |    22 +
 .../twisted/lore/test/lore_numbering_test_out.html |     2 +
 .../lore/test/lore_numbering_test_out2.html        |     2 +
 ThirdParty/Twisted/twisted/lore/test/simple.html   |     9 +
 ThirdParty/Twisted/twisted/lore/test/simple3.html  |     9 +
 ThirdParty/Twisted/twisted/lore/test/simple4.html  |     9 +
 ThirdParty/Twisted/twisted/lore/test/template.tpl  |    13 +
 .../Twisted/twisted/lore/test/test_docbook.py      |    35 +
 ThirdParty/Twisted/twisted/lore/test/test_latex.py |   146 +
 ThirdParty/Twisted/twisted/lore/test/test_lint.py  |   132 +
 ThirdParty/Twisted/twisted/lore/test/test_lmath.py |    72 +
 ThirdParty/Twisted/twisted/lore/test/test_lore.py  |  1198 ++
 .../Twisted/twisted/lore/test/test_man2lore.py     |   169 +
 .../Twisted/twisted/lore/test/test_scripts.py      |    27 +
 .../Twisted/twisted/lore/test/test_slides.py       |    85 +
 ThirdParty/Twisted/twisted/lore/texi.py            |   109 +
 ThirdParty/Twisted/twisted/lore/topfiles/NEWS      |   167 +
 ThirdParty/Twisted/twisted/lore/topfiles/README    |     3 +
 ThirdParty/Twisted/twisted/lore/topfiles/setup.py  |    29 +
 ThirdParty/Twisted/twisted/lore/tree.py            |  1122 ++
 ThirdParty/Twisted/twisted/lore/xhtml-lat1.ent     |   196 +
 ThirdParty/Twisted/twisted/lore/xhtml-special.ent  |    80 +
 ThirdParty/Twisted/twisted/lore/xhtml-symbol.ent   |   237 +
 ThirdParty/Twisted/twisted/lore/xhtml1-strict.dtd  |   978 ++
 .../Twisted/twisted/lore/xhtml1-transitional.dtd   |  1201 ++
 ThirdParty/Twisted/twisted/mail/__init__.py        |    15 +
 ThirdParty/Twisted/twisted/mail/_version.py        |     3 +
 ThirdParty/Twisted/twisted/mail/alias.py           |   435 +
 ThirdParty/Twisted/twisted/mail/bounce.py          |    60 +
 ThirdParty/Twisted/twisted/mail/imap4.py           |  6209 +++++++++
 ThirdParty/Twisted/twisted/mail/mail.py            |   333 +
 ThirdParty/Twisted/twisted/mail/maildir.py         |   518 +
 ThirdParty/Twisted/twisted/mail/pb.py              |   115 +
 ThirdParty/Twisted/twisted/mail/pop3.py            |  1071 ++
 ThirdParty/Twisted/twisted/mail/pop3client.py      |   706 +
 ThirdParty/Twisted/twisted/mail/protocols.py       |   233 +
 ThirdParty/Twisted/twisted/mail/relay.py           |   114 +
 ThirdParty/Twisted/twisted/mail/relaymanager.py    |   631 +
 .../Twisted/twisted/mail/scripts/__init__.py       |     1 +
 .../Twisted/twisted/mail/scripts/mailmail.py       |   366 +
 ThirdParty/Twisted/twisted/mail/smtp.py            |  1934 +++
 ThirdParty/Twisted/twisted/mail/tap.py             |   361 +
 ThirdParty/Twisted/twisted/mail/test/__init__.py   |     1 +
 .../Twisted/twisted/mail/test/pop3testserver.py    |   314 +
 .../Twisted/twisted/mail/test/rfc822.message       |    86 +
 ThirdParty/Twisted/twisted/mail/test/server.pem    |    36 +
 .../Twisted/twisted/mail/test/test_bounce.py       |    32 +
 ThirdParty/Twisted/twisted/mail/test/test_imap.py  |  4892 +++++++
 ThirdParty/Twisted/twisted/mail/test/test_mail.py  |  2060 +++
 .../Twisted/twisted/mail/test/test_mailmail.py     |    75 +
 .../Twisted/twisted/mail/test/test_options.py      |   247 +
 ThirdParty/Twisted/twisted/mail/test/test_pop3.py  |  1071 ++
 .../Twisted/twisted/mail/test/test_pop3client.py   |   582 +
 .../Twisted/twisted/mail/test/test_scripts.py      |    18 +
 ThirdParty/Twisted/twisted/mail/test/test_smtp.py  |  1520 +++
 ThirdParty/Twisted/twisted/mail/topfiles/NEWS      |   328 +
 ThirdParty/Twisted/twisted/mail/topfiles/README    |     6 +
 ThirdParty/Twisted/twisted/mail/topfiles/setup.py  |    50 +
 ThirdParty/Twisted/twisted/manhole/__init__.py     |     8 +
 ThirdParty/Twisted/twisted/manhole/_inspectro.py   |   369 +
 ThirdParty/Twisted/twisted/manhole/explorer.py     |   654 +
 .../Twisted/twisted/manhole/gladereactor.glade     |   342 +
 ThirdParty/Twisted/twisted/manhole/gladereactor.py |   219 +
 ThirdParty/Twisted/twisted/manhole/inspectro.glade |   510 +
 ThirdParty/Twisted/twisted/manhole/logview.glade   |    39 +
 ThirdParty/Twisted/twisted/manhole/service.py      |   399 +
 ThirdParty/Twisted/twisted/manhole/telnet.py       |   117 +
 .../Twisted/twisted/manhole/test/__init__.py       |     6 +
 .../Twisted/twisted/manhole/test/test_explorer.py  |   102 +
 ThirdParty/Twisted/twisted/manhole/ui/__init__.py  |     7 +
 .../Twisted/twisted/manhole/ui/gtk2manhole.glade   |   268 +
 .../Twisted/twisted/manhole/ui/gtk2manhole.py      |   375 +
 .../Twisted/twisted/manhole/ui/test/__init__.py    |     4 +
 .../twisted/manhole/ui/test/test_gtk2manhole.py    |    48 +
 ThirdParty/Twisted/twisted/names/__init__.py       |     7 +
 ThirdParty/Twisted/twisted/names/_version.py       |     3 +
 ThirdParty/Twisted/twisted/names/authority.py      |   334 +
 ThirdParty/Twisted/twisted/names/cache.py          |   127 +
 ThirdParty/Twisted/twisted/names/client.py         |   932 ++
 ThirdParty/Twisted/twisted/names/common.py         |   307 +
 ThirdParty/Twisted/twisted/names/dns.py            |  2049 +++
 ThirdParty/Twisted/twisted/names/error.py          |    97 +
 ThirdParty/Twisted/twisted/names/hosts.py          |   149 +
 ThirdParty/Twisted/twisted/names/resolve.py        |    59 +
 ThirdParty/Twisted/twisted/names/root.py           |   448 +
 ThirdParty/Twisted/twisted/names/secondary.py      |   179 +
 ThirdParty/Twisted/twisted/names/server.py         |   205 +
 ThirdParty/Twisted/twisted/names/srvconnect.py     |   211 +
 ThirdParty/Twisted/twisted/names/tap.py            |   150 +
 ThirdParty/Twisted/twisted/names/test/__init__.py  |     1 +
 .../Twisted/twisted/names/test/test_cache.py       |   135 +
 .../Twisted/twisted/names/test/test_client.py      |  1047 ++
 .../Twisted/twisted/names/test/test_common.py      |   126 +
 ThirdParty/Twisted/twisted/names/test/test_dns.py  |  1790 +++
 .../Twisted/twisted/names/test/test_hosts.py       |   258 +
 .../Twisted/twisted/names/test/test_names.py       |   817 ++
 .../Twisted/twisted/names/test/test_rootresolve.py |   725 ++
 .../Twisted/twisted/names/test/test_srvconnect.py  |   169 +
 ThirdParty/Twisted/twisted/names/test/test_tap.py  |    99 +
 ThirdParty/Twisted/twisted/names/topfiles/NEWS     |   258 +
 ThirdParty/Twisted/twisted/names/topfiles/README   |     3 +
 ThirdParty/Twisted/twisted/names/topfiles/setup.py |    50 +
 ThirdParty/Twisted/twisted/news/__init__.py        |    11 +
 ThirdParty/Twisted/twisted/news/_version.py        |     3 +
 ThirdParty/Twisted/twisted/news/database.py        |  1051 ++
 ThirdParty/Twisted/twisted/news/news.py            |    90 +
 ThirdParty/Twisted/twisted/news/nntp.py            |  1036 ++
 ThirdParty/Twisted/twisted/news/tap.py             |   138 +
 ThirdParty/Twisted/twisted/news/test/__init__.py   |     1 +
 .../Twisted/twisted/news/test/test_database.py     |   224 +
 ThirdParty/Twisted/twisted/news/test/test_news.py  |   107 +
 ThirdParty/Twisted/twisted/news/test/test_nntp.py  |   197 +
 ThirdParty/Twisted/twisted/news/topfiles/NEWS      |   118 +
 ThirdParty/Twisted/twisted/news/topfiles/README    |     4 +
 ThirdParty/Twisted/twisted/news/topfiles/setup.py  |    28 +
 ThirdParty/Twisted/twisted/pair/__init__.py        |    20 +
 ThirdParty/Twisted/twisted/pair/_version.py        |     3 +
 ThirdParty/Twisted/twisted/pair/ethernet.py        |    56 +
 ThirdParty/Twisted/twisted/pair/ip.py              |    72 +
 ThirdParty/Twisted/twisted/pair/raw.py             |    35 +
 ThirdParty/Twisted/twisted/pair/rawudp.py          |    55 +
 ThirdParty/Twisted/twisted/pair/test/__init__.py   |     1 +
 .../Twisted/twisted/pair/test/test_ethernet.py     |   226 +
 ThirdParty/Twisted/twisted/pair/test/test_ip.py    |   417 +
 .../Twisted/twisted/pair/test/test_rawudp.py       |   327 +
 ThirdParty/Twisted/twisted/pair/topfiles/NEWS      |    68 +
 ThirdParty/Twisted/twisted/pair/topfiles/README    |     4 +
 ThirdParty/Twisted/twisted/pair/topfiles/setup.py  |    28 +
 ThirdParty/Twisted/twisted/pair/tuntap.py          |   170 +
 ThirdParty/Twisted/twisted/persisted/__init__.py   |     6 +
 ThirdParty/Twisted/twisted/persisted/aot.py        |   560 +
 ThirdParty/Twisted/twisted/persisted/crefutil.py   |   163 +
 ThirdParty/Twisted/twisted/persisted/dirdbm.py     |   358 +
 ThirdParty/Twisted/twisted/persisted/sob.py        |   227 +
 ThirdParty/Twisted/twisted/persisted/styles.py     |   262 +
 .../Twisted/twisted/persisted/test/__init__.py     |     6 +
 .../Twisted/twisted/persisted/test/test_styles.py  |    55 +
 ThirdParty/Twisted/twisted/plugin.py               |   255 +
 ThirdParty/Twisted/twisted/plugins/__init__.py     |    17 +
 .../Twisted/twisted/plugins/cred_anonymous.py      |    40 +
 ThirdParty/Twisted/twisted/plugins/cred_file.py    |    60 +
 ThirdParty/Twisted/twisted/plugins/cred_memory.py  |    68 +
 ThirdParty/Twisted/twisted/plugins/cred_sshkeys.py |    51 +
 ThirdParty/Twisted/twisted/plugins/cred_unix.py    |   138 +
 .../Twisted/twisted/plugins/twisted_conch.py       |    18 +
 ThirdParty/Twisted/twisted/plugins/twisted_core.py |     9 +
 ThirdParty/Twisted/twisted/plugins/twisted_ftp.py  |    10 +
 ThirdParty/Twisted/twisted/plugins/twisted_inet.py |    10 +
 ThirdParty/Twisted/twisted/plugins/twisted_lore.py |    38 +
 ThirdParty/Twisted/twisted/plugins/twisted_mail.py |    10 +
 .../Twisted/twisted/plugins/twisted_manhole.py     |    10 +
 .../Twisted/twisted/plugins/twisted_names.py       |    10 +
 ThirdParty/Twisted/twisted/plugins/twisted_news.py |    10 +
 .../Twisted/twisted/plugins/twisted_portforward.py |    10 +
 .../Twisted/twisted/plugins/twisted_qtstub.py      |    45 +
 .../Twisted/twisted/plugins/twisted_reactors.py    |    42 +
 .../Twisted/twisted/plugins/twisted_runner.py      |    10 +
 .../Twisted/twisted/plugins/twisted_socks.py       |    10 +
 .../Twisted/twisted/plugins/twisted_telnet.py      |    10 +
 .../Twisted/twisted/plugins/twisted_trial.py       |    59 +
 ThirdParty/Twisted/twisted/plugins/twisted_web.py  |    11 +
 .../Twisted/twisted/plugins/twisted_words.py       |    43 +
 ThirdParty/Twisted/twisted/protocols/__init__.py   |     7 +
 ThirdParty/Twisted/twisted/protocols/amp.py        |  2705 ++++
 ThirdParty/Twisted/twisted/protocols/basic.py      |   963 ++
 ThirdParty/Twisted/twisted/protocols/dict.py       |   362 +
 ThirdParty/Twisted/twisted/protocols/finger.py     |    42 +
 ThirdParty/Twisted/twisted/protocols/ftp.py        |  3059 +++++
 .../Twisted/twisted/protocols/gps/__init__.py      |     1 +
 ThirdParty/Twisted/twisted/protocols/gps/nmea.py   |   209 +
 .../Twisted/twisted/protocols/gps/rockwell.py      |   268 +
 ThirdParty/Twisted/twisted/protocols/htb.py        |   297 +
 ThirdParty/Twisted/twisted/protocols/ident.py      |   231 +
 ThirdParty/Twisted/twisted/protocols/loopback.py   |   377 +
 ThirdParty/Twisted/twisted/protocols/memcache.py   |   758 ++
 .../Twisted/twisted/protocols/mice/__init__.py     |     1 +
 .../Twisted/twisted/protocols/mice/mouseman.py     |   127 +
 ThirdParty/Twisted/twisted/protocols/pcp.py        |   204 +
 ThirdParty/Twisted/twisted/protocols/policies.py   |   727 ++
 .../Twisted/twisted/protocols/portforward.py       |    87 +
 ThirdParty/Twisted/twisted/protocols/postfix.py    |   112 +
 ThirdParty/Twisted/twisted/protocols/shoutcast.py  |   111 +
 ThirdParty/Twisted/twisted/protocols/sip.py        |  1347 ++
 ThirdParty/Twisted/twisted/protocols/socks.py      |   240 +
 ThirdParty/Twisted/twisted/protocols/stateful.py   |    52 +
 ThirdParty/Twisted/twisted/protocols/telnet.py     |   325 +
 .../Twisted/twisted/protocols/test/__init__.py     |     6 +
 .../Twisted/twisted/protocols/test/test_basic.py   |  1061 ++
 .../Twisted/twisted/protocols/test/test_tls.py     |  1522 +++
 ThirdParty/Twisted/twisted/protocols/tls.py        |   617 +
 ThirdParty/Twisted/twisted/protocols/wire.py       |    90 +
 ThirdParty/Twisted/twisted/python/__init__.py      |    13 +
 ThirdParty/Twisted/twisted/python/_epoll.c         |  3348 +++++
 ThirdParty/Twisted/twisted/python/_epoll.pyx       |   285 +
 ThirdParty/Twisted/twisted/python/_initgroups.c    |    66 +
 ThirdParty/Twisted/twisted/python/_inotify.py      |   101 +
 ThirdParty/Twisted/twisted/python/_reflectpy3.py   |   325 +
 ThirdParty/Twisted/twisted/python/_release.py      |  1371 ++
 ThirdParty/Twisted/twisted/python/_shellcomp.py    |   668 +
 ThirdParty/Twisted/twisted/python/_utilpy3.py      |   337 +
 ThirdParty/Twisted/twisted/python/compat.py        |   432 +
 ThirdParty/Twisted/twisted/python/components.py    |   443 +
 ThirdParty/Twisted/twisted/python/constants.py     |   377 +
 ThirdParty/Twisted/twisted/python/context.py       |   133 +
 ThirdParty/Twisted/twisted/python/deprecate.py     |   539 +
 ThirdParty/Twisted/twisted/python/dist.py          |   461 +
 ThirdParty/Twisted/twisted/python/failure.py       |   654 +
 ThirdParty/Twisted/twisted/python/fakepwd.py       |   219 +
 ThirdParty/Twisted/twisted/python/filepath.py      |  1429 +++
 ThirdParty/Twisted/twisted/python/finalize.py      |    46 +
 ThirdParty/Twisted/twisted/python/formmethod.py    |   363 +
 ThirdParty/Twisted/twisted/python/hashlib.py       |    24 +
 ThirdParty/Twisted/twisted/python/hook.py          |   176 +
 ThirdParty/Twisted/twisted/python/htmlizer.py      |    91 +
 ThirdParty/Twisted/twisted/python/lockfile.py      |   214 +
 ThirdParty/Twisted/twisted/python/log.py           |   629 +
 ThirdParty/Twisted/twisted/python/logfile.py       |   323 +
 ThirdParty/Twisted/twisted/python/modules.py       |   758 ++
 ThirdParty/Twisted/twisted/python/monkey.py        |    75 +
 ThirdParty/Twisted/twisted/python/procutils.py     |    45 +
 ThirdParty/Twisted/twisted/python/randbytes.py     |   150 +
 ThirdParty/Twisted/twisted/python/rebuild.py       |   271 +
 ThirdParty/Twisted/twisted/python/reflect.py       |   537 +
 ThirdParty/Twisted/twisted/python/release.py       |    63 +
 ThirdParty/Twisted/twisted/python/roots.py         |   248 +
 ThirdParty/Twisted/twisted/python/runtime.py       |   154 +
 ThirdParty/Twisted/twisted/python/sendmsg.c        |   511 +
 ThirdParty/Twisted/twisted/python/shortcut.py      |    76 +
 ThirdParty/Twisted/twisted/python/syslog.py        |   107 +
 ThirdParty/Twisted/twisted/python/systemd.py       |    87 +
 ThirdParty/Twisted/twisted/python/text.py          |   208 +
 ThirdParty/Twisted/twisted/python/threadable.py    |   139 +
 ThirdParty/Twisted/twisted/python/threadpool.py    |   245 +
 .../Twisted/twisted/python/twisted-completion.zsh  |    33 +
 ThirdParty/Twisted/twisted/python/urlpath.py       |   122 +
 ThirdParty/Twisted/twisted/python/usage.py         |   973 ++
 ThirdParty/Twisted/twisted/python/util.py          |   754 ++
 ThirdParty/Twisted/twisted/python/versions.py      |   258 +
 ThirdParty/Twisted/twisted/python/win32.py         |   166 +
 ThirdParty/Twisted/twisted/python/zippath.py       |   268 +
 ThirdParty/Twisted/twisted/python/zipstream.py     |   319 +
 ThirdParty/Twisted/twisted/python/zsh/README.txt   |     9 +
 ThirdParty/Twisted/twisted/python/zsh/_cftp        |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_ckeygen     |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_conch       |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_lore        |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_manhole     |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_mktap       |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_pyhtmlizer  |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_tap2deb     |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_tap2rpm     |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_tapconvert  |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_tkconch     |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_tkmktap     |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_trial       |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_twistd      |    34 +
 ThirdParty/Twisted/twisted/python/zsh/_websetroot  |    34 +
 ThirdParty/Twisted/twisted/python/zshcomp.py       |   824 ++
 ThirdParty/Twisted/twisted/runner/__init__.py      |    15 +
 ThirdParty/Twisted/twisted/runner/_version.py      |     3 +
 ThirdParty/Twisted/twisted/runner/inetd.py         |    70 +
 ThirdParty/Twisted/twisted/runner/inetdconf.py     |   194 +
 ThirdParty/Twisted/twisted/runner/inetdtap.py      |   163 +
 ThirdParty/Twisted/twisted/runner/portmap.c        |    57 +
 ThirdParty/Twisted/twisted/runner/procmon.py       |   310 +
 ThirdParty/Twisted/twisted/runner/procmontap.py    |    73 +
 ThirdParty/Twisted/twisted/runner/test/__init__.py |     6 +
 .../Twisted/twisted/runner/test/test_procmon.py    |   477 +
 .../Twisted/twisted/runner/test/test_procmontap.py |    87 +
 ThirdParty/Twisted/twisted/runner/topfiles/NEWS    |   113 +
 ThirdParty/Twisted/twisted/runner/topfiles/README  |     3 +
 .../Twisted/twisted/runner/topfiles/setup.py       |    35 +
 ThirdParty/Twisted/twisted/scripts/__init__.py     |    27 +
 ThirdParty/Twisted/twisted/scripts/_twistd_unix.py |   349 +
 ThirdParty/Twisted/twisted/scripts/_twistw.py      |    50 +
 ThirdParty/Twisted/twisted/scripts/htmlizer.py     |    69 +
 ThirdParty/Twisted/twisted/scripts/manhole.py      |    69 +
 ThirdParty/Twisted/twisted/scripts/tap2deb.py      |   281 +
 ThirdParty/Twisted/twisted/scripts/tap2rpm.py      |   331 +
 ThirdParty/Twisted/twisted/scripts/tapconvert.py   |    57 +
 .../Twisted/twisted/scripts/test/__init__.py       |     6 +
 .../Twisted/twisted/scripts/test/test_scripts.py   |   201 +
 .../Twisted/twisted/scripts/test/test_tap2rpm.py   |   399 +
 ThirdParty/Twisted/twisted/scripts/tkunzip.py      |   290 +
 ThirdParty/Twisted/twisted/scripts/trial.py        |   521 +
 ThirdParty/Twisted/twisted/scripts/twistd.py       |    30 +
 ThirdParty/Twisted/twisted/spread/__init__.py      |    12 +
 ThirdParty/Twisted/twisted/spread/banana.py        |   358 +
 ThirdParty/Twisted/twisted/spread/flavors.py       |   590 +
 ThirdParty/Twisted/twisted/spread/interfaces.py    |    28 +
 ThirdParty/Twisted/twisted/spread/jelly.py         |  1151 ++
 ThirdParty/Twisted/twisted/spread/pb.py            |  1434 +++
 ThirdParty/Twisted/twisted/spread/publish.py       |   142 +
 ThirdParty/Twisted/twisted/spread/ui/__init__.py   |    12 +
 ThirdParty/Twisted/twisted/spread/ui/gtk2util.py   |   218 +
 ThirdParty/Twisted/twisted/spread/ui/login2.glade  |   461 +
 ThirdParty/Twisted/twisted/spread/ui/tktree.py     |   204 +
 ThirdParty/Twisted/twisted/spread/ui/tkutil.py     |   397 +
 ThirdParty/Twisted/twisted/spread/util.py          |   215 +
 ThirdParty/Twisted/twisted/tap/__init__.py         |    10 +
 ThirdParty/Twisted/twisted/tap/ftp.py              |    69 +
 ThirdParty/Twisted/twisted/tap/manhole.py          |    54 +
 ThirdParty/Twisted/twisted/tap/portforward.py      |    27 +
 ThirdParty/Twisted/twisted/tap/socks.py            |    38 +
 ThirdParty/Twisted/twisted/tap/telnet.py           |    32 +
 ThirdParty/Twisted/twisted/test/__init__.py        |    10 +
 ThirdParty/Twisted/twisted/test/_preamble.py       |    17 +
 .../Twisted/twisted/test/crash_test_dummy.py       |    34 +
 ThirdParty/Twisted/twisted/test/iosim.py           |   270 +
 .../Twisted/twisted/test/mock_win32process.py      |    48 +
 ThirdParty/Twisted/twisted/test/myrebuilder1.py    |    15 +
 ThirdParty/Twisted/twisted/test/myrebuilder2.py    |    16 +
 ThirdParty/Twisted/twisted/test/plugin_basic.py    |    57 +
 ThirdParty/Twisted/twisted/test/plugin_extra1.py   |    23 +
 ThirdParty/Twisted/twisted/test/plugin_extra2.py   |    35 +
 ThirdParty/Twisted/twisted/test/process_cmdline.py |     5 +
 ThirdParty/Twisted/twisted/test/process_echoer.py  |    11 +
 ThirdParty/Twisted/twisted/test/process_fds.py     |    40 +
 ThirdParty/Twisted/twisted/test/process_linger.py  |    17 +
 ThirdParty/Twisted/twisted/test/process_reader.py  |    12 +
 ThirdParty/Twisted/twisted/test/process_signal.py  |     8 +
 .../Twisted/twisted/test/process_stdinreader.py    |    23 +
 ThirdParty/Twisted/twisted/test/process_tester.py  |    37 +
 ThirdParty/Twisted/twisted/test/process_tty.py     |     6 +
 ThirdParty/Twisted/twisted/test/process_twisted.py |    43 +
 ThirdParty/Twisted/twisted/test/proto_helpers.py   |   573 +
 ThirdParty/Twisted/twisted/test/raiser.c           |  1443 +++
 ThirdParty/Twisted/twisted/test/raiser.pyx         |    21 +
 .../Twisted/twisted/test/reflect_helper_IE.py      |     4 +
 .../Twisted/twisted/test/reflect_helper_VE.py      |     4 +
 .../Twisted/twisted/test/reflect_helper_ZDE.py     |     4 +
 ThirdParty/Twisted/twisted/test/server.pem         |    36 +
 ThirdParty/Twisted/twisted/test/ssl_helpers.py     |    37 +
 .../Twisted/twisted/test/stdio_test_consumer.py    |    39 +
 .../Twisted/twisted/test/stdio_test_halfclose.py   |    66 +
 .../Twisted/twisted/test/stdio_test_hostpeer.py    |    32 +
 .../Twisted/twisted/test/stdio_test_lastwrite.py   |    45 +
 .../Twisted/twisted/test/stdio_test_loseconn.py    |    48 +
 .../Twisted/twisted/test/stdio_test_producer.py    |    55 +
 .../Twisted/twisted/test/stdio_test_write.py       |    31 +
 .../Twisted/twisted/test/stdio_test_writeseq.py    |    30 +
 ThirdParty/Twisted/twisted/test/test_abstract.py   |    85 +
 ThirdParty/Twisted/twisted/test/test_adbapi.py     |   819 ++
 ThirdParty/Twisted/twisted/test/test_amp.py        |  3178 +++++
 .../Twisted/twisted/test/test_application.py       |   841 ++
 ThirdParty/Twisted/twisted/test/test_banana.py     |   278 +
 ThirdParty/Twisted/twisted/test/test_compat.py     |   623 +
 ThirdParty/Twisted/twisted/test/test_context.py    |    51 +
 ThirdParty/Twisted/twisted/test/test_cooperator.py |   671 +
 ThirdParty/Twisted/twisted/test/test_defer.py      |  2030 +++
 ThirdParty/Twisted/twisted/test/test_defgen.py     |   301 +
 ThirdParty/Twisted/twisted/test/test_dict.py       |    22 +
 ThirdParty/Twisted/twisted/test/test_digestauth.py |   671 +
 ThirdParty/Twisted/twisted/test/test_dirdbm.py     |   170 +
 ThirdParty/Twisted/twisted/test/test_doc.py        |   104 +
 ThirdParty/Twisted/twisted/test/test_epoll.py      |   158 +
 ThirdParty/Twisted/twisted/test/test_error.py      |   251 +
 ThirdParty/Twisted/twisted/test/test_explorer.py   |   236 +
 ThirdParty/Twisted/twisted/test/test_factories.py  |   145 +
 ThirdParty/Twisted/twisted/test/test_failure.py    |   781 ++
 ThirdParty/Twisted/twisted/test/test_fdesc.py      |   266 +
 ThirdParty/Twisted/twisted/test/test_finger.py     |    67 +
 ThirdParty/Twisted/twisted/test/test_formmethod.py |    98 +
 ThirdParty/Twisted/twisted/test/test_ftp.py        |  3202 +++++
 .../Twisted/twisted/test/test_ftp_options.py       |    80 +
 ThirdParty/Twisted/twisted/test/test_hook.py       |   150 +
 ThirdParty/Twisted/twisted/test/test_htb.py        |   109 +
 ThirdParty/Twisted/twisted/test/test_ident.py      |   194 +
 ThirdParty/Twisted/twisted/test/test_import.py     |    75 +
 ThirdParty/Twisted/twisted/test/test_internet.py   |  1419 +++
 ThirdParty/Twisted/twisted/test/test_iutils.py     |   259 +
 ThirdParty/Twisted/twisted/test/test_jelly.py      |   671 +
 ThirdParty/Twisted/twisted/test/test_lockfile.py   |   445 +
 ThirdParty/Twisted/twisted/test/test_log.py        |   842 ++
 ThirdParty/Twisted/twisted/test/test_logfile.py    |   320 +
 ThirdParty/Twisted/twisted/test/test_loopback.py   |   431 +
 ThirdParty/Twisted/twisted/test/test_manhole.py    |    75 +
 ThirdParty/Twisted/twisted/test/test_memcache.py   |   663 +
 ThirdParty/Twisted/twisted/test/test_modules.py    |   494 +
 ThirdParty/Twisted/twisted/test/test_monkey.py     |   164 +
 ThirdParty/Twisted/twisted/test/test_newcred.py    |   445 +
 ThirdParty/Twisted/twisted/test/test_nmea.py       |   115 +
 ThirdParty/Twisted/twisted/test/test_paths.py      |  1510 +++
 ThirdParty/Twisted/twisted/test/test_pb.py         |  1846 +++
 ThirdParty/Twisted/twisted/test/test_pbfailure.py  |   475 +
 ThirdParty/Twisted/twisted/test/test_pcp.py        |   368 +
 ThirdParty/Twisted/twisted/test/test_persisted.py  |   377 +
 ThirdParty/Twisted/twisted/test/test_plugin.py     |   719 ++
 ThirdParty/Twisted/twisted/test/test_policies.py   |   854 ++
 ThirdParty/Twisted/twisted/test/test_postfix.py    |   108 +
 ThirdParty/Twisted/twisted/test/test_process.py    |  2561 ++++
 ThirdParty/Twisted/twisted/test/test_protocols.py  |   236 +
 ThirdParty/Twisted/twisted/test/test_randbytes.py  |   121 +
 ThirdParty/Twisted/twisted/test/test_rebuild.py    |   252 +
 ThirdParty/Twisted/twisted/test/test_reflect.py    |   419 +
 ThirdParty/Twisted/twisted/test/test_roots.py      |    63 +
 ThirdParty/Twisted/twisted/test/test_setup.py      |    61 +
 ThirdParty/Twisted/twisted/test/test_shortcut.py   |    26 +
 ThirdParty/Twisted/twisted/test/test_sip.py        |   984 ++
 ThirdParty/Twisted/twisted/test/test_sob.py        |   172 +
 ThirdParty/Twisted/twisted/test/test_socks.py      |   498 +
 ThirdParty/Twisted/twisted/test/test_ssl.py        |   727 ++
 ThirdParty/Twisted/twisted/test/test_sslverify.py  |   566 +
 ThirdParty/Twisted/twisted/test/test_stateful.py   |    81 +
 ThirdParty/Twisted/twisted/test/test_stdio.py      |   371 +
 ThirdParty/Twisted/twisted/test/test_strcred.py    |   657 +
 ThirdParty/Twisted/twisted/test/test_strerror.py   |   151 +
 .../Twisted/twisted/test/test_stringtransport.py   |   279 +
 ThirdParty/Twisted/twisted/test/test_strports.py   |   133 +
 ThirdParty/Twisted/twisted/test/test_task.py       |  1029 ++
 ThirdParty/Twisted/twisted/test/test_tcp.py        |  1829 +++
 .../Twisted/twisted/test/test_tcp_internals.py     |   255 +
 ThirdParty/Twisted/twisted/test/test_text.py       |   242 +
 ThirdParty/Twisted/twisted/test/test_threadable.py |   132 +
 ThirdParty/Twisted/twisted/test/test_threadpool.py |   542 +
 ThirdParty/Twisted/twisted/test/test_threads.py    |   421 +
 ThirdParty/Twisted/twisted/test/test_tpfile.py     |    52 +
 ThirdParty/Twisted/twisted/test/test_twistd.py     |  1549 +++
 ThirdParty/Twisted/twisted/test/test_twisted.py    |   678 +
 ThirdParty/Twisted/twisted/test/test_udp.py        |   708 ++
 ThirdParty/Twisted/twisted/test/test_unix.py       |   405 +
 ThirdParty/Twisted/twisted/test/test_usage.py      |   584 +
 ThirdParty/Twisted/twisted/test/testutils.py       |    55 +
 ThirdParty/Twisted/twisted/topfiles/CREDITS        |    60 +
 ThirdParty/Twisted/twisted/topfiles/ChangeLog.Old  |  3888 ++++++
 ThirdParty/Twisted/twisted/topfiles/NEWS           |  1912 +++
 ThirdParty/Twisted/twisted/topfiles/README         |    14 +
 ThirdParty/Twisted/twisted/topfiles/setup.py       |    96 +
 ThirdParty/Twisted/twisted/trial/__init__.py       |    52 +
 ThirdParty/Twisted/twisted/trial/_asyncrunner.py   |   235 +
 ThirdParty/Twisted/twisted/trial/_asynctest.py     |   415 +
 ThirdParty/Twisted/twisted/trial/_dist/__init__.py |    47 +
 .../Twisted/twisted/trial/_dist/_preamble.py       |    23 +
 .../Twisted/twisted/trial/_dist/distreporter.py    |    94 +
 .../Twisted/twisted/trial/_dist/disttrial.py       |   252 +
 .../Twisted/twisted/trial/_dist/managercommands.py |    76 +
 ThirdParty/Twisted/twisted/trial/_dist/options.py  |    30 +
 .../Twisted/twisted/trial/_dist/test/__init__.py   |     6 +
 .../twisted/trial/_dist/test/test_distreporter.py  |    62 +
 .../twisted/trial/_dist/test/test_disttrial.py     |   372 +
 .../twisted/trial/_dist/test/test_options.py       |    48 +
 .../twisted/trial/_dist/test/test_worker.py        |   473 +
 .../trial/_dist/test/test_workerreporter.py        |   119 +
 .../twisted/trial/_dist/test/test_workertrial.py   |   149 +
 ThirdParty/Twisted/twisted/trial/_dist/worker.py   |   328 +
 .../Twisted/twisted/trial/_dist/workercommands.py  |    28 +
 .../Twisted/twisted/trial/_dist/workerreporter.py  |   123 +
 .../Twisted/twisted/trial/_dist/workertrial.py     |    94 +
 ThirdParty/Twisted/twisted/trial/_synctest.py      |  1252 ++
 ThirdParty/Twisted/twisted/trial/itrial.py         |   253 +
 ThirdParty/Twisted/twisted/trial/reporter.py       |  1249 ++
 ThirdParty/Twisted/twisted/trial/runner.py         |   846 ++
 ThirdParty/Twisted/twisted/trial/test/__init__.py  |     6 +
 ThirdParty/Twisted/twisted/trial/test/detests.py   |   203 +
 ThirdParty/Twisted/twisted/trial/test/erroneous.py |   167 +
 .../Twisted/twisted/trial/test/mockcustomsuite.py  |    21 +
 .../Twisted/twisted/trial/test/mockcustomsuite2.py |    21 +
 .../Twisted/twisted/trial/test/mockcustomsuite3.py |    28 +
 .../Twisted/twisted/trial/test/mockdoctest.py      |   104 +
 .../Twisted/twisted/trial/test/moduleself.py       |     7 +
 .../Twisted/twisted/trial/test/moduletest.py       |    11 +
 ThirdParty/Twisted/twisted/trial/test/notpython    |     2 +
 ThirdParty/Twisted/twisted/trial/test/novars.py    |     6 +
 ThirdParty/Twisted/twisted/trial/test/packages.py  |   180 +
 ThirdParty/Twisted/twisted/trial/test/sample.py    |   108 +
 .../Twisted/twisted/trial/test/scripttest.py       |    14 +
 ThirdParty/Twisted/twisted/trial/test/skipping.py  |   270 +
 .../Twisted/twisted/trial/test/suppression.py      |   115 +
 .../Twisted/twisted/trial/test/test_assertions.py  |  1076 ++
 .../twisted/trial/test/test_asyncassertions.py     |    83 +
 .../Twisted/twisted/trial/test/test_deferred.py    |   236 +
 .../Twisted/twisted/trial/test/test_doctest.py     |    64 +
 .../Twisted/twisted/trial/test/test_keyboard.py    |   119 +
 .../Twisted/twisted/trial/test/test_loader.py      |   613 +
 ThirdParty/Twisted/twisted/trial/test/test_log.py  |   235 +
 .../Twisted/twisted/trial/test/test_output.py      |   179 +
 .../Twisted/twisted/trial/test/test_plugins.py     |    46 +
 .../twisted/trial/test/test_pyunitcompat.py        |   302 +
 .../Twisted/twisted/trial/test/test_reporter.py    |  1657 +++
 .../Twisted/twisted/trial/test/test_runner.py      |  1022 ++
 .../Twisted/twisted/trial/test/test_script.py      |   596 +
 .../Twisted/twisted/trial/test/test_suppression.py |   162 +
 .../twisted/trial/test/test_test_visitor.py        |    82 +
 .../Twisted/twisted/trial/test/test_testcase.py    |    70 +
 .../Twisted/twisted/trial/test/test_tests.py       |  1267 ++
 ThirdParty/Twisted/twisted/trial/test/test_util.py |   739 ++
 .../Twisted/twisted/trial/test/test_warning.py     |   491 +
 ThirdParty/Twisted/twisted/trial/test/weird.py     |    23 +
 ThirdParty/Twisted/twisted/trial/unittest.py       |    42 +
 ThirdParty/Twisted/twisted/trial/util.py           |   458 +
 ThirdParty/Twisted/twisted/web/__init__.py         |    15 +
 ThirdParty/Twisted/twisted/web/_auth/__init__.py   |     7 +
 ThirdParty/Twisted/twisted/web/_auth/basic.py      |    59 +
 ThirdParty/Twisted/twisted/web/_auth/digest.py     |    54 +
 ThirdParty/Twisted/twisted/web/_auth/wrapper.py    |   225 +
 ThirdParty/Twisted/twisted/web/_element.py         |   185 +
 ThirdParty/Twisted/twisted/web/_flatten.py         |   314 +
 ThirdParty/Twisted/twisted/web/_newclient.py       |  1502 +++
 ThirdParty/Twisted/twisted/web/_responses.py       |   114 +
 ThirdParty/Twisted/twisted/web/_stan.py            |   325 +
 ThirdParty/Twisted/twisted/web/_version.py         |     3 +
 ThirdParty/Twisted/twisted/web/client.py           |  1616 +++
 ThirdParty/Twisted/twisted/web/demo.py             |    24 +
 ThirdParty/Twisted/twisted/web/distrib.py          |   373 +
 ThirdParty/Twisted/twisted/web/domhelpers.py       |   268 +
 ThirdParty/Twisted/twisted/web/error.py            |   380 +
 ThirdParty/Twisted/twisted/web/failure.xhtml       |    71 +
 ThirdParty/Twisted/twisted/web/guard.py            |    17 +
 ThirdParty/Twisted/twisted/web/html.py             |    46 +
 ThirdParty/Twisted/twisted/web/http.py             |  1889 +++
 ThirdParty/Twisted/twisted/web/http_headers.py     |   261 +
 ThirdParty/Twisted/twisted/web/iweb.py             |   591 +
 ThirdParty/Twisted/twisted/web/microdom.py         |  1028 ++
 ThirdParty/Twisted/twisted/web/proxy.py            |   303 +
 ThirdParty/Twisted/twisted/web/resource.py         |   405 +
 ThirdParty/Twisted/twisted/web/rewrite.py          |    52 +
 ThirdParty/Twisted/twisted/web/script.py           |   170 +
 ThirdParty/Twisted/twisted/web/server.py           |   723 ++
 ThirdParty/Twisted/twisted/web/soap.py             |   154 +
 ThirdParty/Twisted/twisted/web/static.py           |  1033 ++
 ThirdParty/Twisted/twisted/web/sux.py              |   636 +
 ThirdParty/Twisted/twisted/web/tap.py              |   232 +
 ThirdParty/Twisted/twisted/web/template.py         |   566 +
 ThirdParty/Twisted/twisted/web/test/__init__.py    |     7 +
 ThirdParty/Twisted/twisted/web/test/_util.py       |    77 +
 .../Twisted/twisted/web/test/requesthelper.py      |   239 +
 ThirdParty/Twisted/twisted/web/test/test_agent.py  |  2103 +++
 ThirdParty/Twisted/twisted/web/test/test_cgi.py    |   270 +
 .../Twisted/twisted/web/test/test_distrib.py       |   434 +
 .../Twisted/twisted/web/test/test_domhelpers.py    |   306 +
 ThirdParty/Twisted/twisted/web/test/test_error.py  |   151 +
 .../Twisted/twisted/web/test/test_flatten.py       |   348 +
 ThirdParty/Twisted/twisted/web/test/test_http.py   |  1849 +++
 .../Twisted/twisted/web/test/test_http_headers.py  |   631 +
 .../Twisted/twisted/web/test/test_httpauth.py      |   634 +
 .../Twisted/twisted/web/test/test_newclient.py     |  2521 ++++
 ThirdParty/Twisted/twisted/web/test/test_proxy.py  |   544 +
 .../Twisted/twisted/web/test/test_resource.py      |   261 +
 ThirdParty/Twisted/twisted/web/test/test_script.py |    70 +
 ThirdParty/Twisted/twisted/web/test/test_soap.py   |   114 +
 ThirdParty/Twisted/twisted/web/test/test_stan.py   |   139 +
 ThirdParty/Twisted/twisted/web/test/test_static.py |  1486 +++
 ThirdParty/Twisted/twisted/web/test/test_tap.py    |   196 +
 .../Twisted/twisted/web/test/test_template.py      |   810 ++
 ThirdParty/Twisted/twisted/web/test/test_util.py   |   424 +
 ThirdParty/Twisted/twisted/web/test/test_vhost.py  |   105 +
 ThirdParty/Twisted/twisted/web/test/test_web.py    |   972 ++
 .../Twisted/twisted/web/test/test_webclient.py     |  1070 ++
 ThirdParty/Twisted/twisted/web/test/test_wsgi.py   |  1572 +++
 ThirdParty/Twisted/twisted/web/test/test_xml.py    |  1105 ++
 ThirdParty/Twisted/twisted/web/test/test_xmlrpc.py |   849 ++
 ThirdParty/Twisted/twisted/web/topfiles/NEWS       |   602 +
 ThirdParty/Twisted/twisted/web/topfiles/README     |     6 +
 ThirdParty/Twisted/twisted/web/topfiles/setup.py   |    30 +
 ThirdParty/Twisted/twisted/web/twcgi.py            |   299 +
 ThirdParty/Twisted/twisted/web/util.py             |   433 +
 ThirdParty/Twisted/twisted/web/vhost.py            |   135 +
 ThirdParty/Twisted/twisted/web/wsgi.py             |   403 +
 ThirdParty/Twisted/twisted/web/xmlrpc.py           |   590 +
 ThirdParty/Twisted/twisted/words/__init__.py       |    10 +
 ThirdParty/Twisted/twisted/words/_version.py       |     3 +
 ThirdParty/Twisted/twisted/words/ewords.py         |    34 +
 ThirdParty/Twisted/twisted/words/im/__init__.py    |     6 +
 ThirdParty/Twisted/twisted/words/im/baseaccount.py |    62 +
 ThirdParty/Twisted/twisted/words/im/basechat.py    |   512 +
 ThirdParty/Twisted/twisted/words/im/basesupport.py |   270 +
 .../twisted/words/im/instancemessenger.glade       |  3165 +++++
 ThirdParty/Twisted/twisted/words/im/interfaces.py  |   364 +
 ThirdParty/Twisted/twisted/words/im/ircsupport.py  |   263 +
 ThirdParty/Twisted/twisted/words/im/locals.py      |    26 +
 ThirdParty/Twisted/twisted/words/im/pbsupport.py   |   260 +
 ThirdParty/Twisted/twisted/words/iwords.py         |   266 +
 .../Twisted/twisted/words/protocols/__init__.py    |     1 +
 ThirdParty/Twisted/twisted/words/protocols/irc.py  |  3302 +++++
 .../twisted/words/protocols/jabber/__init__.py     |     8 +
 .../twisted/words/protocols/jabber/client.py       |   368 +
 .../twisted/words/protocols/jabber/component.py    |   474 +
 .../twisted/words/protocols/jabber/error.py        |   336 +
 .../twisted/words/protocols/jabber/ijabber.py      |   199 +
 .../Twisted/twisted/words/protocols/jabber/jid.py  |   249 +
 .../twisted/words/protocols/jabber/jstrports.py    |    31 +
 .../Twisted/twisted/words/protocols/jabber/sasl.py |   243 +
 .../words/protocols/jabber/sasl_mechanisms.py      |   240 +
 .../twisted/words/protocols/jabber/xmlstream.py    |  1136 ++
 .../words/protocols/jabber/xmpp_stringprep.py      |   253 +
 ThirdParty/Twisted/twisted/words/protocols/msn.py  |  2479 ++++
 .../Twisted/twisted/words/protocols/oscar.py       |  1235 ++
 ThirdParty/Twisted/twisted/words/service.py        |  1223 ++
 ThirdParty/Twisted/twisted/words/tap.py            |    74 +
 ThirdParty/Twisted/twisted/words/test/__init__.py  |     1 +
 .../Twisted/twisted/words/test/test_basechat.py    |    68 +
 .../Twisted/twisted/words/test/test_basesupport.py |    97 +
 .../Twisted/twisted/words/test/test_domish.py      |   434 +
 ThirdParty/Twisted/twisted/words/test/test_irc.py  |  1898 +++
 .../Twisted/twisted/words/test/test_irc_service.py |   216 +
 .../Twisted/twisted/words/test/test_ircsupport.py  |    79 +
 .../twisted/words/test/test_jabberclient.py        |   414 +
 .../twisted/words/test/test_jabbercomponent.py     |   422 +
 .../Twisted/twisted/words/test/test_jabbererror.py |   342 +
 .../Twisted/twisted/words/test/test_jabberjid.py   |   225 +
 .../twisted/words/test/test_jabberjstrports.py     |    34 +
 .../Twisted/twisted/words/test/test_jabbersasl.py  |   272 +
 .../words/test/test_jabbersaslmechanisms.py        |    90 +
 .../twisted/words/test/test_jabberxmlstream.py     |  1334 ++
 .../words/test/test_jabberxmppstringprep.py        |    92 +
 ThirdParty/Twisted/twisted/words/test/test_msn.py  |   522 +
 .../Twisted/twisted/words/test/test_oscar.py       |    24 +
 .../Twisted/twisted/words/test/test_service.py     |   995 ++
 ThirdParty/Twisted/twisted/words/test/test_tap.py  |    78 +
 .../Twisted/twisted/words/test/test_xishutil.py    |   345 +
 .../Twisted/twisted/words/test/test_xmlstream.py   |   224 +
 .../twisted/words/test/test_xmpproutertap.py       |    84 +
 .../Twisted/twisted/words/test/test_xpath.py       |   260 +
 ThirdParty/Twisted/twisted/words/topfiles/NEWS     |   378 +
 ThirdParty/Twisted/twisted/words/topfiles/README   |     5 +
 ThirdParty/Twisted/twisted/words/topfiles/setup.py |    53 +
 ThirdParty/Twisted/twisted/words/xish/__init__.py  |    10 +
 ThirdParty/Twisted/twisted/words/xish/domish.py    |   848 ++
 ThirdParty/Twisted/twisted/words/xish/utility.py   |   372 +
 ThirdParty/Twisted/twisted/words/xish/xmlstream.py |   261 +
 ThirdParty/Twisted/twisted/words/xish/xpath.py     |   333 +
 .../Twisted/twisted/words/xish/xpathparser.g       |   375 +
 .../Twisted/twisted/words/xish/xpathparser.py      |   508 +
 ThirdParty/Twisted/twisted/words/xmpproutertap.py  |    30 +
 ThirdParty/VPIC/CMakeLists.txt                     |     2 +-
 ThirdParty/ZopeInterface/CMakeLists.txt            |    20 +
 ThirdParty/ZopeInterface/COPYRIGHT.txt             |     1 +
 ThirdParty/ZopeInterface/LICENSE.txt               |    44 +
 ThirdParty/ZopeInterface/module.cmake              |     4 +
 ThirdParty/ZopeInterface/zope/__init__.py          |     7 +
 .../ZopeInterface/zope/interface/__init__.py       |    89 +
 ThirdParty/ZopeInterface/zope/interface/_compat.py |    69 +
 .../ZopeInterface/zope/interface/_flatten.py       |    35 +
 .../interface/_zope_interface_coptimizations.c     |  1688 +++
 ThirdParty/ZopeInterface/zope/interface/adapter.py |   704 +
 ThirdParty/ZopeInterface/zope/interface/advice.py  |   206 +
 .../zope/interface/common/__init__.py              |     2 +
 .../zope/interface/common/idatetime.py             |   575 +
 .../zope/interface/common/interfaces.py            |   102 +
 .../ZopeInterface/zope/interface/common/mapping.py |   125 +
 .../zope/interface/common/sequence.py              |   160 +
 .../zope/interface/common/tests/__init__.py        |     2 +
 .../zope/interface/common/tests/basemapping.py     |   107 +
 .../zope/interface/common/tests/test_idatetime.py  |    47 +
 .../common/tests/test_import_interfaces.py         |    29 +
 .../ZopeInterface/zope/interface/declarations.py   |   848 ++
 .../ZopeInterface/zope/interface/document.py       |   104 +
 .../ZopeInterface/zope/interface/exceptions.py     |    67 +
 .../ZopeInterface/zope/interface/interface.py      |   712 ++
 .../ZopeInterface/zope/interface/interfaces.py     |  1288 ++
 .../ZopeInterface/zope/interface/registry.py       |   530 +
 ThirdParty/ZopeInterface/zope/interface/ro.py      |    69 +
 .../ZopeInterface/zope/interface/tests/__init__.py |    13 +
 .../zope/interface/tests/advisory_testing.py       |    42 +
 .../ZopeInterface/zope/interface/tests/dummy.py    |    23 +
 .../ZopeInterface/zope/interface/tests/idummy.py   |    23 +
 .../ZopeInterface/zope/interface/tests/ifoo.py     |    26 +
 .../zope/interface/tests/ifoo_other.py             |    26 +
 .../ZopeInterface/zope/interface/tests/m1.py       |    21 +
 .../ZopeInterface/zope/interface/tests/m2.py       |    15 +
 .../ZopeInterface/zope/interface/tests/odd.py      |   129 +
 .../zope/interface/tests/test_adapter.py           |  1285 ++
 .../zope/interface/tests/test_advice.py            |   397 +
 .../zope/interface/tests/test_declarations.py      |  1576 +++
 .../zope/interface/tests/test_document.py          |   286 +
 .../zope/interface/tests/test_element.py           |    41 +
 .../zope/interface/tests/test_exceptions.py        |    75 +
 .../zope/interface/tests/test_interface.py         |  2089 +++
 .../zope/interface/tests/test_interfaces.py        |   115 +
 .../zope/interface/tests/test_odd_declarations.py  |   227 +
 .../zope/interface/tests/test_registry.py          |  2454 ++++
 .../zope/interface/tests/test_sorting.py           |    55 +
 .../zope/interface/tests/test_verify.py            |   548 +
 ThirdParty/ZopeInterface/zope/interface/verify.py  |   116 +
 ThirdParty/alglib/CMakeLists.txt                   |    10 +-
 ThirdParty/exodusII/vtkexodusII/CMakeLists.txt     |     2 +-
 ThirdParty/expat/vtkexpat/CMakeLists.txt           |     2 +-
 ThirdParty/freetype/vtkfreetype/CMakeLists.txt     |     6 +-
 ThirdParty/hdf5/CMakeLists.txt                     |     3 -
 ThirdParty/hdf5/vtkhdf5/CMakeLists.txt             |     2 +-
 ThirdParty/hdf5/vtkhdf5/c++/src/CMakeLists.txt     |     8 +-
 ThirdParty/hdf5/vtkhdf5/hl/c++/src/CMakeLists.txt  |     8 +-
 ThirdParty/hdf5/vtkhdf5/src/CMakeLists.txt         |     2 +-
 ThirdParty/hdf5/vtkhdf5/src/H5FDfamily.c           |     2 +-
 ThirdParty/hdf5/vtkhdf5/src/H5public.h             |     2 +-
 ThirdParty/jpeg/vtkjpeg/CMakeLists.txt             |     2 +-
 ThirdParty/jsoncpp/CMakeLists.txt                  |    33 +-
 ThirdParty/jsoncpp/json/json.h                     |  1855 ---
 ThirdParty/jsoncpp/vtk_jsoncpp.h.in                |    33 +
 ThirdParty/jsoncpp/vtkjsoncpp/CMakeLists.txt       |    32 +
 .../jsoncpp/{ => vtkjsoncpp}/json/json-forwards.h  |     0
 ThirdParty/jsoncpp/vtkjsoncpp/json/json.h          |  1855 +++
 ThirdParty/jsoncpp/{ => vtkjsoncpp}/jsoncpp.cpp    |     0
 ThirdParty/libproj4/CMakeLists.txt                 |     4 +-
 ThirdParty/libproj4/vtklibproj4/CMakeLists.txt     |    22 +-
 ThirdParty/libxml2/vtklibxml2/CMakeLists.txt       |     2 +-
 ThirdParty/libxml2/vtklibxml2/SAX2.c               |     4 +-
 ThirdParty/libxml2/vtklibxml2/xmllint.c            |     2 +-
 ThirdParty/libxml2/vtklibxml2/xmlregexp.c          |     2 +-
 ThirdParty/libxml2/vtklibxml2/xmlschemas.c         |     2 +-
 ThirdParty/mrmpi/mpistubs/CMakeLists.txt           |     2 +-
 ThirdParty/mrmpi/src/CMakeLists.txt                |     2 +-
 ThirdParty/netcdf/CMakeLists.txt                   |    24 +-
 ThirdParty/netcdf/vtk_netcdf.h                     |    21 -
 ThirdParty/netcdf/vtk_netcdf.h.in                  |    27 +
 ThirdParty/netcdf/vtk_netcdfcpp.h                  |    21 -
 ThirdParty/netcdf/vtk_netcdfcpp.h.in               |    27 +
 ThirdParty/netcdf/vtknetcdf/CMakeLists.txt         |     4 +-
 ThirdParty/netcdf/vtknetcdf/libsrc4/nc4file.c      |     4 +-
 ThirdParty/oggtheora/vtkoggtheora/CMakeLists.txt   |     6 +-
 .../vtkoggtheora/libogg-1.1.4/include/ogg/ogg.h    |     4 +-
 ThirdParty/png/vtkpng/CMakeLists.txt               |     2 +-
 ThirdParty/png/vtkpng/png.h                        |     6 +-
 ThirdParty/png/vtkpng/pngwio.c                     |     2 +-
 ThirdParty/sqlite/vtksqlite/CMakeLists.txt         |     2 +-
 ThirdParty/sqlite/vtksqlite/vtk_sqlite3.c          |     6 +-
 ThirdParty/tiff/vtktiff/CMakeLists.txt             |     4 +-
 ThirdParty/tiff/vtktiff/tif_config.h.in            |     9 +-
 ThirdParty/utf8/doc/ReleaseNotes                   |     3 +
 ThirdParty/utf8/source/utf8/checked.h              |    18 +
 .../xdmf2/vtkxdmf2/libsrc/XdmfValuesBinary.cxx     |     2 +-
 ThirdParty/zlib/vtkzlib/CMakeLists.txt             |     2 +-
 Utilities/DICOMParser/DICOMAppHelper.cxx           |     1 +
 Utilities/DICOMParser/DICOMCallback.h              |     2 +-
 Utilities/DICOMParser/DICOMParser.cxx              |     2 +-
 Utilities/DICOMParser/DICOMParserMap.h             |     2 +-
 Utilities/Doxygen/CMakeLists.txt                   |     9 +-
 Utilities/Doxygen/doc_header2doxygen.pl            |     1 +
 Utilities/EncodeString/vtkEncodeString.cxx         |     6 +-
 Utilities/KWSys/vtksys/CMakeLists.txt              |   154 +-
 Utilities/KWSys/vtksys/CommandLineArguments.hxx.in |    10 +-
 Utilities/KWSys/vtksys/Configure.hxx.in            |     4 +
 Utilities/KWSys/vtksys/Directory.cxx               |    17 +-
 Utilities/KWSys/vtksys/DynamicLoader.cxx           |    19 +-
 Utilities/KWSys/vtksys/Encoding.h.in               |    79 +
 Utilities/KWSys/vtksys/Encoding.hxx.in             |    56 +
 Utilities/KWSys/vtksys/EncodingC.c                 |    79 +
 Utilities/KWSys/vtksys/EncodingCXX.cxx             |    88 +
 Utilities/KWSys/vtksys/FStream.hxx.in              |   172 +
 Utilities/KWSys/vtksys/ProcessUNIX.c               |     6 +
 Utilities/KWSys/vtksys/ProcessWin32.c              |    78 +-
 Utilities/KWSys/vtksys/RegularExpression.cxx       |   114 +-
 Utilities/KWSys/vtksys/SharedForward.h.in          |     2 +-
 Utilities/KWSys/vtksys/SystemInformation.cxx       |   432 +-
 Utilities/KWSys/vtksys/SystemInformation.hxx.in    |    10 +-
 Utilities/KWSys/vtksys/SystemTools.cxx             |   606 +-
 Utilities/KWSys/vtksys/SystemTools.hxx.in          |    24 +-
 Utilities/KWSys/vtksys/auto_ptr.hxx.in             |    18 +
 Utilities/KWSys/vtksys/hashtable.hxx.in            |    18 +
 Utilities/KWSys/vtksys/kwsysPlatformTests.cmake    |     3 +
 Utilities/KWSys/vtksys/kwsysPlatformTestsCXX.cxx   |    75 +-
 Utilities/KWSys/vtksys/testDynamicLoader.cxx       |    10 +-
 Utilities/KWSys/vtksys/testEncoding.cxx            |   159 +
 Utilities/KWSys/vtksys/testIOS.cxx                 |     2 +-
 Utilities/KWSys/vtksys/testSystemInformation.cxx   |    25 +-
 Utilities/Maintenance/ArchiveTestingData.py        |   184 +
 Utilities/Maintenance/SourceTarball.bash           |   223 +
 .../Maintenance/VisualizeModuleDependencies.py     |   328 +
 Utilities/Maintenance/WhatModulesVTK.py            |   272 +-
 Utilities/MaterialLibrary/CMakeLists.txt           |   283 -
 .../CgShaders/FragmentIsotropicTorranceSparrow.cg  |   228 -
 .../MaterialLibrary/CgShaders/FragmentTexture.cg   |    20 -
 Utilities/MaterialLibrary/CgShaders/Twisted.cg     |    92 -
 .../MaterialLibrary/CgShaders/VertPassThrough.cg   |    60 -
 .../CgShaders/VertTexPassThrough.cg                |    63 -
 .../MaterialLibrary/CgShaders/VertexLighting.cg    |    56 -
 Utilities/MaterialLibrary/CgShaders/cg_sinewave.cg |    77 -
 .../GLSLShaders/TestAppVarFrag.glsl                |   110 -
 .../GLSLShaders/TestMatrixFrag.glsl                |    89 -
 .../GLSLShaders/TestScalarVectorFrag.glsl          |   120 -
 .../MaterialLibrary/GLSLShaders/TestVertex.glsl    |    62 -
 .../GLSLShaders/TestVtkPropertyFrag.glsl           |   113 -
 Utilities/MaterialLibrary/GLSLShaders/Twisted.glsl |    39 -
 Utilities/MaterialLibrary/Materials/CgLighting.xml |    28 -
 Utilities/MaterialLibrary/Materials/CgSinewave.xml |    31 -
 Utilities/MaterialLibrary/Materials/CgTwisted.xml  |    30 -
 .../MaterialLibrary/Materials/GLSLTwisted.xml      |    15 -
 Utilities/MaterialLibrary/ProcessShader.cxx        |   231 -
 .../Repository/OrangeBook/3DLabsLicense.txt        |    37 -
 .../Repository/OrangeBook/Ch06/Ch06Brick.xml       |    56 -
 .../Repository/OrangeBook/Ch06/Ch06BrickFrag.glsl  |    37 -
 .../Repository/OrangeBook/Ch06/Ch06BrickVert.glsl  |    40 -
 .../Repository/OrangeBook/Ch10/Ch10Earth1DTex.xml  |    23 -
 .../OrangeBook/Ch10/Ch10Earth1DTexFrag.glsl        |    18 -
 .../OrangeBook/Ch10/Ch10Earth1DTexVert.glsl        |    36 -
 .../Repository/OrangeBook/Ch10/Ch10Earth3DTex.xml  |    48 -
 .../OrangeBook/Ch10/Ch10Earth3DTexFrag.glsl        |    41 -
 .../OrangeBook/Ch10/Ch10Earth3DTexVert.glsl        |    33 -
 .../Repository/OrangeBook/Ch10/Ch10EnvMap.xml      |    23 -
 .../Repository/OrangeBook/Ch10/Ch10EnvMapFrag.glsl |    62 -
 .../Repository/OrangeBook/Ch10/Ch10EnvMapVert.glsl |    25 -
 .../Repository/OrangeBook/Ch11/BumpMap.xml         |    34 -
 .../OrangeBook/Ch11/Ch11BumpMapFrag.glsl           |    41 -
 .../OrangeBook/Ch11/Ch11BumpMapVert.glsl           |    42 -
 .../OrangeBook/Ch11/Ch11LatticeFrag.glsl           |    27 -
 .../OrangeBook/Ch11/Ch11LatticeVert.glsl           |    39 -
 .../Repository/OrangeBook/Ch11/Ch11StripeFrag.glsl |    35 -
 .../Repository/OrangeBook/Ch11/Ch11StripeVert.glsl |    39 -
 .../OrangeBook/Ch11/Ch11ToyBallFrag.glsl           |    78 -
 .../OrangeBook/Ch11/Ch11ToyBallVert.glsl           |    20 -
 .../Repository/OrangeBook/Ch11/Lattice.xml         |    50 -
 .../Repository/OrangeBook/Ch11/Stripe.xml          |    51 -
 .../Repository/OrangeBook/Ch11/ToyBall.xml         |    49 -
 .../Repository/OrangeBook/Ch12/Ch12Cloud.xml       |    39 -
 .../Repository/OrangeBook/Ch12/Ch12CloudFrag.glsl  |    28 -
 .../Repository/OrangeBook/Ch12/Ch12CloudVert.glsl  |    25 -
 .../Repository/OrangeBook/Ch12/Ch12Fire.xml        |    40 -
 .../Repository/OrangeBook/Ch12/Ch12FireFrag.glsl   |    33 -
 .../Repository/OrangeBook/Ch12/Ch12FireVert.glsl   |    25 -
 .../Repository/OrangeBook/Ch12/Ch12Granite.xml     |    38 -
 .../OrangeBook/Ch12/Ch12GraniteFrag.glsl           |    24 -
 .../OrangeBook/Ch12/Ch12GraniteVert.glsl           |    29 -
 .../Repository/OrangeBook/Ch12/Ch12Wood.xml        |    46 -
 .../Repository/OrangeBook/Ch12/Ch12WoodFrag.glsl   |    52 -
 .../Repository/OrangeBook/Ch12/Ch12WoodVert.glsl   |    25 -
 .../Repository/OrangeBook/Ch13/Ch13Cloud.xml       |    20 -
 .../OrangeBook/Ch13/Ch13CloudAnimFrag.glsl         |    29 -
 .../OrangeBook/Ch13/Ch13CloudAnimVert.glsl         |    25 -
 .../Repository/OrangeBook/Ch13/Ch13Particle.xml    |    33 -
 .../OrangeBook/Ch13/Ch13ParticleFrag.glsl          |    17 -
 .../OrangeBook/Ch13/Ch13ParticleVert.glsl          |    38 -
 .../Repository/OrangeBook/Ch13/Ch13Wobble.xml      |    29 -
 .../Repository/OrangeBook/Ch13/Ch13WobbleFrag.glsl |    71 -
 .../Repository/OrangeBook/Ch13/Ch13WobbleVert.glsl |    33 -
 .../Repository/OrangeBook/Ch14/Ch14AABrick.xml     |    42 -
 .../OrangeBook/Ch14/Ch14AABrickFrag.glsl           |    46 -
 .../OrangeBook/Ch14/Ch14AABrickVert.glsl           |    41 -
 .../Repository/OrangeBook/Ch14/Ch14Adaptive.xml    |    12 -
 .../OrangeBook/Ch14/Ch14AdaptiveAAFrag.glsl        |    25 -
 .../OrangeBook/Ch14/Ch14AdaptiveAAVert.glsl        |    28 -
 .../Repository/OrangeBook/Ch15/Ch15Gooch.xml       |    52 -
 .../Repository/OrangeBook/Ch15/Ch15GoochFrag.glsl  |    34 -
 .../Repository/OrangeBook/Ch15/Ch15GoochVert.glsl  |    26 -
 .../Repository/OrangeBook/Ch15/Ch15Hatch.xml       |    37 -
 .../Repository/OrangeBook/Ch15/Ch15HatchFrag.glsl  |    48 -
 .../Repository/OrangeBook/Ch15/Ch15HatchVert.glsl  |    34 -
 .../Repository/OrangeBook/Ch15/Ch15Julia.xml       |    29 -
 .../Repository/OrangeBook/Ch15/Ch15JuliaFrag.glsl  |    56 -
 .../Repository/OrangeBook/Ch15/Ch15JuliaVert.glsl  |    35 -
 .../Repository/OrangeBook/Ch15/Ch15Mandel.xml      |    34 -
 .../Repository/OrangeBook/Ch15/Ch15MandelFrag.glsl |    54 -
 .../Repository/OrangeBook/Ch15/Ch15MandelVert.glsl |    40 -
 .../Repository/OrangeBook/Readme.txt               |     7 -
 Utilities/MaterialLibrary/Repository/README.txt    |     6 -
 .../Repository/TestCg/AppVarFragmentTest.cg        |    51 -
 .../Repository/TestCg/AppVarTest.xml               |    62 -
 .../Repository/TestCg/MatrixFragmentTest.cg        |    79 -
 .../Repository/TestCg/MatrixTest.xml               |    64 -
 .../Repository/TestCg/ModelViewXFormTest.xml       |    36 -
 .../Repository/TestCg/ModelViewXFormVertexTest.cg  |    83 -
 .../Repository/TestCg/ScalarVectorFragmentTest.cg  |    79 -
 .../Repository/TestCg/ScalarVectorTest.xml         |    61 -
 .../Repository/TestCg/StructureFragmentTest.cg     |    77 -
 .../Repository/TestCg/StructureTest.xml            |    65 -
 .../MaterialLibrary/Repository/TestCg/Vertex.cg    |    50 -
 .../Repository/TestCg/VtkPropertyFragmentTest.cg   |    79 -
 .../Repository/TestCg/VtkPropertyTest.xml          |    76 -
 .../Repository/TestGLSL/AppVarTestFragment.glsl    |   110 -
 .../Repository/TestGLSL/AppVarTestFragment.xml     |    49 -
 .../Repository/TestGLSL/MatrixTestFragment.glsl    |    86 -
 .../Repository/TestGLSL/MatrixTestFragment.xml     |    55 -
 .../TestGLSL/ScalarVectorTestFragment.glsl         |   120 -
 .../TestGLSL/ScalarVectorTestFragment.xml          |    78 -
 .../Repository/TestGLSL/Vertex.glsl                |    62 -
 .../TestGLSL/VtkPropertyTestFragment.glsl          |   113 -
 .../TestGLSL/VtkPropertyTestFragment.xml           |    66 -
 .../Repository/TestGLSL/XFormsTestFragment.xml     |    48 -
 .../Repository/TestGLSL/XFormsTestVertex.glsl      |    77 -
 .../MaterialLibrary/Repository/Textures/earth.ppm  |   Bin 393231 -> 0 bytes
 Utilities/MaterialLibrary/module.cmake             |     5 -
 Utilities/MaterialLibrary/vtk-forward.c.in         |    29 -
 .../MaterialLibrary/vtkMaterialLibraryMacro.h.in   |    23 -
 .../vtkMaterialXMLLibrary.h.forcebuild.in          |     5 -
 .../MaterialLibrary/vtkShaderCodeLibraryMacro.h.in |    21 -
 Utilities/MetaIO/vtkmetaio/CMakeLists.txt          |    10 +-
 Utilities/MetaIO/vtkmetaio/metaForm.cxx            |    21 +-
 Utilities/MetaIO/vtkmetaio/metaForm.h              |    10 +-
 Utilities/MetaIO/vtkmetaio/metaImage.h             |    30 +-
 Utilities/MetaIO/vtkmetaio/metaMesh.cxx            |     2 +-
 Utilities/MetaIO/vtkmetaio/metaOutput.cxx          |     2 +-
 Utilities/MetaIO/vtkmetaio/metaTransform.cxx       |     2 +-
 Utilities/MetaIO/vtkmetaio/metaTube.cxx            |     2 +-
 Utilities/MetaIO/vtkmetaio/metaUtils.cxx           |    22 +-
 .../MetaIO/vtkmetaio/tests/testMeta10Contour.cxx   |     2 +-
 .../MetaIO/vtkmetaio/tests/testMeta11Form.cxx      |    16 +-
 .../MetaIO/vtkmetaio/tests/testMeta12Array.cxx     |     2 +-
 .../MetaIO/vtkmetaio/tests/testMeta1Utils.cxx      |     2 +-
 .../MetaIO/vtkmetaio/tests/testMeta2Object.cxx     |     2 +-
 .../MetaIO/vtkmetaio/tests/testMeta3Image.cxx      |     2 +-
 Utilities/MetaIO/vtkmetaio/tests/testMeta4Tube.cxx |     2 +-
 Utilities/MetaIO/vtkmetaio/tests/testMeta5Blob.cxx |     2 +-
 .../MetaIO/vtkmetaio/tests/testMeta6Surface.cxx    |     2 +-
 Utilities/MetaIO/vtkmetaio/tests/testMeta7Line.cxx |     2 +-
 .../MetaIO/vtkmetaio/tests/testMeta8Scene.cxx      |     2 +-
 Utilities/Python/CMakeLists.txt                    |    16 +
 Utilities/Release/CMakeLists.txt                   |   124 -
 Utilities/Release/VTKInstall.bmp                   |   Bin 25818 -> 0 bytes
 Utilities/Scripts/SetupExternalData.sh             |    92 +
 Utilities/Scripts/git-gerrit-push                  |   136 +-
 Utilities/Scripts/pre-commit                       |    71 +
 Utilities/SetupForDevelopment.sh                   |     1 +
 Utilities/octree/octree                            |     8 -
 Utilities/octree/octree_cursor.cxx                 |     8 -
 Utilities/octree/octree_cursor.h                   |     5 +-
 Utilities/octree/octree_iterator.cxx               |    32 -
 Utilities/octree/octree_iterator.h                 |     4 -
 Utilities/octree/octree_path.cxx                   |    41 -
 Utilities/octree/octree_path.h                     |     4 -
 Views/Infovis/CMakeLists.txt                       |     4 +
 Views/Infovis/Testing/Cxx/CMakeLists.txt           |    39 +-
 Views/Infovis/Testing/Cxx/TestColumnTree.cxx       |   164 +
 Views/Infovis/Testing/Cxx/TestDendrogramItem.cxx   |   117 +
 Views/Infovis/Testing/Cxx/TestDendrogramOnly.cxx   |   115 -
 .../Testing/Cxx/TestHeatmapCategoryLegend.cxx      |    87 +
 Views/Infovis/Testing/Cxx/TestHeatmapItem.cxx      |   119 +
 Views/Infovis/Testing/Cxx/TestHeatmapOnly.cxx      |   117 -
 .../Testing/Cxx/TestHeatmapScalarLegend.cxx        |    86 +
 .../Testing/Cxx/TestParallelCoordinatesView.cxx    |   120 +
 Views/Infovis/Testing/Cxx/TestTanglegramItem.cxx   |   188 +
 .../Testing/Cxx/TestTreeHeatmapAutoCollapse.cxx    |     5 +
 Views/Infovis/Testing/Cxx/TestTreeHeatmapItem.cxx  |     4 +
 .../Baseline/TestCoincidentGraphLayoutView.png.md5 |     1 +
 .../Testing/Data/Baseline/TestColumnTree.png.md5   |     1 +
 .../Data/Baseline/TestConeLayoutStrategy.png.md5   |     1 +
 .../Data/Baseline/TestConeLayoutStrategy_1.png.md5 |     1 +
 .../Data/Baseline/TestDendrogramItem.png.md5       |     1 +
 .../Data/Baseline/TestDendrogramOnly.png.md5       |     1 +
 .../Data/Baseline/TestDendrogramOnly_1.png.md5     |     1 +
 .../Data/Baseline/TestGraphLayoutView.png.md5      |     1 +
 .../Baseline/TestHeatmapCategoryLegend.png.md5     |     1 +
 .../Testing/Data/Baseline/TestHeatmapItem.png.md5  |     1 +
 .../Testing/Data/Baseline/TestHeatmapOnly.png.md5  |     1 +
 .../Data/Baseline/TestHeatmapScalarLegend.png.md5  |     1 +
 .../Baseline/TestHierarchicalGraphView.png.md5     |     1 +
 .../Testing/Data/Baseline/TestIcicleView.png.md5   |     1 +
 .../Testing/Data/Baseline/TestIcicleView_1.png.md5 |     1 +
 .../Testing/Data/Baseline/TestIcicleView_2.png.md5 |     1 +
 .../Testing/Data/Baseline/TestIcicleView_3.png.md5 |     1 +
 .../Testing/Data/Baseline/TestIcicleView_4.png.md5 |     1 +
 .../Testing/Data/Baseline/TestIcicleView_5.png.md5 |     1 +
 .../Data/Baseline/TestIconGlyphFilter.png.md5      |     1 +
 .../TestInteractorStyleTreeMapHover.png.md5        |     1 +
 .../Testing/Data/Baseline/TestNetworkViews.png.md5 |     1 +
 .../Baseline/TestParallelCoordinatesView.png.md5   |     1 +
 .../Testing/Data/Baseline/TestRenderView.png.md5   |     1 +
 .../Baseline/TestSpanTreeLayoutStrategy.png.md5    |     1 +
 .../Data/Baseline/TestTanglegramItem.png.md5       |     1 +
 .../Baseline/TestTreeHeatmapAutoCollapse.png.md5   |     1 +
 .../Data/Baseline/TestTreeHeatmapItem.png.md5      |     1 +
 .../Testing/Data/Baseline/TestTreeMapView.png.md5  |     1 +
 .../Data/Baseline/TestTreeMapView_1.png.md5        |     1 +
 .../Data/Baseline/TestTreeMapView_2.png.md5        |     1 +
 .../Data/Baseline/TestTreeMapView_3.png.md5        |     1 +
 .../Testing/Data/Baseline/TestTreeRingView.png.md5 |     1 +
 .../Data/Baseline/TestTreeRingView_1.png.md5       |     1 +
 Views/Infovis/module.cmake                         |     4 +
 Views/Infovis/vtkDendrogramItem.cxx                |  1542 +++
 Views/Infovis/vtkDendrogramItem.h                  |   360 +
 Views/Infovis/vtkHeatmapItem.cxx                   |  1240 ++
 Views/Infovis/vtkHeatmapItem.h                     |   258 +
 .../vtkParallelCoordinatesRepresentation.cxx       |    41 +-
 Views/Infovis/vtkParallelCoordinatesView.cxx       |     2 +-
 Views/Infovis/vtkSCurveSpline.cxx                  |   256 +
 Views/Infovis/vtkSCurveSpline.h                    |    69 +
 Views/Infovis/vtkTanglegramItem.cxx                |   746 ++
 Views/Infovis/vtkTanglegramItem.h                  |   198 +
 Views/Infovis/vtkTreeHeatmapItem.cxx               |  1582 +--
 Views/Infovis/vtkTreeHeatmapItem.h                 |   215 +-
 Views/Qt/CMakeLists.txt                            |    24 +-
 Views/Qt/vtkQtTableRepresentation.cxx              |     2 +-
 Views/Qt/vtkQtView.cxx                             |     5 +
 Wrapping/Java/Baseline/Cone.png.md5                |     1 +
 Wrapping/Java/CMakeLists.txt                       |   112 +-
 Wrapping/Java/JavaInstall.cmake.in                 |    67 +
 Wrapping/Java/Maven/README.txt.in                  |    93 +
 Wrapping/Java/Maven/pom.xml.in                     |   126 +
 .../Java/vtk/rendering/awt/vtkAwtComponent.java    |    19 +
 .../rendering/jogl/vtkAbstractJoglComponent.java   |   103 +
 .../vtk/rendering/jogl/vtkJoglCanvasComponent.java |    24 +
 .../vtk/rendering/jogl/vtkJoglPanelComponent.java  |    24 +
 .../vtk/rendering/swt/vtkInternalSwtComponent.java |     6 +
 .../Java/vtk/rendering/swt/vtkSwtComponent.java    |     5 +-
 .../swt/vtkSwtInteractorForwarderDecorator.java    |     5 +-
 .../Java/vtk/rendering/vtkAbstractComponent.java   |    29 +-
 .../vtk/rendering/vtkAbstractEventInterceptor.java |    68 +
 Wrapping/Java/vtk/rendering/vtkComponent.java      |     5 +-
 .../Java/vtk/rendering/vtkEventInterceptor.java    |     5 +-
 .../Java/vtk/rendering/vtkInteractorForwarder.java |     7 +-
 .../vtk/sample/rendering/JoglConeRendering.java    |   178 +
 .../rendering/annotation/LabeledCubeAxesActor.java |     2 +
 Wrapping/Python/CMakeLists.txt                     |   122 +-
 Wrapping/Python/PythonInstall.cmake.in             |    18 -
 Wrapping/Python/README.txt                         |   106 +-
 Wrapping/Python/setup.py.in                        |   144 -
 Wrapping/Python/setup_install_paths.py             |   137 -
 .../Python/vtk/qt4/QVTKRenderWindowInteractor.py   |    15 +-
 Wrapping/Python/vtkPythonAppInit.cxx               |    22 +-
 Wrapping/PythonCore/PyVTKObject.cxx                |     2 +-
 Wrapping/PythonCore/PyVTKSpecialObject.h           |     2 +-
 Wrapping/PythonCore/vtkPythonArgs.cxx              |    20 +-
 Wrapping/PythonCore/vtkPythonArgs.h                |     6 +
 Wrapping/PythonCore/vtkPythonOverload.cxx          |     4 +-
 Wrapping/PythonCore/vtkPythonUtil.cxx              |    30 +-
 Wrapping/Tcl/CMakeLists.txt                        |    22 +-
 Wrapping/Tcl/vtkTkAppInit.cxx                      |     2 +-
 Wrapping/Tools/README.txt                          |    20 +-
 Wrapping/Tools/hints                               |     6 +
 Wrapping/Tools/lex.yy.c                            |  3560 +++---
 Wrapping/Tools/vtkParse.h                          |     5 +
 Wrapping/Tools/vtkParse.l                          |   253 +-
 Wrapping/Tools/vtkParse.tab.c                      | 12740 ++++++++++++-------
 Wrapping/Tools/vtkParse.y                          |   771 +-
 Wrapping/Tools/vtkParseData.c                      |    17 +-
 Wrapping/Tools/vtkParseData.h                      |    25 +-
 Wrapping/Tools/vtkParseExtras.c                    |   151 +-
 Wrapping/Tools/vtkParseJava.c                      |    18 +-
 Wrapping/Tools/vtkParseJavaBeans.c                 |     6 +-
 Wrapping/Tools/vtkParseMain.c                      |     6 +
 Wrapping/Tools/vtkParsePreprocess.c                |   992 +-
 Wrapping/Tools/vtkParsePreprocess.h                |     6 +-
 Wrapping/Tools/vtkParseString.c                    |   427 +-
 Wrapping/Tools/vtkParseString.h                    |   188 +-
 Wrapping/Tools/vtkParseType.h                      |    26 +-
 Wrapping/Tools/vtkWrap.c                           |    20 +-
 Wrapping/Tools/vtkWrap.h                           |     2 +
 Wrapping/Tools/vtkWrapJava.c                       |     6 +-
 Wrapping/Tools/vtkWrapPython.c                     |   163 +-
 Wrapping/Tools/vtkWrapPythonInit.c                 |    86 +-
 Wrapping/Tools/vtkWrapText.c                       |    28 +-
 5424 files changed, 449166 insertions(+), 65398 deletions(-)

diff --git a/.ExternalData/README.rst b/.ExternalData/README.rst
new file mode 100644
index 0000000..2235986
--- /dev/null
+++ b/.ExternalData/README.rst
@@ -0,0 +1,6 @@
+.ExternalData
+=============
+
+The VTK ``.ExternalData`` directory is an object store for the
+CMake ExternalData module that VTK uses to manage test input
+and baseline data.
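
The ExternalData module referenced above resolves DATA{} arguments against small
content-link files (for example input.png.md5) checked into the source tree and
downloads the matching objects into a store such as this directory. A minimal
CMake sketch of that wiring; the target name, test command, and URL template are
illustrative assumptions, not taken from this commit:

    include(ExternalData)
    # In-tree object store where fetched files are cached by hash.
    set(ExternalData_OBJECT_STORES "${CMAKE_SOURCE_DIR}/.ExternalData")
    # Server layout assumed for illustration; %(algo) and %(hash) are filled in per object.
    set(ExternalData_URL_TEMPLATES "http://www.example.com/files/%(algo)/%(hash)")
    # DATA{} expands the content link Testing/Data/input.png.md5 to the real file.
    ExternalData_Add_Test(VTKTestData
      NAME TestReader
      COMMAND my_test_exe DATA{Testing/Data/input.png})
    # Custom target that performs the downloads before the tests run.
    ExternalData_Add_Target(VTKTestData)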
diff --git a/Accelerators/Dax/CMakeLists.txt b/Accelerators/Dax/CMakeLists.txt
new file mode 100644
index 0000000..0af6aff
--- /dev/null
+++ b/Accelerators/Dax/CMakeLists.txt
@@ -0,0 +1,156 @@
+##=============================================================================
+##
+##  Copyright (c) Kitware, Inc.
+##  All rights reserved.
+##  See LICENSE.txt for details.
+##
+##  This software is distributed WITHOUT ANY WARRANTY; without even
+##  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+##  PURPOSE.  See the above copyright notice for more information.
+##
+##  Copyright 2012 Sandia Corporation.
+##  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+##  the U.S. Government retains certain rights in this software.
+##
+##=============================================================================
+cmake_minimum_required(VERSION 2.8)
+
+#ensure we link against our dependencies
+include(module.cmake)
+
+find_package(Dax REQUIRED)
+
+DaxConfigureSerial(REQUIRED)
+DaxConfigureCuda()
+DaxConfigureTBB()
+DaxConfigureOpenMP()
+
+# Collect a list of available backends.
+set(DAX_BACKENDS)
+if(${Dax_ENABLE_CUDA})
+  list(APPEND DAX_BACKENDS CUDA)
+endif()
+if(${Dax_ENABLE_TBB})
+  list(APPEND DAX_BACKENDS TBB)
+endif()
+if(${Dax_ENABLE_OPENMP})
+  list(APPEND DAX_BACKENDS OPENMP)
+endif()
+list(APPEND DAX_BACKENDS SERIAL)
+
+# Pick the first backend found in the list as the default backend.
+list(GET DAX_BACKENDS 0 DEFAULT_BACKEND)
+
+# Display available options for user selection.
+set(VTK_DAX_BACKEND ${DEFAULT_BACKEND} CACHE STRING
+  "Choose from available backend acclerators")
+set_property(CACHE VTK_DAX_BACKEND PROPERTY STRINGS ${DAX_BACKENDS})
+
+# Turn the picked option on and turn off the others.
+foreach(backend ${DAX_BACKENDS})
+  if(${backend} STREQUAL ${VTK_DAX_BACKEND})
+    set(VTK_DAX_PICKED_${backend} TRUE)
+  else()
+    set(VTK_DAX_PICKED_${backend} FALSE)
+  endif()
+endforeach()
+
+# Set up the configuration file to select the appropriate device adapter.
+#we prefer CUDA over everything else, then TBB and OpenMP
+set (DAX_DISABLE_BOOST_SP_THREADS 0)
+if(${VTK_DAX_PICKED_CUDA})
+  set (DAX_DISABLE_BOOST_SP_THREADS 1)
+  set (DAX_DEVICE_ADAPTER "DAX_DEVICE_ADAPTER_CUDA")
+elseif(${VTK_DAX_PICKED_TBB})
+  set (DAX_DEVICE_ADAPTER "DAX_DEVICE_ADAPTER_TBB")
+elseif(${VTK_DAX_PICKED_OPENMP})
+  set (DAX_DEVICE_ADAPTER "DAX_DEVICE_ADAPTER_OPENMP")
+else()
+  set (DAX_DEVICE_ADAPTER "DAX_DEVICE_ADAPTER_SERIAL")
+endif()
+
+configure_file("${CMAKE_CURRENT_SOURCE_DIR}/vtkDaxConfig.h.in"
+  "${CMAKE_CURRENT_BINARY_DIR}/vtkDaxConfig.h" @ONLY)
+
+#needed for our export macros
+include_directories(${${vtk-module}_DEPENDS_INCLUDE_DIRS}
+                    ${CMAKE_CURRENT_BINARY_DIR}
+                    ${CMAKE_CURRENT_SOURCE_DIR}
+                    ${Dax_INCLUDE_DIRS}
+                   )
+
+set(headers
+  vtkDaxDetailCommon.h
+  vtkDaxMarchingCubesImpl.h
+  vtkDaxThresholdImpl.h
+  vtkDaxConfig.h
+  )
+
+#implementation of the algorithms for cpu accelerators
+set(cpu_accelerator_srcs
+  vtkDaxMarchingCubesImpl.cxx
+  vtkDaxThresholdImpl.cxx
+  )
+
+set(vtk_srcs
+  vtkDaxObjectFactory.h
+  vtkDaxMarchingCubes.cxx
+  vtkDaxThreshold.cxx
+  )
+
+set_source_files_properties(
+  ${headers}
+  ${cpu_accelerator_srcs}
+  WRAP_EXCLUDE
+  )
+
+#we are building with CUDA support
+if(${VTK_DAX_PICKED_CUDA})
+
+  #implementation of the algorithms for gpu accelerators
+  set(cuda_accelerator_srcs
+    vtkDaxMarchingCubesImpl.cu
+    vtkDaxThresholdImpl.cu
+    )
+
+
+  #follow Piston's example of how to build the CUDA subsection with nvcc
+  if(BUILD_SHARED_LIBS)
+    set(GPGPU_BUILD_TYPE SHARED)
+  endif()
+  set(CUDA_ATTACH_VS_BUILD_RULE_TO_CUDA_FILE OFF) #otherwise C_SOURCES is empty in VS
+  vtk_module_impl()
+
+  cuda_compile(cuda_compiled_srcs
+               ${cuda_accelerator_srcs}
+               ${headers}
+               ${GPGPU_BUILD_TYPE})
+
+  set_source_files_properties(
+    ${cuda_compiled_srcs}
+    ${cuda_accelerator_srcs}
+    WRAP_EXCLUDE
+    )
+
+  vtk_module_library(vtkAcceleratorsDax ${vtk_srcs} ${cuda_compiled_srcs}
+                                        ${headers})
+  target_link_libraries(vtkAcceleratorsDax ${CUDA_LIBRARIES})
+
+else()
+
+  vtk_module_library(vtkAcceleratorsDax ${vtk_srcs}
+                                        ${cpu_accelerator_srcs} ${headers})
+
+endif()
+
+#install the headers required to write your own Dax VTK filter
+if(NOT VTK_INSTALL_NO_DEVELOPMENT)
+  install(DIRECTORY
+    ${CMAKE_CURRENT_SOURCE_DIR}/vtkToDax
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}
+    COMPONENT Development)
+  install(DIRECTORY
+    ${CMAKE_CURRENT_SOURCE_DIR}/daxToVtk
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}
+    COMPONENT Development)
+endif()
diff --git a/Accelerators/Dax/LICENSE.txt b/Accelerators/Dax/LICENSE.txt
new file mode 100644
index 0000000..b3c7a19
--- /dev/null
+++ b/Accelerators/Dax/LICENSE.txt
@@ -0,0 +1,54 @@
+Dax License Version 1.0
+========================================================================
+
+Copyright (c) 2012,
+Sandia Corporation, Kitware Inc., University of California Davis
+All rights reserved.
+
+Sandia National Laboratories, New Mexico
+PO Box 5800
+Albuquerque, NM 87185
+USA
+
+Kitware Inc.
+28 Corporate Drive
+Clifton Park, NY 12065
+USA
+
+University of California, Davis
+One Shields Avenue
+Davis, CA 95616
+USA
+
+Under the terms of Contract DE-AC04-94AL85000, there is a
+non-exclusive license for use of this work by or on behalf of the
+U.S. Government.  
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+   notice, this list of conditions and the following disclaimer.
+
+ * Redistributions in binary form must reproduce the above copyright
+   notice, this list of conditions and the following disclaimer in the
+   documentation and/or other materials provided with the
+   distribution.
+
+ * Neither the name of Kitware nor the names of any contributors may
+   be used to endorse or promote products derived from this software
+   without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR
+CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
+EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
+PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+========================================================================
diff --git a/Accelerators/Dax/Testing/Cxx/CMakeLists.txt b/Accelerators/Dax/Testing/Cxx/CMakeLists.txt
new file mode 100644
index 0000000..395008f
--- /dev/null
+++ b/Accelerators/Dax/Testing/Cxx/CMakeLists.txt
@@ -0,0 +1,8 @@
+vtk_add_test_cxx(
+  TestDaxThreshold.cxx
+  TestDaxThreshold2.cxx
+  TestDaxMarchingCubes.cxx
+  )
+
+
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Accelerators/Dax/Testing/Cxx/TestDaxMarchingCubes.cxx b/Accelerators/Dax/Testing/Cxx/TestDaxMarchingCubes.cxx
new file mode 100644
index 0000000..b5073ad
--- /dev/null
+++ b/Accelerators/Dax/Testing/Cxx/TestDaxMarchingCubes.cxx
@@ -0,0 +1,92 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#include "vtkActor.h"
+#include "vtkCellData.h"
+#include "vtkDaxMarchingCubes.h"
+#include "vtkImageData.h"
+#include "vtkImageMandelbrotSource.h"
+#include "vtkNew.h"
+#include "vtkPointData.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkRegressionTestImage.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+
+namespace
+{
+template<typename T>
+int RunVTKPipeline(T *t, int argc, char* argv[])
+{
+  vtkNew<vtkRenderer> ren;
+  vtkNew<vtkRenderWindow> renWin;
+  vtkNew<vtkRenderWindowInteractor> iren;
+
+  renWin->AddRenderer(ren.GetPointer());
+  iren->SetRenderWindow(renWin.GetPointer());
+
+  vtkNew<vtkDaxMarchingCubes> cubes;
+
+  cubes->SetInputConnection(t->GetOutputPort());
+  cubes->SetNumberOfContours(1);
+  cubes->SetValue(0,50.5f);
+
+  vtkNew<vtkPolyDataMapper> mapper;
+  mapper->SetInputConnection(cubes->GetOutputPort());
+
+  vtkNew<vtkActor> actor;
+  actor->SetMapper(mapper.GetPointer());
+
+  ren->AddActor(actor.GetPointer());
+  ren->ResetCamera();
+  renWin->Render();
+
+  int retVal = vtkRegressionTestImage(renWin.GetPointer());
+  if(retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    iren->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return (!retVal);
+}
+
+} // Anonymous namespace
+
+
+
+int TestDaxMarchingCubes(int argc, char* argv[])
+{
+  //create the sample grid
+  vtkNew<vtkImageMandelbrotSource> src;
+  src->SetWholeExtent(0,250,0,250,0,250);
+  src->Update(); //required so we can set the active scalars
+
+  //set Iterations as the active scalar, otherwise we don't have an array
+  //to contour on
+  vtkImageData* data = vtkImageData::SafeDownCast(src->GetOutputDataObject(0));
+  if(data->GetPointData()->HasArray("Iterations") == 0)
+    {
+    //vtkImageMandelbrotSource has changed and this test needs updating
+    return (!vtkRegressionTester::FAILED); //FAILED == 0, so this yields a nonzero (failing) exit code
+    }
+
+  //setting active scalars
+  data->GetPointData()->SetActiveScalars("Iterations");
+
+  //run the pipeline
+  return RunVTKPipeline(src.GetPointer(),argc,argv);
+}
diff --git a/Accelerators/Dax/Testing/Cxx/TestDaxThreshold.cxx b/Accelerators/Dax/Testing/Cxx/TestDaxThreshold.cxx
new file mode 100644
index 0000000..4480f3d
--- /dev/null
+++ b/Accelerators/Dax/Testing/Cxx/TestDaxThreshold.cxx
@@ -0,0 +1,107 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#include "vtkActor.h"
+#include "vtkDataSetSurfaceFilter.h"
+#include "vtkDaxThreshold.h"
+#include "vtkFloatArray.h"
+#include "vtkImageData.h"
+#include "vtkMath.h"
+#include "vtkNew.h"
+#include "vtkPointData.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkRegressionTestImage.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkThreshold.h"
+#include "vtkTrivialProducer.h"
+
+namespace
+{
+  void fillElevationArray(vtkFloatArray* elevation, vtkImageData* grid)
+  {
+    elevation->SetName("Elevation");
+    const vtkIdType size = grid->GetNumberOfPoints();
+    elevation->SetNumberOfValues(size);
+    double pos[3]={0,0,0};
+    for(vtkIdType i=0; i < size; ++i)
+      {
+      grid->GetPoint(i,pos);
+      elevation->SetValue(i,sqrt(vtkMath::Dot(pos,pos)));
+      }
+  }
+
+  int RunVTKPipeline(vtkImageData* grid, int argc, char* argv[])
+  {
+    vtkNew<vtkRenderer> ren;
+    vtkNew<vtkRenderWindow> renWin;
+    vtkNew<vtkRenderWindowInteractor> iren;
+
+    renWin->AddRenderer(ren.GetPointer());
+    iren->SetRenderWindow(renWin.GetPointer());
+
+    //compute an elevation array
+    vtkNew<vtkFloatArray> elevationPoints;
+    fillElevationArray(elevationPoints.GetPointer(), grid);
+    grid->GetPointData()->AddArray(elevationPoints.GetPointer());
+
+    vtkNew<vtkTrivialProducer> producer;
+    producer->SetOutput(grid);
+
+    vtkNew<vtkDaxThreshold> threshold;
+    threshold->SetInputConnection(producer->GetOutputPort());
+    threshold->SetPointsDataTypeToFloat();
+    threshold->AllScalarsOn();
+    threshold->ThresholdBetween(0,100);
+    threshold->SetInputArrayToProcess(0, 0, 0, vtkDataObject::FIELD_ASSOCIATION_POINTS,"Elevation");
+
+    vtkNew<vtkDataSetSurfaceFilter> surface;
+    surface->SetInputConnection(threshold->GetOutputPort());
+
+    vtkNew<vtkPolyDataMapper> mapper;
+    mapper->SetInputConnection(surface->GetOutputPort());
+
+    vtkNew<vtkActor> actor;
+    actor->SetMapper(mapper.GetPointer());
+
+    ren->AddActor(actor.GetPointer());
+    ren->ResetCamera();
+    renWin->Render();
+
+    int retVal = vtkRegressionTestImage(renWin.GetPointer());
+    if(retVal == vtkRegressionTester::DO_INTERACTOR)
+      {
+      iren->Start();
+      retVal = vtkRegressionTester::PASSED;
+      }
+    return (!retVal);
+  }
+
+} // Anonymous namespace
+
+int TestDaxThreshold(int argc, char* argv[])
+{
+  //create the sample grid
+  vtkNew<vtkImageData> grid;
+  int dim = 128;
+  grid->SetOrigin(0.0, 0.0, 0.0);
+  grid->SetSpacing(1.0, 1.0, 1.0);
+  grid->SetExtent(0, dim-1,0, dim-1,0, dim-1);
+
+  //run the pipeline
+  return RunVTKPipeline(grid.GetPointer(), argc, argv);
+}
diff --git a/Accelerators/Dax/Testing/Cxx/TestDaxThreshold2.cxx b/Accelerators/Dax/Testing/Cxx/TestDaxThreshold2.cxx
new file mode 100644
index 0000000..11e0593
--- /dev/null
+++ b/Accelerators/Dax/Testing/Cxx/TestDaxThreshold2.cxx
@@ -0,0 +1,76 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestDaxThreshold2.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkActor.h"
+#include "vtkDataSetSurfaceFilter.h"
+#include "vtkDaxThreshold.h"
+#include "vtkFloatArray.h"
+#include "vtkImageData.h"
+#include "vtkNew.h"
+#include "vtkPointData.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkRegressionTestImage.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkRTAnalyticSource.h"
+
+
+int TestDaxThreshold2(int argc, char *argv[])
+{
+  vtkNew<vtkRenderer> ren;
+  vtkNew<vtkRenderWindow> renWin;
+  vtkNew<vtkRenderWindowInteractor> iren;
+
+  renWin->AddRenderer(ren.GetPointer());
+  iren->SetRenderWindow(renWin.GetPointer());
+
+  //---------------------------------------------------
+  // Test using different thresholding methods
+  //---------------------------------------------------
+  vtkNew<vtkRTAnalyticSource> source;
+  vtkNew<vtkDaxThreshold> threshold;
+  threshold->SetInputConnection(source->GetOutputPort());
+
+  double L=100;
+  double U=200;
+  threshold->ThresholdBetween(L,U);
+  threshold->SetAllScalars(0);
+  threshold->Update();
+
+  threshold->UseContinuousCellRangeOn();
+  threshold->Update();
+
+  vtkNew<vtkDataSetSurfaceFilter> surface;
+  surface->SetInputConnection(threshold->GetOutputPort());
+
+  vtkNew<vtkPolyDataMapper> mapper;
+  mapper->SetInputConnection(surface->GetOutputPort());
+
+  vtkNew<vtkActor> actor;
+  actor->SetMapper(mapper.GetPointer());
+
+  ren->AddActor(actor.GetPointer());
+  ren->ResetCamera();
+  renWin->Render();
+
+  int retVal = vtkRegressionTestImage(renWin.GetPointer());
+  if(retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    iren->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return (!retVal);
+}
diff --git a/Accelerators/Dax/Testing/Data/Baseline/TestDaxMarchingCubes.png.md5 b/Accelerators/Dax/Testing/Data/Baseline/TestDaxMarchingCubes.png.md5
new file mode 100644
index 0000000..1d57c3a
--- /dev/null
+++ b/Accelerators/Dax/Testing/Data/Baseline/TestDaxMarchingCubes.png.md5
@@ -0,0 +1 @@
+f2e02d659eeed9edd99a3fb79f16ed65
diff --git a/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold.png.md5 b/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold.png.md5
new file mode 100644
index 0000000..4079298
--- /dev/null
+++ b/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold.png.md5
@@ -0,0 +1 @@
+d653508f9af444b8aba76563fedbdcba
diff --git a/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold2.png.md5 b/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold2.png.md5
new file mode 100644
index 0000000..b9a6135
--- /dev/null
+++ b/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold2.png.md5
@@ -0,0 +1 @@
+ac2b2c9b62595cca96e57c02c7b403ce
diff --git a/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold2_1.png.md5 b/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold2_1.png.md5
new file mode 100644
index 0000000..1c2228e
--- /dev/null
+++ b/Accelerators/Dax/Testing/Data/Baseline/TestDaxThreshold2_1.png.md5
@@ -0,0 +1 @@
+ac41306a7cf3fc9ce0a400f41fbc8097
diff --git a/Accelerators/Dax/daxToVtk/CellTypeToType.h b/Accelerators/Dax/daxToVtk/CellTypeToType.h
new file mode 100644
index 0000000..b21594a
--- /dev/null
+++ b/Accelerators/Dax/daxToVtk/CellTypeToType.h
@@ -0,0 +1,81 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef daxToVtk_CellTypeToType_h
+#define daxToVtk_CellTypeToType_h
+#include <dax/CellTraits.h>
+
+namespace daxToVtk
+{
+template<typename T> struct CellTypeToType;
+template<> struct CellTypeToType<dax::CellTagLine>
+{
+  enum {VTKCellType=VTK_LINE};
+  enum {NUM_POINTS=dax::CellTraits<dax::CellTagLine>::NUM_VERTICES};
+  typedef vtkLine VTKCellClass;
+};
+
+template<> struct CellTypeToType<dax::CellTagHexahedron>
+{
+  enum {VTKCellType=VTK_HEXAHEDRON};
+  enum {NUM_POINTS=dax::CellTraits<dax::CellTagHexahedron>::NUM_VERTICES};
+  typedef vtkHexahedron VTKCellClass;
+};
+
+template<> struct CellTypeToType<dax::CellTagQuadrilateral>
+{
+  enum {VTKCellType=VTK_QUAD};
+  enum {NUM_POINTS=dax::CellTraits<dax::CellTagQuadrilateral>::NUM_VERTICES};
+  typedef vtkQuad VTKCellClass;
+};
+
+template<> struct CellTypeToType<dax::CellTagTetrahedron>
+{
+  enum {VTKCellType=VTK_TETRA};
+  enum {NUM_POINTS=dax::CellTraits<dax::CellTagTetrahedron>::NUM_VERTICES};
+  typedef vtkTetra VTKCellClass;
+};
+
+template<> struct CellTypeToType<dax::CellTagTriangle>
+{
+  enum {VTKCellType=VTK_TRIANGLE};
+  enum {NUM_POINTS=dax::CellTraits<dax::CellTagTriangle>::NUM_VERTICES};
+  typedef vtkTriangle VTKCellClass;
+};
+
+template<> struct CellTypeToType<dax::CellTagVoxel>
+{
+  enum {VTKCellType=VTK_VOXEL};
+  enum {NUM_POINTS=dax::CellTraits<dax::CellTagVoxel>::NUM_VERTICES};
+  typedef vtkVoxel VTKCellClass;
+};
+
+template<> struct CellTypeToType<dax::CellTagVertex>
+{
+  enum {VTKCellType=VTK_VERTEX};
+  enum {NUM_POINTS=dax::CellTraits<dax::CellTagVertex>::NUM_VERTICES};
+  typedef vtkVertex VTKCellClass;
+};
+
+template<> struct CellTypeToType<dax::CellTagWedge>
+{
+  enum {VTKCellType=VTK_WEDGE};
+  enum {NUM_POINTS=dax::CellTraits<dax::CellTagWedge>::NUM_VERTICES};
+  typedef vtkWedge VTKCellClass;
+};
+
+}
+#endif
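
The traits above are resolved purely at compile time. A minimal sketch of how a consumer reads them; the include paths and the exampleCellTraits name are illustrative, not part of the upstream sources:

    // Hedged sketch: query the daxToVtk cell traits for triangles.
    #include "vtkCellType.h"   // defines VTK_TRIANGLE and friends
    #include "vtkTriangle.h"
    #include "daxToVtk/CellTypeToType.h"

    typedef daxToVtk::CellTypeToType<dax::CellTagTriangle> TriTraits;

    int exampleCellTraits()
    {
      // Both values are compile-time constants: VTK_TRIANGLE and 3 points per cell.
      int vtkType = TriTraits::VTKCellType;
      int numPts  = TriTraits::NUM_POINTS;

      // VTKCellClass maps back to the concrete VTK cell, here vtkTriangle.
      TriTraits::VTKCellClass* cell = TriTraits::VTKCellClass::New();
      cell->Delete();
      return vtkType + numPts;
    }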
diff --git a/Accelerators/Dax/daxToVtk/DataSetConverters.h b/Accelerators/Dax/daxToVtk/DataSetConverters.h
new file mode 100644
index 0000000..d46c169
--- /dev/null
+++ b/Accelerators/Dax/daxToVtk/DataSetConverters.h
@@ -0,0 +1,260 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef daxToVtk_DataSetConverter_h
+#define daxToVtk_DataSetConverter_h
+
+class vtkLine;
+class vtkHexahedron;
+class vtkQuad;
+class vtkTetra;
+class vtkTriangle;
+class vtkVoxel;
+class vtkWedge;
+
+#include "vtkCellData.h"
+#include "vtkDataArray.h"
+#include "vtkDataSet.h"
+#include "vtkNew.h"
+#include "vtkPointData.h"
+#include "vtkPolyData.h"
+
+#include <dax/cont/UnstructuredGrid.h>
+#include <dax/cont/UniformGrid.h>
+#include <dax/cont/ArrayHandle.h>
+
+#include "CellTypeToType.h"
+#include <algorithm>
+
+
+namespace daxToVtk
+{
+
+namespace detail
+{
+//------------------------------------------------------------------------------
+template<typename CellType>
+void writeCellTags(vtkCellArray *cell)
+{
+  //Dax never writes the number of points per cell into the connectivity
+  //array, and we don't want to do it in the allocator either. If the
+  //allocator did it, we would create an affinity between the thread the
+  //allocator runs on and the memory, which causes performance issues when
+  //we are in OpenMP.
+
+  //So instead we fill it in once we pull the data back to VTK.
+  vtkIdType* raw_ids = cell->GetPointer();
+
+  for(vtkIdType i=0; i < cell->GetNumberOfCells(); ++i)
+    {
+    raw_ids[i*(CellType::NUM_POINTS+1)]=CellType::NUM_POINTS;
+    }
+}
+
+//------------------------------------------------------------------------------
+template<typename CellType>
+void setCells(vtkCellArray* cells, vtkPolyData* output)
+{
+  //get the vtk cell type we are extracting
+  const VTKCellType cell_type = static_cast<VTKCellType>(CellType::VTKCellType);
+  if(cell_type == VTK_VERTEX)
+    {
+    output->SetVerts(cells);
+    }
+  else if(cell_type == VTK_LINE)
+    {
+    output->SetLines(cells);
+    }
+  else if(cell_type == VTK_TRIANGLE ||
+          cell_type == VTK_QUAD )
+    {
+    output->SetPolys(cells);
+    }
+}
+
+//------------------------------------------------------------------------------
+template<typename CellType>
+void setCells(vtkCellArray* cells, vtkUnstructuredGrid* output)
+{
+  //get the vtk cell type we are extracting
+  const VTKCellType cell_type = static_cast<VTKCellType>(CellType::VTKCellType);
+  output->SetCells(cell_type,cells);
+}
+
+
+//------------------------------------------------------------------------------
+template<typename ContainerTag, typename GridType, typename OutputType>
+void convertCells(ContainerTag, GridType& grid, OutputType* output)
+{
+  //we are dealing with a container type whose memory wasn't allocated by
+  //vtk so we have to copy the data into a new vtk memory location just
+  //to be safe.
+  typedef typename ::daxToVtk::CellTypeToType<
+                                typename GridType::CellTag > CellType;
+
+  //determine amount of memory to allocate
+  const vtkIdType num_cells = grid.GetNumberOfCells();
+  const vtkIdType alloc_size = grid.GetCellConnections().GetNumberOfValues();
+
+  //get the portal from the grid.
+  typedef typename GridType::CellConnectionsType::PortalConstControl
+                                                                DaxPortalType;
+  DaxPortalType daxPortal = grid.GetCellConnections().GetPortalConstControl();
+
+  //ask the vtkToDax allocator to make us memory
+  ::vtkToDax::vtkAlloc<vtkCellArray, CellType::NUM_POINTS> alloc;
+  vtkCellArray* cells = alloc.allocate(num_cells+alloc_size);
+
+  vtkIdType* cellPointer = cells->GetPointer();
+  vtkIdType index = 0;
+  for(vtkIdType i=0; i < num_cells; ++i)
+    {
+    *cellPointer = CellType::NUM_POINTS;
+    ++cellPointer;
+    //index walks the flat Dax connectivity array (NUM_POINTS ids per cell)
+    for(vtkIdType j=0; j < CellType::NUM_POINTS; ++j, ++cellPointer, ++index)
+      {
+      *cellPointer = daxPortal.Get(index);
+      }
+    }
+
+  daxToVtk::detail::setCells<CellType>(cells,output);
+}
+
+//------------------------------------------------------------------------------
+template<typename CellType, typename GridType, typename OutputType>
+void convertCells(vtkToDax::vtkTopologyContainerTag<CellType>,
+                  GridType& grid,
+                  OutputType* output)
+{
+  //in this use case the cell container is of vtk type so we
+  //can directly hook in and use the memory the container allocated
+  //for the output. This is really nice when working with TBB and OpenMP
+  //device adapters.
+  vtkCellArray* cells = grid.GetCellConnections().GetPortalControl().GetVtkData();
+
+
+  //to properly hand the cells back to vtk we have to make sure that
+  //for each cell we fill in the slot that states how many points are
+  //in that cell
+  daxToVtk::detail::writeCellTags<CellType>(cells);
+
+  daxToVtk::detail::setCells<CellType>(cells,output);
+}
+
+//------------------------------------------------------------------------------
+template<typename ContainerTag, typename GridType, typename OutputType>
+void convertPoints(ContainerTag, GridType& grid, OutputType* output)
+{
+  //we are dealing with a container type whose memory wasn't allocated by
+  //vtk so we have to copy the data into a new vtk memory location just
+  //to be safe.
+
+  //determine amount of memory to allocate
+  const vtkIdType num_points = grid.GetNumberOfPoints();
+
+  //ask vtkToDax to allocate the vtkPoints so it gets the float vs double
+  //settings correct
+  ::vtkToDax::vtkAlloc<vtkPoints,3> alloc;
+  vtkPoints* points = alloc.allocate(num_points);
+
+  dax::Vector3 *raw_pts = reinterpret_cast<dax::Vector3*>(
+                                       points->GetData()->GetVoidPointer(0));
+
+  //get the coord portal from the grid.
+  typedef typename GridType::PointCoordinatesType::PortalConstControl
+                                                                DaxPortalType;
+  DaxPortalType daxPortal = grid.GetPointCoordinates().GetPortalConstControl();
+
+  std::copy(daxPortal.GetBeginIterator(),
+            daxPortal.GetEndIterator(),
+            raw_pts);
+
+  output->SetPoints( points );
+}
+
+
+//------------------------------------------------------------------------------
+template<typename GridType, typename OutputType>
+void convertPoints(vtkToDax::vtkPointsContainerTag,
+                  GridType& grid,
+                  OutputType* output)
+{
+  vtkPoints *p = grid.GetPointCoordinates().GetPortalControl().GetVtkData();
+  output->SetPoints(p);
+}
+
+
+} //namespace detail
+
+
+//------------------------------------------------------------------------------
+//convert a UniformGrid to vtkImageData
+inline void dataSetConverter(const dax::cont::UniformGrid<>& grid,
+          vtkImageData* output)
+{
+  dax::Vector3 origin = grid.GetOrigin();
+  dax::Vector3 spacing = grid.GetSpacing();
+  dax::Extent3 extent = grid.GetExtent();
+
+  output->SetOrigin(origin[0],origin[1],origin[2]);
+  output->SetSpacing(spacing[0],spacing[1],spacing[2]);
+  output->SetExtent(extent.Min[0],extent.Max[0],
+                    extent.Min[1],extent.Max[1],
+                    extent.Min[2],extent.Max[2]);
+}
+
+//convert a UnstructuredGrid to vtkUnstructuredGrid
+template<typename CellType, typename TopoTag, typename PointTag>
+inline void dataSetConverter(dax::cont::UnstructuredGrid<CellType,TopoTag,PointTag>& grid,
+          vtkUnstructuredGrid* output)
+{
+  daxToVtk::detail::convertCells(TopoTag(),grid,output);
+  daxToVtk::detail::convertPoints(PointTag(),grid,output);
+}
+
+//convert a UnstructuredGrid to vtkPolyData
+template<typename CellType, typename TopoTag, typename PointTag>
+inline void dataSetConverter(
+  dax::cont::UnstructuredGrid<CellType,TopoTag,PointTag>& grid,
+  vtkPolyData* output)
+{
+  daxToVtk::detail::convertCells(TopoTag(),grid,output);
+  daxToVtk::detail::convertPoints(PointTag(),grid,output);
+}
+
+template<typename FieldType>
+void addCellData(vtkDataSet* output,
+                 FieldType& outputArray,
+                 const std::string& name)
+{
+  vtkDataArray *data = outputArray.GetPortalControl().GetVtkData();
+  data->SetName(name.c_str());
+  output->GetCellData()->AddArray(data);
+}
+
+
+template<typename FieldType>
+void addPointData(vtkDataSet* output,
+                  FieldType& outputArray,
+                  const std::string& name)
+{
+  vtkDataArray *data = outputArray.GetPortalControl().GetVtkData();
+  data->SetName(name.c_str());
+  vtkPointData *pd = output->GetPointData();
+  pd->AddArray(data);
+}
+
+
+}
+#endif // daxToVtk_DataSetConverter_h
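
The converters above rely on the legacy flat vtkCellArray layout, where each cell record is [nPts, id0, ..., idN-1], so the point-count slot of cell i sits at offset i*(NUM_POINTS+1). A hedged sketch for triangles; exampleTriangleLayout is an illustrative name only:

    #include "vtkCellArray.h"
    #include "vtkNew.h"

    void exampleTriangleLayout()
    {
      vtkNew<vtkCellArray> cells;
      // Two triangles: each record is [3, id0, id1, id2] in the legacy layout.
      vtkIdType tri0[3] = {0, 1, 2};
      vtkIdType tri1[3] = {1, 2, 3};
      cells->InsertNextCell(3, tri0);
      cells->InsertNextCell(3, tri1);

      vtkIdType* raw = cells->GetPointer();
      // raw[0] == 3 and raw[4] == 3: these are the count slots that
      // writeCellTags() fills in after Dax has written only the point ids.
      (void)raw;
    }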
diff --git a/Accelerators/Dax/module.cmake b/Accelerators/Dax/module.cmake
new file mode 100644
index 0000000..226652f
--- /dev/null
+++ b/Accelerators/Dax/module.cmake
@@ -0,0 +1,17 @@
+vtk_module(vtkAcceleratorsDax
+  DEPENDS
+    vtkCommonCore
+    vtkCommonDataModel
+    vtkCommonExecutionModel
+    vtkFiltersCore
+    vtkFiltersGeometry
+  TEST_DEPENDS
+    vtkTestingCore
+    vtkTestingRendering
+    vtkRenderingVolume
+    vtkRenderingVolumeOpenGL
+    vtkIOLegacy
+    vtkIOXML
+    vtkImagingSources
+  EXCLUDE_FROM_ALL
+  )
diff --git a/Accelerators/Dax/vtkDaxConfig.h.in b/Accelerators/Dax/vtkDaxConfig.h.in
new file mode 100644
index 0000000..6327ad3
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxConfig.h.in
@@ -0,0 +1,41 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkDaxConfig.h.in
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __vtkDaxConfig_h
+#define __vtkDaxConfig_h
+
+// Boost gives a bunch of warnings with nvcc if you don't specify how shared
+// pointers should handle threads. Dax does not care (it is too careful about
+// threading to cause hazards in shared pointers), but your code might. Thus,
+// you should specify one when compiling with nvcc. If your code does not share
+// shared pointers among threads, then you can just disable them as below.
+// (BTW, if you forget to set this, Dax will give its own descriptive message
+// with instructions on how to fix.)
+#if @DAX_DISABLE_BOOST_SP_THREADS@ == 1
+#define BOOST_SP_DISABLE_THREADS
+#endif
+
+
+#ifndef DAX_DEVICE_ADAPTER
+# define DAX_DEVICE_ADAPTER @DAX_DEVICE_ADAPTER@
+#endif
+
+//make sure we use 64-bit ids just like VTK
+#ifdef VTK_USE_64BIT_IDS
+# define DAX_USE_64BIT_IDS
+#endif
+#include <dax/internal/Configure.h>
+
+#include <dax/cont/DeviceAdapter.h>
+#endif // __vtkDaxConfig_h
diff --git a/Accelerators/Dax/vtkDaxDetailCommon.h b/Accelerators/Dax/vtkDaxDetailCommon.h
new file mode 100644
index 0000000..b78605c
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxDetailCommon.h
@@ -0,0 +1,52 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef __vtkDaxDetailCommon_h
+#define __vtkDaxDetailCommon_h
+
+#include "vtkCellTypes.h"
+#include "vtkDataSet.h"
+#include "vtkGenericCell.h"
+#include "vtkNew.h"
+
+namespace vtkDax {
+namespace detail {
+  struct CellTypeInDataSet
+    {
+    explicit CellTypeInDataSet(int cellType):
+      Cell(vtkGenericCell::InstantiateCell(cellType)){}
+    ~CellTypeInDataSet(){this->Cell->Delete();}
+    vtkCell* Cell;
+    };
+
+  //returns whether a dataset's cell type can be used from within Dax
+  inline CellTypeInDataSet cellType(vtkDataSet* input)
+  {
+    //determine the cell types that the dataset has
+    vtkNew<vtkCellTypes> cellTypes;
+    input->GetCellTypes(cellTypes.GetPointer());
+
+    if(cellTypes->GetNumberOfTypes() > 1)
+      {
+      //we currently only support a single cell type
+      return CellTypeInDataSet(VTK_EMPTY_CELL);
+      }
+
+    return CellTypeInDataSet(cellTypes->GetCellType(0));
+  }
+}}
+#endif // __vtkDaxDetailCommon_h
+// VTK-HeaderTest-Exclude: vtkDaxDetailCommon.h
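
A short sketch of how the cellType() helper is typically consumed downstream; exampleCellCheck is an illustrative name, not part of VTK or Dax:

    #include "vtkCellType.h"
    #include "vtkDataSet.h"
    #include "vtkDaxDetailCommon.h"

    bool exampleCellCheck(vtkDataSet* input)
    {
      vtkDax::detail::CellTypeInDataSet cType = vtkDax::detail::cellType(input);
      // Mixed-cell datasets come back as VTK_EMPTY_CELL, which no dispatcher
      // entry matches, so callers fall back to the plain VTK filter.
      return cType.Cell->GetCellType() != VTK_EMPTY_CELL;
    }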
diff --git a/Accelerators/Dax/vtkDaxMarchingCubes.cxx b/Accelerators/Dax/vtkDaxMarchingCubes.cxx
new file mode 100644
index 0000000..e27579e
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxMarchingCubes.cxx
@@ -0,0 +1,81 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+#include "vtkDaxMarchingCubes.h"
+
+#include "vtkDataSet.h"
+#include "vtkDispatcher.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkPointData.h"
+#include "vtkPolyData.h"
+#include "vtkUnstructuredGrid.h"
+
+vtkStandardNewMacro(vtkDaxMarchingCubes)
+
+namespace vtkDax {
+  int MarchingCubes(vtkDataSet* input,
+                    vtkPolyData *output,
+                    vtkDataArray* field,
+                    float isoValue);
+}
+
+
+//------------------------------------------------------------------------------
+vtkDaxMarchingCubes::vtkDaxMarchingCubes()
+  {
+  }
+
+//------------------------------------------------------------------------------
+vtkDaxMarchingCubes::~vtkDaxMarchingCubes()
+  {
+  }
+
+//------------------------------------------------------------------------------
+void vtkDaxMarchingCubes::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
+
+//------------------------------------------------------------------------------
+int vtkDaxMarchingCubes::RequestData(vtkInformation *request,
+                             vtkInformationVector **inputVector,
+                             vtkInformationVector *outputVector)
+{
+  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
+  vtkInformation *outInfo = outputVector->GetInformationObject(0);
+  vtkDataSet *input = vtkDataSet::SafeDownCast(
+    inInfo->Get(vtkDataObject::DATA_OBJECT()));
+
+  vtkPolyData* output = vtkPolyData::SafeDownCast(
+                                  outInfo->Get(vtkDataObject::DATA_OBJECT()));
+  vtkDataArray *scalars = input->GetPointData()->GetScalars();
+  int result = 0;
+  if(scalars)
+    {
+    result = vtkDax::MarchingCubes(input,
+                                   output,
+                                   scalars,
+                                   this->GetValue(0));
+    }
+
+  if(!result)
+    {
+    result = this->Superclass::RequestData(request,inputVector,outputVector);
+    }
+  return result;
+}
diff --git a/Accelerators/Dax/vtkDaxMarchingCubes.h b/Accelerators/Dax/vtkDaxMarchingCubes.h
new file mode 100644
index 0000000..5f88d40
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxMarchingCubes.h
@@ -0,0 +1,43 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef __vtkDaxMarchingCubes_h
+#define __vtkDaxMarchingCubes_h
+
+#include "vtkMarchingCubes.h"
+#include "vtkAcceleratorsDaxModule.h" //required for correct implementation
+
+class VTKACCELERATORSDAX_EXPORT vtkDaxMarchingCubes : public vtkMarchingCubes
+{
+public:
+  vtkTypeMacro(vtkDaxMarchingCubes,vtkMarchingCubes)
+  void PrintSelf(ostream& os, vtkIndent indent);
+  static vtkDaxMarchingCubes* New();
+
+protected:
+  vtkDaxMarchingCubes();
+  ~vtkDaxMarchingCubes();
+
+  virtual int RequestData(vtkInformation *,
+                          vtkInformationVector **,
+                          vtkInformationVector *);
+
+private:
+  vtkDaxMarchingCubes(const vtkDaxMarchingCubes&); //Not implemented
+  void operator=(const vtkDaxMarchingCubes&); // Not implemented
+};
+
+#endif // __vtkDaxMarchingCubes_h
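
Because vtkDaxMarchingCubes derives from vtkMarchingCubes and punts back to it when Dax cannot handle the input, it is driven exactly like the stock filter. A minimal sketch; the source and iso-value are chosen only for illustration:

    #include "vtkDaxMarchingCubes.h"
    #include "vtkNew.h"
    #include "vtkRTAnalyticSource.h"

    void exampleDaxMarchingCubes()
    {
      vtkNew<vtkRTAnalyticSource> source;   // image data with "RTData" point scalars
      vtkNew<vtkDaxMarchingCubes> contour;
      contour->SetInputConnection(source->GetOutputPort());
      contour->SetNumberOfContours(1);
      contour->SetValue(0, 150.0);          // illustrative iso-value
      contour->Update();                    // Dax backend, or vtkMarchingCubes fallback
    }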
diff --git a/Accelerators/Dax/vtkDaxMarchingCubesImpl.cu b/Accelerators/Dax/vtkDaxMarchingCubesImpl.cu
new file mode 100644
index 0000000..114a1ab
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxMarchingCubesImpl.cu
@@ -0,0 +1,17 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#include "vtkDaxMarchingCubesImpl.h"
diff --git a/Accelerators/Dax/vtkDaxMarchingCubesImpl.cxx b/Accelerators/Dax/vtkDaxMarchingCubesImpl.cxx
new file mode 100644
index 0000000..114a1ab
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxMarchingCubesImpl.cxx
@@ -0,0 +1,17 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#include "vtkDaxMarchingCubesImpl.h"
diff --git a/Accelerators/Dax/vtkDaxMarchingCubesImpl.h b/Accelerators/Dax/vtkDaxMarchingCubesImpl.h
new file mode 100644
index 0000000..0020332
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxMarchingCubesImpl.h
@@ -0,0 +1,136 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef __vtkDaxMarchingCubesImpl_h
+#define __vtkDaxMarchingCubesImpl_h
+
+// Common code
+#include "vtkDaxConfig.h"
+#include "vtkDaxDetailCommon.h"
+
+#include "vtkDispatcher.h"
+#include "vtkDoubleDispatcher.h"
+#include "vtkNew.h"
+
+//fields we support
+#include "vtkFloatArray.h"
+
+//cell types we support
+#include "vtkCellTypes.h"
+#include "vtkGenericCell.h"
+#include "vtkTriangle.h"
+#include "vtkVoxel.h"
+
+//datasets we support
+#include "vtkDataObjectTypes.h"
+#include "vtkImageData.h"
+#include "vtkUniformGrid.h"
+#include "vtkPolyData.h"
+
+//helpers that convert vtk to dax
+#include "vtkToDax/Portals.h"
+#include "vtkToDax/Containers.h"
+#include "vtkToDax/CellTypeToType.h"
+#include "vtkToDax/DataSetTypeToType.h"
+#include "vtkToDax/FieldTypeToType.h"
+#include "vtkToDax/MarchingCubes.h"
+
+namespace vtkDax {
+namespace detail {
+  struct ValidMarchingCubesInput
+  {
+    typedef int ReturnType;
+    vtkDataSet* Input;
+    vtkCell* Cell;
+    double IsoValue;
+
+    vtkPolyData* Result;
+
+    ValidMarchingCubesInput(vtkDataSet* in, vtkPolyData* out,
+                        vtkCell* cell, double isoValue):
+      Input(in),Cell(cell),IsoValue(isoValue),Result(out){}
+
+    template<typename LHS>
+    int operator()(LHS &arrayField) const
+      {
+      //we can derive the type of the field at compile time, but not the
+      //length
+      if (arrayField.GetNumberOfComponents() == 1)
+        {
+        //first we extract the field type of the array
+        //second we extract the number of components
+        typedef typename vtkToDax::FieldTypeToType<LHS,1>::FieldType FT1;
+        return dispatchOnFieldType<LHS,FT1>(arrayField);
+        }
+      return 0;
+      }
+
+    template<typename VTKArrayType, typename DaxFieldType>
+    int dispatchOnFieldType(VTKArrayType& vtkField) const
+      {
+      typedef DaxFieldType FieldType;
+      typedef vtkToDax::vtkArrayContainerTag<VTKArrayType> FieldTag;
+      typedef dax::cont::ArrayHandle<FieldType,FieldTag> FieldHandle;
+
+      typedef typename dax::cont::ArrayHandle
+        <FieldType, FieldTag>::PortalConstControl PortalType;
+
+      FieldHandle field = FieldHandle( PortalType(&vtkField,
+                                            vtkField.GetNumberOfTuples() ) );
+      vtkToDax::MarchingCubes<FieldHandle> marching(field,
+                                                 FieldType(IsoValue));
+      marching.setFieldName(vtkField.GetName());
+      marching.setOutputGrid(this->Result);
+
+      // see if we have a valid data set type; if so we will perform the
+      // marching cubes if possible
+      vtkDoubleDispatcher<vtkDataSet,vtkCell,int> dataDispatcher;
+      dataDispatcher.Add<vtkImageData,vtkVoxel>(marching);
+      dataDispatcher.Add<vtkUniformGrid,vtkVoxel>(marching);
+
+      int validMC = dataDispatcher.Go(this->Input,this->Cell);
+      return validMC;
+      }
+  private:
+    void operator=(const ValidMarchingCubesInput&);
+  };
+} //namespace detail
+
+
+//------------------------------------------------------------------------------
+int MarchingCubes(vtkDataSet* input, vtkPolyData *output,
+              vtkDataArray* field, float isoValue)
+{
+  //we are doing marching cubes; first verify we have suitable cells.
+  //Dax currently supports: hexs,lines,quads,tets,triangles,vertex,voxel,wedge
+  //if we encounter a cell type that doesn't match that list we punt to the
+  //VTK implementation.
+  vtkDax::detail::CellTypeInDataSet cType = vtkDax::detail::cellType(input);
+
+  //construct the object that holds all the state needed to do the MC
+  vtkDax::detail::ValidMarchingCubesInput validInput(input,output,cType.Cell,
+                                                     isoValue);
+
+
+  //setup the dispatch to only allow float arrays to go to the next step
+  vtkDispatcher<vtkAbstractArray,int> fieldDispatcher;
+  fieldDispatcher.Add<vtkFloatArray>(validInput);
+  return fieldDispatcher.Go(field);
+}
+
+} //end vtkDax namespace
+// VTK-HeaderTest-Exclude: vtkDaxMarchingCubesImpl.h
+#endif
\ No newline at end of file
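
MarchingCubes() above routes the abstract vtkDataArray through a vtkDispatcher so that typed code sees the concrete array class, then a vtkDoubleDispatcher picks the (dataset, cell) pair. A hedged sketch of the first stage with a made-up functor (PrintTupleCount and exampleDispatch are not part of VTK or Dax):

    #include "vtkDataArray.h"
    #include "vtkDispatcher.h"
    #include "vtkFloatArray.h"

    struct PrintTupleCount   // hypothetical functor mirroring ValidMarchingCubesInput
    {
      typedef int ReturnType;
      template<typename ArrayType>
      int operator()(ArrayType& array) const
        {
        return static_cast<int>(array.GetNumberOfTuples());
        }
    };

    int exampleDispatch(vtkDataArray* field)
    {
      PrintTupleCount functor;
      vtkDispatcher<vtkAbstractArray,int> dispatcher;
      dispatcher.Add<vtkFloatArray>(functor);  // only float arrays are routed
      return dispatcher.Go(field);             // 0 when no registered type matches,
                                               // which is what triggers the VTK fallback
    }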
diff --git a/Accelerators/Dax/vtkDaxObjectFactory.h b/Accelerators/Dax/vtkDaxObjectFactory.h
new file mode 100644
index 0000000..274dc19
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxObjectFactory.h
@@ -0,0 +1,72 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkDaxObjectFactory.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#ifndef __vtkDaxObjectFactory_h
+#define __vtkDaxObjectFactory_h
+
+#include "vtkAcceleratorsDaxModule.h" //required for correct implementation
+
+#include "vtkObjectFactory.h"
+#include "vtkObjectFactoryCollection.h" //required to make a factory
+#include "vtkOverrideInformation.h" //required to make a factory
+#include "vtkOverrideInformationCollection.h" //required to make a factory
+#include "vtkVersion.h" //required to make a factory
+
+#include "vtkDaxThreshold.h" //required to overload Threshold
+VTK_CREATE_CREATE_FUNCTION(vtkDaxThreshold)
+
+#include "vtkDaxMarchingCubes.h" //required to overload Marching Cubes
+VTK_CREATE_CREATE_FUNCTION(vtkDaxMarchingCubes)
+
+class VTKACCELERATORSDAX_EXPORT vtkDaxObjectFactory : public vtkObjectFactory
+{
+public:
+  static vtkDaxObjectFactory* New();
+
+  virtual const char* GetVTKSourceVersion() { return VTK_SOURCE_VERSION; }
+  const char* GetDescription() { return "Dax Object Factory"; }
+  vtkTypeMacro(vtkDaxObjectFactory,vtkObjectFactory)
+
+  void PrintSelf(ostream& os, vtkIndent indent)
+  {
+    this->Superclass::PrintSelf(os,indent);
+  }
+
+protected:
+  vtkDaxObjectFactory();
+
+private:
+  vtkDaxObjectFactory(const vtkDaxObjectFactory&); // Not implemented
+  void operator=(const vtkDaxObjectFactory&); // Not implemented
+};
+
+
+vtkDaxObjectFactory::vtkDaxObjectFactory()
+{
+  this->RegisterOverride("vtkThreshold",
+                         "vtkDaxThreshold",
+                         "Override threshold with Dax threshold version",
+                         1,
+                         vtkObjectFactoryCreatevtkDaxThreshold);
+
+  this->RegisterOverride("vtkMarchingCubes",
+                         "vtkDaxMarchingCubes",
+                         "Override marching cubes Dax threshold version",
+                         1,
+                         vtkObjectFactoryCreatevtkDaxThreshold);
+}
+
+vtkStandardNewMacro(vtkDaxObjectFactory)
+#endif
\ No newline at end of file
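
This header only defines the factory; whether the module registers it automatically depends on build configuration not shown here. A minimal sketch of opting in explicitly so that plain New() calls hand back the Dax overrides (exampleRegisterDaxFactory is an illustrative name):

    #include "vtkDaxObjectFactory.h"
    #include "vtkNew.h"
    #include "vtkObjectFactory.h"
    #include "vtkThreshold.h"

    void exampleRegisterDaxFactory()
    {
      vtkNew<vtkDaxObjectFactory> factory;
      vtkObjectFactory::RegisterFactory(factory.GetPointer());

      // With the factory registered, the standard New() call can return the
      // Dax override (vtkDaxThreshold) instead of the plain vtkThreshold.
      vtkThreshold* threshold = vtkThreshold::New();
      threshold->Delete();

      vtkObjectFactory::UnRegisterFactory(factory.GetPointer());
    }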
diff --git a/Accelerators/Dax/vtkDaxThreshold.cxx b/Accelerators/Dax/vtkDaxThreshold.cxx
new file mode 100644
index 0000000..cd50da4
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxThreshold.cxx
@@ -0,0 +1,76 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+#include "vtkDaxThreshold.h"
+
+#include "vtkDispatcher.h"
+#include "vtkDataSet.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkObjectFactory.h"
+
+namespace vtkDax {
+  int Threshold(vtkDataSet* input,
+                vtkUnstructuredGrid *output,
+                vtkDataArray* field,
+                double lower,
+                double upper);
+}
+
+vtkStandardNewMacro(vtkDaxThreshold)
+
+//------------------------------------------------------------------------------
+vtkDaxThreshold::vtkDaxThreshold()
+{
+
+}
+
+//------------------------------------------------------------------------------
+vtkDaxThreshold::~vtkDaxThreshold()
+{
+}
+
+//------------------------------------------------------------------------------
+void vtkDaxThreshold::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
+
+//------------------------------------------------------------------------------
+int vtkDaxThreshold::RequestData(vtkInformation *request,
+                                 vtkInformationVector **inputVector,
+                                 vtkInformationVector *outputVector)
+{
+  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
+  vtkInformation *outInfo = outputVector->GetInformationObject(0);
+  vtkDataSet *input = vtkDataSet::SafeDownCast(
+    inInfo->Get(vtkDataObject::DATA_OBJECT()));
+
+  vtkUnstructuredGrid* output = vtkUnstructuredGrid::SafeDownCast(
+    outInfo->Get(vtkDataObject::DATA_OBJECT()));
+
+
+  int result = vtkDax::Threshold(input,
+                                 output,
+                                 this->GetInputArrayToProcess(0,inputVector),
+                                 this->GetLowerThreshold(),
+                                 this->GetUpperThreshold());
+  if(!result)
+    {
+      result = this->Superclass::RequestData(request,inputVector,outputVector);
+    }
+  return result;
+}
diff --git a/Accelerators/Dax/vtkDaxThreshold.h b/Accelerators/Dax/vtkDaxThreshold.h
new file mode 100644
index 0000000..5e0c522
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxThreshold.h
@@ -0,0 +1,44 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef __vtkDaxThreshold_h
+#define __vtkDaxThreshold_h
+
+#include "vtkThreshold.h"
+#include "vtkAcceleratorsDaxModule.h" //required for correct implementation
+
+class VTKACCELERATORSDAX_EXPORT vtkDaxThreshold : public vtkThreshold
+{
+public:
+  vtkTypeMacro(vtkDaxThreshold,vtkThreshold)
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  static vtkDaxThreshold* New();
+
+protected:
+  vtkDaxThreshold();
+  ~vtkDaxThreshold();
+
+  virtual int RequestData(vtkInformation *,
+                          vtkInformationVector **,
+                          vtkInformationVector *);
+
+private:
+  vtkDaxThreshold(const vtkDaxThreshold&); // Not implemented
+  void operator=(const vtkDaxThreshold&); // Not implemented
+};
+
+#endif // __vtkDaxThreshold_h
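
Since vtkDaxThreshold derives from vtkThreshold and falls back to it, it is configured exactly like the stock filter. A minimal sketch mirroring the tests above; the source and range are illustrative (vtkRTAnalyticSource names its point scalars "RTData"):

    #include "vtkDataObject.h"
    #include "vtkDaxThreshold.h"
    #include "vtkNew.h"
    #include "vtkRTAnalyticSource.h"
    #include "vtkUnstructuredGrid.h"

    void exampleDaxThreshold()
    {
      vtkNew<vtkRTAnalyticSource> source;
      vtkNew<vtkDaxThreshold> threshold;
      threshold->SetInputConnection(source->GetOutputPort());
      threshold->ThresholdBetween(100.0, 200.0);   // illustrative range
      threshold->SetInputArrayToProcess(0, 0, 0,
        vtkDataObject::FIELD_ASSOCIATION_POINTS, "RTData");
      threshold->Update();                         // Dax path, or vtkThreshold fallback

      // Like vtkThreshold, the result is an unstructured grid.
      vtkUnstructuredGrid* result = threshold->GetOutput();
      (void)result;
    }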
diff --git a/Accelerators/Dax/vtkDaxThresholdImpl.cu b/Accelerators/Dax/vtkDaxThresholdImpl.cu
new file mode 100644
index 0000000..6e1a3c8
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxThresholdImpl.cu
@@ -0,0 +1,17 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#include "vtkDaxThresholdImpl.h"
diff --git a/Accelerators/Dax/vtkDaxThresholdImpl.cxx b/Accelerators/Dax/vtkDaxThresholdImpl.cxx
new file mode 100644
index 0000000..6e1a3c8
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxThresholdImpl.cxx
@@ -0,0 +1,17 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#include "vtkDaxThresholdImpl.h"
diff --git a/Accelerators/Dax/vtkDaxThresholdImpl.h b/Accelerators/Dax/vtkDaxThresholdImpl.h
new file mode 100644
index 0000000..84866bb
--- /dev/null
+++ b/Accelerators/Dax/vtkDaxThresholdImpl.h
@@ -0,0 +1,172 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef __vtkDaxThresholdImpl_h
+#define __vtkDaxThresholdImpl_h
+
+// Common code
+#include "vtkDaxConfig.h"
+#include "vtkDaxDetailCommon.h"
+
+#include "vtkDispatcher.h"
+#include "vtkDoubleDispatcher.h"
+#include "vtkNew.h"
+
+//cell types we support
+#include "vtkCellTypes.h"
+#include "vtkGenericCell.h"
+#include "vtkHexahedron.h"
+#include "vtkLine.h"
+#include "vtkQuad.h"
+#include "vtkTetra.h"
+#include "vtkTriangle.h"
+#include "vtkVertex.h"
+#include "vtkVoxel.h"
+#include "vtkWedge.h"
+
+//fields we support
+#include "vtkFloatArray.h"
+#include "vtkIntArray.h"
+#include "vtkUnsignedCharArray.h"
+
+//datasets we support
+#include "vtkDataObjectTypes.h"
+#include "vtkImageData.h"
+#include "vtkStructuredGrid.h"
+#include "vtkUniformGrid.h"
+#include "vtkUnstructuredGrid.h"
+
+//helpers that convert to and from Dax
+#include "vtkToDax/CellTypeToType.h"
+#include "vtkToDax/Containers.h"
+#include "vtkToDax/DataSetTypeToType.h"
+#include "vtkToDax/FieldTypeToType.h"
+#include "vtkToDax/Portals.h"
+#include "vtkToDax/Threshold.h"
+
+
+namespace vtkDax{
+namespace detail{
+
+  struct ValidThresholdInput
+  {
+    typedef int ReturnType;
+    vtkDataSet* Input;
+    vtkCell* Cell;
+    double Min;
+    double Max;
+
+    vtkUnstructuredGrid* Result;
+
+    ValidThresholdInput(vtkDataSet* in, vtkUnstructuredGrid* out,
+                        vtkCell* cell, double lower, double upper):
+      Input(in),Cell(cell),Min(lower),Max(upper),Result(out){}
+
+    template<typename LHS>
+    int operator()(LHS &arrayField) const
+      {
+      //we can derive the type of the field at compile time, but not the
+      //length
+      switch(arrayField.GetNumberOfComponents())
+        {
+          case 1:
+            //first we extract the field type of the array
+            //second we extract the number of components
+            typedef typename vtkToDax::FieldTypeToType<LHS,1>::FieldType FT1;
+            return dispatchOnFieldType<LHS,FT1>(arrayField);
+          case 2:
+            typedef typename vtkToDax::FieldTypeToType<LHS,2>::FieldType FT2;
+          return dispatchOnFieldType<LHS,FT2>(arrayField);
+          case 3:
+            typedef typename vtkToDax::FieldTypeToType<LHS,3>::FieldType FT3;
+            return dispatchOnFieldType<LHS,FT3>(arrayField);
+        default:
+          //currently we only support 1 to 3 components;
+          //we need to make the dispatch on field data smarter so that it
+          //handles this automatically
+          return 0;
+        }
+
+
+      }
+
+    template<typename VTKArrayType, typename DaxFieldType>
+    int dispatchOnFieldType(VTKArrayType& vtkField) const
+      {
+      typedef DaxFieldType FieldType;
+      typedef vtkToDax::vtkArrayContainerTag<VTKArrayType> FieldTag;
+      typedef dax::cont::ArrayHandle<FieldType,FieldTag> FieldHandle;
+      typedef typename dax::cont::ArrayHandle<FieldType,
+                      FieldTag>::PortalConstControl      PortalType;
+
+      FieldHandle field = FieldHandle( PortalType(&vtkField,
+                                            vtkField.GetNumberOfTuples() ) );
+      vtkToDax::Threshold<FieldHandle> threshold(field,
+                                                 FieldType(Min),
+                                                 FieldType(Max));
+      threshold.setFieldName(vtkField.GetName());
+      threshold.setOutputGrid(this->Result);
+
+      //see if we have a valid data set type;
+      //if so we will perform the threshold if possible
+      vtkDoubleDispatcher<vtkDataSet,vtkCell,int> dataDispatcher;
+      dataDispatcher.Add<vtkImageData,vtkVoxel>(threshold);
+      dataDispatcher.Add<vtkUniformGrid,vtkVoxel>(threshold);
+
+      dataDispatcher.Add<vtkUnstructuredGrid,vtkHexahedron>(threshold);
+      dataDispatcher.Add<vtkUnstructuredGrid,vtkLine>(threshold);
+      dataDispatcher.Add<vtkUnstructuredGrid,vtkQuad>(threshold);
+      dataDispatcher.Add<vtkUnstructuredGrid,vtkTetra>(threshold);
+      dataDispatcher.Add<vtkUnstructuredGrid,vtkTriangle>(threshold);
+      dataDispatcher.Add<vtkUnstructuredGrid,vtkVertex>(threshold);
+      dataDispatcher.Add<vtkUnstructuredGrid,vtkWedge>(threshold);
+
+      int validThreshold = dataDispatcher.Go(this->Input,this->Cell);
+      return validThreshold;
+      }
+  private:
+    void operator=(const ValidThresholdInput&);
+  };
+} //end detail namespace
+
+
+//------------------------------------------------------------------------------
+int Threshold(vtkDataSet* input, vtkUnstructuredGrid *output,
+              vtkDataArray* field, double lower, double upper)
+{
+  //we are doing a point threshold, so verify that we have suitable cells.
+  //Dax currently supports: hexes,lines,quads,tets,triangles,vertices,voxels,wedges;
+  //if the input contains a cell type that doesn't match that list we punt to the
+  //VTK implementation.
+  vtkDax::detail::CellTypeInDataSet cType = vtkDax::detail::cellType(input);
+
+  //construct the object that holds all the state needed to do the threshold
+  vtkDax::detail::ValidThresholdInput validInput(input,output,cType.Cell,
+                                                 lower,
+                                                 upper);
+
+
+  //set up the dispatch so that only float, unsigned char, and int arrays go to the next step
+  vtkDispatcher<vtkAbstractArray,int> fieldDispatcher;
+  fieldDispatcher.Add<vtkFloatArray>(validInput);
+  fieldDispatcher.Add<vtkUnsignedCharArray>(validInput);
+  fieldDispatcher.Add<vtkIntArray>(validInput);
+  return fieldDispatcher.Go(field);
+}
+
+} //end vtkDax namespace
+// VTK-HeaderTest-Exclude: vtkDaxThresholdImpl.h
+#endif
\ No newline at end of file
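For orientation, a minimal caller sketch for the vtkDax::Threshold() entry point
defined above. The helper name and the tiny dataset are invented for illustration;
the field must be one of the array types registered with the dispatcher
(vtkFloatArray, vtkUnsignedCharArray, vtkIntArray).

  #include "vtkDaxThresholdImpl.h"
  #include "vtkFloatArray.h"
  #include "vtkImageData.h"
  #include "vtkPointData.h"
  #include "vtkSmartPointer.h"
  #include "vtkUnstructuredGrid.h"

  int runDaxThresholdSketch()
  {
    // build a tiny image with one float scalar per point
    vtkSmartPointer<vtkImageData> image = vtkSmartPointer<vtkImageData>::New();
    image->SetDimensions(4, 4, 4);

    vtkSmartPointer<vtkFloatArray> scalars = vtkSmartPointer<vtkFloatArray>::New();
    scalars->SetName("values");
    scalars->SetNumberOfTuples(image->GetNumberOfPoints());
    for (vtkIdType i = 0; i < scalars->GetNumberOfTuples(); ++i)
      {
      scalars->SetValue(i, static_cast<float>(i));
      }
    image->GetPointData()->SetScalars(scalars);

    // returns 1 if Dax performed the threshold, 0 if the caller should fall
    // back to the regular VTK implementation
    vtkSmartPointer<vtkUnstructuredGrid> out =
        vtkSmartPointer<vtkUnstructuredGrid>::New();
    return vtkDax::Threshold(image, out, scalars, 10.0, 40.0);
  }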
diff --git a/Accelerators/Dax/vtkToDax/Allocators.h b/Accelerators/Dax/vtkToDax/Allocators.h
new file mode 100644
index 0000000..fccfaa4
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/Allocators.h
@@ -0,0 +1,132 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_Allocators_h
+#define vtkToDax_Allocators_h
+
+#include <dax/VectorTraits.h>
+#include <cstddef>
+
+class vtkPoints;
+class vtkCellArray;
+
+/*
+ * The one rule of allocators is that they can allocate
+ * memory, but they can't set any values in the allocated memory.
+ * We can't write to the memory because that causes affinity
+ * between the memory location and the current thread, which is a very
+ * bad thing: we want that memory location affinity to be assigned to the dax
+ * thread that will be using that section of memory, not the master thread.
+ */
+
+namespace vtkToDax
+{
+template< typename _T,
+          int NUM_COMPONENTS>
+struct vtkAlloc
+{
+  typedef _T T;
+  typedef vtkAlloc<T,NUM_COMPONENTS> self;
+
+  typedef std::size_t   size_type;
+  typedef ptrdiff_t     difference_type;
+  typedef T*            pointer;
+  typedef const T*      const_pointer;
+  typedef T&            reference;
+  typedef const T&      const_reference;
+  typedef T             value_type;
+
+
+  pointer allocate(size_type n, self::const_pointer hint = 0)
+    {
+    pointer p =  value_type::New();
+    p->SetNumberOfComponents(NUM_COMPONENTS);
+    p->SetNumberOfTuples(n);
+    return p;
+    }
+
+  void deallocate(self::pointer p, self::size_type)
+    {
+    p->Delete();
+    }
+};
+
+template<int NUM_COMPONENTS>
+struct vtkAlloc<vtkPoints, NUM_COMPONENTS>
+{
+  typedef vtkPoints T;
+  typedef vtkAlloc<T,NUM_COMPONENTS> self;
+
+  typedef std::size_t   size_type;
+  typedef ptrdiff_t     difference_type;
+  typedef T*            pointer;
+  typedef const T*      const_pointer;
+  typedef T&            reference;
+  typedef const T&      const_reference;
+  typedef T             value_type;
+
+
+  pointer allocate(size_type n, const_pointer hint = 0)
+    {
+#ifdef DAX_USE_DOUBLE_PRECISION
+    pointer p = value_type::New(VTK_DOUBLE);
+#else
+    pointer p = value_type::New(VTK_FLOAT);
+#endif
+    p->SetNumberOfPoints(n);
+    return p;
+    }
+
+  void deallocate(pointer p, size_type)
+    {
+    p->Delete();
+    }
+};
+
+
+template<int NUM_COMPONENTS>
+struct vtkAlloc<vtkCellArray, NUM_COMPONENTS>
+{
+  typedef vtkCellArray T;
+  typedef vtkAlloc<T,NUM_COMPONENTS> self;
+
+  typedef std::size_t   size_type;
+  typedef ptrdiff_t     difference_type;
+  typedef T*            pointer;
+  typedef const T*      const_pointer;
+  typedef T&            reference;
+  typedef const T&      const_reference;
+  typedef T             value_type;
+
+
+  //for cell arrays dax requests an allocation that is num_cells * num_components
+  pointer allocate(size_type n, const_pointer hint = 0)
+    {
+    pointer p =  value_type::New();
+    const size_type numCells = n/NUM_COMPONENTS;
+    p->SetNumberOfCells(numCells);
+    p->GetData()->SetNumberOfTuples(n+numCells);
+    return p;
+    }
+
+  void deallocate(pointer p, size_type)
+    {
+    p->Delete();
+    }
+};
+}
+
+#endif //vtkToDax_Allocators_h
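A minimal sketch of the allocator contract described in the comment block above,
using the vtkPoints specialization (the function name and the size are arbitrary):

  #include "vtkToDax/Allocators.h"
  #include "vtkPoints.h"

  void allocatorSketch()
  {
    // allocate room for 1000 points without writing to it; the dax thread
    // that consumes the buffer should be the first one to touch the memory
    vtkToDax::vtkAlloc<vtkPoints, 3> alloc;
    vtkPoints* pts = alloc.allocate(1000);

    // ... hand pts to the dax containers/portals ...

    alloc.deallocate(pts, 1000);
  }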
diff --git a/Accelerators/Dax/vtkToDax/CellTypeAndDataType.h b/Accelerators/Dax/vtkToDax/CellTypeAndDataType.h
new file mode 100644
index 0000000..54c7df4
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/CellTypeAndDataType.h
@@ -0,0 +1,49 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_CellTypeAndDataType_h
+#define vtkToDax_CellTypeAndDataType_h
+
+#include "vtkType.h"
+#include "vtkCellType.h"
+
+
+namespace vtkToDax
+{
+//By default we list any combination as being invalid
+template<int DataSetType, int CellType>
+struct CellTypeAndDataType
+{
+  enum{Valid=0};
+};
+
+//we then specialize all the valid combinations of dataset and cell types
+//that Dax currently supports
+template<> struct CellTypeAndDataType<VTK_IMAGE_DATA,VTK_VOXEL>{enum{Valid=1};};
+template<> struct CellTypeAndDataType<VTK_UNIFORM_GRID,VTK_VOXEL>{enum{Valid=1};};
+
+template<> struct CellTypeAndDataType<VTK_UNSTRUCTURED_GRID,VTK_LINE>{enum{Valid=1};};
+template<> struct CellTypeAndDataType<VTK_UNSTRUCTURED_GRID,VTK_HEXAHEDRON>{enum{Valid=1};};
+template<> struct CellTypeAndDataType<VTK_UNSTRUCTURED_GRID,VTK_QUAD>{enum{Valid=1};};
+template<> struct CellTypeAndDataType<VTK_UNSTRUCTURED_GRID,VTK_TETRA>{enum{Valid=1};};
+template<> struct CellTypeAndDataType<VTK_UNSTRUCTURED_GRID,VTK_TRIANGLE>{enum{Valid=1};};
+template<> struct CellTypeAndDataType<VTK_UNSTRUCTURED_GRID,VTK_WEDGE>{enum{Valid=1};};
+
+template<> struct CellTypeAndDataType<VTK_STRUCTURED_GRID,VTK_HEXAHEDRON>{enum{Valid=1};};
+}
+
+
+#endif // vtkToDax_CellTypeAndDataType_h
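The Valid flag is read at compile time by DataSetTypeToType (further down in this
commit). A small sketch of what the specializations above evaluate to:

  #include "vtkToDax/CellTypeAndDataType.h"

  // voxels in image data are one of the supported combinations ...
  int supported   = vtkToDax::CellTypeAndDataType<VTK_IMAGE_DATA, VTK_VOXEL>::Valid;    // 1
  // ... while any pairing without a specialization falls back to the default of 0
  int unsupported = vtkToDax::CellTypeAndDataType<VTK_IMAGE_DATA, VTK_TRIANGLE>::Valid; // 0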
diff --git a/Accelerators/Dax/vtkToDax/CellTypeToType.h b/Accelerators/Dax/vtkToDax/CellTypeToType.h
new file mode 100644
index 0000000..942e913
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/CellTypeToType.h
@@ -0,0 +1,98 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_CellTypeToType_h
+#define vtkToDax_CellTypeToType_h
+
+#include "vtkCellType.h"
+
+
+class vtkLine;
+class vtkHexahedron;
+class vtkQuad;
+class vtkTetra;
+class vtkTriangle;
+class vtkVertex;
+class vtkVoxel;
+class vtkWedge;
+
+#include <dax/CellTag.h>
+
+//ToDo: the output type mapping needs to be moved to a separate,
+//per-algorithm header that maps the input cell type to the output
+//cell type.
+namespace vtkToDax
+{
+template<typename T> struct CellTypeToType;
+template<> struct CellTypeToType<vtkLine>
+{
+  enum {VTKCellType=VTK_LINE};
+  enum {NUM_POINTS=2};
+  typedef dax::CellTagLine DaxCellType;
+};
+
+template<> struct CellTypeToType<vtkHexahedron>
+{
+  enum {VTKCellType=VTK_HEXAHEDRON};
+  enum {NUM_POINTS=8};
+  typedef dax::CellTagHexahedron DaxCellType;
+};
+
+template<> struct CellTypeToType<vtkQuad>
+{
+  enum {VTKCellType=VTK_QUAD};
+  enum {NUM_POINTS=4};
+  typedef dax::CellTagQuadrilateral DaxCellType;
+};
+
+
+template<> struct CellTypeToType<vtkTetra>
+{
+  enum {VTKCellType=VTK_TETRA};
+  enum {NUM_POINTS=4};
+  typedef dax::CellTagTetrahedron DaxCellType;
+};
+
+template<> struct CellTypeToType<vtkTriangle>
+{
+  enum {VTKCellType=VTK_TRIANGLE};
+  enum {NUM_POINTS=3};
+  typedef dax::CellTagTriangle DaxCellType;
+};
+
+template<> struct CellTypeToType<vtkVoxel>
+{
+  enum {VTKCellType=VTK_VOXEL};
+  enum {NUM_POINTS=8};
+  typedef dax::CellTagVoxel DaxCellType;
+};
+
+template<> struct CellTypeToType<vtkVertex>
+{
+  enum {VTKCellType=VTK_VERTEX};
+  enum {NUM_POINTS=1};
+  typedef dax::CellTagVertex DaxCellType;
+};
+
+template<> struct CellTypeToType<vtkWedge>
+{
+  enum {VTKCellType=VTK_WEDGE};
+  enum {NUM_POINTS=6};
+  typedef dax::CellTagWedge DaxCellType;
+};
+}
+
+
+#endif // vtkToDax_CellTypeToType_h
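A short sketch of reading one of the traits above; the typedef names are
illustrative only:

  #include "vtkToDax/CellTypeToType.h"

  // a VTK triangle carries 3 points per cell and maps to dax::CellTagTriangle
  typedef vtkToDax::CellTypeToType<vtkTriangle> TriTrait;
  int pointsPerTriangle = TriTrait::NUM_POINTS;      // 3
  typedef TriTrait::DaxCellType DaxTriangleTag;      // dax::CellTagTriangle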
diff --git a/Accelerators/Dax/vtkToDax/Containers.h b/Accelerators/Dax/vtkToDax/Containers.h
new file mode 100644
index 0000000..af48f8e
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/Containers.h
@@ -0,0 +1,373 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_Containers_h
+#define vtkToDax_Containers_h
+
+#include "vtkPoints.h"
+#include "vtkCellArray.h"
+
+#include "Portals.h"
+#include "Allocators.h"
+
+namespace vtkToDax {
+
+//tag to say we are creating an array container
+//of type vtkIdTypeArray, vtkFloatArray, etc
+template<typename VTKArrayType>
+struct vtkArrayContainerTag
+{
+  typedef VTKArrayType Type;
+};
+
+//this tag is used to construct points coordinates
+struct vtkPointsContainerTag
+{
+};
+
+//this tag is used to construct the vtkCellArray that holds
+//the cell topology
+template<typename CellType>
+struct vtkTopologyContainerTag
+{
+  typedef CellType Type;
+};
+}
+
+namespace dax {
+namespace cont {
+namespace internal {
+
+
+template <typename ValueT, typename VTKArrayType>
+class ArrayContainerControl<ValueT,vtkToDax::vtkArrayContainerTag<VTKArrayType> >
+{
+public:
+  typedef ValueT ValueType;
+  typedef vtkToDax::vtkArrayPortal<ValueType> PortalType;
+  typedef vtkToDax::vtkArrayPortal<const ValueType> PortalConstType;
+
+private:
+  //determine the number of components we need to allocate in the vtkArray
+  static const int NUM_COMPONENTS = dax::VectorTraits<ValueType>::NUM_COMPONENTS;
+
+  //construct the allocator with the right number of elements
+  typedef vtkToDax::vtkAlloc<VTKArrayType,NUM_COMPONENTS> AllocatorType;
+
+  //the allocated type from the allocator
+  typedef typename AllocatorType::pointer PointerType;
+
+public:
+
+  ArrayContainerControl() : Array(NULL), NumberOfValues(0) { }
+
+  ~ArrayContainerControl()
+  {
+    this->ReleaseResources();
+  }
+
+  void ReleaseResources()
+  {
+    if (this->NumberOfValues > 0)
+      {
+      DAX_ASSERT_CONT(this->Array != NULL);
+      AllocatorType allocator;
+      allocator.deallocate(this->Array, this->NumberOfValues);
+      this->Array = NULL;
+      this->NumberOfValues = 0;
+      }
+    else
+      {
+      DAX_ASSERT_CONT(this->Array == NULL);
+      }
+  }
+
+  void Allocate(dax::Id numberOfValues)
+  {
+    if (this->NumberOfValues == numberOfValues) return;
+
+    this->ReleaseResources();
+    try
+      {
+      if (numberOfValues > 0)
+        {
+        AllocatorType allocator;
+        this->Array = allocator.allocate(numberOfValues);
+        this->NumberOfValues = numberOfValues;
+        }
+      else
+        {
+        // ReleaseResources should have already set NumberOfValues to 0.
+        DAX_ASSERT_CONT(this->NumberOfValues == 0);
+        }
+      }
+    catch (const std::bad_alloc &)
+      {
+      // Make sure our state is OK.
+      this->Array = NULL;
+      this->NumberOfValues = 0;
+      throw dax::cont::ErrorControlOutOfMemory(
+            "Could not allocate basic control array.");
+      }
+  }
+
+  dax::Id GetNumberOfValues() const
+  {
+    return this->NumberOfValues;
+  }
+
+  void Shrink(dax::Id numberOfValues)
+  {
+    if (numberOfValues > this->GetNumberOfValues())
+      {
+      throw dax::cont::ErrorControlBadValue(
+            "Shrink method cannot be used to grow array.");
+      }
+
+    this->NumberOfValues = numberOfValues;
+  }
+
+  PortalType GetPortal()
+  {
+    return PortalType(this->Array, this->NumberOfValues);
+  }
+
+  PortalConstType GetPortalConst() const
+  {
+    return PortalConstType(this->Array, this->NumberOfValues);
+  }
+
+private:
+  // Not implemented.
+  ArrayContainerControl(const ArrayContainerControl<ValueType, vtkToDax::vtkArrayContainerTag<VTKArrayType> > &src);
+  void operator=(const ArrayContainerControl<ValueType, vtkToDax::vtkArrayContainerTag<VTKArrayType> > &src);
+
+  PointerType Array;
+  dax::Id NumberOfValues;
+};
+
+template <typename ValueT>
+class ArrayContainerControl<ValueT,vtkToDax::vtkPointsContainerTag>
+{
+public:
+  typedef ValueT ValueType;
+  //construct the portals type to be used with this container
+  typedef vtkToDax::vtkPointsPortal<ValueType> PortalType;
+  typedef vtkToDax::vtkPointsPortal<const ValueType> PortalConstType;
+
+private:
+  //determine the allocator type and pointer type for this container
+  typedef vtkToDax::vtkAlloc<vtkPoints,3> AllocatorType;
+  //the pointer type tells us the type of what the allocator returns
+  typedef typename AllocatorType::pointer PointerType;
+
+public:
+
+  ArrayContainerControl() : Array(NULL), NumberOfValues(0) { }
+
+  ~ArrayContainerControl()
+  {
+    this->ReleaseResources();
+  }
+
+  void ReleaseResources()
+  {
+    if (this->NumberOfValues > 0)
+      {
+      DAX_ASSERT_CONT(this->Array != NULL);
+      AllocatorType allocator;
+      allocator.deallocate(this->Array, this->NumberOfValues);
+      this->Array = NULL;
+      this->NumberOfValues = 0;
+      }
+    else
+      {
+      DAX_ASSERT_CONT(this->Array == NULL);
+      }
+  }
+
+  void Allocate(dax::Id numberOfValues)
+  {
+    if (this->NumberOfValues == numberOfValues) return;
+
+    this->ReleaseResources();
+    try
+      {
+      if (numberOfValues > 0)
+        {
+        AllocatorType allocator;
+        this->Array = allocator.allocate(numberOfValues);
+        this->NumberOfValues = numberOfValues;
+        }
+      else
+        {
+        // ReleaseResources should have already set NumberOfValues to 0.
+        DAX_ASSERT_CONT(this->NumberOfValues == 0);
+        }
+      }
+    catch (const std::bad_alloc &)
+      {
+      // Make sure our state is OK.
+      this->Array = NULL;
+      this->NumberOfValues = 0;
+      throw dax::cont::ErrorControlOutOfMemory(
+            "Could not allocate basic control array.");
+      }
+  }
+
+  dax::Id GetNumberOfValues() const
+  {
+    return this->NumberOfValues;
+  }
+
+  void Shrink(dax::Id numberOfValues)
+  {
+    if (numberOfValues > this->GetNumberOfValues())
+      {
+      throw dax::cont::ErrorControlBadValue(
+            "Shrink method cannot be used to grow array.");
+      }
+
+    this->NumberOfValues = numberOfValues;
+  }
+
+  PortalType GetPortal()
+  {
+    return PortalType(this->Array, this->NumberOfValues);
+  }
+
+  PortalConstType GetPortalConst() const
+  {
+    return PortalConstType(this->Array, this->NumberOfValues);
+  }
+
+private:
+  // Not implemented.
+  ArrayContainerControl(const ArrayContainerControl<ValueType, vtkToDax::vtkPointsContainerTag> &src);
+  void operator=(const ArrayContainerControl<ValueType, vtkToDax::vtkPointsContainerTag> &src);
+
+  PointerType Array;
+  dax::Id NumberOfValues;
+};
+
+template <typename ValueT, typename CellType>
+class ArrayContainerControl<ValueT,vtkToDax::vtkTopologyContainerTag<CellType> >
+{
+public:
+  typedef ValueT ValueType;
+  typedef vtkToDax::vtkTopologyPortal<ValueType, CellType::NUM_POINTS> PortalType;
+  typedef vtkToDax::vtkTopologyPortal<const ValueType, CellType::NUM_POINTS > PortalConstType;
+
+private:
+  //determine the allocator type and pointer type for this container
+  typedef vtkToDax::vtkAlloc<vtkCellArray, CellType::NUM_POINTS > AllocatorType;
+  //the pointer type tells us the type of what the allocator returns
+  typedef typename AllocatorType::pointer PointerType;
+
+public:
+
+  ArrayContainerControl() : Array(NULL), NumberOfValues(0) { }
+
+  ~ArrayContainerControl()
+  {
+    this->ReleaseResources();
+  }
+
+  void ReleaseResources()
+  {
+    if (this->NumberOfValues > 0)
+      {
+      DAX_ASSERT_CONT(this->Array != NULL);
+      AllocatorType allocator;
+      allocator.deallocate(this->Array, this->NumberOfValues);
+      this->Array = NULL;
+      this->NumberOfValues = 0;
+      }
+    else
+      {
+      DAX_ASSERT_CONT(this->Array == NULL);
+      }
+  }
+
+  void Allocate(dax::Id numberOfValues)
+  {
+    if (this->NumberOfValues == numberOfValues) return;
+
+    this->ReleaseResources();
+    try
+      {
+      if (numberOfValues > 0)
+        {
+        AllocatorType allocator;
+        this->Array = allocator.allocate(numberOfValues);
+        this->NumberOfValues = numberOfValues;
+        }
+      else
+        {
+        // ReleaseResources should have already set NumberOfValues to 0.
+        DAX_ASSERT_CONT(this->NumberOfValues == 0);
+        }
+      }
+    catch (const std::bad_alloc &)
+      {
+      // Make sure our state is OK.
+      this->Array = NULL;
+      this->NumberOfValues = 0;
+      throw dax::cont::ErrorControlOutOfMemory(
+            "Could not allocate basic control array.");
+      }
+  }
+
+  dax::Id GetNumberOfValues() const
+  {
+    return this->NumberOfValues;
+  }
+
+  void Shrink(dax::Id numberOfValues)
+  {
+    if (numberOfValues > this->GetNumberOfValues())
+      {
+      throw dax::cont::ErrorControlBadValue(
+            "Shrink method cannot be used to grow array.");
+      }
+
+    this->NumberOfValues = numberOfValues;
+  }
+
+  PortalType GetPortal()
+  {
+    return PortalType(this->Array, this->NumberOfValues);
+  }
+
+  PortalConstType GetPortalConst() const
+  {
+    return PortalConstType(this->Array, this->NumberOfValues);
+  }
+
+private:
+  // Not implemented.
+  ArrayContainerControl(const ArrayContainerControl<ValueType, vtkToDax::vtkTopologyContainerTag<CellType> > &src);
+  void operator=(const ArrayContainerControl<ValueType, vtkToDax::vtkTopologyContainerTag<CellType> > &src);
+
+  PointerType Array;
+  dax::Id NumberOfValues;
+};
+
+}
+}
+}
+
+#endif //vtkToDax_Containers_h
diff --git a/Accelerators/Dax/vtkToDax/DataSetConverters.h b/Accelerators/Dax/vtkToDax/DataSetConverters.h
new file mode 100644
index 0000000..2182129
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/DataSetConverters.h
@@ -0,0 +1,130 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_DataSetConverter_h
+#define vtkToDax_DataSetConverter_h
+
+//datasets we support
+#include "vtkDataObjectTypes.h"
+#include "vtkCellTypes.h"
+#include "vtkCellArray.h"
+#include "vtkImageData.h"
+#include "vtkUniformGrid.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkStructuredGrid.h"
+
+#include <dax/cont/ArrayHandle.h>
+#include <dax/cont/UniformGrid.h>
+#include <dax/cont/UnstructuredGrid.h>
+
+#include "CellTypeToType.h"
+#include "DataSetTypeToType.h"
+
+#include "Portals.h"
+#include "Containers.h"
+
+
+namespace vtkToDax
+{
+
+template<typename CellType>
+inline void convertTopology(vtkUnstructuredGrid* input,
+                     std::vector<dax::Id>& topo)
+{
+  enum{NUM_POINTS_IN_CELL=CellType::NUM_POINTS};
+  vtkIdType size = input->GetNumberOfCells();
+  topo.reserve(size*NUM_POINTS_IN_CELL); //reserve room so we don't have to realloc
+  vtkCellArray *cells = input->GetCells();
+
+  vtkIdType npts, *pts;
+  cells->InitTraversal();
+  while(cells->GetNextCell(npts,pts))
+    {
+    std::copy(pts,pts+npts,std::back_inserter(topo));
+    }
+}
+
+
+
+//convert an image data type
+template<typename VTKDataSetType>
+inline typename VTKDataSetType::DaxDataSetType dataSetConverter(
+    vtkImageData* input,
+    VTKDataSetType)
+  {
+  typedef typename VTKDataSetType::DaxDataSetType DataSet;
+  double origin[3];input->GetOrigin(origin);
+  double spacing[3];input->GetSpacing(spacing);
+  int extent[6];input->GetExtent(extent);
+
+  //this would be image data
+  DataSet output;
+  output.SetOrigin(dax::make_Vector3(origin[0],origin[1],origin[2]));
+  output.SetSpacing(dax::make_Vector3(spacing[0],spacing[1],spacing[2]));
+  output.SetExtent(dax::make_Id3(extent[0],extent[2],extent[4]),
+                   dax::make_Id3(extent[1],extent[3],extent[5]));
+  return output;
+  }
+
+//convert an uniform grid type
+template<typename VTKDataSetType>
+inline typename VTKDataSetType::DaxDataSetType dataSetConverter(
+    vtkUniformGrid* input,
+    VTKDataSetType)
+  {
+  typedef typename VTKDataSetType::DaxDataSetType DataSet;
+  double origin[3];input->GetOrigin(origin);
+  double spacing[3];input->GetSpacing(spacing);
+  int extent[6];input->GetExtent(extent);
+
+  //this would be uniform grid data (same layout as image data)
+  DataSet output;
+  output.SetOrigin(dax::make_Vector3(origin[0],origin[1],origin[2]));
+  output.SetSpacing(dax::make_Vector3(spacing[0],spacing[1],spacing[2]));
+  output.SetExtent(dax::make_Id3(extent[0],extent[2],extent[4]),
+                   dax::make_Id3(extent[1],extent[3],extent[5]));
+  return output;
+  }
+
+//convert an unstructured grid type
+template<typename VTKDataSetType>
+inline typename VTKDataSetType::DaxDataSetType dataSetConverter(
+    vtkUnstructuredGrid* input,
+    VTKDataSetType)
+  {
+  //we convert to an unstructured grid
+  //this uses the vtkTopologyContainerTag container to get the needed topology information
+  typedef typename VTKDataSetType::DaxDataSetType DataSet;
+  typedef typename VTKDataSetType::CellTypeToType CellTypeToType;
+
+  static const int NUM_POINTS = VTKDataSetType::CellTypeToType::NUM_POINTS;
+
+
+  dax::cont::ArrayHandle<dax::Vector3,vtkToDax::vtkPointsContainerTag>
+      pointsHandle(vtkToDax::vtkPointsPortal<dax::Vector3>(input->GetPoints(),
+                                                           input->GetNumberOfPoints()));
+
+  //
+  dax::cont::ArrayHandle<dax::Id,vtkToDax::vtkTopologyContainerTag<CellTypeToType> >
+      topoHandle(vtkToDax::vtkTopologyPortal<dax::Id, NUM_POINTS >(input->GetCells(),
+                                              input->GetNumberOfCells()*NUM_POINTS));
+
+  return DataSet(topoHandle,pointsHandle);
+  }
+}
+
+
+#endif // vtkToDax_DataSetConverter_h
diff --git a/Accelerators/Dax/vtkToDax/DataSetTypeToType.h b/Accelerators/Dax/vtkToDax/DataSetTypeToType.h
new file mode 100644
index 0000000..0dd9332
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/DataSetTypeToType.h
@@ -0,0 +1,78 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_DataSetTypeToType_h
+#define vtkToDax_DataSetTypeToType_h
+
+#include "vtkType.h"
+
+#include <dax/cont/UniformGrid.h>
+#include <dax/cont/UnstructuredGrid.h>
+
+class vtkImageData;
+class vtkUniformGrid;
+class vtkStructuredGrid;
+class vtkUnstructuredGrid;
+
+//header that determines the correct cell and datatypes that can be used together
+#include "CellTypeAndDataType.h"
+#include "Containers.h"
+
+namespace vtkToDax
+{
+
+//empty implementation for datasets we don't support
+template<typename CellTypeToTypeDef, typename vtkDataSetType> struct DataSetTypeToType
+{
+  typedef CellTypeToTypeDef CellTypeToType;
+  typedef typename CellTypeToTypeDef::DaxCellType DaxCellType;
+  enum {VTKDataSetType=-1};
+  enum {Valid=false};
+  typedef bool DaxDataSetType;
+};
+
+template<typename CellTypeToTypeDef> struct DataSetTypeToType<CellTypeToTypeDef,vtkImageData>
+{
+  typedef CellTypeToTypeDef CellTypeToType;
+  typedef typename CellTypeToTypeDef::DaxCellType DaxCellType;
+  enum {VTKDataSetType=VTK_IMAGE_DATA};
+  enum {Valid=(CellTypeAndDataType<VTK_IMAGE_DATA,CellTypeToTypeDef::VTKCellType>::Valid)};
+  typedef dax::cont::UniformGrid<> DaxDataSetType;
+};
+
+template<typename CellTypeToTypeDef> struct DataSetTypeToType<CellTypeToTypeDef,vtkUniformGrid>
+{
+  typedef CellTypeToTypeDef CellTypeToType;
+  typedef typename CellTypeToTypeDef::DaxCellType DaxCellType;
+  enum {VTKDataSetType=VTK_UNIFORM_GRID};
+  enum {Valid=(CellTypeAndDataType<VTK_UNIFORM_GRID,CellTypeToTypeDef::VTKCellType>::Valid)};
+  typedef dax::cont::UniformGrid<> DaxDataSetType;
+};
+
+template<typename CellTypeToTypeDef> struct DataSetTypeToType<CellTypeToTypeDef,vtkUnstructuredGrid>
+{
+  typedef CellTypeToTypeDef CellTypeToType;
+  typedef typename CellTypeToTypeDef::DaxCellType DaxCellType;
+  enum {VTKDataSetType=VTK_UNSTRUCTURED_GRID};
+  enum {Valid=(CellTypeAndDataType<VTK_UNSTRUCTURED_GRID,CellTypeToTypeDef::VTKCellType>::Valid)};
+  typedef dax::cont::UnstructuredGrid<DaxCellType,
+          vtkToDax::vtkTopologyContainerTag<CellTypeToType>,
+          vtkToDax::vtkPointsContainerTag>
+          DaxDataSetType;
+};
+}
+
+#endif // vtkToDax_DataSetTypeToType_h
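Combining the two trait headers, a sketch of what the compile-time mapping looks
like for voxel cells in a vtkImageData (typedef names invented for illustration):

  #include "vtkToDax/CellTypeToType.h"
  #include "vtkToDax/DataSetTypeToType.h"

  typedef vtkToDax::DataSetTypeToType<vtkToDax::CellTypeToType<vtkVoxel>,
                                      vtkImageData> ImageTrait;
  int imageVoxelIsValid = ImageTrait::Valid;            // 1, per CellTypeAndDataType
  typedef ImageTrait::DaxDataSetType DaxUniformGrid;    // dax::cont::UniformGrid<>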
diff --git a/Accelerators/Dax/vtkToDax/FieldTypeToType.h b/Accelerators/Dax/vtkToDax/FieldTypeToType.h
new file mode 100644
index 0000000..7593162
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/FieldTypeToType.h
@@ -0,0 +1,100 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_FieldTypeToType_h
+#define vtkToDax_FieldTypeToType_h
+
+#include <stdint.h>
+
+class vtkIntArray;
+class vtkIdTypeArray;
+class vtkFloatArray;
+class vtkUnsignedCharArray;
+
+#include <dax/Types.h>
+
+namespace vtkToDax
+{
+
+template<typename T, int NUM_COMP> struct FieldTypeToType;
+
+template<int NUM_COMP>
+struct FieldTypeToType<vtkIntArray,NUM_COMP>
+{
+  static const int NUM_COMPNENTS = NUM_COMP;
+  typedef dax::Tuple<dax::Id,NUM_COMPNENTS> FieldType;
+  typedef dax::Id ComponentType;
+};
+
+template<>
+struct FieldTypeToType<vtkIntArray,1>
+{
+  static const int NUM_COMPNENTS = 1;
+  typedef dax::Id FieldType;
+  typedef dax::Id ComponentType;
+};
+
+template<int NUM_COMP>
+struct FieldTypeToType<vtkIdTypeArray,NUM_COMP>
+{
+  static const int NUM_COMPNENTS = NUM_COMP;
+  typedef dax::Tuple<dax::Id,NUM_COMPNENTS> FieldType;
+  typedef dax::Id ComponentType;
+};
+
+template<>
+struct FieldTypeToType<vtkIdTypeArray,1>
+{
+  static const int NUM_COMPNENTS = 1;
+  typedef dax::Id FieldType;
+  typedef dax::Id ComponentType;
+};
+
+
+template<int NUM_COMP>
+struct FieldTypeToType<vtkFloatArray,NUM_COMP>
+{
+  static const int NUM_COMPNENTS = NUM_COMP;
+  typedef dax::Tuple<dax::Scalar,NUM_COMPNENTS> FieldType;
+  typedef dax::Scalar ComponentType;
+};
+
+template<>
+struct FieldTypeToType<vtkFloatArray,1>
+{
+  static const int NUM_COMPNENTS = 1;
+  typedef dax::Scalar FieldType;
+  typedef dax::Scalar ComponentType;
+};
+
+template<int NUM_COMP>
+struct FieldTypeToType<vtkUnsignedCharArray,NUM_COMP>
+{
+  static const int NUM_COMPNENTS = NUM_COMP;
+  typedef dax::Tuple<uint8_t,NUM_COMPNENTS> FieldType;
+  typedef uint8_t ComponentType;
+};
+
+template<>
+struct FieldTypeToType<vtkUnsignedCharArray,1>
+{
+  static const int NUM_COMPNENTS = 1;
+  typedef uint8_t FieldType;
+  typedef uint8_t ComponentType;
+};
+}
+
+#endif // vtkToDax_FieldTypeToType_h
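A sketch of the field mapping above (comments show the resulting dax types; the
typedef names are illustrative only):

  #include "vtkToDax/FieldTypeToType.h"

  // a 3-component vtkFloatArray becomes a tuple of dax scalars,
  // while a 1-component vtkIntArray collapses to a plain dax::Id
  typedef vtkToDax::FieldTypeToType<vtkFloatArray, 3>::FieldType Vec3Field; // dax::Tuple<dax::Scalar,3>
  typedef vtkToDax::FieldTypeToType<vtkIntArray, 1>::FieldType   IdField;   // dax::Id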
diff --git a/Accelerators/Dax/vtkToDax/MarchingCubes.h b/Accelerators/Dax/vtkToDax/MarchingCubes.h
new file mode 100644
index 0000000..36ddd57
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/MarchingCubes.h
@@ -0,0 +1,199 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_MarchingCubes_h
+#define vtkToDax_MarchingCubes_h
+
+#include "vtkPolyData.h"
+
+#include "DataSetTypeToType.h"
+#include "CellTypeToType.h"
+#include "DataSetConverters.h"
+#include "daxToVtk/CellTypeToType.h"
+#include "daxToVtk/DataSetConverters.h"
+
+#include <dax/cont/Scheduler.h>
+#include <dax/cont/GenerateInterpolatedCells.h>
+#include <dax/worklet/MarchingCubes.h>
+
+namespace
+{
+template <typename T> struct MarchingCubesOuputType
+{
+  typedef dax::CellTagTriangle type;
+};
+
+}
+
+namespace vtkToDax
+{
+  template<int B>
+  struct DoMarchingCubes
+  {
+    template<class InGridType,
+             class OutGridType,
+             typename ValueType,
+             class Container1,
+             class Adapter>
+    int operator()(const InGridType &,
+                   OutGridType &,
+                   ValueType,
+                   const dax::cont::ArrayHandle<ValueType,Container1,Adapter> &)
+      {
+      return 0;
+      }
+  };
+  template<>
+  struct DoMarchingCubes<1>
+  {
+    template<class InGridType,
+             class OutGridType,
+             typename ValueType,
+             class Container1,
+             class Adapter>
+    int operator()(
+        const InGridType &inGrid,
+        OutGridType &outGeom,
+        ValueType isoValue,
+        const dax::cont::ArrayHandle<ValueType,Container1,Adapter> &mcHandle)
+      {
+      int result=1;
+
+      dax::Scalar isoValueT(isoValue);
+
+      try
+        {
+        //we don't want to use the custom container, so specify the default
+        //container for the classification storage.
+        typedef dax::cont::GenerateInterpolatedCells<
+              dax::worklet::MarchingCubesGenerate> GenerateIC;
+
+        // construct the scheduler that will execute all the worklets
+        dax::cont::Scheduler<Adapter> scheduler;
+
+        // construct the two worklets that will be used to do marching cubes
+        dax::worklet::MarchingCubesClassify classifyWorklet(isoValueT);
+        dax::worklet::MarchingCubesGenerate generateWorklet(isoValueT);
+
+        // run the first step
+        typedef typename GenerateIC::ClassifyResultType ClassifyResultType;
+        ClassifyResultType classification; // array handle for the
+                                           // first step
+                                           // (classification)
+        scheduler.Invoke(classifyWorklet,
+                         inGrid,
+                         mcHandle,
+                         classification);
+
+        // construct the topology generation worklet
+        GenerateIC generate(classification,generateWorklet);
+        generate.SetRemoveDuplicatePoints(true);
+
+        // run the second step
+        scheduler.Invoke(generate,
+                         inGrid,
+                         outGeom,
+                         mcHandle);
+        }
+      catch(dax::cont::ErrorControlOutOfMemory error)
+        {
+        std::cerr << "Ran out of memory trying to use the GPU" << std::endl;
+        std::cerr << error.GetMessage() << std::endl;
+        result = 0;
+        }
+      catch(dax::cont::ErrorExecution error)
+        {
+        std::cerr << "Got ErrorExecution from Dax." << std::endl;
+        std::cerr << error.GetMessage() << std::endl;
+        result = 0;
+        }
+      return result;
+      }
+  };
+
+  template<typename FieldType_>
+  struct MarchingCubes
+  {
+    public:
+    typedef FieldType_ FieldType;
+    //we expect FieldType_ to be a dax::cont::ArrayHandle
+    typedef typename FieldType::ValueType T;
+
+    MarchingCubes(const FieldType& f, T value):
+      Result(NULL),
+      Field(f),
+      Value(value),
+      Name()
+      {
+      }
+
+    void setOutputGrid(vtkPolyData* grid)
+      {
+      Result=grid;
+      }
+
+    void setFieldName(const char* name)
+      {
+      Name=std::string(name);
+      }
+
+    template<typename LHS, typename RHS>
+    int operator()(LHS &dataSet, const RHS&) const
+      {
+      typedef CellTypeToType<RHS> VTKCellTypeStruct;
+      typedef DataSetTypeToType<CellTypeToType<RHS>,LHS> DataSetTypeToTypeStruct;
+
+      //get the mapped output type of this operation (MarchingCubes)
+      //todo make this a typedef on the MarchingCubes
+      typedef typename MarchingCubesOuputType< typename VTKCellTypeStruct::DaxCellType >::type OutCellType;
+
+      //get the input dataset type
+      typedef typename DataSetTypeToTypeStruct::DaxDataSetType InputDataSetType;
+
+      //construct the output grid type to use the vtk containers
+      //as we know we are going back to vtk. In a more general framework
+      //we would want a tag to say what the destination container tag types
+      //are.
+      typedef daxToVtk::CellTypeToType<OutCellType> VTKCellType;
+      dax::cont::UnstructuredGrid<OutCellType,
+                 vtkToDax::vtkTopologyContainerTag<VTKCellType>,
+                 vtkToDax::vtkPointsContainerTag > resultGrid;
+
+      InputDataSetType inputDaxData = vtkToDax::dataSetConverter(&dataSet,
+                                                     DataSetTypeToTypeStruct());
+
+      vtkToDax::DoMarchingCubes<DataSetTypeToTypeStruct::Valid> mc;
+      int result = mc(inputDaxData,
+                       resultGrid,
+                       this->Value,
+                       this->Field);
+      if(result==1 && resultGrid.GetNumberOfCells() > 0)
+        {
+        daxToVtk::dataSetConverter(resultGrid,this->Result);
+        }
+
+      return result;
+      }
+  private:
+    vtkPolyData* Result;
+    FieldType Field;
+    T Value;
+    std::string Name;
+
+  };
+}
+
+#endif //vtkToDax_MarchingCubes_h
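A hedged sketch of configuring the MarchingCubes functor directly, mirroring the
zero-copy field-handle construction used by vtkDaxThresholdImpl.h earlier in this
commit; the functor is normally invoked through the same double-dispatch pattern,
and the function name here is hypothetical:

  #include <dax/cont/ArrayHandle.h>
  #include "vtkToDax/Containers.h"
  #include "vtkToDax/MarchingCubes.h"
  #include "vtkFloatArray.h"
  #include "vtkPolyData.h"

  void marchingCubesSketch(vtkFloatArray* scalars, vtkPolyData* output)
  {
    // wrap the existing vtkFloatArray in a dax handle without copying it
    typedef vtkToDax::vtkArrayContainerTag<vtkFloatArray> FieldTag;
    typedef dax::cont::ArrayHandle<dax::Scalar, FieldTag> FieldHandle;
    typedef FieldHandle::PortalConstControl PortalType;

    FieldHandle field =
        FieldHandle(PortalType(scalars, scalars->GetNumberOfTuples()));

    // contour at iso-value 0.5; the dispatcher supplies the concrete
    // dataset/cell pair when it calls mc(dataSet, cell)
    vtkToDax::MarchingCubes<FieldHandle> mc(field, dax::Scalar(0.5));
    mc.setFieldName(scalars->GetName());
    mc.setOutputGrid(output);
  }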
diff --git a/Accelerators/Dax/vtkToDax/Portals.h b/Accelerators/Dax/vtkToDax/Portals.h
new file mode 100644
index 0000000..7db2d33
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/Portals.h
@@ -0,0 +1,368 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_vtkPointsContainer_h
+#define vtkToDax_vtkPointsContainer_h
+
+#include "vtkCellArray.h"
+#include "vtkDataArray.h"
+#include "vtkPoints.h"
+
+#include <dax/Types.h>
+#include <dax/VectorTraits.h>
+#include <dax/cont/ArrayPortal.h>
+#include <dax/cont/internal/IteratorFromArrayPortal.h>
+#include <iterator>
+
+//this is needed so that we can properly deduce if we are a const
+//portal, and if we are a const portal, properly move the const
+//to each value of the dax tuple, instead of the tuple itself
+#include <boost/type_traits/remove_const.hpp>
+#include <boost/type_traits/is_const.hpp>
+#include <boost/type_traits/is_base_of.hpp>
+#include <boost/mpl/if.hpp>
+
+namespace
+{
+template<int N>
+struct fillComponents
+{
+  template<typename T, typename Tuple>
+  void operator()(T* t, const Tuple& tuple) const
+  {
+    fillComponents<N-1>()(t,tuple);
+    t[N-1]=dax::VectorTraits<Tuple>::GetComponent(tuple,N-1);
+  }
+};
+
+template<>
+struct fillComponents<1>
+  {
+  template<typename T, typename Tuple>
+  void operator()(T* t, const Tuple& tuple) const
+  {
+    t[0]=dax::VectorTraits<Tuple>::GetComponent(tuple,0);
+  }
+};
+
+template<int N>
+struct readComponents
+{
+  template<typename T, typename Tuple>
+  void operator()(const T* t, Tuple& tuple) const
+  {
+    readComponents<N-1>()(t,tuple);
+    dax::VectorTraits<Tuple>::SetComponent(tuple,N-1,t[N-1]);
+  }
+};
+
+template<>
+struct readComponents<1>
+{
+  template<typename T, typename Tuple>
+  void operator()(const T* t, Tuple& tuple) const
+  {
+    dax::VectorTraits<Tuple>::SetComponent(tuple,0,t[0]);
+  }
+};
+
+template<typename ValueType, int N>
+struct readVector
+{
+  template<typename T>
+  ValueType operator()(const T* rawArray)
+  {
+  ValueType temp;
+  readComponents<N>()(rawArray,temp);
+  return temp;
+  }
+};
+
+template<typename ValueType>
+struct readVector<ValueType,1>
+{
+  template<typename T>
+  ValueType operator()(const T* rawArray)
+  {
+  return ValueType(*rawArray);
+  }
+};
+
+
+template<typename T>
+struct ConstCorrectedType
+{
+  //get the number of components in T
+  static const int NUM_COMPONENTS = dax::VectorTraits<T>::NUM_COMPONENTS;
+  typedef typename dax::VectorTraits<T>::ComponentType ComponentType;
+
+  //on vtkArrayPortal and vtkPointsPortal we are generating
+  //T on the fly, so the exposed value type should not be const T;
+  //we strip the const and use the plain type instead
+  typedef typename boost::is_const<T>::type isConst;
+  typedef typename boost::mpl::if_<isConst,correctConstT,T>::type Type;
+};
+
+}
+
+namespace vtkToDax
+{
+
+
+template <typename Type,
+          int NUM_COMPONENTS = dax::VectorTraits<Type>::NUM_COMPONENTS>
+class vtkArrayPortal
+{
+public:
+  typedef typename ConstCorrectedType<Type>::Type ValueType;
+  typedef typename ConstCorrectedType<Type>::ComponentType ComponentType;
+
+  DAX_CONT_EXPORT vtkArrayPortal():
+    Data(NULL),
+    Array(NULL),
+    Size(0)
+    {
+    }
+
+  DAX_CONT_EXPORT vtkArrayPortal(vtkDataArray* array, dax::Id size):
+    Data(array),
+    Array(static_cast<ComponentType*>(array->GetVoidPointer(0))),
+    Size(size)
+    {
+    DAX_ASSERT_CONT(this->GetNumberOfValues() >= 0);
+    }
+
+
+  /// Copy constructor for any other vtkArrayPortal with an iterator
+  /// type that can be copied to this iterator type. This allows us to do any
+  /// type casting that the iterators do (like the non-const to const cast).
+  ///
+  template<typename OtherType>
+  DAX_CONT_EXPORT
+  vtkArrayPortal(const vtkArrayPortal<OtherType> &src):
+    Data(src.GetVtkData()),
+    Array(static_cast<ComponentType*>(src.GetVtkData()->GetVoidPointer(0))),
+    Size(src.GetNumberOfValues())
+    {
+    }
+
+  DAX_CONT_EXPORT
+  dax::Id GetNumberOfValues() const
+    {
+    return this->Size;
+    }
+
+  DAX_CONT_EXPORT
+  ValueType Get(dax::Id index) const
+    {
+    const ComponentType *rawArray = this->Array + (index * NUM_COMPONENTS);
+    return readVector<ValueType,NUM_COMPONENTS>()(rawArray);
+    }
+
+  DAX_CONT_EXPORT
+  void Set(dax::Id index, const ValueType& value) const
+    {
+    ComponentType *rawArray = this->Array + (index * NUM_COMPONENTS);
+    //use template magic to auto unroll insertion
+    fillComponents<NUM_COMPONENTS>()(rawArray,value);
+    }
+
+  typedef dax::cont::internal::IteratorFromArrayPortal<vtkArrayPortal>
+                                                                  IteratorType;
+  DAX_CONT_EXPORT IteratorType GetIteratorBegin() const
+    {
+    return IteratorType(*this, 0);
+    }
+
+  DAX_CONT_EXPORT IteratorType GetIteratorEnd() const
+    {
+    return IteratorType(*this, this->Size);
+    }
+
+  vtkDataArray* GetVtkData() const { return Data; }
+
+private:
+  vtkDataArray* Data;
+  ComponentType *Array;
+  dax::Id Size;
+
+};
+
+
+template <typename Type,
+          int NUM_COMPONENTS = dax::VectorTraits<Type>::NUM_COMPONENTS>
+class vtkPointsPortal
+{
+public:
+  typedef typename ConstCorrectedType<Type>::Type ValueType;
+  typedef typename ConstCorrectedType<Type>::ComponentType ComponentType;
+
+  DAX_CONT_EXPORT vtkPointsPortal():
+    Points(NULL),
+    Array(NULL),
+    Size(0)
+    {
+    }
+
+  DAX_CONT_EXPORT vtkPointsPortal(vtkPoints* points, dax::Id size):
+    Points(points),
+    Array(static_cast<ComponentType*>(points->GetVoidPointer(0))),
+    Size(size)
+    {
+    DAX_ASSERT_CONT(this->GetNumberOfValues() >= 0);
+    }
+
+  /// Copy constructor for any other vtkArrayPortal with an iterator
+  /// type that can be copied to this iterator type. This allows us to do any
+  /// type casting that the iterators do (like the non-const to const cast).
+  ///
+  template<typename OtherType>
+  DAX_CONT_EXPORT
+  vtkPointsPortal(const vtkPointsPortal<OtherType> &src):
+    Points(src.GetVtkData()),
+    Array(static_cast<ComponentType*>(src.GetVtkData()->GetVoidPointer(0))),
+    Size(src.GetNumberOfValues())
+    {
+    }
+
+  DAX_CONT_EXPORT
+  dax::Id GetNumberOfValues() const
+    {
+    return this->Size;
+    }
+
+  DAX_CONT_EXPORT
+  ValueType Get(dax::Id index) const
+    {
+    return ValueType(this->Array+(index*NUM_COMPONENTS));
+    }
+
+  DAX_CONT_EXPORT
+  void Set(dax::Id index, const ValueType& value) const
+    {
+    ComponentType *rawArray = this->Array + (index * NUM_COMPONENTS);
+    //use template magic to auto unroll insertion
+    fillComponents<NUM_COMPONENTS>()(rawArray,value);
+    }
+
+  typedef dax::cont::internal::IteratorFromArrayPortal<vtkPointsPortal>
+                                                                  IteratorType;
+  DAX_CONT_EXPORT IteratorType GetIteratorBegin() const
+    {
+    return IteratorType(*this, 0);
+    }
+
+  DAX_CONT_EXPORT IteratorType GetIteratorEnd() const
+    {
+    return IteratorType(*this, this->Size);
+    }
+
+  vtkPoints* GetVtkData() const { return Points; }
+
+private:
+  vtkPoints* Points;
+  ComponentType *Array;
+  dax::Id Size;
+
+};
+
+//A topology portal's goal is to make the vtkCellArray for a single, fixed cell type
+//look like a dax topology layout. This means that we skip over the elements
+//in the vtkCellArray that state how many points are in each cell,
+//so for example a vtkCellArray of triangles is stored like:
+// 3, 0, 2, 1, 3, 0, 3, 1
+//and we want it to be in Dax:
+// 0, 2, 1, 0, 3, 1
+
+template<typename T, int PointsPerCell>
+class vtkTopologyPortal
+{
+public:
+  typedef T ValueType;
+  DAX_CONT_EXPORT vtkTopologyPortal():
+    CellArray(NULL),
+    RawCells(NULL),
+    Size(0)
+    {
+    }
+
+  //daxTopoLen should be the length of the cell topology array as far
+  //as dax is concerned.
+  DAX_CONT_EXPORT vtkTopologyPortal(vtkCellArray* cells, dax::Id daxTopoLen):
+    CellArray(cells),
+    RawCells(cells->GetPointer()),
+    Size(daxTopoLen)
+    {
+    DAX_ASSERT_CONT(this->GetNumberOfValues() >= 0);
+    DAX_ASSERT_CONT(this->CellArray->GetNumberOfConnectivityEntries() >=
+                    daxTopoLen + (daxTopoLen/PointsPerCell));
+    }
+
+  /// Copy constructor for any other vtkArrayPortal with an iterator
+  /// type that can be copied to this iterator type. This allows us to do any
+  /// type casting that the iterators do (like the non-const to const cast).
+  ///
+  template<typename OtherType>
+  DAX_CONT_EXPORT
+  vtkTopologyPortal(const vtkTopologyPortal<OtherType,PointsPerCell> &src):
+    CellArray(src.GetVtkData()),
+    RawCells(src.GetVtkData()->GetPointer()),
+    Size(src.GetNumberOfValues())
+  {
+  }
+
+  DAX_CONT_EXPORT
+  dax::Id GetNumberOfValues() const{
+    return this->Size;
+    }
+
+
+  DAX_CONT_EXPORT
+  ValueType Get(dax::Id index) const{
+    return this->RawCells[1 + index + (index/PointsPerCell) ];
+  }
+
+  DAX_CONT_EXPORT
+  void Set(dax::Id index, const ValueType& value) const{
+    this->RawCells[1 + index + index/PointsPerCell]=value;
+  }
+
+  typedef dax::cont::internal::IteratorFromArrayPortal<vtkTopologyPortal>
+                                                                  IteratorType;
+  DAX_CONT_EXPORT IteratorType GetIteratorBegin() const
+    {
+    return IteratorType(*this, 0);
+    }
+
+  DAX_CONT_EXPORT IteratorType GetIteratorEnd() const
+    {
+    return IteratorType(*this, this->Size);
+    }
+
+  vtkCellArray* GetVtkData() const { return CellArray; }
+
+private:
+  vtkCellArray *CellArray;
+  vtkIdType *RawCells;
+  dax::Id Size;
+
+};
+
+}
+
+
+
+#endif // vtkToDax_vtkPointsContainer_h
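To make the index arithmetic in vtkTopologyPortal::Get/Set concrete, a small
worked example for triangles (PointsPerCell = 3); the function name is invented,
and the asserts follow directly from the storage layout sketched in the comment
above:

  #include <cassert>
  #include "vtkCellArray.h"
  #include "vtkToDax/Portals.h"

  void topologyPortalSketch()
  {
    // two triangles, stored by vtkCellArray as: 3, 0, 2, 1, 3, 0, 3, 1
    vtkCellArray* cells = vtkCellArray::New();
    vtkIdType tri0[3] = { 0, 2, 1 };
    vtkIdType tri1[3] = { 0, 3, 1 };
    cells->InsertNextCell(3, tri0);
    cells->InsertNextCell(3, tri1);

    // dax sees 6 connectivity entries: 0, 2, 1, 0, 3, 1
    vtkToDax::vtkTopologyPortal<vtkIdType, 3> portal(cells, 6);
    assert(portal.Get(0) == 0);  // RawCells[1 + 0 + 0/3] == RawCells[1]
    assert(portal.Get(3) == 0);  // RawCells[1 + 3 + 3/3] == RawCells[5]
    assert(portal.Get(5) == 1);  // RawCells[1 + 5 + 5/3] == RawCells[7]

    cells->Delete();
  }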
diff --git a/Accelerators/Dax/vtkToDax/Threshold.h b/Accelerators/Dax/vtkToDax/Threshold.h
new file mode 100644
index 0000000..9becae8
--- /dev/null
+++ b/Accelerators/Dax/vtkToDax/Threshold.h
@@ -0,0 +1,205 @@
+//=============================================================================
+//
+//  Copyright (c) Kitware, Inc.
+//  All rights reserved.
+//  See LICENSE.txt for details.
+//
+//  This software is distributed WITHOUT ANY WARRANTY; without even
+//  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//  PURPOSE.  See the above copyright notice for more information.
+//
+//  Copyright 2012 Sandia Corporation.
+//  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+//  the U.S. Government retains certain rights in this software.
+//
+//=============================================================================
+
+#ifndef vtkToDax_Threshold_h
+#define vtkToDax_Threshold_h
+
+#include "DataSetTypeToType.h"
+#include "CellTypeToType.h"
+#include "DataSetConverters.h"
+
+#include "daxToVtk/CellTypeToType.h"
+#include "daxToVtk/DataSetConverters.h"
+
+#include <dax/cont/Scheduler.h>
+#include <dax/cont/GenerateTopology.h>
+#include <dax/worklet/Threshold.h>
+
+namespace
+{
+template <typename T> struct ThresholdOuputType
+{
+  typedef T type;
+};
+template <> struct ThresholdOuputType< dax::CellTagVoxel >
+{
+  typedef dax::CellTagHexahedron type;
+};
+}
+
+namespace vtkToDax
+{
+  template<int B>
+  struct DoThreshold
+  {
+    template<class InGridType,
+             class OutGridType,
+             typename ValueType,
+             class Container1,
+             class Container2,
+             class Adapter>
+    int operator()(const InGridType &,
+                   OutGridType &,
+                   ValueType,
+                   ValueType,
+                   const dax::cont::ArrayHandle<ValueType,Container1,Adapter> &,
+                   dax::cont::ArrayHandle<ValueType,Container2,Adapter> &)
+      {
+      std::cout << "Not calling DAX, GridType and CellType combination not supported" << std::endl;
+      return 0;
+      }
+  };
+  template<>
+  struct DoThreshold<1>
+  {
+    template<class InGridType,
+             class OutGridType,
+             typename ValueType,
+             class Container1,
+             class Container2,
+             class Adapter>
+    int operator()(
+        const InGridType &inGrid,
+        OutGridType &outGeom,
+        ValueType thresholdMin,
+        ValueType thresholdMax,
+        const dax::cont::ArrayHandle<ValueType,Container1,Adapter> &thresholdHandle,
+        dax::cont::ArrayHandle<ValueType,Container2,Adapter> &thresholdResult)
+      {
+      int result=1;
+      try
+        {
+        //we don't want to use the custom container, so specify the default
+        //container for the classification storage.
+        typedef dax::cont::GenerateTopology<
+                          dax::worklet::ThresholdTopology> ScheduleGT;
+        typedef dax::worklet::ThresholdClassify<ValueType> ThresholdClassifyType;
+
+        dax::cont::Scheduler<Adapter> scheduler;
+
+        typedef typename ScheduleGT::ClassifyResultType ClassifyResultType;
+        ClassifyResultType classification;
+
+        scheduler.Invoke(ThresholdClassifyType(thresholdMin,thresholdMax),
+                 inGrid, thresholdHandle, classification);
+
+        ScheduleGT resolveTopology(classification);
+        //remove classification resource from execution for more space
+        scheduler.Invoke(resolveTopology,inGrid,outGeom);
+        resolveTopology.CompactPointField(thresholdHandle,thresholdResult);
+        }
+      catch(dax::cont::ErrorControlOutOfMemory error)
+        {
+        std::cerr << "Ran out of memory trying to use the GPU" << std::endl;
+        std::cerr << error.GetMessage() << std::endl;
+        result = 0;
+        }
+      catch(dax::cont::ErrorExecution error)
+        {
+        std::cerr << "Got ErrorExecution from Dax." << std::endl;
+        std::cerr << error.GetMessage() << std::endl;
+        result = 0;
+        }
+      return result;
+      }
+  };
+
+  template<typename FieldType_>
+  struct Threshold
+  {
+    public:
+    typedef FieldType_ FieldType;
+    //we expect FieldType_ to be a dax::cont::ArrayHandle
+    typedef typename FieldType::ValueType T;
+
+    Threshold(const FieldType& f, T min, T max):
+      Result(NULL),
+      Field(f),
+      Min(min),
+      Max(max),
+      Name()
+      {
+      }
+
+    void setOutputGrid(vtkUnstructuredGrid* grid)
+      {
+      Result=grid;
+      }
+
+    void setFieldName(const char* name)
+      {
+      Name=std::string(name);
+      }
+
+    template<typename LHS, typename RHS>
+    int operator()(LHS &dataSet, const RHS&) const
+      {
+      typedef CellTypeToType<RHS> VTKCellTypeStruct;
+      typedef DataSetTypeToType<CellTypeToType<RHS>,LHS> DataSetTypeToTypeStruct;
+
+      //get the mapped output type of this operation (threshold)
+      //todo make this a typedef on the threshold
+      typedef typename ThresholdOuputType< typename VTKCellTypeStruct::DaxCellType >::type OutCellType;
+
+      //get the input dataset type
+      typedef typename DataSetTypeToTypeStruct::DaxDataSetType InputDataSetType;
+
+      //construct the output grid type to use the vtk containers
+      //as we know we are going back to vtk. In a more general framework
+      //we would want a tag to say what the destination container tag types
+      //are
+      typedef daxToVtk::CellTypeToType<OutCellType> VTKCellType;
+      dax::cont::UnstructuredGrid<OutCellType,
+                vtkToDax::vtkTopologyContainerTag<VTKCellType>,
+                vtkToDax::vtkPointsContainerTag> resultGrid;
+
+      //get from the Field the proper handle type
+      FieldType outputHandle;
+
+      InputDataSetType inputDaxData = vtkToDax::dataSetConverter(&dataSet,
+                                                                 DataSetTypeToTypeStruct());
+
+      vtkToDax::DoThreshold<DataSetTypeToTypeStruct::Valid> threshold;
+      int result = threshold(inputDaxData,
+                       resultGrid,
+                       this->Min,
+                       this->Max,
+                       this->Field,
+                       outputHandle);
+
+      if(result==1 && resultGrid.GetNumberOfCells() > 0)
+        {
+        //if we converted correctly, copy the data back to VTK
+        //remembering to add the output array back into the generated
+        //unstructured grid
+        daxToVtk::addPointData(this->Result,outputHandle,this->Name);
+        daxToVtk::dataSetConverter(resultGrid,this->Result);
+        }
+
+      return result;
+
+      }
+  private:
+    vtkUnstructuredGrid* Result;
+    FieldType Field;
+    T Min;
+    T Max;
+    std::string Name;
+
+  };
+}
+
+#endif //vtkToDax_Threshold_h
diff --git a/Accelerators/Piston/CMakeLists.txt b/Accelerators/Piston/CMakeLists.txt
index 4fc8d22..66caaa3 100644
--- a/Accelerators/Piston/CMakeLists.txt
+++ b/Accelerators/Piston/CMakeLists.txt
@@ -42,7 +42,7 @@ set_source_files_properties(
 list(APPEND Module_SRCS ${Module_CUDA_C_SRCS})
 
 vtk_module_library(vtkAcceleratorsPiston ${Module_SRCS})
-target_link_libraries(vtkAcceleratorsPiston ${CUDA_LIBRARIES})
+target_link_libraries(vtkAcceleratorsPiston LINK_PRIVATE ${CUDA_LIBRARIES})
 if(APPLE)
-  target_link_libraries(vtkAcceleratorsPiston /usr/local/cuda/lib/libtlshook.dylib)
+  target_link_libraries(vtkAcceleratorsPiston LINK_PRIVATE /usr/local/cuda/lib/libtlshook.dylib)
 endif()
diff --git a/Accelerators/Piston/Testing/Cxx/CMakeLists.txt b/Accelerators/Piston/Testing/Cxx/CMakeLists.txt
index 88d12d5..f44668e 100644
--- a/Accelerators/Piston/Testing/Cxx/CMakeLists.txt
+++ b/Accelerators/Piston/Testing/Cxx/CMakeLists.txt
@@ -1,31 +1,11 @@
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestUsePiston
-  EXTRA_INCLUDE vtkTestDriver.h
+vtk_add_test_cxx(
+  TestUsePiston.cxx
 )
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-string (REPLACE "vtk" "" _baselinedname ${vtk-module})
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/${_baselinedname}/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
 
-if(VTK_DATA_ROOT AND (VTK_MPI_MAX_NUMPROCS GREATER 1))
+if(VTK_MPI_MAX_NUMPROCS GREATER 1)
+  include(vtkMPI)
   find_package(MPI REQUIRED)
   include_directories(${MPI_INCLUDE_PATH})
 
@@ -34,20 +14,15 @@ if(VTK_DATA_ROOT AND (VTK_MPI_MAX_NUMPROCS GREATER 1))
   foreach(test ${PistonMPITests})
 
     vtk_module_test_executable(${vtk-module}Cxx-${test} ${test}.cxx)
-    if(VTK_DATA_ROOT)
-      add_test(NAME ${vtk-module}Cxx-${test}
-        COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS}
-        $<TARGET_FILE:${vtk-module}Cxx-${test}>
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/${_baselinedname}/${test}.png
-        ${VTK_MPI_POSTFLAGS})
-    else()
-      add_test(NAME ${vtk-module}Cxx-${test}
-        COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS}
-        $<TARGET_FILE:${vtk-module}Cxx-${test}>
-        ${VTK_MPI_POSTFLAGS})
-    endif()
+    ExternalData_add_test(VTKData
+      NAME ${vtk-module}Cxx-${test}
+      COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS}
+      $<TARGET_FILE:${vtk-module}Cxx-${test}>
+      -D ${VTK_TEST_DATA_DIR}
+      -T ${VTK_TEST_OUTPUT_DIR}
+      -V DATA{../Data/Baseline/${test}.png,:}
+      ${VTK_MPI_POSTFLAGS})
+    vtk_mpi_link(${vtk-module}Cxx-${test})
   endforeach()
 
 endif()
diff --git a/Accelerators/Piston/Testing/Cxx/TestCompositeRender.cxx b/Accelerators/Piston/Testing/Cxx/TestCompositeRender.cxx
index 887cce3..aede236 100644
--- a/Accelerators/Piston/Testing/Cxx/TestCompositeRender.cxx
+++ b/Accelerators/Piston/Testing/Cxx/TestCompositeRender.cxx
@@ -155,7 +155,6 @@ protected:
   char **Argv;
 };
 
-//#vtkCxxRevisionMacro(MyProcess, "1.0");
 vtkStandardNewMacro(MyProcess);
 
 void MyProcess::Execute()
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestCompositeRender.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestCompositeRender.png.md5
new file mode 100644
index 0000000..a0c3ed0
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestCompositeRender.png.md5
@@ -0,0 +1 @@
+b991ecca115fcf0b9371b7b0b5eab881
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestContour.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestContour.png.md5
new file mode 100644
index 0000000..7ea186c
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestContour.png.md5
@@ -0,0 +1 @@
+3705923ca1ea7afb2bc4eb10d57d3c7a
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestDMPFiltering.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestDMPFiltering.png.md5
new file mode 100644
index 0000000..5a900c8
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestDMPFiltering.png.md5
@@ -0,0 +1 @@
+f95cc9ffd2f8482a358f3de21a501afd
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestRendering.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestRendering.png.md5
new file mode 100644
index 0000000..232b16c
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestRendering.png.md5
@@ -0,0 +1 @@
+ef68be3f06f69807d156a62e02714687
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestSlice.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestSlice.png.md5
new file mode 100644
index 0000000..9637654
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestSlice.png.md5
@@ -0,0 +1 @@
+682638931df5c74eeb41496ee70e3b31
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestSlice_1.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestSlice_1.png.md5
new file mode 100644
index 0000000..9bbb086
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestSlice_1.png.md5
@@ -0,0 +1 @@
+3d1d5d2e9e6f6e0e9a09c735d60e0555
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestSort.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestSort.png.md5
new file mode 100644
index 0000000..c55d21d
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestSort.png.md5
@@ -0,0 +1 @@
+5efc512badee0d4193fe6052143f3eee
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestThreshold.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestThreshold.png.md5
new file mode 100644
index 0000000..979de3a
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestThreshold.png.md5
@@ -0,0 +1 @@
+4f55992740b3a5eb0f40ca0ab0f6b2e2
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestThreshold_1.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestThreshold_1.png.md5
new file mode 100644
index 0000000..dfaad55
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestThreshold_1.png.md5
@@ -0,0 +1 @@
+3a2c21f07bddf6a2be562541158bd1cf
diff --git a/Accelerators/Piston/Testing/Data/Baseline/TestUsePiston.png.md5 b/Accelerators/Piston/Testing/Data/Baseline/TestUsePiston.png.md5
new file mode 100644
index 0000000..dc98d28
--- /dev/null
+++ b/Accelerators/Piston/Testing/Data/Baseline/TestUsePiston.png.md5
@@ -0,0 +1 @@
+89974749a1d3eb66dd2c18ec392b0b70
diff --git a/Accelerators/Piston/Testing/Python/CMakeLists.txt b/Accelerators/Piston/Testing/Python/CMakeLists.txt
index 4d67c8d..bb11ae2 100644
--- a/Accelerators/Piston/Testing/Python/CMakeLists.txt
+++ b/Accelerators/Piston/Testing/Python/CMakeLists.txt
@@ -2,7 +2,7 @@ if (VTK_PYTHON_EXE)
   #
   # Test base functionality.
   #
-  string (REPLACE "vtk" "" _baselinedname ${vtk-module})
+  set(TestConversion_OPTS NO_VALID)
   foreach ( tfile
       TestConversion
       TestContour
@@ -11,12 +11,8 @@ if (VTK_PYTHON_EXE)
       TestSlice
       TestSort
       )
-    add_test(NAME ${vtk-module}Python-${tfile}
-      COMMAND ${VTK_PYTHON_EXE}
-      ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.py
-      -B ${VTK_DATA_ROOT}/Baseline/${_baselinedname}
-      -D ${VTK_DATA_ROOT}/Data
-      --normalize)
+    set(${tfile}_ARGS --normalize)
+    vtk_add_test_python(${tfile}.py NO_RT ${${tfile}_OPTS})
   endforeach ( )
 
   #
@@ -28,21 +24,23 @@ if (VTK_PYTHON_EXE)
       Slice
       Sort
       )
-    add_test(NAME ${vtk-module}Python-TestPlaced${tfile}
+    ExternalData_add_test(VTKData
+      NAME ${vtk-module}Python-TestPlaced${tfile}
       COMMAND ${VTK_PYTHON_EXE}
       ${CMAKE_CURRENT_SOURCE_DIR}/Test${tfile}.py
-      -B ${VTK_DATA_ROOT}/Baseline/${_baselinedname}
-      -D ${VTK_DATA_ROOT}/Data)
+      -B "DATA{../Data/Baseline/,REGEX:Test${tfile}(_[0-9]+)?.png}"
+      -D ${VTK_TEST_DATA_DIR}/Data)
   endforeach ( )
 
   #
   # Exercise thrust to gl direct on GPU rendering.
   #
-  add_test(NAME ${vtk-module}Python-TestDirectRendering
+  ExternalData_add_test(VTKData
+    NAME ${vtk-module}Python-TestDirectRendering
     COMMAND ${VTK_PYTHON_EXE}
     ${CMAKE_CURRENT_SOURCE_DIR}/TestRendering.py
-    -B ${VTK_DATA_ROOT}/Baseline/${_baselinedname}
-    -D ${VTK_DATA_ROOT}/Data
+    -B "DATA{../Data/Baseline/,REGEX:TestRendering(_[0-9]+)?.png}"
+    -D ${VTK_TEST_DATA_DIR}/Data
     --gpu_render)
 
 endif ()
diff --git a/Accelerators/Piston/vtkPistonDataObject.cxx b/Accelerators/Piston/vtkPistonDataObject.cxx
index 4495947..b382048 100644
--- a/Accelerators/Piston/vtkPistonDataObject.cxx
+++ b/Accelerators/Piston/vtkPistonDataObject.cxx
@@ -161,7 +161,7 @@ void vtkPistonDataObject::GetBounds(double bounds[6])
 }
 
 //----------------------------------------------------------------------------
-void vtkPistonDataObject::SetBounds(double bounds[6])
+void vtkPistonDataObject::SetBounds(const double bounds[6])
 {
   bool modified = false;
   for (int i=0; i<6; i++)
@@ -194,7 +194,7 @@ void vtkPistonDataObject::GetOrigin(double origin[3])
 }
 
 //----------------------------------------------------------------------------
-void vtkPistonDataObject::SetOrigin(double origin[3])
+void vtkPistonDataObject::SetOrigin(const double origin[3])
 {
   bool modified = false;
   for (int i=0; i<3; i++)
diff --git a/Accelerators/Piston/vtkPistonDataObject.h b/Accelerators/Piston/vtkPistonDataObject.h
index 325c41d..c3ee5a9 100644
--- a/Accelerators/Piston/vtkPistonDataObject.h
+++ b/Accelerators/Piston/vtkPistonDataObject.h
@@ -74,12 +74,12 @@ public:
   // (xmin,xmax, ymin,ymax, zmin,zmax).
   double *GetBounds();
   void GetBounds(double bounds[6]);
-  void SetBounds(double bounds[6]);
+  void SetBounds(const double bounds[6]);
 
   // Description:
   double *GetOrigin();
   void GetOrigin(double origin[3]);
-  void SetOrigin(double origin[3]);
+  void SetOrigin(const double origin[3]);
 
   // Description:
   double *GetSpacing();
diff --git a/Accelerators/Piston/vtkPistonMapper.cxx b/Accelerators/Piston/vtkPistonMapper.cxx
index 115a98c..e3378ff 100644
--- a/Accelerators/Piston/vtkPistonMapper.cxx
+++ b/Accelerators/Piston/vtkPistonMapper.cxx
@@ -27,6 +27,7 @@
 #include "vtkPistonScalarsColors.h"
 #include "vtkScalarsToColors.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
+#include "vtkOpenGLError.h"
 
 #include <limits>
 
@@ -52,12 +53,14 @@ namespace vtkpiston {
     {
     PistonGLRAII(GLbitfield mask)
       {
+      vtkOpenGLClearErrorMacro();
       glPushAttrib(mask);
       }
 
     ~PistonGLRAII()
       {
       glPopAttrib();
+      vtkOpenGLStaticCheckErrorMacro("failed after ~PistonGLRAII");
       }
     };
 }
@@ -136,6 +139,8 @@ vtkPistonMapper::~vtkPistonMapper()
 //-----------------------------------------------------------------------------
 void vtkPistonMapper::PrepareDirectRenderBuffers(int nPoints)
 {
+  vtkOpenGLClearErrorMacro();
+
   if (nPoints==this->Internal->BufferSize)
     {
     return;
@@ -144,6 +149,7 @@ void vtkPistonMapper::PrepareDirectRenderBuffers(int nPoints)
     {
     // Release old buffer
     vtkgl::DeleteBuffers(3, this->Internal->vboBuffers);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteBuffers");
     }
 
   this->Internal->BufferSize = nPoints;
@@ -170,6 +176,8 @@ void vtkPistonMapper::PrepareDirectRenderBuffers(int nPoints)
                     this->Internal->BufferSize*3*sizeof(float), 0,
                     vtkgl::DYNAMIC_DRAW);
 
+  vtkOpenGLCheckErrorMacro("failed after allocate shared memory");
+
   vtkpiston::CudaRegisterBuffer(&this->Internal->vboResources[0],
                                this->Internal->vboBuffers[0]);
   vtkpiston::CudaRegisterBuffer(&this->Internal->vboResources[1],
@@ -214,6 +222,8 @@ int vtkPistonMapper::FillInputPortInformation(
 //-----------------------------------------------------------------------------
 void vtkPistonMapper::RenderOnCPU()
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkpiston::PistonGLRAII(GL_LIGHTING_BIT);
 
   vtkScalarsToColors *lut = this->GetLookupTable();
@@ -389,11 +399,15 @@ void vtkPistonMapper::RenderOnCPU()
       glEnd();
   }
   od->Delete();
+
+  vtkOpenGLCheckErrorMacro("failed after RenderOnCPU");
 }
 
 //-----------------------------------------------------------------------------
 void vtkPistonMapper::RenderOnGPU()
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkPistonDataObject *id = this->GetPistonDataObjectInput(0);
 
   int nPoints = vtkpiston::QueryNumVerts(id);
@@ -443,11 +457,15 @@ void vtkPistonMapper::RenderOnGPU()
   glDisableClientState(GL_VERTEX_ARRAY);
   if (hasNormals) glDisableClientState(GL_NORMAL_ARRAY);
   if (hasColors) glDisableClientState(GL_COLOR_ARRAY);
+
+  vtkOpenGLCheckErrorMacro("failed after RenderOnGPU");
 }
 
 //-----------------------------------------------------------------------------
 void vtkPistonMapper::RenderImageDataOutline()
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkpiston::PistonGLRAII(GL_LIGHTING_BIT);
   glDisable(GL_LIGHTING);
 
@@ -508,6 +526,8 @@ void vtkPistonMapper::RenderImageDataOutline()
   glVertex3dv(nextpt[3]);
   glVertex3dv(nextpt[7]);
   glEnd();
+
+  vtkOpenGLCheckErrorMacro("failed after RenderImageDataOutline");
 }
 
 //----------------------------------------------------------------------------
diff --git a/Accelerators/Piston/vtkPistonMapper.h b/Accelerators/Piston/vtkPistonMapper.h
index 87c3989..3abc568 100644
--- a/Accelerators/Piston/vtkPistonMapper.h
+++ b/Accelerators/Piston/vtkPistonMapper.h
@@ -68,7 +68,7 @@ public:
   // Release any graphics resources that are being consumed by this mapper.
   // The parameter window could be used to determine which graphic
   // resources to release.
-  virtual void ReleaseGraphicsResources(vtkWindow *) {};
+  virtual void ReleaseGraphicsResources(vtkWindow *) {}
 
   // Description:
   // Return bounding box (array of six doubles) of data expressed as
diff --git a/Accelerators/Piston/vtkPistonSort.h b/Accelerators/Piston/vtkPistonSort.h
index 69284ab..e05d0b6 100644
--- a/Accelerators/Piston/vtkPistonSort.h
+++ b/Accelerators/Piston/vtkPistonSort.h
@@ -34,7 +34,7 @@ public:
 
 protected:
   vtkPistonSort() {}
-  ~vtkPistonSort() {};
+  ~vtkPistonSort() {}
 
   // Description:
   // Method that does the actual calculation.
diff --git a/CMake/CheckCXXExpressionCompiles.cmake b/CMake/CheckCXXExpressionCompiles.cmake
new file mode 100644
index 0000000..de65dca
--- /dev/null
+++ b/CMake/CheckCXXExpressionCompiles.cmake
@@ -0,0 +1,40 @@
+# - Check if a C++ expression compiles
+# CHECK_CXX_EXPRESSION_COMPILES(<expression> <files> <variable>)
+#
+# Check that the <expression> compiles in a program that includes
+# <files> and store the result in a <variable>.  Specify the list
+# of files in one argument as a semicolon-separated list.
+#
+# The following variables may be set before calling this macro to
+# modify the way the check is run:
+#
+#  CMAKE_REQUIRED_FLAGS = string of compile command line flags
+#  CMAKE_REQUIRED_DEFINITIONS = list of macros to define (-DFOO=bar)
+#  CMAKE_REQUIRED_INCLUDES = list of include directories
+#  CMAKE_REQUIRED_LIBRARIES = list of libraries to link
+
+#=============================================================================
+# Copyright 2003-2011 Kitware, Inc.
+#
+# Distributed under the OSI-approved BSD License (the "License");
+# see accompanying file Copyright.txt for details.
+#
+# This software is distributed WITHOUT ANY WARRANTY; without even the
+# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+# See the License for more information.
+#=============================================================================
+# (To distribute this file outside of CMake, substitute the full
+#  License text for the above reference.)
+
+INCLUDE(CheckCXXSourceCompiles)
+
+MACRO(CHECK_CXX_EXPRESSION_COMPILES EXPRESSION FILES VARIABLE)
+  SET(SOURCE "/* CHECK_CXX_EXPRESSION_COMPILES */\n")
+  FOREACH(FILE ${FILES})
+    SET(SOURCE "${SOURCE}#include <${FILE}>\n")
+  ENDFOREACH(FILE ${FILES})
+  SET(SOURCE "${SOURCE}\nint main()\n{\n")
+  SET(SOURCE "${SOURCE}  static_cast<void>(${EXPRESSION});\n\n")
+  SET(SOURCE "${SOURCE}  return 0;\n}\n")
+  CHECK_CXX_SOURCE_COMPILES("${SOURCE}" "${VARIABLE}")
+ENDMACRO(CHECK_CXX_EXPRESSION_COMPILES)
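
A short usage sketch of the macro above; the expression, header, and result variable here are illustrative only, not taken from VTK's build system:

    include(CheckCXXExpressionCompiles)

    # Check whether std::isnan() compiles; stores the result in HAVE_STD_ISNAN.
    check_cxx_expression_compiles("std::isnan(0.0)" "cmath" HAVE_STD_ISNAN)

    if(HAVE_STD_ISNAN)
      add_definitions(-DHAVE_STD_ISNAN)
    endif()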
diff --git a/CMake/ExternalData.cmake b/CMake/ExternalData.cmake
new file mode 100644
index 0000000..5b5d060
--- /dev/null
+++ b/CMake/ExternalData.cmake
@@ -0,0 +1,789 @@
+# - Manage data files stored outside source tree
+# Use this module to unambiguously reference data files stored outside the
+# source tree and fetch them at build time from arbitrary local and remote
+# content-addressed locations.  Functions provided by this module recognize
+# arguments with the syntax "DATA{<name>}" as references to external data,
+# replace them with full paths to local copies of those data, and create build
+# rules to fetch and update the local copies.
+#
+# The DATA{} syntax is literal and the <name> is a full or relative path
+# within the source tree.  The source tree must contain either a real data
+# file at <name> or a "content link" at <name><ext> containing a hash of the
+# real file using a hash algorithm corresponding to <ext>.  For example, the
+# argument "DATA{img.png}" may be satisfied by either a real "img.png" file in
+# the current source directory or an "img.png.md5" file containing its MD5 sum.
+#
+# The 'ExternalData_Expand_Arguments' function evaluates DATA{} references
+# in its arguments and constructs a new list of arguments:
+#  ExternalData_Expand_Arguments(
+#    <target>   # Name of data management target
+#    <outVar>   # Output variable
+#    [args...]  # Input arguments, DATA{} allowed
+#    )
+# It replaces each DATA{} reference in an argument with the full path of a
+# real data file on disk that will exist after the <target> builds.
+#
+# The 'ExternalData_Add_Test' function wraps around the CMake add_test()
+# command but supports DATA{} references in its arguments:
+#  ExternalData_Add_Test(
+#    <target>   # Name of data management target
+#    ...        # Arguments of add_test(), DATA{} allowed
+#    )
+# It passes its arguments through ExternalData_Expand_Arguments and then
+# invokes add_test() using the results.
+#
+# The 'ExternalData_Add_Target' function creates a custom target to manage
+# local instances of data files stored externally:
+#  ExternalData_Add_Target(
+#    <target>   # Name of data management target
+#    )
+# It creates custom commands in the target as necessary to make data files
+# available for each DATA{} reference previously evaluated by other functions
+# provided by this module.  A list of URL templates must be provided in the
+# variable ExternalData_URL_TEMPLATES using the placeholders "%(algo)" and
+# "%(hash)" in each template.  Data fetch rules try each URL template in order
+# by substituting the hash algorithm name for "%(algo)" and the hash value for
+# "%(hash)".
+#
+# The following hash algorithms are supported:
+#    %(algo)     <ext>     Description
+#    -------     -----     -----------
+#    MD5         .md5      Message-Digest Algorithm 5, RFC 1321
+# Note that the hashes are used only for unique data identification and
+# download verification.  This is not security software.
+#
+# Example usage:
+#   include(ExternalData)
+#   set(ExternalData_URL_TEMPLATES "file:///local/%(algo)/%(hash)"
+#                                  "http://data.org/%(algo)/%(hash)")
+#   ExternalData_Add_Test(MyData
+#     NAME MyTest
+#     COMMAND MyExe DATA{MyInput.png}
+#     )
+#   ExternalData_Add_Target(MyData)
+# When test "MyTest" runs the "DATA{MyInput.png}" argument will be replaced by
+# the full path to a real instance of the data file "MyInput.png" on disk.  If
+# the source tree contains a content link such as "MyInput.png.md5" then the
+# "MyData" target creates a real "MyInput.png" in the build tree.
+#
+# The DATA{} syntax can be told to fetch a file series using the form
+# "DATA{<name>,:}", where the ":" is literal.  If the source tree contains a
+# group of files or content links named like a series then a reference to one
+# member adds rules to fetch all of them.  Although all members of a series
+# are fetched, only the file originally named by the DATA{} argument is
+# substituted for it.  The default configuration recognizes file series names
+# ending with "#.ext", "_#.ext", ".#.ext", or "-#.ext" where "#" is a sequence
+# of decimal digits and ".ext" is any single extension.  Configure it with a
+# regex that parses <number> and <suffix> parts from the end of <name>:
+#  ExternalData_SERIES_PARSE = regex of the form (<number>)(<suffix>)$
+# For more complicated cases set:
+#  ExternalData_SERIES_PARSE = regex with at least two () groups
+#  ExternalData_SERIES_PARSE_PREFIX = <prefix> regex group number, if any
+#  ExternalData_SERIES_PARSE_NUMBER = <number> regex group number
+#  ExternalData_SERIES_PARSE_SUFFIX = <suffix> regex group number
+# Configure series number matching with a regex that matches the
+# <number> part of series members named <prefix><number><suffix>:
+#  ExternalData_SERIES_MATCH = regex matching <number> in all series members
+# Note that the <suffix> of a series does not include a hash-algorithm
+# extension.
+#
+# The DATA{} syntax can alternatively match files associated with the named
+# file and contained in the same directory.  Associated files may be specified
+# by options using the syntax DATA{<name>,<opt1>,<opt2>,...}.  Each option may
+# specify one file by name or specify a regular expression to match file names
+# using the syntax REGEX:<regex>.  For example, the arguments
+#   DATA{MyData/MyInput.mhd,MyInput.img}                   # File pair
+#   DATA{MyData/MyFrames00.png,REGEX:MyFrames[0-9]+\\.png} # Series
+# will pass MyInput.mhd and MyFrames00.png on the command line but ensure
+# that the associated files are present next to them.
+#
+# The DATA{} syntax may reference a directory using a trailing slash and a
+# list of associated files.  The form DATA{<name>/,<opt1>,<opt2>,...} adds
+# rules to fetch any files in the directory that match one of the associated
+# file options.  For example, the argument DATA{MyDataDir/,REGEX:.*} will pass
+# the full path to a MyDataDir directory on the command line and ensure that
+# the directory contains files corresponding to every file or content link in
+# the MyDataDir source directory.
+#
+# The variable ExternalData_LINK_CONTENT may be set to the name of a supported
+# hash algorithm to enable automatic conversion of real data files referenced
+# by the DATA{} syntax into content links.  For each such <file> a content
+# link named "<file><ext>" is created.  The original file is renamed to the
+# form ".ExternalData_<algo>_<hash>" to stage it for future transmission to
+# one of the locations in the list of URL templates (by means outside the
+# scope of this module).  The data fetch rule created for the content link
+# will use the staged object if it cannot be found using any URL template.
+#
+# The variable ExternalData_OBJECT_STORES may be set to a list of local
+# directories that store objects using the layout <dir>/%(algo)/%(hash).
+# These directories will be searched first for a needed object.  If the object
+# is not available in any store then it will be fetched remotely using the URL
+# templates and added to the first local store listed.  If no stores are
+# specified the default is a location inside the build tree.
+#
+# The variable ExternalData_SOURCE_ROOT may be set to the highest source
+# directory containing any path named by a DATA{} reference.  The default is
+# CMAKE_SOURCE_DIR.  ExternalData_SOURCE_ROOT and CMAKE_SOURCE_DIR must refer
+# to directories within a single source distribution (e.g. they come together
+# in one tarball).
+#
+# The variable ExternalData_BINARY_ROOT may be set to the directory to hold
+# the real data files named by expanded DATA{} references.  The default is
+# CMAKE_BINARY_DIR.  The directory layout will mirror that of content links
+# under ExternalData_SOURCE_ROOT.
+#
+# Variables ExternalData_TIMEOUT_INACTIVITY and ExternalData_TIMEOUT_ABSOLUTE
+# set the download inactivity and absolute timeouts, in seconds.  The defaults
+# are 60 seconds and 300 seconds, respectively.  Set either timeout to 0
+# seconds to disable enforcement.
+
+#=============================================================================
+# Copyright 2010-2013 Kitware, Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions
+# are met:
+#
+# * Redistributions of source code must retain the above copyright
+#   notice, this list of conditions and the following disclaimer.
+#
+# * Redistributions in binary form must reproduce the above copyright
+#   notice, this list of conditions and the following disclaimer in the
+#   documentation and/or other materials provided with the distribution.
+#
+# * Neither the names of Kitware, Inc., the Insight Software Consortium,
+#   nor the names of their contributors may be used to endorse or promote
+#   products derived from this software without specific prior written
+#   permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#=============================================================================
+
+function(ExternalData_add_test target)
+  # Expand all arguments as a single string to preserve escaped semicolons.
+  ExternalData_expand_arguments("${target}" testArgs "${ARGN}")
+  add_test(${testArgs})
+endfunction()
+
+function(ExternalData_add_target target)
+  if(NOT ExternalData_URL_TEMPLATES)
+    message(FATAL_ERROR "ExternalData_URL_TEMPLATES is not set!")
+  endif()
+  if(NOT ExternalData_OBJECT_STORES)
+    set(ExternalData_OBJECT_STORES ${CMAKE_BINARY_DIR}/ExternalData/Objects)
+  endif()
+  set(config ${CMAKE_CURRENT_BINARY_DIR}/${target}_config.cmake)
+  configure_file(${_ExternalData_SELF_DIR}/ExternalData_config.cmake.in ${config} @ONLY)
+
+  set(files "")
+
+  # Set "_ExternalData_FILE_${file}" for each output file to avoid duplicate
+  # rules.  Use local data first to prefer real files over content links.
+
+  # Custom commands to copy or link local data.
+  get_property(data_local GLOBAL PROPERTY _ExternalData_${target}_LOCAL)
+  foreach(entry IN LISTS data_local)
+    string(REPLACE "|" ";" tuple "${entry}")
+    list(GET tuple 0 file)
+    list(GET tuple 1 name)
+    if(NOT DEFINED "_ExternalData_FILE_${file}")
+      set("_ExternalData_FILE_${file}" 1)
+      add_custom_command(
+        COMMENT "Generating ${file}"
+        OUTPUT "${file}"
+        COMMAND ${CMAKE_COMMAND} -Drelative_top=${CMAKE_BINARY_DIR}
+                                 -Dfile=${file} -Dname=${name}
+                                 -DExternalData_ACTION=local
+                                 -DExternalData_CONFIG=${config}
+                                 -P ${_ExternalData_SELF}
+        MAIN_DEPENDENCY "${name}"
+        )
+      list(APPEND files "${file}")
+    endif()
+  endforeach()
+
+  # Custom commands to fetch remote data.
+  get_property(data_fetch GLOBAL PROPERTY _ExternalData_${target}_FETCH)
+  foreach(entry IN LISTS data_fetch)
+    string(REPLACE "|" ";" tuple "${entry}")
+    list(GET tuple 0 file)
+    list(GET tuple 1 name)
+    list(GET tuple 2 ext)
+    set(stamp "${ext}-stamp")
+    if(NOT DEFINED "_ExternalData_FILE_${file}")
+      set("_ExternalData_FILE_${file}" 1)
+      add_custom_command(
+        # Users care about the data file, so hide the hash/timestamp file.
+        COMMENT "Generating ${file}"
+        # The hash/timestamp file is the output from the build perspective.
+        # List the real file as a second output in case it is a broken link.
+        # The files must be listed in this order so CMake can hide from the
+        # make tool that a symlink target may not be newer than the input.
+        OUTPUT "${file}${stamp}" "${file}"
+        # Run the data fetch/update script.
+        COMMAND ${CMAKE_COMMAND} -Drelative_top=${CMAKE_BINARY_DIR}
+                                 -Dfile=${file} -Dname=${name} -Dext=${ext}
+                                 -DExternalData_ACTION=fetch
+                                 -DExternalData_CONFIG=${config}
+                                 -P ${_ExternalData_SELF}
+        # Update whenever the object hash changes.
+        MAIN_DEPENDENCY "${name}${ext}"
+        )
+      list(APPEND files "${file}${stamp}")
+    endif()
+  endforeach()
+
+  # Custom target to drive all update commands.
+  add_custom_target(${target} ALL DEPENDS ${files})
+endfunction()
+
+function(ExternalData_expand_arguments target outArgsVar)
+  # Replace DATA{} references with real arguments.
+  set(data_regex "DATA{([^;{}\r\n]*)}")
+  set(other_regex "([^D]|D[^A]|DA[^T]|DAT[^A]|DATA[^{])+|.")
+  set(outArgs "")
+  # This list expansion un-escapes semicolons in list element values so we
+  # must re-escape them below anywhere a new list expansion will occur.
+  foreach(arg IN LISTS ARGN)
+    if("x${arg}" MATCHES "${data_regex}")
+      # Re-escape in-value semicolons before expansion in foreach below.
+      string(REPLACE ";" "\\;" tmp "${arg}")
+      # Split argument into DATA{}-pieces and other pieces.
+      string(REGEX MATCHALL "${data_regex}|${other_regex}" pieces "${tmp}")
+      # Compose output argument with DATA{}-pieces replaced.
+      set(outArg "")
+      foreach(piece IN LISTS pieces)
+        if("x${piece}" MATCHES "^x${data_regex}$")
+          # Replace this DATA{}-piece with a file path.
+          string(REGEX REPLACE "${data_regex}" "\\1" data "${piece}")
+          _ExternalData_arg("${target}" "${piece}" "${data}" file)
+          set(outArg "${outArg}${file}")
+        else()
+          # No replacement needed for this piece.
+          set(outArg "${outArg}${piece}")
+        endif()
+      endforeach()
+    else()
+      # No replacements needed in this argument.
+      set(outArg "${arg}")
+    endif()
+    # Re-escape in-value semicolons in resulting list.
+    string(REPLACE ";" "\\;" outArg "${outArg}")
+    list(APPEND outArgs "${outArg}")
+  endforeach()
+  set("${outArgsVar}" "${outArgs}" PARENT_SCOPE)
+endfunction()
+
+#-----------------------------------------------------------------------------
+# Private helper interface
+
+set(_ExternalData_REGEX_ALGO "MD5")
+set(_ExternalData_REGEX_EXT "md5")
+set(_ExternalData_SELF "${CMAKE_CURRENT_LIST_FILE}")
+get_filename_component(_ExternalData_SELF_DIR "${_ExternalData_SELF}" PATH)
+
+function(_ExternalData_compute_hash var_hash algo file)
+  if("${algo}" MATCHES "^${_ExternalData_REGEX_ALGO}$")
+    # TODO: Require CMake 2.8.7 to support other hashes with file(${algo} ...)
+    execute_process(COMMAND "${CMAKE_COMMAND}" -E md5sum "${file}"
+      OUTPUT_VARIABLE output)
+    string(SUBSTRING "${output}" 0 32 hash)
+    set("${var_hash}" "${hash}" PARENT_SCOPE)
+  else()
+    message(FATAL_ERROR "Hash algorithm ${algo} unimplemented.")
+  endif()
+endfunction()
+
+function(_ExternalData_random var)
+  string(RANDOM LENGTH 6 random)
+  set("${var}" "${random}" PARENT_SCOPE)
+endfunction()
+
+function(_ExternalData_exact_regex regex_var string)
+  string(REGEX REPLACE "([][+.*()^])" "\\\\\\1" regex "${string}")
+  set("${regex_var}" "${regex}" PARENT_SCOPE)
+endfunction()
+
+function(_ExternalData_atomic_write file content)
+  _ExternalData_random(random)
+  set(tmp "${file}.tmp${random}")
+  file(WRITE "${tmp}" "${content}")
+  file(RENAME "${tmp}" "${file}")
+endfunction()
+
+function(_ExternalData_link_content name var_ext)
+  if("${ExternalData_LINK_CONTENT}" MATCHES "^(${_ExternalData_REGEX_ALGO})$")
+    set(algo "${ExternalData_LINK_CONTENT}")
+  else()
+    message(FATAL_ERROR
+      "Unknown hash algorithm specified by ExternalData_LINK_CONTENT:\n"
+      "  ${ExternalData_LINK_CONTENT}")
+  endif()
+  _ExternalData_compute_hash(hash "${algo}" "${name}")
+  get_filename_component(dir "${name}" PATH)
+  set(staged "${dir}/.ExternalData_${algo}_${hash}")
+  string(TOLOWER ".${algo}" ext)
+  _ExternalData_atomic_write("${name}${ext}" "${hash}\n")
+  file(RENAME "${name}" "${staged}")
+  set("${var_ext}" "${ext}" PARENT_SCOPE)
+
+  file(RELATIVE_PATH relname "${ExternalData_SOURCE_ROOT}" "${name}${ext}")
+  message(STATUS "Linked ${relname} to ExternalData ${algo}/${hash}")
+endfunction()
+
+function(_ExternalData_arg target arg options var_file)
+  # Separate data path from the options.
+  string(REPLACE "," ";" options "${options}")
+  list(GET options 0 data)
+  list(REMOVE_AT options 0)
+
+  # Interpret trailing slashes as directories.
+  set(data_is_directory 0)
+  if("x${data}" MATCHES "^x(.*)([/\\])$")
+    set(data_is_directory 1)
+    set(data "${CMAKE_MATCH_1}")
+  endif()
+
+  # Convert to full path.
+  if(IS_ABSOLUTE "${data}")
+    set(absdata "${data}")
+  else()
+    set(absdata "${CMAKE_CURRENT_SOURCE_DIR}/${data}")
+  endif()
+  get_filename_component(absdata "${absdata}" ABSOLUTE)
+
+  # Convert to relative path under the source tree.
+  if(NOT ExternalData_SOURCE_ROOT)
+    set(ExternalData_SOURCE_ROOT "${CMAKE_SOURCE_DIR}")
+  endif()
+  set(top_src "${ExternalData_SOURCE_ROOT}")
+  file(RELATIVE_PATH reldata "${top_src}" "${absdata}")
+  if(IS_ABSOLUTE "${reldata}" OR "${reldata}" MATCHES "^\\.\\./")
+    message(FATAL_ERROR "Data file referenced by argument\n"
+      "  ${arg}\n"
+      "does not lie under the top-level source directory\n"
+      "  ${top_src}\n")
+  endif()
+  if(data_is_directory AND NOT IS_DIRECTORY "${top_src}/${reldata}")
+    message(FATAL_ERROR "Data directory referenced by argument\n"
+      "  ${arg}\n"
+      "corresponds to source tree path\n"
+      "  ${reldata}\n"
+      "that does not exist as a directory!")
+  endif()
+  if(NOT ExternalData_BINARY_ROOT)
+    set(ExternalData_BINARY_ROOT "${CMAKE_BINARY_DIR}")
+  endif()
+  set(top_bin "${ExternalData_BINARY_ROOT}")
+
+  # Handle in-source builds gracefully.
+  if("${top_src}" STREQUAL "${top_bin}")
+    if(ExternalData_LINK_CONTENT)
+      message(WARNING "ExternalData_LINK_CONTENT cannot be used in-source")
+      set(ExternalData_LINK_CONTENT 0)
+    endif()
+    set(top_same 1)
+  endif()
+
+  set(external "") # Entries external to the source tree.
+  set(internal "") # Entries internal to the source tree.
+  set(have_original ${data_is_directory})
+
+  # Process options.
+  set(series_option "")
+  set(associated_files "")
+  set(associated_regex "")
+  foreach(opt ${options})
+    if("x${opt}" MATCHES "^xREGEX:[^:/]+$")
+      # Regular expression to match associated files.
+      string(REGEX REPLACE "^REGEX:" "" regex "${opt}")
+      list(APPEND associated_regex "${regex}")
+    elseif("x${opt}" MATCHES "^x:$")
+      # Activate series matching.
+      set(series_option "${opt}")
+    elseif("x${opt}" MATCHES "^[^][:/*?]+$")
+      # Specific associated file.
+      list(APPEND associated_files "${opt}")
+    else()
+      message(FATAL_ERROR "Unknown option \"${opt}\" in argument\n"
+        "  ${arg}\n")
+    endif()
+  endforeach()
+
+  if(series_option)
+    if(data_is_directory)
+      message(FATAL_ERROR "Series option \"${series_option}\" not allowed with directories.")
+    endif()
+    if(associated_files OR associated_regex)
+      message(FATAL_ERROR "Series option \"${series_option}\" not allowed with associated files.")
+    endif()
+    # Load a whole file series.
+    _ExternalData_arg_series()
+  elseif(data_is_directory)
+    if(associated_files OR associated_regex)
+      # Load listed/matching associated files in the directory.
+      _ExternalData_arg_associated()
+    else()
+      message(FATAL_ERROR "Data directory referenced by argument\n"
+        "  ${arg}\n"
+        "must list associated files.")
+    endif()
+  else()
+    # Load the named data file.
+    _ExternalData_arg_single()
+    if(associated_files OR associated_regex)
+      # Load listed/matching associated files.
+      _ExternalData_arg_associated()
+    endif()
+  endif()
+
+  if(NOT have_original)
+    message(STATUS "Data file referenced by argument\n"
+      "  ${arg}\n"
+      "corresponds to source tree path\n"
+      "  ${reldata}\n"
+      "that does not exist as a file (with or without an extension)!")
+  endif()
+
+  if(external)
+    # Make the series available in the build tree.
+    set_property(GLOBAL APPEND PROPERTY
+      _ExternalData_${target}_FETCH "${external}")
+    set_property(GLOBAL APPEND PROPERTY
+      _ExternalData_${target}_LOCAL "${internal}")
+    set("${var_file}" "${top_bin}/${reldata}" PARENT_SCOPE)
+  else()
+    # The whole series is in the source tree.
+    set("${var_file}" "${top_src}/${reldata}" PARENT_SCOPE)
+  endif()
+endfunction()
+
+macro(_ExternalData_arg_associated)
+  # Associated files lie in the same directory.
+  if(data_is_directory)
+    set(reldir "${reldata}")
+  else()
+    get_filename_component(reldir "${reldata}" PATH)
+  endif()
+  if(reldir)
+    set(reldir "${reldir}/")
+  endif()
+  _ExternalData_exact_regex(reldir_regex "${reldir}")
+
+  # Find files named explicitly.
+  foreach(file ${associated_files})
+    _ExternalData_exact_regex(file_regex "${file}")
+    _ExternalData_arg_find_files("${reldir}${file}" "${reldir_regex}${file_regex}")
+  endforeach()
+
+  # Find files matching the given regular expressions.
+  set(all "")
+  set(sep "")
+  foreach(regex ${associated_regex})
+    set(all "${all}${sep}${reldir_regex}${regex}")
+    set(sep "|")
+  endforeach()
+  _ExternalData_arg_find_files("${reldir}" "${all}")
+endmacro()
+
+macro(_ExternalData_arg_single)
+  # Match only the named data by itself.
+  _ExternalData_exact_regex(data_regex "${reldata}")
+  _ExternalData_arg_find_files("${reldata}" "${data_regex}")
+endmacro()
+
+macro(_ExternalData_arg_series)
+  # Configure series parsing and matching.
+  set(series_parse_prefix "")
+  set(series_parse_number "\\1")
+  set(series_parse_suffix "\\2")
+  if(ExternalData_SERIES_PARSE)
+    if(ExternalData_SERIES_PARSE_NUMBER AND ExternalData_SERIES_PARSE_SUFFIX)
+      if(ExternalData_SERIES_PARSE_PREFIX)
+        set(series_parse_prefix "\\${ExternalData_SERIES_PARSE_PREFIX}")
+      endif()
+      set(series_parse_number "\\${ExternalData_SERIES_PARSE_NUMBER}")
+      set(series_parse_suffix "\\${ExternalData_SERIES_PARSE_SUFFIX}")
+    elseif(NOT "x${ExternalData_SERIES_PARSE}" MATCHES "^x\\([^()]*\\)\\([^()]*\\)\\$$")
+      message(FATAL_ERROR
+        "ExternalData_SERIES_PARSE is set to\n"
+        "  ${ExternalData_SERIES_PARSE}\n"
+        "which is not of the form\n"
+        "  (<number>)(<suffix>)$\n"
+        "Fix the regular expression or set variables\n"
+        "  ExternalData_SERIES_PARSE_PREFIX = <prefix> regex group number, if any\n"
+        "  ExternalData_SERIES_PARSE_NUMBER = <number> regex group number\n"
+        "  ExternalData_SERIES_PARSE_SUFFIX = <suffix> regex group number\n"
+        )
+    endif()
+    set(series_parse "${ExternalData_SERIES_PARSE}")
+  else()
+    set(series_parse "([0-9]*)(\\.[^./]*)$")
+  endif()
+  if(ExternalData_SERIES_MATCH)
+    set(series_match "${ExternalData_SERIES_MATCH}")
+  else()
+    set(series_match "[_.-]?[0-9]*")
+  endif()
+
+  # Parse the base, number, and extension components of the series.
+  string(REGEX REPLACE "${series_parse}" "${series_parse_prefix};${series_parse_number};${series_parse_suffix}" tuple "${reldata}")
+  list(LENGTH tuple len)
+  if(NOT "${len}" EQUAL 3)
+    message(FATAL_ERROR "Data file referenced by argument\n"
+      "  ${arg}\n"
+      "corresponds to path\n"
+      "  ${reldata}\n"
+      "that does not match regular expression\n"
+      "  ${series_parse}")
+  endif()
+  list(GET tuple 0 relbase)
+  list(GET tuple 2 ext)
+
+  # Glob files that might match the series.
+  # Then match base, number, and extension.
+  _ExternalData_exact_regex(series_base "${relbase}")
+  _ExternalData_exact_regex(series_ext "${ext}")
+  _ExternalData_arg_find_files("${relbase}*${ext}"
+    "${series_base}${series_match}${series_ext}")
+endmacro()
+
+function(_ExternalData_arg_find_files pattern regex)
+  file(GLOB globbed RELATIVE "${top_src}" "${top_src}/${pattern}*")
+  foreach(entry IN LISTS globbed)
+    if("x${entry}" MATCHES "^x(.*)(\\.(${_ExternalData_REGEX_EXT}))$")
+      set(relname "${CMAKE_MATCH_1}")
+      set(alg "${CMAKE_MATCH_2}")
+    else()
+      set(relname "${entry}")
+      set(alg "")
+    endif()
+    if("x${relname}" MATCHES "^x${regex}$" # matches
+        AND NOT IS_DIRECTORY "${top_src}/${entry}" # not a directory
+        AND NOT "x${relname}" MATCHES "(^x|/)\\.ExternalData_" # not staged obj
+        )
+      set(name "${top_src}/${relname}")
+      set(file "${top_bin}/${relname}")
+      if(alg)
+        list(APPEND external "${file}|${name}|${alg}")
+      elseif(ExternalData_LINK_CONTENT)
+        _ExternalData_link_content("${name}" alg)
+        list(APPEND external "${file}|${name}|${alg}")
+      elseif(NOT top_same)
+        list(APPEND internal "${file}|${name}")
+      endif()
+      if("${relname}" STREQUAL "${reldata}")
+        set(have_original 1)
+      endif()
+    endif()
+  endforeach()
+  set(external "${external}" PARENT_SCOPE)
+  set(internal "${internal}" PARENT_SCOPE)
+  set(have_original "${have_original}" PARENT_SCOPE)
+endfunction()
+
+#-----------------------------------------------------------------------------
+# Private script mode interface
+
+if(CMAKE_GENERATOR OR NOT ExternalData_ACTION)
+  return()
+endif()
+
+if(ExternalData_CONFIG)
+  include(${ExternalData_CONFIG})
+endif()
+if(NOT ExternalData_URL_TEMPLATES)
+  message(FATAL_ERROR "No ExternalData_URL_TEMPLATES set!")
+endif()
+
+function(_ExternalData_link_or_copy src dst)
+  # Create a temporary file first.
+  get_filename_component(dst_dir "${dst}" PATH)
+  file(MAKE_DIRECTORY "${dst_dir}")
+  _ExternalData_random(random)
+  set(tmp "${dst}.tmp${random}")
+  if(UNIX)
+    # Create a symbolic link.
+    set(tgt "${src}")
+    if(relative_top)
+      # Use relative path if files are close enough.
+      file(RELATIVE_PATH relsrc "${relative_top}" "${src}")
+      file(RELATIVE_PATH relfile "${relative_top}" "${dst}")
+      if(NOT IS_ABSOLUTE "${relsrc}" AND NOT "${relsrc}" MATCHES "^\\.\\./" AND
+          NOT IS_ABSOLUTE "${reldst}" AND NOT "${reldst}" MATCHES "^\\.\\./")
+        file(RELATIVE_PATH tgt "${dst_dir}" "${src}")
+      endif()
+    endif()
+    execute_process(COMMAND "${CMAKE_COMMAND}" -E create_symlink "${tgt}" "${tmp}" RESULT_VARIABLE result)
+  else()
+    # Create a copy.
+    execute_process(COMMAND "${CMAKE_COMMAND}" -E copy "${src}" "${tmp}" RESULT_VARIABLE result)
+  endif()
+  if(result)
+    file(REMOVE "${tmp}")
+    message(FATAL_ERROR "Failed to create\n  ${tmp}\nfrom\n  ${obj}")
+  endif()
+
+  # Atomically create/replace the real destination.
+  file(RENAME "${tmp}" "${dst}")
+endfunction()
+
+function(_ExternalData_download_file url file err_var msg_var)
+  set(retry 3)
+  while(retry)
+    math(EXPR retry "${retry} - 1")
+    if(ExternalData_TIMEOUT_INACTIVITY)
+      set(inactivity_timeout INACTIVITY_TIMEOUT ${ExternalData_TIMEOUT_INACTIVITY})
+    elseif(NOT "${ExternalData_TIMEOUT_INACTIVITY}" EQUAL 0)
+      set(inactivity_timeout INACTIVITY_TIMEOUT 60)
+    else()
+      set(inactivity_timeout "")
+    endif()
+    if(ExternalData_TIMEOUT_ABSOLUTE)
+      set(absolute_timeout TIMEOUT ${ExternalData_TIMEOUT_ABSOLUTE})
+    elseif(NOT "${ExternalData_TIMEOUT_ABSOLUTE}" EQUAL 0)
+      set(absolute_timeout TIMEOUT 300)
+    else()
+      set(absolute_timeout "")
+    endif()
+    file(DOWNLOAD "${url}" "${file}" STATUS status LOG log ${inactivity_timeout} ${absolute_timeout} SHOW_PROGRESS)
+    list(GET status 0 err)
+    list(GET status 1 msg)
+    if(err)
+      if("${msg}" MATCHES "HTTP response code said error" AND
+          "${log}" MATCHES "error: 503")
+        set(msg "temporarily unavailable")
+      endif()
+    elseif("${log}" MATCHES "\nHTTP[^\n]* 503")
+      set(err TRUE)
+      set(msg "temporarily unavailable")
+    endif()
+    if(NOT err OR NOT "${msg}" MATCHES "partial|timeout|temporarily")
+      break()
+    elseif(retry)
+      message(STATUS "[download terminated: ${msg}, retries left: ${retry}]")
+    endif()
+  endwhile()
+  set("${err_var}" "${err}" PARENT_SCOPE)
+  set("${msg_var}" "${msg}" PARENT_SCOPE)
+endfunction()
+
+function(_ExternalData_download_object name hash algo var_obj)
+  # Search all object stores for an existing object.
+  foreach(dir ${ExternalData_OBJECT_STORES})
+    set(obj "${dir}/${algo}/${hash}")
+    if(EXISTS "${obj}")
+      message(STATUS "Found object: \"${obj}\"")
+      set("${var_obj}" "${obj}" PARENT_SCOPE)
+      return()
+    endif()
+  endforeach()
+
+  # Download object to the first store.
+  list(GET ExternalData_OBJECT_STORES 0 store)
+  set(obj "${store}/${algo}/${hash}")
+
+  _ExternalData_random(random)
+  set(tmp "${obj}.tmp${random}")
+  set(found 0)
+  set(tried "")
+  foreach(url_template IN LISTS ExternalData_URL_TEMPLATES)
+    string(REPLACE "%(hash)" "${hash}" url_tmp "${url_template}")
+    string(REPLACE "%(algo)" "${algo}" url "${url_tmp}")
+    message(STATUS "Fetching \"${url}\"")
+    _ExternalData_download_file("${url}" "${tmp}" err errMsg)
+    set(tried "${tried}\n  ${url}")
+    if(err)
+      set(tried "${tried} (${errMsg})")
+    else()
+      # Verify downloaded object.
+      _ExternalData_compute_hash(dl_hash "${algo}" "${tmp}")
+      if("${dl_hash}" STREQUAL "${hash}")
+        set(found 1)
+        break()
+      else()
+        set(tried "${tried} (wrong hash ${algo}=${dl_hash})")
+        if("$ENV{ExternalData_DEBUG_DOWNLOAD}" MATCHES ".")
+          file(RENAME "${tmp}" "${store}/${algo}/${dl_hash}")
+        endif()
+      endif()
+    endif()
+    file(REMOVE "${tmp}")
+  endforeach()
+
+  get_filename_component(dir "${name}" PATH)
+  set(staged "${dir}/.ExternalData_${algo}_${hash}")
+
+  if(found)
+    file(RENAME "${tmp}" "${obj}")
+    message(STATUS "Downloaded object: \"${obj}\"")
+  elseif(EXISTS "${staged}")
+    set(obj "${staged}")
+    message(STATUS "Staged object: \"${obj}\"")
+  else()
+    message(FATAL_ERROR "Object ${algo}=${hash} not found at:${tried}")
+  endif()
+
+  set("${var_obj}" "${obj}" PARENT_SCOPE)
+endfunction()
+
+if("${ExternalData_ACTION}" STREQUAL "fetch")
+  foreach(v ExternalData_OBJECT_STORES file name ext)
+    if(NOT DEFINED "${v}")
+      message(FATAL_ERROR "No \"-D${v}=\" value provided!")
+    endif()
+  endforeach()
+
+  file(READ "${name}${ext}" hash)
+  string(STRIP "${hash}" hash)
+
+  if("${ext}" MATCHES "^\\.(${_ExternalData_REGEX_EXT})$")
+    string(TOUPPER "${CMAKE_MATCH_1}" algo)
+  else()
+    message(FATAL_ERROR "Unknown hash algorithm extension \"${ext}\"")
+  endif()
+
+  _ExternalData_download_object("${name}" "${hash}" "${algo}" obj)
+
+  # Check if file already corresponds to the object.
+  set(stamp "${ext}-stamp")
+  set(file_up_to_date 0)
+  if(EXISTS "${file}" AND EXISTS "${file}${stamp}")
+    file(READ "${file}${stamp}" f_hash)
+    string(STRIP "${f_hash}" f_hash)
+    if("${f_hash}" STREQUAL "${hash}")
+      #message(STATUS "File already corresponds to object")
+      set(file_up_to_date 1)
+    endif()
+  endif()
+
+  if(file_up_to_date)
+    # Touch the file to convince the build system it is up to date.
+    execute_process(COMMAND "${CMAKE_COMMAND}" -E touch "${file}")
+  else()
+    _ExternalData_link_or_copy("${obj}" "${file}")
+  endif()
+
+  # Atomically update the hash/timestamp file to record the object referenced.
+  _ExternalData_atomic_write("${file}${stamp}" "${hash}\n")
+elseif("${ExternalData_ACTION}" STREQUAL "local")
+  foreach(v file name)
+    if(NOT DEFINED "${v}")
+      message(FATAL_ERROR "No \"-D${v}=\" value provided!")
+    endif()
+  endforeach()
+  _ExternalData_link_or_copy("${name}" "${file}")
+else()
+  message(FATAL_ERROR "Unknown ExternalData_ACTION=[${ExternalData_ACTION}]")
+endif()
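
Beyond the basic "Example usage" in the module documentation above, the series and associated-file forms can be used with ExternalData_add_test in the same way. A hedged sketch with made-up test names, file names, and URL template:

    include(ExternalData)
    # The URL template below is illustrative only.
    set(ExternalData_URL_TEMPLATES "http://data.example.org/%(algo)/%(hash)")

    # Series form: one member is referenced, every MyFrames_#.png member is fetched.
    ExternalData_add_test(VTKData
      NAME MySeriesTest
      COMMAND MyExe -i DATA{MyFrames_0.png,:}
      )

    # Associated-file form: fetch MyVolume.raw alongside the named file.
    ExternalData_add_test(VTKData
      NAME MyPairTest
      COMMAND MyExe -i DATA{MyVolume.mhd,MyVolume.raw}
      )

    ExternalData_add_target(VTKData)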
diff --git a/CMake/ExternalData_config.cmake.in b/CMake/ExternalData_config.cmake.in
new file mode 100644
index 0000000..0858f53
--- /dev/null
+++ b/CMake/ExternalData_config.cmake.in
@@ -0,0 +1,4 @@
+set(ExternalData_OBJECT_STORES "@ExternalData_OBJECT_STORES@")
+set(ExternalData_URL_TEMPLATES "@ExternalData_URL_TEMPLATES@")
+set(ExternalData_TIMEOUT_INACTIVITY "@ExternalData_TIMEOUT_INACTIVITY@")
+set(ExternalData_TIMEOUT_ABSOLUTE "@ExternalData_TIMEOUT_ABSOLUTE@")
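
ExternalData_add_target configures this template with configure_file(... @ONLY), so the generated ${target}_config.cmake simply records the current settings. With hypothetical values, the result might look like:

    set(ExternalData_OBJECT_STORES "/path/to/build/ExternalData/Objects")
    set(ExternalData_URL_TEMPLATES "http://data.example.org/%(algo)/%(hash)")
    set(ExternalData_TIMEOUT_INACTIVITY "60")
    set(ExternalData_TIMEOUT_ABSOLUTE "300")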
diff --git a/CMake/FindCg.cmake b/CMake/FindCg.cmake
index d427c78..c2b7fdf 100644
--- a/CMake/FindCg.cmake
+++ b/CMake/FindCg.cmake
@@ -9,7 +9,7 @@
 # CG_COMPILER = full path to cgc (cgc.exe on win32)
 #
 
-# On OSX default to using the framework version of Cg.
+# On OS X default to using the framework version of Cg.
 
 IF (APPLE)
   INCLUDE(${CMAKE_ROOT}/Modules/CMakeFindFrameworks.cmake)
diff --git a/CMake/FindFFMPEG.cmake b/CMake/FindFFMPEG.cmake
index a59df31..24e20f1 100644
--- a/CMake/FindFFMPEG.cmake
+++ b/CMake/FindFFMPEG.cmake
@@ -11,161 +11,155 @@
 # This is useful to do it this way so that we can always add more libraries
 # if needed to FFMPEG_LIBRARIES if ffmpeg ever changes...
 
-# if ffmpeg headers are all in one directory
-FIND_PATH(FFMPEG_INCLUDE_DIR avformat.h
-       PATHS
-       $ENV{FFMPEG_DIR}/include
-       $ENV{OSGDIR}/include
-       $ENV{OSG_ROOT}/include
-       ~/Library/Frameworks
-       /Library/Frameworks
-       /usr/local/include
-       /usr/include
-       /sw/include # Fink
-       /opt/local/include # DarwinPorts
-       /opt/csw/include # Blastwave
-       /opt/include
-       /usr/freeware/include
-       PATH_SUFFIXES ffmpeg
-       DOC "Location of FFMPEG Headers"
+# If the FFMPEG headers are all in one directory.
+find_path(FFMPEG_INCLUDE_DIR avformat.h
+  PATHS
+    $ENV{FFMPEG_DIR}/include
+    $ENV{OSGDIR}/include
+    $ENV{OSG_ROOT}/include
+    ~/Library/Frameworks
+    /Library/Frameworks
+    /usr/local/include
+    /usr/include
+    /sw/include # Fink
+    /opt/local/include # DarwinPorts
+    /opt/csw/include # Blastwave
+    /opt/include
+    /usr/freeware/include
+  PATH_SUFFIXES ffmpeg
+  DOC "Location of FFMPEG Headers"
 )
 
-# if ffmpeg headers are seperated to each of libavformat, libavcodec etc..
-IF( NOT FFMPEG_INCLUDE_DIR )
-  FIND_PATH(FFMPEG_INCLUDE_DIR libavformat/avformat.h
-       PATHS
-       $ENV{FFMPEG_DIR}/include
-       $ENV{OSGDIR}/include
-       $ENV{OSG_ROOT}/include
-       ~/Library/Frameworks
-       /Library/Frameworks
-       /usr/local/include
-       /usr/include
-       /sw/include # Fink
-       /opt/local/include # DarwinPorts
-       /opt/csw/include # Blastwave
-       /opt/include
-       /usr/freeware/include
-       PATH_SUFFIXES ffmpeg
-       DOC "Location of FFMPEG Headers"
-)
-
-ENDIF( NOT FFMPEG_INCLUDE_DIR )
+# If the FFMPEG headers are separated into libavformat, libavcodec, etc.
+if(NOT FFMPEG_INCLUDE_DIR)
+  find_path(FFMPEG_INCLUDE_DIR libavformat/avformat.h
+    PATHS
+      $ENV{FFMPEG_DIR}/include
+      $ENV{OSGDIR}/include
+      $ENV{OSG_ROOT}/include
+      ~/Library/Frameworks
+      /Library/Frameworks
+      /usr/local/include
+      /usr/include
+      /sw/include # Fink
+      /opt/local/include # DarwinPorts
+      /opt/csw/include # Blastwave
+      /opt/include
+      /usr/freeware/include
+    PATH_SUFFIXES ffmpeg
+    DOC "Location of FFMPEG Headers"
+  )
+endif()
 
-# we want the -I include line to use the parent directory of ffmpeg as
-# ffmpeg uses relative includes such as <ffmpeg/avformat.h> or <libavcodec/avformat.h>
+# We want the -I include line to use the parent directory of FFMPEG, since it
+# uses relative includes such as <ffmpeg/avformat.h> or <libavcodec/avformat.h>.
 get_filename_component(FFMPEG_INCLUDE_DIR ${FFMPEG_INCLUDE_DIR} ABSOLUTE)
 
-FIND_LIBRARY(FFMPEG_avformat_LIBRARY avformat
+find_library(FFMPEG_avformat_LIBRARY avformat
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_avcodec_LIBRARY avcodec
+find_library(FFMPEG_avcodec_LIBRARY avcodec
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_avutil_LIBRARY avutil
+find_library(FFMPEG_avutil_LIBRARY avutil
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_vorbis_LIBRARY vorbis
+find_library(FFMPEG_vorbis_LIBRARY vorbis
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_dc1394_LIBRARY dc1394_control
+find_library(FFMPEG_dc1394_LIBRARY dc1394_control
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_vorbisenc_LIBRARY vorbisenc
+find_library(FFMPEG_vorbisenc_LIBRARY vorbisenc
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_theora_LIBRARY theora
+find_library(FFMPEG_theora_LIBRARY theora
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_dts_LIBRARY dts
+find_library(FFMPEG_dts_LIBRARY dts
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_gsm_LIBRARY gsm
+find_library(FFMPEG_gsm_LIBRARY gsm
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_swscale_LIBRARY swscale
+find_library(FFMPEG_swscale_LIBRARY swscale
   /usr/local/lib
   /usr/lib
 )
 
-FIND_LIBRARY(FFMPEG_z_LIBRARY z
+find_library(FFMPEG_z_LIBRARY z
   /usr/local/lib
   /usr/lib
 )
 
-SET(FFMPEG_LIBRARIES)
-IF(FFMPEG_INCLUDE_DIR)
-  IF(FFMPEG_avformat_LIBRARY)
-    IF(FFMPEG_avcodec_LIBRARY)
-      IF(FFMPEG_avutil_LIBRARY)
-        SET( FFMPEG_FOUND "YES" )
-        SET( FFMPEG_BASIC_LIBRARIES
-          ${FFMPEG_avcodec_LIBRARY}
-          ${FFMPEG_avformat_LIBRARY}
-          ${FFMPEG_avutil_LIBRARY}
-          )
-
-        # swscale is always a part of newer ffmpeg distros
-        IF(FFMPEG_swscale_LIBRARY)
-          LIST(APPEND FFMPEG_BASIC_LIBRARIES ${FFMPEG_swscale_LIBRARY})
-        ENDIF(FFMPEG_swscale_LIBRARY)
-
-        SET(FFMPEG_LIBRARIES ${FFMPEG_BASIC_LIBRARIES})
-
-        IF(FFMPEG_vorbis_LIBRARY)
-          LIST(APPEND FFMPEG_LIBRARIES ${FFMPEG_vorbis_LIBRARY})
-        ENDIF(FFMPEG_vorbis_LIBRARY)
-
-        IF(FFMPEG_dc1394_LIBRARY)
-          LIST(APPEND FFMPEG_LIBRARIES ${FFMPEG_dc1394_LIBRARY})
-        ENDIF(FFMPEG_dc1394_LIBRARY)
-
-        IF(FFMPEG_vorbisenc_LIBRARY)
-          LIST(APPEND FFMPEG_LIBRARIES ${FFMPEG_vorbisenc_LIBRARY})
-        ENDIF(FFMPEG_vorbisenc_LIBRARY)
-
-        IF(FFMPEG_theora_LIBRARY)
-          LIST(APPEND FFMPEG_LIBRARIES ${FFMPEG_theora_LIBRARY})
-        ENDIF(FFMPEG_theora_LIBRARY)
-
-        IF(FFMPEG_dts_LIBRARY)
-          LIST(APPEND FFMPEG_LIBRARIES ${FFMPEG_dts_LIBRARY})
-        ENDIF(FFMPEG_dts_LIBRARY)
-
-        IF(FFMPEG_gsm_LIBRARY)
-          LIST(APPEND FFMPEG_LIBRARIES ${FFMPEG_gsm_LIBRARY})
-        ENDIF(FFMPEG_gsm_LIBRARY)
-
-        IF(FFMPEG_z_LIBRARY)
-          LIST(APPEND FFMPEG_LIBRARIES ${FFMPEG_z_LIBRARY})
-        ENDIF(FFMPEG_z_LIBRARY)
-
-        SET(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} CACHE INTERNAL "All presently found FFMPEG libraries.")
-
-      ENDIF(FFMPEG_avutil_LIBRARY)
-    ENDIF(FFMPEG_avcodec_LIBRARY)
-  ENDIF(FFMPEG_avformat_LIBRARY)
-ENDIF(FFMPEG_INCLUDE_DIR)
-
-MARK_AS_ADVANCED(
+unset(FFMPEG_LIBRARIES)
+if(FFMPEG_INCLUDE_DIR AND FFMPEG_avformat_LIBRARY AND FFMPEG_avcodec_LIBRARY
+  AND FFMPEG_avutil_LIBRARY)
+  set(FFMPEG_FOUND TRUE)
+  set(FFMPEG_BASIC_LIBRARIES
+    ${FFMPEG_avcodec_LIBRARY}
+    ${FFMPEG_avformat_LIBRARY}
+    ${FFMPEG_avutil_LIBRARY}
+  )
+
+  # swscale is always a part of newer ffmpeg distros
+  if(FFMPEG_swscale_LIBRARY)
+    list(APPEND FFMPEG_BASIC_LIBRARIES ${FFMPEG_swscale_LIBRARY})
+  endif()
+
+  set(FFMPEG_LIBRARIES ${FFMPEG_BASIC_LIBRARIES})
+
+  if(FFMPEG_vorbis_LIBRARY)
+    list(APPEND FFMPEG_LIBRARIES ${FFMPEG_vorbis_LIBRARY})
+  endif()
+
+  if(FFMPEG_dc1394_LIBRARY)
+    list(APPEND FFMPEG_LIBRARIES ${FFMPEG_dc1394_LIBRARY})
+  endif()
+
+  if(FFMPEG_vorbisenc_LIBRARY)
+    list(APPEND FFMPEG_LIBRARIES ${FFMPEG_vorbisenc_LIBRARY})
+  endif()
+
+  if(FFMPEG_theora_LIBRARY)
+    list(APPEND FFMPEG_LIBRARIES ${FFMPEG_theora_LIBRARY})
+  endif()
+
+  if(FFMPEG_dts_LIBRARY)
+    list(APPEND FFMPEG_LIBRARIES ${FFMPEG_dts_LIBRARY})
+  endif()
+
+  if(FFMPEG_gsm_LIBRARY)
+    list(APPEND FFMPEG_LIBRARIES ${FFMPEG_gsm_LIBRARY})
+  endif()
+
+  if(FFMPEG_z_LIBRARY)
+    list(APPEND FFMPEG_LIBRARIES ${FFMPEG_z_LIBRARY})
+  endif()
+
+  set(FFMPEG_LIBRARIES ${FFMPEG_LIBRARIES} CACHE INTERNAL "All presently found FFMPEG libraries.")
+
+endif()
+
+mark_as_advanced(
   FFMPEG_INCLUDE_DIR
   FFMPEG_avformat_LIBRARY
   FFMPEG_avcodec_LIBRARY
diff --git a/CMake/FindHDF5.cmake b/CMake/FindHDF5.cmake
new file mode 100644
index 0000000..2ca76ed
--- /dev/null
+++ b/CMake/FindHDF5.cmake
@@ -0,0 +1,16 @@
+# This extends CMake's FindHDF5.cmake to append the MPI include paths and
+# libraries to the HDF5 ones when HDF5_IS_PARALLEL is ON
+# (BUG #0014363).
+
+# include the default FindHDF5.cmake.
+include(${CMAKE_ROOT}/Modules/FindHDF5.cmake)
+
+if(HDF5_FOUND AND (HDF5_IS_PARALLEL OR HDF5_ENABLE_PARALLEL))
+  include(vtkMPI)
+  if(MPI_C_INCLUDE_PATH)
+    list(APPEND HDF5_INCLUDE_DIRS ${MPI_C_INCLUDE_PATH})
+  endif()
+  if(MPI_C_LIBRARIES)
+    list(APPEND HDF5_LIBRARIES ${MPI_C_LIBRARIES})
+  endif()
+endif()
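
A brief sketch of how a module might consume the wrapper above; the target and source names are hypothetical:

    find_package(HDF5 REQUIRED)

    include_directories(${HDF5_INCLUDE_DIRS})

    # With a parallel HDF5 the MPI include paths and libraries are already
    # folded into these variables by the wrapper above.
    add_executable(my_hdf5_tool my_hdf5_tool.c)
    target_link_libraries(my_hdf5_tool ${HDF5_LIBRARIES})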
diff --git a/CMake/FindJsonCpp.cmake b/CMake/FindJsonCpp.cmake
new file mode 100644
index 0000000..e4e7ab4
--- /dev/null
+++ b/CMake/FindJsonCpp.cmake
@@ -0,0 +1,26 @@
+# Find the JsonCpp include files and library.
+#
+# JsonCpp is a C++ library that can read/write JSON (JavaScript Object Notation)
+# documents. See http://jsoncpp.sourceforge.net/ for more details.
+#
+# This module defines:
+# JsonCpp_INCLUDE_DIRS - where to find json/json.h
+# JsonCpp_LIBRARIES - the libraries to link against to use JsonCpp
+# JsonCpp_FOUND - if false the library was not found.
+
+find_path(JsonCpp_INCLUDE_DIR "json/json.h"
+  PATH_SUFFIXES "jsoncpp"
+  DOC "Specify the JsonCpp include directory here")
+
+find_library(JsonCpp_LIBRARY
+  NAMES jsoncpp
+  PATHS
+  DOC "Specify the JsonCpp library here")
+set(JsonCpp_INCLUDE_DIRS ${JsonCpp_INCLUDE_DIR})
+set(JsonCpp_LIBRARIES "${JsonCpp_LIBRARY}")
+
+include(FindPackageHandleStandardArgs)
+find_package_handle_standard_args(JsonCpp DEFAULT_MSG
+  JsonCpp_LIBRARIES JsonCpp_INCLUDE_DIRS)
+
+mark_as_advanced(JsonCpp_INCLUDE_DIR JsonCpp_LIBRARY)
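
A minimal consumption sketch for the variables this module defines; the target and source names are made up:

    find_package(JsonCpp REQUIRED)

    include_directories(${JsonCpp_INCLUDE_DIRS})
    add_executable(json_demo json_demo.cxx)
    target_link_libraries(json_demo ${JsonCpp_LIBRARIES})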
diff --git a/CMake/FindNetCDF.cmake b/CMake/FindNetCDF.cmake
new file mode 100644
index 0000000..561f81e
--- /dev/null
+++ b/CMake/FindNetCDF.cmake
@@ -0,0 +1,102 @@
+# - Find NetCDF
+# Find the native NetCDF includes and library
+#
+#  NETCDF_INCLUDE_DIR  - user modifiable choice of where netcdf headers are
+#  NETCDF_LIBRARY      - user modifiable choice of where netcdf libraries are
+#
+# Your package can require certain interfaces to be FOUND by setting these
+#
+#  NETCDF_CXX         - require the C++ interface and link the C++ library
+#  NETCDF_F77         - require the F77 interface and link the fortran library
+#  NETCDF_F90         - require the F90 interface and link the fortran library
+#
+# Or equivalently by calling FindNetCDF with a COMPONENTS argument containing one or
+# more of "CXX;F77;F90".
+#
+# When interfaces are requested the user has access to interface specific hints:
+#
+#  NETCDF_${LANG}_INCLUDE_DIR - where to search for interface header files
+#  NETCDF_${LANG}_LIBRARY     - where to search for interface libraries
+#
+# This module returns these variables for the rest of the project to use.
+#
+#  NETCDF_FOUND          - True if NetCDF found including required interfaces (see below)
+#  NETCDF_LIBRARIES      - All netcdf related libraries.
+#  NETCDF_INCLUDE_DIRS   - All directories to include.
+#  NETCDF_HAS_INTERFACES - Whether requested interfaces were found or not.
+#  NETCDF_${LANG}_INCLUDE_DIRS/NETCDF_${LANG}_LIBRARIES - C/C++/F77/F90 only interface
+#
+# Normal usage would be:
+#  set (NETCDF_F90 "YES")
+#  find_package (NetCDF REQUIRED)
+#  target_link_libraries (uses_everything ${NETCDF_LIBRARIES})
+#  target_link_libraries (only_uses_f90 ${NETCDF_F90_LIBRARIES})
+
+#search starting from user editable cache var
+if (NETCDF_INCLUDE_DIR AND NETCDF_LIBRARY)
+  # Already in cache, be silent
+  set (NETCDF_FIND_QUIETLY TRUE)
+endif ()
+
+find_path (NETCDF_INCLUDE_DIR netcdf.h
+  HINTS NETCDF_DIR ENV NETCDF_DIR)
+mark_as_advanced (NETCDF_INCLUDE_DIR)
+set (NETCDF_C_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR})
+
+find_library (NETCDF_LIBRARY NAMES netcdf)
+mark_as_advanced (NETCDF_LIBRARY)
+set (NETCDF_C_LIBRARIES ${NETCDF_LIBRARY})
+
+#start finding requested language components
+set (NetCDF_libs "")
+get_filename_component (NetCDF_lib_dirs "${NETCDF_LIBRARY}" PATH)
+set (NETCDF_HAS_INTERFACES "YES") # will be set to NO if we're missing any interfaces
+
+macro (NetCDF_check_interface lang header libs)
+  if (NETCDF_${lang})
+    #search starting from user modifiable cache var
+    find_path (NETCDF_${lang}_INCLUDE_DIR NAMES ${header}
+      HINTS "${NETCDF_INCLUDE_DIR}" NO_DEFAULT_PATH)
+    find_library (NETCDF_${lang}_LIBRARY NAMES ${libs}
+      HINTS "${NetCDF_lib_dirs}" NO_DEFAULT_PATH)
+    mark_as_advanced (NETCDF_${lang}_INCLUDE_DIR NETCDF_${lang}_LIBRARY)
+
+    #export to internal vars that the rest of the project can use directly
+    set (NETCDF_${lang}_LIBRARIES ${NETCDF_${lang}_LIBRARY})
+    set (NETCDF_${lang}_INCLUDE_DIRS ${NETCDF_${lang}_INCLUDE_DIR})
+
+    if (NETCDF_${lang}_INCLUDE_DIR AND NETCDF_${lang}_LIBRARY)
+      list (APPEND NetCDF_libs ${NETCDF_${lang}_LIBRARY})
+    else ()
+      set (NETCDF_HAS_INTERFACES "NO")
+      message (STATUS "Failed to find NetCDF interface for ${lang}")
+    endif ()
+  endif ()
+endmacro (NetCDF_check_interface)
+
+list (FIND NetCDF_FIND_COMPONENTS "CXX" _nextcomp)
+if (_nextcomp GREATER -1)
+  set (NETCDF_CXX 1)
+endif ()
+list (FIND NetCDF_FIND_COMPONENTS "F77" _nextcomp)
+if (_nextcomp GREATER -1)
+  set (NETCDF_F77 1)
+endif ()
+list (FIND NetCDF_FIND_COMPONENTS "F90" _nextcomp)
+if (_nextcomp GREATER -1)
+  set (NETCDF_F90 1)
+endif ()
+NetCDF_check_interface (CXX netcdfcpp.h netcdf_c++)
+NetCDF_check_interface (F77 netcdf.inc  netcdff)
+NetCDF_check_interface (F90 netcdf.mod  netcdff)
+
+#export accumulated results to internal vars that the rest of the project can depend on
+list (APPEND NetCDF_libs "${NETCDF_C_LIBRARIES}")
+set (NETCDF_LIBRARIES ${NetCDF_libs})
+set (NETCDF_INCLUDE_DIRS ${NETCDF_INCLUDE_DIR})
+
+# handle the QUIETLY and REQUIRED arguments and set NETCDF_FOUND to TRUE if
+# all listed variables are TRUE
+include (FindPackageHandleStandardArgs)
+find_package_handle_standard_args (NetCDF
+  DEFAULT_MSG NETCDF_LIBRARIES NETCDF_INCLUDE_DIRS NETCDF_HAS_INTERFACES)
diff --git a/CMake/FindOGGTHEORA.cmake b/CMake/FindOGGTHEORA.cmake
index 3917ed6..0cf6525 100644
--- a/CMake/FindOGGTHEORA.cmake
+++ b/CMake/FindOGGTHEORA.cmake
@@ -24,9 +24,9 @@ SET(OGGTHEORA_INCLUDE_DIRS
 SET(OGGTHEORA_INCLUDE_DIR ${OGGTHEORA_INCLUDE_DIRS})
 
 SET(OGGTHEORA_LIBRARIES
-  ${OGGTHEORA_ogg_LIBRARY}
   ${OGGTHEORA_theoraenc_LIBRARY}
   ${OGGTHEORA_theoradec_LIBRARY}
+  ${OGGTHEORA_ogg_LIBRARY}
   )
 #HACK multiple libraries
 SET(OGGTHEORA_LIBRARY ${OGGTHEORA_LIBRARIES})
diff --git a/CMake/FindPythonModules.cmake b/CMake/FindPythonModules.cmake
new file mode 100644
index 0000000..b186afe
--- /dev/null
+++ b/CMake/FindPythonModules.cmake
@@ -0,0 +1,27 @@
+
+# Check whether a Python module is available by name, and if it is,
+# define a variable in the internal cache.
+macro(_find_python_module_internal module_name)
+  # Check for presence of the module.  Even though we don't use all the
+  # variable names set here, assigning them suppresses their output in CMake.
+  execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "import ${module_name}"
+    RESULT_VARIABLE IMPORT_${module_name}_EXITCODE
+    OUTPUT_VARIABLE IMPORT_${module_name}_OUTPUT
+    ERROR_VARIABLE IMPORT_${module_name}_ERROR
+    )
+  if(${IMPORT_${module_name}_EXITCODE} EQUAL 0)
+    set(PYTHON_MODULE_${module_name}_FOUND TRUE
+      CACHE BOOL "Whether or not this Python module is present")
+  else()
+    set(PYTHON_MODULE_${module_name}_FOUND FALSE
+      CACHE BOOL "Whether or not this Python module is present")
+  endif()
+endmacro()
+
+# Macro to simplify checking if a Python module is available
+macro(find_python_module module_name result)
+  if(NOT DEFINED PYTHON_MODULE_${module_name}_FOUND)
+    _find_python_module_internal(${module_name})
+  endif()
+  set(${result} ${PYTHON_MODULE_${module_name}_FOUND})
+endmacro()
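
A sketch of how these macros might be invoked, assuming PYTHON_EXECUTABLE has already been set (e.g. by FindPythonInterp); the module name numpy is only an example.

    # Minimal sketch of find_python_module usage.
    include(FindPythonModules)
    find_python_module(numpy HAVE_NUMPY)   # caches PYTHON_MODULE_numpy_FOUND
    if(HAVE_NUMPY)
      message(STATUS "numpy found; numpy-dependent tests can be enabled")
    endif()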
diff --git a/CMake/FindTBB.cmake b/CMake/FindTBB.cmake
new file mode 100644
index 0000000..ead38d8
--- /dev/null
+++ b/CMake/FindTBB.cmake
@@ -0,0 +1,283 @@
+# Locate Intel Threading Building Blocks include paths and libraries
+# FindTBB.cmake can be found at https://code.google.com/p/findtbb/
+# Written by Hannes Hofmann <hannes.hofmann _at_ informatik.uni-erlangen.de>
+# Improvements by Gino van den Bergen <gino _at_ dtecta.com>,
+#   Florian Uhlig <F.Uhlig _at_ gsi.de>,
+#   Jiri Marsik <jiri.marsik89 _at_ gmail.com>
+
+# The MIT License
+#
+# Copyright (c) 2011 Hannes Hofmann
+#
+# Permission is hereby granted, free of charge, to any person obtaining a copy
+# of this software and associated documentation files (the "Software"), to deal
+# in the Software without restriction, including without limitation the rights
+# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+# copies of the Software, and to permit persons to whom the Software is
+# furnished to do so, subject to the following conditions:
+#
+# The above copyright notice and this permission notice shall be included in
+# all copies or substantial portions of the Software.
+#
+# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+# THE SOFTWARE.
+
+# GvdB: This module uses the environment variable TBB_ARCH_PLATFORM which defines architecture and compiler.
+#   e.g. "ia32/vc8" or "em64t/cc4.1.0_libc2.4_kernel2.6.16.21"
+#   TBB_ARCH_PLATFORM is set by the build script tbbvars[.bat|.sh|.csh], which can be found
+#   in the TBB installation directory (TBB_INSTALL_DIR).
+#
+# GvdB: Mac OS X distribution places libraries directly in lib directory.
+#
+# For backwards compatibility, you may explicitly set the CMake variables TBB_ARCHITECTURE and TBB_COMPILER.
+# TBB_ARCHITECTURE [ ia32 | em64t | itanium ]
+#   which architecture to use
+# TBB_COMPILER e.g. vc9 or cc3.2.3_libc2.3.2_kernel2.4.21 or cc4.0.1_os10.4.9
+#   which compiler to use (detected automatically on Windows)
+
+# This module respects
+# TBB_INSTALL_DIR or $ENV{TBB21_INSTALL_DIR} or $ENV{TBB_INSTALL_DIR}
+
+# This module defines
+# TBB_INCLUDE_DIRS, where to find task_scheduler_init.h, etc.
+# TBB_LIBRARY_DIRS, where to find libtbb, libtbbmalloc
+# TBB_DEBUG_LIBRARY_DIRS, where to find libtbb_debug, libtbbmalloc_debug
+# TBB_INSTALL_DIR, the base TBB install directory
+# TBB_LIBRARIES, the libraries to link against to use TBB.
+# TBB_DEBUG_LIBRARIES, the libraries to link against to use TBB with debug symbols.
+# TBB_FOUND, If false, don't try to use TBB.
+# TBB_INTERFACE_VERSION, as defined in tbb/tbb_stddef.h
+
+
+if (WIN32)
+    # has em64t/vc8 em64t/vc9
+    # has ia32/vc7.1 ia32/vc8 ia32/vc9
+    set(_TBB_DEFAULT_INSTALL_DIR "C:/Program Files/Intel/TBB" "C:/Program Files (x86)/Intel/TBB")
+    set(_TBB_LIB_NAME "tbb")
+    set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
+    set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
+    set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
+    if (MSVC71)
+        set (_TBB_COMPILER "vc7.1")
+    endif(MSVC71)
+    if (MSVC80)
+        set(_TBB_COMPILER "vc8")
+    endif(MSVC80)
+    if (MSVC90)
+        set(_TBB_COMPILER "vc9")
+    endif(MSVC90)
+    if(MSVC10)
+        set(_TBB_COMPILER "vc10")
+    endif(MSVC10)
+    # Todo: add other Windows compilers such as ICL.
+    set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
+endif (WIN32)
+
+if (UNIX)
+    if (APPLE)
+        # MAC
+        set(_TBB_DEFAULT_INSTALL_DIR "/Library/Frameworks/Intel_TBB.framework/Versions")
+        # libs: libtbb.dylib, libtbbmalloc.dylib, *_debug
+        set(_TBB_LIB_NAME "tbb")
+        set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
+        set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
+        set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
+        # default flavor on apple: ia32/cc4.0.1_os10.4.9
+        # Jiri: There is no reason to presume there is only one flavor and
+        #       that user's setting of variables should be ignored.
+        if(NOT TBB_COMPILER)
+            set(_TBB_COMPILER "cc4.0.1_os10.4.9")
+        else(NOT TBB_COMPILER)
+            set(_TBB_COMPILER ${TBB_COMPILER})
+        endif(NOT TBB_COMPILER)
+        if(NOT TBB_ARCHITECTURE)
+            set(_TBB_ARCHITECTURE "ia32")
+        else(NOT TBB_ARCHITECTURE)
+            set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
+        endif(NOT TBB_ARCHITECTURE)
+    else (APPLE)
+        # LINUX
+        set(_TBB_DEFAULT_INSTALL_DIR "/opt/intel/tbb" "/usr/local/include" "/usr/include")
+        set(_TBB_LIB_NAME "tbb")
+        set(_TBB_LIB_MALLOC_NAME "${_TBB_LIB_NAME}malloc")
+        set(_TBB_LIB_DEBUG_NAME "${_TBB_LIB_NAME}_debug")
+        set(_TBB_LIB_MALLOC_DEBUG_NAME "${_TBB_LIB_MALLOC_NAME}_debug")
+        # has em64t/cc3.2.3_libc2.3.2_kernel2.4.21 em64t/cc3.3.3_libc2.3.3_kernel2.6.5 em64t/cc3.4.3_libc2.3.4_kernel2.6.9 em64t/cc4.1.0_libc2.4_kernel2.6.16.21
+        # has ia32/*
+        # has itanium/*
+        set(_TBB_COMPILER ${TBB_COMPILER})
+        set(_TBB_ARCHITECTURE ${TBB_ARCHITECTURE})
+    endif (APPLE)
+endif (UNIX)
+
+if (CMAKE_SYSTEM MATCHES "SunOS.*")
+# SUN
+# not yet supported
+# has em64t/cc3.4.3_kernel5.10
+# has ia32/*
+endif (CMAKE_SYSTEM MATCHES "SunOS.*")
+
+
+#-- Clear the public variables
+set (TBB_FOUND "NO")
+
+
+#-- Find TBB install dir and set ${_TBB_INSTALL_DIR} and cached ${TBB_INSTALL_DIR}
+# first: use CMake variable TBB_INSTALL_DIR
+if (TBB_INSTALL_DIR)
+    set (_TBB_INSTALL_DIR ${TBB_INSTALL_DIR})
+endif (TBB_INSTALL_DIR)
+# second: use environment variable
+if (NOT _TBB_INSTALL_DIR)
+    if (NOT "$ENV{TBB_INSTALL_DIR}" STREQUAL "")
+        set (_TBB_INSTALL_DIR $ENV{TBB_INSTALL_DIR})
+    endif (NOT "$ENV{TBB_INSTALL_DIR}" STREQUAL "")
+    # Intel recommends setting TBB21_INSTALL_DIR
+    if (NOT "$ENV{TBB21_INSTALL_DIR}" STREQUAL "")
+        set (_TBB_INSTALL_DIR $ENV{TBB21_INSTALL_DIR})
+    endif (NOT "$ENV{TBB21_INSTALL_DIR}" STREQUAL "")
+    if (NOT "$ENV{TBB22_INSTALL_DIR}" STREQUAL "")
+        set (_TBB_INSTALL_DIR $ENV{TBB22_INSTALL_DIR})
+    endif (NOT "$ENV{TBB22_INSTALL_DIR}" STREQUAL "")
+    if (NOT "$ENV{TBB30_INSTALL_DIR}" STREQUAL "")
+        set (_TBB_INSTALL_DIR $ENV{TBB30_INSTALL_DIR})
+    endif (NOT "$ENV{TBB30_INSTALL_DIR}" STREQUAL "")
+endif (NOT _TBB_INSTALL_DIR)
+# third: try to find path automatically
+if (NOT _TBB_INSTALL_DIR)
+    if (_TBB_DEFAULT_INSTALL_DIR)
+        set (_TBB_INSTALL_DIR ${_TBB_DEFAULT_INSTALL_DIR})
+    endif (_TBB_DEFAULT_INSTALL_DIR)
+endif (NOT _TBB_INSTALL_DIR)
+# sanity check
+if (NOT _TBB_INSTALL_DIR)
+    message ("ERROR: Unable to find Intel TBB install directory. ${_TBB_INSTALL_DIR}")
+else (NOT _TBB_INSTALL_DIR)
+# finally: set the cached CMake variable TBB_INSTALL_DIR
+if (NOT TBB_INSTALL_DIR)
+    set (TBB_INSTALL_DIR ${_TBB_INSTALL_DIR} CACHE PATH "Intel TBB install directory")
+    mark_as_advanced(TBB_INSTALL_DIR)
+endif (NOT TBB_INSTALL_DIR)
+
+
+#-- A macro to rewrite the paths of the library. This is necessary because
+#   find_library() always found the em64t/vc9 version of the TBB libs.
+macro(TBB_CORRECT_LIB_DIR var_name)
+#    if (NOT "${_TBB_ARCHITECTURE}" STREQUAL "em64t")
+        string(REPLACE em64t "${_TBB_ARCHITECTURE}" ${var_name} ${${var_name}})
+#    endif (NOT "${_TBB_ARCHITECTURE}" STREQUAL "em64t")
+    string(REPLACE ia32 "${_TBB_ARCHITECTURE}" ${var_name} ${${var_name}})
+    string(REPLACE vc7.1 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
+    string(REPLACE vc8 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
+    string(REPLACE vc9 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
+    string(REPLACE vc10 "${_TBB_COMPILER}" ${var_name} ${${var_name}})
+endmacro(TBB_CORRECT_LIB_DIR var_content)
+
+
+#-- Look for include directory and set ${TBB_INCLUDE_DIR}
+set (TBB_INC_SEARCH_DIR ${_TBB_INSTALL_DIR}/include)
+# Jiri: tbbvars now sets the CPATH environment variable to the directory
+#       containing the headers.
+find_path(TBB_INCLUDE_DIR
+    tbb/task_scheduler_init.h
+    PATHS ${TBB_INC_SEARCH_DIR} ENV CPATH
+)
+mark_as_advanced(TBB_INCLUDE_DIR)
+
+
+#-- Look for libraries
+# GvdB: $ENV{TBB_ARCH_PLATFORM} is set by the build script tbbvars[.bat|.sh|.csh]
+if (NOT $ENV{TBB_ARCH_PLATFORM} STREQUAL "")
+    set (_TBB_LIBRARY_DIR
+         ${_TBB_INSTALL_DIR}/lib/$ENV{TBB_ARCH_PLATFORM}
+         ${_TBB_INSTALL_DIR}/$ENV{TBB_ARCH_PLATFORM}/lib
+        )
+endif (NOT $ENV{TBB_ARCH_PLATFORM} STREQUAL "")
+# Jiri: This block isn't mutually exclusive with the previous one
+#       (hence no else); instead, I test whether the user really specified
+#       the variables in question.
+if ((NOT ${TBB_ARCHITECTURE} STREQUAL "") AND (NOT ${TBB_COMPILER} STREQUAL ""))
+    # HH: deprecated
+    message(STATUS "[Warning] FindTBB.cmake: The use of TBB_ARCHITECTURE and TBB_COMPILER is deprecated and may not be supported in future versions. Please set \$ENV{TBB_ARCH_PLATFORM} (using tbbvars.[bat|csh|sh]).")
+    # Jiri: It doesn't hurt to look in more places, so I store the hints from
+    #       ENV{TBB_ARCH_PLATFORM} and the TBB_ARCHITECTURE and TBB_COMPILER
+    #       variables and search them both.
+    set (_TBB_LIBRARY_DIR "${_TBB_INSTALL_DIR}/${_TBB_ARCHITECTURE}/${_TBB_COMPILER}/lib" ${_TBB_LIBRARY_DIR})
+endif ((NOT ${TBB_ARCHITECTURE} STREQUAL "") AND (NOT ${TBB_COMPILER} STREQUAL ""))
+
+# GvdB: Mac OS X distribution places libraries directly in lib directory.
+list(APPEND _TBB_LIBRARY_DIR ${_TBB_INSTALL_DIR}/lib)
+
+# Jiri: No reason not to check the default paths. From recent versions,
+#       tbbvars has started exporting the LIBRARY_PATH and LD_LIBRARY_PATH
+#       variables, which now point to the directories of the lib files.
+#       It makes more sense to use ${_TBB_LIBRARY_DIR} as a HINTS
+#       argument instead of the implicit PATHS, as it isn't hard-coded
+#       but computed by system introspection. Searching the LIBRARY_PATH
+#       and LD_LIBRARY_PATH environment variables is even more important
+#       now that tbbvars no longer exports TBB_ARCH_PLATFORM, and it
+#       facilitates the use of TBB built from sources.
+find_library(TBB_LIBRARY ${_TBB_LIB_NAME} HINTS ${_TBB_LIBRARY_DIR}
+        PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
+find_library(TBB_MALLOC_LIBRARY ${_TBB_LIB_MALLOC_NAME} HINTS ${_TBB_LIBRARY_DIR}
+        PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
+
+#Extract path from TBB_LIBRARY name
+get_filename_component(TBB_LIBRARY_DIR ${TBB_LIBRARY} PATH)
+
+#TBB_CORRECT_LIB_DIR(TBB_LIBRARY)
+#TBB_CORRECT_LIB_DIR(TBB_MALLOC_LIBRARY)
+mark_as_advanced(TBB_LIBRARY TBB_MALLOC_LIBRARY)
+
+#-- Look for debug libraries
+# Jiri: Changed the same way as for the release libraries.
+find_library(TBB_LIBRARY_DEBUG ${_TBB_LIB_DEBUG_NAME} HINTS ${_TBB_LIBRARY_DIR}
+        PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
+find_library(TBB_MALLOC_LIBRARY_DEBUG ${_TBB_LIB_MALLOC_DEBUG_NAME} HINTS ${_TBB_LIBRARY_DIR}
+        PATHS ENV LIBRARY_PATH ENV LD_LIBRARY_PATH)
+
+# Jiri: Self-built TBB stores the debug libraries in a separate directory.
+#       Extract path from TBB_LIBRARY_DEBUG name
+get_filename_component(TBB_LIBRARY_DEBUG_DIR ${TBB_LIBRARY_DEBUG} PATH)
+
+#TBB_CORRECT_LIB_DIR(TBB_LIBRARY_DEBUG)
+#TBB_CORRECT_LIB_DIR(TBB_MALLOC_LIBRARY_DEBUG)
+mark_as_advanced(TBB_LIBRARY_DEBUG TBB_MALLOC_LIBRARY_DEBUG)
+
+
+if (TBB_INCLUDE_DIR)
+    if (TBB_LIBRARY)
+        set (TBB_FOUND "YES")
+        set (TBB_LIBRARIES ${TBB_LIBRARY} ${TBB_MALLOC_LIBRARY} ${TBB_LIBRARIES})
+        set (TBB_DEBUG_LIBRARIES ${TBB_LIBRARY_DEBUG} ${TBB_MALLOC_LIBRARY_DEBUG} ${TBB_DEBUG_LIBRARIES})
+        set (TBB_INCLUDE_DIRS ${TBB_INCLUDE_DIR} CACHE PATH "TBB include directory" FORCE)
+        set (TBB_LIBRARY_DIRS ${TBB_LIBRARY_DIR} CACHE PATH "TBB library directory" FORCE)
+        # Jiri: Self-built TBB stores the debug libraries in a separate directory.
+        set (TBB_DEBUG_LIBRARY_DIRS ${TBB_LIBRARY_DEBUG_DIR} CACHE PATH "TBB debug library directory" FORCE)
+        mark_as_advanced(TBB_INCLUDE_DIRS TBB_LIBRARY_DIRS TBB_DEBUG_LIBRARY_DIRS TBB_LIBRARIES TBB_DEBUG_LIBRARIES)
+        message(STATUS "Found Intel TBB")
+    endif (TBB_LIBRARY)
+endif (TBB_INCLUDE_DIR)
+
+if (NOT TBB_FOUND)
+    message("ERROR: Intel TBB NOT found!")
+    message(STATUS "Looked for Threading Building Blocks in ${_TBB_INSTALL_DIR}")
+    # do only throw fatal, if this pkg is REQUIRED
+    if (TBB_FIND_REQUIRED)
+        message(FATAL_ERROR "Could NOT find TBB library.")
+    endif (TBB_FIND_REQUIRED)
+endif (NOT TBB_FOUND)
+
+endif (NOT _TBB_INSTALL_DIR)
+
+if (TBB_FOUND)
+        set(TBB_INTERFACE_VERSION 0)
+        FILE(READ "${TBB_INCLUDE_DIRS}/tbb/tbb_stddef.h" _TBB_VERSION_CONTENTS)
+        STRING(REGEX REPLACE ".*#define TBB_INTERFACE_VERSION ([0-9]+).*" "\\1" TBB_INTERFACE_VERSION "${_TBB_VERSION_CONTENTS}")
+        set(TBB_INTERFACE_VERSION "${TBB_INTERFACE_VERSION}")
+endif (TBB_FOUND)
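
A rough usage sketch for this module; the my_tbb_filter target is hypothetical, and TBB_INSTALL_DIR (or the tbbvars environment) is assumed to be set as described above.

    # Minimal sketch of FindTBB usage.
    find_package(TBB REQUIRED)
    include_directories(${TBB_INCLUDE_DIRS})
    link_directories(${TBB_LIBRARY_DIRS})
    add_library(my_tbb_filter my_tbb_filter.cxx)
    target_link_libraries(my_tbb_filter ${TBB_LIBRARIES})
    message(STATUS "TBB interface version: ${TBB_INTERFACE_VERSION}")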
diff --git a/CMake/FindThrust.cmake b/CMake/FindThrust.cmake
new file mode 100644
index 0000000..5e69af2
--- /dev/null
+++ b/CMake/FindThrust.cmake
@@ -0,0 +1,65 @@
+##=============================================================================
+##
+##  Copyright (c) Kitware, Inc.
+##  All rights reserved.
+##  See LICENSE.txt for details.
+##
+##  This software is distributed WITHOUT ANY WARRANTY; without even
+##  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+##  PURPOSE.  See the above copyright notice for more information.
+##
+##  Copyright 2012 Sandia Corporation.
+##  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+##  the U.S. Government retains certain rights in this software.
+##
+##=============================================================================
+
+#
+# FindThrust
+#
+# This module finds the Thrust header files and extracts their version.  It
+# sets the following variables.
+#
+# THRUST_INCLUDE_DIR -  Include directory for thrust header files.  (All header
+#                       files will actually be in the thrust subdirectory.)
+# THRUST_VERSION -      Version of thrust in the form "major.minor.patch".
+#
+
+find_path( THRUST_INCLUDE_DIR
+  HINTS
+    /usr/include/cuda
+    /usr/local/include
+    /usr/local/cuda/include
+    ${CUDA_INCLUDE_DIRS}
+  NAMES thrust/version.h
+  DOC "Thrust headers"
+  )
+if( THRUST_INCLUDE_DIR )
+  list( REMOVE_DUPLICATES THRUST_INCLUDE_DIR )
+  include_directories( ${THRUST_INCLUDE_DIR} )
+endif( THRUST_INCLUDE_DIR )
+
+# Find thrust version
+file( STRINGS ${THRUST_INCLUDE_DIR}/thrust/version.h
+  version
+  REGEX "#define THRUST_VERSION[ \t]+([0-9x]+)"
+  )
+string( REGEX REPLACE
+  "#define THRUST_VERSION[ \t]+"
+  ""
+  version
+  "${version}"
+  )
+
+string( REGEX MATCH "^[0-9]" major ${version} )
+string( REGEX REPLACE "^${major}00" "" version "${version}" )
+string( REGEX MATCH "^[0-9]" minor ${version} )
+string( REGEX REPLACE "^${minor}0" "" version "${version}" )
+set( THRUST_VERSION "${major}.${minor}.${version}")
+
+# Check for required components
+include( FindPackageHandleStandardArgs )
+find_package_handle_standard_args( Thrust
+  REQUIRED_VARS THRUST_INCLUDE_DIR
+  VERSION_VAR THRUST_VERSION
+  )
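
A minimal sketch of consuming this module; since Thrust is header-only, only the include directory matters. The version argument is illustrative.

    # Minimal sketch of FindThrust usage; the module itself already calls
    # include_directories() on THRUST_INCLUDE_DIR when the headers are found.
    find_package(Thrust 1.6 REQUIRED)        # version check via THRUST_VERSION
    message(STATUS "Found Thrust ${THRUST_VERSION} in ${THRUST_INCLUDE_DIR}")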
diff --git a/CMake/FindXKaapi.cmake b/CMake/FindXKaapi.cmake
new file mode 100644
index 0000000..94e9568
--- /dev/null
+++ b/CMake/FindXKaapi.cmake
@@ -0,0 +1,55 @@
+# Try to find (x)Kaapi
+# Once done, this will define
+#
+#  XKAAPI_FOUND - system has (x)Kaapi
+#  XKAAPI_INCLUDE_DIRS - the (x)Kaapi include directories
+#  XKAAPI_LIBRARIES - link these to use (x)Kaapi
+
+set(XKAAPI_HOME $ENV{XKAAPI_HOME} CACHE PATH "Path to the (x)Kaapi install dir")
+
+find_package(PkgConfig)
+if(PKG_CONFIG_FOUND)
+  pkg_check_modules(PC_KAAPI QUIET kaapi++)
+endif(PKG_CONFIG_FOUND)
+
+find_path(XKAAPI_INCLUDE_DIR kaapi++
+  HINTS
+  ${XKAAPI_HOME}/include
+  ${PC_KAAPI_INCLUDEDIR}
+  ${PC_KAAPI_INCLUDE_DIRS}
+)
+
+find_library(KAAPI_LIBRARY kaapi
+  HINTS
+  ${XKAAPI_HOME}/lib
+  ${PC_KAAPI_LIBDIR}
+  ${PC_KAAPI_LIBRARY_DIRS}
+)
+find_library(XKAAPI_LIBRARY kaapi++
+  HINTS
+  ${XKAAPI_HOME}/lib
+  ${PC_KAAPI_LIBDIR}
+  ${PC_KAAPI_LIBRARY_DIRS}
+)
+#find_library(KAAPI_FORTRAN_LIBRARY kaapif
+#  HINTS
+#  ${XKAAPI_HOME}/lib
+#  ${PC_KAAPI_LIBDIR}
+#  ${PC_KAAPI_LIBRARY_DIRS}
+#)
+find_library(KAAPI_C_LIBRARY kaapic
+  HINTS
+  ${XKAAPI_HOME}/lib
+  ${PC_KAAPI_LIBDIR}
+  ${PC_KAAPI_LIBRARY_DIRS}
+)
+
+set(XKAAPI_LIBRARIES ${XKAAPI_LIBRARY} ${KAAPI_LIBRARY} ${KAAPI_C_LIBRARY})
+set(XKAAPI_INCLUDE_DIRS ${XKAAPI_INCLUDE_DIR})
+
+include(FindPackageHandleStandardArgs)
+# handle the QUIETLY and REQUIRED arguments and set XKAAPI_FOUND to TRUE if all listed variables are TRUE
+find_package_handle_standard_args(XKaapi DEFAULT_MSG XKAAPI_LIBRARIES XKAAPI_INCLUDE_DIRS)
+
+mark_as_advanced(XKAAPI_INCLUDE_DIR XKAAPI_LIBRARY KAAPI_LIBRARY KAAPI_C_LIBRARY KAAPI_FORTRAN_LIBRARY)
+set(XKAAPI_LIBRARIES ${XKAAPI_LIBRARIES} ${CMAKE_THREAD_LIBS})
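
A usage sketch, assuming XKAAPI_HOME points at the (x)Kaapi install prefix or pkg-config can locate kaapi++; the target and source names are hypothetical.

    # Minimal sketch of FindXKaapi usage.
    find_package(XKaapi REQUIRED)
    include_directories(${XKAAPI_INCLUDE_DIRS})
    add_library(my_kaapi_backend my_kaapi_backend.cxx)
    target_link_libraries(my_kaapi_backend ${XKAAPI_LIBRARIES})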
diff --git a/CMake/GenerateExportHeader.cmake b/CMake/GenerateExportHeader.cmake
index c18e320..3cc12dd 100644
--- a/CMake/GenerateExportHeader.cmake
+++ b/CMake/GenerateExportHeader.cmake
@@ -163,8 +163,9 @@ endmacro()
 macro(_test_compiler_hidden_visibility)
 
   if(CMAKE_COMPILER_IS_GNUCXX)
-    exec_program(${CMAKE_C_COMPILER} ARGS --version
-      OUTPUT_VARIABLE _gcc_version_info)
+    execute_process(COMMAND ${CMAKE_C_COMPILER} --version
+      OUTPUT_VARIABLE _gcc_version_info
+      ERROR_VARIABLE _gcc_version_info)
     string(REGEX MATCH "[345]\\.[0-9]\\.[0-9]"
       _gcc_version "${_gcc_version_info}")
     # gcc on mac just reports: "gcc (GCC) 3.3 20030304 ..." without the
@@ -180,8 +181,9 @@ macro(_test_compiler_hidden_visibility)
   endif()
 
   if(CMAKE_CXX_COMPILER_ID MATCHES Intel)
-    exec_program(${CMAKE_CXX_COMPILER} ARGS -V
-      OUTPUT_VARIABLE _intel_version_info)
+    execute_process(COMMAND ${CMAKE_CXX_COMPILER} -V
+      OUTPUT_VARIABLE _intel_version_info
+      ERROR_VARIABLE _intel_version_info)
     string(REGEX REPLACE ".*Version ([0-9]+(\\.[0-9]+)+).*" "\\1"
       _intel_version "${_intel_version_info}")
 
diff --git a/CMake/SharedLibraryPathInfo.cxx b/CMake/SharedLibraryPathInfo.cxx
index 0156b92..46e1406 100644
--- a/CMake/SharedLibraryPathInfo.cxx
+++ b/CMake/SharedLibraryPathInfo.cxx
@@ -38,7 +38,7 @@
 # define RETURN_VALUE 0
 #endif
 
-/* OSX */
+/* OS X */
 #if defined(__APPLE__)
 # define CMAKE_SHARED_LDD "otool"
 # define CMAKE_SHARED_LDD_FLAGS "-L"
diff --git a/CMake/VTKConfig.cmake.in b/CMake/VTKConfig.cmake.in
index b2f2983..24f379f 100644
--- a/CMake/VTKConfig.cmake.in
+++ b/CMake/VTKConfig.cmake.in
@@ -88,6 +88,7 @@ SET(VTK_LEGACY_SILENT "@VTK_LEGACY_SILENT@")
 SET(VTK_WRAP_PYTHON "@VTK_WRAP_PYTHON@")
 SET(VTK_WRAP_TCL "@VTK_WRAP_TCL@")
 SET(VTK_WRAP_JAVA "@VTK_WRAP_JAVA@")
+SET(VTK_QT_VERSION "@VTK_QT_VERSION@")
 
 # Do not add options or information here that is specific to a
 # particular module.  Instead set <module>_EXPORT_OPTIONS and/or
diff --git a/CMake/VTKParallelCMakeTests.cmake b/CMake/VTKParallelCMakeTests.cmake
index d135ed8..d95b167 100644
--- a/CMake/VTKParallelCMakeTests.cmake
+++ b/CMake/VTKParallelCMakeTests.cmake
@@ -41,7 +41,7 @@ SET(HAVE_SOCKETS TRUE)
 # Cray Xt3/ Catamount doesn't have any socket support
 # this could also be determined by doing something like
 # check_symbol_exists(socket "sys/types.h;sys/socket.h" HAVE_SOCKETS)
-IF(CMAKE_SYSTEM MATCHES Catamount)
-  SET(HAVE_SOCKETS FALSE)
-ENDIF(CMAKE_SYSTEM MATCHES Catamount)
+#IF(CMAKE_SYSTEM MATCHES Catamount) #Xt6 and Xt7 at least have sockets, so disable this.
+#  SET(HAVE_SOCKETS FALSE)
+#ENDIF(CMAKE_SYSTEM MATCHES Catamount)
 
diff --git a/CMake/VTKValgrindSuppressions.supp b/CMake/VTKValgrindSuppressions.supp
index b010cf1..1424ac9 100644
--- a/CMake/VTKValgrindSuppressions.supp
+++ b/CMake/VTKValgrindSuppressions.supp
@@ -1014,3 +1014,103 @@
    fun:_Z23TestQtTableModelAdapteriPPc
    fun:main
 }
+{
+    <insert_a_suppression_name_here>
+    Memcheck:Value8
+    fun:fetch_texel_2d_f_rgba8888
+    fun:texture_get_row
+    fun:_swrast_get_dest_rgba
+    fun:_swrast_mask_rgba_span
+    fun:clear_rgba_buffer_with_masking
+    fun:_swrast_Clear
+    fun:_ZN17vtkOpenGLRenderer5ClearEv
+    fun:_ZN13vtkCameraPass6RenderEPK14vtkRenderState
+    fun:_ZN21vtkShadowMapBakerPass6RenderEPK14vtkRenderState
+    fun:_ZN15vtkSequencePass6RenderEPK14vtkRenderState
+    fun:_ZN13vtkCameraPass6RenderEPK14vtkRenderState
+    fun:_ZN17vtkOpenGLRenderer12DeviceRenderEv
+    fun:_ZN11vtkRenderer6RenderEv
+    fun:_ZN21vtkRendererCollection6RenderEv
+    fun:_ZN15vtkRenderWindow14DoStereoRenderEv
+    fun:_ZN15vtkRenderWindow10DoFDRenderEv
+    fun:_ZN15vtkRenderWindow10DoAARenderEv
+    fun:_ZN15vtkRenderWindow6RenderEv
+    fun:_ZN22vtkXOpenGLRenderWindow6RenderEv
+    fun:_Z17TestShadowMapPassiPPc
+    fun:main
+}
+{
+    <insert_a_suppression_name_here>
+    Memcheck:Value8
+    fun:fetch_texel_2d_f_rgba8888
+    fun:texture_get_row
+    fun:_swrast_get_dest_rgba
+    fun:_swrast_mask_rgba_span
+    fun:clear_rgba_buffer_with_masking
+    fun:_swrast_Clear
+    fun:_ZN17vtkOpenGLRenderer5ClearEv
+    fun:_ZN13vtkCameraPass6RenderEPK14vtkRenderState
+    fun:_ZN21vtkShadowMapBakerPass6RenderEPK14vtkRenderState
+    fun:_ZN15vtkSequencePass6RenderEPK14vtkRenderState
+    fun:_ZN13vtkCameraPass6RenderEPK14vtkRenderState
+    fun:_ZN17vtkOpenGLRenderer12DeviceRenderEv
+    fun:_ZN11vtkRenderer6RenderEv
+    fun:_ZN21vtkRendererCollection6RenderEv
+    fun:_ZN15vtkRenderWindow14DoStereoRenderEv
+    fun:_ZN15vtkRenderWindow10DoFDRenderEv
+    fun:_ZN15vtkRenderWindow10DoAARenderEv
+    fun:_ZN15vtkRenderWindow6RenderEv
+    fun:_ZN22vtkXOpenGLRenderWindow6RenderEv
+    fun:_Z17TestShadowMapPassiPPc
+    fun:main
+}
+{
+    <insert_a_suppression_name_here>
+    Memcheck:Value8
+    fun:fetch_texel_2d_f_rgba8888
+    fun:texture_get_row
+    fun:_swrast_get_dest_rgba
+    fun:_swrast_mask_rgba_span
+    fun:clear_rgba_buffer_with_masking
+    fun:_swrast_Clear
+    fun:_ZN17vtkOpenGLRenderer5ClearEv
+    fun:_ZN13vtkCameraPass6RenderEPK14vtkRenderState
+    fun:_ZN21vtkShadowMapBakerPass6RenderEPK14vtkRenderState
+    fun:_ZN15vtkSequencePass6RenderEPK14vtkRenderState
+    fun:_ZN13vtkCameraPass6RenderEPK14vtkRenderState
+    fun:_ZN17vtkOpenGLRenderer12DeviceRenderEv
+    fun:_ZN11vtkRenderer6RenderEv
+    fun:_ZN21vtkRendererCollection6RenderEv
+    fun:_ZN15vtkRenderWindow14DoStereoRenderEv
+    fun:_ZN15vtkRenderWindow10DoFDRenderEv
+    fun:_ZN15vtkRenderWindow10DoAARenderEv
+    fun:_ZN15vtkRenderWindow6RenderEv
+    fun:_ZN22vtkXOpenGLRenderWindow6RenderEv
+    fun:_Z17TestShadowMapPassiPPc
+    fun:main
+}
+{
+    <insert_a_suppression_name_here>
+    Memcheck:Value8
+    fun:fetch_texel_2d_f_rgba8888
+    fun:texture_get_row
+    fun:_swrast_get_dest_rgba
+    fun:_swrast_mask_rgba_span
+    fun:clear_rgba_buffer_with_masking
+    fun:_swrast_Clear
+    fun:_ZN17vtkOpenGLRenderer5ClearEv
+    fun:_ZN13vtkCameraPass6RenderEPK14vtkRenderState
+    fun:_ZN21vtkShadowMapBakerPass6RenderEPK14vtkRenderState
+    fun:_ZN15vtkSequencePass6RenderEPK14vtkRenderState
+    fun:_ZN13vtkCameraPass6RenderEPK14vtkRenderState
+    fun:_ZN17vtkOpenGLRenderer12DeviceRenderEv
+    fun:_ZN11vtkRenderer6RenderEv
+    fun:_ZN21vtkRendererCollection6RenderEv
+    fun:_ZN15vtkRenderWindow14DoStereoRenderEv
+    fun:_ZN15vtkRenderWindow10DoFDRenderEv
+    fun:_ZN15vtkRenderWindow10DoAARenderEv
+    fun:_ZN15vtkRenderWindow6RenderEv
+    fun:_ZN22vtkXOpenGLRenderWindow6RenderEv
+    fun:_Z17TestShadowMapPassiPPc
+    fun:main
+}
diff --git a/CMake/vtkCompilerExtras.cmake b/CMake/vtkCompilerExtras.cmake
index 39ff952..05b2db9 100644
--- a/CMake/vtkCompilerExtras.cmake
+++ b/CMake/vtkCompilerExtras.cmake
@@ -23,8 +23,10 @@ if(CMAKE_COMPILER_IS_GNUCXX)
   endif()
 
   # Now check if we can use visibility to selectively export symbols
-  exec_program(${CMAKE_C_COMPILER} ARGS --version OUTPUT_VARIABLE
-    _gcc_version_info)
+  execute_process(COMMAND ${CMAKE_C_COMPILER} --version
+    OUTPUT_VARIABLE _gcc_version_info
+    ERROR_VARIABLE _gcc_version_info)
+
   string (REGEX MATCH "[345]\\.[0-9]\\.[0-9]"
     _gcc_version "${_gcc_version_info}")
   if(NOT _gcc_version)
diff --git a/CMake/vtkExternalData.cmake b/CMake/vtkExternalData.cmake
new file mode 100644
index 0000000..b44886f
--- /dev/null
+++ b/CMake/vtkExternalData.cmake
@@ -0,0 +1,71 @@
+get_filename_component(_VTKExternalData_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
+include(${_VTKExternalData_DIR}/ExternalData.cmake)
+
+if(NOT VTK_DATA_STORE)
+  # Select a default from the following.
+  set(VTK_DATA_STORE_DEFAULT "")
+  if(EXISTS "${CMAKE_SOURCE_DIR}/.ExternalData/config/store")
+    # Configuration left by developer setup script.
+    file(STRINGS "${CMAKE_SOURCE_DIR}/.ExternalData/config/store"
+      VTK_DATA_STORE_DEFAULT LIMIT_COUNT 1 LIMIT_INPUT 1024)
+  elseif(IS_DIRECTORY "${CMAKE_SOURCE_DIR}/../VTKExternalData")
+    # Adjacent directory created by user.
+    get_filename_component(VTK_DATA_STORE_DEFAULT
+      "${CMAKE_SOURCE_DIR}/../VTKExternalData" ABSOLUTE)
+  elseif(IS_DIRECTORY "${CMAKE_SOURCE_DIR}/../ExternalData")
+    # Generic adjacent directory created by user.
+    get_filename_component(VTK_DATA_STORE_DEFAULT
+      "${CMAKE_SOURCE_DIR}/../ExternalData" ABSOLUTE)
+  elseif(DEFINED "ENV{ExternalData_OBJECT_STORES}")
+    # Generic ExternalData environment variable.
+    file(TO_CMAKE_PATH "$ENV{ExternalData_OBJECT_STORES}" VTK_DATA_STORE_DEFAULT)
+  endif()
+endif()
+
+# Provide users with an option to select a local object store,
+# starting with the above-selected default.
+set(VTK_DATA_STORE "${VTK_DATA_STORE_DEFAULT}" CACHE PATH
+  "Local directory holding ExternalData objects in the layout %(algo)/%(hash).")
+mark_as_advanced(VTK_DATA_STORE)
+
+# Use a store in the build tree if none is otherwise configured.
+if(NOT VTK_DATA_STORE)
+  if(ExternalData_OBJECT_STORES)
+    set(VTK_DATA_STORE "")
+  else()
+    set(VTK_DATA_STORE "${CMAKE_BINARY_DIR}/ExternalData/Objects")
+    file(MAKE_DIRECTORY "${VTK_DATA_STORE}")
+  endif()
+endif()
+
+# Tell ExternalData module about selected object stores.
+list(APPEND ExternalData_OBJECT_STORES
+  # Store selected by VTK-specific configuration above.
+  ${VTK_DATA_STORE}
+
+  # Local data store populated by the VTK pre-commit hook
+  "${CMAKE_SOURCE_DIR}/.ExternalData"
+  )
+
+set(ExternalData_BINARY_ROOT ${CMAKE_BINARY_DIR}/ExternalData)
+
+set(ExternalData_URL_TEMPLATES "" CACHE STRING
+  "Additional URL templates for the ExternalData CMake script to look for testing data. E.g.
+file:///var/bigharddrive/%(algo)/%(hash)")
+mark_as_advanced(ExternalData_URL_TEMPLATES)
+list(APPEND ExternalData_URL_TEMPLATES
+  # Data published by MIDAS
+  "http://midas3.kitware.com/midas/api/rest?method=midas.bitstream.download&checksum=%(hash)&algorithm=%(algo)"
+
+  # Data published by developers using git-gerrit-push.
+  "http://www.vtk.org/files/ExternalData/%(algo)/%(hash)"
+  )
+
+# Tell ExternalData commands to transform raw files to content links.
+# TODO: Condition this feature on presence of our pre-commit hook.
+set(ExternalData_LINK_CONTENT MD5)
+
+# Match series of the form <base>.<ext>, <base>_<n>.<ext> such that <base> may
+# end in a (test) number that is not part of any series numbering.
+set(ExternalData_SERIES_PARSE "()(\\.[^./]*)$")
+set(ExternalData_SERIES_MATCH "(_[0-9]+)?")
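
For context, test code references content-linked data through the DATA{} syntax handled by ExternalData.cmake; a rough sketch (the test, executable, and data names are hypothetical) could look like the following.

    # Minimal sketch of referencing ExternalData content links in a test.
    ExternalData_Add_Test(VTKData
      NAME MyModuleCxx-TestMyReader
      COMMAND MyModuleCxxTests TestMyReader
              DATA{${VTK_TEST_INPUT_DIR}/some_input.vtk})
    ExternalData_Add_Target(VTKData)   # target that fetches the referenced objects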
diff --git a/CMake/vtkFFMPEGTestAvAlloc.cxx b/CMake/vtkFFMPEGTestAvAlloc.cxx
deleted file mode 100644
index ad691e3..0000000
--- a/CMake/vtkFFMPEGTestAvAlloc.cxx
+++ /dev/null
@@ -1,14 +0,0 @@
-extern "C" {
-#ifdef HAS_OLD_HEADER
-# include <ffmpeg/avformat.h>
-#else
-# include <libavformat/avformat.h>
-#endif
-}
-
-int main()
-{
-  avformat_alloc_context();
-  return 0;
-}
-
diff --git a/CMake/vtkFFMPEGTestImgConvert.cxx b/CMake/vtkFFMPEGTestImgConvert.cxx
deleted file mode 100644
index 9ade522..0000000
--- a/CMake/vtkFFMPEGTestImgConvert.cxx
+++ /dev/null
@@ -1,15 +0,0 @@
-extern "C" {
-#ifdef HAS_OLD_HEADER
-# include <ffmpeg/avcodec.h>
-#else
-# include <libavcodec/avcodec.h>
-#endif
-}
-
-int main()
-{
-  img_convert(0, PIX_FMT_RGB24,
-              0, PIX_FMT_RGB24,
-              0, 0);
-  return 0;
-}
diff --git a/CMake/vtkFFMPEGTestURLFClose.cxx b/CMake/vtkFFMPEGTestURLFClose.cxx
deleted file mode 100644
index 0bf4314..0000000
--- a/CMake/vtkFFMPEGTestURLFClose.cxx
+++ /dev/null
@@ -1,14 +0,0 @@
-extern "C" {
-#ifdef HAS_OLD_HEADER
-# include <ffmpeg/avformat.h>
-#else
-# include <libavformat/avformat.h>
-#endif
-}
-
-int main()
-{
-  AVFormatContext *ctx;
-  url_fclose(&ctx->pb);
-  return 0;
-}
diff --git a/CMake/vtkForwardingExecutable.cmake b/CMake/vtkForwardingExecutable.cmake
index 5cc5a39..4f54dc1 100644
--- a/CMake/vtkForwardingExecutable.cmake
+++ b/CMake/vtkForwardingExecutable.cmake
@@ -77,7 +77,7 @@ function(vtk_add_executable_with_forwarding2
       configure_file(
         ${VTK_CMAKE_DIR}/vtk-forward.c.in
         ${CMAKE_CURRENT_BINARY_DIR}/${exe_name}-forward.c
-        @ONLY IMMEDIATE)
+        @ONLY)
       add_executable(${exe_name}${VTK_EXE_SUFFIX}
         ${CMAKE_CURRENT_BINARY_DIR}/${exe_name}-forward.c)
       set_target_properties(${exe_name}${VTK_EXE_SUFFIX} PROPERTIES
diff --git a/CMake/vtkGhostscript.cmake b/CMake/vtkGhostscript.cmake
new file mode 100644
index 0000000..a1db581
--- /dev/null
+++ b/CMake/vtkGhostscript.cmake
@@ -0,0 +1,3 @@
+# Find the GhostScript executable for GL2PS tests.
+find_program(VTK_GHOSTSCRIPT_EXECUTABLE gs gswin32c gsos2)
+mark_as_advanced(VTK_GHOSTSCRIPT_EXECUTABLE)
diff --git a/CMake/vtkJavaWrapping.cmake b/CMake/vtkJavaWrapping.cmake
index 3abcc87..0509cae 100644
--- a/CMake/vtkJavaWrapping.cmake
+++ b/CMake/vtkJavaWrapping.cmake
@@ -41,6 +41,10 @@ function(vtk_add_java_wrapping module_name module_srcs module_hdrs)
     "${module_srcs};${Kit_JAVA_EXTRA_WRAP_SRCS}")
 
   add_library(${module_name}Java SHARED ${ModuleJava_SRCS} ${Kit_JAVA_EXTRA_SRCS})
+  if(MINGW)
+    set_target_properties(${module_name}Java PROPERTIES PREFIX "")
+  endif(MINGW)
+  vtk_target_export(${module_name}Java)
   if(CMAKE_HAS_TARGET_INCLUDES)
     set_property(TARGET ${module_name}Java APPEND
       PROPERTY INCLUDE_DIRECTORIES ${_java_include_dirs})
@@ -52,23 +56,26 @@ function(vtk_add_java_wrapping module_name module_srcs module_hdrs)
       "${module_name}_AUTOINIT=1(${module_name})")
   endif()
 
-  target_link_libraries(${module_name}Java ${module_name} vtkWrappingJava)
+  target_link_libraries(${module_name}Java LINK_PUBLIC ${module_name} vtkWrappingJava)
 
   # Do we need to link to AWT?
   if(${module_name} STREQUAL "vtkRenderingCore")
-    target_link_libraries(${module_name}Java ${JAVA_AWT_LIBRARY})
+    target_link_libraries(${module_name}Java LINK_PUBLIC ${JAVA_AWT_LIBRARY})
     if(APPLE)
-      target_link_libraries(${module_name}Java "-framework Cocoa")
+      target_link_libraries(${module_name}Java LINK_PUBLIC "-framework Cocoa")
     endif()
   endif()
 
   foreach(dep ${${module_name}_LINK_DEPENDS})
     if(NOT ${dep}_EXCLUDE_FROM_WRAPPING)
-      target_link_libraries(${module_name}Java ${dep}Java)
+      target_link_libraries(${module_name}Java LINK_PUBLIC ${dep}Java)
     endif()
   endforeach()
 
   if(NOT VTK_INSTALL_NO_LIBRARIES)
+    if(APPLE AND VTK_JAVA_INSTALL)
+      set_target_properties(${module_name}Java PROPERTIES SUFFIX ".jnilib")
+    endif(APPLE AND VTK_JAVA_INSTALL)
     install(TARGETS ${module_name}Java
       EXPORT ${VTK_INSTALL_EXPORT_NAME}
       RUNTIME DESTINATION ${VTK_INSTALL_RUNTIME_DIR} COMPONENT RuntimeLibraries
diff --git a/CMake/vtkLegacyData.cmake b/CMake/vtkLegacyData.cmake
new file mode 100644
index 0000000..df3bba1
--- /dev/null
+++ b/CMake/vtkLegacyData.cmake
@@ -0,0 +1,36 @@
+# Make testing data from the legacy VTKData repository available.
+# DO NOT ADD NEW DATA HERE!!
+# TODO: Reference testing data from each module only as needed.
+set(data "DATA{${VTK_TEST_INPUT_DIR}/,REGEX:.*}")
+foreach(d
+    Infovis
+    Infovis/SQLite
+    Infovis/XML
+    Infovis/Images
+    Infovis/DimacsGraphs
+    Tango
+    SemiDisk
+    GIS
+    many_blocks
+    many_blocks/many_blocks
+    Quadratic
+    Dave_Karelitz_Small
+    MetaIO
+    libtiff
+    AMR
+    AMR/HierarchicalBoxDataset.v1.0
+    AMR/HierarchicalBoxDataset.v1.1
+    AMR/Enzo
+    AMR/Enzo/DD0010
+    UCD2D
+    ex-blow_5
+    chombo3d
+    foot
+    chi_field
+    headsq
+    Viewpoint
+    EnSight
+    )
+  list(APPEND data "DATA{${VTK_TEST_INPUT_DIR}/${d}/,REGEX:.*}")
+endforeach()
+ExternalData_Expand_Arguments(VTKData _ ${data})
diff --git a/CMake/vtkMPI.cmake b/CMake/vtkMPI.cmake
new file mode 100644
index 0000000..d5e5905
--- /dev/null
+++ b/CMake/vtkMPI.cmake
@@ -0,0 +1,15 @@
+# Helper to find and configure MPI for VTK targets. Centralize the logic for
+# any necessary compiler definitions, linking, etc.
+find_package(MPI REQUIRED)
+mark_as_advanced(MPI_LIBRARY MPI_EXTRA_LIBRARY)
+include_directories(${MPI_C_INCLUDE_PATH})
+# Needed for MPICH 2
+add_definitions("-DMPICH_IGNORE_CXX_SEEK")
+
+# Function to link a VTK target to the necessary MPI libraries.
+function(vtk_mpi_link target)
+  target_link_libraries(${target} LINK_PRIVATE ${MPI_C_LIBRARIES})
+  if(MPI_CXX_LIBRARIES)
+    target_link_libraries(${target} LINK_PRIVATE ${MPI_CXX_LIBRARIES})
+  endif()
+endfunction()
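
A sketch of how a parallel module might consume this helper; the target and source names are hypothetical.

    # Minimal sketch of vtkMPI.cmake usage.
    include(vtkMPI)                        # finds MPI, sets include dirs and definitions
    add_library(vtkMyParallelModule vtkMyParallelFilter.cxx)
    vtk_mpi_link(vtkMyParallelModule)      # links MPI_C_LIBRARIES (and MPI_CXX_LIBRARIES if set)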
diff --git a/CMake/vtkMakeInstantiator.cmake b/CMake/vtkMakeInstantiator.cmake
index 4f430c3..a93ee53 100644
--- a/CMake/vtkMakeInstantiator.cmake
+++ b/CMake/vtkMakeInstantiator.cmake
@@ -143,13 +143,11 @@ MACRO(VTK_MAKE_INSTANTIATOR3 className outSourceList SOURCES EXPORT_MACRO HEADER
     ${VTK_CMAKE_DIR}/vtkMakeInstantiator.h.in
     ${HEADER_LOCATION}/${className}.h
     COPY_ONLY
-    IMMEDIATE
     )
   CONFIGURE_FILE(
     ${VTK_CMAKE_DIR}/vtkMakeInstantiator.cxx.in
     ${CMAKE_CURRENT_BINARY_DIR}/${className}.cxx
     COPY_ONLY
-    IMMEDIATE
     )
 
 ENDMACRO(VTK_MAKE_INSTANTIATOR3)
diff --git a/CMake/vtkModuleAPI.cmake b/CMake/vtkModuleAPI.cmake
index 6639bd7..8944c77 100644
--- a/CMake/vtkModuleAPI.cmake
+++ b/CMake/vtkModuleAPI.cmake
@@ -1,15 +1,22 @@
-
-
 #-----------------------------------------------------------------------------
 # Private helper macros.
 
+# _vtk_module_config_recurse(<namespace> <module>)
+#
+# Internal macro to recursively load module information into the supplied
+# namespace; this is called from vtk_module_config. Note that
+# _${ns}_${mod}_USED must be cleared if this macro is to work correctly on
+# subsequent invocations. The macro loads the module files using
+# vtk_module_load, making all of their variables available in the local scope.
 macro(_vtk_module_config_recurse ns mod)
-  if(NOT _${ns}_${dep}_USED)
+  if(NOT _${ns}_${mod}_USED)
     set(_${ns}_${mod}_USED 1)
+    list(APPEND _${ns}_USED_MODULES ${mod})
     vtk_module_load("${mod}")
     list(APPEND ${ns}_LIBRARIES ${${mod}_LIBRARIES})
     list(APPEND ${ns}_INCLUDE_DIRS ${${mod}_INCLUDE_DIRS})
     list(APPEND ${ns}_LIBRARY_DIRS ${${mod}_LIBRARY_DIRS})
+    list(APPEND ${ns}_RUNTIME_LIBRARY_DIRS ${${mod}_RUNTIME_LIBRARY_DIRS})
     foreach(iface IN LISTS ${mod}_IMPLEMENTS)
       list(APPEND _${ns}_AUTOINIT_${iface} ${mod})
       list(APPEND _${ns}_AUTOINIT ${iface})
@@ -25,16 +32,18 @@ endmacro()
 
 # vtk_module_load(<module>)
 #
-# Loads variables describing the given module:
+# Loads variables describing the given module; these include custom variables
+# set by the module along with the standard ones listed below:
 #  <module>_LOADED         = True if the module has been loaded
 #  <module>_DEPENDS        = List of dependencies on other modules
 #  <module>_LIBRARIES      = Libraries to link
 #  <module>_INCLUDE_DIRS   = Header search path
 #  <module>_LIBRARY_DIRS   = Library search path (for outside dependencies)
+#  <module>_RUNTIME_LIBRARY_DIRS = Runtime linker search path
 macro(vtk_module_load mod)
   if(NOT ${mod}_LOADED)
     include("${VTK_MODULES_DIR}/${mod}.cmake" OPTIONAL RESULT_VARIABLE _found)
-    if (NOT _found)
+    if(NOT _found)
       # When building applications outside VTK, they can provide extra module
       # config files by simply adding the corresponding locations to the
       # CMAKE_MODULE_PATH
@@ -59,9 +68,10 @@ endmacro()
 
 # vtk_module_headers_load(<module>)
 #
-# Loads variables describing the given module:
-#  <module>_HEADERS_LOADED       = True if the module header info has been loaded
-#  <module>_HEADERS              = List of headers
+# Loads variables describing the headers/API of the given module; this is not
+# loaded by vtk_module_config, and is mainly useful for wrapping generation:
+#  <module>_HEADERS_LOADED      = True if the module header info has been loaded
+#  <module>_HEADERS             = List of headers
 #  <module>_HEADER_<header>_EXISTS
 #  <module>_HEADER_<header>_ABSTRACT
 #  <module>_HEADER_<header>_WRAP_EXCLUDE
@@ -70,7 +80,7 @@ macro(vtk_module_headers_load mod)
   if(NOT ${mod}_HEADERS_LOADED)
     include("${VTK_MODULES_DIR}/${mod}-Headers.cmake"
       OPTIONAL RESULT_VARIABLE _found)
-    if (NOT _found)
+    if(NOT _found)
       # When building applications outside VTK, they can provide extra module
       # config files by simply adding the corresponding locations to the
       # CMAKE_MODULE_PATH
@@ -89,6 +99,13 @@ endmacro()
 #  <namespace>_LIBRARIES    = Libraries to link
 #  <namespace>_INCLUDE_DIRS = Header search path
 #  <namespace>_LIBRARY_DIRS = Library search path (for outside dependencies)
+#  <namespace>_RUNTIME_LIBRARY_DIRS = Runtime linker search path
+#
+# Calling this macro also recursively calls vtk_module_load for all modules
+# explicitly named, and their dependencies, making them available in the local
+# scope. This means that module-level information can be accessed once this
+# macro has been called.
+#
 # Do not name a module as the namespace.
 macro(vtk_module_config ns)
   set(_${ns}_MISSING ${ARGN})
@@ -107,12 +124,20 @@ macro(vtk_module_config ns)
   set(${ns}_LIBRARIES "")
   set(${ns}_INCLUDE_DIRS "")
   set(${ns}_LIBRARY_DIRS "")
+  set(${ns}_RUNTIME_LIBRARY_DIRS "")
   set(_${ns}_AUTOINIT "")
+
+  set(_${ns}_USED_MODULES "")
   foreach(mod ${ARGN})
     _vtk_module_config_recurse("${ns}" "${mod}")
   endforeach()
+  foreach(mod ${_${ns}_USED_MODULES})
+    unset(_${ns}_${mod}_USED)
+  endforeach()
+  unset(_${ns}_USED_MODULES)
+
   foreach(v ${ns}_LIBRARIES ${ns}_INCLUDE_DIRS ${ns}_LIBRARY_DIRS
-           _${ns}_AUTOINIT)
+            ${ns}_RUNTIME_LIBRARY_DIRS _${ns}_AUTOINIT)
     if(${v})
       list(REMOVE_DUPLICATES ${v})
     endif()
@@ -144,7 +169,9 @@ macro(vtk_module_config ns)
   unset(_${ns}_AUTOINIT)
 endmacro()
 
-# Call to add a single directory to the module search path
+# vtk_add_to_module_search_path(<source> <build>)
+#
+# Call to add a single module to the module search path.
 macro(vtk_add_to_module_search_path src bld)
   list(APPEND vtk_module_search_path "${src},${bld}")
 endmacro()
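
An application built against an installed VTK might use vtk_module_config roughly as follows; the namespace and module names are only examples.

    # Minimal sketch of vtk_module_config usage outside the VTK tree.
    find_package(VTK REQUIRED)             # VTKConfig.cmake provides vtk_module_config
    vtk_module_config(MYAPP vtkRenderingOpenGL vtkInteractionStyle)
    include_directories(${MYAPP_INCLUDE_DIRS})
    add_executable(my_app main.cxx)
    target_link_libraries(my_app ${MYAPP_LIBRARIES})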
diff --git a/CMake/vtkModuleInfo.cmake.in b/CMake/vtkModuleInfo.cmake.in
index faaaec0..e59fc3a 100644
--- a/CMake/vtkModuleInfo.cmake.in
+++ b/CMake/vtkModuleInfo.cmake.in
@@ -3,5 +3,6 @@ set(@vtk-module at _DEPENDS "@vtk-module-DEPENDS@")
 set(@vtk-module at _LIBRARIES "@vtk-module-LIBRARIES@")
 set(@vtk-module at _INCLUDE_DIRS "@vtk-module-INCLUDE_DIRS@")
 set(@vtk-module at _LIBRARY_DIRS "@vtk-module-LIBRARY_DIRS@")
+set(@vtk-module at _RUNTIME_LIBRARY_DIRS "@vtk-module-RUNTIME_LIBRARY_DIRS@")
 set(@vtk-module at _WRAP_HIERARCHY_FILE "@vtk-module-WRAP_HIERARCHY_FILE@")
 @vtk-module-EXPORT_CODE@
diff --git a/CMake/vtkModuleMacros.cmake b/CMake/vtkModuleMacros.cmake
index eada76d..6b1685b 100644
--- a/CMake/vtkModuleMacros.cmake
+++ b/CMake/vtkModuleMacros.cmake
@@ -12,6 +12,30 @@ if(UNIX AND VTK_BUILD_FORWARDING_EXECUTABLES)
   include(vtkForwardingExecutable)
 endif()
 
+# vtk_module(<name>)
+#
+# Main function for declaring a VTK module, usually in a module.cmake file in
+# the module search path. The module name is the only required argument, all
+# others are optional named arguments that will be outlined below. The following
+# named options take one (or more) arguments, such as the names of dependent
+# modules:
+#  DEPENDS = Modules that will be publicly linked to this module
+#  PRIVATE_DEPENDS = Modules that will be privately linked to this module
+#  COMPILE_DEPENDS = Modules that are needed at compile time by this module
+#  TEST_DEPENDS = Modules that are needed by this module's testing executables
+#  DESCRIPTION = Free text description of the module
+#  TCL_NAME = Alternative name for the TCL wrapping (cannot contain numbers)
+#  IMPLEMENTS = Modules that this module implements, using the auto init feature
+#  GROUPS = Module groups this module should be included in
+#  TEST_LABELS = Add labels to the tests for the module
+#
+# The following options take no arguments:
+#  EXCLUDE_FROM_ALL = Exclude this module from the build-all-modules flag
+#  EXCLUDE_FROM_WRAPPING = Do not attempt to wrap this module in any language
+#  EXCLUDE_FROM_WRAP_HIERARCHY = Do not attempt to process with wrap hierarchy
+#
+# This macro will ensure the module name is compliant, and set the appropriate
+# module variables as declared in the module.cmake file.
 macro(vtk_module _name)
   vtk_module_check_name(${_name})
   set(vtk-module ${_name})
@@ -21,6 +45,7 @@ macro(vtk_module _name)
   set(${vtk-module-test}_DECLARED 1)
   set(${vtk-module}_DEPENDS "")
   set(${vtk-module}_COMPILE_DEPENDS "")
+  set(${vtk-module}_PRIVATE_DEPENDS "")
   set(${vtk-module-test}_DEPENDS "${vtk-module}")
   set(${vtk-module}_IMPLEMENTS "")
   set(${vtk-module}_DESCRIPTION "description")
@@ -30,7 +55,7 @@ macro(vtk_module _name)
   set(${vtk-module}_EXCLUDE_FROM_WRAP_HIERARCHY 0)
   set(${vtk-module}_TEST_LABELS "")
   foreach(arg ${ARGN})
-  if("${arg}" MATCHES "^((|COMPILE_|TEST_|)DEPENDS|DESCRIPTION|TCL_NAME|IMPLEMENTS|DEFAULT|GROUPS|TEST_LABELS)$")
+    if("${arg}" MATCHES "^((|COMPILE_|PRIVATE_|TEST_|)DEPENDS|DESCRIPTION|TCL_NAME|IMPLEMENTS|DEFAULT|GROUPS|TEST_LABELS)$")
       set(_doing "${arg}")
     elseif("${arg}" MATCHES "^EXCLUDE_FROM_ALL$")
       set(_doing "")
@@ -53,6 +78,8 @@ macro(vtk_module _name)
       list(APPEND ${vtk-module-test}_DEPENDS "${arg}")
     elseif("${_doing}" MATCHES "^COMPILE_DEPENDS$")
       list(APPEND ${vtk-module}_COMPILE_DEPENDS "${arg}")
+    elseif("${_doing}" MATCHES "^PRIVATE_DEPENDS$")
+      list(APPEND ${vtk-module}_PRIVATE_DEPENDS "${arg}")
     elseif("${_doing}" MATCHES "^DESCRIPTION$")
       set(_doing "")
       set(${vtk-module}_DESCRIPTION "${arg}")
@@ -77,7 +104,9 @@ macro(vtk_module _name)
   endforeach()
   list(SORT ${vtk-module}_DEPENDS) # Deterministic order.
   set(${vtk-module}_LINK_DEPENDS "${${vtk-module}_DEPENDS}")
-  list(APPEND ${vtk-module}_DEPENDS ${${vtk-module}_COMPILE_DEPENDS})
+  list(APPEND ${vtk-module}_DEPENDS
+    ${${vtk-module}_COMPILE_DEPENDS}
+    ${${vtk-module}_PRIVATE_DEPENDS})
   unset(${vtk-module}_COMPILE_DEPENDS)
   list(SORT ${vtk-module}_DEPENDS) # Deterministic order.
   list(SORT ${vtk-module-test}_DEPENDS) # Deterministic order.
@@ -88,12 +117,17 @@ macro(vtk_module _name)
   endif()
 endmacro()
 
+# vtk_module_check_name(<name>)
+#
+# Check if the proposed module name is compliant.
 macro(vtk_module_check_name _name)
-  if( NOT "${_name}" MATCHES "^[a-zA-Z][a-zA-Z0-9]*$")
+  if(NOT "${_name}" MATCHES "^[a-zA-Z][a-zA-Z0-9]*$")
     message(FATAL_ERROR "Invalid module name: ${_name}")
   endif()
 endmacro()
 
+# vtk_module_impl()
+#
 # This macro provides module implementation, setting up important variables
 # necessary to build a module. It assumes we are in the directory of the module.
 macro(vtk_module_impl)
@@ -110,7 +144,6 @@ macro(vtk_module_impl)
   endif()
 
   if(NOT DEFINED ${vtk-module}_LIBRARIES)
-    set(${vtk-module}_LIBRARIES "")
     foreach(dep IN LISTS ${vtk-module}_LINK_DEPENDS)
       list(APPEND ${vtk-module}_LIBRARIES "${${dep}_LIBRARIES}")
     endforeach()
@@ -121,8 +154,8 @@ macro(vtk_module_impl)
 
   list(APPEND ${vtk-module}_INCLUDE_DIRS
     ${${vtk-module}_BINARY_DIR}
-    ${${vtk-module}_SOURCE_DIR}
-    )
+    ${${vtk-module}_SOURCE_DIR})
+  list(REMOVE_DUPLICATES ${vtk-module}_INCLUDE_DIRS)
 
   if(${vtk-module}_INCLUDE_DIRS)
     include_directories(${${vtk-module}_INCLUDE_DIRS})
@@ -140,7 +173,11 @@ macro(vtk_module_impl)
   endif()
 endmacro()
 
-# Export just the essential data from a module such as name, include directory.
+# vtk_module_export_info()
+#
+# Export just the essential data from a module such as name, include directory,
+# libraries provided by the module, and any custom variables that are part of
+# the module configuration.
 macro(vtk_module_export_info)
   vtk_module_impl()
   # First gather and configure the high level module information.
@@ -171,13 +208,22 @@ macro(vtk_module_export_info)
     list(APPEND vtk-module-INCLUDE_DIRS-build "${${vtk-module}_SYSTEM_INCLUDE_DIRS}")
     list(APPEND vtk-module-INCLUDE_DIRS-install "${${vtk-module}_SYSTEM_INCLUDE_DIRS}")
   endif()
+  if(WIN32)
+    set(vtk-module-RUNTIME_LIBRARY_DIRS-build "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}")
+    set(vtk-module-RUNTIME_LIBRARY_DIRS-install "\${VTK_INSTALL_PREFIX}/${VTK_INSTALL_RUNTIME_DIR}")
+  else()
+    set(vtk-module-RUNTIME_LIBRARY_DIRS-build "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}")
+    set(vtk-module-RUNTIME_LIBRARY_DIRS-install "\${VTK_INSTALL_PREFIX}/${VTK_INSTALL_LIBRARY_DIR}")
+  endif()
   set(vtk-module-LIBRARY_DIRS "${${vtk-module}_SYSTEM_LIBRARY_DIRS}")
+  set(vtk-module-RUNTIME_LIBRARY_DIRS "${vtk-module-RUNTIME_LIBRARY_DIRS-build}")
   set(vtk-module-INCLUDE_DIRS "${vtk-module-INCLUDE_DIRS-build}")
   set(vtk-module-EXPORT_CODE "${vtk-module-EXPORT_CODE-build}")
   set(vtk-module-WRAP_HIERARCHY_FILE "${${vtk-module}_WRAP_HIERARCHY_FILE}")
   configure_file(${_VTKModuleMacros_DIR}/vtkModuleInfo.cmake.in
     ${VTK_MODULES_DIR}/${vtk-module}.cmake @ONLY)
   set(vtk-module-INCLUDE_DIRS "${vtk-module-INCLUDE_DIRS-install}")
+  set(vtk-module-RUNTIME_LIBRARY_DIRS "${vtk-module-RUNTIME_LIBRARY_DIRS-install}")
   set(vtk-module-EXPORT_CODE "${vtk-module-EXPORT_CODE-install}")
   set(vtk-module-WRAP_HIERARCHY_FILE
     "\${CMAKE_CURRENT_LIST_DIR}/${vtk-module}Hierarchy.txt")
@@ -203,8 +249,12 @@ macro(vtk_module_export_info)
   endif()
 endmacro()
 
+# vtk_module_export(<sources>)
+#
 # Export data from a module such as name, include directory and header level
-# information useful for wrapping.
+# information useful for wrapping. This calls vtk_module_export_info() and then
+# exports additional information in a supplemental file useful for wrapping
+# generators.
 function(vtk_module_export sources)
   vtk_module_export_info()
   # Now iterate through the headers in the module to get header level information.
@@ -283,12 +333,14 @@ macro(vtk_target_label _target_name)
   set_property(TARGET ${_target_name} PROPERTY LABELS ${_label})
 endmacro()
 
+# vtk_target_name(<name>)
+#
 # This macro does some basic checking for library naming, and also adds a suffix
 # to the output name with the VTK version by default. Setting the variable
 # VTK_CUSTOM_LIBRARY_SUFFIX will override the suffix.
 macro(vtk_target_name _name)
   get_property(_type TARGET ${_name} PROPERTY TYPE)
-  if(NOT "${_type}" STREQUAL EXECUTABLE)
+  if(NOT "${_type}" STREQUAL EXECUTABLE AND NOT VTK_JAVA_INSTALL)
     set_property(TARGET ${_name} PROPERTY VERSION 1)
     set_property(TARGET ${_name} PROPERTY SOVERSION 1)
   endif()
@@ -314,6 +366,9 @@ endmacro()
 
 macro(vtk_target_install _name)
   if(NOT VTK_INSTALL_NO_LIBRARIES)
+    if(APPLE AND VTK_JAVA_INSTALL)
+       set_target_properties(${_name} PROPERTIES SUFFIX ".jnilib")
+    endif(APPLE AND VTK_JAVA_INSTALL)
     install(TARGETS ${_name}
       EXPORT ${VTK_INSTALL_EXPORT_NAME}
       RUNTIME DESTINATION ${VTK_INSTALL_RUNTIME_DIR} COMPONENT RuntimeLibraries
@@ -408,7 +463,7 @@ macro(vtk_module_test_executable test_exe_name)
   vtk_module_test()
   # No forwarding or export for test executables.
   add_executable(${test_exe_name} MACOSX_BUNDLE ${ARGN})
-  target_link_libraries(${test_exe_name} ${${vtk-module-test}-Cxx_LIBRARIES})
+  target_link_libraries(${test_exe_name} LINK_PRIVATE ${${vtk-module-test}-Cxx_LIBRARIES})
 
   if(${vtk-module-test}-Cxx_DEFINITIONS)
     set_property(TARGET ${test_exe_name} APPEND PROPERTY COMPILE_DEFINITIONS
@@ -476,7 +531,21 @@ function(vtk_module_library name)
 
   vtk_add_library(${vtk-module} ${ARGN} ${_hdrs} ${_instantiator_SRCS} ${_hierarchy})
   foreach(dep IN LISTS ${vtk-module}_LINK_DEPENDS)
-    target_link_libraries(${vtk-module} ${${dep}_LIBRARIES})
+    target_link_libraries(${vtk-module} LINK_PUBLIC ${${dep}_LIBRARIES})
+    if(_help_vs7 AND ${dep}_LIBRARIES)
+      add_dependencies(${vtk-module} ${${dep}_LIBRARIES})
+    endif()
+  endforeach()
+
+  # Handle the private dependencies, setting up link/include directories.
+  foreach(dep IN LISTS ${vtk-module}_PRIVATE_DEPENDS)
+    if(${dep}_INCLUDE_DIRS)
+      include_directories(${${dep}_INCLUDE_DIRS})
+    endif()
+    if(${dep}_LIBRARY_DIRS)
+      link_directories(${${dep}_LIBRARY_DIRS})
+    endif()
+    target_link_libraries(${vtk-module} LINK_PRIVATE ${${dep}_LIBRARIES})
     if(_help_vs7 AND ${dep}_LIBRARIES)
       add_dependencies(${vtk-module} ${${dep}_LIBRARIES})
     endif()
@@ -646,6 +715,19 @@ macro(vtk_module_third_party _pkg)
     else()
       set(vtk${_lower}_LIBRARIES "${${_upper}_LIBRARIES}")
     endif()
+
+    # A workaround for bad FindHDF5 behavior in which the debug or optimized
+    # entries can end up empty. CMake >= 2.8.12.2 makes this unnecessary.
+    string(REGEX MATCH "debug;.*optimized;.*"
+           _remove_deb_opt "${vtk${_lower}_LIBRARIES}")
+    if (_remove_deb_opt)
+      set(_tmp ${vtk${_lower}_LIBRARIES})
+      list(REMOVE_ITEM _tmp "debug")
+      list(REMOVE_ITEM _tmp "optimized")
+      list(REMOVE_DUPLICATES _tmp)
+      set(vtk${_lower}_LIBRARIES ${_tmp})
+    endif()
+
     set(vtk${_lower}_INCLUDE_DIRS "")
   else()
     if(_nolibs)
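
To illustrate the vtk_module() options documented earlier in this file, a hypothetical module.cmake could read as follows; the module, dependency, and group names are only examples.

    # Hypothetical module.cmake declaration (illustration only).
    vtk_module(vtkMyExampleModule
      DEPENDS
        vtkCommonCore
      PRIVATE_DEPENDS
        vtkCommonDataModel
      TEST_DEPENDS
        vtkTestingCore
      DESCRIPTION "Example declaration; not part of this patch."
      GROUPS
        StandAlone
      )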
diff --git a/CMake/vtkModuleTop.cmake b/CMake/vtkModuleTop.cmake
index b90f53d..6753f47 100644
--- a/CMake/vtkModuleTop.cmake
+++ b/CMake/vtkModuleTop.cmake
@@ -334,6 +334,7 @@ if (NOT VTK_INSTALL_NO_DEVELOPMENT)
                 CMake/vtk-forward.c.in
                 CMake/vtkForwardingExecutable.cmake
                 CMake/vtkJavaWrapping.cmake
+                CMake/vtkMakeInstantiator.cmake
                 CMake/vtkModuleAPI.cmake
                 CMake/vtkModuleHeaders.cmake.in
                 CMake/vtkModuleInfo.cmake.in
diff --git a/CMake/vtkPythonPackages.cmake b/CMake/vtkPythonPackages.cmake
new file mode 100644
index 0000000..99330d8
--- /dev/null
+++ b/CMake/vtkPythonPackages.cmake
@@ -0,0 +1,128 @@
+#------------------------------------------------------------------------------
+# Function used to copy a Python package into the binary directory and compile
+# it.
+# package     :- The name of the Python package.
+# source_dir  :- The directory containing the Python source.
+# binary_dir  :- The directory to copy files to and compile into.
+#------------------------------------------------------------------------------
+function(build_python_package package source_dir binary_dir)
+
+  set (copy-complete "${binary_dir}/${package}-copy-complete")
+
+  copy_files_recursive("${source_dir}"
+    DESTINATION "${binary_dir}"
+    REGEX "^(.*\\.py)$"
+    OUTPUT ${copy-complete}
+    LABEL "Copying ${package} files")
+
+  set(CMAKE_CONFIGURABLE_FILE_CONTENT
+    "from compileall import compile_dir
+compile_dir('${binary_dir}')
+file = open('${binary_dir}/${package}_complete', 'w')
+file.write('Done')
+")
+  configure_file(${CMAKE_ROOT}/Modules/CMakeConfigurableFile.in
+    "${CMAKE_CURRENT_BINARY_DIR}/compile_py" @ONLY)
+  unset(CMAKE_CONFIGURABLE_FILE_CONTENT)
+
+  add_custom_command(
+    COMMAND ${PYTHON_EXECUTABLE} ARGS ${CMAKE_CURRENT_BINARY_DIR}/compile_py
+    COMMAND ${PYTHON_EXECUTABLE} ARGS -O ${CMAKE_CURRENT_BINARY_DIR}/compile_py
+    DEPENDS ${CMAKE_CURRENT_BINARY_DIR}/compile_py
+            ${copy-complete}
+    OUTPUT  "${binary_dir}/${package}_complete"
+    COMMENT "Compiling Python files")
+
+  add_custom_target(${package} ALL DEPENDS "${binary_dir}/${package}_complete")
+endfunction()
+
+#------------------------------------------------------------------------------
+# Function used to copy arbitrary files matching certain patterns.
+# Usage:
+# copy_files_recursive(<source-dir>
+#   DESTINATION <destination-dir>
+#   [LABEL "<label to use>"]
+#   [OUTPUT "<file generated to mark end of copying>"]
+#   [REGEX <regex> [EXCLUDE]]
+#   )
+# One can specify multiple REGEX or REGEX <regex> EXCLUDE arguments.
+#------------------------------------------------------------------------------
+function(copy_files_recursive source-dir)
+  set (dest-dir)
+  set (patterns)
+  set (exclude-patterns)
+  set (output-file)
+  set (label "Copying files")
+
+  set (doing "")
+  foreach (arg ${ARGN})
+    if (arg MATCHES "^(DESTINATION|REGEX|OUTPUT|LABEL)$")
+      set (doing "${arg}")
+    elseif ("${doing}" STREQUAL "DESTINATION")
+      set (doing "")
+      set (dest-dir "${arg}")
+    elseif ("${doing}" STREQUAL "REGEX")
+      set (doing "SET")
+      list (APPEND patterns "${arg}")
+    elseif (("${arg}" STREQUAL "EXCLUDE") AND ("${doing}" STREQUAL "SET"))
+      set (doing "")
+      list (GET patterns -1 cur-pattern)
+      list (REMOVE_AT patterns -1)
+      list (APPEND exclude-patterns "${cur-pattern}")
+    elseif ("${doing}" STREQUAL "OUTPUT")
+      set (doing "")
+      set (output-file "${arg}")
+    elseif ("${doing}" STREQUAL "LABEL")
+      set (doing "")
+      set (label "${arg}")
+    else()
+      message(AUTHOR_WARNING "Unknown argument [${arg}]")
+    endif()
+  endforeach()
+
+  set (match-regex)
+  foreach (_item ${patterns})
+    if (match-regex)
+      set (match-regex "${match-regex}")
+    endif()
+    set (match-regex "${match-regex}${_item}")
+  endforeach()
+
+  set (exclude-regex)
+  foreach (_item ${exclude-patterns})
+    if (exclude-regex)
+      set (exclude-regex "${exclude-regex}|")
+    endif()
+    set (exclude-regex "${exclude-regex}${_item}")
+  endforeach()
+
+  file(GLOB_RECURSE _all_files RELATIVE "${source-dir}" "${source-dir}/*")
+
+  set (all_files)
+  set (copy-commands)
+  foreach (_file ${_all_files})
+    if (exclude-regex AND ("${_file}" MATCHES "${exclude-regex}"))
+      # skip
+    elseif ("${_file}" MATCHES "${match-regex}")
+      set (in-file "${source-dir}/${_file}")
+      set (out-file "${dest-dir}/${_file}")
+      get_filename_component(out-path ${out-file} PATH)
+      list (APPEND all_files ${in-file})
+      set (copy-commands "${copy-commands}
+        file(COPY \"${in-file}\" DESTINATION \"${out-path}\")")
+    endif()
+  endforeach()
+
+  get_filename_component(_name ${output-file} NAME)
+  set(CMAKE_CONFIGURABLE_FILE_CONTENT ${copy-commands})
+  configure_file(${CMAKE_ROOT}/Modules/CMakeConfigurableFile.in
+    "${CMAKE_CURRENT_BINARY_DIR}/${_name}.cfr.cmake" @ONLY)
+  unset(CMAKE_CONFIGURABLE_FILE_CONTENT)
+
+  add_custom_command(OUTPUT ${output-file}
+    COMMAND ${CMAKE_COMMAND} -P "${CMAKE_CURRENT_BINARY_DIR}/${_name}.cfr.cmake"
+    COMMAND ${CMAKE_COMMAND} -E touch ${output-file}
+    DEPENDS ${all_files}
+            "${CMAKE_CURRENT_BINARY_DIR}/${_name}.cfr.cmake"
+    COMMENT ${label})
+endfunction()
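
    The two helpers in the new vtkPythonPackages.cmake are normally used
    together; a hypothetical call site (package name and directories are
    invented for illustration) might look like:

        # Copy a Python package into the build tree and byte-compile it.
        build_python_package("mypkg"
          "${CMAKE_CURRENT_SOURCE_DIR}/mypkg"
          "${CMAKE_CURRENT_BINARY_DIR}/mypkg")

        # Or copy selected files only, excluding tests, with an explicit stamp file.
        copy_files_recursive("${CMAKE_CURRENT_SOURCE_DIR}/mypkg"
          DESTINATION "${CMAKE_CURRENT_BINARY_DIR}/mypkg"
          REGEX "^(.*\\.py)$"
          REGEX "^(.*_test\\.py)$" EXCLUDE
          OUTPUT "${CMAKE_CURRENT_BINARY_DIR}/mypkg-copy-complete"
          LABEL "Copying mypkg sources")
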
diff --git a/CMake/vtkPythonWrapping.cmake b/CMake/vtkPythonWrapping.cmake
index b694a8c..a5c5841 100644
--- a/CMake/vtkPythonWrapping.cmake
+++ b/CMake/vtkPythonWrapping.cmake
@@ -55,7 +55,7 @@ function(vtk_add_python_wrapping module_name)
     set_property(TARGET ${module_name}PythonD PROPERTY COMPILE_DEFINITIONS
       "${module_name}_AUTOINIT=1(${module_name})")
   endif()
-  target_link_libraries(${module_name}PythonD ${module_name}
+  target_link_libraries(${module_name}PythonD LINK_PUBLIC ${module_name}
     vtkWrappingPythonCore ${extra_links} ${VTK_PYTHON_LIBRARIES})
 
   _vtk_add_python_module(${module_name}Python ${module_name}PythonInit.cxx)
@@ -78,11 +78,6 @@ function(_vtk_add_python_module name)
   if (BUILD_SHARED_LIBS)
     add_library(${name} MODULE ${ARGN})
     set_property(TARGET ${name} PROPERTY PREFIX "${PYTHON_MODULE_PREFIX}")
-    if (VTK_INSTALL_PYTHON_USING_CMAKE)
-      # if setup.py is not being used to install python modules, we need to
-      # add install rules for them.
-      vtk_target_install(${name})
-    endif()
     if (WIN32 AND NOT CYGWIN)
       # when building shared on Windows, the python module files need to be
       # named as *.pyd
diff --git a/CMake/vtkQt.cmake b/CMake/vtkQt.cmake
new file mode 100644
index 0000000..6e24b5d
--- /dev/null
+++ b/CMake/vtkQt.cmake
@@ -0,0 +1,8 @@
+set(VTK_QT_VERSION "4" CACHE STRING "Expected Qt version")
+mark_as_advanced(VTK_QT_VERSION)
+
+set_property(CACHE VTK_QT_VERSION PROPERTY STRINGS 4 5)
+
+if(NOT (VTK_QT_VERSION VERSION_EQUAL "4" OR VTK_QT_VERSION VERSION_EQUAL "5"))
+  message(FATAL_ERROR "Expected value for VTK_QT_VERSION is either '4' or '5'")
+endif()
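
    VTK_QT_VERSION is a cache variable, so it is normally chosen on the
    configure command line (for example -DVTK_QT_VERSION=5). A hypothetical
    consumer of the setting could branch on it like this:

        if(VTK_QT_VERSION VERSION_EQUAL "5")
          find_package(Qt5Widgets REQUIRED)
        else()
          find_package(Qt4 REQUIRED)
        endif()
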
diff --git a/CMake/vtkRequireLargeFilesSupport.cxx b/CMake/vtkRequireLargeFilesSupport.cxx
index 9f60e6b..3d7c863 100644
--- a/CMake/vtkRequireLargeFilesSupport.cxx
+++ b/CMake/vtkRequireLargeFilesSupport.cxx
@@ -3,7 +3,7 @@
 #define _FILE_OFFSET_BITS 64
 #include <sys/types.h>
 #include <sys/stat.h>
-#include <assert.h>
+#include <cassert>
 #include <stdio.h>
 
 int main( int, char **argv )
diff --git a/CMake/vtkTclWrapping.cmake b/CMake/vtkTclWrapping.cmake
index 4316446..ec754e4 100644
--- a/CMake/vtkTclWrapping.cmake
+++ b/CMake/vtkTclWrapping.cmake
@@ -63,6 +63,6 @@ function(vtk_add_tcl_wrapping module_name module_srcs module_hdrs)
     set_property(TARGET ${tcl_module}TCL PROPERTY COMPILE_DEFINITIONS
       "${module_name}_AUTOINIT=1(${module_name})")
   endif()
-  target_link_libraries(${tcl_module}TCL ${module_name}
+  target_link_libraries(${tcl_module}TCL LINK_PUBLIC ${module_name}
     ${extra_links} ${VTK_TCL_LIBRARIES})
 endfunction()
diff --git a/CMake/vtkTestFFMPEG.cmake b/CMake/vtkTestFFMPEG.cmake
index e386c8a..7cba9f3 100644
--- a/CMake/vtkTestFFMPEG.cmake
+++ b/CMake/vtkTestFFMPEG.cmake
@@ -1,107 +1,63 @@
-IF (FFMPEG_INCLUDE_DIR)
-  IF("VTK_FFMPEG_HAS_OLD_HEADER" MATCHES "^VTK_FFMPEG_HAS_OLD_HEADER$" OR NOT "VTK_FFMPEG_CACHED_INCLUDE" MATCHES "^${FFMPEG_INCLUDE_DIR}$")
-    IF (EXISTS ${FFMPEG_INCLUDE_DIR}/ffmpeg)
-      SET(VTK_FFMPEG_HAS_OLD_HEADER "TRUE" CACHE INTERNAL "Is the FFMPEG include in the old location" FORCE)
-    ELSE (EXISTS ${FFMPEG_INCLUDE_DIR}/ffmpeg)
-      SET(VTK_FFMPEG_HAS_OLD_HEADER "FALSE" CACHE INTERNAL "Is the FFMPEG include in the old location" FORCE)
-    ENDIF (EXISTS ${FFMPEG_INCLUDE_DIR}/ffmpeg)
-    IF (VTK_FFMPEG_HAS_OLD_HEADER)
-      MESSAGE(STATUS "Checking if FFMPEG uses old style header files - yes")
-    ELSE (VTK_FFMPEG_HAS_OLD_HEADER)
-      MESSAGE(STATUS "Checking if FFMPEG uses old style header files - no")
-    ENDIF (VTK_FFMPEG_HAS_OLD_HEADER)
-    SET(VTK_FFMPEG_CACHED_INCLUDE ${FFMPEG_INCLUDE_DIR} CACHE INTERNAL "Previous value of FFMPEG_INCLUDE_DIR" FORCE)
-  ENDIF("VTK_FFMPEG_HAS_OLD_HEADER" MATCHES "^VTK_FFMPEG_HAS_OLD_HEADER$" OR NOT "VTK_FFMPEG_CACHED_INCLUDE" MATCHES "^${FFMPEG_INCLUDE_DIR}$")
-
-  IF("VTK_FFMPEG_HAS_IMG_CONVERT" MATCHES "^VTK_FFMPEG_HAS_IMG_CONVERT$" OR NOT "VTK_FFMPEG_CACHED_AVCODEC" MATCHES "^${FFMPEG_avcodec_LIBRARY}$")
-    IF(VTK_FFMPEG_HAS_OLD_HEADER)
-      SET(VTK_FFMPEG_CDEFS "HAS_OLD_HEADER")
-    ELSE(VTK_FFMPEG_HAS_OLD_HEADER)
-      SET(VTK_FFMPEG_CDEFS "HAS_NEW_HEADER")
-    ENDIF(VTK_FFMPEG_HAS_OLD_HEADER)
-
-    IF(FFMPEG_avcodec_LIBRARY)
-      TRY_COMPILE(VTK_FFMPEG_HAS_IMG_CONVERT
-        ${VTK_BINARY_DIR}/CMakeTmp
-        ${VTK_CMAKE_DIR}/vtkFFMPEGTestImgConvert.cxx
-        CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${FFMPEG_INCLUDE_DIR}"
-          "-DLINK_LIBRARIES:STRING=${FFMPEG_avcodec_LIBRARY};${FFMPEG_avutil_LIBRARY}"
-          -DCOMPILE_DEFINITIONS:STRING=-D${VTK_FFMPEG_CDEFS}
-        OUTPUT_VARIABLE OUTPUT)
-      IF(VTK_FFMPEG_HAS_IMG_CONVERT)
-        MESSAGE(STATUS "Checking if FFMPEG has img_convert - found")
-        FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
-                "Checking if FFMPEG has img_convert (passed):\n"
-                "${OUTPUT}\n\n")
-      ELSE(VTK_FFMPEG_HAS_IMG_CONVERT)
-        MESSAGE(STATUS "Checking if FFMPEG has img_convert - not found")
-        FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
-                "Checking if FFMPEG has img_convert (failed):\n"
-                "${OUTPUT}\n\n")
-      ENDIF(VTK_FFMPEG_HAS_IMG_CONVERT)
-    ENDIF(FFMPEG_avcodec_LIBRARY)
-    SET(VTK_FFMPEG_CACHED_AVCODEC ${FFMPEG_avcodec_LIBRARY} CACHE INTERNAL "Previous value of FFMPEG_avcodec_LIBRARY" FORCE)
-  ENDIF("VTK_FFMPEG_HAS_IMG_CONVERT" MATCHES "^VTK_FFMPEG_HAS_IMG_CONVERT$" OR NOT "VTK_FFMPEG_CACHED_AVCODEC" MATCHES "^${FFMPEG_avcodec_LIBRARY}$")
-
-  IF("VTK_FFMPEG_OLD_URL_FCLOSE" MATCHES "^VTK_FFMPEG_OLD_URL_FCLOSE$" OR NOT "VTK_FFMPEG_CACHED_AVFORMAT" MATCHES "^${FFMPEG_avformat_LIBRARY}$")
-    IF(VTK_FFMPEG_HAS_OLD_HEADER)
-      SET(VTK_FFMPEG_CDEFS "HAS_OLD_HEADER")
-    ELSE(VTK_FFMPEG_HAS_OLD_HEADER)
-      SET(VTK_FFMPEG_CDEFS "HAS_NEW_HEADER")
-    ENDIF(VTK_FFMPEG_HAS_OLD_HEADER)
-
-    IF(FFMPEG_avformat_LIBRARY)
-      TRY_COMPILE(VTK_FFMPEG_OLD_URL_FCLOSE
-        ${VTK_BINARY_DIR}/CMakeTmp
-        ${VTK_CMAKE_DIR}/vtkFFMPEGTestURLFClose.cxx
-        CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${FFMPEG_INCLUDE_DIR}"
-          -DCOMPILE_DEFINITIONS:STRING=-D${VTK_FFMPEG_CDEFS}
-          "-DLINK_LIBRARIES:STRING=${FFMPEG_avformat_LIBRARY};${FFMPEG_avutil_LIBRARY};${FFMPEG_avcodec_LIBRARY}"
-        OUTPUT_VARIABLE OUTPUT)
-
-        IF(VTK_FFMPEG_OLD_URL_FCLOSE)
-          MESSAGE(STATUS "Checking if FFMPEG uses old API for url_fclose - found")
-          FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
-                  "Checking if FFMPEG uses old API for url_fclose (passed):\n"
-                  "${OUTPUT}\n\n")
-        ELSE(VTK_FFMPEG_OLD_URL_FCLOSE)
-          MESSAGE(STATUS "Checking if FFMPEG uses old API for url_fclose - not found")
-          FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
-                  "Checking if FFMPEG uses old API for url_fclose (failed):\n"
-                  "${OUTPUT}\n\n")
-        ENDIF(VTK_FFMPEG_OLD_URL_FCLOSE)
-    ENDIF(FFMPEG_avformat_LIBRARY)
-  ENDIF("VTK_FFMPEG_OLD_URL_FCLOSE" MATCHES "^VTK_FFMPEG_OLD_URL_FCLOSE$" OR NOT "VTK_FFMPEG_CACHED_AVFORMAT" MATCHES "^${FFMPEG_avformat_LIBRARY}$")
-
-  IF("VTK_FFMPEG_NEW_ALLOC" MATCHES "^VTK_FFMPEG_NEW_ALLOC$" OR NOT "VTK_FFMPEG_CACHED_AVFORMAT" MATCHES "^${FFMPEG_avformat_LIBRARY}$")
-    IF(VTK_FFMPEG_HAS_OLD_HEADER)
-      SET(VTK_FFMPEG_CDEFS "HAS_OLD_HEADER")
-    ELSE(VTK_FFMPEG_HAS_OLD_HEADER)
-      SET(VTK_FFMPEG_CDEFS "HAS_NEW_HEADER")
-    ENDIF(VTK_FFMPEG_HAS_OLD_HEADER)
-
-    IF(FFMPEG_avformat_LIBRARY)
-      TRY_COMPILE(VTK_FFMPEG_NEW_ALLOC
-        ${VTK_BINARY_DIR}/CMakeTmp
-        ${VTK_CMAKE_DIR}/vtkFFMPEGTestAvAlloc.cxx
-        CMAKE_FLAGS "-DINCLUDE_DIRECTORIES:STRING=${FFMPEG_INCLUDE_DIR}"
-          -DCOMPILE_DEFINITIONS:STRING=-D${VTK_FFMPEG_CDEFS}
-          -DCOMPILE_DEFINITIONS:STRING=-D__STDC_CONSTANT_MACROS
-         "-DLINK_LIBRARIES:STRING=${FFMPEG_avformat_LIBRARY}"
-        OUTPUT_VARIABLE OUTPUT)
-
-      IF(VTK_FFMPEG_NEW_ALLOC)
-        MESSAGE(STATUS "Checking if FFMPEG has avformat_alloc_context - found")
-        FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeOutput.log
-                "Checking if FFMPEG has avformat_alloc_context (passed):\n"
-                "${OUTPUT}\n\n")
-      ELSE(VTK_FFMPEG_NEW_ALLOC)
-        MESSAGE(STATUS "Checking if FFMPEG has avformat_alloc_context - not found")
-        FILE(APPEND ${CMAKE_BINARY_DIR}${CMAKE_FILES_DIRECTORY}/CMakeError.log
-                "Checking if FFMPEG has avformat_alloc_context (failed):\n"
-                "${OUTPUT}\n\n")
-      ENDIF(VTK_FFMPEG_NEW_ALLOC)
-    ENDIF(FFMPEG_avformat_LIBRARY)
-    SET(VTK_FFMPEG_CACHED_AVFORMAT ${FFMPEG_avformat_LIBRARY} CACHE INTERNAL "Previous value of FFMPEG_avformat_LIBRARY" FORCE)
-  ENDIF("VTK_FFMPEG_NEW_ALLOC" MATCHES "^VTK_FFMPEG_NEW_ALLOC$" OR NOT "VTK_FFMPEG_CACHED_AVFORMAT" MATCHES "^${FFMPEG_avformat_LIBRARY}$")
-ENDIF (FFMPEG_INCLUDE_DIR)
+if(FFMPEG_INCLUDE_DIR)
+  if(NOT DEFINED VTK_FFMPEG_HAS_OLD_HEADER)
+    if(EXISTS ${FFMPEG_INCLUDE_DIR}/ffmpeg)
+      set(VTK_FFMPEG_HAS_OLD_HEADER "TRUE" CACHE INTERNAL
+        "Is the FFMPEG include in the old location")
+    else()
+      set(VTK_FFMPEG_HAS_OLD_HEADER "FALSE" CACHE INTERNAL
+        "Is the FFMPEG include in the old location")
+    endif()
+    if(VTK_FFMPEG_HAS_OLD_HEADER)
+      message(STATUS "Checking if FFMPEG uses old style header files - yes")
+    else()
+      message(STATUS "Checking if FFMPEG uses old style header files - no")
+    endif()
+  endif()
+
+  if(VTK_FFMPEG_HAS_OLD_HEADER)
+    set(FFMEG_CODEC_HEADER_PATH "ffmpeg")
+    set(FFMEG_FORMAT_HEADER_PATH "ffmpeg")
+  else()
+    set(FFMEG_CODEC_HEADER_PATH "libavcodec")
+    set(FFMEG_FORMAT_HEADER_PATH "libavformat")
+  endif()
+
+  include(CheckCSourceCompiles)
+  set(CMAKE_REQUIRED_INCLUDES ${FFMPEG_INCLUDE_DIR})
+  set(CMAKE_REQUIRED_LIBRARIES ${FFMPEG_avformat_LIBRARY}
+    ${FFMPEG_avutil_LIBRARY} ${FFMPEG_avcodec_LIBRARY})
+
+  if(NOT DEFINED VTK_FFMPEG_HAS_IMG_CONVERT AND FFMPEG_avcodec_LIBRARY)
+    set(_source "
+#include <${FFMEG_CODEC_HEADER_PATH}/avcodec.h>
+int main()
+{
+  img_convert(0, PIX_FMT_RGB24,
+              0, PIX_FMT_RGB24,
+              0, 0);
+  return 0;
+}\n")
+    check_c_source_compiles("${_source}" VTK_FFMPEG_HAS_IMG_CONVERT)
+  endif()
+
+  if(NOT DEFINED VTK_FFMPEG_NEW_ALLOC AND FFMPEG_avformat_LIBRARY)
+    set(_source "
+#include <${FFMEG_FORMAT_HEADER_PATH}/avformat.h>
+int main()
+{
+  avformat_alloc_context();
+  return 0;
+}\n")
+    check_c_source_compiles("${_source}" VTK_FFMPEG_NEW_ALLOC)
+  endif()
+  if(NOT DEFINED VTK_FFMPEG_AVCODECID AND FFMPEG_avformat_LIBRARY)
+    set(_source "
+#include <${FFMEG_FORMAT_HEADER_PATH}/avformat.h>
+int main()
+{
+  enum AVCodecID codec;
+  return 0;
+}\n")
+    check_c_source_compiles("${_source}" VTK_FFMPEG_AVCODECID)
+  endif()
+endif()
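
    The rewrite replaces the old TRY_COMPILE boilerplate with
    CheckCSourceCompiles. The same pattern, reduced to a generic sketch with
    placeholder header, symbol and variable names:

        include(CheckCSourceCompiles)
        set(CMAKE_REQUIRED_INCLUDES ${MYLIB_INCLUDE_DIR})
        set(CMAKE_REQUIRED_LIBRARIES ${MYLIB_LIBRARY})
        check_c_source_compiles("
        #include <mylib/mylib.h>
        int main() { mylib_new_call(); return 0; }
        " MYLIB_HAS_NEW_CALL)
        # MYLIB_HAS_NEW_CALL is cached, so the probe only runs on the first configure.
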
diff --git a/CMake/vtkTestStreamsLibrary.cmake b/CMake/vtkTestStreamsLibrary.cmake
index 3e16846..1e2a9e4 100644
--- a/CMake/vtkTestStreamsLibrary.cmake
+++ b/CMake/vtkTestStreamsLibrary.cmake
@@ -1,7 +1,7 @@
 # Check the severity of EOF bugs in the streams library.
 SET(VTK_TEST_STREAM_EOF_CXX ${VTK_CMAKE_DIR}/vtkTestStreamEOF.cxx.in)
 CONFIGURE_FILE(${VTK_CMAKE_DIR}/vtkTestStreamEOF.cxx.in
-  ${VTK_BINARY_DIR}/CMake/vtkTestStreamEOF.cxx @ONLY IMMEDIATE)
+  ${VTK_BINARY_DIR}/CMake/vtkTestStreamEOF.cxx @ONLY)
 IF("VTK_ANSI_STREAM_EOF_RESULT" MATCHES "^VTK_ANSI_STREAM_EOF_RESULT$")
   MESSAGE(STATUS "Checking ANSI streams end-of-file bug level")
   TRY_RUN(VTK_ANSI_STREAM_EOF_RESULT VTK_ANSI_STREAM_EOF_COMPILED
@@ -18,7 +18,7 @@ SET(VTK_STREAM_EOF_SEVERITY ${VTK_ANSI_STREAM_EOF_RESULT})
 
 IF(VTK_SIZEOF_LONG_LONG)
   CONFIGURE_FILE(${VTK_CMAKE_DIR}/vtkTestStreamLongLong.cxx.in
-    ${VTK_BINARY_DIR}/CMake/vtkTestStreamLongLong.cxx @ONLY IMMEDIATE)
+    ${VTK_BINARY_DIR}/CMake/vtkTestStreamLongLong.cxx @ONLY)
   IF("VTK_OSTREAM_SUPPORTS_LONG_LONG" MATCHES "^VTK_OSTREAM_SUPPORTS_LONG_LONG$")
     MESSAGE(STATUS "Checking if ostream supports long long")
     TRY_COMPILE(VTK_OSTREAM_SUPPORTS_LONG_LONG
diff --git a/CMake/vtkTestingMacros.cmake b/CMake/vtkTestingMacros.cmake
index 57a3bf8..39e165d 100644
--- a/CMake/vtkTestingMacros.cmake
+++ b/CMake/vtkTestingMacros.cmake
@@ -1,258 +1,254 @@
-#-----------------------------------------------------------------------------
-# Private helper macros.
-
-macro(parse_optional_arguments)
-  set (BASELINEDIR ${vtk-module})
-  #set (DATADIR ${VTK_DATA_ROOT}) #don't do this: some tests don't want it
-
-  set(argv ${ARGV})
-  set(MYARGV)
-  set (i 0)
-  while (${i} LESS ${ARGC})
-    math(EXPR iplus1 "${i}+1")
-    list(GET argv ${i} ARG)
-    if (${ARG} STREQUAL "BASELINEDIR")
-      list(GET argv ${iplus1} BASELINEDIR)
-      set (i ${iplus1})
-      math(EXPR iplus1 "${i}+1")
-    elseif(${ARG} STREQUAL "DATADIR")
-      list(GET argv ${iplus1} DATADIR)
-      set (i ${iplus1})
-      math(EXPR iplus1 "${i}+1")
-    elseif(${ARG} STREQUAL "LABELS" AND ${iplus1} LESS ${ARGC})
-      # everything after LABELS gets added as a label
-      set(LABELS)
-      while (${iplus1} LESS ${ARGC})
-        list(GET argv ${iplus1} LABEL)
-        list(APPEND LABELS ${LABEL})
-        set(i ${iplus1})
-        math(EXPR iplus1 "${i}+1")
-      endwhile()
+# -----------------------------------------------------------------------------
+# Usage: vtk_add_test_mpi(name [TESTING_DATA] [extra_source.cxx ...])
+macro (vtk_add_test_mpi name)
+  get_filename_component(TName ${name} NAME_WE)
+  set(argn "${ARGN}")
+  set(data_dir "")
+  set(test_extra "")
+  foreach(a IN LISTS argn)
+    if("[${a}]" STREQUAL "[TESTING_DATA]")
+      set(data_dir ${VTK_TEST_DATA_DIR})
+    elseif("x${a}" MATCHES "\\.cxx$")
+      list(APPEND test_extra ${a})
     else()
-      list(APPEND MYARGV ${ARG})
+      message(FATAL_ERROR "Unknown argument \"${a}\"")
     endif()
-    set(i ${iplus1})
-  endwhile()
-endmacro(parse_optional_arguments)
-
-#-----------------------------------------------------------------------------
-# Public interface macros.
-
-
-# -----------------------------------------------------------------------------
-# vtk_tests(cxxfiles [BASELINEDIR baseline_directory] [DATADIR data_directory] [LABELS test_labels])
-#
-# Takes a list of cxx files which will be driven by the modules
-# test driver. This helps reduce a lot of boiler place code in each module
-#
-# BASELINEDIR a baseline directory to look for correct images in. If not
-# specified it will look for a directory named for the module the test is in.
-#
-# DATADIR a data directory to look for input data to the tests in. If not
-# specified the test is assumed to not require input data.
-# Ex. ${VTK_DATA_ROOT} or ${VTK_LARGE_DATA_ROOT}
-#
-# LABELS labels to be added to the tests. Note that the
-# [LABELS test_labels] must be at the end of the macro call since all strings
-# after LABELS will be added as labels to the tests.
-# Ex. PARAVIEW to label that the test is for ParaView and can be run
-# with ctest -L PARAVIEW
-macro(vtk_tests)
+  endforeach()
 
-  parse_optional_arguments(${ARGV})
-  create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-    ${MYARGV}
-    EXTRA_INCLUDE vtkTestDriver.h)
+  if(data_dir)
+    set(_D -D ${data_dir})
+    set(_T -T ${VTK_BINARY_DIR}/Testing/Temporary)
+    set(_V -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/${TName}.png,:}")
+  else()
+    set(_D "")
+    set(_T "")
+    set(_V "")
+  endif()
 
-  vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
+  ExternalData_add_test(VTKData
+    NAME ${vtk-module}Cxx-MPI-${TName}
+    COMMAND ${VTK_MPIRUN_EXE}
+    ${VTK_MPI_PRENUMPROC_FLAGS} ${VTK_MPI_NUMPROC_FLAG} ${VTK_MPI_MAX_NUMPROCS}
+    ${VTK_MPI_PREFLAGS}
+    $<TARGET_FILE:${TName}>
+    ${_D} ${_T} ${_V}
+    ${VTK_MPI_POSTFLAGS})
 
-  set(TestsToRun ${Tests})
-  list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
+  vtk_module_test_executable(${TName} ${TName}.cxx ${test_extra})
+endmacro()
 
-  # Add all the executables
-  foreach(test ${TestsToRun})
-    get_filename_component(TName ${test} NAME_WE)
-    if(DATADIR)
-      add_test(NAME ${vtk-module}Cxx-${TName}
-        COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${DATADIR}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/${BASELINEDIR}/${TName}.png)
+# -----------------------------------------------------------------------------
+# Usage: vtk_add_test_cxx([name.cxx[,-E<n>][,NO_VALID]]...
+#          [NO_DATA] [NO_VALID] [NO_OUTPUT])
+function(vtk_add_test_cxx)
+  # Parse Command line args
+  set(names "")
+  set(no_data 0)
+  set(no_valid 0)
+  set(no_output 0)
+  foreach(a IN LISTS ARGN)
+    if("[${a}]" STREQUAL "[NO_DATA]")
+      set(no_data 1)
+    elseif("[${a}]" STREQUAL "[NO_VALID]")
+      set(no_valid 1)
+    elseif("[${a}]" STREQUAL "[NO_OUTPUT]")
+      set(no_output 1)
+    elseif("x${a}" MATCHES "^x([^.]*)\\.cxx,?(.*)$")
+      set(name "${CMAKE_MATCH_1}")
+      string(REPLACE "," ";" _${name}_OPTIONS "${CMAKE_MATCH_2}")
+      list(APPEND names ${name})
     else()
-      add_test(NAME ${vtk-module}Cxx-${TName}
-        COMMAND ${vtk-module}CxxTests ${TName}
-        ${${TName}_ARGS})
-    endif()
-    if(LABELS)
-      set_tests_properties(${vtk-module}Cxx-${TName} PROPERTIES LABELS "${LABELS}")
+      message(FATAL_ERROR "Unknown argument \"${a}\"")
     endif()
   endforeach()
-endmacro(vtk_tests)
-
-# -----------------------------------------------------------------------------
-# add_test_mpi(filenames [DATADIR data_directory)]
-# Adds one or more tests that are run under MPI.
-#
-# DATADIR a data directory to look for input data to the tests in. If not
-# specified the test is assumed to not require input data.
-# Ex. ${VTK_DATA_ROOT} or ${VTK_LARGE_DATA_ROOT}
-macro (add_test_mpi fileName)
 
-  parse_optional_arguments(${ARGV})
+  if(NOT no_data)
+    set(_D -D ${VTK_TEST_DATA_DIR})
+  else()
+    set(_D "")
+  endif()
 
-  get_filename_component(name ${fileName} NAME_WE)
-  list(REMOVE_AT MYARGV 0)
-  vtk_module_test_executable(
-    ${name}
-    ${name}.cxx
-    ${MYARGV})
+  set(_T "")
+  if(NOT no_output)
+    set(_T -T ${VTK_TEST_OUTPUT_DIR})
+  endif()
 
-  if(DATADIR)
-    add_test(
-      NAME ${vtk-module}Cxx-MPI-${name}
-      COMMAND ${VTK_MPIRUN_EXE}
-      ${VTK_MPI_PRENUMPROC_FLAGS} ${VTK_MPI_NUMPROC_FLAG} ${VTK_MPI_MAX_NUMPROCS}
-      ${VTK_MPI_PREFLAGS}
-      $<TARGET_FILE:${name}>
-      -D ${DATADIR}
-      -T ${VTK_BINARY_DIR}/Testing/Temporary
-      -V ${DATADIR}/Baseline/Parallel/${name}.png
-      ${VTK_MPI_POSTFLAGS})
+  if(vtk-module)
+    set(prefix ${vtk-module})
+    set(base_dir ${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline)
+  elseif(vtk-example)
+    set(prefix ${vtk-example})
+    set(base_dir ${CMAKE_CURRENT_SOURCE_DIR}/Baseline)
   else()
-    add_test(
-      NAME ${vtk-module}Cxx-MPI-${name}
-      COMMAND ${VTK_MPIRUN_EXE}
-      ${VTK_MPI_PRENUMPROC_FLAGS} ${VTK_MPI_NUMPROC_FLAG} ${VTK_MPI_MAX_NUMPROCS}
-      ${VTK_MPI_PREFLAGS}
-      $<TARGET_FILE:${name}>
-      ${VTK_MPI_POSTFLAGS})
+    message(FATAL_ERROR "Neither vtk-module nor vtk-example is set!")
   endif()
-endmacro()
 
-# -----------------------------------------------------------------------------
-# vtk_tests_python() macro takes a list of python files and makes them into
-# proper python tests.
-macro(vtk_tests_python)
-  if(VTK_PYTHON_EXE)
-    foreach(test ${ARGV})
-      get_filename_component(TName ${test} NAME_WE)
-      if(VTK_DATA_ROOT)
-        string (REPLACE "vtk" "" _baselinedname ${vtk-module})
-        add_test(NAME ${vtk-module}Python-${TName}
-          COMMAND ${VTK_PYTHON_EXE}
-          ${CMAKE_CURRENT_SOURCE_DIR}/${test}
-          -D ${VTK_DATA_ROOT}
-          -B ${VTK_DATA_ROOT}/Baseline/${_baselinedname})
+  foreach(name ${names})
+    set(_V "")
+    set(_E "")
+    set(tmp_no_valid "${no_valid}")
+    foreach(opt IN LISTS _${name}_OPTIONS)
+      if("x${opt}" MATCHES "^x-E([0-9]+)$")
+        set(_E -E ${CMAKE_MATCH_1})
+      elseif("[${opt}]" STREQUAL "[NO_VALID]")
+        set(tmp_no_valid 1)
       else()
-        add_test(NAME ${vtk-module}Python-${TName}
-          COMMAND ${VTK_PYTHON_EXE}
-          ${CMAKE_CURRENT_SOURCE_DIR}/${test}
-          ${${TName}_ARGS})
+        message(FATAL_ERROR "Test ${name} has unknown option \"${opt}\"")
       endif()
     endforeach()
+    if(NOT tmp_no_valid)
+      set(_V -V "DATA{${base_dir}/${name}.png,:}")
+    endif()
+    ExternalData_add_test(VTKData
+      NAME ${prefix}Cxx-${name}
+      COMMAND ${prefix}CxxTests ${name} ${${name}_ARGS}
+      ${_D} ${_T} ${_V} ${_E})
+    set_property(DIRECTORY APPEND PROPERTY VTK_TEST_CXX_SOURCES ${name}.cxx)
+  endforeach()
+endfunction()
+
+macro(vtk_test_cxx_executable exe_name)
+  set(argn "${ARGN}")
+  set(test_driver vtkTestDriver.h)
+  set(test_extra "")
+  foreach(a IN LISTS argn)
+    if("[${a}]" STREQUAL "[RENDERING_FACTORY]")
+      include(vtkTestingRenderingDriver)
+      set(test_driver ${vtkTestingRendering_SOURCE_DIR}/vtkTestingObjectFactory.h)
+    elseif("x${a}" MATCHES "\\.cxx$")
+      list(APPEND test_extra ${a})
+    else()
+      message(FATAL_ERROR "Unknown argument \"${a}\"")
+    endif()
+  endforeach()
+  get_property(vtk_test_cxx_sources DIRECTORY PROPERTY VTK_TEST_CXX_SOURCES)
+  if(vtk-module)
+    set(tmp_before_tm ${CMAKE_TESTDRIVER_BEFORE_TESTMAIN})
+    set(CMAKE_TESTDRIVER_BEFORE_TESTMAIN
+      "    vtksys::SystemInformation::SetStackTraceOnError(1);\n ${tmp_before_tm}")
+  endif()
+  create_test_sourcelist(Tests ${exe_name}.cxx ${vtk_test_cxx_sources}
+    EXTRA_INCLUDE ${test_driver})
+  if(vtk-module)
+    set(CMAKE_TESTDRIVER_BEFORE_TESTMAIN "${tmp_before_tm}")
+    vtk_module_test_executable(${exe_name} ${Tests} ${test_extra})
+  elseif(vtk-example)
+    add_executable(${exe_name} ${Tests} ${test_extra})
+    target_link_libraries(${exe_name} ${VTK_LIBRARIES})
   else()
-    message(FATAL_ERROR "VTK_PYTHON_EXE not set")
+    message(FATAL_ERROR "Neither vtk-module nor vtk-example is set!")
   endif()
-
-endmacro(vtk_tests_python)
+endmacro()
 
 # -----------------------------------------------------------------------------
-# add_test_python() macro takes a python file and an optional base directory where the
-# corresponding test image is found and list of python files and makes them into
-# proper python tests.
-macro(add_test_python)
-  if(VTK_PYTHON_EXE)
-    # Parse Command line args
-    get_filename_component(TName ${ARGV0} NAME_WE)
-    string (REPLACE "vtk" "" _baselinedname ${vtk-module})
-    # Check if data root and second parameter is present
-    set (base_dir "${ARGV1}")
-    if(VTK_DATA_ROOT AND base_dir)
-      add_test(NAME ${vtk-module}Python-${TName}
-        COMMAND ${VTK_PYTHON_EXE}
-        ${VTK_BINARY_DIR}/Utilities/vtkTclTest2Py/rtImageTest.py
-        ${CMAKE_CURRENT_SOURCE_DIR}/${TName}.py
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_BINARY_DIR}/Testing/Temporary
-        -V Baseline/${ARGV1}/${TName}.png
-        -A "${VTK_BINARY_DIR}/Utilities/vtkTclTest2Py"
-        -A "${VTK_LIBRARY_DIR}")
+# Usage: vtk_add_test_python(name [NO_RT] [NO_DATA] [NO_VALID])
+# NO_RT is for tests using vtk.test.testing
+function(vtk_add_test_python name)
+  if(NOT VTK_PYTHON_EXE)
+    message(FATAL_ERROR "VTK_PYTHON_EXE not set")
+  endif()
+  # Parse Command line args
+  get_filename_component(TName ${name} NAME_WE)
+  set(no_data 0)
+  set(no_valid 0)
+  set(no_output 0)
+  set(no_rt 0)
+  foreach(a IN LISTS ARGN)
+    if("[${a}]" STREQUAL "[NO_DATA]")
+      set(no_data 1)
+    elseif("[${a}]" STREQUAL "[NO_VALID]")
+      set(no_valid 1)
+    elseif("[${a}]" STREQUAL "[NO_OUTPUT]")
+      set(no_output 1)
+    elseif("[${a}]" STREQUAL "[NO_RT]")
+      set(no_rt 1)
     else()
-      add_test(NAME ${vtk-module}Python-${TName}
-        COMMAND ${VTK_PYTHON_EXE}
-        ${CMAKE_CURRENT_SOURCE_DIR}/${TName}.py
-        ${${TName}_ARGS})
+      message(FATAL_ERROR "Unknown argument \"${a}\"")
     endif()
+  endforeach()
+
+  if(NOT no_data)
+    set(_D -D ${VTK_TEST_DATA_DIR})
   else()
-    message(FATAL_ERROR "VTK_PYTHON_EXE not set")
+    set(_D "")
   endif()
-endmacro(add_test_python)
 
-# -----------------------------------------------------------------------------
-# This macro is for tests using vtk.test.testing
-# add_test_python1() macro takes a python file and an optional base directory where the
-# corresponding test image is found and list of python files and makes them into
-# proper python tests.
-# Usage: add_test_python1(name base_dir)
-#    Where: name - the name of the test
-#                  e.g x.py
-#           base_dir - the (optional) base directory where the test image is
-#                      e.g Baseline/Graphics
-macro(add_test_python1)
-  if(VTK_PYTHON_EXE)
-    # Parse Command line args
-    get_filename_component(TName ${ARGV0} NAME_WE)
-    string (REPLACE "vtk" "" _baselinedname ${vtk-module})
-    # Check if data root and second parameter is present
-    set (base_dir "${ARGV1}")
-    if(VTK_DATA_ROOT AND base_dir)
-      add_test(NAME ${vtk-module}Python-${TName}
-        COMMAND ${VTK_PYTHON_EXE}
-        ${CMAKE_CURRENT_SOURCE_DIR}/${TName}.py
-        -D ${VTK_DATA_ROOT}
-        -B ${VTK_DATA_ROOT}/${ARGV1}
-        -T "${VTK_BINARY_DIR}/Testing/Temporary")
+  set(rtImageTest "")
+  set(_B "")
+  set(_V "")
+  set(_T "")
+  set(_A "")
+  if(NOT no_valid)
+    if(no_rt)
+      set(_B -B "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/,REGEX:${TName}(_[0-9]+)?.png}")
     else()
-      add_test(NAME ${vtk-module}Python-${TName}
-        COMMAND ${VTK_PYTHON_EXE}
-        ${CMAKE_CURRENT_SOURCE_DIR}/${TName}.py
-        ${${TName}_ARGS})
+      set(rtImageTest ${VTK_BINARY_DIR}/Utilities/vtkTclTest2Py/rtImageTest.py)
+      set(_V -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/${TName}.png,:}")
+      set(_A -A ${VTK_BINARY_DIR}/Utilities/vtkTclTest2Py)
+    endif()
+    if(NOT no_output)
+      set(_T -T ${VTK_TEST_OUTPUT_DIR})
     endif()
-  else()
-    message(FATAL_ERROR "VTK_PYTHON_EXE not set")
   endif()
-endmacro(add_test_python1)
+
+  ExternalData_add_test(VTKData
+    NAME ${vtk-module}Python-${TName}
+    COMMAND ${VTK_PYTHON_EXE} --enable-bt ${rtImageTest}
+    ${CMAKE_CURRENT_SOURCE_DIR}/${TName}.py ${${TName}_ARGS}
+    ${_D} ${_B} ${_T} ${_V} ${_A})
+endfunction()
 
 # -----------------------------------------------------------------------------
-# add_test_tcl() macro takes a tcl file and an optional base directory where the
-# corresponding test image is found and list of tcl files and makes them into
-# proper tcl tests.
-macro(add_test_tcl)
-  if(VTK_TCL_EXE)
-    # Parse Command line args
-    get_filename_component(TName ${ARGV0} NAME_WE)
-    string (REPLACE "vtk" "" _baselinedname ${vtk-module})
-    # Check if data root and second parameter is present
-    set (base_dir "${ARGV1}")
-    if(VTK_DATA_ROOT AND base_dir)
-      add_test(NAME ${vtk-module}Tcl-${TName}
-        COMMAND ${VTK_TCL_EXE}
-        ${vtkTestingRendering_SOURCE_DIR}/rtImageTest.tcl
-        ${CMAKE_CURRENT_SOURCE_DIR}/${TName}.tcl
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/${ARGV1}/${TName}.png
-        -A ${VTK_SOURCE_DIR}/Wrapping/Tcl)
+# Usage: vtk_add_test_tcl(name [NO_DATA] [NO_VALID])
+function(vtk_add_test_tcl name)
+  if(NOT VTK_TCL_EXE)
+    message(FATAL_ERROR "VTK_TCL_EXE not set")
+  endif()
+  # Parse Command line args
+  get_filename_component(TName ${name} NAME_WE)
+  set(no_data 0)
+  set(no_valid 0)
+  set(no_output 0)
+  set(no_rt 0)
+  foreach(a IN LISTS ARGN)
+    if("[${a}]" STREQUAL "[NO_DATA]")
+      set(no_data 1)
+    elseif("[${a}]" STREQUAL "[NO_VALID]")
+      set(no_valid 1)
+    elseif("[${a}]" STREQUAL "[NO_OUTPUT]")
+      set(no_output 1)
+    elseif("[${a}]" STREQUAL "[NO_RT]")
+      set(no_rt 1)
     else()
-      add_test(NAME ${vtk-module}Tcl-${TName}
-        COMMAND ${VTK_TCL_EXE}
-        ${vtkTestingRendering_SOURCE_DIR}/rtImageTest.tcl
-        ${CMAKE_CURRENT_SOURCE_DIR}/${TName}.tcl
-        -D VTK_DATA_ROOT-NOTFOUND
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -A ${VTK_SOURCE_DIR}/Wrapping/Tcl)
+      message(FATAL_ERROR "Unknown argument \"${a}\"")
     endif()
+  endforeach()
+
+  if(NOT no_data)
+    set(_D -D ${VTK_TEST_DATA_DIR})
+  elseif(no_rt)
+    set(_D "")
   else()
-    message(FATAL_ERROR "VTK_TCL_EXE not set")
+    set(_D -D VTK_DATA_ROOT-NOTFOUND)
+  endif()
+
+  set(rtImageTest "")
+  set(_V "")
+  set(_T "")
+  if(NOT no_rt)
+    set(rtImageTest ${vtkTestingRendering_SOURCE_DIR}/rtImageTest.tcl)
+    if(NOT no_valid)
+      set(_V -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/${TName}.png,:}")
+    endif()
+    if(NOT no_output)
+      set(_T -T ${VTK_TEST_OUTPUT_DIR})
+    endif()
   endif()
-endmacro(add_test_tcl)
+  set(_A -A ${VTK_SOURCE_DIR}/Wrapping/Tcl)
+
+  ExternalData_add_test(VTKData
+    NAME ${vtk-module}Tcl-${TName}
+    COMMAND ${VTK_TCL_EXE} ${rtImageTest}
+    ${CMAKE_CURRENT_SOURCE_DIR}/${TName}.tcl ${${TName}_ARGS}
+    ${_D} ${_T} ${_V} ${_A})
+endfunction()
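
    A module's test CMakeLists.txt then declares its tests through these
    macros. A hypothetical example (the test names are invented; the
    Charts/Core changes later in this diff show the real pattern):

        vtk_add_test_cxx(NO_VALID
          TestSimple.cxx
          )
        vtk_add_test_cxx(
          TestWithBaseline.cxx,-E15     # per-test error threshold
          TestNoBaseline.cxx,NO_VALID   # per-test override
          )
        vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)

        vtk_add_test_python(TestPythonThing.py NO_RT)
        vtk_add_test_tcl(TestTclThing.tcl NO_VALID)
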
diff --git a/CMake/vtkTestingObjectFactory.cmake b/CMake/vtkTestingObjectFactory.cmake
deleted file mode 100644
index efe8662..0000000
--- a/CMake/vtkTestingObjectFactory.cmake
+++ /dev/null
@@ -1,87 +0,0 @@
-SET(CMAKE_TESTDRIVER_BEFORE_TESTMAIN
-"
-    // Set defaults
-    vtkTestingInteractor::ValidBaseline =
-      std::string(\"${VTK_DATA_ROOT}\") +
-      std::string(\"/Baseline/\") +
-      std::string(\"${KIT}/\") +
-      std::string(cmakeGeneratedFunctionMapEntries[testToRun].name) +
-      std::string(\".png\");
-    vtkTestingInteractor::TempDirectory =
-      std::string(\"${VTK_TEST_OUTPUT_DIR}\");
-    vtkTestingInteractor::DataDirectory =
-      std::string(\"${VTK_DATA_ROOT}\");
-
-    int interactive = 0;
-    for (int ii = 0; ii < ac; ++ii)
-      {
-      if ( strcmp(av[ii],\"-I\") == 0)
-        {
-        interactive = 1;
-        continue;
-        }
-      if ( strcmp(av[ii],\"-V\") == 0 && ii < ac-1)
-        {
-        vtkTestingInteractor::ValidBaseline = std::string(av[ii+1]);
-        ++ii;
-        continue;
-        }
-      if ( strcmp(av[ii],\"-T\") == 0 && ii < ac-1)
-        {
-        vtkTestingInteractor::TempDirectory = std::string(av[ii+1]);
-        ++ii;
-        continue;
-        }
-      if ( strcmp(av[ii],\"-D\") == 0 && ii < ac-1)
-        {
-        vtkTestingInteractor::DataDirectory = std::string(av[ii+1]);
-        ++ii;
-        continue;
-        }
-      if ( strcmp(av[ii],\"-E\") == 0 && ii < ac-1)
-        {
-        vtkTestingInteractor::ErrorThreshold =
-          static_cast<double>(atof(av[ii+1]));
-        ++ii;
-        continue;
-        }
-      }
-    vtkSmartPointer<vtkTestingObjectFactory> factory = vtkSmartPointer<vtkTestingObjectFactory>::New();
-    if (!interactive)
-      {
-      // Disable any other overrides before registering our factory.
-      vtkObjectFactoryCollection *collection = vtkObjectFactory::GetRegisteredFactories();
-      collection->InitTraversal();
-      vtkObjectFactory *f = collection->GetNextItem();
-      while (f)
-        {
-        f->Disable(\"vtkRenderWindowInteractor\");
-        f = collection->GetNextItem();
-        }
-      vtkObjectFactory::RegisterFactory(factory);
-      }
-"
-)
-
-SET(CMAKE_TESTDRIVER_AFTER_TESTMAIN
-"
-   if (!interactive)
-     {
-     if (vtkTestingInteractor::TestReturnStatus != -1)
-        {
-        if( vtkTestingInteractor::TestReturnStatus != vtkTesting::PASSED)
-          {
-          result = EXIT_FAILURE;
-          }
-        else
-          {
-          result = EXIT_SUCCESS;
-          }
-        }
-      vtkObjectFactory::UnRegisterFactory(factory);
-      }
-"
-)
-CREATE_TEST_SOURCELIST(Tests ${KIT}CxxTests.cxx ${MyTests}
-                       EXTRA_INCLUDE ${vtkTestingRendering_SOURCE_DIR}/vtkTestingObjectFactory.h)
-
diff --git a/CMake/vtkTestingRenderingDriver.cmake b/CMake/vtkTestingRenderingDriver.cmake
new file mode 100644
index 0000000..a8baad4
--- /dev/null
+++ b/CMake/vtkTestingRenderingDriver.cmake
@@ -0,0 +1,74 @@
+SET(CMAKE_TESTDRIVER_BEFORE_TESTMAIN
+"
+    // Set defaults
+    vtkTestingInteractor::ValidBaseline = \"Use_-V_for_Baseline\";
+    vtkTestingInteractor::TempDirectory =
+      std::string(\"${VTK_TEST_OUTPUT_DIR}\");
+    vtkTestingInteractor::DataDirectory = std::string(\"Use_-D_for_Data\");
+
+    int interactive = 0;
+    for (int ii = 0; ii < ac; ++ii)
+      {
+      if (strcmp(av[ii], \"-I\") == 0)
+        {
+        interactive = 1;
+        continue;
+        }
+      if (strcmp(av[ii], \"-V\") == 0 && ii < ac-1)
+        {
+        vtkTestingInteractor::ValidBaseline = std::string(av[++ii]);
+        continue;
+        }
+      if (strcmp(av[ii], \"-T\") == 0 && ii < ac-1)
+        {
+        vtkTestingInteractor::TempDirectory = std::string(av[++ii]);
+        continue;
+        }
+      if (strcmp(av[ii], \"-D\") == 0 && ii < ac-1)
+        {
+        vtkTestingInteractor::DataDirectory = std::string(av[++ii]);
+        continue;
+        }
+      if (strcmp(av[ii], \"-E\") == 0 && ii < ac-1)
+        {
+        vtkTestingInteractor::ErrorThreshold =
+            static_cast<double>(atof(av[++ii]));
+        continue;
+        }
+      }
+    vtkSmartPointer<vtkTestingObjectFactory> factory = vtkSmartPointer<vtkTestingObjectFactory>::New();
+    if (!interactive)
+      {
+      // Disable any other overrides before registering our factory.
+      vtkObjectFactoryCollection *collection = vtkObjectFactory::GetRegisteredFactories();
+      collection->InitTraversal();
+      vtkObjectFactory *f = collection->GetNextItem();
+      while (f)
+        {
+        f->Disable(\"vtkRenderWindowInteractor\");
+        f = collection->GetNextItem();
+        }
+      vtkObjectFactory::RegisterFactory(factory);
+      }
+"
+)
+
+SET(CMAKE_TESTDRIVER_AFTER_TESTMAIN
+"
+   if (!interactive)
+     {
+     if (vtkTestingInteractor::TestReturnStatus != -1)
+        {
+        if (vtkTestingInteractor::TestReturnStatus != vtkTesting::PASSED)
+          {
+          result = EXIT_FAILURE;
+          }
+        else
+          {
+          result = EXIT_SUCCESS;
+          }
+        }
+      vtkObjectFactory::UnRegisterFactory(factory);
+      }
+"
+)
diff --git a/CMake/vtkVersion.cmake b/CMake/vtkVersion.cmake
index e16caf6..2dd2ea9 100644
--- a/CMake/vtkVersion.cmake
+++ b/CMake/vtkVersion.cmake
@@ -1,4 +1,4 @@
 # VTK version number components.
 set(VTK_MAJOR_VERSION 6)
-set(VTK_MINOR_VERSION 0)
+set(VTK_MINOR_VERSION 1)
 set(VTK_BUILD_VERSION 0)
diff --git a/CMake/vtkWrapHierarchy.cmake b/CMake/vtkWrapHierarchy.cmake
index 55dc3ac..44d3972 100644
--- a/CMake/vtkWrapHierarchy.cmake
+++ b/CMake/vtkWrapHierarchy.cmake
@@ -102,7 +102,6 @@ macro(VTK_WRAP_HIERARCHY TARGET OUTPUT_DIR SOURCES)
     ${VTK_CMAKE_DIR}/vtkWrapperInit.data.in
     ${vtk-module}Hierarchy.data
     COPY_ONLY
-    IMMEDIATE
     )
 
   # search through the deps to find modules we depend on
diff --git a/CMake/vtkWrapJava.cmake b/CMake/vtkWrapJava.cmake
index 47998dc..ff32301 100644
--- a/CMake/vtkWrapJava.cmake
+++ b/CMake/vtkWrapJava.cmake
@@ -160,7 +160,7 @@ macro(vtk_wrap_java3 TARGET SRC_LIST_NAME SOURCES)
     SET(dir ${VTK_WRAP_JAVA3_INIT_DIR})
   ENDIF(VTK_WRAP_JAVA3_INIT_DIR)
   CONFIGURE_FILE("${dir}/JavaDependencies.cmake.in"
-    "${CMAKE_CURRENT_BINARY_DIR}/JavaDependencies.cmake" IMMEDIATE @ONLY)
+    "${CMAKE_CURRENT_BINARY_DIR}/JavaDependencies.cmake" @ONLY)
 endmacro()
 
 # VS 6 does not like needing to run a huge number of custom commands
diff --git a/CMake/vtkWrapPython.cmake b/CMake/vtkWrapPython.cmake
index 4cce504..fc8b07c 100644
--- a/CMake/vtkWrapPython.cmake
+++ b/CMake/vtkWrapPython.cmake
@@ -139,23 +139,24 @@ macro(VTK_WRAP_PYTHON3 TARGET SRC_LIST_NAME SOURCES)
     ${VTK_CMAKE_DIR}/vtkWrapperInit.data.in
     ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.data
     COPY_ONLY
-    IMMEDIATE
     )
 
   add_custom_command(
     OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.cxx
+           ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}InitImpl.cxx
     DEPENDS ${VTK_WRAP_PYTHON_INIT_EXE}
       ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.data
     COMMAND ${VTK_WRAP_PYTHON_INIT_EXE}
     ARGS
       "${quote}${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.data${quote}"
       "${quote}${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.cxx${quote}"
+      "${quote}${CMAKE_CURRENT_BINARY_DIR}/${TARGET}InitImpl.cxx${quote}"
     COMMENT "Python Wrapping - generating ${TARGET}Init.cxx"
       ${verbatim}
     )
 
   # Create the Init File
-  set(${SRC_LIST_NAME} ${${SRC_LIST_NAME}} ${TARGET}Init.cxx)
+  set(${SRC_LIST_NAME} ${${SRC_LIST_NAME}} ${TARGET}InitImpl.cxx)
 
 endmacro(VTK_WRAP_PYTHON3)
 
@@ -340,22 +341,23 @@ macro(vtk_wrap_python TARGET SRC_LIST_NAME module)
     ${VTK_CMAKE_DIR}/vtkWrapperInit.data.in
     ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.data
     COPY_ONLY
-    IMMEDIATE
     )
 
   add_custom_command(
     OUTPUT ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.cxx
+           ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}InitImpl.cxx
     DEPENDS ${VTK_WRAP_PYTHON_INIT_EXE}
       ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.data
     COMMAND ${VTK_WRAP_PYTHON_INIT_EXE}
     ARGS
       "${quote}${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.data${quote}"
       "${quote}${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.cxx${quote}"
+      "${quote}${CMAKE_CURRENT_BINARY_DIR}/${TARGET}InitImpl.cxx${quote}"
     COMMENT "Python Wrapping - generating ${TARGET}Init.cxx"
       ${verbatim}
     )
 
   # Create the Init File
-  set(${SRC_LIST_NAME} ${${SRC_LIST_NAME}} ${TARGET}Init.cxx)
+  set(${SRC_LIST_NAME} ${${SRC_LIST_NAME}} ${TARGET}InitImpl.cxx)
 
 endmacro()
diff --git a/CMake/vtkWrapPythonSIP.cmake b/CMake/vtkWrapPythonSIP.cmake
index d573faa..42b3cc1 100644
--- a/CMake/vtkWrapPythonSIP.cmake
+++ b/CMake/vtkWrapPythonSIP.cmake
@@ -94,15 +94,15 @@ function(VTK_CREATE_SIP_MODULE KIT WRAP_SRCS)
     TARGET_LINK_LIBRARIES(vtk${KIT}PythonSIP vtk${KIT}PythonD)
     get_target_property(lib_loc vtk${KIT}PythonSIP LOCATION)
     ADD_CUSTOM_COMMAND(TARGET vtk${KIT}PythonSIP POST_BUILD
-      COMMAND ${CMAKE_COMMAND} -E copy "${lib_loc}" "${VTK_BINARY_DIR}/Wrapping/Python/vtk/"
+      COMMAND ${CMAKE_COMMAND} -E copy "${lib_loc}" "${VTK_BUILD_PYTHON_MODULE_DIR}/vtk/"
       )
 
-    IF(VTK_INSTALL_PYTHON_USING_CMAKE AND NOT VTK_INSTALL_NO_LIBRARIES)
+    IF(NOT VTK_INSTALL_NO_LIBRARIES)
       INSTALL(TARGETS vtk${KIT}PythonSIP
         EXPORT ${VTK_INSTALL_EXPORT_NAME}
-        RUNTIME DESTINATION ${VTK_INSTALL_BIN_DIR_CM24} COMPONENT RuntimeLibraries
-        LIBRARY DESTINATION ${VTK_INSTALL_LIB_DIR_CM24} COMPONENT RuntimeLibraries
-        ARCHIVE DESTINATION ${VTK_INSTALL_LIB_DIR_CM24} COMPONENT Development)
-    ENDIF(VTK_INSTALL_PYTHON_USING_CMAKE AND NOT VTK_INSTALL_NO_LIBRARIES)
+        RUNTIME DESTINATION ${VTK_INSTALL_RUNTIME_DIR} COMPONENT RuntimeLibraries
+        LIBRARY DESTINATION ${VTK_INSTALL_LIBRARY_DIR} COMPONENT RuntimeLibraries
+        ARCHIVE DESTINATION ${VTK_INSTALL_ARCHIVE_DIR} COMPONENT Development)
+    ENDIF(NOT VTK_INSTALL_NO_LIBRARIES)
   ENDIF(NOT SIP_EXECUTABLE)
 endfunction(VTK_CREATE_SIP_MODULE)
diff --git a/CMake/vtkWrapTcl.cmake b/CMake/vtkWrapTcl.cmake
index a325f84..6317abb 100644
--- a/CMake/vtkWrapTcl.cmake
+++ b/CMake/vtkWrapTcl.cmake
@@ -177,7 +177,6 @@ MACRO(VTK_WRAP_TCL3 TARGET SRC_LIST_NAME SOURCES COMMANDS)
     ${VTK_CMAKE_DIR}/vtkWrapperInit.data.in
     ${CMAKE_CURRENT_BINARY_DIR}/${TARGET}Init.data
     COPY_ONLY
-    IMMEDIATE
     )
 
   ADD_CUSTOM_COMMAND(
diff --git a/CMakeLists.txt b/CMakeLists.txt
index d67f6b7..a656225 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -1,4 +1,17 @@
-cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
+cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)
+
+if(POLICY CMP0022)
+  cmake_policy(SET CMP0022 NEW)
+endif()
+
+# Eliminate a warning when building on Windows that relates
+# to static linking of Qt executables to qtmain.lib.
+# This policy was introduced in CMake version 2.8.11.
+# CMake version 2.8.11.2 warns when the policy is not set
+# and uses OLD behavior.
+if(POLICY CMP0020)
+  cmake_policy(SET CMP0020 NEW)
+endif()
 
 project(VTK)
 
@@ -16,6 +29,7 @@ set(VTK_CMAKE_DIR "${VTK_SOURCE_DIR}/CMake")
 set(CMAKE_MODULE_PATH ${VTK_CMAKE_DIR} ${CMAKE_MODULE_PATH})
 
 include(vtkModuleMacros)
+include(vtkExternalData)
 
 # Set a default build type if none was specified
 if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
@@ -26,6 +40,12 @@ if(NOT CMAKE_BUILD_TYPE AND NOT CMAKE_CONFIGURATION_TYPES)
     "MinSizeRel" "RelWithDebInfo")
 endif()
 
+# Test input data staging directory.
+set(VTK_TEST_DATA_DIR "${ExternalData_BINARY_ROOT}/Testing")
+
+# Test input data directory.
+set(VTK_TEST_INPUT_DIR "${VTK_SOURCE_DIR}/Testing/Data")
+
 # Test output directory.
 set(VTK_TEST_OUTPUT_DIR "${VTK_BINARY_DIR}/Testing/Temporary")
 
@@ -92,20 +112,25 @@ endif()
 if(NOT VTK_MODULES_DIR)
   set(VTK_MODULES_DIR "${VTK_BINARY_DIR}/${VTK_INSTALL_PACKAGE_DIR}/Modules")
 endif()
-
+if(NOT VTK_WWW_DIR)
+  set(VTK_WWW_DIR "${VTK_BINARY_DIR}/www")
+endif()
+if(NOT VTK_INSTALL_PYTHON_MODULE_DIR)
+  set (VTK_INSTALL_PYTHON_MODULE_DIR "-NOTFOUND" CACHE
+       PATH "Directory where python modules will be installed")
+  mark_as_advanced(VTK_INSTALL_PYTHON_MODULE_DIR)
+endif()
+if(NOT VTK_BUILD_PYTHON_MODULE_DIR)
+  set (VTK_BUILD_PYTHON_MODULE_DIR "-NOTFOUND" CACHE
+       PATH "Directory where python modules will be put inside the build tree")
+  mark_as_advanced(VTK_BUILD_PYTHON_MODULE_DIR)
+endif()
 if (CMAKE_CROSSCOMPILING AND NOT COMPILE_TOOLS_IMPORTED)
   # if CMAKE_CROSSCOMPILING is true, we need to import build-tools targets.
   find_package(VTKCompileTools REQUIRED)
   set (COMPILE_TOOLS_IMPORTED TRUE)
 endif()
 
-# FIXME: These will be removed, used by vtkzlib etc
-set(VTK_INSTALL_BIN_DIR_CM24 ${VTK_INSTALL_RUNTIME_DIR})
-set(VTK_INSTALL_LIB_DIR_CM24 ${VTK_INSTALL_LIBRARY_DIR})
-set(VTK_INSTALL_PACKAGE_DIR_CM24 ${VTK_INSTALL_PACKAGE_DIR})
-set(VTK_INSTALL_INCLUDE_DIR_CM24 ${VTK_INSTALL_INCLUDE_DIR})
-set(VTK_INSTALL_DOXYGEN_DIR_CM24 ${VTK_INSTALL_DATA_DIR}/doxygen)
-
 #-----------------------------------------------------------------------------
 # The third party macros are still used in one or two third party builds.
 include(vtkThirdParty)
@@ -115,6 +140,14 @@ include(vtkCompilerExtras)
 include(vtkBuildPath)
 
 #-----------------------------------------------------------------------------
+if(NOT EXISTS "${VTK_SOURCE_DIR}/.ExternalData/README.rst")
+  # This file is always present in version-controlled source trees
+  # so we must have been extracted from a source tarball with no
+  # data objects needed for testing.  Turn off tests by default
+  # since enabling them requires network access or manual data
+  # store configuration.
+  option(BUILD_TESTING "Build the testing tree." OFF)
+endif()
 include(CTest)
 
 #-----------------------------------------------------------------------------
@@ -165,36 +198,12 @@ include(vtkDetermineCompilerFlags)
 add_definitions(-DVTK_IN_VTK)
 
 #-----------------------------------------------------------------------------
-# Configure the default VTK_DATA_ROOT for the location of VTKData.  To get
-# the VTKData repository from git, issue the following command:
-#
-# git clone git://vtk.org/VTKData.git
-#
-# To see the web hosted repository, visit http://vtk.org/VTKData.git
-FIND_PATH(VTK_DATA_ROOT VTKData.readme
-  ${VTK_SOURCE_DIR}/VTKData
-  ${VTK_SOURCE_DIR}/../VTKData
-  ${VTK_SOURCE_DIR}/../../VTKData
-  $ENV{VTK_DATA_ROOT}
-  DOC "The repository for data used for testing.  To obtain from git: \"git clone git://vtk.org/VTKData.git\""
-  )
-mark_as_advanced(VTK_DATA_ROOT)
+if(BUILD_TESTING)
+  include(vtkLegacyData)
+endif()
 
-#-----------------------------------------------------------------------------
-# Configure the default VTK_LARGE_DATA_ROOT for the location of VTKLargeData.
-# To get the VTKLargeData repository from git, issue the following command:
-#
-# git clone git://vtk.org/VTKLargeData.git
-#
-# To see the web hosted repository, visit http://vtk.org/VTKLargeData.git
-FIND_PATH(VTK_LARGE_DATA_ROOT VTKLargeData.readme
-  ${VTK_SOURCE_DIR}/VTKLargeData
-  ${VTK_SOURCE_DIR}/../VTKLargeData
-  ${VTK_SOURCE_DIR}/../../VTKLargeData
-  $ENV{VTK_LARGE_DATA_ROOT}
-  DOC "The repository for large data used for testing.  To obtain from git: \"git clone git://vtk.org/VTKLargeData.git\""
-  )
-mark_as_advanced(VTK_LARGE_DATA_ROOT)
+# Provide an option for tests requiring "large" input data
+option(VTK_USE_LARGE_DATA "Enable tests requiring \"large\" data" OFF)
 
 #-----------------------------------------------------------------------------
 # Platform configuration tests.
@@ -349,11 +358,6 @@ option(VTK_MAKE_INSTANTIATORS "Should all modules build instantiators" OFF)
 mark_as_advanced(VTK_MAKE_INSTANTIATORS)
 
 #----------------------------------------------------------------------
-# Find the GhostScript executable for GL2PS tests.
-find_program(VTK_GHOSTSCRIPT_EXECUTABLE gs gswin32c gsos2)
-mark_as_advanced(VTK_GHOSTSCRIPT_EXECUTABLE)
-
-#----------------------------------------------------------------------
 # Load the module DAG, assess all modules etc.
 include(vtkModuleTop)
 
@@ -412,3 +416,7 @@ endif()
 unset(_vtk_targets)
 unset(_vtk_compiletools_targets)
 unset(_vtk_all_targets)
+
+# Create target to download data from the VTKData group.  This must come after
+# all tests have been added that reference the group, so we put it last.
+ExternalData_Add_Target(VTKData)
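
    The new testing infrastructure stages input and baseline files through
    CMake's ExternalData module: tests reference content by DATA{} expressions
    and a single target fetches everything they mention. A sketch with
    invented test and file names:

        ExternalData_add_test(VTKData
          NAME vtkMyModuleCxx-TestSomething
          COMMAND vtkMyModuleCxxTests TestSomething
            -D ${VTK_TEST_DATA_DIR}
            -V "DATA{${CMAKE_CURRENT_SOURCE_DIR}/Testing/Data/Baseline/TestSomething.png,:}")
        # After all such tests are declared:
        ExternalData_Add_Target(VTKData)
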
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
new file mode 100644
index 0000000..bb656d1
--- /dev/null
+++ b/CONTRIBUTING.md
@@ -0,0 +1,16 @@
+Contributing
+------------
+
+Our project uses Gerrit for code review, and CDash at Home to test proposed
+patches before they are merged. Please check our [development][Development]
+guide for details on developing and contributing to the project.
+
+Our [wiki][Wiki] is used to document features, flesh out designs and host other
+documentation. Our API is [documented using Doxygen][Doxygen] with updated
+documentation generated nightly. We have several [mailing lists][MailingLists]
+to coordinate development and to provide support.
+
+  [Development]: https://docs.google.com/a/kitware.com/document/d/1nzinw-dR5JQRNi_gb8qwLL5PnkGMK2FETlQGLr10tZw/view "Development guide"
+  [Wiki]:  http://www.vtk.org/Wiki/VTK "VTK wiki"
+  [Doxygen]: http://www.vtk.org/doc/nightly/html "API documentation"
+  [MailingLists]: http://www.vtk.org/VTK/help/mailing.html "Mailing Lists"
diff --git a/Charts/Core/CMakeLists.txt b/Charts/Core/CMakeLists.txt
index 8373ae1..adb7ac2 100644
--- a/Charts/Core/CMakeLists.txt
+++ b/Charts/Core/CMakeLists.txt
@@ -1,6 +1,7 @@
 set(Module_SRCS
   vtkAxis.cxx
   vtkAxisExtended.cxx
+  vtkCategoryLegend.cxx
   vtkChart.cxx
   vtkChartHistogram2D.cxx
   vtkChartLegend.cxx
@@ -22,7 +23,9 @@ set(Module_SRCS
   vtkPiecewisePointHandleItem.cxx
   vtkPlot.cxx
   vtkPlot3D.cxx
+  vtkPlotBag.cxx
   vtkPlotBar.cxx
+  vtkPlotFunctionalBag.cxx
   vtkPlotGrid.cxx
   vtkPlotHistogram2D.cxx
   vtkPlotLine.cxx
diff --git a/Charts/Core/Testing/Cxx/CMakeLists.txt b/Charts/Core/Testing/Cxx/CMakeLists.txt
index 6fccf54..106dc18 100644
--- a/Charts/Core/Testing/Cxx/CMakeLists.txt
+++ b/Charts/Core/Testing/Cxx/CMakeLists.txt
@@ -1,88 +1,61 @@
-# if we have rendering and views add the following tests
-  # add tests that do not require data
-  set(MyTests
-    TestContextScene.cxx
-    TestControlPointsItem.cxx
-    TestControlPointsItemEvents.cxx
-    )
+# add tests that do not require data
+vtk_add_test_cxx(NO_DATA NO_VALID
+  TestContextScene.cxx
+  TestControlPointsItem.cxx
+  TestControlPointsItemEvents.cxx
+  )
 
-  if(VTK_DATA_ROOT)
-    # add tests that require data
-    set(MyTests ${MyTests}
-      TestAxes.cxx
-      TestBarGraph.cxx
-      TestBarGraphHorizontal.cxx
-      TestColorTransferFunction.cxx
-      TestChartMatrix.cxx
-      TestChartsOn3D.cxx
-      TestChartXYZ.cxx
-      TestContext.cxx
-      TestContextImage.cxx
-      TestControlPointsHandleItem.cxx
-      TestDiagram.cxx
-      TestHistogram2D.cxx
-      TestInteractiveChartXYZ.cxx
-      TestLegendHiddenPlots.cxx
-      TestLinePlot.cxx
-      TestLinePlot3D.cxx
-      TestLinePlotAxisFonts.cxx
-      TestLinePlot2.cxx
-      TestLinePlotInteraction.cxx
-      TestLinePlotSelection.cxx
-      TestLinePlotSelection2.cxx
-      TestMultipleChartRenderers.cxx
-      TestMultipleRenderers.cxx
-      TestMultipleScalarsToColors.cxx
-      TestParallelCoordinates.cxx
-      TestPieChart.cxx
-      TestPlotMatrix.cxx
-      TestScalarsToColors.cxx
-      TestScatterPlot.cxx
-      TestScatterPlotMatrix.cxx
-      TestScatterPlotMatrixVehicles.cxx
-      TestScatterPlotMatrixVisible.cxx
-      TestScientificPlot.cxx
-      TestStackedBarGraph.cxx
-      TestStackedPlot.cxx
-      TestSurfacePlot.cxx
-      )
-    # Set the tolerance higher for a few tests that need it
-    set(TestGLSLError 12)
-    set(TestChartsOn3DError 16)
-    set(TestLinePlotError 25)
-    set(TestLinePlot2Error 25)
-    set(TestLinePlotInteractionError 25)
-    set(TestMultipleRenderersError 25)
-    set(TestMultipleScalarsToColorsError 25)
-    set(TestParallelCoordinatesError 15)
-    set(TestControlPointsHandleItemError 30)
-    set(TestColorTransferFunctionError 80)
-  endif()
+set(TestChartUnicode_ARGS DATA{../Data/Fonts/DejaVuSans.ttf})
+set(TestContextUnicode_ARGS DATA{../Data/Fonts/DejaVuSans.ttf})
 
-  # Use the testing object factory, to reduce boilerplate code in tests.
-  include(vtkTestingObjectFactory)
-  vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
+# add tests that require data
+vtk_add_test_cxx(
+  TestAxes.cxx
+  TestBagPlot.cxx
+  TestBarGraph.cxx
+  TestBarGraphHorizontal.cxx
+  TestCategoryLegend.cxx
+  TestColorTransferFunction.cxx,-E80
+  TestChartDouble.cxx
+  TestChartMatrix.cxx
+  TestChartUnicode.cxx,-E25
+  TestChartsOn3D.cxx,-E16
+  TestChartXYZ.cxx
+  TestContext.cxx
+  TestContextImage.cxx
+  TestContextUnicode.cxx
+  TestControlPointsHandleItem.cxx,-E30
+  TestDiagram.cxx
+  TestFunctionalBagPlot.cxx
+  TestHistogram2D.cxx
+  TestInteractiveChartXYZ.cxx
+  TestLegendHiddenPlots.cxx
+  TestLinePlot.cxx,-E25
+  TestLinePlotDouble.cxx
+  TestLinePlotDouble2.cxx
+  TestLinePlot3D.cxx
+  TestLinePlotAxisFonts.cxx
+  TestLinePlot2.cxx,-E25
+  TestLinePlotInteraction.cxx,-E25
+  TestLinePlotSelection.cxx
+  TestLinePlotSelection2.cxx
+  TestMultipleChartRenderers.cxx
+  TestMultipleRenderers.cxx,-E25
+  TestMultipleScalarsToColors.cxx,-E25
+  TestParallelCoordinates.cxx,-E15
+  TestParallelCoordinatesDouble.cxx,-E15
+  TestPieChart.cxx
+  TestPlotMatrix.cxx
+  TestScalarsToColors.cxx
+  TestScatterPlot.cxx
+  TestScatterPlotMatrix.cxx
+  TestScatterPlotMatrixVehicles.cxx
+  TestScatterPlotMatrixVisible.cxx
+  TestScientificPlot.cxx
+  TestStackedBarGraph.cxx
+  TestStackedPlot.cxx
+  TestSurfacePlot.cxx
+  TestZoomAxis.cxx
+  )
 
-  set(TestsToRun ${Tests})
-  list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-  # Add all the executables
-  foreach(test ${TestsToRun})
-    get_filename_component(TName ${test} NAME_WE)
-    if(VTK_DATA_ROOT)
-      if(${${TName}Error})
-        set(_error_threshold ${${TName}Error})
-      else()
-        set(_error_threshold 10)
-      endif()
-      add_test(NAME ${vtk-module}Cxx-${TName}
-        COMMAND ${vtk-module}CxxTests ${TName}
-          -D ${VTK_DATA_ROOT}
-          -T ${VTK_TEST_OUTPUT_DIR}
-          -V Baseline/Charts/${TName}.png
-          -E ${_error_threshold})
-    else()
-      add_test(NAME ${vtk-module}Cxx-${TName}
-        COMMAND ${vtk-module}CxxTests ${TName})
-    endif()
-  endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
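
The hand-rolled foreach()/add_test() loop above is replaced by the vtk_add_test_cxx and vtk_test_cxx_executable macros, and the per-test image-comparison tolerance that used to live in a <TestName>Error variable now rides on the source file name after a comma (e.g. TestColorTransferFunction.cxx,-E80 above). A minimal sketch of that mapping, assuming a hypothetical TestFoo.cxx that needs a tolerance of 25:

    # Old style: threshold variable consumed by the manual add_test() loop,
    # which forwarded it to the test as "-E ${_error_threshold}"
    set(TestFooError 25)

    # New style: the extra argument is appended after a comma and passed
    # through to the test executable by vtk_add_test_cxx
    vtk_add_test_cxx(
      TestFoo.cxx,-E25
      )
    vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
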
diff --git a/Charts/Core/Testing/Cxx/TestBagPlot.cxx b/Charts/Core/Testing/Cxx/TestBagPlot.cxx
new file mode 100644
index 0000000..51a4fd5
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestBagPlot.cxx
@@ -0,0 +1,87 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestBagPlot.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkChartXY.h"
+#include "vtkContextScene.h"
+#include "vtkContextView.h"
+#include "vtkDoubleArray.h"
+#include "vtkIntArray.h"
+#include "vtkNew.h"
+#include "vtkPlotBag.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkTable.h"
+
+//----------------------------------------------------------------------------
+int TestBagPlot(int, char * [])
+{
+  // Set up a 2D scene, add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(400, 400);
+  view->GetRenderWindow()->SetMultiSamples(0);
+  vtkNew<vtkChartXY> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+  chart->SetShowLegend(true);
+
+  // Create the vtkPlotBag input table:
+  // a 20 x 20 2D grid of points.
+  int numDataI = 20;
+  int numDataJ = 20;
+
+  vtkNew<vtkIntArray> arrX;
+  arrX->SetName("X");
+
+  vtkNew<vtkDoubleArray> arrY;
+  arrY->SetName("Y");
+
+  vtkNew<vtkDoubleArray> arrDensity;
+  arrDensity->SetName("Density");
+
+  vtkNew<vtkTable> table;
+  table->AddColumn(arrX.GetPointer());
+  table->AddColumn(arrY.GetPointer());
+  table->AddColumn(arrDensity.GetPointer());
+
+  table->SetNumberOfRows(numDataI * numDataJ);
+
+  // Fill the table
+  for (int j = 0; j < numDataJ; ++j)
+    {
+    for (int i = 0; i < numDataI; ++i)
+      {
+      table->SetValue(i + j * numDataI, 0, i); //X
+      table->SetValue(i + j * numDataI, 1, j); //Y
+      double dx = (numDataI / 2. - i) / (numDataI / 2.);
+      double dy = (numDataJ / 2. - j) / (numDataJ / 2.);
+      double d = 1. - sqrt(dx * dx + dy * dy);
+      d = floor(d * 100.) / 100.; // truncate; the offset below keeps densities unique
+      d += (i + j * numDataI) / (double)(1000. * numDataI * numDataJ);
+      table->SetValue(i + j * numDataI, 2, d); // Density
+      }
+    }
+
+  vtkNew<vtkPlotBag> bagPlot;
+  chart->AddPlot(bagPlot.GetPointer());
+  bagPlot->SetInputData(table.GetPointer(), arrX->GetName(),
+    arrY->GetName(), arrDensity->GetName());
+  bagPlot->SetColor(255, 0, 0, 255);
+  bagPlot->SetMarkerSize(4);
+
+  // Render the scene
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/Charts/Core/Testing/Cxx/TestCategoryLegend.cxx b/Charts/Core/Testing/Cxx/TestCategoryLegend.cxx
new file mode 100644
index 0000000..1c2c2a4
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestCategoryLegend.cxx
@@ -0,0 +1,75 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestCategoryLegend.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCategoryLegend.h"
+
+#include "vtkColorSeries.h"
+#include "vtkLookupTable.h"
+#include "vtkNew.h"
+#include "vtkVariantArray.h"
+
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkContextScene.h"
+#include "vtkContextTransform.h"
+#include "vtkContextView.h"
+
+#include "vtkRegressionTestImage.h"
+
+//----------------------------------------------------------------------------
+int TestCategoryLegend(int argc, char* argv[])
+{
+  vtkNew<vtkVariantArray> values;
+  values->InsertNextValue(vtkVariant("a"));
+  values->InsertNextValue(vtkVariant("b"));
+  values->InsertNextValue(vtkVariant("c"));
+
+  vtkNew<vtkLookupTable> lut;
+  for (int i = 0; i < values->GetNumberOfTuples(); ++i)
+    {
+    lut->SetAnnotation(values->GetValue(i), values->GetValue(i).ToString());
+    }
+
+  vtkNew<vtkColorSeries> colorSeries;
+  colorSeries->SetColorScheme(vtkColorSeries::BREWER_QUALITATIVE_SET3);
+  colorSeries->BuildLookupTable(lut.GetPointer());
+
+  vtkNew<vtkCategoryLegend> legend;
+  legend->SetScalarsToColors(lut.GetPointer());
+  legend->SetValues(values.GetPointer());
+  legend->SetTitle("legend");
+
+  vtkNew<vtkContextTransform> trans;
+  trans->SetInteractive(true);
+  trans->AddItem(legend.GetPointer());
+  trans->Translate(180, 70);
+
+  vtkNew<vtkContextView> contextView;
+  contextView->GetScene()->AddItem(trans.GetPointer());
+  contextView->GetRenderer()->SetBackground(1.0, 1.0, 1.0);
+  contextView->GetRenderWindow()->SetSize(300,200);
+  contextView->GetRenderWindow()->SetMultiSamples(0);
+  contextView->GetRenderWindow()->Render();
+
+  int retVal = vtkRegressionTestImage(contextView->GetRenderWindow());
+  if (retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    contextView->GetRenderWindow()->Render();
+    contextView->GetInteractor()->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return !retVal;
+}
diff --git a/Charts/Core/Testing/Cxx/TestChartDouble.cxx b/Charts/Core/Testing/Cxx/TestChartDouble.cxx
new file mode 100644
index 0000000..12c48bd
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestChartDouble.cxx
@@ -0,0 +1,85 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestChartDouble.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkRenderWindow.h"
+#include "vtkSmartPointer.h"
+#include "vtkChartXY.h"
+#include "vtkPlot.h"
+#include "vtkTable.h"
+#include "vtkDoubleArray.h"
+#include "vtkContextView.h"
+#include "vtkContextScene.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkNew.h"
+
+#include "vtkAxis.h"
+
+//----------------------------------------------------------------------------
+int TestChartDouble(int, char *[])
+{
+  // Set up a 2D scene, add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(400, 300);
+  vtkNew<vtkChartXY> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+
+  // Create a table with some points in it...
+  vtkNew<vtkTable> table;
+  vtkNew<vtkDoubleArray> arrX;
+  arrX->SetName("X");
+  table->AddColumn(arrX.GetPointer());
+  vtkNew<vtkDoubleArray> arrC;
+  arrC->SetName("f1");
+  table->AddColumn(arrC.GetPointer());
+  vtkNew<vtkDoubleArray> arrS;
+  arrS->SetName("f2");
+  table->AddColumn(arrS.GetPointer());
+  vtkNew<vtkDoubleArray> arrS2;
+  arrS2->SetName("f3");
+  table->AddColumn(arrS2.GetPointer());
+  // Test charting with a few more points...
+  int numPoints = 69;
+  float inc = 7.5 / (numPoints - 1);
+  table->SetNumberOfRows(numPoints);
+  for (int i = 0; i < numPoints; ++i)
+    {
+    double x(i * inc + 0.2);
+    table->SetValue(i, 0, x);
+    table->SetValue(i, 1, 1.0e-80 * (cos(x - 1.0) + sin(x - 3.14 / 4.0)));
+    table->SetValue(i, 2, 1.0e-80 * sin(x) * 1e-12);
+    table->SetValue(i, 3, 1.0e-80 * sin(x - 1.0));
+    }
+
+  // Add multiple line plots, setting the colors etc
+  vtkPlot *line = chart->AddPlot(vtkChart::POINTS);
+  line->SetInputData(table.GetPointer(), 0, 1);
+  line = chart->AddPlot(vtkChart::LINE);
+  line->SetInputData(table.GetPointer(), 0, 2);
+  // Put this plot in a different corner - it is orders of magnitude smaller.
+  chart->SetPlotCorner(line, 1);
+  line = chart->AddPlot(vtkChart::BAR);
+  line->SetInputData(table.GetPointer(), 0, 3);
+
+  chart->GetAxis(vtkAxis::LEFT)->SetTitle("A tiny range");
+  chart->GetAxis(vtkAxis::BOTTOM)->SetTitle("A normal range");
+  chart->GetAxis(vtkAxis::RIGHT)->SetTitle("An even tinier range");
+
+  // Render the scene and compare the image to a reference image
+  view->GetRenderWindow()->SetMultiSamples(0);
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/Charts/Core/Testing/Cxx/TestChartUnicode.cxx b/Charts/Core/Testing/Cxx/TestChartUnicode.cxx
new file mode 100644
index 0000000..df9543e
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestChartUnicode.cxx
@@ -0,0 +1,91 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestChartUnicode.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkAxis.h"
+#include "vtkChartXY.h"
+#include "vtkContextScene.h"
+#include "vtkContextView.h"
+#include "vtkFloatArray.h"
+#include "vtkNew.h"
+#include "vtkPlot.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkSmartPointer.h"
+#include "vtkTable.h"
+#include "vtkTextProperty.h"
+
+#include <string>
+
+//----------------------------------------------------------------------------
+int TestChartUnicode(int argc, char *argv[])
+{
+  if (argc < 2)
+    {
+    cout << "Missing font filename." << endl;
+    return EXIT_FAILURE;
+    }
+
+  std::string fontFile(argv[1]);
+
+  // Set up a 2D scene, add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(400, 300);
+  vtkNew<vtkChartXY> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+
+  // Exercise the support for extended characters using UTF-8 encoded strings.
+  chart->GetTitleProperties()->SetFontFamily(VTK_FONT_FILE);
+  chart->GetTitleProperties()->SetFontFile(fontFile.c_str());
+  chart->SetTitle("\xcf\x85\xcf\x84\xce\xba");
+
+  vtkAxis *axis1 = chart->GetAxis(0);
+  axis1->GetTitleProperties()->SetFontFamily(VTK_FONT_FILE);
+  axis1->GetTitleProperties()->SetFontFile(fontFile.c_str());
+  axis1->SetTitle("\xcf\x87(m)");
+
+  vtkAxis *axis2 = chart->GetAxis(1);
+  axis2->GetTitleProperties()->SetFontFamily(VTK_FONT_FILE);
+  axis2->GetTitleProperties()->SetFontFile(fontFile.c_str());
+  axis2->SetTitle("\xcf\x80\xcf\x86");
+
+  // Create a table with some points in it...
+  vtkNew<vtkTable> table;
+  vtkNew<vtkFloatArray> arrX;
+  arrX->SetName("X Axis");
+  table->AddColumn(arrX.GetPointer());
+  vtkNew<vtkFloatArray> arrC;
+  arrC->SetName("Cosine");
+  table->AddColumn(arrC.GetPointer());
+  int numPoints = 69;
+  float inc = 7.5 / (numPoints - 1);
+  table->SetNumberOfRows(numPoints);
+  for (int i = 0; i < numPoints; ++i)
+    {
+    table->SetValue(i, 0, i * inc);
+    table->SetValue(i, 1, cos(i * inc) + sin(i * (inc - 3.14)));
+    }
+
+  // Add multiple line plots, setting the colors etc
+  vtkPlot *line = chart->AddPlot(vtkChart::LINE);
+  line->SetInputData(table.GetPointer(), 0, 1);
+  line->SetColor(42, 55, 69, 255);
+
+  // Render the scene and compare the image to a reference image
+  view->GetRenderWindow()->SetMultiSamples(0);
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/Charts/Core/Testing/Cxx/TestContextUnicode.cxx b/Charts/Core/Testing/Cxx/TestContextUnicode.cxx
index ed84f4a..8d631df 100644
--- a/Charts/Core/Testing/Cxx/TestContextUnicode.cxx
+++ b/Charts/Core/Testing/Cxx/TestContextUnicode.cxx
@@ -23,12 +23,15 @@
 #include "vtkContextScene.h"
 #include "vtkPen.h"
 #include "vtkBrush.h"
+#include "vtkTestUtilities.h"
 #include "vtkTextProperty.h"
 
 #include "vtkUnicodeString.h"
 
 #include "vtkRegressionTestImage.h"
 
+#include <string>
+
 //----------------------------------------------------------------------------
 class ContextUnicode : public vtkContextItem
 {
@@ -37,15 +40,25 @@ public:
   vtkTypeMacro(ContextUnicode, vtkContextItem);
   // Paint event for the chart, called whenever the chart needs to be drawn
   virtual bool Paint(vtkContext2D *painter);
+  std::string FontFile;
 };
 
 //----------------------------------------------------------------------------
 int TestContextUnicode(int argc, char * argv [])
 {
+  if (argc < 2)
+    {
+    cout << "Missing font filename." << endl;
+    return EXIT_FAILURE;
+    }
+
+  std::string fontFile(argv[1]);
+
   // Set up a 2D context view, context test object and add it to the scene
   vtkSmartPointer<vtkContextView> view = vtkSmartPointer<vtkContextView>::New();
   view->GetRenderWindow()->SetSize(200, 100);
   vtkSmartPointer<ContextUnicode> test = vtkSmartPointer<ContextUnicode>::New();
+  test->FontFile = fontFile;
   view->GetScene()->AddItem(test);
 
   view->GetRenderWindow()->SetMultiSamples(0);
@@ -70,10 +83,12 @@ bool ContextUnicode::Paint(vtkContext2D *painter)
   painter->GetTextProp()->SetJustificationToCentered();
   painter->GetTextProp()->SetColor(0.0, 0.0, 0.0);
   painter->GetTextProp()->SetFontSize(24);
+  painter->GetTextProp()->SetFontFamily(VTK_FONT_FILE);
+  painter->GetTextProp()->SetFontFile(this->FontFile.c_str());
   painter->DrawString(70, 20, "Angstrom");
   painter->DrawString(150, 20, vtkUnicodeString::from_utf8("\xe2\x84\xab"));
   painter->DrawString(100, 80,
-                      vtkUnicodeString::from_utf8("a\xce\xb1\xe0\xb8\x81\xf0\x90\x80\x80"));
+                      vtkUnicodeString::from_utf8("a\xce\xb1"));
   painter->DrawString(100, 50,
                       vtkUnicodeString::from_utf8("\xce\xb1\xce\xb2\xce\xb3"));
   return true;
diff --git a/Charts/Core/Testing/Cxx/TestControlPointsHandleItem.cxx b/Charts/Core/Testing/Cxx/TestControlPointsHandleItem.cxx
index adac75d..dd14ca8 100644
--- a/Charts/Core/Testing/Cxx/TestControlPointsHandleItem.cxx
+++ b/Charts/Core/Testing/Cxx/TestControlPointsHandleItem.cxx
@@ -31,613 +31,645 @@
 #include "vtkColorTransferFunction.h"
 #include "vtkCompositeTransferFunctionItem.h"
 
-static char TestControlPointsHandleItemLog[] =
+static const char *TestControlPointsHandleItemLog =
 "# StreamVersion 1\n"
-"RenderEvent 0 0 0 0 0 0 0\n"
 "TimerEvent 0 0 0 0 0 0 0\n"
-"EnterEvent 249 3 0 0 0 0 0\n"
-"MouseMoveEvent 193 128 0 0 0 0 0\n"
-"MouseMoveEvent 193 129 0 0 0 0 0\n"
-"MouseMoveEvent 193 130 0 0 0 0 0\n"
-"MouseMoveEvent 193 132 0 0 0 0 0\n"
-"MouseMoveEvent 193 134 0 0 0 0 0\n"
-"MouseMoveEvent 194 134 0 0 0 0 0\n"
-"MouseMoveEvent 194 136 0 0 0 0 0\n"
-"MouseMoveEvent 194 137 0 0 0 0 0\n"
-"MouseMoveEvent 195 138 0 0 0 0 0\n"
-"MouseMoveEvent 195 139 0 0 0 0 0\n"
-"MouseMoveEvent 195 140 0 0 0 0 0\n"
-"MouseMoveEvent 196 140 0 0 0 0 0\n"
-"MouseMoveEvent 196 142 0 0 0 0 0\n"
-"MouseMoveEvent 196 143 0 0 0 0 0\n"
-"MouseMoveEvent 196 144 0 0 0 0 0\n"
-"MouseMoveEvent 196 145 0 0 0 0 0\n"
-"MouseMoveEvent 197 146 0 0 0 0 0\n"
-"MouseMoveEvent 197 147 0 0 0 0 0\n"
-"MouseMoveEvent 198 148 0 0 0 0 0\n"
-"MouseMoveEvent 198 149 0 0 0 0 0\n"
-"MouseMoveEvent 199 150 0 0 0 0 0\n"
-"MouseMoveEvent 199 151 0 0 0 0 0\n"
-"MouseMoveEvent 199 151 0 0 0 0 0\n"
-"LeftButtonPressEvent 199 151 0 0 0 0 0\n"
-"TimerEvent 199 151 0 0 0 0 0\n"
-"LeftButtonReleaseEvent 199 151 0 0 0 0 0\n"
-"MouseMoveEvent 199 151 0 0 0 0 0\n"
-"LeftButtonPressEvent 199 151 0 0 0 0 0\n"
-"LeftButtonReleaseEvent 199 151 0 0 0 0 0\n"
-"LeftButtonPressEvent 199 151 0 0 0 1 0\n"
-"LeftButtonReleaseEvent 199 151 0 0 0 0 0\n"
-"MouseMoveEvent 199 151 0 0 0 0 0\n"
-"MouseMoveEvent 199 151 0 0 0 0 0\n"
-"MouseMoveEvent 200 151 0 0 0 0 0\n"
-"MouseMoveEvent 202 152 0 0 0 0 0\n"
-"MouseMoveEvent 202 154 0 0 0 0 0\n"
-"MouseMoveEvent 204 156 0 0 0 0 0\n"
-"MouseMoveEvent 206 159 0 0 0 0 0\n"
-"MouseMoveEvent 212 163 0 0 0 0 0\n"
-"MouseMoveEvent 222 167 0 0 0 0 0\n"
-"MouseMoveEvent 231 174 0 0 0 0 0\n"
-"MouseMoveEvent 243 182 0 0 0 0 0\n"
-"MouseMoveEvent 253 190 0 0 0 0 0\n"
-"MouseMoveEvent 263 197 0 0 0 0 0\n"
-"MouseMoveEvent 274 206 0 0 0 0 0\n"
-"MouseMoveEvent 284 215 0 0 0 0 0\n"
-"MouseMoveEvent 289 224 0 0 0 0 0\n"
-"MouseMoveEvent 297 233 0 0 0 0 0\n"
-"MouseMoveEvent 304 240 0 0 0 0 0\n"
-"MouseMoveEvent 308 246 0 0 0 0 0\n"
-"MouseMoveEvent 313 255 0 0 0 0 0\n"
-"MouseMoveEvent 319 263 0 0 0 0 0\n"
-"MouseMoveEvent 323 270 0 0 0 0 0\n"
-"MouseMoveEvent 326 276 0 0 0 0 0\n"
-"MouseMoveEvent 331 282 0 0 0 0 0\n"
-"MouseMoveEvent 334 287 0 0 0 0 0\n"
-"MouseMoveEvent 337 292 0 0 0 0 0\n"
-"MouseMoveEvent 341 297 0 0 0 0 0\n"
-"RenderEvent 341 297  0 0 0 0 0 \n"
-"TimerEvent 341 297  0 0 0 0 0 \n"
-"TimerEvent 341 297  0 0 0 0 0 \n"
-"TimerEvent 341 297  0 0 0 0 0 \n"
-"EnterEvent 206  0 0 0 0 0 0 \n"
-"MouseMoveEvent 206  0 0 0 0 0 0 \n"
-"MouseMoveEvent 205 9  0 0 0 0 0 \n"
-"MouseMoveEvent 205 18  0 0 0 0 0 \n"
-"MouseMoveEvent 204 27  0 0 0 0 0 \n"
-"MouseMoveEvent 203 36  0 0 0 0 0 \n"
-"MouseMoveEvent 203 42  0 0 0 0 0 \n"
-"MouseMoveEvent 203 51  0 0 0 0 0 \n"
-"MouseMoveEvent 203 55  0 0 0 0 0 \n"
-"MouseMoveEvent 203 62  0 0 0 0 0 \n"
-"MouseMoveEvent 203 65  0 0 0 0 0 \n"
-"MouseMoveEvent 203 72  0 0 0 0 0 \n"
-"MouseMoveEvent 204 76  0 0 0 0 0 \n"
-"MouseMoveEvent 205 81  0 0 0 0 0 \n"
-"MouseMoveEvent 206 85  0 0 0 0 0 \n"
-"MouseMoveEvent 207 9 0 0 0 0 0 0 \n"
-"MouseMoveEvent 208 93  0 0 0 0 0 \n"
-"MouseMoveEvent 208 96  0 0 0 0 0 \n"
-"MouseMoveEvent 208 98  0 0 0 0 0 \n"
-"MouseMoveEvent 209 10 0 0 0 0 0 0 \n"
-"MouseMoveEvent 209 102  0 0 0 0 0 \n"
-"MouseMoveEvent 209 103  0 0 0 0 0 \n"
-"MouseMoveEvent 209 105  0 0 0 0 0 \n"
-"MouseMoveEvent 210 108  0 0 0 0 0 \n"
-"MouseMoveEvent 210 108  0 0 0 0 0 \n"
-"MouseMoveEvent 210 111  0 0 0 0 0 \n"
-"MouseMoveEvent 211 114  0 0 0 0 0 \n"
-"MouseMoveEvent 211 115  0 0 0 0 0 \n"
-"MouseMoveEvent 211 118  0 0 0 0 0 \n"
-"MouseMoveEvent 211 121  0 0 0 0 0 \n"
-"MouseMoveEvent 210 124  0 0 0 0 0 \n"
-"MouseMoveEvent 210 125  0 0 0 0 0 \n"
-"MouseMoveEvent 209 126  0 0 0 0 0 \n"
-"MouseMoveEvent 209 127  0 0 0 0 0 \n"
-"MouseMoveEvent 209 128  0 0 0 0 0 \n"
-"MouseMoveEvent 208 128  0 0 0 0 0 \n"
-"MouseMoveEvent 207 129  0 0 0 0 0 \n"
-"MouseMoveEvent 207 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 206 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 206 131  0 0 0 0 0 \n"
-"MouseMoveEvent 206 132  0 0 0 0 0 \n"
-"MouseMoveEvent 205 133  0 0 0 0 0 \n"
-"MouseMoveEvent 204 134  0 0 0 0 0 \n"
-"MouseMoveEvent 203 135  0 0 0 0 0 \n"
-"MouseMoveEvent 203 136  0 0 0 0 0 \n"
-"MouseMoveEvent 202 136  0 0 0 0 0 \n"
-"MouseMoveEvent 201 137  0 0 0 0 0 \n"
-"MouseMoveEvent 201 138  0 0 0 0 0 \n"
-"TimerEvent 201 138  0 0 0 0 0 \n"
-"MouseMoveEvent 200 138  0 0 0 0 0 \n"
-"MouseMoveEvent 199 138  0 0 0 0 0 \n"
-"MouseMoveEvent 198 138  0 0 0 0 0 \n"
-"MouseMoveEvent 198 139  0 0 0 0 0 \n"
-"MouseMoveEvent 199 139  0 0 0 0 0 \n"
-"LeftButtonPressEvent 199 139  0 0 0 0 0 \n"
-"MouseMoveEvent 199 139  0 0 0 0 0 \n"
-"TimerEvent 199 139  0 0 0 0 0 \n"
-"MouseMoveEvent 199 138  0 0 0 0 0 \n"
-"MouseMoveEvent 199 137  0 0 0 0 0 \n"
-"MouseMoveEvent 199 136  0 0 0 0 0 \n"
-"TimerEvent 199 136  0 0 0 0 0 \n"
-"MouseMoveEvent 199 135  0 0 0 0 0 \n"
-"TimerEvent 199 135  0 0 0 0 0 \n"
-"MouseMoveEvent 199 134  0 0 0 0 0 \n"
-"TimerEvent 199 134  0 0 0 0 0 \n"
-"MouseMoveEvent 199 133  0 0 0 0 0 \n"
-"MouseMoveEvent 198 133  0 0 0 0 0 \n"
-"TimerEvent 198 133  0 0 0 0 0 \n"
-"MouseMoveEvent 198 132  0 0 0 0 0 \n"
-"MouseMoveEvent 198 131  0 0 0 0 0 \n"
-"TimerEvent 198 131  0 0 0 0 0 \n"
-"MouseMoveEvent 198 13 0 0 0 0 0 0 \n"
-"TimerEvent 198 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 198 13 0 0 0 0 0 0 \n"
-"TimerEvent 198 13 0 0 0 0 0 0 \n"
-"LeftButtonReleaseEvent 198 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 198 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 197 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 196 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 195 129  0 0 0 0 0 \n"
-"TimerEvent 195 129  0 0 0 0 0 \n"
-"MouseMoveEvent 194 129  0 0 0 0 0 \n"
-"MouseMoveEvent 193 129  0 0 0 0 0 \n"
-"MouseMoveEvent 193 128  0 0 0 0 0 \n"
-"MouseMoveEvent 192 128  0 0 0 0 0 \n"
-"MouseMoveEvent 193 128  0 0 0 0 0 \n"
-"MouseMoveEvent 193 129  0 0 0 0 0 \n"
-"MouseMoveEvent 193 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 194 13 0 0 0 0 0 0 \n"
-"MouseMoveEvent 194 131  0 0 0 0 0 \n"
-"MouseMoveEvent 194 132  0 0 0 0 0 \n"
-"MouseMoveEvent 194 133  0 0 0 0 0 \n"
-"MouseMoveEvent 194 134  0 0 0 0 0 \n"
-"MouseMoveEvent 194 135  0 0 0 0 0 \n"
-"MouseMoveEvent 194 135  0 0 0 0 0 \n"
-"MouseMoveEvent 194 136  0 0 0 0 0 \n"
-"MouseMoveEvent 194 138  0 0 0 0 0 \n"
-"MouseMoveEvent 194 139  0 0 0 0 0 \n"
-"MouseMoveEvent 194 14 0 0 0 0 0 0 \n"
-"MouseMoveEvent 194 141  0 0 0 0 0 \n"
-"MouseMoveEvent 194 142  0 0 0 0 0 \n"
-"MouseMoveEvent 194 143  0 0 0 0 0 \n"
-"MouseMoveEvent 194 145  0 0 0 0 0 \n"
-"MouseMoveEvent 194 146  0 0 0 0 0 \n"
-"MouseMoveEvent 194 147  0 0 0 0 0 \n"
-"MouseMoveEvent 194 148  0 0 0 0 0 \n"
-"MouseMoveEvent 194 149  0 0 0 0 0 \n"
-"MouseMoveEvent 194 15 0 0 0 0 0 0 \n"
-"MouseMoveEvent 194 151  0 0 0 0 0 \n"
-"MouseMoveEvent 194 152  0 0 0 0 0 \n"
-"MouseMoveEvent 194 153  0 0 0 0 0 \n"
-"MouseMoveEvent 194 154  0 0 0 0 0 \n"
-"MouseMoveEvent 194 155  0 0 0 0 0 \n"
-"MouseMoveEvent 194 156  0 0 0 0 0 \n"
-"MouseMoveEvent 194 158  0 0 0 0 0 \n"
-"MouseMoveEvent 194 159  0 0 0 0 0 \n"
-"MouseMoveEvent 194 16 0 0 0 0 0 0 \n"
-"MouseMoveEvent 195 161  0 0 0 0 0 \n"
-"MouseMoveEvent 196 161  0 0 0 0 0 \n"
-"TimerEvent 196 161  0 0 0 0 0 \n"
-"MouseMoveEvent 196 162  0 0 0 0 0 \n"
-"TimerEvent 196 162  0 0 0 0 0 \n"
-"MouseMoveEvent 197 162  0 0 0 0 0 \n"
-"TimerEvent 197 162  0 0 0 0 0 \n"
-"MouseMoveEvent 198 162  0 0 0 0 0 \n"
-"MouseMoveEvent 198 162  0 0 0 0 0 \n"
-"MouseMoveEvent 199 162  0 0 0 0 0 \n"
-"MouseMoveEvent 199 161  0 0 0 0 0 \n"
-"MouseMoveEvent 199 16 0 0 0 0 0 0 \n"
-"MouseMoveEvent 199 159  0 0 0 0 0 \n"
-"LeftButtonPressEvent 199 159  0 0 0 0 0 \n"
-"MouseMoveEvent 199 159  0 0 0 0 0 \n"
-"MouseMoveEvent 199 16 0 0 0 0 0 0 \n"
-"MouseMoveEvent 199 161  0 0 0 0 0 \n"
-"MouseMoveEvent 199 162  0 0 0 0 0 \n"
-"MouseMoveEvent 199 163  0 0 0 0 0 \n"
-"TimerEvent 199 163  0 0 0 0 0 \n"
-"MouseMoveEvent 200 163  0 0 0 0 0 \n"
-"MouseMoveEvent 200 164  0 0 0 0 0 \n"
-"MouseMoveEvent 200 165  0 0 0 0 0 \n"
-"MouseMoveEvent 200 166  0 0 0 0 0 \n"
-"TimerEvent 200 166  0 0 0 0 0 \n"
-"MouseMoveEvent 200 167  0 0 0 0 0 \n"
-"MouseMoveEvent 201 167  0 0 0 0 0 \n"
-"MouseMoveEvent 201 168  0 0 0 0 0 \n"
-"TimerEvent 201 168  0 0 0 0 0 \n"
-"MouseMoveEvent 201 169  0 0 0 0 0 \n"
-"TimerEvent 201 169  0 0 0 0 0 \n"
-"MouseMoveEvent 201 17 0 0 0 0 0 0 \n"
-"TimerEvent 201 17 0 0 0 0 0 0 \n"
-"MouseMoveEvent 201 171  0 0 0 0 0 \n"
-"TimerEvent 201 171  0 0 0 0 0 \n"
-"MouseMoveEvent 201 171  0 0 0 0 0 \n"
-"TimerEvent 201 171  0 0 0 0 0 \n"
-"LeftButtonReleaseEvent 201 171  0 0 0 0 0 \n"
-"MouseMoveEvent 201 171  0 0 0 0 0 \n"
-"MouseMoveEvent 201 172  0 0 0 0 0 \n"
-"TimerEvent 201 172  0 0 0 0 0 \n"
-"MouseMoveEvent 200 172  0 0 0 0 0 \n"
-"MouseMoveEvent 200 171  0 0 0 0 0 \n"
-"MouseMoveEvent 200 17 0 0 0 0 0 0 \n"
-"MouseMoveEvent 200 169  0 0 0 0 0 \n"
-"MouseMoveEvent 199 169  0 0 0 0 0 \n"
-"MouseMoveEvent 199 168  0 0 0 0 0 \n"
-"MouseMoveEvent 199 167  0 0 0 0 0 \n"
-"MouseMoveEvent 199 166  0 0 0 0 0 \n"
-"TimerEvent 199 166  0 0 0 0 0 \n"
-"MouseMoveEvent 200 168  0 0 0 0 0 \n"
-"MouseMoveEvent 201 169  0 0 0 0 0 \n"
-"MouseMoveEvent 204 171  0 0 0 0 0 \n"
-"MouseMoveEvent 205 172  0 0 0 0 0 \n"
-"MouseMoveEvent 206 172  0 0 0 0 0 \n"
-"MouseMoveEvent 207 172  0 0 0 0 0 \n"
-"TimerEvent 207 172  0 0 0 0 0 \n"
-"MouseMoveEvent 207 173  0 0 0 0 0 \n"
-"MouseMoveEvent 208 173  0 0 0 0 0 \n"
-"MouseMoveEvent 209 173  0 0 0 0 0 \n"
-"MouseMoveEvent 210 173  0 0 0 0 0 \n"
-"MouseMoveEvent 211 175  0 0 0 0 0 \n"
-"MouseMoveEvent 212 175  0 0 0 0 0 \n"
-"MouseMoveEvent 213 176  0 0 0 0 0 \n"
-"MouseMoveEvent 213 177  0 0 0 0 0 \n"
-"MouseMoveEvent 213 179  0 0 0 0 0 \n"
-"MouseMoveEvent 213 18 0 0 0 0 0 0 \n"
-"MouseMoveEvent 213 181  0 0 0 0 0 \n"
-"MouseMoveEvent 213 182  0 0 0 0 0 \n"
-"MouseMoveEvent 213 185  0 0 0 0 0 \n"
-"MouseMoveEvent 214 187  0 0 0 0 0 \n"
-"MouseMoveEvent 215 189  0 0 0 0 0 \n"
-"MouseMoveEvent 215 191  0 0 0 0 0 \n"
-"MouseMoveEvent 216 194  0 0 0 0 0 \n"
-"MouseMoveEvent 216 195  0 0 0 0 0 \n"
-"MouseMoveEvent 216 199  0 0 0 0 0 \n"
-"MouseMoveEvent 217 203  0 0 0 0 0 \n"
-"MouseMoveEvent 218 208  0 0 0 0 0 \n"
-"MouseMoveEvent 218 211  0 0 0 0 0 \n"
-"MouseMoveEvent 219 215  0 0 0 0 0 \n"
-"MouseMoveEvent 221 221  0 0 0 0 0 \n"
-"MouseMoveEvent 222 224  0 0 0 0 0 \n"
-"MouseMoveEvent 222 227  0 0 0 0 0 \n"
-"MouseMoveEvent 223 23 0 0 0 0 0 0 \n"
-"MouseMoveEvent 223 232  0 0 0 0 0 \n"
-"MouseMoveEvent 223 234  0 0 0 0 0 \n"
-"MouseMoveEvent 224 236  0 0 0 0 0 \n"
-"MouseMoveEvent 225 238  0 0 0 0 0 \n"
-"MouseMoveEvent 226 24 0 0 0 0 0 0 \n"
-"MouseMoveEvent 226 242  0 0 0 0 0 \n"
-"MouseMoveEvent 227 244  0 0 0 0 0 \n"
-"MouseMoveEvent 228 246  0 0 0 0 0 \n"
-"MouseMoveEvent 228 247  0 0 0 0 0 \n"
-"MouseMoveEvent 228 248  0 0 0 0 0 \n"
-"MouseMoveEvent 229 25 0 0 0 0 0 0 \n"
-"MouseMoveEvent 230 251  0 0 0 0 0 \n"
-"MouseMoveEvent 230 252  0 0 0 0 0 \n"
-"MouseMoveEvent 230 254  0 0 0 0 0 \n"
-"MouseMoveEvent 231 255  0 0 0 0 0 \n"
-"MouseMoveEvent 231 255  0 0 0 0 0 \n"
-"MouseMoveEvent 232 256  0 0 0 0 0 \n"
-"MouseMoveEvent 232 257  0 0 0 0 0 \n"
-"MouseMoveEvent 234 257  0 0 0 0 0 \n"
-"MouseMoveEvent 235 258  0 0 0 0 0 \n"
-"MouseMoveEvent 236 258  0 0 0 0 0 \n"
-"MouseMoveEvent 237 258  0 0 0 0 0 \n"
-"MouseMoveEvent 238 258  0 0 0 0 0 \n"
-"MouseMoveEvent 239 259  0 0 0 0 0 \n"
-"MouseMoveEvent 240 259  0 0 0 0 0 \n"
-"MouseMoveEvent 241 259  0 0 0 0 0 \n"
-"MouseMoveEvent 241 26 0 0 0 0 0 0 \n"
-"MouseMoveEvent 242 261  0 0 0 0 0 \n"
-"MouseMoveEvent 243 262  0 0 0 0 0 \n"
-"MouseMoveEvent 244 262  0 0 0 0 0 \n"
-"MouseMoveEvent 244 263  0 0 0 0 0 \n"
-"MouseMoveEvent 245 264  0 0 0 0 0 \n"
-"MouseMoveEvent 244 264  0 0 0 0 0 \n"
-"MouseMoveEvent 244 264  0 0 0 0 0 \n"
-"MouseMoveEvent 244 263  0 0 0 0 0 \n"
-"MouseMoveEvent 243 263  0 0 0 0 0 \n"
-"MouseMoveEvent 242 263  0 0 0 0 0 \n"
-"MouseMoveEvent 241 263  0 0 0 0 0 \n"
-"MouseMoveEvent 240 263  0 0 0 0 0 \n"
-"MouseMoveEvent 240 262  0 0 0 0 0 \n"
-"MouseMoveEvent 239 262  0 0 0 0 0 \n"
-"MouseMoveEvent 238 261  0 0 0 0 0 \n"
-"MouseMoveEvent 237 26 0 0 0 0 0 0 \n"
-"MouseMoveEvent 237 259  0 0 0 0 0 \n"
-"MouseMoveEvent 237 259  0 0 0 0 0 \n"
-"MouseMoveEvent 238 259  0 0 0 0 0 \n"
-"MouseMoveEvent 239 259  0 0 0 0 0 \n"
-"MouseMoveEvent 240 26 0 0 0 0 0 0 \n"
-"MouseMoveEvent 241 26 0 0 0 0 0 0 \n"
-"MouseMoveEvent 241 261  0 0 0 0 0 \n"
-"MouseMoveEvent 242 261  0 0 0 0 0 \n"
-"MouseMoveEvent 243 261  0 0 0 0 0 \n"
-"MouseMoveEvent 243 262  0 0 0 0 0 \n"
-"MouseMoveEvent 244 262  0 0 0 0 0 \n"
-"MouseMoveEvent 245 262  0 0 0 0 0 \n"
-"MouseMoveEvent 246 263  0 0 0 0 0 \n"
-"MouseMoveEvent 246 261  0 0 0 0 0 \n"
-"MouseMoveEvent 246 259  0 0 0 0 0 \n"
-"MouseMoveEvent 246 258  0 0 0 0 0 \n"
-"MouseMoveEvent 246 256  0 0 0 0 0 \n"
-"MouseMoveEvent 246 254  0 0 0 0 0 \n"
-"MouseMoveEvent 246 253  0 0 0 0 0 \n"
-"MouseMoveEvent 246 252  0 0 0 0 0 \n"
-"MouseMoveEvent 246 251  0 0 0 0 0 \n"
-"MouseMoveEvent 246 25 0 0 0 0 0 0 \n"
-"MouseMoveEvent 246 249  0 0 0 0 0 \n"
-"MouseMoveEvent 246 248  0 0 0 0 0 \n"
-"MouseMoveEvent 246 247  0 0 0 0 0 \n"
-"MouseMoveEvent 246 246  0 0 0 0 0 \n"
-"MouseMoveEvent 246 245  0 0 0 0 0 \n"
-"MouseMoveEvent 246 243  0 0 0 0 0 \n"
-"MouseMoveEvent 246 242  0 0 0 0 0 \n"
-"MouseMoveEvent 245 241  0 0 0 0 0 \n"
-"MouseMoveEvent 245 24 0 0 0 0 0 0 \n"
-"MouseMoveEvent 245 238  0 0 0 0 0 \n"
-"MouseMoveEvent 244 235  0 0 0 0 0 \n"
-"MouseMoveEvent 243 233  0 0 0 0 0 \n"
-"MouseMoveEvent 242 229  0 0 0 0 0 \n"
-"MouseMoveEvent 240 225  0 0 0 0 0 \n"
-"MouseMoveEvent 240 221  0 0 0 0 0 \n"
-"MouseMoveEvent 238 215  0 0 0 0 0 \n"
-"MouseMoveEvent 236 21 0 0 0 0 0 0 \n"
-"MouseMoveEvent 235 205  0 0 0 0 0 \n"
-"MouseMoveEvent 233 199  0 0 0 0 0 \n"
-"MouseMoveEvent 232 194  0 0 0 0 0 \n"
-"MouseMoveEvent 230 19 0 0 0 0 0 0 \n"
-"MouseMoveEvent 228 185  0 0 0 0 0 \n"
-"MouseMoveEvent 227 181  0 0 0 0 0 \n"
-"MouseMoveEvent 227 178  0 0 0 0 0 \n"
-"MouseMoveEvent 227 174  0 0 0 0 0 \n"
-"MouseMoveEvent 227 171  0 0 0 0 0 \n"
-"MouseMoveEvent 227 169  0 0 0 0 0 \n"
-"MouseMoveEvent 227 168  0 0 0 0 0 \n"
-"MouseMoveEvent 227 167  0 0 0 0 0 \n"
-"MouseMoveEvent 228 165  0 0 0 0 0 \n"
-"MouseMoveEvent 229 163  0 0 0 0 0 \n"
-"MouseMoveEvent 229 162  0 0 0 0 0 \n"
-"MouseMoveEvent 229 161  0 0 0 0 0 \n"
-"MouseMoveEvent 229 159  0 0 0 0 0 \n"
-"MouseMoveEvent 229 158  0 0 0 0 0 \n"
-"MouseMoveEvent 229 157  0 0 0 0 0 \n"
-"MouseMoveEvent 230 156  0 0 0 0 0 \n"
-"MouseMoveEvent 230 155  0 0 0 0 0 \n"
-"MouseMoveEvent 230 154  0 0 0 0 0 \n"
-"MouseMoveEvent 230 153  0 0 0 0 0 \n"
-"MouseMoveEvent 231 152  0 0 0 0 0 \n"
-"MouseMoveEvent 231 15 0 0 0 0 0 0 \n"
-"MouseMoveEvent 232 148  0 0 0 0 0 \n"
-"MouseMoveEvent 232 147  0 0 0 0 0 \n"
-"MouseMoveEvent 233 145  0 0 0 0 0 \n"
-"TimerEvent 233 145  0 0 0 0 0 \n"
-"MouseMoveEvent 233 143  0 0 0 0 0 \n"
-"MouseMoveEvent 233 142  0 0 0 0 0 \n"
-"MouseMoveEvent 233 142  0 0 0 0 0 \n"
-"MouseMoveEvent 233 141  0 0 0 0 0 \n"
-"MouseMoveEvent 233 14 0 0 0 0 0 0 \n"
-"MouseMoveEvent 233 139  0 0 0 0 0 \n"
-"MouseMoveEvent 233 138  0 0 0 0 0 \n"
-"MouseMoveEvent 234 138  0 0 0 0 0 \n"
-"MouseMoveEvent 234 139  0 0 0 0 0 \n"
-"MouseMoveEvent 234 14 0 0 0 0 0 0 \n"
-"MouseMoveEvent 235 141  0 0 0 0 0 \n"
-"MouseMoveEvent 235 142  0 0 0 0 0 \n"
-"MouseMoveEvent 235 143  0 0 0 0 0 \n"
-"MouseMoveEvent 235 144  0 0 0 0 0 \n"
-"MouseMoveEvent 235 145  0 0 0 0 0 \n"
-"MouseMoveEvent 235 146  0 0 0 0 0 \n"
-"MouseMoveEvent 235 147  0 0 0 0 0 \n"
-"MouseMoveEvent 235 148  0 0 0 0 0 \n"
-"TimerEvent 235 148  0 0 0 0 0 \n"
-"MouseMoveEvent 235 15 0 0 0 0 0 0 \n"
-"MouseMoveEvent 235 151  0 0 0 0 0 \n"
-"MouseMoveEvent 235 152  0 0 0 0 0 \n"
-"MouseMoveEvent 234 152  0 0 0 0 0 \n"
-"MouseMoveEvent 233 152  0 0 0 0 0 \n"
-"MouseMoveEvent 232 152  0 0 0 0 0 \n"
-"MouseMoveEvent 231 152  0 0 0 0 0 \n"
-"MouseMoveEvent 231 151  0 0 0 0 0 \n"
-"MouseMoveEvent 231 15 0 0 0 0 0 0 \n"
-"MouseMoveEvent 231 15 0 0 0 0 0 0 \n"
-"LeftButtonPressEvent 231 15 0 0 0 0 0 0 \n"
-"MouseMoveEvent 231 149  0 0 0 0 0 \n"
-"MouseMoveEvent 230 149  0 0 0 0 0 \n"
-"TimerEvent 230 149  0 0 0 0 0 \n"
-"MouseMoveEvent 229 148  0 0 0 0 0 \n"
-"MouseMoveEvent 228 148  0 0 0 0 0 \n"
-"TimerEvent 228 148  0 0 0 0 0 \n"
-"MouseMoveEvent 227 148  0 0 0 0 0 \n"
-"MouseMoveEvent 227 148  0 0 0 0 0 \n"
-"MouseMoveEvent 226 147  0 0 0 0 0 \n"
-"TimerEvent 226 147  0 0 0 0 0 \n"
-"MouseMoveEvent 225 147  0 0 0 0 0 \n"
-"TimerEvent 225 147  0 0 0 0 0 \n"
-"MouseMoveEvent 224 147  0 0 0 0 0 \n"
-"MouseMoveEvent 224 146  0 0 0 0 0 \n"
-"LeftButtonReleaseEvent 224 146  0 0 0 0 0 \n"
-"MouseMoveEvent 224 146  0 0 0 0 0 \n"
-"TimerEvent 224 146  0 0 0 0 0 \n"
-"MouseMoveEvent 223 146  0 0 0 0 0 \n"
-"MouseMoveEvent 223 145  0 0 0 0 0 \n"
-"MouseMoveEvent 222 145  0 0 0 0 0 \n"
-"MouseMoveEvent 221 144  0 0 0 0 0 \n"
-"TimerEvent 221 144  0 0 0 0 0 \n"
-"MouseMoveEvent 220 144  0 0 0 0 0 \n"
-"MouseMoveEvent 220 142  0 0 0 0 0 \n"
-"MouseMoveEvent 219 142  0 0 0 0 0 \n"
-"MouseMoveEvent 219 14 0 0 0 0 0 0 \n"
-"MouseMoveEvent 217 14 0 0 0 0 0 0 \n"
-"MouseMoveEvent 216 138  0 0 0 0 0 \n"
-"MouseMoveEvent 215 138  0 0 0 0 0 \n"
-"MouseMoveEvent 214 137  0 0 0 0 0 \n"
-"MouseMoveEvent 212 136  0 0 0 0 0 \n"
-"MouseMoveEvent 211 136  0 0 0 0 0 \n"
-"MouseMoveEvent 209 135  0 0 0 0 0 \n"
-"MouseMoveEvent 207 134  0 0 0 0 0 \n"
-"MouseMoveEvent 204 134  0 0 0 0 0 \n"
-"MouseMoveEvent 202 133  0 0 0 0 0 \n"
-"MouseMoveEvent 199 133  0 0 0 0 0 \n"
-"MouseMoveEvent 198 133  0 0 0 0 0 \n"
-"MouseMoveEvent 197 133  0 0 0 0 0 \n"
-"MouseMoveEvent 196 133  0 0 0 0 0 \n"
-"MouseMoveEvent 195 133  0 0 0 0 0 \n"
-"TimerEvent 195 133  0 0 0 0 0 \n"
-"MouseMoveEvent 194 133  0 0 0 0 0 \n"
-"MouseMoveEvent 193 133  0 0 0 0 0 \n"
-"MouseMoveEvent 192 133  0 0 0 0 0 \n"
-"MouseMoveEvent 191 133  0 0 0 0 0 \n"
-"MouseMoveEvent 190 133  0 0 0 0 0 \n"
-"MouseMoveEvent 189 133  0 0 0 0 0 \n"
-"MouseMoveEvent 187 133  0 0 0 0 0 \n"
-"MouseMoveEvent 187 134  0 0 0 0 0 \n"
-"MouseMoveEvent 186 134  0 0 0 0 0 \n"
-"MouseMoveEvent 185 134  0 0 0 0 0 \n"
-"MouseMoveEvent 184 134  0 0 0 0 0 \n"
-"MouseMoveEvent 184 135  0 0 0 0 0 \n"
-"MouseMoveEvent 183 135  0 0 0 0 0 \n"
-"MouseMoveEvent 182 135  0 0 0 0 0 \n"
-"MouseMoveEvent 181 136  0 0 0 0 0 \n"
-"MouseMoveEvent 180 136  0 0 0 0 0 \n"
-"MouseMoveEvent 179 137  0 0 0 0 0 \n"
-"MouseMoveEvent 178 137  0 0 0 0 0 \n"
-"MouseMoveEvent 177 137  0 0 0 0 0 \n"
-"MouseMoveEvent 176 137  0 0 0 0 0 \n"
-"MouseMoveEvent 175 137  0 0 0 0 0 \n"
-"MouseMoveEvent 175 138  0 0 0 0 0 \n"
-"MouseMoveEvent 173 138  0 0 0 0 0 \n"
-"MouseMoveEvent 170 139  0 0 0 0 0 \n"
-"MouseMoveEvent 169 139  0 0 0 0 0 \n"
-"MouseMoveEvent 168 139  0 0 0 0 0 \n"
-"MouseMoveEvent 167 14 0 0 0 0 0 0 \n"
-"MouseMoveEvent 167 14 0 0 0 0 0 0 \n"
-"MouseMoveEvent 165 142  0 0 0 0 0 \n"
-"MouseMoveEvent 165 143  0 0 0 0 0 \n"
-"MouseMoveEvent 164 144  0 0 0 0 0 \n"
-"MouseMoveEvent 164 145  0 0 0 0 0 \n"
-"MouseMoveEvent 163 145  0 0 0 0 0 \n"
-"MouseMoveEvent 163 146  0 0 0 0 0 \n"
-"MouseMoveEvent 163 147  0 0 0 0 0 \n"
-"MouseMoveEvent 163 148  0 0 0 0 0 \n"
-"TimerEvent 163 148  0 0 0 0 0 \n"
-"MouseMoveEvent 164 148  0 0 0 0 0 \n"
-"MouseMoveEvent 165 148  0 0 0 0 0 \n"
-"MouseMoveEvent 166 148  0 0 0 0 0 \n"
-"MouseMoveEvent 166 149  0 0 0 0 0 \n"
-"MouseMoveEvent 167 149  0 0 0 0 0 \n"
-"LeftButtonPressEvent 167 149  0 0 0 0 0 \n"
-"MouseMoveEvent 167 149  0 0 0 0 0 \n"
-"TimerEvent 167 149  0 0 0 0 0 \n"
-"MouseMoveEvent 168 149  0 0 0 0 0 \n"
-"TimerEvent 168 149  0 0 0 0 0 \n"
-"MouseMoveEvent 169 149  0 0 0 0 0 \n"
-"MouseMoveEvent 170 149  0 0 0 0 0 \n"
-"MouseMoveEvent 171 149  0 0 0 0 0 \n"
-"TimerEvent 171 149  0 0 0 0 0 \n"
-"MouseMoveEvent 172 149  0 0 0 0 0 \n"
-"MouseMoveEvent 172 149  0 0 0 0 0 \n"
-"MouseMoveEvent 173 149  0 0 0 0 0 \n"
-"TimerEvent 173 149  0 0 0 0 0 \n"
-"MouseMoveEvent 174 149  0 0 0 0 0 \n"
-"MouseMoveEvent 174 148  0 0 0 0 0 \n"
-"TimerEvent 174 148  0 0 0 0 0 \n"
-"MouseMoveEvent 175 148  0 0 0 0 0 \n"
-"TimerEvent 175 148  0 0 0 0 0 \n"
-"MouseMoveEvent 176 148  0 0 0 0 0 \n"
-"TimerEvent 176 148  0 0 0 0 0 \n"
-"MouseMoveEvent 177 148  0 0 0 0 0 \n"
-"TimerEvent 177 148  0 0 0 0 0 \n"
-"LeftButtonReleaseEvent 177 148  0 0 0 0 0 \n"
-"MouseMoveEvent 177 148  0 0 0 0 0 \n"
-"TimerEvent 177 148  0 0 0 0 0 \n"
-"MouseMoveEvent 177 147  0 0 0 0 0 \n"
-"MouseMoveEvent 176 147  0 0 0 0 0 \n"
-"MouseMoveEvent 176 146  0 0 0 0 0 \n"
-"MouseMoveEvent 175 146  0 0 0 0 0 \n"
-"MouseMoveEvent 176 146  0 0 0 0 0 \n"
-"MouseMoveEvent 177 146  0 0 0 0 0 \n"
-"MouseMoveEvent 178 147  0 0 0 0 0 \n"
-"MouseMoveEvent 180 147  0 0 0 0 0 \n"
-"MouseMoveEvent 182 148  0 0 0 0 0 \n"
-"MouseMoveEvent 185 15 0 0 0 0 0 0 \n"
-"MouseMoveEvent 185 15 0 0 0 0 0 0 \n"
-"MouseMoveEvent 187 151  0 0 0 0 0 \n"
-"MouseMoveEvent 189 153  0 0 0 0 0 \n"
-"MouseMoveEvent 193 155  0 0 0 0 0 \n"
-"TimerEvent 193 155  0 0 0 0 0 \n"
-"MouseMoveEvent 198 159  0 0 0 0 0 \n"
-"MouseMoveEvent 202 162  0 0 0 0 0 \n"
-"MouseMoveEvent 206 164  0 0 0 0 0 \n"
-"MouseMoveEvent 212 168  0 0 0 0 0 \n"
-"MouseMoveEvent 217 17 0 0 0 0 0 0 \n"
-"MouseMoveEvent 222 172  0 0 0 0 0 \n"
-"MouseMoveEvent 230 175  0 0 0 0 0 \n"
-"MouseMoveEvent 240 178  0 0 0 0 0 \n"
-"MouseMoveEvent 247 179  0 0 0 0 0 \n"
-"MouseMoveEvent 254 183  0 0 0 0 0 \n"
-"MouseMoveEvent 263 184  0 0 0 0 0 \n"
-"MouseMoveEvent 268 186  0 0 0 0 0 \n"
-"MouseMoveEvent 273 188  0 0 0 0 0 \n"
-"MouseMoveEvent 278 19 0 0 0 0 0 0 \n"
-"MouseMoveEvent 281 192  0 0 0 0 0 \n"
-"MouseMoveEvent 283 192  0 0 0 0 0 \n"
-"MouseMoveEvent 287 194  0 0 0 0 0 \n"
-"MouseMoveEvent 290 196  0 0 0 0 0 \n"
-"MouseMoveEvent 292 197  0 0 0 0 0 \n"
-"MouseMoveEvent 296 199  0 0 0 0 0 \n"
-"MouseMoveEvent 298 201  0 0 0 0 0 \n"
-"MouseMoveEvent 301 202  0 0 0 0 0 \n"
-"MouseMoveEvent 303 204  0 0 0 0 0 \n"
-"MouseMoveEvent 306 205  0 0 0 0 0 \n"
-"MouseMoveEvent 308 208  0 0 0 0 0 \n"
-"MouseMoveEvent 310 208  0 0 0 0 0 \n"
-"MouseMoveEvent 313 21 0 0 0 0 0 0 \n"
-"MouseMoveEvent 314 211  0 0 0 0 0 \n"
-"MouseMoveEvent 316 213  0 0 0 0 0 \n"
-"MouseMoveEvent 318 214  0 0 0 0 0 \n"
-"MouseMoveEvent 319 216  0 0 0 0 0 \n"
-"MouseMoveEvent 320 216  0 0 0 0 0 \n"
-"MouseMoveEvent 320 218  0 0 0 0 0 \n"
-"MouseMoveEvent 322 22 0 0 0 0 0 0 \n"
-"MouseMoveEvent 323 222  0 0 0 0 0 \n"
-"MouseMoveEvent 324 225  0 0 0 0 0 \n"
-"MouseMoveEvent 325 227  0 0 0 0 0 \n"
-"MouseMoveEvent 326 228  0 0 0 0 0 \n"
-"MouseMoveEvent 327 23 0 0 0 0 0 0 \n"
-"MouseMoveEvent 327 232  0 0 0 0 0 \n"
-"MouseMoveEvent 328 234  0 0 0 0 0 \n"
-"MouseMoveEvent 329 236  0 0 0 0 0 \n"
-"MouseMoveEvent 330 239  0 0 0 0 0 \n"
-"MouseMoveEvent 331 24 0 0 0 0 0 0 \n"
-"MouseMoveEvent 332 243  0 0 0 0 0 \n"
-"MouseMoveEvent 333 246  0 0 0 0 0 \n"
-"MouseMoveEvent 334 248  0 0 0 0 0 \n"
-"MouseMoveEvent 335 25 0 0 0 0 0 0 \n"
-"MouseMoveEvent 336 252  0 0 0 0 0 \n"
-"MouseMoveEvent 337 254  0 0 0 0 0 \n"
-"MouseMoveEvent 337 256  0 0 0 0 0 \n"
-"MouseMoveEvent 339 258  0 0 0 0 0 \n"
-"MouseMoveEvent 340 261  0 0 0 0 0 \n"
-"MouseMoveEvent 341 263  0 0 0 0 0 \n"
-"MouseMoveEvent 343 265  0 0 0 0 0 \n"
-"MouseMoveEvent 344 267  0 0 0 0 0 \n"
-"MouseMoveEvent 344 269  0 0 0 0 0 \n"
-"MouseMoveEvent 346 272  0 0 0 0 0 \n"
-"MouseMoveEvent 347 273  0 0 0 0 0 \n"
-"MouseMoveEvent 348 276  0 0 0 0 0 \n"
-"MouseMoveEvent 349 278  0 0 0 0 0 \n"
-"MouseMoveEvent 350 28 0 0 0 0 0 0 \n"
-"MouseMoveEvent 352 282  0 0 0 0 0 \n"
-"MouseMoveEvent 353 285  0 0 0 0 0 \n"
-"MouseMoveEvent 353 286  0 0 0 0 0 \n"
-"MouseMoveEvent 355 288  0 0 0 0 0 \n"
-"MouseMoveEvent 356 288  0 0 0 0 0 \n"
-"MouseMoveEvent 357 29 0 0 0 0 0 0 \n"
-"MouseMoveEvent 358 291  0 0 0 0 0 \n"
-"MouseMoveEvent 360 293  0 0 0 0 0 \n"
-"MouseMoveEvent 362 295  0 0 0 0 0 \n"
-"MouseMoveEvent 363 298  0 0 0 0 0 \n"
-"LeaveEvent 341 297 0 0 0 0 0\n"
-"ExitEvent 341 297 0 0 0 0 0\n"
+"EnterEvent 392 142 0 0 0 0 0\n"
+"MouseMoveEvent 392 142 0 0 0 0 0\n"
+"MouseMoveEvent 388 142 0 0 0 0 0\n"
+"MouseMoveEvent 387 142 0 0 0 0 0\n"
+"TimerEvent 387 142 0 0 0 0 0\n"
+"MouseMoveEvent 384 141 0 0 0 0 0\n"
+"MouseMoveEvent 377 141 0 0 0 0 0\n"
+"MouseMoveEvent 366 139 0 0 0 0 0\n"
+"MouseMoveEvent 357 139 0 0 0 0 0\n"
+"MouseMoveEvent 348 139 0 0 0 0 0\n"
+"MouseMoveEvent 340 139 0 0 0 0 0\n"
+"TimerEvent 340 139 0 0 0 0 0\n"
+"MouseMoveEvent 332 139 0 0 0 0 0\n"
+"MouseMoveEvent 327 137 0 0 0 0 0\n"
+"MouseMoveEvent 323 137 0 0 0 0 0\n"
+"TimerEvent 323 137 0 0 0 0 0\n"
+"MouseMoveEvent 322 137 0 0 0 0 0\n"
+"MouseMoveEvent 319 139 0 0 0 0 0\n"
+"MouseMoveEvent 315 141 0 0 0 0 0\n"
+"MouseMoveEvent 309 146 0 0 0 0 0\n"
+"MouseMoveEvent 303 150 0 0 0 0 0\n"
+"MouseMoveEvent 297 156 0 0 0 0 0\n"
+"TimerEvent 297 156 0 0 0 0 0\n"
+"MouseMoveEvent 291 160 0 0 0 0 0\n"
+"MouseMoveEvent 285 166 0 0 0 0 0\n"
+"MouseMoveEvent 279 171 0 0 0 0 0\n"
+"MouseMoveEvent 275 175 0 0 0 0 0\n"
+"MouseMoveEvent 273 177 0 0 0 0 0\n"
+"MouseMoveEvent 272 178 0 0 0 0 0\n"
+"TimerEvent 272 178 0 0 0 0 0\n"
+"MouseMoveEvent 272 179 0 0 0 0 0\n"
+"MouseMoveEvent 272 180 0 0 0 0 0\n"
+"MouseMoveEvent 272 182 0 0 0 0 0\n"
+"MouseMoveEvent 272 183 0 0 0 0 0\n"
+"TimerEvent 272 183 0 0 0 0 0\n"
+"MouseMoveEvent 272 184 0 0 0 0 0\n"
+"MouseMoveEvent 272 185 0 0 0 0 0\n"
+"MouseMoveEvent 273 185 0 0 0 0 0\n"
+"MouseMoveEvent 273 186 0 0 0 0 0\n"
+"TimerEvent 273 186 0 0 0 0 0\n"
+"MouseMoveEvent 273 187 0 0 0 0 0\n"
+"MouseMoveEvent 273 188 0 0 0 0 0\n"
+"MouseMoveEvent 273 189 0 0 0 0 0\n"
+"TimerEvent 273 189 0 0 0 0 0\n"
+"MouseMoveEvent 273 190 0 0 0 0 0\n"
+"MouseMoveEvent 274 191 0 0 0 0 0\n"
+"MouseMoveEvent 274 192 0 0 0 0 0\n"
+"MouseMoveEvent 274 193 0 0 0 0 0\n"
+"TimerEvent 274 193 0 0 0 0 0\n"
+"MouseMoveEvent 274 194 0 0 0 0 0\n"
+"MouseMoveEvent 274 195 0 0 0 0 0\n"
+"MouseMoveEvent 274 196 0 0 0 0 0\n"
+"MouseMoveEvent 274 197 0 0 0 0 0\n"
+"TimerEvent 274 197 0 0 0 0 0\n"
+"MouseMoveEvent 274 198 0 0 0 0 0\n"
+"MouseMoveEvent 275 199 0 0 0 0 0\n"
+"MouseMoveEvent 275 200 0 0 0 0 0\n"
+"TimerEvent 275 200 0 0 0 0 0\n"
+"MouseMoveEvent 275 201 0 0 0 0 0\n"
+"MouseMoveEvent 275 202 0 0 0 0 0\n"
+"TimerEvent 275 202 0 0 0 0 0\n"
+"MouseMoveEvent 274 201 0 0 0 0 0\n"
+"MouseMoveEvent 272 200 0 0 0 0 0\n"
+"MouseMoveEvent 269 199 0 0 0 0 0\n"
+"MouseMoveEvent 266 198 0 0 0 0 0\n"
+"MouseMoveEvent 264 197 0 0 0 0 0\n"
+"MouseMoveEvent 262 197 0 0 0 0 0\n"
+"TimerEvent 262 197 0 0 0 0 0\n"
+"MouseMoveEvent 261 196 0 0 0 0 0\n"
+"MouseMoveEvent 260 196 0 0 0 0 0\n"
+"TimerEvent 260 196 0 0 0 0 0\n"
+"MouseMoveEvent 262 196 0 0 0 0 0\n"
+"MouseMoveEvent 266 196 0 0 0 0 0\n"
+"MouseMoveEvent 272 196 0 0 0 0 0\n"
+"MouseMoveEvent 277 197 0 0 0 0 0\n"
+"MouseMoveEvent 283 197 0 0 0 0 0\n"
+"MouseMoveEvent 287 199 0 0 0 0 0\n"
+"TimerEvent 287 199 0 0 0 0 0\n"
+"MouseMoveEvent 286 198 0 0 0 0 0\n"
+"MouseMoveEvent 285 198 0 0 0 0 0\n"
+"MouseMoveEvent 285 197 0 0 0 0 0\n"
+"MouseMoveEvent 284 197 0 0 0 0 0\n"
+"TimerEvent 284 197 0 0 0 0 0\n"
+"MouseMoveEvent 283 197 0 0 0 0 0\n"
+"MouseMoveEvent 280 197 0 0 0 0 0\n"
+"MouseMoveEvent 278 197 0 0 0 0 0\n"
+"MouseMoveEvent 277 198 0 0 0 0 0\n"
+"MouseMoveEvent 275 199 0 0 0 0 0\n"
+"TimerEvent 275 199 0 0 0 0 0\n"
+"MouseMoveEvent 273 200 0 0 0 0 0\n"
+"MouseMoveEvent 272 201 0 0 0 0 0\n"
+"MouseMoveEvent 271 202 0 0 0 0 0\n"
+"MouseMoveEvent 270 202 0 0 0 0 0\n"
+"TimerEvent 270 202 0 0 0 0 0\n"
+"MouseMoveEvent 269 202 0 0 0 0 0\n"
+"MouseMoveEvent 269 201 0 0 0 0 0\n"
+"MouseMoveEvent 268 200 0 0 0 0 0\n"
+"MouseMoveEvent 268 198 0 0 0 0 0\n"
+"MouseMoveEvent 268 197 0 0 0 0 0\n"
+"TimerEvent 268 197 0 0 0 0 0\n"
+"MouseMoveEvent 268 196 0 0 0 0 0\n"
+"TimerEvent 268 196 0 0 0 0 0\n"
+"MouseMoveEvent 268 197 0 0 0 0 0\n"
+"MouseMoveEvent 268 198 0 0 0 0 0\n"
+"MouseMoveEvent 268 199 0 0 0 0 0\n"
+"TimerEvent 268 199 0 0 0 0 0\n"
+"LeftButtonPressEvent 268 199 0 0 0 0 0\n"
+"TimerEvent 268 199 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 268 199 0 0 0 0 0\n"
+"LeftButtonPressEvent 268 199 0 0 0 1 0\n"
+"TimerEvent 268 199 0 0 0 1 0\n"
+"LeftButtonReleaseEvent 268 199 0 0 0 0 0\n"
+"MouseMoveEvent 268 198 0 0 0 0 0\n"
+"MouseMoveEvent 268 197 0 0 0 0 0\n"
+"MouseMoveEvent 267 196 0 0 0 0 0\n"
+"MouseMoveEvent 267 194 0 0 0 0 0\n"
+"MouseMoveEvent 267 193 0 0 0 0 0\n"
+"TimerEvent 267 193 0 0 0 0 0\n"
+"MouseMoveEvent 267 192 0 0 0 0 0\n"
+"MouseMoveEvent 267 191 0 0 0 0 0\n"
+"TimerEvent 267 191 0 0 0 0 0\n"
+"MouseMoveEvent 267 190 0 0 0 0 0\n"
+"MouseMoveEvent 267 189 0 0 0 0 0\n"
+"MouseMoveEvent 266 188 0 0 0 0 0\n"
+"TimerEvent 266 188 0 0 0 0 0\n"
+"LeftButtonPressEvent 266 188 0 0 0 0 0\n"
+"MouseMoveEvent 266 187 0 0 0 0 0\n"
+"TimerEvent 266 187 0 0 0 0 0\n"
+"MouseMoveEvent 266 186 0 0 0 0 0\n"
+"TimerEvent 266 186 0 0 0 0 0\n"
+"MouseMoveEvent 266 185 0 0 0 0 0\n"
+"TimerEvent 266 185 0 0 0 0 0\n"
+"MouseMoveEvent 266 184 0 0 0 0 0\n"
+"MouseMoveEvent 265 183 0 0 0 0 0\n"
+"MouseMoveEvent 265 182 0 0 0 0 0\n"
+"MouseMoveEvent 265 181 0 0 0 0 0\n"
+"MouseMoveEvent 265 180 0 0 0 0 0\n"
+"TimerEvent 265 180 0 0 0 0 0\n"
+"MouseMoveEvent 265 179 0 0 0 0 0\n"
+"MouseMoveEvent 265 178 0 0 0 0 0\n"
+"MouseMoveEvent 265 177 0 0 0 0 0\n"
+"TimerEvent 265 177 0 0 0 0 0\n"
+"MouseMoveEvent 265 176 0 0 0 0 0\n"
+"MouseMoveEvent 265 175 0 0 0 0 0\n"
+"MouseMoveEvent 265 174 0 0 0 0 0\n"
+"TimerEvent 265 174 0 0 0 0 0\n"
+"MouseMoveEvent 265 173 0 0 0 0 0\n"
+"MouseMoveEvent 265 172 0 0 0 0 0\n"
+"MouseMoveEvent 265 171 0 0 0 0 0\n"
+"TimerEvent 265 171 0 0 0 0 0\n"
+"MouseMoveEvent 265 170 0 0 0 0 0\n"
+"TimerEvent 265 170 0 0 0 0 0\n"
+"MouseMoveEvent 265 171 0 0 0 0 0\n"
+"MouseMoveEvent 265 172 0 0 0 0 0\n"
+"TimerEvent 265 172 0 0 0 0 0\n"
+"MouseMoveEvent 265 173 0 0 0 0 0\n"
+"MouseMoveEvent 265 174 0 0 0 0 0\n"
+"MouseMoveEvent 265 175 0 0 0 0 0\n"
+"TimerEvent 265 175 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 265 175 0 0 0 0 0\n"
+"TimerEvent 265 175 0 0 0 0 0\n"
+"MouseMoveEvent 265 174 0 0 0 0 0\n"
+"MouseMoveEvent 265 173 0 0 0 0 0\n"
+"MouseMoveEvent 265 171 0 0 0 0 0\n"
+"MouseMoveEvent 265 170 0 0 0 0 0\n"
+"MouseMoveEvent 265 168 0 0 0 0 0\n"
+"TimerEvent 265 168 0 0 0 0 0\n"
+"MouseMoveEvent 263 166 0 0 0 0 0\n"
+"MouseMoveEvent 256 163 0 0 0 0 0\n"
+"MouseMoveEvent 247 159 0 0 0 0 0\n"
+"MouseMoveEvent 239 157 0 0 0 0 0\n"
+"MouseMoveEvent 232 155 0 0 0 0 0\n"
+"MouseMoveEvent 228 153 0 0 0 0 0\n"
+"TimerEvent 228 153 0 0 0 0 0\n"
+"MouseMoveEvent 227 152 0 0 0 0 0\n"
+"MouseMoveEvent 226 152 0 0 0 0 0\n"
+"MouseMoveEvent 224 151 0 0 0 0 0\n"
+"MouseMoveEvent 222 150 0 0 0 0 0\n"
+"MouseMoveEvent 219 148 0 0 0 0 0\n"
+"MouseMoveEvent 215 147 0 0 0 0 0\n"
+"TimerEvent 215 147 0 0 0 0 0\n"
+"MouseMoveEvent 210 145 0 0 0 0 0\n"
+"MouseMoveEvent 202 142 0 0 0 0 0\n"
+"MouseMoveEvent 193 138 0 0 0 0 0\n"
+"MouseMoveEvent 184 135 0 0 0 0 0\n"
+"MouseMoveEvent 177 133 0 0 0 0 0\n"
+"MouseMoveEvent 173 133 0 0 0 0 0\n"
+"TimerEvent 173 133 0 0 0 0 0\n"
+"MouseMoveEvent 173 134 0 0 0 0 0\n"
+"MouseMoveEvent 174 138 0 0 0 0 0\n"
+"MouseMoveEvent 175 146 0 0 0 0 0\n"
+"MouseMoveEvent 175 150 0 0 0 0 0\n"
+"MouseMoveEvent 175 151 0 0 0 0 0\n"
+"TimerEvent 175 151 0 0 0 0 0\n"
+"MouseMoveEvent 175 150 0 0 0 0 0\n"
+"MouseMoveEvent 175 149 0 0 0 0 0\n"
+"MouseMoveEvent 174 148 0 0 0 0 0\n"
+"MouseMoveEvent 172 147 0 0 0 0 0\n"
+"MouseMoveEvent 169 145 0 0 0 0 0\n"
+"MouseMoveEvent 166 143 0 0 0 0 0\n"
+"TimerEvent 166 143 0 0 0 0 0\n"
+"MouseMoveEvent 163 141 0 0 0 0 0\n"
+"MouseMoveEvent 162 140 0 0 0 0 0\n"
+"MouseMoveEvent 160 139 0 0 0 0 0\n"
+"MouseMoveEvent 160 138 0 0 0 0 0\n"
+"MouseMoveEvent 159 137 0 0 0 0 0\n"
+"MouseMoveEvent 158 136 0 0 0 0 0\n"
+"TimerEvent 158 136 0 0 0 0 0\n"
+"MouseMoveEvent 156 135 0 0 0 0 0\n"
+"MouseMoveEvent 155 134 0 0 0 0 0\n"
+"MouseMoveEvent 154 133 0 0 0 0 0\n"
+"MouseMoveEvent 152 132 0 0 0 0 0\n"
+"MouseMoveEvent 151 130 0 0 0 0 0\n"
+"MouseMoveEvent 150 129 0 0 0 0 0\n"
+"TimerEvent 150 129 0 0 0 0 0\n"
+"MouseMoveEvent 149 128 0 0 0 0 0\n"
+"MouseMoveEvent 148 127 0 0 0 0 0\n"
+"MouseMoveEvent 148 126 0 0 0 0 0\n"
+"MouseMoveEvent 148 124 0 0 0 0 0\n"
+"MouseMoveEvent 148 123 0 0 0 0 0\n"
+"MouseMoveEvent 147 123 0 0 0 0 0\n"
+"TimerEvent 147 123 0 0 0 0 0\n"
+"MouseMoveEvent 147 122 0 0 0 0 0\n"
+"MouseMoveEvent 147 121 0 0 0 0 0\n"
+"TimerEvent 147 121 0 0 0 0 0\n"
+"MouseMoveEvent 147 120 0 0 0 0 0\n"
+"MouseMoveEvent 147 119 0 0 0 0 0\n"
+"TimerEvent 147 119 0 0 0 0 0\n"
+"MouseMoveEvent 147 118 0 0 0 0 0\n"
+"MouseMoveEvent 147 117 0 0 0 0 0\n"
+"MouseMoveEvent 145 116 0 0 0 0 0\n"
+"MouseMoveEvent 144 115 0 0 0 0 0\n"
+"TimerEvent 144 115 0 0 0 0 0\n"
+"MouseMoveEvent 142 114 0 0 0 0 0\n"
+"MouseMoveEvent 139 114 0 0 0 0 0\n"
+"MouseMoveEvent 136 113 0 0 0 0 0\n"
+"MouseMoveEvent 135 112 0 0 0 0 0\n"
+"MouseMoveEvent 134 112 0 0 0 0 0\n"
+"TimerEvent 134 112 0 0 0 0 0\n"
+"LeftButtonPressEvent 134 112 0 0 0 0 0\n"
+"TimerEvent 134 112 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 134 112 0 0 0 0 0\n"
+"LeftButtonPressEvent 134 112 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 134 112 0 0 0 0 0\n"
+"LeftButtonPressEvent 134 112 0 0 0 1 0\n"
+"TimerEvent 134 112 0 0 0 1 0\n"
+"LeftButtonReleaseEvent 134 112 0 0 0 0 0\n"
+"MouseMoveEvent 133 112 0 0 0 0 0\n"
+"MouseMoveEvent 132 110 0 0 0 0 0\n"
+"MouseMoveEvent 130 109 0 0 0 0 0\n"
+"MouseMoveEvent 129 109 0 0 0 0 0\n"
+"TimerEvent 129 109 0 0 0 0 0\n"
+"MouseMoveEvent 128 109 0 0 0 0 0\n"
+"MouseMoveEvent 122 109 0 0 0 0 0\n"
+"MouseMoveEvent 116 109 0 0 0 0 0\n"
+"MouseMoveEvent 105 109 0 0 0 0 0\n"
+"MouseMoveEvent 97 111 0 0 0 0 0\n"
+"TimerEvent 97 111 0 0 0 0 0\n"
+"MouseMoveEvent 91 111 0 0 0 0 0\n"
+"MouseMoveEvent 87 113 0 0 0 0 0\n"
+"MouseMoveEvent 86 113 0 0 0 0 0\n"
+"TimerEvent 86 113 0 0 0 0 0\n"
+"MouseMoveEvent 84 113 0 0 0 0 0\n"
+"MouseMoveEvent 78 112 0 0 0 0 0\n"
+"MouseMoveEvent 68 109 0 0 0 0 0\n"
+"MouseMoveEvent 57 107 0 0 0 0 0\n"
+"MouseMoveEvent 49 106 0 0 0 0 0\n"
+"MouseMoveEvent 44 104 0 0 0 0 0\n"
+"TimerEvent 44 104 0 0 0 0 0\n"
+"MouseMoveEvent 40 104 0 0 0 0 0\n"
+"TimerEvent 40 104 0 0 0 0 0\n"
+"MouseMoveEvent 41 104 0 0 0 0 0\n"
+"MouseMoveEvent 42 104 0 0 0 0 0\n"
+"TimerEvent 42 104 0 0 0 0 0\n"
+"MouseMoveEvent 44 105 0 0 0 0 0\n"
+"MouseMoveEvent 48 106 0 0 0 0 0\n"
+"MouseMoveEvent 54 107 0 0 0 0 0\n"
+"MouseMoveEvent 60 107 0 0 0 0 0\n"
+"MouseMoveEvent 66 109 0 0 0 0 0\n"
+"MouseMoveEvent 67 109 0 0 0 0 0\n"
+"TimerEvent 67 109 0 0 0 0 0\n"
+"LeftButtonPressEvent 67 109 0 0 0 0 0\n"
+"MouseMoveEvent 67 110 0 0 0 0 0\n"
+"MouseMoveEvent 70 110 0 0 0 0 0\n"
+"MouseMoveEvent 72 111 0 0 0 0 0\n"
+"MouseMoveEvent 73 112 0 0 0 0 0\n"
+"TimerEvent 73 112 0 0 0 0 0\n"
+"MouseMoveEvent 74 112 0 0 0 0 0\n"
+"TimerEvent 74 112 0 0 0 0 0\n"
+"MouseMoveEvent 76 112 0 0 0 0 0\n"
+"MouseMoveEvent 77 112 0 0 0 0 0\n"
+"TimerEvent 77 112 0 0 0 0 0\n"
+"MouseMoveEvent 77 111 0 0 0 0 0\n"
+"MouseMoveEvent 78 111 0 0 0 0 0\n"
+"MouseMoveEvent 80 110 0 0 0 0 0\n"
+"MouseMoveEvent 81 110 0 0 0 0 0\n"
+"TimerEvent 81 110 0 0 0 0 0\n"
+"MouseMoveEvent 81 109 0 0 0 0 0\n"
+"TimerEvent 81 109 0 0 0 0 0\n"
+"MouseMoveEvent 84 108 0 0 0 0 0\n"
+"MouseMoveEvent 88 106 0 0 0 0 0\n"
+"MouseMoveEvent 90 106 0 0 0 0 0\n"
+"MouseMoveEvent 91 105 0 0 0 0 0\n"
+"MouseMoveEvent 92 105 0 0 0 0 0\n"
+"TimerEvent 92 105 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 92 105 0 0 0 0 0\n"
+"TimerEvent 92 105 0 0 0 0 0\n"
+"MouseMoveEvent 98 105 0 0 0 0 0\n"
+"MouseMoveEvent 104 105 0 0 0 0 0\n"
+"MouseMoveEvent 114 103 0 0 0 0 0\n"
+"MouseMoveEvent 127 100 0 0 0 0 0\n"
+"MouseMoveEvent 148 93 0 0 0 0 0\n"
+"MouseMoveEvent 177 86 0 0 0 0 0\n"
+"TimerEvent 177 86 0 0 0 0 0\n"
+"MouseMoveEvent 212 75 0 0 0 0 0\n"
+"MouseMoveEvent 251 64 0 0 0 0 0\n"
+"MouseMoveEvent 302 55 0 0 0 0 0\n"
+"MouseMoveEvent 363 46 0 0 0 0 0\n"
+"LeaveEvent 426 37 0 0 0 0 0\n"
+"TimerEvent 426 37 0 0 0 0 0\n"
+"EnterEvent 237 16 0 0 0 0 0\n"
+"MouseMoveEvent 237 16 0 0 0 0 0\n"
+"MouseMoveEvent 222 33 0 0 0 0 0\n"
+"MouseMoveEvent 211 48 0 0 0 0 0\n"
+"MouseMoveEvent 202 59 0 0 0 0 0\n"
+"MouseMoveEvent 195 66 0 0 0 0 0\n"
+"MouseMoveEvent 193 70 0 0 0 0 0\n"
+"TimerEvent 193 70 0 0 0 0 0\n"
+"MouseMoveEvent 193 72 0 0 0 0 0\n"
+"MouseMoveEvent 193 74 0 0 0 0 0\n"
+"MouseMoveEvent 193 75 0 0 0 0 0\n"
+"MouseMoveEvent 193 77 0 0 0 0 0\n"
+"MouseMoveEvent 193 78 0 0 0 0 0\n"
+"MouseMoveEvent 193 79 0 0 0 0 0\n"
+"TimerEvent 193 79 0 0 0 0 0\n"
+"MouseMoveEvent 193 81 0 0 0 0 0\n"
+"MouseMoveEvent 193 82 0 0 0 0 0\n"
+"MouseMoveEvent 193 83 0 0 0 0 0\n"
+"MouseMoveEvent 193 84 0 0 0 0 0\n"
+"MouseMoveEvent 193 85 0 0 0 0 0\n"
+"TimerEvent 193 85 0 0 0 0 0\n"
+"MouseMoveEvent 193 86 0 0 0 0 0\n"
+"MouseMoveEvent 193 87 0 0 0 0 0\n"
+"MouseMoveEvent 193 88 0 0 0 0 0\n"
+"MouseMoveEvent 194 89 0 0 0 0 0\n"
+"MouseMoveEvent 194 90 0 0 0 0 0\n"
+"MouseMoveEvent 194 91 0 0 0 0 0\n"
+"TimerEvent 194 91 0 0 0 0 0\n"
+"MouseMoveEvent 194 92 0 0 0 0 0\n"
+"MouseMoveEvent 193 93 0 0 0 0 0\n"
+"MouseMoveEvent 191 94 0 0 0 0 0\n"
+"MouseMoveEvent 189 95 0 0 0 0 0\n"
+"MouseMoveEvent 186 96 0 0 0 0 0\n"
+"MouseMoveEvent 182 97 0 0 0 0 0\n"
+"TimerEvent 182 97 0 0 0 0 0\n"
+"MouseMoveEvent 180 98 0 0 0 0 0\n"
+"MouseMoveEvent 175 99 0 0 0 0 0\n"
+"MouseMoveEvent 171 100 0 0 0 0 0\n"
+"MouseMoveEvent 169 100 0 0 0 0 0\n"
+"MouseMoveEvent 166 101 0 0 0 0 0\n"
+"MouseMoveEvent 164 101 0 0 0 0 0\n"
+"TimerEvent 164 101 0 0 0 0 0\n"
+"MouseMoveEvent 163 101 0 0 0 0 0\n"
+"TimerEvent 163 101 0 0 0 0 0\n"
+"MouseMoveEvent 162 101 0 0 0 0 0\n"
+"TimerEvent 162 101 0 0 0 0 0\n"
+"MouseMoveEvent 161 102 0 0 0 0 0\n"
+"MouseMoveEvent 160 102 0 0 0 0 0\n"
+"MouseMoveEvent 158 103 0 0 0 0 0\n"
+"MouseMoveEvent 157 103 0 0 0 0 0\n"
+"MouseMoveEvent 156 104 0 0 0 0 0\n"
+"TimerEvent 156 104 0 0 0 0 0\n"
+"MouseMoveEvent 155 104 0 0 0 0 0\n"
+"MouseMoveEvent 154 104 0 0 0 0 0\n"
+"MouseMoveEvent 154 105 0 0 0 0 0\n"
+"TimerEvent 154 105 0 0 0 0 0\n"
+"MouseMoveEvent 153 105 0 0 0 0 0\n"
+"MouseMoveEvent 151 106 0 0 0 0 0\n"
+"MouseMoveEvent 150 106 0 0 0 0 0\n"
+"MouseMoveEvent 149 106 0 0 0 0 0\n"
+"MouseMoveEvent 149 107 0 0 0 0 0\n"
+"TimerEvent 149 107 0 0 0 0 0\n"
+"MouseMoveEvent 148 107 0 0 0 0 0\n"
+"MouseMoveEvent 147 108 0 0 0 0 0\n"
+"MouseMoveEvent 146 108 0 0 0 0 0\n"
+"TimerEvent 146 108 0 0 0 0 0\n"
+"MouseMoveEvent 145 109 0 0 0 0 0\n"
+"MouseMoveEvent 144 109 0 0 0 0 0\n"
+"MouseMoveEvent 144 110 0 0 0 0 0\n"
+"MouseMoveEvent 143 110 0 0 0 0 0\n"
+"TimerEvent 143 110 0 0 0 0 0\n"
+"MouseMoveEvent 142 110 0 0 0 0 0\n"
+"MouseMoveEvent 142 111 0 0 0 0 0\n"
+"MouseMoveEvent 141 111 0 0 0 0 0\n"
+"TimerEvent 141 111 0 0 0 0 0\n"
+"MouseMoveEvent 140 111 0 0 0 0 0\n"
+"TimerEvent 140 111 0 0 0 0 0\n"
+"MouseMoveEvent 140 110 0 0 0 0 0\n"
+"MouseMoveEvent 139 110 0 0 0 0 0\n"
+"MouseMoveEvent 138 109 0 0 0 0 0\n"
+"MouseMoveEvent 138 108 0 0 0 0 0\n"
+"TimerEvent 138 108 0 0 0 0 0\n"
+"MouseMoveEvent 137 108 0 0 0 0 0\n"
+"MouseMoveEvent 136 108 0 0 0 0 0\n"
+"MouseMoveEvent 136 109 0 0 0 0 0\n"
+"TimerEvent 136 109 0 0 0 0 0\n"
+"LeftButtonPressEvent 136 109 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 136 109 0 0 0 0 0\n"
+"LeftButtonPressEvent 136 109 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 136 109 0 0 0 0 0\n"
+"LeftButtonPressEvent 136 109 0 0 0 1 0\n"
+"TimerEvent 136 109 0 0 0 1 0\n"
+"LeftButtonReleaseEvent 136 109 0 0 0 0 0\n"
+"MouseMoveEvent 136 101 0 0 0 0 0\n"
+"MouseMoveEvent 146 80 0 0 0 0 0\n"
+"MouseMoveEvent 171 59 0 0 0 0 0\n"
+"MouseMoveEvent 218 42 0 0 0 0 0\n"
+"MouseMoveEvent 297 27 0 0 0 0 0\n"
+"LeaveEvent 400 18 0 0 0 0 0\n"
+"TimerEvent 400 18 0 0 0 0 0\n"
+"EnterEvent 392 259 0 0 0 0 0\n"
+"MouseMoveEvent 392 259 0 0 0 0 0\n"
+"MouseMoveEvent 381 264 0 0 0 0 0\n"
+"MouseMoveEvent 370 269 0 0 0 0 0\n"
+"MouseMoveEvent 359 274 0 0 0 0 0\n"
+"MouseMoveEvent 348 279 0 0 0 0 0\n"
+"MouseMoveEvent 339 281 0 0 0 0 0\n"
+"TimerEvent 339 281 0 0 0 0 0\n"
+"MouseMoveEvent 328 284 0 0 0 0 0\n"
+"MouseMoveEvent 319 286 0 0 0 0 0\n"
+"MouseMoveEvent 310 289 0 0 0 0 0\n"
+"MouseMoveEvent 301 292 0 0 0 0 0\n"
+"MouseMoveEvent 296 292 0 0 0 0 0\n"
+"MouseMoveEvent 292 294 0 0 0 0 0\n"
+"TimerEvent 292 294 0 0 0 0 0\n"
+"MouseMoveEvent 288 297 0 0 0 0 0\n"
+"MouseMoveEvent 284 298 0 0 0 0 0\n"
+"MouseMoveEvent 282 299 0 0 0 0 0\n"
+"LeaveEvent 281 300 0 0 0 0 0\n"
+"TimerEvent 281 300 0 0 0 0 0\n"
+"KeyPressEvent 276 304 0 0 98 1 b\n"
+"TimerEvent 0 0 0 0 0 0 0\n"
+"EnterEvent 385 3 0 0 0 0 0\n"
+"MouseMoveEvent 385 3 0 0 0 0 0\n"
+"LeaveEvent 406 10 0 0 0 0 0\n"
+"TimerEvent 406 10 0 0 0 0 0\n"
+"EnterEvent 391 93 0 0 0 0 0\n"
+"MouseMoveEvent 391 93 0 0 0 0 0\n"
+"MouseMoveEvent 372 93 0 0 0 0 0\n"
+"MouseMoveEvent 351 96 0 0 0 0 0\n"
+"MouseMoveEvent 330 99 0 0 0 0 0\n"
+"MouseMoveEvent 309 101 0 0 0 0 0\n"
+"MouseMoveEvent 284 104 0 0 0 0 0\n"
+"TimerEvent 284 104 0 0 0 0 0\n"
+"MouseMoveEvent 263 107 0 0 0 0 0\n"
+"MouseMoveEvent 244 110 0 0 0 0 0\n"
+"MouseMoveEvent 229 112 0 0 0 0 0\n"
+"MouseMoveEvent 216 112 0 0 0 0 0\n"
+"MouseMoveEvent 207 114 0 0 0 0 0\n"
+"MouseMoveEvent 198 116 0 0 0 0 0\n"
+"TimerEvent 198 116 0 0 0 0 0\n"
+"MouseMoveEvent 192 116 0 0 0 0 0\n"
+"MouseMoveEvent 185 118 0 0 0 0 0\n"
+"MouseMoveEvent 179 118 0 0 0 0 0\n"
+"MouseMoveEvent 173 118 0 0 0 0 0\n"
+"MouseMoveEvent 166 120 0 0 0 0 0\n"
+"MouseMoveEvent 160 120 0 0 0 0 0\n"
+"TimerEvent 160 120 0 0 0 0 0\n"
+"MouseMoveEvent 153 120 0 0 0 0 0\n"
+"MouseMoveEvent 146 120 0 0 0 0 0\n"
+"MouseMoveEvent 139 120 0 0 0 0 0\n"
+"MouseMoveEvent 133 118 0 0 0 0 0\n"
+"MouseMoveEvent 127 118 0 0 0 0 0\n"
+"MouseMoveEvent 121 118 0 0 0 0 0\n"
+"TimerEvent 121 118 0 0 0 0 0\n"
+"MouseMoveEvent 120 118 0 0 0 0 0\n"
+"MouseMoveEvent 119 118 0 0 0 0 0\n"
+"TimerEvent 119 118 0 0 0 0 0\n"
+"MouseMoveEvent 119 117 0 0 0 0 0\n"
+"MouseMoveEvent 119 116 0 0 0 0 0\n"
+"MouseMoveEvent 121 114 0 0 0 0 0\n"
+"MouseMoveEvent 122 114 0 0 0 0 0\n"
+"TimerEvent 122 114 0 0 0 0 0\n"
+"MouseMoveEvent 123 113 0 0 0 0 0\n"
+"MouseMoveEvent 124 113 0 0 0 0 0\n"
+"MouseMoveEvent 125 113 0 0 0 0 0\n"
+"TimerEvent 125 113 0 0 0 0 0\n"
+"MouseMoveEvent 125 112 0 0 0 0 0\n"
+"MouseMoveEvent 127 112 0 0 0 0 0\n"
+"MouseMoveEvent 129 111 0 0 0 0 0\n"
+"MouseMoveEvent 131 110 0 0 0 0 0\n"
+"TimerEvent 131 110 0 0 0 0 0\n"
+"MouseMoveEvent 132 110 0 0 0 0 0\n"
+"MouseMoveEvent 132 109 0 0 0 0 0\n"
+"MouseMoveEvent 133 109 0 0 0 0 0\n"
+"TimerEvent 133 109 0 0 0 0 0\n"
+"LeftButtonPressEvent 133 109 0 0 0 0 0\n"
+"TimerEvent 133 109 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 133 109 0 0 0 0 0\n"
+"LeftButtonPressEvent 133 109 0 0 0 1 0\n"
+"TimerEvent 133 109 0 0 0 1 0\n"
+"LeftButtonReleaseEvent 133 109 0 0 0 0 0\n"
+"MouseMoveEvent 134 109 0 0 0 0 0\n"
+"MouseMoveEvent 134 110 0 0 0 0 0\n"
+"MouseMoveEvent 134 111 0 0 0 0 0\n"
+"TimerEvent 134 111 0 0 0 0 0\n"
+"MouseMoveEvent 134 112 0 0 0 0 0\n"
+"MouseMoveEvent 134 113 0 0 0 0 0\n"
+"MouseMoveEvent 134 114 0 0 0 0 0\n"
+"MouseMoveEvent 134 116 0 0 0 0 0\n"
+"TimerEvent 134 116 0 0 0 0 0\n"
+"MouseMoveEvent 135 116 0 0 0 0 0\n"
+"MouseMoveEvent 135 117 0 0 0 0 0\n"
+"TimerEvent 135 117 0 0 0 0 0\n"
+"LeftButtonPressEvent 135 117 0 0 0 0 0\n"
+"MouseMoveEvent 135 118 0 0 0 0 0\n"
+"MouseMoveEvent 135 119 0 0 0 0 0\n"
+"TimerEvent 135 119 0 0 0 0 0\n"
+"MouseMoveEvent 135 120 0 0 0 0 0\n"
+"MouseMoveEvent 135 121 0 0 0 0 0\n"
+"MouseMoveEvent 135 122 0 0 0 0 0\n"
+"MouseMoveEvent 135 123 0 0 0 0 0\n"
+"MouseMoveEvent 135 124 0 0 0 0 0\n"
+"TimerEvent 135 124 0 0 0 0 0\n"
+"MouseMoveEvent 135 125 0 0 0 0 0\n"
+"MouseMoveEvent 135 126 0 0 0 0 0\n"
+"MouseMoveEvent 135 127 0 0 0 0 0\n"
+"TimerEvent 135 127 0 0 0 0 0\n"
+"MouseMoveEvent 136 128 0 0 0 0 0\n"
+"MouseMoveEvent 136 130 0 0 0 0 0\n"
+"MouseMoveEvent 136 131 0 0 0 0 0\n"
+"MouseMoveEvent 136 132 0 0 0 0 0\n"
+"MouseMoveEvent 137 133 0 0 0 0 0\n"
+"MouseMoveEvent 137 134 0 0 0 0 0\n"
+"TimerEvent 137 134 0 0 0 0 0\n"
+"MouseMoveEvent 137 136 0 0 0 0 0\n"
+"MouseMoveEvent 137 137 0 0 0 0 0\n"
+"MouseMoveEvent 137 138 0 0 0 0 0\n"
+"MouseMoveEvent 137 139 0 0 0 0 0\n"
+"MouseMoveEvent 137 140 0 0 0 0 0\n"
+"MouseMoveEvent 137 142 0 0 0 0 0\n"
+"TimerEvent 137 142 0 0 0 0 0\n"
+"MouseMoveEvent 138 143 0 0 0 0 0\n"
+"MouseMoveEvent 138 144 0 0 0 0 0\n"
+"MouseMoveEvent 138 145 0 0 0 0 0\n"
+"MouseMoveEvent 138 146 0 0 0 0 0\n"
+"MouseMoveEvent 138 147 0 0 0 0 0\n"
+"MouseMoveEvent 139 148 0 0 0 0 0\n"
+"TimerEvent 139 148 0 0 0 0 0\n"
+"MouseMoveEvent 139 149 0 0 0 0 0\n"
+"MouseMoveEvent 139 151 0 0 0 0 0\n"
+"MouseMoveEvent 139 152 0 0 0 0 0\n"
+"MouseMoveEvent 140 153 0 0 0 0 0\n"
+"MouseMoveEvent 140 155 0 0 0 0 0\n"
+"MouseMoveEvent 140 156 0 0 0 0 0\n"
+"TimerEvent 140 156 0 0 0 0 0\n"
+"MouseMoveEvent 140 157 0 0 0 0 0\n"
+"MouseMoveEvent 140 158 0 0 0 0 0\n"
+"MouseMoveEvent 140 159 0 0 0 0 0\n"
+"TimerEvent 140 159 0 0 0 0 0\n"
+"MouseMoveEvent 140 160 0 0 0 0 0\n"
+"TimerEvent 140 160 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 140 160 0 0 0 0 0\n"
+"TimerEvent 140 160 0 0 0 0 0\n"
+"MouseMoveEvent 140 159 0 0 0 0 0\n"
+"MouseMoveEvent 140 158 0 0 0 0 0\n"
+"MouseMoveEvent 140 157 0 0 0 0 0\n"
+"MouseMoveEvent 140 156 0 0 0 0 0\n"
+"MouseMoveEvent 140 154 0 0 0 0 0\n"
+"MouseMoveEvent 140 153 0 0 0 0 0\n"
+"TimerEvent 140 153 0 0 0 0 0\n"
+"MouseMoveEvent 140 152 0 0 0 0 0\n"
+"MouseMoveEvent 140 150 0 0 0 0 0\n"
+"MouseMoveEvent 140 149 0 0 0 0 0\n"
+"MouseMoveEvent 139 147 0 0 0 0 0\n"
+"MouseMoveEvent 139 146 0 0 0 0 0\n"
+"TimerEvent 139 146 0 0 0 0 0\n"
+"MouseMoveEvent 139 144 0 0 0 0 0\n"
+"MouseMoveEvent 139 143 0 0 0 0 0\n"
+"MouseMoveEvent 139 142 0 0 0 0 0\n"
+"MouseMoveEvent 139 141 0 0 0 0 0\n"
+"MouseMoveEvent 139 140 0 0 0 0 0\n"
+"MouseMoveEvent 139 139 0 0 0 0 0\n"
+"TimerEvent 139 139 0 0 0 0 0\n"
+"MouseMoveEvent 139 138 0 0 0 0 0\n"
+"MouseMoveEvent 138 136 0 0 0 0 0\n"
+"MouseMoveEvent 138 135 0 0 0 0 0\n"
+"MouseMoveEvent 138 134 0 0 0 0 0\n"
+"MouseMoveEvent 138 133 0 0 0 0 0\n"
+"MouseMoveEvent 138 132 0 0 0 0 0\n"
+"TimerEvent 138 132 0 0 0 0 0\n"
+"MouseMoveEvent 138 131 0 0 0 0 0\n"
+"MouseMoveEvent 137 130 0 0 0 0 0\n"
+"MouseMoveEvent 137 129 0 0 0 0 0\n"
+"MouseMoveEvent 137 128 0 0 0 0 0\n"
+"MouseMoveEvent 137 127 0 0 0 0 0\n"
+"MouseMoveEvent 137 126 0 0 0 0 0\n"
+"TimerEvent 137 126 0 0 0 0 0\n"
+"MouseMoveEvent 137 125 0 0 0 0 0\n"
+"MouseMoveEvent 137 124 0 0 0 0 0\n"
+"MouseMoveEvent 137 123 0 0 0 0 0\n"
+"MouseMoveEvent 137 122 0 0 0 0 0\n"
+"MouseMoveEvent 137 121 0 0 0 0 0\n"
+"MouseMoveEvent 137 120 0 0 0 0 0\n"
+"TimerEvent 137 120 0 0 0 0 0\n"
+"MouseMoveEvent 137 119 0 0 0 0 0\n"
+"MouseMoveEvent 137 118 0 0 0 0 0\n"
+"MouseMoveEvent 137 117 0 0 0 0 0\n"
+"MouseMoveEvent 136 116 0 0 0 0 0\n"
+"MouseMoveEvent 136 115 0 0 0 0 0\n"
+"TimerEvent 136 115 0 0 0 0 0\n"
+"MouseMoveEvent 136 114 0 0 0 0 0\n"
+"MouseMoveEvent 136 113 0 0 0 0 0\n"
+"MouseMoveEvent 136 112 0 0 0 0 0\n"
+"TimerEvent 136 112 0 0 0 0 0\n"
+"MouseMoveEvent 136 111 0 0 0 0 0\n"
+"MouseMoveEvent 136 110 0 0 0 0 0\n"
+"MouseMoveEvent 136 109 0 0 0 0 0\n"
+"TimerEvent 136 109 0 0 0 0 0\n"
+"MouseMoveEvent 136 108 0 0 0 0 0\n"
+"MouseMoveEvent 136 107 0 0 0 0 0\n"
+"TimerEvent 136 107 0 0 0 0 0\n"
+"MouseMoveEvent 136 106 0 0 0 0 0\n"
+"TimerEvent 136 106 0 0 0 0 0\n"
+"LeftButtonPressEvent 136 106 0 0 0 0 0\n"
+"LeftButtonReleaseEvent 136 106 0 0 0 0 0\n"
+"LeftButtonPressEvent 136 106 0 0 0 1 0\n"
+"TimerEvent 136 106 0 0 0 1 0\n"
+"LeftButtonReleaseEvent 136 106 0 0 0 0 0\n"
+"MouseMoveEvent 143 94 0 0 0 0 0\n"
+"MouseMoveEvent 171 73 0 0 0 0 0\n"
+"MouseMoveEvent 224 50 0 0 0 0 0\n"
+"MouseMoveEvent 299 29 0 0 0 0 0\n"
+"MouseMoveEvent 388 12 0 0 0 0 0\n"
+"LeaveEvent 489 -5 0 0 0 0 0\n"
+"TimerEvent 489 -5 0 0 0 0 0\n"
 ;
 
 //----------------------------------------------------------------------------
@@ -703,7 +735,7 @@ int TestControlPointsHandleItem(int, char * [])
   vtkSmartPointer<vtkInteractorEventRecorder> recorder =
     vtkSmartPointer<vtkInteractorEventRecorder>::New();
   recorder->SetInteractor(view->GetInteractor());
-//  recorder->SetKeyPressActivationValue('b');
+  // recorder->SetKeyPressActivationValue('b');
 
   recorder->ReadFromInputStringOn();
   recorder->SetInputString(TestControlPointsHandleItemLog);
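For reference, the playback path these recorded-event strings feed is the one configured in the hunk above: the recorder reads the log from a string and replays it against the view's interactor. The sketch below reconstructs that pattern in isolation; PlayEventLog, iren, and eventLog are placeholder names for this sketch and do not come from the patch.

#include "vtkInteractorEventRecorder.h"
#include "vtkRenderWindow.h"
#include "vtkRenderWindowInteractor.h"
#include "vtkSmartPointer.h"

// Replay a recorded interaction log (such as TestControlPointsHandleItemLog)
// on an interactor that is already attached to a render window and scene.
void PlayEventLog(vtkRenderWindowInteractor* iren, const char* eventLog)
{
  vtkSmartPointer<vtkInteractorEventRecorder> recorder =
    vtkSmartPointer<vtkInteractorEventRecorder>::New();
  recorder->SetInteractor(iren);
  recorder->ReadFromInputStringOn();  // read events from a string, not a file
  recorder->SetInputString(eventLog);

  iren->Initialize();
  iren->GetRenderWindow()->Render();
  recorder->Play();                   // dispatch the recorded events
  recorder->Off();                    // stop observing once playback is done
}
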
diff --git a/Charts/Core/Testing/Cxx/TestControlPointsItemEvents.cxx b/Charts/Core/Testing/Cxx/TestControlPointsItemEvents.cxx
index 003a347..7300a14 100644
--- a/Charts/Core/Testing/Cxx/TestControlPointsItemEvents.cxx
+++ b/Charts/Core/Testing/Cxx/TestControlPointsItemEvents.cxx
@@ -114,8 +114,8 @@ int TestControlPointsItemEvents(int, char*[])
       cbk->EventSpy[vtkCommand::StartInteractionEvent] != 1 ||
       cbk->EventSpy[vtkCommand::InteractionEvent] != 1 ||
       cbk->EventSpy[vtkCommand::EndInteractionEvent] != 1 ||
-      cbk->EventSpy[vtkCommand::StartEvent] != 0 ||
-      cbk->EventSpy[vtkCommand::EndEvent] != 0)
+      cbk->EventSpy[vtkCommand::StartEvent] != 2 ||
+      cbk->EventSpy[vtkCommand::EndEvent] != 2)
     {
     std::cerr << "Wrong number of fired events : "
               << cbk->EventSpy[vtkCommand::ModifiedEvent] << " "
diff --git a/Charts/Core/Testing/Cxx/TestFunctionalBagPlot.cxx b/Charts/Core/Testing/Cxx/TestFunctionalBagPlot.cxx
new file mode 100644
index 0000000..e03c59b
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestFunctionalBagPlot.cxx
@@ -0,0 +1,132 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestFunctionalBagPlot.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkChartXY.h"
+#include "vtkChartLegend.h"
+#include "vtkContextScene.h"
+#include "vtkContextView.h"
+#include "vtkDoubleArray.h"
+#include "vtkLookupTable.h"
+#include "vtkPlotFunctionalBag.h"
+#include "vtkMath.h"
+#include "vtkNew.h"
+#include "vtkPen.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+#include <sstream>
+
+//----------------------------------------------------------------------------
+int TestFunctionalBagPlot(int, char * [])
+{
+  // Creates an input table
+  const int numCols = 7;
+  const int numVals = 100;
+
+  vtkNew<vtkTable> inputTable;
+  vtkNew<vtkDoubleArray> arr[numCols];
+  for (int i = 0; i < numCols; i++)
+    {
+    std::stringstream ss;
+    ss << "Y" << i;
+    arr[i]->SetName(ss.str().c_str());
+    arr[i]->SetNumberOfValues(numVals);
+    for (int j = 0; j < numVals; j++)
+      {
+      arr[i]->SetValue(j, (i+1) *
+        fabs(sin((j * 2.f * vtkMath::Pi()) /
+        static_cast<float>(numVals))) * j + i * 20);
+      }
+    inputTable->AddColumn(arr[i].GetPointer());
+    }
+
+  // Create an X-axis column
+  vtkNew<vtkDoubleArray> xArr;
+  xArr->SetName("X");
+  xArr->SetNumberOfValues(numVals);
+  for (int j = 0; j < numVals; j++)
+    {
+    xArr->SetValue(j, j * 2.0);
+    }
+  inputTable->AddColumn(xArr.GetPointer());
+
+  // Create the bag columns
+  vtkNew<vtkDoubleArray> q3Arr;
+  q3Arr->SetName("Q3");
+  q3Arr->SetNumberOfComponents(2);
+  q3Arr->SetNumberOfTuples(numVals);
+  vtkNew<vtkDoubleArray> q2Arr;
+  q2Arr->SetName("Q2");
+  q2Arr->SetNumberOfComponents(2);
+  q2Arr->SetNumberOfTuples(numVals);
+
+  for (int i = 0; i < numVals; i++)
+    {
+    double v0, v1;
+    v0 = arr[1]->GetVariantValue(i).ToFloat();
+    v1 = arr[5]->GetVariantValue(i).ToFloat();
+    q3Arr->SetTuple2(i, v0, v1);
+
+    v0 = arr[2]->GetVariantValue(i).ToFloat();
+    v1 = arr[4]->GetVariantValue(i).ToFloat();
+    q2Arr->SetTuple2(i, v0, v1);
+    }
+
+  inputTable->AddColumn(q3Arr.GetPointer());
+  inputTable->AddColumn(q2Arr.GetPointer());
+
+  // Set up a 2D scene and add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(400, 400);
+  view->GetRenderWindow()->SetMultiSamples(0);
+  vtkNew<vtkChartXY> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+  chart->SetShowLegend(true);
+  chart->GetLegend()->SetHorizontalAlignment(vtkChartLegend::LEFT);
+  chart->GetLegend()->SetVerticalAlignment(vtkChartLegend::TOP);
+
+  // Create the functional bag plots
+  vtkNew<vtkPlotFunctionalBag> q3Plot;
+  q3Plot->SetColor(0.5, 0, 0);
+  q3Plot->SetInputData(inputTable.GetPointer(), "X", "Q3");
+  chart->AddPlot(q3Plot.GetPointer());
+
+  vtkNew<vtkPlotFunctionalBag> q2Plot;
+  q2Plot->SetColor(1., 0, 0);
+  q2Plot->SetInputData(inputTable.GetPointer(), "X", "Q2");
+  chart->AddPlot(q2Plot.GetPointer());
+
+  vtkNew<vtkLookupTable> lookup;
+  lookup->SetNumberOfColors(numCols);
+  lookup->SetRange(0, numCols-1);
+  lookup->Build();
+  for (int j = 0; j < numCols; j++)
+    {
+    vtkNew<vtkPlotFunctionalBag> plot;
+    double rgb[3];
+    lookup->GetColor(j, rgb);
+    plot->SetColor(rgb[0], rgb[1], rgb[2]);
+    plot->SetInputData(inputTable.GetPointer(), "X",
+      inputTable->GetColumn(j)->GetName());
+    chart->AddPlot(plot.GetPointer());
+    }
+
+  // Render the scene
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
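
As exercised above, the bag plots (Q2, Q3) are fed two-component columns holding a lower/upper pair per row, while the individual series use ordinary one-component columns. The helper below is a minimal sketch of assembling such an envelope column; the function and argument names are illustrative only and not part of the patch.

#include "vtkDoubleArray.h"

// Pack two series into a single two-component "bag" column, one
// (lower, upper) pair per row, mirroring how Q2 and Q3 are built above.
void FillEnvelopeColumn(vtkDoubleArray* lower, vtkDoubleArray* upper,
                        vtkDoubleArray* bag)
{
  vtkIdType n = lower->GetNumberOfTuples();
  bag->SetNumberOfComponents(2);
  bag->SetNumberOfTuples(n);
  for (vtkIdType i = 0; i < n; ++i)
    {
    bag->SetTuple2(i, lower->GetValue(i), upper->GetValue(i));
    }
}
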
diff --git a/Charts/Core/Testing/Cxx/TestLinePlot3D.cxx b/Charts/Core/Testing/Cxx/TestLinePlot3D.cxx
index d2f7bbb..599dbee 100644
--- a/Charts/Core/Testing/Cxx/TestLinePlot3D.cxx
+++ b/Charts/Core/Testing/Cxx/TestLinePlot3D.cxx
@@ -1,7 +1,7 @@
 /*=========================================================================
 
   Program:   Visualization Toolkit
-  Module:    TestLinePlot.cxx
+  Module:    TestLinePlot3D.cxx
 
   Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
   All rights reserved.
diff --git a/Charts/Core/Testing/Cxx/TestLinePlotDouble.cxx b/Charts/Core/Testing/Cxx/TestLinePlotDouble.cxx
new file mode 100644
index 0000000..e397a47
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestLinePlotDouble.cxx
@@ -0,0 +1,85 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestLinePlotDouble.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkRenderWindow.h"
+#include "vtkChartXY.h"
+#include "vtkPlot.h"
+#include "vtkTable.h"
+#include "vtkDoubleArray.h"
+#include "vtkContextView.h"
+#include "vtkContextScene.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkNew.h"
+#include "vtkMath.h"
+
+//----------------------------------------------------------------------------
+int TestLinePlotDouble(int, char *[])
+{
+  // Set up a 2D scene, add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(400, 300);
+  vtkNew<vtkChartXY> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+
+  // Create a table with some points in it...
+  vtkNew<vtkTable> table;
+  vtkNew<vtkDoubleArray> arrX;
+  arrX->SetName("X Axis");
+  table->AddColumn(arrX.GetPointer());
+  vtkNew<vtkDoubleArray> arrC;
+  arrC->SetName("Cosine");
+  table->AddColumn(arrC.GetPointer());
+  vtkNew<vtkDoubleArray> arrS;
+  arrS->SetName("Sine");
+  table->AddColumn(arrS.GetPointer());
+  vtkNew<vtkDoubleArray> arrS2;
+  arrS2->SetName("Sine2");
+  table->AddColumn(arrS2.GetPointer());
+  // Test charting with a few more points...
+  int numPoints = 69;
+  float inc = 7.5 / (numPoints - 1);
+  table->SetNumberOfRows(numPoints);
+  for (int i = 0; i < numPoints; ++i)
+    {
+    table->SetValue(i, 0, i * inc);
+    table->SetValue(i, 1, 1.0e-80 * cos(i * inc - 1.0) * 1.0e-8);
+    table->SetValue(i, 2, 1.0e-80 * sin(i * inc) * 1.0e-8);
+    table->SetValue(i, 3, 1.0e80 * sin(i * inc - 1.0));
+    }
+  table->SetValue(66, 2, vtkMath::Nan());
+  table->SetValue(4, 3, vtkMath::Inf());
+
+  // Add multiple line plots, setting the colors etc
+  vtkPlot *line = chart->AddPlot(vtkChart::LINE);
+  line->SetInputData(table.GetPointer(), 0, 1);
+  line->SetColor(0, 255, 0, 255);
+  line->SetWidth(1.0);
+  line = chart->AddPlot(vtkChart::LINE);
+  line->SetInputData(table.GetPointer(), 0, 2);
+  line->SetColor(255, 0, 0, 255);
+  line->SetWidth(5.0);
+  line = chart->AddPlot(vtkChart::LINE);
+  line->SetInputData(table.GetPointer(), 0, 3);
+  line->SetColor(0, 0, 255, 255);
+  line->SetWidth(4.0);
+  chart->SetPlotCorner(line, 1);
+
+  // Render the scene and compare the image to a reference image
+  view->GetRenderWindow()->SetMultiSamples(0);
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/Charts/Core/Testing/Cxx/TestLinePlotDouble2.cxx b/Charts/Core/Testing/Cxx/TestLinePlotDouble2.cxx
new file mode 100644
index 0000000..99ae710
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestLinePlotDouble2.cxx
@@ -0,0 +1,62 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestLinePlotDouble2.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkRenderWindow.h"
+#include "vtkChartXY.h"
+#include "vtkPlot.h"
+#include "vtkTable.h"
+#include "vtkDoubleArray.h"
+#include "vtkContextView.h"
+#include "vtkContextScene.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkNew.h"
+
+//----------------------------------------------------------------------------
+int TestLinePlotDouble2(int, char *[])
+{
+  // Set up a 2D scene, add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(400, 300);
+  vtkNew<vtkChartXY> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+
+  // Create a table with some points in it...
+  vtkNew<vtkTable> table;
+  vtkNew<vtkDoubleArray> arrX;
+  arrX->SetName("X");
+  table->AddColumn(arrX.GetPointer());
+  vtkNew<vtkDoubleArray> arrC;
+  arrC->SetName("Cosine");
+  table->AddColumn(arrC.GetPointer());
+  // Test charting values that differ only by very small increments.
+  int numPoints = 69;
+  float inc = 7.5 / (numPoints - 1);
+  table->SetNumberOfRows(numPoints);
+  for (int i = 0; i < numPoints; ++i)
+    {
+    double x(1 + 1e-11 * inc * i);
+    table->SetValue(i, 0, x);
+    table->SetValue(i, 1, cos((x - 1.0) * 1.0e11));
+    }
+  vtkPlot *line = chart->AddPlot(vtkChart::LINE);
+  line->SetInputData(table.GetPointer(), 0, 1);
+
+  // Render the scene and compare the image to a reference image
+  view->GetRenderWindow()->SetMultiSamples(0);
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/Charts/Core/Testing/Cxx/TestMultipleRenderers.cxx b/Charts/Core/Testing/Cxx/TestMultipleRenderers.cxx
index faf9d0f..d9c47f8 100644
--- a/Charts/Core/Testing/Cxx/TestMultipleRenderers.cxx
+++ b/Charts/Core/Testing/Cxx/TestMultipleRenderers.cxx
@@ -37,7 +37,6 @@ int TestMultipleRenderers(int , char * [])
 {
 
   VTK_CREATE(vtkRenderWindow, renwin);
-  renwin->SetMultiSamples(4);
   renwin->SetSize(800, 640);
 
   VTK_CREATE(vtkRenderWindowInteractor, iren);
diff --git a/Charts/Core/Testing/Cxx/TestParallelCoordinates.cxx b/Charts/Core/Testing/Cxx/TestParallelCoordinates.cxx
index f3d2960..01788f6 100644
--- a/Charts/Core/Testing/Cxx/TestParallelCoordinates.cxx
+++ b/Charts/Core/Testing/Cxx/TestParallelCoordinates.cxx
@@ -1,7 +1,7 @@
 /*=========================================================================
 
   Program:   Visualization Toolkit
-  Module:    TestPCPlot.cxx
+  Module:    TestParallelCoordinates.cxx
 
   Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
   All rights reserved.
@@ -15,7 +15,6 @@
 
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
-#include "vtkSmartPointer.h"
 #include "vtkChartParallelCoordinates.h"
 #include "vtkPlot.h"
 #include "vtkTable.h"
diff --git a/Charts/Core/Testing/Cxx/TestParallelCoordinatesDouble.cxx b/Charts/Core/Testing/Cxx/TestParallelCoordinatesDouble.cxx
new file mode 100644
index 0000000..82a7261
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestParallelCoordinatesDouble.cxx
@@ -0,0 +1,68 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestParallelCoordinatesDouble.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkChartParallelCoordinates.h"
+#include "vtkPlot.h"
+#include "vtkTable.h"
+#include "vtkDoubleArray.h"
+#include "vtkContextView.h"
+#include "vtkContextScene.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkNew.h"
+
+//----------------------------------------------------------------------------
+int TestParallelCoordinatesDouble(int , char* [])
+{
+  // Set up a 2D scene, add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(600, 400);
+  vtkNew<vtkChartParallelCoordinates> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+
+  // Create a table with some points in it...
+  vtkNew<vtkTable> table;
+  vtkNew<vtkDoubleArray> arrX;
+  arrX->SetName("x");
+  table->AddColumn(arrX.GetPointer());
+  vtkNew<vtkDoubleArray> arrC;
+  arrC->SetName("cosine");
+  table->AddColumn(arrC.GetPointer());
+  vtkNew<vtkDoubleArray> arrS;
+  arrS->SetName("sine");
+  table->AddColumn(arrS.GetPointer());
+  vtkNew<vtkDoubleArray> arrS2;
+  arrS2->SetName("tangent");
+  table->AddColumn(arrS2.GetPointer());
+  // Test charting with a few more points...
+  int numPoints = 200;
+  float inc = 7.5 / (numPoints - 1);
+  table->SetNumberOfRows(numPoints);
+  for (int i = 0; i < numPoints; ++i)
+    {
+    table->SetValue(i, 0, i * inc);
+    table->SetValue(i, 1, cos(i * inc) * 1.0e-82);
+    table->SetValue(i, 2, sin(i * inc) * 1.0e+89);
+    table->SetValue(i, 3, tan(i * inc) + 0.5);
+    }
+
+  chart->GetPlot(0)->SetInputData(table.GetPointer());
+
+  view->GetRenderWindow()->SetMultiSamples(0);
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+  return EXIT_SUCCESS;
+}
diff --git a/Charts/Core/Testing/Cxx/TestZoomAxis.cxx b/Charts/Core/Testing/Cxx/TestZoomAxis.cxx
new file mode 100644
index 0000000..e89397a
--- /dev/null
+++ b/Charts/Core/Testing/Cxx/TestZoomAxis.cxx
@@ -0,0 +1,91 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestZoomAxis.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkRenderWindow.h"
+#include "vtkSmartPointer.h"
+#include "vtkChartXY.h"
+#include "vtkPlot.h"
+#include "vtkTable.h"
+#include "vtkFloatArray.h"
+#include "vtkContextView.h"
+#include "vtkContextScene.h"
+#include "vtkContextMouseEvent.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkAnnotationLink.h"
+#include "vtkNew.h"
+
+//----------------------------------------------------------------------------
+int TestZoomAxis(int, char * [])
+{
+  // Set up a 2D scene, add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(400, 300);
+  vtkNew<vtkChartXY> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+  vtkNew<vtkAnnotationLink> link;
+  chart->SetAnnotationLink(link.GetPointer());
+  chart->SetActionToButton(vtkChart::ZOOM_AXIS,
+                           vtkContextMouseEvent::LEFT_BUTTON);
+  chart->SetSelectionMethod(vtkChart::SELECTION_PLOTS);
+
+  // Create a table with some points in it...
+  vtkNew<vtkTable> table;
+  vtkNew<vtkFloatArray> arrX;
+  arrX->SetName("X Axis");
+  table->AddColumn(arrX.GetPointer());
+  vtkNew<vtkFloatArray> arrS;
+  arrS->SetName("Sine");
+  table->AddColumn(arrS.GetPointer());
+  // Test charting with a few more points...
+  int numPoints = 100;
+  float inc = 9.5f / (numPoints-1);
+  table->SetNumberOfRows(numPoints);
+  for (int i = 0; i < numPoints; ++i)
+    {
+    table->SetValue(i, 0, i * inc);
+    table->SetValue(i, 1, sin(i * inc));
+    }
+
+  // Add multiple line plots, setting the colors etc
+  vtkPlot *plot = chart->AddPlot(vtkChart::POINTS);
+  plot->SetInputData(table.GetPointer(), 0, 1);
+  plot->SetColor(0, 255, 0, 255);
+  plot->SetWidth(1.0);
+
+  view->Update();
+  view->Render();
+
+  // Inject some mouse events to perform zooming
+  vtkContextMouseEvent event;
+  event.SetLastPos(vtkVector2f(0.0f));
+  event.SetPos(vtkVector2f(0.0f));
+  event.SetLastScenePos(vtkVector2f(0.0f));
+  event.SetScenePos(vtkVector2f(0.0f));
+  event.SetLastScreenPos(vtkVector2i(0));
+  event.SetInteractor(view->GetInteractor());
+  event.SetButton(vtkContextMouseEvent::LEFT_BUTTON);
+  event.SetScreenPos(vtkVector2i(350, 250));
+  chart->MouseButtonPressEvent(event);
+  event.SetLastScreenPos(event.GetScreenPos());
+  event.SetScreenPos(vtkVector2i(10, 10));
+  chart->MouseMoveEvent(event);
+  chart->MouseButtonReleaseEvent(event);
+
+  // Finally render the scene and compare the image to a reference image
+  view->GetRenderWindow()->SetMultiSamples(0);
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+  return EXIT_SUCCESS;
+}
diff --git a/Charts/Core/Testing/Data/Baseline/TestAxes.png.md5 b/Charts/Core/Testing/Data/Baseline/TestAxes.png.md5
new file mode 100644
index 0000000..509f69e
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestAxes.png.md5
@@ -0,0 +1 @@
+eafb4bb49edfea7631745a373f09cb41
diff --git a/Charts/Core/Testing/Data/Baseline/TestBagPlot.png.md5 b/Charts/Core/Testing/Data/Baseline/TestBagPlot.png.md5
new file mode 100644
index 0000000..3450d10
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestBagPlot.png.md5
@@ -0,0 +1 @@
+c1d6e4f60c9b8ccc9e8ae6b9cf7fb040
diff --git a/Charts/Core/Testing/Data/Baseline/TestBarGraph.png.md5 b/Charts/Core/Testing/Data/Baseline/TestBarGraph.png.md5
new file mode 100644
index 0000000..a53f820
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestBarGraph.png.md5
@@ -0,0 +1 @@
+5f78619f364215b009c366583b4f7306
diff --git a/Charts/Core/Testing/Data/Baseline/TestBarGraphHorizontal.png.md5 b/Charts/Core/Testing/Data/Baseline/TestBarGraphHorizontal.png.md5
new file mode 100644
index 0000000..ff404f1
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestBarGraphHorizontal.png.md5
@@ -0,0 +1 @@
+df13908b54ba93960c4d029eb43c1dcf
diff --git a/Charts/Core/Testing/Data/Baseline/TestCategoryLegend.png.md5 b/Charts/Core/Testing/Data/Baseline/TestCategoryLegend.png.md5
new file mode 100644
index 0000000..5238da2
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestCategoryLegend.png.md5
@@ -0,0 +1 @@
+9fd7145de70cf5e7db3081e9f6b4c50f
diff --git a/Charts/Core/Testing/Data/Baseline/TestChartDouble.png.md5 b/Charts/Core/Testing/Data/Baseline/TestChartDouble.png.md5
new file mode 100644
index 0000000..9ca77d7
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestChartDouble.png.md5
@@ -0,0 +1 @@
+10dc4af829ac21253fa58cc524080090
diff --git a/Charts/Core/Testing/Data/Baseline/TestChartDouble_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestChartDouble_1.png.md5
new file mode 100644
index 0000000..2fc4238
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestChartDouble_1.png.md5
@@ -0,0 +1 @@
+77750f5e04b728265603d17ab96f91e1
diff --git a/Charts/Core/Testing/Data/Baseline/TestChartDouble_2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestChartDouble_2.png.md5
new file mode 100644
index 0000000..766b2ac
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestChartDouble_2.png.md5
@@ -0,0 +1 @@
+e3fb6c525afff74227c333463d21fa6d
diff --git a/Charts/Core/Testing/Data/Baseline/TestChartMatrix.png.md5 b/Charts/Core/Testing/Data/Baseline/TestChartMatrix.png.md5
new file mode 100644
index 0000000..3779d31
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestChartMatrix.png.md5
@@ -0,0 +1 @@
+9fa01ddf00af7c5447dc914020b46e76
diff --git a/Charts/Core/Testing/Data/Baseline/TestChartUnicode.png.md5 b/Charts/Core/Testing/Data/Baseline/TestChartUnicode.png.md5
new file mode 100644
index 0000000..c431886
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestChartUnicode.png.md5
@@ -0,0 +1 @@
+f94310e233df17e2ec8044351c91e2bb
diff --git a/Charts/Core/Testing/Data/Baseline/TestChartXYZ.png.md5 b/Charts/Core/Testing/Data/Baseline/TestChartXYZ.png.md5
new file mode 100644
index 0000000..1142577
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestChartXYZ.png.md5
@@ -0,0 +1 @@
+73a9d7fd3940b7cbe9d4b345bf8dd297
diff --git a/Charts/Core/Testing/Data/Baseline/TestChartsOn3D.png.md5 b/Charts/Core/Testing/Data/Baseline/TestChartsOn3D.png.md5
new file mode 100644
index 0000000..4877bb1
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestChartsOn3D.png.md5
@@ -0,0 +1 @@
+aa7814798b1c1ed2c7e57a841ae43458
diff --git a/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction.png.md5 b/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction.png.md5
new file mode 100644
index 0000000..f0e7ad0
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction.png.md5
@@ -0,0 +1 @@
+e19210aeb365b595004571f4ad5b042c
diff --git a/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_1.png.md5
new file mode 100644
index 0000000..66b8604
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_1.png.md5
@@ -0,0 +1 @@
+3bd29e2faa82021db0f5495cee066204
diff --git a/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_2.png.md5
new file mode 100644
index 0000000..f7d2426
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_2.png.md5
@@ -0,0 +1 @@
+b24dd5c39b098f71f319cb1983579c91
diff --git a/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_3.png.md5 b/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_3.png.md5
new file mode 100644
index 0000000..5adfe59
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestColorTransferFunction_3.png.md5
@@ -0,0 +1 @@
+29b648b39163bd11d914bfb70d54e091
diff --git a/Charts/Core/Testing/Data/Baseline/TestContext.png.md5 b/Charts/Core/Testing/Data/Baseline/TestContext.png.md5
new file mode 100644
index 0000000..e5da86f
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestContext.png.md5
@@ -0,0 +1 @@
+6ef36222ddd240a18a3b4220e401b53e
diff --git a/Charts/Core/Testing/Data/Baseline/TestContextImage.png.md5 b/Charts/Core/Testing/Data/Baseline/TestContextImage.png.md5
new file mode 100644
index 0000000..d39225f
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestContextImage.png.md5
@@ -0,0 +1 @@
+314e274520dc82e4c65a62b2183f4a7e
diff --git a/Charts/Core/Testing/Data/Baseline/TestContextUnicode.png.md5 b/Charts/Core/Testing/Data/Baseline/TestContextUnicode.png.md5
new file mode 100644
index 0000000..837f8cc
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestContextUnicode.png.md5
@@ -0,0 +1 @@
+df6e79b865f0c883cdfcdc9731473b73
diff --git a/Charts/Core/Testing/Data/Baseline/TestControlPointsHandleItem.png.md5 b/Charts/Core/Testing/Data/Baseline/TestControlPointsHandleItem.png.md5
new file mode 100644
index 0000000..6fc932f
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestControlPointsHandleItem.png.md5
@@ -0,0 +1 @@
+cb1c16066e465328005012e8db8254d9
diff --git a/Charts/Core/Testing/Data/Baseline/TestDiagram.png.md5 b/Charts/Core/Testing/Data/Baseline/TestDiagram.png.md5
new file mode 100644
index 0000000..722909d
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestDiagram.png.md5
@@ -0,0 +1 @@
+190c0599f7247b53162380f08677abd8
diff --git a/Charts/Core/Testing/Data/Baseline/TestDiagram_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestDiagram_1.png.md5
new file mode 100644
index 0000000..952311c
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestDiagram_1.png.md5
@@ -0,0 +1 @@
+415eddccd5bc61b6e37ef6ebce651168
diff --git a/Charts/Core/Testing/Data/Baseline/TestFunctionalBagPlot.png.md5 b/Charts/Core/Testing/Data/Baseline/TestFunctionalBagPlot.png.md5
new file mode 100644
index 0000000..8e80a64
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestFunctionalBagPlot.png.md5
@@ -0,0 +1 @@
+4715ec4f1d4b60ccb206a9867c476a32
diff --git a/Charts/Core/Testing/Data/Baseline/TestHistogram2D.png.md5 b/Charts/Core/Testing/Data/Baseline/TestHistogram2D.png.md5
new file mode 100644
index 0000000..a9bf63c
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestHistogram2D.png.md5
@@ -0,0 +1 @@
+d5d7979379107fb96c6ed41dccba8a53
diff --git a/Charts/Core/Testing/Data/Baseline/TestInteractiveChartXYZ.png.md5 b/Charts/Core/Testing/Data/Baseline/TestInteractiveChartXYZ.png.md5
new file mode 100644
index 0000000..3ab1740
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestInteractiveChartXYZ.png.md5
@@ -0,0 +1 @@
+c0cfe1790b3bd4e9c7703b2a0c2fc0ff
diff --git a/Charts/Core/Testing/Data/Baseline/TestLegendHiddenPlots.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLegendHiddenPlots.png.md5
new file mode 100644
index 0000000..ac65805
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLegendHiddenPlots.png.md5
@@ -0,0 +1 @@
+737c9ead54fac70c21eeec3ecd0f9bc0
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlot.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlot.png.md5
new file mode 100644
index 0000000..bbfb707
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlot.png.md5
@@ -0,0 +1 @@
+dc17f39f128c67e91624f31a288d99f4
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlot2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlot2.png.md5
new file mode 100644
index 0000000..950aa91
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlot2.png.md5
@@ -0,0 +1 @@
+9a7c69553f081f0e9dcc5da9efa11c34
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlot3D.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlot3D.png.md5
new file mode 100644
index 0000000..1373e6e
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlot3D.png.md5
@@ -0,0 +1 @@
+4aa977c7b1486ce6746220d794233d6e
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts.png.md5
new file mode 100644
index 0000000..fab32d2
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts.png.md5
@@ -0,0 +1 @@
+86df322d3946113deba850188aab2272
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts_1.png.md5
new file mode 100644
index 0000000..70f84b9
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts_1.png.md5
@@ -0,0 +1 @@
+cdd7d508d97f1edf0020c8835d06fbf9
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts_2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts_2.png.md5
new file mode 100644
index 0000000..799238f
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotAxisFonts_2.png.md5
@@ -0,0 +1 @@
+e67fc08166c76c4cd7710f5144b4985f
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotColors.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotColors.png.md5
new file mode 100644
index 0000000..537f466
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotColors.png.md5
@@ -0,0 +1 @@
+c2b4ec8d2f32072cf52583b578ac4f09
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble.png.md5
new file mode 100644
index 0000000..57c8dad
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble.png.md5
@@ -0,0 +1 @@
+6f9f5b172bfe165a145dc5b54db4102e
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble2.png.md5
new file mode 100644
index 0000000..b9c40ae
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble2.png.md5
@@ -0,0 +1 @@
+0b399b11f2729294bb6f6c76f0ae14e9
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble_1.png.md5
new file mode 100644
index 0000000..b0513bc
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble_1.png.md5
@@ -0,0 +1 @@
+3adab4f70db3f1a43d81d03cceb12925
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble_2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble_2.png.md5
new file mode 100644
index 0000000..45163cd
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotDouble_2.png.md5
@@ -0,0 +1 @@
+d3127b249857e531db71874b2d13bd60
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotInteraction.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotInteraction.png.md5
new file mode 100644
index 0000000..9e6983d
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotInteraction.png.md5
@@ -0,0 +1 @@
+8ab957b3f559361f08f134d4b67711b5
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotInteraction_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotInteraction_1.png.md5
new file mode 100644
index 0000000..b17163a
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotInteraction_1.png.md5
@@ -0,0 +1 @@
+1f489d6f2076ec631f445d6cb5a6b3f2
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection.png.md5
new file mode 100644
index 0000000..00ed026
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection.png.md5
@@ -0,0 +1 @@
+c18f079349a9e3fe0f583a7bd7386bd9
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection2.png.md5
new file mode 100644
index 0000000..74a8ece
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection2.png.md5
@@ -0,0 +1 @@
+fb00609ecc98c6e69924562f485e52b9
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection2_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection2_1.png.md5
new file mode 100644
index 0000000..0ba828e
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection2_1.png.md5
@@ -0,0 +1 @@
+3c65f3f6e4be39bf05bf557d8e4fcca7
diff --git a/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection_1.png.md5
new file mode 100644
index 0000000..658e947
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestLinePlotSelection_1.png.md5
@@ -0,0 +1 @@
+502c99d73fbfb7102cdc94dff55263ec
diff --git a/Charts/Core/Testing/Data/Baseline/TestMultipleChartRenderers.png.md5 b/Charts/Core/Testing/Data/Baseline/TestMultipleChartRenderers.png.md5
new file mode 100644
index 0000000..8eec868
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestMultipleChartRenderers.png.md5
@@ -0,0 +1 @@
+761c93dca5da98d3da0fa93a0efabc54
diff --git a/Charts/Core/Testing/Data/Baseline/TestMultipleChartRenderers_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestMultipleChartRenderers_1.png.md5
new file mode 100644
index 0000000..9a33ffc
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestMultipleChartRenderers_1.png.md5
@@ -0,0 +1 @@
+8d759b92809629757242ec967d983542
diff --git a/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers.png.md5 b/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers.png.md5
new file mode 100644
index 0000000..1b13188
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers.png.md5
@@ -0,0 +1 @@
+9f901dbaac0a896020e02d184a7a63ae
diff --git a/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers_1.png.md5
new file mode 100644
index 0000000..ec241b7
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers_1.png.md5
@@ -0,0 +1 @@
+f70a70dcefd69ba7f6707b31b4cbd6f7
diff --git a/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers_2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers_2.png.md5
new file mode 100644
index 0000000..a25c193
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestMultipleRenderers_2.png.md5
@@ -0,0 +1 @@
+5f51999f99eff694ea18f8a4cbdcbccb
diff --git a/Charts/Core/Testing/Data/Baseline/TestMultipleScalarsToColors.png.md5 b/Charts/Core/Testing/Data/Baseline/TestMultipleScalarsToColors.png.md5
new file mode 100644
index 0000000..76d4040
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestMultipleScalarsToColors.png.md5
@@ -0,0 +1 @@
+98907c3079a153fb665e006dab066d46
diff --git a/Charts/Core/Testing/Data/Baseline/TestParallelCoordinates.png.md5 b/Charts/Core/Testing/Data/Baseline/TestParallelCoordinates.png.md5
new file mode 100644
index 0000000..bffa4cb
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestParallelCoordinates.png.md5
@@ -0,0 +1 @@
+3db1fbe01c1b0f9071e7e2836e8717a0
diff --git a/Charts/Core/Testing/Data/Baseline/TestParallelCoordinatesColors.png.md5 b/Charts/Core/Testing/Data/Baseline/TestParallelCoordinatesColors.png.md5
new file mode 100644
index 0000000..1b73ae3
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestParallelCoordinatesColors.png.md5
@@ -0,0 +1 @@
+5c02cfd9cd922c32115deaf0083d8d64
diff --git a/Charts/Core/Testing/Data/Baseline/TestParallelCoordinatesDouble.png.md5 b/Charts/Core/Testing/Data/Baseline/TestParallelCoordinatesDouble.png.md5
new file mode 100644
index 0000000..5c7d4f0
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestParallelCoordinatesDouble.png.md5
@@ -0,0 +1 @@
+9cd5211af09ae828a74e02b2dc4f98b4
diff --git a/Charts/Core/Testing/Data/Baseline/TestPieChart.png.md5 b/Charts/Core/Testing/Data/Baseline/TestPieChart.png.md5
new file mode 100644
index 0000000..7e760b1
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestPieChart.png.md5
@@ -0,0 +1 @@
+844cebf7252d631f5b71e1d531b78e22
diff --git a/Charts/Core/Testing/Data/Baseline/TestPieChart_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestPieChart_1.png.md5
new file mode 100644
index 0000000..824d4ae
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestPieChart_1.png.md5
@@ -0,0 +1 @@
+6e472a149250d32a30a767b9dea4fb8b
diff --git a/Charts/Core/Testing/Data/Baseline/TestPlotMatrix.png.md5 b/Charts/Core/Testing/Data/Baseline/TestPlotMatrix.png.md5
new file mode 100644
index 0000000..7909b64
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestPlotMatrix.png.md5
@@ -0,0 +1 @@
+f8e3b6138a259b7aec19e1ea5d5d04e9
diff --git a/Charts/Core/Testing/Data/Baseline/TestScalarsToColors.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScalarsToColors.png.md5
new file mode 100644
index 0000000..2121a3e
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScalarsToColors.png.md5
@@ -0,0 +1 @@
+272891f5537f5cf70a03d5fc827b902e
diff --git a/Charts/Core/Testing/Data/Baseline/TestScatterPlot.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScatterPlot.png.md5
new file mode 100644
index 0000000..53c9dae
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScatterPlot.png.md5
@@ -0,0 +1 @@
+d6668cb1d7889fbc4adc52385b0dad07
diff --git a/Charts/Core/Testing/Data/Baseline/TestScatterPlotColors.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScatterPlotColors.png.md5
new file mode 100644
index 0000000..be85543
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScatterPlotColors.png.md5
@@ -0,0 +1 @@
+1245cfbf37f063a924601db09d6d1efa
diff --git a/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix.png.md5
new file mode 100644
index 0000000..7e70560
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix.png.md5
@@ -0,0 +1 @@
+1996fbb5169cc985d9847162c36ea673
diff --git a/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrixVehicles.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrixVehicles.png.md5
new file mode 100644
index 0000000..3a4cef7
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrixVehicles.png.md5
@@ -0,0 +1 @@
+7580d4e48d57ae1eb778f14f7fdc752a
diff --git a/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrixVisible.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrixVisible.png.md5
new file mode 100644
index 0000000..61618c3
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrixVisible.png.md5
@@ -0,0 +1 @@
+bde888bbdd0f4667be426baecaf5f725
diff --git a/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_1.png.md5
new file mode 100644
index 0000000..ff035db
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_1.png.md5
@@ -0,0 +1 @@
+2cb4d4567c1519c0fbe0aeea94a3daa0
diff --git a/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_2.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_2.png.md5
new file mode 100644
index 0000000..5e3dd0b
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_2.png.md5
@@ -0,0 +1 @@
+6e741958a647dfa1a64730e12db6b388
diff --git a/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_3.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_3.png.md5
new file mode 100644
index 0000000..3ef77e2
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScatterPlotMatrix_3.png.md5
@@ -0,0 +1 @@
+e14446e625334b9435ae9e25d0596825
diff --git a/Charts/Core/Testing/Data/Baseline/TestScientificPlot.png.md5 b/Charts/Core/Testing/Data/Baseline/TestScientificPlot.png.md5
new file mode 100644
index 0000000..104e060
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestScientificPlot.png.md5
@@ -0,0 +1 @@
+7ab792ab4e4ede6a94573f85c7173546
diff --git a/Charts/Core/Testing/Data/Baseline/TestStackedBarGraph.png.md5 b/Charts/Core/Testing/Data/Baseline/TestStackedBarGraph.png.md5
new file mode 100644
index 0000000..1bfb6a5
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestStackedBarGraph.png.md5
@@ -0,0 +1 @@
+0fa5cde7810a637734c6c3bb65fa5551
diff --git a/Charts/Core/Testing/Data/Baseline/TestStackedPlot.png.md5 b/Charts/Core/Testing/Data/Baseline/TestStackedPlot.png.md5
new file mode 100644
index 0000000..3102dad
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestStackedPlot.png.md5
@@ -0,0 +1 @@
+b15d1b92e43b8057a0970b210eef0e96
diff --git a/Charts/Core/Testing/Data/Baseline/TestSurfacePlot.png.md5 b/Charts/Core/Testing/Data/Baseline/TestSurfacePlot.png.md5
new file mode 100644
index 0000000..9dea874
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestSurfacePlot.png.md5
@@ -0,0 +1 @@
+ecb1636a54d19208c010606cef06700a
diff --git a/Charts/Core/Testing/Data/Baseline/TestZoomAxis.png.md5 b/Charts/Core/Testing/Data/Baseline/TestZoomAxis.png.md5
new file mode 100644
index 0000000..5b1059b
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestZoomAxis.png.md5
@@ -0,0 +1 @@
+ddaa4755e611ee0bf902f15acb8a5728
diff --git a/Charts/Core/Testing/Data/Baseline/TestZoomAxis_1.png.md5 b/Charts/Core/Testing/Data/Baseline/TestZoomAxis_1.png.md5
new file mode 100644
index 0000000..f861914
--- /dev/null
+++ b/Charts/Core/Testing/Data/Baseline/TestZoomAxis_1.png.md5
@@ -0,0 +1 @@
+986b61a336562cc8ae8627f1dc1f8e62
diff --git a/Charts/Core/Testing/Data/Fonts/DejaVuSans.ttf.md5 b/Charts/Core/Testing/Data/Fonts/DejaVuSans.ttf.md5
new file mode 100644
index 0000000..09b8dd3
--- /dev/null
+++ b/Charts/Core/Testing/Data/Fonts/DejaVuSans.ttf.md5
@@ -0,0 +1 @@
+eccb7a74720fc377b60d6b2110530fd9
diff --git a/Charts/Core/Testing/Python/CMakeLists.txt b/Charts/Core/Testing/Python/CMakeLists.txt
index 28189f5..a05c76c 100644
--- a/Charts/Core/Testing/Python/CMakeLists.txt
+++ b/Charts/Core/Testing/Python/CMakeLists.txt
@@ -1,33 +1,16 @@
 if(VTK_PYTHON_EXE)
-  set(tests)
-  if(VTK_DATA_ROOT)
-    # Tests written in Python that require VTKData
-    # These tests should use vtk.test.Testing.
-    set(tests
-      ${tests}
-      TestBarGraph
-      TestLinePlot
-      TestStackedPlot
-      TestLinePlotColors
-      TestParallelCoordinatesColors
-      TestScatterPlotColors
+  # These tests should use vtk.test.Testing.
+  set(tests
+    TestBarGraph
+    TestLinePlot
+    TestStackedPlot
+    TestLinePlotColors
+    TestParallelCoordinatesColors
+    TestScatterPlotColors
     )
-  else()
-    # Tests written in Python that do NOT require VTKData
-    # These tests should use vtk.test.Testing.
-    set(tests
-      ${tests}
-    )
-  endif()
-  if(tests)
-    foreach(tfile ${tests})
-      add_test(NAME ${vtk-module}Python-${tfile}
-        COMMAND ${VTK_PYTHON_EXE}
-          ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.py
-          -D ${VTK_DATA_ROOT}
-          -B ${VTK_DATA_ROOT}/Baseline/Charts)
-    endforeach()
-  endif()
+  foreach(tfile ${tests})
+    vtk_add_test_python(${tfile}.py NO_RT NO_OUTPUT)
+  endforeach()
 else()
   message(FATAL_ERROR "No Python tests added!")
 endif()
diff --git a/Charts/Core/Testing/Python/TestBarGraph.py b/Charts/Core/Testing/Python/TestBarGraph.py
index fd5a12f..24eeaeb 100755
--- a/Charts/Core/Testing/Python/TestBarGraph.py
+++ b/Charts/Core/Testing/Python/TestBarGraph.py
@@ -1,10 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Run this test like so:
-# vtkpython TestBarGraph.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Charts/
-
 import os
 import vtk
 import vtk.test.Testing
diff --git a/Charts/Core/Testing/Python/TestLinePlot.py b/Charts/Core/Testing/Python/TestLinePlot.py
index 7b435f6..f9cfe01 100755
--- a/Charts/Core/Testing/Python/TestLinePlot.py
+++ b/Charts/Core/Testing/Python/TestLinePlot.py
@@ -1,10 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Run this test like so:
-# vtkpython TestLinePlot.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Charts/
-
 import os
 import vtk
 import vtk.test.Testing
diff --git a/Charts/Core/Testing/Python/TestLinePlotColors.py b/Charts/Core/Testing/Python/TestLinePlotColors.py
index 7f4559c..882a747 100755
--- a/Charts/Core/Testing/Python/TestLinePlotColors.py
+++ b/Charts/Core/Testing/Python/TestLinePlotColors.py
@@ -1,10 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Run this test like so:
-# vtkpython TestLinePlotColors.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Charts/
-
 import os
 import vtk
 import vtk.test.Testing
diff --git a/Charts/Core/Testing/Python/TestParallelCoordinatesColors.py b/Charts/Core/Testing/Python/TestParallelCoordinatesColors.py
index bd2bd01..8d35f8f 100755
--- a/Charts/Core/Testing/Python/TestParallelCoordinatesColors.py
+++ b/Charts/Core/Testing/Python/TestParallelCoordinatesColors.py
@@ -1,10 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Run this test like so:
-# vtkpython TestParallelCoordinatesColors.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Charts/
-
 import os
 import vtk
 import vtk.test.Testing
diff --git a/Charts/Core/Testing/Python/TestScatterPlotColors.py b/Charts/Core/Testing/Python/TestScatterPlotColors.py
index d873437..55d2ea2 100755
--- a/Charts/Core/Testing/Python/TestScatterPlotColors.py
+++ b/Charts/Core/Testing/Python/TestScatterPlotColors.py
@@ -1,10 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Run this test like so:
-# vtkpython TestScatterPlotColors.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Charts/
-
 import os
 import vtk
 import vtk.test.Testing
diff --git a/Charts/Core/Testing/Python/TestStackedPlot.py b/Charts/Core/Testing/Python/TestStackedPlot.py
index 5abeb7c..4fc7ed9 100755
--- a/Charts/Core/Testing/Python/TestStackedPlot.py
+++ b/Charts/Core/Testing/Python/TestStackedPlot.py
@@ -1,10 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Run this test like so:
-# vtkpython TestStackedPlot.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Charts/
-
 import os
 import vtk
 import vtk.test.Testing
diff --git a/Charts/Core/module.cmake b/Charts/Core/module.cmake
index 79d4dc0..a139ab2 100644
--- a/Charts/Core/module.cmake
+++ b/Charts/Core/module.cmake
@@ -2,11 +2,11 @@ vtk_module(vtkChartsCore
   GROUPS
     StandAlone
   DEPENDS
-    vtkCommonColor
     vtkRenderingContext2D
+  PRIVATE_DEPENDS
+    vtkCommonColor
     vtkInfovisCore # Needed for plot parallel coordinates vtkStringToCategory
   TEST_DEPENDS
-    vtkCommonColor
     vtkTestingCore
     vtkTestingRendering
     vtkViewsContext2D
diff --git a/Charts/Core/vtkAxis.cxx b/Charts/Core/vtkAxis.cxx
index 8892819..4d301c8 100644
--- a/Charts/Core/vtkAxis.cxx
+++ b/Charts/Core/vtkAxis.cxx
@@ -81,6 +81,8 @@ vtkAxis::vtkAxis()
   this->Behavior = vtkAxis::AUTO;
   this->Pen = vtkPen::New();
   this->TitleAppended = false;
+  this->ScalingFactor = 1.0;
+  this->Shift = 0.0;
 
   this->Pen->SetColor(0, 0, 0);
   this->Pen->SetWidth(1.0);
@@ -1331,10 +1333,10 @@ double vtkAxis::NiceMinMax(double &min, double &max, float pixelRange,
   // First get the order of the range of the numbers
   if (min == max)
     {
-    if (fabs(min) < 1e-20 && fabs(max) < 1e-20)
+    if (fabs(min) < 1e-70 && fabs(max) < 1e-70)
       {
-      min = -0.01;
-      max = 0.01;
+      min = -0.0000001;
+      max =  0.0000001;
       }
     else
       {
@@ -1342,7 +1344,7 @@ double vtkAxis::NiceMinMax(double &min, double &max, float pixelRange,
       max *= 1.05;
       }
     }
-  else if ((max - min) < 1.0e-20)
+  else if ((max - min) < 1.0e-60)
     {
     min *= 0.95;
     max *= 1.05;
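
The two hunks above relax the degenerate-range guard in vtkAxis::NiceMinMax so that the tiny but nonzero values exercised by the new double-precision tests (on the order of 1e-80) are no longer flattened to a fixed window. The standalone sketch below shows the guard after the change; the first line of the min == max else branch falls between the two hunks and is filled in here by analogy with the visible branch, so treat it as an approximation rather than a verbatim excerpt.

#include <cmath>

// Sketch of the degenerate-range handling after this patch.  With the old
// 1e-20 threshold a range such as [1e-40, 1e-40] collapsed to the fixed
// window [-0.01, 0.01]; the lower thresholds keep a relative expansion for
// such values instead.
void ExpandDegenerateRange(double& min, double& max)
{
  if (min == max)
    {
    if (std::fabs(min) < 1e-70 && std::fabs(max) < 1e-70)
      {
      min = -0.0000001;
      max =  0.0000001;
      }
    else
      {
      min *= 0.95;  // assumed, see note above
      max *= 1.05;
      }
    }
  else if ((max - min) < 1.0e-60)
    {
    min *= 0.95;
    max *= 1.05;
    }
}
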
diff --git a/Charts/Core/vtkAxis.h b/Charts/Core/vtkAxis.h
index fc4f918..faf6627 100644
--- a/Charts/Core/vtkAxis.h
+++ b/Charts/Core/vtkAxis.h
@@ -333,6 +333,14 @@ public:
   vtkGetMacro(TickLabelAlgorithm, int)
 
   // Description:
+  // Get/set the scaling factor used for the axis; this defaults to 1.0 (no
+  // scaling) and is used to coordinate scaling with the plots, charts, etc.
+  vtkSetMacro(ScalingFactor, double)
+  vtkGetMacro(ScalingFactor, double)
+  vtkSetMacro(Shift, double)
+  vtkGetMacro(Shift, double)
+
+  // Description:
   // Update the geometry of the axis. Takes care of setting up the tick mark
   // locations etc. Should be called by the scene before rendering.
   virtual void Update();
@@ -506,6 +514,13 @@ protected:
                        // are changed in the Extended Axis Labeling algorithm
 
   // Description:
+  // Scaling factor used on this axis; it is used to render very small/large
+  // numbers accurately by converting the underlying range by the scaling
+  // factor.
+  double ScalingFactor;
+  double Shift;
+
+  // Description:
   // Are we using custom tick labels, or should the axis generate them?
   bool CustomTickLabels;
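
// Minimal usage sketch for the accessors added above (not part of the patch).
// The macros expand to SetScalingFactor(double)/GetScalingFactor() and
// SetShift(double)/GetShift(); how the chart classes apply the factor and
// shift to the plotted range is not shown in this hunk, so no particular
// transform is assumed here.
#include "vtkAxis.h"

void ConfigureAxisScaling(vtkAxis* axis)
{
  axis->SetScalingFactor(1.0e-80);  // example value, not from the patch
  axis->SetShift(0.0);              // default: no shift

  double factor = axis->GetScalingFactor();
  double shift = axis->GetShift();
  (void)factor;
  (void)shift;
}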
 
diff --git a/Charts/Core/vtkCategoryLegend.cxx b/Charts/Core/vtkCategoryLegend.cxx
new file mode 100644
index 0000000..2ff6f41
--- /dev/null
+++ b/Charts/Core/vtkCategoryLegend.cxx
@@ -0,0 +1,288 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCategoryLegend.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkBrush.h"
+#include "vtkCategoryLegend.h"
+#include "vtkContext2D.h"
+#include "vtkObjectFactory.h"
+#include "vtkScalarsToColors.h"
+#include "vtkTextProperty.h"
+#include "vtkVariantArray.h"
+
+//-----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkCategoryLegend);
+
+//-----------------------------------------------------------------------------
+vtkCategoryLegend::vtkCategoryLegend()
+{
+  this->SetInline(false);
+  this->SetHorizontalAlignment(vtkChartLegend::RIGHT);
+  this->SetVerticalAlignment(vtkChartLegend::BOTTOM);
+
+  this->ScalarsToColors = NULL;
+  this->Values = NULL;
+
+  this->TitleProperties->SetColor(this->LabelProperties->GetColor());
+  this->TitleProperties->SetFontSize(this->LabelProperties->GetFontSize());
+  this->TitleProperties->SetFontFamily(this->LabelProperties->GetFontFamily());
+  this->TitleProperties->SetJustificationToCentered();
+  this->TitleProperties->SetVerticalJustificationToTop();
+  this->TitleProperties->SetBold(1);
+
+  this->TitleWidthOffset = 0.0;
+  this->HasOutliers = false;
+  this->OutlierLabel = "outliers";
+}
+
+//-----------------------------------------------------------------------------
+vtkCategoryLegend::~vtkCategoryLegend()
+{
+}
+
+//-----------------------------------------------------------------------------
+bool vtkCategoryLegend::Paint(vtkContext2D* painter)
+{
+  if (!this->Visible || this->ScalarsToColors == NULL || this->Values == NULL)
+    {
+    return true;
+    }
+
+  // Draw a box around the legend.
+  painter->ApplyPen(this->Pen.GetPointer());
+  painter->ApplyBrush(this->Brush.GetPointer());
+  this->GetBoundingRect(painter);
+  painter->DrawRect(this->Rect.GetX(), this->Rect.GetY(),
+                    this->Rect.GetWidth(), this->Rect.GetHeight());
+
+  // Draw the title (if any)
+  vtkVector2f stringBounds[2];
+  float titleHeight = 0.0;
+  if (this->Title != "")
+    {
+    painter->ApplyTextProp(this->TitleProperties.GetPointer());
+    painter->ComputeStringBounds(this->Title, stringBounds->GetData());
+    titleHeight = stringBounds[1].GetY() + this->Padding;
+
+    float x = this->Rect.GetX() + this->Rect.GetWidth() / 2.0;
+    float y = this->Rect.GetY() + this->Rect.GetHeight() - this->Padding;
+    painter->DrawString(x, y, this->Title);
+    }
+
+  painter->ApplyTextProp(this->LabelProperties.GetPointer());
+
+  // compute the height of a sample string.
+  // The height of this string will also be used as the size of
+  // the color marks.
+  painter->ComputeStringBounds("Tgyf", stringBounds->GetData());
+  float stringHeight = stringBounds[1].GetY();
+
+  // the starting X positions of our marks & labels
+  float markX = this->Rect.GetX() + this->TitleWidthOffset + this->Padding;
+  float labelX = markX + stringHeight + this->Padding;
+
+  // the Y value of the row that we're currently drawing
+  float y = this->Rect.GetY() + this->Rect.GetHeight() -
+                  this->Padding - floor(stringHeight) - titleHeight;
+
+  // draw all of the marks & labels
+  for (vtkIdType l = 0; l < this->Values->GetNumberOfTuples(); ++l)
+    {
+    vtkStdString currentString = this->Values->GetValue(l).ToString();
+    if (currentString == "")
+      {
+      continue;
+      }
+
+    if (this->ScalarsToColors->GetAnnotatedValueIndex(
+      this->Values->GetValue(l)) == -1)
+      {
+      continue;
+      }
+
+    // paint the color mark for this category
+    double color[4];
+    this->ScalarsToColors->GetAnnotationColor(this->Values->GetValue(l), color);
+    painter->GetBrush()->SetColorF(color[0], color[1], color[2]);
+    painter->DrawRect(markX, y, stringHeight, stringHeight);
+
+    // draw this category's label
+    painter->DrawString(labelX, y, this->Values->GetValue(l).ToString());
+
+    // Move y position down another row
+    y -= stringHeight + this->Padding;
+    }
+
+  if (this->HasOutliers)
+    {
+    // paint the outlier color mark
+    double color[4];
+    this->ScalarsToColors->GetAnnotationColor(
+      this->ScalarsToColors->GetAnnotatedValue(-1), color);
+    painter->GetBrush()->SetColorF(color[0], color[1], color[2]);
+    painter->DrawRect(markX, y, stringHeight, stringHeight);
+
+    // draw the outlier label
+    painter->DrawString(labelX, y, this->OutlierLabel);
+    }
+
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+void vtkCategoryLegend::SetScalarsToColors(vtkScalarsToColors* stc)
+{
+  this->ScalarsToColors = stc;
+}
+
+//-----------------------------------------------------------------------------
+vtkScalarsToColors * vtkCategoryLegend::GetScalarsToColors()
+{
+  return this->ScalarsToColors;
+}
+
+//-----------------------------------------------------------------------------
+vtkRectf vtkCategoryLegend::GetBoundingRect(vtkContext2D *painter)
+{
+  if (this->CacheBounds && this->RectTime > this->GetMTime() &&
+      this->RectTime > this->PlotTime &&
+      this->RectTime > this->ScalarsToColors->GetMTime() &&
+      this->RectTime > this->Values->GetMTime())
+    {
+    return this->Rect;
+    }
+
+  painter->ApplyTextProp(this->LabelProperties.GetPointer());
+
+  vtkVector2f stringBounds[2];
+  painter->ComputeStringBounds("Tgyf", stringBounds->GetData());
+  float height = stringBounds[1].GetY();
+
+  // programmatically set Padding here.  This results in better
+  // appearance when we zoom in or out on the legend.
+  this->Padding = height / 4.0;
+  if (this->Padding < 1)
+    {
+    this->Padding = 1;
+    }
+
+  // Calculate size of title (if any)
+  float titleHeight = 0.0f;
+  float titleWidth = 0.0f;
+  if (this->Title != "")
+    {
+    painter->ApplyTextProp(this->TitleProperties.GetPointer());
+
+    painter->ComputeStringBounds(this->Title, stringBounds->GetData());
+    titleWidth = stringBounds[1].GetX();
+    titleHeight = stringBounds[1].GetY() + this->Padding;
+
+    painter->ApplyTextProp(this->LabelProperties.GetPointer());
+    }
+
+  // Calculate the widest legend label
+  float maxWidth = 0.0;
+  int numSkippedValues = 0;
+  this->TitleWidthOffset = 0.0;
+  this->HasOutliers = false;
+
+  for (vtkIdType l = 0; l < this->Values->GetNumberOfTuples(); ++l)
+    {
+    if (this->Values->GetValue(l).ToString() == "")
+      {
+      ++numSkippedValues;
+      continue;
+      }
+    if (this->ScalarsToColors->GetAnnotatedValueIndex(
+      this->Values->GetValue(l)) == -1)
+      {
+      ++numSkippedValues;
+      this->HasOutliers = true;
+      continue;
+      }
+    painter->ComputeStringBounds(this->Values->GetValue(l).ToString(),
+                                 stringBounds->GetData());
+    if (stringBounds[1].GetX() > maxWidth)
+      {
+      maxWidth = stringBounds[1].GetX();
+      }
+    }
+
+  // Calculate size of outlier label (if necessary)
+  if (this->HasOutliers)
+    {
+    painter->ComputeStringBounds(this->OutlierLabel,
+                                 stringBounds->GetData());
+    if (stringBounds[1].GetX() > maxWidth)
+      {
+      maxWidth = stringBounds[1].GetX();
+      }
+    }
+
+  if (titleWidth > maxWidth)
+    {
+    this->TitleWidthOffset = (titleWidth - maxWidth) / 2.0;
+    maxWidth = titleWidth;
+    }
+
+  int numLabels = this->Values->GetNumberOfTuples() - numSkippedValues;
+  if (this->HasOutliers)
+    {
+    ++numLabels;
+    }
+
+  // 3 paddings: one on the left, one on the right, and one between the
+  // color mark and its label.
+  float w = ceil(maxWidth + 3 * this->Padding + height);
+
+  float h = ceil((numLabels * (height + this->Padding)) + this->Padding
+            + titleHeight);
+
+  float x = floor(this->Point[0]);
+  float y = floor(this->Point[1]);
+
+  // Compute bottom left point based on current alignment.
+  if (this->HorizontalAlignment == vtkChartLegend::CENTER)
+    {
+    x -= w / 2.0;
+    }
+  else if (this->HorizontalAlignment == vtkChartLegend::RIGHT)
+    {
+    x -= w;
+    }
+  if (this->VerticalAlignment == vtkChartLegend::CENTER)
+    {
+    y -= h / 2.0;
+    }
+  else if (this->VerticalAlignment == vtkChartLegend::TOP)
+    {
+    y -= h;
+    }
+
+  this->Rect = vtkRectf(x, y, w, h);
+  this->RectTime.Modified();
+  return this->Rect;
+}
+
+//-----------------------------------------------------------------------------
+void vtkCategoryLegend::SetTitle(const vtkStdString &title)
+{
+  this->Title = title;
+}
+
+//-----------------------------------------------------------------------------
+vtkStdString vtkCategoryLegend::GetTitle()
+{
+  return this->Title;
+}
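
The width and height computed in GetBoundingRect() above reduce to two ceil() expressions: w = maxWidth + 3 * Padding + height, and h = numLabels * (height + Padding) + Padding + titleHeight. A standalone numeric check with illustrative metrics (string height 12, so Padding clamps to 3, four labels, no title); this is not VTK code:

    #include <cmath>
    #include <cstdio>

    // Worked example of the legend sizing formula above (illustrative numbers).
    int main()
    {
      float height = 12.0f;           // sample string height
      float padding = height / 4.0f;  // 3.0f, same clamp rule as the code
      float maxWidth = 40.0f;         // widest label width, hypothetical
      int numLabels = 4;
      float titleHeight = 0.0f;       // no title in this example

      float w = std::ceil(maxWidth + 3 * padding + height);                // 61
      float h = std::ceil(numLabels * (height + padding) + padding
                          + titleHeight);                                  // 63
      std::printf("w=%g h=%g\n", w, h);
      return 0;
    }
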
diff --git a/Charts/Core/vtkCategoryLegend.h b/Charts/Core/vtkCategoryLegend.h
new file mode 100644
index 0000000..e5f56b0
--- /dev/null
+++ b/Charts/Core/vtkCategoryLegend.h
@@ -0,0 +1,102 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCategoryLegend.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkCategoryLegend - Legend item to display categorical data.
+// .SECTION Description
+// vtkCategoryLegend will display a label and color patch for each value
+// in a categorical data set.  To use this class, you must first populate
+// a vtkScalarsToColors by using the SetAnnotation() method.  The other
+// input to this class is a vtkVariantArray.  This should contain the
+// annotated values from the vtkScalarsToColors that you wish to include
+// within the legend.
+
+#ifndef __vtkCategoryLegend_h
+#define __vtkCategoryLegend_h
+
+#include "vtkChartsCoreModule.h" // For export macro
+#include "vtkChartLegend.h"
+#include "vtkNew.h"              // For vtkNew ivars
+#include "vtkStdString.h"        // For vtkStdString ivars
+#include "vtkVector.h"           // For vtkRectf
+
+class vtkScalarsToColors;
+class vtkTextProperty;
+class vtkVariantArray;
+
+class VTKCHARTSCORE_EXPORT vtkCategoryLegend: public vtkChartLegend
+{
+public:
+  vtkTypeMacro(vtkCategoryLegend, vtkChartLegend);
+  static vtkCategoryLegend* New();
+
+  // Description:
+  // Enum of legend orientation types
+  enum {
+    VERTICAL = 0,
+    HORIZONTAL
+  };
+
+  // Description:
+  // Paint the legend into a rectangle defined by the bounds.
+  virtual bool Paint(vtkContext2D *painter);
+
+  // Description:
+  // Compute and return the lower left corner of this legend, along
+  // with its width and height.
+  virtual vtkRectf GetBoundingRect(vtkContext2D* painter);
+
+  // Description:
+  // Get/Set the vtkScalarsToColors used to draw this legend.
+  // Since this legend represents categorical data, this
+  // vtkScalarsToColors must have been populated using SetAnnotation().
+  virtual void SetScalarsToColors(vtkScalarsToColors* stc);
+  virtual vtkScalarsToColors * GetScalarsToColors();
+
+  // Description:
+  // Get/Set the array of values that will be represented by this legend.
+  // This array must contain distinct annotated values from the ScalarsToColors.
+  // Each value in this array will be drawn as a separate entry within this
+  // legend.
+  vtkGetMacro(Values, vtkVariantArray*);
+  vtkSetMacro(Values, vtkVariantArray*);
+
+  // Description:
+  // Get/set the title text of the legend.
+  virtual void SetTitle(const vtkStdString &title);
+  virtual vtkStdString GetTitle();
+
+  // Description:
+  // Get/set the label to use for outlier data.
+  vtkGetMacro(OutlierLabel, vtkStdString);
+  vtkSetMacro(OutlierLabel, vtkStdString);
+
+protected:
+  vtkCategoryLegend();
+  virtual ~vtkCategoryLegend();
+
+  bool                                HasOutliers;
+  float                               TitleWidthOffset;
+  vtkScalarsToColors*                 ScalarsToColors;
+  vtkStdString                        OutlierLabel;
+  vtkStdString                        Title;
+  vtkNew<vtkTextProperty>             TitleProperties;
+  vtkVariantArray*                    Values;
+
+private:
+  vtkCategoryLegend(const vtkCategoryLegend &); // Not implemented.
+  void operator=(const vtkCategoryLegend &);   // Not implemented.
+};
+
+#endif
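
As the class documentation above describes, the legend needs an annotated vtkScalarsToColors plus a vtkVariantArray of the annotated values to display. A minimal usage sketch, with hypothetical categories and an assumed pre-existing vtkContextScene; it is illustrative only, not taken from the patch:

    #include "vtkCategoryLegend.h"
    #include "vtkContextScene.h"
    #include "vtkLookupTable.h"
    #include "vtkNew.h"
    #include "vtkVariant.h"
    #include "vtkVariantArray.h"

    // Sketch: build a two-category legend (category values/labels are made up).
    void BuildCategoryLegend(vtkContextScene *scene)
    {
      vtkNew<vtkLookupTable> lut;
      lut->SetAnnotation(vtkVariant(0), "apples");
      lut->SetAnnotation(vtkVariant(1), "oranges");
      lut->IndexedLookupOn();
      lut->Build();

      vtkNew<vtkVariantArray> values;   // the annotated values to list
      values->InsertNextValue(vtkVariant(0));
      values->InsertNextValue(vtkVariant(1));

      vtkNew<vtkCategoryLegend> legend;
      legend->SetScalarsToColors(lut.GetPointer());
      legend->SetValues(values.GetPointer());
      legend->SetTitle("Fruit");
      scene->AddItem(legend.GetPointer());
    }
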
diff --git a/Charts/Core/vtkChart.cxx b/Charts/Core/vtkChart.cxx
index c5bcf21..b8c1cb8 100644
--- a/Charts/Core/vtkChart.cxx
+++ b/Charts/Core/vtkChart.cxx
@@ -27,10 +27,11 @@
 //-----------------------------------------------------------------------------
 vtkChart::MouseActions::MouseActions()
 {
-  this->Data[0] = vtkContextMouseEvent::LEFT_BUTTON;
-  this->Data[1] = vtkContextMouseEvent::MIDDLE_BUTTON;
-  this->Data[2] = vtkContextMouseEvent::RIGHT_BUTTON;
-  this->Data[3] = -1;
+  this->Pan() = vtkContextMouseEvent::LEFT_BUTTON;
+  this->Zoom() = vtkContextMouseEvent::MIDDLE_BUTTON;
+  this->Select() = vtkContextMouseEvent::RIGHT_BUTTON;
+  this->ZoomAxis() = -1;
+  this->SelectPolygon() = -1;
 }
 
 //-----------------------------------------------------------------------------
@@ -218,23 +219,89 @@ bool vtkChart::CalculatePlotTransform(vtkAxis *x, vtkAxis *y,
     vtkWarningMacro("Called with null arguments.");
     return false;
     }
+
+  vtkVector2d origin(x->GetMinimum(), y->GetMinimum());
+  vtkVector2d scale(x->GetMaximum() - x->GetMinimum(),
+                    y->GetMaximum() - y->GetMinimum());
+  vtkVector2d shift(0.0, 0.0);
+  vtkVector2d factor(1.0, 1.0);
+
+  for (int i = 0; i < 2; ++i)
+    {
+    if (fabs(log10(origin[i] / scale[i])) > 2)
+      {
+      shift[i] = floor(log10(origin[i] / scale[i]) / 3.0) * 3.0;
+      shift[i] = -origin[i];
+      }
+    if (fabs(log10(scale[i])) > 10)
+      {
+      // We need to scale the transform to show all data, do this in blocks.
+      factor[i] = pow(10.0, floor(log10(scale[i]) / 10.0) * -10.0);
+      scale[i] = scale[i] * factor[i];
+      }
+    }
+  x->SetScalingFactor(factor[0]);
+  x->SetShift(shift[0]);
+  y->SetScalingFactor(factor[1]);
+  y->SetShift(shift[1]);
+
+  // Get the scale for the plot area from the x and y axes
+  float *min = x->GetPoint1();
+  float *max = x->GetPoint2();
+  if (fabs(max[0] - min[0]) == 0.0)
+    {
+    return false;
+    }
+  float xScale = scale[0] / (max[0] - min[0]);
+
+  // Now the y axis
+  min = y->GetPoint1();
+  max = y->GetPoint2();
+  if (fabs(max[1] - min[1]) == 0.0)
+    {
+    return false;
+    }
+  float yScale = scale[1] / (max[1] - min[1]);
+
+  transform->Identity();
+  transform->Translate(this->Point1[0], this->Point1[1]);
+  // Get the scale for the plot area from the x and y axes
+  transform->Scale(1.0 / xScale, 1.0 / yScale);
+  transform->Translate(-(x->GetMinimum() + shift[0]) * factor[0],
+                       -(y->GetMinimum() + shift[1]) * factor[1]);
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkChart::CalculateUnscaledPlotTransform(vtkAxis *x, vtkAxis *y,
+                                              vtkTransform2D *transform)
+{
+  if (!x || !y || !transform)
+    {
+    vtkWarningMacro("Called with null arguments.");
+    return false;
+    }
+
+  vtkVector2d scale(x->GetMaximum() - x->GetMinimum(),
+                    y->GetMaximum() - y->GetMinimum());
+
   // Get the scale for the plot area from the x and y axes
   float *min = x->GetPoint1();
   float *max = x->GetPoint2();
-  if (fabs(max[0] - min[0]) == 0.0f)
+  if (fabs(max[0] - min[0]) == 0.0)
     {
     return false;
     }
-  float xScale = (x->GetMaximum() - x->GetMinimum()) / (max[0] - min[0]);
+  double xScale = scale[0] / (max[0] - min[0]);
 
   // Now the y axis
   min = y->GetPoint1();
   max = y->GetPoint2();
-  if (fabs(max[1] - min[1]) == 0.0f)
+  if (fabs(max[1] - min[1]) == 0.0)
     {
     return false;
     }
-  float yScale = (y->GetMaximum() - y->GetMinimum()) / (max[1] - min[1]);
+  double yScale = scale[1] / (max[1] - min[1]);
 
   transform->Identity();
   transform->Translate(this->Point1[0], this->Point1[1]);
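
In CalculatePlotTransform above, a shift kicks in when the axis origin is large relative to the visible span (fabs(log10(origin/scale)) > 2; note the first assignment to shift[i] in the hunk is immediately overwritten, so the shift that survives is simply -origin[i]), and a scaling factor kicks in when the span itself is extreme (fabs(log10(scale)) > 10). A self-contained sketch of the same decision rule with illustrative inputs, not VTK code:

    #include <cmath>
    #include <cstdio>

    // Mirrors the shift/scale decision above (illustrative, standalone).
    int main()
    {
      double origin = 1.0e12;   // axis minimum
      double scale  = 50.0;     // axis maximum - minimum
      double shift = 0.0, factor = 1.0;

      if (std::fabs(std::log10(origin / scale)) > 2)
        {
        shift = -origin;        // ratio is 2e10, so the offset is shifted out
        }
      if (std::fabs(std::log10(scale)) > 10)
        {
        factor = std::pow(10.0, std::floor(std::log10(scale) / 10.0) * -10.0);
        scale *= factor;        // extreme spans are scaled back in blocks of 1e10
        }
      std::printf("shift=%g factor=%g scale=%g\n", shift, factor, scale);
      return 0;
    }
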
diff --git a/Charts/Core/vtkChart.h b/Charts/Core/vtkChart.h
index fcd4c54..0a0c12a 100644
--- a/Charts/Core/vtkChart.h
+++ b/Charts/Core/vtkChart.h
@@ -51,13 +51,24 @@ public:
     LINE,
     POINTS,
     BAR,
-    STACKED};
+    STACKED,
+    BAG,
+    FUNCTIONALBAG};
 
   // Description:
-  // Enum of valid chart action types
+  // Enum of valid chart action types.
+  //
+  // PAN - moves the axis range
+  // ZOOM - zooms to a selected rectangle
+  // ZOOM_AXIS - zooms the x and y axis range
+  // SELECT_RECTANGLE - selects points within a rectangle
+  // SELECT_POLYGON - selects points within a polygon
+  // SELECT - alias for SELECT_RECTANGLE
+  // NOTIFY - Post vtkCommand::InteractionEvent on selection of a point
   enum {
     PAN = 0,
     ZOOM,
+    ZOOM_AXIS,
     SELECT,
     SELECT_RECTANGLE = SELECT,
     SELECT_POLYGON,
@@ -293,6 +304,11 @@ protected:
                               vtkTransform2D *transform);
 
   // Description:
+  // Calculate the unshifted and unscaled plot transform for the x and y axes.
+  bool CalculateUnscaledPlotTransform(vtkAxis *x, vtkAxis *y,
+                                      vtkTransform2D *transform);
+
+  // Description:
   // Attach axis range listener so we can forward those events at the chart level
   void AttachAxisRangeListener(vtkAxis*);
 
@@ -348,11 +364,12 @@ protected:
     {
   public:
     MouseActions();
-    enum { MaxAction = 4 };
+    enum { MaxAction = 5 };
     short& Pan() { return Data[0]; }
     short& Zoom() { return Data[1]; }
-    short& Select() { return Data[2]; }
-    short& SelectPolygon() { return Data[3]; }
+    short& ZoomAxis() { return Data[2]; }
+    short& Select() { return Data[3]; }
+    short& SelectPolygon() { return Data[4]; }
     short& operator[](int index) { return Data[index]; }
     short Data[MaxAction];
     };
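
MouseActions now carries five slots, and the new ZoomAxis gesture defaults to no button (-1). A sketch of binding it from application code, assuming vtkChart::SetActionToButton() is available as in existing interaction setup; the button choices are arbitrary:

    #include "vtkChart.h"
    #include "vtkChartXY.h"
    #include "vtkContextMouseEvent.h"

    // Sketch: enable per-axis zoom on the right button and move rectangle
    // selection to the middle button (assumes vtkChart::SetActionToButton()).
    void ConfigureInteraction(vtkChartXY *chart)
    {
      chart->SetActionToButton(vtkChart::ZOOM_AXIS,
                               vtkContextMouseEvent::RIGHT_BUTTON);
      chart->SetActionToButton(vtkChart::SELECT_RECTANGLE,
                               vtkContextMouseEvent::MIDDLE_BUTTON);
    }
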
diff --git a/Charts/Core/vtkChartLegend.cxx b/Charts/Core/vtkChartLegend.cxx
index c23ae86..44286ff 100644
--- a/Charts/Core/vtkChartLegend.cxx
+++ b/Charts/Core/vtkChartLegend.cxx
@@ -75,6 +75,7 @@ vtkChartLegend::vtkChartLegend()
   this->Inline = true;
   this->Button = -1;
   this->DragEnabled = true;
+  this->CacheBounds = true;
 }
 
 //-----------------------------------------------------------------------------
@@ -170,7 +171,8 @@ bool vtkChartLegend::Paint(vtkContext2D *painter)
 //-----------------------------------------------------------------------------
 vtkRectf vtkChartLegend::GetBoundingRect(vtkContext2D *painter)
 {
-  if (this->RectTime > this->GetMTime() && this->RectTime > this->PlotTime)
+  if (this->CacheBounds && this->RectTime > this->GetMTime() &&
+      this->RectTime > this->PlotTime)
     {
     return this->Rect;
     }
@@ -281,10 +283,14 @@ vtkChart* vtkChartLegend::GetChart()
 //-----------------------------------------------------------------------------
 bool vtkChartLegend::Hit(const vtkContextMouseEvent &mouse)
 {
-  if (this->DragEnabled && mouse.GetScreenPos().GetX() > this->Rect.GetX() &&
-      mouse.GetScreenPos().GetX() < this->Rect.GetX() + this->Rect.GetWidth() &&
-      mouse.GetScreenPos().GetY() > this->Rect.GetY() &&
-      mouse.GetScreenPos().GetY() < this->Rect.GetY() + this->Rect.GetHeight())
+  if (!this->GetVisible())
+    {
+    return false;
+    }
+  if (this->DragEnabled && mouse.GetPos().GetX() > this->Rect.GetX() &&
+      mouse.GetPos().GetX() < this->Rect.GetX() + this->Rect.GetWidth() &&
+      mouse.GetPos().GetY() > this->Rect.GetY() &&
+      mouse.GetPos().GetY() < this->Rect.GetY() + this->Rect.GetHeight())
     {
     return true;
     }
@@ -299,10 +305,10 @@ bool vtkChartLegend::MouseMoveEvent(const vtkContextMouseEvent &mouse)
 {
   if (this->Button == vtkContextMouseEvent::LEFT_BUTTON)
     {
-    vtkVector2f delta = mouse.GetScenePos() - mouse.GetLastScenePos();
-    this->HorizontalAlignment = vtkChartLegend::CUSTOM;
+    vtkVector2f delta = mouse.GetPos() - mouse.GetLastPos();
     this->Storage->Point = this->Storage->Point + delta;
     this->GetScene()->SetDirty(true);
+    this->Modified();
     }
   return true;
 }
diff --git a/Charts/Core/vtkChartLegend.h b/Charts/Core/vtkChartLegend.h
index def819c..1927aaa 100644
--- a/Charts/Core/vtkChartLegend.h
+++ b/Charts/Core/vtkChartLegend.h
@@ -162,6 +162,17 @@ public:
   vtkTextProperty * GetLabelProperties();
 
   // Description:
+  // Toggle whether or not this legend should attempt to cache its position
+  // and size.  The default value is true.  If this value is set to false,
+  // the legend will recalculate its position and bounds every time it is
+  // drawn.  If users will be able to zoom in or out on your legend, you
+  // may want to set this to false.  Otherwise, the border around the legend
+  // may not resize appropriately.
+  vtkSetMacro(CacheBounds, bool);
+  vtkGetMacro(CacheBounds, bool);
+  vtkBooleanMacro(CacheBounds, bool);
+
+  // Description:
   // Return true if the supplied x, y coordinate is inside the item.
   virtual bool Hit(const vtkContextMouseEvent &mouse);
 
@@ -202,6 +213,11 @@ protected:
   bool DragEnabled;
 
   // Description:
+  // Should the legend attempt to avoid recalculating its position &
+  // bounds unnecessarily?
+  bool CacheBounds;
+
+  // Description:
   // Last button to be pressed.
   int Button;
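
Per the CacheBounds documentation above, a legend the user can zoom should have caching disabled so its border is recomputed on every draw. A one-line sketch using the boolean macro declared in this hunk; the legend pointer is assumed to come from elsewhere:

    #include "vtkChartLegend.h"

    // Sketch: recompute the legend bounds on every draw (e.g. when zooming).
    void DisableLegendBoundsCache(vtkChartLegend *legend)
    {
      legend->CacheBoundsOff();   // from vtkBooleanMacro(CacheBounds, bool)
    }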
 
diff --git a/Charts/Core/vtkChartPie.cxx b/Charts/Core/vtkChartPie.cxx
index 2c32082..058b913 100644
--- a/Charts/Core/vtkChartPie.cxx
+++ b/Charts/Core/vtkChartPie.cxx
@@ -269,7 +269,7 @@ bool vtkChartPie::LocatePointInPlots(const vtkContextMouseEvent &mouse)
         {
         const char *label = this->Private->Plot->GetLabel(labelIndex);
         vtksys_ios::ostringstream ostr;
-        ostr << label << ": " << plotPos.GetX();
+        ostr << label << ": " << plotPos.GetY();
         this->Tooltip->SetText(ostr.str().c_str());
         this->Tooltip->SetPosition(mouse.GetScreenPos()[0] + 2,
                                    mouse.GetScreenPos()[1] + 2);
diff --git a/Charts/Core/vtkChartSelectionHelper.h b/Charts/Core/vtkChartSelectionHelper.h
index b1cc347..e819a5e 100644
--- a/Charts/Core/vtkChartSelectionHelper.h
+++ b/Charts/Core/vtkChartSelectionHelper.h
@@ -45,8 +45,8 @@ namespace vtkChartSelectionHelper
 // Description:
 // Populate the annotation link with the supplied selectionIds array, and set
 // the appropriate node properties for a standard row based chart selection.
-void MakeSelection(vtkAnnotationLink *link, vtkIdTypeArray *selectionIds,
-                   vtkPlot *plot)
+static void MakeSelection(vtkAnnotationLink *link, vtkIdTypeArray *selectionIds,
+                          vtkPlot *plot)
 {
   assert(link != NULL && selectionIds != NULL);
 
@@ -92,7 +92,7 @@ void MakeSelection(vtkAnnotationLink *link, vtkIdTypeArray *selectionIds,
 
 // Description:
 // Subtract the supplied selection from the oldSelection.
-void MinusSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
+static void MinusSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
 {
   // We rely on the selection id arrays being sorted.
   std::vector<vtkIdType> output;
@@ -136,7 +136,7 @@ void MinusSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
 
 // Description:
 // Add the supplied selection to the oldSelection.
-void AddSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
+static void AddSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
 {
   // Add all unique array indices to create a new combined array.
   vtkIdType *ptrSelection =
@@ -163,7 +163,7 @@ void AddSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
 
 // Description:
 // Toggle the supplied selection from the oldSelection.
-void ToggleSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
+static void ToggleSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
 {
   // We rely on the selection id arrays being sorted.
   std::vector<vtkIdType> output;
@@ -212,9 +212,9 @@ void ToggleSelection(vtkIdTypeArray *selection, vtkIdTypeArray *oldSelection)
 // Build a selection based on the supplied selectionMode using the new
 // plotSelection and combining it with the oldSelection. If link is not NULL
 // then the resulting selection will be set on the link.
-void BuildSelection(vtkAnnotationLink *link, int selectionMode,
-                    vtkIdTypeArray *plotSelection, vtkIdTypeArray *oldSelection,
-                    vtkPlot *plot)
+static void BuildSelection(vtkAnnotationLink *link, int selectionMode,
+                           vtkIdTypeArray *plotSelection, vtkIdTypeArray *oldSelection,
+                           vtkPlot *plot)
 {
   if (!plotSelection || !oldSelection)
     {
@@ -248,7 +248,7 @@ void BuildSelection(vtkAnnotationLink *link, int selectionMode,
 // Description:
 // Combine the SelectionMode with any mouse modifiers to get an effective
 // selection mode for this click event.
-int GetMouseSelectionMode(const vtkContextMouseEvent &mouse, int selectionMode)
+static int GetMouseSelectionMode(const vtkContextMouseEvent &mouse, int selectionMode)
 {
   // Mouse modifiers override the current selection mode.
   if (mouse.GetModifiers() & vtkContextMouseEvent::SHIFT_MODIFIER &&
@@ -256,11 +256,11 @@ int GetMouseSelectionMode(const vtkContextMouseEvent &mouse, int selectionMode)
     {
     return vtkContextScene::SELECTION_TOGGLE;
     }
-  else if (mouse.GetModifiers() & vtkContextMouseEvent::SHIFT_MODIFIER)
+  else if (mouse.GetModifiers() & vtkContextMouseEvent::CONTROL_MODIFIER)
     {
     return vtkContextScene::SELECTION_ADDITION;
     }
-  else if (mouse.GetModifiers() & vtkContextMouseEvent::CONTROL_MODIFIER)
+  else if (mouse.GetModifiers() & vtkContextMouseEvent::SHIFT_MODIFIER)
     {
     return vtkContextScene::SELECTION_SUBTRACTION;
     }
diff --git a/Charts/Core/vtkChartXY.cxx b/Charts/Core/vtkChartXY.cxx
index fcc9f15..9b526b4 100644
--- a/Charts/Core/vtkChartXY.cxx
+++ b/Charts/Core/vtkChartXY.cxx
@@ -33,6 +33,8 @@
 #include "vtkVectorOperators.h"
 
 #include "vtkPlotBar.h"
+#include "vtkPlotBag.h"
+#include "vtkPlotFunctionalBag.h"
 #include "vtkPlotStacked.h"
 #include "vtkPlotLine.h"
 #include "vtkPlotPoints.h"
@@ -494,6 +496,20 @@ void vtkChartXY::RecalculatePlotTransforms()
         }
       this->CalculatePlotTransform(
         xAxis, yAxis, this->ChartPrivate->PlotCorners[i]->GetTransform());
+      // Now we need to set the scale factor on the plots to ensure they rescale
+      // their input data when necessary.
+      vtkRectd shiftScale(xAxis->GetShift(), yAxis->GetShift(),
+                          xAxis->GetScalingFactor(), yAxis->GetScalingFactor());
+      for (unsigned int j = 0;
+           j < this->ChartPrivate->PlotCorners[i]->GetNumberOfItems(); ++j)
+        {
+        vtkPlot *plot =
+            vtkPlot::SafeDownCast(this->ChartPrivate->PlotCorners[i]->GetItem(j));
+        if (plot)
+          {
+          plot->SetShiftScale(shiftScale);
+          }
+        }
       }
     }
   this->PlotTransformValid = true;
@@ -597,7 +613,7 @@ void vtkChartXY::RecalculatePlotBounds()
       continue;
       }
     (*it)->GetBounds(bounds);
-    if (bounds[1]-bounds[0] < 0.0)
+    if (bounds[1] - bounds[0] < 0.0)
       {
       // skip uninitialized bounds.
       continue;
@@ -1024,6 +1040,13 @@ vtkPlot * vtkChartXY::AddPlot(int type)
       plot = bar;
       break;
       }
+    case FUNCTIONALBAG:
+      {
+      vtkPlotFunctionalBag *bag = vtkPlotFunctionalBag::New();
+      bag->GetBrush()->SetColor(color.GetData());
+      plot = bag;
+      break;
+      }
     case STACKED:
       {
       vtkPlotStacked *stacked = vtkPlotStacked::New();
@@ -1032,6 +1055,15 @@ vtkPlot * vtkChartXY::AddPlot(int type)
       plot = stacked;
       break;
       }
+    case BAG:
+      {
+      vtkPlotBag *bag = vtkPlotBag::New();
+      bag->SetParent(this);
+      bag->GetBrush()->SetColor(color.GetData());
+      plot = bag;
+      break;
+      }
+
     default:
       plot = NULL;
     }
@@ -1272,15 +1304,17 @@ bool vtkChartXY::MouseMoveEvent(const vtkContextMouseEvent &mouse)
     vtkVector2d last(0.0, 0.0);
 
     // Go from screen to scene coordinates to work out the delta
+    vtkAxis* xAxis = this->ChartPrivate->axes[vtkAxis::BOTTOM];
+    vtkAxis* yAxis = this->ChartPrivate->axes[vtkAxis::LEFT];
     vtkTransform2D *transform =
         this->ChartPrivate->PlotCorners[0]->GetTransform();
     transform->InverseTransformPoints(screenPos.GetData(), pos.GetData(), 1);
     transform->InverseTransformPoints(lastScreenPos.GetData(), last.GetData(), 1);
     vtkVector2d delta = last - pos;
+    delta[0] /= xAxis->GetScalingFactor();
+    delta[1] /= yAxis->GetScalingFactor();
 
     // Now move the axes and recalculate the transform
-    vtkAxis* xAxis = this->ChartPrivate->axes[vtkAxis::BOTTOM];
-    vtkAxis* yAxis = this->ChartPrivate->axes[vtkAxis::LEFT];
     delta[0] = delta[0] > 0 ?
       std::min(delta[0], xAxis->GetMaximumLimit() - xAxis->GetMaximum()) :
       std::max(delta[0], xAxis->GetMinimumLimit() - xAxis->GetMinimum());
@@ -1301,13 +1335,15 @@ bool vtkChartXY::MouseMoveEvent(const vtkContextMouseEvent &mouse)
           vtkVector2d(mouse.GetLastScreenPos().Cast<double>().GetData());
       pos = vtkVector2d(0.0, 0.0);
       last = vtkVector2d(0.0, 0.0);
+      yAxis = this->ChartPrivate->axes[vtkAxis::RIGHT];
       transform = this->ChartPrivate->PlotCorners[1]->GetTransform();
       transform->InverseTransformPoints(screenPos.GetData(), pos.GetData(), 1);
       transform->InverseTransformPoints(lastScreenPos.GetData(), last.GetData(), 1);
       delta = last - pos;
+      delta[0] /= xAxis->GetScalingFactor();
+      delta[1] /= yAxis->GetScalingFactor();
 
       // Now move the axes and recalculate the transform
-      yAxis = this->ChartPrivate->axes[vtkAxis::RIGHT];
       delta[1] = delta[1] > 0 ?
         std::min(delta[1], yAxis->GetMaximumLimit() - yAxis->GetMaximum()) :
         std::max(delta[1], yAxis->GetMinimumLimit() - yAxis->GetMinimum());
@@ -1323,14 +1359,16 @@ bool vtkChartXY::MouseMoveEvent(const vtkContextMouseEvent &mouse)
           vtkVector2d(mouse.GetLastScreenPos().Cast<double>().GetData());
       pos = vtkVector2d(0.0, 0.0);
       last = vtkVector2d(0.0, 0.0);
+      xAxis = this->ChartPrivate->axes[vtkAxis::TOP];
+      yAxis = this->ChartPrivate->axes[vtkAxis::RIGHT];
       transform = this->ChartPrivate->PlotCorners[2]->GetTransform();
       transform->InverseTransformPoints(screenPos.GetData(), pos.GetData(), 1);
       transform->InverseTransformPoints(lastScreenPos.GetData(), last.GetData(), 1);
       delta = last - pos;
+      delta[0] /= xAxis->GetScalingFactor();
+      delta[1] /= yAxis->GetScalingFactor();
 
       // Now move the axes and recalculate the transform
-      xAxis = this->ChartPrivate->axes[vtkAxis::TOP];
-      yAxis = this->ChartPrivate->axes[vtkAxis::RIGHT];
       delta[0] = delta[0] > 0 ?
         std::min(delta[0], xAxis->GetMaximumLimit() - xAxis->GetMaximum()) :
         std::max(delta[0], xAxis->GetMinimumLimit() - xAxis->GetMinimum());
@@ -1357,6 +1395,63 @@ bool vtkChartXY::MouseMoveEvent(const vtkContextMouseEvent &mouse)
     // Mark the scene as dirty
     this->Scene->SetDirty(true);
     }
+  else if (mouse.GetButton() == this->Actions.ZoomAxis())
+    {
+    vtkVector2d screenPos(mouse.GetScreenPos().Cast<double>().GetData());
+    vtkVector2d lastScreenPos(mouse.GetLastScreenPos().Cast<double>().GetData());
+
+    vtkAxis *axes[] = {
+      this->ChartPrivate->axes[vtkAxis::BOTTOM],
+      this->ChartPrivate->axes[vtkAxis::LEFT],
+      this->ChartPrivate->axes[vtkAxis::TOP],
+      this->ChartPrivate->axes[vtkAxis::RIGHT]
+    };
+
+    for(int i = 0; i < 4; i++)
+      {
+      vtkAxis *axis = axes[i];
+      if(!axis)
+        {
+        continue;
+        }
+
+      // bottom, top -> 0, right, left -> 1
+      int side = i % 2;
+
+      // get mouse delta in the given direction for the axis
+      double delta = lastScreenPos[side] - screenPos[side];
+      if(std::abs(delta) == 0)
+        {
+        continue;
+        }
+
+      // scale and invert delta
+      delta /= -100.0;
+
+      // zoom axis range
+      double min = axis->GetMinimum();
+      double max = axis->GetMaximum();
+      double frac = (max - min) * 0.1;
+      if (frac > 0.0)
+        {
+        min += delta*frac;
+        max -= delta*frac;
+        }
+      else
+        {
+        min -= delta*frac;
+        max += delta*frac;
+        }
+      axis->SetMinimum(min);
+      axis->SetMaximum(max);
+      axis->RecalculateTickSpacing();
+      }
+
+    this->RecalculatePlotTransforms();
+
+    // Mark the scene as dirty
+    this->Scene->SetDirty(true);
+    }
   else if (mouse.GetButton() == this->Actions.SelectPolygon())
     {
     if(this->SelectionPolygon.GetNumberOfPoints() > 0)
@@ -1449,7 +1544,10 @@ bool vtkChartXY::LocatePointInPlots(const vtkContextMouseEvent &mouse,
           if (seriesIndex >= 0)
             {
             // We found a point, set up the tooltip and return
-            this->SetTooltipInfo(mouse, plotPos, seriesIndex, plot,
+            vtkRectd ss(plot->GetShiftScale());
+            vtkVector2d plotPosd(plotPos[0] / ss[2] - ss[0],
+                                 plotPos[1] / ss[3] - ss[1]);
+            this->SetTooltipInfo(mouse, plotPosd, seriesIndex, plot,
                                  segmentIndex);
             if (invokeEvent >= 0)
               {
@@ -1487,11 +1585,11 @@ bool vtkChartXY::LocatePointInPlots(const vtkContextMouseEvent &mouse,
 
 //-----------------------------------------------------------------------------
 void vtkChartXY::SetTooltipInfo(const vtkContextMouseEvent& mouse,
-                                const vtkVector2f &plotPos,
+                                const vtkVector2d &plotPos,
                                 vtkIdType seriesIndex, vtkPlot* plot,
                                 vtkIdType segmentIndex)
 {
-  if(!this->Tooltip)
+  if (!this->Tooltip)
     {
     return;
     }
@@ -1550,6 +1648,12 @@ bool vtkChartXY::MouseButtonPressEvent(const vtkContextMouseEvent &mouse)
     this->DrawBox = true;
     return true;
     }
+  else if (mouse.GetButton() == this->Actions.ZoomAxis())
+    {
+    this->MouseBox.Set(mouse.GetPos().GetX(), mouse.GetPos().GetY(), 0.0, 0.0);
+    this->DrawBox = false;
+    return true;
+    }
   else if (mouse.GetButton() == this->Actions.SelectPolygon())
     {
     this->SelectionPolygon.Clear();
@@ -1793,17 +1897,23 @@ bool vtkChartXY::MouseButtonReleaseEvent(const vtkContextMouseEvent &mouse)
     this->InvokeEvent(vtkCommand::InteractionEvent);
     return true;
     }
+  else if (mouse.GetButton() == this->Actions.ZoomAxis())
+    {
+    return true;
+    }
   return false;
 }
 
-void vtkChartXY::ZoomInAxes(vtkAxis *x, vtkAxis *y, float *origin, float *max)
+void vtkChartXY::ZoomInAxes(vtkAxis *x, vtkAxis *y, float *originf, float *maxf)
 {
   vtkNew<vtkTransform2D> transform;
-  this->CalculatePlotTransform(x, y, transform.GetPointer());
-  float torigin[2];
-  transform->InverseTransformPoints(origin, torigin, 1);
-  float tmax[2];
-  transform->InverseTransformPoints(max, tmax, 1);
+  this->CalculateUnscaledPlotTransform(x, y, transform.GetPointer());
+  vtkVector2d origin(originf[0], originf[1]);
+  vtkVector2d max(maxf[0], maxf[1]);
+  vtkVector2d torigin;
+  transform->InverseTransformPoints(origin.GetData(), torigin.GetData(), 1);
+  vtkVector2d tmax;
+  transform->InverseTransformPoints(max.GetData(), tmax.GetData(), 1);
 
   // Ensure we preserve the directionality of the axes
   if (x->GetMaximum() > x->GetMinimum())
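
AddPlot() above now recognizes the BAG and FUNCTIONALBAG types. A sketch of requesting one through that factory path; the table and its column names are hypothetical, and the exact column layout vtkPlotFunctionalBag expects is not shown in this patch:

    #include "vtkChart.h"
    #include "vtkChartXY.h"
    #include "vtkPlot.h"
    #include "vtkTable.h"

    // Sketch: create a functional-bag plot via the chart's plot factory.
    void AddFunctionalBag(vtkChartXY *chart, vtkTable *table)
    {
      vtkPlot *plot = chart->AddPlot(vtkChart::FUNCTIONALBAG);
      plot->SetInputData(table, "x", "median");   // placeholder column names
    }
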
diff --git a/Charts/Core/vtkChartXY.h b/Charts/Core/vtkChartXY.h
index 5df130d..c063c64 100644
--- a/Charts/Core/vtkChartXY.h
+++ b/Charts/Core/vtkChartXY.h
@@ -167,7 +167,7 @@ public:
   // Description:
   // Set the information passed to the tooltip.
   virtual void SetTooltipInfo(const vtkContextMouseEvent &,
-                              const vtkVector2f &,
+                              const vtkVector2d &,
                               vtkIdType, vtkPlot*,
                               vtkIdType segmentIndex = -1);
 
diff --git a/Charts/Core/vtkColorLegend.cxx b/Charts/Core/vtkColorLegend.cxx
index 97426ab..a9e8d9e 100644
--- a/Charts/Core/vtkColorLegend.cxx
+++ b/Charts/Core/vtkColorLegend.cxx
@@ -45,7 +45,13 @@ vtkColorLegend::vtkColorLegend()
   this->Callback->SetClientData(this);
   this->Callback->SetCallback(vtkColorLegend::OnScalarsToColorsModified);
 
-  this->TransferFunction = 0;
+  this->TransferFunction = NULL;
+
+  this->Orientation = vtkColorLegend::VERTICAL;
+
+  this->Position.Set(0.0, 0.0, 0.0, 0.0);
+  this->CustomPositionSet = false;
+  this->DrawBorder = false;
 }
 
 //-----------------------------------------------------------------------------
@@ -85,12 +91,46 @@ void vtkColorLegend::Update()
     {
     this->ComputeTexture();
     }
+
+  // check if the range of our TransferFunction changed
+  double bounds[4];
+  this->GetBounds(bounds);
+  if (bounds[0] == bounds[1])
+    {
+    vtkWarningMacro(<< "The color transfer function seems to be empty.");
+    this->Axis->Update();
+    return;
+    }
+
+  double axisBounds[2];
+  this->Axis->GetUnscaledRange(axisBounds);
+  if (bounds[0] != axisBounds[0] || bounds[1] != axisBounds[1])
+    {
+    this->Axis->SetUnscaledRange(bounds[0], bounds[1]);
+    }
+
   this->Axis->Update();
 }
 
 //-----------------------------------------------------------------------------
 bool vtkColorLegend::Paint(vtkContext2D* painter)
 {
+  if (this->TransferFunction == NULL)
+    {
+    return true;
+    }
+
+  this->GetBoundingRect(painter);
+
+  if (this->DrawBorder)
+    {
+    // Draw a box around the legend.
+    painter->ApplyPen(this->Pen.GetPointer());
+    painter->ApplyBrush(this->Brush.GetPointer());
+    painter->DrawRect(this->Rect.GetX(), this->Rect.GetY(),
+                      this->Rect.GetWidth(), this->Rect.GetHeight());
+    }
+
   painter->DrawImage(this->Position, this->ImageData);
 
   this->Axis->Paint(painter);
@@ -98,22 +138,41 @@ bool vtkColorLegend::Paint(vtkContext2D* painter)
   return true;
 }
 
+//-----------------------------------------------------------------------------
 void vtkColorLegend::SetTransferFunction(vtkScalarsToColors* transfer)
 {
   this->TransferFunction = transfer;
 }
 
+//-----------------------------------------------------------------------------
 vtkScalarsToColors * vtkColorLegend::GetTransferFunction()
 {
   return this->TransferFunction;
 }
 
 //-----------------------------------------------------------------------------
+void vtkColorLegend::SetPoint(float x, float y)
+{
+  this->Superclass::SetPoint(x, y);
+  this->CustomPositionSet = false;
+}
+
+//-----------------------------------------------------------------------------
+void vtkColorLegend::SetTextureSize(float w, float h)
+{
+  this->Position.SetWidth(w);
+  this->Position.SetHeight(h);
+  this->CustomPositionSet = false;
+  this->Modified();
+}
+
+//-----------------------------------------------------------------------------
 void vtkColorLegend::SetPosition(const vtkRectf& pos)
 {
   this->Position = pos;
-  this->Axis->SetPoint1(vtkVector2f(pos.GetX() + pos.GetWidth(), pos.GetY()));
-  this->Axis->SetPoint2(vtkVector2f(pos.GetX() + pos.GetWidth(), pos.GetY() + pos.GetHeight()));
+  this->SetPoint(pos[0], pos[1]);
+  this->UpdateAxisPosition();
+  this->CustomPositionSet = true;
 }
 
 //-----------------------------------------------------------------------------
@@ -125,19 +184,76 @@ vtkRectf vtkColorLegend::GetPosition()
 //-----------------------------------------------------------------------------
 vtkRectf vtkColorLegend::GetBoundingRect(vtkContext2D *painter)
 {
-  if (this->RectTime > this->GetMTime() && this->RectTime > this->PlotTime &&
+  if (this->CacheBounds && this->RectTime > this->GetMTime() &&
+      this->RectTime > this->PlotTime &&
       this->RectTime > this->Axis->GetMTime())
     {
     return this->Rect;
     }
 
+  if (!this->CustomPositionSet)
+    {
+    // if the Position ivar was not explicitly set, we compute the
+    // location of the lower left point of the legend here.
+    float posX = floor(this->Point[0]);
+    float posY = floor(this->Point[1]);
+    float posW = this->Position.GetWidth();
+    float posH = this->Position.GetHeight();
+
+    if (this->Orientation == vtkColorLegend::VERTICAL)
+      {
+      // For vertical orientation, we need to move our anchor point
+      // further to the left to accommodate the width of the axis.
+      // To do this, we query our axis to get its preliminary bounds.
+      // Even though its position has not yet been set, its width &
+      // height should still be accurate.
+      this->UpdateAxisPosition();
+      this->Axis->Update();
+      vtkRectf axisRect = this->Axis->GetBoundingRect(painter);
+      posX -= axisRect.GetWidth();
+      }
+
+    // Compute bottom left point based on current alignment.
+    if (this->HorizontalAlignment == vtkChartLegend::CENTER)
+      {
+      posX -= posW / 2.0;
+      }
+    else if (this->HorizontalAlignment == vtkChartLegend::RIGHT)
+      {
+      posX -= posW;
+      }
+    if (this->VerticalAlignment == vtkChartLegend::CENTER)
+      {
+      posY -= posH / 2.0;
+      }
+    else if (this->VerticalAlignment == vtkChartLegend::TOP)
+      {
+      posY -= posH;
+      }
+
+    this->Position.SetX(posX);
+    this->Position.SetY(posY);
+    this->UpdateAxisPosition();
+    }
+
   this->Axis->Update();
   vtkRectf axisRect = this->Axis->GetBoundingRect(painter);
 
-  // Default point placement is bottom left.
-  this->Rect = vtkRectf(0.0, 0.0,
-                        this->SymbolWidth + axisRect.GetWidth(),
-                        this->Position.GetHeight() + axisRect.GetHeight());
+  if (this->Orientation == vtkColorLegend::HORIZONTAL)
+    {
+    // "+ 1" so the texture doesn't obscure the border
+    this->Rect = vtkRectf(this->Position.GetX(),
+                          this->Position.GetY() - axisRect.GetHeight() + 1,
+                          this->Position.GetWidth() + 1,
+                          this->Position.GetHeight() + axisRect.GetHeight());
+    }
+  else
+    {
+    this->Rect = vtkRectf(this->Position.GetX(),
+                          this->Position.GetY(),
+                          this->Position.GetWidth() + axisRect.GetWidth(),
+                          this->Position.GetHeight());
+    }
 
   this->RectTime.Modified();
   return this->Rect;
@@ -147,6 +263,11 @@ vtkRectf vtkColorLegend::GetBoundingRect(vtkContext2D *painter)
 //-----------------------------------------------------------------------------
 void vtkColorLegend::ComputeTexture()
 {
+  if (this->TransferFunction == NULL)
+    {
+    return;
+    }
+
   if (!this->ImageData)
     {
     this->ImageData = vtkSmartPointer<vtkImageData>::New();
@@ -163,13 +284,22 @@ void vtkColorLegend::ComputeTexture()
   this->Axis->SetUnscaledRange(bounds[0], bounds[1]);
   //this->Axis->AutoScale();
 
-  // Could depend of the screen resolution
+  // Could depend on the screen resolution
   const int dimension = 256;
   double* values = new double[dimension];
   // Texture 1D
-  this->ImageData->SetExtent(0, 0,
-                             0, dimension-1,
-                             0, 0);
+  if (this->Orientation == vtkColorLegend::VERTICAL)
+    {
+    this->ImageData->SetExtent(0, 0,
+                               0, dimension-1,
+                               0, 0);
+    }
+  else
+    {
+    this->ImageData->SetExtent(0, dimension-1,
+                               0, 0,
+                               0, 0);
+    }
   this->ImageData->AllocateScalars(VTK_UNSIGNED_CHAR, 3);
 
   for (int i = 0; i < dimension; ++i)
@@ -201,3 +331,62 @@ void vtkColorLegend::ScalarsToColorsModified(vtkObject* vtkNotUsed(object),
 {
   this->Modified();
 }
+
+//-----------------------------------------------------------------------------
+void vtkColorLegend::SetOrientation(int orientation)
+{
+  if (orientation < 0 || orientation > 1)
+    {
+    vtkErrorMacro("Error, invalid orientation value supplied: " << orientation)
+    return;
+    }
+  this->Orientation = orientation;
+  if (this->Orientation == vtkColorLegend::HORIZONTAL)
+    {
+    this->Axis->SetPosition(vtkAxis::BOTTOM);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkColorLegend::SetTitle(const vtkStdString &title)
+{
+  this->Axis->SetTitle(title);
+}
+
+//-----------------------------------------------------------------------------
+vtkStdString vtkColorLegend::GetTitle()
+{
+  return this->Axis->GetTitle();
+}
+
+//-----------------------------------------------------------------------------
+void vtkColorLegend::UpdateAxisPosition()
+{
+  if (this->Orientation == vtkColorLegend::VERTICAL)
+    {
+    this->Axis->SetPoint1(
+      vtkVector2f(this->Position.GetX() + this->Position.GetWidth(),
+                  this->Position.GetY()));
+    this->Axis->SetPoint2(
+      vtkVector2f(this->Position.GetX() + this->Position.GetWidth(),
+                  this->Position.GetY() + this->Position.GetHeight()));
+    }
+  else
+    {
+    this->Axis->SetPoint1(
+      vtkVector2f(this->Position.GetX(), this->Position.GetY()));
+    this->Axis->SetPoint2(
+      vtkVector2f(this->Position.GetX() + this->Position.GetWidth(),
+                  this->Position.GetY()));
+    }
+}
+
+//-----------------------------------------------------------------------------
+bool vtkColorLegend::MouseMoveEvent(const vtkContextMouseEvent &mouse)
+{
+  bool retval = this->Superclass::MouseMoveEvent(mouse);
+  this->Position[0] = this->Point[0];
+  this->Position[1] = this->Point[1];
+  this->UpdateAxisPosition();
+  return retval;
+}
diff --git a/Charts/Core/vtkColorLegend.h b/Charts/Core/vtkColorLegend.h
index 933aa98..1ed0d8f 100644
--- a/Charts/Core/vtkColorLegend.h
+++ b/Charts/Core/vtkColorLegend.h
@@ -27,6 +27,7 @@
 #include "vtkVector.h"       // For vtkRectf
 
 class vtkAxis;
+class vtkContextMouseEvent;
 class vtkImageData;
 class vtkScalarsToColors;
 class vtkCallbackCommand;
@@ -39,6 +40,13 @@ public:
   static vtkColorLegend* New();
 
   // Description:
+  // Enum of legend orientation types
+  enum {
+    VERTICAL = 0,
+    HORIZONTAL
+  };
+
+  // Description:
   // Bounds of the item, by default (0, 1, 0, 1) but it mainly depends on the
   // range of the vtkScalarsToColors function.
   virtual void GetBounds(double bounds[4]);
@@ -55,10 +63,30 @@ public:
   // draws the texture into the shape
   virtual bool Paint(vtkContext2D *painter);
 
+  // Description:
+  // Set/Get the transfer function that is used to draw the scalar bar
+  // within this legend.
   virtual void SetTransferFunction(vtkScalarsToColors* transfer);
   virtual vtkScalarsToColors * GetTransferFunction();
 
+  // Description:
+  // Set the point this legend is anchored to.
+  virtual void SetPoint(float x, float y);
+
+  // Description:
+  // Set the size of the scalar bar drawn by this legend.
+  virtual void SetTextureSize(float w, float h);
+
+  // Description:
+  // Set the origin, width, and height of the scalar bar drawn by this legend.
+  // This method overrides the anchor point, as well as any horizontal and
+  // vertical alignment that has been set for this legend.  If this is a
+  // problem for you, use SetPoint() and SetTextureSize() instead.
   virtual void SetPosition(const vtkRectf& pos);
+
+  // Description:
+  // Returns the origin, width, and height of the scalar bar drawn by this
+  // legend.
   virtual vtkRectf GetPosition();
 
   // Description:
@@ -68,6 +96,28 @@ public:
   // ensure the numbers are correct, Update() should be called first.
   vtkRectf GetBoundingRect(vtkContext2D* painter);
 
+  // Description:
+  // Set/get the orientation of the legend.
+  // Valid orientations are VERTICAL (default) and HORIZONTAL.
+  virtual void SetOrientation(int orientation);
+  vtkGetMacro(Orientation, int);
+
+  // Description:
+  // Get/set the title text of the legend.
+  virtual void SetTitle(const vtkStdString &title);
+  virtual vtkStdString GetTitle();
+
+  // Description:
+  // Toggle whether or not a border should be drawn around this legend.
+  // The default behavior is to not draw a border.
+  vtkSetMacro(DrawBorder, bool);
+  vtkGetMacro(DrawBorder, bool);
+  vtkBooleanMacro(DrawBorder, bool);
+
+  // Description:
+  // Mouse move event.
+  virtual bool MouseMoveEvent(const vtkContextMouseEvent &mouse);
+
 protected:
   vtkColorLegend();
   virtual ~vtkColorLegend();
@@ -85,12 +135,19 @@ protected:
   static void OnScalarsToColorsModified(vtkObject* caller, unsigned long eid,
                                         void *clientdata, void* calldata);
 
+  // Description:
+  // Moves the axis whenever the position of this legend changes.
+  void UpdateAxisPosition();
+
   vtkScalarsToColors*                 TransferFunction;
   vtkSmartPointer<vtkImageData>       ImageData;
   vtkSmartPointer<vtkAxis>            Axis;
   vtkSmartPointer<vtkCallbackCommand> Callback;
   bool                                Interpolate;
+  bool                                CustomPositionSet;
+  bool                                DrawBorder;
   vtkRectf                            Position;
+  int                                 Orientation;
 
 private:
   vtkColorLegend(const vtkColorLegend &); // Not implemented.
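
Combining the new vtkColorLegend options, a horizontal legend with a border and title could be set up as below; the transfer function and scene are assumed to exist already, and the sizes are illustrative:

    #include "vtkColorLegend.h"
    #include "vtkColorTransferFunction.h"
    #include "vtkContextScene.h"
    #include "vtkNew.h"

    // Sketch: a horizontal color legend with a border and a title.
    void BuildColorLegend(vtkContextScene *scene, vtkColorTransferFunction *ctf)
    {
      vtkNew<vtkColorLegend> legend;
      legend->SetTransferFunction(ctf);
      legend->SetOrientation(vtkColorLegend::HORIZONTAL);
      legend->SetTitle("Pressure");
      legend->DrawBorderOn();
      legend->SetTextureSize(200.0f, 20.0f);  // scalar bar width/height
      legend->SetPoint(20.0f, 20.0f);         // anchor point in scene coordinates
      scene->AddItem(legend.GetPointer());
    }
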
diff --git a/Charts/Core/vtkColorTransferControlPointsItem.cxx b/Charts/Core/vtkColorTransferControlPointsItem.cxx
index f804d40..dbc0b9b 100644
--- a/Charts/Core/vtkColorTransferControlPointsItem.cxx
+++ b/Charts/Core/vtkColorTransferControlPointsItem.cxx
@@ -46,6 +46,7 @@ vtkColorTransferControlPointsItem::~vtkColorTransferControlPointsItem()
 {
   if (this->ColorTransferFunction)
     {
+    this->ColorTransferFunction->RemoveObserver(this->Callback);
     this->ColorTransferFunction->Delete();
     this->ColorTransferFunction = 0;
     }
@@ -134,7 +135,8 @@ void vtkColorTransferControlPointsItem::GetControlPoint(vtkIdType index, double*
   double xrgbms[6];
   vtkColorTransferFunction* thisTF = const_cast<vtkColorTransferFunction*>(
     this->ColorTransferFunction);
-  if(thisTF)
+
+  if (thisTF)
     {
     thisTF->GetNodeValue(index, xrgbms);
     pos[0] = xrgbms[0];
@@ -156,7 +158,9 @@ void vtkColorTransferControlPointsItem::SetControlPoint(vtkIdType index, double*
     xrgbms[0] = newPos[0];
     xrgbms[4] = newPos[2];
     xrgbms[5] = newPos[3];
+    this->StartChanges();
     this->ColorTransferFunction->SetNodeValue(index, xrgbms);
+    this->EndChanges();
     }
 }
 
@@ -167,6 +171,9 @@ void vtkColorTransferControlPointsItem::EditPoint(float tX, float tY)
     {
     return;
     }
+
+  this->StartChanges();
+
   double xrgbms[6];
   this->ColorTransferFunction->GetNodeValue(this->CurrentPoint, xrgbms);
   xrgbms[4] += tX;
@@ -179,6 +186,8 @@ void vtkColorTransferControlPointsItem::EditPoint(float tX, float tY)
     xrgbms[5] += tY;
     this->ColorTransferFunction->SetNodeValue(this->CurrentPoint - 1, xrgbms);
     }
+
+  this->EndChanges();
 }
 
 //-----------------------------------------------------------------------------
@@ -188,11 +197,17 @@ vtkIdType vtkColorTransferControlPointsItem::AddPoint(double* newPos)
     {
     return -1;
     }
+
+  this->StartChanges();
+
+  double posX = newPos[0];
   double rgb[3] = {0., 0., 0.};
-  this->ColorTransferFunction->GetColor(newPos[0], rgb);
+  this->ColorTransferFunction->GetColor(posX, rgb);
   vtkIdType addedPoint =
-    this->ColorTransferFunction->AddRGBPoint(newPos[0], rgb[0], rgb[1], rgb[2]);
+    this->ColorTransferFunction->AddRGBPoint(posX, rgb[0], rgb[1], rgb[2]);
   this->vtkControlPointsItem::AddPointId(addedPoint);
+
+  this->EndChanges();
   return addedPoint;
 }
 
@@ -204,6 +219,9 @@ vtkIdType vtkColorTransferControlPointsItem::RemovePoint(double* currentPoint)
     {
     return -1;
     }
+
+  this->StartChanges();
+
 #ifndef NDEBUG
   vtkIdType expectedPoint =
 #endif
@@ -211,5 +229,31 @@ vtkIdType vtkColorTransferControlPointsItem::RemovePoint(double* currentPoint)
   vtkIdType removedPoint =
     this->ColorTransferFunction->RemovePoint(currentPoint[0]);
   assert(removedPoint == expectedPoint);
+
+  this->EndChanges();
   return removedPoint;
 }
+
+//-----------------------------------------------------------------------------
+void vtkColorTransferControlPointsItem::ComputeBounds(double* bounds)
+{
+  if (this->ColorTransferFunction)
+    {
+    const double* range = this->ColorTransferFunction->GetRange();
+    bounds[0] = range[0];
+    bounds[1] = range[1];
+    bounds[2] = 0.5;
+    bounds[3] = 0.5;
+    }
+  else
+    {
+    this->Superclass::ComputeBounds(bounds);
+    }
+}
+
+//-----------------------------------------------------------------------------
+bool vtkColorTransferControlPointsItem::UsingLogScale()
+{
+  return (this->ColorTransferFunction?
+    (this->ColorTransferFunction->UsingLogScale() != 0) : false);
+}
diff --git a/Charts/Core/vtkColorTransferControlPointsItem.h b/Charts/Core/vtkColorTransferControlPointsItem.h
index 9b0c2c7..bc796ff 100644
--- a/Charts/Core/vtkColorTransferControlPointsItem.h
+++ b/Charts/Core/vtkColorTransferControlPointsItem.h
@@ -86,6 +86,12 @@ protected:
   vtkColorTransferControlPointsItem();
   virtual ~vtkColorTransferControlPointsItem();
 
+  // Description:
+  // Returns true if control points are to be rendered in log-space. This is
+  // true when vtkScalarsToColors is using log-scale, for example. The default
+  // implementation always returns false.
+  virtual bool UsingLogScale();
+
   virtual void emitEvent(unsigned long event, void* params);
 
   virtual unsigned long int GetControlPointsMTime();
@@ -93,6 +99,11 @@ protected:
   virtual void DrawPoint(vtkContext2D* painter, vtkIdType index);
   virtual void EditPoint(float tX, float tY);
 
+  // Description:
+  // Compute the bounds for this item. Overridden to use the
+  // vtkColorTransferFunction range.
+  virtual void ComputeBounds(double* bounds);
+
   vtkColorTransferFunction* ColorTransferFunction;
 
   bool ColorFill;
diff --git a/Charts/Core/vtkColorTransferFunctionItem.cxx b/Charts/Core/vtkColorTransferFunctionItem.cxx
index 47a2be5..1b2d427 100644
--- a/Charts/Core/vtkColorTransferFunctionItem.cxx
+++ b/Charts/Core/vtkColorTransferFunctionItem.cxx
@@ -12,7 +12,6 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
-
 #include "vtkBrush.h"
 #include "vtkCallbackCommand.h"
 #include "vtkContext2D.h"
@@ -25,6 +24,7 @@
 #include "vtkPoints2D.h"
 
 #include <cassert>
+#include <math.h>
 
 //-----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkColorTransferFunctionItem);
@@ -40,6 +40,7 @@ vtkColorTransferFunctionItem::~vtkColorTransferFunctionItem()
 {
   if (this->ColorTransferFunction)
     {
+    this->ColorTransferFunction->RemoveObserver(this->Callback);
     this->ColorTransferFunction->Delete();
     this->ColorTransferFunction = 0;
     }
@@ -115,10 +116,22 @@ void vtkColorTransferFunctionItem::ComputeTexture()
                            0, 0,
                            0, 0);
   this->Texture->AllocateScalars(VTK_UNSIGNED_CHAR, 4);
-
+  bool isLogTable = this->UsingLogScale();
+  double logBoundsMin = bounds[0] > 0.0 ? log10(bounds[0]) : 0.0;
+  double logBoundsDelta = (bounds[0] > 0.0 && bounds[1] > 0.0)?
+    (log10(bounds[1])-log10(bounds[0])) : 0.0;
   for (int i = 0; i < dimension; ++i)
     {
-    values[i] = bounds[0] + i * (bounds[1] - bounds[0]) / (dimension - 1);
+    if (isLogTable)
+      {
+      double normVal = i/(dimension-1.0);
+      double lval = logBoundsMin + normVal*logBoundsDelta;
+      values[i] = pow(10.0, lval);
+      }
+    else
+      {
+      values[i] = bounds[0] + i * (bounds[1] - bounds[0]) / (dimension - 1);
+      }
     }
   unsigned char* ptr =
     reinterpret_cast<unsigned char*>(this->Texture->GetScalarPointer(0,0,0));
@@ -135,3 +148,10 @@ void vtkColorTransferFunctionItem::ComputeTexture()
   delete [] values;
   return;
 }
+
+//-----------------------------------------------------------------------------
+bool vtkColorTransferFunctionItem::UsingLogScale()
+{
+  return this->ColorTransferFunction?
+    (this->ColorTransferFunction->UsingLogScale() != 0) : false;
+}
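
When UsingLogScale() returns true, ComputeTexture() above samples the texture uniformly in log10 space: values[i] = 10^(log10(b0) + t * (log10(b1) - log10(b0))) with t = i / (dimension - 1). A standalone sketch with made-up bounds, not VTK code:

    #include <cmath>
    #include <cstdio>

    // Sketch of the log-spaced sampling above (illustrative bounds).
    int main()
    {
      const int dimension = 5;                // the real texture uses 256 texels
      const double bounds[2] = {1.0, 1.0e4};  // must be positive for log sampling
      const double logMin = std::log10(bounds[0]);
      const double logDelta = std::log10(bounds[1]) - std::log10(bounds[0]);

      for (int i = 0; i < dimension; ++i)
        {
        double t = i / (dimension - 1.0);
        double value = std::pow(10.0, logMin + t * logDelta);
        std::printf("texel %d -> %g\n", i, value);  // 1, 10, 100, 1000, 10000
        }
      return 0;
    }
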
diff --git a/Charts/Core/vtkColorTransferFunctionItem.h b/Charts/Core/vtkColorTransferFunctionItem.h
index 2b19046..a86c0bb 100644
--- a/Charts/Core/vtkColorTransferFunctionItem.h
+++ b/Charts/Core/vtkColorTransferFunctionItem.h
@@ -40,6 +40,11 @@ protected:
   virtual ~vtkColorTransferFunctionItem();
 
   // Description:
+  // Returns true if we are rendering in log space.
+  virtual bool UsingLogScale();
+
+
+  // Description:
   // Reimplemented to return the range of the lookup table
   virtual void ComputeBounds(double bounds[4]);
 
diff --git a/Charts/Core/vtkCompositeControlPointsItem.cxx b/Charts/Core/vtkCompositeControlPointsItem.cxx
index 280f518..29349ac 100644
--- a/Charts/Core/vtkCompositeControlPointsItem.cxx
+++ b/Charts/Core/vtkCompositeControlPointsItem.cxx
@@ -51,6 +51,7 @@ vtkCompositeControlPointsItem::~vtkCompositeControlPointsItem()
 {
   if (this->OpacityFunction)
     {
+    this->OpacityFunction->RemoveObserver(this->Callback);
     this->OpacityFunction->Delete();
     this->OpacityFunction = 0;
     }
@@ -154,6 +155,14 @@ void vtkCompositeControlPointsItem::SetColorTransferFunction(vtkColorTransferFun
 }
 
 //-----------------------------------------------------------------------------
+bool vtkCompositeControlPointsItem::UsingLogScale()
+{
+  return (this->PointsFunction != OpacityPointsFunction &&
+    this->ColorTransferFunction &&
+    this->ColorTransferFunction->UsingLogScale());
+}
+
+//-----------------------------------------------------------------------------
 void vtkCompositeControlPointsItem::DrawPoint(vtkContext2D* painter, vtkIdType index)
 {
   if (this->PointsFunction == ColorPointsFunction ||
@@ -204,7 +213,9 @@ void vtkCompositeControlPointsItem::SetControlPoint(vtkIdType index, double* new
       (this->PointsFunction == OpacityPointsFunction ||
        this->PointsFunction == ColorAndOpacityPointsFunction))
     {
+    this->StartChanges();
     this->OpacityFunction->SetNodeValue(index, newPos);
+    this->EndChanges();
     }
 }
 
@@ -238,6 +249,7 @@ void vtkCompositeControlPointsItem::EditPoint(float tX, float tY)
       (this->PointsFunction == ColorPointsFunction ||
        this->PointsFunction == ColorAndOpacityPointsFunction))
     {
+    this->StartChanges();
     double xvms[4];
     this->OpacityFunction->GetNodeValue(this->CurrentPoint, xvms);
     xvms[2] += tX;
@@ -251,6 +263,7 @@ void vtkCompositeControlPointsItem::EditPoint(float tX, float tY)
       xvms[3] += tY;
       this->OpacityFunction->SetNodeValue(this->CurrentPoint - 1, xvms);
       }
+    this->EndChanges();
     }
 }
 
diff --git a/Charts/Core/vtkCompositeControlPointsItem.h b/Charts/Core/vtkCompositeControlPointsItem.h
index c553146..16ce9c0 100644
--- a/Charts/Core/vtkCompositeControlPointsItem.h
+++ b/Charts/Core/vtkCompositeControlPointsItem.h
@@ -103,6 +103,11 @@ protected:
   vtkCompositeControlPointsItem();
   virtual ~vtkCompositeControlPointsItem();
 
+  // Description:
+  // Returns true if control points are to be rendered in log-space. This is
+  // true when vtkScalarsToColors is using log-scale, for example.
+  virtual bool UsingLogScale();
+
   virtual void emitEvent(unsigned long event, void* params);
 
   virtual unsigned long int GetControlPointsMTime();
diff --git a/Charts/Core/vtkCompositeTransferFunctionItem.cxx b/Charts/Core/vtkCompositeTransferFunctionItem.cxx
index fa94ec9..e96b936 100644
--- a/Charts/Core/vtkCompositeTransferFunctionItem.cxx
+++ b/Charts/Core/vtkCompositeTransferFunctionItem.cxx
@@ -43,6 +43,7 @@ vtkCompositeTransferFunctionItem::~vtkCompositeTransferFunctionItem()
 {
   if (this->OpacityFunction)
     {
+    this->OpacityFunction->RemoveObserver(this->Callback);
     this->OpacityFunction->Delete();
     this->OpacityFunction = 0;
     }
diff --git a/Charts/Core/vtkCompositeTransferFunctionItem.h b/Charts/Core/vtkCompositeTransferFunctionItem.h
index abd1c12..a8a7132 100644
--- a/Charts/Core/vtkCompositeTransferFunctionItem.h
+++ b/Charts/Core/vtkCompositeTransferFunctionItem.h
@@ -38,6 +38,12 @@ protected:
   virtual ~vtkCompositeTransferFunctionItem();
 
   // Description:
+  // Returns true if we are rendering in log space.
+  // Since vtkPiecewiseFunction doesn't support log scale, this transfer
+  // function is always shown in non-log space.
+  virtual bool UsingLogScale() { return false; }
+
+  // Description:
   // Reimplemented to return the range of the piecewise function
   virtual void ComputeBounds(double bounds[4]);
 
diff --git a/Charts/Core/vtkContextPolygon.cxx b/Charts/Core/vtkContextPolygon.cxx
index 65c0a43..e6150f5 100644
--- a/Charts/Core/vtkContextPolygon.cxx
+++ b/Charts/Core/vtkContextPolygon.cxx
@@ -15,6 +15,7 @@
 
 #include "vtkContextPolygon.h"
 
+#include <algorithm>
 #include <vector>
 
 #include "vtkTransform2D.h"
diff --git a/Charts/Core/vtkControlPointsItem.cxx b/Charts/Core/vtkControlPointsItem.cxx
index d8337e0..47c820f 100644
--- a/Charts/Core/vtkControlPointsItem.cxx
+++ b/Charts/Core/vtkControlPointsItem.cxx
@@ -13,6 +13,7 @@
 
 =========================================================================*/
 
+#include "vtkAxis.h"
 #include "vtkBrush.h"
 #include "vtkCallbackCommand.h"
 #include "vtkContext2D.h"
@@ -56,6 +57,7 @@ vtkControlPointsItem::vtkControlPointsItem()
 
   this->BlockUpdates = 0;
   this->StartedInteractions = 0;
+  this->StartedChanges = 0;
 
   this->Callback = vtkCallbackCommand::New();
   this->Callback->SetClientData(this);
@@ -84,11 +86,15 @@ vtkControlPointsItem::vtkControlPointsItem()
   this->EndPointsXMovable = true;
   this->EndPointsYMovable = true;
   this->EndPointsRemovable = true;
+  this->ShowLabels = false;
+  this->LabelFormat = NULL;
+  this->SetLabelFormat("%.3f, %.3f");
 }
 
 //-----------------------------------------------------------------------------
 vtkControlPointsItem::~vtkControlPointsItem()
 {
+  this->SetLabelFormat(NULL);
   if (this->Callback)
     {
     this->Callback->Delete();
@@ -119,6 +125,7 @@ void vtkControlPointsItem::PrintSelf(ostream &os, vtkIndent indent)
   os << indent << "EndPointsXMovable: " << this->EndPointsXMovable << endl;
   os << indent << "EndPointsYMovable: " << this->EndPointsYMovable << endl;
   os << indent << "EndPointsRemovable: " << this->EndPointsRemovable << endl;
+  os << indent << "ShowLabels: " << this->ShowLabels << endl;
 }
 
 //-----------------------------------------------------------------------------
@@ -225,13 +232,24 @@ bool vtkControlPointsItem::Paint(vtkContext2D* painter)
 //-----------------------------------------------------------------------------
 void vtkControlPointsItem::StartChanges()
 {
-  this->emitEvent(vtkCommand::StartEvent);
+  ++this->StartedChanges;
+  if (this->StartedChanges == 1)
+    {
+    this->InvokeEvent(vtkCommand::StartEvent);
+    this->emitEvent(vtkCommand::StartEvent);
+    }
 }
 
 //-----------------------------------------------------------------------------
 void vtkControlPointsItem::EndChanges()
 {
-  this->emitEvent(vtkCommand::EndEvent);
+  --this->StartedChanges;
+  assert(this->StartedChanges >=0);
+  if (this->StartedChanges == 0)
+    {
+    this->emitEvent(vtkCommand::EndEvent);
+    this->InvokeEvent(vtkCommand::EndEvent);
+    }
 }
 
 //-----------------------------------------------------------------------------
@@ -321,8 +339,10 @@ void vtkControlPointsItem::ComputePoints()
     this->Selection = vtkIdTypeArray::New();
     for (vtkIdType i = 0; i < selectedPointCount; ++i)
       {
-      assert(oldSelection->GetValue(i) < this->GetNumberOfPoints());
-      this->SelectPoint(oldSelection->GetValue(i));
+      if (oldSelection->GetValue(i) < this->GetNumberOfPoints())
+        {
+        this->SelectPoint(oldSelection->GetValue(i));
+        }
       }
     oldSelection->Delete();
     }
@@ -335,11 +355,48 @@ void vtkControlPointsItem::ComputePoints()
 }
 
 //-----------------------------------------------------------------------------
+void vtkControlPointsItem::TransformScreenToData(const vtkVector2f& in, vtkVector2f& out)
+{
+  out = in;
+  if (this->UsingLogScale())
+    {
+    // using log scale.
+    double bounds[4];
+    this->ComputeBounds(bounds);
+
+    double posX = in.GetX();
+    double normVal = (posX - bounds[0])/(bounds[1] - bounds[0]);
+    double lval = log10(bounds[0]) + normVal*(log10(bounds[1]) - log10(bounds[0]));
+    posX = pow(10.0, lval);
+    out.SetX(posX);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkControlPointsItem::TransformDataToScreen(const vtkVector2f& in, vtkVector2f& out)
+{
+  out = in;
+  if (this->UsingLogScale())
+    {
+    double bounds[4];
+    this->ComputeBounds(bounds);
+
+    double posX = in.GetX();
+    double lnormVal = (log10(posX) - log10(bounds[0])) /
+                      (log10(bounds[1]) - log10(bounds[0]));
+    posX = bounds[0] + lnormVal * (bounds[1] - bounds[0]);
+    out.SetX(posX);
+    }
+}
+
+//-----------------------------------------------------------------------------
 bool vtkControlPointsItem::Hit(const vtkContextMouseEvent &mouse)
 {
+  vtkVector2f vpos = mouse.GetPos();
+  this->TransformScreenToData(vpos, vpos);
   double pos[2];
-  pos[0] = mouse.GetPos()[0];
-  pos[1] = mouse.GetPos()[1];
+  pos[0] = vpos.GetX();
+  pos[1] = vpos.GetY();
   double bounds[4];
   this->GetBounds(bounds);
   bool clamped = this->ClampPos(pos, bounds);
@@ -349,8 +406,8 @@ bool vtkControlPointsItem::Hit(const vtkContextMouseEvent &mouse)
     }
   // maybe the cursor is over the first or last point (which could be outside
   // the bounds because of the screen point size).
-  pos[0] = mouse.GetPos()[0];
-  pos[1] = mouse.GetPos()[1];
+  pos[0] = vpos.GetX();
+  pos[1] = vpos.GetY();
   for (int i = 0; i < this->GetNumberOfPoints(); ++i)
     {
     if (this->IsOverPoint(pos, i))
@@ -364,6 +421,11 @@ bool vtkControlPointsItem::Hit(const vtkContextMouseEvent &mouse)
 //-----------------------------------------------------------------------------
 bool vtkControlPointsItem::ClampPos(double pos[2], double bounds[4])
 {
+  if (bounds[1] < bounds[0] || bounds[3] < bounds[2])
+    {
+    // bounds are not valid. Don't clamp.
+    return false;
+    }
   bool clamped = false;
   if (pos[0] < bounds[0])
     {
@@ -437,6 +499,11 @@ void vtkControlPointsItem::DrawPoint(vtkContext2D* painter, vtkIdType index)
   double point[4];
   this->GetControlPoint(index, point);
 
+  vtkVector2f vpoint(point[0], point[1]);
+  this->TransformDataToScreen(vpoint, vpoint);
+  point[0] = vpoint.GetX();
+  point[1] = vpoint.GetY();
+
   double pointInScene[2];
   vtkTransform2D* sceneTransform = painter->GetTransform();
   sceneTransform->TransformPoints(point, pointInScene, 1);
@@ -508,6 +575,51 @@ void vtkControlPointsItem::DrawPoint(vtkContext2D* painter, vtkIdType index)
     }
 
   painter->GetPen()->SetColor(penColor);
+
+  if (this->ShowLabels && (/*index == 0 ||
+                           index == this->GetNumberOfPoints()-1 || */
+                           this->GetCurrentPoint() == index))
+    {
+    translation->Translate(0, radius+5);
+    painter->SetTransform(translation);
+    vtkStdString label = this->GetControlPointLabel(index);
+
+    vtkVector2f bounds[2];
+    painter->ComputeStringBounds(label, bounds[0].GetData());
+    if (bounds[1].GetX() != 0.0f && bounds[1].GetY() != 0.0f)
+      {
+      float scale[2];
+      float position[2];
+      painter->GetTransform()->GetScale(scale);
+      painter->GetTransform()->GetPosition(position);
+
+      double brushColor[4];
+      painter->GetBrush()->GetColorF(brushColor);
+      painter->GetBrush()->SetColorF(1, 1, 1, 1);
+      painter->GetBrush()->SetOpacityF(0.75);
+      painter->GetPen()->SetOpacity(0);
+      bounds[0] = vtkVector2f(-5/scale[0], -3/scale[1]);
+      bounds[1] = vtkVector2f(bounds[1].GetX()+10/scale[0],
+        bounds[1].GetY()+10/scale[1]);
+
+      // Pull the tooltip back in if it will go off the edge of the screen.
+      float maxX = (this->Scene->GetViewWidth() - position[0])/scale[0];
+      if (bounds[0].GetX() >= maxX - bounds[1].GetX())
+        {
+        bounds[0].SetX(maxX - bounds[1].GetX());
+        }
+      // Pull the tooltip down if it will go off the edge of the screen.
+      float maxY = (this->Scene->GetViewHeight() - position[1])/scale[1];
+      if (bounds[0].GetY() >= maxY - bounds[1].GetY())
+        {
+        bounds[0].SetY(maxY - bounds[1].GetY());
+        }
+      painter->DrawRect(bounds[0].GetX(), bounds[0].GetY(), bounds[1].GetX(), bounds[1].GetY());
+      painter->DrawString(bounds[0].GetX()+5/scale[0], bounds[0].GetY()+3/scale[1], label);
+      painter->GetBrush()->SetColorF(brushColor);
+      }
+    }
+
   painter->GetPen()->SetOpacity(penOpacity);
   //painter->GetPen()->SetWidth(width);
   painter->GetBrush()->SetOpacity(brushOpacity);
@@ -711,11 +823,16 @@ bool vtkControlPointsItem::IsOverPoint(double* pos, vtkIdType pointId)
 }
 
 //-----------------------------------------------------------------------------
-vtkIdType vtkControlPointsItem::FindPoint(double* pos)
+vtkIdType vtkControlPointsItem::FindPoint(double* _pos)
 {
+  vtkVector2f vpos(_pos[0], _pos[1]);
+  this->TransformDataToScreen(vpos, vpos);
+  double pos[2] = {vpos.GetX(), vpos.GetY()};
+
   double tolerance = 1.3;
   double radius2 = this->ScreenPointRadius * this->ScreenPointRadius
     * tolerance * tolerance;
+
   double screenPos[2];
   this->Transform->TransformPoints(pos, screenPos, 1);
   vtkIdType pointId = -1;
@@ -725,6 +842,11 @@ vtkIdType vtkControlPointsItem::FindPoint(double* pos)
     {
     double point[4];
     this->GetControlPoint(i, point);
+    vtkVector2f vpos1(point[0], point[1]);
+    this->TransformDataToScreen(vpos1, vpos1);
+    point[0] = vpos1.GetX();
+    point[1] = vpos1.GetY();
+
     double screenPoint[2];
     this->Transform->TransformPoints(point, screenPoint, 1);
     double distance2 =
@@ -827,6 +949,8 @@ vtkIdType vtkControlPointsItem::RemovePointId(vtkIdType pointId)
     return pointId;
     }
 
+  this->StartChanges();
+
   assert(pointId != -1);
   // Useless to remove the point here as it will be removed anyway in ComputePoints
   this->DeselectPoint(pointId);
@@ -846,6 +970,8 @@ vtkIdType vtkControlPointsItem::RemovePointId(vtkIdType pointId)
     {
     this->SetCurrentPoint(this->CurrentPoint - 1);
     }
+
+  this->EndChanges();
   return pointId;
 }
 
@@ -868,9 +994,12 @@ bool vtkControlPointsItem::MouseButtonPressEvent(const vtkContextMouseEvent &mou
   this->MouseMoved = false;
   this->PointToToggle = -1;
   this->PointToDelete = -1;
+
+  vtkVector2f vpos = mouse.GetPos();
+  this->TransformScreenToData(vpos, vpos);
   double pos[2];
-  pos[0] = mouse.GetPos()[0];
-  pos[1] = mouse.GetPos()[1];
+  pos[0] = vpos.GetX();
+  pos[1] = vpos.GetY();
   vtkIdType pointUnderMouse = this->FindPoint(pos);
 
   if (mouse.GetButton() == vtkContextMouseEvent::LEFT_BUTTON)
@@ -884,7 +1013,7 @@ bool vtkControlPointsItem::MouseButtonPressEvent(const vtkContextMouseEvent &mou
              && this->Selection->GetNumberOfTuples() <= 1
              && !this->StrokeMode)
       {
-      this->ClampPos(pos, this->GetValidBounds());
+      this->ClampValidPos(pos);
       vtkIdType addedPoint = this->AddPoint(pos);
       this->SetCurrentPoint(addedPoint);
       return true;
@@ -945,13 +1074,16 @@ bool vtkControlPointsItem::MouseDoubleClickEvent(const vtkContextMouseEvent &mou
 //-----------------------------------------------------------------------------
 bool vtkControlPointsItem::MouseMoveEvent(const vtkContextMouseEvent &mouse)
 {
+  vtkVector2f mousePos = mouse.GetPos();
+  this->TransformScreenToData(mousePos, mousePos);
+
   if (mouse.GetButton() == vtkContextMouseEvent::LEFT_BUTTON)
     {
     if (this->StrokeMode)
       {
       this->StartInteractionIfNotStarted();
 
-      this->Stroke(mouse.GetPos());
+      this->Stroke(mousePos);
 
       this->Interaction();
       }
@@ -985,7 +1117,7 @@ bool vtkControlPointsItem::MouseMoveEvent(const vtkContextMouseEvent &mouse)
       }
     else if (this->CurrentPoint != -1)
       {
-      vtkVector2f curPos(mouse.GetPos());
+      vtkVector2f curPos(mousePos);
       if(this->IsEndPointPicked())
         {
         double currentPoint[4] = {0.0, 0.0, 0.0, 0.0};
@@ -1017,8 +1149,8 @@ bool vtkControlPointsItem::MouseMoveEvent(const vtkContextMouseEvent &mouse)
       return false;
       }
     double pos[2];
-    pos[0] = mouse.GetPos()[0];
-    pos[1] = mouse.GetPos()[1];
+    pos[0] = mousePos[0];
+    pos[1] = mousePos[1];
     vtkIdType pointUnderCursor = this->FindPoint(pos);
     if ((pointUnderCursor == this->PointToToggle) != this->PointAboutToBeToggled)
       {
@@ -1035,8 +1167,8 @@ bool vtkControlPointsItem::MouseMoveEvent(const vtkContextMouseEvent &mouse)
       return false;
       }
     double pos[2];
-    pos[0] = mouse.GetPos()[0];
-    pos[1] = mouse.GetPos()[1];
+    pos[0] = mousePos[0];
+    pos[1] = mousePos[1];
     vtkIdType pointUnderCursor = this->FindPoint(pos);
     if ((pointUnderCursor == this->PointToDelete) != this->PointAboutToBeDeleted)
       {
@@ -1051,6 +1183,10 @@ bool vtkControlPointsItem::MouseMoveEvent(const vtkContextMouseEvent &mouse)
     {
     return false;
     }
+  if (mouse.GetButton() == vtkContextMouseEvent::NO_BUTTON)
+    {
+    return false;
+    }
   return true;
 }
 
@@ -1132,6 +1268,9 @@ vtkIdType vtkControlPointsItem::SetPointPos(vtkIdType point, const vtkVector2f&
   this->GetControlPoint(point, currentPoint);
   currentPoint[0] = boundedPos[0];
   currentPoint[1] = boundedPos[1];
+
+  // SetControlPoint will call StartChanges/EndChanges correctly, so we don't
+  // need to call them here.
   this->SetControlPoint(point, currentPoint);
   return point;
 }
@@ -1677,3 +1816,19 @@ bool vtkControlPointsItem::IsPointRemovable(vtkIdType pointId)
     }
   return true;
 }
+
+//-----------------------------------------------------------------------------
+vtkStdString vtkControlPointsItem::GetControlPointLabel(vtkIdType pointId)
+{
+  vtkStdString result;
+  if (this->LabelFormat)
+    {
+    char *buffer = new char[1024];
+    double point[4];
+    this->GetControlPoint(pointId, point);
+    sprintf(buffer, this->LabelFormat, point[0], point[1], point[2], point[3]);
+    result = buffer;
+    delete []buffer;
+    }
+  return result;
+}
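
The TransformScreenToData()/TransformDataToScreen() pair added above maps
between the linear chart coordinate and the data value when the transfer
function uses a log scale. A standalone sketch of the two mappings
(illustrative only; assumes bounds b0 < b1 and both positive):

  #include <cmath>

  // linear chart coordinate -> data value on a log-scaled axis
  double ScreenToData(double x, double b0, double b1)
  {
    double t = (x - b0) / (b1 - b0);
    return std::pow(10.0, std::log10(b0) + t * (std::log10(b1) - std::log10(b0)));
  }

  // data value -> linear chart coordinate (inverse of the above)
  double DataToScreen(double x, double b0, double b1)
  {
    double t = (std::log10(x) - std::log10(b0)) / (std::log10(b1) - std::log10(b0));
    return b0 + t * (b1 - b0);
  }

For x in [b0, b1], DataToScreen(ScreenToData(x, b0, b1), b0, b1) returns x up
to rounding, which is what keeps the hit-testing in Hit()/FindPoint()
consistent with the drawing in DrawPoint().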
diff --git a/Charts/Core/vtkControlPointsItem.h b/Charts/Core/vtkControlPointsItem.h
index c2e4d13..ca87dbf 100644
--- a/Charts/Core/vtkControlPointsItem.h
+++ b/Charts/Core/vtkControlPointsItem.h
@@ -41,9 +41,15 @@ public:
   vtkTypeMacro(vtkControlPointsItem, vtkPlot);
   virtual void PrintSelf(ostream &os, vtkIndent indent);
 
+  // Events fired by this class (and subclasses).
+  // \li CurrentPointChangedEvent is fired when the current point index is changed.
+  // \li CurrentPointEditEvent is fired to request the application to show UI to
+  // edit the current point.
+  // \li vtkCommand::StartEvent and vtkCommand::EndEvent are fired
+  // to mark groups of changes to control points.
   enum {
     CurrentPointChangedEvent = vtkCommand::UserEvent,
-    CurrentPointEditEvent
+    CurrentPointEditEvent,
   };
 
   // Description:
@@ -176,6 +182,17 @@ public:
   vtkGetMacro(EndPointsRemovable, bool);
 
   // Description:
+  // When set to true, labels are shown on the current control point and the end
+  // points. Default is false.
+  vtkSetMacro(ShowLabels, bool);
+  vtkGetMacro(ShowLabels, bool);
+
+  // Description:
+  // Get/Set the label format. Default is "%.3f, %.3f".
+  vtkSetStringMacro(LabelFormat);
+  vtkGetStringMacro(LabelFormat);
+
+  // Description:
   // Add a point to the function. Returns the index of the point (0 based),
   // or -1 on error.
   // Subclasses should reimplement this function to do the actual work.
@@ -302,6 +319,12 @@ protected:
   virtual bool Hit(const vtkContextMouseEvent &mouse);
 
   // Description:
+  // Transform the mouse event into control-points space. This is needed when
+  // ColorTransferFunction is using log-scale.
+  virtual void TransformScreenToData(const vtkVector2f& in, vtkVector2f& out);
+  virtual void TransformDataToScreen(const vtkVector2f& in, vtkVector2f& out);
+
+  // Description:
   // Clamp the given 2D pos into the bounds of the function.
   // Return true if the pos has been clamped, false otherwise.
   bool ClampPos(double pos[2], double bounds[4]);
@@ -328,6 +351,10 @@ protected:
   // Mouse button release event.
   virtual bool MouseButtonReleaseEvent(const vtkContextMouseEvent &mouse);
 
+  // Description:
+  // Generate label for a control point.
+  virtual vtkStdString GetControlPointLabel(vtkIdType index);
+
   void AddPointId(vtkIdType addedPointId);
 
   // Description:
@@ -339,11 +366,25 @@ protected:
   // Return true if the point is removable
   bool IsPointRemovable(vtkIdType pointId);
 
+  // Description:
+  // Compute the bounds for this item. Typically, the bounds should be aligned
+  // to the range of the vtkScalarsToColors or vtkPiecewiseFunction that is
+  // being controlled by the subclasses.
+  // Default implementation uses the range of the control points themselves.
+  virtual void ComputeBounds(double* bounds);
+
+  // Description:
+  // Returns true if control points are to be rendered in log-space. This is
+  // true when vtkScalarsToColors is using log-scale, for example. Default
+  // implementation always returns false.
+  virtual bool UsingLogScale() { return false; }
+
   vtkCallbackCommand* Callback;
   vtkPen*             SelectedPointPen;
   vtkBrush*           SelectedPointBrush;
   int                 BlockUpdates;
   int                 StartedInteractions;
+  int                 StartedChanges;
   vtkIdType           CurrentPoint;
 
   double              Bounds[4];
@@ -365,12 +406,13 @@ protected:
   bool                EndPointsXMovable;
   bool                EndPointsYMovable;
   bool                EndPointsRemovable;
+  bool                ShowLabels;
+  char*               LabelFormat;
 private:
   vtkControlPointsItem(const vtkControlPointsItem &); // Not implemented.
   void operator=(const vtkControlPointsItem &);   // Not implemented.
 
   void      ComputeBounds();
-  void      ComputeBounds(double* bounds);
 
   vtkIdType RemovePointId(vtkIdType removedPointId);
 };
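
The StartedChanges counter introduced above turns StartChanges()/EndChanges()
into a nestable grouping: StartEvent is emitted only by the outermost
StartChanges() and EndEvent only when the matching outermost EndChanges() is
reached. A simplified model of that behaviour (not the VTK class itself):

  #include <cassert>
  #include <iostream>

  struct ChangeGroup
  {
    ChangeGroup() : Depth(0) {}
    void StartChanges()
    {
      if (++this->Depth == 1) { std::cout << "StartEvent\n"; }
    }
    void EndChanges()
    {
      assert(this->Depth > 0);
      if (--this->Depth == 0) { std::cout << "EndEvent\n"; }
    }
    int Depth;
  };

  // Usage: nested calls collapse into a single Start/End pair.
  //   ChangeGroup g;
  //   g.StartChanges(); // prints StartEvent
  //   g.StartChanges(); // nested, silent
  //   g.EndChanges();   // nested, silent
  //   g.EndChanges();   // prints EndEvent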
diff --git a/Charts/Core/vtkPiecewiseControlPointsItem.cxx b/Charts/Core/vtkPiecewiseControlPointsItem.cxx
index bebd7a0..ae81d15 100644
--- a/Charts/Core/vtkPiecewiseControlPointsItem.cxx
+++ b/Charts/Core/vtkPiecewiseControlPointsItem.cxx
@@ -45,6 +45,7 @@ vtkPiecewiseControlPointsItem::~vtkPiecewiseControlPointsItem()
 {
   if (this->PiecewiseFunction)
     {
+    this->PiecewiseFunction->RemoveObserver(this->Callback);
     this->PiecewiseFunction->Delete();
     this->PiecewiseFunction = 0;
     }
@@ -129,7 +130,9 @@ void vtkPiecewiseControlPointsItem::SetControlPoint(vtkIdType index, double* new
   if (newPos[0] != oldPos[0] || newPos[1] != oldPos[1] ||
       newPos[2] != oldPos[2])
     {
+    this->StartChanges();
     this->PiecewiseFunction->SetNodeValue(index, newPos);
+    this->EndChanges();
     }
 }
 
@@ -140,6 +143,9 @@ void vtkPiecewiseControlPointsItem::EditPoint(float tX, float tY)
     {
     return;
     }
+
+  this->StartChanges();
+
   double xvms[4];
   this->PiecewiseFunction->GetNodeValue(this->CurrentPoint, xvms);
   xvms[2] += tX;
@@ -152,6 +158,8 @@ void vtkPiecewiseControlPointsItem::EditPoint(float tX, float tY)
     xvms[3] += tY;
     this->PiecewiseFunction->SetNodeValue(this->CurrentPoint - 1, xvms);
     }
+
+  this->EndChanges();
 }
 
 //-----------------------------------------------------------------------------
@@ -161,8 +169,12 @@ vtkIdType vtkPiecewiseControlPointsItem::AddPoint(double* newPos)
     {
     return -1;
     }
+
+  this->StartChanges();
   vtkIdType addedPoint = this->PiecewiseFunction->AddPoint(newPos[0], newPos[1]);
   this->Superclass::AddPointId(addedPoint);
+  this->EndChanges();
+
   return addedPoint;
 }
 
@@ -173,6 +185,13 @@ vtkIdType vtkPiecewiseControlPointsItem::RemovePoint(double* currentPoint)
     {
     return -1;
     }
+  if (!this->IsPointRemovable(this->GetControlPointId(currentPoint)))
+    {
+    return -1;
+    }
+
+  this->StartChanges();
+
 #ifndef NDEBUG
   vtkIdType expectedPoint =
 #endif
@@ -180,5 +199,8 @@ vtkIdType vtkPiecewiseControlPointsItem::RemovePoint(double* currentPoint)
   vtkIdType removedPoint =
     this->PiecewiseFunction->RemovePoint(currentPoint[0]);
   assert(removedPoint == expectedPoint);
+
+  this->EndChanges();
+
   return removedPoint;
 }
diff --git a/Charts/Core/vtkPiecewiseFunctionItem.cxx b/Charts/Core/vtkPiecewiseFunctionItem.cxx
index 9d50ec5..b2696e0 100644
--- a/Charts/Core/vtkPiecewiseFunctionItem.cxx
+++ b/Charts/Core/vtkPiecewiseFunctionItem.cxx
@@ -42,6 +42,7 @@ vtkPiecewiseFunctionItem::~vtkPiecewiseFunctionItem()
 {
   if (this->PiecewiseFunction)
     {
+    this->PiecewiseFunction->RemoveObserver(this->Callback);
     this->PiecewiseFunction->Delete();
     this->PiecewiseFunction = 0;
     }
diff --git a/Charts/Core/vtkPiecewisePointHandleItem.cxx b/Charts/Core/vtkPiecewisePointHandleItem.cxx
index 4d71c38..cd99186 100644
--- a/Charts/Core/vtkPiecewisePointHandleItem.cxx
+++ b/Charts/Core/vtkPiecewisePointHandleItem.cxx
@@ -29,6 +29,8 @@
 #include "vtkTransform2D.h"
 #include "vtkNew.h"
 
+#include <algorithm>
+
 enum enumPointHandleType
   {
   enSharpNess=0,
diff --git a/Charts/Core/vtkPlot.cxx b/Charts/Core/vtkPlot.cxx
index e6722e4..fc56789 100644
--- a/Charts/Core/vtkPlot.cxx
+++ b/Charts/Core/vtkPlot.cxx
@@ -32,7 +32,7 @@ vtkCxxSetObjectMacro(vtkPlot, XAxis, vtkAxis);
 vtkCxxSetObjectMacro(vtkPlot, YAxis, vtkAxis);
 
 //-----------------------------------------------------------------------------
-vtkPlot::vtkPlot()
+vtkPlot::vtkPlot() : ShiftScale(0.0, 0.0, 1.0, 1.0)
 {
   this->Pen = vtkSmartPointer<vtkPen>::New();
   this->Pen->SetWidth(2.0);
@@ -76,7 +76,7 @@ vtkIdType vtkPlot::GetNearestPoint(const vtkVector2f&, const vtkVector2f&,
 }
 
 //-----------------------------------------------------------------------------
-vtkStdString vtkPlot::GetTooltipLabel(const vtkVector2f &plotPos,
+vtkStdString vtkPlot::GetTooltipLabel(const vtkVector2d &plotPos,
                                       vtkIdType seriesIndex,
                                       vtkIdType)
 {
@@ -164,12 +164,20 @@ vtkStdString vtkPlot::GetNumber(double position, vtkAxis *axis)
 //-----------------------------------------------------------------------------
 bool vtkPlot::SelectPoints(const vtkVector2f&, const vtkVector2f&)
 {
+  if (this->Selection)
+    {
+    this->Selection->SetNumberOfTuples(0);
+    }
   return false;
 }
 
 //-----------------------------------------------------------------------------
 bool vtkPlot::SelectPointsInPolygon(const vtkContextPolygon &)
 {
+  if (this->Selection)
+    {
+    this->Selection->SetNumberOfTuples(0);
+    }
   return false;
 }
 
@@ -449,6 +457,22 @@ void vtkPlot::SetInputArray(int index, const vtkStdString &name)
 }
 
 //-----------------------------------------------------------------------------
+void vtkPlot::SetShiftScale(const vtkRectd &shiftScale)
+{
+  if (shiftScale != this->ShiftScale)
+    {
+    this->Modified();
+    this->ShiftScale = shiftScale;
+    }
+}
+
+//-----------------------------------------------------------------------------
+vtkRectd vtkPlot::GetShiftScale()
+{
+  return this->ShiftScale;
+}
+
+//-----------------------------------------------------------------------------
 void vtkPlot::SetProperty(const vtkStdString&, const vtkVariant&)
 {
 }
diff --git a/Charts/Core/vtkPlot.h b/Charts/Core/vtkPlot.h
index b09f284..b0b9a60 100644
--- a/Charts/Core/vtkPlot.h
+++ b/Charts/Core/vtkPlot.h
@@ -29,6 +29,7 @@
 #include "vtkStdString.h"     // Needed to hold TooltipLabelFormat ivar
 #include "vtkSmartPointer.h"  // Needed to hold SP ivars
 #include "vtkContextPolygon.h" // For vtkContextPolygon
+#include "vtkRect.h"           // For vtkRectd ivar
 
 class vtkVariant;
 class vtkTable;
@@ -37,8 +38,6 @@ class vtkContextMapper2D;
 class vtkPen;
 class vtkBrush;
 class vtkAxis;
-class vtkVector2f;
-class vtkRectf;
 class vtkStringArray;
 
 class VTKCHARTSCORE_EXPORT vtkPlot : public vtkContextItem
@@ -84,7 +83,7 @@ public:
   // Description:
   // Generate and return the tooltip label string for this plot
   // The segmentIndex parameter is ignored, except for vtkPlotBar
-  virtual vtkStdString GetTooltipLabel(const vtkVector2f &plotPos,
+  virtual vtkStdString GetTooltipLabel(const vtkVector2d &plotPos,
                                        vtkIdType seriesIndex,
                                        vtkIdType segmentIndex);
 
@@ -213,6 +212,14 @@ public:
   virtual void SetYAxis(vtkAxis* axis);
 
   // Description:
+  // Get/set the origin shift and scaling factor used by the plot. This is
+  // normally a 0.0 offset and 1.0 scaling, but can be used to render data outside
+  // of the single-precision range. The chart that owns the plot should set this
+  // and ensure the appropriate matrix is used when rendering the plot.
+  void SetShiftScale(const vtkRectd &scaling);
+  vtkRectd GetShiftScale();
+
+  // Description:
   // Get the bounds for this plot as (Xmin, Xmax, Ymin, Ymax).
   //
   // See \a GetUnscaledInputBounds for more information.
@@ -321,6 +328,10 @@ protected:
   int TooltipNotation;
   int TooltipPrecision;
 
+  // Description:
+  // The current shift in origin and scaling factor applied to the plot.
+  vtkRectd ShiftScale;
+
 private:
   vtkPlot(const vtkPlot &); // Not implemented.
   void operator=(const vtkPlot &); // Not implemented.
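
The new ShiftScale ivar packs (shiftX, shiftY, scaleX, scaleY) into a
vtkRectd; as used by the vtkPlotBar changes later in this patch, a data value
x is stored as (x + shiftX) * scaleX. A hypothetical owner-side sketch (the
chart code here is an assumption, not part of the patch):

  #include "vtkPlot.h"
  #include "vtkRect.h"

  void ApplyShiftScale(vtkPlot* plot, double shiftX, double shiftY,
                       double scaleX, double scaleY)
  {
    plot->SetShiftScale(vtkRectd(shiftX, shiftY, scaleX, scaleY));
    // The owning chart is then expected to apply the inverse transform in its
    // painting matrix so the plot still appears at its original coordinates.
  }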
diff --git a/Charts/Core/vtkPlotBag.cxx b/Charts/Core/vtkPlotBag.cxx
new file mode 100644
index 0000000..06c6e80
--- /dev/null
+++ b/Charts/Core/vtkPlotBag.cxx
@@ -0,0 +1,439 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPlotBag.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkBrush.h"
+#include "vtkContext2D.h"
+#include "vtkContextMapper2D.h"
+#include "vtkDataArray.h"
+#include "vtkDoubleArray.h"
+#include "vtkObjectFactory.h"
+#include "vtkPen.h"
+#include "vtkPlotBag.h"
+#include "vtkPoints.h"
+#include "vtkPoints2D.h"
+#include "vtkPointsProjectedHull.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+#include "vtkTimeStamp.h"
+
+#include <algorithm>
+
+//-----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkPlotBag);
+
+//-----------------------------------------------------------------------------
+vtkPlotBag::vtkPlotBag()
+{
+  this->MedianPoints = vtkPoints2D::New();
+  this->Q3Points = vtkPoints2D::New();
+  this->TooltipDefaultLabelFormat = "%l (%x, %y): %z";
+}
+
+//-----------------------------------------------------------------------------
+vtkPlotBag::~vtkPlotBag()
+{
+  if (this->MedianPoints)
+    {
+    this->MedianPoints->Delete();
+    this->MedianPoints = 0;
+    }
+  if (this->Q3Points)
+    {
+    this->Q3Points->Delete();
+    this->Q3Points = 0;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotBag::Update()
+{
+  if (!this->Visible)
+    {
+    return;
+    }
+
+  // Check if we have an input
+  vtkTable *table = this->Data->GetInput();
+  vtkDataArray *density = vtkDataArray::SafeDownCast(
+    this->Data->GetInputAbstractArrayToProcess(2, this->GetInput()));
+  if (!table || !density)
+    {
+    vtkDebugMacro(<< "Update event called with no input table or density column set.");
+    return;
+    }
+  bool update = (this->Data->GetMTime() > this->BuildTime ||
+    table->GetMTime() > this->BuildTime ||
+    this->MTime > this->BuildTime);
+
+  this->Superclass::Update();
+
+  if (update)
+    {
+    vtkDebugMacro(<< "Updating cached values.");
+    this->UpdateTableCache(density);
+    }
+}
+
+//-----------------------------------------------------------------------------
+class ArraySorter
+{
+public:
+  ArraySorter(vtkDataArray* arr) : Array(arr) {}
+  bool operator()(const vtkIdType& a, const vtkIdType& b)
+  {
+    return this->Array->GetTuple1(a) > this->Array->GetTuple1(b);
+  }
+  vtkDataArray* Array;
+};
+
+//-----------------------------------------------------------------------------
+void vtkPlotBag::UpdateTableCache(vtkDataArray* density)
+{
+  this->MedianPoints->Reset();
+  this->Q3Points->Reset();
+
+  if (!this->Points)
+    {
+    return;
+    }
+  vtkIdType nbPoints = density->GetNumberOfTuples();
+
+  // Sort the density array
+  std::vector<vtkIdType> ids;
+  ids.resize(nbPoints);
+  double sum = 0.0;
+  for (vtkIdType i = 0; i < nbPoints; i++)
+    {
+    sum += density->GetTuple1(i);
+    ids[i] = i;
+    }
+
+  vtkNew<vtkDoubleArray> nDensity;
+  // Normalize the density array if needed
+  if (fabs(sum - 1.0) > 1.0e-12)
+    {
+    sum = 1.0 / sum;
+    nDensity->SetNumberOfComponents(1);
+    nDensity->SetNumberOfTuples(nbPoints);
+    for (vtkIdType i = 0; i < nbPoints; i++)
+      {
+      nDensity->SetTuple1(i, density->GetTuple1(ids[i]) * sum);
+      }
+    density = nDensity.GetPointer();
+    }
+
+  // Sort array by density
+  ArraySorter arraySorter(density);
+  std::sort(ids.begin(), ids.end(), arraySorter);
+
+  vtkNew<vtkPointsProjectedHull> q3Points;
+  q3Points->Allocate(nbPoints);
+  vtkNew<vtkPointsProjectedHull> medianPoints;
+  medianPoints->Allocate(nbPoints);
+
+  for (vtkIdType i = 0; i < nbPoints; i++)
+    {
+    double x[3];
+    this->Points->GetPoint(ids[i], x);
+    if (i < static_cast<vtkIdType>(nbPoints * 0.5))
+      {
+      medianPoints->InsertNextPoint(x);
+      }
+    if (i < static_cast<vtkIdType>(nbPoints * 0.75))
+      {
+      q3Points->InsertNextPoint(x);
+      }
+    else
+      {
+      break;
+      }
+    }
+
+  // Compute the convex hull for the median points
+  vtkIdType nbMedPoints = medianPoints->GetNumberOfPoints();
+  if (nbMedPoints > 2)
+    {
+    int size = medianPoints->GetSizeCCWHullZ();
+    this->MedianPoints->SetDataTypeToFloat();
+    this->MedianPoints->SetNumberOfPoints(size+1);
+    medianPoints->GetCCWHullZ(
+      static_cast<float*>(this->MedianPoints->GetData()->GetVoidPointer(0)), size);
+    double x[3];
+    this->MedianPoints->GetPoint(0, x);
+    this->MedianPoints->SetPoint(size, x);
+    }
+  else if (nbMedPoints > 0)
+    {
+    this->MedianPoints->SetNumberOfPoints(nbMedPoints);
+    for (int j = 0; j < nbMedPoints; j++)
+      {
+      double x[3];
+      medianPoints->GetPoint(j, x);
+      this->MedianPoints->SetPoint(j, x);
+      }
+    }
+
+  // Compute the convex hull for the first quartile points
+  vtkIdType nbQ3Points = q3Points->GetNumberOfPoints();
+  if (nbQ3Points > 2)
+    {
+    int size = q3Points->GetSizeCCWHullZ();
+    this->Q3Points->SetDataTypeToFloat();
+    this->Q3Points->SetNumberOfPoints(size+1);
+    q3Points->GetCCWHullZ(
+      static_cast<float*>(this->Q3Points->GetData()->GetVoidPointer(0)), size);
+    double x[3];
+    this->Q3Points->GetPoint(0, x);
+    this->Q3Points->SetPoint(size, x);
+    }
+  else if (nbQ3Points > 0)
+    {
+    this->Q3Points->SetNumberOfPoints(nbQ3Points);
+    for (int j = 0; j < nbQ3Points; j++)
+      {
+      double x[3];
+      q3Points->GetPoint(j, x);
+      this->Q3Points->SetPoint(j, x);
+      }
+    }
+
+  this->BuildTime.Modified();
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPlotBag::Paint(vtkContext2D *painter)
+{
+  vtkDebugMacro(<< "Paint event called in vtkPlotBag.");
+
+  vtkTable *table = this->Data->GetInput();
+
+  if (!this->Visible || !this->Points || !table)
+    {
+    return false;
+    }
+
+  unsigned char pcolor[4];
+  this->Pen->GetColor(pcolor);
+  unsigned char bcolor[4];
+  this->Brush->GetColor(bcolor);
+
+  // Draw the 2 bags
+  this->Pen->SetColor(0, 0, 0);
+  this->Brush->SetOpacity(255);
+  this->Brush->SetColor(pcolor[0] / 2, pcolor[1] / 2, pcolor[2] / 2);
+  painter->ApplyPen(this->Pen);
+  painter->ApplyBrush(this->Brush);
+  if (this->Q3Points->GetNumberOfPoints() > 2)
+    {
+    painter->DrawPolygon(this->Q3Points);
+    }
+  else if (this->Q3Points->GetNumberOfPoints() == 2)
+    {
+    painter->DrawLine(this->Q3Points);
+    }
+
+  this->Brush->SetColor(pcolor);
+  this->Brush->SetOpacity(128);
+  painter->ApplyPen(this->Pen);
+  painter->ApplyBrush(this->Brush);
+
+  if (this->MedianPoints->GetNumberOfPoints() > 2)
+    {
+    painter->DrawPolygon(this->MedianPoints);
+    }
+  else if (this->MedianPoints->GetNumberOfPoints() == 2)
+    {
+    painter->DrawLine(this->MedianPoints);
+    }
+
+  this->Brush->SetColor(bcolor);
+  this->Pen->SetColor(pcolor);
+
+  // Let PlotPoints draw the points as usual
+  return this->Superclass::Paint(painter);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPlotBag::PaintLegend(vtkContext2D *painter, const vtkRectf& rect, int)
+{
+  vtkNew<vtkPen> blackPen;
+  blackPen->SetWidth(1.0);
+  blackPen->SetColor(0, 0, 0, 255);
+  painter->ApplyPen(blackPen.GetPointer());
+
+  unsigned char pcolor[4];
+  this->Pen->GetColor(pcolor);
+
+  this->Brush->SetColor(pcolor[0] / 2, pcolor[1] / 2, pcolor[2] / 2);
+  this->Brush->SetOpacity(255);
+  painter->ApplyBrush(this->Brush);
+  painter->DrawRect(rect[0], rect[1], rect[2]/2, rect[3]);
+
+  this->Brush->SetColor(pcolor);
+  this->Brush->SetOpacity(255);
+  painter->ApplyBrush(this->Brush);
+  painter->DrawRect(rect[0] + rect[2] / 2.f, rect[1], rect[2]/2, rect[3]);
+
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+vtkStringArray* vtkPlotBag::GetLabels()
+{
+  // If the label string is empty, return the y column name
+  if (this->Labels)
+    {
+    return this->Labels;
+    }
+  else if (this->AutoLabels)
+    {
+    return this->AutoLabels;
+    }
+  else if (this->Data->GetInput())
+    {
+    this->AutoLabels = vtkSmartPointer<vtkStringArray>::New();
+    vtkDataArray *density = vtkDataArray::SafeDownCast(
+      this->Data->GetInputAbstractArrayToProcess(2, this->GetInput()));
+    this->AutoLabels->InsertNextValue(density->GetName());
+    return this->AutoLabels;
+    }
+  return NULL;
+}
+
+//-----------------------------------------------------------------------------
+vtkStdString vtkPlotBag::GetTooltipLabel(const vtkVector2d &plotPos,
+                                         vtkIdType seriesIndex,
+                                         vtkIdType)
+{
+  vtkStdString tooltipLabel;
+  vtkStdString &format = this->TooltipLabelFormat.empty() ?
+        this->TooltipDefaultLabelFormat : this->TooltipLabelFormat;
+  // Parse TooltipLabelFormat and build tooltipLabel
+  bool escapeNext = false;
+  vtkDataArray *density = vtkDataArray::SafeDownCast(
+    this->Data->GetInputAbstractArrayToProcess(2, this->GetInput()));
+  for (size_t i = 0; i < format.length(); ++i)
+    {
+    if (escapeNext)
+      {
+      switch (format[i])
+        {
+        case 'x':
+          tooltipLabel += this->GetNumber(plotPos.GetX(), this->XAxis);
+          break;
+        case 'y':
+          tooltipLabel += this->GetNumber(plotPos.GetY(), this->YAxis);
+          break;
+        case 'z':
+          tooltipLabel += density ?
+            density->GetVariantValue(seriesIndex).ToString() :
+            vtkStdString("?");
+          break;
+        case 'i':
+          if (this->IndexedLabels &&
+              seriesIndex >= 0 &&
+              seriesIndex < this->IndexedLabels->GetNumberOfTuples())
+            {
+            tooltipLabel += this->IndexedLabels->GetValue(seriesIndex);
+            }
+          break;
+        case 'l':
+          // GetLabel() is GetLabel(0) in this implementation
+          tooltipLabel += this->GetLabel();
+          break;
+        default: // If no match, insert the entire format tag
+          tooltipLabel += "%";
+          tooltipLabel += format[i];
+          break;
+        }
+      escapeNext = false;
+      }
+    else
+      {
+      if (format[i] == '%')
+        {
+        escapeNext = true;
+        }
+      else
+        {
+        tooltipLabel += format[i];
+        }
+      }
+    }
+  return tooltipLabel;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotBag::SetInputData(vtkTable *table)
+{
+  this->Data->SetInputData(table);
+  this->Modified();
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotBag::SetInputData(vtkTable *table, const vtkStdString &yColumn,
+                              const vtkStdString &densityColumn)
+{
+
+  vtkDebugMacro(<< "Setting input, Y column = \"" << yColumn.c_str() << "\", "
+                << "Density column = \"" << densityColumn.c_str() << "\"");
+
+  if (table->GetColumnByName(densityColumn.c_str())->GetNumberOfTuples()
+    != table->GetColumnByName(yColumn.c_str())->GetNumberOfTuples())
+    {
+    vtkErrorMacro(<< "Input table not correctly initialized!");
+    return;
+    }
+
+  this->SetInputData(table, yColumn, yColumn, densityColumn);
+  this->UseIndexForXSeries = true;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotBag::SetInputData(vtkTable *table, const vtkStdString &xColumn,
+                              const vtkStdString &yColumn,
+                              const vtkStdString &densityColumn)
+{
+  vtkDebugMacro(<< "Setting input, X column = \"" << xColumn.c_str()
+                << "\", " << "Y column = \""
+                << yColumn.c_str() << "\""
+                << "\", " << "Density column = \""
+                << densityColumn.c_str() << "\"");
+
+  this->Data->SetInputData(table);
+  this->Data->SetInputArrayToProcess(0, 0, 0,
+    vtkDataObject::FIELD_ASSOCIATION_ROWS, xColumn.c_str());
+  this->Data->SetInputArrayToProcess(1, 0, 0,
+    vtkDataObject::FIELD_ASSOCIATION_ROWS, yColumn.c_str());
+  this->Data->SetInputArrayToProcess(2, 0, 0,
+    vtkDataObject::FIELD_ASSOCIATION_ROWS, densityColumn.c_str());
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotBag::SetInputData(vtkTable *table, vtkIdType xColumn,
+                              vtkIdType yColumn,
+                              vtkIdType densityColumn)
+{
+  this->SetInputData(table,
+    table->GetColumnName(xColumn),
+    table->GetColumnName(yColumn),
+    table->GetColumnName(densityColumn));
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotBag::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+}
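
UpdateTableCache() above sorts the point indices by decreasing density and
keeps the densest 50% of points for the inner (median) bag and the densest
75% for the Q3 bag before wrapping each set in a convex hull. A simplified
model of that selection using plain std::vector instead of VTK arrays
(illustrative only):

  #include <algorithm>
  #include <cstddef>
  #include <vector>

  // Orders point indices so that higher-density points come first.
  struct DescendingDensity
  {
    DescendingDensity(const std::vector<double>* d) : Density(d) {}
    bool operator()(size_t a, size_t b) const
      { return (*this->Density)[a] > (*this->Density)[b]; }
    const std::vector<double>* Density;
  };

  void SplitByDensity(const std::vector<double>& density,
                      std::vector<size_t>& medianIds, std::vector<size_t>& q3Ids)
  {
    std::vector<size_t> ids(density.size());
    for (size_t i = 0; i < ids.size(); ++i) { ids[i] = i; }
    std::sort(ids.begin(), ids.end(), DescendingDensity(&density));
    medianIds.assign(ids.begin(), ids.begin() + ids.size() / 2);   // densest 50%
    q3Ids.assign(ids.begin(), ids.begin() + (ids.size() * 3) / 4); // densest 75%
  }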
diff --git a/Charts/Core/vtkPlotBag.h b/Charts/Core/vtkPlotBag.h
new file mode 100644
index 0000000..84514f2
--- /dev/null
+++ b/Charts/Core/vtkPlotBag.h
@@ -0,0 +1,107 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPlotBag.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkPlotBag - Class for drawing a bag plot.
+//
+// .SECTION Description
+// This class draws a bag plot given three columns from a vtkTable. The
+// first two columns represent X,Y as they do for vtkPlotPoints. The third
+// column specifies the density assigned to each point (generally obtained
+// from the vtkHighestDensityRegionsStatistics filter).
+// Points are drawn as in vtkPlotPoints, and two convex hull polygons are
+// drawn around the median and the third quartile of the density field.
+//
+// .SECTION See Also
+// vtkHighestDensityRegionsStatistics
+
+#ifndef __vtkPlotBag_h
+#define __vtkPlotBag_h
+
+#include "vtkChartsCoreModule.h" // For export macro
+#include "vtkPlotPoints.h"
+
+class VTKCHARTSCORE_EXPORT vtkPlotBag : public vtkPlotPoints
+{
+public:
+  vtkTypeMacro(vtkPlotBag, vtkPlotPoints);
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Creates a new Bag Plot object.
+  static vtkPlotBag *New();
+
+  // Description:
+  // Perform any updates to the item that may be necessary before rendering.
+  // The scene should take care of calling this on all items before their
+  // Paint function is invoked.
+  virtual void Update();
+
+  // Description:
+  // Paint event for the XY plot, called whenever the chart needs to be drawn.
+  virtual bool Paint(vtkContext2D *painter);
+
+  // Description:
+  // Paint legend event for the XY plot, called whenever the legend needs the
+  // plot items symbol/mark/line drawn. A rect is supplied with the lower left
+  // corner of the rect (elements 0 and 1) and with width x height (elements 2
+  // and 3). The plot can choose how to fill the space supplied.
+  virtual bool PaintLegend(vtkContext2D *painter, const vtkRectf& rect,
+                           int legendIndex);
+
+  // Description:
+  // Get the plot labels. If this array has a length greater than 1 the index
+  // refers to the stacked objects in the plot. See vtkPlotBar for example.
+  virtual vtkStringArray *GetLabels();
+
+  // Description:
+  // Generate and return the tooltip label string for this plot
+  // The segmentIndex parameter is ignored, except for vtkPlotBar
+  virtual vtkStdString GetTooltipLabel(const vtkVector2d &plotPos,
+                                       vtkIdType seriesIndex,
+                                       vtkIdType segmentIndex);
+
+  // Description:
+  // Set the input; a vtkTable with three columns is expected. The first two
+  // columns represent the x,y position and the third the density assigned to
+  // each point.
+  // The inherited method will call the last SetInputData method with default
+  // parameters.
+  virtual void SetInputData(vtkTable *table);
+  virtual void SetInputData(vtkTable *table, const vtkStdString &yColumn,
+                            const vtkStdString &densityColumn);
+  virtual void SetInputData(vtkTable *table, const vtkStdString &xColumn,
+                            const vtkStdString &yColumn,
+                            const vtkStdString &densityColumn);
+
+  virtual void SetInputData(vtkTable *table, vtkIdType xColumn,
+                            vtkIdType yColumn,
+                            vtkIdType densityColumn);
+
+protected:
+  vtkPlotBag();
+  ~vtkPlotBag();
+
+  void UpdateTableCache(vtkDataArray*);
+
+  vtkPoints2D* MedianPoints;
+  vtkPoints2D* Q3Points;
+
+private:
+  vtkPlotBag(const vtkPlotBag &); // Not implemented.
+  void operator=(const vtkPlotBag &); // Not implemented.
+};
+
+#endif //__vtkPlotBag_h
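
A hypothetical usage sketch for the new class (the chart setup and the column
names "X", "Y" and "Density" are assumptions, not part of the patch; the
density column would typically come from vtkHighestDensityRegionsStatistics
as noted in the class description):

  #include "vtkChartXY.h"
  #include "vtkNew.h"
  #include "vtkPlotBag.h"
  #include "vtkTable.h"

  void AddBagPlot(vtkChartXY* chart, vtkTable* table)
  {
    // table is expected to hold "X", "Y" and "Density" row columns of equal length.
    vtkNew<vtkPlotBag> bag;
    bag->SetInputData(table, "X", "Y", "Density");
    chart->AddPlot(bag.GetPointer()); // the chart keeps its own reference
  }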
diff --git a/Charts/Core/vtkPlotBar.cxx b/Charts/Core/vtkPlotBar.cxx
index ca67d2b..f9c0913 100644
--- a/Charts/Core/vtkPlotBar.cxx
+++ b/Charts/Core/vtkPlotBar.cxx
@@ -32,6 +32,7 @@
 #include "vtkSmartPointer.h"
 #include "vtkColorSeries.h"
 #include "vtkStringArray.h"
+#include "vtkNew.h"
 
 #include "vtkObjectFactory.h"
 
@@ -45,51 +46,75 @@ namespace {
 
 // Copy the two arrays into the points array
 template<class A, class B>
-void CopyToPoints(vtkPoints2D *points, vtkPoints2D *previous_points, A *a, B *b,
-                  int n, int logScale)
+void CopyToPoints(vtkPoints2D *points, vtkPoints2D *previousPoints, A *a, B *b,
+                  int n, int logScale, const vtkRectd &ss)
 {
   points->SetNumberOfPoints(n);
+  float* data = static_cast<float*>(points->GetVoidPointer(0));
+  float* prevData = NULL;
+  if (previousPoints && static_cast<int>(previousPoints->GetNumberOfPoints()) == n)
+    {
+    prevData = static_cast<float*>(previousPoints->GetVoidPointer(0));
+    }
+  float prev = 0.0;
   for (int i = 0; i < n; ++i)
     {
-    double prev[] = {0.0,0.0};
-    if (previous_points)
-      previous_points->GetPoint(i,prev);
-    points->SetPoint(i,
-      (logScale & 1) ? log10(static_cast<double>(a[i])) : a[i],
-      (logScale & 2) ?
-        log10(static_cast<double>(b[i] + prev[1])) : (b[i] + prev[1]));
+    if (prevData)
+      {
+      prev = prevData[2 * i + 1];
+      }
+    A tmpA((a[i] + ss[0]) * ss[2]);
+    B tmpB((b[i] + ss[1]) * ss[3]);
+    data[2 * i]     = static_cast<float>((logScale & 1) ?
+                                         log10(static_cast<double>(tmpA))
+                                         : tmpA);
+    data[2 * i + 1] = static_cast<float>((logScale & 2) ?
+                                         log10(static_cast<double>(tmpB + prev))
+                                         : (tmpB + prev));
     }
 }
 
 // Copy one array into the points array, use the index of that array as x
 template<class A>
-void CopyToPoints(
-  vtkPoints2D *points, vtkPoints2D *previous_points, A *a, int n, int logScale)
+void CopyToPoints(vtkPoints2D *points, vtkPoints2D *previousPoints, A *a, int n,
+                  int logScale, const vtkRectd &ss)
 {
   points->SetNumberOfPoints(n);
+  float* data = static_cast<float*>(points->GetVoidPointer(0));
+  float* prevData = NULL;
+  if (previousPoints && static_cast<int>(previousPoints->GetNumberOfPoints()) == n)
+    {
+    prevData = static_cast<float*>(previousPoints->GetVoidPointer(0));
+    }
+  float prev = 0.0;
   for (int i = 0; i < n; ++i)
     {
-    double prev[] = {0.0,0.0};
-    if (previous_points)
-      previous_points->GetPoint(i,prev);
-    points->SetPoint(i, i, a[i] + prev[1]);
-    points->SetPoint(i,
-      (logScale & 1) ? log10(static_cast<double>(i + 1.0)) : i,
-      (logScale & 2) ?
-        log10(static_cast<double>(a[i] + prev[1])) : (a[i] + prev[1]));
+    if (prevData)
+      {
+      prev = prevData[2 * i + 1];
+      }
+    A tmpA((a[i] + ss[1]) * ss[3]);
+    data[2 * i]     = static_cast<float>((logScale & 1) ?
+                                         log10(static_cast<double>(i + 1.0))
+                                         : i);
+    data[2 * i + 1] = static_cast<float>((logScale & 2) ?
+                                         log10(static_cast<double>(tmpA + prev))
+                                         : (tmpA + prev));
     }
 }
 
 // Copy the two arrays into the points array
 template<class A>
-void CopyToPointsSwitch(vtkPoints2D *points, vtkPoints2D *previous_points, A *a,
-                        vtkDataArray *b, int n, int logScale)
+void CopyToPointsSwitch(vtkPoints2D *points, vtkPoints2D *previousPoints, A *a,
+                        vtkDataArray *b, int n, int logScale,
+                        const vtkRectd &ss)
 {
   switch(b->GetDataType())
     {
     vtkTemplateMacro(
-        CopyToPoints(points,previous_points, a,
-                     static_cast<VTK_TT*>(b->GetVoidPointer(0)), n, logScale));
+        CopyToPoints(points,previousPoints, a,
+                     static_cast<VTK_TT*>(b->GetVoidPointer(0)), n, logScale,
+                     ss));
     }
 }
 
@@ -155,9 +180,8 @@ class vtkPlotBarSegment : public vtkObject {
       delete this->SelectionSet;
       }
 
-    void Configure(
-      vtkPlotBar* bar, vtkDataArray* x_array, vtkDataArray* y_array,
-      vtkAxis* x_axis, vtkAxis* y_axis, vtkPlotBarSegment* prev)
+    void Configure(vtkPlotBar* bar, vtkDataArray* xArray, vtkDataArray* yArray,
+                   vtkAxis* xAxis, vtkAxis* yAxis, vtkPlotBarSegment* prev)
       {
       this->Bar = bar;
       this->Previous = prev;
@@ -169,27 +193,28 @@ class vtkPlotBarSegment : public vtkObject {
       delete this->Sorted;
       delete this->SelectionSet;
 
-      int logScale =
-        (x_axis->GetLogScaleActive() ? 1 : 0) +
-        (y_axis->GetLogScaleActive() ? 2 : 0);
-      if (x_array)
+      int logScale = (xAxis->GetLogScaleActive() ? 1 : 0) +
+          (yAxis->GetLogScaleActive() ? 2 : 0);
+      if (xArray)
         {
-        switch (x_array->GetDataType())
+        switch (xArray->GetDataType())
           {
             vtkTemplateMacro(
               CopyToPointsSwitch(this->Points,this->Previous ? this->Previous->Points : 0,
-                                 static_cast<VTK_TT*>(x_array->GetVoidPointer(0)),
-                                 y_array,x_array->GetNumberOfTuples(), logScale));
+                                 static_cast<VTK_TT*>(xArray->GetVoidPointer(0)),
+                                 yArray, xArray->GetNumberOfTuples(), logScale,
+                                 this->Bar->GetShiftScale()));
           }
         }
       else
         { // Using Index for X Series
-        switch (y_array->GetDataType())
+        switch (yArray->GetDataType())
           {
           vtkTemplateMacro(
             CopyToPoints(this->Points, this->Previous ? this->Previous->Points : 0,
-                         static_cast<VTK_TT*>(y_array->GetVoidPointer(0)),
-                         y_array->GetNumberOfTuples(), logScale));
+                         static_cast<VTK_TT*>(yArray->GetVoidPointer(0)),
+                         yArray->GetNumberOfTuples(), logScale,
+                         this->Bar->GetShiftScale()));
           }
         }
       }
@@ -390,6 +415,7 @@ class vtkPlotBarSegment : public vtkObject {
     vtkPlotBar *Bar;
     VectorPIMPL* Sorted;
     std::set<vtkIdType>* SelectionSet;
+    vtkVector2d ScalingFactor;
     };
 
 vtkStandardNewMacro(vtkPlotBarSegment);
@@ -405,14 +431,13 @@ public:
     }
 
   vtkPlotBarSegment* AddSegment(
-    vtkDataArray *x_array, vtkDataArray *y_array,
-    vtkAxis* x_axis, vtkAxis* y_axis, vtkPlotBarSegment *prev=0)
+    vtkDataArray *xArray, vtkDataArray *yArray,
+    vtkAxis* xAxis, vtkAxis* yAxis, vtkPlotBarSegment *prev = 0)
     {
-    vtkSmartPointer<vtkPlotBarSegment> segment =
-        vtkSmartPointer<vtkPlotBarSegment>::New();
-    segment->Configure(this->Bar, x_array, y_array, x_axis, y_axis, prev);
-    this->Segments.push_back(segment);
-    return segment;
+    vtkNew<vtkPlotBarSegment> segment;
+    segment->Configure(this->Bar, xArray, yArray, xAxis, yAxis, prev);
+    this->Segments.push_back(segment.GetPointer());
+    return segment.GetPointer();
     }
 
   void PaintSegments(vtkContext2D *painter, vtkColorSeries *colorSeries,
@@ -607,7 +632,7 @@ void vtkPlotBar::GetBounds(double bounds[4], bool unscaled)
   vtkDataArray *y = this->Data->GetInputArrayToProcess(1, table);
   if (!y)
     {
-      return;
+    return;
     }
 
   if (this->UseIndexForXSeries)
@@ -629,14 +654,14 @@ void vtkPlotBar::GetBounds(double bounds[4], bool unscaled)
 
   y->GetRange(&bounds[valuesLow]);
 
-  double y_range[2];
+  double yRange[2];
   std::map< int, std::string >::iterator it;
   for ( it = this->Private->AdditionalSeries.begin(); it !=
                   this->Private->AdditionalSeries.end(); ++it )
     {
     y = vtkDataArray::SafeDownCast(table->GetColumnByName((*it).second.c_str()));
-    y->GetRange(y_range);
-    bounds[valuesHigh] += y_range[1];
+    y->GetRange(yRange);
+    bounds[valuesHigh] += yRange[1];
     }
 
   // Bar plots always have one of the value bounds at the origin
@@ -804,7 +829,8 @@ bool vtkPlotBar::UpdateTableCache(vtkTable *table)
 
   this->Private->Update();
 
-  vtkPlotBarSegment *prev = this->Private->AddSegment(x, y, this->GetXAxis(), this->GetYAxis());
+  vtkPlotBarSegment *prev = this->Private->AddSegment(x, y, this->GetXAxis(),
+                                                      this->GetYAxis());
 
   std::map< int, std::string >::iterator it;
 
@@ -885,7 +911,7 @@ bool vtkPlotBar::SelectPoints(const vtkVector2f& min, const vtkVector2f& max)
 }
 
 //-----------------------------------------------------------------------------
-vtkStdString vtkPlotBar::GetTooltipLabel(const vtkVector2f &plotPos,
+vtkStdString vtkPlotBar::GetTooltipLabel(const vtkVector2d &plotPos,
                                          vtkIdType seriesIndex,
                                          vtkIdType segmentIndex)
 {
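
The rewritten CopyToPoints() helpers earlier in this file apply the plot's
shift/scale before the optional log10 mapping. A one-value sketch of that
transform (illustrative only; for stacked bars the previous segment's y is
added to the shifted/scaled value before the log10):

  #include <cmath>

  // Shift/scale first, then the optional log10, matching the helpers above.
  float MapCoordinate(double v, double shift, double scale, bool logScale)
  {
    double t = (v + shift) * scale;
    return static_cast<float>(logScale ? std::log10(t) : t);
  }

  // e.g. x is mapped with (ss[0], ss[2], logScale & 1) and y with
  // (ss[1], ss[3], logScale & 2), where ss is the plot's ShiftScale rect.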
diff --git a/Charts/Core/vtkPlotBar.h b/Charts/Core/vtkPlotBar.h
index 7a7d117..dd46285 100644
--- a/Charts/Core/vtkPlotBar.h
+++ b/Charts/Core/vtkPlotBar.h
@@ -131,7 +131,7 @@ public:
   // Description:
   // Generate and return the tooltip label string for this plot
   // The segmentIndex is implemented here.
-  virtual vtkStdString GetTooltipLabel(const vtkVector2f &plotPos,
+  virtual vtkStdString GetTooltipLabel(const vtkVector2d &plotPos,
                                        vtkIdType seriesIndex,
                                        vtkIdType segmentIndex);
 
diff --git a/Charts/Core/vtkPlotFunctionalBag.cxx b/Charts/Core/vtkPlotFunctionalBag.cxx
new file mode 100644
index 0000000..80210a6
--- /dev/null
+++ b/Charts/Core/vtkPlotFunctionalBag.cxx
@@ -0,0 +1,349 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPlotFunctionalBag.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkPlotFunctionalBag.h"
+
+#include "vtkAbstractArray.h"
+#include "vtkAxis.h"
+#include "vtkBrush.h"
+#include "vtkContext2D.h"
+#include "vtkContextMapper2D.h"
+#include "vtkDoubleArray.h"
+#include "vtkLookupTable.h"
+#include "vtkObjectFactory.h"
+#include "vtkPen.h"
+#include "vtkPlotLine.h"
+#include "vtkPoints2D.h"
+#include "vtkRect.h"
+#include "vtkScalarsToColors.h"
+#include "vtkTable.h"
+
+//-----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkPlotFunctionalBag);
+
+//-----------------------------------------------------------------------------
+vtkPlotFunctionalBag::vtkPlotFunctionalBag()
+{
+  this->LookupTable = 0;
+  this->TooltipDefaultLabelFormat = "%l (%x, %y)";
+  this->LogX = false;
+  this->LogY = false;
+}
+
+//-----------------------------------------------------------------------------
+vtkPlotFunctionalBag::~vtkPlotFunctionalBag()
+{
+  if (this->LookupTable)
+    {
+    this->LookupTable->UnRegister(this);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotFunctionalBag::Update()
+{
+  if (!this->Visible)
+    {
+    return;
+    }
+  // Check if we have an input
+  vtkTable *table = this->Data->GetInput();
+
+  if (!table)
+    {
+    vtkDebugMacro(<< "Update event called with no input table set.");
+    return;
+    }
+  else if(this->Data->GetMTime() > this->BuildTime ||
+          table->GetMTime() > this->BuildTime ||
+          (this->LookupTable && this->LookupTable->GetMTime() > this->BuildTime) ||
+          this->MTime > this->BuildTime)
+    {
+    vtkDebugMacro(<< "Updating cached values.");
+    this->UpdateTableCache(table);
+    }
+  else if ((this->XAxis && this->XAxis->GetMTime() > this->BuildTime) ||
+           (this->YAxis && this->YAxis->GetMTime() > this->BuildTime))
+    {
+    if (this->LogX != this->XAxis->GetLogScale() ||
+        this->LogY != this->YAxis->GetLogScale())
+      {
+      this->UpdateTableCache(table);
+      }
+    }
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPlotFunctionalBag::UpdateTableCache(vtkTable *table)
+{
+  if (!this->LookupTable)
+    {
+    this->CreateDefaultLookupTable();
+    this->LookupTable->SetRange(0, table->GetNumberOfColumns());
+    this->LookupTable->Build();
+    }
+
+  this->BagPoints->Reset();
+
+  vtkDataArray *array[2] = { 0, 0 };
+  if (!this->GetDataArrays(table, array))
+    {
+    this->BuildTime.Modified();
+    return false;
+    }
+
+  if (array[1]->GetNumberOfComponents() == 1)
+    {
+    // The input array has one component, manage it as a line
+    this->Line->SetInputData(table,
+      array[0] ? array[0]->GetName() : "", array[1]->GetName());
+    this->Line->SetUseIndexForXSeries(this->UseIndexForXSeries);
+    this->Line->SetMarkerStyle(vtkPlotPoints::NONE);
+    double rgb[3];
+    this->GetColor(rgb);
+    this->Line->SetColor(rgb[0], rgb[1], rgb[2]);
+    this->Line->SetWidth(this->GetWidth());
+    this->Line->SetPen(this->Pen);
+    this->Line->SetBrush(this->Brush);
+    this->Line->Update();
+    }
+  else if (array[1]->GetNumberOfComponents() == 2)
+    {
+    // The input array has 2 components, this must be a bag
+    // with {miny,maxy} tuples
+    vtkDoubleArray* darr = vtkDoubleArray::SafeDownCast(array[1]);
+
+    this->LogX = this->XAxis->GetLogScaleActive();
+    this->LogY = this->YAxis->GetLogScaleActive();
+    bool xAbs = this->XAxis->GetUnscaledMinimum() < 0.;
+    bool yAbs = this->YAxis->GetUnscaledMinimum() < 0.;
+    if (darr)
+      {
+      vtkIdType nbRows = array[1]->GetNumberOfTuples();
+      this->BagPoints->SetNumberOfPoints(2 * nbRows);
+      for (vtkIdType i = 0; i < nbRows; i++)
+        {
+        double y[2];
+        darr->GetTuple(i, y);
+
+        double x = (!this->UseIndexForXSeries && array[0]) ?
+          array[0]->GetVariantValue(i).ToDouble() : static_cast<double>(i);
+        if (this->LogX)
+          {
+          x = xAbs ? log10(fabs(x)) : log10(x);
+          }
+
+        if (this->LogY)
+          {
+          y[0] = yAbs ? log10(fabs(y[0])) : log10(y[0]);
+          y[1] = yAbs ? log10(fabs(y[1])) : log10(y[1]);
+          }
+
+        this->BagPoints->SetPoint(2 * i, x, y[0]);
+        this->BagPoints->SetPoint(2 * i + 1, x, y[1]);
+        }
+      }
+    }
+
+  this->BuildTime.Modified();
+
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPlotFunctionalBag::GetDataArrays(vtkTable *table, vtkDataArray *array[2])
+{
+  if (!table)
+    {
+    return false;
+    }
+
+  // Get the x and y arrays (index 0 and 1 respectively)
+  array[0] = this->UseIndexForXSeries ?
+        0 : this->Data->GetInputArrayToProcess(0, table);
+  array[1] = this->Data->GetInputArrayToProcess(1, table);
+
+  if (!array[0] && !this->UseIndexForXSeries)
+    {
+    vtkErrorMacro(<< "No X column is set (index 0).");
+    return false;
+    }
+  else if (!array[1])
+    {
+    vtkErrorMacro(<< "No Y column is set (index 1).");
+    return false;
+    }
+  else if (!this->UseIndexForXSeries &&
+           array[0]->GetNumberOfTuples() != array[1]->GetNumberOfTuples())
+    {
+    vtkErrorMacro("The x and y columns must have the same number of elements. "
+                  << array[0]->GetNumberOfTuples() << ", "
+                  << array[1]->GetNumberOfTuples());
+    return false;
+    }
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPlotFunctionalBag::Paint(vtkContext2D *painter)
+{
+  // This is where everything should be drawn, or dispatched to other methods.
+  vtkDebugMacro(<< "Paint event called in vtkPlotFunctionalBag.");
+
+  if (!this->Visible)
+    {
+    return false;
+    }
+
+  if (this->BagPoints->GetNumberOfPoints() > 0)
+    {
+    unsigned char pcolor[4];
+    double pwidth = this->Pen->GetWidth();
+    this->Pen->SetWidth(0.);
+    painter->ApplyPen(this->Pen);
+    this->Pen->GetColor(pcolor);
+    this->Brush->SetColor(pcolor);
+    painter->ApplyBrush(this->Brush);
+    painter->DrawQuadStrip(this->BagPoints.GetPointer());
+    this->Pen->SetWidth(pwidth);
+    }
+  else
+    {
+    this->Line->Paint(painter);
+    }
+
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPlotFunctionalBag::PaintLegend(vtkContext2D *painter,
+                                       const vtkRectf& rect, int index)
+{
+  if (this->BagPoints->GetNumberOfPoints() > 0)
+    {
+    vtkNew<vtkPen> blackPen;
+    blackPen->SetWidth(1.0);
+    blackPen->SetColor(0, 0, 0, 255);
+    painter->ApplyPen(blackPen.GetPointer());
+    painter->ApplyBrush(this->Brush);
+    painter->DrawRect(rect[0], rect[1], rect[2], rect[3]);
+    }
+  else
+    {
+    this->Line->PaintLegend(painter, rect, index);
+    }
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+vtkIdType vtkPlotFunctionalBag::GetNearestPoint(const vtkVector2f& point,
+                                                const vtkVector2f& tol,
+                                                vtkVector2f* loc)
+{
+  if (this->BagPoints->GetNumberOfPoints() == 0)
+    {
+    return this->Line->GetNearestPoint(point, tol, loc);
+    }
+  return -1;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotFunctionalBag::GetBounds(double bounds[4])
+{
+  if (this->BagPoints->GetNumberOfPoints() > 0)
+    {
+    this->BagPoints->GetBounds(bounds);
+    if (this->LogX)
+      {
+      bounds[0] = log10(bounds[0]);
+      bounds[1] = log10(bounds[1]);
+      }
+    if (this->LogY)
+      {
+      bounds[2] = log10(bounds[2]);
+      bounds[3] = log10(bounds[3]);
+      }
+    }
+  else
+    {
+    this->Line->GetBounds(bounds);
+    }
+
+  vtkDebugMacro(<< "Bounds: " << bounds[0] << "\t" << bounds[1] << "\t"
+                << bounds[2] << "\t" << bounds[3]);
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotFunctionalBag::GetUnscaledInputBounds(double bounds[4])
+{
+  if (this->BagPoints->GetNumberOfPoints() > 0)
+    {
+    this->BagPoints->GetBounds(bounds);
+    }
+  else
+    {
+    this->Line->GetUnscaledInputBounds(bounds);
+    }
+
+  vtkDebugMacro(<< "Bounds: " << bounds[0] << "\t" << bounds[1] << "\t"
+                << bounds[2] << "\t" << bounds[3]);
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotFunctionalBag::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotFunctionalBag::SetLookupTable(vtkScalarsToColors *lut)
+{
+  if ( this->LookupTable != lut )
+    {
+    if ( this->LookupTable)
+      {
+      this->LookupTable->UnRegister(this);
+      }
+    this->LookupTable = lut;
+    if (lut)
+      {
+      lut->Register(this);
+      }
+    this->Modified();
+    }
+}
+
+//-----------------------------------------------------------------------------
+vtkScalarsToColors *vtkPlotFunctionalBag::GetLookupTable()
+{
+  if ( this->LookupTable == 0 )
+    {
+    this->CreateDefaultLookupTable();
+    }
+  return this->LookupTable;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPlotFunctionalBag::CreateDefaultLookupTable()
+{
+  if ( this->LookupTable)
+    {
+    this->LookupTable->UnRegister(this);
+    }
+  this->LookupTable = vtkLookupTable::New();
+  // Consistent Register/UnRegisters.
+  this->LookupTable->Register(this);
+  this->LookupTable->Delete();
+}
diff --git a/Charts/Core/vtkPlotFunctionalBag.h b/Charts/Core/vtkPlotFunctionalBag.h
new file mode 100644
index 0000000..48478af
--- /dev/null
+++ b/Charts/Core/vtkPlotFunctionalBag.h
@@ -0,0 +1,134 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPlotFunctionalBag.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkPlotFunctionalBag - Class for drawing an XY line plot or bag
+// given two columns from a vtkTable.
+//
+// .SECTION Description
+// Depending on the number of components, this class will draw either
+// a line plot (for a one-component column) or, for a two-component column,
+// a filled polygonal band (the bag) going from the first to the second
+// component on the Y-axis along the X-axis. The filter
+// vtkExtractFunctionalBagPlot is intended to create such "bag" columns.
+//
+// .SECTION See Also
+// vtkExtractFunctionalBagPlot
+
+#ifndef __vtkPlotFunctionalBag_h
+#define __vtkPlotFunctionalBag_h
+
+#include "vtkChartsCoreModule.h" // For export macro
+#include "vtkPlot.h"
+#include "vtkNew.h"              // Needed to hold SP ivars
+
+class vtkDataArray;
+class vtkPlotFuntionalBagInternal;
+class vtkPlotLine;
+class vtkPoints2D;
+class vtkScalarsToColors;
+
+class VTKCHARTSCORE_EXPORT vtkPlotFunctionalBag : public vtkPlot
+{
+public:
+  vtkTypeMacro(vtkPlotFunctionalBag, vtkPlot);
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Creates a functional bag plot object.
+  static vtkPlotFunctionalBag *New();
+
+  // Description:
+  // Perform any updates to the item that may be necessary before rendering.
+  // The scene should take care of calling this on all items before their
+  // Paint function is invoked.
+  virtual void Update();
+
+  // Description:
+  // Paint event for the plot, called whenever the chart needs to be drawn.
+  virtual bool Paint(vtkContext2D *painter);
+
+  // Description:
+  // Paint legend event for the plot, called whenever the legend needs the
+  // plot items symbol/mark/line drawn. A rect is supplied with the lower left
+  // corner of the rect (elements 0 and 1) and with width x height (elements 2
+  // and 3). The plot can choose how to fill the space supplied.
+  virtual bool PaintLegend(vtkContext2D *painter, const vtkRectf& rect,
+                           int legendIndex);
+
+  // Description:
+  // Get the bounds for this plot as (Xmin, Xmax, Ymin, Ymax).
+  virtual void GetBounds(double bounds[4]);
+
+  // Description:
+  // Get the non-log-scaled bounds on chart inputs for this plot as
+  // (Xmin, Xmax, Ymin, Ymax).
+  virtual void GetUnscaledInputBounds(double bounds[4]);
+
+  // Description:
+  // Specify a lookup table for the mapper to use.
+  void SetLookupTable(vtkScalarsToColors *lut);
+  vtkScalarsToColors *GetLookupTable();
+
+  // Description:
+  // Create default lookup table. Generally used to create one when none
+  // is available with the scalar data.
+  virtual void CreateDefaultLookupTable();
+
+//BTX
+  // Description:
+  // Function to query a plot for the nearest point to the specified coordinate.
+  // Returns the index of the data series with which the point is associated or
+  // -1.
+  virtual vtkIdType GetNearestPoint(const vtkVector2f& point,
+                                    const vtkVector2f& tolerance,
+                                    vtkVector2f* location);
+//ETX
+
+protected:
+  vtkPlotFunctionalBag();
+  ~vtkPlotFunctionalBag();
+
+  // Description:
+  // Populate the data arrays ready to operate on input data.
+  bool GetDataArrays(vtkTable *table, vtkDataArray *array[2]);
+
+  // Description:
+  // Update the table cache.
+  bool UpdateTableCache(vtkTable*);
+
+  // Description:
+  // The cache is marked dirty until it has been initialized.
+  vtkTimeStamp BuildTime;
+
+  // Description:
+  // Lookup Table for coloring points by scalar value
+  vtkScalarsToColors *LookupTable;
+
+  // Description:
+  // The plot line delegate for line series
+  vtkNew<vtkPlotLine> Line;
+
+  // Description:
+  // The bag points ordered in quadstrip fashion
+  vtkNew<vtkPoints2D> BagPoints;
+
+  bool LogX, LogY;
+
+private:
+  vtkPlotFunctionalBag(const vtkPlotFunctionalBag &); // Not implemented.
+  void operator=(const vtkPlotFunctionalBag &); // Not implemented.
+};
+
+#endif //__vtkPlotFunctionalBag_h
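
For reference, here is a minimal usage sketch for the class added above. This is an editor's illustration rather than part of the imported patch: it assumes the vtkContextView/vtkChartXY APIs already present in VTK, invents the column names "x" and "bag", and omits module auto-initialization of the rendering factories. The two-component "bag" column stands in for the {miny,maxy} tuples normally produced by vtkExtractFunctionalBagPlot.

    #include "vtkChartXY.h"
    #include "vtkContextScene.h"
    #include "vtkContextView.h"
    #include "vtkDoubleArray.h"
    #include "vtkNew.h"
    #include "vtkPlotFunctionalBag.h"
    #include "vtkRenderWindowInteractor.h"
    #include "vtkTable.h"

    int main(int, char *[])
    {
      // Build a small table: an abscissa column and a two-component {min,max} column.
      vtkNew<vtkTable> table;
      vtkNew<vtkDoubleArray> x;
      x->SetName("x");
      vtkNew<vtkDoubleArray> bag;
      bag->SetName("bag");
      bag->SetNumberOfComponents(2);
      for (int i = 0; i < 10; ++i)
        {
        x->InsertNextValue(i);
        double minmax[2] = { i - 1.0, i + 1.0 };
        bag->InsertNextTuple(minmax);
        }
      table->AddColumn(x.GetPointer());
      table->AddColumn(bag.GetPointer());

      // A two-component Y column makes the plot draw a filled band (the bag);
      // a one-component column would be delegated to the internal vtkPlotLine.
      vtkNew<vtkPlotFunctionalBag> plot;
      plot->SetInputData(table.GetPointer(), "x", "bag");

      vtkNew<vtkContextView> view;
      vtkNew<vtkChartXY> chart;
      view->GetScene()->AddItem(chart.GetPointer());
      chart->AddPlot(plot.GetPointer());

      view->GetInteractor()->Initialize();
      view->GetInteractor()->Start();
      return 0;
    }
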
diff --git a/Charts/Core/vtkPlotHistogram2D.cxx b/Charts/Core/vtkPlotHistogram2D.cxx
index b020189..596335c 100644
--- a/Charts/Core/vtkPlotHistogram2D.cxx
+++ b/Charts/Core/vtkPlotHistogram2D.cxx
@@ -157,7 +157,7 @@ vtkIdType vtkPlotHistogram2D::GetNearestPoint(const vtkVector2f& point,
 }
 
 //-----------------------------------------------------------------------------
-vtkStdString vtkPlotHistogram2D::GetTooltipLabel(const vtkVector2f &plotPos,
+vtkStdString vtkPlotHistogram2D::GetTooltipLabel(const vtkVector2d &plotPos,
                                                  vtkIdType seriesIndex,
                                                  vtkIdType)
 {
diff --git a/Charts/Core/vtkPlotHistogram2D.h b/Charts/Core/vtkPlotHistogram2D.h
index be88b5a..fe03f4c 100644
--- a/Charts/Core/vtkPlotHistogram2D.h
+++ b/Charts/Core/vtkPlotHistogram2D.h
@@ -93,7 +93,7 @@ public:
   //   '%j' The Y axis tick label for the histogram cell
   // Any other characters or unrecognized format tags are printed in the
   // tooltip label verbatim.
-  virtual vtkStdString GetTooltipLabel(const vtkVector2f &plotPos,
+  virtual vtkStdString GetTooltipLabel(const vtkVector2d &plotPos,
                                        vtkIdType seriesIndex,
                                        vtkIdType segmentIndex);
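
As a side note on the tooltip documentation above: the format string is configured through the vtkPlot base class, so the %j tag can be combined with the generic tags. A one-line sketch (editor's illustration; "histo" is an assumed vtkPlotHistogram2D instance, and %x/%y are the standard plot-position tags):

    // Show the plot position plus the Y-axis tick label of the histogram cell.
    histo->SetTooltipLabelFormat("(%x, %y) row: %j");
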
 
diff --git a/Charts/Core/vtkPlotLine3D.cxx b/Charts/Core/vtkPlotLine3D.cxx
index 73b967b..39f19fe 100644
--- a/Charts/Core/vtkPlotLine3D.cxx
+++ b/Charts/Core/vtkPlotLine3D.cxx
@@ -47,7 +47,7 @@ bool vtkPlotLine3D::Paint(vtkContext2D *painter)
 
   // Get the 3D context.
   vtkContext3D *context = painter->GetContext3D();
-  if(context == false)
+  if(context == NULL)
     {
     return false;
     }
diff --git a/Charts/Core/vtkPlotParallelCoordinates.cxx b/Charts/Core/vtkPlotParallelCoordinates.cxx
index 52d152c..a557a96 100644
--- a/Charts/Core/vtkPlotParallelCoordinates.cxx
+++ b/Charts/Core/vtkPlotParallelCoordinates.cxx
@@ -58,7 +58,7 @@ public:
 
 
 //-----------------------------------------------------------------------------
-vtkStandardNewMacro(vtkPlotParallelCoordinates);
+vtkStandardNewMacro(vtkPlotParallelCoordinates)
 
 //-----------------------------------------------------------------------------
 vtkPlotParallelCoordinates::vtkPlotParallelCoordinates()
@@ -150,14 +150,14 @@ bool vtkPlotParallelCoordinates::Paint(vtkContext2D *painter)
 
   // Draw all of the lines
   painter->ApplyPen(this->Pen);
-  int nc_comps(0);
+  int ncComps(0);
   if (this->ScalarVisibility && this->Colors)
     {
-    nc_comps = static_cast<int>(this->Colors->GetNumberOfComponents());
+    ncComps = static_cast<int>(this->Colors->GetNumberOfComponents());
     }
-  if (this->ScalarVisibility && this->Colors && nc_comps == 4)
+  if (this->ScalarVisibility && this->Colors && ncComps == 4)
     {
-    for (size_t i = 0, nc = 0; i < rows; ++i, nc += nc_comps)
+    for (size_t i = 0, nc = 0; i < rows; ++i, nc += ncComps)
       {
       for (size_t j = 0; j < cols; ++j)
         {
@@ -373,13 +373,13 @@ bool vtkPlotParallelCoordinates::UpdateTableCache(vtkTable *table)
       }
 
     // Also need the range from the appropriate axis, to normalize points
-    float min = axis->GetUnscaledMinimum();
-    float max = axis->GetUnscaledMaximum();
-    float scale = 1.0f / (max - min);
+    double min = axis->GetUnscaledMinimum();
+    double max = axis->GetUnscaledMaximum();
+    double scale = 1.0f / (max - min);
 
     for (vtkIdType j = 0; j < rows; ++j)
       {
-      col[j] = (data->GetTuple1(j)-min) * scale;
+      col[j] = (data->GetTuple1(j) - min) * scale;
       }
     }
 
diff --git a/Charts/Core/vtkPlotPie.cxx b/Charts/Core/vtkPlotPie.cxx
index 0820d79..cfe238b 100644
--- a/Charts/Core/vtkPlotPie.cxx
+++ b/Charts/Core/vtkPlotPie.cxx
@@ -202,7 +202,7 @@ vtkColorSeries *vtkPlotPie::GetColorSeries()
 //-----------------------------------------------------------------------------
 vtkIdType vtkPlotPie::GetNearestPoint(const vtkVector2f& point,
                                       const vtkVector2f&,
-                                      vtkVector2f*)
+                                      vtkVector2f* value)
 {
   float x = point.GetX() - this->Private->CenterX;
   float y = point.GetY() - this->Private->CenterY;
@@ -222,6 +222,11 @@ vtkIdType vtkPlotPie::GetNearestPoint(const vtkVector2f& point,
     int ret = lbound - angles;
     // There are two of each angle in the array (start,end for each point)
     ret = ret / 2;
+
+    vtkTable *table = this->Data->GetInput();
+    vtkDataArray* data = this->Data->GetInputArrayToProcess(0, table);
+    value->SetX(ret);
+    value->SetY(data->GetTuple1(ret));
     return ret;
     }
 
diff --git a/Charts/Core/vtkPlotPoints.cxx b/Charts/Core/vtkPlotPoints.cxx
index e1afad6..46c2eab 100644
--- a/Charts/Core/vtkPlotPoints.cxx
+++ b/Charts/Core/vtkPlotPoints.cxx
@@ -35,6 +35,7 @@
 #include <vector>
 #include <algorithm>
 #include <limits>
+#include <set>
 
 // PIMPL for STL vector...
 struct vtkIndexedVector2f
@@ -297,15 +298,8 @@ void vtkPlotPoints::GetBounds(double bounds[4])
 {
   if (this->Points)
     {
-    if (!this->BadPoints)
-      {
-      this->Points->GetBounds(bounds);
-      }
-    else
-      {
-      // There are bad points in the series - need to do this ourselves.
-      this->CalculateBounds(bounds);
-      }
+    // Calculate the bounds ourselves; CalculateBounds skips any bad points.
+    this->CalculateBounds(bounds);
     }
   vtkDebugMacro(<< "Bounds: " << bounds[0] << "\t" << bounds[1] << "\t"
                 << bounds[2] << "\t" << bounds[3]);
@@ -314,6 +308,7 @@ void vtkPlotPoints::GetBounds(double bounds[4])
 //-----------------------------------------------------------------------------
 void vtkPlotPoints::GetUnscaledInputBounds(double bounds[4])
 {
+  this->CalculateUnscaledInputBounds();
   for (int i = 0; i < 4; ++i)
     {
     bounds[i] = this->UnscaledInputBounds[i];
@@ -503,85 +498,195 @@ bool vtkPlotPoints::SelectPointsInPolygon(const vtkContextPolygon &polygon)
 //-----------------------------------------------------------------------------
 namespace {
 
+// Find any bad points in the supplied array.
+template<typename T>
+void SetBadPoints(T *data, vtkIdType n, std::set<vtkIdType> &bad)
+{
+  for (vtkIdType i = 0; i < n; ++i)
+    {
+    if (vtkMath::IsInf(data[i]) || vtkMath::IsNan(data[i]))
+      {
+      bad.insert(i);
+      }
+    }
+}
+
+// Calculate the bounds from the original data.
+template<typename A>
+void ComputeBounds(A *a, int n, double bounds[2])
+{
+  bounds[0] =  std::numeric_limits<double>::max();
+  bounds[1] = -std::numeric_limits<double>::max();
+  for (int i = 0; i < n; ++a, ++i)
+    {
+    bounds[0] = bounds[0] < *a ? bounds[0] : *a;
+    bounds[1] = bounds[1] > *a ? bounds[1] : *a;
+    }
+}
+
+template<typename A>
+void ComputeBounds(A *a, int n, vtkIdTypeArray *bad, double bounds[2])
+{
+  // If possible, use the simpler code without any bad points.
+  if (!bad || bad->GetNumberOfTuples() == 0)
+    {
+    ComputeBounds(a, n, bounds);
+    return;
+    }
+
+  // Initialize the first range of points.
+  vtkIdType start = 0;
+  vtkIdType end = 0;
+  vtkIdType i = 0;
+  vtkIdType nBad = bad->GetNumberOfTuples();
+  if (bad->GetValue(i) == 0)
+    {
+    while (i < nBad && i == bad->GetValue(i))
+      {
+      start = bad->GetValue(i++) + 1;
+      }
+    if (start < n)
+      {
+      end = n;
+      }
+    else
+      {
+      // They are all bad points, return early.
+      return;
+      }
+    }
+  if (i < nBad)
+    {
+    end = bad->GetValue(i++);
+    }
+  else
+    {
+    end = n;
+    }
+
+  bounds[0] =  std::numeric_limits<double>::max();
+  bounds[1] = -std::numeric_limits<double>::max();
+  while (start < n)
+    {
+    // Calculate the min/max in this range.
+    while (start < end)
+      {
+      bounds[0] = bounds[0] < a[start] ? bounds[0] : a[start];
+      bounds[1] = bounds[1] > a[start] ? bounds[1] : a[start];
+      ++start;
+      }
+    // Now figure out the next range to be evaluated.
+    start = end + 1;
+    while (i < nBad && start == bad->GetValue(i))
+      {
+      start = bad->GetValue(i++) + 1;
+      }
+    if (i < nBad)
+      {
+      end = bad->GetValue(i++);
+      }
+    else
+      {
+      end = n;
+      }
+    }
+}
+
+// Dispatch this call off to the right function.
+template<typename A>
+void ComputeBounds(A *a, vtkDataArray *b, int n, vtkIdTypeArray *bad,
+                   double bounds[4])
+{
+  ComputeBounds(a, n, bad, bounds);
+  switch(b->GetDataType())
+    {
+    vtkTemplateMacro(
+      ComputeBounds(static_cast<VTK_TT*>(b->GetVoidPointer(0)), n, bad,
+                    &bounds[2]));
+    }
+}
+
 // Copy the two arrays into the points array
-template<class A, class B>
-void CopyToPoints(vtkPoints2D *points, A *a, B *b, int n, double bds[4])
+template<typename A, typename B>
+void CopyToPoints(vtkPoints2D *points, A *a, B *b, int n, const vtkRectd &ss)
 {
-  bds[0] = bds[2] = vtkMath::Inf();
-  bds[1] = bds[3] = -vtkMath::Inf();
   points->SetNumberOfPoints(n);
   float* data = static_cast<float*>(points->GetVoidPointer(0));
   for (int i = 0; i < n; ++i)
     {
-    data[2*i] = a[i];
-    data[2*i+1] = b[i];
-
-    bds[0] = bds[0] < a[i] ? bds[0] : a[i];
-    bds[1] = bds[1] > a[i] ? bds[1] : a[i];
-
-    bds[2] = bds[2] < b[i] ? bds[2] : b[i];
-    bds[3] = bds[3] > b[i] ? bds[3] : b[i];
+    data[2 * i]     = static_cast<float>((a[i] + ss[0]) * ss[2]);
+    data[2 * i + 1] = static_cast<float>((b[i] + ss[1]) * ss[3]);
     }
 }
 
 // Copy one array into the points array, use the index of that array as x
-template<class A>
-void CopyToPoints(vtkPoints2D *points, A *a, int n, double bds[4])
+template<typename A>
+void CopyToPoints(vtkPoints2D *points, A *a, int n, const vtkRectd &ss)
 {
-  bds[0] = 0.;
-  bds[1] = n - 1.;
   points->SetNumberOfPoints(n);
   float* data = static_cast<float*>(points->GetVoidPointer(0));
   for (int i = 0; i < n; ++i)
     {
-    data[2*i] = static_cast<float>(i);
-    data[2*i+1] = a[i];
-
-    bds[2] = bds[2] < a[i] ? bds[2] : a[i];
-    bds[3] = bds[3] > a[i] ? bds[3] : a[i];
+    data[2 * i]     = static_cast<float>((  i  + ss[0]) * ss[2]);
+    data[2 * i + 1] = static_cast<float>((a[i] + ss[1]) * ss[3]);
     }
 }
 
 // Copy the two arrays into the points array
-template<class A>
-void CopyToPointsSwitch(
-  vtkPoints2D *points, A *a, vtkDataArray *b, int n, double bds[4])
+template<typename A>
+void CopyToPointsSwitch(vtkPoints2D *points, A *a, vtkDataArray *b, int n,
+                        const vtkRectd &ss)
 {
   switch(b->GetDataType())
     {
     vtkTemplateMacro(
       CopyToPoints(
-        points, a, static_cast<VTK_TT*>(b->GetVoidPointer(0)), n, bds));
+        points, a, static_cast<VTK_TT*>(b->GetVoidPointer(0)), n, ss));
     }
 }
 
 }
 
 //-----------------------------------------------------------------------------
-bool vtkPlotPoints::UpdateTableCache(vtkTable *table)
+bool vtkPlotPoints::GetDataArrays(vtkTable *table, vtkDataArray *array[2])
 {
+  if (!table)
+    {
+    return false;
+    }
+
   // Get the x and y arrays (index 0 and 1 respectively)
-  vtkDataArray* x = this->UseIndexForXSeries ?
-                    0 : this->Data->GetInputArrayToProcess(0, table);
-  vtkDataArray* y = this->Data->GetInputArrayToProcess(1, table);
+  array[0] = this->UseIndexForXSeries ?
+        0 : this->Data->GetInputArrayToProcess(0, table);
+  array[1] = this->Data->GetInputArrayToProcess(1, table);
 
-  if (!x && !this->UseIndexForXSeries)
+  if (!array[0] && !this->UseIndexForXSeries)
     {
     vtkErrorMacro(<< "No X column is set (index 0).");
-    this->BuildTime.Modified();
     return false;
     }
-  else if (!y)
+  else if (!array[1])
     {
     vtkErrorMacro(<< "No Y column is set (index 1).");
-    this->BuildTime.Modified();
     return false;
     }
   else if (!this->UseIndexForXSeries &&
-           x->GetNumberOfTuples() != y->GetNumberOfTuples())
+           array[0]->GetNumberOfTuples() != array[1]->GetNumberOfTuples())
     {
     vtkErrorMacro("The x and y columns must have the same number of elements. "
-                  << x->GetNumberOfTuples() << ", " << y->GetNumberOfTuples());
+                  << array[0]->GetNumberOfTuples() << ", "
+                  << array[1]->GetNumberOfTuples());
+    return false;
+    }
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPlotPoints::UpdateTableCache(vtkTable *table)
+{
+  vtkDataArray *array[2] = { 0, 0 };
+  if (!this->GetDataArrays(table, array))
+    {
     this->BuildTime.Modified();
     return false;
     }
@@ -590,6 +695,8 @@ bool vtkPlotPoints::UpdateTableCache(vtkTable *table)
     {
     this->Points = vtkPoints2D::New();
     }
+  vtkDataArray *x = array[0];
+  vtkDataArray *y = array[1];
 
   // Now copy the components into their new columns
   if (this->UseIndexForXSeries)
@@ -599,7 +706,7 @@ bool vtkPlotPoints::UpdateTableCache(vtkTable *table)
       vtkTemplateMacro(
         CopyToPoints(
           this->Points, static_cast<VTK_TT*>(y->GetVoidPointer(0)),
-          y->GetNumberOfTuples(), this->UnscaledInputBounds));
+          y->GetNumberOfTuples(), this->ShiftScale));
       }
     }
   else
@@ -609,7 +716,7 @@ bool vtkPlotPoints::UpdateTableCache(vtkTable *table)
       vtkTemplateMacro(
         CopyToPointsSwitch(
           this->Points, static_cast<VTK_TT*>(x->GetVoidPointer(0)),
-          y, x->GetNumberOfTuples(), this->UnscaledInputBounds));
+          y, x->GetNumberOfTuples(), this->ShiftScale));
       }
     }
   this->CalculateLogSeries();
@@ -650,10 +757,45 @@ bool vtkPlotPoints::UpdateTableCache(vtkTable *table)
     }
 
   this->BuildTime.Modified();
+
   return true;
 }
 
 //-----------------------------------------------------------------------------
+void vtkPlotPoints::CalculateUnscaledInputBounds()
+{
+  vtkTable *table = this->Data->GetInput();
+  vtkDataArray *array[2] = { 0, 0 };
+  if (!this->GetDataArrays(table, array))
+    {
+    return;
+    }
+  // Now copy the components into their new columns
+  if (this->UseIndexForXSeries)
+    {
+    this->UnscaledInputBounds[0] = 0.0;
+    this->UnscaledInputBounds[1] = array[1]->GetNumberOfTuples() - 1;
+    switch(array[1]->GetDataType())
+      {
+      vtkTemplateMacro(
+        ComputeBounds(static_cast<VTK_TT*>(array[1]->GetVoidPointer(0)),
+            array[1]->GetNumberOfTuples(), this->BadPoints,
+            &this->UnscaledInputBounds[2]));
+      }
+    }
+  else
+    {
+    switch(array[0]->GetDataType())
+      {
+      vtkTemplateMacro(
+        ComputeBounds(static_cast<VTK_TT*>(array[0]->GetVoidPointer(0)),
+            array[1], array[0]->GetNumberOfTuples(), this->BadPoints,
+            this->UnscaledInputBounds));
+      }
+    }
+}
+
+//-----------------------------------------------------------------------------
 void vtkPlotPoints::CalculateLogSeries()
 {
   if (!this->XAxis || !this->YAxis)
@@ -704,27 +846,29 @@ void vtkPlotPoints::CalculateLogSeries()
 void vtkPlotPoints::FindBadPoints()
 {
   // This should be run after CalculateLogSeries as a final step.
-  float* data = static_cast<float*>(this->Points->GetVoidPointer(0));
   vtkIdType n = this->Points->GetNumberOfPoints();
-  if (!this->BadPoints)
-    {
-    this->BadPoints = vtkIdTypeArray::New();
-    }
-  else
-    {
-    this->BadPoints->SetNumberOfTuples(0);
-    }
 
   // Scan through and find any bad points.
-  for (vtkIdType i = 0; i < n; ++i)
+  vtkTable *table = this->Data->GetInput();
+  vtkDataArray *array[2] = { 0, 0 };
+  if (!this->GetDataArrays(table, array))
     {
-    vtkIdType p = 2*i;
-    if (vtkMath::IsInf(data[p]) || vtkMath::IsInf(data[p+1]) ||
-        vtkMath::IsNan(data[p]) || vtkMath::IsNan(data[p+1]))
+    return;
+    }
+  std::set<vtkIdType> bad;
+  if (!this->UseIndexForXSeries)
+    {
+    switch(array[0]->GetDataType())
       {
-      this->BadPoints->InsertNextValue(i);
+      vtkTemplateMacro(
+        SetBadPoints(static_cast<VTK_TT*>(array[0]->GetVoidPointer(0)), n, bad));
       }
     }
+  switch(array[1]->GetDataType())
+    {
+    vtkTemplateMacro(
+      SetBadPoints(static_cast<VTK_TT*>(array[1]->GetVoidPointer(0)), n, bad));
+    }
 
   // add points from the ValidPointMask
   if (this->ValidPointMask)
@@ -733,16 +877,29 @@ void vtkPlotPoints::FindBadPoints()
       {
       if (this->ValidPointMask->GetValue(i) == 0)
         {
-        this->BadPoints->InsertNextValue(i);
+        bad.insert(i);
         }
       }
     }
 
-  // sort bad points
-  std::sort(this->BadPoints->GetPointer(0),
-            this->BadPoints->GetPointer(this->BadPoints->GetNumberOfTuples()));
-
-  if (this->BadPoints->GetNumberOfTuples() == 0)
+  // If there are bad points copy them, if not ensure the pointer is null.
+  if (bad.size() > 0)
+    {
+    if (!this->BadPoints)
+      {
+      this->BadPoints = vtkIdTypeArray::New();
+      }
+    else
+      {
+      this->BadPoints->SetNumberOfTuples(0);
+      }
+    for (std::set<vtkIdType>::const_iterator it = bad.begin();
+         it != bad.end(); ++it)
+      {
+      this->BadPoints->InsertNextValue(*it);
+      }
+    }
+  else if (this->BadPoints)
     {
     this->BadPoints->Delete();
     this->BadPoints = NULL;
@@ -753,83 +910,24 @@ void vtkPlotPoints::FindBadPoints()
 void vtkPlotPoints::CalculateBounds(double bounds[4])
 {
   // We can use the BadPoints array to skip the bad points
-  if (!this->Points || !this->BadPoints)
+  if (!this->Points)
     {
     return;
     }
-  vtkIdType start = 0;
-  vtkIdType end = 0;
-  vtkIdType i = 0;
-  vtkIdType nBad = this->BadPoints->GetNumberOfTuples();
-  vtkIdType nPoints = this->Points->GetNumberOfPoints();
-  if (this->BadPoints->GetValue(i) == 0)
-    {
-    while (i < nBad && i == this->BadPoints->GetValue(i))
-      {
-      start = this->BadPoints->GetValue(i++) + 1;
-      }
-    if (start < nPoints)
-      {
-      end = nPoints;
-      }
-    else
-      {
-      // They are all bad points
-      return;
-      }
-    }
-  if (i < nBad)
+  this->CalculateUnscaledInputBounds();
+  for (int i = 0; i < 4; ++i)
     {
-    end = this->BadPoints->GetValue(i++);
+    bounds[i] = this->UnscaledInputBounds[i];
     }
-  else
+  if (this->LogX)
     {
-    end = nPoints;
+    bounds[0] = log10(bounds[0]);
+    bounds[1] = log10(bounds[1]);
     }
-  vtkVector2f* pts = static_cast<vtkVector2f*>(this->Points->GetVoidPointer(0));
-
-  // Initialize our min/max
-  bounds[0] = bounds[2] = std::numeric_limits<float>::max();
-  bounds[1] = bounds[3] = std::numeric_limits<float>::min();
-
-  while (start < nPoints)
+  if (this->LogY)
     {
-    // Calculate the min/max in this range
-    while (start < end)
-      {
-      float x = pts[start].GetX();
-      float y = pts[start++].GetY();
-      if (x < bounds[0])
-        {
-        bounds[0] = x;
-        }
-      else if (x > bounds[1])
-        {
-        bounds[1] = x;
-        }
-      if (y < bounds[2])
-        {
-        bounds[2] = y;
-        }
-      else if (y > bounds[3])
-        {
-        bounds[3] = y;
-        }
-      }
-    // Now figure out the next range
-    start = end + 1;
-    while (i < nBad && start == this->BadPoints->GetValue(i))
-      {
-      start = this->BadPoints->GetValue(i++) + 1;
-      }
-    if (++i < nBad)
-      {
-      end = this->BadPoints->GetValue(i);
-      }
-    else
-      {
-      end = nPoints;
-      }
+    bounds[2] = log10(bounds[2]);
+    bounds[3] = log10(bounds[3]);
     }
 }
 
diff --git a/Charts/Core/vtkPlotPoints.h b/Charts/Core/vtkPlotPoints.h
index fa391d1..0fbb72c 100644
--- a/Charts/Core/vtkPlotPoints.h
+++ b/Charts/Core/vtkPlotPoints.h
@@ -159,10 +159,18 @@ protected:
   ~vtkPlotPoints();
 
   // Description:
+  // Populate the data arrays ready to operate on input data.
+  bool GetDataArrays(vtkTable *table, vtkDataArray *array[2]);
+
+  // Description:
   // Update the table cache.
   bool UpdateTableCache(vtkTable *table);
 
   // Description:
+  // Calculate the unscaled input bounds from the input arrays.
+  void CalculateUnscaledInputBounds();
+
+  // Description:
   // Handle calculating the log of the x or y series if necessary. Should be
   // called by UpdateTableCache once the data has been updated in Points.
   void CalculateLogSeries();
diff --git a/Charts/Core/vtkScalarsToColorsItem.cxx b/Charts/Core/vtkScalarsToColorsItem.cxx
index ece3beb..d6f14ba 100644
--- a/Charts/Core/vtkScalarsToColorsItem.cxx
+++ b/Charts/Core/vtkScalarsToColorsItem.cxx
@@ -50,8 +50,8 @@ vtkScalarsToColorsItem::vtkScalarsToColorsItem()
 
   this->MaskAboveCurve = false;
 
-  this->UserBounds[0] = this->UserBounds[1] = this->UserBounds[2] =
-    this->UserBounds[3] = 0.0;
+  this->UserBounds[0] = this->UserBounds[2] = 0.0;
+  this->UserBounds[1] = this->UserBounds[3] = -1.0;
 }
 
 //-----------------------------------------------------------------------------
diff --git a/Common/Color/Testing/Cxx/CMakeLists.txt b/Common/Color/Testing/Cxx/CMakeLists.txt
index 269c212..2c8a54c 100644
--- a/Common/Color/Testing/Cxx/CMakeLists.txt
+++ b/Common/Color/Testing/Cxx/CMakeLists.txt
@@ -1,36 +1,10 @@
-set(CommonColorTests
+vtk_add_test_cxx(NO_VALID
   TestCategoricalColors.cxx
   TestNamedColors.cxx
-)
-
-if (VTK_DATA_ROOT)
-  set(CommonColorTestsWithArgs
-    TestColorSeries.cxx
   )
-endif()
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${CommonColorTests}
-  ${CommonColorTestsWithArgs}
-  EXTRA_INCLUDE vtkTestDriver.h
-)
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
+vtk_add_test_cxx(
+  TestColorSeries.cxx
+  )
 
-# Add all the executables
-foreach (test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if (VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-          -D ${VTK_DATA_ROOT}
-          -T ${VTK_TEST_OUTPUT_DIR}
-          -V Baseline/Common/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Common/Color/Testing/Cxx/TestNamedColors.cxx b/Common/Color/Testing/Cxx/TestNamedColors.cxx
index ed1afd7..5011930 100644
--- a/Common/Color/Testing/Cxx/TestNamedColors.cxx
+++ b/Common/Color/Testing/Cxx/TestNamedColors.cxx
@@ -27,7 +27,7 @@
 
 const int NUMBER_OF_SYNONYMS = 81;
 const int NUMBER_OF_COLORS = 283;
-const int PRINT_SELF_STRING_SIZE = 9242;
+const int PRINT_SELF_STRING_SIZE = 9243;
 // For comparing unsigned char converted to double precision.
 const double EPS1 = 0.004; // 1/255 = 0.0039
 // For comparing two double precision numbers.
@@ -90,7 +90,7 @@ bool TestEmptyColorName()
       << "returned an unsigned char color other than black."
       );
     return false;
-  }
+    }
   vtkColor3ub v3 = nc->GetColor3ub(name);
   if ( v3[0] != rr || v3[1] != rg || v3[2] != rb )
     {
@@ -99,7 +99,7 @@ bool TestEmptyColorName()
       << "returned an unsigned char color other than black."
       );
     return false;
-  }
+    }
   unsigned char ur, ug, ub;
   ur = ug = ub = 0;
   unsigned char ua = 0;
@@ -111,7 +111,7 @@ bool TestEmptyColorName()
       << "returned an unsigned char color other than black."
       );
     return false;
-  }
+    }
 
   // Reference color
   double rrd, rgd, rbd;
@@ -125,7 +125,7 @@ bool TestEmptyColorName()
       << "returned a double color other than black."
       );
     return false;
-  }
+    }
   vtkColor3d vd3 = nc->GetColor3d(name);
   if ( vd3[0] != rrd || vd3[1] != rgd || vd3[2] != rbd )
     {
@@ -134,7 +134,7 @@ bool TestEmptyColorName()
       << "returned a double color other than black."
       );
     return false;
-  }
+    }
   double dr, dg, db;
   dr = dg = db = 1;
   double da = 0;
@@ -146,7 +146,7 @@ bool TestEmptyColorName()
       << "returned an double color other than black."
       );
     return false;
-  }
+    }
   nc->GetColor(name,dr,dg,db);
   if ( dr != rrd || dg != rgd || db != rbd )
     {
@@ -155,7 +155,7 @@ bool TestEmptyColorName()
       << "returned an double color other than black."
       );
     return false;
-  }
+    }
   return true;
 }
 
@@ -170,7 +170,7 @@ bool TestNoSuchColor(vtkStdString const & name)
       << name << " exists when it shouldn't."
       );
     return false;
-  }
+    }
   return true;
 }
 
@@ -184,11 +184,11 @@ bool TestUnsignedChar(vtkStdString const & name)
   bool sameElements = true;
   for ( int i = 0; i < 4; ++i )
     {
-      if ( v[i] != cv[i] )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( v[i] != cv[i] )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -201,11 +201,11 @@ bool TestUnsignedChar(vtkStdString const & name)
   nc->GetColor(name,v);
   for ( int i = 0; i < 4; ++i )
     {
-      if ( v[i] != cv[i] )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( v[i] != cv[i] )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -219,11 +219,11 @@ bool TestUnsignedChar(vtkStdString const & name)
   nc->GetColor(name,vv);
   for ( int i = 0; i < 3; ++i )
     {
-      if ( vv[i] != cv[i] )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( vv[i] != cv[i] )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -239,13 +239,13 @@ bool TestUnsignedChar(vtkStdString const & name)
   unsigned char alpha;
   nc->GetColor(name, red, green, blue, alpha);
   if ( red != v[0] || green != v[1] || blue != v[2] || alpha != v[3] )
-  {
+    {
     vtkGenericWarningMacro(
       << "Fail: One of red, green blue or alpha do not match the array "
       << "for color: " << name
       );
     return false;
-  }
+    }
   return true;
 }
 
@@ -259,11 +259,11 @@ bool TestDouble(vtkStdString const & name)
   bool sameElements = true;
   for ( int i = 0; i < 4; ++i )
     {
-      if ( v[i] != cv[i] )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( v[i] != cv[i] )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -276,11 +276,11 @@ bool TestDouble(vtkStdString const & name)
   nc->GetColor(name,v);
   for ( int i = 0; i < 4; ++i )
     {
-      if ( v[i] != cv[i] )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( v[i] != cv[i] )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -294,11 +294,11 @@ bool TestDouble(vtkStdString const & name)
   nc->GetColor(name,vv);
   for ( int i = 0; i < 3; ++i )
     {
-      if ( vv[i] != cv[i] )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( vv[i] != cv[i] )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -314,13 +314,13 @@ bool TestDouble(vtkStdString const & name)
   double alpha;
   nc->GetColor(name, red, green, blue, alpha);
   if ( red != v[0] || green != v[1] || blue != v[2] || alpha != v[3] )
-  {
+    {
     vtkGenericWarningMacro(
       << "Fail: One of red, green blue or alpha do not match the array "
       << "for color: " << name
       );
     return false;
-  }
+    }
   return true;
 }
 
@@ -334,11 +334,11 @@ bool TestDoubleRGB(vtkStdString const & name)
   bool sameElements = true;
   for ( int i = 0; i < 3; ++i )
     {
-      if ( v[i] != cv[i] )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( v[i] != cv[i] )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -352,13 +352,13 @@ bool TestDoubleRGB(vtkStdString const & name)
   double blue;
   nc->GetColor(name, red, green, blue);
   if ( red != v[0] || green != v[1] || blue != v[2] )
-  {
+    {
     vtkGenericWarningMacro(
       << "Fail: One of red, green or blue do not match the array "
       << "for color: " << name
       );
     return false;
-  }
+    }
   return true;
 }
 
@@ -376,11 +376,11 @@ bool TestUCharToDouble(vtkStdString const & name)
   bool sameElements = true;
   for ( int i = 0; i < 4; ++i )
     {
-      if ( std::abs(vd[i] -  vdu[i]) > EPS2 )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( std::abs(vd[i] -  vdu[i]) > EPS2 )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -405,8 +405,8 @@ bool TestAddingAColor(vtkStdString name, const double dcolor[4],
   vtkColor3d d3;
   for(int i = 0; i < 3; ++i)
     {
-      ub3[i] = ub4[i];
-      d3[i] = d4[i];
+    ub3[i] = ub4[i];
+    d3[i] = d4[i];
     }
 
   // Test for adding empty names.
@@ -436,11 +436,11 @@ bool TestAddingAColor(vtkStdString name, const double dcolor[4],
   bool sameElements = true;
   for ( int i = 0; i < 4; ++i )
     {
-      if ( vu[i] != ucolor[i] )
-        {
-        sameElements &= false;
-        break;
-        }
+    if ( vu[i] != ucolor[i] )
+      {
+      sameElements &= false;
+      break;
+      }
     }
   if (!sameElements)
     {
@@ -590,14 +590,14 @@ bool TestAddingAColor(vtkStdString name, const double dcolor[4],
       }
     }
   if(!sameElements)
-  {
-  vtkGenericWarningMacro(
-    << "Fail: Set as double array get as vtkColor4d, colors do not match "
-    << "for color: " << name
-    );
-  nc->ResetColors();
-  return false;
-  }
+    {
+    vtkGenericWarningMacro(
+      << "Fail: Set as double array get as vtkColor4d, colors do not match "
+      << "for color: " << name
+      );
+    nc->ResetColors();
+    return false;
+    }
 
   nc->SetColor(name,d4);
   vd = nc->GetColor4d(name);
@@ -611,14 +611,14 @@ bool TestAddingAColor(vtkStdString name, const double dcolor[4],
       }
     }
   if(!sameElements)
-  {
-  vtkGenericWarningMacro(
-    << "Fail: Set as vtkColor4d get as vtkColor4d, colors do not match "
-    << "for color: " << name
-    );
-  nc->ResetColors();
-  return false;
-  }
+    {
+    vtkGenericWarningMacro(
+      << "Fail: Set as vtkColor4d get as vtkColor4d, colors do not match "
+      << "for color: " << name
+      );
+    nc->ResetColors();
+    return false;
+    }
 
   nc->SetColor(name,dr,dg,db,da);
   vd = nc->GetColor4d(name);
@@ -632,14 +632,14 @@ bool TestAddingAColor(vtkStdString name, const double dcolor[4],
       }
     }
   if(!sameElements)
-  {
-  vtkGenericWarningMacro(
-    << "Fail: Set as double values get as vtkColor4d, colors do not match "
-    << "for color: " << name
-    );
-  nc->ResetColors();
-  return false;
-  }
+    {
+    vtkGenericWarningMacro(
+      << "Fail: Set as double values get as vtkColor4d, colors do not match "
+      << "for color: " << name
+      );
+    nc->ResetColors();
+    return false;
+    }
 
   nc->SetColor(name,d3);
   vd = nc->GetColor4d(name);
@@ -653,14 +653,14 @@ bool TestAddingAColor(vtkStdString name, const double dcolor[4],
       }
     }
   if(!sameElements)
-  {
-  vtkGenericWarningMacro(
-    << "Fail: Set as vtkColor3d get as vtkColor4d, colors do not match "
-    << "for color: " << name
-    );
-  nc->ResetColors();
-  return false;
-  }
+    {
+    vtkGenericWarningMacro(
+      << "Fail: Set as vtkColor3d get as vtkColor4d, colors do not match "
+      << "for color: " << name
+      );
+    nc->ResetColors();
+    return false;
+    }
 
   nc->RemoveColor(name);
   sz = nc->GetNumberOfColors();
@@ -719,7 +719,7 @@ std::vector<std::vector<vtkStdString> > ParseSynonyms(
     syn.push_back(cn);
     start = end + 2;
     end = synonyms.find(synonymDelimiter,start);
-  }
+    }
   // Get the last set of synonyms.
   if(!synonyms.empty())
     {
@@ -773,14 +773,14 @@ int TestNamedColors(int vtkNotUsed(argc), char* vtkNotUsed(argv)[])
   const int colorsToSkip = 20;
   std::vector<vtkStdString> cn = ParseColorNames(nc->GetColorNames());
   for ( std::vector<vtkStdString>::const_iterator
-        p = cn.begin(); p != cn.end(); ++p )
+          p = cn.begin(); p != cn.end(); ++p )
     {
     counter++;
     // Skip some colors to make testing faster.
     if ( counter % colorsToSkip != 0 )
-    {
+      {
       continue;
-    }
+      }
 
     if ( !TestUnsignedChar(*p) )
       {
@@ -877,7 +877,7 @@ int TestNamedColors(int vtkNotUsed(argc), char* vtkNotUsed(argv)[])
     vtkSmartPointer<vtkStringArray>::New();
   nc->GetColorNames(vs);
   if ( vs->GetNumberOfValues() != NUMBER_OF_COLORS )
-  {
+    {
     vtkGenericWarningMacro(
       << "Fail: GetColorNames(), incorrect number of colors"
       << "found " <<
@@ -885,20 +885,20 @@ int TestNamedColors(int vtkNotUsed(argc), char* vtkNotUsed(argv)[])
       << NUMBER_OF_COLORS << " instead."
       );
     testResult &= false;
-  }
+    }
 
   std::ostringstream os;
   nc->PrintSelf(os,vtkIndent(2));
   //std::cout << os.str() << std::endl;
   if ( static_cast<int>(os.str().size()) != PRINT_SELF_STRING_SIZE )
-  {
+    {
     vtkGenericWarningMacro(
       << "Fail: PrintSelf() - a string of size " <<
       PRINT_SELF_STRING_SIZE << " was expected, got "
       << os.str().size() << " instead."
       );
     testResult &= false;
-  }
+    }
 
   if ( !testResult )
     {
diff --git a/Common/Color/Testing/Data/Baseline/TestColorSeries.png.md5 b/Common/Color/Testing/Data/Baseline/TestColorSeries.png.md5
new file mode 100644
index 0000000..4c26564
--- /dev/null
+++ b/Common/Color/Testing/Data/Baseline/TestColorSeries.png.md5
@@ -0,0 +1 @@
+1725f98fd59b88b55d94bc235348b144
diff --git a/Common/ComputationalGeometry/Testing/Data/Baseline/CSpline.png.md5 b/Common/ComputationalGeometry/Testing/Data/Baseline/CSpline.png.md5
new file mode 100644
index 0000000..06a5b6c
--- /dev/null
+++ b/Common/ComputationalGeometry/Testing/Data/Baseline/CSpline.png.md5
@@ -0,0 +1 @@
+0011734e3413a3496baaa1e6416fe8e4
diff --git a/Common/ComputationalGeometry/Testing/Data/Baseline/KSpline.png.md5 b/Common/ComputationalGeometry/Testing/Data/Baseline/KSpline.png.md5
new file mode 100644
index 0000000..0d20677
--- /dev/null
+++ b/Common/ComputationalGeometry/Testing/Data/Baseline/KSpline.png.md5
@@ -0,0 +1 @@
+d2edee14f362de59db8637fc02236ac3
diff --git a/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions.png.md5 b/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions.png.md5
new file mode 100644
index 0000000..10f0d88
--- /dev/null
+++ b/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions.png.md5
@@ -0,0 +1 @@
+921af8a6c23696111ce4b790d4ed7a88
diff --git a/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions_1.png.md5 b/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions_1.png.md5
new file mode 100644
index 0000000..2b37658
--- /dev/null
+++ b/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions_1.png.md5
@@ -0,0 +1 @@
+6a379064093cb0f639dd0aa85336422a
diff --git a/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions_2.png.md5 b/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions_2.png.md5
new file mode 100644
index 0000000..c73ae04
--- /dev/null
+++ b/Common/ComputationalGeometry/Testing/Data/Baseline/TestParametricFunctions_2.png.md5
@@ -0,0 +1 @@
+23a261a88f125c0b83a2ccfb83f248c1
diff --git a/Common/ComputationalGeometry/Testing/Data/Baseline/closedSplines.png.md5 b/Common/ComputationalGeometry/Testing/Data/Baseline/closedSplines.png.md5
new file mode 100644
index 0000000..fe84511
--- /dev/null
+++ b/Common/ComputationalGeometry/Testing/Data/Baseline/closedSplines.png.md5
@@ -0,0 +1 @@
+c8f3ba24539f409a6a2936303d9d9751
diff --git a/Common/ComputationalGeometry/Testing/Python/CMakeLists.txt b/Common/ComputationalGeometry/Testing/Python/CMakeLists.txt
index daabef3..8073a46 100644
--- a/Common/ComputationalGeometry/Testing/Python/CMakeLists.txt
+++ b/Common/ComputationalGeometry/Testing/Python/CMakeLists.txt
@@ -1,6 +1,4 @@
-add_test_python(CSpline.py Filtering)
-add_test_python(KSpline.py Filtering)
-add_test_python(closedSplines.py Filtering)
-if (VTK_DATA_ROOT)
- add_test_python1(TestParametricFunctions.py Baseline/Graphics)
-endif()
+vtk_add_test_python(CSpline.py)
+vtk_add_test_python(KSpline.py)
+vtk_add_test_python(TestParametricFunctions.py NO_RT)
+vtk_add_test_python(closedSplines.py)
diff --git a/Common/ComputationalGeometry/Testing/Python/TestParametricFunctions.py b/Common/ComputationalGeometry/Testing/Python/TestParametricFunctions.py
index 6b32b80..7908127 100755
--- a/Common/ComputationalGeometry/Testing/Python/TestParametricFunctions.py
+++ b/Common/ComputationalGeometry/Testing/Python/TestParametricFunctions.py
@@ -1,10 +1,6 @@
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 
-# Run this test like so:
-# vtkpython TestParametricFunctions.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics/
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Common/ComputationalGeometry/Testing/Tcl/CMakeLists.txt b/Common/ComputationalGeometry/Testing/Tcl/CMakeLists.txt
index b26aa67..ddf4e27 100644
--- a/Common/ComputationalGeometry/Testing/Tcl/CMakeLists.txt
+++ b/Common/ComputationalGeometry/Testing/Tcl/CMakeLists.txt
@@ -1,6 +1,4 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestParametricFunctions Graphics)
-endif()
-add_test_tcl(CSpline Filtering)
-add_test_tcl(KSpline Filtering)
-add_test_tcl(closedSplines Filtering)
+vtk_add_test_tcl(TestParametricFunctions)
+vtk_add_test_tcl(CSpline)
+vtk_add_test_tcl(KSpline)
+vtk_add_test_tcl(closedSplines)
diff --git a/Common/ComputationalGeometry/vtkCardinalSpline.cxx b/Common/ComputationalGeometry/vtkCardinalSpline.cxx
index 85be93f..43b8448 100644
--- a/Common/ComputationalGeometry/vtkCardinalSpline.cxx
+++ b/Common/ComputationalGeometry/vtkCardinalSpline.cxx
@@ -16,7 +16,7 @@
 
 #include "vtkObjectFactory.h"
 #include "vtkPiecewiseFunction.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkCardinalSpline);
 
@@ -101,10 +101,7 @@ void vtkCardinalSpline::Compute ()
   // copy the independent variables. Note that if the spline
   // is closed the first and last point are assumed repeated -
   // so we add an extra point
-  if (this->Intervals)
-    {
-    delete [] this->Intervals;
-    }
+  delete [] this->Intervals;
 
   if ( !this->Closed )
     {
@@ -119,10 +116,7 @@ void vtkCardinalSpline::Compute ()
     work = new double[size];
 
     // allocate memory for coefficients
-    if (this->Coefficients)
-      {
-      delete [] this->Coefficients;
-      }
+    delete [] this->Coefficients;
     this->Coefficients = new double [4*size];
 
     // allocate memory for dependent variables
@@ -166,10 +160,7 @@ void vtkCardinalSpline::Compute ()
     work = new double[size];
 
     // allocate memory for coefficients
-    if (this->Coefficients)
-      {
-      delete [] this->Coefficients;
-      }
+    delete [] this->Coefficients;
     this->Coefficients = new double [4*size];
 
     // allocate memory for dependent variables
diff --git a/Common/ComputationalGeometry/vtkKochanekSpline.cxx b/Common/ComputationalGeometry/vtkKochanekSpline.cxx
index 686a0be..e625896 100644
--- a/Common/ComputationalGeometry/vtkKochanekSpline.cxx
+++ b/Common/ComputationalGeometry/vtkKochanekSpline.cxx
@@ -105,10 +105,7 @@ void vtkKochanekSpline::Compute ()
   if ( !this->Closed )
     {
     // copy the independent variables
-    if (this->Intervals)
-      {
-      delete [] this->Intervals;
-      }
+    delete [] this->Intervals;
     this->Intervals = new double[size];
     ts = this->PiecewiseFunction->GetDataPointer ();
     for (i = 0; i < size; i++)
@@ -117,10 +114,7 @@ void vtkKochanekSpline::Compute ()
       }
 
     // allocate memory for coefficients
-    if (this->Coefficients)
-      {
-      delete [] this->Coefficients;
-      }
+    delete [] this->Coefficients;
     this->Coefficients = new double [4*size];
 
     // allocate memory for dependent variables
@@ -140,10 +134,7 @@ void vtkKochanekSpline::Compute ()
     {
     size = size + 1;
     // copy the independent variables
-    if (this->Intervals)
-      {
-      delete [] this->Intervals;
-      }
+    delete [] this->Intervals;
     this->Intervals = new double[size];
     ts = this->PiecewiseFunction->GetDataPointer ();
     for (i = 0; i < size-1; i++)
@@ -160,10 +151,7 @@ void vtkKochanekSpline::Compute ()
       }
 
     // allocate memory for coefficients
-    if (this->Coefficients)
-      {
-      delete [] this->Coefficients;
-      }
+    delete [] this->Coefficients;
     this->Coefficients = new double [4 * size];
 
     // allocate memory for dependent variables
diff --git a/Common/Core/CMakeLists.txt b/Common/Core/CMakeLists.txt
index a49c7f1..de46b7d 100644
--- a/Common/Core/CMakeLists.txt
+++ b/Common/Core/CMakeLists.txt
@@ -26,6 +26,56 @@ set(vtkCommonCore_EXPORT_OPTIONS
   VTK_ALL_NEW_OBJECT_FACTORY
   )
 
+# Choose which multi-threaded parallelism library to use
+set(VTK_SMP_IMPLEMENTATION_TYPE "Sequential" CACHE STRING
+  "Which multi-threaded parallelism implementation to use. Options are Sequential, Simple, Kaapi or TBB"
+)
+set_property(CACHE VTK_SMP_IMPLEMENTATION_TYPE PROPERTY STRINGS Sequential Simple Kaapi TBB)
+
+if( NOT ("${VTK_SMP_IMPLEMENTATION_TYPE}" STREQUAL "Kaapi" OR
+         "${VTK_SMP_IMPLEMENTATION_TYPE}" STREQUAL "TBB" OR
+         "${VTK_SMP_IMPLEMENTATION_TYPE}" STREQUAL "Simple") )
+  set(VTK_SMP_IMPLEMENTATION_TYPE "Sequential")
+endif()
+
+if ("${VTK_SMP_IMPLEMENTATION_TYPE}" STREQUAL "TBB")
+  find_package(TBB REQUIRED)
+  set(VTK_SMP_IMPLEMENTATION_LIBRARIES ${TBB_LIBRARY})
+  set(VTK_SMP_ATOMIC_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/SMP/TBB")
+  include_directories(${TBB_INCLUDE_DIRS})
+  # This needs to be here because all modules that include vtkAtomic.h
+  # need to include tbb/atomic.h
+  list(APPEND vtkCommonCore_SYSTEM_INCLUDE_DIRS ${TBB_INCLUDE_DIRS})
+elseif ("${VTK_SMP_IMPLEMENTATION_TYPE}" STREQUAL "Kaapi")
+  find_package(XKaapi REQUIRED)
+  include_directories(${XKAAPI_INCLUDE_DIRS})
+  set(VTK_SMP_IMPLEMENTATION_LIBRARIES ${XKAAPI_LIBRARIES})
+  set(VTK_SMP_ATOMIC_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/SMP/Sequential")
+  list(APPEND vtkCommonCore_SYSTEM_INCLUDE_DIRS ${XKAAPI_INCLUDE_DIRS})
+elseif ("${VTK_SMP_IMPLEMENTATION_TYPE}" STREQUAL "Simple")
+  set(VTK_SMP_IMPLEMENTATION_LIBRARIES)
+  set(VTK_SMP_ATOMIC_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/SMP/Sequential")
+  message(WARNING "The Simple backend for SMP operations is an experimental backend that is mainly used for debugging currently. We recommend that you use either the TBB or the Kaapi backend for production work. Use the Sequential backend if you would like to turn off any SMP parallelism.")
+elseif ("${VTK_SMP_IMPLEMENTATION_TYPE}" STREQUAL "Sequential")
+  set(VTK_SMP_IMPLEMENTATION_LIBRARIES)
+  set(VTK_SMP_ATOMIC_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}/SMP/Sequential")
+endif()
+
+if(EXISTS ${VTK_SMP_ATOMIC_DIRECTORY}/vtkAtomicInt.cxx)
+  set(VTK_ATOMIC_CXX_FILE ${VTK_SMP_ATOMIC_DIRECTORY}/vtkAtomicInt.cxx)
+else()
+  set(VTK_ATOMIC_CXX_FILE "")
+endif()
+
+configure_file(${VTK_SMP_ATOMIC_DIRECTORY}/vtkAtomicInt.h.in
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkAtomicInt.h COPYONLY)
+
+configure_file(SMP/${VTK_SMP_IMPLEMENTATION_TYPE}/vtkSMPThreadLocal.h.in
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkSMPThreadLocal.h COPYONLY)
+
+configure_file(SMP/${VTK_SMP_IMPLEMENTATION_TYPE}/vtkSMPToolsInternal.h.in
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkSMPToolsInternal.h COPYONLY)
+
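
The block above only decides which backend's headers are configured into vtkCommonCore; code written against the public SMP API does not change with the backend. A minimal, hypothetical sketch of such backend-agnostic usage, assuming the vtkSMPTools::For(first, last, functor) entry point and a functor exposing operator()(vtkIdType, vtkIdType) as referenced by the headers added below (SquareFunctor and the arrays are illustrative only):

    // Sketch only: VTK_SMP_IMPLEMENTATION_TYPE decides which
    // vtkSMPTools_Impl_For actually runs underneath this call.
    #include "vtkSMPTools.h"

    struct SquareFunctor
    {
      const double* Input;
      double* Output;

      // Called on sub-ranges of [0, n), possibly from several threads.
      void operator()(vtkIdType begin, vtkIdType end)
      {
        for (vtkIdType i = begin; i < end; ++i)
          {
          this->Output[i] = this->Input[i] * this->Input[i];
          }
      }
    };

    void SquareAll(const double* in, double* out, vtkIdType n)
    {
      SquareFunctor functor;
      functor.Input = in;
      functor.Output = out;
      vtkSMPTools::For(0, n, functor);
    }
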
 #-----------------------------------------------------------------------------
 
 include_directories(${VTK_SOURCE_DIR}/ThirdParty/utf8/source)
@@ -101,6 +151,7 @@ SET(Module_SRCS
   vtkLargeInteger.cxx
   vtkLongArray.cxx
   vtkLookupTable.cxx
+  vtkMappedDataArray.txx
   vtkMath.cxx
   vtkMinimalStandardRandomSequence.cxx
   vtkMultiThreader.cxx
@@ -123,12 +174,14 @@ SET(Module_SRCS
   vtkScalarsToColors.cxx
   vtkShortArray.cxx
   vtkSignedCharArray.cxx
+  vtkSimpleCriticalSection.cxx
   vtkSmartPointerBase.cxx
   vtkSortDataArray.cxx
   vtkStdString.cxx
   vtkStringArray.cxx
   vtkTimePointUtility.cxx
   vtkTimeStamp.cxx
+  vtkTypedDataArray.txx
   vtkUnicodeStringArray.cxx
   vtkUnicodeString.cxx
   vtkUnsignedCharArray.cxx
@@ -149,6 +202,13 @@ SET(Module_SRCS
   vtkTypeTemplate.h
   vtkType.h
   vtkSystemIncludes.h
+  ${VTK_ATOMIC_CXX_FILE}
+  vtkSMPThreadLocalObject.h
+  vtkSMPTools.h
+  SMP/${VTK_SMP_IMPLEMENTATION_TYPE}/vtkSMPTools.cxx
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkSMPToolsInternal.h
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkSMPThreadLocal.h
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkAtomicInt.h
   ${CMAKE_CURRENT_BINARY_DIR}/vtkConfigure.h
   ${CMAKE_CURRENT_BINARY_DIR}/vtkMathConfigure.h
   ${CMAKE_CURRENT_BINARY_DIR}/vtkVersionMacros.h
@@ -165,15 +225,19 @@ set(${vtk-module}_HDRS
   vtkArrayPrint.h
   vtkArrayPrint.txx
   vtkAutoInit.h
+  vtkDataArrayIteratorMacro.h
   vtkDataArrayTemplateImplicit.txx
   vtkIOStreamFwd.h
   vtkInformationInternals.h
+  vtkMappedDataArray.h
   vtkMathUtilities.h
   vtkNew.h
   vtkSetGet.h
   vtkSmartPointer.h
   vtkTemplateAliasMacro.h
   vtkTypeTraits.h
+  vtkTypedDataArray.h
+  vtkTypedDataArrayIterator.h
   vtkVariantCast.h
   vtkVariantCreate.h
   vtkVariantExtract.h
@@ -235,6 +299,13 @@ else()
   set(VTK_CONFIG_LEGACY_VERSION "#include \"vtkVersionMacros.h\" // removed by VTK_LEGACY_REMOVE")
 endif()
 
+# Check for atomic functions
+include(CheckSymbolExists)
+if (WIN32)
+  check_symbol_exists(InterlockedAdd "windows.h" VTK_HAS_INTERLOCKEDADD)
+endif()
+
+
 configure_file(vtkVersionMacros.h.in vtkVersionMacros.h @ONLY)
 configure_file(vtkConfigure.h.in vtkConfigure.h @ONLY)
 configure_file(vtkToolkits.h.in vtkToolkits.h @ONLY)
@@ -302,22 +373,22 @@ foreach(t Int8 Int16 Int32 Int64 UInt8 UInt16 UInt32 UInt64 Float32 Float64)
   if(VTK_TYPE_NATIVE)
     configure_file(${CMAKE_CURRENT_SOURCE_DIR}/vtkTypedArray.h.in
                    ${CMAKE_CURRENT_BINARY_DIR}/vtkType${t}Array.h
-                   @ONLY IMMEDIATE)
+                   @ONLY)
     configure_file(${CMAKE_CURRENT_SOURCE_DIR}/vtkTypedArray.cxx.in
                    ${CMAKE_CURRENT_BINARY_DIR}/vtkType${t}Array.cxx
-                   @ONLY IMMEDIATE)
+                   @ONLY)
     set(Module_SRCS ${Module_SRCS} ${CMAKE_CURRENT_BINARY_DIR}/vtkType${t}Array.cxx)
   endif()
 endforeach()
 
 # look for various headers and functions
-include(CheckSymbolExists)
+include(CheckCXXExpressionCompiles)
 include(CheckIncludeFile)
 
 # Check C++ <cmath> first, where the C++11 standard says these must be.
-check_symbol_exists(std::isnan "cmath" VTK_HAS_STD_ISNAN)
-check_symbol_exists(std::isinf "cmath" VTK_HAS_STD_ISINF)
-check_symbol_exists(std::isfinite "cmath" VTK_HAS_STD_ISFINITE)
+check_cxx_expression_compiles("std::isnan(0.0)" "cmath" VTK_HAS_STD_ISNAN)
+check_cxx_expression_compiles("std::isinf(0.0)" "cmath" VTK_HAS_STD_ISINF)
+check_cxx_expression_compiles("std::isfinite(0.0)" "cmath" VTK_HAS_STD_ISFINITE)
 
 # Check C99 <math.h> next, where the C99 standard says these must be.
 # (they will be found even if they are defined as macros)
@@ -421,6 +492,7 @@ set_source_files_properties(
   vtkOStrStreamWrapper.cxx
   vtkOStreamWrapper.cxx
   vtkOldStyleCallbackCommand.cxx
+  vtkSimpleCriticalSection.cxx
   vtkSmartPointerBase.cxx
   vtkStdString.cxx
   vtkTimeStamp.cxx
@@ -445,6 +517,7 @@ set_source_files_properties(
   vtkIOStreamFwd.h
   vtkInformationInternals.h
   vtkMathUtilities.h
+  vtkMappedDataArray.txx
   vtkNew.h
   vtkSetGet.h
   vtkSmartPointer.h
@@ -452,6 +525,7 @@ set_source_files_properties(
   vtkTemplateAliasMacro.h
   vtkTypeTraits.h
   vtkTypedArray.txx
+  vtkTypedDataArray.txx
   vtkVariantCast.h
   vtkVariantCreate.h
   vtkVariantExtract.h
@@ -466,6 +540,13 @@ set_source_files_properties(
   vtkMathConfigure.h
   vtkVersionMacros.h
   vtkTypeTemplate.h
+  vtkSMPThreadLocalObject.h
+  vtkSMPTools.h
+  SMP/${VTK_SMP_IMPLEMENTATION_TYPE}/vtkSMPTools.cxx
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkSMPToolsInternal.h
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkSMPThreadLocal.h
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkAtomicInt.h
+  ${VTK_ATOMIC_CXX_FILE}
 
   WRAP_EXCLUDE
   )
@@ -512,5 +593,4 @@ endif()
 
 vtk_module_library(vtkCommonCore ${Module_SRCS})
 
-target_link_libraries(vtkCommonCore ${CMAKE_THREAD_LIBS_INIT})
-set_target_properties(vtkCommonCore PROPERTIES LINK_INTERFACE_LIBRARIES "")
+target_link_libraries(vtkCommonCore LINK_PRIVATE ${CMAKE_THREAD_LIBS_INIT} LINK_PUBLIC ${VTK_SMP_IMPLEMENTATION_LIBRARIES})
diff --git a/Common/Core/SMP/Kaapi/vtkSMPThreadLocal.h.in b/Common/Core/SMP/Kaapi/vtkSMPThreadLocal.h.in
new file mode 100644
index 0000000..58b47a2
--- /dev/null
+++ b/Common/Core/SMP/Kaapi/vtkSMPThreadLocal.h.in
@@ -0,0 +1,239 @@
+ /*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPThreadLocal.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPThreadLocal - A Kaapi based thread local storage implementation.
+// .SECTION Description
+// A thread local object is one that maintains a copy of an object of the
+// template type for each thread that processes data. vtkSMPThreadLocal
+// creates storage for all threads but the actual objects are created
+// the first time Local() is called. Note that some of the vtkSMPThreadLocal
+// API is not thread safe. It can be safely used in a multi-threaded
+// environment because Local() returns storage specific to a particular
+// thread, which by default will be accessed sequentially. It is also
+// thread-safe to iterate over vtkSMPThreadLocal as long as each thread
+// creates its own iterator and does not change any of the thread local
+// objects.
+//
+// A common design pattern when using a thread local storage object is to
+// write/accumulate data to a local object when executing in parallel and
+// then to have a sequential code block that iterates over the whole storage
+// using the iterators to do the final accumulation.
+//
+// .SECTION Warning
+// There is absolutely no guarantee of the order in which the local objects
+// will be stored and hence of the order in which they will be traversed when
+// using iterators. You should not even assume that two vtkSMPThreadLocal
+// instances populated in the same parallel section will be populated in the
+// same order. For example, consider the following:
+// \verbatim
+// vtkSMPThreadLocal<int> Foo;
+// vtkSMPThreadLocal<int> Bar;
+// class AFunctor
+// {
+//    void Initialize() const
+//    {
+//        int& foo = Foo.Local();
+//        int& bar = Bar.Local();
+//        foo = random();
+//        bar = foo;
+//    }
+//
+//    void operator()(vtkIdType, vtkIdType) const
+//    {}
+// };
+//
+// AFunctor functor;
+// vtkParalllelUtilities::For(0, 100000, functor);
+//
+// vtkSMPThreadLocal<int>::iterator itr1 = Foo.begin();
+// vtkSMPThreadLocal<int>::iterator itr2 = Bar.begin();
+// while (itr1 != Foo.end())
+// {
+//   assert(*itr1 == *itr2);
+//   ++itr1; ++itr2;
+// }
+// \endverbatim
+//
+// It is possible and likely that the assert() will fail using the TBB
+// backend. So if you need to store values related to each other and
+// iterate over them together, use a struct or class to group them together
+// and use a thread local of that class.
+
+#ifndef __vtkSMPThreadLocal_h
+#define __vtkSMPThreadLocal_h
+
+#include "vtkSystemIncludes.h"
+#include <vector>
+
+#include <kaapic.h>
+
+VTKCOMMONCORE_EXPORT void vtkSMPToolsInitialize();
+
+template <typename T>
+class vtkSMPThreadLocal
+{
+  typedef std::vector<T> TLS;
+  typedef typename TLS::iterator TLSIter;
+public:
+  // Description:
+  // Default constructor. Creates a default exemplar.
+  vtkSMPThreadLocal() : Exemplar()
+    {
+      this->Initialize();
+    }
+
+  // Description:
+  // Constructor that allows the specification of an exemplar object
+  // which is used when constructing objects when Local() is first called.
+  // Note that a copy of the exemplar is created using its copy constructor.
+  vtkSMPThreadLocal(const T& exemplar)
+    {
+      this->Exemplar = exemplar;
+      this->Initialize();
+    }
+
+  // Description:
+  // Returns an object of type T that is local to the current thread.
+  // This needs to be called mainly within a threaded execution path.
+  // The first time it is called it will create a new object (local to the
+  // thread, so each thread gets its own when calling Local()) that is a copy
+  // of the exemplar passed to the constructor (or a default-constructed
+  // object if no exemplar was provided). After the first time, it will
+  // return the same object.
+  T& Local()
+    {
+      int tid = this->GetThreadID();
+      if (!this->Initialized[tid])
+        {
+        this->Internal[tid] = this->Exemplar;
+        this->Initialized[tid] = true;
+        }
+      return this->Internal[tid];
+    }
+
+  // Description:
+  // Subset of the standard iterator API.
+  // The most common design pattern is to use iterators in a sequential
+  // code block and to use only the thread local objects in parallel
+  // code blocks.
+  // It is thread safe to iterate over the thread local containers
+  // as long as each thread uses its own iterator and does not modify
+  // objects in the container.
+  class iterator
+  {
+  public:
+    iterator& operator++()
+      {
+        this->InitIter++;
+        this->Iter++;
+
+        // Make sure to skip uninitialized
+        // entries.
+        while(this->InitIter != this->EndIter)
+          {
+          if (*this->InitIter)
+            {
+            break;
+            }
+          this->InitIter++;
+          this->Iter++;
+          }
+        return *this;
+      }
+
+    bool operator!=(const iterator& other)
+      {
+        return this->Iter != other.Iter;
+      }
+
+    T& operator*()
+      {
+        return *this->Iter;
+      }
+
+  private:
+    friend class vtkSMPThreadLocal<T>;
+    std::vector<unsigned char>::iterator InitIter;
+    std::vector<unsigned char>::iterator EndIter;
+    TLSIter Iter;
+  };
+
+  // Description:
+  // Returns a new iterator pointing to the beginning of
+  // the local storage container. Thread safe.
+  iterator begin()
+    {
+      TLSIter iter = this->Internal.begin();
+      std::vector<unsigned char>::iterator iter2 =
+        this->Initialized.begin();
+      std::vector<unsigned char>::iterator enditer =
+        this->Initialized.end();
+      // fast forward to first initialized
+      // value
+      while(iter2 != enditer)
+        {
+        if (*iter2)
+          {
+          break;
+          }
+        iter2++;
+        iter++;
+        }
+      iterator retVal;
+      retVal.InitIter = iter2;
+      retVal.EndIter = enditer;
+      retVal.Iter = iter;
+      return retVal;
+    };
+
+  // Description:
+  // Returns a new iterator pointing to past the end of
+  // the local storage container. Thread safe.
+  iterator end()
+    {
+      iterator retVal;
+      retVal.InitIter = this->Initialized.end();
+      retVal.EndIter = this->Initialized.end();
+      retVal.Iter = this->Internal.end();
+      return retVal;
+    }
+
+private:
+  TLS Internal;
+  std::vector<unsigned char> Initialized;
+  T Exemplar;
+
+  void Initialize()
+    {
+      vtkSMPToolsInitialize();
+
+      this->Internal.resize(this->GetNumberOfThreads());
+      this->Initialized.resize(this->GetNumberOfThreads());
+      std::fill(this->Initialized.begin(),
+                this->Initialized.end(),
+                false);
+    }
+
+  inline int GetNumberOfThreads()
+    {
+      return kaapi_getconcurrency();
+    }
+
+  inline int GetThreadID()
+    {
+      return kaapi_get_self_kid();
+    }
+};
+#endif
+// VTK-HeaderTest-Exclude: vtkSMPThreadLocal.h
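
As the warning above recommends, values that must stay correlated across threads are best kept in a single thread-local struct rather than in two parallel vtkSMPThreadLocal instances. A small sketch of that recommendation; FooBar, AFunctor and CheckAfterParallelSection are illustrative names, not part of the patch:

    #include "vtkSMPThreadLocal.h"
    #include "vtkType.h" // for vtkIdType
    #include <cassert>
    #include <cstdlib>

    // Related per-thread values grouped in one struct so they are always
    // stored and traversed together.
    struct FooBar
    {
      FooBar() : Foo(0), Bar(0) {}
      int Foo;
      int Bar;
    };

    vtkSMPThreadLocal<FooBar> FooAndBar;

    struct AFunctor
    {
      void operator()(vtkIdType, vtkIdType)
        {
        FooBar& local = FooAndBar.Local();
        local.Foo = std::rand();
        local.Bar = local.Foo;
        }
    };

    void CheckAfterParallelSection()
    {
      // Whatever order the backend stores the locals in, Foo and Bar from
      // the same thread are always visited together.
      for (vtkSMPThreadLocal<FooBar>::iterator it = FooAndBar.begin();
           it != FooAndBar.end(); ++it)
        {
        assert((*it).Foo == (*it).Bar);
        }
    }
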
diff --git a/Common/Core/SMP/Kaapi/vtkSMPTools.cxx b/Common/Core/SMP/Kaapi/vtkSMPTools.cxx
new file mode 100644
index 0000000..245e356
--- /dev/null
+++ b/Common/Core/SMP/Kaapi/vtkSMPTools.cxx
@@ -0,0 +1,53 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPTools.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkSMPTools.h"
+
+#include "vtkCriticalSection.h"
+
+#include <kaapic.h>
+
+VTKCOMMONCORE_EXPORT void vtkSMPToolsInitialize()
+{
+  vtkSMPTools::Initialize(0);
+}
+
+struct vtkSMPToolsInit
+{
+  vtkSMPToolsInit()
+    {
+      kaapic_init(KAAPIC_START_ONLY_MAIN);
+    }
+
+  ~vtkSMPToolsInit()
+    {
+      kaapic_finalize();
+    }
+};
+
+static bool vtkSMPToolsInitialized = false;
+static vtkSimpleCriticalSection vtkSMPToolsCS;
+
+//--------------------------------------------------------------------------------
+void vtkSMPTools::Initialize(int)
+{
+  vtkSMPToolsCS.Lock();
+  if (!vtkSMPToolsInitialized)
+    {
+    static vtkSMPToolsInit aInit;
+    vtkSMPToolsInitialized = true;
+    }
+  vtkSMPToolsCS.Unlock();
+}
diff --git a/Common/Core/SMP/Kaapi/vtkSMPToolsInternal.h.in b/Common/Core/SMP/Kaapi/vtkSMPToolsInternal.h.in
new file mode 100644
index 0000000..b4a15e5
--- /dev/null
+++ b/Common/Core/SMP/Kaapi/vtkSMPToolsInternal.h.in
@@ -0,0 +1,56 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPToolsInternal.h.in
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include <kaapic.h>
+#include <cmath> // for sqrt(), used below to pick a default grain
+
+VTKCOMMONCORE_EXPORT void vtkSMPToolsInitialize();
+
+namespace vtk
+{
+namespace detail
+{
+namespace smp
+{
+template <typename T>
+inline void vtkSMPToolsDoFor(int32_t b, int32_t e, int32_t, T* o )
+{
+  o->Execute(b, e);
+}
+
+template <typename FunctorInternal>
+static void vtkSMPTools_Impl_For(
+  vtkIdType first, vtkIdType last, vtkIdType grain,
+  FunctorInternal& fi)
+{
+  vtkSMPToolsInitialize();
+
+  vtkIdType n = last - first;
+  if (!n)
+    {
+    return;
+    }
+
+  vtkIdType g = grain ? grain :
+    static_cast<vtkIdType>(std::sqrt(static_cast<double>(n)));
+
+  kaapic_begin_parallel(KAAPIC_FLAG_DEFAULT);
+  kaapic_foreach_attr_t attr;
+  kaapic_foreach_attr_init(&attr);
+  kaapic_foreach_attr_set_grains(&attr, g, g);
+  kaapic_foreach( first, last, &attr, 1, vtkSMPToolsDoFor<FunctorInternal>, &fi );
+  kaapic_end_parallel(KAAPIC_FLAG_DEFAULT);
+  kaapic_foreach_attr_destroy(&attr);
+}
+}
+}
+}
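
At this internal layer the functor handed to vtkSMPTools_Impl_For must expose Execute(begin, end); kaapic then splits [first, last) into grain-sized chunks (roughly sqrt(n) when no grain is requested) and calls Execute on each chunk. A hedged sketch of a functor meeting that contract (ScaleInPlace is an illustrative name; real callers normally go through the public vtkSMPTools layer rather than this header):

    #include "vtkType.h" // for vtkIdType

    // Internal-style functor: exposes Execute(begin, end), the contract
    // required by vtkSMPTools_Impl_For above. Each call handles one chunk.
    struct ScaleInPlace
    {
      double* Data;
      double Factor;

      void Execute(vtkIdType begin, vtkIdType end)
      {
        for (vtkIdType i = begin; i < end; ++i)
          {
          this->Data[i] *= this->Factor;
          }
      }
    };
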
diff --git a/Common/Core/SMP/Sequential/vtkAtomicInt.cxx b/Common/Core/SMP/Sequential/vtkAtomicInt.cxx
new file mode 100644
index 0000000..da835c6
--- /dev/null
+++ b/Common/Core/SMP/Sequential/vtkAtomicInt.cxx
@@ -0,0 +1,255 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkAtomicInt.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkAtomicInt.h"
+
+#if !defined(VTK_HAS_ATOMIC64) || !defined(VTK_HAS_ATOMIC32)
+# include "vtkSimpleCriticalSection.h"
+#elif defined(VTK_WINDOWS_ATOMIC)
+# include "vtkWindows.h"
+#endif
+
+namespace detail
+{
+
+#if !defined(VTK_HAS_ATOMIC32)
+vtkAtomicIntImpl<vtkTypeInt32>::vtkAtomicIntImpl()
+{
+  this->AtomicInt32CritSec = new vtkSimpleCriticalSection;
+}
+
+vtkAtomicIntImpl<vtkTypeInt32>::~vtkAtomicIntImpl()
+{
+  delete this->AtomicInt32CritSec;
+}
+
+vtkTypeInt32 vtkAtomicIntImpl<vtkTypeInt32>::operator++()
+{
+  if (!this->AtomicInt32CritSec)
+    {
+    return 0;
+    }
+  vtkTypeInt32 val;
+  this->AtomicInt32CritSec->Lock();
+  val = ++this->Value;
+  this->AtomicInt32CritSec->Unlock();
+  return val;
+}
+
+vtkTypeInt32 vtkAtomicIntImpl<vtkTypeInt32>::operator--()
+{
+  if (!this->AtomicInt32CritSec)
+    {
+    return 0;
+    }
+  vtkTypeInt32 val;
+  this->AtomicInt32CritSec->Lock();
+  val = --this->Value;
+  this->AtomicInt32CritSec->Unlock();
+  return val;
+}
+
+vtkTypeInt32 vtkAtomicIntImpl<vtkTypeInt32>::operator+=(vtkTypeInt32 val)
+{
+  if (!this->AtomicInt32CritSec)
+    {
+    return 0;
+    }
+  vtkTypeInt32 val2;
+  this->AtomicInt32CritSec->Lock();
+  val2 = (this->Value += val);
+  this->AtomicInt32CritSec->Unlock();
+  return val2;
+}
+
+vtkTypeInt32 vtkAtomicIntImpl<vtkTypeInt32>::load() const
+{
+  if (!this->AtomicInt32CritSec)
+    {
+    return 0;
+    }
+  vtkTypeInt32 val;
+  this->AtomicInt32CritSec->Lock();
+  val = this->Value;
+  this->AtomicInt32CritSec->Unlock();
+  return val;
+}
+
+void vtkAtomicIntImpl<vtkTypeInt32>::store(vtkTypeInt32 val)
+{
+  if (!this->AtomicInt32CritSec)
+    {
+    return;
+    }
+  this->AtomicInt32CritSec->Lock();
+  this->Value = val;
+  this->AtomicInt32CritSec->Unlock();
+}
+#elif defined (VTK_WINDOWS_ATOMIC)
+vtkAtomicIntImpl<vtkTypeInt32>::vtkAtomicIntImpl()
+{
+}
+
+vtkAtomicIntImpl<vtkTypeInt32>::~vtkAtomicIntImpl()
+{
+}
+
+vtkTypeInt32 vtkAtomicIntImpl<vtkTypeInt32>::operator++()
+{
+  return InterlockedIncrement((long*)&this->Value);
+}
+
+vtkTypeInt32 vtkAtomicIntImpl<vtkTypeInt32>::operator--()
+{
+  return InterlockedDecrement((long*)&this->Value);
+}
+
+vtkTypeInt32 vtkAtomicIntImpl<vtkTypeInt32>::operator+=(vtkTypeInt32 val)
+{
+# if defined(VTK_HAS_INTERLOCKEDADD)
+  return InterlockedAdd((long*)&this->Value, val);
+# else
+  return InterlockedExchangeAdd((long*)&this->Value, val) + val;
+# endif
+}
+
+vtkTypeInt32 vtkAtomicIntImpl<vtkTypeInt32>::load() const
+{
+  long retval;
+  InterlockedExchange(&retval, this->Value);
+  return retval;
+}
+
+void vtkAtomicIntImpl<vtkTypeInt32>::store(vtkTypeInt32 val)
+{
+  InterlockedExchange((long*)&this->Value, val);
+}
+#endif // !defined(VTK_HAS_ATOMIC32)
+
+#if !defined(VTK_HAS_ATOMIC64)
+vtkAtomicIntImpl<vtkTypeInt64>::vtkAtomicIntImpl()
+{
+  this->AtomicInt64CritSec = new vtkSimpleCriticalSection;
+}
+
+vtkAtomicIntImpl<vtkTypeInt64>::~vtkAtomicIntImpl()
+{
+  delete this->AtomicInt64CritSec;
+}
+
+vtkTypeInt64 vtkAtomicIntImpl<vtkTypeInt64>::operator++()
+{
+  if (!this->AtomicInt64CritSec)
+    {
+    return 0;
+    }
+  vtkTypeInt64 val;
+  this->AtomicInt64CritSec->Lock();
+  val = ++this->Value;
+  this->AtomicInt64CritSec->Unlock();
+  return val;
+}
+
+
+vtkTypeInt64 vtkAtomicIntImpl<vtkTypeInt64>::operator--()
+{
+  if (!this->AtomicInt64CritSec)
+    {
+    return 0;
+    }
+  vtkTypeInt64 val;
+  this->AtomicInt64CritSec->Lock();
+  val = --this->Value;
+  this->AtomicInt64CritSec->Unlock();
+  return val;
+}
+
+vtkTypeInt64 vtkAtomicIntImpl<vtkTypeInt64>::operator+=(vtkTypeInt64 val)
+{
+  if (!this->AtomicInt64CritSec)
+    {
+    return 0;
+    }
+  vtkTypeInt64 val2;
+  this->AtomicInt64CritSec->Lock();
+  val2 = (this->Value += val);
+  this->AtomicInt64CritSec->Unlock();
+  return val2;
+}
+
+vtkTypeInt64 vtkAtomicIntImpl<vtkTypeInt64>::load() const
+{
+  if (!this->AtomicInt64CritSec)
+    {
+    return 0;
+    }
+  vtkTypeInt64 val;
+  this->AtomicInt64CritSec->Lock();
+  val = this->Value;
+  this->AtomicInt64CritSec->Unlock();
+  return val;
+}
+
+void vtkAtomicIntImpl<vtkTypeInt64>::store(vtkTypeInt64 val)
+{
+  if (!this->AtomicInt64CritSec)
+    {
+    return;
+    }
+  this->AtomicInt64CritSec->Lock();
+  this->Value = val;
+  this->AtomicInt64CritSec->Unlock();
+}
+#elif defined (VTK_WINDOWS_ATOMIC)
+vtkAtomicIntImpl<vtkTypeInt64>::vtkAtomicIntImpl()
+{
+}
+
+vtkAtomicIntImpl<vtkTypeInt64>::~vtkAtomicIntImpl()
+{
+}
+
+vtkTypeInt64 vtkAtomicIntImpl<vtkTypeInt64>::operator++()
+{
+  return InterlockedIncrement64(&this->Value);
+}
+
+
+vtkTypeInt64 vtkAtomicIntImpl<vtkTypeInt64>::operator--()
+{
+  return InterlockedDecrement64(&this->Value);
+}
+
+vtkTypeInt64 vtkAtomicIntImpl<vtkTypeInt64>::operator+=(vtkTypeInt64 val)
+{
+# if defined(VTK_HAS_INTERLOCKEDADD)
+  return InterlockedAdd64(&this->Value, val);
+# else
+  return InterlockedExchangeAdd64(&this->Value, val) + val;
+# endif
+}
+
+vtkTypeInt64 vtkAtomicIntImpl<vtkTypeInt64>::load() const
+{
+  vtkTypeInt64 retval;
+  InterlockedExchange64(&retval, this->Value);
+  return retval;
+}
+
+void vtkAtomicIntImpl<vtkTypeInt64>::store(vtkTypeInt64 val)
+{
+  InterlockedExchange64(&this->Value, val);
+}
+#endif // !defined(VTK_HAS_ATOMIC64)
+}
diff --git a/Common/Core/SMP/Sequential/vtkAtomicInt.h.in b/Common/Core/SMP/Sequential/vtkAtomicInt.h.in
new file mode 100644
index 0000000..362cdd5
--- /dev/null
+++ b/Common/Core/SMP/Sequential/vtkAtomicInt.h.in
@@ -0,0 +1,379 @@
+ /*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkAtomicInt.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkAtomicInt - Provides support for atomic integers
+// .SECTION Description
+// Objects of atomic types are C++ objects that are free from data races;
+// that is, if one thread writes to an atomic object while another thread
+// reads from it, the behavior is well-defined. vtkAtomicInt provides
+// a subset of the std::atomic API and implementation, mainly for 32 bit
+// and 64 bit signed integers. For these types, vtkAtomicInt defines a
+// number of operations that happen atomically - without interruption
+// by another thread. Furthermore, these operations happen in a
+// sequentially-consistent way and use full memory fences. This means
+// that operations relating to atomic variables happen in the specified
+// order and the results are made visible to other processing cores to
+// guarantee proper sequential operation. Other memory access patterns
+// supported by std::atomic are not currently supported.
+//
+// Note that when atomic operations are not available on a particular
+// platform or compiler, mutexes, which are significantly slower, are used
+// as a fallback.
+
+#ifndef __vtkAtomicInt_h
+#define __vtkAtomicInt_h
+
+#include "vtkCommonCoreModule.h" // For export macro
+#include "vtkSystemIncludes.h"
+#include "vtkConfigure.h"
+
+#if defined(__APPLE__)
+# include <libkern/OSAtomic.h>
+#endif
+
+#if (defined(_WIN32)  && !defined(__MINGW32__))
+# define VTK_WINDOWS_ATOMIC
+# if defined(_MSC_VER)
+#  pragma warning(disable:4324) // disable warning about the padding due to alignment
+# endif
+#endif
+
+#if defined(VTK_WINDOWS_ATOMIC) || defined(__APPLE__) || defined(VTK_HAVE_SYNC_BUILTINS)
+# define VTK_HAS_ATOMIC32
+#endif
+
+// Overall, we assume that 64 bit atomic operations are not available on 32
+// bit systems.
+#if VTK_SIZEOF_VOID_P == 8
+# if defined(VTK_WINDOWS_ATOMIC) || defined(__APPLE__) || defined(VTK_HAVE_SYNC_BUILTINS)
+#  define VTK_HAS_ATOMIC64
+# endif
+#endif
+
+#if !defined(VTK_HAS_ATOMIC64) || !defined(VTK_HAS_ATOMIC32)
+class vtkSimpleCriticalSection;
+#endif
+
+// Below are the actual implementations of 32 and 64 bit atomic operations.
+namespace detail
+{
+#if defined (VTK_WINDOWS_ATOMIC)
+# define VTK__ALIGN32 __declspec(align(32))
+#else
+# define VTK__ALIGN32
+#endif
+
+template <typename T> class vtkAtomicIntImpl;
+
+template <>
+#if defined(VTK_HAS_ATOMIC32) && !defined(VTK_WINDOWS_ATOMIC)
+class vtkAtomicIntImpl<vtkTypeInt32>
+#else
+class VTKCOMMONCORE_EXPORT vtkAtomicIntImpl<vtkTypeInt32>
+#endif
+{
+public:
+
+  // Description:
+  // Atomic pre-increment.
+#if defined(VTK_HAS_ATOMIC32) && !defined(VTK_WINDOWS_ATOMIC)
+  vtkTypeInt32 operator++()
+    {
+# if defined(__APPLE__)
+    return OSAtomicIncrement32Barrier(&this->Value);
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+    return __sync_add_and_fetch(&this->Value, 1);
+
+# endif
+    }
+
+  // Description:
+  // Atomic pre-decrement.
+  vtkTypeInt32 operator--()
+    {
+# if defined(__APPLE__)
+    return OSAtomicDecrement32Barrier(&this->Value);
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+    return __sync_sub_and_fetch(&this->Value, 1);
+
+# endif
+    }
+
+  // Description:
+  // Atomic add. Returns value after addition.
+  vtkTypeInt32 operator+=(vtkTypeInt32 val)
+    {
+# if defined(__APPLE__)
+    return OSAtomicAdd32Barrier(val, &this->Value);
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+    return __sync_add_and_fetch(&this->Value, val);
+
+# endif
+    }
+
+  // Description:
+  // Atomic fetch.
+  vtkTypeInt32 load() const
+    {
+# if defined(__APPLE__)
+    vtkTypeInt32 retval = 0;
+    OSAtomicCompareAndSwap32Barrier(retval, this->Value, &retval);
+    return retval;
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+    vtkTypeInt32 retval = 0;
+    __sync_val_compare_and_swap(&retval, retval, this->Value);
+    return retval;
+
+# endif
+    }
+
+  // Description:
+  // Atomic store.
+  void store(vtkTypeInt32 val)
+    {
+# if defined(__APPLE__)
+    OSAtomicCompareAndSwap32Barrier(this->Value, val, &this->Value);
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+  __sync_val_compare_and_swap(&this->Value, this->Value, val);
+
+#endif
+    }
+
+#else // defined(VTK_HAS_ATOMIC32) && !defined(VTK_WINDOWS_ATOMIC)
+
+  // These methods are for when using a mutex. Same as above.
+  // A virtual destructor is used because the mutex is constructed
+  // with new to avoid including the Windows header in the .h file.
+  vtkAtomicIntImpl<vtkTypeInt32>();
+  virtual ~vtkAtomicIntImpl<vtkTypeInt32>();
+  vtkTypeInt32 operator++();
+  vtkTypeInt32 operator--();
+  vtkTypeInt32 operator+=(vtkTypeInt32 val);
+  vtkTypeInt32 load() const;
+  void store(vtkTypeInt32 val);
+
+#endif // defined(VTK_HAS_ATOMIC32) && !defined(VTK_WINDOWS_ATOMIC)
+
+protected:
+  // Explicitly aligning Value on Windows is probably not necessary
+  // since the compiler should automatically do it. Just being extra
+  // cautious since the InterlockedXXX() functions require alignment.
+  VTK__ALIGN32 vtkTypeInt32 Value;
+
+#if !defined(VTK_HAS_ATOMIC32)
+  vtkSimpleCriticalSection* AtomicInt32CritSec;
+#endif
+};
+
+#if defined (VTK_WINDOWS_ATOMIC)
+# define VTK__ALIGN64 __declspec(align(64))
+#else
+# define VTK__ALIGN64
+#endif
+
+template <>
+#if defined(VTK_HAS_ATOMIC64) && !defined(VTK_WINDOWS_ATOMIC)
+class vtkAtomicIntImpl<vtkTypeInt64>
+#else
+class VTKCOMMONCORE_EXPORT vtkAtomicIntImpl<vtkTypeInt64>
+#endif
+{
+public:
+
+  // Description:
+  // Atomic pre-increment.
+#if defined(VTK_HAS_ATOMIC64) && !defined(VTK_WINDOWS_ATOMIC)
+  vtkTypeInt64 operator++()
+    {
+# if defined(__APPLE__)
+    return OSAtomicIncrement64Barrier(&this->Value);
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+    return __sync_add_and_fetch(&this->Value, 1);
+
+# endif
+    }
+
+  // Description:
+  // Atomic pre-decrement.
+  vtkTypeInt64 operator--()
+    {
+# if defined(__APPLE__)
+    return OSAtomicDecrement64Barrier(&this->Value);
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+    return __sync_sub_and_fetch(&this->Value, 1);
+
+# endif
+    }
+
+  // Description:
+  // Atomic add. Returns value after addition.
+  vtkTypeInt64 operator+=(vtkTypeInt64 val)
+    {
+# if defined(__APPLE__)
+    return OSAtomicAdd64Barrier(val, &this->Value);
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+    return __sync_add_and_fetch(&this->Value, val);
+
+# endif
+    }
+
+  // Description:
+  // Atomic fetch.
+  vtkTypeInt64 load() const
+    {
+# if defined(__APPLE__)
+  vtkTypeInt64 retval = 0;
+  OSAtomicCompareAndSwap64Barrier(retval, this->Value, &retval);
+  return retval;
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+  vtkTypeInt64 retval = 0;
+  __sync_val_compare_and_swap(&retval, retval, this->Value);
+  return retval;
+
+# endif
+    }
+
+  // Description:
+  // Atomic store.
+  void store(vtkTypeInt64 val)
+    {
+# if defined(__APPLE__)
+  OSAtomicCompareAndSwap64Barrier(this->Value, val, &this->Value);
+
+// GCC, CLANG, etc
+# elif defined(VTK_HAVE_SYNC_BUILTINS)
+  __sync_val_compare_and_swap(&this->Value, this->Value, val);
+
+# endif
+    }
+
+#else // defined(VTK_HAS_ATOMIC64) && !defined(VTK_WINDOWS_ATOMIC)
+
+  // These methods are for when using a mutex. Same as above.
+  // A virtual destructor is used because the mutex is constructed
+  // with new to avoid including the Windows header in the .h file.
+  vtkAtomicIntImpl<vtkTypeInt64>();
+  virtual ~vtkAtomicIntImpl<vtkTypeInt64>();
+  vtkTypeInt64 operator++();
+  vtkTypeInt64 operator--();
+  vtkTypeInt64 operator+=(vtkTypeInt64 val);
+  vtkTypeInt64 load() const;
+  void store(vtkTypeInt64 val);
+
+#endif // defined(VTK_HAS_ATOMIC64) && !defined(VTK_WINDOWS_ATOMIC)
+
+protected:
+  // Explicitly aligning Value on Windows is probably not necessary
+  // since the compiler should automatically do it. Just being extra
+  // cautious since the InterlockedXXX() functions require alignment.
+  VTK__ALIGN64 vtkTypeInt64 Value;
+
+#if !defined(VTK_HAS_ATOMIC64)
+  vtkSimpleCriticalSection* AtomicInt64CritSec;
+#endif
+};
+}
+
+template <typename T>
+class vtkAtomicInt: public detail::vtkAtomicIntImpl<T>
+{
+  typedef detail::vtkAtomicIntImpl<T> Superclass;
+
+public:
+  // Description:
+  // Default constructor. Not atomic.
+  vtkAtomicInt()
+    {
+    this->Value = 0;
+    }
+
+  // Description:
+  // Constructor with initialization. Not atomic.
+  vtkAtomicInt(T val)
+    {
+    this->Value = val;
+    }
+
+  // Description:
+  // Atomic pre-increment.
+  T operator++()
+    {
+    return this->Superclass::operator++();
+    }
+
+  // Description:
+  // Atomic post-increment.
+  T operator++(int)
+    {
+    return this->operator++() - 1;
+    }
+
+  // Description:
+  // Atomic pre-decrement.
+  T operator--()
+    {
+    return this->Superclass::operator--();
+    }
+
+  // Description:
+  // Atomic post-decrement.
+  T operator--(int)
+    {
+    return this->operator--() + 1;
+    }
+
+  // Description:
+  // Atomic subtraction. Returns value after.
+  T operator-=(T val)
+    {
+    return this->operator+=(-val);
+    }
+
+  // Description:
+  // Atomic load.
+  operator T() const
+    {
+    return this->load();
+    }
+
+  // Description:
+  // Atomic store.
+  T operator=(T val)
+    {
+    this->store(val);
+    return val;
+    }
+};
+
+
+#endif
+// VTK-HeaderTest-Exclude: vtkAtomicInt.h
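
A brief usage sketch of the class above: a shared counter that several threads can update without explicit locking, taking the critical-section fallback on platforms without native atomics. ProcessedCells, CountCells and GetProcessedCells are illustrative names only:

    #include "vtkAtomicInt.h"
    #include "vtkType.h" // for vtkTypeInt64 and vtkIdType

    // Shared counter; ++ and += are atomic with full fences where native
    // atomics exist, and mutex-protected otherwise.
    static vtkAtomicInt<vtkTypeInt64> ProcessedCells(0);

    void CountCells(vtkIdType numCellsInChunk)
    {
      // Safe to call concurrently from several threads.
      ProcessedCells += numCellsInChunk;
    }

    vtkTypeInt64 GetProcessedCells()
    {
      // operator T() performs an atomic load.
      return ProcessedCells;
    }
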
diff --git a/Common/Core/SMP/Sequential/vtkSMPThreadLocal.h.in b/Common/Core/SMP/Sequential/vtkSMPThreadLocal.h.in
new file mode 100644
index 0000000..7ca1426
--- /dev/null
+++ b/Common/Core/SMP/Sequential/vtkSMPThreadLocal.h.in
@@ -0,0 +1,195 @@
+ /*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPThreadLocal.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPThreadLocal - A simple thread local implementation for sequential operations.
+// .SECTION Description
+// A thread local object is one that maintains a copy of an object of the
+// template type for each thread that processes data. vtkSMPThreadLocal
+// creates storage for all threads but the actual objects are created
+// the first time Local() is called. Note that some of the vtkSMPThreadLocal
+// API is not thread safe. It can be safely used in a multi-threaded
+// environment because Local() returns storage specific to a particular
+// thread, which by default will be accessed sequentially. It is also
+// thread-safe to iterate over vtkSMPThreadLocal as long as each thread
+// creates its own iterator and does not change any of the thread local
+// objects.
+//
+// A common design pattern when using a thread local storage object is to
+// write/accumulate data to a local object when executing in parallel and
+// then to have a sequential code block that iterates over the whole storage
+// using the iterators to do the final accumulation.
+//
+// Note that this particular implementation is designed to work in sequential
+// mode and supports only one thread.
+
+#ifndef __vtkSMPThreadLocal_h
+#define __vtkSMPThreadLocal_h
+
+#include "vtkSystemIncludes.h"
+#include <vector>
+
+template <typename T>
+class vtkSMPThreadLocal
+{
+  typedef std::vector<T> TLS;
+  typedef typename TLS::iterator TLSIter;
+public:
+  // Description:
+  // Default constructor. Creates a default exemplar.
+  vtkSMPThreadLocal()
+    {
+      this->Initialize();
+    }
+
+  // Description:
+  // Constructor that allows the specification of an exemplar object
+  // which is used when constructing objects when Local() is first called.
+  // Note that a copy of the exemplar is created using its copy constructor.
+  vtkSMPThreadLocal(const T& exemplar) : Exemplar(exemplar)
+    {
+      this->Initialize();
+    }
+
+  // Description:
+  // Returns an object of type T that is local to the current thread.
+  // This needs to be called mainly within a threaded execution path.
+  // The first time it is called it will create a new object (local to the
+  // thread, so each thread gets its own when calling Local()) that is a copy
+  // of the exemplar passed to the constructor (or a default-constructed
+  // object if no exemplar was provided). After the first time, it will
+  // return the same object.
+  T& Local()
+    {
+      int tid = this->GetThreadID();
+      if (!this->Initialized[tid])
+        {
+        this->Internal[tid] = this->Exemplar;
+        this->Initialized[tid] = true;
+        }
+      return this->Internal[tid];
+    }
+
+  // Description:
+  // Subset of the standard iterator API.
+  // The most common design pattern is to use iterators in a sequential
+  // code block and to use only the thread local objects in parallel
+  // code blocks.
+  // It is thread safe to iterate over the thread local containers
+  // as long as each thread uses its own iterator and does not modify
+  // objects in the container.
+  class iterator
+  {
+  public:
+    iterator& operator++()
+      {
+        this->InitIter++;
+        this->Iter++;
+
+        // Make sure to skip uninitialized
+        // entries.
+        while(this->InitIter != this->EndIter)
+          {
+          if (*this->InitIter)
+            {
+            break;
+            }
+          this->InitIter++;
+          this->Iter++;
+          }
+        return *this;
+      }
+
+    bool operator!=(const iterator& other)
+      {
+        return this->Iter != other.Iter;
+      }
+
+    T& operator*()
+      {
+        return *this->Iter;
+      }
+
+  private:
+    friend class vtkSMPThreadLocal<T>;
+    std::vector<bool>::iterator InitIter;
+    std::vector<bool>::iterator EndIter;
+    TLSIter Iter;
+  };
+
+  // Description:
+  // Returns a new iterator pointing to the beginning of
+  // the local storage container. Thread safe.
+  iterator begin()
+    {
+      TLSIter iter = this->Internal.begin();
+      std::vector<bool>::iterator iter2 =
+        this->Initialized.begin();
+      std::vector<bool>::iterator enditer =
+        this->Initialized.end();
+      // fast forward to first initialized
+      // value
+      while(iter2 != enditer)
+        {
+        if (*iter2)
+          {
+          break;
+          }
+        iter2++;
+        iter++;
+        }
+      iterator retVal;
+      retVal.InitIter = iter2;
+      retVal.EndIter = enditer;
+      retVal.Iter = iter;
+      return retVal;
+    };
+
+  // Description:
+  // Returns a new iterator pointing to past the end of
+  // the local storage container. Thread safe.
+  iterator end()
+    {
+      iterator retVal;
+      retVal.InitIter = this->Initialized.end();
+      retVal.EndIter = this->Initialized.end();
+      retVal.Iter = this->Internal.end();
+      return retVal;
+    }
+
+private:
+  TLS Internal;
+  std::vector<bool> Initialized;
+  T Exemplar;
+
+  void Initialize()
+    {
+      this->Internal.resize(this->GetNumberOfThreads());
+      this->Initialized.resize(this->GetNumberOfThreads());
+      std::fill(this->Initialized.begin(),
+                this->Initialized.end(),
+                false);
+    }
+
+  inline int GetNumberOfThreads()
+    {
+      return 1;
+    }
+
+  inline int GetThreadID()
+    {
+      return 0;
+    }
+};
+#endif
+// VTK-HeaderTest-Exclude: vtkSMPThreadLocal.h
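
The accumulation pattern described in the class documentation, written out as a sketch: each thread sums into its own local value during the parallel phase, and a sequential loop over begin()/end() folds the per-thread partials afterwards. SumFunctor and ParallelSum are illustrative names, and the vtkSMPTools::For(first, last, functor) entry point is assumed from the public API:

    #include "vtkSMPThreadLocal.h"
    #include "vtkSMPTools.h"

    struct SumFunctor
    {
      const double* Values;
      vtkSMPThreadLocal<double>* PartialSums;

      void operator()(vtkIdType begin, vtkIdType end)
      {
        // Accumulate into this thread's private partial sum.
        double& localSum = this->PartialSums->Local();
        for (vtkIdType i = begin; i < end; ++i)
          {
          localSum += this->Values[i];
          }
      }
    };

    double ParallelSum(const double* values, vtkIdType n)
    {
      vtkSMPThreadLocal<double> partialSums(0.0);
      SumFunctor functor;
      functor.Values = values;
      functor.PartialSums = &partialSums;
      vtkSMPTools::For(0, n, functor);

      // Sequential reduction over the per-thread partials.
      double total = 0.0;
      for (vtkSMPThreadLocal<double>::iterator it = partialSums.begin();
           it != partialSums.end(); ++it)
        {
        total += *it;
        }
      return total;
    }
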
diff --git a/Common/Core/SMP/Sequential/vtkSMPTools.cxx b/Common/Core/SMP/Sequential/vtkSMPTools.cxx
new file mode 100644
index 0000000..6b7fd25
--- /dev/null
+++ b/Common/Core/SMP/Sequential/vtkSMPTools.cxx
@@ -0,0 +1,23 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPTools.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkSMPTools.h"
+
+// Simple implementation that runs everything sequentially.
+
+//--------------------------------------------------------------------------------
+void vtkSMPTools::Initialize(int)
+{
+}
diff --git a/Common/Core/SMP/Sequential/vtkSMPToolsInternal.h.in b/Common/Core/SMP/Sequential/vtkSMPToolsInternal.h.in
new file mode 100644
index 0000000..067ce91
--- /dev/null
+++ b/Common/Core/SMP/Sequential/vtkSMPToolsInternal.h.in
@@ -0,0 +1,53 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPToolsInternal.h.in
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+namespace vtk
+{
+namespace detail
+{
+namespace smp
+{
+template <typename FunctorInternal>
+static void vtkSMPTools_Impl_For(
+  vtkIdType first, vtkIdType last, vtkIdType grain,
+  FunctorInternal& fi)
+{
+  vtkIdType n = last - first;
+  if (!n)
+    {
+    return;
+    }
+
+  if (grain == 0 || grain >= n)
+    {
+    fi.Execute(first, last);
+    }
+  else
+    {
+    vtkIdType b = first;
+    while (b < last)
+      {
+      vtkIdType e = b + grain;
+      if (e > last)
+        {
+        e = last;
+        }
+      fi.Execute(b, e);
+      b = e;
+      }
+    }
+}
+}
+}
+}
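
The sequential backend above simply walks the range in grain-sized chunks. As a standalone illustration of that chunking (not part of the patch), a range of 10 elements with a grain of 4 is handed to Execute as [0, 4), [4, 8), [8, 10):

    #include <iostream>

    // Mirrors the chunking loop in vtkSMPTools_Impl_For above.
    int main()
    {
      const long long first = 0, last = 10, grain = 4;
      for (long long b = first; b < last;)
        {
        long long e = b + grain;
        if (e > last)
          {
          e = last;
          }
        std::cout << "Execute(" << b << ", " << e << ")\n"; // [0,4) [4,8) [8,10)
        b = e;
        }
      return 0;
    }
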
diff --git a/Common/Core/SMP/Simple/vtkSMPThreadLocal.h.in b/Common/Core/SMP/Simple/vtkSMPThreadLocal.h.in
new file mode 100644
index 0000000..54e6111
--- /dev/null
+++ b/Common/Core/SMP/Simple/vtkSMPThreadLocal.h.in
@@ -0,0 +1,237 @@
+ /*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPThreadLocal.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPThreadLocal - A simple thread local implementation for simple parallelism.
+// .SECTION Description
+// A thread local object is one that maintains a copy of an object of the
+// template type for each thread that processes data. vtkSMPThreadLocal
+// creates storage for all threads but the actual objects are created
+// the first time Local() is called. Note that some of the vtkSMPThreadLocal
+// API is not thread safe. It can be safely used in a multi-threaded
+// environment because Local() returns storage specific to a particular
+// thread, which by default will be accessed sequentially. It is also
+// thread-safe to iterate over vtkSMPThreadLocal as long as each thread
+// creates its own iterator and does not change any of the thread local
+// objects.
+//
+// A common design pattern when using a thread local storage object is to
+// write/accumulate data to a local object when executing in parallel and
+// then to have a sequential code block that iterates over the whole storage
+// using the iterators to do the final accumulation.
+//
+// Note that this particular implementation is designed to work with the
+// simple parallel backend, which is intended mainly for debugging purposes.
+//
+// .SECTION Warning
+// There is absolutely no guarantee of the order in which the local objects
+// will be stored and hence of the order in which they will be traversed when
+// using iterators. You should not even assume that two vtkSMPThreadLocal
+// instances populated in the same parallel section will be populated in the
+// same order. For example, consider the following:
+// \verbatim
+// vtkSMPThreadLocal<int> Foo;
+// vtkSMPThreadLocal<int> Bar;
+// class AFunctor
+// {
+//    void Initialize() const
+//    {
+//        int& foo = Foo.Local();
+//        int& bar = Bar.Local();
+//        foo = random();
+//        bar = foo;
+//    }
+//
+//    void operator()(vtkIdType, vtkIdType) const
+//    {}
+// };
+//
+// AFunctor functor;
+// vtkParalllelUtilities::For(0, 100000, functor);
+//
+// vtkSMPThreadLocal<int>::iterator itr1 = Foo.begin();
+// vtkSMPThreadLocal<int>::iterator itr2 = Bar.begin();
+// while (itr1 != Foo.end())
+// {
+//   assert(*itr1 == *itr2);
+//   ++itr1; ++itr2;
+// }
+// \endverbatim
+//
+// It is possible and likely that the assert() will fail using the TBB
+// backend. So if you need to store values related to each other and
+// iterate over them together, use a struct or class to group them together
+// and use a thread local of that class.
+
+#ifndef __vtkSMPThreadLocal_h
+#define __vtkSMPThreadLocal_h
+
+#include "vtkCommonCoreModule.h" // For export macro
+
+#include "vtkSystemIncludes.h"
+#include "vtkMultiThreader.h"
+#include <vector>
+
+VTKCOMMONCORE_EXPORT int vtkSMPToolsGetNumberOfThreads();
+VTKCOMMONCORE_EXPORT int vtkSMPToolsGetThreadID();
+
+template <typename T>
+class vtkSMPThreadLocal
+{
+  typedef std::vector<T> TLS;
+  typedef typename TLS::iterator TLSIter;
+public:
+  // Description:
+  // Default constructor. Creates a default exemplar.
+  vtkSMPThreadLocal() : Exemplar()
+    {
+      this->Initialize();
+    }
+
+  // Description:
+  // Constructor that allows the specification of an exemplar object
+  // which is used when constructing objects when Local() is first called.
+  // Note that a copy of the exemplar is created using its copy constructor.
+  vtkSMPThreadLocal(const T& exemplar) : Exemplar(exemplar)
+    {
+      this->Initialize();
+    }
+
+  // Description:
+  // Returns an object of type T that is local to the current thread.
+  // This needs to be called mainly within a threaded execution path.
+  // The first time it is called it will create a new object (local to the
+  // thread, so each thread gets its own when calling Local()) that is a copy
+  // of the exemplar passed to the constructor (or a default-constructed
+  // object if no exemplar was provided). After the first time, it will
+  // return the same object.
+  T& Local()
+    {
+      int tid = this->GetThreadID();
+      if (!this->Initialized[tid])
+        {
+        this->Internal[tid] = this->Exemplar;
+        this->Initialized[tid] = true;
+        }
+      return this->Internal[tid];
+    }
+
+  // Description:
+  // Subset of the standard iterator API.
+  // The most common design pattern is to use iterators in a sequential
+  // code block and to use only the thread local objects in parallel
+  // code blocks.
+  // It is thread safe to iterate over the thread local containers
+  // as long as each thread uses its own iterator and does not modify
+  // objects in the container.
+  class iterator
+  {
+  public:
+    iterator& operator++()
+      {
+        this->InitIter++;
+        this->Iter++;
+
+        // Make sure to skip uninitialized
+        // entries.
+        while(this->InitIter != this->EndIter)
+          {
+          if (*this->InitIter)
+            {
+            break;
+            }
+          this->InitIter++;
+          this->Iter++;
+          }
+        return *this;
+      }
+
+    bool operator!=(const iterator& other)
+      {
+        return this->Iter != other.Iter;
+      }
+
+    T& operator*()
+      {
+        return *this->Iter;
+      }
+
+  private:
+    friend class vtkSMPThreadLocal<T>;
+    std::vector<unsigned char>::iterator InitIter;
+    std::vector<unsigned char>::iterator EndIter;
+    TLSIter Iter;
+  };
+
+  // Description:
+  // Returns a new iterator pointing to the beginning of
+  // the local storage container. Thread safe.
+  iterator begin()
+    {
+      TLSIter iter = this->Internal.begin();
+      std::vector<unsigned char>::iterator iter2 =
+        this->Initialized.begin();
+      std::vector<unsigned char>::iterator enditer =
+        this->Initialized.end();
+      // fast forward to first initialized
+      // value
+      while(iter2 != enditer)
+        {
+        if (*iter2)
+          {
+          break;
+          }
+        iter2++;
+        iter++;
+        }
+      iterator retVal;
+      retVal.InitIter = iter2;
+      retVal.EndIter = enditer;
+      retVal.Iter = iter;
+      return retVal;
+    };
+
+  // Description:
+  // Returns a new iterator pointing to past the end of
+  // the local storage container. Thread safe.
+  iterator end()
+    {
+      iterator retVal;
+      retVal.InitIter = this->Initialized.end();
+      retVal.EndIter = this->Initialized.end();
+      retVal.Iter = this->Internal.end();
+      return retVal;
+    }
+
+private:
+  TLS Internal;
+  std::vector<unsigned char> Initialized;
+  T Exemplar;
+
+  void Initialize()
+    {
+      int numThreads = vtkSMPToolsGetNumberOfThreads();
+      this->Internal.resize(numThreads);
+      this->Initialized.resize(numThreads);
+      std::fill(this->Initialized.begin(),
+                this->Initialized.end(),
+                false);
+    }
+
+  inline int GetThreadID()
+    {
+      return vtkSMPToolsGetThreadID();
+    }
+};
+#endif
+// VTK-HeaderTest-Exclude: vtkSMPThreadLocal.h
diff --git a/Common/Core/SMP/Simple/vtkSMPTools.cxx b/Common/Core/SMP/Simple/vtkSMPTools.cxx
new file mode 100644
index 0000000..d621b02
--- /dev/null
+++ b/Common/Core/SMP/Simple/vtkSMPTools.cxx
@@ -0,0 +1,83 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPTools.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkSMPTools.h"
+
+#include "vtkObjectFactory.h"
+
+#include <pthread.h>
+
+static bool vtkSMPToolsInitialized = false;
+static int vtkSMPToolsNumberOfThreads = 0;
+
+static std::vector<vtkMultiThreaderIDType> vtkSMPToolsThreadIds;
+
+//static pthread_barrier_t barr;
+
+VTKCOMMONCORE_EXPORT void vtkSMPToolsInitialize()
+{
+  vtkSMPTools::Initialize();
+}
+
+VTKCOMMONCORE_EXPORT int vtkSMPToolsGetNumberOfThreads()
+{
+  vtkSMPTools::Initialize();
+
+  return vtkSMPToolsNumberOfThreads;
+}
+
+VTKCOMMONCORE_EXPORT int vtkSMPToolsGetThreadID()
+{
+  vtkSMPTools::Initialize();
+
+  vtkMultiThreaderIDType rawID = vtkMultiThreader::GetCurrentThreadID();
+  size_t numIDs = vtkSMPToolsThreadIds.size();
+  for (size_t i=0; i<numIDs; i++)
+    {
+    if (vtkSMPToolsThreadIds[i] == rawID)
+      {
+      return i;
+      }
+    }
+  return -1;
+}
+
+VTKCOMMONCORE_EXPORT std::vector<vtkMultiThreaderIDType>& vtkSMPToolsGetThreadIds()
+{
+  return vtkSMPToolsThreadIds;
+}
+
+//--------------------------------------------------------------------------------
+void vtkSMPTools::Initialize(int nThreads)
+{
+  if (vtkSMPToolsInitialized)
+    {
+    return;
+    }
+  if (nThreads == 0)
+    {
+    vtkSMPToolsNumberOfThreads =
+      vtkMultiThreader::GetGlobalDefaultNumberOfThreads();
+    }
+  else
+    {
+    vtkSMPToolsNumberOfThreads = nThreads;
+    }
+
+  vtkSMPToolsInitialized = true;
+
+  vtkSMPToolsThreadIds.resize(vtkSMPToolsNumberOfThreads);
+  vtkSMPToolsThreadIds[0] = vtkMultiThreader::GetCurrentThreadID();
+}
diff --git a/Common/Core/SMP/Simple/vtkSMPToolsInternal.h.in b/Common/Core/SMP/Simple/vtkSMPToolsInternal.h.in
new file mode 100644
index 0000000..88efd85
--- /dev/null
+++ b/Common/Core/SMP/Simple/vtkSMPToolsInternal.h.in
@@ -0,0 +1,139 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPToolsInternal.h.in
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkMultiThreader.h"
+#include "vtkNew.h"
+
+VTKCOMMONCORE_EXPORT std::vector<vtkMultiThreaderIDType>& vtkSMPToolsGetThreadIds();
+VTKCOMMONCORE_EXPORT void vtkSMPToolsInitialize();
+VTKCOMMONCORE_EXPORT int vtkSMPToolsGetNumberOfThreads();
+
+namespace vtk
+{
+namespace detail
+{
+namespace smp
+{
+template <typename T>
+void vtkSMPToolsForEach(vtkIdType first,
+                            vtkIdType last,
+                            T* op,
+                            int grain)
+{
+  vtkIdType n = last - first;
+  if (!n)
+    {
+    return;
+    }
+
+  if (grain == 0 || grain >= n)
+    {
+    op->Execute(first, last);
+    }
+  else
+    {
+    vtkIdType b = first;
+    while (b < last)
+      {
+      vtkIdType e = b + grain;
+      if (e > last)
+        {
+        e = last;
+        }
+      //cout << b << " " << e << endl;
+      op->Execute(b, e);
+      b = e;
+      }
+    }
+}
+
+
+struct vtkSMPToolsExecuteArgs
+{
+  vtkIdType First;
+  vtkIdType Last;
+  void* Functor;
+  int Grain;
+};
+
+template <typename T>
+VTK_THREAD_RETURN_TYPE vtkSMPToolsExecute(void *varg)
+{
+  vtkMultiThreader::ThreadInfo* arg =
+    static_cast<vtkMultiThreader::ThreadInfo*>(varg);
+
+  int threadId = arg->ThreadID;
+  int threadCount = arg->NumberOfThreads;
+
+  std::vector<vtkMultiThreaderIDType>& threadIds =
+    vtkSMPToolsGetThreadIds();
+  threadIds[threadId] = vtkMultiThreader::GetCurrentThreadID();
+
+  //pthread_barrier_wait(&barr);
+
+  vtkSMPToolsExecuteArgs* fargs =
+    static_cast<vtkSMPToolsExecuteArgs*>(arg->UserData);
+
+  vtkIdType n = fargs->Last - fargs->First;
+  if (n > threadCount)
+    {
+    vtkIdType count = n / threadCount;
+    vtkIdType begin = fargs->First + count*threadId;
+    vtkIdType end;
+    if (threadId == threadCount - 1)
+      {
+      end = fargs->Last;
+      }
+    else
+      {
+      end = begin + count;
+      }
+    vtkSMPToolsForEach(begin, end, (T*)(fargs->Functor), fargs->Grain);
+    }
+  else
+    {
+    // Fewer work items than threads: let thread 0 execute the whole range
+    // instead of only reporting it.
+    if (threadId == 0)
+      {
+      vtkSMPToolsForEach(fargs->First, fargs->Last,
+                         (T*)(fargs->Functor), fargs->Grain);
+      }
+    }
+
+  return VTK_THREAD_RETURN_VALUE;
+}
+
+template <typename FunctorInternal>
+static void vtkSMPTools_Impl_For(
+  vtkIdType first, vtkIdType last, vtkIdType grain,
+  FunctorInternal& fi)
+{
+  vtkSMPToolsInitialize();
+
+  vtkSMPToolsExecuteArgs args;
+  args.First = first;
+  args.Last = last;
+  args.Functor = (void*)(&fi);
+  args.Grain = grain;
+
+  //pthread_barrier_init(&barr, NULL, vtkSMPToolsNumberOfThreads);
+
+  vtkNew<vtkMultiThreader> threader;
+  threader->SetNumberOfThreads(vtkSMPToolsGetNumberOfThreads());
+  threader->SetSingleMethod(vtkSMPToolsExecute<FunctorInternal>, &args);
+  threader->SingleMethodExecute();
+
+  //pthread_barrier_destroy(&barr);
+}
+}
+}
+}
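
The Simple backend statically partitions the range across the threads spawned by vtkMultiThreader: every thread gets n / threadCount items and the last thread absorbs the remainder. A standalone sketch of that arithmetic, mirroring vtkSMPToolsExecute above with hypothetical values: 10 items over 3 threads become [0, 3), [3, 6), [6, 10):

    #include <iostream>

    // Mirrors the per-thread range computation in vtkSMPToolsExecute above.
    int main()
    {
      const long long first = 0, last = 10;
      const int threadCount = 3;
      const long long n = last - first;
      const long long count = n / threadCount; // items per thread
      for (int threadId = 0; threadId < threadCount; ++threadId)
        {
        long long begin = first + count * threadId;
        long long end = (threadId == threadCount - 1) ? last : begin + count;
        std::cout << "thread " << threadId
                  << " -> [" << begin << ", " << end << ")\n";
        }
      return 0;
    }
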
diff --git a/Common/Core/SMP/TBB/vtkAtomicInt.h.in b/Common/Core/SMP/TBB/vtkAtomicInt.h.in
new file mode 100644
index 0000000..d60865e
--- /dev/null
+++ b/Common/Core/SMP/TBB/vtkAtomicInt.h.in
@@ -0,0 +1,94 @@
+ /*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkAtomicInt.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkAtomicInt - Provides support for atomic integers (TBB backend).
+// .SECTION Description
+// A thin wrapper around tbb::atomic<T> exposing the same subset of the
+// std::atomic API as the other vtkAtomicInt backends.
+
+#ifndef __vtkAtomicInt_h
+#define __vtkAtomicInt_h
+
+#include <tbb/atomic.h>
+
+template <typename T> class vtkAtomicInt
+{
+public:
+  vtkAtomicInt()
+    {
+      this->Atomic = 0;
+    }
+
+  vtkAtomicInt(const T val)
+    {
+      this->Atomic = val;
+    }
+
+  T operator++()
+  {
+    return ++this->Atomic;
+  }
+
+  T operator++(int)
+  {
+    return this->Atomic++;
+  }
+
+  T operator--()
+  {
+    return --this->Atomic;
+  }
+
+  T operator--(int)
+  {
+    return this->Atomic--;
+  }
+
+  T operator+=(T val)
+  {
+    return this->Atomic += val;
+  }
+
+  T operator-=(T val)
+  {
+    return this->Atomic -= val;
+  }
+
+  operator T() const
+  {
+    return this->Atomic;
+  }
+
+  T operator=(T val)
+  {
+    this->Atomic = val;
+    return val;
+  }
+
+  T load() const
+    {
+      return this->Atomic;
+    }
+
+  void store(T val)
+    {
+      this->Atomic = val;
+    }
+
+
+private:
+  tbb::atomic<T> Atomic;
+};
+
+
+#endif
+// VTK-HeaderTest-Exclude: vtkAtomicInt.h
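
Illustrative usage sketch for the wrapper above (not part of the imported
sources; it reuses only the vtkMultiThreader API exercised elsewhere in this
commit):

    #include "vtkAtomicInt.h"
    #include "vtkMultiThreader.h"
    #include "vtkNew.h"

    static vtkAtomicInt<vtkTypeInt32> Counter(0);

    static VTK_THREAD_RETURN_TYPE CountUp(void *)
    {
      for (int i = 0; i < 1000; ++i)
        {
        ++Counter; // atomic pre-increment, returns the new value
        }
      return VTK_THREAD_RETURN_VALUE;
    }

    int main()
    {
      vtkNew<vtkMultiThreader> mt;
      mt->SetSingleMethod(CountUp, NULL);
      mt->SetNumberOfThreads(4);
      mt->SingleMethodExecute();
      // Four threads, no lost updates: the final value is 4000.
      return Counter.load() == 4000 ? 0 : 1;
    }
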
diff --git a/Common/Core/SMP/TBB/vtkSMPThreadLocal.h.in b/Common/Core/SMP/TBB/vtkSMPThreadLocal.h.in
new file mode 100644
index 0000000..55805b8
--- /dev/null
+++ b/Common/Core/SMP/TBB/vtkSMPThreadLocal.h.in
@@ -0,0 +1,168 @@
+ /*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPThreadLocal.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPThreadLocal - A TBB based thread local storage implementation.
+// .SECTION Description
+// A thread local object is one that maintains a copy of an object of the
+// template type for each thread that processes data. vtkSMPThreadLocal
+// creates storage for all threads but the actual objects are created
+// the first time Local() is called. Note that some of the vtkSMPThreadLocal
+// API is not thread safe. It can be safely used in a multi-threaded
+// environment because Local() returns storage specific to a particular
+// thread, which by default will be accessed sequentially. It is also
+// thread-safe to iterate over vtkSMPThreadLocal as long as each thread
+// creates its own iterator and does not change any of the thread local
+// objects.
+//
+// A common design pattern when using a thread local storage object is to
+// write/accumulate data to the local object when executing in parallel and
+// then have a sequential code block that iterates over the whole storage
+// using the iterators to do the final accumulation.
+//
+// .SECTION Warning
+// There is absolutely no guarantee to the order in which the local objects
+// will be stored and hence the order in which they will be traversed when
+// using iterators. You should not even assume that two vtkSMPThreadLocal
+// objects populated in the same parallel section will be populated in the
+// same order. For example, consider the following:
+// \verbatim
+// vtkSMPThreadLocal<int> Foo;
+// vtkSMPThreadLocal<int> Bar;
+// class AFunctor
+// {
+//    void Initialize() const
+//    {
+//        int& foo = Foo.Local();
+//        int& bar = Bar.Local();
+//        foo = random();
+//        bar = foo;
+//    }
+//
+//    void operator()(vtkIdType, vtkIdType) const
+//    {}
+// };
+//
+// AFunctor functor;
+// vtkSMPTools::For(0, 100000, functor);
+//
+// vtkSMPThreadLocal<int>::iterator itr1 = Foo.begin();
+// vtkSMPThreadLocal<int>::iterator itr2 = Bar.begin();
+// while (itr1 != Foo.end())
+// {
+//   assert(*itr1 == *itr2);
+//   ++itr1; ++itr2;
+// }
+// \endverbatim
+//
+// It is possible and likely that the assert() will fail using the TBB
+// backend. So if you need to store values related to each other and
+// iterate over them together, use a struct or class to group them together
+// and use a thread local of that class.
+
+#ifndef __vtkSMPThreadLocal_h
+#define __vtkSMPThreadLocal_h
+
+#include <tbb/enumerable_thread_specific.h>
+
+template <typename T>
+class vtkSMPThreadLocal
+{
+  typedef tbb::enumerable_thread_specific<T> TLS;
+  typedef typename TLS::iterator TLSIter;
+public:
+  // Description:
+  // Default constructor. Creates a default exemplar.
+  vtkSMPThreadLocal()
+    {
+    }
+
+  // Description:
+  // Constructor that allows the specification of an exemplar object
+  // which is used when constructing objects when Local() is first called.
+  // Note that a copy of the exemplar is created using its copy constructor.
+  vtkSMPThreadLocal(const T& exemplar) : Internal(exemplar)
+    {
+    }
+
+  // Description:
+  // Returns an object of type T that is local to the current thread.
+  // This needs to be called mainly within a threaded execution path.
+  // The first time it is called it will create a new object (local to the
+  // thread, so each thread gets its own when calling Local()) which is a
+  // copy of the exemplar passed to the constructor (or a default-constructed
+  // object if no exemplar was provided). After the first time, it will
+  // return the same object.
+  T& Local()
+    {
+      return this->Internal.local();
+    }
+
+  // Description:
+  // Subset of the standard iterator API.
+  // The most common design pattern is to use iterators in a sequential
+  // code block and to use only the thread local objects in parallel
+  // code blocks.
+  // It is thread safe to iterate over the thread local containers
+  // as long as each thread uses its own iterator and does not modify
+  // objects in the container.
+  class iterator
+  {
+  public:
+    iterator& operator++()
+      {
+        ++this->Iter;
+        return *this;
+      }
+
+    bool operator!=(const iterator& other)
+      {
+        return this->Iter != other.Iter;
+      }
+
+    T& operator*()
+      {
+        return *this->Iter;
+      }
+
+  private:
+    TLSIter Iter;
+
+    friend class vtkSMPThreadLocal<T>;
+  };
+
+  // Description:
+  // Returns a new iterator pointing to the beginning of
+  // the local storage container. Thread safe.
+  iterator begin()
+    {
+      iterator iter;
+      iter.Iter = this->Internal.begin();
+      return iter;
+    }
+
+  // Description:
+  // Returns a new iterator pointing to past the end of
+  // the local storage container. Thread safe.
+  iterator end()
+    {
+      iterator iter;
+      iter.Iter = this->Internal.end();
+      return iter;
+    }
+
+private:
+  TLS Internal;
+};
+#endif
+// VTK-HeaderTest-Exclude: vtkSMPThreadLocal.h
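
A compact sketch of the accumulate-then-reduce pattern described above
(illustrative only; the same pattern appears in TestSMP.cxx later in this
commit):

    #include "vtkSMPThreadLocal.h"
    #include "vtkSMPTools.h"

    struct CountFunctor
    {
      vtkSMPThreadLocal<vtkIdType> Count; // one counter per thread

      CountFunctor() : Count(0) {}

      void operator()(vtkIdType begin, vtkIdType end)
        {
        this->Count.Local() += end - begin; // parallel part: touch only Local()
        }
    };

    vtkIdType CountRange(vtkIdType n)
    {
      CountFunctor f;
      vtkSMPTools::For(0, n, f);

      // Sequential reduction over the per-thread copies.
      vtkIdType total = 0;
      for (vtkSMPThreadLocal<vtkIdType>::iterator it = f.Count.begin();
           it != f.Count.end(); ++it)
        {
        total += *it;
        }
      return total; // equals n
    }
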
diff --git a/Common/Core/SMP/TBB/vtkSMPTools.cxx b/Common/Core/SMP/TBB/vtkSMPTools.cxx
new file mode 100644
index 0000000..8327d22
--- /dev/null
+++ b/Common/Core/SMP/TBB/vtkSMPTools.cxx
@@ -0,0 +1,49 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPTools.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkSMPTools.h"
+
+#include "vtkCriticalSection.h"
+
+#include <tbb/task_scheduler_init.h>
+
+struct vtkSMPToolsInit
+{
+  tbb::task_scheduler_init Init;
+
+  vtkSMPToolsInit(int numThreads) : Init(numThreads)
+    {
+    }
+};
+
+static bool vtkSMPToolsInitialized = false;
+static vtkSimpleCriticalSection vtkSMPToolsCS;
+
+//--------------------------------------------------------------------------------
+void vtkSMPTools::Initialize(int numThreads)
+{
+  vtkSMPToolsCS.Lock();
+  if (!vtkSMPToolsInitialized)
+    {
+    // If numThreads == 0 (default num. threads), don't create a task_scheduler_init
+    // and let TBB do the default thing.
+    if (numThreads != 0)
+      {
+      static vtkSMPToolsInit aInit(numThreads);
+      }
+    vtkSMPToolsInitialized = true;
+    }
+  vtkSMPToolsCS.Unlock();
+}
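
Initialize() only matters when capping the worker-thread count; a minimal
sketch, assuming any functor with operator()(vtkIdType, vtkIdType):

    #include "vtkSMPTools.h"

    template <typename Functor>
    void RunCapped(Functor& f, vtkIdType n)
    {
      // Cap the TBB backend at 4 workers. Repeat calls are no-ops (see the
      // guard above); with the default argument of 0, TBB picks the count.
      vtkSMPTools::Initialize(4);
      vtkSMPTools::For(0, n, f);
    }
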
diff --git a/Common/Core/SMP/TBB/vtkSMPToolsInternal.h.in b/Common/Core/SMP/TBB/vtkSMPToolsInternal.h.in
new file mode 100644
index 0000000..4c57469
--- /dev/null
+++ b/Common/Core/SMP/TBB/vtkSMPToolsInternal.h.in
@@ -0,0 +1,64 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPToolsInternal.h.in
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkMultiThreader.h"
+#include "vtkNew.h"
+
+#include <tbb/blocked_range.h>
+#include <tbb/parallel_for.h>
+
+namespace vtk
+{
+namespace detail
+{
+namespace smp
+{
+template <typename T>
+class FuncCall
+{
+  T& o;
+
+public:
+  void operator() (const tbb::blocked_range<vtkIdType>& r) const
+    {
+      o.Execute(r.begin(), r.end());
+    }
+
+  FuncCall (T& _o) : o(_o)
+    {
+    }
+};
+
+template <typename FunctorInternal>
+static void vtkSMPTools_Impl_For(
+  vtkIdType first, vtkIdType last, vtkIdType grain,
+  FunctorInternal& fi)
+{
+  vtkIdType n = last - first;
+  if (!n)
+    {
+    return;
+    }
+  if (grain > 0)
+    {
+    tbb::parallel_for(tbb::blocked_range<vtkIdType>(first, last, grain), FuncCall<FunctorInternal>(fi));
+    }
+  else
+    {
+    tbb::parallel_for(tbb::blocked_range<vtkIdType>(first, last), FuncCall<FunctorInternal>(fi));
+    }
+}
+}
+}
+}
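
The optional grain argument above is forwarded as the tbb::blocked_range
grain size. A hedged sketch of both For() overloads (the functor and its
Data buffer are made up for illustration):

    #include "vtkSMPTools.h"

    struct SquareFunctor
    {
      double* Data; // hypothetical output buffer, sized at least n

      void operator()(vtkIdType begin, vtkIdType end)
        {
        for (vtkIdType i = begin; i < end; ++i)
          {
          this->Data[i] = static_cast<double>(i) * i;
          }
        }
    };

    void Square(double* data, vtkIdType n)
    {
      SquareFunctor f;
      f.Data = data;
      vtkSMPTools::For(0, n, f);       // grain chosen by the backend
      vtkSMPTools::For(0, n, 1024, f); // explicit grain of 1024 ids
    }
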
diff --git a/Common/Core/Testing/Cxx/CMakeLists.txt b/Common/Core/Testing/Cxx/CMakeLists.txt
index e6be4a7..8df2eac 100644
--- a/Common/Core/Testing/Cxx/CMakeLists.txt
+++ b/Common/Core/Testing/Cxx/CMakeLists.txt
@@ -1,9 +1,17 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+# Tell TestSystemInformation where to find the build trees.
+set(TestSystemInformation_ARGS ${CMAKE_BINARY_DIR})
+
+# Tell TestXMLFileOutputWindow where to write test file
+set(TestXMLFileOutputWindow_ARGS ${CMAKE_BINARY_DIR}/Testing/Temporary/XMLFileOutputWindow.txt)
+
+vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
   TestArrayAPI.cxx
   TestArrayAPIConvenience.cxx
   TestArrayAPIDense.cxx
   TestArrayAPISparse.cxx
   TestArrayBool.cxx
+  TestAtomic.cxx
+  TestScalarsToColors.cxx
   # TestArrayCasting.cxx # Uses Boost in its own separate test.
   TestArrayExtents.cxx
   TestArrayInterpolationDense.cxx
@@ -18,6 +26,7 @@ create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
   # TestCxxFeatures.cxx # This is in its own exe too.
   TestDataArray.cxx
   TestDataArrayComponentNames.cxx
+  TestDataArrayIterators.cxx
   TestGarbageCollector.cxx
   # TestInstantiator.cxx # Have not enabled instantiators.
   TestLookupTable.cxx
@@ -27,6 +36,7 @@ create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
   TestObjectFactory.cxx
   TestObservers.cxx
   TestObserversPerformance.cxx
+  TestSMP.cxx
   TestSmartPointer.cxx
   TestSortDataArray.cxx
   TestSparseArrayValidation.cxx
@@ -46,24 +56,7 @@ create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
   #otherLookupTable.cxx
   #otherLookupTableWithEnabling.cxx
   otherStringArray.cxx
+  )
 
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests}
+vtk_test_cxx_executable(${vtk-module}CxxTests
   vtkTestNewVar.cxx)
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Tell TestSystemInformation where to find the build trees.
-set(TestSystemInformation_ARGS ${CMAKE_BINARY_DIR})
-
-# Tell TestXMLFileOutputWindow where to write test file
-set(TestXMLFileOutputWindow_ARGS ${CMAKE_BINARY_DIR}/Testing/Temporary/XMLFileOutputWindow.txt)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  add_test(NAME ${vtk-module}-${TName} COMMAND ${vtk-module}CxxTests ${TName} ${${TName}_ARGS})
-endforeach()
diff --git a/Common/Core/Testing/Cxx/TestAtomic.cxx b/Common/Core/Testing/Cxx/TestAtomic.cxx
new file mode 100644
index 0000000..7893631
--- /dev/null
+++ b/Common/Core/Testing/Cxx/TestAtomic.cxx
@@ -0,0 +1,189 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestAtomic.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkAtomicInt.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkMultiThreader.h"
+
+static int Total = 0;
+static vtkTypeInt64 Total64 = 0;
+static vtkAtomicInt<vtkTypeInt32> TotalAtomic(0);
+static vtkAtomicInt<vtkTypeInt64> TotalAtomic64(0);
+static const int Target = 1000000;
+static int Values32[Target+2];
+static int Values64[Target+2];
+static int NumThreads = 5;
+
+static vtkObject* AnObject;
+
+VTK_THREAD_RETURN_TYPE MyFunction(void *)
+{
+  for (int i=0; i<Target/NumThreads; i++)
+    {
+    Total++;
+    int idx = ++TotalAtomic;
+    Values32[idx] = 1;
+
+    Total64++;
+    idx = ++TotalAtomic64;
+    Values64[idx] = 1;
+
+    //AnObject->Register(0);
+    //AnObject->UnRegister(0);
+
+    AnObject->Modified();
+    }
+
+  return VTK_THREAD_RETURN_VALUE;
+}
+
+VTK_THREAD_RETURN_TYPE MyFunction2(void *)
+{
+  for (int i=0; i<Target/NumThreads; i++)
+    {
+    --TotalAtomic;
+
+    --TotalAtomic64;
+    }
+
+  return VTK_THREAD_RETURN_VALUE;
+}
+
+VTK_THREAD_RETURN_TYPE MyFunction3(void *)
+{
+  for (int i=0; i<Target/NumThreads; i++)
+    {
+    int idx = TotalAtomic += 1;
+    Values32[idx]++;
+
+    idx = TotalAtomic64 += 1;
+    Values64[idx]++;
+    }
+
+  return VTK_THREAD_RETURN_VALUE;
+}
+
+VTK_THREAD_RETURN_TYPE MyFunction4(void *)
+{
+  for (int i=0; i<Target/NumThreads; i++)
+    {
+    TotalAtomic++;
+    TotalAtomic += 1;
+    TotalAtomic--;
+    TotalAtomic -= 1;
+
+    TotalAtomic64++;
+    TotalAtomic64 += 1;
+    TotalAtomic64--;
+    TotalAtomic64 -= 1;
+    }
+
+  return VTK_THREAD_RETURN_VALUE;
+}
+
+int TestAtomic(int, char*[])
+{
+  Total = 0;
+  TotalAtomic = 0;
+  Total64 = 0;
+  TotalAtomic64 = 0;
+
+  AnObject = vtkObject::New();
+
+  //cout << AnObject->GetReferenceCount() << endl;
+
+  int beforeMTime = AnObject->GetMTime();
+
+  for (int i=0; i<Target; i++)
+    {
+    Values32[i] = 0;
+    Values64[i] = 0;
+    }
+
+  vtkNew<vtkMultiThreader> mt;
+  mt->SetSingleMethod(MyFunction, NULL);
+  mt->SetNumberOfThreads(NumThreads);
+  mt->SingleMethodExecute();
+
+  mt->SetSingleMethod(MyFunction2, NULL);
+  mt->SingleMethodExecute();
+
+  mt->SetSingleMethod(MyFunction3, NULL);
+  mt->SingleMethodExecute();
+
+  // Making sure that atomic incr returned unique
+  // values each time. We expect all numbers from
+  // 1 to Target-1 to be 2.
+  if (Values32[0] != 0)
+    {
+      cout << "Expecting Values32[0] to be 0. Got "
+           << Values32[0] << endl;
+      return 1;
+    }
+  if (Values64[0] != 0)
+    {
+      cout << "Expecting Values64[0] to be 0. Got "
+           << Values64[0] << endl;
+      return 1;
+    }
+  for (int i=1; i<Target; i++)
+    {
+    if (Values32[i] != 2)
+      {
+      cout << "Expecting Values32[" << i << "] to be 2. Got "
+           << Values32[i] << endl;
+      return 1;
+      }
+    if (Values64[i] != 2)
+      {
+      cout << "Expecting Values64[" << i << "] to be 2. Got "
+           << Values64[i] << endl;
+      return 1;
+      }
+    }
+
+  mt->SetSingleMethod(MyFunction4, NULL);
+  mt->SingleMethodExecute();
+
+  cout << Total << " " << TotalAtomic.load() << endl;
+  cout << Total64 << " " << TotalAtomic64.load() << endl;
+
+  //cout << AnObject->GetReferenceCount() << endl;
+
+  cout << "MTime: " << AnObject->GetMTime() << endl;
+
+  if (TotalAtomic.load() != Target)
+    {
+    return 1;
+    }
+
+  if (TotalAtomic64.load() != Target)
+    {
+    return 1;
+    }
+
+  if (AnObject->GetReferenceCount() != 1)
+    {
+    return 1;
+    }
+
+  if ((int)AnObject->GetMTime() != Target + beforeMTime + 2)
+    {
+    return 1;
+    }
+
+  AnObject->Delete();
+  return 0;
+}
diff --git a/Common/Core/Testing/Cxx/TestCollection.cxx b/Common/Core/Testing/Cxx/TestCollection.cxx
index eacc35c..680be23 100644
--- a/Common/Core/Testing/Cxx/TestCollection.cxx
+++ b/Common/Core/Testing/Cxx/TestCollection.cxx
@@ -38,7 +38,7 @@ int TestCollection(int,char *[])
   return res ? EXIT_SUCCESS : EXIT_FAILURE;
 }
 
-bool IsEqual(vtkCollection* collection, const std::vector<vtkSmartPointer<vtkIntArray> >& v)
+static bool IsEqual(vtkCollection* collection, const std::vector<vtkSmartPointer<vtkIntArray> >& v)
 {
   if (collection->GetNumberOfItems() != static_cast<int>(v.size()))
     {
diff --git a/Common/Core/Testing/Cxx/TestDataArray.cxx b/Common/Core/Testing/Cxx/TestDataArray.cxx
index 026b992..2031527 100644
--- a/Common/Core/Testing/Cxx/TestDataArray.cxx
+++ b/Common/Core/Testing/Cxx/TestDataArray.cxx
@@ -1,8 +1,40 @@
 #include "vtkIntArray.h"
 #include "vtkDoubleArray.h"
 
+// Define this to run benchmarking tests on some vtkDataArray methods:
+#undef BENCHMARK
+// #define BENCHMARK
+
+#ifdef BENCHMARK
+#include "vtkTimerLog.h"
+#include "vtkIdList.h"
+#include "vtkNew.h"
+
+#include <iomanip>
+#include <iostream>
+#include <map>
+#include <string>
+
+namespace TestDataArrayPrivate {
+typedef std::map<std::string, double> LogType;
+LogType log;
+const int numBenchmarks = 50;
+void insertTimeLog(const std::string &str, double time);
+void printTimeLog();
+void benchmark();
+} // End TestDataArrayPrivate namespace
+#endif // BENCHMARK
+
 int TestDataArray(int,char *[])
 {
+#ifdef BENCHMARK
+  for (int i = 0; i < TestDataArrayPrivate::numBenchmarks; ++i)
+    {
+    TestDataArrayPrivate::benchmark();
+    }
+  TestDataArrayPrivate::printTimeLog();
+  return 0;
+#endif // BENCHMARK
+
   double range[2];
   vtkIntArray* array = vtkIntArray::New();
   array->GetRange( range, 0 );
@@ -103,3 +135,205 @@ int TestDataArray(int,char *[])
   farray->Delete();
   return 0;
 }
+
+#ifdef BENCHMARK
+namespace TestDataArrayPrivate {
+void insertTimeLog(const std::string &str, double time)
+{
+  if (log.find(str) == log.end())
+    {
+    log[str] = 0.;
+    }
+  log[str] += time;
+}
+
+void printTimeLog()
+{
+  for (LogType::const_iterator it = log.begin(), itEnd = log.end(); it != itEnd;
+       ++it)
+    {
+    std::cout << std::setw(35) << std::left << it->first + ": "
+              << std::setw(0) << std::right
+              << it->second / static_cast<double>(numBenchmarks) << std::endl;
+    }
+}
+
+void benchmark()
+{
+  vtkNew<vtkTimerLog> timer;
+  vtkNew<vtkDoubleArray> double1;
+  vtkNew<vtkDoubleArray> double2;
+  vtkNew<vtkDoubleArray> double3;
+  vtkNew<vtkIntArray> int1;
+  vtkNew<vtkIntArray> int2;
+  vtkNew<vtkIntArray> int3;
+  double time;
+
+  double1->SetNumberOfComponents(4);
+  double1->SetNumberOfTuples(2500000);
+
+  for (vtkIdType i = 0; i < 10000000; ++i)
+    {
+    double1->SetValue(i, static_cast<double>(i));
+    }
+
+  // Deep copy, with/without conversions
+  int1->Initialize();
+  timer->StartTimer();
+  int1->DeepCopy(double1.GetPointer());
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("deep copy 10M double --> int", time);
+
+  double1->Initialize();
+  timer->StartTimer();
+  double1->DeepCopy(int1.GetPointer());
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("deep copy 10M int --> double", time);
+
+  double2->Initialize();
+  timer->StartTimer();
+  double2->DeepCopy(double1.GetPointer());
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("deep copy 10M double --> double", time);
+
+  int2->Initialize();
+  timer->StartTimer();
+  int2->DeepCopy(int1.GetPointer());
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("deep copy 10M int --> int", time);
+
+  // Insert tuple
+  double2->Initialize();
+  timer->StartTimer();
+  for (int i = 0; i < double1->GetNumberOfTuples(); ++i)
+    {
+    double2->InsertTuple(i, i, double1.GetPointer());
+    }
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("insert tuple (double)", time);
+
+  int2->Initialize();
+  timer->StartTimer();
+  for (int i = 0; i < int1->GetNumberOfTuples(); ++i)
+    {
+    int2->InsertTuple(i, i, int1.GetPointer());
+    }
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("insert tuple (int)", time);
+
+  // Insert next tuple
+  double2->Initialize();
+  timer->StartTimer();
+  for (int i = 0; i < double1->GetNumberOfTuples(); ++i)
+    {
+    double2->InsertNextTuple(i, double1.GetPointer());
+    }
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("insert next tuple (double)", time);
+
+  int2->Initialize();
+  timer->StartTimer();
+  for (int i = 0; i < int1->GetNumberOfTuples(); ++i)
+    {
+    int2->InsertNextTuple(i, int1.GetPointer());
+    }
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("insert next tuple (int)", time);
+
+
+  // interpolation
+  vtkNew<vtkIdList> ids;
+  ids->InsertNextId(4);
+  ids->InsertNextId(9);
+  ids->InsertNextId(10000);
+  ids->InsertNextId(100000);
+  ids->InsertNextId(100500);
+  ids->InsertNextId(314);
+  double weights[6];
+  std::fill(weights, weights + 6, 1.0 / 6.0);
+
+  const int numInterps = 100000;
+  double3->Initialize();
+  timer->StartTimer();
+  for (int i = 0; i < numInterps; ++i)
+    {
+    double3->InterpolateTuple(i, ids.GetPointer(), double1.GetPointer(),
+                              weights);
+    }
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("interpolate 6 tuples (double)", time);
+
+  int3->Initialize();
+  timer->StartTimer();
+  for (int i = 0; i < numInterps; ++i)
+    {
+    int3->InterpolateTuple(i, ids.GetPointer(), int1.GetPointer(), weights);
+    }
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("interpolate 6 tuples (int)", time);
+
+  double3->Initialize();
+  timer->StartTimer();
+  for (int i = 0; i < numInterps; ++i)
+    {
+    double3->InterpolateTuple(i,
+                              500, double1.GetPointer(),
+                              700, double2.GetPointer(), 0.25);
+    }
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("interpolate 2 arrays (double)", time);
+
+  int3->Initialize();
+  timer->StartTimer();
+  for (int i = 0; i < numInterps; ++i)
+    {
+    int3->InterpolateTuple(i,
+                           500, int1.GetPointer(),
+                           700, int2.GetPointer(), 0.25);
+    }
+  timer->StopTimer();
+  time = timer->GetElapsedTime();
+  insertTimeLog("interpolate 2 arrays (int)", time);
+
+  // GetTuples:
+  const int numGetTuples = 100000;
+
+  time = 0.;
+  for (int i = 0; i < numGetTuples; ++i)
+    {
+    double3->Initialize();
+    double3->SetNumberOfComponents(double1->GetNumberOfComponents());
+    double3->SetNumberOfTuples(ids->GetNumberOfIds());
+    timer->StartTimer();
+    double1->GetTuples(ids.GetPointer(), double3.GetPointer());
+    timer->StopTimer();
+    time += timer->GetElapsedTime();
+    }
+  insertTimeLog("get tuples random access (double)", time);
+
+  time = 0.;
+  for (int i = 0; i < numGetTuples; ++i)
+    {
+    int3->Initialize();
+    int3->SetNumberOfComponents(int1->GetNumberOfComponents());
+    int3->SetNumberOfTuples(ids->GetNumberOfIds());
+    timer->StartTimer();
+    int1->GetTuples(ids.GetPointer(), int3.GetPointer());
+    timer->StopTimer();
+    time += timer->GetElapsedTime();
+    }
+  insertTimeLog("get tuples random access (int)", time);
+}
+} // end private namespace
+#endif // BENCHMARK
diff --git a/Common/Core/Testing/Cxx/TestDataArrayIterators.cxx b/Common/Core/Testing/Cxx/TestDataArrayIterators.cxx
new file mode 100644
index 0000000..5575a1b
--- /dev/null
+++ b/Common/Core/Testing/Cxx/TestDataArrayIterators.cxx
@@ -0,0 +1,133 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestDataArrayIterators.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkFloatArray.h"
+#include "vtkTimerLog.h"
+#include "vtkTypedDataArray.h"
+#include "vtkTypedDataArrayIterator.h"
+#include "vtkNew.h"
+
+#include <assert.h>
+#include <iostream>
+
+// undefine this to print benchmark results:
+#define SILENT
+
+int TestDataArrayIterators(int, char *[])
+{
+  vtkIdType numComps = 4;
+  vtkIdType numValues = 100000000; // 100 million values
+  assert(numValues % numComps == 0);
+  vtkIdType numTuples = numValues / numComps;
+
+  vtkNew<vtkFloatArray> arrayContainer;
+  vtkFloatArray *array = arrayContainer.GetPointer();
+  array->SetNumberOfComponents(numComps);
+  array->SetNumberOfTuples(numTuples);
+  for (vtkIdType i = 0; i < numValues; ++i)
+    {
+    // Just fill with consistent data
+    array->SetValue(i, i % 97);
+    }
+
+  // should be vtkDataArrayTemplate<float>::Iterator (float*):
+  vtkFloatArray::Iterator datBegin = array->Begin();
+  vtkFloatArray::Iterator datIter = array->Begin();
+  if (typeid(datBegin) != typeid(float*))
+    {
+    std::cerr << "Error: vtkFloatArray::Iterator is not a float*.";
+    return EXIT_FAILURE;
+    }
+
+  // should be vtkTypedDataArrayIterator<float>:
+  vtkTypedDataArray<float>::Iterator tdaBegin =
+      vtkTypedDataArray<float>::FastDownCast(array)->Begin();
+  vtkTypedDataArray<float>::Iterator tdaIter =
+      vtkTypedDataArray<float>::FastDownCast(array)->Begin();
+  if (typeid(tdaBegin) != typeid(vtkTypedDataArrayIterator<float>))
+    {
+    std::cerr << "Error: vtkTypedDataArray<float>::Iterator is not a "
+                 "vtkTypedDataArrayIterator<float>.";
+    return EXIT_FAILURE;
+    }
+
+  // Validate that the iterators return the same values from operator[] and
+  // operator* as GetValue.
+  for (vtkIdType i = 0; i < numValues; ++i)
+    {
+    float lookup = array->GetValue(i);
+    if (lookup != datBegin[i] || lookup != tdaBegin[i] ||
+        lookup != *datIter    || lookup != *tdaIter)
+      {
+      std::cerr << "Mismatch at " << i << ":"
+                << " GetValue(i)=" << lookup
+                << " datBegin[i]=" << datBegin[i]
+                << " tdaBegin[i]=" << tdaBegin[i]
+                << " *datIter=" << *datIter
+                << " *tdaIter=" << *tdaIter
+                << std::endl;
+      return EXIT_FAILURE;
+      }
+    ++datIter;
+    ++tdaIter;
+    }
+
+#ifndef SILENT
+  // Iterator timings.
+  vtkNew<vtkTimerLog> timer;
+
+  // Lookup:
+  float lookupSum = 0.f;
+  timer->StartTimer();
+  for (vtkIdType i = 0; i < numValues; ++i)
+    {
+    lookupSum += array->GetValueReference(i);
+    }
+  timer->StopTimer();
+  double lookupTime = timer->GetElapsedTime();
+
+  // Scalar iterator:
+  float datSum = 0.f;
+  timer->StartTimer();
+  vtkFloatArray::Iterator datEnd = array->End();
+  while (datBegin != datEnd)
+    {
+    datSum += *datBegin++;
+    }
+  timer->StopTimer();
+  double datTime = timer->GetElapsedTime();
+
+  // vtkTypedDataArrayIterator:
+  vtkTypedDataArray<float>::Iterator tdaEnd =
+      vtkTypedDataArray<float>::FastDownCast(array)->End();
+  float tdaSum = 0.f;
+  timer->StartTimer();
+  while (tdaBegin != tdaEnd)
+    {
+    tdaSum += *tdaBegin++;
+    }
+  timer->StopTimer();
+  double tdaTime = timer->GetElapsedTime();
+
+  std::cout << "GetValue time, sum: "
+            << lookupTime << ", " << lookupSum << std::endl;
+  std::cout << "dat time, sum:      "
+            << datTime << ", " << datSum << std::endl;
+  std::cout << "tda time, sum:      "
+            << tdaTime << ", " << tdaSum << std::endl;
+#endif
+
+  return EXIT_SUCCESS;
+}
diff --git a/Common/Core/Testing/Cxx/TestGarbageCollector.cxx b/Common/Core/Testing/Cxx/TestGarbageCollector.cxx
index 76490ec..7b8ca99 100644
--- a/Common/Core/Testing/Cxx/TestGarbageCollector.cxx
+++ b/Common/Core/Testing/Cxx/TestGarbageCollector.cxx
@@ -69,7 +69,7 @@ private:
 
 // A callback that reports when it is called.
 static int called = 0;
-void MyDeleteCallback(vtkObject*, unsigned long, void*, void*)
+static void MyDeleteCallback(vtkObject*, unsigned long, void*, void*)
 {
   called = 1;
 }
diff --git a/Common/Core/Testing/Cxx/TestNew.cxx b/Common/Core/Testing/Cxx/TestNew.cxx
index 759187a..7ca963d 100644
--- a/Common/Core/Testing/Cxx/TestNew.cxx
+++ b/Common/Core/Testing/Cxx/TestNew.cxx
@@ -57,7 +57,7 @@ int TestNew(int,char *[])
   if (si == 0)
     {
     vtkNew<vtkIntArray> i;
-    si = i.GetPointer();
+    si = i.Get();
     }
   if (si->GetReferenceCount() != 1)
     {
diff --git a/Common/Core/Testing/Cxx/TestSMP.cxx b/Common/Core/Testing/Cxx/TestSMP.cxx
new file mode 100644
index 0000000..7b555d0
--- /dev/null
+++ b/Common/Core/Testing/Cxx/TestSMP.cxx
@@ -0,0 +1,139 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSMP.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSMPThreadLocal.h"
+#include "vtkNew.h"
+#include "vtkObject.h"
+#include "vtkObjectFactory.h"
+#include "vtkSMPTools.h"
+#include "vtkSMPThreadLocalObject.h"
+
+static const int Target = 10000;
+
+class ARangeFunctor
+{
+public:
+  vtkSMPThreadLocal<int> Counter;
+
+  ARangeFunctor(): Counter(0)
+  {
+  }
+
+  void operator()(vtkIdType begin, vtkIdType end)
+  {
+    for (int i=begin; i<end; i++)
+      this->Counter.Local()++;
+  }
+};
+
+class MyVTKClass : public vtkObject
+{
+  int Value;
+
+  MyVTKClass() : Value(0)
+  {
+  }
+
+public:
+  vtkTypeMacro(MyVTKClass, vtkObject);
+  static MyVTKClass* New();
+
+  void SetInitialValue(int value)
+  {
+    this->Value = value;
+  }
+
+  int GetValue()
+  {
+    return this->Value;
+  }
+
+  void Increment()
+  {
+    this->Value++;
+  }
+};
+
+vtkStandardNewMacro(MyVTKClass);
+
+class InitializableFunctor
+{
+public:
+  vtkSMPThreadLocalObject<MyVTKClass> CounterObject;
+
+  void Initialize()
+  {
+    CounterObject.Local()->SetInitialValue(5);
+  }
+
+  void operator()(vtkIdType begin, vtkIdType end)
+  {
+    for (int i=begin; i<end; i++)
+      this->CounterObject.Local()->Increment();
+  }
+
+  void Reduce()
+  {
+  }
+
+};
+
+int TestSMP(int, char*[])
+{
+  //vtkSMPTools::Initialize(8);
+
+  ARangeFunctor functor1;
+
+  vtkSMPTools::For(0, Target, functor1);
+
+  vtkSMPThreadLocal<int>::iterator itr1 = functor1.Counter.begin();
+  vtkSMPThreadLocal<int>::iterator end1 = functor1.Counter.end();
+
+  int total = 0;
+  while(itr1 != end1)
+    {
+    total += *itr1;
+    ++itr1;
+    }
+
+  if (total != Target)
+    {
+    cerr << "Error: ARangeFunctor did not generate " << Target << endl;
+    return 1;
+    }
+
+  InitializableFunctor functor2;
+
+  vtkSMPTools::For(0, Target, functor2);
+
+  vtkSMPThreadLocalObject<MyVTKClass>::iterator itr2 = functor2.CounterObject.begin();
+  vtkSMPThreadLocalObject<MyVTKClass>::iterator end2 = functor2.CounterObject.end();
+
+  int newTarget = Target;
+  total = 0;
+  while(itr2 != end2)
+    {
+    newTarget += 5; // This is the initial value of each object
+    total += (*itr2)->GetValue();
+    ++itr2;
+    }
+
+  if (total != newTarget)
+    {
+    cerr << "Error: InitializableRangeFunctor did not generate " << newTarget << endl;
+    return 1;
+    }
+
+  return 0;
+}
diff --git a/Common/Core/Testing/Cxx/TestScalarsToColors.cxx b/Common/Core/Testing/Cxx/TestScalarsToColors.cxx
new file mode 100644
index 0000000..4463747
--- /dev/null
+++ b/Common/Core/Testing/Cxx/TestScalarsToColors.cxx
@@ -0,0 +1,70 @@
+#include "vtkNew.h"
+#include "vtkScalarsToColors.h"
+#include "vtkStringArray.h"
+#include "vtkVariant.h"
+#include "vtkVariantArray.h"
+
+int TestScalarsToColors(int, char*[])
+{
+  vtkNew<vtkScalarsToColors> lut;
+  vtkNew<vtkStringArray> ann;
+  vtkNew<vtkVariantArray> val;
+
+  lut->SetAnnotations(NULL, NULL);
+  vtkStringArray* ann2 = lut->GetAnnotations();
+  vtkAbstractArray* val2 = lut->GetAnnotatedValues();
+  if (ann2 || val2)
+    {
+    cerr << "Annotations set to NULL but didn't return NULL\n";
+    return EXIT_FAILURE;
+    }
+
+  ann->InsertNextValue("Foo");
+  val->InsertNextValue(vtkVariant(10.3));
+  lut->SetAnnotations(val.GetPointer(), ann.GetPointer());
+  ann2 = lut->GetAnnotations();
+  val2 = lut->GetAnnotatedValues();
+  if (!ann2 || !val2)
+    {
+    cerr << "Annotations set to non-NULL but returned NULL\n";
+    return EXIT_FAILURE;
+    }
+
+  int idx = lut->GetAnnotatedValueIndex(10.3);
+  if (idx != 0)
+    {
+    cerr << "Could not find annotated value 10.3.\n";
+    return EXIT_FAILURE;
+    }
+
+  idx = lut->GetAnnotatedValueIndex("Narf");
+  if (idx >= 0)
+    {
+    cerr << "Found unexpected annotated value \"Narf\".\n";
+    return EXIT_FAILURE;
+    }
+
+  ann->InsertNextValue("Not hardly!");
+  val->InsertNextValue("Narf");
+  ann->InsertNextValue("Fezzik");
+  val->InsertNextValue(vtkVariant(20));
+  lut->SetAnnotations(val.GetPointer(), ann.GetPointer());
+
+  idx = lut->GetAnnotatedValueIndex("Narf");
+  if (idx != 1)
+    {
+    cerr << "Couldn't find newly-annotated value (\"Narf\").\n";
+    return EXIT_FAILURE;
+    }
+
+  lut->SetAnnotations(NULL, NULL);
+  ann2 = lut->GetAnnotations();
+  val2 = lut->GetAnnotatedValues();
+  if (ann2 || val2)
+    {
+    cerr << "Annotations again set to NULL but didn't return NULL\n";
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Common/Core/Testing/Cxx/TestWeakPointer.cxx b/Common/Core/Testing/Cxx/TestWeakPointer.cxx
index 685268e..be54b41 100644
--- a/Common/Core/Testing/Cxx/TestWeakPointer.cxx
+++ b/Common/Core/Testing/Cxx/TestWeakPointer.cxx
@@ -107,7 +107,7 @@ int TestWeakPointer(int,char *[])
     cerr << "da2.GetPointer() is NULL\n";
     rval = 1;
     }
-  if (da3.GetPointer() != 0)
+  if (da3.Get() != 0)
     {
     cerr << "da3.GetPointer() is not NULL\n";
     rval = 1;
diff --git a/Common/Core/Testing/Cxx/TestXMLFileOutputWindow.cxx b/Common/Core/Testing/Cxx/TestXMLFileOutputWindow.cxx
index b61b44b..d42db48 100644
--- a/Common/Core/Testing/Cxx/TestXMLFileOutputWindow.cxx
+++ b/Common/Core/Testing/Cxx/TestXMLFileOutputWindow.cxx
@@ -76,42 +76,26 @@ int TestXMLFileOutputWindow(int argc,char *argv[])
 
   // Now, compare the default and specified files
   // Read the default XML file
-  std::ifstream dfin("vtkMessageLog.xml", std::ios::in);
+  std::ifstream dfin("vtkMessageLog.xml");
+  std::string def((std::istreambuf_iterator<char>(dfin)),
+                  std::istreambuf_iterator<char>());
+
   if (dfin.fail())
     {
-    std::cout << argv[0] << ": Cannot open " << "vtkMessageLog.xml" << std::endl;
+    std::cout << argv[0] << ": Cannot open vtkMessageLog.xml" << std::endl;
     return EXIT_FAILURE;
     }
 
-  // Get the length of the file
-  dfin.seekg (0, std::ios::end);
-  const size_t dlen = dfin.tellg();
-  dfin.seekg (0, std::ios::beg);
-  char * defXML = new char[dlen+1];
-  dfin.read (defXML, dlen);
-  defXML[dlen] = '\0';
+  std::ifstream sfin(argv[1]);
+  std::string specified((std::istreambuf_iterator<char>(sfin)),
+                        std::istreambuf_iterator<char>());
 
-  // Read the specified XML file
-  std::ifstream sfin(argv[1], std::ios::in);
   if (sfin.fail())
     {
     std::cout << argv[0] << ": Cannot open " << argv[1] << std::endl;
     return EXIT_FAILURE;
     }
 
-  // Get the length of the file
-  sfin.seekg (0, std::ios::end);
-  const size_t slen = sfin.tellg();
-  sfin.seekg (0, std::ios::beg);
-  char * specifiedXML = new char[slen+1];
-  sfin.read (specifiedXML, slen);
-  specifiedXML[slen] = '\0';
-
-  std::string def(defXML);
-  delete [] defXML;
-  std::string specified(specifiedXML);
-  delete [] specifiedXML;
-
   if (def != specified)
     {
     std::cout << "The string in the default file ***********" << std::endl
diff --git a/Common/Core/Testing/Python/CMakeLists.txt b/Common/Core/Testing/Python/CMakeLists.txt
index 86d470f..da70e5c 100644
--- a/Common/Core/Testing/Python/CMakeLists.txt
+++ b/Common/Core/Testing/Python/CMakeLists.txt
@@ -8,6 +8,7 @@ if(VTK_PYTHON_EXE)
     TestMutable
     TestNumpySupport
     TestOperators
+    TestPointers
     TestStrings
     TestSubClass
     TestTerminationCrash
@@ -15,8 +16,6 @@ if(VTK_PYTHON_EXE)
     TestWeakref
     TestCommand
     )
-    add_test(NAME ${vtk-module}Python-${tfile}
-      COMMAND ${VTK_PYTHON_EXE}
-        ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.py)
+    vtk_add_test_python(${tfile}.py NO_DATA NO_VALID NO_OUTPUT)
   endforeach()
 endif()
diff --git a/Common/Core/Testing/Python/TestPointers.py b/Common/Core/Testing/Python/TestPointers.py
new file mode 100644
index 0000000..2538f1b
--- /dev/null
+++ b/Common/Core/Testing/Python/TestPointers.py
@@ -0,0 +1,29 @@
+"""Test the wrapping of parameters that are pointers to numeric types
+
+Created on Nov 11, 2013 by David Gobbi
+"""
+
+import sys
+import vtk
+from vtk.test import Testing
+
+class TestPointers(Testing.vtkTest):
+    def testBoolPointer(self):
+        v = vtk.vtkVariant("1")
+        bp = [False]
+        d = v.ToFloat(bp)
+        self.assertEqual(bp, [True])
+        v = vtk.vtkVariant("George")
+        d = v.ToFloat(bp)
+        self.assertEqual(bp, [False])
+
+    def testDoublePointer(self):
+        dp = [5.2]
+        vtk.vtkMath.ClampValue(dp, (-0.5, 0.5))
+        self.assertEqual(dp, [0.5])
+        dp = [5.2, 1.0, -0.2, 0.3, 10.0, 6.0]
+        vtk.vtkMath.ClampValues(dp, len(dp), (-0.5, 0.5), dp)
+        self.assertEqual(dp, [0.5, 0.5, -0.2, 0.3, 0.5, 0.5])
+
+if __name__ == "__main__":
+    Testing.main([(TestPointers, 'test')])
diff --git a/Common/Core/Testing/Tcl/CMakeLists.txt b/Common/Core/Testing/Tcl/CMakeLists.txt
index 9e19ad0..8b084e5 100644
--- a/Common/Core/Testing/Tcl/CMakeLists.txt
+++ b/Common/Core/Testing/Tcl/CMakeLists.txt
@@ -9,8 +9,5 @@ set(tests
   )
 
 foreach(test ${tests})
-  add_test(NAME ${vtk-module}Tcl-${test}
-         COMMAND ${VTK_TCL_EXE}
-         ${CMAKE_CURRENT_SOURCE_DIR}/${test}.tcl
-         -A ${VTK_SOURCE_DIR}/Wrapping/Tcl)
+  vtk_add_test_tcl(${test}.tcl NO_DATA NO_RT)
 endforeach()
diff --git a/Common/Core/Testing/Tcl/TestSetGet.tcl b/Common/Core/Testing/Tcl/TestSetGet.tcl
index 699f4ed..3763785 100644
--- a/Common/Core/Testing/Tcl/TestSetGet.tcl
+++ b/Common/Core/Testing/Tcl/TestSetGet.tcl
@@ -111,6 +111,9 @@ set classExceptions {
    vtkMathTextUtilities
    vtkMatplotlibMathTextUtilities
    vtkTextRenderer
+   vtkDataSetCellIterator
+   vtkPointSetCellIterator
+   vtkUnstructuredGridCellIterator
 }
 
 proc rtSetGetTest { fileid } {
diff --git a/Common/Core/module.cmake b/Common/Core/module.cmake
index 4549c34..d20c3da 100644
--- a/Common/Core/module.cmake
+++ b/Common/Core/module.cmake
@@ -1,7 +1,7 @@
 vtk_module(vtkCommonCore
   GROUPS
     StandAlone
-  DEPENDS
+  PRIVATE_DEPENDS
     vtksys
   TEST_DEPENDS
     vtkTestingCore
diff --git a/Common/Core/vtkAbstractArray.cxx b/Common/Core/vtkAbstractArray.cxx
index 57ec125..80a25e5 100644
--- a/Common/Core/vtkAbstractArray.cxx
+++ b/Common/Core/vtkAbstractArray.cxx
@@ -72,15 +72,17 @@ class vtkAbstractArray::vtkInternalComponentNames : public vtkInternalComponentN
 
 //----------------------------------------------------------------------------
 // Construct object with sane defaults.
-vtkAbstractArray::vtkAbstractArray(vtkIdType vtkNotUsed(numComp))
+vtkAbstractArray::vtkAbstractArray()
 {
   this->Size = 0;
   this->MaxId = -1;
   this->NumberOfComponents = 1;
   this->Name = NULL;
+  this->RebuildArray = false;
   this->Information = NULL;
-
   this->ComponentNames = NULL;
+
+  this->MaxDiscreteValues = vtkAbstractArray::MAX_DISCRETE_VALUES; //32
 }
 
 //----------------------------------------------------------------------------
@@ -255,6 +257,12 @@ void vtkAbstractArray::GetTuples(vtkIdType p1, vtkIdType p2,
 }
 
 //----------------------------------------------------------------------------
+bool vtkAbstractArray::HasStandardMemoryLayout()
+{
+  return true;
+}
+
+//----------------------------------------------------------------------------
 void vtkAbstractArray::DeepCopy( vtkAbstractArray* da )
 {
   if (da && da->HasInformation() && da!=this)
@@ -572,7 +580,8 @@ namespace
 template<typename T>
 bool AccumulateSampleValues(
   T* array, int nc, vtkIdType begin, vtkIdType end,
-  std::vector<std::set<T> >& uniques, std::set<std::vector<T> >& tupleUniques)
+  std::vector<std::set<T> >& uniques, std::set<std::vector<T> >& tupleUniques,
+  unsigned int maxDiscreteValues)
 {
   // number of discrete components remaining (tracked during iteration):
   int ndc = nc;
@@ -590,20 +599,20 @@ bool AccumulateSampleValues(
     // First, attempt a per-component insert.
     for (int j = 0; j < nc; ++ j)
       {
-      if (uniques[j].size() > vtkAbstractArray::MAX_DISCRETE_VALUES)
+      if (uniques[j].size() > maxDiscreteValues)
         continue;
       T& val(array[i * nc + j]);
       tuple[j] = val;
       result = uniques[j].insert(val);
       if (result.second)
         {
-        if (uniques[j].size() == vtkAbstractArray::MAX_DISCRETE_VALUES + 1)
+        if (uniques[j].size() == maxDiscreteValues + 1)
           {
           -- ndc;
           }
         }
       }
-    // Now, as long as no component has exceeded MAX_DISCRETE_VALUES unique
+    // Now, as long as no component has exceeded maxDiscreteValues unique
     // values, it is worth seeing whether the tuple as a whole is unique:
     if ( nc > 1 && ndc == nc )
       {
@@ -618,7 +627,8 @@ bool AccumulateSampleValues(
 template<typename U>
 void SampleProminentValues(
   std::vector<std::vector<vtkVariant> >& uniques, vtkIdType maxId,
-  int nc, vtkIdType nt, int blockSize, vtkIdType numberOfBlocks, U* ptr)
+  int nc, vtkIdType nt, int blockSize, vtkIdType numberOfBlocks, U* ptr,
+  unsigned int maxDiscreteValues)
 {
   std::vector<std::set<U> > typeSpecificUniques;
   std::set<std::vector<U> > typeSpecificUniqueTuples;
@@ -629,7 +639,7 @@ void SampleProminentValues(
     { // Awwww, just do the whole array already!
     AccumulateSampleValues(
       ptr, nc, 0, nt,
-      typeSpecificUniques, typeSpecificUniqueTuples);
+      typeSpecificUniques, typeSpecificUniqueTuples, maxDiscreteValues);
     }
   else
     { // Choose random blocks
@@ -656,7 +666,7 @@ void SampleProminentValues(
       endTuple = endTuple < nt ? endTuple : nt;
       bool endEarly = AccumulateSampleValues(
         ptr, nc, startTuple, endTuple,
-        typeSpecificUniques, typeSpecificUniqueTuples);
+        typeSpecificUniques, typeSpecificUniqueTuples, maxDiscreteValues);
       if (endEarly)
         break;
       }
@@ -694,7 +704,7 @@ void vtkAbstractArray::UpdateDiscreteValueSet(
   // prominence P, we sample N blocks of M tuples each, with
   // M*N = f(T; P, U) and f some sublinear function of T.
   // If every component plus all components taken together each have more than
-  // MAX_DISCRETE_VALUES distinct values, then we exit early.
+  // MaxDiscreteValues distinct values, then we exit early.
   // M is chosen based on the number of bytes per tuple to maximize use of a
   // cache line (assuming a 64-byte cache line until kwsys::SystemInformation
   // or the like can provide a platform-independent way to query it).
@@ -737,11 +747,12 @@ void vtkAbstractArray::UpdateDiscreteValueSet(
   vtkIdType numberOfBlocks =
     numberOfSampleTuples / blockSize +
     (numberOfSampleTuples % blockSize ? 1 : 0);
-  if (numberOfBlocks * blockSize < 2 * MAX_DISCRETE_VALUES)
+  if (static_cast<unsigned int>(numberOfBlocks * blockSize) <
+      2 * this->MaxDiscreteValues)
     {
     numberOfBlocks =
-      2 * MAX_DISCRETE_VALUES / blockSize +
-      (2 * MAX_DISCRETE_VALUES % blockSize ? 1 : 0);
+      2 * this->MaxDiscreteValues / blockSize +
+      (2 * this->MaxDiscreteValues % blockSize ? 1 : 0);
     }
   // II. Sample the array.
   std::vector<std::vector<vtkVariant> > uniques(nc > 1 ? nc + 1 : nc);
@@ -750,7 +761,8 @@ void vtkAbstractArray::UpdateDiscreteValueSet(
     vtkSuperExtraExtendedTemplateMacro(
       SampleProminentValues(
         uniques, this->MaxId, nc, nt, blockSize, numberOfBlocks,
-        static_cast<VTK_TT*>(this->GetVoidPointer(0))));
+        static_cast<VTK_TT*>(this->GetVoidPointer(0)),
+        this->MaxDiscreteValues));
   default:
     vtkErrorMacro("Array type " << this->GetClassName() << " not supported.");
     break;
@@ -761,7 +773,7 @@ void vtkAbstractArray::UpdateDiscreteValueSet(
   vtkInformationVector* iv;
   for (c = 0; c < nc; ++c)
     {
-    if (uniques[c].size() <= MAX_DISCRETE_VALUES)
+    if (uniques[c].size() <= this->MaxDiscreteValues)
       {
       ++ numberOfComponentsWithProminentValues;
       iv = this->GetInformation()->Get(PER_COMPONENT());
@@ -787,7 +799,7 @@ void vtkAbstractArray::UpdateDiscreteValueSet(
       }
     }
   if (nc > 1 &&
-    static_cast<int>(uniques[nc].size()) <= MAX_DISCRETE_VALUES * nc)
+    uniques[nc].size() <= this->MaxDiscreteValues * nc)
     {
     ++ numberOfComponentsWithProminentValues;
     this->GetInformation()->Set(
diff --git a/Common/Core/vtkAbstractArray.h b/Common/Core/vtkAbstractArray.h
index 2e33609..02bd28e 100644
--- a/Common/Core/vtkAbstractArray.h
+++ b/Common/Core/vtkAbstractArray.h
@@ -141,6 +141,13 @@ public:
   virtual void InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* source) = 0;
 
   // Description:
+  // Copy the tuples indexed in srcIds from the source array to the tuple
+  // locations indexed by dstIds in this array.
+  // Note that memory allocation is performed as necessary to hold the data.
+  virtual void InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                            vtkAbstractArray* source) = 0;
+
+  // Description:
   // Insert the jth tuple in the source array, at the end in this array.
   // Note that memory allocation is performed as necessary to hold the data.
   // Returns the location at which the data was inserted.
@@ -159,8 +166,26 @@ public:
   virtual void GetTuples(vtkIdType p1, vtkIdType p2, vtkAbstractArray *output);
 
   // Description:
+  // Returns true if this array uses the standard memory layout defined in the
+  // VTK user guide, e.g. a contiguous array:
+  // {t1c1, t1c2, t1c3, ... t1cM, t2c1, ... tNcM}
+  // where t1c2 is the second component of the first tuple.
+  //
+  // If the array does not have the standard memory layout GetVoidPointer should
+  // not be used, as a deep copy of the data must be made. Instead, use a
+  // vtkTypedDataArrayIterator to get pointer-like semantics that can safely
+  // access the data values.
+  //
+  // Subclasses that return false here must derive from vtkMappedDataArray
+  // to ensure that they will work safely with the rest of the pipeline.
+  virtual bool HasStandardMemoryLayout();
+
+  // Description:
   // Return a void pointer. For image pipeline interface and other
   // special pointer manipulation.
+  // If the data is simply being iterated over, consider using
+  // vtkDataArrayIteratorMacro for safety and efficiency, rather than using this
+  // member directly.
   virtual void *GetVoidPointer(vtkIdType id) = 0;
 
   // Description:
@@ -435,13 +460,35 @@ public:
   // relative frequency a value is allowed to have and still appear on the list.
   static vtkInformationDoubleVectorKey* DISCRETE_VALUE_SAMPLE_PARAMETERS();
 
+  // Deprecated.  Use vtkAbstractArray::MaxDiscreteValues instead.
   enum {
     MAX_DISCRETE_VALUES = 32
   };
 
+  // Description:
+  // Get/Set the maximum number of prominent values this array may contain
+  // before it is considered continuous.  Default value is 32.
+  vtkGetMacro(MaxDiscreteValues, unsigned int);
+  vtkSetMacro(MaxDiscreteValues, unsigned int);
+
+  enum {
+    AbstractArray = 0,
+    DataArray,
+    TypedDataArray,
+    DataArrayTemplate,
+    MappedDataArray
+    };
+
+  // Description:
+  // Method for type-checking in FastDownCast implementations.
+  virtual int GetArrayType()
+  {
+    return AbstractArray;
+  }
+
 protected:
   // Construct object with default tuple dimension (number of components) of 1.
-  vtkAbstractArray(vtkIdType numComp=1);
+  vtkAbstractArray();
   ~vtkAbstractArray();
 
   // Description:
@@ -466,6 +513,9 @@ protected:
   vtkIdType MaxId;        // maximum index inserted thus far
   int NumberOfComponents; // the number of components per tuple
 
+  // maximum number of prominent values before array is considered continuous.
+  unsigned int MaxDiscreteValues;
+
   char* Name;
 
   bool RebuildArray;      // whether to rebuild the fast lookup data structure.
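
A sketch of the guard that the new HasStandardMemoryLayout() method enables,
using the FastDownCast/Begin() iterator API exercised by
TestDataArrayIterators.cxx above (illustrative only):

    #include "vtkTypedDataArray.h"

    // Sum a single-component float array without assuming contiguous storage.
    double SumFloats(vtkTypedDataArray<float>* array)
    {
      double sum = 0.0;
      if (array->HasStandardMemoryLayout())
        {
        // Contiguous layout: raw pointer access is safe.
        float* p = static_cast<float*>(array->GetVoidPointer(0));
        for (vtkIdType i = 0; i < array->GetNumberOfTuples(); ++i)
          {
          sum += p[i];
          }
        }
      else
        {
        // Mapped array: iterate instead of forcing a deep copy.
        vtkTypedDataArray<float>::Iterator it = array->Begin();
        vtkTypedDataArray<float>::Iterator end = array->End();
        for (; it != end; ++it)
          {
          sum += *it;
          }
        }
      return sum;
    }
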
diff --git a/Common/Core/vtkArrayIteratorTemplate.h b/Common/Core/vtkArrayIteratorTemplate.h
index 8ffbbdc..cb79fa5 100644
--- a/Common/Core/vtkArrayIteratorTemplate.h
+++ b/Common/Core/vtkArrayIteratorTemplate.h
@@ -24,10 +24,12 @@
 #define __vtkArrayIteratorTemplate_h
 
 #include "vtkCommonCoreModule.h" // For export macro
+#include "vtkTypeTemplate.h" // For templated vtkObject API
 #include "vtkArrayIterator.h"
 
 template <class T>
-class VTKCOMMONCORE_EXPORT vtkArrayIteratorTemplate : public vtkArrayIterator
+class VTKCOMMONCORE_EXPORT vtkArrayIteratorTemplate :
+    public vtkTypeTemplate<vtkArrayIteratorTemplate<T>, vtkArrayIterator>
 {
 public:
   static vtkArrayIteratorTemplate<T>* New();
diff --git a/Common/Core/vtkArrayPrint.h b/Common/Core/vtkArrayPrint.h
index 68067fd..4d8a0e3 100644
--- a/Common/Core/vtkArrayPrint.h
+++ b/Common/Core/vtkArrayPrint.h
@@ -29,19 +29,19 @@
 
 #include "vtkTypedArray.h"
 
-/// \relates vtkArrayPrint
+/// @relates vtkArrayPrint
 /// Serializes the contents of an array to a stream as a series of
 /// coordinates.  For 2D arrays of double values, the output is compatible
 /// with the MatrixMarket "Coordinate Text File" format.
 template<typename T>
 void vtkPrintCoordinateFormat(ostream& stream, vtkTypedArray<T>* array);
 
-/// \relates vtkArrayPrint
+/// @relates vtkArrayPrint
 /// Serializes the contents of a matrix to a stream in human-readable form.
 template<typename T>
 void vtkPrintMatrixFormat(ostream& stream, vtkTypedArray<T>* matrix);
 
-/// \relates vtkArrayPrint
+/// @relates vtkArrayPrint
 /// Serializes the contents of a vector to a stream in human-readable form.
 template<typename T>
 void vtkPrintVectorFormat(ostream& stream, vtkTypedArray<T>* vector);
diff --git a/Common/Core/vtkArrayWeights.cxx b/Common/Core/vtkArrayWeights.cxx
index 751a484..60639c8 100644
--- a/Common/Core/vtkArrayWeights.cxx
+++ b/Common/Core/vtkArrayWeights.cxx
@@ -67,6 +67,11 @@ vtkArrayWeights::vtkArrayWeights(double i, double j, double k, double l)
   this->Storage->Storage[3] = l;
 }
 
+vtkArrayWeights::vtkArrayWeights(const vtkArrayWeights& other)
+{
+  this->Storage = new vtkArrayWeightsStorage(*other.Storage);
+}
+
 // ----------------------------------------------------------------------------
  vtkArrayWeights::~vtkArrayWeights()
 {
@@ -93,3 +98,9 @@ const double& vtkArrayWeights::operator[](vtkIdType i) const
 {
   return this->Storage->Storage[static_cast<size_t>(i)];
 }
+
+vtkArrayWeights& vtkArrayWeights::operator=(const vtkArrayWeights& other)
+{
+  *this->Storage = *other.Storage;
+  return *this;
+}
diff --git a/Common/Core/vtkArrayWeights.h b/Common/Core/vtkArrayWeights.h
index 6604709..57aef69 100644
--- a/Common/Core/vtkArrayWeights.h
+++ b/Common/Core/vtkArrayWeights.h
@@ -54,6 +54,10 @@ public:
   vtkArrayWeights();
 
   // Description:
+  // Copy the weights from another object.
+  vtkArrayWeights(const vtkArrayWeights& other);
+
+  // Description:
   // Create a collection containing one weight.
   vtkArrayWeights(double i);
 
@@ -91,6 +95,10 @@ public:
   // Accesses the i-th weight in the collection.
   const double& operator[](vtkIdType) const;
 
+  // Description:
+  // Assignment operator.
+  vtkArrayWeights& operator=(const vtkArrayWeights& other);
+
 protected:
   vtkArrayWeightsStorage *Storage;
 };
diff --git a/Common/Core/vtkAutoInit.h b/Common/Core/vtkAutoInit.h
index f840843..7947d51 100644
--- a/Common/Core/vtkAutoInit.h
+++ b/Common/Core/vtkAutoInit.h
@@ -16,6 +16,7 @@
 #define __vtkAutoInit_h
 
 #include "vtkDebugLeaksManager.h" // DebugLeaks exists longer.
+#include "vtkTimeStamp.h" // Here so that TimeStamp Schwarz initializer works
 
 #define VTK_AUTOINIT(M) VTK_AUTOINIT0(M,M##_AUTOINIT)
 #define VTK_AUTOINIT0(M,T) VTK_AUTOINIT1(M,T)
@@ -69,5 +70,28 @@
 #define VTK_AUTOINIT_DESTRUCT(M) \
   M##_AutoInit_Destruct();
 
+// Description:
+// Initialize the named module, ensuring its object factory is correctly
+// registered and unregistered. This call must be made in global scope in the
+// translation unit of your executable (which can include a shared library, but
+// will not work as expected in a static library).
+//
+// @code{.cpp}
+// #include "vtkAutoInit.h"
+// VTK_MODULE_INIT(vtkRenderingOpenGL);
+// @endcode
+//
+// The above snippet if included in the global scope will ensure the object
+// factories for vtkRenderingOpenGL are correctly registered and unregistered.
+#define VTK_MODULE_INIT(M) \
+  VTK_AUTOINIT_DECLARE(M) \
+  static struct M##_ModuleInit {                                           \
+    /* Call <mod>_AutoInit_Construct during initialization.  */            \
+    M##_ModuleInit()  { VTK_AUTOINIT_CONSTRUCT(M) }                      \
+    /* Call <mod>_AutoInit_Destruct during finalization.  */               \
+    ~M##_ModuleInit() { VTK_AUTOINIT_DESTRUCT(M)  }                      \
+  } M##_ModuleInit_Instance;
+
+
 #endif
 // VTK-HeaderTest-Exclude: vtkAutoInit.h
diff --git a/Common/Core/vtkBitArray.cxx b/Common/Core/vtkBitArray.cxx
index 064add5..1029aa8 100644
--- a/Common/Core/vtkBitArray.cxx
+++ b/Common/Core/vtkBitArray.cxx
@@ -49,9 +49,8 @@ vtkStandardNewMacro(vtkBitArray);
 
 //----------------------------------------------------------------------------
 // Instantiate object.
-vtkBitArray::vtkBitArray(vtkIdType numComp)
+vtkBitArray::vtkBitArray()
 {
-  this->NumberOfComponents = static_cast<int>(numComp < 1 ? 1 : numComp);
   this->Array = NULL;
   this->TupleSize = 3;
   this->Tuple = new double[this->TupleSize]; //used for conversion
@@ -67,10 +66,7 @@ vtkBitArray::~vtkBitArray()
     delete [] this->Array;
     }
   delete [] this->Tuple;
-  if (this->Lookup)
-    {
-    delete this->Lookup;
-    }
+  delete this->Lookup;
 }
 
 //----------------------------------------------------------------------------
@@ -385,6 +381,43 @@ void vtkBitArray::InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* source
 }
 
 //----------------------------------------------------------------------------
+void vtkBitArray::InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                               vtkAbstractArray *source)
+{
+  vtkBitArray* ba = vtkBitArray::SafeDownCast(source);
+  if (!ba)
+    {
+    vtkWarningMacro("Input and output arrays types do not match.");
+    return;
+    }
+
+  if (ba->NumberOfComponents != this->NumberOfComponents)
+    {
+    vtkWarningMacro("Number of components do not match.");
+    return;
+    }
+
+  vtkIdType numIds = dstIds->GetNumberOfIds();
+  if (srcIds->GetNumberOfIds() != numIds)
+    {
+    vtkWarningMacro("Input and output id array sizes do not match.");
+    return;
+    }
+
+  for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+    {
+    vtkIdType numComp = this->NumberOfComponents;
+    vtkIdType srcLoc = srcIds->GetId(idIndex) * this->NumberOfComponents;
+    vtkIdType dstLoc = dstIds->GetId(idIndex) * this->NumberOfComponents;
+    while (numComp-- > 0)
+      {
+      this->InsertValue(dstLoc++, ba->GetValue(srcLoc++));
+      }
+    }
+    this->DataChanged();
+}
+
+//----------------------------------------------------------------------------
 // Description:
 // Insert the jth tuple in the source array, at the end in this array.
 // Note that memory allocation is performed as necessary to hold the data.
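
A brief sketch of the new InsertTuples() entry point (illustrative; the id
values are made up):

    #include "vtkBitArray.h"
    #include "vtkIdList.h"
    #include "vtkNew.h"

    // Copy bits 0 and 2 of src into slots 5 and 6 of dst.
    void CopySelectedBits(vtkBitArray* src, vtkBitArray* dst)
    {
      vtkNew<vtkIdList> srcIds;
      srcIds->InsertNextId(0);
      srcIds->InsertNextId(2);

      vtkNew<vtkIdList> dstIds;
      dstIds->InsertNextId(5);
      dstIds->InsertNextId(6);

      // Component counts must match; dst grows as needed.
      dst->InsertTuples(dstIds.GetPointer(), srcIds.GetPointer(), src);
    }
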
diff --git a/Common/Core/vtkBitArray.h b/Common/Core/vtkBitArray.h
index ce00d72..e45aaab 100644
--- a/Common/Core/vtkBitArray.h
+++ b/Common/Core/vtkBitArray.h
@@ -64,6 +64,13 @@ public:
   virtual void InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* source);
 
   // Description:
+  // Copy the tuples indexed in srcIds from the source array to the tuple
+  // locations indexed by dstIds in this array.
+  // Note that memory allocation is performed as necessary to hold the data.
+  virtual void InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                            vtkAbstractArray *source);
+
+  // Description:
   // Insert the jth tuple in the source array, at the end in this array.
   // Note that memory allocation is performed as necessary to hold the data.
   // Returns the location at which the data was inserted.
@@ -177,9 +184,11 @@ public:
   // the array supplied by the user.  Set save to 1 to keep the class
   // from deleting the array when it cleans up or reallocates memory.
   // The class uses the actual array provided; it does not copy the data
-  // from the suppled array. If save 0, the array must have been allocated
+  // from the supplied array. If save 0, the array must have been allocated
   // with new[] not malloc.
+#ifndef __WRAP__
   void SetArray(unsigned char* array, vtkIdType size, int save);
+#endif
   void SetVoidArray(void *array, vtkIdType size, int save)
     {
       this->SetArray(static_cast<unsigned char *>(array), size, save);
@@ -212,7 +221,7 @@ public:
   virtual void ClearLookup();
 
 protected:
-  vtkBitArray(vtkIdType numComp=1);
+  vtkBitArray();
   ~vtkBitArray();
 
   unsigned char *Array;   // pointer to data
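
A minimal sketch of the vtkBitArray::InsertTuples signature introduced above; the
function name, array contents, and id lists are illustrative only:

    #include "vtkBitArray.h"
    #include "vtkIdList.h"
    #include "vtkNew.h"

    void CopySelectedBits()
    {
      vtkNew<vtkBitArray> src;
      src->SetNumberOfComponents(1);
      for (int i = 0; i < 4; ++i)
        {
        src->InsertNextValue(i % 2); // 0, 1, 0, 1
        }

      vtkNew<vtkIdList> srcIds;      // tuples to read: 1 and 3
      srcIds->InsertNextId(1);
      srcIds->InsertNextId(3);
      vtkNew<vtkIdList> dstIds;      // tuples to write: 0 and 5
      dstIds->InsertNextId(0);
      dstIds->InsertNextId(5);

      vtkNew<vtkBitArray> dst;
      dst->SetNumberOfComponents(1);
      // Allocates as needed; both id lists must have the same length.
      dst->InsertTuples(dstIds.GetPointer(), srcIds.GetPointer(),
                        src.GetPointer());
    }
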
diff --git a/Common/Core/vtkBitArrayIterator.cxx b/Common/Core/vtkBitArrayIterator.cxx
index 05b94f8..0b29bd4 100644
--- a/Common/Core/vtkBitArrayIterator.cxx
+++ b/Common/Core/vtkBitArrayIterator.cxx
@@ -31,10 +31,7 @@ vtkBitArrayIterator::vtkBitArrayIterator()
 vtkBitArrayIterator::~vtkBitArrayIterator()
 {
   this->SetArray(0);
-  if (this->Tuple)
-    {
-    delete [] this->Tuple;
-    }
+  delete [] this->Tuple;
 }
 
 //-----------------------------------------------------------------------------
diff --git a/Common/Core/vtkCharArray.cxx b/Common/Core/vtkCharArray.cxx
index 07cb20b..bf78179 100644
--- a/Common/Core/vtkCharArray.cxx
+++ b/Common/Core/vtkCharArray.cxx
@@ -28,7 +28,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(char);
 vtkStandardNewMacro(vtkCharArray);
 
 //----------------------------------------------------------------------------
-vtkCharArray::vtkCharArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkCharArray::vtkCharArray()
 {
 }
 
diff --git a/Common/Core/vtkCharArray.h b/Common/Core/vtkCharArray.h
index 5aaf574..f753143 100644
--- a/Common/Core/vtkCharArray.h
+++ b/Common/Core/vtkCharArray.h
@@ -31,85 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<char>
+#endif
 class VTKCOMMONCORE_EXPORT vtkCharArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkCharArray* New();
   vtkTypeMacro(vtkCharArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_CHAR; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, char* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const char* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const char* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const char* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  char GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, char value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, char f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(char f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-//BTX
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  char *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-  void GetValueRange(char range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  char *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-  void GetValueRange(char range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(char);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -118,35 +58,9 @@ public:
   // Description:
   // Get the maximum data value in its native type.
   static char GetDataTypeValueMax() { return VTK_CHAR_MAX; }
-//ETX
-
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  char* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  char* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(char* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(char* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
 
 protected:
-  vtkCharArray(vtkIdType numComp=1);
+  vtkCharArray();
   ~vtkCharArray();
 
 private:
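
The forwarding methods removed above remain available through the template
superclass; vtkCreateWrappedArrayInterface only provides dummy declarations for
the wrappers. A small sketch of the unchanged typed API from C++ (function name
illustrative):

    #include "vtkCharArray.h"
    #include "vtkNew.h"

    void FillChars()
    {
      vtkNew<vtkCharArray> chars;
      chars->SetNumberOfValues(3);   // allocates and sets MaxId
      chars->SetValue(0, 'v');
      chars->SetValue(1, 't');
      chars->SetValue(2, 'k');
      char c = chars->GetValue(2);   // 'k'
      (void)c;
    }
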
diff --git a/Common/Core/vtkCollection.h b/Common/Core/vtkCollection.h
index fe88ab9..37034d0 100644
--- a/Common/Core/vtkCollection.h
+++ b/Common/Core/vtkCollection.h
@@ -35,7 +35,7 @@
 class vtkCollectionElement //;prevents pick-up by man page generator
 {
  public:
-  vtkCollectionElement():Item(NULL),Next(NULL) {};
+  vtkCollectionElement():Item(NULL),Next(NULL) {}
   vtkObject *Item;
   vtkCollectionElement *Next;
 };
diff --git a/Common/Core/vtkConditionVariable.h b/Common/Core/vtkConditionVariable.h
index 8162ea0..a04ce3f 100644
--- a/Common/Core/vtkConditionVariable.h
+++ b/Common/Core/vtkConditionVariable.h
@@ -134,6 +134,10 @@ public:
 
 protected:
   vtkConditionType   ConditionVariable;
+
+private:
+  vtkSimpleConditionVariable(const vtkSimpleConditionVariable& other); // no copy constructor
+  vtkSimpleConditionVariable& operator=(const vtkSimpleConditionVariable& rhs); // no copy assignment
 };
 
 //ETX
diff --git a/Common/Core/vtkConfigure.h.in b/Common/Core/vtkConfigure.h.in
index 434d1d3..d3e1799 100644
--- a/Common/Core/vtkConfigure.h.in
+++ b/Common/Core/vtkConfigure.h.in
@@ -40,6 +40,12 @@
 #cmakedefine VTK_USE_WIN32_THREADS
 # define VTK_MAX_THREADS @VTK_MAX_THREADS@
 
+/* Atomic operations */
+#cmakedefine VTK_HAVE_SYNC_BUILTINS
+#if defined(WIN32)
+ #cmakedefine VTK_HAS_INTERLOCKEDADD
+#endif
+
 /* Size of fundamental data types.  */
 /* Mac OS X uses two data models, ILP32 (in which integers, long integers,
    and pointers are 32-bit quantities) and LP64 (in which integers are 32-bit
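
A sketch (not taken from VTK) of how configure-time defines such as these are
typically consumed; the helper name below is hypothetical:

    #include "vtkConfigure.h"
    #if defined(VTK_HAS_INTERLOCKEDADD)
    # include "vtkWindows.h" // for InterlockedAdd / LONG
    #endif

    inline void AtomicIncrement(long &counter)
    {
    #if defined(VTK_HAVE_SYNC_BUILTINS)
      __sync_add_and_fetch(&counter, 1);                         // GCC/Clang builtin
    #elif defined(VTK_HAS_INTERLOCKEDADD)
      InterlockedAdd(reinterpret_cast<volatile LONG *>(&counter), 1); // Win32
    #else
      ++counter;                                                 // unsynchronized fallback
    #endif
    }
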
diff --git a/Common/Core/vtkCriticalSection.cxx b/Common/Core/vtkCriticalSection.cxx
index cb0d04a..d6e77b7 100644
--- a/Common/Core/vtkCriticalSection.cxx
+++ b/Common/Core/vtkCriticalSection.cxx
@@ -17,80 +17,6 @@
 
 vtkStandardNewMacro(vtkCriticalSection);
 
-// New for the SimpleCriticalSection
-vtkSimpleCriticalSection *vtkSimpleCriticalSection::New()
-{
-  return new vtkSimpleCriticalSection;
-}
-
-void vtkSimpleCriticalSection::Init()
-{
-#ifdef VTK_USE_SPROC
-  init_lock( &this->CritSec );
-#endif
-
-#ifdef VTK_USE_WIN32_THREADS
-  //this->MutexLock = CreateMutex( NULL, FALSE, NULL );
-  InitializeCriticalSection(&this->CritSec);
-#endif
-
-#ifdef VTK_USE_PTHREADS
-#ifdef VTK_HP_PTHREADS
-  pthread_mutex_init(&(this->CritSec), pthread_mutexattr_default);
-#else
-  pthread_mutex_init(&(this->CritSec), NULL);
-#endif
-#endif
-}
-
-
-// Destruct the vtkMutexVariable
-vtkSimpleCriticalSection::~vtkSimpleCriticalSection()
-{
-#ifdef VTK_USE_WIN32_THREADS
-  //CloseHandle(this->MutexLock);
-  DeleteCriticalSection(&this->CritSec);
-#endif
-
-#ifdef VTK_USE_PTHREADS
-  pthread_mutex_destroy( &this->CritSec);
-#endif
-}
-
-// Lock the vtkCriticalSection
-void vtkSimpleCriticalSection::Lock()
-{
-#ifdef VTK_USE_SPROC
-  spin_lock( &this->CritSec );
-#endif
-
-#ifdef VTK_USE_WIN32_THREADS
-  //WaitForSingleObject( this->MutexLock, INFINITE );
-  EnterCriticalSection(&this->CritSec);
-#endif
-
-#ifdef VTK_USE_PTHREADS
-  pthread_mutex_lock( &this->CritSec);
-#endif
-}
-
-// Unlock the vtkCriticalSection
-void vtkSimpleCriticalSection::Unlock()
-{
-#ifdef VTK_USE_SPROC
-  release_lock( &this->CritSec );
-#endif
-
-#ifdef VTK_USE_WIN32_THREADS
-  //ReleaseMutex( this->MutexLock );
-  LeaveCriticalSection(&this->CritSec);
-#endif
-
-#ifdef VTK_USE_PTHREADS
-  pthread_mutex_unlock( &this->CritSec);
-#endif
-}
-
 void vtkCriticalSection::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os, indent);
diff --git a/Common/Core/vtkCriticalSection.h b/Common/Core/vtkCriticalSection.h
index 7fe530a..b57f76a 100644
--- a/Common/Core/vtkCriticalSection.h
+++ b/Common/Core/vtkCriticalSection.h
@@ -35,75 +35,7 @@
 
 #include "vtkCommonCoreModule.h" // For export macro
 #include "vtkObject.h"
-
-//BTX
-
-#ifdef VTK_USE_SPROC
-#include <abi_mutex.h> // Needed for sproc implementation of mutex
-typedef abilock_t vtkCritSecType;
-#endif
-
-#if defined(VTK_USE_PTHREADS) || defined(VTK_HP_PTHREADS)
-#include <pthread.h> // Needed for pthreads implementation of mutex
-typedef pthread_mutex_t vtkCritSecType;
-#endif
-
-#ifdef VTK_USE_WIN32_THREADS
-# include "vtkWindows.h" // Needed for win32 implementation of mutex
-typedef CRITICAL_SECTION vtkCritSecType;
-#endif
-
-#ifndef VTK_USE_SPROC
-#ifndef VTK_USE_PTHREADS
-#ifndef VTK_USE_WIN32_THREADS
-typedef int vtkCritSecType;
-#endif
-#endif
-#endif
-
-// Critical Section object that is not a vtkObject.
-class VTKCOMMONCORE_EXPORT vtkSimpleCriticalSection
-{
-public:
-  // Default cstor
-  vtkSimpleCriticalSection()
-    {
-    this->Init();
-    }
-  // Construct object locked if isLocked is different from 0
-  vtkSimpleCriticalSection(int isLocked)
-    {
-    this->Init();
-    if(isLocked)
-      {
-      this->Lock();
-      }
-    }
-  // Destructor
-  virtual ~vtkSimpleCriticalSection();
-
-  // Default vtkObject API
-  static vtkSimpleCriticalSection *New();
-  void Delete()
-    {
-    delete this;
-    }
-
-  void Init();
-
-  // Description:
-  // Lock the vtkCriticalSection
-  void Lock();
-
-  // Description:
-  // Unlock the vtkCriticalSection
-  void Unlock();
-
-protected:
-  vtkCritSecType   CritSec;
-};
-
-//ETX
+#include "vtkSimpleCriticalSection.h" // For simple critical section
 
 class VTKCOMMONCORE_EXPORT vtkCriticalSection : public vtkObject
 {
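
The simple (non-vtkObject) critical section now lives in
vtkSimpleCriticalSection.h; a minimal usage sketch, with illustrative names:

    #include "vtkSimpleCriticalSection.h"

    static vtkSimpleCriticalSection CounterLock; // plain object, no New()/Delete() needed
    static int Counter = 0;

    void ThreadSafeIncrement()
    {
      CounterLock.Lock();
      ++Counter;            // protected section
      CounterLock.Unlock();
    }
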
diff --git a/Common/Core/vtkDataArray.cxx b/Common/Core/vtkDataArray.cxx
index 3612a02..7d986c6 100644
--- a/Common/Core/vtkDataArray.cxx
+++ b/Common/Core/vtkDataArray.cxx
@@ -15,6 +15,7 @@
 #include "vtkDataArray.h"
 #include "vtkBitArray.h"
 #include "vtkCharArray.h"
+#include "vtkDataArrayIteratorMacro.h"
 #include "vtkDoubleArray.h"
 #include "vtkFloatArray.h"
 #include "vtkInformation.h"
@@ -29,6 +30,7 @@
 #include "vtkMath.h"
 #include "vtkShortArray.h"
 #include "vtkSignedCharArray.h"
+#include "vtkTypedDataArrayIterator.h"
 #include "vtkTypeTraits.h"
 #include "vtkUnsignedCharArray.h"
 #include "vtkUnsignedIntArray.h"
@@ -43,14 +45,11 @@ vtkInformationKeyRestrictedMacro(vtkDataArray, L2_NORM_RANGE, DoubleVector, 2);
 
 //----------------------------------------------------------------------------
 // Construct object with default tuple dimension (number of components) of 1.
-vtkDataArray::vtkDataArray(vtkIdType numComp)
+vtkDataArray::vtkDataArray()
 {
-  this->Size = 0;
-  this->MaxId = -1;
   this->LookupTable = NULL;
-
-  this->NumberOfComponents = static_cast<int>(numComp < 1 ? 1 : numComp);
-  this->Name = 0;
+  this->Range[0] = 0;
+  this->Range[1] = 0;
 }
 
 //----------------------------------------------------------------------------
@@ -64,39 +63,18 @@ vtkDataArray::~vtkDataArray()
 }
 
 //----------------------------------------------------------------------------
-template <class IT, class OT>
-void vtkDeepCopyArrayOfDifferentType(IT *input, OT *output,
-                                     vtkIdType numTuples, vtkIdType nComp)
-{
-  vtkIdType i;
-  vtkIdType j;
-  for (i=0; i<numTuples; i++)
-    {
-    for (j=0; j<nComp; j++)
-      {
-      output[i*nComp+j] = static_cast<OT>(input[i*nComp+j]);
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-template <class IT>
-void vtkDeepCopySwitchOnOutput(IT *input, vtkDataArray *da,
-                               vtkIdType numTuples, vtkIdType nComp)
+template <class InputIterator>
+void vtkDeepCopySwitchOnOutput(InputIterator begin, InputIterator end,
+                               vtkDataArray *outputArray)
 {
-  void *output = da->GetVoidPointer(0);
-
-  switch (da->GetDataType())
+  switch (outputArray->GetDataType())
     {
-    vtkTemplateMacro(
-      vtkDeepCopyArrayOfDifferentType(input,
-                                      static_cast<VTK_TT*>(output),
-                                      numTuples,
-                                      nComp));
+    vtkDataArrayIteratorMacro(outputArray,
+                              std::copy(begin, end, vtkDABegin));
 
     default:
-      vtkGenericWarningMacro("Unsupported data type " << da->GetDataType()
-                             <<"!");
+      vtkGenericWarningMacro("Unsupported data type "
+                             << outputArray->GetDataTypeAsString() << "!");
     }
 }
 
@@ -108,11 +86,11 @@ void vtkDataArray::DeepCopy(vtkAbstractArray* aa)
     return;
     }
 
-  vtkDataArray *da = vtkDataArray::SafeDownCast( aa );
+  vtkDataArray *da = vtkDataArray::FastDownCast(aa);
   if (da == NULL)
     {
-    vtkErrorMacro(<< "Input array is not a vtkDataArray.  Actual data "
-      << "type: " << aa->GetDataTypeAsString() );
+    vtkErrorMacro(<< "Input array is not a vtkDataArray ("
+                  << aa->GetClassName() << ")");
     return;
     }
 
@@ -138,27 +116,27 @@ void vtkDataArray::DeepCopy(vtkDataArray *da)
     vtkIdType numTuples = da->GetNumberOfTuples();
     this->NumberOfComponents = da->NumberOfComponents;
     this->SetNumberOfTuples(numTuples);
-    void *input=da->GetVoidPointer(0);
 
-    switch (da->GetDataType())
+    if (numTuples > 0)
       {
-      vtkTemplateMacro(
-        vtkDeepCopySwitchOnOutput(static_cast<VTK_TT*>(input),
-                                  this,
-                                  numTuples,
-                                  this->NumberOfComponents));
-
-      case VTK_BIT:
-        {//bit not supported, using generic double API
-        for (vtkIdType i=0; i < numTuples; i++)
-          {
-          this->SetTuple(i, da->GetTuple(i));
+      switch (da->GetDataType())
+        {
+        vtkDataArrayIteratorMacro(
+          da, vtkDeepCopySwitchOnOutput(vtkDABegin, vtkDAEnd, this)
+          );
+
+        case VTK_BIT:
+          {//bit not supported, using generic double API
+          for (vtkIdType i=0; i < numTuples; i++)
+            {
+            this->SetTuple(i, da->GetTuple(i));
+            }
+          break;
           }
-        break;
-        }
 
-      default:
-        vtkErrorMacro("Unsupported data type " << da->GetDataType() << "!");
+        default:
+          vtkErrorMacro("Unsupported data type " << da->GetDataType() << "!");
+        }
       }
 
     this->SetLookupTable(0);
@@ -168,6 +146,8 @@ void vtkDataArray::DeepCopy(vtkDataArray *da)
       this->LookupTable->DeepCopy(da->LookupTable);
       }
     }
+
+  this->Squeeze();
 }
 
 //----------------------------------------------------------------------------
@@ -274,8 +254,8 @@ inline void vtkDataArrayRoundIfNecessary(double val, float* retVal)
 }
 
 //--------------------------------------------------------------------------
-template <class T>
-void vtkDataArrayInterpolateTuple(T* from, T* to, int numComp,
+template <class Scalar, class Iterator>
+void vtkDataArrayInterpolateTuple(Iterator from, Scalar *to, int numComp,
   vtkIdType* ids, vtkIdType numIds, double* weights)
 {
   for(int i=0; i < numComp; ++i)
@@ -283,13 +263,14 @@ void vtkDataArrayInterpolateTuple(T* from, T* to, int numComp,
     double c = 0;
     for(vtkIdType j=0; j < numIds; ++j)
       {
-      c += weights[j]*static_cast<double>(from[ids[j]*numComp+i]);
+      c += weights[j] * static_cast<double>(from[ids[j]*numComp+i]);
       }
     // Round integer types. Don't round floating point types.
     vtkDataArrayRoundIfNecessary(c, to);
-    to++;
+    ++to;
     }
 }
+
 //----------------------------------------------------------------------------
 // Interpolate array value from other array value given the
 // indices and associated interpolation weights.
@@ -304,7 +285,7 @@ void vtkDataArray::InterpolateTuple(vtkIdType i, vtkIdList *ptIndices,
     return;
     }
 
-  vtkDataArray* fromData = vtkDataArray::SafeDownCast(source);
+  vtkDataArray* fromData = vtkDataArray::FastDownCast(source);
   if (fromData)
     {
     int numComp = fromData->GetNumberOfComponents();
@@ -313,6 +294,13 @@ void vtkDataArray::InterpolateTuple(vtkIdType i, vtkIdList *ptIndices,
     vtkIdType idx= i*numComp;
     double c;
 
+    // Note that we must call WriteVoidPointer before GetVoidPointer
+    // in case WriteVoidPointer reallocates memory and fromData ==
+    // this. The vtkBitArray implementation doesn't use pointers, so skip
+    // the resizing in this case.
+    void* vto = fromData->GetDataType() != VTK_BIT ?
+          this->WriteVoidPointer(idx, numComp) : 0;
+
     switch (fromData->GetDataType())
       {
     case VTK_BIT:
@@ -329,15 +317,10 @@ void vtkDataArray::InterpolateTuple(vtkIdType i, vtkIdList *ptIndices,
           }
         }
       break;
-      // Note that we must call WriteVoidPointer before GetVoidPointer
-      // in case WriteVoidPointer reallocates memory and fromData ==
-      // this.
-      vtkTemplateMacro(
-        void* vto = this->WriteVoidPointer(idx, numComp);
-        void* vfrom = fromData->GetVoidPointer(0);
-        vtkDataArrayInterpolateTuple(static_cast<VTK_TT*>(vfrom),
-          static_cast<VTK_TT*>(vto),
-          numComp, ids, numIds, weights)
+        vtkDataArrayIteratorMacro(fromData,
+          vtkDataArrayInterpolateTuple(vtkDABegin,
+                                       static_cast<vtkDAValueType*>(vto),
+                                       numComp, ids, numIds, weights)
       );
     default:
       vtkErrorMacro("Unsupported data type " << fromData->GetDataType()
@@ -348,17 +331,14 @@ void vtkDataArray::InterpolateTuple(vtkIdType i, vtkIdList *ptIndices,
 
 
 //----------------------------------------------------------------------------
-template <class T>
-void vtkDataArrayInterpolateTuple(T* from1, T* from2, T* to,
+template <class Scalar, class Iterator>
+void vtkDataArrayInterpolateTuple(Iterator from1, Iterator from2, Scalar* to,
   int numComp, double t)
 {
-  for(int i=0; i < numComp; ++i)
+  const double oneMinusT = 1.0 - t;
+  while (numComp-- > 0)
     {
-    double c = (1.0 - t) * static_cast<double>(*from1)
-      + t * static_cast<double>(*from2);
-    from1++;
-    from2++;
-    *to++ = static_cast<T>(c);
+    *(to++) = oneMinusT * (*(from1++)) + t * (*(from2++));
     }
 }
 
@@ -380,20 +360,17 @@ void vtkDataArray::InterpolateTuple(vtkIdType i,
     return;
     }
 
-  vtkDataArray* fromData1 = vtkDataArray::SafeDownCast(source1);
-  vtkDataArray* fromData2 = vtkDataArray::SafeDownCast(source2);
-
-  int k, numComp=fromData1->GetNumberOfComponents();
+  int k, numComp = source1->GetNumberOfComponents();
   double c;
   vtkIdType loc = i * numComp;
 
-  switch (fromData1->GetDataType())
+  switch (type)
     {
     case VTK_BIT:
       {
-      vtkBitArray *from1=static_cast<vtkBitArray *>(fromData1);
-      vtkBitArray *from2=static_cast<vtkBitArray *>(fromData2);
-      vtkBitArray *to=static_cast<vtkBitArray *>(this);
+      vtkBitArray *from1 = static_cast<vtkBitArray *>(source1);
+      vtkBitArray *from2 = static_cast<vtkBitArray *>(source2);
+      vtkBitArray *to = static_cast<vtkBitArray *>(this);
       for (k=0; k<numComp; k++)
         {
         c = from1->GetValue(id1) + t * (from2->GetValue(id2) - from1->GetValue(id1));
@@ -401,18 +378,46 @@ void vtkDataArray::InterpolateTuple(vtkIdType i,
         }
       }
       break;
-    // Note that we must call WriteVoidPointer before GetVoidPointer
-    // in case WriteVoidPointer reallocates memory and fromData1==this
-    // or fromData2==this.
+      // Note that we must call WriteVoidPointer before GetVoidPointer/creating
+      // iterators in case WriteVoidPointer reallocates memory and
+      // source1==this or source2==this.
     vtkTemplateMacro(
-      void* vto = this->WriteVoidPointer(loc, numComp);
-      void* vfrom1 = fromData1->GetVoidPointer(id1*numComp);
-      void* vfrom2 = fromData2->GetVoidPointer(id2*numComp);
-      vtkDataArrayInterpolateTuple(static_cast<VTK_TT*>(vfrom1),
-        static_cast<VTK_TT*>(vfrom2), static_cast<VTK_TT*>(vto), numComp, t)
+      // If either of the source arrays are mapped, use iterators. Otherwise,
+      // void pointers are safe.
+      if (source1->HasStandardMemoryLayout() &&
+          source2->HasStandardMemoryLayout())
+        {
+        // Use pointers:
+        void *vto = this->WriteVoidPointer(loc, numComp);
+        void *vfrom1 = source1->GetVoidPointer(id1 * numComp);
+        void *vfrom2 = source2->GetVoidPointer(id2 * numComp);
+        vtkDataArrayInterpolateTuple<VTK_TT>(static_cast<VTK_TT*>(vfrom1),
+                                             static_cast<VTK_TT*>(vfrom2),
+                                             static_cast<VTK_TT*>(vto),
+                                             numComp, t);
+        }
+      else
+        {
+        vtkTypedDataArray<VTK_TT> *tfrom1 =
+            vtkTypedDataArray<VTK_TT>::FastDownCast(source1);
+        vtkTypedDataArray<VTK_TT> *tfrom2 =
+            vtkTypedDataArray<VTK_TT>::FastDownCast(source2);
+        if (!tfrom1 || !tfrom2)
+          {
+          vtkErrorMacro(<<"Cannot call this function with non-standard arrays "
+                        "unless all arrays are vtkTypedDataArray subclasses.");
+          return;
+          }
+        VTK_TT *vto = static_cast<VTK_TT*>(
+            this->WriteVoidPointer(loc, numComp));
+        vtkDataArrayInterpolateTuple<VTK_TT>(
+            vtkTypedDataArrayIterator<VTK_TT>(tfrom1, id1 * numComp),
+            vtkTypedDataArrayIterator<VTK_TT>(tfrom2, id2 * numComp),
+            vto, numComp, t);
+        }
       );
     default:
-      vtkErrorMacro("Unsupported data type " << fromData1->GetDataType()
+      vtkErrorMacro("Unsupported data type " << type
                     << " during interpolation!");
     }
 
@@ -442,7 +447,10 @@ void vtkDataArray::SetLookupTable(vtkLookupTable* lut)
       this->LookupTable->UnRegister(this);
       }
     this->LookupTable = lut;
-    this->LookupTable->Register(this);
+    if ( this->LookupTable )
+      {
+      this->LookupTable->Register(this);
+      }
     this->Modified();
     }
 }
@@ -768,74 +776,79 @@ vtkDataArray* vtkDataArray::CreateDataArray(int dataType)
   return da;
 }
 
+namespace {
 //----------------------------------------------------------------------------
-template <class IT, class OT>
-void vtkCopyTuples(IT* input, OT* output, int nComp, vtkIdList* ptIds )
+typedef vtkIdType* IdIterator;
+
+//----------------------------------------------------------------------------
+template <class InputIterator, class OutputIterator>
+void vtkDataArrayGetTuplesTemplate2(IdIterator ids, IdIterator idsEnd,
+                                    InputIterator inIter,
+                                    OutputIterator outIter,
+                                    int numComps)
 {
-  int i, j;
-  vtkIdType num=ptIds->GetNumberOfIds();
-  for (i=0; i<num; i++)
+  InputIterator inPt;
+  while (ids != idsEnd)
     {
-    for (j=0; j<nComp; j++)
-      {
-      output[i*nComp+j] = static_cast<OT>(input[ptIds->GetId(i)*nComp+j]);
-      }
+    inPt = inIter + (*(ids++) * numComps);
+    outIter = std::copy(inPt, inPt + numComps, outIter);
     }
 }
 
 //----------------------------------------------------------------------------
-template <class IT>
-void vtkCopyTuples1(IT* input, vtkDataArray* output, vtkIdList* ptIds)
+template <class InputIterator>
+void vtkDataArrayGetTuplesTemplate1(IdIterator ids, IdIterator idsEnd,
+                                    InputIterator inIter,
+                                    vtkDataArray *outArray,
+                                    int numComps)
 {
-  switch (output->GetDataType())
+  switch (outArray->GetDataType())
     {
-    vtkTemplateMacro(vtkCopyTuples(input,
-                                   static_cast<VTK_TT *>(output->GetVoidPointer(0)),
-                                   output->GetNumberOfComponents(), ptIds) );
-
+    vtkDataArrayIteratorMacro(outArray,
+      vtkDataArrayGetTuplesTemplate2(ids, idsEnd, inIter, vtkDABegin, numComps)
+      );
     default:
-      vtkGenericWarningMacro("Sanity check failed: Unsupported data type "
-                             << output->GetDataType() << ".");
+      vtkGenericWarningMacro("vtkDataArray::GetTuples: "
+                             "Unsupported output type.");
       return;
     }
 }
 
+} // end anon namespace
+
+
 //----------------------------------------------------------------------------
 void vtkDataArray::GetTuples(vtkIdList *ptIds, vtkAbstractArray *aa)
 {
-  vtkDataArray* da = vtkDataArray::SafeDownCast(aa);
-  if (!da)
+  vtkDataArray *outArray = vtkDataArray::FastDownCast(aa);
+  if (!outArray)
     {
     vtkWarningMacro("Input is not a vtkDataArray.");
     return;
     }
 
-  if ((da->GetNumberOfComponents() != this->GetNumberOfComponents()))
+  if ((outArray->GetNumberOfComponents() != this->GetNumberOfComponents()))
     {
     vtkWarningMacro("Number of components for input and output do not match");
     return;
     }
 
+  IdIterator ids = ptIds->GetPointer(0);
+  IdIterator idsEnd = ptIds->GetPointer(ptIds->GetNumberOfIds());
 
   switch (this->GetDataType())
     {
-    vtkTemplateMacro(vtkCopyTuples1 (static_cast<VTK_TT *>(this->GetVoidPointer(0)), da,
-                                     ptIds ));
-    // This is not supported by the template macro.
-    // Switch to using the double interface.
-    case VTK_BIT:
-      {
+    vtkDataArrayIteratorMacro(this,
+      vtkDataArrayGetTuplesTemplate1(ids, idsEnd, vtkDABegin, outArray,
+                                     this->NumberOfComponents)
+      );
+    default: // Fallback to the double interface
       vtkIdType num=ptIds->GetNumberOfIds();
       for (vtkIdType i=0; i<num; i++)
         {
-        da->SetTuple(i,this->GetTuple(ptIds->GetId(i)));
+        outArray->SetTuple(i, this->GetTuple(ptIds->GetId(i)));
         }
-      }
       break;
-    default:
-      vtkErrorMacro("Sanity check failed: Unsupported data type "
-                    << this->GetDataType() << ".");
-      return;
     }
 }
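
For reference, a short sketch of the two-source InterpolateTuple path reworked
above: the output tuple is (1 - t) * source1[id1] + t * source2[id2]. Function
name and values are illustrative:

    #include "vtkFloatArray.h"
    #include "vtkNew.h"

    void InterpolateExample()
    {
      vtkNew<vtkFloatArray> a;
      a->SetNumberOfComponents(3);
      a->InsertNextTuple3(0.0, 0.0, 0.0);

      vtkNew<vtkFloatArray> b;
      b->SetNumberOfComponents(3);
      b->InsertNextTuple3(4.0, 8.0, 12.0);

      vtkNew<vtkFloatArray> out;
      out->SetNumberOfComponents(3);
      out->SetNumberOfTuples(1);

      // out[0] = 0.75 * a[0] + 0.25 * b[0] = (1, 2, 3)
      out->InterpolateTuple(0, 0, a.GetPointer(), 0, b.GetPointer(), 0.25);
    }
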
 
diff --git a/Common/Core/vtkDataArray.h b/Common/Core/vtkDataArray.h
index 8157e0b..f13c469 100644
--- a/Common/Core/vtkDataArray.h
+++ b/Common/Core/vtkDataArray.h
@@ -49,6 +49,13 @@ public:
   void PrintSelf(ostream& os, vtkIndent indent);
 
   // Description:
+  // Perform a fast, safe cast from a vtkAbstractArray to a vtkDataArray.
+  // This method checks if source->GetArrayType() returns DataArray
+  // or a more derived type, and performs a static_cast to return
+  // source as a vtkDataArray pointer. Otherwise, NULL is returned.
+  static vtkDataArray* FastDownCast(vtkAbstractArray *source);
+
+  // Description:
   // This method is here to make backward compatibility easier.  It
   // must return true if and only if an array contains numeric data.
   // All vtkDataArray subclasses contain numeric data, hence this method
@@ -369,6 +376,10 @@ public:
   // keys not intended to be copied are excluded here.
   virtual int CopyInformation(vtkInformation *infoFrom, int deep=1);
 
+  // Description:
+  // Method for type-checking in FastDownCast implementations.
+  virtual int GetArrayType() { return DataArray; }
+
 protected:
   // Description:
   // Compute the range for a specific component. If comp is set -1
@@ -382,7 +393,7 @@ protected:
   virtual void ComputeVectorRange(double range[2]);
 
   // Construct object with default tuple dimension (number of components) of 1.
-  vtkDataArray(vtkIdType numComp=1);
+  vtkDataArray();
   ~vtkDataArray();
 
   vtkLookupTable *LookupTable;
@@ -396,4 +407,19 @@ private:
   void operator=(const vtkDataArray&);  // Not implemented.
 };
 
+//------------------------------------------------------------------------------
+inline vtkDataArray* vtkDataArray::FastDownCast(vtkAbstractArray *source)
+{
+  switch (source->GetArrayType())
+    {
+    case DataArrayTemplate:
+    case TypedDataArray:
+    case DataArray:
+    case MappedDataArray:
+      return static_cast<vtkDataArray*>(source);
+    default:
+      return NULL;
+    }
+}
+
 #endif
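
A short sketch of the new FastDownCast helper declared above: unlike
SafeDownCast it dispatches on GetArrayType() rather than run-time string
comparison, but it still returns NULL for non-numeric arrays. The function name
is illustrative:

    #include "vtkAbstractArray.h"
    #include "vtkDataArray.h"

    double FirstComponentOrZero(vtkAbstractArray *arr)
    {
      if (vtkDataArray *da = vtkDataArray::FastDownCast(arr))
        {
        return da->GetNumberOfTuples() > 0 ? da->GetComponent(0, 0) : 0.0;
        }
      return 0.0; // e.g. a vtkStringArray ends up here
    }
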
diff --git a/Common/Core/vtkDataArrayCollection.h b/Common/Core/vtkDataArrayCollection.h
index d2a7f94..c421b7d 100644
--- a/Common/Core/vtkDataArrayCollection.h
+++ b/Common/Core/vtkDataArrayCollection.h
@@ -57,8 +57,8 @@ public:
   //ETX
 
 protected:
-  vtkDataArrayCollection() {};
-  ~vtkDataArrayCollection() {};
+  vtkDataArrayCollection() {}
+  ~vtkDataArrayCollection() {}
 
 
 private:
diff --git a/Common/Core/vtkDataArrayIteratorMacro.h b/Common/Core/vtkDataArrayIteratorMacro.h
new file mode 100644
index 0000000..bfb7e2d
--- /dev/null
+++ b/Common/Core/vtkDataArrayIteratorMacro.h
@@ -0,0 +1,136 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkDataArrayIteratorMacro.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkDataArrayIteratorMacro - A macro for obtaining iterators to
+// vtkDataArray data when the array implementation and type are unknown.
+//
+// .SECTION Description
+// See vtkTemplateMacro.
+// This macro is similar, but defines several additional typedefs and variables
+// for safely iterating through data in a vtkAbstractArray container:
+//  - vtkDAValueType is typedef'd to the array's element value type.
+//  - vtkDAContainerType is typedef'd to the most derived class of
+//    vtkAbstractArray for which a suitable iterator has been found.
+//  - vtkDAIteratorType is typedef'd to the most suitable iterator type found.
+//  - vtkDABegin is an object of vtkDAIteratorType that points to the first
+//    component of the first tuple in the array.
+//  - vtkDAEnd is an object of vtkDAIteratorType that points to the element
+//    *after* the last component of the last tuple in the array.
+// The primary advantage to using this macro is that arrays with non-standard
+// memory layouts will be safely handled, and dangerous calls to GetVoidPointer
+// are avoided.
+// For arrays with > 1 component, the iterator will proceed through all
+// components of a tuple before moving on to the next tuple.
+// This matches the memory layout of the standard vtkDataArray subclasses such
+// as vtkFloatArray.
+//
+// For the standard vtkDataArray implementations (which are subclasses of
+// vtkDataArrayTemplate), the iterators will simply be pointers to the raw
+// memory of the array.
+// This allows very fast iteration when possible, and permits compiler
+// optimizations in the standard template library to occur (such as reducing
+// std::copy to memmove).
+//
+// For arrays that are subclasses of vtkTypedDataArray (but not
+// vtkDataArrayTemplate), a vtkTypedDataArrayIterator is used.
+// Such iterators safely traverse the array using API calls and have
+// pointer-like semantics, but add about a 35% performance overhead compared
+// with iterating over the raw memory (measured by summing a vtkFloatArray
+// containing 10M values on GCC 4.8.1 with -O3 optimization using both iterator
+// types -- see TestDataArrayIterators).
+//
+// For arrays that are not subclasses of vtkTypedDataArray, there is no reliably
+// safe way to iterate over the array elements.
+// In such cases, this macro performs the legacy behavior of casting
+// vtkAbstractArray::GetVoidPointer(...) to vtkDAValueType* to create the
+// iterators.
+//
+// To use this macro, create a templated worker function:
+//
+// template <class Iterator>
+// void myFunc(Iterator begin, Iterator end, ...) {...}
+//
+// and then call the vtkDataArrayIteratorMacro inside of a switch statement
+// using the above objects and typedefs as needed:
+//
+// vtkAbstractArray *someArray = ...;
+// switch (someArray->GetDataType())
+//   {
+//   vtkDataArrayIteratorMacro(someArray, myFunc(vtkDABegin, vtkDAEnd, ...));
+//   }
+//
+// .SECTION See Also
+// vtkTemplateMacro vtkTypedDataArrayIterator
+
+#ifndef __vtkDataArrayIteratorMacro_h
+#define __vtkDataArrayIteratorMacro_h
+
+#include "vtkDataArrayTemplate.h" // For all classes referred to in the macro
+#include "vtkSetGet.h" // For vtkTemplateMacro
+
+// Silence 'unused typedef' warnings on newer GCC: whether the typedef in
+// question is used depends on the macro argument _call, so it must not be
+// removed.
+#if defined(__GNUC__)
+#define _vtkDAIMUnused __attribute__ ((unused))
+#else
+#define _vtkDAIMUnused
+#endif
+
+#define vtkDataArrayIteratorMacro(_array, _call)                           \
+  vtkTemplateMacro(                                                        \
+    vtkAbstractArray *_aa(_array);                                         \
+    if (vtkDataArrayTemplate<VTK_TT> *_dat =                               \
+        vtkDataArrayTemplate<VTK_TT>::FastDownCast(_aa))                   \
+      {                                                                    \
+      typedef VTK_TT vtkDAValueType;                                       \
+      typedef vtkDataArrayTemplate<vtkDAValueType> vtkDAContainerType;     \
+      typedef vtkDAContainerType::Iterator vtkDAIteratorType;              \
+      vtkDAIteratorType vtkDABegin(_dat->Begin());                         \
+      vtkDAIteratorType vtkDAEnd(_dat->End());                             \
+      (void)vtkDABegin; /* Prevent warnings when unused */                 \
+      (void)vtkDAEnd;                                                      \
+      _call;                                                               \
+      }                                                                    \
+    else if (vtkTypedDataArray<VTK_TT> *_tda =                             \
+             vtkTypedDataArray<VTK_TT>::FastDownCast(_aa))                 \
+      {                                                                    \
+      typedef VTK_TT vtkDAValueType;                                       \
+      typedef vtkTypedDataArray<vtkDAValueType> vtkDAContainerType;        \
+      typedef vtkDAContainerType::Iterator vtkDAIteratorType;              \
+      vtkDAIteratorType vtkDABegin(_tda->Begin());                         \
+      vtkDAIteratorType vtkDAEnd(_tda->End());                             \
+      (void)vtkDABegin;                                                    \
+      (void)vtkDAEnd;                                                      \
+      _call;                                                               \
+      }                                                                    \
+    else                                                                   \
+      {                                                                    \
+      /* This is not ideal, as no explicit iterator has been declared.     \
+       * Cast the void pointer and hope for the best! */                   \
+      typedef VTK_TT vtkDAValueType;                                       \
+      typedef vtkAbstractArray vtkDAContainerType _vtkDAIMUnused;          \
+      typedef vtkDAValueType* vtkDAIteratorType;                           \
+      vtkDAIteratorType vtkDABegin =                                       \
+        static_cast<vtkDAIteratorType>(_aa->GetVoidPointer(0));            \
+      vtkDAIteratorType vtkDAEnd = vtkDABegin + _aa->GetMaxId() + 1;       \
+      (void)vtkDABegin;                                                    \
+      (void)vtkDAEnd;                                                      \
+      _call;                                                               \
+      }                                                                    \
+    )
+
+#endif //__vtkDataArrayIteratorMacro_h
+
+// VTK-HeaderTest-Exclude: vtkDataArrayIteratorMacro.h
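
Following the usage pattern documented in the new header, a small worker that
sums every component of an arbitrary numeric array (a sketch; the function
names are illustrative):

    #include "vtkAbstractArray.h"
    #include "vtkDataArrayIteratorMacro.h"

    // Generic worker: works for raw pointers and vtkTypedDataArrayIterator alike.
    template <class Iterator>
    double SumRange(Iterator begin, Iterator end)
    {
      double sum = 0.0;
      while (begin != end)
        {
        sum += static_cast<double>(*begin++);
        }
      return sum;
    }

    double SumArray(vtkAbstractArray *array)
    {
      double sum = 0.0;
      switch (array->GetDataType())
        {
        vtkDataArrayIteratorMacro(array, sum = SumRange(vtkDABegin, vtkDAEnd));
        default:
          break; // non-numeric types are left at 0.0
        }
      return sum;
    }
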
diff --git a/Common/Core/vtkDataArraySelection.h b/Common/Core/vtkDataArraySelection.h
index 16dfb80..96e1c39 100644
--- a/Common/Core/vtkDataArraySelection.h
+++ b/Common/Core/vtkDataArraySelection.h
@@ -75,7 +75,7 @@ public:
   const char* GetArrayName(int index);
 
   // Description:
-  // Get an index of the array containing name within the enabled arrays
+  // Get an index of the array with the given name.
   int GetArrayIndex(const char *name);
 
   // Description:
diff --git a/Common/Core/vtkDataArrayTemplate.h b/Common/Core/vtkDataArrayTemplate.h
index 7c6f22c..c469c05 100644
--- a/Common/Core/vtkDataArrayTemplate.h
+++ b/Common/Core/vtkDataArrayTemplate.h
@@ -22,19 +22,51 @@
 #define __vtkDataArrayTemplate_h
 
 #include "vtkCommonCoreModule.h" // For export macro
-#include "vtkDataArray.h"
+#include "vtkTypedDataArray.h"
+#include "vtkTypeTemplate.h" // For templated vtkObject API
+#include <cassert> // for assert()
 
 template <class T>
 class vtkDataArrayTemplateLookup;
 
 template <class T>
-class VTKCOMMONCORE_EXPORT vtkDataArrayTemplate: public vtkDataArray
+class VTKCOMMONCORE_EXPORT vtkDataArrayTemplate:
+    public vtkTypeTemplate<vtkDataArrayTemplate<T>, vtkTypedDataArray<T> >
 {
 public:
-  typedef vtkDataArray Superclass;
+  typedef vtkTypedDataArray<T> Superclass;
+  typedef typename Superclass::ValueType ValueType;
   void PrintSelf(ostream& os, vtkIndent indent);
 
   // Description:
+  // Typedef to a suitable iterator class.
+  // Rather than using this member directly, consider using
+  // vtkDataArrayIteratorMacro for safety and efficiency.
+  typedef ValueType* Iterator;
+
+  // Description:
+  // Return an iterator initialized to the first element of the data.
+  // Rather than using this member directly, consider using
+  // vtkDataArrayIteratorMacro for safety and efficiency.
+  Iterator Begin() { return Iterator(this->GetVoidPointer(0)); }
+
+  // Description:
+  // Return an iterator initialized to first element past the end of the data.
+  // Rather than using this member directly, consider using
+  // vtkDataArrayIteratorMacro for safety and efficiency.
+  Iterator End() { return Iterator(this->GetVoidPointer(this->MaxId + 1)); }
+
+  // Description:
+  // Perform a fast, safe cast from a vtkAbstractArray to a
+  // vtkDataArrayTemplate.
+  // This method checks if:
+  // - source->GetArrayType() is appropriate, and
+  // - source->GetDataType() matches vtkTypeTraits<ValueType>::VTK_TYPE_ID.
+  // If these conditions are met, the method performs a static_cast to return
+  // source as a vtkDataArrayTemplate pointer. Otherwise, NULL is returned.
+  static vtkDataArrayTemplate<T>* FastDownCast(vtkAbstractArray *src);
+
+  // Description:
   // Allocate memory for this array. Delete old storage only if necessary.
   // Note that ext is no longer used.
   int Allocate(vtkIdType sz, vtkIdType ext=1000);
@@ -64,6 +96,13 @@ public:
   virtual void InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* source);
 
   // Description:
+  // Copy the tuples indexed in srcIds from the source array to the tuple
+  // locations indexed by dstIds in this array.
+  // Note that memory allocation is performed as necessary to hold the data.
+  virtual void InsertTuples(vtkIdList *destIds, vtkIdList *srcIds,
+                            vtkAbstractArray *source);
+
+  // Description:
   // Insert the jth tuple in the source array, at the end in this array.
   // Note that memory allocation is performed as necessary to hold the data.
   // Returns the location at which the data was inserted.
@@ -115,6 +154,14 @@ public:
     }
 
   // Description:
+  // Get the range of array values for the 0th component in the
+  // native data type.
+  T *GetValueRange()
+    { return this->GetValueRange(0); }
+  void GetValueRange(T range[2])
+    { this->GetValueRange(range, 0); }
+
+  // Description:
   // Resize object to just fit data requirement. Reclaims extra memory.
   void Squeeze() { this->ResizeAndExtend (this->MaxId+1); }
 
@@ -131,13 +178,16 @@ public:
 
   // Description:
   // Get the data at a particular index.
-  T GetValue(vtkIdType id) { return this->Array[id]; }
+  T GetValue(vtkIdType id)
+    { assert(id >= 0 && id < this->Size); return this->Array[id]; }
+  T& GetValueReference(vtkIdType id)
+    { assert(id >= 0 && id < this->Size); return this->Array[id]; }
 
   // Description:
   // Set the data at a particular index. Does not do range checking. Make sure
   // you use the method SetNumberOfValues() before inserting data.
   void SetValue(vtkIdType id, T value)
-    { this->Array[id] = value;};
+    { assert(id >= 0 && id < this->Size); this->Array[id] = value;};
 
   // Description:
   // Specify the number of values for this object to hold. Does an
@@ -194,15 +244,12 @@ public:
   // Description:
   // Get the address of a particular data index. Performs no checks
   // to verify that the memory has been allocated etc.
+  // If the data is simply being iterated over, consider using
+  // vtkDataArrayIteratorMacro for safety and efficiency, rather than using this
+  // member directly.
   T* GetPointer(vtkIdType id) { return this->Array + id; }
   virtual void* GetVoidPointer(vtkIdType id) { return this->GetPointer(id); }
 
-  // Description:
-  // Deep copy of another double array.
-  void DeepCopy(vtkDataArray* da);
-  void DeepCopy(vtkAbstractArray* aa)
-    { this->Superclass::DeepCopy(aa); }
-
 //BTX
   enum DeleteMethod
   {
@@ -250,6 +297,10 @@ public:
   virtual void LookupValue(vtkVariant value, vtkIdList* ids);
   vtkIdType LookupValue(T value);
   void LookupValue(T value, vtkIdList* ids);
+  vtkIdType LookupTypedValue(T value)
+    { return this->LookupValue(value); }
+  void LookupTypedValue(T value, vtkIdList* ids)
+    { this->LookupValue(value, ids); }
 
   // Description:
   // Tell the array explicitly that the data has changed.
@@ -272,8 +323,12 @@ public:
   // function.
   virtual void ClearLookup();
 
+  // Description:
+  // Method for type-checking in FastDownCast implementations.
+  virtual int GetArrayType() { return vtkAbstractArray::DataArrayTemplate; }
+
 protected:
-  vtkDataArrayTemplate(vtkIdType numComp);
+  vtkDataArrayTemplate();
   ~vtkDataArrayTemplate();
 
   T* Array;   // pointer to data
@@ -294,6 +349,7 @@ private:
   void operator=(const vtkDataArrayTemplate&);  // Not implemented.
 
   vtkDataArrayTemplateLookup<T>* Lookup;
+  bool RebuildLookup;
   void UpdateLookup();
 
   void DeleteArray();
@@ -307,6 +363,35 @@ private:
 # define VTK_DATA_ARRAY_TEMPLATE_INSTANTIATE(T)
 #endif
 
+// Subclasses use this macro to create dummy declarations of these methods
+// so that the wrappers can see them; the wrappers ignore vtkDataArrayTemplate
+// itself.
+#define vtkCreateWrappedArrayInterface(T) \
+  int GetDataType(); \
+  void GetTupleValue(vtkIdType i, T* tuple); \
+  void SetTupleValue(vtkIdType i, const T* tuple); \
+  void InsertTupleValue(vtkIdType i, const T* tuple); \
+  vtkIdType InsertNextTupleValue(const T* tuple); \
+  T GetValue(vtkIdType id); \
+  void SetValue(vtkIdType id, T value); \
+  void SetNumberOfValues(vtkIdType number); \
+  void InsertValue(vtkIdType id, T f); \
+  vtkIdType InsertNextValue(T f); \
+  T *GetValueRange(int comp); \
+  T *GetValueRange(); \
+  T* WritePointer(vtkIdType id, vtkIdType number); \
+  T* GetPointer(vtkIdType id)/*; \
+
+  * These methods are not wrapped because they are easy to get wrong from the
+  * wrapper languages: passing the wrong value for 'save' is guaranteed to
+  * cause a memory problem down the line, either because the wrappers did not
+  * allocate the memory with malloc, or because the wrapped array is a
+  * temporary that does not persist the way this method expects.
+
+  void SetArray(T* array, vtkIdType size, int save); \
+  void SetArray(T* array, vtkIdType size, int save, int deleteMethod) */
+
 #endif // !defined(__vtkDataArrayTemplate_h)
 
 // This portion must be OUTSIDE the include blockers.  Each
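
The new Begin()/End() members above are plain pointers for the standard array
classes, so STL algorithms can operate directly on the array memory. A minimal
sketch (the header itself recommends vtkDataArrayIteratorMacro for generic
code); the function name is illustrative:

    #include "vtkFloatArray.h"
    #include "vtkNew.h"
    #include <algorithm>
    #include <numeric>

    void FillAndSum()
    {
      vtkNew<vtkFloatArray> arr;
      arr->SetNumberOfComponents(1);
      arr->SetNumberOfValues(10);

      // Iterator is float* here, so std::fill/std::accumulate work directly.
      std::fill(arr->Begin(), arr->End(), 1.0f);
      double sum = std::accumulate(arr->Begin(), arr->End(), 0.0);
      (void)sum; // 10.0
    }
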
diff --git a/Common/Core/vtkDataArrayTemplate.txx b/Common/Core/vtkDataArrayTemplate.txx
index 8f5d342..d255831 100644
--- a/Common/Core/vtkDataArrayTemplate.txx
+++ b/Common/Core/vtkDataArrayTemplate.txx
@@ -18,11 +18,13 @@
 #include "vtkDataArrayTemplate.h"
 
 #include "vtkArrayIteratorTemplate.h"
+#include "vtkTypedDataArrayIterator.h"
 #include "vtkIdList.h"
 #include "vtkInformation.h"
 #include "vtkInformationDoubleVectorKey.h"
 #include "vtkInformationInformationVectorKey.h"
 #include "vtkInformationVector.h"
+#include "vtkMappedDataArray.h"
 #include "vtkSortDataArray.h"
 #include "vtkTypeTraits.h"
 #include "vtkVariantCast.h"
@@ -44,7 +46,7 @@ template <class T>
 class vtkDataArrayTemplateLookup
 {
 public:
-  vtkDataArrayTemplateLookup() : Rebuild(true)
+  vtkDataArrayTemplateLookup()
     {
     this->SortedArray = NULL;
     this->IndexArray = NULL;
@@ -65,22 +67,21 @@ public:
   vtkAbstractArray* SortedArray;
   vtkIdList* IndexArray;
   std::multimap<T, vtkIdType> CachedUpdates;
-  bool Rebuild;
 };
 
 //----------------------------------------------------------------------------
 template <class T>
-vtkDataArrayTemplate<T>::vtkDataArrayTemplate(vtkIdType numComp):
-  vtkDataArray(numComp)
+vtkDataArrayTemplate<T>::vtkDataArrayTemplate()
 {
   this->Array = 0;
-  this->Tuple = 0;
+  this->ValueRange[0] = 0;
+  this->ValueRange[1] = 1;
   this->TupleSize = 0;
+  this->Tuple = 0;
   this->SaveUserArray = 0;
   this->DeleteMethod = VTK_DATA_ARRAY_FREE;
   this->Lookup = 0;
-  this->ValueRange[0] = 0;
-  this->ValueRange[1] = 1;
+  this->RebuildLookup = true;
 }
 
 //----------------------------------------------------------------------------
@@ -175,71 +176,6 @@ void vtkDataArrayTemplate<T>::Initialize()
 }
 
 //----------------------------------------------------------------------------
-// Deep copy of another double array.
-template <class T>
-void vtkDataArrayTemplate<T>::DeepCopy(vtkDataArray* fa)
-{
-  // Do nothing on a NULL input.
-  if(!fa)
-    {
-    return;
-    }
-
-  // Avoid self-copy.
-  if(this == fa)
-    {
-    return;
-    }
-
-  // If data type does not match, do copy with conversion.
-  if(fa->GetDataType() != this->GetDataType())
-    {
-    this->Superclass::DeepCopy(fa);
-    this->DataChanged();
-    return;
-    }
-
-  // Free our previous memory.
-  this->DeleteArray();
-
-  // Copy the given array into new memory.
-  this->NumberOfComponents = fa->GetNumberOfComponents();
-  this->MaxId = fa->GetMaxId();
-  this->Size = fa->GetSize();
-
-  this->Size = (this->Size > 0 ? this->Size : 1);
-  this->Array = static_cast<T* >(
-    malloc(static_cast<size_t>(this->Size) * sizeof(T)));
-  if(this->Array==0)
-    {
-    vtkErrorMacro("Unable to allocate " << this->Size
-                  << " elements of size " << sizeof(T)
-                  << " bytes. ");
-
-    #if !defined NDEBUG
-    // We're debugging, crash here preserving the stack
-    abort();
-    #elif !defined VTK_DONT_THROW_BAD_ALLOC
-    // We can throw something that has universal meaning
-    throw std::bad_alloc();
-    #else
-    // We indicate that malloc failed by return
-    this->Size = 0;
-    this->NumberOfComponents = 0;
-    this->MaxId = -1;
-    return;
-    #endif
-    }
-  if (fa->GetSize() > 0)
-    {
-    memcpy(this->Array, fa->GetVoidPointer(0),
-           static_cast<size_t>(this->Size)*sizeof(T));
-    }
-  this->vtkAbstractArray::DeepCopy( fa );
-  this->DataChanged();
-}
-
-//----------------------------------------------------------------------------
 template <class T>
 void vtkDataArrayTemplate<T>::PrintSelf(ostream& os, vtkIndent indent)
 {
@@ -256,6 +192,22 @@ void vtkDataArrayTemplate<T>::PrintSelf(ostream& os, vtkIndent indent)
 }
 
 //----------------------------------------------------------------------------
+template <typename T> vtkDataArrayTemplate<T> *
+vtkDataArrayTemplate<T>::FastDownCast(vtkAbstractArray *src)
+{
+  switch (src->GetArrayType())
+    {
+    case vtkAbstractArray::DataArrayTemplate:
+      if (src->GetDataType() == vtkTypeTraits<ValueType>::VTK_TYPE_ID)
+        {
+        return static_cast<vtkDataArrayTemplate<ValueType>*>(src);
+        }
+    default:
+      return NULL;
+    }
+}
+
+//----------------------------------------------------------------------------
 template <class T>
 void vtkDataArrayTemplate<T>::DeleteArray()
 {
@@ -406,7 +358,6 @@ template <class T>
 void vtkDataArrayTemplate<T>::SetNumberOfTuples(vtkIdType number)
 {
   this->SetNumberOfValues(number*this->NumberOfComponents);
-  this->DataChanged();
 }
 
 //----------------------------------------------------------------------------
@@ -470,19 +421,105 @@ void vtkDataArrayTemplate<T>::InsertTuple(vtkIdType i, vtkIdType j,
       }
     }
 
-  vtkIdType locIn = j * inNumComp;
-
-  T* outPtr = this->GetPointer(locOut);
-  T* inPtr = static_cast<T*>(source->GetVoidPointer(locIn));
-
-  size_t s=static_cast<size_t>(inNumComp);
-  memcpy(outPtr, inPtr, s*sizeof(T));
+  // Copy directly into our array if the source has supporting API:
+  if (vtkTypedDataArray<T> *typedSource =
+      vtkTypedDataArray<T>::FastDownCast(source))
+    {
+    typedSource->GetTupleValue(j, this->GetPointer(locOut));
+    }
+  else if (vtkDataArray *dataSource = vtkDataArray::FastDownCast(source))
+    {
+    // Otherwise use the double interface
+    this->SetTuple(i, dataSource->GetTuple(j));
+    }
+  else
+    {
+    vtkWarningMacro("Input array is not a vtkDataArray subclass!");
+    return;
+    }
 
   vtkIdType maxId = maxSize-1;
   if ( maxId > this->MaxId )
     {
     this->MaxId = maxId;
     }
+
+  this->DataChanged();
+}
+
+//----------------------------------------------------------------------------
+template<class T>
+void vtkDataArrayTemplate<T>::InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                                        vtkAbstractArray *source)
+{
+  if (source->GetDataType() != this->GetDataType())
+    {
+    vtkWarningMacro("Input and output array data types do not match.");
+    return;
+    }
+
+  if (this->NumberOfComponents != source->GetNumberOfComponents())
+    {
+    vtkWarningMacro("Input and output component sizes do not match.");
+    return;
+    }
+
+  vtkIdType numIds = dstIds->GetNumberOfIds();
+  if (srcIds->GetNumberOfIds() != numIds)
+    {
+    vtkWarningMacro("Input and output id array sizes do not match.");
+    return;
+    }
+
+  // Find maximum destination id and resize if needed
+  vtkIdType maxDstId = 0;
+  for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+    {
+    maxDstId = std::max(maxDstId, dstIds->GetId(idIndex));
+    }
+
+  vtkIdType maxSize = (maxDstId + 1) * this->NumberOfComponents;
+  if (maxSize > this->Size)
+    {
+    if (this->ResizeAndExtend(maxSize) == 0)
+      {
+      vtkWarningMacro("Failed to allocate memory.");
+      return;
+      }
+    }
+
+  // Copy directly into our array if the source has supporting API:
+  if (vtkTypedDataArray<T> *typedSource =
+      vtkTypedDataArray<T>::FastDownCast(source))
+    {
+    for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+      {
+      typedSource->GetTupleValue(srcIds->GetId(idIndex),
+                                 this->GetPointer(dstIds->GetId(idIndex)
+                                                  * this->NumberOfComponents));
+      }
+    }
+  else if (vtkDataArray *dataSource = vtkDataArray::FastDownCast(source))
+    {
+    // Otherwise use the double interface
+    for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+      {
+      this->SetTuple(dstIds->GetId(idIndex),
+                     dataSource->GetTuple(srcIds->GetId(idIndex)));
+      }
+    }
+  else
+    {
+    vtkWarningMacro("Input array is not a vtkDataArray subclass!");
+    return;
+    }
+
+  vtkIdType maxId = maxSize - 1;
+  if (maxId > this->MaxId)
+    {
+    this->MaxId = maxId;
+    }
+
   this->DataChanged();
 }
 
@@ -494,12 +531,13 @@ template<class T>
 vtkIdType vtkDataArrayTemplate<T>::InsertNextTuple(vtkIdType j,
   vtkAbstractArray* source)
 {
-   if (source->GetDataType() != this->GetDataType())
+  if (source->GetDataType() != this->GetDataType())
     {
     vtkWarningMacro("Input and output array data types do not match.");
     return -1;
     }
-  if (this->NumberOfComponents != source->GetNumberOfComponents())
+  vtkIdType numComps = source->GetNumberOfComponents();
+  if (this->NumberOfComponents != numComps)
     {
     vtkWarningMacro("Input and output component sizes do not match.");
     return -1;
@@ -508,22 +546,32 @@ vtkIdType vtkDataArrayTemplate<T>::InsertNextTuple(vtkIdType j,
   // If this and source are the same, we need to make sure that
   // the array grows before we get the pointer. Growing the array
   // after getting the pointer may make it invalid.
-  if (this == source)
+
+  // Copy directly into our array if the source has supporting API:
+  if (vtkTypedDataArray<T> *typedSource =
+      vtkTypedDataArray<T>::FastDownCast(source))
+    {
+    typedSource->GetTupleValue(j,
+                               this->WritePointer(this->MaxId + 1, numComps));
+    }
+  else if (vtkDataArray *dataSource = vtkDataArray::FastDownCast(source))
     {
-    if (this->ResizeAndExtend(this->Size+1)==0)
+    // Otherwise use the double interface
+    T *out = this->WritePointer(this->MaxId + 1, numComps);
+    double *in = dataSource->GetTuple(j);
+
+    while (numComps-- > 0)
       {
-      return -1;
+      *(out++) = static_cast<T>(*(in++));
       }
     }
-
-  T* data = static_cast<T*>(source->GetVoidPointer(0));
-  vtkIdType locj = j * source->GetNumberOfComponents();
-
-  for (vtkIdType cur = 0; cur < this->NumberOfComponents; cur++)
+  else
     {
-    this->InsertNextValue(data[locj + cur]);
+    vtkWarningMacro("Input array is not a vtkDataArray subclass!");
+    return -1;
     }
-  return (this->GetNumberOfTuples()-1);
+
+  return this->GetNumberOfTuples() - 1;
 }
 
 //----------------------------------------------------------------------------
@@ -849,11 +897,10 @@ void vtkDataArrayTemplate<T>::InsertComponent(vtkIdType i, int j,
 template <class T>
 void vtkDataArrayTemplate<T>::SetNumberOfValues(vtkIdType number)
 {
-  if(this->Allocate(number))
+  if (this->Allocate(number))
     {
     this->MaxId = number - 1;
     }
-  this->DataChanged();
 }
 
 //----------------------------------------------------------------------------
@@ -1022,9 +1069,9 @@ void vtkDataArrayTemplate<T>::UpdateLookup()
     this->Lookup = new vtkDataArrayTemplateLookup<T>();
     this->Lookup->SortedArray = vtkAbstractArray::CreateArray(this->GetDataType());
     this->Lookup->IndexArray = vtkIdList::New();
-    this->Lookup->Rebuild = true;
+    this->RebuildLookup = true;
     }
-  if (this->Lookup->Rebuild)
+  if (this->RebuildLookup)
     {
     int numComps = this->GetNumberOfComponents();
     vtkIdType numTuples = this->GetNumberOfTuples();
@@ -1035,8 +1082,8 @@ void vtkDataArrayTemplate<T>::UpdateLookup()
       this->Lookup->IndexArray->SetId(i, i);
       }
     vtkSortDataArray::Sort(this->Lookup->SortedArray, this->Lookup->IndexArray);
-    this->Lookup->Rebuild = false;
     this->Lookup->CachedUpdates.clear();
+    this->RebuildLookup = false;
     }
 }
 
@@ -1208,37 +1255,28 @@ void vtkDataArrayTemplate<T>::LookupValue(T value, vtkIdList* ids)
 template <class T>
 void vtkDataArrayTemplate<T>::DataChanged()
 {
-  if (this->Lookup)
-    {
-    this->Lookup->Rebuild = true;
-    }
+  this->RebuildLookup = true;
 }
 
 //----------------------------------------------------------------------------
 template <class T>
 void vtkDataArrayTemplate<T>::DataElementChanged(vtkIdType id)
 {
-  if (this->Lookup)
+  if (!this->RebuildLookup && this->Lookup)
     {
-      if (this->Lookup->Rebuild)
-        {
-        // We're already going to rebuild the lookup table. Do nothing.
-        return;
-        }
-
-      if (this->Lookup->CachedUpdates.size() >
-          static_cast<size_t>(this->GetNumberOfTuples()/10))
-        {
-        // At this point, just rebuild the full table.
-        this->Lookup->Rebuild = true;
-        }
-      else
-        {
-        // Insert this change into the set of cached updates
-        std::pair<const T, vtkIdType>
+    if (this->Lookup->CachedUpdates.size() >
+        static_cast<size_t>(this->GetNumberOfTuples()/10))
+      {
+      // At this point, just rebuild the full table.
+      this->RebuildLookup = true;
+      }
+    else
+      {
+      // Insert this change into the set of cached updates
+      std::pair<const T, vtkIdType>
           value(this->GetValue(id), id);
-        this->Lookup->CachedUpdates.insert(value);
-        }
+      this->Lookup->CachedUpdates.insert(value);
+      }
     }
 }
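
    (Illustrative sketch, not part of the patch: the hunks above add an id-list
    overload of InsertTuples and a typed fast path for InsertNextTuple. Assuming
    a VTK 6.1 build and only vtkFloatArray, vtkIdList and vtkNew, a minimal
    caller could look like this.)

      #include "vtkFloatArray.h"
      #include "vtkIdList.h"
      #include "vtkNew.h"

      int main()
      {
        // Source: four 3-component float tuples.
        vtkNew<vtkFloatArray> src;
        src->SetNumberOfComponents(3);
        for (int i = 0; i < 4; ++i)
          {
          float tuple[3] = { float(i), float(i) + 0.25f, float(i) + 0.5f };
          src->InsertNextTupleValue(tuple);
          }

        // Copy source tuples 0 and 2 into destination slots 5 and 6.
        vtkNew<vtkIdList> srcIds;
        srcIds->InsertNextId(0);
        srcIds->InsertNextId(2);
        vtkNew<vtkIdList> dstIds;
        dstIds->InsertNextId(5);
        dstIds->InsertNextId(6);

        vtkNew<vtkFloatArray> dst;
        dst->SetNumberOfComponents(3);
        // Same data type and component count, so the typed GetTupleValue()
        // fast path is taken; the array grows to hold destination id 6.
        dst->InsertTuples(dstIds.GetPointer(), srcIds.GetPointer(), src.GetPointer());
        return dst->GetNumberOfTuples() == 7 ? 0 : 1;
      }

    (If the source held a different scalar type, the new code would instead take
    the double GetTuple()/SetTuple() branch shown in the hunk above.)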
 
diff --git a/Common/Core/vtkDebugLeaks.cxx b/Common/Core/vtkDebugLeaks.cxx
index f425ba8..bec71ec 100644
--- a/Common/Core/vtkDebugLeaks.cxx
+++ b/Common/Core/vtkDebugLeaks.cxx
@@ -26,7 +26,7 @@ static const char *vtkDebugLeaksIgnoreClasses[] = {
 
 //----------------------------------------------------------------------------
 // return 1 if the class should be ignored
-int vtkDebugLeaksIgnoreClassesCheck(const char* s)
+static int vtkDebugLeaksIgnoreClassesCheck(const char* s)
 {
   int i =0;
   while(vtkDebugLeaksIgnoreClasses[i])
@@ -44,7 +44,7 @@ vtkStandardNewMacro(vtkDebugLeaks);
 
 //----------------------------------------------------------------------------
 // A hash function for converting a string to a long
-inline size_t vtkHashString(const char* s)
+static inline size_t vtkHashString(const char* s)
 {
   unsigned long h = 0;
   for ( ; *s; ++s)
@@ -79,10 +79,7 @@ public:
   ~vtkDebugLeaksHashNode()
     {
       delete [] this->Key;
-      if(this->Next)
-        {
-        delete this->Next;
-        }
+      delete this->Next;
     }
 public:
   vtkDebugLeaksHashNode *Next;
diff --git a/Common/Core/vtkDebugLeaks.h b/Common/Core/vtkDebugLeaks.h
index 87b87c0..be15e15 100644
--- a/Common/Core/vtkDebugLeaks.h
+++ b/Common/Core/vtkDebugLeaks.h
@@ -66,8 +66,8 @@ public:
   //ETX
 
 protected:
-  vtkDebugLeaks(){};
-  virtual ~vtkDebugLeaks(){};
+  vtkDebugLeaks(){}
+  virtual ~vtkDebugLeaks(){}
 
   static int DisplayMessageBox(const char*);
 
@@ -98,7 +98,7 @@ private:
 // state and accessing them may cause undefined behavior.
 class VTKCOMMONCORE_EXPORT vtkDebugLeaksObserver {
 public:
-  virtual ~vtkDebugLeaksObserver() {};
+  virtual ~vtkDebugLeaksObserver() {}
   virtual void ConstructingObject(vtkObjectBase*) = 0;
   virtual void DestructingObject(vtkObjectBase*) = 0;
 };
diff --git a/Common/Core/vtkDoubleArray.cxx b/Common/Core/vtkDoubleArray.cxx
index 533e455..66995d9 100644
--- a/Common/Core/vtkDoubleArray.cxx
+++ b/Common/Core/vtkDoubleArray.cxx
@@ -28,7 +28,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(double);
 vtkStandardNewMacro(vtkDoubleArray);
 
 //----------------------------------------------------------------------------
-vtkDoubleArray::vtkDoubleArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkDoubleArray::vtkDoubleArray()
 {
 }
 
diff --git a/Common/Core/vtkDoubleArray.h b/Common/Core/vtkDoubleArray.h
index 76b8748..adc9f55 100644
--- a/Common/Core/vtkDoubleArray.h
+++ b/Common/Core/vtkDoubleArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<double>
+#endif
 class VTKCOMMONCORE_EXPORT vtkDoubleArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkDoubleArray* New();
   vtkTypeMacro(vtkDoubleArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_DOUBLE; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, double* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const double* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const double* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const double* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  double GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, double value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, double f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(double f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  double *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(double range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  double *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(double range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(double);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static double GetDataTypeValueMax() { return VTK_DOUBLE_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  double* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  double* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(double* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(double* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkDoubleArray(vtkIdType numComp=1);
+  vtkDoubleArray();
   ~vtkDoubleArray();
 
 private:
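
    (Illustrative sketch, not part of the patch: the header shrinks because the
    hand-written forwarding declarations are replaced by the
    vtkCreateWrappedArrayInterface macro, hidden from the wrappers behind
    __WRAP__. For C++ callers the typed interface still comes from
    vtkDataArrayTemplate<double>; assuming a VTK 6.1 build:)

      #include "vtkDoubleArray.h"
      #include "vtkNew.h"

      int main()
      {
        vtkNew<vtkDoubleArray> a;
        a->SetNumberOfComponents(1);
        a->SetNumberOfValues(3);        // resolves in vtkDataArrayTemplate<double>
        a->SetValue(0, 1.0);
        a->SetValue(1, 2.0);
        a->SetValue(2, 3.0);
        double *raw = a->GetPointer(0); // typed access is unchanged
        return raw[2] == 3.0 ? 0 : 1;
      }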
diff --git a/Common/Core/vtkDynamicLoader.h b/Common/Core/vtkDynamicLoader.h
index 50d9375..7c2a5e2 100644
--- a/Common/Core/vtkDynamicLoader.h
+++ b/Common/Core/vtkDynamicLoader.h
@@ -69,8 +69,8 @@ public:
   static const char* LastError();
 
 protected:
-  vtkDynamicLoader() {};
-  ~vtkDynamicLoader() {};
+  vtkDynamicLoader() {}
+  ~vtkDynamicLoader() {}
 
 private:
   vtkDynamicLoader(const vtkDynamicLoader&);  // Not implemented.
diff --git a/Common/Core/vtkEventForwarderCommand.h b/Common/Core/vtkEventForwarderCommand.h
index c101097..488f703 100644
--- a/Common/Core/vtkEventForwarderCommand.h
+++ b/Common/Core/vtkEventForwarderCommand.h
@@ -56,7 +56,7 @@ protected:
   vtkObject *Target;
 
   vtkEventForwarderCommand();
-  ~vtkEventForwarderCommand() {};
+  ~vtkEventForwarderCommand() {}
 };
 
 #endif /* __vtkEventForwarderCommand_h */
diff --git a/Common/Core/vtkFloatArray.cxx b/Common/Core/vtkFloatArray.cxx
index 111292d..19c3f04 100644
--- a/Common/Core/vtkFloatArray.cxx
+++ b/Common/Core/vtkFloatArray.cxx
@@ -28,7 +28,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(float);
 vtkStandardNewMacro(vtkFloatArray);
 
 //----------------------------------------------------------------------------
-vtkFloatArray::vtkFloatArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkFloatArray::vtkFloatArray()
 {
 }
 
diff --git a/Common/Core/vtkFloatArray.h b/Common/Core/vtkFloatArray.h
index d82955b..12264f5 100644
--- a/Common/Core/vtkFloatArray.h
+++ b/Common/Core/vtkFloatArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<float>
+#endif
 class VTKCOMMONCORE_EXPORT vtkFloatArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkFloatArray* New();
   vtkTypeMacro(vtkFloatArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_FLOAT; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, float* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const float* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const float* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const float* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  float GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, float value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, float f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(float f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  float *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(float range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  float *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(float range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(float);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,9 @@ public:
   // Get the maximum data value in its native type.
   static float GetDataTypeValueMax() { return VTK_FLOAT_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  float* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  float* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(float* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(float* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
 
 protected:
-  vtkFloatArray(vtkIdType numComp=1);
+  vtkFloatArray();
   ~vtkFloatArray();
 
 private:
diff --git a/Common/Core/vtkGarbageCollector.cxx b/Common/Core/vtkGarbageCollector.cxx
index 848a65b..920175d 100644
--- a/Common/Core/vtkGarbageCollector.cxx
+++ b/Common/Core/vtkGarbageCollector.cxx
@@ -34,7 +34,7 @@
 # include <set>
 #endif
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkGarbageCollector);
 
diff --git a/Common/Core/vtkIOStream.cxx b/Common/Core/vtkIOStream.cxx
index 47cc7a5..a6501ed 100644
--- a/Common/Core/vtkIOStream.cxx
+++ b/Common/Core/vtkIOStream.cxx
@@ -236,6 +236,7 @@ ostream& vtkIOStreamPrint(ostream& os, vtkIOStreamULL value)
 // vtkIOStream.obj : warning LNK4221: no public symbols found; archive
 // member will be inaccessible
 //
+int vtkIOStreamAvoidLNK4221Warning();
 int vtkIOStreamAvoidLNK4221Warning()
 {
   return 0;
diff --git a/Common/Core/vtkIdList.cxx b/Common/Core/vtkIdList.cxx
index 55f37f4..d3270df 100644
--- a/Common/Core/vtkIdList.cxx
+++ b/Common/Core/vtkIdList.cxx
@@ -26,19 +26,13 @@ vtkIdList::vtkIdList()
 
 vtkIdList::~vtkIdList()
 {
-  if ( this->Ids != NULL )
-    {
-    delete [] this->Ids;
-    }
+  delete [] this->Ids;
 }
 
 void vtkIdList::Initialize()
 {
-  if ( this->Ids != NULL )
-    {
-    delete [] this->Ids;
-    this->Ids = NULL;
-    }
+  delete [] this->Ids;
+  this->Ids = NULL;
   this->NumberOfIds = 0;
   this->Size = 0;
 }
@@ -64,19 +58,6 @@ void vtkIdList::SetNumberOfIds(const vtkIdType number)
   this->NumberOfIds = number;
 }
 
-void vtkIdList::InsertId(const vtkIdType i, const vtkIdType vtkid)
-{
-  if ( i >= this->Size )
-    {
-    this->Resize(i+1);
-    }
-  this->Ids[i] = vtkid;
-  if ( i >= this->NumberOfIds )
-    {
-    this->NumberOfIds = i + 1;
-    }
-}
-
 vtkIdType vtkIdList::InsertUniqueId(const vtkIdType vtkid)
 {
   for (vtkIdType i=0; i < this->NumberOfIds; i++)
@@ -130,14 +111,12 @@ void vtkIdList::DeleteId(vtkIdType vtkid)
 
 void vtkIdList::DeepCopy(vtkIdList *ids)
 {
-  this->Initialize();
-  this->NumberOfIds = ids->NumberOfIds;
-  this->Size = ids->Size;
-  this->Ids = new vtkIdType [ids->Size];
-  for (vtkIdType i=0; i < ids->NumberOfIds; i++)
+  this->SetNumberOfIds(ids->NumberOfIds);
+  if (ids->NumberOfIds > 0)
     {
-    this->Ids[i] = ids->Ids[i];
+    std::copy(ids->Ids, ids->Ids + ids->NumberOfIds, this->Ids);
     }
+  this->Squeeze();
 }
 
 vtkIdType *vtkIdList::Resize(const vtkIdType sz)
diff --git a/Common/Core/vtkIdList.h b/Common/Core/vtkIdList.h
index d304ab5..baac267 100644
--- a/Common/Core/vtkIdList.h
+++ b/Common/Core/vtkIdList.h
@@ -109,6 +109,11 @@ public:
   // to result of intersection operation.
   void IntersectWith(vtkIdList* otherIds);
 
+  // Description:
+  // Adjust the size of the id list while maintaining its content (except
+  // when being truncated).
+  vtkIdType *Resize(const vtkIdType sz);
+
   //BTX
   // This method should become legacy
   void IntersectWith(vtkIdList& otherIds) {
@@ -123,13 +128,26 @@ protected:
   vtkIdType Size;
   vtkIdType *Ids;
 
-  vtkIdType *Resize(const vtkIdType sz);
 private:
   vtkIdList(const vtkIdList&);  // Not implemented.
   void operator=(const vtkIdList&);  // Not implemented.
 };
 
 // In-lined for performance
+inline void vtkIdList::InsertId(const vtkIdType i, const vtkIdType vtkid)
+{
+  if (i >= this->Size)
+    {
+    this->Resize(i + 1);
+    }
+  this->Ids[i] = vtkid;
+  if (i >= this->NumberOfIds)
+    {
+    this->NumberOfIds = i + 1;
+    }
+}
+
+// In-lined for performance
 inline vtkIdType vtkIdList::InsertNextId(const vtkIdType vtkid)
 {
   if ( this->NumberOfIds >= this->Size )
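
    (Illustrative sketch, not part of the patch: InsertId is now inlined and
    DeepCopy uses std::copy followed by Squeeze(), so a deep-copied list ends up
    exactly sized. The calling pattern is unchanged; assuming a VTK 6.1 build:)

      #include "vtkIdList.h"
      #include "vtkNew.h"

      int main()
      {
        vtkNew<vtkIdList> ids;
        ids->InsertId(9, 42);               // grows the list; NumberOfIds becomes 10
        vtkNew<vtkIdList> copy;
        copy->DeepCopy(ids.GetPointer());   // copies all ids, then Squeeze()s
        return copy->GetId(9) == 42 ? 0 : 1;
      }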
diff --git a/Common/Core/vtkIdListCollection.h b/Common/Core/vtkIdListCollection.h
index a0a5ee6..bd8fdcc 100644
--- a/Common/Core/vtkIdListCollection.h
+++ b/Common/Core/vtkIdListCollection.h
@@ -57,8 +57,8 @@ public:
   //ETX
 
 protected:
-  vtkIdListCollection() {};
-  ~vtkIdListCollection() {};
+  vtkIdListCollection() {}
+  ~vtkIdListCollection() {}
 
 
 private:
diff --git a/Common/Core/vtkIdTypeArray.cxx b/Common/Core/vtkIdTypeArray.cxx
index ce9cba6..fef7dee 100644
--- a/Common/Core/vtkIdTypeArray.cxx
+++ b/Common/Core/vtkIdTypeArray.cxx
@@ -30,7 +30,7 @@
 vtkStandardNewMacro(vtkIdTypeArray);
 
 //----------------------------------------------------------------------------
-vtkIdTypeArray::vtkIdTypeArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkIdTypeArray::vtkIdTypeArray()
 {
 }
 
diff --git a/Common/Core/vtkIdTypeArray.h b/Common/Core/vtkIdTypeArray.h
index 6e78738..42f2054 100644
--- a/Common/Core/vtkIdTypeArray.h
+++ b/Common/Core/vtkIdTypeArray.h
@@ -31,88 +31,38 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<vtkIdType>
+#endif
 class VTKCOMMONCORE_EXPORT vtkIdTypeArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkIdTypeArray* New();
   vtkTypeMacro(vtkIdTypeArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(vtkIdType);
+#else
+
   // Description:
   // Get the data type.
   int GetDataType()
-    { return VTK_ID_TYPE; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, vtkIdType* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const vtkIdType* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const vtkIdType* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const vtkIdType* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  vtkIdType GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, vtkIdType value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, vtkIdType f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(vtkIdType f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  vtkIdType *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(vtkIdType range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  vtkIdType *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(vtkIdType range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+    {
+      // This needs to be overwritten from the superclass because
+      // the templated superclass is not able to differentiate
+      // vtkIdType from a long long or an int since vtkIdType
+      // is simply a typedef. This means that
+      // vtkDataArrayTemplate<vtkIdType> != vtkIdTypeArray.
+      return VTK_ID_TYPE;
+    }
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +72,8 @@ public:
   // Get the maximum data value in its native type.
   static vtkIdType GetDataTypeValueMax() { return VTK_ID_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  vtkIdType* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  vtkIdType* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(vtkIdType* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(vtkIdType* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkIdTypeArray(vtkIdType numComp=1);
+  vtkIdTypeArray();
   ~vtkIdTypeArray();
 
 private:
diff --git a/Common/Core/vtkInformationKey.h b/Common/Core/vtkInformationKey.h
index 46fe26a..ea6a16d 100644
--- a/Common/Core/vtkInformationKey.h
+++ b/Common/Core/vtkInformationKey.h
@@ -123,18 +123,18 @@ private:
 // The corresponding method declaration must appear in the class
 // definition in the header file.
 #define vtkInformationKeyMacro(CLASS, NAME, type)             \
- vtkInformation##type##Key* CLASS::NAME()                     \
-   {                                                          \
-   static vtkInformation##type##Key* CLASS##_##NAME =         \
-        new vtkInformation##type##Key(#NAME, #CLASS);         \
-   return CLASS##_##NAME;                                     \
-   }
+  static vtkInformation##type##Key* CLASS##_##NAME =          \
+    new vtkInformation##type##Key(#NAME, #CLASS);             \
+  vtkInformation##type##Key* CLASS::NAME()                    \
+  {                                                           \
+    return CLASS##_##NAME;                                    \
+  }
 #define vtkInformationKeyRestrictedMacro(CLASS, NAME, type, required)   \
- vtkInformation##type##Key* CLASS::NAME()                               \
-   {                                                                    \
-   static vtkInformation##type##Key* CLASS##_##NAME =                   \
-     new vtkInformation##type##Key(#NAME, #CLASS, required);            \
-   return CLASS##_##NAME;                                               \
-   }
+  static vtkInformation##type##Key* CLASS##_##NAME =                    \
+    new vtkInformation##type##Key(#NAME, #CLASS, required);             \
+  vtkInformation##type##Key* CLASS::NAME()                              \
+  {                                                                     \
+    return CLASS##_##NAME;                                              \
+  }
 
 #endif
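
    (Illustrative sketch, not part of the patch: with the revised macro the key
    object is created by a file-scope static initializer instead of a
    function-local static, so it exists as soon as the translation unit is
    loaded. For a hypothetical class vtkMyFilter that declares
    "static vtkInformationIntegerKey* MY_KEY();" in its header, the invocation
    vtkInformationKeyMacro(vtkMyFilter, MY_KEY, Integer) in its .cxx now expands
    roughly to:)

      // Expansion for a hypothetical vtkMyFilter; class and key names are
      // placeholders, not identifiers from this patch.
      static vtkInformationIntegerKey* vtkMyFilter_MY_KEY =
        new vtkInformationIntegerKey("MY_KEY", "vtkMyFilter");
      vtkInformationIntegerKey* vtkMyFilter::MY_KEY()
      {
        return vtkMyFilter_MY_KEY;
      }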
diff --git a/Common/Core/vtkInstantiator.h b/Common/Core/vtkInstantiator.h
index da3ae91..5fb6213 100644
--- a/Common/Core/vtkInstantiator.h
+++ b/Common/Core/vtkInstantiator.h
@@ -123,6 +123,9 @@ public:
   ~vtkInstantiatorInitialize();
 private:
   static unsigned int Count;
+private:
+  vtkInstantiatorInitialize(const vtkInstantiatorInitialize& other); // no copy constructor
+  vtkInstantiatorInitialize& operator=(const vtkInstantiatorInitialize& rhs); // no copy assignment
 };
 
 // This instance will show up in any translation unit that uses
diff --git a/Common/Core/vtkIntArray.cxx b/Common/Core/vtkIntArray.cxx
index d3b369b..dd4066a 100644
--- a/Common/Core/vtkIntArray.cxx
+++ b/Common/Core/vtkIntArray.cxx
@@ -28,7 +28,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(int);
 vtkStandardNewMacro(vtkIntArray);
 
 //----------------------------------------------------------------------------
-vtkIntArray::vtkIntArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkIntArray::vtkIntArray()
 {
 }
 
diff --git a/Common/Core/vtkIntArray.h b/Common/Core/vtkIntArray.h
index 92708ed..68c367a 100644
--- a/Common/Core/vtkIntArray.h
+++ b/Common/Core/vtkIntArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<int>
+#endif
 class VTKCOMMONCORE_EXPORT vtkIntArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkIntArray* New();
   vtkTypeMacro(vtkIntArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_INT; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, int* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const int* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const int* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const int* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  int GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, int value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, int f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(int f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  int *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(int range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  int *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(int range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(int);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static int GetDataTypeValueMax() { return VTK_INT_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  int* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  int* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(int* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(int* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkIntArray(vtkIdType numComp=1);
+  vtkIntArray();
   ~vtkIntArray();
 
 private:
diff --git a/Common/Core/vtkLargeInteger.cxx b/Common/Core/vtkLargeInteger.cxx
index eb125d1..518c512 100644
--- a/Common/Core/vtkLargeInteger.cxx
+++ b/Common/Core/vtkLargeInteger.cxx
@@ -23,35 +23,16 @@
 
 const unsigned int BIT_INCREMENT = 32;
 
-int maximum(int a, int b)
+static int maximum(int a, int b)
 {
   return a > b ? a : b;
 }
 
-int minimum(int a, int b)
+static int minimum(int a, int b)
 {
   return a < b ? a : b;
 }
 
-long vtkpow(long a, long b)
-{
-  long a1 = a;
-  long b1 = b;
-  long c = 1;
-
-  while (b1 >= 1)
-    {
-    while ( ( b & 1 ) == 0)
-      {
-      b1 = b1 / 2;
-      a1 = a1 * a1;
-      }
-    b1 = b1 - 1;
-    c = c * a1;
-    }
-  return c;
-}
-
 void vtkLargeInteger::Contract()
 {
   while (this->Number[this->Sig] == 0 && this->Sig > 0)
diff --git a/Common/Core/vtkLongArray.cxx b/Common/Core/vtkLongArray.cxx
index 8bae731..ae23c65 100644
--- a/Common/Core/vtkLongArray.cxx
+++ b/Common/Core/vtkLongArray.cxx
@@ -28,7 +28,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(long);
 vtkStandardNewMacro(vtkLongArray);
 
 //----------------------------------------------------------------------------
-vtkLongArray::vtkLongArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkLongArray::vtkLongArray()
 {
 }
 
diff --git a/Common/Core/vtkLongArray.h b/Common/Core/vtkLongArray.h
index d9bebc9..35cf3c7 100644
--- a/Common/Core/vtkLongArray.h
+++ b/Common/Core/vtkLongArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<long>
+#endif
 class VTKCOMMONCORE_EXPORT vtkLongArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkLongArray* New();
   vtkTypeMacro(vtkLongArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_LONG; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, long* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const long* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const long* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const long* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  long GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, long value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, long f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(long f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  long *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(long range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  long *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(long range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(long);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static long GetDataTypeValueMax() { return VTK_LONG_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  long* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  long* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(long* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(long* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkLongArray(vtkIdType numComp=1);
+  vtkLongArray();
   ~vtkLongArray();
 
 private:
diff --git a/Common/Core/vtkLongLongArray.cxx b/Common/Core/vtkLongLongArray.cxx
index c3f0074..bf99899 100644
--- a/Common/Core/vtkLongLongArray.cxx
+++ b/Common/Core/vtkLongLongArray.cxx
@@ -27,7 +27,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(long long);
 vtkStandardNewMacro(vtkLongLongArray);
 
 //----------------------------------------------------------------------------
-vtkLongLongArray::vtkLongLongArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkLongLongArray::vtkLongLongArray()
 {
 }
 
diff --git a/Common/Core/vtkLongLongArray.h b/Common/Core/vtkLongLongArray.h
index afdbe86..2f0c37a 100644
--- a/Common/Core/vtkLongLongArray.h
+++ b/Common/Core/vtkLongLongArray.h
@@ -31,89 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<long long>
+#endif
 class VTKCOMMONCORE_EXPORT vtkLongLongArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkLongLongArray* New();
   vtkTypeMacro(vtkLongLongArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_LONG_LONG; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, long long* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const long long* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const long long* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const long long* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  long long GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, long long value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, long long f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(long long f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  long long *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(long long range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  long long *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(long long range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
-
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(long long);
+#endif
   // Description:
   // Get the minimum data value in its native type.
   static long long GetDataTypeValueMin() { return VTK_LONG_LONG_MIN; }
@@ -122,33 +58,8 @@ public:
   // Get the maximum data value in its native type.
   static long long GetDataTypeValueMax() { return VTK_LONG_LONG_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  long long* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  long long* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(long long* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(long long* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkLongLongArray(vtkIdType numComp=1);
+  vtkLongLongArray();
   ~vtkLongLongArray();
 
 private:
diff --git a/Common/Core/vtkLookupTable.cxx b/Common/Core/vtkLookupTable.cxx
index b9f517c..e96837c 100644
--- a/Common/Core/vtkLookupTable.cxx
+++ b/Common/Core/vtkLookupTable.cxx
@@ -22,7 +22,7 @@
 #include "vtkStringArray.h"
 #include "vtkVariantArray.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkLookupTable);
 
diff --git a/Common/Core/vtkMappedDataArray.h b/Common/Core/vtkMappedDataArray.h
new file mode 100644
index 0000000..c1c656b
--- /dev/null
+++ b/Common/Core/vtkMappedDataArray.h
@@ -0,0 +1,158 @@
+/*==============================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMappedDataArray.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+==============================================================================*/
+// .NAME vtkMappedDataArray - Map non-contiguous data structures into the
+// vtkDataArray API.
+//
+// .SECTION Description
+// vtkMappedDataArray is a superclass for vtkDataArrays that do not use
+// the standard memory layout, and allows VTK to interface with
+// simulation codes for in-situ analysis without repacking simulation data.
+//
+// vtkMappedDataArrayNewInstanceMacro is used by subclasses to implement
+// NewInstanceInternal such that a non-mapped vtkDataArray is returned by
+// NewInstance(). This prevents the mapped array type from propagating
+// through the pipeline.
+//
+// .SECTION Notes
+// Subclasses that hold vtkIdType elements must also
+// reimplement `int GetDataType()` (see Caveat in vtkTypedDataArray).
+
+#ifndef __vtkMappedDataArray_h
+#define __vtkMappedDataArray_h
+
+#include "vtkTypedDataArray.h"
+
+#include "vtkTypeTemplate.h" // for vtkTypeTemplate
+
+template <class Scalar>
+class vtkMappedDataArray : public vtkTypeTemplate<vtkMappedDataArray<Scalar>,
+                                                  vtkTypedDataArray<Scalar> >
+{
+public:
+  typedef vtkTypedDataArray<Scalar> Superclass;
+  typedef typename Superclass::ValueType ValueType;
+
+  // Description:
+  // Perform a fast, safe cast from a vtkAbstractArray to a vtkMappedDataArray.
+  // This method checks if:
+  // - source->GetArrayType() is appropriate, and
+  // - source->GetDataType() matches the Scalar template argument
+  // if these conditions are met, the method performs a static_cast to return
+  // source as a vtkMappedDataArray pointer. Otherwise, NULL is returned.
+  static vtkMappedDataArray<Scalar>* FastDownCast(vtkAbstractArray *source);
+
+  void PrintSelf(ostream &os, vtkIndent indent);
+
+  // vtkAbstractArray virtual methods that must be reimplemented.
+  void DeepCopy(vtkAbstractArray *aa) = 0;
+  vtkVariant GetVariantValue(vtkIdType idx) = 0;
+  void SetVariantValue(vtkIdType idx, vtkVariant value) = 0;
+  void GetTuples(vtkIdList *ptIds, vtkAbstractArray *output) = 0;
+  void GetTuples(vtkIdType p1, vtkIdType p2, vtkAbstractArray *output) = 0;
+  void InterpolateTuple(vtkIdType i, vtkIdList *ptIndices,
+                        vtkAbstractArray *source, double *weights) = 0;
+  void InterpolateTuple(vtkIdType i, vtkIdType id1,
+                        vtkAbstractArray* source1, vtkIdType id2,
+                        vtkAbstractArray* source2, double t) = 0;
+
+  // vtkDataArray virtual method that must be reimplemented.
+  void DeepCopy(vtkDataArray *da) = 0;
+
+  // Description:
+  // Print an error and create an internal, long-lived temporary array. This
+  // method should not be used on vtkMappedDataArray subclasses. See
+  // vtkTypedDataArrayIterator and/or vtkDataArrayIteratorMacro instead.
+  void * GetVoidPointer(vtkIdType id);
+
+  // Description:
+  // Copy the internal data to the void pointer. The pointer is cast to this
+  // array's Scalar type and vtkTypedDataArrayIterator is used to populate
+  // the input array.
+  void ExportToVoidPointer(void *ptr);
+
+  // Description:
+  // Read the data from the internal temporary array (created by GetVoidPointer)
+  // back into the mapped array. If GetVoidPointer has not been called (and the
+  // internal array therefore does not exist), print an error and return. The
+  // default implementation uses vtkTypedDataArrayIterator to extract the mapped
+  // data.
+  void DataChanged();
+
+  // Description:
+  // This method doesn't make sense for mapped data arrays. Prints an error and
+  // returns.
+  void SetVoidArray(void *, vtkIdType, int);
+
+  // Description:
+  // Not implemented. Print error and return NULL.
+  void * WriteVoidPointer(vtkIdType /*id*/, vtkIdType /*number*/)
+  {
+    vtkErrorMacro(<<"WriteVoidPointer: Method not implemented.");
+    return NULL;
+  }
+
+  // Description:
+  // Invalidate the internal temporary array and call superclass method.
+  void Modified();
+
+  // vtkAbstractArray override:
+  bool HasStandardMemoryLayout() { return false; }
+
+protected:
+  vtkMappedDataArray();
+  ~vtkMappedDataArray();
+
+  virtual int GetArrayType()
+  {
+    return vtkAbstractArray::MappedDataArray;
+  }
+
+private:
+  vtkMappedDataArray(const vtkMappedDataArray &); // Not implemented.
+  void operator=(const vtkMappedDataArray &);   // Not implemented.
+
+  // Description: Temporary internal array used as fallback storage for
+  // GetVoidPointer.
+  ValueType *TemporaryScalarPointer;
+  size_t TemporaryScalarPointerSize;
+};
+
+#include "vtkMappedDataArray.txx"
+
+// Adds an implementation of NewInstanceInternal() that returns a standard
+// (unmapped) VTK array, if possible. Use this with classes that derive from
+// vtkTypeTemplate; otherwise, use vtkMappedDataArrayTypeMacro.
+#define vtkMappedDataArrayNewInstanceMacro(thisClass) \
+  protected: \
+  vtkObjectBase *NewInstanceInternal() const \
+  { \
+    if (vtkDataArray *da = \
+        vtkDataArray::CreateDataArray(thisClass::VTK_DATA_TYPE)) \
+      { \
+      return da; \
+      } \
+    return thisClass::New(); \
+  } \
+  public:
+
+// Same as vtkTypeMacro, but adds an implementation of NewInstanceInternal()
+// that returns a standard (unmapped) VTK array, if possible.
+#define vtkMappedDataArrayTypeMacro(thisClass, superClass) \
+  vtkAbstractTypeMacroWithNewInstanceType(thisClass, superClass, vtkDataArray) \
+  vtkMappedDataArrayNewInstanceMacro(thisClass)
+
+#endif //__vtkMappedDataArray_h
+
+// VTK-HeaderTest-Exclude: vtkMappedDataArray.h
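
    (Illustrative sketch, not part of the patch: as the class comments above
    suggest, client code that should stay fast for both standard and mapped
    arrays avoids GetVoidPointer and iterates through the typed interface.
    This assumes the vtkTypedDataArrayIterator constructor used in the .txx
    below; the function name is a placeholder.)

      #include "vtkAbstractArray.h"
      #include "vtkTypedDataArray.h"
      #include "vtkTypedDataArrayIterator.h"

      // Sum every value of a float-valued array without triggering the
      // expensive GetVoidPointer() fallback that vtkMappedDataArray warns about.
      double SumFloatArray(vtkAbstractArray *array)
      {
        vtkTypedDataArray<float> *typed =
          vtkTypedDataArray<float>::FastDownCast(array);
        if (!typed)
          {
          return 0.0; // not float-valued; real code would use the double interface
          }
        vtkTypedDataArrayIterator<float> it(typed, 0);
        vtkTypedDataArrayIterator<float> end(
          typed, typed->GetNumberOfComponents() * typed->GetNumberOfTuples());
        double sum = 0.0;
        for (; it != end; ++it)
          {
          sum += *it;
          }
        return sum;
      }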
diff --git a/Common/Core/vtkMappedDataArray.txx b/Common/Core/vtkMappedDataArray.txx
new file mode 100644
index 0000000..8e284ce
--- /dev/null
+++ b/Common/Core/vtkMappedDataArray.txx
@@ -0,0 +1,169 @@
+/*==============================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMappedDataArray.txx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+==============================================================================*/
+
+#ifndef __vtkMappedDataArray_txx
+#define __vtkMappedDataArray_txx
+
+#include "vtkMappedDataArray.h"
+
+#include "vtkVariant.h" // for vtkVariant
+
+//------------------------------------------------------------------------------
+template<class Scalar>
+vtkMappedDataArray<Scalar>::vtkMappedDataArray()
+{
+  this->TemporaryScalarPointer = NULL;
+  this->TemporaryScalarPointerSize = 0;
+}
+
+//------------------------------------------------------------------------------
+template<class Scalar>
+vtkMappedDataArray<Scalar>::~vtkMappedDataArray()
+{
+  if (this->TemporaryScalarPointer)
+  {
+    delete [] this->TemporaryScalarPointer;
+    this->TemporaryScalarPointer = NULL;
+    this->TemporaryScalarPointerSize = 0;
+  }
+}
+
+//------------------------------------------------------------------------------
+template<class Scalar>
+void * vtkMappedDataArray<Scalar>::GetVoidPointer(vtkIdType id)
+{
+  vtkWarningMacro(<<"GetVoidPointer called. This is very expensive for "
+                  "vtkMappedDataArray subclasses, since the scalar array must "
+                  "be generated for each call. Consider using "
+                  "a vtkTypedDataArrayIterator instead.");
+  size_t numValues = this->NumberOfComponents * this->GetNumberOfTuples();
+
+  if (this->TemporaryScalarPointer &&
+      this->TemporaryScalarPointerSize != numValues)
+    {
+    delete [] this->TemporaryScalarPointer;
+    this->TemporaryScalarPointer = NULL;
+    this->TemporaryScalarPointerSize = 0;
+    }
+
+  if (!this->TemporaryScalarPointer)
+    {
+    this->TemporaryScalarPointer = new Scalar[numValues];
+    this->TemporaryScalarPointerSize = numValues;
+    }
+
+  this->ExportToVoidPointer(static_cast<void*>(this->TemporaryScalarPointer));
+
+  return static_cast<void*>(this->TemporaryScalarPointer + id);
+}
+
+//------------------------------------------------------------------------------
+template<class Scalar>
+void vtkMappedDataArray<Scalar>::ExportToVoidPointer(void *voidPtr)
+{
+  vtkTypedDataArrayIterator<Scalar> begin(this, 0);
+  vtkTypedDataArrayIterator<Scalar> end =
+      begin + (this->NumberOfComponents * this->GetNumberOfTuples());
+
+  Scalar *ptr = static_cast<Scalar*>(voidPtr);
+
+  while (begin != end)
+    {
+    *ptr = *begin;
+    ++begin;
+    ++ptr;
+    }
+}
+
+//------------------------------------------------------------------------------
+template<class Scalar>
+void vtkMappedDataArray<Scalar>::SetVoidArray(void *, vtkIdType, int)
+{
+  vtkErrorMacro(<<"SetVoidArray not supported for vtkMappedDataArray "
+                "subclasses.");
+  return;
+}
+
+//------------------------------------------------------------------------------
+template<class Scalar>
+void vtkMappedDataArray<Scalar>::DataChanged()
+{
+  if (!this->TemporaryScalarPointer)
+    {
+    vtkWarningMacro(<<"DataChanged called, but no scalar pointer available.");
+    return;
+    }
+
+  vtkTypedDataArrayIterator<Scalar> begin(this, 0);
+  vtkTypedDataArrayIterator<Scalar> end =
+      begin + this->TemporaryScalarPointerSize;
+
+  Scalar *ptr = this->TemporaryScalarPointer;
+
+  while (begin != end)
+    {
+    *begin = *ptr;
+    ++begin;
+    ++ptr;
+    }
+
+  this->Modified();
+}
+
+//------------------------------------------------------------------------------
+template<class Scalar> inline vtkMappedDataArray<Scalar>*
+vtkMappedDataArray<Scalar>::FastDownCast(vtkAbstractArray *source)
+{
+  switch (source->GetArrayType())
+    {
+    case vtkAbstractArray::MappedDataArray:
+      if (source->GetDataType() == vtkTypeTraits<Scalar>::VTK_TYPE_ID)
+        {
+        return static_cast<vtkMappedDataArray<Scalar>*>(source);
+        }
+    default:
+      return NULL;
+    }
+}
+
+//------------------------------------------------------------------------------
+template<class Scalar>
+void vtkMappedDataArray<Scalar>::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+
+  os << indent << "TemporaryScalarPointer: "
+     << this->TemporaryScalarPointer << "\n";
+  os << indent << "TemporaryScalarPointerSize: "
+     << this->TemporaryScalarPointerSize << "\n";
+}
+
+//------------------------------------------------------------------------------
+template<class Scalar>
+void vtkMappedDataArray<Scalar>::Modified()
+{
+  this->vtkTypedDataArray<Scalar>::Modified();
+
+  if (this->TemporaryScalarPointer == NULL)
+    {
+    return;
+    }
+
+  delete [] this->TemporaryScalarPointer;
+  this->TemporaryScalarPointer = NULL;
+  this->TemporaryScalarPointerSize = 0;
+}
+
+#endif //__vtkMappedDataArray_txx
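
The GetVoidPointer() warning in the template above points callers at vtkTypedDataArrayIterator instead of flattening the whole mapped array. A minimal sketch of that pattern, assuming some concrete vtkMappedDataArray<float> subclass has already been populated (the function name is illustrative):

    #include "vtkMappedDataArray.h"
    #include "vtkTypedDataArrayIterator.h"

    // Sum every value without triggering the expensive GetVoidPointer() copy.
    double SumMappedValues(vtkMappedDataArray<float> *array)
    {
      double sum = 0.0;
      vtkTypedDataArrayIterator<float> it(array, 0);
      vtkTypedDataArrayIterator<float> end =
        it + (array->GetNumberOfComponents() * array->GetNumberOfTuples());
      for (; it != end; ++it)
        {
        sum += *it;
        }
      return sum;
    }
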
diff --git a/Common/Core/vtkMath.cxx b/Common/Core/vtkMath.cxx
index c4af12f..22b1a57 100644
--- a/Common/Core/vtkMath.cxx
+++ b/Common/Core/vtkMath.cxx
@@ -25,7 +25,7 @@
 #include "vtkMath.h"
 #include "vtkObjectFactory.h"
 #include "vtkDataArray.h"
-#include <assert.h>
+#include <cassert>
 #include <cmath>
 
 #include "vtkBoxMuellerRandomSequence.h"
@@ -72,12 +72,7 @@ union vtkIEEE754Bits {
   double d;
 };
 
-#if defined(_MSC_VER)
-// MSVC70 is broken; it doesn't accept the "LL" suffix. MSVC6 and MSVC71 do.
-static union vtkIEEE754Bits vtkMathNanBits    = { 0x7FF8000000000000i64 };
-static union vtkIEEE754Bits vtkMathInfBits    = { 0x7FF0000000000000i64 };
-static union vtkIEEE754Bits vtkMathNegInfBits = { 0xFFF0000000000000i64 };
-#elif defined(__BORLANDC__)
+#if defined(__BORLANDC__)
 // Borland C++ union initializers are broken.
 // Use an otherwise-discouraged aliasing trick:
 static vtkTypeUInt64 vtkMathNanBits    = 0x7FF8000000000000i64;
@@ -91,14 +86,15 @@ static union vtkIEEE754Bits vtkMathNegInfBits = { 0xFFF0000000000000LL };
 
 #endif //VTK_HAS_STD_NUMERIC_LIMITS
 
-#if defined(_MSC_VER) || defined(__BORLANDC__)
+#if defined(VTK_NON_FINITE_CAUSES_EXCEPTIONS)
+#if defined(__BORLANDC__)
 const vtkTypeInt64 vtkMathDoubleExponent = 0x7FF0000000000000i64;
 const vtkTypeInt64 vtkMathDoubleMantissa = 0x000FFFFFFFFFFFFFi64;
 #else
 const vtkTypeInt64 vtkMathDoubleExponent = 0x7FF0000000000000LL;
 const vtkTypeInt64 vtkMathDoubleMantissa = 0x000FFFFFFFFFFFFFLL;
 #endif
-
+#endif
 
 //
 // Some useful macros and functions
@@ -773,7 +769,7 @@ int vtkJacobiN(T **a, int n, T *w, T **v)
     tmp = w[k];
     for (i=j+1; i<n; i++)                // boundary incorrect, shifted already
       {
-      if (w[i] >= tmp)                   // why exchage if same?
+      if (w[i] >= tmp)                   // why exchange if same?
         {
         k = i;
         tmp = w[k];
diff --git a/Common/Core/vtkMath.h b/Common/Core/vtkMath.h
index 36893f9..595582d 100644
--- a/Common/Core/vtkMath.h
+++ b/Common/Core/vtkMath.h
@@ -41,7 +41,7 @@
 
 #include "vtkMathConfigure.h" // For <cmath> and VTK_HAS_ISNAN etc.
 
-#include <assert.h> // assert() in inline implementations.
+#include <cassert> // assert() in inline implementations.
 
 #ifndef DBL_MIN
 #  define VTK_DBL_MIN    2.2250738585072014e-308
@@ -657,11 +657,29 @@ public:
                           int *tmp1Size, double *tmp2Size);
 
   // Description:
-  // Factor linear equations Ax = b using LU decomposition A = LU where L is
-  // lower triangular matrix and U is upper triangular matrix. Input is
-  // square matrix A, integer array of pivot indices index[0->n-1], and size
-  // of square matrix n. Output factorization LU is in matrix A. If error is
-  // found, method returns 0.
+  // Factor linear equations Ax = b using LU decomposition into the form
+  // A = LU, where L is a unit lower triangular matrix and U is an upper
+  // triangular matrix.
+  // The input is a square matrix A, an integer array of pivot indices index[0->n-1],
+  // and the size, n, of the square matrix.
+  // The output is provided by overwriting the input A with a matrix of the same size as
+  // A containing all of the information about L and U. If the output matrix is
+  // \f$ A* = \left( \begin{array}{cc}
+  // a & b \\ %
+  // c & d \end{array} \right)\f$
+  // then L and U can be obtained as:
+  // \f$ L = \left( \begin{array}{cc}
+  // 1 & 0 \\ %
+  // c & 1 \end{array} \right)\f$
+  // \f$ U = \left( \begin{array}{cc}
+  // a & b \\ %
+  // 0 & d \end{array} \right)\f$
+  //
+  // That is, the diagonal of the resulting A* is the diagonal of U. The upper right
+  // triangle of A* is the upper right triangle of U. The lower left triangle of A* is
+  // the lower left triangle of L (and since L is unit lower triangular, the diagonal
+  // of L is all 1's).
+  // If an error is found, the function returns 0.
   static int LUFactorLinearSystem(double **A, int *index, int size);
 
   // Description:
@@ -938,8 +956,8 @@ public:
   static bool IsFinite(double x);
 
 protected:
-  vtkMath() {};
-  ~vtkMath() {};
+  vtkMath() {}
+  ~vtkMath() {}
 
   static vtkMathInternal Internal;
 private:
@@ -1245,8 +1263,11 @@ inline double vtkMath::ClampAndNormalizeValue(double value,
 #define VTK_MATH_ISINF_IS_INLINE
 inline int vtkMath::IsInf(double x)
 {
-  using namespace std; // Could be isinf() or std::isinf()
+#if defined(VTK_HAS_STD_ISINF)
+  return std::isinf(x);
+#else
   return (isinf(x) != 0); // Force conversion to bool
+#endif
 }
 #endif
 
@@ -1255,8 +1276,11 @@ inline int vtkMath::IsInf(double x)
 #define VTK_MATH_ISNAN_IS_INLINE
 inline int vtkMath::IsNan(double x)
 {
-  using namespace std; // Could be isnan() or std::isnan()
+#if defined(VTK_HAS_STD_ISNAN)
+  return std::isnan(x);
+#else
   return (isnan(x) != 0); // Force conversion to bool
+#endif
 }
 #endif
 
@@ -1265,8 +1289,9 @@ inline int vtkMath::IsNan(double x)
 #define VTK_MATH_ISFINITE_IS_INLINE
 inline bool vtkMath::IsFinite(double x)
 {
-#if defined(VTK_HAS_ISFINITE) || defined(VTK_HAS_STD_ISFINITE)
-  using namespace std; // Could be isfinite() or std::isfinite()
+#if defined(VTK_HAS_STD_ISFINITE)
+  return std::isfinite(x);
+#elif defined(VTK_HAS_ISFINITE)
   return (isfinite(x) != 0); // Force conversion to bool
 #else
   return (finite(x) != 0); // Force conversion to bool
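
The expanded LUFactorLinearSystem() documentation above is easiest to see with a tiny solve. A sketch, under the assumption that, as elsewhere in VTK, the factorization is paired with vtkMath::LUSolveLinearSystem() for the back substitution:

    #include "vtkMath.h"

    // Solve the 2x2 system A x = b in place with the LU routines.
    bool Solve2x2(double a00, double a01, double a10, double a11,
                  double b0, double b1, double x[2])
    {
      double row0[2] = { a00, a01 };
      double row1[2] = { a10, a11 };
      double *A[2] = { row0, row1 };
      int index[2];

      if (!vtkMath::LUFactorLinearSystem(A, index, 2))
        {
        return false; // singular or numerically degenerate matrix
        }

      x[0] = b0;
      x[1] = b1;
      vtkMath::LUSolveLinearSystem(A, index, x, 2); // overwrites x with the solution
      return true;
    }
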
diff --git a/Common/Core/vtkMathConfigure.h.in b/Common/Core/vtkMathConfigure.h.in
index 7a47eab..7a9432e 100644
--- a/Common/Core/vtkMathConfigure.h.in
+++ b/Common/Core/vtkMathConfigure.h.in
@@ -58,6 +58,29 @@
 # define VTK_HAS_FINITE 1
 #endif
 
+// The CUDA compiler (nvcc) is a secondary compiler that is used alongside
+// your host compiler. While your host compiler may support std::isnan and
+// related functions, the CUDA compiler does not. Because the CUDA compiler
+// is given both host and device code to parse, it will fail when it sees
+// std::isnan, which appears in the vtkMath header rather than in the
+// implementation file. To work around this we check __CUDACC__, which
+// is only defined when compiling with the CUDA compiler.
+#ifdef __CUDACC__
+
+#if defined(VTK_HAS_STD_ISINF)
+#undef VTK_HAS_STD_ISINF
+#endif
+
+#if defined(VTK_HAS_STD_ISNAN)
+#undef VTK_HAS_STD_ISNAN
+#endif
+
+#if defined(VTK_HAS_STD_ISFINITE)
+#undef VTK_HAS_STD_ISFINITE
+#endif
+
+#endif
+
 // We pretty much assume that all compilers are going to provide
 // std::numeric_limits methods in the limits header.  One of the Borland
 // dashboard compilers failed to link in the methods, so we have a special
diff --git a/Common/Core/vtkMathUtilities.h b/Common/Core/vtkMathUtilities.h
index 29de96a..4cca9a8 100644
--- a/Common/Core/vtkMathUtilities.h
+++ b/Common/Core/vtkMathUtilities.h
@@ -46,6 +46,45 @@ bool FuzzyCompare(A a, A b, A epsilon)
   return fabs(a - b) < epsilon;
 }
 
+// Description:
+// Performs safe division that catches overflow and underflow.
+template<class A>
+A SafeDivision(A a, A b)
+{
+  // Avoid overflow
+  if( (b < static_cast<A>(1)) && (a > b*std::numeric_limits<A>::max()) )
+    {
+    return std::numeric_limits<A>::max();
+    }
+
+  // Avoid underflow
+  if( (a == static_cast<A>(0)) ||
+      ((b > static_cast<A>(1)) && (a < b*std::numeric_limits<A>::min())) )
+    {
+    return static_cast<A>(0);
+    }
+
+  // safe to do the division
+  return( a/b );
+}
+
+// Description:
+// A slightly different fuzzy comparator that checks if two values are
+// "nearly" equal based on Knuth, "The Art of Computer Programming (vol II)"
+template<class A>
+bool NearlyEqual(A a, A b, A tol=std::numeric_limits<A>::epsilon())
+{
+  A absdiff = fabs(a-b);
+  A d1  = vtkMathUtilities::SafeDivision<A>(absdiff,fabs(a));
+  A d2  = vtkMathUtilities::SafeDivision<A>(absdiff,fabs(b));
+
+  if( (d1 <= tol) || (d2 <= tol) )
+    {
+    return true;
+    }
+  return false;
+}
+
 } // End vtkMathUtilities namespace.
 
 #endif // __vtkMathUtilities_h
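
A quick illustration of the two new helpers: SafeDivision() clamps results that would overflow or underflow, and NearlyEqual() builds a relative comparison on top of it (a minimal, self-contained sketch):

    #include "vtkMathUtilities.h"

    #include <cassert>
    #include <limits>

    int main()
    {
      // Relative comparison: the difference is far below the default tolerance.
      assert(vtkMathUtilities::NearlyEqual(1.0, 1.0 + 1e-18));

      // SafeDivision clamps to the largest finite value instead of overflowing.
      double big = vtkMathUtilities::SafeDivision(1.0, 1e-320);
      assert(big == std::numeric_limits<double>::max());

      return 0;
    }
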
diff --git a/Common/Core/vtkMutexLock.h b/Common/Core/vtkMutexLock.h
index 61efa23..1dcbe1b 100644
--- a/Common/Core/vtkMutexLock.h
+++ b/Common/Core/vtkMutexLock.h
@@ -72,6 +72,10 @@ public:
 protected:
   friend class vtkSimpleConditionVariable;
   vtkMutexType   MutexLock;
+
+private:
+  vtkSimpleMutexLock(const vtkSimpleMutexLock& other); // no copy constructor
+  vtkSimpleMutexLock& operator=(const vtkSimpleMutexLock& rhs); // no copy assignment
 };
 
 //ETX
@@ -98,7 +102,7 @@ protected:
   //ETX
 
   vtkSimpleMutexLock   SimpleMutexLock;
-  vtkMutexLock() {};
+  vtkMutexLock() {}
 private:
   vtkMutexLock(const vtkMutexLock&);  // Not implemented.
   void operator=(const vtkMutexLock&);  // Not implemented.
diff --git a/Common/Core/vtkNew.h b/Common/Core/vtkNew.h
index 976dd1a..b0ca7bd 100644
--- a/Common/Core/vtkNew.h
+++ b/Common/Core/vtkNew.h
@@ -97,6 +97,10 @@ public:
     {
     return this->Object;
     }
+  T* Get() const
+    {
+    return this->Object;
+    }
 
 private:
   vtkNew(vtkNew<T> const&); // Not implemented.
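
The new Get() accessor gives vtkNew the same raw-pointer spelling that vtkSmartPointer gains in this commit; a small usage sketch:

    #include "vtkNew.h"
    #include "vtkIntArray.h"

    void Example()
    {
      vtkNew<vtkIntArray> values;        // created with ::New(), deleted automatically
      values->SetName("values");         // operator-> forwards to the held object
      vtkIntArray *raw = values.Get();   // same pointer that GetPointer() returns
      (void)raw;                         // hand it to APIs expecting a raw pointer
    }
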
diff --git a/Common/Core/vtkObjectBase.cxx b/Common/Core/vtkObjectBase.cxx
index 0b5a245..3754ec1 100644
--- a/Common/Core/vtkObjectBase.cxx
+++ b/Common/Core/vtkObjectBase.cxx
@@ -198,7 +198,7 @@ void vtkObjectBase::RegisterInternal(vtkObjectBase*, int check)
   if(!(check &&
        vtkObjectBaseToGarbageCollectorFriendship::TakeReference(this)))
     {
-    ++this->ReferenceCount;
+    this->ReferenceCount++;
     }
 }
 
diff --git a/Common/Core/vtkObjectBase.h b/Common/Core/vtkObjectBase.h
index 83659fe..fdd29ad 100644
--- a/Common/Core/vtkObjectBase.h
+++ b/Common/Core/vtkObjectBase.h
@@ -41,6 +41,7 @@
 #define __vtkObjectBase_h
 
 #include "vtkCommonCoreModule.h" // For export macro
+#include "vtkAtomicInt.h"
 #include "vtkIndent.h"
 #include "vtkSystemIncludes.h"
 
@@ -143,7 +144,9 @@ public:
   // Description:
   // Return the current reference count of this object.
   int  GetReferenceCount()
-    {return this->ReferenceCount;}
+  {
+    return this->ReferenceCount;
+  }
 
   // Description:
   // Sets the reference count. (This is very dangerous, use with care.)
@@ -159,7 +162,7 @@ protected:
 
   virtual void CollectRevisions(ostream&) {} // Legacy; do not use!
 
-  int ReferenceCount;
+  vtkAtomicInt<vtkTypeInt32> ReferenceCount;
   vtkWeakPointerBase **WeakPointers;
 
   // Internal Register/UnRegister implementation that accounts for
diff --git a/Common/Core/vtkObjectFactory.h b/Common/Core/vtkObjectFactory.h
index af472f1..e51a153 100644
--- a/Common/Core/vtkObjectFactory.h
+++ b/Common/Core/vtkObjectFactory.h
@@ -309,8 +309,9 @@ vtkObjectFactory* vtkLoad()                     \
   VTK_OBJECT_FACTORY_NEW_BODY(thisClass)
 #elif defined(VTK_DEBUG_LEAKS)
 # define VTK_STANDARD_NEW_BODY(thisClass) \
-  vtkObjectFactory::ConstructInstance(#thisClass); \
-  return new thisClass;
+  thisClass *result = new thisClass; \
+  vtkObjectFactory::ConstructInstance(result->GetClassName()); \
+  return result;
 #else
 # define VTK_STANDARD_NEW_BODY(thisClass) \
   return new thisClass;
diff --git a/Common/Core/vtkObjectFactoryCollection.h b/Common/Core/vtkObjectFactoryCollection.h
index c74afb1..6ffb725 100644
--- a/Common/Core/vtkObjectFactoryCollection.h
+++ b/Common/Core/vtkObjectFactoryCollection.h
@@ -57,8 +57,8 @@ public:
   //ETX
 
 protected:
-  vtkObjectFactoryCollection() {};
-  ~vtkObjectFactoryCollection() {};
+  vtkObjectFactoryCollection() {}
+  ~vtkObjectFactoryCollection() {}
 
 
 private:
diff --git a/Common/Core/vtkOutputWindow.cxx b/Common/Core/vtkOutputWindow.cxx
index 8cbb531..822799d 100644
--- a/Common/Core/vtkOutputWindow.cxx
+++ b/Common/Core/vtkOutputWindow.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 #include "vtkOutputWindow.h"
 #include "vtkToolkits.h"
-#if defined( _WIN32 ) && !defined( VTK_USE_X ) && defined( VTK_USE_DISPLAY )
+#if defined( _WIN32 ) && !defined( VTK_USE_X )
 #include "vtkWin32OutputWindow.h"
 #endif
 #include "vtkCommand.h"
@@ -153,7 +153,7 @@ vtkOutputWindow* vtkOutputWindow::GetInstance()
       // if the factory failed to create the object,
       // then destroy it now, as vtkDebugLeaks::ConstructClass was called
       // with "vtkOutputWindow", and not the real name of the class
-#if defined( _WIN32 ) && !defined( VTK_USE_X ) && defined( VTK_USE_DISPLAY )
+#if defined( _WIN32 ) && !defined( VTK_USE_X )
 #ifdef VTK_DEBUG_LEAKS
       vtkDebugLeaks::DestructClass("vtkOutputWindow");
 #endif
diff --git a/Common/Core/vtkOutputWindow.h b/Common/Core/vtkOutputWindow.h
index a92d581..34a9a71 100644
--- a/Common/Core/vtkOutputWindow.h
+++ b/Common/Core/vtkOutputWindow.h
@@ -31,6 +31,10 @@ class VTKCOMMONCORE_EXPORT vtkOutputWindowCleanup
 public:
   vtkOutputWindowCleanup();
   ~vtkOutputWindowCleanup();
+
+private:
+  vtkOutputWindowCleanup(const vtkOutputWindowCleanup& other); // no copy constructor
+  vtkOutputWindowCleanup& operator=(const vtkOutputWindowCleanup& rhs); // no copy assignment
 };
 //ETX
 
diff --git a/Common/Core/vtkOverrideInformationCollection.h b/Common/Core/vtkOverrideInformationCollection.h
index 3547006..cd13a66 100644
--- a/Common/Core/vtkOverrideInformationCollection.h
+++ b/Common/Core/vtkOverrideInformationCollection.h
@@ -52,8 +52,8 @@ public:
   //ETX
 
 protected:
-  vtkOverrideInformationCollection() {};
-  ~vtkOverrideInformationCollection() {};
+  vtkOverrideInformationCollection() {}
+  ~vtkOverrideInformationCollection() {}
 
 
 private:
diff --git a/Common/Core/vtkPoints.cxx b/Common/Core/vtkPoints.cxx
index 2c2f9d6..6041493 100644
--- a/Common/Core/vtkPoints.cxx
+++ b/Common/Core/vtkPoints.cxx
@@ -77,35 +77,66 @@ vtkPoints::~vtkPoints()
 }
 
 // Given a list of pt ids, return an array of points.
-void vtkPoints::GetPoints(vtkIdList *ptIds, vtkPoints *fp)
+void vtkPoints::GetPoints(vtkIdList *ptIds, vtkPoints *outPoints)
 {
-  vtkIdType num = ptIds->GetNumberOfIds();
+  outPoints->Data->SetNumberOfTuples(ptIds->GetNumberOfIds());
+  this->Data->GetTuples(ptIds, outPoints->Data);
+}
 
-  for (vtkIdType i=0; i < num; i++)
-    {
-    fp->InsertPoint(i, this->GetPoint(ptIds->GetId(i)));
-    }
+namespace
+{
+  template <class T>
+  void InternalComputeBounds(vtkDataArray* array,
+                             double* bounds,
+                             T*)
+  {
+    bounds[0] = bounds[2] = bounds[4] =  VTK_DOUBLE_MAX;
+    bounds[1] = bounds[3] = bounds[5] = -VTK_DOUBLE_MAX;
+
+    if (vtkTypedDataArray<T>* tarray = vtkTypedDataArray<T>::FastDownCast(array))
+      {
+      T x[3];
+      vtkIdType numPts = tarray->GetNumberOfTuples();
+      for (vtkIdType i=0; i<numPts; i++)
+        {
+        tarray->GetTupleValue(i, x);
+        bounds[0] = x[0] < bounds[0] ? x[0] : bounds[0];
+        bounds[1] = x[0] > bounds[1] ? x[0] : bounds[1];
+        bounds[2] = x[1] < bounds[2] ? x[1] : bounds[2];
+        bounds[3] = x[1] > bounds[3] ? x[1] : bounds[3];
+        bounds[4] = x[2] < bounds[4] ? x[2] : bounds[4];
+        bounds[5] = x[2] > bounds[5] ? x[2] : bounds[5];
+        }
+      }
+    else
+      {
+      double x[3];
+      vtkIdType numPts = array->GetNumberOfTuples();
+      for (vtkIdType i=0; i<numPts; i++)
+        {
+        array->GetTuple(i, x);
+        bounds[0] = x[0] < bounds[0] ? x[0] : bounds[0];
+        bounds[1] = x[0] > bounds[1] ? x[0] : bounds[1];
+        bounds[2] = x[1] < bounds[2] ? x[1] : bounds[2];
+        bounds[3] = x[1] > bounds[3] ? x[1] : bounds[3];
+        bounds[4] = x[2] < bounds[4] ? x[2] : bounds[4];
+        bounds[5] = x[2] > bounds[5] ? x[2] : bounds[5];
+        }
+      }
+  }
 }
 
 // Determine (xmin,xmax, ymin,ymax, zmin,zmax) bounds of points.
 void vtkPoints::ComputeBounds()
 {
-  vtkIdType i;
-  double *x;
-
   if ( this->GetMTime() > this->ComputeTime )
     {
-    this->Bounds[0] = this->Bounds[2] = this->Bounds[4] =  VTK_DOUBLE_MAX;
-    this->Bounds[1] = this->Bounds[3] = this->Bounds[5] = -VTK_DOUBLE_MAX;
-    for (i=0; i<this->GetNumberOfPoints(); i++)
+    switch (this->Data->GetDataType())
       {
-      x = this->GetPoint(i);
-      this->Bounds[0] = x[0] < this->Bounds[0] ? x[0] : this->Bounds[0];
-      this->Bounds[1] = x[0] > this->Bounds[1] ? x[0] : this->Bounds[1];
-      this->Bounds[2] = x[1] < this->Bounds[2] ? x[1] : this->Bounds[2];
-      this->Bounds[3] = x[1] > this->Bounds[3] ? x[1] : this->Bounds[3];
-      this->Bounds[4] = x[2] < this->Bounds[4] ? x[2] : this->Bounds[4];
-      this->Bounds[5] = x[2] > this->Bounds[5] ? x[2] : this->Bounds[5];
+      vtkTemplateMacro(InternalComputeBounds(
+                         this->Data,
+                         this->Bounds,
+                         (VTK_TT*)0));
       }
 
     this->ComputeTime.Modified();
@@ -219,23 +250,18 @@ void vtkPoints::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os,indent);
 
-  double *bounds;
-
   os << indent << "Data: " << this->Data << "\n";
-  if ( this->Data )
+  if ( this->Data->GetName() )
     {
-    if ( this->Data->GetName() )
-      {
-      os << indent << "Data Array Name: " << this->Data->GetName() << "\n";
-      }
-    else
-      {
-      os << indent << "Data Array Name: (none)\n";
-      }
+    os << indent << "Data Array Name: " << this->Data->GetName() << "\n";
+    }
+  else
+    {
+    os << indent << "Data Array Name: (none)\n";
     }
 
   os << indent << "Number Of Points: " << this->GetNumberOfPoints() << "\n";
-  bounds = this->GetBounds();
+  double *bounds = this->GetBounds();
   os << indent << "Bounds: \n";
   os << indent << "  Xmin,Xmax: (" << bounds[0] << ", " << bounds[1] << ")\n";
   os << indent << "  Ymin,Ymax: (" << bounds[2] << ", " << bounds[3] << ")\n";
diff --git a/Common/Core/vtkPoints.h b/Common/Core/vtkPoints.h
index 493d486..8e5ed06 100644
--- a/Common/Core/vtkPoints.h
+++ b/Common/Core/vtkPoints.h
@@ -154,6 +154,11 @@ public:
   void SetNumberOfPoints(vtkIdType number);
 
   // Description:
+  // Resize the internal array while conserving the data.  Returns 1 if
+  // resizing succeeded and 0 otherwise.
+  int Resize(vtkIdType numPoints);
+
+  // Description:
   // Given a list of pt ids, return an array of points.
   void GetPoints(vtkIdList *ptId, vtkPoints *fp);
 
@@ -188,6 +193,12 @@ inline void vtkPoints::SetNumberOfPoints(vtkIdType number)
   this->Data->SetNumberOfTuples(number);
 }
 
+inline int vtkPoints::Resize(vtkIdType numPoints)
+{
+  this->Data->SetNumberOfComponents(3);
+  return this->Data->Resize(numPoints);
+}
+
 inline void vtkPoints::SetPoint(vtkIdType id, double x, double y, double z)
 {
   double p[3];
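
A short sketch of the new Resize() in use, alongside the existing insertion and bounds APIs; the point counts are arbitrary:

    #include "vtkPoints.h"
    #include "vtkSmartPointer.h"

    void Example()
    {
      vtkSmartPointer<vtkPoints> pts = vtkSmartPointer<vtkPoints>::New();
      pts->InsertNextPoint(0.0, 0.0, 0.0);
      pts->InsertNextPoint(1.0, 2.0, 3.0);

      // Grow the underlying array while conserving the two points already stored.
      if (!pts->Resize(100))
        {
        // allocation failed; the existing data is untouched
        }

      double bounds[6];
      pts->GetBounds(bounds); // xmin, xmax, ymin, ymax, zmin, zmax
    }
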
diff --git a/Common/Core/vtkPoints2D.cxx b/Common/Core/vtkPoints2D.cxx
index b89f417..bb4d93e 100644
--- a/Common/Core/vtkPoints2D.cxx
+++ b/Common/Core/vtkPoints2D.cxx
@@ -225,23 +225,18 @@ void vtkPoints2D::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os,indent);
 
-  double *bounds;
-
   os << indent << "Data: " << this->Data << "\n";
-  if ( this->Data )
+  if ( this->Data->GetName() )
     {
-    if ( this->Data->GetName() )
-      {
-      os << indent << "Data Array Name: " << this->Data->GetName() << "\n";
-      }
-    else
-      {
-      os << indent << "Data Array Name: (none)\n";
-      }
+    os << indent << "Data Array Name: " << this->Data->GetName() << "\n";
+    }
+  else
+    {
+    os << indent << "Data Array Name: (none)\n";
     }
 
   os << indent << "Number Of Points: " << this->GetNumberOfPoints() << "\n";
-  bounds = this->GetBounds();
+  double *bounds = this->GetBounds();
   os << indent << "Bounds: \n";
   os << indent << "  Xmin,Xmax: (" << bounds[0] << ", " << bounds[1] << ")\n";
   os << indent << "  Ymin,Ymax: (" << bounds[2] << ", " << bounds[3] << ")\n";
diff --git a/Common/Core/vtkPriorityQueue.cxx b/Common/Core/vtkPriorityQueue.cxx
index a04b278..29f80c8 100644
--- a/Common/Core/vtkPriorityQueue.cxx
+++ b/Common/Core/vtkPriorityQueue.cxx
@@ -38,10 +38,7 @@ void vtkPriorityQueue::Allocate(const vtkIdType sz, const vtkIdType ext)
     }
 
   this->Size = ( sz > 0 ? sz : 1);
-  if ( this->Array != NULL )
-    {
-    delete [] this->Array;
-    }
+  delete [] this->Array;
   this->Array = new vtkPriorityQueue::Item[sz];
   this->Extend = ( ext > 0 ? ext : 1);
   this->MaxId = -1;
@@ -51,10 +48,7 @@ void vtkPriorityQueue::Allocate(const vtkIdType sz, const vtkIdType ext)
 vtkPriorityQueue::~vtkPriorityQueue()
 {
   this->ItemLocation->Delete();
-  if ( this->Array )
-    {
-    delete [] this->Array;
-    }
+  delete [] this->Array;
 }
 
 // Insert id with priority specified.
diff --git a/Common/Core/vtkSMPThreadLocalObject.h b/Common/Core/vtkSMPThreadLocalObject.h
new file mode 100644
index 0000000..f7e01d0
--- /dev/null
+++ b/Common/Core/vtkSMPThreadLocalObject.h
@@ -0,0 +1,166 @@
+ /*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPThreadLocalObject.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPThreadLocalObject - Thread local storage for VTK objects.
+// .SECTION Description
+// This class essentially does the same thing as vtkSMPThreadLocal with
+// 2 additional functions:
+// - Local() allocates an object of the template argument type using ::New
+// - The destructor calls Delete() on all objects created with Local().
+//
+// .SECTION Warning
+// There is absolutely no guarantee about the order in which the local objects
+// will be stored, and hence the order in which they will be traversed when
+// using iterators. You should not even assume that two vtkSMPThreadLocal
+// instances populated in the same parallel section will be populated in the
+// same order. For example, consider the following:
+// \verbatim
+// vtkSMPThreadLocal<int> Foo;
+// vtkSMPThreadLocal<int> Bar;
+// class AFunctor
+// {
+//    void Initialize() const
+//    {
+//      int& foo = Foo.Local();
+//      int& bar = Bar.Local();
+//      foo = random();
+//      bar = foo;
+//    }
+//
+//    void operator()(vtkIdType, vtkIdType)
+//    {}
+//    void Finalize()
+//    {}
+// };
+//
+// AFunctor functor;
+// vtkSMPTools::For(0, 100000, functor);
+//
+// vtkSMPThreadLocal<int>::iterator itr1 = Foo.begin();
+// vtkSMPThreadLocal<int>::iterator itr2 = Bar.begin();
+// while (itr1 != Foo.end())
+// {
+//   assert(*itr1 == *itr2);
+//   ++itr1; ++itr2;
+// }
+// \endverbatim
+//
+// It is possible and likely that the assert() will fail using the TBB
+// backend. So if you need to store values related to each other and
+// iterate over them together, use a struct or class to group them together
+// and use a thread local of that class.
+//
+// .SECTION See Also
+// vtkSMPThreadLocal
+
+#ifndef __vtkSMPThreadLocalObject_h
+#define __vtkSMPThreadLocalObject_h
+
+#include <vtkSMPThreadLocal.h>
+
+template <typename T>
+class vtkSMPThreadLocalObject
+{
+  typedef vtkSMPThreadLocal<T*> TLS;
+  typedef typename vtkSMPThreadLocal<T*>::iterator TLSIter;
+
+  // Hide the copy constructor for now and assignment
+  // operator for now.
+  vtkSMPThreadLocalObject(const vtkSMPThreadLocalObject&);
+  void operator=(const vtkSMPThreadLocalObject&);
+
+public:
+  // Description:
+  // Default constructor.
+  vtkSMPThreadLocalObject() : Internal(0)
+    {
+    }
+
+  virtual ~vtkSMPThreadLocalObject()
+    {
+      iterator iter = this->begin();
+      while (iter != this->end())
+        {
+        if (*iter)
+          {
+          (*iter)->Delete();
+          }
+        ++iter;
+        }
+    }
+
+  // Description:
+  // Returns an object local to the current thread.
+  // This object is allocated with ::New() and will
+  // be deleted in the destructor of vtkSMPThreadLocalObject.
+  T*& Local()
+    {
+      T*& vtkobject = this->Internal.Local();
+      if (!vtkobject)
+        {
+        vtkobject = T::New();
+        }
+      return vtkobject;
+    }
+
+  // Description:
+  // Subset of the standard iterator API.
+  // The most common design pattern is to use iterators in a sequential
+  // code block and to use only the thread local objects in parallel
+  // code blocks.
+  class iterator
+  {
+  public:
+    iterator& operator++()
+      {
+        ++this->Iter;
+        return *this;
+      }
+
+    bool operator!=(const iterator& other)
+      {
+        return this->Iter != other.Iter;
+      }
+
+    T*& operator*()
+      {
+        return *this->Iter;
+      }
+
+  private:
+    TLSIter Iter;
+
+    friend class vtkSMPThreadLocalObject<T>;
+  };
+
+  iterator begin()
+    {
+      iterator iter;
+      iter.Iter = this->Internal.begin();
+      return iter;
+    }
+
+  iterator end()
+    {
+      iterator iter;
+      iter.Iter = this->Internal.end();
+      return iter;
+    }
+
+private:
+  TLS Internal;
+};
+
+#endif
+// VTK-HeaderTest-Exclude: vtkSMPThreadLocalObject.h
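
Following the class documentation above, a sketch of the intended pattern: each thread lazily gets its own vtkIdList through Local(), and the per-thread results are visited sequentially afterwards (the functor name is illustrative):

    #include "vtkSMPThreadLocalObject.h"
    #include "vtkSMPTools.h"
    #include "vtkIdList.h"

    struct CollectIds
    {
      vtkSMPThreadLocalObject<vtkIdList> TLIds; // one vtkIdList per thread

      void operator()(vtkIdType begin, vtkIdType end)
      {
        vtkIdList *ids = this->TLIds.Local(); // allocated with ::New() on first use
        for (vtkIdType i = begin; i < end; ++i)
          {
          if (i % 2 == 0)
            {
            ids->InsertNextId(i);
            }
          }
      }
    };

    void Example()
    {
      CollectIds functor;
      vtkSMPTools::For(0, 1000, functor);

      // Sequentially visit the per-thread lists.
      vtkIdType total = 0;
      for (vtkSMPThreadLocalObject<vtkIdList>::iterator it = functor.TLIds.begin();
           it != functor.TLIds.end(); ++it)
        {
        total += (*it)->GetNumberOfIds();
        }
      (void)total;
    }
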
diff --git a/Common/Core/vtkSMPTools.h b/Common/Core/vtkSMPTools.h
new file mode 100644
index 0000000..99e3907
--- /dev/null
+++ b/Common/Core/vtkSMPTools.h
@@ -0,0 +1,212 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPTools.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPTools - A set of parallel (multi-threaded) utility functions.
+// .SECTION Description
+// vtkSMPTools provides a set of utility functions that can
+// be used to parallelize parts of VTK code using multiple threads.
+// There are several back-end implementations of parallel functionality
+// (currently Sequential, TBB and X-Kaapi) to which actual execution is
+// delegated.
+
+#ifndef __vtkSMPTools_h__
+#define __vtkSMPTools_h__
+
+#include "vtkCommonCoreModule.h" // For export macro
+#include "vtkObject.h"
+
+#include "vtkSMPThreadLocal.h" // For Initialized
+
+class vtkSMPTools;
+
+#include "vtkSMPToolsInternal.h"
+
+#ifndef DOXYGEN_SHOULD_SKIP_THIS
+#ifndef __WRAP__
+namespace vtk
+{
+namespace detail
+{
+namespace smp
+{
+template <typename T>
+class vtkSMPTools_Has_Initialize
+{
+  typedef char (&no_type)[1];
+  typedef char (&yes_type)[2];
+  template <typename U, void (U::*)()> struct V {};
+  template <typename U> static yes_type check(V<U, &U::Initialize>*);
+  template <typename U> static no_type check(...);
+public:
+  static bool const value = sizeof(check<T>(0)) == sizeof(yes_type);
+};
+
+template <typename T>
+class vtkSMPTools_Has_Initialize_const
+{
+  typedef char (&no_type)[1];
+  typedef char (&yes_type)[2];
+  template <typename U, void (U::*)() const> struct V {};
+  template <typename U> static yes_type check(V<U, &U::Initialize>*);
+  template <typename U> static no_type check(...);
+public:
+  static bool const value = sizeof(check<T>(0)) == sizeof(yes_type);
+};
+
+template <typename Functor, bool Init>
+struct vtkSMPTools_FunctorInternal;
+
+template <typename Functor>
+struct vtkSMPTools_FunctorInternal<Functor, false>
+{
+  Functor& F;
+  vtkSMPTools_FunctorInternal(Functor& f): F(f) {}
+  void Execute(vtkIdType first, vtkIdType last)
+  {
+    this->F(first, last);
+  }
+  void For(vtkIdType first, vtkIdType last, vtkIdType grain)
+  {
+    vtk::detail::smp::vtkSMPTools_Impl_For(first, last, grain, *this);
+  }
+  vtkSMPTools_FunctorInternal<Functor, false>& operator=(
+    const vtkSMPTools_FunctorInternal<Functor, false>&);
+  vtkSMPTools_FunctorInternal<Functor, false>(
+    const vtkSMPTools_FunctorInternal<Functor, false>&);
+};
+
+template <typename Functor>
+struct vtkSMPTools_FunctorInternal<Functor, true>
+{
+  Functor& F;
+  vtkSMPThreadLocal<unsigned char> Initialized;
+  vtkSMPTools_FunctorInternal(Functor& f): F(f), Initialized(0) {}
+  void Execute(vtkIdType first, vtkIdType last)
+  {
+    unsigned char& inited = this->Initialized.Local();
+    if (!inited)
+      {
+      this->F.Initialize();
+      inited = 1;
+      }
+    this->F(first, last);
+  }
+  void For(vtkIdType first, vtkIdType last, vtkIdType grain)
+  {
+    vtk::detail::smp::vtkSMPTools_Impl_For(first, last, grain, *this);
+    this->F.Reduce();
+  }
+  vtkSMPTools_FunctorInternal<Functor, true>& operator=(
+    const vtkSMPTools_FunctorInternal<Functor, true>&);
+  vtkSMPTools_FunctorInternal<Functor, true>(
+    const vtkSMPTools_FunctorInternal<Functor, true>&);
+};
+
+template <typename Functor>
+class vtkSMPTools_Lookup_For
+{
+  static bool const init = vtkSMPTools_Has_Initialize<Functor>::value;
+public:
+  typedef vtkSMPTools_FunctorInternal<Functor, init> type;
+};
+
+template <typename Functor>
+class vtkSMPTools_Lookup_For<Functor const>
+{
+  static bool const init = vtkSMPTools_Has_Initialize_const<Functor>::value;
+public:
+  typedef vtkSMPTools_FunctorInternal<Functor const, init> type;
+};
+} // namespace smp
+} // namespace detail
+} // namespace vtk
+#endif // __WRAP__
+#endif // DOXYGEN_SHOULD_SKIP_THIS
+
+class VTKCOMMONCORE_EXPORT vtkSMPTools
+{
+public:
+
+  // Description:
+  // Execute a for operation in parallel. First and last
+  // define the range over which to operate (which is defined
+  // by the operator). The operation executed is defined by
+  // operator() of the functor object. The grain gives the parallel
+  // engine a hint about the coarseness over which to parallelize
+  // the function (as defined by last-first of each execution of
+  // operator() ).
+  template <typename Functor>
+  static void For(vtkIdType first, vtkIdType last, vtkIdType grain, Functor& f)
+  {
+    typename vtk::detail::smp::vtkSMPTools_Lookup_For<Functor>::type fi(f);
+    fi.For(first, last, grain);
+  }
+
+  // Description:
+  // Execute a for operation in parallel. First and last
+  // define the range over which to operate (which is defined
+  // by the operator). The operation executed is defined by
+  // operator() of the functor object. The grain gives the parallel
+  // engine a hint about the coarseness over which to parallelize
+  // the function (as defined by last-first of each execution of
+  // operator() ).
+  template <typename Functor>
+  static void For(vtkIdType first, vtkIdType last, vtkIdType grain, Functor const& f)
+  {
+    typename vtk::detail::smp::vtkSMPTools_Lookup_For<Functor const>::type fi(f);
+    fi.For(first, last, grain);
+  }
+
+  // Description:
+  // Execute a for operation in parallel. First and last
+  // define the range over which to operate (which is defined
+  // by the operator). The operation executed is defined by
+  // operator() of the functor object. The grain gives the parallel
+  // engine a hint about the coarseness over which to parallelize
+  // the function (as defined by last-first of each execution of
+  // operator() ). Uses a default value for the grain.
+  template <typename Functor>
+  static void For(vtkIdType first, vtkIdType last, Functor& f)
+  {
+    vtkSMPTools::For(first, last, 0, f);
+  }
+
+  // Description:
+  // Execute a for operation in parallel. First and last
+  // define the range over which to operate (which is defined
+  // by the operator). The operation executed is defined by
+  // operator() of the functor object. The grain gives the parallel
+  // engine a hint about the coarseness over which to parallelize
+  // the function (as defined by last-first of each execution of
+  // operator() ). Uses a default value for the grain.
+  template <typename Functor>
+  static void For(vtkIdType first, vtkIdType last, Functor const& f)
+  {
+    vtkSMPTools::For(first, last, 0, f);
+  }
+
+  // Description:
+  // Initialize the underlying libraries for execution. This is
+  // not required as it is automatically called before the first
+  // execution of any parallel code. However, it can be used to
+  // control the maximum number of threads used when the back-end
+  // supports it (currently Simple and TBB only). Make sure to call
+  // it before any other parallel operation.
+  // When using Kaapi, use the KAAPI_CPUCOUNT env. variable to control
+  // the number of threads used in the thread pool.
+  static void Initialize(int numThreads=0);
+};
+
+#endif
+// VTK-HeaderTest-Exclude: vtkSMPTools.h
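
A sketch of a functor that uses the optional Initialize()/Reduce() hooks detected by vtkSMPTools_Has_Initialize: Initialize() runs once per thread before its first chunk, and Reduce() runs once after the loop. The class name and the use of vtkSMPThreadLocal for the partial sums are illustrative choices, not requirements:

    #include "vtkSMPTools.h"
    #include "vtkSMPThreadLocal.h"

    class RangeSum
    {
    public:
      RangeSum() : Total(0) {}

      void Initialize()                 // called once per thread
      {
        this->LocalSum.Local() = 0;
      }

      void operator()(vtkIdType begin, vtkIdType end)
      {
        vtkIdType &sum = this->LocalSum.Local();
        for (vtkIdType i = begin; i < end; ++i)
          {
          sum += i;
          }
      }

      void Reduce()                     // called once, after the parallel loop
      {
        this->Total = 0;
        for (vtkSMPThreadLocal<vtkIdType>::iterator it = this->LocalSum.begin();
             it != this->LocalSum.end(); ++it)
          {
          this->Total += *it;
          }
      }

      vtkIdType Total;

    private:
      vtkSMPThreadLocal<vtkIdType> LocalSum;
    };

    void Example()
    {
      RangeSum functor;
      vtkSMPTools::For(0, 1000000, functor); // default grain
      // functor.Total now holds 0 + 1 + ... + 999999
    }
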
diff --git a/Common/Core/vtkScalarsToColors.cxx b/Common/Core/vtkScalarsToColors.cxx
index 590bec5..a720d4a 100644
--- a/Common/Core/vtkScalarsToColors.cxx
+++ b/Common/Core/vtkScalarsToColors.cxx
@@ -1323,10 +1323,7 @@ void vtkScalarsToColors::MapColorsToColors(
       }
     }
 
-  if (newPtr)
-    {
-    delete [] newPtr;
-    }
+  delete [] newPtr;
 }
 
 //----------------------------------------------------------------------------
@@ -1383,10 +1380,7 @@ void vtkScalarsToColors::MapVectorsToMagnitude(
         numberOfTuples, vectorSize, inInc));
     }
 
-  if (newPtr)
-    {
-    delete [] newPtr;
-    }
+  delete [] newPtr;
 }
 
 //----------------------------------------------------------------------------
@@ -1496,10 +1490,7 @@ void vtkScalarsToColors::MapScalarsThroughTable2(
       }
     }
 
-  if (newPtr)
-    {
-    delete [] newPtr;
-    }
+  delete [] newPtr;
 }
 
 //----------------------------------------------------------------------------
@@ -1596,8 +1587,7 @@ void vtkScalarsToColors::SetAnnotations(
 {
   if (
     (values && !annotations) ||
-    (!values && annotations) ||
-    (values == this->AnnotatedValues && annotations == this->Annotations))
+    (!values && annotations))
     return;
 
   if (values && annotations &&
@@ -1610,36 +1600,49 @@ void vtkScalarsToColors::SetAnnotations(
     return;
     }
 
-  bool sameVals = (values == this->AnnotatedValues);
-  bool sameText = (annotations == this->Annotations);
-  if (this->AnnotatedValues && !sameVals)
+  if (this->AnnotatedValues && !values)
     {
     this->AnnotatedValues->Delete();
     this->AnnotatedValues = 0;
     }
-  if (this->Annotations && !sameText)
-    {
-    this->Annotations->Delete();
-    this->Annotations = 0;
+  else if (values)
+    { // Ensure arrays are of the same type before copying.
+    if (this->AnnotatedValues)
+      {
+      if (this->AnnotatedValues->GetDataType() != values->GetDataType())
+        {
+        this->AnnotatedValues->Delete();
+        this->AnnotatedValues = 0;
+        }
+      }
+    if (!this->AnnotatedValues)
+      {
+      this->AnnotatedValues =
+        vtkAbstractArray::CreateArray(
+          values->GetDataType());
+      }
     }
-  if (!values)
+  bool sameVals = (values == this->AnnotatedValues);
+  if (!sameVals && values)
     {
-    return;
+    this->AnnotatedValues->DeepCopy(values);
     }
-  if (!sameVals)
+
+  if (this->Annotations && !annotations)
     {
-    this->AnnotatedValues = values;
-    this->AnnotatedValues->Register(this);
+    this->Annotations->Delete();
+    this->Annotations = 0;
     }
-  if (!sameText)
+  else if (!this->Annotations && annotations)
     {
-    this->Annotations = annotations;
-    this->Annotations->Register(this);
+    this->Annotations = vtkStringArray::New();
     }
-  if (this->AnnotatedValues)
+  bool sameText = (annotations == this->Annotations);
+  if (!sameText)
     {
-    this->UpdateAnnotatedValueMap();
+    this->Annotations->DeepCopy(annotations);
     }
+  this->UpdateAnnotatedValueMap();
   this->Modified();
 }
 
@@ -1818,7 +1821,8 @@ void vtkScalarsToColors::UpdateAnnotatedValueMap()
 {
   this->AnnotatedValueMap->clear();
 
-  vtkIdType na = this->AnnotatedValues->GetMaxId() + 1;
+  vtkIdType na =
+    this->AnnotatedValues ? this->AnnotatedValues->GetMaxId() + 1 : 0;
   for (vtkIdType i = 0; i < na; ++ i)
     {
     (*this->AnnotatedValueMap)[this->AnnotatedValues->GetVariantValue(i)] = i;
diff --git a/Common/Core/vtkScalarsToColors.h b/Common/Core/vtkScalarsToColors.h
index e333e20..8cb634d 100644
--- a/Common/Core/vtkScalarsToColors.h
+++ b/Common/Core/vtkScalarsToColors.h
@@ -68,7 +68,7 @@ public:
   // Description:
   // Perform any processing required (if any) before processing
   // scalars.
-  virtual void Build() {};
+  virtual void Build() {}
 
   // Description:
   // Sets/Gets the range of scalars which will be mapped.
@@ -233,6 +233,12 @@ public:
   // as a set of annotations to add to a scalar array (when IndexedLookup is false).
   // The two arrays must both either be NULL or of the same length or
   // the call will be ignored.
+  //
+  // Note that these arrays are deep copied rather than being used directly
+  // in order to support the use case where edits are made. If the
+  // \a values and \a annotations arrays were held by this class then each
+  // call to map scalar values to colors would require us to check the MTime
+  // of the arrays.
   virtual void SetAnnotations( vtkAbstractArray* values, vtkStringArray* annotations );
   vtkGetObjectMacro(AnnotatedValues,vtkAbstractArray);
   vtkGetObjectMacro(Annotations,vtkStringArray);
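
Because SetAnnotations() now deep-copies its inputs, the caller keeps ownership of the arrays it passes in and may modify or release them afterwards. A minimal sketch using vtkLookupTable (any vtkScalarsToColors subclass would do; the labels are made up):

    #include "vtkLookupTable.h"
    #include "vtkVariantArray.h"
    #include "vtkVariant.h"
    #include "vtkStringArray.h"
    #include "vtkNew.h"

    void Example()
    {
      vtkNew<vtkVariantArray> values;
      vtkNew<vtkStringArray> labels;
      values->InsertNextValue(vtkVariant(0)); labels->InsertNextValue("background");
      values->InsertNextValue(vtkVariant(1)); labels->InsertNextValue("tissue");
      values->InsertNextValue(vtkVariant(2)); labels->InsertNextValue("bone");

      vtkNew<vtkLookupTable> lut;
      lut->SetAnnotations(values.GetPointer(), labels.GetPointer());
      // Both arrays were deep copied; the local ones can be edited or released
      // without affecting the lookup table's annotations.
    }
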
diff --git a/Common/Core/vtkSetGet.h b/Common/Core/vtkSetGet.h
index 9880738..32a9bfe 100644
--- a/Common/Core/vtkSetGet.h
+++ b/Common/Core/vtkSetGet.h
@@ -96,7 +96,7 @@ virtual void Set##name (const char* _arg) \
   vtkDebugMacro(<< this->GetClassName() << " (" << this << "): setting " << #name " to " << (_arg?_arg:"(null)") ); \
   if ( this->name == NULL && _arg == NULL) { return;} \
   if ( this->name && _arg && (!strcmp(this->name,_arg))) { return;} \
-  if (this->name) { delete [] this->name; } \
+  delete [] this->name; \
   if (_arg) \
     { \
     size_t n = strlen(_arg) + 1; \
@@ -577,9 +577,9 @@ virtual double *Get##name() \
     return this->name##Coordinate->GetValue(); \
 }
 
-// Macro used to determine whether a class is the same class or
-// a subclass of the named class.
-#define vtkTypeMacro(thisClass,superclass) \
+// Allows definition of vtkObject API such that NewInstance may return a
+// superclass of thisClass.
+#define vtkAbstractTypeMacroWithNewInstanceType(thisClass,superclass,instanceType) \
   typedef superclass Superclass; \
   private: \
   virtual const char* GetClassNameInternal() const { return #thisClass; } \
@@ -604,16 +604,25 @@ virtual double *Get##name() \
       } \
     return NULL;\
   } \
+  instanceType *NewInstance() const \
+  { \
+    return instanceType::SafeDownCast(this->NewInstanceInternal()); \
+  }
+
+// Same as vtkTypeMacro, but adapted for cases where thisClass is abstract.
+#define vtkAbstractTypeMacro(thisClass,superclass) \
+  vtkAbstractTypeMacroWithNewInstanceType(thisClass, superclass, thisClass)
+
+// Macro used to determine whether a class is the same class or
+// a subclass of the named class.
+#define vtkTypeMacro(thisClass,superclass) \
+  vtkAbstractTypeMacro(thisClass, superclass) \
   protected: \
   virtual vtkObjectBase *NewInstanceInternal() const \
   { \
     return thisClass::New(); \
   } \
-  public: \
-  thisClass *NewInstance() const \
-  { \
-    return thisClass::SafeDownCast(this->NewInstanceInternal()); \
-  }
+  public:
 
 // Legacy versions of vtkTypeMacro and helpers.
 #if !defined(VTK_LEGACY_REMOVE)
@@ -758,7 +767,11 @@ virtual double *Get##name() \
   // place to avoid stray semicolons because this is an error for some
   // compilers.  Using a class forward declaration allows any number
   // of repeats in any context without generating unique names.
-# define VTK_LEGACY(method) class vtkLegacyMethodRemoved
+
+# define VTK_LEGACY(method)         VTK_LEGACY__0(method,__LINE__)
+# define VTK_LEGACY__0(method,line) VTK_LEGACY__1(method,line)
+# define VTK_LEGACY__1(method,line) class vtkLegacyMethodRemoved##line
+
 #elif defined(VTK_LEGACY_SILENT) || defined(VTK_WRAPPING_CXX)
   // Provide legacy methods with no warnings.
 # define VTK_LEGACY(method) method
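
The new vtkAbstractTypeMacro exists for classes with no ::New() of their own, since vtkTypeMacro defines NewInstanceInternal() in terms of thisClass::New(). A sketch of how a hypothetical abstract base might use it:

    #include "vtkObject.h"

    // Hypothetical abstract base: it has no New(), so vtkTypeMacro (which defines
    // NewInstanceInternal() via thisClass::New()) would not compile here.
    class vtkMyAbstractSource : public vtkObject
    {
    public:
      vtkAbstractTypeMacro(vtkMyAbstractSource, vtkObject);

      virtual void Produce() = 0; // concrete subclasses implement this

    protected:
      vtkMyAbstractSource() {}
      ~vtkMyAbstractSource() {}
    };
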
diff --git a/Common/Core/vtkShortArray.cxx b/Common/Core/vtkShortArray.cxx
index acb5caa..a609340 100644
--- a/Common/Core/vtkShortArray.cxx
+++ b/Common/Core/vtkShortArray.cxx
@@ -27,7 +27,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(short);
 vtkStandardNewMacro(vtkShortArray);
 
 //----------------------------------------------------------------------------
-vtkShortArray::vtkShortArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkShortArray::vtkShortArray()
 {
 }
 
diff --git a/Common/Core/vtkShortArray.h b/Common/Core/vtkShortArray.h
index 54b15d6..f8d778a 100644
--- a/Common/Core/vtkShortArray.h
+++ b/Common/Core/vtkShortArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<short>
+#endif
 class VTKCOMMONCORE_EXPORT vtkShortArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkShortArray* New();
   vtkTypeMacro(vtkShortArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_SHORT; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, short* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const short* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const short* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const short* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  short GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, short value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, short f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(short f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  short *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(short range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  short *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(short range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(short);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static short GetDataTypeValueMax() { return VTK_SHORT_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  short* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  short* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(short* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(short* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkShortArray(vtkIdType numComp=1);
+  vtkShortArray();
   ~vtkShortArray();
 
 private:
diff --git a/Common/Core/vtkSignedCharArray.cxx b/Common/Core/vtkSignedCharArray.cxx
index 6c4da6e..227e630 100644
--- a/Common/Core/vtkSignedCharArray.cxx
+++ b/Common/Core/vtkSignedCharArray.cxx
@@ -27,7 +27,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(signed char);
 vtkStandardNewMacro(vtkSignedCharArray);
 
 //----------------------------------------------------------------------------
-vtkSignedCharArray::vtkSignedCharArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkSignedCharArray::vtkSignedCharArray()
 {
 }
 
diff --git a/Common/Core/vtkSignedCharArray.h b/Common/Core/vtkSignedCharArray.h
index 4aa3938..12812cd 100644
--- a/Common/Core/vtkSignedCharArray.h
+++ b/Common/Core/vtkSignedCharArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<signed char>
+#endif
 class VTKCOMMONCORE_EXPORT vtkSignedCharArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkSignedCharArray* New();
   vtkTypeMacro(vtkSignedCharArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_SIGNED_CHAR; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, signed char* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const signed char* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const signed char* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const signed char* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  signed char GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, signed char value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, signed char f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(signed char f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  signed char *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(signed char range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  signed char *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(signed char range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(signed char);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static signed char GetDataTypeValueMax() { return VTK_SIGNED_CHAR_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  signed char* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  signed char* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(signed char* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(signed char* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkSignedCharArray(vtkIdType numComp=1);
+  vtkSignedCharArray();
   ~vtkSignedCharArray();
 
 private:
diff --git a/Common/Core/vtkSimpleCriticalSection.cxx b/Common/Core/vtkSimpleCriticalSection.cxx
new file mode 100644
index 0000000..21f6c0c
--- /dev/null
+++ b/Common/Core/vtkSimpleCriticalSection.cxx
@@ -0,0 +1,83 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCriticalSection.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSimpleCriticalSection.h"
+
+void vtkSimpleCriticalSection::Init()
+{
+#ifdef VTK_USE_SPROC
+  init_lock( &this->CritSec );
+#endif
+
+#ifdef VTK_USE_WIN32_THREADS
+  //this->MutexLock = CreateMutex( NULL, FALSE, NULL );
+  InitializeCriticalSection(&this->CritSec);
+#endif
+
+#ifdef VTK_USE_PTHREADS
+#ifdef VTK_HP_PTHREADS
+  pthread_mutex_init(&(this->CritSec), pthread_mutexattr_default);
+#else
+  pthread_mutex_init(&(this->CritSec), NULL);
+#endif
+#endif
+}
+
+
+// Destruct the vtkMutexVariable
+vtkSimpleCriticalSection::~vtkSimpleCriticalSection()
+{
+#ifdef VTK_USE_WIN32_THREADS
+  //CloseHandle(this->MutexLock);
+  DeleteCriticalSection(&this->CritSec);
+#endif
+
+#ifdef VTK_USE_PTHREADS
+  pthread_mutex_destroy( &this->CritSec);
+#endif
+}
+
+// Lock the vtkCriticalSection
+void vtkSimpleCriticalSection::Lock()
+{
+#ifdef VTK_USE_SPROC
+  spin_lock( &this->CritSec );
+#endif
+
+#ifdef VTK_USE_WIN32_THREADS
+  //WaitForSingleObject( this->MutexLock, INFINITE );
+  EnterCriticalSection(&this->CritSec);
+#endif
+
+#ifdef VTK_USE_PTHREADS
+  pthread_mutex_lock( &this->CritSec);
+#endif
+}
+
+// Unlock the vtkCriticalSection
+void vtkSimpleCriticalSection::Unlock()
+{
+#ifdef VTK_USE_SPROC
+  release_lock( &this->CritSec );
+#endif
+
+#ifdef VTK_USE_WIN32_THREADS
+  //ReleaseMutex( this->MutexLock );
+  LeaveCriticalSection(&this->CritSec);
+#endif
+
+#ifdef VTK_USE_PTHREADS
+  pthread_mutex_unlock( &this->CritSec);
+#endif
+}
diff --git a/Common/Core/vtkSimpleCriticalSection.h b/Common/Core/vtkSimpleCriticalSection.h
new file mode 100644
index 0000000..6447a8d
--- /dev/null
+++ b/Common/Core/vtkSimpleCriticalSection.h
@@ -0,0 +1,105 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCriticalSection.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSimpleCriticalSection - Critical section locking class
+// .SECTION Description
+// vtkCriticalSection allows the locking of variables which are accessed
+// through different threads.  This header file also defines
+// vtkSimpleCriticalSection which is not a subclass of vtkObject.
+// The API is identical to that of vtkMutexLock, and the behavior is
+// identical as well, except on Windows 9x/NT platforms. The only difference
+// on these platforms is that vtkMutexLock is more flexible, in that
+// it works across processes as well as across threads, but also costs
+// more, in that it evokes a 600-cycle x86 ring transition. The
+// vtkCriticalSection provides a higher-performance equivalent (on
+// Windows) but won't work across processes. Since it is unclear how,
+// in vtk, an object at the vtk level can be shared across processes
+// in the first place, one should use vtkCriticalSection unless one has
+// a very good reason to use vtkMutexLock. If higher-performance equivalents
+// for non-Windows platforms (Irix, SunOS, etc) are discovered, they
+// should replace the implementations in this class
+
+#ifndef __vtkSimpleCriticalSection_h
+#define __vtkSimpleCriticalSection_h
+
+#include "vtkCommonCoreModule.h" // For export macro
+#include "vtkSystemIncludes.h"
+
+//BTX
+
+#ifdef VTK_USE_SPROC
+#include <abi_mutex.h> // Needed for sproc implementation of mutex
+typedef abilock_t vtkCritSecType;
+#endif
+
+#if defined(VTK_USE_PTHREADS) || defined(VTK_HP_PTHREADS)
+#include <pthread.h> // Needed for pthreads implementation of mutex
+typedef pthread_mutex_t vtkCritSecType;
+#endif
+
+#ifdef VTK_USE_WIN32_THREADS
+# include "vtkWindows.h" // Needed for win32 implementation of mutex
+typedef CRITICAL_SECTION vtkCritSecType;
+#endif
+
+#ifndef VTK_USE_SPROC
+#ifndef VTK_USE_PTHREADS
+#ifndef VTK_USE_WIN32_THREADS
+typedef int vtkCritSecType;
+#endif
+#endif
+#endif
+
+// Critical Section object that is not a vtkObject.
+class VTKCOMMONCORE_EXPORT vtkSimpleCriticalSection
+{
+public:
+  // Default constructor
+  vtkSimpleCriticalSection()
+    {
+    this->Init();
+    }
+  // Construct the object locked if isLocked is nonzero
+  vtkSimpleCriticalSection(int isLocked)
+    {
+    this->Init();
+    if(isLocked)
+      {
+      this->Lock();
+      }
+    }
+  // Destructor
+  virtual ~vtkSimpleCriticalSection();
+
+  void Init();
+
+  // Description:
+  // Lock the vtkCriticalSection
+  void Lock();
+
+  // Description:
+  // Unlock the vtkCriticalSection
+  void Unlock();
+
+protected:
+  vtkCritSecType   CritSec;
+
+private:
+  vtkSimpleCriticalSection(const vtkSimpleCriticalSection& other); // no copy constructor
+  vtkSimpleCriticalSection& operator=(const vtkSimpleCriticalSection& rhs); // no copy assignment
+};
+//ETX
+
+#endif
+// VTK-HeaderTest-Exclude: vtkSimpleCriticalSection.h
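A minimal usage sketch of the new non-vtkObject lock; the counter and function names here are hypothetical:

  #include "vtkSimpleCriticalSection.h"

  static vtkSimpleCriticalSection counterLock;  // default-constructed, unlocked
  static unsigned long sharedCounter = 0;

  void IncrementShared()
  {
    counterLock.Lock();    // block until the critical section is acquired
    ++sharedCounter;       // safely mutate state shared between threads
    counterLock.Unlock();  // release so other threads can proceed
  }
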
diff --git a/Common/Core/vtkSmartPointer.h b/Common/Core/vtkSmartPointer.h
index 2f5ab89..34fa35a 100644
--- a/Common/Core/vtkSmartPointer.h
+++ b/Common/Core/vtkSmartPointer.h
@@ -67,6 +67,10 @@ public:
     {
     return static_cast<T*>(this->Object);
     }
+  T* Get() const
+    {
+    return static_cast<T*>(this->Object);
+    }
 
   // Description:
   // Get the contained pointer.
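The new Get() is a synonym for GetPointer(); a brief sketch of caller code (vtkObject is used only for illustration):

  #include "vtkObject.h"
  #include "vtkSmartPointer.h"

  void Demo()
  {
    vtkSmartPointer<vtkObject> holder = vtkSmartPointer<vtkObject>::New();
    vtkObject* raw = holder.Get();  // same pointer as holder.GetPointer()
    (void)raw;                      // hand off to APIs that take raw pointers
  }
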
diff --git a/Common/Core/vtkStdString.h b/Common/Core/vtkStdString.h
index b975e4c..78a31eb 100644
--- a/Common/Core/vtkStdString.h
+++ b/Common/Core/vtkStdString.h
@@ -32,7 +32,7 @@ VTKCOMMONCORE_EXPORT ostream& operator<<(ostream&, const vtkStdString&);
 // Not setting the visibility of this class caused the
 // vtkArrayIteratorTemplate<vtkStdString> symbols to be hidden on Apple GCC 4.2
 // but exporting would cause failure on MSVC 10 (works either way with GCC 4.4
-#if defined(__APPLE__) && __GNUC__ >=4
+#if defined(__APPLE__) && (__GNUC__==4) && (__GNUC_MINOR__<=2) && !defined(__clang__)
 class VTKCOMMONCORE_EXPORT vtkStdString : public std::string
 #else
 class vtkStdString : public std::string
diff --git a/Common/Core/vtkStringArray.cxx b/Common/Core/vtkStringArray.cxx
index f7d5b0a..a72464a 100644
--- a/Common/Core/vtkStringArray.cxx
+++ b/Common/Core/vtkStringArray.cxx
@@ -78,8 +78,7 @@ vtkStandardNewMacro(vtkStringArray);
 
 //-----------------------------------------------------------------------------
 
-vtkStringArray::vtkStringArray(vtkIdType numComp) :
-  vtkAbstractArray( numComp )
+vtkStringArray::vtkStringArray()
 {
   this->Array = NULL;
   this->SaveUserArray = 0;
@@ -94,10 +93,7 @@ vtkStringArray::~vtkStringArray()
     {
     delete [] this->Array;
     }
-  if (this->Lookup)
-    {
-    delete this->Lookup;
-    }
+  delete this->Lookup;
 }
 
 //-----------------------------------------------------------------------------
@@ -560,6 +556,44 @@ void vtkStringArray::InsertTuple(vtkIdType i, vtkIdType j,
 }
 
 // ----------------------------------------------------------------------------
+void vtkStringArray::InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                                  vtkAbstractArray *source)
+{
+  vtkStringArray* sa = vtkStringArray::SafeDownCast(source);
+  if (!sa)
+    {
+    vtkWarningMacro("Input and output array data types do not match.");
+    return;
+    }
+
+  if (this->NumberOfComponents != source->GetNumberOfComponents())
+    {
+    vtkWarningMacro("Input and output component sizes do not match.");
+    return;
+    }
+
+  vtkIdType numIds = dstIds->GetNumberOfIds();
+  if (srcIds->GetNumberOfIds() != numIds)
+    {
+    vtkWarningMacro("Input and output id array sizes do not match.");
+    return;
+    }
+
+  for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+    {
+    vtkIdType numComp = this->NumberOfComponents;
+    vtkIdType srcLoc = srcIds->GetId(idIndex) * this->NumberOfComponents;
+    vtkIdType dstLoc = dstIds->GetId(idIndex) * this->NumberOfComponents;
+    while (numComp-- > 0)
+      {
+      this->InsertValue(dstLoc++, sa->GetValue(srcLoc++));
+      }
+    }
+
+  this->DataChanged();
+}
+
+// ----------------------------------------------------------------------------
 // Insert the jth tuple in the source array, at the end in this array.
 // Note that memory allocation is performed as necessary to hold the data.
 // Returns the location at which the data was inserted.
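A short sketch of the new InsertTuples() overload for string arrays; the ids and values are made up for illustration:

  #include "vtkIdList.h"
  #include "vtkNew.h"
  #include "vtkStringArray.h"

  void CopySelectedStrings()
  {
    vtkNew<vtkStringArray> src;
    src->InsertNextValue("alpha");
    src->InsertNextValue("beta");
    src->InsertNextValue("gamma");

    vtkNew<vtkStringArray> dst;
    vtkNew<vtkIdList> srcIds;
    vtkNew<vtkIdList> dstIds;
    srcIds->InsertNextId(2); dstIds->InsertNextId(0);  // "gamma" -> slot 0
    srcIds->InsertNextId(0); dstIds->InsertNextId(1);  // "alpha" -> slot 1
    dst->InsertTuples(dstIds.GetPointer(), srcIds.GetPointer(), src.GetPointer());
  }
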
diff --git a/Common/Core/vtkStringArray.h b/Common/Core/vtkStringArray.h
index a699141..0e61148 100644
--- a/Common/Core/vtkStringArray.h
+++ b/Common/Core/vtkStringArray.h
@@ -85,6 +85,13 @@ public:
   virtual void InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* source);
 
   // Description:
+  // Copy the tuples indexed in srcIds from the source array to the tuple
+  // locations indexed by dstIds in this array.
+  // Note that memory allocation is performed as necessary to hold the data.
+  virtual void InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                            vtkAbstractArray *source);
+
+  // Description:
   // Insert the jth tuple in the source array, at the end in this array.
   // Note that memory allocation is performed as necessary to hold the data.
   // Returns the location at which the data was inserted.
@@ -208,7 +215,7 @@ public:
   // the array supplied by the user.  Set save to 1 to keep the class
   // from deleting the array when it cleans up or reallocates memory.
   // The class uses the actual array provided; it does not copy the data
-  // from the suppled array. If save is 0, then this class is free to delete
+  // from the supplied array. If save is 0, then this class is free to delete
   // the array when it cleans up or reallocates. In that case, it is required
   // that the array was allocated using the C++ new operator (and not malloc).
   void SetArray(vtkStdString* array, vtkIdType size, int save);
@@ -271,7 +278,7 @@ public:
   virtual void ClearLookup();
 
 protected:
-  vtkStringArray(vtkIdType numComp=1);
+  vtkStringArray();
   ~vtkStringArray();
 
   vtkStdString* Array;   // pointer to data
diff --git a/Common/Core/vtkSystemIncludes.h b/Common/Core/vtkSystemIncludes.h
index 2c17227..bcb64a9 100644
--- a/Common/Core/vtkSystemIncludes.h
+++ b/Common/Core/vtkSystemIncludes.h
@@ -75,6 +75,7 @@
 #define VTK_COURIER        1
 #define VTK_TIMES          2
 #define VTK_UNKNOWN_FONT   3
+#define VTK_FONT_FILE      4
 
 #define VTK_TEXT_LEFT     0
 #define VTK_TEXT_CENTERED 1
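VTK_FONT_FILE identifies text properties whose glyphs come from a font file on disk rather than a built-in family; assuming the matching vtkTextProperty::SetFontFile() support added in this release, usage would look roughly like:

  #include "vtkNew.h"
  #include "vtkTextProperty.h"

  void UseCustomFont()
  {
    vtkNew<vtkTextProperty> prop;
    prop->SetFontFamily(VTK_FONT_FILE);        // select the file-based family
    prop->SetFontFile("/path/to/MyFont.ttf");  // hypothetical path
  }
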
diff --git a/Common/Core/vtkTimePointUtility.h b/Common/Core/vtkTimePointUtility.h
index 65375ea..f3c3b9e 100644
--- a/Common/Core/vtkTimePointUtility.h
+++ b/Common/Core/vtkTimePointUtility.h
@@ -149,8 +149,8 @@ public:
     vtkTypeUInt64, int format = ISO8601_DATETIME_MILLIS);
 
 protected:
-  vtkTimePointUtility() {};
-  ~vtkTimePointUtility() {};
+  vtkTimePointUtility() {}
+  ~vtkTimePointUtility() {}
 
 private:
   vtkTimePointUtility(const vtkTimePointUtility&);  // Not implemented.
diff --git a/Common/Core/vtkTimeStamp.cxx b/Common/Core/vtkTimeStamp.cxx
index 33eb4f0..f1386e5 100644
--- a/Common/Core/vtkTimeStamp.cxx
+++ b/Common/Core/vtkTimeStamp.cxx
@@ -12,18 +12,15 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
-//
-// Initialize static member
-//
 #include "vtkTimeStamp.h"
 
-#include "vtkCriticalSection.h"
 #include "vtkObjectFactory.h"
 #include "vtkWindows.h"
 
-#if defined(__APPLE__)
-  #include <libkern/OSAtomic.h>
-#endif
+// We use the Schwarz Counter idiom to make sure that GlobalTimeStamp
+// is initialized before any other class uses it.
+
+#include "vtkAtomicInt.h"
 
 //-------------------------------------------------------------------------
 vtkTimeStamp* vtkTimeStamp::New()
@@ -35,37 +32,11 @@ vtkTimeStamp* vtkTimeStamp::New()
 //-------------------------------------------------------------------------
 void vtkTimeStamp::Modified()
 {
-// Windows optimization
-#if defined(WIN32) || defined(_WIN32)
-  static LONG vtkTimeStampTime = 0;
-  this->ModifiedTime = (unsigned long)InterlockedIncrement(&vtkTimeStampTime);
-
-// Mac optimization
-#elif defined(__APPLE__)
- #if __LP64__
-  // "ModifiedTime" is "unsigned long", a type that changess sizes
-  // depending on architecture.  The atomic increment is safe, since it
-  // operates on a variable of the exact type needed.  The cast does not
-  // change the size, but does change signedness, which is not ideal.
-  static volatile int64_t vtkTimeStampTime = 0;
-  this->ModifiedTime = (unsigned long)OSAtomicIncrement64Barrier(&vtkTimeStampTime);
- #else
-  static volatile int32_t vtkTimeStampTime = 0;
-  this->ModifiedTime = (unsigned long)OSAtomicIncrement32Barrier(&vtkTimeStampTime);
- #endif
-
-// GCC and CLANG intrinsics
-#elif defined(VTK_HAVE_SYNC_BUILTINS)
-  static volatile unsigned long vtkTimeStampTime = 0;
-  this->ModifiedTime = __sync_add_and_fetch(&vtkTimeStampTime, 1);
-
-// General case
+#if VTK_SIZEOF_VOID_P == 8
+  static vtkAtomicInt<vtkTypeInt64> GlobalTimeStamp(0);
 #else
-  static unsigned long vtkTimeStampTime = 0;
-  static vtkSimpleCriticalSection TimeStampCritSec;
-
-  TimeStampCritSec.Lock();
-  this->ModifiedTime = ++vtkTimeStampTime;
-  TimeStampCritSec.Unlock();
+  static vtkAtomicInt<vtkTypeInt32> GlobalTimeStamp(0);
 #endif
+
+  this->ModifiedTime = (unsigned long)++GlobalTimeStamp;
 }
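The rewrite keeps the observable contract of Modified(): every call yields a strictly increasing value for the whole process. A minimal sketch of the public API:

  #include "vtkTimeStamp.h"

  void Demo()
  {
    vtkTimeStamp buildTime;             // starts at 0
    vtkTimeStamp paramTime;
    paramTime.Modified();               // bumps the process-wide atomic counter
    bool stale = paramTime > buildTime; // true: parameters changed after the last build
    (void)stale;
  }
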
diff --git a/Common/Core/vtkToolkits.h.in b/Common/Core/vtkToolkits.h.in
index 0dc5961..adfe634 100644
--- a/Common/Core/vtkToolkits.h.in
+++ b/Common/Core/vtkToolkits.h.in
@@ -35,10 +35,6 @@
 /* Whether FFMPEG is found or not  */
 #cmakedefine VTK_USE_FFMPEG_ENCODER
 
-/* Which shader support is included or not */
-#cmakedefine VTK_USE_CG_SHADERS
-#cmakedefine VTK_USE_GLSL_SHADERS
-
 /* Whether the user has linked in the MPEG2 library or not  */
 #cmakedefine VTK_USE_MPEG2_ENCODER
 
diff --git a/Common/Core/vtkTypeTemplate.h b/Common/Core/vtkTypeTemplate.h
index e23c8e2..b3518c2 100644
--- a/Common/Core/vtkTypeTemplate.h
+++ b/Common/Core/vtkTypeTemplate.h
@@ -29,6 +29,7 @@
 #define __vtkTypeTemplate_h
 
 #include "vtkObjectBase.h"
+#include <string>
 #include <typeinfo>
 
 template<class ThisT, class BaseT>
@@ -45,7 +46,8 @@ public:
 
   static ThisT* SafeDownCast(vtkObjectBase* o)
   {
-    if(o && o->IsA(typeid(ThisT).name()))
+    if(o &&
+       o->IsA(vtkTypeTemplate<ThisT, BaseT>::GetClassNameInternalCachedName()))
       {
       return static_cast<ThisT*>(o);
       }
@@ -64,7 +66,8 @@ protected:
   // "normal" VTK classes.
   static int IsTypeOf(const char* type)
   {
-    if(!strcmp(typeid(ThisT).name(), type))
+    if (strcmp(vtkTypeTemplate<ThisT, BaseT>::GetClassNameInternalCachedName(),
+               type) == 0)
       {
       return 1;
       }
@@ -79,10 +82,22 @@ protected:
     return this->IsTypeOf(type);
   }
 
+  vtkTypeTemplate() {}
+
 private:
+  // not implemented:
+  vtkTypeTemplate(const vtkTypeTemplate<ThisT, BaseT>&);
+  void operator=(const vtkTypeTemplate<ThisT, BaseT>&);
+
+  static const char* GetClassNameInternalCachedName()
+  {
+    static std::string thisType(typeid(ThisT).name());
+    return thisType.c_str();
+  }
+
   virtual const char* GetClassNameInternal() const
   {
-    return typeid(ThisT).name();
+    return this->GetClassNameInternalCachedName();
   }
 };
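With the cached name, SafeDownCast() compares one stable string per template instantiation instead of evaluating typeid() on every call. A hypothetical subclass (the class name is illustrative only):

  #include "vtkObjectBase.h"
  #include "vtkTypeTemplate.h"

  class vtkMyCustomObject
    : public vtkTypeTemplate<vtkMyCustomObject, vtkObjectBase>
  {
  public:
    static vtkMyCustomObject* New() { return new vtkMyCustomObject; }
  };

  void Demo()
  {
    vtkObjectBase* base = vtkMyCustomObject::New();
    vtkMyCustomObject* self = vtkMyCustomObject::SafeDownCast(base); // non-NULL
    (void)self;
    base->Delete();
  }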
 
diff --git a/Common/Core/vtkTypeTraits.h b/Common/Core/vtkTypeTraits.h
index f09a0f0..6b90d93 100644
--- a/Common/Core/vtkTypeTraits.h
+++ b/Common/Core/vtkTypeTraits.h
@@ -32,6 +32,7 @@ template <class T> struct vtkTypeTraits;
     typedef type ValueType;                                                   \
                                                                               \
     /* the value defined for this type in vtkType */                          \
+    enum { VTK_TYPE_ID = VTK_##macro };                                       \
     static int VTKTypeID() { return VTK_##macro; }                            \
                                                                               \
     /* The smallest possible value represented by the type.  */               \
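The new enum exposes the same id at compile time that VTKTypeID() returns at run time; a compile-time check sketch:

  #include "vtkType.h"
  #include "vtkTypeTraits.h"

  // Usable in constant expressions, e.g. array sizes or other enums.
  enum { FloatTypeId = vtkTypeTraits<float>::VTK_TYPE_ID };     // == VTK_FLOAT
  typedef char CheckFloatId[FloatTypeId == VTK_FLOAT ? 1 : -1]; // C++98-style static assert
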
diff --git a/Common/Core/vtkTypedArray.cxx.in b/Common/Core/vtkTypedArray.cxx.in
index 1aa9718..1dd8aaf 100644
--- a/Common/Core/vtkTypedArray.cxx.in
+++ b/Common/Core/vtkTypedArray.cxx.in
@@ -20,7 +20,7 @@
 vtkStandardNewMacro(vtkType at VTK_TYPE_NAME@Array);
 
 //----------------------------------------------------------------------------
-vtkType at VTK_TYPE_NAME@Array::vtkType at VTK_TYPE_NAME@Array(vtkIdType numComp): Superclass(numComp)
+vtkType at VTK_TYPE_NAME@Array::vtkType at VTK_TYPE_NAME@Array()
 {
 }
 
diff --git a/Common/Core/vtkTypedArray.h.in b/Common/Core/vtkTypedArray.h.in
index d1b6446..ca98cdb 100644
--- a/Common/Core/vtkTypedArray.h.in
+++ b/Common/Core/vtkTypedArray.h.in
@@ -32,7 +32,7 @@ public:
   void PrintSelf(ostream& os, vtkIndent indent);
 
 protected:
-  vtkType at VTK_TYPE_NAME@Array(vtkIdType numComp=1);
+  vtkType at VTK_TYPE_NAME@Array();
   ~vtkType at VTK_TYPE_NAME@Array();
 
 private:
diff --git a/Common/Core/vtkTypedDataArray.h b/Common/Core/vtkTypedDataArray.h
new file mode 100644
index 0000000..987e489
--- /dev/null
+++ b/Common/Core/vtkTypedDataArray.h
@@ -0,0 +1,168 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTypedDataArray.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkTypedDataArray - Extend vtkDataArray with abstract type-specific API
+//
+// .SECTION Description
+// This templated class decorates vtkDataArray with additional type-specific
+// methods that can be used to interact with the data.
+//
+// .SECTION Caveats
+// This class uses vtkTypeTraits to implement GetDataType(). Since vtkIdType
+// is a typedef for either a 32- or 64-bit integer, subclasses that are designed
+// to hold vtkIdTypes will, by default, return an incorrect value from
+// GetDataType(). To fix this, such subclasses should override GetDataType() to
+// return VTK_ID_TYPE.
+
+#ifndef __vtkTypedDataArray_h
+#define __vtkTypedDataArray_h
+
+#include "vtkDataArray.h"
+
+#include "vtkCommonCoreModule.h" // For export macro
+#include "vtkTypeTemplate.h" // For vtkTypeTemplate
+#include "vtkTypeTraits.h"   // For type metadata
+
+template <class Scalar> class vtkTypedDataArrayIterator;
+
+template <class Scalar>
+class vtkTypedDataArray :
+    public vtkTypeTemplate<vtkTypedDataArray<Scalar>, vtkDataArray>
+{
+public:
+  // Description:
+  // Typedef to get the type of value stored in the array.
+  typedef Scalar ValueType;
+
+  // Description:
+  // Typedef to a suitable iterator class.
+  // Rather than using this member directly, consider using
+  // vtkDataArrayIteratorMacro for safety and efficiency.
+  typedef vtkTypedDataArrayIterator<ValueType> Iterator;
+
+  // Description:
+  // Return an iterator initialized to the first element of the data.
+  // Rather than using this member directly, consider using
+  // vtkDataArrayIteratorMacro for safety and efficiency.
+  Iterator Begin();
+
+  // Description:
+  // Return an iterator initialized to first element past the end of the data.
+  // Rather than using this member directly, consider using
+  // vtkDataArrayIteratorMacro for safety and efficiency.
+  Iterator End();
+
+  // Description:
+  // Compile time access to the VTK type identifier.
+  enum { VTK_DATA_TYPE = vtkTypeTraits<ValueType>::VTK_TYPE_ID };
+
+  // Description:
+  // Perform a fast, safe cast from a vtkAbstractArray to a vtkTypedDataArray.
+  // This method checks if:
+  // - source->GetArrayType() is appropriate, and
+  // - source->GetDataType() matches the Scalar template argument
+  // if these conditions are met, the method performs a static_cast to return
+  // source as a vtkTypedDataArray pointer. Otherwise, NULL is returned.
+  static vtkTypedDataArray<Scalar>* FastDownCast(vtkAbstractArray *source);
+
+  // Description:
+  // Return the VTK data type held by this array.
+  int GetDataType();
+
+  // Description:
+  // Return the size of the element type in bytes.
+  int GetDataTypeSize();
+
+  // Description:
+  // Specify the number of values for this object to hold. Does an
+  // allocation as well as setting the MaxId ivar. Used in conjunction with
+  // SetValue() method for fast insertion.
+  virtual void SetNumberOfValues(vtkIdType num);
+
+  // Description:
+  // Set the tuple value at the ith location in the array.
+  virtual void SetTupleValue(vtkIdType i, const ValueType *t) = 0;
+
+  // Description:
+  // Insert (memory allocation performed) the tuple into the ith location
+  // in the array.
+  virtual void InsertTupleValue(vtkIdType i, const ValueType *t) = 0;
+
+  // Description:
+  // Insert (memory allocation performed) the tuple onto the end of the array.
+  virtual vtkIdType InsertNextTupleValue(const ValueType *t) = 0;
+
+  // Description:
+  // Return the indices where a specific value appears.
+  virtual vtkIdType LookupTypedValue(ValueType value) = 0;
+  virtual void LookupTypedValue(ValueType value, vtkIdList *ids) = 0;
+
+  // Description:
+  // Get the data at a particular index.
+  virtual ValueType GetValue(vtkIdType idx) = 0;
+
+  // Description:
+  // Get a reference to the scalar value at a particular index.
+  virtual ValueType& GetValueReference(vtkIdType idx) = 0;
+
+  // Description:
+  // Set the data at a particular index. Does not do range checking. Make sure
+  // you use the method SetNumberOfValues() before inserting data.
+  virtual void SetValue(vtkIdType idx, ValueType value) = 0;
+
+  // Description:
+  // Copy the tuple value into a user-provided array.
+  virtual void GetTupleValue(vtkIdType idx, ValueType *t) = 0;
+
+  // Description:
+  // Insert data at the end of the array. Return its location in the array.
+  virtual vtkIdType InsertNextValue(ValueType v) = 0;
+
+  // Description:
+  // Insert data at a specified position in the array.
+  virtual void InsertValue(vtkIdType idx, ValueType v) = 0;
+
+  // Description:
+  // Method for type-checking in FastDownCast implementations.
+  virtual int GetArrayType() { return vtkAbstractArray::TypedDataArray; }
+
+protected:
+  vtkTypedDataArray();
+  ~vtkTypedDataArray();
+
+private:
+  vtkTypedDataArray(const vtkTypedDataArray &); // Not implemented.
+  void operator=(const vtkTypedDataArray &);   // Not implemented.
+};
+
+// Included here to resolve chicken/egg issue with container/iterator:
+#include "vtkTypedDataArrayIterator.h" // For iterator
+
+template <class Scalar> inline
+typename vtkTypedDataArray<Scalar>::Iterator vtkTypedDataArray<Scalar>::Begin()
+{
+  return Iterator(this, 0);
+}
+
+template <class Scalar> inline
+typename vtkTypedDataArray<Scalar>::Iterator vtkTypedDataArray<Scalar>::End()
+{
+  return Iterator(this, this->MaxId + 1);
+}
+
+#include "vtkTypedDataArray.txx"
+
+#endif //__vtkTypedDataArray_h
+
+// VTK-HeaderTest-Exclude: vtkTypedDataArray.h
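A sketch of the typed-access path this class enables; the cast succeeds for arrays whose GetArrayType() is DataArrayTemplate, TypedDataArray or MappedDataArray and whose scalar type matches:

  #include "vtkTypedDataArray.h"

  void ReadFirstValue(vtkAbstractArray* anyArray)  // e.g. from a data set's point data
  {
    vtkTypedDataArray<float>* typed =
        vtkTypedDataArray<float>::FastDownCast(anyArray);
    if (!typed || typed->GetNumberOfTuples() == 0)  // wrong type or empty
      {
      return;
      }
    float first = typed->GetValue(0);               // typed access, no void pointers
    (void)first;
  }
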
diff --git a/Common/Core/vtkTypedDataArray.txx b/Common/Core/vtkTypedDataArray.txx
new file mode 100644
index 0000000..b3fd7b6
--- /dev/null
+++ b/Common/Core/vtkTypedDataArray.txx
@@ -0,0 +1,76 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTypedDataArray.txx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#ifndef __vtkTypedDataArray_txx
+#define __vtkTypedDataArray_txx
+
+#include "vtkTypedDataArray.h"
+
+//------------------------------------------------------------------------------
+template <typename Scalar>
+vtkTypedDataArray<Scalar>::vtkTypedDataArray()
+{
+}
+
+//------------------------------------------------------------------------------
+template <typename Scalar>
+vtkTypedDataArray<Scalar>::~vtkTypedDataArray()
+{
+}
+
+//------------------------------------------------------------------------------
+template <typename Scalar> inline
+int vtkTypedDataArray<Scalar>::GetDataType()
+{
+  return vtkTypeTraits<Scalar>::VTK_TYPE_ID;
+}
+
+//------------------------------------------------------------------------------
+template <typename Scalar> inline
+int vtkTypedDataArray<Scalar>::GetDataTypeSize()
+{
+  return static_cast<int>(sizeof(Scalar));
+}
+
+//------------------------------------------------------------------------------
+template <typename Scalar> inline
+void vtkTypedDataArray<Scalar>::SetNumberOfValues(vtkIdType number)
+{
+  if (this->Allocate(number))
+    {
+    this->MaxId = number - 1;
+    }
+  this->Modified();
+}
+
+//------------------------------------------------------------------------------
+template <typename Scalar> inline vtkTypedDataArray<Scalar> *
+vtkTypedDataArray<Scalar>::FastDownCast(vtkAbstractArray *source)
+{
+  switch (source->GetArrayType())
+    {
+    case vtkAbstractArray::DataArrayTemplate:
+    case vtkAbstractArray::TypedDataArray:
+    case vtkAbstractArray::MappedDataArray:
+      if (source->GetDataType() == vtkTypeTraits<Scalar>::VTK_TYPE_ID)
+        {
+        return static_cast<vtkTypedDataArray<Scalar>*>(source);
+        }
+    default:
+      return NULL;
+    }
+}
+
+#endif //__vtkTypedDataArray_txx
diff --git a/Common/Core/vtkTypedDataArrayIterator.h b/Common/Core/vtkTypedDataArrayIterator.h
new file mode 100644
index 0000000..db55c5d
--- /dev/null
+++ b/Common/Core/vtkTypedDataArrayIterator.h
@@ -0,0 +1,169 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTypedDataArrayIterator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkTypedDataArrayIterator - STL-style random access iterator for
+// vtkTypedDataArrays.
+//
+// .SECTION Description
+// vtkTypedDataArrayIterator provides an STL-style iterator that can be used to
+// interact with instances of vtkTypedDataArray. It is intended to provide an
+// alternative to using vtkDataArray::GetVoidPointer() that only uses
+// vtkTypedDataArray API functions to retrieve values. It is especially helpful
+// for safely iterating through subclasses of vtkMappedDataArray, which may
+// not use the same memory layout as a typical vtkDataArray.
+
+#ifndef __vtkTypedDataArrayIterator_h
+#define __vtkTypedDataArrayIterator_h
+
+#include <iterator> // For iterator traits
+
+#include "vtkTypedDataArray.h" // For vtkTypedDataArray
+
+template<class Scalar>
+class vtkTypedDataArrayIterator
+{
+public:
+  typedef std::random_access_iterator_tag iterator_category;
+  typedef Scalar value_type;
+  typedef std::ptrdiff_t difference_type;
+  typedef Scalar& reference;
+  typedef Scalar* pointer;
+
+  vtkTypedDataArrayIterator()
+    : Data(NULL), Index(0) {}
+
+  explicit vtkTypedDataArrayIterator(vtkTypedDataArray<Scalar> *arr,
+                                     const vtkIdType index = 0)
+    : Data(arr),
+      Index(index)
+  {
+  }
+
+  vtkTypedDataArrayIterator(const vtkTypedDataArrayIterator &o)
+    : Data(o.Data),
+      Index(o.Index)
+  {
+  }
+
+  vtkTypedDataArrayIterator&
+  operator=(vtkTypedDataArrayIterator<Scalar> o)
+  {
+    std::swap(this->Data, o.Data);
+    std::swap(this->Index, o.Index);
+    return *this;
+  }
+
+  bool operator==(const vtkTypedDataArrayIterator<Scalar> &o) const
+  {
+    return this->Data == o.Data && this->Index == o.Index;
+  }
+
+  bool operator!=(const vtkTypedDataArrayIterator<Scalar> &o) const
+  {
+    return this->Data != o.Data || this->Index != o.Index;
+  }
+
+  bool operator>(const vtkTypedDataArrayIterator<Scalar> &o) const
+  {
+    return this->Data == o.Data && this->Index > o.Index;
+  }
+
+  bool operator>=(const vtkTypedDataArrayIterator<Scalar> &o) const
+  {
+    return this->Data == o.Data && this->Index >= o.Index;
+  }
+
+  bool operator<(const vtkTypedDataArrayIterator<Scalar> &o) const
+  {
+    return this->Data == o.Data && this->Index < o.Index;
+  }
+
+  bool operator<=(const vtkTypedDataArrayIterator<Scalar> &o) const
+  {
+    return this->Data == o.Data && this->Index <= o.Index;
+  }
+
+  Scalar& operator*()
+  {
+    return this->Data->GetValueReference(this->Index);
+  }
+
+  Scalar* operator->() const
+  {
+    return &this->Data->GetValueReference(this->Index);
+  }
+
+  Scalar& operator[](const difference_type &n)
+  {
+    return this->Data->GetValueReference(this->Index + n);
+  }
+
+  vtkTypedDataArrayIterator& operator++()
+  {
+    ++this->Index;
+    return *this;
+  }
+
+  vtkTypedDataArrayIterator& operator--()
+  {
+    --this->Index;
+    return *this;
+  }
+
+  vtkTypedDataArrayIterator operator++(int)
+  {
+    return vtkTypedDataArrayIterator(this->Data, this->Index++);
+  }
+
+  vtkTypedDataArrayIterator operator--(int)
+  {
+    return vtkTypedDataArrayIterator(this->Data, this->Index--);
+  }
+
+  vtkTypedDataArrayIterator operator+(const difference_type& n) const
+  {
+    return vtkTypedDataArrayIterator(this->Data, this->Index + n);
+  }
+
+  vtkTypedDataArrayIterator operator-(const difference_type& n) const
+  {
+    return vtkTypedDataArrayIterator(this->Data, this->Index - n);
+  }
+
+  difference_type operator-(const vtkTypedDataArrayIterator& other) const
+  {
+    return this->Index - other.Index;
+  }
+
+  vtkTypedDataArrayIterator& operator+=(const difference_type& n)
+  {
+    this->Index += n;
+    return *this;
+  }
+
+  vtkTypedDataArrayIterator& operator-=(const difference_type& n)
+  {
+    this->Index -= n;
+    return *this;
+  }
+
+private:
+  vtkTypedDataArray<Scalar> *Data;
+  vtkIdType Index;
+};
+
+#endif // __vtkTypedDataArrayIterator_h
+
+// VTK-HeaderTest-Exclude: vtkTypedDataArrayIterator.h
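A sketch of iterating with the new STL-style iterator; the nested typedefs above are what let standard algorithms such as std::accumulate work:

  #include <numeric>  // std::accumulate
  #include "vtkTypedDataArray.h"

  double SumValues(vtkTypedDataArray<float>* array)
  {
    // Begin()/End() are defined in vtkTypedDataArray.h (see above).
    return std::accumulate(array->Begin(), array->End(), 0.0);
  }
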
diff --git a/Common/Core/vtkUnicodeString.cxx b/Common/Core/vtkUnicodeString.cxx
index dd96623..15af0b9 100644
--- a/Common/Core/vtkUnicodeString.cxx
+++ b/Common/Core/vtkUnicodeString.cxx
@@ -362,8 +362,7 @@ void vtkUnicodeString::assign(const_iterator first, const_iterator last)
 
 void vtkUnicodeString::clear()
 {
-  // We use erase() because MSVC 6 doesn't provide clear() ...
-  this->Storage.erase(this->Storage.begin(), this->Storage.end());
+  this->Storage.clear();
 }
 
 vtkUnicodeString vtkUnicodeString::fold_case() const
diff --git a/Common/Core/vtkUnicodeStringArray.cxx b/Common/Core/vtkUnicodeStringArray.cxx
index 36ed42c..fa2c97e 100644
--- a/Common/Core/vtkUnicodeStringArray.cxx
+++ b/Common/Core/vtkUnicodeStringArray.cxx
@@ -20,6 +20,7 @@
 #include "vtkUnicodeStringArray.h"
 
 #include <vector>
+#include <algorithm>
 
 class vtkUnicodeStringArray::Implementation
 {
@@ -30,7 +31,7 @@ public:
 
 vtkStandardNewMacro(vtkUnicodeStringArray);
 
-vtkUnicodeStringArray::vtkUnicodeStringArray(vtkIdType)
+vtkUnicodeStringArray::vtkUnicodeStringArray()
 {
   this->Internal = new Implementation;
 }
@@ -108,6 +109,46 @@ void vtkUnicodeStringArray::InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArr
   this->DataChanged();
 }
 
+void vtkUnicodeStringArray::InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                                         vtkAbstractArray *source)
+{
+  vtkUnicodeStringArray* const array =
+      vtkUnicodeStringArray::SafeDownCast(source);
+  if(!array)
+    {
+    vtkWarningMacro("Input and output array data types do not match.");
+    return;
+    }
+
+  vtkIdType numIds = dstIds->GetNumberOfIds();
+  if (srcIds->GetNumberOfIds() != numIds)
+    {
+    vtkWarningMacro("Input and output id array sizes do not match.");
+    return;
+    }
+
+  // Find maximum destination id and resize if needed
+  vtkIdType maxDstId = 0;
+  for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+    {
+    maxDstId = std::max(maxDstId, dstIds->GetId(idIndex));
+    }
+
+  if (static_cast<vtkIdType>(this->Internal->Storage.size()) <= maxDstId)
+    {
+    this->Internal->Storage.resize(maxDstId + 1);
+    }
+
+  // Copy data
+  for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+    {
+    this->Internal->Storage[dstIds->GetId(idIndex)] =
+        array->Internal->Storage[srcIds->GetId(idIndex)];
+    }
+
+  this->DataChanged();
+}
+
 vtkIdType vtkUnicodeStringArray::InsertNextTuple(vtkIdType j, vtkAbstractArray* source)
 {
   vtkUnicodeStringArray* const array = vtkUnicodeStringArray::SafeDownCast(source);
diff --git a/Common/Core/vtkUnicodeStringArray.h b/Common/Core/vtkUnicodeStringArray.h
index e4e5de1..7a19081 100644
--- a/Common/Core/vtkUnicodeStringArray.h
+++ b/Common/Core/vtkUnicodeStringArray.h
@@ -49,6 +49,8 @@ public:
   virtual void SetNumberOfTuples(vtkIdType number);
   virtual void SetTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* source);
   virtual void InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* source);
+  virtual void InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                            vtkAbstractArray *source);
   virtual vtkIdType InsertNextTuple(vtkIdType j, vtkAbstractArray* source);
   virtual void* GetVoidPointer(vtkIdType id);
   virtual void DeepCopy(vtkAbstractArray* da);
@@ -81,7 +83,7 @@ public:
   const char* GetUTF8Value(vtkIdType i);
 
 protected:
-  vtkUnicodeStringArray(vtkIdType numComp = 1);
+  vtkUnicodeStringArray();
   ~vtkUnicodeStringArray();
 
 private:
diff --git a/Common/Core/vtkUnsignedCharArray.cxx b/Common/Core/vtkUnsignedCharArray.cxx
index 0469ac3..c9c16a1 100644
--- a/Common/Core/vtkUnsignedCharArray.cxx
+++ b/Common/Core/vtkUnsignedCharArray.cxx
@@ -27,7 +27,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(unsigned char);
 vtkStandardNewMacro(vtkUnsignedCharArray);
 
 //----------------------------------------------------------------------------
-vtkUnsignedCharArray::vtkUnsignedCharArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkUnsignedCharArray::vtkUnsignedCharArray()
 {
 }
 
diff --git a/Common/Core/vtkUnsignedCharArray.h b/Common/Core/vtkUnsignedCharArray.h
index edd2958..cdf47af 100644
--- a/Common/Core/vtkUnsignedCharArray.h
+++ b/Common/Core/vtkUnsignedCharArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<unsigned char>
+#endif
 class VTKCOMMONCORE_EXPORT vtkUnsignedCharArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkUnsignedCharArray* New();
   vtkTypeMacro(vtkUnsignedCharArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_UNSIGNED_CHAR; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, unsigned char* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const unsigned char* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const unsigned char* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const unsigned char* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  unsigned char GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, unsigned char value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, unsigned char f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(unsigned char f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  unsigned char *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(unsigned char range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  unsigned char *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(unsigned char range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(unsigned char);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static unsigned char GetDataTypeValueMax() { return VTK_UNSIGNED_CHAR_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  unsigned char* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  unsigned char* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(unsigned char* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(unsigned char* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkUnsignedCharArray(vtkIdType numComp=1);
+  vtkUnsignedCharArray();
   ~vtkUnsignedCharArray();
 
 private:
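Dropping the forwarding shims does not change the C++ interface: the same methods are now inherited directly from vtkDataArrayTemplate<unsigned char>, while the wrappers see them through vtkCreateWrappedArrayInterface. The same pattern is applied to the unsigned int, long, long long and short headers below. A minimal caller sketch (the variable names are illustrative):

  #include "vtkNew.h"
  #include "vtkUnsignedCharArray.h"

  void FillMask()
  {
    vtkNew<vtkUnsignedCharArray> mask;
    mask->SetNumberOfComponents(1);
    mask->SetNumberOfValues(2);   // inherited from vtkDataArrayTemplate
    mask->SetValue(0, 255);
    mask->SetValue(1, 0);
  }
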
diff --git a/Common/Core/vtkUnsignedIntArray.cxx b/Common/Core/vtkUnsignedIntArray.cxx
index 43a8cc5..939289a 100644
--- a/Common/Core/vtkUnsignedIntArray.cxx
+++ b/Common/Core/vtkUnsignedIntArray.cxx
@@ -27,7 +27,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(unsigned int);
 vtkStandardNewMacro(vtkUnsignedIntArray);
 
 //----------------------------------------------------------------------------
-vtkUnsignedIntArray::vtkUnsignedIntArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkUnsignedIntArray::vtkUnsignedIntArray()
 {
 }
 
diff --git a/Common/Core/vtkUnsignedIntArray.h b/Common/Core/vtkUnsignedIntArray.h
index 7b77679..f28a961 100644
--- a/Common/Core/vtkUnsignedIntArray.h
+++ b/Common/Core/vtkUnsignedIntArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<unsigned int>
+#endif
 class VTKCOMMONCORE_EXPORT vtkUnsignedIntArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkUnsignedIntArray* New();
   vtkTypeMacro(vtkUnsignedIntArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_UNSIGNED_INT; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, unsigned int* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const unsigned int* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const unsigned int* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const unsigned int* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  unsigned int GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, unsigned int value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, unsigned int f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(unsigned int f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  unsigned int *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(unsigned int range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  unsigned int *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(unsigned int range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(unsigned int);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static unsigned int GetDataTypeValueMax() { return VTK_UNSIGNED_INT_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  unsigned int* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  unsigned int* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(unsigned int* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(unsigned int* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkUnsignedIntArray(vtkIdType numComp=1);
+  vtkUnsignedIntArray();
   ~vtkUnsignedIntArray();
 
 private:
diff --git a/Common/Core/vtkUnsignedLongArray.cxx b/Common/Core/vtkUnsignedLongArray.cxx
index e38ee18..4d3ccf0 100644
--- a/Common/Core/vtkUnsignedLongArray.cxx
+++ b/Common/Core/vtkUnsignedLongArray.cxx
@@ -27,7 +27,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(unsigned long);
 vtkStandardNewMacro(vtkUnsignedLongArray);
 
 //----------------------------------------------------------------------------
-vtkUnsignedLongArray::vtkUnsignedLongArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkUnsignedLongArray::vtkUnsignedLongArray()
 {
 }
 
diff --git a/Common/Core/vtkUnsignedLongArray.h b/Common/Core/vtkUnsignedLongArray.h
index c6221fe..a41fdbc 100644
--- a/Common/Core/vtkUnsignedLongArray.h
+++ b/Common/Core/vtkUnsignedLongArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<unsigned long>
+#endif
 class VTKCOMMONCORE_EXPORT vtkUnsignedLongArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkUnsignedLongArray* New();
   vtkTypeMacro(vtkUnsignedLongArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_UNSIGNED_LONG; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, unsigned long* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const unsigned long* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const unsigned long* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const unsigned long* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  unsigned long GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, unsigned long value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, unsigned long f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(unsigned long f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  unsigned long *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(unsigned long range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  unsigned long *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(unsigned long range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(unsigned long);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static unsigned long GetDataTypeValueMax() { return VTK_UNSIGNED_LONG_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  unsigned long* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  unsigned long* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(unsigned long* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(unsigned long* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkUnsignedLongArray(vtkIdType numComp=1);
+  vtkUnsignedLongArray();
   ~vtkUnsignedLongArray();
 
 private:
diff --git a/Common/Core/vtkUnsignedLongLongArray.cxx b/Common/Core/vtkUnsignedLongLongArray.cxx
index 5d98a04..32559a9 100644
--- a/Common/Core/vtkUnsignedLongLongArray.cxx
+++ b/Common/Core/vtkUnsignedLongLongArray.cxx
@@ -28,8 +28,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(unsigned long long);
 vtkStandardNewMacro(vtkUnsignedLongLongArray);
 
 //----------------------------------------------------------------------------
-vtkUnsignedLongLongArray::vtkUnsignedLongLongArray(vtkIdType numComp):
-  RealSuperclass(numComp)
+vtkUnsignedLongLongArray::vtkUnsignedLongLongArray()
 {
 }
 
diff --git a/Common/Core/vtkUnsignedLongLongArray.h b/Common/Core/vtkUnsignedLongLongArray.h
index 86a2462..3a5382c 100644
--- a/Common/Core/vtkUnsignedLongLongArray.h
+++ b/Common/Core/vtkUnsignedLongLongArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<unsigned long long>
+#endif
 class VTKCOMMONCORE_EXPORT vtkUnsignedLongLongArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkUnsignedLongLongArray* New();
   vtkTypeMacro(vtkUnsignedLongLongArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_UNSIGNED_LONG_LONG; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, unsigned long long* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const unsigned long long* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const unsigned long long* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const unsigned long long* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  unsigned long long GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, unsigned long long value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, unsigned long long f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(unsigned long long f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  unsigned long long *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(unsigned long long range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  unsigned long long *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(unsigned long long range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(unsigned long long);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static unsigned long long GetDataTypeValueMax() {return VTK_UNSIGNED_LONG_LONG_MAX;}
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  unsigned long long* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  unsigned long long* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(unsigned long long* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(unsigned long long* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkUnsignedLongLongArray(vtkIdType numComp=1);
+  vtkUnsignedLongLongArray();
   ~vtkUnsignedLongLongArray();
 
 private:
diff --git a/Common/Core/vtkUnsignedShortArray.cxx b/Common/Core/vtkUnsignedShortArray.cxx
index 02e783c..21aefa0 100644
--- a/Common/Core/vtkUnsignedShortArray.cxx
+++ b/Common/Core/vtkUnsignedShortArray.cxx
@@ -27,7 +27,7 @@ VTK_ARRAY_ITERATOR_TEMPLATE_INSTANTIATE(unsigned short);
 vtkStandardNewMacro(vtkUnsignedShortArray);
 
 //----------------------------------------------------------------------------
-vtkUnsignedShortArray::vtkUnsignedShortArray(vtkIdType numComp): RealSuperclass(numComp)
+vtkUnsignedShortArray::vtkUnsignedShortArray()
 {
 }
 
diff --git a/Common/Core/vtkUnsignedShortArray.h b/Common/Core/vtkUnsignedShortArray.h
index 733667f..d8c629c 100644
--- a/Common/Core/vtkUnsignedShortArray.h
+++ b/Common/Core/vtkUnsignedShortArray.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<unsigned short>
+#endif
 class VTKCOMMONCORE_EXPORT vtkUnsignedShortArray : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkUnsignedShortArray* New();
   vtkTypeMacro(vtkUnsignedShortArray,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_UNSIGNED_SHORT; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, unsigned short* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const unsigned short* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const unsigned short* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const unsigned short* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  unsigned short GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, unsigned short value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, unsigned short f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(unsigned short f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  unsigned short *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(unsigned short range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  unsigned short *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(unsigned short range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(unsigned short);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,33 +59,8 @@ public:
   // Get the maximum data value in its native type.
   static unsigned short GetDataTypeValueMax() { return VTK_UNSIGNED_SHORT_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  unsigned short* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  unsigned short* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(unsigned short* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(unsigned short* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
-  vtkUnsignedShortArray(vtkIdType numComp=1);
+  vtkUnsignedShortArray();
   ~vtkUnsignedShortArray();
 
 private:
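
    The forwarding stubs removed in this header only existed so the wrappers could see a
    typed interface; C++ callers keep the same methods, now inherited directly from the real
    superclass vtkDataArrayTemplate<unsigned short>, while vtkCreateWrappedArrayInterface
    regenerates the declarations for the wrappers. A minimal sketch of the unchanged C++
    usage (function name and values are illustrative, not part of the patch):

    #include "vtkNew.h"
    #include "vtkUnsignedShortArray.h"

    void typedArraySketch()
    {
      vtkNew<vtkUnsignedShortArray> arr;
      arr->SetNumberOfComponents(1);
      arr->SetNumberOfValues(4);                 // inherited from vtkDataArrayTemplate
      for (vtkIdType i = 0; i < 4; ++i)
        {
        arr->SetValue(i, static_cast<unsigned short>(10 * i));
        }
      unsigned short v = arr->GetValue(2);       // 20
      unsigned short *raw = arr->GetPointer(0);  // raw access, no bounds checking
      (void)v; (void)raw;
    }
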
diff --git a/Common/Core/vtkUnsigned__Int64Array.h b/Common/Core/vtkUnsigned__Int64Array.h
index 5231b59..99dce63 100644
--- a/Common/Core/vtkUnsigned__Int64Array.h
+++ b/Common/Core/vtkUnsigned__Int64Array.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<unsigned __int64>
+#endif
 class VTKCOMMONCORE_EXPORT vtkUnsigned__Int64Array : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtkUnsigned__Int64Array* New();
   vtkTypeMacro(vtkUnsigned__Int64Array,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK_UNSIGNED___INT64; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, unsigned __int64* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const unsigned __int64* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const unsigned __int64* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const unsigned __int64* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  unsigned __int64 GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, unsigned __int64 value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, unsigned __int64 f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(unsigned __int64 f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  unsigned __int64 *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(unsigned __int64 range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  unsigned __int64 *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(unsigned __int64 range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(unsigned __int64);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,31 +59,6 @@ public:
   // Get the maximum data value in its native type.
   static unsigned __int64 GetDataTypeValueMax() { return VTK_UNSIGNED___INT64_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  unsigned __int64* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  unsigned __int64* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(unsigned __int64* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(unsigned __int64* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
   vtkUnsigned__Int64Array(vtkIdType numComp=1);
   ~vtkUnsigned__Int64Array();
diff --git a/Common/Core/vtkVariantArray.cxx b/Common/Core/vtkVariantArray.cxx
index 3f707fb..bac93e5 100644
--- a/Common/Core/vtkVariantArray.cxx
+++ b/Common/Core/vtkVariantArray.cxx
@@ -92,8 +92,7 @@ void vtkVariantArray::PrintSelf(ostream& os, vtkIndent indent)
 }
 
 //----------------------------------------------------------------------------
-vtkVariantArray::vtkVariantArray(vtkIdType numComp) :
-  vtkAbstractArray( numComp )
+vtkVariantArray::vtkVariantArray()
 {
   this->Array = NULL;
   this->SaveUserArray = 0;
@@ -107,10 +106,7 @@ vtkVariantArray::~vtkVariantArray()
     {
     delete [] this->Array;
     }
-  if (this->Lookup)
-    {
-    delete this->Lookup;
-    }
+  delete this->Lookup;
 }
 
 //
@@ -271,6 +267,70 @@ void vtkVariantArray::InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* so
 }
 
 //----------------------------------------------------------------------------
+void vtkVariantArray::InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                                   vtkAbstractArray *source)
+{
+
+  if (this->NumberOfComponents != source->GetNumberOfComponents())
+    {
+    vtkWarningMacro("Input and output component sizes do not match.");
+    return;
+    }
+
+  vtkIdType numIds = dstIds->GetNumberOfIds();
+  if (srcIds->GetNumberOfIds() != numIds)
+    {
+    vtkWarningMacro("Input and output id array sizes do not match.");
+    return;
+    }
+
+  if (vtkVariantArray* va = vtkVariantArray::SafeDownCast(source))
+    {
+    for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+      {
+      vtkIdType numComp = this->NumberOfComponents;
+      vtkIdType srcLoc = srcIds->GetId(idIndex) * this->NumberOfComponents;
+      vtkIdType dstLoc = dstIds->GetId(idIndex) * this->NumberOfComponents;
+      while (numComp-- > 0)
+        {
+        this->InsertValue(dstLoc++, va->GetValue(srcLoc++));
+        }
+      }
+    }
+  else if (vtkDataArray *da = vtkDataArray::FastDownCast(source))
+    {
+    for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+      {
+      vtkIdType numComp = this->NumberOfComponents;
+      vtkIdType srcLoc = srcIds->GetId(idIndex) * this->NumberOfComponents;
+      vtkIdType dstLoc = dstIds->GetId(idIndex) * this->NumberOfComponents;
+      while (numComp-- > 0)
+        {
+        this->InsertValue(dstLoc++, da->GetVariantValue(srcLoc++));
+        }
+      }
+    }
+  else if (vtkStringArray* sa = vtkStringArray::SafeDownCast(source))
+    {
+    for (vtkIdType idIndex = 0; idIndex < numIds; ++idIndex)
+      {
+      vtkIdType numComp = this->NumberOfComponents;
+      vtkIdType srcLoc = srcIds->GetId(idIndex) * this->NumberOfComponents;
+      vtkIdType dstLoc = dstIds->GetId(idIndex) * this->NumberOfComponents;
+      while (numComp-- > 0)
+        {
+        this->InsertValue(dstLoc++, sa->GetVariantValue(srcLoc++));
+        }
+      }
+    }
+  else
+    {
+    vtkWarningMacro("Unrecognized type is incompatible with vtkVariantArray.");
+    }
+  this->DataChanged();
+}
+
+//----------------------------------------------------------------------------
 vtkIdType vtkVariantArray::InsertNextTuple(vtkIdType j, vtkAbstractArray* source)
 {
   if (source->IsA("vtkVariantArray"))
diff --git a/Common/Core/vtkVariantArray.h b/Common/Core/vtkVariantArray.h
index e7f16e1..57315db 100644
--- a/Common/Core/vtkVariantArray.h
+++ b/Common/Core/vtkVariantArray.h
@@ -102,6 +102,13 @@ public:
   virtual void InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray* source);
 
   // Description:
+  // Copy the tuples indexed in srcIds from the source array to the tuple
+  // locations indexed by dstIds in this array.
+  // Note that memory allocation is performed as necessary to hold the data.
+  virtual void InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                            vtkAbstractArray *source);
+
+  // Description:
   // Insert the jth tuple in the source array, at the end in this array.
   // Note that memory allocation is performed as necessary to hold the data.
   // Returns the location at which the data was inserted.
@@ -259,7 +266,7 @@ public:
 
 protected:
   // Construct object with default tuple dimension (number of components) of 1.
-  vtkVariantArray(vtkIdType numComp=1);
+  vtkVariantArray();
 
   // Pointer to data
   //BTX
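
    The InsertTuples() overload declared above copies arbitrary tuples by index in a single
    call. A minimal sketch of driving it; the function name and the values below are
    illustrative, not part of the patch:

    #include "vtkIdList.h"
    #include "vtkNew.h"
    #include "vtkVariant.h"
    #include "vtkVariantArray.h"

    void insertTuplesSketch()
    {
      vtkNew<vtkVariantArray> src;
      src->SetNumberOfComponents(1);
      src->InsertNextValue(vtkVariant(1.0));
      src->InsertNextValue(vtkVariant("two"));
      src->InsertNextValue(vtkVariant(3));

      vtkNew<vtkVariantArray> dst;
      dst->SetNumberOfComponents(1);

      // Copy source tuples 0 and 2 into destination tuples 2 and 0, respectively.
      vtkNew<vtkIdList> srcIds;
      vtkNew<vtkIdList> dstIds;
      srcIds->InsertNextId(0);  dstIds->InsertNextId(2);
      srcIds->InsertNextId(2);  dstIds->InsertNextId(0);

      dst->InsertTuples(dstIds.GetPointer(), srcIds.GetPointer(), src.GetPointer());
    }
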
diff --git a/Common/Core/vtkVersion.h b/Common/Core/vtkVersion.h
index 52db22a..bca226a 100644
--- a/Common/Core/vtkVersion.h
+++ b/Common/Core/vtkVersion.h
@@ -49,8 +49,8 @@ public:
   static const char *GetVTKSourceVersion() { return VTK_SOURCE_VERSION; }
 
 protected:
-  vtkVersion() {}; //insure constructor/destructor protected
-  ~vtkVersion() {};
+  vtkVersion() {} // ensure constructor/destructor protected
+  ~vtkVersion() {}
 private:
   vtkVersion(const vtkVersion&);  // Not implemented.
   void operator=(const vtkVersion&);  // Not implemented.
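
    The only change in this hunk is dropping the stray semicolons after the inline bodies:
    a trailing ';' after an in-class function definition is redundant and pedantic compiler
    settings flag it as an empty declaration. A tiny illustration (not part of the patch):

    class Example
    {
    public:
      Example() {}    // no semicolon needed after an in-class function body
      ~Example() {}   // "{};" compiles on most compilers but draws pedantic warnings
    };
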
diff --git a/Common/Core/vtkVoidArray.cxx b/Common/Core/vtkVoidArray.cxx
index 7e8909d..99b8e57 100644
--- a/Common/Core/vtkVoidArray.cxx
+++ b/Common/Core/vtkVoidArray.cxx
@@ -27,10 +27,7 @@ vtkVoidArray::vtkVoidArray()
 
 vtkVoidArray::~vtkVoidArray()
 {
-  if (this->Array)
-    {
-    delete [] this->Array;
-    }
+  delete [] this->Array;
 }
 
 // Allocate memory for this array. Delete old storage only if necessary.
@@ -55,11 +52,8 @@ int vtkVoidArray::Allocate(vtkIdType sz, vtkIdType vtkNotUsed(ext))
 // Release storage and reset array to initial state.
 void vtkVoidArray::Initialize()
 {
-  if ( this->Array != NULL )
-    {
-    delete [] this->Array;
-    this->Array = NULL;
-    }
+  delete [] this->Array;
+  this->Array = NULL;
   this->Size = 0;
   this->NumberOfPointers = 0;
 }
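
    The simplification above relies on a guarantee of the language itself: applying delete
    or delete[] to a null pointer is a no-op, so the surrounding if checks added nothing.
    A minimal illustration (not part of the patch):

    #include <cstddef>

    void deleteNullSketch()
    {
      int *buffer = NULL;
      delete [] buffer;   // well-defined: deleting a null pointer does nothing

      buffer = new int[16];
      delete [] buffer;   // normal case
      buffer = NULL;      // reset so any later delete remains safe
    }
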
diff --git a/Common/Core/vtkWeakPointer.h b/Common/Core/vtkWeakPointer.h
index f35e836..0f17aef 100644
--- a/Common/Core/vtkWeakPointer.h
+++ b/Common/Core/vtkWeakPointer.h
@@ -80,6 +80,10 @@ public:
     {
     return static_cast<T*>(this->Object);
     }
+  T* Get() const
+    {
+    return static_cast<T*>(this->Object);
+    }
 
   // Description:
   // Get the contained pointer.
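
    The Get() accessor added above returns the same raw pointer as the existing
    GetPointer(). A brief sketch of both accessors and of the weak semantics; the function
    name is illustrative, not part of the patch:

    #include "vtkNew.h"
    #include "vtkObject.h"
    #include "vtkWeakPointer.h"

    void weakPointerSketch()
    {
      vtkWeakPointer<vtkObject> weak;
      {
        vtkNew<vtkObject> owner;
        weak = owner.GetPointer();
        vtkObject *a = weak.GetPointer();  // existing accessor
        vtkObject *b = weak.Get();         // new accessor, same raw pointer
        (void)a; (void)b;
      }
      // The owning reference above is gone, so the weak pointer now reports NULL.
      bool expired = (weak.Get() == NULL);
      (void)expired;
    }
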
diff --git a/Common/Core/vtkWeakPointerBase.h b/Common/Core/vtkWeakPointerBase.h
index 101cdf2..71fcb1d 100644
--- a/Common/Core/vtkWeakPointerBase.h
+++ b/Common/Core/vtkWeakPointerBase.h
@@ -32,7 +32,7 @@ class VTKCOMMONCORE_EXPORT vtkWeakPointerBase
 public:
   // Description:
   // Initialize smart pointer to NULL.
-  vtkWeakPointerBase() : Object(0) {};
+  vtkWeakPointerBase() : Object(0) {}
 
   // Description:
   // Initialize smart pointer to given object.
diff --git a/Common/Core/vtkWindow.h b/Common/Core/vtkWindow.h
index e4d7f66..395e29a 100644
--- a/Common/Core/vtkWindow.h
+++ b/Common/Core/vtkWindow.h
@@ -133,7 +133,7 @@ public:
   // Description:
   // Make the window current. May be overridden in subclasses to do
   // for example a glXMakeCurrent or a wglMakeCurrent.
-  virtual void MakeCurrent() {};
+  virtual void MakeCurrent() {}
 
   // Description:
   // These methods are used by vtkWindowToImageFilter to tell a VTK window
diff --git a/Common/Core/vtkXMLFileOutputWindow.h b/Common/Core/vtkXMLFileOutputWindow.h
index b65b1c4..09acdec 100644
--- a/Common/Core/vtkXMLFileOutputWindow.h
+++ b/Common/Core/vtkXMLFileOutputWindow.h
@@ -61,8 +61,8 @@ public:
   virtual void DisplayTag(const char*);
 
 protected:
-  vtkXMLFileOutputWindow() {};
-  virtual ~vtkXMLFileOutputWindow() {};
+  vtkXMLFileOutputWindow() {}
+  virtual ~vtkXMLFileOutputWindow() {}
 
   void Initialize();
   virtual void DisplayXML(const char*, const char*);
diff --git a/Common/Core/vtk__Int64Array.h b/Common/Core/vtk__Int64Array.h
index 1cb6286..ce10676 100644
--- a/Common/Core/vtk__Int64Array.h
+++ b/Common/Core/vtk__Int64Array.h
@@ -31,88 +31,25 @@
 #include "vtkDataArrayTemplate.h" // Real Superclass
 
 // Fake the superclass for the wrappers.
+#ifndef __WRAP__
 #define vtkDataArray vtkDataArrayTemplate<__int64>
+#endif
 class VTKCOMMONCORE_EXPORT vtk__Int64Array : public vtkDataArray
+#ifndef __WRAP__
 #undef vtkDataArray
+#endif
 {
 public:
   static vtk__Int64Array* New();
   vtkTypeMacro(vtk__Int64Array,vtkDataArray);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  // Description:
-  // Get the data type.
-  int GetDataType()
-    { return VTK___INT64; }
-
-  // Description:
-  // Copy the tuple value into a user-provided array.
-  void GetTupleValue(vtkIdType i, __int64* tuple)
-    { this->RealSuperclass::GetTupleValue(i, tuple); }
-
-  // Description:
-  // Set the tuple value at the ith location in the array.
-  void SetTupleValue(vtkIdType i, const __int64* tuple)
-    { this->RealSuperclass::SetTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple into the ith location
-  // in the array.
-  void InsertTupleValue(vtkIdType i, const __int64* tuple)
-    { this->RealSuperclass::InsertTupleValue(i, tuple); }
-
-  // Description:
-  // Insert (memory allocation performed) the tuple onto the end of the array.
-  vtkIdType InsertNextTupleValue(const __int64* tuple)
-    { return this->RealSuperclass::InsertNextTupleValue(tuple); }
-
-  // Description:
-  // Get the data at a particular index.
-  __int64 GetValue(vtkIdType id)
-    { return this->RealSuperclass::GetValue(id); }
-
-  // Description:
-  // Set the data at a particular index. Does not do range checking. Make sure
-  // you use the method SetNumberOfValues() before inserting data.
-  void SetValue(vtkIdType id, __int64 value)
-    { this->RealSuperclass::SetValue(id, value); }
-
-  // Description:
-  // Specify the number of values for this object to hold. Does an
-  // allocation as well as setting the MaxId ivar. Used in conjunction with
-  // SetValue() method for fast insertion.
-  void SetNumberOfValues(vtkIdType number)
-    { this->RealSuperclass::SetNumberOfValues(number); }
-
-  // Description:
-  // Insert data at a specified position in the array.
-  void InsertValue(vtkIdType id, __int64 f)
-    { this->RealSuperclass::InsertValue(id, f); }
-
-  // Description:
-  // Insert data at the end of the array. Return its location in the array.
-  vtkIdType InsertNextValue(__int64 f)
-    { return this->RealSuperclass::InsertNextValue(f); }
-
-  // Description:
-  // Get the range of array values for the given component in the
-  // native data type.
-  __int64 *GetValueRange(int comp)
-    { return this->RealSuperclass::GetValueRange(comp); }
-//BTX
-  void GetValueRange(__int64 range[2], int comp)
-    { this->RealSuperclass::GetValueRange(range, comp); }
-//ETX
-
-  // Description:
-  // Get the range of array values for the 0th component in the
-  // native data type.
-  __int64 *GetValueRange()
-    { return this->RealSuperclass::GetValueRange(0); }
-//BTX
-  void GetValueRange(__int64 range[2])
-    { this->RealSuperclass::GetValueRange(range, 0); }
-//ETX
+  // This macro expands to the set of method declarations that
+  // make up the interface of vtkDataArrayTemplate, which is ignored
+  // by the wrappers.
+#ifdef __WRAP__
+  vtkCreateWrappedArrayInterface(__int64);
+#endif
 
   // Description:
   // Get the minimum data value in its native type.
@@ -122,31 +59,6 @@ public:
   // Get the maximum data value in its native type.
   static __int64 GetDataTypeValueMax() { return VTK___INT64_MAX; }
 
-  // Description:
-  // Get the address of a particular data index. Make sure data is allocated
-  // for the number of items requested. Set MaxId according to the number of
-  // data values requested.
-  __int64* WritePointer(vtkIdType id, vtkIdType number)
-    { return this->RealSuperclass::WritePointer(id, number); }
-
-  // Description:
-  // Get the address of a particular data index. Performs no checks
-  // to verify that the memory has been allocated etc.
-  __int64* GetPointer(vtkIdType id)
-    { return this->RealSuperclass::GetPointer(id); }
-
-  // Description:
-  // This method lets the user specify data to be held by the array.  The
-  // array argument is a pointer to the data.  size is the size of
-  // the array supplied by the user.  Set save to 1 to keep the class
-  // from deleting the array when it cleans up or reallocates memory.
-  // The class uses the actual array provided; it does not copy the data
-  // from the suppled array.
-  void SetArray(__int64* array, vtkIdType size, int save)
-    { this->RealSuperclass::SetArray(array, size, save); }
-  void SetArray(__int64* array, vtkIdType size, int save, int deleteMethod)
-    { this->RealSuperclass::SetArray(array, size, save, deleteMethod); }
-
 protected:
   vtk__Int64Array(vtkIdType numComp=1);
   ~vtk__Int64Array();
diff --git a/Common/DataModel/CMakeLists.txt b/Common/DataModel/CMakeLists.txt
index a27e39e..05ee164 100644
--- a/Common/DataModel/CMakeLists.txt
+++ b/Common/DataModel/CMakeLists.txt
@@ -19,6 +19,7 @@ set(Module_SRCS
   vtkCellArray.cxx
   vtkCell.cxx
   vtkCellData.cxx
+  vtkCellIterator.cxx
   vtkCellLinks.cxx
   vtkCellLocator.cxx
   vtkCellTypes.cxx
@@ -28,6 +29,7 @@ set(Module_SRCS
   vtkConvexPointSet.cxx
   vtkCubicLine.cxx
   vtkCylinder.cxx
+  vtkDataSetCellIterator.cxx
   vtkDataObjectCollection.cxx
   vtkDataObject.cxx
   vtkDataObjectTypes.cxx
@@ -91,6 +93,8 @@ set(Module_SRCS
   vtkKdTreePointLocator.cxx
   vtkLine.cxx
   vtkLocator.cxx
+  vtkMappedUnstructuredGrid.txx
+  vtkMappedUnstructuredGridCellIterator.txx
   vtkMarchingSquaresLineCases.cxx
   vtkMarchingCubesTriangleCases.cxx
   vtkMeanValueCoordinatesInterpolator.cxx
@@ -110,6 +114,7 @@ set(Module_SRCS
   vtkPerlinNoise.cxx
   vtkPiecewiseFunction.cxx
   vtkPixel.cxx
+  vtkPixelExtent.cxx
   vtkPlaneCollection.cxx
   vtkPlane.cxx
   vtkPlanes.cxx
@@ -117,6 +122,7 @@ set(Module_SRCS
   vtkPointData.cxx
   vtkPointLocator.cxx
   vtkPointSet.cxx
+  vtkPointSetCellIterator.cxx
   vtkPointsProjectedHull.cxx
   vtkPolyDataCollection.cxx
   vtkPolyData.cxx
@@ -131,6 +137,7 @@ set(Module_SRCS
   vtkQuadraticHexahedron.cxx
   vtkQuadraticLinearQuad.cxx
   vtkQuadraticLinearWedge.cxx
+  vtkQuadraticPolygon.cxx
   vtkQuadraticPyramid.cxx
   vtkQuadraticQuad.cxx
   vtkQuadraticTetra.cxx
@@ -166,6 +173,8 @@ set(Module_SRCS
   vtkUndirectedGraph.cxx
   vtkUniformGrid.cxx
   vtkUnstructuredGrid.cxx
+  vtkUnstructuredGridBase.cxx
+  vtkUnstructuredGridCellIterator.cxx
   vtkVertex.cxx
   vtkVertexListIterator.cxx
   vtkVoxel.cxx
@@ -201,6 +210,8 @@ set(Module_SRCS
 
 set(${vtk-module}_HDRS
   vtkCellType.h
+  vtkMappedUnstructuredGrid.h
+  vtkMappedUnstructuredGridCellIterator.h
   )
 
 set_source_files_properties(
@@ -208,6 +219,7 @@ set_source_files_properties(
   vtkAbstractPointLocator
   vtkCell
   vtkCell3D
+  vtkCellIterator
   vtkCompositeDataIterator
   vtkCompositeDataSet
   vtkDataObjectTree
@@ -237,6 +249,7 @@ set_source_files_properties(
   vtkSpline
   vtkStructuredData
   vtkTreeIterator
+  vtkUnstructuredGridBase
   ABSTRACT
   )
 
@@ -250,10 +263,13 @@ set_source_files_properties(
   vtkDispatcher_Private
   vtkDispatcher
   vtkDoubleDispatcher
+  vtkMappedUnstructuredGrid.txx
+  vtkMappedUnstructuredGridCellIterator.txx
   vtkMarchingSquaresLineCases
   vtkMarchingCubesTriangleCases
   vtkImageIterator
   vtkImageProgressIterator
+  vtkPixelExtent.cxx
   vtkVector
   vtkColor
   vtkRect
diff --git a/Common/DataModel/Testing/Cxx/CMakeLists.txt b/Common/DataModel/Testing/Cxx/CMakeLists.txt
index 0b366d9..0488716 100644
--- a/Common/DataModel/Testing/Cxx/CMakeLists.txt
+++ b/Common/DataModel/Testing/Cxx/CMakeLists.txt
@@ -1,4 +1,4 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
   TestColor.cxx
   TestVector.cxx
   TestVectorOperators.cxx
@@ -16,11 +16,13 @@ create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
   TestInterpolationDerivs.cxx
   TestInterpolationFunctions.cxx
   TestPath.cxx
+  TestPixelExtent.cxx
   TestPointLocators.cxx
   TestPolyDataRemoveCell.cxx
   TestPolygon.cxx
   TestPolyhedron0.cxx
   TestPolyhedron1.cxx
+  TestQuadraticPolygon.cxx
   TestSelectionSubtract.cxx
   TestTreeBFSIterator.cxx
   TestTreeDFSIterator.cxx
@@ -40,17 +42,14 @@ create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
   TestPlane.cxx
   TestStructuredData.cxx
   TestDataObjectTypes.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
+  )
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
+vtk_add_test_cxx(NO_VALID NO_OUTPUT
+  TestCellIterators.cxx
+  )
 
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
+vtk_add_test_cxx(
+  TestQuadraticPolygonFilters.cxx
+  )
 
-# Add all the executables
-foreach (test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName})
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Common/DataModel/Testing/Cxx/TestAMRBox.cxx b/Common/DataModel/Testing/Cxx/TestAMRBox.cxx
index c3c01c7..1aebc4f 100644
--- a/Common/DataModel/Testing/Cxx/TestAMRBox.cxx
+++ b/Common/DataModel/Testing/Cxx/TestAMRBox.cxx
@@ -351,10 +351,7 @@ int TestAMRBoxSerialization()
     rc++;
     }
 
-  if( buffer != NULL )
-    {
-    delete [] buffer;
-    }
+  delete [] buffer;
 
   return( rc );
 }
@@ -376,7 +373,7 @@ void CheckTestStatus( int rc, std::string TestName )
     }
 }
 
-#include "assert.h"
+#include <cassert>
 //------------------------------------------------------------------------------
 int TestAMRBox(int , char *[])
 {
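
    The include change above swaps the C header spelling for the C++ one; both provide
    assert(), but <cassert> is the conventional form in C++ sources. For example:

    #include <cassert>   // C++ spelling of the C header assert.h

    void checkNonNegative(int n)
    {
      assert(n >= 0);    // compiled out when NDEBUG is defined
    }
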
diff --git a/Common/DataModel/Testing/Cxx/TestCellIterators.cxx b/Common/DataModel/Testing/Cxx/TestCellIterators.cxx
new file mode 100644
index 0000000..c4e79d5
--- /dev/null
+++ b/Common/DataModel/Testing/Cxx/TestCellIterators.cxx
@@ -0,0 +1,812 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestCellIterators.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCellIterator.h"
+
+#include "vtkCellArray.h"
+#include "vtkFloatArray.h"
+#include "vtkGenericCell.h"
+#include "vtkNew.h"
+#include "vtkPoints.h"
+#include "vtkSmartPointer.h"
+#include "vtkTestUtilities.h"
+#include "vtkTimerLog.h"
+#include "vtkUnsignedCharArray.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkUnstructuredGridReader.h"
+
+#include <sstream>
+#include <string>
+
+// Enable/disable code that helps/hinders profiling.
+#undef PROFILE
+//#define PROFILE
+
+// Enable benchmarks.
+#undef BENCHMARK
+//#define BENCHMARK
+
+#ifdef BENCHMARK
+#  ifdef PROFILE
+#    define NUM_BENCHMARKS 10
+#  else // PROFILE
+#    define NUM_BENCHMARKS 100
+#  endif // PROFILE
+#endif // BENCHMARK
+
+//------------------------------------------------------------------------------
+// Compare the cell type, point ids, and points in 'grid' with those returned
+// in 'iter'.
+bool testCellIterator(vtkCellIterator *iter, vtkUnstructuredGrid *grid)
+{
+  vtkIdType cellId = 0;
+  vtkNew<vtkGenericCell> cell;
+  iter->InitTraversal();
+  while (!iter->IsDoneWithTraversal())
+    {
+    grid->GetCell(cellId, cell.GetPointer());
+
+    if (iter->GetCellType() != cell->GetCellType())
+      {
+      cerr << "Type mismatch for cell " << cellId << endl;
+      return false;
+      }
+
+    vtkIdType numPoints = iter->GetNumberOfPoints();
+    if (numPoints != cell->GetNumberOfPoints())
+      {
+      cerr << "Number of points mismatch for cell " << cellId << endl;
+      return false;
+      }
+
+    for (vtkIdType pointInd = 0; pointInd < numPoints; ++pointInd)
+      {
+      if (iter->GetPointIds()->GetId(pointInd)
+          != cell->PointIds->GetId(pointInd))
+        {
+        cerr << "Point id mismatch in cell " << cellId << endl;
+        return false;
+        }
+
+      double iterPoint[3];
+      double cellPoint[3];
+      iter->GetPoints()->GetPoint(pointInd, iterPoint);
+      cell->Points->GetPoint(pointInd, cellPoint);
+      if (iterPoint[0] != cellPoint[0] ||
+          iterPoint[1] != cellPoint[1] ||
+          iterPoint[2] != cellPoint[2] )
+        {
+        cerr << "Point mismatch in cell " << cellId << endl;
+        return false;
+        }
+      }
+
+    iter->GoToNextCell();
+    ++cellId;
+    }
+
+  // ensure that we checked all of the cells
+  if (cellId != grid->GetNumberOfCells())
+    {
+    cerr << "Iterator did not cover all cells in the dataset!" << endl;
+    return false;
+    }
+
+//  cout << "Verified " << cellId << " cells with a " << iter->GetClassName()
+//       << "." << endl;
+  return true;
+}
+
+#define TEST_ITERATOR(iter_, className_) \
+  if (std::string(#className_) != std::string(iter_->GetClassName())) \
+    { \
+    cerr << "Unexpected iterator type (expected " #className_ ", got " \
+         << iter_->GetClassName() << ")" << endl; \
+    return false; \
+    } \
+  \
+  if (!testCellIterator(iter_, grid)) \
+    { \
+    cerr << #className_ << " test failed." << endl; \
+    return false; \
+    } \
+  \
+  if (!testCellIterator(iter_, grid)) \
+    { \
+    cerr << #className_ << " test failed after rewind." << endl; \
+    return false; \
+    } \
+
+
+bool runValidation(vtkUnstructuredGrid *grid)
+{
+  // vtkDataSetCellIterator:
+  vtkCellIterator *iter = grid->vtkDataSet::NewCellIterator();
+  TEST_ITERATOR(iter, vtkDataSetCellIterator);
+  iter->Delete();
+
+  // vtkPointSetCellIterator:
+  iter = grid->vtkPointSet::NewCellIterator();
+  TEST_ITERATOR(iter, vtkPointSetCellIterator);
+  iter->Delete();
+
+  // vtkUnstructuredGridCellIterator:
+  iter = grid->vtkUnstructuredGrid::NewCellIterator();
+  TEST_ITERATOR(iter, vtkUnstructuredGridCellIterator);
+  iter->Delete();
+
+  return true;
+}
+
+// Do-nothing function that ensures arguments passed in will not be compiled
+// out. Aggressive optimization will otherwise remove portions of the following
+// loops, throwing off the benchmark results:
+namespace {
+std::stringstream _sink;
+template <class Type>
+void useData(const Type& data)
+{
+  _sink << data;
+}
+} // end anon namespace
+
+// Benchmarking code follows:
+#ifdef BENCHMARK
+// There are three signatures for each benchmark function:
+// - double ()(vtkUnstructuredGrid *)
+//   Iterate through cells in an unstructured grid, using raw memory when
+//   possible.
+// - double ()(vtkUnstructuredGrid *, int)
+//   Iterate through cells in an unstructured grid, using the API only
+// - double ()(vtkCellIterator *)
+//   Iterate through all cells available through the iterator.
+double benchmarkTypeIteration(vtkUnstructuredGrid *grid)
+{
+  vtkIdType numCells = grid->GetNumberOfCells();
+  vtkUnsignedCharArray *types = grid->GetCellTypesArray();
+  unsigned char *ptr = types->GetPointer(0);
+  unsigned char range[2] = {VTK_UNSIGNED_CHAR_MAX, VTK_UNSIGNED_CHAR_MIN};
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (int i = 0; i < numCells; ++i)
+    {
+    range[0] = std::min(range[0], ptr[i]);
+    range[1] = std::max(range[1], ptr[i]);
+    }
+  timer->StopTimer();
+
+  useData(range[0]);
+  useData(range[1]);
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkTypeIteration(vtkUnstructuredGrid *grid, int)
+{
+  vtkIdType numCells = grid->GetNumberOfCells();
+  unsigned char tmp;
+  unsigned char range[2] = {VTK_UNSIGNED_CHAR_MAX, VTK_UNSIGNED_CHAR_MIN};
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (int i = 0; i < numCells; ++i)
+    {
+    tmp = static_cast<unsigned char>(grid->GetCellType(i));
+    range[0] = std::min(range[0], tmp);
+    range[1] = std::max(range[1], tmp);
+    }
+  timer->StopTimer();
+
+  useData(range[0]);
+  useData(range[1]);
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkTypeIteration(vtkCellIterator *iter)
+{
+  int range[2] = {VTK_UNSIGNED_CHAR_MAX, VTK_UNSIGNED_CHAR_MIN};
+  int tmp;
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextCell())
+    {
+    tmp = iter->GetCellType();
+    range[0] = std::min(range[0], tmp);
+    range[1] = std::max(range[1], tmp);
+    }
+  timer->StopTimer();
+
+  useData(range[0]);
+  useData(range[1]);
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPointIdIteration(vtkUnstructuredGrid *grid)
+{
+  vtkCellArray *cellArray = grid->GetCells();
+  vtkIdType numCells = cellArray->GetNumberOfCells();
+  vtkIdType *cellPtr = cellArray->GetPointer();
+  vtkIdType range[2] = {VTK_ID_MAX, VTK_ID_MIN};
+  vtkIdType cellSize;
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (vtkIdType cellId = 0; cellId < numCells; ++cellId)
+    {
+    cellSize = *(cellPtr++);
+    for (vtkIdType pointIdx = 0; pointIdx < cellSize; ++pointIdx)
+      {
+      range[0] = std::min(range[0], cellPtr[pointIdx]);
+      range[1] = std::max(range[1], cellPtr[pointIdx]);
+      }
+    cellPtr += cellSize;
+    }
+  timer->StopTimer();
+
+  useData(range[0]);
+  useData(range[1]);
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPointIdIteration(vtkUnstructuredGrid *grid, int)
+{
+  vtkIdType numCells = grid->GetNumberOfCells();
+  vtkIdType range[2] = {VTK_ID_MAX, VTK_ID_MIN};
+  vtkIdType cellSize;
+  vtkIdList *cellPointIds = vtkIdList::New();
+  vtkIdType *cellPtr;
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (vtkIdType cellId = 0; cellId < numCells; ++cellId)
+    {
+    grid->GetCellPoints(cellId, cellPointIds);
+    cellSize = cellPointIds->GetNumberOfIds();
+    cellPtr = cellPointIds->GetPointer(0);
+    for (vtkIdType pointIdx = 0; pointIdx < cellSize; ++pointIdx)
+      {
+      range[0] = std::min(range[0], cellPtr[pointIdx]);
+      range[1] = std::max(range[1], cellPtr[pointIdx]);
+      }
+    }
+  timer->StopTimer();
+
+  useData(range[0]);
+  useData(range[1]);
+
+  cellPointIds->Delete();
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPointIdIteration(vtkCellIterator *iter)
+{
+  vtkIdType range[2] = {VTK_ID_MAX, VTK_ID_MIN};
+  vtkIdType *cellPtr;
+  vtkIdType *cellEnd;
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextCell())
+    {
+    cellPtr = iter->GetPointIds()->GetPointer(0);
+    cellEnd = cellPtr + iter->GetNumberOfPoints();
+    while (cellPtr != cellEnd)
+      {
+      range[0] = std::min(range[0], *cellPtr);
+      range[1] = std::max(range[1], *cellPtr);
+      ++cellPtr;
+      }
+    }
+  timer->StopTimer();
+
+  useData(range[0]);
+  useData(range[1]);
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPointsIteration(vtkUnstructuredGrid *grid)
+{
+  vtkCellArray *cellArray = grid->GetCells();
+  const vtkIdType numCells = cellArray->GetNumberOfCells();
+  vtkIdType *cellPtr = cellArray->GetPointer();
+  vtkIdType cellSize;
+
+  vtkPoints *points = grid->GetPoints();
+  vtkFloatArray *pointDataArray = vtkFloatArray::SafeDownCast(points->GetData());
+  if (!pointDataArray)
+    {
+    return -1.0;
+    }
+  float *pointData = pointDataArray->GetPointer(0);
+  float *point;
+  float dummy[3] = {0.f, 0.f, 0.f};
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (vtkIdType cellId = 0; cellId < numCells; ++cellId)
+    {
+    cellSize = *(cellPtr++);
+    for (vtkIdType pointIdx = 0; pointIdx < cellSize; ++pointIdx)
+      {
+      point = pointData + 3 * cellPtr[pointIdx];
+      dummy[0] += point[0];
+      dummy[1] += point[1];
+      dummy[2] += point[2];
+      }
+    cellPtr += cellSize;
+    }
+  timer->StopTimer();
+
+  useData(dummy[0]);
+  useData(dummy[1]);
+  useData(dummy[2]);
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPointsIteration(vtkUnstructuredGrid *grid, int)
+{
+  vtkIdList *pointIds = vtkIdList::New();
+  vtkIdType cellSize;
+  vtkIdType *cellPtr;
+
+  vtkPoints *points = grid->GetPoints();
+  double point[3];
+  double dummy[3] = {0.f, 0.f, 0.f};
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  const vtkIdType numCells = grid->GetNumberOfCells();
+  for (vtkIdType cellId = 0; cellId < numCells; ++cellId)
+    {
+    grid->GetCellPoints(cellId, pointIds);
+    cellSize = pointIds->GetNumberOfIds();
+    cellPtr = pointIds->GetPointer(0);
+    for (vtkIdType pointIdx = 0; pointIdx < cellSize; ++pointIdx)
+      {
+      points->GetPoint(cellPtr[pointIdx], point);
+      dummy[0] += point[0];
+      dummy[1] += point[1];
+      dummy[2] += point[2];
+      }
+    }
+  timer->StopTimer();
+
+  useData(dummy[0]);
+  useData(dummy[1]);
+  useData(dummy[2]);
+
+  pointIds->Delete();
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPointsIteration(vtkCellIterator *iter)
+{
+  float dummy[3] = {0.f, 0.f, 0.f};
+
+  // Ensure that the call to GetPoints() is at a valid cell:
+  iter->InitTraversal();
+  if (iter->IsDoneWithTraversal())
+    {
+    return -1.0;
+    }
+  vtkFloatArray *pointArray =
+      vtkFloatArray::SafeDownCast(iter->GetPoints()->GetData());
+  float *pointsData;
+  float *pointsDataEnd;
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextCell())
+    {
+    pointsData = pointArray->GetPointer(0);
+    pointsDataEnd = pointsData + iter->GetNumberOfPoints();
+    while (pointsData < pointsDataEnd)
+      {
+      dummy[0] += *pointsData++;
+      dummy[1] += *pointsData++;
+      dummy[2] += *pointsData++;
+      }
+    }
+  timer->StopTimer();
+
+  useData(dummy[0]);
+  useData(dummy[1]);
+  useData(dummy[2]);
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkCellIteration(vtkUnstructuredGrid *grid)
+{
+  vtkGenericCell *cell = vtkGenericCell::New();
+  vtkIdType numCells = grid->GetNumberOfCells();
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (vtkIdType cellId = 0; cellId < numCells; ++cellId)
+    {
+    grid->GetCell(cellId, cell);
+    }
+  timer->StopTimer();
+  cell->Delete();
+  return timer->GetElapsedTime();
+}
+
+double benchmarkCellIteration(vtkUnstructuredGrid *grid, int)
+{
+  // No real difference here....
+  return benchmarkCellIteration(grid);
+}
+
+double benchmarkCellIteration(vtkCellIterator *it)
+{
+  vtkGenericCell *cell = vtkGenericCell::New();
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
+    {
+    it->GetCell(cell);
+    }
+  timer->StopTimer();
+  cell->Delete();
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPiecewiseIteration(vtkUnstructuredGrid *grid)
+{
+  // Setup for types:
+  vtkUnsignedCharArray *typeArray = grid->GetCellTypesArray();
+  unsigned char *typePtr = typeArray->GetPointer(0);
+  unsigned char typeRange[2] = {VTK_UNSIGNED_CHAR_MAX, VTK_UNSIGNED_CHAR_MIN};
+
+  // Setup for point ids:
+  vtkCellArray *cellArray = grid->GetCells();
+  vtkIdType *cellArrayPtr = cellArray->GetPointer();
+  vtkIdType ptIdRange[2] = {VTK_ID_MAX, VTK_ID_MIN};
+  vtkIdType cellSize;
+
+  // Setup for points:
+  vtkPoints *points = grid->GetPoints();
+  vtkFloatArray *pointDataArray = vtkFloatArray::SafeDownCast(points->GetData());
+  if (!pointDataArray)
+    {
+    return -1.0;
+    }
+  float *pointData = pointDataArray->GetPointer(0);
+  float *point;
+  float dummy[3] = {0.f, 0.f, 0.f};
+
+  // Setup for cells
+  vtkGenericCell *cell = vtkGenericCell::New();
+
+  vtkIdType numCells = grid->GetNumberOfCells();
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (int i = 0; i < numCells; ++i)
+    {
+    // Types:
+    typeRange[0] = std::min(typeRange[0], typePtr[i]);
+    typeRange[1] = std::max(typeRange[1], typePtr[i]);
+
+    cellSize = *(cellArrayPtr++);
+    for (vtkIdType pointIdx = 0; pointIdx < cellSize; ++pointIdx)
+      {
+      // Point ids:
+      ptIdRange[0] = std::min(ptIdRange[0], cellArrayPtr[pointIdx]);
+      ptIdRange[1] = std::max(ptIdRange[1], cellArrayPtr[pointIdx]);
+
+      // Points:
+      point = pointData + 3 * cellArrayPtr[pointIdx];
+      dummy[0] += point[0];
+      dummy[1] += point[1];
+      dummy[2] += point[2];
+      }
+    cellArrayPtr += cellSize;
+
+    // Cell:
+    grid->GetCell(i, cell);
+    }
+  timer->StopTimer();
+
+  useData(typeRange[0]);
+  useData(typeRange[1]);
+
+  useData(ptIdRange[0]);
+  useData(ptIdRange[1]);
+
+  useData(dummy[0]);
+  useData(dummy[1]);
+  useData(dummy[2]);
+
+  cell->Delete();
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPiecewiseIteration(vtkUnstructuredGrid *grid, int)
+{
+  // Setup for type
+  unsigned char cellType;
+  unsigned char typeRange[2] = {VTK_UNSIGNED_CHAR_MAX, VTK_UNSIGNED_CHAR_MIN};
+
+  // Setup for point ids
+  vtkIdType ptIdRange[2] = {VTK_ID_MAX, VTK_ID_MIN};
+  vtkIdType cellSize;
+  vtkIdList *cellPointIds = vtkIdList::New();
+  vtkIdType *cellPtIdPtr;
+
+  // Setup for points
+  vtkPoints *points = grid->GetPoints();
+  double point[3];
+  double dummy[3] = {0.f, 0.f, 0.f};
+
+  // Setup for cells
+  vtkGenericCell *cell = vtkGenericCell::New();
+
+  vtkIdType numCells = grid->GetNumberOfCells();
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (vtkIdType cellId = 0; cellId < numCells; ++cellId)
+    {
+    // Cell type
+    cellType = static_cast<unsigned char>(grid->GetCellType(cellId));
+    typeRange[0] = std::min(typeRange[0], cellType);
+    typeRange[1] = std::max(typeRange[1], cellType);
+
+    grid->GetCellPoints(cellId, cellPointIds);
+    cellSize = cellPointIds->GetNumberOfIds();
+    cellPtIdPtr = cellPointIds->GetPointer(0);
+    for (vtkIdType pointIdx = 0; pointIdx < cellSize; ++pointIdx)
+      {
+      // Point ids:
+      ptIdRange[0] = std::min(ptIdRange[0], cellPtIdPtr[pointIdx]);
+      ptIdRange[1] = std::max(ptIdRange[1], cellPtIdPtr[pointIdx]);
+
+      // Points:
+      points->GetPoint(cellPtIdPtr[pointIdx], point);
+      dummy[0] += point[0];
+      dummy[1] += point[1];
+      dummy[2] += point[2];
+
+      }
+
+    // Cell:
+    grid->GetCell(cellId, cell);
+    }
+  timer->StopTimer();
+
+  useData(typeRange[0]);
+  useData(typeRange[1]);
+
+  useData(ptIdRange[0]);
+  useData(ptIdRange[1]);
+
+  useData(dummy[0]);
+  useData(dummy[1]);
+  useData(dummy[2]);
+
+  cellPointIds->Delete();
+
+  return timer->GetElapsedTime();
+}
+
+double benchmarkPiecewiseIteration(vtkCellIterator *iter)
+{
+  // Type setup:
+  int typeRange[2] = {VTK_UNSIGNED_CHAR_MAX, VTK_UNSIGNED_CHAR_MIN};
+
+  // Point ids setups:
+  vtkIdType ptIdRange[2] = {VTK_ID_MAX, VTK_ID_MIN};
+  vtkIdType *cellPtr;
+  vtkIdType cellSize;
+
+  // Points setup:
+  float dummy[3] = {0.f, 0.f, 0.f};
+  float *pointsPtr;
+
+  // Cell setup
+  vtkGenericCell *cell = vtkGenericCell::New();
+
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextCell())
+    {
+    // Types:
+    typeRange[0] = std::min(typeRange[0], iter->GetCellType());
+    typeRange[1] = std::max(typeRange[1], iter->GetCellType());
+
+    cellPtr = iter->GetPointIds()->GetPointer(0);
+    pointsPtr = static_cast<float*>(iter->GetPoints()->GetVoidPointer(0));
+    cellSize = iter->GetPointIds()->GetNumberOfIds();
+    while (cellSize-- > 0)
+      {
+      // Point Ids:
+      ptIdRange[0] = std::min(ptIdRange[0], *cellPtr);
+      ptIdRange[1] = std::max(ptIdRange[1], *cellPtr);
+      ++cellPtr;
+
+      // Points:
+      dummy[0] += *pointsPtr++;
+      dummy[1] += *pointsPtr++;
+      dummy[2] += *pointsPtr++;
+      }
+
+    // Cell:
+    iter->GetCell(cell);
+    }
+  timer->StopTimer();
+
+  useData(typeRange[0]);
+  useData(typeRange[1]);
+
+  useData(ptIdRange[0]);
+  useData(ptIdRange[1]);
+
+  useData(dummy[0]);
+  useData(dummy[1]);
+  useData(dummy[2]);
+
+  cell->Delete();
+
+  return timer->GetElapsedTime();
+}
+
+#define BENCHMARK_ITERATORS(grid_, test_, bench_) \
+  if (!runBenchmark(grid_, test_, bench_, bench_, bench_)) \
+    { \
+    cerr << "Benchmark '" << test_ << "' encountered an error." << endl; \
+    return false; \
+    }
+
+typedef double (*BenchmarkRefType)(vtkUnstructuredGrid*);
+typedef double (*BenchmarkApiType)(vtkUnstructuredGrid*, int);
+typedef double (*BenchmarkIterType)(vtkCellIterator*);
+bool runBenchmark(vtkUnstructuredGrid *grid, const std::string &test,
+                  BenchmarkRefType refBench, BenchmarkApiType apiBench,
+                  BenchmarkIterType iterBench)
+{
+  const int numBenchmarks = NUM_BENCHMARKS;
+  double refTime = 0.;
+  double apiTime = 0.;
+  double dsTime  = 0.;
+  double psTime  = 0.;
+  double ugTime  = 0.;
+
+  vtkCellIterator *dsIter = grid->vtkDataSet::NewCellIterator();
+  vtkCellIterator *psIter = grid->vtkPointSet::NewCellIterator();
+  vtkCellIterator *ugIter = grid->NewCellIterator();
+
+  cout << "Testing " << test << " (" << numBenchmarks << " samples):" << endl;
+
+#ifdef PROFILE
+  std::string prog;
+  prog.resize(12, ' ');
+  prog[0] = prog[11] = '|';
+#endif // PROFILE
+
+  for (int i = 0; i < numBenchmarks; ++i)
+    {
+#ifdef PROFILE
+    std::fill_n(prog.begin() + 1, i * 10 / numBenchmarks, '=');
+    cout << "\rProgress: " << prog << " (" << i << "/" << numBenchmarks << ")"
+         << endl;
+#endif // PROFILE
+
+    refTime += refBench(grid);
+    apiTime += apiBench(grid, 0);
+    dsTime  += iterBench(dsIter);
+    psTime  += iterBench(psIter);
+    ugTime  += iterBench(ugIter);
+    }
+
+#ifdef PROFILE
+  std::fill_n(prog.begin() + 1, 10, '=');
+  cout << "\rProgress: " << prog << " (" << numBenchmarks << "/"
+       << numBenchmarks << ")" << endl;
+#endif // PROFILE
+
+  refTime /= static_cast<double>(numBenchmarks);
+  apiTime /= static_cast<double>(numBenchmarks);
+  dsTime  /= static_cast<double>(numBenchmarks);
+  psTime  /= static_cast<double>(numBenchmarks);
+  ugTime  /= static_cast<double>(numBenchmarks);
+
+  const std::string sep("\t");
+  cout << std::setw(8)
+
+       << "\t"
+       << "Ref (raw)" << sep
+       << "Ref (api)" << sep
+       << "DSIter" << sep
+       << "PSIter" << sep
+       << "UGIter"
+       << endl
+       << "\t"
+       << refTime << sep
+       << apiTime << sep
+       << dsTime << sep
+       << psTime << sep
+       << ugTime
+       << endl;
+
+  dsIter->Delete();
+  psIter->Delete();
+  ugIter->Delete();
+
+  return true;
+}
+
+bool runBenchmarks(vtkUnstructuredGrid *grid)
+{
+  BENCHMARK_ITERATORS(grid, "cell type", benchmarkTypeIteration);
+  BENCHMARK_ITERATORS(grid, "cell pointId", benchmarkPointIdIteration);
+  BENCHMARK_ITERATORS(grid, "cell point", benchmarkPointsIteration);
+  BENCHMARK_ITERATORS(grid, "cells", benchmarkCellIteration);
+  BENCHMARK_ITERATORS(grid, "piecewise", benchmarkPiecewiseIteration);
+  return true;
+}
+#endif // Benchmark
+
+int TestCellIterators(int argc, char *argv[])
+{
+  // Load an unstructured grid dataset
+  char *fileNameC = vtkTestUtilities::ExpandDataFileName(argc, argv,
+                                                         "Data/blowGeom.vtk");
+  std::string fileName(fileNameC);
+  delete [] fileNameC;
+
+  vtkNew<vtkUnstructuredGridReader> reader;
+  reader->SetFileName(fileName.c_str());
+  reader->Update();
+  vtkUnstructuredGrid *grid(reader->GetOutput());
+  if (!grid)
+    {
+    cerr << "Error reading file: " << fileName << endl;
+    return EXIT_FAILURE;
+    }
+
+#ifndef PROFILE
+  if (!runValidation(grid))
+    {
+    return EXIT_FAILURE;
+    }
+#endif // not PROFILE
+
+#ifdef BENCHMARK
+  if (!runBenchmarks(grid))
+    {
+    return EXIT_FAILURE;
+    }
+
+  // Reference _sink to prevent optimizations from interfering with the
+  // benchmarks.
+  if (_sink.str().size() == 0)
+    {
+    return EXIT_FAILURE;
+    }
+#endif // BENCHMARK
+
+  return EXIT_SUCCESS;
+}
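
    The test above exercises the new vtkCellIterator API; the core traversal pattern it
    validates boils down to the following sketch (function name illustrative, not part of
    the test):

    #include "vtkCellIterator.h"
    #include "vtkGenericCell.h"
    #include "vtkNew.h"
    #include "vtkUnstructuredGrid.h"

    void iterateCells(vtkUnstructuredGrid *grid)
    {
      vtkNew<vtkGenericCell> cell;
      vtkCellIterator *it = grid->NewCellIterator();  // caller must Delete()
      for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
        {
        int cellType = it->GetCellType();          // cheap query, no vtkCell built
        vtkIdType nPts = it->GetNumberOfPoints();  // also cheap
        it->GetCell(cell.GetPointer());            // full cell only when needed
        (void)cellType; (void)nPts;
        }
      it->Delete();
    }
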
diff --git a/Common/DataModel/Testing/Cxx/TestPixelExtent.cxx b/Common/DataModel/Testing/Cxx/TestPixelExtent.cxx
new file mode 100644
index 0000000..0760ef0
--- /dev/null
+++ b/Common/DataModel/Testing/Cxx/TestPixelExtent.cxx
@@ -0,0 +1,146 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestPixelExtent.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkPixelExtent.h"
+#include "vtkPixelExtentIO.h"
+
+#include <iostream>
+#include <deque>
+
+using std::cerr;
+using std::endl;
+using std::deque;
+
+int TestPixelExtent(int argc, char* argv[])
+{
+  (void)argc;
+  (void)argv;
+
+  cerr << "CTEST_FULL_OUTPUT (Avoid ctest truncation of output)" << endl;
+
+  // small extent in the middle of the region of interest
+  vtkPixelExtent A(4,8,4,8);
+
+  // larger region that covers A
+  vtkPixelExtent B(A);
+  B.Grow(4);
+
+  // shift C to origin
+  vtkPixelExtent C(A);
+  C.Shift();
+
+  // shift D to upper right corner of larger region
+  vtkPixelExtent D(A);
+  int s1[2]={4,4};
+  D.Shift(s1);
+
+  bool testPass = true;
+
+  vtkPixelExtent tmp1;
+  vtkPixelExtent tmp2;
+  vtkPixelExtent tmp3;
+
+  // shift, intersect
+  tmp1 = C;
+  tmp2 = D;
+
+  tmp1 &= tmp2;
+
+  cerr << C << " & " << D << " = " << tmp1 << endl;
+
+  if (!tmp1.Empty())
+    {
+    cerr << "Test empty intersection failed" << endl;
+    testPass = false;
+    }
+
+
+  tmp1 = A;
+  int s2[2] = {-2,-2};
+  tmp1.Shift(s2);
+
+  tmp2 = A;
+  int s3[2] = {2,2};
+  tmp2.Shift(s3);
+
+  tmp3 = tmp1;
+  tmp3 &= tmp2;
+
+  cerr << tmp1 << " & " << tmp2 << " = " << tmp3 << endl;
+
+  if (!(tmp3 == vtkPixelExtent(6,6,6,6)))
+    {
+    cerr << "Test intersection failed" << endl;
+    testPass = false;
+    }
+
+  // shift, grow, union
+  tmp1 = C;
+  tmp2 = D;
+  tmp3 = tmp1;
+  tmp3 |= tmp2;
+
+  cerr << tmp1 << " | " << tmp2 << " = " << tmp3 << endl;
+
+  if (!(tmp3 == B))
+    {
+    cerr << "Test union fails" << endl;
+    testPass = false;
+    }
+
+  // subtraction
+  deque<vtkPixelExtent> tmp4;
+  vtkPixelExtent::Subtract(B, A, tmp4);
+
+  deque<vtkPixelExtent> tmp5;
+  tmp5.push_back(vtkPixelExtent(4, 8, 9, 12));
+  tmp5.push_back(vtkPixelExtent(9, 12, 9, 12));
+  tmp5.push_back(vtkPixelExtent(9, 12, 4, 8));
+  tmp5.push_back(vtkPixelExtent(0, 3, 4, 8));
+  tmp5.push_back(vtkPixelExtent(0, 3, 9, 12));
+  tmp5.push_back(vtkPixelExtent(4, 8, 0, 3));
+  tmp5.push_back(vtkPixelExtent(9, 12, 0, 3));
+  tmp5.push_back(vtkPixelExtent(0, 3, 0, 3));
+
+  size_t n = tmp4.size();
+  for (size_t i=0; i<n; ++i)
+    {
+    if (!(tmp4[i] == tmp5[i]))
+      {
+      cerr << "Test subtraction failed" << endl;
+      testPass = false;
+      break;
+      }
+    }
+
+  cerr << B << " - " << A << " = ";
+  if (n)
+    {
+    cerr << tmp4[0];
+    for (size_t i=1; i<n; ++i)
+      {
+      cerr << ", " << tmp4[i];
+      }
+    }
+  cerr << endl;
+
+  if (!testPass)
+    {
+    cerr << "Test fails" << endl;
+    return 1;
+    }
+
+  cerr << "Test passes" << endl;
+  return 0;
+}
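
    The operations checked above form a small set algebra over 2D index ranges; a condensed
    sketch of the same calls, following the constructor ordering used in the test
    (function name and values illustrative, not part of the test):

    #include "vtkPixelExtent.h"
    #include <deque>

    void pixelExtentSketch()
    {
      vtkPixelExtent inner(4, 8, 4, 8);   // same extent as A in the test above
      vtkPixelExtent outer(inner);
      outer.Grow(4);                      // covers inner with a 4-cell border

      vtkPixelExtent overlap(inner);
      overlap &= outer;                   // intersection; equals inner here

      std::deque<vtkPixelExtent> ring;
      vtkPixelExtent::Subtract(outer, inner, ring);  // the 8 blocks surrounding inner
      (void)overlap;
    }
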
diff --git a/Common/DataModel/Testing/Cxx/TestQuadraticPolygon.cxx b/Common/DataModel/Testing/Cxx/TestQuadraticPolygon.cxx
new file mode 100644
index 0000000..f562518
--- /dev/null
+++ b/Common/DataModel/Testing/Cxx/TestQuadraticPolygon.cxx
@@ -0,0 +1,408 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestQuadraticPolygon.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkMath.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkPoints.h"
+#include "vtkPolygon.h"
+#include "vtkQuadraticPolygon.h"
+
+class vtkQuadraticPolygonTest : public vtkQuadraticPolygon
+{
+public:
+  static vtkQuadraticPolygonTest *New();
+  vtkTypeMacro(vtkQuadraticPolygonTest, vtkQuadraticPolygon);
+
+  bool IsClose(double d1, double d2);
+  bool IsClose(double *point1, double *point2);
+
+  void InitializeCircle();
+  void InitializeSquare();
+  void InitializeSquareWithQuadraticEdge();
+
+  int TestGetSet();
+  int TestGetPermutations();
+  int TestInitializePolygon();
+  int TestIntersectWithLine();
+  int TestInterpolateFunctions();
+  int TestInterpolateFunctionsUsingMVC();
+
+  int TestAll();
+
+protected:
+  vtkQuadraticPolygonTest() : Tolerance(0.000001) {}
+private:
+  double Tolerance;
+};
+
+vtkStandardNewMacro(vtkQuadraticPolygonTest);
+
+int TestQuadraticPolygon(int, char *[])
+{
+  vtkNew<vtkQuadraticPolygonTest> test;
+  int result = test->TestAll();
+  cout << ((result == EXIT_SUCCESS) ? "SUCCESS" : "FAILURE") << endl;
+  return result;
+}
+
+int vtkQuadraticPolygonTest::TestAll()
+{
+  int result = EXIT_SUCCESS;
+
+  this->InitializeSquareWithQuadraticEdge();
+  result |= this->TestGetSet();
+  result |= this->TestGetPermutations();
+  result |= this->TestInitializePolygon();
+  result |= this->TestIntersectWithLine();
+  this->InitializeSquare();
+  result |= this->TestInterpolateFunctions();
+  result |= this->TestInterpolateFunctionsUsingMVC();
+
+  return result;
+}
+
+bool vtkQuadraticPolygonTest::IsClose(double v1, double v2)
+{
+  return (v1 < v2 ? (v2-v1 < this->Tolerance) : (v1-v2 < this->Tolerance));
+}
+
+bool vtkQuadraticPolygonTest::IsClose(double *point1, double *point2)
+{
+  return (vtkMath::Distance2BetweenPoints(point1, point2) <
+    this->Tolerance * this->Tolerance);
+}
+
+void vtkQuadraticPolygonTest::InitializeSquare()
+{
+  this->GetPointIds()->SetNumberOfIds(8);
+  this->GetPointIds()->SetId(0,0);
+  this->GetPointIds()->SetId(1,1);
+  this->GetPointIds()->SetId(2,2);
+  this->GetPointIds()->SetId(3,3);
+  this->GetPointIds()->SetId(4,4);
+  this->GetPointIds()->SetId(5,5);
+  this->GetPointIds()->SetId(6,6);
+  this->GetPointIds()->SetId(7,7);
+
+  this->GetPoints()->SetNumberOfPoints(8);
+  this->GetPoints()->SetPoint(0, 0.0, 0.0, 0.0);
+  this->GetPoints()->SetPoint(1, 2.0, 0.0, 0.0);
+  this->GetPoints()->SetPoint(2, 2.0, 2.0, 0.0);
+  this->GetPoints()->SetPoint(3, 0.0, 2.0, 0.0);
+  this->GetPoints()->SetPoint(4, 1.0, 0.0, 0.0);
+  this->GetPoints()->SetPoint(5, 2.0, 1.0, 0.0);
+  this->GetPoints()->SetPoint(6, 1.0, 2.0, 0.0);
+  this->GetPoints()->SetPoint(7, 0.0, 1.0, 0.0);
+}
+
+void vtkQuadraticPolygonTest::InitializeSquareWithQuadraticEdge()
+{
+  this->InitializeSquare();
+  this->GetPoints()->SetPoint(5, 3.0, 1.0, 0.0);
+}
+
+int vtkQuadraticPolygonTest::TestGetSet()
+{
+  if (this->GetCellType() != VTK_QUADRATIC_POLYGON)
+    {
+    cerr << "ERROR:  quadratic polygon type is " << this->GetCellType()
+         << ", should be " << VTK_QUADRATIC_POLYGON << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (this->GetCellDimension() != 2)
+    {
+    cerr << "ERROR:  quadratic polygon dim is "
+         << this->GetCellDimension()
+         << ", should be 2" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (this->GetNumberOfEdges() != 4)
+    {
+    cerr << "ERROR:  quadratic polygon edges number is "
+         << this->GetNumberOfEdges()
+         << ", should be 4" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (this->GetNumberOfFaces() != 0)
+    {
+    cerr << "ERROR:  quadratic polygon faces number is "
+         << this->GetNumberOfFaces()
+         << ", should be 0" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (this->GetFace(0) != 0)
+    {
+    cerr << "ERROR:  quadratic polygon face is " << this->GetFace(0)
+         << ", should be 0" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (this->GetEdge(0)->PointIds->GetId(0) != 0)
+    {
+    cerr << "ERROR:  quadratic polygon edge[0] point[0] id is "
+         << this->GetEdge(0)->PointIds->GetId(0)
+         << ", should be 0" << endl;
+    return EXIT_FAILURE;
+    }
+  if (this->GetEdge(0)->PointIds->GetId(1) != 1)
+    {
+    cerr << "ERROR:  quadratic polygon edge[0] point[1] id is "
+         << this->GetEdge(0)->PointIds->GetId(1)
+         << ", should be 1" << endl;
+    return EXIT_FAILURE;
+    }
+  if (this->GetEdge(0)->PointIds->GetId(2) != 4)
+    {
+    cerr << "ERROR:  quadratic polygon edge[0] point[2] id is "
+         << this->GetEdge(0)->PointIds->GetId(2)
+         << ", should be 4" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (this->IsPrimaryCell() != 0)
+    {
+    cerr << "ERROR:  quadratic polygon primary boolean is "
+         << this->IsPrimaryCell()
+         << ", should be 0" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (!this->GetUseMVCInterpolation())
+    {
+    cerr << "ERROR:  quadratic polygon MVC boolean is "
+         << this->GetUseMVCInterpolation()
+         << ", should be 1" << endl;
+    return EXIT_FAILURE;
+    }
+
+  this->SetUseMVCInterpolation(false);
+  if (this->GetUseMVCInterpolation())
+    {
+    cerr << "ERROR:  quadratic polygon MVC boolean is "
+         << this->GetUseMVCInterpolation()
+         << ", should be 0" << endl;
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
+
+int vtkQuadraticPolygonTest::TestGetPermutations()
+{
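+  // The permutations map between the quadratic-polygon ordering, which stores
+  // the corner points first (indices 0-3 here) and the mid-edge points last
+  // (indices 4-7), and the equivalent linear-polygon ordering, which
+  // interleaves them (corner, mid-edge, corner, ...), as the reference arrays
+  // below illustrate.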
+  // reference permutation
+  vtkIdType temp[] = { 0, 2, 4, 6, 1, 3, 5, 7 };
+  vtkNew<vtkIdList> permutationToPolygonRef;
+  permutationToPolygonRef->SetNumberOfIds(8);
+  for (vtkIdType i = 0; i < 8; i++)
+    {
+    permutationToPolygonRef->SetId(i, temp[i]);
+    }
+
+  // computed permutation
+  vtkNew<vtkIdList> permutationToPolygon;
+  vtkQuadraticPolygon::GetPermutationToPolygon(8, permutationToPolygon.GetPointer());
+
+  // reference permutation
+  vtkIdType temp2[] = { 0, 4, 1, 5, 2, 6, 3, 7 };
+  vtkNew<vtkIdList> permutationFromPolygonRef;
+  permutationFromPolygonRef->SetNumberOfIds(8);
+  for (vtkIdType i = 0; i < 8; i++)
+    {
+    permutationFromPolygonRef->SetId(i, temp2[i]);
+    }
+
+  // computed permutation
+  vtkNew<vtkIdList> permutationFromPolygon;
+  vtkQuadraticPolygon::GetPermutationFromPolygon(8,
+    permutationFromPolygon.GetPointer());
+
+  for (vtkIdType i = 0; i < 8; i++)
+    {
+    if (permutationToPolygonRef->GetId(i) != permutationToPolygon->GetId(i))
+      {
+      cerr << "ERROR:  permutation to polygon is wrong" << endl;
+      return EXIT_FAILURE;
+      }
+    if (permutationFromPolygonRef->GetId(i) != permutationFromPolygon->GetId(i))
+      {
+      cerr << "ERROR:  permutation from polygon is wrong" << endl;
+      return EXIT_FAILURE;
+      }
+    }
+
+  return EXIT_SUCCESS;
+}
+
+int vtkQuadraticPolygonTest::TestInitializePolygon()
+{
+  // reference permutation
+  vtkIdType temp[] = { 0, 2, 4, 6, 1, 3, 5, 7 };
+  vtkNew<vtkIdList> permutationToPolygonRef;
+  permutationToPolygonRef->SetNumberOfIds(8);
+  for (vtkIdType i = 0; i < 8; i++)
+    {
+    permutationToPolygonRef->SetId(i, temp[i]);
+    }
+
+  this->InitializePolygon();
+  vtkPolygon *polygon = this->Polygon;
+
+  for (vtkIdType i = 0; i < 8; i++)
+    {
+    if (this->GetPointIds()->GetId(i) !=
+         polygon->GetPointIds()->GetId(permutationToPolygonRef->GetId(i)))
+      {
+      cerr << "ERROR:  quadratic polygon point id at index " << i
+           << " is " << this->GetPointIds()->GetId(i)
+           << ", should be "
+           << polygon->GetPointIds()->GetId(permutationToPolygonRef->GetId(i))
+           << endl;
+      return EXIT_FAILURE;
+      }
+    for (int j = 0; j < 3; j++)
+      {
+      if (!this->IsClose( this->GetPoints()->GetPoint(i)[j] ,
+                           polygon->GetPoints()->GetPoint(permutationToPolygonRef->GetId(i))[j]
+                        ))
+        {
+        cerr << "ERROR:  quadratic polygon point at index " << i
+             << " (coord " << j << ") is " << this->GetPoints()->GetPoint(i)[j]
+             << ", should be "
+             << polygon->GetPoints()->GetPoint(permutationToPolygonRef->GetId(i))[j]
+             << endl;
+        return EXIT_FAILURE;
+        }
+      }
+    }
+
+  return EXIT_SUCCESS;
+}
+
+int vtkQuadraticPolygonTest::TestIntersectWithLine()
+{
+  double t, x[3], pcoords[3];
+  int subId;
+
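+  // The cell was initialized with a quadratic edge whose mid-edge node was
+  // moved out to (3,1,0), so a vertical line at x=2.5 crosses the cell while
+  // a line at x=3.5 (tested further below) does not.
+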
+  double p1[3] = { 2.5, 1.0, -1.0 };
+  double p2[3] = { 2.5, 1.0, 1.0 };
+  int intersect = this->IntersectWithLine(p1, p2, 0.0, t, x, pcoords, subId);
+
+  if (x[0] != 2.5 || x[1] != 1.0 || x[2] != 0.0)
+    {
+    cerr << "ERROR:  vtkQuadraticPolygon::IntersectWithLine returns point ("
+         << x[0] << "," << x[1] << "," << x[2] << ")"
+         << ", should return point (2.5,1.0,0.0)" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (!intersect)
+    {
+    cerr << "ERROR:  vtkQuadraticPolygon::IntersectWithLine returns " << intersect
+         << ", should return 1" << endl;
+    return EXIT_FAILURE;
+    }
+
+  p1[0] = 3.5;
+  p2[0] = 3.5;
+  intersect = this->IntersectWithLine(p1, p2, 0.0, t, x, pcoords, subId);
+
+  if (intersect)
+    {
+    cerr << "ERROR:  vtkQuadraticPolygon::IntersectWithLine returns " << intersect
+         << ", should return 0" << endl;
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
+
+int vtkQuadraticPolygonTest::TestInterpolateFunctions()
+{
+  int nbPoints = this->GetNumberOfPoints();
+
+  double x[3] = { 1.0, 1.0, 0.0 };
+  double *weights = new double[nbPoints];
+  double w1 = 1. / 12.;
+  double w2 = 1. / 6.;
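+  // Expected weights at the square's center (1,1,0): w1 for the four corner
+  // nodes, w2 for the four mid-edge nodes (these match an inverse squared
+  // distance weighting of the eight nodes).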
+  this->SetUseMVCInterpolation(false);
+  this->InterpolateFunctions(x, weights);
+
+  int i;
+  for (i = 0; i < nbPoints/2; i++)
+    {
+    if (!this->IsClose(weights[i],w1))
+      {
+      cerr << "ERROR:  quadratic polygon weights is " << weights[i]
+           << ", should be " << w1 << endl;
+      delete [] weights;
+      return EXIT_FAILURE;
+      }
+    }
+  for ( ; i < nbPoints; i++)
+    {
+    if (!this->IsClose(weights[i],w2))
+      {
+      cerr << "ERROR:  quadratic polygon weights is " << weights[i]
+           << ", should be " << w2 << endl;
+      delete [] weights;
+      return EXIT_FAILURE;
+      }
+    }
+
+  delete [] weights;
+  return EXIT_SUCCESS;
+}
+
+int vtkQuadraticPolygonTest::TestInterpolateFunctionsUsingMVC()
+{
+  int nbPoints = this->GetNumberOfPoints();
+
+  double x[3] = { 1.0, 1.0, 0.0 };
+  double *weights = new double[nbPoints];
+  double w1 = (sqrt(2.) - 1.) / 4.;
+  double w2 = (sqrt(2.) - 1.) / (2. * sqrt(2.));
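+  // Expected mean value coordinate weights at the square's center (1,1,0):
+  // w1 for the four corner nodes, w2 for the four mid-edge nodes.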
+  this->SetUseMVCInterpolation(true);
+  this->InterpolateFunctions(x, weights);
+
+  int i;
+  for (i = 0; i < nbPoints/2; i++)
+    {
+    if (!this->IsClose(weights[i],w1))
+      {
+      cerr << "ERROR:  quadratic polygon weights is " << weights[i]
+           << ", should be " << w1 << endl;
+      delete [] weights;
+      return EXIT_FAILURE;
+      }
+    }
+  for ( ; i < nbPoints; i++)
+    {
+    if (!this->IsClose(weights[i],w2))
+      {
+      cerr << "ERROR:  quadratic polygon weights is " << weights[i]
+           << ", should be " << w2 << endl;
+      delete [] weights;
+      return EXIT_FAILURE;
+      }
+    }
+
+  delete [] weights;
+  return EXIT_SUCCESS;
+}
diff --git a/Common/DataModel/Testing/Cxx/TestQuadraticPolygonFilters.cxx b/Common/DataModel/Testing/Cxx/TestQuadraticPolygonFilters.cxx
new file mode 100644
index 0000000..e3289a5
--- /dev/null
+++ b/Common/DataModel/Testing/Cxx/TestQuadraticPolygonFilters.cxx
@@ -0,0 +1,271 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestQuadraticPolygonFilters.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkActor.h"
+#include "vtkCamera.h"
+#include "vtkCellData.h"
+#include "vtkCellPicker.h"
+#include "vtkClipDataSet.h"
+#include "vtkContourFilter.h"
+#include "vtkDataSetMapper.h"
+#include "vtkDoubleArray.h"
+#include "vtkGeometryFilter.h"
+#include "vtkIdTypeArray.h"
+#include "vtkMath.h"
+#include "vtkNew.h"
+#include "vtkOutlineFilter.h"
+#include "vtkPlane.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkPolyDataNormals.h"
+#include "vtkProperty.h"
+#include "vtkRegressionTestImage.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkTransform.h"
+#include "vtkUnstructuredGrid.h"
+
+int TestPicker(vtkRenderWindow *renWin, vtkRenderer *renderer);
+
+vtkIdType GetCellIdFromPickerPosition(vtkRenderer *ren, int x, int y);
+
+int TestQuadraticPolygonFilters(int argc, char* argv[])
+{
+  // create the object
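+  // (the object is a prism-like grid: two VTK_QUADRATIC_POLYGON cells at z=0
+  // and z=1, each listing its npts/4 corner points followed by its npts/4
+  // mid-edge points, joined along the corner edges by linear VTK_QUAD cells)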
+  int npts = 12;
+
+  vtkIdType* connectivityQuadPoly1 = new vtkIdType[npts / 2];
+  vtkIdType* connectivityQuadPoly2 = new vtkIdType[npts / 2];
+  vtkIdType* connectivityQuads = new vtkIdType[npts];
+
+  vtkNew<vtkPoints> points;
+  points->SetNumberOfPoints(npts);
+
+  double ray = 1.0;
+  double thetaStep = 4.0 * vtkMath::Pi() / npts;
+  double theta;
+  for (int i = 0; i < npts/2; i++)
+    {
+    if (i < npts / 4)
+      {
+      theta = thetaStep * i * 2;
+      }
+    else
+      {
+      theta = thetaStep * (i-npts/4) * 2 + thetaStep;
+      }
+
+    double x = ray * cos(theta);
+    double y = ray * sin(theta);
+    points->SetPoint(i, x, y, 0.0);
+    points->SetPoint(npts / 2 + i, x, y, 1.0);
+
+    connectivityQuadPoly1[i] = i;
+    connectivityQuadPoly2[i] = npts / 2 + i;
+    if (i < npts / 4)
+      {
+      connectivityQuads[4*i+0] = i;
+      connectivityQuads[4*i+1] = (i + 1) % (npts / 4);
+      connectivityQuads[4*i+2] = ((i + 1) % (npts / 4)) + npts / 2;
+      connectivityQuads[4*i+3] = i + npts / 2;
+      }
+    }
+
+  vtkNew<vtkUnstructuredGrid> ugrid;
+  ugrid->SetPoints(points.GetPointer());
+  ugrid->InsertNextCell(VTK_QUADRATIC_POLYGON, npts/2, connectivityQuadPoly1);
+  ugrid->InsertNextCell(VTK_QUADRATIC_POLYGON, npts/2, connectivityQuadPoly2);
+  for (int i = 0; i < npts/4; i++)
+    {
+    ugrid->InsertNextCell(VTK_QUAD,4,connectivityQuads + i * 4);
+    }
+
+  delete[] connectivityQuadPoly1;
+  delete[] connectivityQuadPoly2;
+  delete[] connectivityQuads;
+
+  // add a cell data array so the original cell id can be recovered with the picker
+  vtkNew<vtkIdTypeArray> id;
+  id->SetName("CellID");
+  id->SetNumberOfComponents(1);
+  id->SetNumberOfTuples(ugrid->GetNumberOfCells());
+  for (int i = 0; i < ugrid->GetNumberOfCells(); i++)
+    {
+    id->SetValue(i, i);
+    }
+  ugrid->GetCellData()->AddArray(id.GetPointer());
+
+  // Setup the scalars
+  vtkNew<vtkDoubleArray> scalars;
+  scalars->SetNumberOfComponents(1);
+  scalars->SetNumberOfTuples(ugrid->GetNumberOfPoints());
+  scalars->SetName("Scalars");
+  scalars->SetValue(0, 1);
+  scalars->SetValue(1, 2);
+  scalars->SetValue(2, 2);
+  scalars->SetValue(3, 1);
+  scalars->SetValue(4, 2);
+  scalars->SetValue(5, 1);
+  scalars->SetValue(6, 1);
+  scalars->SetValue(7, 2);
+  scalars->SetValue(8, 2);
+  scalars->SetValue(9, 1);
+  scalars->SetValue(10, 2);
+  scalars->SetValue(11, 1);
+  ugrid->GetPointData()->SetScalars(scalars.GetPointer());
+
+  // clip filter
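+  // (no clip function is set, so vtkClipDataSet clips by the point scalars at
+  // the value 1.5; the plane-based variant is left commented out below)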
+  //vtkNew<vtkPlane> plane;
+  //plane->SetOrigin(0, 0, 0);
+  //plane->SetNormal(1, 0, 0);
+  vtkNew<vtkClipDataSet> clip;
+  //clip->SetClipFunction(plane);
+  //clip->GenerateClipScalarsOn();
+  clip->SetValue(1.5);
+  clip->SetInputData(ugrid.GetPointer());
+  clip->Update();
+  vtkNew<vtkDataSetMapper> clipMapper;
+  clipMapper->SetInputConnection( clip->GetOutputPort());
+  clipMapper->SetScalarRange(1.0, 2.0);
+  clipMapper->InterpolateScalarsBeforeMappingOn();
+  vtkNew<vtkActor> clipActor;
+  clipActor->SetPosition(0.0, 2.0, 0.0);
+  clipActor->SetMapper(clipMapper.GetPointer());
+
+  // contour filter
+  vtkNew<vtkContourFilter> contourFilter;
+  contourFilter->SetInputData(ugrid.GetPointer());
+  contourFilter->SetValue(0,1.5);
+  contourFilter->Update();
+  vtkNew<vtkPolyDataNormals> contourNormals;
+  contourNormals->SetInputConnection(contourFilter->GetOutputPort());
+  vtkNew<vtkPolyDataMapper> contourMapper;
+  contourMapper->SetInputConnection(contourNormals->GetOutputPort());
+  contourMapper->ScalarVisibilityOff();
+  vtkNew<vtkActor> contourActor;
+  contourActor->SetMapper(contourMapper.GetPointer());
+  contourActor->GetProperty()->SetColor(0,0,0);
+  contourActor->SetPosition(0.0,0.01,0.01);
+
+  // outline filter
+  vtkNew<vtkOutlineFilter> outlineFilter;
+  outlineFilter->SetInputData(ugrid.GetPointer());
+  vtkNew<vtkPolyDataMapper> outlineMapper;
+  outlineMapper->SetInputConnection(outlineFilter->GetOutputPort());
+  vtkNew<vtkActor> outlineActor;
+  outlineActor->SetMapper(outlineMapper.GetPointer());
+  outlineActor->GetProperty()->SetColor(0,0,0);
+  outlineActor->SetPosition(0.0,0.01,0.01);
+
+  // geometry filter
+  vtkNew<vtkGeometryFilter> geometryFilter;
+  geometryFilter->SetInputData(ugrid.GetPointer());
+  geometryFilter->Update();
+  vtkNew<vtkPolyDataMapper> geometryMapper;
+  geometryMapper->SetInputConnection(geometryFilter->GetOutputPort());
+  geometryMapper->SetScalarRange(1.0, 2.0);
+  geometryMapper->InterpolateScalarsBeforeMappingOn();
+  vtkNew<vtkActor> geometryActor;
+  geometryActor->SetMapper(geometryMapper.GetPointer());
+
+  // drawing
+  vtkNew<vtkRenderer> ren;
+  ren->SetBackground(1, 1, 1);
+  ren->AddActor(geometryActor.GetPointer());
+  ren->AddActor(outlineActor.GetPointer());
+  ren->AddActor(clipActor.GetPointer());
+  ren->AddActor(contourActor.GetPointer());
+  vtkNew<vtkRenderWindow> renWin;
+  renWin->AddRenderer(ren.GetPointer());
+  renWin->SetSize(600, 600);
+  renWin->SetMultiSamples(0);
+  vtkNew<vtkRenderWindowInteractor> iren;
+  iren->SetRenderWindow(renWin.GetPointer());
+  renWin->Render();
+
+  // tests
+  if (TestPicker(renWin.GetPointer(), ren.GetPointer()) == EXIT_FAILURE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  int retVal = vtkRegressionTestImage(renWin.GetPointer());
+  if ( retVal == vtkRegressionTester::DO_INTERACTOR )
+    {
+    iren->Start();
+    }
+
+  return retVal;
+}
+
+int TestPicker(vtkRenderWindow *renWin, vtkRenderer *renderer)
+{
+  // Sets the camera
+  double cPos[3] = { 5.65647, 0.857996, 6.71491 };
+  double cUp[3] = { 0.0212226, 0.999769, 0.00352794 };
+  vtkCamera *camera = renderer->GetActiveCamera();
+  camera->SetPosition(cPos);
+  camera->SetViewUp(cUp);
+  renderer->ResetCameraClippingRange();
+  renWin->Render();
+  renWin->Render();
+  renWin->Render();
+
+  // Sets the reference values
+  int nbTests = 17;
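+  // each test is a triplet: display x, display y, expected "CellID" value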
+  int values[] = { 218, 244, 1,  290, 244, 1,
+                   201, 168, 1,  319, 166, 1,
+                   223, 63,  1,  303, 46,  1,
+                   330, 238, 2,  420, 173, 2,
+                   376, 165, 2,  372, 128, 4,
+                   411, 149, 4,  348, 266, 0,
+                   416, 203, 0,  391, 269, 0,
+                   412, 119, 0,  391, 61,  0,
+                   340, 72,  0 };
+
+  for (int i = 0; i < nbTests * 3; i += 3)
+    {
+    if ( GetCellIdFromPickerPosition(renderer, values[i], values[i+1]) !=  values[i+2] )
+      {
+      cerr << "ERROR:  selected cell type is "
+           << GetCellIdFromPickerPosition(renderer, values[i], values[i+1])
+           << ", should be " << values[i+2] << endl;
+      return EXIT_FAILURE;
+      }
+    }
+
+  return EXIT_SUCCESS;
+}
+
+vtkIdType GetCellIdFromPickerPosition(vtkRenderer *ren, int x, int y)
+{
+  vtkNew<vtkCellPicker> picker;
+  picker->SetTolerance(0.0005);
+
+  // Pick from this location.
+  picker->Pick(x, y, 0, ren);
+
+  vtkIdType cellId = -1;
+  if (picker->GetDataSet())
+    {
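+      // The picker returns the data set of the picked mapper (a filter
+      // output), whose cell indices need not match the input grid, so the
+      // original cell id is recovered from the "CellID" array added above.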
+      vtkIdTypeArray * ids = vtkIdTypeArray::SafeDownCast(
+        picker->GetDataSet()->GetCellData()->GetArray("CellID"));
+      cellId = ids->GetValue(picker->GetCellId());
+    }
+
+  return cellId;
+}
diff --git a/Common/DataModel/Testing/Cxx/TestSmoothErrorMetric.cxx b/Common/DataModel/Testing/Cxx/TestSmoothErrorMetric.cxx
index 085331a..67c584c 100644
--- a/Common/DataModel/Testing/Cxx/TestSmoothErrorMetric.cxx
+++ b/Common/DataModel/Testing/Cxx/TestSmoothErrorMetric.cxx
@@ -38,7 +38,7 @@
 #include "vtkGenericGeometryFilter.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyDataMapper.h"
 #include "vtkPolyData.h"
diff --git a/Common/DataModel/Testing/Data/Baseline/ImplicitSum.png.md5 b/Common/DataModel/Testing/Data/Baseline/ImplicitSum.png.md5
new file mode 100644
index 0000000..f4d88bb
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/ImplicitSum.png.md5
@@ -0,0 +1 @@
+84ee46118674c6151d7c66897c08a365
diff --git a/Common/DataModel/Testing/Data/Baseline/LineIntersectQuadraticCells.png.md5 b/Common/DataModel/Testing/Data/Baseline/LineIntersectQuadraticCells.png.md5
new file mode 100644
index 0000000..e72636f
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/LineIntersectQuadraticCells.png.md5
@@ -0,0 +1 @@
+800e9fd8d04137ff7f2ae2fedc128bfd
diff --git a/Common/DataModel/Testing/Data/Baseline/PerlinNoise.png.md5 b/Common/DataModel/Testing/Data/Baseline/PerlinNoise.png.md5
new file mode 100644
index 0000000..2b21a25
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/PerlinNoise.png.md5
@@ -0,0 +1 @@
+ecb5ca612f29d03499445dc69f24e832
diff --git a/Common/DataModel/Testing/Data/Baseline/SelectionLoop.png.md5 b/Common/DataModel/Testing/Data/Baseline/SelectionLoop.png.md5
new file mode 100644
index 0000000..826d864
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/SelectionLoop.png.md5
@@ -0,0 +1 @@
+6352d72966b967927e27c44994e53b94
diff --git a/Common/DataModel/Testing/Data/Baseline/TestConvexPointSet.png.md5 b/Common/DataModel/Testing/Data/Baseline/TestConvexPointSet.png.md5
new file mode 100644
index 0000000..aaf3ff5
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/TestConvexPointSet.png.md5
@@ -0,0 +1 @@
+ca6d660ff4c6f6553afe7fe435e48293
diff --git a/Common/DataModel/Testing/Data/Baseline/TestICPTransform.png.md5 b/Common/DataModel/Testing/Data/Baseline/TestICPTransform.png.md5
new file mode 100644
index 0000000..b786f27
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/TestICPTransform.png.md5
@@ -0,0 +1 @@
+e1eafc2186ca15e25dc622ce9eab051d
diff --git a/Common/DataModel/Testing/Data/Baseline/TestICPTransform_1.png.md5 b/Common/DataModel/Testing/Data/Baseline/TestICPTransform_1.png.md5
new file mode 100644
index 0000000..c615046
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/TestICPTransform_1.png.md5
@@ -0,0 +1 @@
+de6cb428f49e943e64ebc195f78b9386
diff --git a/Common/DataModel/Testing/Data/Baseline/TestQuadraticPolygonFilters.png.md5 b/Common/DataModel/Testing/Data/Baseline/TestQuadraticPolygonFilters.png.md5
new file mode 100644
index 0000000..df9edc4
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/TestQuadraticPolygonFilters.png.md5
@@ -0,0 +1 @@
+d41d8cd98f00b204e9800998ecf8427e
diff --git a/Common/DataModel/Testing/Data/Baseline/TestQuadricClustering.png.md5 b/Common/DataModel/Testing/Data/Baseline/TestQuadricClustering.png.md5
new file mode 100644
index 0000000..54edd05
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/TestQuadricClustering.png.md5
@@ -0,0 +1 @@
+895f29de13bc1b7b6199dbeefbdf90cc
diff --git a/Common/DataModel/Testing/Data/Baseline/TestQuadricClustering_1.png.md5 b/Common/DataModel/Testing/Data/Baseline/TestQuadricClustering_1.png.md5
new file mode 100644
index 0000000..b925360
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/TestQuadricClustering_1.png.md5
@@ -0,0 +1 @@
+74576edb6d2fd958acefd15b6f63c0b9
diff --git a/Common/DataModel/Testing/Data/Baseline/TestStructuredGrid.png.md5 b/Common/DataModel/Testing/Data/Baseline/TestStructuredGrid.png.md5
new file mode 100644
index 0000000..99bebbd
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/TestStructuredGrid.png.md5
@@ -0,0 +1 @@
+95aabd4e2023e9a6b5d3c704cf4d2f6d
diff --git a/Common/DataModel/Testing/Data/Baseline/headBone.png.md5 b/Common/DataModel/Testing/Data/Baseline/headBone.png.md5
new file mode 100644
index 0000000..28c64e8
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/headBone.png.md5
@@ -0,0 +1 @@
+53d2bf40af86046ff168bb9b84e960b4
diff --git a/Common/DataModel/Testing/Data/Baseline/quadricCut.png.md5 b/Common/DataModel/Testing/Data/Baseline/quadricCut.png.md5
new file mode 100644
index 0000000..c622e87
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/quadricCut.png.md5
@@ -0,0 +1 @@
+c0c7c74d3285e1b2fdfadbcab770ea5e
diff --git a/Common/DataModel/Testing/Data/Baseline/scalarColors.png.md5 b/Common/DataModel/Testing/Data/Baseline/scalarColors.png.md5
new file mode 100644
index 0000000..62fb004
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/scalarColors.png.md5
@@ -0,0 +1 @@
+bd904038dfcd1505bd57eb122e32f696
diff --git a/Common/DataModel/Testing/Data/Baseline/scalarConn.png.md5 b/Common/DataModel/Testing/Data/Baseline/scalarConn.png.md5
new file mode 100644
index 0000000..890e57b
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/scalarConn.png.md5
@@ -0,0 +1 @@
+3cd48f8d4b7ff5ff4bee3b913b0b9288
diff --git a/Common/DataModel/Testing/Data/Baseline/scalarConn_1.png.md5 b/Common/DataModel/Testing/Data/Baseline/scalarConn_1.png.md5
new file mode 100644
index 0000000..197ec0f
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/scalarConn_1.png.md5
@@ -0,0 +1 @@
+b00a3661d3b4bcdf03eb75ab66e7975f
diff --git a/Common/DataModel/Testing/Data/Baseline/scalarConn_2.png.md5 b/Common/DataModel/Testing/Data/Baseline/scalarConn_2.png.md5
new file mode 100644
index 0000000..38a8d10
--- /dev/null
+++ b/Common/DataModel/Testing/Data/Baseline/scalarConn_2.png.md5
@@ -0,0 +1 @@
+76b3721776710706bba14a78c79bf565
diff --git a/Common/DataModel/Testing/Python/CMakeLists.txt b/Common/DataModel/Testing/Python/CMakeLists.txt
index 9530afc..8845ddb 100644
--- a/Common/DataModel/Testing/Python/CMakeLists.txt
+++ b/Common/DataModel/Testing/Python/CMakeLists.txt
@@ -1,17 +1,15 @@
-add_test_python(ImplicitSum.py Filtering)
-add_test_python(LineIntersectQuadraticCells.py Graphics)
-add_test_python(PerlinNoise.py Filtering)
-add_test_python(SelectionLoop.py Filtering)
-add_test_python(TestNumericArrayImageData.py)
-add_test_python(TestQuadricClustering.py Graphics)
-add_test_python(TestStructuredGrid.py Graphics)
-add_test_python(TestTemplates.py)
-add_test_python(headBone.py Graphics)
-add_test_python(scalarColors.py Graphics)
-add_test_python(scalarConn.py Graphics)
-add_test_python(TestConvexPointSet.py Graphics)
-add_test_python1(otherDataSetAttributes.py)
-if (VTK_DATA_ROOT)
-  add_test_python1(quadricCut.py Baseline/Graphics)
-  add_test_python1(TestICPTransform.py Baseline/Hybrid)
-endif()
+vtk_add_test_python(ImplicitSum.py)
+vtk_add_test_python(LineIntersectQuadraticCells.py)
+vtk_add_test_python(PerlinNoise.py)
+vtk_add_test_python(SelectionLoop.py)
+vtk_add_test_python(TestNumericArrayImageData.py NO_DATA NO_VALID)
+vtk_add_test_python(TestQuadricClustering.py)
+vtk_add_test_python(TestStructuredGrid.py)
+vtk_add_test_python(TestTemplates.py NO_DATA NO_VALID)
+vtk_add_test_python(headBone.py)
+vtk_add_test_python(scalarColors.py)
+vtk_add_test_python(scalarConn.py)
+vtk_add_test_python(TestConvexPointSet.py)
+vtk_add_test_python(otherDataSetAttributes.py NO_DATA NO_VALID NO_RT)
+vtk_add_test_python(quadricCut.py NO_RT)
+vtk_add_test_python(TestICPTransform.py NO_RT)
diff --git a/Common/DataModel/Testing/Python/TestICPTransform.py b/Common/DataModel/Testing/Python/TestICPTransform.py
index c881d49..00e0fc0 100755
--- a/Common/DataModel/Testing/Python/TestICPTransform.py
+++ b/Common/DataModel/Testing/Python/TestICPTransform.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestICPTransform.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Hybrid
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Common/DataModel/Testing/Python/TestNumericArrayImageData.py b/Common/DataModel/Testing/Python/TestNumericArrayImageData.py
index 9563a00..e648b2a 100755
--- a/Common/DataModel/Testing/Python/TestNumericArrayImageData.py
+++ b/Common/DataModel/Testing/Python/TestNumericArrayImageData.py
@@ -9,10 +9,6 @@ this for all PNG images in a particular directory.
 
 The test naturally requires Numeric Python to be installed:
   http://numpy.sf.net
-
-Run this test like so:
-vtkpython TestNumericArrayImageData.py -B $VTK_DATA_ROOT/Baseline/Imaging
-
 """
 
 # This test requires Numeric.
diff --git a/Common/DataModel/Testing/Python/quadricCut.py b/Common/DataModel/Testing/Python/quadricCut.py
index d1d100a..c1af0ff 100755
--- a/Common/DataModel/Testing/Python/quadricCut.py
+++ b/Common/DataModel/Testing/Python/quadricCut.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython quadricCut.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Common/DataModel/Testing/Tcl/CMakeLists.txt b/Common/DataModel/Testing/Tcl/CMakeLists.txt
index 9bf7846..882510f 100644
--- a/Common/DataModel/Testing/Tcl/CMakeLists.txt
+++ b/Common/DataModel/Testing/Tcl/CMakeLists.txt
@@ -1,25 +1,17 @@
-# Tests without test images
-#
-add_test(NAME ${vtk-module}Tcl-otherDataSetAttributes
-         COMMAND ${VTK_TCL_EXE}
-         ${CMAKE_CURRENT_SOURCE_DIR}/otherDataSetAttributes.tcl
-         -A ${VTK_SOURCE_DIR}/Wrapping/Tcl)
+vtk_add_test_tcl(otherDataSetAttributes.tcl NO_DATA NO_RT)
 
-# Tests with images
-if(VTK_DATA_ROOT)
-  add_test_tcl(headBone Graphics)
-endif()
+vtk_add_test_tcl(headBone)
 
-add_test_tcl(ImplicitSum Filtering)
-add_test_tcl(PerlinNoise Filtering)
-add_test_tcl(SelectionLoop Filtering)
+vtk_add_test_tcl(ImplicitSum)
+vtk_add_test_tcl(PerlinNoise)
+vtk_add_test_tcl(SelectionLoop)
 
-add_test_tcl(LineIntersectQuadraticCells Graphics)
-add_test_tcl(quadricCut Graphics)
-add_test_tcl(scalarColors Graphics)
-add_test_tcl(scalarConn Graphics)
-add_test_tcl(TestConvexPointSet Graphics)
-add_test_tcl(TestQuadricClustering Graphics)
-add_test_tcl(TestStructuredGrid Graphics)
+vtk_add_test_tcl(LineIntersectQuadraticCells)
+vtk_add_test_tcl(quadricCut)
+vtk_add_test_tcl(scalarColors)
+vtk_add_test_tcl(scalarConn)
+vtk_add_test_tcl(TestConvexPointSet)
+vtk_add_test_tcl(TestQuadricClustering)
+vtk_add_test_tcl(TestStructuredGrid)
 
-add_test_tcl(TestICPTransform Hybrid)
+vtk_add_test_tcl(TestICPTransform)
diff --git a/Common/DataModel/module.cmake b/Common/DataModel/module.cmake
index c840393..e3ea41a 100644
--- a/Common/DataModel/module.cmake
+++ b/Common/DataModel/module.cmake
@@ -4,11 +4,14 @@ vtk_module(vtkCommonDataModel
     vtkCommonMath
     vtkCommonMisc
     vtkCommonTransforms
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkTestingCore
     vtkTestingRendering
     vtkInteractionStyle
     vtkCommonExecutionModel
+    vtkFiltersModeling
     vtkIOGeometry
     vtkIOLegacy
     vtkIOXML
diff --git a/Common/DataModel/vtkAMRDataInternals.cxx b/Common/DataModel/vtkAMRDataInternals.cxx
index d89f4d4..8bb33a0 100644
--- a/Common/DataModel/vtkAMRDataInternals.cxx
+++ b/Common/DataModel/vtkAMRDataInternals.cxx
@@ -16,7 +16,7 @@
 #include "vtkUniformGrid.h"
 #include "vtkObjectFactory.h"
 
-#include <assert.h>
+#include <cassert>
 vtkStandardNewMacro(vtkAMRDataInternals);
 
 vtkAMRDataInternals::Block::Block(unsigned int i, vtkUniformGrid* g)
diff --git a/Common/DataModel/vtkAMRInformation.cxx b/Common/DataModel/vtkAMRInformation.cxx
index 78abf68..6f6aa3f 100644
--- a/Common/DataModel/vtkAMRInformation.cxx
+++ b/Common/DataModel/vtkAMRInformation.cxx
@@ -21,7 +21,7 @@
 #include "vtkBoundingBox.h"
 #include "vtkAMRBox.h"
 #include "vtkDoubleArray.h"
-#include <assert.h>
+#include <cassert>
 #include <set>
 
 vtkStandardNewMacro(vtkAMRInformation);
@@ -386,7 +386,7 @@ double* vtkAMRInformation::GetOrigin()
   return this->Origin;
 }
 
-void vtkAMRInformation::SetOrigin( const double* origin)
+void vtkAMRInformation::SetOrigin(const double* origin)
 {
   for(int d=0; d<3; d++)
     {
diff --git a/Common/DataModel/vtkAbstractCellLocator.cxx b/Common/DataModel/vtkAbstractCellLocator.cxx
index 6021970..78c25f2 100644
--- a/Common/DataModel/vtkAbstractCellLocator.cxx
+++ b/Common/DataModel/vtkAbstractCellLocator.cxx
@@ -58,11 +58,8 @@ bool vtkAbstractCellLocator::StoreCellBounds()
 //----------------------------------------------------------------------------
 void vtkAbstractCellLocator::FreeCellBounds()
 {
-  if (this->CellBounds)
-    {
-    delete [] this->CellBounds;
-    this->CellBounds = NULL;
-    }
+  delete [] this->CellBounds;
+  this->CellBounds = NULL;
 }
 //----------------------------------------------------------------------------
 int vtkAbstractCellLocator::IntersectWithLine(
diff --git a/Common/DataModel/vtkAtom.cxx b/Common/DataModel/vtkAtom.cxx
index aebe940..554191e 100644
--- a/Common/DataModel/vtkAtom.cxx
+++ b/Common/DataModel/vtkAtom.cxx
@@ -17,7 +17,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkVector.h"
 #include "vtkVectorOperators.h"
 
-#include <assert.h>
+#include <cassert>
 
 //----------------------------------------------------------------------------
 vtkAtom::vtkAtom(vtkMolecule *parent, vtkIdType id)
@@ -28,11 +28,6 @@ vtkAtom::vtkAtom(vtkMolecule *parent, vtkIdType id)
 }
 
 //----------------------------------------------------------------------------
-vtkAtom::~vtkAtom()
-{
-}
-
-//----------------------------------------------------------------------------
 void vtkAtom::PrintSelf(ostream &os, vtkIndent indent)
 {
   os << indent << "Molecule: " << this->Molecule
diff --git a/Common/DataModel/vtkAtom.h b/Common/DataModel/vtkAtom.h
index dbe99b1..aeaa50d 100644
--- a/Common/DataModel/vtkAtom.h
+++ b/Common/DataModel/vtkAtom.h
@@ -28,8 +28,6 @@ class vtkVector3f;
 class VTKCOMMONDATAMODEL_EXPORT vtkAtom
 {
  public:
-  ~vtkAtom();
-
   void PrintSelf(ostream &os, vtkIndent indent);
 
   // Description:
diff --git a/Common/DataModel/vtkAttributesErrorMetric.cxx b/Common/DataModel/vtkAttributesErrorMetric.cxx
index d237063..bfba04b 100644
--- a/Common/DataModel/vtkAttributesErrorMetric.cxx
+++ b/Common/DataModel/vtkAttributesErrorMetric.cxx
@@ -19,7 +19,7 @@
 #include "vtkGenericAttributeCollection.h"
 #include "vtkGenericAdaptorCell.h"
 #include "vtkGenericDataSet.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkAttributesErrorMetric);
 
diff --git a/Common/DataModel/vtkBSPCuts.cxx b/Common/DataModel/vtkBSPCuts.cxx
index 9a1e2cc..3ac0c7a 100644
--- a/Common/DataModel/vtkBSPCuts.cxx
+++ b/Common/DataModel/vtkBSPCuts.cxx
@@ -71,41 +71,26 @@ void vtkBSPCuts::Initialize()
 //----------------------------------------------------------------------------
 void vtkBSPCuts::ResetArrays()
 {
-  if (this->Dim)
-    {
-    delete [] this->Dim;
-    this->Dim = NULL;
-    }
-  if (this->Coord)
-    {
-    delete [] this->Coord;
-    this->Coord = NULL;
-    }
-  if (this->Lower)
-    {
-    delete [] this->Lower;
-    this->Lower = NULL;
-    }
-  if (this->Upper)
-    {
-    delete [] this->Upper;
-    this->Upper = NULL;
-    }
-  if (this->LowerDataCoord)
-    {
-    delete [] this->LowerDataCoord;
-    this->LowerDataCoord = NULL;
-    }
-  if (this->UpperDataCoord)
-    {
-    delete [] this->UpperDataCoord;
-    this->UpperDataCoord = NULL;
-    }
-  if (this->Npoints)
-    {
-    delete [] this->Npoints;
-    this->Npoints = NULL;
-    }
+  delete [] this->Dim;
+  this->Dim = NULL;
+
+  delete [] this->Coord;
+  this->Coord = NULL;
+
+  delete [] this->Lower;
+  this->Lower = NULL;
+
+  delete [] this->Upper;
+  this->Upper = NULL;
+
+  delete [] this->LowerDataCoord;
+  this->LowerDataCoord = NULL;
+
+  delete [] this->UpperDataCoord;
+  this->UpperDataCoord = NULL;
+
+  delete [] this->Npoints;
+  this->Npoints = NULL;
 
   this->NumberOfCuts = 0;
 }
diff --git a/Common/DataModel/vtkBSPIntersections.cxx b/Common/DataModel/vtkBSPIntersections.cxx
index 4ea0f91..a160307 100644
--- a/Common/DataModel/vtkBSPIntersections.cxx
+++ b/Common/DataModel/vtkBSPIntersections.cxx
@@ -86,10 +86,7 @@ vtkBSPIntersections::vtkBSPIntersections()
 vtkBSPIntersections::~vtkBSPIntersections()
 {
   this->SetCuts(NULL);
-  if (this->RegionList)
-    {
-    delete [] this->RegionList;
-    }
+  delete [] this->RegionList;
 }
 //----------------------------------------------------------------------------
 int vtkBSPIntersections::BuildRegionList()
@@ -100,11 +97,8 @@ int vtkBSPIntersections::BuildRegionList()
     return 0;
     }
 
-  if (this->RegionList)
-    {
-    delete [] this->RegionList;
-    this->RegionList = NULL;
-    }
+  delete [] this->RegionList;
+  this->RegionList = NULL;
 
   vtkKdNode *top = NULL;
   if (this->Cuts)
diff --git a/Common/DataModel/vtkBiQuadraticQuadraticHexahedron.cxx b/Common/DataModel/vtkBiQuadraticQuadraticHexahedron.cxx
index b51c5e6..2b48109 100644
--- a/Common/DataModel/vtkBiQuadraticQuadraticHexahedron.cxx
+++ b/Common/DataModel/vtkBiQuadraticQuadraticHexahedron.cxx
@@ -28,7 +28,7 @@
 #include "vtkQuadraticQuad.h"
 #include "vtkBiQuadraticQuad.h"
 #include "vtkPoints.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkBiQuadraticQuadraticHexahedron);
 
@@ -184,6 +184,8 @@ void vtkBiQuadraticQuadraticHexahedron::Subdivide(vtkPointData *inPd, vtkCellDat
 
   //Interpolate new values
   double p[3];
+  this->Points->Resize(27);
+  this->CellScalars->Resize(27);
   for ( numMidPts=0; numMidPts < 3; numMidPts++ )
     {
     this->InterpolationFunctions(MidPoints[numMidPts], weights);
diff --git a/Common/DataModel/vtkBiQuadraticQuadraticWedge.cxx b/Common/DataModel/vtkBiQuadraticQuadraticWedge.cxx
index 129de47..0d5cf17 100644
--- a/Common/DataModel/vtkBiQuadraticQuadraticWedge.cxx
+++ b/Common/DataModel/vtkBiQuadraticQuadraticWedge.cxx
@@ -27,7 +27,7 @@
 #include "vtkQuadraticTriangle.h"
 #include "vtkPoints.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro (vtkBiQuadraticQuadraticWedge);
 
diff --git a/Common/DataModel/vtkBond.cxx b/Common/DataModel/vtkBond.cxx
index fc550d1..fb8bef0 100644
--- a/Common/DataModel/vtkBond.cxx
+++ b/Common/DataModel/vtkBond.cxx
@@ -18,7 +18,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkVector.h"
 #include "vtkVectorOperators.h"
 
-#include <assert.h>
+#include <cassert>
 
 //----------------------------------------------------------------------------
 vtkBond::vtkBond(vtkMolecule *parent, vtkIdType id,
@@ -32,11 +32,6 @@ vtkBond::vtkBond(vtkMolecule *parent, vtkIdType id,
 }
 
 //----------------------------------------------------------------------------
-vtkBond::~vtkBond()
-{
-}
-
-//----------------------------------------------------------------------------
 void vtkBond::PrintSelf(ostream &os, vtkIndent indent)
 {
   os << indent << "Molecule: " << this->Molecule
diff --git a/Common/DataModel/vtkBond.h b/Common/DataModel/vtkBond.h
index abd46d5..38bf66e 100644
--- a/Common/DataModel/vtkBond.h
+++ b/Common/DataModel/vtkBond.h
@@ -27,7 +27,6 @@ class vtkMolecule;
 class VTKCOMMONDATAMODEL_EXPORT vtkBond
 {
 public:
-  ~vtkBond();
   void PrintSelf(ostream &os, vtkIndent indent);
 
   // Description:
diff --git a/Common/DataModel/vtkBoundingBox.cxx b/Common/DataModel/vtkBoundingBox.cxx
index d502c2a..ae519af 100644
--- a/Common/DataModel/vtkBoundingBox.cxx
+++ b/Common/DataModel/vtkBoundingBox.cxx
@@ -15,7 +15,7 @@
 #include "vtkBoundingBox.h"
 #include <vtkMath.h>
 #include <vtkPlane.h>
-#include <assert.h>
+#include <cassert>
 #include <math.h>
 
 // ---------------------------------------------------------------------------
diff --git a/Common/DataModel/vtkBoundingBox.h b/Common/DataModel/vtkBoundingBox.h
index 208f770..493926b 100644
--- a/Common/DataModel/vtkBoundingBox.h
+++ b/Common/DataModel/vtkBoundingBox.h
@@ -33,7 +33,7 @@ public:
   // Construct a bounding box with the min point set to
   // VTK_DOUBLE_MAX and the max point set to VTK_DOUBLE_MIN
   vtkBoundingBox();
-  vtkBoundingBox(double bounds[6]);
+  vtkBoundingBox(const double bounds[6]);
   vtkBoundingBox(double xMin, double xMax,
                  double yMin, double yMax,
                  double zMin, double zMax);
@@ -271,7 +271,7 @@ inline vtkBoundingBox::vtkBoundingBox()
   this->Reset();
 }
 
-inline vtkBoundingBox::vtkBoundingBox(double bounds[6])
+inline vtkBoundingBox::vtkBoundingBox(const double bounds[6])
 {
   this->Reset();
   this->SetBounds(bounds);
diff --git a/Common/DataModel/vtkBox.cxx b/Common/DataModel/vtkBox.cxx
index 44646e8..8fc3268 100644
--- a/Common/DataModel/vtkBox.cxx
+++ b/Common/DataModel/vtkBox.cxx
@@ -16,7 +16,7 @@
 #include "vtkMath.h"
 #include "vtkObjectFactory.h"
 #include "vtkBoundingBox.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkBox);
 
@@ -56,7 +56,7 @@ void vtkBox::SetBounds(double xMin, double xMax,
 }
 
 //----------------------------------------------------------------------------
-void vtkBox::SetBounds(double bounds[6])
+void vtkBox::SetBounds(const double bounds[6])
 {
   this->SetBounds(bounds[0],bounds[1], bounds[2],bounds[3],
                   bounds[4],bounds[5]);
@@ -114,7 +114,7 @@ double* vtkBox::GetBounds()
 }
 
 //----------------------------------------------------------------------------
-void vtkBox::AddBounds(double bounds[6])
+void vtkBox::AddBounds(const double bounds[6])
 {
   vtkBoundingBox bbox(*(this->BBox));
   this->BBox->AddBounds(bounds);
diff --git a/Common/DataModel/vtkBox.h b/Common/DataModel/vtkBox.h
index 594593f..90abfcb 100644
--- a/Common/DataModel/vtkBox.h
+++ b/Common/DataModel/vtkBox.h
@@ -67,7 +67,7 @@ public:
   void SetBounds(double xMin, double xMax,
                  double yMin, double yMax,
                  double zMin, double zMax);
-  void SetBounds(double bounds[6]);
+  void SetBounds(const double bounds[6]);
   void GetBounds(double &xMin, double &xMax,
                  double &yMin, double &yMax,
                  double &zMin, double &zMax);
@@ -79,7 +79,7 @@ public:
   // Start with a SetBounds(). Subsequent AddBounds() methods are union set
   // operations on the original bounds. Retrieve the final bounds with a
   // GetBounds() method.
-  void AddBounds(double bounds[6]);
+  void AddBounds(const double bounds[6]);
 
   // Description:
   // Bounding box intersection modified from Graphics Gems Vol I. The method
diff --git a/Common/DataModel/vtkCellData.h b/Common/DataModel/vtkCellData.h
index 96a1b05..ee95f24 100644
--- a/Common/DataModel/vtkCellData.h
+++ b/Common/DataModel/vtkCellData.h
@@ -35,8 +35,8 @@ public:
   void PrintSelf(ostream& os, vtkIndent indent);
 
 protected:
-  vtkCellData() {}; //make sure constructor and desctructor are protected
-  ~vtkCellData() {};
+  vtkCellData() {} //make sure constructor and destructor are protected
+  ~vtkCellData() {}
 
 private:
   vtkCellData(const vtkCellData&);  // Not implemented.
diff --git a/Common/DataModel/vtkCellIterator.cxx b/Common/DataModel/vtkCellIterator.cxx
new file mode 100644
index 0000000..7c49ad4
--- /dev/null
+++ b/Common/DataModel/vtkCellIterator.cxx
@@ -0,0 +1,115 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCellIterator.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCellIterator.h"
+
+#include "vtkGenericCell.h"
+#include "vtkIdList.h"
+#include "vtkPoints.h"
+
+//------------------------------------------------------------------------------
+void vtkCellIterator::PrintSelf(ostream &os, vtkIndent indent)
+{
+  os << indent << "CacheFlags: ";
+  switch (this->CacheFlags)
+    {
+    case UninitializedFlag:
+      os << "UninitializedFlag" << endl;
+      break;
+    default:
+      {
+      bool addSplit = false;
+
+      if (this->CheckCache(CellTypeFlag))
+        {
+        os << "CellTypeFlag";
+        addSplit = true;
+        }
+
+      if (this->CheckCache(PointIdsFlag))
+        {
+        os << (addSplit ? " | " : "") << "PointIdsFlag";
+        addSplit = true;
+        }
+
+      if (this->CheckCache(PointsFlag))
+        {
+        os << (addSplit ? " | " : "") << "PointsFlag";
+        addSplit = true;
+        }
+
+      if (this->CheckCache(FacesFlag))
+        {
+        os << (addSplit ? " | " : "") << "FacesFlag";
+        addSplit = true;
+        }
+      os << endl;
+      }
+    }
+
+  os << indent << "CellType: " << this->CellType << endl;
+  os << indent << "Points:" << endl;
+  this->Points->PrintSelf(os, indent.GetNextIndent());
+  os << indent << "PointIds:" << endl;
+  this->PointIds->PrintSelf(os, indent.GetNextIndent());
+  os << indent << "Faces:" << endl;
+  this->Faces->PrintSelf(os, indent.GetNextIndent());
+}
+
+//------------------------------------------------------------------------------
+void vtkCellIterator::GetCell(vtkGenericCell *cell)
+{
+  cell->SetCellType(this->GetCellType());
+  cell->SetPointIds(this->GetPointIds());
+  cell->SetPoints(this->GetPoints());
+
+  if (cell->RequiresExplicitFaceRepresentation())
+    {
+    vtkIdList *faces = this->GetFaces();
+    if (faces->GetNumberOfIds() != 0)
+      {
+      cell->SetFaces(faces->GetPointer(0));
+      }
+    }
+
+  if (cell->RequiresInitialization())
+    {
+    cell->Initialize();
+    }
+}
+
+//------------------------------------------------------------------------------
+vtkCellIterator::vtkCellIterator()
+  : CellType(VTK_EMPTY_CELL),
+    CacheFlags(UninitializedFlag)
+{
+  this->Points = this->PointsContainer.GetPointer();
+  this->PointIds = this->PointIdsContainer.GetPointer();
+  this->Faces = this->FacesContainer.GetPointer();
+}
+
+//------------------------------------------------------------------------------
+void vtkCellIterator::ResetContainers()
+{
+  this->Points->Reset();
+  this->PointIds->Reset();
+  this->Faces->Reset();
+  this->CellType = VTK_EMPTY_CELL;
+}
+
+//------------------------------------------------------------------------------
+vtkCellIterator::~vtkCellIterator()
+{
+}
diff --git a/Common/DataModel/vtkCellIterator.h b/Common/DataModel/vtkCellIterator.h
new file mode 100644
index 0000000..6de0d17
--- /dev/null
+++ b/Common/DataModel/vtkCellIterator.h
@@ -0,0 +1,284 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCellIterator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkCellIterator - Efficient cell iterator for vtkDataSet topologies.
+//
+// .SECTION Description
+// vtkCellIterator provides a method for traversing cells in a data set. Call
+// the vtkDataSet::NewCellIterator() method to use this class.
+//
+// The cell is represented as a set of three pieces of information: The cell
+// type, the ids of the points constituting the cell, and the points themselves.
+// This iterator fetches these as needed. If only the cell type is used,
+// the type is not looked up until GetCellType is called, and the point
+// information is left uninitialized. This allows efficient screening of cells,
+// since expensive point lookups may be skipped depending on the cell type, etc.
+//
+// An example usage of this class:
+// ~~~
+// void myWorkerFunction(vtkDataSet *ds)
+// {
+//   vtkCellIterator *it = ds->NewCellIterator();
+//   for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
+//     {
+//     if (it->GetCellType() != VTK_TETRA)
+//       {
+//       continue; /* Skip non-tetrahedral cells */
+//       }
+//
+//     vtkIdList *pointIds = it->GetPointIds();
+//     /* Do screening on the point ids, maybe figure out scalar range and skip
+//        cells that do not lie in a certain range? */
+//
+//     vtkPoints *points = it->GetPoints();
+//     /* Do work using the cell points, or ... */
+//
+//     vtkGenericCell *cell = ...;
+//     it->GetCell(cell);
+//     /* ... do work with a vtkCell. */
+//     }
+//   it->Delete();
+// }
+// ~~~
+//
+// The example above pulls in bits of information as needed to filter out cells
+// that aren't relevant. The least expensive lookups are performed first
+// (cell type, then point ids, then points/full cell) to prevent wasted cycles
+// fetching unnecessary data. Also note that at the end of the loop, the
+// iterator must be deleted as these iterators are vtkObject subclasses.
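+//
+// Repeated calls to GetCellType(), GetPointIds(), GetPoints() or GetFaces()
+// for the same cell are cheap: the fetched data is cached until GoToNextCell()
+// or InitTraversal() clears the cache.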
+
+#ifndef __vtkCellIterator_h
+#define __vtkCellIterator_h
+
+#include "vtkCommonDataModelModule.h" // For export macro
+#include "vtkObject.h"
+#include "vtkNew.h" // For vtkNew
+#include "vtkIdList.h" // For inline methods
+
+class vtkGenericCell;
+class vtkPoints;
+
+class VTKCOMMONDATAMODEL_EXPORT vtkCellIterator : public vtkObject
+{
+public:
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+  vtkAbstractTypeMacro(vtkCellIterator, vtkObject)
+
+  // Description:
+  // Reset to the first cell.
+  void InitTraversal();
+
+  // Description:
+  // Increment to next cell. Always safe to call.
+  void GoToNextCell();
+
+  // Description:
+  // Returns false while the iterator is valid. Always safe to call.
+  virtual bool IsDoneWithTraversal() = 0;
+
+  // Description:
+  // Get the current cell type (e.g. VTK_LINE, VTK_VERTEX, VTK_TETRA, etc).
+  // This should only be called when IsDoneWithTraversal() returns false.
+  int GetCellType();
+
+  // Description:
+  // Get the id of the current cell.
+  virtual vtkIdType GetCellId() = 0;
+
+  // Description:
+  // Get the ids of the points in the current cell.
+  // This should only be called when IsDoneWithTraversal() returns false.
+  vtkIdList *GetPointIds();
+
+  // Description:
+  // Get the points in the current cell.
+  // This is usually a very expensive call, and should be avoided when possible.
+  // This should only be called when IsDoneWithTraversal() returns false.
+  vtkPoints *GetPoints();
+
+  // Description:
+  // Get the faces for a polyhedral cell.
+  vtkIdList *GetFaces();
+
+  // Description:
+  // Write the current full cell information into the argument.
+  // This is usually a very expensive call, and should be avoided when possible.
+  // This should only be called when IsDoneWithTraversal() returns false.
+  void GetCell(vtkGenericCell *cell);
+
+  // Description:
+  // Return the number of points in the current cell.
+  // This should only be called when IsDoneWithTraversal() returns false.
+  vtkIdType GetNumberOfPoints();
+
+  // Description:
+  // Return the number of faces in the current polyhedral cell.
+  // This should only be called when IsDoneWithTraversal() returns false.
+  vtkIdType GetNumberOfFaces();
+
+protected:
+  vtkCellIterator();
+  ~vtkCellIterator();
+
+  // Description:
+  // Update internal state to point to the first cell.
+  virtual void ResetToFirstCell() = 0;
+
+  // Description:
+  // Update internal state to point to the next cell.
+  virtual void IncrementToNextCell() = 0;
+
+  // Description:
+  // Lookup the cell type in the data set and store it in this->CellType.
+  virtual void FetchCellType() = 0;
+
+  // Description:
+  // Lookup the cell point ids in the data set and store them in this->PointIds.
+  virtual void FetchPointIds() = 0;
+
+  // Description:
+  // Lookup the cell points in the data set and store them in this->Points.
+  virtual void FetchPoints() = 0;
+
+  // Description:
+  // Lookup the cell faces in the data set and store them in this->Faces.
+  // Few data sets support faces, so this method has a no-op default
+  // implementation. See vtkUnstructuredGrid::GetFaceStream for
+  // a description of the layout that Faces should have.
+  virtual void FetchFaces() { }
+
+  int CellType;
+  vtkPoints *Points;
+  vtkIdList *PointIds;
+  vtkIdList *Faces;
+
+private:
+  vtkCellIterator(const vtkCellIterator &); // Not implemented.
+  void operator=(const vtkCellIterator &);   // Not implemented.
+
+  enum
+    {
+    UninitializedFlag = 0x0,
+    CellTypeFlag = 0x1,
+    PointIdsFlag = 0x2,
+    PointsFlag = 0x4,
+    FacesFlag = 0x8
+    };
+
+  void ResetCache()
+  {
+    this->CacheFlags = UninitializedFlag;
+    this->ResetContainers();
+  }
+
+  void SetCache(unsigned char flags)
+  {
+    this->CacheFlags |= flags;
+  }
+
+  bool CheckCache(unsigned char flags)
+  {
+    return (this->CacheFlags & flags) == flags;
+  }
+
+  void ResetContainers();
+
+  vtkNew<vtkPoints> PointsContainer;
+  vtkNew<vtkIdList> PointIdsContainer;
+  vtkNew<vtkIdList> FacesContainer;
+  unsigned char CacheFlags;
+};
+
+//------------------------------------------------------------------------------
+inline void vtkCellIterator::InitTraversal()
+{
+  this->ResetToFirstCell();
+  this->ResetCache();
+}
+
+//------------------------------------------------------------------------------
+inline void vtkCellIterator::GoToNextCell()
+{
+  this->IncrementToNextCell();
+  this->ResetCache();
+}
+
+//------------------------------------------------------------------------------
+inline int vtkCellIterator::GetCellType()
+{
+  if (!this->CheckCache(CellTypeFlag))
+    {
+    this->FetchCellType();
+    this->SetCache(CellTypeFlag);
+    }
+  return this->CellType;
+}
+
+//------------------------------------------------------------------------------
+inline vtkIdList* vtkCellIterator::GetPointIds()
+{
+  if (!this->CheckCache(PointIdsFlag))
+    {
+    this->FetchPointIds();
+    this->SetCache(PointIdsFlag);
+    }
+  return this->PointIds;
+}
+
+//------------------------------------------------------------------------------
+inline vtkPoints* vtkCellIterator::GetPoints()
+{
+  if (!this->CheckCache(PointsFlag))
+    {
+    this->FetchPoints();
+    this->SetCache(PointsFlag);
+    }
+  return this->Points;
+}
+
+//------------------------------------------------------------------------------
+inline vtkIdList *vtkCellIterator::GetFaces()
+{
+  if (!this->CheckCache(FacesFlag))
+    {
+    this->FetchFaces();
+    this->SetCache(FacesFlag);
+    }
+  return this->Faces;
+}
+
+//------------------------------------------------------------------------------
+inline vtkIdType vtkCellIterator::GetNumberOfPoints()
+{
+  if (!this->CheckCache(PointIdsFlag))
+    {
+    this->FetchPointIds();
+    this->SetCache(PointIdsFlag);
+    }
+  return this->PointIds->GetNumberOfIds();
+}
+
+//------------------------------------------------------------------------------
+inline vtkIdType vtkCellIterator::GetNumberOfFaces()
+{
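+  // this->Faces follows the vtkUnstructuredGrid face-stream layout (see
+  // FetchFaces above), whose first id is the number of faces.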
+  if (!this->CheckCache(FacesFlag))
+    {
+    this->FetchFaces();
+    this->SetCache(FacesFlag);
+    }
+  return this->Faces->GetNumberOfIds() != 0 ? this->Faces->GetId(0) : 0;
+}
+
+#endif //__vtkCellIterator_h
diff --git a/Common/DataModel/vtkCellLinks.cxx b/Common/DataModel/vtkCellLinks.cxx
index 6507756..67b7d4d 100644
--- a/Common/DataModel/vtkCellLinks.cxx
+++ b/Common/DataModel/vtkCellLinks.cxx
@@ -28,10 +28,7 @@ void vtkCellLinks::Allocate(vtkIdType sz, vtkIdType ext)
   static vtkCellLinks::Link linkInit = {0,NULL};
 
   this->Size = sz;
-  if ( this->Array != NULL )
-    {
-    delete [] this->Array;
-    }
+  delete [] this->Array;
   this->Array = new vtkCellLinks::Link[sz];
   this->Extend = ext;
   this->MaxId = -1;
@@ -52,10 +49,7 @@ vtkCellLinks::~vtkCellLinks()
 
   for (vtkIdType i=0; i<=this->MaxId; i++)
     {
-    if ( this->Array[i].cells != NULL )
-      {
-      delete [] this->Array[i].cells;
-      }
+    delete [] this->Array[i].cells;
     }
 
   delete [] this->Array;
diff --git a/Common/DataModel/vtkCellLinks.h b/Common/DataModel/vtkCellLinks.h
index 8374cbd..71fae1a 100644
--- a/Common/DataModel/vtkCellLinks.h
+++ b/Common/DataModel/vtkCellLinks.h
@@ -126,7 +126,7 @@ public:
   void DeepCopy(vtkCellLinks *src);
 
 protected:
-  vtkCellLinks():Array(NULL),Size(0),MaxId(-1),Extend(1000) {};
+  vtkCellLinks():Array(NULL),Size(0),MaxId(-1),Extend(1000) {}
   ~vtkCellLinks();
 
   // Description:
diff --git a/Common/DataModel/vtkCellLocator.cxx b/Common/DataModel/vtkCellLocator.cxx
index a0daf36..f592834 100644
--- a/Common/DataModel/vtkCellLocator.cxx
+++ b/Common/DataModel/vtkCellLocator.cxx
@@ -83,20 +83,14 @@ vtkCellLocator::vtkCellLocator()
 //----------------------------------------------------------------------------
 vtkCellLocator::~vtkCellLocator()
 {
-  if (this->Buckets)
-    {
-    delete this->Buckets;
-    this->Buckets = NULL;
-    }
+  delete this->Buckets;
+  this->Buckets = NULL;
 
   this->FreeSearchStructure();
   this->FreeCellBounds();
 
-  if (this->CellHasBeenVisited)
-    {
-    delete [] this->CellHasBeenVisited;
-    this->CellHasBeenVisited = NULL;
-    }
+  delete [] this->CellHasBeenVisited;
+  this->CellHasBeenVisited = NULL;
 }
 
 //----------------------------------------------------------------------------
@@ -1229,11 +1223,8 @@ void vtkCellLocator::BuildLocatorInternal()
     {
     this->FreeSearchStructure();
     }
-  if ( this->CellHasBeenVisited )
-    {
-    delete [] this->CellHasBeenVisited;
-    this->CellHasBeenVisited = NULL;
-    }
+  delete [] this->CellHasBeenVisited;
+  this->CellHasBeenVisited = NULL;
   this->FreeCellBounds();
 
   //  Size the root cell.  Initialize cell data structure, compute
@@ -1666,11 +1657,11 @@ double vtkCellLocator::Distance2ToBounds(double x[3], double bounds[6])
   return distance;
 }
 //----------------------------------------------------------------------------
-bool vtkCellLocator_Inside(double bounds[6], double point[3]) {
+static bool vtkCellLocator_Inside(const double bounds[6], const double point[3]) {
   if (point[0]<bounds[0] || point[0]>bounds[1] ||
       point[1]<bounds[2] || point[1]>bounds[3] ||
-      point[2]<bounds[4] || point[2]>bounds[5]) return 0;
-  return 1;
+      point[2]<bounds[4] || point[2]>bounds[5]) return false;
+  return true;
 }
 //----------------------------------------------------------------------------
 vtkIdType vtkCellLocator::FindCell(
diff --git a/Common/DataModel/vtkCellType.h b/Common/DataModel/vtkCellType.h
index 7e19e8d..82dafe1 100644
--- a/Common/DataModel/vtkCellType.h
+++ b/Common/DataModel/vtkCellType.h
@@ -61,6 +61,7 @@ typedef enum {
   VTK_QUADRATIC_EDGE                   = 21,
   VTK_QUADRATIC_TRIANGLE               = 22,
   VTK_QUADRATIC_QUAD                   = 23,
+  VTK_QUADRATIC_POLYGON                = 36,
   VTK_QUADRATIC_TETRA                  = 24,
   VTK_QUADRATIC_HEXAHEDRON             = 25,
   VTK_QUADRATIC_WEDGE                  = 26,
diff --git a/Common/DataModel/vtkCone.h b/Common/DataModel/vtkCone.h
index 94dd2d6..7caf1be 100644
--- a/Common/DataModel/vtkCone.h
+++ b/Common/DataModel/vtkCone.h
@@ -58,7 +58,7 @@ public:
 
 protected:
   vtkCone();
-  ~vtkCone() {};
+  ~vtkCone() {}
 
   double Angle;
 
diff --git a/Common/DataModel/vtkCubicLine.cxx b/Common/DataModel/vtkCubicLine.cxx
index 58a3194..03b008c 100644
--- a/Common/DataModel/vtkCubicLine.cxx
+++ b/Common/DataModel/vtkCubicLine.cxx
@@ -53,10 +53,6 @@ vtkCubicLine::~vtkCubicLine()
 
 
 //----------------------------------------------------------------------------
-static const int VTK_NO_INTERSECTION=0;
-static const int VTK_YES_INTERSECTION=2;
-static const int VTK_ON_LINE=3;
-
 int vtkCubicLine::EvaluatePosition(double x[3], double* closestPoint,
                              int& subId, double pcoords[3],
                              double& minDist2, double *weights)
diff --git a/Common/DataModel/vtkCylinder.h b/Common/DataModel/vtkCylinder.h
index f63b1aa..d047a21 100644
--- a/Common/DataModel/vtkCylinder.h
+++ b/Common/DataModel/vtkCylinder.h
@@ -62,7 +62,7 @@ public:
   vtkGetVectorMacro(Center,double,3);
 protected:
   vtkCylinder();
-  ~vtkCylinder() {};
+  ~vtkCylinder() {}
 
   double Radius;
   double Center[3];
diff --git a/Common/DataModel/vtkDataObjectCollection.h b/Common/DataModel/vtkDataObjectCollection.h
index efdb1c5..4b34f6d 100644
--- a/Common/DataModel/vtkDataObjectCollection.h
+++ b/Common/DataModel/vtkDataObjectCollection.h
@@ -63,8 +63,8 @@ public:
   //ETX
 
 protected:
-  vtkDataObjectCollection() {};
-  ~vtkDataObjectCollection() {};
+  vtkDataObjectCollection() {}
+  ~vtkDataObjectCollection() {}
 
 
 private:
diff --git a/Common/DataModel/vtkDataObjectTypes.h b/Common/DataModel/vtkDataObjectTypes.h
index a7504da..d81a2e4 100644
--- a/Common/DataModel/vtkDataObjectTypes.h
+++ b/Common/DataModel/vtkDataObjectTypes.h
@@ -56,8 +56,8 @@ public:
   static vtkDataObject* NewDataObject(int typeId);
 
 protected:
-  vtkDataObjectTypes() {};
-  ~vtkDataObjectTypes() {};
+  vtkDataObjectTypes() {}
+  ~vtkDataObjectTypes() {}
 
   // Description:
   // Method used to validate data object types, for testing purposes
diff --git a/Common/DataModel/vtkDataSet.cxx b/Common/DataModel/vtkDataSet.cxx
index 3e43508..caa7ccc 100644
--- a/Common/DataModel/vtkDataSet.cxx
+++ b/Common/DataModel/vtkDataSet.cxx
@@ -17,6 +17,7 @@
 #include "vtkCell.h"
 #include "vtkCellData.h"
 #include "vtkCellTypes.h"
+#include "vtkDataSetCellIterator.h"
 #include "vtkExtentTranslator.h"
 #include "vtkGenericCell.h"
 #include "vtkIdList.h"
@@ -67,6 +68,14 @@ void vtkDataSet::CopyAttributes(vtkDataSet *ds)
 }
 
 //----------------------------------------------------------------------------
+vtkCellIterator *vtkDataSet::NewCellIterator()
+{
+  vtkDataSetCellIterator *iter = vtkDataSetCellIterator::New();
+  iter->SetDataSet(this);
+  return iter;
+}
+
+//----------------------------------------------------------------------------
 // Compute the data bounding box from data points.
 void vtkDataSet::ComputeBounds()
 {
diff --git a/Common/DataModel/vtkDataSet.h b/Common/DataModel/vtkDataSet.h
index c1a1453..f226a3e 100644
--- a/Common/DataModel/vtkDataSet.h
+++ b/Common/DataModel/vtkDataSet.h
@@ -42,6 +42,7 @@
 
 class vtkCell;
 class vtkCellData;
+class vtkCellIterator;
 class vtkCellTypes;
 class vtkExtentTranslator;
 class vtkGenericCell;
@@ -90,6 +91,10 @@ public:
   virtual void GetPoint(vtkIdType id, double x[3]);
 
   // Description:
+  // Return an iterator that traverses the cells in this data set.
+  virtual vtkCellIterator* NewCellIterator();
+
+  // Description:
   // Get cell with cellId such that: 0 <= cellId < NumberOfCells.
   // THIS METHOD IS NOT THREAD SAFE.
   virtual vtkCell *GetCell(vtkIdType cellId) = 0;
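
A usage sketch for the new NewCellIterator() entry point (illustrative only; the
helper name and the empty loop body are placeholders, not taken from this patch):

    #include "vtkCellIterator.h"
    #include "vtkDataSet.h"
    #include "vtkIdList.h"

    // Visit every cell of an arbitrary vtkDataSet through the new iterator.
    static void VisitCells(vtkDataSet *ds)
    {
      vtkCellIterator *it = ds->NewCellIterator();
      for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
        {
        int cellType = it->GetCellType();     // e.g. VTK_TETRA
        vtkIdList *ptIds = it->GetPointIds(); // connectivity of the current cell
        (void)cellType; (void)ptIds;          // ... process the cell here ...
        }
      it->Delete(); // the caller owns the iterator returned by NewCellIterator()
    }
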
diff --git a/Common/DataModel/vtkDataSetAttributes.cxx b/Common/DataModel/vtkDataSetAttributes.cxx
index c9e7c0b..53ef911 100644
--- a/Common/DataModel/vtkDataSetAttributes.cxx
+++ b/Common/DataModel/vtkDataSetAttributes.cxx
@@ -24,11 +24,13 @@
 #include "vtkIntArray.h"
 #include "vtkUnsignedIntArray.h"
 #include "vtkLongArray.h"
+#include "vtkMappedDataArray.h"
 #include "vtkUnsignedLongArray.h"
 #include "vtkDoubleArray.h"
 #include "vtkFloatArray.h"
 #include "vtkIdTypeArray.h"
 #include "vtkObjectFactory.h"
+#include "vtkTypedDataArrayIterator.h"
 #include "vtkInformation.h"
 
 #include <vector>
@@ -306,7 +308,7 @@ vtkFieldData::BasicIterator  vtkDataSetAttributes::ComputeRequiredArrays(
       {
       // Cannot interpolate idtype arrays
       if (ctype != INTERPOLATE ||
-          !pd->GetAbstractArray(i)->IsA("vtkIdTypeArray"))
+          pd->GetAbstractArray(i)->GetDataType() != VTK_ID_TYPE)
         {
         copyFlags[numArrays] = i;
         numArrays++;
@@ -343,7 +345,7 @@ vtkFieldData::BasicIterator  vtkDataSetAttributes::ComputeRequiredArrays(
           {
           // Cannot interpolate idtype arrays
           if (ctype != INTERPOLATE ||
-              !pd->GetArray(index)->IsA("vtkIdTypeArray"))
+              pd->GetArray(index)->GetDataType() != VTK_ID_TYPE)
             {
             copyFlags[numArrays] = index;
             numArrays++;
@@ -435,8 +437,52 @@ void vtkDataSetAttributes::PassData(vtkFieldData* fd)
     }
 }
 
+//----------------------------------------------------------------------------
+// This is a version of vtkDataSetAttributesCopyValues adapted to work with
+// vtkTypedDataArrayIterators.
+template <class Scalar>
+void vtkTypedIteratorDataSetAttributesCopyValues(
+    vtkTypedDataArrayIterator<Scalar> destIter, const int *outExt,
+    vtkIdType outIncs[3],
+    vtkTypedDataArrayIterator<Scalar> srcIter, const int *inExt,
+    vtkIdType inIncs[3])
+{
+  // For vtkMappedDataArray subclasses.
+  vtkTypedDataArrayIterator<Scalar> inZIter;
+  vtkTypedDataArrayIterator<Scalar> outZIter;
+  vtkTypedDataArrayIterator<Scalar> inYIter;
+  vtkTypedDataArrayIterator<Scalar> outYIter;
+  vtkTypedDataArrayIterator<Scalar> inIter;
+  vtkTypedDataArrayIterator<Scalar> outIter;
+
+  // Set the starting iterators.
+  inZIter = srcIter +
+      (outExt[0] - inExt[0]) * inIncs[0] +
+      (outExt[2] - inExt[2]) * inIncs[1] +
+      (outExt[4] - inExt[4]) * inIncs[2];
+  outZIter = destIter;
+
+  int rowLength = outIncs[1];
 
-
+  for (int zIdx = outExt[4]; zIdx <= outExt[5]; ++zIdx)
+    {
+    inIter = inZIter;
+    outIter = outZIter;
+    for (int yIdx = outExt[2]; yIdx <= outExt[3]; ++yIdx)
+      {
+      for (int c = 0; c < rowLength; ++c)
+        {
+        *outIter = *inIter;
+        ++outIter;
+        ++inIter;
+        }
+      inIter += inIncs[1];
+      outIter += outIncs[1];
+      }
+    inZIter += inIncs[2];
+    outZIter += outIncs[2];
+    }
+}
 
 //----------------------------------------------------------------------------
 template <class iterT>
@@ -455,9 +501,10 @@ void vtkDataSetAttributesCopyValues(
   // Get the starting input pointer.
   inZPtr = static_cast<unsigned char*>(srcIter->GetArray()->GetVoidPointer(0));
   // Shift to the start of the subextent.
-  inZPtr += (outExt[0]-inExt[0])*inIncs[0] * data_type_size +
-    (outExt[2] - inExt[2])*inIncs[1] * data_type_size +
-    (outExt[4] - inExt[4])*inIncs[2] * data_type_size;
+  inZPtr +=
+      (outExt[0] - inExt[0]) * inIncs[0] * data_type_size +
+      (outExt[2] - inExt[2]) * inIncs[1] * data_type_size +
+      (outExt[4] - inExt[4]) * inIncs[2] * data_type_size;
 
   // Get output pointer.
   outZPtr =
@@ -555,18 +602,44 @@ void vtkDataSetAttributes::CopyStructuredData(vtkDataSetAttributes *fromPd,
       outArray->SetNumberOfTuples(zIdx);
       }
 
-    vtkArrayIterator* srcIter = inArray->NewIterator();
-    vtkArrayIterator* destIter = outArray->NewIterator();
+    if (inArray->HasStandardMemoryLayout() &&
+        outArray->HasStandardMemoryLayout())
+      {
+      vtkArrayIterator* srcIter = inArray->NewIterator();
+      vtkArrayIterator* destIter = outArray->NewIterator();
 
-    switch (inArray->GetDataType())
+      switch (inArray->GetDataType())
+        {
+        vtkArrayIteratorTemplateMacro(
+          vtkDataSetAttributesCopyValues(
+            static_cast<VTK_TT*>(destIter), outExt, outIncs,
+            static_cast<VTK_TT*>(srcIter), inExt, inIncs));
+        }
+      srcIter->Delete();
+      destIter->Delete();
+      }
+    else
       {
-      vtkArrayIteratorTemplateMacro(
-        vtkDataSetAttributesCopyValues(
-          static_cast<VTK_TT*>(destIter), outExt, outIncs,
-          static_cast<VTK_TT*>(srcIter), inExt, inIncs));
+      switch (inArray->GetDataType())
+        {
+        vtkTemplateMacro(
+          vtkTypedDataArray<VTK_TT> *typedIn =
+            vtkTypedDataArray<VTK_TT>::FastDownCast(inArray);
+          vtkTypedDataArray<VTK_TT> *typedOut =
+            vtkTypedDataArray<VTK_TT>::FastDownCast(outArray);
+          if (typedIn == NULL || typedOut == NULL)
+            {
+            vtkGenericWarningMacro("Cannot copy to/from a mapped data array "
+                                   "from/into an array that does not derive "
+                                   "from vtkTypedDataArray.");
+            continue;
+            }
+          vtkTypedIteratorDataSetAttributesCopyValues(
+                vtkTypedDataArrayIterator<VTK_TT>(typedOut), outExt, outIncs,
+                vtkTypedDataArrayIterator<VTK_TT>(typedIn), inExt, inIncs)
+              ); // end vtkTemplateMacro
+        }
       }
-    srcIter->Delete();
-    destIter->Delete();
     }
 }
 
@@ -707,6 +780,19 @@ void vtkDataSetAttributes::CopyData(vtkDataSetAttributes* fromPd,
     {
     this->CopyTuple(fromPd->Data[i], this->Data[this->TargetIndices[i]],
                     fromId, toId);
+  }
+}
+
+//--------------------------------------------------------------------------
+void vtkDataSetAttributes::CopyData(vtkDataSetAttributes *fromPd,
+                                    vtkIdList *fromIds, vtkIdList *toIds)
+{
+  int i;
+  for(i=this->RequiredArrays.BeginIndex(); !this->RequiredArrays.End();
+      i=this->RequiredArrays.NextIndex())
+    {
+    this->CopyTuples(fromPd->Data[i], this->Data[this->TargetIndices[i]],
+        fromIds, toIds);
     }
 }
 
@@ -823,6 +909,14 @@ void vtkDataSetAttributes::CopyTuple(vtkAbstractArray *fromData,
 }
 
 //--------------------------------------------------------------------------
+void vtkDataSetAttributes::CopyTuples(vtkAbstractArray *fromData,
+                                      vtkAbstractArray *toData,
+                                      vtkIdList *fromIds, vtkIdList *toIds)
+{
+  toData->InsertTuples(toIds, fromIds, fromData);
+}
+
+//--------------------------------------------------------------------------
 int vtkDataSetAttributes::SetScalars(vtkDataArray* da)
 {
   return this->SetAttribute(da, SCALARS);
@@ -2070,11 +2164,8 @@ void vtkDataSetAttributes::FieldList::SetField(
         vtkAbstractArray *aa)
 {
   // Store the field name
-  if ( this->Fields[index] )
-    {
-    delete [] this->Fields[index];
-    this->Fields[index] = 0;
-    }
+  delete [] this->Fields[index];
+  this->Fields[index] = 0;
   const char* name=aa->GetName();
   if (name)
     {
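
The CopyStructuredData() change above falls back to vtkTypedDataArrayIterator when
an array does not use the standard memory layout (e.g. a vtkMappedDataArray
subclass). The same iterator pattern in isolation, as a sketch (the helper name and
the fill value are placeholders, not taken from this patch):

    #include "vtkTypedDataArray.h"
    #include "vtkTypedDataArrayIterator.h"

    // Walk every value of a typed array through the safe iterator instead of
    // GetVoidPointer(); this also works for arrays with non-standard layouts.
    static void FillWithOnes(vtkTypedDataArray<float> *array)
    {
      vtkTypedDataArrayIterator<float> it(array); // begins at value index 0
      for (vtkIdType i = 0; i <= array->GetMaxId(); ++i, ++it)
        {
        *it = 1.0f; // assignment goes through the array's typed API, not raw memory
        }
    }
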
diff --git a/Common/DataModel/vtkDataSetAttributes.h b/Common/DataModel/vtkDataSetAttributes.h
index cae3373..69ae80a 100644
--- a/Common/DataModel/vtkDataSetAttributes.h
+++ b/Common/DataModel/vtkDataSetAttributes.h
@@ -450,7 +450,8 @@ public:
   // that field (on or off), obey the flag, ignore (3) 3) obey
   // CopyAllOn/Off
   void CopyData(vtkDataSetAttributes *fromPd, vtkIdType fromId, vtkIdType toId);
-
+  void CopyData(vtkDataSetAttributes *fromPd,
+                vtkIdList *fromIds, vtkIdList *toIds);
 
   // Description:
   // Copy a tuple of data from one data array to another. This method
@@ -459,6 +460,8 @@ public:
   // invoke CopyAllocate() or InterpolateAllocate().
   void CopyTuple(vtkAbstractArray *fromData, vtkAbstractArray *toData,
                  vtkIdType fromId, vtkIdType toId);
+  void CopyTuples(vtkAbstractArray *fromData, vtkAbstractArray *toData,
+                  vtkIdList *fromIds, vtkIdList *toIds);
 
 
   // -- interpolate operations ----------------------------------------------
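
A minimal sketch of the new id-list overload (illustrative; the helper name and the
particular ids are placeholders): after the usual CopyAllocate() setup, several
tuples are copied in one call, which forwards to vtkAbstractArray::InsertTuples().

    #include "vtkDataSetAttributes.h"
    #include "vtkIdList.h"
    #include "vtkNew.h"

    // Copy tuples 3 and 7 of srcPD into tuples 0 and 1 of dstPD in one call.
    static void CopySelectedTuples(vtkDataSetAttributes *srcPD,
                                   vtkDataSetAttributes *dstPD)
    {
      dstPD->CopyAllocate(srcPD);              // set up target arrays and copy flags
      vtkNew<vtkIdList> fromIds;
      vtkNew<vtkIdList> toIds;
      fromIds->InsertNextId(3);  toIds->InsertNextId(0);
      fromIds->InsertNextId(7);  toIds->InsertNextId(1);
      dstPD->CopyData(srcPD, fromIds.GetPointer(), toIds.GetPointer());
    }
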
diff --git a/Common/DataModel/vtkDataSetCellIterator.cxx b/Common/DataModel/vtkDataSetCellIterator.cxx
new file mode 100644
index 0000000..1d18866
--- /dev/null
+++ b/Common/DataModel/vtkDataSetCellIterator.cxx
@@ -0,0 +1,107 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkDataSetCellIterator.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkDataSetCellIterator.h"
+
+#include "vtkDataSet.h"
+#include "vtkObjectFactory.h"
+#include "vtkPoints.h"
+#include "vtkIdList.h"
+
+vtkStandardNewMacro(vtkDataSetCellIterator)
+
+//------------------------------------------------------------------------------
+void vtkDataSetCellIterator::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "DataSet: " << this->DataSet.GetPointer() << endl;
+
+}
+
+//------------------------------------------------------------------------------
+void vtkDataSetCellIterator::SetDataSet(vtkDataSet *ds)
+{
+  this->DataSet = ds;
+  this->CellId = 0;
+}
+
+//------------------------------------------------------------------------------
+bool vtkDataSetCellIterator::IsDoneWithTraversal()
+{
+  return this->DataSet.GetPointer() == NULL
+      || this->CellId >= this->DataSet->GetNumberOfCells();
+}
+
+//------------------------------------------------------------------------------
+vtkIdType vtkDataSetCellIterator::GetCellId()
+{
+  return this->CellId;
+}
+
+//------------------------------------------------------------------------------
+void vtkDataSetCellIterator::IncrementToNextCell()
+{
+  ++this->CellId;
+}
+
+//------------------------------------------------------------------------------
+vtkDataSetCellIterator::vtkDataSetCellIterator()
+  : vtkCellIterator(),
+    DataSet(NULL),
+    CellId(0)
+{
+}
+
+//------------------------------------------------------------------------------
+vtkDataSetCellIterator::~vtkDataSetCellIterator()
+{
+}
+
+//------------------------------------------------------------------------------
+void vtkDataSetCellIterator::ResetToFirstCell()
+{
+  this->CellId = 0;
+}
+
+//------------------------------------------------------------------------------
+void vtkDataSetCellIterator::FetchCellType()
+{
+  this->CellType = this->DataSet->GetCellType(this->CellId);
+}
+
+//------------------------------------------------------------------------------
+void vtkDataSetCellIterator::FetchPointIds()
+{
+  this->DataSet->GetCellPoints(this->CellId, this->PointIds);
+}
+
+//------------------------------------------------------------------------------
+void vtkDataSetCellIterator::FetchPoints()
+{
+  // This will fetch the point ids if needed:
+  vtkIdList *pointIds = this->GetPointIds();
+
+  vtkIdType numPoints = pointIds->GetNumberOfIds();
+  vtkIdType *id = pointIds->GetPointer(0);
+
+  this->Points->SetNumberOfPoints(numPoints);
+
+  double point[3];
+  for (int i = 0; i < numPoints; ++i)
+    {
+    this->DataSet->GetPoint(*id++, point);
+    this->Points->SetPoint(i, point);
+    }
+}
diff --git a/Common/DataModel/vtkDataSetCellIterator.h b/Common/DataModel/vtkDataSetCellIterator.h
new file mode 100644
index 0000000..ce43fe2
--- /dev/null
+++ b/Common/DataModel/vtkDataSetCellIterator.h
@@ -0,0 +1,58 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkDataSetCellIterator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkDataSetCellIterator - Implementation of vtkCellIterator using
+// the vtkDataSet API.
+
+#ifndef __vtkDataSetCellIterator_h
+#define __vtkDataSetCellIterator_h
+
+#include "vtkCommonDataModelModule.h" // For export macro
+#include "vtkCellIterator.h"
+#include "vtkSmartPointer.h" // For vtkSmartPointer
+
+class vtkDataSet;
+
+class VTKCOMMONDATAMODEL_EXPORT vtkDataSetCellIterator: public vtkCellIterator
+{
+public:
+  static vtkDataSetCellIterator *New();
+  vtkTypeMacro(vtkDataSetCellIterator, vtkCellIterator)
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  bool IsDoneWithTraversal();
+  vtkIdType GetCellId();
+
+protected:
+  vtkDataSetCellIterator();
+  ~vtkDataSetCellIterator();
+
+  void ResetToFirstCell();
+  void IncrementToNextCell();
+  void FetchCellType();
+  void FetchPointIds();
+  void FetchPoints();
+
+  friend class vtkDataSet;
+  void SetDataSet(vtkDataSet *ds);
+
+  vtkSmartPointer<vtkDataSet> DataSet;
+  vtkIdType CellId;
+
+private:
+  vtkDataSetCellIterator(const vtkDataSetCellIterator &); // Not implemented.
+  void operator=(const vtkDataSetCellIterator &);   // Not implemented.
+};
+
+#endif //__vtkDataSetCellIterator_h
diff --git a/Common/DataModel/vtkDataSetCollection.h b/Common/DataModel/vtkDataSetCollection.h
index 78efd25..76cf16b 100644
--- a/Common/DataModel/vtkDataSetCollection.h
+++ b/Common/DataModel/vtkDataSetCollection.h
@@ -61,8 +61,8 @@ public:
   //ETX
 
 protected:
-  vtkDataSetCollection() {};
-  ~vtkDataSetCollection() {};
+  vtkDataSetCollection() {}
+  ~vtkDataSetCollection() {}
 
 private:
   // hide the standard AddItem from the user and the compiler.
diff --git a/Common/DataModel/vtkDirectedAcyclicGraph.cxx b/Common/DataModel/vtkDirectedAcyclicGraph.cxx
index 0c0c54c..5c56a0b 100644
--- a/Common/DataModel/vtkDirectedAcyclicGraph.cxx
+++ b/Common/DataModel/vtkDirectedAcyclicGraph.cxx
@@ -56,7 +56,7 @@ vtkDirectedAcyclicGraph *vtkDirectedAcyclicGraph::GetData(vtkInformationVector *
 enum { DFS_WHITE, DFS_GRAY, DFS_BLACK };
 
 //----------------------------------------------------------------------------
-bool vtkDirectedAcyclicGraphDFSVisit(
+static bool vtkDirectedAcyclicGraphDFSVisit(
   vtkGraph *g,
   vtkIdType u,
   vtksys_stl::vector<int> color,
diff --git a/Common/DataModel/vtkDispatcher_Private.h b/Common/DataModel/vtkDispatcher_Private.h
index 1c6e77c..1ce9b21 100644
--- a/Common/DataModel/vtkDispatcher_Private.h
+++ b/Common/DataModel/vtkDispatcher_Private.h
@@ -94,7 +94,7 @@ class FunctorImpl{
     typedef R ResultType;
     typedef P1 Parm1;
 
-    virtual ~FunctorImpl() {};
+    virtual ~FunctorImpl() {}
     virtual R operator()(P1&) = 0;
     virtual FunctorImpl* DoClone() const = 0;
 
@@ -106,6 +106,12 @@ class FunctorImpl{
         assert(typeid(*pClone) == typeid(*pObj));
         return pClone;
     }
+  protected:
+    FunctorImpl() {}
+    FunctorImpl(const FunctorImpl&) {}
+  private:
+    // not implemented
+    FunctorImpl& operator =(const FunctorImpl&);
 };
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -120,7 +126,7 @@ public:
   typedef typename Base::Parm1 Parm1;
 
   FunctorHandler(Fun& fun) : f_(fun) {}
-  virtual ~FunctorHandler(){}
+  virtual ~FunctorHandler() {}
 
   ResultType operator()(Parm1& p1)
   { return f_(p1); }
@@ -128,6 +134,8 @@ public:
 
 private:
   Fun f_;
+  FunctorHandler(const FunctorHandler &b) : ParentFunctor::Impl(b), f_(b.f_) {}
+  // not implemented
   FunctorHandler& operator =(const FunctorHandler& b);
 };
 
@@ -230,7 +238,7 @@ class FunctorImpl{
     typedef P1 Parm1;
     typedef P2 Parm2;
 
-    virtual ~FunctorImpl() {};
+    virtual ~FunctorImpl() {}
     virtual R operator()(P1&,P2&) = 0;
     virtual FunctorImpl* DoClone() const = 0;
 
@@ -242,6 +250,12 @@ class FunctorImpl{
         assert(typeid(*pClone) == typeid(*pObj));
         return pClone;
     }
+  protected:
+    FunctorImpl() {}
+    FunctorImpl(const FunctorImpl&) {}
+  private:
+    // not implemented
+    FunctorImpl& operator =(const FunctorImpl&);
 };
 
 ////////////////////////////////////////////////////////////////////////////////
@@ -257,7 +271,7 @@ public:
   typedef typename Base::Parm2 Parm2;
 
   FunctorHandler(const Fun& fun) : f_(fun) {}
-  virtual ~FunctorHandler(){}
+  virtual ~FunctorHandler() {}
 
   ResultType operator()(Parm1& p1,Parm2& p2)
   { return f_(p1,p2); }
@@ -266,6 +280,8 @@ public:
 
 private:
   Fun f_;
+  FunctorHandler(const FunctorHandler &b) : ParentFunctor::Impl(b), f_(b.f_) {}
+  // not implemented
   FunctorHandler& operator =(const FunctorHandler& b);
 };
 
diff --git a/Common/DataModel/vtkDistributedGraphHelper.cxx b/Common/DataModel/vtkDistributedGraphHelper.cxx
index da50428..7ceb740 100644
--- a/Common/DataModel/vtkDistributedGraphHelper.cxx
+++ b/Common/DataModel/vtkDistributedGraphHelper.cxx
@@ -30,7 +30,7 @@
 
 
 #include <limits.h> // CHAR_BIT
-#include <assert.h> // assert()
+#include <cassert> // assert()
 
 
 vtkInformationKeyMacro(vtkDistributedGraphHelper, DISTRIBUTEDVERTEXIDS, Integer);
diff --git a/Common/DataModel/vtkEdgeTable.cxx b/Common/DataModel/vtkEdgeTable.cxx
index 3ac7797..3837d74 100644
--- a/Common/DataModel/vtkEdgeTable.cxx
+++ b/Common/DataModel/vtkEdgeTable.cxx
@@ -216,7 +216,7 @@ vtkIdType vtkEdgeTable::IsEdge(vtkIdType p1, vtkIdType p2)
     search = p1;
     }
 
-  if ( this->Table[index] == NULL )
+  if ( index > this->TableMaxId || this->Table[index] == NULL )
     {
     return (-1);
     }
@@ -258,7 +258,7 @@ void vtkEdgeTable::IsEdge(vtkIdType p1, vtkIdType p2, void* &ptr)
     search = p1;
     }
 
-  if ( this->Table[index] == NULL )
+  if ( index > this->TableMaxId || this->Table[index] == NULL )
     {
     ptr = NULL;
     }
@@ -529,10 +529,7 @@ vtkIdList **vtkEdgeTable::Resize(vtkIdType sz)
       {
       newAttributeArray[i] = NULL;
       }
-    if ( this->Attributes )
-      {
-      delete [] this->Attributes;
-      }
+    delete [] this->Attributes;
     this->Attributes = newAttributeArray;
     }
   else if ( this->StoreAttributes == 2 )
diff --git a/Common/DataModel/vtkEmptyCell.h b/Common/DataModel/vtkEmptyCell.h
index 85ac209..99c8341 100644
--- a/Common/DataModel/vtkEmptyCell.h
+++ b/Common/DataModel/vtkEmptyCell.h
@@ -68,8 +68,8 @@ public:
   virtual void InterpolateDerivs(double pcoords[3], double *derivs);
 
 protected:
-  vtkEmptyCell() {};
-  ~vtkEmptyCell() {};
+  vtkEmptyCell() {}
+  ~vtkEmptyCell() {}
 
 private:
   vtkEmptyCell(const vtkEmptyCell&);  // Not implemented.
diff --git a/Common/DataModel/vtkExtentTranslator.h b/Common/DataModel/vtkExtentTranslator.h
index d195406..177eab3 100644
--- a/Common/DataModel/vtkExtentTranslator.h
+++ b/Common/DataModel/vtkExtentTranslator.h
@@ -90,6 +90,17 @@ public:
   // use this to tell the translator which dimensions to split.
   void SetSplitPath(int len, int *splitpath);
 
+//BTX
+  // Don't change the numbers here - they are used in the code
+  // to indicate array indices.
+  enum Modes {
+    X_SLAB_MODE=0,
+    Y_SLAB_MODE=1,
+    Z_SLAB_MODE=2,
+    BLOCK_MODE= 3
+  };
+//ETX
+
 protected:
   vtkExtentTranslator();
   ~vtkExtentTranslator();
@@ -112,17 +123,6 @@ protected:
   int* SplitPath;
   int SplitLen;
 
-//BTX
-  // Don't change the numbers here - they are used in the code
-  // to indicate array indices.
-  enum Modes {
-    X_SLAB_MODE=0,
-    Y_SLAB_MODE=1,
-    Z_SLAB_MODE=2,
-    BLOCK_MODE= 3
-  };
-//ETX
-
 private:
   vtkExtentTranslator(const vtkExtentTranslator&);  // Not implemented.
   void operator=(const vtkExtentTranslator&);  // Not implemented.
diff --git a/Common/DataModel/vtkGenericAdaptorCell.cxx b/Common/DataModel/vtkGenericAdaptorCell.cxx
index 74a2cad..6656a25 100644
--- a/Common/DataModel/vtkGenericAdaptorCell.cxx
+++ b/Common/DataModel/vtkGenericAdaptorCell.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 #include "vtkGenericAdaptorCell.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkPoints.h"
 #include "vtkIncrementalPointLocator.h"
diff --git a/Common/DataModel/vtkGenericAttribute.cxx b/Common/DataModel/vtkGenericAttribute.cxx
index d4a384d..ba8c1a5 100644
--- a/Common/DataModel/vtkGenericAttribute.cxx
+++ b/Common/DataModel/vtkGenericAttribute.cxx
@@ -16,7 +16,7 @@
 // .SECTION Description
 
 #include "vtkGenericAttribute.h"
-#include <assert.h>
+#include <cassert>
 
 
 //---------------------------------------------------------------------------
diff --git a/Common/DataModel/vtkGenericAttributeCollection.cxx b/Common/DataModel/vtkGenericAttributeCollection.cxx
index aae286f..fb08465 100644
--- a/Common/DataModel/vtkGenericAttributeCollection.cxx
+++ b/Common/DataModel/vtkGenericAttributeCollection.cxx
@@ -20,7 +20,7 @@
 
 
 #include <vector>
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkGenericAttributeCollection);
 
diff --git a/Common/DataModel/vtkGenericCell.cxx b/Common/DataModel/vtkGenericCell.cxx
index d73fee5..147fcb1 100644
--- a/Common/DataModel/vtkGenericCell.cxx
+++ b/Common/DataModel/vtkGenericCell.cxx
@@ -36,6 +36,7 @@
 #include "vtkQuadraticEdge.h"
 #include "vtkQuadraticTriangle.h"
 #include "vtkQuadraticQuad.h"
+#include "vtkQuadraticPolygon.h"
 #include "vtkQuadraticTetra.h"
 #include "vtkQuadraticHexahedron.h"
 #include "vtkQuadraticWedge.h"
@@ -303,6 +304,9 @@ vtkCell *vtkGenericCell::InstantiateCell(int cellType)
   case VTK_QUADRATIC_QUAD:
     cell = vtkQuadraticQuad::New();
     break;
+  case VTK_QUADRATIC_POLYGON:
+    cell = vtkQuadraticPolygon::New();
+    break;
   case VTK_QUADRATIC_TETRA:
     cell = vtkQuadraticTetra::New();
     break;
@@ -398,3 +402,31 @@ void vtkGenericCell::PrintSelf(ostream& os, vtkIndent indent)
   this->Cell->PrintSelf(os,indent.GetNextIndent());
 }
 
+//----------------------------------------------------------------------------
+void vtkGenericCell::SetPoints(vtkPoints *points)
+{
+  if (points != this->Points)
+    {
+    this->Points->Delete();
+    this->Points = points;
+    this->Points->Register(this);
+    this->Cell->Points->Delete();
+    this->Cell->Points = points;
+    this->Cell->Points->Register(this);
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkGenericCell::SetPointIds(vtkIdList *pointIds)
+{
+  if (pointIds != this->PointIds)
+    {
+    this->PointIds->Delete();
+    this->PointIds = pointIds;
+    this->PointIds->Register(this);
+    this->Cell->PointIds->Delete();
+    this->Cell->PointIds = pointIds;
+    this->Cell->PointIds->Register(this);
+    }
+}
+
diff --git a/Common/DataModel/vtkGenericCell.h b/Common/DataModel/vtkGenericCell.h
index e546a0b..37340c6 100644
--- a/Common/DataModel/vtkGenericCell.h
+++ b/Common/DataModel/vtkGenericCell.h
@@ -41,6 +41,16 @@ public:
   void PrintSelf(ostream& os, vtkIndent indent);
 
   // Description:
+  // Set the points object to use for this cell. This updates the internal cell
+  // storage as well as the public member variable Points.
+  void SetPoints(vtkPoints *points);
+
+  // Description:
+  // Set the point ids to use for this cell. This updates the internal cell
+  // storage as well as the public member variable PointIds.
+  void SetPointIds(vtkIdList *pointIds);
+
+  // Description:
   // See the vtkCell API for descriptions of these methods.
   void ShallowCopy(vtkCell *c);
   void DeepCopy(vtkCell *c);
@@ -118,6 +128,7 @@ public:
   void SetCellTypeToQuadraticTriangle() {this->SetCellType(VTK_QUADRATIC_TRIANGLE);}
   void SetCellTypeToBiQuadraticTriangle() {this->SetCellType(VTK_BIQUADRATIC_TRIANGLE);}
   void SetCellTypeToQuadraticQuad() {this->SetCellType(VTK_QUADRATIC_QUAD);}
+  void SetCellTypeToQuadraticPolygon() {this->SetCellType(VTK_QUADRATIC_POLYGON);}
   void SetCellTypeToQuadraticTetra() {this->SetCellType(VTK_QUADRATIC_TETRA);}
   void SetCellTypeToQuadraticHexahedron() {this->SetCellType(VTK_QUADRATIC_HEXAHEDRON);}
   void SetCellTypeToQuadraticWedge() {this->SetCellType(VTK_QUADRATIC_WEDGE);}
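
A short sketch of what the new SetPoints()/SetPointIds() members are for
(illustrative; the helper and variable names are placeholders): they let a
vtkGenericCell share caller-owned containers instead of its internal ones, so
repeated lookups reuse the same storage.

    #include "vtkGenericCell.h"
    #include "vtkIdList.h"
    #include "vtkPoints.h"

    static void ShareCellStorage(vtkGenericCell *cell,
                                 vtkPoints *sharedPoints, vtkIdList *sharedIds)
    {
      cell->SetCellTypeToQuadraticPolygon(); // cell type added in this release
      cell->SetPoints(sharedPoints);         // updates Points and Cell->Points
      cell->SetPointIds(sharedIds);          // updates PointIds and Cell->PointIds
    }
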
diff --git a/Common/DataModel/vtkGenericCellTessellator.cxx b/Common/DataModel/vtkGenericCellTessellator.cxx
index ac69c7c..005ec5e 100644
--- a/Common/DataModel/vtkGenericCellTessellator.cxx
+++ b/Common/DataModel/vtkGenericCellTessellator.cxx
@@ -29,7 +29,7 @@
 #include "vtkGenericCellIterator.h"
 
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkMath.h"
 
diff --git a/Common/DataModel/vtkGenericDataSet.cxx b/Common/DataModel/vtkGenericDataSet.cxx
index c46c797..32cda72 100644
--- a/Common/DataModel/vtkGenericDataSet.cxx
+++ b/Common/DataModel/vtkGenericDataSet.cxx
@@ -23,7 +23,7 @@
 #include "vtkInformationVector.h"
 #include "vtkMath.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkCxxSetObjectMacro(vtkGenericDataSet,Tessellator,vtkGenericCellTessellator);
 
diff --git a/Common/DataModel/vtkGenericEdgeTable.cxx b/Common/DataModel/vtkGenericEdgeTable.cxx
index fd297db..a92a9a4 100644
--- a/Common/DataModel/vtkGenericEdgeTable.cxx
+++ b/Common/DataModel/vtkGenericEdgeTable.cxx
@@ -16,7 +16,7 @@
 #include "vtkObjectFactory.h"
 
 #include <vector>
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkGenericEdgeTable);
 
diff --git a/Common/DataModel/vtkGenericSubdivisionErrorMetric.cxx b/Common/DataModel/vtkGenericSubdivisionErrorMetric.cxx
index 7564a07..a019ac8 100644
--- a/Common/DataModel/vtkGenericSubdivisionErrorMetric.cxx
+++ b/Common/DataModel/vtkGenericSubdivisionErrorMetric.cxx
@@ -20,7 +20,7 @@
 #include "vtkGenericAdaptorCell.h"
 #include "vtkGenericDataSet.h"
 #include "vtkMath.h"
-#include <assert.h>
+#include <cassert>
 
 
 //-----------------------------------------------------------------------------
diff --git a/Common/DataModel/vtkGeometricErrorMetric.cxx b/Common/DataModel/vtkGeometricErrorMetric.cxx
index 06ec919..13a1de6 100644
--- a/Common/DataModel/vtkGeometricErrorMetric.cxx
+++ b/Common/DataModel/vtkGeometricErrorMetric.cxx
@@ -20,7 +20,7 @@
 #include "vtkGenericAdaptorCell.h"
 #include "vtkGenericDataSet.h"
 #include "vtkMath.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkGeometricErrorMetric);
 
diff --git a/Common/DataModel/vtkGraph.cxx b/Common/DataModel/vtkGraph.cxx
index 26e3eb2..6f4c9a1 100644
--- a/Common/DataModel/vtkGraph.cxx
+++ b/Common/DataModel/vtkGraph.cxx
@@ -44,7 +44,7 @@
 #include "vtkVariantArray.h"
 #include "vtkStringArray.h"
 
-#include <assert.h>
+#include <cassert>
 #include <vtksys/stl/algorithm>
 #include <vtksys/stl/set>
 #include <vtksys/stl/vector>
@@ -185,13 +185,16 @@ void vtkGraph::ComputeBounds()
 
   if ( this->Points )
     {
-    bounds = this->Points->GetBounds();
-    for (int i=0; i<6; i++)
+    if ( this->GetMTime() >= this->ComputeTime )
       {
-      this->Bounds[i] = bounds[i];
+      bounds = this->Points->GetBounds();
+      for (int i=0; i<6; i++)
+        {
+        this->Bounds[i] = bounds[i];
+        }
+      // TODO: how to compute the bounds for a distributed graph?
+      this->ComputeTime.Modified();
       }
-    // TODO: how to compute the bounds for a distributed graph?
-    this->ComputeTime.Modified();
     }
 }
 
diff --git a/Common/DataModel/vtkHyperOctree.cxx b/Common/DataModel/vtkHyperOctree.cxx
index f9e988b..7bfc1db 100644
--- a/Common/DataModel/vtkHyperOctree.cxx
+++ b/Common/DataModel/vtkHyperOctree.cxx
@@ -40,7 +40,7 @@
 //#include <set>
 #include <vector>
 
-#include <assert.h>
+#include <cassert>
 
 vtkInformationKeyMacro(vtkHyperOctree, LEVELS, Integer);
 vtkInformationKeyMacro(vtkHyperOctree, DIMENSION, Integer);
@@ -3117,33 +3117,13 @@ vtkHyperOctreeLightWeightCursor::vtkHyperOctreeLightWeightCursor()
 }
 
 //-----------------------------------------------------------------------------
-vtkHyperOctreeLightWeightCursor::~vtkHyperOctreeLightWeightCursor()
-{
-  this->Level = 0;
-  this->IsLeaf = 1;
-  this->Index = 0;
-  // I can't reference count because of the default copy constructor.
-  //if (this->Tree)
-  //  {
-  //  this->Tree->UnRegister(0);
-  //  }
-  this->Tree = 0;
-}
-
-
-//-----------------------------------------------------------------------------
 void vtkHyperOctreeLightWeightCursor::Initialize(vtkHyperOctree* tree)
 {
-  //if (this->Tree)
-  //  {
-  //  this->Tree->UnRegister(0);
-  //  }
   this->Tree = tree;
   if (tree == 0)
     {
     return;
     }
-  //this->Tree->Register(0);
 
   this->ToRoot();
 }
diff --git a/Common/DataModel/vtkHyperOctree.h b/Common/DataModel/vtkHyperOctree.h
index 2e948be..5476505 100644
--- a/Common/DataModel/vtkHyperOctree.h
+++ b/Common/DataModel/vtkHyperOctree.h
@@ -567,8 +567,6 @@ class VTKCOMMONDATAMODEL_EXPORT vtkHyperOctreeLightWeightCursor
 {
 public:
   vtkHyperOctreeLightWeightCursor();
-  ~vtkHyperOctreeLightWeightCursor();
-
   void Initialize(vtkHyperOctree* tree);
   void ToRoot();
   void ToChild(int child);
diff --git a/Common/DataModel/vtkHyperOctreePointsGrabber.cxx b/Common/DataModel/vtkHyperOctreePointsGrabber.cxx
index dace62b..a73ae17 100644
--- a/Common/DataModel/vtkHyperOctreePointsGrabber.cxx
+++ b/Common/DataModel/vtkHyperOctreePointsGrabber.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 #include "vtkHyperOctreePointsGrabber.h"
 
-#include <assert.h>
+#include <cassert>
 
 
 //-----------------------------------------------------------------------------
diff --git a/Common/DataModel/vtkHyperTree.cxx b/Common/DataModel/vtkHyperTree.cxx
index 9199a79..8cfc2f5 100644
--- a/Common/DataModel/vtkHyperTree.cxx
+++ b/Common/DataModel/vtkHyperTree.cxx
@@ -21,7 +21,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include <vector>
 #include <map>
 
-#include <assert.h>
+#include <cassert>
 
 // Description:
 // The template value N describes the number of children to binary and
@@ -801,7 +801,6 @@ public:
     for ( unsigned int i = 0; i < this->LeafParent.size(); ++ i )
       {
       os << this->LeafParent[i] << " ";
-      ++ i;
       }
     os << endl;
   }
diff --git a/Common/DataModel/vtkHyperTreeGrid.cxx b/Common/DataModel/vtkHyperTreeGrid.cxx
index 336b91b..aac438b 100644
--- a/Common/DataModel/vtkHyperTreeGrid.cxx
+++ b/Common/DataModel/vtkHyperTreeGrid.cxx
@@ -38,7 +38,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkVoxel.h"
 #include "vtkTimerLog.h"
 
-#include <assert.h>
+#include <cassert>
 
 
 vtkInformationKeyMacro( vtkHyperTreeGrid, LEVELS, Integer );
@@ -1804,13 +1804,6 @@ vtkHyperTreeGrid::vtkHyperTreeSimpleCursor::vtkHyperTreeSimpleCursor()
 }
 
 //-----------------------------------------------------------------------------
-// Destructor.
-vtkHyperTreeGrid::vtkHyperTreeSimpleCursor::~vtkHyperTreeSimpleCursor()
-{
-  this->Tree = 0;
-}
-
-//-----------------------------------------------------------------------------
 // Set the state back to the initial contructed state
 void vtkHyperTreeGrid::vtkHyperTreeSimpleCursor::Clear()
 {
diff --git a/Common/DataModel/vtkHyperTreeGrid.h b/Common/DataModel/vtkHyperTreeGrid.h
index 902dddb..795522b 100644
--- a/Common/DataModel/vtkHyperTreeGrid.h
+++ b/Common/DataModel/vtkHyperTreeGrid.h
@@ -433,7 +433,6 @@ public:
   {
   public:
     vtkHyperTreeSimpleCursor();
-    ~vtkHyperTreeSimpleCursor();
 
     void Clear();
     void Initialize( vtkHyperTreeGrid*, vtkIdType, int[3] );
diff --git a/Common/DataModel/vtkImageData.cxx b/Common/DataModel/vtkImageData.cxx
index 34947e3..06738f2 100644
--- a/Common/DataModel/vtkImageData.cxx
+++ b/Common/DataModel/vtkImageData.cxx
@@ -1551,7 +1551,7 @@ void vtkImageDataCastExecute(vtkImageData *inData, IT *inPtr,
   inData->GetContinuousIncrements(outExt, inIncX, inIncY, inIncZ);
   outData->GetContinuousIncrements(outExt, outIncX, outIncY, outIncZ);
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     for (idxY = 0; idxY <= maxY; idxY++)
diff --git a/Common/DataModel/vtkImplicitBoolean.cxx b/Common/DataModel/vtkImplicitBoolean.cxx
index 805d8ed..1e8c73f 100644
--- a/Common/DataModel/vtkImplicitBoolean.cxx
+++ b/Common/DataModel/vtkImplicitBoolean.cxx
@@ -243,8 +243,12 @@ void vtkImplicitBoolean::PrintSelf(ostream& os, vtkIndent indent)
     {
     os << "VTK_UNION\n";
     }
+  else if ( this->OperationType == VTK_UNION_OF_MAGNITUDES )
+    {
+    os << "VTK_UNION_OF_MAGNITUDES\n";
+    }
   else
     {
-    os << "VTK_INTERSECTION\n";
+    os << "VTK_DIFFERENCE\n";
     }
 }
diff --git a/Common/DataModel/vtkImplicitDataSet.cxx b/Common/DataModel/vtkImplicitDataSet.cxx
index f48c803..86a1fea 100644
--- a/Common/DataModel/vtkImplicitDataSet.cxx
+++ b/Common/DataModel/vtkImplicitDataSet.cxx
@@ -18,6 +18,7 @@
 #include "vtkDataArray.h"
 #include "vtkDataSet.h"
 #include "vtkGarbageCollector.h"
+#include "vtkMath.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
 
@@ -43,10 +44,7 @@ vtkImplicitDataSet::vtkImplicitDataSet()
 vtkImplicitDataSet::~vtkImplicitDataSet()
 {
   this->SetDataSet(NULL);
-  if ( this->Weights )
-    {
-    delete [] this->Weights;
-    }
+  delete [] this->Weights;
 }
 
 // Evaluate the implicit function. This returns the interpolated scalar value
@@ -61,10 +59,7 @@ double vtkImplicitDataSet::EvaluateFunction(double x[3])
 
   if ( this->DataSet->GetMaxCellSize() > this->Size )
     {
-    if ( this->Weights )
-      {
-      delete [] this->Weights;
-      }
+    delete [] this->Weights;
     this->Weights = new double[this->DataSet->GetMaxCellSize()];
     this->Size = this->DataSet->GetMaxCellSize();
     }
@@ -78,7 +73,7 @@ double vtkImplicitDataSet::EvaluateFunction(double x[3])
     }
 
   // Find the cell that contains xyz and get it
-  cell = this->DataSet->FindAndGetCell(x,NULL,-1,0.0,subId,pcoords,this->Weights);
+  cell = this->DataSet->FindAndGetCell(x,NULL,-1,VTK_DBL_EPSILON,subId,pcoords,this->Weights);
 
   if (cell)
     { // Interpolate the point data
@@ -122,10 +117,7 @@ void vtkImplicitDataSet::EvaluateGradient(double x[3], double n[3])
 
   if ( this->DataSet->GetMaxCellSize() > this->Size )
     {
-    if ( this->Weights )
-      {
-      delete [] this->Weights;
-      }
+    delete [] this->Weights;
     this->Weights = new double[this->DataSet->GetMaxCellSize()];
     this->Size = this->DataSet->GetMaxCellSize();
     }
@@ -143,7 +135,7 @@ void vtkImplicitDataSet::EvaluateGradient(double x[3], double n[3])
     }
 
   // Find the cell that contains xyz and get it
-  cell = this->DataSet->FindAndGetCell(x,NULL,-1,0.0,subId,pcoords,this->Weights);
+  cell = this->DataSet->FindAndGetCell(x,NULL,-1,VTK_DBL_EPSILON,subId,pcoords,this->Weights);
 
   if (cell)
     { // Interpolate the point data
diff --git a/Common/DataModel/vtkImplicitFunctionCollection.h b/Common/DataModel/vtkImplicitFunctionCollection.h
index 04a33ba..457e410 100644
--- a/Common/DataModel/vtkImplicitFunctionCollection.h
+++ b/Common/DataModel/vtkImplicitFunctionCollection.h
@@ -54,8 +54,8 @@ public:
   //ETX
 
 protected:
-  vtkImplicitFunctionCollection() {};
-  ~vtkImplicitFunctionCollection() {};
+  vtkImplicitFunctionCollection() {}
+  ~vtkImplicitFunctionCollection() {}
 
 
 private:
diff --git a/Common/DataModel/vtkImplicitHalo.cxx b/Common/DataModel/vtkImplicitHalo.cxx
index 3e717c7..89a5813 100644
--- a/Common/DataModel/vtkImplicitHalo.cxx
+++ b/Common/DataModel/vtkImplicitHalo.cxx
@@ -16,7 +16,7 @@
 
 #include "vtkMath.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkImplicitHalo);
 
diff --git a/Common/DataModel/vtkIncrementalOctreeNode.cxx b/Common/DataModel/vtkIncrementalOctreeNode.cxx
index 3e32b54..ba1c5a3 100644
--- a/Common/DataModel/vtkIncrementalOctreeNode.cxx
+++ b/Common/DataModel/vtkIncrementalOctreeNode.cxx
@@ -33,7 +33,7 @@ vtkCxxSetObjectMacro( vtkIncrementalOctreeNode, Parent, vtkIncrementalOctreeNode
 // function vtkIncreemntalOctreeNode::InsertPoint(). The caller inserts the
 // point index to the vtkIdList maintained by a leaf node, without inserting
 // the point (coordinate) to vtkPoints at all.
-void _OctreeNodeGetPointId( vtkPoints * vtkNotUsed( points ),
+static void OctreeNodeGetPointId( vtkPoints * vtkNotUsed( points ),
   vtkIdType * vtkNotUsed( pntIdx ), const double * vtkNotUsed( coords ) )
 {
   // the 3D point coordinate is not inserted to vtkPoints at all
@@ -42,16 +42,16 @@ void _OctreeNodeGetPointId( vtkPoints * vtkNotUsed( points ),
 //----------------------------------------------------------------------------
 // Insert a point, with a specified point index, to a vtkPoints object by
 // calling vtkPoints::InsertPoint().
-void _OctreeNodeInsertPoint( vtkPoints * points, vtkIdType * pntIdx,
-                             const double * coords )
+static void OctreeNodeInsertPoint( vtkPoints * points, vtkIdType * pntIdx,
+                                    const double * coords )
 {
   points->InsertPoint( *pntIdx, coords );
 }
 
 //----------------------------------------------------------------------------
 // Insert a point to a vtkPoints by calling vtkPoints::InsertNextPoint().
-void _OctreeNodeInsertNextPoint( vtkPoints * points, vtkIdType * pntIdx,
-                                 const double * coords )
+static void OctreeNodeInsertNextPoint( vtkPoints * points, vtkIdType * pntIdx,
+                                        const double * coords )
 {
   *pntIdx = points->InsertNextPoint( coords );
 }
@@ -63,9 +63,9 @@ typedef void ( * OCTREENODE_INSERTPOINT_FUNCTION )
 
 static OCTREENODE_INSERTPOINT_FUNCTION OCTREENODE_INSERTPOINT[3] =
 {
-  _OctreeNodeGetPointId,
-  _OctreeNodeInsertPoint,
-  _OctreeNodeInsertNextPoint
+  OctreeNodeGetPointId,
+  OctreeNodeInsertPoint,
+  OctreeNodeInsertNextPoint
 };
 
 // ---------------------------------------------------------------------------
diff --git a/Common/DataModel/vtkKdNode.cxx b/Common/DataModel/vtkKdNode.cxx
index 174f52e..02aeb35 100644
--- a/Common/DataModel/vtkKdNode.cxx
+++ b/Common/DataModel/vtkKdNode.cxx
@@ -73,7 +73,7 @@ void vtkKdNode::SetBounds(double x1,double x2,double y1,double y2,double z1,
 }
 
 // ----------------------------------------------------------------------------
-void vtkKdNode::SetMinBounds(double *b)
+void vtkKdNode::SetMinBounds(const double *b)
 {
   this->Min[0] = b[0];
   this->Min[1] = b[1];
@@ -81,7 +81,7 @@ void vtkKdNode::SetMinBounds(double *b)
 }
 
 // ----------------------------------------------------------------------------
-void vtkKdNode::SetMaxBounds(double *b)
+void vtkKdNode::SetMaxBounds(const double *b)
 {
   this->Max[0] = b[0];
   this->Max[1] = b[1];
@@ -89,7 +89,7 @@ void vtkKdNode::SetMaxBounds(double *b)
 }
 
 // ----------------------------------------------------------------------------
-void vtkKdNode::SetMinDataBounds(double *b)
+void vtkKdNode::SetMinDataBounds(const double *b)
 {
   this->MinVal[0] = b[0];
   this->MinVal[1] = b[1];
@@ -97,7 +97,7 @@ void vtkKdNode::SetMinDataBounds(double *b)
 }
 
 // ----------------------------------------------------------------------------
-void vtkKdNode::SetMaxDataBounds(double *b)
+void vtkKdNode::SetMaxDataBounds(const double *b)
 {
   this->MaxVal[0] = b[0];
   this->MaxVal[1] = b[1];
diff --git a/Common/DataModel/vtkKdNode.h b/Common/DataModel/vtkKdNode.h
index b9727c8..2321f30 100644
--- a/Common/DataModel/vtkKdNode.h
+++ b/Common/DataModel/vtkKdNode.h
@@ -65,7 +65,7 @@ public:
   //   Set/Get the bounds of the spatial region represented by this node.
   //   Caller allocates storage for 6-vector in GetBounds.
   void SetBounds(double x1,double x2,double y1,double y2,double z1,double z2);
-  void SetBounds(double b[6])
+  void SetBounds(const double b[6])
     {
     this->SetBounds(b[0], b[1], b[2], b[3], b[4], b[5]);
     }
@@ -91,11 +91,11 @@ public:
 
   // Description:
   //   Set the xmin, ymin and zmin value of the bounds of this region
-  void SetMinBounds(double *mb);
+  void SetMinBounds(const double *mb);
 
   // Description:
   //   Set the xmax, ymax and zmax value of the bounds of this region
-  void SetMaxBounds(double *mb);
+  void SetMaxBounds(const double *mb);
 
   // Description:
   //   Get a pointer to the 3 data bound minima (xmin, ymin and zmin) or the
@@ -106,12 +106,12 @@ public:
   // Description:
   //   Set the xmin, ymin and zmin value of the bounds of this
   //   data within this region
-  void SetMinDataBounds(double *mb);
+  void SetMinDataBounds(const double *mb);
 
   // Description:
   //   Set the xmax, ymax and zmax value of the bounds of this
   //   data within this region
-  void SetMaxDataBounds(double *mb);
+  void SetMaxDataBounds(const double *mb);
 
   // Description:
   //   Set/Get the ID associated with the region described by this node.  If
diff --git a/Common/DataModel/vtkKdTree.cxx b/Common/DataModel/vtkKdTree.cxx
index a445f9f..cd2532e 100644
--- a/Common/DataModel/vtkKdTree.cxx
+++ b/Common/DataModel/vtkKdTree.cxx
@@ -266,10 +266,7 @@ void vtkKdTree::DeleteCellLists()
   int i;
   int num = this->CellList.nRegions;
 
-  if (this->CellList.regionIds)
-    {
-    delete [] this->CellList.regionIds;
-    }
+  delete [] this->CellList.regionIds;
 
   if (this->CellList.cells)
     {
@@ -313,11 +310,8 @@ vtkKdTree::~vtkKdTree()
 
   this->DeleteCellLists();
 
-  if (this->CellRegionList)
-    {
-    delete [] this->CellRegionList;
-    this->CellRegionList = NULL;
-    }
+  delete [] this->CellRegionList;
+  this->CellRegionList = NULL;
 
   if (this->TimerLog)
     {
@@ -1917,14 +1911,8 @@ vtkIdTypeArray *vtkKdTree::BuildMapForDuplicatePoints(float tolerance = 0.0)
 
   if (!idCount || !uniqueFound)
     {
-    if (idCount)
-      {
-      delete [] idCount;
-      }
-    if (uniqueFound)
-      {
-      delete [] uniqueFound;
-      }
+    delete [] idCount;
+    delete [] uniqueFound;
 
     vtkErrorMacro(<< "vtkKdTree::BuildMapForDuplicatePoints memory allocation");
     return NULL;
@@ -3138,40 +3126,26 @@ void vtkKdTree::FreeSearchStructure()
     this->Top->Delete();
     this->Top = NULL;
     }
-  if (this->RegionList)
-    {
-    delete [] this->RegionList;
-    this->RegionList = NULL;
-    }
+
+  delete [] this->RegionList;
+  this->RegionList = NULL;
 
   this->NumberOfRegions = 0;
   this->SetActualLevel();
 
   this->DeleteCellLists();
 
-  if (this->CellRegionList)
-    {
-    delete [] this->CellRegionList;
-    this->CellRegionList = NULL;
-    }
+  delete [] this->CellRegionList;
+  this->CellRegionList = NULL;
 
-  if (this->LocatorPoints)
-    {
-    delete [] this->LocatorPoints;
-    this->LocatorPoints = NULL;
-    }
+  delete [] this->LocatorPoints;
+  this->LocatorPoints = NULL;
 
-  if (this->LocatorIds)
-    {
-    delete [] this->LocatorIds;
-    this->LocatorIds = NULL;
-    }
+  delete [] this->LocatorIds;
+  this->LocatorIds = NULL;
 
-  if (this->LocatorRegionLocation)
-    {
-    delete [] this->LocatorRegionLocation;
-    this->LocatorRegionLocation = NULL;
-    }
+  delete [] this->LocatorRegionLocation;
+  this->LocatorRegionLocation = NULL;
 }
 
 //----------------------------------------------------------------------------
@@ -3815,14 +3789,8 @@ void vtkKdTree::CreateCellLists(vtkDataSet *set, int *regionList, int listSize)
       }
     }
 
-  if (listptr)
-    {
-    delete [] listptr;
-    }
-  if (idlist)
-    {
-    delete [] idlist;
-    }
+  delete [] listptr;
+  delete [] idlist;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Common/DataModel/vtkLine.h b/Common/DataModel/vtkLine.h
index 1eb87b6..f80486e 100644
--- a/Common/DataModel/vtkLine.h
+++ b/Common/DataModel/vtkLine.h
@@ -147,7 +147,7 @@ public:
 
 protected:
   vtkLine();
-  ~vtkLine() {};
+  ~vtkLine() {}
 
 private:
   vtkLine(const vtkLine&);  // Not implemented.
diff --git a/Common/DataModel/vtkMappedUnstructuredGrid.h b/Common/DataModel/vtkMappedUnstructuredGrid.h
new file mode 100644
index 0000000..8387f44
--- /dev/null
+++ b/Common/DataModel/vtkMappedUnstructuredGrid.h
@@ -0,0 +1,294 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMappedUnstructuredGrid.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkMappedUnstructuredGrid - Allows datasets with arbitrary storage
+// layouts to be used with VTK.
+//
+// .SECTION Description
+// This class fulfills the vtkUnstructuredGridBase API while delegating to an
+// arbitrary implementation of the dataset topology. The purpose of
+// vtkMappedUnstructuredGrid is to allow external data structures to be used
+// directly in a VTK pipeline, e.g. for in-situ analysis of a running
+// simulation.
+//
+// When introducing an external data structure into VTK, there are three principal
+// components of the dataset to consider:
+// - Points
+// - Cells (topology)
+// - Point/Cell attributes
+//
+// Points and attributes can be handled by subclassing vtkMappedDataArray and
+// implementing that interface to adapt the external data structures into VTK.
+// The vtkMappedDataArray subclasses can then be used as the vtkPoints's Data
+// member (for points/nodes) or added directly to vtkPointData, vtkCellData, or
+// vtkFieldData for attribute information. Filters used in the pipeline will
+// need to be modified to remove calls to vtkDataArray::GetVoidPointer and use
+// vtkTypedDataArrayIterators instead.
+// See vtkDataArrayIteratorMacro.h for a (relatively) simple way to write
+// processing algorithms that will use efficient raw memory accesses for
+// standard VTK data arrays and safe iterators for non-standard data arrays in a
+// single templated implementation.
+//
+// Introducing an arbitrary topology implementation into VTK requires the use of
+// the vtkMappedUnstructuredGrid class. Unlike the data array counterpart, the
+// mapped unstructured grid is not subclassed, but rather takes an adaptor
+// class as a template argument. This is to allow cheap shallow copies of the
+// data by passing the reference-counted implementation object to new instances
+// of vtkMappedUnstructuredGrid.
+//
+// The implementation class should derive from vtkObject (for reference
+// counting) and implement the usual vtkObject API requirements, such as a
+// static New() method and PrintSelf function. The following methods must also
+// be implemented:
+// - vtkIdType GetNumberOfCells()
+// - int GetCellType(vtkIdType cellId)
+// - void GetCellPoints(vtkIdType cellId, vtkIdList *ptIds)
+// - void GetPointCells(vtkIdType ptId, vtkIdList *cellIds)
+// - int GetMaxCellSize()
+// - void GetIdsOfCellsOfType(int type, vtkIdTypeArray *array)
+// - int IsHomogeneous()
+// - void Allocate(vtkIdType numCells, int extSize = 1000)
+// - vtkIdType InsertNextCell(int type, vtkIdList *ptIds)
+// - vtkIdType InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds)
+// - vtkIdType InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds,
+//                            vtkIdType nfaces, vtkIdType *faces)
+// - void ReplaceCell(vtkIdType cellId, int npts, vtkIdType *pts)
+//
+// These methods should provide the same functionality as defined in
+// vtkUnstructuredGrid. See that class's documentation for more information.
+//
+// Note that since the implementation class is used as a compile-time template
+// parameter in vtkMappedUnstructuredGrid, the above methods do not need to be
+// virtual. The compiler will statically bind the calls, making dynamic vtable
+// lookups unnecessary and giving a slight performance boost.
+//
+// Adapting a filter or algorithm to safely traverse the
+// vtkMappedUnstructuredGrid's topology requires removing calls to the following
+// implementation-dependent vtkUnstructuredGrid methods:
+// - vtkUnstructuredGrid::GetCellTypesArray()
+// - vtkUnstructuredGrid::GetCellLocationsArray()
+// - vtkUnstructuredGrid::GetCellLinks()
+// - vtkUnstructuredGrid::GetCells()
+// Access to the values returned by these methods should be replaced by the
+// equivalent random-access lookup methods in the vtkUnstructuredGridBase API,
+// or use vtkCellIterator (see vtkDataSet::NewCellIterator) for sequential
+// access.
+//
+// A custom vtkCellIterator implementation may be specified for a particular
+// vtkMappedUnstructuredGrid as the second template parameter. By default,
+// vtkMappedUnstructuredGridCellIterator will be used, which increments an
+// internal cell id counter and performs random-access lookup as needed. More
+// efficient implementations may be used with data structures better suited for
+// sequential access; see vtkUnstructuredGridCellIterator for an example.
+//
+// A set of four macros is provided to generate a concrete subclass of
+// vtkMappedUnstructuredGrid with a specified implementation, cell iterator,
+// and export declaration. They are:
+// - vtkMakeMappedUnstructuredGrid(_className, _impl)
+//   - Create a subclass of vtkMappedUnstructuredGrid using the _impl implementation
+//     that is named _className.
+// - vtkMakeMappedUnstructuredGridWithIter(_className, _impl, _cIter)
+//   - Create a subclass of vtkMappedUnstructuredGrid using the _impl implementation
+//     and _cIter vtkCellIterator that is named _className.
+// - vtkMakeExportedMappedUnstructuredGrid(_className, _impl, _exportDecl)
+//   - Create a subclass of vtkMappedUnstructuredGrid using the _impl implementation
+//     that is named _className. _exportDecl is used to decorate the class
+//     declaration.
+// - vtkMakeExportedMappedUnstructuredGridWithIter(_className, _impl, _cIter, _exportDecl)
+//   - Create a subclass of vtkMappedUnstructuredGrid using the _impl implementation
+//     and _cIter vtkCellIterator that is named _className. _exportDecl is used
+//     to decorate the class declaration.
+//
+// To instantiate a vtkMappedUnstructuredGrid subclass created by the above
+// macro, the following pattern is encouraged:
+//
+// @code
+// MyGrid.h:
+// ----------------------------------------------------------------------
+// class MyGridImplementation : public vtkObject
+// {
+// public:
+//   ... (vtkObject required API) ...
+//   ... (vtkMappedUnstructuredGrid Implementation required API) ...
+//   void SetImplementationDetails(...raw data from external source...);
+// };
+//
+// vtkMakeMappedUnstructuredGrid(MyGrid, MyGridImplementation)
+//
+// SomeSource.cxx
+// ----------------------------------------------------------------------
+// vtkNew<MyGrid> grid;
+// grid->GetImplementation()->SetImplementationDetails(...);
+// /* grid is now ready to use. */
+// @endcode
+//
+// The vtkCPExodusIIElementBlock class provides an example of
+// vtkMappedUnstructuredGrid usage, adapting the Exodus II data structures for
+// the VTK pipeline.
+
+#ifndef __vtkMappedUnstructuredGrid_h
+#define __vtkMappedUnstructuredGrid_h
+
+#include "vtkUnstructuredGridBase.h"
+
+#include "vtkMappedUnstructuredGridCellIterator.h" // For default cell iterator
+#include "vtkNew.h" // For vtkNew
+#include "vtkSmartPointer.h" // For vtkSmartPointer
+#include "vtkTypeTemplate.h" // For vtkTypeTemplate
+
+template <class Implementation,
+          class CellIterator = vtkMappedUnstructuredGridCellIterator<Implementation> >
+class vtkMappedUnstructuredGrid:
+    public vtkTypeTemplate<vtkMappedUnstructuredGrid<Implementation, CellIterator>,
+                           vtkUnstructuredGridBase>
+{
+public:
+  typedef
+    vtkTypeTemplate<vtkMappedUnstructuredGrid<Implementation, CellIterator>,
+      vtkUnstructuredGridBase> Superclass;
+  typedef Implementation ImplementationType;
+  typedef CellIterator CellIteratorType;
+
+  // Virtuals from various base classes:
+  void PrintSelf(ostream &os, vtkIndent indent);
+  void CopyStructure(vtkDataSet *pd);
+  void ShallowCopy(vtkDataObject *src);
+  vtkIdType GetNumberOfCells();
+  vtkCell* GetCell(vtkIdType cellId);
+  void GetCell(vtkIdType cellId, vtkGenericCell *cell);
+  int GetCellType(vtkIdType cellId);
+  void GetCellPoints(vtkIdType cellId, vtkIdList *ptIds);
+  vtkCellIterator* NewCellIterator();
+  void GetPointCells(vtkIdType ptId, vtkIdList *cellIds);
+  int GetMaxCellSize();
+  void GetIdsOfCellsOfType(int type, vtkIdTypeArray *array);
+  int IsHomogeneous();
+  void Allocate(vtkIdType numCells, int extSize = 1000);
+  vtkIdType InsertNextCell(int type, vtkIdList *ptIds);
+  vtkIdType InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds);
+  vtkIdType InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds,
+                           vtkIdType nfaces, vtkIdType *faces);
+  void ReplaceCell(vtkIdType cellId, int npts, vtkIdType *pts);
+  unsigned long GetMTime();
+
+  void SetImplementation(ImplementationType *impl);
+  ImplementationType *GetImplementation();
+
+protected:
+  vtkMappedUnstructuredGrid();
+  ~vtkMappedUnstructuredGrid();
+
+  // For convenience...
+  typedef vtkMappedUnstructuredGrid<Implementation, CellIterator> ThisType;
+
+  vtkSmartPointer<ImplementationType> Impl;
+
+private:
+  vtkMappedUnstructuredGrid(const vtkMappedUnstructuredGrid &); // Not implemented.
+  void operator=(const vtkMappedUnstructuredGrid &);   // Not implemented.
+
+  vtkNew<vtkGenericCell> TempCell;
+};
+
+#include "vtkMappedUnstructuredGrid.txx"
+
+// We need to fake the superclass for the wrappers, otherwise they will choke on
+// the template:
+#ifndef __WRAP__
+
+#define vtkMakeExportedMappedUnstructuredGrid(_className, _impl, _exportDecl) \
+class _exportDecl _className : \
+    public vtkMappedUnstructuredGrid<_impl> \
+{ \
+public: \
+  vtkTypeMacro(_className, \
+               vtkMappedUnstructuredGrid<_impl>) \
+  static _className* New(); \
+protected: \
+  _className() \
+  { \
+    _impl *i = _impl::New(); \
+    this->SetImplementation(i); \
+    i->Delete(); \
+  } \
+  ~_className() {} \
+private: \
+  _className(const _className&); \
+  void operator=(const _className&); \
+};
+
+#define vtkMakeExportedMappedUnstructuredGridWithIter(_className, _impl, _cIter, _exportDecl) \
+class _exportDecl _className : \
+  public vtkMappedUnstructuredGrid<_impl, _cIter> \
+{ \
+public: \
+  vtkTypeMacro(_className, \
+               vtkMappedUnstructuredGrid<_impl, _cIter>) \
+  static _className* New(); \
+protected: \
+  _className() \
+  { \
+    _impl *i = _impl::New(); \
+    this->SetImplementation(i); \
+    i->Delete(); \
+  } \
+  ~_className() {} \
+private: \
+  _className(const _className&); \
+  void operator=(const _className&); \
+};
+
+#else // __WRAP__
+
+#define vtkMakeExportedMappedUnstructuredGrid(_className, _impl, _exportDecl) \
+  class _exportDecl _className : \
+  public vtkUnstructuredGridBase \
+{ \
+public: \
+  vtkTypeMacro(_className, vtkUnstructuredGridBase) \
+  static _className* New(); \
+protected: \
+  _className() {} \
+  ~_className() {} \
+private: \
+  _className(const _className&); \
+  void operator=(const _className&); \
+};
+
+#define vtkMakeExportedMappedUnstructuredGridWithIter(_className, _impl, _cIter, _exportDecl) \
+  class _exportDecl _className : \
+  public vtkUnstructuredGridBase \
+{ \
+public: \
+  vtkTypeMacro(_className, vtkUnstructuredGridBase) \
+  static _className* New(); \
+protected: \
+  _className() {} \
+  ~_className() {} \
+private: \
+  _className(const _className&); \
+  void operator=(const _className&); \
+};
+
+#endif // __WRAP__
+
+#define vtkMakeMappedUnstructuredGrid(_className, _impl) \
+  vtkMakeExportedMappedUnstructuredGrid(_className, _impl, )
+
+#define vtkMakeMappedUnstructuredGridWithIter(_className, _impl, _cIter) \
+  vtkMakeExportedMappedUnstructuredGridWithIter(_className, _impl, _cIter, )
+
+#endif //__vtkMappedUnstructuredGrid_h
+
+// VTK-HeaderTest-Exclude: vtkMappedUnstructuredGrid.h
diff --git a/Common/DataModel/vtkMappedUnstructuredGrid.txx b/Common/DataModel/vtkMappedUnstructuredGrid.txx
new file mode 100644
index 0000000..56cf104
--- /dev/null
+++ b/Common/DataModel/vtkMappedUnstructuredGrid.txx
@@ -0,0 +1,233 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMappedUnstructuredGrid.txx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkMappedUnstructuredGrid.h"
+
+#include "vtkGenericCell.h"
+#include <algorithm>
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::PrintSelf(ostream &os, vtkIndent indent)
+{
+  os << indent << "Implementation:";
+  if (this->Impl == NULL)
+    {
+    os << " NULL" << endl;
+    }
+  else
+    {
+    os << endl;
+    this->Impl->PrintSelf(os, indent.GetNextIndent());
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::CopyStructure(vtkDataSet *ds)
+{
+  if (ThisType *grid = ThisType::SafeDownCast(ds))
+    {
+    this->SetImplementation(grid->GetImplementation());
+    }
+
+  this->Superclass::CopyStructure(ds);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::ShallowCopy(vtkDataObject *src)
+{
+  if (ThisType *grid = ThisType::SafeDownCast(src))
+    {
+    this->SetImplementation(grid->GetImplementation());
+    }
+
+  this->Superclass::ShallowCopy(src);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+vtkIdType vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetNumberOfCells()
+{
+  return this->Impl->GetNumberOfCells();
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+vtkCell* vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetCell(vtkIdType cellId)
+{
+  this->GetCell(cellId, this->TempCell.GetPointer());
+  return this->TempCell.GetPointer();
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetCell(vtkIdType cellId, vtkGenericCell *cell)
+{
+  cell->SetCellType(this->Impl->GetCellType(cellId));
+  this->Impl->GetCellPoints(cellId, cell->PointIds);
+  this->Points->GetPoints(cell->PointIds, cell->Points);
+
+  if (cell->RequiresInitialization())
+    {
+    cell->Initialize();
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+int vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetCellType(vtkIdType cellId)
+{
+  return this->Impl->GetCellType(cellId);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetCellPoints(vtkIdType cellId, vtkIdList *ptIds)
+{
+  this->Impl->GetCellPoints(cellId, ptIds);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+vtkCellIterator *vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::NewCellIterator()
+{
+  CellIteratorType *cellIterator = CellIteratorType::New();
+  cellIterator->SetMappedUnstructuredGrid(this);
+  return cellIterator;
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetPointCells(vtkIdType ptId, vtkIdList *cellIds)
+{
+  this->Impl->GetPointCells(ptId, cellIds);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+int vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetMaxCellSize()
+{
+  return this->Impl->GetMaxCellSize();
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetIdsOfCellsOfType(int type, vtkIdTypeArray *array)
+{
+  this->Impl->GetIdsOfCellsOfType(type, array);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+int vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::IsHomogeneous()
+{
+  return this->Impl->IsHomogeneous();
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::Allocate(vtkIdType numCells, int)
+{
+  return this->Impl->Allocate(numCells);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+vtkIdType vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::InsertNextCell(int type, vtkIdList *ptIds)
+{
+  return this->Impl->InsertNextCell(type, ptIds);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+vtkIdType vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds)
+{
+  return this->Impl->InsertNextCell(type, npts, ptIds);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+vtkIdType vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds, vtkIdType nfaces,
+                 vtkIdType *faces)
+{
+  return this->Impl->InsertNextCell(type, npts, ptIds, nfaces, faces);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::ReplaceCell(vtkIdType cellId, int npts, vtkIdType *pts)
+{
+  this->Impl->ReplaceCell(cellId, npts, pts);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+unsigned long vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetMTime()
+{
+  return std::max(this->MTime.GetMTime(), this->Impl->GetMTime());
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+vtkMappedUnstructuredGrid<Implementation, CellIterator>::
+vtkMappedUnstructuredGrid()
+  : Impl(NULL)
+{
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+vtkMappedUnstructuredGrid<Implementation, CellIterator>::
+~vtkMappedUnstructuredGrid()
+{
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+void vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::SetImplementation(Implementation *impl)
+{
+  this->Impl = impl;
+  this->Modified();
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation, class CellIterator>
+Implementation* vtkMappedUnstructuredGrid<Implementation, CellIterator>
+::GetImplementation()
+{
+  return this->Impl;
+}
diff --git a/Common/DataModel/vtkMappedUnstructuredGridCellIterator.h b/Common/DataModel/vtkMappedUnstructuredGridCellIterator.h
new file mode 100644
index 0000000..3164aee
--- /dev/null
+++ b/Common/DataModel/vtkMappedUnstructuredGridCellIterator.h
@@ -0,0 +1,75 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMappedUnstructuredGridCellIterator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkMappedUnstructuredGridCellIterator - Default cell iterator for
+// vtkMappedUnstructuredGrid.
+//
+// .SECTION Description
+// This class is used by default for vtkMappedUnstructuredGrid instances. It
+// uses random access for data lookups. Custom vtkCellIterator implementations
+// should be used instead when random-access is inefficient.
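+//
+// For illustration, a sketch of what vtkMappedUnstructuredGrid::NewCellIterator()
+// does with this class (MyGridImplementation is a hypothetical implementation
+// type, and grid is a vtkMappedUnstructuredGrid built on it):
+//
+//   vtkMappedUnstructuredGridCellIterator<MyGridImplementation> *iter =
+//     vtkMappedUnstructuredGridCellIterator<MyGridImplementation>::New();
+//   iter->SetMappedUnstructuredGrid(grid);
+//   // ... traverse through the vtkCellIterator interface, then iter->Delete().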
+
+#ifndef __vtkMappedUnstructuredGridCellIterator_h
+#define __vtkMappedUnstructuredGridCellIterator_h
+
+#include "vtkCellIterator.h"
+#include "vtkSmartPointer.h" // For vtkSmartPointer
+#include "vtkTypeTemplate.h" // For vtkTypeTemplate
+
+template <class Implementation, class CellIterator>
+class vtkMappedUnstructuredGrid;
+
+template <class Implementation>
+class vtkMappedUnstructuredGridCellIterator :
+    public vtkTypeTemplate<vtkMappedUnstructuredGridCellIterator<Implementation>,
+      vtkCellIterator>
+{
+public:
+  typedef Implementation ImplementationType;
+  typedef vtkMappedUnstructuredGridCellIterator<ImplementationType> ThisType;
+  static vtkMappedUnstructuredGridCellIterator<ImplementationType> *New();
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  void SetMappedUnstructuredGrid(
+      vtkMappedUnstructuredGrid<ImplementationType, ThisType> *grid);
+
+  bool IsDoneWithTraversal();
+  vtkIdType GetCellId();
+
+protected:
+  vtkMappedUnstructuredGridCellIterator();
+  ~vtkMappedUnstructuredGridCellIterator();
+
+  void ResetToFirstCell();
+  void IncrementToNextCell();
+  void FetchCellType();
+  void FetchPointIds();
+  void FetchPoints();
+
+private:
+  vtkMappedUnstructuredGridCellIterator(const vtkMappedUnstructuredGridCellIterator &); // Not implemented.
+  void operator=(const vtkMappedUnstructuredGridCellIterator &);   // Not implemented.
+
+  vtkSmartPointer<ImplementationType> Impl;
+  vtkSmartPointer<vtkPoints> GridPoints;
+  vtkIdType CellId;
+  vtkIdType NumberOfCells;
+};
+
+#include "vtkMappedUnstructuredGridCellIterator.txx"
+
+#endif //__vtkMappedUnstructuredGridCellIterator_h
+
+// VTK-HeaderTest-Exclude: vtkMappedUnstructuredGridCellIterator.h
diff --git a/Common/DataModel/vtkMappedUnstructuredGridCellIterator.txx b/Common/DataModel/vtkMappedUnstructuredGridCellIterator.txx
new file mode 100644
index 0000000..cb07fad
--- /dev/null
+++ b/Common/DataModel/vtkMappedUnstructuredGridCellIterator.txx
@@ -0,0 +1,142 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMappedUnstructuredGridCellIterator.txx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkMappedUnstructuredGridCellIterator.h"
+
+#include "vtkMappedUnstructuredGrid.h"
+#include "vtkObjectFactory.h"
+#include "vtkPoints.h"
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+vtkMappedUnstructuredGridCellIterator<Implementation>*
+vtkMappedUnstructuredGridCellIterator<Implementation>::New()
+{
+  VTK_STANDARD_NEW_BODY(ThisType)
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+void vtkMappedUnstructuredGridCellIterator<Implementation>
+::PrintSelf(ostream &os, vtkIndent indent)
+{
+  os << indent << "Implementation:";
+  if (this->Impl == NULL)
+    {
+    os << " NULL" << endl;
+    }
+  else
+    {
+    os << endl;
+    this->Impl->PrintSelf(os, indent.GetNextIndent());
+    }
+
+  os << indent << "GridPoints:";
+  if (this->GridPoints == NULL)
+    {
+    os << " NULL" << endl;
+    }
+  else
+    {
+    os << endl;
+    this->GridPoints->PrintSelf(os, indent.GetNextIndent());
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+bool vtkMappedUnstructuredGridCellIterator<Implementation>
+::IsDoneWithTraversal()
+{
+  return this->CellId >= this->NumberOfCells;
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+vtkIdType vtkMappedUnstructuredGridCellIterator<Implementation>
+::GetCellId()
+{
+  return this->CellId;
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+vtkMappedUnstructuredGridCellIterator<Implementation>
+::vtkMappedUnstructuredGridCellIterator()
+  : Impl(NULL),
+    GridPoints(NULL),
+    CellId(0),
+    NumberOfCells(0)
+{
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+vtkMappedUnstructuredGridCellIterator<Implementation>
+::~vtkMappedUnstructuredGridCellIterator()
+{
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+void vtkMappedUnstructuredGridCellIterator<Implementation>
+::ResetToFirstCell()
+{
+  this->CellId = 0;
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+void vtkMappedUnstructuredGridCellIterator<Implementation>
+::IncrementToNextCell()
+{
+  ++this->CellId;
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+void vtkMappedUnstructuredGridCellIterator<Implementation>
+::FetchCellType()
+{
+  this->CellType = this->Impl->GetCellType(this->CellId);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+void vtkMappedUnstructuredGridCellIterator<Implementation>
+::FetchPointIds()
+{
+  this->Impl->GetCellPoints(this->CellId, this->PointIds);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+void vtkMappedUnstructuredGridCellIterator<Implementation>
+::FetchPoints()
+{
+  this->GridPoints->GetPoints(this->GetPointIds(), this->Points);
+}
+
+//------------------------------------------------------------------------------
+template <class Implementation>
+void vtkMappedUnstructuredGridCellIterator<Implementation>
+::SetMappedUnstructuredGrid(
+    vtkMappedUnstructuredGrid<ImplementationType, ThisType> *grid)
+{
+  this->Impl = grid->GetImplementation();
+  this->GridPoints = grid->GetPoints();
+  this->CellId = 0;
+  this->NumberOfCells = grid->GetNumberOfCells();
+}
diff --git a/Common/DataModel/vtkMergePoints.h b/Common/DataModel/vtkMergePoints.h
index f4103a5..db711e7 100644
--- a/Common/DataModel/vtkMergePoints.h
+++ b/Common/DataModel/vtkMergePoints.h
@@ -52,8 +52,8 @@ public:
   int InsertUniquePoint(const double x[3], vtkIdType &ptId);
 
 protected:
-  vtkMergePoints() {};
-  ~vtkMergePoints() {};
+  vtkMergePoints() {}
+  ~vtkMergePoints() {}
 
 private:
   vtkMergePoints(const vtkMergePoints&);  // Not implemented.
diff --git a/Common/DataModel/vtkMolecule.cxx b/Common/DataModel/vtkMolecule.cxx
index 972605d..4fc0682 100644
--- a/Common/DataModel/vtkMolecule.cxx
+++ b/Common/DataModel/vtkMolecule.cxx
@@ -26,7 +26,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkVector.h"
 #include "vtkVectorOperators.h"
 
-#include <assert.h>
+#include <cassert>
 
 //----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkMolecule);
diff --git a/Common/DataModel/vtkOctreePointLocator.cxx b/Common/DataModel/vtkOctreePointLocator.cxx
index f82af37..8d8bc9e 100644
--- a/Common/DataModel/vtkOctreePointLocator.cxx
+++ b/Common/DataModel/vtkOctreePointLocator.cxx
@@ -159,21 +159,15 @@ void vtkOctreePointLocator::DeleteAllDescendants(
 vtkOctreePointLocator::~vtkOctreePointLocator()
 {
   this->FreeSearchStructure();
-  if(this->LocatorPoints != NULL)
-    {
-    delete []this->LocatorPoints;
-    this->LocatorPoints = 0;
-    }
-  if(this->LocatorIds != NULL)
-    {
-    delete []this->LocatorIds;
-    this->LocatorIds = 0;
-    }
-  if(this->LeafNodeList != NULL)
-    {
-    delete []this->LeafNodeList;
-    this->LeafNodeList = 0;
-    }
+
+  delete []this->LocatorPoints;
+  this->LocatorPoints = 0;
+
+  delete []this->LocatorIds;
+  this->LocatorIds = 0;
+
+  delete []this->LeafNodeList;
+  this->LeafNodeList = 0;
 }
 
 //----------------------------------------------------------------------------
@@ -1029,25 +1023,16 @@ void vtkOctreePointLocator::FreeSearchStructure()
     this->Top->Delete();
     this->Top = NULL;
     }
-  if (this->LeafNodeList)
-    {
-    delete [] this->LeafNodeList;
-    this->LeafNodeList = NULL;
-    }
+  delete [] this->LeafNodeList;
+  this->LeafNodeList = NULL;
 
   this->NumberOfLeafNodes = 0;
 
-  if (this->LocatorPoints)
-    {
-    delete [] this->LocatorPoints;
-    this->LocatorPoints = NULL;
-    }
+  delete [] this->LocatorPoints;
+  this->LocatorPoints = NULL;
 
-  if (this->LocatorIds)
-    {
-    delete [] this->LocatorIds;
-    this->LocatorIds = NULL;
-    }
+  delete [] this->LocatorIds;
+  this->LocatorIds = NULL;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Common/DataModel/vtkOctreePointLocator.h b/Common/DataModel/vtkOctreePointLocator.h
index cac95d1..e93ba5c 100644
--- a/Common/DataModel/vtkOctreePointLocator.h
+++ b/Common/DataModel/vtkOctreePointLocator.h
@@ -17,19 +17,19 @@
  See Copyright.txt or http://www.paraview.org/HTML/Copyright.html for details.
 ----------------------------------------------------------------------------*/
 
-// .NAME vtkOctreePointLocator - a octree spatial decomposition of a set of points
+// .NAME vtkOctreePointLocator - an octree spatial decomposition of a set of points
 //
 // .SECTION Description
-//     Given a vtkDataSetxs, create an octree that is locally refined
-//     such that all leaf octants contain less than a certain
-//     amount of points.  Note that there is no size constraint that
-//     a leaf octant in relation to any of its neighbors.
+// Given a vtkDataSet, create an octree that is locally refined
+// such that all leaf octants contain fewer than a certain
+// number of points.  Note that there is no size constraint on
+// a leaf octant in relation to any of its neighbors.
 //
-//     This class can also generate a PolyData representation of
-//     the boundaries of the spatial regions in the decomposition.
+// This class can also generate a PolyData representation of
+// the boundaries of the spatial regions in the decomposition.
 //
 // .SECTION See Also
-//      vtkPointLocator vtkOctreePointLocatorNode
+// vtkLocator vtkPointLocator vtkOctreePointLocatorNode
 
 #ifndef __vtkOctreePointLocator_h
 #define __vtkOctreePointLocator_h
diff --git a/Common/DataModel/vtkOctreePointLocatorNode.h b/Common/DataModel/vtkOctreePointLocatorNode.h
index 51651f2..5162506 100644
--- a/Common/DataModel/vtkOctreePointLocatorNode.h
+++ b/Common/DataModel/vtkOctreePointLocatorNode.h
@@ -29,7 +29,7 @@
 // octant is Min < x <= Max.
 //
 // .SECTION See Also
-//      vtkOctreePointLocator
+// vtkOctreePointLocator
 
 #ifndef __vtkOctreePointLocatorNode_h
 #define __vtkOctreePointLocatorNode_h
@@ -61,7 +61,7 @@ public:
   //   Caller allocates storage for 6-vector in GetBounds.
   void SetBounds(double xMin, double xMax, double yMin,
                  double yMax, double zMin, double zMax);
-  void SetBounds(double b[6])
+  void SetBounds(const double b[6])
     {
     this->SetBounds(b[0], b[1], b[2], b[3], b[4], b[5]);
     }
diff --git a/Common/DataModel/vtkOrderedTriangulator.cxx b/Common/DataModel/vtkOrderedTriangulator.cxx
index 4a90c28..7252b0d 100644
--- a/Common/DataModel/vtkOrderedTriangulator.cxx
+++ b/Common/DataModel/vtkOrderedTriangulator.cxx
@@ -31,7 +31,7 @@
 #include <vector>
 #include <stack>
 #include <map>
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkOrderedTriangulator);
 
diff --git a/Common/DataModel/vtkPiecewiseFunction.cxx b/Common/DataModel/vtkPiecewiseFunction.cxx
index 1e3c29d..974189a 100644
--- a/Common/DataModel/vtkPiecewiseFunction.cxx
+++ b/Common/DataModel/vtkPiecewiseFunction.cxx
@@ -18,6 +18,7 @@
 #include "vtkInformationVector.h"
 #include "vtkObjectFactory.h"
 
+#include <cassert>
 #include <vector>
 #include <set>
 #include <algorithm>
@@ -111,10 +112,7 @@ vtkPiecewiseFunction::vtkPiecewiseFunction()
 // Destruct a vtkPiecewiseFunction
 vtkPiecewiseFunction::~vtkPiecewiseFunction()
 {
-  if( this->Function )
-    {
-    delete [] this->Function;
-    }
+  delete [] this->Function;
 
   for(unsigned int i=0;i<this->Internal->Nodes.size();i++)
     {
@@ -136,7 +134,8 @@ void vtkPiecewiseFunction::DeepCopy( vtkDataObject *o )
     for ( i = 0; i < f->GetSize(); i++ )
       {
       double val[4];
-      f->GetNodeValue(i, val);
+      int isInRange = f->GetNodeValue(i, val);
+      assert(isInRange == 1); (void)isInRange;
       this->AddPoint(val[0], val[1], val[2], val[3]);
       }
     this->Modified();
@@ -158,7 +157,8 @@ void vtkPiecewiseFunction::ShallowCopy( vtkDataObject *o )
     for ( i = 0; i < f->GetSize(); i++ )
       {
       double val[4];
-      f->GetNodeValue(i, val);
+      int isInRange = f->GetNodeValue(i, val);
+      assert(isInRange == 1); (void)isInRange;
       this->AddPoint(val[0], val[1], val[2], val[3]);
       }
     this->Modified();
@@ -269,11 +269,8 @@ double *vtkPiecewiseFunction::GetDataPointer()
 {
   int size = static_cast<int>(this->Internal->Nodes.size());
 
-  if ( this->Function )
-    {
-    delete [] this->Function;
-    this->Function = NULL;
-    }
+  delete [] this->Function;
+  this->Function = NULL;
 
   if ( size > 0 )
     {
diff --git a/Common/DataModel/vtkPiecewiseFunction.h b/Common/DataModel/vtkPiecewiseFunction.h
index b8380af..f05a245 100644
--- a/Common/DataModel/vtkPiecewiseFunction.h
+++ b/Common/DataModel/vtkPiecewiseFunction.h
@@ -84,7 +84,8 @@ public:
   // Description:
   // For the node specified by index, set/get the
   // location (X), value (Y), midpoint, and sharpness
-  // values at the node.
+  // values at the node. Returns -1 if the index is
+  // out of range, returns 1 otherwise.
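+  //
+  // For example (a sketch, where f is a vtkPiecewiseFunction*):
+  //   double v[4];
+  //   if (f->GetNodeValue(i, v) == 1) { /* v = {X, Y, midpoint, sharpness} */ }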
   int GetNodeValue( int index, double val[4] );
   int SetNodeValue( int index, double val[4] );
 
diff --git a/Common/DataModel/vtkPixelExtent.cxx b/Common/DataModel/vtkPixelExtent.cxx
new file mode 100644
index 0000000..d4cb302
--- /dev/null
+++ b/Common/DataModel/vtkPixelExtent.cxx
@@ -0,0 +1,347 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPixelExtent.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkPixelExtent.h"
+
+using std::deque;
+using std::ostream;
+
+//-----------------------------------------------------------------------------
+vtkPixelExtent vtkPixelExtent::Grow(
+      const vtkPixelExtent &inputExt,
+      const vtkPixelExtent &problemDomain,
+      int n)
+{
+  vtkPixelExtent outputExt = vtkPixelExtent::Grow(inputExt, n);
+  outputExt &= problemDomain;
+  return outputExt;
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelExtent vtkPixelExtent::Grow(
+      const vtkPixelExtent &inputExt,
+      int n)
+{
+  vtkPixelExtent outputExt(inputExt);
+  outputExt.Grow(0, n);
+  outputExt.Grow(1, n);
+  return outputExt;
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelExtent vtkPixelExtent::GrowLow(
+      const vtkPixelExtent &inputExt,
+      int q,
+      int n)
+{
+  vtkPixelExtent outputExt(inputExt);
+  outputExt[2*q] -= n;
+  return outputExt;
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelExtent vtkPixelExtent::GrowHigh(
+      const vtkPixelExtent &inputExt,
+      int q,
+      int n)
+{
+  vtkPixelExtent outputExt(inputExt);
+  outputExt[2*q+1] += n;
+  return outputExt;
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelExtent vtkPixelExtent::Shrink(
+      const vtkPixelExtent &inputExt,
+      int n)
+{
+  return vtkPixelExtent::Grow(inputExt, -n);
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelExtent vtkPixelExtent::Shrink(
+      const vtkPixelExtent &inputExt,
+      const vtkPixelExtent &problemDomain,
+      int n)
+{
+  vtkPixelExtent outputExt(inputExt);
+
+  outputExt.Grow(0, -n);
+  outputExt.Grow(1, -n);
+
+  // don't shrink at the problem domain boundary,
+  // because extents are never grown outside the problem domain.
+  for (int i=0; i<4; ++i)
+    {
+    if (inputExt[i] == problemDomain[i])
+      {
+      outputExt[i] = problemDomain[i];
+      }
+    }
+
+  return outputExt;
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelExtent vtkPixelExtent::CellToNode(
+      const vtkPixelExtent &inputExt)
+{
+  vtkPixelExtent outputExt(inputExt);
+  ++outputExt[1];
+  ++outputExt[3];
+  return outputExt;
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelExtent vtkPixelExtent::NodeToCell(const vtkPixelExtent &inputExt)
+{
+  vtkPixelExtent outputExt(inputExt);
+  --outputExt[1];
+  --outputExt[3];
+  return outputExt;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPixelExtent::Shift(
+      int *ij,
+      int n)
+{
+  ij[0] += n;
+  ij[1] += n;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPixelExtent::Shift(
+      int *ij,
+      int *n)
+{
+  ij[0] += n[0];
+  ij[1] += n[1];
+}
+
+//-----------------------------------------------------------------------------
+void vtkPixelExtent::Split(
+      int i1,
+      int j1,
+      const vtkPixelExtent &ext,
+      deque<vtkPixelExtent> &newExts)
+{
+  // the cell is inside; the split results in as many as
+  // 4 new extents. check for each one.
+  int i0 = i1 - 1;
+  int j0 = j1 - 1;
+
+  int outside = 1;
+
+  // lower left
+  if (ext.Contains(i0, j0))
+    {
+    newExts.push_back(vtkPixelExtent(ext[0], i0, ext[2], j0));
+    outside = 0;
+    }
+  // lower right
+  if (ext.Contains(i1, j0))
+    {
+    newExts.push_back(vtkPixelExtent(i1, ext[1], ext[2], j0));
+    outside = 0;
+    }
+  // upper left
+  if (ext.Contains(i0, j1))
+    {
+    newExts.push_back(vtkPixelExtent(ext[0], i0, j1, ext[3]));
+    outside = 0;
+    }
+  // upper right
+  if (ext.Contains(i1, j1))
+    {
+    newExts.push_back(vtkPixelExtent(i1, ext[1], j1, ext[3]));
+    outside = 0;
+    }
+
+  // split cell is outside, pass through
+  if (outside)
+    {
+    newExts.push_back(ext);
+    return;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkPixelExtent::Subtract(
+      const vtkPixelExtent &A,
+      vtkPixelExtent B,
+      deque<vtkPixelExtent> &C)
+{
+  // split method requires split point inside the extent
+  vtkPixelExtent I(A);
+  I &= B;
+
+  if (I.Empty())
+    {
+    // do nothing if disjoint
+    C.push_back(A);
+    return;
+    }
+  if (B.Contains(A))
+    {
+    // if A is covered by B then remove A
+    return;
+    }
+
+  // split to the left of and below these cells
+  I.CellToNode();
+
+  deque<vtkPixelExtent> tmpA0;
+  tmpA0.push_back(A);
+  for (int q=0; q<4; ++q)
+    {
+    const int ids[8] = {0,2, 1,2, 1,3, 0,3};
+    int qq = 2*q;
+    int i = I[ids[qq]];
+    int j = I[ids[qq+1]];
+    deque<vtkPixelExtent> tmpA1;
+    while ( !tmpA0.empty() )
+      {
+      vtkPixelExtent ext = tmpA0.back();
+      tmpA0.pop_back();
+      vtkPixelExtent::Split(i,j, ext, tmpA1);
+      }
+    tmpA0 = tmpA1;
+    }
+
+  // remove anything covered by B
+  size_t n = tmpA0.size();
+  for (size_t q=0; q<n; ++q)
+    {
+    const vtkPixelExtent &ext = tmpA0[q];
+    if (!B.Contains(ext))
+      {
+      C.push_back(ext);
+      }
+    }
+}
+
+// ----------------------------------------------------------------------------
+void vtkPixelExtent::Merge(deque<vtkPixelExtent> &exts)
+{
+  size_t ne = exts.size();
+
+  // working in point space simplifies things because
+  // points overlap in adjacent extents while cells do not
+  deque<vtkPixelExtent> tmpExts(ne);
+  for (size_t t=0; t<ne; ++t)
+    {
+    vtkPixelExtent ext(exts[t]);
+    ext.CellToNode();
+    tmpExts[t] = ext;
+    }
+
+  // one pass for each direction
+  for (int q=0; q<2; ++q)
+    {
+    int qq = 2*q;
+    // consider each extent as a target to be merged
+    for (size_t t=0; t<ne; ++t)
+      {
+      // if a merger occurs the merged extent is added
+      // as a new target with the constituents marked empty
+      // and the current pass is terminated early
+      int nextPass = 0;
+
+      // current target
+      vtkPixelExtent &ext0 = tmpExts[t];
+      if (ext0.Empty())
+        {
+        // was merged in a preceding pass
+        continue;
+        }
+
+      for (size_t c=0; c<ne; ++c)
+        {
+        if (c == t)
+          {
+          // don't attempt merge with self
+          continue;
+          }
+
+        // candidate
+        vtkPixelExtent &ext1 = tmpExts[c];
+        if (ext1.Empty())
+          {
+          // was merged in a preceding pass
+          continue;
+          }
+
+        // must be same size and coordinate in merge dir
+        if ( (ext0[qq] == ext1[qq]) && (ext0[qq+1] == ext1[qq+1]) )
+          {
+          // must overlap
+          vtkPixelExtent ext2(ext0);
+          ext2 &= ext1;
+          if (!ext2.Empty())
+            {
+            // merge and add as new target
+            // in a later pass
+            vtkPixelExtent ext3(ext0);
+            ext3 |= ext1;
+            tmpExts.push_back(ext3);
+            ++ne;
+
+            // mark the merged extents empty
+            ext0.Clear();
+            ext1.Clear();
+
+            // move to next target
+            nextPass = 1;
+            break;
+            }
+          }
+        if (nextPass)
+          {
+          break;
+          }
+        }
+      }
+    }
+  // discard merged targets and copy the rest to the output
+  exts.clear();
+  for (size_t t=0; t<ne; ++t)
+    {
+    vtkPixelExtent &ext = tmpExts[t];
+    if (!ext.Empty())
+      {
+      ext.NodeToCell();
+      exts.push_back(ext);
+      }
+    }
+}
+
+//*****************************************************************************
+ostream &operator<<(ostream &os, const vtkPixelExtent &ext)
+{
+  if (ext.Empty())
+    {
+    os << "(empty)";
+    }
+  else
+    {
+    os
+      << "("
+      << ext[0] << ", "
+      << ext[1] << ", "
+      << ext[2] << ", "
+      << ext[3] << ")";
+    }
+  return os;
+}
diff --git a/Common/DataModel/vtkPixelExtent.h b/Common/DataModel/vtkPixelExtent.h
new file mode 100644
index 0000000..95ddae1
--- /dev/null
+++ b/Common/DataModel/vtkPixelExtent.h
@@ -0,0 +1,661 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPixelExtent.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPixelExtent - Index space representation of a pixel plane
+// .SECTION Description
+// Representation of a Cartesian pixel plane and common operations
+// on it. The implementation is intended to be fast and light
+// so that it may be used in place of int[4] with little or no
+// performance penalty.
+//
+// NOTE: in most cases an operation on an empty object produces
+// incorrect results. If this is an issue, query Empty() first.
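+//
+// A minimal usage sketch (all calls shown are declared in this header):
+//
+//   vtkPixelExtent tile(0, 15, 0, 15);    // ilo, ihi, jlo, jhi
+//   vtkPixelExtent domain(0, 255, 0, 255);
+//   tile.Grow(1);                         // add a ghost layer
+//   tile &= domain;                       // clip to the problem domain
+//   if (!tile.Empty())
+//     {
+//     size_t nPix = tile.Size();          // number of cells covered
+//     const int *ext = tile.GetData();    // usable where an int[4] is expected
+//     }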
+
+#ifndef __vtkPixelExtent_h
+#define __vtkPixelExtent_h
+
+#include "vtkSystemIncludes.h" // for VTK's system header config
+#include "vtkCommonDataModelModule.h" // for export
+
+#include <deque> // for inline impl
+#include <algorithm> // for inline impl
+#include <iostream> // for inline impl
+#include <climits> // for inline impl
+
+class VTKCOMMONDATAMODEL_EXPORT vtkPixelExtent
+{
+public:
+  vtkPixelExtent();
+
+  template<typename T>
+  vtkPixelExtent(const T *ext);
+
+  template<typename T>
+  vtkPixelExtent(T ilo, T ihi, T jlo, T jhi);
+
+  template<typename T>
+  vtkPixelExtent(T width, T height)
+    { this->SetData(T(0), width-T(1), T(0), height-T(1)); }
+
+  vtkPixelExtent(const vtkPixelExtent &other);
+
+  vtkPixelExtent &operator=(const vtkPixelExtent &other);
+
+  // Description:
+  // Element access
+  int &operator[](int i){ return this->Data[i]; }
+  const int &operator[](int i) const { return this->Data[i]; }
+
+  // Description:
+  // Set the extent.
+  void SetData(const vtkPixelExtent &ext);
+
+  template<typename T>
+  void SetData(const T *ext);
+
+  template<typename T>
+  void SetData(T ilo, T ihi, T jlo, T jhi);
+  void Clear();
+
+  // Description:
+  // Direct access to internal data.
+  int *GetData(){ return this->Data; }
+  const int *GetData() const { return this->Data; }
+
+  template<typename T>
+  void GetData(T data[4]) const;
+
+  unsigned int *GetDataU()
+    { return reinterpret_cast<unsigned int*>(this->Data); }
+
+  const unsigned int *GetDataU() const
+    { return reinterpret_cast<const unsigned int*>(this->Data); }
+
+  // Description:
+  // Get the start/end index.
+  void GetStartIndex(int first[2]) const;
+  void GetStartIndex(int first[2], const int origin[2]) const;
+  void GetEndIndex(int last[2]) const;
+
+  // Description:
+  // Return true if empty.
+  int Empty() const;
+
+  // Description:
+  // Test for equivalence.
+  int operator==(const vtkPixelExtent &other) const;
+
+  // Description:
+  // Return non-zero if this extent contains the other.
+  int Contains(const vtkPixelExtent &other) const;
+  int Contains(int i, int j) const;
+
+  // Description:
+  // Return non-zero if the extent is disjoint from the other
+  int Disjoint(vtkPixelExtent other) const;
+
+  // Description:
+  // Get the number in each direction.
+  template<typename T>
+  void Size(T nCells[2]) const;
+
+  // Description:
+  // Get the total number.
+  size_t Size() const;
+
+
+  // Description:
+  // In-place intersection.
+  void operator&=(const vtkPixelExtent &other);
+
+  // Description:
+  // In-place union.
+  void operator|=(const vtkPixelExtent &other);
+
+
+
+  // Description:
+  // Expand the extents by n.
+  void Grow(int n);
+  void Grow(int q, int n);
+  void GrowLow(int q, int n);
+  void GrowHigh(int q, int n);
+
+  // Description:
+  // Shrink the extent by n.
+  void Shrink(int n);
+  void Shrink(int q, int n);
+
+  // Description:
+  // Shift by the low corner of this extent, moving it to the origin.
+  void Shift();
+
+  // Description:
+  // Shift by low corner of the given extent.
+  void Shift(const vtkPixelExtent &ext);
+
+  // Description:
+  // Shift by the given amount.
+  void Shift(int *n);
+
+  // Description:
+  // Shift by the given amount in the given direction.
+  void Shift(int q, int n);
+
+  // Description:
+  // Divide the extent in half in the given direction. The
+  // operation is done in place; the other half of the split
+  // extent is returned. The returned extent will be empty if
+  // the split could not be made.
+  vtkPixelExtent Split(int dir);
+
+
+
+  // Description:
+  // In-place conversion from cell-based to node-based extent, and vice versa.
+  void CellToNode();
+  void NodeToCell();
+
+
+
+  // Description:
+  // Get the number in each direction.
+  template<typename T>
+  static
+  void Size(const vtkPixelExtent &ext, T nCells[2]);
+
+  // Description:
+  // Get the total number.
+  static
+  size_t Size(const vtkPixelExtent &ext);
+
+  // Description:
+  // Add or remove ghost cells. If a problem domain is
+  // provided then the result is clipped to be within the
+  // problem domain.
+  static vtkPixelExtent Grow(const vtkPixelExtent &inputExt, int n);
+
+  static vtkPixelExtent Grow(
+      const vtkPixelExtent &inputExt,
+      const vtkPixelExtent &problemDomain,
+      int n);
+
+  static vtkPixelExtent GrowLow(
+      const vtkPixelExtent &ext,
+      int q,
+      int n);
+
+  static vtkPixelExtent GrowHigh(
+      const vtkPixelExtent &ext,
+      int q,
+      int n);
+
+  // Description:
+  // Remove ghost cells. If a problem domain is
+  // provided the input is pinned at the domain.
+  static vtkPixelExtent Shrink(
+      const vtkPixelExtent &inputExt,
+      const vtkPixelExtent &problemDomain,
+      int n);
+
+  static vtkPixelExtent Shrink(
+      const vtkPixelExtent &inputExt,
+      int n);
+
+  // Description:
+  // Convert from point extent to cell extent
+  // while respecting the dimensionality of the data.
+  static vtkPixelExtent NodeToCell(const vtkPixelExtent &inputExt);
+
+  // Description:
+  // Convert from cell extent to point extent
+  // while respecting the dimensionality of the data.
+  static vtkPixelExtent CellToNode(const vtkPixelExtent &inputExt);
+
+  // Description:
+  // Shift by the given amount while respecting mode.
+  static void Shift(int *ij, int n);
+  static void Shift(int *ij, int *n);
+
+
+  // Description:
+  // Split ext at i,j; the resulting extents (up to 4) are appended
+  // to newExts. If i,j is outside ext, ext is passed through
+  // unmodified.
+  static void Split(
+        int i,
+        int j,
+        const vtkPixelExtent &ext,
+        std::deque<vtkPixelExtent> &newExts);
+
+  // Description:
+  // A - B = C
+  // C is a set of disjoint extents such that the
+  // intersection of B and C is empty and the intersection
+  // of A and C is C.
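+  //
+  // For example (a sketch): with cell extents A = (0, 7, 0, 7) and
+  // B = (4, 7, 0, 7), Subtract(A, B, C) appends the single extent
+  // (0, 3, 0, 7) to C.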
+  static void Subtract(
+        const vtkPixelExtent &A,
+        vtkPixelExtent B,
+        std::deque<vtkPixelExtent> &newExts);
+
+  // Description:
+  // Merge compatible extents in the list. Extents are compatible
+  // if they are directly adjacent and have the same extent along
+  // the adjacent edge.
+  static void Merge(std::deque<vtkPixelExtent> &exts);
+
+private:
+  int Data[4];
+};
+
+// Description:
+// Stream insertion operator for formatted output of pixel extents.
+VTKCOMMONDATAMODEL_EXPORT
+std::ostream &operator<<(std::ostream &os, const vtkPixelExtent &ext);
+
+//-----------------------------------------------------------------------------
+template<typename T>
+void vtkPixelExtent::SetData(const T *ext)
+{
+  Data[0] = static_cast<int>(ext[0]);
+  Data[1] = static_cast<int>(ext[1]);
+  Data[2] = static_cast<int>(ext[2]);
+  Data[3] = static_cast<int>(ext[3]);
+}
+
+//-----------------------------------------------------------------------------
+template<typename T>
+void vtkPixelExtent::SetData(T ilo, T ihi, T jlo, T jhi)
+{
+  T ext[4] = {ilo, ihi, jlo, jhi};
+  this->SetData(ext);
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::SetData(const vtkPixelExtent &other)
+{
+  this->SetData(other.GetData());
+}
+
+//-----------------------------------------------------------------------------
+template<typename T>
+void vtkPixelExtent::GetData(T data[4]) const
+{
+  data[0] = static_cast<T>(this->Data[0]);
+  data[1] = static_cast<T>(this->Data[1]);
+  data[2] = static_cast<T>(this->Data[2]);
+  data[3] = static_cast<T>(this->Data[3]);
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Clear()
+{
+  this->SetData<int>(INT_MAX, INT_MIN, INT_MAX, INT_MIN);
+}
+
+//-----------------------------------------------------------------------------
+inline
+vtkPixelExtent::vtkPixelExtent()
+{
+  this->Clear();
+}
+
+//-----------------------------------------------------------------------------
+template<typename T>
+vtkPixelExtent::vtkPixelExtent(const T *ext)
+{
+  this->SetData(ext);
+}
+
+//-----------------------------------------------------------------------------
+template<typename T>
+vtkPixelExtent::vtkPixelExtent(
+      T ilo,
+      T ihi,
+      T jlo,
+      T jhi)
+{
+  this->SetData(ilo, ihi, jlo, jhi);
+}
+
+//-----------------------------------------------------------------------------
+inline
+vtkPixelExtent &vtkPixelExtent::operator=(const vtkPixelExtent &other)
+{
+  if (&other == this)
+    {
+    return *this;
+    }
+  this->SetData(other);
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+inline
+vtkPixelExtent::vtkPixelExtent(const vtkPixelExtent &other)
+{
+  *this = other;
+}
+
+//-----------------------------------------------------------------------------
+template<typename T>
+void vtkPixelExtent::Size(const vtkPixelExtent &ext, T nCells[2])
+{
+  nCells[0] = ext[1] - ext[0] + 1;
+  nCells[1] = ext[3] - ext[2] + 1;
+}
+
+//-----------------------------------------------------------------------------
+inline
+size_t vtkPixelExtent::Size(const vtkPixelExtent &ext)
+{
+  return (ext[1] - ext[0] + 1) * (ext[3] - ext[2] + 1);
+}
+
+//-----------------------------------------------------------------------------
+template<typename T>
+void vtkPixelExtent::Size(T nCells[2]) const
+{
+  vtkPixelExtent::Size(*this, nCells);
+}
+
+//-----------------------------------------------------------------------------
+inline
+size_t vtkPixelExtent::Size() const
+{
+  return vtkPixelExtent::Size(*this);
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::GetStartIndex(int first[2]) const
+{
+  first[0] = this->Data[0];
+  first[1] = this->Data[2];
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::GetStartIndex(int first[2], const int origin[2]) const
+{
+  first[0] = this->Data[0] - origin[0];
+  first[1] = this->Data[2] - origin[1];
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::GetEndIndex(int last[2]) const
+{
+  last[0] = this->Data[1];
+  last[1] = this->Data[3];
+}
+
+//-----------------------------------------------------------------------------
+inline
+int vtkPixelExtent::Empty() const
+{
+  if ( this->Data[0] > this->Data[1]
+    || this->Data[2] > this->Data[3])
+    {
+    return 1;
+    }
+  return 0;
+}
+
+//-----------------------------------------------------------------------------
+inline
+int vtkPixelExtent::operator==(const vtkPixelExtent &other) const
+{
+  if ( (this->Data[0] == other.Data[0])
+    && (this->Data[1] == other.Data[1])
+    && (this->Data[2] == other.Data[2])
+    && (this->Data[3] == other.Data[3]) )
+    {
+    return 1;
+    }
+  return 0;
+}
+
+//-----------------------------------------------------------------------------
+inline
+int vtkPixelExtent::Contains(const vtkPixelExtent &other) const
+{
+  if ( (this->Data[0] <= other.Data[0])
+    && (this->Data[1] >= other.Data[1])
+    && (this->Data[2] <= other.Data[2])
+    && (this->Data[3] >= other.Data[3]) )
+    {
+    return 1;
+    }
+  return 0;
+}
+
+//-----------------------------------------------------------------------------
+inline
+int vtkPixelExtent::Contains(int i, int j) const
+{
+  if ( (this->Data[0] <= i)
+    && (this->Data[1] >= i)
+    && (this->Data[2] <= j)
+    && (this->Data[3] >= j) )
+    {
+    return 1;
+    }
+  return 0;
+}
+
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::operator&=(const vtkPixelExtent &other)
+{
+  if (this->Empty())
+    {
+    return;
+    }
+
+  if (other.Empty())
+    {
+    this->Clear();
+    return;
+    }
+
+  this->Data[0] = std::max(this->Data[0], other.Data[0]);
+  this->Data[1] = std::min(this->Data[1], other.Data[1]);
+  this->Data[2] = std::max(this->Data[2], other.Data[2]);
+  this->Data[3] = std::min(this->Data[3], other.Data[3]);
+
+  if (this->Empty())
+    {
+    this->Clear();
+    }
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::operator|=(const vtkPixelExtent &other)
+{
+  if (other.Empty())
+    {
+    return;
+    }
+
+  if (this->Empty())
+    {
+    this->SetData(other.GetData());
+    return;
+    }
+
+  this->Data[0] = std::min(this->Data[0], other.Data[0]);
+  this->Data[1] = std::max(this->Data[1], other.Data[1]);
+  this->Data[2] = std::min(this->Data[2], other.Data[2]);
+  this->Data[3] = std::max(this->Data[3], other.Data[3]);
+}
+
+//-----------------------------------------------------------------------------
+inline
+int vtkPixelExtent::Disjoint(vtkPixelExtent other) const
+{
+  other &= *this;
+  return other.Empty();
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Grow(int n)
+{
+  this->Data[0] -= n;
+  this->Data[1] += n;
+  this->Data[2] -= n;
+  this->Data[3] += n;
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Grow(int q, int n)
+{
+  q *= 2;
+
+  this->Data[q  ] -= n;
+  this->Data[q+1] += n;
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::GrowLow(int q, int n)
+{
+  this->Data[2*q] -= n;
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::GrowHigh(int q, int n)
+{
+  this->Data[2*q+1] += n;
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Shrink(int n)
+{
+  this->Data[0] += n;
+  this->Data[1] -= n;
+  this->Data[2] += n;
+  this->Data[3] -= n;
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Shrink(int q, int n)
+{
+  q *= 2;
+  this->Data[q  ] += n;
+  this->Data[q+1] -= n;
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Shift(int *n)
+{
+  this->Data[0] += n[0];
+  this->Data[1] += n[0];
+  this->Data[2] += n[1];
+  this->Data[3] += n[1];
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Shift(int q, int n)
+{
+  q *= 2;
+  this->Data[q  ] += n;
+  this->Data[q+1] += n;
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Shift(const vtkPixelExtent &other)
+{
+  for (int q=0; q<2; ++q)
+    {
+    int qq = q*2;
+    int n = -other[qq];
+
+    this->Data[qq  ] += n;
+    this->Data[qq+1] += n;
+    }
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::Shift()
+{
+  for (int q=0; q<2; ++q)
+    {
+    int qq = q*2;
+    int n = -this->Data[qq];
+
+    this->Data[qq  ] += n;
+    this->Data[qq+1] += n;
+    }
+}
+
+//-----------------------------------------------------------------------------
+inline
+vtkPixelExtent vtkPixelExtent::Split(int dir)
+{
+  vtkPixelExtent half;
+
+  int q = 2 * dir;
+  int l = this->Data[q+1] - this->Data[q] + 1;
+  int s = l/2;
+
+  if (s)
+    {
+    s += this->Data[q];
+    half = *this;
+    half.Data[q] = s;
+    this->Data[q+1] = s - 1;
+    }
+
+  return half;
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::CellToNode()
+{
+  ++this->Data[1];
+  ++this->Data[3];
+}
+
+//-----------------------------------------------------------------------------
+inline
+void vtkPixelExtent::NodeToCell()
+{
+  --this->Data[1];
+  --this->Data[3];
+}
+
+//-----------------------------------------------------------------------------
+inline
+bool operator<(const vtkPixelExtent &l, const vtkPixelExtent &r)
+{
+  return l.Size() < r.Size();
+}
+
+#endif
+// VTK-HeaderTest-Exclude: vtkPixelExtent.h
diff --git a/Common/DataModel/vtkPlane.cxx b/Common/DataModel/vtkPlane.cxx
index 50856f7..32bf8c8 100644
--- a/Common/DataModel/vtkPlane.cxx
+++ b/Common/DataModel/vtkPlane.cxx
@@ -56,22 +56,19 @@ void vtkPlane::ProjectPoint(double x[3], double xproj[3])
   this->ProjectPoint(x, this->GetOrigin(), this->GetNormal(), xproj);
 }
 
-void vtkPlane::ProjectVector(double v[3], double origin[3], double normal[3],
-                             double vproj[3])
+void vtkPlane::ProjectVector(
+  double v[3], double vtkNotUsed(origin)[3], double normal[3],
+  double vproj[3])
 {
-  (void)origin;
-
-  // This function uses this equation:
-  // projected = vector - (vector dot Normal)Normal
-  // Where 'Normal' is unit length
-
-  vtkMath::Normalize(normal);
-
-  double dotProd = vtkMath::Dot(v, normal);
-
-  vtkMath::MultiplyScalar(normal, dotProd);
-
-  vtkMath::Subtract(v, normal, vproj);
+  double t = vtkMath::Dot(v, normal);
+  double n2 = vtkMath::Dot(normal, normal);
+  if (n2 == 0)
+    {
+    n2 = 1.0;
+    }
+  vproj[0] = v[0] - t * normal[0] / n2;
+  vproj[1] = v[1] - t * normal[1] / n2;
+  vproj[2] = v[2] - t * normal[2] / n2;
 }
 
 void vtkPlane::ProjectVector(double v[3], double vproj[3])
diff --git a/Common/DataModel/vtkPlane.h b/Common/DataModel/vtkPlane.h
index 20870aa..645575f 100644
--- a/Common/DataModel/vtkPlane.h
+++ b/Common/DataModel/vtkPlane.h
@@ -109,7 +109,7 @@ public:
 
 protected:
   vtkPlane();
-  ~vtkPlane() {};
+  ~vtkPlane() {}
 
   double Normal[3];
   double Origin[3];
diff --git a/Common/DataModel/vtkPlaneCollection.h b/Common/DataModel/vtkPlaneCollection.h
index 2ef9162..4a76b53 100644
--- a/Common/DataModel/vtkPlaneCollection.h
+++ b/Common/DataModel/vtkPlaneCollection.h
@@ -54,8 +54,8 @@ public:
   //ETX
 
 protected:
-  vtkPlaneCollection() {};
-  ~vtkPlaneCollection() {};
+  vtkPlaneCollection() {}
+  ~vtkPlaneCollection() {}
 
 
 private:
diff --git a/Common/DataModel/vtkPlanes.cxx b/Common/DataModel/vtkPlanes.cxx
index 5100673..501d911 100644
--- a/Common/DataModel/vtkPlanes.cxx
+++ b/Common/DataModel/vtkPlanes.cxx
@@ -199,7 +199,7 @@ void vtkPlanes::SetFrustumPlanes(double planes[24])
   normals->Delete();
 }
 
-void vtkPlanes::SetBounds(double bounds[6])
+void vtkPlanes::SetBounds(const double bounds[6])
 {
   int i;
   double n[3], x[3];
diff --git a/Common/DataModel/vtkPlanes.h b/Common/DataModel/vtkPlanes.h
index 617334b..2ce78b8 100644
--- a/Common/DataModel/vtkPlanes.h
+++ b/Common/DataModel/vtkPlanes.h
@@ -79,7 +79,7 @@ public:
   // An alternative method to specify six planes defined by a bounding box.
   // The bounding box is a six-vector defined as (xmin,xmax,ymin,ymax,zmin,zmax).
   // It defines six planes orthogonal to the x-y-z coordinate axes.
-  void SetBounds(double bounds[6]);
+  void SetBounds(const double bounds[6]);
   void SetBounds(double xmin, double xmax, double ymin, double ymax,
                  double zmin, double zmax);
 
diff --git a/Common/DataModel/vtkPlanesIntersection.cxx b/Common/DataModel/vtkPlanesIntersection.cxx
index f40391f..dba2ef5 100644
--- a/Common/DataModel/vtkPlanesIntersection.cxx
+++ b/Common/DataModel/vtkPlanesIntersection.cxx
@@ -51,11 +51,8 @@ vtkPlanesIntersection::~vtkPlanesIntersection()
     this->regionPts->Delete();
     this->regionPts = NULL;
     }
-  if (this->Plane)
-    {
-    delete [] this->Plane;
-    this->Plane = NULL;
-    }
+  delete [] this->Plane;
+  this->Plane = NULL;
 }
 void vtkPlanesIntersection::SetRegionVertices(vtkPoints *v)
 {
@@ -572,7 +569,7 @@ void vtkPlanesIntersection::SetPlaneEquations()
   // vtkPlanes stores normals & pts instead of
   //   plane equation coefficients
 
-  if (this->Plane) delete [] this->Plane;
+  delete [] this->Plane;
 
   this->Plane = new double[nplanes*4];
 
diff --git a/Common/DataModel/vtkPointData.h b/Common/DataModel/vtkPointData.h
index 39263c2..ec520f8 100644
--- a/Common/DataModel/vtkPointData.h
+++ b/Common/DataModel/vtkPointData.h
@@ -35,8 +35,8 @@ public:
   void NullPoint(vtkIdType ptId);
 
 protected:
-  vtkPointData() {};
-  ~vtkPointData() {};
+  vtkPointData() {}
+  ~vtkPointData() {}
 
 private:
   vtkPointData(const vtkPointData&);  // Not implemented.
diff --git a/Common/DataModel/vtkPointSet.cxx b/Common/DataModel/vtkPointSet.cxx
index c9f1d0c..c144c40 100644
--- a/Common/DataModel/vtkPointSet.cxx
+++ b/Common/DataModel/vtkPointSet.cxx
@@ -20,6 +20,7 @@
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkPointLocator.h"
+#include "vtkPointSetCellIterator.h"
 
 #include "vtkSmartPointer.h"
 #define VTK_CREATE(type, name) \
@@ -93,12 +94,15 @@ void vtkPointSet::ComputeBounds()
 
   if ( this->Points )
     {
-    bounds = this->Points->GetBounds();
-    for (int i=0; i<6; i++)
+    if ( this->GetMTime() >= this->ComputeTime )
       {
-      this->Bounds[i] = bounds[i];
+      bounds = this->Points->GetBounds();
+      for (int i=0; i<6; i++)
+        {
+        this->Bounds[i] = bounds[i];
+        }
+      this->ComputeTime.Modified();
       }
-    this->ComputeTime.Modified();
     }
 }
 
@@ -322,6 +326,14 @@ vtkIdType vtkPointSet::FindCell(double x[3], vtkCell *cell,
 }
 
 //----------------------------------------------------------------------------
+vtkCellIterator *vtkPointSet::NewCellIterator()
+{
+  vtkPointSetCellIterator *iter = vtkPointSetCellIterator::New();
+  iter->SetPointSet(this);
+  return iter;
+}
+
+//----------------------------------------------------------------------------
 vtkIdType vtkPointSet::FindCell(double x[3], vtkCell *cell, vtkIdType cellId,
                                 double tol2, int& subId,double pcoords[3],
                                 double *weights)
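A usage sketch for the NewCellIterator() method added above, assuming the vtkCellIterator traversal interface (InitTraversal / IsDoneWithTraversal / GoToNextCell) introduced alongside it in this release; the function name is illustrative:

    #include "vtkCellIterator.h"
    #include "vtkIdList.h"
    #include "vtkPointSet.h"

    // Visit every cell of any vtkPointSet subclass through the new iterator.
    void VisitCells(vtkPointSet *pointSet)
    {
      vtkCellIterator *it = pointSet->NewCellIterator();
      for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
        {
        vtkIdType cellId = it->GetCellId();     // id fetched by the iterator
        vtkIdList *ptIds = it->GetPointIds();   // point ids of the current cell
        (void)cellId;
        (void)ptIds;
        }
      it->Delete();   // NewCellIterator() hands ownership to the caller
    }
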
diff --git a/Common/DataModel/vtkPointSet.h b/Common/DataModel/vtkPointSet.h
index 3342687..5f217c8 100644
--- a/Common/DataModel/vtkPointSet.h
+++ b/Common/DataModel/vtkPointSet.h
@@ -64,6 +64,10 @@ public:
                              double *weights);
 
   // Description:
+  // Return an iterator that traverses the cells in this data set.
+  vtkCellIterator* NewCellIterator();
+
+  // Description:
   // Get MTime which also considers its vtkPoints MTime.
   unsigned long GetMTime();
 
diff --git a/Common/DataModel/vtkPointSetCellIterator.cxx b/Common/DataModel/vtkPointSetCellIterator.cxx
new file mode 100644
index 0000000..3a10380
--- /dev/null
+++ b/Common/DataModel/vtkPointSetCellIterator.cxx
@@ -0,0 +1,96 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPointSetCellIterator.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkPointSetCellIterator.h"
+
+#include "vtkPointSet.h"
+#include "vtkObjectFactory.h"
+#include "vtkPoints.h"
+#include "vtkIdList.h"
+
+vtkStandardNewMacro(vtkPointSetCellIterator)
+
+//------------------------------------------------------------------------------
+void vtkPointSetCellIterator::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "PointSet: " << this->PointSet.GetPointer() << endl;
+}
+
+//------------------------------------------------------------------------------
+void vtkPointSetCellIterator::SetPointSet(vtkPointSet *ds)
+{
+  this->PointSet = ds;
+  this->PointSetPoints = ds ? ds->GetPoints() : NULL;
+  this->CellId = 0;
+}
+
+//------------------------------------------------------------------------------
+bool vtkPointSetCellIterator::IsDoneWithTraversal()
+{
+  return this->PointSet.GetPointer() == NULL
+      || this->CellId >= this->PointSet->GetNumberOfCells();
+}
+
+//------------------------------------------------------------------------------
+vtkIdType vtkPointSetCellIterator::GetCellId()
+{
+  return this->CellId;
+}
+
+//------------------------------------------------------------------------------
+void vtkPointSetCellIterator::IncrementToNextCell()
+{
+  ++this->CellId;
+}
+
+//------------------------------------------------------------------------------
+vtkPointSetCellIterator::vtkPointSetCellIterator()
+  : vtkCellIterator(),
+    PointSet(NULL),
+    PointSetPoints(NULL),
+    CellId(0)
+{
+}
+
+//------------------------------------------------------------------------------
+vtkPointSetCellIterator::~vtkPointSetCellIterator()
+{
+}
+
+//------------------------------------------------------------------------------
+void vtkPointSetCellIterator::ResetToFirstCell()
+{
+  this->CellId = 0;
+}
+
+//------------------------------------------------------------------------------
+void vtkPointSetCellIterator::FetchCellType()
+{
+  this->CellType = this->PointSet->GetCellType(this->CellId);
+}
+
+//------------------------------------------------------------------------------
+void vtkPointSetCellIterator::FetchPointIds()
+{
+  this->PointSet->GetCellPoints(this->CellId, this->PointIds);
+}
+
+//------------------------------------------------------------------------------
+void vtkPointSetCellIterator::FetchPoints()
+{
+  vtkIdList *pointIds = this->GetPointIds();
+  this->PointSetPoints->GetPoints(pointIds, this->Points);
+}
diff --git a/Common/DataModel/vtkPointSetCellIterator.h b/Common/DataModel/vtkPointSetCellIterator.h
new file mode 100644
index 0000000..d1019b7
--- /dev/null
+++ b/Common/DataModel/vtkPointSetCellIterator.h
@@ -0,0 +1,60 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPointSetCellIterator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPointSetCellIterator - Implementation of vtkCellIterator using
+// vtkPointSet API.
+
+#ifndef __vtkPointSetCellIterator_h
+#define __vtkPointSetCellIterator_h
+
+#include "vtkCommonDataModelModule.h" // For export macro
+#include "vtkCellIterator.h"
+#include "vtkSmartPointer.h" // For vtkSmartPointer
+
+class vtkPoints;
+class vtkPointSet;
+
+class VTKCOMMONDATAMODEL_EXPORT vtkPointSetCellIterator: public vtkCellIterator
+{
+public:
+  static vtkPointSetCellIterator *New();
+  vtkTypeMacro(vtkPointSetCellIterator, vtkCellIterator)
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  bool IsDoneWithTraversal();
+  vtkIdType GetCellId();
+
+protected:
+  vtkPointSetCellIterator();
+  ~vtkPointSetCellIterator();
+
+  void ResetToFirstCell();
+  void IncrementToNextCell();
+  void FetchCellType();
+  void FetchPointIds();
+  void FetchPoints();
+
+  friend class vtkPointSet;
+  void SetPointSet(vtkPointSet *ds);
+
+  vtkSmartPointer<vtkPointSet> PointSet;
+  vtkSmartPointer<vtkPoints> PointSetPoints;
+  vtkIdType CellId;
+
+private:
+  vtkPointSetCellIterator(const vtkPointSetCellIterator &); // Not implemented.
+  void operator=(const vtkPointSetCellIterator &);   // Not implemented.
+};
+
+#endif //__vtkPointSetCellIterator_h
diff --git a/Common/DataModel/vtkPointsProjectedHull.cxx b/Common/DataModel/vtkPointsProjectedHull.cxx
index 6f0e437..2986542 100644
--- a/Common/DataModel/vtkPointsProjectedHull.cxx
+++ b/Common/DataModel/vtkPointsProjectedHull.cxx
@@ -75,17 +75,11 @@ void vtkPointsProjectedHull::ClearAllocations()
   int i;
   for (i=0; i<3; i++)
     {
-    if (this->CCWHull[i])
-      {
-      delete [] this->CCWHull[i];
-      this->CCWHull[i] = NULL;
-      }
-    }
-  if (this->Pts)
-    {
-    delete [] this->Pts;
-    this->Pts = NULL;
+    delete [] this->CCWHull[i];
+    this->CCWHull[i] = NULL;
     }
+  delete [] this->Pts;
+  this->Pts = NULL;
 }
 #define VTK_GETCCWHULL(which, dim) \
 int vtkPointsProjectedHull::GetCCWHull##which(float *pts, int len)\
@@ -374,10 +368,7 @@ int i,j;
 
   this->HullSize[dir] = nHullPts;
 
-  if (this->CCWHull[dir])
-    {
-    delete [] this->CCWHull[dir];
-    }
+  delete [] this->CCWHull[dir];
 
   this->CCWHull[dir] = new double[nHullPts*2];
 
@@ -489,10 +480,7 @@ void vtkPointsProjectedHull::GetPoints()
 {
   int i;
 
-  if (this->Pts)
-    {
-    delete [] this->Pts;
-    }
+  delete [] this->Pts;
   this->Npts = this->Data->GetNumberOfTuples();
 
   this->Pts = new double [this->Npts*3];
@@ -665,6 +653,7 @@ int vtkPointsProjectedHull::RectangleOutside(double hmin, double hmax,
                   this->CCWHull[dir] + 2*i + 2,
                   insidePt))
       {
+      delete [] insidePt;
       return 1;
       }
     }
diff --git a/Common/DataModel/vtkPolyData.cxx b/Common/DataModel/vtkPolyData.cxx
index 110f02f..1e97aeb 100644
--- a/Common/DataModel/vtkPolyData.cxx
+++ b/Common/DataModel/vtkPolyData.cxx
@@ -33,6 +33,8 @@
 #include "vtkTriangleStrip.h"
 #include "vtkVertex.h"
 
+#include "vtkSmartPointer.h"
+
 vtkStandardNewMacro(vtkPolyData);
 
 //----------------------------------------------------------------------------
@@ -40,9 +42,18 @@ vtkStandardNewMacro(vtkPolyData);
 // of verts, lines, polygons, and triangle strips lists.  It basically
 // "marks" empty lists so that the traveral method "GetNextCell"
 // works properly.
-vtkCellArray *vtkPolyData::Dummy = NULL;
 
-static vtkSimpleCriticalSection DummyCritSect;
+struct vtkPolyDataDummyContainter
+{
+  vtkSmartPointer<vtkCellArray> Dummy;
+
+  vtkPolyDataDummyContainter()
+    {
+      this->Dummy.TakeReference(vtkCellArray::New());
+    }
+};
+
+vtkPolyDataDummyContainter vtkPolyData::DummyContainer;
 
 vtkPolyData::vtkPolyData ()
 {
@@ -68,20 +79,6 @@ vtkPolyData::vtkPolyData ()
   this->Information->Set(vtkDataObject::DATA_NUMBER_OF_PIECES(), 1);
   this->Information->Set(vtkDataObject::DATA_NUMBER_OF_GHOST_LEVELS(), 0);
 
-  // static variable, initialized only once.
-  DummyCritSect.Lock();
-  if (this->Dummy == NULL)
-    {
-    this->Dummy = vtkCellArray::New();
-    this->Dummy->Register(this);
-    this->Dummy->Delete();
-    }
-  else
-    {
-    this->Dummy->Register(this);
-    }
-  DummyCritSect.Unlock();
-
   this->Cells = NULL;
   this->Links = NULL;
 }
@@ -91,20 +88,6 @@ vtkPolyData::~vtkPolyData()
 {
   this->Cleanup();
 
-  // Reference to static dummy persists.
-  // Keep destructed dummy from being used again.
-  DummyCritSect.Lock();
-  if (this->Dummy->GetReferenceCount() == 1)
-    {
-    this->Dummy->UnRegister(this);
-    this->Dummy = NULL;
-    }
-  else
-    {
-    this->Dummy->UnRegister(this);
-    }
-  DummyCritSect.Unlock();
-
   if (this->Vertex)
     {
     this->Vertex->Delete();
@@ -655,7 +638,7 @@ void vtkPolyData::ComputeBounds()
 // Set the cell array defining vertices.
 void vtkPolyData::SetVerts (vtkCellArray* v)
 {
-  if (v == this->Dummy)
+  if (v == this->DummyContainer.Dummy.GetPointer())
     {
     v = NULL;
     }
@@ -681,7 +664,7 @@ vtkCellArray* vtkPolyData::GetVerts()
 {
   if ( !this->Verts )
     {
-    return this->Dummy;
+    return this->DummyContainer.Dummy.GetPointer();
     }
   else
     {
@@ -693,7 +676,7 @@ vtkCellArray* vtkPolyData::GetVerts()
 // Set the cell array defining lines.
 void vtkPolyData::SetLines (vtkCellArray* l)
 {
-  if (l == this->Dummy)
+  if (l == this->DummyContainer.Dummy.GetPointer())
     {
     l = NULL;
     }
@@ -719,7 +702,7 @@ vtkCellArray* vtkPolyData::GetLines()
 {
   if ( !this->Lines )
     {
-    return this->Dummy;
+    return this->DummyContainer.Dummy.GetPointer();
     }
   else
     {
@@ -731,7 +714,7 @@ vtkCellArray* vtkPolyData::GetLines()
 // Set the cell array defining polygons.
 void vtkPolyData::SetPolys (vtkCellArray* p)
 {
-  if(p == this->Dummy)
+  if(p == this->DummyContainer.Dummy.GetPointer())
     {
     p = NULL;
     }
@@ -757,7 +740,7 @@ vtkCellArray* vtkPolyData::GetPolys()
 {
   if ( !this->Polys )
     {
-    return this->Dummy;
+    return this->DummyContainer.Dummy.GetPointer();
     }
   else
     {
@@ -769,7 +752,7 @@ vtkCellArray* vtkPolyData::GetPolys()
 // Set the cell array defining triangle strips.
 void vtkPolyData::SetStrips (vtkCellArray* s)
 {
-  if ( s == this->Dummy)
+  if ( s == this->DummyContainer.Dummy.GetPointer())
     {
     s = NULL;
     }
@@ -796,7 +779,7 @@ vtkCellArray* vtkPolyData::GetStrips()
 {
   if ( !this->Strips )
     {
-    return this->Dummy;
+    return this->DummyContainer.Dummy.GetPointer();
     }
   else
     {
@@ -1286,7 +1269,7 @@ int vtkPolyData::InsertNextCell(int type, int npts, vtkIdType *pts)
 
     case VTK_PIXEL: //need to rearrange vertices
       {
-      static vtkIdType pixPts[4];
+      vtkIdType pixPts[4];
       pixPts[0] = pts[0];
       pixPts[1] = pts[1];
       pixPts[2] = pts[3];
@@ -1346,7 +1329,7 @@ int vtkPolyData::InsertNextCell(int type, vtkIdList *pts)
 
     case VTK_PIXEL: //need to rearrange vertices
       {
-      static vtkIdType pixPts[4];
+      vtkIdType pixPts[4];
       pixPts[0] = pts->GetId(0);
       pixPts[1] = pts->GetId(1);
       pixPts[2] = pts->GetId(3);
@@ -1605,33 +1588,33 @@ void vtkPolyData::ReplaceLinkedCell(vtkIdType cellId, int npts, vtkIdType *pts)
 void vtkPolyData::GetCellEdgeNeighbors(vtkIdType cellId, vtkIdType p1,
                                        vtkIdType p2, vtkIdList *cellIds)
 {
-  vtkIdType *cells;
-  vtkIdType numCells;
-  vtkIdType i,j;
-  vtkIdType *pts, npts;
-
   cellIds->Reset();
 
-  numCells = this->Links->GetNcells(p1);
-  cells = this->Links->GetCells(p1);
+  const vtkCellLinks::Link &link1(this->Links->GetLink(p1));
+  const vtkCellLinks::Link &link2(this->Links->GetLink(p2));
 
-  for (i=0; i < numCells; i++)
+  const vtkIdType *cells1 = link1.cells;
+  const vtkIdType *cells1End = cells1 + link1.ncells;
+
+  const vtkIdType *cells2 = link2.cells;
+  const vtkIdType *cells2End = cells2 + link2.ncells;
+
+  while (cells1 != cells1End)
     {
-    if ( cells[i] != cellId )
+    if (*cells1 != cellId)
       {
-      this->GetCellPoints(cells[i],npts,pts);
-      for (j=0; j < npts; j++)
+      const vtkIdType *cells2Cur(cells2);
+      while (cells2Cur != cells2End)
         {
-        if ( pts[j] == p2 )
+        if (*cells1 == *cells2Cur)
           {
+          cellIds->InsertNextId(*cells1);
           break;
           }
-        }
-      if ( j < npts )
-        {
-        cellIds->InsertNextId(cells[i]);
+        ++cells2Cur;
         }
       }
+    ++cells1;
     }
 }
 
@@ -1793,6 +1776,8 @@ unsigned long vtkPolyData::GetActualMemorySize()
 void vtkPolyData::ShallowCopy(vtkDataObject *dataObject)
 {
   vtkPolyData *polyData = vtkPolyData::SafeDownCast(dataObject);
+  if (this == polyData)
+    return;
 
   if ( polyData != NULL )
     {
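The GetCellEdgeNeighbors rewrite above replaces the per-candidate GetCellPoints lookup with a direct intersection of the two end points' cell-link lists. A standalone sketch of that intersection over plain arrays; the names and the IdType stand-in are illustrative:

    #include <vector>

    typedef long long IdType;   // stand-in for vtkIdType in this sketch

    // Cells incident on edge (p1,p2), excluding 'self': intersect the two
    // unsorted cell lists, exactly as the patched loop does with the raw
    // vtkCellLinks::Link arrays.
    std::vector<IdType> EdgeNeighbors(const IdType *cells1, int n1,
                                      const IdType *cells2, int n2,
                                      IdType self)
    {
      std::vector<IdType> out;
      for (int i = 0; i < n1; ++i)
        {
        if (cells1[i] == self)
          {
          continue;
          }
        for (int j = 0; j < n2; ++j)
          {
          if (cells1[i] == cells2[j])
            {
            out.push_back(cells1[i]);
            break;
            }
          }
        }
      return out;
    }
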
diff --git a/Common/DataModel/vtkPolyData.h b/Common/DataModel/vtkPolyData.h
index ba96ed1..a3a6f5b 100644
--- a/Common/DataModel/vtkPolyData.h
+++ b/Common/DataModel/vtkPolyData.h
@@ -69,6 +69,7 @@ class vtkQuad;
 class vtkPolygon;
 class vtkTriangleStrip;
 class vtkEmptyCell;
+struct vtkPolyDataDummyContainter;
 
 class VTKCOMMONDATAMODEL_EXPORT vtkPolyData : public vtkPointSet
 {
@@ -451,7 +452,7 @@ protected:
   vtkCellArray *Strips;
 
   // dummy static member below used as a trick to simplify traversal
-  static vtkCellArray *Dummy;
+  static vtkPolyDataDummyContainter DummyContainer;
 
   // supporting structures for more complex topological operations
   // built only when necessary
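The hunks above retire the critical-section-guarded reference counting of the shared dummy cell array in favour of a static container whose vtkSmartPointer keeps a single vtkCellArray alive for the lifetime of the program. A minimal sketch of the same pattern with generic names:

    #include "vtkCellArray.h"
    #include "vtkSmartPointer.h"

    // One shared fallback object, created when the translation unit's statics
    // are initialized and released automatically at program exit.
    struct DummyHolder
    {
      vtkSmartPointer<vtkCellArray> Dummy;
      DummyHolder() { this->Dummy.TakeReference(vtkCellArray::New()); }
    };

    static DummyHolder SharedDummy;

    // Mirrors the patched GetVerts()/GetLines()/...: hand out the real array
    // when present, otherwise the shared dummy so traversal never sees NULL.
    vtkCellArray *ArrayOrDummy(vtkCellArray *a)
    {
      return a ? a : SharedDummy.Dummy.GetPointer();
    }
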
diff --git a/Common/DataModel/vtkPolyDataCollection.h b/Common/DataModel/vtkPolyDataCollection.h
index c4e9515..7309bc9 100644
--- a/Common/DataModel/vtkPolyDataCollection.h
+++ b/Common/DataModel/vtkPolyDataCollection.h
@@ -56,8 +56,8 @@ public:
   //ETX
 
 protected:
-  vtkPolyDataCollection() {};
-  ~vtkPolyDataCollection() {};
+  vtkPolyDataCollection() {}
+  ~vtkPolyDataCollection() {}
 
 private:
   // hide the standard AddItem from the user and the compiler.
diff --git a/Common/DataModel/vtkPolygon.cxx b/Common/DataModel/vtkPolygon.cxx
index 728e350..5700038 100644
--- a/Common/DataModel/vtkPolygon.cxx
+++ b/Common/DataModel/vtkPolygon.cxx
@@ -380,7 +380,7 @@ void vtkPolygon::InterpolateFunctions(double x[3], double *weights)
       }
     else
       {
-      weights[i] = 1.0 / (weights[i]*weights[i]);
+      weights[i] = 1.0 / weights[i];
       sum += weights[i];
       }
     }
diff --git a/Common/DataModel/vtkPolyhedron.cxx b/Common/DataModel/vtkPolyhedron.cxx
index 4d6b0a0..5fa011e 100644
--- a/Common/DataModel/vtkPolyhedron.cxx
+++ b/Common/DataModel/vtkPolyhedron.cxx
@@ -1775,6 +1775,8 @@ int vtkPolyhedron::IntersectWithLine(double p1[3], double p2[3], double tol,
         hit = this->Quad->IntersectWithLine(p1,p2,tol,t,x,pc,subId);
         break;
       default: //general polygon
+        this->Polygon->GetPoints()->SetNumberOfPoints(npts);
+        this->Polygon->GetPointIds()->SetNumberOfIds(npts);
         for (i=0; i<npts; i++)
           {
           this->Polygon->Points->SetPoint(i,this->Points->GetPoint(face[i+1]));
@@ -2838,7 +2840,6 @@ void vtkPolyhedron::Contour(double value,
   this->Internal->RestoreFaceArrayAndEdgeTable(this->Faces, this->EdgeTable);
 }
 
-
 //----------------------------------------------------------------------------
 void vtkPolyhedron::Clip(double value,
                          vtkDataArray *pointScalars,
@@ -2946,6 +2947,10 @@ void vtkPolyhedron::Clip(double value,
     }
 
   // polyhedron is all inside
+  // FIXME: Documentation needed:
+  // 1. How can this ever happen, given the IntersectWithContour call above?
+  // 2. If it can happen, how and why is it different from the code above that
+  //    copies the cell to the output?
   if (ret == 1)
     {
     cellVector.push_back(this->Faces->GetValue(0));
diff --git a/Common/DataModel/vtkQuadraticHexahedron.cxx b/Common/DataModel/vtkQuadraticHexahedron.cxx
index 24fd44e..16ffb89 100644
--- a/Common/DataModel/vtkQuadraticHexahedron.cxx
+++ b/Common/DataModel/vtkQuadraticHexahedron.cxx
@@ -173,6 +173,8 @@ void vtkQuadraticHexahedron::Subdivide(vtkPointData *inPd, vtkCellData *inCd,
 
   //Interpolate new values
   double p[3];
+  this->Points->Resize(27);
+  this->CellScalars->Resize(27);
   for ( numMidPts=0; numMidPts < 7; numMidPts++ )
     {
     this->InterpolationFunctions(MidPoints[numMidPts], weights);
diff --git a/Common/DataModel/vtkQuadraticPolygon.cxx b/Common/DataModel/vtkQuadraticPolygon.cxx
new file mode 100644
index 0000000..b957a17
--- /dev/null
+++ b/Common/DataModel/vtkQuadraticPolygon.cxx
@@ -0,0 +1,493 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkQuadraticPolygon.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkQuadraticPolygon.h"
+
+#include "vtkCellData.h"
+#include "vtkDataArray.h"
+#include "vtkIdTypeArray.h"
+#include "vtkObjectFactory.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkPolygon.h"
+#include "vtkQuadraticEdge.h"
+
+vtkStandardNewMacro(vtkQuadraticPolygon);
+
+//----------------------------------------------------------------------------
+// Instantiate quadratic polygon.
+vtkQuadraticPolygon::vtkQuadraticPolygon()
+{
+  this->Polygon = vtkPolygon::New();
+  this->Edge = vtkQuadraticEdge::New();
+  this->UseMVCInterpolation = true;
+}
+
+//----------------------------------------------------------------------------
+vtkQuadraticPolygon::~vtkQuadraticPolygon()
+{
+  this->Polygon->Delete();
+  this->Edge->Delete();
+}
+
+//----------------------------------------------------------------------------
+vtkCell *vtkQuadraticPolygon::GetEdge(int edgeId)
+{
+  int numEdges = this->GetNumberOfEdges();
+
+  edgeId = (edgeId < 0 ? 0 : (edgeId > numEdges - 1 ? numEdges - 1 : edgeId ));
+  int p = (edgeId + 1) % numEdges;
+
+  // load point id's
+  this->Edge->PointIds->SetId(0, this->PointIds->GetId(edgeId));
+  this->Edge->PointIds->SetId(1, this->PointIds->GetId(p));
+  this->Edge->PointIds->SetId(2, this->PointIds->GetId(edgeId + numEdges));
+
+  // load coordinates
+  this->Edge->Points->SetPoint(0, this->Points->GetPoint(edgeId));
+  this->Edge->Points->SetPoint(1, this->Points->GetPoint(p));
+  this->Edge->Points->SetPoint(2, this->Points->GetPoint(edgeId + numEdges));
+
+  return this->Edge;
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::EvaluatePosition(double* x,
+                                          double* closestPoint,
+                                          int& subId, double pcoords[3],
+                                          double& minDist2, double *weights)
+{
+  this->InitializePolygon();
+  int result = this->Polygon->EvaluatePosition(x, closestPoint, subId, pcoords,
+                                               minDist2, weights);
+  vtkQuadraticPolygon::PermuteFromPolygon(this->GetNumberOfPoints(), weights);
+  return result;
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::EvaluateLocation(int& subId, double pcoords[3],
+                                           double x[3], double *weights)
+{
+  this->InitializePolygon();
+  this->Polygon->EvaluateLocation(subId, pcoords, x, weights);
+  vtkQuadraticPolygon::PermuteFromPolygon(this->GetNumberOfPoints(), weights);
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::CellBoundary(int subId, double pcoords[3],
+                                      vtkIdList *pts)
+{
+  this->InitializePolygon();
+  return this->Polygon->CellBoundary(subId, pcoords, pts);
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::Contour(double value,
+                                  vtkDataArray* cellScalars,
+                                  vtkIncrementalPointLocator* locator,
+                                  vtkCellArray *verts,
+                                  vtkCellArray* lines,
+                                  vtkCellArray* polys,
+                                  vtkPointData* inPd,
+                                  vtkPointData* outPd,
+                                  vtkCellData* inCd,
+                                  vtkIdType cellId,
+                                  vtkCellData* outCd)
+{
+  this->InitializePolygon();
+
+  vtkDataArray *convertedCellScalars = cellScalars->NewInstance();
+  vtkQuadraticPolygon::PermuteToPolygon(cellScalars, convertedCellScalars);
+
+  this->Polygon->Contour(value, convertedCellScalars, locator, verts, lines,
+                         polys, inPd, outPd, inCd, cellId, outCd);
+
+  convertedCellScalars->Delete();
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::Clip(double value, vtkDataArray* cellScalars,
+                               vtkIncrementalPointLocator* locator,
+                               vtkCellArray* polys,
+                               vtkPointData* inPd, vtkPointData* outPd,
+                               vtkCellData* inCd, vtkIdType cellId,
+                               vtkCellData* outCd, int insideOut)
+{
+  this->InitializePolygon();
+
+  vtkDataArray *convertedCellScalars = cellScalars->NewInstance();
+  vtkQuadraticPolygon::PermuteToPolygon(cellScalars, convertedCellScalars);
+
+  this->Polygon->Clip(value, convertedCellScalars, locator, polys, inPd, outPd,
+                      inCd, cellId, outCd, insideOut);
+
+  convertedCellScalars->Delete();
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::IntersectWithLine(double* p1,
+                                           double* p2,
+                                           double tol,
+                                           double& t,
+                                           double* x,
+                                           double* pcoords,
+                                           int& subId)
+{
+  this->InitializePolygon();
+  return this->Polygon->IntersectWithLine(p1, p2, tol, t, x, pcoords, subId);
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::Triangulate(vtkIdList *outTris)
+{
+  this->InitializePolygon();
+  int result = this->Polygon->Triangulate(outTris);
+  vtkQuadraticPolygon::ConvertFromPolygon(outTris);
+  return result;
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::Triangulate(int index, vtkIdList *ptIds,
+                                     vtkPoints *pts)
+{
+  this->InitializePolygon();
+  return this->Polygon->Triangulate(index, ptIds, pts);
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::NonDegenerateTriangulate(vtkIdList *outTris)
+{
+  this->InitializePolygon();
+  int result = this->Polygon->NonDegenerateTriangulate(outTris);
+  vtkQuadraticPolygon::ConvertFromPolygon(outTris);
+  return result;
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::InterpolateFunctions(double x[3],
+                                               double *weights)
+{
+  this->InitializePolygon();
+  this->Polygon->SetUseMVCInterpolation(UseMVCInterpolation);
+  this->Polygon->InterpolateFunctions(x, weights);
+  vtkQuadraticPolygon::PermuteFromPolygon(this->GetNumberOfPoints(), weights);
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+
+  os << indent << "UseMVCInterpolation: " <<
+    this->UseMVCInterpolation << "\n";
+  os << indent << "Edge:\n";
+  this->Edge->PrintSelf(os, indent.GetNextIndent());
+  os << indent << "Polygon:\n";
+  this->Polygon->PrintSelf(os, indent.GetNextIndent());
+}
+
+//----------------------------------------------------------------------------
+double vtkQuadraticPolygon::DistanceToPolygon(double x[3], int numPts,
+                                              double *pts, double bounds[6],
+                                              double closest[3])
+{
+  double *convertedPts = new double[numPts * 3];
+  vtkQuadraticPolygon::PermuteToPolygon(numPts, pts, convertedPts);
+
+  double result = vtkPolygon::DistanceToPolygon(x, numPts, convertedPts,
+                                                bounds, closest);
+
+  delete[] convertedPts;
+
+  return result;
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::ComputeCentroid(vtkIdTypeArray *ids,
+                                          vtkPoints *p,
+                                          double c[3])
+{
+  vtkPoints *convertedPts = vtkPoints::New();
+  vtkQuadraticPolygon::PermuteToPolygon(p, convertedPts);
+
+  vtkIdTypeArray *convertedIds = vtkIdTypeArray::New();
+  vtkQuadraticPolygon::PermuteToPolygon(ids, convertedIds);
+
+  vtkPolygon::ComputeCentroid(convertedIds, convertedPts, c);
+
+  convertedPts->Delete();
+  convertedIds->Delete();
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::ParameterizePolygon(double *p0, double *p10,
+                                             double& l10, double *p20,
+                                             double &l20, double *n)
+{
+  this->InitializePolygon();
+  return this->Polygon->ParameterizePolygon(p0, p10, l10, p20, l20, n);
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::IntersectPolygonWithPolygon(int npts, double *pts,
+                                                     double bounds[6],
+                                                     int npts2, double *pts2,
+                                                     double bounds2[6],
+                                                     double tol2, double x[3])
+{
+  double *convertedPts = new double[npts*3];
+  vtkQuadraticPolygon::PermuteToPolygon(npts, pts, convertedPts);
+
+  double *convertedPts2 = new double[npts2*3];
+  vtkQuadraticPolygon::PermuteToPolygon(npts2, pts2, convertedPts2);
+
+  int result = vtkPolygon::IntersectPolygonWithPolygon(
+    npts, convertedPts, bounds,
+    npts2, convertedPts2, bounds2,
+    tol2, x);
+
+  delete[] convertedPts;
+  delete[] convertedPts2;
+
+  return result;
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::IntersectConvex2DCells(vtkCell *cell1, vtkCell *cell2,
+                                                double tol, double p0[3],
+                                                double p1[3])
+{
+  vtkPolygon *convertedCell1 = 0;
+  vtkPolygon *convertedCell2 = 0;
+
+  vtkQuadraticPolygon *qp1 = dynamic_cast<vtkQuadraticPolygon*>(cell1);
+  if (qp1)
+    {
+      convertedCell1 = vtkPolygon::New();
+      vtkQuadraticPolygon::PermuteToPolygon(cell1, convertedCell1);
+    }
+
+  vtkQuadraticPolygon *qp2 = dynamic_cast<vtkQuadraticPolygon*>(cell2);
+  if (qp2)
+    {
+      convertedCell2 = vtkPolygon::New();
+      vtkQuadraticPolygon::PermuteToPolygon(cell2, convertedCell2);
+    }
+
+  int result = vtkPolygon::IntersectConvex2DCells(
+    (convertedCell1 ? convertedCell1 : cell1),
+    (convertedCell2 ? convertedCell2 : cell2),
+    tol, p0, p1);
+
+  if (convertedCell1)
+    {
+    convertedCell1->Delete();
+    }
+  if (convertedCell2)
+    {
+    convertedCell2->Delete();
+    }
+
+  return result;
+}
+
+//----------------------------------------------------------------------------
+int vtkQuadraticPolygon::PointInPolygon (double x[3], int numPts, double *pts,
+                                         double bounds[6], double *n)
+{
+  double *convertedPts = new double[numPts * 3];
+  vtkQuadraticPolygon::PermuteToPolygon(numPts, pts, convertedPts);
+
+  int result = vtkPolygon::PointInPolygon(x, numPts, convertedPts, bounds, n);
+
+  delete[] convertedPts;
+
+  return result;
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::GetPermutationFromPolygon(vtkIdType nb,
+                                                    vtkIdList *permutation)
+{
+  permutation->SetNumberOfIds(nb);
+  for (vtkIdType i = 0; i < nb; i++)
+    {
+    permutation->SetId(i, (i % 2 ? (i + nb)/2 : i/2));
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::PermuteToPolygon(vtkIdType nbPoints,
+                                           double *inPoints,
+                                           double *outPoints)
+{
+  vtkIdList *permutation = vtkIdList::New();
+  vtkQuadraticPolygon::GetPermutationFromPolygon(nbPoints, permutation);
+
+  for (vtkIdType i = 0; i < nbPoints; i++)
+    {
+    for (int j = 0; j < 3; j++)
+      {
+        outPoints[3 * i + j] = inPoints[3 * permutation->GetId(i) + j];
+      }
+    }
+
+  permutation->Delete();
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::PermuteToPolygon(vtkPoints *inPoints,
+                                           vtkPoints *outPoints)
+{
+  vtkIdType nbPoints = inPoints->GetNumberOfPoints();
+
+  vtkIdList *permutation = vtkIdList::New();
+  vtkQuadraticPolygon::GetPermutationFromPolygon(nbPoints, permutation);
+
+  outPoints->SetNumberOfPoints(nbPoints);
+  for (vtkIdType i = 0; i < nbPoints; i++)
+    {
+    outPoints->SetPoint(i, inPoints->GetPoint(permutation->GetId(i)));
+    }
+
+  permutation->Delete();
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::PermuteToPolygon(vtkIdTypeArray *inIds,
+                                           vtkIdTypeArray *outIds)
+{
+  vtkIdType nbIds = inIds->GetNumberOfTuples();
+
+  vtkIdList *permutation = vtkIdList::New();
+  vtkQuadraticPolygon::GetPermutationFromPolygon(nbIds, permutation);
+
+  outIds->SetNumberOfTuples(nbIds);
+  for (vtkIdType i = 0; i < nbIds; i++)
+    {
+    outIds->SetValue(i, inIds->GetValue(permutation->GetId(i)));
+    }
+
+  permutation->Delete();
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::PermuteToPolygon(vtkDataArray *inDataArray,
+                                           vtkDataArray *outDataArray)
+{
+  vtkIdType nb = inDataArray->GetNumberOfTuples();
+
+  vtkIdList *permutation = vtkIdList::New();
+  vtkQuadraticPolygon::GetPermutationFromPolygon(nb, permutation);
+
+  outDataArray->SetNumberOfComponents(inDataArray->GetNumberOfComponents());
+  outDataArray->SetNumberOfTuples(nb);
+  inDataArray->GetTuples(permutation, outDataArray);
+
+  permutation->Delete();
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::PermuteToPolygon(vtkCell *inCell,
+                                           vtkCell *outCell)
+{
+  vtkIdType nbPoints = inCell->GetNumberOfPoints();
+
+  vtkIdList *permutation = vtkIdList::New();
+  vtkQuadraticPolygon::GetPermutationFromPolygon(nbPoints, permutation);
+
+  outCell->Points->SetNumberOfPoints(nbPoints);
+  outCell->PointIds->SetNumberOfIds(nbPoints);
+
+  for (vtkIdType i = 0; i < nbPoints; i++)
+    {
+    outCell->PointIds->SetId(i, inCell->PointIds->GetId(permutation->GetId(i)));
+    outCell->Points->SetPoint(i, inCell->Points->GetPoint(permutation->GetId(i)));
+    }
+
+  permutation->Delete();
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::InitializePolygon()
+{
+  vtkQuadraticPolygon::PermuteToPolygon(this, this->Polygon);
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::GetPermutationToPolygon(vtkIdType nb,
+                                                  vtkIdList *permutation)
+{
+  permutation->SetNumberOfIds(nb);
+  for (vtkIdType i = 0; i < nb; i++)
+    {
+    permutation->SetId(i, (i < nb / 2) ? (i * 2) : (i * 2 + 1 - nb));
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::PermuteFromPolygon(vtkIdType nb,
+                                             double *values)
+{
+  vtkIdList *permutation = vtkIdList::New();
+  vtkQuadraticPolygon::GetPermutationToPolygon(nb, permutation);
+
+  double *save = new double[nb];
+  for (vtkIdType i = 0; i < nb; i++)
+    {
+    save[i] = values[i];
+    }
+  for (vtkIdType i = 0; i < nb; i++)
+    {
+    values[i] = save[permutation->GetId(i)];
+    }
+
+  permutation->Delete();
+  delete[] save;
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::ConvertFromPolygon(vtkIdList *ids)
+{
+  vtkIdType nbIds = ids->GetNumberOfIds();
+
+  vtkIdList *permutation = vtkIdList::New();
+  vtkQuadraticPolygon::GetPermutationFromPolygon(nbIds, permutation);
+
+  vtkIdList *saveList = vtkIdList::New();
+  saveList->SetNumberOfIds(nbIds);
+  ids->SetNumberOfIds(nbIds);
+
+  for (vtkIdType i = 0; i < nbIds; i++)
+    {
+    saveList->SetId(i, ids->GetId(i));
+    }
+  for (vtkIdType i = 0; i < nbIds; i++)
+    {
+    ids->SetId(i, permutation->GetId(saveList->GetId(i)));
+    }
+
+  permutation->Delete();
+  saveList->Delete();
+}
+
+//----------------------------------------------------------------------------
+void vtkQuadraticPolygon::Derivatives(int vtkNotUsed(subId),
+                                      double pcoords[3],
+                                      double *vtkNotUsed(values),
+                                      int vtkNotUsed(dim),
+                                      double *vtkNotUsed(derivs))
+{
+  pcoords[0] = pcoords[1] = pcoords[2] = 0.0;
+}
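GetPermutationFromPolygon above interleaves corner and mid-edge nodes so that the quadratic ordering (corners 0..n-1, mid-edge nodes n..2*n-1) becomes the linear vtkPolygon ordering corner, mid-edge, corner, mid-edge, ... A tiny standalone check of the index formula for n = 3:

    #include <cstdio>

    int main()
    {
      const int nb = 6;   // a quadratic polygon with 3 corners + 3 mid-edge nodes
      for (int i = 0; i < nb; ++i)
        {
        int id = (i % 2) ? (i + nb) / 2 : i / 2;   // formula from the patch
        std::printf("linear polygon point %d  <-  quadratic point %d\n", i, id);
        }
      // Prints the interleaving 0, 3, 1, 4, 2, 5.
      return 0;
    }
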
diff --git a/Common/DataModel/vtkQuadraticPolygon.h b/Common/DataModel/vtkQuadraticPolygon.h
new file mode 100644
index 0000000..4f10f15
--- /dev/null
+++ b/Common/DataModel/vtkQuadraticPolygon.h
@@ -0,0 +1,148 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkQuadraticPolygon.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkQuadraticPolygon - a cell that represents a parabolic n-sided polygon
+// .SECTION Description
+// vtkQuadraticPolygon is a concrete implementation of vtkNonLinearCell to
+// represent a 2D n-sided (2*n nodes) parabolic polygon. The polygon cannot
+// have any internal holes, and cannot self-intersect. The cell includes a
+// mid-edge node for each of the n edges of the cell. The ordering of the
+// 2*n points defining the cell are point ids (0..n-1 and n..2*n-1) where ids
+// 0..n-1 define the corner vertices of the polygon; ids n..2*n-1 define the
+// midedge nodes. Define the polygon with points ordered in the counter-
+// clockwise direction; do not repeat the last point.
+
+// .SECTION See Also
+// vtkQuadraticEdge vtkQuadraticTriangle vtkQuadraticTetra
+// vtkQuadraticHexahedron vtkQuadraticWedge vtkQuadraticPyramid
+
+
+#ifndef __vtkQuadraticPolygon_h
+#define __vtkQuadraticPolygon_h
+
+#include "vtkCommonDataModelModule.h" // For export macro
+#include "vtkNonLinearCell.h"
+
+class vtkQuadraticEdge;
+class vtkPolygon;
+class vtkIdTypeArray;
+
+class VTKCOMMONDATAMODEL_EXPORT vtkQuadraticPolygon : public vtkNonLinearCell
+{
+public:
+  static vtkQuadraticPolygon *New();
+  vtkTypeMacro(vtkQuadraticPolygon, vtkNonLinearCell);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Implement the vtkCell API. See the vtkCell API for descriptions
+  // of these methods.
+  int GetCellType() { return VTK_QUADRATIC_POLYGON; }
+  int GetCellDimension() { return 2;}
+  int GetNumberOfEdges() { return this->GetNumberOfPoints() / 2; }
+  int GetNumberOfFaces() { return 0; }
+  vtkCell *GetEdge(int);
+  vtkCell *GetFace(int) { return 0; }
+  int IsPrimaryCell() { return 0; }
+
+  // Description:
+  // These methods are based on the vtkPolygon ones:
+  // the vtkQuadraticPolygon (with n edges and 2*n points)
+  // is transformed into a vtkPolygon (with 2*n edges and 2*n points)
+  // and the vtkPolygon methods are called.
+  int CellBoundary(int subId, double pcoords[3], vtkIdList *pts);
+  void Contour(double value, vtkDataArray *cellScalars,
+               vtkIncrementalPointLocator *locator, vtkCellArray *verts,
+               vtkCellArray *lines, vtkCellArray *polys,
+               vtkPointData *inPd, vtkPointData *outPd,
+               vtkCellData *inCd, vtkIdType cellId, vtkCellData *outCd);
+  void Clip(double value, vtkDataArray *cellScalars,
+            vtkIncrementalPointLocator *locator, vtkCellArray *polys,
+            vtkPointData *inPd, vtkPointData *outPd,
+            vtkCellData *inCd, vtkIdType cellId, vtkCellData *outCd,
+            int insideOut);
+  int EvaluatePosition(double x[3], double* closestPoint,
+                       int& subId, double pcoords[3],
+                       double& dist2, double *weights);
+  void EvaluateLocation(int& subId, double pcoords[3], double x[3],
+                        double *weights);
+  int IntersectWithLine(double p1[3], double p2[3], double tol, double& t,
+                        double x[3], double pcoords[3], int& subId);
+  virtual void InterpolateFunctions(double x[3], double *weights);
+  static void ComputeCentroid(vtkIdTypeArray *ids, vtkPoints *pts,
+                              double centroid[3]);
+  int ParameterizePolygon(double p0[3], double p10[3], double &l10,
+                          double p20[3], double &l20, double n[3]);
+  static int PointInPolygon(double x[3], int numPts, double *pts,
+                            double bounds[6], double n[3]);
+  int Triangulate(vtkIdList *outTris);
+  int Triangulate(int index, vtkIdList *ptIds, vtkPoints *pts);
+  int NonDegenerateTriangulate(vtkIdList *outTris);
+  static double DistanceToPolygon(double x[3], int numPts, double *pts,
+                                  double bounds[6], double closest[3]);
+  static int IntersectPolygonWithPolygon(int npts, double *pts, double bounds[6],
+                                         int npts2, double *pts2,
+                                         double bounds2[3], double tol,
+                                         double x[3]);
+  static int IntersectConvex2DCells(vtkCell *cell1, vtkCell *cell2,
+                                    double tol, double p0[3], double p1[3]);
+
+  // Not implemented
+  void Derivatives(int subId, double pcoords[3], double *values,
+                   int dim, double *derivs);
+
+  // Description:
+  // Set/Get the flag indicating whether to use Mean Value Coordinate for the
+  // interpolation. If true, InterpolateFunctions() uses the Mean Value
+  // Coordinate to compute weights. Otherwise, the conventional 1/r^2 method
+  // is used. The UseMVCInterpolation parameter is set to true by default.
+  vtkGetMacro(UseMVCInterpolation, bool);
+  vtkSetMacro(UseMVCInterpolation, bool);
+
+protected:
+  vtkQuadraticPolygon();
+  ~vtkQuadraticPolygon();
+
+  // variables used by instances of this class
+  vtkPolygon       *Polygon;
+  vtkQuadraticEdge *Edge;
+
+  // Parameter indicating whether to use Mean Value Coordinate algorithm
+  // for interpolation. The parameter is true by default.
+  bool UseMVCInterpolation;
+
+  // Description:
+  // Methods to transform a vtkQuadraticPolygon variable into a vtkPolygon
+  // variable.
+  static void GetPermutationFromPolygon(vtkIdType nb, vtkIdList *permutation);
+  static void PermuteToPolygon(vtkIdType nbPoints, double *inPoints, double *outPoints);
+  static void PermuteToPolygon(vtkCell *inCell, vtkCell *outCell);
+  static void PermuteToPolygon(vtkPoints *inPoints, vtkPoints *outPoints);
+  static void PermuteToPolygon(vtkIdTypeArray *inIds, vtkIdTypeArray *outIds);
+  static void PermuteToPolygon(vtkDataArray *inDataArray, vtkDataArray *outDataArray);
+  void InitializePolygon();
+
+  // Description:
+  // Methods to transform a vtkPolygon variable into a vtkQuadraticPolygon
+  // variable.
+  static void GetPermutationToPolygon(vtkIdType nb, vtkIdList *permutation);
+  static void PermuteFromPolygon(vtkIdType nb, double *values);
+  static void ConvertFromPolygon(vtkIdList *ids);
+
+private:
+  vtkQuadraticPolygon(const vtkQuadraticPolygon&);  // Not implemented.
+  void operator=(const vtkQuadraticPolygon&);  // Not implemented.
+};
+
+#endif
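A construction sketch following the point ordering documented above: corner vertices first, then the mid-edge nodes of edges 0-1, 1-2 and 2-0. The coordinates and the helper name are illustrative:

    #include "vtkIdList.h"
    #include "vtkPoints.h"
    #include "vtkQuadraticPolygon.h"

    // A 6-point quadratic polygon: corners 0..2 first, then the mid-edge
    // nodes 3..5 of edges 0-1, 1-2 and 2-0.  Caller owns the returned cell.
    vtkQuadraticPolygon *MakeQuadraticTrianglePolygon()
    {
      const double coords[6][3] = {
        { 0.0, 0.0, 0.0 }, { 1.0,  0.0, 0.0 }, { 0.5,  1.0, 0.0 },    // corners
        { 0.5, 0.0, 0.0 }, { 0.75, 0.5, 0.0 }, { 0.25, 0.5, 0.0 } };  // mid-edge nodes
      vtkQuadraticPolygon *cell = vtkQuadraticPolygon::New();
      cell->GetPointIds()->SetNumberOfIds(6);
      cell->GetPoints()->SetNumberOfPoints(6);
      for (int i = 0; i < 6; ++i)
        {
        cell->GetPointIds()->SetId(i, i);
        cell->GetPoints()->SetPoint(i, coords[i]);
        }
      return cell;
    }
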
diff --git a/Common/DataModel/vtkQuadraticPyramid.cxx b/Common/DataModel/vtkQuadraticPyramid.cxx
index 2be329d..a0b972b 100644
--- a/Common/DataModel/vtkQuadraticPyramid.cxx
+++ b/Common/DataModel/vtkQuadraticPyramid.cxx
@@ -346,6 +346,8 @@ void vtkQuadraticPyramid::Subdivide(vtkPointData *inPd, vtkCellData *inCd,
 
   //Interpolate new values
   double p[3];
+  this->Points->Resize(14);
+  this->CellScalars->Resize(14);
   for ( numMidPts=0; numMidPts < 1; numMidPts++ )
     {
     this->InterpolationFunctions(MidPoints[numMidPts], weights);
diff --git a/Common/DataModel/vtkQuadraticQuad.cxx b/Common/DataModel/vtkQuadraticQuad.cxx
index bf82a36..92e3b1f 100644
--- a/Common/DataModel/vtkQuadraticQuad.cxx
+++ b/Common/DataModel/vtkQuadraticQuad.cxx
@@ -245,6 +245,8 @@ void vtkQuadraticQuad::InterpolateAttributes(vtkPointData *inPd, vtkCellData *in
 
   //Interpolate new values
   double p[3];
+  this->Points->Resize(9);
+  this->CellScalars->Resize(9);
   for ( numMidPts=0; numMidPts < 1; numMidPts++ )
     {
     this->InterpolationFunctions(MidPoints[numMidPts], weights);
diff --git a/Common/DataModel/vtkQuadraticWedge.cxx b/Common/DataModel/vtkQuadraticWedge.cxx
index d926fbd..0a3cf8c 100644
--- a/Common/DataModel/vtkQuadraticWedge.cxx
+++ b/Common/DataModel/vtkQuadraticWedge.cxx
@@ -342,6 +342,8 @@ void vtkQuadraticWedge::Subdivide(vtkPointData *inPd, vtkCellData *inCd,
 
   //Interpolate new values
   double p[3];
+  this->Points->Resize(18);
+  this->CellScalars->Resize(18);
   for ( numMidPts=0; numMidPts < 3; numMidPts++ )
     {
     this->InterpolationFunctions(MidPoints[numMidPts], weights);
diff --git a/Common/DataModel/vtkQuadratureSchemeDefinition.cxx b/Common/DataModel/vtkQuadratureSchemeDefinition.cxx
index 7a257c7..ab512c7 100644
--- a/Common/DataModel/vtkQuadratureSchemeDefinition.cxx
+++ b/Common/DataModel/vtkQuadratureSchemeDefinition.cxx
@@ -126,16 +126,11 @@ void vtkQuadratureSchemeDefinition::Initialize(
 //-----------------------------------------------------------------------------
 void vtkQuadratureSchemeDefinition::ReleaseResources()
 {
-  if (this->ShapeFunctionWeights!=0)
-    {
-    delete [] this->ShapeFunctionWeights;
-    this->ShapeFunctionWeights=0;
-    }
-  if (this->QuadratureWeights!=0)
-    {
-    delete [] this->QuadratureWeights;
-    this->QuadratureWeights=0;
-    }
+  delete [] this->ShapeFunctionWeights;
+  this->ShapeFunctionWeights=0;
+
+  delete [] this->QuadratureWeights;
+  this->QuadratureWeights=0;
 }
 
 //-----------------------------------------------------------------------------
@@ -175,7 +170,8 @@ void vtkQuadratureSchemeDefinition::SetShapeFunctionWeights(const double *W)
 {
   if ((this->NumberOfQuadraturePoints<=0)
       || (this->NumberOfNodes<=0)
-      || (this->ShapeFunctionWeights==0))
+      || (this->ShapeFunctionWeights==0)
+      || !W)
     {
     return;
     }
@@ -192,7 +188,8 @@ void vtkQuadratureSchemeDefinition::SetQuadratureWeights(const double *W)
 {
   if ((this->NumberOfQuadraturePoints<=0)
       || (this->NumberOfNodes<=0)
-      || (this->QuadratureWeights==0))
+      || (this->QuadratureWeights==0)
+      || !W)
     {
     return;
     }
@@ -321,14 +318,8 @@ istream &operator>>(istream &sin, vtkQuadratureSchemeDefinition &def)
   def.Initialize(cellType,nNodes,nQuadPts,SfWt,QWt);
 
   // clean up
-  if (SfWt!=NULL)
-    {
-    delete [] SfWt;
-    }
-  if (QWt!=NULL)
-    {
-    delete [] QWt;
-    }
+  delete [] SfWt;
+  delete [] QWt;
 
   return sin;
 }
diff --git a/Common/DataModel/vtkQuadratureSchemeDefinition.h b/Common/DataModel/vtkQuadratureSchemeDefinition.h
index 4ff3e66..93f650c 100644
--- a/Common/DataModel/vtkQuadratureSchemeDefinition.h
+++ b/Common/DataModel/vtkQuadratureSchemeDefinition.h
@@ -139,7 +139,7 @@ private:
   vtkQuadratureSchemeDefinition(const vtkQuadratureSchemeDefinition &); // Not implemented.
   void operator=(const vtkQuadratureSchemeDefinition &); // Not implemented.
   friend ostream &operator<<(ostream &s, const vtkQuadratureSchemeDefinition &d);
-  friend istream &operator>>(istream &s, const vtkQuadratureSchemeDefinition &d);
+  friend istream &operator>>(istream &s, vtkQuadratureSchemeDefinition &d);
   //
   int CellType;
   int QuadratureKey;
diff --git a/Common/DataModel/vtkQuadric.h b/Common/DataModel/vtkQuadric.h
index 94df6df..531c562 100644
--- a/Common/DataModel/vtkQuadric.h
+++ b/Common/DataModel/vtkQuadric.h
@@ -53,7 +53,7 @@ public:
 
 protected:
   vtkQuadric();
-  ~vtkQuadric() {};
+  ~vtkQuadric() {}
 
   double Coefficients[10];
 
diff --git a/Common/DataModel/vtkReebGraph.cxx b/Common/DataModel/vtkReebGraph.cxx
index 428f826..8a94dbd 100644
--- a/Common/DataModel/vtkReebGraph.cxx
+++ b/Common/DataModel/vtkReebGraph.cxx
@@ -1057,8 +1057,8 @@ vtkReebGraph::Implementation::vtkReebPath vtkReebGraph::Implementation::FindPath
           while (pq.size())
           {
             vtkReebPath aux=pq.top();pq.pop();
-            if (aux.ArcTable) delete aux.ArcTable;
-            if (aux.NodeTable) delete aux.NodeTable;
+            delete aux.ArcTable;
+            delete aux.NodeTable;
           }
 
           if (Ntouch) free(Ntouch);
@@ -1107,8 +1107,8 @@ vtkReebGraph::Implementation::vtkReebPath vtkReebGraph::Implementation::FindPath
       }
 
     //finished with this entry
-    if (entry.ArcTable) delete entry.ArcTable;
-    if (entry.NodeTable) delete [] entry.NodeTable;
+    delete entry.ArcTable;
+    delete [] entry.NodeTable;
     }
 
   goto NOT_FOUND;
diff --git a/Common/DataModel/vtkSelectionNode.cxx b/Common/DataModel/vtkSelectionNode.cxx
index 73cc4b2..6af9db6 100644
--- a/Common/DataModel/vtkSelectionNode.cxx
+++ b/Common/DataModel/vtkSelectionNode.cxx
@@ -354,6 +354,7 @@ void vtkSelectionNode::UnionSelectionList(vtkSelectionNode* other)
           {
           vtkErrorMacro(<< "Could not find array with name "
                         << aa1->GetName() << " in other selection.");
+          return;
           }
         if (aa1->GetDataType() != aa2->GetDataType())
           {
diff --git a/Common/DataModel/vtkSimpleCellTessellator.cxx b/Common/DataModel/vtkSimpleCellTessellator.cxx
index 6dc38d6..405c633 100644
--- a/Common/DataModel/vtkSimpleCellTessellator.cxx
+++ b/Common/DataModel/vtkSimpleCellTessellator.cxx
@@ -33,7 +33,7 @@
 
 #include <queue>
 #include <stack>
-#include <assert.h>
+#include <cassert>
 
 // format of the arrays LeftPoint, MidPoint, RightPoint is global, parametric,
 // attributes: xyz rst [abc de...]
@@ -396,7 +396,6 @@ public:
     this->SubdivisionLevel = 0;
     assert("inv: " && this->ClassInvariant());
   }
-  ~vtkTriangleTile() {};
 
 #if 0
   int DifferentFromOriginals(double local[3])
@@ -629,7 +628,6 @@ public:
     this->SubdivisionLevel = 0;
     assert("inv: " && this->ClassInvariant());
     }
-  ~vtkTetraTile() {};
 
 #if 0
   int DifferentFromOriginals(double local[3])
diff --git a/Common/DataModel/vtkSmoothErrorMetric.cxx b/Common/DataModel/vtkSmoothErrorMetric.cxx
index 5bbf8da..ec9a2ba 100644
--- a/Common/DataModel/vtkSmoothErrorMetric.cxx
+++ b/Common/DataModel/vtkSmoothErrorMetric.cxx
@@ -20,7 +20,7 @@
 #include "vtkGenericAdaptorCell.h"
 #include "vtkGenericDataSet.h"
 #include "vtkMath.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkSmoothErrorMetric);
 
diff --git a/Common/DataModel/vtkSphere.h b/Common/DataModel/vtkSphere.h
index 0a30fee..84abad2 100644
--- a/Common/DataModel/vtkSphere.h
+++ b/Common/DataModel/vtkSphere.h
@@ -81,7 +81,7 @@ public:
 
 protected:
   vtkSphere();
-  ~vtkSphere() {};
+  ~vtkSphere() {}
 
   double Radius;
   double Center[3];
diff --git a/Common/DataModel/vtkSpline.cxx b/Common/DataModel/vtkSpline.cxx
index 69d6f99..6f7e1c3 100644
--- a/Common/DataModel/vtkSpline.cxx
+++ b/Common/DataModel/vtkSpline.cxx
@@ -41,14 +41,8 @@ vtkSpline::vtkSpline ()
 vtkSpline::~vtkSpline ()
 {
   this->PiecewiseFunction->Delete();
-  if (this->Coefficients)
-    {
-    delete [] this->Coefficients;
-    }
-  if (this->Intervals)
-    {
-    delete [] this->Intervals;
-    }
+  delete [] this->Coefficients;
+  delete [] this->Intervals;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Common/DataModel/vtkStructuredData.cxx b/Common/DataModel/vtkStructuredData.cxx
index 6aafaaf..b2bf343 100644
--- a/Common/DataModel/vtkStructuredData.cxx
+++ b/Common/DataModel/vtkStructuredData.cxx
@@ -17,7 +17,7 @@
 #include "vtkIdList.h"
 #include "vtkObjectFactory.h"
 #include "vtkStructuredExtent.h"
-#include <assert.h>
+#include <cassert>
 
 
 // Return the topological dimension of the data (e.g., 0, 1, 2, or 3D).
diff --git a/Common/DataModel/vtkStructuredData.h b/Common/DataModel/vtkStructuredData.h
index ae67463..523f09d 100644
--- a/Common/DataModel/vtkStructuredData.h
+++ b/Common/DataModel/vtkStructuredData.h
@@ -245,8 +245,8 @@ public:
       int dataDescription=VTK_EMPTY );
 
 protected:
-  vtkStructuredData() {};
-  ~vtkStructuredData() {};
+  vtkStructuredData() {}
+  ~vtkStructuredData() {}
 
   // Description:
   // Computes the linear index for the given i-j-k structured of a grid with
@@ -257,7 +257,7 @@ protected:
   static vtkIdType GetLinearIndex(
       const int i, const int j, const int k, const int N1, const int N2 )
     {
-      return( (k*N2+j)*N1+i );
+      return( (static_cast<vtkIdType>(k)*N2+j)*N1+i );
     }
 
   // Description:
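The static_cast above promotes the index arithmetic to vtkIdType before the multiplication can overflow a 32-bit int. A small standalone sketch of the promoted computation; the dimensions and the IdType stand-in are illustrative:

    #include <climits>
    #include <cstdio>

    int main()
    {
      typedef long long IdType;           // stand-in for a 64-bit vtkIdType
      const int N1 = 2048, N2 = 2048;     // grid dimensions (illustrative)
      const int i = 0, j = 0, k = 1024;
      // Without the cast, (k*N2 + j)*N1 is evaluated in int and exceeds
      // INT_MAX for grids of this size, which is undefined behaviour.
      IdType idx = (static_cast<IdType>(k) * N2 + j) * N1 + i;
      std::printf("linear index %lld vs INT_MAX %d\n", idx, INT_MAX);
      return 0;
    }
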
diff --git a/Common/DataModel/vtkStructuredGrid.cxx b/Common/DataModel/vtkStructuredGrid.cxx
index 4969be8..09bfc1f 100644
--- a/Common/DataModel/vtkStructuredGrid.cxx
+++ b/Common/DataModel/vtkStructuredGrid.cxx
@@ -1020,63 +1020,66 @@ void vtkStructuredGrid::InternalStructuredGridCopy(vtkStructuredGrid *src)
 
 //----------------------------------------------------------------------------
 // Override this method because of blanking
-void vtkStructuredGrid::GetScalarRange(double range[2])
+void vtkStructuredGrid::ComputeScalarRange()
 {
-  vtkDataArray *ptScalars = this->PointData->GetScalars();
-  vtkDataArray *cellScalars = this->CellData->GetScalars();
-  double ptRange[2];
-  double cellRange[2];
-  double s;
-  int id, num;
-
-  ptRange[0] =  VTK_DOUBLE_MAX;
-  ptRange[1] =  VTK_DOUBLE_MIN;
-  if ( ptScalars )
+  if ( this->GetMTime() > this->ScalarRangeComputeTime )
     {
-    num = this->GetNumberOfPoints();
-    for (id=0; id < num; id++)
+    vtkDataArray *ptScalars = this->PointData->GetScalars();
+    vtkDataArray *cellScalars = this->CellData->GetScalars();
+    double ptRange[2];
+    double cellRange[2];
+    double s;
+    int id, num;
+
+    ptRange[0] =  VTK_DOUBLE_MAX;
+    ptRange[1] =  VTK_DOUBLE_MIN;
+    if ( ptScalars )
       {
-      if ( this->IsPointVisible(id) )
+      num = this->GetNumberOfPoints();
+      for (id=0; id < num; id++)
         {
-        s = ptScalars->GetComponent(id,0);
-        if ( s < ptRange[0] )
+        if ( this->IsPointVisible(id) )
           {
-          ptRange[0] = s;
-          }
-        if ( s > ptRange[1] )
-          {
-          ptRange[1] = s;
+          s = ptScalars->GetComponent(id,0);
+          if ( s < ptRange[0] )
+            {
+            ptRange[0] = s;
+            }
+          if ( s > ptRange[1] )
+            {
+            ptRange[1] = s;
+            }
           }
         }
       }
-    }
 
-  cellRange[0] =  ptRange[0];
-  cellRange[1] =  ptRange[1];
-  if ( cellScalars )
-    {
-    num = this->GetNumberOfCells();
-    for (id=0; id < num; id++)
+    cellRange[0] =  ptRange[0];
+    cellRange[1] =  ptRange[1];
+    if ( cellScalars )
       {
-      if ( this->IsCellVisible(id) )
+      num = this->GetNumberOfCells();
+      for (id=0; id < num; id++)
         {
-        s = cellScalars->GetComponent(id,0);
-        if ( s < cellRange[0] )
+        if ( this->IsCellVisible(id) )
           {
-          cellRange[0] = s;
-          }
-        if ( s > cellRange[1] )
-          {
-          cellRange[1] = s;
+          s = cellScalars->GetComponent(id,0);
+          if ( s < cellRange[0] )
+            {
+            cellRange[0] = s;
+            }
+          if ( s > cellRange[1] )
+            {
+            cellRange[1] = s;
+            }
           }
         }
       }
-    }
 
-  range[0] = (cellRange[0] >= VTK_DOUBLE_MAX ? 0.0 : cellRange[0]);
-  range[1] = (cellRange[1] <= VTK_DOUBLE_MIN ? 1.0 : cellRange[1]);
+    this->ScalarRange[0] = (cellRange[0] >= VTK_DOUBLE_MAX ? 0.0 : cellRange[0]);
+    this->ScalarRange[1] = (cellRange[1] <= VTK_DOUBLE_MIN ? 1.0 : cellRange[1]);
 
-  this->ComputeTime.Modified();
+    this->ScalarRangeComputeTime.Modified();
+    }
 }
 
 
diff --git a/Common/DataModel/vtkStructuredGrid.h b/Common/DataModel/vtkStructuredGrid.h
index a464846..a999d99 100644
--- a/Common/DataModel/vtkStructuredGrid.h
+++ b/Common/DataModel/vtkStructuredGrid.h
@@ -88,8 +88,6 @@ public:
   int GetMaxCellSize() {return 8;}; //hexahedron is the largest
   void GetCellNeighbors(vtkIdType cellId, vtkIdList *ptIds,
                         vtkIdList *cellIds);
-  virtual void GetScalarRange(double range[2]);
-  double *GetScalarRange() {return this->Superclass::GetScalarRange();}
 
   // Description:
   // following methods are specific to structured grid
@@ -245,6 +243,11 @@ protected:
   void SetCellVisibility(vtkStructuredVisibilityConstraint *cellVisibility);
   vtkGetObjectMacro(CellVisibility, vtkStructuredVisibilityConstraint);
 
+  // Description:
+  // Compute the range of the scalars and cache it into ScalarRange
+  // only if the cache became invalid (ScalarRangeComputeTime).
+  virtual void ComputeScalarRange();
+
 private:
   // Description:
   // For legacy compatibility. Do not use.
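
The two hunks above replace the blanking-aware GetScalarRange() override with a ComputeScalarRange() override that caches its result in ScalarRange and only recomputes when the grid's MTime passes ScalarRangeComputeTime. A minimal usage sketch, not part of the patch, assuming the inherited vtkDataSet::GetScalarRange() front-end calls the virtual ComputeScalarRange() (which is what the cache check above relies on) and that 'grid' is a populated vtkStructuredGrid:

    double range[2];
    grid->GetScalarRange(range);  // first call: blanking-aware scan, result cached
    grid->GetScalarRange(range);  // cache hit: ScalarRangeComputeTime is newer than MTime
    grid->Modified();             // bumping MTime invalidates the cached range
    grid->GetScalarRange(range);  // recomputed, skipping blanked points and cells
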
diff --git a/Common/DataModel/vtkStructuredPoints.h b/Common/DataModel/vtkStructuredPoints.h
index a05b219..85c3085 100644
--- a/Common/DataModel/vtkStructuredPoints.h
+++ b/Common/DataModel/vtkStructuredPoints.h
@@ -43,7 +43,7 @@ public:
 
 protected:
   vtkStructuredPoints();
-  ~vtkStructuredPoints() {};
+  ~vtkStructuredPoints() {}
 private:
   vtkStructuredPoints(const vtkStructuredPoints&);  // Not implemented.
   void operator=(const vtkStructuredPoints&);  // Not implemented.
diff --git a/Common/DataModel/vtkStructuredPointsCollection.h b/Common/DataModel/vtkStructuredPointsCollection.h
index 4068fed..343be96 100644
--- a/Common/DataModel/vtkStructuredPointsCollection.h
+++ b/Common/DataModel/vtkStructuredPointsCollection.h
@@ -56,8 +56,8 @@ public:
   //ETX
 
 protected:
-  vtkStructuredPointsCollection() {};
-  ~vtkStructuredPointsCollection() {};
+  vtkStructuredPointsCollection() {}
+  ~vtkStructuredPointsCollection() {}
 
 
 
diff --git a/Common/DataModel/vtkSuperquadric.h b/Common/DataModel/vtkSuperquadric.h
index 38e89fc..47babb3 100644
--- a/Common/DataModel/vtkSuperquadric.h
+++ b/Common/DataModel/vtkSuperquadric.h
@@ -102,7 +102,7 @@ public:
 
 protected:
   vtkSuperquadric();
-  ~vtkSuperquadric() {};
+  ~vtkSuperquadric() {}
 
   int Toroidal;
   double Thickness;
diff --git a/Common/DataModel/vtkTensor.h b/Common/DataModel/vtkTensor.h
index e84c6f8..b21fe4f 100644
--- a/Common/DataModel/vtkTensor.h
+++ b/Common/DataModel/vtkTensor.h
@@ -70,7 +70,7 @@ public:
 
 protected:
   vtkTensor();
-  ~vtkTensor() {};
+  ~vtkTensor() {}
 
   double Storage[9];
 private:
diff --git a/Common/DataModel/vtkUniformGridAMRDataIterator.cxx b/Common/DataModel/vtkUniformGridAMRDataIterator.cxx
index 8228f11..f105b70 100644
--- a/Common/DataModel/vtkUniformGridAMRDataIterator.cxx
+++ b/Common/DataModel/vtkUniformGridAMRDataIterator.cxx
@@ -20,7 +20,7 @@
 #include "vtkDataObject.h"
 #include "vtkUniformGrid.h"
 #include "vtkInformation.h"
-#include <assert.h>
+#include <cassert>
 
 //----------------------------------------------------------------
 class AMRIndexIterator: public vtkObject
diff --git a/Common/DataModel/vtkUnstructuredGrid.cxx b/Common/DataModel/vtkUnstructuredGrid.cxx
index efbbf4c..8bfab64 100644
--- a/Common/DataModel/vtkUnstructuredGrid.cxx
+++ b/Common/DataModel/vtkUnstructuredGrid.cxx
@@ -26,6 +26,7 @@
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkLine.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPixel.h"
 #include "vtkPointData.h"
@@ -40,6 +41,7 @@
 #include "vtkQuadraticEdge.h"
 #include "vtkQuadraticHexahedron.h"
 #include "vtkQuadraticWedge.h"
+#include "vtkQuadraticPolygon.h"
 #include "vtkQuadraticPyramid.h"
 #include "vtkQuadraticQuad.h"
 #include "vtkQuadraticTetra.h"
@@ -48,6 +50,7 @@
 #include "vtkTriangle.h"
 #include "vtkTriangleStrip.h"
 #include "vtkUnsignedCharArray.h"
+#include "vtkUnstructuredGridCellIterator.h"
 #include "vtkVertex.h"
 #include "vtkVoxel.h"
 #include "vtkWedge.h"
@@ -84,6 +87,7 @@ vtkUnstructuredGrid::vtkUnstructuredGrid ()
   this->QuadraticEdge = NULL;
   this->QuadraticTriangle =NULL;
   this->QuadraticQuad = NULL;
+  this->QuadraticPolygon = NULL;
   this->QuadraticTetra = NULL;
   this->QuadraticHexahedron = NULL;
   this->QuadraticWedge = NULL;
@@ -240,6 +244,10 @@ vtkUnstructuredGrid::~vtkUnstructuredGrid()
     {
     this->QuadraticQuad->Delete();
     }
+  if(this->QuadraticPolygon)
+    {
+    this->QuadraticPolygon->Delete();
+    }
   if(this->QuadraticTetra)
     {
     this->QuadraticTetra->Delete();
@@ -325,86 +333,89 @@ int vtkUnstructuredGrid::GetGhostLevel()
 // Copy the geometric and topological structure of an input unstructured grid.
 void vtkUnstructuredGrid::CopyStructure(vtkDataSet *ds)
 {
-  vtkUnstructuredGrid *ug=static_cast<vtkUnstructuredGrid *>(ds);
-  vtkPointSet::CopyStructure(ds);
-
-  if (this->Connectivity != ug->Connectivity)
+  // If ds is a vtkUnstructuredGrid, do a shallow copy of the cell data.
+  if (vtkUnstructuredGrid *ug = vtkUnstructuredGrid::SafeDownCast(ds))
     {
-    if ( this->Connectivity )
-      {
-      this->Connectivity->UnRegister(this);
-      }
-    this->Connectivity = ug->Connectivity;
-    if (this->Connectivity)
+    if (this->Connectivity != ug->Connectivity)
       {
-      this->Connectivity->Register(this);
+      if ( this->Connectivity )
+        {
+        this->Connectivity->UnRegister(this);
+        }
+      this->Connectivity = ug->Connectivity;
+      if (this->Connectivity)
+        {
+        this->Connectivity->Register(this);
+        }
       }
-    }
 
-  if (this->Links != ug->Links)
-    {
-    if ( this->Links )
+    if (this->Links != ug->Links)
       {
-      this->Links->UnRegister(this);
-      }
-    this->Links = ug->Links;
-    if (this->Links)
-      {
-      this->Links->Register(this);
+      if ( this->Links )
+        {
+        this->Links->UnRegister(this);
+        }
+      this->Links = ug->Links;
+      if (this->Links)
+        {
+        this->Links->Register(this);
+        }
       }
-    }
 
-  if (this->Types != ug->Types)
-    {
-    if ( this->Types )
+    if (this->Types != ug->Types)
       {
-      this->Types->UnRegister(this);
-      }
-    this->Types = ug->Types;
-    if (this->Types)
-      {
-      this->Types->Register(this);
+      if ( this->Types )
+        {
+        this->Types->UnRegister(this);
+        }
+      this->Types = ug->Types;
+      if (this->Types)
+        {
+        this->Types->Register(this);
+        }
       }
-    }
 
-  if (this->Locations != ug->Locations)
-    {
-    if ( this->Locations )
+    if (this->Locations != ug->Locations)
       {
-      this->Locations->UnRegister(this);
-      }
-    this->Locations = ug->Locations;
-    if (this->Locations)
-      {
-      this->Locations->Register(this);
+      if ( this->Locations )
+        {
+        this->Locations->UnRegister(this);
+        }
+      this->Locations = ug->Locations;
+      if (this->Locations)
+        {
+        this->Locations->Register(this);
+        }
       }
-    }
 
-  if (this->Faces != ug->Faces)
-    {
-    if ( this->Faces )
-      {
-      this->Faces->UnRegister(this);
-      }
-    this->Faces = ug->Faces;
-    if (this->Faces)
+    if (this->Faces != ug->Faces)
       {
-      this->Faces->Register(this);
+      if ( this->Faces )
+        {
+        this->Faces->UnRegister(this);
+        }
+      this->Faces = ug->Faces;
+      if (this->Faces)
+        {
+        this->Faces->Register(this);
+        }
       }
-    }
 
-  if (this->FaceLocations != ug->FaceLocations)
-    {
-    if ( this->FaceLocations )
+    if (this->FaceLocations != ug->FaceLocations)
       {
-      this->FaceLocations->UnRegister(this);
-      }
-    this->FaceLocations = ug->FaceLocations;
-    if (this->FaceLocations)
-      {
-      this->FaceLocations->Register(this);
+      if ( this->FaceLocations )
+        {
+        this->FaceLocations->UnRegister(this);
+        }
+      this->FaceLocations = ug->FaceLocations;
+      if (this->FaceLocations)
+        {
+        this->FaceLocations->Register(this);
+        }
       }
     }
+
+  this->Superclass::CopyStructure(ds);
 }
 
 //----------------------------------------------------------------------------
@@ -637,6 +648,14 @@ vtkCell *vtkUnstructuredGrid::GetCell(vtkIdType cellId)
       cell = this->QuadraticQuad;
       break;
 
+    case VTK_QUADRATIC_POLYGON:
+      if(!this->QuadraticPolygon)
+        {
+        this->QuadraticPolygon = vtkQuadraticPolygon::New();
+        }
+      cell = this->QuadraticPolygon;
+      break;
+
     case VTK_QUADRATIC_TETRA:
       if(!this->QuadraticTetra)
         {
@@ -784,9 +803,7 @@ vtkCell *vtkUnstructuredGrid::GetCell(vtkIdType cellId)
 //----------------------------------------------------------------------------
 void vtkUnstructuredGrid::GetCell(vtkIdType cellId, vtkGenericCell *cell)
 {
-  vtkIdType i;
-  vtkIdType    loc;
-  double  x[3];
+  vtkIdType loc;
   vtkIdType *pts, numPts;
 
   int cellType = static_cast<int>(this->Types->GetValue(cellId));
@@ -796,14 +813,9 @@ void vtkUnstructuredGrid::GetCell(vtkIdType cellId, vtkGenericCell *cell)
   this->Connectivity->GetCell(loc,numPts,pts);
 
   cell->PointIds->SetNumberOfIds(numPts);
-  cell->Points->SetNumberOfPoints(numPts);
 
-  for (i=0; i<numPts; i++)
-    {
-    cell->PointIds->SetId(i,pts[i]);
-    this->Points->GetPoint(pts[i], x);
-    cell->Points->SetPoint(i, x);
-    }
+  std::copy(pts, pts + numPts, cell->PointIds->GetPointer(0));
+  this->Points->GetPoints(cell->PointIds, cell->Points);
 
   // Explicit face representation
   if ( cell->RequiresExplicitFaceRepresentation() )
@@ -1165,6 +1177,7 @@ void vtkUnstructuredGrid::SetCells(vtkUnsignedCharArray *cellTypes,
     if (cellTypes->GetValue(i) == VTK_POLYHEDRON)
       {
       containPolyhedron = true;
+      break; // We can terminate early
       }
     }
 
@@ -1326,13 +1339,13 @@ void vtkUnstructuredGrid::GetFaceStream(vtkIdType cellId, vtkIdList *ptIds)
     return;
     }
 
+  ptIds->Reset();
+
   if (!this->Faces || !this->FaceLocations)
     {
     return;
     }
 
-  ptIds->Reset();
-
   vtkIdType loc = this->FaceLocations->GetValue(cellId);
   vtkIdType* facePtr = this->Faces->GetPointer(loc);
 
@@ -1395,6 +1408,14 @@ void vtkUnstructuredGrid::GetPointCells(vtkIdType ptId, vtkIdList *cellIds)
 }
 
 //----------------------------------------------------------------------------
+vtkCellIterator *vtkUnstructuredGrid::NewCellIterator()
+{
+  vtkUnstructuredGridCellIterator *iter(vtkUnstructuredGridCellIterator::New());
+  iter->SetUnstructuredGrid(this);
+  return iter;
+}
+
+//----------------------------------------------------------------------------
 void vtkUnstructuredGrid::Reset()
 {
   if ( this->Connectivity )
@@ -1557,9 +1578,7 @@ unsigned long vtkUnstructuredGrid::GetActualMemorySize()
 //----------------------------------------------------------------------------
 void vtkUnstructuredGrid::ShallowCopy(vtkDataObject *dataObject)
 {
-  vtkUnstructuredGrid *grid = vtkUnstructuredGrid::SafeDownCast(dataObject);
-
-  if ( grid != NULL )
+  if (vtkUnstructuredGrid *grid = vtkUnstructuredGrid::SafeDownCast(dataObject))
     {
     // I do not know if this is correct but.
 
@@ -1622,11 +1641,26 @@ void vtkUnstructuredGrid::ShallowCopy(vtkDataObject *dataObject)
       {
       this->FaceLocations->Register(this);
       }
-
+    }
+  else if (vtkUnstructuredGridBase *ugb =
+           vtkUnstructuredGridBase::SafeDownCast(dataObject))
+    {
+    // The source object has vtkUnstructuredGrid topology, but a different
+    // cell implementation. Deep copy the cells, and shallow copy the rest:
+    vtkSmartPointer<vtkCellIterator> cellIter =
+        vtkSmartPointer<vtkCellIterator>::Take(ugb->NewCellIterator());
+    for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+         cellIter->GoToNextCell())
+      {
+      this->InsertNextCell(cellIter->GetCellType(),
+                           cellIter->GetNumberOfPoints(),
+                           cellIter->GetPointIds()->GetPointer(0),
+                           cellIter->GetNumberOfFaces(),
+                           cellIter->GetFaces()->GetPointer(0));
+      }
     }
 
-  // Do superclass
-  this->vtkPointSet::ShallowCopy(dataObject);
+  this->Superclass::ShallowCopy(dataObject);
 }
 
 //----------------------------------------------------------------------------
@@ -1705,10 +1739,16 @@ void vtkUnstructuredGrid::DeepCopy(vtkDataObject *dataObject)
       this->FaceLocations->Register(this);
       this->FaceLocations->Delete();
       }
-    }
 
-  // Do superclass
-  this->vtkPointSet::DeepCopy(dataObject);
+    // Skip the unstructured grid base implementation, as it uses a less
+    // efficient method of copying cell data.
+    this->vtkUnstructuredGridBase::Superclass::DeepCopy(grid);
+    }
+  else
+    {
+    // Use the vtkUnstructuredGridBase deep copy implementation.
+    this->Superclass::DeepCopy(dataObject);
+    }
 
   // Finally Build Links if we need to
   if (grid && grid->Links)
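
The NewCellIterator() override added above (and used by ShallowCopy() to ingest other vtkUnstructuredGridBase implementations) lets callers walk cells without per-cell point copies. A minimal sketch, not part of the patch, using only iterator calls that appear in this diff and assuming 'grid' is a vtkUnstructuredGrid:

    vtkSmartPointer<vtkCellIterator> it =
      vtkSmartPointer<vtkCellIterator>::Take(grid->NewCellIterator());
    for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
      {
      int cellType = it->GetCellType();       // read from the Types array
      vtkIdList *ptIds = it->GetPointIds();   // connectivity, fetched lazily
      // ... process cellType / ptIds ...
      }
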
diff --git a/Common/DataModel/vtkUnstructuredGrid.h b/Common/DataModel/vtkUnstructuredGrid.h
index 5c7946e..1c6c56f 100644
--- a/Common/DataModel/vtkUnstructuredGrid.h
+++ b/Common/DataModel/vtkUnstructuredGrid.h
@@ -25,7 +25,7 @@
 #define __vtkUnstructuredGrid_h
 
 #include "vtkCommonDataModelModule.h" // For export macro
-#include "vtkPointSet.h"
+#include "vtkUnstructuredGridBase.h"
 
 class vtkCellArray;
 class vtkCellLinks;
@@ -46,6 +46,7 @@ class vtkQuad;
 class vtkQuadraticEdge;
 class vtkQuadraticHexahedron;
 class vtkQuadraticWedge;
+class vtkQuadraticPolygon;
 class vtkQuadraticPyramid;
 class vtkQuadraticQuad;
 class vtkQuadraticTetra;
@@ -68,12 +69,13 @@ class vtkCubicLine;
 class vtkPolyhedron;
 class vtkIdTypeArray;
 
-class VTKCOMMONDATAMODEL_EXPORT vtkUnstructuredGrid : public vtkPointSet
+class VTKCOMMONDATAMODEL_EXPORT vtkUnstructuredGrid :
+    public vtkUnstructuredGridBase
 {
 public:
   static vtkUnstructuredGrid *New();
 
-  vtkTypeMacro(vtkUnstructuredGrid,vtkPointSet);
+  vtkTypeMacro(vtkUnstructuredGrid, vtkUnstructuredGridBase)
   void PrintSelf(ostream& os, vtkIndent indent);
 
   // Description:
@@ -125,6 +127,7 @@ public:
   virtual void GetCellBounds(vtkIdType cellId, double bounds[6]);
   virtual void GetCellPoints(vtkIdType cellId, vtkIdList *ptIds);
   void GetPointCells(vtkIdType ptId, vtkIdList *cellIds);
+  vtkCellIterator* NewCellIterator();
 
   int GetCellType(vtkIdType cellId);
   vtkUnsignedCharArray* GetCellTypesArray() { return this->Types; }
@@ -331,6 +334,7 @@ protected:
   vtkQuadraticEdge                  *QuadraticEdge;
   vtkQuadraticTriangle              *QuadraticTriangle;
   vtkQuadraticQuad                  *QuadraticQuad;
+  vtkQuadraticPolygon               *QuadraticPolygon;
   vtkQuadraticTetra                 *QuadraticTetra;
   vtkQuadraticHexahedron            *QuadraticHexahedron;
   vtkQuadraticWedge                 *QuadraticWedge;
diff --git a/Common/DataModel/vtkUnstructuredGridBase.cxx b/Common/DataModel/vtkUnstructuredGridBase.cxx
new file mode 100644
index 0000000..1014e70
--- /dev/null
+++ b/Common/DataModel/vtkUnstructuredGridBase.cxx
@@ -0,0 +1,65 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkUnstructuredGridBase.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkUnstructuredGridBase.h"
+
+#include "vtkCellIterator.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkSmartPointer.h"
+
+//----------------------------------------------------------------------------
+vtkUnstructuredGridBase::vtkUnstructuredGridBase()
+{
+}
+
+//----------------------------------------------------------------------------
+vtkUnstructuredGridBase::~vtkUnstructuredGridBase()
+{
+}
+
+//----------------------------------------------------------------------------
+void vtkUnstructuredGridBase::DeepCopy(vtkDataObject *src)
+{
+  this->Superclass::DeepCopy(src);
+
+  if (vtkDataSet *ds = vtkDataSet::SafeDownCast(src))
+    {
+    vtkSmartPointer<vtkCellIterator> cellIter =
+        vtkSmartPointer<vtkCellIterator>::Take(ds->NewCellIterator());
+    for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+         cellIter->GoToNextCell())
+      {
+      this->InsertNextCell(cellIter->GetCellType(),
+                           cellIter->GetNumberOfPoints(),
+                           cellIter->GetPointIds()->GetPointer(0),
+                           cellIter->GetNumberOfFaces(),
+                           cellIter->GetFaces()->GetPointer(0));
+      }
+    }
+}
+
+//----------------------------------------------------------------------------
+vtkUnstructuredGridBase* vtkUnstructuredGridBase::GetData(vtkInformation* info)
+{
+  return vtkUnstructuredGridBase::SafeDownCast(info ? info->Get(DATA_OBJECT())
+                                                    : NULL);
+}
+
+//----------------------------------------------------------------------------
+vtkUnstructuredGridBase*
+vtkUnstructuredGridBase::GetData(vtkInformationVector* v, int i)
+{
+  return vtkUnstructuredGridBase::GetData(v->GetInformationObject(i));
+}
diff --git a/Common/DataModel/vtkUnstructuredGridBase.h b/Common/DataModel/vtkUnstructuredGridBase.h
new file mode 100644
index 0000000..181cfff
--- /dev/null
+++ b/Common/DataModel/vtkUnstructuredGridBase.h
@@ -0,0 +1,112 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkUnstructuredGridBase.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkUnstructuredGridBase - dataset represents arbitrary combinations
+// of all possible cell types. May be mapped onto a non-standard memory layout.
+//
+// .SECTION Description
+// vtkUnstructuredGridBase defines the core vtkUnstructuredGrid API, omitting
+// functions that are implementation dependent.
+//
+// .SECTION See Also
+// vtkMappedDataArray vtkUnstructuredGrid
+
+#ifndef __vtkUnstructuredGridBase_h
+#define __vtkUnstructuredGridBase_h
+
+#include "vtkCommonDataModelModule.h" // For export macro
+#include "vtkPointSet.h"
+
+class VTKCOMMONDATAMODEL_EXPORT vtkUnstructuredGridBase : public vtkPointSet
+{
+public:
+  vtkAbstractTypeMacro(vtkUnstructuredGridBase,vtkPointSet)
+  void PrintSelf(ostream &os, vtkIndent indent)
+  {
+    this->Superclass::PrintSelf(os, indent);
+  }
+
+  int GetDataObjectType() { return VTK_UNSTRUCTURED_GRID; }
+
+  // Description:
+  // Allocate memory for the number of cells indicated. extSize is not used.
+  virtual void Allocate(vtkIdType numCells=1000, int extSize=1000) = 0;
+
+  // Description:
+  // Shallow and Deep copy.
+  void DeepCopy(vtkDataObject *src);
+
+  // Description:
+  // Insert/create cell in object by type and list of point ids defining
+  // cell topology. Most cells require just a type which implicitly defines
+  // a set of points and their ordering. For non-polyhedron cell type, npts
+  // is the number of unique points in the cell. ptIds is the list of global
+  // point Ids. For a polyhedron cell, a special input format is required.
+  // npts is the number of faces in the cell, and ptIds is the face stream:
+  // (numFace0Pts, id1, id2, id3, numFace1Pts,id1, id2, id3, ...)
+  virtual vtkIdType InsertNextCell(int type, vtkIdType npts,
+                                   vtkIdType *ptIds) = 0;
+
+  // Description:
+  // Insert/create cell in object by a list of point ids defining
+  // cell topology. Most cells require just a type which implicitly defines
+  // a set of points and their ordering. For non-polyhedron cell type, ptIds
+  // is the list of global Ids of unique cell points. For polyhedron cell,
+  // a special ptIds input format is required:
+  // (numCellFaces, numFace0Pts, id1, id2, id3, numFace1Pts,id1, id2, id3, ...)
+  virtual vtkIdType InsertNextCell(int type, vtkIdList *ptIds) = 0;
+
+  // Description:
+  // Insert/create a polyhedron cell. npts is the number of unique points in
+  // the cell. ptIds is the list of the unique cell point Ids. nfaces is the
+  // number of faces in the cell. faces is the face-stream
+  // [numFace0Pts, id1, id2, id3, numFace1Pts,id1, id2, id3, ...].
+  // All point Ids are global.
+  virtual vtkIdType InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds,
+                                   vtkIdType nfaces, vtkIdType *faces) = 0;
+
+  // Description:
+  // Replace the points defining cell "cellId" with a new set of points. This
+  // operator is (typically) used when links from points to cells have not been
+  // built (i.e., BuildLinks() has not been executed). Use the operator
+  // ReplaceLinkedCell() to replace a cell when cell structure has been built.
+  virtual void ReplaceCell(vtkIdType cellId, int npts, vtkIdType *pts) = 0;
+
+  // Description:
+  // Fill vtkIdTypeArray container with list of cell Ids.  This
+  // method traverses all cells and, for a particular cell type,
+  // inserts the cell Id into the container.
+  virtual void GetIdsOfCellsOfType(int type, vtkIdTypeArray *array) = 0;
+
+  // Description:
+  // Traverse cells and determine if cells are all of the same type.
+  virtual int IsHomogeneous() = 0;
+
+  //BTX
+  // Description:
+  // Retrieve an instance of this class from an information object.
+  static vtkUnstructuredGridBase* GetData(vtkInformation* info);
+  static vtkUnstructuredGridBase* GetData(vtkInformationVector* v, int i=0);
+  //ETX
+
+protected:
+  vtkUnstructuredGridBase();
+  ~vtkUnstructuredGridBase();
+
+private:
+  vtkUnstructuredGridBase(const vtkUnstructuredGridBase&);  // Not implemented.
+  void operator=(const vtkUnstructuredGridBase&);  // Not implemented.
+};
+
+#endif
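
The InsertNextCell() overloads declared above use the face-stream convention for VTK_POLYHEDRON cells. As an illustration only (not from the patch), the vtkIdList form documented as (numCellFaces, numFace0Pts, id1, id2, ..., numFace1Pts, ...) could be filled for a cube whose corner points are assumed to already exist as ids 0-7 in 'ugrid'; the face ordering below is illustrative:

    static const vtkIdType faceStream[] = {
      6,              // numCellFaces
      4, 0, 1, 2, 3,  // bottom quad
      4, 4, 5, 6, 7,  // top quad
      4, 0, 1, 5, 4,  // four side quads
      4, 1, 2, 6, 5,
      4, 2, 3, 7, 6,
      4, 3, 0, 4, 7
    };
    vtkNew<vtkIdList> stream;
    for (size_t i = 0; i < sizeof(faceStream) / sizeof(faceStream[0]); ++i)
      {
      stream->InsertNextId(faceStream[i]);
      }
    ugrid->InsertNextCell(VTK_POLYHEDRON, stream.GetPointer());
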
diff --git a/Common/DataModel/vtkUnstructuredGridCellIterator.cxx b/Common/DataModel/vtkUnstructuredGridCellIterator.cxx
new file mode 100644
index 0000000..38c5fef
--- /dev/null
+++ b/Common/DataModel/vtkUnstructuredGridCellIterator.cxx
@@ -0,0 +1,242 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkUnstructuredGridCellIterator.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkUnstructuredGridCellIterator.h"
+
+#include "vtkCellArray.h"
+#include "vtkIdList.h"
+#include "vtkObjectFactory.h"
+#include "vtkPoints.h"
+#include "vtkUnsignedCharArray.h"
+#include "vtkUnstructuredGrid.h"
+
+#include <assert.h>
+
+vtkStandardNewMacro(vtkUnstructuredGridCellIterator)
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  // Cast the 'unsigned char*' members to void* to prevent the compiler from
+  // interpreting them as strings.
+  os << indent << "CellTypeBegin: "
+     << static_cast<void*>(this->CellTypeBegin) << endl;
+  os << indent << "CellTypePtr: "
+     << static_cast<void*>(this->CellTypePtr) << endl;
+  os << indent << "CellTypeEnd: "
+     << static_cast<void*>(this->CellTypeEnd) << endl;
+  os << indent << "ConnectivityBegin: " << this->ConnectivityBegin << endl;
+  os << indent << "ConnectivityPtr: " << this->ConnectivityPtr << endl;
+  os << indent << "FacesBegin: " << this->FacesBegin<< endl;
+  os << indent << "FacesLocsBegin: " << this->FacesLocsBegin << endl;
+  os << indent << "FacesLocsPtr: " << this->FacesLocsPtr << endl;
+  os << indent << "SkippedCells: " << this->SkippedCells << endl;
+  os << indent << "UnstructuredGridPoints: " <<
+        this->UnstructuredGridPoints.GetPointer() << endl;
+}
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::SetUnstructuredGrid(
+    vtkUnstructuredGrid *ug)
+{
+  // If the unstructured grid has not been initialized yet, these may not exist:
+  vtkUnsignedCharArray *cellTypeArray = ug ? ug->GetCellTypesArray() : NULL;
+  vtkCellArray *cellArray = ug ? ug->GetCells() : NULL;
+  vtkPoints *points = ug ? ug->GetPoints() : NULL;
+
+  if (ug && cellTypeArray && cellArray && points)
+    {
+    // Cell types
+    this->CellTypeBegin = this->CellTypeEnd = this->CellTypePtr
+        = cellTypeArray ? cellTypeArray->GetPointer(0) : NULL;
+    this->CellTypeEnd += cellTypeArray ? cellTypeArray->GetNumberOfTuples() : 0;
+
+    // CellArray
+    this->ConnectivityBegin = this->ConnectivityPtr = cellArray->GetPointer();
+
+    // Point
+    this->UnstructuredGridPoints = points;
+
+    // Faces
+    vtkIdTypeArray *faces = ug->GetFaces();
+    vtkIdTypeArray *facesLocs = ug->GetFaceLocations();
+    if (faces && facesLocs)
+      {
+      this->FacesBegin = faces->GetPointer(0);
+      this->FacesLocsBegin = this->FacesLocsPtr = facesLocs->GetPointer(0);
+      }
+    else
+      {
+      this->FacesBegin = NULL;
+      this->FacesLocsBegin = NULL;
+      this->FacesLocsPtr = NULL;
+      }
+    }
+  else
+    {
+    this->CellTypeBegin = NULL;
+    this->CellTypePtr = NULL;
+    this->CellTypeEnd = NULL;
+    this->FacesBegin = NULL;
+    this->FacesLocsBegin = NULL;
+    this->FacesLocsPtr = NULL;
+    this->ConnectivityBegin= NULL;
+    this->ConnectivityPtr = NULL;
+    this->UnstructuredGridPoints = NULL;
+    }
+
+  this->SkippedCells = 0;
+}
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::CatchUpSkippedCells()
+{
+  // catch up on skipped cells -- cache misses make incrementing Connectivity
+  // in IncrementToNextCell() too expensive, so we delay it until here. Special
+  // cases are used for 0 or 1 skipped cells to reduce the number of jumps.
+  switch (this->SkippedCells)
+    {
+    default:
+      while (this->SkippedCells > 1)
+        {
+        this->ConnectivityPtr += *this->ConnectivityPtr + 1;
+        this->SkippedCells--;
+        }
+      assert(this->SkippedCells == 1);
+      // Fall through to first case
+    case 1:
+      this->ConnectivityPtr += *this->ConnectivityPtr + 1;
+      --this->SkippedCells;
+      // fall through to 0 case
+    case 0:
+      // do nothing.
+      break;
+  }
+}
+
+//------------------------------------------------------------------------------
+bool vtkUnstructuredGridCellIterator::IsDoneWithTraversal()
+{
+  return this->CellTypePtr >= this->CellTypeEnd;
+}
+
+//------------------------------------------------------------------------------
+vtkIdType vtkUnstructuredGridCellIterator::GetCellId()
+{
+  return static_cast<vtkIdType>(this->CellTypePtr - this->CellTypeBegin);
+}
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::IncrementToNextCell()
+{
+  ++this->CellTypePtr;
+
+  // Bookkeeping for ConnectivityPtr
+  ++this->SkippedCells;
+
+  // Note that we may be incrementing an invalid pointer here...check
+  // if FacesLocsBegin is NULL before dereferencing this!
+  ++this->FacesLocsPtr;
+}
+
+//------------------------------------------------------------------------------
+vtkUnstructuredGridCellIterator::vtkUnstructuredGridCellIterator()
+  : vtkCellIterator(),
+    CellTypeBegin(NULL),
+    CellTypePtr(NULL),
+    CellTypeEnd(NULL),
+    ConnectivityBegin(NULL),
+    ConnectivityPtr(NULL),
+    FacesBegin(NULL),
+    FacesLocsBegin(NULL),
+    FacesLocsPtr(NULL),
+    SkippedCells(0),
+    UnstructuredGridPoints(NULL)
+{
+}
+
+//------------------------------------------------------------------------------
+vtkUnstructuredGridCellIterator::~vtkUnstructuredGridCellIterator()
+{
+}
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::ResetToFirstCell()
+{
+  this->CellTypePtr = this->CellTypeBegin;
+  this->FacesLocsPtr = this->FacesLocsBegin;
+  this->ConnectivityPtr = this->ConnectivityBegin;
+  this->SkippedCells = 0;
+}
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::FetchCellType()
+{
+  this->CellType = *this->CellTypePtr;
+}
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::FetchPointIds()
+{
+  CatchUpSkippedCells();
+  const vtkIdType *connPtr = this->ConnectivityPtr;
+  vtkIdType numCellPoints = *(connPtr++);
+  this->PointIds->SetNumberOfIds(numCellPoints);
+  vtkIdType *cellPtr = this->PointIds->GetPointer(0);
+  std::copy(connPtr, connPtr + numCellPoints, cellPtr);
+}
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::FetchPoints()
+{
+  this->UnstructuredGridPoints->GetPoints(this->GetPointIds(), this->Points);
+}
+
+//------------------------------------------------------------------------------
+// Given a pointer into a set of faces, traverse the faces and return the total
+// number of ids (including size hints) in the face set.
+namespace {
+inline vtkIdType FaceSetSize(vtkIdType *begin)
+{
+  vtkIdType *result = begin;
+  vtkIdType numFaces = *(result++);
+  while (numFaces-- > 0)
+    {
+    result += *result + 1;
+    }
+  return result - begin;
+}
+} // end anon namespace
+
+//------------------------------------------------------------------------------
+void vtkUnstructuredGridCellIterator::FetchFaces()
+{
+  // FacesLocsPtr may be non-null and invalid (this is done to prevent branching
+  // in IncrementToNextCell()). Check FacesLocsBegin to determine validity of
+  // the pointer.
+  if (this->FacesLocsBegin && *this->FacesLocsPtr >= 0)
+    {
+    vtkIdType *faceSet = this->FacesBegin + *this->FacesLocsPtr;
+    vtkIdType facesSize = FaceSetSize(faceSet);
+    this->Faces->SetNumberOfIds(facesSize);
+    vtkIdType *tmpPtr = this->Faces->GetPointer(0);
+    std::copy(faceSet, faceSet + facesSize, tmpPtr);
+    }
+  else
+    {
+    this->Faces->SetNumberOfIds(0);
+    }
+}
diff --git a/Common/DataModel/vtkUnstructuredGridCellIterator.h b/Common/DataModel/vtkUnstructuredGridCellIterator.h
new file mode 100644
index 0000000..b49bdf2
--- /dev/null
+++ b/Common/DataModel/vtkUnstructuredGridCellIterator.h
@@ -0,0 +1,78 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkUnstructuredGridCellIterator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkUnstructuredGridCellIterator - Implementation of vtkCellIterator
+// specialized for vtkUnstructuredGrid.
+
+#ifndef __vtkUnstructuredGridCellIterator_h
+#define __vtkUnstructuredGridCellIterator_h
+
+#include "vtkCommonDataModelModule.h" // For export macro
+#include "vtkCellIterator.h"
+#include "vtkSmartPointer.h" // For vtkSmartPointer
+
+class vtkCellArray;
+class vtkUnsignedCharArray;
+class vtkUnstructuredGrid;
+class vtkPoints;
+
+class VTKCOMMONDATAMODEL_EXPORT vtkUnstructuredGridCellIterator :
+    public vtkCellIterator
+{
+public:
+  static vtkUnstructuredGridCellIterator *New();
+  vtkTypeMacro(vtkUnstructuredGridCellIterator, vtkCellIterator)
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  bool IsDoneWithTraversal();
+  vtkIdType GetCellId();
+
+protected:
+  vtkUnstructuredGridCellIterator();
+  ~vtkUnstructuredGridCellIterator();
+
+  void ResetToFirstCell();
+  void IncrementToNextCell();
+  void FetchCellType();
+  void FetchPointIds();
+  void FetchPoints();
+  void FetchFaces();
+
+  friend class vtkUnstructuredGrid;
+  void SetUnstructuredGrid(vtkUnstructuredGrid *ug);
+
+  unsigned char *CellTypeBegin;
+  unsigned char *CellTypePtr;
+  unsigned char *CellTypeEnd;
+
+  vtkIdType *ConnectivityBegin;
+  vtkIdType *ConnectivityPtr;
+  vtkIdType *FacesBegin;
+  vtkIdType *FacesLocsBegin;
+  vtkIdType *FacesLocsPtr;
+
+  // Cache misses make updating ConnectivityPtr in IncrementToNextCell too
+  // expensive, so we wait to walk through the array until the point ids are
+  // needed. This variable keeps track of how far we need to increment.
+  vtkIdType SkippedCells;
+  void CatchUpSkippedCells();
+
+  vtkSmartPointer<vtkPoints> UnstructuredGridPoints;
+
+private:
+  vtkUnstructuredGridCellIterator(const vtkUnstructuredGridCellIterator &); // Not implemented.
+  void operator=(const vtkUnstructuredGridCellIterator &);   // Not implemented.
+};
+
+#endif //__vtkUnstructuredGridCellIterator_h
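
As the comment above explains, the iterator defers walking the connectivity array (SkippedCells / CatchUpSkippedCells) until point ids are actually requested, and it exposes polyhedron face streams through the base-class Faces list. A short sketch complementing the earlier one, again using only calls present in this patch and assuming 'ug' is a vtkUnstructuredGrid holding polyhedral cells:

    vtkSmartPointer<vtkCellIterator> it =
      vtkSmartPointer<vtkCellIterator>::Take(ug->NewCellIterator());
    for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
      {
      if (it->GetCellType() == VTK_POLYHEDRON)
        {
        vtkIdType numFaces = it->GetNumberOfFaces();
        vtkIdList *faces = it->GetFaces();  // (numFaces, n0, ids..., n1, ids...)
        // ... consume the face stream ...
        }
      }
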
diff --git a/Common/DataModel/vtkVertex.h b/Common/DataModel/vtkVertex.h
index fb27cb2..34424bc 100644
--- a/Common/DataModel/vtkVertex.h
+++ b/Common/DataModel/vtkVertex.h
@@ -115,7 +115,7 @@ public:
 
 protected:
   vtkVertex();
-  ~vtkVertex() {};
+  ~vtkVertex() {}
 
 private:
   vtkVertex(const vtkVertex&);  // Not implemented.
diff --git a/Common/DataModel/vtkXMLDataElement.cxx b/Common/DataModel/vtkXMLDataElement.cxx
index 4c07c8a..7b6cce6 100644
--- a/Common/DataModel/vtkXMLDataElement.cxx
+++ b/Common/DataModel/vtkXMLDataElement.cxx
@@ -144,7 +144,7 @@ void vtkXMLDataElement::SetName(const char* _arg)
 
   if ( this->Name == NULL && _arg == NULL) { return;}
   if ( this->Name && _arg && (!strcmp(this->Name,_arg))) { return;}
-  if (this->Name) { delete [] this->Name; }
+  delete [] this->Name;
   this->IgnoreCharacterData=0;
   if (_arg)
     {
diff --git a/Common/ExecutionModel/CMakeLists.txt b/Common/ExecutionModel/CMakeLists.txt
index a65618b..1101084 100644
--- a/Common/ExecutionModel/CMakeLists.txt
+++ b/Common/ExecutionModel/CMakeLists.txt
@@ -7,14 +7,10 @@ SET(Module_SRCS
   vtkCastToConcrete.cxx
   vtkCompositeDataPipeline.cxx
   vtkCompositeDataSetAlgorithm.cxx
-  vtkComputingResources.cxx
   vtkDataObjectAlgorithm.cxx
   vtkDataSetAlgorithm.cxx
   vtkDemandDrivenPipeline.cxx
   vtkDirectedGraphAlgorithm.cxx
-  vtkExecutionScheduler.cxx
-  vtkExecutionSchedulerManager.cxx
-  vtkExecutiveCollection.cxx
   vtkExecutive.cxx
   vtkExtentSplitter.cxx
   vtkFilteringInformationKeyManager.cxx
@@ -45,12 +41,14 @@ SET(Module_SRCS
   vtkStructuredGridAlgorithm.cxx
   vtkTableAlgorithm.cxx
   vtkTableExtentTranslator.cxx
+  vtkSMPProgressObserver.cxx
+  vtkThreadedCompositeDataPipeline.cxx
   vtkThreadedImageAlgorithm.cxx
-  vtkThreadedStreamingPipeline.cxx
   vtkTreeAlgorithm.cxx
   vtkTrivialProducer.cxx
   vtkUndirectedGraphAlgorithm.cxx
   vtkUnstructuredGridAlgorithm.cxx
+  vtkProgressObserver.cxx
   vtkSelectionAlgorithm.cxx
   vtkExtentRCBPartitioner.cxx
   vtkUniformGridPartitioner.cxx
@@ -78,6 +76,7 @@ set_source_files_properties(
   vtkExecutionSchedulerManager
   vtkFilteringInformationKeyManager
   vtkImageProgressIterator
+  vtkSMPProgressObserver
   WRAP_EXCLUDE
   )
 
diff --git a/Common/ExecutionModel/Testing/Cxx/CMakeLists.txt b/Common/ExecutionModel/Testing/Cxx/CMakeLists.txt
index 754b87f..6f7eb6a 100644
--- a/Common/ExecutionModel/Testing/Cxx/CMakeLists.txt
+++ b/Common/ExecutionModel/Testing/Cxx/CMakeLists.txt
@@ -1,30 +1,7 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(NO_DATA NO_VALID
   TestCopyAttributeData.cxx
   TestImageDataToStructuredGrid.cxx
   TestSetInputDataObject.cxx
   TestTemporalSupport.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach (test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-
-  if(VTK_DATA_ROOT)
-  add_test(NAME ${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-      -D ${VTK_DATA_ROOT}
-      -T ${VTK_TEST_OUTPUT_DIR}
-      -V Baseline/${vtk-module}/${TName}.png)
-  else()
-  add_test(NAME ${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-      -T ${VTK_TEST_OUTPUT_DIR}
-      )
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Common/ExecutionModel/Testing/Cxx/TestTemporalSupport.cxx b/Common/ExecutionModel/Testing/Cxx/TestTemporalSupport.cxx
index a87c694..ab00ba6 100644
--- a/Common/ExecutionModel/Testing/Cxx/TestTemporalSupport.cxx
+++ b/Common/ExecutionModel/Testing/Cxx/TestTemporalSupport.cxx
@@ -25,7 +25,7 @@
 #include "vtkPointData.h"
 #include "vtkSmartPointer.h"
 #include "vtkNew.h"
-#include <assert.h>
+#include <cassert>
 
 #define CHECK(b, errors) if(!(b)){ errors++; cerr<<"Error on Line "<<__LINE__<<":"<<endl;}
 
diff --git a/Common/ExecutionModel/Testing/Python/CMakeLists.txt b/Common/ExecutionModel/Testing/Python/CMakeLists.txt
index 87e3303..9a6ed2f 100644
--- a/Common/ExecutionModel/Testing/Python/CMakeLists.txt
+++ b/Common/ExecutionModel/Testing/Python/CMakeLists.txt
@@ -1,9 +1,3 @@
 if(VTK_PYTHON_EXE)
-  foreach(tfile
-    TestReleaseData
-    )
-    add_test(NAME ${vtk-module}Python-${tfile}
-      COMMAND ${VTK_PYTHON_EXE}
-        ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.py)
-  endforeach()
+  vtk_add_test_python(TestReleaseData.py NO_DATA NO_VALID NO_OUTPUT)
 endif()
\ No newline at end of file
diff --git a/Common/ExecutionModel/vtkAlgorithm.cxx b/Common/ExecutionModel/vtkAlgorithm.cxx
index 7b1e6ed..6468651 100644
--- a/Common/ExecutionModel/vtkAlgorithm.cxx
+++ b/Common/ExecutionModel/vtkAlgorithm.cxx
@@ -36,6 +36,7 @@
 #include "vtkInformationVector.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
+#include "vtkProgressObserver.h"
 #include "vtkSmartPointer.h"
 #include "vtkCompositeDataPipeline.h"
 #include "vtkTable.h"
@@ -93,6 +94,7 @@ vtkAlgorithm::vtkAlgorithm()
   this->Progress = 0.0;
   this->ProgressText = NULL;
   this->Executive = 0;
+  this->ProgressObserver = 0;
   this->InputPortInformation = vtkInformationVector::New();
   this->OutputPortInformation = vtkInformationVector::New();
   this->AlgorithmInternal = new vtkAlgorithmInternals;
@@ -110,6 +112,11 @@ vtkAlgorithm::~vtkAlgorithm()
     this->Executive->UnRegister(this);
     this->Executive = 0;
     }
+  if (this->ProgressObserver)
+    {
+    this->ProgressObserver->UnRegister(this);
+    this->ProgressObserver = 0;
+    }
   this->InputPortInformation->Delete();
   this->OutputPortInformation->Delete();
   delete this->AlgorithmInternal;
@@ -118,13 +125,40 @@ vtkAlgorithm::~vtkAlgorithm()
 }
 
 //----------------------------------------------------------------------------
+void vtkAlgorithm::SetProgressObserver(vtkProgressObserver* po)
+{
+  // This intentionally does not modify the algorithm as it
+  // is usually done by executives during execution and we don't
+  // want the filter to change its mtime during execution.
+  if (po != this->ProgressObserver)
+    {
+    if (this->ProgressObserver)
+      {
+      this->ProgressObserver->UnRegister(this);
+      }
+    this->ProgressObserver = po;
+    if (po)
+      {
+      po->Register(this);
+      }
+    }
+}
+
+//----------------------------------------------------------------------------
 // Update the progress of the process object. If a ProgressMethod exists,
 // executes it. Then set the Progress ivar to amount. The parameter amount
 // should range between (0,1).
 void vtkAlgorithm::UpdateProgress(double amount)
 {
-  this->Progress = amount;
-  this->InvokeEvent(vtkCommand::ProgressEvent,static_cast<void *>(&amount));
+  if (this->ProgressObserver)
+    {
+    this->ProgressObserver->UpdateProgress(amount);
+    }
+  else
+    {
+    this->Progress = amount;
+    this->InvokeEvent(vtkCommand::ProgressEvent,static_cast<void *>(&amount));
+    }
 }
 
 
@@ -1423,6 +1457,8 @@ void vtkAlgorithm::Update(int port)
 //----------------------------------------------------------------------------
 void vtkAlgorithm::PropagateUpdateExtent()
 {
+  this->UpdateInformation();
+
   vtkStreamingDemandDrivenPipeline* sddp =
     vtkStreamingDemandDrivenPipeline::SafeDownCast(this->GetExecutive());
   if (sddp)
diff --git a/Common/ExecutionModel/vtkAlgorithm.h b/Common/ExecutionModel/vtkAlgorithm.h
index aa9d66f..bc583fb 100644
--- a/Common/ExecutionModel/vtkAlgorithm.h
+++ b/Common/ExecutionModel/vtkAlgorithm.h
@@ -45,6 +45,7 @@ class vtkInformationIntegerKey;
 class vtkInformationStringKey;
 class vtkInformationStringVectorKey;
 class vtkInformationVector;
+class vtkProgressObserver;
 
 class VTKCOMMONEXECUTIONMODEL_EXPORT vtkAlgorithm : public vtkObject
 {
@@ -55,8 +56,12 @@ public:
 
   // Description:
   // Values used for setting the desired output precision for various
-  // algorithms. Currently, only a few algorithms (vtkContourFilter,
-  // vtkThreshold) support changing their output precision.
+  // algorithms. Currently, the following algorithms support changing their
+  // output precision: vtkAppendFilter, vtkAppendPoints, vtkContourFilter,
+  // vtkContourGrid, vtkCutter, vtkGridSynchronizedTemplates3D,
+  // vtkPolyDataNormals, vtkSynchronizedTemplatesCutter3D,
+  // vtkTableBasedClipDataSet, vtkThreshold, vtkTransformFilter, and
+  // vtkTransformPolyData.
   //
   // SINGLE_PRECISION - Output single-precision floating-point (i.e. float)
   // DOUBLE_PRECISION - Output double-precision floating-point (i.e. double)
@@ -561,6 +566,17 @@ public:
   }
   int GetUpdateGhostLevel(int port);
 
+  // Description:
+  // If a ProgressObserver is set, the algorithm will report
+  // progress through it rather than directly. This means that
+  // it will call UpdateProgress() on the ProgressObserver rather
+  // than reporting the event and setting its own Progress ivar.
+  // This is most useful in situations where multiple threads
+  // are executing an algorithm at the same time and want to
+  // handle progress locally.
+  void SetProgressObserver(vtkProgressObserver*);
+  vtkGetObjectMacro(ProgressObserver, vtkProgressObserver);
+
 protected:
   vtkAlgorithm();
   ~vtkAlgorithm();
@@ -730,6 +746,8 @@ protected:
   void AddInputDataInternal(int port, vtkDataObject *input)
     { this->AddInputDataObject(port, input); }
 
+  vtkProgressObserver* ProgressObserver;
+
 private:
   vtkExecutive* Executive;
   vtkInformationVector* InputPortInformation;
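
Per the Description above, SetProgressObserver() redirects vtkAlgorithm::UpdateProgress() to the observer instead of the algorithm's own Progress/ProgressEvent path, and it deliberately avoids Modified() so executives can swap observers mid-execution. A hedged wiring sketch, with 'aFilter' standing in for any vtkAlgorithm subclass (not part of the patch):

    vtkNew<vtkProgressObserver> localProgress;
    aFilter->SetProgressObserver(localProgress.GetPointer());
    aFilter->Update();
    // aFilter no longer fires ProgressEvent itself; UpdateProgress(amount)
    // is forwarded to localProgress (see vtkAlgorithm::UpdateProgress above),
    // so per-thread progress handling can be attached to the observer.
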
diff --git a/Common/ExecutionModel/vtkCachedStreamingDemandDrivenPipeline.cxx b/Common/ExecutionModel/vtkCachedStreamingDemandDrivenPipeline.cxx
index d882930..537a4fe 100644
--- a/Common/ExecutionModel/vtkCachedStreamingDemandDrivenPipeline.cxx
+++ b/Common/ExecutionModel/vtkCachedStreamingDemandDrivenPipeline.cxx
@@ -67,16 +67,10 @@ void vtkCachedStreamingDemandDrivenPipeline::SetCacheSize(int size)
       this->Data[idx] = NULL;
       }
     }
-  if (this->Data)
-    {
-    delete [] this->Data;
-    this->Data = NULL;
-    }
-  if (this->Times)
-    {
-    delete [] this->Times;
-    this->Times = NULL;
-    }
+  delete [] this->Data;
+  this->Data = NULL;
+  delete [] this->Times;
+  this->Times = NULL;
 
   this->CacheSize = size;
   if (size == 0)
diff --git a/Common/ExecutionModel/vtkCastToConcrete.h b/Common/ExecutionModel/vtkCastToConcrete.h
index 53a9f66..16e20ec 100644
--- a/Common/ExecutionModel/vtkCastToConcrete.h
+++ b/Common/ExecutionModel/vtkCastToConcrete.h
@@ -50,8 +50,8 @@ public:
   void PrintSelf(ostream& os, vtkIndent indent);
 
 protected:
-  vtkCastToConcrete() {};
-  ~vtkCastToConcrete() {};
+  vtkCastToConcrete() {}
+  ~vtkCastToConcrete() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *); //insures compatibility; satisfies abstract api in vtkFilter
   virtual int RequestInformation(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Common/ExecutionModel/vtkCompositeDataPipeline.cxx b/Common/ExecutionModel/vtkCompositeDataPipeline.cxx
index 9989bac..ea994fb 100644
--- a/Common/ExecutionModel/vtkCompositeDataPipeline.cxx
+++ b/Common/ExecutionModel/vtkCompositeDataPipeline.cxx
@@ -37,49 +37,18 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkStructuredGrid.h"
 #include "vtkUniformGrid.h"
 
-//----------------------------------------------------------------------------
-#if defined (JB_DEBUG1)
-  #ifndef WIN32
-  #else
-    #define OUTPUTTEXT(a) vtkOutputWindowDisplayText(a);
-  #endif
-
-  #undef vtkDebugMacro
-  #define vtkDebugMacro(a)  \
-  { \
-    vtkOStreamWrapper::EndlType endl; \
-    vtkOStreamWrapper::UseEndl(endl); \
-    vtkOStrStreamWrapper vtkmsg; \
-    const char *name = this->Algorithm->GetClassName(); \
-    if (!strcmp(name, "vtkTemporalDataSetCache") || \
-        !strcmp(name, "vtkContourFilter") || \
-        !strcmp(name, "vtkOpenDXStructuredGridReader") || \
-        !strcmp(name, "vtkPCellDataToPointData") || \
-        !strcmp(name, "vtkProcessIdScalars") || \
-        !strcmp(name, "vtkTemporalFractal") || \
-        !strcmp(name, "vtkTemporalSphereSource") || \
-        !strcmp(name, "vtkTemporalInterpolator") || \
-        !strcmp(name, "vtkTemporalStreamTracer")) \
-      { \
-      vtkmsg << name << " : " a << endl; \
-      OUTPUTTEXT(vtkmsg.str()); \
-      vtkmsg.rdbuf()->freeze(0); \
-      } \
-  }
-#endif
-
 vtkStandardNewMacro(vtkCompositeDataPipeline);
 
 vtkInformationKeyMacro(vtkCompositeDataPipeline, LOAD_REQUESTED_BLOCKS, Integer);
 vtkInformationKeyMacro(vtkCompositeDataPipeline, COMPOSITE_DATA_META_DATA, ObjectBase);
 vtkInformationKeyMacro(vtkCompositeDataPipeline, UPDATE_COMPOSITE_INDICES, IntegerVector);
 vtkInformationKeyMacro(vtkCompositeDataPipeline, COMPOSITE_INDICES, IntegerVector);
+vtkInformationKeyMacro(vtkCompositeDataPipeline, SUPPRESS_RESET_PI, Integer);
 
 //----------------------------------------------------------------------------
 vtkCompositeDataPipeline::vtkCompositeDataPipeline()
 {
   this->InLocalLoop = 0;
-  this->SuppressResetPipelineInformation = 0;
   this->InformationCache = vtkInformation::New();
 
   this->GenericRequest = vtkInformation::New();
@@ -144,7 +113,7 @@ int vtkCompositeDataPipeline::ExecuteDataObject(
   // datasets. Otherwise, the algorithm will get a chance to handle
   // REQUEST_DATA_OBJECT when it is being iterated over.
   int compositePort;
-  bool shouldIterate = this->ShouldIterateOverInput(compositePort);
+  bool shouldIterate = this->ShouldIterateOverInput(inInfoVec, compositePort);
   if (!shouldIterate)
     {
     // Invoke the request on the algorithm.
@@ -186,10 +155,8 @@ int vtkCompositeDataPipeline::ExecuteData(vtkInformation* request,
   int result = 1;
 
   int compositePort;
-  bool composite = this->ShouldIterateOverInput(compositePort);
+  bool composite = this->ShouldIterateOverInput(inInfoVec, compositePort);
 
-  // This is stupid.
-  //compositePort = temporal ? -1 : compositePort;
   if ( composite)
     {
     if (this->GetNumberOfOutputPorts())
@@ -228,7 +195,7 @@ int vtkCompositeDataPipeline::InputTypeIsValid(
   // can handle any input type. The input type will be checked again during
   // each step of the iteration.
   int compositePort;
-  if (this->ShouldIterateOverInput(compositePort))
+  if (this->ShouldIterateOverInput(inInfoVec, compositePort))
     {
     if (compositePort == port)
       {
@@ -241,7 +208,8 @@ int vtkCompositeDataPipeline::InputTypeIsValid(
 }
 
 //----------------------------------------------------------------------------
-bool vtkCompositeDataPipeline::ShouldIterateOverInput(int& compositePort)
+bool vtkCompositeDataPipeline::ShouldIterateOverInput(vtkInformationVector** inInfoVec,
+                                                      int& compositePort)
 {
   compositePort = -1;
   // Find the first input that has a composite data that does not match
@@ -273,7 +241,7 @@ bool vtkCompositeDataPipeline::ShouldIterateOverInput(int& compositePort)
           return false;
           }
 
-        vtkInformation* inInfo = this->GetInputInformation(i, 0);
+        vtkInformation* inInfo = inInfoVec[i]->GetInformationObject(0);
         vtkDataObject* input = inInfo->Get(vtkDataObject::DATA_OBJECT());
         // If input does not match a required input type
         bool foundMatch = false;
@@ -307,6 +275,42 @@ bool vtkCompositeDataPipeline::ShouldIterateOverInput(int& compositePort)
 }
 
 //----------------------------------------------------------------------------
+void vtkCompositeDataPipeline::ExecuteEach(vtkCompositeDataIterator* iter,
+                                           vtkInformationVector** inInfoVec,
+                                           vtkInformationVector* outInfoVec,
+                                           int compositePort,
+                                           int connection,
+                                           vtkInformation* request,
+                                           vtkCompositeDataSet* compositeOutput)
+{
+  vtkInformation* inInfo = inInfoVec[compositePort]->GetInformationObject(connection);
+  vtkInformation* outInfo = outInfoVec->GetInformationObject(0); //assumed to be 0
+
+  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
+    {
+    vtkDataObject* dobj = iter->GetCurrentDataObject();
+    if (dobj)
+      {
+      // Note that since VisitOnlyLeaves is ON on the iterator,
+      // this method is called only for leaves, hence, we are assured that
+      // neither dobj nor outObj are vtkCompositeDataSet subclasses.
+      vtkDataObject* outObj =
+        this->ExecuteSimpleAlgorithmForBlock(inInfoVec,
+                                             outInfoVec,
+                                             inInfo,
+                                             outInfo,
+                                             request,
+                                             dobj);
+      if (outObj)
+        {
+        compositeOutput->SetDataSet(iter, outObj);
+        outObj->FastDelete();
+        }
+      }
+    }
+}
+
+//----------------------------------------------------------------------------
 // Execute a simple (non-composite-aware) filter multiple times, once per
 // block. Collect the result in a composite dataset that is of the same
 // structure as the input.
@@ -384,29 +388,7 @@ void vtkCompositeDataPipeline::ExecuteSimpleAlgorithm(
 
     vtkSmartPointer<vtkCompositeDataIterator> iter;
     iter.TakeReference(input->NewIterator());
-    for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-      iter->GoToNextItem())
-      {
-      vtkDataObject* dobj = iter->GetCurrentDataObject();
-      if (dobj)
-        {
-        // Note that since VisitOnlyLeaves is ON on the iterator,
-        // this method is called only for leaves, hence, we are assured that
-        // neither dobj nor outObj are vtkCompositeDataSet subclasses.
-        vtkDataObject* outObj =
-          this->ExecuteSimpleAlgorithmForBlock(inInfoVec,
-                                               outInfoVec,
-                                               inInfo,
-                                               outInfo,
-                                               r,
-                                               dobj);
-        if (outObj)
-          {
-          compositeOutput->SetDataSet(iter, outObj);
-          outObj->FastDelete();
-          }
-        }
-      }
+    this->ExecuteEach(iter, inInfoVec, outInfoVec, compositePort, 0, r, compositeOutput);
 
     // True when the pipeline is iterating over the current (simple)
     // filter to produce composite output. In this case,
@@ -468,10 +450,9 @@ vtkDataObject* vtkCompositeDataPipeline::ExecuteSimpleAlgorithmForBlock(
     }
 
   request->Set(REQUEST_DATA_OBJECT());
-  this->SuppressResetPipelineInformation = 1;
-  this->Superclass::ExecuteDataObject(
-    request, this->GetInputInformation(),this->GetOutputInformation());
-  this->SuppressResetPipelineInformation = 0;
+  outInfo->Set(SUPPRESS_RESET_PI(), 1);
+  this->Superclass::ExecuteDataObject(request, inInfoVec, outInfoVec);
+  outInfo->Remove(SUPPRESS_RESET_PI());
   request->Remove(REQUEST_DATA_OBJECT());
 
   request->Set(REQUEST_INFORMATION());
@@ -483,14 +464,14 @@ vtkDataObject* vtkCompositeDataPipeline::ExecuteSimpleAlgorithmForBlock(
   //   dobj->CopyInformationToPipeline(request, 0, inInfo, 1);
   //   }
 
-  this->Superclass::ExecuteInformation(request,inInfoVec,outInfoVec);
+  this->Superclass::ExecuteInformation(request, inInfoVec, outInfoVec);
   request->Remove(REQUEST_INFORMATION());
 
   int storedPiece = -1;
   int storedNumPieces = -1;
   for(int m=0; m < this->Algorithm->GetNumberOfOutputPorts(); ++m)
     {
-    vtkInformation* info = this->GetOutputInformation(m);
+    vtkInformation* info = outInfoVec->GetInformationObject(m);
     // Update the whole thing
     if (info->Has(
                   vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT()))
@@ -530,7 +511,7 @@ vtkDataObject* vtkCompositeDataPipeline::ExecuteSimpleAlgorithmForBlock(
 
   for(int m=0; m < this->Algorithm->GetNumberOfOutputPorts(); ++m)
     {
-    vtkInformation* info = this->GetOutputInformation(m);
+    vtkInformation* info = outInfoVec->GetInformationObject(m);
     if (storedPiece!=-1)
       {
       info->Set(
@@ -843,7 +824,7 @@ void vtkCompositeDataPipeline::CopyDefaultInformation(
     // that port. Composite data pipeline works with piece extents
     // only.
     int compositePort;
-    if (this->ShouldIterateOverInput(compositePort))
+    if (this->ShouldIterateOverInput(inInfoVec, compositePort))
       {
       // Get the output port from which to copy the extent.
       outputPort = -1;
@@ -887,7 +868,7 @@ void vtkCompositeDataPipeline::CopyDefaultInformation(
 void vtkCompositeDataPipeline::ResetPipelineInformation(int port,
                                                         vtkInformation* info)
 {
-  if (this->SuppressResetPipelineInformation)
+  if (info->Has(SUPPRESS_RESET_PI()))
     {
     return;
     }
@@ -958,7 +939,7 @@ int vtkCompositeDataPipeline::CheckCompositeData(
   // create a composite output.
   int compositePort;
 
-  if (this->ShouldIterateOverInput(compositePort))
+  if (this->ShouldIterateOverInput(inInfoVec, compositePort))
     {
     // This assumes that the first output of the filter is the one
     // that will have the composite data.
@@ -1051,6 +1032,7 @@ vtkCompositeDataSet* vtkCompositeDataPipeline::CreateOutputCompositeDataSet(
     vtkInformation* inInfo = this->GetInputInformation(compositePort, 0);
     vtkSmartPointer<vtkDataObject> curInput = inInfo->Get(vtkDataObject::DATA_OBJECT());
 
+    vtkInformation* outInfo = this->GetOutputInformation(0);
 
     vtkSmartPointer<vtkInformation> request =
       vtkSmartPointer<vtkInformation>::New();
@@ -1066,10 +1048,10 @@ vtkCompositeDataSet* vtkCompositeDataPipeline::CreateOutputCompositeDataSet(
     // Algorithms process this request after it is forwarded.
     request->Set(vtkExecutive::ALGORITHM_AFTER_FORWARD(), 1);
     request->Set(REQUEST_DATA_OBJECT());
-    this->SuppressResetPipelineInformation = 1;
+    outInfo->Set(SUPPRESS_RESET_PI(), 1);
     this->Superclass::ExecuteDataObject(
       request, this->GetInputInformation(),this->GetOutputInformation());
-    this->SuppressResetPipelineInformation = 0;
+    outInfo->Remove(SUPPRESS_RESET_PI());
     request->Remove(REQUEST_DATA_OBJECT());
 
     // Restore input.
@@ -1077,7 +1059,6 @@ vtkCompositeDataSet* vtkCompositeDataPipeline::CreateOutputCompositeDataSet(
     inInfo->Set(vtkDataObject::DATA_OBJECT(), curInput);
 
     // check the type of output data object created by the algorithm.
-    vtkInformation* outInfo = this->GetOutputInformation(0);
     vtkDataObject* curOutput = outInfo->Get(vtkDataObject::DATA_OBJECT());
     if (!curOutput->IsA("vtkUniformGrid"))
       {
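
Aside on the vtkCompositeDataPipeline.cxx hunks above: the boolean member
SuppressResetPipelineInformation is replaced by an integer information key set on
the first output port, and the helpers now read from the inInfoVec/outInfoVec
that arrive with the request instead of the executive's cached vectors. A minimal
sketch of the suppression pattern, assuming it runs inside a
vtkCompositeDataPipeline member function (names taken from the diff, the
surrounding request setup elided):

    // Sketch only -- mirrors the flow in CreateOutputCompositeDataSet above.
    vtkInformation* outInfo = this->GetOutputInformation(0);

    // Mark output port 0 so ResetPipelineInformation() becomes a no-op while the
    // temporary REQUEST_DATA_OBJECT pass runs with a swapped "simple" input.
    outInfo->Set(SUPPRESS_RESET_PI(), 1);
    this->Superclass::ExecuteDataObject(
      request, this->GetInputInformation(), this->GetOutputInformation());

    // Clear the flag so later passes reset pipeline information normally.
    outInfo->Remove(SUPPRESS_RESET_PI());

ResetPipelineInformation() then simply returns early when
info->Has(SUPPRESS_RESET_PI()), as shown in the earlier hunk.
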
diff --git a/Common/ExecutionModel/vtkCompositeDataPipeline.h b/Common/ExecutionModel/vtkCompositeDataPipeline.h
index 031a5bb..ffd64fb 100644
--- a/Common/ExecutionModel/vtkCompositeDataPipeline.h
+++ b/Common/ExecutionModel/vtkCompositeDataPipeline.h
@@ -45,6 +45,7 @@
 #include "vtkStreamingDemandDrivenPipeline.h"
 
 class vtkCompositeDataSet;
+class vtkCompositeDataIterator;
 class vtkInformationDoubleKey;
 class vtkInformationIntegerVectorKey;
 class vtkInformationObjectBaseKey;
@@ -155,6 +156,15 @@ protected:
                                       vtkInformationVector** inInfoVec,
                                       vtkInformationVector* outInfoVec,
                                       int compositePort);
+
+  virtual void ExecuteEach(vtkCompositeDataIterator* iter,
+                           vtkInformationVector** inInfoVec,
+                           vtkInformationVector* outInfoVec,
+                           int compositePort,
+                           int connection,
+                           vtkInformation* request,
+                           vtkCompositeDataSet* compositeOutput);
+
   vtkDataObject* ExecuteSimpleAlgorithmForBlock(
     vtkInformationVector** inInfoVec,
     vtkInformationVector* outInfoVec,
@@ -163,7 +173,8 @@ protected:
     vtkInformation* request,
     vtkDataObject* dobj);
 
-  bool ShouldIterateOverInput(int& compositePort);
+  bool ShouldIterateOverInput(vtkInformationVector** inInfoVec,
+                              int& compositePort);
 
   virtual int InputTypeIsValid(int port, int index,
                                 vtkInformationVector **inInfoVec);
@@ -176,9 +187,6 @@ protected:
   vtkInformation* UpdateExtentRequest;
   vtkInformation* DataRequest;
 
-  // Because we sometimes have to swap between "simple" data types and composite
-  // data types, we sometimes want to skip resetting the pipeline information.
-  int SuppressResetPipelineInformation;
 
   virtual void ResetPipelineInformation(int port, vtkInformation*);
 
@@ -200,6 +208,10 @@ protected:
 
   int NeedToExecuteBasedOnCompositeIndices(vtkInformation* outInfo);
 
+  // Because we sometimes have to swap between "simple" data types and composite
+  // data types, we sometimes want to skip resetting the pipeline information.
+  static vtkInformationIntegerKey* SUPPRESS_RESET_PI();
+
 private:
   vtkCompositeDataPipeline(const vtkCompositeDataPipeline&);  // Not implemented.
   void operator=(const vtkCompositeDataPipeline&);  // Not implemented.
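
On the header hunks above: SUPPRESS_RESET_PI() is now declared as a protected
static vtkInformationIntegerKey accessor, and vtkCompositeDataIterator is
forward-declared for the new ExecuteEach() hook. The key's definition is not part
of this excerpt; the usual VTK pattern for defining such a key (assumed here, in
vtkCompositeDataPipeline.cxx) would be:

    // Standard VTK macro; expands to the body of the static accessor declared above.
    vtkInformationKeyMacro(vtkCompositeDataPipeline, SUPPRESS_RESET_PI, Integer);
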
diff --git a/Common/ExecutionModel/vtkCompositeDataSetAlgorithm.h b/Common/ExecutionModel/vtkCompositeDataSetAlgorithm.h
index 81e30be..b57e097 100644
--- a/Common/ExecutionModel/vtkCompositeDataSetAlgorithm.h
+++ b/Common/ExecutionModel/vtkCompositeDataSetAlgorithm.h
@@ -54,7 +54,7 @@ public:
 
 protected:
   vtkCompositeDataSetAlgorithm();
-  ~vtkCompositeDataSetAlgorithm() {};
+  ~vtkCompositeDataSetAlgorithm() {}
 
   // Description:
   // This is called by the superclass.
diff --git a/Common/ExecutionModel/vtkComputingResources.cxx b/Common/ExecutionModel/vtkComputingResources.cxx
deleted file mode 100644
index f612482..0000000
--- a/Common/ExecutionModel/vtkComputingResources.cxx
+++ /dev/null
@@ -1,280 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkComputingResources.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright (c) 2008, 2009 by SCI Institute, University of Utah.
-
-  This is part of the Parallel Dataflow System originally developed by
-  Huy T. Vo and Claudio T. Silva. For more information, see:
-
-  "Parallel Dataflow Scheme for Streaming (Un)Structured Data" by Huy
-  T. Vo, Daniel K. Osmari, Brian Summa, Joao L.D. Comba, Valerio
-  Pascucci and Claudio T. Silva, SCI Institute, University of Utah,
-  Technical Report #UUSCI-2009-004, 2009.
-
-  "Multi-Threaded Streaming Pipeline For VTK" by Huy T. Vo and Claudio
-  T. Silva, SCI Institute, University of Utah, Technical Report
-  #UUSCI-2009-005, 2009.
--------------------------------------------------------------------------*/
-#include "vtkComputingResources.h"
-
-#include "vtkInformation.h"
-#include "vtkMultiThreader.h"
-#include "vtkObjectFactory.h"
-#include "vtkThreadedImageAlgorithm.h"
-#include "vtkThreadedStreamingPipeline.h"
-#include <vtksys/hash_map.hxx>
-
-//----------------------------------------------------------------------------
-vtkStandardNewMacro(vtkComputingResources);
-
-//----------------------------------------------------------------------------
-class vtkCPUResource: public vtkProcessingUnitResource
-{
-public:
-  virtual int ProcessingUnit()
-  {
-    return vtkThreadedStreamingPipeline::PROCESSING_UNIT_CPU;
-  }
-
-  virtual bool HasResource()
-  {
-    return this->NumberOfThreads > 0;
-  }
-
-  virtual void Clear()
-  {
-    this->NumberOfThreads = 0;
-  }
-
-  virtual void ObtainMinimum()
-  {
-    this->NumberOfThreads = 1;
-  }
-
-  virtual void ObtainMaximum()
-  {
-    this->NumberOfThreads = vtkMultiThreader::GetGlobalDefaultNumberOfThreads();
-  }
-
-  virtual void IncreaseByRatio(float ratio, vtkProcessingUnitResource *refResource)
-  {
-    vtkCPUResource *other = static_cast<vtkCPUResource*>(refResource);
-    int thisNThread = (int)(ratio*other->NumberOfThreads+0.5);
-    if (thisNThread<1)
-      {
-      thisNThread = 1;
-      }
-    this->NumberOfThreads += thisNThread;
-  }
-
-  virtual void AllocateFor(vtkThreadedStreamingPipeline *exec)
-  {
-    if (exec->GetAlgorithm()->IsA("vtkThreadedImageAlgorithm"))
-      {
-      vtkThreadedImageAlgorithm::SafeDownCast(exec->GetAlgorithm())
-        ->SetNumberOfThreads(this->NumberOfThreads);
-      }
-  }
-
-  virtual bool CanAccommodate(vtkProcessingUnitResource *refResource)
-  {
-    vtkCPUResource *other = static_cast<vtkCPUResource*>(refResource);
-    return this->NumberOfThreads>=other->NumberOfThreads;
-  }
-
-  virtual void Reserve(vtkProcessingUnitResource *refResource)
-  {
-    vtkCPUResource *other = static_cast<vtkCPUResource*>(refResource);
-    this->NumberOfThreads -= other->NumberOfThreads;
-  }
-
-  virtual void Collect(vtkProcessingUnitResource *refResource)
-  {
-    vtkCPUResource *other = static_cast<vtkCPUResource*>(refResource);
-    this->NumberOfThreads += other->NumberOfThreads;
-  }
-
-private:
-  int NumberOfThreads;
-};
-
-//----------------------------------------------------------------------------
-// This needs to reimplement
-class vtkGPUResource: public vtkProcessingUnitResource
-{
-public:
-  virtual int ProcessingUnit()
-  {
-    return vtkThreadedStreamingPipeline::PROCESSING_UNIT_GPU;
-  }
-
-  virtual bool HasResource()
-  {
-    return false;
-  }
-
-  virtual void Clear() {}
-
-  virtual void ObtainMinimum() {}
-
-  virtual void ObtainMaximum() {}
-
-  virtual void IncreaseByRatio(float vtkNotUsed(ratio),
-                               vtkProcessingUnitResource* vtkNotUsed(refResource)) {}
-
-  virtual void AllocateFor(vtkThreadedStreamingPipeline* vtkNotUsed(exec))
-  {
-    fprintf(stderr, "vtkGPUResource NEEDS TO BE IMPLEMENTED!!!!\n");
-  }
-
-  bool CanAccommodate(vtkProcessingUnitResource* vtkNotUsed(refResource))
-  {
-    return false;
-  }
-
-  void Reserve(vtkProcessingUnitResource* vtkNotUsed(refResource)) {}
-
-  void Collect(vtkProcessingUnitResource* vtkNotUsed(refResource)) {}
-
-private:
-};
-
-//----------------------------------------------------------------------------
-class vtkComputingResources::implementation
-{
-public:
-  typedef vtksys::hash_map<int, vtkProcessingUnitResource*> ProcessingUnitToResourceHashMap;
-  ProcessingUnitToResourceHashMap ResourceMap;
-};
-
-//----------------------------------------------------------------------------
-vtkComputingResources::vtkComputingResources() :
-  Implementation(new implementation())
-{
-  this->Implementation->ResourceMap[vtkThreadedStreamingPipeline::PROCESSING_UNIT_CPU] = new vtkCPUResource();
-  this->Implementation->ResourceMap[vtkThreadedStreamingPipeline::PROCESSING_UNIT_GPU] = new vtkGPUResource();
-  this->ObtainMinimumResources();
-}
-
-//----------------------------------------------------------------------------
-vtkComputingResources::~vtkComputingResources()
-{
-  implementation::ProcessingUnitToResourceHashMap::iterator i =
-    this->Implementation->ResourceMap.begin();
-  for (; i != this->Implementation->ResourceMap.end(); i++)
-    {
-    delete (*i).second;
-    }
-  this->Implementation->ResourceMap.clear();
-  delete this->Implementation;
-}
-
-//----------------------------------------------------------------------------
-void vtkComputingResources::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
-
-//----------------------------------------------------------------------------
-vtkProcessingUnitResource *vtkComputingResources::GetResourceFor(int processingUnit)
-{
-  implementation::ProcessingUnitToResourceHashMap::iterator i =
-    this->Implementation->ResourceMap.find(processingUnit);
-  if (i != this->Implementation->ResourceMap.end())
-    {
-    return (*i).second;
-    }
-  return NULL;
-}
-
-//----------------------------------------------------------------------------
-void vtkComputingResources::Clear()
-{
-  implementation::ProcessingUnitToResourceHashMap::iterator i =
-    this->Implementation->ResourceMap.begin();
-  for (; i != this->Implementation->ResourceMap.end(); i++)
-    {
-    (*i).second->Clear();
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkComputingResources::ObtainMinimumResources()
-{
-  implementation::ProcessingUnitToResourceHashMap::iterator i =
-    this->Implementation->ResourceMap.begin();
-  for (; i != this->Implementation->ResourceMap.end(); i++) {
-    (*i).second->ObtainMinimum();
-  }
-}
-
-//----------------------------------------------------------------------------
-void vtkComputingResources::ObtainMaximumResources()
-{
-  implementation::ProcessingUnitToResourceHashMap::iterator i =
-    this->Implementation->ResourceMap.begin();
-  for (; i != this->Implementation->ResourceMap.end(); i++) {
-    (*i).second->ObtainMaximum();
-  }
-}
-
-//----------------------------------------------------------------------------
-void vtkComputingResources::Deploy(vtkThreadedStreamingPipeline *exec,
-                                   vtkInformation* vtkNotUsed(info))
-{
-  implementation::ProcessingUnitToResourceHashMap::iterator i =
-    this->Implementation->ResourceMap.begin();
-  for (; i != this->Implementation->ResourceMap.end(); i++)
-    {
-    int resource = vtkThreadedStreamingPipeline::PROCESSING_UNIT_CPU;
-//     if (exec->GetAlgorithm()->GetInformation()->
-//         Has(vtkThreadedStreamingPipeline::PROCESSING_UNIT()))
-//       resource = exec->GetAlgorithm()->GetInformation()->
-//         Get(vtkThreadedStreamingPipeline::PROCESSING_UNIT());
-    if (((*i).first & resource) &&
-        (*i).second->HasResource())
-      {
-      (*i).second->AllocateFor(exec);
-      fprintf(stderr, "UPDATE %s\n", exec->GetAlgorithm()->GetClassName());
-      exec->Update();
-//       exec->ForceUpdateData((*i).first, info);
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-bool vtkComputingResources::Reserve(vtkComputingResources *res)
-{
-  implementation::ProcessingUnitToResourceHashMap::iterator i =
-    this->Implementation->ResourceMap.find(vtkThreadedStreamingPipeline::PROCESSING_UNIT_CPU);
-  implementation::ProcessingUnitToResourceHashMap::iterator j =
-    res->Implementation->ResourceMap.find(vtkThreadedStreamingPipeline::PROCESSING_UNIT_CPU);
-  bool ok = (*i).second->CanAccommodate((*j).second);
-  if (ok)
-    {
-    (*i).second->Reserve((*j).second);
-    }
-  return ok;
-}
-
-//----------------------------------------------------------------------------
-void vtkComputingResources::Collect(vtkComputingResources *res)
-{
-  implementation::ProcessingUnitToResourceHashMap::iterator i =
-    this->Implementation->ResourceMap.find(vtkThreadedStreamingPipeline::PROCESSING_UNIT_CPU);
-  implementation::ProcessingUnitToResourceHashMap::iterator j =
-    res->Implementation->ResourceMap.find(vtkThreadedStreamingPipeline::PROCESSING_UNIT_CPU);
-  (*i).second->Collect((*j).second);
-}
diff --git a/Common/ExecutionModel/vtkComputingResources.h b/Common/ExecutionModel/vtkComputingResources.h
deleted file mode 100644
index c567f53..0000000
--- a/Common/ExecutionModel/vtkComputingResources.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkComputingResources.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notice for more information.
-
-  =========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright (c) 2008, 2009 by SCI Institute, University of Utah.
-
-  This is part of the Parallel Dataflow System originally developed by
-  Huy T. Vo and Claudio T. Silva. For more information, see:
-
-  "Parallel Dataflow Scheme for Streaming (Un)Structured Data" by Huy
-  T. Vo, Daniel K. Osmari, Brian Summa, Joao L.D. Comba, Valerio
-  Pascucci and Claudio T. Silva, SCI Institute, University of Utah,
-  Technical Report #UUSCI-2009-004, 2009.
-
-  "Multi-Threaded Streaming Pipeline For VTK" by Huy T. Vo and Claudio
-  T. Silva, SCI Institute, University of Utah, Technical Report
-  #UUSCI-2009-005, 2009.
-  -------------------------------------------------------------------------*/
-// .NAME vtkComputingResources - Definition of computing resource
-// (threads/kernels)
-// .SECTION Description
-// This is a class for distribute the number of threads to a network of modules
-
-// .SECTION See Also
-// vtkExecutionScheduler
-
-#ifndef __vtkComputingResources_h
-#define __vtkComputingResources_h
-
-#include "vtkCommonExecutionModelModule.h" // For export macro
-#include "vtkObject.h"
-
-class vtkInformation;
-class vtkProcessingUnitResource;
-class vtkThreadedStreamingPipeline;
-
-class VTKCOMMONEXECUTIONMODEL_EXPORT vtkComputingResources : public vtkObject
-{
-public:
-  static vtkComputingResources* New();
-  vtkTypeMacro(vtkComputingResources,vtkObject);
-  void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Description:
-  // Set resources to an empty states
-  void Clear();
-
-  // Description:
-  // Assign a minimum amount of usable resources to this object,
-  // e.g. 1 thread
-  void ObtainMinimumResources();
-
-  // Description:
-  // Assign a maximum amount of usable resources to this object
-  void ObtainMaximumResources();
-
-  //BTX
-  // Description:
-  // Return the resources of a specific type of processing unit that
-  // is hold in this object
-  vtkProcessingUnitResource *GetResourceFor(int processingUnit);
-  //ETX
-
-  // Description:
-  // Assign the resources and information of this object to an
-  // executive, i.e., set the number of threads of the algorithm the
-  // executive is pointing to
-  void Deploy(vtkThreadedStreamingPipeline *exec, vtkInformation *info);
-
-  // Description:
-  // Take an amount of computing resources out of this object. Return
-  // true if it is successful.
-  bool Reserve(vtkComputingResources *res);
-
-  // Description:
-  // Add an amount of computing resources to this object
-  void Collect(vtkComputingResources *res);
-
-protected:
-  vtkComputingResources();
-  ~vtkComputingResources();
-
-//BTX
-  class implementation;
-  implementation* const Implementation;
-//ETX
-private:
-  vtkComputingResources(const vtkComputingResources&);  // Not implemented.
-  void operator=(const vtkComputingResources&);  // Not implemented.
-};
-
-//BTX
-//----------------------------------------------------------------------------
-// A basic resource class. It is put here for later inheritance for
-// any type of computing, e.g. CPU/GPU.
-//----------------------------------------------------------------------------
-class vtkProcessingUnitResource {
-public:
-  virtual ~vtkProcessingUnitResource() {}
-
-  // Description:
-  // Return the type of unit this computing resource is holding
-  virtual int ProcessingUnit() = 0;
-
-  // Description:
-  // Return true if this resource is not empty
-  virtual bool HasResource() = 0;
-
-  // Description:
-  // Make this resource empty
-  virtual void Clear() = 0;
-
-  // Description:
-  // Give this object a minimum amount of resource it can allocate
-  virtual void ObtainMinimum() = 0;
-
-  // Description:
-  // Give this object a maximum amount of resource it can allocate
-  virtual void ObtainMaximum() = 0;
-
-  // Description:
-  // Given a ratio and a resource, increase this resource by a ratio
-  // of the reference resource. This is the basic function for
-  // resource distributing
-  virtual void IncreaseByRatio(float ratio, vtkProcessingUnitResource *refResource) = 0;
-
-  // Description:
-  // This actually set the amount of resource on the algorithm holding
-  // by the input executive
-  virtual void AllocateFor(vtkThreadedStreamingPipeline *exec) = 0;
-
-  // Description:
-  // Return true if this object can allocate at least refResource
-  virtual bool CanAccommodate(vtkProcessingUnitResource *refResource) = 0;
-
-  // Description:
-  // Reserve an amount of resource given by refResource from this object
-  virtual void Reserve(vtkProcessingUnitResource *refResource) = 0;
-
-  // Description:
-  // Add an amount of resource given by refResource to this object
-  virtual void Collect(vtkProcessingUnitResource *refResource) = 0;
-};
-//ETX
-
-#endif
diff --git a/Common/ExecutionModel/vtkDataSetAlgorithm.h b/Common/ExecutionModel/vtkDataSetAlgorithm.h
index 8296518..aa3ba6c 100644
--- a/Common/ExecutionModel/vtkDataSetAlgorithm.h
+++ b/Common/ExecutionModel/vtkDataSetAlgorithm.h
@@ -110,7 +110,7 @@ public:
 
 protected:
   vtkDataSetAlgorithm();
-  ~vtkDataSetAlgorithm() {};
+  ~vtkDataSetAlgorithm() {}
 
   // Description:
   // This is called within ProcessRequest when a request asks for
diff --git a/Common/ExecutionModel/vtkDemandDrivenPipeline.cxx b/Common/ExecutionModel/vtkDemandDrivenPipeline.cxx
index c5ee92a..cb5b19e 100644
--- a/Common/ExecutionModel/vtkDemandDrivenPipeline.cxx
+++ b/Common/ExecutionModel/vtkDemandDrivenPipeline.cxx
@@ -538,7 +538,7 @@ void vtkDemandDrivenPipeline::ExecuteDataStart(vtkInformation* request,
   // outputs.
   if (this->GetNumberOfInputPorts() > 0)
     {
-    vtkDataObject* input = this->GetInputData(0, 0);
+    vtkDataObject* input = this->GetInputData(0, 0, inInfo);
     if (input && input->GetFieldData())
       {
       for(i=0; i < outputs->GetNumberOfInformationObjects(); ++i)
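
On the vtkDemandDrivenPipeline.cxx hunk above: the call now uses the
GetInputData() overload that takes the request's input information vectors
(signature inferred from the call site), presumably so the field data is copied
from the input actually supplied with this request rather than from the
executive's cached input information. Sketch of the call as used above:

    // inInfo is the vtkInformationVector** handed to ExecuteDataStart().
    vtkDataObject* input = this->GetInputData(0, 0, inInfo);
    if (input && input->GetFieldData())
      {
      // copy the input's field data to each output, as in the surrounding loop
      }
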
diff --git a/Common/ExecutionModel/vtkExecutionScheduler.cxx b/Common/ExecutionModel/vtkExecutionScheduler.cxx
deleted file mode 100644
index f9dbb0f..0000000
--- a/Common/ExecutionModel/vtkExecutionScheduler.cxx
+++ /dev/null
@@ -1,852 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkExecutionScheduler.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright (c) 2008, 2009 by SCI Institute, University of Utah.
-
-  This is part of the Parallel Dataflow System originally developed by
-  Huy T. Vo and Claudio T. Silva. For more information, see:
-
-  "Parallel Dataflow Scheme for Streaming (Un)Structured Data" by Huy
-  T. Vo, Daniel K. Osmari, Brian Summa, Joao L.D. Comba, Valerio
-  Pascucci and Claudio T. Silva, SCI Institute, University of Utah,
-  Technical Report #UUSCI-2009-004, 2009.
-
-  "Multi-Threaded Streaming Pipeline For VTK" by Huy T. Vo and Claudio
-  T. Silva, SCI Institute, University of Utah, Technical Report
-  #UUSCI-2009-005, 2009.
--------------------------------------------------------------------------*/
-#include "vtkExecutionScheduler.h"
-
-#include "vtkAlgorithm.h"
-#include "vtkCommand.h"
-#include "vtkComputingResources.h"
-#include "vtkExecutiveCollection.h"
-#include "vtkInformation.h"
-#include "vtkInformationVector.h"
-#include "vtkInformationExecutivePortKey.h"
-#include "vtkInformationExecutivePortVectorKey.h"
-#include "vtkInformationIntegerKey.h"
-#include "vtkMultiThreader.h"
-#include "vtkMutexLock.h"
-#include "vtkObjectFactory.h"
-#include "vtkThreadedStreamingPipeline.h"
-#include "vtkThreadMessager.h"
-
-#include <set>
-#include <vtksys/hash_map.hxx>
-#include <vector>
-#include <vtksys/hash_set.hxx>
-
-//----------------------------------------------------------------------------
-vtkStandardNewMacro(vtkExecutionScheduler);
-
-vtkInformationKeyMacro(vtkExecutionScheduler, TASK_PRIORITY, Integer);
-
-//----------------------------------------------------------------------------
-class Task
-{
-public:
-  Task(int _priority = -1, vtkExecutive *_exec = NULL, vtkInformation *_info = NULL)
-  {
-    this->priority = _priority;
-    this->exec = _exec;
-    this->info = _info;
-  }
-
-  int             priority;
-  vtkExecutive   *exec;
-  vtkInformation *info;
-};
-
-//----------------------------------------------------------------------------
-class TaskWeakOrdering
-{
-public:
-  bool operator()(const Task& t1,
-                  const Task& t2) const
-  {
-    return t1.priority < t2.priority;
-  }
-};
-
-//----------------------------------------------------------------------------
-// Convinient definitions of vector/set of vtkExecutive
-class vtkExecutiveHasher
-{
-public:
-  size_t operator()(const vtkExecutive* e) const
-  {
-    return (size_t)e;
-  }
-};
-typedef vtksys::hash_set<vtkExecutive*, vtkExecutiveHasher> vtkExecutiveSet;
-typedef std::vector<vtkExecutive*>                       vtkExecutiveVector;
-
-//----------------------------------------------------------------------------
-class vtkExecutionScheduler::implementation
-{
-public:
-  // The containing object
-  vtkExecutionScheduler*  Scheduler;
-
-  // Some convenient type definitions for STL containers
-  typedef vtksys::hash_map<vtkExecutive*, int,
-    vtkExecutiveHasher>                                 ExecutiveIntHashMap;
-  typedef std::pair<int, int>                        Edge;
-  class                                                 EdgeHasher;
-  typedef vtksys::hash_set<Edge, EdgeHasher>            EdgeSet;
-  typedef std::multiset<Task, TaskWeakOrdering>      TaskPriorityQueue;
-  typedef std::vector<vtkMutexLock*>                 MutexLockVector;
-  typedef std::vector<vtkThreadMessager*>            MessagerVector;
-  class EdgeHasher
-  {
-  public:
-    size_t operator()(const Edge &e) const
-    {
-      return (size_t)((e.first << 16) +  e.second);
-    }
-  };
-
-  vtkExecutiveSet           ExecutingTasks;
-  TaskPriorityQueue         PrioritizedTasks;
-  ExecutiveIntHashMap       DependencyNodes;
-  EdgeSet                   DependencyEdges;
-  MessagerVector            TaskDoneMessagers;
-  MutexLockVector           InputsReleasedLocks;
-  MessagerVector            InputsReleasedMessagers;
-  int                       CurrentPriority;
-
-  // Description:
-  // Start from the exec and go all the way up to the sources (modules
-  // without any inputs), then call TraverseDownToSink to update edges
-  void FindAndTraverseFromSources(vtkExecutive *exec, vtkExecutiveSet &visited);
-
-  // Description:
-  // Actual traverse down the network, for each nodes, construct and
-  // add edges connecting all of its upstream modules to itself to the
-  // dependency graph
-  void TraverseDownToSink(vtkExecutive *exec, vtkExecutiveSet &upstream,
-                          vtkExecutiveSet &visited);
-
-  // Description:
-  // Actual traverse down the network, for each nodes, construct and
-  // add edges connecting all of its upstream modules to itself to the
-  // dependency graph
-  void CollectDownToSink(vtkExecutive *exec, vtkExecutiveSet &visited,
-                         vtkExecutiveVector &graph);
-
-  // Description:
-  // A task can be executed if none of its predecessor tasks are still
-  // on the queue. This only makes sense for tasks that are currently
-  // on the queue, thus, an iterator is provided instead of the task
-  // itself.
-  bool CanExecuteTask(TaskPriorityQueue::const_iterator ti);
-  // Description:
-  // Spawn a thread to execute a module
-  void Execute(const Task &task);
-
-  // Description:
-  // Check if the given exec is a new module or not. If it is then
-  // traverse the network to update dependency edges for its connected
-  // subgraph
-  void UpdateDependencyGraph(vtkExecutive *exec);
-
-  // Description:
-  // Add the module exec to the set of dependency nodes if it is not
-  // already there and return its node id number
-  int AddToDependencyGraph(vtkExecutive *exec);
-
-  // Description:
-  // Add the given executive to the execution queue for later
-  // execution
-  void AddToQueue(vtkExecutive *exec, vtkInformation *info);
-
-  // Description:
-  // Obtain the priority from the information object if it is given,
-  // otherwise, use a priority assigned from the scheduler
-  int AcquirePriority(vtkInformation *info);
-};
-
-//----------------------------------------------------------------------------
-static vtkExecutionScheduler *globalScheduler = NULL;
-
-//----------------------------------------------------------------------------
-void * vtkExecutionScheduler_ScheduleThread(void *data);
-void * vtkExecutionScheduler_ExecuteThread(void *data);
-
-//----------------------------------------------------------------------------
-vtkExecutionScheduler* vtkExecutionScheduler::GetGlobalScheduler()
-{
-  if (!globalScheduler)
-    {
-    globalScheduler = vtkExecutionScheduler::New();
-    }
-  return globalScheduler;
-}
-
-//----------------------------------------------------------------------------
-vtkExecutionScheduler::vtkExecutionScheduler()
-  : Implementation(new implementation)
-{
-  this->Resources = vtkComputingResources::New();
-  this->Resources->ObtainMaximumResources();
-  this->ResourceMessager = vtkThreadMessager::New();
-  this->ScheduleLock = vtkMutexLock::New();
-  this->ScheduleMessager = vtkThreadMessager::New();
-  this->ScheduleThreader = vtkMultiThreader::New();
-  this->ScheduleThreader->SetNumberOfThreads(1);
-  this->ScheduleThreadId = -1;
-  this->Implementation->Scheduler = this;
-  this->Implementation->CurrentPriority = 0;
-}
-
-//----------------------------------------------------------------------------
-vtkExecutionScheduler::~vtkExecutionScheduler()
-{
-  this->Resources->Delete();
-  this->ResourceMessager->Delete();
-  this->ScheduleLock->Delete();
-  this->ScheduleMessager->Delete();
-  this->ScheduleThreader->Delete();
-  delete this->Implementation;
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::SchedulePropagate(vtkExecutiveCollection *execs, vtkInformation* vtkNotUsed(info))
-{
-  execs->InitTraversal();
-  vtkExecutiveSet    visited;
-  vtkExecutiveVector graph;
-  for (vtkExecutive *e = execs->GetNextItem(); e != 0; e = execs->GetNextItem())
-    {
-    this->Implementation->CollectDownToSink(e, visited, graph);
-    }
-
-  for (vtkExecutiveVector::iterator vi=graph.begin();
-       vi!=graph.end(); vi++)
-    {
-    (*vi)->Update();
-    vtkAlgorithm *rep =(*vi)->GetAlgorithm();
-    if (rep->IsA("vtkDataRepresentation"))
-      {
-      rep->InvokeEvent(vtkCommand::UpdateEvent, NULL);
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::Schedule(vtkExecutiveCollection *execs, vtkInformation *info)
-{
-  if (this->ScheduleThreadId == -1)
-    {
-    this->ScheduleThreadId = this->ScheduleThreader->
-      SpawnThread((vtkThreadFunctionType)(vtkExecutionScheduler_ScheduleThread), this);
-    }
-  this->ScheduleLock->Lock();
-  vtkExecutiveVector G;
-  execs->InitTraversal();
-  for (vtkExecutive *e = execs->GetNextItem(); e != 0; e = execs->GetNextItem())
-    {
-    if (this->Implementation->ExecutingTasks.find(e) != this->Implementation->ExecutingTasks.end())
-      {
-      return;
-      }
-    if (this->Implementation->DependencyNodes.find(e) == this->Implementation->DependencyNodes.end())
-      {
-      this->Implementation->UpdateDependencyGraph(e);
-      }
-    G.push_back(e);
-    }
-
-  // Create an adjacency matrix
-  unsigned i, j, k, p;
-  unsigned N = (unsigned)G.size();
-  int *A = (int*)malloc(N*N*sizeof(int));
-  int *degree = (int*)malloc(N*sizeof(int));
-  memset(A, 0, N*N*sizeof(int));
-  memset(degree, 0, N*sizeof(int));
-  for (i = 0; i < N; i++)
-    {
-    int src = (*(this->Implementation->DependencyNodes.find(G[i]))).second;
-    for (j = 0; j < N; j++)
-      {
-      int dst = (*(this->Implementation->DependencyNodes.find(G[j]))).second;
-      if (this->Implementation->DependencyEdges.find(implementation::Edge(src, dst)) !=
-          this->Implementation->DependencyEdges.end())
-        {
-        A[i*N+j] = 1;
-        degree[j]++;
-        }
-      }
-    }
-
-  unsigned *S = (unsigned*)malloc(N*sizeof(unsigned));
-  k = 0;
-  for (j = 0; j < N; j++)
-    {
-    if (degree[j] == 0)
-      {
-      S[k++] = j;
-      }
-    }
-  p = 0;
-  while (p < k)
-    {
-    i = S[p++];
-    this->Implementation->AddToQueue(G[i], info);
-    for (j = 0; j < N; j++)
-      if (A[i*N+j])
-        {
-        degree[j]--;
-        A[i*N+j] = 0;
-        if (degree[j] == 0)
-          {
-          S[k++] = j;
-          }
-        }
-    }
-  free(S);
-  free(degree);
-  free(A);
-
-  // Wake the scheduling thread up if it is currently waiting for tasks
-  this->ScheduleMessager->SendWakeMessage();
-  this->ScheduleLock->Unlock();
-
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::implementation::UpdateDependencyGraph(vtkExecutive *exec)
-{
-  vtkExecutiveSet visited;
-  this->FindAndTraverseFromSources(exec, visited);
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::implementation::FindAndTraverseFromSources
-(vtkExecutive *exec, vtkExecutiveSet &visited)
-{
-  if (visited.find(exec) != visited.end())
-    {
-    return;
-    }
-  visited.insert(exec);
-  bool isSource = true;
-  for(int i = 0; i < exec->GetNumberOfInputPorts(); ++i)
-    {
-    int nic = exec->GetAlgorithm()->GetNumberOfInputConnections(i);
-    vtkInformationVector* inVector = exec->GetInputInformation()[i];
-    for(int j = 0; j < nic; ++j)
-      {
-      vtkInformation* inInfo = inVector->GetInformationObject(j);
-      vtkExecutive* e;
-      int producerPort;
-      vtkExecutive::PRODUCER()->Get(inInfo, e, producerPort);
-      if (e)
-        {
-        isSource = false;
-        this->FindAndTraverseFromSources(e, visited);
-        }
-      }
-    }
-  if (isSource)
-    {
-    vtkExecutiveSet upstream;
-    vtkExecutiveSet downVisited;
-    this->TraverseDownToSink(exec, upstream, downVisited);
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::implementation::TraverseDownToSink
-(vtkExecutive *exec, vtkExecutiveSet &upstream, vtkExecutiveSet &visited)
-{
-  if (visited.find(exec)!=visited.end())
-    {
-    return;
-    }
-
-  // Now mark all edges from upstream modules to exec as dependency edges
-  int vId = this->AddToDependencyGraph(exec);
-  for (vtkExecutiveSet::const_iterator it = upstream.begin(); it != upstream.end(); it++)
-    {
-    implementation::ExecutiveIntHashMap::iterator hit = this->DependencyNodes.find(*it);
-    this->DependencyEdges.insert(Edge((*hit).second, vId));
-    }
-
-  // Mark as visited
-  visited.insert(exec);
-
-  // Then insert it to the upstream list for going down
-  upstream.insert(exec);
-  for(int i = 0; i < exec->GetNumberOfOutputPorts(); ++i)
-    {
-    vtkInformation* info = exec->GetOutputInformation(i);
-    int consumerCount = vtkExecutive::CONSUMERS()->Length(info);
-    vtkExecutive** e = vtkExecutive::CONSUMERS()->GetExecutives(info);
-    for (int j = 0; j < consumerCount; j++)
-      if (e[j])
-        {
-        this->TraverseDownToSink(e[j], upstream, visited);
-        }
-    }
-
-  // Take it out of the upstream and prepare for back-tracking
-  upstream.erase(exec);
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::implementation::CollectDownToSink
-(vtkExecutive *exec, vtkExecutiveSet &visited, vtkExecutiveVector &graph)
-{
-  if (visited.find(exec)!=visited.end())
-    {
-    return;
-    }
-
-  // Mark as visited
-  visited.insert(exec);
-
-  // Add it to the ordered graph
-  graph.push_back(exec);
-
-  // Then traverse down
-  for(int i = 0; i < exec->GetNumberOfOutputPorts(); ++i)
-    {
-    vtkInformation* info = exec->GetOutputInformation(i);
-    int consumerCount = vtkExecutive::CONSUMERS()->Length(info);
-    vtkExecutive** e = vtkExecutive::CONSUMERS()->GetExecutives(info);
-    for (int j = 0; j < consumerCount; j++)
-      {
-      if (e[j])
-        {
-        this->CollectDownToSink(e[j], visited, graph);
-        }
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-int vtkExecutionScheduler::implementation::AddToDependencyGraph(vtkExecutive *exec)
-{
-  // We never remove vertices, it's ok to just return the size of
-  // DependencyGraph as a node id
-  implementation::ExecutiveIntHashMap::iterator hit = this->DependencyNodes.find(exec);
-  // Check if this is a new module or an untouched sub-network
-  int vId = -1;
-  if (hit == this->DependencyNodes.end())
-    {
-    vId = (int)this->DependencyNodes.size();
-    this->DependencyNodes[exec] = vId;
-
-    // Make sure that we have enough thread messagers for this vId
-    while (this->TaskDoneMessagers.size()<=(size_t)vId)
-      {
-      this->TaskDoneMessagers.push_back(vtkThreadMessager::New());
-      }
-
-    while (this->InputsReleasedMessagers.size()<=(size_t)vId)
-      {
-      this->InputsReleasedMessagers.push_back(vtkThreadMessager::New());
-      this->InputsReleasedLocks.push_back(vtkMutexLock::New());
-      }
-    }
-  // We have this module in our cache before
-  else
-    {
-    vId = (*hit).second;
-    }
-  return vId;
-}
-
-//----------------------------------------------------------------------------
-int vtkExecutionScheduler::implementation::AcquirePriority(vtkInformation * info)
-{
-  int priority;
-  if (info && info->Has(TASK_PRIORITY()))
-    {
-    priority = info->Get(TASK_PRIORITY());
-    }
-  else
-    {
-    priority = this->CurrentPriority++;
-    }
-  return priority;
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::implementation::AddToQueue(vtkExecutive *exec, vtkInformation * info)
-{
-  int priority = this->AcquirePriority(info);
-  this->PrioritizedTasks.insert(Task(priority, exec, info));
-  vtkMutexLock *lock = this->Scheduler->GetInputsReleasedLock(exec);
-  if (lock)
-    {
-//     fprintf(stderr, "A %s\n", exec->GetAlgorithm()->GetClassName());
-    lock->Lock();
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::WaitUntilDone(vtkExecutiveCollection *execs)
-{
-  vtkExecutiveSet::const_iterator it;
-  execs->InitTraversal();
-  for (vtkExecutive *e = execs->GetNextItem(); e != 0; e = execs->GetNextItem())
-    {
-    this->WaitForTaskDone(e);
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::WaitUntilAllDone()
-{
-  while (true)
-    {
-    vtkExecutive *exec = NULL;
-    this->ScheduleLock->Lock();
-    if (this->Implementation->PrioritizedTasks.size() > 0)
-      exec = (*(this->Implementation->PrioritizedTasks.begin())).exec;
-    this->ScheduleLock->Unlock();
-    if (exec)
-      this->WaitForTaskDone(exec);
-    else
-      break;
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::WaitUntilReleased(vtkExecutiveCollection *execs)
-{
-  vtkExecutiveSet::const_iterator it;
-  execs->InitTraversal();
-  for (vtkExecutive *e = execs->GetNextItem(); e != 0; e = execs->GetNextItem())
-    {
-    this->WaitForInputsReleased(e);
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::WaitForTaskDone(vtkExecutive *exec)
-{
-  vtkThreadMessager *messager = this->GetTaskDoneMessager(exec);
-  if (messager)
-    messager->WaitForMessage();
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::WaitForInputsReleased(vtkExecutive *exec)
-{
-  vtkMutexLock *lock = this->GetInputsReleasedLock(exec);
-  if (lock)
-    {
-    lock->Lock();
-    lock->Unlock();
-    }
-//   vtkThreadMessager *messager = this->GetInputsReleasedMessager(exec);
-//   if (messager)
-//     messager->WaitForMessage();
-}
-
-//----------------------------------------------------------------------------
-vtkThreadMessager* vtkExecutionScheduler::GetTaskDoneMessager(vtkExecutive *exec)
-{
-  implementation::ExecutiveIntHashMap::iterator hit =
-    this->Implementation->DependencyNodes.find(exec);
-  if (hit != this->Implementation->DependencyNodes.end())
-    {
-    implementation::TaskPriorityQueue::const_iterator ti;
-    for (ti = this->Implementation->PrioritizedTasks.begin();
-         ti != this->Implementation->PrioritizedTasks.end(); ti++)
-      {
-      if ((*ti).exec==exec)
-        {
-        return this->Implementation->TaskDoneMessagers[(*hit).second];
-        }
-      }
-    }
-  return NULL;
-}
-
-//----------------------------------------------------------------------------
-vtkMutexLock* vtkExecutionScheduler::GetInputsReleasedLock(vtkExecutive *exec)
-{
-  implementation::ExecutiveIntHashMap::iterator hit =
-    this->Implementation->DependencyNodes.find(exec);
-  if (hit != this->Implementation->DependencyNodes.end())
-    {
-    implementation::TaskPriorityQueue::const_iterator ti;
-    for (ti = this->Implementation->PrioritizedTasks.begin();
-         ti != this->Implementation->PrioritizedTasks.end(); ti++)
-      {
-      if ((*ti).exec == exec)
-        {
-        return this->Implementation->InputsReleasedLocks[(*hit).second];
-        }
-      }
-    }
-  return NULL;
-}
-
-//----------------------------------------------------------------------------
-vtkThreadMessager* vtkExecutionScheduler::GetInputsReleasedMessager(vtkExecutive *exec)
-{
-  implementation::ExecutiveIntHashMap::iterator hit =
-    this->Implementation->DependencyNodes.find(exec);
-  if (hit != this->Implementation->DependencyNodes.end())
-    {
-    implementation::TaskPriorityQueue::const_iterator ti;
-    for (ti = this->Implementation->PrioritizedTasks.begin();
-         ti != this->Implementation->PrioritizedTasks.end(); ti++)
-      {
-      if ((*ti).exec==exec)
-        {
-        return this->Implementation->InputsReleasedMessagers[(*hit).second];
-        }
-      }
-    }
-  return NULL;
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::ReleaseResources(vtkExecutive *exec)
-{
-  vtkThreadedStreamingPipeline *threadedExec = vtkThreadedStreamingPipeline::
-    SafeDownCast(exec);
-  if (threadedExec)
-    {
-    this->ScheduleLock->Lock();
-    this->Resources->Collect(threadedExec->GetResources());
-    this->ResourceMessager->SendWakeMessage();
-    this->ScheduleLock->Unlock();
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::ReacquireResources(vtkExecutive *exec)
-{
-  vtkThreadedStreamingPipeline *threadedExec = vtkThreadedStreamingPipeline::
-    SafeDownCast(exec);
-  if (threadedExec)
-    {
-    while (!this->Resources->Reserve(threadedExec->GetResources()))
-      {
-      this->ResourceMessager->WaitForMessage();
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-bool vtkExecutionScheduler::implementation::CanExecuteTask
-(TaskPriorityQueue::const_iterator taskIter)
-{
-  if (this->ExecutingTasks.find((*taskIter).exec) != this->ExecutingTasks.end())
-    {
-    return false;
-    }
-  ExecutiveIntHashMap::iterator hit = this->DependencyNodes.find((*taskIter).exec);
-  if (hit == this->DependencyNodes.end())
-    {
-    return true;
-    }
-  int dst = (*hit).second;
-  TaskPriorityQueue::const_iterator ti;
-  for (ti = this->PrioritizedTasks.begin();
-       ti != taskIter; ti++)
-    {
-    if ((*ti).priority > (*taskIter).priority)
-      {
-      break;
-      }
-    hit = this->DependencyNodes.find((*ti).exec);
-    int src = (*hit).second;
-    if (this->DependencyEdges.find(Edge(src, dst)) != this->DependencyEdges.end())
-      {
-      return false;
-      }
-    }
-  return true;
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::RescheduleFrom(vtkExecutive *exec,
-                                           vtkComputingResources *resources)
-{
-  vtkExecutiveVector upstream;
-  // Compute the total time
-  float totalUpStreamTime = 0.0;
-  for(int i = 0; i < exec->GetNumberOfInputPorts(); ++i)
-    {
-    int nic = exec->GetAlgorithm()->GetNumberOfInputConnections(i);
-    vtkInformationVector* inVector = exec->GetInputInformation()[i];
-    for(int j = 0; j < nic; ++j)
-      {
-      vtkInformation* inInfo = inVector->GetInformationObject(j);
-      vtkExecutive* e;
-      int producerPort;
-      vtkExecutive::PRODUCER()->Get(inInfo, e, producerPort);
-      if (e && vtkThreadedStreamingPipeline::SafeDownCast(e))
-        {
-        upstream.push_back(e);
-        }
-      }
-    }
-  for (size_t i = 0; i < upstream.size(); i++)
-    {
-    totalUpStreamTime += vtkThreadedStreamingPipeline::SafeDownCast(upstream[i])->LastDataRequestTimeFromSource;
-    }
-
-  // Then distribute the resources based on those ratios
-  vtkProcessingUnitResource *totalResources[] =
-    {
-      resources->GetResourceFor(vtkThreadedStreamingPipeline::PROCESSING_UNIT_CPU),
-      resources->GetResourceFor(vtkThreadedStreamingPipeline::PROCESSING_UNIT_GPU),
-    };
-  for (size_t i = 0; i < upstream.size(); i++)
-    {
-    float ratio = vtkThreadedStreamingPipeline::
-      SafeDownCast(upstream[i])->LastDataRequestTimeFromSource/totalUpStreamTime;
-    for (size_t j = 0; j < sizeof(totalResources)/sizeof(vtkProcessingUnitResource*); j++)
-      {
-      vtkProcessingUnitResource* moduleResource = vtkThreadedStreamingPipeline::
-        SafeDownCast(upstream[i])->GetResources()->
-        GetResourceFor(totalResources[j]->ProcessingUnit());
-      moduleResource->IncreaseByRatio(ratio, totalResources[j]);
-      }
-    }
-
-  // Try to reserve upstream
-  for (size_t i = 0; i < upstream.size(); i++)
-    {
-    this->RescheduleFrom(upstream[i],
-                         vtkThreadedStreamingPipeline::
-                         SafeDownCast(upstream[i])->GetResources());
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::RescheduleNetwork(vtkExecutive *sink)
-{
-  this->Resources->ObtainMaximumResources();
-  this->RescheduleFrom(sink, this->Resources);
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::ClassInitialize()
-{
-  // Currently empty - initialize late when required.
-}
-
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::ClassFinalize()
-{
-  // Clean up our singleton (if it was ever initalized).
-  if (globalScheduler)
-    {
-    globalScheduler->FastDelete();
-    }
-}
-
-//----------------------------------------------------------------------------
-typedef struct
-{
-  vtkExecutionScheduler *scheduler;
-  Task task;
-} ExecutionData;
-//----------------------------------------------------------------------------
-void vtkExecutionScheduler::implementation::Execute(const Task &task)
-{
-  ExecutionData *eData = new ExecutionData();
-  eData->scheduler = this->Scheduler;
-  eData->task = task;
-  this->Scheduler->ScheduleThreader->SpawnThread((vtkThreadFunctionType)(vtkExecutionScheduler_ExecuteThread), eData);
-}
-
-//----------------------------------------------------------------------------
-void * vtkExecutionScheduler_ScheduleThread(void *data)
-{
-  vtkExecutionScheduler *self = static_cast<vtkExecutionScheduler*>
-    (static_cast<vtkMultiThreader::ThreadInfo*>(data)->UserData);
-  while (true)
-    {
-    self->ScheduleLock->Lock();
-    bool needToWait = true;
-    vtkExecutionScheduler::implementation::TaskPriorityQueue::iterator ti;
-    for (ti = self->Implementation->PrioritizedTasks.begin();
-         ti != self->Implementation->PrioritizedTasks.end(); ti++)
-      {
-      if (self->Implementation->CanExecuteTask(ti))
-        {
-        vtkThreadedStreamingPipeline *exec = vtkThreadedStreamingPipeline::
-          SafeDownCast((*ti).exec);
-        if (self->Resources->Reserve(exec->GetResources()))
-          {
-          needToWait = false;
-          self->Implementation->ExecutingTasks.insert(exec);
-          self->ScheduleLock->Unlock();
-          self->Implementation->Execute(*ti);
-          break;
-          }
-        }
-      }
-    if (needToWait)
-      {
-      self->ScheduleLock->Unlock();
-      self->ScheduleMessager->WaitForMessage();
-      }
-    }
-  return NULL;
-}
-
-//----------------------------------------------------------------------------
-void * vtkExecutionScheduler_ExecuteThread(void *data)
-{
-  ExecutionData *eData = static_cast<ExecutionData*>
-    (static_cast<vtkMultiThreader::ThreadInfo*>(data)->UserData);
-  vtkExecutionScheduler *self = eData->scheduler;
-  Task task = eData->task;
-  vtkThreadedStreamingPipeline *exec = vtkThreadedStreamingPipeline::
-    SafeDownCast(task.exec);
-  vtkThreadMessager *messager = self->GetTaskDoneMessager(task.exec);
-  vtkMutexLock *lock = self->GetInputsReleasedLock(task.exec);
-  exec->GetResources()->Deploy(exec, task.info);
-  self->ScheduleLock->Lock();
-  self->Implementation->PrioritizedTasks.erase(task);
-  self->Implementation->ExecutingTasks.erase(exec);
-  self->Resources->Collect(exec->GetResources());
-  self->ResourceMessager->SendWakeMessage();
-  self->ScheduleLock->Unlock();
-  exec->ReleaseInputs();
-  self->ScheduleMessager->SendWakeMessage();
-  if (task.info && task.info->Has(vtkThreadedStreamingPipeline::AUTO_PROPAGATE()))
-    {
-      fprintf(stderr, "Push DOWN from %s\n", exec->GetAlgorithm()->GetClassName());
-    exec->Push(task.info);
-    }
-  if (messager)
-    {
-    messager->SendWakeMessage();
-    }
-  delete eData;
-  fprintf(stderr, "Release now\n");
-  lock->Unlock();
-  return NULL;
-}
diff --git a/Common/ExecutionModel/vtkExecutionScheduler.h b/Common/ExecutionModel/vtkExecutionScheduler.h
deleted file mode 100644
index 54dbb9e..0000000
--- a/Common/ExecutionModel/vtkExecutionScheduler.h
+++ /dev/null
@@ -1,179 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkExecutionScheduler.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright (c) 2008, 2009 by SCI Institute, University of Utah.
-
-  This is part of the Parallel Dataflow System originally developed by
-  Huy T. Vo and Claudio T. Silva. For more information, see:
-
-  "Parallel Dataflow Scheme for Streaming (Un)Structured Data" by Huy
-  T. Vo, Daniel K. Osmari, Brian Summa, Joao L.D. Comba, Valerio
-  Pascucci and Claudio T. Silva, SCI Institute, University of Utah,
-  Technical Report #UUSCI-2009-004, 2009.
-
-  "Multi-Threaded Streaming Pipeline For VTK" by Huy T. Vo and Claudio
-  T. Silva, SCI Institute, University of Utah, Technical Report
-  #UUSCI-2009-005, 2009.
--------------------------------------------------------------------------*/
-// .NAME vtkExecutionScheduler - Scheduling execution with
-// thread/computing resources distributing
-// .SECTION Description
-// This is a class for balancing the computing resources throughout
-// the network
-
-// .SECTION See Also
-// vtkComputingResources vtkThreadedStreamingPipeline
-
-#ifndef __vtkExecutionScheduler_h
-#define __vtkExecutionScheduler_h
-
-#include "vtkCommonExecutionModelModule.h" // For export macro
-#include "vtkObject.h"
-#include "vtkExecutionSchedulerManager.h" // For singleton instantiation/cleanup
-
-class vtkExecutive;
-class vtkComputingResources;
-class vtkMultiThreader;
-class vtkMutexLock;
-class vtkThreadMessager;
-class vtkInformation;
-class vtkInformationIntegerKey;
-class vtkExecutiveCollection;
-
-class VTKCOMMONEXECUTIONMODEL_EXPORT vtkExecutionScheduler : public vtkObject
-{
-public:
-  static vtkExecutionScheduler* New();
-  vtkTypeMacro(vtkExecutionScheduler,vtkObject);
-  void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Description:
-  // Return the global instance of the scheduler
-  static vtkExecutionScheduler *GetGlobalScheduler();
-
- // Description:
-  // Key to store the priority of a task
-  static vtkInformationIntegerKey* TASK_PRIORITY();
-
-  // Description:
-  // Put the current set of executives (modules) to the be scheduled given its
-  // dependency graph which will be used to compute the set
-  // topological orders
-  void Schedule(vtkExecutiveCollection *execs, vtkInformation *info);
-
-  // Description:
-  // Put the current set of executives (modules) to the be scheduled
-  // given its dependency graph which will be used to compute the set
-  // topological orders. Then wait for their execution to be complete
-  void SchedulePropagate(vtkExecutiveCollection *execs, vtkInformation *info);
-
-  // Description:
-  // Wait until the current set of executives (modules) have finished executing
-  void WaitUntilDone(vtkExecutiveCollection *execs);
-
-  // Description:
-  // Wait until the current set of executives (modules) have their inputs released
-  void WaitUntilReleased(vtkExecutiveCollection *execs);
-
-  // Description:
-  // Wait for all tasks to be done
-  void WaitUntilAllDone();
-
-  // Description:
-  // Wait for a task that is on the scheduling queue to be done. If
-  // the task is not there, this will return immediately. If the exec
-  // is NULL, any task that is done will trigger this the return
-  void WaitForTaskDone(vtkExecutive *exec);
-
-  // Description:
-  // Similar to WaitForTaskDone but return whenever input connections
-  // of a task are released instead of done computing. But exec cannot
-  // be NULL.
-  void WaitForInputsReleased(vtkExecutive *exec);
-
-  // Description:
-  // Return the thread messager reserved for the given exec to notify
-  // when it is done
-  vtkThreadMessager* GetTaskDoneMessager(vtkExecutive *exec);
-
-  // Description:
-  // Return the thread messager reserved for the given exec to notify
-  // when it releases its inputs
-  vtkThreadMessager* GetInputsReleasedMessager(vtkExecutive *exec);
-
-  // Description:
-  // Return the mutex lock reserved for the given exec to notify
-  // when it releases its inputs
-  vtkMutexLock* GetInputsReleasedLock(vtkExecutive *exec);
-
-  // Description:
-  // Release the resources that are being used by the given exec
-  void ReleaseResources(vtkExecutive *exec);
-
-  // Description:
-  // Re-acquire the resource released earlier by ReleaseResource
-  void ReacquireResources(vtkExecutive *exec);
-
-  // Description:
-  // Redistribute the thread resources over the network from a sink
-  // with a maximum resource
-  void RescheduleNetwork(vtkExecutive *sink);
-
-  // Description:
-  // Redistribute the thread resources from a sink given a certain
-  // amount of resource
-  void RescheduleFrom(vtkExecutive *sink, vtkComputingResources *resources);
-
-protected:
-  vtkExecutionScheduler();
-  ~vtkExecutionScheduler();
-
-  vtkComputingResources       *Resources;
-  vtkThreadMessager           *ScheduleMessager;
-  vtkThreadMessager           *ResourceMessager;
-  vtkMutexLock                *ScheduleLock;
-  vtkMultiThreader            *ScheduleThreader;
-  int                          ScheduleThreadId;
-
-//BTX
-  class implementation;
-  implementation* const Implementation;
-  friend class implementation;
-
-  // Description:
-  // The scheduling thread that is responsible for queueing up module
-  // execution in the right order
-  friend void * vtkExecutionScheduler_ScheduleThread(void *data);
-
-  // Description:
-  // Execute thread function that is responsible for forking process
-  // for each module
-  friend void * vtkExecutionScheduler_ExecuteThread(void *data);
-
-//ETX
-
-  // Description: Functions and friend class to take care of initialization
-  // and clean up of the vtkExecutionScheduler singleton.
-  static void ClassInitialize();
-  static void ClassFinalize();
-  friend class vtkExecutionSchedulerManager;
-
-private:
-  vtkExecutionScheduler(const vtkExecutionScheduler&);  // Not implemented.
-  void operator=(const vtkExecutionScheduler&);  // Not implemented.
-
-};
-
-#endif
diff --git a/Common/ExecutionModel/vtkExecutionSchedulerManager.cxx b/Common/ExecutionModel/vtkExecutionSchedulerManager.cxx
deleted file mode 100644
index d1031ce..0000000
--- a/Common/ExecutionModel/vtkExecutionSchedulerManager.cxx
+++ /dev/null
@@ -1,35 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkExecutionSchedulerManager.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkExecutionSchedulerManager.h"
-#include "vtkExecutionScheduler.h"
-
-// Must NOT be initialized.  Default initialization to zero is required.
-unsigned int vtkExecutionSchedulerManager::Count;
-
-vtkExecutionSchedulerManager::vtkExecutionSchedulerManager()
-{
-if(++vtkExecutionSchedulerManager::Count == 1)
-    {
-    vtkExecutionScheduler::ClassInitialize();
-    }
-}
-
-vtkExecutionSchedulerManager::~vtkExecutionSchedulerManager()
-{
-if(--vtkExecutionSchedulerManager::Count == 0)
-    {
-    vtkExecutionScheduler::ClassFinalize();
-    }
-}
diff --git a/Common/ExecutionModel/vtkExecutionSchedulerManager.h b/Common/ExecutionModel/vtkExecutionSchedulerManager.h
deleted file mode 100644
index ea71687..0000000
--- a/Common/ExecutionModel/vtkExecutionSchedulerManager.h
+++ /dev/null
@@ -1,49 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkExecutionSchedulerManager.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkExecutionSchedulerManager - Manage the instantiation and deletion
-// of the vtkExecutionScheduler singleton.
-//
-// vtkExecutionSchedulerManager is included in the header of
-// vtkExecutionScheduler, or any of its subclasses.
-// It makes sure that the singleton is created after the vtkDebugLeaks singleton,
-// and destroyed before vtkDebugLeaks is cleaned up.
-
-// .Section See Also
-// vtkExecutionScheduler
-
-#ifndef __vtkExecutionSchedulerManager_h
-#define __vtkExecutionSchedulerManager_h
-
-#include "vtkCommonExecutionModelModule.h" // For export macro
-#include "vtkSystemIncludes.h"
-#include "vtkDebugLeaksManager.h" // DebugLeaks exists longer than us.
-
-class VTKCOMMONEXECUTIONMODEL_EXPORT vtkExecutionSchedulerManager
-{
-public:
-  vtkExecutionSchedulerManager();
-  ~vtkExecutionSchedulerManager();
-private:
-  static unsigned int Count;
-};
-
-// Description: This instance will show up in any translation unit that uses
-// vtkExecutionScheduler, or that has a singleton.  It will make sure
-// that it is initialized before it is used, and is one of the last
-// static objects to be destroyed.
-static vtkExecutionSchedulerManager vtkExecutionSchedulerManagerInstance;
-
-#endif
-// VTK-HeaderTest-Exclude: vtkExecutionSchedulerManager.h
diff --git a/Common/ExecutionModel/vtkExecutiveCollection.cxx b/Common/ExecutionModel/vtkExecutiveCollection.cxx
deleted file mode 100644
index d9dfb39..0000000
--- a/Common/ExecutionModel/vtkExecutiveCollection.cxx
+++ /dev/null
@@ -1,40 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkExecutiveCollection.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright (c) 2008, 2009 by SCI Institute, University of Utah.
-
-  This is part of the Parallel Dataflow System originally developed by
-  Huy T. Vo and Claudio T. Silva. For more information, see:
-
-  "Parallel Dataflow Scheme for Streaming (Un)Structured Data" by Huy
-  T. Vo, Daniel K. Osmari, Brian Summa, Joao L.D. Comba, Valerio
-  Pascucci and Claudio T. Silva, SCI Institute, University of Utah,
-  Technical Report #UUSCI-2009-004, 2009.
-
-  "Multi-Threaded Streaming Pipeline For VTK" by Huy T. Vo and Claudio
-  T. Silva, SCI Institute, University of Utah, Technical Report
-  #UUSCI-2009-005, 2009.
--------------------------------------------------------------------------*/
-
-#include "vtkExecutiveCollection.h"
-#include "vtkObjectFactory.h"
-
-vtkStandardNewMacro(vtkExecutiveCollection);
-
-//----------------------------------------------------------------------------
-void vtkExecutiveCollection::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os,indent);
-}
diff --git a/Common/ExecutionModel/vtkExecutiveCollection.h b/Common/ExecutionModel/vtkExecutiveCollection.h
deleted file mode 100644
index 1c8d76f..0000000
--- a/Common/ExecutionModel/vtkExecutiveCollection.h
+++ /dev/null
@@ -1,94 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkExecutiveCollection.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright (c) 2008, 2009 by SCI Institute, University of Utah.
-
-  This is part of the Parallel Dataflow System originally developed by
-  Huy T. Vo and Claudio T. Silva. For more information, see:
-
-  "Parallel Dataflow Scheme for Streaming (Un)Structured Data" by Huy
-  T. Vo, Daniel K. Osmari, Brian Summa, Joao L.D. Comba, Valerio
-  Pascucci and Claudio T. Silva, SCI Institute, University of Utah,
-  Technical Report #UUSCI-2009-004, 2009.
-
-  "Multi-Threaded Streaming Pipeline For VTK" by Huy T. Vo and Claudio
-  T. Silva, SCI Institute, University of Utah, Technical Report
-  #UUSCI-2009-005, 2009.
--------------------------------------------------------------------------*/
-// .NAME vtkExecutiveCollection - maintain a list of executive objects
-// .SECTION Description
-// vtkExecutiveCollection is an object that creates and manipulates lists of
-// objects that are (inherited from) vtkExecutives.
-
-// .SECTION See Also
-// vtkExecutive vtkCollection
-
-#ifndef __vtkExecutiveCollection_h
-#define __vtkExecutiveCollection_h
-
-#include "vtkCommonExecutionModelModule.h" // For export macro
-#include "vtkCollection.h"
-
-#include "vtkExecutive.h" // Needed for static cast
-
-class VTKCOMMONEXECUTIONMODEL_EXPORT vtkExecutiveCollection : public vtkCollection
-{
-public:
-  static vtkExecutiveCollection *New();
-  vtkTypeMacro(vtkExecutiveCollection,vtkCollection);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Add an executive to the list.
-  void AddItem(vtkExecutive *exec)
-  {
-    this->vtkCollection::AddItem(exec);
-  }
-
-  // Description:
-  // Get the next executive in the list.
-  vtkExecutive *GetNextItem()
-  {
-    return static_cast<vtkExecutive *>(this->GetNextItemAsObject());
-  }
-
-  //BTX
-  // Description:
-  // Reentrant safe way to get an object in a collection. Just pass the
-  // same cookie back and forth.
-  vtkExecutive *GetNextExecutive(vtkCollectionSimpleIterator &cookie)
-  {
-    return static_cast<vtkExecutive *>(this->GetNextItemAsObject(cookie));
-  }
-  //ETX
-
-protected:
-  vtkExecutiveCollection() {}
-  ~vtkExecutiveCollection() {}
-
-private:
-  // hide the standard AddItem from the user and the compiler.
-  void AddItem(vtkObject *o)
-  {
-    this->vtkCollection::AddItem(o);
-  }
-
-private:
-  vtkExecutiveCollection(const vtkExecutiveCollection&);  // Not implemented.
-  void operator=(const vtkExecutiveCollection&);  // Not implemented.
-};
-
-
-#endif
diff --git a/Common/ExecutionModel/vtkImageInPlaceFilter.cxx b/Common/ExecutionModel/vtkImageInPlaceFilter.cxx
index b2b32f7..577ba6c 100644
--- a/Common/ExecutionModel/vtkImageInPlaceFilter.cxx
+++ b/Common/ExecutionModel/vtkImageInPlaceFilter.cxx
@@ -108,7 +108,7 @@ void vtkImageInPlaceFilter::CopyData(vtkImageData *inData,
   inIncZ *= size;
   outIncZ *= size;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     for (idxY = 0; idxY <= maxY; idxY++)
diff --git a/Common/ExecutionModel/vtkImageToStructuredPoints.cxx b/Common/ExecutionModel/vtkImageToStructuredPoints.cxx
index 25d585d..b41caea 100644
--- a/Common/ExecutionModel/vtkImageToStructuredPoints.cxx
+++ b/Common/ExecutionModel/vtkImageToStructuredPoints.cxx
@@ -202,7 +202,7 @@ int vtkImageToStructuredPoints::RequestData(
       int numComp = vData->GetNumberOfScalarComponents();
       int idx = 0;
 
-      // Loop through ouput pixels
+      // Loop through output pixels
       for (idxZ = 0; idxZ <= maxZ; idxZ++)
         {
         for (idxY = 0; idxY <= maxY; idxY++)
diff --git a/Common/ExecutionModel/vtkMultiBlockDataSetAlgorithm.h b/Common/ExecutionModel/vtkMultiBlockDataSetAlgorithm.h
index 2acee54..bec0a6e 100644
--- a/Common/ExecutionModel/vtkMultiBlockDataSetAlgorithm.h
+++ b/Common/ExecutionModel/vtkMultiBlockDataSetAlgorithm.h
@@ -54,7 +54,7 @@ public:
 
 protected:
   vtkMultiBlockDataSetAlgorithm();
-  ~vtkMultiBlockDataSetAlgorithm() {};
+  ~vtkMultiBlockDataSetAlgorithm() {}
 
   // Description:
   // This is called by the superclass.
diff --git a/Common/ExecutionModel/vtkPassInputTypeAlgorithm.h b/Common/ExecutionModel/vtkPassInputTypeAlgorithm.h
index c2ee2ff..329b716 100644
--- a/Common/ExecutionModel/vtkPassInputTypeAlgorithm.h
+++ b/Common/ExecutionModel/vtkPassInputTypeAlgorithm.h
@@ -117,7 +117,7 @@ public:
 
 protected:
   vtkPassInputTypeAlgorithm();
-  ~vtkPassInputTypeAlgorithm() {};
+  ~vtkPassInputTypeAlgorithm() {}
 
   // Description:
   // This is called within ProcessRequest when a request asks for
diff --git a/Common/ExecutionModel/vtkPointSetAlgorithm.h b/Common/ExecutionModel/vtkPointSetAlgorithm.h
index c24fb4a..092f6ca 100644
--- a/Common/ExecutionModel/vtkPointSetAlgorithm.h
+++ b/Common/ExecutionModel/vtkPointSetAlgorithm.h
@@ -92,7 +92,7 @@ public:
 
 protected:
   vtkPointSetAlgorithm();
-  ~vtkPointSetAlgorithm() {};
+  ~vtkPointSetAlgorithm() {}
 
   // Description:
   // This is called by the superclass.
diff --git a/Common/ExecutionModel/vtkProgressObserver.cxx b/Common/ExecutionModel/vtkProgressObserver.cxx
new file mode 100644
index 0000000..35737ab
--- /dev/null
+++ b/Common/ExecutionModel/vtkProgressObserver.cxx
@@ -0,0 +1,39 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkProgressObserver.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkProgressObserver.h"
+
+#include "vtkCommand.h"
+#include "vtkObjectFactory.h"
+
+vtkStandardNewMacro(vtkProgressObserver);
+
+vtkProgressObserver::vtkProgressObserver() : Progress(0.0)
+{
+}
+
+vtkProgressObserver::~vtkProgressObserver()
+{
+}
+
+void vtkProgressObserver::UpdateProgress(double amount)
+{
+  this->Progress = amount;
+  this->InvokeEvent(vtkCommand::ProgressEvent,static_cast<void *>(&amount));
+}
+
+void vtkProgressObserver::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
diff --git a/Common/ExecutionModel/vtkProgressObserver.h b/Common/ExecutionModel/vtkProgressObserver.h
new file mode 100644
index 0000000..ad1a82e
--- /dev/null
+++ b/Common/ExecutionModel/vtkProgressObserver.h
@@ -0,0 +1,60 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkProgressObserver.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkProgressObserver - Basic class to optionally replace vtkAlgorithm progress functionality.
+// .SECTION Description
+// When the basic functionality in vtkAlgorithm that reports progress is
+// not enough, a subclass of vtkProgressObserver can be used to provide
+// custom functionality.
+// The main use case for this is when an algorithm's RequestData() is
+// called from multiple threads in parallel - the basic functionality in
+// vtkAlgorithm is not thread safe. vtkSMPProgressObserver can
+// handle this situation by routing progress from each thread to a
+// thread local vtkProgressObserver, which will invoke events separately
+// for each thread.
+
+#ifndef __vtkProgressObserver_h
+#define __vtkProgressObserver_h
+
+#include "vtkCommonExecutionModelModule.h" // For export macro
+#include "vtkObject.h"
+
+class VTKCOMMONEXECUTIONMODEL_EXPORT vtkProgressObserver : public vtkObject
+{
+public:
+  static vtkProgressObserver *New();
+  vtkTypeMacro(vtkProgressObserver,vtkObject);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // The default behavior is to update the Progress data member
+  // and invoke a ProgressEvent. This is designed to be overridden.
+  virtual void UpdateProgress(double amount);
+
+  // Description:
+  // Returns the progress reported by the algorithm.
+  vtkGetMacro(Progress, double);
+
+protected:
+  vtkProgressObserver();
+  ~vtkProgressObserver();
+
+  double Progress;
+
+private:
+  vtkProgressObserver(const vtkProgressObserver&);  // Not implemented.
+  void operator=(const vtkProgressObserver&);  // Not implemented.
+};
+
+#endif
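
As a quick usage sketch of the new observer (illustrative only, not part of the imported sources; vtkContourFilter and the callback name are assumed stand-ins for any configured vtkAlgorithm and handler), progress reported by the filter is routed to the observer, which then fires vtkCommand::ProgressEvent:

// Sketch: listen for progress through a vtkProgressObserver.
#include <iostream>

#include "vtkCallbackCommand.h"
#include "vtkCommand.h"
#include "vtkContourFilter.h"
#include "vtkNew.h"
#include "vtkProgressObserver.h"

static void OnProgress(vtkObject* caller, unsigned long, void*, void*)
{
  vtkProgressObserver* po = vtkProgressObserver::SafeDownCast(caller);
  std::cerr << "progress: " << po->GetProgress() << std::endl;
}

void TrackProgress(vtkContourFilter* contour)
{
  vtkNew<vtkProgressObserver> po;
  vtkNew<vtkCallbackCommand> cb;
  cb->SetCallback(OnProgress);
  po->AddObserver(vtkCommand::ProgressEvent, cb.GetPointer());

  // Route the filter's progress reports through the observer instead of
  // the default vtkAlgorithm event path.
  contour->SetProgressObserver(po.GetPointer());
  contour->Update();
}
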
diff --git a/Common/ExecutionModel/vtkSMPProgressObserver.cxx b/Common/ExecutionModel/vtkSMPProgressObserver.cxx
new file mode 100644
index 0000000..d35d393
--- /dev/null
+++ b/Common/ExecutionModel/vtkSMPProgressObserver.cxx
@@ -0,0 +1,39 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPProgressObserver.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSMPProgressObserver.h"
+
+#include "vtkCommand.h"
+#include "vtkObjectFactory.h"
+
+vtkStandardNewMacro(vtkSMPProgressObserver);
+
+vtkSMPProgressObserver::vtkSMPProgressObserver()
+{
+}
+
+vtkSMPProgressObserver::~vtkSMPProgressObserver()
+{
+}
+
+void vtkSMPProgressObserver::UpdateProgress(double progress)
+{
+  vtkProgressObserver* observer = this->Observers.Local();
+  observer->UpdateProgress(progress);
+}
+
+void vtkSMPProgressObserver::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
diff --git a/Common/ExecutionModel/vtkSMPProgressObserver.h b/Common/ExecutionModel/vtkSMPProgressObserver.h
new file mode 100644
index 0000000..f46d200
--- /dev/null
+++ b/Common/ExecutionModel/vtkSMPProgressObserver.h
@@ -0,0 +1,62 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPProgressObserver.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPProgressObserver - Progress observer that is thread safe
+// .SECTION Description
+// vtkSMPProgressObserver is designed to handle progress events coming
+// from an algorithm in a thread safe way. It does this by using
+// thread local objects that it updates. To receive the progress
+// information, one has to listen to the local observer in the same
+// thread. Since the execution will be somewhat load balanced,
+// it may be enough to do this only on the main thread.
+
+#ifndef __vtkSMPProgressObserver_h
+#define __vtkSMPProgressObserver_h
+
+#include "vtkCommonExecutionModelModule.h" // For export macro
+#include "vtkProgressObserver.h"
+#include "vtkSMPThreadLocalObject.h" // For thread local observers.
+
+class VTKCOMMONEXECUTIONMODEL_EXPORT vtkSMPProgressObserver : public vtkProgressObserver
+{
+public:
+  static vtkSMPProgressObserver *New();
+  vtkTypeMacro(vtkSMPProgressObserver,vtkProgressObserver);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Passes the progress event to a thread local ProgressObserver
+  // instance.
+  void UpdateProgress(double amount);
+
+  // Description:
+  // Returns the progress observer local to the thread it was
+  // called from.
+  vtkProgressObserver* GetLocalObserver()
+  {
+    return this->Observers.Local();
+  }
+
+protected:
+  vtkSMPProgressObserver();
+  ~vtkSMPProgressObserver();
+
+  vtkSMPThreadLocalObject<vtkProgressObserver> Observers;
+
+private:
+  vtkSMPProgressObserver(const vtkSMPProgressObserver&);  // Not implemented.
+  void operator=(const vtkSMPProgressObserver&);  // Not implemented.
+};
+
+#endif
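
For the multithreaded case the header above suggests listening on a single thread's local observer; a sketch under the same assumptions (someThreadedFilter is a hypothetical vtkAlgorithm whose RequestData reports progress from vtkSMPTools workers, progressCallback is a vtkCallbackCommand set up as in the previous sketch):

#include "vtkAlgorithm.h"
#include "vtkCallbackCommand.h"
#include "vtkCommand.h"
#include "vtkNew.h"
#include "vtkSMPProgressObserver.h"

void TrackThreadedProgress(vtkAlgorithm* someThreadedFilter,
                           vtkCallbackCommand* progressCallback)
{
  vtkNew<vtkSMPProgressObserver> smpObserver;
  someThreadedFilter->SetProgressObserver(smpObserver.GetPointer());

  // Progress reported from a worker lands in that worker's thread-local
  // vtkProgressObserver; here we only observe the calling (main) thread.
  smpObserver->GetLocalObserver()->AddObserver(vtkCommand::ProgressEvent,
                                               progressCallback);
  someThreadedFilter->Update();
}
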
diff --git a/Common/ExecutionModel/vtkSimpleScalarTree.cxx b/Common/ExecutionModel/vtkSimpleScalarTree.cxx
index c9a20ac..40a0d4b 100644
--- a/Common/ExecutionModel/vtkSimpleScalarTree.cxx
+++ b/Common/ExecutionModel/vtkSimpleScalarTree.cxx
@@ -47,19 +47,13 @@ vtkSimpleScalarTree::vtkSimpleScalarTree()
 
 vtkSimpleScalarTree::~vtkSimpleScalarTree()
 {
-  if ( this->Tree )
-    {
-    delete [] this->Tree;
-    }
+  delete [] this->Tree;
 }
 
 // Initialize locator. Frees memory and resets object as appropriate.
 void vtkSimpleScalarTree::Initialize()
 {
-  if ( this->Tree )
-    {
-    delete [] this->Tree;
-    }
+  delete [] this->Tree;
   this->Tree = NULL;
 }
 
diff --git a/Common/ExecutionModel/vtkStreamingDemandDrivenPipeline.cxx b/Common/ExecutionModel/vtkStreamingDemandDrivenPipeline.cxx
index b7357d1..055aa1d 100644
--- a/Common/ExecutionModel/vtkStreamingDemandDrivenPipeline.cxx
+++ b/Common/ExecutionModel/vtkStreamingDemandDrivenPipeline.cxx
@@ -1233,12 +1233,18 @@ vtkStreamingDemandDrivenPipeline
   // not in an execute continuation.
   if(request->Get(CONTINUE_EXECUTING()))
     {
-    this->ContinueExecuting = 1;
+    if (!this->ContinueExecuting)
+      {
+      this->ContinueExecuting = 1;
+      }
     this->Update(request->Get(FROM_OUTPUT_PORT()));
     }
   else
     {
-    this->ContinueExecuting = 0;
+    if (this->ContinueExecuting)
+      {
+      this->ContinueExecuting = 0;
+      }
     this->Superclass::ExecuteDataEnd(request,inInfoVec,outInfoVec);
     }
 }
diff --git a/Common/ExecutionModel/vtkTableExtentTranslator.cxx b/Common/ExecutionModel/vtkTableExtentTranslator.cxx
index 6e6ac98..7c21586 100644
--- a/Common/ExecutionModel/vtkTableExtentTranslator.cxx
+++ b/Common/ExecutionModel/vtkTableExtentTranslator.cxx
@@ -108,16 +108,10 @@ void vtkTableExtentTranslator::SetNumberOfPiecesInTable(int pieces)
   this->NumberOfPiecesInTable = pieces;
 
   // Clean out any old extent table.
-  if(this->ExtentTable)
-    {
-    delete [] this->ExtentTable;
-    this->ExtentTable = 0;
-    }
-  if(this->PieceAvailable)
-    {
-    delete [] this->PieceAvailable;
-    this->PieceAvailable = 0;
-    }
+  delete [] this->ExtentTable;
+  this->ExtentTable = 0;
+  delete [] this->PieceAvailable;
+  this->PieceAvailable = 0;
 
   // Create and initialize a new extent table if there are any pieces.
   // Assume all pieces are available.
diff --git a/Common/ExecutionModel/vtkThreadedCompositeDataPipeline.cxx b/Common/ExecutionModel/vtkThreadedCompositeDataPipeline.cxx
new file mode 100644
index 0000000..23a4ecb
--- /dev/null
+++ b/Common/ExecutionModel/vtkThreadedCompositeDataPipeline.cxx
@@ -0,0 +1,323 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkThreadedCompositeDataPipeline.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkThreadedCompositeDataPipeline.h"
+
+#include "vtkAlgorithm.h"
+#include "vtkAlgorithmOutput.h"
+#include "vtkExecutive.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkInformationExecutivePortKey.h"
+#include "vtkInformationExecutivePortVectorKey.h"
+#include "vtkInformationIntegerKey.h"
+#include "vtkInformationObjectBaseKey.h"
+#include "vtkInformationRequestKey.h"
+#include "vtkInformationVector.h"
+
+#include "vtkMultiThreader.h"
+#include "vtkCompositeDataIterator.h"
+#include "vtkCompositeDataSet.h"
+#include "vtkTimerLog.h"
+#include "vtkNew.h"
+#include "vtkSmartPointer.h"
+#include "vtkObjectFactory.h"
+#include "vtkDebugLeaks.h"
+#include "vtkImageData.h"
+
+#include "vtkSMPThreadLocal.h"
+#include "vtkSMPThreadLocalObject.h"
+#include "vtkSMPTools.h"
+#include "vtkSMPProgressObserver.h"
+
+#include <vector>
+#include <assert.h>
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkThreadedCompositeDataPipeline);
+
+//----------------------------------------------------------------------------
+namespace
+{
+  static vtkInformationVector** Clone(vtkInformationVector** src, int n)
+  {
+    vtkInformationVector** dst = new vtkInformationVector*[n];
+    for(int i=0; i<n; ++i)
+      {
+      dst[i] = vtkInformationVector::New();
+      dst[i]->Copy(src[i],1);
+      }
+    return dst;
+  }
+  static void DeleteAll(vtkInformationVector** dst, int n)
+  {
+    for(int i=0; i<n; ++i)
+      {
+      dst[i]->Delete();
+      }
+    delete []dst;
+  }
+};
+
+//----------------------------------------------------------------------------
+class ProcessBlockData: public vtkObjectBase
+{
+public:
+  vtkTypeMacro(ProcessBlockData, vtkObjectBase);
+  vtkInformationVector** In;
+  vtkInformationVector* Out;
+  int InSize;
+
+  static ProcessBlockData* New()
+  {
+    // This is required every time we implement our own New() to avoid the
+    // "Deleting unknown object" warning from vtkDebugLeaks.
+#ifdef VTK_DEBUG_LEAKS
+    vtkDebugLeaks::ConstructClass("ProcessBlockData");
+#endif
+    return new ProcessBlockData();
+  }
+
+  void Construct(vtkInformationVector** inInfoVec,
+                 int inInfoVecSize,
+                 vtkInformationVector* outInfoVec)
+  {
+    this->InSize  = inInfoVecSize;
+    this->In = Clone(inInfoVec, inInfoVecSize);
+    this->Out = vtkInformationVector::New();
+    this->Out->Copy(outInfoVec,1);
+  }
+
+  ~ProcessBlockData()
+  {
+    DeleteAll(this->In, this->InSize);
+    this->Out->Delete();
+  }
+
+protected:
+  ProcessBlockData():
+    In(NULL),
+    Out(NULL)
+  {
+
+  }
+};
+//----------------------------------------------------------------------------
+class ProcessBlock
+{
+public:
+  ProcessBlock(vtkThreadedCompositeDataPipeline* exec,
+               vtkInformationVector** inInfoVec,
+               vtkInformationVector* outInfoVec,
+               int compositePort,
+               int connection,
+               vtkInformation* request,
+               const std::vector<vtkDataObject*>& inObjs,
+               std::vector<vtkDataObject*>& outObjs)
+    : Exec(exec),
+      InInfoVec(inInfoVec),
+      OutInfoVec(outInfoVec),
+      CompositePort(compositePort),
+      Connection(connection),
+      Request(request),
+      InObjs(inObjs)
+  {
+    int numInputPorts = this->Exec->GetNumberOfInputPorts();
+    this->OutObjs = &outObjs[0];
+    this->InfoPrototype = vtkSmartPointer<ProcessBlockData>::New();
+    this->InfoPrototype->Construct(this->InInfoVec, numInputPorts, this->OutInfoVec);
+  }
+
+  ~ProcessBlock()
+  {
+    vtkSMPThreadLocal<vtkInformationVector**>::iterator itr1 =
+      this->InInfoVecs.begin();
+    vtkSMPThreadLocal<vtkInformationVector**>::iterator end1 =
+      this->InInfoVecs.end();
+    while (itr1 != end1)
+      {
+      DeleteAll(*itr1, this->InfoPrototype->InSize);
+      ++itr1;
+      }
+
+    vtkSMPThreadLocal<vtkInformationVector*>::iterator itr2 =
+      this->OutInfoVecs.begin();
+    vtkSMPThreadLocal<vtkInformationVector*>::iterator end2 =
+      this->OutInfoVecs.end();
+    while (itr2 != end2)
+      {
+      (*itr2)->Delete();
+      ++itr2;
+      }
+  }
+
+  void Initialize()
+  {
+    vtkInformationVector**& inInfoVec = this->InInfoVecs.Local();
+    vtkInformationVector*& outInfoVec = this->OutInfoVecs.Local();
+
+    inInfoVec = Clone(this->InfoPrototype->In, this->InfoPrototype->InSize);
+    outInfoVec = vtkInformationVector::New();
+    outInfoVec->Copy(this->InfoPrototype->Out, 1);
+
+    vtkInformation*& request = this->Requests.Local();
+    request->Copy(this->Request, 1);
+
+  }
+
+  void operator() (vtkIdType begin, vtkIdType end)
+  {
+    vtkInformationVector** inInfoVec = this->InInfoVecs.Local();
+    vtkInformationVector* outInfoVec = this->OutInfoVecs.Local();
+    vtkInformation* request = this->Requests.Local();
+
+    vtkInformation* inInfo = inInfoVec[this->CompositePort]->GetInformationObject(this->Connection);
+    vtkInformation* outInfo = outInfoVec->GetInformationObject(0);
+
+    for(vtkIdType i= begin; i<end; ++i)
+      {
+      vtkDataObject* outObj =
+        this->Exec->ExecuteSimpleAlgorithmForBlock(&inInfoVec[0],
+                                                   outInfoVec,
+                                                   inInfo,
+                                                   outInfo,
+                                                   request,
+                                                   this->InObjs[i]);
+      this->OutObjs[i] = outObj;
+      }
+  }
+
+  void Reduce()
+  {
+  }
+
+protected:
+  vtkThreadedCompositeDataPipeline* Exec;
+  vtkInformationVector** InInfoVec;
+  vtkInformationVector* OutInfoVec;
+  vtkSmartPointer<ProcessBlockData> InfoPrototype;
+  int CompositePort;
+  int Connection;
+  vtkInformation* Request;
+  const std::vector<vtkDataObject*>& InObjs;
+  vtkDataObject** OutObjs;
+
+  vtkSMPThreadLocal<vtkInformationVector**> InInfoVecs;
+  vtkSMPThreadLocal<vtkInformationVector*> OutInfoVecs;
+  vtkSMPThreadLocalObject<vtkInformation> Requests;
+};
+
+
+//----------------------------------------------------------------------------
+vtkThreadedCompositeDataPipeline::vtkThreadedCompositeDataPipeline()
+{
+}
+
+//----------------------------------------------------------------------------
+vtkThreadedCompositeDataPipeline::~vtkThreadedCompositeDataPipeline()
+{
+}
+
+//-------------------------------------------------------------------------
+void vtkThreadedCompositeDataPipeline::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+}
+
+//-------------------------------------------------------------------------
+void vtkThreadedCompositeDataPipeline::ExecuteEach(vtkCompositeDataIterator* iter,
+                                                   vtkInformationVector** inInfoVec,
+                                                   vtkInformationVector* outInfoVec,
+                                                   int compositePort,
+                                                   int connection,
+                                                   vtkInformation* request,
+                                                   vtkCompositeDataSet* compositeOutput)
+{
+  // from input data objects  itr -> (inObjs, indices)
+  // inObjs are the non-null objects that we will loop over.
+  // indices map the input objects to inObjs
+  std::vector<vtkDataObject*> inObjs;
+  std::vector<int> indices;
+  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
+    {
+    vtkDataObject* dobj = iter->GetCurrentDataObject();
+    if (dobj)
+      {
+      inObjs.push_back(dobj);
+      indices.push_back(static_cast<int>(inObjs.size())-1);
+      }
+    else
+      {
+      indices.push_back(-1);
+      }
+    }
+
+  // instantiate outObjs, the output objects that will be created from inObjs
+  std::vector<vtkDataObject*> outObjs;
+  outObjs.resize(indices.size(),NULL);
+
+  // create the parallel task processBlock
+  ProcessBlock processBlock(this,
+                            inInfoVec,
+                            outInfoVec,
+                            compositePort,
+                            connection,
+                            request,
+                            inObjs,outObjs);
+
+  vtkSmartPointer<vtkProgressObserver> origPo(this->Algorithm->GetProgressObserver());
+  vtkNew<vtkSMPProgressObserver> po;
+  this->Algorithm->SetProgressObserver(po.GetPointer());
+  vtkSMPTools::For(0, inObjs.size(), processBlock);
+  this->Algorithm->SetProgressObserver(origPo);
+
+  int i =0;
+  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem(), i++)
+    {
+    int j = indices[i];
+    if(j>=0)
+      {
+      vtkDataObject* outObj = outObjs[j];
+      compositeOutput->SetDataSet(iter, outObj);
+      if(outObj)
+        {
+        outObj->FastDelete();
+        }
+      }
+    }
+}
+
+//----------------------------------------------------------------------------
+int vtkThreadedCompositeDataPipeline::CallAlgorithm(vtkInformation* request, int direction,
+                                                    vtkInformationVector** inInfo,
+                                                    vtkInformationVector* outInfo)
+{
+  // Copy default information in the direction of information flow.
+  this->CopyDefaultInformation(request, direction, inInfo, outInfo);
+
+  // Invoke the request on the algorithm.
+  int result = this->Algorithm->ProcessRequest(request, inInfo, outInfo);
+
+  // If the algorithm failed report it now.
+  if(!result)
+    {
+    vtkErrorMacro("Algorithm " << this->Algorithm->GetClassName()
+                  << "(" << this->Algorithm
+                  << ") returned failure for request: "
+                  << *request);
+    }
+
+  return result;
+}
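
ProcessBlock above follows the usual vtkSMPTools functor shape: per-thread state is created in Initialize(), consumed in operator()(begin, end), and merged in Reduce(). A minimal self-contained sketch of that same pattern (the input array and its length are assumed to come from elsewhere):

#include "vtkSMPThreadLocal.h"
#include "vtkSMPTools.h"

struct SumFunctor
{
  const double* Values;               // input array (assumed)
  double Total;                       // filled in by Reduce()
  vtkSMPThreadLocal<double> LocalSum;

  SumFunctor(const double* values) : Values(values), Total(0.0) {}

  // Called once per thread before any range is processed.
  void Initialize() { this->LocalSum.Local() = 0.0; }

  // Called in parallel over sub-ranges of [0, n).
  void operator()(vtkIdType begin, vtkIdType end)
  {
    double& sum = this->LocalSum.Local();
    for (vtkIdType i = begin; i < end; ++i)
      {
      sum += this->Values[i];
      }
  }

  // Called once after all ranges are done; merge the per-thread sums.
  void Reduce()
  {
    vtkSMPThreadLocal<double>::iterator it = this->LocalSum.begin();
    for (; it != this->LocalSum.end(); ++it)
      {
      this->Total += *it;
      }
  }
};

// Usage: SumFunctor f(values); vtkSMPTools::For(0, n, f); double total = f.Total;
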
diff --git a/Common/ExecutionModel/vtkThreadedCompositeDataPipeline.h b/Common/ExecutionModel/vtkThreadedCompositeDataPipeline.h
new file mode 100644
index 0000000..c1e201c
--- /dev/null
+++ b/Common/ExecutionModel/vtkThreadedCompositeDataPipeline.h
@@ -0,0 +1,65 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkThreadedCompositeDataPipeline.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+  This software is distributed WITHOUT ANY WARRANTY; without even
+  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+  PURPOSE.  See the above copyright notice for more information.
+
+  =========================================================================*/
+// .NAME vtkThreadedCompositeDataPipeline - Executive that works in parallel
+// .SECTION Description
+// vtkThreadedCompositeDataPipeline processes a composite data object in
+// parallel using the SMP framework. It does this by creating a vector of
+// data objects (the pieces of the composite data) and processing them
+// using vtkSMPTools::For. Note that this requires that the
+// algorithm implement all pipeline passes in a re-entrant way. It should
+// store/retrieve all state changes using input and output information
+// objects, which are unique to each thread.
+
+#ifndef __vtkThreadedCompositeDataPipeline_h
+#define __vtkThreadedCompositeDataPipeline_h
+
+#include "vtkCommonExecutionModelModule.h" // For export macro
+#include "vtkCompositeDataPipeline.h"
+
+class vtkInformationVector;
+class vtkInformation;
+
+class VTKCOMMONEXECUTIONMODEL_EXPORT vtkThreadedCompositeDataPipeline : public vtkCompositeDataPipeline
+{
+ public:
+  static vtkThreadedCompositeDataPipeline* New();
+  vtkTypeMacro(vtkThreadedCompositeDataPipeline,vtkCompositeDataPipeline);
+  void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // An API to CallAlgorithm that allows you to pass in the info objects to
+  // be used
+  virtual int CallAlgorithm(vtkInformation* request, int direction,
+                            vtkInformationVector** inInfo,
+                            vtkInformationVector* outInfo);
+
+ protected:
+  vtkThreadedCompositeDataPipeline();
+  ~vtkThreadedCompositeDataPipeline();
+  virtual void ExecuteEach(vtkCompositeDataIterator* iter,
+                           vtkInformationVector** inInfoVec,
+                           vtkInformationVector* outInfoVec,
+                           int compositePort,
+                           int connection,
+                           vtkInformation* request,
+                           vtkCompositeDataSet* compositeOutput);
+
+ private:
+  vtkThreadedCompositeDataPipeline(const vtkThreadedCompositeDataPipeline&);  // Not implemented.
+  void operator=(const vtkThreadedCompositeDataPipeline&);  // Not implemented.
+  friend class ProcessBlock;
+};
+
+#endif
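
A rough sketch of how this executive might be installed (reentrantFilter is hypothetical; as the class comment notes, its pipeline passes must be re-entrant and keep per-request state in the information objects):

#include "vtkAlgorithm.h"
#include "vtkNew.h"
#include "vtkThreadedCompositeDataPipeline.h"

void UseThreadedExecutive(vtkAlgorithm* reentrantFilter)
{
  // Replace the default executive so the leaves of a composite input are
  // handed to RequestData in parallel via vtkSMPTools::For.
  vtkNew<vtkThreadedCompositeDataPipeline> executive;
  reentrantFilter->SetExecutive(executive.GetPointer());
  reentrantFilter->Update();
}
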
diff --git a/Common/ExecutionModel/vtkThreadedImageAlgorithm.cxx b/Common/ExecutionModel/vtkThreadedImageAlgorithm.cxx
index b36f4c2..d68db2b 100644
--- a/Common/ExecutionModel/vtkThreadedImageAlgorithm.cxx
+++ b/Common/ExecutionModel/vtkThreadedImageAlgorithm.cxx
@@ -125,7 +125,7 @@ int vtkThreadedImageAlgorithm::SplitExtent(int splitExt[6],
 // the ThreadedExecute method after setting the correct
 // extent for this thread. It's just a pain to calculate
 // the correct extent.
-VTK_THREAD_RETURN_TYPE vtkThreadedImageAlgorithmThreadedExecute( void *arg )
+static VTK_THREAD_RETURN_TYPE vtkThreadedImageAlgorithmThreadedExecute( void *arg )
 {
   vtkImageThreadStruct *str;
   int ext[6], splitExt[6], total;
@@ -295,20 +295,10 @@ int vtkThreadedImageAlgorithm::RequestData(
   // free up the arrays
   for (i = 0; i < this->GetNumberOfInputPorts(); ++i)
     {
-    if (str.Inputs[i])
-      {
-      delete [] str.Inputs[i];
-      }
-    }
-  // note the check isn't required by C++ standard but due to bad compilers
-  if (str.Inputs)
-    {
-    delete [] str.Inputs;
-    }
-  if (str.Outputs)
-    {
-    delete [] str.Outputs;
+    delete [] str.Inputs[i];
     }
+  delete [] str.Inputs;
+  delete [] str.Outputs;
 
   return 1;
 }
diff --git a/Common/ExecutionModel/vtkThreadedStreamingPipeline.cxx b/Common/ExecutionModel/vtkThreadedStreamingPipeline.cxx
deleted file mode 100644
index d9eeec1..0000000
--- a/Common/ExecutionModel/vtkThreadedStreamingPipeline.cxx
+++ /dev/null
@@ -1,405 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkThreadedStreamingPipeline.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright (c) 2008, 2009 by SCI Institute, University of Utah.
-
-  This is part of the Parallel Dataflow System originally developed by
-  Huy T. Vo and Claudio T. Silva. For more information, see:
-
-  "Parallel Dataflow Scheme for Streaming (Un)Structured Data" by Huy
-  T. Vo, Daniel K. Osmari, Brian Summa, Joao L.D. Comba, Valerio
-  Pascucci and Claudio T. Silva, SCI Institute, University of Utah,
-  Technical Report #UUSCI-2009-004, 2009.
-
-  "Multi-Threaded Streaming Pipeline For VTK" by Huy T. Vo and Claudio
-  T. Silva, SCI Institute, University of Utah, Technical Report
-  #UUSCI-2009-005, 2009.
--------------------------------------------------------------------------*/
-#include "vtkThreadedStreamingPipeline.h"
-
-#include "vtkAlgorithm.h"
-#include "vtkAlgorithmOutput.h"
-#include "vtkComputingResources.h"
-#include "vtkExecutionScheduler.h"
-#include "vtkExecutive.h"
-#include "vtkExecutiveCollection.h"
-#include "vtkInformation.h"
-#include "vtkInformationVector.h"
-#include "vtkInformationExecutivePortKey.h"
-#include "vtkInformationExecutivePortVectorKey.h"
-#include "vtkInformationIntegerKey.h"
-#include "vtkInformationObjectBaseKey.h"
-#include "vtkInformationVector.h"
-#include "vtkMultiThreader.h"
-#include "vtkMutexLock.h"
-#include "vtkObjectFactory.h"
-#include "vtkThreadMessager.h"
-#include "vtkTimerLog.h"
-#include <vtksys/hash_set.hxx>
-
-//----------------------------------------------------------------------------
-vtkStandardNewMacro(vtkThreadedStreamingPipeline);
-
-vtkInformationKeyMacro(vtkThreadedStreamingPipeline, AUTO_PROPAGATE, Integer);
-vtkInformationKeyRestrictedMacro(vtkThreadedStreamingPipeline,
-                                 EXTRA_INFORMATION, ObjectBase,
-                                 "vtkInformation");
-
-//----------------------------------------------------------------------------
-// Convinient definitions of vector/set of vtkExecutive
-class vtkExecutiveHasher
-{
-public:
-  size_t operator()(const vtkExecutive* e) const
-  {
-    return (size_t)e;
-  }
-};
-typedef vtksys::hash_set<vtkExecutive*, vtkExecutiveHasher> vtkExecutiveSet;
-
-//----------------------------------------------------------------------------
-vtkThreadedStreamingPipeline::vtkThreadedStreamingPipeline()
-{
-  this->LastDataRequestTime = 0.0f;
-  this->LastDataRequestTimeFromSource = 0.0f;
-  this->ForceDataRequest = NULL;
-  this->Resources = NULL;
-  this->Scheduler = NULL;
-}
-
-//----------------------------------------------------------------------------
-vtkThreadedStreamingPipeline::~vtkThreadedStreamingPipeline()
-{
-  if (this->ForceDataRequest)
-    {
-    this->ForceDataRequest->Delete();
-    }
-  if (this->Resources)
-    {
-    this->Resources->Delete();
-    }
-  if (this->Scheduler)
-    {
-    this->Scheduler->Delete();
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
-
-//----------------------------------------------------------------------------
-static bool MultiThreadedEnabled = false;
-static bool AutoPropagatePush = false;
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::SetMultiThreadedEnabled(bool enabled)
-{
-  MultiThreadedEnabled = enabled;
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::SetAutoPropagatePush(bool enabled)
-{
-  AutoPropagatePush = enabled;
-}
-
-//----------------------------------------------------------------------------
-static void
-CollectUpstreamModules(vtkExecutive *exec, vtkExecutiveSet &eSet)
-{
-  for(int i = 0; i < exec->GetNumberOfInputPorts(); ++i)
-    {
-    int nic = exec->GetAlgorithm()->GetNumberOfInputConnections(i);
-    vtkInformationVector* inVector = exec->GetInputInformation()[i];
-    for(int j = 0; j < nic; ++j)
-      {
-      vtkInformation* inInfo = inVector->GetInformationObject(j);
-      vtkExecutive* e;
-      int producerPort;
-      vtkExecutive::PRODUCER()->Get(inInfo, e, producerPort);
-      if (eSet.find(e)!=eSet.end())
-        {
-        continue;
-        }
-      eSet.insert(e);
-      CollectUpstreamModules(e, eSet);
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Pull(vtkExecutive *exec)
-{
-  vtkThreadedStreamingPipeline::Pull(exec, static_cast<vtkInformation*>(NULL));
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Pull(vtkExecutive *exec, vtkInformation *info)
-{
-  vtkExecutiveCollection *execs = vtkExecutiveCollection::New();
-  execs->AddItem(exec);
-  vtkThreadedStreamingPipeline::Pull(execs, info);
-  execs->Delete();
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Pull(vtkExecutiveCollection *execs)
-{
-  vtkThreadedStreamingPipeline::Pull(execs, static_cast<vtkInformation*>(NULL));
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Pull(vtkExecutiveCollection *execs, vtkInformation *info)
-{
-  vtkExecutiveSet eSet;
-  execs->InitTraversal();
-  for (vtkExecutive *e = execs->GetNextItem(); e != 0; e = execs->GetNextItem())
-    {
-    eSet.insert(e);
-    CollectUpstreamModules(e, eSet);
-    }
-  vtkExecutiveCollection *collectedExecs = vtkExecutiveCollection::New();
-  for (vtkExecutiveSet::iterator ti = eSet.begin(); ti != eSet.end(); ti++)
-    {
-    collectedExecs->AddItem(*ti);
-    }
-  vtkExecutionScheduler::GetGlobalScheduler()->Schedule(collectedExecs, info);
-  vtkExecutionScheduler::GetGlobalScheduler()->WaitUntilDone(collectedExecs);
-  collectedExecs->Delete();
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Push(vtkExecutive *exec)
-{
-  vtkThreadedStreamingPipeline::Push(exec, static_cast<vtkInformation*>(NULL));
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Push(vtkExecutive *exec, vtkInformation *info)
-{
-  vtkExecutiveCollection *execs = vtkExecutiveCollection::New();
-  execs->AddItem(exec);
-  vtkThreadedStreamingPipeline::Push(execs, info);
-  execs->Delete();
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Push(vtkExecutiveCollection *execs)
-{
-  vtkThreadedStreamingPipeline::Push(execs, static_cast<vtkInformation*>(NULL));
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Push(vtkExecutiveCollection *execs, vtkInformation *info)
-{
-  vtkExecutiveSet eSet;
-  execs->InitTraversal();
-  for (vtkExecutive *e = execs->GetNextItem(); e != 0; e = execs->GetNextItem())
-    {
-    eSet.insert(e);
-    e->GetAlgorithm()->GetInformation()->Set(EXTRA_INFORMATION(), info);
-    }
-  if (AutoPropagatePush)
-    {
-    if (info==NULL)
-      {
-      info = vtkInformation::New();
-      }
-    info->Set(vtkThreadedStreamingPipeline::AUTO_PROPAGATE(), 1);
-    }
-  vtkExecutionScheduler::GetGlobalScheduler()->Schedule(execs, info);
-  fprintf(stderr, "OK 2 %d\n", execs->GetNumberOfItems());
-  vtkExecutionScheduler::GetGlobalScheduler()->WaitUntilReleased(execs);
-  fprintf(stderr, "OK 3\n");
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Pull()
-{
-  this->Pull(static_cast<vtkInformation*>(NULL));
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Pull(vtkInformation *info)
-{
-  vtkExecutiveSet eSet;
-  CollectUpstreamModules(this, eSet);
-  vtkExecutiveCollection *execs = vtkExecutiveCollection::New();
-  for (vtkExecutiveSet::iterator ti = eSet.begin(); ti != eSet.end(); ti++)
-    {
-    execs->AddItem(*ti);
-    }
-  vtkExecutionScheduler::GetGlobalScheduler()->Schedule(execs, info);
-  vtkExecutionScheduler::GetGlobalScheduler()->ReleaseResources(this);
-  vtkExecutionScheduler::GetGlobalScheduler()->WaitUntilDone(execs);
-  vtkExecutionScheduler::GetGlobalScheduler()->ReacquireResources(this);
-  execs->Delete();
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Push()
-{
-  this->Push(static_cast<vtkInformation*>(NULL));
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::Push(vtkInformation *info)
-{
-  vtkExecutiveSet eSet;
-  for(int i = 0; i < this->GetNumberOfOutputPorts(); ++i) {
-    vtkInformation* outInfo = this->GetOutputInformation(i);
-    int consumerCount = vtkExecutive::CONSUMERS()->Length(outInfo);
-    vtkExecutive** e = vtkExecutive::CONSUMERS()->GetExecutives(outInfo);
-    for (int j=0; j<consumerCount; j++)
-      {
-      eSet.insert(e[j]);
-      e[j]->GetAlgorithm()->GetInformation()->Set(EXTRA_INFORMATION(), info);
-      }
-  }
-  vtkExecutiveCollection *execs = vtkExecutiveCollection::New();
-  for (vtkExecutiveSet::iterator ti=eSet.begin(); ti!=eSet.end(); ti++)
-    {
-    execs->AddItem(*ti);
-    }
-  if (AutoPropagatePush)
-    {
-    vtkExecutionScheduler::GetGlobalScheduler()->SchedulePropagate(execs, info);
-    }
-  else
-    {
-    vtkExecutionScheduler::GetGlobalScheduler()->Schedule(execs, info);
-    vtkExecutionScheduler::GetGlobalScheduler()->ReleaseResources(this);
-    vtkExecutionScheduler::GetGlobalScheduler()->WaitUntilReleased(execs);
-    vtkExecutionScheduler::GetGlobalScheduler()->ReacquireResources(this);
-    }
-  execs->Delete();
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::ReleaseInputs()
-{
-  vtkThreadMessager *messager = vtkExecutionScheduler::
-    GetGlobalScheduler()->GetInputsReleasedMessager(this);
-  if (messager)
-    {
-    messager->SendWakeMessage();
-    }
-}
-
-//----------------------------------------------------------------------------
-int vtkThreadedStreamingPipeline
-::ProcessRequest(vtkInformation* request,
-                 vtkInformationVector** inInfoVec,
-                 vtkInformationVector* outInfoVec)
-{
-  int result = 0;
-  if (request->Has(REQUEST_DATA()))
-    {
-    double startTime = vtkTimerLog::GetUniversalTime();
-    result = this->Superclass::ProcessRequest(request, inInfoVec, outInfoVec);
-    this->LastDataRequestTime = vtkTimerLog::GetUniversalTime() - startTime;
-    }
-  else
-    {
-    result = this->Superclass::ProcessRequest(request, inInfoVec, outInfoVec);
-    }
-  return result;
-}
-
-//----------------------------------------------------------------------------
-int vtkThreadedStreamingPipeline::ForceUpdateData(int vtkNotUsed(processingUnit), vtkInformation *info)
-{
-  if (this->ForceDataRequest==NULL)
-    {
-    this->ForceDataRequest = vtkInformation::New();
-    }
-  if (info)
-    {
-    this->ForceDataRequest->Copy(info);
-    }
-  else
-    {
-    this->ForceDataRequest->Clear();
-    }
-  this->ForceDataRequest->Set(vtkDemandDrivenPipeline::REQUEST_DATA());
-  this->ForceDataRequest->Set(vtkExecutive::FORWARD_DIRECTION(), vtkExecutive::RequestUpstream);
-  // Algorithms process this request after it is forwarded.
-  this->ForceDataRequest->Set(vtkExecutive::ALGORITHM_AFTER_FORWARD(), 1);
-//   this->ForceDataRequest->
-//     Set(vtkThreadedStreamingPipeline::PROCESSING_UNIT(), processingUnit);
-  double startTime = vtkTimerLog::GetUniversalTime();
-  int result =  this->CallAlgorithm(this->ForceDataRequest, vtkExecutive::RequestDownstream,
-                                    this->GetInputInformation(),
-                                    this->GetOutputInformation());
-  this->LastDataRequestTime = vtkTimerLog::GetUniversalTime() - startTime;
-  return result;
-}
-
-//----------------------------------------------------------------------------
-void vtkThreadedStreamingPipeline::UpdateRequestDataTimeFromSource()
-{
-  float maxUpStreamTime = 0.0f;
-  for(int i = 0; i < this->GetNumberOfInputPorts(); ++i)
-    {
-    int nic = this->GetAlgorithm()->GetNumberOfInputConnections(i);
-    vtkInformationVector* inVector = this->GetInputInformation()[i];
-    for(int j=0; j < nic; ++j)
-      {
-      vtkInformation* inInfo = inVector->GetInformationObject(j);
-      vtkExecutive* e;
-      int producerPort;
-      vtkExecutive::PRODUCER()->Get(inInfo, e, producerPort);
-      if (e)
-        {
-        vtkThreadedStreamingPipeline *te = vtkThreadedStreamingPipeline::
-          SafeDownCast(e);
-        if (te && maxUpStreamTime<te->LastDataRequestTimeFromSource)
-          maxUpStreamTime = te->LastDataRequestTimeFromSource;
-        }
-      }
-    }
-  this->LastDataRequestTimeFromSource = maxUpStreamTime + this->LastDataRequestTime;
-}
-
-//----------------------------------------------------------------------------
-vtkComputingResources *vtkThreadedStreamingPipeline::GetResources() {
-  if (!this->Resources)
-    {
-    this->Resources = vtkComputingResources::New();
-    }
-  return this->Resources;
-}
-
-//----------------------------------------------------------------------------
-int vtkThreadedStreamingPipeline::ForwardUpstream(vtkInformation* request)
-{
-  if (MultiThreadedEnabled && request->Has(vtkDemandDrivenPipeline::REQUEST_DATA()))
-    {
-    this->Pull();
-    return 1;
-    }
-  else
-    {
-    return this->Superclass::ForwardUpstream(request);
-    }
-}
-//----------------------------------------------------------------------------
-int vtkThreadedStreamingPipeline::ForwardUpstream(int i, int j, vtkInformation* request)
-{
-  return this->Superclass::ForwardUpstream(i, j, request);
-}
diff --git a/Common/ExecutionModel/vtkThreadedStreamingPipeline.h b/Common/ExecutionModel/vtkThreadedStreamingPipeline.h
deleted file mode 100644
index 8757bcc..0000000
--- a/Common/ExecutionModel/vtkThreadedStreamingPipeline.h
+++ /dev/null
@@ -1,177 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkThreadedStreamingPipeline.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-  This software is distributed WITHOUT ANY WARRANTY; without even
-  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-  PURPOSE.  See the above copyright notice for more information.
-
-  =========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright (c) 2008, 2009 by SCI Institute, University of Utah.
-
-  This is part of the Parallel Dataflow System originally developed by
-  Huy T. Vo and Claudio T. Silva. For more information, see:
-
-  "Parallel Dataflow Scheme for Streaming (Un)Structured Data" by Huy
-  T. Vo, Daniel K. Osmari, Brian Summa, Joao L.D. Comba, Valerio
-  Pascucci and Claudio T. Silva, SCI Institute, University of Utah,
-  Technical Report #UUSCI-2009-004, 2009.
-
-  "Multi-Threaded Streaming Pipeline For VTK" by Huy T. Vo and Claudio
-  T. Silva, SCI Institute, University of Utah, Technical Report
-  #UUSCI-2009-005, 2009.
-  -------------------------------------------------------------------------*/
-// .NAME vtkThreadedStreamingPipeline - Executive supporting multi-threads
-// .SECTION Description
-// vtkThreadeStreamingDemandDrivenPipeline is an executive that supports
-// updating input ports based on the number of threads available.
-
-// .SECTION See Also
-// vtkExecutionScheduler
-
-
-#ifndef __vtkThreadedStreamingPipeline_h
-#define __vtkThreadedStreamingPipeline_h
-
-#include "vtkCommonExecutionModelModule.h" // For export macro
-#include "vtkCompositeDataPipeline.h"
-
-class vtkComputingResources;
-class vtkExecutionScheduler;
-class vtkExecutiveCollection;
-
-class VTKCOMMONEXECUTIONMODEL_EXPORT vtkThreadedStreamingPipeline : public vtkCompositeDataPipeline
-{
-public:
-  static vtkThreadedStreamingPipeline* New();
-  vtkTypeMacro(vtkThreadedStreamingPipeline,vtkCompositeDataPipeline);
-  void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Description:
-  // Key to store the priority of a task
-  static vtkInformationIntegerKey* AUTO_PROPAGATE();
-
-  // Description:
-  // Key to store the additional information for an update request
-  static vtkInformationObjectBaseKey* EXTRA_INFORMATION();
-
-//BTX
-  // Description:
-  // Definition of different types of processing units an algorithm
-  // can be executed
-  enum
-  {
-    PROCESSING_UNIT_NONE = 0,
-    PROCESSING_UNIT_CPU = 1,
-    PROCESSING_UNIT_GPU = 2
-  };
-//ETX
-
-  // Description:
-  // Enable/Disable Multi-Threaded updating mechanism
-  static void SetMultiThreadedEnabled(bool enabled);
-
-  // Description:
-  // Enable/Disable automatic propagation of Push events
-  static void SetAutoPropagatePush(bool enabled);
-
-  // Description:
-  // Trigger the updates on certain execs and asking all of its
-  // upstream modules to be updated as well (propagate up)
-  static void Pull(vtkExecutiveCollection *execs);
-
-  // Description:
-  // Trigger the updates on certain execs and asking all of its
-  // upstream modules to be updated as well (propagate up)
-  static void Pull(vtkExecutiveCollection *execs, vtkInformation *info);
-
-  // Description:
-  // Trigger the updates on certain execs and asking all of its
-  // downstream modules to be updated as well (propagate down)
-  static void Push(vtkExecutiveCollection *execs);
-
-  // Description:
-  // Trigger the updates on certain execs and asking all of its
-  // downstream modules to be updated as well (propagate down)
-  static void Push(vtkExecutiveCollection *execs, vtkInformation *info);
-
-  // Description:
-  // A simplified version of Pull() which only acts upon a single executive
-  static void Pull(vtkExecutive *exec);
-
-  // Description:
-  // A simplified version of Pull() which only acts upon a single executive
-  static void Pull(vtkExecutive *exec, vtkInformation *info);
-
-  // Description:
-  // A simplified version of Push() which only acts upon a single executive
-  static void Push(vtkExecutive *exec);
-
-  // Description:
-  // A simplified version of Push() which only acts upon a single executive
-  static void Push(vtkExecutive *exec, vtkInformation *info);
-
-  // Description:
-  // Triggers upstream modules to update but not including itself
-  void Pull();
-
-  // Description:
-  // Triggers upstream modules to update but not including itself
-  void Pull(vtkInformation *info);
-
-  // Description:
-  // Triggers downstream modules to update but not including itself
-  void Push();
-
-  // Description:
-  // Triggers downstream modules to update but not including itself
-  void Push(vtkInformation *info);
-
-  // Description:
-  // Release all the locks for input ports living upstream
-  void ReleaseInputs();
-
-  // Description:
-  // Generalized interface for asking the executive to fulfill update
-  // requests.
-  virtual int ProcessRequest(vtkInformation* request,
-                             vtkInformationVector** inInfo,
-                             vtkInformationVector* outInfo);
-
-  // Description:
-  // Send a direct REQUEST_DATA (on all ports) to this executive
-  int ForceUpdateData(int processingUnit, vtkInformation *info);
-
-  // Description:
-  // Update the LastDataRequestTimeFromSource using its upstream time
-  void UpdateRequestDataTimeFromSource();
-
-  // Description:
-  // Return the scheduling for this executive
-  vtkComputingResources *GetResources();
-
-  float                  LastDataRequestTime;
-  float                  LastDataRequestTimeFromSource;
-  vtkInformation        *ForceDataRequest;
-  vtkComputingResources *Resources;
-  vtkExecutionScheduler *Scheduler;
-
-protected:
-  vtkThreadedStreamingPipeline();
-  ~vtkThreadedStreamingPipeline();
-
-  virtual int ForwardUpstream(vtkInformation* request);
-  virtual int ForwardUpstream(int i, int j, vtkInformation* request);
-
-private:
-  vtkThreadedStreamingPipeline(const vtkThreadedStreamingPipeline&);  // Not implemented.
-  void operator=(const vtkThreadedStreamingPipeline&);  // Not implemented.
-};
-
-#endif
diff --git a/Common/Math/Testing/Cxx/CMakeLists.txt b/Common/Math/Testing/Cxx/CMakeLists.txt
index 501d988..98a9ca0 100644
--- a/Common/Math/Testing/Cxx/CMakeLists.txt
+++ b/Common/Math/Testing/Cxx/CMakeLists.txt
@@ -1,28 +1,14 @@
-set(Test_SRCS
+vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
   TestAmoebaMinimizer.cxx
   TestMatrix3x3.cxx
   TestPolynomialSolversUnivariate.cxx
   TestQuaternion.cxx
-)
+  )
 
 if(NOT VTK_LEGACY_REMOVE)
-  list(APPEND Test_SRCS
+  vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
     TestFastNumericConversion.cxx
     )
 endif()
 
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${Test_SRCS}
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach (test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Common/Math/Testing/Tcl/CMakeLists.txt b/Common/Math/Testing/Tcl/CMakeLists.txt
index b1f8f7b..188bde4 100644
--- a/Common/Math/Testing/Tcl/CMakeLists.txt
+++ b/Common/Math/Testing/Tcl/CMakeLists.txt
@@ -3,8 +3,5 @@ set(tests
   )
 
 foreach(test ${tests})
-  add_test(NAME ${vtk-module}Tcl-${test}
-    COMMAND ${VTK_TCL_EXE}
-      ${CMAKE_CURRENT_SOURCE_DIR}/${test}.tcl
-      -A ${VTK_SOURCE_DIR}/Wrapping/Tcl)
+  vtk_add_test_tcl(${test}.tcl NO_DATA NO_RT)
 endforeach()
diff --git a/Common/Math/vtkAmoebaMinimizer.cxx b/Common/Math/vtkAmoebaMinimizer.cxx
index 5fbbd1d..c943fa2 100644
--- a/Common/Math/vtkAmoebaMinimizer.cxx
+++ b/Common/Math/vtkAmoebaMinimizer.cxx
@@ -69,24 +69,15 @@ vtkAmoebaMinimizer::~vtkAmoebaMinimizer()
     {
     for (int i = 0; i < this->NumberOfParameters; i++)
       {
-      if (this->ParameterNames[i])
-        {
-        delete [] this->ParameterNames[i];
-        }
+      delete [] this->ParameterNames[i];
       }
     delete [] this->ParameterNames;
     this->ParameterNames = NULL;
     }
-  if (this->ParameterValues)
-    {
-    delete [] this->ParameterValues;
-    this->ParameterValues = NULL;
-    }
-  if (this->ParameterScales)
-    {
-    delete [] this->ParameterScales;
-    this->ParameterScales = NULL;
-    }
+  delete [] this->ParameterValues;
+  this->ParameterValues = NULL;
+  delete [] this->ParameterScales;
+  this->ParameterScales = NULL;
 
   this->NumberOfParameters = 0;
 }
@@ -303,24 +294,15 @@ void vtkAmoebaMinimizer::Initialize()
     {
     for (int i = 0; i < this->NumberOfParameters; i++)
       {
-      if (this->ParameterNames[i])
-        {
-        delete [] this->ParameterNames[i];
-        }
+      delete [] this->ParameterNames[i];
       }
     delete [] this->ParameterNames;
     this->ParameterNames = 0;
     }
-  if (this->ParameterValues)
-    {
-    delete [] this->ParameterValues;
-    this->ParameterValues = 0;
-    }
-  if (this->ParameterScales)
-    {
-    delete [] this->ParameterScales;
-    this->ParameterScales = 0;
-    }
+  delete [] this->ParameterValues;
+  this->ParameterValues = 0;
+  delete [] this->ParameterScales;
+  this->ParameterScales = 0;
 
   this->NumberOfParameters = 0;
   this->Iterations = 0;
@@ -622,16 +604,10 @@ void  vtkAmoebaMinimizer::TerminateAmoeba()
     delete [] this->AmoebaVertices;
     this->AmoebaVertices = NULL;
     }
-  if (this->AmoebaValues)
-    {
-    delete [] this->AmoebaValues;
-    this->AmoebaValues = NULL;
-    }
-  if (this->AmoebaSum)
-    {
-    delete [] this->AmoebaSum;
-    this->AmoebaSum = NULL;
-    }
+  delete [] this->AmoebaValues;
+  this->AmoebaValues = NULL;
+  delete [] this->AmoebaSum;
+  this->AmoebaSum = NULL;
 }
 
 /* ----------------------------- MNI Header -----------------------------------
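
The vtkAmoebaMinimizer hunks above (and the similar ones later in this patch) drop the null checks around delete [] because C++ defines delete and delete [] on a null pointer as a no-op, so the guards were redundant. A minimal standalone sketch of the pattern, using a hypothetical Buffer type rather than any VTK class:

    #include <cstddef>

    struct Buffer
    {
      double *Values;            // may stay NULL if never allocated

      Buffer() : Values(NULL) {}

      ~Buffer()
      {
        // No "if (this->Values)" guard needed: delete [] on a null pointer
        // is defined to do nothing, so this is safe either way.
        delete [] this->Values;
        this->Values = NULL;     // mirror the defensive reset used above
      }
    };

    int main()
    {
      Buffer empty;              // destructor runs with Values == NULL: no-op
      Buffer full;
      full.Values = new double[16];
      return 0;                  // destructor releases the allocation
    }
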
diff --git a/Common/Math/vtkFastNumericConversion.h b/Common/Math/vtkFastNumericConversion.h
index 3233dac..b2b66ac 100644
--- a/Common/Math/vtkFastNumericConversion.h
+++ b/Common/Math/vtkFastNumericConversion.h
@@ -363,7 +363,7 @@ public:
 protected:
   //BTX
   vtkFastNumericConversion();
-  ~vtkFastNumericConversion() {};
+  ~vtkFastNumericConversion() {}
   void InternalRebuild(void);
 
 private:
diff --git a/Common/Math/vtkFunctionSet.h b/Common/Math/vtkFunctionSet.h
index 00525c1..1cb792d 100644
--- a/Common/Math/vtkFunctionSet.h
+++ b/Common/Math/vtkFunctionSet.h
@@ -58,7 +58,7 @@ public:
 
 protected:
   vtkFunctionSet();
-  ~vtkFunctionSet() {};
+  ~vtkFunctionSet() {}
 
   int NumFuncs;
   int NumIndepVars;
diff --git a/Common/Math/vtkMatrix4x4.cxx b/Common/Math/vtkMatrix4x4.cxx
index ecf74f2..9a72ac0 100644
--- a/Common/Math/vtkMatrix4x4.cxx
+++ b/Common/Math/vtkMatrix4x4.cxx
@@ -21,9 +21,6 @@
 
 vtkStandardNewMacro(vtkMatrix4x4);
 
-// Useful for viewing a double[16] as a double[4][4]
-typedef double (*SqMatPtr)[4];
-
 //----------------------------------------------------------------------------
 void vtkMatrix4x4::Zero(double Elements[16])
 {
@@ -101,38 +98,6 @@ void vtkMatrix4x4::PointMultiply(const double Elements[16],
 }
 
 //----------------------------------------------------------------------------
-// Multiplies matrices a and b and stores the result in c.
-void vtkMatrix4x4::Multiply4x4(const double a[16], const double b[16],
-                               double c[16])
-{
-  SqMatPtr aMat = (SqMatPtr)a;
-  SqMatPtr bMat = (SqMatPtr)b;
-  SqMatPtr cMat = (SqMatPtr)c;
-  double Accum[4][4];
-
-  for (int i = 0; i < 4; i++)
-    {
-    for (int k = 0; k < 4; k++)
-      {
-      Accum[i][k] = aMat[i][0] * bMat[0][k] +
-                    aMat[i][1] * bMat[1][k] +
-                    aMat[i][2] * bMat[2][k] +
-                    aMat[i][3] * bMat[3][k];
-      }
-    }
-
-  // Copy to final dest
-  for (int i = 0; i < 4; i++)
-    {
-    cMat[i][0] = Accum[i][0];
-    cMat[i][1] = Accum[i][1];
-    cMat[i][2] = Accum[i][2];
-    cMat[i][3] = Accum[i][3];
-    }
-
-}
-
-//----------------------------------------------------------------------------
 // Matrix Inversion (adapted from Richard Carling in "Graphics Gems,"
 // Academic Press, 1990).
 void vtkMatrix4x4::Invert(const double inElements[16],
diff --git a/Common/Math/vtkMatrix4x4.h b/Common/Math/vtkMatrix4x4.h
index 964524e..a570677 100644
--- a/Common/Math/vtkMatrix4x4.h
+++ b/Common/Math/vtkMatrix4x4.h
@@ -129,8 +129,8 @@ public:
 
   // Description:
   // Multiplies matrices a and b and stores the result in c.
-  static void Multiply4x4(const vtkMatrix4x4 *a, const vtkMatrix4x4 *b, vtkMatrix4x4 *c) {
-    vtkMatrix4x4::Multiply4x4(*a->Element,*b->Element,*c->Element); };
+  static void Multiply4x4(const vtkMatrix4x4 *a, const vtkMatrix4x4 *b,
+                          vtkMatrix4x4 *c);
 //BTX
   static void Multiply4x4(const double a[16], const double b[16],
                           double c[16]);
@@ -183,15 +183,53 @@ public:
 
 protected:
   vtkMatrix4x4() { vtkMatrix4x4::Identity(*this->Element); };
-  ~vtkMatrix4x4() {};
+  ~vtkMatrix4x4() {}
 
   float FloatPoint[4];
   double DoublePoint[4];
 private:
+  // Useful for viewing a double[16] as a double[4][4]
+  typedef double (*SqMatPtr)[4];
+  typedef const double (*ConstSqMatPtr)[4];
+
   vtkMatrix4x4(const vtkMatrix4x4&);  // Not implemented
   void operator= (const vtkMatrix4x4&);  // Not implemented
 };
 
+//----------------------------------------------------------------------------
+// Multiplies matrices a and b and stores the result in c.
+inline void vtkMatrix4x4::Multiply4x4(const double a[16], const double b[16],
+                                      double c[16])
+{
+  ConstSqMatPtr aMat = reinterpret_cast<ConstSqMatPtr>(a);
+  ConstSqMatPtr bMat = reinterpret_cast<ConstSqMatPtr>(b);
+
+  double tmp[16];
+  SqMatPtr cMat = reinterpret_cast<SqMatPtr>(tmp);
+
+  for (int i = 0; i < 4; i++)
+    {
+    for (int k = 0; k < 4; k++)
+      {
+      cMat[i][k] = aMat[i][0] * bMat[0][k] +
+                   aMat[i][1] * bMat[1][k] +
+                   aMat[i][2] * bMat[2][k] +
+                   aMat[i][3] * bMat[3][k];
+      }
+    }
+
+  // Copy to final dest
+  memcpy(c, tmp, 16 * sizeof(double));
+}
+
+//----------------------------------------------------------------------------
+inline void vtkMatrix4x4::Multiply4x4(
+  const vtkMatrix4x4 *a, const vtkMatrix4x4 *b, vtkMatrix4x4 *c)
+{
+  vtkMatrix4x4::Multiply4x4(*a->Element, *b->Element, *c->Element);
+}
+
+//----------------------------------------------------------------------------
 inline void vtkMatrix4x4::SetElement(int i, int j, double value)
 {
   if (this->Element[i][j] != value)
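
The inlined Multiply4x4 now accumulates into a local tmp[16] and copies the result out with memcpy, which keeps calls where the output aliases an input (for example squaring a matrix in place) well defined. A small sketch of the aliasing hazard the temporary avoids, using hypothetical 2x2 helpers rather than the VTK API:

    #include <cstdio>
    #include <cstring>

    // Naive version: writes into c while still reading a and b.
    static void MultiplyNaive(const double a[4], const double b[4], double c[4])
    {
      c[0] = a[0] * b[0] + a[1] * b[2];  // if c aliases a, a[0] is clobbered here...
      c[1] = a[0] * b[1] + a[1] * b[3];  // ...so this entry uses the wrong value
      c[2] = a[2] * b[0] + a[3] * b[2];
      c[3] = a[2] * b[1] + a[3] * b[3];
    }

    // Alias-safe version: accumulate into a temporary, then copy (the pattern above).
    static void MultiplySafe(const double a[4], const double b[4], double c[4])
    {
      double tmp[4];
      tmp[0] = a[0] * b[0] + a[1] * b[2];
      tmp[1] = a[0] * b[1] + a[1] * b[3];
      tmp[2] = a[2] * b[0] + a[3] * b[2];
      tmp[3] = a[2] * b[1] + a[3] * b[3];
      std::memcpy(c, tmp, sizeof(tmp));
    }

    int main()
    {
      double m[4] = {1, 2, 3, 4};
      double n[4] = {1, 2, 3, 4};
      MultiplyNaive(m, m, m);    // in-place square: result is corrupted
      MultiplySafe(n, n, n);     // in-place square: correct result is 7 10 15 22
      std::printf("naive: %g %g %g %g\n", m[0], m[1], m[2], m[3]);
      std::printf("safe:  %g %g %g %g\n", n[0], n[1], n[2], n[3]);
      return 0;
    }
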
diff --git a/Common/Math/vtkPolynomialSolversUnivariate.h b/Common/Math/vtkPolynomialSolversUnivariate.h
index 5b76b29..c02d035 100644
--- a/Common/Math/vtkPolynomialSolversUnivariate.h
+++ b/Common/Math/vtkPolynomialSolversUnivariate.h
@@ -262,8 +262,8 @@ public:
   static double GetDivisionTolerance();
 
 protected:
-  vtkPolynomialSolversUnivariate() {};
-  ~vtkPolynomialSolversUnivariate() {};
+  vtkPolynomialSolversUnivariate() {}
+  ~vtkPolynomialSolversUnivariate() {}
 
   static double DivisionTolerance;
 
diff --git a/Common/Math/vtkQuaternion.h b/Common/Math/vtkQuaternion.h
index dc19802..8595d1f 100644
--- a/Common/Math/vtkQuaternion.h
+++ b/Common/Math/vtkQuaternion.h
@@ -197,10 +197,6 @@ public:
     const vtkQuaternion<T>& q2) const;
 
   // Description:
-  // Performs the copy of a quaternion of the same basic type.
-  void operator=(const vtkQuaternion<T>& q);
-
-  // Description:
   // Performs addition of quaternion of the same basic type.
   vtkQuaternion<T> operator+(const vtkQuaternion<T>& q) const;
 
diff --git a/Common/Math/vtkQuaternion.txx b/Common/Math/vtkQuaternion.txx
index 54909f8..d4b84f4 100644
--- a/Common/Math/vtkQuaternion.txx
+++ b/Common/Math/vtkQuaternion.txx
@@ -275,16 +275,6 @@ vtkQuaternion<T>::SetRotationAngleAndAxis (const T& angle,
 
 //----------------------------------------------------------------------------
 template<typename T>
-void vtkQuaternion<T>::operator=(const vtkQuaternion<T>& q)
-{
-  for (int i = 0; i < 4; ++i)
-    {
-    this->Data[i] = q[i];
-    }
-}
-
-//----------------------------------------------------------------------------
-template<typename T>
 vtkQuaternion<T> vtkQuaternion<T>::operator+(const vtkQuaternion<T>& q) const
 {
   vtkQuaternion<T> ret;
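
Removing vtkQuaternion<T>::operator= is safe because the implicitly generated copy-assignment operator already copies the fixed-size Data array member by member, which is exactly what the handwritten loop did. A minimal sketch with a hypothetical Quat template, not the VTK class:

    #include <cassert>

    template <typename T>
    struct Quat
    {
      T Data[4];
      // No user-declared operator=: the compiler-generated copy assignment
      // copies Data element by element, matching the removed handwritten loop.
    };

    int main()
    {
      Quat<double> a = { {1.0, 2.0, 3.0, 4.0} };
      Quat<double> b = { {0.0, 0.0, 0.0, 0.0} };
      b = a;                     // implicit memberwise copy assignment
      assert(b.Data[0] == 1.0 && b.Data[3] == 4.0);
      return 0;
    }
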
diff --git a/Common/Misc/Testing/Cxx/CMakeLists.txt b/Common/Misc/Testing/Cxx/CMakeLists.txt
index 227fe6b..0a56e3f 100644
--- a/Common/Misc/Testing/Cxx/CMakeLists.txt
+++ b/Common/Misc/Testing/Cxx/CMakeLists.txt
@@ -1,16 +1,2 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestPolygonBuilder.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach (test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName})
-endforeach()
+vtk_add_test_cxx(TestPolygonBuilder.cxx NO_DATA NO_VALID NO_OUTPUT)
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Common/Misc/vtkFunctionParser.cxx b/Common/Misc/vtkFunctionParser.cxx
index afa39ef..f5eb96b 100644
--- a/Common/Misc/vtkFunctionParser.cxx
+++ b/Common/Misc/vtkFunctionParser.cxx
@@ -81,11 +81,8 @@ vtkFunctionParser::~vtkFunctionParser()
     this->VectorVariableNames = NULL;
     }
 
-  if (this->ScalarVariableValues)
-    {
-    delete [] this->ScalarVariableValues;
-    this->ScalarVariableValues = NULL;
-    }
+  delete [] this->ScalarVariableValues;
+  this->ScalarVariableValues = NULL;
 
   if (this->VectorVariableValues)
     {
@@ -98,35 +95,20 @@ vtkFunctionParser::~vtkFunctionParser()
     this->VectorVariableValues = NULL;
     }
 
-  if (this->Function)
-    {
-    delete [] this->Function;
-    this->Function = NULL;
-    }
+  delete [] this->Function;
+  this->Function = NULL;
 
-  if (this->FunctionWithSpaces)
-    {
-    delete [] this->FunctionWithSpaces;
-    this->FunctionWithSpaces = NULL;
-    }
+  delete [] this->FunctionWithSpaces;
+  this->FunctionWithSpaces = NULL;
 
-  if (this->ByteCode)
-    {
-    delete [] this->ByteCode;
-    this->ByteCode = NULL;
-    }
+  delete [] this->ByteCode;
+  this->ByteCode = NULL;
 
-  if (this->Immediates)
-    {
-    delete [] this->Immediates;
-    this->Immediates = NULL;
-    }
+  delete [] this->Immediates;
+  this->Immediates = NULL;
 
-  if (this->Stack)
-    {
-    delete [] this->Stack;
-    this->Stack = NULL;
-    }
+  delete [] this->Stack;
+  this->Stack = NULL;
 
   if(this->ParseError)
     {
@@ -1135,16 +1117,11 @@ void vtkFunctionParser::SetScalarVariableValue(const char* inVariableName,
     this->ScalarVariableNames[i] = NULL;
     }
 
-  if (this->ScalarVariableValues)
-    {
-    delete [] this->ScalarVariableValues;
-    this->ScalarVariableValues = NULL;
-    }
-  if (this->ScalarVariableNames)
-    {
-    delete [] this->ScalarVariableNames;
-    this->ScalarVariableNames = NULL;
-    }
+  delete [] this->ScalarVariableValues;
+  this->ScalarVariableValues = NULL;
+
+  delete [] this->ScalarVariableNames;
+  this->ScalarVariableNames = NULL;
 
   this->ScalarVariableValues = new double [this->NumberOfScalarVariables + 1];
   this->ScalarVariableNames = new char *[this->NumberOfScalarVariables + 1];
@@ -1263,16 +1240,11 @@ void vtkFunctionParser::SetVectorVariableValue(const char* inVariableName,
     this->VectorVariableValues[i] = NULL;
     }
 
-  if (this->VectorVariableValues)
-    {
-    delete [] this->VectorVariableValues;
-    this->VectorVariableValues = NULL;
-    }
-  if (this->VectorVariableNames)
-    {
-    delete [] this->VectorVariableNames;
-    this->VectorVariableNames = NULL;
-    }
+  delete [] this->VectorVariableValues;
+  this->VectorVariableValues = NULL;
+
+  delete [] this->VectorVariableNames;
+  this->VectorVariableNames = NULL;
 
   this->VectorVariableValues = new double *[this->NumberOfVectorVariables + 1];
   this->VectorVariableNames = new char *[this->NumberOfVectorVariables + 1];
@@ -1504,21 +1476,14 @@ void vtkFunctionParser::CopyParseError(int &position, char **error)
 //-----------------------------------------------------------------------------
 int vtkFunctionParser::BuildInternalFunctionStructure()
 {
-  if (this->ByteCode)
-    {
-    delete [] this->ByteCode;
-    this->ByteCode = NULL;
-    }
-  if (this->Immediates)
-    {
-    delete [] this->Immediates;
-    this->Immediates = NULL;
-    }
-  if (this->Stack)
-    {
-    delete [] this->Stack;
-    this->Stack = NULL;
-    }
+  delete [] this->ByteCode;
+  this->ByteCode = NULL;
+
+  delete [] this->Immediates;
+  this->Immediates = NULL;
+
+  delete [] this->Stack;
+  this->Stack = NULL;
 
   this->ByteCodeSize = this->ImmediatesSize = this->StackSize = 0;
   this->StackPointer = 0;
@@ -1718,10 +1683,7 @@ void vtkFunctionParser::AddInternalByte(unsigned char newByte)
     { // Copy current byte code to a temporary array
     tempByteCode[i] = this->ByteCode[i];
     }
-  if (this->ByteCode)
-    {
-    delete [] this->ByteCode;
-    }
+  delete [] this->ByteCode;
 
   // Allocate space for new byte.
   this->ByteCode = new unsigned char[this->ByteCodeSize + 1];
@@ -2124,10 +2086,7 @@ unsigned char vtkFunctionParser::GetOperandNumber(int currentIndex)
       { // Copy current immediates to a temporary array
       tempImmediates[i] = this->Immediates[i];
       }
-    if (this->Immediates)
-      {
-      delete [] this->Immediates;
-      }
+    delete [] this->Immediates;
 
     // Allocate space for new immediate value.
     this->Immediates = new double[this->ImmediatesSize + 1];
diff --git a/Common/System/Testing/Cxx/CMakeLists.txt b/Common/System/Testing/Cxx/CMakeLists.txt
index 025f974..c53d2ef 100644
--- a/Common/System/Testing/Cxx/CMakeLists.txt
+++ b/Common/System/Testing/Cxx/CMakeLists.txt
@@ -1,16 +1,5 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
   TestDirectory.cxx
   otherTimerLog.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach (test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  add_test(NAME ${TName} COMMAND ${vtk-module}CxxTests ${TName})
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Common/System/module.cmake b/Common/System/module.cmake
index 5e5186a..5c13d64 100644
--- a/Common/System/module.cmake
+++ b/Common/System/module.cmake
@@ -3,6 +3,7 @@ vtk_module(vtkCommonSystem
     StandAlone
   DEPENDS
     vtkCommonCore
+  PRIVATE_DEPENDS
     vtksys
   TEST_DEPENDS
    vtkTestingCore
diff --git a/Common/System/vtkTimerLog.cxx b/Common/System/vtkTimerLog.cxx
index e9e846e..5559406 100644
--- a/Common/System/vtkTimerLog.cxx
+++ b/Common/System/vtkTimerLog.cxx
@@ -91,10 +91,7 @@ tms     vtkTimerLog::CurrentCpuTicks;
 // Allocate timing table with MaxEntries elements.
 void vtkTimerLog::AllocateLog()
 {
-  if (vtkTimerLog::TimerLog != NULL)
-    {
-    delete [] vtkTimerLog::TimerLog;
-    }
+  delete [] vtkTimerLog::TimerLog;
   vtkTimerLog::TimerLog = new vtkTimerLogEntry[vtkTimerLog::MaxEntries];
 }
 
diff --git a/Common/Transforms/Testing/Cxx/CMakeLists.txt b/Common/Transforms/Testing/Cxx/CMakeLists.txt
new file mode 100644
index 0000000..14a83c6
--- /dev/null
+++ b/Common/Transforms/Testing/Cxx/CMakeLists.txt
@@ -0,0 +1,5 @@
+vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
+  TestTransform.cxx
+  )
+
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Common/Transforms/Testing/Cxx/TestTransform.cxx b/Common/Transforms/Testing/Cxx/TestTransform.cxx
new file mode 100644
index 0000000..a00a47d
--- /dev/null
+++ b/Common/Transforms/Testing/Cxx/TestTransform.cxx
@@ -0,0 +1,76 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestTransform.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <iostream>
+
+#include "vtkTransform.h"
+#include "vtkSmartPointer.h"
+
+// forward declare test subroutines
+int testUseOfInverse();
+
+int TestTransform(int,char *[])
+  {
+  int numErrors = 0;
+
+  numErrors += testUseOfInverse();
+
+  return (numErrors > 0) ? 1 : 0;
+  }
+
+// This is a regression test for a bug where the following code produced
+// a segfault.  As long as this code does not produce a segfault,
+// consider it to have passed the test.
+int testUseOfInverse()
+  {
+  vtkSmartPointer<vtkTransform> trans1 =
+    vtkSmartPointer<vtkTransform>::New();
+  vtkSmartPointer<vtkTransform> trans2 =
+    vtkSmartPointer<vtkTransform>::New();
+  vtkSmartPointer<vtkTransform> trans3 =
+    vtkSmartPointer<vtkTransform>::New();
+  trans1->Identity();
+  trans2->Identity();
+  trans2->PostMultiply();
+  trans3->Identity();
+  double a[] = {3,4,5}, b[3];
+  // get inverses for 2 and 3
+  vtkSmartPointer<vtkLinearTransform> inv2 =
+    trans2->GetLinearInverse();
+  vtkSmartPointer<vtkLinearTransform> inv3 =
+    trans3->GetLinearInverse();
+  for (int i = 0; i < 30; i++)
+    {
+    // make the transform something easy
+    trans2->Translate(a);
+    trans2->RotateX(4);
+    trans2->RotateY(i % 90);
+    // transform some stuff
+    inv2->TransformVector(a,b);
+    inv2->TransformPoint(a,b);
+    // build a transform with concatenations including an inverse
+    trans2->Identity();
+    trans2->Concatenate(trans1);
+    trans2->Concatenate(inv3);
+    // transform some stuff
+    inv2->TransformVector(a,b);
+    inv2->TransformPoint(a,b);
+    // print the iteration and the inverse's reference count
+    trans2->Identity();
+    std::cout << "Iteration: " << i << " Reference Count: "
+      << inv3->GetReferenceCount() << std::endl;
+    }
+  return 0;
+  }
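
The new TestTransform regression test keeps the inverse transforms in vtkSmartPointer handles so their reference counts stay positive while trans2 is repeatedly reset and re-concatenated inside the loop. A language-level sketch of the same ownership idea using std::shared_ptr and stand-in types (an analogy for the lifetime pattern only, not the VTK reference-counting implementation):

    #include <cstdio>
    #include <memory>

    struct Inverse
    {
      int uses;
      Inverse() : uses(0) {}
    };

    struct Transform
    {
      std::shared_ptr<Inverse> inverse;
      Transform() : inverse(std::make_shared<Inverse>()) {}
      std::shared_ptr<Inverse> GetInverse() { return inverse; }
      void Reset() { /* reconfigure; the shared Inverse object itself survives */ }
    };

    int main()
    {
      Transform t;
      std::shared_ptr<Inverse> inv = t.GetInverse();  // caller holds a reference
      for (int i = 0; i < 30; i++)
      {
        t.Reset();               // repeated reconfiguration, as in the test loop
        inv->uses++;             // still valid: 'inv' keeps the object alive
      }
      std::printf("uses: %d, use_count: %ld\n",
                  inv->uses, static_cast<long>(inv.use_count()));
      return 0;
    }
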
diff --git a/Common/Transforms/Testing/Data/Baseline/MatrixToTransform.png.md5 b/Common/Transforms/Testing/Data/Baseline/MatrixToTransform.png.md5
new file mode 100644
index 0000000..53f5e24
--- /dev/null
+++ b/Common/Transforms/Testing/Data/Baseline/MatrixToTransform.png.md5
@@ -0,0 +1 @@
+669fad7b1bf8472880d1d36b8b81a466
diff --git a/Common/Transforms/Testing/Data/Baseline/MatrixToTransform_1.png.md5 b/Common/Transforms/Testing/Data/Baseline/MatrixToTransform_1.png.md5
new file mode 100644
index 0000000..9f6ce18
--- /dev/null
+++ b/Common/Transforms/Testing/Data/Baseline/MatrixToTransform_1.png.md5
@@ -0,0 +1 @@
+2c35518c7cdc07e3a30fe687e865922f
diff --git a/Common/Transforms/Testing/Data/Baseline/TestThinPlateWarp.png.md5 b/Common/Transforms/Testing/Data/Baseline/TestThinPlateWarp.png.md5
new file mode 100644
index 0000000..79d5f20
--- /dev/null
+++ b/Common/Transforms/Testing/Data/Baseline/TestThinPlateWarp.png.md5
@@ -0,0 +1 @@
+bbdf439aedf4a57ead5ba928875012c4
diff --git a/Common/Transforms/Testing/Data/Baseline/TestThinPlateWarp3D.png.md5 b/Common/Transforms/Testing/Data/Baseline/TestThinPlateWarp3D.png.md5
new file mode 100644
index 0000000..8c36908
--- /dev/null
+++ b/Common/Transforms/Testing/Data/Baseline/TestThinPlateWarp3D.png.md5
@@ -0,0 +1 @@
+b6c364f94d2b924e8f084e7369d0485e
diff --git a/Common/Transforms/Testing/Data/Baseline/cylindrical.png.md5 b/Common/Transforms/Testing/Data/Baseline/cylindrical.png.md5
new file mode 100644
index 0000000..d8cb0a5
--- /dev/null
+++ b/Common/Transforms/Testing/Data/Baseline/cylindrical.png.md5
@@ -0,0 +1 @@
+3593791bcf346f129fe05e9ca49b51c8
diff --git a/Common/Transforms/Testing/Data/Baseline/spherical.png.md5 b/Common/Transforms/Testing/Data/Baseline/spherical.png.md5
new file mode 100644
index 0000000..27a96ed
--- /dev/null
+++ b/Common/Transforms/Testing/Data/Baseline/spherical.png.md5
@@ -0,0 +1 @@
+ea40e0d62b6fd18482c327f4f919507d
diff --git a/Common/Transforms/Testing/Data/Baseline/spherical_1.png.md5 b/Common/Transforms/Testing/Data/Baseline/spherical_1.png.md5
new file mode 100644
index 0000000..3e66d58
--- /dev/null
+++ b/Common/Transforms/Testing/Data/Baseline/spherical_1.png.md5
@@ -0,0 +1 @@
+b8d702561304f66c526e222cfac1f0fb
diff --git a/Common/Transforms/Testing/Python/CMakeLists.txt b/Common/Transforms/Testing/Python/CMakeLists.txt
index 29d53ef..3fc3b88 100644
--- a/Common/Transforms/Testing/Python/CMakeLists.txt
+++ b/Common/Transforms/Testing/Python/CMakeLists.txt
@@ -1,5 +1,5 @@
-add_test_python(MatrixToTransform.py Graphics)
-add_test_python(TestThinPlateWarp.py Hybrid)
-add_test_python(TestThinPlateWarp3D.py Hybrid)
-add_test_python(cylindrical.py Graphics)
-add_test_python(spherical.py Graphics)
+vtk_add_test_python(MatrixToTransform.py)
+vtk_add_test_python(TestThinPlateWarp.py)
+vtk_add_test_python(TestThinPlateWarp3D.py)
+vtk_add_test_python(cylindrical.py)
+vtk_add_test_python(spherical.py)
diff --git a/Common/Transforms/Testing/Tcl/CMakeLists.txt b/Common/Transforms/Testing/Tcl/CMakeLists.txt
index d087931..42eda5b 100644
--- a/Common/Transforms/Testing/Tcl/CMakeLists.txt
+++ b/Common/Transforms/Testing/Tcl/CMakeLists.txt
@@ -1,8 +1,5 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(cylindrical Graphics)
-  add_test_tcl(spherical Graphics)
-  add_test_tcl(TestThinPlateWarp Hybrid)
-  add_test_tcl(TestThinPlateWarp3D Hybrid)
-endif()
-
-add_test_tcl(MatrixToTransform Graphics)
+vtk_add_test_tcl(cylindrical)
+vtk_add_test_tcl(spherical)
+vtk_add_test_tcl(TestThinPlateWarp)
+vtk_add_test_tcl(TestThinPlateWarp3D)
+vtk_add_test_tcl(MatrixToTransform)
diff --git a/Common/Transforms/module.cmake b/Common/Transforms/module.cmake
index e295399..ae82b14 100644
--- a/Common/Transforms/module.cmake
+++ b/Common/Transforms/module.cmake
@@ -7,4 +7,6 @@ vtk_module(vtkCommonTransforms
     # of vtkCommonMath off but still satisfy the API dependency.
     vtkCommonCore
     vtkCommonMath
+  TEST_DEPENDS
+    vtkTestingCore
   )
diff --git a/Common/Transforms/vtkAbstractTransform.cxx b/Common/Transforms/vtkAbstractTransform.cxx
index c1053fa..f99c705 100644
--- a/Common/Transforms/vtkAbstractTransform.cxx
+++ b/Common/Transforms/vtkAbstractTransform.cxx
@@ -29,8 +29,8 @@ vtkAbstractTransform::vtkAbstractTransform()
   this->MyInverse = NULL;
   this->DependsOnInverse = 0;
   this->InUnRegister = 0;
-  this->UpdateMutex = vtkSimpleCriticalSection::New();
-  this->InverseMutex = vtkSimpleCriticalSection::New();
+  this->UpdateMutex = new vtkSimpleCriticalSection;
+  this->InverseMutex = new vtkSimpleCriticalSection;
 }
 
 //----------------------------------------------------------------------------
@@ -40,14 +40,8 @@ vtkAbstractTransform::~vtkAbstractTransform()
     {
     this->MyInverse->Delete();
     }
-  if (this->UpdateMutex)
-    {
-    this->UpdateMutex->Delete();
-    }
-  if (this->InverseMutex)
-    {
-    this->InverseMutex->Delete();
-    }
+  delete this->UpdateMutex;
+  delete this->InverseMutex;
 }
 
 //----------------------------------------------------------------------------
@@ -335,7 +329,7 @@ void vtkAbstractTransform::UnRegister(vtkObjectBase *o)
   if (this->InUnRegister)
     { // we don't want to go into infinite recursion...
     vtkDebugMacro(<<"UnRegister: circular reference eliminated");
-    this->ReferenceCount--;
+    --this->ReferenceCount;
     return;
     }
 
@@ -417,10 +411,7 @@ vtkTransformConcatenation::~vtkTransformConcatenation()
         }
       }
     }
-  if (this->TransformList)
-    {
-    delete [] this->TransformList;
-    }
+  delete [] this->TransformList;
 }
 
 //----------------------------------------------------------------------------
@@ -453,10 +444,7 @@ void vtkTransformConcatenation::Concatenate(vtkAbstractTransform *trans)
       transList[i].ForwardTransform = this->TransformList[i].ForwardTransform;
       transList[i].InverseTransform = this->TransformList[i].InverseTransform;
       }
-    if (this->TransformList)
-      {
-      delete [] this->TransformList;
-      }
+    delete [] this->TransformList;
     this->TransformList = transList;
     this->MaxNumberOfTransforms = nMax;
     }
@@ -755,10 +743,7 @@ void vtkTransformConcatenation::DeepCopy(vtkTransformConcatenation *concat)
       newList[i].ForwardTransform = NULL;
       newList[i].InverseTransform = NULL;
       }
-    if (this->TransformList)
-      {
-      delete [] this->TransformList;
-      }
+    delete [] this->TransformList;
     this->MaxNumberOfTransforms = newMax;
     this->TransformList = newList;
     }
@@ -866,10 +851,12 @@ void vtkTransformConcatenation::DeepCopy(vtkTransformConcatenation *concat)
     if (this->TransformList[i].ForwardTransform)
       {
       this->TransformList[i].ForwardTransform->Delete();
+      this->TransformList[i].ForwardTransform = NULL;
       }
     if (this->TransformList[i].InverseTransform)
       {
       this->TransformList[i].InverseTransform->Delete();
+      this->TransformList[i].InverseTransform = NULL;
       }
     }
 
@@ -1020,10 +1007,7 @@ vtkTransformConcatenationStack::~vtkTransformConcatenationStack()
     this->StackBottom[i]->Delete();
     }
 
-  if (this->StackBottom)
-    {
-    delete [] this->StackBottom;
-    }
+  delete [] this->StackBottom;
 }
 
 //----------------------------------------------------------------------------
@@ -1061,10 +1045,7 @@ void vtkTransformConcatenationStack::Push(vtkTransformConcatenation **concat)
       {
       newStackBottom[i] = this->StackBottom[i];
       }
-    if (this->StackBottom)
-      {
-      delete [] this->StackBottom;
-      }
+    delete [] this->StackBottom;
     this->StackBottom = newStackBottom;
     this->Stack = this->StackBottom+this->StackSize;
     this->StackSize = newStackSize;
@@ -1095,10 +1076,7 @@ void vtkTransformConcatenationStack::DeepCopy(
       {
       newStackBottom[j] = this->StackBottom[j];
       }
-    if (this->StackBottom)
-      {
-      delete [] this->StackBottom;
-      }
+    delete [] this->StackBottom;
     this->StackBottom = newStackBottom;
     this->Stack = this->StackBottom+this->StackSize;
     this->StackSize = newStackSize;
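
Several vtkAbstractTransform.cxx hunks pair each delete with an immediate reset of the pointer (for example ForwardTransform/InverseTransform in DeepCopy), so later passes over the list cannot dereference or double-release stale memory. A minimal sketch of the reset-after-release habit, with plain new/delete standing in for VTK's reference counting:

    #include <cstddef>

    struct Node { int value; };

    struct List
    {
      Node *slots[4];

      List()
      {
        for (int i = 0; i < 4; i++)
          slots[i] = NULL;
      }

      void Clear()
      {
        for (int i = 0; i < 4; i++)
        {
          delete slots[i];       // release (no-op when already NULL)
          slots[i] = NULL;       // reset so a later pass or second Clear() is safe
        }
      }
    };

    int main()
    {
      List l;
      l.slots[0] = new Node();
      l.Clear();
      l.Clear();                 // safe: pointers were reset, not left dangling
      return 0;
    }
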
diff --git a/Common/Transforms/vtkAbstractTransform.h b/Common/Transforms/vtkAbstractTransform.h
index 0bbaeb1..7b9f4cb 100644
--- a/Common/Transforms/vtkAbstractTransform.h
+++ b/Common/Transforms/vtkAbstractTransform.h
@@ -247,11 +247,11 @@ protected:
 
   // Description:
   // Perform any subclass-specific Update.
-  virtual void InternalUpdate() {};
+  virtual void InternalUpdate() {}
 
   // Description:
   // Perform any subclass-specific DeepCopy.
-  virtual void InternalDeepCopy(vtkAbstractTransform *) {};
+  virtual void InternalDeepCopy(vtkAbstractTransform *) {}
 
   float InternalFloatPoint[3];
   double InternalDoublePoint[3];
@@ -291,7 +291,7 @@ private:
 class vtkTransformPair
 {
 public:
-  vtkTransformPair() {};
+  vtkTransformPair() {}
 
   vtkAbstractTransform *ForwardTransform;
   vtkAbstractTransform *InverseTransform;
diff --git a/Common/Transforms/vtkIdentityTransform.h b/Common/Transforms/vtkIdentityTransform.h
index 472ada4..16d2f72 100644
--- a/Common/Transforms/vtkIdentityTransform.h
+++ b/Common/Transforms/vtkIdentityTransform.h
@@ -62,7 +62,7 @@ public:
 
   // Invert the transformation.  This doesn't do anything to the
   // identity transformation.
-  void Inverse() {};
+  void Inverse() {}
 
   // Description:
   // This will calculate the transformation without calling Update.
diff --git a/Common/Transforms/vtkLinearTransform.cxx b/Common/Transforms/vtkLinearTransform.cxx
index cd33d42..4f29f2f 100644
--- a/Common/Transforms/vtkLinearTransform.cxx
+++ b/Common/Transforms/vtkLinearTransform.cxx
@@ -93,6 +93,47 @@ inline void vtkLinearTransformNormal(T1 mat[4][4],
 }
 
 //------------------------------------------------------------------------
+template <class T1, class T2, class T3>
+inline void vtkLinearTransformPoints(
+  T1 matrix[4][4], T2 *in, T3 *out, vtkIdType n)
+{
+  for (vtkIdType i = 0; i < n; i++)
+    {
+    vtkLinearTransformPoint(matrix, in, out);
+    in += 3;
+    out += 3;
+    }
+}
+
+//------------------------------------------------------------------------
+template <class T1, class T2, class T3>
+inline void vtkLinearTransformVectors(
+  T1 matrix[4][4], T2 *in, T3 *out, vtkIdType n)
+{
+  for (vtkIdType i = 0; i < n; i++)
+    {
+    vtkLinearTransformVector(matrix, in, out);
+    in += 3;
+    out += 3;
+    }
+}
+
+//------------------------------------------------------------------------
+template <class T1, class T2, class T3>
+inline void vtkLinearTransformNormals(
+  T1 matrix[4][4], T2 *in, T3 *out, vtkIdType n)
+{
+  for (vtkIdType i = 0; i < n; i++)
+    {
+    // matrix has been transposed & inverted, so use TransformVector
+    vtkLinearTransformVector(matrix, in, out);
+    vtkMath::Normalize(out);
+    in += 3;
+    out += 3;
+    }
+}
+
+//------------------------------------------------------------------------
 void vtkLinearTransform::InternalTransformPoint(const float in[3],
                                                 float out[3])
 {
@@ -164,14 +205,14 @@ void vtkLinearTransform::TransformPointsNormalsVectors(vtkPoints *inPts,
                                                        vtkDataArray *inVrs,
                                                        vtkDataArray *outVrs)
 {
-  this->TransformPoints(inPts,outPts);
+  this->TransformPoints(inPts, outPts);
   if (inNms)
     {
-    this->TransformNormals(inNms,outNms);
+    this->TransformNormals(inNms, outNms);
     }
   if (inVrs)
     {
-    this->TransformVectors(inVrs,outVrs);
+    this->TransformVectors(inVrs, outVrs);
     }
 }
 
@@ -180,18 +221,51 @@ void vtkLinearTransform::TransformPoints(vtkPoints *inPts,
                                          vtkPoints *outPts)
 {
   vtkIdType n = inPts->GetNumberOfPoints();
+  vtkIdType m = outPts->GetNumberOfPoints();
   double (*matrix)[4] = this->Matrix->Element;
-  double point[3];
 
   this->Update();
 
-  for (vtkIdType i = 0; i < n; i++)
+  // operate directly on the memory to avoid GetPoint()/SetPoint() calls.
+  vtkDataArray *inArray = inPts->GetData();
+  vtkDataArray *outArray = outPts->GetData();
+  int inType = inArray->GetDataType();
+  int outType = outArray->GetDataType();
+  void *inPtr = inArray->GetVoidPointer(0);
+  void *outPtr = outArray->WriteVoidPointer(3*m, 3*n);
+
+  if (inType == VTK_FLOAT && outType == VTK_FLOAT)
+    {
+    vtkLinearTransformPoints(matrix,
+      static_cast<float *>(inPtr), static_cast<float *>(outPtr), n);
+    }
+  else if (inType == VTK_FLOAT && outType == VTK_DOUBLE)
+    {
+    vtkLinearTransformPoints(matrix,
+      static_cast<float *>(inPtr), static_cast<double *>(outPtr), n);
+    }
+  else if (inType == VTK_DOUBLE && outType == VTK_FLOAT)
+    {
+    vtkLinearTransformPoints(matrix,
+      static_cast<double *>(inPtr), static_cast<float *>(outPtr), n);
+    }
+  else if (inType == VTK_DOUBLE && outType == VTK_DOUBLE)
+    {
+    vtkLinearTransformPoints(matrix,
+      static_cast<double *>(inPtr), static_cast<double *>(outPtr), n);
+    }
+  else
     {
-    inPts->GetPoint(i,point);
+    double point[3];
+
+    for (vtkIdType i = 0; i < n; i++)
+      {
+      inPts->GetPoint(i, point);
 
-    vtkLinearTransformPoint(matrix,point,point);
+      vtkLinearTransformPoint(matrix, point, point);
 
-    outPts->InsertNextPoint(point);
+      outPts->SetPoint(m + i, point);
+      }
     }
 }
 
@@ -200,7 +274,7 @@ void vtkLinearTransform::TransformNormals(vtkDataArray *inNms,
                                           vtkDataArray *outNms)
 {
   vtkIdType n = inNms->GetNumberOfTuples();
-  double norm[3];
+  vtkIdType m = outNms->GetNumberOfTuples();
   double matrix[4][4];
 
   this->Update();
@@ -210,36 +284,97 @@ void vtkLinearTransform::TransformNormals(vtkDataArray *inNms,
   vtkMatrix4x4::Invert(*matrix,*matrix);
   vtkMatrix4x4::Transpose(*matrix,*matrix);
 
-  for (vtkIdType i = 0; i < n; i++)
+  // operate directly on the memory to avoid GetTuple()/SetTuple() calls.
+  int inType = inNms->GetDataType();
+  int outType = outNms->GetDataType();
+  void *inPtr = inNms->GetVoidPointer(0);
+  void *outPtr = outNms->WriteVoidPointer(3*m, 3*n);
+
+  if (inType == VTK_FLOAT && outType == VTK_FLOAT)
+    {
+    vtkLinearTransformNormals(matrix,
+      static_cast<float *>(inPtr), static_cast<float *>(outPtr), n);
+    }
+  else if (inType == VTK_FLOAT && outType == VTK_DOUBLE)
     {
-    inNms->GetTuple(i,norm);
+    vtkLinearTransformNormals(matrix,
+      static_cast<float *>(inPtr), static_cast<double *>(outPtr), n);
+    }
+  else if (inType == VTK_DOUBLE && outType == VTK_FLOAT)
+    {
+    vtkLinearTransformNormals(matrix,
+      static_cast<double *>(inPtr), static_cast<float *>(outPtr), n);
+    }
+  else if (inType == VTK_DOUBLE && outType == VTK_DOUBLE)
+    {
+    vtkLinearTransformNormals(matrix,
+      static_cast<double *>(inPtr), static_cast<double *>(outPtr), n);
+    }
+  else
+    {
+    for (vtkIdType i = 0; i < n; i++)
+      {
+      double norm[3];
 
-    // use TransformVector because matrix is already transposed & inverted
-    vtkLinearTransformVector(matrix,norm,norm);
-    vtkMath::Normalize(norm);
+      inNms->GetTuple(i, norm);
 
-    outNms->InsertNextTuple(norm);
+      // use TransformVector because matrix is already transposed & inverted
+      vtkLinearTransformVector(matrix, norm, norm);
+      vtkMath::Normalize(norm);
+
+      outNms->SetTuple(m + i, norm);
+      }
     }
 }
 
 //----------------------------------------------------------------------------
-void vtkLinearTransform::TransformVectors(vtkDataArray *inNms,
-                                          vtkDataArray *outNms)
+void vtkLinearTransform::TransformVectors(vtkDataArray *inVrs,
+                                          vtkDataArray *outVrs)
 {
-  vtkIdType n = inNms->GetNumberOfTuples();
-  double vec[3];
+  vtkIdType n = inVrs->GetNumberOfTuples();
+  vtkIdType m = outVrs->GetNumberOfTuples();
+
+  double (*matrix)[4] = this->Matrix->Element;
 
   this->Update();
 
-  double (*matrix)[4] = this->Matrix->Element;
+  // operate directly on the memory to avoid GetTuple()/SetTuple() calls.
+  int inType = inVrs->GetDataType();
+  int outType = outVrs->GetDataType();
+  void *inPtr = inVrs->GetVoidPointer(0);
+  void *outPtr = outVrs->WriteVoidPointer(3*m, 3*n);
 
-  for (vtkIdType i = 0; i < n; i++)
+  if (inType == VTK_FLOAT && outType == VTK_FLOAT)
+    {
+    vtkLinearTransformVectors(matrix,
+      static_cast<float *>(inPtr), static_cast<float *>(outPtr), n);
+    }
+  else if (inType == VTK_FLOAT && outType == VTK_DOUBLE)
     {
-    inNms->GetTuple(i,vec);
+    vtkLinearTransformVectors(matrix,
+      static_cast<float *>(inPtr), static_cast<double *>(outPtr), n);
+    }
+  else if (inType == VTK_DOUBLE && outType == VTK_FLOAT)
+    {
+    vtkLinearTransformVectors(matrix,
+      static_cast<double *>(inPtr), static_cast<float *>(outPtr), n);
+    }
+  else if (inType == VTK_DOUBLE && outType == VTK_DOUBLE)
+    {
+    vtkLinearTransformVectors(matrix,
+      static_cast<double *>(inPtr), static_cast<double *>(outPtr), n);
+    }
+  else
+    {
+    for (vtkIdType i = 0; i < n; i++)
+      {
+      double vec[3];
+
+      inVrs->GetTuple(i, vec);
 
-    vtkLinearTransformVector(matrix,vec,vec);
+      vtkLinearTransformVector(matrix, vec, vec);
 
-    outNms->InsertNextTuple(vec);
+      outVrs->SetTuple(m + i, vec);
+      }
     }
 }
-
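
The reworked TransformPoints/TransformNormals/TransformVectors paths dispatch once on the arrays' native data types and then run a templated loop over the raw xyz triples, falling back to GetPoint/GetTuple only for other types. A minimal sketch of that dispatch pattern, with a stand-in type code instead of the real VTK_FLOAT/VTK_DOUBLE constants and a simple translation in place of the 4x4 matrix:

    #include <cstdio>

    enum TypeCode { TYPE_FLOAT, TYPE_DOUBLE };  // stand-ins for VTK_FLOAT/VTK_DOUBLE

    // Templated inner loop: walks raw xyz triples of any input/output precision.
    template <class TIn, class TOut>
    static void TranslatePoints(const double offset[3],
                                const TIn *in, TOut *out, int n)
    {
      for (int i = 0; i < n; i++, in += 3, out += 3)
      {
        out[0] = static_cast<TOut>(in[0] + offset[0]);
        out[1] = static_cast<TOut>(in[1] + offset[1]);
        out[2] = static_cast<TOut>(in[2] + offset[2]);
      }
    }

    // Runtime dispatch on the arrays' native types, mirroring TransformPoints above.
    static void TranslateDispatch(const double offset[3],
                                  int inType, const void *in,
                                  int outType, void *out, int n)
    {
      if (inType == TYPE_FLOAT && outType == TYPE_FLOAT)
        TranslatePoints(offset, static_cast<const float *>(in),
                        static_cast<float *>(out), n);
      else if (inType == TYPE_FLOAT && outType == TYPE_DOUBLE)
        TranslatePoints(offset, static_cast<const float *>(in),
                        static_cast<double *>(out), n);
      else if (inType == TYPE_DOUBLE && outType == TYPE_FLOAT)
        TranslatePoints(offset, static_cast<const double *>(in),
                        static_cast<float *>(out), n);
      else
        TranslatePoints(offset, static_cast<const double *>(in),
                        static_cast<double *>(out), n);
    }

    int main()
    {
      const double offset[3] = {1.0, 2.0, 3.0};
      float in[6] = {0, 0, 0, 1, 1, 1};
      double out[6];
      TranslateDispatch(offset, TYPE_FLOAT, in, TYPE_DOUBLE, out, 2);
      std::printf("%g %g %g | %g %g %g\n",
                  out[0], out[1], out[2], out[3], out[4], out[5]);
      return 0;
    }
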
diff --git a/Common/Transforms/vtkLinearTransform.h b/Common/Transforms/vtkLinearTransform.h
index 809bef8..69ad8a8 100644
--- a/Common/Transforms/vtkLinearTransform.h
+++ b/Common/Transforms/vtkLinearTransform.h
@@ -182,8 +182,8 @@ public:
                                    double derivative[3][3]);
 
 protected:
-  vtkLinearTransform() {};
-  ~vtkLinearTransform() {};
+  vtkLinearTransform() {}
+  ~vtkLinearTransform() {}
 private:
   vtkLinearTransform(const vtkLinearTransform&);  // Not implemented.
   void operator=(const vtkLinearTransform&);  // Not implemented.
diff --git a/Common/Transforms/vtkThinPlateSplineTransform.cxx b/Common/Transforms/vtkThinPlateSplineTransform.cxx
index c0ba151..ecafa25 100644
--- a/Common/Transforms/vtkThinPlateSplineTransform.cxx
+++ b/Common/Transforms/vtkThinPlateSplineTransform.cxx
@@ -725,13 +725,13 @@ void vtkThinPlateSplineTransform::InternalDeepCopy(
 
 //------------------------------------------------------------------------
 // a very basic radial basis function
-double vtkRBFr(double r)
+static double vtkRBFr(double r)
 {
   return r;
 }
 
 // calculate both phi(r) its derivative wrt r
-double vtkRBFDRr(double r, double &dUdr)
+static double vtkRBFDRr(double r, double &dUdr)
 {
   dUdr = 1;
   return r;
@@ -739,7 +739,7 @@ double vtkRBFDRr(double r, double &dUdr)
 
 //------------------------------------------------------------------------
 // the standard 2D thin plate spline basis function
-double vtkRBFr2logr(double r)
+static double vtkRBFr2logr(double r)
 {
   if (r)
     {
@@ -752,7 +752,7 @@ double vtkRBFr2logr(double r)
 }
 
 // calculate both phi(r) its derivative wrt r
-double vtkRBFDRr2logr(double r, double &dUdr)
+static double vtkRBFDRr2logr(double r, double &dUdr)
 {
   if (r)
     {
diff --git a/Common/Transforms/vtkTransformCollection.h b/Common/Transforms/vtkTransformCollection.h
index f0526b4..69f7e96 100644
--- a/Common/Transforms/vtkTransformCollection.h
+++ b/Common/Transforms/vtkTransformCollection.h
@@ -53,8 +53,8 @@ public:
   //ETX
 
 protected:
-  vtkTransformCollection() {};
-  ~vtkTransformCollection() {};
+  vtkTransformCollection() {}
+  ~vtkTransformCollection() {}
 
 
 private:
diff --git a/Domains/Chemistry/CMakeLists.txt b/Domains/Chemistry/CMakeLists.txt
index 5d19a13..7f94210 100644
--- a/Domains/Chemistry/CMakeLists.txt
+++ b/Domains/Chemistry/CMakeLists.txt
@@ -26,7 +26,10 @@ set_source_files_properties(
 
 # Generate configured header file
 configure_file("${CMAKE_CURRENT_SOURCE_DIR}/vtkChemistryConfigure.h.in"
-  "${CMAKE_CURRENT_BINARY_DIR}/vtkChemistryConfigure.h" IMMEDIATE)
+  "${CMAKE_CURRENT_BINARY_DIR}/vtkChemistryConfigure.h")
 
 set(${vtk-module}_NO_HeaderTest 1) # TODO: Fix headers and enable test.
 vtk_module_library(${vtk-module} ${Module_SRCS})
+
+install(FILES elements.xml COPYING
+  DESTINATION ${VTK_INSTALL_DATA_DIR}/vtkDomainsChemistry)
diff --git a/Domains/Chemistry/Testing/Cxx/CMakeLists.txt b/Domains/Chemistry/Testing/Cxx/CMakeLists.txt
index 67f59b4..e538c66 100644
--- a/Domains/Chemistry/Testing/Cxx/CMakeLists.txt
+++ b/Domains/Chemistry/Testing/Cxx/CMakeLists.txt
@@ -1,4 +1,4 @@
-set(MyTests
+vtk_add_test_cxx(
   TestBallAndStick.cxx
   TestPDBBallAndStick.cxx
   TestBondColorModeDiscreteByAtom.cxx
@@ -6,50 +6,18 @@ set(MyTests
   TestCompositeRender.cxx
   TestFastRender.cxx
   TestLiquoriceSticks.cxx
-  TestMolecule.cxx
-  TestMoleculeSelection.cxx
+  TestMolecule.cxx,NO_VALID
+  TestMoleculeSelection.cxx,NO_VALID
   TestMoleculeMapperPropertyUpdate.cxx
   TestMultiCylinderOn.cxx
   TestMultiCylinderOff.cxx
-  TestPeriodicTable.cxx
-  TestProgrammableElectronicData.cxx
+  TestPeriodicTable.cxx,NO_VALID
+  TestProgrammableElectronicData.cxx,NO_VALID
   TestProteinRibbon.cxx
-  TestSimpleBondPerceiver.cxx
+  TestSimpleBondPerceiver.cxx,NO_VALID
   TestVDWSpheres.cxx
   )
 
-# Tests with data
-if(VTK_DATA_ROOT)
-  set(MyTests
-    ${MyTests}
-    TestCMLMoleculeReader.cxx
-    )
-endif()
+vtk_add_test_cxx(TestCMLMoleculeReader.cxx)
 
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    if(${${TName}Error})
-      set(_error_threshold ${${TName}Error})
-    else()
-      set(_error_threshold 10)
-    endif()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Chemistry/${TName}.png
-        -E ${_error_threshold})
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach ()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Domains/Chemistry/Testing/Cxx/TestBallAndStick.cxx b/Domains/Chemistry/Testing/Cxx/TestBallAndStick.cxx
index 5bc0ca2..b258d9a 100644
--- a/Domains/Chemistry/Testing/Cxx/TestBallAndStick.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestBallAndStick.cxx
@@ -47,19 +47,19 @@ int TestBallAndStick(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Cxx/TestBondColorModeDiscreteByAtom.cxx b/Domains/Chemistry/Testing/Cxx/TestBondColorModeDiscreteByAtom.cxx
index f9a0e73..b439546 100644
--- a/Domains/Chemistry/Testing/Cxx/TestBondColorModeDiscreteByAtom.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestBondColorModeDiscreteByAtom.cxx
@@ -45,19 +45,19 @@ int TestBondColorModeDiscreteByAtom(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Cxx/TestBondColorModeSingleColor.cxx b/Domains/Chemistry/Testing/Cxx/TestBondColorModeSingleColor.cxx
index 828a224..0e9748e 100644
--- a/Domains/Chemistry/Testing/Cxx/TestBondColorModeSingleColor.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestBondColorModeSingleColor.cxx
@@ -45,19 +45,19 @@ int TestBondColorModeSingleColor(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Cxx/TestCompositeRender.cxx b/Domains/Chemistry/Testing/Cxx/TestCompositeRender.cxx
index 1dc799f..099810c 100644
--- a/Domains/Chemistry/Testing/Cxx/TestCompositeRender.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestCompositeRender.cxx
@@ -44,19 +44,19 @@ int TestCompositeRender(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   // Opaque balls and sticks
   vtkNew<vtkMoleculeMapper> bsMapper;
diff --git a/Domains/Chemistry/Testing/Cxx/TestFastRender.cxx b/Domains/Chemistry/Testing/Cxx/TestFastRender.cxx
index c1cb51e..10f76ed 100644
--- a/Domains/Chemistry/Testing/Cxx/TestFastRender.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestFastRender.cxx
@@ -45,19 +45,19 @@ int TestFastRender(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Cxx/TestLiquoriceSticks.cxx b/Domains/Chemistry/Testing/Cxx/TestLiquoriceSticks.cxx
index b99b7d4..39beca3 100644
--- a/Domains/Chemistry/Testing/Cxx/TestLiquoriceSticks.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestLiquoriceSticks.cxx
@@ -45,19 +45,19 @@ int TestLiquoriceSticks(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Cxx/TestMolecule.cxx b/Domains/Chemistry/Testing/Cxx/TestMolecule.cxx
index 431f0e5..3064eae 100644
--- a/Domains/Chemistry/Testing/Cxx/TestMolecule.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestMolecule.cxx
@@ -48,14 +48,14 @@ bool MoleculeExampleCode1()
     ++errors;
     }
 
-  if (!h1.GetAtomicNumber() == 1)
+  if (h1.GetAtomicNumber() != 1)
     {
     cout << "Error atomic number incorrect. Expected 1 but got "
          << h1.GetAtomicNumber() << endl;
     ++errors;
     }
 
-  if (!h2.GetAtomicNumber() == 1)
+  if (h2.GetAtomicNumber() != 1)
     {
     cout << "Error atomic number incorrect. Expected 1 but got "
          << h2.GetAtomicNumber() << endl;
@@ -105,14 +105,14 @@ bool MoleculeExampleCode2()
     ++errors;
     }
 
-  if (!h1.GetAtomicNumber() == 1)
+  if (h1.GetAtomicNumber() != 1)
     {
     cout << "Error atomic number incorrect. Expected 1 but got "
          << h1.GetAtomicNumber() << endl;
     ++errors;
     }
 
-  if (!h2.GetAtomicNumber() == 1)
+  if (h2.GetAtomicNumber() != 1)
     {
     cout << "Error atomic number incorrect. Expected 1 but got "
          << h2.GetAtomicNumber() << endl;
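
The TestMolecule change fixes an operator-precedence bug: ! binds tighter than ==, so !x == 1 parses as (!x) == 1, which is false for every non-zero x and therefore never reports a wrong atomic number. A minimal sketch:

    #include <cstdio>

    int main()
    {
      int atomicNumber = 2;  // deliberately wrong value (expected 1)

      // Buggy form: parsed as (!atomicNumber) == 1 -> (0) == 1 -> false,
      // so the error branch is skipped even though the value is wrong.
      // Recent GCC/Clang warn about this pattern (-Wlogical-not-parentheses).
      if (!atomicNumber == 1)
      {
        std::printf("buggy check caught the bad value\n");
      }

      // Fixed form: compare the value itself.
      if (atomicNumber != 1)
      {
        std::printf("fixed check caught the bad value\n");
      }

      return 0;
    }
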
diff --git a/Domains/Chemistry/Testing/Cxx/TestMoleculeMapperPropertyUpdate.cxx b/Domains/Chemistry/Testing/Cxx/TestMoleculeMapperPropertyUpdate.cxx
index 8809946..a3c027e 100644
--- a/Domains/Chemistry/Testing/Cxx/TestMoleculeMapperPropertyUpdate.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestMoleculeMapperPropertyUpdate.cxx
@@ -47,19 +47,19 @@ int TestMoleculeMapperPropertyUpdate(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Cxx/TestMoleculeSelection.cxx b/Domains/Chemistry/Testing/Cxx/TestMoleculeSelection.cxx
index 5ce3379..d5bc5c7 100644
--- a/Domains/Chemistry/Testing/Cxx/TestMoleculeSelection.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestMoleculeSelection.cxx
@@ -166,30 +166,30 @@ int TestMoleculeSelection(int argc, char *argv[])
   vtkAtom a16 = mol->AppendAtom(16, 3.0, 3.0, 0.0);
 
   // Add bonds along the grid
-  vtkBond b1  = mol->AppendBond( a1,  a2, 1);
-  vtkBond b2  = mol->AppendBond( a2,  a3, 1);
-  vtkBond b3  = mol->AppendBond( a3,  a4, 1);
-  vtkBond b4  = mol->AppendBond( a5,  a6, 1);
-  vtkBond b5  = mol->AppendBond( a6,  a7, 1);
-  vtkBond b6  = mol->AppendBond( a7,  a8, 1);
-  vtkBond b7  = mol->AppendBond( a9, a10, 1);
-  vtkBond b8  = mol->AppendBond(a10, a11, 1);
-  vtkBond b9  = mol->AppendBond(a11, a12, 1);
-  vtkBond b10 = mol->AppendBond(a13, a14, 1);
-  vtkBond b11 = mol->AppendBond(a14, a15, 1);
-  vtkBond b12 = mol->AppendBond(a15, a16, 1);
-  vtkBond b13 = mol->AppendBond( a1,  a5, 1);
-  vtkBond b14 = mol->AppendBond( a2,  a6, 1);
-  vtkBond b15 = mol->AppendBond( a3,  a7, 1);
-  vtkBond b16 = mol->AppendBond( a4,  a8, 1);
-  vtkBond b17 = mol->AppendBond( a5,  a9, 1);
-  vtkBond b18 = mol->AppendBond( a6, a10, 1);
-  vtkBond b19 = mol->AppendBond( a7, a11, 1);
-  vtkBond b20 = mol->AppendBond( a8, a12, 1);
-  vtkBond b21 = mol->AppendBond( a9, a13, 1);
-  vtkBond b22 = mol->AppendBond(a10, a14, 1);
-  vtkBond b23 = mol->AppendBond(a11, a15, 1);
-  vtkBond b24 = mol->AppendBond(a12, a16, 1);
+  mol->AppendBond( a1,  a2, 1);
+  mol->AppendBond( a2,  a3, 1);
+  mol->AppendBond( a3,  a4, 1);
+  mol->AppendBond( a5,  a6, 1);
+  mol->AppendBond( a6,  a7, 1);
+  mol->AppendBond( a7,  a8, 1);
+  mol->AppendBond( a9, a10, 1);
+  mol->AppendBond(a10, a11, 1);
+  mol->AppendBond(a11, a12, 1);
+  mol->AppendBond(a13, a14, 1);
+  mol->AppendBond(a14, a15, 1);
+  mol->AppendBond(a15, a16, 1);
+  mol->AppendBond( a1,  a5, 1);
+  mol->AppendBond( a2,  a6, 1);
+  mol->AppendBond( a3,  a7, 1);
+  mol->AppendBond( a4,  a8, 1);
+  mol->AppendBond( a5,  a9, 1);
+  mol->AppendBond( a6, a10, 1);
+  mol->AppendBond( a7, a11, 1);
+  mol->AppendBond( a8, a12, 1);
+  mol->AppendBond( a9, a13, 1);
+  mol->AppendBond(a10, a14, 1);
+  mol->AppendBond(a11, a15, 1);
+  mol->AppendBond(a12, a16, 1);
 
   // Set up render engine
   vtkNew<vtkMoleculeMapper> molmapper;
diff --git a/Domains/Chemistry/Testing/Cxx/TestMultiCylinderOff.cxx b/Domains/Chemistry/Testing/Cxx/TestMultiCylinderOff.cxx
index b36fe2e..b3cbbff 100644
--- a/Domains/Chemistry/Testing/Cxx/TestMultiCylinderOff.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestMultiCylinderOff.cxx
@@ -45,19 +45,19 @@ int TestMultiCylinderOff(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Cxx/TestMultiCylinderOn.cxx b/Domains/Chemistry/Testing/Cxx/TestMultiCylinderOn.cxx
index e7be33a..c8f922b 100644
--- a/Domains/Chemistry/Testing/Cxx/TestMultiCylinderOn.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestMultiCylinderOn.cxx
@@ -45,19 +45,19 @@ int TestMultiCylinderOn(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Cxx/TestVDWSpheres.cxx b/Domains/Chemistry/Testing/Cxx/TestVDWSpheres.cxx
index c0ee8c4..df1fbd2 100644
--- a/Domains/Chemistry/Testing/Cxx/TestVDWSpheres.cxx
+++ b/Domains/Chemistry/Testing/Cxx/TestVDWSpheres.cxx
@@ -45,19 +45,19 @@ int TestVDWSpheres(int, char *[])
   vtkAtom H5  = mol->AppendAtom(1,  2.3344947615,  1.8381683043,  0.9310726537);
   vtkAtom H6  = mol->AppendAtom(1, -2.1991803919,  3.3206134015,  0.9413825084);
 
-  vtkBond B1  = mol->AppendBond( C1,  C5, 1);
-  vtkBond B2  = mol->AppendBond( C1,  C2, 2);
-  vtkBond B3  = mol->AppendBond( C2,  C3, 1);
-  vtkBond B4  = mol->AppendBond( C3,  C4, 3);
-  vtkBond B5  = mol->AppendBond( C4,  C6, 1);
-  vtkBond B6  = mol->AppendBond( C5,  O2, 2);
-  vtkBond B7  = mol->AppendBond( C6,  O1, 1);
-  vtkBond B8  = mol->AppendBond( C5,  H6, 1);
-  vtkBond B9  = mol->AppendBond( C1,  H1, 1);
-  vtkBond B10 = mol->AppendBond( C2,  H2, 1);
-  vtkBond B11 = mol->AppendBond( C6,  H3, 1);
-  vtkBond B12 = mol->AppendBond( C6,  H4, 1);
-  vtkBond B13 = mol->AppendBond( O1,  H5, 1);
+  mol->AppendBond( C1,  C5, 1);
+  mol->AppendBond( C1,  C2, 2);
+  mol->AppendBond( C2,  C3, 1);
+  mol->AppendBond( C3,  C4, 3);
+  mol->AppendBond( C4,  C6, 1);
+  mol->AppendBond( C5,  O2, 2);
+  mol->AppendBond( C6,  O1, 1);
+  mol->AppendBond( C5,  H6, 1);
+  mol->AppendBond( C1,  H1, 1);
+  mol->AppendBond( C2,  H2, 1);
+  mol->AppendBond( C6,  H3, 1);
+  mol->AppendBond( C6,  H4, 1);
+  mol->AppendBond( O1,  H5, 1);
 
   vtkNew<vtkMoleculeMapper> molmapper;
   molmapper->SetInputData(mol.GetPointer());
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestBallAndStick.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestBallAndStick.png.md5
new file mode 100644
index 0000000..495c1f7
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestBallAndStick.png.md5
@@ -0,0 +1 @@
+474ea6023401cc1f58df925a193da3b4
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestBondColorModeDiscreteByAtom.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestBondColorModeDiscreteByAtom.png.md5
new file mode 100644
index 0000000..c405865
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestBondColorModeDiscreteByAtom.png.md5
@@ -0,0 +1 @@
+1a658fd19f6353250975dcaf7a8295dc
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestBondColorModeSingleColor.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestBondColorModeSingleColor.png.md5
new file mode 100644
index 0000000..c68378a
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestBondColorModeSingleColor.png.md5
@@ -0,0 +1 @@
+eeb6152be6869581c343e6eecdaed223
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestCMLMoleculeReader.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestCMLMoleculeReader.png.md5
new file mode 100644
index 0000000..6183dcc
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestCMLMoleculeReader.png.md5
@@ -0,0 +1 @@
+68725a0029e45ad2ea648ad95f798e59
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestCompositeRender.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestCompositeRender.png.md5
new file mode 100644
index 0000000..9e72040
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestCompositeRender.png.md5
@@ -0,0 +1 @@
+272fb2ecfb192560c4f50036889b2417
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestFastRender.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestFastRender.png.md5
new file mode 100644
index 0000000..a9cfaf1
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestFastRender.png.md5
@@ -0,0 +1 @@
+649d4b9fc33dd46238990c353e2f315c
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestLiquoriceSticks.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestLiquoriceSticks.png.md5
new file mode 100644
index 0000000..5eb10b2
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestLiquoriceSticks.png.md5
@@ -0,0 +1 @@
+8c4ca391739c832acf5ef92957a6ed9b
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestMoleculeMapperPropertyUpdate.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestMoleculeMapperPropertyUpdate.png.md5
new file mode 100644
index 0000000..25becd7
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestMoleculeMapperPropertyUpdate.png.md5
@@ -0,0 +1 @@
+3f8274df5927fe46ac8f117df6631242
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestMultiCylinderOff.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestMultiCylinderOff.png.md5
new file mode 100644
index 0000000..7e396a6
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestMultiCylinderOff.png.md5
@@ -0,0 +1 @@
+07e1e78876681311737d84b90ed1a3da
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestMultiCylinderOn.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestMultiCylinderOn.png.md5
new file mode 100644
index 0000000..c405865
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestMultiCylinderOn.png.md5
@@ -0,0 +1 @@
+1a658fd19f6353250975dcaf7a8295dc
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestPDBBallAndStick.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestPDBBallAndStick.png.md5
new file mode 100644
index 0000000..fe0fc2d
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestPDBBallAndStick.png.md5
@@ -0,0 +1 @@
+76a52e5312c8781856c7668fbb261bc4
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestProteinRibbon.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestProteinRibbon.png.md5
new file mode 100644
index 0000000..55def5c
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestProteinRibbon.png.md5
@@ -0,0 +1 @@
+2065559d2f0c530b58da7ee0b9cb48db
diff --git a/Domains/Chemistry/Testing/Data/Baseline/TestVDWSpheres.png.md5 b/Domains/Chemistry/Testing/Data/Baseline/TestVDWSpheres.png.md5
new file mode 100644
index 0000000..37153b5
--- /dev/null
+++ b/Domains/Chemistry/Testing/Data/Baseline/TestVDWSpheres.png.md5
@@ -0,0 +1 @@
+0bf89acf2d346f63408e0752e6e2a6d0
diff --git a/Domains/Chemistry/module.cmake b/Domains/Chemistry/module.cmake
index c0b4445..89872e6 100644
--- a/Domains/Chemistry/module.cmake
+++ b/Domains/Chemistry/module.cmake
@@ -3,8 +3,10 @@ vtk_module(vtkDomainsChemistry
     StandAlone
   DEPENDS
     vtkCommonDataModel
-    vtkIOXML
     vtkRenderingCore
+  PRIVATE_DEPENDS
+    vtkIOXML
+    vtkFiltersSources
   TEST_DEPENDS
     vtkTestingCore
     vtkTestingRendering
diff --git a/Domains/Chemistry/vtkChemistryConfigure.h.in b/Domains/Chemistry/vtkChemistryConfigure.h.in
index 42f4ee7..45981b5 100644
--- a/Domains/Chemistry/vtkChemistryConfigure.h.in
+++ b/Domains/Chemistry/vtkChemistryConfigure.h.in
@@ -17,7 +17,7 @@
 #define __vtkDomainsChemistryConfigure_h
 
 /* Where the Blue Obelisk Data Repository files are installed */
-#define VTK_BODR_DATA_PATH "@CMAKE_INSTALL_PREFIX@/@VTK_INSTALL_SHARE_DIR_CM24@/vtkChemistry/vtkBODRData"
+#define VTK_BODR_DATA_PATH "@CMAKE_INSTALL_PREFIX@/@VTK_INSTALL_DATA_DIR@/vtkDomainsChemistry"
 #define VTK_BODR_DATA_PATH_BUILD "@VTK_SOURCE_DIR@/Domains/Chemistry"
 
 #endif
diff --git a/Domains/Chemistry/vtkPeriodicTable.cxx b/Domains/Chemistry/vtkPeriodicTable.cxx
index b92e9ab..23e20c7 100644
--- a/Domains/Chemistry/vtkPeriodicTable.cxx
+++ b/Domains/Chemistry/vtkPeriodicTable.cxx
@@ -27,7 +27,7 @@
 #include "vtkStringArray.h"
 #include "vtkUnsignedShortArray.h"
 
-#include <assert.h>
+#include <cassert>
 #include <cctype>
 #include <cstring>
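
The <assert.h> to <cassert> switch above is just the C++ spelling of the same C header. A trivial standalone illustration (not VTK code):

    // Sketch: <cassert> provides the same assert() macro as <assert.h>,
    // under the C++ header naming convention.
    #include <cassert>

    int main()
    {
      int hydrogenAtomicNumber = 1;   // illustrative value only
      assert(hydrogenAtomicNumber == 1);
      return 0;
    }
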
 
diff --git a/Examples/AMR/Cxx/CMakeLists.txt b/Examples/AMR/Cxx/CMakeLists.txt
index 9c97349..023468a 100644
--- a/Examples/AMR/Cxx/CMakeLists.txt
+++ b/Examples/AMR/Cxx/CMakeLists.txt
@@ -3,27 +3,22 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 PROJECT (AMR)
 
 if(NOT VTK_SOURCE_DIR)
-  message(ERROR "Cannot build AMR examples without VTK_SOURCE_DIR")
+  message(ERROR " Cannot build AMR examples without VTK_SOURCE_DIR")
 endif()
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkCommonExecutionModel
-    vtkFiltersAMR
-    vtkFiltersCore
-    vtkFiltersExtraction
-    vtkFiltersGeneral
-    vtkFiltersGeometry
-    vtkFiltersSources
-    vtkIOXML
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkTestingCore
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersAMR
+  vtkFiltersExtraction
+  vtkFiltersGeometry
+  vtkFiltersSources
+  vtkIOLegacy
+  vtkIOXML
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+  vtkTestingCore
 )
 include(${VTK_USE_FILE})
-endif()
 
 include_directories(
   ${VTK_SOURCE_DIR}/Testing/Core
diff --git a/Examples/AMR/Cxx/Generate3DAMRDataSetWithPulse.cxx b/Examples/AMR/Cxx/Generate3DAMRDataSetWithPulse.cxx
index e642b26..5247aa6 100644
--- a/Examples/AMR/Cxx/Generate3DAMRDataSetWithPulse.cxx
+++ b/Examples/AMR/Cxx/Generate3DAMRDataSetWithPulse.cxx
@@ -41,7 +41,7 @@
 #include "vtkAMRBox.h"
 #include "AMRCommon.h"
 
-struct PulseAttributes {
+static struct PulseAttributes {
   double origin[3]; // xyz for the center of the pulse
   double width[3];  // the width of the pulse
   double amplitude; // the amplitude of the pulse
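
The added "static" above gives the file-scope pulse structure internal linkage, so the definition stays private to that translation unit. A hedged, self-contained sketch of the idea with made-up names (PulseSketch and SketchPulse are not from the example):

    // Sketch: a static file-scope aggregate cannot collide at link time
    // with a same-named definition in another source file.
    static struct PulseSketch
    {
      double origin[3];   // xyz for the center of the pulse
      double width[3];    // the width of the pulse
      double amplitude;   // the amplitude of the pulse
    } SketchPulse = { {0.0, 0.0, 0.0}, {0.5, 0.5, 0.5}, 1.0 };

    int main()
    {
      return SketchPulse.amplitude > 0.0 ? 0 : 1;
    }
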
diff --git a/Examples/Annotation/Cxx/LabeledMesh/CMakeLists.txt b/Examples/Annotation/Cxx/LabeledMesh/CMakeLists.txt
index a0c38ed..3d76981 100644
--- a/Examples/Annotation/Cxx/LabeledMesh/CMakeLists.txt
+++ b/Examples/Annotation/Cxx/LabeledMesh/CMakeLists.txt
@@ -2,22 +2,15 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT (LabeledMesh)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkFiltersCore
-    vtkFiltersGeneral
-    vtkFiltersSources
-    vtkRenderingCore
-    vtkRenderingLabel
-    vtkRenderingOpenGL
-    vtkRenderingFreeTypeOpenGL
-  )
-  include (${VTK_USE_FILE})
-endif()
-
-set(Libraries ${VTK_LIBRARIES})
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkCommonDataModel
+  vtkFiltersSources
+  vtkInteractionStyle
+  vtkRenderingLabel
+  vtkRenderingOpenGL
+)
+include(${VTK_USE_FILE})
 
 add_executable(${PROJECT_NAME} MACOSX_BUNDLE ${PROJECT_NAME}.cxx)
-target_link_libraries(${PROJECT_NAME} ${Libraries})
+target_link_libraries(${PROJECT_NAME} ${VTK_LIBRARIES})
diff --git a/Examples/Build/vtkLocal/CMakeLists.txt b/Examples/Build/vtkLocal/CMakeLists.txt
index 75c3869..82f48ed 100644
--- a/Examples/Build/vtkLocal/CMakeLists.txt
+++ b/Examples/Build/vtkLocal/CMakeLists.txt
@@ -3,7 +3,8 @@ cmake_minimum_required(VERSION 2.8)
 
 # Find and load VTK settings.
 if(NOT VTK_BINARY_DIR)
-  find_package(VTK 6.0 REQUIRED NO_MODULE)
+#  find_package(VTK 6.1 REQUIRED NO_MODULE)
+  find_package(VTK vtkCommonCore)
   include(${VTK_USE_FILE})
 endif()
 
diff --git a/Examples/Build/vtkMy/Common/vtkBar.h b/Examples/Build/vtkMy/Common/vtkBar.h
index ad1ff4e..1df295e 100644
--- a/Examples/Build/vtkMy/Common/vtkBar.h
+++ b/Examples/Build/vtkMy/Common/vtkBar.h
@@ -29,8 +29,8 @@ public:
   vtkTypeMacro(vtkBar,vtkObject);
 
 protected:
-  vtkBar() {};
-  ~vtkBar() {};
+  vtkBar() {}
+  ~vtkBar() {}
 private:
   vtkBar(const vtkBar&);  // Not implemented.
   void operator=(const vtkBar&);  // Not implemented.
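
The "{};" to "{}" change above removes a stray semicolon after the empty inline bodies; the extra semicolon is an empty declaration that pedantic compilers warn about. A minimal standalone sketch of the cleaned-up header pattern (BarSketch is a hypothetical class, not vtkBar):

    // Sketch: empty inline constructor/destructor bodies take no trailing ';'.
    class BarSketch
    {
    public:
      BarSketch() {}    // no ';' after the body
      ~BarSketch() {}

    private:
      BarSketch(const BarSketch&);       // Not implemented.
      void operator=(const BarSketch&);  // Not implemented.
    };

    int main()
    {
      BarSketch b;
      (void)b;
      return 0;
    }
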
diff --git a/Examples/Build/vtkMy/Imaging/vtkImageFoo.cxx b/Examples/Build/vtkMy/Imaging/vtkImageFoo.cxx
index 35f9439..92ebba6 100644
--- a/Examples/Build/vtkMy/Imaging/vtkImageFoo.cxx
+++ b/Examples/Build/vtkMy/Imaging/vtkImageFoo.cxx
@@ -97,7 +97,7 @@ void vtkImageFooExecute(vtkImageFoo* self,
   inData->GetContinuousIncrements(outExt, inIncX, inIncY, inIncZ);
   outData->GetContinuousIncrements(outExt, outIncX, outIncY, outIncZ);
 
-  // Loop through ouput pixels
+  // Loop through output pixels
 
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
diff --git a/Examples/Build/vtkMy/Unsorted/vtkBar2.h b/Examples/Build/vtkMy/Unsorted/vtkBar2.h
index 6771b99..90050a5 100644
--- a/Examples/Build/vtkMy/Unsorted/vtkBar2.h
+++ b/Examples/Build/vtkMy/Unsorted/vtkBar2.h
@@ -29,8 +29,8 @@ public:
   vtkTypeMacro(vtkBar2,vtkObject);
 
 protected:
-  vtkBar2() {};
-  ~vtkBar2() {};
+  vtkBar2() {}
+  ~vtkBar2() {}
 private:
   vtkBar2(const vtkBar2&);  // Not implemented.
   void operator=(const vtkBar2&);  // Not implemented.
diff --git a/Examples/CMakeLists.txt b/Examples/CMakeLists.txt
index f0e3fe0..d7fa026 100644
--- a/Examples/CMakeLists.txt
+++ b/Examples/CMakeLists.txt
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
+cmake_minimum_required(VERSION 2.8.7 FATAL_ERROR)
 
 PROJECT(VTKExamples)
 include_regular_expression("^.*$")
@@ -10,20 +10,18 @@ else()
   find_package(VTK REQUIRED)
 endif()
 
-include(${VTK_USE_FILE})
-
 if (NOT vtkRenderingCore_LOADED)
   message(STATUS "vtkRenderingCore not found. No examples will be built")
 else()
   add_subdirectory(AMR/Cxx)
   add_subdirectory(Annotation/Cxx/LabeledMesh)
-  add_subdirectory(MultiBlock/Cxx)
   add_subdirectory(DataManipulation/Cxx)
-  add_subdirectory(Hybrid/Cxx)
   add_subdirectory(ImageProcessing/Cxx)
   add_subdirectory(IO/Cxx)
+  add_subdirectory(LIC/Cxx)
   add_subdirectory(Medical/Cxx)
   add_subdirectory(Modelling/Cxx)
+  add_subdirectory(MultiBlock/Cxx)
   add_subdirectory(Rendering/Cxx)
   add_subdirectory(Tutorial/Step1/Cxx)
   add_subdirectory(Tutorial/Step2/Cxx)
@@ -34,7 +32,17 @@ else()
   add_subdirectory(VisualizationAlgorithms/Cxx)
   add_subdirectory(VolumeRendering/Cxx)
   add_subdirectory(Widgets/Cxx)
-  add_subdirectory(Build/vtkLocal)
+  # This may have been already built as part of the VTK build.
+  # If so we cannot create the target "vtkLocalExample" because
+  # an imported target of the same name already exists in the
+  # VTK build.
+  if(NOT vtkLocalExample_LOADED)
+    add_subdirectory(Build/vtkLocal)
+  endif()
+  if(TARGET vtkGUISupportQt)
+    add_subdirectory(GUI/Qt)
+  endif()
+
 #  add_subdirectory(Build/vtkMy)
 #  add_subdirectory(GUI/Motif)
 #  IF(VTK_USE_PARALLEL)
diff --git a/Examples/DataManipulation/Cxx/CMakeLists.txt b/Examples/DataManipulation/Cxx/CMakeLists.txt
index 52030d3..a16f7a5 100644
--- a/Examples/DataManipulation/Cxx/CMakeLists.txt
+++ b/Examples/DataManipulation/Cxx/CMakeLists.txt
@@ -2,18 +2,14 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT (DataManipulation)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkFiltersCore
-    vtkFiltersGeometry
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkInteractionStyle
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkCommonDataModel
+  vtkFiltersGeometry
+  vtkInteractionStyle
+  vtkRenderingOpenGL
 )
 include(${VTK_USE_FILE})
-endif()
 
 add_executable(Arrays MACOSX_BUNDLE Arrays.cxx)
 add_executable(Cube MACOSX_BUNDLE Cube.cxx)
diff --git a/Examples/GUI/Cocoa/CMakeLists.txt b/Examples/GUI/Cocoa/CMakeLists.txt
index 63d702c..7e5ff2a 100644
--- a/Examples/GUI/Cocoa/CMakeLists.txt
+++ b/Examples/GUI/Cocoa/CMakeLists.txt
@@ -52,7 +52,8 @@ set(MACOSX_BUNDLE_NSPRINCIPAL_CLASS "NSApplication")
 add_executable(SimpleCocoaVTK MACOSX_BUNDLE ${SimpleCocoaVTK_SRCS} ${SimpleCocoaVTK_HDRS})
 
 # Probably a better way to set the framework link libraries.
-target_link_libraries(SimpleCocoaVTK vtkRendering "-framework Cocoa -framework OpenGL -framework IOKit")
+target_link_libraries(SimpleCocoaVTK "-framework Cocoa -framework OpenGL -framework IOKit"
+                      vtkRenderingOpenGL vtkRenderingFreeTypeOpenGL vtkInteractionStyle )
 
 # Set a custom plist file for the app bundle
 set_target_properties(SimpleCocoaVTK PROPERTIES MACOSX_BUNDLE_INFO_PLIST ${SimpleCocoaVTK_SOURCE_DIR}/Info-CMake.plist)
diff --git a/Examples/GUI/Qt/CMakeLists.txt b/Examples/GUI/Qt/CMakeLists.txt
index a4e96fc..6d882a4 100644
--- a/Examples/GUI/Qt/CMakeLists.txt
+++ b/Examples/GUI/Qt/CMakeLists.txt
@@ -1,13 +1,11 @@
+add_subdirectory(Events)
+add_subdirectory(FourPaneViewer)
+add_subdirectory(ImageViewer)
+add_subdirectory(SimpleView)
 
-
-ADD_SUBDIRECTORY(ImageViewer)
-ADD_SUBDIRECTORY(SimpleView)
-ADD_SUBDIRECTORY(Events)
-ADD_SUBDIRECTORY(FourPaneViewer)
-
-IF(VTK_USE_QVTK_QTOPENGL)
-  ADD_SUBDIRECTORY(GraphicsView)
-ENDIF(VTK_USE_QVTK_QTOPENGL)
+if(TARGET vtkGUISupportQtOpenGL)
+  add_subdirectory(GraphicsView)
+endif()
 
 CONFIGURE_FILE("${CMAKE_CURRENT_SOURCE_DIR}/CTestCustom.ctest.in"
-  "${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.ctest" @ONLY IMMEDIATE)
+  "${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.ctest" @ONLY)
diff --git a/Examples/GUI/Qt/Events/CMakeLists.txt b/Examples/GUI/Qt/Events/CMakeLists.txt
index 25aa342..df14ddd 100644
--- a/Examples/GUI/Qt/Events/CMakeLists.txt
+++ b/Examples/GUI/Qt/Events/CMakeLists.txt
@@ -1,50 +1,59 @@
-PROJECT(QtEvents)
-
-IF(NOT VTK_BINARY_DIR)
-FIND_PACKAGE(VTK)
-IF(NOT VTK_DIR)
-  MESSAGE(FATAL_ERROR "Please set VTK_DIR.")
-ENDIF(NOT VTK_DIR)
-INCLUDE(${VTK_USE_FILE})
-ENDIF(NOT VTK_BINARY_DIR)
-
-# use what QVTK built with
-SET(QT_QMAKE_EXECUTABLE ${VTK_QT_QMAKE_EXECUTABLE} CACHE FILEPATH "")
-SET(QT_MOC_EXECUTABLE ${VTK_QT_MOC_EXECUTABLE} CACHE FILEPATH "")
-SET(QT_UIC_EXECUTABLE ${VTK_QT_UIC_EXECUTABLE} CACHE FILEPATH "")
-FIND_PACKAGE(Qt4)
-IF(QT_USE_FILE)
-  INCLUDE(${QT_USE_FILE})
-ELSE(QT_USE_FILE)
-  SET(QT_LIBRARIES   ${QT_QT_LIBRARY})
-ENDIF(QT_USE_FILE)
-
-SET (SRCS
-  main.cxx
-)
+project(QtEvents)
 
-# Use the include path and library for Qt that is used by VTK.
-INCLUDE_DIRECTORIES(
-        ${QT_INCLUDE_DIR}
-        ${CMAKE_CURRENT_BINARY_DIR}
-        ${CMAKE_CURRENT_SOURCE_DIR}
+cmake_minimum_required(VERSION 2.8)
+
+if(POLICY CMP0020)
+  cmake_policy(SET CMP0020 NEW)
+endif()
+
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersSources
+  vtkGUISupportQt
 )
+include(${VTK_USE_FILE})
 
-INCLUDE_DIRECTORIES(${VTK_TDX_INCLUDE_PATH})
+if("${VTK_QT_VERSION}" STREQUAL "")
+  message(FATAL_ERROR "VTK was not built with Qt")
+endif()
 
-QT4_WRAP_UI(UI_SRCS GUI4.ui)
-QT4_WRAP_CPP(MOC_SRCS GUI4.h)
-SET(SRCS ${SRCS} ${MOC_SRCS} GUI4.cxx GUI4.h)
+set( Srcs main.cxx GUI4.cxx )
 
-ADD_EXECUTABLE( qtevents MACOSX_BUNDLE ${SRCS} ${UI_SRCS})
+set( Hdrs GUI4.h )
 
-TARGET_LINK_LIBRARIES( qtevents
-  QVTK
-  ${QT_LIBRARIES}
-  vtkRendering
-  vtkGraphics
-  vtkIO
-  vtkCommon
-)
+set( MOC_Hdrs GUI4.h )
+
+set( UIs GUI4.ui )
 
+# Use the include path and library for Qt that is used by VTK.
+include_directories(
+  ${CMAKE_CURRENT_BINARY_DIR}
+  ${CMAKE_CURRENT_SOURCE_DIR}
+)
 
+# Instruct CMake to run moc automatically when needed.
+set(CMAKE_AUTOMOC ON)
+
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5Widgets)
+  qt5_wrap_ui(UI_Srcs ${UIs})
+
+  add_executable(qtevents
+    MACOSX_BUNDLE  ${Srcs} ${Hdrs} ${UI_Srcs} ${MOC_Hdrs})
+  qt5_use_modules(qtevents Core Gui Widgets)
+  target_link_libraries(qtevents ${VTK_LIBRARIES})
+else()
+  find_package(Qt4 REQUIRED)
+  include(${QT_USE_FILE})
+  # Use what VTK built with
+  set(QT_QMAKE_EXECUTABLE ${VTK_QT_QMAKE_EXECUTABLE} CACHE FILEPATH "")
+  set(QT_MOC_EXECUTABLE ${VTK_QT_MOC_EXECUTABLE} CACHE FILEPATH "")
+  set(QT_UIC_EXECUTABLE ${VTK_QT_UIC_EXECUTABLE} CACHE FILEPATH "")
+  qt4_wrap_ui(UI_Srcs ${UIs})
+
+  add_executable(qtevents MACOSX_BUNDLE ${Srcs} ${Hdrs} ${UI_Srcs} ${MOC_Hdrs})
+  target_link_libraries(qtevents
+    ${QT_LIBRARIES}
+    ${VTK_LIBRARIES}
+  )
+endif()
diff --git a/Examples/GUI/Qt/FourPaneViewer/CMakeLists.txt b/Examples/GUI/Qt/FourPaneViewer/CMakeLists.txt
index fe505ba..913d417 100644
--- a/Examples/GUI/Qt/FourPaneViewer/CMakeLists.txt
+++ b/Examples/GUI/Qt/FourPaneViewer/CMakeLists.txt
@@ -1,45 +1,59 @@
+project(QtVTKRenderWindows)
+
 cmake_minimum_required(VERSION 2.6)
 
-PROJECT(QtVTKRenderWindows)
+if(POLICY CMP0020)
+  cmake_policy(SET CMP0020 NEW)
+endif()
 
-IF(NOT VTK_BINARY_DIR)
-FIND_PACKAGE(VTK)
-IF(NOT VTK_DIR)
-  MESSAGE(FATAL_ERROR "Please set VTK_DIR.")
-ENDIF(NOT VTK_DIR)
-INCLUDE(${VTK_USE_FILE})
-ENDIF(NOT VTK_BINARY_DIR)
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersSources
+  vtkGUISupportQt
+  vtkIOImage
+  vtkInteractionImage
+)
+include(${VTK_USE_FILE})
 
-SET(QT_QMAKE_EXECUTABLE ${VTK_QT_QMAKE_EXECUTABLE} CACHE FILEPATH "")
-SET(QT_MOC_EXECUTABLE ${VTK_QT_MOC_EXECUTABLE} CACHE FILEPATH "")
-SET(QT_UIC_EXECUTABLE ${VTK_QT_UIC_EXECUTABLE} CACHE FILEPATH "")
-FIND_PACKAGE(Qt4 REQUIRED)
-INCLUDE(${QT_USE_FILE})
 
+if("${VTK_QT_VERSION}" STREQUAL "")
+  message(FATAL_ERROR "VTK was not built with Qt")
+endif()
 
 # Set your files and resources here
-SET(QtVTKRenderWindowsSrcs QtVTKRenderWindowsApp.cxx QtVTKRenderWindows.cxx)
-SET(QtVTKRenderWindowsUI QtVTKRenderWindows.ui)
-SET(QtVTKRenderWindowsHeaders QtVTKRenderWindows.h)
+set( Srcs QtVTKRenderWindowsApp.cxx QtVTKRenderWindows.cxx)
 
-INCLUDE_DIRECTORIES(
-  ${QT_INCLUDE_DIR}
-  ${CMAKE_CURRENT_BINARY_DIR}
-  ${CMAKE_CURRENT_SOURCE_DIR}
-)
+set( Hdrs QtVTKRenderWindows.h )
 
-QT4_WRAP_UI(UISrcs ${QtVTKRenderWindowsUI})
-QT4_WRAP_CPP(MOCSrcs ${QtVTKRenderWindowsHeaders} )
+set( MOC_Hdrs QtVTKRenderWindows.h )
 
-SOURCE_GROUP("Resources" FILES
-  ${QtVTKRenderWindowsUI}
-)
+set( UIs QtVTKRenderWindows.ui )
 
-SOURCE_GROUP("Generated" FILES
-  ${UISrcs}
-  ${MOCSrcs}
-  ${RCS_SOURCES}
+include_directories(
+  ${CMAKE_CURRENT_BINARY_DIR}
+  ${CMAKE_CURRENT_SOURCE_DIR}
 )
 
-ADD_EXECUTABLE( QtVTKRenderWindows ${QtVTKRenderWindowsSrcs} ${UISrcs} ${MOCSrcs})
-TARGET_LINK_LIBRARIES( QtVTKRenderWindows ${VTK_LIBRARIES} )
+# Instruct CMake to run moc automatically when needed.
+set(CMAKE_AUTOMOC ON)
+
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5Widgets REQUIRED QUIET)
+  qt5_wrap_ui(UI_Srcs ${UIs})
+
+  # CMAKE_AUTOMOC in ON so the MocHdrs will be automatically wrapped.
+  add_executable(QtVTKRenderWindows ${Srcs} ${Hdrs} ${UI_Srcs} ${MOC_Hdrs})
+  qt5_use_modules(QtVTKRenderWindows Core Gui Widgets)
+  target_link_libraries(QtVTKRenderWindows ${VTK_LIBRARIES})
+else()
+  find_package(Qt4 REQUIRED)
+  include(${QT_USE_FILE})
+  # Use what VTK built with
+  set(QT_QMAKE_EXECUTABLE ${VTK_QT_QMAKE_EXECUTABLE} CACHE FILEPATH "")
+  set(QT_MOC_EXECUTABLE ${VTK_QT_MOC_EXECUTABLE} CACHE FILEPATH "")
+  set(QT_UIC_EXECUTABLE ${VTK_QT_UIC_EXECUTABLE} CACHE FILEPATH "")
+  qt4_wrap_ui(UI_Srcs ${UIs})
+
+  add_executable(QtVTKRenderWindows ${Srcs} ${Hdrs} ${UI_Srcs} ${MOC_Hdrs})
+  target_link_libraries(QtVTKRenderWindows ${QT_LIBRARIES} ${VTK_LIBRARIES})
+endif()
diff --git a/Examples/GUI/Qt/FourPaneViewer/QtVTKRenderWindows.cxx b/Examples/GUI/Qt/FourPaneViewer/QtVTKRenderWindows.cxx
index cfb5bbc..3e94b31 100644
--- a/Examples/GUI/Qt/FourPaneViewer/QtVTKRenderWindows.cxx
+++ b/Examples/GUI/Qt/FourPaneViewer/QtVTKRenderWindows.cxx
@@ -290,17 +290,17 @@ void QtVTKRenderWindows::SetBlendMode(int m)
 
 void QtVTKRenderWindows::SetBlendModeToMaxIP()
 {
-  this->SetBlendMode(VTK_IMAGESLAB_BLEND_MAX);
+  this->SetBlendMode(VTK_IMAGE_SLAB_MAX);
 }
 
 void QtVTKRenderWindows::SetBlendModeToMinIP()
 {
-  this->SetBlendMode(VTK_IMAGESLAB_BLEND_MIN);
+  this->SetBlendMode(VTK_IMAGE_SLAB_MIN);
 }
 
 void QtVTKRenderWindows::SetBlendModeToMeanIP()
 {
-  this->SetBlendMode(VTK_IMAGESLAB_BLEND_MEAN);
+  this->SetBlendMode(VTK_IMAGE_SLAB_MEAN);
 }
 
 void QtVTKRenderWindows::ResetViews()
diff --git a/Examples/GUI/Qt/FourPaneViewer/QtVTKRenderWindows.h b/Examples/GUI/Qt/FourPaneViewer/QtVTKRenderWindows.h
index 2999f78..b995788 100644
--- a/Examples/GUI/Qt/FourPaneViewer/QtVTKRenderWindows.h
+++ b/Examples/GUI/Qt/FourPaneViewer/QtVTKRenderWindows.h
@@ -18,7 +18,7 @@ public:
 
   // Constructor/Destructor
   QtVTKRenderWindows(int argc, char *argv[]);
-  ~QtVTKRenderWindows() {};
+  ~QtVTKRenderWindows() {}
 
 public slots:
 
diff --git a/Examples/GUI/Qt/GraphicsView/CMakeLists.txt b/Examples/GUI/Qt/GraphicsView/CMakeLists.txt
index ea466d3..589ec9a 100644
--- a/Examples/GUI/Qt/GraphicsView/CMakeLists.txt
+++ b/Examples/GUI/Qt/GraphicsView/CMakeLists.txt
@@ -1,39 +1,79 @@
+project( GraphicsView )
 
-find_package(Qt4 REQUIRED)
-set(QT_USE_QTOPENGL 1)
-set(QT_USE_QTWEBKIT 1)
-include(${QT_USE_FILE})
+cmake_minimum_required(VERSION 2.8.8)
 
-find_package(OpenGL)
+if(POLICY CMP0020)
+  cmake_policy(SET CMP0020 NEW)
+endif()
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK REQUIRED)
-  include(${VTK_USE_FILE})
-endif(NOT VTK_BINARY_DIR)
+find_package(OpenGL)
 
-if(NOT VTK_USE_QVTK_QTOPENGL OR NOT QT_QTWEBKIT_FOUND OR QT_VERSION_MINOR LESS 6)
-  message(STATUS "VTK isn't configured to use QtOpenGL, QtWebKit wasn't found, or Qt 4.6 wasn't found.  GraphicsView example is disabled.")
-else(NOT VTK_USE_QVTK_QTOPENGL OR NOT QT_QTWEBKIT_FOUND OR QT_VERSION_MINOR LESS 6)
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkGUISupportQt
+  vtkGUISupportQtOpenGL
+  vtkIOInfovis
+  vtkRenderingFreeTypeOpenGL
+  vtkViewsInfovis
+)
+include(${VTK_USE_FILE})
 
-qt4_add_resources(qrcfiles GraphicsView.qrc)
+if("${VTK_QT_VERSION}" STREQUAL "")
+  message(FATAL_ERROR "VTK was not built with Qt")
+endif()
 
-qt4_wrap_cpp(mocs
-  OpenGLScene.hpp
-  QBoolAnimation.h
-  WebView.h
-  )
-
-add_executable(qtgraphicsview
+set( Srcs
   main.cpp
   OpenGLScene.cpp
   TreeRingViewItem.cpp
   GraphLayoutViewItem.cpp
   WebView.cpp
-  ${mocs}
-  ${qrcfiles}
-  )
+)
+
+set( Hdrs
+  GraphicsView.hpp
+  OpenGLScene.hpp
+  QBoolAnimation.h
+  TreeRingViewItem.h
+  GraphLayoutViewItem.h
+  WebView.h
+)
+
+set( MOC_Hdrs
+  OpenGLScene.hpp
+  QBoolAnimation.h
+  WebView.h
+)
+
+set( QRCs
+  GraphicsView.qrc
+)
+
+# Instruct CMake to run moc automatically when needed.
+set(CMAKE_AUTOMOC ON)
+
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5WebKitWidgets REQUIRED QUIET)
+
+  qt5_add_resources(QRC_Srcs ${QRCs} )
 
-target_link_libraries(qtgraphicsview QVTK vtkRendering)
-target_link_libraries(qtgraphicsview ${QT_LIBRARIES})
+  add_executable(qtgraphicsview MACOSX_BUNDLE
+    ${Srcs} ${Hdrs} ${MOC_Hdrs} ${QRC_Srcs})
+  qt5_use_modules(qtgraphicsview Core Gui Widgets
+                  WebKit WebKitWidgets OpenGL OpenGLExtensions)
+  target_link_libraries(qtgraphicsview ${VTK_LIBRARIES})
+else()
+  find_package(Qt4 REQUIRED)
+  set(QT_USE_QTOPENGL 1)
+  set(QT_USE_QTWEBKIT 1)
+  include(${QT_USE_FILE})
+  if (NOT QT_QTWEBKIT_FOUND OR QT_VERSION_MINOR LESS 6)
+    message(STATUS "VTK isn't configured to use QtOpenGL, QtWebKit wasn't found, or Qt 4.6 wasn't found.  GraphicsView example is disabled.")
+  else()
+    qt4_add_resources(QRC_Srcs ${QRCs})
+    qt4_wrap_cpp(MOC_Srcs ${MOC_Hdrs})
 
-endif(NOT VTK_USE_QVTK_QTOPENGL OR NOT QT_QTWEBKIT_FOUND OR QT_VERSION_MINOR LESS 6)
+    add_executable(qtgraphicsview ${Srcs} ${Hdrs} ${MOC_Hdrs} ${QRC_Srcs})
+    target_link_libraries(qtgraphicsview ${QT_LIBRARIES} ${VTK_LIBRARIES})
+  endif()
+endif()
diff --git a/Examples/GUI/Qt/GraphicsView/OpenGLScene.cpp b/Examples/GUI/Qt/GraphicsView/OpenGLScene.cpp
index 77b3c20..f9f9676 100644
--- a/Examples/GUI/Qt/GraphicsView/OpenGLScene.cpp
+++ b/Examples/GUI/Qt/GraphicsView/OpenGLScene.cpp
@@ -126,8 +126,14 @@ void OpenGLScene::mousePressEvent(QGraphicsSceneMouseEvent* e)
 {
   QGraphicsScene::mousePressEvent(e);
 
-  // see if its under one our our deactivated items
-  QGraphicsItem* item = itemAt(e->scenePos());
+  // See if its under one our our deactivated items.
+  #if QT_VERSION >= 0x050000
+    // The transform is just the identity matrix.
+    QGraphicsItem* item = itemAt(e->scenePos(),QTransform());
+  #else
+    QGraphicsItem* item = itemAt(e->scenePos());
+  #endif
+
   if(item == mGraphLayoutView && CurrentState != 0)
   {
     e->accept();
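
The guard above is needed because Qt 5 dropped the one-argument QGraphicsScene::itemAt() overload and requires an explicit QTransform. A small standalone sketch of the same version check, assuming a Qt build with the graphics-view classes available (the empty scene is illustrative, so the lookup simply returns no item):

    // Sketch: version-guarded QGraphicsScene::itemAt() that compiles
    // against either Qt 4 or Qt 5.
    #include <QApplication>
    #include <QGraphicsScene>
    #include <QGraphicsItem>
    #include <QPointF>
    #include <QTransform>

    int main(int argc, char **argv)
    {
      QApplication app(argc, argv);
      QGraphicsScene scene;
      QPointF scenePos(10.0, 10.0);

    #if QT_VERSION >= 0x050000
      // Qt 5: the device transform is required; identity is enough here.
      QGraphicsItem *item = scene.itemAt(scenePos, QTransform());
    #else
      QGraphicsItem *item = scene.itemAt(scenePos);
    #endif

      return item == 0 ? 0 : 1;
    }
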
diff --git a/Examples/GUI/Qt/ImageViewer/CMakeLists.txt b/Examples/GUI/Qt/ImageViewer/CMakeLists.txt
index 6b8a891..70d62d3 100644
--- a/Examples/GUI/Qt/ImageViewer/CMakeLists.txt
+++ b/Examples/GUI/Qt/ImageViewer/CMakeLists.txt
@@ -1,34 +1,41 @@
-PROJECT(QtImageViewer)
-
-IF(NOT VTK_BINARY_DIR)
-FIND_PACKAGE(VTK)
-IF(NOT VTK_DIR)
-  MESSAGE(FATAL_ERROR "Please set VTK_DIR.")
-ENDIF(NOT VTK_DIR)
-INCLUDE(${VTK_USE_FILE})
-ENDIF(NOT VTK_BINARY_DIR)
-
-SET(QT_QMAKE_EXECUTABLE ${VTK_QT_QMAKE_EXECUTABLE} CACHE FILEPATH "")
-SET(QT_MOC_EXECUTABLE ${VTK_QT_MOC_EXECUTABLE} CACHE FILEPATH "")
-SET(QT_UIC_EXECUTABLE ${VTK_QT_UIC_EXECUTABLE} CACHE FILEPATH "")
-FIND_PACKAGE(Qt4 REQUIRED)
-INCLUDE(${QT_USE_FILE})
-
-SET (SRCS
-  main.cxx
-)
+project(QtImageViewer)
 
-# Use the include path and library for Qt that is used by VTK.
-INCLUDE_DIRECTORIES( ${QT_INCLUDE_DIR} ${QT_QTGUI_INCLUDE_DIR}
-                     ${QT_QTCORE_INCLUDE_DIR})
+cmake_minimum_required(VERSION 2.8)
 
-ADD_EXECUTABLE( qtimageviewer MACOSX_BUNDLE ${SRCS})
+if(POLICY CMP0020)
+  cmake_policy(SET CMP0020 NEW)
+endif()
 
-TARGET_LINK_LIBRARIES( qtimageviewer
-  QVTK
-  ${QT_LIBRARIES}
-  vtkRendering
-  vtkGraphics
-  vtkIO
-  vtkCommon
+find_package(VTK COMPONENTS
+  vtkGUISupportQt
+  vtkIOImage
+  vtkInteractionImage
+  vtkRenderingOpenGL
+  vtkTestingCore
 )
+include(${VTK_USE_FILE})
+
+if("${VTK_QT_VERSION}" STREQUAL "")
+  message(FATAL_ERROR "VTK was not built with Qt")
+endif()
+
+set( Srcs main.cxx )
+
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5Core REQUIRED QUIET)
+
+  add_executable(qtimageviewer ${Srcs})
+  qt5_use_modules(qtimageviewer Core Gui Widgets)
+  target_link_libraries(qtimageviewer ${VTK_LIBRARIES})
+else()
+  find_package(Qt4 REQUIRED)
+  include(${QT_USE_FILE})
+  # Use what VTK built with
+  set(QT_QMAKE_EXECUTABLE ${VTK_QT_QMAKE_EXECUTABLE} CACHE FILEPATH "")
+  set(QT_MOC_EXECUTABLE ${VTK_QT_MOC_EXECUTABLE} CACHE FILEPATH "")
+  set(QT_UIC_EXECUTABLE ${VTK_QT_UIC_EXECUTABLE} CACHE FILEPATH "")
+  include_directories(${QT_INCLUDE_DIR})
+
+  add_executable(qtimageviewer ${Srcs})
+  target_link_libraries(qtimageviewer ${QT_LIBRARIES} ${VTK_LIBRARIES})
+endif()
diff --git a/Examples/GUI/Qt/SimpleView/CMakeLists.txt b/Examples/GUI/Qt/SimpleView/CMakeLists.txt
index 8a88304..1aa333a 100644
--- a/Examples/GUI/Qt/SimpleView/CMakeLists.txt
+++ b/Examples/GUI/Qt/SimpleView/CMakeLists.txt
@@ -1,78 +1,99 @@
-PROJECT(SimpleView)
+project(SimpleView)
 
-IF(NOT VTK_BINARY_DIR)
-FIND_PACKAGE(VTK)
-IF(NOT VTK_DIR)
-  MESSAGE(FATAL_ERROR "Please set VTK_DIR.")
-ENDIF(NOT VTK_DIR)
-INCLUDE(${VTK_USE_FILE})
-ENDIF(NOT VTK_BINARY_DIR)
+cmake_minimum_required(VERSION 2.8)
 
-# use what QVTK built with
-SET(QT_MOC_EXECUTABLE ${VTK_QT_MOC_EXECUTABLE} CACHE FILEPATH "")
-SET(QT_UIC_EXECUTABLE ${VTK_QT_UIC_EXECUTABLE} CACHE FILEPATH "")
-SET(QT_QMAKE_EXECUTABLE ${VTK_QT_QMAKE_EXECUTABLE} CACHE FILEPATH "")
-FIND_PACKAGE(Qt4 REQUIRED)
-INCLUDE(${QT_USE_FILE})
+if(POLICY CMP0020)
+  cmake_policy(SET CMP0020 NEW)
+endif()
 
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersCore
+  vtkInfovisCore
+  vtkInteractionStyle
+  vtkRenderingFreeTypeOpenGL
+  vtkViewsQt
+)
+include(${VTK_USE_FILE})
+
+if("${VTK_QT_VERSION}" STREQUAL "")
+  message(FATAL_ERROR "VTK was not built with Qt")
+endif()
 
 # Use the include path and library for Qt that is used by VTK.
-INCLUDE_DIRECTORIES(
-  ${QT_INCLUDE_DIR}
+include_directories(
   ${CMAKE_CURRENT_BINARY_DIR}
   ${CMAKE_CURRENT_SOURCE_DIR}
 )
 
-
 # Set your files and resources here
-SET(SimpleViewSrcs main.cxx SimpleView.cxx)
-SET(SimpleViewUI SimpleView.ui)
-SET(SimpleViewHeaders SimpleView.h)
-SET(SimpleViewResources Icons/icons.qrc)
+set( Srcs main.cxx SimpleView.cxx )
 
-# The rest should just work (sure...)
-QT4_WRAP_UI(UISrcs ${SimpleViewUI})
-QT4_WRAP_CPP(MOCSrcs ${SimpleViewHeaders} )
-QT4_ADD_RESOURCES(ResourceSrcs ${SimpleViewResources})
-
-SOURCE_GROUP("Resources" FILES
-  ${SimpleViewUI}
-  ${SimpleViewResources}
-  ${EXE_ICON}
-)
+set( Hdrs SimpleView.h )
 
-SOURCE_GROUP("Generated" FILES
-  ${UISrcs}
-  ${MOCSrcs}
-  ${ResourceSrcs}
-  ${RCS_SOURCES}
-)
+set( MOC_Hdrs SimpleView.h )
 
-ADD_DEFINITIONS(-DQT_GUI_LIBS -DQT_CORE_LIB -DQT3_SUPPORT)
-SET_SOURCE_FILES_PROPERTIES(${SimpleViewSrcs} PROPERTIES
-                            OBJECT_DEPENDS "${UISrcs}")
-
-# It's nice to have the ui in the windows project file...just double click on it
-# and designer comes up on that ui file :)
-IF (${CMAKE_BUILD_TOOL} MATCHES "msdev")
-  SET (SimpleViewSrcs ${SimpleViewSrcs} ${SimpleViewUI})
-ENDIF (${CMAKE_BUILD_TOOL} MATCHES "msdev")
-IF (${CMAKE_BUILD_TOOL} MATCHES "devenv")
-  SET (SimpleViewSrcs ${SimpleViewSrcs} ${SimpleViewUI})
-ENDIF (${CMAKE_BUILD_TOOL} MATCHES "devenv")
-
-ADD_EXECUTABLE( SimpleView MACOSX_BUNDLE ${SimpleViewSrcs} ${UISrcs} ${MOCSrcs} ${ResourceSrcs})
-
-TARGET_LINK_LIBRARIES( SimpleView
-  QVTK
-  ${QT_LIBRARIES}
-  vtkRendering
-  vtkGraphics
-  vtkIO
-  vtkCommon
-  vtkInfovis
-  vtkViews
-)
+set( UIs SimpleView.ui )
 
+set( QRCs Icons/icons.qrc )
 
 
+# Instruct CMake to run moc automatically when needed.
+set(CMAKE_AUTOMOC ON)
+
+# The rest should just work (sure...)
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  # We have ui files, this will bring in the macro: qt5_wrap_ui
+  find_package(Qt5Widgets REQUIRED QUIET)
+  qt5_wrap_ui(UI_Srcs ${UIs})
+  qt5_add_resources(QRC_Srcs ${QRCs} )
+
+  source_group("Resources" FILES
+    ${UIs}
+    ${QRCs}
+    ${EXE_ICON} # Not present
+  )
+
+  source_group("Generated" FILES
+    ${UI_Srcs}
+    ${MOC_Srcs}
+    ${QRC_Srcs}
+    ${QRC_Srcs}
+  )
+
+  add_executable(SimpleView MACOSX_BUNDLE
+    ${Srcs} ${Hdrs} ${UI_Srcs} ${MOC_Hdrs} ${QRC_Srcs})
+  qt5_use_modules(SimpleView Core Gui Widgets)
+  target_link_libraries(SimpleView ${VTK_LIBRARIES})
+else()
+  find_package(Qt4 REQUIRED)
+  include(${QT_USE_FILE})
+  # Use what VTK built with
+  set(QT_QMAKE_EXECUTABLE ${VTK_QT_QMAKE_EXECUTABLE} CACHE FILEPATH "")
+  set(QT_MOC_EXECUTABLE ${VTK_QT_MOC_EXECUTABLE} CACHE FILEPATH "")
+  set(QT_UIC_EXECUTABLE ${VTK_QT_UIC_EXECUTABLE} CACHE FILEPATH "")
+  qt4_wrap_ui(UI_Srcs ${UIs})
+  qt4_wrap_cpp(MOC_Srcs ${MOC_Hdrs} )
+  qt4_add_resources(QRC_Srcs ${QRCs})
+
+  source_group("Resources" FILES
+    ${UIs}
+    ${QRCs}
+    ${EXE_ICON} # Not present
+  )
+
+  source_group("Generated" FILES
+    ${UI_Srcs}
+    ${QRC_Srcs}
+    ${QRC_Srcs}
+  )
+
+  add_definitions(-DQT_GUI_LIBS -DQT_CORE_LIB -DQT3_SUPPORT)
+
+  add_executable(SimpleView MACOSX_BUNDLE
+    ${Srcs} ${Hdrs} ${UI_Srcs} ${MOC_Hdrs} ${QRC_Srcs})
+  target_link_libraries(SimpleView
+    ${QT_LIBRARIES}
+    ${VTK_LIBRARIES}
+  )
+endif()
diff --git a/Examples/GUI/Qt/SimpleView/main.cxx b/Examples/GUI/Qt/SimpleView/main.cxx
index fd2ef18..b7e76c2 100644
--- a/Examples/GUI/Qt/SimpleView/main.cxx
+++ b/Examples/GUI/Qt/SimpleView/main.cxx
@@ -8,7 +8,10 @@
  */
 // QT includes
 #include <QApplication>
-#include <QCleanlooksStyle>
+#if QT_VERSION < 0x050000
+  #include <QCleanlooksStyle>
+#endif
+
 #include "SimpleView.h"
 
 extern int qInitResources_icons();
@@ -19,7 +22,11 @@ int main( int argc, char** argv )
   // QT Stuff
   QApplication app( argc, argv );
 
-  QApplication::setStyle(new QCleanlooksStyle);
+  #if QT_VERSION >= 0x050000
+    QApplication::setStyle("fusion");
+  #else
+    QApplication::setStyle(new QCleanlooksStyle);
+  #endif
 
   qInitResources_icons();
 
diff --git a/Examples/GUI/Win32/SampleMFC/vtkMFCView.h b/Examples/GUI/Win32/SampleMFC/vtkMFCView.h
index d82400d..6f36106 100644
--- a/Examples/GUI/Win32/SampleMFC/vtkMFCView.h
+++ b/Examples/GUI/Win32/SampleMFC/vtkMFCView.h
@@ -52,8 +52,8 @@ public:
   int  GetPrintDPI() {return this->PrintDPI;};
   vtkMFCDocument *GetDocument() {return (vtkMFCDocument *)m_pDocument;};
   virtual vtkWindow *GetVTKWindow() {return NULL;};
-  virtual void SetupMemoryRendering(int x, int y, HDC prn) {};
-  virtual void ResumeScreenRendering() {};
+  virtual void SetupMemoryRendering(int x, int y, HDC prn) {}
+  virtual void ResumeScreenRendering() {}
   virtual unsigned char *GetMemoryData() {return NULL;};
 
 // Overrides
diff --git a/Examples/GUI/Win32/vtkBorland/Package/vtkBorlandRenderWindow.h b/Examples/GUI/Win32/vtkBorland/Package/vtkBorlandRenderWindow.h
index ed7e4cf..e33ab53 100644
--- a/Examples/GUI/Win32/vtkBorland/Package/vtkBorlandRenderWindow.h
+++ b/Examples/GUI/Win32/vtkBorland/Package/vtkBorlandRenderWindow.h
@@ -32,7 +32,7 @@ public:
         }
       }
     }
-  vtkAbortCallback(){};
+  vtkAbortCallback(){}
 };
 
 //---------------------------------------------------------------------------
diff --git a/Examples/Hybrid/Cxx/CMakeLists.txt b/Examples/Hybrid/Cxx/CMakeLists.txt
deleted file mode 100644
index 41897d8..0000000
--- a/Examples/Hybrid/Cxx/CMakeLists.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
-
-PROJECT (Hybrid)
-
-if(NOT VTK_SOURCE_DIR)
-  message(ERROR "Cannot build Hybrid examples without VTK_SOURCE_DIR")
-endif()
-
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK REQUIRED)
-  include(${VTK_USE_FILE})
-endif()
-
-include_directories(
-  # ImageDataLIC2DDemo and StructuredGridLIC2DDemo include
-  # TestImageDataLIC2D.h  and TestStructuredGridLIC2DSlice
-  # from the dir below
-  ${VTK_SOURCE_DIR}/Rendering/HybridOpenGL/Testing/Cxx
-)
-
-set(HYBRID_EXAMPLES_SRCS
-  ImageDataLIC2DDemo
-  StructuredGridLIC2DDemo
-  )
-
-if(vtkTestingCore_LOADED)
-  foreach(name ${HYBRID_EXAMPLES_SRCS})
-    add_executable(${name} MACOSX_BUNDLE ${name}.cxx)
-    target_link_libraries(${name} ${VTK_LIBRARIES} )
-  endforeach()
-endif()
diff --git a/Examples/Hybrid/Cxx/ImageDataLIC2DDemo.cxx b/Examples/Hybrid/Cxx/ImageDataLIC2DDemo.cxx
deleted file mode 100644
index 1dd4f7f..0000000
--- a/Examples/Hybrid/Cxx/ImageDataLIC2DDemo.cxx
+++ /dev/null
@@ -1,22 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    ImageDataLIC2DDemo.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "TestImageDataLIC2D.h"
-
-extern int ImageDataLIC2D(int argc, char* argv[]);
-
-int main( int argc, char * argv[] )
-{
-  return ImageDataLIC2D( argc, argv );
-}
diff --git a/Examples/Hybrid/Cxx/StructuredGridLIC2DDemo.cxx b/Examples/Hybrid/Cxx/StructuredGridLIC2DDemo.cxx
deleted file mode 100644
index 6c6bf03..0000000
--- a/Examples/Hybrid/Cxx/StructuredGridLIC2DDemo.cxx
+++ /dev/null
@@ -1,23 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    StructuredGridLIC2DDemo.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "TestStructuredGridLIC2DSlice.h"
-
-extern int StructuredGridLIC2DSlice(int argc, char* argv[]);
-
-int main( int argc, char * argv[] )
-{
-  RenderingMode = STRUCTURED_GRID_LIC2D_SLICE_DEMO;
-  return StructuredGridLIC2DSlice( argc, argv );
-}
diff --git a/Examples/HyperTree/Cxx/Sphere/Cell.h b/Examples/HyperTree/Cxx/Sphere/Cell.h
index b0c7808..cf6e2c8 100644
--- a/Examples/HyperTree/Cxx/Sphere/Cell.h
+++ b/Examples/HyperTree/Cxx/Sphere/Cell.h
@@ -25,7 +25,7 @@ class Cell
       bool isRefined () {return _refined;}
       int getId () {return _id;}
       vtkIdType * getNodeIds();
-      void setNeighbours(int idx1, int idx2, int idy1, int idy2, int idz1, int idz2) {};
+      void setNeighbours(int idx1, int idx2, int idy1, int idy2, int idz1, int idz2) {}
 
       void refine ();
       void refineIfNeeded();
diff --git a/Examples/HyperTree/Cxx/Sphere/Mesh.cxx b/Examples/HyperTree/Cxx/Sphere/Mesh.cxx
index ced05aa..6f7d060 100644
--- a/Examples/HyperTree/Cxx/Sphere/Mesh.cxx
+++ b/Examples/HyperTree/Cxx/Sphere/Mesh.cxx
@@ -11,7 +11,7 @@ All rights reserved.
 #include "Node.h"
 #include "Cell.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include <vtkUnstructuredGrid.h>
 #include <vtkPoints.h>
diff --git a/Examples/IO/Cxx/CMakeLists.txt b/Examples/IO/Cxx/CMakeLists.txt
index c8dd318..73962e2 100644
--- a/Examples/IO/Cxx/CMakeLists.txt
+++ b/Examples/IO/Cxx/CMakeLists.txt
@@ -2,16 +2,14 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT (IO)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkIOGeometry
-    vtkIOXML
-    vtkRenderingOpenGL
-  )
-  include(${VTK_USE_FILE})
-endif()
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkIOLegacy
+  vtkIOXML
+  vtkTestingRendering
+  vtksys
+)
+include(${VTK_USE_FILE})
 
 add_executable(DumpXMLFile MACOSX_BUNDLE DumpXMLFile.cxx)
 target_link_libraries(DumpXMLFile ${VTK_LIBRARIES})
@@ -22,22 +20,15 @@ target_link_libraries(ParticleReader ${VTK_LIBRARIES})
 if(BUILD_TESTING)
   if(vtkTestingRendering_LOADED)
     ######## Regression Testing ########
-    SET(KIT IOExamples)
-    SET(MyTests TestDumpXMLFile.cxx TestParticleReader.cxx)
-
-    include(vtkTestingObjectFactory)
-
-    add_executable(${KIT}CxxTests ${KIT}CxxTests.cxx ${MyTests})
-    target_link_libraries(${KIT}CxxTests ${VTK_LIBRARIES})
-
-    if (VTK_DATA_ROOT)
-      add_test(NAME ${KIT}-ParticleReader COMMAND ${KIT}CxxTests
-               TestParticleReader
-                 ${VTK_DATA_ROOT}/Data/golf.csv
-                 ${VTK_TEST_OUTPUT_DIR}/TestParticleReader.vtp)
-      add_test(NAME ${KIT}-DumpXMLFile COMMAND ${KIT}CxxTests
-               TestDumpXMLFile
-                 ${VTK_DATA_ROOT}/Data/cow.vtp)
-    endif()
+    set(vtk-example IOExamples)
+    set(TestDumpXMLFile_ARGS "DATA{${VTK_TEST_INPUT_DIR}/cow.vtp}")
+    set(TestParticleReader_ARGS
+      "${VTK_TEST_DATA_DIR}/Data/golf.csv"
+      ${VTK_TEST_OUTPUT_DIR}/TestParticleReader.vtp)
+    vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
+      TestParticleReader.cxx
+      TestDumpXMLFile.cxx
+      )
+    vtk_test_cxx_executable(${vtk-example}CxxTests RENDERING_FACTORY)
   endif()
 endif()
diff --git a/Examples/IO/Cxx/ParticleReader.cxx b/Examples/IO/Cxx/ParticleReader.cxx
index 4a7d034..0cf5b77 100644
--- a/Examples/IO/Cxx/ParticleReader.cxx
+++ b/Examples/IO/Cxx/ParticleReader.cxx
@@ -7,7 +7,7 @@ int main ( int argc, char* argv[] )
 {
   if ( argc != 3 )
     {
-    cerr << "Usage: " << argv[0] << "InputFile(csv) OutputFile(vtp)." << endl;
+    cerr << "Usage: " << argv[0] << " InputFile(csv) OutputFile(vtp)." << endl;
     return EXIT_FAILURE;
     }
 
diff --git a/Examples/ImageProcessing/Cxx/Baseline/TestImageSlicing.png.md5 b/Examples/ImageProcessing/Cxx/Baseline/TestImageSlicing.png.md5
new file mode 100644
index 0000000..c55a5b7
--- /dev/null
+++ b/Examples/ImageProcessing/Cxx/Baseline/TestImageSlicing.png.md5
@@ -0,0 +1 @@
+6e1a4f261ec50a67b6fb6701e29bef10
diff --git a/Examples/ImageProcessing/Cxx/CMakeLists.txt b/Examples/ImageProcessing/Cxx/CMakeLists.txt
index 24335b4..7a1a3fb 100644
--- a/Examples/ImageProcessing/Cxx/CMakeLists.txt
+++ b/Examples/ImageProcessing/Cxx/CMakeLists.txt
@@ -2,37 +2,26 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT(ImageProcessing)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkCommonExecutionModel
-    vtkCommonMath
-    vtkIOImage
-    vtkImagingCore
-    vtkInteractionStyle
-    vtkRenderingCore
-    vtkRenderingOpenGL
-  )
+find_package(VTK COMPONENTS
+  vtkIOImage
+  vtkImagingCore
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+  vtkTestingRendering
+)
 include(${VTK_USE_FILE})
-endif()
 
 add_executable(ImageSlicing MACOSX_BUNDLE ImageSlicing.cxx)
 target_link_libraries(ImageSlicing ${VTK_LIBRARIES})
 
 if(BUILD_TESTING)
   if(vtkTestingRendering_LOADED)
-    ######## Regression Testing ########
-    set(KIT ImageProcessingExamples)
-    set(MyTests TestImageSlicing.cxx)
-
-    include(vtkTestingObjectFactory)
-    add_executable(${KIT}CxxTests ${KIT}CxxTests.cxx ${MyTests})
-    target_link_libraries(${KIT}CxxTests ${VTK_LIBRARIES})
-
-      if (VTK_DATA_ROOT)
-        add_test(NAME ${KIT}-ImageSlicing COMMAND ${KIT}CxxTests
-          TestImageSlicing ${VTK_DATA_ROOT}/Data/headsq/quarter)
-      endif()
+   ######## Regression Testing ########
+   set(vtk-example ImageProcessingExamples)
+   set(TestImageSlicing_ARGS ${VTK_TEST_DATA_DIR}/Data/headsq/quarter)
+   vtk_add_test_cxx(
+     TestImageSlicing.cxx
+     )
+   vtk_test_cxx_executable(${vtk-example}CxxTests RENDERING_FACTORY)
  endif()
 endif()
diff --git a/Examples/Infovis/Cxx/CustomLinkView/CustomLinkView.cxx b/Examples/Infovis/Cxx/CustomLinkView/CustomLinkView.cxx
index 759c711..ea0f3e1 100644
--- a/Examples/Infovis/Cxx/CustomLinkView/CustomLinkView.cxx
+++ b/Examples/Infovis/Cxx/CustomLinkView/CustomLinkView.cxx
@@ -161,7 +161,7 @@ void CustomLinkView::slotOpenXMLFile()
     }
 
   // Create XML reader
-  this->XMLReader->SetFileName( fileName.toAscii() );
+  this->XMLReader->SetFileName( fileName.toLatin1() );
   this->XMLReader->ReadTagNameOff();
   this->XMLReader->Update();
 
diff --git a/Examples/Infovis/Cxx/EasyView/EasyView.cxx b/Examples/Infovis/Cxx/EasyView/EasyView.cxx
index ced01d5..42c8f1a 100644
--- a/Examples/Infovis/Cxx/EasyView/EasyView.cxx
+++ b/Examples/Infovis/Cxx/EasyView/EasyView.cxx
@@ -131,7 +131,7 @@ void EasyView::slotOpenXMLFile()
     }
 
   // Create XML reader
-  this->XMLReader->SetFileName( fileName.toAscii() );
+  this->XMLReader->SetFileName( fileName.toLatin1() );
   this->XMLReader->ReadTagNameOff();
   this->XMLReader->Update();
 
diff --git a/Examples/Infovis/Cxx/StatsView/StatsView.cxx b/Examples/Infovis/Cxx/StatsView/StatsView.cxx
index 52dabaf..8836564 100644
--- a/Examples/Infovis/Cxx/StatsView/StatsView.cxx
+++ b/Examples/Infovis/Cxx/StatsView/StatsView.cxx
@@ -94,7 +94,7 @@ void StatsView::slotOpenSQLiteDB()
 
   // Create SQLite reader
   QString fullName = "sqlite://" + fileName;
-  vtkSQLiteDatabase* db = vtkSQLiteDatabase::SafeDownCast( vtkSQLDatabase::CreateFromURL( fullName.toAscii() ) );
+  vtkSQLiteDatabase* db = vtkSQLiteDatabase::SafeDownCast( vtkSQLDatabase::CreateFromURL( fullName.toLatin1() ) );
   bool status = db->Open("");
   if ( ! status )
     {
@@ -139,22 +139,22 @@ void StatsView::slotOpenSQLiteDB()
 
   // Assign tables to table views
 
-  // FIXME: we should not have to make a shallow copy of the ouput
+  // FIXME: we should not have to make a shallow copy of the output
   VTK_CREATE(vtkTable,descriptiveC);
   descriptiveC->ShallowCopy( descriptive->GetOutput( 1 ) );
   this->TableView1->SetRepresentationFromInput( descriptiveC );
 
-  // FIXME: we should not have to make a shallow copy of the ouput
+  // FIXME: we should not have to make a shallow copy of the output
   VTK_CREATE(vtkTable,order1C);
   order1C->ShallowCopy( order1->GetOutput( 1 ) );
   this->TableView2->SetRepresentationFromInput( order1C );
 
-  // FIXME: we should not have to make a shallow copy of the ouput
+  // FIXME: we should not have to make a shallow copy of the output
   VTK_CREATE(vtkTable,order2C);
   order2C->ShallowCopy( order2->GetOutput( 1 ) );
   this->TableView3->SetRepresentationFromInput( order2C );
 
-  // FIXME: we should not have to make a shallow copy of the ouput
+  // FIXME: we should not have to make a shallow copy of the output
   VTK_CREATE(vtkTable,correlativeC);
   correlativeC->ShallowCopy( correlative->GetOutput( 0 ) );
   this->TableView4->SetRepresentationFromInput( correlativeC );
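
These Infovis example hunks replace QString::toAscii(), which no longer exists in Qt 5, with toLatin1(). A self-contained sketch of handing a QString to a const char* setter (SetFileNameSketch below is a stand-in, not a VTK call; keep the QByteArray alive for as long as the pointer is used):

    // Sketch: QString -> const char* via toLatin1() (or toUtf8() for
    // non-Latin paths); the pointer is only valid while the QByteArray lives.
    #include <QString>
    #include <QByteArray>
    #include <cstdio>

    // Stand-in for a VTK-style setter such as SetFileName(const char*).
    static void SetFileNameSketch(const char *name)
    {
      std::printf("file: %s\n", name);
    }

    int main()
    {
      QString fileName = QString::fromLatin1("data/input.xml");  // illustrative path
      QByteArray bytes = fileName.toLatin1();  // keep alive across the call
      SetFileNameSketch(bytes.constData());
      return 0;
    }
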
diff --git a/Examples/LIC/Cxx/CMakeLists.txt b/Examples/LIC/Cxx/CMakeLists.txt
new file mode 100644
index 0000000..848a366
--- /dev/null
+++ b/Examples/LIC/Cxx/CMakeLists.txt
@@ -0,0 +1,37 @@
+cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
+
+PROJECT(LIC)
+
+if(NOT VTK_SOURCE_DIR)
+  message(ERROR " Cannot build LIC examples without VTK_SOURCE_DIR")
+endif()
+
+find_package(VTK COMPONENTS
+  vtkFiltersExtraction
+  vtkFiltersGeometry
+  vtkIOImage
+  vtkInteractionStyle
+  vtkRenderingLIC
+  vtkTestingRendering
+  vtksys
+)
+include(${VTK_USE_FILE})
+
+# these demos are simply repackaged ctests
+# by #include'ing the test implementation
+include_directories(
+  ${VTK_SOURCE_DIR}/Rendering/LIC/Testing/Cxx
+  )
+
+set(HYBRID_EXAMPLES_SRCS
+  ImageDataLIC2DDemo
+  StructuredGridLIC2DDemo
+  SurfaceLICDemo
+  )
+
+if(vtkTestingCore_LOADED)
+  foreach(name ${HYBRID_EXAMPLES_SRCS})
+    add_executable(${name} MACOSX_BUNDLE ${name}.cxx)
+    target_link_libraries(${name} ${VTK_LIBRARIES})
+  endforeach()
+endif()
diff --git a/Examples/LIC/Cxx/ImageDataLIC2DDemo.cxx b/Examples/LIC/Cxx/ImageDataLIC2DDemo.cxx
new file mode 100644
index 0000000..bfd36b4
--- /dev/null
+++ b/Examples/LIC/Cxx/ImageDataLIC2DDemo.cxx
@@ -0,0 +1,20 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    ImageDataLIC2DDemo.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "TestImageDataLIC2D.cxx"
+
+int main( int argc, char * argv[] )
+{
+  return ImageDataLIC2D( argc, argv );
+}
diff --git a/Examples/LIC/Cxx/StructuredGridLIC2DDemo.cxx b/Examples/LIC/Cxx/StructuredGridLIC2DDemo.cxx
new file mode 100644
index 0000000..90333c5
--- /dev/null
+++ b/Examples/LIC/Cxx/StructuredGridLIC2DDemo.cxx
@@ -0,0 +1,20 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    StructuredGridLIC2DDemo.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkStructuredGridLIC2DTestDriver.cxx"
+
+int main(int argc, char * argv[])
+{
+  return StructuredGridLIC2DDemo(argc, argv);
+}
diff --git a/Examples/LIC/Cxx/SurfaceLICDemo.cxx b/Examples/LIC/Cxx/SurfaceLICDemo.cxx
new file mode 100644
index 0000000..f651012
--- /dev/null
+++ b/Examples/LIC/Cxx/SurfaceLICDemo.cxx
@@ -0,0 +1,21 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    SurfaceLICDemo.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSurfaceLICTestDriver.cxx"
+#include "TestSurfaceLIC.cxx"
+
+int main( int argc, char * argv[] )
+{
+  return TestSurfaceLIC( argc, argv );
+}
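
The three LIC demos added above are thin wrappers: each one #include's a test implementation from Rendering/LIC/Testing/Cxx and forwards main() to its entry point (the accompanying CMakeLists.txt calls them repackaged ctests). A self-contained sketch of that wrapper shape, with TestFoo standing in for the function an included test driver would really provide:

    // Sketch of the "demo = repackaged test" wrapper. In the real demos the
    // entry point comes from an #include'd test .cxx rather than being
    // defined inline as it is here.
    #include <cstdio>

    static int TestFoo(int argc, char *argv[])
    {
      std::printf("running %s with %d argument(s)\n", argv[0], argc - 1);
      return 0;
    }

    int main(int argc, char *argv[])
    {
      // The demo executable simply forwards to the test entry point.
      return TestFoo(argc, argv);
    }
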
diff --git a/Examples/Hybrid/Python/CylinderAndPolarAxes.py b/Examples/LIC/Python/CylinderAndPolarAxes.py
similarity index 100%
rename from Examples/Hybrid/Python/CylinderAndPolarAxes.py
rename to Examples/LIC/Python/CylinderAndPolarAxes.py
diff --git a/Examples/Medical/Cxx/Baseline/TestMedical1.png.md5 b/Examples/Medical/Cxx/Baseline/TestMedical1.png.md5
new file mode 100644
index 0000000..a3d24e6
--- /dev/null
+++ b/Examples/Medical/Cxx/Baseline/TestMedical1.png.md5
@@ -0,0 +1 @@
+5b916f0dbff2ae63f1fd3bf97cdb0184
diff --git a/Examples/Medical/Cxx/Baseline/TestMedical2.png.md5 b/Examples/Medical/Cxx/Baseline/TestMedical2.png.md5
new file mode 100644
index 0000000..0253644
--- /dev/null
+++ b/Examples/Medical/Cxx/Baseline/TestMedical2.png.md5
@@ -0,0 +1 @@
+4a625eb098e85b3665ae9a06e0955fac
diff --git a/Examples/Medical/Cxx/Baseline/TestMedical3.png.md5 b/Examples/Medical/Cxx/Baseline/TestMedical3.png.md5
new file mode 100644
index 0000000..2bc7e17
--- /dev/null
+++ b/Examples/Medical/Cxx/Baseline/TestMedical3.png.md5
@@ -0,0 +1 @@
+67eb318c3ec6b4c78ca2567494bfe40a
diff --git a/Examples/Medical/Cxx/Baseline/TestMedical4.png.md5 b/Examples/Medical/Cxx/Baseline/TestMedical4.png.md5
new file mode 100644
index 0000000..6d431a5
--- /dev/null
+++ b/Examples/Medical/Cxx/Baseline/TestMedical4.png.md5
@@ -0,0 +1 @@
+a134cf1237c06936eb2b829b15ae54fa
diff --git a/Examples/Medical/Cxx/CMakeLists.txt b/Examples/Medical/Cxx/CMakeLists.txt
index 87001b3..5e99081 100644
--- a/Examples/Medical/Cxx/CMakeLists.txt
+++ b/Examples/Medical/Cxx/CMakeLists.txt
@@ -2,27 +2,18 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT (Medical)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkCommonTransforms
-    vtkFiltersCore
-    vtkFiltersGeneral
-    vtkFiltersGeometry
-    vtkFiltersModeling
-    vtkIOImage
-    vtkIOXML
-    vtkImagingCore
-    vtkImagingStatistics
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkRenderingVolume
-    vtkRenderingVolumeOpenGL
-    vtkInteractionStyle
-  )
-  include(${VTK_USE_FILE})
-endif()
+find_package(VTK COMPONENTS
+  vtkFiltersGeometry
+  vtkFiltersModeling
+  vtkIOImage
+  vtkIOXML
+  vtkImagingStatistics
+  vtkInteractionStyle
+  vtkRenderingVolumeOpenGL
+  vtkTestingRendering
+  vtksys
+)
+include(${VTK_USE_FILE})
 
 add_executable(Medical1 MACOSX_BUNDLE Medical1.cxx)
 add_executable(Medical2 MACOSX_BUNDLE Medical2.cxx)
@@ -42,23 +33,17 @@ target_link_libraries(GenerateCubesFromLabels ${VTK_LIBRARIES})
 if(BUILD_TESTING)
   if(vtkTestingRendering_LOADED)
     ######## Regression Testing ########
-    set(KIT MedicalExamples)
-    set(MyTests TestMedical1.cxx TestMedical2.cxx TestMedical3.cxx TestMedical4.cxx)
-
-    include(vtkTestingObjectFactory)
-
-    add_executable(${KIT}CxxTests ${KIT}CxxTests.cxx ${MyTests})
-    target_link_libraries(${KIT}CxxTests ${VTK_LIBRARIES})
-
-      if (VTK_DATA_ROOT)
-        add_test(NAME ${KIT}-Medical1 COMMAND ${KIT}CxxTests
-          TestMedical1 ${VTK_DATA_ROOT}/Data/headsq/quarter)
-        add_test(NAME ${KIT}-Medical2 COMMAND ${KIT}CxxTests
-          TestMedical2 ${VTK_DATA_ROOT}/Data/headsq/quarter)
-        add_test(NAME ${KIT}-Medical3 COMMAND ${KIT}CxxTests
-          TestMedical3 ${VTK_DATA_ROOT}/Data/headsq/quarter)
-        add_test(NAME ${KIT}-Medical4 COMMAND ${KIT}CxxTests
-          TestMedical4 ${VTK_DATA_ROOT}/Data/headsq/quarter)
-      endif()
+    set(vtk-example MedicalExamples)
+    set(TestMedical1_ARGS ${VTK_TEST_DATA_DIR}/Data/headsq/quarter)
+    set(TestMedical2_ARGS ${VTK_TEST_DATA_DIR}/Data/headsq/quarter)
+    set(TestMedical3_ARGS ${VTK_TEST_DATA_DIR}/Data/headsq/quarter)
+    set(TestMedical4_ARGS ${VTK_TEST_DATA_DIR}/Data/headsq/quarter)
+    vtk_add_test_cxx(
+      TestMedical1.cxx
+      TestMedical2.cxx
+      TestMedical3.cxx
+      TestMedical4.cxx
+      )
+    vtk_test_cxx_executable(${vtk-example}CxxTests RENDERING_FACTORY)
   endif()
 endif()
diff --git a/Examples/Modelling/Cxx/Baseline/TestFinance.png.md5 b/Examples/Modelling/Cxx/Baseline/TestFinance.png.md5
new file mode 100644
index 0000000..e4329b7
--- /dev/null
+++ b/Examples/Modelling/Cxx/Baseline/TestFinance.png.md5
@@ -0,0 +1 @@
+629298f45052e7a44dfee1bd82a643f0
diff --git a/Examples/Modelling/Cxx/CMakeLists.txt b/Examples/Modelling/Cxx/CMakeLists.txt
index 3ca5533..19e8b0e 100644
--- a/Examples/Modelling/Cxx/CMakeLists.txt
+++ b/Examples/Modelling/Cxx/CMakeLists.txt
@@ -2,20 +2,17 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT (Modelling)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkFiltersCore
-    vtkFiltersGeneral
-    vtkIOXML
-    vtkImagingHybrid
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkInteractionStyle
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkCommonDataModel
+  vtkFiltersGeneral
+  vtkIOXML
+  vtkImagingHybrid
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+  vtkTestingRendering
 )
-  include(${VTK_USE_FILE})
-endif()
+include(${VTK_USE_FILE})
 
 add_executable(finance MACOSX_BUNDLE finance.cxx)
 target_link_libraries(finance ${VTK_LIBRARIES})
@@ -29,25 +26,23 @@ target_link_libraries(Delaunay3DAlpha ${VTK_LIBRARIES})
 if(BUILD_TESTING)
   if(vtkTestingRendering_LOADED)
     ######## Regression Testing ########
-    set(KIT ModellingExamples)
-    set(MyTests TestFinance.cxx TestDelaunay3D.cxx TestDelaunay3DAlpha.cxx)
-
-    include(vtkTestingObjectFactory)
-
-    add_executable(${KIT}CxxTests ${KIT}CxxTests.cxx ${MyTests})
-    target_link_LIBRARIES(${KIT}CxxTests ${VTK_LIBRARIES})
-
-    if (VTK_DATA_ROOT)
-      add_test(NAME ${KIT}-Finance COMMAND ${KIT}CxxTests
-               TestFinance ${VTK_DATA_ROOT}/Data/financial.txt)
-      add_test(NAME ${KIT}-Delaunay3D COMMAND ${KIT}CxxTests
-               TestDelaunay3D
-               ${VTK_DATA_ROOT}/Data/cow.vtp
-               ${VTK_TEST_OUTPUT_DIR}/Delaunay3D.vtu)
-      add_test(NAME ${KIT}-Delaunay3DAlpa COMMAND ${KIT}CxxTests
-               TestDelaunay3DAlpha
-               .5 ${VTK_DATA_ROOT}/Data/cow.vtp
-               ${VTK_TEST_OUTPUT_DIR}/Delaunay3DAlpha.vtu)
-    endif()
+    set(vtk-example ModellingExamples)
+    set(TestFinance_ARGS "DATA{${VTK_TEST_INPUT_DIR}/financial.txt}")
+    set(TestDelaunay3D_ARGS
+      "DATA{${VTK_TEST_INPUT_DIR}/cow.vtp}"
+      ${VTK_TEST_OUTPUT_DIR}/Delaunay3D.vtu
+      )
+    set(TestDelaunay3DAlpha_ARGS
+      .5 "DATA{${VTK_TEST_INPUT_DIR}/cow.vtp}"
+      ${VTK_TEST_OUTPUT_DIR}/Delaunay3DAlpha.vtu
+      )
+    vtk_add_test_cxx(
+      TestFinance.cxx
+      )
+    vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
+      TestDelaunay3D.cxx
+      TestDelaunay3DAlpha.cxx
+      )
+    vtk_test_cxx_executable(${vtk-example}CxxTests RENDERING_FACTORY)
   endif()
 endif()
diff --git a/Examples/MultiBlock/Cxx/CMakeLists.txt b/Examples/MultiBlock/Cxx/CMakeLists.txt
index d2bd480..d325109 100644
--- a/Examples/MultiBlock/Cxx/CMakeLists.txt
+++ b/Examples/MultiBlock/Cxx/CMakeLists.txt
@@ -2,22 +2,17 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT (MultiBlock)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkCommonExecutionModel
-    vtkFiltersCore
-    vtkFiltersGeneral
-    vtkFiltersGeometry
-    vtkFiltersSources
-    vtkIOXML
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkTestingCore
-  )
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersGeometry
+  vtkFiltersSources
+  vtkIOXML
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+  vtkTestingCore
+  vtksys
+)
 include(${VTK_USE_FILE})
-endif()
 
 if(vtkTestingCore_LOADED)
  add_executable(MultiBlock MACOSX_BUNDLE MultiBlock.cxx)
diff --git a/Examples/Rendering/Cxx/CMakeLists.txt b/Examples/Rendering/Cxx/CMakeLists.txt
index b440e4f..7e92761 100644
--- a/Examples/Rendering/Cxx/CMakeLists.txt
+++ b/Examples/Rendering/Cxx/CMakeLists.txt
@@ -6,36 +6,23 @@ if(NOT VTK_SOURCE_DIR)
   message(ERROR " Cannot build Rendering examples without VTK_SOURCE_DIR")
 endif()
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkFiltersSources
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkInteractionStyle
-  )
-  include(${VTK_USE_FILE})
-endif()
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersSources
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+)
+include(${VTK_USE_FILE})
 
 set(RENDERING_EXAMPLES_SRCS
   Cylinder
   SpecularSpheres
   DiffuseSpheres
   AmbientSpheres
-  MaterialObjects
+#  MaterialObjects
   )
 
 foreach(name ${RENDERING_EXAMPLES_SRCS})
   add_executable(${name} MACOSX_BUNDLE ${name}.cxx)
   target_link_libraries(${name} ${VTK_LIBRARIES} )
 endforeach()
-
-if(vtkTestingCore_LOADED)
-  include_directories(
-    # in support of SurfaceLICDemo that includes
-    # TestSurfaceLIC.h from the dir below
-    ${VTK_SOURCE_DIR}/Rendering/HybridOpenGL/Testing/Cxx
-  )
-  add_executable(SurfaceLICDemo MACOSX_BUNDLE   SurfaceLICDemo.cxx)
-  target_link_libraries(SurfaceLICDemo ${VTK_LIBRARIES} )
-endif()
diff --git a/Examples/Rendering/Cxx/MaterialObjects.cxx b/Examples/Rendering/Cxx/MaterialObjects.cxx
index 1f427b5..a58edaa 100644
--- a/Examples/Rendering/Cxx/MaterialObjects.cxx
+++ b/Examples/Rendering/Cxx/MaterialObjects.cxx
@@ -25,7 +25,6 @@
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
 #include "vtkProperty.h"
-#include "vtkShaderProgram.h"
 #include "vtkCamera.h"
 #include "vtkLight.h"
 
diff --git a/Examples/Rendering/Cxx/SurfaceLICDemo.cxx b/Examples/Rendering/Cxx/SurfaceLICDemo.cxx
deleted file mode 100644
index 711c782..0000000
--- a/Examples/Rendering/Cxx/SurfaceLICDemo.cxx
+++ /dev/null
@@ -1,23 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    SurfaceLICDemo.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "TestSurfaceLIC.h"
-
-extern int SurfaceLIC( int argc, char * argv[] );
-
-int main( int argc, char * argv[] )
-{
-  RenderingMode = SURFACE_LIC_DEMO;
-  return SurfaceLIC( argc, argv );
-}
diff --git a/Examples/Tutorial/Step1/Cxx/CMakeLists.txt b/Examples/Tutorial/Step1/Cxx/CMakeLists.txt
index 3c24226..46cfc71 100644
--- a/Examples/Tutorial/Step1/Cxx/CMakeLists.txt
+++ b/Examples/Tutorial/Step1/Cxx/CMakeLists.txt
@@ -1,15 +1,13 @@
 cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
-PROJECT (Step1)
+project (Step1)
 
-if(NOT VTK_BINARY_DIR)
-  FIND_PACKAGE(VTK COMPONENTS
-    vtkFiltersSources
-    vtkRenderingCore
-    vtkRenderingOpenGL
-  )
-  include(${VTK_USE_FILE})
-endif()
+find_package(VTK COMPONENTS
+  vtkFiltersSources
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+)
+include(${VTK_USE_FILE})
 
 add_executable(Cone MACOSX_BUNDLE Cone.cxx)
 target_link_libraries(Cone ${VTK_LIBRARIES})
diff --git a/Examples/Tutorial/Step2/Cxx/CMakeLists.txt b/Examples/Tutorial/Step2/Cxx/CMakeLists.txt
index 2c2a384..f9b9812 100644
--- a/Examples/Tutorial/Step2/Cxx/CMakeLists.txt
+++ b/Examples/Tutorial/Step2/Cxx/CMakeLists.txt
@@ -1,15 +1,14 @@
 cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
-PROJECT (Step2)
 
-IF(NOT VTK_BINARY_DIR)
-  FIND_PACKAGE(VTK COMPONENTS
-    vtkCommonCore
-    vtkFiltersSources
-    vtkRenderingCore
-    vtkRenderingOpenGL
-  )
-  include(${VTK_USE_FILE})
-endif()
+project (Step2)
+
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersSources
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+)
+include(${VTK_USE_FILE})
 
 add_executable(Cone2 MACOSX_BUNDLE Cone2.cxx)
 target_link_libraries(Cone2 ${VTK_LIBRARIES})
diff --git a/Examples/Tutorial/Step3/Cxx/CMakeLists.txt b/Examples/Tutorial/Step3/Cxx/CMakeLists.txt
index 36c21f5..f95b870 100644
--- a/Examples/Tutorial/Step3/Cxx/CMakeLists.txt
+++ b/Examples/Tutorial/Step3/Cxx/CMakeLists.txt
@@ -1,15 +1,13 @@
 cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
-PROJECT (Step3)
+project (Step3)
 
-if(NOT VTK_BINARY_DIR)
-  FIND_PACKAGE(VTK COMPONENTS
-    vtkFiltersSources
-    vtkRenderingCore
-    vtkRenderingOpenGL
-  )
-  include(${VTK_USE_FILE})
-endif()
+find_package(VTK COMPONENTS
+  vtkFiltersSources
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+)
+include(${VTK_USE_FILE})
 
 add_executable(Cone3 MACOSX_BUNDLE Cone3.cxx)
 target_link_libraries(Cone3 ${VTK_LIBRARIES})
diff --git a/Examples/Tutorial/Step4/Cxx/CMakeLists.txt b/Examples/Tutorial/Step4/Cxx/CMakeLists.txt
index b54ea9d..cc907a6 100644
--- a/Examples/Tutorial/Step4/Cxx/CMakeLists.txt
+++ b/Examples/Tutorial/Step4/Cxx/CMakeLists.txt
@@ -1,15 +1,13 @@
 cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
-PROJECT (Step4)
+project (Step4)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkFiltersSources
-    vtkRenderingCore
-    vtkRenderingOpenGL
-  )
+find_package(VTK COMPONENTS
+  vtkFiltersSources
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+)
 include(${VTK_USE_FILE})
-endif()
 
 add_executable(Cone4 MACOSX_BUNDLE Cone4.cxx)
 target_link_libraries(Cone4 ${VTK_LIBRARIES})
diff --git a/Examples/Tutorial/Step5/Cxx/CMakeLists.txt b/Examples/Tutorial/Step5/Cxx/CMakeLists.txt
index 879b15a..a8698aa 100644
--- a/Examples/Tutorial/Step5/Cxx/CMakeLists.txt
+++ b/Examples/Tutorial/Step5/Cxx/CMakeLists.txt
@@ -2,16 +2,13 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT (Step5)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkFiltersSources
-    vtkInteractionStyle
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkInteractionStyle
-  )
-  include(${VTK_USE_FILE})
-endif()
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersSources
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+)
+include(${VTK_USE_FILE})
 
 add_executable(Cone5 MACOSX_BUNDLE Cone5.cxx)
 target_link_libraries(Cone5 ${VTK_LIBRARIES})
diff --git a/Examples/Tutorial/Step6/Cxx/CMakeLists.txt b/Examples/Tutorial/Step6/Cxx/CMakeLists.txt
index 73b1fff..4cdbd80 100644
--- a/Examples/Tutorial/Step6/Cxx/CMakeLists.txt
+++ b/Examples/Tutorial/Step6/Cxx/CMakeLists.txt
@@ -2,19 +2,13 @@ cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
 PROJECT (Step6)
 
-if(NOT VTK_BINARY_DIR)
-  FIND_PACKAGE(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonTransforms
-    vtkFiltersSources
-    vtkInteractionStyle
-    vtkInteractionWidgets
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkInteractionStyle
+find_package(VTK COMPONENTS
+  vtkFiltersSources
+  vtkInteractionWidgets
+  vtkRenderingOpenGL
 )
-  include(${VTK_USE_FILE})
-endif()
+include(${VTK_USE_FILE})
 
 add_executable(Cone6 MACOSX_BUNDLE Cone6.cxx)
+find_package( VTK )
 target_link_libraries(Cone6 ${VTK_LIBRARIES})
diff --git a/Examples/VisualizationAlgorithms/Cxx/Baseline/TestBandedContours.png.md5 b/Examples/VisualizationAlgorithms/Cxx/Baseline/TestBandedContours.png.md5
new file mode 100644
index 0000000..3745007
--- /dev/null
+++ b/Examples/VisualizationAlgorithms/Cxx/Baseline/TestBandedContours.png.md5
@@ -0,0 +1 @@
+2683dd99bf47e55b4d7be58ef480ef1c
diff --git a/Examples/VisualizationAlgorithms/Cxx/Baseline/TestFilledContours.png.md5 b/Examples/VisualizationAlgorithms/Cxx/Baseline/TestFilledContours.png.md5
new file mode 100644
index 0000000..5b126ed
--- /dev/null
+++ b/Examples/VisualizationAlgorithms/Cxx/Baseline/TestFilledContours.png.md5
@@ -0,0 +1 @@
+0b4a546028931a3bc83f54e83760457f
diff --git a/Examples/VisualizationAlgorithms/Cxx/Baseline/TestTubesWithVaryingRadiusAndColors.png.md5 b/Examples/VisualizationAlgorithms/Cxx/Baseline/TestTubesWithVaryingRadiusAndColors.png.md5
new file mode 100644
index 0000000..ee8958a
--- /dev/null
+++ b/Examples/VisualizationAlgorithms/Cxx/Baseline/TestTubesWithVaryingRadiusAndColors.png.md5
@@ -0,0 +1 @@
+e629c5fb6b01c8342c10e052a6da2b98
diff --git a/Examples/VisualizationAlgorithms/Cxx/CMakeLists.txt b/Examples/VisualizationAlgorithms/Cxx/CMakeLists.txt
index 79120c9..b4714c1 100644
--- a/Examples/VisualizationAlgorithms/Cxx/CMakeLists.txt
+++ b/Examples/VisualizationAlgorithms/Cxx/CMakeLists.txt
@@ -1,20 +1,18 @@
 cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
-PROJECT (VisualizationAlgorithms)
-
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkFiltersCore
-    vtkFiltersModeling
-    vtkIOXML
-    vtkInteractionStyle
-    vtkRenderingCore
-    vtkRenderingOpenGL
+project (VisualizationAlgorithms)
+
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkCommonDataModel
+  vtkFiltersCore
+  vtkFiltersModeling
+  vtkIOXML
+  vtkInteractionStyle
+  vtkRenderingOpenGL
+  vtkTestingRendering
 )
-  include(${VTK_USE_FILE})
-endif()
+include(${VTK_USE_FILE})
 
 add_executable(TubesWithVaryingRadiusAndColors MACOSX_BUNDLE TubesWithVaryingRadiusAndColors.cxx)
 add_executable(FilledContours MACOSX_BUNDLE FilledContours.cxx)
@@ -27,30 +25,14 @@ target_link_libraries(BandedContours ${VTK_LIBRARIES})
 if(BUILD_TESTING)
   if(vtkTestingRendering_LOADED)
     ######## Regression Testing ########
-    set(KIT VisualizationAlgorithmsExamples)
-    set(MyTests
-        TestFilledContours.cxx
-        TestBandedContours.cxx
-        TestTubesWithVaryingRadiusAndColors.cxx
-    )
-
-    include(vtkTestingObjectFactory)
-
-    add_executable(${KIT}CxxTests ${KIT}CxxTests.cxx ${MyTests})
-    target_link_libraries(${KIT}CxxTests ${VTK_LIBRARIES})
-
-    add_test(NAME ${KIT}-TubesWithVaryingRadiusAndColors
-             COMMAND ${KIT}CxxTests
-             TestTubesWithVaryingRadiusAndColors)
-    if (VTK_DATA_ROOT)
-      add_test(NAME ${KIT}-FilledContours
-               COMMAND ${KIT}CxxTests
-               TestFilledContours
-               ${VTK_DATA_ROOT}/Data/filledContours.vtp 10)
-      add_test(NAME ${KIT}-BandedContours
-               COMMAND ${KIT}CxxTests
-               TestBandedContours
-               ${VTK_DATA_ROOT}/Data/filledContours.vtp 10)
-    endif()
+    set(vtk-example VisualizationAlgorithmsExamples)
+    set(TestFilledContours_ARGS "DATA{${VTK_TEST_INPUT_DIR}/filledContours.vtp}" 10)
+    set(TestBandedContours_ARGS "DATA{${VTK_TEST_INPUT_DIR}/filledContours.vtp}" 10)
+    vtk_add_test_cxx(
+      TestTubesWithVaryingRadiusAndColors.cxx
+      TestFilledContours.cxx
+      TestBandedContours.cxx
+      )
+    vtk_test_cxx_executable(${vtk-example}CxxTests RENDERING_FACTORY)
   endif()
 endif()
diff --git a/Examples/VolumeRendering/Cxx/CMakeLists.txt b/Examples/VolumeRendering/Cxx/CMakeLists.txt
index e9d841b..1f8c587 100644
--- a/Examples/VolumeRendering/Cxx/CMakeLists.txt
+++ b/Examples/VolumeRendering/Cxx/CMakeLists.txt
@@ -1,21 +1,15 @@
 cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
-PROJECT (VolumeRendering)
+project (VolumeRendering)
 
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkIOImage
-    vtkIOXML
-    vtkImagingCore
-    vtkInteractionWidgets
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkRenderingVolume
-  )
-  include(${VTK_USE_FILE})
-endif()
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkIOImage
+  vtkIOXML
+  vtkInteractionWidgets
+  vtkRenderingVolumeOpenGL
+)
+include(${VTK_USE_FILE})
 
 set(RENDERING_EXAMPLES_SRCS
   GPURenderDemo
diff --git a/Examples/Widgets/Cxx/Baseline/TestBalloonWidget.png.md5 b/Examples/Widgets/Cxx/Baseline/TestBalloonWidget.png.md5
new file mode 100644
index 0000000..bc0ffcd
--- /dev/null
+++ b/Examples/Widgets/Cxx/Baseline/TestBalloonWidget.png.md5
@@ -0,0 +1 @@
+a7a4103bd6c690db0c5031d3115ed386
diff --git a/Examples/Widgets/Cxx/Baseline/TestSlider.png.md5 b/Examples/Widgets/Cxx/Baseline/TestSlider.png.md5
new file mode 100644
index 0000000..2f79c41
--- /dev/null
+++ b/Examples/Widgets/Cxx/Baseline/TestSlider.png.md5
@@ -0,0 +1 @@
+1502c47e4469bffbd639373f9067fab7
diff --git a/Examples/Widgets/Cxx/Baseline/TestSlider2D.png.md5 b/Examples/Widgets/Cxx/Baseline/TestSlider2D.png.md5
new file mode 100644
index 0000000..33c38a1
--- /dev/null
+++ b/Examples/Widgets/Cxx/Baseline/TestSlider2D.png.md5
@@ -0,0 +1 @@
+3dbe196c21e0a4025d4640bfe227ed2f
diff --git a/Examples/Widgets/Cxx/Baseline/TestSlider2D_1.png.md5 b/Examples/Widgets/Cxx/Baseline/TestSlider2D_1.png.md5
new file mode 100644
index 0000000..d427c41
--- /dev/null
+++ b/Examples/Widgets/Cxx/Baseline/TestSlider2D_1.png.md5
@@ -0,0 +1 @@
+5f7cae168c119b40ab9a4cc5d6ca14f2
diff --git a/Examples/Widgets/Cxx/Baseline/TestSlider2D_2.png.md5 b/Examples/Widgets/Cxx/Baseline/TestSlider2D_2.png.md5
new file mode 100644
index 0000000..73c3774
--- /dev/null
+++ b/Examples/Widgets/Cxx/Baseline/TestSlider2D_2.png.md5
@@ -0,0 +1 @@
+b3ae26450af0105afb8177590d83c463
diff --git a/Examples/Widgets/Cxx/CMakeLists.txt b/Examples/Widgets/Cxx/CMakeLists.txt
index 0790035..9db0b0e 100644
--- a/Examples/Widgets/Cxx/CMakeLists.txt
+++ b/Examples/Widgets/Cxx/CMakeLists.txt
@@ -1,21 +1,16 @@
 cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
 
-PROJECT (Widgets)
-
-if(NOT VTK_BINARY_DIR)
-  find_package(VTK COMPONENTS
-    vtkCommonComputationalGeometry
-    vtkCommonCore
-    vtkCommonDataModel
-    vtkFiltersSources
-    vtkInteractionStyle
-    vtkInteractionWidgets
-    vtkRenderingCore
-    vtkRenderingOpenGL
-    vtkRenderingFreeTypeOpenGL
+project (Widgets)
+
+find_package(VTK COMPONENTS
+  vtkCommonCore
+  vtkFiltersSources
+  vtkInteractionWidgets
+  vtkRenderingOpenGL
+  vtkTestingRendering
 )
-  include(${VTK_USE_FILE})
-endif()
+include(${VTK_USE_FILE})
+
 
 add_executable(Slider MACOSX_BUNDLE Slider.cxx)
 target_link_libraries(Slider ${VTK_LIBRARIES})
@@ -29,16 +24,12 @@ target_link_libraries(BalloonWidget ${VTK_LIBRARIES})
 if(BUILD_TESTING)
   if(vtkTestingRendering_LOADED)
     ######## Regression Testing ########
-    set(KIT WidgetsExamples)
-    set(MyTests TestSlider.cxx TestSlider2D.cxx TestBalloonWidget.cxx)
-
-    include(vtkTestingObjectFactory)
-
-    add_executable(${KIT}CxxTests ${KIT}CxxTests.cxx ${MyTests})
-    target_link_libraries(${KIT}CxxTests ${VTK_LIBRARIES})
-
-    add_test(NAME ${KIT}-Slider COMMAND ${KIT}CxxTests TestSlider)
-    add_test(NAME ${KIT}-Slider2D COMMAND ${KIT}CxxTests TestSlider2D)
-    add_test(NAME ${KIT}-BalloonWidget COMMAND ${KIT}CxxTests TestBalloonWidget)
+    set(vtk-example WidgetsExamples)
+    vtk_add_test_cxx(
+      TestSlider.cxx
+      TestSlider2D.cxx
+      TestBalloonWidget.cxx
+      )
+    vtk_test_cxx_executable(${vtk-example}CxxTests RENDERING_FACTORY)
   endif()
 endif()
diff --git a/Filters/AMR/Testing/Cxx/CMakeLists.txt b/Filters/AMR/Testing/Cxx/CMakeLists.txt
index ad1ddb7..b9b8c08 100644
--- a/Filters/AMR/Testing/Cxx/CMakeLists.txt
+++ b/Filters/AMR/Testing/Cxx/CMakeLists.txt
@@ -1,25 +1,11 @@
-## List all test sources
-set(MyTests
-    TestAMRGhostLayerStripping.cxx
-    TestAMRBlanking.cxx
-    TestAMRIterator.cxx
-    TestImageToAMR.cxx
-    )
-
-## Create test source list
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-    ${MyTests}
-    EXTRA_INCLUDE vtkTestDriver.h
-    )
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-## Add all executables
-foreach(test ${TestsToRun})
-  get_filename_component(t ${test} NAME_WE)
-  add_test(NAME ${vtk-module}Cxx-${t}
-    COMMAND ${vtk-module}CxxTests ${t} -D ${VTK_DATA_ROOT})
-  set_tests_properties(${vtk-module}Cxx-${t} PROPERTIES FAIL_REGULAR_EXPRESSION "Error")
+set(tests
+  TestAMRGhostLayerStripping
+  TestAMRBlanking
+  TestAMRIterator
+  TestImageToAMR
+  )
+foreach(t ${tests})
+  vtk_add_test_cxx(${t}.cxx NO_VALID NO_OUTPUT)
+  set_property(TEST ${vtk-module}Cxx-${t} PROPERTY FAIL_REGULAR_EXPRESSION "Error")
 endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/AMR/Testing/Python/CMakeLists.txt b/Filters/AMR/Testing/Python/CMakeLists.txt
index b12067f..eaf20c2 100644
--- a/Filters/AMR/Testing/Python/CMakeLists.txt
+++ b/Filters/AMR/Testing/Python/CMakeLists.txt
@@ -4,11 +4,7 @@ if(VTK_PYTHON_EXE)
       TestAMRExtractLevel
       TestAMRSliceFilter
    )
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Python-${tfile}
-      COMMAND ${VTK_PYTHON_EXE}
-        ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.py -D ${VTK_DATA_ROOT})
-    set_tests_properties(${vtk-module}Python-${tfile} PROPERTIES FAIL_REGULAR_EXPRESSION "Error")
-  endif()
+  vtk_add_test_python(${tfile}.py NO_VALID NO_OUTPUT)
+  set_tests_properties(${vtk-module}Python-${tfile} PROPERTIES FAIL_REGULAR_EXPRESSION "Error")
   endforeach()
 endif()
\ No newline at end of file
diff --git a/Filters/AMR/vtkAMRResampleFilter.cxx b/Filters/AMR/vtkAMRResampleFilter.cxx
index 5d91502..682407d 100644
--- a/Filters/AMR/vtkAMRResampleFilter.cxx
+++ b/Filters/AMR/vtkAMRResampleFilter.cxx
@@ -193,7 +193,7 @@ int vtkAMRResampleFilter::RequestData(
    vtkMultiBlockDataSet *mbds =
       vtkMultiBlockDataSet::SafeDownCast(
           output->Get(vtkDataObject::DATA_OBJECT() ) );
-   assert( "pre: ouput grid is NULL" && (mbds != NULL) );
+   assert( "pre: output grid is NULL" && (mbds != NULL) );
 
   // STEP 2: Get Metadata
   if( this->DemandDrivenMode == 1 )
diff --git a/Filters/AMR/vtkAMRUtilities.h b/Filters/AMR/vtkAMRUtilities.h
index 9d24aec..bf35298 100644
--- a/Filters/AMR/vtkAMRUtilities.h
+++ b/Filters/AMR/vtkAMRUtilities.h
@@ -73,8 +73,8 @@ public:
   static void BlankCells(vtkOverlappingAMR* amr,  vtkMultiProcessController *myController);
 
 protected:
-  vtkAMRUtilities() {};
-  ~vtkAMRUtilities() {};
+  vtkAMRUtilities() {}
+  ~vtkAMRUtilities() {}
 
   // Description:
   // Given the real-extent w.r.t. the ghosted grid, this method copies the
diff --git a/Filters/Core/CMakeLists.txt b/Filters/Core/CMakeLists.txt
index ff92f77..f9b12ff 100644
--- a/Filters/Core/CMakeLists.txt
+++ b/Filters/Core/CMakeLists.txt
@@ -57,6 +57,7 @@ set(Module_SRCS
   vtkTensorGlyph.cxx
   vtkThreshold.cxx
   vtkThresholdPoints.cxx
+  vtkTransposeTable.cxx
   vtkTriangleFilter.cxx
   vtkTubeFilter.cxx
   vtkVectorDot.cxx
diff --git a/Filters/Core/Testing/Cxx/CMakeLists.txt b/Filters/Core/Testing/Cxx/CMakeLists.txt
index a296ff2..4636b5a 100644
--- a/Filters/Core/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Core/Testing/Cxx/CMakeLists.txt
@@ -1,44 +1,31 @@
-# Tests with data
-if(VTK_DATA_ROOT)
-  set(NEEDS_DATA
-    TestArrayCalculator
-    )
-endif()
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${NEEDS_DATA}
-  TestGhostArray.cxx
-  # TestAppendPolyData.cxx
-  TestAppendSelection.cxx
-  TestAssignAttribute.cxx
-  TestCellDataToPointData.cxx
-  TestCenterOfMass.cxx
-  TestDecimatePolylineFilter.cxx
+vtk_add_test_cxx(
+  TestAppendPolyData.cxx,NO_VALID
+  TestAppendSelection.cxx,NO_VALID
+  TestArrayCalculator.cxx,NO_VALID
+  TestAssignAttribute.cxx,NO_VALID
+  TestCellDataToPointData.cxx,NO_VALID
+  TestCenterOfMass.cxx,NO_VALID
+  TestCleanPolyData.cxx,NO_VALID
+  TestClipPolyData.cxx,NO_VALID
+  TestConnectivityFilter.cxx,NO_VALID
+  TestCutter.cxx,NO_VALID
+  TestDecimatePolylineFilter.cxx,NO_VALID
+  TestDecimatePro.cxx,NO_VALID
   TestDelaunay2D.cxx
-  TestExecutionTimer.cxx
+  TestDelaunay3D.cxx,NO_VALID
+  TestExecutionTimer.cxx,NO_VALID
+  TestFeatureEdges.cxx,NO_VALID
+  TestGhostArray.cxx,NO_VALID
   TestGlyph3D.cxx
-  TestImplicitPolyDataDistance.cxx
-  TestCutter.cxx
-  TestThreshold.cxx
-
-  EXTRA_INCLUDE vtkTestDriver.h)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
\ No newline at end of file
+  TestHedgeHog.cxx,NO_VALID
+  TestImplicitPolyDataDistance.cxx,NO_VALID
+  TestMaskPoints.cxx,NO_VALID
+  TestPolyDataConnectivityFilter.cxx,NO_VALID
+  TestSmoothPolyDataFilter.cxx,NO_VALID
+  TestSMPPipelineContour.cxx,NO_VALID
+  TestThreshold.cxx,NO_VALID
+  TestThresholdPoints.cxx,NO_VALID
+  TestTransposeTable.cxx,NO_VALID
+  TestTubeFilter.cxx,NO_VALID
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/Core/Testing/Cxx/TestAppendPolyData.cxx b/Filters/Core/Testing/Cxx/TestAppendPolyData.cxx
index 771584d..48ac1db 100644
--- a/Filters/Core/Testing/Cxx/TestAppendPolyData.cxx
+++ b/Filters/Core/Testing/Cxx/TestAppendPolyData.cxx
@@ -13,79 +13,191 @@
 
 =========================================================================*/
 
-#include <vtkSmartPointer.h>
-#include <vtkPolyData.h>
-#include <vtkPoints.h>
 #include <vtkAppendPolyData.h>
-#include <vtkXMLPolyDataWriter.h>
 #include <vtkCellArray.h>
+#include <vtkSmartPointer.h>
+#include <vtkXMLPolyDataWriter.h>
 
 int TestAppendPolyData(int, char *[])
 {
-  vtkSmartPointer<vtkPoints> points1 = vtkSmartPointer<vtkPoints>::New();
-  points1->InsertNextPoint(0,0,0);
-  points1->InsertNextPoint(1,1,1);
+  vtkSmartPointer<vtkPoints> pointsArray0 = vtkSmartPointer<vtkPoints>::New();
+  pointsArray0->InsertNextPoint(0.0, 0.0, 0.0);
+  pointsArray0->InsertNextPoint(1.0, 1.0, 1.0);
 
-  vtkSmartPointer<vtkPoints> points2 = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkPoints> pointsArray1 = vtkSmartPointer<vtkPoints>::New();
   vtkSmartPointer<vtkCellArray> vertices = vtkSmartPointer<vtkCellArray>::New();
-  vtkIdType pid[1];
-  pid[0] = points2->InsertNextPoint(5,5,5);
-  vertices->InsertNextCell ( 1,pid );
-  pid[0] = points2->InsertNextPoint(6,6,6);
-  vertices->InsertNextCell ( 1,pid );
-
-  vtkSmartPointer<vtkPolyData> polydata1 = vtkSmartPointer<vtkPolyData>::New();
-  polydata1->SetPoints(points1);
-  polydata1->SetVerts(vertices);
-  /*
-  cout << "polydata1" << endl;
-  cout << "There are " << polydata1->GetNumberOfPoints() << " points." << endl;
-  cout << "There are " << polydata1->GetNumberOfCells() << " cells." << endl;
-  */
-  vtkSmartPointer<vtkXMLPolyDataWriter> writer1 = vtkSmartPointer<vtkXMLPolyDataWriter>::New();
-  writer1->SetFileName("polydata1.vtp");
-  writer1->SetInput(polydata1);
-  writer1->Write();
-
-  vtkSmartPointer<vtkPolyData> polydata2 = vtkSmartPointer<vtkPolyData>::New();
-  polydata2->SetPoints(points2);
-  /*
-  cout << "polydata2" << endl;
-  cout << "There are " << polydata2->GetNumberOfPoints() << " points." << endl;
-  cout << "There are " << polydata2->GetNumberOfCells() << " cells." << endl;
-  */
-  vtkSmartPointer<vtkXMLPolyDataWriter> writer2 = vtkSmartPointer<vtkXMLPolyDataWriter>::New();
-  writer2->SetFileName("polydata2.vtp");
-  writer2->SetInput(polydata2);
-  writer2->Write();
-
-  vtkSmartPointer<vtkAppendPolyData> appendFilter = vtkSmartPointer<vtkAppendPolyData>::New();
-  appendFilter->AddInput(polydata1);
-  appendFilter->AddInput(polydata2);
-  appendFilter->Update();
-
-  vtkPolyData* polydataCombined = appendFilter->GetOutput();
-  vtkSmartPointer<vtkXMLPolyDataWriter> writerCombined = vtkSmartPointer<vtkXMLPolyDataWriter>::New();
-  writerCombined->SetFileName("polydataCombined.vtp");
-  writerCombined->SetInput(polydataCombined);
-  writerCombined->Write();
-  /*
-  cout << "Combined" << endl;
-  cout << "There are " << polydataCombined->GetNumberOfPoints() << " points." << endl;
-  cout << "There are " << polydataCombined->GetNumberOfCells() << " cells." << endl;
-  */
-  if(polydataCombined->GetNumberOfPoints() != polydata1->GetNumberOfPoints() + polydata2->GetNumberOfPoints())
+  vtkIdType pointIds[1];
+  pointIds[0] = pointsArray1->InsertNextPoint(5.0, 5.0, 5.0);
+  vertices->InsertNextCell(1, pointIds);
+  pointIds[0] = pointsArray1->InsertNextPoint(6.0, 6.0, 6.0);
+  vertices->InsertNextCell(1, pointIds);
+
+  vtkSmartPointer<vtkPolyData> inputPolyData0 = vtkSmartPointer<vtkPolyData>::New();
+  vtkSmartPointer<vtkPoints> points0 = vtkSmartPointer<vtkPoints>::New();
+  points0->SetDataType(VTK_FLOAT);
+  points0->DeepCopy(pointsArray0);
+  inputPolyData0->SetPoints(points0);
+
+  vtkSmartPointer<vtkXMLPolyDataWriter> inputWriter0 = vtkSmartPointer<vtkXMLPolyDataWriter>::New();
+  inputWriter0->SetFileName("inputpolydata0.vtp");
+  inputWriter0->SetInputData(inputPolyData0);
+  inputWriter0->Write();
+
+  vtkSmartPointer<vtkPolyData> inputPolyData1 = vtkSmartPointer<vtkPolyData>::New();
+  vtkSmartPointer<vtkPoints> points1 = vtkSmartPointer<vtkPoints>::New();
+  points1->SetDataType(VTK_FLOAT);
+  points1->DeepCopy(pointsArray1);
+  inputPolyData1->SetPoints(points1);
+  inputPolyData1->SetVerts(vertices);
+
+  vtkSmartPointer<vtkXMLPolyDataWriter> inputWriter1 = vtkSmartPointer<vtkXMLPolyDataWriter>::New();
+  inputWriter1->SetFileName("inputpolydata1.vtp");
+  inputWriter1->SetInputData(inputPolyData1);
+  inputWriter1->Write();
+
+  vtkSmartPointer<vtkAppendPolyData> appendPolyData = vtkSmartPointer<vtkAppendPolyData>::New();
+  appendPolyData->SetOutputPointsPrecision(vtkAlgorithm::DEFAULT_PRECISION);
+
+  appendPolyData->AddInputData(inputPolyData0);
+  appendPolyData->AddInputData(inputPolyData1);
+
+  appendPolyData->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = appendPolyData->GetOutput();
+  vtkSmartPointer<vtkXMLPolyDataWriter> outputWriter = vtkSmartPointer<vtkXMLPolyDataWriter>::New();
+  outputWriter->SetFileName("outputpolydata.vtp");
+  outputWriter->SetInputData(outputPolyData);
+  outputWriter->Write();
+
+  if(outputPolyData->GetNumberOfPoints()
+    != inputPolyData0->GetNumberOfPoints() + inputPolyData1->GetNumberOfPoints())
+    {
+    std::cerr << "The output number of points is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  if(outputPolyData->GetNumberOfCells()
+    != inputPolyData0->GetNumberOfCells() + inputPolyData1->GetNumberOfCells())
     {
-    cerr << "The combined number of points is incorrect." << endl;
+    std::cerr << "The output number of cells is incorrect." << std::endl;
     return EXIT_FAILURE;
     }
 
-  if(polydataCombined->GetNumberOfCells() != polydata1->GetNumberOfCells() + polydata2->GetNumberOfCells())
+  if(outputPolyData->GetPoints()->GetDataType() != VTK_FLOAT)
     {
-    cerr << "The combined number of cells is incorrect." << endl;
+    std::cerr << "The output points data type is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  points0->SetDataType(VTK_DOUBLE);
+  points0->DeepCopy(pointsArray0);
+  inputPolyData0->SetPoints(points0);
+
+  appendPolyData->Update();
+
+  outputPolyData = appendPolyData->GetOutput();
+
+  if(outputPolyData->GetPoints()->GetDataType() != VTK_DOUBLE)
+    {
+    std::cerr << "The output points data type is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  points1->SetDataType(VTK_DOUBLE);
+  points1->DeepCopy(pointsArray1);
+  inputPolyData1->SetPoints(points1);
+
+  appendPolyData->Update();
+
+  if(appendPolyData->GetOutput()->GetPoints()->GetDataType() != VTK_DOUBLE)
+    {
+    std::cerr << "The output points data type is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  appendPolyData->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  points0->SetDataType(VTK_FLOAT);
+  points0->DeepCopy(pointsArray0);
+  inputPolyData0->SetPoints(points0);
+
+  points1->SetDataType(VTK_FLOAT);
+  points1->DeepCopy(pointsArray1);
+  inputPolyData1->SetPoints(points1);
+
+  appendPolyData->Update();
+
+  if(appendPolyData->GetOutput()->GetPoints()->GetDataType() != VTK_FLOAT)
+    {
+    std::cerr << "The output points data type is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  points0->SetDataType(VTK_DOUBLE);
+  points0->DeepCopy(pointsArray0);
+  inputPolyData0->SetPoints(points0);
+
+  appendPolyData->Update();
+
+  if(appendPolyData->GetOutput()->GetPoints()->GetDataType() != VTK_FLOAT)
+    {
+    std::cerr << "The output points data type is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  points1->SetDataType(VTK_DOUBLE);
+  points1->DeepCopy(pointsArray1);
+  inputPolyData1->SetPoints(points1);
+
+  appendPolyData->Update();
+
+  if(appendPolyData->GetOutput()->GetPoints()->GetDataType() != VTK_FLOAT)
+    {
+    std::cerr << "The output points data type is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  appendPolyData->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  points0->SetDataType(VTK_FLOAT);
+  points0->DeepCopy(pointsArray0);
+  inputPolyData0->SetPoints(points0);
+
+  points1->SetDataType(VTK_FLOAT);
+  points1->DeepCopy(pointsArray1);
+  inputPolyData1->SetPoints(points1);
+
+  appendPolyData->Update();
+
+  if(appendPolyData->GetOutput()->GetPoints()->GetDataType() != VTK_DOUBLE)
+    {
+    std::cerr << "The output points data type is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  points0->SetDataType(VTK_DOUBLE);
+  points0->DeepCopy(pointsArray0);
+  inputPolyData0->SetPoints(points0);
+
+  appendPolyData->Update();
+
+  if(appendPolyData->GetOutput()->GetPoints()->GetDataType() != VTK_DOUBLE)
+    {
+    std::cerr << "The output points data type is incorrect." << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  points1->SetDataType(VTK_DOUBLE);
+  points1->DeepCopy(pointsArray1);
+  inputPolyData1->SetPoints(points1);
+
+  appendPolyData->Update();
+
+  if(appendPolyData->GetOutput()->GetPoints()->GetDataType() != VTK_DOUBLE)
+    {
+    std::cerr << "The output points data type is incorrect." << std::endl;
     return EXIT_FAILURE;
     }
 
   return EXIT_SUCCESS;
 }
-
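The rewritten TestAppendPolyData above exercises the output-points-precision contract: with vtkAlgorithm::DEFAULT_PRECISION the filter keeps the point type of its inputs, while SINGLE_PRECISION and DOUBLE_PRECISION force VTK_FLOAT and VTK_DOUBLE respectively. Below is a minimal stand-alone sketch of that pattern, reusing only calls that already appear in the test; the helper names makePolyData and appendedPointType are ours for illustration and are not part of the commit.

    #include <cstdlib>
    #include <vtkAlgorithm.h>
    #include <vtkAppendPolyData.h>
    #include <vtkPoints.h>
    #include <vtkPolyData.h>
    #include <vtkSmartPointer.h>

    // Build a one-point vtkPolyData whose points use the requested data type.
    static vtkSmartPointer<vtkPolyData> makePolyData(int dataType, double x)
    {
      vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
      points->SetDataType(dataType);
      points->InsertNextPoint(x, x, x);
      vtkSmartPointer<vtkPolyData> polyData = vtkSmartPointer<vtkPolyData>::New();
      polyData->SetPoints(points);
      return polyData;
    }

    // Append two inputs of the given point type and report the output point type.
    static int appendedPointType(int dataType, int precision)
    {
      vtkSmartPointer<vtkAppendPolyData> append =
        vtkSmartPointer<vtkAppendPolyData>::New();
      append->SetOutputPointsPrecision(precision);
      append->AddInputData(makePolyData(dataType, 0.0));
      append->AddInputData(makePolyData(dataType, 1.0));
      append->Update();
      return append->GetOutput()->GetPoints()->GetDataType();
    }

    int main(int, char *[])
    {
      // DEFAULT_PRECISION preserves the input type; SINGLE/DOUBLE override it.
      if (appendedPointType(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION) != VTK_DOUBLE ||
          appendedPointType(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION) != VTK_FLOAT ||
          appendedPointType(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION) != VTK_DOUBLE)
        {
        return EXIT_FAILURE;
        }
      return EXIT_SUCCESS;
    }

The same check recurs, with the appropriate filter class, in the new tests that follow (TestCleanPolyData.cxx, TestClipPolyData.cxx, TestConnectivityFilter.cxx, TestDecimatePro.cxx).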
diff --git a/Filters/Core/Testing/Cxx/TestCleanPolyData.cxx b/Filters/Core/Testing/Cxx/TestCleanPolyData.cxx
new file mode 100644
index 0000000..af1a31f
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestCleanPolyData.cxx
@@ -0,0 +1,133 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestCleanPolyData.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkCleanPolyData.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+}
+
+int CleanPolyData(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkCleanPolyData> cleanPolyData
+    = vtkSmartPointer<vtkCleanPolyData>::New();
+  cleanPolyData->SetOutputPointsPrecision(outputPointsPrecision);
+  cleanPolyData->SetInputData(inputPolyData);
+
+  cleanPolyData->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = cleanPolyData->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestCleanPolyData(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = CleanPolyData(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = CleanPolyData(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = CleanPolyData(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = CleanPolyData(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = CleanPolyData(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = CleanPolyData(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestClipPolyData.cxx b/Filters/Core/Testing/Cxx/TestClipPolyData.cxx
new file mode 100644
index 0000000..95d3f8c
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestClipPolyData.cxx
@@ -0,0 +1,137 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestClipPolyData.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkClipPolyData.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPlanes.h>
+#include <vtkSmartPointer.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+}
+
+int ClipPolyData(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkPlanes> planes = vtkSmartPointer<vtkPlanes>::New();
+
+  vtkSmartPointer<vtkClipPolyData> clipPolyData
+    = vtkSmartPointer<vtkClipPolyData>::New();
+  clipPolyData->SetOutputPointsPrecision(outputPointsPrecision);
+  clipPolyData->SetClipFunction(planes);
+  clipPolyData->SetInputData(inputPolyData);
+
+  clipPolyData->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = clipPolyData->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestClipPolyData(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = ClipPolyData(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ClipPolyData(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ClipPolyData(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ClipPolyData(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ClipPolyData(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ClipPolyData(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestConnectivityFilter.cxx b/Filters/Core/Testing/Cxx/TestConnectivityFilter.cxx
new file mode 100644
index 0000000..d4d2253
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestConnectivityFilter.cxx
@@ -0,0 +1,145 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestConnectivityFilter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkConnectivityFilter.h>
+#include <vtkFloatArray.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPointData.h>
+#include <vtkSmartPointer.h>
+#include <vtkUnstructuredGrid.h>
+
+namespace
+{
+void InitializeUnstructuredGrid(vtkUnstructuredGrid *unstructuredGrid, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> cells = vtkSmartPointer<vtkCellArray>::New();
+  cells->InsertNextCell(4);
+  vtkSmartPointer<vtkFloatArray> scalars = vtkSmartPointer<vtkFloatArray>::New();
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      randomSequence->Next();
+      scalars->InsertNextValue(randomSequence->GetValue());
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      cells->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      randomSequence->Next();
+      scalars->InsertNextValue(randomSequence->GetValue());
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      cells->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  scalars->Squeeze();
+  unstructuredGrid->GetPointData()->SetScalars(scalars);
+  points->Squeeze();
+  unstructuredGrid->SetPoints(points);
+  cells->Squeeze();
+  unstructuredGrid->SetCells(VTK_VERTEX, cells);
+}
+
+int FilterUnstructuredGridConnectivity(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkUnstructuredGrid> inputUnstructuredGrid
+    = vtkSmartPointer<vtkUnstructuredGrid>::New();
+  InitializeUnstructuredGrid(inputUnstructuredGrid, dataType);
+
+  vtkSmartPointer<vtkConnectivityFilter> connectivityFilter
+    = vtkSmartPointer<vtkConnectivityFilter>::New();
+  connectivityFilter->SetOutputPointsPrecision(outputPointsPrecision);
+  connectivityFilter->ScalarConnectivityOn();
+  connectivityFilter->SetScalarRange(0.25, 0.75);
+  connectivityFilter->SetInputData(inputUnstructuredGrid);
+
+  connectivityFilter->Update();
+
+  vtkSmartPointer<vtkUnstructuredGrid> outputUnstructuredGrid = connectivityFilter->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputUnstructuredGrid->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestConnectivityFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = FilterUnstructuredGridConnectivity(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterUnstructuredGridConnectivity(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterUnstructuredGridConnectivity(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterUnstructuredGridConnectivity(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterUnstructuredGridConnectivity(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterUnstructuredGridConnectivity(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestCutter.cxx b/Filters/Core/Testing/Cxx/TestCutter.cxx
index 4091f78..397e578 100644
--- a/Filters/Core/Testing/Cxx/TestCutter.cxx
+++ b/Filters/Core/Testing/Cxx/TestCutter.cxx
@@ -22,7 +22,7 @@
 #include "vtkDataSetTriangleFilter.h"
 #include "vtkPointDataToCellData.h"
 #include "vtkImageDataToPointSet.h"
-#include <assert.h>
+#include <cassert>
 
 bool TestStructured(int type)
 {
diff --git a/Filters/Core/Testing/Cxx/TestDecimatePolylineFilter.cxx b/Filters/Core/Testing/Cxx/TestDecimatePolylineFilter.cxx
index 3f5dd0e..6508c28 100644
--- a/Filters/Core/Testing/Cxx/TestDecimatePolylineFilter.cxx
+++ b/Filters/Core/Testing/Cxx/TestDecimatePolylineFilter.cxx
@@ -21,91 +21,106 @@
  * or without modification, are permitted provided that this Notice and any
  * statement of authorship are reproduced on all copies.
  */
-// -*- c++ -*- *******************************************************
-
-#include "vtkDecimatePolylineFilter.h"
-#include "vtkMath.h"
-#include "vtkPolyData.h"
-#include "vtkCellArray.h"
-#include "vtkPolyDataMapper.h"
-#include "vtkActor.h"
-#include "vtkRenderer.h"
-#include "vtkRenderWindow.h"
-#include "vtkRenderWindowInteractor.h"
-#include "vtkProperty.h"
-
-int TestDecimatePolylineFilter(int, char *[])
+
+#include <vtkActor.h>
+#include <vtkCellArray.h>
+#include <vtkDecimatePolylineFilter.h>
+#include <vtkMath.h>
+#include <vtkPolyData.h>
+#include <vtkPolyDataMapper.h>
+#include <vtkProperty.h>
+#include <vtkRenderer.h>
+#include <vtkRenderWindow.h>
+#include <vtkRenderWindowInteractor.h>
+#include <vtkSmartPointer.h>
+
+int TestDecimatePolylineFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
 {
-  const unsigned int numberofpoints = 100;
+  const unsigned int numberOfPoints = 100;
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  points->SetDataType(VTK_FLOAT);
 
-  vtkPolyData* circle = vtkPolyData::New();
-  vtkPoints*   points = vtkPoints::New();
-  vtkCellArray* lines = vtkCellArray::New();
-  vtkIdType* lineIndices = new vtkIdType[numberofpoints+1];
+  vtkIdType *lineIds = new vtkIdType[numberOfPoints+1];
 
-  for( unsigned int i = 0; i < numberofpoints; i++ )
+  for(unsigned int i = 0; i < numberOfPoints; ++i)
     {
-    const double angle = 2.0 * vtkMath::Pi() * static_cast< double >( i ) /
-      static_cast< double >( numberofpoints );
-    points->InsertPoint( static_cast< vtkIdType >( i ),
-                         cos( angle ),
-                         sin( angle ),
-                         0. );
-    lineIndices[i] = static_cast< vtkIdType >( i );
+    const double angle = 2.0 * vtkMath::Pi() * static_cast<double>(i)
+      / static_cast<double>(numberOfPoints);
+    points->InsertPoint(static_cast<vtkIdType>(i), std::cos(angle),
+      std::sin(angle), 0.0);
+    lineIds[i] = static_cast<vtkIdType>(i);
     }
-  lineIndices[numberofpoints] = 0;
-  lines->InsertNextCell( numberofpoints+1, lineIndices );
-  delete[] lineIndices;
 
-  circle->SetPoints( points );
-  circle->SetLines( lines );
-  points->Delete();
-  lines->Delete();
+  lineIds[numberOfPoints] = 0;
+
+  vtkSmartPointer<vtkCellArray> lines = vtkSmartPointer<vtkCellArray>::New();
+  lines->InsertNextCell(numberOfPoints + 1, lineIds);
+  delete[] lineIds;
 
-  vtkPolyDataMapper* c_mapper = vtkPolyDataMapper::New();
-  c_mapper->SetInputData( circle );
+  vtkSmartPointer<vtkPolyData> circle = vtkSmartPointer<vtkPolyData>::New();
+  circle->SetPoints(points);
+  circle->SetLines(lines);
 
-  vtkActor* c_actor = vtkActor::New();
-  c_actor->SetMapper( c_mapper );
+  vtkSmartPointer<vtkPolyDataMapper> circleMapper
+    = vtkSmartPointer<vtkPolyDataMapper>::New();
+  circleMapper->SetInputData(circle);
 
-  vtkDecimatePolylineFilter* decimate = vtkDecimatePolylineFilter::New();
-  decimate->SetInputData( circle );
-  decimate->SetTargetReduction( 0.95 );
-  decimate->Update();
+  vtkSmartPointer<vtkActor> circleActor = vtkSmartPointer<vtkActor>::New();
+  circleActor->SetMapper(circleMapper);
 
-  vtkPolyDataMapper* d_mapper = vtkPolyDataMapper::New();
-  d_mapper->SetInputConnection( decimate->GetOutputPort() );
+  vtkSmartPointer<vtkDecimatePolylineFilter> decimatePolylineFilter
+    = vtkSmartPointer<vtkDecimatePolylineFilter>::New();
+  decimatePolylineFilter->SetOutputPointsPrecision(vtkAlgorithm::DEFAULT_PRECISION);
+  decimatePolylineFilter->SetInputData(circle);
+  decimatePolylineFilter->SetTargetReduction(0.95);
+  decimatePolylineFilter->Update();
+
+  if(decimatePolylineFilter->GetOutput()->GetPoints()->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
 
-  vtkActor* d_actor = vtkActor::New();
-  d_actor->SetMapper( d_mapper );
-  d_actor->GetProperty()->SetColor( 1., 0. ,0. );
+  decimatePolylineFilter->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+  decimatePolylineFilter->Update();
 
-  vtkRenderer* ren = vtkRenderer::New();
-  ren->AddActor( c_actor );
-  ren->AddActor( d_actor );
+  if(decimatePolylineFilter->GetOutput()->GetPoints()->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
 
-  vtkRenderWindow* renwin = vtkRenderWindow::New();
-  renwin->AddRenderer( ren );
+  decimatePolylineFilter->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+  decimatePolylineFilter->Update();
 
-  vtkRenderWindowInteractor* iren = vtkRenderWindowInteractor::New();
-  iren->SetRenderWindow( renwin );
+  if(decimatePolylineFilter->GetOutput()->GetPoints()->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
 
-  renwin->Render();
+  vtkSmartPointer<vtkPolyDataMapper> decimatedMapper
+    = vtkSmartPointer<vtkPolyDataMapper>::New();
+  decimatedMapper->SetInputConnection(decimatePolylineFilter->GetOutputPort());
 
-  iren->CreateOneShotTimer( 1 );
+  vtkSmartPointer<vtkActor> decimatedActor
+    = vtkSmartPointer<vtkActor>::New();
+  decimatedActor->SetMapper(decimatedMapper);
+  decimatedActor->GetProperty()->SetColor(1.0, 0.0, 0.0);
 
-  iren->Delete();
-  renwin->Delete();
-  ren->Delete();
+  vtkSmartPointer<vtkRenderer> renderer
+    = vtkSmartPointer<vtkRenderer>::New();
+  renderer->AddActor(circleActor);
+  renderer->AddActor(decimatedActor);
 
-  d_actor->Delete();
-  d_mapper->Delete();
+  vtkSmartPointer<vtkRenderWindow> renderWindow
+    = vtkSmartPointer<vtkRenderWindow>::New();
+  renderWindow->AddRenderer(renderer);
 
-  c_actor->Delete();
-  c_mapper->Delete();
+  vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor
+    = vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  renderWindowInteractor->SetRenderWindow(renderWindow);
 
-  decimate->Delete();
-  circle->Delete();
+  renderWindow->Render();
+  renderWindowInteractor->CreateOneShotTimer(1);
 
-  return 0;
+  return EXIT_SUCCESS;
 }
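The TestDecimatePolylineFilter rewrite above also replaces the raw New()/Delete() pairs with vtkSmartPointer, so objects are released when the pointers leave scope. A minimal sketch of that idiom, using only classes the test already instantiates (illustrative only, not part of the commit):

    #include <vtkDecimatePolylineFilter.h>
    #include <vtkPolyData.h>
    #include <vtkSmartPointer.h>

    int main(int, char *[])
    {
      // ::New() hands ownership to the smart pointer; no explicit Delete() calls.
      vtkSmartPointer<vtkPolyData> circle = vtkSmartPointer<vtkPolyData>::New();

      vtkSmartPointer<vtkDecimatePolylineFilter> decimate =
        vtkSmartPointer<vtkDecimatePolylineFilter>::New();
      decimate->SetInputData(circle); // the filter holds its own reference

      return 0;
    } // circle and decimate are released here automatically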
diff --git a/Filters/Core/Testing/Cxx/TestDecimatePro.cxx b/Filters/Core/Testing/Cxx/TestDecimatePro.cxx
new file mode 100644
index 0000000..82e9c7b
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestDecimatePro.cxx
@@ -0,0 +1,168 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestDecimatePro.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkDecimatePro.h>
+#include <vtkSmartPointer.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  points->SetDataType(dataType);
+  points->InsertNextPoint(-1.40481710, -0.03868163, -1.01241910);
+  points->InsertNextPoint(-1.41186166, 0.29086590, 0.96023101);
+  points->InsertNextPoint(-0.13218975, -1.22439861, 1.21793830);
+  points->InsertNextPoint(-0.12514521, -1.55394614, -0.75471181);
+  points->InsertNextPoint(0.13218975, 1.22439861, -1.21793830);
+  points->InsertNextPoint(0.12514521, 1.55394614, 0.75471181);
+  points->InsertNextPoint(1.40481710, 0.03868163, 1.01241910);
+  points->InsertNextPoint(1.41186166, -0.29086590, -0.96023101);
+  points->Squeeze();
+
+  polyData->SetPoints(points);
+
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(8);
+  for(unsigned int i = 0; i < 8; ++i)
+    {
+    verts->InsertCellPoint(i);
+    }
+  verts->Squeeze();
+
+  polyData->SetVerts(verts);
+
+  vtkSmartPointer<vtkCellArray> polys = vtkSmartPointer<vtkCellArray>::New();
+  vtkIdType pointIds[3];
+  pointIds[0] = 0;
+  pointIds[1] = 1;
+  pointIds[2] = 2;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 2;
+  pointIds[2] = 3;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 3;
+  pointIds[2] = 7;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 4;
+  pointIds[2] = 5;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 5;
+  pointIds[2] = 1;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 7;
+  pointIds[2] = 4;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 1;
+  pointIds[1] = 2;
+  pointIds[2] = 6;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 1;
+  pointIds[1] = 6;
+  pointIds[2] = 5;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 2;
+  pointIds[1] = 3;
+  pointIds[2] = 6;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 3;
+  pointIds[1] = 7;
+  pointIds[2] = 6;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 4;
+  pointIds[1] = 5;
+  pointIds[2] = 6;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 4;
+  pointIds[1] = 6;
+  pointIds[2] = 7;
+  polys->InsertNextCell(3, pointIds);
+  polys->Squeeze();
+
+  polyData->SetPolys(polys);
+}
+
+int DecimatePro(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkDecimatePro> decimatePro
+    = vtkSmartPointer<vtkDecimatePro>::New();
+  decimatePro->SetOutputPointsPrecision(outputPointsPrecision);
+  decimatePro->SetInputData(inputPolyData);
+
+  decimatePro->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = decimatePro->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestDecimatePro(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = DecimatePro(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = DecimatePro(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = DecimatePro(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = DecimatePro(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = DecimatePro(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = DecimatePro(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestDelaunay3D.cxx b/Filters/Core/Testing/Cxx/TestDelaunay3D.cxx
new file mode 100644
index 0000000..80d5b43
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestDelaunay3D.cxx
@@ -0,0 +1,134 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestDelaunay3D.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkDelaunay3D.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+#include <vtkUnstructuredGrid.h>
+
+namespace
+{
+void InitializeUnstructuredGrid(vtkUnstructuredGrid *unstructuredGrid, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> cells = vtkSmartPointer<vtkCellArray>::New();
+  cells->InsertNextCell(4);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      cells->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      cells->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  points->Squeeze();
+  unstructuredGrid->SetPoints(points);
+  cells->Squeeze();
+  unstructuredGrid->SetCells(VTK_VERTEX, cells);
+}
+
+int Delaunay3D(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkUnstructuredGrid> inputUnstructuredGrid
+    = vtkSmartPointer<vtkUnstructuredGrid>::New();
+  InitializeUnstructuredGrid(inputUnstructuredGrid, dataType);
+
+  vtkSmartPointer<vtkDelaunay3D> delaunay
+    = vtkSmartPointer<vtkDelaunay3D>::New();
+  delaunay->SetOutputPointsPrecision(outputPointsPrecision);
+  delaunay->SetInputData(inputUnstructuredGrid);
+
+  delaunay->Update();
+
+  vtkSmartPointer<vtkUnstructuredGrid> outputUnstructuredGrid = delaunay->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputUnstructuredGrid->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestDelaunay3D(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = Delaunay3D(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = Delaunay3D(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = Delaunay3D(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = Delaunay3D(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = Delaunay3D(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = Delaunay3D(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestFeatureEdges.cxx b/Filters/Core/Testing/Cxx/TestFeatureEdges.cxx
new file mode 100644
index 0000000..1e511cf
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestFeatureEdges.cxx
@@ -0,0 +1,168 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestFeatureEdges.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkFeatureEdges.h>
+#include <vtkSmartPointer.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  points->SetDataType(dataType);
+  points->InsertNextPoint(-1.40481710, -0.03868163, -1.01241910);
+  points->InsertNextPoint(-1.41186166, 0.29086590, 0.96023101);
+  points->InsertNextPoint(-0.13218975, -1.22439861, 1.21793830);
+  points->InsertNextPoint(-0.12514521, -1.55394614, -0.75471181);
+  points->InsertNextPoint(0.13218975, 1.22439861, -1.21793830);
+  points->InsertNextPoint(0.12514521, 1.55394614, 0.75471181);
+  points->InsertNextPoint(1.40481710, 0.03868163, 1.01241910);
+  points->InsertNextPoint(1.41186166, -0.29086590, -0.96023101);
+  points->Squeeze();
+
+  polyData->SetPoints(points);
+
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(8);
+  for(unsigned int i = 0; i < 8; ++i)
+    {
+    verts->InsertCellPoint(i);
+    }
+  verts->Squeeze();
+
+  polyData->SetVerts(verts);
+
+  vtkSmartPointer<vtkCellArray> polys = vtkSmartPointer<vtkCellArray>::New();
+  vtkIdType pointIds[3];
+  pointIds[0] = 0;
+  pointIds[1] = 1;
+  pointIds[2] = 2;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 2;
+  pointIds[2] = 3;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 3;
+  pointIds[2] = 7;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 4;
+  pointIds[2] = 5;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 5;
+  pointIds[2] = 1;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 0;
+  pointIds[1] = 7;
+  pointIds[2] = 4;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 1;
+  pointIds[1] = 2;
+  pointIds[2] = 6;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 1;
+  pointIds[1] = 6;
+  pointIds[2] = 5;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 2;
+  pointIds[1] = 3;
+  pointIds[2] = 6;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 3;
+  pointIds[1] = 7;
+  pointIds[2] = 6;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 4;
+  pointIds[1] = 5;
+  pointIds[2] = 6;
+  polys->InsertNextCell(3, pointIds);
+  pointIds[0] = 4;
+  pointIds[1] = 6;
+  pointIds[2] = 7;
+  polys->InsertNextCell(3, pointIds);
+  polys->Squeeze();
+
+  polyData->SetPolys(polys);
+}
+
+int FeatureEdges(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkFeatureEdges> featureEdges
+    = vtkSmartPointer<vtkFeatureEdges>::New();
+  featureEdges->SetOutputPointsPrecision(outputPointsPrecision);
+  featureEdges->SetInputData(inputPolyData);
+
+  featureEdges->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = featureEdges->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestFeatureEdges(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = FeatureEdges(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FeatureEdges(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FeatureEdges(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FeatureEdges(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FeatureEdges(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FeatureEdges(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestHedgeHog.cxx b/Filters/Core/Testing/Cxx/TestHedgeHog.cxx
new file mode 100644
index 0000000..1624516
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestHedgeHog.cxx
@@ -0,0 +1,154 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestHedgeHog.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkFloatArray.h>
+#include <vtkHedgeHog.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPointData.h>
+#include <vtkSmartPointer.h>
+#include <vtkUnstructuredGrid.h>
+
+namespace
+{
+void InitializeUnstructuredGrid(vtkUnstructuredGrid *unstructuredGrid, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> cells = vtkSmartPointer<vtkCellArray>::New();
+  cells->InsertNextCell(4);
+  vtkSmartPointer<vtkFloatArray> vectors = vtkSmartPointer<vtkFloatArray>::New();
+  vectors->SetNumberOfComponents(3);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double vector[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        vector[j] = randomSequence->GetValue();
+        }
+      vectors->InsertNextTuple(vector);
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      cells->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float vector[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        vector[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      vectors->InsertNextTuple(vector);
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      cells->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  vectors->Squeeze();
+  unstructuredGrid->GetPointData()->SetVectors(vectors);
+  points->Squeeze();
+  unstructuredGrid->SetPoints(points);
+  cells->Squeeze();
+  unstructuredGrid->SetCells(VTK_VERTEX, cells);
+}
+
+int HedgeHog(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkUnstructuredGrid> unstructuredGrid
+    = vtkSmartPointer<vtkUnstructuredGrid>::New();
+  InitializeUnstructuredGrid(unstructuredGrid, dataType);
+
+  vtkSmartPointer<vtkHedgeHog> hedgeHog
+    = vtkSmartPointer<vtkHedgeHog>::New();
+  hedgeHog->SetOutputPointsPrecision(outputPointsPrecision);
+  hedgeHog->SetInputData(unstructuredGrid);
+
+  hedgeHog->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = hedgeHog->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestHedgeHog(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = HedgeHog(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = HedgeHog(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = HedgeHog(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = HedgeHog(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = HedgeHog(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = HedgeHog(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestMaskPoints.cxx b/Filters/Core/Testing/Cxx/TestMaskPoints.cxx
new file mode 100644
index 0000000..b3d996e
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestMaskPoints.cxx
@@ -0,0 +1,136 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestMaskPoints.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkMaskPoints.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+}
+
+int MaskPoints(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkMaskPoints> maskPoints
+    = vtkSmartPointer<vtkMaskPoints>::New();
+  maskPoints->SetOutputPointsPrecision(outputPointsPrecision);
+  maskPoints->SetMaximumNumberOfPoints(2);
+  maskPoints->SetRandomModeType(0);
+  maskPoints->RandomModeOn();
+  maskPoints->SetInputData(inputPolyData);
+
+  maskPoints->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = maskPoints->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestMaskPoints(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = MaskPoints(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = MaskPoints(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = MaskPoints(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = MaskPoints(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = MaskPoints(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = MaskPoints(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestPolyDataConnectivityFilter.cxx b/Filters/Core/Testing/Cxx/TestPolyDataConnectivityFilter.cxx
new file mode 100644
index 0000000..621638f
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestPolyDataConnectivityFilter.cxx
@@ -0,0 +1,144 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestPolyDataConnectivityFilter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkFloatArray.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPointData.h>
+#include <vtkPolyDataConnectivityFilter.h>
+#include <vtkSmartPointer.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+  vtkSmartPointer<vtkFloatArray> scalars = vtkSmartPointer<vtkFloatArray>::New();
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      randomSequence->Next();
+      scalars->InsertNextValue(randomSequence->GetValue());
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      randomSequence->Next();
+      scalars->InsertNextValue(randomSequence->GetValue());
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  scalars->Squeeze();
+  polyData->GetPointData()->SetScalars(scalars);
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+}
+
+int FilterPolyDataConnectivity(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkPolyDataConnectivityFilter> polyDataConnectivityFilter
+    = vtkSmartPointer<vtkPolyDataConnectivityFilter>::New();
+  polyDataConnectivityFilter->SetOutputPointsPrecision(outputPointsPrecision);
+  polyDataConnectivityFilter->ScalarConnectivityOn();
+  polyDataConnectivityFilter->SetScalarRange(0.25, 0.75);
+  polyDataConnectivityFilter->SetInputData(inputPolyData);
+
+  polyDataConnectivityFilter->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = polyDataConnectivityFilter->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestPolyDataConnectivityFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = FilterPolyDataConnectivity(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterPolyDataConnectivity(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterPolyDataConnectivity(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterPolyDataConnectivity(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterPolyDataConnectivity(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = FilterPolyDataConnectivity(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestSMPPipelineContour.cxx b/Filters/Core/Testing/Cxx/TestSMPPipelineContour.cxx
new file mode 100644
index 0000000..21b3eb9
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestSMPPipelineContour.cxx
@@ -0,0 +1,154 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSMPPipelineContour.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkNew.h"
+#include "vtkRTAnalyticSource.h"
+#include "vtkPolyData.h"
+#include "vtkTimerLog.h"
+#include "vtkSMPTools.h"
+#include "vtkXMLMultiBlockDataWriter.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkCompositeDataIterator.h"
+#include "vtkExtentTranslator.h"
+#include "vtkSMPThreadLocalObject.h"
+#include "vtkImageData.h"
+#include "vtkThreadedCompositeDataPipeline.h"
+#include "vtkSynchronizedTemplates3D.h"
+#include "vtkSmartPointer.h"
+
+const int EXTENT = 100;
+int WholeExtent[] = {-EXTENT, EXTENT, -EXTENT, EXTENT, -EXTENT, EXTENT};
+const int NUMBER_OF_PIECES = 50;
+static vtkImageData* Pieces[NUMBER_OF_PIECES];
+
+class vtkCreateImageData
+{
+  vtkSMPThreadLocalObject<vtkRTAnalyticSource> ImageSources;
+  vtkNew<vtkExtentTranslator> Translator;
+
+public:
+  void Initialize()
+  {
+    vtkRTAnalyticSource*& source = this->ImageSources.Local();
+    source->SetWholeExtent(-EXTENT, EXTENT, -EXTENT, EXTENT, -EXTENT, EXTENT);
+  }
+
+  void operator()(vtkIdType begin, vtkIdType end)
+  {
+    vtkRTAnalyticSource*& source = this->ImageSources.Local();
+
+    for (vtkIdType i=begin; i<end; i++)
+      {
+      int extent[6];
+      this->Translator->PieceToExtentThreadSafe(i,
+                                                NUMBER_OF_PIECES,
+                                                0,
+                                                WholeExtent,
+                                                extent,
+                                                vtkExtentTranslator::BLOCK_MODE,
+                                                0);
+      source->UpdateInformation();
+      source->SetUpdateExtent(0, extent);
+      source->Update();
+      vtkImageData* piece = vtkImageData::New();
+      piece->ShallowCopy(source->GetOutput());
+      Pieces[i] = piece;
+      }
+  }
+
+  void Reduce()
+  {
+  }
+};
+
+int TestSMPPipelineContour(int, char *[])
+{
+  vtkSMPTools::Initialize(2);
+
+  vtkCreateImageData cid;
+  vtkNew<vtkTimerLog> tl;
+
+  tl->StartTimer();
+  vtkSMPTools::For(0, NUMBER_OF_PIECES, cid);
+  tl->StopTimer();
+
+  cout << "Creation time: " << tl->GetElapsedTime() << endl;
+
+  vtkNew<vtkMultiBlockDataSet> mbds;
+  for (int i=0; i<NUMBER_OF_PIECES; i++)
+    {
+    mbds->SetBlock(i, Pieces[i]);
+    Pieces[i]->Delete();
+    }
+
+  vtkNew<vtkThreadedCompositeDataPipeline> executive;
+
+  vtkNew<vtkSynchronizedTemplates3D> cf;
+  cf->SetExecutive(executive.GetPointer());
+  cf->SetInputData(mbds.GetPointer());
+  cf->SetInputArrayToProcess(0, 0, 0, 0, "RTData");
+  cf->SetValue(0, 200);
+  tl->StartTimer();
+  cf->Update();
+  tl->StopTimer();
+
+  cout << "Execution time: " << tl->GetElapsedTime() << endl;
+
+  vtkIdType numCells = 0;
+  vtkSmartPointer<vtkCompositeDataIterator> iter;
+  iter.TakeReference(static_cast<vtkCompositeDataSet*>(cf->GetOutputDataObject(0))->NewIterator());
+  iter->InitTraversal();
+  while(!iter->IsDoneWithTraversal())
+    {
+    vtkPolyData* piece = static_cast<vtkPolyData*>(iter->GetCurrentDataObject());
+    numCells += piece->GetNumberOfCells();
+    iter->GoToNextItem();
+    }
+
+  cout << "Total num. cells: " << numCells << endl;
+
+  vtkNew<vtkRTAnalyticSource> rt;
+  rt->SetWholeExtent(-EXTENT, EXTENT, -EXTENT, EXTENT, -EXTENT, EXTENT);
+  rt->Update();
+
+  vtkNew<vtkSynchronizedTemplates3D> st;
+  st->SetInputData(rt->GetOutput());
+  st->SetInputArrayToProcess(0, 0, 0, 0, "RTData");
+  st->SetValue(0, 200);
+
+  tl->StartTimer();
+  st->Update();
+  tl->StopTimer();
+
+  cout << "Serial execution time: " << tl->GetElapsedTime() << endl;
+
+  cout << "Serial num. cells: " << st->GetOutput()->GetNumberOfCells() << endl;
+
+  if (st->GetOutput()->GetNumberOfCells() != numCells)
+    {
+    cout << "Number of cells did not match." << endl;
+    return EXIT_FAILURE;
+    }
+
+
+#if 0
+  vtkNew<vtkXMLMultiBlockDataWriter> writer;
+  writer->SetInputData(cf->GetOutputDataObject(0));
+  writer->SetFileName("contour.vtm");
+  writer->SetDataModeToAscii();
+  writer->Write();
+#endif
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestSmoothPolyDataFilter.cxx b/Filters/Core/Testing/Cxx/TestSmoothPolyDataFilter.cxx
new file mode 100644
index 0000000..33cd9b6
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestSmoothPolyDataFilter.cxx
@@ -0,0 +1,133 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSmoothPolyDataFilter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+#include <vtkSmoothPolyDataFilter.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+}
+
+int SmoothPolyData(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkSmoothPolyDataFilter> smoothPolyDataFilter
+    = vtkSmartPointer<vtkSmoothPolyDataFilter>::New();
+  smoothPolyDataFilter->SetOutputPointsPrecision(outputPointsPrecision);
+  smoothPolyDataFilter->SetInputData(inputPolyData);
+
+  smoothPolyDataFilter->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = smoothPolyDataFilter->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestSmoothPolyDataFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = SmoothPolyData(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = SmoothPolyData(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = SmoothPolyData(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = SmoothPolyData(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = SmoothPolyData(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = SmoothPolyData(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestThresholdPoints.cxx b/Filters/Core/Testing/Cxx/TestThresholdPoints.cxx
new file mode 100644
index 0000000..7cdcf9a
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestThresholdPoints.cxx
@@ -0,0 +1,143 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestThresholdPoints.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkFloatArray.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPointData.h>
+#include <vtkSmartPointer.h>
+#include <vtkThresholdPoints.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+  vtkSmartPointer<vtkFloatArray> scalars = vtkSmartPointer<vtkFloatArray>::New();
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      randomSequence->Next();
+      scalars->InsertNextValue(randomSequence->GetValue());
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      randomSequence->Next();
+      scalars->InsertNextValue(randomSequence->GetValue());
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  scalars->Squeeze();
+  polyData->GetPointData()->SetScalars(scalars);
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+}
+
+int ThresholdPolyDataPoints(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkThresholdPoints> thresholdPoints
+    = vtkSmartPointer<vtkThresholdPoints>::New();
+  thresholdPoints->SetOutputPointsPrecision(outputPointsPrecision);
+  thresholdPoints->ThresholdByUpper(0.5);
+  thresholdPoints->SetInputData(inputPolyData);
+
+  thresholdPoints->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = thresholdPoints->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestThresholdPoints(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = ThresholdPolyDataPoints(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ThresholdPolyDataPoints(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ThresholdPolyDataPoints(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ThresholdPolyDataPoints(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ThresholdPolyDataPoints(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = ThresholdPolyDataPoints(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestTransposeTable.cxx b/Filters/Core/Testing/Cxx/TestTransposeTable.cxx
new file mode 100644
index 0000000..26a3198
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestTransposeTable.cxx
@@ -0,0 +1,138 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestTransposeTable.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkDoubleArray.h"
+#include "vtkFloatArray.h"
+#include "vtkIntArray.h"
+#include "vtkNew.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+#include "vtkTransposeTable.h"
+
+#include <sstream>
+
+int TestTransposeTable(int, char *[])
+{
+  vtkNew<vtkTable> table;
+
+  const int nbValues = 5;
+
+  vtkNew<vtkStringArray> colName;
+  colName->SetName("Name");
+  colName->SetNumberOfValues(nbValues);
+  colName->SetValue(0, "apple");
+  colName->SetValue(1, "milk");
+  colName->SetValue(2, "cream");
+  colName->SetValue(3, "pasta");
+  colName->SetValue(4, "tomato");
+  table->AddColumn(colName.GetPointer());
+
+  vtkNew<vtkIntArray> colId;
+  colId->SetName("Id");
+  colId->SetNumberOfValues(nbValues);
+  colId->SetValue(0, 0);
+  colId->SetValue(1, 1);
+  colId->SetValue(2, 2);
+  colId->SetValue(3, 3);
+  colId->SetValue(4, 4);
+  table->AddColumn(colId.GetPointer());
+
+  vtkNew<vtkDoubleArray> colDouble;
+  colDouble->SetName("Double");
+  colDouble->SetNumberOfValues(nbValues);
+  colDouble->SetValue(0, 5.);
+  colDouble->SetValue(1, 4.005);
+  colDouble->SetValue(2, 2.65);
+  colDouble->SetValue(3, 1.1);
+  colDouble->SetValue(4, 0.4);
+  table->AddColumn(colDouble.GetPointer());
+
+  vtkNew<vtkFloatArray> colFloat;
+  colFloat->SetName("Float");
+  colFloat->SetNumberOfValues(nbValues);
+  colFloat->SetValue(0, 15.f);
+  colFloat->SetValue(1, 14.005f);
+  colFloat->SetValue(2, 12.65f);
+  colFloat->SetValue(3, 11.1f);
+  colFloat->SetValue(4, 10.4f);
+  table->AddColumn(colFloat.GetPointer());
+
+  // Transpose the input table
+  vtkNew<vtkTransposeTable> filter;
+  filter->SetInputData(table.GetPointer());
+  filter->Update();
+
+  vtkTable* outTable = filter->GetOutput();
+
+  if (table->GetNumberOfColumns() != outTable->GetNumberOfRows())
+    {
+    cout << "Input table:" << endl;
+    table->Dump();
+    cout << "Transposed table:" << endl;
+    outTable->Dump();
+    cout << "Failed: Column/row mismatched!" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (table->GetNumberOfRows() != outTable->GetNumberOfColumns() - 1)
+    {
+    cout << "Input table:" << endl;
+    table->Dump();
+    cout << "Transposed table:" << endl;
+    outTable->Dump();
+    cout << "Failed: Row/Column mismatched!" << endl;
+    return EXIT_FAILURE;
+    }
+
+  for (int i = 0; i < table->GetNumberOfRows(); i++)
+    {
+    std::stringstream ss;
+    ss << i;
+    vtkAbstractArray* col = outTable->GetColumnByName(ss.str().c_str());
+    for (int j = 0; j < table->GetNumberOfColumns(); j++)
+      {
+      if (col->GetVariantValue(j) != table->GetValue(i, j))
+        {
+        cout << "Failed: Column/row mismatched!" << endl;
+        return EXIT_FAILURE;
+        }
+      }
+    }
+
+  // Let's transpose the transposed table and compare it with input table
+  vtkNew<vtkTransposeTable> filter2;
+  filter2->SetInputData(outTable);
+  filter2->SetAddIdColumn(false);
+  filter2->SetUseIdColumn(true);
+  filter2->Update();
+
+  vtkTable* outTable2 = filter2->GetOutput();
+
+  for (int i = 0; i < table->GetNumberOfRows(); i++)
+    {
+    for (int j = 0; j < table->GetNumberOfColumns(); j++)
+      {
+      if (table->GetValue(i, j) != outTable2->GetValue(i, j))
+        {
+        cout << "Transposed of transposed table:" << endl;
+        outTable2->Dump();
+        cout << "Failed: Column/row mismatch!" << endl;
+        return EXIT_FAILURE;
+        }
+      }
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Cxx/TestTubeFilter.cxx b/Filters/Core/Testing/Cxx/TestTubeFilter.cxx
new file mode 100644
index 0000000..477ceaf
--- /dev/null
+++ b/Filters/Core/Testing/Cxx/TestTubeFilter.cxx
@@ -0,0 +1,141 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestTubeFilter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+#include <vtkTubeFilter.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+  vtkSmartPointer<vtkCellArray> lines = vtkSmartPointer<vtkCellArray>::New();
+  lines->InsertNextCell(4);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      vtkIdType pointId = points->InsertNextPoint(point);
+      verts->InsertCellPoint(pointId);
+      lines->InsertCellPoint(pointId);
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      vtkIdType pointId = points->InsertNextPoint(point);
+      verts->InsertCellPoint(pointId);
+      lines->InsertCellPoint(pointId);
+      }
+    }
+
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+  lines->Squeeze();
+  polyData->SetLines(lines);
+}
+
+int TubeFilter(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkTubeFilter> tubeFilter
+    = vtkSmartPointer<vtkTubeFilter>::New();
+  tubeFilter->SetOutputPointsPrecision(outputPointsPrecision);
+  tubeFilter->SetInputData(inputPolyData);
+
+  tubeFilter->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData = tubeFilter->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestTubeFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = TubeFilter(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TubeFilter(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TubeFilter(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TubeFilter(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TubeFilter(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TubeFilter(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Core/Testing/Data/Baseline/CellDataToPointData.png.md5 b/Filters/Core/Testing/Data/Baseline/CellDataToPointData.png.md5
new file mode 100644
index 0000000..f4e1fe0
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/CellDataToPointData.png.md5
@@ -0,0 +1 @@
+8a0fc15819c43f1dcf8b13e13ec9cac3
diff --git a/Filters/Core/Testing/Data/Baseline/CellDataToPointData_1.png.md5 b/Filters/Core/Testing/Data/Baseline/CellDataToPointData_1.png.md5
new file mode 100644
index 0000000..0c7f3c7
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/CellDataToPointData_1.png.md5
@@ -0,0 +1 @@
+1265827537ae4e43d2c654475a867db0
diff --git a/Filters/Core/Testing/Data/Baseline/Delaunay2D.png.md5 b/Filters/Core/Testing/Data/Baseline/Delaunay2D.png.md5
new file mode 100644
index 0000000..7de29ce
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/Delaunay2D.png.md5
@@ -0,0 +1 @@
+af55b6fe2f27776a236dc86efe06094b
diff --git a/Filters/Core/Testing/Data/Baseline/Delaunay2DAlpha.png.md5 b/Filters/Core/Testing/Data/Baseline/Delaunay2DAlpha.png.md5
new file mode 100644
index 0000000..44bf9a3
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/Delaunay2DAlpha.png.md5
@@ -0,0 +1 @@
+b67d497aaa385094ec6c46a5f5f1d0e0
diff --git a/Filters/Core/Testing/Data/Baseline/Delaunay2DTransform.png.md5 b/Filters/Core/Testing/Data/Baseline/Delaunay2DTransform.png.md5
new file mode 100644
index 0000000..7678da4
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/Delaunay2DTransform.png.md5
@@ -0,0 +1 @@
+4caa202127a5533e19f156d6043ed3f3
diff --git a/Filters/Core/Testing/Data/Baseline/Delaunay3D.png.md5 b/Filters/Core/Testing/Data/Baseline/Delaunay3D.png.md5
new file mode 100644
index 0000000..4f6788d
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/Delaunay3D.png.md5
@@ -0,0 +1 @@
+1eea5e56f0651dc9c5756e5261e5f6df
diff --git a/Filters/Core/Testing/Data/Baseline/MassProperties.png.md5 b/Filters/Core/Testing/Data/Baseline/MassProperties.png.md5
new file mode 100644
index 0000000..05497c7
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/MassProperties.png.md5
@@ -0,0 +1 @@
+b416895f3d9dce193d42f012bd422e16
diff --git a/Filters/Core/Testing/Data/Baseline/MassProperties_1.png.md5 b/Filters/Core/Testing/Data/Baseline/MassProperties_1.png.md5
new file mode 100644
index 0000000..3f39ce2
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/MassProperties_1.png.md5
@@ -0,0 +1 @@
+8d46405e6bc2518cd21ff93459bc9ec3
diff --git a/Filters/Core/Testing/Data/Baseline/QuadricDecimation.png.md5 b/Filters/Core/Testing/Data/Baseline/QuadricDecimation.png.md5
new file mode 100644
index 0000000..72d7684
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/QuadricDecimation.png.md5
@@ -0,0 +1 @@
+e9d7cb5914be4eb35f8b5614baae7ccc
diff --git a/Filters/Core/Testing/Data/Baseline/QuadricDecimation_1.png.md5 b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_1.png.md5
new file mode 100644
index 0000000..79d157e
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_1.png.md5
@@ -0,0 +1 @@
+b5127b34a5f380b35cd3ae4fbf3106dc
diff --git a/Filters/Core/Testing/Data/Baseline/QuadricDecimation_2.png.md5 b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_2.png.md5
new file mode 100644
index 0000000..3f20e16
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_2.png.md5
@@ -0,0 +1 @@
+d6105f1142c807abcc5aedf304bc7c43
diff --git a/Filters/Core/Testing/Data/Baseline/QuadricDecimation_3.png.md5 b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_3.png.md5
new file mode 100644
index 0000000..2409cdd
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_3.png.md5
@@ -0,0 +1 @@
+68a7bfd91b341faa2217e8da5c45c825
diff --git a/Filters/Core/Testing/Data/Baseline/QuadricDecimation_4.png.md5 b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_4.png.md5
new file mode 100644
index 0000000..4469866
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_4.png.md5
@@ -0,0 +1 @@
+52d2624a5706e1901fbc690f917c03e9
diff --git a/Filters/Core/Testing/Data/Baseline/QuadricDecimation_5.png.md5 b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_5.png.md5
new file mode 100644
index 0000000..a432ba6
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/QuadricDecimation_5.png.md5
@@ -0,0 +1 @@
+a333f79070a4eab045115d37faf1cc4b
diff --git a/Filters/Core/Testing/Data/Baseline/StreamPolyData.png.md5 b/Filters/Core/Testing/Data/Baseline/StreamPolyData.png.md5
new file mode 100644
index 0000000..438194d
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/StreamPolyData.png.md5
@@ -0,0 +1 @@
+f405673b82905612e1f3abae8ce6bbed
diff --git a/Filters/Core/Testing/Data/Baseline/StreamPolyData_1.png.md5 b/Filters/Core/Testing/Data/Baseline/StreamPolyData_1.png.md5
new file mode 100644
index 0000000..cc0e168
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/StreamPolyData_1.png.md5
@@ -0,0 +1 @@
+c7208199fcc397d34abd4a490e356ad9
diff --git a/Filters/Core/Testing/Data/Baseline/TestDelaunay2D.png.md5 b/Filters/Core/Testing/Data/Baseline/TestDelaunay2D.png.md5
new file mode 100644
index 0000000..d7b8480
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestDelaunay2D.png.md5
@@ -0,0 +1 @@
+5f7a0fbfb517c5884cd763b42ded0cde
diff --git a/Filters/Core/Testing/Data/Baseline/TestDelaunay2D_1.png.md5 b/Filters/Core/Testing/Data/Baseline/TestDelaunay2D_1.png.md5
new file mode 100644
index 0000000..90f6605
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestDelaunay2D_1.png.md5
@@ -0,0 +1 @@
+62c5a00a98c5ef0cb40c21f5b194cf9d
diff --git a/Filters/Core/Testing/Data/Baseline/TestGlyph3D.png.md5 b/Filters/Core/Testing/Data/Baseline/TestGlyph3D.png.md5
new file mode 100644
index 0000000..2ba0c76
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestGlyph3D.png.md5
@@ -0,0 +1 @@
+d4b2bf90e995a68e561759e09e0b7460
diff --git a/Filters/Core/Testing/Data/Baseline/TestGridSynchronizedTemplates3D.png.md5 b/Filters/Core/Testing/Data/Baseline/TestGridSynchronizedTemplates3D.png.md5
new file mode 100644
index 0000000..6ea978f
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestGridSynchronizedTemplates3D.png.md5
@@ -0,0 +1 @@
+fbc3e0f61e386ef579fffdc86e238642
diff --git a/Filters/Core/Testing/Data/Baseline/TestMarchingSquares.png.md5 b/Filters/Core/Testing/Data/Baseline/TestMarchingSquares.png.md5
new file mode 100644
index 0000000..b71b8f1
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestMarchingSquares.png.md5
@@ -0,0 +1 @@
+b967b3a064d801b339f3a3feb71b4aa5
diff --git a/Filters/Core/Testing/Data/Baseline/TestRectilinearSynchronizedTemplates.png.md5 b/Filters/Core/Testing/Data/Baseline/TestRectilinearSynchronizedTemplates.png.md5
new file mode 100644
index 0000000..f25c9f0
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestRectilinearSynchronizedTemplates.png.md5
@@ -0,0 +1 @@
+0a75e84d6ee0b6ddafa83e75b81b9cad
diff --git a/Filters/Core/Testing/Data/Baseline/TestSynchronizedTemplates2D.png.md5 b/Filters/Core/Testing/Data/Baseline/TestSynchronizedTemplates2D.png.md5
new file mode 100644
index 0000000..e5d3d97
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestSynchronizedTemplates2D.png.md5
@@ -0,0 +1 @@
+234f94b60dea897f21069e2fb183b9db
diff --git a/Filters/Core/Testing/Data/Baseline/TestSynchronizedTemplates3D.png.md5 b/Filters/Core/Testing/Data/Baseline/TestSynchronizedTemplates3D.png.md5
new file mode 100644
index 0000000..75b9e43
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestSynchronizedTemplates3D.png.md5
@@ -0,0 +1 @@
+a3bae0d984e57faf4343cffbb530b71e
diff --git a/Filters/Core/Testing/Data/Baseline/TestTensorGlyph.png.md5 b/Filters/Core/Testing/Data/Baseline/TestTensorGlyph.png.md5
new file mode 100644
index 0000000..2d81bb3
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestTensorGlyph.png.md5
@@ -0,0 +1 @@
+1173cc950f18506abe0c78029830e0c7
diff --git a/Filters/Core/Testing/Data/Baseline/TestTensorGlyph_1.png.md5 b/Filters/Core/Testing/Data/Baseline/TestTensorGlyph_1.png.md5
new file mode 100644
index 0000000..06f7b1d
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestTensorGlyph_1.png.md5
@@ -0,0 +1 @@
+18edf65c0e076e4c338865f8f5ad87af
diff --git a/Filters/Core/Testing/Data/Baseline/TestTextureGlyph.png.md5 b/Filters/Core/Testing/Data/Baseline/TestTextureGlyph.png.md5
new file mode 100644
index 0000000..bbd257a
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestTextureGlyph.png.md5
@@ -0,0 +1 @@
+a649959b2357330f5248217550fe9fe0
diff --git a/Filters/Core/Testing/Data/Baseline/TestTriangleFilter.png.md5 b/Filters/Core/Testing/Data/Baseline/TestTriangleFilter.png.md5
new file mode 100644
index 0000000..4eb4b81
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/TestTriangleFilter.png.md5
@@ -0,0 +1 @@
+8bd934023001db304673175389fad8b6
diff --git a/Filters/Core/Testing/Data/Baseline/capCow.png.md5 b/Filters/Core/Testing/Data/Baseline/capCow.png.md5
new file mode 100644
index 0000000..7e79ad4
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/capCow.png.md5
@@ -0,0 +1 @@
+17cf548d80288bb7b60f78ec16699703
diff --git a/Filters/Core/Testing/Data/Baseline/capCow_1.png.md5 b/Filters/Core/Testing/Data/Baseline/capCow_1.png.md5
new file mode 100644
index 0000000..3ff8e04
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/capCow_1.png.md5
@@ -0,0 +1 @@
+1c19f78646d3bdb5566cfcb537831af4
diff --git a/Filters/Core/Testing/Data/Baseline/capSphere.png.md5 b/Filters/Core/Testing/Data/Baseline/capSphere.png.md5
new file mode 100644
index 0000000..971baea
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/capSphere.png.md5
@@ -0,0 +1 @@
+ba14c6ca786d915a2b4c7c0cddf404f9
diff --git a/Filters/Core/Testing/Data/Baseline/clipArt.png.md5 b/Filters/Core/Testing/Data/Baseline/clipArt.png.md5
new file mode 100644
index 0000000..82e2bcd
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/clipArt.png.md5
@@ -0,0 +1 @@
+58f4dd12fa646e16276424874e763d63
diff --git a/Filters/Core/Testing/Data/Baseline/combStreamers.png.md5 b/Filters/Core/Testing/Data/Baseline/combStreamers.png.md5
new file mode 100644
index 0000000..64f4b22
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/combStreamers.png.md5
@@ -0,0 +1 @@
+018d0c9f6651a2eb139a879edae3de93
diff --git a/Filters/Core/Testing/Data/Baseline/combStreamers2.png.md5 b/Filters/Core/Testing/Data/Baseline/combStreamers2.png.md5
new file mode 100644
index 0000000..f647dbb
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/combStreamers2.png.md5
@@ -0,0 +1 @@
+ce22276f90d76e7ddeff091a6865fe22
diff --git a/Filters/Core/Testing/Data/Baseline/combStreamers2_1.png.md5 b/Filters/Core/Testing/Data/Baseline/combStreamers2_1.png.md5
new file mode 100644
index 0000000..0e6d15c
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/combStreamers2_1.png.md5
@@ -0,0 +1 @@
+74a564941ddbeb48ef223de6f3f823f1
diff --git a/Filters/Core/Testing/Data/Baseline/constrainedDelaunay.png.md5 b/Filters/Core/Testing/Data/Baseline/constrainedDelaunay.png.md5
new file mode 100644
index 0000000..95eafe3
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/constrainedDelaunay.png.md5
@@ -0,0 +1 @@
+fb428ec2b8bcfef872c38131aa92798f
diff --git a/Filters/Core/Testing/Data/Baseline/constrainedDelaunay_1.png.md5 b/Filters/Core/Testing/Data/Baseline/constrainedDelaunay_1.png.md5
new file mode 100644
index 0000000..e521f65
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/constrainedDelaunay_1.png.md5
@@ -0,0 +1 @@
+b0c47e7e77ac5570b5c16943613f4b5d
diff --git a/Filters/Core/Testing/Data/Baseline/constrainedDelaunay_2.png.md5 b/Filters/Core/Testing/Data/Baseline/constrainedDelaunay_2.png.md5
new file mode 100644
index 0000000..b5bddc5
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/constrainedDelaunay_2.png.md5
@@ -0,0 +1 @@
+2ce58d7b8904f8ff83306856b7a8f816
diff --git a/Filters/Core/Testing/Data/Baseline/contourCells.png.md5 b/Filters/Core/Testing/Data/Baseline/contourCells.png.md5
new file mode 100644
index 0000000..c671841
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/contourCells.png.md5
@@ -0,0 +1 @@
+95ab025a613bcf6b6f6bf4a585c11d90
diff --git a/Filters/Core/Testing/Data/Baseline/contourCells_1.png.md5 b/Filters/Core/Testing/Data/Baseline/contourCells_1.png.md5
new file mode 100644
index 0000000..b436870
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/contourCells_1.png.md5
@@ -0,0 +1 @@
+00d368a21ef258f1658767a93419d0bc
diff --git a/Filters/Core/Testing/Data/Baseline/contourQuadraticCells.png.md5 b/Filters/Core/Testing/Data/Baseline/contourQuadraticCells.png.md5
new file mode 100644
index 0000000..b10e98f
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/contourQuadraticCells.png.md5
@@ -0,0 +1 @@
+db750fe5d5d0614213c20af951f2cff7
diff --git a/Filters/Core/Testing/Data/Baseline/createBFont.png.md5 b/Filters/Core/Testing/Data/Baseline/createBFont.png.md5
new file mode 100644
index 0000000..3f67af4
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/createBFont.png.md5
@@ -0,0 +1 @@
+052848059b0a32694d6f6ce6ca0d8997
diff --git a/Filters/Core/Testing/Data/Baseline/createBFont_1.png.md5 b/Filters/Core/Testing/Data/Baseline/createBFont_1.png.md5
new file mode 100644
index 0000000..f316ca3
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/createBFont_1.png.md5
@@ -0,0 +1 @@
+8a9a00769174fd32e0264c9f8bb5132f
diff --git a/Filters/Core/Testing/Data/Baseline/cutLoop.png.md5 b/Filters/Core/Testing/Data/Baseline/cutLoop.png.md5
new file mode 100644
index 0000000..3db89ee
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/cutLoop.png.md5
@@ -0,0 +1 @@
+a4a2812759aa4f223d131cec66dabf29
diff --git a/Filters/Core/Testing/Data/Baseline/deciFranFace.png.md5 b/Filters/Core/Testing/Data/Baseline/deciFranFace.png.md5
new file mode 100644
index 0000000..442d7ae
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/deciFranFace.png.md5
@@ -0,0 +1 @@
+cfeabcc9b7a48619b51c34291490f7ba
diff --git a/Filters/Core/Testing/Data/Baseline/deciFranFace_1.png.md5 b/Filters/Core/Testing/Data/Baseline/deciFranFace_1.png.md5
new file mode 100644
index 0000000..9c5fdc3
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/deciFranFace_1.png.md5
@@ -0,0 +1 @@
+1e4e143ad6e094eee00926db62b6694d
diff --git a/Filters/Core/Testing/Data/Baseline/deciFranFace_2.png.md5 b/Filters/Core/Testing/Data/Baseline/deciFranFace_2.png.md5
new file mode 100644
index 0000000..d0f902a
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/deciFranFace_2.png.md5
@@ -0,0 +1 @@
+2103878e191b6d70a3c9131bdeacafb1
diff --git a/Filters/Core/Testing/Data/Baseline/deciFranFace_3.png.md5 b/Filters/Core/Testing/Data/Baseline/deciFranFace_3.png.md5
new file mode 100644
index 0000000..d726919
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/deciFranFace_3.png.md5
@@ -0,0 +1 @@
+d4b28b7c87c197ddd1484cc4374297e1
diff --git a/Filters/Core/Testing/Data/Baseline/deciFranFace_4.png.md5 b/Filters/Core/Testing/Data/Baseline/deciFranFace_4.png.md5
new file mode 100644
index 0000000..3f0b018
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/deciFranFace_4.png.md5
@@ -0,0 +1 @@
+1452560fa3c07728743e9c6f0b960671
diff --git a/Filters/Core/Testing/Data/Baseline/deciFranFace_5.png.md5 b/Filters/Core/Testing/Data/Baseline/deciFranFace_5.png.md5
new file mode 100644
index 0000000..17fb3b3
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/deciFranFace_5.png.md5
@@ -0,0 +1 @@
+fdd8f91ef17abc91fb4be9904b8737cb
diff --git a/Filters/Core/Testing/Data/Baseline/deciPlane.png.md5 b/Filters/Core/Testing/Data/Baseline/deciPlane.png.md5
new file mode 100644
index 0000000..f0fd7c9
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/deciPlane.png.md5
@@ -0,0 +1 @@
+33410c29e2bc25a0c3e5beab91229865
diff --git a/Filters/Core/Testing/Data/Baseline/deciPlane_1.png.md5 b/Filters/Core/Testing/Data/Baseline/deciPlane_1.png.md5
new file mode 100644
index 0000000..d5f5f21
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/deciPlane_1.png.md5
@@ -0,0 +1 @@
+fa6a638a12823ac20a77d896e40541fc
diff --git a/Filters/Core/Testing/Data/Baseline/dispPlot.png.md5 b/Filters/Core/Testing/Data/Baseline/dispPlot.png.md5
new file mode 100644
index 0000000..2b240dd
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/dispPlot.png.md5
@@ -0,0 +1 @@
+28d07e988fcaa931a1b0ff8e8080b6ba
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToPolyData.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToPolyData.png.md5
new file mode 100644
index 0000000..6883ab2
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToPolyData.png.md5
@@ -0,0 +1 @@
+20200b3a5aad30b5d8b4779f9dae0474
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToPolyData_1.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToPolyData_1.png.md5
new file mode 100644
index 0000000..37c13de
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToPolyData_1.png.md5
@@ -0,0 +1 @@
+2570d981a2c6e4e165a6f395b920bf90
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToRGrid.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToRGrid.png.md5
new file mode 100644
index 0000000..917a83b
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToRGrid.png.md5
@@ -0,0 +1 @@
+b1d528c93dd50e3aa758f673a8cb6a1a
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToRGrid_1.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToRGrid_1.png.md5
new file mode 100644
index 0000000..8e84d1b
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToRGrid_1.png.md5
@@ -0,0 +1 @@
+e7a580e1a541358ed43d93e8bcea44c0
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToRGrid_2.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToRGrid_2.png.md5
new file mode 100644
index 0000000..ae5d2f4
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToRGrid_2.png.md5
@@ -0,0 +1 @@
+aaa96cf03b07de44a1293004a5da1bfa
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToRGrid_3.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToRGrid_3.png.md5
new file mode 100644
index 0000000..723f881
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToRGrid_3.png.md5
@@ -0,0 +1 @@
+edd4a933cf71a639d0da60f6c8ca11bb
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToRGrid_4.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToRGrid_4.png.md5
new file mode 100644
index 0000000..8a01934
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToRGrid_4.png.md5
@@ -0,0 +1 @@
+99272027c4ad2e94d79f5bae22db622b
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToSGrid.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToSGrid.png.md5
new file mode 100644
index 0000000..7ea1ef9
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToSGrid.png.md5
@@ -0,0 +1 @@
+0f547737bcdf92c2b69aab5a5eb0541b
diff --git a/Filters/Core/Testing/Data/Baseline/fieldToUGrid.png.md5 b/Filters/Core/Testing/Data/Baseline/fieldToUGrid.png.md5
new file mode 100644
index 0000000..e967feb
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/fieldToUGrid.png.md5
@@ -0,0 +1 @@
+3f3d7018d1d67903e2aa280aab128966
diff --git a/Filters/Core/Testing/Data/Baseline/financialField.png.md5 b/Filters/Core/Testing/Data/Baseline/financialField.png.md5
new file mode 100644
index 0000000..72dcf01
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/financialField.png.md5
@@ -0,0 +1 @@
+c0e4a42a385fcf90a7133f7e522f6606
diff --git a/Filters/Core/Testing/Data/Baseline/financialField2.png.md5 b/Filters/Core/Testing/Data/Baseline/financialField2.png.md5
new file mode 100644
index 0000000..b6d8ffb
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/financialField2.png.md5
@@ -0,0 +1 @@
+6beb0f35f67631f8fd874ffbdab91e1e
diff --git a/Filters/Core/Testing/Data/Baseline/financialField3.png.md5 b/Filters/Core/Testing/Data/Baseline/financialField3.png.md5
new file mode 100644
index 0000000..2880746
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/financialField3.png.md5
@@ -0,0 +1 @@
+20291c271d00f936de94eb7057347bac
diff --git a/Filters/Core/Testing/Data/Baseline/glyphComb.png.md5 b/Filters/Core/Testing/Data/Baseline/glyphComb.png.md5
new file mode 100644
index 0000000..b73b3f3
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/glyphComb.png.md5
@@ -0,0 +1 @@
+58a89c8fc6b9ef17fbcad3f6efa6dd99
diff --git a/Filters/Core/Testing/Data/Baseline/hull.png.md5 b/Filters/Core/Testing/Data/Baseline/hull.png.md5
new file mode 100644
index 0000000..f78b124
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/hull.png.md5
@@ -0,0 +1 @@
+4f988b7fd544a1f5deb6f67625d46d9b
diff --git a/Filters/Core/Testing/Data/Baseline/mergeFilter.png.md5 b/Filters/Core/Testing/Data/Baseline/mergeFilter.png.md5
new file mode 100644
index 0000000..45b3980
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/mergeFilter.png.md5
@@ -0,0 +1 @@
+4b0d949fd521c98cdde921fcf754b0d2
diff --git a/Filters/Core/Testing/Data/Baseline/multipleComponentContour.png.md5 b/Filters/Core/Testing/Data/Baseline/multipleComponentContour.png.md5
new file mode 100644
index 0000000..8f3aef9
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/multipleComponentContour.png.md5
@@ -0,0 +1 @@
+76d5fb83f8f34bc7fca74632d8198a08
diff --git a/Filters/Core/Testing/Data/Baseline/multipleIso.png.md5 b/Filters/Core/Testing/Data/Baseline/multipleIso.png.md5
new file mode 100644
index 0000000..bf3bd39
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/multipleIso.png.md5
@@ -0,0 +1 @@
+b8187242894791acd819c44c57323450
diff --git a/Filters/Core/Testing/Data/Baseline/polyConn.png.md5 b/Filters/Core/Testing/Data/Baseline/polyConn.png.md5
new file mode 100644
index 0000000..3a01fa4
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/polyConn.png.md5
@@ -0,0 +1 @@
+71011b33144369cfaa20c1c9abb66d3f
diff --git a/Filters/Core/Testing/Data/Baseline/probe.png.md5 b/Filters/Core/Testing/Data/Baseline/probe.png.md5
new file mode 100644
index 0000000..6d9599a
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/probe.png.md5
@@ -0,0 +1 @@
+f51d3fa94996b1ddd3b36b750f2a09ea
diff --git a/Filters/Core/Testing/Data/Baseline/probeComb.png.md5 b/Filters/Core/Testing/Data/Baseline/probeComb.png.md5
new file mode 100644
index 0000000..bd6e862
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/probeComb.png.md5
@@ -0,0 +1 @@
+9525d82082838f8b0c89e094406880cf
diff --git a/Filters/Core/Testing/Data/Baseline/reverseNormals.png.md5 b/Filters/Core/Testing/Data/Baseline/reverseNormals.png.md5
new file mode 100644
index 0000000..8d1bf6d
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/reverseNormals.png.md5
@@ -0,0 +1 @@
+e78eb2e08ac14f109ed8eb3eb2799e7d
diff --git a/Filters/Core/Testing/Data/Baseline/reverseNormals_1.png.md5 b/Filters/Core/Testing/Data/Baseline/reverseNormals_1.png.md5
new file mode 100644
index 0000000..e487734
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/reverseNormals_1.png.md5
@@ -0,0 +1 @@
+c8e57679b20ba214495d56a94721c986
diff --git a/Filters/Core/Testing/Data/Baseline/skinOrder.png.md5 b/Filters/Core/Testing/Data/Baseline/skinOrder.png.md5
new file mode 100644
index 0000000..2b93edf
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/skinOrder.png.md5
@@ -0,0 +1 @@
+981478d8c2a96f73507b76df61d0e5a2
diff --git a/Filters/Core/Testing/Data/Baseline/smoothMeshOnMesh.png.md5 b/Filters/Core/Testing/Data/Baseline/smoothMeshOnMesh.png.md5
new file mode 100644
index 0000000..8be8026
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/smoothMeshOnMesh.png.md5
@@ -0,0 +1 @@
+0d85876487ae6df6d967eda1576b02d6
diff --git a/Filters/Core/Testing/Data/Baseline/streamComb.png.md5 b/Filters/Core/Testing/Data/Baseline/streamComb.png.md5
new file mode 100644
index 0000000..6767a11
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/streamComb.png.md5
@@ -0,0 +1 @@
+fcf37de5f76398da12f4c4df08092550
diff --git a/Filters/Core/Testing/Data/Baseline/streamSurface.png.md5 b/Filters/Core/Testing/Data/Baseline/streamSurface.png.md5
new file mode 100644
index 0000000..8ca2420
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/streamSurface.png.md5
@@ -0,0 +1 @@
+b6c78658951b8c93eb4e5a4303768488
diff --git a/Filters/Core/Testing/Data/Baseline/streamSurface2.png.md5 b/Filters/Core/Testing/Data/Baseline/streamSurface2.png.md5
new file mode 100644
index 0000000..bea8b34
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/streamSurface2.png.md5
@@ -0,0 +1 @@
+986b15a387e0d56ee30cd903146ef3b1
diff --git a/Filters/Core/Testing/Data/Baseline/stripF.png.md5 b/Filters/Core/Testing/Data/Baseline/stripF.png.md5
new file mode 100644
index 0000000..6db4b54
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/stripF.png.md5
@@ -0,0 +1 @@
+97aaef0e968b26d7f82324b405007691
diff --git a/Filters/Core/Testing/Data/Baseline/teapotHulls.png.md5 b/Filters/Core/Testing/Data/Baseline/teapotHulls.png.md5
new file mode 100644
index 0000000..18a5a52
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/teapotHulls.png.md5
@@ -0,0 +1 @@
+2faace242687890b164c0ee27358d934
diff --git a/Filters/Core/Testing/Data/Baseline/teapotHulls_1.png.md5 b/Filters/Core/Testing/Data/Baseline/teapotHulls_1.png.md5
new file mode 100644
index 0000000..ee3849d
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/teapotHulls_1.png.md5
@@ -0,0 +1 @@
+ac74ffe3ff3109c5e6dfb1ca4beaf862
diff --git a/Filters/Core/Testing/Data/Baseline/teapotHulls_2.png.md5 b/Filters/Core/Testing/Data/Baseline/teapotHulls_2.png.md5
new file mode 100644
index 0000000..fceac10
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/teapotHulls_2.png.md5
@@ -0,0 +1 @@
+bb4c88eb2acf49015cb4718c8e0be554
diff --git a/Filters/Core/Testing/Data/Baseline/teapotHulls_3.png.md5 b/Filters/Core/Testing/Data/Baseline/teapotHulls_3.png.md5
new file mode 100644
index 0000000..5bb58a1
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/teapotHulls_3.png.md5
@@ -0,0 +1 @@
+f48065c481b7728f8ce4cbcf97ac8b0c
diff --git a/Filters/Core/Testing/Data/Baseline/teapotHulls_4.png.md5 b/Filters/Core/Testing/Data/Baseline/teapotHulls_4.png.md5
new file mode 100644
index 0000000..1fd19c6
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/teapotHulls_4.png.md5
@@ -0,0 +1 @@
+369c6d22035ac331a258fa37816b5d15
diff --git a/Filters/Core/Testing/Data/Baseline/tubeComb.png.md5 b/Filters/Core/Testing/Data/Baseline/tubeComb.png.md5
new file mode 100644
index 0000000..cb5bfa2
--- /dev/null
+++ b/Filters/Core/Testing/Data/Baseline/tubeComb.png.md5
@@ -0,0 +1 @@
+8cef68ce06504ad67befe6a4b69b84b9
diff --git a/Filters/Core/Testing/Python/CMakeLists.txt b/Filters/Core/Testing/Python/CMakeLists.txt
index d3134c4..28fdc61 100644
--- a/Filters/Core/Testing/Python/CMakeLists.txt
+++ b/Filters/Core/Testing/Python/CMakeLists.txt
@@ -1,59 +1,56 @@
-add_test_python(CellDataToPointData.py Graphics)
-add_test_python(Delaunay2D.py Graphics)
-add_test_python(Delaunay2DAlpha.py Graphics)
-add_test_python(Delaunay2DTransform.py Graphics)
-add_test_python(Delaunay3D.py Graphics)
-add_test_python(StreamPolyData.py Graphics)
-add_test_python(TestMarchingSquares.py Graphics)
-add_test_python(TestRectilinearSynchronizedTemplates.py Graphics)
-add_test_python(TestSynchronizedTemplates2D.py Graphics)
-add_test_python(TestTextureGlyph.py Graphics)
-add_test_python(TestTriangleFilter.py Graphics)
-add_test_python(capCow.py Graphics)
-add_test_python(capSphere.py Graphics)
-add_test_python(clipArt.py Graphics)
-add_test_python(constrainedDelaunay.py Graphics)
-add_test_python(contourCells.py Graphics)
-add_test_python(contourQuadraticCells.py Graphics)
-add_test_python(createBFont.py Graphics)
-add_test_python(cutLoop.py Graphics)
-add_test_python(dispPlot.py Graphics)
-add_test_python(fieldToPolyData.py Graphics)
-add_test_python(fieldToUGrid.py Graphics)
-add_test_python(hull.py Graphics)
-add_test_python(multipleComponentContour.py Graphics)
-add_test_python(reverseNormals.py Graphics)
-add_test_python(smoothMeshOnMesh.py Graphics)
-add_test_python(stripF.py Graphics)
-add_test_python(teapotHulls.py Graphics)
-add_test_python(TestContourCases.py)
-add_test_python1(pointsPrecisions.py)
-
-if(VTK_DATA_ROOT)
-  add_test_python1(MassProperties Baseline/Hybrid)
-  add_test_python(TestCompositeCutter.py ${VTK_DATA_ROOT})
-  add_test_python(TestContourGrid.py Graphics)
-  add_test_python(TestGridSynchronizedTemplates3D.py Graphics)
-  add_test_python(TestSynchronizedTemplates3D.py Graphics)
-  add_test_python1(TestTensorGlyph.py Baseline/Graphics)
-  add_test_python(QuadricDecimation.py Graphics)
-  add_test_python(combStreamers.py Graphics)
-  add_test_python(combStreamers2.py Graphics)
-  add_test_python1(deciFranFace.py Baseline/Graphics)
-  add_test_python1(deciPlane Baseline/Graphics)
-  add_test_python(fieldToRGrid.py Graphics)
-  add_test_python(fieldToSGrid.py Graphics)
-  add_test_python1(financialField Baseline/Graphics)
-  add_test_python1(financialField3 Baseline/Hybrid)
-  add_test_python1(financialField2 Baseline/Hybrid)
-  add_test_python(glyphComb.py Graphics)
-  add_test_python(mergeFilter.py Graphics)
-  add_test_python(multipleIso.py Graphics)
-  add_test_python(polyConn.py Graphics)
-  add_test_python(probeComb.py Graphics)
-  add_test_python1(skinOrder Baseline/Graphics)
-  add_test_python(streamComb.py Graphics)
-  add_test_python(streamSurface.py Graphics)
-  add_test_python(streamSurface2.py Graphics)
-  add_test_python(tubeComb.py Graphics)
-endif()
+vtk_add_test_python(CellDataToPointData.py)
+vtk_add_test_python(Delaunay2D.py)
+vtk_add_test_python(Delaunay2DAlpha.py)
+vtk_add_test_python(Delaunay2DTransform.py)
+vtk_add_test_python(Delaunay3D.py)
+vtk_add_test_python(QuadricDecimation.py)
+vtk_add_test_python(StreamPolyData.py)
+vtk_add_test_python(TestGridSynchronizedTemplates3D.py)
+vtk_add_test_python(TestMarchingSquares.py)
+vtk_add_test_python(TestRectilinearSynchronizedTemplates.py)
+vtk_add_test_python(TestSynchronizedTemplates2D.py)
+vtk_add_test_python(TestSynchronizedTemplates3D.py)
+vtk_add_test_python(TestContourGrid.py NO_VALID)
+vtk_add_test_python(TestTensorGlyph.py NO_RT)
+vtk_add_test_python(TestTextureGlyph.py)
+vtk_add_test_python(TestTriangleFilter.py)
+vtk_add_test_python(capCow.py)
+vtk_add_test_python(capSphere.py)
+vtk_add_test_python(clipArt.py)
+vtk_add_test_python(combStreamers.py)
+vtk_add_test_python(combStreamers2.py)
+vtk_add_test_python(constrainedDelaunay.py)
+vtk_add_test_python(contourCells.py)
+vtk_add_test_python(contourQuadraticCells.py)
+vtk_add_test_python(createBFont.py)
+vtk_add_test_python(cutLoop.py)
+vtk_add_test_python(dispPlot.py)
+vtk_add_test_python(fieldToPolyData.py)
+vtk_add_test_python(fieldToRGrid.py)
+vtk_add_test_python(fieldToSGrid.py)
+vtk_add_test_python(fieldToUGrid.py)
+vtk_add_test_python(glyphComb.py)
+vtk_add_test_python(hull.py)
+vtk_add_test_python(mergeFilter.py)
+vtk_add_test_python(multipleComponentContour.py)
+vtk_add_test_python(multipleIso.py)
+vtk_add_test_python(polyConn.py)
+vtk_add_test_python(probeComb.py)
+vtk_add_test_python(reverseNormals.py)
+vtk_add_test_python(smoothMeshOnMesh.py)
+vtk_add_test_python(streamComb.py)
+vtk_add_test_python(streamSurface.py)
+vtk_add_test_python(streamSurface2.py)
+vtk_add_test_python(stripF.py)
+vtk_add_test_python(teapotHulls.py)
+vtk_add_test_python(tubeComb.py)
+vtk_add_test_python(deciFranFace.py NO_RT)
+vtk_add_test_python(deciPlane NO_RT)
+vtk_add_test_python(financialField NO_RT)
+vtk_add_test_python(skinOrder NO_RT)
+vtk_add_test_python(MassProperties NO_RT)
+vtk_add_test_python(financialField2 NO_RT)
+vtk_add_test_python(financialField3 NO_RT)
+vtk_add_test_python(TestContourCases.py NO_DATA NO_VALID)
+vtk_add_test_python(pointsPrecisions.py NO_DATA NO_VALID NO_RT)
+vtk_add_test_python(TestCompositeCutter.py NO_VALID)
diff --git a/Filters/Core/Testing/Python/MassProperties.py b/Filters/Core/Testing/Python/MassProperties.py
index 30bc308..a732561 100755
--- a/Filters/Core/Testing/Python/MassProperties.py
+++ b/Filters/Core/Testing/Python/MassProperties.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython MassProperties.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Hybrid
-
 import StringIO
 import sys
 import vtk
diff --git a/Filters/Core/Testing/Python/TestTensorGlyph.py b/Filters/Core/Testing/Python/TestTensorGlyph.py
index 1a114ba..d8ecdea 100755
--- a/Filters/Core/Testing/Python/TestTensorGlyph.py
+++ b/Filters/Core/Testing/Python/TestTensorGlyph.py
@@ -1,9 +1,5 @@
 #!/usr/bin/env python
 
-# Run this test like so:
-# vtkpython TestTensorGlyph.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics/
-
 import os
 import vtk
 from vtk.test import Testing
diff --git a/Filters/Core/Testing/Python/TestTextureGlyph.py b/Filters/Core/Testing/Python/TestTextureGlyph.py
index 06a1144..ac22b2a 100755
--- a/Filters/Core/Testing/Python/TestTextureGlyph.py
+++ b/Filters/Core/Testing/Python/TestTextureGlyph.py
@@ -1,9 +1,5 @@
 #!/usr/bin/env python
 
-# Run this test like so:
-# $ vtkpython TestTextureGlyph.py  -D $VTK_DATA_ROOT \
-#   -B $VTK_DATA_ROOT/Baseline/Graphics/
-#
 # $ vtkpython TestTextureGlyph.py --help
 # provides more details on other options.
 
diff --git a/Filters/Core/Testing/Python/deciFranFace.py b/Filters/Core/Testing/Python/deciFranFace.py
index d3383df..86c7c7e 100755
--- a/Filters/Core/Testing/Python/deciFranFace.py
+++ b/Filters/Core/Testing/Python/deciFranFace.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython deciFranFace.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Core/Testing/Python/deciPlane.py b/Filters/Core/Testing/Python/deciPlane.py
index 5a2c710..b8fec53 100755
--- a/Filters/Core/Testing/Python/deciPlane.py
+++ b/Filters/Core/Testing/Python/deciPlane.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython deciPlane.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Core/Testing/Python/financialField.py b/Filters/Core/Testing/Python/financialField.py
index 82c4fe0..19894ef 100755
--- a/Filters/Core/Testing/Python/financialField.py
+++ b/Filters/Core/Testing/Python/financialField.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython financialField.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Core/Testing/Python/financialField2.py b/Filters/Core/Testing/Python/financialField2.py
index 07bf7cb..69a0812 100755
--- a/Filters/Core/Testing/Python/financialField2.py
+++ b/Filters/Core/Testing/Python/financialField2.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython financialField2.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Hybrid
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Core/Testing/Python/financialField3.py b/Filters/Core/Testing/Python/financialField3.py
index 8aa230e..b0a0433 100755
--- a/Filters/Core/Testing/Python/financialField3.py
+++ b/Filters/Core/Testing/Python/financialField3.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython financialField3.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Hybrid
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Core/Testing/Python/skinOrder.py b/Filters/Core/Testing/Python/skinOrder.py
index 10e605c..2acd55b 100755
--- a/Filters/Core/Testing/Python/skinOrder.py
+++ b/Filters/Core/Testing/Python/skinOrder.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython skinOrder.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Filters/Core/Testing/Tcl/CMakeLists.txt b/Filters/Core/Testing/Tcl/CMakeLists.txt
index 41eba90..e4ed845 100644
--- a/Filters/Core/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Core/Testing/Tcl/CMakeLists.txt
@@ -1,52 +1,47 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(QuadricDecimation Graphics)
-  add_test_tcl(TestGridSynchronizedTemplates3D Graphics)
-  add_test_tcl(TestMarchingSquares Graphics)
-  add_test_tcl(TestRectilinearSynchronizedTemplates Graphics)
-  add_test_tcl(TestSynchronizedTemplates2D Graphics)
-  add_test_tcl(TestSynchronizedTemplates3D Graphics)
-  add_test_tcl(capCow Graphics)
-  add_test_tcl(clipArt Graphics)
-  add_test_tcl(combStreamers Graphics)
-  add_test_tcl(combStreamers2 Graphics)
-  add_test_tcl(createBFont Graphics)
-  add_test_tcl(deciFranFace Graphics)
-  add_test_tcl(dispPlot Graphics)
-  add_test_tcl(fieldToPolyData Graphics)
-  add_test_tcl(fieldToRGrid Graphics)
-  add_test_tcl(fieldToSGrid Graphics)
-  add_test_tcl(fieldToUGrid Graphics)
-  add_test_tcl(financialField Graphics)
-  add_test_tcl(mergeFilter Graphics)
-  add_test_tcl(multipleIso Graphics)
-  add_test_tcl(polyConn Graphics)
-  add_test_tcl(probe Graphics)
-  add_test_tcl(probeComb Graphics)
-  add_test_tcl(reverseNormals Graphics)
-  add_test_tcl(skinOrder Graphics)
-  add_test_tcl(smoothMeshOnMesh Graphics)
-  add_test_tcl(stripF Graphics)
-  add_test_tcl(teapotHulls Graphics)
-endif()
+vtk_add_test_tcl(QuadricDecimation)
+vtk_add_test_tcl(TestGridSynchronizedTemplates3D)
+vtk_add_test_tcl(TestMarchingSquares)
+vtk_add_test_tcl(TestRectilinearSynchronizedTemplates)
+vtk_add_test_tcl(TestSynchronizedTemplates2D)
+vtk_add_test_tcl(TestSynchronizedTemplates3D)
+vtk_add_test_tcl(capCow)
+vtk_add_test_tcl(clipArt)
+vtk_add_test_tcl(combStreamers)
+vtk_add_test_tcl(combStreamers2)
+vtk_add_test_tcl(createBFont)
+vtk_add_test_tcl(deciFranFace)
+vtk_add_test_tcl(dispPlot)
+vtk_add_test_tcl(fieldToPolyData)
+vtk_add_test_tcl(fieldToRGrid)
+vtk_add_test_tcl(fieldToSGrid)
+vtk_add_test_tcl(fieldToUGrid)
+vtk_add_test_tcl(financialField)
+vtk_add_test_tcl(mergeFilter)
+vtk_add_test_tcl(multipleIso)
+vtk_add_test_tcl(polyConn)
+vtk_add_test_tcl(probe)
+vtk_add_test_tcl(probeComb)
+vtk_add_test_tcl(reverseNormals)
+vtk_add_test_tcl(skinOrder)
+vtk_add_test_tcl(smoothMeshOnMesh)
+vtk_add_test_tcl(stripF)
+vtk_add_test_tcl(teapotHulls)
+vtk_add_test_tcl(CellDataToPointData)
+vtk_add_test_tcl(Delaunay2D)
+vtk_add_test_tcl(Delaunay2DAlpha)
+vtk_add_test_tcl(Delaunay2DTransform)
+vtk_add_test_tcl(Delaunay3D)
+vtk_add_test_tcl(StreamPolyData)
+vtk_add_test_tcl(TestTriangleFilter)
+vtk_add_test_tcl(capSphere)
+vtk_add_test_tcl(constrainedDelaunay)
+vtk_add_test_tcl(contourCells)
+vtk_add_test_tcl(contourQuadraticCells)
+vtk_add_test_tcl(cutLoop)
+vtk_add_test_tcl(deciPlane)
+vtk_add_test_tcl(hull)
+vtk_add_test_tcl(multipleComponentContour)
 
-add_test_tcl(CellDataToPointData Graphics)
-add_test_tcl(Delaunay2D Graphics)
-add_test_tcl(Delaunay2DAlpha Graphics)
-add_test_tcl(Delaunay2DTransform Graphics)
-add_test_tcl(Delaunay3D Graphics)
-add_test_tcl(StreamPolyData Graphics)
-add_test_tcl(TestTriangleFilter Graphics)
-add_test_tcl(capSphere Graphics)
-add_test_tcl(constrainedDelaunay Graphics)
-add_test_tcl(contourCells Graphics)
-add_test_tcl(contourQuadraticCells Graphics)
-add_test_tcl(cutLoop Graphics)
-add_test_tcl(deciPlane Graphics)
-add_test_tcl(hull Graphics)
-add_test_tcl(multipleComponentContour Graphics)
-
-if(VTK_DATA_ROOT)
-  add_test_tcl(MassProperties Hybrid)
-  add_test_tcl(financialField2 Hybrid)
-  add_test_tcl(financialField3 Hybrid)
-endif()
+vtk_add_test_tcl(MassProperties)
+vtk_add_test_tcl(financialField2)
+vtk_add_test_tcl(financialField3)
diff --git a/Filters/Core/vtkAppendFilter.cxx b/Filters/Core/vtkAppendFilter.cxx
index e84c0ee..fec0803 100644
--- a/Filters/Core/vtkAppendFilter.cxx
+++ b/Filters/Core/vtkAppendFilter.cxx
@@ -299,25 +299,26 @@ int vtkAppendFilter::RequestData(
       vtkUnstructuredGrid *ug = vtkUnstructuredGrid::SafeDownCast(ds);
       for (cellId=0; cellId < numCells && !abort; cellId++)
         {
+        newPtIds->Reset ();
         if (ug && ds->GetCellType(cellId) == VTK_POLYHEDRON )
           {
           vtkIdType nfaces, *facePtIds;
           ug->GetFaceStream(cellId,nfaces,facePtIds);
           for(vtkIdType id=0; id < nfaces; ++id)
             {
-            vtkIdType nPoints = *facePtIds;
+            vtkIdType nPoints = *facePtIds++;
             newPtIds->InsertNextId(nPoints);
-            for (vtkIdType j=0; j <= nPoints; ++j)
+            for (vtkIdType j=0; j < nPoints; ++j)
               {
-              newPtIds->InsertNextId(*(++facePtIds)+ptOffset);
+              newPtIds->InsertNextId(*(facePtIds++)+ptOffset);
               }
             }
-          output->InsertNextCell(VTK_POLYHEDRON,nfaces,newPtIds->GetPointer(0));
+          newCellId = output->InsertNextCell(VTK_POLYHEDRON,nfaces,newPtIds->GetPointer(0));
+          outputCD->CopyData(cellList,cd,inputCount,cellId,newCellId);
           }
         else
           {
           ds->GetCellPoints(cellId, ptIds);
-          newPtIds->Reset ();
           for (i=0; i < ptIds->GetNumberOfIds(); i++)
             {
             newPtIds->InsertId(i,ptIds->GetId(i)+ptOffset);
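
    The corrected polyhedron branch above walks the face stream with a single
    advancing cursor: the old code re-read the face size without moving the
    pointer and iterated one id too many, and it never copied cell data for the
    new cell. A minimal sketch of the same traversal, written as a free helper
    for clarity (the function name and signature are mine, not part of the
    patch; the layout [nPts, id..., nPts, id...] is my reading of
    vtkUnstructuredGrid::GetFaceStream):

        #include <vtkIdList.h>
        #include <vtkType.h>

        // Each face advances the cursor by 1 + nPts entries; every copied
        // point id is shifted by ptOffset, exactly as in the fixed loop.
        static void AppendFaceStream(vtkIdType nfaces, const vtkIdType* facePtIds,
                                     vtkIdType ptOffset, vtkIdList* newPtIds)
        {
          for (vtkIdType f = 0; f < nfaces; ++f)
            {
            vtkIdType nPoints = *facePtIds++;          // face size
            newPtIds->InsertNextId(nPoints);
            for (vtkIdType j = 0; j < nPoints; ++j)
              {
              newPtIds->InsertNextId(*facePtIds++ + ptOffset);
              }
            }
        }
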
diff --git a/Filters/Core/vtkAppendPolyData.cxx b/Filters/Core/vtkAppendPolyData.cxx
index 8fb5bc6..04fdeac 100644
--- a/Filters/Core/vtkAppendPolyData.cxx
+++ b/Filters/Core/vtkAppendPolyData.cxx
@@ -33,6 +33,7 @@ vtkAppendPolyData::vtkAppendPolyData()
 {
   this->ParallelStreaming = 0;
   this->UserManagedInputs = 0;
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 //----------------------------------------------------------------------------
@@ -273,7 +274,22 @@ int vtkAppendPolyData::ExecuteAppend(vtkPolyData* output,
     }
 
   // Allocate geometry/topology
-  newPts = vtkPoints::New(pointtype);
+  newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(pointtype);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->SetNumberOfPoints(numPts);
 
   newVerts = vtkCellArray::New();
@@ -690,6 +706,8 @@ void vtkAppendPolyData::PrintSelf(ostream& os, vtkIndent indent)
 
   os << "ParallelStreaming:" << (this->ParallelStreaming?"On":"Off") << endl;
   os << "UserManagedInputs:" << (this->UserManagedInputs?"On":"Off") << endl;
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << endl;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Filters/Core/vtkAppendPolyData.h b/Filters/Core/vtkAppendPolyData.h
index a7b1bcf..f7e8151 100644
--- a/Filters/Core/vtkAppendPolyData.h
+++ b/Filters/Core/vtkAppendPolyData.h
@@ -92,6 +92,13 @@ public:
   vtkGetMacro(ParallelStreaming, int);
   vtkBooleanMacro(ParallelStreaming, int);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 //BTX
   int ExecuteAppend(vtkPolyData* output,
     vtkPolyData* inputs[], int numInputs);
@@ -102,6 +109,7 @@ protected:
 
   // Flag for selecting parallel streaming behavior
   int ParallelStreaming;
+  int OutputPointsPrecision;
 
   // Usual data generation method
   virtual int RequestData(vtkInformation *,
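
    The two hunks above give vtkAppendPolyData the same OutputPointsPrecision
    switch that several other filters gain later in this patch. A minimal usage
    sketch, not part of the patch itself; the sphere sources merely stand in
    for arbitrary vtkPolyData inputs:

        #include <vtkAlgorithm.h>
        #include <vtkAppendPolyData.h>
        #include <vtkNew.h>
        #include <vtkSphereSource.h>

        int main(int, char*[])
        {
          vtkNew<vtkSphereSource> a;
          vtkNew<vtkSphereSource> b;

          vtkNew<vtkAppendPolyData> append;
          append->AddInputConnection(a->GetOutputPort());
          append->AddInputConnection(b->GetOutputPort());
          // SINGLE_PRECISION forces VTK_FLOAT output points; DOUBLE_PRECISION
          // forces VTK_DOUBLE; DEFAULT_PRECISION keeps the type derived from
          // the inputs, as in the hunk above.
          append->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
          append->Update();
          return 0;
        }
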
diff --git a/Filters/Core/vtkArrayCalculator.cxx b/Filters/Core/vtkArrayCalculator.cxx
index 8a00fcd..d0a492e 100644
--- a/Filters/Core/vtkArrayCalculator.cxx
+++ b/Filters/Core/vtkArrayCalculator.cxx
@@ -52,6 +52,8 @@ vtkArrayCalculator::vtkArrayCalculator()
   this->SelectedCoordinateScalarComponents = NULL;
   this->SelectedCoordinateVectorComponents = NULL;
   this->CoordinateResults = 0;
+  this->ResultNormals = false;
+  this->ResultTCoords = false;
   this->ReplaceInvalidValues = 0;
   this->ReplacementValue = 0.0;
 
@@ -200,6 +202,20 @@ void vtkArrayCalculator::SetResultArrayName(const char* name)
   strcpy(this->ResultArrayName, name);
 }
 
+void CopyDataSetOrGraph(vtkDataSet* dsInput, vtkDataSet* dsOutput,
+                        vtkGraph* graphInput, vtkGraph* graphOutput)
+{
+  if (dsInput)
+    {
+    dsOutput->CopyStructure(dsInput);
+    dsOutput->CopyAttributes(dsInput);
+    }
+  else
+    {
+    graphOutput->ShallowCopy(graphInput);
+    }
+}
+
 int vtkArrayCalculator::RequestData(
   vtkInformation *vtkNotUsed(request),
   vtkInformationVector **inputVector,
@@ -212,9 +228,16 @@ int vtkArrayCalculator::RequestData(
   // get the input and output
   vtkDataObject *input = inInfo->Get(vtkDataObject::DATA_OBJECT());
   vtkDataObject *output = outInfo->Get(vtkDataObject::DATA_OBJECT());
-
-  int resultType = 0; // 0 for scalar, 1 for vector
-  int attributeDataType = 0; // 0 for point data, 1 for cell data
+  enum ResultType
+  {
+    SCALAR_RESULT,
+    VECTOR_RESULT
+  } resultType = SCALAR_RESULT;
+  enum DataType
+  {
+    POINT_DATA,
+    CELL_DATA
+  } attributeDataType = POINT_DATA;
   vtkIdType i;
   int j;
 
@@ -242,14 +265,14 @@ int vtkArrayCalculator::RequestData(
       {
       inFD = dsInput->GetPointData();
       outFD = dsOutput->GetPointData();
-      attributeDataType = 0;
+      attributeDataType = POINT_DATA;
       numTuples = dsInput->GetNumberOfPoints();
       }
     else
       {
       inFD = dsInput->GetCellData();
       outFD = dsOutput->GetCellData();
-      attributeDataType = 1;
+      attributeDataType = CELL_DATA;
       numTuples = dsInput->GetNumberOfCells();
       }
     }
@@ -260,14 +283,14 @@ int vtkArrayCalculator::RequestData(
       {
       inFD = graphInput->GetVertexData();
       outFD = graphOutput->GetVertexData();
-      attributeDataType = 0;
+      attributeDataType = POINT_DATA;
       numTuples = graphInput->GetNumberOfVertices();
       }
     else
       {
       inFD = graphInput->GetEdgeData();
       outFD = graphOutput->GetEdgeData();
-      attributeDataType = 1;
+      attributeDataType = CELL_DATA;
       numTuples = graphInput->GetNumberOfEdges();
       }
     }
@@ -299,7 +322,7 @@ int vtkArrayCalculator::RequestData(
         return 1;
         }
       }
-    else
+    else if(inFD->GetAbstractArray(this->ScalarArrayNames[i]) == NULL) // We ignore string array
       {
       vtkErrorMacro("Invalid array name: " << this->ScalarArrayNames[i]);
       return 1;
@@ -339,8 +362,7 @@ int vtkArrayCalculator::RequestData(
       }
     }
 
-  // we can add points
-  if(attributeDataType == 0)
+  if(attributeDataType == POINT_DATA)
     {
     for (i = 0; i < this->NumberOfCoordinateScalarArrays; i++)
       {
@@ -381,28 +403,32 @@ int vtkArrayCalculator::RequestData(
 
   if ( !this->Function || strlen(this->Function) == 0)
     {
-    dsOutput->CopyStructure(dsInput);
-    dsOutput->CopyAttributes(dsInput);
+    CopyDataSetOrGraph(dsInput, dsOutput, graphInput, graphOutput);
     return 1;
     }
   else if (this->FunctionParser->IsScalarResult())
     {
-    resultType = 0;
+    resultType = SCALAR_RESULT;
     }
   else if (this->FunctionParser->IsVectorResult())
     {
-    resultType = 1;
+    resultType = VECTOR_RESULT;
     }
   else
     {
-    dsOutput->CopyStructure(dsInput);
-    dsOutput->CopyAttributes(dsInput);
+    CopyDataSetOrGraph(dsInput, dsOutput, graphInput, graphOutput);
     // Error occurred in vtkFunctionParser.
     vtkWarningMacro("An error occurred when parsing the calculator's function.  See previous errors.");
     return 1;
     }
 
-  if(resultType == 1 && CoordinateResults != 0 && (psOutput || graphOutput))
+  if(resultType == SCALAR_RESULT && this->ResultNormals)
+    {
+    vtkWarningMacro("ResultNormals specified but output is scalar");
+    }
+
+  if(resultType == VECTOR_RESULT &&
+     CoordinateResults != 0 && (psOutput || graphOutput))
     {
     resultPoints = vtkPoints::New();
     resultPoints->SetNumberOfPoints(numTuples);
@@ -410,7 +436,7 @@ int vtkArrayCalculator::RequestData(
     }
   else if(CoordinateResults != 0)
     {
-    if(resultType != 1)
+    if(resultType != VECTOR_RESULT)
       {
       vtkErrorMacro("Coordinate output specified, "
                     "but there are no vector results");
@@ -428,7 +454,7 @@ int vtkArrayCalculator::RequestData(
         vtkDataArray::SafeDownCast(vtkAbstractArray::CreateArray(this->ResultArrayType));
     }
 
-  if (resultType == 0)
+  if (resultType == SCALAR_RESULT)
     {
     resultArray->SetNumberOfComponents(1);
     resultArray->SetNumberOfTuples(numTuples);
@@ -448,9 +474,12 @@ int vtkArrayCalculator::RequestData(
     for (j = 0; j < this->NumberOfScalarArrays; j++)
       {
       currentArray = inFD->GetArray(this->ScalarArrayNames[j]);
-      this->FunctionParser->
-        SetScalarVariableValue(
-          j, currentArray->GetComponent(i, this->SelectedScalarComponents[j]));
+      if(currentArray)
+        {
+        this->FunctionParser->
+          SetScalarVariableValue(
+            j, currentArray->GetComponent(i, this->SelectedScalarComponents[j]));
+        }
       }
     for (j = 0; j < this->NumberOfVectorArrays; j++)
       {
@@ -462,7 +491,7 @@ int vtkArrayCalculator::RequestData(
             i, this->SelectedVectorComponents[j][1]),
           currentArray->GetComponent(i, this->SelectedVectorComponents[j][2]));
       }
-    if(attributeDataType == 0)
+    if(attributeDataType == POINT_DATA)
       {
       double* pt = 0;
       if (dsInput)
@@ -489,7 +518,7 @@ int vtkArrayCalculator::RequestData(
             pt[this->SelectedCoordinateVectorComponents[j][2]]);
         }
       }
-    if (resultType == 0)
+    if (resultType == SCALAR_RESULT)
       {
       scalarResult[0] = this->FunctionParser->GetScalarResult();
       resultArray->SetTuple(i, scalarResult);
@@ -500,15 +529,12 @@ int vtkArrayCalculator::RequestData(
       }
     }
 
+  CopyDataSetOrGraph (dsInput, dsOutput, graphInput, graphOutput);
   if(resultPoints)
     {
     if(psInput)
       {
-      if(attributeDataType == 0)
-        {
-        psOutput->CopyStructure(psInput);
-        }
-      else
+      if(attributeDataType == CELL_DATA)
         {
         vtkPolyData* pd = vtkPolyData::SafeDownCast(psOutput);
         vtkUnstructuredGrid* ug = vtkUnstructuredGrid::SafeDownCast(psOutput);
@@ -532,34 +558,48 @@ int vtkArrayCalculator::RequestData(
           }
         }
       psOutput->SetPoints(resultPoints);
-      psOutput->CopyAttributes(dsInput);
-      }
-    else
-      {
-      graphOutput->CopyStructure(graphInput);
-      outFD->PassData(inFD);
       }
     resultPoints->Delete();
     }
-  else
-    {
-    dsOutput->CopyStructure(dsInput);
-    dsOutput->CopyAttributes(dsInput);
 
+  if(this->ResultTCoords || this->ResultNormals || ! this->CoordinateResults)
+    {
     resultArray->SetName(this->ResultArrayName);
     outFD->AddArray(resultArray);
-    if (resultType == 0)
+    if(resultType == SCALAR_RESULT)
       {
-      outFD->SetActiveScalars(this->ResultArrayName);
+      if (this->ResultTCoords)
+        {
+        outFD->SetActiveTCoords(this->ResultArrayName);
+        }
+      else
+        {
+        outFD->SetActiveScalars(this->ResultArrayName);
+        }
       }
     else
       {
-      outFD->SetActiveVectors(this->ResultArrayName);
+      if (this->ResultTCoords || this ->ResultNormals)
+        {
+        if (this->ResultTCoords)
+          {
+          outFD->SetActiveTCoords(this->ResultArrayName);
+          }
+        if (this->ResultNormals)
+          {
+          outFD->SetActiveNormals(this->ResultArrayName);
+          }
+        }
+      else
+        {
+        outFD->SetActiveVectors(this->ResultArrayName);
+        }
+      }
+    if (! resultPoints)
+      {
+      resultArray->Delete();
       }
-
-    resultArray->Delete();
     }
-
   return 1;
 }
 
diff --git a/Filters/Core/vtkArrayCalculator.h b/Filters/Core/vtkArrayCalculator.h
index efec7bd..53a4eea 100644
--- a/Filters/Core/vtkArrayCalculator.h
+++ b/Filters/Core/vtkArrayCalculator.h
@@ -133,6 +133,22 @@ public:
   vtkBooleanMacro(CoordinateResults, int);
 
   // Description:
+  // Set whether to output results as point/cell normals. Outputing as
+  // normals is only valid with vector results. Point or cell normals are
+  // selected using AttributeMode.
+  vtkGetMacro(ResultNormals, bool);
+  vtkSetMacro(ResultNormals, bool);
+  vtkBooleanMacro(ResultNormals, bool);
+
+  // Description:
+  // Set whether to output results as point/cell texture coordinates.
+  // Point or cell texture coordinates are selected using AttributeMode.
+  // 2-component texture coordinates cannot be generated at this time.
+  vtkGetMacro(ResultTCoords, bool);
+  vtkSetMacro(ResultTCoords, bool);
+  vtkBooleanMacro(ResultTCoords, bool);
+
+  // Description:
   // Control whether the filter operates on point data or cell data.
   // By default (AttributeModeToDefault), the filter uses point
   // data. Alternatively you can explicitly set the filter to use point data
@@ -224,6 +240,8 @@ protected:
   double  ReplacementValue;
 
   int     CoordinateResults;
+  bool    ResultNormals;
+  bool    ResultTCoords;
   char ** CoordinateScalarVariableNames;
   char ** CoordinateVectorVariableNames;
   int   * SelectedCoordinateScalarComponents;
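
    The new ResultNormals / ResultTCoords flags let the calculator install its
    result array as the active normals or texture coordinates instead of the
    active scalars/vectors. A small usage sketch, not taken from the patch; it
    feeds the point coordinates into the parser's norm() function so the
    vector-valued result can serve as normals (array and variable names are
    placeholders):

        #include <vtkArrayCalculator.h>
        #include <vtkNew.h>
        #include <vtkSphereSource.h>

        int main(int, char*[])
        {
          vtkNew<vtkSphereSource> sphere;

          vtkNew<vtkArrayCalculator> calc;
          calc->SetInputConnection(sphere->GetOutputPort());
          calc->AddCoordinateVectorVariable("p", 0, 1, 2); // point coordinates as a vector
          calc->SetFunction("norm(p)");                    // vector-valued result
          calc->SetResultArrayName("radialNormals");
          calc->SetResultNormals(true);                    // new flag from this patch
          calc->Update();   // the result array becomes the active point normals
          return 0;
        }
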
diff --git a/Filters/Core/vtkAttributeDataToFieldDataFilter.h b/Filters/Core/vtkAttributeDataToFieldDataFilter.h
index 3d8edff..5fe95d1 100644
--- a/Filters/Core/vtkAttributeDataToFieldDataFilter.h
+++ b/Filters/Core/vtkAttributeDataToFieldDataFilter.h
@@ -60,7 +60,7 @@ public:
 
 protected:
   vtkAttributeDataToFieldDataFilter();
-  ~vtkAttributeDataToFieldDataFilter() {};
+  ~vtkAttributeDataToFieldDataFilter() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *); //generate output data
 
diff --git a/Filters/Core/vtkCellDataToPointData.cxx b/Filters/Core/vtkCellDataToPointData.cxx
index 63a985a..2103db7 100644
--- a/Filters/Core/vtkCellDataToPointData.cxx
+++ b/Filters/Core/vtkCellDataToPointData.cxx
@@ -20,11 +20,12 @@
 #include "vtkIdList.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
-#include "vtkUnstructuredGrid.h"
-#include "vtkSmartPointer.h"
-#include "vtkUnsignedIntArray.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
+#include "vtkSmartPointer.h"
+#include "vtkUnsignedIntArray.h"
+#include "vtkUnstructuredGrid.h"
 
 #include <algorithm>
 #include <functional>
@@ -164,9 +165,10 @@ namespace
 
     // accumulate
     T const* srcbeg = srcptr;
+    vtkNew<vtkIdList> pids;
     for (vtkIdType cid = 0; cid < ncells; ++cid, srcbeg += ncomps)
       {
-      vtkIdList* const pids = src->GetCell(cid)->GetPointIds();
+      src->GetCellPoints(cid, pids.GetPointer());
       for (vtkIdType i = 0, I = pids->GetNumberOfIds(); i < I; ++i)
         {
         T* const dstbeg = dstptr + pids->GetId(i)*ncomps;
@@ -216,9 +218,10 @@ int vtkCellDataToPointData::RequestDataForUnstructuredGrid
   num->SetNumberOfComponents(1);
   num->SetNumberOfTuples(npoints);
   std::fill_n(num->GetPointer(0), npoints, 0u);
+  vtkNew<vtkIdList> pids;
   for (vtkIdType cid = 0; cid < ncells; ++cid)
     {
-    vtkIdList* const pids = src->GetCell(cid)->GetPointIds();
+    src->GetCellPoints(cid, pids.GetPointer());
     for (vtkIdType i = 0, I = pids->GetNumberOfIds(); i < I; ++i)
       {
       vtkIdType const pid = pids->GetId(i);
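
    Both loops above switch from src->GetCell(cid)->GetPointIds() to
    vtkDataSet::GetCellPoints() with a single reused vtkIdList, avoiding the
    cost of materializing a full vtkCell just to read its connectivity. A
    sketch of the idiom in isolation (the dataset argument is a placeholder
    for any vtkDataSet):

        #include <vtkDataSet.h>
        #include <vtkIdList.h>
        #include <vtkNew.h>

        // One vtkIdList is reused across iterations instead of allocating
        // per-cell state inside the loop.
        void VisitCellPointIds(vtkDataSet* dataset)
        {
          vtkNew<vtkIdList> pids;
          for (vtkIdType cid = 0; cid < dataset->GetNumberOfCells(); ++cid)
            {
            dataset->GetCellPoints(cid, pids.GetPointer());
            for (vtkIdType i = 0; i < pids->GetNumberOfIds(); ++i)
              {
              vtkIdType pid = pids->GetId(i);
              (void)pid; // process the point id here
              }
            }
        }
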
diff --git a/Filters/Core/vtkCellDataToPointData.h b/Filters/Core/vtkCellDataToPointData.h
index 2299bf5..b090d2b 100644
--- a/Filters/Core/vtkCellDataToPointData.h
+++ b/Filters/Core/vtkCellDataToPointData.h
@@ -55,7 +55,7 @@ public:
 
 protected:
   vtkCellDataToPointData();
-  ~vtkCellDataToPointData() {};
+  ~vtkCellDataToPointData() {}
 
   virtual int RequestData(vtkInformation* request,
                           vtkInformationVector** inputVector,
diff --git a/Filters/Core/vtkCleanPolyData.cxx b/Filters/Core/vtkCleanPolyData.cxx
index 262cefd..5c99efc 100644
--- a/Filters/Core/vtkCleanPolyData.cxx
+++ b/Filters/Core/vtkCleanPolyData.cxx
@@ -45,6 +45,7 @@ vtkCleanPolyData::vtkCleanPolyData()
   this->ConvertStripsToPolys = 1;
   this->Locator = NULL;
   this->PieceInvariant = 1;
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 //--------------------------------------------------------------------------
@@ -164,8 +165,22 @@ int vtkCleanPolyData::RequestData(
 
   vtkIdType numNewPts;
   vtkIdType numUsedPts=0;
-  vtkPoints *newPts = input->GetPoints()->NewInstance();
-  newPts->SetDataType(input->GetPoints()->GetDataType());
+  vtkPoints *newPts = inPts->NewInstance();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(inPts->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->Allocate(numPts);
 
   // we'll be needing these
@@ -681,6 +696,8 @@ void vtkCleanPolyData::PrintSelf(ostream& os, vtkIndent indent)
     }
   os << indent << "PieceInvariant: "
      << (this->PieceInvariant ? "On\n" : "Off\n");
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
 
 //--------------------------------------------------------------------------
diff --git a/Filters/Core/vtkCleanPolyData.h b/Filters/Core/vtkCleanPolyData.h
index 08e34f1..ad629aa 100644
--- a/Filters/Core/vtkCleanPolyData.h
+++ b/Filters/Core/vtkCleanPolyData.h
@@ -154,6 +154,13 @@ public:
   vtkGetMacro(PieceInvariant, int);
   vtkBooleanMacro(PieceInvariant, int);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkCleanPolyData();
  ~vtkCleanPolyData();
@@ -173,6 +180,7 @@ protected:
   vtkIncrementalPointLocator *Locator;
 
   int PieceInvariant;
+  int OutputPointsPrecision;
 private:
   vtkCleanPolyData(const vtkCleanPolyData&);  // Not implemented.
   void operator=(const vtkCleanPolyData&);  // Not implemented.
diff --git a/Filters/Core/vtkClipPolyData.cxx b/Filters/Core/vtkClipPolyData.cxx
index 6f59283..2fb2af4 100644
--- a/Filters/Core/vtkClipPolyData.cxx
+++ b/Filters/Core/vtkClipPolyData.cxx
@@ -45,10 +45,11 @@ vtkClipPolyData::vtkClipPolyData(vtkImplicitFunction *cf)
   this->Locator = NULL;
   this->Value = 0.0;
   this->GenerateClipScalars = 0;
+  this->GenerateClippedOutput = 0;
+  this->OutputPointsPrecision = DEFAULT_PRECISION;
 
   this->SetNumberOfOutputPorts(2);
 
-  this->GenerateClippedOutput = 0;
   vtkPolyData *output2 = vtkPolyData::New();
   this->GetExecutive()->SetOutputData(1, output2);
   output2->Delete();
@@ -187,6 +188,21 @@ int vtkClipPolyData::RequestData(
     }
 
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPoints->SetDataType(input->GetPoints()->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+
   newPoints->Allocate(numPts,numPts/2);
   newVerts = vtkCellArray::New();
   newVerts->Allocate(estimatedSize,estimatedSize/2);
@@ -424,4 +440,6 @@ void vtkClipPolyData::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Generate Clip Scalars: " << (this->GenerateClipScalars ? "On\n" : "Off\n");
 
   os << indent << "Generate Clipped Output: " << (this->GenerateClippedOutput ? "On\n" : "Off\n");
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Core/vtkClipPolyData.h b/Filters/Core/vtkClipPolyData.h
index 77eaaeb..86d9bd2 100644
--- a/Filters/Core/vtkClipPolyData.h
+++ b/Filters/Core/vtkClipPolyData.h
@@ -140,6 +140,13 @@ public:
   // Return the mtime also considering the locator and clip function.
   unsigned long GetMTime();
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkClipPolyData(vtkImplicitFunction *cf=NULL);
   ~vtkClipPolyData();
@@ -151,8 +158,9 @@ protected:
   int InsideOut;
   double Value;
   int GenerateClipScalars;
-
   int GenerateClippedOutput;
+  int OutputPointsPrecision;
+
 private:
   vtkClipPolyData(const vtkClipPolyData&);  // Not implemented.
   void operator=(const vtkClipPolyData&);  // Not implemented.
diff --git a/Filters/Core/vtkCompositeCutter.cxx b/Filters/Core/vtkCompositeCutter.cxx
index 3bc258f..643208d 100644
--- a/Filters/Core/vtkCompositeCutter.cxx
+++ b/Filters/Core/vtkCompositeCutter.cxx
@@ -35,7 +35,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkCompositeDataIterator.h"
 #include "vtkSmartPointer.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkCompositeCutter);
 
diff --git a/Filters/Core/vtkConnectivityFilter.cxx b/Filters/Core/vtkConnectivityFilter.cxx
index 3ea6408..5b94951 100644
--- a/Filters/Core/vtkConnectivityFilter.cxx
+++ b/Filters/Core/vtkConnectivityFilter.cxx
@@ -55,6 +55,8 @@ vtkConnectivityFilter::vtkConnectivityFilter()
 
   this->NewScalars = 0;
   this->NewCellScalars = 0;
+
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 vtkConnectivityFilter::~vtkConnectivityFilter()
@@ -139,6 +141,29 @@ int vtkConnectivityFilter::RequestData(
   this->NewCellScalars->SetNumberOfTuples(numCells);
 
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    vtkPointSet *inputPointSet = vtkPointSet::SafeDownCast(input);
+    if(inputPointSet)
+      {
+      newPts->SetDataType(inputPointSet->GetPoints()->GetDataType());
+      }
+    else
+      {
+      newPts->SetDataType(VTK_FLOAT);
+      }
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->Allocate(numPts);
 
   // Traverse all cells marking those visited.  Each new search
@@ -570,5 +595,7 @@ void vtkConnectivityFilter::PrintSelf(ostream& os, vtkIndent indent)
 
   double *range = this->GetScalarRange();
   os << indent << "Scalar Range: (" << range[0] << ", " << range[1] << ")\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
 
diff --git a/Filters/Core/vtkConnectivityFilter.h b/Filters/Core/vtkConnectivityFilter.h
index 85123cf..25bb13c 100644
--- a/Filters/Core/vtkConnectivityFilter.h
+++ b/Filters/Core/vtkConnectivityFilter.h
@@ -145,6 +145,13 @@ public:
   vtkGetMacro(ColorRegions,int);
   vtkBooleanMacro(ColorRegions,int);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkConnectivityFilter();
   ~vtkConnectivityFilter();
@@ -155,6 +162,7 @@ protected:
 
   int ColorRegions; //boolean turns on/off scalar gen for separate regions
   int ExtractionMode; //how to extract regions
+  int OutputPointsPrecision;
   vtkIdList *Seeds; //id's of points or cells used to seed regions
   vtkIdList *SpecifiedRegionIds; //regions specified for extraction
   vtkIdTypeArray *RegionSizes; //size (in cells) of each region extracted
diff --git a/Filters/Core/vtkContourFilter.cxx b/Filters/Core/vtkContourFilter.cxx
index 63771d3..38299ad 100644
--- a/Filters/Core/vtkContourFilter.cxx
+++ b/Filters/Core/vtkContourFilter.cxx
@@ -14,6 +14,7 @@
 =========================================================================*/
 #include "vtkContourFilter.h"
 
+#include "vtkCallbackCommand.h"
 #include "vtkCell.h"
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
@@ -27,9 +28,11 @@
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkMergePoints.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
 #include "vtkPolyData.h"
+#include "vtkPolyDataNormals.h"
 #include "vtkRectilinearGrid.h"
 #include "vtkRectilinearSynchronizedTemplates.h"
 #include "vtkSimpleScalarTree.h"
@@ -39,7 +42,6 @@
 #include "vtkSynchronizedTemplates3D.h"
 #include "vtkTimerLog.h"
 #include "vtkUniformGrid.h"
-#include "vtkUnstructuredGrid.h"
 #include "vtkIncrementalPointLocator.h"
 #include "vtkContourHelper.h"
 
@@ -54,7 +56,10 @@ vtkContourFilter::vtkContourFilter()
 {
   this->ContourValues = vtkContourValues::New();
 
-  this->ComputeNormals = 1;
+  // -1 == uninitialized. This is so we know if ComputeNormals has been set
+  // by the user, so that we can preserve old (broken) behavior that ignored
+  // this setting for certain dataset types.
+  this->ComputeNormals = -1;
   this->ComputeGradients = 0;
   this->ComputeScalars = 1;
 
@@ -72,6 +77,20 @@ vtkContourFilter::vtkContourFilter()
   this->GridSynchronizedTemplates = vtkGridSynchronizedTemplates3D::New();
   this->RectilinearSynchronizedTemplates = vtkRectilinearSynchronizedTemplates::New();
 
+  this->InternalProgressCallbackCommand = vtkCallbackCommand::New();
+  this->InternalProgressCallbackCommand->SetCallback(
+    &vtkContourFilter::InternalProgressCallbackFunction);
+  this->InternalProgressCallbackCommand->SetClientData(this);
+
+  this->SynchronizedTemplates2D->AddObserver(vtkCommand::ProgressEvent,
+                                             this->InternalProgressCallbackCommand);
+  this->SynchronizedTemplates3D->AddObserver(vtkCommand::ProgressEvent,
+                                             this->InternalProgressCallbackCommand);
+  this->GridSynchronizedTemplates->AddObserver(vtkCommand::ProgressEvent,
+                                               this->InternalProgressCallbackCommand);
+  this->RectilinearSynchronizedTemplates->AddObserver(vtkCommand::ProgressEvent,
+                                                      this->InternalProgressCallbackCommand);
+
   // by default process active point scalars
   this->SetInputArrayToProcess(0,0,0,vtkDataObject::FIELD_ASSOCIATION_POINTS,
                                vtkDataSetAttributes::SCALARS);
@@ -97,6 +116,7 @@ vtkContourFilter::~vtkContourFilter()
   this->SynchronizedTemplates3D->Delete();
   this->GridSynchronizedTemplates->Delete();
   this->RectilinearSynchronizedTemplates->Delete();
+  this->InternalProgressCallbackCommand->Delete();
 }
 
 // Overload standard modified time function. If contour values are modified,
@@ -440,6 +460,10 @@ int vtkContourFilter::RequestData(
         {
         newPts->SetDataType(inputPointSet->GetPoints()->GetDataType());
         }
+      else
+        {
+        newPts->SetDataType(VTK_FLOAT);
+        }
       }
     else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
       {
@@ -466,7 +490,8 @@ int vtkContourFilter::RequestData(
       this->CreateDefaultLocator();
       }
     this->Locator->InitPointInsertion (newPts,
-                                       input->GetBounds(),estimatedSize);
+                                       input->GetBounds(),
+                                       input->GetNumberOfPoints());
 
     // interpolate data along edge
     // if we did not ask for scalars to be computed, don't copy them
@@ -601,6 +626,27 @@ int vtkContourFilter::RequestData(
       }
     newPolys->Delete();
 
+    // -1 == uninitialized. This setting used to be ignored, and we preserve the
+    // old behavior for backward compatibility. Normals are computed here only
+    // if the user has explicitly enabled the option.
+    if (this->ComputeNormals != 0 && this->ComputeNormals != -1)
+      {
+      vtkNew<vtkPolyDataNormals> normalsFilter;
+      normalsFilter->SetOutputPointsPrecision(this->OutputPointsPrecision);
+      vtkNew<vtkPolyData> tempInput;
+      tempInput->ShallowCopy(output);
+      normalsFilter->SetInputData(tempInput.GetPointer());
+      normalsFilter->SetFeatureAngle(180.);
+      normalsFilter->SetUpdateExtent(
+        0,
+        info->Get(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER()),
+        info->Get(vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES()),
+        info->Get(vtkStreamingDemandDrivenPipeline::
+                  UPDATE_NUMBER_OF_GHOST_LEVELS()));
+      normalsFilter->Update();
+      output->ShallowCopy(normalsFilter->GetOutput());
+      }
+
     this->Locator->Initialize();//releases leftover memory
     output->Squeeze();
     } //else if not vtkUnstructuredGrid
@@ -804,3 +850,14 @@ void vtkContourFilter::ReportReferences(vtkGarbageCollector* collector)
   // reference loop.
   vtkGarbageCollectorReport(collector, this->ScalarTree, "ScalarTree");
 }
+
+//----------------------------------------------------------------------------
+void vtkContourFilter::InternalProgressCallbackFunction(vtkObject *vtkNotUsed(caller),
+                                                        unsigned long vtkNotUsed(eid),
+                                                        void *clientData,
+                                                        void *callData)
+{
+  vtkContourFilter *contourFilter = static_cast<vtkContourFilter *>(clientData);
+  double progress = *static_cast<double *>(callData);
+  contourFilter->UpdateProgress(progress);
+}
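
For reference, a sketch of observing the forwarded progress from application
code; the wiring mirrors the internal observers added above, and the callback
name is illustrative only:

    #include "vtkCallbackCommand.h"
    #include "vtkCommand.h"
    #include "vtkContourFilter.h"
    #include "vtkNew.h"
    #include "vtkObject.h"

    #include <iostream>

    static void PrintProgress(vtkObject *caller, unsigned long, void *, void *callData)
    {
      // ProgressEvent passes the current progress fraction through callData.
      double progress = *static_cast<double *>(callData);
      std::cout << caller->GetClassName() << " progress: " << progress << std::endl;
    }

    void WatchProgress(vtkContourFilter *contour)
    {
      vtkNew<vtkCallbackCommand> observer;
      observer->SetCallback(PrintProgress);
      contour->AddObserver(vtkCommand::ProgressEvent, observer.GetPointer());
    }

Because the filter now forwards ProgressEvent from its internal synchronized
templates, an observer on the contour filter also sees progress while the work
is delegated to them.
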
diff --git a/Filters/Core/vtkContourFilter.h b/Filters/Core/vtkContourFilter.h
index ec42291..04629f9 100644
--- a/Filters/Core/vtkContourFilter.h
+++ b/Filters/Core/vtkContourFilter.h
@@ -55,6 +55,7 @@ class vtkSynchronizedTemplates2D;
 class vtkSynchronizedTemplates3D;
 class vtkGridSynchronizedTemplates3D;
 class vtkRectilinearSynchronizedTemplates;
+class vtkCallbackCommand;
 
 class VTKFILTERSCORE_EXPORT vtkContourFilter : public vtkPolyDataAlgorithm
 {
@@ -87,6 +88,10 @@ public:
   // expensive in both time and storage. If the output data will be
   // processed by filters that modify topology or geometry, it may be
   // wise to turn Normals and Gradients off.
+  // This setting defaults to On for vtkImageData, vtkRectilinearGrid,
+  // vtkStructuredGrid, and vtkUnstructuredGrid inputs, and Off for all others.
+  // These defaults preserve the behavior of an older version of this filter,
+  // which ignored this setting for certain input types.
   vtkSetMacro(ComputeNormals,int);
   vtkGetMacro(ComputeNormals,int);
   vtkBooleanMacro(ComputeNormals,int);
@@ -188,6 +193,12 @@ protected:
   vtkSynchronizedTemplates3D *SynchronizedTemplates3D;
   vtkGridSynchronizedTemplates3D *GridSynchronizedTemplates;
   vtkRectilinearSynchronizedTemplates *RectilinearSynchronizedTemplates;
+  vtkCallbackCommand *InternalProgressCallbackCommand;
+
+  static void InternalProgressCallbackFunction(vtkObject *caller,
+                                               unsigned long eid,
+                                               void *clientData,
+                                               void *callData);
 
 private:
   vtkContourFilter(const vtkContourFilter&);  // Not implemented.
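
For reference, a sketch of enabling normal generation explicitly under the new
default (illustrative only; the wavelet source is a stand-in scalar producer
and the contour value is arbitrary):

    #include "vtkContourFilter.h"
    #include "vtkNew.h"
    #include "vtkRTAnalyticSource.h"

    int main(int, char *[])
    {
      vtkNew<vtkRTAnalyticSource> wavelet;    // image data with point scalars
      vtkNew<vtkContourFilter> contour;
      contour->SetInputConnection(wavelet->GetOutputPort());
      contour->SetValue(0, 150.0);
      // The default is now -1 (uninitialized); set the flag explicitly when
      // normals are required.
      contour->ComputeNormalsOn();
      contour->Update();
      return 0;
    }

For inputs handled by the generic dataset path, explicitly enabling the flag is
what triggers the vtkPolyDataNormals pass added in the .cxx change above.
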
diff --git a/Filters/Core/vtkContourGrid.cxx b/Filters/Core/vtkContourGrid.cxx
index 3115c1b..06ea8cf 100644
--- a/Filters/Core/vtkContourGrid.cxx
+++ b/Filters/Core/vtkContourGrid.cxx
@@ -17,8 +17,10 @@
 #include "vtkCell.h"
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkContourValues.h"
 #include "vtkFloatArray.h"
+#include "vtkGenericCell.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkNew.h"
@@ -27,8 +29,9 @@
 #include "vtkPolyData.h"
 #include "vtkPolyDataNormals.h"
 #include "vtkSimpleScalarTree.h"
+#include "vtkSmartPointer.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
-#include "vtkUnstructuredGrid.h"
+#include "vtkUnstructuredGridBase.h"
 #include "vtkCutter.h"
 #include "vtkMergePoints.h"
 #include "vtkPointLocator.h"
@@ -96,32 +99,33 @@ unsigned long vtkContourGrid::GetMTime()
   return mTime;
 }
 
-template <class T>
+template <class Scalar>
 void vtkContourGridExecute(vtkContourGrid *self, vtkDataSet *input,
                            vtkPolyData *output,
-                           vtkDataArray *inScalars, T *scalarArrayPtr,
+                           vtkDataArray *inScalars,
                            int numContours, double *values,
                            int computeScalars,
-                           int useScalarTree,vtkScalarTree *&scalarTree,bool generateTriangles)
+                           int useScalarTree, vtkScalarTree *&scalarTree,
+                           bool generateTriangles)
 {
-  vtkIdType cellId, i;
+  vtkIdType i;
   int abortExecute=0;
   vtkIncrementalPointLocator *locator = self->GetLocator();
-  vtkIdList *cellPts;
-  vtkCell *cell;
-  double range[2];
+  vtkNew<vtkGenericCell> cell;
+  Scalar range[2];
   vtkCellArray *newVerts, *newLines, *newPolys;
   vtkPoints *newPts;
   vtkIdType numCells, estimatedSize;
   vtkPointData *inPd=input->GetPointData(), *outPd=output->GetPointData();
   vtkCellData *inCd=input->GetCellData(), *outCd=output->GetCellData();
   vtkDataArray *cellScalars;
-  vtkUnstructuredGrid *grid = static_cast<vtkUnstructuredGrid *>(input);
+  Scalar *cellScalarPtr;
+  vtkIdType numCellScalars;
   //In this case, we know that the input is an unstructured grid.
-  vtkIdType numPoints, cellArrayIt = 0;
+  vtkUnstructuredGridBase *grid = static_cast<vtkUnstructuredGridBase *>(input);
   int needCell = 0;
-  vtkIdType *cellArrayPtr;
-  T tempScalar;
+  vtkSmartPointer<vtkCellIterator> cellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
 
   numCells = input->GetNumberOfCells();
 
@@ -162,10 +166,11 @@ void vtkContourGridExecute(vtkContourGrid *self, vtkDataSet *input,
   newPolys->Allocate(estimatedSize,estimatedSize);
   cellScalars = inScalars->NewInstance();
   cellScalars->SetNumberOfComponents(inScalars->GetNumberOfComponents());
-   cellScalars->Allocate(VTK_CELL_SIZE*inScalars->GetNumberOfComponents());
+  cellScalars->Allocate(VTK_CELL_SIZE*inScalars->GetNumberOfComponents());
 
    // locator used to merge potentially duplicate points
-  locator->InitPointInsertion (newPts, input->GetBounds(),estimatedSize);
+  locator->InitPointInsertion (newPts, input->GetBounds(),
+                               input->GetNumberOfPoints());
 
   // interpolate data along edge
   // if we did not ask for scalars to be computed, don't copy them
@@ -176,7 +181,8 @@ void vtkContourGridExecute(vtkContourGrid *self, vtkDataSet *input,
   outPd->InterpolateAllocate(inPd,estimatedSize,estimatedSize);
   outCd->CopyAllocate(inCd,estimatedSize,estimatedSize);
 
-  vtkContourHelper helper(locator, newVerts, newLines, newPolys,inPd, inCd, outPd,outCd, estimatedSize, generateTriangles);
+  vtkContourHelper helper(locator, newVerts, newLines, newPolys, inPd, inCd,
+                          outPd, outCd, estimatedSize, generateTriangles);
   // If enabled, build a scalar tree to accelerate search
   //
   if ( !useScalarTree )
@@ -204,48 +210,51 @@ void vtkContourGridExecute(vtkContourGrid *self, vtkDataSet *input,
       // Loop over all cells; get scalar values for all cell points
       // and process each cell.
       //
-      cellArrayIt = 0;
-      cellArrayPtr = grid->GetCells()->GetPointer();
-      for (cellId=0; cellId < numCells && !abortExecute; cellId++)
+      for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+           cellIter->GoToNextCell())
         {
-        numPoints = cellArrayPtr[cellArrayIt];
-        // I assume that "GetCellType" is fast.
-        cellType = input->GetCellType(cellId);
+        if (abortExecute)
+          {
+          break;
+          }
+
+        cellType = cellIter->GetCellType();
         if (cellType >= VTK_NUMBER_OF_CELL_TYPES)
           { // Protect against new cell types added.
           vtkGenericWarningMacro("Unknown cell type " << cellType);
-          cellArrayIt += 1+numPoints;
           continue;
           }
         if (cellTypeDimensions[cellType] != dimensionality)
           {
-          cellArrayIt += 1+numPoints;
           continue;
           }
-        cellArrayIt++;
+
+        cellScalars->SetNumberOfTuples(cellIter->GetNumberOfPoints());
+        inScalars->GetTuples(cellIter->GetPointIds(), cellScalars);
+        numCellScalars = cellScalars->GetNumberOfComponents()
+            * cellScalars->GetNumberOfTuples();
+        cellScalarPtr = static_cast<Scalar*>(cellScalars->GetVoidPointer(0));
 
         //find min and max values in scalar data
-        range[0] = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-        range[1] = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-        cellArrayIt++;
+        range[0] = range[1] = cellScalarPtr[0];
 
-        for (i = 1; i < numPoints; i++)
+        for (Scalar *it = cellScalarPtr + 1,
+             *itEnd = cellScalarPtr + numCellScalars; it != itEnd; ++it)
           {
-          tempScalar = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-          cellArrayIt++;
-          if (tempScalar <= range[0])
+          if (*it <= range[0])
             {
-            range[0] = tempScalar;
-            } //if tempScalar <= min range value
-          if (tempScalar >= range[1])
+            range[0] = *it;
+            } //if scalar <= min range value
+          if (*it >= range[1])
             {
-            range[1] = tempScalar;
-            } //if tempScalar >= max range value
-          } // for all points in this cell
+            range[1] = *it;
+            } //if scalar >= max range value
+          } // for all cellScalars
 
-        if (dimensionality == 3 &&  ! (cellId % 5000) )
+        if (dimensionality == 3 &&  ! (cellIter->GetCellId() % 5000) )
           {
-          self->UpdateProgress (static_cast<double>(cellId)/numCells);
+          self->UpdateProgress(static_cast<double>(cellIter->GetCellId())
+                               / numCells);
           if (self->GetAbortExecute())
             {
             abortExecute = 1;
@@ -263,15 +272,14 @@ void vtkContourGridExecute(vtkContourGrid *self, vtkDataSet *input,
 
         if (needCell)
           {
-          cell = input->GetCell(cellId);
-          cellPts = cell->GetPointIds();
-          inScalars->GetTuples(cellPts,cellScalars);
+          cellIter->GetCell(cell.GetPointer());
 
           for (i=0; i < numContours; i++)
             {
             if ((values[i] >= range[0]) && (values[i] <= range[1]))
               {
-              helper.Contour(cell,values[i],cellScalars,cellId);
+              helper.Contour(cell.GetPointer(), values[i], cellScalars,
+                             cellIter->GetCellId());
               } // if contour value in range of values for this cell
             } // for all contour values
           } // if contour goes through this cell
@@ -295,12 +303,16 @@ void vtkContourGridExecute(vtkContourGrid *self, vtkDataSet *input,
     // Loop over all contour values.  Then for each contour value,
     // loop over all cells.
     //
+    vtkCell *tmpCell;
+    vtkIdList *dummyIdList = NULL;
+    vtkIdType cellId = cellIter->GetCellId();
     for (i=0; i < numContours; i++)
       {
-      for ( scalarTree->InitTraversal(values[i]);
-          (cell=scalarTree->GetNextCell(cellId,cellPts,cellScalars)) != NULL; )
+      for (scalarTree->InitTraversal(values[i]);
+          (tmpCell = scalarTree->GetNextCell(cellId, dummyIdList,
+                                             cellScalars)); )
         {
-        helper.Contour(cell,values[i],cellScalars,cellId);
+        helper.Contour(tmpCell, values[i], cellScalars, cellId);
 
         //don't want to call Contour any more than necessary
         } //for all cells
@@ -350,13 +362,12 @@ int vtkContourGrid::RequestData(
   vtkInformation *outInfo = outputVector->GetInformationObject(0);
 
   // get the input and output
-  vtkUnstructuredGrid *input = vtkUnstructuredGrid::SafeDownCast(
+  vtkUnstructuredGridBase *input = vtkUnstructuredGridBase::SafeDownCast(
     inInfo->Get(vtkDataObject::DATA_OBJECT()));
   vtkPolyData *output = vtkPolyData::SafeDownCast(
     outInfo->Get(vtkDataObject::DATA_OBJECT()));
 
   vtkDataArray *inScalars;
-  void *scalarArrayPtr;
   vtkIdType numCells;
   int numContours = this->ContourValues->GetNumberOfContours();
   double *values = this->ContourValues->GetValues();
@@ -379,15 +390,12 @@ int vtkContourGrid::RequestData(
     return 1;
     }
 
-  scalarArrayPtr = inScalars->GetVoidPointer(0);
-
   switch (inScalars->GetDataType())
     {
-    vtkTemplateMacro(
-      vtkContourGridExecute(this, input, output, inScalars,
-                            static_cast<VTK_TT *>(scalarArrayPtr),
-                            numContours, values,computeScalars, useScalarTree,
-                            scalarTree, this->GenerateTriangles!=0));
+    vtkTemplateMacro(vtkContourGridExecute<VTK_TT>(
+            this, input, output, inScalars, numContours, values,
+            computeScalars, useScalarTree, scalarTree,
+            this->GenerateTriangles != 0));
     default:
       vtkErrorMacro(<< "Execute: Unknown ScalarType");
       return 1;
@@ -458,7 +466,8 @@ int vtkContourGrid::GetOutputPointsPrecision() const
 
 int vtkContourGrid::FillInputPortInformation(int, vtkInformation *info)
 {
-  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkUnstructuredGrid");
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(),
+            "vtkUnstructuredGridBase");
   return 1;
 }
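
For reference, a minimal sketch of the vtkCellIterator traversal pattern the
rewritten execute loop adopts (the helper function and its name are
illustrative, not part of the patch):

    #include "vtkCellIterator.h"
    #include "vtkDataSet.h"
    #include "vtkGenericCell.h"
    #include "vtkIdList.h"
    #include "vtkNew.h"
    #include "vtkSmartPointer.h"

    void VisitCells(vtkDataSet *dataSet)
    {
      vtkNew<vtkGenericCell> cell;
      vtkSmartPointer<vtkCellIterator> iter =
          vtkSmartPointer<vtkCellIterator>::Take(dataSet->NewCellIterator());
      for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextCell())
        {
        int cellType = iter->GetCellType();        // cheapest query
        vtkIdList *pointIds = iter->GetPointIds(); // moderately expensive
        iter->GetCell(cell.GetPointer());          // most expensive; fetch only when needed
        (void)cellType;
        (void)pointIds;
        }
    }

Unlike indexing into vtkUnstructuredGrid::GetCells() directly, this pattern
works for any vtkUnstructuredGridBase subclass, which is what lets the filter
accept the broader input type declared in FillInputPortInformation.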
 
diff --git a/Filters/Core/vtkContourGrid.h b/Filters/Core/vtkContourGrid.h
index 080564c..eb6bfa8 100644
--- a/Filters/Core/vtkContourGrid.h
+++ b/Filters/Core/vtkContourGrid.h
@@ -39,7 +39,7 @@
 // normals of the resulting isosurface.
 
 // .SECTION See Also
-// vtkMarchingContourFilter vtkKitwareContourFilter
+// vtkMarchingContourFilter
 // vtkMarchingCubes vtkSliceCubes vtkDividingCubes vtkMarchingSquares
 // vtkImageMarchingCubes
 
diff --git a/Filters/Core/vtkCutter.cxx b/Filters/Core/vtkCutter.cxx
index 0d5c9f8..aae3d45 100644
--- a/Filters/Core/vtkCutter.cxx
+++ b/Filters/Core/vtkCutter.cxx
@@ -16,6 +16,7 @@
 
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkContourValues.h"
 #include "vtkDataSet.h"
 #include "vtkDoubleArray.h"
@@ -27,23 +28,26 @@
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkMergePoints.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPlane.h"
 #include "vtkPointData.h"
 #include "vtkPolyData.h"
 #include "vtkRectilinearGrid.h"
 #include "vtkRectilinearSynchronizedTemplates.h"
+#include "vtkSmartPointer.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include "vtkStructuredGrid.h"
 #include "vtkSynchronizedTemplates3D.h"
 #include "vtkSynchronizedTemplatesCutter3D.h"
-#include "vtkUnstructuredGrid.h"
+#include "vtkUnstructuredGridBase.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include "vtkIncrementalPointLocator.h"
 #include "vtkTimerLog.h"
 #include "vtkSmartPointer.h"
 #include "vtkContourHelper.h"
 
+#include <algorithm>
 #include <math.h>
 
 vtkStandardNewMacro(vtkCutter);
@@ -469,6 +473,10 @@ void vtkCutter::DataSetCutter(vtkDataSet *input, vtkPolyData *output)
       {
       newPoints->SetDataType(inputPointSet->GetPoints()->GetDataType());
       }
+    else
+      {
+      newPoints->SetDataType(VTK_FLOAT);
+      }
     }
   else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
     {
@@ -673,7 +681,7 @@ void vtkCutter::DataSetCutter(vtkDataSet *input, vtkPolyData *output)
 //----------------------------------------------------------------------------
 void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
 {
-  vtkIdType cellId, i;
+  vtkIdType i;
   int iter;
   vtkDoubleArray *cellScalars;
   vtkCellArray *newVerts, *newLines, *newPolys;
@@ -682,12 +690,16 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
   double value, s;
   vtkIdType estimatedSize, numCells=input->GetNumberOfCells();
   vtkIdType numPts=input->GetNumberOfPoints();
-  vtkIdType cellArrayIt = 0;
   int numCellPts;
+  vtkIdType *ptIds;
   vtkPointData *inPD, *outPD;
   vtkCellData *inCD=input->GetCellData(), *outCD=output->GetCellData();
   vtkIdList *cellIds;
   int numContours = this->ContourValues->GetNumberOfContours();
+  double *contourValues = this->ContourValues->GetValues();
+  double *contourValuesEnd = contourValues + numContours;
+  double *contourIter;
+
   int abortExecute = 0;
 
   double range[2];
@@ -711,6 +723,10 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
       {
       newPoints->SetDataType(inputPointSet->GetPoints()->GetDataType());
       }
+    else
+      {
+      newPoints->SetDataType(VTK_FLOAT);
+      }
     }
   else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
     {
@@ -766,8 +782,10 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
   vtkIdType progressInterval = numCuts/20 + 1;
   int cut=0;
 
-  vtkUnstructuredGrid *grid = static_cast<vtkUnstructuredGrid *>(input);
-  vtkIdType *cellArrayPtr = grid->GetCells()->GetPointer();
+  vtkSmartPointer<vtkCellIterator> cellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
+  vtkNew<vtkGenericCell> cell;
+  vtkIdList *pointIdList;
   double *scalarArrayPtr = cutScalars->GetPointer(0);
   double tempScalar;
   cellScalars = cutScalars->NewInstance();
@@ -785,7 +803,9 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
       // Loop over all cells; get scalar values for all cell points
       // and process each cell.
       //
-      for (cellId=0; cellId < numCells && !abortExecute; cellId++)
+      for (cellIter->InitTraversal();
+           !cellIter->IsDoneWithTraversal() && !abortExecute;
+           cellIter->GoToNextCell())
         {
         if ( !(++cut % progressInterval) )
           {
@@ -794,26 +814,18 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
           abortExecute = this->GetAbortExecute();
           }
 
-        numCellPts = cellArrayPtr[cellArrayIt];
-        cellArrayIt++;
+        pointIdList = cellIter->GetPointIds();
+        numCellPts = pointIdList->GetNumberOfIds();
+        ptIds = pointIdList->GetPointer(0);
 
         //find min and max values in scalar data
-        range[0] = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-        range[1] = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-        cellArrayIt++;
+        range[0] = range[1] = scalarArrayPtr[ptIds[0]];
 
         for (i = 1; i < numCellPts; i++)
           {
-          tempScalar = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-          cellArrayIt++;
-          if (tempScalar <= range[0])
-            {
-            range[0] = tempScalar;
-            } //if tempScalar <= min range value
-          if (tempScalar >= range[1])
-            {
-            range[1] = tempScalar;
-            } //if tempScalar >= max range value
+          tempScalar = scalarArrayPtr[ptIds[i]];
+          range[0] = std::min(range[0], tempScalar);
+          range[1] = std::max(range[1], tempScalar);
           } // for all points in this cell
 
         int needCell = 0;
@@ -825,7 +837,7 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
 
         if (needCell)
           {
-          vtkCell *cell = input->GetCell(cellId);
+          cellIter->GetCell(cell.GetPointer());
           cellIds = cell->GetPointIds();
           cutScalars->GetTuples(cellIds,cellScalars);
           // Loop over all contour values.
@@ -839,7 +851,8 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
               }
             value = this->ContourValues->GetValue(iter);
 
-            helper.Contour(cell,value, cellScalars, cellId);
+            helper.Contour(cell.GetPointer(), value, cellScalars,
+                           cellIter->GetCellId());
             }
           }
 
@@ -872,49 +885,47 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
       // Loop over all cells; get scalar values for all cell points
       // and process each cell.
       //
-      cellArrayIt = 0;
-      for (cellId=0; cellId < numCells && !abortExecute; cellId++)
+      for (cellIter->InitTraversal();
+           !cellIter->IsDoneWithTraversal() && !abortExecute;
+           cellIter->GoToNextCell())
         {
-        numCellPts = cellArrayPtr[cellArrayIt];
-        // I assume that "GetCellType" is fast.
-        cellType = input->GetCellType(cellId);
+        // Just fetch the cell type -- least expensive.
+        cellType = cellIter->GetCellType();
+
+        // Protect against new cell types added.
         if (cellType >= VTK_NUMBER_OF_CELL_TYPES)
-          { // Protect against new cell types added.
+          {
           vtkErrorMacro("Unknown cell type " << cellType);
-          cellArrayIt += 1+numCellPts;
           continue;
           }
+
+        // Check if the type is valid for this pass
         if (cellTypeDimensions[cellType] != dimensionality)
           {
-          cellArrayIt += 1+numCellPts;
           continue;
           }
-        cellArrayIt++;
+
+        // Just fetch the cell point ids -- moderately expensive.
+        pointIdList = cellIter->GetPointIds();
+        numCellPts = pointIdList->GetNumberOfIds();
+        ptIds = pointIdList->GetPointer(0);
 
         //find min and max values in scalar data
-        range[0] = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-        range[1] = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-        cellArrayIt++;
+        range[0] = range[1] = scalarArrayPtr[ptIds[0]];
 
-        for (i = 1; i < numCellPts; i++)
+        for (i = 1; i < numCellPts; ++i)
           {
-          tempScalar = scalarArrayPtr[cellArrayPtr[cellArrayIt]];
-          cellArrayIt++;
-          if (tempScalar <= range[0])
-            {
-            range[0] = tempScalar;
-            } //if tempScalar <= min range value
-          if (tempScalar >= range[1])
-            {
-            range[1] = tempScalar;
-            } //if tempScalar >= max range value
+          tempScalar = scalarArrayPtr[ptIds[i]];
+          range[0] = std::min(range[0], tempScalar);
+          range[1] = std::max(range[1], tempScalar);
           } // for all points in this cell
 
+        // Check if the full cell is needed
         int needCell = 0;
-        for (int cont = 0; cont < numContours; ++cont)
+        for (contourIter = contourValues; contourIter != contourValuesEnd;
+             ++contourIter)
           {
-          double val = this->ContourValues->GetValue(cont);
-          if (val >= range[0] && val <= range[1])
+          if (*contourIter >= range[0] && *contourIter <= range[1])
             {
             needCell = 1;
             break;
@@ -923,11 +934,12 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
 
         if (needCell)
           {
-          vtkCell *cell = input->GetCell(cellId);
-          cellIds = cell->GetPointIds();
-          cutScalars->GetTuples(cellIds,cellScalars);
+          // Fetch the full cell -- most expensive.
+          cellIter->GetCell(cell.GetPointer());
+          cutScalars->GetTuples(pointIdList, cellScalars);
           // Loop over all contour values.
-          for (iter=0; iter < numContours && !abortExecute; iter++)
+          for (contourIter = contourValues; contourIter != contourValuesEnd;
+               ++contourIter)
             {
             if (dimensionality == 3 && !(++cut % progressInterval) )
               {
@@ -935,10 +947,9 @@ void vtkCutter::UnstructuredGridCutter(vtkDataSet *input, vtkPolyData *output)
               this->UpdateProgress (static_cast<double>(cut)/numCuts);
               abortExecute = this->GetAbortExecute();
               }
-            value = this->ContourValues->GetValue(iter);
-            helper.Contour(cell,value, cellScalars, cellId);
+            helper.Contour(cell.GetPointer(), *contourIter, cellScalars,
+                           cellIter->GetCellId());
             } // for all contour values
-
           } // if need cell
         } // for all cells
       } // for all dimensions (1,2,3).
diff --git a/Filters/Core/vtkDecimatePolylineFilter.cxx b/Filters/Core/vtkDecimatePolylineFilter.cxx
index a5314e9..d1be1d1 100644
--- a/Filters/Core/vtkDecimatePolylineFilter.cxx
+++ b/Filters/Core/vtkDecimatePolylineFilter.cxx
@@ -47,6 +47,7 @@ vtkDecimatePolylineFilter::vtkDecimatePolylineFilter()
   this->Closed = true;
   this->PriorityQueue = vtkSmartPointer< vtkPriorityQueue >::New();
   this->ErrorMap = new vtkDecimatePolylineVertexErrorSTLMap;
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 //---------------------------------------------------------------------
@@ -112,6 +113,21 @@ int vtkDecimatePolylineFilter::RequestData(
 
   // Allocate memory and prepare for data processing
   vtkPoints *newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(inputPoints->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   vtkCellArray *newLines = vtkCellArray::New();
   newLines->Allocate(numLines,2);
   vtkPointData *inPD = input->GetPointData();
@@ -258,4 +274,6 @@ void vtkDecimatePolylineFilter::PrintSelf(ostream& os, vtkIndent indent)
   this->Superclass::PrintSelf(os,indent);
 
   os << indent << "Target Reduction: " << this->TargetReduction << "\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Core/vtkDecimatePolylineFilter.h b/Filters/Core/vtkDecimatePolylineFilter.h
index bc99bbb..cf9f66f 100644
--- a/Filters/Core/vtkDecimatePolylineFilter.h
+++ b/Filters/Core/vtkDecimatePolylineFilter.h
@@ -64,6 +64,13 @@ public:
   vtkSetClampMacro(TargetReduction,double,0.0,1.0);
   vtkGetMacro(TargetReduction,double);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkDecimatePolylineFilter();
   ~vtkDecimatePolylineFilter();
@@ -82,6 +89,7 @@ protected:
   vtkSmartPointer< vtkPriorityQueue >   PriorityQueue;
   bool                                  Closed;
   double                                TargetReduction;
+  int                                   OutputPointsPrecision;
 
 private:
   vtkDecimatePolylineFilter(const vtkDecimatePolylineFilter&);  // Not implemented.
diff --git a/Filters/Core/vtkDecimatePro.cxx b/Filters/Core/vtkDecimatePro.cxx
index 869d363..d38675b 100644
--- a/Filters/Core/vtkDecimatePro.cxx
+++ b/Filters/Core/vtkDecimatePro.cxx
@@ -84,6 +84,7 @@ vtkDecimatePro::vtkDecimatePro()
   this->Degree = 25;
   this->BoundaryVertexDeletion = 1;
   this->InflectionPointRatio = 10.0;
+  this->OutputPointsPrecision = DEFAULT_PRECISION;
 
   this->Queue = NULL;
   this->VertexError = NULL;
@@ -215,7 +216,22 @@ int vtkDecimatePro::RequestData(
     if (this->Mesh != NULL) {this->Mesh->Delete(); this->Mesh = NULL;}
     this->Mesh = vtkPolyData::New();
 
-    newPts = vtkPoints::New(); newPts->SetNumberOfPoints(numPts);
+    newPts = vtkPoints::New();
+
+    if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+      {
+      newPts->SetDataType(inPts->GetDataType());
+      }
+    else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+      {
+      newPts->SetDataType(VTK_FLOAT);
+      }
+    else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+      {
+      newPts->SetDataType(VTK_DOUBLE);
+      }
+
+    newPts->SetNumberOfPoints(numPts);
     newPts->DeepCopy(inPts);
     this->Mesh->SetPoints(newPts);
     newPts->Delete(); //registered by Mesh and preserved
@@ -1730,4 +1746,6 @@ void vtkDecimatePro::PrintSelf(ostream& os, vtkIndent indent)
      << this->InflectionPointRatio << "\n";
   os << indent << "Number Of Inflection Points: "
      << this->GetNumberOfInflectionPoints() << "\n";
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Core/vtkDecimatePro.h b/Filters/Core/vtkDecimatePro.h
index 13dbe4a..89d9a22 100644
--- a/Filters/Core/vtkDecimatePro.h
+++ b/Filters/Core/vtkDecimatePro.h
@@ -229,6 +229,13 @@ public:
   // This method returns a pointer to a list of inflection points.
   double *GetInflectionPoints();
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkDecimatePro();
   ~vtkDecimatePro();
@@ -249,6 +256,7 @@ protected:
   int Degree;
   double InflectionPointRatio;
   vtkDoubleArray *InflectionPoints;
+  int OutputPointsPrecision;
 
   // to replace a static object
   vtkIdList *Neighbors;
diff --git a/Filters/Core/vtkDelaunay3D.cxx b/Filters/Core/vtkDelaunay3D.cxx
index c97a87c..4e50b37 100644
--- a/Filters/Core/vtkDelaunay3D.cxx
+++ b/Filters/Core/vtkDelaunay3D.cxx
@@ -136,6 +136,7 @@ vtkDelaunay3D::vtkDelaunay3D()
   this->Tolerance = 0.001;
   this->BoundingTriangulation = 0;
   this->Offset = 2.5;
+  this->OutputPointsPrecision = DEFAULT_PRECISION;
   this->Locator = NULL;
   this->TetraArray = NULL;
 
@@ -451,6 +452,25 @@ int vtkDelaunay3D::RequestData(
   // Initialize mesh structure.
   input->GetCenter(center);
   tol = input->GetLength();
+
+  points = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    points->SetDataType(inPoints->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    points->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    }
+
+  points->Allocate(numPoints+6);
+
   Mesh = this->InitPointInsertion(center, this->Offset*tol,
                                   numPoints, points);
 
@@ -678,7 +698,15 @@ int vtkDelaunay3D::RequestData(
     }
   else
     {
-    output->SetPoints(inPoints);
+    if (inPoints->GetDataType() != points->GetDataType())
+      {
+      points->DeepCopy(inPoints);
+      output->SetPoints(points);
+      }
+    else
+      {
+      output->SetPoints(inPoints);
+      }
     output->GetPointData()->PassData(input->GetPointData());
     }
 
@@ -725,9 +753,6 @@ vtkUnstructuredGrid *vtkDelaunay3D::InitPointInsertion(double center[3],
   this->NumberOfDuplicatePoints = 0;
   this->NumberOfDegeneracies = 0;
 
-  points = vtkPoints::New();
-  points->Allocate(numPtsToInsert+6);
-
   if ( length <= 0.0 )
     {
     length = 1.0;
@@ -986,6 +1011,8 @@ void vtkDelaunay3D::PrintSelf(ostream& os, vtkIndent indent)
     {
     os << indent << "Locator: (none)\n";
     }
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
 
 void vtkDelaunay3D::EndPointInsertion()
diff --git a/Filters/Core/vtkDelaunay3D.h b/Filters/Core/vtkDelaunay3D.h
index 28b642d..7c1561b 100644
--- a/Filters/Core/vtkDelaunay3D.h
+++ b/Filters/Core/vtkDelaunay3D.h
@@ -184,6 +184,13 @@ public:
   // Return the MTime also considering the locator.
   unsigned long GetMTime();
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkDelaunay3D();
   ~vtkDelaunay3D();
@@ -194,6 +201,7 @@ protected:
   double Tolerance;
   int BoundingTriangulation;
   double Offset;
+  int OutputPointsPrecision;
 
   vtkIncrementalPointLocator *Locator;  //help locate points faster
 
diff --git a/Filters/Core/vtkFeatureEdges.cxx b/Filters/Core/vtkFeatureEdges.cxx
index 5791f0a..2e0afe0 100644
--- a/Filters/Core/vtkFeatureEdges.cxx
+++ b/Filters/Core/vtkFeatureEdges.cxx
@@ -43,6 +43,7 @@ vtkFeatureEdges::vtkFeatureEdges()
   this->ManifoldEdges = 0;
   this->Coloring = 1;
   this->Locator = NULL;
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 vtkFeatureEdges::~vtkFeatureEdges()
@@ -163,6 +164,21 @@ int vtkFeatureEdges::RequestData(
   // Allocate storage for lines/points (arbitrary allocation sizes)
   //
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(inPts->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->Allocate(numPts/10,numPts);
   newLines = vtkCellArray::New();
   newLines->Allocate(numPts/10);
@@ -449,4 +465,6 @@ void vtkFeatureEdges::PrintSelf(ostream& os, vtkIndent indent)
     {
     os << indent << "Locator: (none)\n";
     }
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Core/vtkFeatureEdges.h b/Filters/Core/vtkFeatureEdges.h
index 43c8d08..bb01d70 100644
--- a/Filters/Core/vtkFeatureEdges.h
+++ b/Filters/Core/vtkFeatureEdges.h
@@ -100,6 +100,13 @@ public:
   // Return MTime also considering the locator.
   unsigned long GetMTime();
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkFeatureEdges();
   ~vtkFeatureEdges();
@@ -114,6 +121,7 @@ protected:
   int NonManifoldEdges;
   int ManifoldEdges;
   int Coloring;
+  int OutputPointsPrecision;
   vtkIncrementalPointLocator *Locator;
 private:
   vtkFeatureEdges(const vtkFeatureEdges&);  // Not implemented.
diff --git a/Filters/Core/vtkGlyph2D.h b/Filters/Core/vtkGlyph2D.h
index 23e65ce..1de1544 100644
--- a/Filters/Core/vtkGlyph2D.h
+++ b/Filters/Core/vtkGlyph2D.h
@@ -44,8 +44,8 @@ public:
   static vtkGlyph2D *New();
 
 protected:
-  vtkGlyph2D() {};
-  ~vtkGlyph2D() {};
+  vtkGlyph2D() {}
+  ~vtkGlyph2D() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Core/vtkGlyph3D.cxx b/Filters/Core/vtkGlyph3D.cxx
index 27a9418..6a78194 100644
--- a/Filters/Core/vtkGlyph3D.cxx
+++ b/Filters/Core/vtkGlyph3D.cxx
@@ -23,6 +23,7 @@
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkMath.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
 #include "vtkPolyData.h"
@@ -131,7 +132,7 @@ int vtkGlyph3D::RequestData(
   vtkDataArray *newTCoords = NULL;
   double x[3], v[3], vNew[3], s = 0.0, vMag = 0.0, value, tc[3];
   vtkTransform *trans = vtkTransform::New();
-  vtkCell *cell;
+  vtkNew<vtkIdList> pointIdList;
   vtkIdList *cellPts;
   int npts;
   vtkIdList *pts;
@@ -144,7 +145,10 @@ int vtkGlyph3D::RequestData(
   vtkPolyData *defaultSource = NULL;
   vtkIdTypeArray *pointIds=0;
   vtkPolyData *source = this->GetSource(0, inputVector[1]);
-;
+  vtkNew<vtkIdList> srcPointIdList;
+  vtkNew<vtkIdList> dstPointIdList;
+  vtkNew<vtkIdList> srcCellIdList;
+  vtkNew<vtkIdList> dstCellIdList;
 
   vtkDebugMacro(<<"Generating glyphs");
 
@@ -309,6 +313,11 @@ int vtkGlyph3D::RequestData(
       }
     }
 
+  srcPointIdList->SetNumberOfIds(numSourcePts);
+  dstPointIdList->SetNumberOfIds(numSourcePts);
+  srcCellIdList->SetNumberOfIds(numSourceCells);
+  dstCellIdList->SetNumberOfIds(numSourceCells);
+
   newPts = vtkPoints::New();
   newPts->Allocate(numPts*numSourcePts);
   if ( this->GeneratePointIds )
@@ -517,14 +526,14 @@ int vtkGlyph3D::RequestData(
     // Copy all topology (transformation independent)
     for (cellId=0; cellId < numSourceCells; cellId++)
       {
-      cell = source->GetCell(cellId);
-      cellPts = cell->GetPointIds();
+      source->GetCellPoints(cellId, pointIdList.GetPointer());
+      cellPts = pointIdList.GetPointer();
       npts = cellPts->GetNumberOfIds();
       for (pts->Reset(), i=0; i < npts; i++)
         {
-        pts->InsertId(i,cellPts->GetId(i) + ptIncr);
+        pts->InsertId(i, cellPts->GetId(i) + ptIncr);
         }
-      output->InsertNextCell(cell->GetCellType(),pts);
+      output->InsertNextCell(source->GetCellType(cellId), pts);
       }
 
     // translate Source to Input point
@@ -640,16 +649,22 @@ int vtkGlyph3D::RequestData(
     // Copy point data from source (if possible)
     if ( pd )
       {
-      for (i=0; i < numSourcePts; i++)
+      for (i = 0; i < numSourcePts; ++i)
         {
-        outputPD->CopyData(pd,inPtId,ptIncr+i);
+        srcPointIdList->SetId(i, inPtId);
+        dstPointIdList->SetId(i, ptIncr + i);
         }
+      outputPD->CopyData(pd, srcPointIdList.GetPointer(),
+                         dstPointIdList.GetPointer());
       if (this->FillCellData)
         {
-        for (i=0; i < numSourceCells; i++)
+        for (i = 0; i < numSourceCells; ++i)
           {
-          outputCD->CopyData(pd,inPtId,cellIncr+i);
+          srcCellIdList->SetId(i, inPtId);
+          dstCellIdList->SetId(i, cellIncr + i);
           }
+        outputCD->CopyData(pd, srcCellIdList.GetPointer(),
+                           dstCellIdList.GetPointer());
         }
       }
 
diff --git a/Filters/Core/vtkGridSynchronizedTemplates3D.cxx b/Filters/Core/vtkGridSynchronizedTemplates3D.cxx
index bc16ce3..feeaca6 100644
--- a/Filters/Core/vtkGridSynchronizedTemplates3D.cxx
+++ b/Filters/Core/vtkGridSynchronizedTemplates3D.cxx
@@ -130,6 +130,10 @@ static void vtkGridSynchronizedTemplates3DInitializeOutput(
       {
       newPts->SetDataType(inputPointSet->GetPoints()->GetDataType());
       }
+    else
+      {
+      newPts->SetDataType(VTK_FLOAT);
+      }
     }
   else if(precision == vtkAlgorithm::SINGLE_PRECISION)
     {
diff --git a/Filters/Core/vtkGridSynchronizedTemplates3D.h b/Filters/Core/vtkGridSynchronizedTemplates3D.h
index 94e4cbc..95a38da 100644
--- a/Filters/Core/vtkGridSynchronizedTemplates3D.h
+++ b/Filters/Core/vtkGridSynchronizedTemplates3D.h
@@ -31,7 +31,6 @@
 #include "vtkPolyDataAlgorithm.h"
 #include "vtkContourValues.h" // Because it passes all the calls to it
 
-class vtkKitwareContourFilter;
 class vtkStructuredGrid;
 
 class VTKFILTERSCORE_EXPORT vtkGridSynchronizedTemplates3D : public vtkPolyDataAlgorithm
diff --git a/Filters/Core/vtkHedgeHog.cxx b/Filters/Core/vtkHedgeHog.cxx
index e5e940e..2ece088 100644
--- a/Filters/Core/vtkHedgeHog.cxx
+++ b/Filters/Core/vtkHedgeHog.cxx
@@ -28,6 +28,7 @@ vtkHedgeHog::vtkHedgeHog()
 {
   this->ScaleFactor = 1.0;
   this->VectorMode = VTK_USE_VECTOR;
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 int vtkHedgeHog::RequestData(
@@ -82,7 +83,31 @@ int vtkHedgeHog::RequestData(
     }
   outputPD->CopyAllocate(pd, 2*numPts);
 
-  newPts = vtkPoints::New(); newPts->SetNumberOfPoints(2*numPts);
+  newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    vtkPointSet *inputPointSet = vtkPointSet::SafeDownCast(input);
+    if(inputPointSet)
+      {
+      newPts->SetDataType(inputPointSet->GetPoints()->GetDataType());
+      }
+    else
+      {
+      newPts->SetDataType(VTK_FLOAT);
+      }
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
+  newPts->SetNumberOfPoints(2*numPts);
   newLines = vtkCellArray::New();
   newLines->Allocate(newLines->EstimateSize(numPts,2));
 
@@ -149,4 +174,5 @@ void vtkHedgeHog::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Scale Factor: " << this->ScaleFactor << "\n";
   os << indent << "Orient Mode: " << (this->VectorMode == VTK_USE_VECTOR ?
                                        "Orient by vector\n" : "Orient by normal\n");
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Core/vtkHedgeHog.h b/Filters/Core/vtkHedgeHog.h
index 68588ba..eb21f9c 100644
--- a/Filters/Core/vtkHedgeHog.h
+++ b/Filters/Core/vtkHedgeHog.h
@@ -48,14 +48,22 @@ public:
   void SetVectorModeToUseNormal() {this->SetVectorMode(VTK_USE_NORMAL);};
   const char *GetVectorModeAsString();
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkHedgeHog();
-  ~vtkHedgeHog() {};
+  ~vtkHedgeHog() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
   double ScaleFactor;
   int VectorMode; // Orient/scale via normal or via vector data
+  int OutputPointsPrecision;
 
 private:
   vtkHedgeHog(const vtkHedgeHog&);  // Not implemented.
diff --git a/Filters/Core/vtkImageAppend.cxx b/Filters/Core/vtkImageAppend.cxx
index 8d3900b..4ed4d36 100644
--- a/Filters/Core/vtkImageAppend.cxx
+++ b/Filters/Core/vtkImageAppend.cxx
@@ -89,7 +89,7 @@ vtkDataObject *vtkImageAppend::GetInput(int idx)
 }
 
 //----------------------------------------------------------------------------
-// This method tells the ouput it will have more components
+// This method tells the output it will have more components
 int vtkImageAppend::RequestInformation (
   vtkInformation * vtkNotUsed(request),
   vtkInformationVector **inputVector,
@@ -264,7 +264,7 @@ int vtkImageAppend::RequestUpdateExtent(
 }
 
 //----------------------------------------------------------------------------
-void vtkImageAppendGetContinuousIncrements
+static void vtkImageAppendGetContinuousIncrements
   (int wExtent[6], int sExtent[6], vtkIdType nComp, bool forCells,
    vtkIdType &incX,
    vtkIdType &incY,
diff --git a/Filters/Core/vtkMaskPoints.cxx b/Filters/Core/vtkMaskPoints.cxx
index ce6d95c..92e03ef 100644
--- a/Filters/Core/vtkMaskPoints.cxx
+++ b/Filters/Core/vtkMaskPoints.cxx
@@ -38,6 +38,7 @@ vtkMaskPoints::vtkMaskPoints()
   this->SingleVertexPerCell = 0;
   this->RandomModeType = 0;
   this->ProportionalMaximumNumberOfPoints = 0;
+  this->OutputPointsPrecision = DEFAULT_PRECISION;
 }
 
 inline double d_rand()
@@ -69,13 +70,13 @@ inline void SwapPoint(vtkPoints* points,
 // this is an average-case linear, worst-case quadratic implementation
 // (i.e., just like quicksort) -- there is the median of 5 or
 // median of medians algorithm, but I'm too lazy to implement it
-void QuickSelect(vtkPoints* points,
-                 vtkPointData* data,
-                 vtkPointData* temp,
-                 vtkIdType start,
-                 vtkIdType end,
-                 vtkIdType nth,
-                 int axis)
+static void QuickSelect(vtkPoints* points,
+                        vtkPointData* data,
+                        vtkPointData* temp,
+                        vtkIdType start,
+                        vtkIdType end,
+                        vtkIdType nth,
+                        int axis)
 {
   // base case
   if(end - start < 2)
@@ -125,10 +126,10 @@ void QuickSelect(vtkPoints* points,
 
 // divide the data into sampling strata and randomly sample it
 // (one sample per stratum)
-void SortAndSample(vtkPoints* points, vtkPointData* data,
-                   vtkPointData* temp,
-                   vtkIdType start, vtkIdType end,
-                   vtkIdType size, int depth)
+static void SortAndSample(vtkPoints* points, vtkPointData* data,
+                          vtkPointData* temp,
+                          vtkIdType start, vtkIdType end,
+                          vtkIdType size, int depth)
 {
   // if size >= end - start return them all
   if(size >= (end - start))
@@ -318,6 +319,13 @@ int vtkMaskPoints::RequestData(
   vtkIdType ptId, id = 0;
   vtkPointData *outputPD = output->GetPointData();
   vtkIdType numPts = input->GetNumberOfPoints();
+
+  if(numPts < 1)
+    {
+    vtkErrorMacro(<<"No points to mask");
+    return 1;
+    }
+
   int abort = 0;
 
   // figure out how many sample points per process
@@ -342,6 +350,29 @@ int vtkMaskPoints::RequestData(
 
   // Allocate space
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    vtkPointSet *inputPointSet = vtkPointSet::SafeDownCast(input);
+    if(inputPointSet)
+      {
+      newPts->SetDataType(inputPointSet->GetPoints()->GetDataType());
+      }
+    else
+      {
+      newPts->SetDataType(VTK_FLOAT);
+      }
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->Allocate(numNewPts);
   outputPD->CopyAllocate(pd, numNewPts);
 
@@ -462,6 +493,29 @@ int vtkMaskPoints::RequestData(
       {
       // need to copy the entire data to sort it, to leave the original intact
       vtkPoints* pointCopy = vtkPoints::New();
+
+      // Set the desired precision for the points.
+      if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+        {
+        vtkPointSet *inputPointSet = vtkPointSet::SafeDownCast(input);
+        if(inputPointSet)
+          {
+          pointCopy->SetDataType(inputPointSet->GetPoints()->GetDataType());
+          }
+        else
+          {
+          pointCopy->SetDataType(VTK_FLOAT);
+          }
+        }
+      else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+        {
+        pointCopy->SetDataType(VTK_FLOAT);
+        }
+      else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+        {
+        pointCopy->SetDataType(VTK_DOUBLE);
+        }
+
       vtkPointData* dataCopy = vtkPointData::New();
       vtkPointData* tempData = vtkPointData::New();
 
@@ -583,4 +637,6 @@ void vtkMaskPoints::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Random Mode Type: " << this->RandomModeType << "\n";
   os << indent << "Proportional Maximum Number of Points: " <<
                   this->ProportionalMaximumNumberOfPoints << "\n";
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Core/vtkMaskPoints.h b/Filters/Core/vtkMaskPoints.h
index beafb6b..f6212a3 100644
--- a/Filters/Core/vtkMaskPoints.h
+++ b/Filters/Core/vtkMaskPoints.h
@@ -115,9 +115,16 @@ public:
   vtkGetMacro(SingleVertexPerCell,int);
   vtkBooleanMacro(SingleVertexPerCell,int);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkMaskPoints();
-  ~vtkMaskPoints() {};
+  ~vtkMaskPoints() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **,
                           vtkInformationVector *);
@@ -131,12 +138,13 @@ protected:
   int SingleVertexPerCell;
   int RandomModeType; // choose the random sampling mode
   int ProportionalMaximumNumberOfPoints;
+  int OutputPointsPrecision;
 
-  virtual void InternalScatter(unsigned long*, unsigned long *, int, int) {};
-  virtual void InternalGather(unsigned long*, unsigned long*, int, int) {};
+  virtual void InternalScatter(unsigned long*, unsigned long *, int, int) {}
+  virtual void InternalGather(unsigned long*, unsigned long*, int, int) {}
   virtual int InternalGetNumberOfProcesses() { return 1; };
   virtual int InternalGetLocalProcessId() { return 0; };
-  virtual void InternalBarrier() {};
+  virtual void InternalBarrier() {}
   unsigned long GetLocalSampleSize(vtkIdType, int);
 
 private:
diff --git a/Filters/Core/vtkMaskPolyData.h b/Filters/Core/vtkMaskPolyData.h
index 154b9e6..2ef6a3e 100644
--- a/Filters/Core/vtkMaskPolyData.h
+++ b/Filters/Core/vtkMaskPolyData.h
@@ -46,7 +46,7 @@ public:
 
 protected:
   vtkMaskPolyData();
-  ~vtkMaskPolyData() {};
+  ~vtkMaskPolyData() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   int OnRatio; // every OnRatio entity is on; all others are off.
diff --git a/Filters/Core/vtkPointDataToCellData.h b/Filters/Core/vtkPointDataToCellData.h
index acd6e3b..ab39656 100644
--- a/Filters/Core/vtkPointDataToCellData.h
+++ b/Filters/Core/vtkPointDataToCellData.h
@@ -53,7 +53,7 @@ public:
 
 protected:
   vtkPointDataToCellData();
-  ~vtkPointDataToCellData() {};
+  ~vtkPointDataToCellData() {}
 
   virtual int RequestData(vtkInformation* request,
                           vtkInformationVector** inputVector,
diff --git a/Filters/Core/vtkPolyDataConnectivityFilter.cxx b/Filters/Core/vtkPolyDataConnectivityFilter.cxx
index bf8ab09..4b3bcbb 100644
--- a/Filters/Core/vtkPolyDataConnectivityFilter.cxx
+++ b/Filters/Core/vtkPolyDataConnectivityFilter.cxx
@@ -54,6 +54,8 @@ vtkPolyDataConnectivityFilter::vtkPolyDataConnectivityFilter()
 
   this->MarkVisitedPointIds = 0;
   this->VisitedPointIds = vtkIdList::New();
+
+  this->OutputPointsPrecision = DEFAULT_PRECISION;
 }
 
 vtkPolyDataConnectivityFilter::~vtkPolyDataConnectivityFilter()
@@ -157,6 +159,21 @@ int vtkPolyDataConnectivityFilter::RequestData(
   this->NewScalars->SetName("RegionId");
   this->NewScalars->SetNumberOfTuples(numPts);
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(inPts->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->Allocate(numPts);
 
   // Traverse all cells marking those visited.  Each new search
@@ -651,4 +668,6 @@ void vtkPolyDataConnectivityFilter::PrintSelf(ostream& os, vtkIndent indent)
     os << indent << indent
        << id << ": " << this->RegionSizes->GetValue(id) << std::endl;
     }
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Core/vtkPolyDataConnectivityFilter.h b/Filters/Core/vtkPolyDataConnectivityFilter.h
index a1a4938..51dcd2c 100644
--- a/Filters/Core/vtkPolyDataConnectivityFilter.h
+++ b/Filters/Core/vtkPolyDataConnectivityFilter.h
@@ -173,6 +173,13 @@ public:
   // has been set.
   vtkGetObjectMacro( VisitedPointIds, vtkIdList );
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkPolyDataConnectivityFilter();
   ~vtkPolyDataConnectivityFilter();
@@ -216,6 +223,7 @@ protected:
   vtkIdList *VisitedPointIds;
 
   int MarkVisitedPointIds;
+  int OutputPointsPrecision;
 
 private:
   vtkPolyDataConnectivityFilter(const vtkPolyDataConnectivityFilter&);  // Not implemented.
diff --git a/Filters/Core/vtkPolyDataNormals.cxx b/Filters/Core/vtkPolyDataNormals.cxx
index fa36de5..01933b2 100644
--- a/Filters/Core/vtkPolyDataNormals.cxx
+++ b/Filters/Core/vtkPolyDataNormals.cxx
@@ -374,6 +374,10 @@ int vtkPolyDataNormals::RequestData(
         {
         newPts->SetDataType(inputPointSet->GetPoints()->GetDataType());
         }
+      else
+        {
+        newPts->SetDataType(VTK_FLOAT);
+        }
       }
     else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
       {
diff --git a/Filters/Core/vtkPolyDataNormals.h b/Filters/Core/vtkPolyDataNormals.h
index 29aa866..2e06cb1 100644
--- a/Filters/Core/vtkPolyDataNormals.h
+++ b/Filters/Core/vtkPolyDataNormals.h
@@ -137,7 +137,7 @@ public:
 
 protected:
   vtkPolyDataNormals();
-  ~vtkPolyDataNormals() {};
+  ~vtkPolyDataNormals() {}
 
   // Usual data generation method
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Core/vtkRectilinearSynchronizedTemplates.h b/Filters/Core/vtkRectilinearSynchronizedTemplates.h
index a8e9251..3661556 100644
--- a/Filters/Core/vtkRectilinearSynchronizedTemplates.h
+++ b/Filters/Core/vtkRectilinearSynchronizedTemplates.h
@@ -33,7 +33,6 @@
 #include "vtkContourValues.h" // Passes calls through
 
 class vtkRectilinearGrid;
-class vtkKitwareContourFilter;
 class vtkDataArray;
 
 class VTKFILTERSCORE_EXPORT vtkRectilinearSynchronizedTemplates : public vtkPolyDataAlgorithm
diff --git a/Filters/Core/vtkReverseSense.h b/Filters/Core/vtkReverseSense.h
index c3ab9af..a197dec 100644
--- a/Filters/Core/vtkReverseSense.h
+++ b/Filters/Core/vtkReverseSense.h
@@ -57,7 +57,7 @@ public:
 
 protected:
   vtkReverseSense();
-  ~vtkReverseSense() {};
+  ~vtkReverseSense() {}
 
   // Usual data generation method
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Core/vtkSimpleElevationFilter.h b/Filters/Core/vtkSimpleElevationFilter.h
index 52193f1..650b7d8 100644
--- a/Filters/Core/vtkSimpleElevationFilter.h
+++ b/Filters/Core/vtkSimpleElevationFilter.h
@@ -44,7 +44,7 @@ public:
 
 protected:
   vtkSimpleElevationFilter();
-  ~vtkSimpleElevationFilter() {};
+  ~vtkSimpleElevationFilter() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double Vector[3];
diff --git a/Filters/Core/vtkSmoothPolyDataFilter.cxx b/Filters/Core/vtkSmoothPolyDataFilter.cxx
index ee242ed..faf19f9 100644
--- a/Filters/Core/vtkSmoothPolyDataFilter.cxx
+++ b/Filters/Core/vtkSmoothPolyDataFilter.cxx
@@ -129,6 +129,8 @@ vtkSmoothPolyDataFilter::vtkSmoothPolyDataFilter()
   this->GenerateErrorScalars = 0;
   this->GenerateErrorVectors = 0;
 
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
+
   // optional second input
   this->SetNumberOfInputPorts(2);
 }
@@ -517,6 +519,21 @@ int vtkSmoothPolyDataFilter::RequestData(
   // We've setup the topology...now perform Laplacian smoothing
   //
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(inPts->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->SetNumberOfPoints(numPts);
 
   // If Source defined, we do constrained smoothing (that is, points are
@@ -724,4 +741,5 @@ void vtkSmoothPolyDataFilter::PrintSelf(ostream& os, vtkIndent indent)
     os << indent << "Source (none)\n";
     }
 
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Core/vtkSmoothPolyDataFilter.h b/Filters/Core/vtkSmoothPolyDataFilter.h
index 0e048a9..c757f95 100644
--- a/Filters/Core/vtkSmoothPolyDataFilter.h
+++ b/Filters/Core/vtkSmoothPolyDataFilter.h
@@ -171,9 +171,16 @@ public:
   void SetSourceData(vtkPolyData *source);
   vtkPolyData *GetSource();
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkSmoothPolyDataFilter();
-  ~vtkSmoothPolyDataFilter() {};
+  ~vtkSmoothPolyDataFilter() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
@@ -187,6 +194,7 @@ protected:
   int BoundarySmoothing;
   int GenerateErrorScalars;
   int GenerateErrorVectors;
+  int OutputPointsPrecision;
 
   vtkSmoothPoints *SmoothPoints;
 private:
diff --git a/Filters/Core/vtkStructuredGridOutlineFilter.h b/Filters/Core/vtkStructuredGridOutlineFilter.h
index 448f9b4..899d1a7 100644
--- a/Filters/Core/vtkStructuredGridOutlineFilter.h
+++ b/Filters/Core/vtkStructuredGridOutlineFilter.h
@@ -31,8 +31,8 @@ public:
   vtkTypeMacro(vtkStructuredGridOutlineFilter,vtkPolyDataAlgorithm);
 
 protected:
-  vtkStructuredGridOutlineFilter() {};
-  ~vtkStructuredGridOutlineFilter() {};
+  vtkStructuredGridOutlineFilter() {}
+  ~vtkStructuredGridOutlineFilter() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
diff --git a/Filters/Core/vtkSynchronizedTemplates3D.cxx b/Filters/Core/vtkSynchronizedTemplates3D.cxx
index 10bc34e..4cecad6 100644
--- a/Filters/Core/vtkSynchronizedTemplates3D.cxx
+++ b/Filters/Core/vtkSynchronizedTemplates3D.cxx
@@ -21,6 +21,7 @@
 #include "vtkExtentTranslator.h"
 #include "vtkFloatArray.h"
 #include "vtkInformation.h"
+#include "vtkInformationIntegerVectorKey.h"
 #include "vtkInformationVector.h"
 #include "vtkIntArray.h"
 #include "vtkLongArray.h"
@@ -42,6 +43,8 @@
 
 vtkStandardNewMacro(vtkSynchronizedTemplates3D);
 
+vtkInformationKeyRestrictedMacro(vtkSynchronizedTemplates3D, EXECUTE_EXTENT, IntegerVector, 6);
+
 //----------------------------------------------------------------------------
 // Description:
 // Construct object with initial scalar range (0,1) and single contour value
@@ -54,10 +57,6 @@ vtkSynchronizedTemplates3D::vtkSynchronizedTemplates3D()
   this->ComputeScalars = 1;
   this->GenerateTriangles = 1;
 
-  this->ExecuteExtent[0] = this->ExecuteExtent[1]
-    = this->ExecuteExtent[2] = this->ExecuteExtent[3]
-    = this->ExecuteExtent[4] = this->ExecuteExtent[5] = 0;
-
   this->ArrayComponent = 0;
 
   // by default process active point scalars
@@ -85,7 +84,7 @@ unsigned long vtkSynchronizedTemplates3D::GetMTime()
 
 
 //----------------------------------------------------------------------------
-void vtkSynchronizedTemplates3DInitializeOutput(
+static void vtkSynchronizedTemplates3DInitializeOutput(
   int *ext,vtkImageData *input,
   vtkPolyData *o, vtkFloatArray *scalars, vtkFloatArray *normals,
   vtkFloatArray *gradients, vtkDataArray *inScalars)
@@ -691,7 +690,7 @@ unsigned long vtkSynchronizedTemplates3D::GetInputMemoryLimit()
 void vtkSynchronizedTemplates3D::ThreadedExecute(vtkImageData *data,
                                                  vtkInformation *inInfo,
                                                  vtkInformation *outInfo,
-                                                 int *exExt, vtkDataArray *inScalars)
+                                                 vtkDataArray *inScalars)
 {
   void *ptr;
   vtkPolyData *output;
@@ -700,6 +699,7 @@ void vtkSynchronizedTemplates3D::ThreadedExecute(vtkImageData *data,
 
   output = vtkPolyData::SafeDownCast(outInfo->Get(vtkDataObject::DATA_OBJECT()));
 
+  int* exExt = outInfo->Get(vtkSynchronizedTemplates3D::EXECUTE_EXTENT());
   if ( exExt[0] >= exExt[1] || exExt[2] >= exExt[3] || exExt[4] >= exExt[5] )
     {
     vtkDebugMacro(<<"3D structured contours requires 3D data");
@@ -754,7 +754,7 @@ int vtkSynchronizedTemplates3D::RequestData(
   vtkDataArray *inScalars = this->GetInputArrayToProcess(0,inputVector);
 
   // Just call the threaded execute directly.
-  this->ThreadedExecute(input, inInfo, outInfo, this->ExecuteExtent, inScalars);
+  this->ThreadedExecute(input, inInfo, outInfo, inScalars);
 
   output->Squeeze();
 
@@ -780,6 +780,12 @@ int vtkSynchronizedTemplates3D::RequestUpdateExtent(
   wholeExt =
     inInfo->Get(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT());
 
+  if (!wholeExt)
+    {
+    return 1;
+    }
+
+
   // Get request from output
   piece =
     outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER());
@@ -813,13 +819,7 @@ int vtkSynchronizedTemplates3D::RequestUpdateExtent(
   // This is the region that we are really updating, although
   // we may require a larger input region in order to generate
   // it if normals / gradients are being computed
-
-  this->ExecuteExtent[0] = ext[0];
-  this->ExecuteExtent[1] = ext[1];
-  this->ExecuteExtent[2] = ext[2];
-  this->ExecuteExtent[3] = ext[3];
-  this->ExecuteExtent[4] = ext[4];
-  this->ExecuteExtent[5] = ext[5];
+  outInfo->Set(vtkSynchronizedTemplates3D::EXECUTE_EXTENT(), ext, 6);
 
   // expand if we need to compute gradients
   if (this->ComputeGradients || this->ComputeNormals)
diff --git a/Filters/Core/vtkSynchronizedTemplates3D.h b/Filters/Core/vtkSynchronizedTemplates3D.h
index f65de40..290b4dc 100644
--- a/Filters/Core/vtkSynchronizedTemplates3D.h
+++ b/Filters/Core/vtkSynchronizedTemplates3D.h
@@ -124,12 +124,9 @@ public:
   void GenerateValues(int numContours, double rangeStart, double rangeEnd)
     {this->ContourValues->GenerateValues(numContours, rangeStart, rangeEnd);}
 
-  // Description:
-  // Needed by templated functions.
-  int *GetExecuteExtent() {return this->ExecuteExtent;}
   void ThreadedExecute(vtkImageData *data, vtkInformation *inInfo,
                        vtkInformation *outInfo,
-                       int *exExt, vtkDataArray *inScalars);
+                       vtkDataArray *inScalars);
 
   // Description:
   // Determines the chunk size for streaming.  This filter will act like a
@@ -156,11 +153,12 @@ protected:
   virtual int RequestUpdateExtent(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
 
-  int ExecuteExtent[6];
-
   int ArrayComponent;
 
   int GenerateTriangles;
+
+  static vtkInformationIntegerVectorKey* EXECUTE_EXTENT();
+
 private:
   vtkSynchronizedTemplates3D(const vtkSynchronizedTemplates3D&);  // Not implemented.
   void operator=(const vtkSynchronizedTemplates3D&);  // Not implemented.
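The vtkSynchronizedTemplates3D hunks above replace the old ExecuteExtent[6]
member with a restricted pipeline information key, so the extent computed in
RequestUpdateExtent() travels with the output information instead of living
on the filter. The fragments below are condensed from the hunks above (no API
beyond what the patch itself introduces):

    // Header, protected section: declare the key accessor.
    static vtkInformationIntegerVectorKey* EXECUTE_EXTENT();

    // Implementation file: define a 6-component integer-vector key.
    vtkInformationKeyRestrictedMacro(vtkSynchronizedTemplates3D, EXECUTE_EXTENT,
                                     IntegerVector, 6);

    // RequestUpdateExtent(): store the computed extent on the output
    // information object instead of in the removed member array.
    outInfo->Set(vtkSynchronizedTemplates3D::EXECUTE_EXTENT(), ext, 6);

    // ThreadedExecute() (and the Cutter3D variant further below): read the
    // extent back, so the old exExt argument is no longer needed.
    int* exExt = outInfo->Get(vtkSynchronizedTemplates3D::EXECUTE_EXTENT());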
diff --git a/Filters/Core/vtkSynchronizedTemplatesCutter3D.cxx b/Filters/Core/vtkSynchronizedTemplatesCutter3D.cxx
index 028abbe..64e7989 100644
--- a/Filters/Core/vtkSynchronizedTemplatesCutter3D.cxx
+++ b/Filters/Core/vtkSynchronizedTemplatesCutter3D.cxx
@@ -62,7 +62,7 @@ vtkSynchronizedTemplatesCutter3D::~vtkSynchronizedTemplatesCutter3D()
 }
 
 //----------------------------------------------------------------------------
-void vtkSynchronizedTemplatesCutter3DInitializeOutput(
+static void vtkSynchronizedTemplatesCutter3DInitializeOutput(
   int *ext, int precision, vtkImageData *input, vtkPolyData *o)
 {
   vtkPoints *newPts;
@@ -85,6 +85,10 @@ void vtkSynchronizedTemplatesCutter3DInitializeOutput(
       {
       newPts->SetDataType(inputPointSet->GetPoints()->GetDataType());
       }
+    else
+      {
+      newPts->SetDataType(VTK_FLOAT);
+      }
     }
   else if(precision == vtkAlgorithm::SINGLE_PRECISION)
     {
@@ -536,8 +540,8 @@ void ContourImage(vtkSynchronizedTemplatesCutter3D *self, int *exExt,
 // Contouring filter specialized for images (or slices from images)
 //
 void vtkSynchronizedTemplatesCutter3D::ThreadedExecute(vtkImageData *data,
-                                                 vtkInformation *outInfo,
-                                                 int *exExt, int)
+                                                       vtkInformation *outInfo,
+                                                       int)
 {
   vtkPolyData *output;
 
@@ -545,6 +549,7 @@ void vtkSynchronizedTemplatesCutter3D::ThreadedExecute(vtkImageData *data,
 
   output = vtkPolyData::SafeDownCast(outInfo->Get(vtkDataObject::DATA_OBJECT()));
 
+  int* exExt = outInfo->Get(vtkSynchronizedTemplates3D::EXECUTE_EXTENT());
   if ( exExt[0] >= exExt[1] || exExt[2] >= exExt[3] || exExt[4] >= exExt[5] )
     {
     vtkDebugMacro(<<"Cutter3D structured contours requires Cutter3D data");
@@ -576,7 +581,7 @@ int vtkSynchronizedTemplatesCutter3D::RequestData(
   this->RequestUpdateExtent(request,inputVector,outputVector);
 
   // Just call the threaded execute directly.
-  this->ThreadedExecute(input, outInfo, this->ExecuteExtent, 0);
+  this->ThreadedExecute(input, outInfo, 0);
 
   output->Squeeze();
 
diff --git a/Filters/Core/vtkSynchronizedTemplatesCutter3D.h b/Filters/Core/vtkSynchronizedTemplatesCutter3D.h
index 257717a..ce86a42 100644
--- a/Filters/Core/vtkSynchronizedTemplatesCutter3D.h
+++ b/Filters/Core/vtkSynchronizedTemplatesCutter3D.h
@@ -40,8 +40,7 @@ public:
 
   // Description:
   // Needed by templated functions.
-  void ThreadedExecute(vtkImageData *data, vtkInformation *outInfo,
-                       int *exExt, int);
+  void ThreadedExecute(vtkImageData *data, vtkInformation *outInfo, int);
 
   // Description
   // Specify the implicit function to perform the cutting.
diff --git a/Filters/Core/vtkThreshold.cxx b/Filters/Core/vtkThreshold.cxx
index 17ac022..083c3d2 100644
--- a/Filters/Core/vtkThreshold.cxx
+++ b/Filters/Core/vtkThreshold.cxx
@@ -25,6 +25,8 @@
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include "vtkMath.h"
 
+#include <algorithm>
+
 vtkStandardNewMacro(vtkThreshold);
 
 // Construct with lower threshold=0, upper threshold=1, and threshold
@@ -147,10 +149,14 @@ int vtkThreshold::RequestData(
   if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
     {
     vtkPointSet *inputPointSet = vtkPointSet::SafeDownCast(input);
-    if(inputPointSet)
+    if(inputPointSet && inputPointSet->GetPoints())
       {
       newPoints->SetDataType(inputPointSet->GetPoints()->GetDataType());
       }
+    else
+      {
+      newPoints->SetDataType(VTK_FLOAT);
+      }
     }
   else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
     {
diff --git a/Filters/Core/vtkThresholdPoints.cxx b/Filters/Core/vtkThresholdPoints.cxx
index c96a03a..f81abf0 100644
--- a/Filters/Core/vtkThresholdPoints.cxx
+++ b/Filters/Core/vtkThresholdPoints.cxx
@@ -32,6 +32,7 @@ vtkThresholdPoints::vtkThresholdPoints()
 {
   this->LowerThreshold = 0.0;
   this->UpperThreshold = 1.0;
+  this->OutputPointsPrecision = DEFAULT_PRECISION;
 
   this->ThresholdFunction = &vtkThresholdPoints::Upper;
 
@@ -147,7 +148,37 @@ int vtkThresholdPoints::RequestData(
     }
 
   numPts = input->GetNumberOfPoints();
+
+  if(numPts < 1)
+    {
+    vtkErrorMacro(<<"No points to threshold");
+    return 1;
+    }
+
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    vtkPointSet *inputPointSet = vtkPointSet::SafeDownCast(input);
+    if(inputPointSet)
+      {
+      newPoints->SetDataType(inputPointSet->GetPoints()->GetDataType());
+      }
+    else
+      {
+      newPoints->SetDataType(VTK_FLOAT);
+      }
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+
   newPoints->Allocate(numPts);
   pd = input->GetPointData();
   outPD = output->GetPointData();
@@ -203,4 +234,5 @@ void vtkThresholdPoints::PrintSelf(ostream& os, vtkIndent indent)
 
   os << indent << "Lower Threshold: " << this->LowerThreshold << "\n";;
   os << indent << "Upper Threshold: " << this->UpperThreshold << "\n";;
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Core/vtkThresholdPoints.h b/Filters/Core/vtkThresholdPoints.h
index 431c9ad..2084517 100644
--- a/Filters/Core/vtkThresholdPoints.h
+++ b/Filters/Core/vtkThresholdPoints.h
@@ -57,9 +57,16 @@ public:
   vtkSetMacro(LowerThreshold,double);
   vtkGetMacro(LowerThreshold,double);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkThresholdPoints();
-  ~vtkThresholdPoints() {};
+  ~vtkThresholdPoints() {}
 
   // Usual data generation method
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
@@ -68,6 +75,7 @@ protected:
 
   double LowerThreshold;
   double UpperThreshold;
+  int OutputPointsPrecision;
 
   //BTX
   int (vtkThresholdPoints::*ThresholdFunction)(double s);
diff --git a/Filters/Core/vtkTransposeTable.cxx b/Filters/Core/vtkTransposeTable.cxx
new file mode 100644
index 0000000..a35fb7f
--- /dev/null
+++ b/Filters/Core/vtkTransposeTable.cxx
@@ -0,0 +1,313 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTransposeTable.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkTransposeTable.h"
+
+#include "vtkAbstractArray.h"
+#include "vtkCharArray.h"
+#include "vtkDoubleArray.h"
+#include "vtkFloatArray.h"
+#include "vtkIdTypeArray.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkIntArray.h"
+#include "vtkLongArray.h"
+#include "vtkLongLongArray.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkSignedCharArray.h"
+#include "vtkShortArray.h"
+#include "vtkSmartPointer.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+#include "vtkUnsignedCharArray.h"
+#include "vtkUnsignedIntArray.h"
+#include "vtkUnsignedLongArray.h"
+#include "vtkUnsignedLongLongArray.h"
+#include "vtkUnsignedShortArray.h"
+#include "vtkVariantArray.h"
+
+#include <sstream>
+
+///////////////////////////////////////////////////////////////////////////////
+
+class vtkTransposeTableInternal
+{
+public:
+  vtkTransposeTableInternal(vtkTransposeTable* parent) : Parent(parent) {}
+
+  bool TransposeTable(vtkTable* inTable, vtkTable* outTable);
+
+protected:
+
+  bool InsertColumn(int, vtkAbstractArray*);
+
+  template<typename ArrayType, typename ValueType>
+  bool TransposeColumn(int, bool);
+
+  vtkTransposeTable* Parent;
+  vtkTable* InTable;
+  vtkTable* OutTable;
+};
+
+//----------------------------------------------------------------------------
+
+template<typename ArrayType, typename ValueType>
+bool vtkTransposeTableInternal::TransposeColumn(int columnId, bool useVariant)
+{
+  vtkAbstractArray* column = this->InTable->GetColumn(columnId);
+  ArrayType* typeColumn = ArrayType::SafeDownCast(column);
+  if (!typeColumn && !useVariant)
+    {
+    return false;
+    }
+
+  int numberOfRowsInTransposedColumn = this->InTable->GetNumberOfColumns();
+  if (this->Parent->GetUseIdColumn())
+    {
+    columnId--;
+    numberOfRowsInTransposedColumn--;
+    }
+
+  for (int r = 0; r < column->GetNumberOfTuples() *
+    column->GetNumberOfComponents(); ++r)
+    {
+    vtkSmartPointer<ArrayType> transposedColumn;
+    if (columnId == 0)
+      {
+      transposedColumn = vtkSmartPointer<ArrayType>::New();
+      transposedColumn->SetNumberOfValues(numberOfRowsInTransposedColumn);
+      this->OutTable->AddColumn(transposedColumn);
+      }
+    else
+      {
+      transposedColumn = ArrayType::SafeDownCast(this->OutTable->GetColumn(r));
+      }
+
+    if (!useVariant)
+      {
+      ValueType value = typeColumn->GetValue(r);
+      transposedColumn->SetValue(columnId, value);
+      }
+    else
+      {
+      vtkVariant value = column->GetVariantValue(r);
+      transposedColumn->SetVariantValue(columnId, value);
+      }
+    }
+  return true;
+}
+
+//----------------------------------------------------------------------------
+bool vtkTransposeTableInternal::InsertColumn(int pos, vtkAbstractArray* col)
+{
+  if (!col || ((this->OutTable->GetNumberOfRows() !=
+    col->GetNumberOfComponents() * col->GetNumberOfTuples()) &&
+    (this->OutTable->GetNumberOfRows() != 0)))
+    {
+    return false;
+    }
+
+  int nbColsOutTable = this->OutTable->GetNumberOfColumns();
+
+  vtkNew<vtkTable> updatedTable;
+  for (int c = 0; c < nbColsOutTable; c++)
+    {
+    vtkAbstractArray* column = this->OutTable->GetColumn(c);
+    if (c == pos)
+      {
+      updatedTable->AddColumn(col);
+      }
+    updatedTable->AddColumn(column);
+    }
+  if (pos == nbColsOutTable)
+    {
+    updatedTable->AddColumn(col);
+    }
+
+  this->OutTable->ShallowCopy(updatedTable.GetPointer());
+
+  return true;
+}
+
+//----------------------------------------------------------------------------
+bool vtkTransposeTableInternal::TransposeTable(vtkTable* inTable,
+                                               vtkTable* outTable)
+{
+  this->InTable = inTable;
+  this->OutTable = outTable;
+
+  int idColOffset = this->Parent->GetUseIdColumn() ? 1 : 0;
+
+  // Check column type consistency
+  bool useVariant = false;
+  vtkAbstractArray* firstCol = this->InTable->GetColumn(idColOffset);
+  for (int c = idColOffset; c < this->InTable->GetNumberOfColumns(); c++)
+    {
+    if (strcmp(firstCol->GetClassName(),
+      this->InTable->GetColumn(c)->GetClassName()) != 0)
+      {
+      useVariant = true;
+      break;
+      }
+    }
+  for (int c = idColOffset; c < this->InTable->GetNumberOfColumns(); c++)
+    {
+    vtkAbstractArray* column = this->InTable->GetColumn(c);
+    if (!column)
+      {
+      return false;
+      }
+    if (!useVariant)
+      {
+#define TransposeTypedColumn(_vt, _ta, _t) \
+  case _vt:\
+    if (!this->TransposeColumn<_ta, _t>(c, useVariant))\
+      {\
+      vtkErrorWithObjectMacro(this->Parent, <<\
+        "Unable to transpose column " << c);\
+        return false;\
+      }\
+    break;
+
+      switch (column->GetDataType())
+        {
+        TransposeTypedColumn(VTK_DOUBLE, vtkDoubleArray,
+          double);
+        TransposeTypedColumn(VTK_FLOAT, vtkFloatArray,
+          float);
+        TransposeTypedColumn(VTK_CHAR, vtkCharArray,
+          char);
+        TransposeTypedColumn(VTK_SIGNED_CHAR, vtkSignedCharArray,
+          signed char);
+        TransposeTypedColumn(VTK_SHORT, vtkShortArray,
+          short);
+        TransposeTypedColumn(VTK_INT, vtkIntArray,
+          int);
+        TransposeTypedColumn(VTK_LONG, vtkLongArray,
+          long);
+        TransposeTypedColumn(VTK_LONG_LONG, vtkLongLongArray,
+          long long);
+        TransposeTypedColumn(VTK_UNSIGNED_CHAR, vtkUnsignedCharArray,
+          unsigned char);
+        TransposeTypedColumn(VTK_UNSIGNED_SHORT, vtkUnsignedShortArray,
+          unsigned short);
+        TransposeTypedColumn(VTK_UNSIGNED_INT, vtkUnsignedIntArray,
+          unsigned int);
+        TransposeTypedColumn(VTK_UNSIGNED_LONG, vtkUnsignedLongArray,
+          unsigned long);
+        TransposeTypedColumn(VTK_UNSIGNED_LONG_LONG, vtkUnsignedLongLongArray,
+          unsigned long long);
+        TransposeTypedColumn(VTK_ID_TYPE, vtkIdTypeArray,
+          vtkIdType);
+        TransposeTypedColumn(VTK_STRING, vtkStringArray,
+          vtkStdString);
+#undef TransposeTypedColumn
+        default:
+          useVariant = true;
+          break;
+        }
+      }
+    if (useVariant)
+      {
+      if (!this->TransposeColumn<vtkVariantArray, vtkVariant>(c, useVariant))
+        {
+        vtkErrorWithObjectMacro(this->Parent, << "Unable to transpose column " << c);
+        return false;
+        }
+      }
+    }
+
+  // Set id column on transposed table
+  firstCol = this->InTable->GetColumn(0);
+  for (int r = 0; r < firstCol->GetNumberOfComponents() *
+    firstCol->GetNumberOfTuples(); r++)
+    {
+    vtkAbstractArray* destColumn = this->OutTable->GetColumn(r);
+    if (this->Parent->GetUseIdColumn())
+      {
+      destColumn->SetName(firstCol->GetVariantValue(r).ToString());
+      }
+    else
+      {
+      std::stringstream ss;
+      ss << r;
+      destColumn->SetName(ss.str().c_str());
+      }
+    }
+
+  // Create and insert the id column
+  if (this->Parent->GetAddIdColumn())
+    {
+    vtkNew<vtkStringArray> stringArray;
+    stringArray->SetName(this->Parent->GetUseIdColumn() ?
+      this->InTable->GetColumn(0)->GetName() : this->Parent->GetIdColumnName());
+    stringArray->SetNumberOfValues(
+      this->InTable->GetNumberOfColumns() - idColOffset);
+    for (int c = idColOffset; c < this->InTable->GetNumberOfColumns(); ++c)
+      {
+      stringArray->SetValue(c - idColOffset, this->InTable->GetColumn(c)->GetName());
+      }
+    this->InsertColumn(0, stringArray.GetPointer());
+    }
+
+  return true;
+}
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkTransposeTable);
+
+//----------------------------------------------------------------------------
+vtkTransposeTable::vtkTransposeTable()
+{
+  this->AddIdColumn = true;
+  this->UseIdColumn = false;
+  this->IdColumnName = 0;
+  this->SetIdColumnName("ColName");
+}
+
+//----------------------------------------------------------------------------
+vtkTransposeTable::~vtkTransposeTable()
+{
+  if (this->IdColumnName)
+    {
+    delete [] IdColumnName;
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkTransposeTable::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+}
+
+//----------------------------------------------------------------------------
+int vtkTransposeTable::RequestData(vtkInformation*,
+                                   vtkInformationVector** inputVector,
+                                   vtkInformationVector* outputVector)
+{
+  vtkTable* inTable = vtkTable::GetData(inputVector[0]);
+  vtkTable* outTable = vtkTable::GetData(outputVector, 0);
+
+  if (inTable->GetNumberOfColumns() == 0)
+    {
+    vtkErrorMacro(<<
+      "vtkTransposeTable requires vtkTable containing at least one column.");
+    return 0;
+    }
+
+  vtkTransposeTableInternal intern(this);
+  return intern.TransposeTable(inTable, outTable) ? 1 : 0;
+}
diff --git a/Filters/Core/vtkTransposeTable.h b/Filters/Core/vtkTransposeTable.h
new file mode 100644
index 0000000..bced963
--- /dev/null
+++ b/Filters/Core/vtkTransposeTable.h
@@ -0,0 +1,79 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTransposeTable.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkTransposeTable - Transpose an input table.
+//
+// .SECTION Description
+// This algorithm transposes a vtkTable as if it were a matrix:
+// columns become rows and vice versa. A new column can be added to
+// the result table at index 0 to collect the names of the initial
+// columns (when AddIdColumn is true). Such a column can be used
+// to name the columns of the result.
+// Note that the columns of the output table will have a variant type
+// if the columns of the initial table are not of a consistent type.
+
+#ifndef __vtkTransposeTable_h
+#define __vtkTransposeTable_h
+
+#include "vtkFiltersCoreModule.h" // For export macro
+#include "vtkTableAlgorithm.h"
+
+class VTKFILTERSCORE_EXPORT vtkTransposeTable : public vtkTableAlgorithm
+{
+public:
+  static vtkTransposeTable* New();
+  vtkTypeMacro(vtkTransposeTable, vtkTableAlgorithm);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // This flag indicates if a column must be inserted at index 0
+  // with the names (ids) of the input columns.
+  // Default: true
+  vtkGetMacro(AddIdColumn, bool);
+  vtkSetMacro(AddIdColumn, bool);
+  vtkBooleanMacro(AddIdColumn, bool);
+
+  // Description:
+  // This flag indicates if the output columns must be named using the
+  // names listed in the index 0 column of the input.
+  // Default: false
+  vtkGetMacro(UseIdColumn, bool);
+  vtkSetMacro(UseIdColumn, bool);
+  vtkBooleanMacro(UseIdColumn, bool);
+
+  // Description:
+  // Get/Set the name of the id column added by option AddIdColumn.
+  // Default: ColName
+  vtkGetStringMacro(IdColumnName);
+  vtkSetStringMacro(IdColumnName);
+
+protected:
+  vtkTransposeTable();
+  ~vtkTransposeTable();
+
+  int RequestData(vtkInformation*,
+    vtkInformationVector**,
+    vtkInformationVector*);
+
+  bool AddIdColumn;
+  bool UseIdColumn;
+  char* IdColumnName;
+
+private:
+  vtkTransposeTable(const vtkTransposeTable&); // Not implemented
+  void operator=(const vtkTransposeTable&);   // Not implemented
+};
+
+#endif
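The header above documents the new vtkTransposeTable filter. The following is
a minimal usage sketch; the small input table built here and the final Dump()
call are purely illustrative and not part of the patch:

    #include "vtkIntArray.h"
    #include "vtkNew.h"
    #include "vtkTable.h"
    #include "vtkTransposeTable.h"

    int main(int, char *[])
    {
      // Illustrative 2-column, 3-row input table.
      vtkNew<vtkTable> table;
      vtkNew<vtkIntArray> colA;
      colA->SetName("A");
      vtkNew<vtkIntArray> colB;
      colB->SetName("B");
      for (int i = 0; i < 3; ++i)
        {
        colA->InsertNextValue(i);
        colB->InsertNextValue(10 * i);
        }
      table->AddColumn(colA.GetPointer());
      table->AddColumn(colB.GetPointer());

      // Transpose: rows become columns. With AddIdColumn on (the default),
      // column 0 of the output collects the original column names "A", "B".
      vtkNew<vtkTransposeTable> transpose;
      transpose->SetInputData(table.GetPointer());
      transpose->AddIdColumnOn();
      transpose->Update();

      transpose->GetOutput()->Dump();
      return 0;
    }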
diff --git a/Filters/Core/vtkTriangleFilter.h b/Filters/Core/vtkTriangleFilter.h
index 9636a32..ddea797 100644
--- a/Filters/Core/vtkTriangleFilter.h
+++ b/Filters/Core/vtkTriangleFilter.h
@@ -51,8 +51,8 @@ public:
   vtkGetMacro(PassLines,int);
 
 protected:
-  vtkTriangleFilter() : PassVerts(1), PassLines(1) {};
-  ~vtkTriangleFilter() {};
+  vtkTriangleFilter() : PassVerts(1), PassLines(1) {}
+  ~vtkTriangleFilter() {}
 
   // Usual data generation method
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Core/vtkTubeFilter.cxx b/Filters/Core/vtkTubeFilter.cxx
index d265ab1..0bf2d3d 100644
--- a/Filters/Core/vtkTubeFilter.cxx
+++ b/Filters/Core/vtkTubeFilter.cxx
@@ -48,6 +48,8 @@ vtkTubeFilter::vtkTubeFilter()
   this->GenerateTCoords = VTK_TCOORDS_OFF;
   this->TextureLength = 1.0;
 
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
+
   // by default process active point scalars
   this->SetInputArrayToProcess(0,0,0,vtkDataObject::FIELD_ASSOCIATION_POINTS,
                                vtkDataSetAttributes::SCALARS);
@@ -113,6 +115,21 @@ int vtkTubeFilter::RequestData(
   // Create the geometry and topology
   numNewPts = numPts * this->NumberOfSides;
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(inPts->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->Allocate(numNewPts);
   newNormals = vtkFloatArray::New();
   newNormals->SetName("TubeNormals");
@@ -199,7 +216,9 @@ int vtkTubeFilter::RequestData(
   //
   this->Theta = 2.0*vtkMath::Pi() / this->NumberOfSides;
   vtkPolyLine *lineNormalGenerator = vtkPolyLine::New();
-  for (inCellId=0, inLines->InitTraversal();
+  // the line cellIds start after the last vert cellId
+  inCellId = input->GetNumberOfVerts();
+  for (inLines->InitTraversal();
        inLines->GetNextCell(npts,pts) && !abort; inCellId++)
     {
     this->UpdateProgress((double)inCellId/numLines);
@@ -692,7 +711,7 @@ void vtkTubeFilter::GenerateTextureCoords(vtkIdType offset,
       tc = len / length;
       for ( k=0; k < numSides; k++)
         {
-        newTCoords->InsertTuple2(offset+i*2+k,tc,0.0);
+        newTCoords->InsertTuple2(offset+i*numSides+k,tc,0.0);
         }
       xPrev[0]=x[0]; xPrev[1]=x[1]; xPrev[2]=x[2];
       }
@@ -804,4 +823,6 @@ void vtkTubeFilter::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Generate TCoords: "
      << this->GetGenerateTCoordsAsString() << endl;
   os << indent << "Texture Length: " << this->TextureLength << endl;
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << endl;
 }
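The vtkTubeFilter.cxx hunk above fixes two indexing problems: line cell ids
now start after the last vert cell id, and texture coordinates are written at
offset + i*NumberOfSides + k instead of offset + i*2 + k. The old formula only
matches the tube point layout when NumberOfSides is 2; with more sides the ids
of neighbouring points collide. A tiny standalone check of that arithmetic
(plain C++, not VTK code):

    #include <cstdio>

    int main()
    {
      // Compare old and new tcoord ids for a 3-sided tube: the old formula
      // reuses id 2 for both point 0 / side 2 and point 1 / side 0.
      const int offset = 0, numSides = 3, numPts = 3;
      for (int i = 0; i < numPts; ++i)
        {
        for (int k = 0; k < numSides; ++k)
          {
          std::printf("point %d side %d: old id %d, new id %d\n",
                      i, k, offset + i * 2 + k, offset + i * numSides + k);
          }
        }
      return 0;
    }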
diff --git a/Filters/Core/vtkTubeFilter.h b/Filters/Core/vtkTubeFilter.h
index 6207450..4716ab1 100644
--- a/Filters/Core/vtkTubeFilter.h
+++ b/Filters/Core/vtkTubeFilter.h
@@ -178,6 +178,13 @@ public:
   vtkSetClampMacro(TextureLength,double,0.000001,VTK_INT_MAX);
   vtkGetMacro(TextureLength,double);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkTubeFilter();
   ~vtkTubeFilter() {}
@@ -196,6 +203,7 @@ protected:
   int OnRatio; //control the generation of the sides of the tube
   int Offset;  //control the generation of the sides
   int GenerateTCoords; //control texture coordinate generation
+  int OutputPointsPrecision;
   double TextureLength; //this length is mapped to [0,1) texture space
 
   // Helper methods
diff --git a/Filters/Core/vtkVectorDot.h b/Filters/Core/vtkVectorDot.h
index 606f536..3f8b361 100644
--- a/Filters/Core/vtkVectorDot.h
+++ b/Filters/Core/vtkVectorDot.h
@@ -46,7 +46,7 @@ public:
 
 protected:
   vtkVectorDot();
-  ~vtkVectorDot() {};
+  ~vtkVectorDot() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double ScalarRange[2];
diff --git a/Filters/Core/vtkVectorNorm.h b/Filters/Core/vtkVectorNorm.h
index 8f0860a..c1499dc 100644
--- a/Filters/Core/vtkVectorNorm.h
+++ b/Filters/Core/vtkVectorNorm.h
@@ -70,7 +70,7 @@ public:
 
 protected:
   vtkVectorNorm();
-  ~vtkVectorNorm() {};
+  ~vtkVectorNorm() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Core/vtkWindowedSincPolyDataFilter.h b/Filters/Core/vtkWindowedSincPolyDataFilter.h
index a08b7d4..5b2cfe0 100644
--- a/Filters/Core/vtkWindowedSincPolyDataFilter.h
+++ b/Filters/Core/vtkWindowedSincPolyDataFilter.h
@@ -220,7 +220,7 @@ public:
 
  protected:
   vtkWindowedSincPolyDataFilter();
-  ~vtkWindowedSincPolyDataFilter() {};
+  ~vtkWindowedSincPolyDataFilter() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Cosmo/CMakeLists.txt b/Filters/Cosmo/CMakeLists.txt
deleted file mode 100644
index 4d057ac..0000000
--- a/Filters/Cosmo/CMakeLists.txt
+++ /dev/null
@@ -1,14 +0,0 @@
-if(VTK_COSMOS_USE_MPI)
-  find_package(MPI REQUIRED)
-  include_directories(${MPI_INCLUDE_PATH})
-  add_definitionS("-DMPICH_IGNORE_CXX_SEEK")
-else()
-  add_definitions(-DUSE_SERIAL_COSMO)
-endif()
-
-set(Module_SRCS
-  vtkPCosmoReader.cxx
-  vtkPCosmoHaloFinder.cxx
-  )
-
-vtk_module_library(vtkFiltersCosmo ${Module_SRCS})
diff --git a/Filters/Cosmo/module.cmake b/Filters/Cosmo/module.cmake
deleted file mode 100644
index 426d619..0000000
--- a/Filters/Cosmo/module.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-vtk_module(vtkFiltersCosmo
-  DEPENDS
-    vtkCommonExecutionModel
-    vtkParallelCore
-    vtkCosmo
-  )
diff --git a/Filters/Cosmo/vtkPCosmoHaloFinder.cxx b/Filters/Cosmo/vtkPCosmoHaloFinder.cxx
deleted file mode 100644
index 03d1026..0000000
--- a/Filters/Cosmo/vtkPCosmoHaloFinder.cxx
+++ /dev/null
@@ -1,1119 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkPCosmoHaloFinder.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*=========================================================================
-
-  Program:   VTK/ParaView Los Alamos National Laboratory Modules (PVLANL)
-  Module:    vtkPCosmoHaloFinder.cxx
-
-Copyright (c) 2007, 2009, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007, 2009. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-#ifndef USE_VTK_COSMO
-#define USE_VTK_COSMO
-#endif
-
-#include "vtkPCosmoHaloFinder.h"
-
-#include "vtkPointData.h"
-#include "vtkPoints.h"
-#include "vtkFloatArray.h"
-#include "vtkIntArray.h"
-#include "vtkUnsignedCharArray.h"
-#include "vtkUnstructuredGrid.h"
-#include "vtkInformation.h"
-#include "vtkInformationVector.h"
-#include "vtkObjectFactory.h"
-#include "vtkMultiProcessController.h"
-#include "vtkSmartPointer.h"
-#include "vtkDummyController.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-#include "vtkDemandDrivenPipeline.h"
-
-#include "CosmoHaloFinderP.h"
-#include "CosmoDefinition.h"
-#include "FOFHaloProperties.h"
-#include "HaloCenterFinder.h"
-#include "Partition.h"
-#include "ChainingMesh.h"
-#include "SODHalo.h"
-
-vtkStandardNewMacro(vtkPCosmoHaloFinder);
-
-/****************************************************************************/
-vtkPCosmoHaloFinder::vtkPCosmoHaloFinder()
-{
-  this->SetNumberOfOutputPorts(2);
-
-  this->Controller = 0;
-  this->SetController(vtkMultiProcessController::GetGlobalController());
-  if(!this->Controller)
-    {
-      this->SetController(vtkSmartPointer<vtkDummyController>::New());
-    }
-
-  this->NP = 256;
-  this->RL = 100;
-  this->Overlap = 5;
-  this->BB = .2;
-  this->PMin = 100;
-  this->CopyHaloDataToParticles = 0;
-
-  this->ComputeMostBoundParticle = 0;
-  this->ComputeMostConnectedParticle = 0;
-
-  this->ComputeSOD = 0;
-  this->SODCenterType = 0;
-
-  this->RhoC = RHO_C;
-  this->SODMass = SOD_MASS;
-  this->MinRadiusFactor = MIN_RADIUS_FACTOR;
-  this->MaxRadiusFactor = MAX_RADIUS_FACTOR;
-  this->SODBins = NUM_SOD_BINS;
-  this->MinFOFSize = MIN_SOD_SIZE;
-  this->MinFOFMass = MIN_SOD_MASS;
-}
-
-/****************************************************************************/
-vtkPCosmoHaloFinder::~vtkPCosmoHaloFinder()
-{
-  this->SetController(0);
-}
-
-/****************************************************************************/
-
-void vtkPCosmoHaloFinder::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os,indent);
-
-  if (this->Controller)
-    {
-    os << indent << "Controller: " << this->Controller << endl;
-    }
-  else
-    {
-    os << indent << "Controller: (null)\n";
-    }
-  os << indent << "NP: " << this->NP << endl;
-  os << indent << "rL: " << this->RL << endl;
-  os << indent << "Overlap: " << this->Overlap << endl;
-  os << indent << "bb: " << this->BB << endl;
-  os << indent << "pmin: " << this->PMin << endl;
-  os << indent << "CopyHaloDataToParticles: " << this->CopyHaloDataToParticles
-     << endl;
-  os << indent << "ComputeMostBoundParticle: " << this->ComputeMostBoundParticle << endl;
-  os << indent << "ComputeMostConnectedParticle: " << this->ComputeMostConnectedParticle
-     << endl;
-  os << indent << "ComputeSOD: " << this->ComputeSOD << endl;
-  os << indent << "SODCenterType: " << this->SODCenterType << endl;
-
-  os << indent << "RhoC: " << this->RhoC << endl;
-  os << indent << "SODMass: " << this->SODMass << endl;
-  os << indent << "MinRadiusFactor: " << this->MinRadiusFactor << endl;
-  os << indent << "MaxRadiusFactor: " << this->MaxRadiusFactor << endl;
-  os << indent << "SODBins: " << this->SODBins << endl;
-  os << indent << "MinFOFSize: " << this->MinFOFSize << endl;
-  os << indent << "MinFOFMass: " << this->MinFOFMass << endl;
-}
-
-//----------------------------------------------------------------------------
-void vtkPCosmoHaloFinder::SetController(vtkMultiProcessController *c)
-{
-  if(this->Controller == c)
-    {
-    return;
-    }
-
-  this->Modified();
-
-  if(this->Controller != 0)
-    {
-    this->Controller->UnRegister(this);
-    this->Controller = 0;
-    }
-
-  if(c == 0)
-    {
-    return;
-    }
-
-  this->Controller = c;
-  c->Register(this);
-}
-
-vtkMultiProcessController* vtkPCosmoHaloFinder::GetController()
-{
-  return (vtkMultiProcessController*)this->Controller;
-}
-
-//----------------------------------------------------------------------------
-int vtkPCosmoHaloFinder::RequestInformation
-(vtkInformation* vtkNotUsed(request),
- vtkInformationVector** inputVector,
- vtkInformationVector* outputVector)
-{
-#ifndef USE_SERIAL_COSMO
-  // check for controller
-  if(!this->Controller)
-    {
-    vtkErrorMacro(<< "Unable to work without a Controller.");
-    return 0;
-    }
-#endif
-
-  // set the other outputs to have the same number of pieces
-  if((*inputVector)->GetInformationObject(0)->Has(vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES()))
-    {
-    if(outputVector->GetInformationObject(1)->Has(vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES()))
-      {
-      if(outputVector->GetInformationObject(0)->Get
-         (vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES()) !=
-         outputVector->GetInformationObject(1)->Get
-         (vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES()))
-        {
-        outputVector->GetInformationObject(1)->Set
-          (vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES(),
-           outputVector->GetInformationObject(0)->Get
-           (vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES()));
-        }
-      }
-    else
-      {
-      outputVector->GetInformationObject(1)->Set
-        (vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES(),
-         outputVector->GetInformationObject(0)->Get
-         (vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES()));
-      }
-    }
-
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-int vtkPCosmoHaloFinder::RequestData(
-  vtkInformation* request,
-  vtkInformationVector** inputVector,
-  vtkInformationVector* outputVector)
-{
-  int rno = request->Get(vtkDemandDrivenPipeline::FROM_OUTPUT_PORT());
-
-  // get the info objects
-  vtkInformation* inInfo = (*inputVector)->GetInformationObject(0);
-  vtkInformation* outInfo = outputVector->GetInformationObject(0);
-  vtkInformation* catInfo = outputVector->GetInformationObject(1);
-
-  // get the input and output
-  vtkUnstructuredGrid* input = vtkUnstructuredGrid::SafeDownCast
-    (inInfo->Get(vtkDataObject::DATA_OBJECT()));
-
-  vtkUnstructuredGrid* output = vtkUnstructuredGrid::SafeDownCast
-    (outInfo->Get(vtkDataObject::DATA_OBJECT()));
-
-  vtkUnstructuredGrid* catalog = vtkUnstructuredGrid::SafeDownCast
-    (catInfo->Get(vtkDataObject::DATA_OBJECT()));
-
-  if(!input || !output || !catalog)
-    {
-    return 0;
-    }
-
-  // check that the piece number is correct
-  int updatePiece = 0;
-  int updateTotal = 1;
-  if(rno == 0)
-    {
-    if(outInfo->Has(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER()))
-      {
-      updatePiece = outInfo->
-        Get(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER());
-      }
-    if(outInfo->Has(vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES()))
-      {
-      updateTotal = outInfo->
-        Get(vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES());
-      }
-    }
-  else if(rno == 1)
-    {
-    if(catInfo->Has(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER()))
-      {
-      updatePiece = catInfo->
-        Get(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER());
-      }
-    if(catInfo->Has(vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES()))
-      {
-      updateTotal = catInfo->
-        Get(vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES());
-      }
-    }
-
-  if(updatePiece != this->Controller->GetLocalProcessId() ||
-     updateTotal != this->Controller->GetNumberOfProcesses())
-    {
-    vtkErrorMacro(<< "Piece number does not match process number.");
-    return 0;
-    }
-
-  // shallow total point input to output
-  output->ShallowCopy(input);
-
-  // code to short circuit if there are no points
-  if(output->GetNumberOfPoints() < 1)
-    {
-    catalog->Initialize();
-    return 1;
-    }
-
-  // RRU code
-  // Initialize the partitioner which uses MPI Cartesian Topology
-  Partition::initialize();
-
-  // halo finder needs vectors so take the time to turn them into vectors
-  // FIXME: ought to go into the halo finder and put some #ifdefs
-  // so that it can use vtkDataArray as is - ought to do that with the
-  // reader as well, because it doubles the memory requirement currently
-  if(!output->GetPointData()->HasArray("velocity") ||
-     !output->GetPointData()->HasArray("mass") ||
-     !output->GetPointData()->HasArray("tag") ||
-     !output->GetPointData()->HasArray("ghost"))
-    {
-    vtkErrorMacro(<< "The input data does not have one or more of " <<
-                  "the following point arrays: velocity, mass, tag, or ghost.");
-    return 0;
-    }
-
-  vtkPoints* points = output->GetPoints();
-  vtkFloatArray* velocity = vtkFloatArray::SafeDownCast
-    (output->GetPointData()->GetArray("velocity"));
-  vtkFloatArray* pmass = vtkFloatArray::SafeDownCast
-    (output->GetPointData()->GetArray("mass"));
-  vtkIntArray* uid = vtkIntArray::SafeDownCast
-    (output->GetPointData()->GetArray("tag"));
-  vtkIntArray* owner = vtkIntArray::SafeDownCast
-    (output->GetPointData()->GetArray("ghost"));
-
-  if(velocity == 0 || pmass == 0 || uid == 0 || owner == 0 ||
-     velocity->GetNumberOfComponents() != DIMENSION)
-    {
-    vtkErrorMacro(<< "One or more of the input point data arrays is" <<
-                  "malformed: velocity, mass, tag, or ghost.");
-    return 0;
-    }
-
-  // create the empty ones
-  vtkIdType numberOfLocalPoints = output->GetNumberOfPoints();
-  vector<POTENTIAL_T>* potential = new vector<POTENTIAL_T>(numberOfLocalPoints);
-  vector<MASK_T>* mask = new vector<MASK_T>(numberOfLocalPoints);
-
-  // fill in the non empty ones
-  vector<POSVEL_T>* xx = new vector<POSVEL_T>;
-  vector<POSVEL_T>* yy = new vector<POSVEL_T>;
-  vector<POSVEL_T>* zz = new vector<POSVEL_T>;
-  vector<POSVEL_T>* vx = new vector<POSVEL_T>;
-  vector<POSVEL_T>* vy = new vector<POSVEL_T>;
-  vector<POSVEL_T>* vz = new vector<POSVEL_T>;
-  vector<POSVEL_T>* mass = new vector<POSVEL_T>;
-  vector<ID_T>* tag = new vector<ID_T>;
-  vector<STATUS_T>* status = new vector<STATUS_T>;
-
-  for(int i = 0; i < numberOfLocalPoints; i = i + 1)
-    {
-    // get and set the point
-    double pt[DIMENSION];
-
-    points->GetPoint(i, pt);
-    xx->push_back((float)pt[0]);
-    yy->push_back((float)pt[1]);
-    zz->push_back((float)pt[2]);
-
-    // get and set the velocity
-    float vel[DIMENSION];
-
-    velocity->GetTupleValue(i, vel);
-    vx->push_back(vel[0]);
-    vy->push_back(vel[1]);
-    vz->push_back(vel[2]);
-
-    // get and set the mass
-    vel[0] = pmass->GetValue(i);
-    mass->push_back(vel[0]);
-
-    // get and set the tag
-    int particle = uid->GetValue(i);
-    tag->push_back(particle);
-
-    // get and set the status
-    int neighbor = owner->GetValue(i);
-    status->push_back(neighbor);
-    }
-
-  // delete owner/status because it was only needed for halo finding
-  output->GetPointData()->RemoveArray("ghost");
-
-  // Run halo finder
-  // Collect the serial halo finder results
-  // Merge the halos so that only one copy of each is written
-  // Parallel halo finder must consult with each of the 26 possible neighbor
-  // halo finders to see who will report a particular halo
-  CosmoHaloFinderP* haloFinder = new CosmoHaloFinderP();;
-
-  haloFinder->setParameters
-    ("", this->RL, this->Overlap, this->NP, this->PMin, this->BB);
-  haloFinder->setParticles(xx, yy, zz, vx, vy, vz,
-                           potential, tag, mask, status);
-  haloFinder->executeHaloFinder();
-  haloFinder->collectHalos();
-  haloFinder->mergeHalos();
-
-  // adjust ghost cells, because halo finder updates it
-  vtkUnsignedCharArray* newghost = vtkUnsignedCharArray::New();
-  newghost->SetNumberOfValues(numberOfLocalPoints);
-  newghost->SetName("vtkGhostLevels");
-
-  for(int i = 0; i < numberOfLocalPoints; i = i + 1)
-    {
-    unsigned char level = (*status)[i] < 0 ? 0 : 1;
-    newghost->SetValue(i, level);
-    }
-
-  // Collect information from the halo finder needed for halo properties
-  // Vector halos is the index of the first particle for halo in the haloList
-  // Following the chain of indices in the haloList retrieves all particles
-  int numberOfFOFHalos = haloFinder->getNumberOfHalos();
-  int* fofHalos = haloFinder->getHalos();
-  int* fofHaloCount = haloFinder->getHaloCount();
-  int* fofHaloList = haloFinder->getHaloList();
-  int* fofHaloTags = new int[numberOfFOFHalos];
-
-  FOFHaloProperties* fof = new FOFHaloProperties();
-  fof->setHalos(numberOfFOFHalos, fofHalos, fofHaloCount, fofHaloList);
-  fof->setParameters("", this->RL, this->Overlap, this->BB);
-  fof->setParticles
-    (xx, yy, zz, vx, vy, vz, mass, potential, tag, mask, status);
-
-  // Find the mass of every FOF halo
-  vector<POSVEL_T>* fofMass = new vector<POSVEL_T>;
-  fof->FOFHaloMass(fofMass);
-
-  // Find the average position of every FOF halo
-  vector<POSVEL_T>* fofXPos = new vector<POSVEL_T>;
-  vector<POSVEL_T>* fofYPos = new vector<POSVEL_T>;
-  vector<POSVEL_T>* fofZPos = new vector<POSVEL_T>;
-  fof->FOFPosition(fofXPos, fofYPos, fofZPos);
-
-  // Find the center of mass of every FOF halo
-  vector<POSVEL_T>* fofXCofMass = new vector<POSVEL_T>;
-  vector<POSVEL_T>* fofYCofMass = new vector<POSVEL_T>;
-  vector<POSVEL_T>* fofZCofMass = new vector<POSVEL_T>;
-  fof->FOFCenterOfMass(fofXCofMass, fofYCofMass, fofZCofMass);
-
-  // Find the average velocity of every FOF halo
-  vector<POSVEL_T>* fofXVel = new vector<POSVEL_T>;
-  vector<POSVEL_T>* fofYVel = new vector<POSVEL_T>;
-  vector<POSVEL_T>* fofZVel = new vector<POSVEL_T>;
-  fof->FOFVelocity(fofXVel, fofYVel, fofZVel);
-
-  // Find the velocity dispersion of every FOF halo
-  vector<POSVEL_T>* fofVelDisp = new vector<POSVEL_T>;
-  fof->FOFVelocityDispersion(fofXVel, fofYVel, fofZVel, fofVelDisp);
-
-  // set the tags to -1
-  for(int i = 0; i < numberOfFOFHalos; i = i + 1)
-    {
-    fofHaloTags[i] = -1;
-    }
-
-  // walk the list to get the lowest tag id
-  int pminHalos = 0;
-  for(int i = 0; i < numberOfFOFHalos; i = i + 1)
-    {
-    int size = fofHaloCount[i];
-
-    if(size >= this->PMin)
-      {
-      pminHalos = pminHalos + 1;
-      int index = fofHalos[i];
-      for(int j = 0; j < size; j = j + 1)
-        {
-        if(fofHaloTags[i] == -1 || fofHaloTags[i] > (*tag)[index])
-          {
-          fofHaloTags[i] = (*tag)[index];
-          }
-
-        index = fofHaloList[index];
-        }
-      }
-    }
-
-  // calculate MCP or MBP
-  int* mbpCenter = 0;
-  int* mcpCenter = 0;
-  int mbpOn = this->ComputeMostBoundParticle ||
-    (this->ComputeSOD && this->SODCenterType == 2);
-  int mcpOn = this->ComputeMostConnectedParticle ||
-    (this->ComputeSOD && this->SODCenterType == 3);
-  if(mbpOn)
-    {
-    mbpCenter = new int[numberOfFOFHalos];
-    }
-  if(mcpOn)
-    {
-    mcpCenter = new int[numberOfFOFHalos];
-    }
-
-  if(mcpOn || mbpOn)
-    {
-    for(int i = 0; i < numberOfFOFHalos; i = i + 1)
-      {
-      // skip if it's not large enough
-      long size = fofHaloCount[i];
-      if(size < this->PMin)
-        {
-        continue;
-        }
-
-      // Allocate arrays which will hold halo particle information
-      POSVEL_T* xLocHalo = new POSVEL_T[size];
-      POSVEL_T* yLocHalo = new POSVEL_T[size];
-      POSVEL_T* zLocHalo = new POSVEL_T[size];
-      POSVEL_T* xVelHalo = new POSVEL_T[size];
-      POSVEL_T* yVelHalo = new POSVEL_T[size];
-      POSVEL_T* zVelHalo = new POSVEL_T[size];
-      POSVEL_T* massHalo = new POSVEL_T[size];
-      ID_T* id = new ID_T[size];
-
-      int* actualIndex = new int[size];
-      fof->extractInformation(i, actualIndex,
-                              xLocHalo, yLocHalo, zLocHalo,
-                              xVelHalo, yVelHalo, zVelHalo,
-                              massHalo, id);
-
-      // Most bound particle method of center finding
-      int centerIndex;
-      POTENTIAL_T minPotential;
-      if(mbpOn)
-        {
-        HaloCenterFinder centerFinder;
-        centerFinder.setParticles(size,
-                                  xLocHalo, yLocHalo, zLocHalo,
-                                  massHalo, id);
-        centerFinder.setParameters(this->BB, this->Overlap);
-
-        // Calculate the halo center using MBP (most bound particle)
-        // Combination of n^2/2 algorithm and A* algorithm
-        if(size < MBP_THRESHOLD)
-          {
-          centerIndex = centerFinder.mostBoundParticleN2(&minPotential);
-          }
-        else
-          {
-          centerIndex = centerFinder.mostBoundParticleAStar(&minPotential);
-          }
-
-        mbpCenter[i] = actualIndex[centerIndex];
-        }
-
-      // Most connected particle method of center finding
-      if(mcpOn)
-        {
-        HaloCenterFinder centerFinder;
-        centerFinder.setParticles(size,
-                                  xLocHalo, yLocHalo, zLocHalo,
-                                  massHalo, id);
-        centerFinder.setParameters(this->BB, this->Overlap);
-
-        // Calculate the halo center using MCP (most connected particle)
-        // Combination of n^2/2 algorithm and chaining mesh algorithm
-        if(size < MCP_THRESHOLD)
-          {
-          centerIndex = centerFinder.mostConnectedParticleN2();
-          }
-        else
-          {
-          centerIndex = centerFinder.mostConnectedParticleChainMesh();
-          }
-
-        mcpCenter[i] = actualIndex[centerIndex];
-        }
-
-      delete [] xLocHalo;
-      delete [] yLocHalo;
-      delete [] zLocHalo;
-      delete [] xVelHalo;
-      delete [] yVelHalo;
-      delete [] zVelHalo;
-      delete [] massHalo;
-      delete [] id;
-      delete [] actualIndex;
-      }
-    }
-
-  // calculate SOD halos
-  vtkFloatArray* sodPos = 0;
-  vtkFloatArray* sodCofMass = 0;
-  vtkFloatArray* sodMass = 0;
-  vtkFloatArray* sodVelocity = 0;
-  vtkFloatArray* sodDispersion = 0;
-  vtkFloatArray* sodRadius = 0;
-  if(this->ComputeSOD)
-    {
-    // set up the arrays
-    sodPos = vtkFloatArray::New();
-    sodPos->SetName("sod_average_position");
-    sodPos->SetNumberOfComponents(3);
-    sodPos->SetNumberOfTuples(pminHalos);
-
-    sodCofMass = vtkFloatArray::New();
-    sodCofMass->SetName("sod_center_of_mass");
-    sodCofMass->SetNumberOfComponents(3);
-    sodCofMass->SetNumberOfTuples(pminHalos);
-
-    sodMass = vtkFloatArray::New();
-    sodMass->SetName("sod_mass");
-    sodMass->SetNumberOfTuples(pminHalos);
-
-    sodVelocity = vtkFloatArray::New();
-    sodVelocity->SetName("sod_average_velocity");
-    sodVelocity->SetNumberOfComponents(3);
-    sodVelocity->SetNumberOfTuples(pminHalos);
-
-    sodDispersion = vtkFloatArray::New();
-    sodDispersion->SetName("sod_velocity_dispersion");
-    sodDispersion->SetNumberOfTuples(pminHalos);
-
-    sodRadius = vtkFloatArray::New();
-    sodRadius->SetName("sod_radius");
-    sodRadius->SetNumberOfTuples(pminHalos);
-
-    ChainingMesh* chain =
-      new ChainingMesh(this->RL, this->Overlap, CHAIN_SIZE, xx, yy, zz);
-
-    int index = 0;
-    for(int i = 0; i < numberOfFOFHalos; i = i + 1)
-      {
-      // skip if it's not large enough
-      if(fofHaloCount[i] < this->PMin)
-        {
-        continue;
-        }
-
-      // only calculate the SOD if it is big enough
-      if((*fofMass)[i] >= this->MinFOFMass ||
-         fofHaloCount[i] >= this->MinFOFSize)
-        {
-        SODHalo* sod = new SODHalo();
-        sod->setParameters(chain, this->SODBins, this->RL, this->NP,
-                           this->RhoC, this->SODMass, this->RhoC,
-                           this->MinRadiusFactor, this->MaxRadiusFactor);
-        sod->setParticles(xx, yy, zz, vx, vy, vz, mass, tag);
-
-        // no minimum potential array like in the sim
-        // FIXME: possibly have a minimum potential calculation?
-
-        if(this->SODCenterType == 0)
-          {
-          sod->createSODHalo(fofHaloCount[i],
-                             (*fofXCofMass)[i],
-                             (*fofYCofMass)[i],
-                             (*fofZCofMass)[i],
-                             (*fofXVel)[i],
-                             (*fofYVel)[i],
-                             (*fofZVel)[i],
-                             (*fofMass)[i]);
-          }
-        else if(this->SODCenterType == 1)
-          {
-          sod->createSODHalo(fofHaloCount[i],
-                             (*fofXPos)[i],
-                             (*fofYPos)[i],
-                             (*fofZPos)[i],
-                             (*fofXVel)[i],
-                             (*fofYVel)[i],
-                             (*fofZVel)[i],
-                             (*fofMass)[i]);
-          }
-        else
-          {
-          int center =
-            this->SODCenterType == 2 ? mbpCenter[i] :
-            mcpCenter[i];
-
-          sod->createSODHalo(fofHaloCount[i],
-                             (*xx)[center],
-                             (*yy)[center],
-                             (*zz)[center],
-                             (*fofXVel)[i],
-                             (*fofYVel)[i],
-                             (*fofZVel)[i],
-                             (*fofMass)[i]);
-          }
-
-        // get the halo and fill in the array
-        if(sod->SODHaloSize() > 0)
-          {
-          POSVEL_T tempPos[DIMENSION];
-          POSVEL_T tempCofMass[DIMENSION];
-          POSVEL_T tempMass;
-          POSVEL_T tempVelocity[DIMENSION];
-          POSVEL_T tempDispersion;
-          POSVEL_T tempRadius;
-
-          sod->SODAverageLocation(tempPos);
-          sod->SODCenterOfMass(tempCofMass);
-          sod->SODMass(&tempMass);
-          sod->SODAverageVelocity(tempVelocity);
-          sod->SODVelocityDispersion(&tempDispersion);
-          tempRadius = sod->SODRadius();
-
-          sodPos->SetComponent(index, 0, tempPos[0]);
-          sodPos->SetComponent(index, 1, tempPos[1]);
-          sodPos->SetComponent(index, 2, tempPos[2]);
-
-          sodCofMass->SetComponent(index, 0, tempCofMass[0]);
-          sodCofMass->SetComponent(index, 1, tempCofMass[1]);
-          sodCofMass->SetComponent(index, 2, tempCofMass[2]);
-
-          sodMass->SetComponent(index, 0, tempMass);
-
-          sodVelocity->SetComponent(index, 0, tempVelocity[0]);
-          sodVelocity->SetComponent(index, 1, tempVelocity[1]);
-          sodVelocity->SetComponent(index, 2, tempVelocity[2]);
-
-          sodDispersion->SetComponent(index, 0, tempDispersion);
-
-          sodRadius->SetComponent(index, 0, tempRadius);
-          }
-        // fill in a blank entry for the array
-        else
-          {
-          sodPos->SetComponent(index, 0, 0);
-          sodPos->SetComponent(index, 1, 0);
-          sodPos->SetComponent(index, 2, 0);
-
-          sodCofMass->SetComponent(index, 0, 0);
-          sodCofMass->SetComponent(index, 1, 0);
-          sodCofMass->SetComponent(index, 2, 0);
-
-          sodMass->SetComponent(index, 0, 0);
-
-          sodVelocity->SetComponent(index, 0, 0);
-          sodVelocity->SetComponent(index, 1, 0);
-          sodVelocity->SetComponent(index, 2, 0);
-
-          sodDispersion->SetComponent(index, 0, 0);
-
-          sodRadius->SetComponent(index, 0, -1);
-          }
-
-        delete sod;
-        }
-      // fill in a blank entry for the array
-      else
-        {
-        sodPos->SetComponent(index, 0, 0);
-        sodPos->SetComponent(index, 1, 0);
-        sodPos->SetComponent(index, 2, 0);
-
-        sodCofMass->SetComponent(index, 0, 0);
-        sodCofMass->SetComponent(index, 1, 0);
-        sodCofMass->SetComponent(index, 2, 0);
-
-        sodMass->SetComponent(index, 0, 0);
-
-        sodVelocity->SetComponent(index, 0, 0);
-        sodVelocity->SetComponent(index, 1, 0);
-        sodVelocity->SetComponent(index, 2, 0);
-
-        sodDispersion->SetComponent(index, 0, 0);
-
-        sodRadius->SetComponent(index, 0, -1);
-        }
-
-      index = index + 1;
-      }
-
-    delete chain;
-    }
-
-  // walk the list again to set the values for the points and catalog
-  vtkIntArray* partTag = 0;
-  vtkFloatArray* partPos = 0;
-  vtkFloatArray* partCofMass = 0;
-  vtkFloatArray* partMass = 0;
-  vtkFloatArray* partVelocity = 0;
-  vtkFloatArray* partDispersion = 0;
-
-  vtkFloatArray* partMBP = 0;
-  vtkFloatArray* partMCP = 0;
-
-  // if we are copying to particles get the arrays ready
-  if(this->CopyHaloDataToParticles)
-    {
-    partTag = vtkIntArray::New();
-    partTag->SetName("halo_tag");
-    partTag->SetNumberOfValues(numberOfLocalPoints);
-    partTag->FillComponent(0, -1);
-
-    partPos = vtkFloatArray::New();
-    partPos->SetName("halo_average_position");
-    partPos->SetNumberOfComponents(3);
-    partPos->SetNumberOfTuples(numberOfLocalPoints);
-
-    partCofMass = vtkFloatArray::New();
-    partCofMass->SetName("halo_center_of_mass");
-    partCofMass->SetNumberOfComponents(3);
-    partCofMass->SetNumberOfTuples(numberOfLocalPoints);
-
-    partMass = vtkFloatArray::New();
-    partMass->SetName("halo_mass");
-    partMass->SetNumberOfValues(numberOfLocalPoints);
-
-    partVelocity = vtkFloatArray::New();
-    partVelocity->SetName("halo_average_velocity");
-    partVelocity->SetNumberOfComponents(3);
-    partVelocity->SetNumberOfTuples(numberOfLocalPoints);
-
-    partDispersion = vtkFloatArray::New();
-    partDispersion->SetName("halo_velocity_dispersion");
-    partDispersion->SetNumberOfValues(numberOfLocalPoints);
-
-    if(mbpOn)
-      {
-      partMBP = vtkFloatArray::New();
-      partMBP->SetName("halo_most_bound_particle");
-      partMBP->SetNumberOfComponents(3);
-      partMBP->SetNumberOfTuples(numberOfLocalPoints);
-      }
-
-    if(mcpOn)
-      {
-      partMCP = vtkFloatArray::New();
-      partMCP->SetName("halo_most_connected_particle");
-      partMCP->SetNumberOfComponents(3);
-      partMCP->SetNumberOfTuples(numberOfLocalPoints);
-      }
-    }
-
-  // get the catalog arrays ready
-  vtkPoints* catpoints = vtkPoints::New();
-  catpoints->SetDataTypeToFloat();
-  catalog->Allocate(pminHalos);
-  catalog->SetPoints(catpoints);
-
-  vtkIntArray* haloTag = vtkIntArray::New();
-  haloTag->SetName("halo_tag");
-  haloTag->SetNumberOfValues(pminHalos);
-
-  vtkFloatArray* haloPos = vtkFloatArray::New();
-  haloPos->SetName("halo_average_position");
-  haloPos->SetNumberOfComponents(3);
-  haloPos->SetNumberOfTuples(pminHalos);
-
-  vtkFloatArray* haloCofMass = vtkFloatArray::New();
-  haloCofMass->SetName("halo_center_of_mass");
-  haloCofMass->SetNumberOfComponents(3);
-  haloCofMass->SetNumberOfTuples(pminHalos);
-
-  vtkFloatArray* haloMass = vtkFloatArray::New();
-  haloMass->SetName("halo_mass");
-  haloMass->SetNumberOfValues(pminHalos);
-
-  vtkFloatArray* haloVelocity = vtkFloatArray::New();
-  haloVelocity->SetName("halo_average_velocity");
-  haloVelocity->SetNumberOfComponents(3);
-  haloVelocity->SetNumberOfTuples(pminHalos);
-
-  vtkFloatArray* haloDispersion = vtkFloatArray::New();
-  haloDispersion->SetName("halo_velocity_dispersion");
-  haloDispersion->SetNumberOfValues(pminHalos);
-
-  vtkFloatArray* haloMBP = 0;
-  vtkFloatArray* haloMCP = 0;
-
-  if(mbpOn)
-    {
-    haloMBP = vtkFloatArray::New();
-    haloMBP->SetName("halo_most_bound_particle");
-    haloMBP->SetNumberOfComponents(3);
-    haloMBP->SetNumberOfTuples(pminHalos);
-    }
-
-  if(mcpOn)
-    {
-    haloMCP = vtkFloatArray::New();
-    haloMCP->SetName("halo_most_connected_particle");
-    haloMCP->SetNumberOfComponents(3);
-    haloMCP->SetNumberOfTuples(pminHalos);
-    }
-
-  // walk the halos and copy the data
-  int halocount = 0;
-  for(int i = 0; i < numberOfFOFHalos; i = i + 1)
-    {
-    // skip if not large enough
-    if(fofHaloCount[i] < this->PMin)
-      {
-      continue;
-      }
-
-    // set the catalog position
-    vtkIdType pid;
-    pid = catpoints->InsertNextPoint
-      ((*fofXPos)[i], (*fofYPos)[i], (*fofZPos)[i]);
-    catalog->InsertNextCell(1, 1, &pid);
-
-    // set the halo data
-    haloTag->SetValue(halocount, fofHaloTags[i]);
-    haloPos->SetComponent(halocount, 0, (*fofXPos)[i]);
-    haloPos->SetComponent(halocount, 1, (*fofYPos)[i]);
-    haloPos->SetComponent(halocount, 2, (*fofZPos)[i]);
-    haloCofMass->SetComponent(halocount, 0, (*fofXCofMass)[i]);
-    haloCofMass->SetComponent(halocount, 1, (*fofYCofMass)[i]);
-    haloCofMass->SetComponent(halocount, 2, (*fofZCofMass)[i]);
-    haloMass->SetValue(halocount, (*fofMass)[i]);
-    haloVelocity->SetComponent(halocount, 0, (*fofXVel)[i]);
-    haloVelocity->SetComponent(halocount, 1, (*fofYVel)[i]);
-    haloVelocity->SetComponent(halocount, 2, (*fofZVel)[i]);
-    haloDispersion->SetValue(halocount, (*fofVelDisp)[i]);
-
-    if(haloMBP)
-      {
-      haloMBP->SetComponent(halocount, 0, (*xx)[mbpCenter[i]]);
-      haloMBP->SetComponent(halocount, 1, (*yy)[mbpCenter[i]]);
-      haloMBP->SetComponent(halocount, 2, (*zz)[mbpCenter[i]]);
-      }
-
-    if(haloMCP)
-      {
-      haloMCP->SetComponent(halocount, 0, (*xx)[mcpCenter[i]]);
-      haloMCP->SetComponent(halocount, 1, (*yy)[mcpCenter[i]]);
-      haloMCP->SetComponent(halocount, 2, (*zz)[mcpCenter[i]]);
-      }
-
-    // increment to the next halo
-    halocount = halocount + 1;
-
-    // set the halo data for the original points
-    if(this->CopyHaloDataToParticles)
-      {
-      int index = fofHalos[i];
-      for(int j = 0; j < fofHaloCount[i]; j = j + 1)
-        {
-        partTag->SetValue(index, fofHaloTags[i]);
-        partPos->SetComponent(index, 0, (*fofXPos)[i]);
-        partPos->SetComponent(index, 1, (*fofYPos)[i]);
-        partPos->SetComponent(index, 2, (*fofZPos)[i]);
-        partCofMass->SetComponent(index, 0, (*fofXCofMass)[i]);
-        partCofMass->SetComponent(index, 1, (*fofYCofMass)[i]);
-        partCofMass->SetComponent(index, 2, (*fofZCofMass)[i]);
-        partMass->SetValue(index, (*fofMass)[i]);
-        partVelocity->SetComponent(index, 0, (*fofXVel)[i]);
-        partVelocity->SetComponent(index, 1, (*fofYVel)[i]);
-        partVelocity->SetComponent(index, 2, (*fofZVel)[i]);
-        partDispersion->SetValue(index, (*fofVelDisp)[i]);
-
-        if(partMBP)
-          {
-          partMBP->SetComponent(index, 0, (*xx)[mbpCenter[i]]);
-          partMBP->SetComponent(index, 1, (*yy)[mbpCenter[i]]);
-          partMBP->SetComponent(index, 2, (*zz)[mbpCenter[i]]);
-          }
-
-        if(partMCP)
-          {
-          partMCP->SetComponent(index, 0, (*xx)[mcpCenter[i]]);
-          partMCP->SetComponent(index, 1, (*yy)[mcpCenter[i]]);
-          partMCP->SetComponent(index, 2, (*zz)[mcpCenter[i]]);
-          }
-
-        index = fofHaloList[index];
-        }
-      }
-    }
-
-  // set the array for particles
-  if(this->CopyHaloDataToParticles)
-    {
-    output->GetPointData()->AddArray(partTag);
-    output->GetPointData()->AddArray(partPos);
-    output->GetPointData()->AddArray(partCofMass);
-    output->GetPointData()->AddArray(partMass);
-    output->GetPointData()->AddArray(partVelocity);
-    output->GetPointData()->AddArray(partDispersion);
-    if(partMBP)
-      {
-      output->GetPointData()->AddArray(partMBP);
-      }
-    if(partMCP)
-      {
-      output->GetPointData()->AddArray(partMCP);
-      }
-    }
-  output->GetPointData()->AddArray(newghost);
-
-  // set the arrays for the catalog
-  catalog->GetPointData()->AddArray(haloTag);
-  catalog->GetPointData()->AddArray(haloPos);
-  catalog->GetPointData()->AddArray(haloCofMass);
-  catalog->GetPointData()->AddArray(haloMass);
-  catalog->GetPointData()->AddArray(haloVelocity);
-  catalog->GetPointData()->AddArray(haloDispersion);
-  if(haloMBP)
-    {
-    catalog->GetPointData()->AddArray(haloMBP);
-    }
-  if(haloMCP)
-    {
-    catalog->GetPointData()->AddArray(haloMCP);
-    }
-
-  if(sodPos)
-    {
-    catalog->GetPointData()->AddArray(sodPos);
-    catalog->GetPointData()->AddArray(sodCofMass);
-    catalog->GetPointData()->AddArray(sodMass);
-    catalog->GetPointData()->AddArray(sodVelocity);
-    catalog->GetPointData()->AddArray(sodDispersion);
-    catalog->GetPointData()->AddArray(sodRadius);
-    }
-
-  // cleanup
-  if(this->CopyHaloDataToParticles)
-    {
-    partTag->Delete();
-    partPos->Delete();
-    partCofMass->Delete();
-    partMass->Delete();
-    partVelocity->Delete();
-    partDispersion->Delete();
-    }
-  newghost->Delete();
-
-  catpoints->Delete();
-  haloTag->Delete();
-  haloPos->Delete();
-  haloCofMass->Delete();
-  haloMass->Delete();
-  haloVelocity->Delete();
-  haloDispersion->Delete();
-
-  if(partMBP)
-    {
-    partMBP->Delete();
-    }
-
-  if(partMCP)
-    {
-    partMCP->Delete();
-    }
-
-  if(haloMBP)
-    {
-    haloMBP->Delete();
-    }
-
-  if(haloMCP)
-    {
-    haloMCP->Delete();
-    }
-
-  if(mbpCenter)
-    {
-    delete [] mbpCenter;
-    }
-
-  if(mcpCenter)
-    {
-    delete [] mcpCenter;
-    }
-
-  if(sodPos)
-    {
-    sodPos->Delete();
-    sodCofMass->Delete();
-    sodMass->Delete();
-    sodVelocity->Delete();
-    sodDispersion->Delete();
-    sodRadius->Delete();
-    }
-
-  delete xx;
-  delete yy;
-  delete zz;
-  delete vx;
-  delete vy;
-  delete vz;
-  delete mass;
-  delete tag;
-  delete status;
-  delete potential;
-  delete mask;
-
-  delete [] fofHaloTags;
-  delete fofMass;
-  delete fofXPos;
-  delete fofYPos;
-  delete fofZPos;
-  delete fofXCofMass;
-  delete fofYCofMass;
-  delete fofZCofMass;
-  delete fofXVel;
-  delete fofYVel;
-  delete fofZVel;
-  delete fofVelDisp;
-
-  delete fof;
-  delete haloFinder;
-
-  return 1;
-}
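
The RequestData body removed above follows the classic VTK ownership pattern throughout: each catalog array is created with New(), handed to the output via AddArray() (which takes its own reference), and then released with Delete(). The following is a minimal sketch of the same pattern written with vtkSmartPointer, which this import adopts elsewhere in the tree (see the vtkExtractGeometry.cxx hunk further down); the helper name and signature are illustrative only:

    #include <vtkFloatArray.h>
    #include <vtkPointData.h>
    #include <vtkSmartPointer.h>
    #include <vtkUnstructuredGrid.h>

    // Hypothetical helper: attach a named float array to a grid's point data.
    // The smart pointer drops the local reference automatically, so the
    // explicit Delete() calls used in the removed code are not needed.
    static void AddHaloArray(vtkUnstructuredGrid* catalog, const char* name,
                             vtkIdType numTuples, int numComponents = 1)
    {
      vtkSmartPointer<vtkFloatArray> arr =
        vtkSmartPointer<vtkFloatArray>::New();
      arr->SetName(name);
      arr->SetNumberOfComponents(numComponents);
      arr->SetNumberOfTuples(numTuples);
      catalog->GetPointData()->AddArray(arr); // AddArray registers its own reference
    }
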
diff --git a/Filters/Cosmo/vtkPCosmoHaloFinder.h b/Filters/Cosmo/vtkPCosmoHaloFinder.h
deleted file mode 100644
index b6f4d06..0000000
--- a/Filters/Cosmo/vtkPCosmoHaloFinder.h
+++ /dev/null
@@ -1,231 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkPCosmoHaloFinder.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*=========================================================================
-
-  Program:   VTK/ParaView Los Alamos National Laboratory Modules (PVLANL)
-  Module:    vtkPCosmoHaloFinder.h
-
-Copyright (c) 2007, 2009, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007, 2009. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-// .NAME vtkPCosmoHaloFinder - find halos within a cosmology data file
-// .SECTION Description
-// vtkPCosmoHaloFinder is a filter object that operates on the unstructured
-// grid of all particles and assigns each particle a halo id.
-//
-
-#ifndef __vtkPCosmoHaloFinder_h
-#define __vtkPCosmoHaloFinder_h
-
-#include "vtkFiltersCosmoModule.h" // For export macro
-#include "vtkUnstructuredGridAlgorithm.h"
-
-class vtkMultiProcessController;
-
-class VTKFILTERSCOSMO_EXPORT vtkPCosmoHaloFinder : public vtkUnstructuredGridAlgorithm
-{
- public:
-  static vtkPCosmoHaloFinder *New();
-
-  vtkTypeMacro(vtkPCosmoHaloFinder,vtkUnstructuredGridAlgorithm);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Set the communicator object for interprocess communication
-  virtual vtkMultiProcessController* GetController();
-  virtual void SetController(vtkMultiProcessController*);
-
-  // Description:
-  // Specify the number of seeded particles in one dimension (total = np^3)
-  // (default 256)
-  vtkSetMacro(NP, int);
-  vtkGetMacro(NP, int);
-
-  // Description:
-  // Specify the physical box dimensions size (rL)
-  // (default 100.0)
-  vtkSetMacro(RL, float);
-  vtkGetMacro(RL, float);
-
-  // Description:
-  // Specify the ghost cell spacing (in rL units)
-  // (edge boundary of processor box)
-  // (default 5)
-  vtkSetMacro(Overlap, float);
-  vtkGetMacro(Overlap, float);
-
-  // Description:
-  // Specify the minimum number of particles for a halo (pmin)
-  // (default 100)
-  vtkSetMacro(PMin, int);
-  vtkGetMacro(PMin, int);
-
-  // Description:
-  // Specify the linking length (bb)
-  // (default .2)
-  vtkSetMacro(BB, float);
-  vtkGetMacro(BB, float);
-
-  // Description:
-  // Copy the halo information to the original particles
-  // (default off)
-  vtkSetMacro(CopyHaloDataToParticles, int);
-  vtkGetMacro(CopyHaloDataToParticles, int);
-
-  // Description:
-  // Turn on calculation of the most bound particle (center finding)
-  // (default off)
-  vtkSetMacro(ComputeMostBoundParticle, int);
-  vtkGetMacro(ComputeMostBoundParticle, int);
-
-  // Description:
-  // Turn on calculation of the most connected particle (center finding)
-  // (default off)
-  vtkSetMacro(ComputeMostConnectedParticle, int);
-  vtkGetMacro(ComputeMostConnectedParticle, int);
-
-  // Description:
-  // Turn on calculation of SOD halos
-  // (default off)
-  vtkSetMacro(ComputeSOD, int);
-  vtkGetMacro(ComputeSOD, int);
-
-  // Description:
-  // Specify the FOF center to use in SOD calculations
-  // (0 = center of mass (default), 1 = average position, 2 = MBP, 3 = MCP)
-  vtkSetMacro(SODCenterType, int);
-  vtkGetMacro(SODCenterType, int);
-
-  // Description:
-  // Specify rho_c (critical density)
-  // (default 2.77536627e11)
-  vtkSetMacro(RhoC, float);
-  vtkGetMacro(RhoC, float);
-
-  // Description:
-  // Specify the initial SOD mass
-  // (default 1.0e14)
-  vtkSetMacro(SODMass, float);
-  vtkGetMacro(SODMass, float);
-
-  // Description:
-  // Specify the minimum radius factor
-  // (default 0.5)
-  vtkSetMacro(MinRadiusFactor, float);
-  vtkGetMacro(MinRadiusFactor, float);
-
-  // Description:
-  // Specify the maximum radius factor
-  // (default 2.0)
-  vtkSetMacro(MaxRadiusFactor, float);
-  vtkGetMacro(MaxRadiusFactor, float);
-
-  // Description:
-  // Specify the number of bins for SOD finding
-  // (default 20)
-  vtkSetMacro(SODBins, int);
-  vtkGetMacro(SODBins, int);
-
-  // Description:
-  // Specify the minimum FOF size for an SOD halo
-  // (default 1000)
-  vtkSetMacro(MinFOFSize, int);
-  vtkGetMacro(MinFOFSize, int);
-
-  // Description:
-  // Specify the minimum FOF mass for an SOD halo
-  // (default 5.0e12)
-  vtkSetMacro(MinFOFMass, float);
-  vtkGetMacro(MinFOFMass, float);
-
- protected:
-  vtkPCosmoHaloFinder();
-  ~vtkPCosmoHaloFinder();
-
-  virtual int RequestInformation(vtkInformation*,
-                                 vtkInformationVector**,
-                                 vtkInformationVector*);
-
-  virtual int RequestData(vtkInformation*,
-                          vtkInformationVector**,
-                          vtkInformationVector*);
-
-  vtkMultiProcessController* Controller;
-
-  int NP; // Number of seeded particles in one dimension (np^3 total)
-  float RL; // The physical box dimensions (rL)
-  float Overlap; // The ghost cell boundary space
-  int PMin; // The minimum particles for a halo
-  float BB; // The linking length
-  int CopyHaloDataToParticles; // Copy halo information to original data
-  int ComputeMostBoundParticle; // Turn on MBP finding
-  int ComputeMostConnectedParticle; // Turn on MCP finding
-
-  int ComputeSOD; // Turn on Spherical OverDensity (SOD) halos
-  int SODCenterType; // Set the center finding for SOD halos
-
-  float RhoC; // SOD rho_C (2.77536627e11)
-  float SODMass; // Initial SOD mass (1.0e14)
-  float MinRadiusFactor; // Minimum factor of SOD radius (0.5)
-  float MaxRadiusFactor; // Maximum factor of SOD radius (2.0)
-  int SODBins; // Number of log scale bins for SOD (20)
-  int MinFOFSize; // Minimum FOF size for SOD (1000)
-  float MinFOFMass; // Minimum FOF mass for SOD (5.0e12)
-
- private:
-  vtkPCosmoHaloFinder(const vtkPCosmoHaloFinder&);  // Not implemented.
-  void operator=(const vtkPCosmoHaloFinder&);  // Not implemented.
-
-};
-
-#endif //  __vtkPCosmoHaloFinder_h
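
For reference, the removed halo finder was driven like any other vtkUnstructuredGridAlgorithm, using the setters documented above. A minimal usage sketch, assuming the companion vtkPCosmoReader (also removed in this commit) as the source and the documented defaults otherwise; the file name is illustrative:

    #include <vtkSmartPointer.h>
    #include "vtkPCosmoHaloFinder.h"
    #include "vtkPCosmoReader.h"

    int main()
    {
      vtkSmartPointer<vtkPCosmoReader> reader =
        vtkSmartPointer<vtkPCosmoReader>::New();
      reader->SetFileName("particles.cosmo"); // illustrative path
      reader->SetRL(100.0f);                  // physical box size (rL)

      vtkSmartPointer<vtkPCosmoHaloFinder> finder =
        vtkSmartPointer<vtkPCosmoHaloFinder>::New();
      finder->SetInputConnection(reader->GetOutputPort());
      finder->SetNP(256);    // seeded particles per dimension
      finder->SetBB(0.2f);   // FOF linking length
      finder->SetPMin(100);  // minimum particles per halo
      finder->SetCopyHaloDataToParticles(1);
      finder->Update();      // first output: tagged particles; the removed
                             // RequestData also fills a halo catalog output
      return 0;
    }
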
diff --git a/Filters/Cosmo/vtkPCosmoReader.cxx b/Filters/Cosmo/vtkPCosmoReader.cxx
deleted file mode 100644
index 5923e23..0000000
--- a/Filters/Cosmo/vtkPCosmoReader.cxx
+++ /dev/null
@@ -1,403 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkPCosmoReader.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*=========================================================================
-
-  Program:   VTK/ParaView Los Alamos National Laboratory Modules (PVLANL)
-  Module:    vtkPCosmoReader.cxx
-
-Copyright (c) 2009 Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2009. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-#ifndef USE_VTK_COSMO
-#define USE_VTK_COSMO
-#endif
-
-#include "vtkPCosmoReader.h"
-#include "vtkUnstructuredGrid.h"
-#include "vtkInformation.h"
-#include "vtkInformationVector.h"
-#include "vtkObjectFactory.h"
-#include "vtkMultiProcessController.h"
-#include "vtkSmartPointer.h"
-#include "vtkDummyController.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-#include "vtkFloatArray.h"
-#include "vtkPoints.h"
-#include "vtkUnsignedCharArray.h"
-#include "vtkIntArray.h"
-#include "vtkPointData.h"
-#include "vtkDataObject.h"
-#include "vtkStdString.h"
-#include "vtkCellArray.h"
-
-#include <vector>
-
-using namespace std;
-
-// RRU stuff
-#include "CosmoDefinition.h"
-#include "Partition.h"
-#include "ParticleExchange.h"
-#include "ParticleDistribute.h"
-
-vtkStandardNewMacro(vtkPCosmoReader);
-
-//----------------------------------------------------------------------------
-vtkPCosmoReader::vtkPCosmoReader()
-{
-  this->SetNumberOfInputPorts(0);
-
-  this->Controller = 0;
-  this->SetController(vtkMultiProcessController::GetGlobalController());
-  if(!this->Controller)
-    {
-      this->SetController(vtkSmartPointer<vtkDummyController>::New());
-    }
-
-  this->FileName = NULL;
-  this->RL = 100;
-  this->Overlap = 5;
-  this->ReadMode = 1;
-  this->CosmoFormat = 1;
-}
-
-//----------------------------------------------------------------------------
-vtkPCosmoReader::~vtkPCosmoReader()
-{
-  if (this->FileName)
-    {
-    delete [] this->FileName;
-    }
-
-  this->SetController(0);
-}
-
-//----------------------------------------------------------------------------
-void vtkPCosmoReader::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os,indent);
-
-  if (this->Controller)
-    {
-    os << indent << "Controller: " << this->Controller << endl;
-    }
-  else
-    {
-    os << indent << "Controller: (null)\n";
-    }
-
-  os << indent << "FileName: " << (this->FileName != NULL ? this->FileName : "") << endl;
-  os << indent << "rL: " << this->RL << endl;
-  os << indent << "Overlap: " << this->Overlap << endl;
-  os << indent << "ReadMode: " << this->ReadMode << endl;
-  os << indent << "CosmoFormat: " << this->CosmoFormat << endl;
-}
-
-//----------------------------------------------------------------------------
-void vtkPCosmoReader::SetController(vtkMultiProcessController *c)
-{
-  if(this->Controller == c)
-    {
-    return;
-    }
-
-  this->Modified();
-
-  if(this->Controller != 0)
-    {
-    this->Controller->UnRegister(this);
-    this->Controller = 0;
-    }
-
-  if(c == 0)
-    {
-    return;
-    }
-
-  this->Controller = c;
-  c->Register(this);
-}
-
-vtkMultiProcessController* vtkPCosmoReader::GetController()
-{
-  return (vtkMultiProcessController*)this->Controller;
-}
-
-//----------------------------------------------------------------------------
-int vtkPCosmoReader::RequestInformation(
-  vtkInformation *vtkNotUsed(request),
-  vtkInformationVector **vtkNotUsed(inputVector),
-  vtkInformationVector *outputVector)
-{
-  // set the pieces as the number of processes
-  outputVector->GetInformationObject(0)->Set
-    (vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES(),
-     this->Controller->GetNumberOfProcesses());
-
-  outputVector->GetInformationObject(0)->Set
-    (vtkDataObject::DATA_NUMBER_OF_PIECES(),
-     this->Controller->GetNumberOfProcesses());
-
-  // set the ghost levels
-  outputVector->GetInformationObject(0)->Set
-    (vtkDataObject::DATA_NUMBER_OF_GHOST_LEVELS(), 1);
-
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-int vtkPCosmoReader::RequestData(
-  vtkInformation *vtkNotUsed(request),
-  vtkInformationVector **vtkNotUsed(inputVector),
-  vtkInformationVector *outputVector)
-{
-  // get the info object
-  vtkInformation *outInfo = outputVector->GetInformationObject(0);
-
-  // get the output
-  vtkUnstructuredGrid *output = vtkUnstructuredGrid::SafeDownCast(
-    outInfo->Get(vtkDataObject::DATA_OBJECT()));
-
-  // check that the piece number is correct
-  int updatePiece = 0;
-  int updateTotal = 1;
-  if(outInfo->Has(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER()))
-    {
-      updatePiece = outInfo->
-        Get(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER());
-    }
-  if(outInfo->Has(vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES()))
-    {
-      updateTotal = outInfo->
-        Get(vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES());
-    }
-
-  if(updatePiece != this->Controller->GetLocalProcessId() ||
-     updateTotal != this->Controller->GetNumberOfProcesses())
-    {
-      vtkErrorMacro(<< "Piece number does not match process number.");
-      return 0;
-    }
-
-   if (this->FileName == NULL || this->FileName[0] == '\0')
-    {
-    vtkErrorMacro(<< "No FileName specified!");
-    return 0;
-    }
-
-  // RRU code
-  // Initialize the partitioner which uses MPI Cartesian Topology
-  Partition::initialize();
-
-  // Construct the particle distributor, exchanger and halo finder
-  ParticleDistribute distribute;
-  ParticleExchange exchange;
-
-  // Initialize classes for reading, exchanging and calculating
-  if(this->CosmoFormat)
-    {
-    distribute.setParameters(this->FileName, this->RL, "RECORD");
-    }
-  else
-    {
-    distribute.setParameters(this->FileName, this->RL, "BLOCK");
-    }
-  exchange.setParameters(this->RL, this->Overlap);
-
-  distribute.initialize();
-  exchange.initialize();
-
-  // Read alive particles only from files
-  // In ROUND_ROBIN all files are read and particles are passed round robin
-  // to every other processor so that every processor chooses its own
-  // In ONE_TO_ONE every processor reads its own processor in the topology
-  // which has already been populated with the correct alive particles
-  vector<POSVEL_T>* xx = new vector<POSVEL_T>;
-  vector<POSVEL_T>* yy = new vector<POSVEL_T>;
-  vector<POSVEL_T>* zz = new vector<POSVEL_T>;
-  vector<POSVEL_T>* vx = new vector<POSVEL_T>;
-  vector<POSVEL_T>* vy = new vector<POSVEL_T>;
-  vector<POSVEL_T>* vz = new vector<POSVEL_T>;
-  vector<POSVEL_T>* mass = new vector<POSVEL_T>;
-  vector<ID_T>* tag = new vector<ID_T>;
-  vector<STATUS_T>* status = new vector<STATUS_T>;
-
-  distribute.setParticles(xx, yy, zz, vx, vy, vz, mass, tag);
-  if(this->ReadMode)
-    {
-    distribute.readParticlesRoundRobin();
-    }
-  else
-    {
-    distribute.readParticlesOneToOne();
-    }
-
-  // Create the mask and potential vectors which will be filled in elsewhere
-  int numberOfParticles = (int)xx->size();
-  vector<POTENTIAL_T>* potential = new vector<POTENTIAL_T>(numberOfParticles);
-  vector<MASK_T>* mask = new vector<MASK_T>(numberOfParticles);
-
-  // Exchange particles adds dead particles to all the vectors
-  exchange.setParticles(xx, yy, zz, vx, vy, vz, mass, potential, tag,
-                        mask, status);
-  exchange.exchangeParticles();
-
-  // create VTK structures
-  numberOfParticles = (int)xx->size();
-  potential->clear();
-  mask->clear();
-
-  vtkPoints* points = vtkPoints::New();
-  points->SetDataTypeToFloat();
-  points->Allocate(numberOfParticles);
-  vtkCellArray* cells = vtkCellArray::New();
-  cells->Allocate(cells->EstimateSize(numberOfParticles, 1));
-
-  vtkFloatArray* vel = vtkFloatArray::New();
-  vel->SetName("velocity");
-  vel->SetNumberOfComponents(DIMENSION);
-  vel->Allocate(numberOfParticles);
-  vtkFloatArray* m = vtkFloatArray::New();
-  m->SetName("mass");
-  m->Allocate(numberOfParticles);
-  vtkIntArray* uid = vtkIntArray::New();
-  uid->SetName("tag");
-  uid->Allocate(numberOfParticles);
-  vtkIntArray* owner = vtkIntArray::New();
-  owner->SetName("ghost");
-  owner->Allocate(numberOfParticles);
-  vtkUnsignedCharArray* ghost = vtkUnsignedCharArray::New();
-  ghost->SetName("vtkGhostLevels");
-  ghost->Allocate(numberOfParticles);
-
-  // put it into the correct VTK structure
-  for(vtkIdType i = 0; i < numberOfParticles; i = i + 1)
-    {
-    float pt[DIMENSION];
-
-    // insert point and cell
-    pt[0] = xx->back();
-    xx->pop_back();
-    pt[1] = yy->back();
-    yy->pop_back();
-    pt[2] = zz->back();
-    zz->pop_back();
-
-    vtkIdType pid = points->InsertNextPoint(pt);
-    cells->InsertNextCell(1, &pid);
-
-    // insert velocity
-    pt[0] = vx->back();
-    vx->pop_back();
-    pt[1] = vy->back();
-    vy->pop_back();
-    pt[2] = vz->back();
-    vz->pop_back();
-
-    vel->InsertNextTuple(pt);
-
-    // insert mass
-    pt[0] = mass->back();
-    mass->pop_back();
-
-    m->InsertNextValue(pt[0]);
-
-    // insert tag
-    int particle = tag->back();
-    tag->pop_back();
-
-    uid->InsertNextValue(particle);
-
-    // insert ghost status
-    int neighbor = status->back();
-    unsigned char level = neighbor < 0 ? 0 : 1;
-    status->pop_back();
-
-    owner->InsertNextValue(neighbor);
-    ghost->InsertNextValue(level);
-    }
-
-  // hand the data to the output, then release the local references
-  output->SetPoints(points);
-  output->SetCells(1, cells);
-  output->GetPointData()->AddArray(vel);
-  output->GetPointData()->AddArray(m);
-  output->GetPointData()->AddArray(uid);
-  output->GetPointData()->AddArray(owner);
-  output->GetPointData()->AddArray(ghost);
-
-  output->Squeeze();
-
-  points->Delete();
-  cells->Delete();
-  vel->Delete();
-  m->Delete();
-  uid->Delete();
-  owner->Delete();
-  ghost->Delete();
-
-  delete xx;
-  delete yy;
-  delete zz;
-  delete vx;
-  delete vy;
-  delete vz;
-  delete mass;
-  delete tag;
-  delete status;
-  delete potential;
-  delete mask;
-
-  return 1;
-}
diff --git a/Filters/Cosmo/vtkPCosmoReader.h b/Filters/Cosmo/vtkPCosmoReader.h
deleted file mode 100644
index 2c4b844..0000000
--- a/Filters/Cosmo/vtkPCosmoReader.h
+++ /dev/null
@@ -1,155 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkPCosmoReader.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*=========================================================================
-
-  Program:   VTK/ParaView Los Alamos National Laboratory Modules (PVLANL)
-  Module:    vtkPCosmoReader.h
-
-Copyright (c) 2009 Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2009. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-// .NAME vtkPCosmoReader - Read a binary cosmology data file
-//
-// .SECTION Description
-// vtkPCosmoReader creates a vtkUnstructuredGrid from a binary cosmology file.
-//
-// A cosmo file is a record format file with no header.
-// One record per particle.
-//
-// Each record is 32 bytes, with fields (in order) for:
-//     x_position (float),
-//     x_velocity (float),
-//     y_position (float),
-//     y_velocity (float),
-//     z_position (float),
-//     z_velocity (float),
-//     mass (float),
-//     identification tag (integer)
-//
-// Total particle data can be split into per processor files, with each file
-// name ending in the processor number.
-//
-
-#ifndef __vtkPCosmoReader_h
-#define __vtkPCosmoReader_h
-
-#include "vtkFiltersCosmoModule.h" // For export macro
-#include "vtkUnstructuredGridAlgorithm.h"
-
-class vtkMultiProcessController;
-class vtkStdString;
-
-class VTKFILTERSCOSMO_EXPORT vtkPCosmoReader : public vtkUnstructuredGridAlgorithm
-{
-public:
-  static vtkPCosmoReader *New();
-  vtkTypeMacro(vtkPCosmoReader, vtkUnstructuredGridAlgorithm);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Specify the name of the cosmology particle binary file to read
-  vtkSetStringMacro(FileName);
-  vtkGetStringMacro(FileName);
-
-  // Description:
-  // Specify the physical box dimensions size (rL)
-  // (default 100.0)
-  vtkSetMacro(RL, float);
-  vtkGetMacro(RL, float);
-
-  // Description:
-  // Specify the ghost cell spacing in Mpc (in rL units)
-  // (edge boundary of processor box)
-  // (default 5)
-  vtkSetMacro(Overlap, float);
-  vtkGetMacro(Overlap, float);
-
-  // Description:
-  // Set the read mode (0 = one-to-one, 1 = round-robin; default 1)
-  vtkSetMacro(ReadMode, int);
-  vtkGetMacro(ReadMode, int);
-
-  // Description:
-  // Set the file type to Gadget or Cosmo format
-  // (0 = Gadget, 1 = Cosmo; default 1)
-  vtkSetMacro(CosmoFormat, int);
-  vtkGetMacro(CosmoFormat, int);
-
-  // Description:
-  // Set the communicator object for interprocess communication
-  virtual vtkMultiProcessController* GetController();
-  virtual void SetController(vtkMultiProcessController*);
-
-protected:
-  vtkPCosmoReader();
-  ~vtkPCosmoReader();
-
-  virtual int RequestInformation
-    (vtkInformation *, vtkInformationVector **, vtkInformationVector *);
-  virtual int RequestData
-    (vtkInformation *, vtkInformationVector **, vtkInformationVector *);
-
-  vtkMultiProcessController* Controller;
-
-  char* FileName; // Name of binary particle file
-  float RL; // The physical box dimensions (rL)
-  float Overlap; // The ghost cell boundary space
-  int ReadMode; // The reading mode
-  int CosmoFormat; // Enable cosmo format or gadget format
-
-private:
-  vtkPCosmoReader(const vtkPCosmoReader&);  // Not implemented.
-  void operator=(const vtkPCosmoReader&);  // Not implemented.
-};
-
-#endif
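
The record layout documented in the removed header above maps onto a simple 32-byte struct (seven 4-byte floats plus one 4-byte integer tag). A minimal sketch; the struct and field names are illustrative, and a real reader would still have to account for the file's byte order, which the class comment does not specify:

    #include <cstdint>

    // One cosmo record, following the field order documented above.
    // All members are 4-byte aligned, so the struct is naturally 32 bytes.
    struct CosmoRecord
    {
      float   xPosition;
      float   xVelocity;
      float   yPosition;
      float   yVelocity;
      float   zPosition;
      float   zVelocity;
      float   mass;
      int32_t tag; // identification tag
    };
    static_assert(sizeof(CosmoRecord) == 32, "cosmo record must be 32 bytes");
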
diff --git a/Filters/Extraction/Testing/Cxx/CMakeLists.txt b/Filters/Extraction/Testing/Cxx/CMakeLists.txt
index c84904a..92b4bd8 100644
--- a/Filters/Extraction/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Extraction/Testing/Cxx/CMakeLists.txt
@@ -1,26 +1,6 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestConvertSelection.cxx
+vtk_add_test_cxx(
+  TestConvertSelection.cxx,NO_VALID
   TestExtractSelection.cxx
   TestExtraction.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/Extraction/Testing/Cxx/TestExtraction.cxx b/Filters/Extraction/Testing/Cxx/TestExtraction.cxx
index 937489e..e13189d 100644
--- a/Filters/Extraction/Testing/Cxx/TestExtraction.cxx
+++ b/Filters/Extraction/Testing/Cxx/TestExtraction.cxx
@@ -54,9 +54,9 @@
 #define YCELLS 3
 #define ZCELLS 3
 
-vtkRenderer *renderer = NULL;
-vtkImageData *sampleData = NULL;
-int DrawSampleData = 0;
+static vtkRenderer *renderer = NULL;
+static vtkImageData *sampleData = NULL;
+static int DrawSampleData = 0;
 
 enum {COLORBYCELL, COLORBYPOINT};
 
diff --git a/Filters/Extraction/Testing/Data/Baseline/ExtractEdgesQuadraticCells.png.md5 b/Filters/Extraction/Testing/Data/Baseline/ExtractEdgesQuadraticCells.png.md5
new file mode 100644
index 0000000..7250476
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/ExtractEdgesQuadraticCells.png.md5
@@ -0,0 +1 @@
+ee66fbeed4eaedb1171738662e78e94c
diff --git a/Filters/Extraction/Testing/Data/Baseline/ExtractEdgesQuadraticCells_1.png.md5 b/Filters/Extraction/Testing/Data/Baseline/ExtractEdgesQuadraticCells_1.png.md5
new file mode 100644
index 0000000..8ff6363
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/ExtractEdgesQuadraticCells_1.png.md5
@@ -0,0 +1 @@
+910c59920921fa1cba96e78f9a98f684
diff --git a/Filters/Extraction/Testing/Data/Baseline/ExtractTensors.png.md5 b/Filters/Extraction/Testing/Data/Baseline/ExtractTensors.png.md5
new file mode 100644
index 0000000..5207caf
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/ExtractTensors.png.md5
@@ -0,0 +1 @@
+90284c8235cf8fe8b47be46d4f627184
diff --git a/Filters/Extraction/Testing/Data/Baseline/TestExtractSelection.png.md5 b/Filters/Extraction/Testing/Data/Baseline/TestExtractSelection.png.md5
new file mode 100644
index 0000000..96b0742
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/TestExtractSelection.png.md5
@@ -0,0 +1 @@
+6b936be2675c919e589bfb34593fa3c1
diff --git a/Filters/Extraction/Testing/Data/Baseline/TestExtraction.png.md5 b/Filters/Extraction/Testing/Data/Baseline/TestExtraction.png.md5
new file mode 100644
index 0000000..b12e65d
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/TestExtraction.png.md5
@@ -0,0 +1 @@
+73e3503fda5b4c1bf5fb9ea8f74d42b5
diff --git a/Filters/Extraction/Testing/Data/Baseline/TestExtraction_1.png.md5 b/Filters/Extraction/Testing/Data/Baseline/TestExtraction_1.png.md5
new file mode 100644
index 0000000..4597aa5
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/TestExtraction_1.png.md5
@@ -0,0 +1 @@
+04fcc1f6fb1a59a6d49302eb67799466
diff --git a/Filters/Extraction/Testing/Data/Baseline/TestExtraction_2.png.md5 b/Filters/Extraction/Testing/Data/Baseline/TestExtraction_2.png.md5
new file mode 100644
index 0000000..8ee64e5
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/TestExtraction_2.png.md5
@@ -0,0 +1 @@
+2aa66647b2144a08ed0c1689f8d720ad
diff --git a/Filters/Extraction/Testing/Data/Baseline/TestExtraction_3.png.md5 b/Filters/Extraction/Testing/Data/Baseline/TestExtraction_3.png.md5
new file mode 100644
index 0000000..b90a6b5
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/TestExtraction_3.png.md5
@@ -0,0 +1 @@
+38f1c7950c84bb4bae44f62f8ebb5519
diff --git a/Filters/Extraction/Testing/Data/Baseline/extractPolyData.png.md5 b/Filters/Extraction/Testing/Data/Baseline/extractPolyData.png.md5
new file mode 100644
index 0000000..b38eab3
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/extractPolyData.png.md5
@@ -0,0 +1 @@
+a97bdd5c11cf4b60d4ecb824e7771a4a
diff --git a/Filters/Extraction/Testing/Data/Baseline/extractRectGrid.png.md5 b/Filters/Extraction/Testing/Data/Baseline/extractRectGrid.png.md5
new file mode 100644
index 0000000..7077d6c
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/extractRectGrid.png.md5
@@ -0,0 +1 @@
+a1193f818db33b28a85acfbe3a5c34a6
diff --git a/Filters/Extraction/Testing/Data/Baseline/extractUGrid.png.md5 b/Filters/Extraction/Testing/Data/Baseline/extractUGrid.png.md5
new file mode 100644
index 0000000..a401c25
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/extractUGrid.png.md5
@@ -0,0 +1 @@
+48ecaefd90caf9267efdd8cf9e1bd87d
diff --git a/Filters/Extraction/Testing/Data/Baseline/extractUGrid_1.png.md5 b/Filters/Extraction/Testing/Data/Baseline/extractUGrid_1.png.md5
new file mode 100644
index 0000000..4e88356
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/extractUGrid_1.png.md5
@@ -0,0 +1 @@
+35546e964face99028bc34f9d85ccfb0
diff --git a/Filters/Extraction/Testing/Data/Baseline/extractVectors.png.md5 b/Filters/Extraction/Testing/Data/Baseline/extractVectors.png.md5
new file mode 100644
index 0000000..b4360a5
--- /dev/null
+++ b/Filters/Extraction/Testing/Data/Baseline/extractVectors.png.md5
@@ -0,0 +1 @@
+3d1063ecbb9e413eaeb9646b09fe6b40
diff --git a/Filters/Extraction/Testing/Python/CMakeLists.txt b/Filters/Extraction/Testing/Python/CMakeLists.txt
index da953ea..dc03824 100644
--- a/Filters/Extraction/Testing/Python/CMakeLists.txt
+++ b/Filters/Extraction/Testing/Python/CMakeLists.txt
@@ -1,9 +1,6 @@
-add_test_python(ExtractEdgesQuadraticCells.py Graphics)
-add_test_python(ExtractTensors.py Graphics)
-add_test_python(extractPolyData.py Graphics)
-
-if(VTK_DATA_ROOT)
-  add_test_python(extractUGrid.py Graphics)
-  add_test_python(extractRectGrid.py Graphics)
-  add_test_python(extractVectors.py Graphics)
-endif()
+vtk_add_test_python(ExtractEdgesQuadraticCells.py)
+vtk_add_test_python(ExtractTensors.py)
+vtk_add_test_python(extractPolyData.py)
+vtk_add_test_python(extractRectGrid.py)
+vtk_add_test_python(extractUGrid.py)
+vtk_add_test_python(extractVectors.py)
diff --git a/Filters/Extraction/Testing/Tcl/CMakeLists.txt b/Filters/Extraction/Testing/Tcl/CMakeLists.txt
index f68d95b..3e8f3da 100644
--- a/Filters/Extraction/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Extraction/Testing/Tcl/CMakeLists.txt
@@ -1,8 +1,6 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(extractRectGrid Graphics)
-  add_test_tcl(extractUGrid Graphics)
-  add_test_tcl(extractVectors Graphics)
-endif()
-add_test_tcl(ExtractEdgesQuadraticCells Graphics)
-add_test_tcl(ExtractTensors Graphics)
-add_test_tcl(extractPolyData Graphics)
+vtk_add_test_tcl(extractRectGrid)
+vtk_add_test_tcl(extractUGrid)
+vtk_add_test_tcl(extractVectors)
+vtk_add_test_tcl(ExtractEdgesQuadraticCells)
+vtk_add_test_tcl(ExtractTensors)
+vtk_add_test_tcl(extractPolyData)
diff --git a/Filters/Extraction/vtkExtractArraysOverTime.cxx b/Filters/Extraction/vtkExtractArraysOverTime.cxx
index dd6cefc..584af48 100644
--- a/Filters/Extraction/vtkExtractArraysOverTime.cxx
+++ b/Filters/Extraction/vtkExtractArraysOverTime.cxx
@@ -41,7 +41,7 @@
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include "vtkUnsignedCharArray.h"
 
-#include "assert.h"
+#include <cassert>
 #include <map>
 #include <string>
 #include <vtksys/ios/sstream>
diff --git a/Filters/Extraction/vtkExtractDataOverTime.h b/Filters/Extraction/vtkExtractDataOverTime.h
index 09ca562..40c98c0 100644
--- a/Filters/Extraction/vtkExtractDataOverTime.h
+++ b/Filters/Extraction/vtkExtractDataOverTime.h
@@ -46,7 +46,7 @@ public:
 
 protected:
   vtkExtractDataOverTime();
-  ~vtkExtractDataOverTime() {};
+  ~vtkExtractDataOverTime() {}
 
   int RequestInformation( vtkInformation *request,
     vtkInformationVector **inputVector, vtkInformationVector *outputVector);
diff --git a/Filters/Extraction/vtkExtractGeometry.cxx b/Filters/Extraction/vtkExtractGeometry.cxx
index 31b0a16..34bc589 100644
--- a/Filters/Extraction/vtkExtractGeometry.cxx
+++ b/Filters/Extraction/vtkExtractGeometry.cxx
@@ -16,13 +16,16 @@
 
 #include "vtkCell.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkFloatArray.h"
 #include "vtkIdList.h"
 #include "vtkImplicitFunction.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
+#include "vtkSmartPointer.h"
 #include "vtkUnstructuredGrid.h"
 
 vtkStandardNewMacro(vtkExtractGeometry);
@@ -81,9 +84,14 @@ int vtkExtractGeometry::RequestData(
   vtkUnstructuredGrid *output = vtkUnstructuredGrid::SafeDownCast(
     outInfo->Get(vtkDataObject::DATA_OBJECT()));
 
-  vtkIdType ptId, numPts, numCells, i, cellId, newCellId, newId, *pointMap;
-  vtkIdList *cellPts;
-  vtkCell *cell;
+  // May be NULL, check before dereferencing.
+  vtkUnstructuredGrid *gridInput = vtkUnstructuredGrid::SafeDownCast(input);
+
+  vtkIdType ptId, numPts, numCells, i, newCellId, newId, *pointMap;
+  vtkSmartPointer<vtkCellIterator> cellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
+  vtkIdList *pointIdList;
+  int cellType;
   int numCellPts;
   double x[3];
   double multiplier;
@@ -169,18 +177,19 @@ int vtkExtractGeometry::RequestData(
   // Now loop over all cells to see whether they are inside implicit
   // function (or on boundary if ExtractBoundaryCells is on).
   //
-  for (cellId=0; cellId < numCells; cellId++)
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
     {
-    cell = input->GetCell(cellId);
-    cellPts = cell->GetPointIds();
-    numCellPts = cell->GetNumberOfPoints();
+    cellType = cellIter->GetCellType();
+    numCellPts = cellIter->GetNumberOfPoints();
+    pointIdList = cellIter->GetPointIds();
 
     newCellPts->Reset();
     if ( ! this->ExtractBoundaryCells ) //requires less work
       {
       for ( npts=0, i=0; i < numCellPts; i++, npts++)
         {
-        ptId = cellPts->GetId(i);
+        ptId = pointIdList->GetId(i);
         if ( pointMap[ptId] < 0 )
           {
           break; //this cell won't be inserted
@@ -196,7 +205,7 @@ int vtkExtractGeometry::RequestData(
       {
       for ( npts=0, i=0; i < numCellPts; i++ )
         {
-        ptId = cellPts->GetId(i);
+        ptId = pointIdList->GetId(i);
         if ( newScalars->GetValue(ptId) <= 0.0 )
           {
           npts++;
@@ -221,7 +230,7 @@ int vtkExtractGeometry::RequestData(
         {
         for ( i=0; i < numCellPts; i++ )
           {
-          ptId = cellPts->GetId(i);
+          ptId = pointIdList->GetId(i);
           if ( pointMap[ptId] < 0 )
             {
             input->GetPoint(ptId, x);
@@ -252,15 +261,14 @@ int vtkExtractGeometry::RequestData(
     if ( extraction_condition )
       {
       // special handling for polyhedron cells
-      if (vtkUnstructuredGrid::SafeDownCast(input) &&
-          cell->GetCellType() == VTK_POLYHEDRON)
+      if (gridInput && cellType == VTK_POLYHEDRON)
         {
         newCellPts->Reset();
-        vtkUnstructuredGrid::SafeDownCast(input)->GetFaceStream(cellId, newCellPts);
+        gridInput->GetFaceStream(cellIter->GetCellId(), newCellPts);
         vtkUnstructuredGrid::ConvertFaceStreamPointIds(newCellPts, pointMap);
         }
-      newCellId = output->InsertNextCell(cell->GetCellType(),newCellPts);
-      outputCD->CopyData(cd,cellId,newCellId);
+      newCellId = output->InsertNextCell(cellType,newCellPts);
+      outputCD->CopyData(cd, cellIter->GetCellId(), newCellId);
       }
     }//for all cells
 
diff --git a/Filters/Extraction/vtkExtractGrid.h b/Filters/Extraction/vtkExtractGrid.h
index 362cd6c..7c1967e 100644
--- a/Filters/Extraction/vtkExtractGrid.h
+++ b/Filters/Extraction/vtkExtractGrid.h
@@ -76,7 +76,7 @@ public:
 
 protected:
   vtkExtractGrid();
-  ~vtkExtractGrid() {};
+  ~vtkExtractGrid() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int RequestInformation(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Extraction/vtkExtractRectilinearGrid.h b/Filters/Extraction/vtkExtractRectilinearGrid.h
index eaf45fa..c48dddb 100644
--- a/Filters/Extraction/vtkExtractRectilinearGrid.h
+++ b/Filters/Extraction/vtkExtractRectilinearGrid.h
@@ -65,7 +65,7 @@ public:
 
 protected:
   vtkExtractRectilinearGrid();
-  ~vtkExtractRectilinearGrid() {};
+  ~vtkExtractRectilinearGrid() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int RequestInformation(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Extraction/vtkExtractTensorComponents.h b/Filters/Extraction/vtkExtractTensorComponents.h
index 576299d..6b362d0 100644
--- a/Filters/Extraction/vtkExtractTensorComponents.h
+++ b/Filters/Extraction/vtkExtractTensorComponents.h
@@ -147,7 +147,7 @@ public:
 
 protected:
   vtkExtractTensorComponents();
-  ~vtkExtractTensorComponents() {};
+  ~vtkExtractTensorComponents() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Extraction/vtkExtractUnstructuredGrid.h b/Filters/Extraction/vtkExtractUnstructuredGrid.h
index 77a32da..6c8a1b4 100644
--- a/Filters/Extraction/vtkExtractUnstructuredGrid.h
+++ b/Filters/Extraction/vtkExtractUnstructuredGrid.h
@@ -123,7 +123,7 @@ public:
 
 protected:
   vtkExtractUnstructuredGrid();
-  ~vtkExtractUnstructuredGrid() {};
+  ~vtkExtractUnstructuredGrid() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/FlowPaths/Testing/Cxx/CMakeLists.txt b/Filters/FlowPaths/Testing/Cxx/CMakeLists.txt
index 064f4d9..55354e0 100644
--- a/Filters/FlowPaths/Testing/Cxx/CMakeLists.txt
+++ b/Filters/FlowPaths/Testing/Cxx/CMakeLists.txt
@@ -1,30 +1,7 @@
-set(MyTests
+vtk_add_test_cxx(
   TestBSPTree.cxx
-  TestStreamTracer
-  TestAMRInterpolatedVelocityField
-  TestParticleTracers
+  TestStreamTracer.cxx,NO_VALID
+  TestAMRInterpolatedVelocityField.cxx,NO_VALID
+  TestParticleTracers.cxx,NO_VALID
   )
-
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach (test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-
-  if(VTK_DATA_ROOT)
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png)
-  else()
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-      -T ${VTK_TEST_OUTPUT_DIR}
-      )
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Filters/FlowPaths/Testing/Cxx/TestAMRInterpolatedVelocityField.cxx b/Filters/FlowPaths/Testing/Cxx/TestAMRInterpolatedVelocityField.cxx
index 488ca2b..9534df1 100644
--- a/Filters/FlowPaths/Testing/Cxx/TestAMRInterpolatedVelocityField.cxx
+++ b/Filters/FlowPaths/Testing/Cxx/TestAMRInterpolatedVelocityField.cxx
@@ -21,7 +21,7 @@
 #include <vtkCompositeDataPipeline.h>
 
 #define RETURNONFALSE(b)\
-  if(!b) \
+  if(!(b)) \
     {\
     vtkAlgorithm::SetDefaultExecutivePrototype(NULL);\
     return EXIT_FAILURE;\
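
The RETURNONFALSE change above wraps the macro argument in parentheses so the whole
expression is negated, not just its first operand. A self-contained sketch of the
pitfall; CHECK_BAD and CHECK_GOOD are hypothetical stand-ins for the macro:

    #include <cstdio>

    #define CHECK_BAD(b)  if (!b)   { std::printf("check failed\n"); }
    #define CHECK_GOOD(b) if (!(b)) { std::printf("check failed\n"); }

    int main()
    {
      int x = 2, y = 1;
      CHECK_BAD(x == y);   // expands to if (!x == y) -> (0 == 1) -> false: failure missed
      CHECK_GOOD(x == y);  // expands to if (!(x == y)) -> true: prints "check failed"
      return 0;
    }
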
diff --git a/Filters/FlowPaths/Testing/Cxx/TestParticleTracers.cxx b/Filters/FlowPaths/Testing/Cxx/TestParticleTracers.cxx
index dc1c6db..63bca84 100644
--- a/Filters/FlowPaths/Testing/Cxx/TestParticleTracers.cxx
+++ b/Filters/FlowPaths/Testing/Cxx/TestParticleTracers.cxx
@@ -31,7 +31,7 @@
 #include "vtkFloatArray.h"
 #include "vtkSmartPointer.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
-#include <assert.h>
+#include <cassert>
 #include <vector>
 using namespace std;
 
diff --git a/Filters/FlowPaths/Testing/Cxx/TestStreamTracer.cxx b/Filters/FlowPaths/Testing/Cxx/TestStreamTracer.cxx
index 6c10c31..8b1d21f 100644
--- a/Filters/FlowPaths/Testing/Cxx/TestStreamTracer.cxx
+++ b/Filters/FlowPaths/Testing/Cxx/TestStreamTracer.cxx
@@ -23,7 +23,7 @@
 #include "vtkPointData.h"
 #include "vtkSmartPointer.h"
 #include "vtkDoubleArray.h"
-#include <assert.h>
+#include <cassert>
 
 int TestFieldNames(int, char*[])
 {
diff --git a/Filters/FlowPaths/Testing/Data/Baseline/TestBSPTree.png.md5 b/Filters/FlowPaths/Testing/Data/Baseline/TestBSPTree.png.md5
new file mode 100644
index 0000000..b2b15ae
--- /dev/null
+++ b/Filters/FlowPaths/Testing/Data/Baseline/TestBSPTree.png.md5
@@ -0,0 +1 @@
+9f69496dc9b8a5f78f9d7efe4686cd91
diff --git a/Filters/FlowPaths/vtkAMRInterpolatedVelocityField.cxx b/Filters/FlowPaths/vtkAMRInterpolatedVelocityField.cxx
index 0536d16..8e6bb54 100644
--- a/Filters/FlowPaths/vtkAMRInterpolatedVelocityField.cxx
+++ b/Filters/FlowPaths/vtkAMRInterpolatedVelocityField.cxx
@@ -2,7 +2,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkUniformGrid.h"
 #include "vtkOverlappingAMR.h"
-#include <assert.h>
+#include <cassert>
 //----------------------------------------------------------------------------
 namespace
 {
diff --git a/Filters/FlowPaths/vtkCachingInterpolatedVelocityField.cxx b/Filters/FlowPaths/vtkCachingInterpolatedVelocityField.cxx
index d1e1fc5..113160a 100644
--- a/Filters/FlowPaths/vtkCachingInterpolatedVelocityField.cxx
+++ b/Filters/FlowPaths/vtkCachingInterpolatedVelocityField.cxx
@@ -357,6 +357,19 @@ bool vtkCachingInterpolatedVelocityField::InterpolatePoint(
   return 1;
 }
 //---------------------------------------------------------------------------
+bool vtkCachingInterpolatedVelocityField::InterpolatePoint(
+  vtkCachingInterpolatedVelocityField* inCIVF, vtkPointData *outPD, vtkIdType outIndex)
+{
+  if (!this->Cache || !this->Cache->DataSet)
+    {
+    return 0;
+    }
+  vtkPointData* inPD = inCIVF->Cache->DataSet->GetPointData();
+  outPD->InterpolatePoint(
+    inPD, outIndex, this->Cache->Cell->PointIds, &this->Weights[0]);
+  return 1;
+}
+//---------------------------------------------------------------------------
 int vtkCachingInterpolatedVelocityField::GetLastWeights(double* w)
 {
   int j, numPts;
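
The InterpolatePoint() overload added above takes the point data from another
field's cached dataset but reuses this field's cached cell point ids and weights;
the actual blending is done by vtkPointData::InterpolatePoint(). A minimal,
self-contained sketch of that call (the array name, ids and weights are made up):

    #include "vtkDoubleArray.h"
    #include "vtkIdList.h"
    #include "vtkNew.h"
    #include "vtkPointData.h"

    void BlendSketch()
    {
      vtkNew<vtkDoubleArray> vel;             // 3-component vector array, two points
      vel->SetName("vel");
      vel->SetNumberOfComponents(3);
      vel->InsertNextTuple3(1.0, 0.0, 0.0);   // point 0
      vel->InsertNextTuple3(0.0, 1.0, 0.0);   // point 1

      vtkNew<vtkPointData> inPD;
      inPD->SetVectors(vel.GetPointer());

      vtkNew<vtkPointData> outPD;
      outPD->InterpolateAllocate(inPD.GetPointer());

      vtkNew<vtkIdList> ids;                  // the cached cell's point ids
      ids->InsertNextId(0);
      ids->InsertNextId(1);
      double weights[2] = { 0.25, 0.75 };     // the cached interpolation weights

      // Writes 0.25*(1,0,0) + 0.75*(0,1,0) = (0.25, 0.75, 0) at output index 0.
      outPD->InterpolatePoint(inPD.GetPointer(), 0, ids.GetPointer(), weights);
    }
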
diff --git a/Filters/FlowPaths/vtkCachingInterpolatedVelocityField.h b/Filters/FlowPaths/vtkCachingInterpolatedVelocityField.h
index d9c807a..fb16d79 100644
--- a/Filters/FlowPaths/vtkCachingInterpolatedVelocityField.h
+++ b/Filters/FlowPaths/vtkCachingInterpolatedVelocityField.h
@@ -145,6 +145,8 @@ protected:
   // vtkTemporalInterpolatedVelocityField
   void FastCompute(IVFDataSetInfo *cache, double f[3]);
   bool InterpolatePoint(vtkPointData *outPD, vtkIdType outIndex);
+  bool InterpolatePoint(vtkCachingInterpolatedVelocityField *inCIVF,
+                        vtkPointData *outPD, vtkIdType outIndex);
   vtkGenericCell *GetLastCell();
 //ETX
 
diff --git a/Filters/FlowPaths/vtkDashedStreamLine.h b/Filters/FlowPaths/vtkDashedStreamLine.h
index 14f8f5f..97c84ec 100644
--- a/Filters/FlowPaths/vtkDashedStreamLine.h
+++ b/Filters/FlowPaths/vtkDashedStreamLine.h
@@ -51,7 +51,7 @@ public:
 
 protected:
   vtkDashedStreamLine();
-  ~vtkDashedStreamLine() {};
+  ~vtkDashedStreamLine() {}
 
   // Convert streamer array into vtkPolyData
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/FlowPaths/vtkParticlePathFilter.h b/Filters/FlowPaths/vtkParticlePathFilter.h
index 2563d5a..ba9623b 100644
--- a/Filters/FlowPaths/vtkParticlePathFilter.h
+++ b/Filters/FlowPaths/vtkParticlePathFilter.h
@@ -32,7 +32,7 @@
 class VTKFILTERSFLOWPATHS_EXPORT ParticlePathFilterInternal
 {
  public:
-  ParticlePathFilterInternal():Filter(NULL){};
+  ParticlePathFilterInternal():Filter(NULL){}
   void Initialize(vtkParticleTracerBase* filter);
   virtual ~ParticlePathFilterInternal(){}
   virtual int OutputParticles(vtkPolyData* poly);
diff --git a/Filters/FlowPaths/vtkParticleTracerBase.cxx b/Filters/FlowPaths/vtkParticleTracerBase.cxx
index 60db7b8..73fd2f4 100644
--- a/Filters/FlowPaths/vtkParticleTracerBase.cxx
+++ b/Filters/FlowPaths/vtkParticleTracerBase.cxx
@@ -1,60 +1,53 @@
 /*=========================================================================
 
-Program:   Visualization Toolkit
-Module:    vtkParticleTracerBase.cxx
+  Program:   Visualization Toolkit
+  Module:    vtkParticleTracerBase.cxx
 
-Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-All rights reserved.
-See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
 
-This software is distributed WITHOUT ANY WARRANTY; without even
-the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE.  See the above copyright notice for more information.
+  This software is distributed WITHOUT ANY WARRANTY; without even
+  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+  PURPOSE.  See the above copyright notice for more information.
 
-=========================================================================*/
+  =========================================================================*/
 #include "vtkParticleTracerBase.h"
 
+#include "vtkAbstractParticleWriter.h"
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
+#include "vtkCharArray.h"
 #include "vtkCompositeDataIterator.h"
-#include "vtkCompositeDataPipeline.h"
-#include "vtkDataSetAttributes.h"
 #include "vtkDoubleArray.h"
 #include "vtkExecutive.h"
 #include "vtkGenericCell.h"
-#include "vtkIdList.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkIntArray.h"
 #include "vtkFloatArray.h"
-#include "vtkDoubleArray.h"
-#include "vtkCharArray.h"
 #include "vtkMath.h"
 #include "vtkMultiBlockDataSet.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
-#include "vtkPointSet.h"
 #include "vtkPolyData.h"
-#include "vtkPolyLine.h"
 #include "vtkRungeKutta2.h"
 #include "vtkRungeKutta4.h"
 #include "vtkRungeKutta45.h"
 #include "vtkSmartPointer.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
 #include "vtkTemporalInterpolatedVelocityField.h"
-#include "vtkOutputWindow.h"
-#include "vtkAbstractParticleWriter.h"
-#include "vtkToolkits.h" // For VTK_USE_MPI
-#include "assert.h"
+#include <cassert>
 
 #ifdef WIN32
-  #undef JB_H5PART_PARTICLE_OUTPUT
+#undef JB_H5PART_PARTICLE_OUTPUT
 #else
 //  #define JB_H5PART_PARTICLE_OUTPUT
 #endif
 
 #ifdef JB_H5PART_PARTICLE_OUTPUT
 // #include "vtkH5PartWriter.h"
-  #include "vtkXMLParticleWriter.h"
+#include "vtkXMLParticleWriter.h"
 #endif
 
 #include <functional>
@@ -72,19 +65,26 @@ const double vtkParticleTracerBase::Epsilon = 1.0E-12;
 using namespace vtkParticleTracerBaseNamespace;
 
 vtkCxxSetObjectMacro(vtkParticleTracerBase, ParticleWriter, vtkAbstractParticleWriter);
-vtkCxxSetObjectMacro(vtkParticleTracerBase,Integrator,vtkInitialValueProblemSolver); //XXX
-
-#define ParticleTracerSetMacro(name,type) \
-void vtkParticleTracerBase::Set##name (type _arg) \
-  { \
-  if (this->name == _arg) \
-    {\
-    return; \
-    }\
-  this->name = _arg;  \
-  this->ResetCache(); \
-  this->Modified(); \
+vtkCxxSetObjectMacro(vtkParticleTracerBase,Integrator,vtkInitialValueProblemSolver);
+
+// This SetMacro is different from the regular vtkSetMacro
+// because it resets the cache as well.
+#define ParticleTracerSetMacro(name,type)               \
+  void vtkParticleTracerBase::Set##name (type _arg)     \
+  {                                                     \
+    if (this->name == _arg)                             \
+      {                                                 \
+      return;                                           \
+      }                                                 \
+    this->name = _arg;                                  \
+    this->ResetCache();                                 \
+    this->Modified();                                   \
   }
+ParticleTracerSetMacro(StartTime, double)
+ParticleTracerSetMacro(ComputeVorticity, bool);
+ParticleTracerSetMacro(RotationScale, double)
+ParticleTracerSetMacro(ForceReinjectionEveryNSteps,int);
+ParticleTracerSetMacro(TerminalSpeed, double);
 
 namespace
 {
@@ -111,8 +111,6 @@ namespace
 //---------------------------------------------------------------------------
 vtkParticleTracerBase::vtkParticleTracerBase()
 {
-  this->SetNumberOfInputPorts(2);
-
   // by default process active point vectors
   this->SetInputArrayToProcess(0,0,0,vtkDataObject::FIELD_ASSOCIATION_POINTS,
                                vtkDataSetAttributes::VECTORS);
@@ -122,8 +120,8 @@ vtkParticleTracerBase::vtkParticleTracerBase()
   this->ForceReinjectionEveryNSteps = 0;
   this->ReinjectionCounter          = 0;
   this->AllFixedGeometry            = 1;
-  this->StaticMesh                  = 1;
-  this->StaticSeeds                 = 1;
+  this->StaticMesh                  = 0;
+  this->StaticSeeds                 = 0;
   this->ComputeVorticity            = 1;
   this->IgnorePipelineTime          = 1;
   this->ParticleWriter              = NULL;
@@ -140,14 +138,9 @@ vtkParticleTracerBase::vtkParticleTracerBase()
   this->RotationScale    = 1.0;
   this->MaximumError         = 1.0e-6;
   this->TerminalSpeed = vtkParticleTracerBase::Epsilon;
-
-  //
-
   this->IntegrationStep = 0.5;
-// we are not actually using these for now
-  //
+
   this->Interpolator = vtkSmartPointer<vtkTemporalInterpolatedVelocityField>::New();
-  //
   this->SetNumberOfInputPorts(2);
 
 #ifdef JB_H5PART_PARTICLE_OUTPUT
@@ -165,27 +158,23 @@ vtkParticleTracerBase::vtkParticleTracerBase()
   this->SetIntegratorType(RUNGE_KUTTA4);
   this->DisableResetCache = 0;
 }
+
 //---------------------------------------------------------------------------
 vtkParticleTracerBase::~vtkParticleTracerBase()
 {
   this->SetParticleWriter(NULL);
-  if (this->ParticleFileName)
-  {
-    delete []this->ParticleFileName;
-    this->ParticleFileName = NULL;
-  }
+  this->SetParticleFileName(NULL);
 
   this->CachedData[0] = NULL;
-  this->CachedData[1] = NULL;;
+  this->CachedData[1] = NULL;
 
   this->SetIntegrator(0);
   this->SetInterpolatorPrototype(0);
-
 }
+
 //----------------------------------------------------------------------------
 int vtkParticleTracerBase::FillInputPortInformation(
-  int port,
-  vtkInformation* info)
+  int port, vtkInformation* info)
 {
   // port 0 must be a temporal collection of any type
   // the executive should put a temporal collection in when
@@ -202,16 +191,19 @@ int vtkParticleTracerBase::FillInputPortInformation(
     }
   return 1;
 }
+
 //----------------------------------------------------------------------------
 void vtkParticleTracerBase::AddSourceConnection(vtkAlgorithmOutput* input)
 {
   this->AddInputConnection(1, input);
 }
+
 //----------------------------------------------------------------------------
 void vtkParticleTracerBase::RemoveAllSources()
 {
   this->SetInputConnection(1, 0);
 }
+
 //----------------------------------------------------------------------------
 int vtkParticleTracerBase::ProcessRequest(
   vtkInformation* request,
@@ -227,7 +219,7 @@ int vtkParticleTracerBase::ProcessRequest(
     }
   if(request->Has(vtkStreamingDemandDrivenPipeline::REQUEST_UPDATE_EXTENT()))
     {
-      return this->RequestUpdateExtent(request, inputVector, outputVector);
+    return this->RequestUpdateExtent(request, inputVector, outputVector);
     }
   if(request->Has(vtkDemandDrivenPipeline::REQUEST_DATA()))
     {
@@ -235,6 +227,7 @@ int vtkParticleTracerBase::ProcessRequest(
     }
   return 1;
 }
+
 //----------------------------------------------------------------------------
 int vtkParticleTracerBase::RequestInformation(
   vtkInformation *vtkNotUsed(request),
@@ -246,16 +239,17 @@ int vtkParticleTracerBase::RequestInformation(
 
   if (inInfo->Has(vtkStreamingDemandDrivenPipeline::TIME_STEPS()) )
     {
-    unsigned int numberOfInputTimeSteps = inInfo->Length(vtkStreamingDemandDrivenPipeline::TIME_STEPS());
+    unsigned int numberOfInputTimeSteps =
+      inInfo->Length(vtkStreamingDemandDrivenPipeline::TIME_STEPS());
     vtkDebugMacro(<<"vtkParticleTracerBase "
-      "inputVector TIME_STEPS " << numberOfInputTimeSteps);
+                  "inputVector TIME_STEPS " << numberOfInputTimeSteps);
     //
     // Get list of input time step values
     this->InputTimeValues.resize(numberOfInputTimeSteps);
     inInfo->Get( vtkStreamingDemandDrivenPipeline::TIME_STEPS(),
-      &this->InputTimeValues[0] );
-    if (numberOfInputTimeSteps==1 && this->DisableResetCache==0) //warning would be skipped in coprocessing work flow
-      {
+                 &this->InputTimeValues[0] );
+    if (numberOfInputTimeSteps==1 && this->DisableResetCache==0)
+      {  // warning would be skipped in coprocessing workflow
       vtkWarningMacro(<<"Not enough input time steps for particle integration");
       }
 
@@ -268,8 +262,7 @@ int vtkParticleTracerBase::RequestInformation(
       {
       this->SetStartTime(this->InputTimeValues.back());
       }
-
-  }
+    }
   else
     {
     vtkErrorMacro(<<"Input information has no TIME_STEPS set");
@@ -279,20 +272,22 @@ int vtkParticleTracerBase::RequestInformation(
   outInfo->Set(
     vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES(), -1);
 
-
-
   return 1;
 }
-//----------------------------------------------------------------------------
-class WithinTolerance: public std::binary_function<double, double, bool>
+namespace
 {
-public:
+//----------------------------------------------------------------------------
+  class WithinTolerance: public std::binary_function<double, double, bool>
+  {
+  public:
     result_type operator()(first_argument_type a, second_argument_type b) const
     {
       bool result = (fabs(a-b)<=(a*1E-6));
       return (result_type)result;
     }
-};
+  };
+}
+
 //----------------------------------------------------------------------------
 int vtkParticleTracerBase::RequestUpdateExtent(
   vtkInformation *vtkNotUsed(request),
@@ -362,7 +357,7 @@ int vtkParticleTracerBase::RequestUpdateExtent(
         if(pmt>this->ExecuteTime.GetMTime())
           {
           PRINT("Reset cache of because upstream is newer")
-          this->ResetCache();
+            this->ResetCache();
           }
         }
       }
@@ -388,12 +383,15 @@ int vtkParticleTracerBase::RequestUpdateExtent(
 
   return 1;
 }
+
 //---------------------------------------------------------------------------
 int vtkParticleTracerBase::InitializeInterpolator()
 {
-  if (!this->CachedData[0] || !this->CachedData[1]) {
-    return 0;
-  }
+  if (!this->CachedData[0] || !this->CachedData[1])
+    {
+    vtkErrorMacro("Missing data set to process.");
+    return VTK_ERROR;
+    }
   //
   // When Multiblock arrays are processed, some may be empty
   // if the first is empty, we won't find the correct vector name
@@ -416,7 +414,7 @@ int vtkParticleTracerBase::InitializeInterpolator()
     }
   if (!vecname)
     {
-    vtkDebugMacro(<< "Couldn't find vector array " << vecname);
+    vtkErrorMacro(<< "Couldn't find vector array " << vecname);
     return VTK_ERROR;
     }
 
@@ -428,7 +426,8 @@ int vtkParticleTracerBase::InitializeInterpolator()
   int numValidInputBlocks[2] = {0, 0};
   int numTotalInputBlocks[2] = {0, 0};
   this->DataReferenceT[0] = this->DataReferenceT[1] = 0;
-  for (int T=0; T<2; T++) {
+  for (int T=0; T<2; T++)
+    {
     this->CachedBounds[T].clear();
     int index = 0;
     // iterate over all blocks of input and cache the bounds information
@@ -465,24 +464,27 @@ int vtkParticleTracerBase::InitializeInterpolator()
           this->AllFixedGeometry = this->AllFixedGeometry && static_dataset;
           // add the dataset to the interpolator
           this->Interpolator->SetDataSetAtTime(index++, T, this->GetCacheDataTime(T), inp, static_dataset);
-          if (!this->DataReferenceT[T]) {
+          if (!this->DataReferenceT[T])
+            {
             this->DataReferenceT[T] = inp;
-          }
+            }
           //
           numValidInputBlocks[T]++;
           }
         }
       anotherIterP->GoToNextItem();
       }
-  }
+    }
   if (numValidInputBlocks[0]==0 || numValidInputBlocks[1]==0)
     {
-    vtkDebugMacro("Not enough inputs have been found. Can not execute." << numValidInputBlocks[0] << " " << numValidInputBlocks[1]);
+    vtkErrorMacro("Not enough inputs have been found. Can not execute."
+                  << numValidInputBlocks[0] << " " << numValidInputBlocks[1]);
     return VTK_ERROR;
     }
-  if (numValidInputBlocks[0] != numValidInputBlocks[1])
+  if (numValidInputBlocks[0] != numValidInputBlocks[1] && this->StaticMesh)
     {
-    vtkDebugMacro("The number of datasets is different between time steps " << numValidInputBlocks[0] << " " << numValidInputBlocks[1]);
+    vtkErrorMacro("StaticMesh is set to True but the number of datasets is different between time steps "
+                  << numValidInputBlocks[0] << " " << numValidInputBlocks[1]);
     return VTK_ERROR;
     }
   //
@@ -499,6 +501,8 @@ int vtkParticleTracerBase::InitializeInterpolator()
   //
   return VTK_OK;
 }
+
+//---------------------------------------------------------------------------
 int vtkParticleTracerBase::UpdateDataCache(vtkDataObject *data)
 {
   double dataTime = data->GetInformation()->Get(vtkDataObject::DATA_TIME_STEP());
@@ -574,7 +578,6 @@ int vtkParticleTracerBase::UpdateDataCache(vtkDataObject *data)
   return 1;
 }
 
-
 //---------------------------------------------------------------------------
 bool vtkParticleTracerBase::InsideBounds(double point[])
 {
@@ -591,6 +594,7 @@ bool vtkParticleTracerBase::InsideBounds(double point[])
     }
   return false;
 }
+
 //---------------------------------------------------------------------------
 void vtkParticleTracerBase::TestParticles(
   ParticleVector &candidates, ParticleVector &passed, int &count)
@@ -606,7 +610,10 @@ void vtkParticleTracerBase::TestParticles(
     }
 }
 
-void vtkParticleTracerBase::TestParticles(vtkParticleTracerBaseNamespace::ParticleVector &candidates, std::vector<int> &passed)
+//---------------------------------------------------------------------------
+void vtkParticleTracerBase::TestParticles(
+  vtkParticleTracerBaseNamespace::ParticleVector &candidates,
+  std::vector<int> &passed)
 {
   int i = 0;
   for (ParticleIterator it=candidates.begin(); it!=candidates.end(); ++it, ++i)
@@ -616,7 +623,7 @@ void vtkParticleTracerBase::TestParticles(vtkParticleTracerBaseNamespace::Partic
     // if outside bounds, reject instantly
     if (this->InsideBounds(pos))
       {
-       // since this is first test, avoid bad cache tests
+      // since this is first test, avoid bad cache tests
       this->Interpolator->ClearCache();
       info.LocationState = this->Interpolator->TestPoint(pos);
       if (info.LocationState==ID_OUTSIDE_ALL /*|| location==ID_OUTSIDE_T0*/)
@@ -635,9 +642,9 @@ void vtkParticleTracerBase::TestParticles(vtkParticleTracerBaseNamespace::Partic
 }
 
 //---------------------------------------------------------------------------
-void vtkParticleTracerBase::AssignSeedsToProcessors( double time,
-  vtkDataSet *source, int sourceID, int ptId,
-  ParticleVector &LocalSeedPoints, int &LocalAssignedCount)
+void vtkParticleTracerBase::AssignSeedsToProcessors(
+  double time, vtkDataSet *source, int sourceID, int ptId,
+  ParticleVector &localSeedPoints, int &localAssignedCount)
 {
   ParticleVector candidates;
   //
@@ -668,29 +675,29 @@ void vtkParticleTracerBase::AssignSeedsToProcessors( double time,
     info.speed                = 0.0;
     info.ErrorCode            = 0;
     info.PointId = -1;
-  }
+    }
   //
   // Gather all Seeds to all processors for classification
   //
-  this->TestParticles(candidates, LocalSeedPoints, LocalAssignedCount);
-  int TotalAssigned = LocalAssignedCount; (void)TotalAssigned;
+  this->TestParticles(candidates, localSeedPoints, localAssignedCount);
 
   // Assign unique identifiers taking into account uneven distribution
   // across processes and seeds which were rejected
-  this->AssignUniqueIds(LocalSeedPoints);
+  this->AssignUniqueIds(localSeedPoints);
   //
 
 }
+
 //---------------------------------------------------------------------------
 void vtkParticleTracerBase::AssignUniqueIds(
-  vtkParticleTracerBaseNamespace::ParticleVector &LocalSeedPoints)
+  vtkParticleTracerBaseNamespace::ParticleVector &localSeedPoints)
 {
-  vtkIdType ParticleCountOffset = 0;
-  vtkIdType numParticles = LocalSeedPoints.size();
+  vtkIdType particleCountOffset = 0;
+  vtkIdType numParticles = localSeedPoints.size();
   for (vtkIdType i=0; i<numParticles; i++)
     {
-    LocalSeedPoints[i].UniqueParticleId =
-    this->UniqueIdCounter + ParticleCountOffset + i;
+    localSeedPoints[i].UniqueParticleId =
+      this->UniqueIdCounter + particleCountOffset + i;
     }
   this->UniqueIdCounter += numParticles;
 }
@@ -700,13 +707,15 @@ void vtkParticleTracerBase::UpdateParticleList(ParticleVector &candidates)
 {
   int numSeedsNew = static_cast<int>(candidates.size());
   //
-  for (int i=0; i<numSeedsNew; i++) {
+  for (int i=0; i<numSeedsNew; i++)
+    {
     // allocate a new particle on the list and get a reference to it
     this->ParticleHistories.push_back(candidates[i]);
-  }
+    }
   vtkDebugMacro(<< "UpdateParticleList completed with " << this->NumberOfParticles() << " particles");
 }
 
+//---------------------------------------------------------------------------
 int vtkParticleTracerBase::ProcessInput(vtkInformationVector** inputVector)
 {
   Assert(this->CurrentTimeStep>=StartTimeStep  && this->CurrentTimeStep<=TerminationTimeStep);
@@ -730,7 +739,7 @@ int vtkParticleTracerBase::ProcessInput(vtkInformationVector** inputVector)
   return 1;
 }
 
-
+//---------------------------------------------------------------------------
 vtkPolyData* vtkParticleTracerBase::Execute(vtkInformationVector** inputVector)
 {
   Assert(this->CurrentTimeStep>=this->StartTimeStep);
@@ -742,7 +751,7 @@ vtkPolyData* vtkParticleTracerBase::Execute(vtkInformationVector** inputVector)
 
   //set up the output
   vtkPolyData *output = vtkPolyData::New();
-    //
+  //
   // Add the datasets to an interpolator object
   //
   if (this->InitializeInterpolator() != VTK_OK)
@@ -863,7 +872,7 @@ vtkPolyData* vtkParticleTracerBase::Execute(vtkInformationVector** inputVector)
       }
     }
   else
-   {
+    {
     ParticleListIterator  it_first = this->ParticleHistories.begin();
     ParticleListIterator  it_last  = this->ParticleHistories.end();
     ParticleListIterator  it_next;
@@ -997,6 +1006,7 @@ vtkPolyData* vtkParticleTracerBase::Execute(vtkInformationVector** inputVector)
   return output;
 }
 
+//---------------------------------------------------------------------------
 int vtkParticleTracerBase::RequestData(
   vtkInformation *request,
   vtkInformationVector **inputVector,
@@ -1072,20 +1082,18 @@ int vtkParticleTracerBase::RequestData(
   PRINT("RD: "<<this->CurrentTimeStep<<" "<<this->StartTimeStep<<" "<<this->TerminationTimeStep);
   return 1;
 }
+
 //---------------------------------------------------------------------------
 void vtkParticleTracerBase::IntegrateParticle(
-  ParticleListIterator &it,
-  double currenttime, double targettime,
+  ParticleListIterator &it, double currenttime, double targettime,
   vtkInitialValueProblemSolver* integrator)
 {
-
   double epsilon = (targettime-currenttime)/100.0;
   double velocity[3], point1[4], point2[4] = {0.0, 0.0, 0.0, 0.0};
   double minStep=0, maxStep=0;
   double stepWanted, stepTaken=0.0;
   int substeps = 0;
 
-
   ParticleInformation &info = (*it);
   ParticleInformation previous = (*it);
   bool particle_good = true;
@@ -1128,7 +1136,6 @@ void vtkParticleTracerBase::IntegrateParticle(
         maxStep = stepWanted;
         }
 
-
       // Calculate the next step using the integrator provided.
       // If the next point is out of bounds, send it to another process
       if (integrator->ComputeNextStep(
@@ -1138,7 +1145,6 @@ void vtkParticleTracerBase::IntegrateParticle(
         {
         // if the particle is sent, remove it from the list
         info.ErrorCode = 1;
-
         if (!this->RetryWithPush(info, point1, delT, substeps))
           {
           if(previous.PointId <0)
@@ -1246,7 +1252,7 @@ void vtkParticleTracerBase::IntegrateParticle(
   Assert (point1[3]>=(this->GetCacheDataTime(0)-eps) && point1[3]<=(this->GetCacheDataTime(1)+eps));
 #endif
 }
-//---------------------------------------------------------------------------
+
 //---------------------------------------------------------------------------
 void vtkParticleTracerBase::PrintSelf(ostream& os, vtkIndent indent)
 {
@@ -1263,6 +1269,7 @@ void vtkParticleTracerBase::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "TerminationTime: " << this->TerminationTime << endl;
   os << indent << "StaticSeeds: " << this->StaticSeeds << endl;
 }
+
 //---------------------------------------------------------------------------
 bool vtkParticleTracerBase::ComputeDomainExitLocation(
   double pos[4], double p2[4], double intersection[4],
@@ -1280,12 +1287,16 @@ bool vtkParticleTracerBase::ComputeDomainExitLocation(
     // We found an intersection on the edge of the cell.
     // Shift it by a small amount to ensure that it crosses over the edge
     // into the adjoining cell.
-    for (int i=0; i<3; i++) intersection[i] = pos[i] + (t+0.01)*(p2[i]-pos[i]);
+    for (int i=0; i<3; i++)
+      {
+      intersection[i] = pos[i] + (t+0.01)*(p2[i]-pos[i]);
+      }
     // intersection stored, compute T for intersection
     intersection[3] = pos[3] + (t+0.01)*(p2[3]-pos[3]);
     return 1;
     }
 }
+
 //---------------------------------------------------------------------------
 void vtkParticleTracerBase::SetIntegratorType(int type)
 {
@@ -1312,7 +1323,7 @@ void vtkParticleTracerBase::SetIntegratorType(int type)
     }
 }
 
-
+//---------------------------------------------------------------------------
 int vtkParticleTracerBase::GetIntegratorType()
 {
   if (!this->Integrator)
@@ -1334,7 +1345,10 @@ int vtkParticleTracerBase::GetIntegratorType()
   return UNKNOWN;
 }
 
-void vtkParticleTracerBase::CalculateVorticity( vtkGenericCell* cell, double pcoords[3], vtkDoubleArray* cellVectors, double vorticity[3] )
+//---------------------------------------------------------------------------
+void vtkParticleTracerBase::CalculateVorticity(
+  vtkGenericCell* cell, double pcoords[3],
+  vtkDoubleArray* cellVectors, double vorticity[3] )
 {
   double* cellVel;
   double derivs[9];
@@ -1346,11 +1360,13 @@ void vtkParticleTracerBase::CalculateVorticity( vtkGenericCell* cell, double pco
   vorticity[2] = derivs[3] - derivs[1];
 }
 
+//---------------------------------------------------------------------------
 double vtkParticleTracerBase::GetCacheDataTime(int i)
 {
   return this->CachedData[i]->GetInformation()->Get(vtkDataObject::DATA_TIME_STEP());
 }
 
+//---------------------------------------------------------------------------
 double vtkParticleTracerBase::GetCacheDataTime()
 {
   double currentTime = CachedData[1]? this->GetCacheDataTime(1):
@@ -1359,11 +1375,13 @@ double vtkParticleTracerBase::GetCacheDataTime()
 
 }
 
+//---------------------------------------------------------------------------
 unsigned int vtkParticleTracerBase::NumberOfParticles()
 {
   return static_cast<unsigned int>(this->ParticleHistories.size());
 }
 
+//---------------------------------------------------------------------------
 void vtkParticleTracerBase::ResetCache()
 {
   if(this->DisableResetCache==0)
@@ -1382,6 +1400,7 @@ void vtkParticleTracerBase::ResetCache()
     }
 }
 
+//---------------------------------------------------------------------------
 void vtkParticleTracerBase::SetTerminationTime(double t)
 {
   if(t==this->TerminationTime)
@@ -1404,7 +1423,7 @@ void vtkParticleTracerBase::SetTerminationTime(double t)
   this->TerminationTime = t;
 }
 
-
+//---------------------------------------------------------------------------
 void vtkParticleTracerBase::CreateProtoPD(vtkDataObject* input)
 {
   this->ProtoPD = NULL;
@@ -1431,14 +1450,11 @@ void vtkParticleTracerBase::CreateProtoPD(vtkDataObject* input)
 }
 
 //---------------------------------------------------------------------------
-bool vtkParticleTracerBase::RetryWithPush(ParticleInformation &info,  double* point1,double delT, int substeps)
+bool vtkParticleTracerBase::RetryWithPush(
+  ParticleInformation &info,  double* point1,double delT, int substeps)
 {
   double velocity[3];
   this->Interpolator->ClearCache();
-  if (info.UniqueParticleId==3)
-    {
-    vtkDebugMacro(<< "3 is about to be sent");
-    }
 
   info.LocationState = this->Interpolator->TestPoint(point1);
 
@@ -1483,18 +1499,20 @@ bool vtkParticleTracerBase::RetryWithPush(ParticleInformation &info,  double* po
 
   info.CurrentPosition.x[3] += delT;
   info.LocationState = this->Interpolator->TestPoint(info.CurrentPosition.x);
+  info.age += delT;
 
   if (info.LocationState!=ID_OUTSIDE_ALL)
     {
     // a push helped the particle get back into a dataset,
-    info.age += delT;
     info.ErrorCode = 6;
     return 1;
     }
   return 0;
 }
 
-void vtkParticleTracerBase::AddParticle( vtkParticleTracerBaseNamespace::ParticleInformation &info, double* velocity)
+//---------------------------------------------------------------------------
+void vtkParticleTracerBase::AddParticle(
+  vtkParticleTracerBaseNamespace::ParticleInformation &info, double* velocity)
 {
   const double    *coord = info.CurrentPosition.x;
   vtkIdType tempId = this->OutputCoordinates->InsertNextPoint(coord);
@@ -1574,6 +1592,7 @@ void vtkParticleTracerBase::AddParticle( vtkParticleTracerBaseNamespace::Particl
 
 }
 
+//---------------------------------------------------------------------------
 bool vtkParticleTracerBase::IsPointDataValid(vtkDataObject* input)
 {
   if(vtkCompositeDataSet* cdInput = vtkCompositeDataSet::SafeDownCast(input))
@@ -1585,6 +1604,7 @@ bool vtkParticleTracerBase::IsPointDataValid(vtkDataObject* input)
   return true;
 }
 
+//---------------------------------------------------------------------------
 bool vtkParticleTracerBase::IsPointDataValid(vtkCompositeDataSet* input,
                                              std::vector<std::string>& arrayNames)
 {
@@ -1609,6 +1629,7 @@ bool vtkParticleTracerBase::IsPointDataValid(vtkCompositeDataSet* input,
   return true;
 }
 
+//---------------------------------------------------------------------------
 void vtkParticleTracerBase::GetPointDataArrayNames(
   vtkDataSet* input, std::vector<std::string>& names)
 {
@@ -1624,66 +1645,69 @@ void vtkParticleTracerBase::GetPointDataArrayNames(
     }
 }
 
+//---------------------------------------------------------------------------
 vtkFloatArray*  vtkParticleTracerBase::GetParticleAge(vtkPointData* pd)
 {
   return vtkFloatArray::SafeDownCast(pd->GetArray("ParticleAge"));
 }
 
+//---------------------------------------------------------------------------
 vtkIntArray* vtkParticleTracerBase::GetParticleIds(vtkPointData* pd)
 {
   return vtkIntArray::SafeDownCast(pd->GetArray("ParticleId"));
 }
 
+//---------------------------------------------------------------------------
 vtkCharArray* vtkParticleTracerBase::GetParticleSourceIds(vtkPointData* pd)
 {
   return vtkCharArray::SafeDownCast(pd->GetArray("ParticleSourceId"));
 }
 
+//---------------------------------------------------------------------------
 vtkIntArray* vtkParticleTracerBase::GetInjectedPointIds(vtkPointData* pd)
 {
- return vtkIntArray::SafeDownCast(pd->GetArray("InjectedPointId"));
+  return vtkIntArray::SafeDownCast(pd->GetArray("InjectedPointId"));
 }
 
+//---------------------------------------------------------------------------
 vtkIntArray* vtkParticleTracerBase::GetInjectedStepIds(vtkPointData* pd)
 {
- return vtkIntArray::SafeDownCast(pd->GetArray("InjectionStepId"));
+  return vtkIntArray::SafeDownCast(pd->GetArray("InjectionStepId"));
 }
 
+//---------------------------------------------------------------------------
 vtkIntArray* vtkParticleTracerBase::GetErrorCodeArr(vtkPointData* pd)
 {
- return vtkIntArray::SafeDownCast(pd->GetArray("ErrorCode"));
+  return vtkIntArray::SafeDownCast(pd->GetArray("ErrorCode"));
 }
 
+//---------------------------------------------------------------------------
 vtkFloatArray*  vtkParticleTracerBase::GetParticleVorticity(vtkPointData* pd)
 {
- return vtkFloatArray::SafeDownCast(pd->GetArray("Vorticity"));
+  return vtkFloatArray::SafeDownCast(pd->GetArray("Vorticity"));
 }
 
+//---------------------------------------------------------------------------
 vtkFloatArray*  vtkParticleTracerBase::GetParticleRotation(vtkPointData* pd)
 {
- return vtkFloatArray::SafeDownCast(pd->GetArray("Rotation"));
+  return vtkFloatArray::SafeDownCast(pd->GetArray("Rotation"));
 }
 
+//---------------------------------------------------------------------------
 vtkFloatArray*  vtkParticleTracerBase::GetParticleAngularVel(vtkPointData* pd)
 {
- return vtkFloatArray::SafeDownCast(pd->GetArray("AngularVelocity"));
+  return vtkFloatArray::SafeDownCast(pd->GetArray("AngularVelocity"));
 }
 
+//---------------------------------------------------------------------------
 void vtkParticleTracerBase::PrintParticleHistories()
 {
   cout<<"Particle id, ages: "<<endl;
-  for(ParticleListIterator itr = this->ParticleHistories.begin();  itr!=this->ParticleHistories.end();itr++)
+  for(ParticleListIterator itr = this->ParticleHistories.begin();
+      itr!=this->ParticleHistories.end();itr++)
     {
     ParticleInformation& info(*itr);
     cout<<info.InjectedPointId<<" "<<info.age<<" "<<endl;
     }
   cout<<endl;
 }
-
-
-
-ParticleTracerSetMacro(StartTime, double)
-ParticleTracerSetMacro(ComputeVorticity, bool);
-ParticleTracerSetMacro(RotationScale, double)
-ParticleTracerSetMacro(ForceReinjectionEveryNSteps,int);
-ParticleTracerSetMacro(TerminalSpeed, double);
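
The vtkParticleTracerBase.cxx hunks above move the ParticleTracerSetMacro()
expansions up next to the macro definition and document that, unlike vtkSetMacro,
the generated setters also reset the cache. A self-contained sketch of the setter
shape the macro produces; TracerSketch and its members are hypothetical, only the
pattern mirrors the macro:

    #include <cstdio>

    class TracerSketch
    {
    public:
      TracerSketch() : StartTime(0.0) {}
      void SetStartTime(double _arg)
      {
        if (this->StartTime == _arg)
          {
          return;               // value unchanged, keep the cache
          }
        this->StartTime = _arg;
        this->ResetCache();     // the extra step a plain vtkSetMacro would not do
        this->Modified();
      }
    private:
      void ResetCache() { std::printf("cache reset\n"); }
      void Modified()   { std::printf("modified\n"); }
      double StartTime;
    };

    int main()
    {
      TracerSketch t;
      t.SetStartTime(2.5);   // prints "cache reset" then "modified"
      t.SetStartTime(2.5);   // no output: same value, cache kept
      return 0;
    }
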
diff --git a/Filters/FlowPaths/vtkParticleTracerBase.h b/Filters/FlowPaths/vtkParticleTracerBase.h
index a1dad5c..27953f0 100644
--- a/Filters/FlowPaths/vtkParticleTracerBase.h
+++ b/Filters/FlowPaths/vtkParticleTracerBase.h
@@ -118,9 +118,8 @@ public:
   // Description
   // This can be used to scale the rate with which the streamribbons
   // twist. The default is 1.
-  void SetRotationScale(double);
   vtkGetMacro(RotationScale, double);
-
+  void SetRotationScale(double);
 
   // Description:
   // To get around problems with the Paraview Animation controls
@@ -137,8 +136,8 @@ public:
   // Note that if the particle source is also animated, this flag will be
   // redundant as the particles will be reinjected whenever the source changes
   // anyway
-  void SetForceReinjectionEveryNSteps(int);
   vtkGetMacro(ForceReinjectionEveryNSteps,int);
+  void SetForceReinjectionEveryNSteps(int);
 
   // Description:
   // Setting TerminationTime to a positive value will cause particles
@@ -159,9 +158,8 @@ public:
   // to terminate when the time is reached. Use a vlue of zero to
   // diable termination. The units of time should be consistent with the
   // primary time variable.
+  vtkGetMacro(StartTime, double);
   void SetStartTime(double t);
-  vtkGetMacro(StartTime,double);
-
 
   // Description:
   // if StaticSeeds is set and the mesh is static,
@@ -170,7 +168,8 @@ public:
   // processor just once before start.
   // If StaticSeeds is set and a moving seed source is specified
   // the motion will be ignored and results will not be as expected.
-  //vtkSetMacro(StaticSeeds,int);
+  // The default is that StaticSeeds is 0.
+  vtkSetMacro(StaticSeeds,int);
   vtkGetMacro(StaticSeeds,int);
 
   // Description:
@@ -180,7 +179,8 @@ public:
   // setting it to true will force all optimizations.
   // Do not Set StaticMesh to true if a dynamic mesh is being used
   // as this will invalidate all results.
-  //vtkSetMacro(StaticMesh,int);
+  // The default is that StaticMesh is 0.
+  vtkSetMacro(StaticMesh,int);
   vtkGetMacro(StaticMesh,int);
 
   // Description:
@@ -292,7 +292,6 @@ public:
   int InitializeInterpolator();
   int UpdateDataCache(vtkDataObject *td);
 
-
   // Description : Test the list of particles to see if they are
   // inside our data. Add good ones to passed list and set count to the
   // number that passed
@@ -311,14 +310,14 @@ public:
   // If either are non static, then this step is skipped.
   virtual void AssignSeedsToProcessors(double time,
     vtkDataSet *source, int sourceID, int ptId,
-    vtkParticleTracerBaseNamespace::ParticleVector &LocalSeedPoints,
-    int &LocalAssignedCount);
+    vtkParticleTracerBaseNamespace::ParticleVector &localSeedPoints,
+    int &localAssignedCount);
 
   // Description : once seeds have been assigned to a process, we
   // give each one a uniqu ID. We need to use MPI to find out
   // who is using which numbers.
   virtual void AssignUniqueIds(
-    vtkParticleTracerBaseNamespace::ParticleVector &LocalSeedPoints);
+    vtkParticleTracerBaseNamespace::ParticleVector &localSeedPoints);
 
   // Description : copy list of particles from a vector used for testing particles
   // and sending between processors, into a list, which is used as the master
@@ -329,7 +328,7 @@ public:
   // Description : Perform a GatherV operation on a vector of particles
   // this is used during classification of seed points and also between iterations
   // of the main loop as particles leave each processor domain
-  virtual void UpdateParticleListFromOtherProcesses(){};
+  virtual void UpdateParticleListFromOtherProcesses(){}
 
   // Description : The main loop performing Runge-Kutta integration of a single
   // particle between the two times supplied.
@@ -400,13 +399,16 @@ public:
 private:
   // Description:
   // Hide this because we require a new interpolator type
-  void SetInterpolatorPrototype(vtkAbstractInterpolatedVelocityField*) {};
+  void SetInterpolatorPrototype(vtkAbstractInterpolatedVelocityField*) {}
 
-  // Description : When particle leave the domain, they must be collected
-  // and sent to the other processors for possible continuation.
+  // Description:
+  // When particles leave the domain, they must be collected
+  // and sent to the other processes for possible continuation.
   // These routines manage the collection and sending after each main iteration.
-  // RetryWithPush adds a small pusj to aparticle along it's current velocity
-  // vector, this helps get over cracks in dynamic/rotating meshes
+  // RetryWithPush adds a small push to a particle along its current velocity
+  // vector; this helps get over cracks in dynamic/rotating meshes. This is a
+  // first-order integration though, so it may introduce a bit of extra error
+  // compared to the integrator that is used.
   bool RetryWithPush(
     vtkParticleTracerBaseNamespace::ParticleInformation &info, double* point1,double delT, int subSteps);
 
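
The header hunk above un-comments vtkSetMacro(StaticSeeds,int) and
vtkSetMacro(StaticMesh,int), matching the new defaults of 0 in the .cxx, so the
fixed-geometry optimizations are now opt-in. A usage sketch, assuming a concrete
subclass such as vtkParticlePathFilter from this module:

    #include "vtkNew.h"
    #include "vtkParticlePathFilter.h"

    void ConfigureTracer()
    {
      vtkNew<vtkParticlePathFilter> tracer;
      tracer->SetStaticMesh(1);    // safe only if the mesh really is fixed over time
      tracer->SetStaticSeeds(1);   // seeds are assigned to processes once, up front
    }
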
diff --git a/Filters/FlowPaths/vtkStreaklineFilter.cxx b/Filters/FlowPaths/vtkStreaklineFilter.cxx
index c14b91c..99b9c4d 100644
--- a/Filters/FlowPaths/vtkStreaklineFilter.cxx
+++ b/Filters/FlowPaths/vtkStreaklineFilter.cxx
@@ -26,7 +26,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkFloatArray.h"
 
 #include <vector>
-#include <assert.h>
+#include <cassert>
 #include <algorithm>
 
 #define DEBUGSTREAKLINEFILTER 1
diff --git a/Filters/FlowPaths/vtkStreamLine.h b/Filters/FlowPaths/vtkStreamLine.h
index ed68056..7e61457 100644
--- a/Filters/FlowPaths/vtkStreamLine.h
+++ b/Filters/FlowPaths/vtkStreamLine.h
@@ -66,7 +66,7 @@ public:
 
 protected:
   vtkStreamLine();
-  ~vtkStreamLine() {};
+  ~vtkStreamLine() {}
 
   // Convert streamer array into vtkPolyData
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/FlowPaths/vtkStreamPoints.h b/Filters/FlowPaths/vtkStreamPoints.h
index e3c881a..d576160 100644
--- a/Filters/FlowPaths/vtkStreamPoints.h
+++ b/Filters/FlowPaths/vtkStreamPoints.h
@@ -45,7 +45,7 @@ public:
 
 protected:
   vtkStreamPoints();
-  ~vtkStreamPoints() {};
+  ~vtkStreamPoints() {}
 
   // Convert streamer array into vtkPolyData
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/FlowPaths/vtkStreamTracer.cxx b/Filters/FlowPaths/vtkStreamTracer.cxx
index d184967..b87e2a8 100644
--- a/Filters/FlowPaths/vtkStreamTracer.cxx
+++ b/Filters/FlowPaths/vtkStreamTracer.cxx
@@ -947,25 +947,6 @@ void vtkStreamTracer::Integrate(vtkPointData *input0Data,
         break;
         }
 
-      // It is not enough to use the starting point for stagnation calculation
-      // Use delX/stepSize to calculate speed and check if it is below
-      // stagnation threshold
-      double disp[3];
-      for (i=0; i<3; i++)
-        {
-        disp[i] = point2[i] - point1[i];
-        }
-      if ( (stepSize.Interval == 0) ||
-           (vtkMath::Norm(disp) / fabs(stepSize.Interval) <= this->TerminalSpeed) )
-        {
-        retVal = STAGNATION;
-        break;
-        }
-
-      accumTime += stepTaken / speed;
-      // Calculate propagation (using the same units as MaximumPropagation
-      propagation += fabs( stepSize.Interval );
-
       // This is the next starting point
       for(i=0; i<3; i++)
         {
@@ -979,6 +960,20 @@ void vtkStreamTracer::Integrate(vtkPointData *input0Data,
         memcpy(lastPoint, point2, 3*sizeof(double));
         break;
         }
+
+      // It is not enough to use the starting point for stagnation calculation
+      // Use average speed to check if it is below stagnation threshold
+      double speed2 = vtkMath::Norm(velocity);
+      if ( (speed+speed2)/2 <= this->TerminalSpeed )
+        {
+        retVal = STAGNATION;
+        break;
+        }
+
+      accumTime += stepTaken / speed;
+      // Calculate propagation (using the same units as MaximumPropagation)
+      propagation += fabs( stepSize.Interval );
+
       // Make sure we use the dataset found by the vtkAbstractInterpolatedVelocityField
       input = func->GetLastDataSet();
       inputPD = input->GetPointData();
@@ -994,7 +989,7 @@ void vtkStreamTracer::Integrate(vtkPointData *input0Data,
       // Calculate cell length and speed to be used in unit conversions
       input->GetCell(func->GetLastCellId(), cell);
       cellLength = sqrt(static_cast<double>(cell->GetLength2()));
-      speed = vtkMath::Norm(velocity);
+      speed = speed2;
       // Interpolate all point attributes on current point
       func->GetLastWeights(weights);
       InterpolatePoint(outputPD, inputPD, nextPoint, cell->PointIds, weights, this->HasMatchingPointAttributes);
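
The vtkStreamTracer hunks above move the stagnation test after the out-of-bounds
check and base it on the average of the speeds at the current and next points
instead of displacement divided by step size. A small sketch of that test; the
function name is illustrative:

    #include <cmath>

    static bool IsStagnant(const double v1[3], const double v2[3], double terminalSpeed)
    {
      double speed1 = std::sqrt(v1[0]*v1[0] + v1[1]*v1[1] + v1[2]*v1[2]);
      double speed2 = std::sqrt(v2[0]*v2[0] + v2[1]*v2[1] + v2[2]*v2[2]);
      return 0.5 * (speed1 + speed2) <= terminalSpeed;
    }
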
diff --git a/Filters/FlowPaths/vtkTemporalInterpolatedVelocityField.cxx b/Filters/FlowPaths/vtkTemporalInterpolatedVelocityField.cxx
index e0191c5..169d4f6 100644
--- a/Filters/FlowPaths/vtkTemporalInterpolatedVelocityField.cxx
+++ b/Filters/FlowPaths/vtkTemporalInterpolatedVelocityField.cxx
@@ -7,11 +7,11 @@
   All rights reserved.
   See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
 
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
+  This software is distributed WITHOUT ANY WARRANTY; without even
+  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+  PURPOSE.  See the above copyright notice for more information.
 
-=========================================================================*/
+  =========================================================================*/
 #include "vtkTemporalInterpolatedVelocityField.h"
 
 #include "vtkDataArray.h"
@@ -59,25 +59,30 @@ void vtkTemporalInterpolatedVelocityField::SetDataSetAtTime(int I, int N, double
 {
   this->times[N] = T;
   if ((this->times[1]-this->times[0])>0)
-  {
+    {
     this->ScaleCoeff = 1.0/(this->times[1]-this->times[0]);
-  }
-  if (N==0) {
+    }
+  if (N==0)
+    {
     this->ivf[N]->SetDataSet(I, dataset, staticdataset, NULL);
-  }
+    }
   // when the datasets for the second time set are added, set the static flag
-  if (N==1) {
+  if (N==1)
+    {
     bool is_static = staticdataset && this->ivf[0]->CacheList[I].StaticDataSet;
-    if ((unsigned int)I>=this->StaticDataSets.size()) {
+    if (static_cast<size_t>(I)>=this->StaticDataSets.size())
+      {
       this->StaticDataSets.resize(I+1,is_static);
-    }
-    if (is_static) {
+      }
+    if (is_static)
+      {
       this->ivf[N]->SetDataSet(I, dataset, staticdataset, this->ivf[0]->CacheList[I].BSPTree);
-    }
-    else {
+      }
+    else
+      {
       this->ivf[N]->SetDataSet(I, dataset, staticdataset, NULL);
+      }
     }
-  }
 }
 //---------------------------------------------------------------------------
 bool vtkTemporalInterpolatedVelocityField::IsStatic(int datasetIndex)
@@ -99,15 +104,23 @@ void vtkTemporalInterpolatedVelocityField::ClearCache()
 //---------------------------------------------------------------------------
 void vtkTemporalInterpolatedVelocityField::SetCachedCellIds(vtkIdType id[2], int ds[2])
 {
-  if (id[0]!=-1) {
+  if (id[0]!=-1)
+    {
     this->ivf[0]->SetLastCellInfo(id[0], ds[0]);
-  }
-  else this->ivf[0]->SetLastCellInfo(-1, 0);
+    }
+  else
+    {
+    this->ivf[0]->SetLastCellInfo(-1, 0);
+    }
   //
-  if (id[1]!=-1) {
+  if (id[1]!=-1)
+    {
     this->ivf[1]->SetLastCellInfo(id[1], ds[1]);
-  }
-  else this->ivf[1]->SetLastCellInfo(-1, 0);
+    }
+  else
+    {
+    this->ivf[1]->SetLastCellInfo(-1, 0);
+    }
 }
 //---------------------------------------------------------------------------
 bool vtkTemporalInterpolatedVelocityField::GetCachedCellIds(vtkIdType id[2], int ds[2])
@@ -122,27 +135,30 @@ bool vtkTemporalInterpolatedVelocityField::GetCachedCellIds(vtkIdType id[2], int
 //---------------------------------------------------------------------------
 void vtkTemporalInterpolatedVelocityField::AdvanceOneTimeStep()
 {
-  for (unsigned int i=0; i<this->ivf[0]->CacheList.size(); i++) {
-    if (this->IsStatic(i)) {
+  for (unsigned int i=0; i<this->ivf[0]->CacheList.size(); i++)
+    {
+    if (this->IsStatic(i))
+      {
       this->ivf[0]->ClearLastCellInfo();
       this->ivf[1]->ClearLastCellInfo();
-    }
-    else {
+      }
+    else
+      {
       this->ivf[0] = this->ivf[1];
       this->ivf[1] = vtkSmartPointer<vtkCachingInterpolatedVelocityField>::New();
+      }
     }
-  }
 }
 //---------------------------------------------------------------------------
 void vtkTemporalInterpolatedVelocityField::ShowCacheResults()
 {
   vtkErrorMacro(<< ")\n"
-    << "T0 - (cell hit : " << this->ivf[0]->CellCacheHit
-    << "  (dataset hit : " << this->ivf[0]->DataSetCacheHit-this->ivf[0]->CellCacheHit
-    << "         (miss : " << this->ivf[0]->CacheMiss       << ")\n"
-    << "T1 - (cell hit : " << this->ivf[1]->CellCacheHit
-    << "  (dataset hit : " << this->ivf[1]->DataSetCacheHit-this->ivf[1]->CellCacheHit
-    << "         (miss : " << this->ivf[1]->CacheMiss);
+                << "T0 - (cell hit : " << this->ivf[0]->CellCacheHit
+                << "  (dataset hit : " << this->ivf[0]->DataSetCacheHit-this->ivf[0]->CellCacheHit
+                << "         (miss : " << this->ivf[0]->CacheMiss       << ")\n"
+                << "T1 - (cell hit : " << this->ivf[1]->CellCacheHit
+                << "  (dataset hit : " << this->ivf[1]->DataSetCacheHit-this->ivf[1]->CellCacheHit
+                << "         (miss : " << this->ivf[1]->CacheMiss);
 }
 //---------------------------------------------------------------------------
 static double vtkTIVFWeightTolerance = 1E-3;
@@ -151,75 +167,101 @@ int vtkTemporalInterpolatedVelocityField::TestPoint(double* x)
 {
   this->CurrentWeight  = (x[3]-this->times[0])*this->ScaleCoeff;
   this->OneMinusWeight = 1.0 - this->CurrentWeight;
-  if (this->CurrentWeight<(0.0+vtkTIVFWeightTolerance)) this->CurrentWeight = 0.0;
-  if (this->CurrentWeight>(1.0-vtkTIVFWeightTolerance)) this->CurrentWeight = 1.0;
+  if (this->CurrentWeight<(0.0+vtkTIVFWeightTolerance))
+    {
+    this->CurrentWeight = 0.0;
+    }
+  if (this->CurrentWeight>(1.0-vtkTIVFWeightTolerance))
+    {
+    this->CurrentWeight = 1.0;
+    }
   //
   // are we inside dataset at T0
   //
-  if (this->ivf[0]->FunctionValues(x, vals1)) {
+  if (this->ivf[0]->FunctionValues(x, vals1))
+    {
     // if we are inside at T0 and static, we must be inside at T1
-    if (this->IsStatic(this->ivf[0]->LastCacheIndex)) {
+    if (this->IsStatic(this->ivf[0]->LastCacheIndex))
+      {
       // compute using weights from dataset 0 and vectors from dataset 1
       this->ivf[1]->SetLastCellInfo(this->ivf[0]->LastCellId, this->ivf[0]->LastCacheIndex);
       this->ivf[0]->FastCompute(this->ivf[1]->Cache, vals2);
-      for (int i=0; i<this->NumFuncs; i++) {
+      for (int i=0; i<this->NumFuncs; i++)
+        {
         this->LastGoodVelocity[i] = this->OneMinusWeight*vals1[i] + this->CurrentWeight*vals2[i];
-      }
+        }
       return ID_INSIDE_ALL;
-    }
+      }
     // dynamic, we need to test at T1
-    if (!this->ivf[1]->FunctionValues(x, vals2)) {
+    if (!this->ivf[1]->FunctionValues(x, vals2))
+      {
       // inside at T0, but outside at T1, return velocity for T0
-      for (int i=0; i<this->NumFuncs; i++) {
+      for (int i=0; i<this->NumFuncs; i++)
+        {
         this->LastGoodVelocity[i] = vals1[i];
-      }
+        }
       return ID_OUTSIDE_T1;
-    }
+      }
     // both valid, compute correct value
-    for (int i=0; i<this->NumFuncs; i++) {
+    for (int i=0; i<this->NumFuncs; i++)
+      {
       this->LastGoodVelocity[i] = this->OneMinusWeight*vals1[i] + this->CurrentWeight*vals2[i];
-    }
+      }
     return ID_INSIDE_ALL;
-  }
+    }
   // Outside at T0, either abort or use T1
-  else {
+  else
+    {
     // if we are outside at T0 and static, we must be outside at T1
-    if (this->IsStatic(this->ivf[0]->LastCacheIndex)) {
+    if (this->IsStatic(this->ivf[0]->LastCacheIndex))
+      {
       return ID_OUTSIDE_ALL;
-    }
+      }
     // we are dynamic, so test T1
-    if (this->ivf[1]->FunctionValues(x, vals2)) {
+    if (this->ivf[1]->FunctionValues(x, vals2))
+      {
       // inside at T1, but outside at T0, return velocity for T1
-      for (int i=0; i<this->NumFuncs; i++) {
+      for (int i=0; i<this->NumFuncs; i++)
+        {
         this->LastGoodVelocity[i] = vals2[i];
-      }
+        }
       return ID_OUTSIDE_T0;
-    }
+      }
     // failed both, so exit
     return ID_OUTSIDE_ALL;
-  }
+    }
 }
 //---------------------------------------------------------------------------
 // Evaluate u,v,w at x,y,z,t
 int vtkTemporalInterpolatedVelocityField::QuickTestPoint(double* x)
 {
   // if outside, return 0
-  if (!this->ivf[0]->InsideTest(x)) return 0;
+  if (!this->ivf[0]->InsideTest(x))
+    {
+    return 0;
+    }
   // if inside and static dataset hit, skip next test
-  if (!this->IsStatic(this->ivf[0]->LastCacheIndex)) {
-    if (!this->ivf[1]->InsideTest(x)) return 0;
-  }
+  if (!this->IsStatic(this->ivf[0]->LastCacheIndex))
+    {
+    if (!this->ivf[1]->InsideTest(x))
+      {
+      return 0;
+      }
+    }
   return 1;
 }
 //---------------------------------------------------------------------------
 // Evaluate u,v,w at x,y,z,t
 int vtkTemporalInterpolatedVelocityField::FunctionValues(double* x, double* u)
 {
-  if (this->TestPoint(x)==ID_OUTSIDE_ALL) return 0;
-  //
-  for (int i=0; i<this->NumFuncs; i++) {
+  if (this->TestPoint(x)==ID_OUTSIDE_ALL)
+    {
+    return 0;
+    }
+  for (int i=0; i<this->NumFuncs; i++)
+    {
     u[i] = this->LastGoodVelocity[i];
-  }
+    }
   return 1;
 }
 //---------------------------------------------------------------------------
@@ -228,33 +270,45 @@ int vtkTemporalInterpolatedVelocityField::FunctionValuesAtT(int T, double* x, do
   //
   // Try velocity at T0
   //
-  if (T==0) {
-    if (!this->ivf[0]->FunctionValues(x, vals1)) return 0;
-    for (int i=0; i<this->NumFuncs; i++) {
+  if (T==0)
+    {
+    if (!this->ivf[0]->FunctionValues(x, vals1))
+      {
+      return 0;
+      }
+    for (int i=0; i<this->NumFuncs; i++)
+      {
       this->LastGoodVelocity[i] = u[i] = vals1[i];
-    }
-    if (this->IsStatic(this->ivf[0]->LastCacheIndex)) {
+      }
+    if (this->IsStatic(this->ivf[0]->LastCacheIndex))
+      {
       this->ivf[1]->SetLastCellInfo(this->ivf[0]->LastCellId, this->ivf[0]->LastCacheIndex);
+      }
     }
-  }
   //
   // Try velocity at T1
   //
-  else if (T==1) {
-    if (!this->ivf[1]->FunctionValues(x, vals2)) return 0;
-    for (int i=0; i<this->NumFuncs; i++) {
+  else if (T==1)
+    {
+    if (!this->ivf[1]->FunctionValues(x, vals2))
+      {
+      return 0;
+      }
+    for (int i=0; i<this->NumFuncs; i++)
+      {
       this->LastGoodVelocity[i] = u[i] = vals2[i];
-    }
-    if (this->IsStatic(this->ivf[1]->LastCacheIndex)) {
+      }
+    if (this->IsStatic(this->ivf[1]->LastCacheIndex))
+      {
       this->ivf[0]->SetLastCellInfo(this->ivf[1]->LastCellId, this->ivf[1]->LastCacheIndex);
+      }
     }
-  }
   return 1;
 }
 //---------------------------------------------------------------------------
 bool vtkTemporalInterpolatedVelocityField::InterpolatePoint(
-    vtkPointData *outPD1, vtkPointData *outPD2,
-    vtkIdType outIndex)
+  vtkPointData *outPD1, vtkPointData *outPD2,
+  vtkIdType outIndex)
 {
   bool ok1 = this->ivf[0]->InterpolatePoint(outPD1, outIndex);
   bool ok2 = this->ivf[1]->InterpolatePoint(outPD2, outIndex);
@@ -264,10 +318,14 @@ bool vtkTemporalInterpolatedVelocityField::InterpolatePoint(
 bool vtkTemporalInterpolatedVelocityField::InterpolatePoint(
   int T, vtkPointData *outPD1, vtkIdType outIndex)
 {
+  vtkCachingInterpolatedVelocityField* inivf = this->ivf[T];
   // force use of correct weights/etc if static as only T0 are valid
-  if (T==1 && this->IsStatic(this->ivf[T]->LastCacheIndex)) T=0;
+  if (T==1 && this->IsStatic(this->ivf[T]->LastCacheIndex))
+    {
+    T=0;
+    }
   //
-  return this->ivf[T]->InterpolatePoint(outPD1, outIndex);
+  return this->ivf[T]->InterpolatePoint(inivf, outPD1, outIndex);
 }
 //---------------------------------------------------------------------------
 bool vtkTemporalInterpolatedVelocityField::GetVorticityData(
@@ -275,18 +333,21 @@ bool vtkTemporalInterpolatedVelocityField::GetVorticityData(
   vtkGenericCell *&cell, vtkDoubleArray *cellVectors)
 {
   // force use of correct weights/etc if static as only T0 are valid
-  if (T==1 && this->IsStatic(this->ivf[T]->LastCacheIndex)) T=0;
+  if (T==1 && this->IsStatic(this->ivf[T]->LastCacheIndex))
+    {
+    T=0;
+    }
   //
   if (this->ivf[T]->GetLastWeights(weights) &&
       this->ivf[T]->GetLastLocalCoordinates(pcoords) &&
       (cell=this->ivf[T]->GetLastCell()) )
-  {
+    {
     vtkDataSet   *ds = this->ivf[T]->Cache->DataSet;
     vtkPointData *pd = ds->GetPointData();
     vtkDataArray *da = pd->GetVectors(this->ivf[T]->GetVectorsSelection());
     da->GetTuples(cell->PointIds, cellVectors);
     return 1;
-  }
+    }
   return 0;
 }
 //---------------------------------------------------------------------------
@@ -295,9 +356,9 @@ void vtkTemporalInterpolatedVelocityField::PrintSelf(ostream& os, vtkIndent inde
   this->Superclass::PrintSelf(os, indent);
 
   os << indent << "LastGoodVelocity: "
-    << this->LastGoodVelocity[0] << ", "
-    << this->LastGoodVelocity[1] << ", "
-    << this->LastGoodVelocity[2] << endl;
+     << this->LastGoodVelocity[0] << ", "
+     << this->LastGoodVelocity[1] << ", "
+     << this->LastGoodVelocity[2] << endl;
   os << indent << "CurrentWeight: " << this->CurrentWeight << endl;
 }
 //---------------------------------------------------------------------------
diff --git a/Filters/FlowPaths/vtkTemporalInterpolatedVelocityField.h b/Filters/FlowPaths/vtkTemporalInterpolatedVelocityField.h
index 1d20ea6..353138c 100644
--- a/Filters/FlowPaths/vtkTemporalInterpolatedVelocityField.h
+++ b/Filters/FlowPaths/vtkTemporalInterpolatedVelocityField.h
@@ -160,7 +160,7 @@ protected:
 private:
   // Hide this since we need multiple time steps and are using a different
   // function prototype
-  virtual void AddDataSet(vtkDataSet*) {};
+  virtual void AddDataSet(vtkDataSet*) {}
 
 private:
   vtkTemporalInterpolatedVelocityField(const vtkTemporalInterpolatedVelocityField&);  // Not implemented.
diff --git a/Filters/FlowPaths/vtkTemporalStreamTracer.cxx b/Filters/FlowPaths/vtkTemporalStreamTracer.cxx
index 01479d5..e6107cf 100644
--- a/Filters/FlowPaths/vtkTemporalStreamTracer.cxx
+++ b/Filters/FlowPaths/vtkTemporalStreamTracer.cxx
@@ -44,7 +44,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkOutputWindow.h"
 #include "vtkAbstractParticleWriter.h"
 #include "vtkToolkits.h" // For VTK_USE_MPI
-#include "assert.h"
+#include <cassert>
 
 #ifdef WIN32
   #undef JB_H5PART_PARTICLE_OUTPUT
diff --git a/Filters/FlowPaths/vtkTemporalStreamTracer.h b/Filters/FlowPaths/vtkTemporalStreamTracer.h
index 1a5dc34..5cd751d 100644
--- a/Filters/FlowPaths/vtkTemporalStreamTracer.h
+++ b/Filters/FlowPaths/vtkTemporalStreamTracer.h
@@ -431,7 +431,7 @@ public:
 private:
   // Description:
   // Hide this because we require a new interpolator type
-  void SetInterpolatorPrototype(vtkAbstractInterpolatedVelocityField*) {};
+  void SetInterpolatorPrototype(vtkAbstractInterpolatedVelocityField*) {}
 
 private:
   vtkTemporalStreamTracer(const vtkTemporalStreamTracer&);  // Not implemented.
diff --git a/Filters/General/CMakeLists.txt b/Filters/General/CMakeLists.txt
index 216a4aa..f4a683f 100644
--- a/Filters/General/CMakeLists.txt
+++ b/Filters/General/CMakeLists.txt
@@ -59,7 +59,6 @@ set(Module_SRCS
   vtkRecursiveDividingCubes.cxx
   vtkReflectionFilter.cxx
   vtkRotationFilter.cxx
-  vtkSCurveSpline.cxx
   vtkShrinkFilter.cxx
   vtkShrinkPolyData.cxx
   vtkSpatialRepresentationFilter.cxx
diff --git a/Filters/General/Testing/Cxx/CMakeLists.txt b/Filters/General/Testing/Cxx/CMakeLists.txt
index a2eefaf..abc4b80 100644
--- a/Filters/General/Testing/Cxx/CMakeLists.txt
+++ b/Filters/General/Testing/Cxx/CMakeLists.txt
@@ -1,63 +1,35 @@
-set(MyTests
-  TestGraphWeightEuclideanDistanceFilter.cxx
+vtk_add_test_cxx(
   BoxClipPolyData.cxx
   BoxClipTetrahedra.cxx
-  BoxClipTriangulate.cxx
   BoxClipTriangulateAndInterpolate.cxx
-  TestBooleanOperationPolyDataFilter.cxx
+  BoxClipTriangulate.cxx,NO_VALID
+  TestAppendPoints.cxx,NO_VALID
   TestBooleanOperationPolyDataFilter2.cxx
+  TestBooleanOperationPolyDataFilter.cxx
   TestContourTriangulatorCutter.cxx
+  TestContourTriangulator.cxx
+  TestContourTriangulatorMarching.cxx
   TestDeformPointSet.cxx
-  TestDensifyPolyData.cxx
+  TestDensifyPolyData.cxx,-E15
   TestDistancePolyDataFilter.cxx
-  TestImageDataToPointSet.cxx
+  TestGraphWeightEuclideanDistanceFilter.cxx,NO_VALID
+  TestImageDataToPointSet.cxx,NO_VALID
+  TestIntersectionPolyDataFilter2.cxx,NO_VALID
   TestIntersectionPolyDataFilter.cxx
-  TestIntersectionPolyDataFilter2.cxx
-  TestRectilinearGridToPointSet.cxx
-  TestReflectionFilter.cxx
+  TestRectilinearGridToPointSet.cxx,NO_VALID
+  TestReflectionFilter.cxx,NO_VALID
+  TestTableSplitColumnComponents.cxx,NO_VALID
+  TestTransformFilter.cxx,NO_VALID
+  TestTransformPolyDataFilter.cxx,NO_VALID
   TestUncertaintyTubeFilter.cxx
-)
-# Tests with data
-if(VTK_DATA_ROOT)
-  set(MyTests
-    ${MyTests}
-    TestContourTriangulator.cxx
-    TestContourTriangulatorMarching.cxx
-    TestGradientAndVorticity.cxx
-    TestIconGlyphFilterGravity.cxx
-    TestQuadraturePoints.cxx
-    TestYoungsMaterialInterface.cxx
-    )
-endif()
-
-# Set the tolerance higher for a few tests that need it
-set(TestDensifyPolyDataError 15)
+  )
 
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
+# Tests with data
+vtk_add_test_cxx(
+  TestGradientAndVorticity.cxx,NO_VALID
+  TestIconGlyphFilterGravity.cxx
+  TestQuadraturePoints.cxx
+  TestYoungsMaterialInterface.cxx
+  )
 
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    if(${${TName}Error})
-      set(_error_threshold ${${TName}Error})
-    else()
-      set(_error_threshold 10)
-    endif()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png
-        -E ${_error_threshold})
- else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
- endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Filters/General/Testing/Cxx/TestAppendPoints.cxx b/Filters/General/Testing/Cxx/TestAppendPoints.cxx
new file mode 100644
index 0000000..7752906
--- /dev/null
+++ b/Filters/General/Testing/Cxx/TestAppendPoints.cxx
@@ -0,0 +1,168 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestAppendPoints.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkAppendPoints.h>
+#include <vtkCellArray.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+}
+
+int AppendPolyDataPoints(int dataType0, int dataType1,
+  int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> polyData0 = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(polyData0, dataType0);
+
+  vtkSmartPointer<vtkPolyData> polyData1 = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(polyData1, dataType1);
+
+  vtkSmartPointer<vtkAppendPoints> appendPoints
+    = vtkSmartPointer<vtkAppendPoints>::New();
+  appendPoints->SetOutputPointsPrecision(outputPointsPrecision);
+
+  appendPoints->AddInputData(polyData0);
+  appendPoints->AddInputData(polyData1);
+
+  appendPoints->Update();
+
+  vtkSmartPointer<vtkPointSet> pointSet = appendPoints->GetOutput();
+  vtkSmartPointer<vtkPoints> points = pointSet->GetPoints();
+
+  return points->GetDataType();
+}
+}
+
+int TestAppendPoints(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = AppendPolyDataPoints(VTK_FLOAT, VTK_FLOAT,
+    vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = AppendPolyDataPoints(VTK_DOUBLE, VTK_FLOAT,
+    vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = AppendPolyDataPoints(VTK_DOUBLE, VTK_DOUBLE,
+    vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = AppendPolyDataPoints(VTK_FLOAT, VTK_FLOAT,
+    vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = AppendPolyDataPoints(VTK_DOUBLE, VTK_FLOAT,
+    vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = AppendPolyDataPoints(VTK_DOUBLE, VTK_DOUBLE,
+    vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = AppendPolyDataPoints(VTK_FLOAT, VTK_FLOAT,
+    vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = AppendPolyDataPoints(VTK_DOUBLE, VTK_FLOAT,
+    vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = AppendPolyDataPoints(VTK_DOUBLE, VTK_DOUBLE,
+    vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/General/Testing/Cxx/TestReflectionFilter.cxx b/Filters/General/Testing/Cxx/TestReflectionFilter.cxx
index 0182774..a38efeb 100644
--- a/Filters/General/Testing/Cxx/TestReflectionFilter.cxx
+++ b/Filters/General/Testing/Cxx/TestReflectionFilter.cxx
@@ -21,7 +21,7 @@
 #include <vtkIdList.h>
 #include <vtkReflectionFilter.h>
 #include <iostream>
-#include <assert.h>
+#include <cassert>
 
 #define AssertMacro(b) if(!(b)){std::cerr <<"Failed to reflect pyramid"<<std::endl;return EXIT_FAILURE;}
 
diff --git a/Filters/General/Testing/Cxx/TestTableSplitColumnComponents.cxx b/Filters/General/Testing/Cxx/TestTableSplitColumnComponents.cxx
index a403052..a869b18 100644
--- a/Filters/General/Testing/Cxx/TestTableSplitColumnComponents.cxx
+++ b/Filters/General/Testing/Cxx/TestTableSplitColumnComponents.cxx
@@ -55,7 +55,7 @@ int TestTableSplitColumnComponents(int, char*[])
     {
     vtkGenericWarningMacro(<< "Incorrect column count: "
                            << out->GetNumberOfColumns());
-    return 1;
+    return EXIT_FAILURE;
     }
   vtkIntArray* arrays[4];
   arrays[0] = vtkIntArray::SafeDownCast(out->GetColumn(0));
@@ -65,7 +65,7 @@ int TestTableSplitColumnComponents(int, char*[])
   if (arrays[0] == 0 || arrays[1] == 0 || arrays[2] == 0 || arrays[3] == 0)
     {
     vtkGenericWarningMacro(<< "One of the output arrays was zero - type change?");
-    return 1;
+    return EXIT_FAILURE;
     }
 
   for (int i = 0; i < 5; ++i)
@@ -80,5 +80,42 @@ int TestTableSplitColumnComponents(int, char*[])
       }
     }
 
+  // Test naming modes.
+  if (strcmp(arrays[1]->GetName(), "Multi (0)") != 0)
+    {
+    vtkGenericWarningMacro("Incorrect name. NamingMode not being respected correctly.");
+    return EXIT_FAILURE;
+    }
+
+  split->SetNamingModeToNumberWithUnderscores();
+  split->Update();
+  out = split->GetOutput(0);
+  arrays[1] = vtkIntArray::SafeDownCast(out->GetColumn(1));
+  if (strcmp(arrays[1]->GetName(), "Multi_0") != 0)
+    {
+    vtkGenericWarningMacro("Incorrect name. NamingMode not being respected correctly.");
+    return EXIT_FAILURE;
+    }
+
+  split->SetNamingModeToNamesWithParens();
+  split->Update();
+  out = split->GetOutput(0);
+  arrays[1] = vtkIntArray::SafeDownCast(out->GetColumn(1));
+  if (strcmp(arrays[1]->GetName(), "Multi (X)") != 0)
+    {
+    vtkGenericWarningMacro("Incorrect name. NamingMode not being respected correctly.");
+    return EXIT_FAILURE;
+    }
+
+  split->SetNamingModeToNamesWithUnderscores();
+  split->Update();
+  out = split->GetOutput(0);
+  arrays[1] = vtkIntArray::SafeDownCast(out->GetColumn(1));
+  if (strcmp(arrays[1]->GetName(), "Multi_X") != 0)
+    {
+    vtkGenericWarningMacro("Incorrect name. NamingMode not being respected correctly.");
+    return EXIT_FAILURE;
+    }
+
   return EXIT_SUCCESS;
 }
diff --git a/Filters/General/Testing/Cxx/TestTransformFilter.cxx b/Filters/General/Testing/Cxx/TestTransformFilter.cxx
new file mode 100644
index 0000000..5ad7ee7
--- /dev/null
+++ b/Filters/General/Testing/Cxx/TestTransformFilter.cxx
@@ -0,0 +1,151 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestTransformFilter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPolyData.h>
+#include <vtkSmartPointer.h>
+#include <vtkTransform.h>
+#include <vtkTransformFilter.h>
+
+namespace
+{
+void InitializePointSet(vtkPointSet *pointSet, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      points->InsertNextPoint(point);
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      points->InsertNextPoint(point);
+      }
+    }
+
+  points->Squeeze();
+  pointSet->SetPoints(points);
+}
+
+void InitializeTransform(vtkTransform *transform)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  double elements[16];
+  for(unsigned int i = 0; i < 16; ++i)
+    {
+    randomSequence->Next();
+    elements[i] = randomSequence->GetValue();
+    }
+  transform->SetMatrix(elements);
+}
+}
+
+int TransformPointSet(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPointSet> inputPointSet
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePointSet(inputPointSet, dataType);
+
+  vtkSmartPointer<vtkTransform> transform
+    = vtkSmartPointer<vtkTransform>::New();
+  InitializeTransform(transform);
+
+  vtkSmartPointer<vtkTransformFilter> transformFilter
+    = vtkSmartPointer<vtkTransformFilter>::New();
+  transformFilter->SetOutputPointsPrecision(outputPointsPrecision);
+
+  transformFilter->SetTransform(transform);
+  transformFilter->SetInputData(inputPointSet);
+
+  transformFilter->Update();
+
+  vtkSmartPointer<vtkPointSet> outputPointSet = transformFilter->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPointSet->GetPoints();
+
+  return points->GetDataType();
+}
+
+int TestTransformFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = TransformPointSet(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPointSet(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPointSet(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPointSet(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPointSet(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPointSet(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
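
The new TestTransformFilter.cxx above checks which point type comes out of vtkTransformFilter for each OutputPointsPrecision setting. As a quick reference, here is a hedged, illustrative usage sketch (not from the upstream sources) of the same SetOutputPointsPrecision setter in an ordinary pipeline; it reuses only calls already exercised in the test above, except vtkTransform::Translate, which stands in for the random matrix setup and is used here purely for illustration.

    #include <vtkPoints.h>
    #include <vtkPolyData.h>
    #include <vtkSmartPointer.h>
    #include <vtkTransform.h>
    #include <vtkTransformFilter.h>

    int main()
    {
      // Single-precision input points.
      vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
      points->SetDataType(VTK_FLOAT);
      points->InsertNextPoint(1.0, 2.0, 3.0);

      vtkSmartPointer<vtkPolyData> input = vtkSmartPointer<vtkPolyData>::New();
      input->SetPoints(points);

      // Any transform works; a translation keeps the sketch short.
      vtkSmartPointer<vtkTransform> transform = vtkSmartPointer<vtkTransform>::New();
      transform->Translate(10.0, 0.0, 0.0);

      vtkSmartPointer<vtkTransformFilter> filter =
        vtkSmartPointer<vtkTransformFilter>::New();
      filter->SetTransform(transform);
      filter->SetInputData(input);
      // Request double-precision output points regardless of the input type.
      filter->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
      filter->Update();

      // With DOUBLE_PRECISION the output points should report VTK_DOUBLE.
      return filter->GetOutput()->GetPoints()->GetDataType() == VTK_DOUBLE ? 0 : 1;
    }
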
diff --git a/Filters/General/Testing/Cxx/TestTransformPolyDataFilter.cxx b/Filters/General/Testing/Cxx/TestTransformPolyDataFilter.cxx
new file mode 100644
index 0000000..6777a64
--- /dev/null
+++ b/Filters/General/Testing/Cxx/TestTransformPolyDataFilter.cxx
@@ -0,0 +1,156 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestTransformPolyDataFilter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCellArray.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+#include <vtkTransform.h>
+#include <vtkTransformPolyDataFilter.h>
+
+namespace
+{
+void InitializePolyData(vtkPolyData *polyData, int dataType)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPoints> points = vtkSmartPointer<vtkPoints>::New();
+  vtkSmartPointer<vtkCellArray> verts = vtkSmartPointer<vtkCellArray>::New();
+  verts->InsertNextCell(4);
+
+  if(dataType == VTK_DOUBLE)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      double point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = randomSequence->GetValue();
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    for(unsigned int i = 0; i < 4; ++i)
+      {
+      float point[3];
+      for(unsigned int j = 0; j < 3; ++j)
+        {
+        randomSequence->Next();
+        point[j] = static_cast<float>(randomSequence->GetValue());
+        }
+      verts->InsertCellPoint(points->InsertNextPoint(point));
+      }
+    }
+
+  points->Squeeze();
+  polyData->SetPoints(points);
+  verts->Squeeze();
+  polyData->SetVerts(verts);
+}
+
+void InitializeTransform(vtkTransform *transform)
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  double elements[16];
+  for(unsigned int i = 0; i < 16; ++i)
+    {
+    randomSequence->Next();
+    elements[i] = randomSequence->GetValue();
+    }
+  transform->SetMatrix(elements);
+}
+}
+
+int TransformPolyData(int dataType, int outputPointsPrecision)
+{
+  vtkSmartPointer<vtkPolyData> inputPolyData
+    = vtkSmartPointer<vtkPolyData>::New();
+  InitializePolyData(inputPolyData, dataType);
+
+  vtkSmartPointer<vtkTransform> transform
+    = vtkSmartPointer<vtkTransform>::New();
+  InitializeTransform(transform);
+
+  vtkSmartPointer<vtkTransformPolyDataFilter> transformPolyDataFilter
+    = vtkSmartPointer<vtkTransformPolyDataFilter>::New();
+  transformPolyDataFilter->SetOutputPointsPrecision(outputPointsPrecision);
+
+  transformPolyDataFilter->SetTransform(transform);
+  transformPolyDataFilter->SetInputData(inputPolyData);
+
+  transformPolyDataFilter->Update();
+
+  vtkSmartPointer<vtkPolyData> outputPolyData
+    = transformPolyDataFilter->GetOutput();
+  vtkSmartPointer<vtkPoints> points = outputPolyData->GetPoints();
+
+  return points->GetDataType();
+}
+
+int TestTransformPolyDataFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  int dataType = TransformPolyData(VTK_FLOAT, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPolyData(VTK_DOUBLE, vtkAlgorithm::DEFAULT_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPolyData(VTK_FLOAT, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPolyData(VTK_DOUBLE, vtkAlgorithm::SINGLE_PRECISION);
+
+  if(dataType != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPolyData(VTK_FLOAT, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  dataType = TransformPolyData(VTK_DOUBLE, vtkAlgorithm::DOUBLE_PRECISION);
+
+  if(dataType != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/General/Testing/Data/Baseline/BoxClipPolyData.png.md5 b/Filters/General/Testing/Data/Baseline/BoxClipPolyData.png.md5
new file mode 100644
index 0000000..c73b4d3
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/BoxClipPolyData.png.md5
@@ -0,0 +1 @@
+0f6ddd57c523757496ff3c91003cdb6b
diff --git a/Filters/General/Testing/Data/Baseline/BoxClipTetrahedra.png.md5 b/Filters/General/Testing/Data/Baseline/BoxClipTetrahedra.png.md5
new file mode 100644
index 0000000..02f683d
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/BoxClipTetrahedra.png.md5
@@ -0,0 +1 @@
+71357f82091d8dbde6b7a56c8aaa914e
diff --git a/Filters/General/Testing/Data/Baseline/BoxClipTriangulateAndInterpolate.png.md5 b/Filters/General/Testing/Data/Baseline/BoxClipTriangulateAndInterpolate.png.md5
new file mode 100644
index 0000000..236e48b
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/BoxClipTriangulateAndInterpolate.png.md5
@@ -0,0 +1 @@
+f3f1f410e9bc965d9c889879c4bc3c21
diff --git a/Filters/General/Testing/Data/Baseline/Canny.png.md5 b/Filters/General/Testing/Data/Baseline/Canny.png.md5
new file mode 100644
index 0000000..f83beb7
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/Canny.png.md5
@@ -0,0 +1 @@
+f6dd6ab19e54412d29298a06029bd73d
diff --git a/Filters/General/Testing/Data/Baseline/Canny_1.png.md5 b/Filters/General/Testing/Data/Baseline/Canny_1.png.md5
new file mode 100644
index 0000000..d0d18c6
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/Canny_1.png.md5
@@ -0,0 +1 @@
+8e0c7e859b39a6dd5b5381bd411bb8e6
diff --git a/Filters/General/Testing/Data/Baseline/Canny_2.png.md5 b/Filters/General/Testing/Data/Baseline/Canny_2.png.md5
new file mode 100644
index 0000000..e10530d
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/Canny_2.png.md5
@@ -0,0 +1 @@
+fc11305b14db0ea484115427be70caaf
diff --git a/Filters/General/Testing/Data/Baseline/OBBCylinder.png.md5 b/Filters/General/Testing/Data/Baseline/OBBCylinder.png.md5
new file mode 100644
index 0000000..bc21513
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/OBBCylinder.png.md5
@@ -0,0 +1 @@
+c71d8b88aa4819db47d42996e06fc87f
diff --git a/Filters/General/Testing/Data/Baseline/TestBooleanOperationPolyDataFilter.png.md5 b/Filters/General/Testing/Data/Baseline/TestBooleanOperationPolyDataFilter.png.md5
new file mode 100644
index 0000000..2237488
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestBooleanOperationPolyDataFilter.png.md5
@@ -0,0 +1 @@
+61f901943879c01634945ed38d7e8698
diff --git a/Filters/General/Testing/Data/Baseline/TestBooleanOperationPolyDataFilter2.png.md5 b/Filters/General/Testing/Data/Baseline/TestBooleanOperationPolyDataFilter2.png.md5
new file mode 100644
index 0000000..2237488
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestBooleanOperationPolyDataFilter2.png.md5
@@ -0,0 +1 @@
+61f901943879c01634945ed38d7e8698
diff --git a/Filters/General/Testing/Data/Baseline/TestCellDerivs.png.md5 b/Filters/General/Testing/Data/Baseline/TestCellDerivs.png.md5
new file mode 100644
index 0000000..7094fc3
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestCellDerivs.png.md5
@@ -0,0 +1 @@
+6b4250d1448c1f6f0b3eb363a2150792
diff --git a/Filters/General/Testing/Data/Baseline/TestCellDerivs_1.png.md5 b/Filters/General/Testing/Data/Baseline/TestCellDerivs_1.png.md5
new file mode 100644
index 0000000..2eb727e
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestCellDerivs_1.png.md5
@@ -0,0 +1 @@
+6a4b3676ebc0de4c508856bff0edc1a9
diff --git a/Filters/General/Testing/Data/Baseline/TestClipClosedSurface.png.md5 b/Filters/General/Testing/Data/Baseline/TestClipClosedSurface.png.md5
new file mode 100644
index 0000000..fa7bdc0
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestClipClosedSurface.png.md5
@@ -0,0 +1 @@
+28d56f8956aa48a22617242b10ef0545
diff --git a/Filters/General/Testing/Data/Baseline/TestClipOutline.png.md5 b/Filters/General/Testing/Data/Baseline/TestClipOutline.png.md5
new file mode 100644
index 0000000..0e3e61d
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestClipOutline.png.md5
@@ -0,0 +1 @@
+5f75582515c5c827bd602f261bbd57aa
diff --git a/Filters/General/Testing/Data/Baseline/TestContourTriangulator.png.md5 b/Filters/General/Testing/Data/Baseline/TestContourTriangulator.png.md5
new file mode 100644
index 0000000..113d187
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestContourTriangulator.png.md5
@@ -0,0 +1 @@
+b5f4f7c04e970965658203c3e9cefd76
diff --git a/Filters/General/Testing/Data/Baseline/TestContourTriangulatorCutter.png.md5 b/Filters/General/Testing/Data/Baseline/TestContourTriangulatorCutter.png.md5
new file mode 100644
index 0000000..1d30839
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestContourTriangulatorCutter.png.md5
@@ -0,0 +1 @@
+eb7571fd24d7c09b2aa9726323ca4a27
diff --git a/Filters/General/Testing/Data/Baseline/TestContourTriangulatorMarching.png.md5 b/Filters/General/Testing/Data/Baseline/TestContourTriangulatorMarching.png.md5
new file mode 100644
index 0000000..24d58f5
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestContourTriangulatorMarching.png.md5
@@ -0,0 +1 @@
+43337aa6d75f290ec1b63326c30fe3ed
diff --git a/Filters/General/Testing/Data/Baseline/TestCurvatures.png.md5 b/Filters/General/Testing/Data/Baseline/TestCurvatures.png.md5
new file mode 100644
index 0000000..d0bd33a
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestCurvatures.png.md5
@@ -0,0 +1 @@
+958f1858647386c30f0cd84bb49d227f
diff --git a/Filters/General/Testing/Data/Baseline/TestDeformPointSet.png.md5 b/Filters/General/Testing/Data/Baseline/TestDeformPointSet.png.md5
new file mode 100644
index 0000000..31af335
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestDeformPointSet.png.md5
@@ -0,0 +1 @@
+fd9d6c68a1b44c0bbe6001eb5be9e2bb
diff --git a/Filters/General/Testing/Data/Baseline/TestDensifyPolyData.png.md5 b/Filters/General/Testing/Data/Baseline/TestDensifyPolyData.png.md5
new file mode 100644
index 0000000..9fa584b
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestDensifyPolyData.png.md5
@@ -0,0 +1 @@
+46d21bb658859a6f44bb72b7153e898a
diff --git a/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_1.png.md5 b/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_1.png.md5
new file mode 100644
index 0000000..fff43af
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_1.png.md5
@@ -0,0 +1 @@
+db0a4a553cbc2aa7dd9cce00d901aa2a
diff --git a/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_2.png.md5 b/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_2.png.md5
new file mode 100644
index 0000000..100127c
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_2.png.md5
@@ -0,0 +1 @@
+fecc85c5a65d20ef70c68ea7da9fb140
diff --git a/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_3.png.md5 b/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_3.png.md5
new file mode 100644
index 0000000..351121b
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestDensifyPolyData_3.png.md5
@@ -0,0 +1 @@
+ba4a970302d2d238a5b0a9c445534e62
diff --git a/Filters/General/Testing/Data/Baseline/TestDiscreteMarchingCubes.png.md5 b/Filters/General/Testing/Data/Baseline/TestDiscreteMarchingCubes.png.md5
new file mode 100644
index 0000000..137af43
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestDiscreteMarchingCubes.png.md5
@@ -0,0 +1 @@
+4e34c2e1f80c78727992b664065dd5bb
diff --git a/Filters/General/Testing/Data/Baseline/TestDistancePolyDataFilter.png.md5 b/Filters/General/Testing/Data/Baseline/TestDistancePolyDataFilter.png.md5
new file mode 100644
index 0000000..f6e7c96
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestDistancePolyDataFilter.png.md5
@@ -0,0 +1 @@
+126aa0f17eac86916fd3dc601172c9bf
diff --git a/Filters/General/Testing/Data/Baseline/TestGraphLayoutFilter.png.md5 b/Filters/General/Testing/Data/Baseline/TestGraphLayoutFilter.png.md5
new file mode 100644
index 0000000..7098d41
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestGraphLayoutFilter.png.md5
@@ -0,0 +1 @@
+b7292d033109f7b10b93bdd3b8f7d238
diff --git a/Filters/General/Testing/Data/Baseline/TestIconGlyphFilterGravity.png.md5 b/Filters/General/Testing/Data/Baseline/TestIconGlyphFilterGravity.png.md5
new file mode 100644
index 0000000..703e6e0
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestIconGlyphFilterGravity.png.md5
@@ -0,0 +1 @@
+8d7c063a5bf8b367eedbd149ca550a6c
diff --git a/Filters/General/Testing/Data/Baseline/TestIntersectionPolyDataFilter.png.md5 b/Filters/General/Testing/Data/Baseline/TestIntersectionPolyDataFilter.png.md5
new file mode 100644
index 0000000..336426d
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestIntersectionPolyDataFilter.png.md5
@@ -0,0 +1 @@
+32ec21da51e72faa605bcc7d25cccd98
diff --git a/Filters/General/Testing/Data/Baseline/TestMultiBlockStreamer.png.md5 b/Filters/General/Testing/Data/Baseline/TestMultiBlockStreamer.png.md5
new file mode 100644
index 0000000..bd2a1ee
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestMultiBlockStreamer.png.md5
@@ -0,0 +1 @@
+6516d04863750a60e99e6da17df869e6
diff --git a/Filters/General/Testing/Data/Baseline/TestQuadraturePoints.png.md5 b/Filters/General/Testing/Data/Baseline/TestQuadraturePoints.png.md5
new file mode 100644
index 0000000..f3031b8
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestQuadraturePoints.png.md5
@@ -0,0 +1 @@
+a1e758e979d5819f7c76027dd50c7a0f
diff --git a/Filters/General/Testing/Data/Baseline/TestRandomAttributeGenerator.png.md5 b/Filters/General/Testing/Data/Baseline/TestRandomAttributeGenerator.png.md5
new file mode 100644
index 0000000..db89f00
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestRandomAttributeGenerator.png.md5
@@ -0,0 +1 @@
+a60dd9ef036b1f429fff3e35e581afc1
diff --git a/Filters/General/Testing/Data/Baseline/TestRandomAttributeGeneratorScalar.png.md5 b/Filters/General/Testing/Data/Baseline/TestRandomAttributeGeneratorScalar.png.md5
new file mode 100644
index 0000000..29fd6a7
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestRandomAttributeGeneratorScalar.png.md5
@@ -0,0 +1 @@
+94895f3a66463691a80fabd2d88a8364
diff --git a/Filters/General/Testing/Data/Baseline/TestRectilinearGridToTetrahedra.png.md5 b/Filters/General/Testing/Data/Baseline/TestRectilinearGridToTetrahedra.png.md5
new file mode 100644
index 0000000..39d283b
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestRectilinearGridToTetrahedra.png.md5
@@ -0,0 +1 @@
+90f98b9876edc924587881899d73c732
diff --git a/Filters/General/Testing/Data/Baseline/TestRectilinearGridToTetrahedra_1.png.md5 b/Filters/General/Testing/Data/Baseline/TestRectilinearGridToTetrahedra_1.png.md5
new file mode 100644
index 0000000..05ae184
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestRectilinearGridToTetrahedra_1.png.md5
@@ -0,0 +1 @@
+8f5d56f42b0463017ff591dc3943a75e
diff --git a/Filters/General/Testing/Data/Baseline/TestSplineFilter.png.md5 b/Filters/General/Testing/Data/Baseline/TestSplineFilter.png.md5
new file mode 100644
index 0000000..7f21863
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestSplineFilter.png.md5
@@ -0,0 +1 @@
+2caa8af906a9d90a8849b9b437b27cbf
diff --git a/Filters/General/Testing/Data/Baseline/TestUncertaintyTubeFilter.png.md5 b/Filters/General/Testing/Data/Baseline/TestUncertaintyTubeFilter.png.md5
new file mode 100644
index 0000000..1d405cf
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestUncertaintyTubeFilter.png.md5
@@ -0,0 +1 @@
+d5ba30510e52532c1068784af8a285f9
diff --git a/Filters/General/Testing/Data/Baseline/TestYoungsMaterialInterface.png.md5 b/Filters/General/Testing/Data/Baseline/TestYoungsMaterialInterface.png.md5
new file mode 100644
index 0000000..b75ab41
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/TestYoungsMaterialInterface.png.md5
@@ -0,0 +1 @@
+f25f2fb4a757f487ccc3325331218a1d
diff --git a/Filters/General/Testing/Data/Baseline/WarpScalarImage.png.md5 b/Filters/General/Testing/Data/Baseline/WarpScalarImage.png.md5
new file mode 100644
index 0000000..d632ec0
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/WarpScalarImage.png.md5
@@ -0,0 +1 @@
+cdfd2949f79e853cea70cbfbd07f8d47
diff --git a/Filters/General/Testing/Data/Baseline/WarpToImage.png.md5 b/Filters/General/Testing/Data/Baseline/WarpToImage.png.md5
new file mode 100644
index 0000000..35bdd1d
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/WarpToImage.png.md5
@@ -0,0 +1 @@
+ea98f12611a898bbe7fea86d1dc5c061
diff --git a/Filters/General/Testing/Data/Baseline/WarpVectorImage.png.md5 b/Filters/General/Testing/Data/Baseline/WarpVectorImage.png.md5
new file mode 100644
index 0000000..a8ea8f8
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/WarpVectorImage.png.md5
@@ -0,0 +1 @@
+31a97a9ed86b8a14ec5be004270b78b7
diff --git a/Filters/General/Testing/Data/Baseline/blankGrid.png.md5 b/Filters/General/Testing/Data/Baseline/blankGrid.png.md5
new file mode 100644
index 0000000..97af61e
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/blankGrid.png.md5
@@ -0,0 +1 @@
+fbdf9e6dce2e75086bb4ddf4029806ff
diff --git a/Filters/General/Testing/Data/Baseline/clipComb.png.md5 b/Filters/General/Testing/Data/Baseline/clipComb.png.md5
new file mode 100644
index 0000000..4d8d4db
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipComb.png.md5
@@ -0,0 +1 @@
+030914e93033dc3e08193240fe9abafc
diff --git a/Filters/General/Testing/Data/Baseline/clipHex.png.md5 b/Filters/General/Testing/Data/Baseline/clipHex.png.md5
new file mode 100644
index 0000000..ad2cf4d
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipHex.png.md5
@@ -0,0 +1 @@
+64eeeaa5d780b84b7210afb9d33a8d33
diff --git a/Filters/General/Testing/Data/Baseline/clipImage.png.md5 b/Filters/General/Testing/Data/Baseline/clipImage.png.md5
new file mode 100644
index 0000000..a969d75
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipImage.png.md5
@@ -0,0 +1 @@
+843b23010f8c2b4b9f16ebf37a5a6dca
diff --git a/Filters/General/Testing/Data/Baseline/clipPyramid.png.md5 b/Filters/General/Testing/Data/Baseline/clipPyramid.png.md5
new file mode 100644
index 0000000..22733fb
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipPyramid.png.md5
@@ -0,0 +1 @@
+676dab66d10bfccd35a4a472facee8c8
diff --git a/Filters/General/Testing/Data/Baseline/clipPyramid_1.png.md5 b/Filters/General/Testing/Data/Baseline/clipPyramid_1.png.md5
new file mode 100644
index 0000000..2eba2c1
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipPyramid_1.png.md5
@@ -0,0 +1 @@
+f8ba73285ad0eb5af05dd88492cf1c67
diff --git a/Filters/General/Testing/Data/Baseline/clipQuadraticCells.png.md5 b/Filters/General/Testing/Data/Baseline/clipQuadraticCells.png.md5
new file mode 100644
index 0000000..0cf9f19
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipQuadraticCells.png.md5
@@ -0,0 +1 @@
+bb56eeee36100deee029960a13a1519c
diff --git a/Filters/General/Testing/Data/Baseline/clipTet.png.md5 b/Filters/General/Testing/Data/Baseline/clipTet.png.md5
new file mode 100644
index 0000000..0b57d0b
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipTet.png.md5
@@ -0,0 +1 @@
+36353ed2b78c9d8c5a41524ceeb7f77f
diff --git a/Filters/General/Testing/Data/Baseline/clipVolume.png.md5 b/Filters/General/Testing/Data/Baseline/clipVolume.png.md5
new file mode 100644
index 0000000..38e67f0
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipVolume.png.md5
@@ -0,0 +1 @@
+6a3b13080dec45b6fbeb9235ff80ead0
diff --git a/Filters/General/Testing/Data/Baseline/clipVolume2.png.md5 b/Filters/General/Testing/Data/Baseline/clipVolume2.png.md5
new file mode 100644
index 0000000..730ceb7
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipVolume2.png.md5
@@ -0,0 +1 @@
+152828b01f707eeadae48e6d51f56460
diff --git a/Filters/General/Testing/Data/Baseline/clipVolume3.png.md5 b/Filters/General/Testing/Data/Baseline/clipVolume3.png.md5
new file mode 100644
index 0000000..1b07ff9
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipVolume3.png.md5
@@ -0,0 +1 @@
+5ef9244a4b56f48633e3c24d40f2ff24
diff --git a/Filters/General/Testing/Data/Baseline/clipWedge.png.md5 b/Filters/General/Testing/Data/Baseline/clipWedge.png.md5
new file mode 100644
index 0000000..fda2d31
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/clipWedge.png.md5
@@ -0,0 +1 @@
+477d880325596937d35b63d44c46932b
diff --git a/Filters/General/Testing/Data/Baseline/contoursToSurface.png.md5 b/Filters/General/Testing/Data/Baseline/contoursToSurface.png.md5
new file mode 100644
index 0000000..6a52ba1
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/contoursToSurface.png.md5
@@ -0,0 +1 @@
+a6b20220cbf6d2b04623b6f555b7a9a4
diff --git a/Filters/General/Testing/Data/Baseline/cursor2D.png.md5 b/Filters/General/Testing/Data/Baseline/cursor2D.png.md5
new file mode 100644
index 0000000..8c4803e
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/cursor2D.png.md5
@@ -0,0 +1 @@
+64cc2661e5e3729e664491d018be4ea0
diff --git a/Filters/General/Testing/Data/Baseline/cursor3D.png.md5 b/Filters/General/Testing/Data/Baseline/cursor3D.png.md5
new file mode 100644
index 0000000..76a4d15
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/cursor3D.png.md5
@@ -0,0 +1 @@
+02a7151dcc01977111cab3af6ee817e9
diff --git a/Filters/General/Testing/Data/Baseline/dicer.png.md5 b/Filters/General/Testing/Data/Baseline/dicer.png.md5
new file mode 100644
index 0000000..b623b90
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/dicer.png.md5
@@ -0,0 +1 @@
+ada134c219e167a88fc42227c6d8adcd
diff --git a/Filters/General/Testing/Data/Baseline/edgePoints.png.md5 b/Filters/General/Testing/Data/Baseline/edgePoints.png.md5
new file mode 100644
index 0000000..a43c89b
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/edgePoints.png.md5
@@ -0,0 +1 @@
+2347b5722993b2a21d2d28070cce064e
diff --git a/Filters/General/Testing/Data/Baseline/mcubes.png.md5 b/Filters/General/Testing/Data/Baseline/mcubes.png.md5
new file mode 100644
index 0000000..b1a7114
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/mcubes.png.md5
@@ -0,0 +1 @@
+cb690849cba7227e48f47f00a5ae4d30
diff --git a/Filters/General/Testing/Data/Baseline/recursiveDC.png.md5 b/Filters/General/Testing/Data/Baseline/recursiveDC.png.md5
new file mode 100644
index 0000000..3ac41cb
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/recursiveDC.png.md5
@@ -0,0 +1 @@
+279d017cec0c58f4ed79ad35100cef5c
diff --git a/Filters/General/Testing/Data/Baseline/recursiveDC_1.png.md5 b/Filters/General/Testing/Data/Baseline/recursiveDC_1.png.md5
new file mode 100644
index 0000000..eea4c19
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/recursiveDC_1.png.md5
@@ -0,0 +1 @@
+7fae8061b9550ea9d0a0ffb319f676c0
diff --git a/Filters/General/Testing/Data/Baseline/spatialRepAll.png.md5 b/Filters/General/Testing/Data/Baseline/spatialRepAll.png.md5
new file mode 100644
index 0000000..6aabaa3
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/spatialRepAll.png.md5
@@ -0,0 +1 @@
+605168bf05919afdef6ebf787d19c12c
diff --git a/Filters/General/Testing/Data/Baseline/splitVectors.png.md5 b/Filters/General/Testing/Data/Baseline/splitVectors.png.md5
new file mode 100644
index 0000000..d8982f2
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/splitVectors.png.md5
@@ -0,0 +1 @@
+24bc70955972f9cd4ef8571c463ca80d
diff --git a/Filters/General/Testing/Data/Baseline/streamTracer.png.md5 b/Filters/General/Testing/Data/Baseline/streamTracer.png.md5
new file mode 100644
index 0000000..af646f8
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/streamTracer.png.md5
@@ -0,0 +1 @@
+f9f85a937c6f3733cce439f0a15ecb7f
diff --git a/Filters/General/Testing/Data/Baseline/streamTracer_1.png.md5 b/Filters/General/Testing/Data/Baseline/streamTracer_1.png.md5
new file mode 100644
index 0000000..6529cf2
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/streamTracer_1.png.md5
@@ -0,0 +1 @@
+c8a29957e2b21268301038cade6d6f1e
diff --git a/Filters/General/Testing/Data/Baseline/subPixelPositionMin.png.md5 b/Filters/General/Testing/Data/Baseline/subPixelPositionMin.png.md5
new file mode 100644
index 0000000..277e94b
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/subPixelPositionMin.png.md5
@@ -0,0 +1 @@
+1923e04d7045c396fec5a33f4409d41e
diff --git a/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter.png.md5 b/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter.png.md5
new file mode 100644
index 0000000..2120639
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter.png.md5
@@ -0,0 +1 @@
+699b7301f2f9a5f471987fd10a093fef
diff --git a/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter2.png.md5 b/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter2.png.md5
new file mode 100644
index 0000000..623f63d
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter2.png.md5
@@ -0,0 +1 @@
+f2d6fdaf19e1d50b59aea88d04bb75d4
diff --git a/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter_1.png.md5 b/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter_1.png.md5
new file mode 100644
index 0000000..f5532b4
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/testDataSetTriangleFilter_1.png.md5
@@ -0,0 +1 @@
+a9b847ea96db0af4832bdcfd4a430fef
diff --git a/Filters/General/Testing/Data/Baseline/testReflect.png.md5 b/Filters/General/Testing/Data/Baseline/testReflect.png.md5
new file mode 100644
index 0000000..3411008
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/testReflect.png.md5
@@ -0,0 +1 @@
+19b30b5881d81bb801288ca57d756c6f
diff --git a/Filters/General/Testing/Data/Baseline/warplens.png.md5 b/Filters/General/Testing/Data/Baseline/warplens.png.md5
new file mode 100644
index 0000000..97b2708
--- /dev/null
+++ b/Filters/General/Testing/Data/Baseline/warplens.png.md5
@@ -0,0 +1 @@
+1c178b2ed9bd70c9b151f2a33ee7d498
diff --git a/Filters/General/Testing/Python/CMakeLists.txt b/Filters/General/Testing/Python/CMakeLists.txt
index 96ce44a..0ee3423 100644
--- a/Filters/General/Testing/Python/CMakeLists.txt
+++ b/Filters/General/Testing/Python/CMakeLists.txt
@@ -1,46 +1,44 @@
-add_test_python(Canny.py Graphics)
-add_test_python(OBBCylinder.py Graphics)
-add_test_python(TestCellDerivs.py Graphics)
-add_test_python(TestClipClosedSurface.py Graphics)
-add_test_python(TestClipOutline.py Graphics)
-add_test_python(TestCurvatures.py Graphics)
-add_test_python(TestDeformPointSet.py Graphics)
-add_test_python(TestDiscreteMarchingCubes.py Graphics)
-add_test_python(TestGraphLayoutFilter.py Graphics)
-add_test_python(TestRectilinearGridToTetrahedra.py Graphics)
-add_test_python(WarpToImage.py Graphics)
-add_test_python(clipHex.py Hybrid)
-add_test_python(clipImage.py Graphics)
-add_test_python(clipPyramid.py Hybrid)
-add_test_python(clipQuadraticCells.py Graphics)
-add_test_python(clipTet.py Hybrid)
-add_test_python(clipVolume.py Graphics)
-add_test_python(clipVolume2.py Graphics)
-add_test_python(clipVolume3.py Graphics)
-add_test_python(clipWedge.py Hybrid)
-add_test_python(contoursToSurface.py Graphics)
-add_test_python(dicer.py Graphics)
-add_test_python(edgePoints.py Graphics)
-add_test_python(mcubes.py Graphics)
-add_test_python(recursiveDC.py Graphics)
-add_test_python(streamTracer.py Graphics)
-add_test_python(subPixelPositionMin.py Graphics)
-add_test_python(testDataSetTriangleFilter.py Graphics)
-add_test_python(testDataSetTriangleFilter2.py Graphics)
-add_test_python(testReflect.py Graphics)
-add_test_python(warplens.py Graphics)
-add_test_python(TestRandomAttributeGenerator.py Graphics)
-add_test_python(WarpScalarImage.py Graphics)
-add_test_python(WarpVectorImage.py Graphics)
-add_test_python(cursor2D.py Graphics)
-add_test_python1(pointsPrecisions.py)
-
-if(VTK_DATA_ROOT)
-  add_test_python(TestSplineFilter.py Graphics)
-  add_test_python(TestMultiBlockStreamer.py Graphics)
-  add_test_python(blankGrid.py Graphics)
-  add_test_python(clipComb.py Graphics)
-  add_test_python(cursor3D.py Graphics)
-  add_test_python(splitVectors.py Graphics)
-  add_test_python1(spatialRepAll.py Baseline/Graphics)
-endif()
+vtk_add_test_python(Canny.py)
+vtk_add_test_python(OBBCylinder.py)
+vtk_add_test_python(TestCellDerivs.py)
+vtk_add_test_python(TestClipClosedSurface.py)
+vtk_add_test_python(TestClipOutline.py)
+vtk_add_test_python(TestCurvatures.py)
+vtk_add_test_python(TestDeformPointSet.py)
+vtk_add_test_python(TestDiscreteMarchingCubes.py)
+vtk_add_test_python(TestGraphLayoutFilter.py)
+vtk_add_test_python(TestMultiBlockStreamer.py)
+vtk_add_test_python(TestRectilinearGridToTetrahedra.py)
+vtk_add_test_python(TestSplineFilter.py)
+vtk_add_test_python(WarpToImage.py)
+vtk_add_test_python(blankGrid.py)
+vtk_add_test_python(clipComb.py)
+vtk_add_test_python(clipHex.py)
+vtk_add_test_python(clipImage.py)
+vtk_add_test_python(clipPyramid.py)
+vtk_add_test_python(clipQuadraticCells.py)
+vtk_add_test_python(clipTet.py)
+vtk_add_test_python(clipVolume.py)
+vtk_add_test_python(clipVolume2.py)
+vtk_add_test_python(clipVolume3.py)
+vtk_add_test_python(clipWedge.py)
+vtk_add_test_python(contoursToSurface.py)
+vtk_add_test_python(cursor3D.py)
+vtk_add_test_python(dicer.py)
+vtk_add_test_python(edgePoints.py)
+vtk_add_test_python(mcubes.py)
+vtk_add_test_python(recursiveDC.py)
+vtk_add_test_python(splitVectors.py)
+vtk_add_test_python(streamTracer.py)
+vtk_add_test_python(subPixelPositionMin.py)
+vtk_add_test_python(testDataSetTriangleFilter.py)
+vtk_add_test_python(testDataSetTriangleFilter2.py)
+vtk_add_test_python(testReflect.py)
+vtk_add_test_python(warplens.py)
+vtk_add_test_python(TestRandomAttributeGenerator.py)
+vtk_add_test_python(TestRandomAttributeGeneratorScalar.py)
+vtk_add_test_python(WarpScalarImage.py)
+vtk_add_test_python(WarpVectorImage.py)
+vtk_add_test_python(cursor2D.py)
+vtk_add_test_python(spatialRepAll.py NO_RT)
+vtk_add_test_python(pointsPrecisions.py NO_DATA NO_VALID NO_RT)
diff --git a/Filters/General/Testing/Python/TestRandomAttributeGenerator.py b/Filters/General/Testing/Python/TestRandomAttributeGenerator.py
index f35093e..1e42171 100755
--- a/Filters/General/Testing/Python/TestRandomAttributeGenerator.py
+++ b/Filters/General/Testing/Python/TestRandomAttributeGenerator.py
@@ -43,19 +43,9 @@ tg.ClampScalingOn()
 n = vtk.vtkPolyDataNormals()
 n.SetInputConnection(tg.GetOutputPort())
 
-cs = vtk.vtkConeSource()
-cs.SetResolution(6)
-
-glyph = vtk.vtkGlyph3D()
-glyph.SetInputConnection(ag.GetOutputPort())
-glyph.SetSourceConnection(cs.GetOutputPort())
-glyph.SetScaleModeToDataScalingOff()
-glyph.SetScaleFactor(0.05)
-
 pdm = vtk.vtkPolyDataMapper()
 pdm.SetInputConnection(n.GetOutputPort())
 
-# pdm SetInputConnection [glyph GetOutputPort]
 a = vtk.vtkActor()
 a.SetMapper(pdm)
 
diff --git a/Filters/General/Testing/Python/TestRandomAttributeGeneratorScalar.py b/Filters/General/Testing/Python/TestRandomAttributeGeneratorScalar.py
new file mode 100755
index 0000000..fd94858
--- /dev/null
+++ b/Filters/General/Testing/Python/TestRandomAttributeGeneratorScalar.py
@@ -0,0 +1,69 @@
+#!/usr/bin/env python
+import vtk
+from vtk.test import Testing
+from vtk.util.misc import vtkGetDataRoot
+VTK_DATA_ROOT = vtkGetDataRoot()
+
+def PlaneSphereActors():
+    ps = vtk.vtkPlaneSource()
+    ps.SetXResolution(10)
+    ps.SetYResolution(10)
+
+    ss = vtk.vtkSphereSource()
+    ss.SetRadius (0.3)
+
+    group = vtk.vtkMultiBlockDataGroupFilter()
+    group.AddInputConnection(ps.GetOutputPort())
+    group.AddInputConnection(ss.GetOutputPort())
+
+    ag = vtk.vtkRandomAttributeGenerator()
+    ag.SetInputConnection(group.GetOutputPort())
+    ag.GenerateCellScalarsOn()
+    ag.AttributesConstantPerBlockOn()
+
+    n = vtk.vtkPolyDataNormals()
+    n.SetInputConnection(ag.GetOutputPort())
+    n.Update ();
+
+    actors = []
+    it = n.GetOutputDataObject(0).NewIterator()
+    it.InitTraversal()
+    while not it.IsDoneWithTraversal():
+        pm = vtk.vtkPolyDataMapper()
+        pm.SetInputData(it.GetCurrentDataObject())
+
+        a = vtk.vtkActor()
+        a.SetMapper(pm)
+        actors.append (a)
+        it.GoToNextItem()
+    return actors
+
+# Create the RenderWindow, Renderer and interactive renderer
+ren = vtk.vtkRenderer()
+ren.SetBackground(0, 0, 0)
+renWin = vtk.vtkRenderWindow()
+renWin.SetSize(300, 300)
+renWin.AddRenderer(ren)
+
+# make sure to have the same regression image on all platforms.
+renWin.SetMultiSamples(0)
+iren = vtk.vtkRenderWindowInteractor()
+iren.SetRenderWindow(renWin)
+
+# Force a starting random value
+raMath = vtk.vtkMath()
+raMath.RandomSeed(6)
+
+# Generate random cell attributes on a plane and a sphere
+for a in PlaneSphereActors():
+    ren.AddActor(a)
+
+# reorient the camera
+camera = ren.GetActiveCamera()
+camera.Azimuth(20)
+camera.Elevation(20)
+ren.SetActiveCamera(camera)
+ren.ResetCamera()
+
+renWin.Render()
+#iren.Start()
diff --git a/Filters/General/Testing/Python/spatialRepAll.py b/Filters/General/Testing/Python/spatialRepAll.py
index 839b90b..4967822 100755
--- a/Filters/General/Testing/Python/spatialRepAll.py
+++ b/Filters/General/Testing/Python/spatialRepAll.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython spatialRepAll.pyy  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/General/Testing/Tcl/CMakeLists.txt b/Filters/General/Testing/Tcl/CMakeLists.txt
index a30d3dd..44f0da9 100644
--- a/Filters/General/Testing/Tcl/CMakeLists.txt
+++ b/Filters/General/Testing/Tcl/CMakeLists.txt
@@ -1,45 +1,42 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(Canny Graphics)
-  add_test_tcl(TestClipClosedSurface Graphics)
-  add_test_tcl(TestMultiBlockStreamer Graphics)
-  add_test_tcl(TestSplineFilter Graphics)
-  add_test_tcl(WarpVectorImage Graphics)
-  add_test_tcl(blankGrid Graphics)
-  add_test_tcl(clipComb Graphics)
-  add_test_tcl(clipImage Graphics)
-  add_test_tcl(cursor3D Graphics)
-  add_test_tcl(dicer Graphics)
-  add_test_tcl(edgePoints Graphics)
-  add_test_tcl(mcubes Graphics)
-  add_test_tcl(recursiveDC Graphics)
-  add_test_tcl(spatialRepAll Graphics)
-  add_test_tcl(splitVectors Graphics)
-  add_test_tcl(streamTracer Graphics)
-  add_test_tcl(testDataSetTriangleFilter Graphics)
-  add_test_tcl(testDataSetTriangleFilter2 Graphics)
-  add_test_tcl(warplens Graphics)
-endif()
+vtk_add_test_tcl(Canny)
+vtk_add_test_tcl(TestClipClosedSurface)
+vtk_add_test_tcl(TestMultiBlockStreamer)
+vtk_add_test_tcl(TestSplineFilter)
+vtk_add_test_tcl(WarpVectorImage)
+vtk_add_test_tcl(blankGrid)
+vtk_add_test_tcl(clipComb)
+vtk_add_test_tcl(clipImage)
+vtk_add_test_tcl(cursor3D)
+vtk_add_test_tcl(dicer)
+vtk_add_test_tcl(edgePoints)
+vtk_add_test_tcl(mcubes)
+vtk_add_test_tcl(recursiveDC)
+vtk_add_test_tcl(spatialRepAll)
+vtk_add_test_tcl(splitVectors)
+vtk_add_test_tcl(streamTracer)
+vtk_add_test_tcl(testDataSetTriangleFilter)
+vtk_add_test_tcl(testDataSetTriangleFilter2)
+vtk_add_test_tcl(warplens)
+vtk_add_test_tcl(OBBCylinder)
+vtk_add_test_tcl(TestCellDerivs)
+vtk_add_test_tcl(TestClipOutline)
+vtk_add_test_tcl(TestCurvatures)
+vtk_add_test_tcl(TestDiscreteMarchingCubes)
+vtk_add_test_tcl(TestGraphLayoutFilter)
+vtk_add_test_tcl(TestRandomAttributeGenerator)
+vtk_add_test_tcl(TestRectilinearGridToTetrahedra)
+vtk_add_test_tcl(WarpScalarImage)
+vtk_add_test_tcl(WarpToImage)
+vtk_add_test_tcl(clipQuadraticCells)
+vtk_add_test_tcl(clipVolume)
+vtk_add_test_tcl(clipVolume2)
+vtk_add_test_tcl(clipVolume3)
+vtk_add_test_tcl(contoursToSurface)
+vtk_add_test_tcl(cursor2D)
+vtk_add_test_tcl(subPixelPositionMin)
+vtk_add_test_tcl(testReflect)
 
-add_test_tcl(OBBCylinder Graphics)
-add_test_tcl(TestCellDerivs Graphics)
-add_test_tcl(TestClipOutline Graphics)
-add_test_tcl(TestCurvatures Graphics)
-add_test_tcl(TestDiscreteMarchingCubes Graphics)
-add_test_tcl(TestGraphLayoutFilter Graphics)
-add_test_tcl(TestRandomAttributeGenerator Graphics)
-add_test_tcl(TestRectilinearGridToTetrahedra Graphics)
-add_test_tcl(WarpScalarImage Graphics)
-add_test_tcl(WarpToImage Graphics)
-add_test_tcl(clipQuadraticCells Graphics)
-add_test_tcl(clipVolume Graphics)
-add_test_tcl(clipVolume2 Graphics)
-add_test_tcl(clipVolume3 Graphics)
-add_test_tcl(contoursToSurface Graphics)
-add_test_tcl(cursor2D Graphics)
-add_test_tcl(subPixelPositionMin Graphics)
-add_test_tcl(testReflect Graphics)
-
-add_test_tcl(clipHex Hybrid)
-add_test_tcl(clipPyramid Hybrid)
-add_test_tcl(clipTet Hybrid)
-add_test_tcl(clipWedge Hybrid)
+vtk_add_test_tcl(clipHex)
+vtk_add_test_tcl(clipPyramid)
+vtk_add_test_tcl(clipTet)
+vtk_add_test_tcl(clipWedge)
diff --git a/Filters/General/vtkAppendPoints.cxx b/Filters/General/vtkAppendPoints.cxx
index 001f3a0..ee10fe5 100644
--- a/Filters/General/vtkAppendPoints.cxx
+++ b/Filters/General/vtkAppendPoints.cxx
@@ -34,6 +34,7 @@ vtkStandardNewMacro(vtkAppendPoints);
 vtkAppendPoints::vtkAppendPoints()
 {
   this->InputIdArrayName = 0;
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 //----------------------------------------------------------------------------
@@ -125,6 +126,34 @@ int vtkAppendPoints::RequestData(vtkInformation *vtkNotUsed(request),
   vtkPointData* pd = 0;
   vtkIdType index = 0;
   vtkSmartPointer<vtkPoints> pts = vtkSmartPointer<vtkPoints>::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    // The points in distinct inputs may be of differing precisions.
+    pts->SetDataType(VTK_FLOAT);
+    for (size_t idx = 0; idx < inputs.size(); ++idx)
+      {
+      vtkPolyData* input = inputs[idx];
+
+      // Set the desired precision to VTK_DOUBLE if the precision of the
+      // points in any of the inputs is VTK_DOUBLE.
+      if(input && input->GetPoints() && input->GetPoints()->GetDataType() == VTK_DOUBLE)
+        {
+        pts->SetDataType(VTK_DOUBLE);
+        break;
+        }
+      }
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    pts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    pts->SetDataType(VTK_DOUBLE);
+    }
+
   pts->SetNumberOfPoints(totalPoints);
   vtkSmartPointer<vtkIntArray> idArr;
   if (this->InputIdArrayName)
@@ -171,7 +200,9 @@ void vtkAppendPoints::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os,indent);
   os << indent << "InputIdArrayName: "
-     << (this->InputIdArrayName ? this->InputIdArrayName : "(none)") << endl;
+     << (this->InputIdArrayName ? this->InputIdArrayName : "(none)") << endl
+     << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << endl;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Filters/General/vtkAppendPoints.h b/Filters/General/vtkAppendPoints.h
index 11fb0a6..d864a52 100644
--- a/Filters/General/vtkAppendPoints.h
+++ b/Filters/General/vtkAppendPoints.h
@@ -42,6 +42,17 @@ public:
   vtkSetStringMacro(InputIdArrayName);
   vtkGetStringMacro(InputIdArrayName);
 
+  // Description:
+  // Set/get the desired precision for the output type. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings. If the desired precision is
+  // DEFAULT_PRECISION and any of the inputs are double precision, then the
+  // output precision will be double precision. Otherwise, if the desired
+  // precision is DEFAULT_PRECISION and all the inputs are single precision,
+  // then the output will be single precision.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkAppendPoints();
   ~vtkAppendPoints();
@@ -52,7 +63,7 @@ protected:
   virtual int FillInputPortInformation(int, vtkInformation *);
 
   char* InputIdArrayName;
-
+  int OutputPointsPrecision;
 private:
   vtkAppendPoints(const vtkAppendPoints&);  // Not implemented.
   void operator=(const vtkAppendPoints&);  // Not implemented.
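
A brief usage note on the vtkAppendPoints change above: the new OutputPointsPrecision setting follows the vtkAlgorithm::DesiredOutputPrecision convention already used by other VTK filters. The following minimal sketch is not taken from the repository; it assumes a VTK 6.1 build, and the sphere sources merely stand in for arbitrary vtkPolyData inputs.

// Sketch: force double-precision output points when appending two inputs.
#include <vtkAlgorithm.h>
#include <vtkAppendPoints.h>
#include <vtkNew.h>
#include <vtkPoints.h>
#include <vtkPolyData.h>
#include <vtkSphereSource.h>
#include <vtkType.h>

int main()
{
  vtkNew<vtkSphereSource> meshA;   // placeholder inputs
  vtkNew<vtkSphereSource> meshB;

  vtkNew<vtkAppendPoints> append;
  append->AddInputConnection(meshA->GetOutputPort());
  append->AddInputConnection(meshB->GetOutputPort());

  // New in this revision: DEFAULT_PRECISION now promotes to double only if
  // some input already stores double points; request double explicitly here.
  append->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
  append->Update();

  return append->GetOutput()->GetPoints()->GetDataType() == VTK_DOUBLE ? 0 : 1;
}
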
diff --git a/Filters/General/vtkApproximatingSubdivisionFilter.h b/Filters/General/vtkApproximatingSubdivisionFilter.h
index 74656ce..91c44ba 100644
--- a/Filters/General/vtkApproximatingSubdivisionFilter.h
+++ b/Filters/General/vtkApproximatingSubdivisionFilter.h
@@ -47,7 +47,7 @@ public:
 
 protected:
   vtkApproximatingSubdivisionFilter();
-  ~vtkApproximatingSubdivisionFilter() {};
+  ~vtkApproximatingSubdivisionFilter() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual void GenerateSubdivisionPoints (vtkPolyData *inputDS,
diff --git a/Filters/General/vtkAxes.h b/Filters/General/vtkAxes.h
index 68462c5..ccc7b38 100644
--- a/Filters/General/vtkAxes.h
+++ b/Filters/General/vtkAxes.h
@@ -57,7 +57,7 @@ public:
 
 protected:
   vtkAxes();
-  ~vtkAxes() {};
+  ~vtkAxes() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   // This source does not know how to generate pieces yet.
diff --git a/Filters/General/vtkBoxClipDataSet.cxx b/Filters/General/vtkBoxClipDataSet.cxx
index a6df414..ad1456c 100644
--- a/Filters/General/vtkBoxClipDataSet.cxx
+++ b/Filters/General/vtkBoxClipDataSet.cxx
@@ -359,7 +359,7 @@ int vtkBoxClipDataSet::RequestData(vtkInformation *vtkNotUsed(request),
                                inPD, outPD, inCD, cellId, outCD);
           }
         numNew[0] = conn[0]->GetNumberOfCells() - num[0];
-        numNew[1] = conn[1]->GetNumberOfCells() - num[0];
+        numNew[1] = conn[1]->GetNumberOfCells() - num[1];
         num[0]    = conn[0]->GetNumberOfCells();
         num[1]    = conn[1]->GetNumberOfCells();
         }
@@ -376,7 +376,7 @@ int vtkBoxClipDataSet::RequestData(vtkInformation *vtkNotUsed(request),
                                inPD, outPD, inCD, cellId, outCD);
           }
         numNew[0] = conn[0]->GetNumberOfCells() - num[0];
-        numNew[1] = conn[1]->GetNumberOfCells() - num[0];
+        numNew[1] = conn[1]->GetNumberOfCells() - num[1];
         num[0]    = conn[0]->GetNumberOfCells();
         num[1]    = conn[1]->GetNumberOfCells();
         }
@@ -458,6 +458,7 @@ int vtkBoxClipDataSet::RequestData(vtkInformation *vtkNotUsed(request),
 
     for (i=0 ; i<numOutputs; i++)  // for both outputs
       {
+      conn[i]->InitTraversal();
       for (j=0; j < numNew[i]; j++)
         {
         locs[i]->InsertNextValue(conn[i]->GetTraversalLocation());
diff --git a/Filters/General/vtkBrownianPoints.h b/Filters/General/vtkBrownianPoints.h
index 24f8847..d4060bf 100644
--- a/Filters/General/vtkBrownianPoints.h
+++ b/Filters/General/vtkBrownianPoints.h
@@ -18,6 +18,9 @@
 // magnitude and direction) to each point. The minimum and maximum speed
 // values can be controlled by the user.
 
+// .SECTION See also
+// vtkRandomAttributeGenerator
+
 #ifndef __vtkBrownianPoints_h
 #define __vtkBrownianPoints_h
 
@@ -46,7 +49,7 @@ public:
 
 protected:
   vtkBrownianPoints();
-  ~vtkBrownianPoints() {};
+  ~vtkBrownianPoints() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double MinimumSpeed;
diff --git a/Filters/General/vtkCellCenters.h b/Filters/General/vtkCellCenters.h
index 9abb6f0..af7e26e 100644
--- a/Filters/General/vtkCellCenters.h
+++ b/Filters/General/vtkCellCenters.h
@@ -54,7 +54,7 @@ public:
 
 protected:
   vtkCellCenters();
-  ~vtkCellCenters() {};
+  ~vtkCellCenters() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
diff --git a/Filters/General/vtkCellDerivatives.h b/Filters/General/vtkCellDerivatives.h
index 4243203..52f04e7 100644
--- a/Filters/General/vtkCellDerivatives.h
+++ b/Filters/General/vtkCellDerivatives.h
@@ -96,7 +96,7 @@ public:
 
 protected:
   vtkCellDerivatives();
-  ~vtkCellDerivatives() {};
+  ~vtkCellDerivatives() {}
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
   int VectorMode;
diff --git a/Filters/General/vtkClipClosedSurface.cxx b/Filters/General/vtkClipClosedSurface.cxx
index e13e8bf..643b93d 100644
--- a/Filters/General/vtkClipClosedSurface.cxx
+++ b/Filters/General/vtkClipClosedSurface.cxx
@@ -196,7 +196,7 @@ public:
   vtkCCSEdgeLocatorNode() :
     ptId0(-1), ptId1(-1), edgeId(-1), next(0) {};
 
-  ~vtkCCSEdgeLocatorNode() {
+  void FreeList() {
     vtkCCSEdgeLocatorNode *ptr = this->next;
     while (ptr)
       {
@@ -225,6 +225,7 @@ public:
     return new vtkCCSEdgeLocator; };
 
   void Delete() {
+    this->Initialize();
     delete this; };
 
   // Description:
@@ -241,6 +242,12 @@ public:
 
 void vtkCCSEdgeLocator::Initialize()
 {
+  for (MapType::iterator i = this->EdgeMap.begin();
+       i != this->EdgeMap.end();
+       ++i)
+    {
+    i->second.FreeList();
+    }
   this->EdgeMap.clear();
 }
 
diff --git a/Filters/General/vtkClipDataSet.cxx b/Filters/General/vtkClipDataSet.cxx
index 8efbfa5..ed8bd2a 100644
--- a/Filters/General/vtkClipDataSet.cxx
+++ b/Filters/General/vtkClipDataSet.cxx
@@ -288,6 +288,10 @@ int vtkClipDataSet::RequestData(
       {
       newPoints->SetDataType(inputPointSet->GetPoints()->GetDataType());
       }
+    else
+      {
+      newPoints->SetDataType(VTK_FLOAT);
+      }
     }
   else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
     {
diff --git a/Filters/General/vtkCoincidentPoints.cxx b/Filters/General/vtkCoincidentPoints.cxx
index 72c3a8b..98bbf48 100644
--- a/Filters/General/vtkCoincidentPoints.cxx
+++ b/Filters/General/vtkCoincidentPoints.cxx
@@ -77,7 +77,6 @@ public:
       this->coord[1] = y;
       this->coord[2] = z;
       }
-    ~Coord() {}
 
     inline bool operator < (const Coord & other) const
       {
diff --git a/Filters/General/vtkCursor2D.cxx b/Filters/General/vtkCursor2D.cxx
index 253b44a..c58e244 100644
--- a/Filters/General/vtkCursor2D.cxx
+++ b/Filters/General/vtkCursor2D.cxx
@@ -308,7 +308,7 @@ void vtkCursor2D::SetFocalPoint(double x[3])
 }
 
 //---------------------------------------------------------------------------
-void vtkCursor2D::SetModelBounds(double bounds[6])
+void vtkCursor2D::SetModelBounds(const double bounds[6])
 {
   this->SetModelBounds(bounds[0], bounds[1], bounds[2], bounds[3], bounds[4], bounds[5]);
 }
diff --git a/Filters/General/vtkCursor2D.h b/Filters/General/vtkCursor2D.h
index 5bfd5eb..edf1874 100644
--- a/Filters/General/vtkCursor2D.h
+++ b/Filters/General/vtkCursor2D.h
@@ -47,7 +47,7 @@ public:
   // of the cursor, and where the focal point should lie.
   void SetModelBounds(double xmin, double xmax, double ymin, double ymax,
                       double zmin, double zmax);
-  void SetModelBounds(double bounds[6]);
+  void SetModelBounds(const double bounds[6]);
   vtkGetVectorMacro(ModelBounds,double,6);
 
   // Description:
diff --git a/Filters/General/vtkCursor3D.cxx b/Filters/General/vtkCursor3D.cxx
index c747532..ec050f9 100644
--- a/Filters/General/vtkCursor3D.cxx
+++ b/Filters/General/vtkCursor3D.cxx
@@ -475,7 +475,7 @@ void vtkCursor3D::SetFocalPoint(double x[3])
     }
 }
 
-void vtkCursor3D::SetModelBounds(double bounds[6])
+void vtkCursor3D::SetModelBounds(const double bounds[6])
 {
   this->SetModelBounds(bounds[0], bounds[1], bounds[2], bounds[3], bounds[4],
                        bounds[5]);
diff --git a/Filters/General/vtkCursor3D.h b/Filters/General/vtkCursor3D.h
index 49c4d92..c3687be 100644
--- a/Filters/General/vtkCursor3D.h
+++ b/Filters/General/vtkCursor3D.h
@@ -45,7 +45,7 @@ public:
   // Set / get the boundary of the 3D cursor.
   void SetModelBounds(double xmin, double xmax, double ymin, double ymax,
                       double zmin, double zmax);
-  void SetModelBounds(double bounds[6]);
+  void SetModelBounds(const double bounds[6]);
   vtkGetVectorMacro(ModelBounds,double,6);
 
   // Description:
diff --git a/Filters/General/vtkCurvatures.cxx b/Filters/General/vtkCurvatures.cxx
index 4c81185..227bc64 100644
--- a/Filters/General/vtkCurvatures.cxx
+++ b/Filters/General/vtkCurvatures.cxx
@@ -199,7 +199,7 @@ void vtkCurvatures::GetMeanCurvature(vtkPolyData *mesh)
     neighbour ->Delete();
 
     if (meanCurvature) meanCurvature->Delete();
-    if (num_neighb) delete [] num_neighb;
+    delete [] num_neighb;
 };
 //--------------------------------------------
 #define CLAMP_MACRO(v)    ((v)<(-1) ? (-1) : (v) > (1) ? (1) : v)
@@ -300,8 +300,8 @@ void vtkCurvatures::GetGaussCurvature(vtkPolyData *output)
     vtkDebugMacro("Set Values of Gauss Curvature: Done");
     /*******************************************************/
     if (facet) facet->Delete();
-    if (K)              delete [] K;
-    if (dA)             delete [] dA;
+    delete [] K;
+    delete [] dA;
     if (gaussCurvature) gaussCurvature->Delete();
     /*******************************************************/
 };
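
A short aside on the vtkCurvatures cleanup above: the dropped null checks are redundant because applying delete[] to a null pointer is defined to be a no-op, as this minimal standalone sketch (illustrative names only) shows.

#include <cstddef>

int main()
{
  double* K = NULL;  // never allocated
  delete [] K;       // well-defined no-op, no guard required
  K = new double[8];
  delete [] K;       // normal deallocation
  return 0;
}
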
diff --git a/Filters/General/vtkDicer.h b/Filters/General/vtkDicer.h
index 59783ba..ad6e76e 100644
--- a/Filters/General/vtkDicer.h
+++ b/Filters/General/vtkDicer.h
@@ -109,7 +109,7 @@ public:
 
 protected:
   vtkDicer();
-  ~vtkDicer() {};
+  ~vtkDicer() {}
 
   virtual void UpdatePieceMeasures(vtkDataSet *input);
 
diff --git a/Filters/General/vtkHyperStreamline.cxx b/Filters/General/vtkHyperStreamline.cxx
index 1ff328c..c1abd94 100644
--- a/Filters/General/vtkHyperStreamline.cxx
+++ b/Filters/General/vtkHyperStreamline.cxx
@@ -371,7 +371,6 @@ int vtkHyperStreamline::RequestData(
   inScalars = pd->GetScalars();
 
   cellTensors = vtkDataArray::CreateDataArray(inTensors->GetDataType());
-  cellScalars = vtkDataArray::CreateDataArray(inScalars->GetDataType());
   int numComp;
   if (inTensors)
     {
@@ -381,6 +380,7 @@ int vtkHyperStreamline::RequestData(
     }
   if (inScalars)
     {
+    cellScalars = vtkDataArray::CreateDataArray(inScalars->GetDataType());
     numComp = inScalars->GetNumberOfComponents();
     cellScalars->SetNumberOfComponents(numComp);
     cellScalars->SetNumberOfTuples(VTK_CELL_SIZE);
@@ -646,7 +646,10 @@ int vtkHyperStreamline::RequestData(
 
   delete [] w;
   cellTensors->Delete();
-  cellScalars->Delete();
+  if (cellScalars)
+    {
+    cellScalars->Delete();
+    }
 
   return retval;
 }
diff --git a/Filters/General/vtkInterpolatingSubdivisionFilter.h b/Filters/General/vtkInterpolatingSubdivisionFilter.h
index ae6ca44..b956952 100644
--- a/Filters/General/vtkInterpolatingSubdivisionFilter.h
+++ b/Filters/General/vtkInterpolatingSubdivisionFilter.h
@@ -51,7 +51,7 @@ public:
 
 protected:
   vtkInterpolatingSubdivisionFilter();
-  ~vtkInterpolatingSubdivisionFilter() {};
+  ~vtkInterpolatingSubdivisionFilter() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual void GenerateSubdivisionPoints (vtkPolyData *inputDS, vtkIntArray *edgeData, vtkPoints *outputPts, vtkPointData *outputPD) = 0;
diff --git a/Filters/General/vtkLinkEdgels.h b/Filters/General/vtkLinkEdgels.h
index 8da73cd..76144ac 100644
--- a/Filters/General/vtkLinkEdgels.h
+++ b/Filters/General/vtkLinkEdgels.h
@@ -81,7 +81,7 @@ public:
 
 protected:
   vtkLinkEdgels();
-  ~vtkLinkEdgels() {};
+  ~vtkLinkEdgels() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
diff --git a/Filters/General/vtkMultiBlockMergeFilter.cxx b/Filters/General/vtkMultiBlockMergeFilter.cxx
index 4a3a9a4..59e5484 100644
--- a/Filters/General/vtkMultiBlockMergeFilter.cxx
+++ b/Filters/General/vtkMultiBlockMergeFilter.cxx
@@ -143,7 +143,7 @@ int vtkMultiBlockMergeFilter::Merge(unsigned int numPieces, unsigned int pieceNo
 
   // Current limitation of this filter is that all blocks must either be
   // vtkMultiBlockDataSet or vtkDataSet not a mixture of the two.
-  // a vtkMultiBlockDataSet with all child blocks as vtkDataSet is a mutlipiece
+  // a vtkMultiBlockDataSet with all child blocks as vtkDataSet is a multipiece
   // dataset. This filter merges pieces together.
   int mpInput = this->IsMultiPiece(input);
   int mpOutput = this->IsMultiPiece(output);
diff --git a/Filters/General/vtkNormalizeMatrixVectors.cxx b/Filters/General/vtkNormalizeMatrixVectors.cxx
index b888f67..ec0f310 100644
--- a/Filters/General/vtkNormalizeMatrixVectors.cxx
+++ b/Filters/General/vtkNormalizeMatrixVectors.cxx
@@ -28,6 +28,8 @@
 #include "vtkSmartPointer.h"
 #include "vtkTypedArray.h"
 
+#include <algorithm>
+
 ///////////////////////////////////////////////////////////////////////////////
 // vtkNormalizeMatrixVectors
 
diff --git a/Filters/General/vtkOBBDicer.h b/Filters/General/vtkOBBDicer.h
index 00dd9d0..aa615d8 100644
--- a/Filters/General/vtkOBBDicer.h
+++ b/Filters/General/vtkOBBDicer.h
@@ -47,8 +47,8 @@ public:
   static vtkOBBDicer *New();
 
 protected:
-  vtkOBBDicer() {};
-  ~vtkOBBDicer() {};
+  vtkOBBDicer() {}
+  ~vtkOBBDicer() {}
 
   // Usual data generation method
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/General/vtkOBBTree.h b/Filters/General/vtkOBBTree.h
index 2f3b659..6082802 100644
--- a/Filters/General/vtkOBBTree.h
+++ b/Filters/General/vtkOBBTree.h
@@ -70,6 +70,10 @@ public:
   vtkIdList *Cells; //list of cells in node
   void DebugPrintTree( int level, double *leaf_vol, int *minCells,
                        int *maxCells );
+
+private:
+  vtkOBBNode(const vtkOBBNode& other); // no copy constructor
+  vtkOBBNode& operator=(const vtkOBBNode& rhs); // no copy assignment
 };
 //ETX
 //
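
The vtkOBBNode hunk above uses the pre-C++11 idiom of declaring, but not implementing, the copy constructor and copy assignment operator. A minimal sketch of the same pattern outside VTK:

// Copying is rejected at compile time (or at link time if attempted from
// within the class itself), matching the intent of the hunk above.
class NonCopyable
{
public:
  NonCopyable() {}
private:
  NonCopyable(const NonCopyable&);            // not implemented
  NonCopyable& operator=(const NonCopyable&); // not implemented
};

int main()
{
  NonCopyable a;
  // NonCopyable b(a); // would not compile: copy constructor is private
  return 0;
}
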
diff --git a/Filters/General/vtkPassArrays.cxx b/Filters/General/vtkPassArrays.cxx
index 31fcd5d..df12863 100644
--- a/Filters/General/vtkPassArrays.cxx
+++ b/Filters/General/vtkPassArrays.cxx
@@ -299,6 +299,24 @@ int vtkPassArrays::ProcessRequest(
 }
 
 //----------------------------------------------------------------------------
+int vtkPassArrays::FillInputPortInformation(int port, vtkInformation* info)
+{
+  if (port == 0)
+    {
+    // Skip composite data sets so that executives will treat this as a simple filter
+    info->Remove(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE());
+    info->Append(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkDataSet");
+    info->Append(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkGenericDataSet");
+    info->Append(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkGraph");
+    info->Append(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkPistonDataObject");
+    info->Append(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkTable");
+    info->Set(vtkAlgorithm::INPUT_IS_OPTIONAL(), 1);
+    }
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
 int vtkPassArrays::RequestDataObject(
   vtkInformation*,
   vtkInformationVector** inputVector ,
diff --git a/Filters/General/vtkPassArrays.h b/Filters/General/vtkPassArrays.h
index 4abcf7a..61d4911 100644
--- a/Filters/General/vtkPassArrays.h
+++ b/Filters/General/vtkPassArrays.h
@@ -129,6 +129,11 @@ protected:
   ~vtkPassArrays();
 
   // Description:
+  // Override to limit the supported input types to non-composite
+  // datasets.
+  virtual int FillInputPortInformation(int port, vtkInformation* info);
+
+  // Description:
   // Creates the same output type as the input type.
   virtual int RequestDataObject(vtkInformation* request,
                                 vtkInformationVector** inputVector,
diff --git a/Filters/General/vtkQuantizePolyDataPoints.h b/Filters/General/vtkQuantizePolyDataPoints.h
index dfb8880..db4134e 100644
--- a/Filters/General/vtkQuantizePolyDataPoints.h
+++ b/Filters/General/vtkQuantizePolyDataPoints.h
@@ -63,7 +63,7 @@ public:
 
 protected:
   vtkQuantizePolyDataPoints();
-  ~vtkQuantizePolyDataPoints() {};
+  ~vtkQuantizePolyDataPoints() {}
 
   double QFactor;
 private:
diff --git a/Filters/General/vtkRandomAttributeGenerator.cxx b/Filters/General/vtkRandomAttributeGenerator.cxx
index f4aefd7..c053857 100644
--- a/Filters/General/vtkRandomAttributeGenerator.cxx
+++ b/Filters/General/vtkRandomAttributeGenerator.cxx
@@ -14,27 +14,30 @@
 =========================================================================*/
 #include "vtkRandomAttributeGenerator.h"
 
-#include "vtkDataSet.h"
-#include "vtkPointData.h"
+#include "vtkBitArray.h"
 #include "vtkCellData.h"
+#include "vtkCharArray.h"
+#include "vtkCompositeDataSet.h"
+#include "vtkCompositeDataIterator.h"
+#include "vtkDataSet.h"
+#include "vtkDoubleArray.h"
 #include "vtkFieldData.h"
+#include "vtkFloatArray.h"
+#include "vtkIdTypeArray.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
+#include "vtkIntArray.h"
+#include "vtkLongArray.h"
 #include "vtkMath.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
-
-#include "vtkBitArray.h"
-#include "vtkCharArray.h"
+#include "vtkPointData.h"
 #include "vtkUnsignedCharArray.h"
-#include "vtkShortArray.h"
-#include "vtkUnsignedShortArray.h"
-#include "vtkIntArray.h"
 #include "vtkUnsignedIntArray.h"
-#include "vtkLongArray.h"
 #include "vtkUnsignedLongArray.h"
-#include "vtkFloatArray.h"
-#include "vtkDoubleArray.h"
-#include "vtkIdTypeArray.h"
+#include "vtkUnsignedShortArray.h"
+#include "vtkShortArray.h"
+#include "vtkSmartPointer.h"
 
 vtkStandardNewMacro(vtkRandomAttributeGenerator);
 
@@ -62,41 +65,101 @@ vtkRandomAttributeGenerator::vtkRandomAttributeGenerator()
   this->GenerateCellArray = 0;
 
   this->GenerateFieldArray = 0;
+  this->AttributesConstantPerBlock = false;
+}
+
+// ----------------------------------------------------------------------------
+template <class T>
+void GenerateRandomTuple (T *data,
+                          vtkIdType i,
+                          int numComp,
+                          int minComp,
+                          int maxComp,
+                          double min,
+                          double max)
+{
+  for ( int comp=minComp; comp <= maxComp; comp++ )
+    {
+    // Now generate a random component value
+    data[i*numComp + comp] = static_cast<T>(vtkMath::Random(min,max));
+    }
+}
+
+// ----------------------------------------------------------------------------
+void GenerateRandomTupleBit (vtkDataArray* data,
+                             vtkIdType i,
+                             int minComp,
+                             int maxComp)
+{
+  for ( int comp=minComp; comp <= maxComp; comp++ )
+    {
+    // Now generate a random component value
+    data->SetComponent(i,comp,
+                       vtkMath::Random(0.0,1.0)<0.5?0:1);
+    }
+}
+
+
+// ----------------------------------------------------------------------------
+template <class T>
+void CopyTupleFrom0 (T *data,
+                     vtkIdType i,
+                     int numComp,
+                     int minComp,
+                     int maxComp)
+{
+  memcpy(data + i * numComp + minComp, data + minComp,
+         (maxComp - minComp + 1) * sizeof (T));
+}
+// ----------------------------------------------------------------------------
+void CopyTupleFrom0Bit (vtkDataArray* data,
+                        vtkIdType i,
+                        int minComp,
+                        int maxComp)
+{
+  for ( int comp=minComp; comp <= maxComp; comp++ )
+    {
+    data->SetComponent(i, comp, data->GetComponent(0, comp));
+    }
 }
 
 // ----------------------------------------------------------------------------
 // This function template creates random attributes within a given range. It is
 // assumed that the input data array may have a variable number of components.
 template <class T>
-void vtkRandomAttributeGeneratorExecute(vtkRandomAttributeGenerator *self,
-                                        T *data,
-                                        vtkIdType numTuples,
-                                        int numComp,
-                                        int minComp,
-                                        int maxComp,
-                                        double min,
-                                        double max)
+void vtkRandomAttributeGenerator::GenerateRandomTuples(T *data,
+                                                       vtkIdType numTuples,
+                                                       int numComp,
+                                                       int minComp,
+                                                       int maxComp,
+                                                       double min,
+                                                       double max)
 {
+  if (numTuples == 0)
+    return;
   vtkIdType total = numComp * numTuples;
   vtkIdType tenth = total/10 + 1;
-  for ( vtkIdType i=0; i < numTuples; i++ )
+  GenerateRandomTuple(data, 0, numComp, minComp, maxComp, min, max);
+  for ( vtkIdType i=1; i < numTuples; i++ )
     {
-    for ( int comp=minComp; comp <= maxComp; comp++ )
+    // update progress and check for aborts
+    if ( ! (i % tenth) )
       {
-      // update progess and check for aborts
-      if ( ! (i % tenth) )
+      this->UpdateProgress(static_cast<double>(i)/total);
+      if ( this->GetAbortExecute() )
         {
-        self->UpdateProgress(static_cast<double>(i)/total);
-        if ( self->GetAbortExecute() )
-          {
-          break;
-          }
+        break;
         }
-
-      // Now generate a random component value
-      data[i*numComp + comp] = static_cast<T>(vtkMath::Random(min,max));
-      }//for each component
-    }//for each tuple
+      }
+    if (this->AttributesConstantPerBlock)
+      {
+      CopyTupleFrom0 (data, i, numComp, minComp, maxComp);
+      }
+    else
+      {
+      GenerateRandomTuple (data, i, numComp, minComp, maxComp, min, max);
+      }
+    }
 }
 
 // ----------------------------------------------------------------------------
@@ -119,8 +182,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfComponents(numComp);
       dataArray->SetNumberOfTuples(numTuples);
       char *data = static_cast<vtkCharArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this,data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_UNSIGNED_CHAR:
@@ -130,8 +193,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfTuples(numTuples);
       unsigned char *data=
         static_cast<vtkUnsignedCharArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_SHORT:
@@ -140,8 +203,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfComponents(numComp);
       dataArray->SetNumberOfTuples(numTuples);
       short *data = static_cast<vtkShortArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_UNSIGNED_SHORT:
@@ -151,8 +214,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfTuples(numTuples);
       unsigned short *data=
         static_cast<vtkUnsignedShortArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_INT:
@@ -161,8 +224,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfComponents(numComp);
       dataArray->SetNumberOfTuples(numTuples);
       int *data = static_cast<vtkIntArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_UNSIGNED_INT:
@@ -172,8 +235,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfTuples(numTuples);
       unsigned int *data =
         static_cast<vtkUnsignedIntArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_LONG:
@@ -182,8 +245,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfComponents(numComp);
       dataArray->SetNumberOfTuples(numTuples);
       long *data = static_cast<vtkLongArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_UNSIGNED_LONG:
@@ -193,8 +256,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfTuples(numTuples);
       unsigned long *data =
         static_cast<vtkUnsignedLongArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_FLOAT:
@@ -203,8 +266,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfComponents(numComp);
       dataArray->SetNumberOfTuples(numTuples);
       float *data = static_cast<vtkFloatArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_DOUBLE:
@@ -213,8 +276,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfComponents(numComp);
       dataArray->SetNumberOfTuples(numTuples);
       double *data = static_cast<vtkDoubleArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_ID_TYPE:
@@ -223,8 +286,8 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray->SetNumberOfComponents(numComp);
       dataArray->SetNumberOfTuples(numTuples);
       vtkIdType *data = static_cast<vtkIdTypeArray*>(dataArray)->GetPointer(0);
-      vtkRandomAttributeGeneratorExecute(this, data,numTuples,numComp,
-                                         minComp,maxComp,min,max);
+      this->GenerateRandomTuples(data,numTuples,numComp,
+                                 minComp,maxComp,min,max);
       }
       break;
     case VTK_BIT: //we'll do something special for bit arrays
@@ -234,25 +297,29 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
       dataArray = vtkBitArray::New();
       dataArray->SetNumberOfComponents(numComp);
       dataArray->SetNumberOfTuples(numTuples);
-      for ( vtkIdType i=0; i < numTuples; i++ )
+      if (numTuples == 0)
+        break;
+      GenerateRandomTupleBit (dataArray, 0, minComp, maxComp);
+      for ( vtkIdType i=1; i < numTuples; i++ )
         {
-        for ( int comp=minComp; comp <= maxComp; comp++ )
+        // update progress and check for aborts
+        if ( ! (i % tenth) )
           {
-          // update progess and check for aborts
-          if ( ! (i % tenth) )
+          this->UpdateProgress(static_cast<double>(i)/total);
+          if ( this->GetAbortExecute() )
             {
-            this->UpdateProgress(static_cast<double>(i)/total);
-            if ( this->GetAbortExecute() )
-              {
-              break;
-              }
+            break;
             }
-
-          // Now generate a random component value
-          dataArray->SetComponent(i,comp,
-                                  vtkMath::Random(0.0,1.0)<0.5?0:1);
-          }//for each component
-        }//for each tuple
+          }
+        if (this->AttributesConstantPerBlock)
+          {
+          CopyTupleFrom0Bit (dataArray, i, minComp, maxComp);
+          }
+        else
+          {
+          GenerateRandomTupleBit (dataArray, i, minComp, maxComp);
+          }
+        }
       }
       break;
 
@@ -265,23 +332,34 @@ vtkDataArray *vtkRandomAttributeGenerator::GenerateData(int dataType,
 
 // ----------------------------------------------------------------------------
 int vtkRandomAttributeGenerator::RequestData(
-  vtkInformation *vtkNotUsed(request),
-  vtkInformationVector **inputVector,
-  vtkInformationVector *outputVector)
+  vtkCompositeDataSet *input,
+  vtkCompositeDataSet *output)
 {
-  // get the info objects
-  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
-  vtkInformation *outInfo = outputVector->GetInformationObject(0);
+  if (input == 0 || output == 0)
+    {
+    return 0;
+    }
+  output->CopyStructure(input);
 
-  // get the input and output
-  vtkDataSet *input = vtkDataSet::SafeDownCast(
-    inInfo->Get(vtkDataObject::DATA_OBJECT()));
-  vtkDataSet *output = vtkDataSet::SafeDownCast(
-    outInfo->Get(vtkDataObject::DATA_OBJECT()));
+  vtkSmartPointer<vtkCompositeDataIterator> it;
+  it.TakeReference (input->NewIterator());
+  for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextItem())
+    {
+    vtkDataSet* inputDS = vtkDataSet::SafeDownCast(it->GetCurrentDataObject());
+    vtkSmartPointer<vtkDataSet> outputDS;
+    outputDS.TakeReference(inputDS->NewInstance ());
+    output->SetDataSet (it, outputDS);
+    RequestData (inputDS, outputDS);
+    }
+  return 1;
+}
 
-  // First, copy the input to the output as a starting point
-  output->CopyStructure( input );
 
+// ----------------------------------------------------------------------------
+int vtkRandomAttributeGenerator::RequestData(
+  vtkDataSet *input,
+  vtkDataSet *output)
+{
   vtkDebugMacro(<< "Producing random attributes");
   vtkIdType numPts = input->GetNumberOfPoints();
   vtkIdType numCells = input->GetNumberOfCells();
@@ -305,6 +383,7 @@ int vtkRandomAttributeGenerator::RequestData(
                                                  this->NumberOfComponents-1,
                                                  this->MinimumComponentValue,
                                                  this->MaximumComponentValue);
+    ptScalars->SetName("RandomPointScalars");
     output->GetPointData()->SetScalars(ptScalars);
     ptScalars->Delete();
     }
@@ -313,6 +392,7 @@ int vtkRandomAttributeGenerator::RequestData(
     vtkDataArray *ptVectors = this->GenerateData(this->DataType,numPts,3,0,2,
                                                  this->MinimumComponentValue,
                                                  this->MaximumComponentValue);
+    ptVectors->SetName("RandomPointVectors");
     output->GetPointData()->SetVectors(ptVectors);
     ptVectors->Delete();
     }
@@ -337,6 +417,7 @@ int vtkRandomAttributeGenerator::RequestData(
     vtkDataArray *ptTensors = this->GenerateData(this->DataType,numPts,9,0,5,
                                                  this->MinimumComponentValue,
                                                  this->MaximumComponentValue);
+    ptTensors->SetName("RandomPointTensors");
     double t[9];
     for ( vtkIdType id=0; id < numPts; id++ )
       {
@@ -368,10 +449,19 @@ int vtkRandomAttributeGenerator::RequestData(
                                                  this->NumberOfComponents-1,
                                                  this->MinimumComponentValue,
                                                  this->MaximumComponentValue);
+    ptScalars->SetName("RandomPointArray");
     output->GetPointData()->SetScalars(ptScalars);
     ptScalars->Delete();
     }
 
+
+  if ( numCells < 1 )
+    {
+    vtkDebugMacro(<< "No input!");
+    return 1;
+    }
+
+
   // Now the cell data
   if ( this->GenerateCellScalars)
     {
@@ -380,6 +470,7 @@ int vtkRandomAttributeGenerator::RequestData(
                                                  this->NumberOfComponents-1,
                                                  this->MinimumComponentValue,
                                                  this->MaximumComponentValue);
+    ptScalars->SetName("RandomCellScalars");
     output->GetCellData()->SetScalars(ptScalars);
     ptScalars->Delete();
     }
@@ -388,6 +479,7 @@ int vtkRandomAttributeGenerator::RequestData(
     vtkDataArray *ptVectors = this->GenerateData(this->DataType,numCells,3,0,2,
                                                  this->MinimumComponentValue,
                                                  this->MaximumComponentValue);
+    ptVectors->SetName("RandomCellVectors");
     output->GetCellData()->SetVectors(ptVectors);
     ptVectors->Delete();
     }
@@ -411,6 +503,7 @@ int vtkRandomAttributeGenerator::RequestData(
     vtkDataArray *ptTensors = this->GenerateData(this->DataType,numCells,9,0,5,
                                                  this->MinimumComponentValue,
                                                  this->MaximumComponentValue);
+    ptTensors->SetName("RandomCellTensors");
     double t[9];
     for ( vtkIdType id=0; id < numCells; id++ )
       {
@@ -442,6 +535,7 @@ int vtkRandomAttributeGenerator::RequestData(
                                                  this->NumberOfComponents-1,
                                                  this->MinimumComponentValue,
                                                  this->MaximumComponentValue);
+    ptScalars->SetName("RandomCellArray");
     output->GetCellData()->SetScalars(ptScalars);
     ptScalars->Delete();
     }
@@ -455,14 +549,41 @@ int vtkRandomAttributeGenerator::RequestData(
                                             this->NumberOfComponents-1,
                                             this->MinimumComponentValue,
                                             this->MaximumComponentValue);
+    data->SetName("RandomFieldArray");
     output->GetFieldData()->AddArray(data);
     data->Delete();
     }
-
   return 1;
 }
 
 // ----------------------------------------------------------------------------
+int vtkRandomAttributeGenerator::RequestData(
+  vtkInformation *vtkNotUsed(request),
+  vtkInformationVector **inputVector,
+  vtkInformationVector *outputVector)
+{
+  // get the info objects
+  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
+  vtkInformation *outInfo = outputVector->GetInformationObject(0);
+
+
+  // get the input and output
+  vtkDataObject *input = inInfo->Get(vtkDataObject::DATA_OBJECT());
+  vtkDataObject *output = outInfo->Get(vtkDataObject::DATA_OBJECT());
+
+  if (input->IsA ("vtkDataSet"))
+    {
+    return this->RequestData (vtkDataSet::SafeDownCast (input),
+                              vtkDataSet::SafeDownCast (output));
+    }
+  else
+    {
+    return this->RequestData (vtkCompositeDataSet::SafeDownCast (input),
+                              vtkCompositeDataSet::SafeDownCast (output));
+    }
+}
+
+// ----------------------------------------------------------------------------
 void vtkRandomAttributeGenerator::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os,indent);
@@ -504,3 +625,11 @@ void vtkRandomAttributeGenerator::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Generate Field Array: "
      << (this->GenerateFieldArray ? "On\n" : "Off\n");
 }
+
+int vtkRandomAttributeGenerator::FillInputPortInformation(
+  int vtkNotUsed(port), vtkInformation* info)
+{
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkDataSet");
+  info->Append(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkCompositeDataSet");
+  return 1;
+}
diff --git a/Filters/General/vtkRandomAttributeGenerator.h b/Filters/General/vtkRandomAttributeGenerator.h
index 34ba247..6b517e7 100644
--- a/Filters/General/vtkRandomAttributeGenerator.h
+++ b/Filters/General/vtkRandomAttributeGenerator.h
@@ -39,20 +39,27 @@
 // Newly created attributes may replace attribute data that would have
 // otherwise been passed through.
 
+// .SECTION See also
+// vtkBrownianPoints
+
+
 #ifndef __vtkRandomAttributeGenerator_h
 #define __vtkRandomAttributeGenerator_h
 
 #include "vtkFiltersGeneralModule.h" // For export macro
-#include "vtkDataSetAlgorithm.h"
+#include "vtkPassInputTypeAlgorithm.h"
+
+class vtkDataSet;
+class vtkCompositeDataSet;
 
-class VTKFILTERSGENERAL_EXPORT vtkRandomAttributeGenerator : public vtkDataSetAlgorithm
+class VTKFILTERSGENERAL_EXPORT vtkRandomAttributeGenerator : public vtkPassInputTypeAlgorithm
 {
 public:
   // Description:
   // Create instance with minimum speed 0.0, maximum speed 1.0.
   static vtkRandomAttributeGenerator *New();
 
-  vtkTypeMacro(vtkRandomAttributeGenerator,vtkDataSetAlgorithm);
+  vtkTypeMacro(vtkRandomAttributeGenerator,vtkPassInputTypeAlgorithm);
   void PrintSelf(ostream& os, vtkIndent indent);
 
   // Description:
@@ -86,6 +93,11 @@ public:
   // observed.
   vtkSetMacro(MinimumComponentValue,double);
   vtkGetMacro(MinimumComponentValue,double);
+  void SetComponentRange (double minimumValue, double maximumValue)
+    {
+    this->SetMinimumComponentValue (minimumValue);
+    this->SetMaximumComponentValue (maximumValue);
+    }
 
   // Description:
   // Set the maximum component value. This applies to all data that is generated,
@@ -195,6 +207,15 @@ public:
   vtkBooleanMacro(GenerateFieldArray,int);
 
   // Description:
+  // Indicate that the generated attributes are
+  // constant within a block. This can be used to highlight
+  // blocks in a composite dataset.
+  vtkSetMacro(AttributesConstantPerBlock,bool);
+  vtkGetMacro(AttributesConstantPerBlock,bool);
+  vtkBooleanMacro(AttributesConstantPerBlock,bool);
+
+
+  // Description:
   // Convenience methods for generating data: all data, all point data, or all cell data.
   // For example, if all data is enabled, then all point, cell and field data is generated.
   // If all point data is enabled, then point scalars, vectors, normals, tensors, tcoords,
@@ -252,7 +273,9 @@ protected:
   vtkRandomAttributeGenerator();
   ~vtkRandomAttributeGenerator() {}
 
-  virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
+  virtual int RequestData(vtkInformation *, vtkInformationVector **,
+                          vtkInformationVector *);
+  virtual int FillInputPortInformation(int port, vtkInformation* info);
 
   int       DataType;
   int       NumberOfComponents;
@@ -275,10 +298,22 @@ protected:
   int GenerateCellArray;
 
   int GenerateFieldArray;
+  bool AttributesConstantPerBlock;
 
   // Helper functions
   vtkDataArray *GenerateData(int dataType, vtkIdType numTuples, int numComp,
                              int minComp, int maxComp, double min, double max);
+  int RequestData(vtkDataSet *input, vtkDataSet *output);
+  int RequestData(vtkCompositeDataSet *input, vtkCompositeDataSet *output);
+  template <class T>
+    void GenerateRandomTuples(T *data,
+                              vtkIdType numTuples,
+                              int numComp,
+                              int minComp,
+                              int maxComp,
+                              double min,
+                              double max);
+
 
 private:
   vtkRandomAttributeGenerator(const vtkRandomAttributeGenerator&);  // Not implemented.
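
To show how the composite-data support and the new AttributesConstantPerBlock flag fit together, here is a minimal sketch. It is not from the repository; the sphere sources are placeholders for real per-block datasets, and the new SetComponentRange and AttributesConstantPerBlockOn calls come from the header hunk above.

// Sketch: constant random point scalars per block of a multiblock dataset.
#include <vtkMultiBlockDataSet.h>
#include <vtkNew.h>
#include <vtkRandomAttributeGenerator.h>
#include <vtkSphereSource.h>

int main()
{
  vtkNew<vtkSphereSource> sphereA;
  vtkNew<vtkSphereSource> sphereB;
  sphereA->Update();
  sphereB->Update();

  vtkNew<vtkMultiBlockDataSet> blocks;
  blocks->SetNumberOfBlocks(2);
  blocks->SetBlock(0, sphereA->GetOutput());
  blocks->SetBlock(1, sphereB->GetOutput());

  vtkNew<vtkRandomAttributeGenerator> random;
  random->SetInputData(blocks.GetPointer());
  random->GeneratePointScalarsOn();
  random->SetComponentRange(0.0, 1.0);    // new convenience setter
  random->AttributesConstantPerBlockOn(); // one value per block
  random->Update();

  return 0;
}
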
diff --git a/Filters/General/vtkRectilinearGridClip.h b/Filters/General/vtkRectilinearGridClip.h
index 52815c2..60e3498 100644
--- a/Filters/General/vtkRectilinearGridClip.h
+++ b/Filters/General/vtkRectilinearGridClip.h
@@ -61,7 +61,7 @@ public:
 
 protected:
   vtkRectilinearGridClip();
-  ~vtkRectilinearGridClip() {};
+  ~vtkRectilinearGridClip() {}
 
   // Time when OutputImageExtent was computed.
   vtkTimeStamp CTime;
diff --git a/Filters/General/vtkRectilinearGridToTetrahedra.h b/Filters/General/vtkRectilinearGridToTetrahedra.h
index ee820d4..591cc15 100644
--- a/Filters/General/vtkRectilinearGridToTetrahedra.h
+++ b/Filters/General/vtkRectilinearGridToTetrahedra.h
@@ -104,7 +104,7 @@ public:
 
 protected:
   vtkRectilinearGridToTetrahedra();
-  ~vtkRectilinearGridToTetrahedra() {};
+  ~vtkRectilinearGridToTetrahedra() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/General/vtkSCurveSpline.cxx b/Filters/General/vtkSCurveSpline.cxx
deleted file mode 100644
index af1486a..0000000
--- a/Filters/General/vtkSCurveSpline.cxx
+++ /dev/null
@@ -1,258 +0,0 @@
-/*=========================================================================
-
-Program:   Visualization Toolkit
-Module:    vtkSCurveSpline.cxx
-
-Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-All rights reserved.
-See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-This software is distributed WITHOUT ANY WARRANTY; without even
-the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright 2009 Sandia Corporation.
-  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
-  the U.S. Government retains certain rights in this software.
-  -------------------------------------------------------------------------*/
-#include "vtkSCurveSpline.h"
-
-#include "vtkObjectFactory.h"
-#include "vtkPiecewiseFunction.h"
-#include <assert.h>
-#include <algorithm> // for std::min()/std::max()
-
-vtkStandardNewMacro(vtkSCurveSpline);
-
-//----------------------------------------------------------------------------
-// Construct a SCurve Spline.
-vtkSCurveSpline::vtkSCurveSpline()
-{
-  this->NodeWeight = 0.0;
-}
-
-//----------------------------------------------------------------------------
-// Evaluate a 1D Spline
-double vtkSCurveSpline::Evaluate (double t)
-{
-  int index;
-  double *intervals;
-  double *coefficients;
-
-  // check to see if we need to recompute the spline
-  if (this->ComputeTime < this->GetMTime())
-    {
-    this->Compute ();
-    }
-
-  // make sure we have at least 2 points
-  int size = this->PiecewiseFunction->GetSize();
-
-  if (size < 2)
-    {
-    return 0.0;
-    }
-
-  intervals = this->Intervals;
-  coefficients = this->Coefficients;
-
-  if ( this->Closed )
-    {
-    size = size + 1;
-    }
-
-  // clamp the function at both ends
-  if (t < intervals[0])
-    {
-    t = intervals[0];
-    }
-  if (t > intervals[size - 1])
-    {
-    t = intervals[size - 1];
-    }
-
-  // find pointer to cubic spline coefficient using bisection method
-  index = this->FindIndex(size,t);
-
-  // calculate offset within interval
-  t = (t - intervals[index]);
-
-  // normalize to unit width
-  t /= intervals[index+1] - intervals[index];
-
-  // apply weighting function
-  if (this->NodeWeight > 0.0)
-    {
-    double shift = t * (t * (t * (-4*this->NodeWeight)
-                             + (6*this->NodeWeight)))
-      - this->NodeWeight;
-    // clamp t
-    t = std::max(std::min(t+shift,1.0),0.0);
-    }
-
-  // evaluate intervals value y
-  return (t * (t * (t * *(coefficients + index * 3 + 2) // a
-                    + *(coefficients + index * 3 + 1))) // b
-          + *(coefficients + index * 3)); // d
-}
-
-//----------------------------------------------------------------------------
-// Compute SCurve Splines for each dependent variable
-void vtkSCurveSpline::Compute ()
-{
-  double *ts, *xs;
-  //  double *work;
-  double *coefficients;
-  double *dependent;
-  int size;
-  int i;
-
-  // Make sure the function is up to date.
-  //this->PiecewiseFunction->Update();
-
-  // get the size of the independent variables
-  size = this->PiecewiseFunction->GetSize ();
-
-  if(size < 2)
-    {
-    vtkErrorMacro("Cannot compute a spline with less than 2 points. # of points is: " << size);
-    return;
-    }
-
-  // copy the independent variables. Note that if the spline
-  // is closed the first and last point are assumed repeated -
-  // so we add and extra point
-  if (this->Intervals)
-    {
-    delete [] this->Intervals;
-    }
-
-  if ( !this->Closed )
-    {
-    this->Intervals = new double[size];
-    ts = this->PiecewiseFunction->GetDataPointer ();
-    for (i = 0; i < size; i++)
-      {
-      this->Intervals[i] = *(ts + 2*i);
-      }
-
-    // allocate memory for work arrays
-    //    work = new double[size];
-
-    // allocate memory for coefficients
-    if (this->Coefficients)
-      {
-      delete [] this->Coefficients;
-      }
-    this->Coefficients = new double [3*size];
-
-    // allocate memory for dependent variables
-    dependent = new double [size];
-
-    // get start of coefficients for this dependent variable
-    coefficients = this->Coefficients;
-
-    // get the dependent variable values
-    xs = this->PiecewiseFunction->GetDataPointer () + 1;
-    for (int j = 0; j < size; j++)
-      {
-      *(dependent + j) = *(xs + 2*j);
-      }
-
-    for (int k = 0; k < size-1; k++)
-      {
-      *(coefficients + 3*k) = dependent[k]; // d
-      *(coefficients + 3*k+1) = 3*(dependent[k+1]-dependent[k]); // b
-      *(coefficients + 3*k+2) = -2*(dependent[k+1]-dependent[k]); // a
-      }
-    *(coefficients + 3*(size-1)) = dependent[size-1];
-    *(coefficients + 3*(size-1)+1) = dependent[size-1];
-    *(coefficients + 3*(size-1)+2) = dependent[size-1];
-    }
-
-  else //add extra "fictitious" point to close loop
-    {
-    size = size + 1;
-    this->Intervals = new double[size];
-    ts = this->PiecewiseFunction->GetDataPointer ();
-    for (i = 0; i < size-1; i++)
-      {
-      this->Intervals[i] = *(ts + 2*i);
-      }
-    if ( this->ParametricRange[0] != this->ParametricRange[1] )
-      {
-      this->Intervals[size-1] = this->ParametricRange[1];
-      }
-    else
-      {
-      this->Intervals[size-1] = this->Intervals[size-2] + 1.0;
-      }
-
-    // allocate memory for work arrays
-    //    work = new double[size];
-
-    // allocate memory for coefficients
-    if (this->Coefficients)
-      {
-      delete [] this->Coefficients;
-      }
-    //this->Coefficients = new double [4*size];
-    this->Coefficients = new double [3*size];
-
-    // allocate memory for dependent variables
-    dependent = new double [size];
-
-    // get start of coefficients for this dependent variable
-    coefficients = this->Coefficients;
-
-    // get the dependent variable values
-    xs = this->PiecewiseFunction->GetDataPointer () + 1;
-    for (int j = 0; j < size-1; j++)
-      {
-      *(dependent + j) = *(xs + 2*j);
-      }
-    dependent[size-1] = *xs;
-
-    for (int k = 0; k < size-1; k++)
-      {
-      *(coefficients + 3*k) = dependent[k]; // d
-      *(coefficients + 3*k+1) = 3*(dependent[k+1]-dependent[k]); // b
-      *(coefficients + 3*k+2) = -2*(dependent[k+1]-dependent[k]); // a
-      }
-    *(coefficients + 3*(size-1)) = dependent[size-1];
-    *(coefficients + 3*(size-1)+1) = dependent[size-1];
-    *(coefficients + 3*(size-1)+2) = dependent[size-1];
-    }
-
-  // free the work array and dependent variable storage
-  //delete [] work;
-  delete [] dependent;
-
-  // update compute time
-  this->ComputeTime = this->GetMTime();
-}
-
-//----------------------------------------------------------------------------
-void vtkSCurveSpline::DeepCopy(vtkSpline *s)
-{
-  vtkSCurveSpline *spline = vtkSCurveSpline::SafeDownCast(s);
-
-  if ( spline != NULL )
-    {
-    //nothing to do
-    }
-
-  // Now do superclass
-  this->vtkSpline::DeepCopy(s);
-}
-
-//----------------------------------------------------------------------------
-void vtkSCurveSpline::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os,indent);
-  os << "NodeWeight: " << this->NodeWeight << endl;
-}
-
-
diff --git a/Filters/General/vtkSCurveSpline.h b/Filters/General/vtkSCurveSpline.h
deleted file mode 100644
index 5d87451..0000000
--- a/Filters/General/vtkSCurveSpline.h
+++ /dev/null
@@ -1,69 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkSCurveSpline.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*-------------------------------------------------------------------------
-  Copyright 2009 Sandia Corporation.
-  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
-  the U.S. Government retains certain rights in this software.
-  -------------------------------------------------------------------------*/
-// .NAME vtkSCurveSpline - computes an interpolating spline using a
-// a SCurve basis.
-
-// .SECTION Description
-// vtkSCurveSpline is a concrete implementation of vtkSpline using a
-// SCurve basis.
-
-// .SECTION See Also
-// vtkSpline vtkKochanekSpline
-
-#ifndef __vtkSCurveSpline_h
-#define __vtkSCurveSpline_h
-
-#include "vtkFiltersGeneralModule.h" // For export macro
-#include "vtkSpline.h"
-
-class VTKFILTERSGENERAL_EXPORT vtkSCurveSpline : public vtkSpline
-{
-public:
-  static vtkSCurveSpline *New();
-
-  vtkTypeMacro(vtkSCurveSpline,vtkSpline);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description
-  // Compute SCurve Splines for each dependent variable
-  void Compute ();
-
-  // Description:
-  // Evaluate a 1D SCurve spline.
-  virtual double Evaluate (double t);
-
-  // Description:
-  // Deep copy of SCurve spline data.
-  virtual void DeepCopy(vtkSpline *s);
-
-  vtkSetMacro(NodeWeight,double);
-  vtkGetMacro(NodeWeight,double);
-protected:
-  vtkSCurveSpline();
-  ~vtkSCurveSpline() {}
-
-  double NodeWeight;
-
-private:
-  vtkSCurveSpline(const vtkSCurveSpline&);  // Not implemented.
-  void operator=(const vtkSCurveSpline&);  // Not implemented.
-};
-
-#endif
diff --git a/Filters/General/vtkShrinkPolyData.h b/Filters/General/vtkShrinkPolyData.h
index 98c2cc4..0e1ece2 100644
--- a/Filters/General/vtkShrinkPolyData.h
+++ b/Filters/General/vtkShrinkPolyData.h
@@ -56,7 +56,7 @@ public:
 
 protected:
   vtkShrinkPolyData(double sf=0.5);
-  ~vtkShrinkPolyData() {};
+  ~vtkShrinkPolyData() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double ShrinkFactor;
diff --git a/Filters/General/vtkSplitColumnComponents.cxx b/Filters/General/vtkSplitColumnComponents.cxx
index 32cd24f..0144656 100644
--- a/Filters/General/vtkSplitColumnComponents.cxx
+++ b/Filters/General/vtkSplitColumnComponents.cxx
@@ -12,13 +12,13 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
-
 #include "vtkSplitColumnComponents.h"
 
+#include "vtkAbstractArray.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkObjectFactory.h"
-#include "vtkAbstractArray.h"
+#include "vtkStdString.h"
 #include "vtkTable.h"
 
 #include "vtksys/ios/sstream"
@@ -31,6 +31,7 @@ vtkSplitColumnComponents::vtkSplitColumnComponents()
   this->SetNumberOfInputPorts(1);
   this->SetNumberOfOutputPorts(1);
   this->CalculateMagnitudes = true;
+  this->NamingMode = NUMBERS_WITH_PARENS;
 }
 
 //---------------------------------------------------------------------------
@@ -91,7 +92,6 @@ int vtkSplitColumnComponents::RequestData(
   for (int i = 0; i < table->GetNumberOfColumns(); ++i)
     {
     vtkAbstractArray* col = table->GetColumn(i);
-    char* name = col->GetName();
     int components = col->GetNumberOfComponents();
     if (components == 1)
       {
@@ -103,10 +103,9 @@ int vtkSplitColumnComponents::RequestData(
       int colSize = col->GetNumberOfTuples();
       for (int j = 0; j < components; ++j)
         {
-        vtksys_ios::ostringstream ostr;
-        ostr << name << " (" << j << ")";
+        vtkStdString component_label = this->GetComponentLabel(col, j);
         vtkAbstractArray* newCol = vtkAbstractArray::CreateArray(col->GetDataType());
-        newCol->SetName(ostr.str().c_str());
+        newCol->SetName(component_label.c_str());
         newCol->SetNumberOfTuples(colSize);
         // Now copy the components into their new columns
         switch(col->GetDataType())
@@ -123,10 +122,10 @@ int vtkSplitColumnComponents::RequestData(
       // Add a magnitude column and calculate values if requested
       if (this->CalculateMagnitudes && col->IsA("vtkDataArray"))
         {
-        vtksys_ios::ostringstream ostr;
-        ostr << name << " (Magnitude)";
+        vtkStdString component_label = this->GetComponentLabel(
+          col, -1 /* for magnitude */);
         vtkAbstractArray* newCol = vtkAbstractArray::CreateArray(col->GetDataType());
-        newCol->SetName(ostr.str().c_str());
+        newCol->SetName(component_label.c_str());
         newCol->SetNumberOfTuples(colSize);
         // Now calculate the magnitude column
         switch(col->GetDataType())
@@ -146,9 +145,114 @@ int vtkSplitColumnComponents::RequestData(
   return 1;
 }
 
+namespace
+{
+  //----------------------------------------------------------------------------
+  std::string vtkDefaultComponentName(int componentNumber, int componentCount)
+    {
+    if (componentCount <= 1)
+      {
+      return "";
+      }
+    else if (componentNumber == -1)
+      {
+      return "Magnitude";
+      }
+    else if (componentCount <= 3 && componentNumber < 3)
+      {
+      const char* titles[] = {"X", "Y", "Z"};
+      return titles[componentNumber];
+      }
+    else if (componentCount == 6)
+      {
+      const char* titles[] = {"XX", "YY", "ZZ", "XY", "YZ", "XZ"};
+      // Assume this is a symmetric matrix.
+      return titles[componentNumber];
+      }
+    else
+      {
+      std::ostringstream buffer;
+      buffer << componentNumber;
+      return buffer.str();
+      }
+    }
+  std::string vtkGetComponentName(vtkAbstractArray* array, int component_no)
+    {
+    const char* name = array->GetComponentName(component_no);
+    if (name)
+      {
+      return name;
+      }
+    return vtkDefaultComponentName(component_no, array->GetNumberOfComponents());
+    }
+};
+
+//---------------------------------------------------------------------------
+vtkStdString vtkSplitColumnComponents::GetComponentLabel(
+  vtkAbstractArray* array, int component_no)
+{
+  std::ostringstream stream;
+  switch (this->NamingMode)
+    {
+  case NUMBERS_WITH_PARENS:
+    stream << array->GetName() << " (";
+    if (component_no == -1)
+      {
+      stream << "Magnitude)";
+      }
+    else
+      {
+      stream << component_no << ")";
+      }
+    break;
+
+  case NUMBERS_WITH_UNDERSCORES:
+    stream << array->GetName() << "_";
+    if (component_no == -1)
+      {
+      stream << "Magnitude";
+      }
+    else
+      {
+      stream << component_no;
+      }
+    break;
+
+  case NAMES_WITH_PARENS:
+    stream << array->GetName() << " ("
+      << vtkGetComponentName(array, component_no).c_str() << ")";
+    break;
+
+  case NAMES_WITH_UNDERSCORES:
+  default:
+    stream << array->GetName() << "_"
+      << vtkGetComponentName(array, component_no).c_str();
+    break;
+    }
+  return stream.str();
+}
+
 //---------------------------------------------------------------------------
 void vtkSplitColumnComponents::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os, indent);
   os << indent << "CalculateMagnitudes: " << this->CalculateMagnitudes << endl;
+  os << indent << "NamingMode: ";
+  switch(this->NamingMode)
+    {
+  case NAMES_WITH_UNDERSCORES:
+    os << "NAMES_WITH_UNDERSCORES" << endl;
+    break;
+  case NAMES_WITH_PARENS:
+    os << "NAMES_WITH_PARENS" << endl;
+    break;
+  case NUMBERS_WITH_UNDERSCORES:
+    os << "NUMBERS_WITH_UNDERSCORES" << endl;
+    break;
+  case NUMBERS_WITH_PARENS:
+    os << "NUMBERS_WITH_PARENS" << endl;
+    break;
+  default:
+    os << "INVALID" << endl;
+    }
 }
diff --git a/Filters/General/vtkSplitColumnComponents.h b/Filters/General/vtkSplitColumnComponents.h
index df5dccc..749e5ac 100644
--- a/Filters/General/vtkSplitColumnComponents.h
+++ b/Filters/General/vtkSplitColumnComponents.h
@@ -22,8 +22,14 @@
 // .SECTION Description
 // Splits any columns in a table that have more than one component into
 // individual columns. Single component columns are passed through without
-// any data duplication. So if column names "Points" had three components
-// this column would be split into "Points (0)", "Points (1)" and Points (2)".
+// any data duplication.
+// NamingMode can be used to control how columns with multiple components are
+// labelled in the output, e.g., if a column named "Points" had three components,
+// this column would be split into "Points (0)", "Points (1)", and "Points (2)"
+// when NamingMode is NUMBERS_WITH_PARENS, into Points_0, Points_1, and Points_2
+// when NamingMode is NUMBERS_WITH_UNDERSCORES, into "Points (X)", "Points (Y)",
+// and "Points (Z)" when NamingMode is NAMES_WITH_PARENS, and into Points_X,
+// Points_Y, and Points_Z when NamingMode is NAMES_WITH_UNDERSCORES.
 
 #ifndef __vtkSplitColumnComponents_h
 #define __vtkSplitColumnComponents_h
@@ -31,6 +37,7 @@
 #include "vtkFiltersGeneralModule.h" // For export macro
 #include "vtkTableAlgorithm.h"
 
+class vtkStdString;
 class VTKFILTERSGENERAL_EXPORT vtkSplitColumnComponents : public vtkTableAlgorithm
 {
 public:
@@ -45,10 +52,37 @@ public:
   vtkSetMacro(CalculateMagnitudes, bool);
   vtkGetMacro(CalculateMagnitudes, bool);
 
+  enum
+    {
+    NUMBERS_WITH_PARENS = 0,    // e.g. Points (0)
+    NAMES_WITH_PARENS = 1,      // e.g. Points (X)
+    NUMBERS_WITH_UNDERSCORES=2, // e.g. Points_0
+    NAMES_WITH_UNDERSCORES=3    // e.g. Points_X
+    };
+
+  // Description:
+  // Get/Set the array naming mode.
+  // Default is NUMBERS_WITH_PARENS.
+  vtkSetClampMacro(NamingMode, int, NUMBERS_WITH_PARENS, NAMES_WITH_UNDERSCORES);
+  void SetNamingModeToNumberWithParens()
+    { this->SetNamingMode(NUMBERS_WITH_PARENS); }
+  void SetNamingModeToNumberWithUnderscores()
+    { this->SetNamingMode(NUMBERS_WITH_UNDERSCORES); }
+  void SetNamingModeToNamesWithParens()
+    { this->SetNamingMode(NAMES_WITH_PARENS); }
+  void SetNamingModeToNamesWithUnderscores()
+    { this->SetNamingMode(NAMES_WITH_UNDERSCORES); }
+  vtkGetMacro(NamingMode, int);
+
 protected:
   vtkSplitColumnComponents();
   ~vtkSplitColumnComponents();
 
+  // Description:
+  // Returns the label to use for the specific component in the array based on
+  // this->NamingMode. Use component_no==-1 for magnitude.
+  vtkStdString GetComponentLabel(vtkAbstractArray* array, int component_no);
+
   bool CalculateMagnitudes;
 
   int RequestData(
@@ -56,6 +90,7 @@ protected:
     vtkInformationVector**,
     vtkInformationVector*);
 
+  int NamingMode;
 private:
   vtkSplitColumnComponents(const vtkSplitColumnComponents&); // Not implemented
   void operator=(const vtkSplitColumnComponents&);   // Not implemented
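
The NamingMode switch introduced above selects how split columns are labelled. A minimal
usage sketch, assuming a vtkTable is already in hand; the SplitWithUnderscoreNames helper
and the table variable are illustrative, not part of the patch:

    #include "vtkNew.h"
    #include "vtkSplitColumnComponents.h"
    #include "vtkTable.h"

    void SplitWithUnderscoreNames(vtkTable* table)
    {
      vtkNew<vtkSplitColumnComponents> split;
      split->SetInputData(table);
      // Label multi-component columns as e.g. Points_X, Points_Y, Points_Z
      // instead of the default "Points (0)", "Points (1)", "Points (2)".
      split->SetNamingModeToNamesWithUnderscores();
      split->Update();
      vtkTable* result = split->GetOutput();
      (void)result; // downstream use of the split table goes here
    }
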
diff --git a/Filters/General/vtkStructuredGridClip.h b/Filters/General/vtkStructuredGridClip.h
index 20cf230..a111a9f 100644
--- a/Filters/General/vtkStructuredGridClip.h
+++ b/Filters/General/vtkStructuredGridClip.h
@@ -61,7 +61,7 @@ public:
 
 protected:
   vtkStructuredGridClip();
-  ~vtkStructuredGridClip() {};
+  ~vtkStructuredGridClip() {}
 
   // Time when OutputImageExtent was computed.
   vtkTimeStamp CTime;
diff --git a/Filters/General/vtkTableBasedClipDataSet.cxx b/Filters/General/vtkTableBasedClipDataSet.cxx
index 19eb502..6123f26 100644
--- a/Filters/General/vtkTableBasedClipDataSet.cxx
+++ b/Filters/General/vtkTableBasedClipDataSet.cxx
@@ -1295,6 +1295,10 @@ void vtkTableBasedClipperVolumeFromVolume::
       {
       outPts->SetDataType(inputPointSet->GetPoints()->GetDataType());
       }
+    else
+      {
+      outPts->SetDataType(VTK_FLOAT);
+      }
     }
   else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
     {
@@ -3437,7 +3441,7 @@ void vtkTableBasedClipDataSet::ClipUnstructuredGridData( vtkDataSet * inputGrd,
   inputPts = NULL;
 
 
-  // the stuffs that can not be clipped
+  // the stuff that can not be clipped
   if ( numCants > 0 )
     {
     vtkUnstructuredGrid * vtkUGrid  = vtkUnstructuredGrid::New();
diff --git a/Filters/General/vtkTableToStructuredGrid.cxx b/Filters/General/vtkTableToStructuredGrid.cxx
index 294cea1..3067dac 100644
--- a/Filters/General/vtkTableToStructuredGrid.cxx
+++ b/Filters/General/vtkTableToStructuredGrid.cxx
@@ -18,6 +18,7 @@
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkObjectFactory.h"
+#include "vtkOnePieceExtentTranslator.h"
 #include "vtkPointData.h"
 #include "vtkPoints.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
@@ -63,6 +64,20 @@ int vtkTableToStructuredGrid::RequestInformation(
   vtkInformation *outInfo = outputVector->GetInformationObject(0);
   outInfo->Set(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(),
                this->WholeExtent, 6);
+
+  // Set up the ExtentTranslator so that all downstream piece requests are
+  // converted to whole-extent update requests, as the data only exists on process 0.
+  // In parallel, the filter expects the data to be available on the root node and
+  // therefore produces empty extents on all other nodes.
+  if (strcmp(
+      vtkStreamingDemandDrivenPipeline::GetExtentTranslator(outInfo)->GetClassName(),
+      "vtkOnePieceExtentTranslator") != 0)
+    {
+    vtkExtentTranslator* et = vtkOnePieceExtentTranslator::New();
+    vtkStreamingDemandDrivenPipeline::SetExtentTranslator(outInfo, et);
+    et->Delete();
+    }
+
   return 1;
 }
 
diff --git a/Filters/General/vtkTransformFilter.cxx b/Filters/General/vtkTransformFilter.cxx
index 797fcc9..d16b4be 100644
--- a/Filters/General/vtkTransformFilter.cxx
+++ b/Filters/General/vtkTransformFilter.cxx
@@ -37,6 +37,7 @@ vtkCxxSetObjectMacro(vtkTransformFilter,Transform,vtkAbstractTransform);
 vtkTransformFilter::vtkTransformFilter()
 {
   this->Transform = NULL;
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 vtkTransformFilter::~vtkTransformFilter()
@@ -159,6 +160,21 @@ int vtkTransformFilter::RequestData(
   numCells = input->GetNumberOfCells();
 
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(inPts->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->Allocate(numPts);
   if ( inVectors )
     {
@@ -290,4 +306,6 @@ void vtkTransformFilter::PrintSelf(ostream& os, vtkIndent indent)
   this->Superclass::PrintSelf(os,indent);
 
   os << indent << "Transform: " << this->Transform << "\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/General/vtkTransformFilter.h b/Filters/General/vtkTransformFilter.h
index 9b96c69..030ca27 100644
--- a/Filters/General/vtkTransformFilter.h
+++ b/Filters/General/vtkTransformFilter.h
@@ -55,6 +55,13 @@ public:
 
   virtual int FillInputPortInformation(int port, vtkInformation *info);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkTransformFilter();
   ~vtkTransformFilter();
@@ -65,7 +72,9 @@ protected:
   int RequestData(vtkInformation *,
                   vtkInformationVector **,
                   vtkInformationVector *);
+
   vtkAbstractTransform *Transform;
+  int OutputPointsPrecision;
 private:
   vtkTransformFilter(const vtkTransformFilter&);  // Not implemented.
   void operator=(const vtkTransformFilter&);  // Not implemented.
diff --git a/Filters/General/vtkTransformPolyDataFilter.cxx b/Filters/General/vtkTransformPolyDataFilter.cxx
index 7e9e476..2e6bfcd 100644
--- a/Filters/General/vtkTransformPolyDataFilter.cxx
+++ b/Filters/General/vtkTransformPolyDataFilter.cxx
@@ -31,6 +31,7 @@ vtkCxxSetObjectMacro(vtkTransformPolyDataFilter,
 vtkTransformPolyDataFilter::vtkTransformPolyDataFilter()
 {
   this->Transform = NULL;
+  this->OutputPointsPrecision = vtkAlgorithm::DEFAULT_PRECISION;
 }
 
 vtkTransformPolyDataFilter::~vtkTransformPolyDataFilter()
@@ -89,6 +90,21 @@ int vtkTransformPolyDataFilter::RequestData(
   numCells = input->GetNumberOfCells();
 
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    newPts->SetDataType(inPts->GetDataType());
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+
   newPts->Allocate(numPts);
   if ( inVectors )
     {
@@ -210,4 +226,6 @@ void vtkTransformPolyDataFilter::PrintSelf(ostream& os, vtkIndent indent)
   this->Superclass::PrintSelf(os,indent);
 
   os << indent << "Transform: " << this->Transform << "\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/General/vtkTransformPolyDataFilter.h b/Filters/General/vtkTransformPolyDataFilter.h
index 1ee676f..c7493ef 100644
--- a/Filters/General/vtkTransformPolyDataFilter.h
+++ b/Filters/General/vtkTransformPolyDataFilter.h
@@ -55,12 +55,21 @@ public:
   virtual void SetTransform(vtkAbstractTransform*);
   vtkGetObjectMacro(Transform,vtkAbstractTransform);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkTransformPolyDataFilter();
   ~vtkTransformPolyDataFilter();
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
+
   vtkAbstractTransform *Transform;
+  int OutputPointsPrecision;
 private:
   vtkTransformPolyDataFilter(const vtkTransformPolyDataFilter&);  // Not implemented.
   void operator=(const vtkTransformPolyDataFilter&);  // Not implemented.
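
Both transform-filter hunks above expose an OutputPointsPrecision setting. A minimal sketch
of forcing double-precision output points; the TransformInDouble helper, the translation
values, and the mesh variable are illustrative assumptions:

    #include "vtkAlgorithm.h"
    #include "vtkNew.h"
    #include "vtkPolyData.h"
    #include "vtkTransform.h"
    #include "vtkTransformPolyDataFilter.h"

    void TransformInDouble(vtkPolyData* mesh)
    {
      vtkNew<vtkTransform> xform;
      xform->Translate(1.0, 0.0, 0.0);

      vtkNew<vtkTransformPolyDataFilter> filter;
      filter->SetInputData(mesh);
      filter->SetTransform(xform.GetPointer());
      // Request VTK_DOUBLE output points regardless of the input precision;
      // DEFAULT_PRECISION would instead match the input's point data type.
      filter->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
      filter->Update();
    }
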
diff --git a/Filters/General/vtkWarpLens.h b/Filters/General/vtkWarpLens.h
index 3076048..08c7ff6 100644
--- a/Filters/General/vtkWarpLens.h
+++ b/Filters/General/vtkWarpLens.h
@@ -79,7 +79,7 @@ public:
 
 protected:
   vtkWarpLens();
-  ~vtkWarpLens() {};
+  ~vtkWarpLens() {}
 
   int RequestDataObject(vtkInformation *request,
                         vtkInformationVector **inputVector,
diff --git a/Filters/General/vtkWarpTo.h b/Filters/General/vtkWarpTo.h
index 25a745a..5f12d98 100644
--- a/Filters/General/vtkWarpTo.h
+++ b/Filters/General/vtkWarpTo.h
@@ -51,7 +51,7 @@ public:
 
 protected:
   vtkWarpTo();
-  ~vtkWarpTo() {};
+  ~vtkWarpTo() {}
 
   int RequestDataObject(vtkInformation *request,
                         vtkInformationVector **inputVector,
diff --git a/Filters/General/vtkWarpVector.cxx b/Filters/General/vtkWarpVector.cxx
index 26071af..e9b5973 100644
--- a/Filters/General/vtkWarpVector.cxx
+++ b/Filters/General/vtkWarpVector.cxx
@@ -15,6 +15,7 @@
 #include "vtkWarpVector.h"
 
 #include "vtkCellData.h"
+#include "vtkDataArrayIteratorMacro.h"
 #include "vtkImageData.h"
 #include "vtkImageDataToPointSet.h"
 #include "vtkInformation.h"
@@ -86,51 +87,47 @@ int vtkWarpVector::RequestDataObject(vtkInformation *request,
 }
 
 //----------------------------------------------------------------------------
-template <class T1, class T2>
-void vtkWarpVectorExecute2(vtkWarpVector *self, T1 *inPts,
-                           T1 *outPts, T2 *inVec, vtkIdType max)
+template <class InputIterator, class OutputType, class VectorIterator>
+void vtkWarpVectorExecute2(vtkWarpVector *self,
+                           InputIterator begin, InputIterator end,
+                           OutputType *outPts, VectorIterator inVec)
 {
-  vtkIdType ptId;
-  T1 scaleFactor = (T1)self->GetScaleFactor();
+  OutputType scaleFactor = static_cast<OutputType>(self->GetScaleFactor());
 
   // Loop over all points, adjusting locations
-  for (ptId=0; ptId < max; ptId++)
+  vtkIdType counter = 0;
+  vtkIdType numPts = static_cast<vtkIdType>(end - begin);
+  while (begin != end)
     {
-    if (!(ptId & 0xfff))
+    if (!(counter & 0xfff))
       {
-      self->UpdateProgress ((double)ptId/(max+1));
+      self->UpdateProgress(static_cast<double>(counter) /
+                           static_cast<double>(numPts+1));
       if (self->GetAbortExecute())
         {
         break;
         }
       }
 
-    *outPts = *inPts + scaleFactor * (T1)(*inVec);
-    outPts++; inPts++; inVec++;
-    *outPts = *inPts + scaleFactor * (T1)(*inVec);
-    outPts++; inPts++; inVec++;
-    *outPts = *inPts + scaleFactor * (T1)(*inVec);
-    outPts++; inPts++; inVec++;
+    *outPts++ = *begin++ + scaleFactor * static_cast<OutputType>(*inVec++);
+    *outPts++ = *begin++ + scaleFactor * static_cast<OutputType>(*inVec++);
+    *outPts++ = *begin++ + scaleFactor * static_cast<OutputType>(*inVec++);
     }
 }
 
 //----------------------------------------------------------------------------
-template <class T>
+template <class InputIterator, class OutputType>
 void vtkWarpVectorExecute(vtkWarpVector *self,
-                          T *inPts,
-                          T *outPts,
-                          vtkIdType max,
+                          InputIterator begin,
+                          InputIterator end,
+                          OutputType *outPts,
                           vtkDataArray *vectors)
 {
-  void *inVec;
-  inVec = vectors->GetVoidPointer(0);
-
   // call templated function
   switch (vectors->GetDataType())
     {
-    vtkTemplateMacro(
-      vtkWarpVectorExecute2(self, inPts, outPts,
-                            (VTK_TT *)(inVec), max));
+    vtkDataArrayIteratorMacro(vectors,
+      vtkWarpVectorExecute2(self, begin, end, outPts, vtkDABegin));
     default:
       break;
     }
@@ -206,15 +203,16 @@ int vtkWarpVector::RequestData(
   output->SetPoints(points);
   points->Delete();
 
-  void *inPtr = input->GetPoints()->GetVoidPointer(0);
+  // We know that this array has a standard memory layout, as we just created
+  // it above.
   void *outPtr = output->GetPoints()->GetVoidPointer(0);
 
   // call templated function
   switch (input->GetPoints()->GetDataType())
     {
-    vtkTemplateMacro(
-      vtkWarpVectorExecute( this, (VTK_TT *)(inPtr),
-                            (VTK_TT *)(outPtr), numPts, vectors) );
+    vtkDataArrayIteratorMacro(input->GetPoints()->GetData(),
+      vtkWarpVectorExecute(this, vtkDABegin, vtkDAEnd,
+                           static_cast<vtkDAValueType*>(outPtr), vectors));
     default:
       break;
     }
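
The vtkWarpVector rewrite above replaces raw void-pointer access with the
vtkDataArrayIteratorMacro dispatch shown in its switch statements. A minimal sketch of the
same pattern applied to an arbitrary vtkDataArray; the SumRange/SumComponents helpers are
illustrative and not part of the patch:

    #include "vtkDataArray.h"
    #include "vtkDataArrayIteratorMacro.h"

    template <class Iterator>
    void SumRange(Iterator begin, Iterator end, double& sum)
    {
      for (; begin != end; ++begin)
        {
        sum += static_cast<double>(*begin);
        }
    }

    double SumComponents(vtkDataArray* array)
    {
      double sum = 0.0;
      switch (array->GetDataType())
        {
        // The macro expands to one case per native data type and provides the
        // typed vtkDABegin/vtkDAEnd iterators for that type.
        vtkDataArrayIteratorMacro(array, SumRange(vtkDABegin, vtkDAEnd, sum));
        default:
          break;
        }
      return sum;
    }
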
diff --git a/Filters/General/vtkYoungsMaterialInterface.cxx b/Filters/General/vtkYoungsMaterialInterface.cxx
index e103f1a..be6139b 100644
--- a/Filters/General/vtkYoungsMaterialInterface.cxx
+++ b/Filters/General/vtkYoungsMaterialInterface.cxx
@@ -55,7 +55,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include <algorithm>
 
 #include <math.h>
-#include <assert.h>
+#include <cassert>
 
 class vtkYoungsMaterialInterfaceCellCut
 {
@@ -1736,24 +1736,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
   struct uint4 {unsigned int x,y,z,w; };
   struct uchar4 {unsigned char x,y,z,w; };
   struct uchar3 {unsigned char x,y,z; };
-  FUNC_DECL float2 make_float2(float x,float y)
-  {
-    float2 v = {x,y};
-    return v;
-  }
-  FUNC_DECL float3 make_float3(float x,float y,float z)
-  {
-    float3 v = {x,y,z};
-    return v;
-  }
-  FUNC_DECL float4 make_float4(float x,float y,float z,float w)
-  {
-    float4 v = {x,y,z,w};
-    return v;
-  }
-
-  FUNC_DECL float min(float a, float b){ return (a<b)?a:b; }
-  FUNC_DECL float max(float a, float b){ return (a>b)?a:b; }
 
 #else
 #include <vector_types.h>
@@ -1891,33 +1873,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     return a.x * b.x + a.y * b.y + a.z * b.z + a.w * b.w;
   }
 
-  FUNC_DECL float clamp(float f, float a, float b)
-  {
-    return max(a, min(f, b));
-  }
-
-  FUNC_DECL float3 clamp(float3 v, float a, float b)
-  {
-    return make_float3(clamp(v.x, a, b), clamp(v.y, a, b), clamp(v.z, a, b));
-  }
-
-  FUNC_DECL float3 clamp(float3 v, float3 a, float3 b)
-  {
-    return make_float3(clamp(v.x, a.x, b.x), clamp(v.y, a.y, b.y), clamp(v.z, a.z, b.z));
-  }
-
-  FUNC_DECL float2 normalize(float2 v)
-  {
-    float len = 1.0f / sqrtf(dot(v, v));
-    return make_float2(v.x * len, v.y * len);
-  }
-
-  FUNC_DECL float3 normalize(float3 v)
-  {
-    float len = 1.0f / sqrtf(dot(v, v));
-    return make_float3(v.x * len, v.y * len, v.z * len);
-  }
-
   FUNC_DECL float3 cross( float3 A, float3 B)
   {
     return make_float3( A.y * B.z - A.z * B.y ,
@@ -1940,7 +1895,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
   struct double4 { double x,y,z,w; };
 
   FUNC_DECL double min(double a, double b){ return (a<b)?a:b; }
-  FUNC_DECL double max(double a, double b){ return (a>b)?a:b; }
 
   FUNC_DECL double2 make_double2(double x,double y)
   {
@@ -1960,40 +1914,16 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     return v;
   }
 
-  FUNC_DECL  double3 operator *(double3 a, double3 b)
-  {
-    return make_double3(a.x*b.x, a.y*b.y, a.z*b.z);
-  }
-
   FUNC_DECL double3 operator *(double f, double3 v)
   {
     return make_double3(v.x*f, v.y*f, v.z*f);
   }
 
-  FUNC_DECL double3 operator *(double3 v, double f)
-  {
-    return make_double3(v.x*f, v.y*f, v.z*f);
-  }
-
-  FUNC_DECL double2 operator *(double2 v, double f)
-  {
-    return make_double2(v.x*f, v.y*f);
-  }
-
   FUNC_DECL double2 operator *(double f, double2 v)
   {
     return make_double2(v.x*f, v.y*f);
   }
 
-  FUNC_DECL double4 operator *(double4 v, double f)
-  {
-    return make_double4(v.x*f, v.y*f, v.z*f, v.w*f);
-  }
-  FUNC_DECL double4 operator *(double f, double4 v)
-  {
-    return make_double4(v.x*f, v.y*f, v.z*f, v.w*f);
-  }
-
 
   FUNC_DECL double3 operator +(double3 a, double3 b)
   {
@@ -2018,14 +1948,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
   }
 
 
-  FUNC_DECL void operator +=(double4 & b, double4 a)
-  {
-    b.x += a.x;
-    b.y += a.y;
-    b.z += a.z;
-    b.w += a.w;
-  }
-
   FUNC_DECL double3 operator - (double3 a, double3 b)
   {
     return make_double3(a.x-b.x, a.y-b.y, a.z-b.z);
@@ -2036,18 +1958,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     return make_double2(a.x-b.x, a.y-b.y);
   }
 
-  FUNC_DECL void operator -= (double3 & b, double3 a)
-  {
-    b.x -= a.x;
-    b.y -= a.y;
-    b.z -= a.z;
-  }
-
-  FUNC_DECL double3 operator / (double3 v, double f)
-  {
-    return make_double3( v.x/f, v.y/f, v.z/f );
-  }
-
   FUNC_DECL void operator /=(double2 & b, double f)
   {
     b.x /= f;
@@ -2071,38 +1981,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     return a.x * b.x + a.y * b.y + a.z * b.z;
   }
 
-  FUNC_DECL double dot(double4 a, double4 b)
-  {
-    return a.x * b.x + a.y * b.y + a.z * b.z + a.w * b.w;
-  }
-
-  FUNC_DECL double clamp(double f, double a, double b)
-  {
-    return max(a, min(f, b));
-  }
-
-  FUNC_DECL double3 clamp(double3 v, double a, double b)
-  {
-    return make_double3(clamp(v.x, a, b), clamp(v.y, a, b), clamp(v.z, a, b));
-  }
-
-  FUNC_DECL double3 clamp(double3 v, double3 a, double3 b)
-  {
-    return make_double3(clamp(v.x, a.x, b.x), clamp(v.y, a.y, b.y), clamp(v.z, a.z, b.z));
-  }
-
-  FUNC_DECL double3 normalize(double3 v)
-  {
-    double len = sqrt(dot(v, v));
-    return make_double3(v.x / len, v.y / len, v.z / len);
-  }
-
-  FUNC_DECL double2 normalize(double2 v)
-  {
-    double len = sqrt( dot(v,v) );
-    return make_double2(v.x / len, v.y / len);
-  }
-
   FUNC_DECL double3 cross( double3 A, double3 B)
   {
     return make_double3( A.y * B.z - A.z * B.y ,
@@ -2255,33 +2133,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     return a.x * b.x + a.y * b.y + a.z * b.z + a.w * b.w;
   }
 
-  FUNC_DECL long double clamp(long double f, long double a, long double b)
-  {
-    return max(a, min(f, b));
-  }
-
-  FUNC_DECL ldouble3 clamp(ldouble3 v, long double a, long double b)
-  {
-    return make_ldouble3(clamp(v.x, a, b), clamp(v.y, a, b), clamp(v.z, a, b));
-  }
-
-  FUNC_DECL ldouble3 clamp(ldouble3 v, ldouble3 a, ldouble3 b)
-  {
-    return make_ldouble3(clamp(v.x, a.x, b.x), clamp(v.y, a.y, b.y), clamp(v.z, a.z, b.z));
-  }
-
-  FUNC_DECL ldouble2 normalize(ldouble2 v)
-  {
-    long double len = sqrtl( dot(v,v) );
-    return make_ldouble2(v.x / len, v.y / len);
-  }
-
-  FUNC_DECL ldouble3 normalize(ldouble3 v)
-  {
-    long double len = sqrtl( dot(v,v) );
-    return make_ldouble3(v.x / len, v.y / len, v.z / len);
-  }
-
   FUNC_DECL ldouble3 cross( ldouble3 A, ldouble3 B)
   {
     return make_ldouble3( A.y * B.z - A.z * B.y ,
@@ -2379,22 +2230,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
       SQRT( FABS( 4*a*c - (a-b+c)*(a-b+c) ) )
       ;
   }
-  FUNC_DECL
-  REAL triangleSurf( REAL2 p1, REAL2 p2, REAL2 p3 )
-  {
-    const REAL2 e1 = p2-p1;
-    const REAL2 e2 = p3-p2;
-    const REAL2 e3 = p1-p3;
-
-    const REAL a = dot(e1,e1);
-    const REAL b = dot(e2,e2);
-    const REAL c = dot(e3,e3);
-
-    return
-      REAL_CONST(0.25) *
-      SQRT( FABS( 4*a*c - (a-b+c)*(a-b+c) ) )
-      ;
-  }
 
 
   /*************************
@@ -2411,13 +2246,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     return FABS( dot(A,BC) / REAL_CONST(6.0) );
   }
 
-  FUNC_DECL
-  REAL tetraVolume( const uchar4 tetra, const REAL3* vertices )
-  {
-    return tetraVolume( vertices[tetra.x], vertices[tetra.y], vertices[tetra.z], vertices[tetra.w] );
-  }
-
-
   /*******************************************
    *** Evaluation of a polynomial function ***
    *******************************************/
@@ -2457,23 +2285,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     return make_REAL4( quadFunc.x/3, quadFunc.y/2, quadFunc.z, 0 );
   }
 
-  /*******************************************
-   *** Derivative of a polynomial function ***
-   *******************************************/
-  FUNC_DECL
-  REAL2 derivatePolynomialFunc( REAL3 F )
-  {
-    REAL2 dF = make_REAL2( 2*F.x, F.y );
-    return dF;
-  }
-
-  FUNC_DECL
-  REAL3 derivatePolynomialFunc( REAL4 F )
-  {
-    REAL3 dF = make_REAL3( 3*F.x, 2*F.y, F.z );
-    return dF;
-  }
-
   /****************************
    *** Linear interpolation ***
    ****************************/
@@ -2491,14 +2302,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     return x0 + f * (x1-x0) ;
   }
 
-  FUNC_DECL
-  REAL linearInterp( REAL t0, REAL x0, REAL t1, REAL x1, REAL t )
-  {
-    REAL f = (t1!=t0) ? (t-t0)/(t1-t0) : REAL_CONST(0.0) ;
-    return x0 + f * (x1-x0) ;
-  }
-
-
   /****************************************
    *** Quadratic interpolation function ***
    ****************************************/
@@ -2536,38 +2339,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
   }
 
 
-  /**************************************
-   *** Analytic solver for ax²+bx+c=0 ***
-   **************************************/
-  FUNC_DECL
-  REAL quadraticFunctionSolve( REAL3 F, const REAL value, const REAL xmin, const REAL xmax )
-  {
-    // Analytic resolution of ax²+bx+c=0
-    // (!) numerically unsteady, the Newton method is preferred despite being REALLY slower
-
-    F.z -= value;
-
-    REAL delta = ( F.y * F.y ) - (4 * F.x * F.z);
-    REAL sqrt_delta = SQRT(delta);
-    REAL x = ( -F.y - sqrt_delta ) / ( 2 * F.x );
-    DBG_MESG("delta="<<delta<<", sqrt(delta)="<<sqrt_delta<<", x1="<<x<<", xmin="<<xmin<<", xmax="<<xmax);
-    if( x < xmin || x > xmax ) // choose a solution inside the bounds [xmin;xmax]
-      {
-      x = ( -F.y + sqrt_delta ) / ( 2 * F.x );
-      DBG_MESG("x2="<<x);
-      }
-
-    if( F.x == REAL_CONST(0.0) ) // < EPSILON ?
-      {
-      x = (F.y!=0) ? ( - F.z / F.y ) : xmin /* or nan or 0 ? */;
-      DBG_MESG("xlin="<<x);
-      }
-
-    x = clamp( x , xmin , xmax ); // numerical safety
-    DBG_MESG("clamp(x)="<<x);
-    return x;
-  }
-
   /****************************
    *** Newton search method ***
    ****************************/
@@ -2650,17 +2421,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
    *** Sorting methods ***
    ***********************/
   FUNC_DECL
-  uint3 sortTriangle( uint3 t , unsigned int* i )
-  {
-#define SWAP(a,b) { unsigned int tmp=a; a=b; b=tmp; }
-    if( i[t.y] < i[t.x] ) SWAP(t.x,t.y);
-    if( i[t.z] < i[t.y] ) SWAP(t.y,t.z);
-    if( i[t.y] < i[t.x] ) SWAP(t.x,t.y);
-#undef SWAP
-    return t;
-  }
-
-  FUNC_DECL
   uchar3 sortTriangle( uchar3 t , unsigned char* i )
   {
 #define SWAP(a,b) { unsigned char tmp=a; a=b; b=tmp; }
@@ -2678,24 +2438,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
    *** Sorting methods ***
    ***********************/
   FUNC_DECL
-  void sortVertices( const int n, const REAL* dist, IntType* indices )
-  {
-    // insertion sort : slow but symmetrical across all instances
-#define SWAP(a,b) { IntType t = indices[a]; indices[a] = indices[b]; indices[b] = t; }
-    for(int i = 0;i<n;i++)
-      {
-      int imin = i;
-      for(int j=i+1;j<n;j++)
-        {
-        imin = ( dist[indices[j]] < dist[indices[imin]] ) ? j : imin;
-        }
-      SWAP( i, imin );
-      }
-#undef SWAP
-  }
-
-
-  FUNC_DECL
   void sortVertices( const int n, const REAL3* vertices, const REAL3 normal, IntType* indices )
   {
     // insertion sort : slow but symmetrical across all instances
@@ -2909,7 +2651,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     DBG_MESG( "step="<<s<<", x in ["<<xmin<<';'<<xmax<<']' );
     DBG_MESG( "surface reminder = "<< y );
 
-    //REAL x = quadraticFunctionSolve( funcs[s], surface, xmin, xmax ); // analytic solution is highly unsteady
     // newton search
     REAL x = newtonSearchPolynomialFunc( surfaceFunction, derivatives[s], y, xmin, xmax );
 
@@ -3069,7 +2810,6 @@ namespace vtkYoungsMaterialInterfaceCellCutInternals
     DBG_MESG( "step="<<s<<", x in ["<<xmin<<';'<<xmax<<']' );
     DBG_MESG( "surface reminder = "<< y );
 
-    //REAL x = quadraticFunctionSolve( funcs[s], surface, xmin, xmax ); // analytical solution is highly unstable
     // newton search method
     REAL x = newtonSearchPolynomialFunc( volumeFunction, derivatives[s], y, xmin, xmax );
 
diff --git a/Filters/Generic/Testing/Cxx/CMakeLists.txt b/Filters/Generic/Testing/Cxx/CMakeLists.txt
index 3c6ecbf..385c0d6 100644
--- a/Filters/Generic/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Generic/Testing/Cxx/CMakeLists.txt
@@ -1,6 +1,6 @@
 # Tests with data
-if(VTK_DATA_ROOT)
-  set(NEEDS_DATA
+vtk_add_test_cxx(TestGenericStreamTracer.cxx)
+vtk_add_test_cxx(
   TestGenericClip.cxx
   TestGenericContourFilter.cxx
   TestGenericCutter.cxx
@@ -8,9 +8,7 @@ if(VTK_DATA_ROOT)
   TestGenericGeometryFilter.cxx
   TestGenericGlyph3DFilter.cxx
   TestGenericProbeFilter.cxx
-  TestGenericStreamTracer.cxx
+  otherCreation.cxx,NO_VALID
   )
-endif()
-vtk_tests(BASELINEDIR GenericFiltering DATADIR ${VTK_DATA_ROOT}
-  ${NEEDS_DATA}
-  otherCreation.cxx)
+
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/Generic/Testing/Cxx/TestGenericClip.cxx b/Filters/Generic/Testing/Cxx/TestGenericClip.cxx
index 33b70a9..87e876c 100644
--- a/Filters/Generic/Testing/Cxx/TestGenericClip.cxx
+++ b/Filters/Generic/Testing/Cxx/TestGenericClip.cxx
@@ -39,7 +39,7 @@
 #include "vtkGenericClip.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkDataSetMapper.h"
 #include "vtkPolyData.h"
diff --git a/Filters/Generic/Testing/Cxx/TestGenericContourFilter.cxx b/Filters/Generic/Testing/Cxx/TestGenericContourFilter.cxx
index fd32c1f..76a1bca 100644
--- a/Filters/Generic/Testing/Cxx/TestGenericContourFilter.cxx
+++ b/Filters/Generic/Testing/Cxx/TestGenericContourFilter.cxx
@@ -36,7 +36,7 @@
 #include "vtkGenericContourFilter.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyDataMapper.h"
 #include "vtkPolyData.h"
diff --git a/Filters/Generic/Testing/Cxx/TestGenericCutter.cxx b/Filters/Generic/Testing/Cxx/TestGenericCutter.cxx
index 89a850f..0fcfac9 100644
--- a/Filters/Generic/Testing/Cxx/TestGenericCutter.cxx
+++ b/Filters/Generic/Testing/Cxx/TestGenericCutter.cxx
@@ -36,7 +36,7 @@
 #include "vtkGenericCutter.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkDataSetMapper.h"
 #include "vtkPolyData.h"
diff --git a/Filters/Generic/Testing/Cxx/TestGenericDataSetTessellator.cxx b/Filters/Generic/Testing/Cxx/TestGenericDataSetTessellator.cxx
index 5e8cc71..831da7f 100644
--- a/Filters/Generic/Testing/Cxx/TestGenericDataSetTessellator.cxx
+++ b/Filters/Generic/Testing/Cxx/TestGenericDataSetTessellator.cxx
@@ -40,7 +40,7 @@
 #include "vtkGenericDataSetTessellator.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkDataSetMapper.h"
 #include "vtkLabeledDataMapper.h"
diff --git a/Filters/Generic/Testing/Cxx/TestGenericGeometryFilter.cxx b/Filters/Generic/Testing/Cxx/TestGenericGeometryFilter.cxx
index a765bc1..b1e30e9 100644
--- a/Filters/Generic/Testing/Cxx/TestGenericGeometryFilter.cxx
+++ b/Filters/Generic/Testing/Cxx/TestGenericGeometryFilter.cxx
@@ -38,7 +38,7 @@
 #include "vtkGenericGeometryFilter.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyDataMapper.h"
 #include "vtkPolyData.h"
diff --git a/Filters/Generic/Testing/Cxx/TestGenericGlyph3DFilter.cxx b/Filters/Generic/Testing/Cxx/TestGenericGlyph3DFilter.cxx
index 1af844e..9fc6f67 100644
--- a/Filters/Generic/Testing/Cxx/TestGenericGlyph3DFilter.cxx
+++ b/Filters/Generic/Testing/Cxx/TestGenericGlyph3DFilter.cxx
@@ -36,7 +36,7 @@
 #include "vtkGenericGeometryFilter.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyDataMapper.h"
 #include "vtkPolyData.h"
diff --git a/Filters/Generic/Testing/Cxx/TestGenericProbeFilter.cxx b/Filters/Generic/Testing/Cxx/TestGenericProbeFilter.cxx
index a57ecee..216f84e 100644
--- a/Filters/Generic/Testing/Cxx/TestGenericProbeFilter.cxx
+++ b/Filters/Generic/Testing/Cxx/TestGenericProbeFilter.cxx
@@ -38,7 +38,7 @@
 #include "vtkGenericGeometryFilter.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyDataMapper.h"
 #include "vtkPolyData.h"
diff --git a/Filters/Generic/Testing/Cxx/TestGenericStreamTracer.cxx b/Filters/Generic/Testing/Cxx/TestGenericStreamTracer.cxx
index a5c5da4..113afcb 100644
--- a/Filters/Generic/Testing/Cxx/TestGenericStreamTracer.cxx
+++ b/Filters/Generic/Testing/Cxx/TestGenericStreamTracer.cxx
@@ -37,7 +37,7 @@
 #include "vtkBridgeDataSet.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkGeometricErrorMetric.h"
 #include "vtkAttributesErrorMetric.h"
 #include "vtkGenericOutlineFilter.h"
diff --git a/Filters/Generic/Testing/Cxx/otherCreation.cxx b/Filters/Generic/Testing/Cxx/otherCreation.cxx
index 5515da9..d045fe2 100644
--- a/Filters/Generic/Testing/Cxx/otherCreation.cxx
+++ b/Filters/Generic/Testing/Cxx/otherCreation.cxx
@@ -33,7 +33,7 @@
 #include "vtkDoubleArray.h"
 #include "vtkPointData.h"
 #include "vtkGenericAttribute.h"
-#include <assert.h>
+#include <cassert>
 #include <string>
 #include <vtksys/ios/sstream>
 
diff --git a/Filters/Generic/Testing/Data/Baseline/TestGenericClip.png.md5 b/Filters/Generic/Testing/Data/Baseline/TestGenericClip.png.md5
new file mode 100644
index 0000000..abfd2e0
--- /dev/null
+++ b/Filters/Generic/Testing/Data/Baseline/TestGenericClip.png.md5
@@ -0,0 +1 @@
+2ff9c8c78ee55f7bfee685b4901fa87c
diff --git a/Filters/Generic/Testing/Data/Baseline/TestGenericContourFilter.png.md5 b/Filters/Generic/Testing/Data/Baseline/TestGenericContourFilter.png.md5
new file mode 100644
index 0000000..277e24a
--- /dev/null
+++ b/Filters/Generic/Testing/Data/Baseline/TestGenericContourFilter.png.md5
@@ -0,0 +1 @@
+bb063027a90f23cb6ac2d9991a267d38
diff --git a/Filters/Generic/Testing/Data/Baseline/TestGenericCutter.png.md5 b/Filters/Generic/Testing/Data/Baseline/TestGenericCutter.png.md5
new file mode 100644
index 0000000..656e963
--- /dev/null
+++ b/Filters/Generic/Testing/Data/Baseline/TestGenericCutter.png.md5
@@ -0,0 +1 @@
+8c41e5da738b2a8a4dfd2c72b473df4e
diff --git a/Filters/Generic/Testing/Data/Baseline/TestGenericDataSetTessellator.png.md5 b/Filters/Generic/Testing/Data/Baseline/TestGenericDataSetTessellator.png.md5
new file mode 100644
index 0000000..de1f2ce
--- /dev/null
+++ b/Filters/Generic/Testing/Data/Baseline/TestGenericDataSetTessellator.png.md5
@@ -0,0 +1 @@
+fd4a939611a9c0f9795859fb8f4b1992
diff --git a/Filters/Generic/Testing/Data/Baseline/TestGenericGeometryFilter.png.md5 b/Filters/Generic/Testing/Data/Baseline/TestGenericGeometryFilter.png.md5
new file mode 100644
index 0000000..56cff90
--- /dev/null
+++ b/Filters/Generic/Testing/Data/Baseline/TestGenericGeometryFilter.png.md5
@@ -0,0 +1 @@
+e0c53234ec26a0ec9b5e81d9fb438fa5
diff --git a/Filters/Generic/Testing/Data/Baseline/TestGenericGlyph3DFilter.png.md5 b/Filters/Generic/Testing/Data/Baseline/TestGenericGlyph3DFilter.png.md5
new file mode 100644
index 0000000..19e94a5
--- /dev/null
+++ b/Filters/Generic/Testing/Data/Baseline/TestGenericGlyph3DFilter.png.md5
@@ -0,0 +1 @@
+39898722550ac7ea675e52431a035502
diff --git a/Filters/Generic/Testing/Data/Baseline/TestGenericProbeFilter.png.md5 b/Filters/Generic/Testing/Data/Baseline/TestGenericProbeFilter.png.md5
new file mode 100644
index 0000000..e744499
--- /dev/null
+++ b/Filters/Generic/Testing/Data/Baseline/TestGenericProbeFilter.png.md5
@@ -0,0 +1 @@
+40ad1ccc41aaac9255fd3137584f2483
diff --git a/Filters/Generic/Testing/Data/Baseline/TestGenericStreamTracer.png.md5 b/Filters/Generic/Testing/Data/Baseline/TestGenericStreamTracer.png.md5
new file mode 100644
index 0000000..d1f1d93
--- /dev/null
+++ b/Filters/Generic/Testing/Data/Baseline/TestGenericStreamTracer.png.md5
@@ -0,0 +1 @@
+1954e68d1c6afb295aa24a5ba49d3a88
diff --git a/Filters/Generic/vtkGenericStreamTracer.cxx b/Filters/Generic/vtkGenericStreamTracer.cxx
index b07be72..ade5e32 100644
--- a/Filters/Generic/vtkGenericStreamTracer.cxx
+++ b/Filters/Generic/vtkGenericStreamTracer.cxx
@@ -30,7 +30,7 @@
 #include "vtkGenericAttributeCollection.h"
 #include "vtkGenericAttribute.h"
 #include "vtkGenericAdaptorCell.h"
-#include <assert.h>
+#include <cassert>
 
 #include "vtkInformation.h"
 #include "vtkExecutive.h" // for GetExecutive()
diff --git a/Filters/Geometry/Testing/Cxx/CMakeLists.txt b/Filters/Geometry/Testing/Cxx/CMakeLists.txt
index abba487..e36e65a 100644
--- a/Filters/Geometry/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Geometry/Testing/Cxx/CMakeLists.txt
@@ -1,33 +1,12 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   TestExtractSurfaceNonLinearSubdivision.cxx
-  TestImageDataToUniformGrid.cxx
-  TestProjectSphereFilter.cxx
-  TestStructuredAMRNeighbor.cxx
-  TestUniformGridGhostDataGenerator.cxx
+  TestImageDataToUniformGrid.cxx,NO_VALID
+  TestProjectSphereFilter.cxx,NO_VALID
+  TestStructuredAMRNeighbor.cxx,NO_VALID
+  TestUniformGridGhostDataGenerator.cxx,NO_VALID
   TestUnstructuredGridGeometryFilter.cxx
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
 
 ## list of tests that are not included in the main driver
 set(MyTests
diff --git a/Filters/Geometry/Testing/Cxx/TestStructuredAMRGridConnectivity.cxx b/Filters/Geometry/Testing/Cxx/TestStructuredAMRGridConnectivity.cxx
index 6a5e408..6bacf06 100644
--- a/Filters/Geometry/Testing/Cxx/TestStructuredAMRGridConnectivity.cxx
+++ b/Filters/Geometry/Testing/Cxx/TestStructuredAMRGridConnectivity.cxx
@@ -63,7 +63,7 @@ const int BlocksPerLevel[2] = {2,2};
 // (level,imin,imax,jmin,jmax,kmin,kmax)
 // Where imin,imax,jmin,jmax,kmin,kmax are defined w.r.t. a virtual grid
 // that covers the entire domain at level 0.
-int Patches[4][7] = {
+static int Patches[4][7] = {
     {0,0,2,0,5,0,5},
     {0,2,5,0,5,0,5},
     {1,1,4,2,4,0,5},
diff --git a/Filters/Geometry/Testing/Cxx/TestUnstructuredGridGeometryFilter.cxx b/Filters/Geometry/Testing/Cxx/TestUnstructuredGridGeometryFilter.cxx
index 25180b9..7f8c7f5 100644
--- a/Filters/Geometry/Testing/Cxx/TestUnstructuredGridGeometryFilter.cxx
+++ b/Filters/Geometry/Testing/Cxx/TestUnstructuredGridGeometryFilter.cxx
@@ -49,7 +49,7 @@
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
 #include "vtkXMLUnstructuredGridReader.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyData.h"
 #include "vtkUnstructuredGridGeometryFilter.h"
diff --git a/Filters/Geometry/Testing/Data/Baseline/TestExtractSurfaceNonLinearSubdivision.png.md5 b/Filters/Geometry/Testing/Data/Baseline/TestExtractSurfaceNonLinearSubdivision.png.md5
new file mode 100644
index 0000000..cc84efb
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/TestExtractSurfaceNonLinearSubdivision.png.md5
@@ -0,0 +1 @@
+c242bed6491c71bda1238339198c71be
diff --git a/Filters/Geometry/Testing/Data/Baseline/TestUnstructuredGridGeometryFilter.png.md5 b/Filters/Geometry/Testing/Data/Baseline/TestUnstructuredGridGeometryFilter.png.md5
new file mode 100644
index 0000000..e3c15fd
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/TestUnstructuredGridGeometryFilter.png.md5
@@ -0,0 +1 @@
+461f25e660723885ea7beaed1933cae3
diff --git a/Filters/Geometry/Testing/Data/Baseline/dataSetSurfaceFilter.png.md5 b/Filters/Geometry/Testing/Data/Baseline/dataSetSurfaceFilter.png.md5
new file mode 100644
index 0000000..7156fe8
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/dataSetSurfaceFilter.png.md5
@@ -0,0 +1 @@
+7ac5b8711b81ae04af3143e5be577306
diff --git a/Filters/Geometry/Testing/Data/Baseline/geomFilter.png.md5 b/Filters/Geometry/Testing/Data/Baseline/geomFilter.png.md5
new file mode 100644
index 0000000..0e1f3e1
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/geomFilter.png.md5
@@ -0,0 +1 @@
+79c422e0779e265d621d9cd447f48010
diff --git a/Filters/Geometry/Testing/Data/Baseline/officeStreamPoints.png.md5 b/Filters/Geometry/Testing/Data/Baseline/officeStreamPoints.png.md5
new file mode 100644
index 0000000..9affcb0
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/officeStreamPoints.png.md5
@@ -0,0 +1 @@
+29fcfe5a08365fefd75305951ceba26a
diff --git a/Filters/Geometry/Testing/Data/Baseline/rectGrid.png.md5 b/Filters/Geometry/Testing/Data/Baseline/rectGrid.png.md5
new file mode 100644
index 0000000..beea6ab
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/rectGrid.png.md5
@@ -0,0 +1 @@
+bb120a60db5d92586701ddc84cad5e19
diff --git a/Filters/Geometry/Testing/Data/Baseline/rectGrid_1.png.md5 b/Filters/Geometry/Testing/Data/Baseline/rectGrid_1.png.md5
new file mode 100644
index 0000000..2321845
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/rectGrid_1.png.md5
@@ -0,0 +1 @@
+1274b01735087314a798dd9b5c00cb95
diff --git a/Filters/Geometry/Testing/Data/Baseline/rectGrid_2.png.md5 b/Filters/Geometry/Testing/Data/Baseline/rectGrid_2.png.md5
new file mode 100644
index 0000000..2fc1406
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/rectGrid_2.png.md5
@@ -0,0 +1 @@
+50bed013ea7b589b138f3036747e46fd
diff --git a/Filters/Geometry/Testing/Data/Baseline/rectGrid_3.png.md5 b/Filters/Geometry/Testing/Data/Baseline/rectGrid_3.png.md5
new file mode 100644
index 0000000..28e9972
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/rectGrid_3.png.md5
@@ -0,0 +1 @@
+dd2742818d279648fdbbd8d6455dbf2e
diff --git a/Filters/Geometry/Testing/Data/Baseline/structuredGridBlanking.png.md5 b/Filters/Geometry/Testing/Data/Baseline/structuredGridBlanking.png.md5
new file mode 100644
index 0000000..6af5091
--- /dev/null
+++ b/Filters/Geometry/Testing/Data/Baseline/structuredGridBlanking.png.md5
@@ -0,0 +1 @@
+9ddf54d79a77f118d72ea5a8666553a6
diff --git a/Filters/Geometry/Testing/Python/CMakeLists.txt b/Filters/Geometry/Testing/Python/CMakeLists.txt
index 348a4f0..d7f45ed 100644
--- a/Filters/Geometry/Testing/Python/CMakeLists.txt
+++ b/Filters/Geometry/Testing/Python/CMakeLists.txt
@@ -1,6 +1,5 @@
-if(VTK_DATA_ROOT)
-  add_test_python(dataSetSurfaceFilter.py Graphics)
-  add_test_python(geomFilter.py Graphics)
-  add_test_python(officeStreamPoints.py Graphics)
-  add_test_python(rectGrid.py Graphics)
-endif()
+vtk_add_test_python(dataSetSurfaceFilter.py)
+vtk_add_test_python(geomFilter.py)
+vtk_add_test_python(officeStreamPoints.py)
+vtk_add_test_python(rectGrid.py)
+vtk_add_test_python(structuredGridBlanking.py)
diff --git a/Filters/Geometry/Testing/Python/structuredGridBlanking.py b/Filters/Geometry/Testing/Python/structuredGridBlanking.py
new file mode 100755
index 0000000..be4a16e
--- /dev/null
+++ b/Filters/Geometry/Testing/Python/structuredGridBlanking.py
@@ -0,0 +1,83 @@
+#!/usr/bin/env python
+
+#test exists to verify that structured grid blanking works as expected
+
+import vtk
+from vtk.test import Testing
+
+#make up a toy structured grid with known characteristics
+xlim=10
+ylim=10
+zlim=3
+
+sg = vtk.vtkStructuredGrid()
+sg.SetExtent(0,xlim,0,ylim,0,zlim)
+
+#a handy point iterator, calls action() on each point
+def forEachPoint(xlim,ylim,zlim, action):
+    for z in xrange(0,zlim+1):
+        for y in xrange(0,ylim+1):
+            for x in xrange(0,xlim+1):
+                 action((x,y,z))
+
+#make geometry
+points = vtk.vtkPoints()
+def makeCoordinate(pt):
+    points.InsertNextPoint(pt)
+forEachPoint(xlim,ylim,zlim, makeCoordinate)
+sg.SetPoints(points)
+
+#make a scalar array
+scalars = vtk.vtkDoubleArray()
+scalars.SetNumberOfComponents(1)
+scalars.SetName("Xcoord")
+def makeScalar(pt):
+    scalars.InsertNextValue(pt[0]+pt[1]+pt[2])
+forEachPoint(xlim,ylim,zlim, makeScalar)
+sg.GetPointData().SetScalars(scalars)
+
+#blank some arbitrarily chosen cells
+numcells = sg.GetNumberOfCells()
+if 11 < numcells:
+    sg.BlankCell(11)
+if 64 < numcells:
+    sg.BlankCell(64)
+if 164 < numcells:
+    sg.BlankCell(164)
+for c in xrange(180,261):
+    if c < sg.GetNumberOfCells():
+        sg.BlankCell(c)
+
+dsf = vtk.vtkDataSetSurfaceFilter()
+dsf.SetInputData(sg)
+dsf.Update()
+nviscells = dsf.GetOutput().GetNumberOfCells()
+if nviscells != 356:
+    print "Problem"
+    print "Test expected 356 visible surface polygons but got", \
+          nviscells
+    exit(-1)
+
+#render it so we can look at it
+mapper = vtk.vtkDataSetMapper()
+mapper.SetInputData(sg)
+mapper.SetScalarRange(scalars.GetRange())
+actor = vtk.vtkActor()
+actor.SetMapper(mapper)
+ren = vtk.vtkRenderer()
+ren.AddActor(actor)
+renWin = vtk.vtkRenderWindow()
+renWin.SetSize(400, 400)
+renWin.AddRenderer(ren)
+
+#set position where we can see most of the blanked cells
+cam = ren.GetActiveCamera()
+cam.SetClippingRange(14.0456, 45.4716)
+cam.SetFocalPoint(5, 5, 1.5)
+cam.SetPosition(-19.0905, -6.73006, -6.37738)
+cam.SetViewUp(-0.400229, 0.225459, 0.888248)
+iren = vtk.vtkRenderWindowInteractor()
+iren.SetRenderWindow(renWin)
+iren.Initialize()
+
+#iren.Start()
diff --git a/Filters/Geometry/Testing/Tcl/CMakeLists.txt b/Filters/Geometry/Testing/Tcl/CMakeLists.txt
index 581787d..bf7fd73 100644
--- a/Filters/Geometry/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Geometry/Testing/Tcl/CMakeLists.txt
@@ -1,6 +1,4 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(dataSetSurfaceFilter Graphics)
-  add_test_tcl(geomFilter Graphics)
-  add_test_tcl(officeStreamPoints Graphics)
-  add_test_tcl(rectGrid Graphics)
-endif()
+vtk_add_test_tcl(dataSetSurfaceFilter)
+vtk_add_test_tcl(geomFilter)
+vtk_add_test_tcl(officeStreamPoints)
+vtk_add_test_tcl(rectGrid)
diff --git a/Filters/Geometry/vtkDataSetSurfaceFilter.cxx b/Filters/Geometry/vtkDataSetSurfaceFilter.cxx
index 64dd20d..f0d3870 100644
--- a/Filters/Geometry/vtkDataSetSurfaceFilter.cxx
+++ b/Filters/Geometry/vtkDataSetSurfaceFilter.cxx
@@ -16,6 +16,7 @@
 
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkDoubleArray.h"
 #include "vtkGenericCell.h"
 #include "vtkHexahedron.h"
@@ -37,6 +38,7 @@
 #include "vtkTetra.h"
 #include "vtkUniformGrid.h"
 #include "vtkUnsignedCharArray.h"
+#include "vtkUnstructuredGridBase.h"
 #include "vtkUnstructuredGridGeometryFilter.h"
 #include "vtkUnstructuredGrid.h"
 #include "vtkVoxel.h"
@@ -48,13 +50,22 @@
 
 #include <cassert>
 
-static int sizeofFastQuad(int numPts)
+static inline int sizeofFastQuad(int numPts)
 {
-  // account for size of ptArray
-  return static_cast<int>(sizeof(vtkFastGeomQuad)+(numPts-4)*sizeof(vtkIdType));
+  const int qsize = sizeof(vtkFastGeomQuad);
+  const int sizeId = sizeof(vtkIdType);
+  // If necessary, we create padding after vtkFastGeomQuad such that
+  // the beginning of ids aligns evenly with sizeof(vtkIdType).
+  if (qsize % sizeId == 0)
+    {
+    return static_cast<int>(qsize+numPts*sizeId);
+    }
+  else
+    {
+    return static_cast<int>((qsize/sizeId+1+numPts)*sizeId);
+    }
 }
 
-
 class vtkDataSetSurfaceFilter::vtkEdgeInterpolationMap
 {
 public:
@@ -205,11 +216,18 @@ int vtkDataSetSurfaceFilter::RequestData(
     case VTK_STRUCTURED_GRID:
       {
       vtkStructuredGrid *grid = vtkStructuredGrid::SafeDownCast(input);
-      int* tmpext = grid->GetExtent();
-      ext[0] = tmpext[0]; ext[1] = tmpext[1];
-      ext[2] = tmpext[2]; ext[3] = tmpext[3];
-      ext[4] = tmpext[4]; ext[5] = tmpext[5];
-      return this->StructuredExecute(grid, output, ext, wholeExt);
+      if (grid->GetCellBlanking())
+        {
+        return this->DataSetExecute(grid, output);
+        }
+      else
+        {
+        int* tmpext = grid->GetExtent();
+        ext[0] = tmpext[0]; ext[1] = tmpext[1];
+        ext[2] = tmpext[2]; ext[3] = tmpext[3];
+        ext[4] = tmpext[4]; ext[5] = tmpext[5];
+        return this->StructuredExecute(grid, output, ext, wholeExt);
+        }
       }
     case VTK_UNIFORM_GRID:
     case VTK_STRUCTURED_POINTS:
@@ -1059,12 +1077,14 @@ int vtkDataSetSurfaceFilter::DataSetExecute(vtkDataSet *input,
   vtkCellData *cd = input->GetCellData();
   vtkPointData *outputPD = output->GetPointData();
   vtkCellData *outputCD = output->GetCellData();
-
   if (numCells == 0)
     {
     return 1;
     }
 
+  vtkStructuredGrid *sgridInput = vtkStructuredGrid::SafeDownCast(input);
+  bool mayBlank = sgridInput && sgridInput->GetCellBlanking();
+
   cellIds = vtkIdList::New();
   pts = vtkIdList::New();
 
@@ -1100,6 +1120,10 @@ int vtkDataSetSurfaceFilter::DataSetExecute(vtkDataSet *input,
       }
 
     input->GetCell(cellId,cell);
+    if (mayBlank && !sgridInput->IsCellVisible(cellId))
+      {
+      continue;
+      }
     switch (cell->GetCellDimension())
       {
       // create new points and then cell
@@ -1125,7 +1149,21 @@ int vtkDataSetSurfaceFilter::DataSetExecute(vtkDataSet *input,
           {
           face = cell->GetFace(j);
           input->GetCellNeighbors(cellId, face->PointIds, cellIds);
-          if ( cellIds->GetNumberOfIds() <= 0)
+          bool noNeighbors = cellIds->GetNumberOfIds()<=0;
+          if (!noNeighbors && mayBlank)
+            {
+            //faces with only blank neighbors count as external faces
+            noNeighbors = true;
+            for (vtkIdType ci = 0; ci < cellIds->GetNumberOfIds(); ci++)
+              {
+              if (sgridInput->IsCellVisible(cellIds->GetId(ci)))
+                {
+                noNeighbors = false;
+                break;
+                }
+              }
+            }
+          if ( noNeighbors )
             {
             npts = face->GetNumberOfPoints();
             pts->Reset();
@@ -1249,7 +1287,11 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
                                                      vtkPolyData *output,
                                                      int updateGhostLevel)
 {
-  vtkUnstructuredGrid *input = vtkUnstructuredGrid::SafeDownCast(dataSetInput);
+  vtkUnstructuredGridBase *input =
+      vtkUnstructuredGridBase::SafeDownCast(dataSetInput);
+
+  vtkSmartPointer<vtkCellIterator> cellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
 
   // Before we start doing anything interesting, check if we need handle
   // non-linear cells using sub-division.
@@ -1259,13 +1301,23 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
     // Check to see if the data actually has nonlinear cells.  Handling
     // nonlinear cells adds unnecessary work if we only have linear cells.
     vtkIdType numCells = input->GetNumberOfCells();
-    unsigned char* cellTypes = input->GetCellTypesArray()->GetPointer(0);
-    for (vtkIdType i = 0; i < numCells; i++)
+    if (input->IsHomogeneous())
       {
-      if (!vtkCellTypes::IsLinear(cellTypes[i]))
+      if (numCells >= 1)
         {
-        handleSubdivision = true;
-        break;
+        handleSubdivision = !vtkCellTypes::IsLinear(input->GetCellType(0));
+        }
+      }
+    else
+      {
+      for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+           cellIter->GoToNextCell())
+        {
+        if (!vtkCellTypes::IsLinear(cellIter->GetCellType()))
+          {
+          handleSubdivision = true;
+          break;
+          }
         }
       }
     }
@@ -1289,6 +1341,7 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
     tempInput = vtkSmartPointer<vtkUnstructuredGrid>::New();
     tempInput->ShallowCopy(uggf->GetOutputDataObject(0));
     input = tempInput;
+    cellIter = vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
     }
 
   vtkCellArray *newVerts;
@@ -1297,13 +1350,14 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
   vtkPoints *newPts;
   vtkIdType *ids;
   int progressCount;
-  vtkIdType cellId;
   int i, j;
-  vtkIdType *cellPointer;
   int cellType;
   vtkIdType numPts=input->GetNumberOfPoints();
   vtkIdType numCells=input->GetNumberOfCells();
   vtkGenericCell *cell;
+  vtkIdList *pointIdList;
+  vtkIdType *pointIdArray;
+  vtkIdType *pointIdArrayEnd;
   int numFacePts, numCellPts;
   vtkIdType inPtId, outPtId;
   vtkPointData *inputPD = input->GetPointData();
@@ -1312,7 +1366,6 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
   vtkPointData *outputPD = output->GetPointData();
   vtkCellData *outputCD = output->GetCellData();
   vtkFastGeomQuad *q;
-  unsigned char* cellTypes = input->GetCellTypesArray()->GetPointer(0);
 
   // These are for the default case/
   vtkIdList *pts;
@@ -1376,26 +1429,26 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
     }
 
   // First insert all points.  Points have to come first in poly data.
-  cellPointer = input->GetCells()->GetPointer();
-  for(cellId=0; cellId < numCells; cellId++)
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
     {
-    // Direct access to cells.
-    cellType = cellTypes[cellId];
-    numCellPts = cellPointer[0];
-    ids = cellPointer+1;
-    // Move to the next cell.
-    cellPointer += (1 + *cellPointer);
+    cellType = cellIter->GetCellType();
 
     // A couple of common cases to see if things go faster.
     if (cellType == VTK_VERTEX || cellType == VTK_POLY_VERTEX)
       {
+      pointIdList = cellIter->GetPointIds();
+      numCellPts = pointIdList->GetNumberOfIds();
+      pointIdArray = pointIdList->GetPointer(0);
+      pointIdArrayEnd = pointIdArray + numCellPts;
       newVerts->InsertNextCell(numCellPts);
-      for (i = 0; i < numCellPts; ++i)
+      while (pointIdArray != pointIdArrayEnd)
         {
-        inPtId = ids[i];
-        outPtId = this->GetOutputPointId(inPtId, input, newPts, outputPD);
+        outPtId = this->GetOutputPointId(*(pointIdArray++), input, newPts,
+                                         outputPD);
         newVerts->InsertCellPoint(outPtId);
         }
+      vtkIdType cellId = cellIter->GetCellId();
       this->RecordOrigCellId(this->NumberOfNewCells, cellId);
       outputCD->CopyData(cd, cellId, this->NumberOfNewCells++);
       }
@@ -1409,10 +1462,10 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
 
   // First insert all points lines in output and 3D geometry in hash.
   // Save 2D geometry for second pass.
-  // initialize the pointer to the cells for fast traversal.
-  cellPointer = input->GetCells()->GetPointer();
-  for(cellId=0; cellId < numCells && !abort; cellId++)
+  for(cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal() && !abort;
+      cellIter->GoToNextCell())
     {
+    vtkIdType cellId = cellIter->GetCellId();
     //Progress and abort method support
     if ( progressCount >= progressInterval )
       {
@@ -1423,201 +1476,215 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
       }
     progressCount++;
 
-    // Direct access to cells.
-    cellType = cellTypes[cellId];
-    numCellPts = cellPointer[0];
-    ids = cellPointer+1;
-    // Move to the next cell.
-    cellPointer += (1 + *cellPointer);
-
-    // A couple of common cases to see if things go faster.
-    if (cellType == VTK_VERTEX || cellType == VTK_POLY_VERTEX)
-      {
-      // Do nothing.  This case was handled in the previous loop.
-      }
-    else if (cellType == VTK_LINE || cellType == VTK_POLY_LINE)
-      {
-      newLines->InsertNextCell(numCellPts);
-      for (i = 0; i < numCellPts; ++i)
-        {
-        inPtId = ids[i];
-        outPtId = this->GetOutputPointId(inPtId, input, newPts, outputPD);
-        newLines->InsertCellPoint(outPtId);
-        }
-      this->RecordOrigCellId(this->NumberOfNewCells, cellId);
-      outputCD->CopyData(cd, cellId, this->NumberOfNewCells++);
-      }
-    else if (cellType == VTK_HEXAHEDRON)
-      {
-      this->InsertQuadInHash(ids[0], ids[1], ids[5], ids[4], cellId);
-      this->InsertQuadInHash(ids[0], ids[3], ids[2], ids[1], cellId);
-      this->InsertQuadInHash(ids[0], ids[4], ids[7], ids[3], cellId);
-      this->InsertQuadInHash(ids[1], ids[2], ids[6], ids[5], cellId);
-      this->InsertQuadInHash(ids[2], ids[3], ids[7], ids[6], cellId);
-      this->InsertQuadInHash(ids[4], ids[5], ids[6], ids[7], cellId);
-      }
-    else if (cellType == VTK_VOXEL)
-      {
-      this->InsertQuadInHash(ids[0], ids[1], ids[5], ids[4], cellId);
-      this->InsertQuadInHash(ids[0], ids[2], ids[3], ids[1], cellId);
-      this->InsertQuadInHash(ids[0], ids[4], ids[6], ids[2], cellId);
-      this->InsertQuadInHash(ids[1], ids[3], ids[7], ids[5], cellId);
-      this->InsertQuadInHash(ids[2], ids[6], ids[7], ids[3], cellId);
-      this->InsertQuadInHash(ids[4], ids[5], ids[7], ids[6], cellId);
-      }
-    else if (cellType == VTK_TETRA)
-      {
-      this->InsertTriInHash(ids[0], ids[1], ids[3], cellId, 2);
-      this->InsertTriInHash(ids[0], ids[2], ids[1], cellId, 3);
-      this->InsertTriInHash(ids[0], ids[3], ids[2], cellId, 1);
-      this->InsertTriInHash(ids[1], ids[2], ids[3], cellId, 0);
-      }
-    else if (cellType == VTK_PENTAGONAL_PRISM)
-      {
-      // The quads :
-      this->InsertQuadInHash (ids[0], ids[1], ids[6], ids[5], cellId);
-      this->InsertQuadInHash (ids[1], ids[2], ids[7], ids[6], cellId);
-      this->InsertQuadInHash (ids[2], ids[3], ids[8], ids[7], cellId);
-      this->InsertQuadInHash (ids[3], ids[4], ids[9], ids[8], cellId);
-      this->InsertQuadInHash (ids[4], ids[0], ids[5], ids[9], cellId);
-      this->InsertPolygonInHash(ids, 5, cellId);
-      this->InsertPolygonInHash(&ids[5], 5, cellId);
-      }
-    else if (cellType == VTK_HEXAGONAL_PRISM)
+    cellType = cellIter->GetCellType();
+    switch (cellType)
       {
-      // The quads :
-      this->InsertQuadInHash(ids[0], ids[1], ids[7], ids[6], cellId);
-      this->InsertQuadInHash(ids[1], ids[2], ids[8], ids[7], cellId);
-      this->InsertQuadInHash(ids[2], ids[3], ids[9], ids[8], cellId);
-      this->InsertQuadInHash(ids[3], ids[4], ids[10], ids[9], cellId);
-      this->InsertQuadInHash(ids[4], ids[5], ids[11], ids[10], cellId);
-      this->InsertQuadInHash(ids[5], ids[0], ids[6], ids[11], cellId);
-      this->InsertPolygonInHash (ids, 6, cellId);
-      this->InsertPolygonInHash (&ids[6], 6, cellId);
-      }
-    else if (cellType == VTK_PIXEL || cellType == VTK_QUAD ||
-             cellType == VTK_TRIANGLE || cellType == VTK_POLYGON ||
-             cellType == VTK_TRIANGLE_STRIP ||
-             cellType == VTK_QUADRATIC_TRIANGLE ||
-             cellType == VTK_BIQUADRATIC_TRIANGLE ||
-             cellType == VTK_QUADRATIC_QUAD ||
-             cellType == VTK_QUADRATIC_LINEAR_QUAD ||
-             cellType == VTK_BIQUADRATIC_QUAD )
-      { // save 2D cells for second pass
-      flag2D = 1;
-      }
-    else
-      // Default way of getting faces. Differentiates between linear
-      // and higher order cells.
-      {
-      input->GetCell(cellId,cell);
-      if ( cell->IsLinear() )
-        {
-        if (cell->GetCellDimension() == 3)
-          {
-          int numFaces = cell->GetNumberOfFaces();
-          for (j=0; j < numFaces; j++)
-            {
-            face = cell->GetFace(j);
-            numFacePts = face->GetNumberOfPoints();
-            if (numFacePts == 4)
-              {
-              this->InsertQuadInHash(face->PointIds->GetId(0),
-                                     face->PointIds->GetId(1),
-                                     face->PointIds->GetId(2),
-                                     face->PointIds->GetId(3), cellId);
-              }
-            else if (numFacePts == 3)
-              {
-              this->InsertTriInHash(face->PointIds->GetId(0),
-                                    face->PointIds->GetId(1),
-                                    face->PointIds->GetId(2), cellId);
-              }
-            else
-              {
-              this->InsertPolygonInHash(face->PointIds->GetPointer(0),
-                                        face->PointIds->GetNumberOfIds(),
-                                        cellId);
-              }
-            } // for all cell faces
-          } // if 3D
-        else
+      case VTK_VERTEX:
+      case VTK_POLY_VERTEX:
+        // Do nothing -- these were handled previously.
+        break;
+
+      case VTK_LINE:
+      case VTK_POLY_LINE:
+        pointIdList = cellIter->GetPointIds();
+        numCellPts = pointIdList->GetNumberOfIds();
+        pointIdArray = pointIdList->GetPointer(0);
+        pointIdArrayEnd = pointIdArray + numCellPts;
+
+        newLines->InsertNextCell(numCellPts);
+        while (pointIdArray != pointIdArrayEnd)
           {
-          vtkDebugMacro("Missing cell type.");
+          outPtId = this->GetOutputPointId(*(pointIdArray++), input, newPts,
+                                           outputPD);
+          newLines->InsertCellPoint(outPtId);
           }
-        } // a linear cell type
 
-      else //process nonlinear cells via triangulation
+        this->RecordOrigCellId(this->NumberOfNewCells, cellId);
+        outputCD->CopyData(cd, cellId, this->NumberOfNewCells++);
+        break;
+
+      case VTK_HEXAHEDRON:
+        pointIdList = cellIter->GetPointIds();
+        ids = pointIdList->GetPointer(0);
+        this->InsertQuadInHash(ids[0], ids[1], ids[5], ids[4], cellId);
+        this->InsertQuadInHash(ids[0], ids[3], ids[2], ids[1], cellId);
+        this->InsertQuadInHash(ids[0], ids[4], ids[7], ids[3], cellId);
+        this->InsertQuadInHash(ids[1], ids[2], ids[6], ids[5], cellId);
+        this->InsertQuadInHash(ids[2], ids[3], ids[7], ids[6], cellId);
+        this->InsertQuadInHash(ids[4], ids[5], ids[6], ids[7], cellId);
+        break;
+
+      case VTK_VOXEL:
+        pointIdList = cellIter->GetPointIds();
+        ids = pointIdList->GetPointer(0);
+        this->InsertQuadInHash(ids[0], ids[1], ids[5], ids[4], cellId);
+        this->InsertQuadInHash(ids[0], ids[2], ids[3], ids[1], cellId);
+        this->InsertQuadInHash(ids[0], ids[4], ids[6], ids[2], cellId);
+        this->InsertQuadInHash(ids[1], ids[3], ids[7], ids[5], cellId);
+        this->InsertQuadInHash(ids[2], ids[6], ids[7], ids[3], cellId);
+        this->InsertQuadInHash(ids[4], ids[5], ids[7], ids[6], cellId);
+        break;
+
+      case VTK_TETRA:
+        pointIdList = cellIter->GetPointIds();
+        ids = pointIdList->GetPointer(0);
+        this->InsertTriInHash(ids[0], ids[1], ids[3], cellId, 2);
+        this->InsertTriInHash(ids[0], ids[2], ids[1], cellId, 3);
+        this->InsertTriInHash(ids[0], ids[3], ids[2], cellId, 1);
+        this->InsertTriInHash(ids[1], ids[2], ids[3], cellId, 0);
+        break;
+
+      case VTK_PENTAGONAL_PRISM:
+        pointIdList = cellIter->GetPointIds();
+        ids = pointIdList->GetPointer(0);
+        this->InsertQuadInHash (ids[0], ids[1], ids[6], ids[5], cellId);
+        this->InsertQuadInHash (ids[1], ids[2], ids[7], ids[6], cellId);
+        this->InsertQuadInHash (ids[2], ids[3], ids[8], ids[7], cellId);
+        this->InsertQuadInHash (ids[3], ids[4], ids[9], ids[8], cellId);
+        this->InsertQuadInHash (ids[4], ids[0], ids[5], ids[9], cellId);
+        this->InsertPolygonInHash(ids, 5, cellId);
+        this->InsertPolygonInHash(&ids[5], 5, cellId);
+        break;
+
+      case VTK_HEXAGONAL_PRISM:
+        pointIdList = cellIter->GetPointIds();
+        ids = pointIdList->GetPointer(0);
+        this->InsertQuadInHash(ids[0], ids[1], ids[7], ids[6], cellId);
+        this->InsertQuadInHash(ids[1], ids[2], ids[8], ids[7], cellId);
+        this->InsertQuadInHash(ids[2], ids[3], ids[9], ids[8], cellId);
+        this->InsertQuadInHash(ids[3], ids[4], ids[10], ids[9], cellId);
+        this->InsertQuadInHash(ids[4], ids[5], ids[11], ids[10], cellId);
+        this->InsertQuadInHash(ids[5], ids[0], ids[6], ids[11], cellId);
+        this->InsertPolygonInHash (ids, 6, cellId);
+        this->InsertPolygonInHash (&ids[6], 6, cellId);
+        break;
+
+      case VTK_PIXEL:
+      case VTK_QUAD:
+      case VTK_TRIANGLE:
+      case VTK_POLYGON:
+      case VTK_TRIANGLE_STRIP:
+      case VTK_QUADRATIC_TRIANGLE:
+      case VTK_BIQUADRATIC_TRIANGLE:
+      case VTK_QUADRATIC_QUAD:
+      case VTK_QUADRATIC_LINEAR_QUAD:
+      case VTK_BIQUADRATIC_QUAD:
+        // save 2D cells for third pass
+        flag2D = 1;
+        break;
+
+      default:
         {
-        if ( cell->GetCellDimension() == 1 )
+        // Default way of getting faces. Differentiates between linear
+        // and higher order cells.
+        cellIter->GetCell(cell);
+        if ( cell->IsLinear() )
           {
-          cell->Triangulate(0,pts,coords);
-          for (i=0; i < pts->GetNumberOfIds(); i+=2)
+          if (cell->GetCellDimension() == 3)
             {
-            newLines->InsertNextCell(2);
-            inPtId = pts->GetId(i);
-            this->RecordOrigCellId(this->NumberOfNewCells, cellId);
-            outputCD->CopyData( cd, cellId, this->NumberOfNewCells++ );
-            outPtId = this->GetOutputPointId(inPtId, input, newPts, outputPD);
-            newLines->InsertCellPoint(outPtId);
-            inPtId = pts->GetId(i+1);
-            outPtId = this->GetOutputPointId(inPtId, input, newPts, outputPD);
-            newLines->InsertCellPoint(outPtId);
+            int numFaces = cell->GetNumberOfFaces();
+            for (j=0; j < numFaces; j++)
+              {
+              face = cell->GetFace(j);
+              numFacePts = face->GetNumberOfPoints();
+              if (numFacePts == 4)
+                {
+                this->InsertQuadInHash(face->PointIds->GetId(0),
+                                       face->PointIds->GetId(1),
+                                       face->PointIds->GetId(2),
+                                       face->PointIds->GetId(3), cellId);
+                }
+              else if (numFacePts == 3)
+                {
+                this->InsertTriInHash(face->PointIds->GetId(0),
+                                      face->PointIds->GetId(1),
+                                      face->PointIds->GetId(2), cellId);
+                }
+              else
+                {
+                this->InsertPolygonInHash(face->PointIds->GetPointer(0),
+                                          face->PointIds->GetNumberOfIds(),
+                                          cellId);
+                }
+              } // for all cell faces
+            } // if 3D
+          else
+            {
+            vtkDebugMacro("Missing cell type.");
             }
-          }
-        else if ( cell->GetCellDimension() == 2 )
+          } // a linear cell type
+        else //process nonlinear cells via triangulation
           {
-          vtkWarningMacro(<< "2-D nonlinear cells must be processed with all other 2-D cells.");
-          }
-        else //3D nonlinear cell
-          {
-          vtkIdList *cellIds = vtkIdList::New();
-          int numFaces = cell->GetNumberOfFaces();
-          for (j=0; j < numFaces; j++)
+          if ( cell->GetCellDimension() == 1 )
             {
-            face = cell->GetFace(j);
-            input->GetCellNeighbors(cellId, face->PointIds, cellIds);
-            if ( cellIds->GetNumberOfIds() <= 0)
+            cell->Triangulate(0,pts,coords);
+            for (i=0; i < pts->GetNumberOfIds(); i+=2)
               {
-              // FIXME: Face could not be consistent. vtkOrderedTriangulator is a better option
-              if (this->NonlinearSubdivisionLevel >= 1)
+              newLines->InsertNextCell(2);
+              inPtId = pts->GetId(i);
+              this->RecordOrigCellId(this->NumberOfNewCells, cellId);
+              outputCD->CopyData( cd, cellId, this->NumberOfNewCells++ );
+              outPtId = this->GetOutputPointId(inPtId, input, newPts, outputPD);
+              newLines->InsertCellPoint(outPtId);
+              inPtId = pts->GetId(i+1);
+              outPtId = this->GetOutputPointId(inPtId, input, newPts, outputPD);
+              newLines->InsertCellPoint(outPtId);
+              }
+            }
+          else if ( cell->GetCellDimension() == 2 )
+            {
+            vtkWarningMacro(<< "2-D nonlinear cells must be processed with all other 2-D cells.");
+            }
+          else //3D nonlinear cell
+            {
+            vtkIdList *cellIds = vtkIdList::New();
+            int numFaces = cell->GetNumberOfFaces();
+            for (j=0; j < numFaces; j++)
+              {
+              face = cell->GetFace(j);
+              input->GetCellNeighbors(cellId, face->PointIds, cellIds);
+              if ( cellIds->GetNumberOfIds() <= 0)
                 {
-                // TODO: Handle NonlinearSubdivisionLevel > 1 correctly.
-                face->Triangulate(0,pts,coords);
-                for (i=0; i < pts->GetNumberOfIds(); i+=3)
+                // FIXME: Face could not be consistent. vtkOrderedTriangulator is a better option
+                if (this->NonlinearSubdivisionLevel >= 1)
                   {
-                  this->InsertTriInHash(pts->GetId(i), pts->GetId(i+1),
-                                        pts->GetId(i+2), cellId);
+                  // TODO: Handle NonlinearSubdivisionLevel > 1 correctly.
+                  face->Triangulate(0,pts,coords);
+                  for (i=0; i < pts->GetNumberOfIds(); i+=3)
+                    {
+                    this->InsertTriInHash(pts->GetId(i), pts->GetId(i+1),
+                                          pts->GetId(i+2), cellId);
+                    }
                   }
-                }
-              else
-                {
-                switch (face->GetCellType())
+                else
                   {
-                  case VTK_QUADRATIC_TRIANGLE:
-                    this->InsertTriInHash(face->PointIds->GetId(0),
-                                          face->PointIds->GetId(1),
-                                          face->PointIds->GetId(2), cellId);
-                    break;
-                  case VTK_QUADRATIC_QUAD:
-                  case VTK_BIQUADRATIC_QUAD:
-                  case VTK_QUADRATIC_LINEAR_QUAD:
-                    this->InsertQuadInHash(face->PointIds->GetId(0),
-                                           face->PointIds->GetId(1),
-                                           face->PointIds->GetId(2),
-                                           face->PointIds->GetId(3), cellId);
-                    break;
-                  default:
-                    vtkWarningMacro(<< "Encountered unknown nonlinear face.");
-                    break;
-                  } // switch cell type
-                } // subdivision level
-              } // cell has ids
-            } // for faces
-          cellIds->Delete();
-          } //3d cell
-        } //nonlinear cell
-      } // Cell type else.
+                  switch (face->GetCellType())
+                    {
+                    case VTK_QUADRATIC_TRIANGLE:
+                      this->InsertTriInHash(face->PointIds->GetId(0),
+                                            face->PointIds->GetId(1),
+                                            face->PointIds->GetId(2), cellId);
+                      break;
+                    case VTK_QUADRATIC_QUAD:
+                    case VTK_BIQUADRATIC_QUAD:
+                    case VTK_QUADRATIC_LINEAR_QUAD:
+                      this->InsertQuadInHash(face->PointIds->GetId(0),
+                                             face->PointIds->GetId(1),
+                                             face->PointIds->GetId(2),
+                                             face->PointIds->GetId(3), cellId);
+                      break;
+                    default:
+                      vtkWarningMacro(<< "Encountered unknown nonlinear face.");
+                      break;
+                    } // switch cell type
+                  } // subdivision level
+                } // cell has ids
+              } // for faces
+            cellIds->Delete();
+            } //3d cell
+          } //nonlinear cell
+        } // default switch case
+      } // switch(cellType)
     } // for all cells.
 
   // It would be possible to add these (except for polygons with 5+ sides)
@@ -1626,16 +1693,13 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
 
   // Now insert 2D cells.  Because of poly data's (cell data) ordering,
   // the 2D cells have to come after points and lines.
-  // initialize the pointer to the cells for fast traversal.
-  cellPointer = input->GetCells()->GetPointer();
-  for(cellId=0; cellId < numCells && !abort && flag2D; cellId++)
+  for(cellIter->InitTraversal();
+      !cellIter->IsDoneWithTraversal() && !abort && flag2D;
+      cellIter->GoToNextCell())
     {
-    // Direct acces to cells.
-    cellType = input->GetCellType(cellId);
-    numCellPts = cellPointer[0];
-    ids = cellPointer+1;
-    // Move to the next cell.
-    cellPointer += (1 + *cellPointer);
+    vtkIdType cellId = cellIter->GetCellId();
+    cellType = cellIter->GetCellType();
+    numCellPts = cellIter->GetNumberOfPoints();
 
     // If we have a quadratic face and our subdivision level is zero, just treat
     // it as a linear cell.  This should work so long as the first points of the
@@ -1659,6 +1723,8 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
     // A couple of common cases to see if things go faster.
     if (cellType == VTK_PIXEL)
       { // Do we really want to insert the 2D cells into a hash?
+      pointIdList = cellIter->GetPointIds();
+      ids = pointIdList->GetPointer(0);
       pts->Reset();
       pts->InsertId(0, this->GetOutputPointId(ids[0], input, newPts, outputPD));
       pts->InsertId(1, this->GetOutputPointId(ids[1], input, newPts, outputPD));
@@ -1670,6 +1736,8 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
       }
     else if (cellType == VTK_POLYGON || cellType == VTK_TRIANGLE || cellType == VTK_QUAD)
       {
+      pointIdList = cellIter->GetPointIds();
+      ids = pointIdList->GetPointer(0);
       pts->Reset();
       for ( i=0; i < numCellPts; i++)
         {
@@ -1683,6 +1751,8 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
       }
     else if (cellType == VTK_TRIANGLE_STRIP)
       {
+      pointIdList = cellIter->GetPointIds();
+      ids = pointIdList->GetPointer(0);
       // Change strips to triangles so we do not have to worry about order.
       int toggle = 0;
       vtkIdType ptIds[3];
@@ -1710,7 +1780,7 @@ int vtkDataSetSurfaceFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
       {
       // Note: we should not be here if this->NonlinearSubdivisionLevel is less
       // than 1.  See the check above.
-      input->GetCell( cellId, cell );
+      cellIter->GetCell(cell);
       cell->Triangulate( 0, pts, coords );
       // Copy the level 1 subdivision points (which also exist in the input and
   // can therefore just be copied over).  Note that the output of Triangulate
@@ -2063,7 +2133,7 @@ void vtkDataSetSurfaceFilter::InsertPolygonInHash(vtkIdType* ids,
 
   // find the index to the smallest id
   vtkIdType offset = 0;
-  for(int i=1; i<numPts; i++)
+  for(int i=0; i < numPts; i++)
     {
     if(ids[i] < ids[offset])
       {
@@ -2089,11 +2159,11 @@ void vtkDataSetSurfaceFilter::InsertPolygonInHash(vtkIdType* ids,
     bool match = true;
     if (numPts == quad->numPts)
       {
-      if ( tab[1] == quad->ptArray[1])
+      if ( tab[0] == quad->ptArray[0])
         {
         // if the first two points match loop through forwards
         // checking all points
-        for (int i = 2; i < numPts; i++)
+        for (int i = 1; i < numPts; i++)
           {
           if ( tab[i] != quad->ptArray[i])
             {
@@ -2102,13 +2172,13 @@ void vtkDataSetSurfaceFilter::InsertPolygonInHash(vtkIdType* ids,
             }
           }
         }
-      else if (tab[numPts-1] == quad->ptArray[1])
+      else if (tab[numPts-1] == quad->ptArray[0])
         {
         // the first two points match with the opposite sense.
         // loop through comparing the correct sense
-        for (int i = 2; i < numPts; i++)
+        for (int i = 1; i < numPts; i++)
           {
-          if ( tab[numPts - i] != quad->ptArray[i])
+          if ( tab[numPts - i - 1] != quad->ptArray[i])
             {
             match = false;
             break;
@@ -2255,6 +2325,19 @@ vtkFastGeomQuad* vtkDataSetSurfaceFilter::NewFastGeomQuad(int numPts)
     (this->FastGeomQuadArrays[this->NextArrayIndex] + this->NextQuadIndex);
   q->numPts = numPts;
 
+  const int qsize = sizeof(vtkFastGeomQuad);
+  const int sizeId = sizeof(vtkIdType);
+  // If necessary, we create padding after vtkFastGeomQuad such that
+  // the beginning of ids aligns evenly with sizeof(vtkIdType).
+  if (qsize % sizeId == 0)
+    {
+    q->ptArray = (vtkIdType*)q + qsize/sizeId;
+    }
+  else
+    {
+    q->ptArray = (vtkIdType*)q + qsize/sizeId + 1;
+    }
+
   this->NextQuadIndex += polySize;
 
   return q;
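
A brief aside on the traversal change running through the vtkDataSetSurfaceFilter.cxx hunks above: instead of walking the grid's raw cell-type and connectivity arrays (GetCellTypesArray(), GetCells()->GetPointer()), the filter now drives every pass through a vtkCellIterator obtained from NewCellIterator(), which is what lets it accept any vtkUnstructuredGridBase implementation. A minimal standalone sketch of that pattern, assuming an empty and purely illustrative vtkUnstructuredGrid as the input:

// Minimal sketch (not part of the commit): the vtkCellIterator traversal
// pattern used above, applied to an empty, purely illustrative grid.
#include <vtkCellIterator.h>
#include <vtkIdList.h>
#include <vtkNew.h>
#include <vtkSmartPointer.h>
#include <vtkUnstructuredGrid.h>
#include <iostream>

int main()
{
  vtkNew<vtkUnstructuredGrid> grid; // stands in for any vtkUnstructuredGridBase
  vtkSmartPointer<vtkCellIterator> iter =
      vtkSmartPointer<vtkCellIterator>::Take(grid->NewCellIterator());
  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextCell())
    {
    // GetCellType()/GetPointIds() replace the old cellTypes[cellId] and
    // cellPointer arithmetic; GetCellId() recovers the id when needed.
    std::cout << "cell " << iter->GetCellId()
              << " type " << iter->GetCellType()
              << " npts " << iter->GetPointIds()->GetNumberOfIds() << "\n";
    }
  return 0;
}
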
diff --git a/Filters/Geometry/vtkDataSetSurfaceFilter.h b/Filters/Geometry/vtkDataSetSurfaceFilter.h
index 43cbcc0..341f6e5 100644
--- a/Filters/Geometry/vtkDataSetSurfaceFilter.h
+++ b/Filters/Geometry/vtkDataSetSurfaceFilter.h
@@ -40,7 +40,7 @@ struct vtkFastGeomQuadStruct
   struct vtkFastGeomQuadStruct *Next;
   vtkIdType SourceId;
   int numPts;
-  vtkIdType ptArray[4]; // actually a variable length array.  MUST be last
+  vtkIdType* ptArray;
 };
 typedef struct vtkFastGeomQuadStruct vtkFastGeomQuad;
 //ETX
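
The header hunk just above replaces vtkFastGeomQuad's fixed trailing ptArray[4] with a bare pointer; the matching NewFastGeomQuad hunk earlier stores the point ids directly behind the struct and rounds the offset up so they begin on a sizeof(vtkIdType) boundary. A small self-contained sketch of that rounding, using a hypothetical Quad struct and a 64-bit id type as stand-ins:

// Minimal sketch (illustrative only): padding an id array so it begins on an
// id-size boundary immediately after a variable-length header struct.
#include <cstddef>
#include <iostream>

typedef long long IdType;   // stand-in for vtkIdType

struct Quad                 // hypothetical stand-in for vtkFastGeomQuad
{
  Quad *Next;
  IdType SourceId;
  int numPts;
  IdType *ptArray;          // will point just past the header, aligned
};

int main()
{
  const std::size_t qsize = sizeof(Quad);
  const std::size_t sizeId = sizeof(IdType);
  // Whole IdType slots needed to cover the header; round up when qsize is not
  // an exact multiple of sizeId -- the same arithmetic as in NewFastGeomQuad.
  std::size_t slots = qsize / sizeId + (qsize % sizeId == 0 ? 0 : 1);
  std::cout << "ids start " << slots * sizeId << " bytes after the quad\n";
  return 0;
}
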
diff --git a/Filters/Geometry/vtkGeometryFilter.cxx b/Filters/Geometry/vtkGeometryFilter.cxx
index d37087e..2172021 100644
--- a/Filters/Geometry/vtkGeometryFilter.cxx
+++ b/Filters/Geometry/vtkGeometryFilter.cxx
@@ -954,6 +954,7 @@ void vtkGeometryFilter::UnstructuredGridExecute(vtkDataSet *dataSetInput,
         case VTK_CUBIC_LINE:
         case VTK_QUADRATIC_TRIANGLE:
         case VTK_QUADRATIC_QUAD:
+        case VTK_QUADRATIC_POLYGON:
         case VTK_QUADRATIC_TETRA:
         case VTK_QUADRATIC_HEXAHEDRON:
         case VTK_QUADRATIC_WEDGE:
diff --git a/Filters/Geometry/vtkImageDataGeometryFilter.h b/Filters/Geometry/vtkImageDataGeometryFilter.h
index 0a565e6..362b105 100644
--- a/Filters/Geometry/vtkImageDataGeometryFilter.h
+++ b/Filters/Geometry/vtkImageDataGeometryFilter.h
@@ -79,7 +79,7 @@ public:
 
 protected:
   vtkImageDataGeometryFilter();
-  ~vtkImageDataGeometryFilter() {};
+  ~vtkImageDataGeometryFilter() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
diff --git a/Filters/Geometry/vtkRectilinearGridGeometryFilter.h b/Filters/Geometry/vtkRectilinearGridGeometryFilter.h
index 903f820..f9d72f7 100644
--- a/Filters/Geometry/vtkRectilinearGridGeometryFilter.h
+++ b/Filters/Geometry/vtkRectilinearGridGeometryFilter.h
@@ -63,7 +63,7 @@ public:
 
 protected:
   vtkRectilinearGridGeometryFilter();
-  ~vtkRectilinearGridGeometryFilter() {};
+  ~vtkRectilinearGridGeometryFilter() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
diff --git a/Filters/Geometry/vtkStructuredAMRNeighbor.cxx b/Filters/Geometry/vtkStructuredAMRNeighbor.cxx
index 8033bcb..9b21326 100644
--- a/Filters/Geometry/vtkStructuredAMRNeighbor.cxx
+++ b/Filters/Geometry/vtkStructuredAMRNeighbor.cxx
@@ -66,13 +66,6 @@ vtkStructuredAMRNeighbor::vtkStructuredAMRNeighbor(
     } // END for all dimensions
 }
 
-
-//-----------------------------------------------------------------------------
-vtkStructuredAMRNeighbor::~vtkStructuredAMRNeighbor()
-{
-  // TODO Auto-generated destructor stub
-}
-
 //-----------------------------------------------------------------------------
 vtkStructuredAMRNeighbor& vtkStructuredAMRNeighbor::operator=(
       const vtkStructuredAMRNeighbor &N)
diff --git a/Filters/Geometry/vtkStructuredAMRNeighbor.h b/Filters/Geometry/vtkStructuredAMRNeighbor.h
index 2a10811..ddf1fb1 100644
--- a/Filters/Geometry/vtkStructuredAMRNeighbor.h
+++ b/Filters/Geometry/vtkStructuredAMRNeighbor.h
@@ -68,8 +68,13 @@ public:
      const int relationShip);
 
   // Description:
+  // Copy constructor.
+  vtkStructuredAMRNeighbor(const vtkStructuredAMRNeighbor &N) :
+    vtkStructuredNeighbor(N) { *this = N; }
+
+  // Description:
   // Destructor.
-  virtual ~vtkStructuredAMRNeighbor();
+  ~vtkStructuredAMRNeighbor() {}
 
   // Description:
   // Overload assignment operator.
diff --git a/Filters/Geometry/vtkStructuredPointsGeometryFilter.h b/Filters/Geometry/vtkStructuredPointsGeometryFilter.h
index 283f1fd..e0106c6 100644
--- a/Filters/Geometry/vtkStructuredPointsGeometryFilter.h
+++ b/Filters/Geometry/vtkStructuredPointsGeometryFilter.h
@@ -34,7 +34,7 @@ public:
 
 protected:
   vtkStructuredPointsGeometryFilter();
-  ~vtkStructuredPointsGeometryFilter() {};
+  ~vtkStructuredPointsGeometryFilter() {}
 
 private:
   vtkStructuredPointsGeometryFilter(const vtkStructuredPointsGeometryFilter&); // Not implemented
diff --git a/Filters/Geometry/vtkUnstructuredGridGeometryFilter.cxx b/Filters/Geometry/vtkUnstructuredGridGeometryFilter.cxx
index b91adda..fcd478a 100644
--- a/Filters/Geometry/vtkUnstructuredGridGeometryFilter.cxx
+++ b/Filters/Geometry/vtkUnstructuredGridGeometryFilter.cxx
@@ -16,6 +16,7 @@
 
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkGenericCell.h"
 #include "vtkHexagonalPrism.h"
 #include "vtkHexahedron.h"
@@ -49,7 +50,7 @@
 
 
 #include <vector>
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkUnstructuredGridGeometryFilter);
 
@@ -796,8 +797,9 @@ int vtkUnstructuredGridGeometryFilter::RequestData(
   vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
   vtkInformation *outInfo = outputVector->GetInformationObject(0);
 
-  // get the input and output
-  vtkUnstructuredGrid *input = vtkUnstructuredGrid::SafeDownCast(
+  // get the input and output. Input may just have the UnstructuredGridBase
+  // interface, but output should be an unstructured grid.
+  vtkUnstructuredGridBase *input = vtkUnstructuredGridBase::SafeDownCast(
     inInfo->Get(vtkDataObject::DATA_OBJECT()));
   vtkUnstructuredGrid *output = vtkUnstructuredGrid::SafeDownCast(
     outInfo->Get(vtkDataObject::DATA_OBJECT()));
@@ -815,7 +817,8 @@ int vtkUnstructuredGridGeometryFilter::RequestData(
   vtkCellData *cd=input->GetCellData();
   vtkIdType numPts=input->GetNumberOfPoints();
   vtkPoints *inPts=input->GetPoints();
-  vtkCellArray *connectivity=input->GetCells();
+  vtkSmartPointer<vtkCellIterator> cellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
 
   // Output
   vtkPointData *outputPD=output->GetPointData();
@@ -870,10 +873,12 @@ int vtkUnstructuredGridGeometryFilter::RequestData(
   // Loop over the cells determining what's visible
   if(!allVisible)
     {
-    cellId=0;
-    connectivity->InitTraversal();
-    while(connectivity->GetNextCell(npts,pts))
+    for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+         cellIter->GoToNextCell())
       {
+      cellId = cellIter->GetCellId();
+      npts = cellIter->GetNumberOfPoints();
+      pts = cellIter->GetPointIds()->GetPointer(0);
       if((cellGhostLevels!=0 && cellGhostLevels[cellId] > updateLevel)||
          (this->CellClipping && (cellId < this->CellMinimum ||
                                  cellId > this->CellMaximum)) )
@@ -902,7 +907,6 @@ int vtkUnstructuredGridGeometryFilter::RequestData(
           ++i;
           }//for each point
         }//if point clipping needs checking
-      ++cellId;
       }//for all cells
     }//if not all visible
 
@@ -959,16 +963,15 @@ int vtkUnstructuredGridGeometryFilter::RequestData(
   vtkPoolManager<vtkSurfel> *pool=new vtkPoolManager<vtkSurfel>;
   pool->Init();
   this->HashTable=new vtkHashTableOfSurfels(numPts,pool);
-  unsigned char* cellTypes = input->GetCellTypesArray()->GetPointer(0);
-
-  connectivity->InitTraversal();
 
   vtkIdType ptId;
   vtkIdType newPtId;
   vtkIdType newCellId;
 
-  for(cellId=0; cellId<numCells && !abort; cellId++)
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal() && !abort;
+       cellIter->GoToNextCell())
     {
+    cellId = cellIter->GetCellId();
     //Progress and abort method support
     if ( progressCount >= progressInterval )
       {
@@ -981,10 +984,11 @@ int vtkUnstructuredGridGeometryFilter::RequestData(
 
     vtkIdType points[VTK_MAXIMUM_NUMBER_OF_POINTS];
 
-    connectivity->GetNextCell(npts,pts);
+    npts = cellIter->GetNumberOfPoints();
+    pts = cellIter->GetPointIds()->GetPointer(0);
     if ( allVisible || cellVis[cellId] )
       {
-      int cellType=cellTypes[cellId];
+      int cellType = cellIter->GetCellType();
       if((cellType>=VTK_EMPTY_CELL && cellType<=VTK_QUAD)
          ||(cellType>=VTK_QUADRATIC_EDGE && cellType<=VTK_QUADRATIC_QUAD)
          ||(cellType==VTK_BIQUADRATIC_QUAD)
@@ -1523,7 +1527,8 @@ int vtkUnstructuredGridGeometryFilter::FillInputPortInformation(
   int,
   vtkInformation *info)
 {
-  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkUnstructuredGrid");
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(),
+            "vtkUnstructuredGridBase");
   return 1;
 }
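
The hunk above widens the declared input type from vtkUnstructuredGrid to vtkUnstructuredGridBase, so the geometry filter advertises that it works on any grid implementing the base interface. A sketch of that declaration pattern in an illustrative subclass (the class name and its vtkUnstructuredGridAlgorithm base are assumptions here; type macros and New() are omitted):

// Minimal sketch (illustrative, not the actual filter): declaring that a
// filter accepts any vtkUnstructuredGridBase via FillInputPortInformation.
#include <vtkInformation.h>
#include <vtkUnstructuredGridAlgorithm.h>

class ExampleGeometryFilter : public vtkUnstructuredGridAlgorithm
{
protected:
  virtual int FillInputPortInformation(int vtkNotUsed(port), vtkInformation *info)
  {
    // Any concrete subclass of vtkUnstructuredGridBase (vtkUnstructuredGrid,
    // mapped grids, ...) now satisfies the input requirement.
    info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(),
              "vtkUnstructuredGridBase");
    return 1;
  }
};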
 
diff --git a/Filters/Hybrid/Testing/Cxx/CMakeLists.txt b/Filters/Hybrid/Testing/Cxx/CMakeLists.txt
index f2e03ad..aee509f 100644
--- a/Filters/Hybrid/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Hybrid/Testing/Cxx/CMakeLists.txt
@@ -1,54 +1,10 @@
-Set(MyTests
+vtk_add_test_cxx(
+  TemporalStatistics.cxx
   # TestBSplineWarp.cxx           # Fixme after vtkImageViewer deps
   TestPolyDataSilhouette.cxx
-  TestTemporalCacheSimple.cxx
-  TestTemporalCacheTemporal.cxx
+  TestProcrustesAlignmentFilter.cxx,NO_VALID
+  TestTemporalCacheSimple.cxx,NO_VALID
+  TestTemporalCacheTemporal.cxx,NO_VALID
   TestTemporalFractal.cxx
-  TemporalStatistics.cxx
-)
-
-include(vtkTestingObjectFactory)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# eventually we want to move this in the loop, but we need this
-# special case because the base line image is in Baseline/Parallel
-set(temporalfractal-test TestTemporalFractal.cxx)
-set(temporalstat-test TemporalStatistics.cxx)
-
-if(VTK_DATA_ROOT)
-  get_filename_component(TName ${temporalfractal-test} NAME_WE)
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-    -D ${VTK_DATA_ROOT}
-    -T ${VTK_TEST_OUTPUT_DIR}
-    -V Baseline/Parallel/${TName}.png)
-
-  get_filename_component(TName ${temporalstat-test} NAME_WE)
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-    -D ${VTK_DATA_ROOT}
-    -T ${VTK_TEST_OUTPUT_DIR}
-    -V Baseline/Graphics/${TName}.png)
-endif()
-list(REMOVE_ITEM TestsToRun ${temporalfractal-test})
-list(REMOVE_ITEM TestsToRun ${temporalstat-test})
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Hybrid/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Filters/Hybrid/Testing/Cxx/TestProcrustesAlignmentFilter.cxx b/Filters/Hybrid/Testing/Cxx/TestProcrustesAlignmentFilter.cxx
new file mode 100644
index 0000000..29e5e64
--- /dev/null
+++ b/Filters/Hybrid/Testing/Cxx/TestProcrustesAlignmentFilter.cxx
@@ -0,0 +1,280 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestProcrustesAlignmentFilter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMultiBlockDataSet.h>
+#include <vtkPolyData.h>
+#include <vtkProcrustesAlignmentFilter.h>
+#include <vtkSmartPointer.h>
+
+int TestProcrustesAlignmentFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkPoints> pointsArray[3];
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    pointsArray[i] = vtkSmartPointer<vtkPoints>::New();
+    }
+
+  pointsArray[0]->InsertNextPoint(-1.58614838, -0.66562307, -0.20268087);
+  pointsArray[0]->InsertNextPoint(-0.09052952, -1.53144991, 0.80403084);
+  pointsArray[0]->InsertNextPoint(-1.17059791, 1.07974386, 0.68106824);
+  pointsArray[0]->InsertNextPoint(0.32502091, 0.21391694, 1.68777990);
+  pointsArray[0]->InsertNextPoint(-0.32502091, -0.21391694, -1.68777990);
+  pointsArray[0]->InsertNextPoint(1.17059791, -1.07974386, -0.68106824);
+  pointsArray[0]->InsertNextPoint(0.09052952, 1.53144991, -0.80403084);
+  pointsArray[0]->InsertNextPoint(1.58614838, 0.66562307, 0.20268087);
+
+  pointsArray[1]->InsertNextPoint(-1.58614838, -0.66562307, -0.20268087);
+  pointsArray[1]->InsertNextPoint(-0.09052952, -1.53144991, 0.80403084);
+  pointsArray[1]->InsertNextPoint(-1.17059791, 1.07974386, 0.68106824);
+  pointsArray[1]->InsertNextPoint(0.32502091, 0.21391694, 1.68777990);
+  pointsArray[1]->InsertNextPoint(-0.32502091, -0.21391694, -1.68777990);
+  pointsArray[1]->InsertNextPoint(1.17059791, -1.07974386, -0.68106824);
+  pointsArray[1]->InsertNextPoint(0.09052952, 1.53144991, -0.80403084);
+  pointsArray[1]->InsertNextPoint(1.58614838, 0.66562307, 0.20268087);
+
+  pointsArray[2]->InsertNextPoint(-1.58614838, -0.66562307, -0.20268087);
+  pointsArray[2]->InsertNextPoint(-0.09052952, -1.53144991, 0.80403084);
+  pointsArray[2]->InsertNextPoint(-1.17059791, 1.07974386, 0.68106824);
+  pointsArray[2]->InsertNextPoint(0.32502091, 0.21391694, 1.68777990);
+  pointsArray[2]->InsertNextPoint(-0.32502091, -0.21391694, -1.68777990);
+  pointsArray[2]->InsertNextPoint(1.17059791, -1.07974386, -0.68106824);
+  pointsArray[2]->InsertNextPoint(0.09052952, 1.53144991, -0.80403084);
+  pointsArray[2]->InsertNextPoint(1.58614838, 0.66562307, 0.20268087);
+
+  vtkSmartPointer<vtkMultiBlockDataSet> inputMultiBlockDataSet = vtkSmartPointer<vtkMultiBlockDataSet>::New();
+
+  vtkSmartPointer<vtkProcrustesAlignmentFilter> procrustesAlignmentFilter = vtkSmartPointer<vtkProcrustesAlignmentFilter>::New();
+  procrustesAlignmentFilter->SetInputData(inputMultiBlockDataSet);
+  procrustesAlignmentFilter->StartFromCentroidOff();
+
+  procrustesAlignmentFilter->SetOutputPointsPrecision(vtkAlgorithm::DEFAULT_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkPoints> inputPoints = vtkSmartPointer<vtkPoints>::New();
+    inputPoints->SetDataType(VTK_FLOAT);
+    inputPoints->DeepCopy(pointsArray[i]);
+
+    vtkSmartPointer<vtkPolyData> inputPolyData = vtkSmartPointer<vtkPolyData>::New();
+    inputPolyData->SetPoints(inputPoints);
+
+    inputMultiBlockDataSet->SetBlock(i, inputPolyData);
+    }
+
+  procrustesAlignmentFilter->Update();
+
+  vtkSmartPointer<vtkPoints> meanPoints = procrustesAlignmentFilter->GetMeanPoints();
+
+  if(meanPoints->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  vtkSmartPointer<vtkMultiBlockDataSet> outputMultiBlockDataSet = procrustesAlignmentFilter->GetOutput();
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkDataObject> dataObject = outputMultiBlockDataSet->GetBlock(i);
+    vtkSmartPointer<vtkPolyData> outputPolyData = vtkPolyData::SafeDownCast(dataObject);
+    vtkSmartPointer<vtkPoints> outputPoints = outputPolyData->GetPoints();
+
+    if(outputPoints->GetDataType() != VTK_FLOAT)
+      {
+      return EXIT_FAILURE;
+      }
+    }
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkPoints> inputPoints = vtkSmartPointer<vtkPoints>::New();
+    inputPoints->SetDataType(VTK_DOUBLE);
+    inputPoints->DeepCopy(pointsArray[i]);
+
+    vtkSmartPointer<vtkPolyData> inputPolyData = vtkSmartPointer<vtkPolyData>::New();
+    inputPolyData->SetPoints(inputPoints);
+
+    inputMultiBlockDataSet->SetBlock(i, inputPolyData);
+    }
+
+  procrustesAlignmentFilter->Update();
+
+  meanPoints = procrustesAlignmentFilter->GetMeanPoints();
+
+  if(meanPoints->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  outputMultiBlockDataSet = procrustesAlignmentFilter->GetOutput();
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkDataObject> dataObject = outputMultiBlockDataSet->GetBlock(i);
+    vtkSmartPointer<vtkPolyData> outputPolyData = vtkPolyData::SafeDownCast(dataObject);
+    vtkSmartPointer<vtkPoints> outputPoints = outputPolyData->GetPoints();
+
+    if(outputPoints->GetDataType() != VTK_DOUBLE)
+      {
+      return EXIT_FAILURE;
+      }
+    }
+
+  procrustesAlignmentFilter->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkPoints> inputPoints = vtkSmartPointer<vtkPoints>::New();
+    inputPoints->SetDataType(VTK_FLOAT);
+    inputPoints->DeepCopy(pointsArray[i]);
+
+    vtkSmartPointer<vtkPolyData> inputPolyData = vtkSmartPointer<vtkPolyData>::New();
+    inputPolyData->SetPoints(inputPoints);
+
+    inputMultiBlockDataSet->SetBlock(i, inputPolyData);
+    }
+
+  procrustesAlignmentFilter->Update();
+
+  meanPoints = procrustesAlignmentFilter->GetMeanPoints();
+
+  if(meanPoints->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  outputMultiBlockDataSet = procrustesAlignmentFilter->GetOutput();
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkDataObject> dataObject = outputMultiBlockDataSet->GetBlock(i);
+    vtkSmartPointer<vtkPolyData> outputPolyData = vtkPolyData::SafeDownCast(dataObject);
+    vtkSmartPointer<vtkPoints> outputPoints = outputPolyData->GetPoints();
+
+    if(outputPoints->GetDataType() != VTK_FLOAT)
+      {
+      return EXIT_FAILURE;
+      }
+    }
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkPoints> inputPoints = vtkSmartPointer<vtkPoints>::New();
+    inputPoints->SetDataType(VTK_DOUBLE);
+    inputPoints->DeepCopy(pointsArray[i]);
+
+    vtkSmartPointer<vtkPolyData> inputPolyData = vtkSmartPointer<vtkPolyData>::New();
+    inputPolyData->SetPoints(inputPoints);
+
+    inputMultiBlockDataSet->SetBlock(i, inputPolyData);
+    }
+
+  procrustesAlignmentFilter->Update();
+
+  meanPoints = procrustesAlignmentFilter->GetMeanPoints();
+
+  if(meanPoints->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  outputMultiBlockDataSet = procrustesAlignmentFilter->GetOutput();
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkDataObject> dataObject = outputMultiBlockDataSet->GetBlock(i);
+    vtkSmartPointer<vtkPolyData> outputPolyData = vtkPolyData::SafeDownCast(dataObject);
+    vtkSmartPointer<vtkPoints> outputPoints = outputPolyData->GetPoints();
+
+    if(outputPoints->GetDataType() != VTK_FLOAT)
+      {
+      return EXIT_FAILURE;
+      }
+    }
+
+  procrustesAlignmentFilter->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkPoints> inputPoints = vtkSmartPointer<vtkPoints>::New();
+    inputPoints->SetDataType(VTK_FLOAT);
+    inputPoints->DeepCopy(pointsArray[i]);
+
+    vtkSmartPointer<vtkPolyData> inputPolyData = vtkSmartPointer<vtkPolyData>::New();
+    inputPolyData->SetPoints(inputPoints);
+
+    inputMultiBlockDataSet->SetBlock(i, inputPolyData);
+    }
+
+  procrustesAlignmentFilter->Update();
+
+  meanPoints = procrustesAlignmentFilter->GetMeanPoints();
+
+  if(meanPoints->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  outputMultiBlockDataSet = procrustesAlignmentFilter->GetOutput();
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkDataObject> dataObject = outputMultiBlockDataSet->GetBlock(i);
+    vtkSmartPointer<vtkPolyData> outputPolyData = vtkPolyData::SafeDownCast(dataObject);
+    vtkSmartPointer<vtkPoints> outputPoints = outputPolyData->GetPoints();
+
+    if(outputPoints->GetDataType() != VTK_DOUBLE)
+      {
+      return EXIT_FAILURE;
+      }
+    }
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkPoints> inputPoints = vtkSmartPointer<vtkPoints>::New();
+    inputPoints->SetDataType(VTK_DOUBLE);
+    inputPoints->DeepCopy(pointsArray[i]);
+
+    vtkSmartPointer<vtkPolyData> inputPolyData = vtkSmartPointer<vtkPolyData>::New();
+    inputPolyData->SetPoints(inputPoints);
+
+    inputMultiBlockDataSet->SetBlock(i, inputPolyData);
+    }
+
+  procrustesAlignmentFilter->Update();
+
+  meanPoints = procrustesAlignmentFilter->GetMeanPoints();
+
+  if(meanPoints->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  outputMultiBlockDataSet = procrustesAlignmentFilter->GetOutput();
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    vtkSmartPointer<vtkDataObject> dataObject = outputMultiBlockDataSet->GetBlock(i);
+    vtkSmartPointer<vtkPolyData> outputPolyData = vtkPolyData::SafeDownCast(dataObject);
+    vtkSmartPointer<vtkPoints> outputPoints = outputPolyData->GetPoints();
+
+    if(outputPoints->GetDataType() != VTK_DOUBLE)
+      {
+      return EXIT_FAILURE;
+      }
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Hybrid/Testing/Data/Baseline/3DMorph.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/3DMorph.png.md5
new file mode 100644
index 0000000..75c8ad7
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/3DMorph.png.md5
@@ -0,0 +1 @@
+2ad712ba1bf90bcd5773b11022add99e
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TemporalStatistics.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TemporalStatistics.png.md5
new file mode 100644
index 0000000..9012a69
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TemporalStatistics.png.md5
@@ -0,0 +1 @@
+b0d47d83d2b235c8cdd92ed775965e94
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestFacetReader.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestFacetReader.png.md5
new file mode 100644
index 0000000..bab493c
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestFacetReader.png.md5
@@ -0,0 +1 @@
+46f1fbbdd8cf6abbd366615bc502fb7c
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestGreedyTerrainDecimation.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestGreedyTerrainDecimation.png.md5
new file mode 100644
index 0000000..0a82716
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestGreedyTerrainDecimation.png.md5
@@ -0,0 +1 @@
+2c1b8e11c689a1270f63705a32b4164d
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestGreedyTerrainDecimation_1.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestGreedyTerrainDecimation_1.png.md5
new file mode 100644
index 0000000..72b814c
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestGreedyTerrainDecimation_1.png.md5
@@ -0,0 +1 @@
+f776b9a1a4b4c6aee4cb2ff08eba0769
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestGridWarp3D.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestGridWarp3D.png.md5
new file mode 100644
index 0000000..8979b45
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestGridWarp3D.png.md5
@@ -0,0 +1 @@
+d1c0b2c037d661bca0985fb18ac87e3c
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpCubic.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpCubic.png.md5
new file mode 100644
index 0000000..92125c8
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpCubic.png.md5
@@ -0,0 +1 @@
+7721261b0c50f8ea493c06acbbf4b350
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpLinear.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpLinear.png.md5
new file mode 100644
index 0000000..0aeedda
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpLinear.png.md5
@@ -0,0 +1 @@
+8af93a6f1e19037d3118cc6d13095d8b
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpNearest.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpNearest.png.md5
new file mode 100644
index 0000000..6077f50
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestGridWarpNearest.png.md5
@@ -0,0 +1 @@
+8b57d9f683dbb44c1b56e14c4d1235bb
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestImageStencilWithPolydata.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestImageStencilWithPolydata.png.md5
new file mode 100644
index 0000000..92309b3
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestImageStencilWithPolydata.png.md5
@@ -0,0 +1 @@
+f6aa7e5dd0c7a77998d27a77ccf69e88
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestPCA.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestPCA.png.md5
new file mode 100644
index 0000000..771b51d
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestPCA.png.md5
@@ -0,0 +1 @@
+6eab5599cf5436a248d28ff604d52c13
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestPolyDataSilhouette.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestPolyDataSilhouette.png.md5
new file mode 100644
index 0000000..2705274
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestPolyDataSilhouette.png.md5
@@ -0,0 +1 @@
+a3b4709b981f6ee42b945a1af595470d
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestProcrustes.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestProcrustes.png.md5
new file mode 100644
index 0000000..ca51ff9
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestProcrustes.png.md5
@@ -0,0 +1 @@
+a179af02add0c7c9e0679eabc7a86968
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestProcrustesRigidCentroid.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestProcrustesRigidCentroid.png.md5
new file mode 100644
index 0000000..e1b0be8
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestProcrustesRigidCentroid.png.md5
@@ -0,0 +1 @@
+28a4a2b30723dc11115210208da26b28
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestProjectedTerrainPath.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestProjectedTerrainPath.png.md5
new file mode 100644
index 0000000..3300da7
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestProjectedTerrainPath.png.md5
@@ -0,0 +1 @@
+0bd8b87a1567319a47397b6426ae69b3
diff --git a/Filters/Hybrid/Testing/Data/Baseline/TestTemporalFractal.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/TestTemporalFractal.png.md5
new file mode 100644
index 0000000..9eec4b7
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/TestTemporalFractal.png.md5
@@ -0,0 +1 @@
+117de5fb5a4f7236dfdeb5470ac97239
diff --git a/Filters/Hybrid/Testing/Data/Baseline/WarpPolyData.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/WarpPolyData.png.md5
new file mode 100644
index 0000000..200d6b5
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/WarpPolyData.png.md5
@@ -0,0 +1 @@
+7bc5a4c23997214a932abb5e99be8d32
diff --git a/Filters/Hybrid/Testing/Data/Baseline/appendImplicitModel.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/appendImplicitModel.png.md5
new file mode 100644
index 0000000..e3f9399
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/appendImplicitModel.png.md5
@@ -0,0 +1 @@
+2c484a0e2a0d89cd72af3a21ff1d21e6
diff --git a/Filters/Hybrid/Testing/Data/Baseline/appendImplicitModelNoBounds.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/appendImplicitModelNoBounds.png.md5
new file mode 100644
index 0000000..e3f9399
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/appendImplicitModelNoBounds.png.md5
@@ -0,0 +1 @@
+2c484a0e2a0d89cd72af3a21ff1d21e6
diff --git a/Filters/Hybrid/Testing/Data/Baseline/banana.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/banana.png.md5
new file mode 100644
index 0000000..a582db0
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/banana.png.md5
@@ -0,0 +1 @@
+a3f15683fd9966f6419b1bcc816b7df3
diff --git a/Filters/Hybrid/Testing/Data/Baseline/depthSort.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/depthSort.png.md5
new file mode 100644
index 0000000..b8f02c4
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/depthSort.png.md5
@@ -0,0 +1 @@
+be8c408d971d0d560965c30b752ad647
diff --git a/Filters/Hybrid/Testing/Data/Baseline/earth.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/earth.png.md5
new file mode 100644
index 0000000..e1c7067
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/earth.png.md5
@@ -0,0 +1 @@
+813f49b5d4a549b458cd3cd1cf4e5912
diff --git a/Filters/Hybrid/Testing/Data/Baseline/earth_1.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/earth_1.png.md5
new file mode 100644
index 0000000..8dc049f
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/earth_1.png.md5
@@ -0,0 +1 @@
+2261e0605d932a50421f60bc7895bec7
diff --git a/Filters/Hybrid/Testing/Data/Baseline/imageToPolyData.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/imageToPolyData.png.md5
new file mode 100644
index 0000000..9b5d9c0
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/imageToPolyData.png.md5
@@ -0,0 +1 @@
+c05bec5f5e6ab3769a6d246fae0ddc9d
diff --git a/Filters/Hybrid/Testing/Data/Baseline/largeImageOffset.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/largeImageOffset.png.md5
new file mode 100644
index 0000000..b98d4ba
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/largeImageOffset.png.md5
@@ -0,0 +1 @@
+2fe37ae6e536fcbfd85d2f648702eeca
diff --git a/Filters/Hybrid/Testing/Data/Baseline/largeImageParallel.png.md5 b/Filters/Hybrid/Testing/Data/Baseline/largeImageParallel.png.md5
new file mode 100644
index 0000000..bd45e8f
--- /dev/null
+++ b/Filters/Hybrid/Testing/Data/Baseline/largeImageParallel.png.md5
@@ -0,0 +1 @@
+04b6b574b13c6f3972e56e9bf86dc1a3
diff --git a/Filters/Hybrid/Testing/Python/CMakeLists.txt b/Filters/Hybrid/Testing/Python/CMakeLists.txt
index c5ee864..6d7a323 100644
--- a/Filters/Hybrid/Testing/Python/CMakeLists.txt
+++ b/Filters/Hybrid/Testing/Python/CMakeLists.txt
@@ -1,25 +1,22 @@
-add_test_python(3DMorph.py Hybrid)
-add_test_python(TestFacetReader.py Hybrid)
-add_test_python(TestGridWarp3D.py Hybrid)
-add_test_python(TestGridWarpCubic.py Hybrid)
-add_test_python(TestGridWarpLinear.py Hybrid)
-add_test_python(TestGridWarpNearest.py Hybrid)
-add_test_python(TestImageStencilWithPolydata.py Hybrid)
-add_test_python(TestPCA.py Hybrid)
-add_test_python(TestProcrustes.py Hybrid)
-add_test_python(TestProjectedTerrainPath.py Hybrid)
-add_test_python(TestTemporalSnapToTimeStep.py)
-add_test_python(WarpPolyData.py Hybrid)
-add_test_python(appendImplicitModel.py Hybrid)
-add_test_python(appendImplicitModelNoBounds.py Hybrid)
-add_test_python(banana.py Hybrid)
-add_test_python(depthSort.py Hybrid)
-add_test_python(earth.py Hybrid)
-add_test_python(imageToPolyData.py Hybrid)
-add_test_python(largeImageOffset.py Hybrid)
-add_test_python(largeImageParallel.py Hybrid)
-add_test_python(TestProcrustesRigidCentroid.py Hybrid)
-
-if(VTK_DATA_ROOT)
-  add_test_python(TestGreedyTerrainDecimation.py Hybrid)
-endif()
+vtk_add_test_python(3DMorph.py)
+vtk_add_test_python(TestFacetReader.py)
+vtk_add_test_python(TestGreedyTerrainDecimation.py)
+vtk_add_test_python(TestGridWarp3D.py)
+vtk_add_test_python(TestGridWarpCubic.py)
+vtk_add_test_python(TestGridWarpLinear.py)
+vtk_add_test_python(TestGridWarpNearest.py)
+vtk_add_test_python(TestImageStencilWithPolydata.py)
+vtk_add_test_python(TestPCA.py)
+vtk_add_test_python(TestProcrustes.py)
+vtk_add_test_python(TestProjectedTerrainPath.py)
+vtk_add_test_python(TestTemporalSnapToTimeStep.py NO_DATA NO_VALID)
+vtk_add_test_python(WarpPolyData.py)
+vtk_add_test_python(appendImplicitModel.py)
+vtk_add_test_python(appendImplicitModelNoBounds.py)
+vtk_add_test_python(banana.py)
+vtk_add_test_python(depthSort.py)
+vtk_add_test_python(earth.py)
+vtk_add_test_python(imageToPolyData.py)
+vtk_add_test_python(largeImageOffset.py)
+vtk_add_test_python(largeImageParallel.py)
+vtk_add_test_python(TestProcrustesRigidCentroid.py)
diff --git a/Filters/Hybrid/Testing/Python/largeImageOffset.py b/Filters/Hybrid/Testing/Python/largeImageOffset.py
index ee98599..d165899 100755
--- a/Filters/Hybrid/Testing/Python/largeImageOffset.py
+++ b/Filters/Hybrid/Testing/Python/largeImageOffset.py
@@ -52,14 +52,14 @@ viewer.SetColorWindow(255)
 viewer.SetColorLevel(127.5)
 viewer.Render()
 
-# on several opengl X window unix implementations
-# multiple context deletes cause errors
-# so we leak the renWin in this test for unix
-if renWin1.IsA('vtkXOpenGLRenderWindow'):
-    renWin1.Register(ren1)
-    dl = vtk.vtkDebugLeaks()
-    dl.SetExitError(0)
-    del dl
+## on several opengl X window unix implementations
+## multiple context deletes cause errors
+## so we leak the renWin in this test for unix
+#if renWin1.IsA('vtkXOpenGLRenderWindow'):
+#    renWin1.Register(ren1)
+#    dl = vtk.vtkDebugLeaks()
+#    dl.SetExitError(0)
+#    del dl
 
 # iren.Initialize()
 # iren.Start()
diff --git a/Filters/Hybrid/Testing/Python/largeImageParallel.py b/Filters/Hybrid/Testing/Python/largeImageParallel.py
index 69ddd75..fd6a121 100755
--- a/Filters/Hybrid/Testing/Python/largeImageParallel.py
+++ b/Filters/Hybrid/Testing/Python/largeImageParallel.py
@@ -52,14 +52,14 @@ viewer.SetColorWindow(255)
 viewer.SetColorLevel(127.5)
 viewer.Render()
 
-# on several opengl X window unix implementations
-# multiple context deletes cause errors
-# so we leak the renWin1 in this test for unix
-if renWin1.IsA('vtkXOpenGLRenderWindow'):
-    renWin1.Register(ren1)
-    dl = vtk.vtkDebugLeaks()
-    dl.SetExitError(0)
-    del dl
+## on several opengl X window unix implementations
+## multiple context deletes cause errors
+## so we leak the renWin1 in this test for unix
+#if renWin1.IsA('vtkXOpenGLRenderWindow'):
+#    renWin1.Register(ren1)
+#    dl = vtk.vtkDebugLeaks()
+#    dl.SetExitError(0)
+#    del dl
 
 # iren.Initialize()
 # iren.Start()
diff --git a/Filters/Hybrid/Testing/Tcl/CMakeLists.txt b/Filters/Hybrid/Testing/Tcl/CMakeLists.txt
index afaaab2..b817c7d 100644
--- a/Filters/Hybrid/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Hybrid/Testing/Tcl/CMakeLists.txt
@@ -1,24 +1,21 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestFacetReader Hybrid)
-  add_test_tcl(TestGreedyTerrainDecimation Hybrid)
-  add_test_tcl(TestGridWarp3D Hybrid)
-  add_test_tcl(TestGridWarpCubic Hybrid)
-  add_test_tcl(TestGridWarpLinear Hybrid)
-  add_test_tcl(TestGridWarpNearest Hybrid)
-  add_test_tcl(TestImageStencilWithPolydata Hybrid)
-  add_test_tcl(TestProjectedTerrainPath Hybrid)
-  add_test_tcl(earth Hybrid)
-  add_test_tcl(imageToPolyData Hybrid)
-  add_test_tcl(largeImageOffset Hybrid)
-  add_test_tcl(largeImageParallel Hybrid)
-endif()
-
-add_test_tcl(3DMorph Hybrid)
-add_test_tcl(TestPCA Hybrid)
-add_test_tcl(TestProcrustes Hybrid)
-add_test_tcl(TestProcrustesRigidCentroid Hybrid)
-add_test_tcl(WarpPolyData Hybrid)
-add_test_tcl(appendImplicitModel Hybrid)
-add_test_tcl(appendImplicitModelNoBounds Hybrid)
-add_test_tcl(banana Hybrid)
-add_test_tcl(depthSort Hybrid)
+vtk_add_test_tcl(TestFacetReader)
+vtk_add_test_tcl(TestGreedyTerrainDecimation)
+vtk_add_test_tcl(TestGridWarp3D)
+vtk_add_test_tcl(TestGridWarpCubic)
+vtk_add_test_tcl(TestGridWarpLinear)
+vtk_add_test_tcl(TestGridWarpNearest)
+vtk_add_test_tcl(TestImageStencilWithPolydata)
+vtk_add_test_tcl(TestProjectedTerrainPath)
+vtk_add_test_tcl(earth)
+vtk_add_test_tcl(imageToPolyData)
+vtk_add_test_tcl(largeImageOffset)
+vtk_add_test_tcl(largeImageParallel)
+vtk_add_test_tcl(3DMorph)
+vtk_add_test_tcl(TestPCA)
+vtk_add_test_tcl(TestProcrustes)
+vtk_add_test_tcl(TestProcrustesRigidCentroid)
+vtk_add_test_tcl(WarpPolyData)
+vtk_add_test_tcl(appendImplicitModel)
+vtk_add_test_tcl(appendImplicitModelNoBounds)
+vtk_add_test_tcl(banana)
+vtk_add_test_tcl(depthSort)
diff --git a/Filters/Hybrid/Testing/Tcl/largeImageOffset.tcl b/Filters/Hybrid/Testing/Tcl/largeImageOffset.tcl
index ce9bc5d..fb12343 100644
--- a/Filters/Hybrid/Testing/Tcl/largeImageOffset.tcl
+++ b/Filters/Hybrid/Testing/Tcl/largeImageOffset.tcl
@@ -49,12 +49,12 @@ vtkImageViewer viewer
   viewer SetColorLevel 127.5
   viewer Render
 
-# on several opengl X window unix implementations
-# multiple context deletes cause errors
-# so we leak the renWin1 in this test for unix
-if { $tcl_platform(platform) == "unix" } {
-  renWin1 Register ren1
-  vtkDebugLeaks dl
-  dl SetExitError 0
-  dl Delete
-}
+## on several opengl X window unix implementations
+## multiple context deletes cause errors
+## so we leak the renWin1 in this test for unix
+#if { $tcl_platform(platform) == "unix" } {
+#  renWin1 Register ren1
+#  vtkDebugLeaks dl
+#  dl SetExitError 0
+#  dl Delete
+#}
diff --git a/Filters/Hybrid/Testing/Tcl/largeImageParallel.tcl b/Filters/Hybrid/Testing/Tcl/largeImageParallel.tcl
index add4a57..b8012dd 100644
--- a/Filters/Hybrid/Testing/Tcl/largeImageParallel.tcl
+++ b/Filters/Hybrid/Testing/Tcl/largeImageParallel.tcl
@@ -49,12 +49,12 @@ vtkImageViewer viewer
   viewer SetColorLevel 127.5
   viewer Render
 
-# on several opengl X window unix implementations
-# multiple context deletes cause errors
-# so we leak the renWin1 in this test for unix
-if { $tcl_platform(platform) == "unix" } {
-  renWin1 Register ren1
-  vtkDebugLeaks dl
-  dl SetExitError 0
-  dl Delete
-}
+## on several opengl X window unix implementations
+## multiple context deletes cause errors
+## so we leak the renWin1 in this test for unix
+#if { $tcl_platform(platform) == "unix" } {
+#  renWin1 Register ren1
+#  vtkDebugLeaks dl
+#  dl SetExitError 0
+#  dl Delete
+#}
diff --git a/Filters/Hybrid/vtkDepthSortPolyData.cxx b/Filters/Hybrid/vtkDepthSortPolyData.cxx
index 694d9c2..3c651f0 100644
--- a/Filters/Hybrid/vtkDepthSortPolyData.cxx
+++ b/Filters/Hybrid/vtkDepthSortPolyData.cxx
@@ -78,7 +78,7 @@ typedef struct _vtkSortValues {
 
 extern "C"
 {
-  int vtkCompareBackToFront(const void *val1, const void *val2)
+  static int vtkCompareBackToFront(const void *val1, const void *val2)
   {
     if (((vtkSortValues *)val1)->z > ((vtkSortValues *)val2)->z)
       {
@@ -97,7 +97,7 @@ extern "C"
 
 extern "C"
 {
-  int vtkCompareFrontToBack(const void *val1, const void *val2)
+  static int vtkCompareFrontToBack(const void *val1, const void *val2)
   {
     if (((vtkSortValues *)val1)->z < ((vtkSortValues *)val2)->z)
       {
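
The two qsort() comparators above are now declared static inside their extern "C" blocks, giving them internal linkage so the symbols cannot collide with identically named comparators in other translation units. A minimal self-contained sketch of the same pattern (illustration only, not taken from the patch; the struct and function names here are invented):

  #include <cstdlib>  // std::qsort
  #include <cstddef>  // std::size_t

  struct SortValue { double z; int cellId; };

  extern "C"
  {
    // 'static' gives internal linkage; extern "C" gives the function type the
    // C language linkage expected by std::qsort.
    static int CompareBackToFront(const void *a, const void *b)
    {
      double za = static_cast<const SortValue *>(a)->z;
      double zb = static_cast<const SortValue *>(b)->z;
      return (za > zb) ? -1 : ((za < zb) ? 1 : 0);
    }
  }

  void SortBackToFront(SortValue *values, std::size_t n)
  {
    std::qsort(values, n, sizeof(SortValue), CompareBackToFront);
  }
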
diff --git a/Filters/Hybrid/vtkEarthSource.h b/Filters/Hybrid/vtkEarthSource.h
index 5af0282..317c2a6 100644
--- a/Filters/Hybrid/vtkEarthSource.h
+++ b/Filters/Hybrid/vtkEarthSource.h
@@ -56,7 +56,7 @@ public:
 
 protected:
   vtkEarthSource();
-  ~vtkEarthSource() {};
+  ~vtkEarthSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Hybrid/vtkImplicitModeller.cxx b/Filters/Hybrid/vtkImplicitModeller.cxx
index 2e0ed4c..4694850 100644
--- a/Filters/Hybrid/vtkImplicitModeller.cxx
+++ b/Filters/Hybrid/vtkImplicitModeller.cxx
@@ -220,7 +220,8 @@ void SetOutputDistance(double distance, OT *outputValue, double capValue, double
 
 // Convert distance as stored in output (could be scaled and/or non-double
 // type) to double distance with correct scaling
-void ConvertToDoubleDistance(double inDistance, double &distance,
+template <class OT>
+void ConvertToDoubleDistance(const OT &inDistance, double &distance,
                              double &distance2, double scaleFactor)
 {
   if (scaleFactor)
diff --git a/Filters/Hybrid/vtkProcrustesAlignmentFilter.cxx b/Filters/Hybrid/vtkProcrustesAlignmentFilter.cxx
index f94a74b..0a9ff3d 100644
--- a/Filters/Hybrid/vtkProcrustesAlignmentFilter.cxx
+++ b/Filters/Hybrid/vtkProcrustesAlignmentFilter.cxx
@@ -33,7 +33,9 @@ vtkProcrustesAlignmentFilter::vtkProcrustesAlignmentFilter()
 {
   this->LandmarkTransform = vtkLandmarkTransform::New();
   this->StartFromCentroid = false;
+  this->OutputPointsPrecision = DEFAULT_PRECISION;
 
+  // The precision of the mean points is set in RequestData().
   this->MeanPoints = vtkPoints::New();
 }
 
@@ -182,6 +184,37 @@ int vtkProcrustesAlignmentFilter::RequestData(
       {
       outputBlock = tmpInput->NewInstance();
       outputBlock->DeepCopy(tmpInput);
+
+      // Set the desired precision for the points in the output. If
+      // this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION
+      // then the precision of the points in the output is correctly
+      // set during the deep copy of tmpInput.
+      if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+        {
+        // Only create another new vtkPoints if the output precision is
+        // different from the input.
+        if(tmpInput->GetPoints() && tmpInput->GetPoints()->GetDataType() != VTK_FLOAT)
+          {
+          vtkPoints *newPoints = vtkPoints::New();
+          newPoints->SetDataType(VTK_FLOAT);
+          newPoints->DeepCopy(tmpInput->GetPoints());
+          outputBlock->SetPoints(newPoints);
+          newPoints->Delete();
+          }
+        }
+      else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+        {
+        // Only create another new vtkPoints if the output precision is
+        // different from the input.
+        if(tmpInput->GetPoints() && tmpInput->GetPoints()->GetDataType() != VTK_DOUBLE)
+          {
+          vtkPoints *newPoints = vtkPoints::New();
+          newPoints->SetDataType(VTK_DOUBLE);
+          newPoints->DeepCopy(tmpInput->GetPoints());
+          outputBlock->SetPoints(newPoints);
+          newPoints->Delete();
+          }
+        }
       }
     output->SetBlock(i, outputBlock);
     if(outputBlock)
@@ -218,7 +251,34 @@ int vtkProcrustesAlignmentFilter::RequestData(
       }
     }
 
-//  vtkPoints *mean_points = vtkPoints::New();
+  // Set the desired precision for the mean points.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DEFAULT_PRECISION)
+    {
+    // The points in distinct blocks may be of differing precisions.
+    this->MeanPoints->SetDataType(VTK_FLOAT);
+    for(i=0;i<N_SETS;i++)
+      {
+      tmpInput =
+        vtkPointSet::SafeDownCast(mbInput->GetBlock(i));
+
+      // Set the desired precision to VTK_DOUBLE if the precision of the
+      // mean points in any of the blocks is VTK_DOUBLE.
+      if(tmpInput && tmpInput->GetPoints()->GetDataType() == VTK_DOUBLE)
+        {
+        this->MeanPoints->SetDataType(VTK_DOUBLE);
+        break;
+        }
+      }
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::SINGLE_PRECISION)
+    {
+    this->MeanPoints->SetDataType(VTK_FLOAT);
+    }
+  else if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    this->MeanPoints->SetDataType(VTK_DOUBLE);
+    }
+
   this->MeanPoints->DeepCopy(input->GetPoints());
   // our initial estimate of the mean comes from the first example in the set
 
@@ -262,6 +322,7 @@ int vtkProcrustesAlignmentFilter::RequestData(
   // we keep a record of the first mean to fix the orientation and scale
   // (which are otherwise undefined and the loop will not converge)
   vtkPoints *first_mean = vtkPoints::New();
+  first_mean->SetDataType(this->MeanPoints->GetDataType());
   first_mean->DeepCopy(MeanPoints);
 
 
@@ -281,9 +342,9 @@ int vtkProcrustesAlignmentFilter::RequestData(
       }
   }
 
-
   // storage for the new mean that is being calculated
   vtkPoints *new_mean = vtkPoints::New();
+  new_mean->SetDataType(this->MeanPoints->GetDataType());
   new_mean->SetNumberOfPoints(N_POINTS);
 
   // compute mean and align all the shapes to it, until convergence
@@ -418,4 +479,5 @@ void vtkProcrustesAlignmentFilter::PrintSelf(ostream& os, vtkIndent indent)
   this->MeanPoints->PrintSelf(os, indent.GetNextIndent());
   os << indent << "Start From Centroid: "
      << (this->StartFromCentroid ? "On\n" : "Off\n");
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Hybrid/vtkProcrustesAlignmentFilter.h b/Filters/Hybrid/vtkProcrustesAlignmentFilter.h
index a4f25f7..de1fc3a 100644
--- a/Filters/Hybrid/vtkProcrustesAlignmentFilter.h
+++ b/Filters/Hybrid/vtkProcrustesAlignmentFilter.h
@@ -88,6 +88,17 @@ public:
   vtkGetMacro(StartFromCentroid, bool);
   vtkBooleanMacro(StartFromCentroid, bool);
 
+  // Description:
+  // Set/get the desired precision for the output types. See the documentation
+  // for the vtkAlgorithm::DesiredOutputPrecision enum for an explanation of
+  // the available precision settings. If the desired precision is
+  // DEFAULT_PRECISION and any of the inputs are double precision, then the
+  // mean points will be double precision. Otherwise, if the desired
+  // precision is DEFAULT_PRECISION and all the inputs are single precision,
+  // then the mean points will be single precision.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkProcrustesAlignmentFilter();
   ~vtkProcrustesAlignmentFilter();
@@ -101,6 +112,7 @@ protected:
   bool StartFromCentroid;
 
   vtkPoints *MeanPoints;
+  int OutputPointsPrecision;
 
 private:
   vtkProcrustesAlignmentFilter(const vtkProcrustesAlignmentFilter&);  // Not implemented.
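
The new OutputPointsPrecision ivar is exposed through vtkSetMacro/vtkGetMacro, i.e. SetOutputPointsPrecision()/GetOutputPointsPrecision(), and controls the precision of the aligned output points and of MeanPoints; with DEFAULT_PRECISION the filter follows the inputs, promoting to double when any block stores VTK_DOUBLE points. A minimal usage sketch (not part of the patch; the surrounding pipeline setup is assumed, only the precision call comes from the new API):

  #include "vtkAlgorithm.h"
  #include "vtkNew.h"
  #include "vtkProcrustesAlignmentFilter.h"

  int main(int, char *[])
  {
    vtkNew<vtkProcrustesAlignmentFilter> procrustes;

    // Force double-precision output and mean points regardless of the input
    // blocks; SINGLE_PRECISION and DEFAULT_PRECISION (follow the inputs) are
    // the other choices.
    procrustes->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);

    // ... connect the multi-block input of vtkPointSet blocks and update the
    // pipeline as usual ...
    return 0;
  }
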
diff --git a/Filters/Hybrid/vtkTemporalFractal.cxx b/Filters/Hybrid/vtkTemporalFractal.cxx
index b10b8fb..ccc277c 100644
--- a/Filters/Hybrid/vtkTemporalFractal.cxx
+++ b/Filters/Hybrid/vtkTemporalFractal.cxx
@@ -33,7 +33,7 @@
 #include "vtkCompositeDataPipeline.h"
 #include "vtkSmartPointer.h"
 
-#include <assert.h>
+#include <cassert>
 
 //----------------------------------------------------------------------------
 class TemporalFractalOutputUtil: public vtkObject
diff --git a/Filters/Hybrid/vtkTemporalShiftScale.cxx b/Filters/Hybrid/vtkTemporalShiftScale.cxx
index 25aa92b..f1c8146 100644
--- a/Filters/Hybrid/vtkTemporalShiftScale.cxx
+++ b/Filters/Hybrid/vtkTemporalShiftScale.cxx
@@ -20,7 +20,7 @@
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include "vtkCompositeDataPipeline.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkTemporalShiftScale);
 
diff --git a/Filters/Hybrid/vtkTransformToGrid.cxx b/Filters/Hybrid/vtkTransformToGrid.cxx
index 150e10f..9ea9872 100644
--- a/Filters/Hybrid/vtkTransformToGrid.cxx
+++ b/Filters/Hybrid/vtkTransformToGrid.cxx
@@ -123,8 +123,8 @@ void vtkTransformToGrid::RequestInformation (
 // Return the maximum absolute displacement of the transform over
 // the entire grid extent -- this is extremely robust and extremely
 // inefficient, it should be possible to do much better than this.
-void vtkTransformToGridMinMax(vtkTransformToGrid *self, int extent[6],
-                              double &minDisplacement, double &maxDisplacement)
+static void vtkTransformToGridMinMax(vtkTransformToGrid *self, int extent[6],
+                                     double &minDisplacement, double &maxDisplacement)
 {
   vtkAbstractTransform *transform = self->GetInput();
   transform->Update();
diff --git a/Filters/HyperTree/Testing/Cxx/CMakeLists.txt b/Filters/HyperTree/Testing/Cxx/CMakeLists.txt
index ce311f1..1bf9382 100644
--- a/Filters/HyperTree/Testing/Cxx/CMakeLists.txt
+++ b/Filters/HyperTree/Testing/Cxx/CMakeLists.txt
@@ -1,4 +1,4 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   TestClipHyperOctree.cxx
   TestHyperOctreeContourFilter.cxx
   TestHyperOctreeCutter.cxx
@@ -30,26 +30,5 @@ create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
   TestHyperTreeGridTernary3DUnstructuredMaterial.cxx
   TestHyperTreeGridTernaryHyperbola.cxx
   TestHyperTreeGridTernarySphereMaterial.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    string(REGEX REPLACE "Bits" "" TNameNoBits ${TName})
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TNameNoBits}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/HyperTree/Testing/Cxx/TestClipHyperOctree.cxx b/Filters/HyperTree/Testing/Cxx/TestClipHyperOctree.cxx
index 6ca3c94..9c9ceb9 100644
--- a/Filters/HyperTree/Testing/Cxx/TestClipHyperOctree.cxx
+++ b/Filters/HyperTree/Testing/Cxx/TestClipHyperOctree.cxx
@@ -30,7 +30,7 @@
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyData.h"
 #include "vtkXMLUnstructuredGridWriter.h"
diff --git a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeContourFilter.cxx b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeContourFilter.cxx
index c322ca4..4c9a067 100644
--- a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeContourFilter.cxx
+++ b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeContourFilter.cxx
@@ -30,7 +30,7 @@
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyData.h"
 #include "vtkXMLPolyDataWriter.h"
diff --git a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeCutter.cxx b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeCutter.cxx
index 71537da..aa7870f 100644
--- a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeCutter.cxx
+++ b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeCutter.cxx
@@ -30,7 +30,7 @@
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyData.h"
 #include "vtkXMLPolyDataWriter.h"
diff --git a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeDual.cxx b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeDual.cxx
index 3b8c08f..c86d613 100644
--- a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeDual.cxx
+++ b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeDual.cxx
@@ -30,7 +30,7 @@
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyData.h"
 #include "vtkXMLPolyDataWriter.h"
diff --git a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeSurfaceFilter.cxx b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeSurfaceFilter.cxx
index 79e7503..aa562ed 100644
--- a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeSurfaceFilter.cxx
+++ b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeSurfaceFilter.cxx
@@ -31,7 +31,7 @@
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkPolyData.h"
 #include "vtkXMLPolyDataWriter.h"
diff --git a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeToUniformGrid.cxx b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeToUniformGrid.cxx
index 8466b22..c4e72cb 100644
--- a/Filters/HyperTree/Testing/Cxx/TestHyperOctreeToUniformGrid.cxx
+++ b/Filters/HyperTree/Testing/Cxx/TestHyperOctreeToUniformGrid.cxx
@@ -32,7 +32,7 @@
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkXMLImageDataWriter.h"
 #include "vtkHyperOctreeToUniformGridFilter.h"
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestClipHyperOctree.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestClipHyperOctree.png.md5
new file mode 100644
index 0000000..981cbc6
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestClipHyperOctree.png.md5
@@ -0,0 +1 @@
+a29609a81f1e89daf3a036c322ec8da8
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeContourFilter.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeContourFilter.png.md5
new file mode 100644
index 0000000..36a3f43
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeContourFilter.png.md5
@@ -0,0 +1 @@
+005c15de269649f64c6084b3c0a27916
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeCutter.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeCutter.png.md5
new file mode 100644
index 0000000..d128afb
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeCutter.png.md5
@@ -0,0 +1 @@
+801780c6948c999d35c99293d254e2ea
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeDual.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeDual.png.md5
new file mode 100644
index 0000000..47716d6
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeDual.png.md5
@@ -0,0 +1 @@
+50e9b34bd50435503ca548a7134074b1
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeDual_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeDual_1.png.md5
new file mode 100644
index 0000000..6c5e4a8
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeDual_1.png.md5
@@ -0,0 +1 @@
+972ef1cc63e314102f5dff88b5ff90f3
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeSurfaceFilter.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeSurfaceFilter.png.md5
new file mode 100644
index 0000000..827befc
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeSurfaceFilter.png.md5
@@ -0,0 +1 @@
+fea95cfdbf3eb4e6456b9c0c2ed58eb7
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeToUniformGrid.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeToUniformGrid.png.md5
new file mode 100644
index 0000000..f73f6fd
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperOctreeToUniformGrid.png.md5
@@ -0,0 +1 @@
+23860983d8fa13fc8893da2832464fa3
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2D.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2D.png.md5
new file mode 100644
index 0000000..5c1091f
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2D.png.md5
@@ -0,0 +1 @@
+ce1725f30c5384a1d584f7e010a13f5e
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DIJK.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DIJK.png.md5
new file mode 100644
index 0000000..5c1091f
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DIJK.png.md5
@@ -0,0 +1 @@
+ce1725f30c5384a1d584f7e010a13f5e
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DMaterial.png.md5
new file mode 100644
index 0000000..96cf873
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DMaterial.png.md5
@@ -0,0 +1 @@
+660c5b9bd6daa0b52e3ce9d4e3f7577f
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DMaterialIJK.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DMaterialIJK.png.md5
new file mode 100644
index 0000000..96cf873
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinary2DMaterialIJK.png.md5
@@ -0,0 +1 @@
+660c5b9bd6daa0b52e3ce9d4e3f7577f
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinaryEllipseMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinaryEllipseMaterial.png.md5
new file mode 100644
index 0000000..b7cb177
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinaryEllipseMaterial.png.md5
@@ -0,0 +1 @@
+eaf8fd09b1418b87580730dd1d9cca35
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinaryHyperbolicParaboloidMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinaryHyperbolicParaboloidMaterial.png.md5
new file mode 100644
index 0000000..92afd62
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridBinaryHyperbolicParaboloidMaterial.png.md5
@@ -0,0 +1 @@
+c2cccbf459a36b902e77a0c4f02c904b
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2D.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2D.png.md5
new file mode 100644
index 0000000..9ad3a88
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2D.png.md5
@@ -0,0 +1 @@
+51476d26493010c6fa7686aff38243b6
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DBiMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DBiMaterial.png.md5
new file mode 100644
index 0000000..155cb0c
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DBiMaterial.png.md5
@@ -0,0 +1 @@
+130478cd09735d4637af30ecf35875c8
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DFullMaterialBits.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DFullMaterialBits.png.md5
new file mode 100644
index 0000000..206e16e
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DFullMaterialBits.png.md5
@@ -0,0 +1 @@
+8c4cbc7bc06ae604c6028669edd6d7b7
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DMaterial.png.md5
new file mode 100644
index 0000000..06255ae
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DMaterial.png.md5
@@ -0,0 +1 @@
+55849755f0cb75220145e2239efe2292
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DMaterialBits.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DMaterialBits.png.md5
new file mode 100644
index 0000000..06255ae
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary2DMaterialBits.png.md5
@@ -0,0 +1 @@
+55849755f0cb75220145e2239efe2292
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCut.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCut.png.md5
new file mode 100644
index 0000000..a4c3ea4
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCut.png.md5
@@ -0,0 +1 @@
+9d7376ff6c9907d663645f8f267fcc22
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCutMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCutMaterial.png.md5
new file mode 100644
index 0000000..82aeac3
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCutMaterial.png.md5
@@ -0,0 +1 @@
+97200c1b8b3076d4e8722d795476d235
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCutMaterial_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCutMaterial_1.png.md5
new file mode 100644
index 0000000..2498dff
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCutMaterial_1.png.md5
@@ -0,0 +1 @@
+f9cfe4338f5c9aecc6edff7714678c38
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCut_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCut_1.png.md5
new file mode 100644
index 0000000..20f01ba
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DAxisCut_1.png.md5
@@ -0,0 +1 @@
+ba792d1a239a10f21bbdc7ac58aa4ee2
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip.png.md5
new file mode 100644
index 0000000..19efb22
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip.png.md5
@@ -0,0 +1 @@
+f99182986ef3562da40f9ab32bf874d6
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip_1.png.md5
new file mode 100644
index 0000000..b7e9e9f
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip_1.png.md5
@@ -0,0 +1 @@
+8462d8cfcabbba4b13423c1b0e04ee67
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip_2.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip_2.png.md5
new file mode 100644
index 0000000..7b575cb
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DClip_2.png.md5
@@ -0,0 +1 @@
+97af6032822833c8b3d3692cb668e78a
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContour.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContour.png.md5
new file mode 100644
index 0000000..e404b89
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContour.png.md5
@@ -0,0 +1 @@
+9b5a2f6f3553745e527deda019657e03
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContourMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContourMaterial.png.md5
new file mode 100644
index 0000000..6564474
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContourMaterial.png.md5
@@ -0,0 +1 @@
+b85533e7730f1975ed0ad7ee7aa12313
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContourMaterial_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContourMaterial_1.png.md5
new file mode 100644
index 0000000..7c0e79a
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContourMaterial_1.png.md5
@@ -0,0 +1 @@
+09e7a1da3ab66d40a8ba43d96133a24d
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContour_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContour_1.png.md5
new file mode 100644
index 0000000..4f6adda
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DContour_1.png.md5
@@ -0,0 +1 @@
+16c2506ade59d0d7628d4d608b6acd6b
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut.png.md5
new file mode 100644
index 0000000..39d59c4
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut.png.md5
@@ -0,0 +1 @@
+515ad535c200619dee216f50362ce7e6
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut_1.png.md5
new file mode 100644
index 0000000..9256769
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut_1.png.md5
@@ -0,0 +1 @@
+c208ab809794e186405d516dbed4beb5
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut_2.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut_2.png.md5
new file mode 100644
index 0000000..a5e98f8
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DCut_2.png.md5
@@ -0,0 +1 @@
+14833ff195dd9d01bb969b33cca5a0d0
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometry.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometry.png.md5
new file mode 100644
index 0000000..e78e972
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometry.png.md5
@@ -0,0 +1 @@
+129a2cf4f868532700841fba9b374724
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryLargeMaterialBits.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryLargeMaterialBits.png.md5
new file mode 100644
index 0000000..a59cd59
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryLargeMaterialBits.png.md5
@@ -0,0 +1 @@
+754a81596d8cb302bafb81fcd55bcea9
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterial.png.md5
new file mode 100644
index 0000000..b0891c7
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterial.png.md5
@@ -0,0 +1 @@
+58f58568ab5b687fe4fcfc41b41467b9
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterialBits.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterialBits.png.md5
new file mode 100644
index 0000000..b0891c7
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterialBits.png.md5
@@ -0,0 +1 @@
+58f58568ab5b687fe4fcfc41b41467b9
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterialBits_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterialBits_1.png.md5
new file mode 100644
index 0000000..3be52a7
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterialBits_1.png.md5
@@ -0,0 +1 @@
+1dc52a41bcd965c669f6ed61d6db9b43
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterial_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterial_1.png.md5
new file mode 100644
index 0000000..3be52a7
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometryMaterial_1.png.md5
@@ -0,0 +1 @@
+1dc52a41bcd965c669f6ed61d6db9b43
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometry_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometry_1.png.md5
new file mode 100644
index 0000000..2393516
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DGeometry_1.png.md5
@@ -0,0 +1 @@
+d0c8ee7d309e537d38155e206846636e
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructured.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructured.png.md5
new file mode 100644
index 0000000..c8b6355
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructured.png.md5
@@ -0,0 +1 @@
+b2acb8dda05afa22939c2f123f264da8
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructuredMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructuredMaterial.png.md5
new file mode 100644
index 0000000..2000fb0
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructuredMaterial.png.md5
@@ -0,0 +1 @@
+7729d57460ac4e7a61de6cc108cbb9ec
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructuredMaterial_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructuredMaterial_1.png.md5
new file mode 100644
index 0000000..cbc9587
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructuredMaterial_1.png.md5
@@ -0,0 +1 @@
+26c2f24f4b0c6e2680184058f2e984b5
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructured_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructured_1.png.md5
new file mode 100644
index 0000000..b589392
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernary3DUnstructured_1.png.md5
@@ -0,0 +1 @@
+32cf5f65b3deb2119313be97b46f379f
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernaryHyperbola.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernaryHyperbola.png.md5
new file mode 100644
index 0000000..8b99bfb
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernaryHyperbola.png.md5
@@ -0,0 +1 @@
+96f40b944cb65f72ed802b8593e76811
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernarySphereMaterial.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernarySphereMaterial.png.md5
new file mode 100644
index 0000000..364cca5
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernarySphereMaterial.png.md5
@@ -0,0 +1 @@
+98c323fc5143627e4c9d427a2d3c2852
diff --git a/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernarySphereMaterial_1.png.md5 b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernarySphereMaterial_1.png.md5
new file mode 100644
index 0000000..ed4ab1c
--- /dev/null
+++ b/Filters/HyperTree/Testing/Data/Baseline/TestHyperTreeGridTernarySphereMaterial_1.png.md5
@@ -0,0 +1 @@
+7f5207bdd0c398277b9a83ba7355167d
diff --git a/Filters/HyperTree/vtkClipHyperOctree.cxx b/Filters/HyperTree/vtkClipHyperOctree.cxx
index 151e7a0..2693b10 100644
--- a/Filters/HyperTree/vtkClipHyperOctree.cxx
+++ b/Filters/HyperTree/vtkClipHyperOctree.cxx
@@ -42,7 +42,7 @@
 #include "vtkIncrementalPointLocator.h"
 
 #include <math.h>
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkClipHyperOctree);
 vtkCxxSetObjectMacro(vtkClipHyperOctree,ClipFunction,vtkImplicitFunction);
diff --git a/Filters/HyperTree/vtkHyperOctreeClipCutPointsGrabber.cxx b/Filters/HyperTree/vtkHyperOctreeClipCutPointsGrabber.cxx
index c3f6897..9c67d52 100644
--- a/Filters/HyperTree/vtkHyperOctreeClipCutPointsGrabber.cxx
+++ b/Filters/HyperTree/vtkHyperOctreeClipCutPointsGrabber.cxx
@@ -18,7 +18,7 @@
 #include "vtkOrderedTriangulator.h"
 #include <set>
 #include "vtkPolygon.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkPoints.h"
 
 vtkStandardNewMacro(vtkHyperOctreeClipCutPointsGrabber);
diff --git a/Filters/HyperTree/vtkHyperOctreeContourFilter.cxx b/Filters/HyperTree/vtkHyperOctreeContourFilter.cxx
index 700629c..6f7d741 100644
--- a/Filters/HyperTree/vtkHyperOctreeContourFilter.cxx
+++ b/Filters/HyperTree/vtkHyperOctreeContourFilter.cxx
@@ -40,7 +40,7 @@
 #include "vtkOrderedTriangulator.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include <math.h>
-#include <assert.h>
+#include <cassert>
 #include <set>
 #include "vtkBitArray.h"
 #include "vtkIncrementalPointLocator.h"
diff --git a/Filters/HyperTree/vtkHyperOctreeCutter.cxx b/Filters/HyperTree/vtkHyperOctreeCutter.cxx
index f103d4f..f167674 100644
--- a/Filters/HyperTree/vtkHyperOctreeCutter.cxx
+++ b/Filters/HyperTree/vtkHyperOctreeCutter.cxx
@@ -40,7 +40,7 @@
 #include "vtkOrderedTriangulator.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include <math.h>
-#include <assert.h>
+#include <cassert>
 #include "vtkHyperOctreeClipCutPointsGrabber.h"
 #include "vtkIncrementalPointLocator.h"
 
diff --git a/Filters/HyperTree/vtkHyperOctreeDualGridContourFilter.cxx b/Filters/HyperTree/vtkHyperOctreeDualGridContourFilter.cxx
index 6dda9aa..5c5036c 100644
--- a/Filters/HyperTree/vtkHyperOctreeDualGridContourFilter.cxx
+++ b/Filters/HyperTree/vtkHyperOctreeDualGridContourFilter.cxx
@@ -40,7 +40,7 @@
 #include "vtkPolygon.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include <math.h>
-#include <assert.h>
+#include <cassert>
 #include <set>
 #include "vtkBitArray.h"
 #include "vtkTimerLog.h"
diff --git a/Filters/HyperTree/vtkHyperOctreeSampleFunction.cxx b/Filters/HyperTree/vtkHyperOctreeSampleFunction.cxx
index 878d07e..85812e3 100644
--- a/Filters/HyperTree/vtkHyperOctreeSampleFunction.cxx
+++ b/Filters/HyperTree/vtkHyperOctreeSampleFunction.cxx
@@ -19,7 +19,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkInformationVector.h"
 #include "vtkInformation.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkPointData.h"
 #include "vtkDataArray.h"
diff --git a/Filters/HyperTree/vtkHyperOctreeSurfaceFilter.cxx b/Filters/HyperTree/vtkHyperOctreeSurfaceFilter.cxx
index bb7266f..18888c8 100644
--- a/Filters/HyperTree/vtkHyperOctreeSurfaceFilter.cxx
+++ b/Filters/HyperTree/vtkHyperOctreeSurfaceFilter.cxx
@@ -20,7 +20,7 @@
 #include "vtkInformationVector.h"
 #include "vtkInformation.h"
 #include "vtkCellArray.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMergePoints.h"
 #include "vtkCellData.h"
 #include "vtkPointData.h"
diff --git a/Filters/HyperTree/vtkHyperOctreeToUniformGridFilter.cxx b/Filters/HyperTree/vtkHyperOctreeToUniformGridFilter.cxx
index ab2a45e..a07cfc9 100644
--- a/Filters/HyperTree/vtkHyperOctreeToUniformGridFilter.cxx
+++ b/Filters/HyperTree/vtkHyperOctreeToUniformGridFilter.cxx
@@ -20,7 +20,7 @@
 #include "vtkInformationVector.h"
 #include "vtkInformation.h"
 #include "vtkCellArray.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkCellData.h"
 #include "vtkPointData.h"
 #include "vtkImageData.h"
diff --git a/Filters/Imaging/vtkExtractHistogram2D.h b/Filters/Imaging/vtkExtractHistogram2D.h
index e500d25..d26e263 100644
--- a/Filters/Imaging/vtkExtractHistogram2D.h
+++ b/Filters/Imaging/vtkExtractHistogram2D.h
@@ -149,7 +149,7 @@ public:
 
   // Description:
   // Given a collection of models, calculate aggregate model. Not used.
-  virtual void Aggregate( vtkDataObjectCollection*, vtkMultiBlockDataSet* ) {};
+  virtual void Aggregate( vtkDataObjectCollection*, vtkMultiBlockDataSet* ) {}
 
 protected:
   vtkExtractHistogram2D();
@@ -176,7 +176,7 @@ protected:
 
   // Description:
   // Execute the calculations required by the Derive option. Not used.
-  virtual void Derive( vtkMultiBlockDataSet* ) {};
+  virtual void Derive( vtkMultiBlockDataSet* ) {}
 
   // Description:
   // Execute the calculations required by the Test option.
@@ -195,7 +195,7 @@ protected:
   virtual void SelectAssessFunctor( vtkTable* vtkNotUsed(outData),
                                     vtkDataObject* vtkNotUsed(inMeta),
                                     vtkStringArray* vtkNotUsed(rowNames),
-                                    AssessFunctor*& vtkNotUsed(dfunc) ) {};
+                                    AssessFunctor*& vtkNotUsed(dfunc) ) {}
 
   virtual int FillOutputPortInformation( int port, vtkInformation* info );
 
diff --git a/Filters/Imaging/vtkPairwiseExtractHistogram2D.cxx b/Filters/Imaging/vtkPairwiseExtractHistogram2D.cxx
index 5d7d9d7..1ad75f6 100644
--- a/Filters/Imaging/vtkPairwiseExtractHistogram2D.cxx
+++ b/Filters/Imaging/vtkPairwiseExtractHistogram2D.cxx
@@ -40,6 +40,7 @@ PURPOSE.  See the above copyright notice for more information.
 #define VTK_CREATE(type, name) \
   vtkSmartPointer<type> name = vtkSmartPointer<type>::New()
 //------------------------------------------------------------------------------
+#include <algorithm>
 #include <set>
 #include <vector>
 #include <string>
diff --git a/Filters/Imaging/vtkPairwiseExtractHistogram2D.h b/Filters/Imaging/vtkPairwiseExtractHistogram2D.h
index 2f6cf52..87b65bb 100644
--- a/Filters/Imaging/vtkPairwiseExtractHistogram2D.h
+++ b/Filters/Imaging/vtkPairwiseExtractHistogram2D.h
@@ -134,7 +134,7 @@ public:
 
   // Description:
   // Given a collection of models, calculate aggregate model.  Not used
-  virtual void Aggregate( vtkDataObjectCollection*, vtkMultiBlockDataSet* ) {};
+  virtual void Aggregate( vtkDataObjectCollection*, vtkMultiBlockDataSet* ) {}
 
 protected:
   vtkPairwiseExtractHistogram2D();
@@ -160,13 +160,13 @@ protected:
 
   // Description:
   // Execute the calculations required by the Derive option. Not used.
-  virtual void Derive( vtkMultiBlockDataSet* ) {};
+  virtual void Derive( vtkMultiBlockDataSet* ) {}
 
   // Description:
   // Execute the assess option. Not implemented.
   virtual void Assess( vtkTable*,
                        vtkMultiBlockDataSet*,
-                       vtkTable* ) {};
+                       vtkTable* ) {}
 
   // Description:
   // Execute the calculations required by the Test option.
@@ -179,7 +179,7 @@ protected:
   virtual void SelectAssessFunctor( vtkTable* vtkNotUsed(outData),
                                     vtkDataObject* vtkNotUsed(inMeta),
                                     vtkStringArray* vtkNotUsed(rowNames),
-                                    AssessFunctor*& vtkNotUsed(dfunc) ) {};
+                                    AssessFunctor*& vtkNotUsed(dfunc) ) {}
 
   // Description:
   // Generate a new histogram filter
diff --git a/Filters/Matlab/Testing/Cxx/TestMatlabEngineInterface.cxx b/Filters/Matlab/Testing/Cxx/TestMatlabEngineInterface.cxx
index 39d7e34..83dfc56 100644
--- a/Filters/Matlab/Testing/Cxx/TestMatlabEngineInterface.cxx
+++ b/Filters/Matlab/Testing/Cxx/TestMatlabEngineInterface.cxx
@@ -30,7 +30,7 @@
 #include <vtksys/stl/stdexcept>
 #include <stdio.h>
 #include <string.h>
-#include <assert.h>
+#include <cassert>
 
 namespace
 {
diff --git a/Filters/Matlab/vtkMatlabMexAdapter.cxx b/Filters/Matlab/vtkMatlabMexAdapter.cxx
index 40b418a..e22df8e 100644
--- a/Filters/Matlab/vtkMatlabMexAdapter.cxx
+++ b/Filters/Matlab/vtkMatlabMexAdapter.cxx
@@ -58,7 +58,7 @@
 #include "vtkDataObjectCollection.h"
 #include <vector>
 #include <algorithm>
-#include <assert.h>
+#include <cassert>
 
 #define VTK_CREATE(classname, varname) vtkSmartPointer<classname> varname = vtkSmartPointer<classname>::New()
 
diff --git a/Filters/Modeling/Testing/Cxx/CMakeLists.txt b/Filters/Modeling/Testing/Cxx/CMakeLists.txt
index 49fd9ff..17e4adb 100644
--- a/Filters/Modeling/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Modeling/Testing/Cxx/CMakeLists.txt
@@ -1,38 +1,10 @@
-set(testswithdata)
-if (VTK_DATA_ROOT)
-  set(testswithdata
-    TestQuadRotationalExtrusionMultiBlock.cxx
-  )
-endif()
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   TestButterflyScalars.cxx
   TestNamedColorsIntegration.cxx
   TestPolyDataPointSampler.cxx
   TestQuadRotationalExtrusion.cxx
+  TestQuadRotationalExtrusionMultiBlock.cxx
   TestRotationalExtrusion.cxx
   TestSelectEnclosedPoints.cxx
-  ${testswithdata}
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/Modeling/Testing/Data/Baseline/Hyper.png.md5 b/Filters/Modeling/Testing/Data/Baseline/Hyper.png.md5
new file mode 100644
index 0000000..a1b5703
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/Hyper.png.md5
@@ -0,0 +1 @@
+26dcb01fc43b6c1af5abb01bf4bcc143
diff --git a/Filters/Modeling/Testing/Data/Baseline/HyperScalarBar.png.md5 b/Filters/Modeling/Testing/Data/Baseline/HyperScalarBar.png.md5
new file mode 100644
index 0000000..9f81add
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/HyperScalarBar.png.md5
@@ -0,0 +1 @@
+3b4e67297fc9ffc3e701b242b6d42baa
diff --git a/Filters/Modeling/Testing/Data/Baseline/KlineBottle.png.md5 b/Filters/Modeling/Testing/Data/Baseline/KlineBottle.png.md5
new file mode 100644
index 0000000..e80434f
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/KlineBottle.png.md5
@@ -0,0 +1 @@
+101486c57b7be839559b6b4ef9ba12b2
diff --git a/Filters/Modeling/Testing/Data/Baseline/TenEllip.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TenEllip.png.md5
new file mode 100644
index 0000000..e91aa0f
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TenEllip.png.md5
@@ -0,0 +1 @@
+0f3584b8c85f40a739511d30e4eb3e51
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestBandedContourFilter.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestBandedContourFilter.png.md5
new file mode 100644
index 0000000..6583d30
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestBandedContourFilter.png.md5
@@ -0,0 +1 @@
+a0c974e8834d28fd8b565b935cee2527
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestBoxFunction.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestBoxFunction.png.md5
new file mode 100644
index 0000000..c5f4c4e
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestBoxFunction.png.md5
@@ -0,0 +1 @@
+e996c472dac23eb62c00c33ed6320aca
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestButterflyScalars.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestButterflyScalars.png.md5
new file mode 100644
index 0000000..2dc25da
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestButterflyScalars.png.md5
@@ -0,0 +1 @@
+82ac8e2cc1aa1a0a68546e3c61ac29a5
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestFillHolesFilter.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestFillHolesFilter.png.md5
new file mode 100644
index 0000000..286ffda
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestFillHolesFilter.png.md5
@@ -0,0 +1 @@
+8d2a7fdc52ba5571d42bae0ae0d9d83f
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestImageMarchingCubes.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestImageMarchingCubes.png.md5
new file mode 100644
index 0000000..8e6f291
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestImageMarchingCubes.png.md5
@@ -0,0 +1 @@
+14188a90f7628a31c5aa36879f0f3c39
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestNamedColorsIntegration.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestNamedColorsIntegration.png.md5
new file mode 100644
index 0000000..4efff55
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestNamedColorsIntegration.png.md5
@@ -0,0 +1 @@
+f063fb9fa9c0c79f20c68b9137993451
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestPolyDataPointSampler.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestPolyDataPointSampler.png.md5
new file mode 100644
index 0000000..67e2cfa
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestPolyDataPointSampler.png.md5
@@ -0,0 +1 @@
+75841c302176bc999d0600d122098dc2
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusion.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusion.png.md5
new file mode 100644
index 0000000..8c9df19
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusion.png.md5
@@ -0,0 +1 @@
+465b1ee61af18e45d7f05937b6eefbdd
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusionMultiBlock.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusionMultiBlock.png.md5
new file mode 100644
index 0000000..80d45ae
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusionMultiBlock.png.md5
@@ -0,0 +1 @@
+279602fc7e8f5f1a2ba5ef9dc04405fb
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusionMultiBlock_1.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusionMultiBlock_1.png.md5
new file mode 100644
index 0000000..3a2f86b
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestQuadRotationalExtrusionMultiBlock_1.png.md5
@@ -0,0 +1 @@
+d715d1d3de40095e2578c1a9e01a6542
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube.png.md5
new file mode 100644
index 0000000..3584fb6
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube.png.md5
@@ -0,0 +1 @@
+a73f60cc61b963fcfae8dc778091c6cc
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_1.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_1.png.md5
new file mode 100644
index 0000000..4fbc6c1
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_1.png.md5
@@ -0,0 +1 @@
+0862ccae2840bb5910de5b4040d59747
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_2.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_2.png.md5
new file mode 100644
index 0000000..ad5c7fc
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_2.png.md5
@@ -0,0 +1 @@
+0bc0cb4b7c6ff8c694424c43525d53b5
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_3.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_3.png.md5
new file mode 100644
index 0000000..1cb129e
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_3.png.md5
@@ -0,0 +1 @@
+1b2485597a8441d021dd4685d61aba74
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_4.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_4.png.md5
new file mode 100644
index 0000000..82d1cf5
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestRibbonAndTube_4.png.md5
@@ -0,0 +1 @@
+2d169a5b2fdf97a4d620370a8d9656c9
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestRotationalExtrusion.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestRotationalExtrusion.png.md5
new file mode 100644
index 0000000..a9e4363
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestRotationalExtrusion.png.md5
@@ -0,0 +1 @@
+2df5ca63a27fa7aeb50902abc19556c5
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestRuledSurface.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestRuledSurface.png.md5
new file mode 100644
index 0000000..dbc3e7e
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestRuledSurface.png.md5
@@ -0,0 +1 @@
+bed2cfdbfa4096e2b561ca1f2df0126d
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestRuledSurface2.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestRuledSurface2.png.md5
new file mode 100644
index 0000000..b048305
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestRuledSurface2.png.md5
@@ -0,0 +1 @@
+a82b631d35ea55c01d7995c9e7921272
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestSelectEnclosedPoints.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestSelectEnclosedPoints.png.md5
new file mode 100644
index 0000000..2091754
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestSelectEnclosedPoints.png.md5
@@ -0,0 +1 @@
+3cc1435ec2008e2afa86e819fd59ce92
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestSpherePuzzle.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestSpherePuzzle.png.md5
new file mode 100644
index 0000000..01e19e2
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestSpherePuzzle.png.md5
@@ -0,0 +1 @@
+91a7b671bc051bd37369d46d57fdcc02
diff --git a/Filters/Modeling/Testing/Data/Baseline/TestSpherePuzzleArrows.png.md5 b/Filters/Modeling/Testing/Data/Baseline/TestSpherePuzzleArrows.png.md5
new file mode 100644
index 0000000..083b79d
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/TestSpherePuzzleArrows.png.md5
@@ -0,0 +1 @@
+965f804940d504adaffd87dec5dd53d7
diff --git a/Filters/Modeling/Testing/Data/Baseline/camera.png.md5 b/Filters/Modeling/Testing/Data/Baseline/camera.png.md5
new file mode 100644
index 0000000..350a945
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/camera.png.md5
@@ -0,0 +1 @@
+43b177c132da9856ea92157701e3ce90
diff --git a/Filters/Modeling/Testing/Data/Baseline/contour2DAll.png.md5 b/Filters/Modeling/Testing/Data/Baseline/contour2DAll.png.md5
new file mode 100644
index 0000000..ded8847
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/contour2DAll.png.md5
@@ -0,0 +1 @@
+fdea0f4982add0accb499b049847f7d1
diff --git a/Filters/Modeling/Testing/Data/Baseline/contour3DAll.png.md5 b/Filters/Modeling/Testing/Data/Baseline/contour3DAll.png.md5
new file mode 100644
index 0000000..129a280
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/contour3DAll.png.md5
@@ -0,0 +1 @@
+ddcaea1cb540a8073303d289cb944684
diff --git a/Filters/Modeling/Testing/Data/Baseline/eleState.png.md5 b/Filters/Modeling/Testing/Data/Baseline/eleState.png.md5
new file mode 100644
index 0000000..1003824
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/eleState.png.md5
@@ -0,0 +1 @@
+03c380826e3ad562b1213aabb42e9830
diff --git a/Filters/Modeling/Testing/Data/Baseline/extrudeCopyCD.png.md5 b/Filters/Modeling/Testing/Data/Baseline/extrudeCopyCD.png.md5
new file mode 100644
index 0000000..d3392d8
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/extrudeCopyCD.png.md5
@@ -0,0 +1 @@
+70a1092fec0f17afb28074ec41f2507b
diff --git a/Filters/Modeling/Testing/Data/Baseline/schwarz.png.md5 b/Filters/Modeling/Testing/Data/Baseline/schwarz.png.md5
new file mode 100644
index 0000000..fa7d5a0
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/schwarz.png.md5
@@ -0,0 +1 @@
+463c512197eb7d614a81370285aff026
diff --git a/Filters/Modeling/Testing/Data/Baseline/smoothCyl.png.md5 b/Filters/Modeling/Testing/Data/Baseline/smoothCyl.png.md5
new file mode 100644
index 0000000..182ff57
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/smoothCyl.png.md5
@@ -0,0 +1 @@
+b2ef74d460b3220c8c9d789ee861ab85
diff --git a/Filters/Modeling/Testing/Data/Baseline/smoothCyl2.png.md5 b/Filters/Modeling/Testing/Data/Baseline/smoothCyl2.png.md5
new file mode 100644
index 0000000..e87c740
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/smoothCyl2.png.md5
@@ -0,0 +1 @@
+a8dcff55ca6fbd37a639990294a7f2fc
diff --git a/Filters/Modeling/Testing/Data/Baseline/subDivideTetra.png.md5 b/Filters/Modeling/Testing/Data/Baseline/subDivideTetra.png.md5
new file mode 100644
index 0000000..72ee468
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/subDivideTetra.png.md5
@@ -0,0 +1 @@
+b6e7cb2d0da89387a2847a6282d75d9e
diff --git a/Filters/Modeling/Testing/Data/Baseline/subdividePointData.png.md5 b/Filters/Modeling/Testing/Data/Baseline/subdividePointData.png.md5
new file mode 100644
index 0000000..68b2eee
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/subdividePointData.png.md5
@@ -0,0 +1 @@
+aafa6313115e5eb584f6661f1d30edcf
diff --git a/Filters/Modeling/Testing/Data/Baseline/sync3dAll.png.md5 b/Filters/Modeling/Testing/Data/Baseline/sync3dAll.png.md5
new file mode 100644
index 0000000..129a280
--- /dev/null
+++ b/Filters/Modeling/Testing/Data/Baseline/sync3dAll.png.md5
@@ -0,0 +1 @@
+ddcaea1cb540a8073303d289cb944684
diff --git a/Filters/Modeling/Testing/Python/CMakeLists.txt b/Filters/Modeling/Testing/Python/CMakeLists.txt
index c77f2dd..ed4c51e 100644
--- a/Filters/Modeling/Testing/Python/CMakeLists.txt
+++ b/Filters/Modeling/Testing/Python/CMakeLists.txt
@@ -1,28 +1,25 @@
-add_test_python(Hyper.py Graphics)
-add_test_python(HyperScalarBar.py Graphics)
-add_test_python(KlineBottle.py Graphics)
-add_test_python(TenEllip.py Graphics)
-add_test_python(TestBandedContourFilter.py Graphics)
-add_test_python(TestBoxFunction.py Graphics)
-add_test_python(TestImageMarchingCubes.py Graphics)
-add_test_python(TestRibbonAndTube.py Graphics)
-add_test_python(TestRuledSurface.py Graphics)
-add_test_python(TestRuledSurface2.py Graphics)
-add_test_python(TestSpherePuzzle.py Graphics)
-add_test_python(TestSpherePuzzleArrows.py Graphics)
-add_test_python(camera.py Hybrid)
-add_test_python(eleState.py Graphics)
-add_test_python(extrudeCopyCD.py Graphics)
-add_test_python(schwarz.py Graphics)
-add_test_python(smoothCyl.py Graphics)
-add_test_python(smoothCyl2.py Graphics)
-add_test_python(subDivideTetra.py Graphics)
-add_test_python(subdividePointData.py Graphics)
-add_test_python(TestFillHolesFilter.py Graphics)
-
-if (VTK_DATA_ROOT)
-  add_test_python1(TestNamedColorsIntegration.py Baseline/Graphics)
-  add_test_python1(contour2DAll.py Baseline/Graphics)
-  add_test_python1(contour3DAll.py Baseline/Graphics)
-  add_test_python1(sync3dAll.py Baseline/Graphics)
-endif ()
+vtk_add_test_python(Hyper.py)
+vtk_add_test_python(HyperScalarBar.py)
+vtk_add_test_python(KlineBottle.py)
+vtk_add_test_python(TenEllip.py)
+vtk_add_test_python(TestBandedContourFilter.py)
+vtk_add_test_python(TestBoxFunction.py)
+vtk_add_test_python(TestImageMarchingCubes.py)
+vtk_add_test_python(TestRibbonAndTube.py)
+vtk_add_test_python(TestRuledSurface.py)
+vtk_add_test_python(TestRuledSurface2.py)
+vtk_add_test_python(TestSpherePuzzle.py)
+vtk_add_test_python(TestSpherePuzzleArrows.py)
+vtk_add_test_python(camera.py)
+vtk_add_test_python(eleState.py)
+vtk_add_test_python(extrudeCopyCD.py)
+vtk_add_test_python(schwarz.py)
+vtk_add_test_python(smoothCyl.py)
+vtk_add_test_python(smoothCyl2.py)
+vtk_add_test_python(subDivideTetra.py)
+vtk_add_test_python(subdividePointData.py)
+vtk_add_test_python(TestFillHolesFilter.py)
+vtk_add_test_python(TestNamedColorsIntegration.py NO_RT)
+vtk_add_test_python(contour2DAll.py NO_RT)
+vtk_add_test_python(contour3DAll.py NO_RT)
+vtk_add_test_python(sync3dAll.py NO_RT)
diff --git a/Filters/Modeling/Testing/Python/TestNamedColorsIntegration.py b/Filters/Modeling/Testing/Python/TestNamedColorsIntegration.py
index e6f721b..aaf66f5 100755
--- a/Filters/Modeling/Testing/Python/TestNamedColorsIntegration.py
+++ b/Filters/Modeling/Testing/Python/TestNamedColorsIntegration.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestNamedColorsIntegration.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics/
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Modeling/Testing/Python/contour2DAll.py b/Filters/Modeling/Testing/Python/contour2DAll.py
index e8fc8d4..7bc2714 100755
--- a/Filters/Modeling/Testing/Python/contour2DAll.py
+++ b/Filters/Modeling/Testing/Python/contour2DAll.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython contour2DAll.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Modeling/Testing/Python/contour3DAll.py b/Filters/Modeling/Testing/Python/contour3DAll.py
index 406d532..6dbcd8c 100755
--- a/Filters/Modeling/Testing/Python/contour3DAll.py
+++ b/Filters/Modeling/Testing/Python/contour3DAll.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython contour3DAll.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Modeling/Testing/Python/smoothCyl.py b/Filters/Modeling/Testing/Python/smoothCyl.py
index 3570ae3..7df6a39 100755
--- a/Filters/Modeling/Testing/Python/smoothCyl.py
+++ b/Filters/Modeling/Testing/Python/smoothCyl.py
@@ -72,7 +72,7 @@ cylActor.GetProperty().SetColor(GetRGBColor('tomato'))
 # Add the actors to the renderer, set the background and size
 #
 ren1.AddActor(cylActor)
-# ren1 AddActor originalActor
+# ren1.AddActor(originalActor)
 ren1.SetBackground(1, 1, 1)
 
 renWin.SetSize(200, 300)
diff --git a/Filters/Modeling/Testing/Python/sync3dAll.py b/Filters/Modeling/Testing/Python/sync3dAll.py
index d40ac4a..62a9e56 100755
--- a/Filters/Modeling/Testing/Python/sync3dAll.py
+++ b/Filters/Modeling/Testing/Python/sync3dAll.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython sync3dAll.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
@@ -143,7 +139,7 @@ class sync3dAll(vtk.test.Testing.vtkTest):
         renWin.Render()
 
 
-        img_file = "contour3DAll.png"
+        img_file = "sync3dAll.png"
         vtk.test.Testing.compareImage(iRen.GetRenderWindow(), vtk.test.Testing.getAbsImagePath(img_file), threshold=25)
         vtk.test.Testing.interact()
 
diff --git a/Filters/Modeling/Testing/Tcl/CMakeLists.txt b/Filters/Modeling/Testing/Tcl/CMakeLists.txt
index ffb3592..617d742 100644
--- a/Filters/Modeling/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Modeling/Testing/Tcl/CMakeLists.txt
@@ -1,27 +1,24 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestImageMarchingCubes Graphics)
-  add_test_tcl(TestRibbonAndTube Graphics)
-  add_test_tcl(contour2DAll Graphics)
-  add_test_tcl(contour3DAll Graphics)
-  add_test_tcl(eleState Graphics)
-  add_test_tcl(sync3dAll Graphics)
-endif()
-
-add_test_tcl(camera Hybrid)
-add_test_tcl(extrudeCopyCD Graphics)
-add_test_tcl(Hyper Graphics)
-add_test_tcl(HyperScalarBar Graphics)
-add_test_tcl(KlineBottle Graphics)
-add_test_tcl(schwarz Graphics)
-add_test_tcl(smoothCyl Graphics)
-add_test_tcl(smoothCyl2 Graphics)
-add_test_tcl(subdividePointData Graphics)
-add_test_tcl(subDivideTetra Graphics)
-add_test_tcl(TenEllip Graphics)
-add_test_tcl(TestBandedContourFilter Graphics)
-add_test_tcl(TestBoxFunction Graphics)
-add_test_tcl(TestFillHolesFilter Graphics)
-add_test_tcl(TestRuledSurface Graphics)
-add_test_tcl(TestRuledSurface2 Graphics)
-add_test_tcl(TestSpherePuzzle Graphics)
-add_test_tcl(TestSpherePuzzleArrows Graphics)
+vtk_add_test_tcl(TestImageMarchingCubes)
+vtk_add_test_tcl(TestRibbonAndTube)
+vtk_add_test_tcl(contour2DAll)
+vtk_add_test_tcl(contour3DAll)
+vtk_add_test_tcl(eleState)
+vtk_add_test_tcl(sync3dAll)
+vtk_add_test_tcl(camera)
+vtk_add_test_tcl(extrudeCopyCD)
+vtk_add_test_tcl(Hyper)
+vtk_add_test_tcl(HyperScalarBar)
+vtk_add_test_tcl(KlineBottle)
+vtk_add_test_tcl(schwarz)
+vtk_add_test_tcl(smoothCyl)
+vtk_add_test_tcl(smoothCyl2)
+vtk_add_test_tcl(subdividePointData)
+vtk_add_test_tcl(subDivideTetra)
+vtk_add_test_tcl(TenEllip)
+vtk_add_test_tcl(TestBandedContourFilter)
+vtk_add_test_tcl(TestBoxFunction)
+vtk_add_test_tcl(TestFillHolesFilter)
+vtk_add_test_tcl(TestRuledSurface)
+vtk_add_test_tcl(TestRuledSurface2)
+vtk_add_test_tcl(TestSpherePuzzle)
+vtk_add_test_tcl(TestSpherePuzzleArrows)
diff --git a/Filters/Modeling/vtkBandedPolyDataContourFilter.cxx b/Filters/Modeling/vtkBandedPolyDataContourFilter.cxx
index 0065db9..9c2cd1f 100644
--- a/Filters/Modeling/vtkBandedPolyDataContourFilter.cxx
+++ b/Filters/Modeling/vtkBandedPolyDataContourFilter.cxx
@@ -143,7 +143,7 @@ int vtkBandedPolyDataContourFilter::ClipEdge(int v1, int v2,
 
 
 extern "C" {
-int vtkCompareClipValues(const void *val1, const void *val2)
+static int vtkCompareClipValues(const void *val1, const void *val2)
 {
   if ( *((double*)val1) < *((double*)val2) )
     {
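The hunk above gives the qsort comparator internal linkage. Marking a file-local helper as static keeps its symbol out of the global namespace, so it cannot collide with an identically named comparator in another translation unit and no external declaration is expected for it. A minimal standalone sketch of the same pattern; compareDoubles and sortValues are hypothetical names, not part of the patch:

    #include <cstdlib>

    // File-local comparator with internal linkage, as in the hunk above.
    static int compareDoubles(const void* a, const void* b)
    {
      const double lhs = *static_cast<const double*>(a);
      const double rhs = *static_cast<const double*>(b);
      return (lhs < rhs) ? -1 : (lhs > rhs ? 1 : 0);
    }

    static void sortValues(double* values, std::size_t count)
    {
      std::qsort(values, count, sizeof(double), compareDoubles);
    }
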
diff --git a/Filters/Modeling/vtkButterflySubdivisionFilter.h b/Filters/Modeling/vtkButterflySubdivisionFilter.h
index 7c52df5..7ae6546 100644
--- a/Filters/Modeling/vtkButterflySubdivisionFilter.h
+++ b/Filters/Modeling/vtkButterflySubdivisionFilter.h
@@ -58,8 +58,8 @@ public:
   vtkTypeMacro(vtkButterflySubdivisionFilter,vtkInterpolatingSubdivisionFilter);
 
 protected:
-  vtkButterflySubdivisionFilter () {};
-  ~vtkButterflySubdivisionFilter () {};
+  vtkButterflySubdivisionFilter () {}
+  ~vtkButterflySubdivisionFilter () {}
 
 private:
   void GenerateSubdivisionPoints(vtkPolyData *inputDS, vtkIntArray *edgeData,
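This header change, repeated in several of the files below, drops the stray semicolon after the empty inline constructor and destructor bodies. A brace-only body is already a complete definition, and the extra ';' is an empty declaration at class scope that pedantic compiler settings (clang's -Wextra-semi, for instance) tend to flag. A sketch of the corrected style, using a hypothetical class name:

    // vtkExampleFilter is hypothetical; only the brace style matters here.
    class vtkExampleFilter
    {
    public:
      vtkExampleFilter() {}    // no trailing ';' after an inline empty body
    protected:
      ~vtkExampleFilter() {}   // likewise for the destructor
    };
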
diff --git a/Filters/Modeling/vtkDijkstraGraphGeodesicPath.h b/Filters/Modeling/vtkDijkstraGraphGeodesicPath.h
index 7e67a92..9114964 100644
--- a/Filters/Modeling/vtkDijkstraGraphGeodesicPath.h
+++ b/Filters/Modeling/vtkDijkstraGraphGeodesicPath.h
@@ -83,10 +83,6 @@ public:
   virtual void SetRepelVertices(vtkPoints*);
   vtkGetObjectMacro(RepelVertices, vtkPoints);
 
-  // Description:
-  // TODO: Get the total geodesic length.
-  virtual double GetGeodesicLength() { return 0.0; }
-
   //Description:
   //Fill the array with the cumulative weights.
   virtual void GetCumulativeWeights(vtkDoubleArray *weights);
diff --git a/Filters/Modeling/vtkGeodesicPath.h b/Filters/Modeling/vtkGeodesicPath.h
index 7bf644a..eb31e47 100644
--- a/Filters/Modeling/vtkGeodesicPath.h
+++ b/Filters/Modeling/vtkGeodesicPath.h
@@ -34,8 +34,6 @@ public:
   vtkTypeMacro(vtkGeodesicPath,vtkPolyDataAlgorithm);
   void PrintSelf(ostream& os, vtkIndent indent);
 
-  virtual double GetGeodesicLength() = 0;
-
 protected:
   vtkGeodesicPath();
   ~vtkGeodesicPath();
diff --git a/Filters/Modeling/vtkLinearExtrusionFilter.h b/Filters/Modeling/vtkLinearExtrusionFilter.h
index 4e2b59a..2d5b8bb 100644
--- a/Filters/Modeling/vtkLinearExtrusionFilter.h
+++ b/Filters/Modeling/vtkLinearExtrusionFilter.h
@@ -104,7 +104,7 @@ public:
 
 protected:
   vtkLinearExtrusionFilter();
-  ~vtkLinearExtrusionFilter() {};
+  ~vtkLinearExtrusionFilter() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   int ExtrusionType;
diff --git a/Filters/Modeling/vtkLinearSubdivisionFilter.h b/Filters/Modeling/vtkLinearSubdivisionFilter.h
index 9ac8fad..ec7b58a 100644
--- a/Filters/Modeling/vtkLinearSubdivisionFilter.h
+++ b/Filters/Modeling/vtkLinearSubdivisionFilter.h
@@ -45,8 +45,8 @@ public:
   vtkTypeMacro(vtkLinearSubdivisionFilter,vtkInterpolatingSubdivisionFilter);
 
 protected:
-  vtkLinearSubdivisionFilter () {};
-  ~vtkLinearSubdivisionFilter () {};
+  vtkLinearSubdivisionFilter () {}
+  ~vtkLinearSubdivisionFilter () {}
 
   void GenerateSubdivisionPoints (vtkPolyData *inputDS,
                                   vtkIntArray *edgeData,
diff --git a/Filters/Modeling/vtkLoopSubdivisionFilter.h b/Filters/Modeling/vtkLoopSubdivisionFilter.h
index a1dd215..f6af3bf 100644
--- a/Filters/Modeling/vtkLoopSubdivisionFilter.h
+++ b/Filters/Modeling/vtkLoopSubdivisionFilter.h
@@ -60,8 +60,8 @@ public:
   vtkTypeMacro(vtkLoopSubdivisionFilter,vtkApproximatingSubdivisionFilter);
 
 protected:
-  vtkLoopSubdivisionFilter () {};
-  ~vtkLoopSubdivisionFilter () {};
+  vtkLoopSubdivisionFilter () {}
+  ~vtkLoopSubdivisionFilter () {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Modeling/vtkProjectedTexture.h b/Filters/Modeling/vtkProjectedTexture.h
index a6ca391..7308932 100644
--- a/Filters/Modeling/vtkProjectedTexture.h
+++ b/Filters/Modeling/vtkProjectedTexture.h
@@ -100,7 +100,7 @@ public:
 
 protected:
   vtkProjectedTexture();
-  ~vtkProjectedTexture() {};
+  ~vtkProjectedTexture() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   void ComputeNormal();
diff --git a/Filters/Modeling/vtkQuadRotationalExtrusionFilter.h b/Filters/Modeling/vtkQuadRotationalExtrusionFilter.h
index b5a87d1..0ce579d 100644
--- a/Filters/Modeling/vtkQuadRotationalExtrusionFilter.h
+++ b/Filters/Modeling/vtkQuadRotationalExtrusionFilter.h
@@ -128,7 +128,7 @@ class VTKFILTERSMODELING_EXPORT vtkQuadRotationalExtrusionFilter : public vtkMul
 
  protected:
   vtkQuadRotationalExtrusionFilter();
-  ~vtkQuadRotationalExtrusionFilter() {};
+  ~vtkQuadRotationalExtrusionFilter() {}
 
   int FillInputPortInformation( int , vtkInformation* );
   int RequestData( vtkInformation*,
diff --git a/Filters/Modeling/vtkRotationalExtrusionFilter.h b/Filters/Modeling/vtkRotationalExtrusionFilter.h
index 4274ecb..1142e76 100644
--- a/Filters/Modeling/vtkRotationalExtrusionFilter.h
+++ b/Filters/Modeling/vtkRotationalExtrusionFilter.h
@@ -98,7 +98,7 @@ public:
 
 protected:
   vtkRotationalExtrusionFilter();
-  ~vtkRotationalExtrusionFilter() {};
+  ~vtkRotationalExtrusionFilter() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   int Resolution;
diff --git a/Filters/Modeling/vtkSectorSource.h b/Filters/Modeling/vtkSectorSource.h
index fc9641b..c7c56ea 100644
--- a/Filters/Modeling/vtkSectorSource.h
+++ b/Filters/Modeling/vtkSectorSource.h
@@ -71,7 +71,7 @@ public:
 
 protected:
   vtkSectorSource();
-  ~vtkSectorSource() {};
+  ~vtkSectorSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double InnerRadius;
diff --git a/Filters/Modeling/vtkSelectPolyData.cxx b/Filters/Modeling/vtkSelectPolyData.cxx
index 9916732..2c3c85e 100644
--- a/Filters/Modeling/vtkSelectPolyData.cxx
+++ b/Filters/Modeling/vtkSelectPolyData.cxx
@@ -164,6 +164,7 @@ int vtkSelectPolyData::RequestData(
     {
     vtkErrorMacro("This filter operates on surface primitives");
     tf->Delete();
+    triMesh->UnRegister(this);
     return 1;
     }
 
@@ -233,25 +234,38 @@ int vtkSelectPolyData::RequestData(
       for (j=0; j<numNei; j++)
         {
         neiId = neighbors->GetId(j);
-        inPts->GetPoint(neiId, neiX);
-        for (k=0; k<3; k++)
+        if ( neiId == nextId )
           {
-          dir[k] = neiX[k] - x[k];
+          closest = neiId;
+          break;
           }
-        if ( neiId != prevId && vtkMath::Dot(dir,vec) > 0.0 ) //candidate
+        else
           {
-          dist2 = vtkLine::DistanceToLine(neiX, x0, x1);
-          if ( dist2 < closestDist2 )
+          inPts->GetPoint(neiId, neiX);
+          for (k=0; k<3; k++)
             {
-            closest = neiId;
-            closestDist2 = dist2;
+            dir[k] = neiX[k] - x[k];
             }
-          }//in direction of line
+          if ( neiId != prevId && vtkMath::Dot(dir,vec) > 0.0 ) //candidate
+            {
+            dist2 = vtkLine::DistanceToLine(neiX, x0, x1);
+            if ( dist2 < closestDist2 )
+              {
+              closest = neiId;
+              closestDist2 = dist2;
+              }
+            }//in direction of line
+          }
         }//for all neighbors
 
       if ( closest < 0 )
         {
         vtkErrorMacro(<<"Can't follow edge");
+        triMesh->UnRegister(this);
+        this->Mesh->Delete();
+        neighbors->Delete();
+        edgeIds->Delete();
+        loopIds->Delete();
         return 1;
         }
       else
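The vtkSelectPolyData.cxx hunk above pairs each early error return with the releases it was missing, so triMesh, this->Mesh, and the temporary id lists are no longer leaked when the filter aborts. A simplified, self-contained sketch of that cleanup pattern; processWithCleanup and failEarly are hypothetical and only stand in for the real error branches:

    #include <vtkIdList.h>

    static int processWithCleanup(bool failEarly)
    {
      vtkIdList* neighbors = vtkIdList::New();
      vtkIdList* edgeIds = vtkIdList::New();

      if (failEarly)
        {
        // mirrors the "Can't follow edge" branch: release before returning
        neighbors->Delete();
        edgeIds->Delete();
        return 1;
        }

      // ... normal processing would go here ...

      neighbors->Delete();
      edgeIds->Delete();
      return 0;
    }

A vtkSmartPointer would make the releases automatic, but the patch keeps the surrounding manual New()/Delete() style.
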
diff --git a/Filters/Modeling/vtkSelectPolyData.h b/Filters/Modeling/vtkSelectPolyData.h
index 04c8b51..39a2758 100644
--- a/Filters/Modeling/vtkSelectPolyData.h
+++ b/Filters/Modeling/vtkSelectPolyData.h
@@ -121,6 +121,11 @@ public:
   vtkGetObjectMacro(Loop,vtkPoints);
 
   // Description:
+  // Set/Get the point used in SelectionModeToClosestPointRegion.
+  vtkSetVector3Macro(ClosestPoint,double);
+  vtkGetVector3Macro(ClosestPoint,double);
+
+  // Description:
   // Control how inside/outside of loop is defined.
   vtkSetClampMacro(SelectionMode,int,
              VTK_INSIDE_SMALLEST_REGION,VTK_INSIDE_CLOSEST_POINT_REGION);
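The new ClosestPoint accessors above are generated by vtkSetVector3Macro and vtkGetVector3Macro, so the point used by the closest-point-region selection mode can be set from client code. A hedged usage sketch; selectClosestRegion is hypothetical and the loop/input wiring the filter also needs is omitted:

    #include <vtkSelectPolyData.h>

    static void selectClosestRegion(vtkSelectPolyData* select)
    {
      select->SetSelectionMode(VTK_INSIDE_CLOSEST_POINT_REGION);
      select->SetClosestPoint(0.0, 0.0, 0.0);  // vtkSetVector3Macro form

      double p[3];
      select->GetClosestPoint(p);              // vtkGetVector3Macro form
    }
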
diff --git a/Filters/Modeling/vtkSubdivideTetra.h b/Filters/Modeling/vtkSubdivideTetra.h
index 2e3940d..e9c0e0a 100644
--- a/Filters/Modeling/vtkSubdivideTetra.h
+++ b/Filters/Modeling/vtkSubdivideTetra.h
@@ -33,7 +33,7 @@ public:
 
 protected:
   vtkSubdivideTetra();
-  ~vtkSubdivideTetra() {};
+  ~vtkSubdivideTetra() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Parallel/CMakeLists.txt b/Filters/Parallel/CMakeLists.txt
index 6f471d8..bb8a7db 100644
--- a/Filters/Parallel/CMakeLists.txt
+++ b/Filters/Parallel/CMakeLists.txt
@@ -25,7 +25,6 @@ set(Module_SRCS
   vtkPReflectionFilter.cxx
   vtkProcessIdScalars.cxx
   vtkPSphereSource.cxx
-  vtkPTableToStructuredGrid.cxx
   vtkRectilinearGridOutlineFilter.cxx
   vtkTransmitPolyDataPiece.cxx
   vtkTransmitRectilinearGridPiece.cxx
diff --git a/Filters/Parallel/Testing/Cxx/CMakeLists.txt b/Filters/Parallel/Testing/Cxx/CMakeLists.txt
index d3a30ab..7b7f920 100644
--- a/Filters/Parallel/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Parallel/Testing/Cxx/CMakeLists.txt
@@ -1,6 +1,13 @@
-add_test_mpi(DistributedData.cxx DATADIR ${VTK_DATA_ROOT})
-add_test_mpi(DistributedDataRenderPass.cxx DATADIR ${VTK_DATA_ROOT})
-add_test_mpi(TransmitImageData.cxx)
-add_test_mpi(TransmitImageDataRenderPass.cxx)
-add_test_mpi(TransmitRectilinearGrid.cxx)
-add_test_mpi(TransmitStructuredGrid.cxx)
+include(vtkMPI)
+
+vtk_add_test_mpi(DistributedData.cxx TESTING_DATA)
+vtk_add_test_mpi(DistributedDataRenderPass.cxx TESTING_DATA)
+vtk_add_test_mpi(TransmitImageData.cxx TESTING_DATA)
+vtk_add_test_mpi(TransmitImageDataRenderPass.cxx TESTING_DATA)
+vtk_add_test_mpi(TransmitRectilinearGrid.cxx TESTING_DATA)
+vtk_add_test_mpi(TransmitStructuredGrid.cxx TESTING_DATA)
+
+vtk_mpi_link(TransmitImageData)
+vtk_mpi_link(TransmitImageDataRenderPass)
+vtk_mpi_link(TransmitRectilinearGrid)
+vtk_mpi_link(TransmitStructuredGrid)
diff --git a/Filters/Parallel/Testing/Cxx/DistributedDataRenderPass.cxx b/Filters/Parallel/Testing/Cxx/DistributedDataRenderPass.cxx
index 5959b7f..3dfac9a 100644
--- a/Filters/Parallel/Testing/Cxx/DistributedDataRenderPass.cxx
+++ b/Filters/Parallel/Testing/Cxx/DistributedDataRenderPass.cxx
@@ -255,7 +255,6 @@ void MyProcess::Execute()
   renderer->AddActor(actor);
 
   vtkRenderWindow *renWin = prm->MakeRenderWindow();
-  renWin->SetReportGraphicErrors(true);
   renWin->SetMultiSamples(0);
   renWin->SetAlphaBitPlanes(1);
 
diff --git a/Filters/Parallel/Testing/Data/Baseline/DistributedData.png.md5 b/Filters/Parallel/Testing/Data/Baseline/DistributedData.png.md5
new file mode 100644
index 0000000..cc69b21
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/DistributedData.png.md5
@@ -0,0 +1 @@
+b03a46d9eda16f060d754da5c29552bd
diff --git a/Filters/Parallel/Testing/Data/Baseline/DistributedDataRenderPass.png.md5 b/Filters/Parallel/Testing/Data/Baseline/DistributedDataRenderPass.png.md5
new file mode 100644
index 0000000..af79c7d
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/DistributedDataRenderPass.png.md5
@@ -0,0 +1 @@
+4ad2faf8b25a924714019876d22d6f35
diff --git a/Filters/Parallel/Testing/Data/Baseline/RectOutline.png.md5 b/Filters/Parallel/Testing/Data/Baseline/RectOutline.png.md5
new file mode 100644
index 0000000..4441f0f
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/RectOutline.png.md5
@@ -0,0 +1 @@
+9a8f65b3f4d57f946330eb454e37f45d
diff --git a/Filters/Parallel/Testing/Data/Baseline/TestCutMaterial.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TestCutMaterial.png.md5
new file mode 100644
index 0000000..50c91a8
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TestCutMaterial.png.md5
@@ -0,0 +1 @@
+48f3f03281952ea9739e2555605bb2e6
diff --git a/Filters/Parallel/Testing/Data/Baseline/TestExtractCTHPart.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TestExtractCTHPart.png.md5
new file mode 100644
index 0000000..fb99d36
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TestExtractCTHPart.png.md5
@@ -0,0 +1 @@
+2175bfb8782bf9a41d69280235685def
diff --git a/Filters/Parallel/Testing/Data/Baseline/TestExtrudePiece.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TestExtrudePiece.png.md5
new file mode 100644
index 0000000..32c9c2b
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TestExtrudePiece.png.md5
@@ -0,0 +1 @@
+9036ccc27815199ed68fcf865274b277
diff --git a/Filters/Parallel/Testing/Data/Baseline/TestImageStreamer.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TestImageStreamer.png.md5
new file mode 100644
index 0000000..fa13038
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TestImageStreamer.png.md5
@@ -0,0 +1 @@
+430a8d6a55cd2586b21e78682e90aa2f
diff --git a/Filters/Parallel/Testing/Data/Baseline/TransmitImageData.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TransmitImageData.png.md5
new file mode 100644
index 0000000..c6e542c
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TransmitImageData.png.md5
@@ -0,0 +1 @@
+835582cf62157730e8e6e74a3de0dc88
diff --git a/Filters/Parallel/Testing/Data/Baseline/TransmitImageDataRenderPass.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TransmitImageDataRenderPass.png.md5
new file mode 100644
index 0000000..f16801d
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TransmitImageDataRenderPass.png.md5
@@ -0,0 +1 @@
+fd17bcdae82846961dbe8276f72a31bc
diff --git a/Filters/Parallel/Testing/Data/Baseline/TransmitImageDataRenderPass_1.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TransmitImageDataRenderPass_1.png.md5
new file mode 100644
index 0000000..62de152
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TransmitImageDataRenderPass_1.png.md5
@@ -0,0 +1 @@
+a2ae2d2ba177004c5d468464296b0929
diff --git a/Filters/Parallel/Testing/Data/Baseline/TransmitRectilinearGrid.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TransmitRectilinearGrid.png.md5
new file mode 100644
index 0000000..f4f96c7
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TransmitRectilinearGrid.png.md5
@@ -0,0 +1 @@
+d612b5d2fd42b65dac90553ae87a0676
diff --git a/Filters/Parallel/Testing/Data/Baseline/TransmitStructuredGrid.png.md5 b/Filters/Parallel/Testing/Data/Baseline/TransmitStructuredGrid.png.md5
new file mode 100644
index 0000000..e0ccc93
--- /dev/null
+++ b/Filters/Parallel/Testing/Data/Baseline/TransmitStructuredGrid.png.md5
@@ -0,0 +1 @@
+48702db064afbf706fefa643c0e5a3c6
diff --git a/Filters/Parallel/Testing/Python/CMakeLists.txt b/Filters/Parallel/Testing/Python/CMakeLists.txt
index f732d8c..490fcd1 100644
--- a/Filters/Parallel/Testing/Python/CMakeLists.txt
+++ b/Filters/Parallel/Testing/Python/CMakeLists.txt
@@ -1,5 +1,5 @@
-add_test_python(RectOutline.py Parallel)
-add_test_python(TestCutMaterial.py Parallel)
-add_test_python(TestExtractCTHPart.py Parallel)
-add_test_python(TestExtrudePiece.py Parallel)
-add_test_python(TestImageStreamer.py Parallel)
+vtk_add_test_python(RectOutline.py)
+vtk_add_test_python(TestCutMaterial.py)
+vtk_add_test_python(TestExtractCTHPart.py)
+vtk_add_test_python(TestExtrudePiece.py)
+vtk_add_test_python(TestImageStreamer.py)
diff --git a/Filters/Parallel/Testing/Tcl/CMakeLists.txt b/Filters/Parallel/Testing/Tcl/CMakeLists.txt
index a626839..6ff23b3 100644
--- a/Filters/Parallel/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Parallel/Testing/Tcl/CMakeLists.txt
@@ -1,8 +1,5 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(RectOutline.tcl Parallel)
-  add_test_tcl(TestExtractCTHPart.tcl Parallel)
-  add_test_tcl(TestImageStreamer.tcl Parallel)
-endif()
-
-add_test_tcl(TestCutMaterial.tcl Parallel)
-add_test_tcl(TestExtrudePiece.tcl Parallel)
+vtk_add_test_tcl(RectOutline.tcl)
+vtk_add_test_tcl(TestExtractCTHPart.tcl)
+vtk_add_test_tcl(TestImageStreamer.tcl)
+vtk_add_test_tcl(TestCutMaterial.tcl)
+vtk_add_test_tcl(TestExtrudePiece.tcl)
diff --git a/Filters/Parallel/module.cmake b/Filters/Parallel/module.cmake
index e5dcdb2..95e5927 100644
--- a/Filters/Parallel/module.cmake
+++ b/Filters/Parallel/module.cmake
@@ -6,6 +6,7 @@ vtk_module(vtkFiltersParallel
     vtkFiltersExtraction
     vtkRenderingCore
     vtkFiltersModeling
+    vtkFiltersGeometry
   TEST_DEPENDS
     vtkParallelMPI
     vtkTestingRendering
diff --git a/Filters/Parallel/vtkExtractCTHPart.cxx b/Filters/Parallel/vtkExtractCTHPart.cxx
index d17f925..ee1a2a9 100644
--- a/Filters/Parallel/vtkExtractCTHPart.cxx
+++ b/Filters/Parallel/vtkExtractCTHPart.cxx
@@ -28,14 +28,16 @@
 #include "vtkDataSetSurfaceFilter.h"
 #include "vtkDoubleArray.h"
 #include "vtkExecutive.h"
+#include "vtkExtractCTHPart.h"
 #include "vtkGarbageCollector.h"
 #include "vtkImageData.h"
-#include "vtkInformationDoubleVectorKey.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
 #include "vtkMultiBlockDataSet.h"
 #include "vtkMultiProcessController.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
+#include "vtkPlaneCollection.h"
 #include "vtkPlane.h"
 #include "vtkPointData.h"
 #include "vtkPolyData.h"
@@ -45,10 +47,11 @@
 #include "vtkToolkits.h"
 #include "vtkUniformGrid.h"
 
+#include <algorithm>
+#include <cassert>
 #include <math.h>
 #include <string>
 #include <vector>
-#include <assert.h>
 
 vtkStandardNewMacro(vtkExtractCTHPart);
 vtkCxxSetObjectMacro(vtkExtractCTHPart,ClipPlane,vtkPlane);
@@ -63,45 +66,75 @@ const double CTH_AMR_SURFACE_VALUE_UNSIGNED_CHAR=255;
 class vtkExtractCTHPartInternal
 {
 public:
-  std::vector<std::string> VolumeArrayNames;
-  int DataType;
+  typedef std::vector<std::string> VolumeArrayNamesType;
+  VolumeArrayNamesType VolumeArrayNames;
+  vtkBoundingBox GlobalInputBounds;
+
+  // Counter used to scale progress events.
+  int TotalNumberOfDatasets;
+};
+
+class vtkExtractCTHPart::VectorOfFragments :
+  public std::vector<vtkSmartPointer<vtkPolyData> >
+{
 };
+
+class vtkExtractCTHPart::ScaledProgress
+{
+  vtkExtractCTHPart* Self;
+  double Shift;
+  double Scale;
+public:
+  ScaledProgress(double shift, double scale, vtkExtractCTHPart* self)
+    {
+    assert((self != NULL) &&
+      (shift >= 0.0) && (shift <= 1.0) &&
+      (scale >= 0.0) && (scale <= 1.0));
+
+    this->Self = self;
+    this->Shift = self->ProgressShift;
+    this->Scale = self->ProgressScale;
+
+    self->ProgressShift += shift * self->ProgressScale;
+    self->ProgressScale *= scale;
+    //cout << "Shift-Scale Push: " << self->ProgressShift << ", " <<
+    //  self->ProgressScale << endl;
+    }
+
+  ~ScaledProgress()
+    {
+    this->WorkDone();
+    }
+
+  void WorkDone()
+    {
+    if (this->Self)
+      {
+      this->Self->ProgressScale = this->Scale;
+      this->Self->ProgressShift = this->Shift;
+      //cout << "Shift-Scale Pop: " << this->Self->ProgressShift << ", " <<
+      //  this->Self->ProgressScale << endl;
+      this->Self = NULL;
+      }
+    }
+};
+
+
 //=============================================================================
 //-----------------------------------------------------------------------------
 
 //-----------------------------------------------------------------------------
 vtkExtractCTHPart::vtkExtractCTHPart()
 {
-  this->Internals = new vtkExtractCTHPartInternal;
-  this->Internals->DataType=0;
-
-  this->Bounds = new vtkBoundingBox;
-  this->ClipPlane = 0;
-
-  this->PointVolumeFraction=0;
-
-  this->Data=0;
-  this->Contour=0;
-  this->Append2=0;
-  this->Clip1=0;
-  this->Cut=0;
-  this->Clip2=0;
-
-  this->PolyData=0;
-  this->PolyDataProducer=0;
-  this->SurfacePolyData=0;
-  this->RPolyData=0;
-  this->RPolyDataProducer = 0;
-
-  this->RData=0;
-  this->RContour=0;
-  this->RAppend2=0;
-  this->RClip1=0;
-  this->RCut=0;
-  this->RClip2=0;
-  this->VolumeFractionType = -1;
+  this->Internals = new vtkExtractCTHPartInternal();
+  this->ClipPlane = NULL;
+  this->GenerateTriangles = true;
+  this->Capping = true;
+  this->RemoveGhostCells = true;
   this->VolumeFractionSurfaceValueInternal = CTH_AMR_SURFACE_VALUE;
   this->VolumeFractionSurfaceValue = CTH_AMR_SURFACE_VALUE;
+  this->ProgressScale = 1.0;
+  this->ProgressShift = 0.0;
 
   this->Controller = 0;
   this->SetController(vtkMultiProcessController::GetGlobalController());
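The ScaledProgress helper introduced above is a small RAII guard: its constructor narrows the filter's (ProgressShift, ProgressScale) window for the duration of a scope, and the destructor (via WorkDone) restores the previous window, so nested scopes compose without manual bookkeeping. A simplified standalone sketch of the same idea; ProgressState and ScopedProgress are hypothetical names:

    struct ProgressState
    {
      double Shift;
      double Scale;
      ProgressState() : Shift(0.0), Scale(1.0) {}
    };

    class ScopedProgress
    {
      ProgressState* State;
      double SavedShift;
      double SavedScale;
    public:
      ScopedProgress(ProgressState* state, double shift, double scale)
        : State(state), SavedShift(state->Shift), SavedScale(state->Scale)
        {
        // same arithmetic as the patch: the new window nests inside the
        // current one
        state->Shift += shift * state->Scale;
        state->Scale *= scale;
        }

      ~ScopedProgress()
        {
        // restore the enclosing window when the scope ends
        State->Shift = SavedShift;
        State->Scale = SavedScale;
        }
    };
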
@@ -110,12 +143,11 @@ vtkExtractCTHPart::vtkExtractCTHPart()
 //-----------------------------------------------------------------------------
 vtkExtractCTHPart::~vtkExtractCTHPart()
 {
+  this->SetController(NULL);
   this->SetClipPlane(NULL);
+
   delete this->Internals;
-  delete this->Bounds;
   this->Internals = 0;
-  this->DeleteInternalPipeline();
-  this->SetController(0);
 }
 
 //-----------------------------------------------------------------------------
@@ -123,138 +155,43 @@ vtkExtractCTHPart::~vtkExtractCTHPart()
 // then this object is modified as well.
 unsigned long vtkExtractCTHPart::GetMTime()
 {
-  unsigned long mTime=this->Superclass::GetMTime();
-  unsigned long time;
-
+  unsigned long mTime= this->Superclass::GetMTime();
   if (this->ClipPlane)
     {
-    time = this->ClipPlane->GetMTime();
-    mTime = ( time > mTime ? time : mTime );
+    unsigned long time = this->ClipPlane->GetMTime();
+    return time > mTime? time : mTime;
     }
 
   return mTime;
 }
 
 //-----------------------------------------------------------------------------
-void vtkExtractCTHPart::RemoveAllVolumeArrayNames()
-{
-  this->Internals->VolumeArrayNames.erase(
-    this->Internals->VolumeArrayNames.begin(),
-    this->Internals->VolumeArrayNames.end());
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
-void vtkExtractCTHPart::RemoveDoubleVolumeArrayNames()
-{
-  if (this->Internals->DataType != VTK_DOUBLE)
-    {
-    return;
-    }
-
-  this->Internals->VolumeArrayNames.erase(
-    this->Internals->VolumeArrayNames.begin(),
-    this->Internals->VolumeArrayNames.end());
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
-void vtkExtractCTHPart::RemoveFloatVolumeArrayNames()
-{
-  if (this->Internals->DataType != VTK_FLOAT)
-    {
-    return;
-    }
-
-  this->Internals->VolumeArrayNames.erase(
-    this->Internals->VolumeArrayNames.begin(),
-    this->Internals->VolumeArrayNames.end());
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
-void vtkExtractCTHPart::RemoveUnsignedCharVolumeArrayNames()
-{
-  if (this->Internals->DataType != VTK_UNSIGNED_CHAR)
-    {
-    return;
-    }
-
-  this->Internals->VolumeArrayNames.erase(
-    this->Internals->VolumeArrayNames.begin(),
-    this->Internals->VolumeArrayNames.end());
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
-void vtkExtractCTHPart::AddVolumeArrayName(char* arrayName)
-{
-  if(arrayName==0)
-    {
-    return;
-    }
-
-  this->Internals->DataType = 0;
-  this->Internals->VolumeArrayNames.push_back(arrayName);
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
-void vtkExtractCTHPart::AddDoubleVolumeArrayName(char* arrayName)
+void vtkExtractCTHPart::RemoveVolumeArrayNames()
 {
-  if(arrayName==0)
-    {
-    return;
-    }
-
-  if (this->Internals->DataType != VTK_DOUBLE)
-    {
-    this->RemoveAllVolumeArrayNames();
-    this->Internals->DataType = VTK_DOUBLE;
-    }
-
-  this->Internals->VolumeArrayNames.push_back(arrayName);
+  this->Internals->VolumeArrayNames.clear();
   this->Modified();
 }
 
 //-----------------------------------------------------------------------------
-void vtkExtractCTHPart::AddFloatVolumeArrayName(char* arrayName)
+void vtkExtractCTHPart::AddVolumeArrayName(const char* arrayName)
 {
-  if(arrayName==0)
+  if (arrayName !=0 &&
+    arrayName[0] != 0 &&
+    std::find(this->Internals->VolumeArrayNames.begin(),
+      this->Internals->VolumeArrayNames.end(), std::string(arrayName))==
+    this->Internals->VolumeArrayNames.end())
     {
-    return;
-    }
+    this->Internals->VolumeArrayNames.push_back(arrayName);
 
-  if (this->Internals->DataType != VTK_FLOAT)
-    {
-    this->RemoveAllVolumeArrayNames();
-    this->Internals->DataType = VTK_FLOAT;
+    // Ensure that the volume arrays stay in a deterministic order; a
+    // std::set would maintain the same invariant more directly.
+    std::sort(this->Internals->VolumeArrayNames.begin(),
+      this->Internals->VolumeArrayNames.end());
+    this->Modified();
     }
-
-  this->Internals->VolumeArrayNames.push_back(arrayName);
-  this->Modified();
 }
 
 //-----------------------------------------------------------------------------
-void vtkExtractCTHPart::AddUnsignedCharVolumeArrayName(char* arrayName)
-{
-  if(arrayName==0)
-    {
-    return;
-    }
-
-  if (this->Internals->DataType != VTK_UNSIGNED_CHAR)
-    {
-    this->RemoveAllVolumeArrayNames();
-    this->Internals->DataType = VTK_UNSIGNED_CHAR;
-    }
-
-  this->Internals->VolumeArrayNames.push_back(arrayName);
-  this->Modified();
-}
-
-
-//-----------------------------------------------------------------------------
 int vtkExtractCTHPart::GetNumberOfVolumeArrayNames()
 {
   return static_cast<int>(this->Internals->VolumeArrayNames.size());
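AddVolumeArrayName above now rejects empty and duplicate names and keeps the list sorted so the processing order is deterministic. A standalone sketch of that check, where addName is a hypothetical free function; as the in-code comment notes, a std::set<std::string> would maintain the same invariant automatically:

    #include <algorithm>
    #include <string>
    #include <vector>

    static bool addName(std::vector<std::string>& names, const char* name)
    {
      if (name == 0 || name[0] == 0 ||
          std::find(names.begin(), names.end(),
                    std::string(name)) != names.end())
        {
        return false;  // empty or already present: nothing to add
        }
      names.push_back(name);
      std::sort(names.begin(), names.end());  // keep a deterministic order
      return true;
    }
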
@@ -268,308 +205,111 @@ const char* vtkExtractCTHPart::GetVolumeArrayName(int idx)
     {
     return 0;
     }
+
   return this->Internals->VolumeArrayNames[idx].c_str();
 }
 
-
 //----------------------------------------------------------------------------
-int vtkExtractCTHPart::FillInputPortInformation(int port,
-                                                vtkInformation *info)
+int vtkExtractCTHPart::FillInputPortInformation(
+  int port, vtkInformation *info)
 {
-  if(!this->Superclass::FillInputPortInformation(port,info))
+  if (!this->Superclass::FillInputPortInformation(port,info))
     {
     return 0;
     }
-  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkDataObject");
 
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-int vtkExtractCTHPart::RequestInformation(
-  vtkInformation *vtkNotUsed(request),
-  vtkInformationVector **vtkNotUsed(inputVector),
-  vtkInformationVector *outputVector)
-{
-  vtkInformation* outInfo;
-
-  int num=this->GetNumberOfOutputPorts();
-  int port;
-  for(port = 0; port<num; port++)
-    {
-    outInfo=outputVector->GetInformationObject(port);
-    // RequestData() synchronizes (communicates among processes), so we need
-    // all procs to call RequestData().
-    outInfo->Set(vtkStreamingDemandDrivenPipeline::MAXIMUM_NUMBER_OF_PIECES(),
-                 -1);
-    }
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkNonOverlappingAMR");
+  info->Append(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkMultiBlockDataSet");
+  info->Append(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkRectilinearGrid");
   return 1;
 }
 
 //-----------------------------------------------------------------------------
-int vtkExtractCTHPart::RequestData(
-  vtkInformation *vtkNotUsed(request),
-  vtkInformationVector **inputVector,
-  vtkInformationVector *outputVector)
+int vtkExtractCTHPart::RequestData(vtkInformation *vtkNotUsed(request),
+  vtkInformationVector **inputVector, vtkInformationVector *outputVector)
 {
-  this->VolumeFractionType = -1;
-  // get the info objects
-  vtkInformation *inInfo  = inputVector[0]->GetInformationObject(0);
-  vtkInformation *outInfo = outputVector->GetInformationObject(0);
-
-  int processNumber =
-    outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER());
-  int numProcessors =
-    outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES());
-  if(this->Controller==0)
+  const int number_of_volume_arrays = static_cast<int>(this->Internals->VolumeArrayNames.size());
+  if (number_of_volume_arrays == 0)
     {
-    processNumber=0;
-    numProcessors=1;
+    // nothing to do.
+    return 1;
     }
 
-  // get the input and output
-  vtkCompositeDataSet* input = vtkCompositeDataSet::GetData(inInfo);
-
-  vtkMultiBlockDataSet* mbOutput= vtkMultiBlockDataSet::GetData(outInfo);
-  unsigned int numBlocks = static_cast<unsigned int>(
-    this->Internals->VolumeArrayNames.size());
-  mbOutput->SetNumberOfBlocks(numBlocks);
-  for (unsigned int i=0; i<numBlocks; i++)
-    {
-    vtkMultiBlockDataSet* block = vtkMultiBlockDataSet::New();
-    block->SetNumberOfBlocks(numProcessors);
-    vtkPolyData* pd = vtkPolyData::New();
-    block->SetBlock(processNumber, pd);
-    mbOutput->SetBlock(i, block);
-    pd->Delete();
-    block->Delete();
-    }
+  vtkDataObject* inputDO = vtkDataObject::GetData(inputVector[0], 0);
+  vtkSmartPointer<vtkCompositeDataSet> inputCD = vtkCompositeDataSet::SafeDownCast(inputDO);
+  vtkRectilinearGrid* inputRG = vtkRectilinearGrid::SafeDownCast(inputDO);
+  assert(inputCD != NULL || inputRG != NULL);
 
-  vtkRectilinearGrid *rg=0;
-
-  if(input!=0)
-    {
-    vtkCompositeDataIterator* iter = input->NewIterator();
-    iter->InitTraversal();
-    int empty_input =  iter->IsDoneWithTraversal();
-    iter->Delete();
-    if(empty_input)
-      {
-      // empty input, do nothing.
-      return 1;
-      }
-    if(inInfo->Has(vtkStreamingDemandDrivenPipeline::BOUNDS()))
-      {
-      double b[6];
-      inInfo->Get(vtkStreamingDemandDrivenPipeline::BOUNDS(), b);
-      this->Bounds->SetBounds(b);
-      }
-    else
-      {
-      // compute the bounds
-      this->ComputeBounds(input,processNumber,numProcessors);
-      }
-    }
-  else
+  if (inputRG)
     {
-    rg=vtkRectilinearGrid::SafeDownCast(
-      inInfo->Get(vtkDataObject::DATA_OBJECT()));
-    if(rg==0)
-      {
-      vtkErrorMacro(<<"No valid input.");
-      return 0;
-      }
-    double b[6];
-    rg->GetBounds(b);
-    this->Bounds->SetBounds(b);
+    vtkNew<vtkMultiBlockDataSet> mb;
+    mb->SetBlock(0, inputRG);
+    inputCD = mb.GetPointer();
     }
 
-  // Here, either input or rg is not null.
-  //
-  this->EvaluateVolumeFractionType(rg, input);
-
-  int idx, num;
-  const char* arrayName;
-  vtkPolyData *output;
+  vtkMultiBlockDataSet* output = vtkMultiBlockDataSet::GetData(outputVector, 0);
 
-  num = this->GetNumberOfVolumeArrayNames();
+  // Initialize the output multiblock dataset. It always has as many blocks
+  // as the number of volume arrays requested.
+  output->SetNumberOfBlocks(number_of_volume_arrays);
 
-  // Create an append for each part (one part per output).
-  vtkAppendPolyData **appendSurface=new  vtkAppendPolyData *[num];
-  vtkAppendPolyData **tmps=new vtkAppendPolyData *[num];
-  for (idx = 0; idx < num; ++idx)
+  // Compute global bounds for the input dataset. These are used to generate
+  // the external surface of the dataset.
+  if (!this->ComputeGlobalBounds(inputCD))
     {
-    appendSurface[idx]= vtkAppendPolyData::New();
-    tmps[idx]=vtkAppendPolyData::New();
-    }
-  int needPartIndex=num>1;
-
-  vtkGarbageCollector::DeferredCollectionPush();
-  this->CreateInternalPipeline();
-
-  float progress, nextProgress;
-  if(input!=0)
-    {
-    for (idx = 0; idx < num; ++idx)
-      {
-      vtkMultiBlockDataSet* block = vtkMultiBlockDataSet::SafeDownCast(
-        mbOutput->GetBlock(idx));
-      arrayName = this->GetVolumeArrayName(idx);
-      output = vtkPolyData::SafeDownCast(block->GetBlock(processNumber));
-      if(output==0)
-        {
-        vtkErrorMacro(<<"No output.");
-        return 0;
-        }
-      progress = (1.0/num)*idx;
-      nextProgress = progress + 1.0/num;
-      this->ExecutePart(arrayName,input,appendSurface[idx],tmps[idx],
-                          progress,nextProgress);
-      }
-    }
-  else // rg!=0
-    {
-    for (idx = 0; idx < num; ++idx)
-      {
-      arrayName = this->GetVolumeArrayName(idx);
-      vtkMultiBlockDataSet* block = vtkMultiBlockDataSet::SafeDownCast(
-        mbOutput->GetBlock(idx));
-      output = vtkPolyData::SafeDownCast(block->GetBlock(processNumber));
-      if(output==0)
-        {
-        vtkErrorMacro(<<"No output.");
-        return 0;
-        }
-      progress = (1.0/num)*idx;
-      nextProgress = progress + 1.0/num;
-      // Does the grid have the requested cell data?
-      if (rg->GetCellData()->GetArray(arrayName))
-        {
-        this->ExecutePartOnRectilinearGrid(arrayName,rg,appendSurface[idx],
-                                           tmps[idx], progress, nextProgress);
-        }
-      else
-        {
-        vtkWarningMacro("RectilinearGrid does not contain CellData named "
-                        << arrayName);
-        vtkPolyData *tmp=vtkPolyData::New();
-        tmps[idx]->AddInputData(tmp);
-        tmp->Delete();
-        }
-      }
+    vtkErrorMacro("Failed to compute global bounds.");
+    return 0;
     }
 
-  //vtkDataArray* cellVolumeFraction;
-  //cellVolumeFraction = input->GetCellData()->GetArray(arrayName);
-  //if (cellVolumeFraction == NULL)
-  //  {
-  //  vtkErrorMacro("Could not find cell array " << arrayName);
-  //  return;
-  //  }
-  //double* range = cellVolumeFraction->GetRange();
-  //cout << "@@@@ Range: " << range[0] << " " range[1] << " midpoint: " << ((range[0] + range[1]) * .5) << endl;
-
-  vtkClipPolyData* clip = vtkClipPolyData::New();
-  clip->SetValue(this->VolumeFractionSurfaceValueInternal);
-  vtkClipPolyData *clip2=clip;
-  if (this->ClipPlane)
+  if (!this->Internals->GlobalInputBounds.IsValid())
     {
-    // We need another clip for the plane.  Sneak it in.
-    clip2 = vtkClipPolyData::New();
-    clip2->SetInputConnection(clip->GetOutputPort());
-    clip->Delete();
-    clip2->SetClipFunction(this->ClipPlane);
+    // empty input, do nothing.
+    return 1;
     }
 
-  for (idx = 0; idx < num; ++idx)
+  unsigned int array_index = 0;
+  for (vtkExtractCTHPartInternal::VolumeArrayNamesType::iterator iter =
+    this->Internals->VolumeArrayNames.begin();
+    iter != this->Internals->VolumeArrayNames.end(); ++iter, ++array_index)
     {
-    arrayName=this->GetVolumeArrayName(idx);
-    vtkMultiBlockDataSet* pieces = vtkMultiBlockDataSet::SafeDownCast(
-      mbOutput->GetBlock(idx));
-    int inputConns = appendSurface[idx]->GetNumberOfInputConnections(0);
+    // each iteration of this loop does 1/(num-arrays)'th of the entire task.
+    ScaledProgress sp(
+      array_index * 1.0 / this->Internals->VolumeArrayNames.size(),
+      1.0/this->Internals->VolumeArrayNames.size(),
+      this);
 
+    output->GetMetaData(array_index)->Set(vtkCompositeDataSet::NAME(), iter->c_str());
 
-    if (inputConns > 0)
+    vtkNew<vtkPolyData> contour;
+    vtkGarbageCollector::DeferredCollectionPush();
+    if (this->ExtractContour(contour.GetPointer(), inputCD, iter->c_str()) &&
+      (contour->GetNumberOfPoints() > 0))
       {
-      // we have to update the output before get its point data.
-      appendSurface[idx]->Update();
-#ifndef NDEBUG
-      int checkIndex=appendSurface[idx]->GetOutput()->GetPointData()->SetActiveScalars(arrayName);
-      assert("check: SetActiveScalar succeeded" && checkIndex>=0);
-#else
-      appendSurface[idx]->GetOutput()->GetPointData()->SetActiveScalars(arrayName);
-#endif
-      clip->SetInputConnection(appendSurface[idx]->GetOutputPort());
-      clip2->Update();
-      }
+      // Add extra arrays.
+      vtkNew<vtkIntArray> partArray;
+      partArray->SetName("Part Index");
+      partArray->SetNumberOfComponents(1);
+      partArray->SetNumberOfTuples(contour->GetNumberOfPoints());
+      partArray->FillComponent(0, static_cast<double>(array_index));
+      contour->GetPointData()->AddArray(partArray.GetPointer());
 
-#if 1
-    tmps[idx]->AddInputConnection(clip2->GetOutputPort());
-#else
-    tmps[idx]->AddInputConnection(appendSurface[idx]->GetOutputPort());
-#endif
+      // The "Name" field-data array from the previous implementation is
+      // intentionally not added here; it was of little use.
 
-    output = vtkPolyData::SafeDownCast(pieces->GetBlock(processNumber));
-    if (inputConns > 0)
-      {
-      vtkTimerLog::MarkStartEvent("BlockAppend");
-      tmps[idx]->Update();
-      vtkTimerLog::MarkEndEvent("BlockAppend");
+      output->SetBlock(array_index, contour.GetPointer());
       }
-
-    vtkPolyData* tmpOut = tmps[idx]->GetOutput();
-    output->CopyStructure(tmpOut);
-    output->GetPointData()->PassData(tmpOut->GetPointData());
-    output->GetCellData()->PassData(tmpOut->GetCellData());
-    output->GetFieldData()->PassData(tmpOut->GetFieldData());
-    // Hopping to avoid some garbage collection time.
-    tmps[idx]->RemoveAllInputs();
-    tmps[idx]->Delete();
-    appendSurface[idx]->Delete();
-    tmps[idx] = 0;
-
-    // In the future we might be able to select the rgb color here.
-    if (needPartIndex)
-      {
-      // Add scalars to color this part.
-      int numPts = output->GetNumberOfPoints();
-      vtkDoubleArray *partArray = vtkDoubleArray::New();
-      partArray->SetName("Part Index");
-      double *p = partArray->WritePointer(0, numPts);
-      for (int idx2 = 0; idx2 < numPts; ++idx2)
-        {
-        p[idx2] = static_cast<double>(idx);
-        }
-      output->GetPointData()->SetScalars(partArray);
-      partArray->Delete();
-      }
-
-    // Add a name for this part.
-    vtkCharArray *nameArray = vtkCharArray::New();
-    nameArray->SetName("Name");
-    char *str = nameArray->WritePointer(0, (int)(strlen(arrayName))+1);
-    sprintf(str, "%s", arrayName);
-    output->GetFieldData()->AddArray(nameArray);
-    nameArray->Delete();
+    vtkGarbageCollector::DeferredCollectionPop();
     }
-  delete[] tmps;
-  delete[] appendSurface;
-  clip2->Delete();
-  this->DeleteInternalPipeline();
-  vtkGarbageCollector::DeferredCollectionPop();
-
   return 1;
 }
 
 //-----------------------------------------------------------------------------
-void vtkExtractCTHPart::ComputeBounds(vtkCompositeDataSet *input,
-                                      int processNumber,
-                                      int numProcessors)
+bool vtkExtractCTHPart::ComputeGlobalBounds(vtkCompositeDataSet *input)
 {
   assert("pre: input_exists" && input!=0);
-  assert("pre: positive_numProcessors" && numProcessors>0);
-  assert("pre: valid_processNumber" && processNumber>=0 &&
-         processNumber<numProcessors);
+  this->Internals->GlobalInputBounds.Reset();
+
+  this->Internals->TotalNumberOfDatasets = 0;
 
   vtkCompositeDataIterator* iter = input->NewIterator();
   for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
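The RequestData rewrite above produces one output block per requested volume array and tags every point of a non-empty contour with a "Part Index" integer array naming the volume-fraction array it came from, replacing the old per-part vtkDoubleArray and "Name" field arrays. A minimal sketch of that tagging step; tagContour is a hypothetical helper:

    #include <vtkIntArray.h>
    #include <vtkNew.h>
    #include <vtkPointData.h>
    #include <vtkPolyData.h>

    static void tagContour(vtkPolyData* contour, int arrayIndex)
    {
      vtkNew<vtkIntArray> partArray;
      partArray->SetName("Part Index");
      partArray->SetNumberOfComponents(1);
      partArray->SetNumberOfTuples(contour->GetNumberOfPoints());
      partArray->FillComponent(0, static_cast<double>(arrayIndex));
      contour->GetPointData()->AddArray(partArray.GetPointer());
    }
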
@@ -581,661 +321,266 @@ void vtkExtractCTHPart::ComputeBounds(vtkCompositeDataSet *input,
       }
     double realBounds[6];
     ds->GetBounds(realBounds);
-    this->Bounds->AddBounds(realBounds);
+    this->Internals->GlobalInputBounds.AddBounds(realBounds);
+
+    ++this->Internals->TotalNumberOfDatasets;
     }
   iter->Delete();
 
   // Here we have the bounds according to our local datasets.
   // If we are not running in parallel then the local
   // bounds are the global bounds
-  if (!this->Controller)
-    {
-    return;
-    }
-  vtkCommunicator *comm = this->Controller->GetCommunicator();
-  if (!comm)
+  if (!this->Controller || this->Controller->GetNumberOfProcesses() <= 1)
     {
-    return;
+    return true;
     }
 
-  if (!comm->ComputeGlobalBounds(processNumber, numProcessors,
-                                 this->Bounds))
+  const double *min_point = this->Internals->GlobalInputBounds.GetMinPoint();
+  const double *max_point = this->Internals->GlobalInputBounds.GetMaxPoint();
+  double min_result[3], max_result[3];
+
+  if (!this->Controller->AllReduce(
+      min_point, min_result, 3, vtkCommunicator::MIN_OP))
     {
-    vtkErrorMacro("Problem occurred getting the global bounds");
+    return false;
     }
-  // At this point, the global bounds is set in each processor.
-}
-
-//-----------------------------------------------------------------------------
-void vtkExtractCTHPart::EvaluateVolumeFractionType(vtkRectilinearGrid* rg,
-  vtkCompositeDataSet* input)
-{
-  int num = this->GetNumberOfVolumeArrayNames();
-  int cc;
-  for ( cc = 0; cc < num; ++ cc )
+  if (!this->Controller->AllReduce(
+      max_point, max_result, 3, vtkCommunicator::MAX_OP))
     {
-    const char* arrayName = this->GetVolumeArrayName(cc);
-    if ( input )
-      {
-      vtkCompositeDataIterator* iter = input->NewIterator();
-      for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
-        {
-        vtkDataSet* dataSet = vtkDataSet::SafeDownCast(iter->GetCurrentDataObject());
-        if( dataSet== NULL)// cannot really be null since iter skips empty datasets.
-          {
-          continue;
-          }
-
-        vtkDataArray* cellVolumeFraction;
-        // Only convert single volume fraction array to point data.
-        // Other attributes will have to be viewed as cell data.
-        cellVolumeFraction = dataSet->GetCellData()->GetArray(arrayName);
-        if (cellVolumeFraction == NULL)
-          {
-          vtkErrorMacro("Could not find cell array " << arrayName);
-          return;
-          }
-        if (cellVolumeFraction->GetDataType() != VTK_DOUBLE &&
-          cellVolumeFraction->GetDataType() != VTK_FLOAT &&
-          cellVolumeFraction->GetDataType() != VTK_UNSIGNED_CHAR )
-          {
-          vtkErrorMacro("Expecting volume fraction to be of type float, "
-            "double, or unsigned char.");
-          return;
-          }
-        if ( this->VolumeFractionType >= 0 &&
-          this->VolumeFractionType != cellVolumeFraction->GetDataType() )
-          {
-          vtkErrorMacro("Volume fraction arrays are different type. They "
-            "should all be float, double, or unsigned char");
-          return;
-          }
-        if ( this->VolumeFractionType < 0 )
-          {
-          this->VolumeFractionType = cellVolumeFraction->GetDataType();
-          switch ( this->VolumeFractionType )
-            {
-          case VTK_UNSIGNED_CHAR:
-            this->VolumeFractionSurfaceValueInternal
-              = CTH_AMR_SURFACE_VALUE_UNSIGNED_CHAR *
-              this->VolumeFractionSurfaceValue;
-            break;
-          default:
-            this->VolumeFractionSurfaceValueInternal
-              = CTH_AMR_SURFACE_VALUE_FLOAT *
-              this->VolumeFractionSurfaceValue;
-            }
-          }
-        }
-      iter->Delete();
-      }
-    else
-      {
-      vtkDataArray* cellVolumeFraction;
-      // Only convert single volume fraction array to point data.
-      // Other attributes will have to be viewed as cell data.
-      cellVolumeFraction = rg->GetCellData()->GetArray(arrayName);
-      if (cellVolumeFraction == NULL)
-        {
-        vtkErrorMacro("Could not find cell array " << arrayName);
-        return;
-        }
-      if (cellVolumeFraction->GetDataType() != VTK_DOUBLE &&
-        cellVolumeFraction->GetDataType() != VTK_FLOAT &&
-        cellVolumeFraction->GetDataType() != VTK_UNSIGNED_CHAR )
-        {
-        vtkErrorMacro("Expecting volume fraction to be of type float, double, or unsigned char.");
-        return;
-        }
-      if ( this->VolumeFractionType >= 0 && this->VolumeFractionType != cellVolumeFraction->GetDataType() )
-        {
-        vtkErrorMacro("Volume fraction arrays are different type. They should all be float, double, or unsigned char");
-        return;
-        }
-      if ( this->VolumeFractionType < 0 )
-        {
-        this->VolumeFractionType = cellVolumeFraction->GetDataType();
-        switch ( this->VolumeFractionType )
-          {
-        case VTK_UNSIGNED_CHAR:
-          this->VolumeFractionSurfaceValueInternal
-            = CTH_AMR_SURFACE_VALUE_UNSIGNED_CHAR * this->VolumeFractionSurfaceValue;
-          break;
-        default:
-          this->VolumeFractionSurfaceValueInternal
-            = CTH_AMR_SURFACE_VALUE_FLOAT * this->VolumeFractionSurfaceValue;
-          }
-        }
-      }
+    return false;
     }
+
+  this->Internals->GlobalInputBounds.SetBounds(
+    min_result[0], max_result[0], min_result[1], max_result[1],
+    min_result[2], max_result[2]);
+  // At this point, the global bounds is set in each processor.
+  return true;
 }
 
 //-----------------------------------------------------------------------------
-// the input is a hierarchy of vtkUniformGrid or one level of
-// vtkRectilinearGrid. The output is a hierarchy of vtkPolyData.
-void vtkExtractCTHPart::ExecutePart(const char *arrayName,
-                                    vtkCompositeDataSet *input,
-                                    vtkAppendPolyData *appendSurface,
-                                    vtkAppendPolyData *append,
-                                    float minProgress,
-                                    float maxProgress)
+// return false on error.
+bool vtkExtractCTHPart::ExtractContour(
+  vtkPolyData* output, vtkCompositeDataSet* input, const char*arrayName)
 {
+  assert(output!=0 && input!=0 && arrayName!=0 && arrayName[0]!=0);
 
-  // Determine total number of leaf nodes which helps is firing progress events.
-  int totalNumberOfDatasets = 0;
+  bool warn_once = true;
   vtkSmartPointer<vtkCompositeDataIterator> iter;
   iter.TakeReference(input->NewIterator());
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
-    {
-    totalNumberOfDatasets++;
-    }
 
-  float delProg = (maxProgress-minProgress)/totalNumberOfDatasets;
+  // this loop accounts for the first 95% of the work.
+  ScaledProgress sp1(0.0, 0.95, this);
+
   int counter = 0;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem(), counter++)
+  vtkExtractCTHPart::VectorOfFragments fragments;
+  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem(), ++counter)
     {
-    float progress = minProgress + delProg*counter;
-    if (counter % 30 == 0)
+    // each iteration is 1/(total num of datasets)'th for the work.
+    ScaledProgress sp(
+      counter * 1.0/this->Internals->TotalNumberOfDatasets,
+      1.0/this->Internals->TotalNumberOfDatasets, this);
+
+    if (counter % 1000 == 0)
       {
-      this->UpdateProgress(progress);
+      this->TriggerProgressEvent(0.0);
       }
+
     vtkDataObject *dataObj = iter->GetCurrentDataObject();
-    if(dataObj!=0)// can be null if on another processor
+    vtkRectilinearGrid* rg = vtkRectilinearGrid::SafeDownCast(dataObj);
+    vtkUniformGrid* ug = vtkUniformGrid::SafeDownCast(dataObj);
+
+    if (ug)
       {
-      vtkRectilinearGrid *rg=vtkRectilinearGrid::SafeDownCast(dataObj);
-      if(rg!=0)
+      if (!this->ExtractClippedContourOnBlock<vtkUniformGrid>(fragments, ug, arrayName))
         {
-        // Does the input have the requested cell data?
-        if (rg->GetCellData()->GetArray(arrayName))
-          {
-          this->ExecutePartOnRectilinearGrid(arrayName,rg,appendSurface,
-            append, progress, progress + delProg);
-          }
-        else
-          {
-          vtkWarningMacro("Rectilinear Grid does not contain CellData named "
-            << arrayName << " aborting extraction");
-          vtkPolyData *tmp=vtkPolyData::New();
-          append->AddInputData(tmp);
-          tmp->Delete();
-          return;
-          }
+        return false;
         }
-      else
+      }
+    else if (rg)
+      {
+      if (!this->ExtractClippedContourOnBlock<vtkRectilinearGrid>(fragments, rg, arrayName))
         {
-#ifdef EXTRACT_USE_IMAGE_DATA
-        vtkImageData *ug=vtkImageData::SafeDownCast(dataObj);
-#else
-        vtkUniformGrid *ug=vtkUniformGrid::SafeDownCast(dataObj);
-#endif
-        if(ug!=0)
-          {
-          // Does the input have the requested cell data?
-          if (ug->GetCellData()->GetArray(arrayName))
-            {
-            this->ExecutePartOnUniformGrid(arrayName,ug,appendSurface,
-              append, progress, progress+delProg);
-            }
-          else
-            {
-            vtkWarningMacro("Uniform Grid does not contain CellData named "
-              << arrayName << " aborting extraction");
-            vtkPolyData *tmp=vtkPolyData::New();
-            append->AddInputData(tmp);
-            tmp->Delete();
-            return;
-            }
-          }
-        else
-          {
-          vtkErrorMacro(<<" cannot handle a block of this type.");
-          }
+        return false;
         }
       }
-    }
-
-}
-
-//-----------------------------------------------------------------------------
-void vtkExtractCTHPart::ExecutePartOnUniformGrid(
-  const char *arrayName,
-#ifdef EXTRACT_USE_IMAGE_DATA
-  vtkImageData *input,
-#else
-  vtkUniformGrid *input,
-#endif
-  vtkAppendPolyData *appendSurface,
-  vtkAppendPolyData *append,
-  float minProgress,
-  float maxProgress)
-{
-  vtkPolyData* tmp;
-  vtkDataArray* cellVolumeFraction;
-  int* dims;
-  float delProgress = maxProgress - minProgress;
-  int reportProgress = 0;
-  if (delProgress > 0.1)
-    {
-    reportProgress = 1;
-    }
-
-  if (reportProgress)
-    {
-    this->UpdateProgress(minProgress);
-    }
-
-  vtkTimerLog::MarkStartEvent("Execute Part");
-
-  // First things first.
-  // Convert Cell data array to point data array.
-  // Pass cell data.
-
-  // Only convert single volume fraction array to point data.
-  // Other attributes will have to be viewed as cell data.
-  cellVolumeFraction = input->GetCellData()->GetArray(arrayName);
-  if (cellVolumeFraction == NULL)
-    {
-    vtkErrorMacro("Could not find cell array " << arrayName);
-    return;
-    }
-  if (cellVolumeFraction->GetDataType() != VTK_DOUBLE &&
-      cellVolumeFraction->GetDataType() != VTK_FLOAT &&
-      cellVolumeFraction->GetDataType() != VTK_UNSIGNED_CHAR )
-    {
-    vtkErrorMacro("Expecting volume fraction to be of type float, double, or unsigned char.");
-    return;
-    }
-  if ( this->VolumeFractionType >= 0 && this->VolumeFractionType != cellVolumeFraction->GetDataType() )
-    {
-    vtkErrorMacro("Volume fraction arrays are different type. They should all be float, double, or unsigned char");
-    return;
-    }
-  if ( this->VolumeFractionType < 0 )
-    {
-    this->VolumeFractionType = cellVolumeFraction->GetDataType();
-    switch ( this->VolumeFractionType )
+    else if (warn_once && dataObj)
       {
-    case VTK_UNSIGNED_CHAR:
-      this->VolumeFractionSurfaceValueInternal
-        = CTH_AMR_SURFACE_VALUE_UNSIGNED_CHAR * this->VolumeFractionSurfaceValue;
-      break;
-    default:
-      this->VolumeFractionSurfaceValueInternal
-        = CTH_AMR_SURFACE_VALUE_FLOAT * this->VolumeFractionSurfaceValue;
+      warn_once = false;
+      vtkWarningMacro(<< dataObj->GetClassName() << " will be ignored.");
+      }
+    if ((counter % 1000) == 0)
+      {
+      this->TriggerProgressEvent(1.0);
       }
     }
 
-  this->Data->CopyStructure(input);
-
-  vtkDataArray* scalars = input->GetCellData()->GetScalars();
-  if (scalars && strcmp(arrayName, scalars->GetName()) == 0)
-    { // I do not know why the reader sets attributes, but ....
-    this->Data->GetCellData()->CopyScalarsOff();
-    }
-
-  this->Data->GetCellData()->PassData(input->GetCellData());
-  dims = input->GetDimensions();
-  this->PointVolumeFraction->SetNumberOfTuples(dims[0]*dims[1]*dims[2]);
-  this->ExecuteCellDataToPointData(cellVolumeFraction,
-                                   this->PointVolumeFraction, dims,
-                                   minProgress, minProgress+delProgress/3, reportProgress);
-
-
-
-  this->Data->GetPointData()->SetScalars(this->PointVolumeFraction);
-
-  if (reportProgress)
-    {
-    this->UpdateProgress(minProgress+2*delProgress/3);
-    }
-
-  int isNotEmpty=this->ExtractUniformGridSurface(this->Data,this->SurfacePolyData);
-  if(isNotEmpty)
-    {
-    tmp=vtkPolyData::New();
-    tmp->ShallowCopy(this->SurfacePolyData);
-    appendSurface->AddInputData(tmp);
-    tmp->Delete();
-    }
-
-  // All outside never has any polydata.
-  // Be sure to to that only after the surface filter.
-  double range[2];
-  cellVolumeFraction->GetRange(range);
-  if (range[1] < this->VolumeFractionSurfaceValueInternal)
-    {
-    vtkTimerLog::MarkEndEvent("Execute Part");
-    return;
-    }
-  if (this->ClipPlane == 0 && range[0] > this->VolumeFractionSurfaceValueInternal)
+  if (fragments.size() == 0)
     {
-    vtkTimerLog::MarkEndEvent("Execute Part");
-    return;
+    // empty contour. Not an error though, hence we don't return false.
+    return true;
     }
+  sp1.WorkDone();
 
-  this->PolyDataProducer->Update();
-  if (reportProgress)
+  // Now, the last 5% of the work.
+  ScaledProgress sp2(0.95, 0.05, this);
+  this->TriggerProgressEvent(0.0);
+  vtkNew<vtkAppendPolyData> appender;
+  for (size_t cc=0; cc < fragments.size(); cc++)
     {
-    this->UpdateProgress(minProgress+delProgress);
+    appender->AddInputData(fragments[cc].GetPointer());
     }
-
-  tmp=vtkPolyData::New();
-  tmp->ShallowCopy(this->PolyData);
-  append->AddInputData(tmp);
-  tmp->Delete();
-
-  vtkTimerLog::MarkEndEvent("Execute Part");
+  appender->Update();
+  output->ShallowCopy(appender->GetOutputDataObject(0));
+  this->TriggerProgressEvent(1.0);
+  return true;
 }
 
 //-----------------------------------------------------------------------------
-void vtkExtractCTHPart::CreateInternalPipeline()
+template <class T>
+bool vtkExtractCTHPart::ExtractClippedContourOnBlock(
+  vtkExtractCTHPart::VectorOfFragments& fragments, T* dataset, const char* arrayName)
 {
-  // Objects common to both pipelines
-  this->PointVolumeFraction=vtkDoubleArray::New();
-  this->SurfacePolyData=vtkPolyData::New();
+  assert(arrayName!=0 && arrayName[0]!=0 && dataset != 0);
 
-  // Uniform grid case pipeline
-
-#ifdef EXTRACT_USE_IMAGE_DATA
-  this->Data = vtkImageData::New();
-#else
- this->Data = vtkUniformGrid::New();
-#endif
-
-  this->Contour=vtkContourFilter::New();
-  this->Contour->SetInputData(this->Data);
-  this->Contour->SetValue(0, this->VolumeFractionSurfaceValueInternal);
-
-
-  if(this->ClipPlane)
-    {
-    // We need to append iso and capped surfaces.
-    this->Append2 = vtkAppendPolyData::New();
-    // Clip the volume fraction iso surface.
-    this->Clip1=vtkClipPolyData::New();
-    this->Clip1->SetInputConnection(this->Contour->GetOutputPort());
-    this->Clip1->SetClipFunction(this->ClipPlane);
-    this->Append2->AddInputConnection(this->Clip1->GetOutputPort());
-
-    // We need to create a capping surface.
-    this->Cut = vtkCutter::New();
-    this->Cut->SetCutFunction(this->ClipPlane);
-    this->Cut->SetValue(0, 0.0);
-    this->Cut->SetInputData(this->Data);
-    this->Clip2 = vtkClipPolyData::New();
-    this->Clip2->SetInputConnection(this->Cut->GetOutputPort());
-    this->Clip2->SetValue(this->VolumeFractionSurfaceValueInternal);
-    this->Append2->AddInputConnection(this->Clip2->GetOutputPort());
-    this->PolyData = this->Append2->GetOutput();
-    this->PolyDataProducer = this->Append2;
-    }
-  else
+  vtkDataArray* volumeFractionArray = dataset->GetCellData()->GetArray(arrayName);
+  if (!volumeFractionArray)
     {
-    this->PolyData = this->Contour->GetOutput();
-    this->PolyDataProducer = this->Contour;
+    // skip this block.
+    return true;
     }
 
-  // Rectilinear grid case pipeline
-
-  this->RData = vtkRectilinearGrid::New();
-
-  this->RContour=vtkContourFilter::New();
-  this->RContour->SetInputData(this->RData);
-  this->RContour->SetValue(0,this->VolumeFractionSurfaceValueInternal);
-
-  if(this->ClipPlane)
+  // determine the true value to use for the contour based on the data-type.
+  switch (volumeFractionArray->GetDataType())
     {
-    // We need to append iso and capped surfaces.
-    this->RAppend2 = vtkAppendPolyData::New();
-    // Clip the volume fraction iso surface.
-    this->RClip1=vtkClipPolyData::New();
-    this->RClip1->SetInputConnection(this->RContour->GetOutputPort());
-    this->RClip1->SetClipFunction(this->ClipPlane);
-    this->RAppend2->AddInputConnection(this->RClip1->GetOutputPort());
-
-    // We need to create a capping surface.
-    this->RCut = vtkCutter::New();
-    this->RCut->SetInputData(this->RData);
-    this->RCut->SetCutFunction(this->ClipPlane);
-    this->RCut->SetValue(0, 0.0);
-    this->RClip2 = vtkClipPolyData::New();
-    this->RClip2->SetInputConnection(this->RCut->GetOutputPort());
-    this->RClip2->SetValue(this->VolumeFractionSurfaceValueInternal);
-    this->RAppend2->AddInputConnection(this->RClip2->GetOutputPort());
-    this->RPolyData = this->RAppend2->GetOutput();
-    this->RPolyDataProducer = this->RAppend2;
-    }
-  else
-    {
-    this->RPolyData = this->RContour->GetOutput();
-    this->RPolyDataProducer = this->RContour;
-    }
-}
+  case VTK_UNSIGNED_CHAR:
+    this->VolumeFractionSurfaceValueInternal =
+      CTH_AMR_SURFACE_VALUE_UNSIGNED_CHAR * this->VolumeFractionSurfaceValue;
+    break;
 
-//-----------------------------------------------------------------------------
-void vtkExtractCTHPart::DeleteInternalPipeline()
-{
-  if(this->PointVolumeFraction!=0)
-    {
-    this->PointVolumeFraction->Delete();
-    this->PointVolumeFraction=0;
+  default:
+    this->VolumeFractionSurfaceValueInternal =
+      CTH_AMR_SURFACE_VALUE_FLOAT * this->VolumeFractionSurfaceValue;
     }
 
-  if(this->SurfacePolyData!=0)
-    {
-    this->SurfacePolyData->Delete();
-    this->SurfacePolyData=0;
-    }
+  // We create a clone so we can modify the dataset (i.e. add new arrays to it).
+  vtkNew<T> inputClone;
+  inputClone->ShallowCopy(dataset);
 
-  // Uniform grid
-  if(this->Data!=0)
-    {
-    this->Data->Delete();
-    this->Data=0;
-    }
+  // Convert cell-data-2-point-data so we can contour.
+  vtkNew<vtkDoubleArray> pointVolumeFractionArray;
+  this->ExecuteCellDataToPointData(volumeFractionArray,
+    pointVolumeFractionArray.GetPointer(), inputClone->GetDimensions());
+  inputClone->GetPointData()->SetScalars(pointVolumeFractionArray.GetPointer());
 
-  if(this->Contour!=0)
+  VectorOfFragments blockFragments;
+  if (!this->ExtractContourOnBlock<T>(blockFragments, inputClone.GetPointer(), arrayName))
     {
-    this->Contour->Delete();
-    this->Contour=0;
+    return false;
     }
 
-   if(this->Append2!=0)
+  if (!this->ClipPlane)
     {
-    this->Append2->Delete();
-    this->Append2=0;
+    fragments.insert(fragments.end(),
+      blockFragments.begin(), blockFragments.end());
+    return true;
     }
-   if(this->Cut!=0)
-     {
-     this->Cut->Delete();
-     this->Cut=0;
-     }
-   if(this->Clip1!=0)
-     {
-     this->Clip1->Delete();
-     this->Clip1=0;
-    }
-   if(this->Clip2!=0)
-     {
-     this->Clip2->Delete();
-     this->Clip2=0;
-     }
 
+  // Clip-n-cap the fragments using the clip plane.
 
-   // Rectilinear grid
-   if(this->RData!=0)
-    {
-    this->RData->Delete();
-    this->RData=0;
-    }
-  if(this->RContour!=0)
+  // for the clip.
+  for (size_t cc=0; cc < blockFragments.size(); cc++)
     {
-    this->RContour->Delete();
-    this->RContour=0;
+    vtkNew<vtkClipPolyData> clipper;
+    clipper->SetClipFunction(this->ClipPlane);
+    clipper->SetInputDataObject(blockFragments[cc]);
+    clipper->Update();
+    fragments.push_back(clipper->GetOutput());
     }
 
-   if(this->RAppend2!=0)
+  // for the cap.
+  if (this->Capping)
     {
-    this->RAppend2->Delete();
-    this->RAppend2=0;
-    }
-   if(this->RCut!=0)
-     {
-     this->RCut->Delete();
-     this->RCut=0;
-     }
-   if(this->RClip1!=0)
-     {
-     this->RClip1->Delete();
-     this->RClip1=0;
+    vtkNew<vtkCutter> cutter;
+    cutter->SetCutFunction(this->ClipPlane);
+    cutter->SetGenerateTriangles(this->GenerateTriangles? 1 : 0);
+    cutter->SetInputDataObject(inputClone.GetPointer());
+
+    vtkNew<vtkClipPolyData> scalarClipper;
+    scalarClipper->SetInputConnection(cutter->GetOutputPort());
+    scalarClipper->SetValue(this->VolumeFractionSurfaceValueInternal);
+    scalarClipper->Update();
+    fragments.push_back(scalarClipper->GetOutput());
     }
-   if(this->RClip2!=0)
-     {
-     this->RClip2->Delete();
-     this->RClip2=0;
-     }
+  return true;
 }
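
The capping branch above pairs vtkCutter, which slices the block with the clip plane, with a scalar-valued vtkClipPolyData, which keeps only the portion of that slice where the point volume fraction exceeds the contour value. The following standalone sketch illustrates the same clip-and-cap idea; the wavelet source and the iso-value of 150 are placeholders, not anything used by vtkExtractCTHPart:

#include <vtkNew.h>
#include <vtkRTAnalyticSource.h>
#include <vtkPlane.h>
#include <vtkCutter.h>
#include <vtkClipPolyData.h>
#include <vtkPolyData.h>

int main()
{
  // Synthetic image data with a point scalar array ("RTData").
  vtkNew<vtkRTAnalyticSource> wavelet;

  // The "clip plane".
  vtkNew<vtkPlane> plane;
  plane->SetOrigin(0.0, 0.0, 0.0);
  plane->SetNormal(1.0, 0.0, 0.0);

  // Slice the dataset with the plane ...
  vtkNew<vtkCutter> cutter;
  cutter->SetCutFunction(plane.GetPointer());
  cutter->SetValue(0, 0.0);
  cutter->SetInputConnection(wavelet->GetOutputPort());

  // ... and keep only the part of the slice above the iso-value. With no clip
  // function set, vtkClipPolyData clips by the input scalars, just as the
  // capping code above does.
  vtkNew<vtkClipPolyData> scalarClipper;
  scalarClipper->SetInputConnection(cutter->GetOutputPort());
  scalarClipper->SetValue(150.0);
  scalarClipper->Update();

  vtkPolyData* cap = scalarClipper->GetOutput();
  (void)cap; // the capping surface
  return 0;
}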
 
 //-----------------------------------------------------------------------------
-// the input is either a vtkRectilinearGrid or a vtkUniformGrid
-void vtkExtractCTHPart::ExecutePartOnRectilinearGrid(
-  const char *arrayName,
-  vtkRectilinearGrid *input,
-  vtkAppendPolyData *appendSurface,
-  vtkAppendPolyData *append,
-  float minProgress,
-  float maxProgress)
+template <class T>
+bool vtkExtractCTHPart::ExtractContourOnBlock(
+  vtkExtractCTHPart::VectorOfFragments& fragments, T* dataset, const char* arrayName)
 {
-  assert("pre: valid_input" && input->CheckAttributes()==0);
+  assert(arrayName!=0 && arrayName[0]!=0 && dataset != 0);
 
-  vtkPolyData* tmp;
-  vtkDataArray* cellVolumeFraction;
-  int* dims;
+  vtkDataArray* volumeFractionArray = dataset->GetPointData()->GetArray(arrayName);
+  assert(volumeFractionArray !=0);
+  assert(dataset->GetPointData()->GetArray(arrayName) !=0);
 
-  float delProgress = maxProgress - minProgress;
-  int reportProgress = 0;
-  if (delProgress > 0.1)
+  // Contour only if necessary.
+  double range[2];
+  volumeFractionArray->GetRange(range);
+  if (range[1] < this->VolumeFractionSurfaceValueInternal)
     {
-    reportProgress = 1;
+    // this block doesn't have the material of interest.
+    return true;
     }
 
-  vtkTimerLog::MarkStartEvent("Execute Part");
-
-  // First things first.
-  // Convert Cell data array to point data array.
-  // Pass cell data.
-
-  // Only convert single volume fraction array to point data.
-  // Other attributes will have to be viewed as cell data.
-  cellVolumeFraction = input->GetCellData()->GetArray(arrayName);
-  if (cellVolumeFraction == NULL)
-    {
-    vtkErrorMacro("Could not find cell array " << arrayName);
-    return;
-    }
-  if (cellVolumeFraction->GetDataType() != VTK_DOUBLE &&
-      cellVolumeFraction->GetDataType() != VTK_FLOAT &&
-      cellVolumeFraction->GetDataType() != VTK_UNSIGNED_CHAR )
-    {
-    vtkErrorMacro("Expecting volume fraction to be of type float, double, or unsigned char.");
-    return;
-    }
-  if ( this->VolumeFractionType >= 0 && this->VolumeFractionType != cellVolumeFraction->GetDataType() )
+  // Extract exterior surface. Adds the surface polydata to fragments, if any.
+  if (this->Capping)
     {
-    vtkErrorMacro("Volume fraction arrays are different type. They should all be float, double, or unsigned char");
-    return;
-    }
-  if ( this->VolumeFractionType < 0 )
-    {
-    this->VolumeFractionType = cellVolumeFraction->GetDataType();
-    switch ( this->VolumeFractionType )
-      {
-    case VTK_UNSIGNED_CHAR:
-      this->VolumeFractionSurfaceValueInternal
-        = CTH_AMR_SURFACE_VALUE_UNSIGNED_CHAR * this->VolumeFractionSurfaceValue;
-      break;
-    default:
-      this->VolumeFractionSurfaceValueInternal
-        = CTH_AMR_SURFACE_VALUE_FLOAT * this->VolumeFractionSurfaceValue;
-      }
-    }
-
-  this->RData->CopyStructure(input);
-
-  vtkDataArray* scalars = input->GetCellData()->GetScalars();
-  if (scalars && strcmp(arrayName, scalars->GetName()) == 0)
-    { // I do not know why the reader sets attributes, but ....
-    this->RData->GetCellData()->CopyScalarsOff();
+    this->ExtractExteriorSurface(fragments, dataset);
     }
 
-  this->RData->GetCellData()->PassData(input->GetCellData());
-  dims = input->GetDimensions();
-  this->PointVolumeFraction->SetNumberOfTuples(dims[0]*dims[1]*dims[2]);
-  this->ExecuteCellDataToPointData(cellVolumeFraction,
-                                   this->PointVolumeFraction, dims,
-                                   minProgress, minProgress+delProgress/3, reportProgress);
-
-
-  this->RData->GetPointData()->SetScalars(this->PointVolumeFraction);
-
-  assert("check: valid_rdata" && this->RData->CheckAttributes()==0);
-
-  int isNotEmpty=this->ExtractRectilinearGridSurface(this->RData,this->SurfacePolyData);
-  if(isNotEmpty)
+  if (this->ClipPlane == NULL && range[0] > this->VolumeFractionSurfaceValueInternal)
     {
-    tmp=vtkPolyData::New();
-    tmp->ShallowCopy(this->SurfacePolyData);
-    assert("check: valid_copy" && tmp->CheckAttributes()==0);
-    appendSurface->AddInputData(tmp);
-    tmp->Delete();
+    // no need to extract contour.
+    return true;
     }
 
-  if (reportProgress)
-    {
-    this->UpdateProgress(minProgress+2*delProgress/3);
-    }
+  // Extract contour.
+  vtkNew<vtkContourFilter> contourer;
+  contourer->SetInputData(dataset);
+  contourer->SetValue(0, this->VolumeFractionSurfaceValueInternal);
+  contourer->SetComputeScalars(0);
+  contourer->SetGenerateTriangles(this->GenerateTriangles? 1: 0);
+  contourer->SetInputArrayToProcess(0, 0, 0,
+    vtkDataObject::FIELD_ASSOCIATION_POINTS, arrayName);
+  contourer->Update();
 
-  // All outside never has any polydata.
-  // Be sure to to that only after the surface filter.
-  double range[2];
-  cellVolumeFraction->GetRange(range);
-  if (range[1] < this->VolumeFractionSurfaceValueInternal)
-    {
-    vtkTimerLog::MarkEndEvent("Execute Part");
-    return;
-    }
-  if (this->ClipPlane == 0 && range[0] > this->VolumeFractionSurfaceValueInternal)
+  vtkPolyData* output =
+    vtkPolyData::SafeDownCast(contourer->GetOutputDataObject(0));
+  if (!output || output->GetNumberOfPoints() == 0)
     {
-    vtkTimerLog::MarkEndEvent("Execute Part");
-    return;
+    return true;
     }
-
-  this->RPolyDataProducer->Update();
-
-  if (reportProgress)
+  if (!this->RemoveGhostCells &&
+    output->GetCellData()->GetArray("vtkGhostLevels"))
     {
-    this->UpdateProgress(minProgress+delProgress);
+    output->GetCellData()->GetArray("vtkGhostLevels")->SetName("OriginalGhostLevels");
     }
 
-  tmp=vtkPolyData::New();
-  tmp->ShallowCopy(this->RPolyData);
-  append->AddInputData(tmp);
-  tmp->Delete();
-
-  vtkTimerLog::MarkEndEvent("Execute Part");
+  fragments.push_back(output);
+  return true;
 }
+
 //-----------------------------------------------------------------------------
 // Description:
 // Append quads for faces of the block that are actually on the bounds
 // of the hierarchical dataset. Deals with ghost cells.
-// Return true if the output is not empty.
-int vtkExtractCTHPart::ExtractRectilinearGridSurface(
-  vtkRectilinearGrid *input,
-  vtkPolyData *output
-  )
+template <class T>
+void vtkExtractCTHPart::ExtractExteriorSurface(
+  vtkExtractCTHPart::VectorOfFragments& fragments, T* input)
 {
   assert("pre: valid_input" && input!=0 && input->CheckAttributes()==0);
-  assert("pre: output_exists" && output!=0);
 
   int result=0;
 #if 1
@@ -1248,8 +593,6 @@ int vtkExtractCTHPart::ExtractRectilinearGridSurface(
 
 //  vtkUnsignedCharArray *ghostArray=static_cast<vtkUnsignedCharArray *>(input->GetCellData()->GetArray("vtkGhostLevels"));
 
-
-
   // bounds without taking ghost cells into account
   double bounds[6];
 
@@ -1296,8 +639,8 @@ int vtkExtractCTHPart::ExtractRectilinearGridSurface(
 #endif
   // here, bounds are real block bounds without ghostcells.
 
-  const double *minP = this->Bounds->GetMinPoint();
-  const double *maxP = this->Bounds->GetMaxPoint();
+  const double *minP = this->Internals->GlobalInputBounds.GetMinPoint();
+  const double *maxP = this->Internals->GlobalInputBounds.GetMaxPoint();
 #if 0
   const double epsilon=0.001;
   int doFaceMinX=fabs(bounds[0]- minP[0])<epsilon;
@@ -1343,7 +686,7 @@ int vtkExtractCTHPart::ExtractRectilinearGridSurface(
 
   if(result)
     {
-    output->Initialize();
+    vtkSmartPointer<vtkPolyData> output = vtkSmartPointer<vtkPolyData>::New();
 
     vtkIdType numPoints=0;
     vtkIdType cellArraySize=0;
@@ -1429,238 +772,17 @@ int vtkExtractCTHPart::ExtractRectilinearGridSurface(
       {
       this->ExecuteFaceQuads(input,output,1,originalExtents,ext,2,1,0);
       }
-
     output->Squeeze();
-    }
-#endif
-// result=>valid_surface: A=>B !A||B
-  assert("post: valid_surface" && (!result || output->CheckAttributes()==0));
-  return result;
-}
+    assert(output->CheckAttributes() == 0);
 
-//-----------------------------------------------------------------------------
-// Description:
-// Append quads for faces of the block that actually on the bounds
-// of the hierarchical dataset. Deals with ghost cells.
-// Return true if the output is not empty.
-int vtkExtractCTHPart::ExtractUniformGridSurface(
-#ifdef EXTRACT_USE_IMAGE_DATA
-  vtkImageData *input,
-#else
-  vtkUniformGrid *input,
-#endif
-  vtkPolyData *output
-  )
-{
-  assert("pre: valid_input" && input!=0 && input->CheckAttributes()==0);
-  assert("pre: output_exists" && output!=0);
-
-  int result=0;
-#if 1
-  double origin[3];
-  input->GetOrigin(origin);
-  double spacing[3];
-  input->GetSpacing(spacing);
-  int dims[3];
-  input->GetDimensions(dims);
-  int ext[6];
-  int originalExtents[6];
-  input->GetExtent(ext);
-  input->GetExtent(originalExtents);
-
-//vtkUnsignedCharArray *ghostArray=static_cast<vtkUnsignedCharArray *>(input->GetCellData()->GetArray("vtkGhostLevels"));
-
-  // bounds without taking ghost cells into account
-  double bounds[6];
-
-  int i, j;
-  for (i = 0, j = 0; i < 3; i++, j+=2)
-    {
-    bounds[j]=origin[i];
-    bounds[j+1]=bounds[j]+spacing[i]*(dims[i]-1);
-    }
-
-#if 0
-  // block face min x
-  if(this->IsGhostFace(0,0,dims,ghostArray))
-    {
-    // downsize this!
-    bounds[0]+=spacing[0];
-    ++ext[0];
-    }
-  if(this->IsGhostFace(0,1,dims,ghostArray))
-    {
-    // downsize this!
-    bounds[1]-=spacing[0];
-    --ext[1];
-    }
-  if(this->IsGhostFace(1,0,dims,ghostArray))
-    {
-    // downsize this!
-    bounds[2]+=spacing[1];
-    ++ext[2];
-    }
-  if(this->IsGhostFace(1,1,dims,ghostArray))
-    {
-    // downsize this!
-    bounds[3]-=spacing[1];
-    --ext[3];
-    }
-  if(this->IsGhostFace(2,0,dims,ghostArray))
-    {
-    // downsize this!
-    bounds[4]+=spacing[2];
-    ++ext[4];
-    }
-  if(this->IsGhostFace(2,1,dims,ghostArray))
-    {
-    // downsize this!
-    bounds[5]-=spacing[2];
-    --ext[5];
+    vtkNew<vtkClipPolyData> clipper;
+    clipper->SetInputData(output);
+    clipper->SetValue(this->VolumeFractionSurfaceValueInternal);
+    clipper->Update();
+    fragments.push_back(clipper->GetOutput());
     }
 #endif
-  // here, bounds are real block bounds without ghostcells.
-
-  const double *minP = this->Bounds->GetMinPoint();
-  const double *maxP = this->Bounds->GetMaxPoint();
-#if 0
-  const double epsilon=0.001;
-  int doFaceMinX=fabs(bounds[0]- minP[0])<epsilon;
-  int doFaceMaxX=fabs(bounds[1]- maxP[0])<epsilon;
-  int doFaceMinY=fabs(bounds[2]- minP[1])<epsilon;
-  int doFaceMaxY=fabs(bounds[3]- maxP[1])<epsilon;
-  int doFaceMinZ=fabs(bounds[4]- minP[2])<epsilon;
-  int doFaceMaxZ=fabs(bounds[5]- maxP[2])<epsilon;
-#endif
-
-#if 1
-  int doFaceMinX=bounds[0]<= minP[0];
-  int doFaceMaxX=bounds[1]>= maxP[0];
-  int doFaceMinY=bounds[2]<= minP[1];
-  int doFaceMaxY=bounds[3]>= maxP[1];
-  int doFaceMinZ=bounds[4]<= minP[2];
-  int doFaceMaxZ=bounds[5]>= maxP[2];
-#endif
-
-#if 0
-  int doFaceMinX=1;
-  int doFaceMaxX=1;
-  int doFaceMinY=1;
-  int doFaceMaxY=1;
-  int doFaceMinZ=1;
-  int doFaceMaxZ=1;
-#endif
-#if 0
-  int doFaceMinX=0;
-  int doFaceMaxX=0;
-  int doFaceMinY=0;
-  int doFaceMaxY=0;
-  int doFaceMinZ=0;
-  int doFaceMaxZ=0;
-#endif
-#if 0
-  doFaceMaxX=0;
-  doFaceMaxY=0;
-  doFaceMaxZ=0;
-#endif
-
-  result=doFaceMinX||doFaceMaxX||doFaceMinY||doFaceMaxY||doFaceMinZ
-    ||doFaceMaxZ;
-
-  if(result)
-    {
-    output->Initialize();
-  vtkIdType numPoints=0;
-  vtkIdType cellArraySize=0;
-
-//  input->GetExtent(ext);
-
-  // Compute an upper bound for the number of points and cells.
-  // xMin face
-  if (doFaceMinX && ext[2] != ext[3] && ext[4] != ext[5] && ext[0] != ext[1])
-    {
-    cellArraySize += 2*(ext[3]-ext[2]+1)*(ext[5]-ext[4]+1);
-    numPoints += (ext[3]-ext[2]+1)*(ext[5]-ext[4]+1);
-    }
-  // xMax face
-  if (doFaceMaxX && ext[2] != ext[3] && ext[4] != ext[5])
-    {
-    cellArraySize += 2*(ext[3]-ext[2]+1)*(ext[5]-ext[4]+1);
-    numPoints += (ext[3]-ext[2]+1)*(ext[5]-ext[4]+1);
-    }
-  // yMin face
-  if (doFaceMinY && ext[0] != ext[1] && ext[4] != ext[5] && ext[2] != ext[3])
-    {
-    cellArraySize += 2*(ext[1]-ext[0]+1)*(ext[5]-ext[4]+1);
-    numPoints += (ext[1]-ext[0]+1)*(ext[5]-ext[4]+1);
-    }
-  // yMax face
-  if (doFaceMaxY && ext[0] != ext[1] && ext[4] != ext[5])
-    {
-    cellArraySize += 2*(ext[1]-ext[0]+1)*(ext[5]-ext[4]+1);
-    numPoints += (ext[1]-ext[0]+1)*(ext[5]-ext[4]+1);
-    }
-  // zMin face
-  if (doFaceMinZ && ext[0] != ext[1] && ext[2] != ext[3] && ext[4] != ext[5])
-    {
-    cellArraySize += 2*(ext[1]-ext[0]+1)*(ext[3]-ext[2]+1);
-    numPoints += (ext[1]-ext[0]+1)*(ext[3]-ext[2]+1);
-    }
-  // zMax face
-  if (doFaceMaxZ && ext[0] != ext[1] && ext[2] != ext[3])
-    {
-    cellArraySize += 2*(ext[1]-ext[0]+1)*(ext[3]-ext[2]+1);
-    numPoints += (ext[1]-ext[0]+1)*(ext[3]-ext[2]+1);
-    }
-
-  vtkCellArray *outPolys = vtkCellArray::New();
-  outPolys->Allocate(cellArraySize);
-  output->SetPolys(outPolys);
-  outPolys->Delete();
-
-  vtkPoints *outPoints = vtkPoints::New();
-  outPoints->Allocate(numPoints);
-  output->SetPoints(outPoints);
-  outPoints->Delete();
-
-  // Allocate attributes for copying.
-  output->GetPointData()->CopyAllocate(input->GetPointData());
-  output->GetCellData()->CopyAllocate(input->GetCellData());
-
-  // Extents are already corrected for ghostcells.
-
-  // make each face that is actually on the ds boundary
-  if(doFaceMinX)
-    {
-    this->ExecuteFaceQuads(input,output,0,originalExtents,ext,0,1,2);
-    }
-  if(doFaceMaxX)
-    {
-    this->ExecuteFaceQuads(input,output,1,originalExtents,ext,0,2,1);
-    }
-  if(doFaceMinY)
-    {
-    this->ExecuteFaceQuads(input,output,0,originalExtents,ext,1,2,0);
-    }
-  if(doFaceMaxY)
-    {
-    this->ExecuteFaceQuads(input,output,1,originalExtents,ext,1,0,2);
-    }
-  if(doFaceMinZ)
-    {
-    this->ExecuteFaceQuads(input,output,0,originalExtents,ext,2,0,1);
-    }
-  if(doFaceMaxZ)
-    {
-    this->ExecuteFaceQuads(input,output,1,originalExtents,ext,2,1,0);
-    }
-
-  output->Squeeze();
-    }
-#endif
-  // result=>valid_surface: A=>B !A||B
-  assert("post: valid_surface" && (!result || output->CheckAttributes()==0));
-  return result;
+// result=>valid_surface: A=>B !A||B
 }
 
 //----------------------------------------------------------------------------
@@ -1888,12 +1010,7 @@ void vtkExtractCTHPart::ExecuteFaceQuads(vtkDataSet *input,
 
 //-----------------------------------------------------------------------------
 void vtkExtractCTHPart::ExecuteCellDataToPointData(
-  vtkDataArray *cellVolumeFraction,
-  vtkDoubleArray *pointVolumeFraction,
-  int *dims,
-  float minProgress,
-  float maxProgress,
-  int reportProgress)
+  vtkDataArray *cellVolumeFraction, vtkDoubleArray *pointVolumeFraction, const int *dims)
 {
   int count;
   int i, j, k;
@@ -1902,6 +1019,7 @@ void vtkExtractCTHPart::ExecuteCellDataToPointData(
   double *pPoint;
 
   pointVolumeFraction->SetName(cellVolumeFraction->GetName());
+  pointVolumeFraction->SetNumberOfTuples(dims[0]*dims[1]*dims[2]);
 
   iEnd = dims[0]-1;
   jEnd = dims[1]-1;
@@ -1934,8 +1052,8 @@ void vtkExtractCTHPart::ExecuteCellDataToPointData(
   double *endPtr=pPoint+dims[0]*dims[1]*dims[2];
 #endif
 
-  float delProgress = (maxProgress - minProgress) / (kEnd*jEnd*iEnd) / 2;
-  vtkIdType counter = 0;
+//  float delProgress = (maxProgress - minProgress) / (kEnd*jEnd*iEnd) / 2;
+//  vtkIdType counter = 0;
 
   int index=0;
   // Loop through the cells.
@@ -1945,11 +1063,11 @@ void vtkExtractCTHPart::ExecuteCellDataToPointData(
       {
       for (i = 0; i < iEnd; ++i)
         {
-        if (counter % 1000 == 0 && reportProgress)
-          {
-          this->UpdateProgress(minProgress + delProgress*(i+j*iEnd+k*iEnd*jEnd));
-          }
-        counter++;
+        //if (counter % 1000 == 0 && reportProgress)
+        //  {
+        //  this->UpdateProgress(minProgress + delProgress*(i+j*iEnd+k*iEnd*jEnd));
+        //  }
+        //counter++;
         // Add cell value to all points of cell.
         double value=cellVolumeFraction->GetTuple1(index);
 
@@ -1998,7 +1116,7 @@ void vtkExtractCTHPart::ExecuteCellDataToPointData(
   jEnd = dims[1]-1;
   kEnd = dims[2]-1;
 
-  counter = 0;
+  // counter = 0;
   for (k = 0; k <= kEnd; ++k)
     {
     // Just a fancy fast way to compute the number of cell neighbors of a
@@ -2027,11 +1145,11 @@ void vtkExtractCTHPart::ExecuteCellDataToPointData(
         }
       for (i = 0; i <= iEnd; ++i)
         {
-        if (counter % 1000 == 0 && reportProgress)
-          {
-          this->UpdateProgress(minProgress + delProgress/2 + delProgress*(i+j*iEnd+k*iEnd*jEnd));
-          }
-        counter++;
+        //if (counter % 1000 == 0 && reportProgress)
+        //  {
+        //  this->UpdateProgress(minProgress + delProgress/2 + delProgress*(i+j*iEnd+k*iEnd*jEnd));
+        //  }
+        //counter++;
         // Just a fancy fast way to compute the number of cell neighbors of a
         // point.
         if (i == 1)
@@ -2051,6 +1169,13 @@ void vtkExtractCTHPart::ExecuteCellDataToPointData(
     }
 }
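
The loop above is a specialized fast path for converting the volume-fraction cell array into point data by averaging, for each point, the values of its adjacent cells. For reference only, the generic VTK filter that performs the same kind of conversion is vtkCellDataToPointData; the wavelet source in this sketch is merely a stand-in for real CTH data:

#include <vtkNew.h>
#include <vtkRTAnalyticSource.h>
#include <vtkPointDataToCellData.h>
#include <vtkCellDataToPointData.h>
#include <vtkImageData.h>

int main()
{
  // Stand-in data: start from point scalars and push them to the cells so we
  // have a cell array to convert back.
  vtkNew<vtkRTAnalyticSource> wavelet;
  vtkNew<vtkPointDataToCellData> toCells;
  toCells->SetInputConnection(wavelet->GetOutputPort());

  // Generic equivalent of ExecuteCellDataToPointData(): each point receives
  // the average of the values of its adjacent cells.
  vtkNew<vtkCellDataToPointData> toPoints;
  toPoints->SetInputConnection(toCells->GetOutputPort());
  toPoints->Update();

  vtkImageData* result =
    vtkImageData::SafeDownCast(toPoints->GetOutputDataObject(0));
  (void)result;
  return 0;
}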
 
+//-----------------------------------------------------------------------------
+void vtkExtractCTHPart::TriggerProgressEvent(double val)
+{
+  double progress = this->ProgressShift + val*this->ProgressScale;
+  //cout << "Progress: " << progress << endl;
+  this->UpdateProgress(progress);
+}
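
TriggerProgressEvent() maps a local progress value in [0,1] through the ProgressShift/ProgressScale pair declared in the header. The ScaledProgress objects used earlier (e.g. ScaledProgress sp1(0.0, 0.95, this), released via WorkDone()) presumably narrow and then restore that pair so nested scopes compose; the real class body is not part of this hunk, so the following standalone sketch is only a hypothetical illustration of the idea (ProgressWindow and ScaledProgressSketch are made-up names, not VTK API):

#include <iostream>

// Stand-in for the filter's ProgressShift/ProgressScale members.
struct ProgressWindow
{
  double Shift;
  double Scale;
  ProgressWindow() : Shift(0.0), Scale(1.0) {}
};

// RAII helper: narrows the progress window on construction and restores it in
// WorkDone() or the destructor, so nested scopes compose multiplicatively.
class ScaledProgressSketch
{
public:
  ScaledProgressSketch(double shift, double scale, ProgressWindow* w)
    : Window(w), SavedShift(w->Shift), SavedScale(w->Scale), Done(false)
  {
    // A local progress of p in [0,1] now reports as
    // SavedShift + (shift + p*scale) * SavedScale overall.
    w->Shift = SavedShift + shift * SavedScale;
    w->Scale = scale * SavedScale;
  }
  void WorkDone()
  {
    if (!Done) { Window->Shift = SavedShift; Window->Scale = SavedScale; Done = true; }
  }
  ~ScaledProgressSketch() { this->WorkDone(); }
private:
  ProgressWindow* Window;
  double SavedShift, SavedScale;
  bool Done;
};

int main()
{
  ProgressWindow w;
  ScaledProgressSketch outer(0.0, 0.95, &w);  // first 95% of the work
  ScaledProgressSketch inner(0.5, 0.25, &w);  // a quarter of that, starting halfway in
  // Equivalent of TriggerProgressEvent(0.0) inside the inner scope:
  std::cout << (w.Shift + 0.0 * w.Scale) << std::endl;  // prints 0.475
  return 0;
}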
 
 //-----------------------------------------------------------------------------
 void vtkExtractCTHPart::PrintSelf(ostream& os, vtkIndent indent)
@@ -2068,6 +1193,10 @@ void vtkExtractCTHPart::PrintSelf(ostream& os, vtkIndent indent)
     }
   os << indent << "VolumeFractionSurfaceValue: "
     << this->VolumeFractionSurfaceValue << endl;
+  os << indent << "Capping: " << this->Capping << endl;
+  os << indent << "GenerateTriangles: " << this->GenerateTriangles << endl;
+  os << indent << "RemoveGhostCells: " << this->RemoveGhostCells << endl;
+
   if (this->ClipPlane)
     {
     os << indent << "ClipPlane:\n";
@@ -2088,4 +1217,3 @@ void vtkExtractCTHPart::PrintSelf(ostream& os, vtkIndent indent)
     os << "No Controller." << endl;
     }
 }
-
diff --git a/Filters/Parallel/vtkExtractCTHPart.h b/Filters/Parallel/vtkExtractCTHPart.h
index 36e14d0..29f9499 100644
--- a/Filters/Parallel/vtkExtractCTHPart.h
+++ b/Filters/Parallel/vtkExtractCTHPart.h
@@ -12,13 +12,27 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
-// .NAME vtkExtractCTHPart - Generates surface of an CTH volume fraction.
+// .NAME vtkExtractCTHPart - Generates surface of a CTH volume fraction.
 // .SECTION Description
 // vtkExtractCTHPart is a filter that is specialized for creating
-// visualization of a CTH simulation.  First it converts the cell data
-// to point data.  It contours the selected volume fraction at a value
-// of 0.5.  The user has the option of clipping the part with a plane.
-// Clipped surfaces of the part are generated.
+// visualizations for a CTH simulation. CTH datasets consist of either
+// vtkNonOverlappingAMR or a multiblock of non-overlapping rectilinear grids
+// with cell-data. Certain cell-arrays in the dataset identify the fraction of
+// a particular material present in a given cell. The goal of this filter is
+// to extract a surface contour demarcating the surface where the volume
+// fraction for a particular material equals the user-chosen value.
+//
+// To achieve that, this filter first converts the cell-data to point-data and
+// then simply applies vtkContourFilter to extract the contour.
+//
+// vtkExtractCTHPart also provides the user with an option to clip the resultant
+// contour using a vtkPlane. Internally, it uses vtkClipPolyData (plus vtkCutter
+// for the capping surface) to clip the contour with the vtkPlane provided.
+//
+// The output of this filter is a vtkMultiBlockDataSet with one block
+// corresponding to each volume-fraction array requested. Each block itself is a
+// vtkPolyData for the contour generated on the current process (which may be
+// null, for processes where no contour is generated).
 
 #ifndef __vtkExtractCTHPart_h
 #define __vtkExtractCTHPart_h
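
Given the public interface this header introduces (AddVolumeArrayName, SetVolumeFractionSurfaceValue, the Capping flag, SetClipPlane; see the hunks below), typical use of the filter might look like the following sketch. The reader class, file name, and array name are placeholders rather than anything prescribed by the filter:

#include <vtkNew.h>
#include <vtkExtractCTHPart.h>
#include <vtkMultiBlockDataSet.h>
#include <vtkPlane.h>
#include <vtkXMLMultiBlockDataReader.h>

int main()
{
  // Placeholder input: any source producing a multiblock of rectilinear grids
  // (or vtkNonOverlappingAMR) carrying volume-fraction cell arrays.
  vtkNew<vtkXMLMultiBlockDataReader> reader;
  reader->SetFileName("cth_data.vtm");  // hypothetical file

  vtkNew<vtkPlane> clipPlane;
  clipPlane->SetNormal(0.0, 0.0, 1.0);

  vtkNew<vtkExtractCTHPart> extract;
  extract->SetInputConnection(reader->GetOutputPort());
  extract->AddVolumeArrayName("Material volume fraction - 1");  // hypothetical array
  extract->SetVolumeFractionSurfaceValue(0.5);
  extract->CappingOn();
  extract->SetClipPlane(clipPlane.GetPointer());
  extract->Update();

  // One block per requested volume-fraction array; a block may be NULL on
  // processes that generated no contour.
  vtkMultiBlockDataSet* parts =
    vtkMultiBlockDataSet::SafeDownCast(extract->GetOutputDataObject(0));
  (void)parts;
  return 0;
}
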
@@ -27,17 +41,13 @@
 #include "vtkMultiBlockDataSetAlgorithm.h"
 
 class vtkAppendPolyData;
-class vtkBoundingBox;
-class vtkClipPolyData;
 class vtkContourFilter;
-class vtkCutter;
 class vtkDataArray;
 class vtkDataSet;
 class vtkDataSetSurfaceFilter;
 class vtkDoubleArray;
 class vtkExtractCTHPartInternal;
 class vtkImageData;
-class vtkInformationDoubleVectorKey;
 class vtkCompositeDataSet;
 class vtkMultiProcessController;
 class vtkPlane;
@@ -45,58 +55,60 @@ class vtkPolyData;
 class vtkRectilinearGrid;
 class vtkUniformGrid;
 class vtkUnsignedCharArray;
+class vtkExtractCTHPartFragments;
 
 //#define EXTRACT_USE_IMAGE_DATA 1
 
 class VTKFILTERSPARALLEL_EXPORT vtkExtractCTHPart : public vtkMultiBlockDataSetAlgorithm
 {
 public:
+  static vtkExtractCTHPart *New();
   vtkTypeMacro(vtkExtractCTHPart,vtkMultiBlockDataSetAlgorithm);
   void PrintSelf(ostream& os, vtkIndent indent);
 
   // Description:
-  // Construct object with initial range (0,1) and single contour value
-  // of 0.0.
-  static vtkExtractCTHPart *New();
-
-  // Description:
-  // Names of cell volume fraction arrays to extract.
-  void RemoveDoubleVolumeArrayNames();
-  void RemoveFloatVolumeArrayNames();
-  void RemoveUnsignedCharVolumeArrayNames();
+  // Select cell-data arrays (volume-fraction arrays) to contour with.
+  void AddVolumeArrayName(const char*);
+  void RemoveVolumeArrayNames();
   int GetNumberOfVolumeArrayNames();
   const char* GetVolumeArrayName(int idx);
-  // for backwards compatibility
-  void RemoveAllVolumeArrayNames();
-
-  // Description
-  // Names of cell volume fraction arrays to extract.
-  // Each of the volume fraction arrays must be of the same type.
-  // These three methods enforce that on input, removing any prior arrays
-  // of the wrong type whenever a new array is added.
-  void AddDoubleVolumeArrayName(char* arrayName);
-  void AddFloatVolumeArrayName(char* arrayName);
-  void AddUnsignedCharVolumeArrayName(char* arrayName);
-  //for backwards compatibility
-  void AddVolumeArrayName(char* arrayName);
 
   // Description:
-  // Set, get or maninpulate the implicit clipping plane.
-  void SetClipPlane(vtkPlane *clipPlane);
-  vtkGetObjectMacro(ClipPlane, vtkPlane);
+  // Get/Set the parallel controller. By default, the value returned by
+  // vtkMultiProcessController::GetGlobalController() when the object is
+  // instantiated is used.
+  void SetController(vtkMultiProcessController* controller);
+  vtkGetObjectMacro(Controller,vtkMultiProcessController);
 
   // Description:
-  // Look at clip plane to compute MTime.
-  unsigned long GetMTime();
+  // On by default, enables logic to cap the material volume.
+  vtkSetMacro(Capping, bool);
+  vtkGetMacro(Capping, bool);
+  vtkBooleanMacro(Capping, bool);
 
   // Description:
-  // Set the controller used to coordinate parallel processing.
-  void SetController(vtkMultiProcessController* controller);
+  // Triangulate results. When set to false, the internal cut and contour filters
+  // are told not to triangulate results if possible. true by default.
+  vtkSetMacro(GenerateTriangles, bool);
+  vtkGetMacro(GenerateTriangles, bool);
+  vtkBooleanMacro(GenerateTriangles, bool);
 
   // Description:
-  // Return the controller used to coordinate parallel processing. By default,
-  // it is the global controller.
-  vtkGetObjectMacro(Controller,vtkMultiProcessController);
+  // When set to false, the output surfaces will not hide contours extracted from
+  // ghost cells. This results in overlapping contours but avoids holes.
+  // Default is set to true.
+  vtkSetMacro(RemoveGhostCells, bool);
+  vtkGetMacro(RemoveGhostCells, bool);
+  vtkBooleanMacro(RemoveGhostCells, bool);
+
+  // Description:
+  // Set, get or manipulate the implicit clipping plane.
+  void SetClipPlane(vtkPlane *clipPlane);
+  vtkGetObjectMacro(ClipPlane, vtkPlane);
+
+  // Description:
+  // Look at clip plane to compute MTime.
+  unsigned long GetMTime();
 
   // Description:
   // Set and get the volume fraction surface value. This value should be
@@ -108,81 +120,20 @@ protected:
   vtkExtractCTHPart();
   ~vtkExtractCTHPart();
 
-  virtual int RequestInformation(vtkInformation *request,
-                                 vtkInformationVector **inputVector,
-                                 vtkInformationVector *outputVector);
-
-  virtual int RequestData(vtkInformation *, vtkInformationVector **,
-                  vtkInformationVector *);
-
-  // Description:
-  // the input is a hierarchy of vtkUniformGrid or one level of
-  // vtkRectilinearGrid. The output is a hierarchy of vtkPolyData.
-
+  virtual int FillInputPortInformation(int port, vtkInformation *info);
+  virtual int RequestData(
+    vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
   // Description:
   // Compute the bounds over the composite dataset, some sub-dataset
-  // can be on other processors.
-  void ComputeBounds(vtkCompositeDataSet *input,
-                     int processNumber,
-                     int numProcessors);
-
-  void ExecutePart(const char *arrayName,
-                   vtkCompositeDataSet *input,
-                   vtkAppendPolyData *appendSurface,
-                   vtkAppendPolyData *append,
-                   float minProgress,
-                   float maxProgress);
-
-  void ExecutePartOnUniformGrid(const char *arrayName,
-#ifdef EXTRACT_USE_IMAGE_DATA
-                                vtkImageData *input,
-#else
-                                vtkUniformGrid *input,
-#endif
-                                vtkAppendPolyData *appendSurface,
-                                vtkAppendPolyData *append,
-                                float minProgress,
-                                float maxProgress);
-
-  void ExecutePartOnRectilinearGrid(const char *arrayName,
-                                    vtkRectilinearGrid *input,
-                                    vtkAppendPolyData *appendSurface,
-                                    vtkAppendPolyData *append,
-                                    float minProgress,
-                                    float maxProgress);
-
-  void ExecuteCellDataToPointData(vtkDataArray *cellVolumeFraction,
-                                  vtkDoubleArray *pointVolumeFraction,
-                                  int *dims,
-                                  float minProgress,
-                                  float maxProgress,
-                                  int reportProgress);
-
-  virtual int FillInputPortInformation(int port,
-                                       vtkInformation *info);
-
-  void CreateInternalPipeline();
-  void DeleteInternalPipeline();
-
-  // Description:
-  // Append quads for faces of the block that actually on the bounds
-  // of the hierarchical dataset. Deals with ghost cells.
-  // Return true if the output is not empty.
-  int ExtractUniformGridSurface(
-#ifdef EXTRACT_USE_IMAGE_DATA
-    vtkImageData *input,
-#else
-    vtkUniformGrid *input,
-#endif
-    vtkPolyData *output);
+  // can be on other processors. Returns false on communication failure.
+  bool ComputeGlobalBounds(vtkCompositeDataSet *input);
 
   // Description:
-  // Append quads for faces of the block that actually on the bounds
-  // of the hierarchical dataset. Deals with ghost cells.
-  // Return true if the output is not empty.
-  int ExtractRectilinearGridSurface(vtkRectilinearGrid *input,
-                                    vtkPolyData *output);
+  // Extract contour for a particular array over the entire input dataset.
+  // Returns false on error.
+  bool ExtractContour(
+    vtkPolyData* output, vtkCompositeDataSet* input, const char*arrayName);
 
   void ExecuteFaceQuads(vtkDataSet *input,
                         vtkPolyData *output,
@@ -202,49 +153,53 @@ protected:
                   int dims[3],
                   vtkUnsignedCharArray *ghostArray);
 
-  vtkPlane *ClipPlane;
-  vtkExtractCTHPartInternal* Internals;
-
-  // Internal Pipeline elements
-  vtkDoubleArray *PointVolumeFraction;
-
-#ifdef EXTRACT_USE_IMAGE_DATA
-  vtkImageData *Data;
-#else
-  vtkUniformGrid *Data;
-#endif
+  void TriggerProgressEvent(double val);
 
-  vtkContourFilter *Contour;
-  vtkAppendPolyData *Append2;
-  vtkClipPolyData *Clip1;
-  vtkCutter *Cut;
-  vtkClipPolyData *Clip2;
-
-  vtkPolyData *PolyData;
-  vtkAlgorithm *PolyDataProducer;
-  vtkPolyData *RPolyData;
-  vtkAlgorithm *RPolyDataProducer;
-  vtkPolyData *SurfacePolyData;
-
-  vtkRectilinearGrid *RData;
-  vtkContourFilter *RContour;
-  vtkAppendPolyData *RAppend2;
-  vtkClipPolyData *RClip1;
-  vtkCutter *RCut;
-  vtkClipPolyData *RClip2;
-
-  void EvaluateVolumeFractionType(vtkRectilinearGrid* rg,
-                                  vtkCompositeDataSet* input);
   int VolumeFractionType;
   double VolumeFractionSurfaceValue;
   double VolumeFractionSurfaceValueInternal;
-  int OverwriteVolumeFractionSurfaceValue;
-
-  vtkBoundingBox *Bounds; // Whole bounds (dataset over all the processors)
-
+  bool GenerateTriangles;
+  bool Capping;
+  bool RemoveGhostCells;
+  vtkPlane *ClipPlane;
   vtkMultiProcessController *Controller;
 private:
   vtkExtractCTHPart(const vtkExtractCTHPart&);  // Not implemented.
   void operator=(const vtkExtractCTHPart&);  // Not implemented.
+
+  class VectorOfFragments;
+
+  // Description:
+  // Extract the clipped contour for a particular array over a particular block
+  // in the input dataset. Returns false on error.
+  template <class T>
+  bool ExtractClippedContourOnBlock(
+    vtkExtractCTHPart::VectorOfFragments& fragments, T* input, const char* arrayName);
+
+  // Description:
+  // Extract contour for a particular array over a particular block in the input
+  // dataset.  Returns false on error.
+  template <class T>
+  bool ExtractContourOnBlock(
+    vtkExtractCTHPart::VectorOfFragments& fragments, T* input, const char* arrayName);
+
+  // Description:
+  // Append quads for faces of the block that are actually on the bounds
+  // of the hierarchical dataset. Deals with ghost cells.
+  template <class T>
+  void ExtractExteriorSurface(
+    vtkExtractCTHPart::VectorOfFragments& fragments, T* input);
+
+  // Description:
+  // Fast cell-data-2-point-data implementation.
+  void ExecuteCellDataToPointData(
+    vtkDataArray *cellVolumeFraction, vtkDoubleArray *pointVolumeFraction, const int *dims);
+
+  double ProgressShift;
+  double ProgressScale;
+
+  class ScaledProgress;
+  friend class ScaledProgress;
+  vtkExtractCTHPartInternal* Internals;
 };
 #endif
diff --git a/Filters/Parallel/vtkExtractPolyDataPiece.h b/Filters/Parallel/vtkExtractPolyDataPiece.h
index 99f60a5..589d14b 100644
--- a/Filters/Parallel/vtkExtractPolyDataPiece.h
+++ b/Filters/Parallel/vtkExtractPolyDataPiece.h
@@ -39,7 +39,7 @@ public:
 
 protected:
   vtkExtractPolyDataPiece();
-  ~vtkExtractPolyDataPiece() {};
+  ~vtkExtractPolyDataPiece() {}
 
   // Usual data generation method
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Parallel/vtkExtractUnstructuredGridPiece.h b/Filters/Parallel/vtkExtractUnstructuredGridPiece.h
index c2aa5f5..7f85183 100644
--- a/Filters/Parallel/vtkExtractUnstructuredGridPiece.h
+++ b/Filters/Parallel/vtkExtractUnstructuredGridPiece.h
@@ -39,7 +39,7 @@ public:
 
 protected:
   vtkExtractUnstructuredGridPiece();
-  ~vtkExtractUnstructuredGridPiece() {};
+  ~vtkExtractUnstructuredGridPiece() {}
 
   // Usual data generation method
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Parallel/vtkExtractUserDefinedPiece.cxx b/Filters/Parallel/vtkExtractUserDefinedPiece.cxx
index 370527d..ed15167 100644
--- a/Filters/Parallel/vtkExtractUserDefinedPiece.cxx
+++ b/Filters/Parallel/vtkExtractUserDefinedPiece.cxx
@@ -42,10 +42,8 @@ vtkExtractUserDefinedPiece::vtkExtractUserDefinedPiece()
 }
 vtkExtractUserDefinedPiece::~vtkExtractUserDefinedPiece()
 {
-  if (this->ConstantData){
-    delete [] (char *)this->ConstantData;
-    this->ConstantData = NULL;
-  }
+  delete [] (char *)this->ConstantData;
+  this->ConstantData = NULL;
 }
 void vtkExtractUserDefinedPiece::PrintSelf(ostream& os, vtkIndent indent)
 {
diff --git a/Filters/Parallel/vtkPCellDataToPointData.h b/Filters/Parallel/vtkPCellDataToPointData.h
index edd6580..16ebe10 100644
--- a/Filters/Parallel/vtkPCellDataToPointData.h
+++ b/Filters/Parallel/vtkPCellDataToPointData.h
@@ -43,7 +43,7 @@ public:
 
 protected:
   vtkPCellDataToPointData();
-  ~vtkPCellDataToPointData() {};
+  ~vtkPCellDataToPointData() {}
 
   // Usual data generation method
   virtual int RequestData(vtkInformation* request,
diff --git a/Filters/Parallel/vtkPExtractArraysOverTime.cxx b/Filters/Parallel/vtkPExtractArraysOverTime.cxx
index 14b8315..c590be3 100644
--- a/Filters/Parallel/vtkPExtractArraysOverTime.cxx
+++ b/Filters/Parallel/vtkPExtractArraysOverTime.cxx
@@ -26,7 +26,7 @@
 #include "vtkUnsignedCharArray.h"
 
 #include <string>
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkPExtractArraysOverTime);
 vtkCxxSetObjectMacro(vtkPExtractArraysOverTime, Controller, vtkMultiProcessController);
diff --git a/Filters/Parallel/vtkPKdTree.cxx b/Filters/Parallel/vtkPKdTree.cxx
index 27bea78..f83b9b0 100644
--- a/Filters/Parallel/vtkPKdTree.cxx
+++ b/Filters/Parallel/vtkPKdTree.cxx
@@ -36,7 +36,7 @@
 
 #include <queue>
 #include <algorithm>
-#include <assert.h>
+#include <cassert>
 
 // Timing data ---------------------------------------------
 
diff --git a/Filters/Parallel/vtkPLinearExtrusionFilter.h b/Filters/Parallel/vtkPLinearExtrusionFilter.h
index cdab658..b44056e 100644
--- a/Filters/Parallel/vtkPLinearExtrusionFilter.h
+++ b/Filters/Parallel/vtkPLinearExtrusionFilter.h
@@ -47,7 +47,7 @@ public:
 
 protected:
   vtkPLinearExtrusionFilter();
-  ~vtkPLinearExtrusionFilter() {};
+  ~vtkPLinearExtrusionFilter() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int RequestUpdateExtent(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Parallel/vtkPPolyDataNormals.h b/Filters/Parallel/vtkPPolyDataNormals.h
index 54d5b31..d319f69 100644
--- a/Filters/Parallel/vtkPPolyDataNormals.h
+++ b/Filters/Parallel/vtkPPolyDataNormals.h
@@ -38,7 +38,7 @@ public:
 
 protected:
   vtkPPolyDataNormals();
-  ~vtkPPolyDataNormals() {};
+  ~vtkPPolyDataNormals() {}
 
   // Usual data generation method
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Parallel/vtkPSphereSource.h b/Filters/Parallel/vtkPSphereSource.h
index f81e8a6..276ddf1 100644
--- a/Filters/Parallel/vtkPSphereSource.h
+++ b/Filters/Parallel/vtkPSphereSource.h
@@ -36,8 +36,8 @@ public:
   unsigned long GetEstimatedMemorySize();
 
 protected:
-  vtkPSphereSource() {};
-  ~vtkPSphereSource() {};
+  vtkPSphereSource() {}
+  ~vtkPSphereSource() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 private:
diff --git a/Filters/Parallel/vtkPTableToStructuredGrid.cxx b/Filters/Parallel/vtkPTableToStructuredGrid.cxx
deleted file mode 100644
index 92533b5..0000000
--- a/Filters/Parallel/vtkPTableToStructuredGrid.cxx
+++ /dev/null
@@ -1,177 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkPTableToStructuredGrid.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkPTableToStructuredGrid.h"
-
-#include "vtkDataSetAttributes.h"
-#include "vtkInformation.h"
-#include "vtkInformationVector.h"
-#include "vtkMultiProcessController.h"
-#include "vtkObjectFactory.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-#include "vtkTable.h"
-#include "vtkStructuredData.h"
-
-
-static void CopyStructuredData(vtkDataSetAttributes* out, int outExtent[6],
-  vtkDataSetAttributes* in, int inExtent[6])
-{
-  int indims[3];
-  indims[0] = inExtent[1] - inExtent[0]+1;
-  indims[1] = inExtent[3] - inExtent[2]+1;
-  indims[2] = inExtent[5] - inExtent[4]+1;
-
-  int outdims[3];
-  outdims[0] = outExtent[1] - outExtent[0]+1;
-  outdims[1] = outExtent[3] - outExtent[2]+1;
-  outdims[2] = outExtent[5] - outExtent[4]+1;
-
-
-  int relativeExtent[6];
-  relativeExtent[0] = outExtent[0] - inExtent[0];
-  relativeExtent[1] = outExtent[1] - inExtent[0];
-  relativeExtent[2] = outExtent[2] - inExtent[2];
-  relativeExtent[3] = outExtent[3] - inExtent[2];
-  relativeExtent[4] = outExtent[4] - inExtent[4];
-  relativeExtent[5] = outExtent[5] - inExtent[4];
-
-  for (int zz=relativeExtent[4]; zz <= relativeExtent[5]; zz++)
-    {
-    for (int yy=relativeExtent[2]; yy <= relativeExtent[3]; yy++)
-      {
-      for (int xx=relativeExtent[0]; xx <= relativeExtent[1]; xx++)
-        {
-        int index[3] = {xx, yy, zz};
-        int outindex[3] = {xx-relativeExtent[0], yy-relativeExtent[2],
-          zz-relativeExtent[4]};
-        out->CopyData(in,
-          vtkStructuredData::ComputePointId(indims, index),
-          vtkStructuredData::ComputePointId(outdims, outindex));
-        }
-      }
-    }
-
-}
-
-
-vtkStandardNewMacro(vtkPTableToStructuredGrid);
-vtkCxxSetObjectMacro(vtkPTableToStructuredGrid, Controller,
-  vtkMultiProcessController);
-//----------------------------------------------------------------------------
-vtkPTableToStructuredGrid::vtkPTableToStructuredGrid()
-{
-  this->Controller = 0;
-  this->SetController(vtkMultiProcessController::GetGlobalController());
-}
-
-//----------------------------------------------------------------------------
-vtkPTableToStructuredGrid::~vtkPTableToStructuredGrid()
-{
-  this->SetController(0);
-}
-
-//----------------------------------------------------------------------------
-int vtkPTableToStructuredGrid::RequestData(vtkInformation* request,
-    vtkInformationVector** inputVector, vtkInformationVector* outputVector)
-{
-  int numProcs = this->Controller->GetNumberOfProcesses();
-  int myId = this->Controller->GetLocalProcessId();
-
-  if (numProcs <= 1)
-    {
-    return this->Superclass::RequestData(request, inputVector, outputVector);
-    }
-
-  vtkStructuredGrid* output = vtkStructuredGrid::GetData(outputVector, 0);
-  vtkTable* input = vtkTable::GetData(inputVector[0], 0);
-
-  int data_valid = 0;
-  if (myId == 0)
-    {
-    // Ensure that extents are valid.
-    int num_values = (this->WholeExtent[1] - this->WholeExtent[0] + 1) *
-      (this->WholeExtent[3] - this->WholeExtent[2] + 1) *
-      (this->WholeExtent[5] - this->WholeExtent[4] + 1);
-
-    if (input->GetNumberOfRows() != num_values)
-      {
-      vtkErrorMacro("The input table must have exactly " << num_values
-        << " rows. Currently it has " << input->GetNumberOfRows() << " rows.");
-      }
-    else
-      {
-      data_valid = 1;
-      }
-    }
-  this->Controller->Broadcast(&data_valid, 1, 0);
-  if (!data_valid)
-    {
-    return 0;
-    }
-
-  vtkStreamingDemandDrivenPipeline *sddp =
-    vtkStreamingDemandDrivenPipeline::SafeDownCast(this->GetExecutive());
-  int extent[6];
-  sddp->GetOutputInformation(0)->Get(
-    vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), extent);
-
-  int *allextents = new int[numProcs*6];
-  if (!this->Controller->Gather(extent, allextents, 6, 0))
-    {
-    vtkErrorMacro("Failed to gather extents.");
-    return 0;
-    }
-  if (myId == 0)
-    {
-    // Send the relevant rows to each of the processes.
-    for (int cc=0; cc < numProcs; cc++)
-      {
-      int curExtent[6];
-      memcpy(curExtent, &allextents[6*cc], 6*sizeof(int));
-      vtkIdType numTuples = (curExtent[1]-curExtent[0] + 1) *
-        (curExtent[3]-curExtent[2] + 1) *
-        (curExtent[5]-curExtent[4] + 1);
-
-      vtkTable* curTable = vtkTable::New();
-      curTable->GetRowData()->CopyAllocate(input->GetRowData(), numTuples);
-      ::CopyStructuredData(curTable->GetRowData(), curExtent,
-        input->GetRowData(), this->WholeExtent);
-      if (cc==0)
-        {
-        this->Superclass::Convert(curTable, output, curExtent);
-        }
-      else
-        {
-        this->Controller->Send(curTable, cc, 985723);
-        }
-      curTable->Delete();
-      }
-    }
-  else
-    {
-    vtkTable* curTable = vtkTable::New();
-    this->Controller->Receive(curTable, 0, 985723);
-    this->Superclass::Convert(curTable, output, extent);
-    curTable->Delete();
-    }
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-void vtkPTableToStructuredGrid::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-  os << indent << "Controller: " << this->Controller << endl;
-}
-
diff --git a/Filters/Parallel/vtkPTableToStructuredGrid.h b/Filters/Parallel/vtkPTableToStructuredGrid.h
deleted file mode 100644
index da6c0e4..0000000
--- a/Filters/Parallel/vtkPTableToStructuredGrid.h
+++ /dev/null
@@ -1,62 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkPTableToStructuredGrid.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkPTableToStructuredGrid - vtkTableToStructuredGrid specialization
-// which handles distribution of the input table.
-// .SECTION Description
-// vtkPTableToStructuredGrid is vtkTableToStructuredGrid specialization
-// which handles distribution of the input table.
-// For starters, this assumes that the input table is only available on the root
-// node.
-
-#ifndef __vtkPTableToStructuredGrid_h
-#define __vtkPTableToStructuredGrid_h
-
-#include "vtkFiltersParallelModule.h" // For export macro
-#include "vtkTableToStructuredGrid.h"
-
-class vtkMultiProcessController;
-
-class VTKFILTERSPARALLEL_EXPORT vtkPTableToStructuredGrid : public vtkTableToStructuredGrid
-{
-public:
-  static vtkPTableToStructuredGrid* New();
-  vtkTypeMacro(vtkPTableToStructuredGrid, vtkTableToStructuredGrid);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Get/Set the controller.
-  void SetController(vtkMultiProcessController*);
-  vtkGetObjectMacro(Controller, vtkMultiProcessController);
-
-//BTX
-protected:
-  vtkPTableToStructuredGrid();
-  ~vtkPTableToStructuredGrid();
-
-  // Description:
-  // Convert input vtkTable to vtkStructuredGrid.
-  virtual int RequestData(vtkInformation* request,
-    vtkInformationVector** inputVector, vtkInformationVector* outputVector);
-
-  vtkMultiProcessController* Controller;
-private:
-  vtkPTableToStructuredGrid(const vtkPTableToStructuredGrid&); // Not implemented.
-  void operator=(const vtkPTableToStructuredGrid&); // Not implemented.
-//ETX
-};
-
-#endif
-
-
diff --git a/Filters/Parallel/vtkPassThroughFilter.h b/Filters/Parallel/vtkPassThroughFilter.h
index d094843..75593a2 100644
--- a/Filters/Parallel/vtkPassThroughFilter.h
+++ b/Filters/Parallel/vtkPassThroughFilter.h
@@ -39,8 +39,8 @@ public:
 
 protected:
 
-  vtkPassThroughFilter() {};
-  virtual ~vtkPassThroughFilter() {};
+  vtkPassThroughFilter() {}
+  virtual ~vtkPassThroughFilter() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Parallel/vtkPieceRequestFilter.h b/Filters/Parallel/vtkPieceRequestFilter.h
index 93abe08..07b36e8 100644
--- a/Filters/Parallel/vtkPieceRequestFilter.h
+++ b/Filters/Parallel/vtkPieceRequestFilter.h
@@ -60,7 +60,7 @@ public:
 
 protected:
   vtkPieceRequestFilter();
-  ~vtkPieceRequestFilter() {};
+  ~vtkPieceRequestFilter() {}
 
   virtual int RequestDataObject(vtkInformation* request,
                                 vtkInformationVector** inputVector,
diff --git a/Filters/Parallel/vtkPipelineSize.h b/Filters/Parallel/vtkPipelineSize.h
index cd8e54f..1f21c80 100644
--- a/Filters/Parallel/vtkPipelineSize.h
+++ b/Filters/Parallel/vtkPipelineSize.h
@@ -45,7 +45,7 @@ public:
                                      vtkPolyDataMapper *mapper);
 
 protected:
-  vtkPipelineSize() {};
+  vtkPipelineSize() {}
   void GenericComputeSourcePipelineSize(vtkAlgorithm *src,
                                         int outputPort,
                                         unsigned long size[3]);
diff --git a/Filters/Parallel/vtkRectilinearGridOutlineFilter.h b/Filters/Parallel/vtkRectilinearGridOutlineFilter.h
index 02ed1bf..2b6793e 100644
--- a/Filters/Parallel/vtkRectilinearGridOutlineFilter.h
+++ b/Filters/Parallel/vtkRectilinearGridOutlineFilter.h
@@ -33,8 +33,8 @@ public:
   virtual void PrintSelf(ostream& os, vtkIndent indent);
 
 protected:
-  vtkRectilinearGridOutlineFilter() {};
-  ~vtkRectilinearGridOutlineFilter() {};
+  vtkRectilinearGridOutlineFilter() {}
+  ~vtkRectilinearGridOutlineFilter() {}
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int RequestInformation(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
diff --git a/Filters/ParallelFlowPaths/Testing/Cxx/CMakeLists.txt b/Filters/ParallelFlowPaths/Testing/Cxx/CMakeLists.txt
index 94d996e..4b4c397 100644
--- a/Filters/ParallelFlowPaths/Testing/Cxx/CMakeLists.txt
+++ b/Filters/ParallelFlowPaths/Testing/Cxx/CMakeLists.txt
@@ -1,11 +1,12 @@
 vtk_module_test_executable(TestPStream TestPStream.cxx)
 
-add_test(NAME ${vtk-module}Cxx-TestPStream-image
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-TestPStream-image
   COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS}
     $<TARGET_FILE:TestPStream>
-    -D ${VTK_DATA_ROOT}
+    -D ${VTK_TEST_DATA_DIR}
     -T ${VTK_TEST_OUTPUT_DIR}
-    -V Baseline/Parallel/TestPStream.cxx.png
+    -V DATA{../Data/Baseline/TestPStream.cxx.png,:}
     ${VTK_MPI_POSTFLAGS})
 
 vtk_module_test_executable(TestPStreamGeometry TestVectorFieldSource.cxx
@@ -25,5 +26,5 @@ vtk_module_test_executable(TestPStreamAMR TestPStreamAMR.cxx)
 add_test(NAME ${vtk-module}Cxx-TestPStreamAMR
   COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} 4 ${MPIEXEC_PREFLAGS}
     $<TARGET_FILE:TestPStreamAMR>
-    -D ${VTK_DATA_ROOT}
+    -D ${VTK_TEST_DATA_DIR}
     ${VTK_MPI_POSTFLAGS})
diff --git a/Filters/ParallelFlowPaths/Testing/Cxx/TestPParticleTracers.cxx b/Filters/ParallelFlowPaths/Testing/Cxx/TestPParticleTracers.cxx
index 0f89a5f..16218ce 100644
--- a/Filters/ParallelFlowPaths/Testing/Cxx/TestPParticleTracers.cxx
+++ b/Filters/ParallelFlowPaths/Testing/Cxx/TestPParticleTracers.cxx
@@ -12,30 +12,29 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
-#include "vtkPParticleTracer.h"
-#include "vtkPParticlePathFilter.h"
-#include "vtkPStreaklineFilter.h"
-#include "vtkMPIController.h"
-#include "vtkIdList.h"
-#include "vtkPoints.h"
-#include "vtkMath.h"
-#include "vtkNew.h"
-#include "vtkPolyDataMapper.h"
-#include "vtkCellArray.h"
 #include "vtkAlgorithm.h"
+#include "vtkCellArray.h"
+#include "vtkFloatArray.h"
+#include "vtkIdList.h"
+#include "vtkImageData.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-#include "vtkImageData.h"
+#include "vtkMPIController.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkPParticlePathFilter.h"
+#include "vtkPParticleTracer.h"
+#include "vtkPStreaklineFilter.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
 #include <vector>
 
-#define PRINT(x) cout<<"("<<Rank<<")"<<x<<endl;
-
-#define EXPECT(a,msg)\
-  if(!(a)) {                                    \
-  cerr<<"Line "<<__LINE__<<":"<<msg<<endl;\
+#define EXPECT(expected,actual,msg,so)                        \
+  if(!(expected == actual)) {                               \
+  vtkGenericWarningMacro(<<msg<< " Expecting a value of " << expected << \
+                         " but getting a value of " << actual << " for static option of " << so); \
   return EXIT_FAILURE;\
   }
 
@@ -113,22 +112,21 @@ protected:
       this->TimeSteps.push_back(i);
       }
 
-    Extent[0] = 0;
-    Extent[1] = 1;
-    Extent[2] = 0;
-    Extent[3] = 1;
-    Extent[4] = 0;
-    Extent[5] = 1;
-
-    BoundingBox[0]=0;
-    BoundingBox[1]=1;
-    BoundingBox[2]=0;
-    BoundingBox[3]=1;
-    BoundingBox[4]=0;
-    BoundingBox[5]=1;
+    this->Extent[0] = 0;
+    this->Extent[1] = 1;
+    this->Extent[2] = 0;
+    this->Extent[3] = 1;
+    this->Extent[4] = 0;
+    this->Extent[5] = 1;
+
+    this->BoundingBox[0]=0;
+    this->BoundingBox[1]=1;
+    this->BoundingBox[2]=0;
+    this->BoundingBox[3]=1;
+    this->BoundingBox[4]=0;
+    this->BoundingBox[5]=1;
   }
 
-
   void GetSpacing(double dx[3])
   {
     for(int i=0; i<3; i++)
@@ -175,7 +173,7 @@ protected:
                  range,2);
 
     outInfo->Set(vtkStreamingDemandDrivenPipeline::TIME_STEPS(),
-                 &TimeSteps[0], static_cast<int>(TimeSteps.size()));
+                 &this->TimeSteps[0], static_cast<int>(this->TimeSteps.size()));
 
 
     outInfo->Set(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), this->Extent,6);
@@ -208,18 +206,25 @@ protected:
       {
       int* uExtent = outInfo->Get(
         vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT());
-//      cout<<"Extent: "<<uExtent[0]<<" "<<uExtent[1]<<" "<<uExtent[2]<<" "<<uExtent[3]<<" "<<uExtent[4]<<" "<<uExtent[5]<<endl;
       outImage->SetExtent(uExtent);
-      int scalarType = vtkImageData::GetScalarType(outInfo);
-      int numComponents = vtkImageData::GetNumberOfScalarComponents(outInfo);
-      outImage->AllocateScalars(scalarType, numComponents);
+      //vtkGenericWarningMacro("time step in producer is " << timeStep);
+      vtkFloatArray* outArray1 = vtkFloatArray::New();
+      outArray1->SetName("Test");
+      outArray1->SetNumberOfComponents(1);
+      outArray1->SetNumberOfTuples(outImage->GetNumberOfPoints());
+      for(int i=0; i<outImage->GetNumberOfPoints(); i++)
+        {
+        outArray1->SetTuple(i, &timeStep);
+        }
+      outImage->GetPointData()->SetScalars(outArray1);
+      outArray1->Delete();
       }
     else
       {
       return 0 ;
       }
 
-    vtkDataArray* outArray = vtkDataArray::SafeDownCast(vtkAbstractArray::CreateArray(VTK_FLOAT));
+    vtkFloatArray* outArray = vtkFloatArray::New();
     outArray->SetName("Gradients");
     outArray->SetNumberOfComponents(3);
     outArray->SetNumberOfTuples(outImage->GetNumberOfPoints());
@@ -229,13 +234,13 @@ protected:
     outImage->GetPointData()->SetActiveVectors("Gradients");
 
     int *extent = outImage->GetExtent();
-//    cout<<"Extent: "<<extent[0]<<" "<<extent[1]<<" "<<extent[2]<<" "<<extent[3]<<" "<<extent[4]<<" "<<extent[5]<<endl;
-
     vtkIdType stepX,stepY,stepZ;
     outImage->GetContinuousIncrements(extent,stepX,stepY,stepZ);
 
     float * outPtr = static_cast<float*> (outImage->GetArrayPointerForExtent(outArray,extent));
-     int gridSize[3] = {Extent[1]-Extent[0],Extent[3]-Extent[2],Extent[5]-Extent[4]};
+    int gridSize[3] = {this->Extent[1]-this->Extent[0],
+                       this->Extent[3]-this->Extent[2],
+                       this->Extent[5]-this->Extent[4]};
 
     double* origin = outImage->GetOrigin();
 
@@ -263,17 +268,6 @@ protected:
       outPtr += stepZ;
      }
 
-    vtkDataArray* outArray1 = vtkDataArray::SafeDownCast(vtkAbstractArray::CreateArray(VTK_FLOAT));
-    outArray1->SetName("Test");
-    outArray1->SetNumberOfComponents(1);
-    outArray1->SetNumberOfTuples(outImage->GetNumberOfPoints());
-    for(int i=0; i<outImage->GetNumberOfPoints(); i++)
-      {
-      outArray1->SetTuple(i, &timeStep);
-      }
-    outImage->GetPointData()->AddArray(outArray1);
-    outArray1->Delete();
-
     return 1;
   }
 private:
@@ -289,14 +283,8 @@ private:
 
 vtkStandardNewMacro(TestTimeSource);
 
-
-
-int TestPParticleTracer(vtkMPIController* c)
+int TestPParticleTracer(vtkMPIController* c, int staticOption)
 {
-  int NumProcs = c->GetNumberOfProcesses();
-  int Rank = c->GetLocalProcessId();
-
-
   vtkNew<TestTimeSource> imageSource;
   int size(5);
   imageSource->SetExtent(0,size-1,0,1,0,size-1);
@@ -310,6 +298,8 @@ int TestPParticleTracer(vtkMPIController* c)
   ps->SetPoints(points.GetPointer());
 
   vtkNew<vtkPParticleTracer> filter;
+  filter->SetStaticMesh(staticOption);
+  filter->SetStaticSeeds(staticOption);
   filter->SetInputConnection(0,imageSource->GetOutputPort());
   filter->SetInputData(1,ps.GetPointer());
   filter->SetStartTime(0.0);
@@ -334,8 +324,8 @@ int TestPParticleTracer(vtkMPIController* c)
 
     vtkSmartPointer<vtkPolyDataMapper> traceMapper = vtkSmartPointer<vtkPolyDataMapper>::New();
     traceMapper->SetInputConnection(filter->GetOutputPort());
-    traceMapper->SetPiece(Rank);
-    traceMapper->SetNumberOfPieces(NumProcs);
+    traceMapper->SetPiece(c->GetLocalProcessId());
+    traceMapper->SetNumberOfPieces(c->GetNumberOfProcesses());
     traceMapper->Update();
 
     vtkPolyData* out = filter->GetOutput();
@@ -344,23 +334,20 @@ int TestPParticleTracer(vtkMPIController* c)
     numTraced+= pts->GetNumberOfPoints();
     }
 
-  if(Rank==0)
+  if(c->GetLocalProcessId()==0)
     {
-    EXPECT(numTraced==5,"wrong number of points traced: "<<numTraced);
+    EXPECT(5, numTraced, "PParticleTracer: wrong number of points.", staticOption);
     }
   else
     {
-    EXPECT(numTraced==6,"wrong number of points traced: "<<numTraced);
+    EXPECT(6, numTraced,"PParticleTracer: wrong number of points.", staticOption);
     }
   return EXIT_SUCCESS;
 }
 
 
-int TestPParticlePathFilter(vtkMPIController* c)
+int TestPParticlePathFilter(vtkMPIController* c, int staticOption)
 {
-  int NumProcs = c->GetNumberOfProcesses();
-  int Rank = c->GetLocalProcessId();
-
   vtkNew<TestTimeSource> imageSource;
   int size(5);
   imageSource->SetExtent(0,size-1,0,1,0,size-1);
@@ -373,7 +360,8 @@ int TestPParticlePathFilter(vtkMPIController* c)
   ps->SetPoints(points.GetPointer());
 
   vtkNew<vtkParticlePathFilter> filter;
-  PRINT(filter->GetClassName());
+  filter->SetStaticMesh(staticOption);
+  filter->SetStaticSeeds(staticOption);
   filter->SetInputConnection(0,imageSource->GetOutputPort());
   filter->SetInputData(1,ps.GetPointer());
 //  filter->SetForceReinjectionEveryNSteps(1);
@@ -383,8 +371,8 @@ int TestPParticlePathFilter(vtkMPIController* c)
 
   vtkSmartPointer<vtkPolyDataMapper> traceMapper = vtkSmartPointer<vtkPolyDataMapper>::New();
   traceMapper->SetInputConnection(filter->GetOutputPort());
-  traceMapper->SetPiece(Rank);
-  traceMapper->SetNumberOfPieces(NumProcs);
+  traceMapper->SetPiece(c->GetLocalProcessId());
+  traceMapper->SetNumberOfPieces(c->GetNumberOfProcesses());
   traceMapper->Update();
 
   vtkPolyData* out = filter->GetOutput();
@@ -395,23 +383,20 @@ int TestPParticlePathFilter(vtkMPIController* c)
     assert(pd->GetArray(i)->GetNumberOfTuples()==pts->GetNumberOfPoints());
     }
 
-
-  vtkNew<vtkIdList> polyLine;
   vtkCellArray* lines = out->GetLines();
-
-  if(Rank==1)
+  if(c->GetLocalProcessId() == 1)
     {
-    EXPECT(lines->GetNumberOfCells()==2,lines->GetNumberOfCells());
+    EXPECT(2, lines->GetNumberOfCells(),"PParticlePath: wrong number of cells.", staticOption);
     vtkNew<vtkIdList> trace;
     lines->InitTraversal();
     lines->GetNextCell(trace.GetPointer());
     int tail;
     tail = trace->GetId(trace->GetNumberOfIds()-1);
-    EXPECT(pd->GetArray("Test")->GetTuple1(tail)==3, pd->GetArray("Test")->GetTuple1(tail));
+    EXPECT(4, pd->GetArray("Test")->GetTuple1(tail), "PParticlePath: wrong tuple value.", staticOption);
     }
   else
     {
-    EXPECT(lines->GetNumberOfCells()==1, lines->GetNumberOfCells());
+    EXPECT(1, lines->GetNumberOfCells(), "PParticlePath: wrong number of cells.", staticOption);
 
     vtkNew<vtkIdList> trace;
     lines->InitTraversal();
@@ -419,20 +404,15 @@ int TestPParticlePathFilter(vtkMPIController* c)
     int head, tail;
     head = trace->GetId(0);
     tail = trace->GetId(trace->GetNumberOfIds()-1);
-    EXPECT(pts->GetPoint(head)[2]>0, pts->GetPoint(head)[1]);
-    EXPECT(pd->GetArray("Test")->GetTuple1(head)==3, pd->GetArray("Test")->GetTuple1(head));
-    EXPECT(pd->GetArray("Test")->GetTuple1(tail)==8, pd->GetArray("Test")->GetTuple1(tail));
+    EXPECT(4, pd->GetArray("Test")->GetTuple1(head), "PParticlePath: head", staticOption);
+    EXPECT(9, pd->GetArray("Test")->GetTuple1(tail), "PParticlePath: tail", staticOption);
     }
 
   return EXIT_SUCCESS;
-
 }
 
-int TestPStreaklineFilter(vtkMPIController* c)
+int TestPStreaklineFilter(vtkMPIController* c, int staticOption)
 {
-  int NumProcs = c->GetNumberOfProcesses();
-  int Rank = c->GetLocalProcessId();
-
   vtkNew<TestTimeSource> imageSource;
   int size(5);
   imageSource->SetExtent(0,size-1,0,1,0,size-1);
@@ -448,6 +428,8 @@ int TestPStreaklineFilter(vtkMPIController* c)
   ps->SetPoints(points.GetPointer());
 
   vtkNew<vtkPStreaklineFilter> filter;
+  filter->SetStaticMesh(staticOption);
+  filter->SetStaticSeeds(staticOption);
   filter->SetInputConnection(0,imageSource->GetOutputPort());
   filter->SetInputData(1,ps.GetPointer());
 
@@ -456,27 +438,26 @@ int TestPStreaklineFilter(vtkMPIController* c)
 
   vtkSmartPointer<vtkPolyDataMapper> traceMapper = vtkSmartPointer<vtkPolyDataMapper>::New();
   traceMapper->SetInputConnection(filter->GetOutputPort());
-  traceMapper->SetPiece(Rank);
-  traceMapper->SetNumberOfPieces(NumProcs);
+  traceMapper->SetPiece(c->GetLocalProcessId());
+  traceMapper->SetNumberOfPieces(c->GetNumberOfProcesses());
   traceMapper->Update();
 
   vtkPolyData* out = filter->GetOutput();
-  vtkNew<vtkIdList> polyLine;
   vtkCellArray* lines = out->GetLines();
 
-  if(Rank==0) //all the streaks go to 0 because of implementation
+  if(c->GetLocalProcessId() == 0) //all the streaks go to 0 because of implementation
     {
-    EXPECT(lines->GetNumberOfCells()==2,lines->GetNumberOfCells());
+    EXPECT(2, lines->GetNumberOfCells(),"PStreakline: wrong number of cells.", staticOption);
     vtkNew<vtkIdList> trace;
     lines->InitTraversal();
     lines->GetNextCell(trace.GetPointer());
-    EXPECT(trace->GetNumberOfIds()==13,"wrong # of points"<<trace->GetNumberOfIds());
+    EXPECT(13, trace->GetNumberOfIds(),"PStreakline: wrong number of points.", staticOption);
     lines->GetNextCell(trace.GetPointer());
-    EXPECT(trace->GetNumberOfIds()==13,"wrong # of points"<<trace->GetNumberOfIds());
+    EXPECT(13, trace->GetNumberOfIds(),"PStreakline: wrong number of points.", staticOption);
     }
   else
     {
-    EXPECT(out->GetNumberOfPoints()==0,"No other process should have streaks");
+    EXPECT(0, out->GetNumberOfPoints(),"PStreakline: No other process should have streaks.", staticOption);
     }
 
   return EXIT_SUCCESS;
@@ -489,14 +470,18 @@ int main(int argc, char* argv[])
   vtkMultiProcessController::SetGlobalController(c);
   c->Initialize(&argc,&argv);
 
-  EXPECT(TestPParticleTracer(c)==EXIT_SUCCESS,"TestPParticleTracer Failed");
+  int retVal = 0;
+  retVal += TestPParticleTracer(c, 1);
+  retVal += TestPParticleTracer(c, 0);
   c->Barrier();
 
-  EXPECT(TestPParticlePathFilter(c)==EXIT_SUCCESS,"TestPParticlePathFilter Failed");
+  retVal += TestPParticlePathFilter(c, 1);
+  retVal += TestPParticlePathFilter(c, 0);
   c->Barrier();
 
-  EXPECT(TestPStreaklineFilter(c)==EXIT_SUCCESS,"TestPStreaklineFilter Failed");
-
+  retVal += TestPStreaklineFilter(c, 1);
+  retVal += TestPStreaklineFilter(c, 0);
 
   c->Finalize();
+  return retVal;
 }
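
The hunk above replaces the old streaming EXPECT/PRINT macros with a four-argument EXPECT that reports the expected value, the observed value, and the static-mesh/static-seeds option of the failing run. A minimal sketch of how the reworked macro reads in isolation follows; the CheckTracedPointCount caller and its arguments are illustrative stand-ins, not part of the patch.

    #include "vtkSetGet.h" // vtkGenericWarningMacro
    #include <cstdlib>     // EXIT_SUCCESS / EXIT_FAILURE

    // Same shape as the macro introduced above: compare expected against
    // actual, warn with context, and bail out of the calling test function.
    #define EXPECT(expected, actual, msg, so)                                  \
      if (!((expected) == (actual))) {                                         \
        vtkGenericWarningMacro(<< msg << " Expecting a value of " << expected  \
                               << " but getting a value of " << actual         \
                               << " for static option of " << so);             \
        return EXIT_FAILURE;                                                   \
      }

    // Hypothetical caller; numTraced and staticOption stand in for the values
    // computed inside TestPParticleTracer().
    int CheckTracedPointCount(int numTraced, int staticOption)
    {
      EXPECT(5, numTraced, "PParticleTracer: wrong number of points.", staticOption);
      return EXIT_SUCCESS;
    }
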
diff --git a/Filters/ParallelFlowPaths/Testing/Data/Baseline/TestPStream.cxx.png.md5 b/Filters/ParallelFlowPaths/Testing/Data/Baseline/TestPStream.cxx.png.md5
new file mode 100644
index 0000000..4b65f07
--- /dev/null
+++ b/Filters/ParallelFlowPaths/Testing/Data/Baseline/TestPStream.cxx.png.md5
@@ -0,0 +1 @@
+a0249344c61ce57eb08f33e154e6ff43
diff --git a/Filters/ParallelFlowPaths/vtkPParticlePathFilter.cxx b/Filters/ParallelFlowPaths/vtkPParticlePathFilter.cxx
index e23ddd2..e60e677 100644
--- a/Filters/ParallelFlowPaths/vtkPParticlePathFilter.cxx
+++ b/Filters/ParallelFlowPaths/vtkPParticlePathFilter.cxx
@@ -20,11 +20,10 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkCharArray.h"
 #include "vtkFloatArray.h"
 #include "vtkNew.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkPParticlePathFilter);
 
-
 vtkPParticlePathFilter::vtkPParticlePathFilter()
 {
   this->It.Initialize(this);
@@ -35,13 +34,11 @@ void vtkPParticlePathFilter::ResetCache()
   this->It.Reset();
 }
 
-
 void vtkPParticlePathFilter::PrintSelf(ostream& os, vtkIndent indent)
 {
   Superclass::PrintSelf(os,indent);
 }
 
-
 int vtkPParticlePathFilter::OutputParticles(vtkPolyData* particles)
 {
   vtkNew<vtkPolyData> tailPoly;
diff --git a/Filters/ParallelFlowPaths/vtkPParticleTracerBase.cxx b/Filters/ParallelFlowPaths/vtkPParticleTracerBase.cxx
index 725bb95..7706825 100644
--- a/Filters/ParallelFlowPaths/vtkPParticleTracerBase.cxx
+++ b/Filters/ParallelFlowPaths/vtkPParticleTracerBase.cxx
@@ -12,44 +12,23 @@ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
 PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
-#include "vtkParticleTracerBase.h"
 #include "vtkPParticleTracerBase.h"
 
-#include "vtkCellArray.h"
 #include "vtkCellData.h"
-#include "vtkCompositeDataIterator.h"
-#include "vtkCompositeDataPipeline.h"
+#include "vtkCommunicator.h"
+#include "vtkCompositeDataSet.h"
+#include "vtkDataArray.h"
 #include "vtkDataSetAttributes.h"
-#include "vtkDoubleArray.h"
-#include "vtkExecutive.h"
-#include "vtkGenericCell.h"
-#include "vtkIdList.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
-#include "vtkIntArray.h"
-#include "vtkFloatArray.h"
-#include "vtkDoubleArray.h"
-#include "vtkCharArray.h"
-#include "vtkMath.h"
-#include "vtkMultiBlockDataSet.h"
 #include "vtkMultiProcessController.h"
 #include "vtkMultiProcessStream.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
-#include "vtkPointSet.h"
 #include "vtkPolyData.h"
-#include "vtkPolyLine.h"
-#include "vtkRungeKutta2.h"
-#include "vtkRungeKutta4.h"
-#include "vtkRungeKutta45.h"
 #include "vtkSmartPointer.h"
-#include "vtkTemporalInterpolatedVelocityField.h"
-#include "vtkOutputWindow.h"
-#include "vtkAbstractParticleWriter.h"
-//#include "vtkToolkits.h"
-#include "assert.h"
-#include "vtkMPIController.h"
-#include "vtkMultiProcessController.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
+#include <cassert>
 
 #include <algorithm>
 
@@ -59,16 +38,6 @@ vtkPParticleTracerBase::vtkPParticleTracerBase()
 {
   this->Controller = NULL;
   this->SetController(vtkMultiProcessController::GetGlobalController());
-  if(this->Controller)
-    {
-    this->Rank = this->Controller->GetLocalProcessId();
-    this->NumProcs = this->Controller->GetNumberOfProcesses();
-    }
-  else
-    {
-    this->Rank = 1;
-    this->NumProcs = 1;
-    }
 }
 
 //---------------------------------------------------------------------------
@@ -78,19 +47,20 @@ vtkPParticleTracerBase::~vtkPParticleTracerBase()
   this->SetParticleWriter(NULL);
 }
 
+//---------------------------------------------------------------------------
 vtkPolyData* vtkPParticleTracerBase::Execute(vtkInformationVector** inputVector)
 {
   vtkDebugMacro(<< "Clear MPI send list ");
   this->MPISendList.clear();
-
   this->Tail.clear();
 
   return vtkParticleTracerBase::Execute(inputVector);
 }
 
-bool vtkPParticleTracerBase::SendParticleToAnotherProcess(ParticleInformation &info,
-                                                          ParticleInformation &previousInfo,
-                                                          vtkPointData* pd)
+//---------------------------------------------------------------------------
+bool vtkPParticleTracerBase::SendParticleToAnotherProcess(
+  ParticleInformation &info, ParticleInformation &previousInfo,
+  vtkPointData* pd)
 {
   assert(info.PointId>=0); //the particle must have already been added;
 
@@ -102,7 +72,7 @@ bool vtkPParticleTracerBase::SendParticleToAnotherProcess(ParticleInformation &i
   remoteInfo.PreviousPD->CopyAllocate(this->ProtoPD);
 
   //only copy those that correspond to the original data fields
-  for(int i=0; i <ProtoPD->GetNumberOfArrays(); i++)
+  for(int i=0; i <this->ProtoPD->GetNumberOfArrays(); i++)
     {
     char* arrName = this->ProtoPD->GetArray(i)->GetName();
     vtkDataArray* arrFrom = pd->GetArray(arrName);
@@ -129,14 +99,14 @@ bool vtkPParticleTracerBase::SendParticleToAnotherProcess(ParticleInformation &i
 }
 
 //---------------------------------------------------------------------------
-void vtkPParticleTracerBase::AssignSeedsToProcessors(double t,
-  vtkDataSet *source, int sourceID, int ptId,
-  ParticleVector &LocalSeedPoints, int &LocalAssignedCount)
+void vtkPParticleTracerBase::AssignSeedsToProcessors(
+  double t, vtkDataSet *source, int sourceID, int ptId,
+  ParticleVector &localSeedPoints, int &localAssignedCount)
 {
   if(!this->Controller)
     {
     return Superclass::AssignSeedsToProcessors(t, source, sourceID, ptId,
-                                               LocalSeedPoints, LocalAssignedCount);
+                                               localSeedPoints, localAssignedCount);
     }
   ParticleVector candidates;
   //
@@ -172,53 +142,47 @@ void vtkPParticleTracerBase::AssignSeedsToProcessors(double t,
   //
   // TODO : can we just use the same array here for send and receive
   ParticleVector allCandidates;
-  this->TestParticles(candidates, LocalSeedPoints, LocalAssignedCount);
+  this->TestParticles(candidates, localSeedPoints, localAssignedCount);
   int TotalAssigned = 0;
-  this->Controller->Reduce(&LocalAssignedCount, &TotalAssigned, 1, vtkCommunicator::SUM_OP, 0);
+  this->Controller->Reduce(&localAssignedCount, &TotalAssigned, 1, vtkCommunicator::SUM_OP, 0);
 
   // Assign unique identifiers taking into account uneven distribution
   // across processes and seeds which were rejected
-  this->AssignUniqueIds(LocalSeedPoints);
+  this->AssignUniqueIds(localSeedPoints);
 }
+
 //---------------------------------------------------------------------------
 void vtkPParticleTracerBase::AssignUniqueIds(
-  vtkParticleTracerBaseNamespace::ParticleVector &LocalSeedPoints)
+  vtkParticleTracerBaseNamespace::ParticleVector &localSeedPoints)
 {
   if(!this->Controller)
     {
-    return Superclass::AssignUniqueIds(LocalSeedPoints);
+    return Superclass::AssignUniqueIds(localSeedPoints);
     }
 
-  vtkIdType ParticleCountOffset = 0;
-  vtkIdType numParticles = LocalSeedPoints.size();
+  vtkIdType particleCountOffset = 0;
+  vtkIdType numParticles = localSeedPoints.size();
 
-  if (this->NumProcs>1)
+  if (this->Controller->GetNumberOfProcesses()>1)
     {
-    vtkMPICommunicator* com = vtkMPICommunicator::SafeDownCast(
-      this->Controller->GetCommunicator());
-    if (com == 0) {
-      vtkErrorMacro("MPICommunicator needed for this operation.");
-      return;
-    }
     // everyone starts with the master index
-    com->Broadcast(&this->UniqueIdCounter, 1, 0);
-//    vtkErrorMacro("UniqueIdCounter " << this->UniqueIdCounter);
+    this->Controller->Broadcast(&this->UniqueIdCounter, 1, 0);
     // setup arrays used by the AllGather call.
-    std::vector<vtkIdType> recvNumParticles(this->NumProcs, 0);
+    std::vector<vtkIdType> recvNumParticles(this->Controller->GetNumberOfProcesses(), 0);
     // Broadcast and receive count to/from all other processes.
-    com->AllGather(&numParticles, &recvNumParticles[0], 1);
+    this->Controller->AllGather(&numParticles, &recvNumParticles[0], 1);
     // Each process is allocating a certain number.
     // start our indices from sum[0,this->Rank](numparticles)
-    for (int i=0; i<this->Rank; ++i)
+    for (int i=0; i<this->Controller->GetLocalProcessId(); ++i)
       {
-      ParticleCountOffset += recvNumParticles[i];
+      particleCountOffset += recvNumParticles[i];
       }
     for (vtkIdType i=0; i<numParticles; i++)
       {
-      LocalSeedPoints[i].UniqueParticleId =
-        this->UniqueIdCounter + ParticleCountOffset + i;
+      localSeedPoints[i].UniqueParticleId =
+        this->UniqueIdCounter + particleCountOffset + i;
       }
-    for (int i=0; i<this->NumProcs; ++i)
+    for (int i=0; i<this->Controller->GetNumberOfProcesses(); ++i)
       {
       this->UniqueIdCounter += recvNumParticles[i];
       }
@@ -226,29 +190,21 @@ void vtkPParticleTracerBase::AssignUniqueIds(
   else {
     for (vtkIdType i=0; i<numParticles; i++)
       {
-      LocalSeedPoints[i].UniqueParticleId =
-        this->UniqueIdCounter + ParticleCountOffset + i;
+      localSeedPoints[i].UniqueParticleId =
+        this->UniqueIdCounter + particleCountOffset + i;
       }
     this->UniqueIdCounter += numParticles;
   }
 }
 
-
+//---------------------------------------------------------------------------
 void vtkPParticleTracerBase::SendReceiveParticles(RemoteParticleVector &sParticles,
-                                                  RemoteParticleVector &rParticles
-                                                  )
+                                                  RemoteParticleVector &rParticles)
 {
-  vtkMPICommunicator* com = vtkMPICommunicator::SafeDownCast(this->Controller->GetCommunicator());
-  if (com == 0)
-    {
-    return;
-    }
-
   int numParticles = static_cast<int>(sParticles.size());
-  std::vector<int> allNumParticles(this->NumProcs, 0);
+  std::vector<int> allNumParticles(this->Controller->GetNumberOfProcesses(), 0);
   // Broadcast and receive size to/from all other processes.
-  com->AllGather(&numParticles, &allNumParticles[0], 1);
-
+  this->Controller->AllGather(&numParticles, &allNumParticles[0], 1);
 
   // write the message
   const int size1 = sizeof(ParticleInformation);
@@ -280,11 +236,11 @@ void vtkPParticleTracerBase::SendReceiveParticles(RemoteParticleVector &sParticl
       }
     }
 
-  std::vector<vtkIdType> messageLength(this->NumProcs, 0);
-  std::vector<vtkIdType> messageOffset(this->NumProcs, 0);
+  std::vector<vtkIdType> messageLength(this->Controller->GetNumberOfProcesses(), 0);
+  std::vector<vtkIdType> messageOffset(this->Controller->GetNumberOfProcesses(), 0);
   int allMessageSize(0);
   int numAllParticles(0);
-  for (int i=0; i<this->NumProcs; ++i)
+  for (int i=0; i<this->Controller->GetNumberOfProcesses(); ++i)
     {
     numAllParticles+= allNumParticles[i];
     messageLength[i] = allNumParticles[i]*typeSize;
@@ -295,15 +251,12 @@ void vtkPParticleTracerBase::SendReceiveParticles(RemoteParticleVector &sParticl
   //receive the message
 
   std::vector<char> recvMessage(allMessageSize,0);
-  com->AllGatherV(messageSize>0?  &sendMessage[0] : NULL,
-                  allMessageSize>0? &recvMessage[0] : NULL,
-                  messageSize,
-                  &messageLength[0],
-                  &messageOffset[0]);
-
+  this->Controller->AllGatherV(messageSize>0?  &sendMessage[0] : NULL,
+                               allMessageSize>0? &recvMessage[0] : NULL,
+                               messageSize, &messageLength[0],
+                               &messageOffset[0]);
 
   //read the message
-
   rParticles.resize(numAllParticles);
   for(int i=0; i<numAllParticles; i++)
     {
@@ -326,21 +279,20 @@ void vtkPParticleTracerBase::SendReceiveParticles(RemoteParticleVector &sParticl
       }
     }
 
-  assert(this->Rank==this->Rank);
-
-  std::vector<RemoteParticleInfo>::iterator first = rParticles.begin() + messageOffset[this->Rank]/typeSize;
-  std::vector<RemoteParticleInfo>::iterator last =  first + messageLength[this->Rank]/typeSize;
+  std::vector<RemoteParticleInfo>::iterator first =
+    rParticles.begin() + messageOffset[this->Controller->GetLocalProcessId()]/typeSize;
+  std::vector<RemoteParticleInfo>::iterator last =
+    first + messageLength[this->Controller->GetLocalProcessId()]/typeSize;
   rParticles.erase(first, last);
   // // don't want the ones that we sent away
   this->MPISendList.clear();
 }
 
-
-int vtkPParticleTracerBase::RequestUpdateExtent(vtkInformation* request,
-                                vtkInformationVector** inputVector,
-                                vtkInformationVector* outputVector)
+//---------------------------------------------------------------------------
+int vtkPParticleTracerBase::RequestUpdateExtent(
+  vtkInformation* request, vtkInformationVector** inputVector,
+  vtkInformationVector* outputVector)
 {
-
   vtkInformation *sourceInfo = inputVector[1]->GetInformationObject(0);
   if (sourceInfo)
     {
@@ -366,6 +318,7 @@ int vtkPParticleTracerBase::RequestData(
 
   return rvalue;
 }
+
 //---------------------------------------------------------------------------
 void vtkPParticleTracerBase::PrintSelf(ostream& os, vtkIndent indent)
 {
@@ -374,7 +327,7 @@ void vtkPParticleTracerBase::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Controller: " << this->Controller << endl;
 }
 
-
+//---------------------------------------------------------------------------
 void vtkPParticleTracerBase::UpdateParticleListFromOtherProcesses()
 {
   if(!this->Controller)
@@ -389,7 +342,7 @@ void vtkPParticleTracerBase::UpdateParticleListFromOtherProcesses()
 
   // the Particle lists will grow if any are received
   // so we must be very careful with our iterators
-//  this->TransmitReceiveParticles(this->MPISendList, received, true);
+  //  this->TransmitReceiveParticles(this->MPISendList, received, true);
   // classify all the ones we received
   if (received.size()>0)
     {
@@ -420,9 +373,9 @@ void vtkPParticleTracerBase::UpdateParticleListFromOtherProcesses()
     this->Tail.push_back(info);
     this->ParticleHistories.push_back(info.Current);
     }
-
 }
 
+//---------------------------------------------------------------------------
 bool vtkPParticleTracerBase::IsPointDataValid(vtkDataObject* input)
 {
   if(this->Controller->GetNumberOfProcesses() == 1)
@@ -491,13 +444,11 @@ bool vtkPParticleTracerBase::IsPointDataValid(vtkDataObject* input)
     return false;
     }
   int tmp = retVal;
-  cerr << retVal << " is my retval\n";
-  this->Controller->AllReduce(&tmp, &retVal, 1, vtkMPICommunicator::MIN_OP);
+  this->Controller->AllReduce(&tmp, &retVal, 1, vtkCommunicator::MIN_OP);
 
   return (retVal != 0);
 }
 
-
 //---------------------------------------------------------------------------
 vtkCxxSetObjectMacro(vtkPParticleTracerBase, Controller, vtkMultiProcessController);
 //---------------------------------------------------------------------------
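
The AssignUniqueIds rework above drops the cached Rank/NumProcs members and the vtkMPICommunicator downcast in favor of the generic vtkMultiProcessController collectives. A compact sketch of the id-offset scheme it implements, under the assumption that the controller is non-NULL and more than one process is running (the helper name AssignIdOffset is illustrative):

    #include "vtkMultiProcessController.h"
    #include <vector>

    vtkIdType AssignIdOffset(vtkMultiProcessController* controller,
                             vtkIdType& uniqueIdCounter, vtkIdType numParticles)
    {
      const int nProcs = controller->GetNumberOfProcesses();
      const int rank = controller->GetLocalProcessId();

      // every rank starts from the counter held by process 0
      controller->Broadcast(&uniqueIdCounter, 1, 0);

      // exchange how many particles each rank is about to label
      std::vector<vtkIdType> counts(nProcs, 0);
      controller->AllGather(&numParticles, &counts[0], 1);

      // this rank's ids begin after the particles of all lower ranks
      vtkIdType offset = uniqueIdCounter;
      for (int i = 0; i < rank; ++i)
        {
        offset += counts[i];
        }

      // advance the shared counter past everything labeled in this round
      for (int i = 0; i < nProcs; ++i)
        {
        uniqueIdCounter += counts[i];
        }
      return offset;
    }
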
diff --git a/Filters/ParallelFlowPaths/vtkPParticleTracerBase.h b/Filters/ParallelFlowPaths/vtkPParticleTracerBase.h
index 4f433f1..ebedb88 100644
--- a/Filters/ParallelFlowPaths/vtkPParticleTracerBase.h
+++ b/Filters/ParallelFlowPaths/vtkPParticleTracerBase.h
@@ -7,11 +7,11 @@
   All rights reserved.
   See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
 
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
+  This software is distributed WITHOUT ANY WARRANTY; without even
+  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+  PURPOSE.  See the above copyright notice for more information.
 
-=========================================================================*/
+  =========================================================================*/
 // .NAME vtkParticleTracerBase - A parallel particle tracer for vector fields
 // .SECTION Description
 // vtkPParticleTracerBase is the base class for parallel filters that advect particles
@@ -29,7 +29,6 @@
 
 //BTX
 #include <vector> // STL Header
-#include <list>   // STL Header
 //ETX
 
 #include "vtkFiltersParallelFlowPathsModule.h" // For export macro
@@ -37,72 +36,69 @@
 class VTKFILTERSPARALLELFLOWPATHS_EXPORT vtkPParticleTracerBase : public vtkParticleTracerBase
 {
 public:
+  vtkTypeMacro(vtkPParticleTracerBase,vtkParticleTracerBase);
+  void PrintSelf(ostream& os, vtkIndent indent);
 
-    vtkTypeMacro(vtkPParticleTracerBase,vtkParticleTracerBase);
-    void PrintSelf(ostream& os, vtkIndent indent);
-
-
-    // Description:
-    // Set/Get the controller used when sending particles between processes
-    // The controller must be an instance of vtkMPIController.
-    virtual void SetController(vtkMultiProcessController* controller);
-    vtkGetObjectMacro(Controller, vtkMultiProcessController);
-
-  protected:
-    struct  RemoteParticleInfo
-    {
-      vtkParticleTracerBaseNamespace::ParticleInformation Current;
-      vtkParticleTracerBaseNamespace::ParticleInformation Previous;
-      vtkSmartPointer<vtkPointData> PreviousPD;
-    };
+  // Description:
+  // Set/Get the controller used when sending particles between processes
+  // The controller must be an instance of vtkMPIController.
+  virtual void SetController(vtkMultiProcessController* controller);
+  vtkGetObjectMacro(Controller, vtkMultiProcessController);
 
-    typedef std::vector<RemoteParticleInfo>  RemoteParticleVector;
+protected:
+  struct  RemoteParticleInfo
+  {
+    vtkParticleTracerBaseNamespace::ParticleInformation Current;
+    vtkParticleTracerBaseNamespace::ParticleInformation Previous;
+    vtkSmartPointer<vtkPointData> PreviousPD;
+  };
 
+  typedef std::vector<RemoteParticleInfo>  RemoteParticleVector;
 
-     vtkPParticleTracerBase();
-    ~vtkPParticleTracerBase();
+  vtkPParticleTracerBase();
+  ~vtkPParticleTracerBase();
 
-    virtual int RequestUpdateExtent(vtkInformation* request,
+  virtual int RequestUpdateExtent(vtkInformation* request,
                                   vtkInformationVector** inputVector,
                                   vtkInformationVector* outputVector);
 
-    //
-    // Generate output
-    //
-    virtual int RequestData(vtkInformation* request,
-                            vtkInformationVector** inputVector,
-                            vtkInformationVector* outputVector);
+  //
+  // Generate output
+  //
+  virtual int RequestData(vtkInformation* request,
+                          vtkInformationVector** inputVector,
+                          vtkInformationVector* outputVector);
 
 //
 //BTX
 
-    virtual vtkPolyData* Execute(vtkInformationVector** inputVector);
-    virtual bool SendParticleToAnotherProcess(vtkParticleTracerBaseNamespace::ParticleInformation & info,
-                                              vtkParticleTracerBaseNamespace::ParticleInformation & previous,
-                                              vtkPointData*);
-
-    // Description : Before starting the particle trace, classify
-    // all the injection/seed points according to which processor
-    // they belong to. This saves us retesting at every injection time
-    // providing 1) The volumes are static, 2) the seed points are static
-    // If either are non static, then this step is skipped.
-    virtual void AssignSeedsToProcessors(double time,
-      vtkDataSet *source, int sourceID, int ptId,
-      vtkParticleTracerBaseNamespace::ParticleVector &LocalSeedPoints,
-      int &LocalAssignedCount);
-
-    // Description : once seeds have been assigned to a process, we
-    // give each one a uniqu ID. We need to use MPI to find out
-    // who is using which numbers.
-    virtual void AssignUniqueIds(
-      vtkParticleTracerBaseNamespace::ParticleVector &LocalSeedPoints);
-
-    // Description : Perform a GatherV operation on a vector of particles
-    // this is used during classification of seed points and also between iterations
-    // of the main loop as particles leave each processor domain
-    virtual void SendReceiveParticles(RemoteParticleVector &outofdomain, RemoteParticleVector &received);
-
-    void UpdateParticleListFromOtherProcesses();
+  virtual vtkPolyData* Execute(vtkInformationVector** inputVector);
+  virtual bool SendParticleToAnotherProcess(vtkParticleTracerBaseNamespace::ParticleInformation & info,
+                                            vtkParticleTracerBaseNamespace::ParticleInformation & previous,
+                                            vtkPointData*);
+
+  // Description : Before starting the particle trace, classify
+  // all the injection/seed points according to which processor
+  // they belong to. This saves us retesting at every injection time,
+  // provided that 1) the volumes are static and 2) the seed points are
+  // static. If either is non-static, this step is skipped.
+  virtual void AssignSeedsToProcessors(double time,
+                                       vtkDataSet *source, int sourceID, int ptId,
+                                       vtkParticleTracerBaseNamespace::ParticleVector &localSeedPoints,
+                                       int &localAssignedCount);
+
+  // Description : once seeds have been assigned to a process, we
+  // give each one a unique ID. We need to use MPI to find out
+  // who is using which numbers.
+  virtual void AssignUniqueIds(
+    vtkParticleTracerBaseNamespace::ParticleVector &localSeedPoints);
+
+  // Description : Perform a GatherV operation on a vector of particles
+  // this is used during classification of seed points and also between iterations
+  // of the main loop as particles leave each processor domain
+  virtual void SendReceiveParticles(RemoteParticleVector &outofdomain, RemoteParticleVector &received);
+
+  void UpdateParticleListFromOtherProcesses();
 
   // Description:
   // Method that checks that the input arrays are ordered the
@@ -121,10 +117,8 @@ public:
   // List used for transmitting between processors during parallel operation
   RemoteParticleVector MPISendList;
 
-  int Rank;
-  int NumProcs;
   RemoteParticleVector Tail; //this is to receive the "tails" of traces from other processes
-private:
+ private:
   vtkPParticleTracerBase(const vtkPParticleTracerBase&);  // Not implemented.
   void operator=(const vtkPParticleTracerBase&);  // Not implemented.
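
Per the Set/Get description above, the controller handed to SetController must be an instance of vtkMPIController. A hypothetical driver, mirroring the pattern used in the tests elsewhere in this patch, would wire it up roughly like this (the tracer setup lines are placeholders):

    #include "vtkMPIController.h"
    #include "vtkMultiProcessController.h"
    #include "vtkNew.h"
    #include "vtkPParticleTracer.h"

    int main(int argc, char* argv[])
    {
      vtkMPIController* controller = vtkMPIController::New();
      controller->Initialize(&argc, &argv);
      vtkMultiProcessController::SetGlobalController(controller);

      // The base class picks up the global controller in its constructor;
      // SetController() only needs to be called to override that choice.
      vtkNew<vtkPParticleTracer> tracer;
      tracer->SetController(controller);

      // ... connect the time-dependent input and the seed source here ...

      controller->Finalize();
      controller->Delete();
      return 0;
    }
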
 
diff --git a/Filters/ParallelFlowPaths/vtkPStreaklineFilter.cxx b/Filters/ParallelFlowPaths/vtkPStreaklineFilter.cxx
index cbb5128..1fce076 100644
--- a/Filters/ParallelFlowPaths/vtkPStreaklineFilter.cxx
+++ b/Filters/ParallelFlowPaths/vtkPStreaklineFilter.cxx
@@ -29,7 +29,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkNew.h"
 
 #include <vector>
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkPStreaklineFilter)
 
@@ -48,13 +48,13 @@ void vtkPStreaklineFilter::Finalize()
   int leader = 0;
   int tag = 129;
 
-  if(this->Rank==leader) //process 0 do the actual work
+  if(this->Controller->GetLocalProcessId()==leader) //process 0 do the actual work
     {
     vtkNew<vtkAppendPolyData> append;
     int totalNumPts(0);
-    for(int i=0; i<this->NumProcs; i++)
+    for(int i=0; i<this->Controller->GetNumberOfProcesses(); i++)
       {
-      if(i!=this->Rank)
+      if(i!=this->Controller->GetLocalProcessId())
         {
         vtkSmartPointer<vtkPolyData> output_i = vtkSmartPointer<vtkPolyData>::New();
         this->Controller->Receive(output_i, i, tag);
@@ -81,7 +81,6 @@ void vtkPStreaklineFilter::Finalize()
     this->Output->Initialize();
     }
 
-
   return;
 }
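
Finalize() above gathers every rank's streaklines onto the leader through blocking Receive calls and appends them; the other ranks hand over their piece and clear their output. A stripped-down sketch of that gather-to-rank-0 pattern, with the filter-specific point counting left out (localOutput and the helper name are stand-ins):

    #include "vtkAppendPolyData.h"
    #include "vtkMultiProcessController.h"
    #include "vtkNew.h"
    #include "vtkPolyData.h"
    #include "vtkSmartPointer.h"

    void GatherPolyDataToRankZero(vtkMultiProcessController* controller,
                                  vtkPolyData* localOutput, int tag)
    {
      const int leader = 0;
      if (controller->GetLocalProcessId() == leader)
        {
        vtkNew<vtkAppendPolyData> append;
        append->AddInputData(localOutput);
        for (int i = 0; i < controller->GetNumberOfProcesses(); ++i)
          {
          if (i == leader)
            {
            continue;
            }
          vtkSmartPointer<vtkPolyData> piece = vtkSmartPointer<vtkPolyData>::New();
          controller->Receive(piece, i, tag); // blocking receive from rank i
          append->AddInputData(piece);
          }
        append->Update();
        localOutput->ShallowCopy(append->GetOutput());
        }
      else
        {
        controller->Send(localOutput, leader, tag);
        localOutput->Initialize(); // non-leader ranks end up with empty output
        }
    }
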
 
diff --git a/Filters/ParallelFlowPaths/vtkPStreamTracer.cxx b/Filters/ParallelFlowPaths/vtkPStreamTracer.cxx
index 7b71d07..db73ad4 100644
--- a/Filters/ParallelFlowPaths/vtkPStreamTracer.cxx
+++ b/Filters/ParallelFlowPaths/vtkPStreamTracer.cxx
@@ -46,7 +46,7 @@
 
 #include <list>
 #include <vector>
-#include <assert.h>
+#include <cassert>
 
 #ifndef NDEBUG
 // #define DEBUGTRACE
diff --git a/Filters/ParallelFlowPaths/vtkPTemporalStreamTracer.cxx b/Filters/ParallelFlowPaths/vtkPTemporalStreamTracer.cxx
index 3366c8b..0cbe48a 100644
--- a/Filters/ParallelFlowPaths/vtkPTemporalStreamTracer.cxx
+++ b/Filters/ParallelFlowPaths/vtkPTemporalStreamTracer.cxx
@@ -46,7 +46,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkOutputWindow.h"
 #include "vtkAbstractParticleWriter.h"
 #include "vtkToolkits.h"
-#include "assert.h"
+#include <cassert>
 #include "vtkMPIController.h"
 
 using namespace vtkTemporalStreamTracerNamespace;
diff --git a/Filters/ParallelGeometry/CMakeLists.txt b/Filters/ParallelGeometry/CMakeLists.txt
index f37ef05..ecc9304 100644
--- a/Filters/ParallelGeometry/CMakeLists.txt
+++ b/Filters/ParallelGeometry/CMakeLists.txt
@@ -3,6 +3,8 @@ set(Module_SRCS
   vtkPStructuredGridConnectivity.cxx
   vtkPStructuredGridGhostDataGenerator.cxx
   vtkPUniformGridGhostDataGenerator.cxx
+  vtkPUnstructuredGridConnectivity.cxx
+  vtkPUnstructuredGridGhostDataGenerator.cxx
   )
 
 set_source_files_properties(
diff --git a/Filters/ParallelGeometry/Testing/Cxx/CMakeLists.txt b/Filters/ParallelGeometry/Testing/Cxx/CMakeLists.txt
index 975ec2c..ea91e24 100644
--- a/Filters/ParallelGeometry/Testing/Cxx/CMakeLists.txt
+++ b/Filters/ParallelGeometry/Testing/Cxx/CMakeLists.txt
@@ -1,3 +1,13 @@
-add_test_mpi(TestPStructuredGridConnectivity.cxx)
-add_test_mpi(TestPStructuredGridGhostDataGenerator.cxx)
-add_test_mpi(TestPUniformGridGhostDataGenerator.cxx)
+include(vtkMPI)
+
+vtk_add_test_mpi(TestPStructuredGridConnectivity.cxx)
+vtk_mpi_link(TestPStructuredGridConnectivity)
+
+vtk_add_test_mpi(TestPStructuredGridGhostDataGenerator.cxx)
+vtk_add_test_mpi(TestPUniformGridGhostDataGenerator.cxx)
+
+vtk_add_test_mpi(TestPUnstructuredGridConnectivity.cxx)
+vtk_mpi_link(TestPUnstructuredGridConnectivity)
+
+vtk_add_test_mpi(TestPUnstructuredGridGhostDataGenerator.cxx)
+vtk_mpi_link(TestPUnstructuredGridGhostDataGenerator)
\ No newline at end of file
diff --git a/Filters/ParallelGeometry/Testing/Cxx/TestPUnstructuredGridConnectivity.cxx b/Filters/ParallelGeometry/Testing/Cxx/TestPUnstructuredGridConnectivity.cxx
new file mode 100644
index 0000000..b58a537
--- /dev/null
+++ b/Filters/ParallelGeometry/Testing/Cxx/TestPUnstructuredGridConnectivity.cxx
@@ -0,0 +1,141 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestPStructuredGridConnectivity.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkCellData.h"
+#include "vtkCellType.h"
+#include "vtkDoubleArray.h"
+#include "vtkExtentRCBPartitioner.h"
+#include "vtkIdList.h"
+#include "vtkIdTypeArray.h"
+#include "vtkMPIController.h"
+#include "vtkMPIUtilities.h"
+#include "vtkMultiProcessController.h"
+#include "vtkPUnstructuredGridConnectivity.h"
+#include "vtkPointData.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkStructuredData.h"
+#include "vtkTimerLog.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkUnstructuredGridWriter.h"
+
+#include <sstream>
+#include <string>
+
+//#define DEBUG
+#include "UnstructuredGhostZonesCommon.h"
+
+//------------------------------------------------------------------------------
+// Program main
+int main(int argc, char** argv)
+{
+  int rc             = 0;
+  double ellapsed    = 0.0;
+  vtkTimerLog* timer = vtkTimerLog::New();
+
+  // STEP 0: Initialize
+  vtkMPIController* cntrl = vtkMPIController::New();
+  cntrl->Initialize( &argc, &argv, 0 );
+  vtkMultiProcessController::SetGlobalController( cntrl );
+  Rank   = cntrl->GetLocalProcessId();
+  NRanks = cntrl->GetNumberOfProcesses();
+
+  // STEP 1: Generate grid in parallel in each process
+  Grid = vtkUnstructuredGrid::New();
+  GenerateDataSet();
+
+  // STEP 2: Generate ghost zones
+  vtkMPIUtilities::Printf(cntrl,"[INFO]: Building ghost zones...");
+  vtkPUnstructuredGridConnectivity* ghostGen =
+      vtkPUnstructuredGridConnectivity::New();
+  ghostGen->SetController(cntrl);
+  ghostGen->RegisterGrid(Grid);
+  timer->StartTimer();
+  ghostGen->BuildGhostZoneConnectivity();
+  timer->StopTimer();
+  ellapsed = timer->GetElapsedTime();
+  vtkMPIUtilities::Printf(cntrl,"[DONE]\n");
+
+  // get some performance statistics
+  double minBuildGhostZonesTime = 0.0;
+  double maxBuildGhostZonesTime = 0.0;
+  double avgBuildGhostZonesTime = 0.0;
+  cntrl->Reduce(&ellapsed,&minBuildGhostZonesTime,1,vtkCommunicator::MIN_OP,0);
+  cntrl->Reduce(&ellapsed,&maxBuildGhostZonesTime,1,vtkCommunicator::MAX_OP,0);
+  cntrl->Reduce(&ellapsed,&avgBuildGhostZonesTime,1,vtkCommunicator::SUM_OP,0);
+  avgBuildGhostZonesTime /= static_cast<double>(cntrl->GetNumberOfProcesses());
+  vtkMPIUtilities::Printf(
+      cntrl,"-- Elapsed Time: min=%f, avg=%f, max=%f\n",
+      minBuildGhostZonesTime,avgBuildGhostZonesTime,maxBuildGhostZonesTime);
+
+  // STEP 3: Update ghost zones
+  std::ostringstream grdfname;   // input grid name at each iteration for I/O
+  std::ostringstream ghostfname; // ghosted grid name at each iteration for I/O
+  for(int i=0; i < 2; ++i)
+    {
+    vtkUnstructuredGrid* ghostGrid = vtkUnstructuredGrid::New();
+    grdfname.clear();
+    grdfname.str("");
+    grdfname << "INITIAL-T" << i;
+
+    ghostfname.clear();
+    ghostfname.str("");
+    ghostfname << "GHOSTED-T" << i;
+
+    // update grid in this time-step
+    UpdateGrid(i);
+
+#ifdef DEBUG
+    WriteDataSet(Grid,grdfname.str().c_str());
+#endif
+
+    vtkMPIUtilities::Printf(cntrl,"[INFO]: iteration=%d\n",i);
+    vtkMPIUtilities::Printf(cntrl,"[INFO]: Update ghost zones...");
+    timer->StartTimer();
+    ghostGen->UpdateGhosts();
+    timer->StopTimer();
+    ellapsed = timer->GetElapsedTime();
+    vtkMPIUtilities::Printf(cntrl,"[DONE]\n");
+
+    // get some performance statistics
+    double minGhostUpdateTime = 0.0;
+    double maxGhostUpdateTime = 0.0;
+    double avgGhostUpdateTime = 0.0;
+    cntrl->Reduce(&ellapsed,&minGhostUpdateTime,1,vtkCommunicator::MIN_OP,0);
+    cntrl->Reduce(&ellapsed,&maxGhostUpdateTime,1,vtkCommunicator::MAX_OP,0);
+    cntrl->Reduce(&ellapsed,&avgGhostUpdateTime,1,vtkCommunicator::SUM_OP,0);
+    avgGhostUpdateTime /= static_cast<double>(cntrl->GetNumberOfProcesses());
+    vtkMPIUtilities::Printf(
+          cntrl,"-- Elapsed Time: min=%f, avg=%f, max=%f\n",
+          minGhostUpdateTime,avgGhostUpdateTime,maxGhostUpdateTime);
+
+    ghostGrid->DeepCopy(ghostGen->GetGhostedGrid());
+
+#ifdef DEBUG
+    assert("pre: ghost grid should not be NULL!" && (ghostGrid != NULL) );
+    WriteDataSet(ghostGrid,ghostfname.str().c_str());
+#endif
+
+    rc += CheckGrid(ghostGrid,i);
+    ghostGrid->Delete();
+    }
+
+  // STEP 4: Delete the ghost generator
+  timer->Delete();
+  ghostGen->Delete();
+  Grid->Delete();
+  cntrl->Finalize();
+  cntrl->Delete();
+  return( rc );
+}
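
The timing block in the test above is repeated for every phase: reduce one local elapsed time to its minimum, maximum, and sum on rank 0, then divide the sum by the process count. A compact helper capturing that idiom (the name ReportElapsed is illustrative; only rank 0 ends up with meaningful statistics):

    #include "vtkCommunicator.h"
    #include "vtkMultiProcessController.h"

    void ReportElapsed(vtkMultiProcessController* controller, double elapsed,
                       double& minTime, double& avgTime, double& maxTime)
    {
      minTime = avgTime = maxTime = 0.0;
      controller->Reduce(&elapsed, &minTime, 1, vtkCommunicator::MIN_OP, 0);
      controller->Reduce(&elapsed, &maxTime, 1, vtkCommunicator::MAX_OP, 0);
      controller->Reduce(&elapsed, &avgTime, 1, vtkCommunicator::SUM_OP, 0);
      // only rank 0 holds the reduced values; turn the sum into an average there
      avgTime /= static_cast<double>(controller->GetNumberOfProcesses());
    }
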
diff --git a/Filters/ParallelGeometry/Testing/Cxx/TestPUnstructuredGridGhostDataGenerator.cxx b/Filters/ParallelGeometry/Testing/Cxx/TestPUnstructuredGridGhostDataGenerator.cxx
new file mode 100644
index 0000000..e287cbd
--- /dev/null
+++ b/Filters/ParallelGeometry/Testing/Cxx/TestPUnstructuredGridGhostDataGenerator.cxx
@@ -0,0 +1,125 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestPStructuredGridConnectivity.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkCellData.h"
+#include "vtkCellType.h"
+#include "vtkDoubleArray.h"
+#include "vtkExtentRCBPartitioner.h"
+#include "vtkIdList.h"
+#include "vtkIdTypeArray.h"
+#include "vtkMPIController.h"
+#include "vtkMPIUtilities.h"
+#include "vtkMultiProcessController.h"
+#include "vtkPUnstructuredGridConnectivity.h"
+#include "vtkPUnstructuredGridGhostDataGenerator.h"
+#include "vtkPointData.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkStructuredData.h"
+#include "vtkTimerLog.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkUnstructuredGridWriter.h"
+
+#include <sstream>
+#include <string>
+
+//#define DEBUG
+#include "UnstructuredGhostZonesCommon.h"
+
+//------------------------------------------------------------------------------
+// Program main
+int main(int argc, char** argv)
+{
+  int rc             = 0;
+  double ellapsed    = 0.0;
+  vtkTimerLog* timer = vtkTimerLog::New();
+
+  // STEP 0: Initialize
+  vtkMPIController* cntrl = vtkMPIController::New();
+  cntrl->Initialize( &argc, &argv, 0 );
+  vtkMultiProcessController::SetGlobalController( cntrl );
+  Rank   = cntrl->GetLocalProcessId();
+  NRanks = cntrl->GetNumberOfProcesses();
+
+  // STEP 1: Generate grid in parallel in each process
+  Grid = vtkUnstructuredGrid::New();
+  GenerateDataSet();
+
+  // STEP 2: Setup ghost data generator
+  vtkPUnstructuredGridGhostDataGenerator* ghostGenerator =
+      vtkPUnstructuredGridGhostDataGenerator::New();
+  ghostGenerator->SetInputData(Grid);
+
+  // STEP 3: Update ghost zones
+  std::ostringstream grdfname;   // input grid name at each iteration for I/O
+  std::ostringstream ghostfname; // ghosted grid name at each iteration for I/O
+  for(int i=0; i < 2; ++i)
+    {
+    grdfname.clear();
+    grdfname.str("");
+    grdfname << "INITIAL-T" << i;
+
+    ghostfname.clear();
+    ghostfname.str("");
+    ghostfname << "GHOSTED-T" << i;
+
+    // update grid in this iteration...
+    UpdateGrid(i);
+    Grid->Modified();
+#ifdef DEBUG
+    WriteDataSet(Grid,grdfname.str().c_str());
+#endif
+
+    // update ghost zones in this iteration...
+    vtkMPIUtilities::Printf(cntrl,"[INFO]: iteration=%d\n",i);
+    vtkMPIUtilities::Printf(cntrl,"[INFO]: Update ghost zones...");
+    timer->StartTimer();
+
+    ghostGenerator->Update();
+
+    timer->StopTimer();
+    ellapsed = timer->GetElapsedTime();
+    vtkMPIUtilities::Printf(cntrl,"[DONE]\n");
+
+    // get some performance statistics
+    double minGhostUpdateTime = 0.0;
+    double maxGhostUpdateTime = 0.0;
+    double avgGhostUpdateTime = 0.0;
+    cntrl->Reduce(&ellapsed,&minGhostUpdateTime,1,vtkCommunicator::MIN_OP,0);
+    cntrl->Reduce(&ellapsed,&maxGhostUpdateTime,1,vtkCommunicator::MAX_OP,0);
+    cntrl->Reduce(&ellapsed,&avgGhostUpdateTime,1,vtkCommunicator::SUM_OP,0);
+    avgGhostUpdateTime /= static_cast<double>(cntrl->GetNumberOfProcesses());
+    vtkMPIUtilities::Printf(
+          cntrl,"-- Elapsed Time: min=%f, avg=%f, max=%f\n",
+          minGhostUpdateTime,avgGhostUpdateTime,maxGhostUpdateTime);
+
+    vtkUnstructuredGrid* ghostGrid = vtkUnstructuredGrid::New();
+    ghostGrid->DeepCopy(ghostGenerator->GetOutput());
+#ifdef DEBUG
+    assert("pre: ghost grid should not be NULL!" && (ghostGrid != NULL) );
+    WriteDataSet(ghostGrid,ghostfname.str().c_str());
+#endif
+
+    rc += CheckGrid(ghostGrid,i);
+    ghostGrid->Delete();
+    } // END for
+
+  // STEP 4: Delete the ghost generator
+  timer->Delete();
+  ghostGenerator->Delete();
+  Grid->Delete();
+  cntrl->Finalize();
+  cntrl->Delete();
+  return( rc );
+}
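
Stripped of the timing and I/O scaffolding, the generator usage exercised by the test above boils down to a short pipeline: hand the local partition to vtkPUnstructuredGridGhostDataGenerator, Update(), and deep-copy the ghosted output. A sketch, assuming every rank calls it collectively (BuildGhostedCopy and localGrid are stand-ins):

    #include "vtkPUnstructuredGridGhostDataGenerator.h"
    #include "vtkUnstructuredGrid.h"

    vtkUnstructuredGrid* BuildGhostedCopy(vtkUnstructuredGrid* localGrid)
    {
      vtkPUnstructuredGridGhostDataGenerator* generator =
          vtkPUnstructuredGridGhostDataGenerator::New();
      generator->SetInputData(localGrid);
      generator->Update(); // collective call; every rank must reach it

      vtkUnstructuredGrid* ghosted = vtkUnstructuredGrid::New();
      ghosted->DeepCopy(generator->GetOutput());
      generator->Delete();
      return ghosted; // caller takes ownership and must Delete()
    }
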
diff --git a/Filters/ParallelGeometry/Testing/Cxx/UnstructuredGhostZonesCommon.h b/Filters/ParallelGeometry/Testing/Cxx/UnstructuredGhostZonesCommon.h
new file mode 100644
index 0000000..45157f1
--- /dev/null
+++ b/Filters/ParallelGeometry/Testing/Cxx/UnstructuredGhostZonesCommon.h
@@ -0,0 +1,389 @@
+/*=========================================================================
+
+ Program:   Visualization Toolkit
+ Module:    vtkPStructuredGridConnectivity.h
+
+ Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+ All rights reserved.
+ See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notice for more information.
+
+ =========================================================================*/
+#ifndef UNSTRUCTUREDGHOSTZONESCOMMON_H_
+#define UNSTRUCTUREDGHOSTZONESCOMMON_H_
+
+// VTK includes
+#include "vtkMathUtilities.h"
+#include "vtkMPIUtilities.h"
+
+// C/C++ includes
+#include <cmath>
+#include <iomanip>
+#include <sstream>
+
+//------------------------------------------------------------------------------
+//    G L O B A L   D A T A
+//------------------------------------------------------------------------------
+double Origin[3]  = {0.0,0.0,0.0};
+double Spacing[3] = {0.5,0.5,0.5};
+int Dims[3]       = {50,50,50};
+
+int Rank   = -1;
+int NRanks = 0;
+
+vtkUnstructuredGrid* Grid;
+
+int CheckGrid(vtkUnstructuredGrid* ghostGrid, const int iteration)
+{
+  int rc = 0;
+
+  int numOfErrors = 0;
+  std::ostringstream out;
+  std::ostringstream err;
+
+  vtkMPIController* cntrl =
+      vtkMPIController::SafeDownCast(
+          vtkMultiProcessController::GetGlobalController());
+
+  // check node fields by the iteration number
+  vtkDoubleArray* nodeXYZ =
+      vtkDoubleArray::SafeDownCast(
+          ghostGrid->GetPointData()->GetArray("NodeXYZ"));
+  assert("pre: nodeXYZ != NULL" && (nodeXYZ != NULL) );
+  assert("pre: nodeXYZ numtuples mismatch!" &&
+          (ghostGrid->GetNumberOfPoints()==nodeXYZ->GetNumberOfTuples()));
+  assert("pre: nodeXYZ numcomponents mismatch!" &&
+          (nodeXYZ->GetNumberOfComponents()==3));
+
+
+  out.str("");
+  err.str("");
+  double pnt[3];
+  double* ptr = static_cast<double*>(nodeXYZ->GetVoidPointer(0));
+  for(vtkIdType nodeIdx=0; nodeIdx < ghostGrid->GetNumberOfPoints(); ++nodeIdx)
+    {
+    ghostGrid->GetPoint(nodeIdx,pnt);
+    for(int dim=0; dim < 3; ++dim)
+      {
+      double actual   = ptr[nodeIdx*3+dim];
+      double expected = pnt[dim]+static_cast<double>(iteration);
+      if( ! vtkMathUtilities::NearlyEqual<double>(actual,expected) )
+        {
+        ++numOfErrors;
+        ++rc;
+        err << std::setprecision(5)
+            << "\t[ERROR]: value mismatch at node=" << nodeIdx
+            << " expected=" << expected
+            << " actual=" << actual
+            << " delta=" << std::fabs(actual-expected)
+            << std::endl;
+        } // END if
+      } // END for all dimensions
+    } // END for all nodes
+
+  out << "[INFO]: " << numOfErrors << "/" << ghostGrid->GetNumberOfPoints()
+      << " nodes appear wrong: " << std::endl;
+  out << err.str();
+  vtkMPIUtilities::SynchronizedPrintf(cntrl,"%s",out.str().c_str());
+
+  // likewise, check cell-fields
+  vtkDoubleArray* cellXYZ =
+      vtkDoubleArray::SafeDownCast(
+          ghostGrid->GetCellData()->GetArray("CentroidXYZ"));
+  assert("pre: cellXYZ numtuples mismatch!" &&
+          (ghostGrid->GetNumberOfCells()==cellXYZ->GetNumberOfTuples()));
+  assert("pre: cellXYZ numcomponents mismatch!" &&
+          (cellXYZ->GetNumberOfComponents()==3));
+
+  numOfErrors = 0;
+  out.str("");
+  err.str("");
+  double centroid[3];
+  double* cptr = static_cast<double*>(cellXYZ->GetVoidPointer(0));
+  vtkIdList* ptIds = vtkIdList::New();
+  for(vtkIdType cellIdx=0; cellIdx < ghostGrid->GetNumberOfCells(); ++cellIdx)
+    {
+    ghostGrid->GetCellPoints(cellIdx,ptIds);
+    assert("pre: numpoints per cell must be 8!" &&
+            ptIds->GetNumberOfIds()==8);
+    centroid[0] = 0.0;
+    centroid[1] = 0.0;
+    centroid[2] = 0.0;
+    for(vtkIdType n=0; n < ptIds->GetNumberOfIds(); ++n)
+      {
+      vtkIdType idx = ptIds->GetId(n);
+      ghostGrid->GetPoint(idx,pnt);
+      centroid[0] += pnt[0];
+      centroid[1] += pnt[1];
+      centroid[2] += pnt[2];
+      } // END for all cell nodes
+
+    centroid[0] /= static_cast<double>(ptIds->GetNumberOfIds());
+    centroid[1] /= static_cast<double>(ptIds->GetNumberOfIds());
+    centroid[2] /= static_cast<double>(ptIds->GetNumberOfIds());
+
+    for(int dim=0; dim < 3; ++dim)
+      {
+      double actual   = cptr[cellIdx*3+dim];
+      double expected = centroid[dim]+static_cast<double>(iteration);
+      if( ! vtkMathUtilities::NearlyEqual<double>(actual,expected) )
+        {
+        ++numOfErrors;
+        ++rc;
+        err << std::setprecision(5)
+            << "\t[ERROR]: cell value mismatch at cell=" << cellIdx
+            << " dimension=" << dim
+            << " expected=" << expected
+            << " actual=" << actual
+            << " delta= " << std::fabs(actual-expected)
+            << std::endl;
+        } // END if
+      } // END for all dimensions
+    } // END for all cells
+
+  out << "[INFO]: " << numOfErrors << "/" << ghostGrid->GetNumberOfCells()
+      << " cells appear wrong: " << std::endl;
+  out << err.str();
+  vtkMPIUtilities::SynchronizedPrintf(cntrl,"%s",out.str().c_str());
+  ptIds->Delete();
+  return( rc );
+}
+
+//------------------------------------------------------------------------------
+void UpdateGrid(const int iteration)
+{
+  // increment node fields by the iteration number
+  vtkDoubleArray* nodeXYZ =
+      vtkDoubleArray::SafeDownCast(
+          Grid->GetPointData()->GetArray("NodeXYZ"));
+  assert("pre: nodeXYZ != NULL" && (nodeXYZ != NULL) );
+  assert("pre: nodeXYZ numtuples mismatch!" &&
+          (Grid->GetNumberOfPoints()==nodeXYZ->GetNumberOfTuples()));
+  assert("pre: nodeXYZ numcomponents mismatch!" &&
+          (nodeXYZ->GetNumberOfComponents()==3));
+
+  double* ptr = static_cast<double*>(nodeXYZ->GetVoidPointer(0));
+  for(vtkIdType nodeIdx=0; nodeIdx < Grid->GetNumberOfPoints(); ++nodeIdx)
+    {
+    ptr[ nodeIdx*3   ] += static_cast<double>(iteration);
+    ptr[ nodeIdx*3+1 ] += static_cast<double>(iteration);
+    ptr[ nodeIdx*3+2 ] += static_cast<double>(iteration);
+    } // END for all nodes
+
+  // increment cell fields by the iteration number
+  vtkDoubleArray* cellXYZ =
+      vtkDoubleArray::SafeDownCast(
+          Grid->GetCellData()->GetArray("CentroidXYZ"));
+  assert("pre: nodeXYZ numtuples mismatch!" &&
+          (Grid->GetNumberOfCells()==cellXYZ->GetNumberOfTuples()));
+  assert("pre: nodeXYZ numcomponents mismatch!" &&
+          (cellXYZ->GetNumberOfComponents()==3));
+
+  double* cptr = static_cast<double*>(cellXYZ->GetVoidPointer(0));
+  for(vtkIdType cellIdx=0; cellIdx < Grid->GetNumberOfCells(); ++cellIdx)
+    {
+    cptr[ cellIdx*3   ] += static_cast<double>(iteration);
+    cptr[ cellIdx*3+1 ] += static_cast<double>(iteration);
+    cptr[ cellIdx*3+2 ] += static_cast<double>(iteration);
+    } // END for all cells
+}
+
+//------------------------------------------------------------------------------
+void SetXYZCellField()
+{
+  vtkDoubleArray* centerXYZ = vtkDoubleArray::New();
+  centerXYZ->SetName("CentroidXYZ");
+  centerXYZ->SetNumberOfComponents(3);
+  centerXYZ->SetNumberOfTuples( Grid->GetNumberOfCells() );
+  double* ptr = static_cast<double*>(centerXYZ->GetVoidPointer(0));
+
+  double centroid[3];
+  vtkIdList* ptIds = vtkIdList::New();
+  for(vtkIdType cell=0; cell < Grid->GetNumberOfCells(); ++cell)
+    {
+    centroid[0] = centroid[1] = centroid[2] = 0.0;
+    Grid->GetCellPoints(cell,ptIds);
+    for(vtkIdType n=0; n < ptIds->GetNumberOfIds(); ++n)
+      {
+      centroid[0] += Grid->GetPoint(ptIds->GetId(n))[0];
+      centroid[1] += Grid->GetPoint(ptIds->GetId(n))[1];
+      centroid[2] += Grid->GetPoint(ptIds->GetId(n))[2];
+      } // END for all cell nodes
+
+    centroid[0] /= static_cast<double>(ptIds->GetNumberOfIds());
+    centroid[1] /= static_cast<double>(ptIds->GetNumberOfIds());
+    centroid[2] /= static_cast<double>(ptIds->GetNumberOfIds());
+
+    memcpy(&ptr[cell*3],centroid,3*sizeof(double));
+    } // END for all cells
+
+  Grid->GetCellData()->AddArray( centerXYZ );
+  centerXYZ->Delete();
+  ptIds->Delete();
+}
+
+//------------------------------------------------------------------------------
+void SetXYZNodeField()
+{
+  vtkDoubleArray* nodeXYZ = vtkDoubleArray::New();
+  nodeXYZ->SetName("NodeXYZ");
+  nodeXYZ->SetNumberOfComponents(3);
+  nodeXYZ->SetNumberOfTuples( Grid->GetNumberOfPoints() );
+  double* ptr = static_cast<double*>(nodeXYZ->GetVoidPointer(0));
+
+  for(vtkIdType node=0; node < Grid->GetNumberOfPoints(); ++node)
+    {
+    // copy the point coordinates into the array
+    memcpy(&ptr[node*3],Grid->GetPoint(node),3*sizeof(double));
+    } // END for all nodes
+
+  Grid->GetPointData()->AddArray(nodeXYZ);
+  nodeXYZ->Delete();
+}
+
+//------------------------------------------------------------------------------
+void WriteDataSet(
+      vtkUnstructuredGrid* grid, const std::string& file)
+{
+  std::ostringstream oss;
+  oss << file << "-" << Rank << ".vtk";
+
+  vtkUnstructuredGridWriter* writer = vtkUnstructuredGridWriter::New();
+  writer->SetFileName(oss.str().c_str());
+  writer->SetInputData( grid );
+  writer->Update();
+  writer->Delete();
+}
+
+//------------------------------------------------------------------------------
+void GetPoint(
+      const int i, const int j, const int k,double pnt[3])
+{
+  pnt[0] = Origin[0]+i*Spacing[0];
+  pnt[1] = Origin[1]+j*Spacing[1];
+  pnt[2] = Origin[2]+k*Spacing[2];
+}
+
+//------------------------------------------------------------------------------
+// Some useful extent macros
+#define IMIN(ext) ext[0]
+#define IMAX(ext) ext[1]
+#define JMIN(ext) ext[2]
+#define JMAX(ext) ext[3]
+#define KMIN(ext) ext[4]
+#define KMAX(ext) ext[5]
+
+// Some useful IJK macros
+#define I(ijk) ijk[0]
+#define J(ijk) ijk[1]
+#define K(ijk) ijk[2]
+
+void GenerateDataSet()
+{
+  // STEP 0: partition the global extent to the number of processes
+  vtkExtentRCBPartitioner* partitioner = vtkExtentRCBPartitioner::New();
+  partitioner->SetGlobalExtent(0,Dims[0]-1,0,Dims[1]-1,0,Dims[2]-1);
+  partitioner->SetNumberOfPartitions( NRanks );
+  partitioner->Partition();
+
+  // STEP 1: get the extent of this process
+  int ext[6];
+  partitioner->GetPartitionExtent(Rank,ext);
+  partitioner->Delete();
+
+  // STEP 2: Allocate the unstructured grid instance of this process
+  int dataDescription = vtkStructuredData::GetDataDescriptionFromExtent(ext);
+  int numNodes = vtkStructuredData::GetNumberOfNodes(ext,dataDescription);
+  int numCells = vtkStructuredData::GetNumberOfCells(ext,dataDescription);
+
+  int dims[3];
+  vtkStructuredData::GetDimensionsFromExtent(ext,dims,dataDescription);
+
+  vtkPoints* nodes = vtkPoints::New();
+  nodes->SetDataTypeToDouble();
+  nodes->SetNumberOfPoints(numNodes);
+  double* nodesPtr = static_cast<double*>(nodes->GetVoidPointer(0));
+
+  vtkIdTypeArray* globalIds = vtkIdTypeArray::New();
+  globalIds->SetName("GlobalID");
+  globalIds->SetNumberOfComponents(1);
+  globalIds->SetNumberOfTuples(numNodes);
+  vtkIdType* globalIdxPtr =
+      static_cast<vtkIdType*>(globalIds->GetVoidPointer(0));
+
+  Grid->Allocate(numCells,8);
+
+  // STEP 3: Loop through the extent assigned in this process and update
+  // the nodes and connectivity of the unstructured grid.
+  const int hexNodeOffSet[]= {
+        0, 0, 0,
+        1, 0, 0,
+        1, 1, 0,
+        0, 1, 0,
+
+        0, 0, 1,
+        1, 0, 1,
+        1, 1, 1,
+        0, 1, 1,
+    };
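+  // Note (added for clarity): the offsets above list the four nodes of the
+  // lower k-face followed by the four nodes of the upper k-face, matching the
+  // VTK_HEXAHEDRON point ordering expected by InsertNextCell() below.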
+
+  vtkIdType globalNodeIdx;
+  vtkIdType localNodeIdx;
+  vtkIdType cell[8];
+  for(int i=IMIN(ext); i < IMAX(ext); ++i)
+    {
+    for(int j=JMIN(ext); j < JMAX(ext); ++j)
+      {
+      for(int k=KMIN(ext); k < KMAX(ext); ++k)
+        {
+
+        // local ijk of the grid cell
+        int lijk[3];
+        I(lijk) = i-IMIN(ext);
+        J(lijk) = j-JMIN(ext);
+        K(lijk) = k-KMIN(ext);
+
+        for(int node=0; node < 8; ++node)
+          {
+          // local ijk of the node
+          int ijk[3];
+          I(ijk) = I(lijk)+hexNodeOffSet[node*3];
+          J(ijk) = J(lijk)+hexNodeOffSet[node*3+1];
+          K(ijk) = K(lijk)+hexNodeOffSet[node*3+2];
+          localNodeIdx =
+              vtkStructuredData::ComputePointId(dims,ijk,dataDescription);
+
+          cell[node] = localNodeIdx;
+
+          // global ijk of the node
+          int IJK[3];
+          I(IJK) = i+hexNodeOffSet[node*3];
+          J(IJK) = j+hexNodeOffSet[node*3+1];
+          K(IJK) = k+hexNodeOffSet[node*3+2];
+          globalNodeIdx =
+              vtkStructuredData::ComputePointId(Dims,IJK,dataDescription);
+
+          globalIdxPtr[localNodeIdx] = globalNodeIdx;
+          GetPoint(I(IJK),J(IJK),K(IJK),&nodesPtr[localNodeIdx*3]);
+          } // END for all nodes
+
+        Grid->InsertNextCell(VTK_HEXAHEDRON,8,cell);
+        } // END for all k
+      } // END for all j
+    } // END for all i
+
+  Grid->SetPoints(nodes);
+  nodes->Delete();
+  Grid->GetPointData()->AddArray(globalIds);
+  globalIds->Delete();
+
+  SetXYZCellField();
+  SetXYZNodeField();
+}
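+
+//------------------------------------------------------------------------------
+// Illustrative usage sketch (not part of the upstream sources): a test driver
+// built on these helpers would, roughly, proceed as follows. The names
+// "connectivity" and "NUM_ITERATIONS" are placeholders, not defined here.
+//
+//   GenerateDataSet();                          // build this rank's partition
+//   connectivity->RegisterGrid( Grid );         // register the local grid
+//   connectivity->BuildGhostZoneConnectivity(); // one-time setup
+//   for(int i=1; i <= NUM_ITERATIONS; ++i)
+//     {
+//     UpdateGrid( i );                          // advance node/cell fields
+//     connectivity->UpdateGhosts();             // refresh ghost-zone data
+//     }
+//   WriteDataSet( Grid, "output" );             // optional per-rank dump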
+
+
+
+#endif /* UNSTRUCTUREDGHOSTZONESCOMMON_H_ */
diff --git a/Filters/ParallelGeometry/vtkPUnstructuredGridConnectivity.cxx b/Filters/ParallelGeometry/vtkPUnstructuredGridConnectivity.cxx
new file mode 100644
index 0000000..1ab880d
--- /dev/null
+++ b/Filters/ParallelGeometry/vtkPUnstructuredGridConnectivity.cxx
@@ -0,0 +1,2059 @@
+/*=========================================================================
+
+ Program:   Visualization Toolkit
+ Module:    vtkPUnstructuredGridConnectivity.cxx
+
+ Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+ All rights reserved.
+ See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notice for more information.
+
+ =========================================================================*/
+#include "vtkPUnstructuredGridConnectivity.h"
+
+// VTK includes
+#include "vtkBoundingBox.h"
+#include "vtkCell.h"
+#include "vtkCellData.h"
+#include "vtkFieldDataSerializer.h"
+#include "vtkIdList.h"
+#include "vtkIdTypeArray.h"
+#include "vtkIntArray.h"
+#include "vtkMPIController.h"
+#include "vtkMultiProcessStream.h"
+#include "vtkObjectFactory.h"
+#include "vtkPointData.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkUnstructuredGridWriter.h"
+
+// C/C++ includes
+#include <vector>
+#include <string>
+#include <sstream>
+#include <set>
+#include <map>
+#include <cassert>
+#include <algorithm>
+
+//#define DEBUG
+
+//------------------------------------------------------------------------------
+//  INTERNAL DATASTRUCTURES
+//------------------------------------------------------------------------------
+namespace vtk
+{
+namespace details
+{
+
+//------------------------------------------------------------------------------
+struct NodeLink {
+  int Rank;            // the rank to communicate (send or receive)
+  vtkIdType LocalIdx;  // the local node index w.r.t. the ghosted grid
+  vtkIdType GlobalIdx; // the global node index across all partitions
+};
+
+//------------------------------------------------------------------------------
+struct SndLink {
+  int Rank;            // the rank to send to
+  vtkIdType SourceIdx; // the local index w.r.t. the input grid
+};
+
+//------------------------------------------------------------------------------
+struct RcvLink {
+  int Rank;             // the rank to receive from
+  vtkIdType SourceIdx;  // the local index w.r.t. the remote grid from
+                        // which to receive
+  vtkIdType TargetIdx;  // the local index w.r.t. the input grid in this
+                        // process, into which the data from the source
+                        // will be copied.
+};
+
+//------------------------------------------------------------------------------
+struct CommunicationLinks {
+
+  // Description:
+  // Default constructor
+  CommunicationLinks()
+    {
+    this->RcvBuffersAllocated = false;
+    }
+
+  // Description:
+  // Destructor
+  ~CommunicationLinks()
+    {
+    // Clear all rank buffers
+    std::set<int>::iterator rankIter = this->NeighboringRanks.begin();
+
+    for( ; rankIter != this->NeighboringRanks.end(); ++rankIter)
+      {
+      int rank = *rankIter;
+      if( this->RcvBuffers.find(rank) != this->RcvBuffers.end() )
+        {
+        if(this->RcvBuffers[rank] != NULL)
+          {
+          delete [] this->RcvBuffers[rank];
+          this->RcvBuffers[rank] = NULL;
+          } // END if buffer not null
+        } // END if buffer entry for rank exists
+      } // END for all neighboring ranks
+
+    this->SndBuffers.clear();
+    this->RcvBuffers.clear();
+    this->SndBufferSizes.clear();
+    this->RcvBufferSizes.clear();
+
+    this->NeighboringRanks.clear();
+    this->SndNodeLinks.clear();
+    this->RcvNodeLinks.clear();
+    this->SndCellLinks.clear();
+    this->RcvCellLinks.clear();
+    }
+
+  // Description:
+  // Returns the local ID on the ghosted grid for the given node global ID.
+  vtkIdType GetTargetNodeId(const int vtkNotUsed(rmtRank),const vtkIdType globalIdx)
+    {
+    if(this->TargetNodeMapping.find(globalIdx)==this->TargetNodeMapping.end())
+      {
+      std::cerr << "ERROR: rmt node received has not target node mapping!\n";
+      return -1;
+      }
+    return(this->TargetNodeMapping[globalIdx]);
+    }
+
+  // Description:
+  // Returns the local ID on the ghosted grid for the given (rmtRank,rmtCellId)
+  // pair.
+  vtkIdType GetTargetCellId(const int rmtRank,const vtkIdType rmtCellId)
+    {
+    std::pair<int,vtkIdType> rmtInfo;
+    rmtInfo.first  = rmtRank;
+    rmtInfo.second = rmtCellId;
+    if(this->TargetCellMapping.find(rmtInfo) == this->TargetCellMapping.end())
+      {
+      std::cerr << "ERROR: rmt cell received has no target cell mapping!\n";
+      return -1;
+      }
+    return( this->TargetCellMapping[rmtInfo] );
+    }
+
+  // Description:
+  // Enqueues a receive on the node with the given local/global ID from
+  // the given remote rank.
+  void EnqueueNodeRcv(
+        const vtkIdType localIdx,
+        const vtkIdType globalIdx,
+        const int rmtRank)
+    {
+    this->NeighboringRanks.insert( rmtRank );
+
+    NodeLink lnk;
+    lnk.Rank      = rmtRank;
+    lnk.GlobalIdx = globalIdx;
+    lnk.LocalIdx  = localIdx;
+    if(this->RcvNodeLinks.find(rmtRank) == this->RcvNodeLinks.end())
+      {
+      std::vector< NodeLink > lnks;
+      lnks.push_back( lnk );
+      this->RcvNodeLinks[ rmtRank ] = lnks;
+      }
+    else
+      {
+      this->RcvNodeLinks[ rmtRank ].push_back( lnk );
+      }
+
+    this->TargetNodeMapping[ globalIdx ] = localIdx;
+    }
+
+  // Description:
+  // Enqueues a send on the node with the given local/global ID to the given
+  // remote rank.
+  void EnqueueNodeSend(
+        const vtkIdType localIdx,
+        const vtkIdType globalIdx,
+        const int rmtRank)
+    {
+    this->NeighboringRanks.insert( rmtRank );
+
+    NodeLink lnk;
+    lnk.Rank      = rmtRank;
+    lnk.GlobalIdx = globalIdx;
+    lnk.LocalIdx  = localIdx;
+    if(this->SndNodeLinks.find(rmtRank) == this->SndNodeLinks.end())
+      {
+      std::vector< NodeLink > lnks;
+      lnks.push_back( lnk );
+      this->SndNodeLinks[ rmtRank ] = lnks;
+      }
+    else
+      {
+      this->SndNodeLinks[ rmtRank ].push_back( lnk );
+      }
+    }
+
+  // Description:
+  // Enqueues a cell link to the communication lists.
+  void EnqueueCellLink(
+      const vtkIdType adjCell,
+      const vtkIdType ghostCell,
+      const vtkIdType rmtCell,
+      const int rmtRank)
+    {
+    this->NeighboringRanks.insert( rmtRank );
+
+    SndLink sndlnk;
+    sndlnk.Rank      = rmtRank;
+    sndlnk.SourceIdx = adjCell;
+    if(this->SndCellLinks.find(rmtRank) == this->SndCellLinks.end())
+      {
+      std::vector< SndLink > sndlinks;
+      sndlinks.push_back(sndlnk);
+      this->SndCellLinks[rmtRank] = sndlinks;
+      }
+    else
+      {
+      this->SndCellLinks[rmtRank].push_back(sndlnk);
+      }
+
+    RcvLink rcvlnk;
+    rcvlnk.Rank      = rmtRank;
+    rcvlnk.SourceIdx = rmtCell;
+    rcvlnk.TargetIdx = ghostCell;
+    if(this->RcvCellLinks.find(rmtRank) == this->RcvCellLinks.end())
+      {
+      std::vector<RcvLink> rcvlinks;
+      rcvlinks.push_back(rcvlnk);
+      this->RcvCellLinks[rmtRank] = rcvlinks;
+      }
+    else
+      {
+      this->RcvCellLinks[rmtRank].push_back(rcvlnk);
+      }
+
+    std::pair<int,vtkIdType> rmtInfo;
+    rmtInfo.first  = rmtRank;
+    rmtInfo.second = rmtCell;
+    this->TargetCellMapping[ rmtInfo ] = ghostCell;
+    }
+
+  // Maps a receiver node globalID to its localID w.r.t. the ghosted grid.
+  // Used when filling in ghost zone nodes.
+  std::map<vtkIdType,vtkIdType> TargetNodeMapping;
+
+  // Maps a (rmtRank,rmtCellId) pair to the cellID w.r.t. the ghosted grid.
+  // Used when filling in ghost zone cells.
+  std::map< std::pair<int,vtkIdType>, vtkIdType > TargetCellMapping;
+
+  // Flag that indicates if the receive buffers have been allocated.
+  bool RcvBuffersAllocated;
+
+  std::set<int> NeighboringRanks;
+
+  // Holds the number of bytes to receive from each process
+  std::map<int,int> RcvBufferSizes;
+
+  // Holds the receive buffer for each process
+  std::map<int,unsigned char*> RcvBuffers;
+
+  // Holds the number of bytes to be sent to each process
+  std::map<int,int> SndBufferSizes;
+
+  // Holds the send buffers to each neighboring rank
+  std::map< int,std::vector<unsigned char> > SndBuffers;
+
+  // List of send node-links for each remote process
+  std::map< int,std::vector< NodeLink > > SndNodeLinks;
+
+  // List of receive node-links for each remote process
+  std::map< int,std::vector< NodeLink > > RcvNodeLinks;
+
+  // List of send cell-links for each remote process
+  std::map< int,std::vector< SndLink > > SndCellLinks;
+
+  // List of receive cell-links for each remote process
+  std::map< int,std::vector< RcvLink > > RcvCellLinks;
+};
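+
+// Note (added for clarity): EnqueueCellLink() registers both directions of an
+// exchange at once -- a SndLink for the local adjacent cell to be sent to the
+// remote rank, and a RcvLink for the remote cell that fills the local ghost
+// cell -- so matching send/receive entries arise on two neighboring ranks as
+// each of them processes the other's boundary grid.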
+
+
+//------------------------------------------------------------------------------
+// Description:
+// Computes a hash code for the given list of IDs.
+// The hash code is a string composed by sorting the IDs of the cell/face
+// and delimiting them by a '.'
+std::string Hash(vtkIdType* ids, const vtkIdType N)
+{
+  std::sort(ids,ids+N);
+  std::ostringstream oss;
+  for(vtkIdType i=0; i < N; ++i)
+    {
+    oss << ids[i] << ".";
+    } // END for all N
+  return( oss.str() );
+}
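+
+// For example (illustrative): Hash on the ids {7,3,5,1} produces the string
+// "1.3.5.7.". Any permutation of the same ids yields the same code, which is
+// what makes it usable as an orientation-independent face key; note that the
+// ids are sorted in place.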
+
+//------------------------------------------------------------------------------
+// Description:
+// Computes a hash code for the given cell/face.
+// The hash code is a string composed by sorting the IDs of the cell/face
+// and delimiting them by a '.'
+std::string GetHashCode(vtkCell* c)
+{
+  std::vector<vtkIdType> nodeList;
+  nodeList.reserve( c->GetNumberOfPoints() );
+
+  for(vtkIdType nodeIdx=0; nodeIdx < c->GetNumberOfPoints(); ++nodeIdx)
+    {
+    nodeList.push_back( c->GetPointId(nodeIdx) );
+    }
+  assert("post: nodeList size mismatch!" &&
+       (static_cast<vtkIdType>(nodeList.size())==c->GetNumberOfPoints()) );
+
+  std::sort(nodeList.begin(),nodeList.end());
+
+  std::ostringstream oss;
+  for(unsigned int i=0; i < nodeList.size(); ++i)
+    {
+    oss << nodeList[ i ] << ".";
+    }
+  return( oss.str() );
+}
+
+//------------------------------------------------------------------------------
+// Description:
+// A simple struct that holds the face info.
+struct FaceInfo
+{
+  std::vector<vtkIdType> FaceIds;
+  vtkIdType CellAdjacency[2];
+  int Count;
+};
+
+//------------------------------------------------------------------------------
+// Description:
+// A simple data-structure to allow performing queries easily.
+struct MeshLinks {
+
+  // Description:
+  // Checks if the given face exists
+  bool HasFace(const std::string& face)
+    {return(this->FaceLinks.find(face)!=this->FaceLinks.end());};
+
+  // Description:
+  // Clears all data-structures
+  void Clear()
+    {
+    this->Global2LocalNodeIdx.clear();
+    this->FaceLinks.clear();
+    }
+
+  // Description:
+  // Links faces in the mesh to cells
+  void AddFaceLink(const std::string& face, vtkIdType cellIdx)
+    {
+    if( this->HasFace(face) )
+      {
+      this->FaceLinks[face].insert(cellIdx);
+      }
+    else
+      {
+      std::set<vtkIdType> cells;
+      cells.insert(cellIdx);
+      this->FaceLinks[face] = cells;
+      }
+    }
+
+  // Description:
+  // Given a global ID of a node, this method returns the
+  // corresponding local ID w.r.t. the input grid. A -1
+  // is returned if the node does not exist.
+  vtkIdType GetLocalNodeID(const vtkIdType globalIdx)
+    {
+    if( this->Global2LocalNodeIdx.find(globalIdx) !=
+        this->Global2LocalNodeIdx.end())
+      {
+      return this->Global2LocalNodeIdx[globalIdx];
+      }
+    return -1;
+    }
+
+  // Description:
+  // Builds cell links for the given *boundary* grid.
+  void BuildLinks(vtkUnstructuredGrid* grid)
+    {
+    vtkIdType numCells = grid->GetNumberOfCells();
+
+    vtkPointData* PD = grid->GetPointData();
+    assert("pre: point data does not have LOCAL ID" &&
+            PD->HasArray("LOCAL_ID"));
+    assert("pre: point data does not have GLOBAL ID" &&
+            PD->HasArray("GLOBAL_ID"));
+
+
+    vtkCellData* CD = grid->GetCellData();
+    assert("pre: cell data does not have local CELL ID" &&
+            CD->HasArray("LOCAL_CELL_ID"));
+
+    vtkIdType* globalIdPtr =
+     static_cast<vtkIdType*>(PD->GetArray("GLOBAL_ID")->GetVoidPointer(0));
+
+    vtkIdType* localIdPtr =
+     static_cast<vtkIdType*>(PD->GetArray("LOCAL_ID")->GetVoidPointer(0));
+
+    vtkIdType* cellIdPtr =
+     static_cast<vtkIdType*>(CD->GetArray("LOCAL_CELL_ID")->GetVoidPointer(0));
+
+    // Add global2Local node index
+    for(vtkIdType p=0; p < grid->GetNumberOfPoints(); ++p)
+      {
+      this->Global2LocalNodeIdx[ globalIdPtr[p] ] = localIdPtr[p];
+      } // END for all points
+
+    // Add face-adjacency information
+
+    // nodes vector used as temporary storage for edge/face nodes in order
+    // to construct a corresponding hashcode to uniquely identify an edge
+    // or face, regardless of orientation.
+    std::vector<vtkIdType>  nodes;
+
+    for(vtkIdType c=0; c < numCells; ++c)
+      {
+      ///@todo: optimize this -- use table lookup to get face ids, at least
+      ///  for all linear cells.
+      vtkCell* cell          = grid->GetCell(c);
+      vtkIdType localCellIdx = cellIdPtr[ c ];
+      // Add face links
+      for(int f=0; f < cell->GetNumberOfFaces(); ++f)
+        {
+        vtkCell* face      = cell->GetFace( f );
+        int N              = face->GetNumberOfPoints();
+        vtkIdType* nodePtr = face->GetPointIds()->GetPointer(0);
+
+        nodes.resize(N);
+        for(int i=0; i < N; ++i)
+          {
+          nodes[i] = globalIdPtr[ nodePtr[i] ];
+          } // END for all face nodes
+        std::string hashCode = Hash(&nodes[0],N);
+        this->AddFaceLink(hashCode,localCellIdx);
+        } // END for all cell faces
+      } // END for all cells
+    } // END BuildLinks
+
+  // Maps global nodes Ids on the local boundary grid to the local nodes
+  // in the input mesh
+  std::map< vtkIdType,vtkIdType > Global2LocalNodeIdx;
+
+  // Maps a face, identified using global IDs, to the local cell IDs
+  // from the input mesh.
+  std::map< std::string, std::set<vtkIdType> > FaceLinks;
+};
+
+//------------------------------------------------------------------------------
+// Description:
+// A simple struct to hold auxiliary information
+struct GridInfo
+{
+  // The cartesian bounds of the grid in this process.
+  double GridBounds[6];
+
+  // List of candidate ranks to exchange boundary grid information.
+  std::vector<int> CandidateRanks;
+
+  // For each candidate rank, stores the size of the buffer that needs to be
+  // allocated to communicate the boundary grids.
+  std::vector<int> RmtBGridSizes;
+
+  // Stores the remote boundary grid at each corresponding candidate rank.
+  std::vector<vtkUnstructuredGrid*> RmtBGrids;
+
+  // Flat vector to store the global grid bounds. The bounds of process i
+  // are stored within a contiguous region [i*6,i*6+5]
+  std::vector<double> GlobalGridBounds;
+
+  // List of boundary node IDs on the surface mesh of the input grid.
+  // Stored in a set so that we can easily look up whether a cell touches
+  // the boundary.
+  std::set<vtkIdType> SurfaceNodes;
+
+  // List of faces and metadata (i.e., FaceInfo) on the surface mesh of the
+  // input grid. Note that the connectivity of the surface mesh is w.r.t. the
+  // local IDs of the nodes in the input grid.
+  std::map<std::string,FaceInfo> SurfaceMesh;
+
+  // List of faces and metadata (i.e., FaceInfo) from the input grid.
+  std::map<std::string,FaceInfo> FaceList;
+
+  // Mapping of local node IDs, w.r.t. the input grid, to the corresponding
+  // node IDs on the BoundaryGrid.
+  std::map<vtkIdType,vtkIdType> BndryNodeMap;
+
+  // A grid that consists of only the boundary cells of the input grid. Each
+  // node in the boundary grid carries its local ID (w.r.t. the input grid)
+  // and its global ID in the LOCAL_ID and GLOBAL_ID point-data arrays.
+  vtkUnstructuredGrid* BoundaryGrid;
+
+  // MeshLinks for the boundary grid in this process, used to enable queries
+  // based on global IDs.
+  MeshLinks BoundaryGridLinks;
+
+  // History to keep track of nodes that are inserted into the ghosted grid,
+  // mapping the global ID to the ID of the node on the ghosted grid.
+  std::map<vtkIdType,vtkIdType> NodeHistory;
+
+  // History of cell hashcodes that are inserted into the ghosted grid, used
+  // to avoid inserting duplicate cells in the ghosted grid.
+  std::set<std::string> CellHistory;
+
+  // Description:
+  // Constructor.
+  GridInfo() { this->BoundaryGrid = NULL; }
+
+  // Description:
+  // Destructor
+  ~GridInfo() { this->Clear(); }
+
+  // Description:
+  // Clears all data from this GridInfo instance.
+  void Clear()
+  {
+    if( this->BoundaryGrid != NULL)
+      {
+      this->BoundaryGrid->Delete();
+      this->BoundaryGrid = NULL;
+      }
+
+    for(unsigned int i=0; i < this->RmtBGrids.size(); ++i)
+      {
+      if( this->RmtBGrids[ i ] != NULL )
+        {
+        this->RmtBGrids[ i ]->Delete();
+        this->RmtBGrids[ i ] = NULL;
+        }
+      }
+    this->RmtBGrids.clear();
+
+    this->BoundaryGridLinks.Clear();
+
+    this->RmtBGridSizes.clear();
+    this->GlobalGridBounds.clear();
+    this->CandidateRanks.clear();
+    this->FaceList.clear();
+    this->SurfaceMesh.clear();
+    this->BndryNodeMap.clear();
+    this->SurfaceNodes.clear();
+
+    this->NodeHistory.clear();
+    this->CellHistory.clear();
+  }
+
+  // Description:
+  // Updates the face list
+  void UpdateFaceList(vtkCell* face, vtkIdType cellidx)
+  {
+    std::string hashCode = GetHashCode(face);
+    if( this->FaceList.find(hashCode)==this->FaceList.end())
+      {
+      FaceInfo f;
+      for(vtkIdType nodeIdx=0; nodeIdx < face->GetNumberOfPoints(); ++nodeIdx)
+        {
+        f.FaceIds.push_back( face->GetPointId(nodeIdx) );
+        } // END for all nodes on the face
+      f.CellAdjacency[0] = cellidx;
+      f.Count = 1;
+      this->FaceList[hashCode] = f;
+      } // END if
+    else
+      {
+      // this is the 2nd time we encounter this face
+      assert(this->FaceList[hashCode].Count==1);
+      this->FaceList[hashCode].CellAdjacency[1] = cellidx;
+      this->FaceList[hashCode].Count++;
+      } // END else
+  }
+
+};
+
+} // END namespace details
+
+} // END namespace vtk
+
+//------------------------------------------------------------------------------
+
+vtkStandardNewMacro(vtkPUnstructuredGridConnectivity);
+
+//------------------------------------------------------------------------------
+vtkPUnstructuredGridConnectivity::vtkPUnstructuredGridConnectivity()
+{
+ this->InputGrid         = NULL;
+ this->GhostedGrid       = NULL;
+ this->Controller        = NULL;
+ this->GlobalIDFieldName = NULL;
+ this->AuxiliaryData     = new vtk::details::GridInfo();
+ this->CommLists         = new vtk::details::CommunicationLinks();
+}
+
+//------------------------------------------------------------------------------
+vtkPUnstructuredGridConnectivity::~vtkPUnstructuredGridConnectivity()
+{
+  this->InputGrid  = NULL;
+  this->Controller = NULL;
+
+  if( this->GhostedGrid != NULL )
+    {
+    this->GhostedGrid->Delete();
+    }
+
+  if( this->AuxiliaryData != NULL )
+    {
+    delete this->AuxiliaryData;
+    }
+
+  if( this->CommLists != NULL )
+    {
+    delete this->CommLists;
+    }
+
+  if( this->GlobalIDFieldName != NULL )
+    {
+    delete [] this->GlobalIDFieldName;
+    this->GlobalIDFieldName = NULL;
+    }
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::PrintSelf(
+      ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::RegisterGrid(
+      vtkUnstructuredGrid* gridPtr)
+{
+  assert("pre: gridPtr != NULL" && (gridPtr != NULL) );
+  if( this->InputGrid != NULL )
+    {
+    vtkErrorMacro("Only one grid per process is currently supported!");
+    }
+  this->InputGrid = gridPtr;
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::BuildGhostZoneConnectivity()
+{
+  // Sanity check
+  assert("pre: controller is NULL!" && (this->Controller != NULL) );
+  assert("pre: input grid is NULL!" && (this->InputGrid != NULL) );
+  assert("pre: auxiliary data is NULL!" && (this->AuxiliaryData != NULL) );
+
+  if(this->Controller->GetNumberOfProcesses() <= 1 )
+    {
+    // short-circuit
+    return;
+    }
+
+  if(this->GlobalIDFieldName == NULL)
+    {
+    // We assume "GlobalID" as the default
+    this->GlobalIDFieldName = new char[9];
+    strncpy(this->GlobalIDFieldName,"GlobalID", 9);
+    }
+
+  // STEP 0: Ensure the input grid has GlobalID information
+  if( !this->InputGrid->GetPointData()->HasArray(this->GlobalIDFieldName) )
+    {
+    vtkErrorMacro("Input grid has no global ID information");
+    }
+
+  // STEP 1: Build auxiliary data-structures and extract boundary grid
+  this->ExtractBoundaryGrid();
+  assert("post: boundary grid is NULL!" &&
+          (this->AuxiliaryData->BoundaryGrid != NULL) );
+
+  // STEP 2: Exchange grid bounds
+  this->AuxiliaryData->BoundaryGrid->GetBounds(this->AuxiliaryData->GridBounds);
+  this->ExchangeGridBounds();
+
+  // STEP 3: BoundingBox collision. This establishes the list of CandidateRanks
+  // to communicate the boundary grids.
+  this->BoundingBoxCollision();
+
+  // STEP 4: Exchange Boundary grids
+  this->ExchangeBoundaryGrids();
+
+  // STEP 5: Build Ghosted grid and communication lists
+  this->BuildGhostedGridAndCommLists();
+  this->Controller->Barrier();
+
+  // STEP 6: Clear all auxiliary data
+  this->AuxiliaryData->Clear();
+  this->Controller->Barrier();
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::UpdateGhosts()
+{
+  assert("pre: controller is NULL!" && (this->Controller != NULL) );
+
+  if(this->Controller->GetNumberOfProcesses() <= 1)
+    {
+    // short-circuit for serial runs; there is no ghosted grid to update
+    return;
+    }
+
+  // STEP 0: copies local data from the input grid to the ghosted grid
+  this->SynchLocalData();
+
+  // STEP 1: serialize data
+  this->SerializeGhostZones();
+
+  // STEP 2: create persistent receive buffers. This only executes the first
+  // time UpdateGhosts() is called. Afterwards, the method returns immediately.
+  this->CreatePersistentRcvBuffers();
+
+  // STEP 3: Allocate MPI request objects for non-blocking point-to-point comm.
+  int numNeis = static_cast<int>(this->CommLists->NeighboringRanks.size());
+  vtkMPICommunicator::Request* rqsts;
+  rqsts = new vtkMPICommunicator::Request[2*numNeis];
+  int rqstIdx = 0;
+
+  // STEP 4: post receives
+  std::set<int>::iterator rankIter = this->CommLists->NeighboringRanks.begin();
+  for( ;rankIter != this->CommLists->NeighboringRanks.end(); ++rankIter)
+    {
+    int rank = *rankIter;
+    assert("pre: cannot find rcv buffer size for rank!" &&
+           (this->CommLists->RcvBufferSizes.find(rank)!=
+                this->CommLists->RcvBufferSizes.end()));
+    assert("pre: cannot find buffer for rank!" &&
+            (this->CommLists->RcvBuffers.find(rank)!=
+                this->CommLists->RcvBuffers.end()));
+    assert("pre: rcv buffer for rank is NULL" &&
+           (this->CommLists->RcvBuffers[rank] != NULL));
+
+    this->Controller->NoBlockReceive(
+        this->CommLists->RcvBuffers[rank],
+        this->CommLists->RcvBufferSizes[rank],
+        rank,
+        0,
+        rqsts[rqstIdx]
+        );
+    ++rqstIdx;
+    } // END for all ranks
+
+  // STEP 5: post sends
+  rankIter = this->CommLists->NeighboringRanks.begin();
+  for( ;rankIter != this->CommLists->NeighboringRanks.end(); ++rankIter)
+    {
+    int rank = *rankIter;
+    assert("pre: cannot find snd buffer size for rank!" &&
+           (this->CommLists->SndBufferSizes.find(rank)!=
+               this->CommLists->SndBufferSizes.end()));
+    assert("pre: cannot find snd buffer for rank!" &&
+           (this->CommLists->SndBuffers.find(rank)!=
+               this->CommLists->SndBuffers.end()));
+
+    this->Controller->NoBlockSend(
+        &this->CommLists->SndBuffers[rank][0],
+        this->CommLists->SndBufferSizes[rank],
+        rank,
+        0,
+        rqsts[rqstIdx]
+        );
+    ++rqstIdx;
+    } // END for all ranks
+
+  // STEP 6: wait all
+  this->Controller->WaitAll(2*numNeis,rqsts);
+  delete [] rqsts;
+
+  // STEP 7: Update ghosted grid
+  this->DeSerializeGhostZones();
+
+  // STEP 8: Barrier synchronization
+  this->Controller->Barrier();
+}
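+
+// Note (added for clarity): the receive buffers created by
+// CreatePersistentRcvBuffers() are sized from the first exchange only, so the
+// per-rank message size produced by SerializeGhostZones() is implicitly
+// assumed to stay constant across subsequent UpdateGhosts() calls.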
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::FillGhostZoneCells(
+      const int neiRank,
+      vtkCellData* ghostData,
+      vtkIdType* cellIdx,
+      const unsigned int numGhostCells)
+{
+
+#ifdef NDEBUG
+  static_cast<void>(numGhostCells);
+#endif
+
+  // Sanity checks
+  assert("pre: ghostData should not be NULL!" && (ghostData != NULL) );
+  assert("pre: cellIdx should not be NULL!" && (cellIdx != NULL) );
+  assert("pre: CommLists object is NULL!" && (this->CommLists != NULL) );
+  assert("pre: GhostedGrid is NULL!" && (this->GhostedGrid != NULL) );
+
+  vtkCellData* CD = this->GhostedGrid->GetCellData();
+
+  // Loop through all arrays
+  for(int arrayIdx=0; arrayIdx < ghostData->GetNumberOfArrays(); ++arrayIdx)
+    {
+
+    vtkDataArray* ghostArray = ghostData->GetArray(arrayIdx);
+    assert("pre: array by that name not found on ghosted grid!" &&
+            CD->HasArray(ghostArray->GetName()));
+
+#ifndef NDEBUG
+    assert("pre: numtuples mismatch!" &&
+           (numGhostCells==ghostArray->GetNumberOfTuples()));
+#endif
+
+    vtkDataArray* targetArray = CD->GetArray( ghostArray->GetName() );
+    assert("pre: numcomponents mismatch between target and ghost array!" &&
+     ghostArray->GetNumberOfComponents()==targetArray->GetNumberOfComponents());
+
+    // loop through all the tuples of the array & copy values to the ghostzone
+    for(vtkIdType tuple=0; tuple < ghostArray->GetNumberOfTuples(); ++tuple)
+      {
+      vtkIdType cellId = cellIdx[ tuple ];
+      vtkIdType target = this->CommLists->GetTargetCellId(neiRank,cellId);
+      CD->CopyTuple(ghostArray,targetArray,tuple,target);
+      } // END for all tuples
+
+    } // END for all arrays
+
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::FillGhostZoneNodes(
+      const int neiRank,
+      vtkPointData* ghostData,
+      vtkIdType* globalIdx,
+      const unsigned int numGhostNodes)
+{
+
+#ifdef NDEBUG
+  static_cast<void>(numGhostNodes);
+#endif
+
+  // Sanity checks
+  assert("pre: ghostData should not be NULL!" && (ghostData != NULL) );
+  assert("pre: globalIdx should not be NULL!" && (globalIdx != NULL) );
+  assert("pre: CommLists object is NULL!" && (this->CommLists != NULL) );
+  assert("pre: GhostedGrid is NULL!" && (this->GhostedGrid != NULL) );
+
+  vtkPointData* PD = this->GhostedGrid->GetPointData();
+
+  // Loop through all arrays
+  for(int arrayIdx=0; arrayIdx < ghostData->GetNumberOfArrays(); ++arrayIdx)
+    {
+
+    vtkDataArray* ghostArray = ghostData->GetArray( arrayIdx );
+    if(strcmp(ghostArray->GetName(),this->GlobalIDFieldName)!=0)
+      {
+      assert("pre: array by that name not found on ghosted grid!" &&
+              PD->HasArray(ghostArray->GetName()));
+
+#ifndef NDEBUG
+      assert("pre: numtuples mismatch!" &&
+             (numGhostNodes==ghostArray->GetNumberOfTuples()));
+#endif
+
+      vtkDataArray* targetArray = PD->GetArray( ghostArray->GetName() );
+      assert("pre: numcomponents mismatch between target and ghost array!" &&
+              ghostArray->GetNumberOfComponents()==
+                  targetArray->GetNumberOfComponents());
+
+      // loop through all the tuples of the array & copy values to the
+      // ghostzone, i.e., the target array.
+      for(vtkIdType tuple=0; tuple < ghostArray->GetNumberOfTuples(); ++tuple)
+        {
+        vtkIdType globalId = globalIdx[ tuple ];
+        vtkIdType targetId = this->CommLists->GetTargetNodeId(neiRank,globalId);
+        PD->CopyTuple(ghostArray,targetArray,tuple,targetId);
+        } // END for all tuples
+
+      } // END if not the global ID field name
+
+    } // END for all arrays
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::CreatePersistentRcvBuffers()
+{
+  // Sanity Checks
+  assert("pre: CommLists object is NULL!" && (this->CommLists != NULL));
+  assert("pre: numranks != numstreams" &&
+    (this->CommLists->NeighboringRanks.size()==
+     this->CommLists->SndBufferSizes.size()));
+
+  // short-circuit here if the buffers have been already allocated
+  if( this->CommLists->RcvBuffersAllocated )
+    {
+    return;
+    }
+
+  // Allocate MPI request objects for non-blocking point-to-point comm.
+  int numNeis = static_cast<int>(this->CommLists->NeighboringRanks.size());
+  vtkMPICommunicator::Request* rqsts;
+  rqsts = new vtkMPICommunicator::Request[2*numNeis];
+
+  // Post receives
+  int rqstIdx = 0;
+  std::set<int>::iterator rankIter = this->CommLists->NeighboringRanks.begin();
+  for(;rankIter != this->CommLists->NeighboringRanks.end(); ++rankIter)
+    {
+    int rank = *rankIter;
+    this->CommLists->RcvBufferSizes[rank] = 0;
+    this->Controller->NoBlockReceive(
+        &this->CommLists->RcvBufferSizes[rank],1,rank,0,rqsts[rqstIdx]);
+    ++rqstIdx;
+    } // END for all neighboring ranks, post receives
+
+  // Post sends
+  rankIter = this->CommLists->NeighboringRanks.begin();
+  for(;rankIter != this->CommLists->NeighboringRanks.end(); ++rankIter)
+    {
+    int rank = *rankIter;
+    assert("pre: cannot find send bytestream for rank" &&
+           (this->CommLists->SndBufferSizes.find(rank)!=
+                this->CommLists->SndBufferSizes.end()));
+    this->Controller->NoBlockSend(
+        &this->CommLists->SndBufferSizes[rank],1,rank,0,rqsts[rqstIdx]);
+    ++rqstIdx;
+    } // END for all neighboring ranks, post sends
+
+  // Wait all
+  this->Controller->WaitAll(2*numNeis,rqsts);
+  delete [] rqsts;
+
+  // Allocate buffers for each neighboring rank
+  rankIter = this->CommLists->NeighboringRanks.begin();
+  for(;rankIter != this->CommLists->NeighboringRanks.end(); ++rankIter)
+    {
+    int rank = *rankIter;
+    assert("pre: cannot find buffersize for rank!" &&
+           (this->CommLists->RcvBufferSizes.find(rank) !=
+              this->CommLists->RcvBufferSizes.end()) );
+
+    // Get buffer size (communicated from the remote rank earlier)
+    int size = this->CommLists->RcvBufferSizes[rank];
+    assert("pre: buffer should not exist!" &&
+           (this->CommLists->RcvBuffers.find(rank)==
+              this->CommLists->RcvBuffers.end()) );
+
+    // Allocate receive buffer
+    this->CommLists->RcvBuffers[ rank ] = new unsigned char[size];
+    } // END for all neighboring ranks
+
+  // Set RcvBuffersAllocated to true
+  this->CommLists->RcvBuffersAllocated = true;
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::DeSerializeGhostZones()
+{
+  assert("pre: ghosted grid is NULL!" && (this->GhostedGrid != NULL) );
+  assert("pre: Persistent CommLists object is NULL!" &&
+          (this->CommLists != NULL));
+
+  vtkMultiProcessStream bytestream;
+  std::set<int>::iterator rankIter = this->CommLists->NeighboringRanks.begin();
+  for( ;rankIter != this->CommLists->NeighboringRanks.end(); ++rankIter)
+    {
+    int rank = *rankIter;
+    assert("pre: no rcv buffer for rank!" &&
+            (this->CommLists->RcvBuffers.find(rank)!=
+                this->CommLists->RcvBuffers.end()));
+    assert("pre: no rcv buffer size for rank!" &&
+            (this->CommLists->RcvBufferSizes.find(rank)!=
+              this->CommLists->RcvBufferSizes.end()));
+    assert("pre: rcvbuffer is NULL!" &&
+            this->CommLists->RcvBuffers[rank] != NULL);
+
+    bytestream.Reset();
+    bytestream.SetRawData(
+        this->CommLists->RcvBuffers[rank],
+        this->CommLists->RcvBufferSizes[rank]);
+
+    // Deserialize node-centered fields
+    unsigned int numNodeLinks = 0;
+    bytestream >> numNodeLinks;
+
+    // Deserialize globalID information
+    vtkIdType* globalIdx = new vtkIdType[numNodeLinks];
+    bytestream.Pop(globalIdx,numNodeLinks);
+
+    // Deserialize ghostzone pointdata for this rank
+    vtkPointData* ghostPD = vtkPointData::New();
+    vtkFieldDataSerializer::Deserialize(bytestream,ghostPD);
+
+    // Deserialize cell-centered fields
+    unsigned int numCellLinks = 0;
+    bytestream >> numCellLinks;
+
+    // Deserialize cellID information
+    vtkIdType* cellIdx = new vtkIdType[numCellLinks];
+    bytestream.Pop(cellIdx,numCellLinks);
+
+    // Deserialize ghostzone celldata for this rank
+    vtkCellData* ghostCD = vtkCellData::New();
+    vtkFieldDataSerializer::Deserialize(bytestream,ghostCD);
+
+    // Fill the ghost zones
+    this->FillGhostZoneNodes(rank,ghostPD,globalIdx,numNodeLinks);
+    this->FillGhostZoneCells(rank,ghostCD,cellIdx,numCellLinks);
+
+    // clear all dynamically allocated memory
+    ghostPD->Delete();
+    ghostCD->Delete();
+    delete [] globalIdx;
+    delete [] cellIdx;
+    } // END for all neighboring ranks
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::SerializeGhostZones()
+{
+  assert("pre: ghosted grid is NULL!" && (this->GhostedGrid != NULL) );
+  assert("pre: Persistent CommLists object is NULL!" &&
+          (this->CommLists != NULL));
+
+  vtkPointData* PD = this->GhostedGrid->GetPointData();
+  vtkCellData*  CD = this->GhostedGrid->GetCellData();
+
+  vtkMultiProcessStream bytestream;
+  std::set<int>::iterator rankIter = this->CommLists->NeighboringRanks.begin();
+  for(;rankIter != this->CommLists->NeighboringRanks.end(); ++rankIter)
+    {
+    int rank = *rankIter;
+
+    assert("pre: rank not found in SndNodeLinks!" &&
+        (this->CommLists->SndNodeLinks.find(rank)!=
+         this->CommLists->SndNodeLinks.end()));
+    assert("pre: rank not found SndCellLinks" &&
+         (this->CommLists->SndCellLinks.find(rank)!=
+          this->CommLists->SndCellLinks.end()));
+
+    // clear all data
+    bytestream.Reset();
+
+    // Serialize node-centered fields
+    std::vector<vtk::details::NodeLink>* nodelinks =
+            &this->CommLists->SndNodeLinks[rank];
+    bytestream << static_cast<unsigned int>(nodelinks->size());
+
+    // extract the local/global IDs of the nodes
+    std::vector< vtkIdType > globalIdx;
+    globalIdx.resize(nodelinks->size());
+    vtkIdList* tupleIds = vtkIdList::New();
+    tupleIds->SetNumberOfIds( static_cast<vtkIdType>(nodelinks->size()) );
+    unsigned int lnk = 0;
+    for(; lnk < static_cast<unsigned int>(nodelinks->size()); ++lnk)
+      {
+      globalIdx[lnk] = (*nodelinks)[lnk].GlobalIdx;
+      tupleIds->SetId(static_cast<vtkIdType>(lnk),(*nodelinks)[lnk].LocalIdx);
+      } // END for all links
+
+    // serialize the global IDs s.t. the remote rank knows which node to
+    // update once the data is transferred.
+    bytestream.Push(&globalIdx[0],static_cast<unsigned int>(nodelinks->size()));
+
+    // serialize the selected tuples for this remote rank
+    vtkFieldDataSerializer::SerializeTuples(tupleIds,PD,bytestream);
+    tupleIds->Delete();
+
+    // Serialize cell-centered fields
+    std::vector<vtk::details::SndLink>* celllinks =
+        &this->CommLists->SndCellLinks[rank];
+    bytestream << static_cast<unsigned int>(celllinks->size());
+
+    // extract the cell ids to send to this remote rank
+    vtkIdList* cellIds = vtkIdList::New();
+    cellIds->SetNumberOfIds(celllinks->size());
+    for(lnk=0; lnk < celllinks->size(); ++lnk)
+      {
+      cellIds->SetId(lnk,(*celllinks)[lnk].SourceIdx);
+      } // END for all links
+
+    // serialize the cellIds s.t. the remote rank knows which cell to update
+    // once the data is transferred
+    bytestream.Push(cellIds->GetPointer(0),cellIds->GetNumberOfIds());
+
+    // serialize the data on the selected cells
+    vtkFieldDataSerializer::SerializeTuples(cellIds,CD,bytestream);
+    cellIds->Delete();
+
+    // Set the bytestream for this rank
+    this->CommLists->SndBufferSizes[rank] = bytestream.RawSize();
+    if(this->CommLists->SndBuffers.find(rank) ==
+        this->CommLists->SndBuffers.end())
+      {
+      std::vector<unsigned char> buffer;
+      this->CommLists->SndBuffers[rank] = buffer;
+      } // END if no snd buffer entry found for rank
+
+    this->CommLists->SndBuffers[rank].resize(bytestream.RawSize());
+    bytestream.GetRawData(this->CommLists->SndBuffers[rank]);
+    } // END for all neighboring ranks
+}
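+
+// Note (added for clarity): the per-rank bytestream assembled above (and
+// consumed by DeSerializeGhostZones) is laid out as:
+//   [numNodeLinks][global node IDs][point-data tuples]
+//   [numCellLinks][remote cell IDs][cell-data tuples]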
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::SynchLocalData()
+{
+  // Sanity checks
+  assert("pre: input grid is NULL!" && (this->InputGrid != NULL) );
+  assert("pre: ghosted grid is NULL!" && (this->GhostedGrid != NULL) );
+
+  // STEP 0: Get pointers to input point-data and cell-data
+  vtkPointData* sourcePD = this->InputGrid->GetPointData();
+  assert("pre: source point-data is NULL!" && (sourcePD != NULL) );
+  vtkCellData*  sourceCD = this->InputGrid->GetCellData();
+  assert("pre: source cell-data is NULL!" && (sourceCD != NULL) );
+
+  // STEP 1: Get pointers to ghosted grid point-data and cell-data
+  vtkPointData* targetPD  = this->GhostedGrid->GetPointData();
+  assert("pre: target point-data is NULL!" && (targetPD != NULL) );
+  vtkCellData* targetCD = this->GhostedGrid->GetCellData();
+  assert("pre: target cell-data is NULL!" && (targetCD != NULL) );
+
+  // STEP 2: Copy point-data
+  for(int arrayIdx=0; arrayIdx < sourcePD->GetNumberOfArrays(); ++arrayIdx)
+    {
+    vtkDataArray* field = sourcePD->GetArray( arrayIdx );
+
+    // NOTE: The global IDs were already copied when the ghosted grid was
+    // constructed (in BuildGhostedGridAndCommLists()), since global IDs must
+    // be taken into account at that point.
+    if( strcmp(field->GetName(),this->GlobalIDFieldName)!=0 )
+      {
+      int ncomp         = field->GetNumberOfComponents();
+      assert("pre: ncomp must be at lease 1" && (ncomp >= 1) );
+
+      vtkIdType ntuples = this->GhostedGrid->GetNumberOfPoints();
+      vtkDataArray* ghostedField = NULL;
+      if( !targetPD->HasArray(field->GetName()))
+        {
+        ghostedField = vtkDataArray::CreateDataArray(field->GetDataType());
+        ghostedField->SetName(field->GetName());
+        ghostedField->SetNumberOfComponents(ncomp);
+        ghostedField->SetNumberOfTuples(ntuples);
+        targetPD->AddArray(ghostedField);
+        ghostedField->Delete();
+        } // END if array does not exist
+      ghostedField = targetPD->GetArray(field->GetName());
+      assert("pre: ghosted field is NULL!" && (ghostedField != NULL) );
+      memcpy(ghostedField->GetVoidPointer(0),field->GetVoidPointer(0),
+             ntuples*ncomp*field->GetDataTypeSize());
+      } // END if the array is not a global ID field
+    } // END for all point data arrays
+
+  // STEP 3: Copy cell-data
+  for(int arrayIdx=0; arrayIdx < sourceCD->GetNumberOfArrays(); ++arrayIdx)
+    {
+    vtkDataArray* field = sourceCD->GetArray( arrayIdx );
+    int ncomp         = field->GetNumberOfComponents();
+    assert("pre: ncomp must be at lease 1" && (ncomp >= 1) );
+    vtkIdType ntuples = this->GhostedGrid->GetNumberOfCells();
+
+    vtkDataArray* ghostedField = NULL;
+    if(!targetCD->HasArray(field->GetName()))
+      {
+      ghostedField = vtkDataArray::CreateDataArray(field->GetDataType());
+      ghostedField->SetName(field->GetName());
+      ghostedField->SetNumberOfComponents(ncomp);
+      ghostedField->SetNumberOfTuples(ntuples);
+      targetCD->AddArray(ghostedField);
+      ghostedField->Delete();
+      } // END if array does not exist
+
+    ghostedField = targetCD->GetArray(field->GetName());
+    assert("pre: ghosted field is NULL!" && (ghostedField != NULL) );
+    memcpy(ghostedField->GetVoidPointer(0),field->GetVoidPointer(0),
+           ntuples*ncomp*field->GetDataTypeSize());
+    } // END for all cell data arrays
+
+  // STEP 4: Finally, mark ghost cells. The ghost cells are marked only
+  // the first time UpdateGhosts() is called.
+  if( !targetCD->HasArray("GHOSTCELL") )
+    {
+    vtkIntArray* ghostCellArray = vtkIntArray::New();
+    ghostCellArray->SetName("GHOSTCELL");
+    ghostCellArray->SetNumberOfComponents(1);
+    ghostCellArray->SetNumberOfTuples(this->GhostedGrid->GetNumberOfCells());
+    int* ghostCellPtr = static_cast<int*>(ghostCellArray->GetVoidPointer(0));
+    vtkIdType ncells = this->GhostedGrid->GetNumberOfCells();
+    for(vtkIdType cellIdx=0; cellIdx < ncells; ++cellIdx)
+      {
+      if(cellIdx < this->InputGrid->GetNumberOfCells())
+        {
+        // cell is not a ghost
+        ghostCellPtr[cellIdx] = 0;
+        } // END if the cell is local
+      else
+        {
+        // cell is a ghost cell
+        ghostCellPtr[cellIdx] = 1;
+        } // END else
+      } // END for all cells
+    this->GhostedGrid->GetCellData()->AddArray(ghostCellArray);
+    ghostCellArray->Delete();
+    } // END if no ghostcell array
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::EnqueueNodeLinks(
+      const int rmtRank,
+      const vtkIdType ghostCell,
+      const vtkIdType adjCell,
+      vtkIdList* shared)
+{
+  // Sanity Checks
+  assert("pre: ghosted grid is NULL!" && (this->GhostedGrid != NULL) );
+  assert("pre: ghostCell out-of-bounds!" &&
+     (ghostCell >= 0) && (ghostCell < this->GhostedGrid->GetNumberOfCells()));
+  assert("pre: adjCell out-of-bounds!" &&
+     (adjCell >= 0) && (adjCell < this->GhostedGrid->GetNumberOfCells()));
+  assert("pre: ghost grid must have global IDs" &&
+      this->GhostedGrid->GetPointData()->HasArray(this->GlobalIDFieldName));
+
+  // STEP 0: Put the shared nodes in a set, s.t. we can do easy look up
+  std::set<vtkIdType> sharedNodes;
+  for(vtkIdType idx=0; idx < shared->GetNumberOfIds(); ++idx)
+    {
+    sharedNodes.insert(shared->GetId(idx));
+    }
+  assert("post: shared nodes mismatch!" &&
+      (shared->GetNumberOfIds()==static_cast<vtkIdType>(sharedNodes.size())));
+
+  // STEP 1: Get pointer to the global ID array on the ghosted grid
+  vtkIdType* globalIdxArray =
+    static_cast<vtkIdType*>(
+       this->GhostedGrid->GetPointData()->
+         GetArray(this->GlobalIDFieldName)->GetVoidPointer(0)
+         );
+
+  // STEP 2: local variables used to traverse the nodes of the adjCell and
+  // ghostCell
+  vtkIdType npts = 0;
+  vtkIdType* pts = NULL;
+
+  // STEP 3: Get pointer to the connectivity list of the adjacent cell
+  this->GhostedGrid->GetCellPoints(adjCell,npts,pts);
+  assert("post: adjCell pts is NULL!" && (pts != NULL) );
+  assert("post: npts >= 1" && (npts >= 1) );
+
+  // STEP 4: Loop through all adjacent cell nodes. The nodes of the adjacent
+  // cell that are not on the shared interface with the ghost cell are
+  // enqueued to be *sent* to the remote process of the ghost cell.
+  for(vtkIdType idx=0; idx < npts; ++idx)
+    {
+    vtkIdType localId  = pts[idx];
+    vtkIdType globalId = globalIdxArray[ localId ];
+    if(sharedNodes.find(globalId) == sharedNodes.end() )
+      {
+      this->CommLists->EnqueueNodeSend(localId,globalId,rmtRank);
+      } // END if node not at a shared interface
+    } // END for all adjacent cell nodes
+
+  // STEP 5: Get pointer to the connectivity list of the ghost cell
+  npts = 0;
+  pts  = NULL;
+  this->GhostedGrid->GetCellPoints(ghostCell,npts,pts);
+  assert("post: adjCell pts is NULL!" && (pts != NULL) );
+  assert("post: npts >= 1" && (npts >= 1) );
+
+  // STEP 6: Loop through all ghost cell nodes. The nodes of the ghost cell
+  // that are not on the shared interface with the local adjacent cell are
+  // enqueued to *receive* from the remote process that owns the ghost cell.
+  for(vtkIdType idx=0; idx < npts; ++idx)
+    {
+    vtkIdType localId  = pts[idx];
+    vtkIdType globalId = globalIdxArray[ localId ];
+    if( sharedNodes.find(globalId) == sharedNodes.end() )
+      {
+      this->CommLists->EnqueueNodeRcv(localId,globalId,rmtRank);
+      } // END if node not at a shared interface
+    } // END for all ghost cell nodes
+}
+
+//------------------------------------------------------------------------------
+bool vtkPUnstructuredGridConnectivity::IsCellConnected(
+      vtkCell* c, vtkIdType* globalId, const vtkIdType NumPoints,
+      vtkIdType& adjCell,
+      vtkIdList* shared)
+{
+#ifdef NDEBUG
+  static_cast<void>(NumPoints);
+#endif
+
+  adjCell = -1;
+
+  // nodes vector used as temporary storage for edge/face nodes in order
+  // to construct a corresponding hashcode to uniquely identify an edge
+  // or face, regardless of orientation.
+  std::vector<vtkIdType>  nodes;
+
+  // Check faces
+  for(int f=0; f < c->GetNumberOfFaces(); ++f)
+    {
+    vtkCell* face      = c->GetFace( f );
+    int N              = face->GetNumberOfPoints();
+    vtkIdType* nodePtr = face->GetPointIds()->GetPointer(0);
+
+     nodes.resize(N);
+     shared->SetNumberOfIds(N);
+     for(int i=0; i < N; ++i)
+       {
+
+#ifndef NDEBUG
+       assert("pre: face node out-of-bounds!" &&
+               (nodePtr[i] >= 0) && (nodePtr[i] < NumPoints) );
+#endif
+
+       nodes[i] = globalId[ nodePtr[i] ];
+       shared->SetId(i,nodes[i]);
+       } // END for all face nodes
+
+     std::string hashCode = vtk::details::Hash(&nodes[0],N);
+     if( this->AuxiliaryData->BoundaryGridLinks.HasFace( hashCode ) )
+       {
+       assert("pre: boundary faces must have at most one cell" &&
+        this->AuxiliaryData->BoundaryGridLinks.FaceLinks[hashCode].size()==1);
+       adjCell =
+         *(this->AuxiliaryData->BoundaryGridLinks.FaceLinks[hashCode].begin());
+       return true;
+       } // END if
+    } // END for all faces
+
+
+  // cell is not connected to the boundary grid of this process
+  return(false);
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::InsertGhostCellNodes(
+      vtkCell* ghostCell,
+      vtkIdTypeArray* ghostGridGlobalIdx,
+      vtkIdType* globalIdArray,
+      vtkUnstructuredGrid* bGrid,
+      vtkIdType* cellPts)
+{
+  assert("pre: ghost cell is NULL!" && (ghostCell != NULL) );
+  assert("pre: null global ID array!" && (globalIdArray != NULL) );
+  assert("pre: remote boundary grid is NULL!" && (bGrid != NULL) );
+  assert("pre: cellPts buffer is NULL!" && (cellPts != NULL) );
+
+  double pnt[3];
+  for(vtkIdType node=0; node < ghostCell->GetNumberOfPoints(); ++node)
+    {
+    // mesh index of the point w.r.t. the boundary grid
+    vtkIdType meshId   = ghostCell->GetPointId(node);
+
+    // global ID of the node
+    vtkIdType globalId = globalIdArray[ meshId ];
+
+    // get the local ID of the node, if it is one of the boundary nodes
+    vtkIdType localId  =
+        this->AuxiliaryData->BoundaryGridLinks.GetLocalNodeID(globalId);
+
+    if( localId != -1 )
+      {
+      // node is a boundary node
+      cellPts[ node ] = localId;
+      } // END if
+    else if( this->AuxiliaryData->NodeHistory.find( globalId ) !=
+             this->AuxiliaryData->NodeHistory.end() )
+      {
+      // we have previously inserted that node
+      cellPts[ node ] = this->AuxiliaryData->NodeHistory[ globalId ];
+      } // END else if
+    else
+      {
+      // insert the node & update the history
+      bGrid->GetPoint(meshId,pnt);
+      vtkIdType idx = this->GhostedGrid->GetPoints()->InsertNextPoint(pnt);
+      cellPts[ node ] = idx;
+      assert("post: new node id mismatch!" &&
+             (this->GhostedGrid->GetNumberOfPoints()-1)==idx);
+
+      // Update node history
+      this->AuxiliaryData->NodeHistory[ globalId ] = idx;
+
+      // Update global ID array on ghosted grid
+      ghostGridGlobalIdx->InsertNextValue( globalId );
+
+      assert("post: ghost grid global ID array size mismatch" &&
+             (this->GhostedGrid->GetNumberOfPoints()==
+              ghostGridGlobalIdx->GetNumberOfTuples()) );
+      } // END else
+    } // END for all nodes
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::ProcessRemoteGrid(
+      const int rmtRank, vtkUnstructuredGrid* bGrid)
+{
+  // Sanity Checks
+  assert("pre: remote bgrid is NULL!" && (bGrid != NULL) );
+  assert("pre: ghosted grid instance is NULL!" && (this->GhostedGrid != NULL));
+  assert("pre: controller is NULL!" && (this->Controller != NULL) );
+  assert("pre: remote rank is out-of-bounds!" &&
+          (rmtRank >= 0) &&
+          (rmtRank < this->Controller->GetNumberOfProcesses()) );
+  assert("pre: remote bgrid doesn't have global ID!"&&
+          bGrid->GetPointData()->HasArray("GLOBAL_ID"));
+  assert("preS: remote bgrid doesn't have local cell ID!" &&
+          bGrid->GetCellData()->HasArray("LOCAL_CELL_ID"));
+
+  // Get the GlobalID array of the output GhostedGrid. This array grows as
+  // ghost nodes are inserted.
+  vtkIdTypeArray* ghostGridGlobalIdArray =
+      vtkIdTypeArray::SafeDownCast(
+         this->GhostedGrid->GetPointData()->GetArray(this->GlobalIDFieldName));
+  assert("pre: cannot get global ID field from GhostedGrid" &&
+         (ghostGridGlobalIdArray != NULL) );
+
+  // Get pointer to the GlobalID array on the boundary grid.
+  vtkIdType* globalIdx =
+      static_cast<vtkIdType*>(
+          bGrid->GetPointData()->GetArray("GLOBAL_ID")->GetVoidPointer(0));
+
+  // Get pointer to the local cell IDs, w.r.t. the remote grid, of the cells
+  // on the boundary grid.
+  vtkIdType* rmtCellIdx =
+      static_cast<vtkIdType*>(
+          bGrid->GetCellData()->GetArray("LOCAL_CELL_ID")->GetVoidPointer(0));
+
+  // Loop through all remote boundary grid cells, check whether they abut
+  // the boundary grid of the input grid and, if so, update the ghosted
+  // grid.
+  vtkIdType adjCell = -1;
+  std::vector<vtkIdType> cellPts;
+  vtkIdList* sharedIds = vtkIdList::New();
+  for(vtkIdType c=0; c < bGrid->GetNumberOfCells(); ++c)
+    {
+    vtkCell* cell     = bGrid->GetCell(c);
+    vtkIdType rmtCell = rmtCellIdx[c];
+
+    if(this->IsCellConnected(
+          cell,globalIdx,bGrid->GetNumberOfPoints(),adjCell,sharedIds))
+      {
+      // Sanity checks
+      assert("pre: number of sharedIds must be at least 2" &&
+              (sharedIds->GetNumberOfIds()>=2) );
+      assert("pre: adjCell is out-of-bounds from input grid!" &&
+        (adjCell >= 0) && (adjCell < this->InputGrid->GetNumberOfCells()));
+      assert("pre: adjCell is out-of-bounds from ghosted grid!" &&
+        (adjCell >= 0) && (adjCell < this->GhostedGrid->GetNumberOfCells()));
+
+      // Insert cell points
+      cellPts.resize( cell->GetNumberOfPoints() );
+      this->InsertGhostCellNodes(
+          cell,ghostGridGlobalIdArray,globalIdx,bGrid,&cellPts[0]);
+
+      // Insert ghost cell, if this cell is not inserted by another partition
+      std::vector<vtkIdType> cellNodesCopy = cellPts;
+      std::string hc = vtk::details::Hash(&cellNodesCopy[0],cellNodesCopy.size());
+      if( this->AuxiliaryData->CellHistory.find(hc) ==
+          this->AuxiliaryData->CellHistory.end() )
+        {
+        vtkIdType ghostCellIdx =
+            this->GhostedGrid->InsertNextCell(
+                cell->GetCellType(),cell->GetNumberOfPoints(),&cellPts[0]);
+        assert("post: ghostCellIdx mismatch!" &&
+              (ghostCellIdx == this->GhostedGrid->GetNumberOfCells()-1) );
+
+        // update cell communication list
+        this->CommLists->EnqueueCellLink(adjCell,ghostCellIdx,rmtCell,rmtRank);
+
+        // Enqueue node links
+        this->EnqueueNodeLinks(rmtRank,ghostCellIdx,adjCell,sharedIds);
+
+        // update history s.t. we avoid adding duplicate cells.
+        this->AuxiliaryData->CellHistory.insert(hc);
+        } // END if
+
+      } // END if the cell is connected
+    } // END for all cells
+
+  // Delete sharedIds object
+  sharedIds->Delete();
+
+  // sanity check!
+  assert("post: ghost grid global ID array size mismatch" &&
+         (this->GhostedGrid->GetNumberOfPoints()==
+          ghostGridGlobalIdArray->GetNumberOfTuples()) );
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::BuildGhostedGridAndCommLists()
+{
+  assert("pre: ghosted grid should be NULL!" && (this->GhostedGrid==NULL));
+
+  // STEP 0: Deep-Copy the topology of the input grid to the ghosted grid.
+  this->GhostedGrid = vtkUnstructuredGrid::New();
+  vtkUnstructuredGrid* tmpPtr = vtkUnstructuredGrid::New();
+  tmpPtr->CopyStructure(this->InputGrid);
+  this->GhostedGrid->DeepCopy(tmpPtr);
+  tmpPtr->Delete();
+
+  // STEP 1: Deep-Copy the global IDs
+  vtkIdTypeArray* globalIdx = vtkIdTypeArray::New();
+  globalIdx->SetName(this->GlobalIDFieldName);
+  globalIdx->SetNumberOfComponents(1);
+  globalIdx->SetNumberOfTuples(this->GhostedGrid->GetNumberOfPoints());
+  memcpy(
+    globalIdx->GetVoidPointer(0),
+    this->InputGrid->GetPointData()
+      ->GetArray(this->GlobalIDFieldName)->GetVoidPointer(0),
+    sizeof(vtkIdType)*this->GhostedGrid->GetNumberOfPoints()
+    );
+
+  assert("pre: globalIdx size mismatch!" &&
+      globalIdx->GetNumberOfTuples()==this->GhostedGrid->GetNumberOfPoints());
+  this->GhostedGrid->GetPointData()->AddArray(globalIdx);
+  globalIdx->Delete();
+
+  // STEP 2: Loop through all remote boundary grids, find the cells that
+  // are face-adjacent and insert them to the ghosted grid.
+  unsigned int i=0;
+  for(;i<static_cast<unsigned int>(this->AuxiliaryData->RmtBGrids.size()); ++i)
+    {
+    int rmtRank = this->AuxiliaryData->CandidateRanks[ i ];
+    this->ProcessRemoteGrid(
+        rmtRank,this->AuxiliaryData->RmtBGrids[i]);
+    } // END for all remote grids
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::ExchangeBoundaryGridSizes(int size)
+{
+  int numCandidates =
+      static_cast<int>(this->AuxiliaryData->CandidateRanks.size());
+  this->AuxiliaryData->RmtBGridSizes.resize(numCandidates,0);
+
+  vtkMPICommunicator::Request* rqsts;
+  rqsts = new vtkMPICommunicator::Request[2*numCandidates];
+
+  // STEP 0: Post receives for each candidate rank
+  int idx = 0;
+  for(int i=0; i < numCandidates; ++i)
+   {
+   int rmtRank = this->AuxiliaryData->CandidateRanks[ i ];
+   this->Controller->NoBlockReceive(
+       &this->AuxiliaryData->RmtBGridSizes[i],1,rmtRank,
+       0,rqsts[idx]);
+   ++idx;
+   } // END for all candidate ranks
+
+  // STEP 1: Post sends
+  for(int i=0; i < numCandidates; ++i)
+    {
+    int rmtRank = this->AuxiliaryData->CandidateRanks[ i ];
+    this->Controller->NoBlockSend(
+        &size,1,rmtRank,0,rqsts[idx]);
+    ++idx;
+    }
+
+  // STEP 2: Block until communication is complete
+  this->Controller->WaitAll(2*numCandidates,rqsts);
+  delete [] rqsts;
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::ExchangeBoundaryGrids()
+{
+  assert("pre: Boundary Grid should not be NULL!" &&
+          (this->AuxiliaryData->BoundaryGrid != NULL));
+
+  // STEP 0: Serialize the local grid
+  vtkMultiProcessStream bytestream;
+  this->SerializeUnstructuredGrid(
+      this->AuxiliaryData->BoundaryGrid,bytestream);
+
+  // STEP 1: Point-to-Point exchange boundary grid sizes
+  this->ExchangeBoundaryGridSizes(bytestream.RawSize());
+
+  // STEP 2: Post receives
+  int numCandidates =
+      static_cast<int>(this->AuxiliaryData->CandidateRanks.size());
+  std::vector< unsigned char* > RawData;
+  RawData.resize(numCandidates);
+
+  vtkMPICommunicator::Request* rqsts;
+  rqsts = new vtkMPICommunicator::Request[2*numCandidates];
+
+  int idx = 0;
+  for(int i=0; i < numCandidates; ++i)
+    {
+    int rmtRank = this->AuxiliaryData->CandidateRanks[ i ];
+    int size    = this->AuxiliaryData->RmtBGridSizes[ i ];
+    RawData[i]  = new unsigned char[size];
+    this->Controller->NoBlockReceive(
+        RawData[i],size,rmtRank,0,rqsts[idx]);
+    ++idx;
+    } // END for all candidates
+
+  // STEP 3: Post sends
+  unsigned char* data = NULL;
+  unsigned int size   = 0;
+  bytestream.GetRawData(data,size);
+
+  for(int i=0; i < numCandidates; ++i)
+    {
+    int rmtRank = this->AuxiliaryData->CandidateRanks[ i ];
+    this->Controller->NoBlockSend(
+        data,size,rmtRank,0,rqsts[idx]);
+    ++idx;
+    } // END for all candidates
+
+  // STEP 4: Block until communication is complete
+  this->Controller->WaitAll(2*numCandidates,rqsts);
+  delete [] rqsts;
+  delete [] data;
+
+  // STEP 5: De-serialize remote boundary grids
+  this->AuxiliaryData->RmtBGrids.resize(numCandidates,NULL);
+  vtkMultiProcessStream tmpStream;
+  for(int i=0; i < numCandidates; ++i)
+    {
+    int sz = this->AuxiliaryData->RmtBGridSizes[ i ];
+    tmpStream.Reset();
+    tmpStream.SetRawData(RawData[i],sz);
+
+    this->AuxiliaryData->RmtBGrids[ i ] = vtkUnstructuredGrid::New();
+    this->DeSerializeUnstructuredGrid(
+        this->AuxiliaryData->RmtBGrids[i],tmpStream);
+
+#ifdef DEBUG
+    std::ostringstream oss;
+    oss << "BOUNDARY_GRID-P" << this->AuxiliaryData->CandidateRanks[i] << "-";
+    oss << "AT-RANK";
+    this->WriteUnstructuredGrid(
+        this->AuxiliaryData->RmtBGrids[i],oss.str().c_str());
+#endif
+
+    delete [] RawData[i];
+    RawData[i] = NULL;
+    } // END for all candidates
+
+  this->Controller->Barrier();
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::BoundingBoxCollision()
+{
+  // Sanity checks
+  assert("pre: controller is NULL!" && (this->Controller != NULL) );
+  int N = this->Controller->GetNumberOfProcesses();
+
+  assert("pre: auxiliary data is NULL!" && (this->AuxiliaryData != NULL) );
+  assert("pre: bounding box list size mismarch!" &&
+    (static_cast<int>(this->AuxiliaryData->GlobalGridBounds.size())==6*N) );
+
+  int myRank = this->Controller->GetLocalProcessId();
+
+  this->AuxiliaryData->CandidateRanks.reserve(N);
+
+  vtkBoundingBox localBox(this->AuxiliaryData->GridBounds);
+  vtkBoundingBox rmtBox;
+  for(int i=0; i < N; ++i)
+    {
+    if(i != myRank)
+      {
+      rmtBox.SetBounds(
+          this->AuxiliaryData->GlobalGridBounds[i*6],   // xmin
+          this->AuxiliaryData->GlobalGridBounds[i*6+1], // xmax
+          this->AuxiliaryData->GlobalGridBounds[i*6+2], // ymin
+          this->AuxiliaryData->GlobalGridBounds[i*6+3], // ymax
+          this->AuxiliaryData->GlobalGridBounds[i*6+4], // zmin
+          this->AuxiliaryData->GlobalGridBounds[i*6+5]  // zmax
+          );
+
+      if(localBox.Intersects(rmtBox))
+        {
+        this->AuxiliaryData->CandidateRanks.push_back( i );
+        } // END if
+      } // END if remote rank
+    } // END for all remote bounding boxes
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::ExchangeGridBounds()
+{
+  // Sanity checks
+  assert("pre: controller is NULL!" && (this->Controller != NULL) );
+
+  // STEP 0: Allocate buffers. Each process sends 6 doubles and receives 6
+  // doubles from each remote process. Hence, the rcv buffer is allocated
+  // as N*6.
+  int N = this->Controller->GetNumberOfProcesses();
+  this->AuxiliaryData->GlobalGridBounds.resize( N*6 );
+
+  // STEP 1: Communicate the bounds. Upon completion, GlobalGridBounds stores
+  // the bounds of each process in a flat vector strided by 6. The bounds of
+  // process P_i are stored contiguously in the region [i*6, i*6+5] of the
+  // GlobalGridBounds array.
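+  // For illustration, with N=2 ranks the resulting layout is:
+  //   [xmin0,xmax0,ymin0,ymax0,zmin0,zmax0,xmin1,xmax1,ymin1,ymax1,zmin1,zmax1]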
+  this->Controller->AllGather(
+      this->AuxiliaryData->GridBounds,
+      &this->AuxiliaryData->GlobalGridBounds[0],6);
+}
+
+//------------------------------------------------------------------------------
+bool vtkPUnstructuredGridConnectivity::IsCellOnBoundary(
+      vtkIdType* cellNodes, vtkIdType N)
+{
+  assert("pre: null cell nodes array!" && (cellNodes != NULL) );
+
+  for(int i=0; i < N; ++i)
+    {
+    if(this->AuxiliaryData->SurfaceNodes.find(cellNodes[i]) !=
+        this->AuxiliaryData->SurfaceNodes.end() )
+      {
+      return true;
+      } // END if
+    } // END for all nodes of the cell
+
+  return false;
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::MarkFaces()
+{
+  vtkIdType numCells = this->InputGrid->GetNumberOfCells();
+  for(vtkIdType cellIdx=0; cellIdx < numCells; ++cellIdx)
+    {
+    vtkCell* cell = this->InputGrid->GetCell( cellIdx );
+    assert("pre: cell is NULL!" && (cell != NULL) );
+
+    /// @todo: optimize this using lookup tables, at least for linear cells,
+    /// since we only need the IDs of the faces
+    for(int faceIdx=0; faceIdx < cell->GetNumberOfFaces(); ++faceIdx)
+      {
+      vtkCell* face = cell->GetFace( faceIdx );
+      this->AuxiliaryData->UpdateFaceList(face,cellIdx);
+      } // END for all faces
+    }  // END for all cells
+
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::ExtractSurfaceMesh()
+{
+  std::map<std::string,vtk::details::FaceInfo>::iterator iter;
+
+  iter = this->AuxiliaryData->FaceList.begin();
+  for(;iter != this->AuxiliaryData->FaceList.end(); ++iter)
+    {
+    assert("pre: a face can only be adjacent to at most two cells!" &&
+            (iter->second.Count <= 2) );
+
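+    // A face incident to exactly one cell lies on the boundary surface.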
+    if(iter->second.Count==1)
+     {
+     assert("pre: duplicate boundary face!" &&
+         this->AuxiliaryData->SurfaceMesh.find(iter->first)==
+             this->AuxiliaryData->SurfaceMesh.end());
+
+     this->AuxiliaryData->SurfaceMesh[ iter->first ] = iter->second;
+     for(unsigned int i=0; i < iter->second.FaceIds.size(); ++i)
+       {
+       this->AuxiliaryData->SurfaceNodes.insert(iter->second.FaceIds[i]);
+       } // END for all bndry face ids
+     } // END if face on boundary
+
+    } // END for all faces on the input mesh
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::ExtractBoundaryCell(
+    const vtkIdType cellIdx,
+    const vtkIdType numCellNodes,
+    vtkIdType* cellNodes,
+    vtkPoints* nodes,
+    vtkIdTypeArray* localIdx,
+    vtkIdTypeArray* globalIdx)
+{
+  // Sanity checks
+  assert("pre: input grid is NULL!" && (this->InputGrid != NULL) );
+  assert("pre: auxiliary data is NULL!" && (this->AuxiliaryData != NULL) );
+  assert("pre: nodes is NULL!" && (nodes != NULL) );
+  assert("pre: localIdx is NULL!" && (localIdx != NULL) );
+  assert("pre: globalIdx is NULL!" && (globalIdx != NULL) );
+  assert("pre: cellIdx is out-of-bounds!" &&
+      (cellIdx >= 0) && (cellIdx < this->InputGrid->GetNumberOfCells()));
+
+  // STEP 0: Get the global ID information from the input grid
+  vtkPointData* PD = this->InputGrid->GetPointData();
+  assert("pre: PD is NULL!" && (PD != NULL) );
+  vtkDataArray* G  = PD->GetArray(this->GlobalIDFieldName);
+  assert("pre: Global array, G, is NULL!" && (G != NULL) );
+  vtkIdType* globalInfo = static_cast<vtkIdType*>(G->GetVoidPointer(0));
+
+  // STEP 1: Get the cell type from the input grid
+  int cellType = this->InputGrid->GetCellType(cellIdx);
+
+  // STEP 2: Create vector for the cell connectivity that will be inserted
+  // in the boundary grid instance.
+  std::vector< vtkIdType > cellConnectivity;
+  cellConnectivity.resize(numCellNodes);
+
+  // STEP 3: Loop through the cell nodes and first update the nodal information
+  // of the boundary and the cell connectivity for this cell.
+  double pt[3];
+  for(vtkIdType nodeIdx=0; nodeIdx < numCellNodes; ++nodeIdx)
+    {
+    vtkIdType ptIdx = cellNodes[ nodeIdx ]; // local idx w.r.t. input grid
+    if( this->AuxiliaryData->BndryNodeMap.find(ptIdx) ==
+        this->AuxiliaryData->BndryNodeMap.end() )
+      {
+      // insert new point on the boundary grid from the input grid
+      this->InputGrid->GetPoint(ptIdx,pt);
+
+      vtkIdType idx             = nodes->InsertNextPoint(pt);
+      cellConnectivity[nodeIdx] = idx;
+      localIdx->InsertNextValue(ptIdx);
+      globalIdx->InsertNextValue(globalInfo[ptIdx]);
+
+      // update the node map
+      this->AuxiliaryData->BndryNodeMap[ptIdx] = idx;
+      }
+    else
+      {
+      // node has already been inserted to the boundary grid, just update
+      // the connectivity
+      cellConnectivity[nodeIdx] = this->AuxiliaryData->BndryNodeMap[ptIdx];
+      }
+    } // END for all cell nodes
+
+  // STEP 4: Insert the cell into the boundary grid
+  this->AuxiliaryData->BoundaryGrid->InsertNextCell(
+      cellType,numCellNodes,&cellConnectivity[0]);
+
+  // sanity checks
+#ifndef NDEBUG
+  vtkIdType N = nodes->GetNumberOfPoints();
+  assert("post: array size mismatch!" && (localIdx->GetNumberOfTuples()==N));
+  assert("post: array size mismatch!" && (globalIdx->GetNumberOfTuples()==N));
+#endif
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::ExtractBoundaryGrid()
+{
+  // Sanity checks
+  assert("pre: input grid is NULL!" && (this->InputGrid != NULL) );
+  assert("pre: auxiliary data is NULL!" && (this->AuxiliaryData != NULL) );
+
+  vtkIdType numCells = this->InputGrid->GetNumberOfCells();
+
+  // Allocate data-structure for boundary grid
+  this->AuxiliaryData->BoundaryGrid = vtkUnstructuredGrid::New();
+  this->AuxiliaryData->BoundaryGrid->Allocate(numCells,numCells*8);
+
+  // Create global ID array -- for each node in the boundary grid we store
+  // the corresponding global ID from the input grid.
+  vtkIdTypeArray* globalidx = vtkIdTypeArray::New();
+  globalidx->SetName("GLOBAL_ID");
+  globalidx->SetNumberOfComponents(1);
+  globalidx->Allocate(this->InputGrid->GetNumberOfPoints()); // pre-allocate
+
+  // Create the local ID array -- for each node in the boundary grid we store
+  // the local ID w.r.t. the input grid.
+  vtkIdTypeArray* localidx  = vtkIdTypeArray::New();
+  localidx->SetName("LOCAL_ID");
+  localidx->SetNumberOfComponents(1);
+  localidx->Allocate(this->InputGrid->GetNumberOfPoints()); // pre-allocate
+
+  // Create the local cell ID array -- for each cell in the boundary grid we
+  // store the corresponding local cell ID w.r.t. the input grid.
+  vtkIdTypeArray* localCellIdx = vtkIdTypeArray::New();
+  localCellIdx->SetName("LOCAL_CELL_ID");
+  localCellIdx->SetNumberOfComponents(1);
+  localCellIdx->Allocate(this->InputGrid->GetNumberOfCells()); // pre-allocate
+
+  // Allocate boundary grid nodes
+  vtkPoints* points = vtkPoints::New();
+  points->SetDataTypeToDouble();
+  points->Allocate(this->InputGrid->GetNumberOfPoints()); // pre-allocate
+
+  // STEP 0: Loop through all cells and mark faces -- O(N)
+  this->MarkFaces();
+
+  // STEP 1: Loop through all marked faces and extract the surface
+  // mesh of the input grid -- O(N)
+  this->ExtractSurfaceMesh();
+
+  // STEP 2: Loop through all cells and extract cells on the boundary -- O(N)
+  vtkIdType numNodes = 0;    // numNodes in cell
+  vtkIdType* nodes   = NULL; // pointer to the cell Ids
+  for(vtkIdType cellIdx=0; cellIdx < numCells; ++cellIdx)
+    {
+    // Get point IDs of the cell. Note, this method returns a "read-only"
+    // pointer to the underlying connectivity array for the cell in query.
+    // No memory is allocated.
+    this->InputGrid->GetCellPoints(cellIdx,numNodes,nodes);
+    assert("pre: nodes ptr should not be NULL!" && (nodes != NULL) );
+
+    if( this->IsCellOnBoundary(nodes,numNodes) )
+      {
+      this->ExtractBoundaryCell(
+          cellIdx,numNodes,nodes,
+          points,
+          localidx,
+          globalidx
+          );
+      localCellIdx->InsertNextValue(cellIdx);
+      } // END if cell on boundary
+    } // END for all cells
+
+  // STEP 3: Return any memory that was allocated but not used.
+  points->Squeeze();
+  localidx->Squeeze();
+  globalidx->Squeeze();
+  localCellIdx->Squeeze();
+  this->AuxiliaryData->BoundaryGrid->Squeeze();
+
+  // sanity checks
+#ifndef NDEBUG
+  vtkIdType nc  = this->AuxiliaryData->BoundaryGrid->GetNumberOfCells();
+  vtkIdType numPoints = points->GetNumberOfPoints();
+  assert("array size mismatch!" && (localidx->GetNumberOfTuples()==numPoints));
+  assert("array size mismatch!" && (globalidx->GetNumberOfTuples()==numPoints));
+  assert("post: array size mismatch!" &&
+         (localCellIdx->GetNumberOfTuples()==nc) );
+#endif
+
+  this->AuxiliaryData->BoundaryGrid->SetPoints(points);
+  this->AuxiliaryData->BoundaryGrid->GetPointData()->AddArray(localidx);
+  this->AuxiliaryData->BoundaryGrid->GetPointData()->AddArray(globalidx);
+  this->AuxiliaryData->BoundaryGrid->GetCellData()->AddArray(localCellIdx);
+
+  points->Delete();
+  globalidx->Delete();
+  localidx->Delete();
+  localCellIdx->Delete();
+
+  // Build links on the boundary grid
+  this->AuxiliaryData->BoundaryGridLinks.BuildLinks(
+      this->AuxiliaryData->BoundaryGrid);
+
+  // Write the unstructured grid, iff debugging is turned on
+#ifdef DEBUG
+  this->WriteUnstructuredGrid(
+      this->AuxiliaryData->BoundaryGrid,"BG");
+#endif
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::SerializeUnstructuredGrid(
+      vtkUnstructuredGrid* g, vtkMultiProcessStream& bytestream)
+{
+  assert("pre: cannot serialize a null grid!" && (g != NULL) );
+  assert("pre: byte-stream should be empty" && bytestream.Empty() );
+
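+  // The layout written to the stream (and read back, in the same order, by
+  // DeSerializeUnstructuredGrid) is: the number of points, the number of
+  // cells, the 3*numPoints point coordinates, then for each cell its type,
+  // its number of nodes and its connectivity, followed by the serialized
+  // PointData and CellData.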
+  // serialize the number of points and cells in the grid
+  bytestream << g->GetNumberOfPoints();
+  bytestream << g->GetNumberOfCells();
+
+  // serialize the nodes of the grid
+  double* nodes = static_cast<double*>(g->GetPoints()->GetVoidPointer(0));
+  bytestream.Push(nodes,3*g->GetNumberOfPoints());
+
+  // serialize the cell connectivity information of the grid
+  vtkIdType n       = 0;    // number of nodes of each cell
+  vtkIdType* cnodes = NULL; // pointer to the cell connectivity array
+  for(vtkIdType cellIdx=0; cellIdx < g->GetNumberOfCells(); ++cellIdx)
+    {
+    // push the cell type
+    bytestream << g->GetCellType(cellIdx);
+
+    // get the cell points
+    g->GetCellPoints(cellIdx,n,cnodes);
+
+    // push the number of nodes per cell
+    bytestream << n;
+
+    // push the cell connectivity
+    bytestream.Push(cnodes,n);
+    } // END for all cells
+
+  // serialize the point data
+  vtkFieldDataSerializer::Serialize(g->GetPointData(),bytestream);
+
+  // serialize the cell data
+  vtkFieldDataSerializer::Serialize(g->GetCellData(),bytestream);
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::DeSerializeUnstructuredGrid(
+    vtkUnstructuredGrid* g, vtkMultiProcessStream& bytestream)
+{
+  assert("pre: input grid is NULL!" && (g != NULL) );
+  assert("pre: byte-stream should not be empty!" && !bytestream.Empty() );
+
+  unsigned int N; // auxiliary local variable used to satisfy bytestream API.
+
+  // deserialize the number of points & number of cells
+  vtkIdType numPoints = 0;
+  vtkIdType numCells  = 0;
+  bytestream >> numPoints;
+  bytestream >> numCells;
+
+  // deserialize the grid points
+  vtkPoints* pnts = vtkPoints::New();
+  pnts->SetDataTypeToDouble();
+  pnts->SetNumberOfPoints(numPoints);
+
+  double* nodes = static_cast<double*>(pnts->GetVoidPointer(0));
+  N = 3*numPoints;
+  assert( N==3*numPoints );
+  bytestream.Pop(nodes,N);
+
+  g->SetPoints( pnts );
+  pnts->Delete();
+
+  // pre-allocate internal buffer for connectivity
+  g->Allocate(numCells,8);
+
+  // deserialize the grid connectivity
+  int cellType      = 0;
+  vtkIdType n       = 0;
+  std::vector<vtkIdType> cnodes;
+  for(vtkIdType cellIdx=0; cellIdx < numCells; ++cellIdx)
+    {
+    bytestream >> cellType >> n;
+    cnodes.resize( n );
+    vtkIdType* cnodesPtr = &cnodes[0];
+    N = n;
+    bytestream.Pop(cnodesPtr,N);
+
+    g->InsertNextCell(cellType,n,cnodesPtr);
+    } // END for all cells
+
+  g->Squeeze();
+
+  // De-serialize point data
+  vtkFieldDataSerializer::Deserialize(bytestream,g->GetPointData());
+
+  // De-serialize cell data
+  vtkFieldDataSerializer::Deserialize(bytestream,g->GetCellData());
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridConnectivity::WriteUnstructuredGrid(
+      vtkUnstructuredGrid* g, const char* fileName)
+{
+  assert("pre: input grid is NULL!" && (g != NULL) );
+  assert("pre: fileName is NULL!" && (fileName != NULL) );
+
+  std::ostringstream oss;
+  oss << fileName << "-" << this->Controller->GetLocalProcessId() << ".vtk";
+
+  vtkUnstructuredGridWriter* writer = vtkUnstructuredGridWriter::New();
+  writer->SetFileName(oss.str().c_str());
+  writer->SetInputData(g);
+  writer->Update();
+  writer->Delete();
+}
diff --git a/Filters/ParallelGeometry/vtkPUnstructuredGridConnectivity.h b/Filters/ParallelGeometry/vtkPUnstructuredGridConnectivity.h
new file mode 100644
index 0000000..7b66a19
--- /dev/null
+++ b/Filters/ParallelGeometry/vtkPUnstructuredGridConnectivity.h
@@ -0,0 +1,294 @@
+/*=========================================================================
+
+ Program:   Visualization Toolkit
+ Module:    vtkPUnstructuredGridConnectivity.h
+
+ Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+ All rights reserved.
+ See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notice for more information.
+
+ =========================================================================*/
+// .NAME vtkPUnstructuredGridConnectivity -- Unstructured grid connectivity.
+//
+// .SECTION Description
+//  vtkPUnstructuredGridConnectivity implements functionality for generating
+//  ghost zones for a distributed unstructured grid. Generating ghost zones is
+//  implemented in two stages. First, we build the ghost zones, which amounts
+//  to building the connectivity of the ghosted grid and communication links,
+//  and second, we update the ghost zones by communicating the fields on the
+//  ghost cells and nodes. The main steps involved in this process are as
+//  follows:
+//  <ol>
+//    <li> Each process computes a bounding box of the grid it owns. </li>
+//    <li> The bounding boxes are then distributed to all processes by
+//         an AllGather collective call. </li>
+//    <li> Each process loops through the list of bounding boxes and
+//         computes box intersections with its local bounding box. </li>
+//    <li> The list of intersecting bounding boxes yields an abbreviated list
+//         of candidate neighbors. </li>
+//    <li> Given the local grid, each process then extracts the boundary grid,
+//         which consists of nodes/cells on the boundary, global node IDs and
+//         the local cell IDs w.r.t. the local grid. </li>
+//    <li> Boundary grids are then exchanged among candidate neighbors using
+//         point-to-point communication. </li>
+//    <li> Next, each process constructs the topology of the ghost zones and
+//         communication links, using the local boundary grid and the list of
+//         remote boundary grids. </li>
+//    <li> The communication links store a source/target pair for nodes/cells
+//         among connected grids and remain persistent in memory. </li>
+//    <li> Last, the fields (node- and/or cell-centered) are updated, using
+//         point-to-point communication by processing the communication
+//         links. </li>
+//  </ol>
+//
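+// .SECTION Usage
+//  The following is a minimal, illustrative sketch of driving this class
+//  (mpiController and localGrid are assumed to be provided by the caller;
+//  see also vtkPUnstructuredGridGhostDataGenerator for a pipeline wrapper):
+//
+//    vtkPUnstructuredGridConnectivity* ghostGen =
+//        vtkPUnstructuredGridConnectivity::New();
+//    ghostGen->SetController(mpiController);  // a vtkMPIController*
+//    ghostGen->SetGlobalIDFieldName("GlobalID");
+//    ghostGen->RegisterGrid(localGrid);       // the local vtkUnstructuredGrid
+//    ghostGen->BuildGhostZoneConnectivity();  // topology & communication lists
+//    ghostGen->UpdateGhosts();                // exchange node/cell fields
+//    vtkUnstructuredGrid* ghosted = ghostGen->GetGhostedGrid();
+//    ghostGen->Delete();
+//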
+// .SECTION Caveats
+//  <ul>
+//    <li> The code currently assumes one grid per rank. </li>
+//    <li> GlobalID information must be available. </li>
+//    <li> The grid must be globally conforming, i.e., no hanging nodes. </li>
+//    <li> Only topologically face-adjacent ghost cells are considered. </li>
+//    <li> PointData and CellData must match across partitions/processes. </li>
+//  </ul>
+//
+// .SECTION See Also
+//  vtkPUnstructuredGridGhostDataGenerator
+
+#ifndef VTKPUNSTRUCTUREDGRIDCONNECTIVITY_H_
+#define VTKPUNSTRUCTUREDGRIDCONNECTIVITY_H_
+
+#include "vtkFiltersParallelGeometryModule.h" // For export macro
+#include "vtkObject.h"
+
+// Forward Declarations
+class vtkCell;
+class vtkCellData;
+class vtkIdList;
+class vtkIdTypeArray;
+class vtkMPIController;
+class vtkMultiProcessStream;
+class vtkPointData;
+class vtkPoints;
+class vtkUnstructuredGrid;
+
+// Forward Declaration of internal data-structures
+namespace vtk
+{
+namespace details
+{
+
+struct GridInfo;
+struct MeshLinks;
+struct CommunicationLinks;
+
+} // END namespace details
+} // END namespace vtk
+
+class VTKFILTERSPARALLELGEOMETRY_EXPORT vtkPUnstructuredGridConnectivity :
+  public vtkObject
+{
+public:
+  static vtkPUnstructuredGridConnectivity* New();
+  vtkTypeMacro(vtkPUnstructuredGridConnectivity,vtkObject);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Set/Get the underlying MPI controller used for communication.
+  vtkSetMacro(Controller,vtkMPIController*);
+  vtkGetMacro(Controller,vtkMPIController*);
+
+  // Description:
+  // Set/Get the name of the GlobalID field. By default, "GlobalID" is assumed.
+  vtkSetStringMacro(GlobalIDFieldName);
+  vtkGetStringMacro(GlobalIDFieldName);
+
+  // Description:
+  // Returns the ghosted grid.
+  vtkGetMacro(GhostedGrid,vtkUnstructuredGrid*);
+
+  // Description:
+  // Registers the grid in this process
+  void RegisterGrid(vtkUnstructuredGrid* gridPtr);
+
+  // Description:
+  // Builds the ghost-zone connectivity. This method sets up the necessary
+  // communication lists for updating the ghost zones.
+  // NOTE: The local grid must be registered by calling RegisterGrid()
+  // prior to calling this method.
+  void BuildGhostZoneConnectivity();
+
+  // Description:
+  // Exchanges ghost zone data (i.e., node-centered or cell-centered fields).
+  // NOTE: This method must be called after BuildGhostZoneConnectivity.
+  void UpdateGhosts();
+
+protected:
+  vtkPUnstructuredGridConnectivity();
+  virtual ~vtkPUnstructuredGridConnectivity();
+
+  char* GlobalIDFieldName;            // The field of the global IDs.
+  vtkUnstructuredGrid* InputGrid;     // The input grid, to be ghosted.
+  vtkUnstructuredGrid* GhostedGrid;   // This is the output from this class.
+  vtkMPIController* Controller;       // Supplied MPI controller.
+
+// BTX
+  vtk::details::GridInfo* AuxiliaryData; // Data used to build the ghost zones.
+  vtk::details::CommunicationLinks* CommLists; // Persistent comm lists.
+// ETX
+
+  // Description:
+  // Given the deserialized cell-centered ghost data from the given neighboring
+  // rank, this method fills in the cell-centered fields of the ghost zone.
+  void FillGhostZoneCells(
+        const int neiRank,
+        vtkCellData* ghostData,
+        vtkIdType* cellIdx,
+        const unsigned int numGhostCells);
+
+  // Description:
+  // Given the deserialized node-centered ghost data from the given neighboring
+  // rank, this method fills in the node-centered fields of the ghost zone.
+  void FillGhostZoneNodes(
+        const int neiRank,
+        vtkPointData* ghostData,
+        vtkIdType* globalIdx,
+        const unsigned int numGhostNodes);
+
+  // Description:
+  // Deserializes the raw buffers received from each neighboring rank and
+  // updates the ghosted grid instance by filling in the values for the
+  // ghost zones.
+  void DeSerializeGhostZones();
+
+  // Description:
+  // This method exchanges the buffer sizes among neighboring processes and
+  // allocates a persistent buffer for the communication. This exchange and
+  // memory allocation happen only the first time the data is exchanged.
+  void CreatePersistentRcvBuffers();
+
+  // Description:
+  // This method serializes the local data (node-centered and/or cell-centered)
+  // for each rank that this process/grid communicates with.
+  void SerializeGhostZones();
+
+  // Description:
+  // Synchs the data on the input grid in this process to the ghosted grid
+  // instance.
+  void SynchLocalData();
+
+  // Description:
+  // Loops through the nodes of the ghost cell and the local adjacent cell
+  // and determines which nodes they share, enqueueing the corresponding
+  // node links in the communication lists.
+  void EnqueueNodeLinks(
+        const int rmtRank,
+        const vtkIdType ghostCell,
+        const vtkIdType adjCell,
+        vtkIdList* shared);
+
+  // Description:
+  // Given the cell, c, this method checks if it is connected to the grid
+  // assigned to this process. The method returns false if the cell is not
+  // connected. If the cell is connected, adjCell will hold the index, w.r.t.
+  // the input grid, of the cell that the ghost cell is face-adjacent to, and
+  // sharedIds will hold the global IDs of the face shared between the ghost
+  // cell and the face-adjacent boundary cell.
+  bool IsCellConnected(
+      vtkCell* c,vtkIdType* globalId, const vtkIdType N,
+      vtkIdType& adjCell,
+      vtkIdList* sharedIds);
+
+  // Description:
+  // Inserts the ghost cell nodes into the ghosted instance of the grid.
+  void InsertGhostCellNodes(
+        vtkCell* ghostCell,
+        vtkIdTypeArray* ghostGridGlobalIdx,
+        vtkIdType* globalIdArray,
+        vtkUnstructuredGrid* bGrid,
+        vtkIdType* cellPts);
+
+  // Description:
+  // Processes the remote boundary grid and injects cells into the ghosted
+  // grid if a match is found.
+  void ProcessRemoteGrid(
+      const int rmtRank,vtkUnstructuredGrid* bGrid);
+
+  // Description:
+  // Builds the ghosted grid and communication lists
+  void BuildGhostedGridAndCommLists();
+
+  // Description:
+  // Serializes the unstructured grid into a bytestream.
+  void SerializeUnstructuredGrid(
+      vtkUnstructuredGrid* g, vtkMultiProcessStream& bytestream);
+
+  // Description:
+  // De-serializes the unstructured grid from the given bytestream.
+  void DeSerializeUnstructuredGrid(
+      vtkUnstructuredGrid* g, vtkMultiProcessStream& bytestream);
+
+  // Description:
+  // Writes the given unstructured grid to an ASCII file.
+  // NOTE: Used for debugging.
+  void WriteUnstructuredGrid(vtkUnstructuredGrid* grid, const char* fileName);
+
+  // Description:
+  // Loops through the auxiliary FaceList, constructed in MarkFaces, and
+  // extracts the faces and nodes on the boundary.
+  void ExtractSurfaceMesh();
+
+  // Description:
+  // Loops through the input grid cell faces and updates the auxiliary
+  // data-structures to associate a count with each face.
+  void MarkFaces();
+
+  // Description:
+  // Extracts the boundary cell from the input grid and inserts it into
+  // the boundary grid.
+  void ExtractBoundaryCell(
+      const vtkIdType cellIdx,
+      const vtkIdType numCellNodes,
+      vtkIdType* cellNodes,
+      vtkPoints* nodes,
+      vtkIdTypeArray* localIdx,
+      vtkIdTypeArray* globalIdx
+      );
+
+  // Description:
+  // Checks if the cell, composed by the supplied nodes, is on the boundary.
+  // A cell is on the boundary iff any of its nodes touch the boundary.
+  bool IsCellOnBoundary(vtkIdType* cellNodes, vtkIdType N);
+
+  // Description:
+  // Exchanges the boundary grids among candidate ranks.
+  void ExchangeBoundaryGrids();
+
+  // Description:
+  // Exchange boundary grid sizes
+  void ExchangeBoundaryGridSizes(int size);
+
+  // Description:
+  // Collides the bounds of this process with the bounding boxes of all
+  // other processes. The processes whose bounding boxes intersect yield
+  // the list of candidate ranks with which boundary grids will be exchanged.
+  void BoundingBoxCollision();
+
+  // Description:
+  // Exchanges the grid bounds of this process with all other processes.
+  // Upon completion, each process will have the global grid bounds of
+  // every process.
+  void ExchangeGridBounds();
+
+  // Description:
+  // Extracts the boundary grid geometry from the input grid.
+  // Note: this method only extracts the mesh and global/local ID information.
+  void ExtractBoundaryGrid();
+
+private:
+  vtkPUnstructuredGridConnectivity(const vtkPUnstructuredGridConnectivity&); // Not implemented
+  void operator=(const vtkPUnstructuredGridConnectivity&); // Not implemented
+};
+
+#endif /* VTKPUNSTRUCTUREDGRIDCONNECTIVITY_H_ */
diff --git a/Filters/ParallelGeometry/vtkPUnstructuredGridGhostDataGenerator.cxx b/Filters/ParallelGeometry/vtkPUnstructuredGridGhostDataGenerator.cxx
new file mode 100644
index 0000000..32368f6
--- /dev/null
+++ b/Filters/ParallelGeometry/vtkPUnstructuredGridGhostDataGenerator.cxx
@@ -0,0 +1,120 @@
+/*=========================================================================
+
+ Program:   Visualization Toolkit
+ Module:    vtkPUnstructuredGridGhostDataGenerator.cxx
+
+ Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+ All rights reserved.
+ See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notice for more information.
+
+ =========================================================================*/
+#include "vtkPUnstructuredGridGhostDataGenerator.h"
+
+// VTK includes
+#include "vtkDataObject.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkMPIController.h"
+#include "vtkMPIUtilities.h"
+#include "vtkMultiProcessController.h"
+#include "vtkObjectFactory.h"
+#include "vtkPUnstructuredGridConnectivity.h"
+#include "vtkUnstructuredGrid.h"
+
+// C/C++ includes
+#include <cassert>
+
+vtkStandardNewMacro(vtkPUnstructuredGridGhostDataGenerator);
+
+//------------------------------------------------------------------------------
+vtkPUnstructuredGridGhostDataGenerator::vtkPUnstructuredGridGhostDataGenerator()
+{
+  this->GhostZoneBuilder = NULL;
+  this->Controller = vtkMultiProcessController::GetGlobalController();
+  this->SetNumberOfInputPorts(1);
+  this->SetNumberOfOutputPorts(1);
+}
+
+//------------------------------------------------------------------------------
+vtkPUnstructuredGridGhostDataGenerator::~vtkPUnstructuredGridGhostDataGenerator()
+{
+  if(this->GhostZoneBuilder != NULL)
+    {
+    this->GhostZoneBuilder->Delete();
+    }
+}
+
+//------------------------------------------------------------------------------
+void vtkPUnstructuredGridGhostDataGenerator::PrintSelf(
+      ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
+
+//------------------------------------------------------------------------------
+int vtkPUnstructuredGridGhostDataGenerator::FillInputPortInformation(
+      int vtkNotUsed(port), vtkInformation* info)
+{
+  assert( "pre: information object is NULL!" && (info != NULL) );
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(),"vtkUnstructuredGrid");
+  return 1;
+}
+
+//------------------------------------------------------------------------------
+int vtkPUnstructuredGridGhostDataGenerator::FillOutputPortInformation(
+      int vtkNotUsed(port), vtkInformation* info)
+{
+  assert( "pre: information object is NULL!" && (info != NULL) );
+  info->Set(vtkDataObject::DATA_TYPE_NAME(),"vtkUnstructuredGrid");
+  return 1;
+}
+
+//------------------------------------------------------------------------------
+int vtkPUnstructuredGridGhostDataGenerator::RequestData(
+    vtkInformation* vtkNotUsed(rqst),
+    vtkInformationVector** inputVector,
+    vtkInformationVector* outputVector)
+{
+  // STEP 0: Get input grid
+  vtkInformation* input = inputVector[0]->GetInformationObject(0);
+  assert("pre: input grid is NULL!" && (input != NULL) );
+  vtkUnstructuredGrid* grid =
+   vtkUnstructuredGrid::SafeDownCast(input->Get(vtkDataObject::DATA_OBJECT()));
+
+  if( (grid==NULL) || (grid->GetNumberOfCells()==0) )
+    {
+    // empty input, do nothing
+    return 1;
+    }
+
+  // STEP 1: Get output grid
+  vtkInformation* output = outputVector->GetInformationObject(0);
+  assert("pre: output object is NULL" && (output != NULL) );
+  vtkUnstructuredGrid* ghostedGrid =
+      vtkUnstructuredGrid::SafeDownCast(
+          output->Get(vtkDataObject::DATA_OBJECT()));
+  assert("pre: output grid object is NULL!" && (ghostedGrid != NULL) );
+
+  // STEP 2: Build the ghost zones, if not already built
+  if( this->GhostZoneBuilder == NULL )
+    {
+    this->GhostZoneBuilder = vtkPUnstructuredGridConnectivity::New();
+    vtkMPIController* mpiController =
+        vtkMPIController::SafeDownCast(this->Controller);
+    assert("pre: null mpi controller!" && (mpiController != NULL) );
+    this->GhostZoneBuilder->SetController(mpiController);
+    this->GhostZoneBuilder->RegisterGrid( grid );
+    this->GhostZoneBuilder->BuildGhostZoneConnectivity();
+    }
+
+  // STEP 3: Update the ghost zones
+  this->GhostZoneBuilder->UpdateGhosts();
+
+  // STEP 4: Get the ghosted grid
+  ghostedGrid->DeepCopy(this->GhostZoneBuilder->GetGhostedGrid());
+  return 1;
+}
diff --git a/Filters/ParallelGeometry/vtkPUnstructuredGridGhostDataGenerator.h b/Filters/ParallelGeometry/vtkPUnstructuredGridGhostDataGenerator.h
new file mode 100644
index 0000000..a003843
--- /dev/null
+++ b/Filters/ParallelGeometry/vtkPUnstructuredGridGhostDataGenerator.h
@@ -0,0 +1,75 @@
+/*=========================================================================
+
+ Program:   Visualization Toolkit
+ Module:    vtkPUnstructuredGridGhostDataGenerator.h
+
+ Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+ All rights reserved.
+ See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+ This software is distributed WITHOUT ANY WARRANTY; without even
+ the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+ PURPOSE.  See the above copyright notice for more information.
+
+ =========================================================================*/
+// .NAME vtkPUnstructuredGridGhostDataGenerator -- Builds ghost zones for a
+//  distributed unstructured grid dataset.
+//
+// .SECTION Description
+//  This filter uses internally the vtkPUnstructuredGridConnectivity helper
+//  class to construct ghost zones for a distributed unstructured grid.
+//
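+//  A minimal, illustrative pipeline sketch is shown below; "reader" stands
+//  in for any upstream source that produces a distributed unstructured grid
+//  with a "GlobalID" point array and is not part of this class:
+//
+//    vtkNew<vtkPUnstructuredGridGhostDataGenerator> ghostGenerator;
+//    ghostGenerator->SetInputConnection(reader->GetOutputPort());
+//    ghostGenerator->Update();
+//    vtkUnstructuredGrid* ghosted = ghostGenerator->GetOutput();
+//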
+// .SECTION Caveats
+//  <ul>
+//    <li> The code currently assumes one grid per rank. </li>
+//    <li> GlobalID information must be provided as a PointData array
+//         with the name "GlobalID". </li>
+//    <li> The grid must be globally conforming, i.e., no hanging nodes. </li>
+//    <li> Only topologically face-adjacent ghost cells are considered. </li>
+//    <li> PointData and CellData must match across partitions/processes. </li>
+//  </ul>
+//
+// .SECTION See Also
+//  vtkPUnstructuredGridConnectivity
+
+#ifndef VTKPUNSTRUCTUREDGRIDGHOSTDATAGENERATOR_H_
+#define VTKPUNSTRUCTUREDGRIDGHOSTDATAGENERATOR_H_
+
+#include "vtkFiltersParallelGeometryModule.h" // For export macro
+#include "vtkUnstructuredGridAlgorithm.h"
+
+// Forward Declarations
+class vtkIndent;
+class vtkInformation;
+class vtkInformationVector;
+class vtkPUnstructuredGridConnectivity;
+class vtkUnstructuredGrid;
+class vtkMultiProcessController;
+
+class VTKFILTERSPARALLELGEOMETRY_EXPORT vtkPUnstructuredGridGhostDataGenerator:
+  public vtkUnstructuredGridAlgorithm
+{
+public:
+  static vtkPUnstructuredGridGhostDataGenerator* New();
+  vtkTypeMacro(vtkPUnstructuredGridGhostDataGenerator,vtkUnstructuredGridAlgorithm);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+protected:
+  vtkPUnstructuredGridGhostDataGenerator();
+  virtual ~vtkPUnstructuredGridGhostDataGenerator();
+
+  // Standard VTK pipeline routines
+  virtual int FillInputPortInformation(int port,vtkInformation *info);
+  virtual int FillOutputPortInformation(int port, vtkInformation *info);
+  virtual int RequestData(
+      vtkInformation *rqst, vtkInformationVector **inputVector,
+      vtkInformationVector* outputVector );
+
+  vtkPUnstructuredGridConnectivity* GhostZoneBuilder;
+  vtkMultiProcessController* Controller;
+private:
+  vtkPUnstructuredGridGhostDataGenerator(const vtkPUnstructuredGridGhostDataGenerator&); // Not implemented
+  void operator=(const vtkPUnstructuredGridGhostDataGenerator&); // Not implemented
+};
+
+#endif /* VTKPUNSTRUCTUREDGRIDGHOSTDATAGENERATOR_H_ */
diff --git a/Filters/ParallelImaging/vtkMemoryLimitImageDataStreamer.h b/Filters/ParallelImaging/vtkMemoryLimitImageDataStreamer.h
index 53238df..97b2cde 100644
--- a/Filters/ParallelImaging/vtkMemoryLimitImageDataStreamer.h
+++ b/Filters/ParallelImaging/vtkMemoryLimitImageDataStreamer.h
@@ -44,7 +44,7 @@ public:
 
 protected:
   vtkMemoryLimitImageDataStreamer();
-  ~vtkMemoryLimitImageDataStreamer() {};
+  ~vtkMemoryLimitImageDataStreamer() {}
 
   unsigned long  MemoryLimit;
 private:
diff --git a/Filters/ParallelStatistics/Testing/Cxx/CMakeLists.txt b/Filters/ParallelStatistics/Testing/Cxx/CMakeLists.txt
index 36ad86c..de673a8 100644
--- a/Filters/ParallelStatistics/Testing/Cxx/CMakeLists.txt
+++ b/Filters/ParallelStatistics/Testing/Cxx/CMakeLists.txt
@@ -1,9 +1,16 @@
-add_test_mpi(TestRandomPContingencyStatisticsMPI.cxx)
-add_test_mpi(TestRandomPKMeansStatisticsMPI.cxx)
-add_test_mpi(TestRandomPMomentStatisticsMPI.cxx)
-add_test_mpi(TestRandomPOrderStatisticsMPI.cxx)
+include(vtkMPI)
+
+vtk_add_test_mpi(TestRandomPContingencyStatisticsMPI.cxx)
+vtk_add_test_mpi(TestRandomPKMeansStatisticsMPI.cxx)
+vtk_add_test_mpi(TestRandomPMomentStatisticsMPI.cxx)
+vtk_add_test_mpi(TestRandomPOrderStatisticsMPI.cxx)
+
+vtk_mpi_link(TestRandomPContingencyStatisticsMPI)
+vtk_mpi_link(TestRandomPKMeansStatisticsMPI)
+vtk_mpi_link(TestRandomPMomentStatisticsMPI)
+vtk_mpi_link(TestRandomPOrderStatisticsMPI)
 
 # # -----------------------------------------------------------------------------
 # # The following file was being compiled but never added as a test in older
 # # vtk. Why is that so?
-# add_test_mpi(TestRealDataPDescriptiveStatisticsMPI)
+# vtk_add_test_mpi(TestRealDataPDescriptiveStatisticsMPI)
diff --git a/Filters/Programmable/Testing/Cxx/CMakeLists.txt b/Filters/Programmable/Testing/Cxx/CMakeLists.txt
index 556da1a..fe2e11c 100644
--- a/Filters/Programmable/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Programmable/Testing/Cxx/CMakeLists.txt
@@ -1,25 +1,4 @@
-set(MyTests
+vtk_add_test_cxx(
   TestProgrammableGlyph.cxx
-)
-
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${MyTests})
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution.png.md5 b/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution.png.md5
new file mode 100644
index 0000000..c3a091b
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution.png.md5
@@ -0,0 +1 @@
+13690dd96fda4d2a6a02a27b0ec81094
diff --git a/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution_1.png.md5 b/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution_1.png.md5
new file mode 100644
index 0000000..a226801
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution_1.png.md5
@@ -0,0 +1 @@
+b51c68e943317f2e710ac46ceaf82bc3
diff --git a/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution_2.png.md5 b/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution_2.png.md5
new file mode 100644
index 0000000..e8153e1
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/MultidimensionalSolution_2.png.md5
@@ -0,0 +1 @@
+838649863499c27db6bb536321e4f918
diff --git a/Filters/Programmable/Testing/Data/Baseline/TestProgrammableGlyph.png.md5 b/Filters/Programmable/Testing/Data/Baseline/TestProgrammableGlyph.png.md5
new file mode 100644
index 0000000..03ea667
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/TestProgrammableGlyph.png.md5
@@ -0,0 +1 @@
+a09ab8be2021320bbdd7e4e737eab00e
diff --git a/Filters/Programmable/Testing/Data/Baseline/progGlyphs.png.md5 b/Filters/Programmable/Testing/Data/Baseline/progGlyphs.png.md5
new file mode 100644
index 0000000..1e92049
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/progGlyphs.png.md5
@@ -0,0 +1 @@
+0574dd153105d5f9a2bf3ed8f96e3b05
diff --git a/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource.png.md5 b/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource.png.md5
new file mode 100644
index 0000000..df3a6b2
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource.png.md5
@@ -0,0 +1 @@
+6c7e17ab539c8061666666daf44647b9
diff --git a/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource_1.png.md5 b/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource_1.png.md5
new file mode 100644
index 0000000..7147c98
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource_1.png.md5
@@ -0,0 +1 @@
+7861b2b58b1d27afd592b120633810a5
diff --git a/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource_2.png.md5 b/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource_2.png.md5
new file mode 100644
index 0000000..209d036
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/progGlyphsBySource_2.png.md5
@@ -0,0 +1 @@
+655bf21ee9b36d85fe10d8017c11240c
diff --git a/Filters/Programmable/Testing/Data/Baseline/progGlyphs_1.png.md5 b/Filters/Programmable/Testing/Data/Baseline/progGlyphs_1.png.md5
new file mode 100644
index 0000000..980d281
--- /dev/null
+++ b/Filters/Programmable/Testing/Data/Baseline/progGlyphs_1.png.md5
@@ -0,0 +1 @@
+fbb9e53e2ecc0841723a98843fd677f5
diff --git a/Filters/Programmable/Testing/Python/CMakeLists.txt b/Filters/Programmable/Testing/Python/CMakeLists.txt
index c8fa657..07e0a8f 100644
--- a/Filters/Programmable/Testing/Python/CMakeLists.txt
+++ b/Filters/Programmable/Testing/Python/CMakeLists.txt
@@ -1,5 +1,3 @@
-if (VTK_DATA_ROOT)
-  add_test_python(MultidimensionalSolution.py Graphics)
-endif()
-add_test_python(progGlyphs.py Graphics)
-add_test_python(progGlyphsBySource.py Graphics)
+vtk_add_test_python(MultidimensionalSolution.py)
+vtk_add_test_python(progGlyphs.py)
+vtk_add_test_python(progGlyphsBySource.py)
diff --git a/Filters/Programmable/Testing/Tcl/CMakeLists.txt b/Filters/Programmable/Testing/Tcl/CMakeLists.txt
index 3421a3b..a6d377f 100644
--- a/Filters/Programmable/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Programmable/Testing/Tcl/CMakeLists.txt
@@ -1,6 +1,3 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(MultidimensionalSolution Graphics)
-endif()
-
-add_test_tcl(progGlyphs Graphics)
-add_test_tcl(progGlyphsBySource Graphics)
+vtk_add_test_tcl(MultidimensionalSolution)
+vtk_add_test_tcl(progGlyphs)
+vtk_add_test_tcl(progGlyphsBySource)
diff --git a/Filters/ReebGraph/Testing/Cxx/CMakeLists.txt b/Filters/ReebGraph/Testing/Cxx/CMakeLists.txt
index aeb49e4..b696140 100644
--- a/Filters/ReebGraph/Testing/Cxx/CMakeLists.txt
+++ b/Filters/ReebGraph/Testing/Cxx/CMakeLists.txt
@@ -1,2 +1,2 @@
-vtk_tests(
-  TestReebGraph.cxx)
+vtk_add_test_cxx(TestReebGraph.cxx NO_DATA NO_VALID NO_OUTPUT)
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/SMP/CMakeLists.txt b/Filters/SMP/CMakeLists.txt
new file mode 100644
index 0000000..dff93ea
--- /dev/null
+++ b/Filters/SMP/CMakeLists.txt
@@ -0,0 +1,15 @@
+set(Module_SRCS
+  vtkSMPContourGrid.cxx
+  vtkSMPContourGridManyPieces.cxx
+  vtkSMPMergePoints.cxx
+  vtkSMPMergePolyDataHelper.cxx
+  vtkSMPTransform.cxx
+  vtkSMPWarpVector.cxx
+  )
+
+set_source_files_properties(
+  vtkSMPMergePolyDataHelper
+  WRAP_EXCLUDE
+  )
+
+vtk_module_library(vtkFiltersSMP ${Module_SRCS})
diff --git a/Filters/SMP/Testing/Cxx/CMakeLists.txt b/Filters/SMP/Testing/Cxx/CMakeLists.txt
new file mode 100644
index 0000000..6feabbc
--- /dev/null
+++ b/Filters/SMP/Testing/Cxx/CMakeLists.txt
@@ -0,0 +1,6 @@
+vtk_add_test_cxx(
+  TestSMPContour.cxx,NO_VALID
+  TestSMPTransform.cxx,NO_VALID
+  TestSMPWarp.cxx,NO_VALID
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/SMP/Testing/Cxx/TestSMPContour.cxx b/Filters/SMP/Testing/Cxx/TestSMPContour.cxx
new file mode 100644
index 0000000..942ed37
--- /dev/null
+++ b/Filters/SMP/Testing/Cxx/TestSMPContour.cxx
@@ -0,0 +1,220 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSMPContour.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkNew.h"
+#include "vtkRTAnalyticSource.h"
+#include "vtkPolyData.h"
+#include "vtkDataSetTriangleFilter.h"
+#include "vtkSMPContourGrid.h"
+#include "vtkSMPContourGridManyPieces.h"
+#include "vtkContourGrid.h"
+#include "vtkContourFilter.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkTimerLog.h"
+#include "vtkNonMergingPointLocator.h"
+#include "vtkSMPTools.h"
+#include "vtkXMLMultiBlockDataWriter.h"
+#include "vtkCompositeDataSet.h"
+#include "vtkCompositeDataIterator.h"
+#include "vtkElevationFilter.h"
+#include "vtkPointData.h"
+#include "vtkCellData.h"
+#include "vtkPointDataToCellData.h"
+#include "vtkXMLPolyDataWriter.h"
+
+#define WRITE_DEBUG 0
+
+const int EXTENT = 30;
+int TestSMPContour(int, char *[])
+{
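+  // This test contours the same tetrahedralized wavelet data set with
+  // vtkContourGrid (the baseline), vtkContourFilter (timed only),
+  // vtkSMPContourGrid with and without piece merging, and
+  // vtkSMPContourGridManyPieces, and verifies that each SMP variant
+  // produces the same number of cells as the baseline.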
+  vtkSMPTools::Initialize(2);
+
+  vtkNew<vtkTimerLog> tl;
+
+  vtkNew<vtkRTAnalyticSource> imageSource;
+#if 1
+  imageSource->SetWholeExtent(-EXTENT, EXTENT, -EXTENT, EXTENT, -EXTENT, EXTENT);
+#else
+  imageSource->SetWholeExtent(-EXTENT, EXTENT, -EXTENT, EXTENT, 0, 0);
+#endif
+
+  vtkNew<vtkElevationFilter> ev;
+  ev->SetInputConnection(imageSource->GetOutputPort());
+  ev->SetLowPoint(-EXTENT, -EXTENT, -EXTENT);
+  ev->SetHighPoint(EXTENT, EXTENT, EXTENT);
+
+  vtkNew<vtkDataSetTriangleFilter> tetraFilter;
+  tetraFilter->SetInputConnection(ev->GetOutputPort());
+
+  tl->StartTimer();
+
+  vtkNew<vtkPointDataToCellData> p2c;
+  p2c->SetInputConnection(tetraFilter->GetOutputPort());
+  p2c->Update();
+
+  tetraFilter->GetOutput()->GetCellData()->ShallowCopy(p2c->GetOutput()->GetCellData());
+
+  tl->StopTimer();
+  cout << "Data generation time: " << tl->GetElapsedTime() << endl;
+
+  cout << "Contour grid: " << endl;
+  vtkNew<vtkContourGrid> cg;
+  cg->SetInputData(tetraFilter->GetOutput());
+  cg->SetInputArrayToProcess(0, 0, 0, 0, "RTData");
+  cg->SetValue(0, 200);
+  cg->SetValue(1, 220);
+  tl->StartTimer();
+  cg->Update();
+  tl->StopTimer();
+
+  vtkIdType baseNumCells = cg->GetOutput()->GetNumberOfCells();
+
+  cout << "Number of cells: " << cg->GetOutput()->GetNumberOfCells() << endl;
+  cout << "NUmber of points: " << cg->GetOutput()->GetNumberOfPoints() << endl;
+  cout << "Time: " << tl->GetElapsedTime() << endl;
+
+  cout << "Contour filter: " << endl;
+  vtkNew<vtkContourFilter> cf;
+  cf->SetInputData(tetraFilter->GetOutput());
+  cf->SetInputArrayToProcess(0, 0, 0, 0, "RTData");
+  cf->SetValue(0, 200);
+  cf->SetValue(1, 220);
+  tl->StartTimer();
+  cf->Update();
+  tl->StopTimer();
+
+  cout << "Number of cells: " << cf->GetOutput()->GetNumberOfCells() << endl;
+  cout << "Time: " << tl->GetElapsedTime() << endl;
+
+  cout << "SMP Contour grid: " << endl;
+  vtkNew<vtkSMPContourGrid> cg2;
+  cg2->SetInputData(tetraFilter->GetOutput());
+  cg2->SetInputArrayToProcess(0, 0, 0, 0, "RTData");
+  cg2->SetValue(0, 200);
+  cg2->SetValue(1, 220);
+  tl->StartTimer();
+  cg2->Update();
+  tl->StopTimer();
+
+  cout << "Time: " << tl->GetElapsedTime() << endl;
+
+#if WRITE_DEBUG
+  vtkNew<vtkXMLPolyDataWriter> pdwriter;
+  pdwriter->SetInputData(cg2->GetOutput());
+  pdwriter->SetFileName("contour.vtp");
+  //pdwriter->SetDataModeToAscii();
+  pdwriter->Write();
+#endif
+
+  if (cg2->GetOutput()->GetNumberOfCells() != baseNumCells)
+    {
+    cout << "Error in vtkSMPContourGrid (MergePieces = true) output." << endl;
+    cout << "Number of cells does not match expected, "
+         << cg2->GetOutput()->GetNumberOfCells() << " vs. " << baseNumCells << endl;
+    return EXIT_FAILURE;
+    }
+
+  cout << "SMP Contour grid: " << endl;
+  cg2->MergePiecesOff();
+  tl->StartTimer();
+  cg2->Update();
+  tl->StopTimer();
+
+  cout << "Time: " << tl->GetElapsedTime() << endl;
+
+  vtkIdType numCells = 0;
+
+  vtkCompositeDataSet* cds = vtkCompositeDataSet::SafeDownCast(
+    cg2->GetOutputDataObject(0));
+  if (cds)
+    {
+    vtkCompositeDataIterator* iter = cds->NewIterator();
+    iter->InitTraversal();
+    while (!iter->IsDoneWithTraversal())
+      {
+      vtkPolyData* pd = vtkPolyData::SafeDownCast(
+        iter->GetCurrentDataObject());
+      if (pd)
+        {
+        numCells += pd->GetNumberOfCells();
+        }
+      iter->GoToNextItem();
+      }
+    iter->Delete();
+    }
+
+  if (numCells != baseNumCells)
+    {
+    cout << "Error in vtkSMPContourGrid (MergePieces = false) output." << endl;
+    cout << "Number of cells does not match expected, "
+         << numCells << " vs. " << baseNumCells << endl;
+    return EXIT_FAILURE;
+    }
+
+  vtkNew<vtkSMPContourGridManyPieces> cg3;
+  cg3->SetInputData(tetraFilter->GetOutput());
+  cg3->SetInputArrayToProcess(0, 0, 0, 0, "RTData");
+  cg3->SetValue(0, 200);
+  cg3->SetValue(1, 220);
+  cout << "SMP Contour grid: " << endl;
+  tl->StartTimer();
+  cg3->Update();
+  tl->StopTimer();
+  cout << "Time: " << tl->GetElapsedTime() << endl;
+
+  numCells = 0;
+
+  cds = vtkCompositeDataSet::SafeDownCast(
+    cg3->GetOutputDataObject(0));
+  if (cds)
+    {
+    vtkCompositeDataIterator* iter = cds->NewIterator();
+    iter->InitTraversal();
+    while (!iter->IsDoneWithTraversal())
+      {
+      vtkPolyData* pd = vtkPolyData::SafeDownCast(
+        iter->GetCurrentDataObject());
+      if (pd)
+        {
+        numCells += pd->GetNumberOfCells();
+        }
+      iter->GoToNextItem();
+      }
+    iter->Delete();
+    }
+
+  if (numCells != baseNumCells)
+    {
+    cout << "Error in vtkSMPContourGridManyPieces output." << endl;
+    cout << "Number of cells does not match expected, "
+         << numCells << " vs. " << baseNumCells << endl;
+    return EXIT_FAILURE;
+    }
+
+#if WRITE_DEBUG
+  vtkNew<vtkXMLMultiBlockDataWriter> writer;
+  writer->SetInputData(cg2->GetOutputDataObject(0));
+  writer->SetFileName("contour1.vtm");
+  writer->SetDataModeToAscii();
+  writer->Write();
+
+  vtkNew<vtkXMLMultiBlockDataWriter> writer2;
+  writer2->SetInputData(cg3->GetOutputDataObject(0));
+  writer2->SetFileName("contour2.vtm");
+  writer2->SetDataModeToAscii();
+  writer2->Write();
+#endif
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/SMP/Testing/Cxx/TestSMPTransform.cxx b/Filters/SMP/Testing/Cxx/TestSMPTransform.cxx
new file mode 100644
index 0000000..0102e02
--- /dev/null
+++ b/Filters/SMP/Testing/Cxx/TestSMPTransform.cxx
@@ -0,0 +1,136 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSMPTransform.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkDataSet.h"
+#include "vtkElevationFilter.h"
+#include "vtkFloatArray.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkSMPTools.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkSMPTransform.h"
+#include "vtkTransform.h"
+#include "vtkTransformFilter.h"
+#include "vtkStructuredGrid.h"
+#include "vtkSMPThreadLocal.h"
+#include "vtkTimerLog.h"
+
+const double spacing = 0.1;
+const int resolution = 101;
+
+class vtkSetFunctor2
+{
+public:
+  float* pts;
+  float* disp;
+
+  void  operator()(vtkIdType begin, vtkIdType end)
+  {
+    vtkIdType offset = 3 * begin * resolution * resolution;
+    float* itr = pts + offset;
+    float* ditr = disp + offset;
+
+    for (int k=begin; k<end; k++)
+      for (int j=0; j<resolution; j++)
+        for (int i=0; i<resolution; i++)
+          {
+          *itr = i*spacing;
+          itr++;
+          *itr = j*spacing;
+          itr++;
+          *itr = k*spacing;
+          itr++;
+
+          *ditr = 10;
+          ditr++;
+          *ditr = 10;
+          ditr++;
+          *ditr = 10;
+          ditr++;
+          }
+  }
+};
+
+int TestSMPTransform(int argc, char* argv[])
+{
+  int numThreads = 2;
+  for(int argi=1; argi<argc; argi++)
+    {
+    if(std::string(argv[argi])=="--numThreads")
+      {
+      numThreads=atoi(argv[++argi]);
+      break;
+      }
+    }
+  cout << "Num. threads: " << numThreads << endl;
+  vtkSMPTools::Initialize(numThreads);
+
+  vtkNew<vtkTimerLog> tl;
+
+  vtkNew<vtkStructuredGrid> sg;
+  sg->SetDimensions(resolution, resolution, resolution);
+
+  vtkNew<vtkPoints> pts;
+  pts->SetNumberOfPoints(resolution*resolution*resolution);
+
+  //vtkSetFunctor func;
+  vtkSetFunctor2 func;
+  func.pts = (float*)pts->GetVoidPointer(0);
+  //func.pts = (vtkFloatArray*)pts->GetData();
+
+  sg->SetPoints(pts.GetPointer());
+
+  vtkNew<vtkFloatArray> disp;
+  disp->SetNumberOfComponents(3);
+  disp->SetNumberOfTuples(sg->GetNumberOfPoints());
+  disp->SetName("Disp");
+  sg->GetPointData()->AddArray(disp.GetPointer());
+  func.disp = (float*)disp->GetVoidPointer(0);
+
+  tl->StartTimer();
+  vtkSMPTools::For(0, resolution, func);
+  tl->StopTimer();
+  cout << "Initialize: " << tl->GetElapsedTime() << endl;
+
+  vtkNew<vtkTransformFilter> tr;
+  tr->SetInputData(sg.GetPointer());
+
+  vtkNew<vtkTransform> serialTr;
+  serialTr->Identity();
+  tr->SetTransform(serialTr.GetPointer());
+
+  tl->StartTimer();
+  tr->Update();
+  tl->StopTimer();
+  cout << "Serial transform: " << tl->GetElapsedTime() << endl;
+
+  // Release memory so that we can do more.
+  tr->GetOutput()->Initialize();
+
+  vtkNew<vtkTransformFilter> tr2;
+  tr2->SetInputData(sg.GetPointer());
+
+  vtkNew<vtkSMPTransform> parallelTr;
+  parallelTr->Identity();
+  tr2->SetTransform(parallelTr.GetPointer());
+
+  tl->StartTimer();
+  tr2->Update();
+  tl->StopTimer();
+  cout << "Parallel transform: " << tl->GetElapsedTime() << endl;
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/SMP/Testing/Cxx/TestSMPWarp.cxx b/Filters/SMP/Testing/Cxx/TestSMPWarp.cxx
new file mode 100644
index 0000000..e703cf8
--- /dev/null
+++ b/Filters/SMP/Testing/Cxx/TestSMPWarp.cxx
@@ -0,0 +1,272 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSMPWarp.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkDataSet.h"
+#include "vtkElevationFilter.h"
+#include "vtkFloatArray.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkSMPTools.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkSMPWarpVector.h"
+#include "vtkStructuredGrid.h"
+#include "vtkSMPThreadLocal.h"
+#include "vtkTimerLog.h"
+
+const double spacing = 0.1;
+const int resolution = 101;
+
+class vtkSetFunctor2
+{
+public:
+  float* pts;
+  float* disp;
+
+  void  operator()(vtkIdType begin, vtkIdType end)
+  {
+    vtkIdType offset = 3 * begin * resolution * resolution;
+    float* itr = pts + offset;
+    float* ditr = disp + offset;
+
+    for (int k=begin; k<end; k++)
+      for (int j=0; j<resolution; j++)
+        for (int i=0; i<resolution; i++)
+          {
+          *itr = i*spacing;
+          itr++;
+          *itr = j*spacing;
+          itr++;
+          *itr = k*spacing;
+          itr++;
+
+          *ditr = 10;
+          ditr++;
+          *ditr = 10;
+          ditr++;
+          *ditr = 10;
+          ditr++;
+          }
+  }
+};
+
+class vtkBoundsFunctor
+{
+  typedef vtkSMPThreadLocal<std::vector<double> > TLS;
+  typedef TLS::iterator TLSIter;
+public:
+  TLS tlBounds;
+  vtkFloatArray* pts;
+  double bounds[6];
+  double defaults[6];
+
+  vtkBoundsFunctor()
+    {
+      static const double adefaults[] = { VTK_DOUBLE_MAX, - VTK_DOUBLE_MAX,
+                                          VTK_DOUBLE_MAX, - VTK_DOUBLE_MAX,
+                                          VTK_DOUBLE_MAX, - VTK_DOUBLE_MAX};
+      memcpy(defaults, adefaults, 6*sizeof(double));
+      memcpy(bounds, adefaults, 6*sizeof(double));
+    }
+
+  void Initialize()
+    {
+      std::vector<double>& lbounds = tlBounds.Local();
+      lbounds.resize(6);
+      memcpy(&lbounds[0], defaults, 6*sizeof(double));
+    }
+
+  void Reduce()
+    {
+      TLSIter end = tlBounds.end();
+      for (TLSIter itr = tlBounds.begin(); itr != end; ++itr)
+        {
+        std::vector<double>& aBounds = *itr;
+        bounds[0] = bounds[0] < aBounds[0] ? bounds[0] : aBounds[0];
+        bounds[1] = bounds[1] > aBounds[1] ? bounds[1] : aBounds[1];
+        bounds[2] = bounds[2] < aBounds[2] ? bounds[2] : aBounds[2];
+        bounds[3] = bounds[3] > aBounds[3] ? bounds[3] : aBounds[3];
+        bounds[4] = bounds[4] < aBounds[4] ? bounds[4] : aBounds[4];
+        bounds[5] = bounds[5] > aBounds[5] ? bounds[5] : aBounds[5];
+        }
+    }
+
+  void operator()(vtkIdType begin, vtkIdType end)
+    {
+      std::vector<double>& bds = tlBounds.Local();
+      double* lbounds = &bds[0];
+
+      /*
+      double bounds[] = { VTK_DOUBLE_MAX, - VTK_DOUBLE_MAX,
+                          VTK_DOUBLE_MAX, - VTK_DOUBLE_MAX,
+                          VTK_DOUBLE_MAX, - VTK_DOUBLE_MAX};
+      */
+
+      //float x[3];
+      float* x;
+      float* fptr = pts->GetPointer(3*begin);
+      for (vtkIdType i=begin; i<end; i++)
+        {
+        //pts->GetTupleValue(i, x);
+        x = fptr;
+        lbounds[0] = x[0] < lbounds[0] ? x[0] : lbounds[0];
+        lbounds[1] = x[0] > lbounds[1] ? x[0] : lbounds[1];
+        lbounds[2] = x[1] < lbounds[2] ? x[1] : lbounds[2];
+        lbounds[3] = x[1] > lbounds[3] ? x[1] : lbounds[3];
+        lbounds[4] = x[2] < lbounds[4] ? x[2] : lbounds[4];
+        lbounds[5] = x[2] > lbounds[5] ? x[2] : lbounds[5];
+        fptr += 3;
+        }
+    }
+};
+
+/*
+class vtkSetFunctor
+{
+public:
+  vtkFloatArray* pts;
+
+  void  operator()(vtkIdType begin, vtkIdType end)
+  {
+    float pt[3];
+    vtkIdType counter = 0;
+    for (int k=begin; k<end; k++)
+      for (int j=0; j<resolution; j++)
+        for (int i=0; i<resolution; i++)
+          {
+          pt[0] = i*spacing;
+          pt[1] = j*spacing;
+          pt[2] = k*spacing;
+#if 0
+          pts->SetTuple(counter++, pt);
+#else
+          pts->SetTupleValue(counter++, pt);
+          // pts->SetValue(counter++, i*spacing);
+          // pts->SetValue(counter++, j*spacing);
+          // pts->SetValue(counter++, k*spacing);
+#endif
+          }
+  }
+};
+*/
+
+int TestSMPWarp(int argc, char* argv[])
+{
+  int numThreads = 2;
+  for(int argi=1; argi<argc; argi++)
+    {
+    if(std::string(argv[argi])=="--numThreads")
+      {
+      numThreads=atoi(argv[++argi]);
+      break;
+      }
+    }
+  cout << "Num. threads: " << numThreads << endl;
+  vtkSMPTools::Initialize(numThreads);
+
+  vtkNew<vtkTimerLog> tl;
+
+  vtkNew<vtkStructuredGrid> sg;
+  sg->SetDimensions(resolution, resolution, resolution);
+
+  vtkNew<vtkPoints> pts;
+  pts->SetNumberOfPoints(resolution*resolution*resolution);
+
+  //vtkSetFunctor func;
+  vtkSetFunctor2 func;
+  func.pts = (float*)pts->GetVoidPointer(0);
+  //func.pts = (vtkFloatArray*)pts->GetData();
+
+  sg->SetPoints(pts.GetPointer());
+
+  vtkNew<vtkFloatArray> disp;
+  disp->SetNumberOfComponents(3);
+  disp->SetNumberOfTuples(sg->GetNumberOfPoints());
+  disp->SetName("Disp");
+  sg->GetPointData()->AddArray(disp.GetPointer());
+  func.disp = (float*)disp->GetVoidPointer(0);
+
+  tl->StartTimer();
+  vtkSMPTools::For(0, resolution, func);
+  tl->StopTimer();
+  cout << "Initialize: " << tl->GetElapsedTime() << endl;
+
+  vtkNew<vtkWarpVector> vw;
+  vw->SetInputData(sg.GetPointer());
+  vw->SetInputArrayToProcess(0, 0, 0,
+                             vtkDataObject::FIELD_ASSOCIATION_POINTS,
+                             "Disp");
+  tl->StartTimer();
+  vw->Update();
+  tl->StopTimer();
+  cout << "Serial warp: " << tl->GetElapsedTime() << endl;
+
+  tl->StartTimer();
+  double bounds[6];
+  vw->GetOutput()->GetBounds(bounds);
+  tl->StopTimer();
+  cout << "Get bounds: " << tl->GetElapsedTime() << endl;
+
+  vtkBoundsFunctor calcBounds;
+  calcBounds.pts = (vtkFloatArray*)vw->GetOutput()->GetPoints()->GetData();
+  tl->StartTimer();
+  vtkSMPTools::For(0, resolution*resolution*resolution, calcBounds);
+  tl->StopTimer();
+  cout << "Get bounds (parallel): " << tl->GetElapsedTime() << endl;
+  cout << calcBounds.bounds[0] << " " << calcBounds.bounds[1] << " "
+       << calcBounds.bounds[2] << " " << calcBounds.bounds[3] << " "
+       << calcBounds.bounds[4] << " " << calcBounds.bounds[5] << endl;
+
+  for (int i=0; i<6; i++)
+    {
+    if (calcBounds.bounds[i] != bounds[i])
+      {
+      cout << "Bounds did not match after parallel bounds calculation" << endl;
+      return EXIT_FAILURE;
+      }
+    }
+  // Release memory so that we can do more.
+  vw->GetOutput()->Initialize();
+
+  vtkNew<vtkSMPWarpVector> smpvw;
+  smpvw->SetInputData(sg.GetPointer());
+  smpvw->SetInputArrayToProcess(0, 0, 0,
+                                vtkDataObject::FIELD_ASSOCIATION_POINTS,
+                                "Disp");
+  tl->StartTimer();
+  smpvw->Update();
+  tl->StopTimer();
+  cout << "Parallel warp: " << tl->GetElapsedTime() << endl;
+
+  calcBounds.pts = (vtkFloatArray*)smpvw->GetOutput()->GetPoints()->GetData();
+  tl->StartTimer();
+  vtkSMPTools::For(0, resolution*resolution*resolution, calcBounds);
+  tl->StopTimer();
+  cout << "Get bounds (parallel): " << tl->GetElapsedTime() << endl;
+  cout << calcBounds.bounds[0] << " " << calcBounds.bounds[1] << " "
+       << calcBounds.bounds[2] << " " << calcBounds.bounds[3] << " "
+       << calcBounds.bounds[4] << " " << calcBounds.bounds[5] << endl;
+
+  for (int i=0; i<6; i++)
+    {
+    if (calcBounds.bounds[i] != bounds[i])
+      {
+      cout << "Bounds did not match after parallel bounds calculation" << endl;
+      return EXIT_FAILURE;
+      }
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/SMP/module.cmake b/Filters/SMP/module.cmake
new file mode 100644
index 0000000..3d7ecbb
--- /dev/null
+++ b/Filters/SMP/module.cmake
@@ -0,0 +1,11 @@
+vtk_module(vtkFiltersSMP
+  GROUPS
+    StandAlone
+  DEPENDS
+    vtkFiltersCore
+    vtkFiltersGeneral
+  TEST_DEPENDS
+    vtkImagingCore
+    vtkIOXML
+    vtkTestingCore
+  )
diff --git a/Filters/SMP/vtkSMPContourGrid.cxx b/Filters/SMP/vtkSMPContourGrid.cxx
new file mode 100644
index 0000000..a23955c
--- /dev/null
+++ b/Filters/SMP/vtkSMPContourGrid.cxx
@@ -0,0 +1,526 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPContourGrid.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSMPContourGrid.h"
+
+#include "vtkCellArray.h"
+#include "vtkCellData.h"
+#include "vtkDataArray.h"
+#include "vtkGenericCell.h"
+#include "vtkNew.h"
+#include "vtkNonMergingPointLocator.h"
+#include "vtkObjectFactory.h"
+#include "vtkPointData.h"
+#include "vtkPolyData.h"
+#include "vtkSmartPointer.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkMergePoints.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkMultiPieceDataSet.h"
+#include "vtkSMPTools.h"
+#include "vtkSMPThreadLocal.h"
+#include "vtkSMPThreadLocalObject.h"
+#include "vtkInformation.h"
+#include "vtkSMPMergePoints.h"
+#include "vtkSMPMergePolyDataHelper.h"
+#include "vtkInformationVector.h"
+#include "vtkDemandDrivenPipeline.h"
+
+#include "vtkTimerLog.h"
+
+#include <math.h>
+
+vtkStandardNewMacro(vtkSMPContourGrid);
+
+// Construct object with initial range (0,1) and single contour value
+// of 0.0.
+vtkSMPContourGrid::vtkSMPContourGrid()
+{
+  this->MergePieces = true;
+}
+
+vtkSMPContourGrid::~vtkSMPContourGrid()
+{
+}
+
+namespace
+{
+
+struct vtkLocalDataType
+{
+  vtkPolyData* Output;
+  vtkSMPMergePoints* Locator;
+  vtkIdList* VertOffsets;
+  vtkIdList* LineOffsets;
+  vtkIdList* PolyOffsets;
+
+  vtkLocalDataType() : Output(0)
+    {
+    }
+};
+
+// This functor uses thread local storage to create one vtkPolyData per
+// thread. Each execution of the functor adds to the vtkPolyData that is
+// local to the thread it is running on.
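+//
+// A minimal sketch of the Initialize/operator()/Reduce pattern used below
+// (hypothetical functor, for illustration only; it relies on the same
+// vtkSMPThreadLocal and vtkSMPTools APIs included above):
+//
+//   class SketchFunctor
+//   {
+//   public:
+//     vtkSMPThreadLocal<vtkIdType> Count; // one value per thread
+//     void Initialize() { this->Count.Local() = 0; }
+//     void operator()(vtkIdType begin, vtkIdType end)
+//       { this->Count.Local() += end - begin; } // no locking needed
+//     void Reduce()
+//       {
+//       vtkIdType total = 0;
+//       vtkSMPThreadLocal<vtkIdType>::iterator it = this->Count.begin();
+//       for (; it != this->Count.end(); ++it) { total += *it; }
+//       (void)total; // combine the per-thread results here
+//       }
+//   };
+//   // SketchFunctor f; vtkSMPTools::For(0, numberOfCells, f);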
+template <typename T>
+class vtkContourGridFunctor
+{
+public:
+  vtkSMPContourGrid* Filter;
+
+  vtkUnstructuredGrid* Input;
+  vtkDataArray* InScalars;
+
+  vtkDataObject* Output;
+
+  vtkSMPThreadLocal<vtkDataArray*> CellScalars;
+
+  vtkSMPThreadLocalObject<vtkGenericCell> Cell;
+  vtkSMPThreadLocalObject<vtkPoints> NewPts;
+  vtkSMPThreadLocalObject<vtkCellArray> NewVerts;
+  vtkSMPThreadLocalObject<vtkCellArray> NewLines;
+  vtkSMPThreadLocalObject<vtkCellArray> NewPolys;
+
+  vtkSMPThreadLocal<vtkLocalDataType> LocalData;
+
+  int NumValues;
+  double* Values;
+
+  vtkContourGridFunctor(vtkSMPContourGrid* filter,
+                        vtkUnstructuredGrid* input,
+                        vtkDataArray* inScalars,
+                        int numValues,
+                        double* values,
+                        vtkDataObject* output) : Filter(filter),
+                                                 Input(input),
+                                                 InScalars(inScalars),
+                                                 Output(output),
+                                                 NumValues(numValues),
+                                                 Values(values)
+  {
+  }
+
+  virtual ~vtkContourGridFunctor()
+  {
+    // Cleanup all temporaries
+
+    vtkSMPThreadLocal<vtkDataArray*>::iterator cellScalarsIter =
+      this->CellScalars.begin();
+    while(cellScalarsIter != this->CellScalars.end())
+      {
+      (*cellScalarsIter)->Delete();
+      ++cellScalarsIter;
+      }
+
+    vtkSMPThreadLocal<vtkLocalDataType>::iterator dataIter =
+      this->LocalData.begin();
+    while(dataIter != this->LocalData.end())
+      {
+      (*dataIter).Output->Delete();
+      (*dataIter).Locator->Delete();
+      (*dataIter).VertOffsets->Delete();
+      (*dataIter).LineOffsets->Delete();
+      (*dataIter).PolyOffsets->Delete();
+      ++dataIter;
+      }
+  }
+
+  void Initialize()
+  {
+    // Initialize thread local object before any processing happens.
+    // This gets called once per thread.
+
+    vtkPointLocator* locator;
+    vtkPolyData* output;
+    vtkIdList* vertOffsets;
+    vtkIdList* lineOffsets;
+    vtkIdList* polyOffsets;
+
+    vtkLocalDataType& localData = this->LocalData.Local();
+
+    localData.Output = vtkPolyData::New();
+    output = localData.Output;
+
+    localData.Locator = vtkSMPMergePoints::New();
+    locator = localData.Locator;
+
+    localData.VertOffsets = vtkIdList::New();
+    vertOffsets = localData.VertOffsets;
+
+    localData.LineOffsets = vtkIdList::New();
+    lineOffsets = localData.LineOffsets;
+
+    localData.PolyOffsets = vtkIdList::New();
+    polyOffsets = localData.PolyOffsets;
+
+    vtkPoints*& newPts = this->NewPts.Local();
+
+    // set precision for the points in the output
+    if(this->Filter->GetOutputPointsPrecision() == vtkAlgorithm::DEFAULT_PRECISION)
+      {
+      newPts->SetDataType(this->Input->GetPoints()->GetDataType());
+      }
+    else if(this->Filter->GetOutputPointsPrecision() == vtkAlgorithm::SINGLE_PRECISION)
+      {
+      newPts->SetDataType(VTK_FLOAT);
+      }
+    else if(this->Filter->GetOutputPointsPrecision() == vtkAlgorithm::DOUBLE_PRECISION)
+      {
+      newPts->SetDataType(VTK_DOUBLE);
+      }
+
+    output->SetPoints(newPts);
+
+    vtkIdType numCells = this->Input->GetNumberOfCells();
+
+    vtkIdType estimatedSize=static_cast<vtkIdType>(
+      pow(static_cast<double>(numCells),.75));
+    estimatedSize = estimatedSize / 1024 * 1024; //multiple of 1024
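+    // e.g. numCells = 1,000,000: pow(1e6, .75) ~ 31622, rounded down to 30720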
+    if (estimatedSize < 1024)
+      {
+      estimatedSize = 1024;
+      }
+
+    newPts->Allocate(estimatedSize, estimatedSize);
+
+    vertOffsets->Allocate(estimatedSize);
+    lineOffsets->Allocate(estimatedSize);
+    polyOffsets->Allocate(estimatedSize);
+
+    //locator->SetPoints(newPts);
+    locator->InitPointInsertion (newPts,
+                                 this->Input->GetBounds(),
+                                 this->Input->GetNumberOfPoints());
+
+    vtkCellArray*& newVerts = this->NewVerts.Local();
+    newVerts->Allocate(estimatedSize,estimatedSize);
+    output->SetVerts(newVerts);
+
+    vtkCellArray*& newLines = this->NewLines.Local();
+    newLines->Allocate(estimatedSize,estimatedSize);
+    output->SetLines(newLines);
+
+    vtkCellArray*& newPolys = this->NewPolys.Local();
+    newPolys->Allocate(estimatedSize,estimatedSize);
+    output->SetPolys(newPolys);
+
+    vtkDataArray*& cellScalars = this->CellScalars.Local();
+    cellScalars = this->InScalars->NewInstance();
+    cellScalars->SetNumberOfComponents(this->InScalars->GetNumberOfComponents());
+    cellScalars->Allocate(VTK_CELL_SIZE*this->InScalars->GetNumberOfComponents());
+
+    vtkPointData* outPd = output->GetPointData();
+    vtkCellData* outCd = output->GetCellData();
+    vtkPointData* inPd = this->Input->GetPointData();
+    vtkCellData* inCd = this->Input->GetCellData();
+    outPd->InterpolateAllocate(inPd, estimatedSize, estimatedSize);
+    outCd->CopyAllocate(inCd, estimatedSize, estimatedSize);
+  }
+
+  void operator()(vtkIdType begin, vtkIdType end)
+  {
+    // Actual computation.
+    // Note the usage of thread local objects. These objects
+    // persist for each thread across multiple execution of the
+    // functor.
+
+    vtkLocalDataType& localData = this->LocalData.Local();
+
+    vtkGenericCell* cell = this->Cell.Local();
+    vtkDataArray* cs = this->CellScalars.Local();
+    vtkPointData* inPd = this->Input->GetPointData();
+    vtkCellData* inCd = this->Input->GetCellData();
+
+    vtkPolyData* output = localData.Output;
+    vtkPointData* outPd = output->GetPointData();
+    vtkCellData* outCd = output->GetCellData();
+
+    vtkCellArray* vrts = this->NewVerts.Local();
+    vtkCellArray* lines = this->NewLines.Local();
+    vtkCellArray* polys = this->NewPolys.Local();
+
+    vtkPointLocator* loc = localData.Locator;
+
+    vtkIdList* vertOffsets = localData.VertOffsets;
+    vtkIdList* lineOffsets = localData.LineOffsets;
+    vtkIdList* polyOffsets = localData.PolyOffsets;
+
+    const double* values = this->Values;
+    int numValues = this->NumValues;
+
+    vtkNew<vtkIdList> pids;
+    T range[2];
+
+    for (vtkIdType cellid=begin; cellid<end; cellid++)
+      {
+      this->Input->GetCellPoints(cellid, pids.GetPointer());
+      cs->SetNumberOfTuples(pids->GetNumberOfIds());
+      this->InScalars->GetTuples(pids.GetPointer(), cs);
+      int numCellScalars = cs->GetNumberOfComponents()
+        * cs->GetNumberOfTuples();
+      T* cellScalarPtr = static_cast<T*>(cs->GetVoidPointer(0));
+
+      //find min and max values in scalar data
+      range[0] = range[1] = cellScalarPtr[0];
+
+      for (T *it = cellScalarPtr + 1, *itEnd = cellScalarPtr + numCellScalars;
+           it != itEnd;
+           ++it)
+        {
+        if (*it <= range[0])
+          {
+          range[0] = *it;
+          } //if scalar <= min range value
+        if (*it >= range[1])
+          {
+          range[1] = *it;
+          } //if scalar >= max range value
+        } // for all cellScalars
+
+      bool needCell = false;
+      for (int i = 0; i < numValues; i++)
+        {
+        if ((values[i] >= range[0]) && (values[i] <= range[1]))
+            {
+            needCell = true;
+            } // if contour value in range for this cell
+          } // end for numContours
+
+      if (needCell)
+          {
+          this->Input->GetCell(cellid, cell);
+
+          for (int i=0; i < numValues; i++)
+            {
+            if ((values[i] >= range[0]) && (values[i] <= range[1]))
+              {
+              vtkIdType begVertSize = vrts->GetNumberOfConnectivityEntries();
+              vtkIdType begLineSize = lines->GetNumberOfConnectivityEntries();
+              vtkIdType begPolySize = polys->GetNumberOfConnectivityEntries();
+              cell->Contour(values[i],
+                            cs,
+                            loc,
+                            vrts,
+                            lines,
+                            polys,
+                            inPd,
+                            outPd,
+                            inCd,
+                            cellid,
+                            outCd);
+              // We keep track of the insertion points of verts, lines and polys.
+              // These offsets are later used when merging the data structures in
+              // parallel. They are needed because vtkCellArray is not normally
+              // random access, which makes processing it in parallel very
+              // difficult. So we build a semi-random-access structure alongside
+              // it. This is only useful for merging, since each of these indices
+              // can point to multiple cells.
+              if (vrts->GetNumberOfConnectivityEntries() > begVertSize)
+                {
+                vertOffsets->InsertNextId(begVertSize);
+                }
+              if (lines->GetNumberOfConnectivityEntries() > begLineSize)
+                {
+                lineOffsets->InsertNextId(begLineSize);
+                }
+              if (polys->GetNumberOfConnectivityEntries() > begPolySize)
+                {
+                polyOffsets->InsertNextId(begPolySize);
+                }
+              }
+            }
+          }
+      }
+  }
+
+  void Reduce()
+  {
+    // Create the final multi-block dataset
+
+    vtkNew<vtkMultiPieceDataSet> mp;
+    int count = 0;
+
+    vtkSMPThreadLocal<vtkLocalDataType>::iterator outIter =
+      this->LocalData.begin();
+    while(outIter != this->LocalData.end())
+      {
+      vtkPolyData* output = (*outIter).Output;
+
+      if (output->GetVerts()->GetNumberOfCells() == 0)
+        {
+        output->SetVerts(0);
+        }
+
+      if (output->GetLines()->GetNumberOfCells() == 0)
+        {
+        output->SetLines(0);
+        }
+
+      if (output->GetPolys()->GetNumberOfCells() == 0)
+        {
+        output->SetPolys(0);
+        }
+
+      output->Squeeze();
+
+      mp->SetPiece(count++, output);
+
+      ++outIter;
+      }
+
+    vtkMultiBlockDataSet* output = vtkMultiBlockDataSet::SafeDownCast(this->Output);
+    // If the output is a vtkMultiBlockDataSet (MergePieces off), store the
+    // pieces in it. If the output is a vtkPolyData (merging), the multi-piece
+    // dataset is discarded here and the pieces are merged later in DoContour().
+    if (output)
+      {
+      output->SetBlock(0, mp.GetPointer());
+      }
+  }
+};
+template <typename T>
+void DoContour(vtkSMPContourGrid* filter,
+               vtkUnstructuredGrid* input,
+               vtkIdType numCells,
+               vtkDataArray* inScalars,
+               int numContours,
+               double* values,
+               vtkDataObject* output)
+{
+  // Contour in parallel
+  vtkContourGridFunctor<T> functor(filter, input, inScalars, numContours, values, output);
+  vtkSMPTools::For(0, numCells, functor);
+
+  if (output->IsA("vtkPolyData"))
+    {
+    // Do the merging.
+    vtkSMPThreadLocal<vtkLocalDataType>::iterator itr = functor.LocalData.begin();
+    vtkSMPThreadLocal<vtkLocalDataType>::iterator end = functor.LocalData.end();
+
+    std::vector<vtkSMPMergePolyDataHelper::InputData> mpData;
+    while(itr != end)
+      {
+      mpData.push_back(vtkSMPMergePolyDataHelper::InputData((*itr).Output,
+                                                            (*itr).Locator,
+                                                            (*itr).VertOffsets,
+                                                            (*itr).LineOffsets,
+                                                            (*itr).PolyOffsets));
+      ++itr;
+      }
+
+    vtkPolyData* moutput = vtkSMPMergePolyDataHelper::MergePolyData(mpData);
+    output->ShallowCopy(moutput);
+    moutput->Delete();
+    }
+}
+
+}
+
+int vtkSMPContourGrid::RequestDataObject(
+  vtkInformation* vtkNotUsed(request),
+  vtkInformationVector**,
+  vtkInformationVector* outputVector)
+{
+  vtkInformation* info = outputVector->GetInformationObject(0);
+  if (this->MergePieces)
+    {
+    vtkPolyData *output = vtkPolyData::GetData(info);
+    if (!output)
+      {
+      vtkPolyData* newOutput = vtkPolyData::New();
+      info->Set(vtkDataObject::DATA_OBJECT(), newOutput);
+      newOutput->Delete();
+      }
+    }
+  else
+    {
+    vtkMultiBlockDataSet *output = vtkMultiBlockDataSet::GetData(info);
+    if (!output)
+      {
+      vtkMultiBlockDataSet* newOutput = vtkMultiBlockDataSet::New();
+      info->Set(vtkDataObject::DATA_OBJECT(), newOutput);
+      newOutput->Delete();
+      }
+    }
+  return 1;
+}
+
+int vtkSMPContourGrid::RequestData(
+  vtkInformation *vtkNotUsed(request),
+  vtkInformationVector **inputVector,
+  vtkInformationVector *outputVector)
+{
+  // get the input and output
+  vtkUnstructuredGrid *input = vtkUnstructuredGrid::GetData(inputVector[0]);
+  vtkDataObject *output = vtkDataObject::GetData(outputVector);
+
+  if (input->GetNumberOfCells() == 0)
+    {
+    return 1;
+    }
+
+  vtkDataArray* inScalars = this->GetInputArrayToProcess(0,inputVector);
+  if (!inScalars)
+    {
+    return 1;
+    }
+
+  // Not thread safe so calculate first.
+  input->GetBounds();
+
+  int numContours = this->GetNumberOfContours();
+  if (numContours < 1)
+    {
+    return 1;
+    }
+
+  double *values=this->GetValues();
+
+  vtkIdType numCells = input->GetNumberOfCells();
+
+  if (inScalars->GetDataType() == VTK_FLOAT)
+    {
+    DoContour<float>(this, input, numCells, inScalars, numContours, values, output);
+    }
+  else if(inScalars->GetDataType() == VTK_DOUBLE)
+    {
+    DoContour<double>(this, input, numCells, inScalars, numContours, values, output);
+    }
+
+  return 1;
+}
+
+int vtkSMPContourGrid::FillOutputPortInformation(
+  int vtkNotUsed(port), vtkInformation* info)
+{
+  info->Set(vtkDataObject::DATA_TYPE_NAME(), "vtkDataObject");
+  return 1;
+}
+
+int vtkSMPContourGrid::ProcessRequest(vtkInformation* request,
+                                      vtkInformationVector** inputVector,
+                                      vtkInformationVector* outputVector)
+{
+  // generate the data
+  if(request->Has(vtkDemandDrivenPipeline::REQUEST_DATA_OBJECT()))
+    {
+    return this->RequestDataObject(request, inputVector, outputVector);
+    }
+
+  return this->Superclass::ProcessRequest(request, inputVector, outputVector);
+}
+
+void vtkSMPContourGrid::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
diff --git a/Filters/SMP/vtkSMPContourGrid.h b/Filters/SMP/vtkSMPContourGrid.h
new file mode 100644
index 0000000..ebdf225
--- /dev/null
+++ b/Filters/SMP/vtkSMPContourGrid.h
@@ -0,0 +1,70 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPContourGrid.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPContourGrid - a subclass of vtkContourGrid that works in parallel
+// .SECTION Description
+// vtkSMPContourGrid performs the same functionality as vtkContourGrid but does
+// it using multiple threads. This will probably be merged with vtkContourGrid
+// in the future.
+
+#ifndef __vtkSMPContourGrid_h
+#define __vtkSMPContourGrid_h
+
+#include "vtkFiltersSMPModule.h" // For export macro
+#include "vtkContourGrid.h"
+
+class VTKFILTERSSMP_EXPORT vtkSMPContourGrid : public vtkContourGrid
+{
+public:
+  vtkTypeMacro(vtkSMPContourGrid,vtkContourGrid);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Constructor.
+  static vtkSMPContourGrid *New();
+
+  // Description:
+  // If MergePieces is true (default), this filter will merge all
+  // pieces generated by processing the input with multiple threads.
+  // The output will be a vtkPolyData. Note that this has a slight overhead
+  // which becomes more significant as the number of threads used grows.
+  // If MergePieces is false, this filter will generate a vtkMultiBlock
+  // of vtkPolyData where the number of pieces will be equal to the number
+  // of threads used.
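+  //
+  // A minimal usage sketch (hypothetical pipeline and array names, shown for
+  // illustration only):
+  //
+  //   vtkNew<vtkSMPContourGrid> contour;
+  //   contour->SetInputData(grid); // some vtkUnstructuredGrid
+  //   contour->SetInputArrayToProcess(0, 0, 0, 0, "scalars");
+  //   contour->SetValue(0, 200.0);
+  //   contour->Update(); // MergePieces on (default): single vtkPolyData
+  //   vtkPolyData* pd =
+  //     vtkPolyData::SafeDownCast(contour->GetOutputDataObject(0));
+  //
+  //   contour->MergePiecesOff(); // one piece per thread instead
+  //   contour->Update();
+  //   vtkMultiBlockDataSet* mb =
+  //     vtkMultiBlockDataSet::SafeDownCast(contour->GetOutputDataObject(0));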
+  vtkSetMacro(MergePieces, bool);
+  vtkGetMacro(MergePieces, bool);
+  vtkBooleanMacro(MergePieces, bool);
+
+  // Description:
+  // see vtkAlgorithm for details
+  virtual int ProcessRequest(vtkInformation*,
+                             vtkInformationVector**,
+                             vtkInformationVector*);
+
+protected:
+  vtkSMPContourGrid();
+  ~vtkSMPContourGrid();
+
+  virtual int RequestDataObject(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
+  virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
+
+  virtual int FillOutputPortInformation(int port, vtkInformation* info);
+
+  bool MergePieces;
+
+private:
+  vtkSMPContourGrid(const vtkSMPContourGrid&);  // Not implemented.
+  void operator=(const vtkSMPContourGrid&);  // Not implemented.
+};
+
+#endif
diff --git a/Filters/SMP/vtkSMPContourGridManyPieces.cxx b/Filters/SMP/vtkSMPContourGridManyPieces.cxx
new file mode 100644
index 0000000..3db8e8d
--- /dev/null
+++ b/Filters/SMP/vtkSMPContourGridManyPieces.cxx
@@ -0,0 +1,346 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPContourGridManyPieces.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSMPContourGridManyPieces.h"
+
+#include "vtkCellArray.h"
+#include "vtkCellData.h"
+#include "vtkDataArray.h"
+#include "vtkGenericCell.h"
+#include "vtkNew.h"
+#include "vtkNonMergingPointLocator.h"
+#include "vtkObjectFactory.h"
+#include "vtkPointData.h"
+#include "vtkPolyData.h"
+#include "vtkSmartPointer.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkMergePoints.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkMultiPieceDataSet.h"
+#include "vtkSMPTools.h"
+#include "vtkSMPThreadLocal.h"
+#include "vtkSMPThreadLocalObject.h"
+#include "vtkInformation.h"
+#include "vtkSMPMergePoints.h"
+#include "vtkSMPMergePolyDataHelper.h"
+
+#include "vtkTimerLog.h"
+
+#include <math.h>
+
+vtkStandardNewMacro(vtkSMPContourGridManyPieces);
+
+// Construct object with initial range (0,1) and single contour value
+// of 0.0.
+vtkSMPContourGridManyPieces::vtkSMPContourGridManyPieces()
+{
+}
+
+vtkSMPContourGridManyPieces::~vtkSMPContourGridManyPieces()
+{
+}
+
+namespace
+{
+// This functor creates a new vtkPolyData piece each time it runs.
+// This is less efficient than the one-piece-per-thread functor used by
+// vtkSMPContourGrid, but it can be used to generate more pieces to exploit
+// coarse-grained parallelism downstream.
+template <typename T>
+class vtkContourGridManyPiecesFunctor
+{
+  vtkSMPContourGridManyPieces* Filter;
+
+  vtkUnstructuredGrid* Input;
+  vtkDataArray* InScalars;
+
+  vtkMultiBlockDataSet* Output;
+
+  int NumValues;
+  double* Values;
+
+  vtkSMPThreadLocal<std::vector<vtkPolyData*> > Outputs;
+
+public:
+
+  vtkContourGridManyPiecesFunctor(vtkSMPContourGridManyPieces* filter,
+                         vtkUnstructuredGrid* input,
+                         vtkDataArray* inScalars,
+                         int numValues,
+                         double* values,
+                         vtkMultiBlockDataSet* output) : Filter(filter),
+                                                         Input(input),
+                                                         InScalars(inScalars),
+                                                         Output(output),
+                                                         NumValues(numValues),
+                                                         Values(values)
+  {
+  }
+
+  ~vtkContourGridManyPiecesFunctor()
+  {
+  }
+
+  void Initialize()
+  {
+  }
+
+
+  void operator()(vtkIdType begin, vtkIdType end)
+  {
+    vtkNew<vtkPolyData> output;
+
+    vtkNew<vtkPoints> newPts;
+
+    // set precision for the points in the output
+    if(this->Filter->GetOutputPointsPrecision() == vtkAlgorithm::DEFAULT_PRECISION)
+      {
+      newPts->SetDataType(this->Input->GetPoints()->GetDataType());
+      }
+    else if(this->Filter->GetOutputPointsPrecision() == vtkAlgorithm::SINGLE_PRECISION)
+      {
+      newPts->SetDataType(VTK_FLOAT);
+      }
+    else if(this->Filter->GetOutputPointsPrecision() == vtkAlgorithm::DOUBLE_PRECISION)
+      {
+      newPts->SetDataType(VTK_DOUBLE);
+      }
+
+    output->SetPoints(newPts.GetPointer());
+
+    vtkIdType numCells = this->Input->GetNumberOfCells();
+
+    vtkIdType estimatedSize=static_cast<vtkIdType>(
+      pow(static_cast<double>(numCells),.75));
+    estimatedSize = estimatedSize / 1024 * 1024; //multiple of 1024
+    if (estimatedSize < 1024)
+      {
+      estimatedSize = 1024;
+      }
+
+    newPts->Allocate(estimatedSize, estimatedSize);
+
+    // vtkNew<vtkNonMergingPointLocator> locator;
+    // locator->SetPoints(newPts.GetPointer());
+
+    vtkNew<vtkMergePoints> locator;
+    locator->InitPointInsertion (newPts.GetPointer(),
+                                 this->Input->GetBounds(),
+                                 this->Input->GetNumberOfPoints());
+
+    // vtkNew<vtkPointLocator> locator;
+    // locator->InitPointInsertion (newPts.GetPointer(),
+    //                              this->Input->GetBounds(),
+    //                              this->Input->GetNumberOfPoints());
+
+    vtkNew<vtkCellArray> newVerts;
+    newVerts->Allocate(estimatedSize,estimatedSize);
+
+    vtkNew<vtkCellArray> newLines;
+    newLines->Allocate(estimatedSize,estimatedSize);
+
+    vtkNew<vtkCellArray> newPolys;
+    newPolys->Allocate(estimatedSize,estimatedSize);
+
+    vtkSmartPointer<vtkDataArray> cellScalars;
+    cellScalars.TakeReference(this->InScalars->NewInstance());
+    cellScalars->SetNumberOfComponents(this->InScalars->GetNumberOfComponents());
+    cellScalars->Allocate(VTK_CELL_SIZE*this->InScalars->GetNumberOfComponents());
+
+    vtkPointData* outPd = output->GetPointData();
+    vtkCellData* outCd = output->GetCellData();
+    vtkPointData* inPd = this->Input->GetPointData();
+    vtkCellData* inCd = this->Input->GetCellData();
+    outPd->InterpolateAllocate(inPd, estimatedSize, estimatedSize);
+    outCd->CopyAllocate(inCd, estimatedSize, estimatedSize);
+
+    vtkNew<vtkGenericCell> cell;
+
+    const double* values = this->Values;
+    int numValues = this->NumValues;
+
+    vtkNew<vtkIdList> pids;
+    T range[2];
+
+    for (vtkIdType cellid=begin; cellid<end; cellid++)
+      {
+      this->Input->GetCellPoints(cellid, pids.GetPointer());
+      cellScalars->SetNumberOfTuples(pids->GetNumberOfIds());
+      this->InScalars->GetTuples(pids.GetPointer(), cellScalars);
+      int numCellScalars = cellScalars->GetNumberOfComponents()
+        * cellScalars->GetNumberOfTuples();
+      T* cellScalarPtr = static_cast<T*>(cellScalars->GetVoidPointer(0));
+
+      //find min and max values in scalar data
+      range[0] = range[1] = cellScalarPtr[0];
+
+      for (T *it = cellScalarPtr + 1, *itEnd = cellScalarPtr + numCellScalars;
+           it != itEnd;
+           ++it)
+        {
+        if (*it <= range[0])
+          {
+          range[0] = *it;
+          } //if scalar <= min range value
+        if (*it >= range[1])
+          {
+          range[1] = *it;
+          } //if scalar >= max range value
+        } // for all cellScalars
+
+      bool needCell = false;
+      for (int i = 0; i < numValues; i++)
+        {
+        if ((values[i] >= range[0]) && (values[i] <= range[1]))
+            {
+            needCell = true;
+            } // if contour value in range for this cell
+          } // end for numContours
+
+      if (needCell)
+          {
+          this->Input->GetCell(cellid, cell.GetPointer());
+
+          for (int i=0; i < numValues; i++)
+            {
+            if ((values[i] >= range[0]) && (values[i] <= range[1]))
+              {
+              cell->Contour(values[i],
+                            cellScalars,
+                            locator.GetPointer(),
+                            newVerts.GetPointer(),
+                            newLines.GetPointer(),
+                            newPolys.GetPointer(),
+                            inPd,
+                            outPd,
+                            inCd,
+                            cellid,
+                            outCd);
+              }
+            }
+          }
+      }
+
+    if (newVerts->GetNumberOfCells())
+      {
+      output->SetVerts(newVerts.GetPointer());
+      }
+
+    if (newLines->GetNumberOfCells())
+      {
+      output->SetLines(newLines.GetPointer());
+      }
+
+    if (newPolys->GetNumberOfCells())
+      {
+      output->SetPolys(newPolys.GetPointer());
+      }
+
+    output->Squeeze();
+
+    output->Register(0);
+    this->Outputs.Local().push_back(output.GetPointer());
+  }
+
+  void Reduce()
+  {
+    vtkNew<vtkMultiPieceDataSet> mp;
+    int count = 0;
+
+    vtkSMPThreadLocal<std::vector<vtkPolyData*> >::iterator outIter =
+      this->Outputs.begin();
+    while(outIter != this->Outputs.end())
+      {
+      std::vector<vtkPolyData*>& outs = *outIter;
+      std::vector<vtkPolyData*>::iterator iter = outs.begin();
+      while (iter != outs.end())
+        {
+        mp->SetPiece(count++, *iter);
+        (*iter)->Delete();
+        iter++;
+        }
+      ++outIter;
+      }
+
+    this->Output->SetBlock(0, mp.GetPointer());
+  }
+};
+
+}
+
+int vtkSMPContourGridManyPieces::RequestData(
+  vtkInformation *vtkNotUsed(request),
+  vtkInformationVector **inputVector,
+  vtkInformationVector *outputVector)
+{
+  // get the input and output
+  vtkUnstructuredGrid *input = vtkUnstructuredGrid::GetData(inputVector[0]);
+  vtkMultiBlockDataSet *output = vtkMultiBlockDataSet::GetData(outputVector);
+
+  if (input->GetNumberOfCells() == 0)
+    {
+    return 1;
+    }
+
+  vtkDataArray* inScalars = this->GetInputArrayToProcess(0,inputVector);
+  if (!inScalars)
+    {
+    return 1;
+    }
+
+  // Not thread safe so calculate first.
+  input->GetBounds();
+
+  int numContours = this->GetNumberOfContours();
+  if (numContours < 1)
+    {
+    return 1;
+    }
+
+  double *values=this->GetValues();
+
+  vtkIdType numCells = input->GetNumberOfCells();
+
+  // When using vtkContourGridManyPiecesFunctor, it is crucial to set the grain
+  // right. When the grain is too small, which tends to be the default,
+  // the overhead of allocating data structures, building locators etc.
+  // ends up being too big.
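+  // For example, with numCells = 1,000,000 the grain below becomes
+  // numCells / 100 = 10,000, so vtkSMPTools::For() makes on the order of 100
+  // functor invocations and hence roughly 100 output pieces (the exact count
+  // depends on how the backend splits the range).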
+  if (inScalars->GetDataType() == VTK_FLOAT)
+    {
+    vtkContourGridManyPiecesFunctor<float> functor(this, input, inScalars, numContours, values, output);
+    vtkIdType grain = numCells > 100000 ? numCells / 100 : numCells;
+    vtkSMPTools::For(0, numCells, grain, functor);
+    }
+  else if(inScalars->GetDataType() == VTK_DOUBLE)
+    {
+    vtkContourGridManyPiecesFunctor<double> functor(this, input, inScalars, numContours, values, output);
+    vtkIdType grain = numCells > 100000 ? numCells / 100 : numCells;
+    vtkSMPTools::For(0, numCells, grain, functor);
+    }
+
+  return 1;
+}
+
+int vtkSMPContourGridManyPieces::FillOutputPortInformation(
+  int vtkNotUsed(port), vtkInformation* info)
+{
+  // now add our info
+  info->Set(vtkDataObject::DATA_TYPE_NAME(), "vtkMultiBlockDataSet");
+  return 1;
+}
+
+void vtkSMPContourGridManyPieces::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
diff --git a/Filters/SMP/vtkSMPContourGridManyPieces.h b/Filters/SMP/vtkSMPContourGridManyPieces.h
new file mode 100644
index 0000000..217421c
--- /dev/null
+++ b/Filters/SMP/vtkSMPContourGridManyPieces.h
@@ -0,0 +1,54 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPContourGridManyPieces.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPContourGridManyPieces - a subclass of vtkContourGrid that works in parallel
+// .SECTION Description
+// vtkSMPContourGridManyPieces performs the same functionality as vtkContourGrid but does
+// it using multiple threads. This filter generates a multi-block of vtkPolyData. It
+// will generate a relatively large number of pieces - the number depends on
+// the input size and the number of threads available. See vtkSMPContourGrid if you are
+// interested in a filter that merges the pieces. This will probably be merged with
+// vtkContourGrid in the future.
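+//
+// A minimal sketch of consuming the output (hypothetical variable names,
+// mirroring the composite-data iteration used in the SMP tests):
+//
+//   vtkCompositeDataSet* cds =
+//     vtkCompositeDataSet::SafeDownCast(filter->GetOutputDataObject(0));
+//   vtkCompositeDataIterator* it = cds->NewIterator();
+//   for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextItem())
+//     {
+//     vtkPolyData* piece = vtkPolyData::SafeDownCast(it->GetCurrentDataObject());
+//     if (piece) { /* process each piece independently here */ }
+//     }
+//   it->Delete();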
+
+#ifndef __vtkSMPContourGridManyPieces_h
+#define __vtkSMPContourGridManyPieces_h
+
+#include "vtkFiltersSMPModule.h" // For export macro
+#include "vtkContourGrid.h"
+
+class vtkPolyData;
+
+class VTKFILTERSSMP_EXPORT vtkSMPContourGridManyPieces : public vtkContourGrid
+{
+public:
+  vtkTypeMacro(vtkSMPContourGridManyPieces,vtkContourGrid);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Constructor.
+  static vtkSMPContourGridManyPieces *New();
+
+protected:
+  vtkSMPContourGridManyPieces();
+  ~vtkSMPContourGridManyPieces();
+
+  virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
+
+  virtual int FillOutputPortInformation(int port, vtkInformation* info);
+
+private:
+  vtkSMPContourGridManyPieces(const vtkSMPContourGridManyPieces&);  // Not implemented.
+  void operator=(const vtkSMPContourGridManyPieces&);  // Not implemented.
+};
+
+#endif
diff --git a/Filters/SMP/vtkSMPMergePoints.cxx b/Filters/SMP/vtkSMPMergePoints.cxx
new file mode 100644
index 0000000..b7ebde8
--- /dev/null
+++ b/Filters/SMP/vtkSMPMergePoints.cxx
@@ -0,0 +1,175 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPMergePoints.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+  This software is distributed WITHOUT ANY WARRANTY; without even
+  the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+  PURPOSE.  See the above copyright notice for more information.
+
+  =========================================================================*/
+
+#include "vtkSMPMergePoints.h"
+#include "vtkPoints.h"
+#include "vtkIdList.h"
+#include "vtkObjectFactory.h"
+#include "vtkFloatArray.h"
+#include "vtkPointData.h"
+
+//------------------------------------------------------------------------------
+vtkStandardNewMacro(vtkSMPMergePoints)
+
+//------------------------------------------------------------------------------
+vtkSMPMergePoints::vtkSMPMergePoints()
+{
+}
+
+//------------------------------------------------------------------------------
+vtkSMPMergePoints::~vtkSMPMergePoints()
+{
+}
+
+//------------------------------------------------------------------------------
+void vtkSMPMergePoints::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+}
+
+//------------------------------------------------------------------------------
+void vtkSMPMergePoints::InitializeMerge()
+{
+  this->AtomicInsertionId = this->InsertionPointId;
+}
+
+//------------------------------------------------------------------------------
+void vtkSMPMergePoints::Merge(vtkSMPMergePoints* locator,
+                              vtkIdType idx,
+                              vtkPointData* outPd,
+                              vtkPointData* ptData,
+                              vtkIdList* idList )
+{
+  if ( !locator->HashTable[idx] )
+    {
+    return;
+    }
+
+  vtkIdType i;
+  vtkIdList *bucket, *oldIdToMerge;
+  vtkFloatArray* floatOldDataArray = 0;
+
+  if ( !(bucket = this->HashTable[idx]) )
+    {
+    this->HashTable[idx] = bucket = vtkIdList::New();
+    bucket->Allocate( this->NumberOfPointsPerBucket/2,
+                      this->NumberOfPointsPerBucket/3 );
+    oldIdToMerge = locator->HashTable[idx];
+    oldIdToMerge->Register( this );
+    if ( this->Points->GetData()->GetDataType() == VTK_FLOAT )
+      {
+      floatOldDataArray = static_cast<vtkFloatArray*>( locator->Points->GetData() );
+      }
+    }
+  else
+    {
+    oldIdToMerge = vtkIdList::New();
+
+    int nbOfIds = bucket->GetNumberOfIds ();
+    int nbOfOldIds = locator->HashTable[idx]->GetNumberOfIds();
+    oldIdToMerge->Allocate( nbOfOldIds );
+
+    vtkDataArray *dataArray = this->Points->GetData();
+    vtkDataArray *oldDataArray = locator->Points->GetData();
+    vtkIdType *idArray = bucket->GetPointer(0);
+    vtkIdType *idOldArray = locator->HashTable[idx]->GetPointer(0);
+
+    bool found;
+
+    if (dataArray->GetDataType() == VTK_FLOAT)
+      {
+      vtkFloatArray* floatDataArray = static_cast<vtkFloatArray*>(dataArray);
+      floatOldDataArray = static_cast<vtkFloatArray*>(oldDataArray);
+
+      for ( int oldIdIdx = 0; oldIdIdx < nbOfOldIds; ++oldIdIdx )
+        {
+        found = false;
+        vtkIdType oldId = idOldArray[oldIdIdx];
+        float *x = floatOldDataArray->GetPointer(0) + 3*oldId;
+        float *pt;
+        for ( i=0; i < nbOfIds; i++ )
+          {
+          vtkIdType existingId = idArray[i];
+          pt = floatDataArray->GetPointer(0) + 3*existingId;
+          if ( x[0] == pt[0] && x[1] == pt[1] && x[2] == pt[2] )
+            {
+            // point is already in the output; record the old->new id mapping
+            found = true;
+            idList->SetId( oldId, existingId );
+            break;
+            }
+          }
+        if ( !found )
+          {
+          oldIdToMerge->InsertNextId( oldId );
+          }
+        }
+      }
+    else
+      {
+      for ( int oldIdIdx = 0; oldIdIdx < nbOfOldIds; ++oldIdIdx )
+        {
+        found = false;
+        vtkIdType oldId = idOldArray[oldIdIdx];
+        double *x = oldDataArray->GetTuple( oldId );
+        double *pt;
+        for ( i=0; i < nbOfIds; i++ )
+          {
+          vtkIdType existingId = idArray[i];
+          pt = dataArray->GetTuple( existingId );
+          if ( x[0] == pt[0] && x[1] == pt[1] && x[2] == pt[2] )
+            {
+            // point is already in the output; record the old->new id mapping
+            found = true;
+            idList->SetId( oldId, existingId );
+            break;
+            }
+          }
+        if ( !found )
+          {
+          oldIdToMerge->InsertNextId( oldId );
+          }
+        }
+      }
+    }
+
+  // points have to be added
+  vtkIdType NumberOfInsertions = oldIdToMerge->GetNumberOfIds();
+  vtkIdType first_id = (this->AtomicInsertionId += NumberOfInsertions);
+  bucket->Resize( bucket->GetNumberOfIds() + NumberOfInsertions );
+  for ( i = 0; i < NumberOfInsertions; ++i )
+    {
+    vtkIdType newId = first_id + i, oldId = oldIdToMerge->GetId( i );
+    idList->SetId( oldId, newId );
+    bucket->InsertNextId( newId );
+    if ( floatOldDataArray )
+      {
+      const float *pt = floatOldDataArray->GetPointer(0) + 3*oldId;
+      this->Points->SetPoint( newId, pt );
+      }
+    else
+      {
+      this->Points->SetPoint( newId, locator->Points->GetPoint( oldId ) );
+      }
+    outPd->SetTuple( newId, oldId, ptData );
+    }
+  oldIdToMerge->UnRegister( this );
+}
+
+//------------------------------------------------------------------------------
+void vtkSMPMergePoints::FixSizeOfPointArray()
+{
+  this->Points->SetNumberOfPoints(this->AtomicInsertionId);
+}
diff --git a/Filters/SMP/vtkSMPMergePoints.h b/Filters/SMP/vtkSMPMergePoints.h
new file mode 100644
index 0000000..b1b0987
--- /dev/null
+++ b/Filters/SMP/vtkSMPMergePoints.h
@@ -0,0 +1,109 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPMergePoints.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPMergePoints - Class designed to help with merging of points in parallel
+// .SECTION Description
+// vtkSMPMergePoints is a subclass of vtkMergePoints designed to help
+// with merging of points generated by using multiple locators in parallel.
+// Its main functionality is provided by the Merge function. It also
+// has a few additional convenience functions.
+// Merge is thread safe as long as no two threads are merging the same
+// bin. The common way of using vtkSMPMergePoints is (see the sketch below):
+//  - Initialize with outLocator->InitializeMerge()
+//  - Allocate points with outLocator->GetPoints()->Resize(numPts) (numPts should be >= total number of points)
+//  - Do a bunch of merging with outLocator->Merge(inLocator[i], ...) (this can be done in parallel as long as no two bins are done at the same time)
+//  - Fix the size of points with outLocator->FixSizeOfPointArray()
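+//
+// A minimal sketch of that sequence (hypothetical names: nLocators per-thread
+// input locators with identical binning structure, inPd[i] their point data,
+// idMap[i] pre-sized id lists, outPd the merged point data):
+//
+//   outLocator->InitializeMerge();
+//   outLocator->GetPoints()->Resize(numPts); // numPts >= total number of points
+//   for (vtkIdType bin = 0; bin < outLocator->GetNumberOfBuckets(); ++bin)
+//     {
+//     // Bins can be distributed over threads, one bin per thread at a time.
+//     for (int i = 0; i < nLocators; ++i)
+//       {
+//       outLocator->Merge(inLocator[i], bin, outPd, inPd[i], idMap[i]);
+//       }
+//     }
+//   outLocator->FixSizeOfPointArray();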
+
+#ifndef __vtkSMPMergePoints_h__
+#define __vtkSMPMergePoints_h__
+
+#include "vtkFiltersSMPModule.h" // For export macro
+#include "vtkMergePoints.h"
+#include "vtkIdList.h" // For inline functions
+#include "vtkAtomicInt.h" // For the atomic integer used in Merge()
+
+class vtkPointData;
+
+class VTKFILTERSSMP_EXPORT vtkSMPMergePoints : public vtkMergePoints
+{
+public:
+  vtkTypeMacro(vtkSMPMergePoints, vtkMergePoints);
+  static vtkSMPMergePoints* New();
+  void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // This should be called from 1 thread before any call to Merge.
+  void InitializeMerge();
+
+  // Description:
+  // Merge the points of one of the bins from the given locator to
+  // the same bin of the current locator. Note that this requires that
+  // the two locators have identical binning structures. This also
+  // merges point data given in the inPD argument to the outPd.
+  // Furthermore, it generates a map of the old ids of the input locator
+  // to the new ids. This is stored in the idList argument. The map
+  // is idList[oldId] = newId.
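+  // For example, if point 7 of the input locator ends up as point 42 of this
+  // locator after merging, then idList->GetId(7) == 42.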
+  void Merge(vtkSMPMergePoints* locator,
+             vtkIdType idx,
+             vtkPointData *outPd,
+             vtkPointData *inPd,
+             vtkIdList* idList);
+
+  // Description:
+  // At the end of the merge, this can be called to set the MaxId of the
+  // points array to the maximum id in the locator. The current design
+  // usage is as follows:
+  //  - Allocate points with points->Resize(numPts). NumPts should be >= total number of points
+  //  - Do a bunch of merging with outLocator->Merge(inLocator[i], ...)
+  //  - Fix the size of points with outLocator->FixSizeOfPointArray()
+  void FixSizeOfPointArray();
+
+  // Description:
+  // Returns the biggest id in the locator.
+  vtkIdType GetMaxId()
+  {
+    return this->AtomicInsertionId - 1;
+  }
+
+  // Description:
+  // Returns the number of points in a bin.
+  vtkIdType GetNumberOfIdsInBucket(vtkIdType idx)
+  {
+    if ( !this->HashTable )
+      {
+      return 0;
+      }
+    vtkIdList* bucket = this->HashTable[idx];
+    return bucket ? bucket->GetNumberOfIds() : 0;
+  }
+
+  // Description:
+  // Returns the number of bins.
+  vtkIdType GetNumberOfBuckets()
+  {
+    return this->NumberOfBuckets;
+  }
+
+protected:
+  vtkSMPMergePoints();
+  ~vtkSMPMergePoints();
+
+  vtkAtomicInt<vtkIdType> AtomicInsertionId;
+
+private:
+  vtkSMPMergePoints(const vtkSMPMergePoints&); // Not implemented
+  void operator=(const vtkSMPMergePoints&); // Not implemented
+};
+
+#endif // __vtkSMPMergePoints_h__
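
For reference, below is a minimal sketch of the usage pattern described in the vtkSMPMergePoints class comment above. It is a serial rendition only: in VTK itself the per-bin Merge() calls are issued from a vtkSMPTools functor (see vtkSMPMergePolyDataHelper.cxx further down). The function name and the per-thread containers are illustrative, not part of the VTK API.

  #include <vector>
  #include "vtkIdList.h"
  #include "vtkNew.h"
  #include "vtkPointData.h"
  #include "vtkSMPMergePoints.h"

  // Serial sketch of the merge pattern: the first per-thread locator
  // accumulates all the others, following the steps from the class comment
  // (InitializeMerge, Resize, Merge per bin, FixSizeOfPointArray).
  void MergeLocatorsSerial(const std::vector<vtkSMPMergePoints*>& locators,
                           const std::vector<vtkPointData*>& pointDatas,
                           vtkIdType totalNumPts)
  {
    // All locators must have been built with an identical binning structure.
    vtkSMPMergePoints* outLocator = locators[0];
    vtkPointData* outPD = pointDatas[0];

    outLocator->InitializeMerge();
    outLocator->GetPoints()->Resize(totalNumPts); // must be >= total point count

    vtkIdType numBuckets = outLocator->GetNumberOfBuckets();
    for (size_t i = 1; i < locators.size(); ++i)
      {
      vtkNew<vtkIdList> idMap; // idMap[oldId] = newId; a real filter keeps this
                               // around to remap its cell connectivity later
      idMap->Allocate(locators[i]->GetPoints()->GetNumberOfPoints());
      for (vtkIdType bin = 0; bin < numBuckets; ++bin)
        {
        if (locators[i]->GetNumberOfIdsInBucket(bin) > 0)
          {
          outLocator->Merge(locators[i], bin, outPD, pointDatas[i],
                            idMap.GetPointer());
          }
        }
      }
    outLocator->FixSizeOfPointArray(); // fixes the MaxId of the points array
  }
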
diff --git a/Filters/SMP/vtkSMPMergePolyDataHelper.cxx b/Filters/SMP/vtkSMPMergePolyDataHelper.cxx
new file mode 100644
index 0000000..f78d96e
--- /dev/null
+++ b/Filters/SMP/vtkSMPMergePolyDataHelper.cxx
@@ -0,0 +1,436 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkContourGrid.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSMPMergePolyDataHelper.h"
+
+#include "vtkCellArray.h"
+#include "vtkCellData.h"
+#include "vtkDataArray.h"
+#include "vtkNew.h"
+#include "vtkPointData.h"
+#include "vtkPolyData.h"
+#include "vtkSmartPointer.h"
+#include "vtkSMPTools.h"
+#include "vtkSMPMergePoints.h"
+
+namespace
+{
+
+struct vtkMergePointsData
+{
+  vtkPolyData* Output;
+  vtkSMPMergePoints* Locator;
+
+  vtkMergePointsData(vtkPolyData* output, vtkSMPMergePoints* locator) :
+    Output(output), Locator(locator)
+    {
+    }
+};
+
+class vtkParallelMergePoints
+{
+public:
+  vtkIdType* BucketIds;
+  std::vector<vtkMergePointsData>::iterator Begin;
+  std::vector<vtkMergePointsData>::iterator End;
+  vtkSMPMergePoints* Merger;
+  vtkIdList** IdMaps;
+  vtkPointData* OutputPointData;
+  vtkPointData** InputPointDatas;
+
+  void operator()(vtkIdType begin, vtkIdType end)
+  {
+    // All actual work is done by vtkSMPMergePoints::Merge
+    std::vector<vtkMergePointsData>::iterator itr;
+    vtkPointData* outPD = this->OutputPointData;
+
+    vtkIdType counter = 0;
+    for (itr = Begin; itr != End; ++itr)
+      {
+      vtkIdList* idMap = this->IdMaps[counter];
+      vtkPointData* inPD = this->InputPointDatas[counter++];
+      for (vtkIdType i = begin; i < end; i++)
+        {
+        vtkIdType bucketId = BucketIds[i];
+        if ((*itr).Locator->GetNumberOfIdsInBucket(bucketId) > 0)
+          {
+          Merger->Merge((*itr).Locator, bucketId, outPD, inPD, idMap);
+          }
+        }
+      }
+  }
+};
+
+void MergePoints(std::vector<vtkMergePointsData>& data,
+                 std::vector<vtkIdList*>& idMaps,
+                 vtkPolyData* outPolyData)
+{
+  // This merges points in parallel.
+
+  std::vector<vtkMergePointsData>::iterator itr = data.begin();
+  std::vector<vtkMergePointsData>::iterator begin = itr;
+  std::vector<vtkMergePointsData>::iterator end = data.end();
+  vtkPoints* outPts = (*itr).Output->GetPoints();
+
+  // Prepare output points
+  vtkIdType numPts = 0;
+  while(itr != end)
+    {
+    numPts += (*itr).Output->GetNumberOfPoints();
+    ++itr;
+    }
+  outPts->Resize(numPts);
+
+  // Find non-empty buckets for best load balancing. We don't
+  // want to visit a bunch of empty buckets.
+  vtkIdType numBuckets = (*begin).Locator->GetNumberOfBuckets();
+  std::vector<vtkIdType> nonEmptyBuckets;
+  std::vector<bool> bucketVisited(numBuckets, false);
+  nonEmptyBuckets.reserve(numBuckets);
+  for (itr = begin; itr != end; ++itr)
+    {
+    vtkSMPMergePoints* mp = (*itr).Locator;
+    for (vtkIdType i = 0; i < numBuckets; i++)
+      {
+      if (mp->GetNumberOfIdsInBucket(i) > 0 && !bucketVisited[i])
+        {
+        nonEmptyBuckets.push_back(i);
+        bucketVisited[i] = true;
+        }
+      }
+    }
+
+  // These id maps will later be used when merging cells.
+  std::vector<vtkPointData*> pds;
+  itr = begin;
+  ++itr;
+  while (itr != end)
+    {
+    pds.push_back((*itr).Output->GetPointData());
+    vtkIdList* idMap = vtkIdList::New();
+    idMap->Allocate((*itr).Output->GetNumberOfPoints());
+    idMaps.push_back(idMap);
+    ++itr;
+    }
+
+  vtkParallelMergePoints mergePoints;
+  mergePoints.BucketIds = &nonEmptyBuckets[0];
+  mergePoints.Merger = (*begin).Locator;
+  mergePoints.OutputPointData = (*begin).Output->GetPointData();
+  if (!idMaps.empty())
+    {
+    mergePoints.Merger->InitializeMerge();
+    mergePoints.IdMaps = &idMaps[0];
+    // Prepare output point data
+    int numArrays = mergePoints.OutputPointData->GetNumberOfArrays();
+    for (int i=0; i<numArrays; i++)
+      {
+      mergePoints.OutputPointData->GetArray(i)->Resize(numPts);
+      }
+    mergePoints.InputPointDatas = &pds[0];
+
+    // The first locator is used to accumulate all the others,
+    // so all iteration starts from the second dataset.
+    std::vector<vtkMergePointsData>::iterator second = begin;
+    ++second;
+    mergePoints.Begin = second;
+    mergePoints.End = end;
+    // Actual work
+    vtkSMPTools::For(0, nonEmptyBuckets.size(), mergePoints);
+    //mergePoints.operator()(0, nonEmptyBuckets.size());
+
+    // Fixup output sizes.
+    mergePoints.Merger->FixSizeOfPointArray();
+    for (int i=0; i<numArrays; i++)
+      {
+      mergePoints.OutputPointData->GetArray(i)->SetNumberOfTuples(mergePoints.Merger->GetMaxId()+1);
+      }
+    }
+  outPolyData->SetPoints(mergePoints.Merger->GetPoints());
+  outPolyData->GetPointData()->ShallowCopy(mergePoints.OutputPointData);
+}
+
+class vtkParallelMergeCells
+{
+public:
+  vtkIdList* Offsets;
+  vtkCellArray* InCellArray;
+  vtkIdTypeArray* OutCellArray;
+  vtkIdType OutputOffset;
+  vtkIdList* IdMap;
+
+  void operator()(vtkIdType begin, vtkIdType end)
+  {
+    vtkIdType noffsets = this->Offsets->GetNumberOfIds();
+    vtkIdList* offsets = this->Offsets;
+    vtkIdTypeArray* outCellArray = this->OutCellArray;
+    vtkCellArray* inCellArray = this->InCellArray;
+    vtkIdType outputOffset = this->OutputOffset;
+    vtkIdList* map = this->IdMap;
+
+    vtkNew<vtkIdList> cellIds;
+    for (vtkIdType i=begin; i<end; i++)
+      {
+      // Note that there may be multiple cells starting at
+      // this offset. So we find the next offset and insert
+      // all cells between here and there.
+      vtkIdType nextOffset;
+      if (i == noffsets - 1) // This must be the end of the whole array, not the end of this loop's range
+        {
+        nextOffset = this->InCellArray->GetNumberOfConnectivityEntries();
+        }
+      else
+        {
+        nextOffset = offsets->GetId(i+1);
+        }
+      // Process all cells between the given offset and the next.
+      vtkIdType cellOffset = offsets->GetId(i);
+      while (cellOffset < nextOffset)
+        {
+        inCellArray->GetCell(cellOffset, cellIds.GetPointer());
+        vtkIdType nids = cellIds->GetNumberOfIds();
+        // Insert the cells - first number of points and ids
+        outCellArray->SetValue(outputOffset + cellOffset, nids);
+        cellOffset++;
+        for (int j=0; j<nids; j++)
+          {
+          // Now insert each id. First map it through the map generated by the merging
+          // of the points
+          outCellArray->SetValue(outputOffset + cellOffset, map->GetId(cellIds->GetId(j)));
+          cellOffset++;
+          }
+        }
+      }
+  }
+};
+
+class vtkParallelCellDataCopier
+{
+public:
+  vtkDataSetAttributes* InputCellData;
+  vtkDataSetAttributes* OutputCellData;
+  vtkIdType Offset;
+
+  void operator()(vtkIdType begin, vtkIdType end)
+  {
+    vtkDataSetAttributes* inputCellData = this->InputCellData;
+    vtkDataSetAttributes* outputCellData = this->OutputCellData;
+    vtkIdType offset = this->Offset;
+
+    for (vtkIdType i=begin; i<end; i++)
+      {
+      outputCellData->SetTuple(offset + i, i, inputCellData);
+      }
+  }
+};
+
+struct vtkMergeCellsData
+{
+  vtkPolyData* Output;
+  vtkIdList* CellOffsets;
+  vtkCellArray* OutCellArray;
+
+  vtkMergeCellsData(vtkPolyData* output, vtkIdList* celloffsets, vtkCellArray* cellarray) :
+    Output(output), CellOffsets(celloffsets), OutCellArray(cellarray)
+    {
+    }
+};
+
+void MergeCells(std::vector<vtkMergeCellsData>& data,
+                const std::vector<vtkIdList*>& idMaps,
+                vtkIdType numCells,
+                vtkIdType cellDataOffset,
+                vtkCellArray* outCells)
+{
+  std::vector<vtkMergeCellsData>::iterator begin = data.begin();
+  std::vector<vtkMergeCellsData>::iterator itr;
+  std::vector<vtkMergeCellsData>::iterator second = begin;
+  std::vector<vtkMergeCellsData>::iterator end = data.end();
+  ++second;
+
+  std::vector<vtkIdList*>::const_iterator mapIter = idMaps.begin();
+
+  vtkIdTypeArray* outCellsArray = outCells->GetData();
+
+  vtkIdType outCellsOffset = 0;
+
+  // Prepare output
+  vtkCellArray* firstCells = (*begin).OutCellArray;
+  vtkIdTypeArray* firstCellsArray = firstCells->GetData();
+  outCellsOffset += firstCells->GetNumberOfConnectivityEntries();
+  memcpy(outCellsArray->GetVoidPointer(0),
+         firstCellsArray->GetVoidPointer(0),
+         outCellsOffset*sizeof(vtkIdType));
+
+  vtkParallelMergeCells mergeCells;
+  mergeCells.OutCellArray = outCellsArray;
+
+  // The first locator is used to accumulate all the others,
+  // so all iteration starts from the second dataset.
+  vtkNew<vtkIdList> cellIds;
+  for (itr = second; itr != end; ++itr, ++mapIter)
+    {
+    mergeCells.Offsets = (*itr).CellOffsets;
+    mergeCells.InCellArray = (*itr).OutCellArray;
+    mergeCells.OutputOffset = outCellsOffset;
+    mergeCells.IdMap = *mapIter;
+
+    // First, we merge the cell arrays. This also adjusts point ids.
+    vtkSMPTools::For(0,  mergeCells.Offsets->GetNumberOfIds(), mergeCells);
+
+    outCellsOffset += (*itr).OutCellArray->GetNumberOfConnectivityEntries();
+    }
+
+  outCellsArray->SetNumberOfTuples(outCellsOffset);
+  outCells->SetNumberOfCells(numCells);
+
+  outCellsOffset = cellDataOffset + (*begin).OutCellArray->GetNumberOfCells();
+
+  // Now copy cell data in parallel
+  vtkParallelCellDataCopier cellCopier;
+  cellCopier.OutputCellData = (*begin).Output->GetCellData();
+  int numCellArrays = cellCopier.OutputCellData->GetNumberOfArrays();
+  if (numCellArrays > 0)
+    {
+    for (itr = second; itr != end; ++itr)
+      {
+      cellCopier.InputCellData = (*itr).Output->GetCellData();
+      cellCopier.Offset = outCellsOffset;
+      vtkCellArray* cells = (*itr).OutCellArray;
+
+      vtkSMPTools::For(0,  cells->GetNumberOfCells(), cellCopier);
+      //cellCopier.operator()(0, polys->GetNumberOfCells());
+
+      outCellsOffset += (*itr).Output->GetPolys()->GetNumberOfCells();
+      }
+    }
+}
+}
+
+vtkPolyData* vtkSMPMergePolyDataHelper::MergePolyData(std::vector<InputData>& inputs)
+{
+  // First merge points
+
+  std::vector<InputData>::iterator itr = inputs.begin();
+  std::vector<InputData>::iterator begin = itr;
+  std::vector<InputData>::iterator end = inputs.end();
+
+  std::vector<vtkMergePointsData> mpData;
+  while(itr != end)
+    {
+    mpData.push_back(vtkMergePointsData((*itr).Input, (*itr).Locator));
+    ++itr;
+    }
+
+  std::vector<vtkIdList*> idMaps;
+  vtkPolyData* outPolyData = vtkPolyData::New();
+
+  MergePoints(mpData, idMaps, outPolyData);
+
+  itr = begin;
+  vtkIdType vertSize = 0;
+  vtkIdType lineSize = 0;
+  vtkIdType polySize = 0;
+  vtkIdType numVerts = 0;
+  vtkIdType numLines = 0;
+  vtkIdType numPolys = 0;
+  std::vector<vtkMergeCellsData> mcData;
+  while(itr != end)
+    {
+    vertSize += (*itr).Input->GetVerts()->GetNumberOfConnectivityEntries();
+    lineSize += (*itr).Input->GetLines()->GetNumberOfConnectivityEntries();
+    polySize += (*itr).Input->GetPolys()->GetNumberOfConnectivityEntries();
+    numVerts += (*itr).Input->GetVerts()->GetNumberOfCells();
+    numLines += (*itr).Input->GetLines()->GetNumberOfCells();
+    numPolys += (*itr).Input->GetPolys()->GetNumberOfCells();
+    ++itr;
+    }
+
+  vtkIdType numOutCells = numVerts + numLines + numPolys;
+
+  vtkCellData* outCellData = (*begin).Input->GetCellData();
+  int numCellArrays = outCellData->GetNumberOfArrays();
+  for (int i=0; i<numCellArrays; i++)
+    {
+    outCellData->GetArray(i)->Resize(numOutCells);
+    outCellData->GetArray(i)->SetNumberOfTuples(numOutCells);
+    }
+
+  // Now merge each cell type. Because vtkPolyData stores each
+  // cell type separately, we need to merge them separately.
+
+  if (vertSize > 0)
+    {
+    vtkNew<vtkCellArray> outVerts;
+    outVerts->Allocate(vertSize);
+
+    itr = begin;
+    while(itr != end)
+    {
+    mcData.push_back(vtkMergeCellsData((*itr).Input, (*itr).VertOffsets, (*itr).Input->GetVerts()));
+    ++itr;
+    }
+    MergeCells(mcData, idMaps, numVerts, 0, outVerts.GetPointer());
+
+    outPolyData->SetVerts(outVerts.GetPointer());
+
+    mcData.clear();
+    }
+
+  if (lineSize > 0)
+    {
+    vtkNew<vtkCellArray> outLines;
+    outLines->Allocate(lineSize);
+
+    itr = begin;
+    while(itr != end)
+    {
+    mcData.push_back(vtkMergeCellsData((*itr).Input, (*itr).LineOffsets, (*itr).Input->GetLines()));
+    ++itr;
+    }
+    MergeCells(mcData, idMaps, numLines, vertSize, outLines.GetPointer());
+
+    outPolyData->SetLines(outLines.GetPointer());
+
+    mcData.clear();
+    }
+
+  if (polySize > 0)
+    {
+    vtkNew<vtkCellArray> outPolys;
+    outPolys->Allocate(polySize);
+
+    itr = begin;
+    while(itr != end)
+      {
+      mcData.push_back(vtkMergeCellsData((*itr).Input, (*itr).PolyOffsets, (*itr).Input->GetPolys()));
+      ++itr;
+      }
+    MergeCells(mcData, idMaps, numPolys, vertSize + lineSize, outPolys.GetPointer());
+
+    outPolyData->SetPolys(outPolys.GetPointer());
+    }
+
+  outPolyData->GetCellData()->ShallowCopy(outCellData);
+
+  std::vector<vtkIdList*>::iterator mapIter = idMaps.begin();
+  while (mapIter != idMaps.end())
+    {
+    (*mapIter)->Delete();
+    ++mapIter;
+    }
+
+  return outPolyData;
+
+}
diff --git a/Filters/SMP/vtkSMPMergePolyDataHelper.h b/Filters/SMP/vtkSMPMergePolyDataHelper.h
new file mode 100644
index 0000000..9728a50
--- /dev/null
+++ b/Filters/SMP/vtkSMPMergePolyDataHelper.h
@@ -0,0 +1,83 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPMergePolyDataHelper.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPMergePolyDataHelper - Utility class for merging poly data in parallel
+// This utility class helps with merging the poly data (and the associated
+// locators) generated by filters that produce multiple polydata outputs.
+// It requires that the filter uses vtkSMPMergePoints, which creates
+// a number of necessary data structures.
+
+#ifndef __vtkSMPMergePolyDataHelper_h
+#define __vtkSMPMergePolyDataHelper_h
+
+#include "vtkFiltersSMPModule.h"
+
+#include <vector>
+
+class vtkPolyData;
+class vtkSMPMergePoints;
+class vtkIdList;
+
+class VTKFILTERSSMP_EXPORT vtkSMPMergePolyDataHelper
+{
+public:
+
+  // Description:
+  // This is the data structure needed by the MergePolyData function.
+  // Each input is represented by a polydata (Input), a locator generated
+  // using identical binning structure (Locator) and offset structures
+  // for each vtkCellArray type. These offsets allow semi-random access
+  // to the cell arrays. They should store offsets to where cells start
+  // in the cell arrays. Each offset can be for 1 or more cells. The finer
+  // the granularity, the better the parallelism.
+  struct InputData
+  {
+    vtkPolyData* Input;
+    vtkSMPMergePoints* Locator;
+    vtkIdList* VertOffsets;
+    vtkIdList* LineOffsets;
+    vtkIdList* PolyOffsets;
+
+    InputData(vtkPolyData* input,
+              vtkSMPMergePoints* locator,
+              vtkIdList* vertOffsets,
+              vtkIdList* lineOffsets,
+              vtkIdList* polyOffsets) : Input(input),
+                                        Locator(locator),
+                                        VertOffsets(vertOffsets),
+                                        LineOffsets(lineOffsets),
+                                        PolyOffsets(polyOffsets)
+      {
+      }
+  };
+
+  // Description:
+  // Given a vector of vtkSMPMergePolyDataHelper::InputData, it merges
+  // them and returns a new vtkPolyData (which needs to be deleted by the
+  // caller). Note that this function uses the first input as a temporary
+  // merging target so it will be modified in place. If you need to preserve
+  // it, use DeepCopy before passing to MergePolyData.
+  static vtkPolyData* MergePolyData(std::vector<InputData>& inputs);
+
+protected:
+  vtkSMPMergePolyDataHelper();
+  ~vtkSMPMergePolyDataHelper();
+
+private:
+  vtkSMPMergePolyDataHelper(const vtkSMPMergePolyDataHelper&);  // Not implemented.
+  void operator=(const vtkSMPMergePolyDataHelper&);  // Not implemented.
+};
+
+#endif
+// VTK-HeaderTest-Exclude: vtkSMPMergePolyDataHelper.h
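
Below is a hedged sketch of how a filter might feed its per-thread outputs to MergePolyData, based only on the InputData structure declared above. The function name and the per-thread vectors are illustrative; the offset lists are the ones the filter is expected to record while building its cell arrays.

  #include <vector>
  #include "vtkIdList.h"
  #include "vtkPolyData.h"
  #include "vtkSMPMergePoints.h"
  #include "vtkSMPMergePolyDataHelper.h"

  // Sketch: each thread produced a vtkPolyData, the vtkSMPMergePoints locator
  // it inserted points through, and lists of offsets marking where cells start
  // in its vert/line/poly arrays (finer offsets give MergePolyData more
  // parallelism when merging cells).
  vtkPolyData* MergeThreadOutputs(
    const std::vector<vtkPolyData*>& pieces,
    const std::vector<vtkSMPMergePoints*>& locators,
    const std::vector<vtkIdList*>& vertOffsets,
    const std::vector<vtkIdList*>& lineOffsets,
    const std::vector<vtkIdList*>& polyOffsets)
  {
    std::vector<vtkSMPMergePolyDataHelper::InputData> inputs;
    for (size_t i = 0; i < pieces.size(); ++i)
      {
      inputs.push_back(vtkSMPMergePolyDataHelper::InputData(
        pieces[i], locators[i], vertOffsets[i], lineOffsets[i], polyOffsets[i]));
      }
    // The first piece is used as the merging target and is modified in place;
    // DeepCopy it beforehand if it must be preserved.
    return vtkSMPMergePolyDataHelper::MergePolyData(inputs); // caller must Delete()
  }
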
diff --git a/Filters/SMP/vtkSMPTransform.cxx b/Filters/SMP/vtkSMPTransform.cxx
new file mode 100644
index 0000000..13e8fd0
--- /dev/null
+++ b/Filters/SMP/vtkSMPTransform.cxx
@@ -0,0 +1,279 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPTransform.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkSMPTransform.h"
+#include "vtkDataArray.h"
+#include "vtkMath.h"
+#include "vtkMatrix4x4.h"
+#include "vtkPoints.h"
+#include "vtkObjectFactory.h"
+
+#include "vtkSMPTools.h"
+
+#include <stdlib.h>
+
+vtkStandardNewMacro(vtkSMPTransform);
+
+//----------------------------------------------------------------------------
+void vtkSMPTransform::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+}
+
+//------------------------------------------------------------------------
+template <class T1, class T2, class T3>
+inline void vtkSMPTransformPoint(T1 matrix[4][4],
+                                 T2 in[3], T3 out[3])
+{
+  T3 x = static_cast<T3>(
+    matrix[0][0]*in[0]+matrix[0][1]*in[1]+matrix[0][2]*in[2]+matrix[0][3]);
+  T3 y = static_cast<T3>(
+    matrix[1][0]*in[0]+matrix[1][1]*in[1]+matrix[1][2]*in[2]+matrix[1][3]);
+  T3 z = static_cast<T3>(
+    matrix[2][0]*in[0]+matrix[2][1]*in[1]+matrix[2][2]*in[2]+matrix[2][3]);
+
+  out[0] = x;
+  out[1] = y;
+  out[2] = z;
+}
+
+//------------------------------------------------------------------------
+template <class T1, class T2, class T3, class T4>
+inline void vtkSMPTransformDerivative(T1 matrix[4][4],
+                                      T2 in[3], T3 out[3],
+                                      T4 derivative[3][3])
+{
+  vtkSMPTransformPoint(matrix,in,out);
+
+  for (int i = 0; i < 3; i++)
+    {
+    derivative[0][i] = static_cast<T4>(matrix[0][i]);
+    derivative[1][i] = static_cast<T4>(matrix[1][i]);
+    derivative[2][i] = static_cast<T4>(matrix[2][i]);
+    }
+}
+
+//------------------------------------------------------------------------
+template <class T1, class T2, class T3>
+inline void vtkSMPTransformVector(T1 matrix[4][4],
+                                  T2 in[3], T3 out[3])
+{
+  T3 x = static_cast<T3>(
+    matrix[0][0]*in[0] + matrix[0][1]*in[1] + matrix[0][2]*in[2]);
+  T3 y = static_cast<T3>(
+    matrix[1][0]*in[0] + matrix[1][1]*in[1] + matrix[1][2]*in[2]);
+  T3 z = static_cast<T3>(
+    matrix[2][0]*in[0] + matrix[2][1]*in[1] + matrix[2][2]*in[2]);
+
+  out[0] = x;
+  out[1] = y;
+  out[2] = z;
+}
+
+//------------------------------------------------------------------------
+template <class T1, class T2, class T3>
+inline void vtkSMPTransformNormal(T1 mat[4][4],
+                                  T2 in[3], T3 out[3])
+{
+  // to transform the normal, multiply by the transposed inverse matrix
+  T1 matrix[4][4];
+  memcpy(*matrix,*mat,16*sizeof(T1));
+  vtkMatrix4x4::Invert(*matrix,*matrix);
+  vtkMatrix4x4::Transpose(*matrix,*matrix);
+
+  vtkSMPTransformVector(matrix,in,out);
+
+  vtkMath::Normalize(out);
+}
+
+//----------------------------------------------------------------------------
+// Transform the normals and vectors using the derivative of the
+// transformation.  Either inNms or inVrs can be set to NULL.
+// Normals are multiplied by the inverse transpose of the transform
+// derivative, while vectors are simply multiplied by the derivative.
+// Note that the derivative of the inverse transform is simply the
+// inverse of the derivative of the forward transform.
+class TranformAllFunctor
+{
+public:
+  vtkPoints* inPts;
+  vtkPoints* outPts;
+  vtkDataArray* inNms;
+  vtkDataArray* outNms;
+  vtkDataArray* inVcs;
+  vtkDataArray* outVcs;
+  double (*matrix)[4];
+  double (*matrixInvTr)[4];
+  void operator()( vtkIdType begin, vtkIdType end ) const
+  {
+    for (vtkIdType id=begin; id<end; id++)
+      {
+      double point[3];
+      inPts->GetPoint(id, point);
+      vtkSMPTransformPoint(matrix, point, point);
+      outPts->SetPoint(id, point);
+      if (inVcs)
+        {
+        inVcs->GetTuple(id, point);
+        vtkSMPTransformVector(matrix, point, point);
+        outVcs->SetTuple(id, point);
+        }
+      if (inNms)
+        {
+        inNms->GetTuple(id, point);
+        vtkSMPTransformVector(matrixInvTr, point, point);
+        vtkMath::Normalize( point );
+        outNms->SetTuple(id, point);
+        }
+      }
+  }
+};
+
+void vtkSMPTransform::TransformPointsNormalsVectors(vtkPoints *inPts,
+                                                    vtkPoints *outPts,
+                                                    vtkDataArray *inNms,
+                                                    vtkDataArray *outNms,
+                                                    vtkDataArray *inVrs,
+                                                    vtkDataArray *outVrs)
+{
+  vtkIdType n = inPts->GetNumberOfPoints();
+  double matrix[4][4];
+  this->Update();
+
+  TranformAllFunctor functor;
+  functor.inPts = inPts;
+  functor.outPts = outPts;
+  functor.inNms = inNms;
+  functor.outNms = outNms;
+  functor.inVcs = inVrs;
+  functor.outVcs = outVrs;
+  functor.matrix = this->Matrix->Element;
+  if (inNms)
+    {
+    vtkMatrix4x4::DeepCopy(*matrix,this->Matrix);
+    vtkMatrix4x4::Invert(*matrix,*matrix);
+    vtkMatrix4x4::Transpose(*matrix,*matrix);
+    functor.matrixInvTr = matrix;
+    }
+
+  vtkSMPTools::For( 0, n, functor );
+}
+
+//----------------------------------------------------------------------------
+class TransformPointsFunctor
+{
+public:
+  vtkPoints* inPts;
+  vtkPoints* outPts;
+  double (*matrix)[4];
+  void operator () ( vtkIdType begin, vtkIdType end ) const
+  {
+    for (vtkIdType id=begin; id<end; id++)
+      {
+      double point[3];
+      inPts->GetPoint( id, point );
+      vtkSMPTransformPoint( matrix, point, point );
+      outPts->SetPoint( id, point );
+      }
+  }
+};
+
+void vtkSMPTransform::TransformPoints(vtkPoints *inPts,
+                                         vtkPoints *outPts)
+{
+  vtkIdType n = inPts->GetNumberOfPoints();
+  this->Update();
+
+  TransformPointsFunctor functor;
+  functor.inPts = inPts;
+  functor.outPts = outPts;
+  functor.matrix = this->Matrix->Element;
+
+  vtkSMPTools::For( 0, n, functor );
+}
+
+//----------------------------------------------------------------------------
+class TransformNormalsFunctor
+{
+public:
+  vtkDataArray* inNms;
+  vtkDataArray* outNms;
+  double (*matrix)[4];
+  void operator () ( vtkIdType begin, vtkIdType end ) const
+  {
+    for(vtkIdType id=begin; id<end; id++)
+      {
+      double norm[3];
+      inNms->GetTuple( id, norm );
+      vtkSMPTransformVector( matrix, norm, norm );
+      vtkMath::Normalize( norm );
+      outNms->SetTuple( id, norm );
+      }
+  }
+};
+
+void vtkSMPTransform::TransformNormals(vtkDataArray *inNms,
+                                       vtkDataArray *outNms)
+{
+  vtkIdType n = inNms->GetNumberOfTuples();
+  double matrix[4][4];
+
+  this->Update();
+
+  // to transform the normal, multiply by the transposed inverse matrix
+  vtkMatrix4x4::DeepCopy(*matrix,this->Matrix);
+  vtkMatrix4x4::Invert(*matrix,*matrix);
+  vtkMatrix4x4::Transpose(*matrix,*matrix);
+
+  TransformNormalsFunctor functor;
+  functor.inNms = inNms;
+  functor.outNms = outNms;
+  functor.matrix = matrix;
+
+  vtkSMPTools::For( 0, n, functor );
+}
+
+//----------------------------------------------------------------------------
+class TransformVectorsFunctor
+{
+public:
+  vtkDataArray* inVcs;
+  vtkDataArray* outVcs;
+  double (*matrix)[4];
+  void operator () ( vtkIdType begin, vtkIdType end) const
+  {
+    for(vtkIdType id=begin; id<end; id++)
+      {
+      double vec[3];
+      inVcs->GetTuple( id, vec );
+      vtkSMPTransformVector( matrix, vec, vec );
+      outVcs->SetTuple( id, vec );
+      }
+  }
+};
+
+void vtkSMPTransform::TransformVectors(vtkDataArray *inNms,
+                                          vtkDataArray *outNms)
+{
+  vtkIdType n = inNms->GetNumberOfTuples();
+  this->Update();
+
+  TransformVectorsFunctor functor;
+  functor.inVcs = inNms;
+  functor.outVcs = outNms;
+  functor.matrix = this->Matrix->Element;
+
+  vtkSMPTools::For( 0, n, functor );
+}
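
For reference, the Invert/Transpose pair above implements the standard rule that normals transform by the inverse transpose of the matrix, followed by renormalization, i.e. n' = normalize((M^-1)^T * n), while points and vectors are multiplied by M directly. The inverse transpose is computed once per call in TransformPointsNormalsVectors and TransformNormals rather than once per point.
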
diff --git a/Filters/SMP/vtkSMPTransform.h b/Filters/SMP/vtkSMPTransform.h
new file mode 100644
index 0000000..cba62cc
--- /dev/null
+++ b/Filters/SMP/vtkSMPTransform.h
@@ -0,0 +1,71 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPTransform.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPTransform - Transform that uses the SMP framework
+// .SECTION Description
+// Just like its parent, vtkTransform, vtkSMPTransform calculates and
+// manages transforms. Its main difference is that it performs various
+// transform operations over a set of points in parallel using the SMP
+// framework.
+// .SECTION See Also
+// vtkTransform
+
+#ifndef __vtkSMPTransform_h
+#define __vtkSMPTransform_h
+
+#include "vtkFiltersSMPModule.h" // For export macro
+#include "vtkTransform.h"
+
+class VTKFILTERSSMP_EXPORT vtkSMPTransform : public vtkTransform
+{
+ public:
+  static vtkSMPTransform *New();
+  vtkTypeMacro(vtkSMPTransform, vtkTransform);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Apply the transformation to a series of points, and append the
+  // results to outPts.
+  void TransformPoints(vtkPoints *inPts, vtkPoints *outPts);
+
+  // Description:
+  // Apply the transformation to a series of normals, and append the
+  // results to outNms.
+  virtual void TransformNormals(vtkDataArray *inNms, vtkDataArray *outNms);
+
+  // Description:
+  // Apply the transformation to a series of vectors, and append the
+  // results to outVrs.
+  virtual void TransformVectors(vtkDataArray *inVrs, vtkDataArray *outVrs);
+
+  // Description:
+  // Apply the transformation to a combination of points, normals
+  // and vectors.
+  void TransformPointsNormalsVectors(vtkPoints *inPts,
+                                     vtkPoints *outPts,
+                                     vtkDataArray *inNms,
+                                     vtkDataArray *outNms,
+                                     vtkDataArray *inVrs,
+                                     vtkDataArray *outVrs);
+
+protected:
+  vtkSMPTransform () {}
+  ~vtkSMPTransform () {}
+
+private:
+  vtkSMPTransform (const vtkSMPTransform&);  // Not implemented
+  void operator=(const vtkSMPTransform&);  // Not implemented
+};
+
+#endif
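
A minimal usage sketch, assuming vtkSMPTransform is used exactly like its parent vtkTransform (RotateZ and Translate are inherited); only the bulk point/normal/vector loops are parallelized:

  #include "vtkNew.h"
  #include "vtkPoints.h"
  #include "vtkSMPTransform.h"

  // Sketch: transform a set of points in parallel. Note that this SMP
  // implementation writes results by index (outPts->SetPoint(id, ...)),
  // so the output array is sized up front here.
  void TransformInParallel(vtkPoints* inPts, vtkPoints* outPts)
  {
    vtkNew<vtkSMPTransform> transform;
    transform->RotateZ(45.0);            // inherited from vtkTransform
    transform->Translate(1.0, 0.0, 0.0);

    outPts->SetNumberOfPoints(inPts->GetNumberOfPoints());
    transform->TransformPoints(inPts, outPts); // vtkSMPTools::For over all points
  }
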
diff --git a/Filters/SMP/vtkSMPWarpVector.cxx b/Filters/SMP/vtkSMPWarpVector.cxx
new file mode 100644
index 0000000..d0d8306
--- /dev/null
+++ b/Filters/SMP/vtkSMPWarpVector.cxx
@@ -0,0 +1,203 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPWarpVector.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkSMPWarpVector.h"
+
+#include "vtkCellData.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkSMPTools.h"
+#include "vtkPointData.h"
+#include "vtkPointSet.h"
+#include "vtkPoints.h"
+#include "vtkTypeTemplate.h" // For vtkTypeTemplate
+
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkSMPWarpVector);
+
+//----------------------------------------------------------------------------
+vtkSMPWarpVector::vtkSMPWarpVector()
+{
+  this->ScaleFactor = 1.0;
+
+  // by default process active point vectors
+  this->SetInputArrayToProcess(0,0,0,vtkDataObject::FIELD_ASSOCIATION_POINTS,
+                               vtkDataSetAttributes::VECTORS);
+}
+
+//----------------------------------------------------------------------------
+vtkSMPWarpVector::~vtkSMPWarpVector()
+{
+}
+
+//----------------------------------------------------------------------------
+template <class T1, class T2>
+class vtkSMPWarpVectorOp
+{
+public:
+
+  T1 *InPoints;
+  T1 *OutPoints;
+  T2 *InVector;
+  T1 scaleFactor;
+
+  void  operator()(vtkIdType begin, vtkIdType end)
+  {
+    T1* inPts = this->InPoints + 3*begin;
+    T1* outPts = this->OutPoints + 3*begin;
+    T2* inVec = this->InVector + 3*begin;
+    T1 sf = this->scaleFactor;
+    vtkIdType size = 3*(end-begin);
+    vtkIdType nend = begin + size;
+
+    for (vtkIdType index = begin; index < nend; index++)
+      {
+      *outPts = *inPts + sf * (T1)(*inVec);
+      inPts++; outPts++; inVec++;
+      /*
+      *outPts = *inPts + sf * (T1)(*inVec);
+      inPts++; outPts++; inVec++;
+      *outPts = *inPts + sf * (T1)(*inVec);
+      inPts++; outPts++; inVec++;
+      */
+      }
+  }
+};
+
+//----------------------------------------------------------------------------
+template <class T1, class T2>
+void vtkSMPWarpVectorExecute2(vtkSMPWarpVector *self,
+                              T1 *inIter,
+                              T1 *outIter,
+                              T2 *inVecIter,
+                              vtkIdType size)
+{
+  vtkSMPWarpVectorOp<T1, T2> op;
+  op.InPoints = inIter;
+  op.OutPoints = outIter;
+  op.InVector = inVecIter;
+  op.scaleFactor = (T1)self->GetScaleFactor();
+
+  vtkSMPTools::For(0, size, op);
+}
+
+//----------------------------------------------------------------------------
+template <class T>
+void vtkSMPWarpVectorExecute(vtkSMPWarpVector *self,
+                             T *inIter,
+                             T *outIter,
+                             vtkIdType size,
+                             vtkDataArray *vectors)
+{
+  void *inVecIter = vectors->GetVoidPointer(0);
+
+  // call templated function
+  switch (vectors->GetDataType())
+    {
+    vtkTemplateMacro(
+      vtkSMPWarpVectorExecute2(self, inIter, outIter,
+                               (VTK_TT *)(inVecIter), size));
+    default:
+      break;
+    }
+}
+
+//----------------------------------------------------------------------------
+int vtkSMPWarpVector::RequestData(
+  vtkInformation *request,
+  vtkInformationVector **inputVector,
+  vtkInformationVector *outputVector)
+{
+  // get the info objects
+  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
+  vtkInformation *outInfo = outputVector->GetInformationObject(0);
+
+  // get the input and output
+  vtkPointSet *input = vtkPointSet::SafeDownCast(
+    inInfo->Get(vtkDataObject::DATA_OBJECT()));
+  if (!input)
+    {
+    // Let the superclass handle vtkImageData and vtkRectilinearGrid
+    return this->Superclass::RequestData(request, inputVector, outputVector);
+    }
+  vtkPointSet *output = vtkPointSet::SafeDownCast(
+    outInfo->Get(vtkDataObject::DATA_OBJECT()));
+
+  vtkPoints *points;
+  vtkIdType numPts;
+
+  // First, copy the input to the output as a starting point
+  output->CopyStructure( input );
+
+  if (input == NULL || input->GetPoints() == NULL)
+    {
+    return 1;
+    }
+  numPts = input->GetPoints()->GetNumberOfPoints();
+
+  vtkDataArray *vectors = this->GetInputArrayToProcess(0,inputVector);
+
+  if ( !vectors || !numPts)
+    {
+    vtkDebugMacro(<<"No input data");
+    return 1;
+    }
+
+  // SETUP AND ALLOCATE THE OUTPUT
+  numPts = input->GetNumberOfPoints();
+  points = input->GetPoints()->NewInstance();
+  points->SetDataType(input->GetPoints()->GetDataType());
+  points->Allocate(numPts);
+  points->SetNumberOfPoints(numPts);
+  output->SetPoints(points);
+  points->Delete();
+
+  vtkDataArray* inpts = input->GetPoints()->GetData();
+  vtkDataArray* outpts = output->GetPoints()->GetData();
+
+  void* inIter = inpts->GetVoidPointer(0);
+  void* outIter = outpts->GetVoidPointer(0);
+
+  // call templated function
+  switch (input->GetPoints()->GetDataType())
+    {
+    vtkTemplateMacro(
+      vtkSMPWarpVectorExecute(this,
+                              (VTK_TT *)(inIter),
+                              (VTK_TT *)(outIter),
+                              numPts,
+                              vectors));
+    default:
+      break;
+    }
+
+
+  // now pass the data.
+  output->GetPointData()->CopyNormalsOff(); // distorted geometry
+  output->GetPointData()->PassData(input->GetPointData());
+  output->GetCellData()->PassData(input->GetCellData());
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+void vtkSMPWarpVector::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+  os << indent << "Scale Factor: " << this->ScaleFactor << "\n";
+}
diff --git a/Filters/SMP/vtkSMPWarpVector.h b/Filters/SMP/vtkSMPWarpVector.h
new file mode 100644
index 0000000..8ca5dde
--- /dev/null
+++ b/Filters/SMP/vtkSMPWarpVector.h
@@ -0,0 +1,52 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSMPWarpVector.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSMPWarpVector - multithreaded vtkWarpVector
+// .SECTION Description
+// Just like its parent, vtkWarpVector, but uses the SMP framework to do the work on many threads.
+
+#ifndef __vtkSMPWarpVector_h__
+#define __vtkSMPWarpVector_h__
+
+#include "vtkFiltersSMPModule.h" // For export macro
+#include "vtkWarpVector.h"
+
+class vtkInformation;
+class vtkInformationVector;
+
+class VTKFILTERSSMP_EXPORT vtkSMPWarpVector : public vtkWarpVector
+{
+public :
+  vtkTypeMacro(vtkSMPWarpVector,vtkWarpVector);
+  static vtkSMPWarpVector *New();
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+protected :
+  vtkSMPWarpVector();
+  ~vtkSMPWarpVector();
+
+
+  // Description:
+  // Overridden to use threads.
+  int RequestData(vtkInformation *,
+                  vtkInformationVector **,
+                  vtkInformationVector *);
+
+private :
+  vtkSMPWarpVector(const vtkSMPWarpVector&);  // Not implemented.
+  void operator=(const vtkSMPWarpVector&);  // Not implemented.
+
+};
+
+#endif //__vtkSMPWarpVector_h__
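
A hedged usage sketch: vtkSMPWarpVector is configured exactly like vtkWarpVector (SetScaleFactor and SetInputData are inherited), and only the per-point warp loop in RequestData is threaded.

  #include "vtkNew.h"
  #include "vtkPointSet.h"
  #include "vtkSMPWarpVector.h"

  // Sketch: warp a point set along its active point vector array in parallel.
  void WarpInParallel(vtkPointSet* input, double scaleFactor)
  {
    vtkNew<vtkSMPWarpVector> warp;
    warp->SetInputData(input);         // input must carry an active vector array
    warp->SetScaleFactor(scaleFactor); // inherited from vtkWarpVector
    warp->Update();

    vtkPointSet* warped = warp->GetOutput();
    // ... use 'warped' while 'warp' is alive, or Register()/DeepCopy it ...
  }
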
diff --git a/Filters/Selection/Testing/Cxx/CMakeLists.txt b/Filters/Selection/Testing/Cxx/CMakeLists.txt
index 7b1c68b..a08a77f 100644
--- a/Filters/Selection/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Selection/Testing/Cxx/CMakeLists.txt
@@ -1,39 +1,12 @@
-set(MyTests
+vtk_add_test_cxx(NO_VALID
   TestLinearSelector2D.cxx
-)
+  )
 
 # Tests with data
-if(VTK_DATA_ROOT)
-  set(MyTests
-    ${MyTests}
-    TestCellDistanceSelector2D.cxx
-    TestCellDistanceSelector3D.cxx
-    TestLinearSelector3D.cxx
-    )
-endif()
+vtk_add_test_cxx(NO_VALID
+  TestCellDistanceSelector2D.cxx
+  TestCellDistanceSelector3D.cxx
+  TestLinearSelector3D.cxx
+  )
 
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach (test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-
-  if(VTK_DATA_ROOT)
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png)
-  else()
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-      -T ${VTK_TEST_OUTPUT_DIR}
-      )
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Filters/Selection/Testing/Cxx/TestCellDistanceSelector3D.cxx b/Filters/Selection/Testing/Cxx/TestCellDistanceSelector3D.cxx
index 85249ff..79fd82b 100644
--- a/Filters/Selection/Testing/Cxx/TestCellDistanceSelector3D.cxx
+++ b/Filters/Selection/Testing/Cxx/TestCellDistanceSelector3D.cxx
@@ -34,7 +34,7 @@
 #include <vtksys/ios/sstream>
 
 // Reference values
-vtkIdType cardCellDistanceSelection3D[] =
+const vtkIdType cardCellDistanceSelection3D[] =
 {
   125,
   16,
diff --git a/Filters/Selection/Testing/Cxx/TestLinearSelector2D.cxx b/Filters/Selection/Testing/Cxx/TestLinearSelector2D.cxx
index bd31239..f6ce229 100644
--- a/Filters/Selection/Testing/Cxx/TestLinearSelector2D.cxx
+++ b/Filters/Selection/Testing/Cxx/TestLinearSelector2D.cxx
@@ -34,7 +34,7 @@
 #include <vtksys/ios/sstream>
 
 // Reference value
-vtkIdType cardSelectionLinearSelector2D  = 20;
+const vtkIdType cardSelectionLinearSelector2D  = 20;
 
 // ------------------------------------------------------------------------------------------------
 static int CheckExtractedUGrid( vtkExtractSelection* extract,
diff --git a/Filters/Selection/Testing/Cxx/TestLinearSelector3D.cxx b/Filters/Selection/Testing/Cxx/TestLinearSelector3D.cxx
index a3e4764..d7998e9 100644
--- a/Filters/Selection/Testing/Cxx/TestLinearSelector3D.cxx
+++ b/Filters/Selection/Testing/Cxx/TestLinearSelector3D.cxx
@@ -34,7 +34,7 @@
 #include <vtksys/ios/sstream>
 
 // Reference values
-vtkIdType cardSelection[] =
+const vtkIdType cardSelection[] =
 {
   54,
   54,
diff --git a/Filters/Sources/Testing/Cxx/CMakeLists.txt b/Filters/Sources/Testing/Cxx/CMakeLists.txt
index f9306f7..6b38b2c 100644
--- a/Filters/Sources/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Sources/Testing/Cxx/CMakeLists.txt
@@ -1,34 +1,26 @@
-set(testswithdata)
-if (VTK_DATA_ROOT)
-  set(testswithdata
-    TestMultiBlock.cxx
+vtk_add_test_cxx(
+  TestArcSource.cxx,NO_VALID
+  TestConeSource.cxx,NO_VALID
+  TestCubeSource.cxx,NO_VALID
+  TestCylinderSource.cxx,NO_VALID
+  TestDiskSource.cxx,NO_VALID
+  TestEllipticalButtonSource.cxx,NO_VALID
+  TestFrustumSource.cxx,NO_VALID
+  TestGlyphSource2D.cxx,NO_VALID
+  TestLineSource.cxx,NO_VALID
+  TestMultiBlock.cxx
+  TestOutlineCornerSource.cxx,NO_VALID
+  TestOutlineSource.cxx,NO_VALID
+  TestParametricFunctionSource.cxx,NO_VALID
+  TestPlaneSource.cxx,NO_VALID
+  TestPlatonicSolidSource.cxx,NO_VALID
+  TestPointSource.cxx,NO_VALID
+  TestRectangularButtonSource.cxx,NO_VALID
+  TestRegularPolygonSource.cxx,NO_VALID
+  TestSphereSource.cxx,NO_VALID
+  TestSuperquadricSource.cxx,NO_VALID
+  TestTessellatedBoxSource.cxx,NO_VALID
+  TestTextSource.cxx,NO_VALID
+  TestTexturedSphereSource.cxx,NO_VALID
   )
-endif()
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestLineSource.cxx
-  TestTessellatedBoxSource.cxx
-  ${testswithdata}
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Graphics/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/Sources/Testing/Cxx/TestArcSource.cxx b/Filters/Sources/Testing/Cxx/TestArcSource.cxx
new file mode 100644
index 0000000..7b732c3
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestArcSource.cxx
@@ -0,0 +1,88 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestArcSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkArcSource.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+int TestArcSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkArcSource> arcSource
+    = vtkSmartPointer<vtkArcSource>::New();
+  arcSource->SetAngle(90.0);
+  arcSource->SetResolution(8);
+  arcSource->NegativeOff();
+  arcSource->UseNormalAndAngleOn();
+
+  arcSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double normal[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    normal[i] = randomSequence->GetValue();
+    }
+  arcSource->SetNormal(normal);
+
+  double polarVector[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    polarVector[i] = randomSequence->GetValue();
+    }
+  arcSource->SetPolarVector(polarVector);
+
+  arcSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = arcSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  arcSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    normal[i] = randomSequence->GetValue();
+    }
+  arcSource->SetNormal(normal);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    polarVector[i] = randomSequence->GetValue();
+    }
+  arcSource->SetPolarVector(polarVector);
+
+  arcSource->Update();
+
+  polyData = arcSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestConeSource.cxx b/Filters/Sources/Testing/Cxx/TestConeSource.cxx
new file mode 100644
index 0000000..3ac40a2
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestConeSource.cxx
@@ -0,0 +1,102 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestConeSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkConeSource.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+int TestConeSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkConeSource> coneSource
+    = vtkSmartPointer<vtkConeSource>::New();
+  coneSource->SetResolution(8);
+  coneSource->CappingOn();
+
+  coneSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  coneSource->SetCenter(center);
+
+  double direction[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    direction[i] = randomSequence->GetValue();
+    }
+  coneSource->SetDirection(direction);
+
+  randomSequence->Next();
+  double height = randomSequence->GetValue();
+  coneSource->SetHeight(height);
+
+  randomSequence->Next();
+  double radius = randomSequence->GetValue();
+  coneSource->SetRadius(radius);
+
+  coneSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = coneSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  coneSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  coneSource->SetCenter(center);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    direction[i] = randomSequence->GetValue();
+    }
+  coneSource->SetDirection(direction);
+
+  randomSequence->Next();
+  height = randomSequence->GetValue();
+  coneSource->SetHeight(height);
+
+  randomSequence->Next();
+  radius = randomSequence->GetValue();
+  coneSource->SetRadius(radius);
+
+  coneSource->Update();
+
+  polyData = coneSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestCubeSource.cxx b/Filters/Sources/Testing/Cxx/TestCubeSource.cxx
new file mode 100644
index 0000000..f93e497
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestCubeSource.cxx
@@ -0,0 +1,93 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestCubeSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCubeSource.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+int TestCubeSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkCubeSource> cubeSource
+    = vtkSmartPointer<vtkCubeSource>::New();
+
+  cubeSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  cubeSource->SetCenter(center);
+
+  randomSequence->Next();
+  double xLength = randomSequence->GetValue();
+  cubeSource->SetXLength(xLength);
+
+  randomSequence->Next();
+  double yLength = randomSequence->GetValue();
+  cubeSource->SetYLength(yLength);
+
+  randomSequence->Next();
+  double zLength = randomSequence->GetValue();
+  cubeSource->SetZLength(zLength);
+
+  cubeSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = cubeSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  cubeSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  cubeSource->SetCenter(center);
+
+  randomSequence->Next();
+  xLength = randomSequence->GetValue();
+  cubeSource->SetXLength(xLength);
+
+  randomSequence->Next();
+  yLength = randomSequence->GetValue();
+  cubeSource->SetYLength(yLength);
+
+  randomSequence->Next();
+  zLength = randomSequence->GetValue();
+  cubeSource->SetZLength(zLength);
+
+  cubeSource->Update();
+
+  polyData = cubeSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestCylinderSource.cxx b/Filters/Sources/Testing/Cxx/TestCylinderSource.cxx
new file mode 100644
index 0000000..542b335
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestCylinderSource.cxx
@@ -0,0 +1,87 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestCylinderSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCylinderSource.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+int TestCylinderSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkCylinderSource> cylinderSource
+    = vtkSmartPointer<vtkCylinderSource>::New();
+  cylinderSource->SetResolution(8);
+  cylinderSource->CappingOn();
+
+  cylinderSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  cylinderSource->SetCenter(center);
+
+  randomSequence->Next();
+  double height = randomSequence->GetValue();
+  cylinderSource->SetHeight(height);
+
+  randomSequence->Next();
+  double radius = randomSequence->GetValue();
+  cylinderSource->SetRadius(radius);
+
+  cylinderSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = cylinderSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  cylinderSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  cylinderSource->SetCenter(center);
+
+  randomSequence->Next();
+  height = randomSequence->GetValue();
+  cylinderSource->SetHeight(height);
+
+  randomSequence->Next();
+  radius = randomSequence->GetValue();
+  cylinderSource->SetRadius(radius);
+
+  cylinderSource->Update();
+
+  polyData = cylinderSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestDiskSource.cxx b/Filters/Sources/Testing/Cxx/TestDiskSource.cxx
new file mode 100644
index 0000000..6edf771
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestDiskSource.cxx
@@ -0,0 +1,84 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestDiskSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkDiskSource.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+int TestDiskSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkDiskSource> diskSource
+    = vtkSmartPointer<vtkDiskSource>::New();
+  diskSource->SetCircumferentialResolution(8);
+  diskSource->SetRadialResolution(8);
+
+  diskSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  randomSequence->Next();
+  double innerRadius = randomSequence->GetValue();
+
+  randomSequence->Next();
+  double outerRadius = randomSequence->GetValue();
+
+  if(innerRadius > outerRadius)
+    {
+    std::swap(innerRadius, outerRadius);
+    }
+
+  diskSource->SetInnerRadius(innerRadius);
+  diskSource->SetOuterRadius(outerRadius);
+
+  diskSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = diskSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  diskSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  randomSequence->Next();
+  innerRadius = randomSequence->GetValue();
+
+  randomSequence->Next();
+  outerRadius = randomSequence->GetValue();
+
+  if(innerRadius > outerRadius)
+    {
+    std::swap(innerRadius, outerRadius);
+    }
+
+  diskSource->SetInnerRadius(innerRadius);
+  diskSource->SetOuterRadius(outerRadius);
+
+  diskSource->Update();
+
+  polyData = diskSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestEllipticalButtonSource.cxx b/Filters/Sources/Testing/Cxx/TestEllipticalButtonSource.cxx
new file mode 100644
index 0000000..e4f6bce
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestEllipticalButtonSource.cxx
@@ -0,0 +1,101 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestEllipticalButtonSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkEllipticalButtonSource.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+int TestEllipticalButtonSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkEllipticalButtonSource> ellipticalButtonSource
+    = vtkSmartPointer<vtkEllipticalButtonSource>::New();
+  ellipticalButtonSource->SetCircumferentialResolution(8);
+  ellipticalButtonSource->SetShoulderResolution(8);
+  ellipticalButtonSource->SetTextureResolution(8);
+  ellipticalButtonSource->SetRadialRatio(1.0);
+  ellipticalButtonSource->SetShoulderTextureCoordinate(0.0, 0.0);
+  ellipticalButtonSource->SetTextureDimensions(100, 100);
+  ellipticalButtonSource->SetTextureStyleToProportional();
+  ellipticalButtonSource->TwoSidedOff();
+
+  ellipticalButtonSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  ellipticalButtonSource->SetCenter(center);
+
+  randomSequence->Next();
+  double depth = randomSequence->GetValue();
+  ellipticalButtonSource->SetDepth(depth);
+
+  randomSequence->Next();
+  double height = randomSequence->GetValue();
+  ellipticalButtonSource->SetHeight(height);
+
+  randomSequence->Next();
+  double width = randomSequence->GetValue();
+  ellipticalButtonSource->SetWidth(width);
+
+  ellipticalButtonSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = ellipticalButtonSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  ellipticalButtonSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  ellipticalButtonSource->SetCenter(center);
+
+  randomSequence->Next();
+  depth = randomSequence->GetValue();
+  ellipticalButtonSource->SetDepth(depth);
+
+  randomSequence->Next();
+  height = randomSequence->GetValue();
+  ellipticalButtonSource->SetHeight(height);
+
+  randomSequence->Next();
+  width = randomSequence->GetValue();
+  ellipticalButtonSource->SetWidth(width);
+
+  ellipticalButtonSource->Update();
+
+  polyData = ellipticalButtonSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestFrustumSource.cxx b/Filters/Sources/Testing/Cxx/TestFrustumSource.cxx
new file mode 100644
index 0000000..3e8fd19
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestFrustumSource.cxx
@@ -0,0 +1,105 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestFrustumSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkCamera.h>
+#include <vtkFrustumSource.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPlanes.h>
+#include <vtkSmartPointer.h>
+
+int TestFrustumSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkFrustumSource> frustumSource
+    = vtkSmartPointer<vtkFrustumSource>::New();
+  frustumSource->ShowLinesOn();
+
+  frustumSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  randomSequence->Next();
+  double linesLength = randomSequence->GetValue();
+  frustumSource->SetLinesLength(linesLength);
+
+  vtkSmartPointer<vtkCamera> camera = vtkSmartPointer<vtkCamera>::New();
+
+  double position[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    position[i] = randomSequence->GetValue();
+    }
+  camera->SetPosition(position);
+  double focalPoint[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    focalPoint[i] = randomSequence->GetValue();
+    }
+  camera->SetFocalPoint(focalPoint);
+  double planeCoefficients[24];
+  camera->GetFrustumPlanes(1.0, planeCoefficients);
+
+  vtkSmartPointer<vtkPlanes> planes = vtkSmartPointer<vtkPlanes>::New();
+  planes->SetFrustumPlanes(planeCoefficients);
+  frustumSource->SetPlanes(planes);
+
+  frustumSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = frustumSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  frustumSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  randomSequence->Next();
+  linesLength = randomSequence->GetValue();
+  frustumSource->SetLinesLength(linesLength);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    position[i] = randomSequence->GetValue();
+    }
+  camera->SetPosition(position);
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    focalPoint[i] = randomSequence->GetValue();
+    }
+  camera->SetFocalPoint(focalPoint);
+  camera->GetFrustumPlanes(1.0, planeCoefficients);
+
+  planes->SetFrustumPlanes(planeCoefficients);
+  frustumSource->SetPlanes(planes);
+
+  frustumSource->Update();
+
+  polyData = frustumSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestGlyphSource2D.cxx b/Filters/Sources/Testing/Cxx/TestGlyphSource2D.cxx
new file mode 100644
index 0000000..a7e1460
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestGlyphSource2D.cxx
@@ -0,0 +1,90 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestGlyphSource2D.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkGlyphSource2D.h>
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+
+int TestGlyphSource2D(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkGlyphSource2D> glyphSource
+    = vtkSmartPointer<vtkGlyphSource2D>::New();
+  glyphSource->SetColor(1.0, 1.0, 1.0);
+  glyphSource->CrossOff();
+  glyphSource->DashOff();
+  glyphSource->FilledOn();
+  glyphSource->SetGlyphTypeToVertex();
+
+  glyphSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  glyphSource->SetCenter(center);
+
+  randomSequence->Next();
+  double rotationAngle = randomSequence->GetValue();
+  glyphSource->SetRotationAngle(rotationAngle);
+
+  randomSequence->Next();
+  double scale = randomSequence->GetValue();
+  glyphSource->SetScale(scale);
+
+  glyphSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = glyphSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  glyphSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  glyphSource->SetCenter(center);
+
+  randomSequence->Next();
+  rotationAngle = randomSequence->GetValue();
+  glyphSource->SetRotationAngle(rotationAngle);
+
+  randomSequence->Next();
+  scale = randomSequence->GetValue();
+  glyphSource->SetScale(scale);
+
+  glyphSource->Update();
+
+  polyData = glyphSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestLineSource.cxx b/Filters/Sources/Testing/Cxx/TestLineSource.cxx
index 44a3390..b1c81f0 100644
--- a/Filters/Sources/Testing/Cxx/TestLineSource.cxx
+++ b/Filters/Sources/Testing/Cxx/TestLineSource.cxx
@@ -13,89 +13,134 @@
 
 =========================================================================*/
 
-#include "vtkLineSource.h"
-#include "vtkSmartPointer.h"
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+#include <vtkLineSource.h>
 
-#include "vtkTestUtilities.h"
+int TestLineSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
 
-#include <limits>
+  vtkSmartPointer<vtkLineSource> lineSource
+    = vtkSmartPointer<vtkLineSource>::New();
+  lineSource->SetResolution(8);
 
-#ifndef ABS
-#define ABS(x) ((x) < 0 ? -(x) : (x))
-#endif
+  lineSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
 
-template<class A>
-bool fuzzyCompare1D(A a, A b)
-{
-  return ABS(a - b) < std::numeric_limits<A>::epsilon();
-}
+  double point1[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    point1[i] = randomSequence->GetValue();
+    }
+  lineSource->SetPoint1(point1);
+
+  double point2[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    point2[i] = randomSequence->GetValue();
+    }
+  lineSource->SetPoint2(point2);
 
-int TestLineSource( int argc, char* argv[] )
-{
-  (void)argc;
-  (void)argv;
-
-  {
-  // Test double functions
-  double p1[3] = {1.0, 0.0, 0.0};
-  double p2[3] = {0.0, 1.0, 0.0};
-
-  vtkSmartPointer<vtkLineSource> lineSource =
-    vtkSmartPointer<vtkLineSource>::New();
-  lineSource->SetPoint1(p1);
-  lineSource->SetPoint2(p2);
   lineSource->Update();
 
-  double* p1retrieved = lineSource->GetPoint1();
-  if(!fuzzyCompare1D(static_cast<float>(p1retrieved[0]), static_cast<float>(p1[0])) ||
-     !fuzzyCompare1D(static_cast<float>(p1retrieved[1]), static_cast<float>(p1[1])) ||
-     !fuzzyCompare1D(static_cast<float>(p1retrieved[2]), static_cast<float>(p1[2])))
+  vtkSmartPointer<vtkPolyData> polyData = lineSource->GetOutput();
+  vtkSmartPointer<vtkPoints> outputPoints = polyData->GetPoints();
+
+  if(outputPoints->GetDataType() != VTK_FLOAT)
     {
-    std::cerr << "Error: p1(double) was not retrieved properly!" << std::endl;
     return EXIT_FAILURE;
     }
 
+  lineSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
 
-  double* p2retrieved = lineSource->GetPoint2();
-  if(!fuzzyCompare1D(static_cast<float>(p2retrieved[0]), static_cast<float>(p2[0])) ||
-     !fuzzyCompare1D(static_cast<float>(p2retrieved[1]), static_cast<float>(p2[1])) ||
-     !fuzzyCompare1D(static_cast<float>(p2retrieved[2]), static_cast<float>(p2[2])))
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    point1[i] = randomSequence->GetValue();
+    }
+  lineSource->SetPoint1(point1);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    point2[i] = randomSequence->GetValue();
+    }
+  lineSource->SetPoint2(point2);
+
+  lineSource->Update();
+
+  polyData = lineSource->GetOutput();
+  outputPoints = polyData->GetPoints();
+
+  if(outputPoints->GetDataType() != VTK_DOUBLE)
     {
-    std::cerr << "Error: p2(double) was not retrieved properly!" << std::endl;
     return EXIT_FAILURE;
     }
-  }
 
-  {
-  // Test float functions
-  float p1[3] = {1.0, 0.0, 0.0};
-  float p2[3] = {0.0, 1.0, 0.0};
+  lineSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  vtkSmartPointer<vtkPoints> inputPoints = vtkSmartPointer<vtkPoints>::New();
+  inputPoints->SetDataType(VTK_DOUBLE);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    point1[i] = randomSequence->GetValue();
+    }
+  inputPoints->InsertNextPoint(point1);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    point2[i] = randomSequence->GetValue();
+    }
+  inputPoints->InsertNextPoint(point2);
+
+  lineSource->SetPoints(inputPoints);
 
-  vtkSmartPointer<vtkLineSource> lineSource =
-    vtkSmartPointer<vtkLineSource>::New();
-  lineSource->SetPoint1(p1);
-  lineSource->SetPoint2(p2);
   lineSource->Update();
 
-  double* p1retrieved = lineSource->GetPoint1();
-  if(!fuzzyCompare1D(static_cast<float>(p1retrieved[0]), static_cast<float>(p1[0])) ||
-     !fuzzyCompare1D(static_cast<float>(p1retrieved[1]), static_cast<float>(p1[1])) ||
-     !fuzzyCompare1D(static_cast<float>(p1retrieved[2]), static_cast<float>(p1[2])))
+  polyData = lineSource->GetOutput();
+  outputPoints = polyData->GetPoints();
+
+  if(outputPoints->GetDataType() != VTK_FLOAT)
     {
-    std::cerr << "Error: p1(float) was not retrieved properly!" << std::endl;
     return EXIT_FAILURE;
     }
 
+  inputPoints->Reset();
+
+  lineSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    point1[i] = randomSequence->GetValue();
+    }
+  inputPoints->InsertNextPoint(point1);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    point2[i] = randomSequence->GetValue();
+    }
+  inputPoints->InsertNextPoint(point2);
+
+  lineSource->SetPoints(inputPoints);
+
+  lineSource->Update();
+
+  polyData = lineSource->GetOutput();
+  outputPoints = polyData->GetPoints();
 
-  double* p2retrieved = lineSource->GetPoint2();
-  if(!fuzzyCompare1D(static_cast<float>(p2retrieved[0]), static_cast<float>(p2[0])) ||
-     !fuzzyCompare1D(static_cast<float>(p2retrieved[1]), static_cast<float>(p2[1])) ||
-     !fuzzyCompare1D(static_cast<float>(p2retrieved[2]), static_cast<float>(p2[2])))
+  if(outputPoints->GetDataType() != VTK_DOUBLE)
     {
-    std::cerr << "Error: p2(float) was not retrieved properly!" << std::endl;
     return EXIT_FAILURE;
     }
-  }
 
   return EXIT_SUCCESS;
 }
diff --git a/Filters/Sources/Testing/Cxx/TestOutlineCornerSource.cxx b/Filters/Sources/Testing/Cxx/TestOutlineCornerSource.cxx
new file mode 100644
index 0000000..92c8986
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestOutlineCornerSource.cxx
@@ -0,0 +1,103 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestOutlineCornerSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkOutlineCornerSource.h>
+#include <vtkSmartPointer.h>
+
+int TestOutlineCornerSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkOutlineCornerSource> outlineCornerSource
+    = vtkSmartPointer<vtkOutlineCornerSource>::New();
+  outlineCornerSource->SetBoxTypeToAxisAligned();
+  outlineCornerSource->GenerateFacesOff();
+
+  outlineCornerSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double bounds[6];
+  for(unsigned int i = 0; i < 6; ++i)
+    {
+    randomSequence->Next();
+    bounds[i] = randomSequence->GetValue();
+    }
+  if(bounds[0] > bounds[3])
+    {
+    std::swap(bounds[0], bounds[3]);
+    }
+  if(bounds[1] > bounds[4])
+    {
+    std::swap(bounds[1], bounds[4]);
+    }
+  if(bounds[2] > bounds[5])
+    {
+    std::swap(bounds[2], bounds[5]);
+    }
+  outlineCornerSource->SetBounds(bounds);
+
+  randomSequence->Next();
+  double cornerFactor = randomSequence->GetValue();
+  outlineCornerSource->SetCornerFactor(cornerFactor);
+
+  outlineCornerSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = outlineCornerSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  outlineCornerSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 6; ++i)
+    {
+    randomSequence->Next();
+    bounds[i] = randomSequence->GetValue();
+    }
+  if(bounds[0] > bounds[3])
+    {
+    std::swap(bounds[0], bounds[3]);
+    }
+  if(bounds[1] > bounds[4])
+    {
+    std::swap(bounds[1], bounds[4]);
+    }
+  if(bounds[2] > bounds[5])
+    {
+    std::swap(bounds[2], bounds[5]);
+    }
+  outlineCornerSource->SetBounds(bounds);
+
+  randomSequence->Next();
+  cornerFactor = randomSequence->GetValue();
+  outlineCornerSource->SetCornerFactor(cornerFactor);
+
+  outlineCornerSource->Update();
+
+  polyData = outlineCornerSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestOutlineSource.cxx b/Filters/Sources/Testing/Cxx/TestOutlineSource.cxx
new file mode 100644
index 0000000..4bb2015
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestOutlineSource.cxx
@@ -0,0 +1,95 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestOutlineSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkOutlineSource.h>
+#include <vtkSmartPointer.h>
+
+int TestOutlineSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkOutlineSource> outlineSource
+    = vtkSmartPointer<vtkOutlineSource>::New();
+  outlineSource->SetBoxTypeToAxisAligned();
+  outlineSource->GenerateFacesOff();
+
+  outlineSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double bounds[6];
+  for(unsigned int i = 0; i < 6; ++i)
+    {
+    randomSequence->Next();
+    bounds[i] = randomSequence->GetValue();
+    }
+  if(bounds[0] > bounds[3])
+    {
+    std::swap(bounds[0], bounds[3]);
+    }
+  if(bounds[1] > bounds[4])
+    {
+    std::swap(bounds[1], bounds[4]);
+    }
+  if(bounds[2] > bounds[5])
+    {
+    std::swap(bounds[2], bounds[5]);
+    }
+  outlineSource->SetBounds(bounds);
+
+  outlineSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = outlineSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  outlineSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 6; ++i)
+    {
+    randomSequence->Next();
+    bounds[i] = randomSequence->GetValue();
+    }
+  if(bounds[0] > bounds[3])
+    {
+    std::swap(bounds[0], bounds[3]);
+    }
+  if(bounds[1] > bounds[4])
+    {
+    std::swap(bounds[1], bounds[4]);
+    }
+  if(bounds[2] > bounds[5])
+    {
+    std::swap(bounds[2], bounds[5]);
+    }
+  outlineSource->SetBounds(bounds);
+
+  outlineSource->Update();
+
+  polyData = outlineSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestParametricFunctionSource.cxx b/Filters/Sources/Testing/Cxx/TestParametricFunctionSource.cxx
new file mode 100644
index 0000000..39c57d0
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestParametricFunctionSource.cxx
@@ -0,0 +1,91 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestParametricFunctionSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkParametricEllipsoid.h>
+#include <vtkParametricFunctionSource.h>
+#include <vtkSmartPointer.h>
+
+int TestParametricFunctionSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkParametricFunctionSource> parametricFunctionSource
+    = vtkSmartPointer<vtkParametricFunctionSource>::New();
+  parametricFunctionSource->SetUResolution(64);
+  parametricFunctionSource->SetVResolution(64);
+  parametricFunctionSource->SetWResolution(64);
+  parametricFunctionSource->SetScalarModeToNone();
+  parametricFunctionSource->GenerateTextureCoordinatesOff();
+
+  parametricFunctionSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  vtkSmartPointer<vtkParametricEllipsoid> parametricEllipsoid
+    = vtkSmartPointer<vtkParametricEllipsoid>::New();
+
+  randomSequence->Next();
+  double xRadius = randomSequence->GetValue();
+  parametricEllipsoid->SetXRadius(xRadius);
+
+  randomSequence->Next();
+  double yRadius = randomSequence->GetValue();
+  parametricEllipsoid->SetYRadius(yRadius);
+
+  randomSequence->Next();
+  double zRadius = randomSequence->GetValue();
+  parametricEllipsoid->SetZRadius(zRadius);
+
+  parametricFunctionSource->SetParametricFunction(parametricEllipsoid);
+
+  parametricFunctionSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = parametricFunctionSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  parametricFunctionSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  randomSequence->Next();
+  xRadius = randomSequence->GetValue();
+  parametricEllipsoid->SetXRadius(xRadius);
+
+  randomSequence->Next();
+  yRadius = randomSequence->GetValue();
+  parametricEllipsoid->SetYRadius(yRadius);
+
+  randomSequence->Next();
+  zRadius = randomSequence->GetValue();
+  parametricEllipsoid->SetZRadius(zRadius);
+
+  parametricFunctionSource->SetParametricFunction(parametricEllipsoid);
+
+  parametricFunctionSource->Update();
+
+  polyData = parametricFunctionSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestPlaneSource.cxx b/Filters/Sources/Testing/Cxx/TestPlaneSource.cxx
new file mode 100644
index 0000000..f904b76
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestPlaneSource.cxx
@@ -0,0 +1,86 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestPlaneSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPlaneSource.h>
+#include <vtkSmartPointer.h>
+
+int TestPlaneSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPlaneSource> planeSource
+    = vtkSmartPointer<vtkPlaneSource>::New();
+  planeSource->SetXResolution(8);
+  planeSource->SetYResolution(8);
+
+  planeSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  planeSource->SetCenter(center);
+
+  double normal[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    normal[i] = randomSequence->GetValue();
+    }
+  planeSource->SetNormal(normal);
+
+  planeSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = planeSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  planeSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  planeSource->SetCenter(center);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    normal[i] = randomSequence->GetValue();
+    }
+  planeSource->SetNormal(normal);
+
+  planeSource->Update();
+
+  polyData = planeSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestPlatonicSolidSource.cxx b/Filters/Sources/Testing/Cxx/TestPlatonicSolidSource.cxx
new file mode 100644
index 0000000..cd188e1
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestPlatonicSolidSource.cxx
@@ -0,0 +1,139 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestPlatonicSolidSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkPlatonicSolidSource.h>
+#include <vtkSmartPointer.h>
+
+int TestPlatonicSolidSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkPlatonicSolidSource> platonicSolidSource
+    = vtkSmartPointer<vtkPlatonicSolidSource>::New();
+
+  platonicSolidSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  platonicSolidSource->SetSolidTypeToCube();
+  platonicSolidSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = platonicSolidSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetSolidTypeToDodecahedron();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetSolidTypeToIcosahedron();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetSolidTypeToOctahedron();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetSolidTypeToTetrahedron();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  platonicSolidSource->SetSolidTypeToCube();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetSolidTypeToDodecahedron();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetSolidTypeToIcosahedron();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetSolidTypeToOctahedron();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  platonicSolidSource->SetSolidTypeToTetrahedron();
+  platonicSolidSource->Update();
+
+  polyData = platonicSolidSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestPointSource.cxx b/Filters/Sources/Testing/Cxx/TestPointSource.cxx
new file mode 100644
index 0000000..8a64b54
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestPointSource.cxx
@@ -0,0 +1,79 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestPointSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkPointSource.h>
+#include <vtkSmartPointer.h>
+
+int TestPointSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkPointSource> pointSource
+    = vtkSmartPointer<vtkPointSource>::New();
+  pointSource->SetDistributionToUniform();
+  pointSource->SetNumberOfPoints(16);
+
+  pointSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  pointSource->SetCenter(center);
+
+  randomSequence->Next();
+  double radius = randomSequence->GetValue();
+  pointSource->SetRadius(radius);
+
+  pointSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = pointSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  pointSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  pointSource->SetCenter(center);
+
+  randomSequence->Next();
+  radius = randomSequence->GetValue();
+  pointSource->SetRadius(radius);
+
+  pointSource->Update();
+
+  polyData = pointSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestRectangularButtonSource.cxx b/Filters/Sources/Testing/Cxx/TestRectangularButtonSource.cxx
new file mode 100644
index 0000000..22481c4
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestRectangularButtonSource.cxx
@@ -0,0 +1,100 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestRectangularButtonSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkRectangularButtonSource.h>
+#include <vtkSmartPointer.h>
+
+int TestRectangularButtonSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkRectangularButtonSource> rectangularButtonSource
+    = vtkSmartPointer<vtkRectangularButtonSource>::New();
+  rectangularButtonSource->SetBoxRatio(1.0);
+  rectangularButtonSource->SetTextureHeightRatio(1.0);
+  rectangularButtonSource->SetTextureRatio(1.0);
+  rectangularButtonSource->SetShoulderTextureCoordinate(0.0, 0.0);
+  rectangularButtonSource->SetTextureDimensions(100, 100);
+  rectangularButtonSource->SetTextureStyleToProportional();
+  rectangularButtonSource->TwoSidedOff();
+
+  rectangularButtonSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  rectangularButtonSource->SetCenter(center);
+
+  randomSequence->Next();
+  double depth = randomSequence->GetValue();
+  rectangularButtonSource->SetDepth(depth);
+
+  randomSequence->Next();
+  double height = randomSequence->GetValue();
+  rectangularButtonSource->SetHeight(height);
+
+  randomSequence->Next();
+  double width = randomSequence->GetValue();
+  rectangularButtonSource->SetWidth(width);
+
+  rectangularButtonSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = rectangularButtonSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  rectangularButtonSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  rectangularButtonSource->SetCenter(center);
+
+  randomSequence->Next();
+  depth = randomSequence->GetValue();
+  rectangularButtonSource->SetDepth(depth);
+
+  randomSequence->Next();
+  height = randomSequence->GetValue();
+  rectangularButtonSource->SetHeight(height);
+
+  randomSequence->Next();
+  width = randomSequence->GetValue();
+  rectangularButtonSource->SetWidth(width);
+
+  rectangularButtonSource->Update();
+
+  polyData = rectangularButtonSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestRegularPolygonSource.cxx b/Filters/Sources/Testing/Cxx/TestRegularPolygonSource.cxx
new file mode 100644
index 0000000..8c915a7
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestRegularPolygonSource.cxx
@@ -0,0 +1,97 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestRegularPolygonSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkRegularPolygonSource.h>
+#include <vtkSmartPointer.h>
+
+int TestRegularPolygonSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkRegularPolygonSource> regularPolygonSource
+    = vtkSmartPointer<vtkRegularPolygonSource>::New();
+  regularPolygonSource->SetNumberOfSides(8);
+  regularPolygonSource->GeneratePolygonOn();
+  regularPolygonSource->GeneratePolylineOn();
+
+  regularPolygonSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  randomSequence->Next();
+  double radius = randomSequence->GetValue();
+  regularPolygonSource->SetRadius(radius);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  regularPolygonSource->SetCenter(center);
+
+  regularPolygonSource->Update();
+
+  double normal[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    normal[i] = randomSequence->GetValue();
+    }
+  regularPolygonSource->SetNormal(normal);
+
+  regularPolygonSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = regularPolygonSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  regularPolygonSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  randomSequence->Next();
+  radius = randomSequence->GetValue();
+  regularPolygonSource->SetRadius(radius);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  regularPolygonSource->SetCenter(center);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    normal[i] = randomSequence->GetValue();
+    }
+  regularPolygonSource->SetNormal(normal);
+
+  regularPolygonSource->Update();
+
+  polyData = regularPolygonSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestSphereSource.cxx b/Filters/Sources/Testing/Cxx/TestSphereSource.cxx
new file mode 100644
index 0000000..555915a
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestSphereSource.cxx
@@ -0,0 +1,84 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSphereSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+#include <vtkSphereSource.h>
+
+int TestSphereSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkSphereSource> sphereSource
+    = vtkSmartPointer<vtkSphereSource>::New();
+  sphereSource->SetThetaResolution(8);
+  sphereSource->SetPhiResolution(8);
+  sphereSource->SetStartTheta(0.0);
+  sphereSource->SetEndTheta(360.0);
+  sphereSource->SetStartPhi(0.0);
+  sphereSource->SetEndPhi(180.0);
+  sphereSource->LatLongTessellationOff();
+
+  sphereSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  sphereSource->SetCenter(center);
+
+  randomSequence->Next();
+  double radius = randomSequence->GetValue();
+  sphereSource->SetRadius(radius);
+
+  sphereSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = sphereSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  sphereSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  sphereSource->SetCenter(center);
+
+  randomSequence->Next();
+  radius = randomSequence->GetValue();
+  sphereSource->SetRadius(radius);
+
+  sphereSource->Update();
+
+  polyData = sphereSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestSuperquadricSource.cxx b/Filters/Sources/Testing/Cxx/TestSuperquadricSource.cxx
new file mode 100644
index 0000000..2a9ccbc
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestSuperquadricSource.cxx
@@ -0,0 +1,90 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSuperquadricSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+#include <vtkSuperquadricSource.h>
+
+int TestSuperquadricSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkSuperquadricSource> superquadricSource
+    = vtkSmartPointer<vtkSuperquadricSource>::New();
+  superquadricSource->SetThetaResolution(8);
+  superquadricSource->SetPhiResolution(8);
+  superquadricSource->SetThetaRoundness(1.0);
+  superquadricSource->SetPhiRoundness(1.0);
+  superquadricSource->SetYAxisOfSymmetry();
+  superquadricSource->ToroidalOff();
+
+  superquadricSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  double center[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  superquadricSource->SetCenter(center);
+
+  double scale[3];
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    scale[i] = randomSequence->GetValue();
+    }
+  superquadricSource->SetScale(scale);
+
+  superquadricSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = superquadricSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  superquadricSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    center[i] = randomSequence->GetValue();
+    }
+  superquadricSource->SetCenter(center);
+
+  for(unsigned int i = 0; i < 3; ++i)
+    {
+    randomSequence->Next();
+    scale[i] = randomSequence->GetValue();
+    }
+  superquadricSource->SetScale(scale);
+
+  superquadricSource->Update();
+
+  polyData = superquadricSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestTextSource.cxx b/Filters/Sources/Testing/Cxx/TestTextSource.cxx
new file mode 100644
index 0000000..fd754cf
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestTextSource.cxx
@@ -0,0 +1,54 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestTextSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkSmartPointer.h>
+#include <vtkTextSource.h>
+
+int TestTextSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkTextSource> textSource
+    = vtkSmartPointer<vtkTextSource>::New();
+  textSource->SetBackgroundColor(0.0, 0.0, 0.0);
+  textSource->SetForegroundColor(1.0, 1.0, 1.0);
+  textSource->BackingOn();
+
+  textSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  textSource->SetText("1234567890abcdefghijklmnopqrstuvwxyz");
+  textSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = textSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  textSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  textSource->SetText("1234567890ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  textSource->Update();
+
+  polyData = textSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Cxx/TestTexturedSphereSource.cxx b/Filters/Sources/Testing/Cxx/TestTexturedSphereSource.cxx
new file mode 100644
index 0000000..f50a803
--- /dev/null
+++ b/Filters/Sources/Testing/Cxx/TestTexturedSphereSource.cxx
@@ -0,0 +1,66 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestTexturedSphereSource.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkMinimalStandardRandomSequence.h>
+#include <vtkSmartPointer.h>
+#include <vtkTexturedSphereSource.h>
+
+int TestTexturedSphereSource(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
+{
+  vtkSmartPointer<vtkMinimalStandardRandomSequence> randomSequence
+    = vtkSmartPointer<vtkMinimalStandardRandomSequence>::New();
+  randomSequence->SetSeed(1);
+
+  vtkSmartPointer<vtkTexturedSphereSource> texturedSphereSource
+    = vtkSmartPointer<vtkTexturedSphereSource>::New();
+  texturedSphereSource->SetThetaResolution(8);
+  texturedSphereSource->SetPhiResolution(8);
+  texturedSphereSource->SetTheta(0.0);
+  texturedSphereSource->SetPhi(0.0);
+
+  texturedSphereSource->SetOutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION);
+
+  randomSequence->Next();
+  double radius = randomSequence->GetValue();
+  texturedSphereSource->SetRadius(radius);
+
+  texturedSphereSource->Update();
+
+  vtkSmartPointer<vtkPolyData> polyData = texturedSphereSource->GetOutput();
+  vtkSmartPointer<vtkPoints> points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_FLOAT)
+    {
+    return EXIT_FAILURE;
+    }
+
+  texturedSphereSource->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
+
+  randomSequence->Next();
+  radius = randomSequence->GetValue();
+  texturedSphereSource->SetRadius(radius);
+
+  texturedSphereSource->Update();
+
+  polyData = texturedSphereSource->GetOutput();
+  points = polyData->GetPoints();
+
+  if(points->GetDataType() != VTK_DOUBLE)
+    {
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Sources/Testing/Data/Baseline/Disk.png.md5 b/Filters/Sources/Testing/Data/Baseline/Disk.png.md5
new file mode 100644
index 0000000..a9bd481
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/Disk.png.md5
@@ -0,0 +1 @@
+b06b4d44bb13830b03a78c38d9f0cec1
diff --git a/Filters/Sources/Testing/Data/Baseline/OSCone.png.md5 b/Filters/Sources/Testing/Data/Baseline/OSCone.png.md5
new file mode 100644
index 0000000..e50c113
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/OSCone.png.md5
@@ -0,0 +1 @@
+e150e7c166498c3a6d6a056fa17a05d8
diff --git a/Filters/Sources/Testing/Data/Baseline/TestButtonSource.png.md5 b/Filters/Sources/Testing/Data/Baseline/TestButtonSource.png.md5
new file mode 100644
index 0000000..70ed288
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/TestButtonSource.png.md5
@@ -0,0 +1 @@
+9ece58ec7628d5a33b04348a432614ca
diff --git a/Filters/Sources/Testing/Data/Baseline/TestMultiBlock.png.md5 b/Filters/Sources/Testing/Data/Baseline/TestMultiBlock.png.md5
new file mode 100644
index 0000000..89fa98f
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/TestMultiBlock.png.md5
@@ -0,0 +1 @@
+43931539975ba75567798b830e4fb0cb
diff --git a/Filters/Sources/Testing/Data/Baseline/TestPlatonicSolids.png.md5 b/Filters/Sources/Testing/Data/Baseline/TestPlatonicSolids.png.md5
new file mode 100644
index 0000000..bb6d4a5
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/TestPlatonicSolids.png.md5
@@ -0,0 +1 @@
+953451905c694ae0a8a6c5774437ab6e
diff --git a/Filters/Sources/Testing/Data/Baseline/TestRegularPolygonSource.png.md5 b/Filters/Sources/Testing/Data/Baseline/TestRegularPolygonSource.png.md5
new file mode 100644
index 0000000..6ee2309
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/TestRegularPolygonSource.png.md5
@@ -0,0 +1 @@
+caeed8da143b860efacdbdab44d47656
diff --git a/Filters/Sources/Testing/Data/Baseline/coneResolution.png.md5 b/Filters/Sources/Testing/Data/Baseline/coneResolution.png.md5
new file mode 100644
index 0000000..507e145
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/coneResolution.png.md5
@@ -0,0 +1 @@
+12e16507c3c300260c835909b5959fe6
diff --git a/Filters/Sources/Testing/Data/Baseline/glyph2D.png.md5 b/Filters/Sources/Testing/Data/Baseline/glyph2D.png.md5
new file mode 100644
index 0000000..c4ae663
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/glyph2D.png.md5
@@ -0,0 +1 @@
+c7b7e70245efd714829aef3c75b3c00c
diff --git a/Filters/Sources/Testing/Data/Baseline/squadViewer.png.md5 b/Filters/Sources/Testing/Data/Baseline/squadViewer.png.md5
new file mode 100644
index 0000000..5bd4d0c
--- /dev/null
+++ b/Filters/Sources/Testing/Data/Baseline/squadViewer.png.md5
@@ -0,0 +1 @@
+8b6e81c9065a54236853ecaa2b5e674e
diff --git a/Filters/Sources/Testing/Python/CMakeLists.txt b/Filters/Sources/Testing/Python/CMakeLists.txt
index ae8ee00..ef5feb9 100644
--- a/Filters/Sources/Testing/Python/CMakeLists.txt
+++ b/Filters/Sources/Testing/Python/CMakeLists.txt
@@ -1,10 +1,7 @@
-add_test_python(Disk.py Graphics)
-add_test_python(OSCone.py Graphics)
-add_test_python(TestButtonSource.py Graphics)
-add_test_python(TestRegularPolygonSource.py Graphics)
-add_test_python(coneResolution.py Graphics)
-add_test_python(glyph2D.py Graphics)
-
-if(VTK_DATA_ROOT)
-  add_test_python1(TestPlatonicSolids.py Baseline/Graphics)
-endif()
+vtk_add_test_python(Disk.py)
+vtk_add_test_python(OSCone.py)
+vtk_add_test_python(TestButtonSource.py)
+vtk_add_test_python(TestRegularPolygonSource.py)
+vtk_add_test_python(coneResolution.py)
+vtk_add_test_python(glyph2D.py)
+vtk_add_test_python(TestPlatonicSolids.py NO_RT)
diff --git a/Filters/Sources/Testing/Python/TestPlatonicSolids.py b/Filters/Sources/Testing/Python/TestPlatonicSolids.py
index b40342a..94cb9fa 100755
--- a/Filters/Sources/Testing/Python/TestPlatonicSolids.py
+++ b/Filters/Sources/Testing/Python/TestPlatonicSolids.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestPlatonicSolids.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Filters/Sources/Testing/Tcl/CMakeLists.txt b/Filters/Sources/Testing/Tcl/CMakeLists.txt
index 906cd2b..c084312 100644
--- a/Filters/Sources/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Sources/Testing/Tcl/CMakeLists.txt
@@ -1,7 +1,5 @@
-if (VTK_USE_RENDERING AND VTK_USE_DISPLAY)
-  if(VTK_DATA_ROOT)
-    if(VTK_USE_TK)
-      add_test_tcl(squadViewer Filtering)
-    endif()
+if (VTK_USE_RENDERING)
+  if(VTK_USE_TK)
+    vtk_add_test_tcl(squadViewer)
   endif()
 endif()
diff --git a/Filters/Sources/vtkArcSource.cxx b/Filters/Sources/vtkArcSource.cxx
index f834437..e0ac9bd 100644
--- a/Filters/Sources/vtkArcSource.cxx
+++ b/Filters/Sources/vtkArcSource.cxx
@@ -57,7 +57,7 @@ vtkArcSource::vtkArcSource(int res)
   this->PolarVector[1] =  0.0;
   this->PolarVector[2] =  1.0;
 
-  // Default arc is a quarter-circle 
+  // Default arc is a quarter-circle
   this->Angle =  90.;
 
   // Default resolution
@@ -69,6 +69,8 @@ vtkArcSource::vtkArcSource(int res)
   // By default use the original API
   this->UseNormalAndAngle = false;
 
+  this->OutputPointsPrecision = SINGLE_PRECISION;
+
   // This is a source
   this->SetNumberOfInputPorts( 0 );
 }
@@ -165,6 +167,17 @@ int vtkArcSource::RequestData( vtkInformation* vtkNotUsed(request),
 
   // Now create arc points and segments
   vtkPoints *newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate( numPts );
   vtkFloatArray *newTCoords = vtkFloatArray::New();
   newTCoords->SetNumberOfComponents( 2 );
@@ -239,5 +252,7 @@ void vtkArcSource::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Negative: " << this->Negative << "\n";
 
   os << indent << "UseNormalAndAngle: " << this->UseNormalAndAngle << "\n";
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
 
diff --git a/Filters/Sources/vtkArcSource.h b/Filters/Sources/vtkArcSource.h
index 52fa838..bac186d 100644
--- a/Filters/Sources/vtkArcSource.h
+++ b/Filters/Sources/vtkArcSource.h
@@ -15,15 +15,15 @@
 // .NAME vtkArcSource - create an arc between two end points
 // .SECTION Description
 // vtkArcSource is a source object that creates an arc defined by two
-// endpoints and a center. The number of segments composing the polyline 
+// endpoints and a center. The number of segments composing the polyline
 // is controlled by setting the object resolution.
-// Alternatively, one can use a better API (that does not allow for 
+// Alternatively, one can use a better API (that does not allow for
 // inconsistent nor ambiguous inputs), using a starting point, a normal,
 // and an angle. The default API being the original one, in order to use
 // the improved API, one must switch the UseNormalAndAngle flag to TRUE.
 
 // The development of an improved, consistent API (based on point, normal,
-// and angle) was supported by CEA/DIF - Commissariat a l'Energie Atomique, 
+// and angle) was supported by CEA/DIF - Commissariat a l'Energie Atomique,
 // Centre DAM Ile-De-France, BP12, F-91297 Arpajon, France, and implemented
 // by Philippe Pebay, Kitware SAS 2012.
 
@@ -93,16 +93,23 @@ public:
   // Description:
   // Activate the API based on normal and radius.
   // The previous API (which remains the default) allows for inconsistent
-  // (when Point1 and Point2 are not equidistant from Center) or 
+  // (when Point1 and Point2 are not equidistant from Center) or
   // ambiguous (when Point1, Point2, and Center are aligned).
   // Note: false by default.
   vtkSetMacro(UseNormalAndAngle, bool);
   vtkGetMacro(UseNormalAndAngle, bool);
   vtkBooleanMacro(UseNormalAndAngle, bool);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkArcSource(int res=1);
-  ~vtkArcSource() {};
+  ~vtkArcSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   int RequestInformation(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
@@ -115,6 +122,7 @@ protected:
   int Resolution;
   bool Negative;
   bool UseNormalAndAngle;
+  int OutputPointsPrecision;
 
 private:
   vtkArcSource(const vtkArcSource&);  // Not implemented.
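
The vtkArcSource hunks above introduce the OutputPointsPrecision ivar that the rest of this patch repeats across the Filters/Sources classes: the constructor defaults it to SINGLE_PRECISION, RequestData() picks VTK_DOUBLE or VTK_FLOAT for the output vtkPoints accordingly, and the header exposes Set/Get macros. A minimal sketch of how a caller would use the new setter; the includes, main() wrapper and resolution value are illustrative only, while the setter name and the vtkAlgorithm constants come from the hunks themselves:

    #include "vtkAlgorithm.h"
    #include "vtkArcSource.h"
    #include "vtkNew.h"
    #include "vtkPolyData.h"

    int main()
    {
      vtkNew<vtkArcSource> arc;
      arc->SetResolution(32);
      // Request double-precision output points; the default stays
      // SINGLE_PRECISION (VTK_FLOAT), matching the previous behaviour.
      arc->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
      arc->Update();
      // The points array is now allocated as VTK_DOUBLE.
      return arc->GetOutput()->GetPoints()->GetDataType() == VTK_DOUBLE ? 0 : 1;
    }
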
diff --git a/Filters/Sources/vtkArrowSource.h b/Filters/Sources/vtkArrowSource.h
index e292398..a139b01 100644
--- a/Filters/Sources/vtkArrowSource.h
+++ b/Filters/Sources/vtkArrowSource.h
@@ -74,7 +74,7 @@ public:
 
 protected:
   vtkArrowSource();
-  ~vtkArrowSource() {};
+  ~vtkArrowSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Sources/vtkConeSource.cxx b/Filters/Sources/vtkConeSource.cxx
index 47ca9b1..b6e9d4a 100644
--- a/Filters/Sources/vtkConeSource.cxx
+++ b/Filters/Sources/vtkConeSource.cxx
@@ -47,6 +47,8 @@ vtkConeSource::vtkConeSource(int res)
   this->Direction[1] = 0.0;
   this->Direction[2] = 0.0;
 
+  this->OutputPointsPrecision = SINGLE_PRECISION;
+
   this->SetNumberOfInputPorts(0);
 }
 
@@ -143,7 +145,17 @@ int vtkConeSource::RequestData(
     break;
   }
   newPoints = vtkPoints::New();
-  newPoints->SetDataTypeToFloat(); //used later during transformation
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
 
   // Create cone
@@ -326,4 +338,5 @@ void vtkConeSource::PrintSelf(ostream& os, vtkIndent indent)
      << this->Center[1] << ", " << this->Center[2] << ")\n";
   os << indent << "Direction: (" << this->Direction[0] << ", "
      << this->Direction[1] << ", " << this->Direction[2] << ")\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Sources/vtkConeSource.h b/Filters/Sources/vtkConeSource.h
index 6e35be7..84125fa 100644
--- a/Filters/Sources/vtkConeSource.h
+++ b/Filters/Sources/vtkConeSource.h
@@ -89,6 +89,13 @@ public:
   vtkGetMacro(Capping,int);
   vtkBooleanMacro(Capping,int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkConeSource(int res=6);
   ~vtkConeSource() {}
@@ -102,6 +109,7 @@ protected:
   int Capping;
   double Center[3];
   double Direction[3];
+  int OutputPointsPrecision;
 
 private:
   vtkConeSource(const vtkConeSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkCubeSource.cxx b/Filters/Sources/vtkCubeSource.cxx
index 8eec798..bbfc3b7 100644
--- a/Filters/Sources/vtkCubeSource.cxx
+++ b/Filters/Sources/vtkCubeSource.cxx
@@ -38,6 +38,8 @@ vtkCubeSource::vtkCubeSource(double xL, double yL, double zL)
   this->Center[1] = 0.0;
   this->Center[2] = 0.0;
 
+  this->OutputPointsPrecision = SINGLE_PRECISION;
+
   this->SetNumberOfInputPorts(0);
 }
 
@@ -66,6 +68,17 @@ int vtkCubeSource::RequestData(
 // Set things up; allocate memory
 //
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
   newNormals = vtkFloatArray::New();
   newNormals->SetNumberOfComponents(3);
@@ -181,7 +194,7 @@ void vtkCubeSource::SetBounds(double xMin, double xMax,
   this->SetBounds (bounds);
 }
 
-void vtkCubeSource::SetBounds(double bounds[6])
+void vtkCubeSource::SetBounds(const double bounds[6])
 {
   this->SetXLength(bounds[1]-bounds[0]);
   this->SetYLength(bounds[3]-bounds[2]);
@@ -200,4 +213,5 @@ void vtkCubeSource::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Z Length: " << this->ZLength << "\n";
   os << indent << "Center: (" << this->Center[0] << ", "
                << this->Center[1] << ", " << this->Center[2] << ")\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Sources/vtkCubeSource.h b/Filters/Sources/vtkCubeSource.h
index 7df89d6..6bf9078 100644
--- a/Filters/Sources/vtkCubeSource.h
+++ b/Filters/Sources/vtkCubeSource.h
@@ -56,17 +56,25 @@ public:
   void SetBounds(double xMin, double xMax,
                  double yMin, double yMax,
                  double zMin, double zMax);
-  void SetBounds(double bounds[6]);
+  void SetBounds(const double bounds[6]);
+
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
 
 protected:
   vtkCubeSource(double xL=1.0, double yL=1.0, double zL=1.0);
-  ~vtkCubeSource() {};
+  ~vtkCubeSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double XLength;
   double YLength;
   double ZLength;
   double Center[3];
+  int OutputPointsPrecision;
 private:
   vtkCubeSource(const vtkCubeSource&);  // Not implemented.
   void operator=(const vtkCubeSource&);  // Not implemented.
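
Besides the precision ivar, the vtkCubeSource hunks const-qualify SetBounds(const double bounds[6]), so a read-only bounds array can now be passed without a cast. A short sketch under the same assumptions as above (illustrative values, fragment rather than a full program):

    const double bounds[6] = { -1.0, 1.0, -2.0, 2.0, -3.0, 3.0 };
    vtkNew<vtkCubeSource> cube;
    // Accepted now that the parameter is const double[6]; the old
    // signature required a mutable array.
    cube->SetBounds(bounds);
    cube->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
    cube->Update();
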
diff --git a/Filters/Sources/vtkCylinderSource.cxx b/Filters/Sources/vtkCylinderSource.cxx
index db19657..50c5621 100644
--- a/Filters/Sources/vtkCylinderSource.cxx
+++ b/Filters/Sources/vtkCylinderSource.cxx
@@ -35,6 +35,7 @@ vtkCylinderSource::vtkCylinderSource (int res)
   this->Radius = 0.5;
   this->Capping = 1;
   this->Center[0] = this->Center[1] = this->Center[2] = 0.0;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts(0);
 }
@@ -79,6 +80,17 @@ int vtkCylinderSource::RequestData(
     }
 
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
   newNormals = vtkFloatArray::New();
   newNormals->SetNumberOfComponents(3);
@@ -214,4 +226,5 @@ void vtkCylinderSource::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Center: (" << this->Center[0] << ", "
      << this->Center[1] << ", " << this->Center[2] << " )\n";
   os << indent << "Capping: " << (this->Capping ? "On\n" : "Off\n");
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Sources/vtkCylinderSource.h b/Filters/Sources/vtkCylinderSource.h
index 8b51c5e..6aa385d 100644
--- a/Filters/Sources/vtkCylinderSource.h
+++ b/Filters/Sources/vtkCylinderSource.h
@@ -63,9 +63,16 @@ public:
   vtkGetMacro(Capping,int);
   vtkBooleanMacro(Capping,int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkCylinderSource(int res=6);
-  ~vtkCylinderSource() {};
+  ~vtkCylinderSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double Height;
@@ -73,6 +80,7 @@ protected:
   double Center[3];
   int Resolution;
   int Capping;
+  int OutputPointsPrecision;
 
 private:
   vtkCylinderSource(const vtkCylinderSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkDiskSource.cxx b/Filters/Sources/vtkDiskSource.cxx
index 9cc40b7..cc02521 100644
--- a/Filters/Sources/vtkDiskSource.cxx
+++ b/Filters/Sources/vtkDiskSource.cxx
@@ -30,6 +30,7 @@ vtkDiskSource::vtkDiskSource()
   this->OuterRadius = 0.5;
   this->RadialResolution = 1;
   this->CircumferentialResolution = 6;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts(0);
 }
@@ -61,6 +62,17 @@ int vtkDiskSource::RequestData(
            (this->CircumferentialResolution + 1);
   numPolys = this->RadialResolution * this->CircumferentialResolution;
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
   newPolys = vtkCellArray::New();
   newPolys->Allocate(newPolys->EstimateSize(numPolys,4));
@@ -123,4 +135,5 @@ void vtkDiskSource::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "OuterRadius: " << this->OuterRadius << "\n";
   os << indent << "RadialResolution: " << this->RadialResolution << "\n";
   os << indent << "CircumferentialResolution: " << this->CircumferentialResolution << "\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Sources/vtkDiskSource.h b/Filters/Sources/vtkDiskSource.h
index 35e0da5..98d2e9c 100644
--- a/Filters/Sources/vtkDiskSource.h
+++ b/Filters/Sources/vtkDiskSource.h
@@ -54,15 +54,23 @@ public:
   vtkSetClampMacro(CircumferentialResolution,int,3,VTK_INT_MAX)
   vtkGetMacro(CircumferentialResolution,int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkDiskSource();
-  ~vtkDiskSource() {};
+  ~vtkDiskSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double InnerRadius;
   double OuterRadius;
   int RadialResolution;
   int CircumferentialResolution;
+  int OutputPointsPrecision;
 
 private:
   vtkDiskSource(const vtkDiskSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkEllipticalButtonSource.cxx b/Filters/Sources/vtkEllipticalButtonSource.cxx
index 42a782a..2cfe065 100644
--- a/Filters/Sources/vtkEllipticalButtonSource.cxx
+++ b/Filters/Sources/vtkEllipticalButtonSource.cxx
@@ -38,6 +38,7 @@ vtkEllipticalButtonSource::vtkEllipticalButtonSource()
   this->CircumferentialResolution = 4;
   this->TextureResolution = 2;
   this->ShoulderResolution = 2;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->RadialRatio = 1.1;
 }
@@ -73,6 +74,17 @@ int vtkEllipticalButtonSource::RequestData(
   if ( this->TwoSided ) { numPts *= 2; }
 
   vtkPoints *newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+
   newPts->SetNumberOfPoints(numPts);
 
   vtkFloatArray *normals = vtkFloatArray::New();
@@ -440,4 +452,6 @@ void vtkEllipticalButtonSource::PrintSelf(ostream& os, vtkIndent indent)
 
   os << indent << "Radial Ratio: " << this->RadialRatio << "\n";
 
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkEllipticalButtonSource.h b/Filters/Sources/vtkEllipticalButtonSource.h
index 24be5a3..9daa9e9 100644
--- a/Filters/Sources/vtkEllipticalButtonSource.h
+++ b/Filters/Sources/vtkEllipticalButtonSource.h
@@ -97,6 +97,13 @@ public:
   vtkSetClampMacro(RadialRatio,double,1.0,VTK_DOUBLE_MAX);
   vtkGetMacro(RadialRatio,double);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkEllipticalButtonSource();
   ~vtkEllipticalButtonSource() {}
@@ -106,9 +113,10 @@ protected:
   double Width;
   double Height;
   double Depth;
-  int   CircumferentialResolution;
-  int   TextureResolution;
-  int   ShoulderResolution;
+  int    CircumferentialResolution;
+  int    TextureResolution;
+  int    ShoulderResolution;
+  int    OutputPointsPrecision;
   double RadialRatio;
 
 private:
diff --git a/Filters/Sources/vtkFrustumSource.cxx b/Filters/Sources/vtkFrustumSource.cxx
index 276900d..7803ece 100644
--- a/Filters/Sources/vtkFrustumSource.cxx
+++ b/Filters/Sources/vtkFrustumSource.cxx
@@ -16,7 +16,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkPlanes.h"
 #include "vtkPlane.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkInformation.h"
 #include "vtkInformationVector.h"
@@ -31,6 +31,7 @@ vtkFrustumSource::vtkFrustumSource()
   this->Planes=0;
   this->ShowLines=true;
   this->LinesLength=1.0;
+  this->OutputPointsPrecision = vtkAlgorithm::SINGLE_PRECISION;
 
   // a source has no input port.
   this->SetNumberOfInputPorts(0);
@@ -149,6 +150,17 @@ int vtkFrustumSource::RequestData(
     }
 
   vtkPoints *newPoints=vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->SetNumberOfPoints(nbPts);
   // Ref: Real-Time Rendering, 3rd edition, Thomas Akenine-Moller, Eric Haines,
   // Naty Hoffman, page 783, section 16.17,
@@ -422,4 +434,6 @@ void vtkFrustumSource::PrintSelf(ostream &os,
     }
 
   os << indent << "LinesLength:" << this->LinesLength << endl;
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << endl;
 }
diff --git a/Filters/Sources/vtkFrustumSource.h b/Filters/Sources/vtkFrustumSource.h
index d4085ed..3540b77 100644
--- a/Filters/Sources/vtkFrustumSource.h
+++ b/Filters/Sources/vtkFrustumSource.h
@@ -68,6 +68,13 @@ public:
   // Modified GetMTime because of Planes.
   unsigned long GetMTime();
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   // Description:
   // Default construtor. Planes=NULL. ShowLines=true. LinesLength=1.0.
@@ -87,6 +94,7 @@ protected:
   vtkPlanes *Planes;
   bool ShowLines;
   double LinesLength;
+  int OutputPointsPrecision;
 
 private:
   vtkFrustumSource(const vtkFrustumSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkGlyphSource2D.cxx b/Filters/Sources/vtkGlyphSource2D.cxx
index f28112b..4064e5a 100644
--- a/Filters/Sources/vtkGlyphSource2D.cxx
+++ b/Filters/Sources/vtkGlyphSource2D.cxx
@@ -40,6 +40,7 @@ vtkGlyphSource2D::vtkGlyphSource2D()
   this->Cross = 0;
   this->Dash = 0;
   this->RotationAngle = 0.0;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
   this->GlyphType = VTK_VERTEX_GLYPH;
 
   this->SetNumberOfInputPorts(0);
@@ -60,6 +61,17 @@ int vtkGlyphSource2D::RequestData(
 
   //Allocate storage
   vtkPoints *pts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    pts->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    pts->SetDataType(VTK_FLOAT);
+    }
+
   pts->Allocate(6,6);
   vtkCellArray *verts = vtkCellArray::New();
   verts->Allocate(verts->EstimateSize(1,1),1);
@@ -598,4 +610,6 @@ void vtkGlyphSource2D::PrintSelf(ostream& os, vtkIndent indent)
       os << "Edge Arrow\n";
       break;
     }
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkGlyphSource2D.h b/Filters/Sources/vtkGlyphSource2D.h
index 72addf2..505df8a 100644
--- a/Filters/Sources/vtkGlyphSource2D.h
+++ b/Filters/Sources/vtkGlyphSource2D.h
@@ -130,9 +130,16 @@ public:
   void SetGlyphTypeToHookedArrow() {this->SetGlyphType(VTK_HOOKEDARROW_GLYPH);}
   void SetGlyphTypeToEdgeArrow() {this->SetGlyphType(VTK_EDGEARROW_GLYPH);}
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkGlyphSource2D();
-  ~vtkGlyphSource2D() {};
+  ~vtkGlyphSource2D() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
@@ -140,11 +147,12 @@ protected:
   double Scale;
   double Scale2;
   double Color[3];
-  int   Filled;
-  int   Dash;
-  int   Cross;
-  int   GlyphType;
+  int    Filled;
+  int    Dash;
+  int    Cross;
+  int    GlyphType;
   double RotationAngle;
+  int    OutputPointsPrecision;
 
   void TransformGlyph(vtkPoints *pts);
   void ConvertColor();
diff --git a/Filters/Sources/vtkHyperOctreeFractalSource.cxx b/Filters/Sources/vtkHyperOctreeFractalSource.cxx
index af0e479..dd5e7e6 100644
--- a/Filters/Sources/vtkHyperOctreeFractalSource.cxx
+++ b/Filters/Sources/vtkHyperOctreeFractalSource.cxx
@@ -19,7 +19,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkInformationVector.h"
 #include "vtkInformation.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkPointData.h"
 #include "vtkDataArray.h"
diff --git a/Filters/Sources/vtkHyperTreeGridSource.cxx b/Filters/Sources/vtkHyperTreeGridSource.cxx
index 9659f7d..2060c9e 100644
--- a/Filters/Sources/vtkHyperTreeGridSource.cxx
+++ b/Filters/Sources/vtkHyperTreeGridSource.cxx
@@ -30,7 +30,7 @@ PURPOSE.  See the above copyright notice for more information.
 
 #include <vtksys/ios/sstream>
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkHyperTreeGridSource);
 vtkCxxSetObjectMacro(vtkHyperTreeGridSource, DescriptorBits, vtkBitArray);
diff --git a/Filters/Sources/vtkLineSource.cxx b/Filters/Sources/vtkLineSource.cxx
index 9dc27b9..5c4aa4b 100644
--- a/Filters/Sources/vtkLineSource.cxx
+++ b/Filters/Sources/vtkLineSource.cxx
@@ -43,6 +43,7 @@ vtkLineSource::vtkLineSource(int res)
   this->Points = 0;
 
   this->Resolution = ( res < 1 ? 1 : res );
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts( 0 );
 }
@@ -100,6 +101,17 @@ int vtkLineSource::RequestData(
   // Create and allocate points
   vtkIdType numPts = numLines + 1;
   vtkPoints *newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate( numPts );
 
   // Create and allocate texture coordinates
@@ -243,4 +255,6 @@ void vtkLineSource::PrintSelf(ostream& os, vtkIndent indent)
     {
     os << "(none)" << endl;
     }
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Sources/vtkLineSource.h b/Filters/Sources/vtkLineSource.h
index eee7d8c..def7da9 100644
--- a/Filters/Sources/vtkLineSource.h
+++ b/Filters/Sources/vtkLineSource.h
@@ -59,6 +59,13 @@ public:
   vtkSetClampMacro(Resolution,int,1,VTK_INT_MAX);
   vtkGetMacro(Resolution,int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkLineSource(int res=1);
   virtual ~vtkLineSource();
@@ -68,6 +75,7 @@ protected:
   double Point1[3];
   double Point2[3];
   int Resolution;
+  int OutputPointsPrecision;
 
   // Description:
   // The list of points defining a broken line
diff --git a/Filters/Sources/vtkOutlineCornerSource.cxx b/Filters/Sources/vtkOutlineCornerSource.cxx
index 08ccc3f..9f611ea 100644
--- a/Filters/Sources/vtkOutlineCornerSource.cxx
+++ b/Filters/Sources/vtkOutlineCornerSource.cxx
@@ -28,6 +28,7 @@ vtkOutlineCornerSource::vtkOutlineCornerSource()
     : vtkOutlineSource()
 {
   this->CornerFactor = 0.2;
+  this->OutputPointsPrecision = vtkAlgorithm::SINGLE_PRECISION;
 }
 
 //----------------------------------------------------------------------------
@@ -61,6 +62,17 @@ int vtkOutlineCornerSource::RequestData(
     outInfo->Get(vtkDataObject::DATA_OBJECT()));
 
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+
   newPts->Allocate(32);
   newLines = vtkCellArray::New();
   newLines->Allocate(newLines->EstimateSize(24,2));
@@ -114,4 +126,6 @@ void vtkOutlineCornerSource::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os,indent);
   os << indent << "CornerFactor: " << this->CornerFactor << "\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkOutlineCornerSource.h b/Filters/Sources/vtkOutlineCornerSource.h
index b213b74..240ce95 100644
--- a/Filters/Sources/vtkOutlineCornerSource.h
+++ b/Filters/Sources/vtkOutlineCornerSource.h
@@ -39,13 +39,21 @@ public:
   vtkSetClampMacro(CornerFactor, double, 0.001, 0.5);
   vtkGetMacro(CornerFactor, double);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkOutlineCornerSource();
-  ~vtkOutlineCornerSource() {};
+  ~vtkOutlineCornerSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
   double CornerFactor;
+  int OutputPointsPrecision;
 private:
   vtkOutlineCornerSource(const vtkOutlineCornerSource&);  // Not implemented.
   void operator=(const vtkOutlineCornerSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkOutlineSource.cxx b/Filters/Sources/vtkOutlineSource.cxx
index 153baed..94f2fce 100644
--- a/Filters/Sources/vtkOutlineSource.cxx
+++ b/Filters/Sources/vtkOutlineSource.cxx
@@ -30,6 +30,8 @@ vtkOutlineSource::vtkOutlineSource()
 
   this->GenerateFaces = 0;
 
+  this->OutputPointsPrecision = SINGLE_PRECISION;
+
   for (int i=0; i<3; i++)
     {
     this->Bounds[2*i] = -1.0;
@@ -105,6 +107,17 @@ int vtkOutlineSource::RequestData(
   // Allocate storage and create outline
   //
   newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+
   newPts->Allocate(8);
   newLines = vtkCellArray::New();
   newLines->Allocate(newLines->EstimateSize(12,2));
@@ -232,4 +245,7 @@ void vtkOutlineSource::PrintSelf(ostream& os, vtkIndent indent)
       }
     os << ")\n";
     }
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkOutlineSource.h b/Filters/Sources/vtkOutlineSource.h
index 74ad1a0..2a5ec52 100644
--- a/Filters/Sources/vtkOutlineSource.h
+++ b/Filters/Sources/vtkOutlineSource.h
@@ -67,13 +67,21 @@ public:
   vtkBooleanMacro(GenerateFaces, int);
   vtkGetMacro(GenerateFaces, int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkOutlineSource();
   ~vtkOutlineSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
-  int   BoxType;
-  int   GenerateFaces;
+  int    BoxType;
+  int    GenerateFaces;
+  int    OutputPointsPrecision;
   double Bounds[6];
   double Corners[24];
 
diff --git a/Filters/Sources/vtkParametricFunctionSource.cxx b/Filters/Sources/vtkParametricFunctionSource.cxx
index a62fa34..cd2d7c0 100644
--- a/Filters/Sources/vtkParametricFunctionSource.cxx
+++ b/Filters/Sources/vtkParametricFunctionSource.cxx
@@ -40,6 +40,7 @@ vtkParametricFunctionSource::vtkParametricFunctionSource() :
   , WResolution(50)
   , GenerateTextureCoordinates(0)
   , ScalarMode(vtkParametricFunctionSource::SCALAR_NONE)
+  , OutputPointsPrecision(vtkAlgorithm::SINGLE_PRECISION)
 {
   this->SetNumberOfInputPorts(0);
 }
@@ -231,6 +232,17 @@ void vtkParametricFunctionSource::Produce1DOutput(vtkInformationVector *output)
   vtkIdType numPts = this->UResolution + 1;
   vtkCellArray *lines = vtkCellArray::New();
   vtkPoints *pts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    pts->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    pts->SetDataType(VTK_FLOAT);
+    }
+
   pts->SetNumberOfPoints(numPts);
   vtkIdType i;
   double x[3], Du[3], t[3];
@@ -292,6 +304,17 @@ void vtkParametricFunctionSource::Produce2DOutput(vtkInformationVector *output)
   newTCoords->Allocate(2*totPts);
 
   vtkPoints * points = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    points->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    }
+
   points->SetNumberOfPoints( totPts );
 
   double uStep = ( MaxU - this->ParametricFunction->GetMinimumU() ) / PtsU;
@@ -676,5 +699,6 @@ void vtkParametricFunctionSource::PrintSelf(ostream& os, vtkIndent indent)
    }
   os << indent << "Scalar Mode: " << s.c_str() << "\n";
   os << indent << "GenerateTextureCoordinates:" << (this->GenerateTextureCoordinates ? "On" : "Off" ) << "\n";
-
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkParametricFunctionSource.h b/Filters/Sources/vtkParametricFunctionSource.h
index ea74562..c034859 100644
--- a/Filters/Sources/vtkParametricFunctionSource.h
+++ b/Filters/Sources/vtkParametricFunctionSource.h
@@ -160,6 +160,13 @@ public:
   // Return the MTime also considering the parametric function.
   unsigned long GetMTime();
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkParametricFunctionSource();
   virtual ~vtkParametricFunctionSource();
@@ -176,6 +183,7 @@ protected:
   int WResolution;
   int GenerateTextureCoordinates;
   int ScalarMode;
+  int OutputPointsPrecision;
 
 private:
   // Create output depending on function dimension
diff --git a/Filters/Sources/vtkPlaneSource.cxx b/Filters/Sources/vtkPlaneSource.cxx
index 7637955..ef328a0 100644
--- a/Filters/Sources/vtkPlaneSource.cxx
+++ b/Filters/Sources/vtkPlaneSource.cxx
@@ -50,6 +50,8 @@ vtkPlaneSource::vtkPlaneSource()
 
   this->Center[0] = this->Center[1] = this->Center[2] = 0.0;
 
+  this->OutputPointsPrecision = SINGLE_PRECISION;
+
   this->SetNumberOfInputPorts(0);
 }
 
@@ -109,6 +111,17 @@ int vtkPlaneSource::RequestData(
   numPolys = this->XResolution * this->YResolution;
 
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
   newNormals = vtkFloatArray::New();
   newNormals->SetNumberOfComponents(3);
@@ -426,4 +439,5 @@ void vtkPlaneSource::PrintSelf(ostream& os, vtkIndent indent)
      << this->Center[1] << ", "
      << this->Center[2] << ")\n";
 
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Sources/vtkPlaneSource.h b/Filters/Sources/vtkPlaneSource.h
index 86776d3..548bf49 100644
--- a/Filters/Sources/vtkPlaneSource.h
+++ b/Filters/Sources/vtkPlaneSource.h
@@ -114,9 +114,16 @@ public:
   // opposite direction.
   void Push(double distance);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkPlaneSource();
-  ~vtkPlaneSource() {};
+  ~vtkPlaneSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
@@ -127,6 +134,7 @@ protected:
   double Point2[3];
   double Normal[3];
   double Center[3];
+  int OutputPointsPrecision;
 
   int UpdatePlane(double v1[3], double v2[3]);
 private:
diff --git a/Filters/Sources/vtkPlatonicSolidSource.cxx b/Filters/Sources/vtkPlatonicSolidSource.cxx
index fc7a0bd..cb2d5df 100644
--- a/Filters/Sources/vtkPlatonicSolidSource.cxx
+++ b/Filters/Sources/vtkPlatonicSolidSource.cxx
@@ -81,6 +81,7 @@ static vtkIdType IcosaVerts[] = {
 vtkPlatonicSolidSource::vtkPlatonicSolidSource()
 {
   this->SolidType = VTK_SOLID_TETRAHEDRON;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
   this->SetNumberOfInputPorts(0);
 }
 
@@ -155,7 +156,17 @@ int vtkPlatonicSolidSource::RequestData(
   // Create the solids
   //
   vtkPoints *pts = vtkPoints::New();
-  pts->SetDataTypeToDouble();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    pts->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    pts->SetDataType(VTK_FLOAT);
+    }
+
   pts->SetNumberOfPoints(numPts);
   vtkCellArray *polys = vtkCellArray::New();
   polys->Allocate(polys->EstimateSize(numCells,cellSize));
@@ -215,4 +226,7 @@ void vtkPlatonicSolidSource::PrintSelf(ostream& os, vtkIndent indent)
     {
     os << "Dodecahedron\n";
     }
+
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkPlatonicSolidSource.h b/Filters/Sources/vtkPlatonicSolidSource.h
index 8772f84..d477f32 100644
--- a/Filters/Sources/vtkPlatonicSolidSource.h
+++ b/Filters/Sources/vtkPlatonicSolidSource.h
@@ -54,12 +54,20 @@ public:
   void SetSolidTypeToDodecahedron()
     {this->SetSolidType(VTK_SOLID_DODECAHEDRON);}
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkPlatonicSolidSource();
   ~vtkPlatonicSolidSource() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   int SolidType;
+  int OutputPointsPrecision;
 
 private:
   vtkPlatonicSolidSource(const vtkPlatonicSolidSource&);  // Not implemented.
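
Note that vtkPlatonicSolidSource previously forced pts->SetDataTypeToDouble(); with this change the default becomes SINGLE_PRECISION, so code that relied on double-precision platonic-solid points has to request it explicitly. A minimal sketch (the solid type chosen here is illustrative):

    vtkNew<vtkPlatonicSolidSource> solid;
    solid->SetSolidTypeToDodecahedron();
    // Restore the pre-patch behaviour: output points now default to
    // VTK_FLOAT, so ask for VTK_DOUBLE explicitly.
    solid->SetOutputPointsPrecision(vtkAlgorithm::DOUBLE_PRECISION);
    solid->Update();
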
diff --git a/Filters/Sources/vtkPointSource.cxx b/Filters/Sources/vtkPointSource.cxx
index dbf5f94..6b1502e 100644
--- a/Filters/Sources/vtkPointSource.cxx
+++ b/Filters/Sources/vtkPointSource.cxx
@@ -39,6 +39,7 @@ vtkPointSource::vtkPointSource(vtkIdType numPts)
   this->Radius = 0.5;
 
   this->Distribution = VTK_POINT_UNIFORM;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts(0);
 }
@@ -63,6 +64,17 @@ int vtkPointSource::RequestData(
   vtkCellArray *newVerts;
 
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(this->NumberOfPoints);
   newVerts = vtkCellArray::New();
   newVerts->Allocate(newVerts->EstimateSize(1,this->NumberOfPoints));
@@ -122,4 +134,5 @@ void vtkPointSource::PrintSelf(ostream& os, vtkIndent indent)
                               << this->Center[2] << ")\n";
   os << indent << "Distribution: " <<
      ((this->Distribution == VTK_POINT_SHELL) ? "Shell\n" : "Uniform\n");
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Sources/vtkPointSource.h b/Filters/Sources/vtkPointSource.h
index 73f5af8..f34439b 100644
--- a/Filters/Sources/vtkPointSource.h
+++ b/Filters/Sources/vtkPointSource.h
@@ -65,9 +65,16 @@ public:
     this->SetDistribution(VTK_POINT_SHELL);};
   vtkGetMacro(Distribution,int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkPointSource(vtkIdType numPts=10);
-  ~vtkPointSource() {};
+  ~vtkPointSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
@@ -75,6 +82,7 @@ protected:
   double Center[3];
   double Radius;
   int Distribution;
+  int OutputPointsPrecision;
 
 private:
   vtkPointSource(const vtkPointSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkRectangularButtonSource.cxx b/Filters/Sources/vtkRectangularButtonSource.cxx
index fb3732f..095e5ab 100644
--- a/Filters/Sources/vtkRectangularButtonSource.cxx
+++ b/Filters/Sources/vtkRectangularButtonSource.cxx
@@ -38,6 +38,8 @@ vtkRectangularButtonSource::vtkRectangularButtonSource()
   this->BoxRatio = 1.1;
   this->TextureRatio = 0.9;
   this->TextureHeightRatio = 0.95;
+
+  this->OutputPointsPrecision = vtkAlgorithm::SINGLE_PRECISION;
 }
 
 //----------------------------------------------------------------------------
@@ -86,6 +88,17 @@ int vtkRectangularButtonSource::RequestData(
 
   // Allocate memory for everything
   vtkPoints *newPts = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPts->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPts->SetDataType(VTK_FLOAT);
+    }
+
   newPts->SetNumberOfPoints(numPts);
 
   vtkFloatArray *tcoords = vtkFloatArray::New();
@@ -243,4 +256,6 @@ void vtkRectangularButtonSource::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "BoxRatio: " << this->BoxRatio << "\n";
   os << indent << "TextureRatio: " << this->TextureRatio << "\n";
   os << indent << "TextureHeightRatio: " << this->TextureHeightRatio << "\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkRectangularButtonSource.h b/Filters/Sources/vtkRectangularButtonSource.h
index ee798dd..beee6d4 100644
--- a/Filters/Sources/vtkRectangularButtonSource.h
+++ b/Filters/Sources/vtkRectangularButtonSource.h
@@ -101,6 +101,13 @@ public:
   vtkSetClampMacro(TextureHeightRatio,double,0.0,VTK_DOUBLE_MAX);
   vtkGetMacro(TextureHeightRatio,double);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkRectangularButtonSource();
   ~vtkRectangularButtonSource() {}
@@ -115,6 +122,8 @@ protected:
   double TextureRatio;
   double TextureHeightRatio;
 
+  int OutputPointsPrecision;
+
 private:
   vtkRectangularButtonSource(const vtkRectangularButtonSource&);  // Not implemented.
   void operator=(const vtkRectangularButtonSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkRegularPolygonSource.cxx b/Filters/Sources/vtkRegularPolygonSource.cxx
index 2a6512b..6bea43d 100644
--- a/Filters/Sources/vtkRegularPolygonSource.cxx
+++ b/Filters/Sources/vtkRegularPolygonSource.cxx
@@ -35,6 +35,7 @@ vtkRegularPolygonSource::vtkRegularPolygonSource()
   this->Radius = 0.5;
   this->GeneratePolygon = 1;
   this->GeneratePolyline = 1;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts(0);
 }
@@ -77,6 +78,17 @@ int vtkRegularPolygonSource::RequestData(
 
   // Prepare to produce the output; create the connectivity array(s)
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
 
   if ( this->GeneratePolyline )
@@ -190,4 +202,5 @@ void vtkRegularPolygonSource::PrintSelf(ostream& os, vtkIndent indent)
 
   os << indent << "Generate Polyline: " << (this->GeneratePolyline ? "On\n" : "Off\n");
 
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision << "\n";
 }
diff --git a/Filters/Sources/vtkRegularPolygonSource.h b/Filters/Sources/vtkRegularPolygonSource.h
index eefc11e..5688376 100644
--- a/Filters/Sources/vtkRegularPolygonSource.h
+++ b/Filters/Sources/vtkRegularPolygonSource.h
@@ -72,6 +72,13 @@ public:
   vtkGetMacro(GeneratePolyline,int);
   vtkBooleanMacro(GeneratePolyline,int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkRegularPolygonSource();
   ~vtkRegularPolygonSource() {}
@@ -85,6 +92,7 @@ protected:
   double Radius;
   int    GeneratePolygon;
   int    GeneratePolyline;
+  int    OutputPointsPrecision;
 
 private:
   vtkRegularPolygonSource(const vtkRegularPolygonSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkSphereSource.cxx b/Filters/Sources/vtkSphereSource.cxx
index 9cf112c..639fcc8 100644
--- a/Filters/Sources/vtkSphereSource.cxx
+++ b/Filters/Sources/vtkSphereSource.cxx
@@ -48,6 +48,8 @@ vtkSphereSource::vtkSphereSource(int res)
   this->EndPhi = 180.0;
   this->LatLongTessellation = 0;
 
+  this->OutputPointsPrecision = vtkAlgorithm::SINGLE_PRECISION;
+
   this->SetNumberOfInputPorts(0);
 }
 
@@ -121,6 +123,17 @@ int vtkSphereSource::RequestData(
   numPolys = this->PhiResolution * 2 * localThetaResolution;
 
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
   newNormals = vtkFloatArray::New();
   newNormals->SetNumberOfComponents(3);
@@ -301,6 +314,8 @@ void vtkSphereSource::PrintSelf(ostream& os, vtkIndent indent)
      << this->Center[1] << ", " << this->Center[2] << ")\n";
   os << indent
      << "LatLong Tessellation: " << this->LatLongTessellation << "\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
 
 //----------------------------------------------------------------------------
diff --git a/Filters/Sources/vtkSphereSource.h b/Filters/Sources/vtkSphereSource.h
index e96507d..3284f85 100644
--- a/Filters/Sources/vtkSphereSource.h
+++ b/Filters/Sources/vtkSphereSource.h
@@ -99,6 +99,13 @@ public:
   vtkGetMacro(LatLongTessellation,int);
   vtkBooleanMacro(LatLongTessellation,int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkSphereSource(int res=8);
   ~vtkSphereSource() {}
@@ -115,6 +122,7 @@ protected:
   double StartPhi;
   double EndPhi;
   int LatLongTessellation;
+  int OutputPointsPrecision;
 
 private:
   vtkSphereSource(const vtkSphereSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkSuperquadricSource.cxx b/Filters/Sources/vtkSuperquadricSource.cxx
index 79bd1d8..7811515 100644
--- a/Filters/Sources/vtkSuperquadricSource.cxx
+++ b/Filters/Sources/vtkSuperquadricSource.cxx
@@ -61,6 +61,7 @@ vtkSuperquadricSource::vtkSuperquadricSource(int res)
   this->SetThetaResolution(res);
   this->PhiResolution = 0;
   this->SetPhiResolution(res);
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts(0);
 }
@@ -217,6 +218,17 @@ int vtkSuperquadricSource::RequestData(
   // Set things up; allocate memory
   //
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
   newNormals = vtkFloatArray::New();
   newNormals->SetNumberOfComponents(3);
@@ -413,6 +425,8 @@ void vtkSuperquadricSource::PrintSelf(ostream& os, vtkIndent indent)
      << this->Center[1] << ", " << this->Center[2] << ")\n";
   os << indent << "Scale: (" << this->Scale[0] << ", "
      << this->Scale[1] << ", " << this->Scale[2] << ")\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
 
 static double cf(double w, double m, double a = 0)
@@ -465,7 +479,7 @@ static void evalSuperquadric(double theta, double phi,  // parametric coords
   xyz[0] = -dims[0] * cf1 * sf(theta, rtheta);
   xyz[1] =  dims[1] * cf1 * cf(theta, rtheta);
   xyz[2] =  dims[2]       * sf(phi, rphi);
- 
+
   cf2 = cf(phi+dphi, 2.0-rphi);
   nrm[0] = -1.0/dims[0] * cf2 * sf(theta+dtheta, 2.0-rtheta);
   nrm[1] =  1.0/dims[1] * cf2 * cf(theta+dtheta, 2.0-rtheta);
diff --git a/Filters/Sources/vtkSuperquadricSource.h b/Filters/Sources/vtkSuperquadricSource.h
index b3ec1a1..af0ec8d 100644
--- a/Filters/Sources/vtkSuperquadricSource.h
+++ b/Filters/Sources/vtkSuperquadricSource.h
@@ -126,9 +126,16 @@ public:
   vtkGetMacro(Toroidal,int);
   vtkSetMacro(Toroidal,int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkSuperquadricSource(int res=16);
-  ~vtkSuperquadricSource() {};
+  ~vtkSuperquadricSource() {}
 
   int Toroidal;
   int AxisOfSymmetry;
@@ -141,6 +148,7 @@ protected:
   double Scale[3];
   int ThetaResolution;
   int PhiResolution;
+  int OutputPointsPrecision;
 
 private:
   vtkSuperquadricSource(const vtkSuperquadricSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkTessellatedBoxSource.cxx b/Filters/Sources/vtkTessellatedBoxSource.cxx
index d896fa2..87c5889 100644
--- a/Filters/Sources/vtkTessellatedBoxSource.cxx
+++ b/Filters/Sources/vtkTessellatedBoxSource.cxx
@@ -19,7 +19,7 @@
 #include "vtkCellArray.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkTessellatedBoxSource);
 
@@ -36,6 +36,7 @@ vtkTessellatedBoxSource::vtkTessellatedBoxSource()
   this->Level=0;
   this->DuplicateSharedPoints=0;
   this->Quads=0;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts(0); // this is a source.
 }
@@ -121,13 +122,20 @@ int vtkTessellatedBoxSource::RequestData(
   double bounds[6];
   outInfo->Get(vtkStreamingDemandDrivenPipeline::WHOLE_BOUNDING_BOX(),bounds);
 
-  vtkPoints *points=output->GetPoints();
-  if(points==0)
+  vtkPoints *points = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
     {
-    points = vtkPoints::New();
-    output->SetPoints(points);
-    points->Delete();
+    points->SetDataType(VTK_DOUBLE);
     }
+  else
+    {
+    points->SetDataType(VTK_FLOAT);
+    }
+
+  output->SetPoints(points);
+  points->Delete();
 
   // Always create a new vtkCellArray, otherwise it uses the this->Dummy of
   // vtkPolyData...
@@ -697,4 +705,6 @@ void vtkTessellatedBoxSource::PrintSelf(ostream& os, vtkIndent indent)
     os << "false";
     }
   os << endl;
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << endl;
 }
diff --git a/Filters/Sources/vtkTessellatedBoxSource.h b/Filters/Sources/vtkTessellatedBoxSource.h
index a3db6ee..77bcbe3 100644
--- a/Filters/Sources/vtkTessellatedBoxSource.h
+++ b/Filters/Sources/vtkTessellatedBoxSource.h
@@ -81,6 +81,13 @@ public:
   vtkGetMacro(Quads, int);
   vtkBooleanMacro(Quads, int);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
    vtkTessellatedBoxSource();
   ~vtkTessellatedBoxSource();
@@ -135,6 +142,7 @@ protected:
   int Level;
   int DuplicateSharedPoints;
   int Quads;
+  int OutputPointsPrecision;
 
 private:
   vtkTessellatedBoxSource(const vtkTessellatedBoxSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkTextSource.cxx b/Filters/Sources/vtkTextSource.cxx
index 36cf0b5..6e4b8fd 100644
--- a/Filters/Sources/vtkTextSource.cxx
+++ b/Filters/Sources/vtkTextSource.cxx
@@ -152,6 +152,7 @@ vtkTextSource::vtkTextSource()
   this->BackgroundColor[1] = 0.0;
   this->BackgroundColor[2] = 0.0;
   this->BackgroundColor[3] = 1.0;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts(0);
 }
@@ -208,6 +209,17 @@ int vtkTextSource::RequestData(
   x[2] = 0;
 
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPolys = vtkCellArray::New();
   newScalars = vtkUnsignedCharArray::New();
   newScalars->SetNumberOfComponents(4);
@@ -448,4 +460,6 @@ void vtkTextSource::PrintSelf(ostream& os, vtkIndent indent)
      << this->ForegroundColor[1] << ", " << this->ForegroundColor[2]  << ")\n";
   os << indent << "BackgroundColor: (" << this->BackgroundColor[0] << ", "
      << this->BackgroundColor[1] << ", " << this->BackgroundColor[2] << ")\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkTextSource.h b/Filters/Sources/vtkTextSource.h
index 9b42b11..a8ba107 100644
--- a/Filters/Sources/vtkTextSource.h
+++ b/Filters/Sources/vtkTextSource.h
@@ -67,6 +67,13 @@ public:
   vtkSetVector3Macro(BackgroundColor,double);
   vtkGetVectorMacro(BackgroundColor,double,3);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkTextSource();
   ~vtkTextSource();
@@ -76,6 +83,7 @@ protected:
   int  Backing;
   double ForegroundColor[4];
   double BackgroundColor[4];
+  int OutputPointsPrecision;
 private:
   vtkTextSource(const vtkTextSource&);  // Not implemented.
   void operator=(const vtkTextSource&);  // Not implemented.
diff --git a/Filters/Sources/vtkTexturedSphereSource.cxx b/Filters/Sources/vtkTexturedSphereSource.cxx
index 1f6469a..48ac2c9 100644
--- a/Filters/Sources/vtkTexturedSphereSource.cxx
+++ b/Filters/Sources/vtkTexturedSphereSource.cxx
@@ -36,6 +36,7 @@ vtkTexturedSphereSource::vtkTexturedSphereSource(int res)
   this->PhiResolution = res;
   this->Theta = 0.0;
   this->Phi = 0.0;
+  this->OutputPointsPrecision = SINGLE_PRECISION;
 
   this->SetNumberOfInputPorts(0);
 }
@@ -72,6 +73,17 @@ int vtkTexturedSphereSource::RequestData(
   numPolys = this->PhiResolution * 2 * this->ThetaResolution;
 
   newPoints = vtkPoints::New();
+
+  // Set the desired precision for the points in the output.
+  if(this->OutputPointsPrecision == vtkAlgorithm::DOUBLE_PRECISION)
+    {
+    newPoints->SetDataType(VTK_DOUBLE);
+    }
+  else
+    {
+    newPoints->SetDataType(VTK_FLOAT);
+    }
+
   newPoints->Allocate(numPts);
   newNormals = vtkFloatArray::New();
   newNormals->SetNumberOfComponents(3);
@@ -156,4 +168,6 @@ void vtkTexturedSphereSource::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Theta: " << this->Theta << "\n";
   os << indent << "Phi: " << this->Phi << "\n";
   os << indent << "Radius: " << this->Radius << "\n";
+  os << indent << "Output Points Precision: " << this->OutputPointsPrecision
+     << "\n";
 }
diff --git a/Filters/Sources/vtkTexturedSphereSource.h b/Filters/Sources/vtkTexturedSphereSource.h
index e72a8ab..6da24eb 100644
--- a/Filters/Sources/vtkTexturedSphereSource.h
+++ b/Filters/Sources/vtkTexturedSphereSource.h
@@ -64,9 +64,16 @@ public:
   vtkSetClampMacro(Phi,double,0.0,180.0);
   vtkGetMacro(Phi,double);
 
+  // Description:
+  // Set/get the desired precision for the output points.
+  // vtkAlgorithm::SINGLE_PRECISION - Output single-precision floating point.
+  // vtkAlgorithm::DOUBLE_PRECISION - Output double-precision floating point.
+  vtkSetMacro(OutputPointsPrecision,int);
+  vtkGetMacro(OutputPointsPrecision,int);
+
 protected:
   vtkTexturedSphereSource(int res=8);
-  ~vtkTexturedSphereSource() {};
+  ~vtkTexturedSphereSource() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   double Radius;
@@ -74,6 +81,7 @@ protected:
   double Phi;
   int ThetaResolution;
   int PhiResolution;
+  int OutputPointsPrecision;
 
 private:
   vtkTexturedSphereSource(const vtkTexturedSphereSource&);  // Not implemented.
diff --git a/Filters/Statistics/CMakeLists.txt b/Filters/Statistics/CMakeLists.txt
index aba9f1f..87f2545 100644
--- a/Filters/Statistics/CMakeLists.txt
+++ b/Filters/Statistics/CMakeLists.txt
@@ -4,6 +4,8 @@ set(Module_SRCS
   vtkContingencyStatistics.cxx
   vtkCorrelativeStatistics.cxx
   vtkDescriptiveStatistics.cxx
+  vtkHighestDensityRegionsStatistics.cxx
+  vtkExtractFunctionalBagPlot.cxx
   vtkKMeansDistanceFunctorCalculator.cxx
   vtkKMeansDistanceFunctor.cxx
   vtkKMeansStatistics.cxx
diff --git a/Filters/Statistics/Testing/Cxx/CMakeLists.txt b/Filters/Statistics/Testing/Cxx/CMakeLists.txt
index 8d6d26c..863fd85 100644
--- a/Filters/Statistics/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Statistics/Testing/Cxx/CMakeLists.txt
@@ -1,23 +1,13 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
   TestAutoCorrelativeStatistics.cxx
   TestCorrelativeStatistics.cxx
   TestContingencyStatistics.cxx
   TestDescriptiveStatistics.cxx
+  TestHighestDensityRegionsStatistics.cxx
+  TestExtractFunctionalBagPlot.cxx
   TestKMeansStatistics.cxx
   TestMultiCorrelativeStatistics.cxx
   TestOrderStatistics.cxx
   TestPCAStatistics.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/Statistics/Testing/Cxx/TestExtractFunctionalBagPlot.cxx b/Filters/Statistics/Testing/Cxx/TestExtractFunctionalBagPlot.cxx
new file mode 100644
index 0000000..9e9955e
--- /dev/null
+++ b/Filters/Statistics/Testing/Cxx/TestExtractFunctionalBagPlot.cxx
@@ -0,0 +1,141 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestExtractFunctionalBagPlot.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkDoubleArray.h"
+#include "vtkExtractFunctionalBagPlot.h"
+#include "vtkIntArray.h"
+#include "vtkMath.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkNew.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+
+#include <sstream>
+
+//----------------------------------------------------------------------------
+int TestExtractFunctionalBagPlot(int , char * [])
+{
+  // Create a table with some points in it...
+  vtkNew<vtkTable> table;
+
+  const int numCols = 20;
+  const int numPoints = 20;
+
+  for (int j = 0; j < numCols; j++)
+    {
+    vtkNew<vtkDoubleArray> arr;
+    std::stringstream ss;
+    ss << "Var" << j;
+    arr->SetName(ss.str().c_str());
+    arr->SetNumberOfValues(numPoints);
+    table->AddColumn(arr.GetPointer());
+    }
+
+  table->SetNumberOfRows(numPoints);
+
+  for (int j = 0; j < numCols; j++)
+    {
+    for (int i = 0; i < numPoints; i++)
+      {
+      table->SetValue(i, j, i * j);
+      }
+    }
+
+  cout << "\n## Input data table:\n";
+  table->Dump();
+
+  // Create a density table
+
+  vtkNew<vtkDoubleArray> density;
+  density->SetName("Density");
+  density->SetNumberOfValues(numCols);
+
+  vtkNew<vtkStringArray> varName;
+  varName->SetName("ColName");
+  varName->SetNumberOfValues(numCols);
+  for (int j = 0; j < numCols; j++)
+    {
+    double x = j * 8. / static_cast<double>(numCols) - 4.;
+    double y = (1. / sqrt(vtkMath::Pi() * 2.)) * exp(-(x*x) / 2.);
+    density->SetValue(j, y);
+
+    varName->SetValue(j, table->GetColumn(j)->GetName());
+    }
+
+  vtkNew<vtkTable> inTableDensity;
+  inTableDensity->AddColumn(density.GetPointer());
+  inTableDensity->AddColumn(varName.GetPointer());
+
+  cout << "\n## Input density table:\n";
+  inTableDensity->Dump();
+
+  vtkNew<vtkExtractFunctionalBagPlot> ebp;
+
+   // First verify that absence of input does not cause trouble
+  cout << "## Verifying that absence of input does not cause trouble... ";
+  ebp->Update();
+  cout << "done.\n";
+
+  ebp->SetInputData(0, table.GetPointer());
+  ebp->SetInputData(1, inTableDensity.GetPointer());
+  ebp->SetInputArrayToProcess(0, 1, 0,
+    vtkDataObject::FIELD_ASSOCIATION_ROWS, "Density");
+  ebp->SetInputArrayToProcess(1, 1, 0,
+    vtkDataObject::FIELD_ASSOCIATION_ROWS, "ColName");
+  ebp->Update();
+
+  cout << "\n## Results:" << endl;
+  vtkTable* outBPTable = ebp->GetOutput();
+  outBPTable->Dump();
+
+  vtkDoubleArray* q3Points =
+    vtkDoubleArray::SafeDownCast(outBPTable->GetColumnByName("Q3Points"));
+  vtkDoubleArray* q2Points =
+    vtkDoubleArray::SafeDownCast(outBPTable->GetColumnByName("QMedPoints"));
+
+  if (!q3Points || !q2Points)
+    {
+    cout << "## Failure: Missing Q3Points or QMedPoints columns!" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (q3Points->GetNumberOfTuples() != numPoints ||
+    q2Points->GetNumberOfTuples() != numPoints)
+    {
+    cout << "## Failure: Bad number of tuples in Q3Points or QMedPoints columns!" << endl;
+    return EXIT_FAILURE;
+    }
+
+  if (q3Points->GetNumberOfComponents() != 2 ||
+    q2Points->GetNumberOfComponents() != 2)
+    {
+    cout << "## Failure: Q3Points or QMedPoints does not have 2 components!" << endl;
+    return EXIT_FAILURE;
+    }
+
+  // Verify last values
+  double q3v[2];
+  q3Points->GetTuple(19, q3v);
+  double q2v[2];
+  q2Points->GetTuple(19, q2v);
+
+  if (q3v[0] != 38 || q3v[1] != 323 || q2v[0] != 95 || q2v[1] != 285)
+    {
+    cout << "## Failure: bad values found in Q3Points or QMedPoints" << endl;
+    return EXIT_FAILURE;
+    }
+  cout << "## Success!" << endl;
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Statistics/Testing/Cxx/TestHighestDensityRegionsStatistics.cxx b/Filters/Statistics/Testing/Cxx/TestHighestDensityRegionsStatistics.cxx
new file mode 100644
index 0000000..841a3d4
--- /dev/null
+++ b/Filters/Statistics/Testing/Cxx/TestHighestDensityRegionsStatistics.cxx
@@ -0,0 +1,119 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestHighestDensityRegionsStatistics.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkDoubleArray.h"
+#include "vtkIntArray.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkNew.h"
+#include "vtkSmartPointer.h"
+#include "vtkTable.h"
+#include "vtkHighestDensityRegionsStatistics.h"
+
+#include <sstream>
+
+//----------------------------------------------------------------------------
+int TestHighestDensityRegionsStatistics(int , char * [])
+{
+  vtkNew<vtkTable> table;
+
+  vtkNew<vtkDoubleArray> arrFirstVariable;
+  const char *namev1 = "Math";
+  arrFirstVariable->SetName(namev1);
+  table->AddColumn(arrFirstVariable.GetPointer());
+
+  vtkNew<vtkDoubleArray> arrSecondVariable;
+  const char *namev2 = "French";
+  arrSecondVariable->SetName(namev2);
+  table->AddColumn(arrSecondVariable.GetPointer());
+
+  vtkNew<vtkDoubleArray> arrThirdVariable;
+  const char *namev3 = "MG";
+  arrThirdVariable->SetName(namev3);
+  table->AddColumn(arrThirdVariable.GetPointer());
+
+  int numPoints = 20;
+  table->SetNumberOfRows(numPoints);
+
+  double MathValue[] =
+    {
+    18, 20, 20, 16,
+    12, 14, 16, 14,
+    14, 13, 16, 18,
+    6, 10, 16, 14,
+    4, 16, 16, 14
+    };
+
+  double FrenchValue[] =
+    {
+    14, 12, 14, 16,
+    12, 14, 16, 4,
+    4, 10, 6, 20,
+    14, 16, 14, 14,
+    12, 2, 14, 8
+    };
+
+  for (int i = 0; i < numPoints; ++i)
+    {
+    table->SetValue(i, 0, i); // Known Test Values
+    table->SetValue(i, 1, MathValue[i]);
+    table->SetValue(i, 2, FrenchValue[i]);
+    table->SetValue(i, 3, (MathValue[i] + FrenchValue[i]) / 2.0);
+    table->SetValue(i, 4, MathValue[i] - FrenchValue[i]);
+    }
+
+  // Run HDR
+  // Set HDR statistics algorithm and its input data port
+  vtkNew<vtkHighestDensityRegionsStatistics> hdrs;
+
+   // First verify that absence of input does not cause trouble
+  cout << "## Verifying that absence of input does not cause trouble... ";
+  hdrs->Update();
+  cout << "done.\n";
+
+  hdrs->SetInputData(vtkStatisticsAlgorithm::INPUT_DATA, table.GetPointer());
+  // Select Column Pairs of Interest ( Learn Mode )
+  // 1: a valid pair
+  hdrs->AddColumnPair(namev1, namev2);
+  // 2: another valid pair
+  hdrs->AddColumnPair(namev2, namev3);
+  // 3: an invalid pair
+  hdrs->AddColumnPair(namev2, "M3");
+
+  hdrs->SetLearnOption(true);
+  hdrs->SetDeriveOption(true);
+  hdrs->SetAssessOption(false);
+  hdrs->SetTestOption(false);
+  hdrs->Update();
+
+  cout << "\n## Result:\n";
+  vtkMultiBlockDataSet* outputMetaDS = vtkMultiBlockDataSet::SafeDownCast(
+      hdrs->GetOutputDataObject(vtkStatisticsAlgorithm::OUTPUT_MODEL ) );
+
+  vtkTable* outputMetaLearn = vtkTable::SafeDownCast(outputMetaDS->GetBlock(0));
+  outputMetaLearn->Dump();
+
+  std::stringstream ss;
+  ss << "HDR (" << namev1 << "," << namev2 << ")";
+  vtkDoubleArray* HDRArray = vtkDoubleArray::SafeDownCast(
+    outputMetaLearn->GetColumnByName(ss.str().c_str()));
+  if (!HDRArray)
+    {
+    cout << "Fail! The HDR column is missing from the result table!" << endl;
+    return EXIT_FAILURE;
+    }
+  cout << "## Done." << endl;
+
+  return EXIT_SUCCESS;
+}
diff --git a/Filters/Statistics/Testing/Python/CMakeLists.txt b/Filters/Statistics/Testing/Python/CMakeLists.txt
index 363ea98..9f1e497 100644
--- a/Filters/Statistics/Testing/Python/CMakeLists.txt
+++ b/Filters/Statistics/Testing/Python/CMakeLists.txt
@@ -1,9 +1,3 @@
 if(VTK_PYTHON_EXE)
-  foreach(tfile
-   kMeansDistanceCalculator
-   )
-    add_test(NAME ${vtk-module}Python-${tfile}
-      COMMAND ${VTK_PYTHON_EXE}
-        ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.py)
-  endforeach()
+  vtk_add_test_python(kMeansDistanceCalculator.py NO_DATA NO_VALID NO_OUTPUT)
 endif()
diff --git a/Filters/Statistics/vtkExtractFunctionalBagPlot.cxx b/Filters/Statistics/vtkExtractFunctionalBagPlot.cxx
new file mode 100644
index 0000000..81a814a
--- /dev/null
+++ b/Filters/Statistics/vtkExtractFunctionalBagPlot.cxx
@@ -0,0 +1,207 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkExtractFunctionalBagPlot.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkExtractFunctionalBagPlot.h"
+
+#include "vtkDataSet.h"
+#include "vtkDoubleArray.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+
+#include <algorithm>
+#include <vector>
+
+vtkStandardNewMacro(vtkExtractFunctionalBagPlot);
+
+//-----------------------------------------------------------------------------
+vtkExtractFunctionalBagPlot::vtkExtractFunctionalBagPlot()
+{
+  this->SetNumberOfInputPorts(2);
+}
+
+//-----------------------------------------------------------------------------
+vtkExtractFunctionalBagPlot::~vtkExtractFunctionalBagPlot()
+{
+}
+
+//-----------------------------------------------------------------------------
+void vtkExtractFunctionalBagPlot::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+}
+
+//-----------------------------------------------------------------------------
+class DensityVal
+{
+public:
+  DensityVal(double d, vtkAbstractArray* arr) : Density(d), Array(arr) {}
+  bool operator<(const DensityVal& b) const
+  {
+    return this->Density > b.Density;
+  }
+  double Density;
+  vtkAbstractArray* Array;
+};
+
+//-----------------------------------------------------------------------------
+int vtkExtractFunctionalBagPlot::RequestData(vtkInformation* /*request*/,
+                                   vtkInformationVector** inputVector,
+                                   vtkInformationVector* outputVector)
+{
+
+  vtkTable* inTable = vtkTable::GetData(inputVector[0]);
+  vtkTable* inTableDensity = vtkTable::GetData(inputVector[1]);
+  vtkTable* outTable = vtkTable::GetData(outputVector, 0);
+
+  if (!inTable)
+    {
+    vtkDebugMacro(<< "Update event called with no input table.");
+    return false;
+    }
+
+  vtkIdType inNbColumns = inTable->GetNumberOfColumns();
+
+  if (!inTableDensity)
+    {
+    vtkDebugMacro(<< "Update event called with no density input table.");
+    return false;
+    }
+
+  vtkDoubleArray *density = vtkDoubleArray::SafeDownCast(
+    this->GetInputAbstractArrayToProcess(0, inTableDensity));
+  if (!density)
+    {
+    vtkDebugMacro(<< "Update event called with non double density array.");
+    return false;
+    }
+
+  vtkStringArray *varName = vtkStringArray::SafeDownCast(
+    this->GetInputAbstractArrayToProcess(1, inTableDensity));
+  if (!varName)
+    {
+    vtkDebugMacro(<< "Update event called with no variable name array.");
+    return false;
+    }
+
+  vtkIdType nbPoints = varName->GetNumberOfValues();
+  // Sort the density array
+  std::vector<vtkIdType> ids;
+  ids.resize(nbPoints);
+  double sum = 0.0;
+  for (vtkIdType i = 0; i < nbPoints; i++)
+    {
+    sum += density->GetTuple1(i);
+    ids[i] = i;
+    }
+
+  vtkNew<vtkDoubleArray> nDensity;
+  // Normalize the density array if needed
+  if (fabs(sum - 1.0) > 1.0e-12)
+    {
+    sum = 1.0 / sum;
+    nDensity->SetNumberOfComponents(1);
+    nDensity->SetNumberOfTuples(nbPoints);
+    for (vtkIdType i = 0; i < nbPoints; i++)
+      {
+      nDensity->SetTuple1(i, density->GetTuple1(ids[i]) * sum);
+      }
+    density = nDensity.GetPointer();
+    }
+
+  // Fetch the series arrays and sort them by decreasing density
+  std::vector<DensityVal> varNames;
+  for (int i = 0; i < varName->GetNumberOfValues(); i++)
+    {
+    varNames.push_back(
+      DensityVal(density->GetValue(i),
+        inTable->GetColumnByName(varName->GetValue(i))));
+    }
+
+  std::sort(varNames.begin(), varNames.end());
+
+  std::vector<vtkAbstractArray*> medianLines;
+  std::vector<vtkAbstractArray*> q3Lines;
+
+  size_t nbVarNames = varNames.size();
+  for (size_t i = 0; i < nbVarNames; i++)
+    {
+    if (i <= static_cast<size_t>(nbPoints * 0.5))
+      {
+      medianLines.push_back(varNames[i].Array);
+      }
+    if (i <= static_cast<size_t>(nbPoints * 0.75))
+      {
+      q3Lines.push_back(varNames[i].Array);
+      }
+    else
+      {
+      break;
+      }
+    }
+
+  // Generate the quad strip arrays
+  vtkIdType nbRows = inTable->GetNumberOfRows();
+
+  vtkNew<vtkDoubleArray> q3Points;
+  q3Points->SetName("Q3Points");
+  q3Points->SetNumberOfComponents(2);
+  q3Points->SetNumberOfTuples(nbRows);
+
+  vtkNew<vtkDoubleArray> q2Points;
+  q2Points->SetName("QMedPoints");
+  q2Points->SetNumberOfComponents(2);
+  q2Points->SetNumberOfTuples(nbRows);
+
+  size_t medianCount = medianLines.size();
+  size_t q3Count = q3Lines.size();
+  for (vtkIdType i = 0; i < nbRows; i++)
+    {
+    double vMin = VTK_DOUBLE_MAX;
+    double vMax = VTK_DOUBLE_MIN;
+    for (size_t j = 0; j < medianCount; j++)
+      {
+      double v = medianLines[j]->GetVariantValue(i).ToDouble();
+      if (v < vMin) { vMin = v; }
+      if (v > vMax) { vMax = v; }
+      }
+    q2Points->SetTuple2(i, vMin, vMax);
+
+    vMin = VTK_DOUBLE_MAX;
+    vMax = VTK_DOUBLE_MIN;
+    for (size_t j = 0; j < q3Count; j++)
+      {
+      double v = q3Lines[j]->GetVariantValue(i).ToDouble();
+      if (v < vMin) { vMin = v; }
+      if (v > vMax) { vMax = v; }
+      }
+    q3Points->SetTuple2(i, vMin, vMax);
+    }
+
+  // Add the 2 "bag" columns into the output table
+  outTable->AddColumn(q3Points.GetPointer());
+  outTable->AddColumn(q2Points.GetPointer());
+
+  // Then append the input columns
+  for (vtkIdType i = 0; i < inNbColumns; i++)
+    {
+    outTable->AddColumn(inTable->GetColumn(i));
+    }
+
+  return 1;
+}
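
A minimal sketch of the bag-selection rule implemented above, restated on plain std::vector data (all names below are hypothetical and not part of VTK): the series are sorted by decreasing density, roughly the densest half forms the median bag and roughly the densest three quarters form the Q3 bag, and each output row stores the [min, max] envelope over the selected series.

#include <cstddef>
#include <utility>
#include <vector>

typedef std::vector<double> Series;

// sortedByDensity: series already sorted from highest to lowest density,
// as done with DensityVal above. keepCount: how many of the densest series
// belong to the bag (about half for the median bag, 3/4 for the Q3 bag).
static void ComputeBagEnvelope(const std::vector<Series>& sortedByDensity,
                               std::size_t keepCount,
                               std::vector<std::pair<double, double> >& bag)
{
  std::size_t nbRows = sortedByDensity.empty() ? 0 : sortedByDensity[0].size();
  bag.assign(nbRows, std::make_pair(0.0, 0.0));
  for (std::size_t row = 0; row < nbRows; ++row)
    {
    double vMin = sortedByDensity[0][row];
    double vMax = vMin;
    for (std::size_t s = 1; s < keepCount && s < sortedByDensity.size(); ++s)
      {
      double v = sortedByDensity[s][row];
      if (v < vMin) { vMin = v; }
      if (v > vMax) { vMax = v; }
      }
    bag[row] = std::make_pair(vMin, vMax); // the bag envelope at this row
    }
}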
diff --git a/Filters/Statistics/vtkExtractFunctionalBagPlot.h b/Filters/Statistics/vtkExtractFunctionalBagPlot.h
new file mode 100644
index 0000000..c4e8060
--- /dev/null
+++ b/Filters/Statistics/vtkExtractFunctionalBagPlot.h
@@ -0,0 +1,55 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkExtractFunctionalBagPlot.h
+
+  Copyright (c) Kitware, Inc.
+  All rights reserved.
+  See Copyright.txt or http://www.paraview.org/HTML/Copyright.html for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkExtractFunctionalBagPlot
+//
+// .SECTION Description
+// From an input table containing series on port 0 and another table
+// describing densities on port 1 (for instance obtained by applying the
+// vtkHighestDensityRegionsStatistics filter), this filter generates a
+// table containing all the columns of the input on port 0 plus two
+// 2-component columns containing the bag series to be used by
+// vtkFunctionalBagPlot.
+//
+// .SECTION See Also
+// vtkFunctionalBagPlot vtkHighestDensityRegionsStatistics
+
+#ifndef __vtkExtractFunctionalBagPlot_h
+#define __vtkExtractFunctionalBagPlot_h
+
+#include "vtkFiltersStatisticsModule.h" // For export macro
+#include "vtkTableAlgorithm.h"
+
+
+class VTKFILTERSSTATISTICS_EXPORT vtkExtractFunctionalBagPlot : public vtkTableAlgorithm
+{
+public:
+  static vtkExtractFunctionalBagPlot* New();
+  vtkTypeMacro(vtkExtractFunctionalBagPlot, vtkTableAlgorithm);
+  virtual void PrintSelf(ostream& os, vtkIndent indent);
+
+protected:
+  vtkExtractFunctionalBagPlot();
+  virtual ~vtkExtractFunctionalBagPlot();
+
+  int RequestData(vtkInformation*,
+    vtkInformationVector**,
+    vtkInformationVector*);
+
+private:
+  vtkExtractFunctionalBagPlot( const vtkExtractFunctionalBagPlot& ); // Not implemented.
+  void operator = ( const vtkExtractFunctionalBagPlot& ); // Not implemented.
+};
+
+#endif // __vtkExtractFunctionalBagPlot_h
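
For reference, the wiring exercised by TestExtractFunctionalBagPlot.cxx above condenses to the following hedged sketch; ExtractBags is a hypothetical helper, and the two input tables are assumed to be built as in the test ("Density" and "ColName" row arrays on port 1).

#include "vtkDataObject.h"
#include "vtkExtractFunctionalBagPlot.h"
#include "vtkNew.h"
#include "vtkTable.h"

// seriesTable: one column per series; densityTable: one row per series with
// its "Density" value and its matching column name in "ColName".
static void ExtractBags(vtkTable* seriesTable, vtkTable* densityTable,
                        vtkTable* bagTable)
{
  vtkNew<vtkExtractFunctionalBagPlot> bagPlot;
  bagPlot->SetInputData(0, seriesTable);
  bagPlot->SetInputData(1, densityTable);
  bagPlot->SetInputArrayToProcess(0, 1, 0,
    vtkDataObject::FIELD_ASSOCIATION_ROWS, "Density");
  bagPlot->SetInputArrayToProcess(1, 1, 0,
    vtkDataObject::FIELD_ASSOCIATION_ROWS, "ColName");
  bagPlot->Update();
  // The output keeps every input series column and gains the two bag
  // columns "Q3Points" and "QMedPoints".
  bagTable->ShallowCopy(bagPlot->GetOutput());
}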
diff --git a/Filters/Statistics/vtkHighestDensityRegionsStatistics.cxx b/Filters/Statistics/vtkHighestDensityRegionsStatistics.cxx
new file mode 100644
index 0000000..5b952c9
--- /dev/null
+++ b/Filters/Statistics/vtkHighestDensityRegionsStatistics.cxx
@@ -0,0 +1,314 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkHighestDensityRegionsStatistics.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkHighestDensityRegionsStatistics.h"
+
+#include "vtkDataArrayCollection.h"
+#include "vtkDoubleArray.h"
+#include "vtkInformation.h"
+#include "vtkMath.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkStatisticsAlgorithmPrivate.h"
+#include "vtkTable.h"
+
+#include <algorithm>
+#include <set>
+#include <sstream>
+
+vtkStandardNewMacro(vtkHighestDensityRegionsStatistics);
+
+// ----------------------------------------------------------------------
+vtkHighestDensityRegionsStatistics::vtkHighestDensityRegionsStatistics()
+{
+  // Initialize H smooth matrix to Identity.
+  this->SmoothHC1[0] = 1.0;
+  this->SmoothHC1[1] = 0.0;
+  this->SmoothHC2[0] = 0.0;
+  this->SmoothHC2[1] = 1.0;
+
+  // At construction, no column pairs have been requested yet
+  this->NumberOfRequestedColumnsPair = 0;
+}
+
+// ----------------------------------------------------------------------
+vtkHighestDensityRegionsStatistics::~vtkHighestDensityRegionsStatistics()
+{
+}
+
+// ----------------------------------------------------------------------
+void vtkHighestDensityRegionsStatistics::PrintSelf(ostream& os,
+                                                   vtkIndent indent)
+  {
+  this->Superclass::PrintSelf(os, indent);
+
+  os << indent << "Smooth matrix: " <<
+    this->SmoothHC1[0] << ", " <<
+    this->SmoothHC1[1] << ", " <<
+    this->SmoothHC2[0] << ", " <<
+    this->SmoothHC2[1] << "\n";
+  }
+
+// ----------------------------------------------------------------------
+void vtkHighestDensityRegionsStatistics::SetSigma(double sigma)
+{
+  if (this->SmoothHC1[0] == sigma &&
+    this->SmoothHC1[1] == 0.0 &&
+    this->SmoothHC2[0] == 0.0 &&
+    this->SmoothHC2[1] == sigma)
+    {
+    return;
+    }
+  // Force H matrix to be equal to sigma * Identity.
+  this->SmoothHC1[0] = sigma;
+  this->SmoothHC1[1] = 0.0;
+  this->SmoothHC2[0] = 0.0;
+  this->SmoothHC2[1] = sigma;
+  this->Modified();
+}
+
+// ----------------------------------------------------------------------
+void vtkHighestDensityRegionsStatistics::Learn(vtkTable* inData,
+                                               vtkTable* vtkNotUsed(inParameters),
+                                               vtkMultiBlockDataSet* outMeta)
+{
+  if (!inData || !outMeta)
+    {
+    return;
+    }
+
+  vtkNew<vtkTable> outputColumns;
+
+  std::set<std::set<vtkStdString> >::const_iterator reqIt;
+
+  // Make sure the number of requested pairs of columns is 0
+  // before the computation.
+  this->NumberOfRequestedColumnsPair = 0;
+
+  // Populate outputColumns with columns that are requested from
+  // the input dataset
+  for (reqIt = this->Internals->Requests.begin();
+    reqIt != this->Internals->Requests.end(); ++ reqIt)
+    {
+    // Each request contains only one pair of columns of interest
+    // (if there are others, they are ignored).
+    std::set<vtkStdString>::const_iterator colIt = reqIt->begin();
+    const vtkStdString &colY = *colIt;
+    if (!inData->GetColumnByName(colY.c_str()))
+      {
+      vtkWarningMacro("InData table does not have a column "
+        << colY.c_str()
+        << ". Ignoring this pair.");
+      continue;
+      }
+
+    ++colIt;
+    const vtkStdString &colX = *colIt;
+    if (!inData->GetColumnByName(colX.c_str()))
+      {
+      vtkWarningMacro("InData table does not have a column "
+        << colX.c_str()
+        << ". Ignoring this pair.");
+      continue;
+      }
+
+    // Verify column types
+    vtkDataArray *inputColX =
+      vtkDataArray::SafeDownCast(inData->GetColumnByName(colX.c_str()));
+    vtkDataArray *inputColY =
+      vtkDataArray::SafeDownCast(inData->GetColumnByName(colY.c_str()));
+    if (!inputColX || !inputColY)
+      {
+      vtkErrorMacro(
+        << "HDR cannot work with columns that are not of vtkDataArray type");
+      return;
+      }
+
+    vtkDataArray* arrX =
+      vtkDataArray::CreateDataArray(inputColX->GetDataType());
+    arrX->DeepCopy(inputColX);
+    arrX->SetName(inputColX->GetName());
+    outputColumns->AddColumn(arrX);
+
+    vtkDataArray* arrY =
+      vtkDataArray::CreateDataArray(inputColY->GetDataType());
+    arrY->DeepCopy(inputColY);
+    arrY->SetName(inputColY->GetName());
+    outputColumns->AddColumn(arrY);
+
+    // Compute the density estimator for each observation of the two columns.
+    // Create a double array with one component per column of the requested
+    // pair; each tuple will contain the corresponding values, cast into
+    // doubles if necessary.
+
+    vtkNew<vtkDoubleArray> inObservations;
+    inObservations->SetNumberOfComponents(2);
+    inObservations->SetNumberOfTuples(outputColumns->GetNumberOfRows());
+
+    inObservations->CopyComponent(0, inputColX, 0);
+    inObservations->CopyComponent(1, inputColY, 0);
+
+    // outObservations stores the density vector
+    vtkDataArray* outObservations =
+      vtkDataArray::CreateDataArray(inObservations->GetDataType());
+    outObservations->SetNumberOfComponents(1);
+    outObservations->SetNumberOfTuples(inObservations->GetNumberOfTuples());
+
+    double sum = this->ComputeHDR(inObservations.GetPointer(), outObservations);
+    std::stringstream ss;
+    ss <<"HDR (" << inputColX->GetName() << "," << inputColY->GetName() << ")";
+    outObservations->SetName(ss.str().c_str());
+    outputColumns->AddColumn(outObservations);
+
+    if (sum != 0.0)
+      {
+      sum = 1.0 / sum;
+
+      // Create the normalized HDR array.
+      vtkNew<vtkDoubleArray> normalizedHDR;
+      std::stringstream sst;
+      sst << "Normalized " << outObservations->GetName();
+      normalizedHDR->SetName(sst.str().c_str());
+      normalizedHDR->SetNumberOfComponents(1);
+      vtkIdType nbObservations = outObservations->GetNumberOfTuples();
+      normalizedHDR->SetNumberOfTuples(nbObservations);
+      for (vtkIdType j = 0; j < nbObservations; j++)
+        {
+        normalizedHDR->SetTuple1(j, sum * outObservations->GetTuple1(j));
+        }
+
+      outputColumns->AddColumn(normalizedHDR.GetPointer());
+      }
+
+    arrX->Delete();
+    arrY->Delete();
+    outObservations->Delete();
+
+    // One requested pair of columns has been added.
+    this->NumberOfRequestedColumnsPair++;
+    } // End requests iteration.
+
+  outMeta->SetNumberOfBlocks(1);
+  outMeta->SetBlock(0, outputColumns.GetPointer());
+  vtkInformation* info =
+    outMeta->GetMetaData(static_cast<unsigned int>(0));
+  info->Set(vtkCompositeDataSet::NAME(), "Estimator of density Data");
+}
+
+// ----------------------------------------------------------------------
+void vtkHighestDensityRegionsStatistics::Derive(vtkMultiBlockDataSet*)
+{
+}
+
+// ----------------------------------------------------------------------
+double vtkHighestDensityRegionsStatistics::ComputeHDR(vtkDataArray *inObs,
+                                                      vtkDataArray *outDensity)
+{
+  vtkIdType nbObservations = inObs->GetNumberOfTuples();
+
+  if (nbObservations == 0)
+    {
+    vtkErrorMacro(<< "Empty observation array");
+    return 0.0;
+    }
+  double sum = 0.0;
+
+  double denom = 1.0 / static_cast<double>(nbObservations);
+
+  // Compute the HDR for each point of the observations
+  for (vtkIdType i = 0; i < nbObservations; i++)
+    {
+    double currentXi[2];
+    double currentXj[2];
+    double hdr = 0.0;
+
+    // We are working in a bivariate model.
+    inObs->GetTuple(i, currentXi);
+    // Sum the Gaussian kernel contributions of all other points
+    for (vtkIdType j = 0; j < nbObservations; j++)
+      {
+      // Avoid the case where a point is compared to itself
+      if (i == j)
+        {
+        continue;
+        }
+      inObs->GetTuple(j, currentXj);
+
+      hdr += this->ComputeSmoothGaussianKernel(
+        inObs->GetNumberOfComponents(),
+        currentXi[0] - currentXj[0],
+        currentXi[1] - currentXj[1]);
+      }
+    double d = denom * hdr;
+    outDensity->SetTuple1(i, d);
+    sum += d;
+    }
+
+  return sum;
+}
+
+// ----------------------------------------------------------------------
+double vtkHighestDensityRegionsStatistics::ComputeSmoothGaussianKernel(
+  int dimension, double khx, double khy)
+{
+  double HDeterminant =
+    vtkMath::Determinant2x2(this->SmoothHC1, this->SmoothHC2);
+  if (HDeterminant > 0.0)
+    {
+    HDeterminant = 1.0 / sqrt(HDeterminant);
+    }
+
+  // Map the input vector through the reciprocal square roots of the smooth
+  // H matrix coefficients: this gives the random vector passed to the
+  // standard Gaussian kernel.
+
+  // If an H coefficient is equal to 0.0, its reciprocal square root is not
+  // computed (the term is left at 0.0) to avoid a division by zero.
+  double SHC10 = 0.0;
+  double SHC11 = 0.0;
+  double SHC20 = 0.0;
+  double SHC21 = 0.0;
+
+  if (this->SmoothHC1[0] != 0.0)
+    {
+    SHC10 = 1.0 / sqrt(this->SmoothHC1[0]);
+    }
+  if (this->SmoothHC1[1] != 0.0)
+    {
+    SHC11 = 1.0 / sqrt(this->SmoothHC1[1]);
+    }
+  if (this->SmoothHC2[0] != 0.0)
+    {
+    SHC20 = 1.0 / sqrt(this->SmoothHC2[0]);
+    }
+  if (this->SmoothHC2[1] != 0.0)
+    {
+    SHC21 = 1.0 / sqrt(this->SmoothHC2[1]);
+    }
+
+  // Call the standard gaussian kernel with the new random vector.
+  return HDeterminant *
+    this->ComputeStandardGaussianKernel(dimension,
+    SHC10 * khx + SHC11 * khy,
+    SHC20 * khx + SHC21 * khy);
+}
+
+// ----------------------------------------------------------------------
+double vtkHighestDensityRegionsStatistics::ComputeStandardGaussianKernel(
+  int vtkNotUsed(dimension), double kx, double ky)
+{
+  return exp(-(kx * kx + ky * ky) / 2.0) / (2.0 * vtkMath::Pi());
+}
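
For the isotropic case H = sigma * Id configured through SetSigma(), the estimator implemented above reduces to f(Xi) = (1/n) * sum over j != i of exp(-|Xi - Xj|^2 / (2 * sigma)) / (2 * pi * sigma). A minimal self-contained sketch of that special case follows; the names here are hypothetical and are not part of VTK.

#include <cmath>
#include <cstddef>
#include <vector>

struct Point2D { double x; double y; };

// Bivariate Gaussian kernel density estimate with H = sigma * Id (sigma > 0),
// mirroring ComputeHDR + ComputeSmoothGaussianKernel for that case.
static std::vector<double> ComputeIsotropicHDR(const std::vector<Point2D>& obs,
                                               double sigma)
{
  const double twoPi = 2.0 * 3.14159265358979323846;
  std::vector<double> density(obs.size(), 0.0);
  for (std::size_t i = 0; i < obs.size(); ++i)
    {
    double sum = 0.0;
    for (std::size_t j = 0; j < obs.size(); ++j)
      {
      if (i == j) { continue; } // skip the point itself, as in ComputeHDR
      double dx = obs[i].x - obs[j].x;
      double dy = obs[i].y - obs[j].y;
      // KH(x) with H = sigma * Id: (1 / sigma) * K(x / sqrt(sigma))
      sum += std::exp(-(dx * dx + dy * dy) / (2.0 * sigma)) / (twoPi * sigma);
      }
    density[i] = sum / static_cast<double>(obs.size());
    }
  return density;
}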
diff --git a/Filters/Statistics/vtkHighestDensityRegionsStatistics.h b/Filters/Statistics/vtkHighestDensityRegionsStatistics.h
new file mode 100644
index 0000000..7080261
--- /dev/null
+++ b/Filters/Statistics/vtkHighestDensityRegionsStatistics.h
@@ -0,0 +1,144 @@
+/*=========================================================================
+
+Program:   Visualization Toolkit
+Module:    vtkHighestDensityRegionsStatistics.h
+
+Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+All rights reserved.
+See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+This software is distributed WITHOUT ANY WARRANTY; without even
+the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkHighestDensityRegionsStatistics - Compute the density f of a
+// random vector from input observation points. f is computed using a smooth
+// kernel method.
+//
+// .SECTION Description
+// Given a selection of pairs of columns of interest, this class provides the
+// following functionalities, depending on the chosen execution options:
+// * Learn: calculates the density estimator f of a random vector using a
+//   smooth Gaussian kernel. The output metadata on port OUTPUT_MODEL is a
+//   multiblock dataset containing one vtkTable in which the first columns
+//   are the input columns of interest and the last columns are the density
+//   estimators of each input pair of columns of interest.
+// * Derive: calculates normalized (as a percentage) quantiles from the
+//   Learn output. The second block of the multiblock dataset contains a
+//   vtkTable holding pairs of columns: the second column of each pair holds
+//   the quantiles ordered from strongest to weakest, and the first column
+//   holds the corresponding quantile index.
+// * Assess: not implemented.
+// * Test: not implemented.
+
+#ifndef __vtkHighestDensityRegionsStatistics_h
+#define __vtkHighestDensityRegionsStatistics_h
+
+#include "vtkFiltersStatisticsModule.h" // For export macro
+#include "vtkStatisticsAlgorithm.h"
+
+class vtkMultiBlockDataSet;
+class vtkVariant;
+
+class VTKFILTERSSTATISTICS_EXPORT vtkHighestDensityRegionsStatistics :
+  public vtkStatisticsAlgorithm
+{
+public:
+  vtkTypeMacro(vtkHighestDensityRegionsStatistics, vtkStatisticsAlgorithm);
+  virtual void PrintSelf( ostream& os, vtkIndent indent );
+  static vtkHighestDensityRegionsStatistics* New();
+
+  // Description: (Not implemented)
+  // Given a collection of models, calculate aggregate model
+  virtual void Aggregate(vtkDataObjectCollection*,
+                         vtkMultiBlockDataSet*) { return; }
+
+  // Description:
+  // H is a positive matrix that defines the smoothing direction.
+  // In a classical HDR, no specific smoothing direction is set for the
+  // H matrix parameter (SmoothHC1, SmoothHC2). That means H is in
+  // diagonal form and equal to sigma * Id.
+  void SetSigma(double sigma);
+
+  // Description:
+  // Set/Get the smooth H matrix parameter of the HDR.
+  vtkGetVectorMacro(SmoothHC1, double, 2);
+  vtkSetVectorMacro(SmoothHC1, double, 2);
+  vtkGetVectorMacro(SmoothHC2, double, 2);
+  vtkSetVectorMacro(SmoothHC2, double, 2);
+
+protected:
+  vtkHighestDensityRegionsStatistics();
+  ~vtkHighestDensityRegionsStatistics();
+
+  // Description:
+  // Execute the calculations required by the Learn option.
+  virtual void Learn(vtkTable*,
+                     vtkTable*,
+                     vtkMultiBlockDataSet*);
+
+  // Description:
+  // Execute the calculations required by the Derive option.
+  virtual void Derive(vtkMultiBlockDataSet*);
+
+  // Description: (Not implemented)
+  // Execute the calculations required by the Assess option.
+  virtual void Assess(vtkTable*,
+                      vtkMultiBlockDataSet*,
+                      vtkTable*) { return; }
+
+  // Description: (Not implemented)
+  // Execute the calculations required by the Test option.
+  virtual void Test(vtkTable*,
+                    vtkMultiBlockDataSet*,
+                    vtkTable*) { return; }
+
+//BTX
+  // Description: (Not implemented)
+  // Provide the appropriate assessment functor.
+  virtual void SelectAssessFunctor(vtkTable*,
+                                   vtkDataObject*,
+                                   vtkStringArray*,
+                                   AssessFunctor*&) { return; }
+//ETX
+
+  // Description:
+  // Fill outDensity with the density vector computed from the
+  // inObservations values. This method uses a Gaussian kernel.
+  // For n observations and an observation point X:
+  // f(X) = (1 / n) * Sum(KH(X - Xi)) for i = 1 to n.
+  // See ComputeSmoothGaussianKernel for the KH kernel definition.
+  double ComputeHDR(vtkDataArray *inObservations, vtkDataArray *outDensity);
+
+  // Description:
+  // Store the smooth matrix parameter H. Specify a smooth direction
+  // for the Gaussian kernel.
+  double SmoothHC1[2];
+  double SmoothHC2[2];
+
+  // Description:
+  // Store the number of requested column pairs processed by the Learn method.
+  vtkIdType NumberOfRequestedColumnsPair;
+
+private:
+  // Description:
+  // Helper that returns the smooth Gaussian kernel of a vector of dimension
+  // two, given its coordinates. For X = [khx, khy] and H a positive matrix of
+  // dimension 2, KH(X) = (1 / sqrt(det(H))) * K((1 / sqrt(H)) * X).
+  // See ComputeStandardGaussianKernel for the K kernel definition.
+  double ComputeSmoothGaussianKernel(int dimension, double khx, double khy);
+
+  // Description:
+  // Helper that returns the standard Gaussian kernel of a vector of dimension
+  // two, given its coordinates. For X = [kx, ky],
+  // K(X) = (1 / (2 * PI)) * exp(-<X,X> / 2).
+  double ComputeStandardGaussianKernel(int dimension, double kx, double ky);
+
+private:
+  vtkHighestDensityRegionsStatistics(const vtkHighestDensityRegionsStatistics&); // Not implemented
+  void operator = (const vtkHighestDensityRegionsStatistics&);  // Not implemented
+};
+
+#endif
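
For reference, the Learn path exercised by TestHighestDensityRegionsStatistics.cxx above condenses to the following hedged sketch; LearnHDR is a hypothetical helper, and the "Math"/"French" column names are placeholders taken from that test.

#include "vtkHighestDensityRegionsStatistics.h"
#include "vtkMultiBlockDataSet.h"
#include "vtkNew.h"
#include "vtkStatisticsAlgorithm.h"
#include "vtkTable.h"

// table: the observations; learned: receives block 0 of the output model.
static void LearnHDR(vtkTable* table, vtkTable* learned)
{
  vtkNew<vtkHighestDensityRegionsStatistics> hdr;
  hdr->SetInputData(vtkStatisticsAlgorithm::INPUT_DATA, table);
  hdr->AddColumnPair("Math", "French"); // one density column per requested pair
  hdr->SetLearnOption(true);
  hdr->SetDeriveOption(true);
  hdr->SetAssessOption(false);
  hdr->SetTestOption(false);
  hdr->Update();
  vtkMultiBlockDataSet* model = vtkMultiBlockDataSet::SafeDownCast(
    hdr->GetOutputDataObject(vtkStatisticsAlgorithm::OUTPUT_MODEL));
  // Block 0 holds the learned table, including the "HDR (Math,French)" column.
  learned->ShallowCopy(vtkTable::SafeDownCast(model->GetBlock(0)));
}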
diff --git a/Filters/Statistics/vtkMultiCorrelativeStatistics.cxx b/Filters/Statistics/vtkMultiCorrelativeStatistics.cxx
index 4ce90fc..ea5c9ac 100644
--- a/Filters/Statistics/vtkMultiCorrelativeStatistics.cxx
+++ b/Filters/Statistics/vtkMultiCorrelativeStatistics.cxx
@@ -43,7 +43,7 @@ void vtkMultiCorrelativeStatistics::PrintSelf( ostream& os, vtkIndent indent )
 }
 
 // ----------------------------------------------------------------------
-void vtkMultiCorrelativeInvertCholesky( vtksys_stl::vector<double*>& chol, vtksys_stl::vector<double>& inv )
+static void vtkMultiCorrelativeInvertCholesky( vtksys_stl::vector<double*>& chol, vtksys_stl::vector<double>& inv )
 {
   vtkIdType m = static_cast<vtkIdType>( chol.size() );
   inv.resize( m * ( m + 1 ) / 2 );
@@ -72,7 +72,7 @@ void vtkMultiCorrelativeInvertCholesky( vtksys_stl::vector<double*>& chol, vtksy
 }
 
 // ----------------------------------------------------------------------
-void vtkMultiCorrelativeTransposeTriangular( vtksys_stl::vector<double>& a, vtkIdType m )
+static void vtkMultiCorrelativeTransposeTriangular( vtksys_stl::vector<double>& a, vtkIdType m )
 {
   vtksys_stl::vector<double> b( a.begin(), a.end() );
   double* bp = &b[0];
@@ -430,7 +430,7 @@ void vtkMultiCorrelativeStatistics::Learn( vtkTable* inData,
 }
 
 // ----------------------------------------------------------------------
-void vtkMultiCorrelativeCholesky( vtksys_stl::vector<double*>& a, vtkIdType m )
+static void vtkMultiCorrelativeCholesky( vtksys_stl::vector<double*>& a, vtkIdType m )
 {
   // First define some macros to make the Cholesky decomposition algorithm legible:
 #ifdef A
diff --git a/Filters/StatisticsGnuR/CMakeLists.txt b/Filters/StatisticsGnuR/CMakeLists.txt
index 4f61dca..9670bb7 100644
--- a/Filters/StatisticsGnuR/CMakeLists.txt
+++ b/Filters/StatisticsGnuR/CMakeLists.txt
@@ -62,4 +62,4 @@ configure_file(${VTK_CMAKE_DIR}/vtkObjectFactory.cxx.in
 
 vtk_module_library(vtkFiltersStatisticsGnuR ${Module_SRCS})
 
-target_link_libraries(vtkFiltersStatisticsGnuR ${R_LIBRARIES})
+target_link_libraries(vtkFiltersStatisticsGnuR LINK_PRIVATE ${R_LIBRARIES})
diff --git a/Filters/StatisticsGnuR/Testing/Cxx/TestRCalculatorFilter.cxx b/Filters/StatisticsGnuR/Testing/Cxx/TestRCalculatorFilter.cxx
index b3a6d23..28a7cfc 100644
--- a/Filters/StatisticsGnuR/Testing/Cxx/TestRCalculatorFilter.cxx
+++ b/Filters/StatisticsGnuR/Testing/Cxx/TestRCalculatorFilter.cxx
@@ -223,7 +223,7 @@ int TestRCalculatorFilter(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
       }
 
     //check vertex data
-    const char *  t_names[] ={"","a","b","c","",""};
+    const char *  t_names[] ={"a","b","c","","",""};
     for (int i = 0; i < outTree->GetNumberOfVertices(); i++)
       {
       vtkStringArray * names = vtkStringArray::SafeDownCast(outTree->GetVertexData()->GetAbstractArray("node name"));
diff --git a/Filters/StatisticsGnuR/Testing/Cxx/TestRInterface.cxx b/Filters/StatisticsGnuR/Testing/Cxx/TestRInterface.cxx
index 74a67a7..9485050 100644
--- a/Filters/StatisticsGnuR/Testing/Cxx/TestRInterface.cxx
+++ b/Filters/StatisticsGnuR/Testing/Cxx/TestRInterface.cxx
@@ -37,7 +37,7 @@
 #include <vtksys/stl/stdexcept>
 #include <stdio.h>
 #include <string.h>
-#include <assert.h>
+#include <cassert>
 
 namespace
 {
@@ -193,7 +193,7 @@ int TestRInterface(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
 
     // check edge
     vtkDoubleArray* r_edge = vtkDoubleArray::SafeDownCast(rint->AssignRVariableToVTKDataArray("edge"));
-    int EDGE_ARRAY[4][2] = { {4,5},{4,3},{5,1},{5,2}};
+    int EDGE_ARRAY[5][2] = { {4,5},{5,6},{5,3},{6,1},{6,2} };
     for ( int i = 0; i< r_edge->GetNumberOfTuples(); i++)
       {
       double * a = r_edge->GetTuple(i);
@@ -202,7 +202,7 @@ int TestRInterface(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
       }
     //check Nnode
     vtkDoubleArray* r_Nnode= vtkDoubleArray::SafeDownCast(rint->AssignRVariableToVTKDataArray("Nnode"));
-    test_expression(doubleEquals(r_Nnode->GetValue(0),double(2), 0.001));
+    test_expression(doubleEquals(r_Nnode->GetValue(0),double(3), 0.001));
 
     //check tip_label, node.label
     /* TODO: implement R <=> VTKStringArray, so that the following function can be called:
@@ -211,7 +211,7 @@ int TestRInterface(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
 
     //check edge_length
     vtkDoubleArray* r_edge_length= vtkDoubleArray::SafeDownCast(rint->AssignRVariableToVTKDataArray("edge_length"));
-    double e_weights[4] = {2.0,3.0,1.0,1.0};
+    double e_weights[5] = {1.0,2.0,3.0,1.0,1.0};
     for (int i = 0; i < r_edge_length->GetNumberOfTuples(); i++)
       {
       double * r_weights = r_edge_length->GetTuple(i);
@@ -227,15 +227,14 @@ int TestRInterface(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
 
 
     //check edge data
-    double v_weights[5] = {0.0,2.0,3.0,1.0,1.0};
     for (int i = 0; i < vtk_tr->GetNumberOfEdges(); i++)
       {
       vtkDoubleArray * weights = vtkDoubleArray::SafeDownCast(vtk_tr->GetEdgeData()->GetArray("weight"));
-      test_expression(doubleEquals(weights->GetValue(i),double( v_weights[i]), 0.001));
+      test_expression(doubleEquals(weights->GetValue(i),double( e_weights[i]), 0.001));
       }
 
     //check vertex data
-    const char *  t_names[] ={"","a","b","c","",""};
+    const char *  t_names[] ={"a","b","c","","",""};
     for (int i = 0; i < vtk_tr->GetNumberOfVertices(); i++)
       {
       vtkStringArray * names = vtkStringArray::SafeDownCast(vtk_tr->GetVertexData()->GetAbstractArray("node name"));
diff --git a/Filters/StatisticsGnuR/Testing/Python/CMakeLists.txt b/Filters/StatisticsGnuR/Testing/Python/CMakeLists.txt
new file mode 100644
index 0000000..5d16035
--- /dev/null
+++ b/Filters/StatisticsGnuR/Testing/Python/CMakeLists.txt
@@ -0,0 +1,10 @@
+if(VTK_PYTHON_EXE)
+  set(tests
+    TestRCalculatorFilter
+    )
+  foreach(tfile ${tests})
+    vtk_add_test_python(${tfile}.py NO_VALID NO_RT NO_OUTPUT NO_DATA)
+  endforeach()
+else()
+  message(FATAL_ERROR "No Python tests added!")
+endif()
diff --git a/Filters/StatisticsGnuR/Testing/Python/TestRCalculatorFilter.py b/Filters/StatisticsGnuR/Testing/Python/TestRCalculatorFilter.py
new file mode 100644
index 0000000..881af6e
--- /dev/null
+++ b/Filters/StatisticsGnuR/Testing/Python/TestRCalculatorFilter.py
@@ -0,0 +1,245 @@
+import vtk
+import vtk.test.Testing
+
+tree_data = "((((((((ahli:0.1308887296,allogus:0.1308887296):0.109078899,rubribarbus:0.2399676286):0.3477240729,imias:0.5876917015):0.1279779191,((((sagrei:0.2576204042,(bremeri:0.1097436524,quadriocellifer:0.1097436524):0.1478767518):0.06150599843,ophiolepis:0.3191264027):0.08721921759,mestrei:0.4063456203):0.1298140501,(((jubar:0.1188659524,homolechis:0.1188659524):0.09052271908,confusus:0.2093886715):0.04215577182,guafe:0.2515444433):0.2846152271):0.1795099503):0.1377237125,((((garman [...]
+
+def treeEquals(tree1, tree2):
+
+    tree1Iter = vtk.vtkTreeDFSIterator()
+    tree1Iter.SetTree(tree1)
+    tree2Iter = vtk.vtkTreeDFSIterator()
+    tree2Iter.SetTree(tree2)
+
+    while(tree1Iter.HasNext()):
+        if not tree2Iter.HasNext():
+            return False
+
+        if tree1.GetNumberOfChildren(tree1Iter.Next()) != tree2.GetNumberOfChildren(tree2Iter.Next()):
+            return False
+
+    if tree2Iter.HasNext():
+        return False
+
+
+    return True
+
+class TestRCalculatorFilter(vtk.test.Testing.vtkTest):
+
+    def testTableOutput(self):
+        rcal = vtk.vtkRCalculatorFilter()
+        rcal.SetRscript("output = list(test=c(1,2,3,4))\n");
+        rcal.GetTable('output')
+        input = vtk.vtkTable()
+        rcal.SetInputData(input)
+
+        rcal.Update()
+
+        t1 = rcal.GetOutput().GetColumnByName('test')
+        value = 1
+
+        for i in range(0, t1.GetNumberOfTuples()):
+            self.assertEqual(value, t1.GetValue(i))
+            value += 1
+
+
+    def testTreeOutput(self):
+        tree_reader = vtk.vtkNewickTreeReader()
+        tree_reader.ReadFromInputStringOn()
+        tree_reader.SetInputString(tree_data)
+
+
+        rcal = vtk.vtkRCalculatorFilter()
+
+        rcal.SetInputConnection(tree_reader.GetOutputPort())
+        rcal.SetRscript("library(ape)\n\
+                         output = read.tree(text=\"" + tree_data + "\")\n");
+        rcal.GetTree('output')
+        rcal.Update()
+
+        expected_tree = tree_reader.GetOutput()
+
+        self.assertTrue(treeEquals(expected_tree, rcal.GetOutput()))
+
+    def testTableInputOutput(self):
+        rcal = vtk.vtkRCalculatorFilter()
+        rcal.SetRscript("output = input\n");
+        rcal.PutTable('input')
+        rcal.GetTable('output')
+
+        value = 1
+        array = vtk.vtkDoubleArray()
+        array.SetNumberOfComponents(1)
+        array.SetNumberOfTuples(4)
+        array.SetName('test')
+        for i in range(0, 4):
+            array.SetValue(i, value)
+            value += 1
+
+        input = vtk.vtkTable()
+        input.AddColumn(array)
+        rcal.SetInputData(input)
+
+        rcal.Update()
+
+        t1 = rcal.GetOutput().GetColumnByName('test')
+        value = 1
+
+        for i in range(0, t1.GetNumberOfTuples()):
+            self.assertEqual(value, t1.GetValue(i))
+            value += 1
+
+    def testMultiTableOutputs(self):
+        outputs = vtk.vtkStringArray()
+        outputs.SetNumberOfComponents(1)
+        outputs.SetNumberOfTuples(3)
+        outputs.SetValue(0, "output1")
+        outputs.SetValue(1, "output2")
+        outputs.SetValue(2, "output3")
+        rcal = vtk.vtkRCalculatorFilter()
+        rcal.SetRscript("output1 = list(test=c(1,2,3,4))\n\
+                         output2 = list(test=c(5,6,7,8))\n\
+                         output3 = list(test=c(9,10,11,12))\n");
+
+        rcal.GetTables(outputs)
+
+        input = vtk.vtkTable()
+        rcal.SetInputData(input)
+
+        rcal.Update()
+
+
+        t1 = rcal.GetOutput().GetPieceAsDataObject(0).GetColumnByName('test')
+        value = 1
+
+        for i in range(0, t1.GetNumberOfTuples()):
+            self.assertEqual(value, t1.GetValue(i))
+            value += 1
+
+        t2  = rcal.GetOutput().GetPieceAsDataObject(1).GetColumnByName('test')
+        for i in range(0, t2.GetNumberOfTuples()):
+            self.assertEqual(value, t2.GetValue(i))
+            value += 1
+
+        t3  = rcal.GetOutput().GetPieceAsDataObject(2).GetColumnByName('test')
+        for i in range(0, t3.GetNumberOfTuples()):
+            self.assertEqual(value, t3.GetValue(i))
+            value += 1
+
+    def testMultiTreeOutputs(self):
+        outputs = vtk.vtkStringArray()
+        outputs.SetNumberOfComponents(1)
+        outputs.SetNumberOfTuples(2)
+        outputs.SetValue(0, "tree1")
+        outputs.SetValue(1, "tree2")
+        rcal = vtk.vtkRCalculatorFilter()
+
+
+        rcal.SetRscript("library(ape)\n\
+                         tree1 = read.tree(text=\"" + tree_data + "\")\n\
+                         tree2 = read.tree(text=\"" + tree_data + "\")\n")
+
+        rcal.GetTrees(outputs)
+
+        input = vtk.vtkTable()
+        rcal.SetInputData(input)
+
+        rcal.Update()
+
+        compo = rcal.GetOutput()
+        tree1 = compo.GetPieceAsDataObject(0)
+        self.assertTrue(tree1.IsA('vtkTree'))
+        tree2 = compo.GetPieceAsDataObject(1)
+        self.assertTrue(tree2.IsA('vtkTree'))
+
+
+
+
+    def testMultiTableInputs(self):
+        inputs = vtk.vtkStringArray()
+        inputs.SetNumberOfComponents(1)
+        inputs.SetNumberOfTuples(3)
+        inputs.SetValue(0, "input1")
+        inputs.SetValue(1, "input2")
+        inputs.SetValue(2, "input3")
+
+        outputs = vtk.vtkStringArray()
+        outputs.SetNumberOfComponents(1)
+        outputs.SetNumberOfTuples(3)
+        outputs.SetValue(0, "output1")
+        outputs.SetValue(1, "output2")
+        outputs.SetValue(2, "output3")
+
+        rcal = vtk.vtkRCalculatorFilter()
+        # Copy input to output for validation
+        rcal.SetRscript("output1 = input1\n\
+                         print(input1[[1]])\n\
+                         output2 = input2\n\
+                         output3 = input3\n");
+
+        rcal.GetTables(outputs)
+        rcal.PutTables(inputs)
+
+        value = 1
+        array1 = vtk.vtkDoubleArray()
+        array1.SetNumberOfComponents(1)
+        array1.SetNumberOfTuples(4)
+        array1.SetName('test')
+        for i in range(0, 4):
+            array1.SetValue(i, value)
+            value += 1
+
+        array2 = vtk.vtkDoubleArray()
+        array2.SetNumberOfComponents(1)
+        array2.SetNumberOfTuples(4)
+        array2.SetName('test')
+        for i in range(0, 4):
+            array2.SetValue(i, value)
+            value += 1
+
+        array3 = vtk.vtkDoubleArray()
+        array3.SetNumberOfComponents(1)
+        array3.SetNumberOfTuples(4)
+        array3.SetName('test')
+        for i in range(0, 4):
+            array3.SetValue(i, value)
+            value += 1
+
+
+        input1 = vtk.vtkTable()
+        input1.AddColumn(array1)
+        input2 = vtk.vtkTable()
+        input2.AddColumn(array2)
+        input3 = vtk.vtkTable()
+        input3.AddColumn(array3)
+
+        compo = vtk.vtkMultiPieceDataSet()
+        compo.SetNumberOfPieces(3)
+
+        compo.SetPiece(0, input1)
+        compo.SetPiece(1, input2)
+        compo.SetPiece(2, input3)
+
+        rcal.SetInputData(compo)
+
+        rcal.Update()
+
+        t1 = rcal.GetOutput().GetPieceAsDataObject(0).GetColumnByName('test')
+        value = 1
+
+        for i in range(0, t1.GetNumberOfTuples()):
+            self.assertEqual(value, t1.GetValue(i))
+            value += 1
+
+        t2  = rcal.GetOutput().GetPieceAsDataObject(1).GetColumnByName('test')
+        for i in range(0, t2.GetNumberOfTuples()):
+            self.assertEqual(value, t2.GetValue(i))
+            value += 1
+
+        t3  = rcal.GetOutput().GetPieceAsDataObject(2).GetColumnByName('test')
+        for i in range(0, t3.GetNumberOfTuples()):
+            self.assertEqual(value, t3.GetValue(i))
+            value += 1
+
+
+if __name__ == "__main__":
+    vtk.test.Testing.main([(TestRCalculatorFilter, 'testTreeOutput')])
diff --git a/Filters/StatisticsGnuR/module.cmake b/Filters/StatisticsGnuR/module.cmake
index 6a678ab..2910c56 100644
--- a/Filters/StatisticsGnuR/module.cmake
+++ b/Filters/StatisticsGnuR/module.cmake
@@ -5,6 +5,8 @@ vtk_module(vtkFiltersStatisticsGnuR
     vtkCommonCore
     vtkCommonExecutionModel
     vtkFiltersStatistics
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkTestingCore
     vtkFiltersSources
diff --git a/Filters/StatisticsGnuR/vtkRAdapter.cxx b/Filters/StatisticsGnuR/vtkRAdapter.cxx
index ea10d6c..cdc72c9 100644
--- a/Filters/StatisticsGnuR/vtkRAdapter.cxx
+++ b/Filters/StatisticsGnuR/vtkRAdapter.cxx
@@ -46,7 +46,7 @@
 #include <map>
 
 #include <stdio.h>
-#include <assert.h>
+#include <cassert>
 
 #define R_NO_REMAP /* AVOID SOME SERIOUS STUPIDITY. DO NOT REMOVE. */
 
@@ -485,7 +485,7 @@ SEXP vtkRAdapter::VTKTreeToR(vtkTree* tree)
   // traverse the tree to reorder the leaf vertices according to the
   // phylo tree numbering rule;
   // newNodeId is the checkup table that maps a vertexId(starting from 0)
-  // to it's corresponding R tree point id (staring from 1)
+  // to its corresponding R tree point id (starting from 1)
   vtkIdType leafCount = 0;
   vtkTreeDFSIterator* iter = vtkTreeDFSIterator::New();
   iter->SetTree(tree);
@@ -494,7 +494,7 @@ SEXP vtkRAdapter::VTKTreeToR(vtkTree* tree)
   while (iter->HasNext())
     {// find out all the leaf nodes, and number them sequentially
     vtkIdType vertexId = iter->Next();
-    newNodeId[vertexId] = 0;//initilize
+    newNodeId[vertexId] = 0;//initialize
     if (tree->IsLeaf(vertexId))
       {
       leafCount++;
@@ -505,7 +505,7 @@ SEXP vtkRAdapter::VTKTreeToR(vtkTree* tree)
   // second tree traverse to reorder the node vertices
   int nodeId = leafCount;
   iter->Restart();
-  vtkIdType vertexId = iter->Next();//skip the root (which id is zero)
+  vtkIdType vertexId;
   while (iter->HasNext())
     {
     vertexId = iter->Next();
@@ -516,7 +516,7 @@ SEXP vtkRAdapter::VTKTreeToR(vtkTree* tree)
       }
     }
 
-  nedge = tree->GetNumberOfEdges() -1;// the first edge 0-1 does not count in R tree
+  nedge = tree->GetNumberOfEdges();
   ntip  = leafCount;
   nnode = nedge - ntip + 1;
 
@@ -534,7 +534,7 @@ SEXP vtkRAdapter::VTKTreeToR(vtkTree* tree)
   // fill in e and e_len
   vtkSmartPointer<vtkEdgeListIterator> edgeIterator = vtkSmartPointer<vtkEdgeListIterator>::New();
   tree->GetEdges(edgeIterator);
-  vtkEdgeType vEdge = edgeIterator->Next();//skip the first empty edge (0,1) with weight 0
+  vtkEdgeType vEdge;
   int i = 0;
   vtkDoubleArray * weights = vtkDoubleArray::SafeDownCast((tree->GetEdgeData())->GetArray("weight"));
   while(edgeIterator->HasNext())
@@ -552,7 +552,6 @@ SEXP vtkRAdapter::VTKTreeToR(vtkTree* tree)
   // use GetAbstractArray() instead of GetArray()
   vtkStringArray * labels = vtkStringArray::SafeDownCast((tree->GetVertexData())->GetAbstractArray("node name"));
   iter->Restart();
-  vertexId = iter->Next();//skip the root
   while (iter->HasNext())
     {// find out all the leaf nodes, and number them sequentially
     vertexId = iter->Next();
@@ -707,50 +706,46 @@ vtkTree* vtkRAdapter::RToVTKTree(SEXP variable)
 
 
     // Create all of the tree vertice (number of edges +1)
-    // number of edges = nedge(in R tree)  + 1 (root edge in VTKTree)
-    int numOfEdges = nedge + 1;
+    // number of edges = nedge(in R tree)
+    int numOfEdges = nedge;
     for(int i = 0; i <= numOfEdges; i++)
       {
       builder->AddVertex();
       }
 
-    builder->AddEdge(0, ntip + 1); //root edge:  0 -- first node
     for(int i = 0; i < nedge; i++)
       {
-      vtkIdType source = edge[i];
-      vtkIdType target = edge[i+nedge];
+      // -1 because R vertices begin with 1, whereas VTK vertices begin with 0.
+      vtkIdType source = edge[i] - 1;
+      vtkIdType target = edge[i+nedge] - 1;
       builder->AddEdge(source, target);
       }
 
-
     // Create the edge weight array
     vtkNew<vtkDoubleArray> weights;
     weights->SetNumberOfComponents(1);
     weights->SetName("weight");
     weights->SetNumberOfValues(numOfEdges);
-    weights->SetValue(0, 0.0);//root edge weight = 0.0
     for (int i = 0; i < nedge; i++)
       {
-      weights->SetValue(i+1, edge_length[i]);
+      weights->SetValue(i, edge_length[i]);
       }
     builder->GetEdgeData()->AddArray(weights.GetPointer());
 
-
     // Create the names array
     // In R tree, the numeric id of the vertice is ordered such that the tips are listed first
     // followed by the internal nodes. The order are matching up with the label arrays (tip_label and node_label).
     vtkNew<vtkStringArray> names;
     names->SetNumberOfComponents(1);
     names->SetName("node name");
-    names->SetNumberOfValues(ntip + nnode + 1);
-    names->SetValue(0,""); //root name
+    names->SetNumberOfValues(ntip + nnode);
     for (int i = 0; i < ntip; i++)
       {
-      names->SetValue(i + 1, tip_label->GetValue(i));
+      names->SetValue(i, tip_label->GetValue(i));
       }
     for (int i = 0; i < nnode; i++)
       {
-        names->SetValue(i + ntip + 1, node_label->GetValue(i));
+      names->SetValue(i + ntip, node_label->GetValue(i));
       }
     builder->GetVertexData()->AddArray(names.GetPointer());
 
@@ -760,20 +755,11 @@ vtkTree* vtkRAdapter::RToVTKTree(SEXP variable)
       return NULL;
       }
 
-
-
     // Create the "node weight" array for the Vertices, in order to use
     // vtkTreeLayoutStrategy for visualizing the tree using vtkTreeHeatmapItem
     vtkNew<vtkDoubleArray> nodeWeights;
     nodeWeights->SetNumberOfTuples(tree->GetNumberOfVertices());
 
-    // trueWeights is (for the most part) a duplicate of nodeWeights.
-    // The only difference is that leaf nodes aren't clamped to the max
-    // weight in this array.
-    vtkNew<vtkDoubleArray> trueWeights;
-    trueWeights->SetNumberOfTuples(tree->GetNumberOfVertices());
-
-    double maxWeight = 0.0;
     vtkNew<vtkTreeDFSIterator> treeIterator;
     treeIterator->SetStartVertex(tree->GetRoot());
     treeIterator->SetTree(tree);
@@ -785,30 +771,14 @@ vtkTree* vtkRAdapter::RToVTKTree(SEXP variable)
       if (parent >= 0)
         {
         weight = weights->GetValue(tree->GetEdgeId(parent, vertex));
-        }
-      weight += nodeWeights->GetValue(parent);
-
-      if (weight > maxWeight)
-        {
-        maxWeight = weight;
+        weight += nodeWeights->GetValue(parent);
         }
       nodeWeights->SetValue(vertex, weight);
-      trueWeights->SetValue(vertex, weight);
       }
 
-    for (vtkIdType vertex = 0; vertex < tree->GetNumberOfVertices(); ++vertex)
-      {
-      if (tree->IsLeaf(vertex))
-        {
-        nodeWeights->SetValue(vertex, maxWeight);
-        }
-      }
     nodeWeights->SetName("node weight");
     tree->GetVertexData()->AddArray(nodeWeights.GetPointer());
 
-    trueWeights->SetName("true node weight");
-    tree->GetVertexData()->AddArray(trueWeights.GetPointer());
-
     this->vdoc->AddItem(tree);
     tree->Delete();
     return tree;
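
A worked illustration of the index mapping applied above, using the edge matrix from TestRInterface.cxx: with the synthetic root edge removed, each 1-based R edge maps to a 0-based VTK edge by subtracting 1 from both endpoints, so {4,5},{5,6},{5,3},{6,1},{6,2} become {3,4},{4,5},{4,2},{5,0},{5,1}. The helper below is hypothetical and only assumes a vtkMutableDirectedGraph builder like the one used in RToVTKTree.

#include "vtkMutableDirectedGraph.h"

static void BuildEdgesFromREdgeMatrix(vtkMutableDirectedGraph* builder)
{
  int rEdges[5][2] = { {4,5}, {5,6}, {5,3}, {6,1}, {6,2} }; // from TestRInterface.cxx
  for (int v = 0; v < 6; v++) // 6 vertices: 3 tips + 3 internal nodes
    {
    builder->AddVertex();
    }
  for (int i = 0; i < 5; i++)
    {
    vtkIdType source = rEdges[i][0] - 1; // 1-based R id -> 0-based VTK id
    vtkIdType target = rEdges[i][1] - 1;
    builder->AddEdge(source, target);    // yields (3,4),(4,5),(4,2),(5,0),(5,1)
    }
}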
diff --git a/Filters/StatisticsGnuR/vtkRCalculatorFilter.cxx b/Filters/StatisticsGnuR/vtkRCalculatorFilter.cxx
index 3448db3..068af97 100644
--- a/Filters/StatisticsGnuR/vtkRCalculatorFilter.cxx
+++ b/Filters/StatisticsGnuR/vtkRCalculatorFilter.cxx
@@ -38,6 +38,8 @@
 #include "vtkDoubleArray.h"
 #include "vtkTable.h"
 #include "vtkTree.h"
+#include "vtkMultiPieceDataSet.h"
+#include "vtkStringArray.h"
 
 #include <stdlib.h>
 #include <string>
@@ -64,16 +66,76 @@ public:
 
 };
 
+class RVariableNames
+{
+
+public:
+  RVariableNames()
+    {
+    this->ResetNameIterator();
+    }
+
+  void SetName(std::string name)
+    {
+    this->Names.clear();
+    this->Names.push_back(name);
+    this->ResetNameIterator();
+    }
+
+  void SetNames(vtkStringArray* names)
+    {
+    this->Names.clear();
+    for (vtkIdType i = 0; i < names->GetNumberOfTuples(); ++i)
+      {
+      this->Names.push_back(names->GetValue(i));
+      }
+
+    this->ResetNameIterator();
+    }
+
+  void ResetNameIterator()
+    {
+    this->NameIterator = this->Names.begin();
+    }
+
+  std::string NextName()
+    {
+    return *this->NameIterator++;
+    }
+
+  bool HasName()
+    {
+    return this->NameIterator != this->Names.end();
+    }
+
+  void Clear()
+    {
+    this->Names.clear();
+    this->ResetNameIterator();
+    }
+
+  int Count()
+    {
+    return this->Names.size();
+    }
+
+
+  std::vector<std::string> Names;
+  std::vector<std::string>::iterator NameIterator;
+
+};
+
 class vtkRCalculatorFilterInternals
 {
 
 public:
   std::vector<ArrNames> PutArrNames;
   std::vector<ArrNames> GetArrNames;
-  std::string PutTableName;
-  std::string GetTableName;
-  std::string PutTreeName;
-  std::string GetTreeName;
+  RVariableNames PutTableNames;
+  RVariableNames GetTableNames;
+  RVariableNames PutTreeNames;
+  RVariableNames GetTreeNames;
+
 };
 
 vtkRCalculatorFilter::vtkRCalculatorFilter()
@@ -269,11 +331,23 @@ int vtkRCalculatorFilter::RequestDataObject(
       vtkInformation* info = outputVector->GetInformationObject(i);
       vtkDataObject *output = info->Get(vtkDataObject::DATA_OBJECT());
 
-      if (!output || !output->IsA(input->GetClassName()))
+      if (this->HasMultipleGets())
         {
-        vtkDataObject* newOutput = input->NewInstance();
-        info->Set(vtkDataObject::DATA_OBJECT(), newOutput);
-        newOutput->Delete();
+        if (!output || !output->IsA("vtkMultiPieceDataSet"))
+          {
+          vtkDataObject* newOutput = vtkMultiPieceDataSet::New();
+          info->Set(vtkDataObject::DATA_OBJECT(), newOutput);
+          newOutput->Delete();
+          }
+        }
+      else
+        {
+        if (!output || !output->IsA(input->GetClassName()))
+          {
+          vtkDataObject* newOutput = input->NewInstance();
+          info->Set(vtkDataObject::DATA_OBJECT(), newOutput);
+          newOutput->Delete();
+          }
         }
       }
     return (1);
@@ -326,23 +400,57 @@ int vtkRCalculatorFilter::RequestData(vtkInformation *vtkNotUsed(request),
   // output's components.
   vtkCompositeDataSet* inComposite =
     vtkCompositeDataSet::SafeDownCast(input);
+  int tableCount = 0;
+  int treeCount = 0;
+  int itemCount = 0;
   if (inComposite)
-    {
-    vtkCompositeDataSet* outComposite =
-      vtkCompositeDataSet::SafeDownCast(output);
-    outComposite->CopyStructure(inComposite);
-    vtkCompositeDataIterator* iter = inComposite->NewIterator();
-    iter->InitTraversal();
-    for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-         iter->GoToNextItem())
+    {
+    vtkCompositeDataSet* outComposite =
+      vtkCompositeDataSet::SafeDownCast(output);
+    outComposite->CopyStructure(inComposite);
+    vtkCompositeDataIterator* iter = inComposite->NewIterator();
+    iter->InitTraversal();
+    for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
+         iter->GoToNextItem())
+      {
+      vtkDataObject* currentDataObject = iter->GetCurrentDataObject();
+      if (currentDataObject->IsA("vtkTable"))
+        {
+        tableCount++;
+        }
+      else if (currentDataObject->IsA("vtkTree"))
+        {
+        treeCount++;
+        }
+
+
+      vtkDataObject* outComponent = currentDataObject->NewInstance();
+      outComposite->SetDataSet(iter, outComponent);
+      outComponent->Delete();
+      itemCount++;
+      }
+    iter->Delete();
+    }
+
+  if (this->HasMultipleGets())
+    {
+    vtkMultiPieceDataSet* outComposite =
+        vtkMultiPieceDataSet::SafeDownCast(output);
+
+    for (int i=0; i<rcfi->GetTableNames.Count()-tableCount; i++)
       {
-      vtkDataObject* outComponent =
-        iter->GetCurrentDataObject()->NewInstance();
-      outComposite->SetDataSet(iter, outComponent);
-      outComponent->Delete();
+      vtkTable *table = vtkTable::New();
+      outComposite->SetPiece(itemCount++, table);
+      table->Delete();
       }
-    iter->Delete();
-    }
+
+    for (int i=0; i<rcfi->GetTreeNames.Count()-treeCount; i++)
+      {
+      vtkTree *tree = vtkTree::New();
+      outComposite->SetPiece(itemCount++, tree);
+      tree->Delete();
+      }
+    }
 
   // For now: use the first input information for timing
   if(this->TimeOutput)
@@ -404,6 +512,8 @@ int vtkRCalculatorFilter::RequestData(vtkInformation *vtkNotUsed(request),
 
   // assign vtk variables to R variables
   int numberOfInputs =  inputVector[0]->GetNumberOfInformationObjects();
+  rcfi->PutTableNames.ResetNameIterator();
+  rcfi->PutTreeNames.ResetNameIterator();
   for ( int i = 0; i < numberOfInputs; i++)
     {
     inpinfo = inputVector[0]->GetInformationObject(i);
@@ -445,6 +555,8 @@ int vtkRCalculatorFilter::RequestData(vtkInformation *vtkNotUsed(request),
     }
 
   // generate output
+  rcfi->GetTableNames.ResetNameIterator();
+  rcfi->GetTreeNames.ResetNameIterator();
   if (this->ProcessOutputDataObject(output) != 0)
     {
     vtkErrorMacro(<<"Filter does not handle output data type");
@@ -774,9 +886,24 @@ int vtkRCalculatorFilter::ProcessOutputCompositeDataSet(vtkCompositeDataSet * cd
 //----------------------------------------------------------------------------
 int vtkRCalculatorFilter::ProcessInputTable(vtkTable* tIn)
 {
-  if(this->rcfi->PutTableName.size() > 0)
+  if (this->rcfi->PutTableNames.HasName())
+    {
+    std::string name = this->rcfi->PutTableNames.NextName();
+    return this->ProcessInputTable(name, tIn);
+    }
+  else
+    {
+    return 0;
+    }
+}
+
+
+//----------------------------------------------------------------------------
+int vtkRCalculatorFilter::ProcessInputTable(std::string& name, vtkTable* tIn)
+{
+  if(name.length() > 0)
     {
-    this->ri->AssignVTKTableToRVariable(tIn, this->rcfi->PutTableName.c_str());
+    this->ri->AssignVTKTableToRVariable(tIn, name.c_str());
     }
   return (1);
 
@@ -784,46 +911,88 @@ int vtkRCalculatorFilter::ProcessInputTable(vtkTable* tIn)
 
 
 //----------------------------------------------------------------------------
-int vtkRCalculatorFilter::ProcessOutputTable(vtkTable* tOut)
+vtkTable* vtkRCalculatorFilter::GetOutputTable(std::string& name)
 {
 
-  if(this->rcfi->GetTableName.size() > 0)
+  if(name.length() > 0)
     {
-    tOut->ShallowCopy(
-      this->ri->AssignRVariableToVTKTable(this->rcfi->GetTableName.c_str()));
+    return this->ri->AssignRVariableToVTKTable(name.c_str());
     }
 
-  return (1);
+  return NULL;
 
 }
 
 
 //----------------------------------------------------------------------------
+int vtkRCalculatorFilter::ProcessOutputTable(vtkTable* tOut)
+{
+  if (rcfi->GetTableNames.HasName())
+    {
+    std::string name = rcfi->GetTableNames.NextName();
+    tOut->ShallowCopy(this->GetOutputTable(name));
+    return 1;
+    }
+  else
+    {
+    return 0;
+    }
+}
+
+//----------------------------------------------------------------------------
 int vtkRCalculatorFilter::ProcessInputTree(vtkTree* tIn)
 {
 
-  if(this->rcfi->PutTreeName.size() > 0)
+  if (this->rcfi->PutTreeNames.HasName())
+    {
+    std::string name = this->rcfi->PutTreeNames.NextName();
+    return this->ProcessInputTree(name, tIn);
+    }
+  else
+    {
+    return 0;
+    }
+}
+
+//----------------------------------------------------------------------------
+int vtkRCalculatorFilter::ProcessInputTree(std::string& name, vtkTree* tIn)
+{
+
+  if(name.size() > 0)
     {
-    this->ri->AssignVTKTreeToRVariable(tIn, this->rcfi->PutTreeName.c_str());
+    this->ri->AssignVTKTreeToRVariable(tIn, name.c_str());
     }
   return (1);
 
 }
 
-
 //----------------------------------------------------------------------------
-int vtkRCalculatorFilter::ProcessOutputTree(vtkTree* tOut)
+vtkTree* vtkRCalculatorFilter::GetOutputTree(std::string& name)
 {
 
-  if(this->rcfi->GetTreeName.size() > 0)
+  if(name.length() > 0)
     {
-    tOut->ShallowCopy(
-      this->ri->AssignRVariableToVTKTree(this->rcfi->GetTreeName.c_str()));
+    return this->ri->AssignRVariableToVTKTree(name.c_str());
     }
-  return (1);
+  return (NULL);
 
 }
 
+//----------------------------------------------------------------------------
+int vtkRCalculatorFilter::ProcessOutputTree(vtkTree* tOut)
+{
+
+  if (rcfi->GetTreeNames.HasName())
+    {
+    std::string name = rcfi->GetTreeNames.NextName();
+    tOut->ShallowCopy(this->GetOutputTree(name));
+    return 1;
+    }
+  else
+    {
+    return 0;
+    }
+}
 
 //----------------------------------------------------------------------------
 int vtkRCalculatorFilter::SetRscriptFromFile(const char* fname)
@@ -919,7 +1088,7 @@ void vtkRCalculatorFilter::PutTable(const char* NameOfRvar)
 
   if( NameOfRvar && (strlen(NameOfRvar) > 0) )
     {
-    rcfi->PutTableName = NameOfRvar;
+    rcfi->PutTableNames.SetName(NameOfRvar);
     this->Modified();
     }
 
@@ -932,7 +1101,7 @@ void vtkRCalculatorFilter::GetTable(const char* NameOfRvar)
 
   if( NameOfRvar && (strlen(NameOfRvar) > 0) )
     {
-    rcfi->GetTableName = NameOfRvar;
+    rcfi->GetTableNames.SetName(NameOfRvar);
     this->Modified();
     }
 
@@ -945,7 +1114,7 @@ void vtkRCalculatorFilter::PutTree(const char* NameOfRvar)
 
   if( NameOfRvar && (strlen(NameOfRvar) > 0) )
     {
-    rcfi->PutTreeName = NameOfRvar;
+    rcfi->PutTreeNames.SetName(NameOfRvar);
     this->Modified();
     }
 
@@ -958,7 +1127,7 @@ void vtkRCalculatorFilter::GetTree(const char* NameOfRvar)
 
   if( NameOfRvar && (strlen(NameOfRvar) > 0) )
     {
-    rcfi->GetTreeName = NameOfRvar;
+    rcfi->GetTreeNames.SetName(NameOfRvar);
     this->Modified();
     }
 
@@ -969,6 +1138,11 @@ void vtkRCalculatorFilter::GetTree(const char* NameOfRvar)
 void vtkRCalculatorFilter::RemoveAllPutVariables()
 {
   rcfi->PutArrNames.clear();
+  if (this->HasMultiplePuts())
+    {
+    rcfi->PutTreeNames.Clear();
+    rcfi->PutTableNames.Clear();
+    }
   this->Modified();
 }
 
@@ -977,6 +1151,11 @@ void vtkRCalculatorFilter::RemoveAllPutVariables()
 void vtkRCalculatorFilter::RemoveAllGetVariables()
 {
   rcfi->GetArrNames.clear();
+  if (this->HasMultipleGets())
+    {
+    rcfi->GetTreeNames.Clear();
+    rcfi->GetTableNames.Clear();
+    }
   this->Modified();
 }
 
@@ -1077,3 +1256,38 @@ int vtkRCalculatorFilter::ProcessOutputDataObject(vtkDataObject *output)
 
   return 1;
 }
+
+void vtkRCalculatorFilter::PutTables(vtkStringArray* NamesOfRVars)
+{
+  rcfi->PutTableNames.SetNames(NamesOfRVars);
+  this->Modified();
+}
+
+void vtkRCalculatorFilter::GetTables(vtkStringArray* NamesOfRvars)
+{
+  rcfi->GetTableNames.SetNames(NamesOfRvars);
+  this->Modified();
+}
+
+void vtkRCalculatorFilter::PutTrees(vtkStringArray* NamesOfRvars)
+{
+  rcfi->PutTreeNames.SetNames(NamesOfRvars);
+  this->Modified();
+}
+
+void vtkRCalculatorFilter::GetTrees(vtkStringArray* NamesOfRvars)
+{
+  rcfi->GetTreeNames.SetNames(NamesOfRvars);
+  this->Modified();
+}
+
+int vtkRCalculatorFilter::HasMultipleGets()
+{
+  return (rcfi->GetTreeNames.Count() > 1 || rcfi->GetTableNames.Count() > 1);
+
+}
+
+int vtkRCalculatorFilter::HasMultiplePuts()
+{
+  return (rcfi->PutTreeNames.Count() > 1 || rcfi->PutTableNames.Count() > 1);
+}
diff --git a/Filters/StatisticsGnuR/vtkRCalculatorFilter.h b/Filters/StatisticsGnuR/vtkRCalculatorFilter.h
index f961bae..cf6b481 100644
--- a/Filters/StatisticsGnuR/vtkRCalculatorFilter.h
+++ b/Filters/StatisticsGnuR/vtkRCalculatorFilter.h
@@ -48,6 +48,7 @@ class vtkTree;
 class vtkTable;
 class vtkCompositeDataSet;
 class vtkArrayData;
+class vtkStringArray;
 
 class VTKFILTERSSTATISTICSGNUR_EXPORT vtkRCalculatorFilter : public vtkDataObjectAlgorithm
 {
@@ -91,6 +92,17 @@ public:
   void GetTable(const char* NameOfRvar);
 
   // Description:
+  // For vtkTable input to the filter. An R list variable is created for each name
+  // in the array provided, using the vtkTables from the filter's input.
+  void PutTables(vtkStringArray* NamesOfRVars);
+
+  // Description:
+  // For vtkTable output of the filter. If more than one name is provided, a composite
+  // dataset is created for the output of the filter and a vtkTable is added
+  // for each R list variable in the array provided.
+  void GetTables(vtkStringArray* NamesOfRVars);
+
+  // Description:
   // For vtkTree input to the filter.  An R phylo tree variable is created for the
   // vtkTree input using PutTree().  The output of the filter can be set from
   // a phylo tree variable in R using GetTree()
@@ -98,6 +110,18 @@ public:
   void GetTree(const char* NameOfRvar);
 
   // Description:
+  // For vtkTree input to the filter.  An R phylo tree variable is created for each
+  // name in the array provided, using the vtkTrees from the filter's input.
+  void PutTrees(vtkStringArray* NamesOfRvars);
+
+  // Description:
+  // For vtkTree output of the filter. If more than one name is provided, a composite
+  // dataset is created for the output of the filter and a vtkTree is added for
+  // each R phylo tree variable in the array provided.
+  void GetTrees(vtkStringArray* NamesOfRvars);
+
+
+  // Description:
   // Script executed by R.  Can also be set from a file.
   vtkSetStringMacro(Rscript);
   vtkGetStringMacro(Rscript);
@@ -172,13 +196,21 @@ private:
   int ProcessOutputCompositeDataSet(vtkCompositeDataSet * cdsOut);
 
   int ProcessInputTable(vtkTable* tOut);
+  int ProcessInputTable(std::string& name, vtkTable* tIn);
+
+  vtkTable* GetOutputTable(std::string& name);
   int ProcessOutputTable(vtkTable* tOut);
 
   int ProcessInputTree(vtkTree* tIn);
+  int ProcessInputTree(std::string& name, vtkTree* tIn);
+
+  vtkTree* GetOutputTree(std::string& name);
   int ProcessOutputTree(vtkTree* tOut);
 
   int ProcessInputDataObject(vtkDataObject *input);
   int ProcessOutputDataObject(vtkDataObject *input);
+  int HasMultipleGets();
+  int HasMultiplePuts();
 
   vtkRInterface* ri;
   char* Rscript;
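
The PutTables()/GetTables() and PutTrees()/GetTrees() additions above let a single
vtkRCalculatorFilter hand several inputs to R and collect several results back as
pieces of a vtkMultiPieceDataSet. A minimal usage sketch, not part of this patch:
the helper name, the R variable names t1/t2/out1/out2, and the assumption that
"pieces" already holds two vtkTable inputs are all illustrative.

  #include "vtkMultiPieceDataSet.h"
  #include "vtkNew.h"
  #include "vtkRCalculatorFilter.h"
  #include "vtkStringArray.h"
  #include <iostream>

  void RunTwoTableScript(vtkMultiPieceDataSet* pieces)
  {
    vtkNew<vtkStringArray> putNames;   // input tables become R variables t1, t2
    putNames->InsertNextValue("t1");
    putNames->InsertNextValue("t2");

    vtkNew<vtkStringArray> getNames;   // R variables out1, out2 come back as pieces
    getNames->InsertNextValue("out1");
    getNames->InsertNextValue("out2");

    vtkNew<vtkRCalculatorFilter> calc;
    calc->SetInputData(pieces);
    calc->PutTables(putNames.GetPointer());
    calc->GetTables(getNames.GetPointer());
    calc->SetRscript("out1 <- t1\nout2 <- t2\n");
    calc->Update();

    // With more than one Get name, the output is a vtkMultiPieceDataSet.
    vtkMultiPieceDataSet* out =
      vtkMultiPieceDataSet::SafeDownCast(calc->GetOutput());
    std::cout << "pieces returned: " << out->GetNumberOfPieces() << std::endl;
  }
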
diff --git a/Filters/Texture/Testing/Data/Baseline/AutomaticPlaneGeneration.png.md5 b/Filters/Texture/Testing/Data/Baseline/AutomaticPlaneGeneration.png.md5
new file mode 100644
index 0000000..cd20e0d
--- /dev/null
+++ b/Filters/Texture/Testing/Data/Baseline/AutomaticPlaneGeneration.png.md5
@@ -0,0 +1 @@
+13bf2c85175b774bc3a3e672c6ad2637
diff --git a/Filters/Texture/Testing/Data/Baseline/cylMap.png.md5 b/Filters/Texture/Testing/Data/Baseline/cylMap.png.md5
new file mode 100644
index 0000000..d718def
--- /dev/null
+++ b/Filters/Texture/Testing/Data/Baseline/cylMap.png.md5
@@ -0,0 +1 @@
+c869f3f1aa4c3810769e436496bf0dbb
diff --git a/Filters/Texture/Testing/Data/Baseline/socbal.png.md5 b/Filters/Texture/Testing/Data/Baseline/socbal.png.md5
new file mode 100644
index 0000000..cc2214a
--- /dev/null
+++ b/Filters/Texture/Testing/Data/Baseline/socbal.png.md5
@@ -0,0 +1 @@
+0b6e20d45ff0ec4eb09bd8ebeee73a8c
diff --git a/Filters/Texture/Testing/Data/Baseline/socbal_1.png.md5 b/Filters/Texture/Testing/Data/Baseline/socbal_1.png.md5
new file mode 100644
index 0000000..6c4b2f5
--- /dev/null
+++ b/Filters/Texture/Testing/Data/Baseline/socbal_1.png.md5
@@ -0,0 +1 @@
+214bc2e87f485e4fa12a2f964145fcbc
diff --git a/Filters/Texture/Testing/Data/Baseline/textureThreshold.png.md5 b/Filters/Texture/Testing/Data/Baseline/textureThreshold.png.md5
new file mode 100644
index 0000000..773bbb4
--- /dev/null
+++ b/Filters/Texture/Testing/Data/Baseline/textureThreshold.png.md5
@@ -0,0 +1 @@
+3881745b493aa62d56450e32c2bb6062
diff --git a/Filters/Texture/Testing/Data/Baseline/textureThreshold_1.png.md5 b/Filters/Texture/Testing/Data/Baseline/textureThreshold_1.png.md5
new file mode 100644
index 0000000..c6764ee
--- /dev/null
+++ b/Filters/Texture/Testing/Data/Baseline/textureThreshold_1.png.md5
@@ -0,0 +1 @@
+0cfd4cb3a67decbc57afd35826793e3c
diff --git a/Filters/Texture/Testing/Data/Baseline/triangularTCoords.png.md5 b/Filters/Texture/Testing/Data/Baseline/triangularTCoords.png.md5
new file mode 100644
index 0000000..14536f1
--- /dev/null
+++ b/Filters/Texture/Testing/Data/Baseline/triangularTCoords.png.md5
@@ -0,0 +1 @@
+c3b32c6640bea6c1b89040b7814784af
diff --git a/Filters/Texture/Testing/Python/CMakeLists.txt b/Filters/Texture/Testing/Python/CMakeLists.txt
index 8d64274..d34a43a 100644
--- a/Filters/Texture/Testing/Python/CMakeLists.txt
+++ b/Filters/Texture/Testing/Python/CMakeLists.txt
@@ -1,8 +1,5 @@
-add_test_python(AutomaticPlaneGeneration.py Graphics)
-add_test_python(cylMap.py Graphics)
-add_test_python(socbal.py Graphics)
-add_test_python(triangularTCoords.py Graphics)
-
-if (VTK_DATA_ROOT)
-  add_test_python(textureThreshold.py Graphics)
-endif()
+vtk_add_test_python(AutomaticPlaneGeneration.py)
+vtk_add_test_python(cylMap.py)
+vtk_add_test_python(socbal.py)
+vtk_add_test_python(textureThreshold.py)
+vtk_add_test_python(triangularTCoords.py)
diff --git a/Filters/Texture/Testing/Tcl/CMakeLists.txt b/Filters/Texture/Testing/Tcl/CMakeLists.txt
index b63dd5f..b211cb9 100644
--- a/Filters/Texture/Testing/Tcl/CMakeLists.txt
+++ b/Filters/Texture/Testing/Tcl/CMakeLists.txt
@@ -1,8 +1,5 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(AutomaticPlaneGeneration.tcl Graphics)
-  add_test_tcl(cylMap.tcl Graphics)
-  add_test_tcl(socbal.tcl Graphics)
-  add_test_tcl(textureThreshold.tcl Graphics)
-endif()
-
-add_test_tcl(triangularTCoords.tcl Graphics)
+vtk_add_test_tcl(AutomaticPlaneGeneration.tcl)
+vtk_add_test_tcl(cylMap.tcl)
+vtk_add_test_tcl(socbal.tcl)
+vtk_add_test_tcl(textureThreshold.tcl)
+vtk_add_test_tcl(triangularTCoords.tcl)
diff --git a/Filters/Texture/vtkTextureMapToCylinder.h b/Filters/Texture/vtkTextureMapToCylinder.h
index 826a3f1..ff2cc72 100644
--- a/Filters/Texture/vtkTextureMapToCylinder.h
+++ b/Filters/Texture/vtkTextureMapToCylinder.h
@@ -86,7 +86,7 @@ public:
 
 protected:
   vtkTextureMapToCylinder();
-  ~vtkTextureMapToCylinder() {};
+  ~vtkTextureMapToCylinder() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Texture/vtkTextureMapToPlane.h b/Filters/Texture/vtkTextureMapToPlane.h
index e30f320..462c0b5 100644
--- a/Filters/Texture/vtkTextureMapToPlane.h
+++ b/Filters/Texture/vtkTextureMapToPlane.h
@@ -91,7 +91,7 @@ public:
 
 protected:
   vtkTextureMapToPlane();
-  ~vtkTextureMapToPlane() {};
+  ~vtkTextureMapToPlane() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   void ComputeNormal(vtkDataSet *output);
diff --git a/Filters/Texture/vtkTextureMapToSphere.h b/Filters/Texture/vtkTextureMapToSphere.h
index 39fbea7..d695692 100644
--- a/Filters/Texture/vtkTextureMapToSphere.h
+++ b/Filters/Texture/vtkTextureMapToSphere.h
@@ -78,7 +78,7 @@ public:
 
 protected:
   vtkTextureMapToSphere();
-  ~vtkTextureMapToSphere() {};
+  ~vtkTextureMapToSphere() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Texture/vtkThresholdTextureCoords.h b/Filters/Texture/vtkThresholdTextureCoords.h
index 5567544..9a339da 100644
--- a/Filters/Texture/vtkThresholdTextureCoords.h
+++ b/Filters/Texture/vtkThresholdTextureCoords.h
@@ -80,7 +80,7 @@ public:
 
 protected:
   vtkThresholdTextureCoords();
-  ~vtkThresholdTextureCoords() {};
+  ~vtkThresholdTextureCoords() {}
 
   // Usual data generation method
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Texture/vtkTransformTextureCoords.h b/Filters/Texture/vtkTransformTextureCoords.h
index fafb2cc..822d031 100644
--- a/Filters/Texture/vtkTransformTextureCoords.h
+++ b/Filters/Texture/vtkTransformTextureCoords.h
@@ -95,7 +95,7 @@ public:
 
 protected:
   vtkTransformTextureCoords();
-  ~vtkTransformTextureCoords() {};
+  ~vtkTransformTextureCoords() {}
 
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Filters/Texture/vtkTriangularTCoords.h b/Filters/Texture/vtkTriangularTCoords.h
index 779cb94..7251c87 100644
--- a/Filters/Texture/vtkTriangularTCoords.h
+++ b/Filters/Texture/vtkTriangularTCoords.h
@@ -40,8 +40,8 @@ public:
   void PrintSelf(ostream& os, vtkIndent indent);
 
 protected:
-  vtkTriangularTCoords() {};
-  ~vtkTriangularTCoords() {};
+  vtkTriangularTCoords() {}
+  ~vtkTriangularTCoords() {}
 
   // Usual data generation method
   int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
diff --git a/Filters/Verdict/Testing/Cxx/CMakeLists.txt b/Filters/Verdict/Testing/Cxx/CMakeLists.txt
index 8ffb8a4..0f9108d 100644
--- a/Filters/Verdict/Testing/Cxx/CMakeLists.txt
+++ b/Filters/Verdict/Testing/Cxx/CMakeLists.txt
@@ -1,23 +1,2 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  MeshQuality.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-            COMMAND ${vtk-module}CxxTests ${TName}
-                                          -D ${VTK_DATA_ROOT}
-                                          -T ${VTK_TEST_OUTPUT_DIR}
-                                          -V Baseline/${vtk-module}/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_add_test_cxx(MeshQuality.cxx NO_VALID)
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Filters/Verdict/Testing/Cxx/MeshQuality.cxx b/Filters/Verdict/Testing/Cxx/MeshQuality.cxx
index cfe4146..550d6ae 100644
--- a/Filters/Verdict/Testing/Cxx/MeshQuality.cxx
+++ b/Filters/Verdict/Testing/Cxx/MeshQuality.cxx
@@ -15,7 +15,7 @@
 #include "vtkUnstructuredGridReader.h"
 #include "vtkFieldData.h"
 
-int DumpQualityStats( vtkMeshQuality* iq, const char *arrayname )
+static int DumpQualityStats( vtkMeshQuality* iq, const char *arrayname )
 {
   cout << "  cardinality: "
        << iq->GetOutput()->GetFieldData()->GetArray( arrayname )->GetComponent( 0, 4 )
diff --git a/Filters/Verdict/Testing/Python/CMakeLists.txt b/Filters/Verdict/Testing/Python/CMakeLists.txt
index 9221e0f..128a9a4 100644
--- a/Filters/Verdict/Testing/Python/CMakeLists.txt
+++ b/Filters/Verdict/Testing/Python/CMakeLists.txt
@@ -1,3 +1 @@
-if(VTK_DATA_ROOT)
-  add_test_python1(MeshQuality.py ${VTK_DATA_ROOT})
-endif(VTK_DATA_ROOT)
+vtk_add_test_python(MeshQuality.py NO_VALID NO_RT)
diff --git a/GUISupport/MFC/CMakeLists.txt b/GUISupport/MFC/CMakeLists.txt
index 08012ed..05a5516 100644
--- a/GUISupport/MFC/CMakeLists.txt
+++ b/GUISupport/MFC/CMakeLists.txt
@@ -20,17 +20,17 @@ set(MFC_SRCS
 vtk_module_library(${vtk-module} ${VTK_MFC_LIB_TYPE} ${MFC_SRCS})
 
 if(VTK_MFC_EXTRA_LIBS)
-  target_link_libraries(${vtk-module} ${VTK_MFC_EXTRA_LIBS})
+  target_link_libraries(${vtk-module} LINK_PRIVATE ${VTK_MFC_EXTRA_LIBS})
 endif(VTK_MFC_EXTRA_LIBS)
 
-if ( WIN32 )
+if(WIN32)
   if(NOT VTK_INSTALL_NO_DEVELOPMENT)
     file(GLOB vtkMFCHeaderFiles "*.h")
     set(vtkMFCHeaderFiles ${vtkMFCHeaderFiles}
       "${CMAKE_CURRENT_BINARY_DIR}/vtkMFCConfigure.h")
     install(FILES ${vtkMFCHeaderFiles}
-      DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}
+      DESTINATION ${VTK_INSTALL_INCLUDE_DIR}
       COMPONENT Development
       )
-  endif(NOT VTK_INSTALL_NO_DEVELOPMENT)
-endif ( WIN32 )
+  endif()
+endif()
diff --git a/GUISupport/Qt/CMakeLists.txt b/GUISupport/Qt/CMakeLists.txt
index 74a2814..2a35b7c 100644
--- a/GUISupport/Qt/CMakeLists.txt
+++ b/GUISupport/Qt/CMakeLists.txt
@@ -1,3 +1,5 @@
+include(vtkQt)
+
 # set up sources to build
 set(QVTKLibSrcs
   vtkEventQtSlotConnect.cxx
@@ -52,13 +54,23 @@ set(QVTKNonMocHeaders
   QVTKInteractor.h
   )
 
-# import Qt4 build settings
-set(QT_USE_QTNETWORK 1)
-find_package(Qt4 REQUIRED QUIET)
-mark_as_advanced(QT_QMAKE_EXECUTABLE)
-include(${QT_USE_FILE})
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5Widgets REQUIRED QUIET)
+  include_directories(${Qt5Widgets_INCLUDE_DIRS})
+  add_definitions(${Qt5Widgets_DEFINITIONS})
+
+  qt5_wrap_cpp(QVTKLibMocSrcs ${QVTKMocHeaders})
+
+  set(QT_LIBRARIES ${Qt5Widgets_LIBRARIES})
+else()
+  # import Qt4 build settings
+  set(QT_USE_QTNETWORK 1)
+  find_package(Qt4 REQUIRED QUIET)
+  mark_as_advanced(QT_QMAKE_EXECUTABLE)
+  include(${QT_USE_FILE})
 
-qt4_wrap_cpp(QVTKLibMocSrcs ${QVTKMocHeaders})
+  qt4_wrap_cpp(QVTKLibMocSrcs ${QVTKMocHeaders})
+endif()
 
 foreach(opt
     QT_RCC_EXECUTABLE
@@ -76,15 +88,15 @@ vtk_module_library(${vtk-module}
   ${QVTKNonMocHeaders} ${QVTKMocHeaders}
   )
 
-target_link_libraries(${vtk-module} ${QT_LIBRARIES})
+target_link_libraries(${vtk-module} LINK_PUBLIC ${QT_LIBRARIES})
 
 if(VTK_USE_X)
-  target_link_libraries(${vtk-module} ${X11_LIBRARIES})
+  target_link_libraries(${vtk-module} LINK_PRIVATE ${X11_LIBRARIES})
 endif()
 
 if(APPLE)
   if(VTK_USE_CARBON)
-    target_link_libraries(${vtk-module} "-framework Carbon" )
+    target_link_libraries(${vtk-module} LINK_PRIVATE "-framework Carbon" )
   endif()
 endif()
 
@@ -94,9 +106,18 @@ if(BUILD_SHARED_LIBS)
 
   set(PluginMocHeaders Q4VTKWidgetPlugin.h)
 
-  add_definitions(-DQT_PLUGIN)
-  include_directories(${QT_QTDESIGNER_INCLUDE_DIR})
-  qt4_wrap_cpp(PluginMocSrcs ${PluginMocHeaders})
+  if(VTK_QT_VERSION VERSION_GREATER "4")
+    find_package(Qt5Designer REQUIRED QUIET)
+    add_definitions(${Qt5Designer_DEFINITIONS})
+    include_directories(${Qt5Designer_INCLUDE_DIRS})
+    qt5_wrap_cpp(PluginMocSrcs ${PluginMocHeaders})
+    set(QT_LIBRARIES ${Qt5Designer_LIBRARIES})
+  else()
+    add_definitions(-DQT_PLUGIN)
+    include_directories(${QT_QTDESIGNER_INCLUDE_DIR})
+    qt4_wrap_cpp(PluginMocSrcs ${PluginMocHeaders})
+    set(QT_LIBRARIES ${QT_QTGUI_LIBRARY} ${QT_QTCORE_LIBRARY})
+  endif()
 
   # add QVTK plugin from sources
   # stand-alone as it doesn't depend on QVTK library
@@ -109,10 +130,7 @@ if(BUILD_SHARED_LIBS)
   set_target_properties(QVTKWidgetPlugin PROPERTIES COMPILE_DEFINITIONS QT_NO_DEBUG)
 
   # link with Qt libs
-  target_link_libraries(QVTKWidgetPlugin
-    ${QT_QTGUI_LIBRARY}
-    ${QT_QTCORE_LIBRARY}
-    )
+  target_link_libraries(QVTKWidgetPlugin ${QT_LIBRARIES})
 
   # install rules
 
@@ -123,7 +141,7 @@ if(BUILD_SHARED_LIBS)
   # prefix.  The default VTK_INSTALL_QT_DIR will allow the designer plugin
   # path to be set to vtk-install-prefix/plugins to get the plugin.
   if(NOT VTK_INSTALL_QT_DIR)
-    set(VTK_INSTALL_QT_DIR /plugins/designer)
+    set(VTK_INSTALL_QT_DIR plugins/designer)
   endif()
 
   # If no runtime is to be installed then do not install the qt plugin.
@@ -137,7 +155,7 @@ if(BUILD_SHARED_LIBS)
     if(DEFINED VTK_INSTALL_QT_PLUGIN_DIR)
     else()
       set(VTK_INSTALL_QT_PLUGIN_DIR
-        "${DOLLAR}{CMAKE_INSTALL_PREFIX}${DOLLAR}{VTK_INSTALL_QT_DIR}"
+        "${DOLLAR}{CMAKE_INSTALL_PREFIX}/${DOLLAR}{VTK_INSTALL_QT_DIR}"
         CACHE STRING "Directory in which the VTK Qt plugin is placed during installation.")
       mark_as_advanced(VTK_INSTALL_QT_PLUGIN_DIR)
     endif()
@@ -147,11 +165,11 @@ if(BUILD_SHARED_LIBS)
     # main install prefix.  Attach the script as a post-install script.
     configure_file(${CMAKE_CURRENT_SOURCE_DIR}/PluginInstall.cmake.in
       ${CMAKE_CURRENT_BINARY_DIR}/PluginInstall.cmake
-      @ONLY IMMEDIATE)
+      @ONLY)
     set_target_properties(${vtk-module} PROPERTIES POST_INSTALL_SCRIPT
       ${CMAKE_CURRENT_BINARY_DIR}/PluginInstall.cmake)
   endif()
 endif()
 
 configure_file("${CMAKE_CURRENT_SOURCE_DIR}/CTestCustom.ctest.in"
-  "${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.ctest" @ONLY IMMEDIATE)
+  "${CMAKE_CURRENT_BINARY_DIR}/CTestCustom.ctest" @ONLY)
diff --git a/GUISupport/Qt/Q4VTKWidgetPlugin.cxx b/GUISupport/Qt/Q4VTKWidgetPlugin.cxx
index 64b68f2..ddb174f 100644
--- a/GUISupport/Qt/Q4VTKWidgetPlugin.cxx
+++ b/GUISupport/Qt/Q4VTKWidgetPlugin.cxx
@@ -144,5 +144,7 @@ QList<QDesignerCustomWidgetInterface*> QVTKPlugin::customWidgets() const
   return plugins;
 }
 
+#if QT_VERSION < 0x050000
 Q_EXPORT_PLUGIN(QVTKPlugin)
+#endif
 
diff --git a/GUISupport/Qt/Q4VTKWidgetPlugin.h b/GUISupport/Qt/Q4VTKWidgetPlugin.h
index 0aedd9a..05cb23b 100644
--- a/GUISupport/Qt/Q4VTKWidgetPlugin.h
+++ b/GUISupport/Qt/Q4VTKWidgetPlugin.h
@@ -27,8 +27,11 @@
 
 #include <QDesignerCustomWidgetInterface>
 #include <QDesignerCustomWidgetCollectionInterface>
-#include <QtPlugin>
 #include <QObject>
+#if QT_VERSION >= 0x050000
+# include <QtDesigner>
+#endif
+#include <QtPlugin>
 #include <QWidget>
 
 
@@ -54,10 +57,13 @@ class QVTKWidgetPlugin : public QDesignerCustomWidgetInterface
 class QVTKPlugin : public QObject, public QDesignerCustomWidgetCollectionInterface
 {
   Q_OBJECT
+  #if QT_VERSION >= 0x050000
+  Q_PLUGIN_METADATA(IID "org.vtk.qvtkplugin")
+  #endif
   Q_INTERFACES(QDesignerCustomWidgetCollectionInterface)
   public:
   QVTKPlugin();
-  ~QVTKPlugin();
+  virtual ~QVTKPlugin();
 
   virtual QList<QDesignerCustomWidgetInterface*> customWidgets() const;
   private:
diff --git a/GUISupport/Qt/QVTKWidget.cxx b/GUISupport/Qt/QVTKWidget.cxx
index ff2801c..abd24e7 100644
--- a/GUISupport/Qt/QVTKWidget.cxx
+++ b/GUISupport/Qt/QVTKWidget.cxx
@@ -74,7 +74,7 @@
 #endif
 
 /*! constructor */
-QVTKWidget::QVTKWidget(QWidget* p, Qt::WFlags f)
+QVTKWidget::QVTKWidget(QWidget* p, Qt::WindowFlags f)
   : QWidget(p, f | Qt::MSWindowsOwnDC), mRenWin(NULL),
     cachedImageCleanFlag(false),
     automaticImageCache(false), maxImageCacheRenderRate(1.0),
@@ -401,7 +401,7 @@ bool QVTKWidget::event(QEvent* e)
 
   if(QObject::event(e))
     {
-    return TRUE;
+    return true;
     }
 
   if(e->type() == QEvent::KeyPress)
diff --git a/GUISupport/Qt/QVTKWidget.h b/GUISupport/Qt/QVTKWidget.h
index 6d0be37..631d300 100644
--- a/GUISupport/Qt/QVTKWidget.h
+++ b/GUISupport/Qt/QVTKWidget.h
@@ -38,7 +38,7 @@
 
 #include "vtkGUISupportQtModule.h" // For export macro
 #include "QVTKInteractor.h"
-#include <QtGui/QWidget>
+#include <QWidget>
 
 class QVTKInteractorAdapter;
 
@@ -85,7 +85,7 @@ class VTKGUISUPPORTQT_EXPORT QVTKWidget : public QWidget
 
 public:
   //! constructor
-  QVTKWidget(QWidget* parent = NULL, Qt::WFlags f = 0);
+  QVTKWidget(QWidget* parent = NULL, Qt::WindowFlags f = 0);
   //! destructor
   virtual ~QVTKWidget();
 
diff --git a/GUISupport/Qt/Testing/Cxx/CMakeLists.txt b/GUISupport/Qt/Testing/Cxx/CMakeLists.txt
index eb390bb..b26beb8 100644
--- a/GUISupport/Qt/Testing/Cxx/CMakeLists.txt
+++ b/GUISupport/Qt/Testing/Cxx/CMakeLists.txt
@@ -1,31 +1,23 @@
-set(MyTests
+include(vtkQt)
+
+vtk_add_test_cxx(NO_VALID
   TestQtDebugLeaksView.cxx
   TestQtTableModelAdapter.cxx
   TestQtTreeModelAdapter.cxx
   )
 
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx ${MyTests}
-  EXTRA_INCLUDE vtkTestDriver.h)
-
-find_package(Qt4 REQUIRED)
-include(${QT_USE_FILE})
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests} QTestApp.cxx)
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5Widgets REQUIRED)
+  include_directories(${Qt5Widgets_INCLUDE_DIRS})
+  add_definitions(${Qt5Widgets_DEFINITIONS})
+else()
+  find_package(Qt4 REQUIRED)
+  include(${QT_USE_FILE})
+endif()
 
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
+vtk_test_cxx_executable(${vtk-module}CxxTests QTestApp.cxx)
 
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/QVTK/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  set_target_properties(${vtk-module}CxxTests PROPERTIES
+    COMPILE_FLAGS "${Qt5Widgets_EXECUTABLE_COMPILE_FLAGS}")
+endif()
diff --git a/GUISupport/Qt/Testing/Cxx/QTestApp.cxx b/GUISupport/Qt/Testing/Cxx/QTestApp.cxx
index e010d32..dcdef83 100644
--- a/GUISupport/Qt/Testing/Cxx/QTestApp.cxx
+++ b/GUISupport/Qt/Testing/Cxx/QTestApp.cxx
@@ -31,7 +31,11 @@ int QTestApp::Error = 0;
 
 QTestApp::QTestApp(int _argc, char** _argv)
 {
+#if QT_VERSION >= 0x050000
+  qInstallMessageHandler(QTestApp::messageHandler);
+#else
   qInstallMsgHandler(QTestApp::messageHandler);
+#endif
 
   // CMake generated driver removes argv[0],
   // so let's put a dummy back in
@@ -51,7 +55,11 @@ QTestApp::QTestApp(int _argc, char** _argv)
 QTestApp::~QTestApp()
 {
   delete App;
+#if QT_VERSION >= 0x050000
+  qInstallMessageHandler(0);
+#else
   qInstallMsgHandler(0);
+#endif
 }
 
 int QTestApp::exec()
@@ -65,8 +73,18 @@ int QTestApp::exec()
   return Error + ret;
 }
 
+#if QT_VERSION >= 0x050000
+void QTestApp::messageHandler(QtMsgType type,
+  const QMessageLogContext & context,
+  const QString & message)
+#else
 void QTestApp::messageHandler(QtMsgType type, const char *msg)
+#endif
 {
+#if QT_VERSION >= 0x050000
+  Q_UNUSED(context)
+  // Keep the byte array alive for the whole function; taking qPrintable()'s
+  // pointer directly would leave msg dangling after this statement.
+  const QByteArray messageBytes = message.toLocal8Bit();
+  const char * msg = messageBytes.constData();
+#endif
   switch(type)
   {
   case QtDebugMsg:
diff --git a/GUISupport/Qt/Testing/Cxx/QTestApp.h b/GUISupport/Qt/Testing/Cxx/QTestApp.h
index a3b13fb..0d16fef 100644
--- a/GUISupport/Qt/Testing/Cxx/QTestApp.h
+++ b/GUISupport/Qt/Testing/Cxx/QTestApp.h
@@ -33,7 +33,13 @@ public:
 
   static int exec();
 
+#if QT_VERSION >= 0x050000
+  static void messageHandler(QtMsgType type,
+    const QMessageLogContext & context,
+    const QString & message);
+#else
   static void messageHandler(QtMsgType type, const char *msg);
+#endif
 
   static void delay(int ms);
 
diff --git a/GUISupport/Qt/module.cmake b/GUISupport/Qt/module.cmake
index f967dc3..c65057e 100644
--- a/GUISupport/Qt/module.cmake
+++ b/GUISupport/Qt/module.cmake
@@ -6,6 +6,8 @@ vtk_module(vtkGUISupportQt
     vtkRenderingOpenGL
     vtkInteractionStyle
     vtkImagingCore
+  PRIVATE_DEPENDS
+    vtkFiltersExtraction
   TEST_DEPENDS
     vtkTestingCore
   EXCLUDE_FROM_WRAPPING
diff --git a/GUISupport/Qt/vtkQtAbstractModelAdapter.h b/GUISupport/Qt/vtkQtAbstractModelAdapter.h
index e3f6323..c858069 100644
--- a/GUISupport/Qt/vtkQtAbstractModelAdapter.h
+++ b/GUISupport/Qt/vtkQtAbstractModelAdapter.h
@@ -111,7 +111,15 @@ public:
   // We make the reset() method public because it isn't always possible for
   // an adapter to know when its input has changed, so it must be callable
   // by an outside entity.
-  void reset() { QAbstractItemModel::reset(); }
+  /// \sa beginResetModel, endResetModel
+  /// \deprecated
+  void reset() { QAbstractItemModel::beginResetModel(); QAbstractItemModel::endResetModel();}
+
+  // We make the beginResetModel() and endResetModel() methods public because it
+  // isn't always possible for an adapter to know when its input has changed,
+  // so they must be callable by an outside entity.
+  void beginResetModel() { QAbstractItemModel::beginResetModel(); }
+  void endResetModel() { QAbstractItemModel::endResetModel(); }
 
 
 signals:
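
The change above exposes the Qt 5 style beginResetModel()/endResetModel() pair on
the adapters, since an adapter cannot always tell when its VTK input has changed.
A minimal sketch of how outside code might drive it, assuming an existing
vtkQtTableModelAdapter; the helper name is illustrative and not part of the patch.

  #include "vtkQtTableModelAdapter.h"
  #include "vtkTable.h"

  // Hypothetical helper: swap the vtkTable behind an adapter and tell the
  // attached Qt views exactly once that the whole model was rebuilt.
  void ReplaceAdapterTable(vtkQtTableModelAdapter* adapter, vtkTable* newTable)
  {
    adapter->beginResetModel();          // views stop reading the old data
    adapter->SetVTKDataObject(newTable); // adapter now wraps the new table
    adapter->endResetModel();            // views re-query the rebuilt model
  }
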
diff --git a/GUISupport/Qt/vtkQtConnection.cxx b/GUISupport/Qt/vtkQtConnection.cxx
index 01ef45a..e700e97 100644
--- a/GUISupport/Qt/vtkQtConnection.cxx
+++ b/GUISupport/Qt/vtkQtConnection.cxx
@@ -75,7 +75,7 @@ void vtkQtConnection::Execute(vtkObject* caller, unsigned long e, void* call_dat
   if(e == vtkCommand::DeleteEvent)
     {
     this->Owner->Disconnect(this->VTKObject, this->VTKEvent, this->QtObject,
-      this->QtSlot.toAscii().data(),
+      this->QtSlot.toLatin1().data(),
       this->ClientData);
     }
 }
@@ -145,7 +145,7 @@ void vtkQtConnection::PrintSelf(ostream& os, vtkIndent indent)
           this->VTKObject->GetClassName() << ":" <<
           vtkCommand::GetStringFromEventId(this->VTKEvent) << "  <---->  " <<
           this->QtObject->metaObject()->className() << "::" <<
-          this->QtSlot.toAscii().data() << "\n";
+          this->QtSlot.toLatin1().data() << "\n";
     }
 }
 
diff --git a/GUISupport/Qt/vtkQtDebugLeaksView.cxx b/GUISupport/Qt/vtkQtDebugLeaksView.cxx
index 279755a..a2c291b 100644
--- a/GUISupport/Qt/vtkQtDebugLeaksView.cxx
+++ b/GUISupport/Qt/vtkQtDebugLeaksView.cxx
@@ -14,6 +14,7 @@
 =========================================================================*/
 #include "vtkQtDebugLeaksView.h"
 #include "vtkQtDebugLeaksModel.h"
+#include "vtkObjectBase.h"
 
 #include <QCheckBox>
 #include <QDesktopServices>
diff --git a/GUISupport/Qt/vtkQtTableModelAdapter.cxx b/GUISupport/Qt/vtkQtTableModelAdapter.cxx
index f34c9ce..6c63eaf 100644
--- a/GUISupport/Qt/vtkQtTableModelAdapter.cxx
+++ b/GUISupport/Qt/vtkQtTableModelAdapter.cxx
@@ -675,7 +675,7 @@ void vtkQtTableModelAdapter::getValue(int row, int in_column, vtkVariant& v) con
       strValue = strValue.remove(strValue.size()-2, 2); // remove the last comma
 
       // Reconstruct the variant using this string value
-      v = vtkVariant(strValue.toAscii().data());
+      v = vtkVariant(strValue.toLatin1().data());
       }
     }
 }
diff --git a/GUISupport/QtOpenGL/CMakeLists.txt b/GUISupport/QtOpenGL/CMakeLists.txt
index 4a3d8e3..5bdeee0 100644
--- a/GUISupport/QtOpenGL/CMakeLists.txt
+++ b/GUISupport/QtOpenGL/CMakeLists.txt
@@ -1,3 +1,5 @@
+include(vtkQt)
+
 set(MocHeaders ${QVTKMocHeaders}
   QVTKGraphicsItem.h
   QVTKWidget2.h
@@ -7,13 +9,22 @@ set(LibSrcs ${QVTKLibSrcs}
   QVTKWidget2.cxx
   )
 
-# import Qt4 build settings
-set(QT_USE_QTOPENGL 1)
-find_package(Qt4 REQUIRED QUIET)
-include(${QT_USE_FILE})
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5OpenGL REQUIRED QUIET)
+  include_directories(${Qt5OpenGL_INCLUDE_DIRS})
+  add_definitions(${Qt5OpenGL_DEFINITIONS})
+  qt5_wrap_cpp(LibMocSrcs ${MocHeaders})
+
+  set(QT_LIBRARIES ${Qt5OpenGL_LIBRARIES})
+else()
+  # import Qt4 build settings
+  set(QT_USE_QTOPENGL 1)
+  find_package(Qt4 REQUIRED QUIET)
+  include(${QT_USE_FILE})
 
-qt4_wrap_cpp(LibMocSrcs ${MocHeaders})
+  qt4_wrap_cpp(LibMocSrcs ${MocHeaders})
+endif()
 
 set(${vtk-module}_NO_HeaderTest 1)
 vtk_module_library(${vtk-module} ${LibSrcs} ${LibMocSrcs})
-target_link_libraries(${vtk-module} ${QT_LIBRARIES})
+target_link_libraries(${vtk-module} LINK_PRIVATE ${QT_LIBRARIES})
diff --git a/GUISupport/QtOpenGL/QVTKGraphicsItem.cxx b/GUISupport/QtOpenGL/QVTKGraphicsItem.cxx
index 812711f..417e2f2 100644
--- a/GUISupport/QtOpenGL/QVTKGraphicsItem.cxx
+++ b/GUISupport/QtOpenGL/QVTKGraphicsItem.cxx
@@ -28,6 +28,7 @@
 #include "vtkGenericOpenGLRenderWindow.h"
 #include "vtkEventQtSlotConnect.h"
 #include "vtkgl.h"
+#include "vtkOpenGLError.h"
 
 QVTKGraphicsItem::QVTKGraphicsItem(QGLContext* ctx, QGraphicsItem* p)
   : QGraphicsWidget(p), mContext(ctx)
@@ -51,8 +52,7 @@ QVTKGraphicsItem::QVTKGraphicsItem(QGLContext* ctx, QGraphicsItem* p)
 
 QVTKGraphicsItem::~QVTKGraphicsItem()
 {
-  if(mFBO)
-    delete mFBO;
+  delete mFBO;
 }
 
 void QVTKGraphicsItem::SetRenderWindow(vtkGenericOpenGLRenderWindow* win)
@@ -117,8 +117,7 @@ void QVTKGraphicsItem::MakeCurrent()
   QSize sz = this->size().toSize();
   if(!mFBO || sz != mFBO->size())
   {
-    if(mFBO)
-      delete mFBO;
+    delete mFBO;
 
     if(!sz.isEmpty())
       mFBO = new QGLFramebufferObject(sz, QGLFramebufferObject::Depth);
@@ -183,6 +182,8 @@ void QVTKGraphicsItem::paint(QPainter*, const QStyleOptionGraphicsItem*, QWidget
   if(!mWin)
     return;
 
+  vtkOpenGLClearErrorMacro();
+
 #if QT_VERSION >= 0x040600
   // tell Qt we're doing our own GL calls
   // if necessary, it'll put us in an OpenGL 1.x compatible state.
@@ -239,6 +240,8 @@ void QVTKGraphicsItem::paint(QPainter*, const QStyleOptionGraphicsItem*, QWidget
 #if QT_VERSION >= 0x040600
   painter->endNativePainting();
 #endif
+
+  vtkOpenGLStaticCheckErrorMacro("failed after paint");
 }
 
 void QVTKGraphicsItem::keyPressEvent(QKeyEvent* e)
diff --git a/GUISupport/QtOpenGL/QVTKGraphicsItem.h b/GUISupport/QtOpenGL/QVTKGraphicsItem.h
index dfc5c2e..4f06b47 100644
--- a/GUISupport/QtOpenGL/QVTKGraphicsItem.h
+++ b/GUISupport/QtOpenGL/QVTKGraphicsItem.h
@@ -26,8 +26,8 @@
 #define QVTKGraphicsItem_hpp
 
 #include "vtkGUISupportQtOpenGLModule.h" // For export macro
-#include <QtGui/QGraphicsWidget>
-#include <QtOpenGL/QGLContext>
+#include <QGLContext>
+#include <QGraphicsWidget>
 #include <vtkSmartPointer.h>
 #include "QVTKWin32Header.h"
 class vtkEventQtSlotConnect;
diff --git a/GUISupport/QtSQL/CMakeLists.txt b/GUISupport/QtSQL/CMakeLists.txt
index 5acb86b..0f8a467 100644
--- a/GUISupport/QtSQL/CMakeLists.txt
+++ b/GUISupport/QtSQL/CMakeLists.txt
@@ -1,3 +1,5 @@
+include(vtkQt)
+
 # set up sources to build
 set(LibSrcs
   vtkQtSQLDatabase.cxx
@@ -7,10 +9,25 @@ set(LibSrcs
 
 include_directories(${CMAKE_CURRENT_BINARY_DIR})
 
-# import Qt4 build settings
-set(QT_USE_QTSQL 1)
-find_package(Qt4 REQUIRED QUIET)
-include(${QT_USE_FILE})
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5Widgets REQUIRED QUIET)
+  find_package(Qt5Sql REQUIRED QUIET)
+
+  set(_qt_include_dirs ${Qt5Widgets_INCLUDE_DIRS} ${Qt5Sql_INCLUDE_DIRS})
+  list(REMOVE_DUPLICATES _qt_include_dirs)
+  include_directories(${_qt_include_dirs})
+
+  set(_qt_definitions ${Qt5Widgets_DEFINITIONS} ${Qt5Sql_DEFINITIONS})
+  list(REMOVE_DUPLICATES _qt_definitions)
+  add_definitions(${_qt_definitions})
+
+  set(QT_LIBRARIES ${Qt5Widgets_LIBRARIES} ${Qt5Sql_LIBRARIES})
+else()
+  # import Qt4 build settings
+  set(QT_USE_QTSQL 1)
+  find_package(Qt4 REQUIRED QUIET)
+  include(${QT_USE_FILE})
+endif()
 
 set(${vtk-module}_NO_HeaderTest 1)
 vtk_module_library(${vtk-module} ${LibSrcs})
@@ -20,4 +37,4 @@ if(VTK_LIBRARY_PROPERTIES)
   set_target_properties(${vtk-module} PROPERTIES ${VTK_LIBRARY_PROPERTIES})
 endif()
 
-target_link_libraries(${vtk-module} ${QT_LIBRARIES})
+target_link_libraries(${vtk-module} LINK_PRIVATE ${QT_LIBRARIES})
diff --git a/GUISupport/QtSQL/Testing/Cxx/TestQtSQLDatabase.cxx b/GUISupport/QtSQL/Testing/Cxx/TestQtSQLDatabase.cxx
index 4cc562e..0646a84 100644
--- a/GUISupport/QtSQL/Testing/Cxx/TestQtSQLDatabase.cxx
+++ b/GUISupport/QtSQL/Testing/Cxx/TestQtSQLDatabase.cxx
@@ -43,7 +43,7 @@ int TestQtSQLDatabase(int argc, char* argv[])
   //QCoreApplication app(argc, argv);
   //for (int i = 0; i < QCoreApplication::libraryPaths().count(); i++)
   //  {
-  //  cerr << QCoreApplication::libraryPaths().at(i).toAscii().data() << endl;
+  //  cerr << QCoreApplication::libraryPaths().at(i).toLatin1().data() << endl;
   //  }
 
   bool interactive = false;
@@ -133,11 +133,11 @@ int TestQtSQLDatabase(int argc, char* argv[])
     }
 
   vtkQtSQLDatabase* db = vtkQtSQLDatabase::New();
-  db->SetDatabaseType(dbtype.toAscii().data());
-  db->SetDatabaseName(database.toAscii().data());
-  db->SetUserName(user.toAscii().data());
+  db->SetDatabaseType(dbtype.toLatin1().data());
+  db->SetDatabaseName(database.toLatin1().data());
+  db->SetUserName(user.toLatin1().data());
   db->SetPort(port);
-  if (!db->Open(password.toAscii().data()))
+  if (!db->Open(password.toLatin1().data()))
     {
     cerr << "Unable to open database" << endl;
     return 1;
@@ -155,8 +155,8 @@ int TestQtSQLDatabase(int argc, char* argv[])
   if (!dataExists)
     {
     QString createQuery("CREATE TABLE IF NOT EXISTS people (name TEXT, age INTEGER, weight FLOAT)");
-    cout << createQuery.toAscii().data() << endl;
-    query->SetQuery(createQuery.toAscii().data());
+    cout << createQuery.toLatin1().data() << endl;
+    query->SetQuery(createQuery.toLatin1().data());
     if (!query->Execute())
       {
       cerr << "Create query failed" << endl;
@@ -166,8 +166,8 @@ int TestQtSQLDatabase(int argc, char* argv[])
     for (int i = 0; i < 40; i++)
       {
       QString insertQuery = QString("INSERT INTO people VALUES('John Doe %1', %1, %2)").arg(i).arg(10*i);
-      cout << insertQuery.toAscii().data() << endl;
-      query->SetQuery(insertQuery.toAscii().data());
+      cout << insertQuery.toLatin1().data() << endl;
+      query->SetQuery(insertQuery.toLatin1().data());
       if (!query->Execute())
         {
         cerr << "Insert query failed" << endl;
@@ -176,7 +176,7 @@ int TestQtSQLDatabase(int argc, char* argv[])
       }
     }
 
-  query->SetQuery(queryText.toAscii().data());
+  query->SetQuery(queryText.toLatin1().data());
   cerr << endl << "Running query: " << query->GetQuery() << endl;
 
   cerr << endl << "Using vtkSQLQuery directly to execute query:" << endl;
diff --git a/GUISupport/QtSQL/module.cmake b/GUISupport/QtSQL/module.cmake
index 3cd5bbc..8d3b1cd 100644
--- a/GUISupport/QtSQL/module.cmake
+++ b/GUISupport/QtSQL/module.cmake
@@ -3,6 +3,8 @@ vtk_module(vtkGUISupportQtSQL
     Qt
   DEPENDS
     vtkIOSQL
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkTestingCore
   EXCLUDE_FROM_WRAPPING
diff --git a/GUISupport/QtSQL/vtkQtSQLDatabase.cxx b/GUISupport/QtSQL/vtkQtSQLDatabase.cxx
index f9731e4..04b3b2f 100644
--- a/GUISupport/QtSQL/vtkQtSQLDatabase.cxx
+++ b/GUISupport/QtSQL/vtkQtSQLDatabase.cxx
@@ -152,7 +152,7 @@ bool vtkQtSQLDatabase::HasError()
 
 const char* vtkQtSQLDatabase::GetLastErrorText()
 {
-  return this->QtDatabase.lastError().text().toAscii();
+  return this->QtDatabase.lastError().text().toLatin1();
 }
 
 vtkStringArray* vtkQtSQLDatabase::GetTables()
@@ -179,7 +179,7 @@ vtkStringArray* vtkQtSQLDatabase::GetTables()
     QStringList tables = this->QtDatabase.tables(QSql::Tables);
     for (int i = 0; i < tables.size(); ++i)
       {
-      this->myTables->InsertNextValue(tables.at(i).toAscii());
+      this->myTables->InsertNextValue(tables.at(i).toLatin1());
       }
 
     }
@@ -195,7 +195,7 @@ vtkStringArray* vtkQtSQLDatabase::GetRecord(const char *table)
   QSqlRecord columns = this->QtDatabase.record(table);
   for (int i = 0; i < columns.count(); i++)
     {
-    this->currentRecord->InsertNextValue(columns.fieldName(i).toAscii());
+    this->currentRecord->InsertNextValue(columns.fieldName(i).toLatin1());
     }
 
   return currentRecord;
@@ -300,7 +300,7 @@ bool vtkQtSQLDatabase::ParseURL(const char* URL)
   qtType = protocol.c_str();
   qtType = "Q" + qtType.toUpper();
 
-  this->SetDatabaseType(qtType.toAscii());
+  this->SetDatabaseType(qtType.toLatin1());
   this->SetUserName(username.c_str());
   this->SetHostName(hostname.c_str());
   this->SetPort(atoi(dataport.c_str()));
diff --git a/GUISupport/QtSQL/vtkQtSQLQuery.cxx b/GUISupport/QtSQL/vtkQtSQLQuery.cxx
index 8d55162..a420741 100644
--- a/GUISupport/QtSQL/vtkQtSQLQuery.cxx
+++ b/GUISupport/QtSQL/vtkQtSQLQuery.cxx
@@ -75,7 +75,7 @@ bool vtkQtSQLQuery::HasError()
 
 const char* vtkQtSQLQuery::GetLastErrorText()
 {
-  this->SetLastErrorText(this->Internals->QtQuery.lastError().text().toAscii());
+  this->SetLastErrorText(this->Internals->QtQuery.lastError().text().toLatin1());
   return this->LastErrorText;
 }
 
@@ -93,8 +93,8 @@ bool vtkQtSQLQuery::Execute()
     {
     QString errorString;
     errorString.sprintf("Query execute error: %s (type:%d)\n",
-      error.text().toAscii().data(),error.type());
-    vtkErrorMacro(<< errorString.toAscii().data());
+      error.text().toLatin1().data(),error.type());
+    vtkErrorMacro(<< errorString.toLatin1().data());
     return false;
     }
 
@@ -102,7 +102,7 @@ bool vtkQtSQLQuery::Execute()
   this->Internals->FieldNames.clear();
   for (int i = 0; i < this->Internals->QtQuery.record().count(); i++)
     {
-    this->Internals->FieldNames.push_back(this->Internals->QtQuery.record().fieldName(i).toAscii().data());
+    this->Internals->FieldNames.push_back(this->Internals->QtQuery.record().fieldName(i).toLatin1().data());
     }
   return true;
 }
@@ -180,7 +180,7 @@ vtkVariant vtkQtSQLQuery::DataValue(vtkIdType c)
     case QVariant::Bool:
       return vtkVariant(v.toInt());
     case QVariant::Char:
-      return vtkVariant(v.toChar().toAscii());
+      return vtkVariant(v.toChar().toLatin1());
     case QVariant::DateTime:
       {
       QDateTime dt = v.toDateTime();
@@ -206,7 +206,7 @@ vtkVariant vtkQtSQLQuery::DataValue(vtkIdType c)
     case QVariant::LongLong:
       return vtkVariant(v.toLongLong());
     case QVariant::String:
-      return vtkVariant(v.toString().toAscii().data());
+      return vtkVariant(v.toString().toLatin1().data());
     case QVariant::UInt:
       return vtkVariant(v.toUInt());
     case QVariant::ULongLong:
@@ -223,7 +223,7 @@ vtkVariant vtkQtSQLQuery::DataValue(vtkIdType c)
     default:
       vtkErrorMacro(<< "Unhandled Qt variant type "
         << v.type() << " found; returning string variant.");
-      return vtkVariant(v.toString().toAscii().data());
+      return vtkVariant(v.toString().toLatin1().data());
     }
 }
 
diff --git a/GUISupport/QtSQL/vtkQtTimePointUtility.h b/GUISupport/QtSQL/vtkQtTimePointUtility.h
index 751bada..55085b1 100644
--- a/GUISupport/QtSQL/vtkQtTimePointUtility.h
+++ b/GUISupport/QtSQL/vtkQtTimePointUtility.h
@@ -40,8 +40,8 @@ public:
   static vtkTypeUInt64 QTimeToTimePoint(QTime time);
 
 protected:
-  vtkQtTimePointUtility() {};
-  ~vtkQtTimePointUtility() {};
+  vtkQtTimePointUtility() {}
+  ~vtkQtTimePointUtility() {}
 
 private:
   vtkQtTimePointUtility(const vtkQtTimePointUtility&);  // Not implemented.
diff --git a/GUISupport/QtWebkit/CMakeLists.txt b/GUISupport/QtWebkit/CMakeLists.txt
index 5f9841e..356327b 100644
--- a/GUISupport/QtWebkit/CMakeLists.txt
+++ b/GUISupport/QtWebkit/CMakeLists.txt
@@ -1,22 +1,36 @@
-find_package(Qt4 REQUIRED QtCore QtGui QtWebKit QUIET)
+include(vtkQt)
 
 # Rich-text view requires Qt >= 4.5.0
 # Rich-text depends on Qt Webkit which is not portable on Unix (AIX & HP-UX)
-QT4_WRAP_UI(UI_FILES vtkQtRichTextView.ui)
-set(LibSrcs ${QVTKLibSrcs} ${UI_FILES} vtkQtRichTextView.cxx)
+
+set(LibSrcs ${QVTKLibSrcs} vtkQtRichTextView.cxx)
 set(MocHeaders ${QVTKMocHeaders} vtkQtRichTextView.h)
-if(QT_PHONON_FOUND AND APPLE)
-  set(QT_USE_PHONON 1)
-endif()
-set(QT_USE_QTWEBKIT 1)
 
-# import Qt4 build settings
-set(QT_USE_QTNETWORK 1)
-include(${QT_USE_FILE})
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5WebKitWidgets REQUIRED QUIET)
+  include_directories(${Qt5WebKitWidgets_INCLUDE_DIRS})
+  add_definitions(${Qt5WebKitWidgets_DEFINITIONS})
+
+  qt5_wrap_ui(UI_FILES vtkQtRichTextView.ui)
+  qt5_wrap_cpp(LibMocSrcs ${MocHeaders})
 
-qt4_wrap_cpp(LibMocSrcs ${MocHeaders})
+  set(QT_LIBRARIES ${Qt5WebKitWidgets_LIBRARIES})
+else()
+  find_package(Qt4 REQUIRED QtCore QtGui QtWebKit QUIET)
+
+  # import Qt4 build settings
+  if(QT_PHONON_FOUND AND APPLE)
+    set(QT_USE_PHONON 1)
+  endif()
+  set(QT_USE_QTWEBKIT 1)
+  set(QT_USE_QTNETWORK 1)
+  include(${QT_USE_FILE})
+
+  qt4_wrap_ui(UI_FILES vtkQtRichTextView.ui)
+  qt4_wrap_cpp(LibMocSrcs ${MocHeaders})
+endif()
 
 set(${vtk-module}_NO_HeaderTest 1)
-vtk_module_library(${vtk-module} ${LibSrcs} ${LibMocSrcs})
+vtk_module_library(${vtk-module} ${LibSrcs} ${UI_FILES} ${LibMocSrcs})
 
-target_link_libraries(${vtk-module} ${QT_LIBRARIES})
+target_link_libraries(${vtk-module} LINK_PRIVATE ${QT_LIBRARIES})
diff --git a/GUISupport/QtWebkit/vtkQtRichTextView.cxx b/GUISupport/QtWebkit/vtkQtRichTextView.cxx
index f117047..0ac1355 100644
--- a/GUISupport/QtWebkit/vtkQtRichTextView.cxx
+++ b/GUISupport/QtWebkit/vtkQtRichTextView.cxx
@@ -51,8 +51,6 @@ PURPOSE.  See the above copyright notice for more information.
 #include <QWebHistory>
 #include <QWebPage>
 #include <QWebView>
-#include <QHttpHeader>
-#include <QHttpRequestHeader>
 #include <QUrl>
 
 vtkStandardNewMacro(vtkQtRichTextView);
diff --git a/Geovis/Core/Testing/Cxx/CMakeLists.txt b/Geovis/Core/Testing/Cxx/CMakeLists.txt
index ad68bea..d89f7bc 100644
--- a/Geovis/Core/Testing/Cxx/CMakeLists.txt
+++ b/Geovis/Core/Testing/Cxx/CMakeLists.txt
@@ -1,27 +1,10 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   TestCoincidentGeoGraphRepresentation2D.cxx
   TestGeoAssignCoordinates.cxx
   TestGeoGraticule.cxx
-  TestGeoProjection.cxx
+  TestGeoProjection.cxx,NO_VALID
   TestGlobeSource.cxx
   TestLabeledGeoView2D.cxx
-)
+  )
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-            COMMAND ${vtk-module}CxxTests ${TName}
-                                          -D ${VTK_DATA_ROOT}
-                                          -T ${VTK_TEST_OUTPUT_DIR}
-                                          -V Baseline/Geovis/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Geovis/Core/Testing/Data/Baseline/TestCoincidentGeoGraphRepresentation2D.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestCoincidentGeoGraphRepresentation2D.png.md5
new file mode 100644
index 0000000..70c0325
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestCoincidentGeoGraphRepresentation2D.png.md5
@@ -0,0 +1 @@
+e01bf49e22f38c54cc7a1ca283b327b0
diff --git a/Geovis/Core/Testing/Data/Baseline/TestGeoAssignCoordinates.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestGeoAssignCoordinates.png.md5
new file mode 100644
index 0000000..d7a933c
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestGeoAssignCoordinates.png.md5
@@ -0,0 +1 @@
+1c705f422240513ed28db9b20f843edb
diff --git a/Geovis/Core/Testing/Data/Baseline/TestGeoGraticule.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestGeoGraticule.png.md5
new file mode 100644
index 0000000..5fb4b81
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestGeoGraticule.png.md5
@@ -0,0 +1 @@
+5e2e3810909a17218a799bd5652986c4
diff --git a/Geovis/Core/Testing/Data/Baseline/TestGlobeSource.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestGlobeSource.png.md5
new file mode 100644
index 0000000..7162ddd
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestGlobeSource.png.md5
@@ -0,0 +1 @@
+981322b8b8dc7a7275bbd09d75f6176f
diff --git a/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D.png.md5
new file mode 100644
index 0000000..f2b1ac6
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D.png.md5
@@ -0,0 +1 @@
+a0ba2137f9ecfbbe7e24f196c0231056
diff --git a/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_1.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_1.png.md5
new file mode 100644
index 0000000..774e726
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_1.png.md5
@@ -0,0 +1 @@
+61e0c742f0a037d45b76f0317dacdb73
diff --git a/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_2.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_2.png.md5
new file mode 100644
index 0000000..15f34c8
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_2.png.md5
@@ -0,0 +1 @@
+dce4d801948c1b896163ed65def0fd76
diff --git a/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_3.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_3.png.md5
new file mode 100644
index 0000000..ab98732
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_3.png.md5
@@ -0,0 +1 @@
+c291a5768cc0c0cf9ec516a666cefc95
diff --git a/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_4.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_4.png.md5
new file mode 100644
index 0000000..ea111ea
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_4.png.md5
@@ -0,0 +1 @@
+00a9dac85f9d1a9ff39ef2c181f18ddf
diff --git a/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_5.png.md5 b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_5.png.md5
new file mode 100644
index 0000000..c0de37b
--- /dev/null
+++ b/Geovis/Core/Testing/Data/Baseline/TestLabeledGeoView2D_5.png.md5
@@ -0,0 +1 @@
+fe2697992f9e05ef19943d2b3ca28471
diff --git a/Geovis/Core/vtkGeoAlignedImageSource.cxx b/Geovis/Core/vtkGeoAlignedImageSource.cxx
index 46c73f5..4797f82 100644
--- a/Geovis/Core/vtkGeoAlignedImageSource.cxx
+++ b/Geovis/Core/vtkGeoAlignedImageSource.cxx
@@ -31,7 +31,7 @@
 #include "vtkTimerLog.h"
 #include "vtkTransform.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkGeoAlignedImageSource);
 vtkCxxSetObjectMacro(vtkGeoAlignedImageSource, Image, vtkImageData);
diff --git a/IO/AMR/Testing/Cxx/CMakeLists.txt b/IO/AMR/Testing/Cxx/CMakeLists.txt
index 729907d..5f8ceb3 100644
--- a/IO/AMR/Testing/Cxx/CMakeLists.txt
+++ b/IO/AMR/Testing/Cxx/CMakeLists.txt
@@ -1,24 +1,5 @@
-## List all test sources
-set(MyTests
-#    TestFlashReader.cxx  there is not data for this test
-    TestEnzoReader.cxx
-    )
-
-## Create test source list
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-    ${MyTests}
-    EXTRA_INCLUDE vtkTestDriver.h
-    )
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-## Add all executables
-if (VTK_DATA_ROOT)
- foreach(test ${TestsToRun})
-   get_filename_component(t ${test} NAME_WE)
-   add_test(NAME ${vtk-module}Cxx-${t}
-     COMMAND ${vtk-module}CxxTests ${t} -D ${VTK_DATA_ROOT})
- endforeach()
-endif()
+vtk_add_test_cxx(NO_VALID NO_OUTPUT
+  #    TestFlashReader.cxx  there is no data for this test
+  TestEnzoReader.cxx
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/AMR/module.cmake b/IO/AMR/module.cmake
index 21c4223..501a1fd 100644
--- a/IO/AMR/module.cmake
+++ b/IO/AMR/module.cmake
@@ -3,10 +3,12 @@ vtk_module(vtkIOAMR
     StandAlone
   DEPENDS
     vtkParallelCore
-    vtkhdf5
     vtkFiltersAMR
+  PRIVATE_DEPENDS
+    vtkhdf5
+    vtksys
   TEST_DEPENDS
     vtkIOXML
     vtkTestingCore
     vtkTestingRendering
-  )
\ No newline at end of file
+  )
diff --git a/IO/AMR/vtkAMREnzoReaderInternal.cxx b/IO/AMR/vtkAMREnzoReaderInternal.cxx
index f5ca4b5..f2ad42d 100644
--- a/IO/AMR/vtkAMREnzoReaderInternal.cxx
+++ b/IO/AMR/vtkAMREnzoReaderInternal.cxx
@@ -92,6 +92,52 @@ void vtkEnzoReaderBlock::Init()
 }
 
 //------------------------------------------------------------------------------
+void vtkEnzoReaderBlock::DeepCopy(const vtkEnzoReaderBlock *other)
+{
+  this->BlockFileName    = other->BlockFileName;
+  this->ParticleFileName = other->ParticleFileName;
+
+  this->Index    = other->Index;
+  this->Level    = other->Level;
+  this->ParentId = other->ParentId;
+  this->ChildrenIds = other->ChildrenIds;
+  this->NumberOfParticles  = other->NumberOfParticles;
+  this->NumberOfDimensions = other->NumberOfDimensions;
+
+  this->MinParentWiseIds[0] = other->MinParentWiseIds[0];
+  this->MinParentWiseIds[1] = other->MinParentWiseIds[1];
+  this->MinParentWiseIds[2] = other->MinParentWiseIds[2];
+  this->MaxParentWiseIds[0] = other->MaxParentWiseIds[0];
+  this->MaxParentWiseIds[1] = other->MaxParentWiseIds[1];
+  this->MaxParentWiseIds[2] = other->MaxParentWiseIds[2];
+
+  this->MinLevelBasedIds[0] = other->MinLevelBasedIds[0];
+  this->MinLevelBasedIds[1] = other->MinLevelBasedIds[1];
+  this->MinLevelBasedIds[2] = other->MinLevelBasedIds[2];
+  this->MaxLevelBasedIds[0] = other->MaxLevelBasedIds[0];
+  this->MaxLevelBasedIds[1] = other->MaxLevelBasedIds[1];
+  this->MaxLevelBasedIds[2] = other->MaxLevelBasedIds[2];
+
+  this->BlockCellDimensions[0] = other->BlockCellDimensions[0];
+  this->BlockCellDimensions[1] = other->BlockCellDimensions[1];
+  this->BlockCellDimensions[2] = other->BlockCellDimensions[2];
+  this->BlockNodeDimensions[0] = other->BlockNodeDimensions[0];
+  this->BlockNodeDimensions[1] = other->BlockNodeDimensions[1];
+  this->BlockNodeDimensions[2] = other->BlockNodeDimensions[2];
+
+  this->MinBounds[0] = other->MinBounds[0];
+  this->MinBounds[1] = other->MinBounds[1];
+  this->MinBounds[2] = other->MinBounds[2];
+  this->MaxBounds[0] = other->MaxBounds[0];
+  this->MaxBounds[1] = other->MaxBounds[1];
+  this->MaxBounds[2] = other->MaxBounds[2];
+
+  this->SubdivisionRatio[0] = other->SubdivisionRatio[0];
+  this->SubdivisionRatio[1] = other->SubdivisionRatio[1];
+  this->SubdivisionRatio[2] = other->SubdivisionRatio[2];
+}
+
+//------------------------------------------------------------------------------
 // get the bounding (cell) Ids of this block in terms of its parent block's
 // sub-division resolution (indexing is limited to the scope of the parent)
 void vtkEnzoReaderBlock::GetParentWiseIds
diff --git a/IO/AMR/vtkAMREnzoReaderInternal.h b/IO/AMR/vtkAMREnzoReaderInternal.h
index aefa7d5..17a8685 100644
--- a/IO/AMR/vtkAMREnzoReaderInternal.h
+++ b/IO/AMR/vtkAMREnzoReaderInternal.h
@@ -62,6 +62,10 @@ class vtkEnzoReaderBlock
 public:
   vtkEnzoReaderBlock()  { this->Init(); }
  ~vtkEnzoReaderBlock()  { this->Init(); }
+  vtkEnzoReaderBlock(const vtkEnzoReaderBlock& other)
+    { this->DeepCopy(&other); }
+  vtkEnzoReaderBlock& operator=(const vtkEnzoReaderBlock& other)
+    { this->DeepCopy(&other); return *this; }
 
   int                   Index;
   int                   Level;
@@ -86,6 +90,7 @@ public:
   std::string        ParticleFileName;
 
   void   Init();
+  void DeepCopy(const vtkEnzoReaderBlock *other);
   void GetParentWiseIds(  std::vector< vtkEnzoReaderBlock > & blocks  );
   void GetLevelBasedIds(  std::vector< vtkEnzoReaderBlock > & blocks  );
 };
diff --git a/IO/Core/Testing/Cxx/CMakeLists.txt b/IO/Core/Testing/Cxx/CMakeLists.txt
index cd35333..e30fc2a 100644
--- a/IO/Core/Testing/Cxx/CMakeLists.txt
+++ b/IO/Core/Testing/Cxx/CMakeLists.txt
@@ -1,27 +1,7 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(NO_VALID
   TestArrayDataWriter.cxx
   TestArrayDenormalized.cxx
   TestArraySerialization.cxx
   TestCompress.cxx
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-            COMMAND ${vtk-module}CxxTests ${TName}
-                                          -D ${VTK_DATA_ROOT}
-                                          -T ${VTK_TEST_OUTPUT_DIR}
-                                          -V Baseline/${vtk-module}/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/Core/Testing/Python/CMakeLists.txt b/IO/Core/Testing/Python/CMakeLists.txt
index de2d1e8..1bfd129 100644
--- a/IO/Core/Testing/Python/CMakeLists.txt
+++ b/IO/Core/Testing/Python/CMakeLists.txt
@@ -1,4 +1,2 @@
-if(VTK_DATA_ROOT})
-  add_test_python1(TestGlobFileNames.py ${VTK_DATA_ROOT})
-  add_test_python1(TestSortFileNames.py ${VTK_DATA_ROOT})
-endif()
+vtk_add_test_python(TestGlobFileNames.py NO_VALID NO_RT)
+vtk_add_test_python(TestSortFileNames.py NO_VALID NO_RT)
diff --git a/IO/Core/Testing/Tcl/CMakeLists.txt b/IO/Core/Testing/Tcl/CMakeLists.txt
index ce708cc..1fcfb22 100644
--- a/IO/Core/Testing/Tcl/CMakeLists.txt
+++ b/IO/Core/Testing/Tcl/CMakeLists.txt
@@ -6,10 +6,5 @@ set(tests
 )
 
 foreach( tfile ${tests})
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Tcl-${tfile} COMMAND ${VTK_TCL_EXE}
-      ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.tcl
-      -D ${VTK_DATA_ROOT}
-   )
-  endif()
+  vtk_add_test_tcl(${tfile}.tcl NO_RT)
 endforeach()
diff --git a/IO/Core/module.cmake b/IO/Core/module.cmake
index fd0030d..1c894ea 100644
--- a/IO/Core/module.cmake
+++ b/IO/Core/module.cmake
@@ -5,6 +5,7 @@ vtk_module(vtkIOCore
     vtkCommonDataModel
     vtkCommonExecutionModel
     vtkCommonMisc
+  PRIVATE_DEPENDS
     vtkzlib
     vtksys
   TEST_DEPENDS
diff --git a/IO/Core/vtkBase64Utilities.cxx b/IO/Core/vtkBase64Utilities.cxx
index 8401c34..be1873f 100644
--- a/IO/Core/vtkBase64Utilities.cxx
+++ b/IO/Core/vtkBase64Utilities.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 #include "vtkBase64Utilities.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 //----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkBase64Utilities);
diff --git a/IO/Core/vtkBase64Utilities.h b/IO/Core/vtkBase64Utilities.h
index 3a5fb3f..b8b1ff1 100644
--- a/IO/Core/vtkBase64Utilities.h
+++ b/IO/Core/vtkBase64Utilities.h
@@ -99,8 +99,8 @@ public:
                               unsigned long max_input_length = 0);
 
 protected:
-  vtkBase64Utilities() {};
-  ~vtkBase64Utilities() {};
+  vtkBase64Utilities() {}
+  ~vtkBase64Utilities() {}
 
 private:
   vtkBase64Utilities(const vtkBase64Utilities&);  // Not implemented.
diff --git a/IO/Core/vtkSortFileNames.cxx b/IO/Core/vtkSortFileNames.cxx
index 06ece36..1a8c483 100644
--- a/IO/Core/vtkSortFileNames.cxx
+++ b/IO/Core/vtkSortFileNames.cxx
@@ -289,8 +289,8 @@ void vtkSortFileNames::GroupFileNames(vtkStringArray *input,
 }
 
 // Sort filenames lexicographically, ignoring case.
-bool vtkCompareFileNamesIgnoreCase(const std::string& s1,
-                                   const std::string& s2)
+static bool vtkCompareFileNamesIgnoreCase(const std::string& s1,
+                                          const std::string& s2)
 {
   unsigned int n1 = static_cast<unsigned int>(s1.length());
   unsigned int n2 = static_cast<unsigned int>(s2.length());
@@ -335,8 +335,8 @@ bool vtkCompareFileNamesIgnoreCase(const std::string& s1,
 }
 
 // Sort filenames numerically
-bool vtkCompareFileNamesNumeric(const std::string& s1,
-                                const std::string& s2)
+static bool vtkCompareFileNamesNumeric(const std::string& s1,
+                                       const std::string& s2)
 {
   unsigned int n1 = static_cast<unsigned int>(s1.length());
   unsigned int n2 = static_cast<unsigned int>(s2.length());
@@ -417,8 +417,8 @@ bool vtkCompareFileNamesNumeric(const std::string& s1,
 }
 
 // Sort filenames numerically
-bool vtkCompareFileNamesNumericIgnoreCase(const std::string& s1,
-                                          const std::string& s2)
+static bool vtkCompareFileNamesNumericIgnoreCase(const std::string& s1,
+                                                 const std::string& s2)
 {
   unsigned int n1 = static_cast<unsigned int>(s1.length());
   unsigned int n2 = static_cast<unsigned int>(s2.length());
diff --git a/IO/Core/vtkTextCodec.cxx b/IO/Core/vtkTextCodec.cxx
index 8ec94da..2fca88c 100644
--- a/IO/Core/vtkTextCodec.cxx
+++ b/IO/Core/vtkTextCodec.cxx
@@ -61,7 +61,6 @@ namespace
     ~vtkUnicodeStringOutputIterator();
 
   private:
-    vtkUnicodeStringOutputIterator(); // Not implemented
     vtkUnicodeStringOutputIterator(const vtkUnicodeStringOutputIterator&); // Not implemented
     const vtkUnicodeStringOutputIterator& operator=(const vtkUnicodeStringOutputIterator&); // Not Implemented
 
diff --git a/IO/Core/vtkTextCodecFactory.cxx b/IO/Core/vtkTextCodecFactory.cxx
index 3f67eb8..d1302bf 100644
--- a/IO/Core/vtkTextCodecFactory.cxx
+++ b/IO/Core/vtkTextCodecFactory.cxx
@@ -176,17 +176,17 @@ vtkTextCodec* vtkTextCodecFactory::CodecToHandle(istream& SampleData)
   return NULL;
 }
 
-vtkTextCodec* vtkASCIITextCodecFromCallback()
+static vtkTextCodec* vtkASCIITextCodecFromCallback()
 {
    return vtkASCIITextCodec::New();
 }
 
-vtkTextCodec* vtkUTF8TextCodecFromCallback()
+static vtkTextCodec* vtkUTF8TextCodecFromCallback()
 {
    return vtkUTF8TextCodec::New();
 }
 
-vtkTextCodec* vtkUTF16TextCodecFromCallback()
+static vtkTextCodec* vtkUTF16TextCodecFromCallback()
 {
    return vtkUTF16TextCodec::New();
 }
diff --git a/IO/Core/vtkWriter.cxx b/IO/Core/vtkWriter.cxx
index 1ed9f0b..47b9fe4 100644
--- a/IO/Core/vtkWriter.cxx
+++ b/IO/Core/vtkWriter.cxx
@@ -24,11 +24,6 @@
 #include <vtksys/ios/sstream>
 
 
-namespace
-{
-  const char hex_digits[] = "0123456789ABCDEF";
-};
-
 // Construct with no start and end write methods or arguments.
 vtkWriter::vtkWriter()
 {
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSight6Elements.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSight6Elements.png.md5
new file mode 100644
index 0000000..91381d9
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSight6Elements.png.md5
@@ -0,0 +1 @@
+f1aaf6506554f871d210ca13767ab9d3
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSight6OfficeBin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSight6OfficeBin.png.md5
new file mode 100644
index 0000000..e22255a
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSight6OfficeBin.png.md5
@@ -0,0 +1 @@
+05b5e6fc2389fe9bbb86f2c5fe5d763d
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightBlow1ASCII.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightBlow1ASCII.png.md5
new file mode 100644
index 0000000..563f678
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightBlow1ASCII.png.md5
@@ -0,0 +1 @@
+8f0ca12b647fbde328ad35eff4e1ac3b
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightBlow1Bin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightBlow1Bin.png.md5
new file mode 100644
index 0000000..ee253b7
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightBlow1Bin.png.md5
@@ -0,0 +1 @@
+4ac2bb73f7bc9f1487d30df346d5f802
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightBlow2ASCII.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightBlow2ASCII.png.md5
new file mode 100644
index 0000000..563f678
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightBlow2ASCII.png.md5
@@ -0,0 +1 @@
+8f0ca12b647fbde328ad35eff4e1ac3b
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightBlow2Bin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightBlow2Bin.png.md5
new file mode 100644
index 0000000..ee253b7
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightBlow2Bin.png.md5
@@ -0,0 +1 @@
+4ac2bb73f7bc9f1487d30df346d5f802
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightBlow3Bin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightBlow3Bin.png.md5
new file mode 100644
index 0000000..ee253b7
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightBlow3Bin.png.md5
@@ -0,0 +1 @@
+4ac2bb73f7bc9f1487d30df346d5f802
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightBlow4Bin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightBlow4Bin.png.md5
new file mode 100644
index 0000000..ee253b7
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightBlow4Bin.png.md5
@@ -0,0 +1 @@
+4ac2bb73f7bc9f1487d30df346d5f802
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightBlow5ASCII.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightBlow5ASCII.png.md5
new file mode 100644
index 0000000..6d635cb
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightBlow5ASCII.png.md5
@@ -0,0 +1 @@
+e7c955ec9b4aa0330df2693ded1d627f
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightCompound.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightCompound.png.md5
new file mode 100644
index 0000000..51981f8
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightCompound.png.md5
@@ -0,0 +1 @@
+8edfa169ca507a57d449662f57803c1c
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightGoldElements.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightGoldElements.png.md5
new file mode 100644
index 0000000..51981f8
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightGoldElements.png.md5
@@ -0,0 +1 @@
+8edfa169ca507a57d449662f57803c1c
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightIronProtASCII.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightIronProtASCII.png.md5
new file mode 100644
index 0000000..3642f6a
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightIronProtASCII.png.md5
@@ -0,0 +1 @@
+f200276ee9b1bfa87eb76d407cee93ed
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightIronProtBin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightIronProtBin.png.md5
new file mode 100644
index 0000000..3642f6a
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightIronProtBin.png.md5
@@ -0,0 +1 @@
+f200276ee9b1bfa87eb76d407cee93ed
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightMandelbrot.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightMandelbrot.png.md5
new file mode 100644
index 0000000..2431cae
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightMandelbrot.png.md5
@@ -0,0 +1 @@
+b41a7522135f553bdc1be1b169df8a4f
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightNfacedASCII.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightNfacedASCII.png.md5
new file mode 100644
index 0000000..bf523d3
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightNfacedASCII.png.md5
@@ -0,0 +1 @@
+b12a5b8677419f62fe63d7991f684814
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightNfacedBin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightNfacedBin.png.md5
new file mode 100644
index 0000000..af6d3f2
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightNfacedBin.png.md5
@@ -0,0 +1 @@
+0b9ecd23133967e27e414e68bf38a8da
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightOfficeASCII.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightOfficeASCII.png.md5
new file mode 100644
index 0000000..e22255a
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightOfficeASCII.png.md5
@@ -0,0 +1 @@
+05b5e6fc2389fe9bbb86f2c5fe5d763d
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightOfficeBin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightOfficeBin.png.md5
new file mode 100644
index 0000000..e22255a
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightOfficeBin.png.md5
@@ -0,0 +1 @@
+05b5e6fc2389fe9bbb86f2c5fe5d763d
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightRectGridASCII.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightRectGridASCII.png.md5
new file mode 100644
index 0000000..750e649
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightRectGridASCII.png.md5
@@ -0,0 +1 @@
+e7e45127df3c9d148c9f1d6476aba29b
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightRectGridASCII_1.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightRectGridASCII_1.png.md5
new file mode 100644
index 0000000..401ab67
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightRectGridASCII_1.png.md5
@@ -0,0 +1 @@
+6ebeb33325d5ce77603fd56974279582
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightRectGridBin.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightRectGridBin.png.md5
new file mode 100644
index 0000000..750e649
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightRectGridBin.png.md5
@@ -0,0 +1 @@
+e7e45127df3c9d148c9f1d6476aba29b
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightRectGridBin_1.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightRectGridBin_1.png.md5
new file mode 100644
index 0000000..401ab67
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightRectGridBin_1.png.md5
@@ -0,0 +1 @@
+6ebeb33325d5ce77603fd56974279582
diff --git a/IO/EnSight/Testing/Data/Baseline/EnSightSelectArrays.png.md5 b/IO/EnSight/Testing/Data/Baseline/EnSightSelectArrays.png.md5
new file mode 100644
index 0000000..a8f49b9
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/EnSightSelectArrays.png.md5
@@ -0,0 +1 @@
+9222e781fc9afa0028c778c371b30b40
diff --git a/IO/EnSight/Testing/Data/Baseline/nacaBinary.png.md5 b/IO/EnSight/Testing/Data/Baseline/nacaBinary.png.md5
new file mode 100644
index 0000000..cf32e13
--- /dev/null
+++ b/IO/EnSight/Testing/Data/Baseline/nacaBinary.png.md5
@@ -0,0 +1 @@
+36a4acb95d0dcec7aeeedaf481ba38fc
diff --git a/IO/EnSight/Testing/Python/CMakeLists.txt b/IO/EnSight/Testing/Python/CMakeLists.txt
index 718fc33..a7e14ec 100644
--- a/IO/EnSight/Testing/Python/CMakeLists.txt
+++ b/IO/EnSight/Testing/Python/CMakeLists.txt
@@ -1,24 +1,22 @@
-if (VTK_DATA_ROOT)
-  add_test_python(EnSight6Elements.py Graphics)
-  add_test_python(EnSight6OfficeBin.py Graphics)
-  add_test_python(EnSightBlow1ASCII.py Graphics)
-  add_test_python(EnSightBlow1Bin.py Graphics)
-  add_test_python(EnSightBlow2ASCII.py Graphics)
-  add_test_python(EnSightBlow2Bin.py Graphics)
-  add_test_python(EnSightBlow3Bin.py Graphics)
-  add_test_python(EnSightBlow4Bin.py Graphics)
-  add_test_python(EnSightBlow5ASCII.py Graphics)
-  add_test_python(EnSightCompound.py Graphics)
-  add_test_python(EnSightGoldElements.py Graphics)
-  add_test_python(EnSightIronProtASCII.py Graphics)
-  add_test_python(EnSightIronProtBin.py Graphics)
-  add_test_python(EnSightMandelbrot.py Graphics)
-  add_test_python(EnSightNfacedASCII.py Graphics)
-  add_test_python(EnSightNfacedBin.py Graphics)
-  add_test_python(EnSightOfficeASCII.py Graphics)
-  add_test_python(EnSightOfficeBin.py Graphics)
-  add_test_python(EnSightRectGridASCII.py Graphics)
-  add_test_python(EnSightRectGridBin.py Graphics)
-  add_test_python(EnSightSelectArrays.py Graphics)
-  add_test_python(nacaBinary.py Graphics)
-endif()
+vtk_add_test_python(EnSight6Elements.py)
+vtk_add_test_python(EnSight6OfficeBin.py)
+vtk_add_test_python(EnSightBlow1ASCII.py)
+vtk_add_test_python(EnSightBlow1Bin.py)
+vtk_add_test_python(EnSightBlow2ASCII.py)
+vtk_add_test_python(EnSightBlow2Bin.py)
+vtk_add_test_python(EnSightBlow3Bin.py)
+vtk_add_test_python(EnSightBlow4Bin.py)
+vtk_add_test_python(EnSightBlow5ASCII.py)
+vtk_add_test_python(EnSightCompound.py)
+vtk_add_test_python(EnSightGoldElements.py)
+vtk_add_test_python(EnSightIronProtASCII.py)
+vtk_add_test_python(EnSightIronProtBin.py)
+vtk_add_test_python(EnSightMandelbrot.py)
+vtk_add_test_python(EnSightNfacedASCII.py)
+vtk_add_test_python(EnSightNfacedBin.py)
+vtk_add_test_python(EnSightOfficeASCII.py)
+vtk_add_test_python(EnSightOfficeBin.py)
+vtk_add_test_python(EnSightRectGridASCII.py)
+vtk_add_test_python(EnSightRectGridBin.py)
+vtk_add_test_python(EnSightSelectArrays.py)
+vtk_add_test_python(nacaBinary.py)
diff --git a/IO/EnSight/Testing/Tcl/CMakeLists.txt b/IO/EnSight/Testing/Tcl/CMakeLists.txt
index 8edc9bf..8aca62a 100644
--- a/IO/EnSight/Testing/Tcl/CMakeLists.txt
+++ b/IO/EnSight/Testing/Tcl/CMakeLists.txt
@@ -1,39 +1,29 @@
-# Tests with test images in Baseline/Graphics
-#
 unset(tests)
-if(VTK_DATA_ROOT)
-  set(tests
-    EnSight6Elements
-    EnSight6OfficeBin
-    EnSightBlow1ASCII
-    EnSightBlow1Bin
-    EnSightBlow2ASCII
-    EnSightBlow2Bin
-    EnSightBlow3Bin
-    EnSightBlow4Bin
-    EnSightBlow5ASCII
-    EnSightCompound
-    EnSightGoldElements
-    EnSightIronProtASCII
-    EnSightIronProtBin
-    EnSightMandelbrot
-    EnSightNfacedASCII
-    EnSightNfacedBin
-    EnSightOfficeASCII
-    EnSightOfficeBin
-    EnSightRectGridASCII
-    EnSightRectGridBin
-    EnSightSelectArrays
-    nacaBinary
+set(tests
+  EnSight6Elements
+  EnSight6OfficeBin
+  EnSightBlow1ASCII
+  EnSightBlow1Bin
+  EnSightBlow2ASCII
+  EnSightBlow2Bin
+  EnSightBlow3Bin
+  EnSightBlow4Bin
+  EnSightBlow5ASCII
+  EnSightCompound
+  EnSightGoldElements
+  EnSightIronProtASCII
+  EnSightIronProtBin
+  EnSightMandelbrot
+  EnSightNfacedASCII
+  EnSightNfacedBin
+  EnSightOfficeASCII
+  EnSightOfficeBin
+  EnSightRectGridASCII
+  EnSightRectGridBin
+  EnSightSelectArrays
+  nacaBinary
   )
-endif()
 
 foreach( tfile ${tests})
-  add_test(NAME ${vtk-module}Tcl-${tfile} COMMAND ${VTK_TCL_EXE}
-    ${vtkTestingRendering_SOURCE_DIR}/rtImageTest.tcl
-    ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.tcl
-    -D ${VTK_DATA_ROOT}
-    -T ${VTK_TEST_OUTPUT_DIR}
-    -V Baseline/Graphics/${tfile}.png
-   )
+  vtk_add_test_tcl(${tfile}.tcl)
 endforeach()
diff --git a/IO/EnSight/vtkEnSight6Reader.cxx b/IO/EnSight/vtkEnSight6Reader.cxx
index fa05740..948e972 100644
--- a/IO/EnSight/vtkEnSight6Reader.cxx
+++ b/IO/EnSight/vtkEnSight6Reader.cxx
@@ -29,7 +29,7 @@
 #include "vtkUnstructuredGrid.h"
 
 
-#include <assert.h>
+#include <cassert>
 #include <ctype.h>
 #include <string>
 
diff --git a/IO/EnSight/vtkEnSightReader.cxx b/IO/EnSight/vtkEnSightReader.cxx
index 3fb5f48..f86bd4a 100644
--- a/IO/EnSight/vtkEnSightReader.cxx
+++ b/IO/EnSight/vtkEnSightReader.cxx
@@ -1306,7 +1306,6 @@ int vtkEnSightReader::ReadCaseFileFile(char* line)
 
     filenameNums->Delete();
     numSteps->Delete();
-    lineRead = this->ReadNextDataLine(line);
     }
 
   return lineRead;
diff --git a/IO/EnSight/vtkGenericEnSightReader.cxx b/IO/EnSight/vtkGenericEnSightReader.cxx
index bff36cc..4fc1682 100644
--- a/IO/EnSight/vtkGenericEnSightReader.cxx
+++ b/IO/EnSight/vtkGenericEnSightReader.cxx
@@ -30,7 +30,7 @@
 
 #include <string>
 #include <map>
-#include <assert.h>
+#include <cassert>
 #include <ctype.h> /* isspace */
 
 vtkStandardNewMacro(vtkGenericEnSightReader);
diff --git a/IO/Exodus/CMakeLists.txt b/IO/Exodus/CMakeLists.txt
index d1da56f..125c87e 100644
--- a/IO/Exodus/CMakeLists.txt
+++ b/IO/Exodus/CMakeLists.txt
@@ -1,4 +1,8 @@
 set(Module_SRCS
+  vtkCPExodusIIElementBlock.cxx
+  vtkCPExodusIIInSituReader.cxx
+  vtkCPExodusIINodalCoordinatesTemplate.txx
+  vtkCPExodusIIResultsArrayTemplate.txx
   vtkExodusIICache.cxx
   vtkExodusIIReader.cxx
   vtkExodusIIReaderParser.cxx
@@ -9,9 +13,18 @@ set(Module_SRCS
   )
 
 set_source_files_properties(
+  vtkCPExodusIINodalCoordinatesTemplate
+  vtkCPExodusIIResultsArrayTemplate
   vtkExodusIIReaderParser
   vtkExodusIIReaderVariableCheck
+
   WRAP_EXCLUDE
   )
 
+set(vtkIOExodus_HDRS
+  vtkCPExodusIINodalCoordinatesTemplate.h
+  vtkCPExodusIIResultsArrayTemplate.h
+)
+
+
 vtk_module_library(vtkIOExodus ${Module_SRCS})
diff --git a/IO/Exodus/Testing/Cxx/CMakeLists.txt b/IO/Exodus/Testing/Cxx/CMakeLists.txt
index a68a967..66e93fe 100644
--- a/IO/Exodus/Testing/Cxx/CMakeLists.txt
+++ b/IO/Exodus/Testing/Cxx/CMakeLists.txt
@@ -1,29 +1,17 @@
+# Tests with data
 # VS6 builds do not handle out-of-range double assignment to float
 # properly. Do not run TestMultiBlockExodusWrite on VS6 builds.
 #
+vtk_add_test_cxx(TestExodusAttributes.cxx NO_VALID NO_OUTPUT)
+vtk_add_test_cxx(TestExodusSideSets.cxx NO_VALID NO_OUTPUT)
 if(NOT CMAKE_GENERATOR MATCHES "Visual Studio 6")
-  set(MyTests ${MyTests}
+  vtk_add_test_cxx(
     TestMultiBlockExodusWrite.cxx
     )
 endif()
 
+vtk_add_test_cxx(NO_VALID
+  TestInSituExodus.cxx
+  )
 
-# Tests with data
-if(VTK_DATA_ROOT)
-  # Use the testing object factory, to reduce boilerplate code in tests.
-  include(vtkTestingObjectFactory)
-  vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-  set(TestsToRun ${Tests})
-  list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-  # Add all the executables
-  foreach(test ${TestsToRun})
-    get_filename_component(TName ${test} NAME_WE)
-      add_test(NAME ${vtk-module}-${TName}
-        COMMAND ${vtk-module}CxxTests ${TName}
-          -D ${VTK_DATA_ROOT}
-          -T ${VTK_TEST_OUTPUT_DIR}
-          -V Baseline/Parallel/${TName}.png)
-  endforeach ()
-endif()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/IO/Exodus/Testing/Cxx/TestExodusAttributes.cxx b/IO/Exodus/Testing/Cxx/TestExodusAttributes.cxx
new file mode 100644
index 0000000..965afba
--- /dev/null
+++ b/IO/Exodus/Testing/Cxx/TestExodusAttributes.cxx
@@ -0,0 +1,75 @@
+#include "vtkCellData.h"
+#include "vtkDataArray.h"
+#include "vtkDataSet.h"
+#include "vtkExodusIIReader.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkNew.h"
+#include "vtkTestUtilities.h"
+
+int TestExodusAttributes(int argc, char* argv[])
+{
+  char* fname = vtkTestUtilities::ExpandDataFileName(
+    argc, argv, "Data/edgeFaceElem.exii");
+  if (!fname)
+    {
+    cout << "Could not obtain filename for test data.\n";
+    return 1;
+    }
+
+  vtkNew<vtkExodusIIReader> rdr;
+  if (!rdr->CanReadFile(fname))
+    {
+    cout << "Cannot read \"" << fname << "\"\n";
+    return 1;
+    }
+  rdr->SetFileName(fname);
+  delete[] fname;
+
+  rdr->UpdateInformation();
+  rdr->SetObjectAttributeStatus(vtkExodusIIReader::ELEM_BLOCK,0,"SPAGHETTI",1);
+  rdr->SetObjectAttributeStatus(vtkExodusIIReader::ELEM_BLOCK,0,"WESTERN",1);
+  rdr->Update();
+  vtkCellData* cd =
+    vtkDataSet::SafeDownCast(
+      vtkMultiBlockDataSet::SafeDownCast(
+        vtkMultiBlockDataSet::SafeDownCast(
+          rdr->GetOutputDataObject(0))
+        ->GetBlock(0))
+      ->GetBlock(0))
+    ->GetCellData();
+  if (!cd)
+    {
+    cout << "Could not obtain cell data\n";
+    return 1;
+    }
+  int na = cd->GetNumberOfArrays();
+  for (int i = 0; i < na; ++i)
+    {
+    vtkDataArray* arr = cd->GetArray(i);
+    cout << "Cell array " << i << " \"" << arr->GetName() << "\"\n";
+    for (int j = 0; j <= arr->GetMaxId(); ++j)
+      {
+      cout << " " << arr->GetTuple1(j) << "\n";
+      }
+    }
+  vtkDataArray* spaghetti = cd->GetArray("SPAGHETTI");
+  vtkDataArray* western = cd->GetArray("WESTERN");
+  if (
+    !spaghetti || !western ||
+    spaghetti->GetNumberOfTuples() != 2 || western->GetNumberOfTuples() != 2)
+    {
+    cout << "Attribute arrays not read or are wrong length.\n";
+    return 1;
+    }
+  if (spaghetti->GetTuple1(0) != 127. || spaghetti->GetTuple1(1) != 137)
+    {
+    cout << "Bad spaghetti\n";
+    return 1;
+    }
+  if (western->GetTuple1(0) != 101. || western->GetTuple1(1) != 139)
+    {
+    cout << "Wrong western\n";
+    return 1;
+    }
+  return 0;
+}
diff --git a/IO/Exodus/Testing/Cxx/TestExodusSideSets.cxx b/IO/Exodus/Testing/Cxx/TestExodusSideSets.cxx
new file mode 100644
index 0000000..519f3b5
--- /dev/null
+++ b/IO/Exodus/Testing/Cxx/TestExodusSideSets.cxx
@@ -0,0 +1,94 @@
+#include "vtkCellData.h"
+#include "vtkDataSet.h"
+#include "vtkExodusIIReader.h"
+#include "vtkIdTypeArray.h"
+#include "vtkIntArray.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkNew.h"
+#include "vtkTestUtilities.h"
+
+int TestExodusSideSets(int argc, char* argv[])
+{
+  char* fname = vtkTestUtilities::ExpandDataFileName(
+    argc, argv, "Data/edgeFaceElem.exii");
+  if (!fname)
+    {
+    cout << "Could not obtain filename for test data.\n";
+    return 1;
+    }
+
+  vtkNew<vtkExodusIIReader> rdr;
+  if (!rdr->CanReadFile(fname))
+    {
+    cout << "Cannot read \"" << fname << "\"\n";
+    return 1;
+    }
+  rdr->SetFileName(fname);
+  delete[] fname;
+
+  rdr->GenerateGlobalNodeIdArrayOn();
+  rdr->GenerateGlobalElementIdArrayOn();
+  rdr->ExodusModelMetadataOn();
+  rdr->UpdateInformation();
+
+  for(int i=0;i<rdr->GetNumberOfObjects(vtkExodusIIReader::ELEM_BLOCK);i++)
+    {
+    rdr->SetObjectStatus(vtkExodusIIReader::ELEM_BLOCK, i, 0);
+    }
+
+  for(int i=0;i<rdr->GetNumberOfObjects(vtkExodusIIReader::SIDE_SET);i++)
+    {
+    rdr->SetObjectStatus(vtkExodusIIReader::SIDE_SET, i, 1);
+    }
+
+  rdr->Update();
+
+  vtkMultiBlockDataSet* mb = vtkMultiBlockDataSet::SafeDownCast(rdr->GetOutput());
+  vtkCellData* cd = vtkDataSet::SafeDownCast(
+    vtkMultiBlockDataSet::SafeDownCast(mb->GetBlock(4))->GetBlock(0))->GetCellData();
+
+  if(cd == NULL)
+    {
+    cerr << "Can't find proper data set\n";
+    return 1;
+    }
+
+  vtkIdTypeArray* sourceelementids = vtkIdTypeArray::SafeDownCast(
+    cd->GetArray(vtkExodusIIReader::GetSideSetSourceElementIdArrayName()));
+
+  vtkIntArray* sourceelementsides = vtkIntArray::SafeDownCast(
+    cd->GetArray(vtkExodusIIReader::GetSideSetSourceElementSideArrayName()));
+
+  if(!sourceelementsides || !sourceelementids)
+    {
+    cerr << "Can't find proper cell data arrays\n";
+    return 1;
+    }
+  else
+    {
+    if(sourceelementids->GetNumberOfTuples() != 5)
+      {
+      cerr << "Wrong number of cell array tuples\n";
+      return 1;
+      }
+    // correct values
+    vtkIdType ids[] = {0, 0, 0, 1, 1};
+    int sides[] = {2, 3, 4, 1, 0};
+
+    for(vtkIdType i=0;i<sourceelementids->GetNumberOfTuples();i++)
+      {
+      if(sourceelementids->GetValue(i) != ids[i])
+        {
+        cerr << "Source element id is wrong\n";
+        return 1;
+        }
+      if(sourceelementsides->GetValue(i) != sides[i])
+        {
+        cerr << "Source element side is wrong\n";
+        return 1;
+        }
+      }
+    }
+
+  return 0;
+}
diff --git a/IO/Exodus/Testing/Cxx/TestInSituExodus.cxx b/IO/Exodus/Testing/Cxx/TestInSituExodus.cxx
new file mode 100644
index 0000000..4e0c5e2
--- /dev/null
+++ b/IO/Exodus/Testing/Cxx/TestInSituExodus.cxx
@@ -0,0 +1,1010 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCPExodusIIElementBlock.h"
+#include "vtkCPExodusIIInSituReader.h"
+#include "vtkCPExodusIINodalCoordinatesTemplate.h"
+#include "vtkCPExodusIIResultsArrayTemplate.h"
+
+#include "vtkCellData.h"
+#include "vtkCellIterator.h"
+#include "vtkConeSource.h"
+#include "vtkDoubleArray.h"
+#include "vtkExodusIIReader.h"
+#include "vtkFloatArray.h"
+#include "vtkGenericCell.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkNew.h"
+#include "vtkPlane.h"
+#include "vtkPolyData.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkSmartPointer.h"
+#include "vtkTestUtilities.h"
+#include "vtkTimerLog.h"
+#include "vtkUnstructuredGrid.h"
+
+// Filters that we test against:
+#include "vtkContourFilter.h"
+#include "vtkDataSetSurfaceFilter.h"
+#include "vtkCutter.h"
+#include "vtkExtractGeometry.h"
+#include "vtkGlyph3D.h"
+#include "vtkWarpScalar.h"
+#include "vtkWarpVector.h"
+
+#include <algorithm>
+#include <cassert>
+#include <cmath>
+#include <string>
+#include <sstream>
+
+// Define this to work around "glommed" point/cell data in the reference data.
+#undef GLOM_WORKAROUND
+//#define GLOM_WORKAROUND
+
+#define FAIL(x)        \
+  cerr << x << endl;   \
+  return EXIT_FAILURE;
+
+#define FAILB(x)       \
+  cerr << x << endl;   \
+  return false;
+
+bool readExodusCopy(std::string fileName, vtkMultiBlockDataSet *mbds)
+{
+  // Read file using reference reader
+  vtkNew<vtkExodusIIReader> reader;
+  reader->SetFileName(fileName.c_str());
+  reader->UpdateInformation();
+
+  // Disable extra arrays:
+  reader->SetGenerateFileIdArray(0);
+  reader->SetGenerateGlobalElementIdArray(0);
+  reader->SetGenerateGlobalNodeIdArray(0);
+  reader->SetGenerateImplicitElementIdArray(0);
+  reader->SetGenerateImplicitNodeIdArray(0);
+  reader->SetGenerateObjectIdCellArray(0);
+
+  // Just read the first timestep
+  int timeStepRange[2];
+  reader->GetTimeStepRange(timeStepRange);
+  reader->SetTimeStep(timeStepRange[0]);
+
+  // Include all points in element blocks (including those unused by the block)
+  reader->SetSqueezePoints(false);
+
+  // Enable all nodal result (point data) arrays
+  int numNodeArrays = reader->GetNumberOfObjectArrays(vtkExodusIIReader::NODAL);
+  for (int i = 0; i < numNodeArrays; ++i)
+    {
+    reader->SetObjectArrayStatus(vtkExodusIIReader::NODAL, i, 1);
+    }
+
+  // Enable all element result (cell data) arrays
+  int numElementBlockArrays =
+      reader->GetNumberOfObjectArrays(vtkExodusIIReader::ELEM_BLOCK);
+  for (int i = 0; i < numElementBlockArrays; ++i)
+    {
+    reader->SetObjectArrayStatus(vtkExodusIIReader::ELEM_BLOCK, i, 1);
+    }
+
+  reader->Update();
+  mbds->ShallowCopy(reader->GetOutput());
+
+  return true;
+}
+
+vtkUnstructuredGridBase* getConnectivityBlock(vtkMultiBlockDataSet *mbds)
+{
+  vtkUnstructuredGridBase *result = NULL;
+   if (vtkDataObject *tmpDO = mbds->GetBlock(0))
+     {
+     if (vtkMultiBlockDataSet *tmpMBDS =
+         vtkMultiBlockDataSet::SafeDownCast(tmpDO))
+       {
+       result = vtkUnstructuredGridBase::SafeDownCast(tmpMBDS->GetBlock(0));
+       }
+     }
+   return result;
+}
+
+// Predicate for std::equal to fuzzy compare floating points.
+template <class Scalar> bool fuzzyEqual(const Scalar &a, const Scalar &b)
+{
+  return fabs(a - b) < 1e-6;
+}
+
+bool compareDataSets(vtkDataSet *ref, vtkDataSet *test)
+{
+  // Compare number of points
+  vtkIdType refNumPoints = ref->GetNumberOfPoints();
+  vtkIdType testNumPoints = test->GetNumberOfPoints();
+  if (refNumPoints != testNumPoints)
+    {
+    FAILB("Number of points do not match (" << refNumPoints << ", "
+          << testNumPoints << ").")
+    }
+
+  // Compare coordinate data
+  double refPoint[3] = {0., 0., 0.};
+  double testPoint[3] = {0., 0., 0.};
+  for (vtkIdType pointId = 0; pointId < testNumPoints; ++pointId)
+    {
+    ref->GetPoint(pointId, refPoint);
+    test->GetPoint(pointId, testPoint);
+    if (fabs(refPoint[0] - testPoint[0]) > 1e-5
+        || fabs(refPoint[1] - testPoint[1]) > 1e-5
+        || fabs(refPoint[2] - testPoint[2]) > 1e-5)
+      {
+      FAILB("Point mismatch at point index: " << pointId
+            << "\n\tExpected: " << refPoint[0] << " " << refPoint[1] << " "
+            << refPoint[2]
+            << "\n\tActual: " << testPoint[0] << " " << testPoint[1] << " "
+            << testPoint[2])
+      }
+    }
+
+  // Compare point data
+  // Number of point data arrays may not match -- the reference reader
+  // "gloms" multi-component arrays together, while the in-situ doesn't (yet?).
+  vtkPointData *refPointData = ref->GetPointData();
+  vtkPointData *testPointData = test->GetPointData();
+  int refNumPointDataArrays = refPointData->GetNumberOfArrays();
+  int testNumPointDataArrays = testPointData->GetNumberOfArrays();
+  if (refNumPointDataArrays != testNumPointDataArrays)
+    {
+#ifdef GLOM_WORKAROUND
+    cerr << "Warning: "
+            "Point data array count mismatch. This may not be an error, as "
+            "the reference data combines multicomponent arrays. "
+            << "Reference: " << refNumPointDataArrays
+            << " Actual: " << testNumPointDataArrays
+            << endl;
+#else
+    FAILB("Point data array count mismatch. This may not be an error, as "
+          "the reference data combines multicomponent arrays. "
+          << "Reference: " << refNumPointDataArrays
+          << " Actual: " << testNumPointDataArrays
+          << " Define GLOM_WORKAROUND in " << __FILE__ << " to treat this "
+          << "message as a warning.")
+#endif
+    }
+  for (int arrayIndex = 0; arrayIndex < testNumPointDataArrays; ++arrayIndex)
+    {
+    vtkDataArray *testArray = testPointData->GetArray(arrayIndex);
+    const char *arrayName = testArray->GetName();
+    vtkDataArray *refArray = refPointData->GetArray(arrayName);
+    if (refArray == NULL)
+      {
+#ifdef GLOM_WORKAROUND
+      cerr << "Warning: "
+           << "Testing point data array '" << arrayName
+           << "' does not exist in the reference data set. This may not be an "
+           << "error if the reference data has probably made this into a "
+           << "multicomponent array. "
+           << endl;
+      continue;
+#else
+      FAILB("Testing point data array '" << arrayName
+            << "' does not exist in the reference data set. This may not be an "
+            << "error if the reference data has probably made this into a "
+            << "multicomponent array. "
+            << " Define GLOM_WORKAROUND in " << __FILE__ << " to treat this "
+            << "message as a warning.")
+#endif
+      }
+
+    int refNumComponents = refArray->GetNumberOfComponents();
+    int testNumComponents = testArray->GetNumberOfComponents();
+    if (refNumComponents != testNumComponents)
+      {
+      FAILB("Number of components mismatch for point data array '"
+            << arrayName << "'")
+      }
+
+    vtkIdType refNumTuples = refArray->GetNumberOfTuples();
+    vtkIdType testNumTuples = testArray->GetNumberOfTuples();
+    if (refNumTuples != testNumTuples)
+      {
+      FAILB("Number of tuples mismatch for point data array '"
+            << arrayName << "'")
+      }
+
+    std::vector<double> refTuple(refNumComponents);
+    std::vector<double> testTuple(testNumComponents);
+    for (vtkIdType i = 0; i < testNumTuples; ++i)
+      {
+      refArray->GetTuple(i, &refTuple[0]);
+      testArray->GetTuple(i, &testTuple[0]);
+      if (!std::equal(refTuple.begin(), refTuple.end(), testTuple.begin(),
+                     fuzzyEqual<double>))
+        {
+        std::stringstream refString;
+        std::stringstream testString;
+        for (int comp = 0; comp < refNumComponents; ++comp)
+          {
+          refString << refTuple[comp] << " ";
+          testString << testTuple[comp] << " ";
+          }
+        FAILB("Tuple mismatch for point data array '" << arrayName
+              << "' at tuple index: " << i << "\n"
+              << "Expected:\n\t" << refString.str() << "\n"
+              << "Actual:\n\t" << testString.str());
+        }
+      }
+    }
+
+  // Compare number of cells
+  vtkIdType refNumCells = ref->GetNumberOfCells();
+  vtkIdType testNumCells = test->GetNumberOfCells();
+  if (refNumCells != testNumCells)
+    {
+    FAILB("Number of cells do not match (" << refNumCells << ", "
+          << testNumCells << ").")
+    }
+
+  // Compare connectivity data
+  vtkNew<vtkGenericCell> refCell;
+  vtkNew<vtkGenericCell> testCell;
+
+  // Test out the iterators, too:
+  vtkSmartPointer<vtkCellIterator> refCellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(ref->NewCellIterator());
+  vtkSmartPointer<vtkCellIterator> testCellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(test->NewCellIterator());
+
+  for (vtkIdType cellId = 0;
+       cellId < testNumCells &&
+       !refCellIter->IsDoneWithTraversal() &&
+       !testCellIter->IsDoneWithTraversal();
+       ++cellId, refCellIter->GoToNextCell(), testCellIter->GoToNextCell())
+    {
+    // Lookup cells in iterators:
+    refCellIter->GetCell(refCell.GetPointer());
+    testCellIter->GetCell(testCell.GetPointer());
+
+    if (refCell->GetCellType() != testCell->GetCellType())
+      {
+      FAILB("Cell types do not match!")
+      }
+    refNumPoints = refCell->GetNumberOfPoints();
+    testNumPoints = testCell->GetNumberOfPoints();
+    if (refNumPoints != testNumPoints)
+      {
+      FAILB("Number of cell points do not match (" << refNumPoints << ", "
+            << testNumPoints << ") for cellId " << cellId)
+      }
+
+    for (vtkIdType pointId = 0; pointId < testNumPoints; ++pointId)
+      {
+      if (refCell->GetPointId(pointId) != testCell->GetPointId(pointId))
+        {
+        FAILB("Point id mismatch in cellId " << cellId)
+        }
+      refCell->Points->GetPoint(pointId, refPoint);
+      testCell->Points->GetPoint(pointId, testPoint);
+      if (fabs(refPoint[0] - testPoint[0]) > 1e-5
+          || fabs(refPoint[1] - testPoint[1]) > 1e-5
+          || fabs(refPoint[2] - testPoint[2]) > 1e-5)
+        {
+        FAILB("Point mismatch in cellId " << cellId
+              << "\n\tExpected: " << refPoint[0] << " " << refPoint[1] << " "
+              << refPoint[2]
+              << "\n\tActual: " << testPoint[0] << " " << testPoint[1] << " "
+              << testPoint[2])
+        }
+      }
+    }
+
+  // Verify that all cells were checked
+  if (!refCellIter->IsDoneWithTraversal() ||
+      !testCellIter->IsDoneWithTraversal())
+    {
+    FAILB("Did not finish traversing all cells (an iterator is still valid).")
+    }
+
+  // Compare cell data
+  // Number of cell data arrays probably won't match -- the reference reader
+  // "gloms" multi-component arrays together, while the in-situ doesn't (yet?).
+  vtkCellData *refCellData = ref->GetCellData();
+  vtkCellData *testCellData = test->GetCellData();
+  int refNumCellDataArrays = refCellData->GetNumberOfArrays();
+  int testNumCellDataArrays = testCellData->GetNumberOfArrays();
+  if (refNumCellDataArrays != testNumCellDataArrays)
+    {
+#ifdef GLOM_WORKAROUND
+    cerr << "Warning: "
+         << "Cell data array count mismatch. This may not be an error, as "
+            "the reference data combines multicomponent arrays. "
+         << "Reference: " << refNumCellDataArrays
+         << " Actual: " << testNumCellDataArrays
+         << endl;
+#else
+    FAILB("Cell data array count mismatch. This may not be an error, as "
+          "the reference data combines multicomponent arrays. "
+          << "Reference: " << refNumCellDataArrays
+          << " Actual: " << testNumCellDataArrays
+          << " Define GLOM_WORKAROUND in " << __FILE__ << " to treat this "
+          << "message as a warning.")
+#endif
+    }
+  for (int arrayIndex = 0; arrayIndex < testNumCellDataArrays; ++arrayIndex)
+    {
+    vtkDataArray *testArray = testCellData->GetArray(arrayIndex);
+    const char *arrayName = testArray->GetName();
+    vtkDataArray *refArray = refCellData->GetArray(arrayName);
+    if (refArray == NULL)
+      {
+#ifdef GLOM_WORKAROUND
+      cerr << "Warning: "
+           << "Testing cell data array '" << arrayName
+           << "' does not exist in the reference data set. But it's cool -- "
+           << "the reference data has probably made this into a multicomponent "
+           << "array."
+           << endl;
+         continue;
+#else
+      FAILB("Testing cell data array '" << arrayName
+            << "' does not exist in the reference data set. But it's cool -- "
+            << "the reference data has probably made this into a multicomponent "
+            << "array."
+            << " Define GLOM_WORKAROUND in " << __FILE__ << " to treat this "
+            << "message as a warning.")
+#endif
+      }
+
+    int refNumComponents = refArray->GetNumberOfComponents();
+    int testNumComponents = testArray->GetNumberOfComponents();
+    if (refNumComponents != testNumComponents)
+      {
+      FAILB("Number of components mismatch for cell data array '"
+            << arrayName << "'")
+      }
+
+    vtkIdType refNumTuples = refArray->GetNumberOfTuples();
+    vtkIdType testNumTuples = testArray->GetNumberOfTuples();
+    if (refNumTuples != testNumTuples)
+      {
+      FAILB("Number of tuples mismatch for cell data array '"
+            << arrayName << "'")
+      }
+
+    std::vector<double> refTuple(refNumComponents);
+    std::vector<double> testTuple(testNumComponents);
+    for (vtkIdType i = 0; i < testNumTuples; ++i)
+      {
+      refArray->GetTuple(i, &refTuple[0]);
+      testArray->GetTuple(i, &testTuple[0]);
+      if (!std::equal(refTuple.begin(), refTuple.end(), testTuple.begin(),
+                     fuzzyEqual<double>))
+        {
+        std::stringstream refString;
+        std::stringstream testString;
+        for (int comp = 0; comp < refNumComponents; ++comp)
+          {
+          refString << refTuple[comp] << " ";
+          testString << testTuple[comp] << " ";
+          }
+        FAILB("Tuple mismatch for cell data array '" << arrayName
+              << "' at tuple index: " << i << "\n"
+              << "Expected:\n\t" << refString.str() << "\n"
+              << "Actual:\n\t" << testString.str());
+        }
+      }
+    }
+
+  return true;
+}
+
+// Add fake scalar and normal data to the dataset
+void populateAttributes(vtkDataSet *ref, vtkDataSet *test)
+{
+  vtkIdType numPoints = ref->GetNumberOfPoints();
+
+  // Create/set scalars for the filters
+  vtkNew<vtkDoubleArray> refScalars;
+  refScalars->SetName("test-scalars");
+  double point[3];
+  for (vtkIdType pointId = 0; pointId < numPoints; ++pointId)
+    {
+    ref->GetPoint(pointId, point);
+    refScalars->InsertNextTuple1((sin(point[0] * point[1]) + cos(point[2])));
+    }
+  vtkNew<vtkCPExodusIIResultsArrayTemplate<double> > testScalars;
+  testScalars->SetName("test-scalars");
+  double *testScalarArray = new double[numPoints];
+  memcpy(testScalarArray, refScalars->GetVoidPointer(0),
+         numPoints * sizeof(double));
+  testScalars->SetExodusScalarArrays(std::vector<double*>(1, testScalarArray),
+                                     numPoints);
+
+  ref->GetPointData()->SetScalars(refScalars.GetPointer());
+  test->GetPointData()->SetScalars(testScalars.GetPointer());
+
+  // And some fake normals
+  vtkNew<vtkFloatArray> refNormals;
+  refNormals->SetName("test-normals");
+  refNormals->SetNumberOfComponents(3);
+  refNormals->SetNumberOfTuples(numPoints);
+  double *testNormalArrayX = new double[numPoints];
+  double *testNormalArrayY = new double[numPoints];
+  double *testNormalArrayZ = new double[numPoints];
+  double normal[3];
+  double norm;
+  for (vtkIdType pointId = 0; pointId < numPoints; ++pointId)
+    {
+    ref->GetPoint(pointId, point);
+    norm = sqrt(point[0]*point[0] + point[1]*point[1] + point[2]*point[2]);
+    if (norm > 1e-5)
+      {
+      testNormalArrayX[pointId] = normal[0] = (point[1] / norm);
+      testNormalArrayY[pointId] = normal[1] = (point[0] / norm);
+      testNormalArrayZ[pointId] = normal[2] = (point[2] / norm);
+      }
+    else
+      {
+      testNormalArrayX[pointId] = normal[0] = 1.0;
+      testNormalArrayY[pointId] = normal[1] = 0.0;
+      testNormalArrayZ[pointId] = normal[2] = 0.0;
+      }
+    refNormals->SetTuple(pointId, normal);
+    }
+  vtkNew<vtkCPExodusIIResultsArrayTemplate<double> > testNormals;
+  testNormals->SetName("test-normals");
+  std::vector<double*> testNormalVector;
+  testNormalVector.push_back(testNormalArrayX);
+  testNormalVector.push_back(testNormalArrayY);
+  testNormalVector.push_back(testNormalArrayZ);
+  testNormals->SetExodusScalarArrays(testNormalVector, numPoints);
+
+  ref->GetPointData()->SetNormals(refNormals.GetPointer());
+  test->GetPointData()->SetNormals(testNormals.GetPointer());
+}
+
+void testContourFilter(vtkUnstructuredGridBase *input,
+                       vtkDataSet *&output,
+                       double &time)
+{
+  vtkNew<vtkTimerLog> timer;
+  vtkNew<vtkContourFilter> contour;
+  contour->SetInputData(input);
+  contour->GenerateValues(2, -0.5, 0.5);
+  timer->StartTimer();
+  contour->Update();
+  timer->StopTimer();
+  output = contour->GetOutput();
+  output->Register(NULL);
+  time = timer->GetElapsedTime();
+}
+
+void testDataSetSurfaceFilter(vtkUnstructuredGridBase *input,
+                              vtkDataSet *&output,
+                              double &time)
+{
+  vtkNew<vtkTimerLog> timer;
+  vtkNew<vtkDataSetSurfaceFilter> extractSurface;
+  extractSurface->SetInputData(input);
+  extractSurface->SetNonlinearSubdivisionLevel(4);
+  timer->StartTimer();
+  extractSurface->Update();
+  timer->StopTimer();
+  output = extractSurface->GetOutput();
+  output->Register(NULL);
+  time = timer->GetElapsedTime();
+}
+
+void testCutterFilter(vtkUnstructuredGridBase *input,
+                      vtkDataSet *&output,
+                      double &time)
+{
+  vtkNew<vtkTimerLog> timer;
+
+  // Create plane for testing slicing
+  vtkNew<vtkPlane> slicePlane;
+  slicePlane->SetOrigin(input->GetCenter());
+  slicePlane->SetNormal(1.0, 1.0, 1.0);
+
+  // Cutter (slice, polydata output)
+  vtkNew<vtkCutter> cutter;
+  cutter->SetInputData(input);
+  cutter->SetCutFunction(slicePlane.GetPointer());
+  cutter->SetGenerateTriangles(0);
+  timer->StartTimer();
+  cutter->Update();
+  timer->StopTimer();
+  output = cutter->GetOutput();
+  output->Register(NULL);
+  time = timer->GetElapsedTime();
+}
+
+void testExtractGeometryFilter(vtkUnstructuredGridBase *input,
+                               vtkDataSet *&output,
+                               double &time)
+{
+  vtkNew<vtkTimerLog> timer;
+
+  // Create plane for testing slicing
+  vtkNew<vtkPlane> slicePlane;
+  slicePlane->SetOrigin(input->GetCenter());
+  slicePlane->SetNormal(1.0, 1.0, 1.0);
+
+  vtkNew<vtkExtractGeometry> extract;
+  extract->SetInputData(input);
+  extract->SetImplicitFunction(slicePlane.GetPointer());
+  extract->SetExtractInside(1);
+  extract->SetExtractOnlyBoundaryCells(1);
+  extract->SetExtractBoundaryCells(1);
+  timer->StartTimer();
+  extract->Update();
+  timer->StopTimer();
+  output = extract->GetOutput();
+  output->Register(NULL);
+  time = timer->GetElapsedTime();
+}
+
+void testGlyph3DFilter(vtkUnstructuredGridBase *input,
+                       vtkDataSet *&output,
+                       double &time)
+{
+  vtkNew<vtkTimerLog> timer;
+
+  // Create a cone to test glyphing
+  vtkNew<vtkConeSource> coneSource;
+  coneSource->SetDirection(0.0, 1.0, 0.0);
+  coneSource->SetHeight(2.5);
+  coneSource->SetCapping(1);
+  coneSource->SetRadius(1.25);
+
+  // Glyph3D
+  vtkNew<vtkGlyph3D> glypher;
+  glypher->SetSourceConnection(coneSource->GetOutputPort());
+  glypher->SetInputData(input);
+  timer->StartTimer();
+  glypher->Update();
+  timer->StopTimer();
+  output = glypher->GetOutput();
+  output->Register(NULL);
+  time = timer->GetElapsedTime();
+}
+
+void testWarpScalarFilter(vtkUnstructuredGridBase *input,
+                          vtkDataSet *&output,
+                          double &time)
+{
+  vtkNew<vtkTimerLog> timer;
+  vtkNew<vtkWarpScalar> warpScalar;
+  warpScalar->SetInputData(input);
+  timer->StartTimer();
+  warpScalar->Update();
+  timer->StopTimer();
+  output = warpScalar->GetOutput();
+  output->Register(NULL);
+  time = timer->GetElapsedTime();
+}
+
+void testWarpVectorFilter(vtkUnstructuredGridBase *input,
+                          vtkDataSet *&output,
+                          double &time)
+{
+  vtkNew<vtkTimerLog> timer;
+  vtkNew<vtkWarpVector> warpVector;
+  warpVector->SetInputData(input);
+  warpVector->SetScaleFactor(1.0);
+  timer->StartTimer();
+  warpVector->Update();
+  timer->StopTimer();
+  output = warpVector->GetOutput();
+  output->Register(NULL);
+  time = timer->GetElapsedTime();
+}
+
+void testPipeline(vtkUnstructuredGridBase *input, vtkDataSet *&output,
+                  double &time)
+{
+  vtkNew<vtkTimerLog> timer;
+
+  vtkNew<vtkWarpScalar> warpScalar;
+  warpScalar->SetInputData(input);
+
+  vtkNew<vtkWarpVector> warpVector;
+  warpVector->SetInputConnection(warpScalar->GetOutputPort());
+  warpVector->SetScaleFactor(1.0);
+
+  timer->StartTimer();
+  warpVector->Update();
+  timer->StopTimer();
+  output = warpVector->GetOutput();
+  output->Register(NULL);
+  time = timer->GetElapsedTime();
+}
+
+#define doBenchmark(call_, reset_, timeLog_, repeat_) \
+{ \
+  timeLog_.clear(); \
+  timeLog_.resize(repeat_, -500); \
+  for (int benchmark = 0; benchmark < repeat_; ++benchmark) \
+    { \
+    double &benchmarkTime = timeLog_[benchmark]; \
+    call_; \
+    if (benchmark + 1 != repeat_) \
+      { \
+      reset_; \
+      } \
+    } \
+}
+
+// Check that refOutput == testOutput, then delete and clear the outputs.
+bool validateFilterOutput(std::string name,
+                          vtkDataSet *&refOutput, vtkDataSet *&testOutput)
+{
+  if (refOutput->GetNumberOfPoints() == 0)
+    {
+    FAILB("Reference " << name << " produced an empty output!")
+    }
+  if (!compareDataSets(refOutput, testOutput))
+    {
+    FAILB(name << " output mismatch.")
+    }
+  cout << name << " produced " << refOutput->GetNumberOfPoints()
+       << " points and " << refOutput->GetNumberOfCells() << " cells." << endl;
+  refOutput->Delete();
+  refOutput = NULL;
+  testOutput->Delete();
+  testOutput = NULL;
+  return true;
+}
+
+void printTimingInfo(const std::string &name,
+                     const std::vector<double> &ref,
+                     const std::vector<double> &test)
+{
+  assert(ref.size() == test.size());
+  double refAverage(0.0);
+  double testAverage(0.0);
+  double refMin(10000.0);
+  double testMin(10000.0);
+  double refMax(0.0);
+  double testMax(0.0);
+
+  for (size_t i = 0; i < ref.size(); ++i)
+    {
+    refAverage += ref[i];
+    testAverage += test[i];
+    refMin = std::min(refMin, ref[i]);
+    refMax = std::max(refMax, ref[i]);
+    testMin = std::min(testMin, test[i]);
+    testMax = std::max(testMax, test[i]);
+    }
+  refAverage  /= static_cast<double>(ref.size());
+  testAverage /= static_cast<double>(test.size());
+
+  double refStdev(0.0);
+  double testStdev(0.0);
+  for (size_t i = 0; i < ref.size(); ++i)
+    {
+    refStdev += (ref[i] - refAverage) * (ref[i] - refAverage);
+    testStdev += (test[i] - testAverage) * (test[i] - testAverage);
+    }
+  refStdev = std::sqrt(refStdev / static_cast<double>(ref.size()));
+  testStdev = std::sqrt(testStdev / static_cast<double>(test.size()));
+
+  cout << "Timing info for test '" << name << "', "
+       << ref.size() << " sample(s):\n\t"
+       << "Average (ref | test | %slowdown): "
+       << std::setprecision(6)
+       << std::setw(9) << refAverage
+       << std::setw(0) << " | "
+       << std::setw(9) << testAverage
+       << std::setw(0) << " | "
+       << std::setw(9) << ((testAverage / refAverage) - 1.0) * 100
+       << std::setw(0) << "%\n\t"
+       << "Std Dev (ref | test): "
+       << std::setw(9) << refStdev
+       << std::setw(0) << " | "
+       << std::setw(9) << testStdev
+       << std::setw(0) << "\n\t"
+       << "Minimum (ref | test): "
+       << std::setw(9) << refMin
+       << std::setw(0) << " | "
+       << std::setw(9) << testMin
+       << std::setw(0) << "\n\t"
+       << "Maximum (ref | test): "
+       << std::setw(9) << refMax
+       << std::setw(0) << " | "
+       << std::setw(9) << testMax
+       << std::setw(0) << endl;
+}
+
+// The test to run while profiling or benchmarking:
+#define CURRENT_TEST testContourFilter
+
+// Define this to profile a particular filter (see testFilters(...)).
+#undef PROFILE
+//#define PROFILE CURRENT_TEST
+
+// Define this to benchmark a particular filter (see testFilters(...)).
+#undef BENCHMARK
+//#define BENCHMARK CURRENT_TEST
+
+bool testFilters(vtkUnstructuredGridBase *ref,
+                 vtkUnstructuredGridBase *test)
+{
+  cout << "Number of points: " << ref->GetNumberOfPoints() << endl;
+  cout << "Number of cells:  " << ref->GetNumberOfCells() << endl;
+
+  // Number of times to run each benchmark. Don't commit a value greater than
+  // 1 to keep the dashboards fast, but this can be increased while benchmarking
+  // or profiling particular filters.
+  int numBenchmarks = 1;
+
+  // Temporary variables for outputs.
+  vtkDataSet *refOutput(NULL);
+  vtkDataSet *testOutput(NULL);
+
+#ifdef PROFILE
+  // Profiling, multirun:
+  std::vector<double> profileTimes;
+  doBenchmark(PROFILE(test, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              profileTimes, numBenchmarks);
+  return true;
+#endif
+
+#ifdef BENCHMARK
+  // Benchmarking:
+  std::vector<double> benchmarkRefTimes;
+  std::vector<double> benchmarkTestTimes;
+  doBenchmark(BENCHMARK(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              benchmarkRefTimes, numBenchmarks);
+  doBenchmark(BENCHMARK(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              benchmarkTestTimes, numBenchmarks);
+  if (!validateFilterOutput("Benchmark:", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("Benchmark", benchmarkRefTimes, benchmarkTestTimes);
+  return true;
+#endif
+
+  //////////////////////////////
+  // Actual tests start here: //
+  //////////////////////////////
+
+  // Contour filter
+  std::vector<double> contourRefTimes;
+  std::vector<double> contourTestTimes;
+  doBenchmark(testContourFilter(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              contourRefTimes, numBenchmarks);
+  doBenchmark(testContourFilter(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              contourTestTimes, numBenchmarks);
+  if (!validateFilterOutput("Contour filter", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("contour", contourRefTimes, contourTestTimes);
+
+  // Extract surface
+  std::vector<double> dataSetSurfaceRefTimes;
+  std::vector<double> dataSetSurfaceTestTimes;
+  doBenchmark(testDataSetSurfaceFilter(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              dataSetSurfaceRefTimes, numBenchmarks);
+  doBenchmark(testDataSetSurfaceFilter(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              dataSetSurfaceTestTimes, numBenchmarks);
+  if (!validateFilterOutput("Data set surface filter", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("dataset surface", dataSetSurfaceRefTimes,
+                  dataSetSurfaceTestTimes);
+
+  // Cutter
+  std::vector<double> cutterRefTimes;
+  std::vector<double> cutterTestTimes;
+  doBenchmark(testCutterFilter(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              cutterRefTimes, numBenchmarks);
+  doBenchmark(testCutterFilter(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              cutterTestTimes, numBenchmarks);
+  if (!validateFilterOutput("Cutter", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("cutter", cutterRefTimes, cutterTestTimes);
+
+  // Extract geometry
+  std::vector<double> extractGeometryRefTimes;
+  std::vector<double> extractGeometryTestTimes;
+  doBenchmark(testExtractGeometryFilter(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              extractGeometryRefTimes, numBenchmarks);
+  doBenchmark(testExtractGeometryFilter(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              extractGeometryTestTimes, numBenchmarks);
+  if (!validateFilterOutput("Extract geometry", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("extract geometry", extractGeometryRefTimes,
+                  extractGeometryTestTimes);
+
+  // Glyph3D
+  std::vector<double> glyph3dRefTimes;
+  std::vector<double> glyph3dTestTimes;
+  doBenchmark(testGlyph3DFilter(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              glyph3dRefTimes, numBenchmarks);
+  doBenchmark(testGlyph3DFilter(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              glyph3dTestTimes, numBenchmarks);
+  if (!validateFilterOutput("Glyph3D", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("glyph3d", glyph3dRefTimes, glyph3dTestTimes);
+
+  // Warp scalar
+  std::vector<double> warpScalarRefTimes;
+  std::vector<double> warpScalarTestTimes;
+  doBenchmark(testWarpScalarFilter(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              warpScalarRefTimes, numBenchmarks);
+  doBenchmark(testWarpScalarFilter(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              warpScalarTestTimes, numBenchmarks);
+  if (!validateFilterOutput("Warp scalar", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("warp scalar", warpScalarRefTimes, warpScalarTestTimes);
+
+  // Warp vector
+  std::vector<double> warpVectorRefTimes;
+  std::vector<double> warpVectorTestTimes;
+  doBenchmark(testWarpVectorFilter(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              warpVectorRefTimes, numBenchmarks);
+  doBenchmark(testWarpVectorFilter(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              warpVectorTestTimes, numBenchmarks);
+  if (!validateFilterOutput("Warp vector", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("warp vector", warpVectorRefTimes, warpVectorTestTimes);
+
+  // mini-mapped pipeline (Warp scalar + vector)
+  std::vector<double> pipelineRefTimes;
+  std::vector<double> pipelineTestTimes;
+  doBenchmark(testPipeline(ref, refOutput, benchmarkTime),
+              refOutput->Delete(); refOutput = NULL,
+              pipelineRefTimes, numBenchmarks);
+  doBenchmark(testPipeline(test, testOutput, benchmarkTime),
+              testOutput->Delete(); testOutput = NULL,
+              pipelineTestTimes, numBenchmarks);
+  // Ensure that the mapped test produced a mapped output:
+  if (!testOutput->IsA("vtkCPExodusIIElementBlock"))
+    {
+    cerr << "Pipeline test did not produce a mapped output object!" << endl;
+    return false;
+    }
+  if (!validateFilterOutput("Pipeline test", refOutput, testOutput))
+    {
+    return false;
+    }
+  printTimingInfo("pipeline", pipelineRefTimes, pipelineTestTimes);
+
+  return true;
+}
+
+bool testCopies(vtkUnstructuredGridBase *test)
+{
+  vtkNew<vtkUnstructuredGrid> vtkTarget;
+  vtkSmartPointer<vtkUnstructuredGridBase> mappedTarget =
+      vtkSmartPointer<vtkUnstructuredGridBase>::Take(test->NewInstance());
+
+  // No deep copy into test class -- it's read only. Can shallow copy into test
+  // class, since it will just share the implementation instance.
+
+  // Deep copy: test --> vtk
+  vtkTarget->DeepCopy(test);
+  if (!compareDataSets(test, vtkTarget.GetPointer()))
+    {
+    FAILB("Deep copy insitu --> VTK failed.")
+    }
+  vtkTarget->Reset();
+
+  // Shallow copy: test --> vtk
+  vtkTarget->ShallowCopy(test); // Should really deep copy.
+  if (!compareDataSets(test, vtkTarget.GetPointer()))
+    {
+    FAILB("Shallow copy insitu --> VTK failed.")
+    }
+  vtkTarget->Reset();
+
+  // Shallow copy: test --> test
+  mappedTarget->ShallowCopy(test);
+  if (!compareDataSets(test, mappedTarget))
+    {
+    FAILB("Shallow copy insitu --> insitu failed.")
+    }
+  mappedTarget->Initialize();
+
+  return true;
+}
+
+int TestInSituExodus(int argc, char *argv[])
+{
+  vtkNew<vtkTimerLog> timer;
+  timer->StartTimer();
+
+  char *fileNameC = vtkTestUtilities::ExpandDataFileName(argc, argv,
+                                                         "Data/box-noglom.ex2");
+  std::string fileName(fileNameC);
+  delete [] fileNameC;
+
+  // Read reference copy
+  vtkNew<vtkMultiBlockDataSet> refMBDS;
+  readExodusCopy(fileName, refMBDS.GetPointer());
+  vtkUnstructuredGridBase *refGrid(getConnectivityBlock(refMBDS.GetPointer()));
+  if (!refGrid)
+    {
+    FAIL("Error retrieving reference element block container.");
+    }
+
+  // Read in-situ copy
+  vtkNew<vtkCPExodusIIInSituReader> reader;
+  reader->SetFileName(fileName.c_str());
+  reader->Update();
+  vtkMultiBlockDataSet *testMBDS = reader->GetOutput();
+  vtkUnstructuredGridBase *grid(getConnectivityBlock(testMBDS));
+  if (!grid)
+    {
+    FAIL("Error retrieving testing element block container.")
+    }
+
+#ifndef PROFILE // These just add noise during profiling:
+  // Compare
+  if (!compareDataSets(refGrid, grid))
+    {
+    FAIL("In-situ data set doesn't match reference data!")
+    }
+
+  if (!testCopies(grid))
+    {
+    FAIL("A copy test failed.")
+    }
+#endif
+
+  populateAttributes(refGrid, grid);
+
+  // Test selected filters
+  if (!testFilters(refGrid, grid))
+    {
+    FAIL("Pipeline test failed!")
+    }
+
+  timer->StopTimer();
+  double time = timer->GetElapsedTime();
+  cout << "Test took " << static_cast<int>(time / 60) << "m "
+       << std::fmod(time, 60.0) << "s." << endl;
+  return EXIT_SUCCESS;
+}
diff --git a/IO/Exodus/Testing/Data/Baseline/TestMultiBlockExodusWrite.png.md5 b/IO/Exodus/Testing/Data/Baseline/TestMultiBlockExodusWrite.png.md5
new file mode 100644
index 0000000..5c0763c
--- /dev/null
+++ b/IO/Exodus/Testing/Data/Baseline/TestMultiBlockExodusWrite.png.md5
@@ -0,0 +1 @@
+5fbd4e8c3ede9b62d4b067d231a8f27a
diff --git a/IO/Exodus/module.cmake b/IO/Exodus/module.cmake
index e9e2cbb..8a162ae 100644
--- a/IO/Exodus/module.cmake
+++ b/IO/Exodus/module.cmake
@@ -4,7 +4,9 @@ vtk_module(vtkIOExodus
   DEPENDS
     vtkFiltersGeneral
     vtkIOXML
+  PRIVATE_DEPENDS
     vtkexodusII
+    vtksys
   TEST_DEPENDS
     vtkTestingRendering
     vtkInteractionStyle
diff --git a/IO/Exodus/vtkCPExodusIIElementBlock.cxx b/IO/Exodus/vtkCPExodusIIElementBlock.cxx
new file mode 100644
index 0000000..0ec317d
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIIElementBlock.cxx
@@ -0,0 +1,222 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIIElementBlock.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCPExodusIIElementBlock.h"
+
+#include "vtkCellType.h"
+#include "vtkCellTypes.h"
+#include "vtkGenericCell.h"
+#include "vtkIdTypeArray.h"
+#include "vtkObjectFactory.h"
+#include "vtkPoints.h"
+
+#include <algorithm>
+
+//------------------------------------------------------------------------------
+vtkStandardNewMacro(vtkCPExodusIIElementBlock)
+vtkStandardNewMacro(vtkCPExodusIIElementBlockImpl)
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockImpl::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "Elements: " << this->Elements << endl;
+  os << indent << "CellType: "
+     << vtkCellTypes::GetClassNameFromTypeId(this->CellType) << endl;
+  os << indent << "CellSize: " << this->CellSize << endl;
+  os << indent << "NumberOfCells: " << this->NumberOfCells << endl;
+}
+
+//------------------------------------------------------------------------------
+bool vtkCPExodusIIElementBlockImpl::SetExodusConnectivityArray(
+    int *elements, const std::string &type, int numElements,
+    int nodesPerElement)
+{
+  if (!elements)
+    {
+    return false;
+    }
+
+  // Try to figure out the vtk cell type:
+  if (type.size() < 3)
+    {
+    vtkErrorMacro(<< "Element type too short, expected at least 3 chars: "
+                  << type);
+    return false;
+    }
+  std::string typekey = type.substr(0, 3);
+  std::transform(typekey.begin(), typekey.end(), typekey.begin(), ::toupper);
+  if (typekey == "CIR" || typekey == "SPH")
+    {
+    this->CellType = VTK_VERTEX;
+    }
+  else if (typekey == "TRU" || typekey == "BEA")
+    {
+    this->CellType = VTK_LINE;
+    }
+  else if (typekey == "TRI")
+    {
+    this->CellType = VTK_TRIANGLE;
+    }
+  else if (typekey == "QUA" || typekey == "SHE")
+    {
+    this->CellType = VTK_QUAD;
+    }
+  else if (typekey == "TET")
+    {
+    this->CellType = VTK_TETRA;
+    }
+  else if (typekey == "WED")
+    {
+    this->CellType = VTK_WEDGE;
+    }
+  else if (typekey == "HEX")
+    {
+    this->CellType = VTK_HEXAHEDRON;
+    }
+  else
+    {
+    vtkErrorMacro(<< "Unknown cell type: " << type);
+    return false;
+    }
+
+  this->CellSize = static_cast<vtkIdType>(nodesPerElement);
+  this->NumberOfCells = static_cast<vtkIdType>(numElements);
+  this->Elements = elements;
+  this->Modified();
+
+  return true;
+}
+
+//------------------------------------------------------------------------------
+vtkIdType vtkCPExodusIIElementBlockImpl::GetNumberOfCells()
+{
+  return this->NumberOfCells;
+}
+
+//------------------------------------------------------------------------------
+int vtkCPExodusIIElementBlockImpl::GetCellType(vtkIdType)
+{
+  return this->CellType;
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockImpl::GetCellPoints(vtkIdType cellId,
+                                                  vtkIdList *ptIds)
+{
+  ptIds->SetNumberOfIds(this->CellSize);
+
+  std::transform(this->GetElementStart(cellId),
+                 this->GetElementEnd(cellId),
+                 ptIds->GetPointer(0), NodeToPoint);
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockImpl::GetPointCells(vtkIdType ptId,
+                                                  vtkIdList *cellIds)
+{
+  const int targetElement = PointToNode(ptId);
+  int *element = this->GetStart();
+  int *elementEnd = this->GetEnd();
+
+  cellIds->Reset();
+
+  element = std::find(element, elementEnd, targetElement);
+  while (element != elementEnd)
+    {
+    cellIds->InsertNextId(static_cast<vtkIdType>((element - this->Elements)
+                                                 / this->CellSize));
+    element = std::find(element, elementEnd, targetElement);
+    }
+}
+
+//------------------------------------------------------------------------------
+int vtkCPExodusIIElementBlockImpl::GetMaxCellSize()
+{
+  return this->CellSize;
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockImpl::GetIdsOfCellsOfType(int type,
+                                                        vtkIdTypeArray *array)
+{
+  array->Reset();
+  if (type == this->CellType)
+    {
+    array->SetNumberOfComponents(1);
+    array->Allocate(this->NumberOfCells);
+    for (vtkIdType i = 0; i < this->NumberOfCells; ++i)
+      {
+      array->InsertNextValue(i);
+      }
+    }
+}
+
+//------------------------------------------------------------------------------
+int vtkCPExodusIIElementBlockImpl::IsHomogeneous()
+{
+  return 1;
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockImpl::Allocate(vtkIdType, int)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+vtkIdType vtkCPExodusIIElementBlockImpl::InsertNextCell(int, vtkIdList*)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+vtkIdType vtkCPExodusIIElementBlockImpl::InsertNextCell(int, vtkIdType, vtkIdType*)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+vtkIdType vtkCPExodusIIElementBlockImpl::InsertNextCell(
+    int, vtkIdType, vtkIdType*, vtkIdType, vtkIdType*)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockImpl::ReplaceCell(vtkIdType, int, vtkIdType*)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+vtkCPExodusIIElementBlockImpl::vtkCPExodusIIElementBlockImpl()
+  : Elements(NULL),
+    CellType(VTK_EMPTY_CELL),
+    CellSize(0),
+    NumberOfCells(0)
+{
+}
+
+//------------------------------------------------------------------------------
+vtkCPExodusIIElementBlockImpl::~vtkCPExodusIIElementBlockImpl()
+{
+  delete [] this->Elements;
+}
diff --git a/IO/Exodus/vtkCPExodusIIElementBlock.h b/IO/Exodus/vtkCPExodusIIElementBlock.h
new file mode 100644
index 0000000..028bc9d
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIIElementBlock.h
@@ -0,0 +1,113 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIIElementBlock.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkCPExodusIIElementBlock - Uses an Exodus II element block as the
+//  implementation of a vtkMappedUnstructuredGrid.
+//
+// .SECTION Description
+// This class allows raw data arrays returned by the Exodus II library to be
+// used directly in VTK without repacking the data into the vtkUnstructuredGrid
+// memory layout. Use the vtkCPExodusIIInSituReader to read an Exodus II file's
+// data into this structure.
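+//
+// A minimal usage sketch (the block is normally produced by the reader rather
+// than built by hand; 'elementBlock' is a hypothetical, already-populated
+// instance): being a vtkUnstructuredGridBase subclass, it can be fed directly
+// to standard VTK filters:
+//
+//   vtkNew<vtkDataSetSurfaceFilter> surface;
+//   surface->SetInputData(elementBlock);
+//   surface->Update();
+//   vtkPolyData *skin = surface->GetOutput();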
+
+#ifndef __vtkCPExodusIIElementBlock_h
+#define __vtkCPExodusIIElementBlock_h
+
+#include "vtkObject.h"
+#include "vtkIOExodusModule.h" // For export macro
+
+#include "vtkMappedUnstructuredGrid.h" // For mapped unstructured grid wrapper
+
+#include <string> // For std::string
+
+class vtkGenericCell;
+
+class VTKIOEXODUS_EXPORT vtkCPExodusIIElementBlockImpl : public vtkObject
+{
+public:
+  static vtkCPExodusIIElementBlockImpl *New();
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+  vtkTypeMacro(vtkCPExodusIIElementBlockImpl, vtkObject)
+
+  // Description:
+  // Set the Exodus element block data. 'elements' is the array returned from
+  // ex_get_elem_conn. 'type', 'numElements', and 'nodesPerElement' are obtained
+  // from ex_get_elem_block. Returns true or false depending on whether or not
+  // the element type can be translated into a VTK cell type. This object takes
+  // ownership of the elements array unless this function returns false.
+  bool SetExodusConnectivityArray(int *elements, const std::string &type,
+                                  int numElements, int nodesPerElement);
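+  //
+  // A sketch of the expected call pattern (mirrors
+  // vtkCPExodusIIInSituReader::ExGetElemBlocks; variable names here are
+  // hypothetical):
+  //
+  //   int *connect = new int[numElem * nodesPerElem];
+  //   ex_get_elem_conn(fileId, blockId, connect);
+  //   if (!impl->SetExodusConnectivityArray(connect, elemType,
+  //                                         numElem, nodesPerElem))
+  //     {
+  //     delete [] connect; // ownership is only taken on success
+  //     }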
+
+  // API for vtkMappedUnstructuredGrid's implementation.
+  vtkIdType GetNumberOfCells();
+  int GetCellType(vtkIdType cellId);
+  void GetCellPoints(vtkIdType cellId, vtkIdList *ptIds);
+  void GetPointCells(vtkIdType ptId, vtkIdList *cellIds);
+  int GetMaxCellSize();
+  void GetIdsOfCellsOfType(int type, vtkIdTypeArray *array);
+  int IsHomogeneous();
+
+  // This container is read only -- these methods do nothing but print a
+  // warning.
+  void Allocate(vtkIdType numCells, int extSize = 1000);
+  vtkIdType InsertNextCell(int type, vtkIdList *ptIds);
+  vtkIdType InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds);
+  vtkIdType InsertNextCell(int type, vtkIdType npts, vtkIdType *ptIds,
+                           vtkIdType nfaces, vtkIdType *faces);
+  void ReplaceCell(vtkIdType cellId, int npts, vtkIdType *pts);
+
+protected:
+  vtkCPExodusIIElementBlockImpl();
+  ~vtkCPExodusIIElementBlockImpl();
+
+private:
+  vtkCPExodusIIElementBlockImpl(const vtkCPExodusIIElementBlockImpl &); // Not implemented.
+  void operator=(const vtkCPExodusIIElementBlockImpl &);   // Not implemented.
+
+  // Convert between Exodus node ids and VTK point ids.
+  static vtkIdType NodeToPoint(const int &id)
+  {
+    return static_cast<vtkIdType>(id - 1);
+  }
+  static int PointToNode(const vtkIdType &id)
+  {
+    return static_cast<int>(id + 1);
+  }
+
+  // Convenience methods to get pointers into the element array.
+  int* GetElementStart(vtkIdType cellId) const
+  {
+    return this->Elements + (cellId * this->CellSize);
+  }
+  int* GetElementEnd(vtkIdType cellId) const
+  {
+    return this->Elements + (cellId * this->CellSize) + this->CellSize;
+  }
+  int* GetStart() const { return this->Elements; }
+  int* GetEnd() const
+  {
+    return this->Elements + (this->NumberOfCells * this->CellSize);
+  }
+
+  int *Elements;
+  int CellType;
+  int CellSize;
+  vtkIdType NumberOfCells;
+};
+
+vtkMakeExportedMappedUnstructuredGrid(vtkCPExodusIIElementBlock,
+                                      vtkCPExodusIIElementBlockImpl,
+                                      VTKIOEXODUS_EXPORT)
+
+#endif //__vtkCPExodusIIElementBlock_h
diff --git a/IO/Exodus/vtkCPExodusIIElementBlockCellIterator.cxx b/IO/Exodus/vtkCPExodusIIElementBlockCellIterator.cxx
new file mode 100644
index 0000000..e6a6582
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIIElementBlockCellIterator.cxx
@@ -0,0 +1,113 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIIElementBlockCellIterator.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCPExodusIIElementBlockCellIterator.h"
+
+#include "vtkCPExodusIIElementBlock.h"
+#include "vtkCPExodusIIElementBlockPrivate.h"
+#include "vtkObjectFactory.h"
+#include "vtkPoints.h"
+
+#include <algorithm>
+
+vtkStandardNewMacro(vtkCPExodusIIElementBlockCellIterator)
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockCellIterator::PrintSelf(ostream &os,
+                                                      vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "Storage: " << this->Storage.GetPointer() << endl;
+  os << indent << "DataSetPoints: "
+     << this->DataSetPoints.GetPointer() << endl;
+  os << indent << "CellId: "  << this->CellId << endl;
+}
+
+//------------------------------------------------------------------------------
+bool vtkCPExodusIIElementBlockCellIterator::IsValid()
+{
+  return this->Storage.GetPointer()
+      && this->CellId < this->Storage->NumberOfCells;
+}
+
+//------------------------------------------------------------------------------
+vtkIdType vtkCPExodusIIElementBlockCellIterator::GetCellId()
+{
+  return this->CellId;
+}
+
+//------------------------------------------------------------------------------
+vtkCPExodusIIElementBlockCellIterator::vtkCPExodusIIElementBlockCellIterator()
+    : Storage(NULL),
+      DataSetPoints(NULL),
+      CellId(0)
+{
+}
+
+//------------------------------------------------------------------------------
+vtkCPExodusIIElementBlockCellIterator::~vtkCPExodusIIElementBlockCellIterator()
+{
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockCellIterator::ResetToFirstCell()
+{
+  this->CellId = 0;
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockCellIterator::IncrementToNextCell()
+{
+  ++this->CellId;
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockCellIterator::FetchCellType()
+{
+  this->CellType = this->Storage->CellType;
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockCellIterator::FetchPointIds()
+{
+  this->PointIds->SetNumberOfIds(this->Storage->CellSize);
+
+  std::transform(this->Storage->GetElementStart(this->CellId),
+                 this->Storage->GetElementEnd(this->CellId),
+                 this->PointIds->GetPointer(0), StorageType::NodeToPoint);
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockCellIterator::FetchPoints()
+{
+  this->DataSetPoints->GetPoints(this->GetPointIds(), this->Points);
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIElementBlockCellIterator::SetStorage(
+    vtkCPExodusIIElementBlock *eb)
+{
+  if (eb != NULL)
+    {
+    this->Storage = eb->GetInternals();
+    this->DataSetPoints = eb->GetPoints();
+    }
+  else
+    {
+    this->Storage = NULL;
+    this->DataSetPoints = NULL;
+    }
+  this->CellId = 0;
+}
diff --git a/IO/Exodus/vtkCPExodusIIElementBlockCellIterator.h b/IO/Exodus/vtkCPExodusIIElementBlockCellIterator.h
new file mode 100644
index 0000000..59ae0a3
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIIElementBlockCellIterator.h
@@ -0,0 +1,64 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIIElementBlockCellIterator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkCPExodusIIElementBlockCellIterator - vtkCellIterator subclass
+// specialized for vtkCPExodusIIElementBlock.
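+//
+// Traversal normally goes through the generic vtkCellIterator interface
+// obtained from the data set (a hedged sketch; 'grid' is a hypothetical
+// vtkCPExodusIIElementBlock pointer):
+//
+//   vtkCellIterator *it = grid->NewCellIterator();
+//   for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
+//     {
+//     int cellType = it->GetCellType();
+//     vtkIdList *ptIds = it->GetPointIds();
+//     // ... use cellType and ptIds ...
+//     }
+//   it->Delete();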
+
+#ifndef __vtkCPExodusIIElementBlockCellIterator_h
+#define __vtkCPExodusIIElementBlockCellIterator_h
+
+#include "vtkCellIterator.h"
+#include "vtkIOExodusModule.h" // For export macro
+
+#include "vtkSmartPointer.h" // For smart pointer
+
+class vtkCPExodusIIElementBlock;
+class vtkCPExodusIIElementBlockPrivate;
+
+class VTKIOEXODUS_EXPORT vtkCPExodusIIElementBlockCellIterator
+    : public vtkCellIterator
+{
+public:
+  typedef vtkCPExodusIIElementBlockPrivate StorageType;
+
+  static vtkCPExodusIIElementBlockCellIterator *New();
+  vtkTypeMacro(vtkCPExodusIIElementBlockCellIterator, vtkCellIterator)
+  void PrintSelf(ostream &os, vtkIndent indent);
+
+  bool IsValid();
+  vtkIdType GetCellId();
+
+protected:
+  vtkCPExodusIIElementBlockCellIterator();
+  ~vtkCPExodusIIElementBlockCellIterator();
+
+  void ResetToFirstCell();
+  void IncrementToNextCell();
+  void FetchCellType();
+  void FetchPointIds();
+  void FetchPoints();
+
+  friend class ::vtkCPExodusIIElementBlock;
+  void SetStorage(vtkCPExodusIIElementBlock *eb);
+
+private:
+  vtkCPExodusIIElementBlockCellIterator(const vtkCPExodusIIElementBlockCellIterator &); // Not implemented.
+  void operator=(const vtkCPExodusIIElementBlockCellIterator &);   // Not implemented.
+
+  vtkSmartPointer<StorageType> Storage;
+  vtkSmartPointer<vtkPoints> DataSetPoints;
+  vtkIdType CellId;
+};
+
+#endif //__vtkCPExodusIIElementBlockCellIterator_h
diff --git a/IO/Exodus/vtkCPExodusIIInSituReader.cxx b/IO/Exodus/vtkCPExodusIIInSituReader.cxx
new file mode 100644
index 0000000..6a2fcb0
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIIInSituReader.cxx
@@ -0,0 +1,442 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIIInSituReader.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCPExodusIIInSituReader.h"
+
+#include "vtkCellData.h"
+#include "vtkCPExodusIIElementBlock.h"
+#include "vtkCPExodusIINodalCoordinatesTemplate.h"
+#include "vtkCPExodusIIResultsArrayTemplate.h"
+#include "vtkDemandDrivenPipeline.h"
+#include "vtkDoubleArray.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkObjectFactory.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkMultiBlockDataSet.h"
+
+#include "vtk_exodusII.h"
+
+vtkStandardNewMacro(vtkCPExodusIIInSituReader)
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIInSituReader::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "FileName: "
+     << (this->FileName ? this->FileName : "(none)") << endl;
+}
+
+//------------------------------------------------------------------------------
+vtkCPExodusIIInSituReader::vtkCPExodusIIInSituReader()
+  : FileName(NULL),
+    FileId(-1),
+    NumberOfDimensions(0),
+    NumberOfNodes(0),
+    NumberOfElementBlocks(0),
+    CurrentTimeStep(0)
+{
+  this->TimeStepRange[0] = 0;
+  this->TimeStepRange[1] = 0;
+  this->SetNumberOfInputPorts(0);
+}
+
+//------------------------------------------------------------------------------
+vtkCPExodusIIInSituReader::~vtkCPExodusIIInSituReader()
+{
+  this->SetFileName(NULL);
+}
+
+//------------------------------------------------------------------------------
+int vtkCPExodusIIInSituReader::ProcessRequest(
+    vtkInformation *request, vtkInformationVector **inputVector,
+    vtkInformationVector *outputVector)
+{
+  if(request->Has(vtkDemandDrivenPipeline::REQUEST_DATA()))
+    {
+    return this->RequestData(request, inputVector, outputVector);
+    }
+
+  if(request->Has(vtkDemandDrivenPipeline::REQUEST_INFORMATION()))
+    {
+    return this->RequestInformation(request, inputVector, outputVector);
+    }
+
+  return this->Superclass::ProcessRequest(request, inputVector, outputVector);
+}
+
+//------------------------------------------------------------------------------
+int vtkCPExodusIIInSituReader::RequestData(vtkInformation *,
+                                         vtkInformationVector **,
+                                         vtkInformationVector *outputVector)
+{
+  // Get output object:
+  vtkInformation* outInfo(outputVector->GetInformationObject(0));
+  vtkMultiBlockDataSet *output(vtkMultiBlockDataSet::SafeDownCast(
+                                 outInfo->Get( vtkDataObject::DATA_OBJECT())));
+
+  // Prepare high-level structure:
+  //
+  // output                             vtkMultiBlockDataSet
+  //   - Block 0: this->ElementBlocks   vtkMultiBlockDataSet
+  //     - Block N: Element blocks      vtkCPExodusIIElementBlock
+  output->SetNumberOfBlocks(1);
+  output->SetBlock(0, this->ElementBlocks.GetPointer());
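+  //
+  // A consumer typically unwraps this structure as (hedged sketch; 'reader'
+  // is the consumer's pointer to this reader and 'blockIndex' is
+  // hypothetical):
+  //
+  //   vtkMultiBlockDataSet *blocks = vtkMultiBlockDataSet::SafeDownCast(
+  //     reader->GetOutput()->GetBlock(0));
+  //   vtkCPExodusIIElementBlock *eb = vtkCPExodusIIElementBlock::SafeDownCast(
+  //     blocks->GetBlock(blockIndex));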
+
+  bool success = false;
+
+  if (!this->ExOpen())
+    {
+    return 0;
+    }
+
+  for (;;) // Used to skip reading rest of file and close handle if error occurs
+    {
+    if (!this->ExGetMetaData())
+      {
+      break;
+      }
+
+    if (!this->ExGetCoords())
+      {
+      break;
+      }
+
+    if (!this->ExGetNodalVars())
+      {
+      break;
+      }
+
+    if (!this->ExGetElemBlocks())
+      {
+      break;
+      }
+
+    success = true;
+    break;
+    }
+
+  this->ExClose();
+
+  if (!success)
+    {
+    output->Initialize();
+    }
+
+  return success ? 1 : 0;
+}
+
+//------------------------------------------------------------------------------
+int vtkCPExodusIIInSituReader::RequestInformation(
+    vtkInformation *, vtkInformationVector **, vtkInformationVector *)
+{
+  if (!this->ExOpen())
+    {
+    return 0;
+    }
+
+  bool success(this->ExGetMetaData());
+
+  this->ExClose();
+
+  return success ? 1 : 0;
+}
+
+//------------------------------------------------------------------------------
+bool vtkCPExodusIIInSituReader::ExOpen()
+{
+  int doubleSize = sizeof(double);
+  int fileRealSize = 0;
+  float exodusVersion;
+
+  this->FileId = ex_open(this->FileName, EX_READ, &doubleSize, &fileRealSize,
+                         &exodusVersion);
+
+  if (this->FileId < 0)
+    {
+    vtkErrorMacro("Cannot open file: " << this->FileName);
+    return false;
+    }
+  return true;
+}
+
+//------------------------------------------------------------------------------
+bool vtkCPExodusIIInSituReader::ExGetMetaData()
+{
+  // Generic metadata:
+  int numElem, numNodeSets, numSideSets;
+  std::string title(MAX_LINE_LENGTH + 1, '\0');
+
+  int error = ex_get_init(this->FileId,
+                          &title[0],
+                          &this->NumberOfDimensions,
+                          &this->NumberOfNodes,
+                          &numElem,
+                          &this->NumberOfElementBlocks,
+                          &numNodeSets, &numSideSets);
+
+  // Trim excess null characters from string:
+  title.resize(strlen(title.c_str()));
+
+  if (error < 0)
+    {
+    vtkErrorMacro("Error retrieving file metadata.");
+    return false;
+    }
+
+  // Number of nodal variables
+  int numNodalVars;
+
+  error = ex_get_var_param(this->FileId, "n", &numNodalVars);
+
+  if (error < 0)
+    {
+    vtkErrorMacro("Error retrieving number of nodal variables.");
+    return false;
+    }
+
+  // Names of nodal variables
+  this->NodalVariableNames = std::vector<std::string>(
+        numNodalVars, std::string(MAX_STR_LENGTH+1, '\0'));
+
+  for (int i = 0; i < numNodalVars; ++i)
+    {
+    error = ex_get_var_name(this->FileId, "n", i + 1,
+                            &(this->NodalVariableNames[i][0]));
+    if (error < 0)
+      {
+      vtkErrorMacro("Error retrieving nodal variable name at index " << i);
+      return false;
+      }
+    // Trim excess null chars from the strings:
+    this->NodalVariableNames[i].resize(
+          strlen(this->NodalVariableNames[i].c_str()));
+    }
+
+  // Number of element variables
+  int numElemVars;
+
+  error = ex_get_var_param(this->FileId, "e", &numElemVars);
+
+  if (error < 0)
+    {
+    vtkErrorMacro("Error retrieving number of element variables.");
+    return false;
+    }
+
+  // Names of element variables
+  this->ElementVariableNames = std::vector<std::string>(
+        numElemVars, std::string(MAX_STR_LENGTH+1, '\0'));
+
+  for (int i = 0; i < numElemVars; ++i)
+    {
+    error = ex_get_var_name(this->FileId, "e", i + 1,
+                            &(this->ElementVariableNames[i][0]));
+    if (error < 0)
+      {
+      vtkErrorMacro("Error retrieving element variable name at index " << i);
+      return false;
+      }
+    // Trim excess null chars from the strings:
+    this->ElementVariableNames[i].resize(
+          strlen(this->ElementVariableNames[i].c_str()));
+    }
+
+  // Element block ids:
+  this->ElementBlockIds.resize(this->NumberOfElementBlocks);
+
+  error = ex_get_elem_blk_ids(this->FileId, &(this->ElementBlockIds[0]));
+
+  if (error < 0)
+    {
+    vtkErrorMacro("Failed to get the element block ids.");
+    return false;
+    }
+
+  // Timesteps
+  int numTimeSteps;
+
+  error = ex_inquire(this->FileId, EX_INQ_TIME,
+                     &numTimeSteps, NULL, NULL);
+  if (error < 0)
+    {
+    vtkErrorMacro("Error retrieving the number of timesteps.");
+    return false;
+    }
+
+  this->TimeStepRange[0] = 0;
+  this->TimeStepRange[1] = numTimeSteps - 1;
+  this->TimeSteps.resize(numTimeSteps);
+
+  if (numTimeSteps > 0)
+    {
+    error = ex_get_all_times(this->FileId, &(this->TimeSteps[0]));
+
+    if (error < 0)
+      {
+      vtkErrorMacro("Error retrieving timestep array.");
+      return false;
+      }
+    }
+  return true;
+}
+
+//------------------------------------------------------------------------------
+bool vtkCPExodusIIInSituReader::ExGetCoords()
+{
+  this->Points->Reset();
+  vtkNew<vtkCPExodusIINodalCoordinatesTemplate<double> > nodeCoords;
+
+  // Get coordinates
+  double *x(new double[this->NumberOfNodes]);
+  double *y(new double[this->NumberOfNodes]);
+  double *z(this->NumberOfDimensions >= 3
+            ? new double[this->NumberOfNodes]
+            : NULL);
+
+  int error = ex_get_coord(this->FileId, x, y, z);
+
+  if (error < 0)
+    {
+    delete [] x;
+    delete [] y;
+    delete [] z;
+    vtkErrorMacro("Error retrieving coordinates.");
+    return false;
+    }
+
+  // NodalCoordinates takes ownership of the arrays.
+  nodeCoords->SetExodusScalarArrays(x, y, z, this->NumberOfNodes);
+  this->Points->SetData(nodeCoords.GetPointer());
+  return true;
+}
+
+//------------------------------------------------------------------------------
+bool vtkCPExodusIIInSituReader::ExGetNodalVars()
+{
+  this->PointData->Reset();
+  const int numNodalVars = static_cast<int>(this->NodalVariableNames.size());
+  for (int nodalVarIndex = 0; nodalVarIndex < numNodalVars; ++nodalVarIndex)
+    {
+    double *nodalVars = new double[this->NumberOfNodes];
+    int error = ex_get_nodal_var(this->FileId, this->CurrentTimeStep + 1,
+                                 nodalVarIndex + 1, this->NumberOfNodes,
+                                 nodalVars);
+    std::vector<double*> varsVector(1, nodalVars);
+    vtkNew<vtkCPExodusIIResultsArrayTemplate<double> > nodalVarArray;
+    nodalVarArray->SetExodusScalarArrays(varsVector, this->NumberOfNodes);
+    nodalVarArray->SetName(this->NodalVariableNames[nodalVarIndex].c_str());
+
+    if (error < 0)
+      {
+      vtkErrorMacro("Failed to read nodal variable array '"
+                    << this->NodalVariableNames[nodalVarIndex] << "'");
+      return false;
+      }
+
+    this->PointData->AddArray(nodalVarArray.GetPointer());
+    }
+  return true;
+}
+
+//------------------------------------------------------------------------------
+bool vtkCPExodusIIInSituReader::ExGetElemBlocks()
+{
+  const int numElemBlk = static_cast<int>(this->ElementBlockIds.size());
+  const int numElemVars = static_cast<int>(this->ElementVariableNames.size());
+  this->ElementBlocks->Initialize();
+  this->ElementBlocks->SetNumberOfBlocks(numElemBlk);
+  for (int blockInd = 0; blockInd < numElemBlk; ++blockInd)
+    {
+    std::string elemType(MAX_STR_LENGTH + 1, '\0');
+    int numElem;
+    int nodesPerElem;
+    int numAttributes;
+
+    int error = ex_get_elem_block(this->FileId,
+                                  this->ElementBlockIds[blockInd],
+                                  &(elemType[0]), &numElem, &nodesPerElem,
+                                  &numAttributes);
+
+    // Trim excess null chars from the type string:
+    elemType.resize(strlen(elemType.c_str()));
+
+    if (error < 0)
+      {
+      vtkErrorMacro("Failed to get the element block metadata for block "
+                    << blockInd);
+      return false;
+      }
+
+    // Get element block connectivity
+    vtkNew<vtkCPExodusIIElementBlock> block;
+    int *connect = new int[numElem * nodesPerElem];
+    error = ex_get_elem_conn(this->FileId, this->ElementBlockIds[blockInd],
+                             connect);
+    if (!block->GetImplementation()->SetExodusConnectivityArray(
+          connect, elemType, numElem, nodesPerElem))
+      {
+      delete [] connect;
+      return false;
+      }
+
+    if (error < 0)
+      {
+      vtkErrorMacro("Failed to get the connectivity for block " << blockInd);
+      return false;
+      }
+
+    // Use the mapped point container for the block points
+    block->SetPoints(this->Points.GetPointer());
+
+    // Add the point data arrays
+    block->GetPointData()->ShallowCopy(this->PointData.GetPointer());
+
+    // Read the element variables (cell data)
+    for (int elemVarIndex = 0; elemVarIndex < numElemVars; ++elemVarIndex)
+      {
+      double *elemVars = new double[numElem];
+      error = ex_get_elem_var(this->FileId, this->CurrentTimeStep + 1,
+                              elemVarIndex + 1, this->ElementBlockIds[blockInd],
+                              numElem, elemVars);
+      std::vector<double*> varsVector(1, elemVars);
+      vtkNew<vtkCPExodusIIResultsArrayTemplate<double> > elemVarArray;
+      elemVarArray->SetExodusScalarArrays(varsVector, numElem);
+      elemVarArray->SetName(this->ElementVariableNames[elemVarIndex].c_str());
+
+      if (error < 0)
+        {
+        vtkErrorMacro("Failed to read element block variable array '"
+                      << this->ElementVariableNames[elemVarIndex] << "'");
+        return false;
+        }
+
+      block->GetCellData()->AddArray(elemVarArray.GetPointer());
+      }
+
+    // Add this element block to the multi-block data set
+    this->ElementBlocks->SetBlock(blockInd, block.GetPointer());
+    }
+
+  return true;
+}
+
+//------------------------------------------------------------------------------
+void vtkCPExodusIIInSituReader::ExClose()
+{
+  ex_close(this->FileId);
+  this->FileId = -1;
+}
diff --git a/IO/Exodus/vtkCPExodusIIInSituReader.h b/IO/Exodus/vtkCPExodusIIInSituReader.h
new file mode 100644
index 0000000..2c44aa1
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIIInSituReader.h
@@ -0,0 +1,112 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIIInSituReader.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkCPExodusIIInSituReader - Read an Exodus II file into data structures
+// that map the raw arrays returned by the Exodus II library into a multi-block
+// data set containing vtkUnstructuredGridBase subclasses.
+//
+// .SECTION Description
+// This class can be used to import Exodus II files into VTK without repacking
+// the data into the standard VTK memory layout, avoiding the cost of a deep
+// copy.
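+//
+// Minimal usage, as exercised by the TestInSituExodus test (a sketch; the
+// file name is a placeholder):
+//
+//   vtkNew<vtkCPExodusIIInSituReader> reader;
+//   reader->SetFileName("datafile.ex2");
+//   reader->Update();
+//   vtkMultiBlockDataSet *output = reader->GetOutput();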
+
+#ifndef __vtkCPExodusIIInSituReader_h
+#define __vtkCPExodusIIInSituReader_h
+
+#include "vtkIOExodusModule.h" // For export macro
+#include "vtkMultiBlockDataSetAlgorithm.h"
+#include "vtkNew.h" // For vtkNew
+#include <string> // For std::string
+#include <vector> // For std::vector
+
+class vtkDataArrayCollection;
+class vtkPointData;
+class vtkPoints;
+
+class VTKIOEXODUS_EXPORT vtkCPExodusIIInSituReader :
+    public vtkMultiBlockDataSetAlgorithm
+{
+public:
+  static vtkCPExodusIIInSituReader *New();
+  vtkTypeMacro(vtkCPExodusIIInSituReader, vtkMultiBlockDataSetAlgorithm)
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Get/Set the name of the Exodus file to read.
+  vtkSetStringMacro(FileName)
+  vtkGetStringMacro(FileName)
+
+  // Description:
+  // Get/Set the current timestep to read as a zero-based index.
+  vtkGetMacro(CurrentTimeStep, int)
+  vtkSetMacro(CurrentTimeStep, int)
+
+  // Description:
+  // Get the range of timesteps, represented as [0, numTimeSteps - 1]. Call
+  // UpdateInformation first to set this without reading any timestep data.
+  vtkGetVector2Macro(TimeStepRange, int)
+
+  // Description:
+  // Get the floating point time value associated with the timestep at 'step'.
+  double GetTimeStepValue(int step)
+  {
+    return TimeSteps.at(step);
+  }
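+
+  // A hedged sketch of stepping through every timestep with the API above:
+  //
+  //   reader->UpdateInformation();   // fills TimeStepRange without bulk reads
+  //   int range[2];
+  //   reader->GetTimeStepRange(range);
+  //   for (int t = range[0]; t <= range[1]; ++t)
+  //     {
+  //     reader->SetCurrentTimeStep(t);
+  //     reader->Update();                          // reads data for step t
+  //     double time = reader->GetTimeStepValue(t); // simulation time
+  //     }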
+
+protected:
+  vtkCPExodusIIInSituReader();
+  ~vtkCPExodusIIInSituReader();
+
+  int ProcessRequest(vtkInformation *request,
+                     vtkInformationVector **inputVector,
+                     vtkInformationVector *outputVector);
+  int RequestData(vtkInformation *, vtkInformationVector **,
+                  vtkInformationVector *);
+  int RequestInformation(vtkInformation *, vtkInformationVector **,
+                         vtkInformationVector *);
+
+private:
+  vtkCPExodusIIInSituReader(const vtkCPExodusIIInSituReader &); // Not implemented.
+  void operator=(const vtkCPExodusIIInSituReader &);   // Not implemented.
+
+  bool ExOpen();
+  char *FileName;
+  int FileId;
+
+  bool ExGetMetaData();
+  int NumberOfDimensions;
+  int NumberOfNodes;
+  int NumberOfElementBlocks;
+  std::vector<std::string> NodalVariableNames;
+  std::vector<std::string> ElementVariableNames;
+  std::vector<int> ElementBlockIds;
+  std::vector<double> TimeSteps;
+  int TimeStepRange[2];
+
+  bool ExGetCoords();
+  vtkNew<vtkPoints> Points;
+
+  bool ExGetNodalVars();
+  vtkNew<vtkPointData> PointData;
+
+  bool ExGetElemBlocks();
+  vtkNew<vtkMultiBlockDataSet> ElementBlocks;
+
+  void ExClose();
+
+  int CurrentTimeStep;
+};
+
+#endif //__vtkCPExodusIIInSituReader_h
diff --git a/IO/Exodus/vtkCPExodusIINodalCoordinatesTemplate.h b/IO/Exodus/vtkCPExodusIINodalCoordinatesTemplate.h
new file mode 100644
index 0000000..e510463
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIINodalCoordinatesTemplate.h
@@ -0,0 +1,124 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIINodalCoordinatesTemplate.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkCPExodusIINodalCoordinatesTemplate - Map native Exodus II coordinate
+// arrays into the vtkDataArray interface.
+//
+// .SECTION Description
+// Map native Exodus II coordinate arrays into the vtkDataArray interface. Use
+// the vtkCPExodusIIInSituReader to read an Exodus II file's data into this
+// structure.
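+//
+// A sketch of how the reader populates this container (mirrors
+// vtkCPExodusIIInSituReader::ExGetCoords; 'x', 'y', 'z', 'numNodes', and
+// 'points' are hypothetical names here):
+//
+//   vtkNew<vtkCPExodusIINodalCoordinatesTemplate<double> > nodeCoords;
+//   nodeCoords->SetExodusScalarArrays(x, y, z, numNodes); // takes ownership
+//   points->SetData(nodeCoords.GetPointer());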
+
+#ifndef __vtkCPExodusIINodalCoordinatesTemplate_h
+#define __vtkCPExodusIINodalCoordinatesTemplate_h
+
+#include "vtkMappedDataArray.h"
+#include "vtkIOExodusModule.h" // For export macro
+
+#include "vtkTypeTemplate.h" // For templated vtkObject API
+#include "vtkObjectFactory.h" // for vtkStandardNewMacro
+
+template <class Scalar>
+class vtkCPExodusIINodalCoordinatesTemplate:
+    public vtkTypeTemplate<vtkCPExodusIINodalCoordinatesTemplate<Scalar>,
+                           vtkMappedDataArray<Scalar> >
+{
+public:
+  vtkMappedDataArrayNewInstanceMacro(
+      vtkCPExodusIINodalCoordinatesTemplate<Scalar>)
+  static vtkCPExodusIINodalCoordinatesTemplate *New();
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Set the raw scalar arrays for the coordinate set. This class takes
+  // ownership of the arrays and deletes them with delete[].
+  void SetExodusScalarArrays(Scalar *x, Scalar *y, Scalar *z,
+                             vtkIdType numPoints);
+
+  // Reimplemented virtuals -- see superclasses for descriptions:
+  void Initialize();
+  void GetTuples(vtkIdList *ptIds, vtkAbstractArray *output);
+  void GetTuples(vtkIdType p1, vtkIdType p2, vtkAbstractArray *output);
+  void Squeeze();
+  vtkArrayIterator *NewIterator();
+  vtkIdType LookupValue(vtkVariant value);
+  void LookupValue(vtkVariant value, vtkIdList *ids);
+  vtkVariant GetVariantValue(vtkIdType idx);
+  void ClearLookup();
+  double* GetTuple(vtkIdType i);
+  void GetTuple(vtkIdType i, double *tuple);
+  vtkIdType LookupTypedValue(Scalar value);
+  void LookupTypedValue(Scalar value, vtkIdList *ids);
+  Scalar GetValue(vtkIdType idx);
+  Scalar& GetValueReference(vtkIdType idx);
+  void GetTupleValue(vtkIdType idx, Scalar *t);
+
+  // Description:
+  // This container is read only -- these methods do nothing but print a
+  // warning.
+  int Allocate(vtkIdType sz, vtkIdType ext);
+  int Resize(vtkIdType numTuples);
+  void SetNumberOfTuples(vtkIdType number);
+  void SetTuple(vtkIdType i, vtkIdType j, vtkAbstractArray *source);
+  void SetTuple(vtkIdType i, const float *source);
+  void SetTuple(vtkIdType i, const double *source);
+  void InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray *source);
+  void InsertTuple(vtkIdType i, const float *source);
+  void InsertTuple(vtkIdType i, const double *source);
+  void InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                    vtkAbstractArray *source);
+  vtkIdType InsertNextTuple(vtkIdType j, vtkAbstractArray *source);
+  vtkIdType InsertNextTuple(const float *source);
+  vtkIdType InsertNextTuple(const double *source);
+  void DeepCopy(vtkAbstractArray *aa);
+  void DeepCopy(vtkDataArray *da);
+  void InterpolateTuple(vtkIdType i, vtkIdList *ptIndices,
+                        vtkAbstractArray* source,  double* weights);
+  void InterpolateTuple(vtkIdType i, vtkIdType id1, vtkAbstractArray *source1,
+                        vtkIdType id2, vtkAbstractArray *source2, double t);
+  void SetVariantValue(vtkIdType idx, vtkVariant value);
+  void RemoveTuple(vtkIdType id);
+  void RemoveFirstTuple();
+  void RemoveLastTuple();
+  void SetTupleValue(vtkIdType i, const Scalar *t);
+  void InsertTupleValue(vtkIdType i, const Scalar *t);
+  vtkIdType InsertNextTupleValue(const Scalar *t);
+  void SetValue(vtkIdType idx, Scalar value);
+  vtkIdType InsertNextValue(Scalar v);
+  void InsertValue(vtkIdType idx, Scalar v);
+
+protected:
+  vtkCPExodusIINodalCoordinatesTemplate();
+  ~vtkCPExodusIINodalCoordinatesTemplate();
+
+  Scalar *XArray;
+  Scalar *YArray;
+  Scalar *ZArray;
+
+private:
+  vtkCPExodusIINodalCoordinatesTemplate(
+      const vtkCPExodusIINodalCoordinatesTemplate &); // Not implemented.
+  void operator=(
+      const vtkCPExodusIINodalCoordinatesTemplate &); // Not implemented.
+
+  vtkIdType Lookup(const Scalar &val, vtkIdType startIndex);
+  double *TempDoubleArray;
+};
+
+#include "vtkCPExodusIINodalCoordinatesTemplate.txx"
+
+#endif //__vtkCPExodusIINodalCoordinatesTemplate_h
+
+// VTK-HeaderTest-Exclude: vtkCPExodusIINodalCoordinatesTemplate.h
diff --git a/IO/Exodus/vtkCPExodusIINodalCoordinatesTemplate.txx b/IO/Exodus/vtkCPExodusIINodalCoordinatesTemplate.txx
new file mode 100644
index 0000000..8aa9259
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIINodalCoordinatesTemplate.txx
@@ -0,0 +1,509 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIINodalCoordinatesTemplate.txx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCPExodusIINodalCoordinatesTemplate.h"
+
+#include "vtkIdList.h"
+#include "vtkObjectFactory.h"
+#include "vtkVariant.h"
+#include "vtkVariantCast.h"
+
+//------------------------------------------------------------------------------
+// Can't use vtkStandardNewMacro with a template.
+template <class Scalar> vtkCPExodusIINodalCoordinatesTemplate<Scalar> *
+vtkCPExodusIINodalCoordinatesTemplate<Scalar>::New()
+{
+  VTK_STANDARD_NEW_BODY(vtkCPExodusIINodalCoordinatesTemplate<Scalar>)
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->vtkCPExodusIINodalCoordinatesTemplate<Scalar>::Superclass::PrintSelf(
+        os, indent);
+  os << indent << "XArray: " << this->XArray << std::endl;
+  os << indent << "YArray: " << this->YArray << std::endl;
+  os << indent << "ZArray: " << this->ZArray << std::endl;
+  os << indent << "TempDoubleArray: " << this->TempDoubleArray << std::endl;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::Initialize()
+{
+  delete [] this->XArray;
+  this->XArray = NULL;
+  delete [] this->YArray;
+  this->YArray = NULL;
+  delete [] this->ZArray;
+  this->ZArray = NULL;
+  delete [] this->TempDoubleArray;
+  this->TempDoubleArray = NULL;
+  this->MaxId = -1;
+  this->Size = 0;
+  this->NumberOfComponents = 1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::GetTuples(vtkIdList *ptIds, vtkAbstractArray *output)
+{
+  vtkDataArray *outArray = vtkDataArray::FastDownCast(output);
+  if (!outArray)
+    {
+    vtkWarningMacro(<<"Input is not a vtkDataArray");
+    return;
+    }
+
+  const vtkIdType numTuples = ptIds->GetNumberOfIds();
+
+  outArray->SetNumberOfComponents(this->NumberOfComponents);
+  outArray->SetNumberOfTuples(numTuples);
+
+  for (vtkIdType i = 0; i < numTuples; ++i)
+    {
+    outArray->SetTuple(i, this->GetTuple(ptIds->GetId(i)));
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::GetTuples(vtkIdType p1, vtkIdType p2, vtkAbstractArray *output)
+{
+  vtkDataArray *da = vtkDataArray::FastDownCast(output);
+  if (!da)
+    {
+    vtkErrorMacro(<<"Input is not a vtkDataArray");
+    return;
+    }
+
+  if (da->GetNumberOfComponents() != this->GetNumberOfComponents())
+    {
+    vtkErrorMacro(<<"Incorrect number of components in input array.");
+    return;
+    }
+
+  for (vtkIdType daTupleId = 0; p1 <= p2; ++p1)
+    {
+    da->SetTuple(daTupleId++, this->GetTuple(p1));
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::Squeeze()
+{
+  // noop
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkArrayIterator*
+vtkCPExodusIINodalCoordinatesTemplate<Scalar>::NewIterator()
+{
+  vtkErrorMacro(<<"Not implemented.");
+  return NULL;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::LookupValue(vtkVariant value)
+{
+  bool valid = true;
+  Scalar val = vtkVariantCast<Scalar>(value, &valid);
+  if (valid)
+    {
+    return this->Lookup(val, 0);
+    }
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::LookupValue(vtkVariant value, vtkIdList *ids)
+{
+  bool valid = true;
+  Scalar val = vtkVariantCast<Scalar>(value, &valid);
+  ids->Reset();
+  if (valid)
+    {
+    vtkIdType index = 0;
+    while ((index = this->Lookup(val, index)) >= 0)
+      {
+      ids->InsertNextId(index++);
+      }
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkVariant vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::GetVariantValue(vtkIdType idx)
+{
+  return vtkVariant(this->GetValueReference(idx));
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::ClearLookup()
+{
+  // no-op, no fast lookup implemented.
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> double* vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::GetTuple(vtkIdType i)
+{
+  this->GetTuple(i, this->TempDoubleArray);
+  return this->TempDoubleArray;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::GetTuple(vtkIdType i, double *tuple)
+{
+  tuple[0] = static_cast<double>(this->XArray[i]);
+  tuple[1] = static_cast<double>(this->YArray[i]);
+  if (this->ZArray != NULL)
+    {
+    tuple[2] = static_cast<double>(this->ZArray[i]);
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::LookupTypedValue(Scalar value)
+{
+  return this->Lookup(value, 0);
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::LookupTypedValue(Scalar value, vtkIdList *ids)
+{
+  ids->Reset();
+  vtkIdType index = 0;
+  while ((index = this->Lookup(value, index)) >= 0)
+    {
+    ids->InsertNextId(index++);
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> Scalar vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::GetValue(vtkIdType idx)
+{
+  return this->GetValueReference(idx);
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> Scalar& vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::GetValueReference(vtkIdType idx)
+{
+  const vtkIdType tuple = idx / this->NumberOfComponents;
+  const vtkIdType comp = idx % this->NumberOfComponents;
+  switch (comp)
+    {
+    case 0:
+      return this->XArray[tuple];
+    case 1:
+      return this->YArray[tuple];
+    case 2:
+      return this->ZArray[tuple];
+    default:
+      vtkErrorMacro(<< "Invalid number of components.");
+      static Scalar dummy(0);
+      return dummy;
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::GetTupleValue(vtkIdType tupleId, Scalar *tuple)
+{
+  tuple[0] = this->XArray[tupleId];
+  tuple[1] = this->YArray[tupleId];
+  if (this->ZArray != NULL)
+    {
+    tuple[2] = this->ZArray[tupleId];
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> int vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::Allocate(vtkIdType, vtkIdType)
+{
+  vtkErrorMacro("Read only container.")
+  return 0;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> int vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::Resize(vtkIdType)
+{
+  vtkErrorMacro("Read only container.")
+  return 0;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::SetNumberOfTuples(vtkIdType)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::SetTuple(vtkIdType, vtkIdType, vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::SetTuple(vtkIdType, const float *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::SetTuple(vtkIdType, const double *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertTuple(vtkIdType, vtkIdType, vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertTuple(vtkIdType, const float *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertTuple(vtkIdType, const double *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertTuples(vtkIdList *, vtkIdList *, vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertNextTuple(vtkIdType, vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertNextTuple(const float *)
+{
+
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertNextTuple(const double *)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::DeepCopy(vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::DeepCopy(vtkDataArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InterpolateTuple(vtkIdType, vtkIdList *, vtkAbstractArray *, double *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InterpolateTuple(vtkIdType, vtkIdType, vtkAbstractArray*, vtkIdType,
+                   vtkAbstractArray*, double)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::SetVariantValue(vtkIdType, vtkVariant)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::RemoveTuple(vtkIdType)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::RemoveFirstTuple()
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::RemoveLastTuple()
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::SetTupleValue(vtkIdType, const Scalar*)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertTupleValue(vtkIdType, const Scalar*)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertNextTupleValue(const Scalar *)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::SetValue(vtkIdType, Scalar)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertNextValue(Scalar)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::InsertValue(vtkIdType, Scalar)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::vtkCPExodusIINodalCoordinatesTemplate()
+  : XArray(NULL),
+    YArray(NULL),
+    ZArray(NULL),
+    TempDoubleArray(NULL)
+{
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::~vtkCPExodusIINodalCoordinatesTemplate()
+{
+  delete [] this->XArray;
+  delete [] this->YArray;
+  delete [] this->ZArray;
+  delete [] this->TempDoubleArray;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::SetExodusScalarArrays(Scalar *x, Scalar *y, Scalar *z, vtkIdType numPoints)
+{
+  Initialize();
+  this->XArray = x;
+  this->YArray = y;
+  this->ZArray = z;
+  this->NumberOfComponents = (z != NULL) ? 3 : 2;
+  this->Size = this->NumberOfComponents * numPoints;
+  this->MaxId = this->Size - 1;
+  this->TempDoubleArray = new double [this->NumberOfComponents];
+  this->Modified();
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIINodalCoordinatesTemplate<Scalar>
+::Lookup(const Scalar &val, vtkIdType index)
+{
+  while (index <= this->MaxId)
+    {
+    if (this->GetValueReference(index++) == val)
+      {
+      return index - 1; // undo the post-increment in the test above
+      }
+    }
+  return -1;
+}
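
For illustration (this snippet is not part of the patch): a minimal sketch of how the nodal-coordinates template above is intended to be used. The buffer and variable names are assumptions; note that SetExodusScalarArrays() hands ownership of the x/y/z buffers to the array, since the destructor releases them with delete [].

    #include "vtkCPExodusIINodalCoordinatesTemplate.h"
    #include "vtkNew.h"
    #include "vtkPoints.h"

    // Wrap three separately allocated Exodus coordinate buffers in a
    // zero-copy vtkDataArray and hand it to a vtkPoints container.
    void WrapExodusCoordinates(double *x, double *y, double *z,
                               vtkIdType numPoints, vtkPoints *points)
    {
      vtkNew<vtkCPExodusIINodalCoordinatesTemplate<double> > coords;
      // Ownership of x, y and z passes to 'coords' (freed with delete []).
      coords->SetExodusScalarArrays(x, y, z, numPoints);
      points->SetData(coords.GetPointer());
    }
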
diff --git a/IO/Exodus/vtkCPExodusIIResultsArrayTemplate.h b/IO/Exodus/vtkCPExodusIIResultsArrayTemplate.h
new file mode 100644
index 0000000..3df9c64
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIIResultsArrayTemplate.h
@@ -0,0 +1,114 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIIResultsArrayTemplate.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+// .NAME vtkCPExodusIIResultsArrayTemplate - Map native Exodus II results arrays
+// into the vtkDataArray interface.
+//
+// .SECTION Description
+// Map native Exodus II results arrays into the vtkDataArray interface. Use
+// the vtkCPExodusIIInSituReader to read an Exodus II file's data into this
+// structure.
+
+#ifndef __vtkCPExodusIIResultsArrayTemplate_h
+#define __vtkCPExodusIIResultsArrayTemplate_h
+
+#include "vtkMappedDataArray.h"
+
+#include "vtkTypeTemplate.h" // For templated vtkObject API
+#include "vtkObjectFactory.h" // for vtkStandardNewMacro
+
+template <class Scalar>
+class vtkCPExodusIIResultsArrayTemplate:
+    public vtkTypeTemplate<vtkCPExodusIIResultsArrayTemplate<Scalar>,
+                           vtkMappedDataArray<Scalar> >
+{
+public:
+  vtkMappedDataArrayNewInstanceMacro(vtkCPExodusIIResultsArrayTemplate<Scalar>)
+  static vtkCPExodusIIResultsArrayTemplate *New();
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  void SetExodusScalarArrays(std::vector<Scalar*> arrays, vtkIdType numTuples);
+
+  // Reimplemented virtuals -- see superclasses for descriptions:
+  void Initialize();
+  void GetTuples(vtkIdList *ptIds, vtkAbstractArray *output);
+  void GetTuples(vtkIdType p1, vtkIdType p2, vtkAbstractArray *output);
+  void Squeeze();
+  vtkArrayIterator *NewIterator();
+  vtkIdType LookupValue(vtkVariant value);
+  void LookupValue(vtkVariant value, vtkIdList *ids);
+  vtkVariant GetVariantValue(vtkIdType idx);
+  void ClearLookup();
+  double* GetTuple(vtkIdType i);
+  void GetTuple(vtkIdType i, double *tuple);
+  vtkIdType LookupTypedValue(Scalar value);
+  void LookupTypedValue(Scalar value, vtkIdList *ids);
+  Scalar GetValue(vtkIdType idx);
+  Scalar& GetValueReference(vtkIdType idx);
+  void GetTupleValue(vtkIdType idx, Scalar *t);
+
+  // Description:
+  // This container is read only -- these methods do nothing but print a
+  // warning.
+  int Allocate(vtkIdType sz, vtkIdType ext);
+  int Resize(vtkIdType numTuples);
+  void SetNumberOfTuples(vtkIdType number);
+  void SetTuple(vtkIdType i, vtkIdType j, vtkAbstractArray *source);
+  void SetTuple(vtkIdType i, const float *source);
+  void SetTuple(vtkIdType i, const double *source);
+  void InsertTuple(vtkIdType i, vtkIdType j, vtkAbstractArray *source);
+  void InsertTuple(vtkIdType i, const float *source);
+  void InsertTuple(vtkIdType i, const double *source);
+  void InsertTuples(vtkIdList *dstIds, vtkIdList *srcIds,
+                    vtkAbstractArray *source);
+  vtkIdType InsertNextTuple(vtkIdType j, vtkAbstractArray *source);
+  vtkIdType InsertNextTuple(const float *source);
+  vtkIdType InsertNextTuple(const double *source);
+  void DeepCopy(vtkAbstractArray *aa);
+  void DeepCopy(vtkDataArray *da);
+  void InterpolateTuple(vtkIdType i, vtkIdList *ptIndices,
+                        vtkAbstractArray* source,  double* weights);
+  void InterpolateTuple(vtkIdType i, vtkIdType id1, vtkAbstractArray *source1,
+                        vtkIdType id2, vtkAbstractArray *source2, double t);
+  void SetVariantValue(vtkIdType idx, vtkVariant value);
+  void RemoveTuple(vtkIdType id);
+  void RemoveFirstTuple();
+  void RemoveLastTuple();
+  void SetTupleValue(vtkIdType i, const Scalar *t);
+  void InsertTupleValue(vtkIdType i, const Scalar *t);
+  vtkIdType InsertNextTupleValue(const Scalar *t);
+  void SetValue(vtkIdType idx, Scalar value);
+  vtkIdType InsertNextValue(Scalar v);
+  void InsertValue(vtkIdType idx, Scalar v);
+
+protected:
+  vtkCPExodusIIResultsArrayTemplate();
+  ~vtkCPExodusIIResultsArrayTemplate();
+
+  std::vector<Scalar *> Arrays;
+
+private:
+  vtkCPExodusIIResultsArrayTemplate(const vtkCPExodusIIResultsArrayTemplate &); // Not implemented.
+  void operator=(const vtkCPExodusIIResultsArrayTemplate &); // Not implemented.
+
+  vtkIdType Lookup(const Scalar &val, vtkIdType startIndex);
+  double *TempDoubleArray;
+};
+
+#include "vtkCPExodusIIResultsArrayTemplate.txx"
+
+#endif //__vtkCPExodusIIResultsArrayTemplate_h
+
+// VTK-HeaderTest-Exclude: vtkCPExodusIIResultsArrayTemplate.h
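
For illustration (not part of the patch): a hedged sketch of the intended use of this class, wrapping one raw buffer per component into a single multi-component vtkDataArray. The field and variable names are made up for the example; as with the nodal-coordinates template, SetExodusScalarArrays() takes ownership of the buffers.

    #include "vtkCPExodusIIResultsArrayTemplate.h"
    #include "vtkNew.h"
    #include "vtkPointData.h"
    #include "vtkUnstructuredGrid.h"

    #include <vector>

    // Expose three per-component result buffers (e.g. a velocity field read
    // from an Exodus II file) as one 3-component point-data array, zero-copy.
    void AttachVelocity(double *vx, double *vy, double *vz,
                        vtkIdType numPoints, vtkUnstructuredGrid *grid)
    {
      std::vector<double *> components;
      components.push_back(vx);
      components.push_back(vy);
      components.push_back(vz);

      vtkNew<vtkCPExodusIIResultsArrayTemplate<double> > velocity;
      velocity->SetExodusScalarArrays(components, numPoints);
      velocity->SetName("velocity");
      grid->GetPointData()->AddArray(velocity.GetPointer());
    }
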
diff --git a/IO/Exodus/vtkCPExodusIIResultsArrayTemplate.txx b/IO/Exodus/vtkCPExodusIIResultsArrayTemplate.txx
new file mode 100644
index 0000000..657d32d
--- /dev/null
+++ b/IO/Exodus/vtkCPExodusIIResultsArrayTemplate.txx
@@ -0,0 +1,501 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkCPExodusIIResultsArrayTemplate.txx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkCPExodusIIResultsArrayTemplate.h"
+
+#include "vtkIdList.h"
+#include "vtkObjectFactory.h"
+#include "vtkVariant.h"
+#include "vtkVariantCast.h"
+
+//------------------------------------------------------------------------------
+// Can't use vtkStandardNewMacro on a templated class.
+template <class Scalar> vtkCPExodusIIResultsArrayTemplate<Scalar> *
+vtkCPExodusIIResultsArrayTemplate<Scalar>::New()
+{
+  VTK_STANDARD_NEW_BODY(vtkCPExodusIIResultsArrayTemplate<Scalar>)
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->vtkCPExodusIIResultsArrayTemplate<Scalar>::Superclass::PrintSelf(
+        os, indent);
+
+  os << indent << "Number of arrays: " << this->Arrays.size() << "\n";
+  vtkIndent deeper = indent.GetNextIndent();
+  for (size_t i = 0; i < this->Arrays.size(); ++i)
+    {
+    os << deeper << "Array " << i << ": " << this->Arrays.at(i) << "\n";
+    }
+
+  os << indent << "TempDoubleArray: " << this->TempDoubleArray << "\n";
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::SetExodusScalarArrays(std::vector<Scalar *> arrays, vtkIdType numTuples)
+{
+  this->Initialize();
+  this->NumberOfComponents = static_cast<int>(arrays.size());
+  this->Arrays = arrays;
+  this->Size = this->NumberOfComponents * numTuples;
+  this->MaxId = this->Size - 1;
+  this->TempDoubleArray = new double [this->NumberOfComponents];
+  this->Modified();
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::Initialize()
+{
+  for (size_t i = 0; i < this->Arrays.size(); ++i)
+    {
+    delete [] this->Arrays[i]; // match the delete [] used in the destructor
+    }
+  this->Arrays.clear();
+  this->Arrays.push_back(NULL);
+
+  delete [] this->TempDoubleArray;
+  this->TempDoubleArray = NULL;
+
+  this->MaxId = -1;
+  this->Size = 0;
+  this->NumberOfComponents = 1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::GetTuples(vtkIdList *ptIds, vtkAbstractArray *output)
+{
+  vtkDataArray *da = vtkDataArray::FastDownCast(output);
+  if (!da)
+    {
+    vtkWarningMacro(<<"Input is not a vtkDataArray");
+    return;
+    }
+
+  if (da->GetNumberOfComponents() != this->GetNumberOfComponents())
+    {
+    vtkWarningMacro(<<"Incorrect number of components in input array.");
+    return;
+    }
+
+  const vtkIdType numPoints = ptIds->GetNumberOfIds();
+  for (vtkIdType i = 0; i < numPoints; ++i)
+    {
+    da->SetTuple(i, this->GetTuple(ptIds->GetId(i)));
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::GetTuples(vtkIdType p1, vtkIdType p2, vtkAbstractArray *output)
+{
+  vtkDataArray *da = vtkDataArray::FastDownCast(output);
+  if (!da)
+    {
+    vtkErrorMacro(<<"Input is not a vtkDataArray");
+    return;
+    }
+
+  if (da->GetNumberOfComponents() != this->GetNumberOfComponents())
+    {
+    vtkErrorMacro(<<"Incorrect number of components in input array.");
+    return;
+    }
+
+  for (vtkIdType daTupleId = 0; p1 <= p2; ++p1)
+    {
+    da->SetTuple(daTupleId++, this->GetTuple(p1));
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::Squeeze()
+{
+  // noop
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkArrayIterator*
+vtkCPExodusIIResultsArrayTemplate<Scalar>::NewIterator()
+{
+  vtkErrorMacro(<<"Not implemented.");
+  return NULL;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIIResultsArrayTemplate<Scalar>
+::LookupValue(vtkVariant value)
+{
+  bool valid = true;
+  Scalar val = vtkVariantCast<Scalar>(value, &valid);
+  if (valid)
+    {
+    return this->Lookup(val, 0);
+    }
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::LookupValue(vtkVariant value, vtkIdList *ids)
+{
+  bool valid = true;
+  Scalar val = vtkVariantCast<Scalar>(value, &valid);
+  ids->Reset();
+  if (valid)
+    {
+    vtkIdType index = 0;
+    while ((index = this->Lookup(val, index)) >= 0)
+      {
+      ids->InsertNextId(index);
+      ++index;
+      }
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkVariant vtkCPExodusIIResultsArrayTemplate<Scalar>
+::GetVariantValue(vtkIdType idx)
+{
+  return vtkVariant(this->GetValueReference(idx));
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::ClearLookup()
+{
+  // no-op, no fast lookup implemented.
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> double* vtkCPExodusIIResultsArrayTemplate<Scalar>
+::GetTuple(vtkIdType i)
+{
+  this->GetTuple(i, this->TempDoubleArray);
+  return this->TempDoubleArray;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::GetTuple(vtkIdType i, double *tuple)
+{
+  for (size_t comp = 0; comp < this->Arrays.size(); ++comp)
+    {
+    tuple[comp] = static_cast<double>(this->Arrays[comp][i]);
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIIResultsArrayTemplate<Scalar>
+::LookupTypedValue(Scalar value)
+{
+  return this->Lookup(value, 0);
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::LookupTypedValue(Scalar value, vtkIdList *ids)
+{
+  ids->Reset();
+  vtkIdType index = 0;
+  while ((index = this->Lookup(value, index)) >= 0)
+    {
+    ids->InsertNextId(index);
+    ++index;
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> Scalar vtkCPExodusIIResultsArrayTemplate<Scalar>
+::GetValue(vtkIdType idx)
+{
+  return this->GetValueReference(idx);
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> Scalar& vtkCPExodusIIResultsArrayTemplate<Scalar>
+::GetValueReference(vtkIdType idx)
+{
+  const vtkIdType tuple = idx / this->NumberOfComponents;
+  const vtkIdType comp = idx % this->NumberOfComponents;
+  return this->Arrays[comp][tuple];
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::GetTupleValue(vtkIdType tupleId, Scalar *tuple)
+{
+  for (size_t comp = 0; comp < this->Arrays.size(); ++comp)
+    {
+    tuple[comp] = this->Arrays[comp][tupleId];
+    }
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> int vtkCPExodusIIResultsArrayTemplate<Scalar>
+::Allocate(vtkIdType, vtkIdType)
+{
+  vtkErrorMacro("Read only container.")
+  return 0;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> int vtkCPExodusIIResultsArrayTemplate<Scalar>
+::Resize(vtkIdType)
+{
+  vtkErrorMacro("Read only container.")
+  return 0;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::SetNumberOfTuples(vtkIdType)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::SetTuple(vtkIdType, vtkIdType, vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::SetTuple(vtkIdType, const float *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::SetTuple(vtkIdType, const double *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertTuple(vtkIdType, vtkIdType, vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertTuple(vtkIdType, const float *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertTuple(vtkIdType, const double *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertTuples(vtkIdList *, vtkIdList *, vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertNextTuple(vtkIdType, vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertNextTuple(const float *)
+{
+
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertNextTuple(const double *)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::DeepCopy(vtkAbstractArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::DeepCopy(vtkDataArray *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InterpolateTuple(vtkIdType, vtkIdList *, vtkAbstractArray *, double *)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InterpolateTuple(vtkIdType, vtkIdType, vtkAbstractArray*, vtkIdType,
+                   vtkAbstractArray*, double)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::SetVariantValue(vtkIdType, vtkVariant)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::RemoveTuple(vtkIdType)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::RemoveFirstTuple()
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::RemoveLastTuple()
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::SetTupleValue(vtkIdType, const Scalar*)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertTupleValue(vtkIdType, const Scalar*)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertNextTupleValue(const Scalar *)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::SetValue(vtkIdType, Scalar)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertNextValue(Scalar)
+{
+  vtkErrorMacro("Read only container.")
+  return -1;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> void vtkCPExodusIIResultsArrayTemplate<Scalar>
+::InsertValue(vtkIdType, Scalar)
+{
+  vtkErrorMacro("Read only container.")
+  return;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkCPExodusIIResultsArrayTemplate<Scalar>
+::vtkCPExodusIIResultsArrayTemplate()
+  : TempDoubleArray(NULL)
+{
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkCPExodusIIResultsArrayTemplate<Scalar>
+::~vtkCPExodusIIResultsArrayTemplate()
+{
+  typedef typename std::vector<Scalar*>::const_iterator ArrayIterator;
+  for (ArrayIterator it = this->Arrays.begin(), itEnd = this->Arrays.end();
+       it != itEnd; ++it)
+    {
+    delete [] *it;
+    }
+  delete [] this->TempDoubleArray;
+}
+
+//------------------------------------------------------------------------------
+template <class Scalar> vtkIdType vtkCPExodusIIResultsArrayTemplate<Scalar>
+::Lookup(const Scalar &val, vtkIdType index)
+{
+  while (index <= this->MaxId)
+    {
+    if (this->GetValueReference(index++) == val)
+      {
+      return index - 1; // undo the post-increment in the test above
+      }
+    }
+  return -1;
+}
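
A short clarifying note (not part of the patch) on the indexing used by GetValueReference() and Lookup() above: values are addressed by a flat, tuple-interleaved index even though the storage is one contiguous buffer per component. The sketch below restates the arithmetic with hypothetical names.

    #include <cstddef>

    // Flat value index -> (tuple, component), as in GetValueReference().
    // With 3 components, indices 0,1,2 address tuple 0 and 3,4,5 tuple 1,
    // so e.g. ReadFlat(bufs, 3, 4) returns bufs[1][1].
    inline double ReadFlat(double *const *componentBuffers, int numComponents,
                           std::ptrdiff_t idx)
    {
      const std::ptrdiff_t tuple = idx / numComponents;
      const std::ptrdiff_t comp = idx % numComponents;
      return componentBuffers[comp][tuple];
    }
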
diff --git a/IO/Exodus/vtkExodusIICache.h b/IO/Exodus/vtkExodusIICache.h
index 7eaaa02..c76738a 100644
--- a/IO/Exodus/vtkExodusIICache.h
+++ b/IO/Exodus/vtkExodusIICache.h
@@ -58,6 +58,14 @@ public:
     ObjectId = src.ObjectId;
     ArrayId = src.ArrayId;
     }
+  vtkExodusIICacheKey& operator = ( const vtkExodusIICacheKey& src )
+    {
+    Time = src.Time;
+    ObjectType = src.ObjectType;
+    ObjectId = src.ObjectId;
+    ArrayId = src.ArrayId;
+    return *this;
+    }
   bool match( const vtkExodusIICacheKey&other, const vtkExodusIICacheKey& pattern ) const
     {
     if ( pattern.Time && this->Time != other.Time )
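
The new assignment operator mirrors the member-wise copy constructor the key already had; a trivial sketch (constructor arguments are purely illustrative) of the reassignment it makes explicit:

    #include "vtkExodusIICache.h"
    #include "vtkExodusIIReader.h"

    void ReuseCacheKey()
    {
      // Build a key for one cached request, then reassign the same variable
      // for another -- this relies on the operator= added above.
      vtkExodusIICacheKey key(0., vtkExodusIIReader::ELEM_BLOCK, 1, 0);
      key = vtkExodusIICacheKey(0., vtkExodusIIReader::SIDE_SET_CONN, 1, 1);
      (void)key;
    }
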
diff --git a/IO/Exodus/vtkExodusIIReader.cxx b/IO/Exodus/vtkExodusIIReader.cxx
index 41ca761..f515850 100644
--- a/IO/Exodus/vtkExodusIIReader.cxx
+++ b/IO/Exodus/vtkExodusIIReader.cxx
@@ -230,6 +230,35 @@ vtkExodusIIReaderPrivate::BlockSetInfoType::~BlockSetInfoType()
     }
 }
 
+vtkExodusIIReaderPrivate::BlockSetInfoType& vtkExodusIIReaderPrivate::BlockSetInfoType::operator=(const vtkExodusIIReaderPrivate::BlockSetInfoType& block)
+{
+  // protect against invalid self-assignment
+  if (this != &block)
+    {
+    // superclass
+    this->ObjectInfoType::operator=(static_cast<ObjectInfoType const&>(block));
+
+    // delete existing
+    if (this->CachedConnectivity)
+      {
+      this->CachedConnectivity->Delete();
+      this->CachedConnectivity = NULL;
+      }
+
+    this->FileOffset = block.FileOffset;
+    this->PointMap = block.PointMap;
+    this->ReversePointMap = block.ReversePointMap;
+    this->NextSqueezePoint = block.NextSqueezePoint;
+    if (block.CachedConnectivity)
+      {
+      this->CachedConnectivity = vtkUnstructuredGrid::New();
+      this->CachedConnectivity->ShallowCopy(block.CachedConnectivity);
+      }
+    }
+
+  return *this;
+}
+
 // ----------------------------------------------------------- UTILITY ROUTINES
 
 // This function exists because FORTRAN ordering sucks.
@@ -696,6 +725,30 @@ int vtkExodusIIReaderPrivate::AssembleOutputCellArrays(
     return 1;
     }
 
+  vtkCellData* cd = output->GetCellData();
+  // Load (time-constant) attributes first because their status is in the block info.
+  if (
+    otyp == vtkExodusIIReader::ELEM_BLOCK ||
+    otyp == vtkExodusIIReader::EDGE_BLOCK ||
+    otyp == vtkExodusIIReader::FACE_BLOCK)
+    {
+    BlockInfoType* binfop = (BlockInfoType*)bsinfop;
+    std::vector<int>::iterator atit;
+    vtkIdType a = 0;
+    for (atit = binfop->AttributeStatus.begin(); atit != binfop->AttributeStatus.end(); ++atit, ++a)
+      {
+      if (*atit)
+        {
+        vtkDataArray* arr = this->GetCacheOrRead(
+          vtkExodusIICacheKey( timeStep, vtkExodusIIReader::ELEM_BLOCK_ATTRIB, obj, a ) );
+        if ( arr )
+          {
+          cd->AddArray( arr );
+          }
+        }
+      }
+    }
+
   // Panic if we're given a bad otyp.
   std::map<int,std::vector<ArrayInfoType> >::iterator ami = this->ArrayInfo.find( otyp );
   if ( ami == this->ArrayInfo.end() )
@@ -712,7 +765,6 @@ int vtkExodusIIReaderPrivate::AssembleOutputCellArrays(
 #endif // 0
     }
 
-  vtkCellData* cd = output->GetCellData();
   // For each array defined on objects of the same type as our output,
   // look for ones that are turned on (Status != 0) and have a truth
   // table indicating values are present for object obj in the file.
@@ -755,6 +807,60 @@ int vtkExodusIIReaderPrivate::AssembleOutputProceduralArrays(
       }
     }
 
+  if ( this->GenerateGlobalElementIdArray &&
+       ( otyp == vtkExodusIIReader::SIDE_SET_CONN ||
+         otyp == vtkExodusIIReader::SIDE_SET ) )
+    {
+    vtkExodusIICacheKey key( -1, vtkExodusIIReader::SIDE_SET_CONN, obj, 1 );
+    if ( vtkDataArray* arr = this->GetCacheOrRead( key ) )
+      {
+      vtkIdTypeArray* idarray = vtkIdTypeArray::SafeDownCast(arr);
+      vtkIdTypeArray* elementid = vtkIdTypeArray::New();
+      elementid->SetNumberOfTuples(idarray->GetNumberOfTuples());
+      elementid->SetName(vtkExodusIIReader::GetSideSetSourceElementIdArrayName());
+      vtkIntArray* elementside = vtkIntArray::New();
+      elementside->SetNumberOfTuples(idarray->GetNumberOfTuples());
+      elementside->SetName(vtkExodusIIReader::GetSideSetSourceElementSideArrayName());
+      vtkIdType values[2];
+      for(vtkIdType i=0;i<idarray->GetNumberOfTuples();i++)
+        {
+        idarray->GetTupleValue(i, values);
+        elementid->SetValue(i, values[0]-1); // switch to 0-based indexing
+        // now we have to worry about mapping from exodus canonical side
+        // ordering to vtk canonical side ordering for wedges and hexes.
+        // Even if the element block isn't loaded, we still know what
+        // types of cells it would have contained, since all elements
+        // in a block are of the same type.
+        BlockInfoType* type =
+          this->GetBlockFromFileGlobalId(vtkExodusIIReader::ELEM_BLOCK, values[0]);
+        switch(type->CellType)
+          {
+          case VTK_WEDGE:
+            {
+            int wedgeMapping[5] = {2, 3, 4, 0, 1};
+            elementside->SetValue(i, wedgeMapping[ values[1]-1 ] );
+            break;
+            }
+          case VTK_HEXAHEDRON:
+            {
+            int hexMapping[6] = {2, 1, 3, 0, 4, 5};
+            elementside->SetValue(i, hexMapping[ values[1]-1 ] );
+            break;
+            }
+          default:
+            { // switch to 0-based indexing
+            elementside->SetValue(i, values[1]-1 );
+            }
+          }
+        }
+      cd->AddArray( elementid );
+      cd->AddArray( elementside );
+      elementid->FastDelete();
+      elementside->FastDelete();
+      status -= 2;
+      }
+    }
+
   if ( this->GenerateGlobalElementIdArray && ! OBJTYPE_IS_SET( otyp ) )
     {
     // This retrieves the first new-style map, or if that is not present,
@@ -2365,33 +2471,61 @@ vtkDataArray* vtkExodusIIReaderPrivate::GetCacheOrRead( vtkExodusIICacheKey key
     }
   else if ( key.ObjectType == vtkExodusIIReader::SIDE_SET_CONN )
     {
-    // Stick all of side_set_node_list and side_set_node_count and side_set_nodes_per_side in one array
-    // let InsertSetSides() figure it all out. Except for 0-based indexing
-    SetInfoType* sinfop = &this->SetInfo[vtkExodusIIReader::SIDE_SET][key.ObjectId];
-    int ssnllen; // side set node list length
-    if ( ex_get_side_set_node_list_len( exoid, sinfop->Id, &ssnllen ) < 0 )
+    if(key.ArrayId <= 0)
       {
-      vtkErrorMacro( "Unable to fetch side set \"" << sinfop->Name.c_str() << "\" (" << sinfop->Id << ") node list length" );
-      arr = 0;
-      return 0;
-      }
-    vtkIntArray* iarr = vtkIntArray::New();
-    vtkIdType ilen = ssnllen + sinfop->Size;
-    iarr->SetNumberOfComponents( 1 );
-    iarr->SetNumberOfTuples( ilen );
-    int* dat = iarr->GetPointer( 0 );
-    if ( ex_get_side_set_node_list( exoid, sinfop->Id, dat, dat + sinfop->Size ) < 0 )
+      // Stick all of side_set_node_list, side_set_node_count, and side_set_nodes_per_side in one array
+      // and let InsertSetSides() figure it all out, except for the 0-based indexing.
+      SetInfoType* sinfop = &this->SetInfo[vtkExodusIIReader::SIDE_SET][key.ObjectId];
+      int ssnllen; // side set node list length
+      if ( ex_get_side_set_node_list_len( exoid, sinfop->Id, &ssnllen ) < 0 )
+        {
+        vtkErrorMacro( "Unable to fetch side set \"" << sinfop->Name.c_str() << "\" (" << sinfop->Id << ") node list length" );
+        arr = 0;
+        return 0;
+        }
+      vtkIntArray* iarr = vtkIntArray::New();
+      vtkIdType ilen = ssnllen + sinfop->Size;
+      iarr->SetNumberOfComponents( 1 );
+      iarr->SetNumberOfTuples( ilen );
+      int* dat = iarr->GetPointer( 0 );
+      if ( ex_get_side_set_node_list( exoid, sinfop->Id, dat, dat + sinfop->Size ) < 0 )
+        {
+        vtkErrorMacro( "Unable to fetch side set \"" << sinfop->Name.c_str() << "\" (" << sinfop->Id << ") node list" );
+        iarr->Delete();
+        arr = 0;
+        return 0;
+        }
+      while ( ilen > sinfop->Size )
+        { // move to 0-based indexing on nodes, don't touch nodes/side counts at head of array
+        --dat[--ilen];
+        }
+      arr = iarr;
+      } // if(key.ArrayId <= 0)
+    else
       {
-      vtkErrorMacro( "Unable to fetch side set \"" << sinfop->Name.c_str() << "\" (" << sinfop->Id << ") node list" );
-      iarr->Delete();
-      arr = 0;
-      return 0;
-      }
-    while ( ilen > sinfop->Size )
-      { // move to 0-based indexing on nodes, don't touch nodes/side counts at head of array
-      --dat[--ilen];
+      // return information about where the side set cells come from on the elements
+      // the first tuple value is the element id and the second is the canonical side
+      // sinfop->Size is the number of sides in this side set
+      SetInfoType* sinfop = &this->SetInfo[vtkExodusIIReader::SIDE_SET][key.ObjectId];
+      std::vector<int> side_set_elem_list(sinfop->Size);
+      std::vector<int> side_set_side_list(sinfop->Size);
+      if ( ex_get_side_set( exoid, sinfop->Id, &side_set_elem_list[0], &side_set_side_list[0]) < 0 )
+        {
+        vtkErrorMacro( "Unable to fetch side set \"" << sinfop->Name.c_str() << "\" (" << sinfop->Id << ") node list" );
+        arr = 0;
+        return 0;
+        }
+      vtkIdTypeArray* iarr = vtkIdTypeArray::New();
+      iarr->SetNumberOfComponents( 2 );
+      iarr->SetNumberOfTuples( sinfop->Size );
+      for(int i=0;i<sinfop->Size;i++)
+        { // we'll have to fix up the side indexing later
+        // because Exodus and VTK have different canonical orderings for wedges and hexes.
+        vtkIdType info[2] = {side_set_elem_list[i], side_set_side_list[i]};
+        iarr->SetTupleValue(i, info);
+        }
+      arr = iarr;
       }
-    arr = iarr;
     }
   else if ( key.ObjectType == vtkExodusIIReader::NODAL_COORDS )
     {
@@ -2505,16 +2639,21 @@ vtkDataArray* vtkExodusIIReaderPrivate::GetCacheOrRead( vtkExodusIICacheKey key
     key.ObjectType == vtkExodusIIReader::EDGE_BLOCK_ATTRIB
     )
     {
-    BlockInfoType* binfop = &this->BlockInfo[key.ObjectType][key.ObjectId];
+    int blkType =
+      (key.ObjectType == vtkExodusIIReader::ELEM_BLOCK_ATTRIB ? vtkExodusIIReader::ELEM_BLOCK :
+       (key.ObjectType == vtkExodusIIReader::FACE_BLOCK_ATTRIB ? vtkExodusIIReader::FACE_BLOCK :
+        vtkExodusIIReader::EDGE_BLOCK));
+    BlockInfoType* binfop = &this->BlockInfo[blkType][key.ObjectId];
     vtkDoubleArray* darr = vtkDoubleArray::New();
     arr = darr;
     darr->SetName( binfop->AttributeNames[key.ArrayId].c_str() );
     darr->SetNumberOfComponents( 1 );
     darr->SetNumberOfTuples( binfop->Size );
-    if ( ex_get_one_attr( exoid, static_cast<ex_entity_type>( key.ObjectType ), key.ObjectId, key.ArrayId, darr->GetVoidPointer( 0 ) ) < 0 )
+    if ( ex_get_one_attr(
+        exoid, static_cast<ex_entity_type>(blkType), binfop->Id, key.ArrayId + 1, darr->GetVoidPointer( 0 ) ) < 0 )
       { // NB: The error message references the file-order object id, not the numerically sorted index presented to users.
       vtkErrorMacro( "Unable to read attribute " << key.ArrayId
-        << " for object " << key.ObjectId << " of type " << key.ObjectType << "." );
+        << " for object " << key.ObjectId << " of type " << key.ObjectType  << " block type " << blkType << "." );
       arr->Delete();
       arr = 0;
       }
@@ -3626,11 +3765,6 @@ int vtkExodusIIReaderPrivate::RequestInformation()
   //VTK_EXO_FUNC( ex_inquire( exoid, EX_INQ_TIME,       itmp, 0, 0 ), "Inquire for EX_INQ_TIME failed" );
   //num_timesteps = itmp[0];
 
-  std::vector<BlockInfoType> bitBlank;
-  std::vector<SetInfoType> sitBlank;
-  std::vector<MapInfoType> mitBlank;
-  std::vector<ArrayInfoType> aitBlank;
-
   num_timesteps = static_cast<int>( this->Times.size() );
 /*
   this->Times.clear();
@@ -3697,17 +3831,17 @@ int vtkExodusIIReaderPrivate::RequestInformation()
 
     if ( OBJTYPE_IS_BLOCK(i) )
       {
-      this->BlockInfo[obj_types[i]] = bitBlank;
+      this->BlockInfo[obj_types[i]].clear();
       this->BlockInfo[obj_types[i]].reserve( nids );
       }
     else if ( OBJTYPE_IS_SET(i) )
       {
-      this->SetInfo[obj_types[i]] = sitBlank;
+      this->SetInfo[obj_types[i]].clear();
       this->SetInfo[obj_types[i]].reserve( nids );
       }
     else
       {
-      this->MapInfo[obj_types[i]] = mitBlank;
+      this->MapInfo[obj_types[i]].clear();
       this->MapInfo[obj_types[i]].reserve( nids );
       }
 
@@ -3949,7 +4083,7 @@ int vtkExodusIIReaderPrivate::RequestInformation()
 
     if ( ((OBJTYPE_IS_BLOCK(i)) || (OBJTYPE_IS_SET(i))) && num_vars && num_timesteps > 0 )
       {
-      this->ArrayInfo[obj_types[i]] = aitBlank;
+      this->ArrayInfo[obj_types[i]].clear();
       // Fill in ArrayInfo entries, combining array names into vectors/tensors where appropriate:
       this->GlomArrayNames( obj_types[i], nids, num_vars, var_names, truth_tab );
       }
@@ -4954,8 +5088,7 @@ unsigned long vtkExodusIIReader::GetMetadataMTime()
   if ( fname && this->propName && !strcmp( fname, this->propName ) ) \
     return; \
   modified = 1; \
-  if ( this->propName ) \
-    delete [] this->propName; \
+  delete [] this->propName; \
   if ( fname ) \
     { \
     size_t fnl = strlen( fname ) + 1; \
@@ -6149,6 +6282,16 @@ double vtkExodusIIReader::GetCacheSize()
   return this->Metadata->GetCacheSize();
 }
 
+void vtkExodusIIReader::SetSqueezePoints(bool sp)
+{
+  this->Metadata->SetSqueezePoints(sp ? 1 : 0);
+}
+
+bool vtkExodusIIReader::GetSqueezePoints()
+{
+  return this->Metadata->GetSqueezePoints() != 0;
+}
+
 void vtkExodusIIReader::ResetCache()
 {
   this->Metadata->ResetCache();
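
To make the new side-set provenance arrays concrete, here is an illustrative consumer (the dataset variable and the printing loop are assumptions, not reader code). The element ids and side indices written above are already 0-based, and wedge/hexahedron sides have been remapped from Exodus to VTK canonical ordering.

    #include "vtkCellData.h"
    #include "vtkExodusIIReader.h"
    #include "vtkIdTypeArray.h"
    #include "vtkIntArray.h"
    #include "vtkUnstructuredGrid.h"

    #include <iostream>

    // 'sideSet' is assumed to be one side-set block taken from the reader's
    // multiblock output with GenerateGlobalElementIdArray enabled.
    void PrintSideProvenance(vtkUnstructuredGrid *sideSet)
    {
      vtkCellData *cd = sideSet->GetCellData();
      vtkIdTypeArray *srcElem = vtkIdTypeArray::SafeDownCast(
        cd->GetArray(vtkExodusIIReader::GetSideSetSourceElementIdArrayName()));
      vtkIntArray *srcSide = vtkIntArray::SafeDownCast(
        cd->GetArray(vtkExodusIIReader::GetSideSetSourceElementSideArrayName()));
      if (!srcElem || !srcSide)
        {
        return; // arrays not generated for this block
        }
      for (vtkIdType i = 0; i < srcElem->GetNumberOfTuples(); ++i)
        {
        std::cout << "side cell " << i << ": element " << srcElem->GetValue(i)
                  << ", side " << srcSide->GetValue(i) << "\n";
        }
    }
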
diff --git a/IO/Exodus/vtkExodusIIReader.h b/IO/Exodus/vtkExodusIIReader.h
index c564f45..8e9df43 100644
--- a/IO/Exodus/vtkExodusIIReader.h
+++ b/IO/Exodus/vtkExodusIIReader.h
@@ -240,6 +240,15 @@ public:
   static const char* GetImplicitNodeIdArrayName() { return "ImplicitNodeId"; }
 
   // Description:
+  // Get the name of the array that stores the mapping from side set
+  // cells back to the global id of the elements they bound.
+  static const char* GetSideSetSourceElementIdArrayName() { return "SourceElementId"; }
+
+  // Description:
+  // Get the name of the array that stores the mapping from side set
+  // cells back to the canonical side of the elements they bound.
+  static const char* GetSideSetSourceElementSideArrayName() { return "SourceElementSide"; }
+  // Description:
   // Geometric locations can include displacements.  By default,
   // this is ON.  The nodal positions are 'displaced' by the
   // standard exodus displacment vector. If displacements
@@ -702,6 +711,21 @@ public:
   double GetCacheSize();
 
   // Description:
+  // Should the reader output only the points used by elements in the output
+  // mesh, or all the points? Outputting all the points is much faster since the
+  // point array can be read straight from disk and the mesh connectivity need
+  // not be altered. Squeezing the points down to the minimum set needed to
+  // produce the output mesh is useful for glyphing and other point-based
+  // operations. On large parallel datasets, loading all the points implies
+  // loading all the points on all processes and performing subsequent
+  // filtering on a much larger set.
+  //
+  // By default, SqueezePoints is true for backwards compatibility.
+  void SetSqueezePoints(bool sp);
+  bool GetSqueezePoints();
+
+
+  // Description:
   // Re-reads time information from the exodus file and updates
   // TimeStepRange accordingly.
   virtual void UpdateTimeInformation();
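
An illustrative use of the new accessor pair (file name and pipeline are assumptions): turning squeezing off keeps the full point array, trading memory for the faster read path described above.

    #include "vtkExodusIIReader.h"
    #include "vtkNew.h"

    void ReadWithoutSqueezing(const char *fileName)
    {
      vtkNew<vtkExodusIIReader> reader;
      reader->SetFileName(fileName);
      // Keep all points rather than the minimal set used by the loaded blocks.
      reader->SetSqueezePoints(false);
      reader->UpdateInformation();
      reader->Update();
      // GetSqueezePoints() now reports false.
    }
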
diff --git a/IO/Exodus/vtkExodusIIReaderPrivate.h b/IO/Exodus/vtkExodusIIReaderPrivate.h
index 23f9752..ad981cf 100644
--- a/IO/Exodus/vtkExodusIIReaderPrivate.h
+++ b/IO/Exodus/vtkExodusIIReaderPrivate.h
@@ -349,6 +349,7 @@ public:
     BlockSetInfoType(){this->CachedConnectivity=0;}
     BlockSetInfoType(const BlockSetInfoType& block);
     ~BlockSetInfoType();
+    BlockSetInfoType& operator=(const BlockSetInfoType& block);
   };
 
   /// A struct to hold information about Exodus blocks
diff --git a/IO/Export/Testing/Cxx/CMakeLists.txt b/IO/Export/Testing/Cxx/CMakeLists.txt
index 63bf1f5..d3e4b5e 100644
--- a/IO/Export/Testing/Cxx/CMakeLists.txt
+++ b/IO/Export/Testing/Cxx/CMakeLists.txt
@@ -1,49 +1,24 @@
-# Only run GL2PS tests if GhostScript is available if VTK_DATA_ROOT is set
-if(VTK_GHOSTSCRIPT_EXECUTABLE AND VTK_DATA_ROOT)
+include(vtkGhostscript)
+
+# Only run GL2PS tests if GhostScript is available
+if(VTK_GHOSTSCRIPT_EXECUTABLE)
   set(GL2PSTests
-    TestContextGL2PS.cxx
+    TestContextGL2PS.cxx,NO_VALID
     TestGL2PSExporterMultipleRenderers.cxx
-    TestGL2PSExporterRaster.cxx
+    TestGL2PSExporterRaster.cxx,NO_VALID
     TestGL2PSExporterRasterExclusion.cxx
-    TestGL2PSExporterVector.cxx
-    TestGL2PSExporterVolumeRaster.cxx
+    TestGL2PSExporterVector.cxx,NO_VALID
+    TestGL2PSExporterVolumeRaster.cxx,NO_VALID
     TestGL2PSTextActor3D.cxx
+    TestGL2PSLabeledDataMapper.cxx
+    TestLinePlotGL2PS.cxx
     TestStackedPlotGL2PS.cxx
   )
 endif()
 
-set(MyTests
-  X3DTest.cxx
-)
-
-if (VTK_DATA_ROOT)
-  set(MyTests
-    ${MyTest}
-    ${GL2PSTests}
-  )
-endif()
-
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Hybrid/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_add_test_cxx(X3DTest.cxx NO_DATA NO_VALID)
+vtk_add_test_cxx(${GL2PSTests})
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
 
 # The GL2PS tests produce postscript output, which cannot be compared using
 # the built-in image regression framework. The tests added above will create the
@@ -53,6 +28,8 @@ endforeach()
 if(GL2PSTests)
   set(TestGL2PSExporterVolumeRasterError 50)
   foreach(test ${GL2PSTests})
+    string(REGEX REPLACE ",.*" "" testsrc "${test}")
+    get_filename_component(TName ${testsrc} NAME_WE)
     add_test(NAME ${vtk-module}Cxx-${TName}-RasterizePNG
       COMMAND ${CMAKE_COMMAND}
         -DPSFILE=${VTK_TEST_OUTPUT_DIR}/${TName}.ps
@@ -76,12 +53,13 @@ if(GL2PSTests)
       set(_error_threshold 15)
     endif()
     # Image diff rasterized png with baseline
-    add_test(NAME ${vtk-module}Cxx-${TName}-VerifyRasterizedPNG
+    ExternalData_add_test(VTKData
+      NAME ${vtk-module}Cxx-${TName}-VerifyRasterizedPNG
       COMMAND vtkRenderingGL2PSCxxTests PNGCompare
-        -D ${VTK_DATA_ROOT}
+        -D ${VTK_TEST_DATA_DIR}
         -T ${VTK_TEST_OUTPUT_DIR}
         -E ${_error_threshold}
-        -V Baseline/Hybrid/${TName}-rasterRef.png
+        -V DATA{../Data/Baseline/${TName}-rasterRef.png,:}
         --test-file ${VTK_TEST_OUTPUT_DIR}/${TName}-raster.png
     )
     set_tests_properties("${vtk-module}Cxx-${TName}-VerifyRasterizedPNG"
diff --git a/IO/Export/Testing/Cxx/TestContextGL2PS.cxx b/IO/Export/Testing/Cxx/TestContextGL2PS.cxx
index 169b7ac..e5c28ef 100644
--- a/IO/Export/Testing/Cxx/TestContextGL2PS.cxx
+++ b/IO/Export/Testing/Cxx/TestContextGL2PS.cxx
@@ -63,8 +63,8 @@ int TestContextGL2PS( int, char *[] )
   vtkNew<vtkGL2PSExporter> exp;
   exp->SetRenderWindow(view->GetRenderWindow());
   exp->SetFileFormatToPS();
+  exp->UsePainterSettings();
   exp->CompressOff();
-  exp->SetSortToOff();
   exp->DrawBackgroundOn();
   exp->SetLineWidthFactor(1.0);
   exp->SetPointSizeFactor(1.0);
diff --git a/IO/Export/Testing/Cxx/TestGL2PSLabeledDataMapper.cxx b/IO/Export/Testing/Cxx/TestGL2PSLabeledDataMapper.cxx
new file mode 100644
index 0000000..36fba64
--- /dev/null
+++ b/IO/Export/Testing/Cxx/TestGL2PSLabeledDataMapper.cxx
@@ -0,0 +1,156 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestGL2PSLabeledDataMapper.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkTestUtilities.h"
+#include "vtkRegressionTestImage.h"
+#include "vtkGL2PSExporter.h"
+
+#include "vtkActor.h"
+#include "vtkActor2D.h"
+#include "vtkCamera.h"
+#include "vtkCellArray.h"
+#include "vtkCellCenters.h"
+#include "vtkIdFilter.h"
+#include "vtkLabeledDataMapper.h"
+#include "vtkNew.h"
+#include "vtkPoints.h"
+#include "vtkPolyData.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkPolyDataMapper2D.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkSelectVisiblePoints.h"
+#include "vtkSphereSource.h"
+#include "vtkTestingInteractor.h"
+#include "vtkTextProperty.h"
+
+// This test is adapted from labeledMesh.py to test GL2PS exporting of selection
+// labels.
+int TestGL2PSLabeledDataMapper(int, char *[] )
+{
+  // Selection rectangle:
+  double xmin = 100.;
+  double xmax = 400.;
+  double ymin = 100.;
+  double ymax = 400.;
+
+  vtkNew<vtkPoints> pts;
+  pts->InsertPoint(0, xmin, ymin, 0.);
+  pts->InsertPoint(1, xmax, ymin, 0.);
+  pts->InsertPoint(2, xmax, ymax, 0.);
+  pts->InsertPoint(3, xmin, ymax, 0.);
+
+  vtkNew<vtkCellArray> rect;
+  rect->InsertNextCell(5);
+  rect->InsertCellPoint(0);
+  rect->InsertCellPoint(1);
+  rect->InsertCellPoint(2);
+  rect->InsertCellPoint(3);
+  rect->InsertCellPoint(0);
+
+  vtkNew<vtkPolyData> selectRect;
+  selectRect->SetPoints(pts.GetPointer());
+  selectRect->SetLines(rect.GetPointer());
+
+  vtkNew<vtkPolyDataMapper2D> rectMapper;
+  vtkNew<vtkActor2D> rectActor;
+  rectMapper->SetInputData(selectRect.GetPointer());
+  rectActor->SetMapper(rectMapper.GetPointer());
+
+  // Create sphere
+  vtkNew<vtkSphereSource> sphere;
+  vtkNew<vtkPolyDataMapper> sphereMapper;
+  vtkNew<vtkActor> sphereActor;
+  sphereMapper->SetInputConnection(sphere->GetOutputPort());
+  sphereActor->SetMapper(sphereMapper.GetPointer());
+
+  // Generate ids for labeling
+  vtkNew<vtkIdFilter> ids;
+  ids->SetInputConnection(sphere->GetOutputPort());
+  ids->PointIdsOn();
+  ids->CellIdsOn();
+  ids->FieldDataOn();
+
+  // Create labels for points
+  vtkNew<vtkSelectVisiblePoints> visPts;
+  visPts->SetInputConnection(ids->GetOutputPort());
+  visPts->SelectionWindowOn();
+  visPts->SetSelection(xmin, xmax, ymin, ymax);
+
+  vtkNew<vtkLabeledDataMapper> ldm;
+  ldm->SetInputConnection(visPts->GetOutputPort());
+  ldm->SetLabelModeToLabelFieldData();
+
+  vtkNew<vtkActor2D> pointLabels;
+  pointLabels->SetMapper(ldm.GetPointer());
+
+  // Create labels for cells:
+  vtkNew<vtkCellCenters> cc;
+  cc->SetInputConnection(ids->GetOutputPort());
+
+  vtkNew<vtkSelectVisiblePoints> visCells;
+  visCells->SetInputConnection(cc->GetOutputPort());
+  visCells->SelectionWindowOn();
+  visCells->SetSelection(xmin, xmax, ymin, ymax);
+
+  vtkNew<vtkLabeledDataMapper> cellMapper;
+  cellMapper->SetInputConnection(visCells->GetOutputPort());
+  cellMapper->SetLabelModeToLabelFieldData();
+  cellMapper->GetLabelTextProperty()->SetColor(0., 1., 0.);
+
+  vtkNew<vtkActor2D> cellLabels;
+  cellLabels->SetMapper(cellMapper.GetPointer());
+
+  // Rendering setup
+  vtkNew<vtkRenderer> ren;
+  visPts->SetRenderer(ren.GetPointer());
+  visCells->SetRenderer(ren.GetPointer());
+  ren->AddActor(sphereActor.GetPointer());
+  ren->AddActor2D(rectActor.GetPointer());
+  ren->AddActor2D(pointLabels.GetPointer());
+  ren->AddActor2D(cellLabels.GetPointer());
+  ren->SetBackground(1., 1., 1.);
+  ren->GetActiveCamera()->Zoom(.55);
+
+  vtkNew<vtkRenderWindow> renWin;
+  vtkNew<vtkRenderWindowInteractor> iren;
+  iren->SetRenderWindow(renWin.GetPointer());
+  renWin->AddRenderer(ren.GetPointer());
+  renWin->SetMultiSamples(0);
+  renWin->SetSize(500, 500);
+  renWin->Render();
+
+  vtkNew<vtkGL2PSExporter> exp;
+  exp->SetRenderWindow(renWin.GetPointer());
+  exp->SetFileFormatToPS();
+  exp->CompressOff();
+  exp->SetPS3Shading(0);
+  exp->SetSortToSimple();
+  exp->DrawBackgroundOn();
+  exp->Write3DPropsAsRasterImageOff();
+  exp->SetTextAsPath(true);
+
+  std::string fileprefix = vtkTestingInteractor::TempDirectory +
+      std::string("/TestGL2PSLabeledDataMapper");
+
+  exp->SetFilePrefix(fileprefix.c_str());
+  exp->Write();
+
+  iren->Initialize();
+  iren->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/IO/Export/Testing/Cxx/TestLinePlotGL2PS.cxx b/IO/Export/Testing/Cxx/TestLinePlotGL2PS.cxx
new file mode 100644
index 0000000..14f8f1d
--- /dev/null
+++ b/IO/Export/Testing/Cxx/TestLinePlotGL2PS.cxx
@@ -0,0 +1,124 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestLinePlotGL2PS.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkChartXY.h"
+#include "vtkContextScene.h"
+#include "vtkContextView.h"
+#include "vtkFloatArray.h"
+#include "vtkGL2PSExporter.h"
+#include "vtkNew.h"
+#include "vtkPlot.h"
+#include "vtkPlotLine.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkSmartPointer.h"
+#include "vtkTable.h"
+#include "vtkTestingInteractor.h"
+
+//----------------------------------------------------------------------------
+int TestLinePlotGL2PS(int , char * [])
+{
+  // Set up a 2D scene, add an XY chart to it
+  vtkNew<vtkContextView> view;
+  view->GetRenderWindow()->SetSize(400, 300);
+  vtkNew<vtkChartXY> chart;
+  view->GetScene()->AddItem(chart.GetPointer());
+  chart->SetShowLegend(true);
+
+  // Create a table with some points in it...
+  vtkNew<vtkTable> table;
+  vtkNew<vtkFloatArray> arrX;
+  arrX->SetName("X Axis");
+  table->AddColumn(arrX.GetPointer());
+  vtkNew<vtkFloatArray> arrC;
+  arrC->SetName("Cosine");
+  table->AddColumn(arrC.GetPointer());
+  vtkNew<vtkFloatArray> arrS;
+  arrS->SetName("Sine");
+  table->AddColumn(arrS.GetPointer());
+  vtkNew<vtkFloatArray> arrS2;
+  arrS2->SetName("Sine2");
+  table->AddColumn(arrS2.GetPointer());
+  vtkNew<vtkFloatArray> arr1;
+  arr1->SetName("One");
+  table->AddColumn(arr1.GetPointer());
+  vtkNew<vtkFloatArray> arr0;
+  arr0->SetName("Zero");
+  table->AddColumn(arr0.GetPointer());
+  // Test charting with a few more points...
+  int numPoints = 69;
+  float inc = 7.5 / (numPoints-1);
+  table->SetNumberOfRows(numPoints);
+  for (int i = 0; i < numPoints; ++i)
+    {
+    table->SetValue(i, 0, i * inc);
+    table->SetValue(i, 1, cos(i * inc) + 0.0);
+    table->SetValue(i, 2, sin(i * inc) + 0.0);
+    table->SetValue(i, 3, sin(i * inc) + 0.5);
+    table->SetValue(i, 4, 1.0);
+    table->SetValue(i, 5, 0.0);
+    }
+
+  // Add multiple line plots, setting the colors etc
+  vtkPlotLine *line = vtkPlotLine::SafeDownCast(chart->AddPlot(vtkChart::LINE));
+  line->SetInputData(table.GetPointer(), 0, 1);
+  line->SetColor(0, 255, 0, 255);
+  line->SetWidth(1.0);
+  line->SetMarkerStyle(vtkPlotLine::CIRCLE);
+  line = vtkPlotLine::SafeDownCast(chart->AddPlot(vtkChart::LINE));
+  line->SetInputData(table.GetPointer(), 0, 2);
+  line->SetColor(255, 0, 0, 255);
+  line->SetWidth(5.0);
+  line->SetMarkerStyle(vtkPlotLine::SQUARE);
+  line = vtkPlotLine::SafeDownCast(chart->AddPlot(vtkChart::LINE));
+  line->SetInputData(table.GetPointer(), 0, 3);
+  line->SetColor(0, 0, 255, 255);
+  line->SetWidth(4.0);
+  line->SetMarkerStyle(vtkPlotLine::DIAMOND);
+  line = vtkPlotLine::SafeDownCast(chart->AddPlot(vtkChart::LINE));
+  line->SetInputData(table.GetPointer(), 0, 4);
+  line->SetColor(0, 255, 255, 255);
+  line->SetWidth(4.0);
+  line->SetMarkerStyle(vtkPlotLine::CROSS);
+  line = vtkPlotLine::SafeDownCast(chart->AddPlot(vtkChart::LINE));
+  line->SetInputData(table.GetPointer(), 0, 5);
+  line->SetColor(255, 255, 0, 255);
+  line->SetWidth(4.0);
+  line->SetMarkerStyle(vtkPlotLine::PLUS);
+
+
+  // Render the scene and compare the image to a reference image
+  view->GetRenderWindow()->SetMultiSamples(0);
+  view->GetRenderWindow()->Render();
+
+  vtkNew<vtkGL2PSExporter> exp;
+  exp->SetRenderWindow(view->GetRenderWindow());
+  exp->SetFileFormatToPS();
+  exp->UsePainterSettings();
+  exp->CompressOff();
+  exp->DrawBackgroundOn();
+
+  std::string fileprefix = vtkTestingInteractor::TempDirectory +
+      std::string("/TestLinePlotGL2PS");
+
+  exp->SetFilePrefix(fileprefix.c_str());
+  exp->Write();
+
+  //Finally render the scene and compare the image to a reference image
+  view->GetInteractor()->Initialize();
+  view->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/IO/Export/Testing/Cxx/TestStackedPlotGL2PS.cxx b/IO/Export/Testing/Cxx/TestStackedPlotGL2PS.cxx
index 0f343d7..44df682 100644
--- a/IO/Export/Testing/Cxx/TestStackedPlotGL2PS.cxx
+++ b/IO/Export/Testing/Cxx/TestStackedPlotGL2PS.cxx
@@ -126,8 +126,8 @@ int TestStackedPlotGL2PS(int , char * [])
   vtkNew<vtkGL2PSExporter> exp;
   exp->SetRenderWindow(view->GetRenderWindow());
   exp->SetFileFormatToPS();
+  exp->UsePainterSettings();
   exp->CompressOff();
-  exp->SetSortToOff();
   exp->DrawBackgroundOn();
 
   std::string fileprefix = vtkTestingInteractor::TempDirectory +
diff --git a/IO/Export/Testing/Data/Baseline/TestContextGL2PS-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestContextGL2PS-rasterRef.png.md5
new file mode 100644
index 0000000..fc6a340
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestContextGL2PS-rasterRef.png.md5
@@ -0,0 +1 @@
+cf147ce912f5e55b541a769d86c208e9
diff --git a/IO/Export/Testing/Data/Baseline/TestContextGL2PS-rasterRef_1.png.md5 b/IO/Export/Testing/Data/Baseline/TestContextGL2PS-rasterRef_1.png.md5
new file mode 100644
index 0000000..d28d5ac
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestContextGL2PS-rasterRef_1.png.md5
@@ -0,0 +1 @@
+bf545d72b4b5faaeeead0f7c565a169f
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers-rasterRef.png.md5
new file mode 100644
index 0000000..a00209b
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers-rasterRef.png.md5
@@ -0,0 +1 @@
+7720ed2fd67bbbe95eee7a472987dfcc
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers-rasterRef_1.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers-rasterRef_1.png.md5
new file mode 100644
index 0000000..4f474c3
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers-rasterRef_1.png.md5
@@ -0,0 +1 @@
+70070c4c1c6d89ed2864456c75c274aa
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers.png.md5
new file mode 100644
index 0000000..ad22a9b
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterMultipleRenderers.png.md5
@@ -0,0 +1 @@
+d431a96bfe4223eade5b83d013929b59
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef.png.md5
new file mode 100644
index 0000000..64ed308
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef.png.md5
@@ -0,0 +1 @@
+47555f165bcd0fe1c9df3354e81146d0
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_1.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_1.png.md5
new file mode 100644
index 0000000..d59bd7b
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_1.png.md5
@@ -0,0 +1 @@
+a1a851bfd82d8b6383eb22e0f86bd283
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_2.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_2.png.md5
new file mode 100644
index 0000000..5d9a886
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_2.png.md5
@@ -0,0 +1 @@
+4d44d9f719a4ca0af0105808bd8b9911
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_3.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_3.png.md5
new file mode 100644
index 0000000..a84207b
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRaster-rasterRef_3.png.md5
@@ -0,0 +1 @@
+7f84e9963652b8058c6437adde0bfe31
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef.png.md5
new file mode 100644
index 0000000..e397828
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef.png.md5
@@ -0,0 +1 @@
+19b436c526938981a8b16aa356fc46db
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef_1.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef_1.png.md5
new file mode 100644
index 0000000..991f410
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef_1.png.md5
@@ -0,0 +1 @@
+a39ef32e9cd16b399ffad821d1b25c4e
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef_2.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef_2.png.md5
new file mode 100644
index 0000000..497b4ac
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion-rasterRef_2.png.md5
@@ -0,0 +1 @@
+2d91503a224a53516d8f812faeafcc45
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion.png.md5
new file mode 100644
index 0000000..2c538cd
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterRasterExclusion.png.md5
@@ -0,0 +1 @@
+40cdc5ff802e798758abbd0d9647801d
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef.png.md5
new file mode 100644
index 0000000..4fa6083
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef.png.md5
@@ -0,0 +1 @@
+1376c4e71930a1735e4e3f92cbb88558
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_1.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_1.png.md5
new file mode 100644
index 0000000..ba838b8
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_1.png.md5
@@ -0,0 +1 @@
+85abe20e1a2185f1d55b95fb5367ff37
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_2.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_2.png.md5
new file mode 100644
index 0000000..321bf46
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_2.png.md5
@@ -0,0 +1 @@
+8dca44af90682b10d42eb7f9bb22d713
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_3.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_3.png.md5
new file mode 100644
index 0000000..06318ba
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVector-rasterRef_3.png.md5
@@ -0,0 +1 @@
+71f5f38df773f281ea63ad1e3c1f8748
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef.png.md5
new file mode 100644
index 0000000..648703a
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef.png.md5
@@ -0,0 +1 @@
+c660b8882d0bbc72f366e5218545cab6
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_1.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_1.png.md5
new file mode 100644
index 0000000..800bef3
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_1.png.md5
@@ -0,0 +1 @@
+cd447b7ee48c7fc4024fba85881900f0
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_2.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_2.png.md5
new file mode 100644
index 0000000..6b48b79
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_2.png.md5
@@ -0,0 +1 @@
+1e56dfa9cc5afa912d4f794f6e7d16bf
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_3.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_3.png.md5
new file mode 100644
index 0000000..ab5b8dc
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_3.png.md5
@@ -0,0 +1 @@
+622cc183dc3cf87fda4719e045ec445e
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_4.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_4.png.md5
new file mode 100644
index 0000000..4984c70
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSExporterVolumeRaster-rasterRef_4.png.md5
@@ -0,0 +1 @@
+d6eb04c3766991bb8f0005a7d0d90261
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSLabeledDataMapper-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSLabeledDataMapper-rasterRef.png.md5
new file mode 100644
index 0000000..6b3a690
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSLabeledDataMapper-rasterRef.png.md5
@@ -0,0 +1 @@
+0baa142c7b599f5b4858467135aa9fbf
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSLabeledDataMapper.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSLabeledDataMapper.png.md5
new file mode 100644
index 0000000..36edbd7
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSLabeledDataMapper.png.md5
@@ -0,0 +1 @@
+2fab6fd7f29e0aff854261f8b3e4e3eb
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSTextActor3D-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSTextActor3D-rasterRef.png.md5
new file mode 100644
index 0000000..2c1f79e
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSTextActor3D-rasterRef.png.md5
@@ -0,0 +1 @@
+8b38724659d5475e06084068e1590f53
diff --git a/IO/Export/Testing/Data/Baseline/TestGL2PSTextActor3D.png.md5 b/IO/Export/Testing/Data/Baseline/TestGL2PSTextActor3D.png.md5
new file mode 100644
index 0000000..e98d4c3
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestGL2PSTextActor3D.png.md5
@@ -0,0 +1 @@
+388233f66553052f3b43c15695c95e82
diff --git a/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS-rasterRef.png.md5
new file mode 100644
index 0000000..8a2d093
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS-rasterRef.png.md5
@@ -0,0 +1 @@
+5f8b8139e6c5d5dd995b7b978ade4253
diff --git a/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS.png.md5 b/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS.png.md5
new file mode 100644
index 0000000..2d86b94
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS.png.md5
@@ -0,0 +1 @@
+ed7d9ba58822315ba797114a5e847e9a
diff --git a/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS_1.png.md5 b/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS_1.png.md5
new file mode 100644
index 0000000..ec5ab43
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestLinePlotGL2PS_1.png.md5
@@ -0,0 +1 @@
+fb0ed7dcdcb6796802f9f2bed0876c43
diff --git a/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS-rasterRef.png.md5 b/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS-rasterRef.png.md5
new file mode 100644
index 0000000..0a7964a
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS-rasterRef.png.md5
@@ -0,0 +1 @@
+e3a141301d92252be343a0770177404c
diff --git a/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS-rasterRef_1.png.md5 b/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS-rasterRef_1.png.md5
new file mode 100644
index 0000000..50944fa
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS-rasterRef_1.png.md5
@@ -0,0 +1 @@
+19d61df3d058d87fb8dcc6eec26f6e3e
diff --git a/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS.png.md5 b/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS.png.md5
new file mode 100644
index 0000000..a512c23
--- /dev/null
+++ b/IO/Export/Testing/Data/Baseline/TestStackedPlotGL2PS.png.md5
@@ -0,0 +1 @@
+b26485a625bcadbdcb5319a11e078122
diff --git a/IO/Export/module.cmake b/IO/Export/module.cmake
index 067b682..414fa48 100644
--- a/IO/Export/module.cmake
+++ b/IO/Export/module.cmake
@@ -3,13 +3,18 @@ vtk_module(vtkIOExport
     StandAlone
   DEPENDS
     vtkCommonCore
+    vtkRenderingAnnotation
     vtkRenderingContext2D
     vtkRenderingCore
     vtkRenderingFreeType
     vtkRenderingGL2PS
+    vtkRenderingLabel
     vtkRenderingOpenGL
     vtkImagingCore
-    vtkIOCore
+  PRIVATE_DEPENDS
+    vtkIOImage
+    vtkFiltersGeometry
+    vtkgl2ps
   TEST_DEPENDS
     vtkCommonColor
     vtkChartsCore
diff --git a/IO/Export/vtkGL2PSExporter.cxx b/IO/Export/vtkGL2PSExporter.cxx
index 9ed8379..2de4d1b 100644
--- a/IO/Export/vtkGL2PSExporter.cxx
+++ b/IO/Export/vtkGL2PSExporter.cxx
@@ -24,23 +24,27 @@
 #include "vtkContextActor.h"
 #include "vtkContextScene.h"
 #include "vtkCoordinate.h"
+#include "vtkFloatArray.h"
 #include "vtkGL2PSContextDevice2D.h"
 #include "vtkGL2PSUtilities.h"
 #include "vtkImageData.h"
 #include "vtkImageShiftScale.h"
 #include "vtkIntArray.h"
+#include "vtkLabeledDataMapper.h"
 #include "vtkMapper2D.h"
 #include "vtkMathTextUtilities.h"
 #include "vtkMatrix4x4.h"
 #include "vtkNew.h"
 #include "vtkObjectFactory.h"
-#include "vtkOpenGLRenderWindow.h"
 #include "vtkPath.h"
+#include "vtkPointData.h"
 #include "vtkProp.h"
 #include "vtkProp3DCollection.h"
+#include "vtkOpenGLRenderWindow.h"
 #include "vtkRenderWindow.h"
 #include "vtkRenderer.h"
 #include "vtkRendererCollection.h"
+#include "vtkScalarBarActor.h"
 #include "vtkStdString.h"
 #include "vtkTextActor.h"
 #include "vtkTextActor3D.h"
@@ -53,6 +57,7 @@
 #include "vtkVolumeCollection.h"
 #include "vtkWindowToImageFilter.h"
 #include "vtk_gl2ps.h"
+#include "vtkOpenGLError.h"
 
 #include <vector>
 
@@ -623,6 +628,8 @@ void vtkGL2PSExporter::SetPropVisibilities(vtkPropCollection *col, int vis)
 void vtkGL2PSExporter::DrawSpecialProps(vtkCollection *specialPropCol,
                                         vtkRendererCollection *renCol)
 {
+  vtkOpenGLClearErrorMacro();
+
   // Iterate through the renderers and the prop collections together:
   assert("renderers and special prop collections match" &&
          renCol->GetNumberOfItems() == specialPropCol->GetNumberOfItems());
@@ -651,6 +658,8 @@ void vtkGL2PSExporter::DrawSpecialProps(vtkCollection *specialPropCol,
     glMatrixMode(GL_MODELVIEW);
     glPopMatrix();
     }
+
+  vtkOpenGLCheckErrorMacro("failed after DrawSpecialProps");
 }
 
 void vtkGL2PSExporter::HandleSpecialProp(vtkProp *prop, vtkRenderer *ren)
@@ -668,11 +677,20 @@ void vtkGL2PSExporter::HandleSpecialProp(vtkProp *prop, vtkRenderer *ren)
         {
         this->DrawTextMapper(textMap, act2d, ren);
         }
+      else if (vtkLabeledDataMapper *ldm =
+               vtkLabeledDataMapper::SafeDownCast(map2d))
+        {
+        this->DrawLabeledDataMapper(ldm, ren);
+        }
       else // Some other mapper2D
         {
         return;
         }
       }
+    else if (vtkScalarBarActor *bar = vtkScalarBarActor::SafeDownCast(act2d))
+      {
+      this->DrawScalarBarActor(bar, ren);
+      }
     else // Some other actor2D
       {
       return;
@@ -749,11 +767,75 @@ void vtkGL2PSExporter::DrawTextMapper(vtkTextMapper *textMap,
   this->DrawViewportTextOverlay(string, tprop, coord, ren);
 }
 
+void vtkGL2PSExporter::DrawLabeledDataMapper(vtkLabeledDataMapper *mapper,
+                                             vtkRenderer *ren)
+{
+  vtkNew<vtkCoordinate> coord;
+  coord->SetViewport(ren);
+  switch (mapper->GetCoordinateSystem())
+    {
+    case vtkLabeledDataMapper::WORLD:
+      coord->SetCoordinateSystem(VTK_WORLD);
+      break;
+    case vtkLabeledDataMapper::DISPLAY:
+      coord->SetCoordinateSystem(VTK_DISPLAY);
+      break;
+    default:
+      vtkWarningMacro("Unsupported coordinate system for exporting vtkLabeled"
+                      "DataMapper. Some text may not be exported properly.");
+      return;
+    }
+
+  int numberOfLabels = mapper->GetNumberOfLabels();
+  const char *text;
+  double position[3];
+
+  for (int i = 0; i < numberOfLabels; ++i)
+    {
+    text = mapper->GetLabelText(i);
+    mapper->GetLabelPosition(i, position);
+    coord->SetValue(position);
+    this->DrawViewportTextOverlay(text, mapper->GetLabelTextProperty(),
+                                  coord.GetPointer(), ren);
+    }
+}
+
+void vtkGL2PSExporter::DrawScalarBarActor(vtkScalarBarActor *bar,
+                                          vtkRenderer *ren)
+{
+  // Disable the color bar -- its texture doesn't render properly, so we copy
+  // the rasterized pixel data for it instead.
+  int drawColorBarOrig(bar->GetDrawColorBar());
+  bar->SetDrawColorBar(0);
+
+  // Disable text -- it is handled separately
+  int drawTickLabelsOrig(bar->GetDrawTickLabels());
+  bar->SetDrawTickLabels(0);
+  int drawAnnotationsOrig(bar->GetDrawAnnotations());
+  bar->SetDrawAnnotations(0);
+
+  // Render what's left:
+  bar->RenderOpaqueGeometry(ren);
+  bar->RenderOverlay(ren);
+
+  // Restore settings
+  bar->SetDrawColorBar(drawColorBarOrig);
+  bar->SetDrawTickLabels(drawTickLabelsOrig);
+  bar->SetDrawAnnotations(drawAnnotationsOrig);
+
+  // Copy the color bar into the output.
+  int rect[4];
+  bar->GetScalarBarRect(rect, ren);
+  this->CopyPixels(rect, ren);
+}
+
 void vtkGL2PSExporter::DrawViewportTextOverlay(const char *string,
                                                vtkTextProperty *tprop,
                                                vtkCoordinate *coord,
                                                vtkRenderer *ren)
 {
+  vtkOpenGLClearErrorMacro();
+
   // Figure out the viewport information
   int *winsize = this->RenderWindow->GetSize();
   double *viewport = ren->GetViewport();
@@ -789,16 +871,21 @@ void vtkGL2PSExporter::DrawViewportTextOverlay(const char *string,
   glPopMatrix();
   glMatrixMode(GL_PROJECTION);
   glPopMatrix();
+
+  vtkOpenGLCheckErrorMacro("failed after DrawViewportTextOverlay");
 }
 
 
 void vtkGL2PSExporter::CopyPixels(int copyRect[4], vtkRenderer *ren)
 {
-  if (this->PixelData->GetScalarType() == VTK_FLOAT)
+  if (this->PixelData->GetScalarType() != VTK_FLOAT)
     {
     vtkErrorMacro(<<"Raster image is not correctly formatted.")
     return;
     }
+
+  vtkOpenGLClearErrorMacro();
+
   // Figure out the viewport information
   int *winsize = this->RenderWindow->GetSize();
   double *viewport = ren->GetViewport();
@@ -857,6 +944,8 @@ void vtkGL2PSExporter::CopyPixels(int copyRect[4], vtkRenderer *ren)
   glPopMatrix();
   glMatrixMode(GL_PROJECTION);
   glPopMatrix();
+
+  vtkOpenGLCheckErrorMacro("failed after CopyPixels");
 }
 
 void vtkGL2PSExporter::DrawContextActors(vtkPropCollection *contextActs,
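
The two new special-prop handlers above (DrawLabeledDataMapper and DrawScalarBarActor) mean that label text and scalar-bar annotations are now emitted as vector text instead of being skipped. A minimal C++ sketch of the scenario they cover, not part of this patch and with an illustrative scene setup:

#include "vtkGL2PSExporter.h"
#include "vtkLookupTable.h"
#include "vtkNew.h"
#include "vtkRenderWindow.h"
#include "vtkRenderer.h"
#include "vtkScalarBarActor.h"

// Export a renderer that contains a vtkScalarBarActor. The exporter now
// draws the bar's tick labels and annotations as vector text and copies
// only the color swatch from the rasterized frame.
void ExportSceneWithScalarBar(vtkRenderWindow *renWin, vtkRenderer *ren)
{
  vtkNew<vtkLookupTable> lut;
  lut->Build();

  vtkNew<vtkScalarBarActor> bar;
  bar->SetLookupTable(lut.GetPointer());
  ren->AddActor2D(bar.GetPointer());
  renWin->Render();

  vtkNew<vtkGL2PSExporter> exporter;
  exporter->SetRenderWindow(renWin);
  exporter->SetFileFormatToPS();
  exporter->SetFilePrefix("scene_with_scalarbar"); // hypothetical prefix
  exporter->Write();
}
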
diff --git a/IO/Export/vtkGL2PSExporter.h b/IO/Export/vtkGL2PSExporter.h
index ac6e944..9eb74f8 100644
--- a/IO/Export/vtkGL2PSExporter.h
+++ b/IO/Export/vtkGL2PSExporter.h
@@ -85,6 +85,7 @@ class vtkCollection;
 class vtkCoordinate;
 class vtkImageData;
 class vtkIntArray;
+class vtkLabeledDataMapper;
 class vtkMatrix4x4;
 class vtkPath;
 class vtkProp;
@@ -92,6 +93,7 @@ class vtkPropCollection;
 class vtkProp3DCollection;
 class vtkRenderer;
 class vtkRendererCollection;
+class vtkScalarBarActor;
 class vtkTextActor;
 class vtkTextActor3D;
 class vtkTextMapper;
@@ -129,6 +131,18 @@ public:
 //ETX
 
   // Description:
+  // Configure the exporter to expect a painter-ordered 2D rendering, that is,
+  // a rendering at a fixed depth where primitives are drawn from the bottom up.
+  // This disables depth sorting (which would break the painter ordering) and
+  // turns off the simple line offset (which can cause line primitives to be
+  // drawn on top of all other geometry).
+  void UsePainterSettings()
+  {
+    this->SetSortToOff();
+    this->SetSimpleLineOffset(0);
+  }
+
+  // Description:
   // Specify the format of file to write out.  This can be one of:
   // PS_FILE, EPS_FILE, PDF_FILE, TEX_FILE.  Defaults to EPS_FILE.
   // Depending on the option chosen it generates the appropriate file
@@ -310,6 +324,8 @@ protected:
   void DrawTextActor3D(vtkTextActor3D *textAct, vtkRenderer *ren);
   void DrawTextMapper(vtkTextMapper *textMap, vtkActor2D *textAct,
                       vtkRenderer *ren);
+  void DrawLabeledDataMapper(vtkLabeledDataMapper *mapper, vtkRenderer *ren);
+  void DrawScalarBarActor(vtkScalarBarActor *bar, vtkRenderer *ren);
   void DrawViewportTextOverlay(const char *string, vtkTextProperty *tprop,
                                vtkCoordinate *coord, vtkRenderer *ren);
 
diff --git a/IO/Export/vtkX3DExporterWriter.h b/IO/Export/vtkX3DExporterWriter.h
index 1f40676..7706c7f 100644
--- a/IO/Export/vtkX3DExporterWriter.h
+++ b/IO/Export/vtkX3DExporterWriter.h
@@ -72,7 +72,7 @@ public:
   // Flush can be called optionally after some operations to
   // flush the buffer to the filestream. A writer not necessarily
   // implements this function
-  virtual void Flush() {};
+  virtual void Flush() {}
 
   // Description:
   // Starts a document and sets all necessary information,
diff --git a/IO/FFMPEG/CMakeLists.txt b/IO/FFMPEG/CMakeLists.txt
index 05ce6bf..264b241 100644
--- a/IO/FFMPEG/CMakeLists.txt
+++ b/IO/FFMPEG/CMakeLists.txt
@@ -31,4 +31,4 @@ endif()
 
 vtk_module_library(vtkIOFFMPEG ${Module_SRCS})
 
-target_link_libraries(vtkIOFFMPEG ${_ffmpeg_libs})
+target_link_libraries(vtkIOFFMPEG LINK_PRIVATE ${_ffmpeg_libs})
diff --git a/IO/FFMPEG/Testing/Cxx/CMakeLists.txt b/IO/FFMPEG/Testing/Cxx/CMakeLists.txt
index 3b6a0d2..041ae7a 100644
--- a/IO/FFMPEG/Testing/Cxx/CMakeLists.txt
+++ b/IO/FFMPEG/Testing/Cxx/CMakeLists.txt
@@ -1,26 +1,4 @@
-set(TEST_SRC)
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-   TestFFMPEGWriter.cxx
-  ${TEST_SRC}
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_add_test_cxx(NO_VALID
+  TestFFMPEGWriter.cxx
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/FFMPEG/vtkFFMPEGConfig.h.in b/IO/FFMPEG/vtkFFMPEGConfig.h.in
index e5444db..8250984 100644
--- a/IO/FFMPEG/vtkFFMPEGConfig.h.in
+++ b/IO/FFMPEG/vtkFFMPEGConfig.h.in
@@ -18,8 +18,8 @@
 /* This header is configured by VTK's build process.  */
 
 #cmakedefine VTK_FFMPEG_HAS_OLD_HEADER
-#cmakedefine VTK_FFMPEG_OLD_URL_FCLOSE
 #cmakedefine VTK_FFMPEG_HAS_IMG_CONVERT
 #cmakedefine VTK_FFMPEG_NEW_ALLOC
+#cmakedefine VTK_FFMPEG_AVCODECID
 
 #endif
diff --git a/IO/FFMPEG/vtkFFMPEGWriter.cxx b/IO/FFMPEG/vtkFFMPEGWriter.cxx
index 7f47d21..8ebe129 100644
--- a/IO/FFMPEG/vtkFFMPEGWriter.cxx
+++ b/IO/FFMPEG/vtkFFMPEGWriter.cxx
@@ -170,7 +170,11 @@ int vtkFFMPEGWriterInternal::Start()
 
   //Set up the codec.
   AVCodecContext *c = this->avStream->codec;
-  c->codec_id = (CodecID)this->avOutputFormat->video_codec;
+#ifdef VTK_FFMPEG_AVCODECID
+  c->codec_id = static_cast<AVCodecID>(this->avOutputFormat->video_codec);
+#else
+  c->codec_id = static_cast<CodecID>(this->avOutputFormat->video_codec);
+#endif
 #ifdef VTK_FFMPEG_HAS_OLD_HEADER
   c->codec_type = CODEC_TYPE_VIDEO;
 #else
@@ -368,12 +372,12 @@ int vtkFFMPEGWriterInternal::Write(vtkImageData *id)
     }
 #endif
 
-#if LIBAVFORMAT_VERSION_MAJOR >= 54
-  AVPacket pkt = { 0 };
-  int got_frame;
-#endif
-
   //run the encoder
+  AVPacket pkt;
+  av_init_packet(&pkt);
+  pkt.data = NULL;
+  pkt.size = 0;
+
 #if LIBAVFORMAT_VERSION_MAJOR < 54
   int toAdd = avcodec_encode_video(cc,
                                    this->codecBuf,
@@ -381,9 +385,6 @@ int vtkFFMPEGWriterInternal::Write(vtkImageData *id)
                                    this->yuvOutput);
   if (toAdd)
     {
-    AVPacket pkt;
-    av_init_packet(&pkt);
-
     //to do playback at actual recorded rate, this will need more work
     pkt.pts = cc->coded_frame->pts;
     //pkt.dts = ?; not sure what decompression time stamp should be
@@ -410,6 +411,7 @@ int vtkFFMPEGWriterInternal::Write(vtkImageData *id)
     }
 
 #else
+  int got_frame;
   int ret = avcodec_encode_video2(cc,
                                   &pkt,
                                   this->yuvOutput,
@@ -463,9 +465,7 @@ void vtkFFMPEGWriterInternal::End()
     if (this->openedFile)
       {
       av_write_trailer(this->avFormatContext);
-#if VTK_FFMPEG_OLD_URL_FCLOSE
-      url_fclose(&this->avFormatContext->pb);
-#elif LIBAVFORMAT_VERSION_MAJOR < 54
+#if LIBAVFORMAT_VERSION_MAJOR < 54
       url_fclose(this->avFormatContext->pb);
 #else
       avio_close(this->avFormatContext->pb);
diff --git a/IO/GDAL/CMakeLists.txt b/IO/GDAL/CMakeLists.txt
index fe3c6dc..5d4d086 100644
--- a/IO/GDAL/CMakeLists.txt
+++ b/IO/GDAL/CMakeLists.txt
@@ -7,4 +7,4 @@ include_directories(${GDAL_INCLUDE_DIRS})
 
 vtk_module_library(vtkIOGDAL ${GDAL_SRCS})
 
-target_link_libraries(vtkIOGDAL ${GDAL_LIBRARY})
+target_link_libraries(vtkIOGDAL LINK_PRIVATE ${GDAL_LIBRARY})
diff --git a/IO/GDAL/Testing/Cxx/CMakeLists.txt b/IO/GDAL/Testing/Cxx/CMakeLists.txt
index 7e86efc..eb16388 100644
--- a/IO/GDAL/Testing/Cxx/CMakeLists.txt
+++ b/IO/GDAL/Testing/Cxx/CMakeLists.txt
@@ -1,25 +1,4 @@
-create_test_sourcelist(Tests
-  ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   TestGDALVectorReader.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/GDAL/Testing/Cxx/TestGDALVectorReader.cxx b/IO/GDAL/Testing/Cxx/TestGDALVectorReader.cxx
index 170441b..7e739b8 100644
--- a/IO/GDAL/Testing/Cxx/TestGDALVectorReader.cxx
+++ b/IO/GDAL/Testing/Cxx/TestGDALVectorReader.cxx
@@ -16,6 +16,7 @@
 
 // VTK includes
 #include <vtkActor.h>
+#include <vtkCellData.h>
 #include <vtkCompositePolyDataMapper.h>
 #include <vtkDataSetAttributes.h>
 #include <vtkDoubleArray.h>
@@ -45,7 +46,22 @@ int TestGDALVectorReader(int argc, char** argv)
   // Create reader to read shape file.
   vtkNew<vtkGDALVectorReader> reader;
   reader->SetFileName(vectorFileName);
+  reader->AddFeatureIdsOn();
   delete [] vectorFileName;
+
+  // Test layer information helpers
+  reader->UpdateInformation();
+  int nl = reader->GetNumberOfLayers();
+  for (int i = 0; i < nl; ++i)
+    {
+    reader->SetActiveLayer(i);
+    cout
+      << "Layer " << i
+      << " Type " << reader->GetActiveLayerType()
+      << " FeatureCount " << reader->GetActiveLayerFeatureCount()
+      << "\n";
+    }
+  reader->SetActiveLayer(0); // Read only layer 0, which is the only layer.
   reader->Update();
 
   // We need a renderer
@@ -54,6 +70,21 @@ int TestGDALVectorReader(int argc, char** argv)
   // Get the data
   vtkSmartPointer<vtkMultiBlockDataSet> mbds = reader->GetOutput();
 
+  // Verify that feature IDs exist as a scalar (assuming first block exists)
+  if (mbds && mbds->GetNumberOfBlocks() > 0)
+    {
+    vtkPolyData* pd = vtkPolyData::SafeDownCast(mbds->GetBlock(0));
+    vtkCellData* cd = pd ? pd->GetCellData() : NULL;
+    if (cd)
+      {
+      if (!cd->GetPedigreeIds())
+        {
+        cerr << "Unable to find pedigree IDs even though AddFeatureIds was ON\n";
+        return 1;
+        }
+      }
+    }
+
   // Create scene
   vtkNew<vtkActor> actor;
   vtkNew<vtkCompositePolyDataMapper> mapper;
diff --git a/IO/GDAL/Testing/Data/Baseline/TestGDALVectorReader.png.md5 b/IO/GDAL/Testing/Data/Baseline/TestGDALVectorReader.png.md5
new file mode 100644
index 0000000..4e0d416
--- /dev/null
+++ b/IO/GDAL/Testing/Data/Baseline/TestGDALVectorReader.png.md5
@@ -0,0 +1 @@
+3593ea69e791a93fd7f6d388982cd712
diff --git a/IO/GDAL/vtkGDALVectorReader.cxx b/IO/GDAL/vtkGDALVectorReader.cxx
index 547465e..86854a0 100644
--- a/IO/GDAL/vtkGDALVectorReader.cxx
+++ b/IO/GDAL/vtkGDALVectorReader.cxx
@@ -23,6 +23,7 @@
 #include <vtkInformationVector.h>
 #include <vtkIntArray.h>
 #include <vtkMultiBlockDataSet.h>
+#include <vtkNew.h>
 #include <vtkObjectFactory.h>
 #include <vtkStringArray.h>
 #include <vtkPolyData.h>
@@ -41,7 +42,7 @@ int vtkGDALVectorReader::OGRRegistered = 0;
 class vtkGDALVectorReader::Internal
 {
 public:
-  Internal( const char* srcName, int srcMode, int appendFeatures )
+  Internal( const char* srcName, int srcMode, int appendFeatures, int addFeatIds )
     {
     this->Source = OGRSFDriverRegistrar::Open( srcName, srcMode, &this->Driver );
     if ( ! this->Source )
@@ -54,6 +55,7 @@ public:
       }
     this->LayerIdx = 0;
     this->AppendFeatures = appendFeatures;
+    this->AddFeatureIds = addFeatIds;
     }
   ~Internal()
     {
@@ -92,6 +94,13 @@ public:
       (*pd)->GetCellData()->AddArray( arr );
       arr->FastDelete();
       }
+    if (this->AddFeatureIds)
+      {
+      vtkNew<vtkIdTypeArray> featIds;
+      featIds->SetName("_vtkPedigreeIds");
+      (*pd)->GetCellData()->SetPedigreeIds(featIds.GetPointer());
+      fields->push_back(featIds.GetPointer());
+      }
 
     *lines = vtkCellArray::New();
     *verts  = vtkCellArray::New();
@@ -135,6 +144,7 @@ public:
       if ( ! pts )
         {
         pts = vtkPoints::New();
+        pts->SetDataTypeToDouble();
         pd->SetPoints( pts );
         pts->FastDelete();
         }
@@ -190,6 +200,14 @@ public:
             }
           }
         }
+      if (this->AddFeatureIds)
+        {
+        vtkIdTypeArray* idarr = vtkIdTypeArray::SafeDownCast(fields[numFields]);
+        for ( i = 0; i < nPoly; ++i )
+          {
+          idarr->InsertNextValue(feat->GetFID());
+          }
+        }
       OGRFeature::DestroyFeature(feat);
       }
 
@@ -291,6 +309,7 @@ public:
   const char* LastError;
   int LayerIdx;
   int AppendFeatures;
+  int AddFeatureIds;
 };
 
 // -----------------------------------------------------------------------------
@@ -298,6 +317,7 @@ vtkGDALVectorReader::vtkGDALVectorReader()
 {
   this->FileName = 0;
   this->Implementation = 0;
+  this->ActiveLayer = -1;
 
   this->SetNumberOfInputPorts( 0 );
 
@@ -308,6 +328,7 @@ vtkGDALVectorReader::vtkGDALVectorReader()
     }
 
   this->AppendFeatures = 0;
+  this->AddFeatureIds = 0;
 }
 
 // -----------------------------------------------------------------------------
@@ -326,6 +347,8 @@ void vtkGDALVectorReader::PrintSelf( ostream& os, vtkIndent indent )
   this->Superclass::PrintSelf( os, indent );
   os << indent << "FileName: " << ( this->FileName ? this->FileName : "(null)" ) << "\n";
   os << indent << "Implementation: " << this->Implementation << "\n";
+  os << indent << "AppendFeatures: " << (this->AppendFeatures ? "ON" : "OFF") << "\n";
+  os << indent << "AddFeatureIds: " << (this->AddFeatureIds ? "ON" : "OFF") << "\n";
 }
 
 // -----------------------------------------------------------------------------
@@ -408,13 +431,17 @@ int vtkGDALVectorReader::GetFeatureCount(int layerIndex)
 // -----------------------------------------------------------------------------
 int vtkGDALVectorReader::GetActiveLayerType()
 {
-  return this->GetLayerType(ActiveLayer);
+  return
+    this->ActiveLayer < 0 || this->ActiveLayer >= this->GetNumberOfLayers() ?
+    -1 : this->GetLayerType(this->ActiveLayer);
 }
 
 // -----------------------------------------------------------------------------
 int vtkGDALVectorReader::GetActiveLayerFeatureCount()
 {
-  return this->GetFeatureCount(ActiveLayer);
+  return
+    this->ActiveLayer < 0 || this->ActiveLayer >= this->GetNumberOfLayers() ?
+    0 : this->GetFeatureCount(this->ActiveLayer);
 }
 
 // -----------------------------------------------------------------------------
@@ -492,7 +519,13 @@ int vtkGDALVectorReader::RequestData( vtkInformation* request,
 
   vtkGDALVectorReader::Internal* p = this->Implementation;
 
-  for ( int layerIdx = 0; layerIdx < p->Source->GetLayerCount(); ++layerIdx )
+  int lastLayer = p->Source->GetLayerCount() - 1;
+  int startLayer =
+    this->ActiveLayer < 0 || this->ActiveLayer > lastLayer ?
+    0 : this->ActiveLayer;
+  int endLayer = this->ActiveLayer < 0 || this->ActiveLayer > lastLayer ?
+    lastLayer : this->ActiveLayer;
+  for ( int layerIdx = startLayer; layerIdx <= endLayer; ++layerIdx )
     {
     OGRLayer* layer = p->Source->GetLayer( layerIdx );
     if ( ! layer )
@@ -526,7 +559,8 @@ int vtkGDALVectorReader::InitializeInternal()
   if ( !this->Implementation )
     {
     this->Implementation = new vtkGDALVectorReader::Internal(
-                             this->FileName, 0 , this->AppendFeatures );
+                             this->FileName, 0 ,
+                             this->AppendFeatures, this->AddFeatureIds );
     if ( ! this->Implementation || this->Implementation->LastError )
       {
       if ( this->Implementation )
diff --git a/IO/GDAL/vtkGDALVectorReader.h b/IO/GDAL/vtkGDALVectorReader.h
index c2c1caa..f40ae86 100644
--- a/IO/GDAL/vtkGDALVectorReader.h
+++ b/IO/GDAL/vtkGDALVectorReader.h
@@ -17,7 +17,10 @@
 // vtkGDALVectorReader is a source object that reads vector files and uses
 // GDAL as the underlying library for the task. GDAL is required for this
 // reader. The output of the reader is a vtkMultiBlockDataSet
-
+//
+// This filter uses the ActiveLayer member to only load entries from the
+// specified layer (when ActiveLayer >= 0).
+//
 // .SECTION See Also
 // vtkMultiBlockDataSet
 
@@ -60,7 +63,9 @@ public:
   int GetActiveLayerFeatureCount();
 
   // Description:
-  // Set and Get the active layer
+  // Set and Get the active layer.
+  // If ActiveLayer is less than 0 (the default is -1), then all
+  // layers are read. Otherwise, only the specified layer is read.
   vtkSetMacro(ActiveLayer,int);
   vtkGetMacro(ActiveLayer,int);
 
@@ -83,6 +88,19 @@ public:
   // Return the projection string belonging to a layer.
   const char* GetLayerProjection(int layerIndex);
 
+  // Description:
+  // Set/get whether feature IDs should be generated.
+  // Some GDAL primitives (e.g., a polygon with a hole
+  // in its interior) are represented by multiple VTK
+  // cells. If you wish to identify the primitive
+  // responsible for a VTK cell, turn this on. It is
+  // off by default for backwards compatibility.
+  // The array of feature IDs will be the active
+  // cell-data pedigree IDs.
+  vtkSetMacro(AddFeatureIds,int);
+  vtkGetMacro(AddFeatureIds,int);
+  vtkBooleanMacro(AddFeatureIds,int);
+
 protected:
   vtkGDALVectorReader();
   virtual ~vtkGDALVectorReader();
@@ -97,6 +115,7 @@ protected:
 
   int ActiveLayer;
   int AppendFeatures;
+  int AddFeatureIds;
 
   //BTX
   class Internal;
diff --git a/IO/GeoJSON/CMakeLists.txt b/IO/GeoJSON/CMakeLists.txt
new file mode 100644
index 0000000..ec741ac
--- /dev/null
+++ b/IO/GeoJSON/CMakeLists.txt
@@ -0,0 +1,5 @@
+set(Module_SRCS
+  vtkGeoJSONWriter.cxx
+  )
+
+vtk_module_library(vtkIOGeoJSON ${Module_SRCS})
diff --git a/IO/GeoJSON/Testing/Python/CMakeLists.txt b/IO/GeoJSON/Testing/Python/CMakeLists.txt
new file mode 100644
index 0000000..f8b5c1f
--- /dev/null
+++ b/IO/GeoJSON/Testing/Python/CMakeLists.txt
@@ -0,0 +1 @@
+vtk_add_test_python(TestGeoJSONWriter.py NO_VALID)
diff --git a/IO/GeoJSON/Testing/Python/TestGeoJSONWriter.py b/IO/GeoJSON/Testing/Python/TestGeoJSONWriter.py
new file mode 100644
index 0000000..5dbe5c3
--- /dev/null
+++ b/IO/GeoJSON/Testing/Python/TestGeoJSONWriter.py
@@ -0,0 +1,54 @@
+import vtk, os, sys
+from vtk.test import Testing
+
+ss = vtk.vtkSphereSource() #make mesh to test with
+
+af = vtk.vtkElevationFilter() #add some attributes
+af.SetInputConnection(ss.GetOutputPort())
+
+ef = vtk.vtkExtractEdges() #make lines to test
+ef.SetInputConnection(af.GetOutputPort())
+
+gf = vtk.vtkGlyph3D() #make verts to test
+pts = vtk.vtkPoints()
+pts.InsertNextPoint(0,0,0)
+verts = vtk.vtkCellArray()
+avert = vtk.vtkVertex()
+avert.GetPointIds().SetId(0, 0)
+verts.InsertNextCell(avert)
+onevertglyph = vtk.vtkPolyData()
+onevertglyph.SetPoints(pts)
+onevertglyph.SetVerts(verts)
+gf.SetSourceData(onevertglyph)
+gf.SetInputConnection(af.GetOutputPort())
+
+testwrites = ["points","lines","mesh"]
+failed = False
+for datasetString in testwrites:
+  if datasetString == "points":
+    toshow=gf
+  elif datasetString == "lines":
+    toshow = ef
+  else:
+    toshow = af
+  gw = vtk.vtkGeoJSONWriter()
+  fname = "sphere_"+datasetString+".json"
+  gw.SetInputConnection(toshow.GetOutputPort())
+  gw.SetFileName(fname)
+  gw.Write()
+  if (os.path.exists(fname) and
+     os.path.isfile(fname)):
+    os.remove(fname)
+  else:
+    print "Failed to write " + fname + " to file"
+    failed = True
+  gw.WriteToOutputStringOn()
+  gw.Write()
+  gj = "['"+str(gw.RegisterAndGetOutputString()).replace('\n','')+"']"
+  if len(gj) <= 1000:
+    print "Failed to write " + fname + " to buffer"
+    failed = True
+
+if failed:
+  sys.exit(1)
+sys.exit(0)
diff --git a/IO/GeoJSON/module.cmake b/IO/GeoJSON/module.cmake
new file mode 100644
index 0000000..bb38d9c
--- /dev/null
+++ b/IO/GeoJSON/module.cmake
@@ -0,0 +1,7 @@
+vtk_module(vtkIOGeoJSON
+  DEPENDS
+    vtkCommonDataModel
+    vtkIOCore
+  TEST_DEPENDS
+    vtkTestingCore
+  )
diff --git a/IO/GeoJSON/vtkGeoJSONWriter.cxx b/IO/GeoJSON/vtkGeoJSONWriter.cxx
new file mode 100644
index 0000000..ed73b35
--- /dev/null
+++ b/IO/GeoJSON/vtkGeoJSONWriter.cxx
@@ -0,0 +1,433 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkGeoJSONWriter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkGeoJSONWriter.h"
+
+#include "vtkCellArray.h"
+#include "vtkInformation.h"
+#include "vtkLookupTable.h"
+#include "vtkMath.h"
+#include "vtkObjectFactory.h"
+#include "vtkPointData.h"
+#include "vtkPolyData.h"
+
+#include <vtksys/ios/sstream>
+
+vtkStandardNewMacro(vtkGeoJSONWriter);
+
+#define VTK_GJWRITER_MAXPOINTS 32000
+
+//------------------------------------------------------------------------------
+vtkGeoJSONWriter::vtkGeoJSONWriter()
+{
+  this->FileName = NULL;
+  this->OutputString = NULL;
+  this->SetNumberOfOutputPorts(0);
+  this->WriteToOutputString = false;
+  this->ScalarFormat = 2;
+  this->LookupTable = NULL;
+}
+
+//------------------------------------------------------------------------------
+vtkGeoJSONWriter::~vtkGeoJSONWriter()
+{
+  this->SetFileName(NULL);
+  delete[] this->OutputString;
+  this->SetLookupTable(NULL);
+}
+
+//------------------------------------------------------------------------------
+void vtkGeoJSONWriter::PrintSelf(ostream & os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "FileName: "
+     << (this->FileName?this->FileName:"NONE") << endl;
+  os << indent << "WriteToOutputString: "
+     << (this->WriteToOutputString?"True":"False") << endl;
+  os << indent << "ScalarFormat: " << this->ScalarFormat << endl;
+}
+
+//------------------------------------------------------------------------------
+int vtkGeoJSONWriter::FillInputPortInformation(int port, vtkInformation *info)
+{
+  if (port == 0)
+    {
+    info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkPolyData");
+    }
+  return 1;
+}
+
+//------------------------------------------------------------------------------
+ostream *vtkGeoJSONWriter::OpenFile()
+{
+  vtkDebugMacro(<<"Opening file\n");
+
+  ostream *fptr;
+
+  if (!this->WriteToOutputString)
+    {
+    if (!this->FileName)
+      {
+      vtkErrorMacro(<< "No FileName specified! Can't write!");
+      return NULL;
+      }
+
+    fptr = new ofstream(this->FileName, ios::out);
+    }
+  else
+    {
+    // Get rid of any old output string.
+    if (this->OutputString)
+      {
+      delete [] this->OutputString;
+      this->OutputString = NULL;
+      this->OutputStringLength = 0;
+      }
+    fptr = new vtksys_ios::ostringstream;
+    }
+
+  if (fptr->fail())
+    {
+    vtkErrorMacro(<< "Unable to open file: "<< this->FileName);
+    delete fptr;
+    return NULL;
+    }
+
+  return fptr;
+}
+
+//------------------------------------------------------------------------------
+void vtkGeoJSONWriter::CloseFile(ostream *fp)
+{
+  vtkDebugMacro(<<"Closing file\n");
+
+  if ( fp != NULL )
+    {
+    if (this->WriteToOutputString)
+      {
+      vtksys_ios::ostringstream *ostr =
+        static_cast<vtksys_ios::ostringstream*>(fp);
+
+      delete [] this->OutputString;
+      this->OutputStringLength = static_cast<int>(ostr->str().size());
+      //+1's account for null terminator
+      this->OutputString = new char[ostr->str().size()+1];
+      memcpy(this->OutputString, ostr->str().c_str(),
+        this->OutputStringLength+1);
+      }
+
+    delete fp;
+    }
+}
+
+//------------------------------------------------------------------------------
+void vtkGeoJSONWriter::ConditionalComma(ostream *fp,
+  vtkIdType cnt, vtkIdType limit)
+{
+  if (cnt+1 != limit)
+    {
+    *fp << ",";
+    }
+}
+
+//------------------------------------------------------------------------------
+void vtkGeoJSONWriter::WriteScalar(ostream *fp,
+  vtkDataArray *da, vtkIdType ptId)
+{
+  if (this->ScalarFormat == 0)
+  {
+    return;
+  }
+  if (da)
+  {
+    double b = da->GetTuple1(ptId);
+    if (this->ScalarFormat == 1)
+    {
+      vtkLookupTable *lut = this->GetLookupTable();
+      if (!lut)
+      {
+        lut = vtkLookupTable::New();
+        lut->SetNumberOfColors(256);
+        lut->SetHueRange(0.0,0.667);
+        lut->SetRange(da->GetRange());
+        lut->Build();
+        this->SetLookupTable(lut);
+        lut->Delete();
+      }
+      unsigned char *color = lut->MapValue(b);
+      *fp << ","
+          << (double)color[0]/255.0 << ","
+          << (double)color[1]/255.0 << ","
+          << (double)color[2]/255;
+    }
+    else
+    {
+      if (vtkMath::IsNan(b))
+        {
+        *fp << "," << "null";
+        }
+      else
+        {
+        *fp << "," << b;
+        }
+    }
+  }
+}
+
+//------------------------------------------------------------------------------
+void vtkGeoJSONWriter::WriteData()
+{
+  ostream *fp;
+  vtkPolyData *input = vtkPolyData::SafeDownCast(this->GetInput());
+
+  vtkDebugMacro(<<"Writing vtk polygonal data to geojson file...");
+  fp=this->OpenFile();
+  if ( !fp )
+    {
+    return;
+    }
+
+  *fp << "{\n";
+  *fp << "\"type\": \"Feature\",\n";
+  vtkDataArray *da = input->GetPointData()->GetScalars();
+  if (!da)
+  {
+    da = input->GetPointData()->GetArray(0);
+  }
+  if (da)
+  {
+    switch (this->ScalarFormat) {
+    case 0:
+      *fp << "\"properties\": {\"ScalarFormat\": \"none\"},\n";
+      break;
+    case 1:
+      *fp << "\"properties\": {\"ScalarFormat\": \"rgb\"},\n";
+      break;
+    case 2:
+      double rng[2];
+      da->GetRange(rng);
+      *fp << "\"properties\": {\"ScalarFormat\": \"values\", \"ScalarRange\": [" << rng[0] << "," << rng[1] << "] },\n";
+      break;
+    }
+  }
+  else
+  {
+    *fp << "\"properties\": {\"ScalarFormat\": \"none\"},\n";
+  }
+  *fp << "\"geometry\":\n";
+  *fp << "{\n";
+  *fp << "\"type\": \"GeometryCollection\",\n";
+  *fp << "\"geometries\":\n";
+  *fp << "[\n";
+
+  vtkIdType cellLoc = 0;
+  vtkIdType *cellPts = NULL;
+  vtkIdType cellSize = 0;
+  vtkIdType numlines, numpolys;
+  numlines = input->GetLines()->GetNumberOfCells();
+  numpolys = input->GetPolys()->GetNumberOfCells();
+
+  //VERTS
+  vtkCellArray *ca;
+  ca = input->GetVerts();
+  if (ca && ca->GetNumberOfCells())
+    {
+    bool done = false;
+    vtkIdType inCell = 0;
+    vtkIdType ptCnt = 0;
+    do //loop to break into sections with < VTK_GJWRITER_MAXPOINTS points
+      {
+      *fp << "{\n";
+      *fp << "\"type\": \"MultiPoint\",\n";
+      *fp << "\"coordinates\":\n";
+      *fp << "[\n";
+      for (; inCell < ca->GetNumberOfCells() && ptCnt < VTK_GJWRITER_MAXPOINTS; inCell++)
+        {
+        ca->GetCell(cellLoc, cellSize, cellPts);
+        cellLoc += cellSize+1;
+        ptCnt += cellSize;
+        vtkIdType inPt;
+        for (inPt = 0; inPt < cellSize; inPt++)
+          {
+          double coords[3];
+          input->GetPoint(cellPts[inPt], coords);
+          *fp << "[" << coords[0] << "," << coords[1] << "," << coords[2];
+          this->WriteScalar(fp, da, cellPts[inPt]);
+          *fp << "]";
+          this->ConditionalComma(fp, inPt, cellSize);
+          }
+        if (ptCnt<VTK_GJWRITER_MAXPOINTS)
+          {
+          this->ConditionalComma(fp, inCell, ca->GetNumberOfCells());
+          }
+        *fp << "\n";
+        }
+      *fp << "]\n}\n"; //coordinates for this cell array, and close it
+      if (inCell < ca->GetNumberOfCells())
+        {
+        ptCnt = 0;
+        *fp << ",\n";
+        }
+      else
+        {
+        if (numlines || numpolys)
+          {
+          *fp << ",";
+          }
+        done = true;
+        }
+      } while (!done);
+    }
+
+  //lines
+  ca = input->GetLines();
+  if (ca && ca->GetNumberOfCells())
+    {
+    bool done = false;
+    vtkIdType inCell = 0;
+    vtkIdType ptCnt = 0;
+    do //loop to break into sections with < VTK_GJWRITER_MAXPOINTS points
+      {
+      *fp << "{\n";
+      *fp << "\"type\": \"MultiLineString\",\n";
+      *fp << "\"coordinates\":\n";
+      *fp << "[\n";
+      for (; inCell < ca->GetNumberOfCells() && ptCnt < VTK_GJWRITER_MAXPOINTS; inCell++)
+        {
+        *fp << "[ "; //one cell
+        ca->GetCell(cellLoc, cellSize, cellPts);
+        cellLoc += cellSize+1;
+        ptCnt += cellSize;
+        vtkIdType inPt;
+        for (inPt = 0; inPt < cellSize; inPt++)
+          {
+          double coords[3];
+          input->GetPoint(cellPts[inPt], coords);
+          *fp << "[" << coords[0] << "," << coords[1] << "," << coords[2];
+          this->WriteScalar(fp, da, cellPts[inPt]);
+          *fp << "]";
+          this->ConditionalComma(fp, inPt, cellSize);
+          }
+        *fp << " ]";//one cell
+        if (ptCnt<VTK_GJWRITER_MAXPOINTS)
+          {
+          this->ConditionalComma(fp, inCell, ca->GetNumberOfCells());
+          }
+        *fp << "\n";
+        }
+      *fp << "]"; //coordinates for this cell array
+      *fp << "\n";
+      *fp << "}\n"; //this cell array
+      if (inCell < ca->GetNumberOfCells())
+        {
+        ptCnt = 0;
+        *fp << ",\n";
+        }
+      else
+        {
+        if (numpolys)
+          {
+          *fp << ",";
+          }
+        done = true;
+        }
+      } while (!done);
+    }
+
+  //polygons
+  ca = input->GetPolys();
+  if (ca && ca->GetNumberOfCells())
+    {
+    bool done = false;
+    vtkIdType inCell = 0;
+    vtkIdType ptCnt = 0;
+    do //loop to break into sections with < VTK_GJWRITER_MAXPOINTS points
+      {
+      *fp << "{\n";
+      *fp << "\"type\": \"MultiPolygon\",\n";
+      *fp << "\"coordinates\":\n";
+      *fp << "[\n";
+      for (; inCell < ca->GetNumberOfCells() && ptCnt < VTK_GJWRITER_MAXPOINTS; inCell++)
+        {
+        *fp << "[[ "; //one cell
+        ca->GetCell(cellLoc, cellSize, cellPts);
+        cellLoc += cellSize+1;
+        ptCnt += cellSize;
+        vtkIdType inPt;
+        for (inPt = 0; inPt < cellSize; inPt++)
+          {
+          double coords[3];
+          input->GetPoint(cellPts[inPt], coords);
+          *fp << "[" << coords[0] << "," << coords[1] << "," << coords[2];
+          this->WriteScalar(fp, da, cellPts[inPt]);
+          *fp << "]";
+          this->ConditionalComma(fp, inPt, cellSize);
+          }
+        *fp << " ]]";//one cell
+        if (ptCnt<VTK_GJWRITER_MAXPOINTS)
+          {
+          this->ConditionalComma(fp, inCell, ca->GetNumberOfCells());
+          }
+        *fp << "\n";
+        }
+      *fp << "]"; //coordinates for this cell array
+      *fp << "\n";
+      *fp << "}\n"; //this cell array
+      if (inCell < ca->GetNumberOfCells())
+        {
+        ptCnt = 0;
+        *fp << ",\n";
+        }
+      else
+        {
+        done = true;
+        }
+      } while (!done);
+    }
+
+  *fp << "]\n";//feature.geometry.GeometryCollection.geometries
+  *fp << "}\n";//feature.geometry
+  *fp << "}\n";//feature
+
+  fp->flush();
+  if (fp->fail())
+    {
+    vtkErrorMacro("Problem writing result; check disk space.");
+    delete fp;
+    fp = NULL;
+    }
+
+  this->CloseFile(fp);
+}
+
+//------------------------------------------------------------------------------
+char *vtkGeoJSONWriter::RegisterAndGetOutputString()
+{
+  char *tmp = this->OutputString;
+
+  this->OutputString = NULL;
+  this->OutputStringLength = 0;
+
+  return tmp;
+}
+
+//------------------------------------------------------------------------------
+vtkStdString vtkGeoJSONWriter::GetOutputStdString()
+{
+  return vtkStdString(this->OutputString, this->OutputStringLength);
+}
+
+//------------------------------------------------------------------------------
+vtkCxxSetObjectMacro(vtkGeoJSONWriter, LookupTable, vtkLookupTable)
diff --git a/IO/GeoJSON/vtkGeoJSONWriter.h b/IO/GeoJSON/vtkGeoJSONWriter.h
new file mode 100644
index 0000000..e9ac8d6
--- /dev/null
+++ b/IO/GeoJSON/vtkGeoJSONWriter.h
@@ -0,0 +1,113 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkGeoJSONWriter.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkGeoJSONWriter - Convert vtkPolyData to Geo JSON format.
+// .SECTION Description
+// Outputs a Geo JSON (http://www.geojson.org) description of the input
+// polydata data set.
+
+#ifndef __vtkGeoJSONWriter_h
+#define __vtkGeoJSONWriter_h
+
+#include <vtkIOGeoJSONModule.h> // For export macro
+#include <vtkWriter.h>
+
+class vtkLookupTable;
+
+class VTKIOGEOJSON_EXPORT vtkGeoJSONWriter : public vtkWriter
+{
+public:
+  static vtkGeoJSONWriter* New();
+  virtual void PrintSelf( ostream& os, vtkIndent indent );
+  vtkTypeMacro(vtkGeoJSONWriter,vtkWriter);
+
+  // Description:
+  // Accessor for name of the file that will be opened on WriteData
+  vtkSetStringMacro(FileName);
+  vtkGetStringMacro(FileName);
+
+  // Description:
+  // Enable writing to an OutputString instead of the default, a file.
+  vtkSetMacro(WriteToOutputString,bool);
+  vtkGetMacro(WriteToOutputString,bool);
+  vtkBooleanMacro(WriteToOutputString,bool);
+
+  // Description:
+  // When WriteToOutputString is on, a string is allocated, written to,
+  // and can be retrieved with these methods.  The string is deleted during
+  // the next call to Write().
+  vtkGetMacro(OutputStringLength, int);
+  vtkGetStringMacro(OutputString);
+  unsigned char *GetBinaryOutputString()
+    {
+    return reinterpret_cast<unsigned char *>(this->OutputString);
+    }
+
+  // Description:
+  // Controls how data attributes are written out.
+  // When 0, data attributes are ignored and not written at all.
+  // When 1, values are mapped through a lookup table and colors are written to the output.
+  // When 2, which is the default, the values are written directly.
+  vtkSetMacro(ScalarFormat,int);
+  vtkGetMacro(ScalarFormat,int);
+
+  // Description:
+  // Controls the lookup table to use when ScalarFormat is set to 1 (map colors).
+  void SetLookupTable(vtkLookupTable *lut);
+  vtkGetObjectMacro(LookupTable, vtkLookupTable);
+
+  // Description:
+  // When WriteToOutputString is on, this method returns a copy of the
+  // output string in a vtkStdString.
+  vtkStdString GetOutputStdString();
+
+  // Description:
+  // This convenience method returns the string and sets the internal pointer
+  // to NULL, so that the user becomes responsible for deleting the string.
+  // I am not sure what the name should be, so it may change in the future.
+  char *RegisterAndGetOutputString();
+
+protected:
+  vtkGeoJSONWriter();
+  virtual ~vtkGeoJSONWriter();
+
+  // Only accepts vtkPolyData
+  virtual int FillInputPortInformation(int port, vtkInformation *info);
+
+  // Implementation of Write()
+  void WriteData();
+
+  // Helper for Write that writes attributes out
+  void WriteScalar(ostream *fp, vtkDataArray *da, vtkIdType ptId);
+  vtkLookupTable *LookupTable;
+
+  bool WriteToOutputString;
+  char *OutputString;
+  int OutputStringLength;
+
+  int ScalarFormat;
+
+  // Internal helpers
+  ostream *OpenFile();
+  void ConditionalComma(ostream *, vtkIdType, vtkIdType);
+  void CloseFile(ostream *);
+
+  char* FileName;
+
+private:
+  vtkGeoJSONWriter(const vtkGeoJSONWriter&);  // Not implemented.
+  void operator=(const vtkGeoJSONWriter&);       // Not implemented.
+};
+
+#endif // __vtkGeoJSONWriter_h
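
For reference, a minimal usage sketch of the writer API declared above (illustrative only: the function name and output file name are assumptions, and SetInputData()/Write() come from the vtkWriter base class):

#include <vtkGeoJSONWriter.h>
#include <vtkPolyData.h>
#include <vtkSmartPointer.h>
#include <vtkStdString.h>

void WritePolyDataAsGeoJSON(vtkPolyData* polyData)
{
  vtkSmartPointer<vtkGeoJSONWriter> writer =
    vtkSmartPointer<vtkGeoJSONWriter>::New();
  writer->SetInputData(polyData);        // only vtkPolyData is accepted
  writer->SetScalarFormat(2);            // 2 (default): write values directly
  writer->SetFileName("polydata.geojson");
  writer->Write();                       // Write() comes from vtkWriter

  // Or write to a string instead of a file:
  writer->WriteToOutputStringOn();
  writer->Write();
  vtkStdString json = writer->GetOutputStdString();
  (void)json;                            // use the JSON text as needed
}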
diff --git a/IO/Geometry/CMakeLists.txt b/IO/Geometry/CMakeLists.txt
index 6c1cf8e..f8ea0ad 100644
--- a/IO/Geometry/CMakeLists.txt
+++ b/IO/Geometry/CMakeLists.txt
@@ -21,7 +21,6 @@ SET(Module_SRCS
   vtkPDBReader.cxx
   vtkPlot3DMetaReader.cxx
   vtkProStarReader.cxx
-  vtkSESAMEReader.cxx
   vtkSTLReader.cxx
   vtkSTLWriter.cxx
   vtkTecplotReader.cxx
diff --git a/IO/Geometry/Testing/Cxx/CMakeLists.txt b/IO/Geometry/Testing/Cxx/CMakeLists.txt
index 3bdda3c..10593e8 100644
--- a/IO/Geometry/Testing/Cxx/CMakeLists.txt
+++ b/IO/Geometry/Testing/Cxx/CMakeLists.txt
@@ -1,11 +1,31 @@
 # Tests with data
-if(VTK_DATA_ROOT)
-  set(NEEDS_DATA
+ExternalData_Expand_Arguments(VTKData _
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/0/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/0.5/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/0.5/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/1/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/1/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/1.5/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/1.5/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/2/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/2/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/2.5/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/2.5/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/constant/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/constant/polyMesh/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/system/,REGEX:.*}"
+  )
+vtk_add_test_cxx(
+  TestOpenFOAMReader.cxx
   TestProStarReader.cxx
   TestTecplotReader.cxx
-  TestAMRReadWrite.cxx
+  TestAMRReadWrite.cxx,NO_VALID
+  )
+
+vtk_add_test_cxx(NO_VALID
+  TestSimplePointsReaderWriter.cxx
   )
-endif()
 
 set(_known_little_endian FALSE)
 if (DEFINED CMAKE_WORDS_BIGENDIAN)
@@ -13,43 +33,21 @@ if (DEFINED CMAKE_WORDS_BIGENDIAN)
     set(_known_little_endian TRUE)
   endif()
 endif()
-if(VTK_LARGE_DATA_ROOT AND _known_little_endian)
-  set( WINDBLADE_TEST
-  TestWindBladeReader.cxx
-  )
-endif()
+if(VTK_USE_LARGE_DATA AND _known_little_endian)
+  # Tell ExternalData to fetch test input at build time.
+  ExternalData_Expand_Arguments(VTKData _
+    "DATA{${VTK_TEST_INPUT_DIR}/WindBladeReader/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/WindBladeReader/field/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/WindBladeReader/turbine/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/MFIXReader/,REGEX:.*}"
+    )
 
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${NEEDS_DATA}
-  ${WINDBLADE_TEST}
-  TestSimplePointsReaderWriter.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx ${WINDBLADE_TEST})
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
-
-if (WINDBLADE_TEST)
-  add_test(NAME ${vtk-module}Cxx-TestWindBladeReader
-    COMMAND ${vtk-module}CxxTests TestWindBladeReader
-    -D ${VTK_LARGE_DATA_ROOT}
-    -T ${VTK_TEST_OUTPUT_DIR}
-    -V Baseline/Parallel/TestWindBladeReader.png)
+  vtk_add_test_cxx(
+    TestWindBladeReader.cxx
+    )
+  vtk_add_test_cxx(NO_VALID
+    TestMFIXReader.cxx
+    )
 endif()
+
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/Geometry/Testing/Cxx/TestMFIXReader.cxx b/IO/Geometry/Testing/Cxx/TestMFIXReader.cxx
new file mode 100644
index 0000000..878d84b
--- /dev/null
+++ b/IO/Geometry/Testing/Cxx/TestMFIXReader.cxx
@@ -0,0 +1,183 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestMFIXReader.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include <vtkSmartPointer.h>
+#include <vtkDataSetMapper.h>
+#include <vtkActor.h>
+#include <vtkRenderWindow.h>
+#include <vtkRenderer.h>
+#include <vtkRenderWindowInteractor.h>
+#include <vtkMFIXReader.h>
+#include <vtkUnstructuredGrid.h>
+#include <vtkCellData.h>
+
+#include <vtkTestUtilities.h>
+#include <vtkTestErrorObserver.h>
+#include <vtkRegressionTestImage.h>
+
+int TestMFIXReader(int argc, char *argv[])
+{
+  // Read file name.
+  char* filename =
+    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/MFIXReader/BUB01.RES");
+
+  vtkSmartPointer<vtkTest::ErrorObserver>  errorObserver =
+    vtkSmartPointer<vtkTest::ErrorObserver>::New();
+
+  vtkSmartPointer<vtkMFIXReader> reader =
+    vtkSmartPointer<vtkMFIXReader>::New();
+  reader->AddObserver(vtkCommand::ErrorEvent, errorObserver);
+
+  // Update without a filename should cause an error
+  reader->Update();
+
+  // Check that the expected missing-file-name error was reported
+  if (errorObserver->GetError())
+    {
+    std::cout << "Caught expected error: "
+              << errorObserver->GetErrorMessage();
+    }
+  else
+    {
+    std::cout << "Failed to catch expected error regarding missing file name" << std::endl;
+    return EXIT_FAILURE;
+    }
+  errorObserver->Clear();
+
+  reader->SetFileName(filename);
+  delete [] filename;
+
+  reader->Update();
+
+  std::cout << "Testing reader with file: "
+            << reader->GetFileName() << std::endl;
+  std::cout << "There are " << reader->GetNumberOfPoints()\
+            << " number of points" << std::endl;
+  std::cout << "There are " << reader->GetNumberOfCells()\
+            << " number of cells" << std::endl;
+  std::cout << "There are " << reader->GetNumberOfCellFields()
+            << " number of cell fields" << std::endl;
+  reader->SetTimeStep(reader->GetNumberOfTimeSteps()/2);
+  std::cout << "The timestep is  " << reader->GetTimeStep() << std::endl;
+  reader->SetTimeStepRange (0, reader->GetNumberOfTimeSteps() - 1);
+  std::cout << "The time step range is: "
+            << reader->GetTimeStepRange()[0] << " to "
+            << reader->GetTimeStepRange()[1]
+            << std::endl;
+  // Exercise Cell Arrays
+
+  // 1) Default array settings
+  int numberOfCellArrays = reader->GetNumberOfCellArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfCellArrays; ++i)
+    {
+    const char * name = reader->GetCellArrayName(i);
+    std::cout << "  Cell Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetCellArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  // 2) Disable one array
+  std::cout << "----- Disable one array" << std::endl;
+  const char * arrayName = reader->GetCellArrayName(0);
+  reader->SetCellArrayStatus(arrayName, 0);
+  if (reader->GetCellArrayStatus(arrayName) != 0)
+    {
+    std::cout << "ERROR:  Cell Array: " << "0"
+              << " is named " << arrayName
+              << " and should be disabled"
+              << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  // 3) Disable all arrays
+  std::cout << "----- Disable all arrays" << std::endl;
+  reader->DisableAllCellArrays();
+  for (int i = 0; i < numberOfCellArrays; ++i)
+    {
+    const char * name = reader->GetCellArrayName(i);
+    if (reader->GetCellArrayStatus(name) != 0)
+      {
+      std::cout << "ERROR: " << "  Cell Array: " << i
+                << " is named " << name
+                << " and should be disabled"
+                << std::endl;
+      return EXIT_FAILURE;
+      }
+    }
+
+  // 4) Enable one array
+  std::cout << "----- Enable one array" << std::endl;
+  arrayName = reader->GetCellArrayName(0);
+  reader->SetCellArrayStatus(arrayName, 1);
+  if (reader->GetCellArrayStatus(arrayName) != 1)
+    {
+    std::cout << "ERROR:  Cell Array: " << "0"
+              << " is named " << arrayName
+              << " and should be disabled"
+              << std::endl;
+    return EXIT_FAILURE;
+    }
+
+  // 5) Enable all arrays
+  std::cout << "----- Enable all arrays" << std::endl;
+  reader->EnableAllCellArrays();
+  for (int i = 0; i < numberOfCellArrays; ++i)
+    {
+    const char * name = reader->GetCellArrayName(i);
+    if (reader->GetCellArrayStatus(name) != 1)
+      {
+      std::cout << "ERROR: " << "  Cell Array: " << i
+                << " is named " << name
+                << " and should be enabled"
+                << std::endl;
+      return EXIT_FAILURE;
+      }
+    }
+
+  reader->Print(std::cout);
+
+  // Visualize
+  vtkSmartPointer<vtkDataSetMapper> mapper =
+    vtkSmartPointer<vtkDataSetMapper>::New();
+  mapper->SetInputConnection(reader->GetOutputPort());
+  mapper->SetScalarRange(reader->GetOutput()->GetScalarRange());
+  vtkSmartPointer<vtkActor> actor =
+    vtkSmartPointer<vtkActor>::New();
+  actor->SetMapper(mapper);
+
+  vtkSmartPointer<vtkRenderer> renderer =
+    vtkSmartPointer<vtkRenderer>::New();
+  vtkSmartPointer<vtkRenderWindow> renderWindow =
+    vtkSmartPointer<vtkRenderWindow>::New();
+  renderWindow->AddRenderer(renderer);
+  vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  renderWindowInteractor->SetRenderWindow(renderWindow);
+
+  renderer->AddActor(actor);
+  renderer->SetBackground(.2, .4, .6);
+
+  renderWindow->Render();
+  int retVal = vtkRegressionTestImage( renderWindow );
+
+  if ( retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    renderWindowInteractor->Start();
+    }
+
+  return EXIT_SUCCESS;
+}
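
The loops above exercise the reader's cell-array selection API (GetNumberOfCellArrays, GetCellArrayName, Get/SetCellArrayStatus, Disable/EnableAllCellArrays). A small illustrative helper along the same lines, with the function name and the wanted-set argument being assumptions, could look like:

#include <vtkMFIXReader.h>

#include <set>
#include <string>

void EnableOnlyCellArrays(vtkMFIXReader* reader,
                          const std::set<std::string>& wanted)
{
  reader->DisableAllCellArrays();
  const int numberOfCellArrays = reader->GetNumberOfCellArrays();
  for (int i = 0; i < numberOfCellArrays; ++i)
    {
    const char* name = reader->GetCellArrayName(i);
    if (name && wanted.count(name) > 0)
      {
      reader->SetCellArrayStatus(name, 1);
      }
    }
  // Re-execute the pipeline afterwards, e.g. reader->Update().
}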
diff --git a/IO/Geometry/Testing/Cxx/TestOpenFOAMReader.cxx b/IO/Geometry/Testing/Cxx/TestOpenFOAMReader.cxx
new file mode 100644
index 0000000..56bb68d
--- /dev/null
+++ b/IO/Geometry/Testing/Cxx/TestOpenFOAMReader.cxx
@@ -0,0 +1,138 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestOpenFOAMReader.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkSmartPointer.h>
+#include <vtkActor.h>
+#include <vtkProperty.h>
+#include <vtkUnstructuredGrid.h>
+#include <vtkPointData.h>
+#include <vtkCellData.h>
+#include <vtkDataSetMapper.h>
+#include <vtkOpenFOAMReader.h>
+#include <vtkMultiBlockDataSet.h>
+#include <vtkRenderWindow.h>
+#include <vtkRenderWindowInteractor.h>
+#include <vtkRenderer.h>
+
+#include <vtkTestUtilities.h>
+#include <vtkRegressionTestImage.h>
+
+int TestOpenFOAMReader(int argc, char* argv[])
+{
+  // Read file name.
+  char* filename =
+    vtkTestUtilities::ExpandDataFileName(argc, argv,
+                                         "Data/OpenFOAM/cavity/cavity.foam");
+
+  // Read the file
+  vtkSmartPointer<vtkOpenFOAMReader> reader =
+    vtkSmartPointer<vtkOpenFOAMReader>::New();
+  reader->SetFileName(filename);
+  delete [] filename;
+  reader->Update();
+  reader->SetTimeValue(.5);
+//  reader->CreateCellToPointOn();
+  reader->ReadZonesOn();
+  reader->Update();
+  reader->Print(std::cout);
+  reader->GetOutput()->Print(std::cout);
+  reader->GetOutput()->GetBlock(0)->Print(std::cout);
+
+  // 1) Default array settings
+  int numberOfCellArrays = reader->GetNumberOfCellArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfCellArrays; ++i)
+    {
+    const char * name = reader->GetCellArrayName(i);
+    std::cout << "  Cell Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetCellArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  int numberOfPointArrays = reader->GetNumberOfPointArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfPointArrays; ++i)
+    {
+    const char * name = reader->GetPointArrayName(i);
+    std::cout << "  Point Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetPointArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  int numberOfLagrangianArrays = reader->GetNumberOfLagrangianArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfLagrangianArrays; ++i)
+    {
+    const char * name = reader->GetLagrangianArrayName(i);
+    std::cout << "  Lagrangian Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetLagrangianArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  int numberOfPatchArrays = reader->GetNumberOfPatchArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfPatchArrays; ++i)
+    {
+    const char * name = reader->GetPatchArrayName(i);
+    std::cout << "  Patch Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetPatchArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  vtkUnstructuredGrid *block0 = vtkUnstructuredGrid::SafeDownCast(reader->GetOutput()->GetBlock(0));
+  block0->GetCellData()->SetActiveScalars("p");
+  std::cout << "Scalar range: "
+            << block0->GetCellData()->GetScalars()->GetRange()[0] << ", "
+            << block0->GetCellData()->GetScalars()->GetRange()[1] << std::endl;
+
+  // Visualize
+  vtkSmartPointer<vtkDataSetMapper> mapper =
+    vtkSmartPointer<vtkDataSetMapper>::New();
+  mapper->SetInputData(block0);
+  mapper->SetScalarRange(block0->GetScalarRange());
+
+  vtkSmartPointer<vtkActor> actor =
+    vtkSmartPointer<vtkActor>::New();
+  actor->SetMapper(mapper);
+
+  vtkSmartPointer<vtkRenderer> renderer =
+    vtkSmartPointer<vtkRenderer>::New();
+  vtkSmartPointer<vtkRenderWindow> renderWindow =
+    vtkSmartPointer<vtkRenderWindow>::New();
+  renderWindow->AddRenderer(renderer);
+  vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  renderWindowInteractor->SetRenderWindow(renderWindow);
+
+  renderer->AddActor(actor);
+  renderer->SetBackground(.2, .4, .6);
+
+  renderWindow->Render();
+
+  int retVal = vtkRegressionTestImage( renderWindow );
+  if ( retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    renderWindowInteractor->Start();
+    }
+
+  return EXIT_SUCCESS;
+}
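
The test above processes block 0 of the reader's multiblock output directly. An illustrative sketch that walks all top-level blocks and handles only the unstructured-grid leaves (the function name is an assumption; nested multiblock children are ignored here) could look like:

#include <vtkMultiBlockDataSet.h>
#include <vtkOpenFOAMReader.h>
#include <vtkUnstructuredGrid.h>

#include <iostream>

void PrintUnstructuredBlockSizes(vtkOpenFOAMReader* reader)
{
  vtkMultiBlockDataSet* output = reader->GetOutput();
  for (unsigned int b = 0; b < output->GetNumberOfBlocks(); ++b)
    {
    vtkUnstructuredGrid* grid =
      vtkUnstructuredGrid::SafeDownCast(output->GetBlock(b));
    if (grid)
      {
      std::cout << "Block " << b << " has " << grid->GetNumberOfCells()
                << " cells" << std::endl;
      }
    }
}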
diff --git a/IO/Geometry/Testing/Cxx/TestWindBladeReader.cxx b/IO/Geometry/Testing/Cxx/TestWindBladeReader.cxx
index 095b5b6..2e8f668 100644
--- a/IO/Geometry/Testing/Cxx/TestWindBladeReader.cxx
+++ b/IO/Geometry/Testing/Cxx/TestWindBladeReader.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 // .NAME Test of vtkWindBladeReader
 // .SECTION Description
-// Tests the vtkWindBladeReader.  Requires VTKLargeData to be set.
+// Tests the vtkWindBladeReader.
 
 #include "vtkWindBladeReader.h"
 
diff --git a/IO/Geometry/Testing/Data/Baseline/ParticleReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/ParticleReader.png.md5
new file mode 100644
index 0000000..9eafb93
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/ParticleReader.png.md5
@@ -0,0 +1 @@
+9fb4ca2b757c827fd72b65e6cf43bcab
diff --git a/IO/Geometry/Testing/Data/Baseline/Plot3DScalars.png.md5 b/IO/Geometry/Testing/Data/Baseline/Plot3DScalars.png.md5
new file mode 100644
index 0000000..b5af6a7
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/Plot3DScalars.png.md5
@@ -0,0 +1 @@
+e22b59f0707dd6951559230cc1264098
diff --git a/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_1.png.md5 b/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_1.png.md5
new file mode 100644
index 0000000..c92b076
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_1.png.md5
@@ -0,0 +1 @@
+bbbdc06e8a0db49fd208a17412ab9e27
diff --git a/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_2.png.md5 b/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_2.png.md5
new file mode 100644
index 0000000..411f25a
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_2.png.md5
@@ -0,0 +1 @@
+46e071fe5742a297ecca0aa698c3226f
diff --git a/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_3.png.md5 b/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_3.png.md5
new file mode 100644
index 0000000..ef3421d
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/Plot3DScalars_3.png.md5
@@ -0,0 +1 @@
+c9db923fa3c93c9ccf22f9a320c63867
diff --git a/IO/Geometry/Testing/Data/Baseline/Plot3DVectors.png.md5 b/IO/Geometry/Testing/Data/Baseline/Plot3DVectors.png.md5
new file mode 100644
index 0000000..e18c7b0
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/Plot3DVectors.png.md5
@@ -0,0 +1 @@
+9386d3ebbdb405ee2fec6992e9e720a0
diff --git a/IO/Geometry/Testing/Data/Baseline/TestAVSucdReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestAVSucdReader.png.md5
new file mode 100644
index 0000000..e3eff6a
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestAVSucdReader.png.md5
@@ -0,0 +1 @@
+51773b530045cbb979d218bf1528ed79
diff --git a/IO/Geometry/Testing/Data/Baseline/TestChacoReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestChacoReader.png.md5
new file mode 100644
index 0000000..8c8f846
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestChacoReader.png.md5
@@ -0,0 +1 @@
+610286ea8dae7dcf956e1e370a541efd
diff --git a/IO/Geometry/Testing/Data/Baseline/TestGAMBITReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestGAMBITReader.png.md5
new file mode 100644
index 0000000..515178c
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestGAMBITReader.png.md5
@@ -0,0 +1 @@
+fc7624c9cec2158a58f996dd8fdd2edd
diff --git a/IO/Geometry/Testing/Data/Baseline/TestOpenFOAMReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestOpenFOAMReader.png.md5
new file mode 100644
index 0000000..565e62c
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestOpenFOAMReader.png.md5
@@ -0,0 +1 @@
+420ec407305c636946d9730b1c010a2e
diff --git a/IO/Geometry/Testing/Data/Baseline/TestPlot3DMeta.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestPlot3DMeta.png.md5
new file mode 100644
index 0000000..d3e9474
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestPlot3DMeta.png.md5
@@ -0,0 +1 @@
+d55bdad85eb176b194f8a6db8937e00c
diff --git a/IO/Geometry/Testing/Data/Baseline/TestPolygonWriters.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestPolygonWriters.png.md5
new file mode 100644
index 0000000..2c96b70
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestPolygonWriters.png.md5
@@ -0,0 +1 @@
+1f8e9d5a0ea22f69ce6c9572622e2e7c
diff --git a/IO/Geometry/Testing/Data/Baseline/TestProStarReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestProStarReader.png.md5
new file mode 100644
index 0000000..ae05910
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestProStarReader.png.md5
@@ -0,0 +1 @@
+654816e3a1291918b9b647b4e4c5e5e0
diff --git a/IO/Geometry/Testing/Data/Baseline/TestSimplePointsReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestSimplePointsReader.png.md5
new file mode 100644
index 0000000..0e4cb65
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestSimplePointsReader.png.md5
@@ -0,0 +1 @@
+566fc4862f270d1afc0482301a0b0652
diff --git a/IO/Geometry/Testing/Data/Baseline/TestTecplotReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestTecplotReader.png.md5
new file mode 100644
index 0000000..df20cf9
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestTecplotReader.png.md5
@@ -0,0 +1 @@
+49c62e684a81be57bdfc128593c35e4e
diff --git a/IO/Geometry/Testing/Data/Baseline/TestTecplotReader_1.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestTecplotReader_1.png.md5
new file mode 100644
index 0000000..32946cd
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestTecplotReader_1.png.md5
@@ -0,0 +1 @@
+6b5a6a2a3f5ebf54b20eef9be30c107e
diff --git a/IO/Geometry/Testing/Data/Baseline/TestWindBladeReader.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestWindBladeReader.png.md5
new file mode 100644
index 0000000..74a7843
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestWindBladeReader.png.md5
@@ -0,0 +1 @@
+4f8e2ccd32864a9c79261b8ec85f944e
diff --git a/IO/Geometry/Testing/Data/Baseline/TestWindBladeReader_1.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestWindBladeReader_1.png.md5
new file mode 100644
index 0000000..0ad95f5
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestWindBladeReader_1.png.md5
@@ -0,0 +1 @@
+346e35b1c0eeee3dde7544d329542236
diff --git a/IO/Geometry/Testing/Data/Baseline/TestXYZMol.png.md5 b/IO/Geometry/Testing/Data/Baseline/TestXYZMol.png.md5
new file mode 100644
index 0000000..0a666a8
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/TestXYZMol.png.md5
@@ -0,0 +1 @@
+0e19df7fbd8949f8dcc74124119173ec
diff --git a/IO/Geometry/Testing/Data/Baseline/caffeine.png.md5 b/IO/Geometry/Testing/Data/Baseline/caffeine.png.md5
new file mode 100644
index 0000000..2a47469
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/caffeine.png.md5
@@ -0,0 +1 @@
+6388fee9e45d1cb7ea2137df8bd0d394
diff --git a/IO/Geometry/Testing/Data/Baseline/motor.png.md5 b/IO/Geometry/Testing/Data/Baseline/motor.png.md5
new file mode 100644
index 0000000..783fec7
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/motor.png.md5
@@ -0,0 +1 @@
+d1f4f614214cd05079c3c99df71dcc6e
diff --git a/IO/Geometry/Testing/Data/Baseline/testHexaPenta.png.md5 b/IO/Geometry/Testing/Data/Baseline/testHexaPenta.png.md5
new file mode 100644
index 0000000..f1fe4d3
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/testHexaPenta.png.md5
@@ -0,0 +1 @@
+fce0f8815865fafac59ce35ff9474d8a
diff --git a/IO/Geometry/Testing/Data/Baseline/testHexaPenta_1.png.md5 b/IO/Geometry/Testing/Data/Baseline/testHexaPenta_1.png.md5
new file mode 100644
index 0000000..89611b2
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/testHexaPenta_1.png.md5
@@ -0,0 +1 @@
+fc6ff5c2e59119d09cdec79714fc055f
diff --git a/IO/Geometry/Testing/Data/Baseline/testHexaPenta_2.png.md5 b/IO/Geometry/Testing/Data/Baseline/testHexaPenta_2.png.md5
new file mode 100644
index 0000000..b416e58
--- /dev/null
+++ b/IO/Geometry/Testing/Data/Baseline/testHexaPenta_2.png.md5
@@ -0,0 +1 @@
+5255bc43fac752608859c93382659026
diff --git a/IO/Geometry/Testing/Python/CMakeLists.txt b/IO/Geometry/Testing/Python/CMakeLists.txt
index 424b3b6..df342de 100644
--- a/IO/Geometry/Testing/Python/CMakeLists.txt
+++ b/IO/Geometry/Testing/Python/CMakeLists.txt
@@ -1,16 +1,14 @@
-if (VTK_DATA_ROOT)
-  add_test_python(ParticleReader.py IO)
-  add_test_python(Plot3D IO)
-  add_test_python(Plot3DScalars.py IO)
-  add_test_python(Plot3DVectors.py IO)
-  add_test_python(TestAVSucdReader.py IO)
-  add_test_python(TestGAMBITReader.py IO)
-  # add_test_python(TestPolygonWriters.py IO) # needs vtkCGMWriter (not built)
-  add_test_python(TestXYZMol.py IO)
-  add_test_python(caffeine.py IO)
-  add_test_python(motor.py Graphics)
-  add_test_python(testHexaPenta.py Graphics)
-  add_test_python(TestChacoReader.py IO)
-  add_test_python(TestSimplePointsReader.py IO)
-  add_test_python(TestPlot3DMeta.py IO)
-endif ()
+vtk_add_test_python(ParticleReader.py)
+vtk_add_test_python(Plot3D NO_VALID)
+vtk_add_test_python(Plot3DScalars.py)
+vtk_add_test_python(Plot3DVectors.py)
+vtk_add_test_python(TestAVSucdReader.py)
+vtk_add_test_python(TestGAMBITReader.py)
+# vtk_add_test_python(TestPolygonWriters.py) # needs vtkCGMWriter (not built)
+vtk_add_test_python(TestXYZMol.py)
+vtk_add_test_python(caffeine.py)
+vtk_add_test_python(motor.py)
+vtk_add_test_python(testHexaPenta.py)
+vtk_add_test_python(TestChacoReader.py)
+vtk_add_test_python(TestSimplePointsReader.py)
+vtk_add_test_python(TestPlot3DMeta.py)
diff --git a/IO/Geometry/Testing/Tcl/CMakeLists.txt b/IO/Geometry/Testing/Tcl/CMakeLists.txt
index 26e45c6..9cd82bc 100644
--- a/IO/Geometry/Testing/Tcl/CMakeLists.txt
+++ b/IO/Geometry/Testing/Tcl/CMakeLists.txt
@@ -1,14 +1,12 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(ParticleReader IO)
-  add_test_tcl(Plot3DScalars IO)
-  add_test_tcl(Plot3DVectors IO)
-  add_test_tcl(TestAVSucdReader IO)
-  add_test_tcl(TestChacoReader IO)
-  add_test_tcl(TestGAMBITReader IO)
-  add_test_tcl(TestPolygonWriters IO)
-  add_test_tcl(TestSimplePointsReader IO)
-  add_test_tcl(TestXYZMol IO)
-  add_test_tcl(caffeine IO)
-  add_test_tcl(motor Graphics)
-  add_test_tcl(testHexaPenta Graphics)
-endif()
+vtk_add_test_tcl(ParticleReader)
+vtk_add_test_tcl(Plot3DScalars)
+vtk_add_test_tcl(Plot3DVectors)
+vtk_add_test_tcl(TestAVSucdReader)
+vtk_add_test_tcl(TestChacoReader)
+vtk_add_test_tcl(TestGAMBITReader)
+vtk_add_test_tcl(TestPolygonWriters)
+vtk_add_test_tcl(TestSimplePointsReader)
+vtk_add_test_tcl(TestXYZMol)
+vtk_add_test_tcl(caffeine)
+vtk_add_test_tcl(motor)
+vtk_add_test_tcl(testHexaPenta)
diff --git a/IO/Geometry/module.cmake b/IO/Geometry/module.cmake
index 4d3c5c8..bbcb615 100644
--- a/IO/Geometry/module.cmake
+++ b/IO/Geometry/module.cmake
@@ -6,8 +6,10 @@ vtk_module(vtkIOGeometry
     vtkCommonSystem
     vtkCommonMisc
     vtkIOCore
+  PRIVATE_DEPENDS
     vtkzlib
     vtkjsoncpp
+    vtksys
   TEST_DEPENDS
     vtkIOAMR
     vtkIOLegacy
diff --git a/IO/Geometry/vtkChacoReader.cxx b/IO/Geometry/vtkChacoReader.cxx
index 278059a..e49023a 100644
--- a/IO/Geometry/vtkChacoReader.cxx
+++ b/IO/Geometry/vtkChacoReader.cxx
@@ -424,8 +424,8 @@ int vtkChacoReader::ReadFile(vtkUnstructuredGrid* output)
   if (!memoryOK)
     {
     vtkErrorMacro(<< "ReadFile memory allocation failure");
-    if (x) delete [] x;
-    if (y) delete [] y;
+    delete [] x;
+    delete [] y;
     return 0;
     }
 
@@ -435,9 +435,9 @@ int vtkChacoReader::ReadFile(vtkUnstructuredGrid* output)
 
   if (rc == 0)
     {
-    if (x) delete [] x;
-    if (y) delete [] y;
-    if (z) delete [] z;
+    delete [] x;
+    delete [] y;
+    delete [] z;
     return 0;
     }
 
@@ -465,9 +465,9 @@ int vtkChacoReader::ReadFile(vtkUnstructuredGrid* output)
 
   output->SetPoints(ptarray);
 
-  if (x) delete [] x;
-  if (y) delete [] y;
-  if (z) delete [] z;
+  delete [] x;
+  delete [] y;
+  delete [] z;
   ptarray->Delete();
 
   // Read in cell topology and possibly cell and point weights.
diff --git a/IO/Geometry/vtkFLUENTReader.cxx b/IO/Geometry/vtkFLUENTReader.cxx
index f2a326f..db24fec 100644
--- a/IO/Geometry/vtkFLUENTReader.cxx
+++ b/IO/Geometry/vtkFLUENTReader.cxx
@@ -2666,7 +2666,7 @@ void vtkFLUENTReader::GetCellsAscii()
       vtksys_ios::stringstream pdatastream(pdata);
       for (int i = firstIndex; i <=lastIndex; i++)
         {
-        pdatastream >> this->Cells->value[i].type;
+        pdatastream >> this->Cells->value[i-1].type;
         this->Cells->value[i-1].zone = zoneId;
         this->Cells->value[i-1].parent = 0;
         this->Cells->value[i-1].child  = 0;
diff --git a/IO/Geometry/vtkFLUENTReader.h b/IO/Geometry/vtkFLUENTReader.h
index 2ac5d49..bce81bd 100644
--- a/IO/Geometry/vtkFLUENTReader.h
+++ b/IO/Geometry/vtkFLUENTReader.h
@@ -167,7 +167,7 @@ protected:
   virtual void                   GetInterfaceFaceParentsBinary();
   virtual void                   GetNonconformalGridInterfaceFaceInformationAscii();
   virtual void                   GetNonconformalGridInterfaceFaceInformationBinary();
-  virtual void                   GetPartitionInfo() {};
+  virtual void                   GetPartitionInfo() {}
   virtual void                   CleanCells();
   virtual void                   PopulateCellNodes();
   virtual int                    GetCaseBufferInt(int ptr);
diff --git a/IO/Geometry/vtkMFIXReader.cxx b/IO/Geometry/vtkMFIXReader.cxx
index 13b697a..9c53384 100644
--- a/IO/Geometry/vtkMFIXReader.cxx
+++ b/IO/Geometry/vtkMFIXReader.cxx
@@ -12,9 +12,9 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
-// Thanks to Phil Nicoletti and Brian Dotson at the National Energy
-// Technology Laboratory who developed this class.
-// Please address all comments to Brian Dotson (brian.dotson at netl.doe.gov)
+// Thanks to Phil Nicoletti, Terry Jordan and Brian Dotson at the
+// National Energy Technology Laboratory who developed this class.
+// Please address all comments to Terry Jordan (terry.jordan at netl.doe.gov)
 //
 
 #include "vtkMFIXReader.h"
@@ -74,7 +74,6 @@ vtkMFIXReader::vtkMFIXReader()
   this->FileExtension[10] = 'B';
   this->VersionNumber = 0;
 
-  this->CellDataArray = NULL;
   this->CellDataArraySelection = vtkDataArraySelection::New();
   this->Points = vtkPoints::New();
   this->Mesh = vtkUnstructuredGrid::New();
@@ -118,10 +117,14 @@ vtkMFIXReader::~vtkMFIXReader()
     delete [] this->FileName;
     }
 
+  if( this->CellDataArray )
+  {
   for (int j = 0; j <= this->VariableNames->GetMaxId(); j++)
     {
     this->CellDataArray[j]->Delete();
     }
+    delete [] this->CellDataArray;
+  }
 
   this->CellDataArraySelection->Delete();
   this->Points->Delete();
@@ -149,12 +152,6 @@ vtkMFIXReader::~vtkMFIXReader()
   this->Maximum->Delete();
   this->VectorLength->Delete();
   this->SPXTimestepIndexTable->Delete();
-
-  if (this->CellDataArray)
-    {
-    delete [] this->CellDataArray;
-    }
-
 }
 
 //----------------------------------------------------------------------------
@@ -935,6 +932,7 @@ void vtkMFIXReader::GetBlockOfFloats(istream& in, vtkFloatArray *v, int n)
     numberOfRecords = 1 + n/numberOfFloatsInBlock;
     }
 
+  bool modified = false;
   int c = 0;
   int cnt = 0;
   for (int i=0; i<numberOfRecords; ++i)
@@ -950,11 +948,17 @@ void vtkMFIXReader::GetBlockOfFloats(istream& in, vtkFloatArray *v, int n)
           {
           v->InsertValue(cnt, temp);
           cnt++;
+          modified = true;
           }
         ++c;
         }
       }
     }
+
+  if (modified)
+    {
+    v->Modified();
+    }
 }
 
 //----------------------------------------------------------------------------
@@ -1595,6 +1599,14 @@ void vtkMFIXReader::ReadRestartFile()
       this->BkEpsilon = true;
       }
     }
+  if (this->VersionNumber >= 1.7999)
+    {
+    for( int i = 0; i < this->MMAX; ++i)
+      {
+      this->SkipBytes(in,512);
+      }
+    }
+  in.close();
 }
 
 //----------------------------------------------------------------------------
@@ -1916,6 +1928,7 @@ void vtkMFIXReader::CreateVariableNames()
       {
       this->SpxFileExists->InsertValue(i, 0);
       }
+    in.close();
     }
 }
 
@@ -2035,7 +2048,7 @@ void vtkMFIXReader::GetTimeSteps()
         case 7:
           {
           numberOfVariables = this->NMax->GetValue(0);
-          for (int m=0; m<this->MMAX; ++m)
+          for (int m=1; m<=this->MMAX; ++m)
             {
             numberOfVariables += this->NMax->GetValue(m);
             }
@@ -2073,6 +2086,7 @@ void vtkMFIXReader::GetTimeSteps()
         cnt++;
         }
       }
+    in.close();
     }
 }
 
@@ -2181,6 +2195,7 @@ void vtkMFIXReader::GetVariableAtTimestep(int vari , int tstep,
 #endif
   in.seekg(nBytesSkip,ios::beg);
   this->GetBlockOfFloats (in, v, this->IJKMaximum2);
+  in.close();
 }
 
 //----------------------------------------------------------------------------
@@ -2228,7 +2243,7 @@ void vtkMFIXReader::GetNumberOfVariablesInSPXFiles()
   int skip = 0;
   for (int j=1; j<this->NumberOfSPXFilesUsed; j++)
     {
-    for(int i=0;i<this->VariableNames->GetMaxId()+1;i++)
+    for(int i=0;i<=this->VariableNames->GetMaxId();i++)
       {
       if ((this->VariableIndexToSPX->GetValue(i) == j)
         && (this->VariableComponents->GetValue(i) == 1))
@@ -2254,6 +2269,7 @@ void vtkMFIXReader::FillVectorVariable( int xindex, int yindex,
     v->InsertComponent(i, 1, this->CellDataArray[yindex]->GetValue(i));
     v->InsertComponent(i, 2, this->CellDataArray[zindex]->GetValue(i));
     }
+  v->Modified();
 }
 
 //----------------------------------------------------------------------------
@@ -2395,5 +2411,6 @@ void vtkMFIXReader::GetAllTimes(vtkInformationVector *outputVector)
   timeRange[1] = steps[this->NumberOfTimeSteps - 1];
   outInfo->Set(vtkStreamingDemandDrivenPipeline::TIME_RANGE(), timeRange, 2);
 
+  tfile.close();
   delete [] steps;
 }
diff --git a/IO/Geometry/vtkMoleculeReaderBase.cxx b/IO/Geometry/vtkMoleculeReaderBase.cxx
index a012d48..b980d05 100644
--- a/IO/Geometry/vtkMoleculeReaderBase.cxx
+++ b/IO/Geometry/vtkMoleculeReaderBase.cxx
@@ -29,6 +29,7 @@
 #include "vtkPointLocator.h"
 #include "vtkNew.h"
 
+#include <algorithm>
 #include <ctype.h>
 
 
@@ -420,9 +421,9 @@ int vtkMoleculeReaderBase::MakeBonds(vtkPoints *newPts,
                                      vtkIdTypeArray *atype,
                                      vtkCellArray *newBonds)
 {
-  register int i, j, k;
-  register int nbonds;
-  register double dx, dy, dz;
+  int i, j, k;
+  int nbonds;
+  double dx, dy, dz;
   double max, dist, radius;
   double X[3], Y[3];
   vtkIdType bond[2];
diff --git a/IO/Geometry/vtkMultiBlockPLOT3DReader.cxx b/IO/Geometry/vtkMultiBlockPLOT3DReader.cxx
index 8e41e00..efdc780 100644
--- a/IO/Geometry/vtkMultiBlockPLOT3DReader.cxx
+++ b/IO/Geometry/vtkMultiBlockPLOT3DReader.cxx
@@ -106,6 +106,8 @@ public:
 
 vtkMultiBlockPLOT3DReader::vtkMultiBlockPLOT3DReader()
 {
+  this->Internal = new vtkMultiBlockPLOT3DReaderInternals;
+
   this->XYZFileName = NULL;
   this->QFileName = NULL;
   this->FunctionFileName = NULL;
@@ -131,8 +133,6 @@ vtkMultiBlockPLOT3DReader::vtkMultiBlockPLOT3DReader()
   this->SetVectorFunctionNumber(202);
 
   this->SetNumberOfInputPorts(0);
-
-  this->Internal = new vtkMultiBlockPLOT3DReaderInternals;
 }
 
 vtkMultiBlockPLOT3DReader::~vtkMultiBlockPLOT3DReader()
@@ -2814,3 +2814,11 @@ void vtkMultiBlockPLOT3DReader::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Auto Detect Format: " << this->AutoDetectFormat << endl;
 }
 
+void vtkMultiBlockPLOT3DReader::Modified()
+{
+  this->Superclass::Modified();
+  if (this->Internal)
+    {
+    this->Internal->NeedToCheckXYZFile = true;
+    }
+}
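
Moving the Internal allocation to the top of the constructor appears intended to let setters called later in the constructor safely reach the cache from the new Modified() override. A simplified, VTK-free sketch of that cache-invalidation pattern (all names here are illustrative, not VTK API):

#include <string>

struct MetaCache
{
  MetaCache() : NeedToCheckXYZFile(true) {}
  bool NeedToCheckXYZFile;
};

class SketchReader
{
public:
  SketchReader() : Internal(new MetaCache), XYZFileName("")
  {
    // Setters called here may already invoke Modified(), because the
    // cache has been allocated first.
    this->SetXYZFileName("default.xyz");
  }
  virtual ~SketchReader() { delete this->Internal; }

  virtual void Modified()
  {
    // Any parameter change marks the cached metadata as stale so the
    // next update re-reads the file header.
    if (this->Internal)
      {
      this->Internal->NeedToCheckXYZFile = true;
      }
  }

  void SetXYZFileName(const std::string& name)
  {
    this->XYZFileName = name;
    this->Modified();
  }

private:
  MetaCache* Internal;
  std::string XYZFileName;
};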
diff --git a/IO/Geometry/vtkMultiBlockPLOT3DReader.h b/IO/Geometry/vtkMultiBlockPLOT3DReader.h
index bda7426..8b1a4a1 100644
--- a/IO/Geometry/vtkMultiBlockPLOT3DReader.h
+++ b/IO/Geometry/vtkMultiBlockPLOT3DReader.h
@@ -232,6 +232,11 @@ public:
   // for binary files.
   virtual int CanReadBinaryFile(const char* fname);
 
+  // Description:
+  // Overridden to make sure that RequestInformation re-reads the meta-data
+  // after the reader parameters have changed.
+  virtual void Modified();
+
 //BTX
   enum
   {
diff --git a/IO/Geometry/vtkOpenFOAMReader.cxx b/IO/Geometry/vtkOpenFOAMReader.cxx
index 3e0767d..e601fd3 100644
--- a/IO/Geometry/vtkOpenFOAMReader.cxx
+++ b/IO/Geometry/vtkOpenFOAMReader.cxx
@@ -426,17 +426,6 @@ private:
   typedef vtkStdString Superclass;
 
 public:
-  vtkFoamError() :
-    vtkStdString()
-  {
-  }
-  vtkFoamError(const vtkFoamError& e) :
-    vtkStdString(e)
-  {
-  }
-  ~vtkFoamError()
-  {
-  }
   // a super-easy way to make use of operator<<()'s defined in
   // vtksys_ios::ostringstream class
   template <class T> vtkFoamError& operator<<(const T& t)
diff --git a/IO/Geometry/vtkPlot3DMetaReader.cxx b/IO/Geometry/vtkPlot3DMetaReader.cxx
index 2312118..baff887 100644
--- a/IO/Geometry/vtkPlot3DMetaReader.cxx
+++ b/IO/Geometry/vtkPlot3DMetaReader.cxx
@@ -28,11 +28,7 @@
 #include <vector>
 #include <string>
 
-// Needed for Windows declspec import logic
-#if defined(WIN32) && defined(VTK_BUILD_SHARED_LIBS)
-# define JSON_DLL
-#endif
-#include <json/json.h>
+#include "vtk_jsoncpp.h"
 
 #define CALL_MEMBER_FN(object,ptrToMember)  ((object).*(ptrToMember))
 
@@ -363,7 +359,7 @@ int vtkPlot3DMetaReader::RequestInformation(
     {
     // report to the user the failure and their locations in the document.
     vtkErrorMacro("Failed to parse configuration\n"
-                  << reader.getFormattedErrorMessages().c_str());
+                  << reader.getFormatedErrorMessages().c_str());
     return 0;
     }
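
The reader now includes the bundled jsoncpp header and uses its getFormatedErrorMessages() spelling. A minimal, illustrative parse-and-report sketch against that API (the function name is an assumption) would be:

#include "vtk_jsoncpp.h"

#include <iostream>
#include <string>

bool ParsePlot3DMeta(const std::string& text, Json::Value& root)
{
  Json::Reader reader;
  if (!reader.parse(text, root))
    {
    std::cerr << "Failed to parse configuration\n"
              << reader.getFormatedErrorMessages();
    return false;
    }
  return true;
}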
 
diff --git a/IO/Geometry/vtkSESAMEReader.cxx b/IO/Geometry/vtkSESAMEReader.cxx
deleted file mode 100644
index 9bbf032..0000000
--- a/IO/Geometry/vtkSESAMEReader.cxx
+++ /dev/null
@@ -1,656 +0,0 @@
-/*
- * Copyright 2004 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-#include "vtkSESAMEReader.h"
-
-#include "vtkInformation.h"
-#include "vtkInformationVector.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-#include <vtkFloatArray.h>
-#include <vtkIntArray.h>
-#include <vtkObjectFactory.h>
-#include <vtkPointData.h>
-#include <vtkRectilinearGrid.h>
-#include <vector>
-#include <string>
-
-vtkStandardNewMacro(vtkSESAMEReader);
-
-static const int SESAME_NUM_CHARS = 512;
-static const char* TableLineFormat = "%2i%6i%6i";
-
-class vtkSESAMEReader::MyInternal
-{
-public:
-  std::string FileName;
-  FILE* File;
-  std::vector<int> TableIds;
-  std::vector<long> TableLocations;
-  vtkIdType TableId;
-  std::vector<std::string> TableArrays;
-  std::vector<int> TableArrayStatus;
-  vtkIntArray* TableIdsArray;
-
-  void ClearTables()
-    {
-    this->TableIds.clear();
-    this->TableId = -1;
-    this->TableIdsArray->Initialize();
-    this->ClearArrays();
-    }
-  void ClearArrays()
-    {
-    this->TableArrays.clear();
-    this->TableArrayStatus.clear();
-    }
-
-  MyInternal()
-    {
-    this->File = NULL;
-    this->TableId = -1;
-    this->TableIdsArray = vtkIntArray::New();
-    }
-  ~MyInternal()
-    {
-    this->TableIdsArray->Delete();
-    }
-};
-
-// structures to hold information about SESAME files
-static const int MaxTableArrays = 10;
-struct vtkSESAMETableDef
-{
-  int TableId;
-  const char* Arrays[MaxTableArrays];
-};
-
-static const vtkSESAMETableDef TableDefs[] =
-{
-    {301,
-      {"301: Total EOS (Pressure)",
-       "301: Total EOS (Energy)",
-       "301: Total EOS (Free Energy)",
-      0}  // keep 0 last
-    },
-
-    {303,
-      {"303: Total EOS (Pressure)",
-       "303: Total EOS (Energy)",
-       "303: Total EOS (Free Energy)",
-      0}  // keep 0 last
-    },
-
-    {304,
-      {"304: Electron EOS (Pressure)",
-       "304: Electron EOS (Energy)",
-       "304: Electron EOS (Free Energy)",
-       0}  // keep 0 last
-    },
-
-    {305,
-      {"305: Total EOS (Pressure)",
-       "305: Total EOS (Energy)",
-       "305: Total EOS (Free Energy)",
-      0}  // keep 0 last
-    },
-
-    {306,
-      {"306: Total EOS (Pressure)",
-       "306: Total EOS (Energy)",
-       "306: Total EOS (Free Energy)",
-      0}  // keep 0 last
-    },
-
-    {502,
-      {"502: Rosseland Mean Opacity",
-       0}  // keep 0 last
-    },
-
-    {503,
-      {"503: Electron Conductive Opacity1",
-       0}  // keep 0 last
-    },
-
-    {504,
-      {"504: Mean Ion Charge1",
-       0}  // keep 0 last
-    },
-
-    {505,
-      {"505: Planck Mean Opacity",
-       0}  // keep 0 last
-    },
-
-    {601,
-      {"601: Mean Ion Charge2",
-       0}  // keep 0 last
-    },
-
-    {602,
-      {"602: Electrical Conductivity",
-       0}  // keep 0 last
-    },
-
-    {603,
-      {"603: Thermal Conductivity",
-       0}  // keep 0 last
-    },
-
-    {604,
-      {"604: Thermoelectric Coefficient",
-       0}  // keep 0 last
-    },
-
-    {605,
-    {"605: Electron Conductive Opacity2",
-    0}  // keep 0 last
-    }
-
-};
-
-static int TableIndex(int tableId)
-{
-  // check that we got a valid table id
-  for(unsigned int i=0; i<sizeof(TableDefs)/sizeof(vtkSESAMETableDef); i++)
-    {
-    if(tableId == TableDefs[i].TableId)
-      {
-      return i;
-      }
-    }
-  return -1;
-}
-
-
-vtkSESAMEReader::vtkSESAMEReader()
-{
-  this->Internal = new MyInternal();
-  this->SetNumberOfInputPorts(0);
-}
-
-vtkSESAMEReader::~vtkSESAMEReader()
-{
-  this->CloseFile();
-  delete this->Internal;
-}
-
-int vtkSESAMEReader::IsValidFile()
-{
-  if(this->Internal->FileName.empty())
-    {
-    return 0;
-    }
-
-  // open the file
-  FILE* f = fopen(this->GetFileName(), "rb");
-  if(!f)
-    {
-    return 0;
-    }
-
-  // check that it is valid
-  int a,b,c;
-  int ret = fscanf(f, TableLineFormat, &a,&b,&c);
-  fclose(f);
-  if(ret != 3)
-    {
-    return 0;
-    }
-  return 1;
-}
-
-void vtkSESAMEReader::SetFileName(const char* file)
-{
-  if(this->Internal->FileName == file)
-    {
-    return;
-    }
-
-  this->Internal->FileName = file;
-
-  // clean out possible data from last file
-  this->Internal->ClearTables();
-  this->CloseFile();
-  this->Modified();
-}
-
-const char* vtkSESAMEReader::GetFileName()
-{
-  return this->Internal->FileName.c_str();
-}
-
-int vtkSESAMEReader::OpenFile()
-{
-  //already open
-  if(this->Internal->File)
-    {
-    return 1;
-    }
-
-  if(this->Internal->FileName.empty())
-    {
-    return 0;
-    }
-
-  // open the file
-  this->Internal->File = fopen(this->GetFileName(), "rb");
-  if(!this->Internal->File)
-    {
-    vtkErrorMacro(<<"Unable to open file " << this->GetFileName());
-    return 0;
-    }
-
-  // check that it is valid
-  int a,b,c;
-  int ret = fscanf(this->Internal->File, TableLineFormat, &a,&b,&c);
-  rewind(this->Internal->File);
-  if(ret != 3)
-    {
-    vtkErrorMacro(<<this->GetFileName() << " is not a valid SESAME file");
-    fclose(this->Internal->File);
-    this->Internal->File = NULL;
-    return 0;
-    }
-  return 1;
-}
-
-void vtkSESAMEReader::CloseFile()
-{
-  if(this->Internal->File)
-    {
-    fclose(this->Internal->File);
-    this->Internal->File = NULL;
-    }
-}
-
-int vtkSESAMEReader::GetNumberOfTableIds()
-{
-  return static_cast<int>(this->Internal->TableIds.size());
-}
-
-int* vtkSESAMEReader::GetTableIds()
-{
-  return &this->Internal->TableIds[0];
-}
-
-vtkIntArray* vtkSESAMEReader::GetTableIdsAsArray()
-{
-  this->Internal->TableIdsArray->Initialize();
-  this->Internal->TableIdsArray->SetNumberOfComponents(1);
-  int numTableIds = static_cast<int>(this->Internal->TableIds.size());
-  for (int i=0; i < numTableIds; ++i)
-    {
-    this->Internal->TableIdsArray->InsertNextValue(
-      this->Internal->TableIds[i]);
-    }
-  return this->Internal->TableIdsArray;
-}
-
-void vtkSESAMEReader::SetTable(int tableId)
-{
-  if(this->Internal->TableId != tableId)
-    {
-    if(TableIndex(tableId) != -1)
-      {
-      this->Internal->TableId = tableId;
-
-      // clean out info about the previous table
-      this->Internal->ClearArrays();
-      this->Modified();
-      }
-    }
-}
-
-int vtkSESAMEReader::GetTable()
-{
-  return this->Internal->TableId;
-}
-
-int vtkSESAMEReader::GetNumberOfTableArrayNames()
-{
-  return static_cast<int>(this->Internal->TableArrays.size());
-}
-
-const char* vtkSESAMEReader::GetTableArrayName(int index)
-{
-  int s = static_cast<int>(this->Internal->TableArrays.size());
-  if(s > index)
-    {
-    return this->Internal->TableArrays[index].c_str();
-    }
-  return NULL;
-}
-
-void vtkSESAMEReader::SetTableArrayStatus(const char* name, int flag)
-{
-  int i, numArrays;
-  numArrays = static_cast<int>(this->Internal->TableArrays.size());
-  for(i=0; i<numArrays; i++)
-    {
-    if(this->Internal->TableArrays[i] == name)
-      {
-      this->Internal->TableArrayStatus[i] = flag;
-      this->Modified();
-      }
-    }
-}
-
-int vtkSESAMEReader::GetTableArrayStatus(const char* name)
-{
-  int i, numArrays;
-  numArrays = static_cast<int>(this->Internal->TableArrays.size());
-  for(i=0; i<numArrays; i++)
-    {
-    if(this->Internal->TableArrays[i] == name)
-      {
-      return this->Internal->TableArrayStatus[i];
-      }
-    }
-  return 0;
-}
-
-
-int vtkSESAMEReader::RequestInformation(vtkInformation *,
-                                        vtkInformationVector **,
-                                        vtkInformationVector *outputVector)
-{
-  // open the file
-  if(!this->OpenFile())
-    {
-    return 1;
-    }
-
-  if(this->Internal->TableIds.empty())
-    {
-    this->Internal->TableLocations.clear();
-
-    // get the table ids
-
-    char buffer[SESAME_NUM_CHARS];
-    int dummy;
-    int internalId;
-    int tableId;
-
-    // read lines from the file the whole file
-    while( fgets(buffer, SESAME_NUM_CHARS, this->Internal->File) != NULL )
-      {
-      // see if the line matches the  " 0 9999 602" format
-      if(sscanf(buffer, TableLineFormat, &dummy, &internalId, &tableId) == 3)
-        {
-        if(TableIndex(tableId) != -1)
-          {
-          this->Internal->TableIds.push_back(tableId);
-          long loc = ftell(this->Internal->File);
-          this->Internal->TableLocations.push_back(loc);
-          }
-        }
-      }
-    }
-
-  //if(this->Internal->TableId == -1 &&
-  //   !this->Internal->TableIds.empty())
-  //  {
-  //  this->Internal->TableId = this->Internal->TableIds[0];
-  //  }
-
-  if(this->Internal->TableId != -1)
-    {
-    JumpToTable(this->Internal->TableId);
-    float v[5] = { 0.0, 0.0, 0.0, 0.0, 0.0 };
-    if ( ReadTableValueLine( &(v[0]), &(v[1]),
-                             &(v[2]), &(v[3]), &(v[4]) ) != 0)
-      {
-      // first two values are dimensions of
-      // grid
-      outputVector->GetInformationObject(0)->Set(
-        vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(),
-        0, (int)(v[0]) - 1,
-        0, (int)(v[1]) - 1,
-        0, 0 );
-      }
-    }
-
-  if(this->Internal->TableId != -1 &&
-     this->Internal->TableArrays.empty())
-    {
-    // get the names of the arrays in the table
-    int tableIndex = TableIndex(this->Internal->TableId);
-    for(int j=0; TableDefs[tableIndex].Arrays[j] != 0; j++)
-      {
-      this->Internal->TableArrays.push_back(
-                TableDefs[tableIndex].Arrays[j]);
-      this->Internal->TableArrayStatus.push_back(1);  // all arrays are on
-                                                      // by default
-      }
-    }
-  return 0;
-}
-
-int vtkSESAMEReader::JumpToTable( int toTable )
-{
-  int numIds = static_cast<int>(this->Internal->TableIds.size());
-  for(int i=0; i<numIds; i++)
-    {
-    if(this->Internal->TableIds[i] == toTable)
-      {
-      fseek(this->Internal->File, this->Internal->TableLocations[i], SEEK_SET);
-      return 1;
-      }
-    }
-
-  return 0;
-}
-
-int vtkSESAMEReader::RequestData(vtkInformation *,
-                                 vtkInformationVector **,
-                                 vtkInformationVector *outputVector)
-{
-  vtkRectilinearGrid* output = vtkRectilinearGrid::GetData(outputVector);
-  if (!this->Internal->File)
-      {
-      return 0;
-      }
-
-  // read the file
-  JumpToTable(this->Internal->TableId);
-  this->ReadTable(output);
-  return 0;
-}
-
-void vtkSESAMEReader::ReadTable(vtkRectilinearGrid* output)
-{
-  vtkFloatArray *xCoords = vtkFloatArray::New();
-  vtkFloatArray *yCoords = vtkFloatArray::New();
-  vtkFloatArray *zCoords = vtkFloatArray::New();
-
-  float v[5] = { 0.0, 0.0, 0.0, 0.0, 0.0 };
-  int datadims[2] = { 0, 0 };
-  int numRead = 0;
-  int result=0;
-
-  result=ReadTableValueLine( &(v[0]), &(v[1]), &(v[2]), &(v[3]), &(v[4]) );
-  // get the table header
-  if (result!= 0)
-    {
-    // dimensions of grid
-    datadims[0] = (int)(v[0]);
-    datadims[1] = (int)(v[1]);
-    output->SetDimensions( datadims[0], datadims[1], 1 );
-
-    // allocate space
-    xCoords->Allocate( datadims[0] );
-    yCoords->Allocate( datadims[1] );
-    zCoords->Allocate( 1 );
-    zCoords->InsertNextTuple1( 0.0 );
-
-    // the first three values are x samples Update: this only works if X has at least 3 values.
-    //xCoords->InsertNextTuple1( v[2] );
-    //xCoords->InsertNextTuple1( v[3] );
-    //xCoords->InsertNextTuple1( v[4] );
-    //numRead = 3;
-    }
-
-  unsigned int i;
-  std::vector<vtkFloatArray*> scalars;
-  for(i=0; i<this->Internal->TableArrayStatus.size(); i++)
-    {
-    vtkFloatArray* newArray = this->Internal->TableArrayStatus[i] ?
-                      vtkFloatArray::New() : NULL;
-    scalars.push_back(newArray);
-    if(newArray)
-      {
-      newArray->Allocate(datadims[0] * datadims[1]);
-      newArray->SetName(this->Internal->TableArrays[i].c_str());
-      }
-    }
-
-  unsigned int scalarIndex = 0;
-  int scalarCount = 0;
-  int readFromTable = 0;
-
-  if (result!= 0)
-  {
-    for (int k=2;k<5;k++)
-    {
-      if ( numRead < datadims[0] )
-      {
-        xCoords->InsertNextTuple1(  v[k] );
-      }
-      else if ( numRead < (datadims[0] + datadims[1]) )
-      {
-        yCoords->InsertNextTuple1(  v[k] );
-      }
-      else
-      {
-        scalarCount++;
-        if(scalarCount > datadims[0] * datadims[1])
-        {
-          scalarCount = 1;
-          scalarIndex++;
-        }
-        if(this->Internal->TableArrayStatus.size() > scalarIndex &&
-          this->Internal->TableArrayStatus[scalarIndex])
-        {
-          scalars[scalarIndex]->InsertNextTuple1(v[k]);
-        }
-      }
-      numRead++;
-    }
-  }
-
-
-  while ( (readFromTable = ReadTableValueLine( &(v[0]), &(v[1]), &(v[2]), &(v[3]),
-      &(v[4])  )) != 0)
-    {
-    for (int k=0;k<readFromTable;k++)
-      {
-      if ( numRead < datadims[0] )
-        {
-        xCoords->InsertNextTuple1(  v[k] );
-        }
-      else if ( numRead < (datadims[0] + datadims[1]) )
-        {
-        yCoords->InsertNextTuple1(  v[k] );
-        }
-      else
-        {
-        scalarCount++;
-        if(scalarCount > datadims[0] * datadims[1])
-          {
-          scalarCount = 1;
-          scalarIndex++;
-          }
-        if(this->Internal->TableArrayStatus.size() > scalarIndex &&
-           this->Internal->TableArrayStatus[scalarIndex])
-          {
-          scalars[scalarIndex]->InsertNextTuple1(v[k]);
-          }
-        }
-      numRead++;
-      }
-    }
-
-  for(i=scalarIndex+1;
-      i<this->Internal->TableArrayStatus.size();
-      i++)
-    {
-    // fill in the empty scalars with zeros
-    int max = datadims[0] * datadims[1];
-    for(int j=0; j<max; j++)
-      {
-      scalars[i]->InsertNextTuple1(0.0);
-      }
-    }
-
-  output->SetXCoordinates( xCoords );
-  output->SetYCoordinates( yCoords );
-  output->SetZCoordinates( zCoords );
-
-  output->GetPointData()->Reset();
-
-  for(i=0; i<scalars.size(); i++)
-    {
-    if(scalars[i])
-      {
-      if(scalars[i]->GetNumberOfTuples())
-        {
-        output->GetPointData()->AddArray(scalars[i]);
-        }
-      scalars[i]->Delete();
-      }
-    }
-
-  xCoords->Delete();
-  yCoords->Delete();
-  zCoords->Delete();
-
-  output->Squeeze();
-}
-
-int vtkSESAMEReader::ReadTableValueLine ( float *v1, float *v2,
-  float *v3, float *v4, float *v5)
-{
-  // by definition, a line of this file is 80 characters long
-  // when we start reading the data values, the end of the line is a tag
-  // (see note below), which we have to ignore in order to read the data
-  // properly.
-  //
-  char buffer[SESAME_NUM_CHARS + 1];
-  buffer[SESAME_NUM_CHARS] = '\0';
-  int numRead = 0;
-  if ( fgets(buffer, SESAME_NUM_CHARS, this->Internal->File) != NULL )
-    {
-    int dummy;
-    int internalId;
-    int tableId;
-
-    // see if the line matches the  " 0 9999 602" format
-    if(sscanf(buffer, TableLineFormat, &dummy, &internalId, &tableId) == 3)
-      {
-      // this is the start of a new table
-      numRead = 0;
-      }
-    else
-      {
-      // ignore the last 5 characters of the line (see notes above)
-      buffer[75] = '\0';
-      numRead = sscanf( buffer, "%e%e%e%e%e", v1, v2, v3, v4, v5);
-      }
-    }
-
-  return numRead;
-}
-
-void vtkSESAMEReader::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os,indent);
-
-  os << indent << "FileName: " << this->GetFileName() << "\n";
-  os << indent << "Table: " << this->GetTable() << "\n";
-}
-
diff --git a/IO/Geometry/vtkSESAMEReader.h b/IO/Geometry/vtkSESAMEReader.h
deleted file mode 100644
index fc70061..0000000
--- a/IO/Geometry/vtkSESAMEReader.h
+++ /dev/null
@@ -1,120 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkSESAMEReader.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkSESAMEReader - read SESAME files
-// .SECTION Description
-// vtkSESAMEReader is a source object that reads SESAME files.
-// Currently supported tables include 301, 304, 502, 503, 504, 505, 602
-//
-// SESAMEReader creates rectilinear grid datasets. The dimension of the
-// dataset depends upon the number of densities and temperatures in the table.
-// Values at certain temperatures and densities are stored as scalars.
-//
-
-#ifndef __vtkSESAMEReader_h
-#define __vtkSESAMEReader_h
-
-#include "vtkIOGeometryModule.h" // For export macro
-#include <vtkRectilinearGridAlgorithm.h>
-
-class vtkIntArray;
-
-class VTKIOGEOMETRY_EXPORT vtkSESAMEReader : public vtkRectilinearGridAlgorithm
-{
-public:
-  static vtkSESAMEReader *New();
-  vtkTypeMacro(vtkSESAMEReader, vtkRectilinearGridAlgorithm);
-
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Set the filename to read
-  void SetFileName(const char* file);
-  // Description:
-  // Get the filename to read
-  const char* GetFileName();
-
-  // Description:
-  // Return whether this is a valid file
-  int IsValidFile();
-
-  // Description:
-  // Get the number of tables in this file
-  int GetNumberOfTableIds();
-
-  // Description:
-  // Get the ids of the tables in this file
-  int* GetTableIds();
-
-  // Description:
-  // Returns the table ids in a data array.
-  vtkIntArray* GetTableIdsAsArray();
-
-  // Description:
-  // Set the table to read in
-  void SetTable(int tableId);
-  // Description:
-  // Get the table to read in
-  int GetTable();
-
-  // Description:
-  // Get the number of arrays for the table to read
-  int GetNumberOfTableArrayNames();
-
-  // Description:
-  // Get the number of arrays for the table to read
-  int GetNumberOfTableArrays()
-    { return this->GetNumberOfTableArrayNames(); }
-  // Description:
-  // Get the names of arrays for the table to read
-  const char* GetTableArrayName(int index);
-
-  // Description:
-  // Set whether to read a table array
-  void SetTableArrayStatus(const char* name, int flag);
-  int GetTableArrayStatus(const char* name);
-
-protected:
-
-  vtkSESAMEReader();
-  virtual ~vtkSESAMEReader();
-
-  //BTX
-  class MyInternal;
-  MyInternal* Internal;
-  //ETX
-
-  int OpenFile();
-  void CloseFile();
-
-  virtual int RequestData(vtkInformation *,
-                          vtkInformationVector **,
-                          vtkInformationVector *);
-  virtual int RequestInformation(vtkInformation *,
-                                 vtkInformationVector **,
-                                 vtkInformationVector *);
-
-  int ReadTableValueLine ( float *v1, float *v2, float *v3,
-      float *v4, float *v5);
-  int JumpToTable( int tableID );
-
-  void ReadTable(vtkRectilinearGrid* output);
-
-private:
-  vtkSESAMEReader(const vtkSESAMEReader&);  // Not implemented.
-  void operator=(const vtkSESAMEReader&);  // Not implemented.
-
-};
-
-#endif
diff --git a/IO/Geometry/vtkWindBladeReader.cxx b/IO/Geometry/vtkWindBladeReader.cxx
index 9128e30..f1f5e6a 100644
--- a/IO/Geometry/vtkWindBladeReader.cxx
+++ b/IO/Geometry/vtkWindBladeReader.cxx
@@ -1456,7 +1456,7 @@ void vtkWindBladeReader::InitPressureData(int pressure, int prespre,
 
 //----------------------------------------------------------------------------
 void vtkWindBladeReader::SetUpPressureData(float* pressureData, float* prespreData,
-                                           float* tempgData, float* densityData)
+                                           const float* tempgData, const float* densityData)
 {
   // Entire block of data is read so to calculate index into that data we
   // must use the entire Dimension and not the SubDimension
@@ -1497,7 +1497,7 @@ void vtkWindBladeReader::SetUpPressureData(float* pressureData, float* prespreDa
 
 //----------------------------------------------------------------------------
 void vtkWindBladeReader::SetUpVorticityData(float* uData, float* vData,
-                                            float *densityData, float* vortData)
+                                            const float *densityData, float* vortData)
 {
   // Divide U and V components by Density
   for (unsigned int i = 0; i < this->BlockSize; i++)
diff --git a/IO/Geometry/vtkWindBladeReader.h b/IO/Geometry/vtkWindBladeReader.h
index 03ec1a1..044c676 100644
--- a/IO/Geometry/vtkWindBladeReader.h
+++ b/IO/Geometry/vtkWindBladeReader.h
@@ -218,8 +218,8 @@ protected:
   void InitPressureData(int pressure, int prespre,
                         float *&pressureData, float *&prespreData);
   void SetUpPressureData(float* pressureData, float* prespreData,
-                         float* tempgData, float* densityData);
-  void SetUpVorticityData(float* uData, float* vData, float *densityData,
+                         const float* tempgData, const float* densityData);
+  void SetUpVorticityData(float* uData, float* vData, const float *densityData,
                           float* vortData);
   void InitVariableData(int var, int &numberOfComponents, float *&varData,
                         int &planeSize, int &rowSize);
diff --git a/IO/Image/Testing/Cxx/CMakeLists.txt b/IO/Image/Testing/Cxx/CMakeLists.txt
index ad6be0f..96babcb 100644
--- a/IO/Image/Testing/Cxx/CMakeLists.txt
+++ b/IO/Image/Testing/Cxx/CMakeLists.txt
@@ -1,40 +1,9 @@
-set(TEST_SRC)
-
-if (VTK_DATA_ROOT)
-  set(TEST_SRC
-    TestNrrdReader.cxx
-    )
-endif()
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   # TestImageReader2Factory.cxx   # fixme (deps not satisfied)
+  TestNrrdReader.cxx
+  )
+set(TestMetaIO_ARGS "DATA{${VTK_TEST_INPUT_DIR}/HeadMRVolume.mhd,HeadMRVolume.raw}")
+vtk_add_test_cxx(NO_DATA NO_VALID NO_OUTPUT
   TestMetaIO.cxx
-  ${TEST_SRC}
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-list(REMOVE_ITEM TestsToRun TestMetaIO.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
-
-if(VTK_DATA_ROOT)
-  add_test(NAME ${vtk-module}Cxx-MetaIO
-    COMMAND ${vtk-module}CxxTests TestMetaIO
-      ${VTK_DATA_ROOT}/Data/HeadMRVolume.mhd)
-endif()
+  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/Image/Testing/Data/Baseline/TestBMPReader.png.md5 b/IO/Image/Testing/Data/Baseline/TestBMPReader.png.md5
new file mode 100644
index 0000000..9e9db4f
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/TestBMPReader.png.md5
@@ -0,0 +1 @@
+bd925a2859dcba12c9dbf74fd91864b0
diff --git a/IO/Image/Testing/Data/Baseline/TestImageWriters.png.md5 b/IO/Image/Testing/Data/Baseline/TestImageWriters.png.md5
new file mode 100644
index 0000000..e84dd00
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/TestImageWriters.png.md5
@@ -0,0 +1 @@
+2e847e583ef678314b8cc84e1f20c462
diff --git a/IO/Image/Testing/Data/Baseline/TestJPEGReader.png.md5 b/IO/Image/Testing/Data/Baseline/TestJPEGReader.png.md5
new file mode 100644
index 0000000..9221133
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/TestJPEGReader.png.md5
@@ -0,0 +1 @@
+565fe404d6b8970501b234b66271ecd4
diff --git a/IO/Image/Testing/Data/Baseline/TestMHD.png.md5 b/IO/Image/Testing/Data/Baseline/TestMHD.png.md5
new file mode 100644
index 0000000..96af725
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/TestMHD.png.md5
@@ -0,0 +1 @@
+84a349ad6d0c0dda2ac72ddc728daea7
diff --git a/IO/Image/Testing/Data/Baseline/TestMetaImage2D.png.md5 b/IO/Image/Testing/Data/Baseline/TestMetaImage2D.png.md5
new file mode 100644
index 0000000..877e0d4
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/TestMetaImage2D.png.md5
@@ -0,0 +1 @@
+3b1db75b9e58f7ea98b0554463503e6d
diff --git a/IO/Image/Testing/Data/Baseline/TestNrrdReader.png.md5 b/IO/Image/Testing/Data/Baseline/TestNrrdReader.png.md5
new file mode 100644
index 0000000..6f314ad
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/TestNrrdReader.png.md5
@@ -0,0 +1 @@
+d315de8f6e2ed941711f5dd922b00e69
diff --git a/IO/Image/Testing/Data/Baseline/TestSetFileNames.png.md5 b/IO/Image/Testing/Data/Baseline/TestSetFileNames.png.md5
new file mode 100644
index 0000000..e7a068b
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/TestSetFileNames.png.md5
@@ -0,0 +1 @@
+609cbc65aa869990349a957b1571c93f
diff --git a/IO/Image/Testing/Data/Baseline/TestTIFFReader.png.md5 b/IO/Image/Testing/Data/Baseline/TestTIFFReader.png.md5
new file mode 100644
index 0000000..b675f88
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/TestTIFFReader.png.md5
@@ -0,0 +1 @@
+74e8de98dfbeec560fe043b0a619b3fa
diff --git a/IO/Image/Testing/Data/Baseline/dem.png.md5 b/IO/Image/Testing/Data/Baseline/dem.png.md5
new file mode 100644
index 0000000..220df3e
--- /dev/null
+++ b/IO/Image/Testing/Data/Baseline/dem.png.md5
@@ -0,0 +1 @@
+7bcb093054e0c57c318b141a33d6e195
diff --git a/IO/Image/Testing/Python/CMakeLists.txt b/IO/Image/Testing/Python/CMakeLists.txt
index 213c73d..85ff9f6 100644
--- a/IO/Image/Testing/Python/CMakeLists.txt
+++ b/IO/Image/Testing/Python/CMakeLists.txt
@@ -1,10 +1,8 @@
-if (VTK_DATA_ROOT)
-  add_test_python(TestBMPReader.py IO)
-  add_test_python(TestImageWriters.py IO)
-  add_test_python(TestJPEGReader.py IO)
-  add_test_python(TestMHD.py IO)
-  add_test_python(TestTIFFReader.py IO)
-  add_test_python(dem.py IO)
-  add_test_python(TestMetaImage2D.py IO)
-  add_test_python(TestSetFileNames.py IO)
-endif()
+vtk_add_test_python(TestBMPReader.py)
+vtk_add_test_python(TestImageWriters.py)
+vtk_add_test_python(TestJPEGReader.py)
+vtk_add_test_python(TestMHD.py)
+vtk_add_test_python(TestTIFFReader.py)
+vtk_add_test_python(dem.py)
+vtk_add_test_python(TestMetaImage2D.py)
+vtk_add_test_python(TestSetFileNames.py)
diff --git a/IO/Image/Testing/Python/TestSetFileNames.py b/IO/Image/Testing/Python/TestSetFileNames.py
index 8cee89f..b171bd8 100755
--- a/IO/Image/Testing/Python/TestSetFileNames.py
+++ b/IO/Image/Testing/Python/TestSetFileNames.py
@@ -5,7 +5,7 @@ from vtk.util.misc import vtkGetDataRoot
 VTK_DATA_ROOT = vtkGetDataRoot()
 
 globFileNames = vtk.vtkGlobFileNames()
-globFileNames.AddFileNames(VTK_DATA_ROOT + "/Data/headsq/quarter.*")
+globFileNames.AddFileNames(VTK_DATA_ROOT + "/Data/headsq/quarter.*[0-9]")
 
 sortFileNames = vtk.vtkSortFileNames()
 sortFileNames.SetInputFileNames(globFileNames.GetFileNames())
diff --git a/IO/Image/Testing/Tcl/CMakeLists.txt b/IO/Image/Testing/Tcl/CMakeLists.txt
index 29119c2..a6258f6 100644
--- a/IO/Image/Testing/Tcl/CMakeLists.txt
+++ b/IO/Image/Testing/Tcl/CMakeLists.txt
@@ -1,10 +1,8 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestBMPReader IO)
-  add_test_tcl(TestImageWriters IO)
-  add_test_tcl(TestJPEGReader IO)
-  add_test_tcl(TestMHD IO)
-  add_test_tcl(TestMetaImage2D IO)
-  add_test_tcl(TestSetFileNames IO)
-  add_test_tcl(TestTIFFReader IO)
-  add_test_tcl(dem IO)
-endif()
+vtk_add_test_tcl(TestBMPReader)
+vtk_add_test_tcl(TestImageWriters)
+vtk_add_test_tcl(TestJPEGReader)
+vtk_add_test_tcl(TestMHD)
+vtk_add_test_tcl(TestMetaImage2D)
+vtk_add_test_tcl(TestSetFileNames)
+vtk_add_test_tcl(TestTIFFReader)
+vtk_add_test_tcl(dem)
diff --git a/IO/Image/Testing/Tcl/TestSetFileNames.tcl b/IO/Image/Testing/Tcl/TestSetFileNames.tcl
index 1b8f420..70f5c0e 100644
--- a/IO/Image/Testing/Tcl/TestSetFileNames.tcl
+++ b/IO/Image/Testing/Tcl/TestSetFileNames.tcl
@@ -1,7 +1,7 @@
 package require vtk
 
 vtkGlobFileNames globFileNames
-globFileNames AddFileNames "$VTK_DATA_ROOT/Data/headsq/quarter.*"
+globFileNames AddFileNames "$VTK_DATA_ROOT/Data/headsq/quarter.*\[0-9\]"
 
 vtkSortFileNames sortFileNames
 sortFileNames SetInputFileNames [globFileNames GetFileNames]
diff --git a/IO/Image/module.cmake b/IO/Image/module.cmake
index e43268c..955a50c 100644
--- a/IO/Image/module.cmake
+++ b/IO/Image/module.cmake
@@ -9,11 +9,13 @@ vtk_module(vtkIOImage
     vtkCommonMisc
     vtkCommonTransforms
     vtkIOCore
+  PRIVATE_DEPENDS
     vtkjpeg
     vtkpng
     vtktiff
     vtkMetaIO
     vtkDICOMParser
+    vtksys
   TEST_DEPENDS
     vtkTestingCore
     vtkImagingSources
diff --git a/IO/Image/vtkBMPWriter.h b/IO/Image/vtkBMPWriter.h
index 3dcd0c2..f5f8acd 100644
--- a/IO/Image/vtkBMPWriter.h
+++ b/IO/Image/vtkBMPWriter.h
@@ -35,7 +35,7 @@ public:
 
 protected:
   vtkBMPWriter();
-  ~vtkBMPWriter() {};
+  ~vtkBMPWriter() {}
 
   virtual void WriteFile(ofstream *file, vtkImageData *data, int ext[6], int wExt[6]);
   virtual void WriteFileHeader(ofstream *, vtkImageData *, int wExt[6]);
diff --git a/IO/Image/vtkGESignaReader.h b/IO/Image/vtkGESignaReader.h
index 82447fb..c3865cb 100644
--- a/IO/Image/vtkGESignaReader.h
+++ b/IO/Image/vtkGESignaReader.h
@@ -58,8 +58,8 @@ public:
     }
 
 protected:
-  vtkGESignaReader() {};
-  ~vtkGESignaReader() {};
+  vtkGESignaReader() {}
+  ~vtkGESignaReader() {}
 
   virtual void ExecuteInformation();
   virtual void ExecuteDataWithInformation(vtkDataObject *out, vtkInformation* outInfo);
diff --git a/IO/Image/vtkImageImport.cxx b/IO/Image/vtkImageImport.cxx
index 76f6fbf..b282089 100644
--- a/IO/Image/vtkImageImport.cxx
+++ b/IO/Image/vtkImageImport.cxx
@@ -209,6 +209,23 @@ int vtkImageImport::RequestUpdateExtent(
 }
 
 //----------------------------------------------------------------------------
+int vtkImageImport::ComputePipelineMTime(
+  vtkInformation* request,
+  vtkInformationVector** inInfoVec,
+  vtkInformationVector* outInfoVec,
+  int requestFromOutputPort,
+  unsigned long* mtime )
+{
+  if (this->InvokePipelineModifiedCallbacks())
+    {
+    this->Modified();
+    }
+  // Superclass normally returns our MTime.
+  return Superclass::ComputePipelineMTime(request, inInfoVec, outInfoVec,
+        requestFromOutputPort, mtime);
+}
+
+//----------------------------------------------------------------------------
 int vtkImageImport::RequestInformation (
   vtkInformation * vtkNotUsed(request),
   vtkInformationVector ** vtkNotUsed( inputVector ),
diff --git a/IO/Image/vtkImageImport.h b/IO/Image/vtkImageImport.h
index 6d3a1bd..91c3487 100644
--- a/IO/Image/vtkImageImport.h
+++ b/IO/Image/vtkImageImport.h
@@ -117,6 +117,14 @@ public:
   virtual int RequestUpdateExtent(  vtkInformation* request,
                                     vtkInformationVector** inputVector,
                                     vtkInformationVector* outputVector);
+  // Description:
+  // Override vtkAlgorithm
+  virtual int
+  ComputePipelineMTime(vtkInformation* request,
+                       vtkInformationVector** inInfoVec,
+                       vtkInformationVector* outInfoVec,
+                       int requestFromOutputPort,
+                       unsigned long* mtime);
 
   // Description:
   // Set/get the scalar array name for this data set. Initial value is
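The ComputePipelineMTime() override added above makes vtkImageImport advance its modification time whenever one of its pipeline-modified callbacks reports a change, so downstream filters re-execute without user code calling Modified() by hand. A minimal, illustrative sketch of feeding an application-owned buffer through vtkImageImport (buffer contents and sizes are arbitrary and not part of this patch):

  #include <vector>
  #include <vtkImageImport.h>
  #include <vtkSmartPointer.h>

  int main()
  {
    // A raw 8-bit, single-component 16x16x1 buffer owned by the application.
    std::vector<unsigned char> buffer(16 * 16, 0);

    vtkSmartPointer<vtkImageImport> importer =
      vtkSmartPointer<vtkImageImport>::New();
    importer->SetDataScalarTypeToUnsignedChar();
    importer->SetNumberOfScalarComponents(1);
    importer->SetWholeExtent(0, 15, 0, 15, 0, 0);
    importer->SetDataExtentToWholeExtent();
    importer->SetImportVoidPointer(&buffer[0]);

    // With the ComputePipelineMTime() override, changes signalled through the
    // pipeline-modified callback also advance the importer's MTime.
    importer->Update();
    return 0;
  }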
diff --git a/IO/Image/vtkImageImportExecutive.h b/IO/Image/vtkImageImportExecutive.h
index 8439fa9..f8cd09a 100644
--- a/IO/Image/vtkImageImportExecutive.h
+++ b/IO/Image/vtkImageImportExecutive.h
@@ -37,8 +37,8 @@ public:
                              vtkInformationVector* outInfo);
 
 protected:
-  vtkImageImportExecutive() {};
-  ~vtkImageImportExecutive() {};
+  vtkImageImportExecutive() {}
+  ~vtkImageImportExecutive() {}
 
 private:
   vtkImageImportExecutive(const vtkImageImportExecutive&);  // Not implemented.
diff --git a/IO/Image/vtkImageReader2Collection.h b/IO/Image/vtkImageReader2Collection.h
index d006d4a..c0e4b4d 100644
--- a/IO/Image/vtkImageReader2Collection.h
+++ b/IO/Image/vtkImageReader2Collection.h
@@ -50,8 +50,8 @@ public:
   //ETX
 
 protected:
-  vtkImageReader2Collection() {};
-  ~vtkImageReader2Collection() {};
+  vtkImageReader2Collection() {}
+  ~vtkImageReader2Collection() {}
 
 
 private:
diff --git a/IO/Image/vtkImageWriter.h b/IO/Image/vtkImageWriter.h
index 9c8e262..aa1dda8 100644
--- a/IO/Image/vtkImageWriter.h
+++ b/IO/Image/vtkImageWriter.h
@@ -94,8 +94,8 @@ protected:
                               ofstream *file);
   virtual void WriteFile(ofstream *file, vtkImageData *data,
                          int extent[6], int wExtent[6]);
-  virtual void WriteFileHeader(ofstream *, vtkImageData *, int [6]) {};
-  virtual void WriteFileTrailer(ofstream *, vtkImageData *) {};
+  virtual void WriteFileHeader(ofstream *, vtkImageData *, int [6]) {}
+  virtual void WriteFileTrailer(ofstream *, vtkImageData *) {}
 
   // This is called by the superclass.
   // This is the method you should override.
diff --git a/IO/Image/vtkJPEGReader.h b/IO/Image/vtkJPEGReader.h
index aa84a25..345765c 100644
--- a/IO/Image/vtkJPEGReader.h
+++ b/IO/Image/vtkJPEGReader.h
@@ -55,8 +55,8 @@ public:
       return "JPEG";
     }
 protected:
-  vtkJPEGReader() {};
-  ~vtkJPEGReader() {};
+  vtkJPEGReader() {}
+  ~vtkJPEGReader() {}
 
   virtual void ExecuteInformation();
   virtual void ExecuteDataWithInformation(vtkDataObject *out, vtkInformation *outInfo);
diff --git a/IO/Image/vtkJPEGWriter.cxx b/IO/Image/vtkJPEGWriter.cxx
index 7d92415..ece2539 100644
--- a/IO/Image/vtkJPEGWriter.cxx
+++ b/IO/Image/vtkJPEGWriter.cxx
@@ -32,6 +32,10 @@ extern "C" {
 #include <setjmp.h>
 }
 
+#if _MSC_VER
+#define snprintf _snprintf
+#endif
+
 vtkStandardNewMacro(vtkJPEGWriter);
 
 vtkCxxSetObjectMacro(vtkJPEGWriter,Result,vtkUnsignedCharArray);
@@ -77,10 +81,10 @@ void vtkJPEGWriter::Write()
     }
 
   // Make sure the file name is allocated
-  this->InternalFileName =
-    new char[(this->FileName ? strlen(this->FileName) : 1) +
-            (this->FilePrefix ? strlen(this->FilePrefix) : 1) +
-            (this->FilePattern ? strlen(this->FilePattern) : 1) + 10];
+  size_t InternalFileNameSize = (this->FileName ? strlen(this->FileName) : 1) +
+    (this->FilePrefix ? strlen(this->FilePrefix) : 1) +
+    (this->FilePattern ? strlen(this->FilePattern) : 1) + 10;
+  this->InternalFileName = new char[InternalFileNameSize];
 
   // Fill in image information.
   vtkDemandDrivenPipeline::SafeDownCast(this->GetInputExecutive(0, 0))->UpdateInformation();
@@ -117,7 +121,8 @@ void vtkJPEGWriter::Write()
         }
       else
         {
-        sprintf(this->InternalFileName, this->FilePattern,this->FileNumber);
+        snprintf(this->InternalFileName, InternalFileNameSize,
+          this->FilePattern, this->FileNumber);
         }
       }
     this->GetInputExecutive(0, 0)->Update();
@@ -138,7 +143,7 @@ void vtkJPEGWriter::Write()
 // these three routines are for writing into memory
 extern "C"
 {
-  void vtkJPEGWriteToMemoryInit(j_compress_ptr cinfo)
+  static void vtkJPEGWriteToMemoryInit(j_compress_ptr cinfo)
   {
     vtkJPEGWriter *self = vtkJPEGWriter::SafeDownCast(
       static_cast<vtkObject *>(cinfo->client_data));
@@ -161,7 +166,7 @@ extern "C"
 
 extern "C"
 {
-  boolean vtkJPEGWriteToMemoryEmpty(j_compress_ptr cinfo)
+  static boolean vtkJPEGWriteToMemoryEmpty(j_compress_ptr cinfo)
   {
     // Even if (cinfo->dest->free_in_buffer != 0) we still need to write on the
     // new array and not at (arraySize - nbFree)
@@ -184,7 +189,7 @@ extern "C"
 
 extern "C"
 {
-  void vtkJPEGWriteToMemoryTerm(j_compress_ptr cinfo)
+  static void vtkJPEGWriteToMemoryTerm(j_compress_ptr cinfo)
   {
     vtkJPEGWriter *self = vtkJPEGWriter::SafeDownCast(
       static_cast<vtkObject *>(cinfo->client_data));
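The change above replaces an unbounded sprintf() with snprintf() by remembering the size that was used to allocate InternalFileName. The same compute-size-then-bound pattern in isolation (illustrative only, not VTK code):

  #include <cstdio>
  #include <cstring>

  int main()
  {
    const char* pattern = "frame_%04d.jpg";
    int fileNumber = 7;

    // Allocate once, remember the size, and pass it to every snprintf call.
    size_t nameSize = strlen(pattern) + 10;
    char* name = new char[nameSize];
    snprintf(name, nameSize, pattern, fileNumber);   // never writes past nameSize

    printf("%s\n", name);
    delete [] name;
    return 0;
  }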
diff --git a/IO/Image/vtkMedicalImageProperties.cxx b/IO/Image/vtkMedicalImageProperties.cxx
index 217306a..9de00a0 100644
--- a/IO/Image/vtkMedicalImageProperties.cxx
+++ b/IO/Image/vtkMedicalImageProperties.cxx
@@ -21,7 +21,7 @@
 #include <vtksys/stl/set>
 #include <time.h> // for strftime
 #include <ctype.h> // for isdigit
-#include <assert.h>
+#include <cassert>
 
 //----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkMedicalImageProperties);
@@ -74,8 +74,11 @@ public:
       {
       UserDefinedValue key(name);
       UserDefinedValues::const_iterator it = UserDefinedValuePool.find( key );
-      assert( strcmp(it->Name.c_str(), name) == 0 );
-      return it->Value.c_str();
+      if( it != UserDefinedValuePool.end() )
+        {
+        assert( strcmp(it->Name.c_str(), name) == 0 );
+        return it->Value.c_str();
+        }
       }
     return NULL;
     }
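The guard added above avoids dereferencing an end() iterator when the requested key is not in the pool. The underlying idiom, stripped down to the standard library (purely illustrative; the real code uses a set of UserDefinedValue objects):

  #include <iostream>
  #include <map>
  #include <string>

  int main()
  {
    std::map<std::string, std::string> pool;
    pool["PatientAge"] = "042Y";

    std::map<std::string, std::string>::const_iterator it = pool.find("StudyDate");
    if (it != pool.end())        // only safe to dereference after this check
      {
      std::cout << it->second << std::endl;
      }
    else
      {
      std::cout << "(not found)" << std::endl;
      }
    return 0;
  }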
diff --git a/IO/Image/vtkPNGReader.h b/IO/Image/vtkPNGReader.h
index 1aed1c7..c3c673c 100644
--- a/IO/Image/vtkPNGReader.h
+++ b/IO/Image/vtkPNGReader.h
@@ -54,8 +54,8 @@ public:
     }
 
 protected:
-  vtkPNGReader() {};
-  ~vtkPNGReader() {};
+  vtkPNGReader() {}
+  ~vtkPNGReader() {}
 
   virtual void ExecuteInformation();
   virtual void ExecuteDataWithInformation(vtkDataObject *out, vtkInformation *outInfo);
diff --git a/IO/Image/vtkPNGWriter.cxx b/IO/Image/vtkPNGWriter.cxx
index 2d7d8f7..1ee217d 100644
--- a/IO/Image/vtkPNGWriter.cxx
+++ b/IO/Image/vtkPNGWriter.cxx
@@ -30,6 +30,7 @@ vtkPNGWriter::vtkPNGWriter()
 {
   this->FileLowerLeft = 1;
   this->FileDimensionality = 2;
+  this->CompressionLevel = 5;
   this->WriteToMemory = 0;
   this->Result = 0;
   this->TempFP = 0;
@@ -206,6 +207,8 @@ void vtkPNGWriter::WriteSlice(vtkImageData *data, int* uExtent)
     return;
     }
 
+  png_set_compression_level(png_ptr, this->CompressionLevel);
+
   png_infop info_ptr = png_create_info_struct(png_ptr);
   if (!info_ptr)
     {
diff --git a/IO/Image/vtkPNGWriter.h b/IO/Image/vtkPNGWriter.h
index e578a07..9f9f258 100644
--- a/IO/Image/vtkPNGWriter.h
+++ b/IO/Image/vtkPNGWriter.h
@@ -41,6 +41,15 @@ public:
   virtual void Write();
 
   // Description:
+  // Set/Get the zlib compression level.
+  // The range is 0-9, with 0 meaning no compression
+  // corresponding to the largest file size, and 9 meaning
+  // best compression, corresponding to the smallest file size.
+  // The default is 5.
+  vtkSetClampMacro(CompressionLevel, int, 0, 9);
+  vtkGetMacro(CompressionLevel, int);
+
+  // Description:
   // Write the image to memory (a vtkUnsignedCharArray)
   vtkSetMacro(WriteToMemory, unsigned int);
   vtkGetMacro(WriteToMemory, unsigned int);
@@ -57,6 +66,7 @@ protected:
   ~vtkPNGWriter();
 
   void WriteSlice(vtkImageData *data, int* uExtent);
+  int CompressionLevel;
   unsigned int WriteToMemory;
   vtkUnsignedCharArray *Result;
   FILE *TempFP;
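A short usage sketch for the new CompressionLevel ivar; the input pipeline here is arbitrary (any vtkImageData producer works), and only SetCompressionLevel() comes from this patch:

  #include <vtkImageCanvasSource2D.h>
  #include <vtkPNGWriter.h>
  #include <vtkSmartPointer.h>

  int main()
  {
    // Any image source will do; a canvas keeps the example self-contained.
    vtkSmartPointer<vtkImageCanvasSource2D> canvas =
      vtkSmartPointer<vtkImageCanvasSource2D>::New();
    canvas->SetExtent(0, 127, 0, 127, 0, 0);
    canvas->SetScalarTypeToUnsignedChar();
    canvas->SetNumberOfScalarComponents(3);
    canvas->SetDrawColor(200, 100, 50);
    canvas->FillBox(0, 127, 0, 127);

    vtkSmartPointer<vtkPNGWriter> writer =
      vtkSmartPointer<vtkPNGWriter>::New();
    writer->SetInputConnection(canvas->GetOutputPort());
    writer->SetCompressionLevel(9);   // 0 = none/largest file, 9 = best/smallest, default 5
    writer->SetFileName("canvas.png");
    writer->Write();
    return 0;
  }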
diff --git a/IO/Image/vtkPNMReader.cxx b/IO/Image/vtkPNMReader.cxx
index 81b8d51..3d81d22 100644
--- a/IO/Image/vtkPNMReader.cxx
+++ b/IO/Image/vtkPNMReader.cxx
@@ -20,7 +20,7 @@
 
 vtkStandardNewMacro(vtkPNMReader);
 
-char vtkPNMReaderGetChar(FILE *fp)
+static char vtkPNMReaderGetChar(FILE *fp)
 {
   char c;
   int result;
@@ -47,7 +47,7 @@ char vtkPNMReaderGetChar(FILE *fp)
   return c;
 }
 
-int vtkPNMReaderGetInt(FILE *fp)
+static int vtkPNMReaderGetInt(FILE *fp)
 {
   char c;
   int result = 0;
diff --git a/IO/Image/vtkPNMReader.h b/IO/Image/vtkPNMReader.h
index 7618545..86ad8c4 100644
--- a/IO/Image/vtkPNMReader.h
+++ b/IO/Image/vtkPNMReader.h
@@ -59,8 +59,8 @@ public:
     }
 
 protected:
-  vtkPNMReader() {};
-  ~vtkPNMReader() {};
+  vtkPNMReader() {}
+  ~vtkPNMReader() {}
   void ExecuteInformation();
 private:
   vtkPNMReader(const vtkPNMReader&);  // Not implemented.
diff --git a/IO/Image/vtkPNMWriter.h b/IO/Image/vtkPNMWriter.h
index 024ea28..b8f0463 100644
--- a/IO/Image/vtkPNMWriter.h
+++ b/IO/Image/vtkPNMWriter.h
@@ -32,8 +32,8 @@ public:
   virtual void PrintSelf(ostream& os, vtkIndent indent);
 
 protected:
-  vtkPNMWriter() {};
-  ~vtkPNMWriter() {};
+  vtkPNMWriter() {}
+  ~vtkPNMWriter() {}
 
   virtual void WriteFile(
     ofstream *file, vtkImageData *data, int extent[6], int wExt[6]);
diff --git a/IO/Image/vtkPostScriptWriter.h b/IO/Image/vtkPostScriptWriter.h
index dcf6f61..8760449 100644
--- a/IO/Image/vtkPostScriptWriter.h
+++ b/IO/Image/vtkPostScriptWriter.h
@@ -34,8 +34,8 @@ public:
   virtual void PrintSelf(ostream& os, vtkIndent indent);
 
 protected:
-  vtkPostScriptWriter() {};
-  ~vtkPostScriptWriter() {};
+  vtkPostScriptWriter() {}
+  ~vtkPostScriptWriter() {}
 
   virtual void WriteFile(
     ofstream *file, vtkImageData *data, int extent[6], int wExt[6]);
diff --git a/IO/Image/vtkTIFFReader.cxx b/IO/Image/vtkTIFFReader.cxx
index 5ea40ce..7e7b596 100644
--- a/IO/Image/vtkTIFFReader.cxx
+++ b/IO/Image/vtkTIFFReader.cxx
@@ -69,9 +69,9 @@ public:
 
 
 extern "C" {
-void vtkTIFFReaderInternalErrorHandler(const char* vtkNotUsed(module),
-                                          const char* vtkNotUsed(fmt),
-                                          va_list vtkNotUsed(ap))
+static void vtkTIFFReaderInternalErrorHandler(const char* vtkNotUsed(module),
+                                              const char* vtkNotUsed(fmt),
+                                              va_list vtkNotUsed(ap))
 {
     // Do nothing
     // Ignore errors
@@ -289,8 +289,15 @@ vtkTIFFReader::vtkTIFFReader()
 
   this->InitializeColors();
   this->InternalImage = new vtkTIFFReaderInternal;
-  this->OutputExtent = 0;
-  this->OutputIncrements = 0;
+  this->OutputExtent[0] = 0;
+  this->OutputExtent[1] = 0;
+  this->OutputExtent[2] = 0;
+  this->OutputExtent[3] = 0;
+  this->OutputExtent[4] = 0;
+  this->OutputExtent[5] = 0;
+  this->OutputIncrements[0] = 0;
+  this->OutputIncrements[1] = 0;
+  this->OutputIncrements[2] = 0;
 
   this->OrientationTypeSpecifiedFlag = false;
   this->OriginSpecifiedFlag = false;
@@ -451,11 +458,13 @@ void vtkTIFFReader::ExecuteInformation()
     {
     if(this->GetInternalImage()->SubFiles>0)
       {
-      this->DataExtent[5] = this->GetInternalImage()->SubFiles;
+      this->DataExtent[4] = 0;
+      this->DataExtent[5] = this->GetInternalImage()->SubFiles - 1;
       }
     else
       {
-      this->DataExtent[5] = this->GetInternalImage()->NumberOfPages;
+      this->DataExtent[4] = 0;
+      this->DataExtent[5] = this->GetInternalImage()->NumberOfPages - 1;
       }
 
     if( !SpacingSpecifiedFlag )
@@ -554,15 +563,8 @@ void vtkTIFFReaderUpdate2(vtkTIFFReader *self, vtkTIFFReaderInternal *reader,
 // templated to handle different data types.
 template <class OT>
 void vtkTIFFReaderUpdate(vtkTIFFReader *self, vtkTIFFReaderInternal *reader,
-                         vtkImageData *data, OT *outPtr)
+                         OT *outPtr, int outExtent[6], vtkIdType outIncr[3])
 {
-  vtkIdType outIncr[3];
-  int outExtent[6];
-  OT *outPtr2;
-
-  data->GetExtent(outExtent);
-  data->GetIncrements(outIncr);
-
   // multiple number of pages
   if(reader->NumberOfPages>1 )
     {
@@ -582,9 +584,8 @@ void vtkTIFFReaderUpdate(vtkTIFFReader *self, vtkTIFFReaderInternal *reader,
   //file
   reader->Clean();
 
-  outPtr2 = outPtr;
-  int idx2;
-  for (idx2 = outExtent[4]; idx2 <= outExtent[5]; ++idx2)
+  OT *outPtr2 = outPtr;
+  for (int idx2 = outExtent[4]; idx2 <= outExtent[5]; ++idx2)
     {
     self->ComputeInternalFileName(idx2);
     // read in a TIFF file
@@ -602,7 +603,6 @@ void vtkTIFFReaderUpdate(vtkTIFFReader *self, vtkTIFFReaderInternal *reader,
 void vtkTIFFReader::ExecuteDataWithInformation(vtkDataObject *output,
                                                vtkInformation *outInfo)
 {
-  vtkImageData *data = this->AllocateOutputData(output, outInfo);
   vtkTIFFReaderInternal *reader = this->GetInternalImage();
 
   if (this->InternalFileName == NULL)
@@ -613,18 +613,18 @@ void vtkTIFFReader::ExecuteDataWithInformation(vtkDataObject *output,
 
   this->ComputeDataIncrements();
 
-  // Call the correct templated function for the output
-  void *outPtr;
+  // Get the data
+  vtkImageData *data = this->AllocateOutputData(output, outInfo);
+  data->GetExtent(this->OutputExtent);
+  data->GetIncrements(this->OutputIncrements);
 
   // Call the correct templated function for the input
-  outPtr = data->GetScalarPointer();
-  // Needed deep in reading for finding the correct starting location.
-  this->OutputIncrements = data->GetIncrements();
+  void *outPtr = data->GetScalarPointer();
 
   switch (data->GetScalarType())
     {
-    vtkTemplateMacro(vtkTIFFReaderUpdate(this, reader, data,
-                                         (VTK_TT *)(outPtr)));
+    vtkTemplateMacro(vtkTIFFReaderUpdate(this, reader, (VTK_TT *)(outPtr),
+                     this->OutputExtent, this->OutputIncrements));
     default:
       vtkErrorMacro("UpdateFromFile: Unknown data type");
     }
@@ -1510,12 +1510,11 @@ void vtkTIFFReader::ReadGenericImage( void *out,
 
 //-------------------------------------------------------------------------
 void vtkTIFFReader::ReadImageInternal( void* vtkNotUsed(in), void* outPtr,
-                                       int* outExt,
+                                       int* vtkNotUsed(outExt),
                                        unsigned int vtkNotUsed(size) )
 {
   int width  = this->GetInternalImage()->Width;
   int height = this->GetInternalImage()->Height;
-  this->OutputExtent = outExt;
 
   if ( !this->GetInternalImage()->CanRead() )
     {
diff --git a/IO/Image/vtkTIFFReader.h b/IO/Image/vtkTIFFReader.h
index dbdcee3..804f397 100644
--- a/IO/Image/vtkTIFFReader.h
+++ b/IO/Image/vtkTIFFReader.h
@@ -142,8 +142,8 @@ private:
   int TotalColors;
   unsigned int ImageFormat;
   vtkTIFFReaderInternal *InternalImage;
-  int *OutputExtent;
-  vtkIdType *OutputIncrements;
+  int OutputExtent[6];
+  vtkIdType OutputIncrements[3];
   unsigned int OrientationType;
   bool OrientationTypeSpecifiedFlag;
   bool OriginSpecifiedFlag;
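The extent fix above reflects that VTK extents are inclusive index ranges, so a file with N pages occupies slices 0 through N-1 rather than 0 through N. A tiny illustration of the convention (assumed values, not reader code):

  #include <iostream>

  int main()
  {
    int numberOfPages = 12;                        // e.g. pages in a multi-page TIFF
    int dataExtent[6] = { 0, 511, 0, 511,          // x and y index ranges, inclusive
                          0, numberOfPages - 1 };  // z range: 0 .. N-1

    int slices = dataExtent[5] - dataExtent[4] + 1;  // inclusive range => +1
    std::cout << slices << " slices" << std::endl;   // prints 12
    return 0;
  }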
diff --git a/IO/Import/Testing/Cxx/CMakeLists.txt b/IO/Import/Testing/Cxx/CMakeLists.txt
index f4933fa..7fcc423 100644
--- a/IO/Import/Testing/Cxx/CMakeLists.txt
+++ b/IO/Import/Testing/Cxx/CMakeLists.txt
@@ -1,29 +1,4 @@
-if(VTK_DATA_ROOT)
-  set (NEEDS_DATA
+vtk_add_test_cxx(
   TestVRMLNormals.cxx
   )
-endif()
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${NEEDS_DATA}
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Hybrid/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/Import/Testing/Cxx/TestVRMLNormals.cxx b/IO/Import/Testing/Cxx/TestVRMLNormals.cxx
index 3a43bc2..f0096e8 100644
--- a/IO/Import/Testing/Cxx/TestVRMLNormals.cxx
+++ b/IO/Import/Testing/Cxx/TestVRMLNormals.cxx
@@ -34,13 +34,13 @@ int TestVRMLNormals( int argc, char * argv [] )
   vtkVRMLImporter* importer = vtkVRMLImporter::New();
   importer->SetRenderWindow(renWin);
 
-  char* fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/wineglass.wrl");
+  char* fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/WineGlass.wrl");
   importer->SetFileName(fname);
   importer->Read();
 
   delete [] fname;
 
-  renWin->SetSize(800, 800);
+  renWin->SetSize(400, 400);
 
   // render the image
   iren->Initialize();
@@ -58,5 +58,5 @@ int TestVRMLNormals( int argc, char * argv [] )
   renWin->Delete();
   iren->Delete();
 
-  return retVal;
+  return !retVal;
 }
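The inverted return value follows the usual VTK regression-test convention: the image comparison reports success with a non-zero value, while CTest treats a zero exit code as passing, so the test must return the negation. A bare-bones sketch of that mapping (the PASSED/FAILED values mirror vtkTesting's convention and are an assumption here, not part of the patch):

  #include <iostream>

  // Assumption: the regression tester reports FAILED as 0 and PASSED as 1.
  enum { FAILED = 0, PASSED = 1 };

  int ToExitCode(int regressionResult)
  {
    // CTest counts a test as passing only when the executable exits with 0,
    // so a PASSED (non-zero) result must be negated before returning.
    return !regressionResult;
  }

  int main()
  {
    std::cout << ToExitCode(PASSED) << " " << ToExitCode(FAILED) << std::endl; // prints: 0 1
    return 0;
  }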
diff --git a/IO/Import/Testing/Data/Baseline/TestVRMLNormals.png.md5 b/IO/Import/Testing/Data/Baseline/TestVRMLNormals.png.md5
new file mode 100644
index 0000000..a87daba
--- /dev/null
+++ b/IO/Import/Testing/Data/Baseline/TestVRMLNormals.png.md5
@@ -0,0 +1 @@
+37a744ad3c0abf4ed4812c49a41c2c44
diff --git a/IO/Import/Testing/Data/Baseline/VRMLImporter.png.md5 b/IO/Import/Testing/Data/Baseline/VRMLImporter.png.md5
new file mode 100644
index 0000000..28799c1
--- /dev/null
+++ b/IO/Import/Testing/Data/Baseline/VRMLImporter.png.md5
@@ -0,0 +1 @@
+46c068f8dcc052a13b9b0ddd9e7bd3fd
diff --git a/IO/Import/Testing/Python/CMakeLists.txt b/IO/Import/Testing/Python/CMakeLists.txt
index bcfd7b9..8e747fd 100644
--- a/IO/Import/Testing/Python/CMakeLists.txt
+++ b/IO/Import/Testing/Python/CMakeLists.txt
@@ -1 +1 @@
-add_test_python(VRMLImporter.py Hybrid)
+vtk_add_test_python(VRMLImporter.py)
diff --git a/IO/Import/Testing/Tcl/CMakeLists.txt b/IO/Import/Testing/Tcl/CMakeLists.txt
index 3a165bb..c9535bb 100644
--- a/IO/Import/Testing/Tcl/CMakeLists.txt
+++ b/IO/Import/Testing/Tcl/CMakeLists.txt
@@ -1,3 +1 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(VRMLImporter Hybrid)
-endif()
+vtk_add_test_tcl(VRMLImporter)
diff --git a/IO/Import/module.cmake b/IO/Import/module.cmake
index 4eff63a..e344054 100644
--- a/IO/Import/module.cmake
+++ b/IO/Import/module.cmake
@@ -4,6 +4,8 @@ vtk_module(vtkIOImport
   DEPENDS
     vtkCommonCore
     vtkRenderingCore
+  PRIVATE_DEPENDS
+    vtkFiltersSources
   TEST_DEPENDS
     vtkRenderingOpenGL
     vtkTestingRendering
diff --git a/IO/Import/vtk3DSImporter.cxx b/IO/Import/vtk3DSImporter.cxx
index 4bcba03..c8182be 100644
--- a/IO/Import/vtk3DSImporter.cxx
+++ b/IO/Import/vtk3DSImporter.cxx
@@ -96,7 +96,8 @@ static void start_chunk (vtk3DSImporter *importer, vtk3DSChunk *chunk);
 static void end_chunk (vtk3DSImporter *importer, vtk3DSChunk *chunk);
 static byte read_byte (vtk3DSImporter *importer);
 static word read_word (vtk3DSImporter *importer);
-static dword read_dword (vtk3DSImporter *importer);
+static word peek_word (vtk3DSImporter *importer);
+static dword peek_dword (vtk3DSImporter *importer);
 static float read_float (vtk3DSImporter *importer);
 static void read_point (vtk3DSImporter *importer, vtk3DSVector v);
 static char *read_string (vtk3DSImporter *importer);
@@ -1043,8 +1044,8 @@ static float parse_float_percentage(vtk3DSImporter *importer)
 static void start_chunk (vtk3DSImporter *importer, vtk3DSChunk *chunk)
 {
   chunk->start  = ftell(importer->GetFileFD());
-  chunk->tag    = read_word(importer);
-  chunk->length = read_dword(importer);
+  chunk->tag    = peek_word(importer);
+  chunk->length = peek_dword(importer);
   if (chunk->length == 0)
     {
     chunk->length = 1;
@@ -1083,14 +1084,24 @@ static word read_word(vtk3DSImporter *importer)
   return data;
 }
 
-static dword read_dword(vtk3DSImporter *importer)
+static word peek_word(vtk3DSImporter *importer)
+{
+  word data;
+
+  if (fread (&data, 2, 1, importer->GetFileFD()) != 1)
+    {
+    data = 0;
+    }
+  vtkByteSwap::Swap2LE ((short *) &data);
+  return data;
+}
+
+static dword peek_dword(vtk3DSImporter *importer)
 {
   dword data;
 
   if (fread (&data, 4, 1, importer->GetFileFD()) != 1)
     {
-    vtkErrorWithObjectMacro(
-      importer, "Pre-mature end of file in read_dword\n");
     data = 0;
     }
 
@@ -1098,7 +1109,6 @@ static dword read_dword(vtk3DSImporter *importer)
   return data;
 }
 
-
 static float read_float(vtk3DSImporter *importer)
 {
   float data;
diff --git a/IO/Import/vtkImporter.h b/IO/Import/vtkImporter.h
index 00b3790..f7485dc 100644
--- a/IO/Import/vtkImporter.h
+++ b/IO/Import/vtkImporter.h
@@ -82,11 +82,11 @@ protected:
   ~vtkImporter();
 
   virtual int ImportBegin () {return 1;};
-  virtual void ImportEnd () {};
-  virtual void ImportActors (vtkRenderer*) {};
-  virtual void ImportCameras (vtkRenderer*) {};
-  virtual void ImportLights (vtkRenderer*) {};
-  virtual void ImportProperties (vtkRenderer*) {};
+  virtual void ImportEnd () {}
+  virtual void ImportActors (vtkRenderer*) {}
+  virtual void ImportCameras (vtkRenderer*) {}
+  virtual void ImportLights (vtkRenderer*) {}
+  virtual void ImportProperties (vtkRenderer*) {}
 
   vtkRenderer *Renderer;
   vtkRenderWindow *RenderWindow;
diff --git a/IO/Import/vtkVRMLImporter.cxx b/IO/Import/vtkVRMLImporter.cxx
index 5267a21..35de33f 100644
--- a/IO/Import/vtkVRMLImporter.cxx
+++ b/IO/Import/vtkVRMLImporter.cxx
@@ -253,7 +253,7 @@ private:
 // or prototype types.
 //
 
-#include <assert.h>
+#include <cassert>
 
 //
 // Static list of node types.
@@ -566,12 +566,6 @@ yyltype;
 #define YYLTYPE yyltype
 #endif
 
-#ifndef __cplusplus
-#ifndef __STDC__
-#define const
-#endif
-#endif
-
 #define YYFINAL         128
 #define YYFLAG          -32768
 #define YYNTBASE        40
@@ -903,8 +897,8 @@ while (0)
 
 #ifndef YYPURE
 
-int     yychar;                 /*  the lookahead symbol                */
-YYSTYPE yylval;                 /*  the semantic value of the           */
+static int     yychar;          /*  the lookahead symbol                */
+static YYSTYPE yylval;          /*  the semantic value of the           */
                                 /*  lookahead symbol                    */
 
 #ifdef YYLSP_NEEDED
@@ -912,11 +906,11 @@ YYLTYPE yylloc;                 /*  location data for the lookahead     */
                                 /*  symbol                              */
 #endif
 
-int yynerrs;                    /*  number of parse errors so far       */
+static int yynerrs;             /*  number of parse errors so far       */
 #endif  /* not YYPURE */
 
 #if YYDEBUG != 0
-int yydebug;                    /*  nonzero means print parse trace     */
+static int yydebug;             /*  nonzero means print parse trace     */
 /* Since this is uninitialized, it does not stop multiple parsers
    from coexisting.  */
 #endif
@@ -938,60 +932,26 @@ int yydebug;                    /*  nonzero means print parse trace     */
 #define YYMAXDEPTH 10000
 #endif
 
-/* Prevent warning if -Wstrict-prototypes.  */
-#ifdef __GNUC__
-int yyparse (vtkVRMLImporter*);
-#endif
-
-#if __GNUC__ > 1                /* GNU C and GNU C++ define this.  */
-#define __yy_memcpy(FROM,TO,COUNT)      __builtin_memcpy(TO,FROM,COUNT)
-#else                           /* not GNU C or C++ */
-#ifndef __cplusplus
-
-/* This is the most reliable way to avoid incompatibilities
-   in available built-in functions on various systems.  */
-static void
-__yy_memcpy (from, to, count)
-  char *from;
-char *to;
-int count;
-{
-  register char *f = from;
-  register char *t = to;
-  register int i = count;
-
-  while (i-- > 0)
-    *t++ = *f++;
-}
-
-#else /* __cplusplus */
-
-/* This is the most reliable way to avoid incompatibilities
-   in available built-in functions on various systems.  */
 static void
 __yy_memcpy (char *from, char *to, int count)
 {
-  register char *f = from;
-  register char *t = to;
-  register int i = count;
+  char *f = from;
+  char *t = to;
+  int i = count;
 
   while (i-- > 0)
     *t++ = *f++;
 }
 
-#endif
-#endif
 
-
-
-int
+static int
 yyparse(vtkVRMLImporter* self)
 {
   FakeAlloca yyallocator;
-  register int yystate;
-  register int yyn;
-  register short *yyssp;
-  register YYSTYPE *yyvsp;
+  int yystate;
+  int yyn;
+  short *yyssp;
+  YYSTYPE *yyvsp;
   int yyerrstatus;      /*  number of tokens to shift before error messages enabled */
   int yychar1 = 0;              /*  lookahead token as an internal (translated) token number */
 
@@ -1729,50 +1689,9 @@ expect(int type)
 #include <unistd.h>
 #endif
 
-/* cfront 1.2 defines "c_plusplus" instead of "__cplusplus" */
-#ifdef c_plusplus
-#ifndef __cplusplus
-#define __cplusplus
-#endif
-#endif
-
-
-#ifdef __cplusplus
-
 /* Use prototypes in function declarations. */
 #define YY_USE_PROTOS
-
-/* The "const" storage-class-modifier is valid. */
-#define YY_USE_CONST
-
-#else   /* ! __cplusplus */
-
-#ifdef __STDC__
-
-#define YY_USE_PROTOS
-#define YY_USE_CONST
-
-#endif  /* __STDC__ */
-#endif  /* ! __cplusplus */
-
-
-#ifdef __TURBOC__
-#define YY_USE_CONST
-#endif
-
-
-#ifndef YY_USE_CONST
-#ifndef const
-#define const
-#endif
-#endif
-
-
-#ifdef YY_USE_PROTOS
 #define YY_PROTO(proto) proto
-#else
-#define YY_PROTO(proto) ()
-#endif
 
 /* Returned upon end-of-file. */
 #define YY_NULL 0
@@ -4120,7 +4039,7 @@ char *yytext;
 #include <string.h>
 
 // used to reset the lexer input after initialization of VRML nodes
-void (*theyyInput)(char *, int &, int);
+static void (*theyyInput)(char *, int &, int);
 
 // We define the YY_INPUT so we an change the input source later
 #define YY_INPUT(buf, result, max_size) (*theyyInput)(buf, result, max_size);
@@ -4285,9 +4204,9 @@ void vtkVRMLImporter::DeleteObject(vtkObject* obj)
 
 int yylex ( vtkVRMLImporter* self )
 {
-  register yy_state_type yy_current_state;
-  register char *yy_cp, *yy_bp;
-  register int yy_act;
+  yy_state_type yy_current_state;
+  char *yy_cp, *yy_bp;
+  int yy_act;
 
 
 
@@ -4381,7 +4300,7 @@ int yylex ( vtkVRMLImporter* self )
       yy_match:
     do
       {
-      register YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)];
+      YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)];
       if ( yy_accept[yy_current_state] )
         {
         yy_last_accepting_state = yy_current_state;
@@ -4905,9 +4824,9 @@ int yylex ( vtkVRMLImporter* self )
 
 static int yy_get_next_buffer()
 {
-  register char *dest = yy_current_buffer->yy_ch_buf;
-  register char *source = yytext_ptr - 1; /* copy prev. char, too */
-  register int number_to_move, i;
+  char *dest = yy_current_buffer->yy_ch_buf;
+  char *source = yytext_ptr - 1; /* copy prev. char, too */
+  int number_to_move, i;
   int ret_val;
 
   if ( yy_c_buf_p > &yy_current_buffer->yy_ch_buf[yy_n_chars + 1] )
@@ -5027,14 +4946,14 @@ static int yy_get_next_buffer()
 
 static yy_state_type yy_get_previous_state()
 {
-  register yy_state_type yy_current_state;
-  register char *yy_cp;
+  yy_state_type yy_current_state;
+  char *yy_cp;
 
   yy_current_state = yy_start;
 
   for ( yy_cp = yytext_ptr + YY_MORE_ADJ; yy_cp < yy_c_buf_p; ++yy_cp )
     {
-    register YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
+    YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
     if ( yy_accept[yy_current_state] )
       {
       yy_last_accepting_state = yy_current_state;
@@ -5066,10 +4985,10 @@ static yy_state_type yy_try_NUL_trans( yy_state_type yy_current_state )
   yy_state_type yy_current_state;
 #endif
 {
-  register int yy_is_jam;
-  register char *yy_cp = yy_c_buf_p;
+  int yy_is_jam;
+  char *yy_cp = yy_c_buf_p;
 
-  register YY_CHAR yy_c = 1;
+  YY_CHAR yy_c = 1;
   if ( yy_accept[yy_current_state] )
     {
     yy_last_accepting_state = yy_current_state;
@@ -5268,7 +5187,7 @@ const char *s2;
 int n;
 #endif
 {
-  register int i;
+  int i;
   for ( i = 0; i < n; ++i )
     s1[i] = s2[i];
 }
@@ -5514,6 +5433,11 @@ vtkVRMLImporter::~vtkVRMLImporter()
   delete VrmlNodeType::useList;
   VrmlNodeType::useList = 0;
   vtkVRMLAllocator::CleanUp();
+
+  if (yy_current_buffer)
+    {
+    yy_delete_buffer(yy_current_buffer);
+    }
 }
 
 void vtkVRMLImporter::PrintSelf(ostream& os, vtkIndent indent)
diff --git a/IO/Import/vtkVRMLImporter.h b/IO/Import/vtkVRMLImporter.h
index 1e6d30c..2bd6843 100644
--- a/IO/Import/vtkVRMLImporter.h
+++ b/IO/Import/vtkVRMLImporter.h
@@ -129,10 +129,10 @@ protected:
 
   virtual int ImportBegin ();
   virtual void ImportEnd ();
-  virtual void ImportActors (vtkRenderer *) {};
-  virtual void ImportCameras (vtkRenderer *) {};
-  virtual void ImportLights (vtkRenderer *) {};
-  virtual void ImportProperties (vtkRenderer *) {};
+  virtual void ImportActors (vtkRenderer *) {}
+  virtual void ImportCameras (vtkRenderer *) {}
+  virtual void ImportLights (vtkRenderer *) {}
+  virtual void ImportProperties (vtkRenderer *) {}
 
   int OpenImportFile();
   char *FileName;
diff --git a/IO/Infovis/CMakeLists.txt b/IO/Infovis/CMakeLists.txt
index d1f7bbe..3b42093 100644
--- a/IO/Infovis/CMakeLists.txt
+++ b/IO/Infovis/CMakeLists.txt
@@ -8,6 +8,7 @@ set(Module_SRCS
   vtkISIReader.cxx
   vtkMultiNewickTreeReader.cxx
   vtkNewickTreeReader.cxx
+  vtkNewickTreeWriter.cxx
   vtkRISReader.cxx
   vtkTulipReader.cxx
   vtkXGMLReader.cxx
diff --git a/IO/Infovis/Testing/Cxx/CMakeLists.txt b/IO/Infovis/Testing/Cxx/CMakeLists.txt
index f7559f8..7dddc2f 100644
--- a/IO/Infovis/Testing/Cxx/CMakeLists.txt
+++ b/IO/Infovis/Testing/Cxx/CMakeLists.txt
@@ -1,34 +1,13 @@
-if(VTK_DATA_ROOT)
-  set (NEEDS_DATA
-    TestBiomTableReader.cxx
-    TestNewickTreeReader.cxx
-    TestMultiNewickTreeReader.cxx
-    TestTulipReaderProperties.cxx
+vtk_add_test_cxx(NO_VALID
+  TestBiomTableReader.cxx
+  TestDIMACSGraphReader.cxx
+  TestISIReader.cxx
+  TestFixedWidthTextReader.cxx
+  TestNewickTreeReader.cxx
+  TestNewickTreeWriter.cxx
+  TestMultiNewickTreeReader.cxx
+  TestRISReader.cxx
+  TestTulipReaderProperties.cxx
+  TestDelimitedTextReader2.cxx
   )
-endif()
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${NEEDS_DATA}
-  # TestTulipReader.cxx           # todo (unsatisfied deps)
-  # TestTulipReaderClusters.cxx   # todo (unsatisfied deps)
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-            COMMAND ${vtk-module}CxxTests ${TName}
-                                          -D ${VTK_DATA_ROOT}
-                                          -T ${VTK_TEST_OUTPUT_DIR}
-                                          -V Baseline/${vtk-module}/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/Infovis/Testing/Cxx/TestDelimitedTextReader2.cxx b/IO/Infovis/Testing/Cxx/TestDelimitedTextReader2.cxx
new file mode 100644
index 0000000..d2cbee6
--- /dev/null
+++ b/IO/Infovis/Testing/Cxx/TestDelimitedTextReader2.cxx
@@ -0,0 +1,82 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestDelimitedTextReader.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkDelimitedTextReader.h>
+#include <vtkTable.h>
+#include <vtkTestUtilities.h>
+
+// This test mainly checks that vtkDelimitedTextReader accepts
+// both a file and a text string as input
+int TestDelimitedTextReader2(int argc, char *argv[])
+{
+  //------------  test the reader with an input file-----------------
+  char* filename = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/authors.csv");
+
+  vtkDelimitedTextReader *reader = vtkDelimitedTextReader::New();
+  reader->SetFileName(filename);
+  reader->SetHaveHeaders(1);
+  reader->SetDetectNumericColumns(1);
+  reader->Update();
+  delete[] filename;
+
+  vtkTable* table = reader->GetOutput();
+  table->Dump();
+  cout << "Printing reader info..." << endl;
+  reader->Print(cout);
+
+  if (table->GetNumberOfRows() != 6)
+    {
+    cout << "ERROR: Wrong number of rows: " << table->GetNumberOfRows()<<endl;
+    return 1;
+    }
+  if (table->GetNumberOfColumns() != 6)
+    {
+    cout << "ERROR: Wrong number of columns: " << table->GetNumberOfColumns()<<endl;
+    return 1;
+    }
+
+  reader->Delete();
+
+
+  //------------  test the reader with an input string-----------------
+  char inputString[] = ",awesomeness,fitness,region\r\nAbby,1,2,china\r\nBob,5,0.2,US\r\nCatie,3,0.3,UK\r\nDavid,2,100,UK\r\nGrace,4,20,US\r\nIlknur,6,5,Turkey\r\n";
+
+  vtkDelimitedTextReader *reader2 = vtkDelimitedTextReader::New();
+  reader2->SetHaveHeaders(1);
+  reader2->SetReadFromInputString(1);
+  reader2->SetInputString(inputString);
+  reader2->SetDetectNumericColumns(1);
+  reader2->Update();
+
+  vtkTable* table2 = reader2->GetOutput();
+  table2->Dump();
+  cout << "Printing reader2 info..." << endl;
+  reader2->Print(cout);
+
+  if (table2->GetNumberOfRows() != 6)
+    {
+    cout << "ERROR: Wrong number of rows: " << table2->GetNumberOfRows()<<endl;
+    return 1;
+    }
+  if (table2->GetNumberOfColumns() != 4)
+    {
+    cout << "ERROR: Wrong number of columns: " << table2->GetNumberOfColumns()<<endl;
+    return 1;
+    }
+
+  reader2->Delete();
+
+  return 0;
+}
diff --git a/IO/Infovis/Testing/Cxx/TestNewickTreeReader.cxx b/IO/Infovis/Testing/Cxx/TestNewickTreeReader.cxx
index f1e9ee1..2460334 100644
--- a/IO/Infovis/Testing/Cxx/TestNewickTreeReader.cxx
+++ b/IO/Infovis/Testing/Cxx/TestNewickTreeReader.cxx
@@ -1,3 +1,18 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestNewickTreeReader.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
 #include "vtkNewickTreeReader.h"
 #include "vtkSmartPointer.h"
 #include "vtkTestUtilities.h"
@@ -5,30 +20,55 @@
 
 int TestNewickTreeReader(int argc, char* argv[])
 {
+  // reading from a file
   char* file = vtkTestUtilities::ExpandDataFileName(argc, argv,
                                        "Data/Infovis/rep_set.tre");
 
-  cerr << "file: " << file << endl;
+  cout << "reading from a file: "<< file <<  endl;
 
-  vtkSmartPointer<vtkNewickTreeReader> reader =
+  vtkSmartPointer<vtkNewickTreeReader> reader1 =
       vtkSmartPointer<vtkNewickTreeReader>::New();
-  reader->SetFileName(file);
+  reader1->SetFileName(file);
   delete[] file;
-  reader->Update();
-  vtkTree *tree = reader->GetOutput();
+  reader1->Update();
+  vtkTree *tree1 = reader1->GetOutput();
+
 
-  int error_count = 0;
+  if (tree1->GetNumberOfVertices() != 836)
+    {
+    cerr << "Wrong number of Vertices: "<< tree1->GetNumberOfVertices() << endl;
+    return 1;
+    }
+
+  if (tree1->GetNumberOfEdges() != 835)
+    {
+    cerr << "Wrong number of Edges: "<< tree1->GetNumberOfEdges() << endl;
+    return 1;
+    }
+
+
+  // reading from a string
+  cout << "reading from a string" << endl;
+  char inputStr[] = "(((A:0.1,B:0.2,(C:0.3,D:0.4)E:0.5)F:0.6,G:0.7)H:0.8,I:0.9);";
+
+  vtkSmartPointer<vtkNewickTreeReader> reader2 =
+      vtkSmartPointer<vtkNewickTreeReader>::New();
+  reader2->SetReadFromInputString(1);
+  reader2->SetInputString(inputStr);
+  reader2->Update();
+  vtkTree *tree2 = reader2->GetOutput();
 
-  if (tree->GetNumberOfVertices() != 837)
+  if (tree2->GetNumberOfVertices() != 10)
     {
-    ++error_count;
+    cerr << "Wrong number of Vertices: "<< tree2->GetNumberOfVertices() << endl;
+    return 1;
     }
 
-  if (tree->GetNumberOfEdges() != 836)
+  if (tree2->GetNumberOfEdges() != 9)
     {
-    ++error_count;
+    cerr << "Wrong number of Edges: "<< tree2->GetNumberOfEdges() << endl;
+    return 1;
     }
 
-  cerr << error_count << " errors" << endl;
-  return error_count;
+  return 0;
 }
diff --git a/IO/Infovis/Testing/Cxx/TestNewickTreeWriter.cxx b/IO/Infovis/Testing/Cxx/TestNewickTreeWriter.cxx
new file mode 100644
index 0000000..9985253
--- /dev/null
+++ b/IO/Infovis/Testing/Cxx/TestNewickTreeWriter.cxx
@@ -0,0 +1,124 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestNewickTreeWriter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkAbstractArray.h"
+#include "vtkDataSetAttributes.h"
+#include "vtkNew.h"
+#include "vtkNewickTreeReader.h"
+#include "vtkNewickTreeWriter.h"
+#include "vtkTestUtilities.h"
+#include "vtkTree.h"
+
+int TestNewickTreeWriter(int argc, char* argv[])
+{
+  // get the full path to the input file
+  char* file = vtkTestUtilities::ExpandDataFileName(argc, argv,
+                                       "Data/Infovis/rep_set.tre");
+  cout << "reading from a file: "<< file <<  endl;
+
+  // read the input file into a vtkTree
+  vtkNew<vtkNewickTreeReader> reader1;
+  reader1->SetFileName(file);
+  reader1->Update();
+  vtkTree *tree1 = reader1->GetOutput();
+  delete[] file;
+
+  // write this vtkTree out to a string
+  vtkNew<vtkNewickTreeWriter> writer;
+  writer->WriteToOutputStringOn();
+  writer->SetInputData(tree1);
+  writer->Update();
+  std::string treeString = writer->GetOutputStdString();
+
+  // read this string back in, creating another vtkTree
+  vtkNew<vtkNewickTreeReader> reader2;
+  reader2->ReadFromInputStringOn();
+  reader2->SetInputString(treeString);
+  reader2->Update();
+  vtkTree *tree2 = reader2->GetOutput();
+
+  // compare these two trees.  This test fails if it detects any differences
+  // between them.
+  vtkIdType numVerticesTree1 = tree1->GetNumberOfVertices();
+  vtkIdType numVerticesTree2 = tree2->GetNumberOfVertices();
+  if (numVerticesTree1 != numVerticesTree2)
+    {
+    cout << "number of vertices is not equal: " << numVerticesTree1
+         << " vs. " << numVerticesTree2 << endl;
+    return EXIT_FAILURE;
+    }
+
+  vtkIdType numEdgesTree1 = tree1->GetNumberOfEdges();
+  vtkIdType numEdgesTree2 = tree2->GetNumberOfEdges();
+  if (numEdgesTree1 != numEdgesTree2)
+    {
+    cout << "number of edges is not equal: " << numEdgesTree1 <<
+            " vs. " << numEdgesTree2 << endl;
+    return EXIT_FAILURE;
+    }
+
+  for (vtkIdType vertex = 0; vertex < numVerticesTree1; ++vertex)
+    {
+    if (tree1->GetParent(vertex) != tree2->GetParent(vertex))
+      {
+      cout << "tree1 and tree2 do not agree on the parent of vertex " << vertex
+           << endl;
+      return EXIT_FAILURE;
+      }
+    if (tree1->GetNumberOfChildren(vertex) !=
+        tree2->GetNumberOfChildren(vertex))
+      {
+      cout << "tree1 and tree2 do not agree on the number of children "
+           << "for vertex " << vertex << endl;
+      return EXIT_FAILURE;
+      }
+    }
+
+  vtkAbstractArray *names1 =
+    tree1->GetVertexData()->GetAbstractArray("node name");
+  vtkAbstractArray *names2 =
+    tree2->GetVertexData()->GetAbstractArray("node name");
+  if (names1->GetNumberOfTuples() != names2->GetNumberOfTuples())
+    {
+    cout << "the names arrays are of different sizes" << endl;
+    return EXIT_FAILURE;
+    }
+  for (vtkIdType v = 0; v < names1->GetNumberOfTuples(); v++)
+    {
+    if (names1->GetVariantValue(v) != names2->GetVariantValue(v))
+      {
+      cout << "tree1 and tree2 do not agree on the name of vertex " << v << endl;
+      return EXIT_FAILURE;
+      }
+    }
+
+  vtkAbstractArray *weights1 = tree1->GetEdgeData()->GetAbstractArray("weight");
+  vtkAbstractArray *weights2 = tree2->GetEdgeData()->GetAbstractArray("weight");
+  if (weights1->GetNumberOfTuples() != weights2->GetNumberOfTuples())
+    {
+    cout << "the weights arrays are of different sizes" << endl;
+    return EXIT_FAILURE;
+    }
+  for (vtkIdType e = 0; e < weights1->GetNumberOfTuples(); e++)
+    {
+    if (weights1->GetVariantValue(e) != weights2->GetVariantValue(e))
+      {
+      cout << "tree1 and tree2 do not agree on the weight of edge " << e << endl;
+      return EXIT_FAILURE;
+      }
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/IO/Infovis/module.cmake b/IO/Infovis/module.cmake
index bfd17b7..c10d0d4 100644
--- a/IO/Infovis/module.cmake
+++ b/IO/Infovis/module.cmake
@@ -8,7 +8,9 @@ vtk_module(vtkIOInfovis
     vtkIOCore
     vtkIOLegacy
     vtkInfovisCore
+  PRIVATE_DEPENDS
     vtklibxml2
+    vtksys
   TEST_DEPENDS
     vtkInfovisLayout
     vtkRenderingCore
diff --git a/IO/Infovis/vtkDIMACSGraphWriter.h b/IO/Infovis/vtkDIMACSGraphWriter.h
index 81e4c56..5b7a3d2 100644
--- a/IO/Infovis/vtkDIMACSGraphWriter.h
+++ b/IO/Infovis/vtkDIMACSGraphWriter.h
@@ -63,8 +63,8 @@ public:
   vtkGraph* GetInput(int port);
 
 protected:
-  vtkDIMACSGraphWriter() {};
-  ~vtkDIMACSGraphWriter() {};
+  vtkDIMACSGraphWriter() {}
+  ~vtkDIMACSGraphWriter() {}
 
   void WriteData();
 
diff --git a/IO/Infovis/vtkDelimitedTextReader.cxx b/IO/Infovis/vtkDelimitedTextReader.cxx
index 8c562df..2196e5e 100644
--- a/IO/Infovis/vtkDelimitedTextReader.cxx
+++ b/IO/Infovis/vtkDelimitedTextReader.cxx
@@ -36,6 +36,7 @@
 #include "vtkTextCodecFactory.h"
 
 #include <vtksys/ios/sstream>
+#include <vtksys/ios/iostream>
 #include <algorithm>
 #include <iterator>
 #include <stdexcept>
@@ -387,6 +388,9 @@ vtkDelimitedTextReader::vtkDelimitedTextReader() :
   this->SetNumberOfInputPorts(0);
   this->SetNumberOfOutputPorts(1);
 
+  this->ReadFromInputString = 0;
+  this->InputString = NULL;
+  this->InputStringLength = 0;
   this->MergeConsecutiveDelimiters = false;
   this->PedigreeIdArrayName = NULL;
   this->SetPedigreeIdArrayName("id");
@@ -409,6 +413,7 @@ vtkDelimitedTextReader::~vtkDelimitedTextReader()
   this->SetPedigreeIdArrayName(0);
   this->SetUnicodeCharacterSet(0);
   this->SetFileName(0);
+  this->SetInputString(NULL);
   this->SetFieldDelimiterCharacters(0);
 }
 
@@ -417,6 +422,16 @@ void vtkDelimitedTextReader::PrintSelf(ostream& os, vtkIndent indent)
   this->Superclass::PrintSelf(os, indent);
   os << indent << "FileName: "
      << (this->FileName ? this->FileName : "(none)") << endl;
+  os << indent << "ReadFromInputString: "
+     << (this->ReadFromInputString ? "On\n" : "Off\n");
+  if ( this->InputString )
+    {
+    os << indent << "Input String: " << this->InputString << "\n";
+    }
+  else
+    {
+    os << indent << "Input String: (None)\n";
+    }
   os << indent << "UnicodeCharacterSet: "
      << (this->UnicodeCharacterSet ? this->UnicodeCharacterSet : "(none)") << endl;
   os << indent << "MaxRecords: " << this->MaxRecords
@@ -456,6 +471,48 @@ void vtkDelimitedTextReader::PrintSelf(ostream& os, vtkIndent indent)
     << (this->OutputPedigreeIds? "true" : "false") << endl;
 }
 
+void vtkDelimitedTextReader::SetInputString(const char *in)
+{
+  int len = 0;
+  if (in != NULL)
+    {
+    len = static_cast<int>(strlen(in));
+    }
+  this->SetInputString(in, len);
+}
+
+void vtkDelimitedTextReader::SetInputString(const char *in, int len)
+{
+  if (this->InputString && in && strncmp(in, this->InputString, len) == 0)
+    {
+    return;
+    }
+
+  if (this->InputString)
+    {
+    delete [] this->InputString;
+    }
+
+  if (in && len>0)
+    {
+    // Add a NULL terminator so that GetInputString
+    // callers (from wrapped languages) get a valid
+    // C string in *ALL* cases...
+    //
+    this->InputString = new char[len+1];
+    memcpy(this->InputString,in,len);
+    this->InputString[len] = 0;
+    this->InputStringLength = len;
+    }
+   else
+    {
+    this->InputString = NULL;
+    this->InputStringLength = 0;
+    }
+
+  this->Modified();
+}
+
 void vtkDelimitedTextReader::SetUnicodeRecordDelimiters(const vtkUnicodeString& delimiters)
 {
   this->UnicodeRecordDelimiters = delimiters;
@@ -546,26 +603,39 @@ int vtkDelimitedTextReader::RequestData(
       return 1;
       }
 
-    // If the filename hasn't been specified, we're done ...
-    if(!this->FileName)
-      {
-      return 1;
-      }
-
     if (!this->PedigreeIdArrayName)
       throw std::runtime_error("You must specify a pedigree id array name");
 
-    // Get the total size of the input file in bytes
-    ifstream file_stream(this->FileName, ios::binary);
-    if(!file_stream.good())
+    istream* input_stream_pt = NULL;
+    ifstream file_stream;
+    std::istringstream string_stream;
+
+    if(!this->ReadFromInputString)
       {
-      throw std::runtime_error(
-            "Unable to open input file " + std::string(this->FileName));
-      }
+      // If the filename hasn't been specified, we're done ...
+      if(!this->FileName)
+        {
+        return 1;
+        }
+      // Get the total size of the input file in bytes
+      file_stream.open(this->FileName, ios::binary);
+      if(!file_stream.good())
+        {
+        throw std::runtime_error(
+          "Unable to open input file " + std::string(this->FileName));
+        }
+
+      file_stream.seekg(0, ios::end);
+      //const vtkIdType total_bytes = file_stream.tellg();
+      file_stream.seekg(0, ios::beg);
 
-    file_stream.seekg(0, ios::end);
-    //const vtkIdType total_bytes = file_stream.tellg();
-    file_stream.seekg(0, ios::beg);
+      input_stream_pt = dynamic_cast<istream*>(&file_stream);
+      }
+    else
+      {
+      string_stream.str(this->InputString);
+      input_stream_pt = dynamic_cast<istream*>(&string_stream);
+      }
 
     vtkStdString character_set;
     vtkTextCodec* transCodec = NULL;
@@ -588,7 +658,7 @@ int vtkDelimitedTextReader::RequestData(
       this->UnicodeStringDelimiters =
         vtkUnicodeString::from_utf8(tstring);
       this->UnicodeOutputArrays = false;
-      transCodec = vtkTextCodecFactory::CodecToHandle(file_stream);
+      transCodec = vtkTextCodecFactory::CodecToHandle(*input_stream_pt);
       }
 
     if (NULL == transCodec)
@@ -612,7 +682,7 @@ int vtkDelimitedTextReader::RequestData(
 
     vtkTextCodec::OutputIterator& outIter = iterator;
 
-    transCodec->ToUnicode(file_stream, outIter);
+    transCodec->ToUnicode(*input_stream_pt, outIter);
     iterator.ReachedEndOfInput();
     transCodec->Delete();
 
diff --git a/IO/Infovis/vtkDelimitedTextReader.h b/IO/Infovis/vtkDelimitedTextReader.h
index aaceff6..71681f0 100644
--- a/IO/Infovis/vtkDelimitedTextReader.h
+++ b/IO/Infovis/vtkDelimitedTextReader.h
@@ -80,6 +80,25 @@ public:
   vtkSetStringMacro(FileName);
 
   // Description:
+  // Specify the InputString for use when reading from a character array.
+  // Optionally include the length for binary strings. Note that a copy
+  // of the string is made and stored. If this causes exceedingly large
+  // memory consumption, consider using InputArray instead.
+  void SetInputString(const char *in);
+  vtkGetStringMacro(InputString);
+  void SetInputString(const char *in, int len);
+  vtkGetMacro(InputStringLength, int);
+  void SetInputString(const vtkStdString& input)
+    { this->SetInputString(input.c_str(), static_cast<int>(input.length())); }
+
+  // Description:
+  // Enable reading from an InputString or InputArray instead of the default,
+  // a file.
+  vtkSetMacro(ReadFromInputString,int);
+  vtkGetMacro(ReadFromInputString,int);
+  vtkBooleanMacro(ReadFromInputString,int);
+
+  // Description:
   // Specifies the character set used in the input file.  Valid character set
   // names will be drawn from the list maintained by the Internet Assigned Name
   // Authority at
@@ -245,6 +264,9 @@ protected:
     vtkInformationVector*);
 
   char* FileName;
+  int ReadFromInputString;
+  char *InputString;
+  int InputStringLength;
   char* UnicodeCharacterSet;
   vtkIdType MaxRecords;
   vtkUnicodeString UnicodeRecordDelimiters;
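
The vtkDelimitedTextReader changes above add an in-memory input path. A
minimal usage sketch, not part of this patch: the CSV text is made up, and
the SetHaveHeaders()/SetFieldDelimiterCharacters()/Dump() calls are just
illustrative choices from the existing API.

    // Parse a CSV string instead of a file.
    #include "vtkDelimitedTextReader.h"
    #include "vtkNew.h"
    #include "vtkTable.h"

    int main(int, char*[])
    {
      const char* csv = "id,name\n0,alpha\n1,beta\n";  // hypothetical data

      vtkNew<vtkDelimitedTextReader> reader;
      reader->SetInputString(csv);        // a copy of the string is stored
      reader->ReadFromInputStringOn();    // read InputString, not FileName
      reader->SetHaveHeaders(true);
      reader->SetFieldDelimiterCharacters(",");
      reader->Update();

      vtkTable* table = reader->GetOutput();
      table->Dump();                      // print the parsed rows
      return 0;
    }
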
diff --git a/IO/Infovis/vtkNewickTreeReader.cxx b/IO/Infovis/vtkNewickTreeReader.cxx
index 4e23293..61298ab 100644
--- a/IO/Infovis/vtkNewickTreeReader.cxx
+++ b/IO/Infovis/vtkNewickTreeReader.cxx
@@ -43,7 +43,7 @@ vtkNewickTreeReader::vtkNewickTreeReader()
 {
   vtkTree *output = vtkTree::New();
   this->SetOutput(output);
-  // Releasing data for pipeline parallism.
+  // Releasing data for pipeline parallelism.
   // Filters will know it is empty.
   output->ReleaseData();
   output->Delete();
@@ -95,7 +95,7 @@ int vtkNewickTreeReader::RequestUpdateExtent(
 }
 
 //----------------------------------------------------------------------------
-int vtkNewickTreeReader:: ReadNewickTree(  char * const buffer, vtkTree & tree)
+int vtkNewickTreeReader:: ReadNewickTree(  const char *  buffer, vtkTree & tree)
 {
   // Read through the input file to count the number of nodes in the tree.
   // We start at one to account for the root node
@@ -116,12 +116,11 @@ int vtkNewickTreeReader:: ReadNewickTree(  char * const buffer, vtkTree & tree)
   names->SetName("node name");
   names->SetNumberOfValues(numNodes);
 
-  //parse the input file to create the graph
+  // parse the input file to create the graph
   vtkNew<vtkMutableDirectedGraph> builder;
-  this->BuildTree(buffer, builder.GetPointer(), weights.GetPointer(),
+  this->BuildTree(const_cast<char*> (buffer), builder.GetPointer(), weights.GetPointer(),
     names.GetPointer(), -1);
 
-  builder->GetEdgeData()->AddArray(weights.GetPointer());
   builder->GetVertexData()->AddArray(names.GetPointer());
 
   if (!tree.CheckedShallowCopy(builder.GetPointer()))
@@ -130,17 +129,27 @@ int vtkNewickTreeReader:: ReadNewickTree(  char * const buffer, vtkTree & tree)
     return 1;
     }
 
+  // check if our input file contained edge weight information
+  bool haveWeights = false;
+  for (vtkIdType i = 0; i < weights->GetNumberOfTuples(); ++i)
+    {
+    if (weights->GetValue(i) != 0.0)
+      {
+      haveWeights = true;
+      break;
+      }
+    }
+  if (!haveWeights)
+    {
+    return 1;
+    }
+
+  tree.GetEdgeData()->AddArray(weights.GetPointer());
+
   vtkNew<vtkDoubleArray> nodeWeights;
   nodeWeights->SetNumberOfTuples(tree.GetNumberOfVertices());
 
-  // trueWeights is (for the most part) a duplicate of nodeWeights.
-  // The only difference is that leaf nodes aren't clamped to the max
-  // weight in this array.
-  vtkNew<vtkDoubleArray> trueWeights;
-  trueWeights->SetNumberOfTuples(tree.GetNumberOfVertices());
-
   //set node weights
-  double maxWeight = 0.0;
   vtkNew<vtkTreeDFSIterator> treeIterator;
   treeIterator->SetStartVertex(tree.GetRoot());
   treeIterator->SetTree(&tree);
@@ -154,31 +163,15 @@ int vtkNewickTreeReader:: ReadNewickTree(  char * const buffer, vtkTree & tree)
       weight = weights->GetValue(tree.GetEdgeId(parent, vertex));
       weight += nodeWeights->GetValue(parent);
       }
-
-    if (weight > maxWeight)
-      {
-      maxWeight = weight;
-      }
     nodeWeights->SetValue(vertex, weight);
-    trueWeights->SetValue(vertex, weight);
     }
 
-  for (vtkIdType vertex = 0; vertex < tree.GetNumberOfVertices(); ++vertex)
-    {
-    if (tree.IsLeaf(vertex))
-      {
-      nodeWeights->SetValue(vertex, maxWeight);
-      }
-    }
   nodeWeights->SetName("node weight");
   tree.GetVertexData()->AddArray(nodeWeights.GetPointer());
 
-  trueWeights->SetName("true node weight");
-  tree.GetVertexData()->AddArray(trueWeights.GetPointer());
-
   return 1;
-
 }
+
 //----------------------------------------------------------------------------
 int vtkNewickTreeReader::RequestData(
   vtkInformation *,
@@ -195,68 +188,69 @@ int vtkNewickTreeReader::RequestData(
 
   vtkDebugMacro(<<"Reading Newick tree ...");
 
-  if(this->GetFileName() == NULL || strcmp(this->GetFileName(), "") == 0)
+  if( !this->ReadFromInputString)
     {
-    vtkErrorMacro(<<"Input filename not set");
-    return 1;
-    }
+    if(!this->GetFileName())
+      {
+      vtkErrorMacro("FileName not set.");
+      return 1;
+      }
 
-  std::ifstream ifs( this->GetFileName(), std::ifstream::in );
-  if(!ifs.good())
+    std::ifstream ifs( this->GetFileName(), std::ifstream::in );
+    if(!ifs.good())
+      {
+      vtkErrorMacro(<<"Unable to open " << this->GetFileName() << " for reading");
+      return 1;
+      }
+
+    // Read the input file into a char *
+    ifs.seekg(0, std::ios::end);
+    this->InputStringLength = ifs.tellg();
+    ifs.seekg(0, std::ios::beg);
+    this->InputString = new char[this->InputStringLength];
+    ifs.read(this->InputString, this->InputStringLength);
+    ifs.close();
+    }
+  else
     {
-    vtkErrorMacro(<<"Unable to open " << this->GetFileName() << " for reading");
-    return 1;
+    if ( (!this->InputString) || (this->InputStringLength == 0))
+      {
+      vtkErrorMacro(<<"Input string is empty!");
+      return 1;
+      }
     }
 
   vtkTree* const output = vtkTree::SafeDownCast(
     outInfo->Get(vtkDataObject::DATA_OBJECT()));
 
 
-
-  // Read the input file into a char *
-  int length;
-  ifs.seekg(0, std::ios::end);
-  length = ifs.tellg();
-  ifs.seekg(0, std::ios::beg);
-  char *buffer = new char[length];
-  ifs.read(buffer, length);
-
-
-  /*// Rewind input buffer
-  ifs.seekg(0, std::ios::beg);
-  ifs.read(buffer, length);
-  ifs.close();
-*/
-  ifs.close();
-
-  if(!ReadNewickTree(buffer, *output))
+  if(!ReadNewickTree(this->InputString, *output))
     {
-    vtkErrorMacro(<<"Error reading the buffer into a vtkTree structure.");
+    vtkErrorMacro(<<"Error reading a vtkTree from the input.");
     return 1;
     }
 
-    delete [] buffer;
-
   vtkDebugMacro(<< "Read " << output->GetNumberOfVertices() <<" vertices and "
     << output->GetNumberOfEdges() <<" edges.\n");
 
   return 1;
 }
 
-void vtkNewickTreeReader::CountNodes(char * const buffer, vtkIdType *numNodes)
+//----------------------------------------------------------------------------
+void vtkNewickTreeReader::CountNodes(const char *buffer, vtkIdType *numNodes)
 {
   char *current;
   char *start;
   char temp;
   int childCount;
 
-  start = buffer;
+  start = const_cast<char*>(buffer);
 
   if (*start != '(')
   {
     // Leaf node. Separate name from weight.
     // If weight doesn't exist then take care of name only
-    current = buffer;
+    current = const_cast<char*>(buffer);
     while (*current != '\0')
     {
       current++;
@@ -366,6 +360,7 @@ void vtkNewickTreeReader::CountNodes(char * const buffer, vtkIdType *numNodes)
   }
 }
 
+//----------------------------------------------------------------------------
 vtkIdType vtkNewickTreeReader::BuildTree(char *buffer,
   vtkMutableDirectedGraph *g, vtkDoubleArray *weights, vtkStringArray *names,
   vtkIdType parent)
@@ -379,12 +374,6 @@ vtkIdType vtkNewickTreeReader::BuildTree(char *buffer,
 
   start = buffer;
 
-  if(parent == -1)
-    {
-    parent = g->AddVertex();
-    names->SetValue(parent, "");
-    }
-
   if (*start != '(')
   {
     // Leaf node. Separate name from weight (if it exists).
@@ -419,7 +408,15 @@ vtkIdType vtkNewickTreeReader::BuildTree(char *buffer,
   else
   {
     // Create node
-    node = g->AddChild(parent);
+    if(parent == -1)
+      {
+      node = g->AddVertex();
+      names->SetValue(node, "");
+      }
+    else
+      {
+      node = g->AddChild(parent);
+      }
 
     // Search for all child nodes
     // Find all ',' until corresponding ')' is encountered
@@ -548,4 +545,10 @@ int vtkNewickTreeReader::FillOutputPortInformation(int, vtkInformation* info)
 void vtkNewickTreeReader::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os,indent);
+  os << indent << "FileName: "
+     << (this->FileName ? this->FileName : "(none)") << endl;
+  os << indent << "InputString: "
+     << (this->InputString ? this->InputString : "(none)") << endl;
+  os << indent << "ReadFromInputString: "
+     << (this->ReadFromInputString ? "on" : "off") << endl;
 }
diff --git a/IO/Infovis/vtkNewickTreeReader.h b/IO/Infovis/vtkNewickTreeReader.h
index 4de3f3e..2f8a352 100644
--- a/IO/Infovis/vtkNewickTreeReader.h
+++ b/IO/Infovis/vtkNewickTreeReader.h
@@ -48,7 +48,7 @@ public:
   vtkTree *GetOutput();
   vtkTree *GetOutput(int idx);
   void SetOutput(vtkTree *output);
-  int ReadNewickTree(char * const buffer, vtkTree & tree);
+  int ReadNewickTree(const char * buffer, vtkTree & tree);
 
 protected:
   vtkNewickTreeReader();
@@ -64,7 +64,7 @@ protected:
                                   vtkInformationVector *);
 
   virtual int FillOutputPortInformation(int, vtkInformation*);
-  void CountNodes(char * const buffer, vtkIdType *numNodes);
+  void CountNodes(const char * buffer, vtkIdType *numNodes);
   vtkIdType BuildTree(char *buffer, vtkMutableDirectedGraph *g,
     vtkDoubleArray *weights, vtkStringArray *names, vtkIdType parent);
 private:
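
With the reader changes above, a Newick tree can be parsed straight from a
string through the InputString/ReadFromInputString machinery inherited from
vtkDataReader. A minimal sketch; the Newick string itself is made up.

    #include "vtkNewickTreeReader.h"
    #include "vtkNew.h"
    #include "vtkTree.h"
    #include <iostream>

    int main(int, char*[])
    {
      vtkNew<vtkNewickTreeReader> reader;
      reader->SetInputString("((A:1.0,B:2.0):1.5,C:3.0);");
      reader->ReadFromInputStringOn();
      reader->Update();

      vtkTree* tree = reader->GetOutput();
      std::cout << tree->GetNumberOfVertices() << " vertices, "
                << tree->GetNumberOfEdges() << " edges" << std::endl;
      return 0;
    }
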
diff --git a/IO/Infovis/vtkNewickTreeWriter.cxx b/IO/Infovis/vtkNewickTreeWriter.cxx
new file mode 100644
index 0000000..6bffa66
--- /dev/null
+++ b/IO/Infovis/vtkNewickTreeWriter.cxx
@@ -0,0 +1,145 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkNewickTreeWriter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkNewickTreeWriter.h"
+
+#include "vtkDataSetAttributes.h"
+#include "vtkInformation.h"
+#include "vtkObjectFactory.h"
+#include "vtkTree.h"
+
+vtkStandardNewMacro(vtkNewickTreeWriter);
+
+//----------------------------------------------------------------------------
+vtkNewickTreeWriter::vtkNewickTreeWriter()
+{
+  this->SetFileTypeToASCII();
+
+  this->EdgeWeightArrayName = "weight";
+  this->NodeNameArrayName = "node name";
+
+  this->EdgeWeightArray = NULL;
+  this->NodeNameArray = NULL;
+}
+
+//----------------------------------------------------------------------------
+void vtkNewickTreeWriter::WriteData()
+{
+  vtkDebugMacro(<<"Writing vtk tree data...");
+
+  vtkTree* const input = this->GetInput();
+
+  this->EdgeWeightArray =
+    input->GetEdgeData()->GetAbstractArray(this->EdgeWeightArrayName.c_str());
+
+  this->NodeNameArray =
+    input->GetVertexData()->GetAbstractArray(this->NodeNameArrayName.c_str());
+
+  ostream *fp;
+  if( !(fp=this->OpenVTKFile()) )
+    {
+    if(fp)
+      {
+      if(this->FileName)
+        {
+        vtkErrorMacro("Problem opening file: "
+                      << this->FileName);
+        this->CloseVTKFile(fp);
+        }
+      else
+        {
+        this->CloseVTKFile(fp);
+        vtkErrorMacro("The FileName was not set correctly");
+        }
+      }
+    return;
+    }
+
+  this->WriteVertex(fp, input, input->GetRoot());
+
+  // the tree ends with a semi-colon
+  *fp << ";";
+
+  this->CloseVTKFile(fp);
+}
+
+//----------------------------------------------------------------------------
+void vtkNewickTreeWriter::WriteVertex(ostream *fp, vtkTree* const input,
+                                      vtkIdType vertex)
+{
+  vtkIdType numChildren = input->GetNumberOfChildren(vertex);
+  if (numChildren > 0)
+    {
+    *fp << "(";
+    for (vtkIdType child = 0; child < numChildren; ++child)
+      {
+      this->WriteVertex(fp, input, input->GetChild(vertex, child));
+      if (child != numChildren - 1)
+        {
+        *fp << ",";
+        }
+      }
+    *fp << ")";
+    }
+
+  if (this->NodeNameArray)
+    {
+    vtkStdString name = this->NodeNameArray->GetVariantValue(vertex).ToString();
+    if (name != "")
+      {
+      *fp << name;
+      }
+    }
+
+  if (this->EdgeWeightArray)
+    {
+    vtkIdType parent = input->GetParent(vertex);
+    if (parent != -1)
+      {
+      vtkIdType edge = input->GetEdgeId(parent, vertex);
+      if (edge != -1)
+        {
+        double weight = this->EdgeWeightArray->GetVariantValue(edge).ToDouble();
+        *fp << ":" << weight;
+        }
+      }
+    }
+}
+
+//----------------------------------------------------------------------------
+int vtkNewickTreeWriter::FillInputPortInformation(int, vtkInformation *info)
+{
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkTree");
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+vtkTree* vtkNewickTreeWriter::GetInput()
+{
+  return vtkTree::SafeDownCast(this->Superclass::GetInput());
+}
+
+//----------------------------------------------------------------------------
+vtkTree* vtkNewickTreeWriter::GetInput(int port)
+{
+  return vtkTree::SafeDownCast(this->Superclass::GetInput(port));
+}
+
+//----------------------------------------------------------------------------
+void vtkNewickTreeWriter::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+  os << indent << "EdgeWeightArrayName: " << this->EdgeWeightArrayName << endl;
+  os << indent << "NodeNameArrayName: " << this->NodeNameArrayName << endl;
+}
diff --git a/IO/Infovis/vtkNewickTreeWriter.h b/IO/Infovis/vtkNewickTreeWriter.h
new file mode 100644
index 0000000..a263133
--- /dev/null
+++ b/IO/Infovis/vtkNewickTreeWriter.h
@@ -0,0 +1,83 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkNewickTreeWriter.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkNewickTreeWriter - write vtkTree data to Newick format.
+// .SECTION Description
+// vtkNewickTreeWriter writes a vtkTree to a Newick-formatted file
+// or string.
+
+#ifndef __vtkNewickTreeWriter_h
+#define __vtkNewickTreeWriter_h
+
+#include "vtkIOInfovisModule.h" // For export macro
+#include "vtkDataWriter.h"
+#include "vtkStdString.h"       // For get/set ivars
+
+class vtkTree;
+
+class VTKIOINFOVIS_EXPORT vtkNewickTreeWriter : public vtkDataWriter
+{
+public:
+  static vtkNewickTreeWriter *New();
+  vtkTypeMacro(vtkNewickTreeWriter,vtkDataWriter);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Get the input to this writer.
+  vtkTree* GetInput();
+  vtkTree* GetInput(int port);
+
+  // Description:
+  // Get/Set the name of the input's tree edge weight array.
+  // This array must be part of the input tree's EdgeData.
+  // The default name is "weight".  If this array cannot be
+  // found, then no edge weights will be included in the
+  // output of this writer.
+  vtkGetMacro(EdgeWeightArrayName, vtkStdString);
+  vtkSetMacro(EdgeWeightArrayName, vtkStdString);
+
+  // Description:
+  // Get/Set the name of the input's tree node name array.
+  // This array must be part of the input tree's VertexData.
+  // The default name is "node name".  If this array cannot
+  // be found, then no node names will be included in the
+  // output of this writer.
+  vtkGetMacro(NodeNameArrayName, vtkStdString);
+  vtkSetMacro(NodeNameArrayName, vtkStdString);
+
+protected:
+  vtkNewickTreeWriter();
+  ~vtkNewickTreeWriter() {}
+
+  void WriteData();
+
+  // Description:
+  // Write one vertex.  This function calls itself recursively for
+  // any children of the input vertex.
+  void WriteVertex(ostream *fp, vtkTree* const input, vtkIdType vertex);
+
+  virtual int FillInputPortInformation(int port, vtkInformation *info);
+
+  vtkStdString EdgeWeightArrayName;
+  vtkStdString NodeNameArrayName;
+
+  vtkAbstractArray *EdgeWeightArray;
+  vtkAbstractArray *NodeNameArray;
+
+private:
+  vtkNewickTreeWriter(const vtkNewickTreeWriter&);  // Not implemented.
+  void operator=(const vtkNewickTreeWriter&);  // Not implemented.
+};
+
+#endif
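
The new vtkNewickTreeWriter inherits vtkDataWriter's WriteToOutputString
support, so a tree can be serialized to a Newick string as well as to a
file. A minimal sketch; the input string is made up and the array names are
left at the writer's defaults.

    #include "vtkNewickTreeReader.h"
    #include "vtkNewickTreeWriter.h"
    #include "vtkNew.h"
    #include <iostream>

    int main(int, char*[])
    {
      vtkNew<vtkNewickTreeReader> reader;
      reader->SetInputString("((A:1.0,B:2.0):1.5,C:3.0);");
      reader->ReadFromInputStringOn();
      reader->Update();

      vtkNew<vtkNewickTreeWriter> writer;
      writer->SetInputData(reader->GetOutput());
      writer->WriteToOutputStringOn();    // serialize to a string
      writer->Write();
      std::cout << writer->GetOutputString() << std::endl;
      return 0;
    }
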
diff --git a/IO/Infovis/vtkTulipReader.cxx b/IO/Infovis/vtkTulipReader.cxx
index be1c657..418a0ac 100644
--- a/IO/Infovis/vtkTulipReader.cxx
+++ b/IO/Infovis/vtkTulipReader.cxx
@@ -123,7 +123,7 @@ struct vtkTulipReaderToken
   double DoubleValue;
 };
 
-void vtkTulipReaderNextToken(vtksys_ios::istream& in, vtkTulipReaderToken& tok)
+static void vtkTulipReaderNextToken(vtksys_ios::istream& in, vtkTulipReaderToken& tok)
 {
   char ch = in.peek();
   while (!in.eof() && (ch == ';' || isspace(ch)))
diff --git a/IO/Infovis/vtkXGMLReader.cxx b/IO/Infovis/vtkXGMLReader.cxx
index 360756f..782b987 100644
--- a/IO/Infovis/vtkXGMLReader.cxx
+++ b/IO/Infovis/vtkXGMLReader.cxx
@@ -100,7 +100,7 @@ struct vtkXGMLReaderToken
 
 
 
-void vtkXGMLReaderNextToken(vtksys_ios::istream& in, vtkXGMLReaderToken& tok)
+static void vtkXGMLReaderNextToken(vtksys_ios::istream& in, vtkXGMLReaderToken& tok)
 {
   char ch = in.peek();
   while (!in.eof() && (ch == ';' || isspace(ch)))
diff --git a/IO/Infovis/vtkXMLTreeReader.cxx b/IO/Infovis/vtkXMLTreeReader.cxx
index 9d84369..42c7d5d 100644
--- a/IO/Infovis/vtkXMLTreeReader.cxx
+++ b/IO/Infovis/vtkXMLTreeReader.cxx
@@ -87,7 +87,7 @@ void vtkXMLTreeReader::PrintSelf(ostream& os, vtkIndent indent)
      << (this->GenerateVertexPedigreeIds ? "on" : "off") << endl;
 }
 
-void vtkXMLTreeReaderProcessElement(vtkMutableDirectedGraph *tree,
+static void vtkXMLTreeReaderProcessElement(vtkMutableDirectedGraph *tree,
    vtkIdType parent, xmlNode *node, int readCharData, int maskArrays)
 {
   vtkDataSetAttributes *data = tree->GetVertexData();
diff --git a/IO/LSDyna/Testing/Cxx/CMakeLists.txt b/IO/LSDyna/Testing/Cxx/CMakeLists.txt
index e094122..227ec11 100644
--- a/IO/LSDyna/Testing/Cxx/CMakeLists.txt
+++ b/IO/LSDyna/Testing/Cxx/CMakeLists.txt
@@ -1,26 +1,14 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestLSDynaReader.cxx
-  TestLSDynaReaderSPH.cxx
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_LARGE_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_LARGE_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+if(VTK_USE_LARGE_DATA)
+  # Tell ExternalData to fetch test input at build time.
+  ExternalData_Expand_Arguments(VTKData _
+    "DATA{${VTK_TEST_INPUT_DIR}/LSDyna/foam/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/LSDyna/hemi.draw/,REGEX:.*}"
+    )
+
+  vtk_add_test_cxx(
+    TestLSDynaReader.cxx
+    TestLSDynaReaderSPH.cxx
+    )
+endif()
+
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/LSDyna/Testing/Cxx/TestLSDynaReader.cxx b/IO/LSDyna/Testing/Cxx/TestLSDynaReader.cxx
index 9afb6a0..398ac4a 100644
--- a/IO/LSDyna/Testing/Cxx/TestLSDynaReader.cxx
+++ b/IO/LSDyna/Testing/Cxx/TestLSDynaReader.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 // .NAME Test of vtkLSDynaReader
 // .SECTION Description
-// Tests the vtkLSDynaReader.  Requires VTKLargeData.
+// Tests the vtkLSDynaReader.
 
 #include "vtkLSDynaReader.h"
 #include "vtkDebugLeaks.h"
diff --git a/IO/LSDyna/Testing/Cxx/TestLSDynaReaderSPH.cxx b/IO/LSDyna/Testing/Cxx/TestLSDynaReaderSPH.cxx
index 6e9dd7d..a44747c 100644
--- a/IO/LSDyna/Testing/Cxx/TestLSDynaReaderSPH.cxx
+++ b/IO/LSDyna/Testing/Cxx/TestLSDynaReaderSPH.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 // .NAME Test of vtkLSDynaReader
 // .SECTION Description
-// Tests the vtkLSDynaReader.  Requires VTKLargeData.
+// Tests the vtkLSDynaReader.
 
 #include "vtkLSDynaReader.h"
 #include "vtkDebugLeaks.h"
diff --git a/IO/LSDyna/Testing/Data/Baseline/TestLSDynaReader.png.md5 b/IO/LSDyna/Testing/Data/Baseline/TestLSDynaReader.png.md5
new file mode 100644
index 0000000..21686ac
--- /dev/null
+++ b/IO/LSDyna/Testing/Data/Baseline/TestLSDynaReader.png.md5
@@ -0,0 +1 @@
+7e1da22f24afc2c5e090728781ff8de8
diff --git a/IO/LSDyna/Testing/Data/Baseline/TestLSDynaReaderSPH.png.md5 b/IO/LSDyna/Testing/Data/Baseline/TestLSDynaReaderSPH.png.md5
new file mode 100644
index 0000000..82f3fe7
--- /dev/null
+++ b/IO/LSDyna/Testing/Data/Baseline/TestLSDynaReaderSPH.png.md5
@@ -0,0 +1 @@
+d87e33b4e26ebbd27dabef9481f80852
diff --git a/IO/LSDyna/module.cmake b/IO/LSDyna/module.cmake
index 3ac119b..52754ee 100644
--- a/IO/LSDyna/module.cmake
+++ b/IO/LSDyna/module.cmake
@@ -4,6 +4,8 @@ vtk_module(vtkIOLSDyna
   DEPENDS
     vtkCommonExecutionModel
     vtkIOXML
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkRenderingOpenGL
     vtkTestingRendering
diff --git a/IO/LSDyna/private/LSDynaFamily.cxx b/IO/LSDyna/private/LSDynaFamily.cxx
index 468f1ee..ae1d230 100644
--- a/IO/LSDyna/private/LSDynaFamily.cxx
+++ b/IO/LSDyna/private/LSDynaFamily.cxx
@@ -22,7 +22,7 @@
 
 #include <errno.h>
 #include <ctype.h>
-#include <assert.h>
+#include <cassert>
 
 #include <string>
 #include <set>
@@ -33,8 +33,8 @@
 namespace
 {
 //Documentation on why the exemption
-#define USE_STAT_64 VTK_SIZEOF_ID_TYPE==8 && !defined _DARWIN_FEATURE_64_BIT_INODE
-//OSX uses stat instead of stat64
+#define USE_STAT_64 VTK_SIZEOF_ID_TYPE==8 && !defined _DARWIN_FEATURE_64_BIT_INODE && !defined __FreeBSD__
+//OS X and FreeBSD use stat instead of stat64
 #if (USE_STAT_64)
 //64bit
 #ifndef WIN32
diff --git a/IO/LSDyna/vtkLSDynaReader.cxx b/IO/LSDyna/vtkLSDynaReader.cxx
index 2442e28..3e5cf89 100644
--- a/IO/LSDyna/vtkLSDynaReader.cxx
+++ b/IO/LSDyna/vtkLSDynaReader.cxx
@@ -52,7 +52,7 @@
 #include <vector>
 #include <algorithm>
 #include <map>
-#include <assert.h>
+#include <cassert>
 
 #include <vtkCellType.h>
 #include <vtkDataObject.h>
diff --git a/IO/Legacy/CMakeLists.txt b/IO/Legacy/CMakeLists.txt
index de7f4f1..cb09a23 100644
--- a/IO/Legacy/CMakeLists.txt
+++ b/IO/Legacy/CMakeLists.txt
@@ -11,6 +11,7 @@ SET(Module_SRCS
   vtkGenericDataObjectWriter.cxx
   vtkGraphReader.cxx
   vtkGraphWriter.cxx
+  vtkPixelExtentIO.cxx
   vtkPolyDataReader.cxx
   vtkPolyDataWriter.cxx
   vtkRectilinearGridReader.cxx
@@ -29,4 +30,9 @@ SET(Module_SRCS
   vtkUnstructuredGridWriter.cxx
   )
 
+set_source_files_properties(
+  vtkPixelExtentIO.cxx
+  WRAP_EXCLUDE
+  )
+
 vtk_module_library(vtkIOLegacy ${Module_SRCS})
diff --git a/IO/Legacy/Testing/Cxx/CMakeLists.txt b/IO/Legacy/Testing/Cxx/CMakeLists.txt
index 8eadae4..532fb78 100644
--- a/IO/Legacy/Testing/Cxx/CMakeLists.txt
+++ b/IO/Legacy/Testing/Cxx/CMakeLists.txt
@@ -1,23 +1,2 @@
-set(MyTests ${MyTests}
-  TestLegacyCompositeDataReaderWriter.cxx
-)
-
-# Tests with data
-if(VTK_DATA_ROOT)
-  # Use the testing object factory, to reduce boilerplate code in tests.
-  include(vtkTestingObjectFactory)
-  vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-  set(TestsToRun ${Tests})
-  list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-  # Add all the executables
-  foreach(test ${TestsToRun})
-    get_filename_component(TName ${test} NAME_WE)
-      add_test(NAME ${vtk-module}-${TName}
-        COMMAND ${vtk-module}CxxTests ${TName}
-          -D ${VTK_DATA_ROOT}
-          -T ${VTK_TEST_OUTPUT_DIR}
-          -V Baseline/Parallel/${TName}.png)
-  endforeach ()
-endif()
+vtk_add_test_cxx(TestLegacyCompositeDataReaderWriter.cxx NO_VALID)
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/IO/Legacy/module.cmake b/IO/Legacy/module.cmake
index 95f962d..7185e27 100644
--- a/IO/Legacy/module.cmake
+++ b/IO/Legacy/module.cmake
@@ -6,6 +6,8 @@ vtk_module(vtkIOLegacy
     vtkCommonSystem
     vtkCommonMisc
     vtkIOCore
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkFiltersAMR
     vtkInteractionStyle
diff --git a/IO/Legacy/vtkCompositeDataReader.cxx b/IO/Legacy/vtkCompositeDataReader.cxx
index fd32ef6..6861bfe 100644
--- a/IO/Legacy/vtkCompositeDataReader.cxx
+++ b/IO/Legacy/vtkCompositeDataReader.cxx
@@ -31,6 +31,7 @@
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include "vtkUniformGrid.h"
 
+#include <vtksys/RegularExpression.hxx>
 #include <vtksys/SystemTools.hxx>
 #include <vtksys/ios/sstream>
 
@@ -279,6 +280,13 @@ bool vtkCompositeDataReader::ReadCompositeData(vtkMultiBlockDataSet* mb)
       }
     // eat up the "\n" and other whitespace at the end of CHILD <type>.
     this->ReadLine(line);
+    // if "line" has text enclosed in [] then that's the composite name.
+    vtksys::RegularExpression regEx("\\s*\\[(.*)\\]");
+    if (regEx.find(line))
+      {
+      std::string name = regEx.match(1);
+      mb->GetMetaData(cc)->Set(vtkCompositeDataSet::NAME(), name.c_str());
+      }
 
     if (type != -1)
       {
@@ -384,7 +392,7 @@ bool vtkCompositeDataReader::ReadCompositeData(vtkOverlappingAMR* oamr)
     }
   else
     {
-    if (!strncmp(this->LowerCase(line), "amrboxes", strlen("amrboxes")) == 0)
+    if (strncmp(this->LowerCase(line), "amrboxes", strlen("amrboxes")) != 0)
       {
       vtkErrorMacro("Failed to read AMRBOXES' line");
       }
@@ -526,6 +534,13 @@ bool vtkCompositeDataReader::ReadCompositeData(vtkMultiPieceDataSet* mp)
       }
     // eat up the "\n" and other whitespace at the end of CHILD <type>.
     this->ReadLine(line);
+    // if "line" has text enclosed in [] then that's the composite name.
+    vtksys::RegularExpression regEx("\\s*\\[(.*)\\]");
+    if (regEx.find(line))
+      {
+      std::string name = regEx.match(1);
+      mp->GetMetaData(cc)->Set(vtkCompositeDataSet::NAME(), name.c_str());
+      }
 
     if (type != -1)
       {
diff --git a/IO/Legacy/vtkCompositeDataWriter.cxx b/IO/Legacy/vtkCompositeDataWriter.cxx
index 16dcb7b..faa394a 100644
--- a/IO/Legacy/vtkCompositeDataWriter.cxx
+++ b/IO/Legacy/vtkCompositeDataWriter.cxx
@@ -154,7 +154,15 @@ bool vtkCompositeDataWriter::WriteCompositeData(ostream* fp,
   for (unsigned int cc=0; cc < mb->GetNumberOfBlocks(); cc++)
     {
     vtkDataObject* child = mb->GetBlock(cc);
-    *fp << "CHILD " << (child? child->GetDataObjectType() : -1) << "\n";
+    *fp << "CHILD " << (child? child->GetDataObjectType() : -1);
+    // add name if present.
+    if (mb->HasMetaData(cc) &&
+      mb->GetMetaData(cc)->Has(vtkCompositeDataSet::NAME()))
+      {
+      *fp << " [" << mb->GetMetaData(cc)->Get(vtkCompositeDataSet::NAME())
+          << "]";
+      }
+    *fp << "\n";
     if (child)
       {
       if (!this->WriteBlock(fp, child))
@@ -176,7 +184,16 @@ bool vtkCompositeDataWriter::WriteCompositeData(ostream* fp,
   for (unsigned int cc=0; cc < mp->GetNumberOfPieces(); cc++)
     {
     vtkDataObject* child = mp->GetPieceAsDataObject(cc);
-    *fp << "CHILD " << (child? child->GetDataObjectType() : -1) << "\n";
+    *fp << "CHILD " << (child? child->GetDataObjectType() : -1);
+    // add name if present.
+    if (mp->HasMetaData(cc) &&
+      mp->GetMetaData(cc)->Has(vtkCompositeDataSet::NAME()))
+      {
+      *fp << " [" << mp->GetMetaData(cc)->Get(vtkCompositeDataSet::NAME())
+          << "]";
+      }
+    *fp << "\n";
+
     if (child)
       {
       if (!this->WriteBlock(fp, child))
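
The composite reader/writer changes above round-trip block names stored
under vtkCompositeDataSet::NAME(). A minimal sketch of tagging a block
before writing; the empty vtkPolyData block and the output file name are
placeholders.

    #include "vtkCompositeDataSet.h"
    #include "vtkCompositeDataWriter.h"
    #include "vtkInformation.h"
    #include "vtkMultiBlockDataSet.h"
    #include "vtkNew.h"
    #include "vtkPolyData.h"

    int main(int, char*[])
    {
      vtkNew<vtkMultiBlockDataSet> mb;
      vtkNew<vtkPolyData> block0;
      mb->SetBlock(0, block0.GetPointer());
      mb->GetMetaData(0u)->Set(vtkCompositeDataSet::NAME(), "first block");

      vtkNew<vtkCompositeDataWriter> writer;
      writer->SetInputData(mb.GetPointer());
      writer->SetFileName("named_blocks.vtk");
      writer->Write();   // emits: CHILD <type> [first block]
      return 0;
    }
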
diff --git a/IO/Legacy/vtkDataReader.cxx b/IO/Legacy/vtkDataReader.cxx
index 4e3d292..dbf6fd4 100644
--- a/IO/Legacy/vtkDataReader.cxx
+++ b/IO/Legacy/vtkDataReader.cxx
@@ -1449,7 +1449,7 @@ vtkAbstractArray *vtkDataReader::ReadArray(const char *dataType, int numTuples,
       }
     }
 
-  else if ( ! strncmp(type, "char", 4) )
+  else if ( ! strcmp(type, "char") || !strcmp(type, "signed_char") )
     {
     array = vtkCharArray::New();
     array->SetNumberOfComponents(numComp);
diff --git a/IO/Legacy/vtkDataSetWriter.h b/IO/Legacy/vtkDataSetWriter.h
index ae8a398..4bff3ae 100644
--- a/IO/Legacy/vtkDataSetWriter.h
+++ b/IO/Legacy/vtkDataSetWriter.h
@@ -37,8 +37,8 @@ public:
   vtkDataSet* GetInput(int port);
 
 protected:
-  vtkDataSetWriter() {};
-  ~vtkDataSetWriter() {};
+  vtkDataSetWriter() {}
+  ~vtkDataSetWriter() {}
 
   void WriteData();
 
diff --git a/IO/Legacy/vtkDataWriter.cxx b/IO/Legacy/vtkDataWriter.cxx
index 5f56eef..bbb5ff2 100644
--- a/IO/Legacy/vtkDataWriter.cxx
+++ b/IO/Legacy/vtkDataWriter.cxx
@@ -1920,9 +1920,9 @@ void vtkDataWriter::CloseVTKFile(ostream *fp)
 
       delete [] this->OutputString;
       this->OutputStringLength = static_cast<int>(ostr->str().size());
-      this->OutputString = new char[ostr->str().size()];
+      this->OutputString = new char[this->OutputStringLength+1];
       memcpy(this->OutputString, ostr->str().c_str(),
-        this->OutputStringLength);
+        this->OutputStringLength+1);
       }
     delete fp;
     }
diff --git a/IO/Legacy/vtkGraphWriter.h b/IO/Legacy/vtkGraphWriter.h
index 9a22517..2bc8017 100644
--- a/IO/Legacy/vtkGraphWriter.h
+++ b/IO/Legacy/vtkGraphWriter.h
@@ -40,8 +40,8 @@ public:
   vtkGraph* GetInput(int port);
 
 protected:
-  vtkGraphWriter() {};
-  ~vtkGraphWriter() {};
+  vtkGraphWriter() {}
+  ~vtkGraphWriter() {}
 
   void WriteData();
 
diff --git a/IO/Legacy/vtkPixelExtentIO.cxx b/IO/Legacy/vtkPixelExtentIO.cxx
new file mode 100644
index 0000000..c00d6d1
--- /dev/null
+++ b/IO/Legacy/vtkPixelExtentIO.cxx
@@ -0,0 +1,200 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPixelExtentIO.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkPixelExtentIO.h"
+
+#include "vtkUnstructuredGrid.h"
+#include "vtkCellType.h"
+#include "vtkCellArray.h"
+#include "vtkPoints.h"
+#include "vtkCellData.h"
+#include "vtkUnsignedCharArray.h"
+#include "vtkIntArray.h"
+#include "vtkIdTypeArray.h"
+#include "vtkFloatArray.h"
+#include "vtkDataSetWriter.h"
+
+using std::deque;
+
+// ----------------------------------------------------------------------------
+vtkUnstructuredGrid &operator<<(
+        vtkUnstructuredGrid &data,
+        const vtkPixelExtent &ext)
+{
+  // initialize empty dataset
+  if (data.GetNumberOfCells()<1)
+    {
+    vtkPoints *opts=vtkPoints::New();
+    data.SetPoints(opts);
+    opts->Delete();
+
+    vtkCellArray *cells=vtkCellArray::New();
+    vtkUnsignedCharArray *types=vtkUnsignedCharArray::New();
+    vtkIdTypeArray *locs=vtkIdTypeArray::New();
+
+    data.SetCells(types,locs,cells);
+
+    cells->Delete();
+    types->Delete();
+    locs->Delete();
+    }
+
+  // cell to node
+  vtkPixelExtent next(ext);
+  next.CellToNode();
+
+  // build the cell
+  vtkFloatArray *pts=dynamic_cast<vtkFloatArray*>(data.GetPoints()->GetData());
+  vtkIdType ptId=pts->GetNumberOfTuples();
+  float *ppts=pts->WritePointer(3*ptId,12);
+
+  int id[12]={
+        0,2,-1,
+        1,2,-1,
+        1,3,-1,
+        0,3,-1};
+
+  vtkIdType ptIds[4];
+
+  for (int i=0; i<4; ++i)
+    {
+    ppts[3*i+2]=0.0;
+    for (int j=0; j<2; ++j)
+      {
+      int q=3*i+j;
+      ppts[q]=next[id[q]];
+      }
+    ptIds[i]=ptId+i;
+    }
+
+  data.InsertNextCell(VTK_QUAD,4,ptIds);
+
+  return data;
+}
+
+// ----------------------------------------------------------------------------
+void vtkPixelExtentIO::Write(
+      int commRank,
+      const char *fileName,
+      const deque<deque<vtkPixelExtent> >&exts)
+{
+  if (commRank!=0)
+    {
+    // only rank 0 writes
+    return;
+    }
+
+  vtkUnstructuredGrid *data=vtkUnstructuredGrid::New();
+
+  vtkIntArray *rank=vtkIntArray::New();
+  rank->SetName("rank");
+  data->GetCellData()->AddArray(rank);
+  rank->Delete();
+
+  vtkIntArray *block=vtkIntArray::New();
+  block->SetName("block");
+  data->GetCellData()->AddArray(block);
+  block->Delete();
+
+  size_t nRanks=exts.size();
+
+  for (size_t i=0; i<nRanks; ++i)
+    {
+    size_t nBlocks = exts[i].size();
+    for (size_t j=0; j<nBlocks; ++j)
+      {
+      const vtkPixelExtent &ext = exts[i][j];
+      *data << ext;
+
+      rank->InsertNextTuple1(i);
+      block->InsertNextTuple1(j);
+      }
+    }
+
+  vtkDataSetWriter *idw=vtkDataSetWriter::New();
+  idw->SetFileName(fileName);
+  idw->SetInputData(data);
+  idw->Write();
+  idw->Delete();
+
+  data->Delete();
+}
+
+// ----------------------------------------------------------------------------
+void vtkPixelExtentIO::Write(
+      int commRank,
+      const char *fileName,
+      const deque<vtkPixelExtent> &exts)
+{
+  if (commRank!=0)
+    {
+    // only rank 0 will write
+    return;
+    }
+
+  vtkUnstructuredGrid *data=vtkUnstructuredGrid::New();
+
+  vtkIntArray *rank=vtkIntArray::New();
+  rank->SetName("rank");
+  data->GetCellData()->AddArray(rank);
+  rank->Delete();
+
+  int nExts=static_cast<int>(exts.size());
+  rank->SetNumberOfTuples(nExts);
+
+  int *pRank = rank->GetPointer(0);
+
+  for (int i=0; i<nExts; ++i)
+    {
+    const vtkPixelExtent &ext = exts[i];
+    *data << ext;
+    pRank[i] = i;
+    }
+
+  vtkDataSetWriter *idw=vtkDataSetWriter::New();
+  idw->SetFileName(fileName);
+  idw->SetInputData(data);
+  idw->Write();
+  idw->Delete();
+
+  data->Delete();
+}
+
+// ----------------------------------------------------------------------------
+void vtkPixelExtentIO::Write(
+      int commRank,
+      const char *fileName,
+      const vtkPixelExtent &ext)
+{
+  vtkUnstructuredGrid *data=vtkUnstructuredGrid::New();
+
+  vtkIntArray *rank=vtkIntArray::New();
+  rank->SetName("rank");
+  data->GetCellData()->AddArray(rank);
+  rank->Delete();
+
+  rank->SetNumberOfTuples(1);
+  int *pRank = rank->GetPointer(0);
+
+  *data << ext;
+  pRank[0] = commRank;
+
+  vtkDataSetWriter *idw=vtkDataSetWriter::New();
+  idw->SetFileName(fileName);
+  idw->SetInputData(data);
+  idw->Write();
+  idw->Delete();
+
+  data->Delete();
+}
diff --git a/IO/Legacy/vtkPixelExtentIO.h b/IO/Legacy/vtkPixelExtentIO.h
new file mode 100644
index 0000000..17b8145
--- /dev/null
+++ b/IO/Legacy/vtkPixelExtentIO.h
@@ -0,0 +1,75 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPixelExtentIO.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPixelExtentIO - I/O routines for vtkPixelExtent
+// .SECTION Description
+// A small collection of I/O routines that can write vtkPixelExtent's
+// or collections of them to disk for visualization as unstructured
+// grids.
+#ifndef __vtkPixelExtentIO_h
+#define __vtkPixelExtentIO_h
+
+#include "vtkIOLegacyModule.h" // for export
+#include "vtkPixelExtent.h" // for pixel extent
+#include <deque> // for std::deque
+
+class vtkUnstructuredGrid;
+
+class VTKIOLEGACY_EXPORT vtkPixelExtentIO
+{
+public:
+  // Description:
+  // Writes a deque of extents for each MPI rank to disk
+  // as an unstructured grid. Each extent is converted to
+  // a QUAD cell. Rank is encoded in a cell data array.
+  // It's assumed that the data is duplicated on all
+  // ranks, thus only rank 0 writes the data to disk.
+  static
+  void Write(
+        int commRank,
+        const char *fileName,
+        const std::deque<std::deque<vtkPixelExtent> >&exts);
+
+  // Description:
+  // Writes an extent for each MPI rank to disk as an
+  // unstructured grid. It's expected that the index into
+  // the deque identifies the rank. Each extent is converted
+  // to a QUAD cell. Rank is encoded in a cell data array.
+  // It's assumed that the data is duplicated on all
+  // ranks, thus only rank 0 writes the data to disk.
+  static
+  void Write(
+        int commRank,
+        const char *fileName,
+        const std::deque<vtkPixelExtent> &exts);
+
+  // Description:
+  // Write an extent per MPI rank to disk. All ranks
+  // write. It's assumed that each rank passes a unique
+  // filename.
+  static
+  void Write(
+        int commRank,
+        const char *fileName,
+        const vtkPixelExtent &ext);
+};
+
+
+// Description:
+// Insert the extent into an unstructured grid.
+VTKIOLEGACY_EXPORT
+vtkUnstructuredGrid &operator<<(vtkUnstructuredGrid &data, const vtkPixelExtent &ext);
+
+#endif
+// VTK-HeaderTest-Exclude: vtkPixelExtentIO.h
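
A minimal sketch of the new vtkPixelExtentIO helper. The extent values and
output file name are arbitrary, and indexing the extent as [i-min, i-max,
j-min, j-max] through operator[] is assumed from the implementation above
rather than taken from vtkPixelExtent's documentation.

    #include "vtkPixelExtent.h"
    #include "vtkPixelExtentIO.h"
    #include <deque>

    int main(int, char*[])
    {
      vtkPixelExtent ext;
      ext[0] = 0; ext[1] = 63;   // i (cell) range
      ext[2] = 0; ext[3] = 31;   // j (cell) range

      std::deque<vtkPixelExtent> exts(1, ext);

      // rank 0 writes each extent as a QUAD cell in an unstructured grid
      vtkPixelExtentIO::Write(0, "extents.vtk", exts);
      return 0;
    }
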
diff --git a/IO/Legacy/vtkPolyDataWriter.h b/IO/Legacy/vtkPolyDataWriter.h
index 3af6201..7030690 100644
--- a/IO/Legacy/vtkPolyDataWriter.h
+++ b/IO/Legacy/vtkPolyDataWriter.h
@@ -40,8 +40,8 @@ public:
   vtkPolyData* GetInput(int port);
 
 protected:
-  vtkPolyDataWriter() {};
-  ~vtkPolyDataWriter() {};
+  vtkPolyDataWriter() {}
+  ~vtkPolyDataWriter() {}
 
   void WriteData();
 
diff --git a/IO/Legacy/vtkRectilinearGridWriter.h b/IO/Legacy/vtkRectilinearGridWriter.h
index 319d69c..79b6e20 100644
--- a/IO/Legacy/vtkRectilinearGridWriter.h
+++ b/IO/Legacy/vtkRectilinearGridWriter.h
@@ -41,8 +41,8 @@ public:
   vtkRectilinearGrid* GetInput(int port);
 
 protected:
-  vtkRectilinearGridWriter() {};
-  ~vtkRectilinearGridWriter() {};
+  vtkRectilinearGridWriter() {}
+  ~vtkRectilinearGridWriter() {}
 
   void WriteData();
 
diff --git a/IO/Legacy/vtkStructuredGridWriter.h b/IO/Legacy/vtkStructuredGridWriter.h
index 4d21c1e..0e383f7 100644
--- a/IO/Legacy/vtkStructuredGridWriter.h
+++ b/IO/Legacy/vtkStructuredGridWriter.h
@@ -41,8 +41,8 @@ public:
   vtkStructuredGrid* GetInput(int port);
 
 protected:
-  vtkStructuredGridWriter() {};
-  ~vtkStructuredGridWriter() {};
+  vtkStructuredGridWriter() {}
+  ~vtkStructuredGridWriter() {}
 
   void WriteData();
   int WriteBlanking(ostream *fp, vtkStructuredGrid *ds);
diff --git a/IO/Legacy/vtkStructuredPointsWriter.h b/IO/Legacy/vtkStructuredPointsWriter.h
index 5d26a88..5ab0d34 100644
--- a/IO/Legacy/vtkStructuredPointsWriter.h
+++ b/IO/Legacy/vtkStructuredPointsWriter.h
@@ -40,8 +40,8 @@ public:
   vtkImageData* GetInput(int port);
 
 protected:
-  vtkStructuredPointsWriter() {};
-  ~vtkStructuredPointsWriter() {};
+  vtkStructuredPointsWriter() {}
+  ~vtkStructuredPointsWriter() {}
 
   void WriteData();
 
diff --git a/IO/Legacy/vtkTableWriter.h b/IO/Legacy/vtkTableWriter.h
index 655907b..44e0175 100644
--- a/IO/Legacy/vtkTableWriter.h
+++ b/IO/Legacy/vtkTableWriter.h
@@ -39,8 +39,8 @@ public:
   vtkTable* GetInput(int port);
 
 protected:
-  vtkTableWriter() {};
-  ~vtkTableWriter() {};
+  vtkTableWriter() {}
+  ~vtkTableWriter() {}
 
   void WriteData();
 
diff --git a/IO/Legacy/vtkTreeReader.cxx b/IO/Legacy/vtkTreeReader.cxx
index 40cc6e2..d9f3f83 100644
--- a/IO/Legacy/vtkTreeReader.cxx
+++ b/IO/Legacy/vtkTreeReader.cxx
@@ -174,7 +174,7 @@ int vtkTreeReader::RequestData(
       continue;
       }
 
-    if(!strncmp(this->LowerCase(line), "edges", 4))
+    if(!strncmp(this->LowerCase(line), "edges", 5))
       {
       int edge_count = 0;
       if(!this->Read(&edge_count))
diff --git a/IO/Legacy/vtkTreeWriter.cxx b/IO/Legacy/vtkTreeWriter.cxx
index 7a165c2..5b6f4ef 100644
--- a/IO/Legacy/vtkTreeWriter.cxx
+++ b/IO/Legacy/vtkTreeWriter.cxx
@@ -28,17 +28,13 @@
 
 vtkStandardNewMacro(vtkTreeWriter);
 
-void vtkTreeWriter::WriteEdges(ostream& Stream, vtkTree* Tree, vtkIdType Vertex)
+void vtkTreeWriter::WriteEdges(ostream& Stream, vtkTree* Tree)
 {
-  if (Vertex != Tree->GetRoot())
+  for (vtkIdType e = 0; e < Tree->GetNumberOfEdges(); ++e)
     {
-    Stream << Vertex << " " << Tree->GetParent(Vertex) << "\n";
-    }
-
-  vtkIdType count = Tree->GetNumberOfChildren(Vertex);
-  for(vtkIdType child = 0; child != count; ++child)
-    {
-    WriteEdges(Stream, Tree, Tree->GetChild(Vertex, child));
+    vtkIdType parent = Tree->GetSourceVertex(e);
+    vtkIdType child = Tree->GetTargetVertex(e);
+    Stream << child << " " << parent << "\n";
     }
 }
 
@@ -85,7 +81,7 @@ void vtkTreeWriter::WriteData()
     {
     const vtkIdType edge_count = input->GetNumberOfEdges();
     *fp << "EDGES " << edge_count << "\n";
-    this->WriteEdges(*fp, input, input->GetRoot());
+    this->WriteEdges(*fp, input);
     }
   if (!error_occurred && !this->WriteEdgeData(fp, input))
     {
diff --git a/IO/Legacy/vtkTreeWriter.h b/IO/Legacy/vtkTreeWriter.h
index 6bce1fc..d5a1088 100644
--- a/IO/Legacy/vtkTreeWriter.h
+++ b/IO/Legacy/vtkTreeWriter.h
@@ -40,8 +40,8 @@ public:
   vtkTree* GetInput(int port);
 
 protected:
-  vtkTreeWriter() {};
-  ~vtkTreeWriter() {};
+  vtkTreeWriter() {}
+  ~vtkTreeWriter() {}
 
   void WriteData();
 
@@ -51,7 +51,7 @@ private:
   vtkTreeWriter(const vtkTreeWriter&);  // Not implemented.
   void operator=(const vtkTreeWriter&);  // Not implemented.
 
-  void WriteEdges(ostream& Stream, vtkTree* Tree, vtkIdType Vertex);
+  void WriteEdges(ostream& Stream, vtkTree* Tree);
 };
 
 #endif
diff --git a/IO/Legacy/vtkUnstructuredGridWriter.h b/IO/Legacy/vtkUnstructuredGridWriter.h
index f67a484..7611cc6 100644
--- a/IO/Legacy/vtkUnstructuredGridWriter.h
+++ b/IO/Legacy/vtkUnstructuredGridWriter.h
@@ -39,8 +39,8 @@ public:
   vtkUnstructuredGrid* GetInput(int port);
 
 protected:
-  vtkUnstructuredGridWriter() {};
-  ~vtkUnstructuredGridWriter() {};
+  vtkUnstructuredGridWriter() {}
+  ~vtkUnstructuredGridWriter() {}
 
   void WriteData();
 
diff --git a/IO/MINC/Testing/Data/Baseline/TestMINCImageReader.png.md5 b/IO/MINC/Testing/Data/Baseline/TestMINCImageReader.png.md5
new file mode 100644
index 0000000..ee33c02
--- /dev/null
+++ b/IO/MINC/Testing/Data/Baseline/TestMINCImageReader.png.md5
@@ -0,0 +1 @@
+b53ace4d926dc8bca3078fcf9f83265f
diff --git a/IO/MINC/Testing/Data/Baseline/TestMINCImageWriter.png.md5 b/IO/MINC/Testing/Data/Baseline/TestMINCImageWriter.png.md5
new file mode 100644
index 0000000..ee33c02
--- /dev/null
+++ b/IO/MINC/Testing/Data/Baseline/TestMINCImageWriter.png.md5
@@ -0,0 +1 @@
+b53ace4d926dc8bca3078fcf9f83265f
diff --git a/IO/MINC/Testing/Data/Baseline/TestMNIObjects.png.md5 b/IO/MINC/Testing/Data/Baseline/TestMNIObjects.png.md5
new file mode 100644
index 0000000..c25fdf9
--- /dev/null
+++ b/IO/MINC/Testing/Data/Baseline/TestMNIObjects.png.md5
@@ -0,0 +1 @@
+3531c0cbe067092ec39c4ec08d1a514c
diff --git a/IO/MINC/Testing/Data/Baseline/TestMNITagPoints.png.md5 b/IO/MINC/Testing/Data/Baseline/TestMNITagPoints.png.md5
new file mode 100644
index 0000000..cfcc163
--- /dev/null
+++ b/IO/MINC/Testing/Data/Baseline/TestMNITagPoints.png.md5
@@ -0,0 +1 @@
+7a664d7609f76ad2871110f412691c7e
diff --git a/IO/MINC/Testing/Data/Baseline/TestMNITransforms.png.md5 b/IO/MINC/Testing/Data/Baseline/TestMNITransforms.png.md5
new file mode 100644
index 0000000..152855e
--- /dev/null
+++ b/IO/MINC/Testing/Data/Baseline/TestMNITransforms.png.md5
@@ -0,0 +1 @@
+693a48f1b44a7080a8c53637ce9baaed
diff --git a/IO/MINC/Testing/Python/CMakeLists.txt b/IO/MINC/Testing/Python/CMakeLists.txt
index bb320b0..e87b335 100644
--- a/IO/MINC/Testing/Python/CMakeLists.txt
+++ b/IO/MINC/Testing/Python/CMakeLists.txt
@@ -1,5 +1,5 @@
-add_test_python(TestMNIObjects.py Hybrid)
-add_test_python(TestMNITagPoints.py Hybrid)
-add_test_python(TestMNITransforms.py Hybrid)
-add_test_python(TestMINCImageReader.py IO)
-add_test_python(TestMINCImageWriter.py IO)
+vtk_add_test_python(TestMNIObjects.py)
+vtk_add_test_python(TestMNITagPoints.py)
+vtk_add_test_python(TestMNITransforms.py)
+vtk_add_test_python(TestMINCImageReader.py)
+vtk_add_test_python(TestMINCImageWriter.py)
diff --git a/IO/MINC/Testing/Tcl/CMakeLists.txt b/IO/MINC/Testing/Tcl/CMakeLists.txt
index 56775fd..6366f12 100644
--- a/IO/MINC/Testing/Tcl/CMakeLists.txt
+++ b/IO/MINC/Testing/Tcl/CMakeLists.txt
@@ -1,7 +1,5 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestMINCImageReader IO)
-  add_test_tcl(TestMINCImageWriter IO)
-  add_test_tcl(TestMNIObjects Hybrid)
-  add_test_tcl(TestMNITagPoints Hybrid)
-  add_test_tcl(TestMNITransforms Hybrid)
-endif()
+vtk_add_test_tcl(TestMINCImageReader)
+vtk_add_test_tcl(TestMINCImageWriter)
+vtk_add_test_tcl(TestMNIObjects)
+vtk_add_test_tcl(TestMNITagPoints)
+vtk_add_test_tcl(TestMNITransforms)
diff --git a/IO/MINC/module.cmake b/IO/MINC/module.cmake
index 21a2585..c5c4e92 100644
--- a/IO/MINC/module.cmake
+++ b/IO/MINC/module.cmake
@@ -6,5 +6,8 @@ vtk_module(vtkIOMINC
     vtkCommonExecutionModel
     vtkRenderingCore
     vtkFiltersHybrid
+    vtkIOImage
+  PRIVATE_DEPENDS
+    vtksys
     vtknetcdf
   )
diff --git a/IO/MINC/vtkMNITagPointWriter.h b/IO/MINC/vtkMNITagPointWriter.h
index 176c496..2f51b47 100644
--- a/IO/MINC/vtkMNITagPointWriter.h
+++ b/IO/MINC/vtkMNITagPointWriter.h
@@ -152,7 +152,7 @@ protected:
   vtkIntArray *PatientIds;
   char *Comments;
 
-  virtual void WriteData() {};
+  virtual void WriteData() {}
   virtual void WriteData(vtkPointSet *inputs[2]);
 
   int FillInputPortInformation(int port, vtkInformation *info);
diff --git a/IO/MPIImage/CMakeLists.txt b/IO/MPIImage/CMakeLists.txt
index b73c05f..10e1ae4 100644
--- a/IO/MPIImage/CMakeLists.txt
+++ b/IO/MPIImage/CMakeLists.txt
@@ -1,6 +1,4 @@
-find_package(MPI REQUIRED)
-include_directories(${MPI_INCLUDE_PATH})
-add_definitionS("-DMPICH_IGNORE_CXX_SEEK")
+include(vtkMPI)
 
 set(Module_SRCS
   vtkPNrrdReader.cxx
@@ -38,3 +36,4 @@ configure_file(${VTK_CMAKE_DIR}/vtkObjectFactory.cxx.in
   ${CMAKE_CURRENT_BINARY_DIR}/${vtk-module}ObjectFactory.cxx)
 
 vtk_module_library(vtkIOMPIImage ${Module_SRCS})
+vtk_mpi_link(${vtk-module})
diff --git a/IO/MPIImage/Testing/Cxx/CMakeLists.txt b/IO/MPIImage/Testing/Cxx/CMakeLists.txt
index 71aca69..38d30c2 100644
--- a/IO/MPIImage/Testing/Cxx/CMakeLists.txt
+++ b/IO/MPIImage/Testing/Cxx/CMakeLists.txt
@@ -1,14 +1,14 @@
-if(VTK_DATA_ROOT)
-  find_package(MPI REQUIRED)
-  include_directories(${MPI_INCLUDE_PATH})
-  vtk_module_test_executable(ParallelIsoTest ParallelIso.cxx)
-  if(VTK_MPI_MAX_NUMPROCS GREATER 1)
-     add_test(NAME ParallelIso-image
-       COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS}
-       $<TARGET_FILE:ParallelIsoTest>
-       -D ${VTK_DATA_ROOT}
-       -T ${VTK_TEST_OUTPUT_DIR}
-       -V Baseline/Parallel/ParallelIso.cxx.png
-       ${VTK_MPI_POSTFLAGS})
-  endif()
+include(vtkMPI)
+vtk_module_test_executable(ParallelIsoTest ParallelIso.cxx)
+if(VTK_MPI_MAX_NUMPROCS GREATER 1)
+  ExternalData_add_test(VTKData
+     NAME ParallelIso-image
+     COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} 2 ${MPIEXEC_PREFLAGS}
+     $<TARGET_FILE:ParallelIsoTest>
+     -D ${VTK_TEST_DATA_DIR}
+     -T ${VTK_TEST_OUTPUT_DIR}
+     -V DATA{../Data/Baseline/ParallelIso.cxx.png,:}
+     ${VTK_MPI_POSTFLAGS})
 endif()
+
+vtk_mpi_link(ParallelIsoTest)
diff --git a/IO/MPIImage/Testing/Data/Baseline/ParallelIso.cxx.png.md5 b/IO/MPIImage/Testing/Data/Baseline/ParallelIso.cxx.png.md5
new file mode 100644
index 0000000..26162c5
--- /dev/null
+++ b/IO/MPIImage/Testing/Data/Baseline/ParallelIso.cxx.png.md5
@@ -0,0 +1 @@
+e043dec70849a043f51b0df319ce6fec
diff --git a/IO/MPIImage/module.cmake b/IO/MPIImage/module.cmake
index a71a485..deea282 100644
--- a/IO/MPIImage/module.cmake
+++ b/IO/MPIImage/module.cmake
@@ -6,6 +6,8 @@ vtk_module(vtkIOMPIImage
   DEPENDS
     vtkParallelMPI
     vtkIOImage
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkTestingCore
     vtkTestingRendering
diff --git a/IO/MPIParallel/CMakeLists.txt b/IO/MPIParallel/CMakeLists.txt
index 430a8c4..98f0fb7 100644
--- a/IO/MPIParallel/CMakeLists.txt
+++ b/IO/MPIParallel/CMakeLists.txt
@@ -1,6 +1,4 @@
-find_package(MPI REQUIRED)
-include_directories(${MPI_INCLUDE_PATH})
-add_definitionS("-DMPICH_IGNORE_CXX_SEEK")
+include(vtkMPI)
 
 # We don't build the parallel WindBlade reader on windows
 # because there's problems with the MPI_File_open()
@@ -28,7 +26,6 @@ if(WIN32)
   set( vtk_module_overrides )
 endif()
 
-
 foreach(_class ${vtk_module_overrides})
   set(_override vtkP${_class})
   set(_vtk_override_includes "${_vtk_override_includes} #include \"${_override}.h\"")
@@ -48,4 +45,5 @@ configure_file(${VTK_CMAKE_DIR}/vtkObjectFactory.h.in
 configure_file(${VTK_CMAKE_DIR}/vtkObjectFactory.cxx.in
   ${CMAKE_CURRENT_BINARY_DIR}/${vtk-module}ObjectFactory.cxx)
 
-vtk_module_library(vtkIOMPIParallel ${Module_SRCS})
+vtk_module_library(${vtk-module} ${Module_SRCS})
+vtk_mpi_link(${vtk-module})
diff --git a/IO/MPIParallel/Testing/Cxx/CMakeLists.txt b/IO/MPIParallel/Testing/Cxx/CMakeLists.txt
index e5f2e74..5072d4b 100644
--- a/IO/MPIParallel/Testing/Cxx/CMakeLists.txt
+++ b/IO/MPIParallel/Testing/Cxx/CMakeLists.txt
@@ -5,15 +5,24 @@ if (DEFINED CMAKE_WORDS_BIGENDIAN)
   endif()
 endif()
 
-if (VTK_LARGE_DATA_ROOT AND _known_little_endian AND NOT WIN32)
+if (VTK_USE_LARGE_DATA AND _known_little_endian AND NOT WIN32)
   find_package(MPI REQUIRED)
   include_directories(${MPI_INCLUDE_PATH})
   vtk_module_test_executable(${vtk-module}CxxTests TestPWindBladeReader.cxx)
-  add_test(NAME ${vtk-module}Cxx-TestPWindBladeReader
+
+  # Tell ExternalData to fetch test input at build time.
+  ExternalData_Expand_Arguments(VTKData _
+    "DATA{${VTK_TEST_INPUT_DIR}/WindBladeReader/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/WindBladeReader/field/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/WindBladeReader/turbine/,REGEX:.*}"
+    )
+
+  ExternalData_add_test(VTKData
+    NAME ${vtk-module}Cxx-TestPWindBladeReader
     COMMAND ${MPIEXEC} ${MPIEXEC_NUMPROC_FLAG} 1 ${MPIEXEC_PREFLAGS}
     $<TARGET_FILE:${vtk-module}CxxTests> TestPWindBladeReader
-    -D ${VTK_LARGE_DATA_ROOT}
+    -D ${VTK_TEST_DATA_DIR}
     -T ${VTK_TEST_OUTPUT_DIR}
-    -V Baseline/Parallel/TestWindBladeReader.png
+    -V DATA{../Data/Baseline/TestWindBladeReader.png,:}
     ${VTK_MPI_POSTFLAGS})
 endif()
diff --git a/IO/MPIParallel/Testing/Cxx/TestPWindBladeReader.cxx b/IO/MPIParallel/Testing/Cxx/TestPWindBladeReader.cxx
index bbe3e52..4e9fc71 100644
--- a/IO/MPIParallel/Testing/Cxx/TestPWindBladeReader.cxx
+++ b/IO/MPIParallel/Testing/Cxx/TestPWindBladeReader.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 // .NAME Test of vtkPWindBladeReader
 // .SECTION Description
-// Tests the vtkPWindBladeReader.  Requires VTKLargeData to be set.
+// Tests the vtkPWindBladeReader.
 
 #include "vtkPWindBladeReader.h"
 
diff --git a/IO/MPIParallel/Testing/Data/Baseline/TestWindBladeReader.png.md5 b/IO/MPIParallel/Testing/Data/Baseline/TestWindBladeReader.png.md5
new file mode 100644
index 0000000..74a7843
--- /dev/null
+++ b/IO/MPIParallel/Testing/Data/Baseline/TestWindBladeReader.png.md5
@@ -0,0 +1 @@
+4f8e2ccd32864a9c79261b8ec85f944e
diff --git a/IO/MPIParallel/Testing/Data/Baseline/TestWindBladeReader_1.png.md5 b/IO/MPIParallel/Testing/Data/Baseline/TestWindBladeReader_1.png.md5
new file mode 100644
index 0000000..0ad95f5
--- /dev/null
+++ b/IO/MPIParallel/Testing/Data/Baseline/TestWindBladeReader_1.png.md5
@@ -0,0 +1 @@
+346e35b1c0eeee3dde7544d329542236
diff --git a/IO/MPIParallel/module.cmake b/IO/MPIParallel/module.cmake
index 40d7250..3ed6067 100644
--- a/IO/MPIParallel/module.cmake
+++ b/IO/MPIParallel/module.cmake
@@ -6,6 +6,8 @@ vtk_module(vtkIOMPIParallel
   DEPENDS
     vtkIOGeometry
     vtkParallelMPI
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkRenderingOpenGL
     vtkTestingRendering
diff --git a/IO/Movie/CMakeLists.txt b/IO/Movie/CMakeLists.txt
index a8f546b..5c94606 100644
--- a/IO/Movie/CMakeLists.txt
+++ b/IO/Movie/CMakeLists.txt
@@ -37,5 +37,5 @@ set(vtkIOMovie_HDRS
 vtk_module_library(vtkIOMovie ${Module_SRCS})
 
 if(WIN32 AND VTK_USE_VIDEO_FOR_WINDOWS)
-  target_link_libraries(vtkIOMovie vfw32)
+  target_link_libraries(vtkIOMovie LINK_PRIVATE vfw32)
 endif()
diff --git a/IO/Movie/Testing/Cxx/CMakeLists.txt b/IO/Movie/Testing/Cxx/CMakeLists.txt
index 50c67cd..c619bb4 100644
--- a/IO/Movie/Testing/Cxx/CMakeLists.txt
+++ b/IO/Movie/Testing/Cxx/CMakeLists.txt
@@ -8,29 +8,9 @@ if(vtkIOMovie_vtkoggtheora)
   list(APPEND TEST_SRC TestOggTheoraWriter.cxx)
 endif()
 
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(NO_VALID
   # TestMovieWriter.cxx           # fixme (deps not satisfied)
   ${TEST_SRC}
-  EXTRA_INCLUDE vtkTestDriver.h
-)
+  )
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -T ${VTK_TEST_OUTPUT_DIR})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/Movie/vtkOggTheoraWriter.cxx b/IO/Movie/vtkOggTheoraWriter.cxx
index 368afa5..238305e 100644
--- a/IO/Movie/vtkOggTheoraWriter.cxx
+++ b/IO/Movie/vtkOggTheoraWriter.cxx
@@ -180,8 +180,7 @@ int vtkOggTheoraWriterInternal::Start()
     // the stride is in bytes
     this->thImage[i].stride = this->thImage[i].width*sizeof(unsigned char);
     // make sure there's nothing left laying around...
-    if (this->thImage[i].data)
-      delete[] this->thImage[i].data;
+    delete[] this->thImage[i].data;
     // allocate the image plane
     size_t siz = this->thImage[i].width * this->thImage[i].height;
     this->thImage[i].data   = new unsigned char[siz];
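
The guard removed above relies on a language guarantee: applying delete (or delete[]) to a null pointer is a no-op in standard C++, so the explicit check adds nothing. The same simplification appears again below in vtkPLYWriter and vtkPExodusIIReader. A minimal standalone sketch of the idiom (plain C++, not VTK code):

    #include <cstddef>

    int main()
    {
      unsigned char* data = NULL;    // nothing allocated yet
      delete[] data;                 // deleting a null pointer is a well-defined no-op
      data = new unsigned char[64];  // allocate the image plane
      delete[] data;                 // normal cleanup, no guard needed
      return 0;
    }
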
diff --git a/IO/MySQL/CMakeLists.txt b/IO/MySQL/CMakeLists.txt
index d300a18..77ff445 100644
--- a/IO/MySQL/CMakeLists.txt
+++ b/IO/MySQL/CMakeLists.txt
@@ -14,4 +14,4 @@ include_directories(${MYSQL_INCLUDE_DIRECTORIES})
 
 vtk_module_library(vtkIOMySQL ${MySQL_SRCS})
 
-target_link_libraries(vtkIOMySQL ${MYSQL_LIBRARY} ${MYSQL_EXTRA_LIBRARIES})
+target_link_libraries(vtkIOMySQL LINK_PRIVATE ${MYSQL_LIBRARY} ${MYSQL_EXTRA_LIBRARIES})
diff --git a/IO/MySQL/Testing/Cxx/CMakeLists.txt b/IO/MySQL/Testing/Cxx/CMakeLists.txt
index c369611..3c9cf23 100644
--- a/IO/MySQL/Testing/Cxx/CMakeLists.txt
+++ b/IO/MySQL/Testing/Cxx/CMakeLists.txt
@@ -4,30 +4,15 @@ if(VTK_MYSQL_TEST_URL)
 
   include_directories(${CMAKE_CURRENT_BINARY_DIR})
 
-  create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-    TestMySQLDatabase.cxx
-    TestMySQLTableReadWrite.cxx
-    EXTRA_INCLUDE vtkTestDriver.h
-  )
-
-  vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-  set(TestsToRun ${Tests})
-  list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-  add_test(NAME ${vtk-module}Cxx-TestMySQLDatabase
-           COMMAND ${vtk-module}CxxTests TestMySQLDatabase)
-
-  if(VTK_DATA_ROOT)
-    if(WIN32)
-      add_test(NAME ${vtk-module}Cxx-TestMySQLTableReadWrite
-               COMMAND ${vtk-module}CxxTests TestMySQLTableReadWrite
-                       ${VTK_DATA_ROOT}/Baseline/IO/simple_table_win.vtk)
-    else()
-      add_test(NAME ${vtk-module}Cxx-TestMySQLTableReadWrite
-               COMMAND ${vtk-module}CxxTests TestMySQLTableReadWrite
-                       ${VTK_DATA_ROOT}/Baseline/IO/simple_table.vtk)
-    endif()
+  vtk_add_test_cxx(TestMySQLDatabase.cxx NO_DATA NO_VALID)
+  if(WIN32)
+    set(TestMySQLTableReadWrite_ARGS
+      DATA{../Data/Input/simple_table_win.vtk})
+  else()
+    set(TestMySQLTableReadWrite_ARGS
+      DATA{../Data/Input/simple_table.vtk})
   endif()
+  vtk_add_test_cxx(TestMySQLTableReadWrite.cxx NO_DATA NO_VALID)
 
+  vtk_test_cxx_executable(${vtk-module}CxxTests)
 endif()
diff --git a/IO/MySQL/Testing/Data/Input/simple_table.vtk.md5 b/IO/MySQL/Testing/Data/Input/simple_table.vtk.md5
new file mode 100644
index 0000000..ee95258
--- /dev/null
+++ b/IO/MySQL/Testing/Data/Input/simple_table.vtk.md5
@@ -0,0 +1 @@
+7fea0586c39ab4442ac43970cc78fedc
diff --git a/IO/MySQL/Testing/Data/Input/simple_table_win.vtk.md5 b/IO/MySQL/Testing/Data/Input/simple_table_win.vtk.md5
new file mode 100644
index 0000000..e313f80
--- /dev/null
+++ b/IO/MySQL/Testing/Data/Input/simple_table_win.vtk.md5
@@ -0,0 +1 @@
+be6f6306020f7cad635170ab61169044
diff --git a/IO/MySQL/module.cmake b/IO/MySQL/module.cmake
index 796fc4c..5300f2d 100644
--- a/IO/MySQL/module.cmake
+++ b/IO/MySQL/module.cmake
@@ -3,6 +3,8 @@ vtk_module(vtkIOMySQL
     vtkCommonDataModel
   IMPLEMENTS
     vtkIOSQL
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkTestingCore
     vtkTestingIOSQL
diff --git a/IO/MySQL/vtkMySQLDatabase.cxx b/IO/MySQL/vtkMySQLDatabase.cxx
index 6ab83c8..07c47d9 100644
--- a/IO/MySQL/vtkMySQLDatabase.cxx
+++ b/IO/MySQL/vtkMySQLDatabase.cxx
@@ -29,7 +29,7 @@
 #include <vtksys/SystemTools.hxx>
 #include <vtksys/ios/sstream>
 
-#include <assert.h>
+#include <cassert>
 
 #define VTK_MYSQL_DEFAULT_PORT 3306
 
diff --git a/IO/MySQL/vtkMySQLQuery.cxx b/IO/MySQL/vtkMySQLQuery.cxx
index 61f6ac5..1a93aa8 100644
--- a/IO/MySQL/vtkMySQLQuery.cxx
+++ b/IO/MySQL/vtkMySQLQuery.cxx
@@ -34,7 +34,7 @@
 # define LOWERCASE_COMPARE strcasecmp
 #endif
 
-#include <assert.h>
+#include <cassert>
 
 #include <vtksys/ios/sstream>
 #include <vtksys/stl/vector>
@@ -872,8 +872,9 @@ vtkMySQLQuery::DataValue(vtkIdType column)
 
     // Initialize base as a VTK_VOID value... only populate with
     // data when a column value is non-NULL.
+    bool isNull = !this->Internals->CurrentRow[column];
     vtkVariant base;
-    if ( this->Internals->CurrentRow[column] )
+    if ( !isNull )
       {
       // Make a string holding the data, including possible embedded null characters.
       vtkStdString s( this->Internals->CurrentRow[column],
@@ -890,17 +891,17 @@ vtkMySQLQuery::DataValue(vtkIdType column)
       case VTK_INT:
       case VTK_SHORT:
       case VTK_BIT:
-        return vtkVariant(base.ToInt());
+        return isNull ? base : vtkVariant(base.ToInt());
 
       case VTK_LONG:
       case VTK_UNSIGNED_LONG:
-        return vtkVariant(base.ToLong());
+        return isNull ? base : vtkVariant(base.ToLong());
 
       case VTK_FLOAT:
-        return vtkVariant(base.ToFloat());
+        return isNull ? base : vtkVariant(base.ToFloat());
 
       case VTK_DOUBLE:
-        return vtkVariant(base.ToDouble());
+        return isNull ? base : vtkVariant(base.ToDouble());
 
       case VTK_STRING:
         return base; // it's already a string
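
The isNull handling above keeps SQL NULL columns from being silently converted to numeric zero: a default-constructed vtkVariant is invalid, but wrapping its ToInt()/ToDouble() result used to yield a valid variant holding 0. A minimal sketch of the distinction, assuming only vtkCommonCore is available; the values are illustrative and not taken from vtkMySQLQuery:

    #include <vtkVariant.h>
    #include <iostream>

    int main()
    {
      vtkVariant base;                     // invalid variant, standing in for a NULL column
      vtkVariant converted(base.ToInt());  // previous behaviour: NULL silently became a valid 0

      std::cout << "base valid: " << base.IsValid()              // 0
                << ", converted valid: " << converted.IsValid()  // 1
                << std::endl;
      return 0;
    }
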
diff --git a/IO/NetCDF/CMakeLists.txt b/IO/NetCDF/CMakeLists.txt
index 26b2740..9265daf 100644
--- a/IO/NetCDF/CMakeLists.txt
+++ b/IO/NetCDF/CMakeLists.txt
@@ -9,6 +9,3 @@ set(Module_SRCS
   )
 
 vtk_module_library(vtkIONetCDF ${Module_SRCS})
-
-target_link_libraries(vtkIONetCDF vtkNetCDF_cxx)
-set_target_properties(vtkIONetCDF PROPERTIES LINK_INTERFACE_LIBRARIES "")
diff --git a/IO/NetCDF/Testing/Cxx/CMakeLists.txt b/IO/NetCDF/Testing/Cxx/CMakeLists.txt
index 093b713..8f75ef0 100644
--- a/IO/NetCDF/Testing/Cxx/CMakeLists.txt
+++ b/IO/NetCDF/Testing/Cxx/CMakeLists.txt
@@ -1,5 +1,13 @@
-if(VTK_LARGE_DATA_ROOT)
-  set (NEEDS_LARGE_DATA
+if(VTK_USE_LARGE_DATA)
+  # Tell ExternalData to fetch test input at build time.
+  ExternalData_Expand_Arguments(VTKData _
+    "DATA{${VTK_TEST_INPUT_DIR}/NetCDF/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/SLAC/ll-9cell-f523/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/SLAC/pic-example/,REGEX:.*}"
+    "DATA{${VTK_TEST_INPUT_DIR}/SLAC/pillbox/,REGEX:.*}"
+    )
+
+  vtk_add_test_cxx(
     SLACMultipleModes.cxx
     SLACParticleReader.cxx
     SLACReaderLinear.cxx
@@ -9,28 +17,5 @@ if(VTK_LARGE_DATA_ROOT)
     TestNetCDFPOPReader.cxx
     )
 endif()
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${NEEDS_LARGE_DATA}
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
 
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_LARGE_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_LARGE_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/NetCDF/Testing/Cxx/TestMPASReader.cxx b/IO/NetCDF/Testing/Cxx/TestMPASReader.cxx
index 7157836..16cbd20 100644
--- a/IO/NetCDF/Testing/Cxx/TestMPASReader.cxx
+++ b/IO/NetCDF/Testing/Cxx/TestMPASReader.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 // .NAME Test of vtkMPASReader
 // .SECTION Description
-// Tests the vtkMPASReader.  Requires VTKLargeData to be used.
+// Tests the vtkMPASReader.
 
 #include "vtkMPASReader.h"
 
diff --git a/IO/NetCDF/Testing/Cxx/TestNetCDFCAMReader.cxx b/IO/NetCDF/Testing/Cxx/TestNetCDFCAMReader.cxx
index 8a75c86..ac4a796 100644
--- a/IO/NetCDF/Testing/Cxx/TestNetCDFCAMReader.cxx
+++ b/IO/NetCDF/Testing/Cxx/TestNetCDFCAMReader.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 // .NAME Test of vtkNetCDFCAMReader
 // .SECTION Description
-// Tests the vtkNetCDFCAMReader.  Requires VTKLargeData to be used.
+// Tests the vtkNetCDFCAMReader.
 
 #include "vtkNetCDFCAMReader.h"
 
diff --git a/IO/NetCDF/Testing/Cxx/TestNetCDFPOPReader.cxx b/IO/NetCDF/Testing/Cxx/TestNetCDFPOPReader.cxx
index 5cde632..d3a523c 100644
--- a/IO/NetCDF/Testing/Cxx/TestNetCDFPOPReader.cxx
+++ b/IO/NetCDF/Testing/Cxx/TestNetCDFPOPReader.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 // .NAME Test of vtkNetCDFPOPReader
 // .SECTION Description
-// Tests the vtkNetCDFPOPReader.  Requires VTKLargeData to be set.
+// Tests the vtkNetCDFPOPReader.
 
 #include "vtkNetCDFPOPReader.h"
 
diff --git a/IO/NetCDF/Testing/Data/Baseline/NetCDFCF2DBounds.png.md5 b/IO/NetCDF/Testing/Data/Baseline/NetCDFCF2DBounds.png.md5
new file mode 100644
index 0000000..a17e94a
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/NetCDFCF2DBounds.png.md5
@@ -0,0 +1 @@
+35711f1f4d4e0ec15774a60b8f8db157
diff --git a/IO/NetCDF/Testing/Data/Baseline/NetCDFCFSetOutputType.png.md5 b/IO/NetCDF/Testing/Data/Baseline/NetCDFCFSetOutputType.png.md5
new file mode 100644
index 0000000..93e9180
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/NetCDFCFSetOutputType.png.md5
@@ -0,0 +1 @@
+18ba39b525fbdfefff571ff425fb240f
diff --git a/IO/NetCDF/Testing/Data/Baseline/NetCDFCFSphericalCoords.png.md5 b/IO/NetCDF/Testing/Data/Baseline/NetCDFCFSphericalCoords.png.md5
new file mode 100644
index 0000000..f699b57
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/NetCDFCFSphericalCoords.png.md5
@@ -0,0 +1 @@
+4d4aed7adaa7a3a66c0597c2802d8bca
diff --git a/IO/NetCDF/Testing/Data/Baseline/NetCDFCFUnstructured.png.md5 b/IO/NetCDF/Testing/Data/Baseline/NetCDFCFUnstructured.png.md5
new file mode 100644
index 0000000..e6d2616
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/NetCDFCFUnstructured.png.md5
@@ -0,0 +1 @@
+f82d2e047a5f71103b1ab75667fa5d2d
diff --git a/IO/NetCDF/Testing/Data/Baseline/NetCDFReader.png.md5 b/IO/NetCDF/Testing/Data/Baseline/NetCDFReader.png.md5
new file mode 100644
index 0000000..49c4474
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/NetCDFReader.png.md5
@@ -0,0 +1 @@
+ed9019984783dd6d8e11f65d554e48fd
diff --git a/IO/NetCDF/Testing/Data/Baseline/SLACMultipleModes.png.md5 b/IO/NetCDF/Testing/Data/Baseline/SLACMultipleModes.png.md5
new file mode 100644
index 0000000..4f342ff
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/SLACMultipleModes.png.md5
@@ -0,0 +1 @@
+87739a6d20694ba7fe2bc38db1235729
diff --git a/IO/NetCDF/Testing/Data/Baseline/SLACParticleReader.png.md5 b/IO/NetCDF/Testing/Data/Baseline/SLACParticleReader.png.md5
new file mode 100644
index 0000000..ac2ae39
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/SLACParticleReader.png.md5
@@ -0,0 +1 @@
+4fabbc42a15ea77fd8bfa7a0c4546b05
diff --git a/IO/NetCDF/Testing/Data/Baseline/SLACReaderLinear.png.md5 b/IO/NetCDF/Testing/Data/Baseline/SLACReaderLinear.png.md5
new file mode 100644
index 0000000..23fe2d9
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/SLACReaderLinear.png.md5
@@ -0,0 +1 @@
+da1728f49eee0bdf0f34361ca7a7fa6d
diff --git a/IO/NetCDF/Testing/Data/Baseline/SLACReaderQuadratic.png.md5 b/IO/NetCDF/Testing/Data/Baseline/SLACReaderQuadratic.png.md5
new file mode 100644
index 0000000..d1969a2
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/SLACReaderQuadratic.png.md5
@@ -0,0 +1 @@
+e0ae19e87f5f6578e3714ece985f0c80
diff --git a/IO/NetCDF/Testing/Data/Baseline/TestMPASReader.png.md5 b/IO/NetCDF/Testing/Data/Baseline/TestMPASReader.png.md5
new file mode 100644
index 0000000..0b8ef6b
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/TestMPASReader.png.md5
@@ -0,0 +1 @@
+0718631a8d53fcdfba3e53e261f07640
diff --git a/IO/NetCDF/Testing/Data/Baseline/TestNetCDFCAMReader.png.md5 b/IO/NetCDF/Testing/Data/Baseline/TestNetCDFCAMReader.png.md5
new file mode 100644
index 0000000..26ab10f
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/TestNetCDFCAMReader.png.md5
@@ -0,0 +1 @@
+5bf5726197d7d71d0de0b3909554ae32
diff --git a/IO/NetCDF/Testing/Data/Baseline/TestNetCDFPOPReader.png.md5 b/IO/NetCDF/Testing/Data/Baseline/TestNetCDFPOPReader.png.md5
new file mode 100644
index 0000000..8f11d78
--- /dev/null
+++ b/IO/NetCDF/Testing/Data/Baseline/TestNetCDFPOPReader.png.md5
@@ -0,0 +1 @@
+a5242241ce452fffe15d3e58602aa9c3
diff --git a/IO/NetCDF/Testing/Python/CMakeLists.txt b/IO/NetCDF/Testing/Python/CMakeLists.txt
index ff151f6..5c50571 100644
--- a/IO/NetCDF/Testing/Python/CMakeLists.txt
+++ b/IO/NetCDF/Testing/Python/CMakeLists.txt
@@ -1,7 +1,5 @@
-if (VTK_DATA_ROOT)
-  add_test_python(NetCDFCF2DBounds.py IO)
-  add_test_python(NetCDFCFSetOutputType.py IO)
-  add_test_python(NetCDFCFSphericalCoords.py IO)
-  add_test_python(NetCDFCFUnstructured.py IO)
-  add_test_python(NetCDFReader.py IO)
-endif()
+vtk_add_test_python(NetCDFCF2DBounds.py)
+vtk_add_test_python(NetCDFCFSetOutputType.py)
+vtk_add_test_python(NetCDFCFSphericalCoords.py)
+vtk_add_test_python(NetCDFCFUnstructured.py)
+vtk_add_test_python(NetCDFReader.py)
diff --git a/IO/NetCDF/Testing/Tcl/CMakeLists.txt b/IO/NetCDF/Testing/Tcl/CMakeLists.txt
index fa17ebf..d1eccbe 100644
--- a/IO/NetCDF/Testing/Tcl/CMakeLists.txt
+++ b/IO/NetCDF/Testing/Tcl/CMakeLists.txt
@@ -1,7 +1,5 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(NetCDFCF2DBounds IO)
-  add_test_tcl(NetCDFCFSetOutputType IO)
-  add_test_tcl(NetCDFCFSphericalCoords IO)
-  add_test_tcl(NetCDFCFUnstructured IO)
-  add_test_tcl(NetCDFReader IO)
-endif()
+vtk_add_test_tcl(NetCDFCF2DBounds)
+vtk_add_test_tcl(NetCDFCFSetOutputType)
+vtk_add_test_tcl(NetCDFCFSphericalCoords)
+vtk_add_test_tcl(NetCDFCFUnstructured)
+vtk_add_test_tcl(NetCDFReader)
diff --git a/IO/NetCDF/module.cmake b/IO/NetCDF/module.cmake
index a4fac09..3189978 100644
--- a/IO/NetCDF/module.cmake
+++ b/IO/NetCDF/module.cmake
@@ -5,7 +5,8 @@ vtk_module(vtkIONetCDF
     vtkCommonDataModel
     vtkCommonSystem
     vtkIOCore
-  COMPILE_DEPENDS
+  PRIVATE_DEPENDS
+    vtksys
     vtknetcdf
   TEST_DEPENDS
     vtkCommonExecutionModel
diff --git a/IO/NetCDF/vtkMPASReader.cxx b/IO/NetCDF/vtkMPASReader.cxx
index a7f70f3..cfff394 100644
--- a/IO/NetCDF/vtkMPASReader.cxx
+++ b/IO/NetCDF/vtkMPASReader.cxx
@@ -170,7 +170,7 @@ class vtkMPASReader::Internal {
 // Function to check if there is a NetCDF variable by that name
 //-----------------------------------------------------------------------------
 
-bool isNcVar(NcFile *ncFile, NcToken name)
+static bool isNcVar(NcFile *ncFile, NcToken name)
 {
   int num_vars = ncFile->num_vars();
   for (int i = 0; i < num_vars; i++)
@@ -190,7 +190,7 @@ bool isNcVar(NcFile *ncFile, NcToken name)
 // Check if there is a NetCDF dimension by that name
 //----------------------------------------------------------------------------
 
-bool isNcDim(NcFile *ncFile, NcToken name)
+static bool isNcDim(NcFile *ncFile, NcToken name)
 {
   int num_dims = ncFile->num_dims();
   //cerr << "looking for: " << name << endl;
@@ -213,7 +213,7 @@ bool isNcDim(NcFile *ncFile, NcToken name)
 //  computing points in different layers of multilayer spherical view
 //----------------------------------------------------------------------------
 
-int CartesianToSpherical(double x, double y, double z, double* rho,
+static int CartesianToSpherical(double x, double y, double z, double* rho,
     double* phi, double* theta)
 {
   double trho, ttheta, tphi;
@@ -238,7 +238,7 @@ int CartesianToSpherical(double x, double y, double z, double* rho,
 //  computing points in different layers of multilayer spherical view
 //----------------------------------------------------------------------------
 
-int SphericalToCartesian(double rho, double phi, double theta, double* x,
+static int SphericalToCartesian(double rho, double phi, double theta, double* x,
     double* y, double* z)
 {
   double tx, ty, tz;
@@ -269,6 +269,8 @@ vtkMPASReader::vtkMPASReader()
 {
   this->Internals = new vtkMPASReader::Internal;
 
+  this->CellMask = 0;
+
   // Debugging
   //this->DebugOn();
   vtkDebugMacro(<< "Starting to create vtkMPASReader..." << endl);
@@ -772,12 +774,18 @@ int vtkMPASReader::GetNcVars (const char* cellDimName, const char* pointDimName)
     {
     NcVar* aVar = ncFile->get_var(i);
 
-    // must have 3 dims
+    // for 1 dimension, must have format:
+    // (nCells)
+    //
+    // for 2 dimensions, must have format:
+    // (Time, nCells | nVertices)
+    //
+    // for 3 dimensions, must have format:
     // (Time, nCells | nVertices, nVertLevels | nVertLevelsP1)
 
     int numDims = aVar->num_dims();
     //cout << "Num Dims of var: " << aVar->name() << " is " << numDims << endl;
-    if (numDims != 3)
+    if (numDims > 3)
       {
       continue;
       }
@@ -787,34 +795,55 @@ int vtkMPASReader::GetNcVars (const char* cellDimName, const char* pointDimName)
 
     // check for Time dim 0
     NcToken dim0Name = aVar->get_dim(0)->name();
-    if (strcmp(dim0Name, "Time"))
+    if (numDims == 1)
       {
-      continue;
+      if (strcmp(dim0Name, "nCells"))
+        {
+        continue;
+        }
+      }
+    else
+      {
+      if (strcmp(dim0Name, "Time"))
+        {
+        continue;
+        }
       }
 
     // check for dim 1 being cell or point
     bool isCellData = false;
     bool isPointData = false;
-    NcToken dim1Name = aVar->get_dim(1)->name();
-    if (!strcmp(dim1Name, cellDimName))
-      {
-      isCellData = true;
-      }
-    else if (!strcmp(dim1Name, pointDimName))
+    if (numDims == 1)
       {
       isPointData = true;
       }
-    else
+    else if (numDims == 2 || numDims == 3)
       {
-      continue;
+      NcToken dim1Name = aVar->get_dim(1)->name();
+      if (!strcmp(dim1Name, cellDimName))
+        {
+        isCellData = true;
+        }
+      else if (!strcmp(dim1Name, pointDimName))
+        {
+        isPointData = true;
+        }
+      else
+        {
+        continue;
+        }
       }
 
-    // check if dim 2 is nVertLevels or nVertLevelsP1, too
-    NcToken dim2Name = aVar->get_dim(2)->name();
-    if ((strcmp(dim2Name, "nVertLevels"))
-        && (strcmp(dim2Name, "nVertLevelsP1")))
+    // 3D fields are defined over nVertLevels.
+    if (numDims == 3)
       {
-      continue;
+      // check if dim 2 is nVertLevels or nVertLevelsP1, too
+      NcToken dim2Name = aVar->get_dim(2)->name();
+      if ((strcmp(dim2Name, "nVertLevels"))
+          && (strcmp(dim2Name, "nVertLevelsP1")))
+        {
+        continue;
+        }
       }
 
     // Add to cell or point var array
@@ -1014,6 +1043,20 @@ int vtkMPASReader::AllocSphereGeometry()
     this->MaximumPoints = this->CurrentExtraPoint;
     vtkDebugMacro
       (<< "alloc sphere: singlelayer: setting MaximumPoints to " << this->MaximumPoints);
+
+    if (isNcVar(ncFile, "vertexMask"))
+      {
+      this->CellMask = (int*)malloc(this->NumberOfCells*sizeof(int));
+      CHECK_MALLOC(this->CellMask);
+      NcVar*  cellMask = ncFile->get_var("vertexMask");
+      cellMask->set_cur(0, this->VerticalLevelSelected);
+      cellMask->get(this->CellMask, this->NumberOfCells, 1);
+      }
+    else
+      {
+      free(this->CellMask);
+      this->CellMask = 0;
+      }
     }
   vtkDebugMacro(<< "Leaving AllocSphereGeometry...");
 
@@ -1102,6 +1145,21 @@ int vtkMPASReader::AllocLatLonGeometry()
     vtkDebugMacro
       (<< "alloc latlon: singlelayer: setting this->MaximumPoints to " << this->MaximumPoints
        << endl);
+
+    if (isNcVar(ncFile, "vertexMask"))
+      {
+      CHECK_VAR(ncFile, "vertexMask");
+      this->CellMask = (int*)malloc(this->ModNumCells*sizeof(int));
+      CHECK_MALLOC(this->CellMask);
+      NcVar*  cellMask = ncFile->get_var("vertexMask");
+      cellMask->set_cur(0, this->VerticalLevelSelected);
+      cellMask->get(this->CellMask, this->NumberOfCells, 1);
+      }
+    else
+      {
+      free(this->CellMask);
+      this->CellMask = 0;
+      }
     }
   vtkDebugMacro(<< "Leaving AllocLatLonGeometry..." << endl);
 
@@ -1297,6 +1355,11 @@ int vtkMPASReader::EliminateXWrap ()
           }
         }
 
+      if (this->CellMask)
+        {
+        this->CellMask[this->CurrentExtraCell] = this->CellMask[j];
+        }
+
       // move addedConns to this->ModConnections extra cells area
       int* addedConns = this->ModConnections
         + (this->CurrentExtraCell * this->PointsPerCell);
@@ -1672,6 +1735,18 @@ void vtkMPASReader::OutputCells(bool init)
       }
     }
 
+  if (this->CellMask)
+    {
+    vtkIntArray* cellMask = vtkIntArray::New();
+    cellMask->SetArray(this->CellMask,
+                       this->CurrentExtraCell,
+                       0,
+                       vtkIntArray::VTK_DATA_ARRAY_FREE);
+    cellMask->SetName("Mask");
+    output->GetCellData()->AddArray(cellMask);
+    this->CellMask = NULL;
+    }
+
   free(this->ModConnections); this->ModConnections = NULL;
   free(this->OrigConnections); this->OrigConnections = NULL;
 
@@ -1721,25 +1796,72 @@ int vtkMPASReader::LoadPointVarData(int variableIndex, double dTimeStep)
   vtkDebugMacro( << "Time: " << timestep << endl);
 
 
+  int numDims = ncVar->num_dims();
+
   // singlelayer
   if (!ShowMultilayerView)
     {
-    this->Internals->pointVars[variableIndex]->set_cur
-      (timestep, 0, this->VerticalLevelSelected);
-
     // we can go ahead and read it into the datablock
-    this->Internals->pointVars[variableIndex]->get
-      (dataBlock+this->PointOffset, 1, this->NumberOfPoints, 1);
+    if (numDims == 1)
+      {
+      ncVar->set_cur((long)0);
+      ncVar->get(dataBlock+this->PointOffset, this->NumberOfPoints);
+      }
+    else if (numDims == 2)
+      {
+      ncVar->set_cur(timestep, 0);
+      ncVar->get(dataBlock+this->PointOffset, 1, this->NumberOfPoints);
+      }
+    else if (numDims == 3)
+      {
+      ncVar->set_cur(timestep, 0, this->VerticalLevelSelected);
+      ncVar->get(dataBlock+this->PointOffset, 1, this->NumberOfPoints, 1);
+      }
+
     dataBlock[0] = dataBlock[1];
     // data is all in place, don't need to do next step
-
     }
   else
     { // multilayer
-    this->Internals->pointVars[variableIndex]->set_cur(timestep, 0, 0);
-    this->Internals->pointVars[variableIndex]->get
-      (this->PointVarData +
-       (this->MaximumNVertLevels * this->PointOffset), 1, this->NumberOfPoints, this->MaximumNVertLevels);
+    double* dataPtr = this->PointVarData +
+                      (this->MaximumNVertLevels * this->PointOffset);
+    if (numDims == 1)
+      {
+      ncVar->set_cur((long)0);
+      ncVar->get(dataPtr, this->NumberOfPoints);
+      }
+    else if (numDims == 2)
+      {
+      ncVar->set_cur(timestep, 0);
+      ncVar->get(dataPtr, 1, this->NumberOfPoints);
+      }
+    else if (numDims == 3)
+      {
+      ncVar->set_cur(timestep, 0, 0);
+      ncVar->get(dataPtr, 1, this->NumberOfPoints, this->MaximumNVertLevels);
+      }
+    if (numDims == 1 || numDims == 2)
+      {
+      // need to replicate data over all vertical layers
+      // layout in memory needs to be:
+      // pt1, pt1, ..., (VertLevels times), pt2, pt2, ..., (VertLevels times),
+      // need to go backwards through the points in order to not overwrite
+      // anything.
+      for(int i=this->NumberOfPoints; i>0; i--)
+        {
+        // point to copy
+        double pt = *(dataPtr + i - 1);
+
+        // where to start copying
+        double* copyPtr = dataPtr + (i-1)*this->MaximumNVertLevels;
+
+        for(int j=0; j<this->MaximumNVertLevels; j++)
+          {
+          *copyPtr = pt;
+          copyPtr++;
+          }
+        }
+      }
     }
 
   vtkDebugMacro
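
For 1-D and 2-D variables the code above reads one value per point and then expands it across all vertical levels in place; as the comment notes, the loop has to run from the last point backwards so the expanded block for a point never overwrites source values that are still waiting to be copied. A minimal standalone sketch of that in-place expansion (plain C++, not reader code):

    #include <cstdio>

    int main()
    {
      const int numPoints = 4;
      const int numLevels = 3;
      // Buffer already sized for numPoints*numLevels; only the first numPoints
      // slots hold data, as after the 1-D/2-D NetCDF read above.
      double data[numPoints * numLevels] = { 10, 20, 30, 40 };

      // Walk backwards so the destination block for point i never overlaps
      // a source value that still has to be read.
      for (int i = numPoints; i > 0; --i)
        {
        double pt = data[i - 1];
        double* dst = data + (i - 1) * numLevels;
        for (int j = 0; j < numLevels; ++j)
          {
          dst[j] = pt;
          }
        }

      for (int k = 0; k < numPoints * numLevels; ++k)
        {
        printf("%g ", data[k]);  // 10 10 10 20 20 20 30 30 30 40 40 40
        }
      printf("\n");
      return 0;
    }
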
diff --git a/IO/NetCDF/vtkMPASReader.h b/IO/NetCDF/vtkMPASReader.h
index 4b5c2bb..7a572e3 100644
--- a/IO/NetCDF/vtkMPASReader.h
+++ b/IO/NetCDF/vtkMPASReader.h
@@ -239,6 +239,7 @@ class VTKIONETCDF_EXPORT vtkMPASReader : public vtkUnstructuredGridAlgorithm
   double* PointZ;      // z coord of point
   int ModNumPoints;
   int ModNumCells;
+  int* CellMask;
   int* OrigConnections;   // original connections
   int* ModConnections;    // modified connections
   int* CellMap;           // maps from added cell to original cell #
diff --git a/IO/NetCDF/vtkNetCDFCFReader.cxx b/IO/NetCDF/vtkNetCDFCFReader.cxx
index 0b8056c..0398886 100644
--- a/IO/NetCDF/vtkNetCDFCFReader.cxx
+++ b/IO/NetCDF/vtkNetCDFCFReader.cxx
@@ -32,6 +32,7 @@
 #include "vtkIntArray.h"
 #include "vtkMath.h"
 #include "vtkMergePoints.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPoints.h"
 #include "vtkRectilinearGrid.h"
@@ -1071,7 +1072,17 @@ int vtkNetCDFCFReader::RequestData(vtkInformation *request,
     = vtkRectilinearGrid::GetData(outputVector);
   if (rectilinearOutput)
     {
-    this->AddRectilinearCoordinates(rectilinearOutput);
+    switch (this->CoordinateType(this->LoadingDimensions))
+      {
+      case COORDS_EUCLIDEAN_PSIDED_CELLS:
+      case COORDS_SPHERICAL_PSIDED_CELLS:
+        // There is no sensible way to store p-sided cells in a structured grid.
+        // Just fake some coordinates (related to ParaView bug #11543).
+        this->FakeRectilinearCoordinates(rectilinearOutput);
+        break;
+      default:
+        this->AddRectilinearCoordinates(rectilinearOutput);
+      }
     }
 
   vtkStructuredGrid *structuredOutput
@@ -1098,9 +1109,8 @@ int vtkNetCDFCFReader::RequestData(vtkInformation *request,
       case COORDS_EUCLIDEAN_PSIDED_CELLS:
       case COORDS_SPHERICAL_PSIDED_CELLS:
         // There is no sensible way to store p-sided cells in a structured grid.
-        // Just store them as a rectilinear grid, which should at least not
-        // crash (bug #11543).
-        this->Add1DRectilinearCoordinates(structuredOutput);
+        // Just fake some coordinates (ParaView bug #11543).
+        this->FakeStructuredCoordinates(structuredOutput);
         break;
       default:
         vtkErrorMacro("Internal error: unknown coordinate type.");
@@ -1244,6 +1254,34 @@ void vtkNetCDFCFReader::AddRectilinearCoordinates(
       case 0: rectilinearOutput->SetXCoordinates(coords);  break;
       case 1: rectilinearOutput->SetYCoordinates(coords);  break;
       case 2: rectilinearOutput->SetZCoordinates(coords);  break;
+      default: vtkErrorMacro("Sanity check failed!"); break;
+      }
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkNetCDFCFReader::FakeRectilinearCoordinates(
+    vtkRectilinearGrid *rectilinearOutput)
+{
+  int dimensionSizes[3];
+  rectilinearOutput->GetDimensions(dimensionSizes);
+
+  for (int dim = 0; dim < 3; dim++)
+    {
+    vtkNew<vtkDoubleArray> coordinate;
+    coordinate->SetNumberOfComponents(1);
+    coordinate->SetNumberOfTuples(dimensionSizes[dim]);
+
+    for (int index = 0; index < dimensionSizes[dim]; index++)
+      {
+      coordinate->SetComponent(index, 0, static_cast<double>(index));
+      }
+    switch(dim)
+      {
+      case 0: rectilinearOutput->SetXCoordinates(coordinate.GetPointer());break;
+      case 1: rectilinearOutput->SetYCoordinates(coordinate.GetPointer());break;
+      case 2: rectilinearOutput->SetZCoordinates(coordinate.GetPointer());break;
+      default: vtkErrorMacro("Sanity check failed!"); break;
       }
     }
 }
@@ -1379,6 +1417,33 @@ void vtkNetCDFCFReader::Add2DRectilinearCoordinates(
 }
 
 //-----------------------------------------------------------------------------
+void vtkNetCDFCFReader::FakeStructuredCoordinates(
+    vtkStructuredGrid *structuredOutput)
+{
+  int extent[6];
+  structuredOutput->GetExtent(extent);
+
+  vtkNew<vtkPoints> points;
+  points->SetDataTypeToDouble();
+  points->Allocate( (extent[1]-extent[0]+1)
+                    * (extent[3]-extent[2]+1)
+                    * (extent[5]-extent[4]+1) );
+
+  for (int kIndex = extent[4]; kIndex <= extent[5]; kIndex++)
+    {
+    for (int jIndex = extent[2]; jIndex <= extent[3]; jIndex++)
+      {
+      for (int iIndex = extent[0]; iIndex <= extent[1]; iIndex++)
+        {
+        points->InsertNextPoint(iIndex, jIndex, kIndex);
+        }
+      }
+    }
+
+  structuredOutput->SetPoints(points.GetPointer());
+}
+
+//-----------------------------------------------------------------------------
 void vtkNetCDFCFReader::Add1DRectilinearCoordinates(
                                         vtkUnstructuredGrid *unstructuredOutput,
                                         const int extent[6])
diff --git a/IO/NetCDF/vtkNetCDFCFReader.h b/IO/NetCDF/vtkNetCDFCFReader.h
index 7eccdaf..2b2d775 100644
--- a/IO/NetCDF/vtkNetCDFCFReader.h
+++ b/IO/NetCDF/vtkNetCDFCFReader.h
@@ -252,10 +252,12 @@ protected:
   // Internal methods for setting rectilinear coordinates.
   void AddRectilinearCoordinates(vtkImageData *imageOutput);
   void AddRectilinearCoordinates(vtkRectilinearGrid *rectilinearOutput);
+  void FakeRectilinearCoordinates(vtkRectilinearGrid *rectilinearOutput);
   void Add1DRectilinearCoordinates(vtkPoints *points, const int extent[6]);
   void Add2DRectilinearCoordinates(vtkPoints *points, const int extent[6]);
   void Add1DRectilinearCoordinates(vtkStructuredGrid *structuredOutput);
   void Add2DRectilinearCoordinates(vtkStructuredGrid *structuredOutput);
+  void FakeStructuredCoordinates(vtkStructuredGrid *structuredOutput);
   void Add1DRectilinearCoordinates(vtkUnstructuredGrid *unstructuredOutput,
                                    const int extent[6]);
   void Add2DRectilinearCoordinates(vtkUnstructuredGrid *unstructuredOutput,
diff --git a/IO/NetCDF/vtkNetCDFReader.cxx b/IO/NetCDF/vtkNetCDFReader.cxx
index e203df8..dc2c92e 100644
--- a/IO/NetCDF/vtkNetCDFReader.cxx
+++ b/IO/NetCDF/vtkNetCDFReader.cxx
@@ -43,7 +43,10 @@
   vtkSmartPointer<type> name = vtkSmartPointer<type>::New()
 
 #include <algorithm>
+#include <map>
 #include <set>
+#include <string>
+
 #include <vtksys/SystemTools.hxx>
 
 #include "vtk_netcdf.h"
@@ -60,6 +63,20 @@
 
 #include <ctype.h>
 
+class vtkNetCDFReaderPrivate {
+public:
+  vtkNetCDFReaderPrivate() {}
+  ~vtkNetCDFReaderPrivate()
+  {
+    this->ArrayUnits.clear();
+  }
+  void AddUnit(std::string arrayName, std::string unit)
+  {
+     this->ArrayUnits[arrayName] = unit;
+  }
+  std::map<std::string,std::string> ArrayUnits;
+};
+
 //=============================================================================
 static int NetCDFTypeToVTKType(nc_type type)
 {
@@ -105,6 +122,10 @@ vtkNetCDFReader::vtkNetCDFReader()
   this->WholeExtent[0] = this->WholeExtent[1]
     = this->WholeExtent[2] = this->WholeExtent[3]
     = this->WholeExtent[4] = this->WholeExtent[5] = 0;
+
+  this->TimeUnits = NULL;
+  this->Calendar = NULL;
+  this->Private = new vtkNetCDFReaderPrivate();
 }
 
 vtkNetCDFReader::~vtkNetCDFReader()
@@ -112,6 +133,9 @@ vtkNetCDFReader::~vtkNetCDFReader()
   this->SetFileName(NULL);
   this->VariableDimensions->Delete();
   this->AllDimensions->Delete();
+  delete[] this->TimeUnits;
+  delete[] this->Calendar;
+  delete this->Private;
 }
 
 void vtkNetCDFReader::PrintSelf(ostream &os, vtkIndent indent)
@@ -184,6 +208,23 @@ int vtkNetCDFReader::RequestInformation(
     CALL_NETCDF(nc_inq_vardimid(ncFD, varId,
                                 currentDimensions->GetPointer(0)));
 
+    //get units
+    int status;
+    size_t len = 0;
+    char *buffer = NULL;
+    status = nc_inq_attlen(ncFD, varId, "units", &len);
+    if (status == NC_NOERR)
+      {
+      buffer = new char[len+1];
+      status = nc_get_att_text(ncFD, varId, "units", buffer);
+      buffer[len] = '\0';
+      }
+    if (status == NC_NOERR)
+      {
+      this->Private->AddUnit(name, buffer);
+      }
+    delete[] buffer;
+
     // Assumption: time dimension is first.
     int timeDim = currentDimensions->GetValue(0);       // Not determined yet.
     if (this->IsTimeDimension(ncFD, timeDim))
@@ -268,6 +309,13 @@ int vtkNetCDFReader::RequestInformation(
                  this->WholeExtent, 6);
     }
 
+
+  //Free old time units.
+  delete[] this->TimeUnits;
+  this->TimeUnits = NULL;
+  delete[] this->Calendar;
+  this->Calendar = NULL;
+
   // If we have time, report that.
   if (timeValues && (timeValues->GetNumberOfTuples() > 0))
     {
@@ -278,6 +326,50 @@ int vtkNetCDFReader::RequestInformation(
     timeRange[0] = timeValues->GetValue(0);
     timeRange[1] = timeValues->GetValue(timeValues->GetNumberOfTuples()-1);
     outInfo->Set(vtkStreamingDemandDrivenPipeline::TIME_RANGE(), timeRange, 2);
+
+    //Get time units
+    int status, varId;
+    size_t len = 0;
+    char *buffer = NULL;
+    status = nc_inq_varid(ncFD, "time", &varId);
+    if (status == NC_NOERR)
+      {
+      status = nc_inq_attlen(ncFD, varId, "units", &len);
+      }
+    if (status == NC_NOERR)
+      {
+      buffer = new char[len+1];
+      status = nc_get_att_text(ncFD, varId, "units", buffer);
+      buffer[len] = '\0';
+      if (status == NC_NOERR)
+        {
+        this->TimeUnits = buffer;
+        }
+      else
+        {
+        delete[] buffer;
+        }
+      }
+
+    //Get calendar that time units are in
+    if (status == NC_NOERR)
+       {
+       status = nc_inq_attlen(ncFD, varId, "calendar", &len);
+       }
+     if (status == NC_NOERR)
+       {
+       buffer = new char[len+1];
+       status = nc_get_att_text(ncFD, varId, "calendar", buffer);
+       buffer[len] = '\0';
+       if (status == NC_NOERR)
+         {
+         this->Calendar = buffer;
+         }
+       else
+         {
+         delete[] buffer;
+         }
+       }
     }
   else
     {
@@ -806,3 +898,8 @@ int vtkNetCDFReader::LoadVariable(int ncFD, const char *varName, double time,
   return 1;
 }
 
+//-----------------------------------------------------------------------------
+std::string vtkNetCDFReader::QueryArrayUnits(const char* name)
+{
+  return this->Private->ArrayUnits[name];
+}
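
The units and calendar lookups added above follow the standard netCDF C pattern: nc_inq_attlen() gives the attribute length, the buffer is allocated one byte larger, and the text is null-terminated by hand because nc_get_att_text() does not append a terminator. A condensed sketch of that pattern as a free-standing helper; the function name and the already-open ncFD/varId are assumptions for illustration, not part of vtkNetCDFReader:

    #include "vtk_netcdf.h"
    #include <string>

    // Read a text attribute into a std::string; return an empty string on any error.
    static std::string ReadTextAttribute(int ncFD, int varId, const char* attName)
    {
      size_t len = 0;
      if (nc_inq_attlen(ncFD, varId, attName, &len) != NC_NOERR)
        {
        return std::string();
        }
      char* buffer = new char[len + 1];
      std::string result;
      if (nc_get_att_text(ncFD, varId, attName, buffer) == NC_NOERR)
        {
        buffer[len] = '\0';  // netCDF does not null-terminate text attributes
        result = buffer;
        }
      delete[] buffer;
      return result;
    }
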
diff --git a/IO/NetCDF/vtkNetCDFReader.h b/IO/NetCDF/vtkNetCDFReader.h
index 2d4c49d..6ec2ec6 100644
--- a/IO/NetCDF/vtkNetCDFReader.h
+++ b/IO/NetCDF/vtkNetCDFReader.h
@@ -36,7 +36,7 @@
 #include "vtkDataObjectAlgorithm.h"
 
 #include "vtkSmartPointer.h"    // For ivars
-
+#include <string> //For std::string
 
 class vtkDataArraySelection;
 class vtkDataSet;
@@ -44,6 +44,7 @@ class vtkDoubleArray;
 class vtkIntArray;
 class vtkStdString;
 class vtkStringArray;
+class vtkNetCDFReaderPrivate;
 
 class VTKIONETCDF_EXPORT vtkNetCDFReader : public vtkDataObjectAlgorithm
 {
@@ -113,6 +114,17 @@ public:
   vtkSetMacro(ReplaceFillValueWithNan, int);
   vtkBooleanMacro(ReplaceFillValueWithNan, int);
 
+  // Description:
+  // Access to the time dimensions units.
+  // Can be used by the udunits library to convert raw numerical time values
+  // into meaningful representations.
+  vtkGetStringMacro(TimeUnits);
+  vtkGetStringMacro(Calendar);
+
+  // Description:
+  // Get units attached to a particular array in the netcdf file.
+  std::string QueryArrayUnits(const char *ArrayName);
+
 protected:
   vtkNetCDFReader();
   ~vtkNetCDFReader();
@@ -218,6 +230,9 @@ private:
   void operator=(const vtkNetCDFReader &);      // Not implemented
 
   int UpdateExtent[6];
+  char *TimeUnits;
+  char *Calendar;
+  vtkNetCDFReaderPrivate *Private;
 };
 
 #endif //__vtkNetCDFReader_h
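
Together with the QueryArrayUnits() implementation above, these declarations let callers retrieve the time units, the calendar, and per-array units once RequestInformation has run. A short usage sketch; the file path and the array name "T" are placeholders, not taken from the patch:

    #include <vtkNetCDFReader.h>
    #include <vtkNew.h>
    #include <iostream>

    int main()
    {
      vtkNew<vtkNetCDFReader> reader;
      reader->SetFileName("/path/to/data.nc");  // placeholder path
      reader->UpdateInformation();              // units are gathered during RequestInformation

      const char* timeUnits = reader->GetTimeUnits();  // e.g. "days since 0001-01-01", may be NULL
      const char* calendar  = reader->GetCalendar();   // NULL when the attribute is absent
      std::cout << (timeUnits ? timeUnits : "(no time units)") << std::endl;
      std::cout << (calendar  ? calendar  : "(no calendar)")   << std::endl;
      std::cout << reader->QueryArrayUnits("T") << std::endl;  // "T" is a hypothetical array name
      return 0;
    }
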
diff --git a/IO/ODBC/CMakeLists.txt b/IO/ODBC/CMakeLists.txt
index 65119d1..6825b25 100644
--- a/IO/ODBC/CMakeLists.txt
+++ b/IO/ODBC/CMakeLists.txt
@@ -12,4 +12,4 @@ include_directories(${ODBC_INCLUDE_DIRECTORIES})
 
 vtk_module_library(vtkIOODBC ${ODBC_SRCS})
 
-target_link_libraries(vtkIOODBC ${ODBC_LIBRARIES})
+target_link_libraries(vtkIOODBC LINK_PRIVATE ${ODBC_LIBRARIES})
diff --git a/IO/ODBC/Testing/Cxx/CMakeLists.txt b/IO/ODBC/Testing/Cxx/CMakeLists.txt
index 063b3a4..cba1c69 100644
--- a/IO/ODBC/Testing/Cxx/CMakeLists.txt
+++ b/IO/ODBC/Testing/Cxx/CMakeLists.txt
@@ -4,27 +4,6 @@ if(VTK_ODBC_TEST_DSN)
 
   include_directories(${CMAKE_CURRENT_BINARY_DIR})
 
-  create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-    TestODBCDatabase.cxx
-    EXTRA_INCLUDE vtkTestDriver.h
-    )
-
-  vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-  set(TestsToRun ${Tests})
-  list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-  # Add all the executables
-  foreach(test ${TestsToRun})
-    get_filename_component(TName ${test} NAME_WE)
-    if(VTK_DATA_ROOT)
-      add_test(NAME ${vtk-module}Cxx-${TName}
-        COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/${vtk-module}/${TName}.png)
-    else()
-      add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-    endif()
-  endforeach()
+  vtk_add_test_cxx(TestODBCDatabase.cxx NO_DATA NO_VALID)
+  vtk_test_cxx_executable(${vtk-module}CxxTests)
 endif()
diff --git a/IO/ODBC/module.cmake b/IO/ODBC/module.cmake
index 7fc9396..5ee4218 100644
--- a/IO/ODBC/module.cmake
+++ b/IO/ODBC/module.cmake
@@ -2,6 +2,8 @@ vtk_module(vtkIOODBC
   DEPENDS
     vtkCommonDataModel
     vtkIOSQL
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkTestingCore
   )
diff --git a/IO/ODBC/vtkODBCDatabase.cxx b/IO/ODBC/vtkODBCDatabase.cxx
index 2f91bab..8c7e5ef 100644
--- a/IO/ODBC/vtkODBCDatabase.cxx
+++ b/IO/ODBC/vtkODBCDatabase.cxx
@@ -41,7 +41,7 @@
 #include <vtksys/SystemTools.hxx>
 #include <vtksys/ios/sstream>
 
-#include <assert.h>
+#include <cassert>
 #include <string.h>
 
 #include <sql.h>
diff --git a/IO/ODBC/vtkODBCQuery.cxx b/IO/ODBC/vtkODBCQuery.cxx
index ddd0611..b2c9d58 100644
--- a/IO/ODBC/vtkODBCQuery.cxx
+++ b/IO/ODBC/vtkODBCQuery.cxx
@@ -35,7 +35,7 @@
 #include <vtkVariant.h>
 #include <vtkVariantArray.h>
 
-#include <assert.h>
+#include <cassert>
 
 #include <vtksys/ios/sstream>
 #include <vtksys/stl/vector>
diff --git a/IO/PLY/Testing/Cxx/CMakeLists.txt b/IO/PLY/Testing/Cxx/CMakeLists.txt
index 3185e9a..2a801e9 100644
--- a/IO/PLY/Testing/Cxx/CMakeLists.txt
+++ b/IO/PLY/Testing/Cxx/CMakeLists.txt
@@ -1,31 +1,2 @@
-set(testswithdata)
-if (VTK_DATA_ROOT)
-  set(testswithdata
-    TestPLYReader.cxx
-  )
-endif()
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${testswithdata}
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_add_test_cxx(TestPLYReader.cxx)
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/PLY/Testing/Data/Baseline/TestPLYReadWrite.png.md5 b/IO/PLY/Testing/Data/Baseline/TestPLYReadWrite.png.md5
new file mode 100644
index 0000000..4a62f8e
--- /dev/null
+++ b/IO/PLY/Testing/Data/Baseline/TestPLYReadWrite.png.md5
@@ -0,0 +1 @@
+06a796326c623548481e85aeae631aed
diff --git a/IO/PLY/Testing/Data/Baseline/TestPLYReader.png.md5 b/IO/PLY/Testing/Data/Baseline/TestPLYReader.png.md5
new file mode 100644
index 0000000..aaf9c94
--- /dev/null
+++ b/IO/PLY/Testing/Data/Baseline/TestPLYReader.png.md5
@@ -0,0 +1 @@
+6ee8d345f9ed2ebb11be7326324da237
diff --git a/IO/PLY/Testing/Python/CMakeLists.txt b/IO/PLY/Testing/Python/CMakeLists.txt
index ba60d9c..c59b63f 100644
--- a/IO/PLY/Testing/Python/CMakeLists.txt
+++ b/IO/PLY/Testing/Python/CMakeLists.txt
@@ -1 +1 @@
-add_test_python(TestPLYReadWrite.py IO)
+vtk_add_test_python(TestPLYReadWrite.py)
diff --git a/IO/PLY/Testing/Tcl/CMakeLists.txt b/IO/PLY/Testing/Tcl/CMakeLists.txt
index 343b909..0d4ca92 100644
--- a/IO/PLY/Testing/Tcl/CMakeLists.txt
+++ b/IO/PLY/Testing/Tcl/CMakeLists.txt
@@ -1,3 +1 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestPLYReadWrite IO)
-endif()
+vtk_add_test_tcl(TestPLYReadWrite)
diff --git a/IO/PLY/vtkPLY.cxx b/IO/PLY/vtkPLY.cxx
index 45c0495..c75b643 100644
--- a/IO/PLY/vtkPLY.cxx
+++ b/IO/PLY/vtkPLY.cxx
@@ -52,7 +52,7 @@ WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
 
 #include <stddef.h>
 #include <string.h>
-#include <assert.h>
+#include <cassert>
 
 /* memory allocation */
 #define myalloc(mem_size) vtkPLY::my_alloc((mem_size), __LINE__, __FILE__)
@@ -1034,7 +1034,7 @@ Entry:
   elem    - element for which we want to save away other properties
 ******************************************************************************/
 
-void setup_other_props(PlyFile *, PlyElement *elem)
+static void setup_other_props(PlyFile *, PlyElement *elem)
 {
   int i;
   PlyProperty *prop;
diff --git a/IO/PLY/vtkPLYWriter.cxx b/IO/PLY/vtkPLYWriter.cxx
index c06ab88..0f1f27c 100644
--- a/IO/PLY/vtkPLYWriter.cxx
+++ b/IO/PLY/vtkPLYWriter.cxx
@@ -233,8 +233,8 @@ void vtkPLYWriter::WriteData()
       }
     }//for all polygons
 
-  if ( pointColors ) {delete [] pointColors;}
-  if ( cellColors ) {delete [] cellColors;}
+  delete [] pointColors;
+  delete [] cellColors;
 
   // close the PLY file
   vtkPLY::ply_close (ply);
diff --git a/IO/Parallel/Testing/Cxx/CMakeLists.txt b/IO/Parallel/Testing/Cxx/CMakeLists.txt
index 23ba4b2..8a864bf 100644
--- a/IO/Parallel/Testing/Cxx/CMakeLists.txt
+++ b/IO/Parallel/Testing/Cxx/CMakeLists.txt
@@ -1,7 +1,32 @@
-if (VTK_LARGE_DATA_ROOT)
+ExternalData_Expand_Arguments(VTKData _
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/0/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/0.5/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/0.5/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/1/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/1/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/1.5/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/1.5/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/2/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/2/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/2.5/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/2.5/uniform/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/constant/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/constant/polyMesh/,REGEX:.*}"
+  "DATA{${VTK_TEST_INPUT_DIR}/OpenFOAM/cavity/system/,REGEX:.*}"
+  )
+vtk_add_test_cxx(
+  TestPOpenFOAMReader.cxx
+)
 
-  # Parallel tests
-  add_test_mpi(PSLACReaderLinear.cxx DATADIR ${VTK_LARGE_DATA_ROOT})
-  add_test_mpi(PSLACReaderQuadratic.cxx DATADIR ${VTK_LARGE_DATA_ROOT})
+vtk_test_cxx_executable(${vtk-module}CxxTests)
 
+if(VTK_USE_LARGE_DATA)
+  # Tell ExternalData to fetch test input at build time.
+  ExternalData_Expand_Arguments(VTKData _
+    "DATA{${VTK_TEST_INPUT_DIR}/SLAC/ll-9cell-f523/,REGEX:.*}"
+    )
+
+  vtk_add_test_mpi(PSLACReaderLinear.cxx TESTING_DATA)
+  vtk_add_test_mpi(PSLACReaderQuadratic.cxx TESTING_DATA)
 endif()
diff --git a/IO/Parallel/Testing/Cxx/TestPOpenFOAMReader.cxx b/IO/Parallel/Testing/Cxx/TestPOpenFOAMReader.cxx
new file mode 100644
index 0000000..4a90cac
--- /dev/null
+++ b/IO/Parallel/Testing/Cxx/TestPOpenFOAMReader.cxx
@@ -0,0 +1,139 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestPOpenFOAMReader.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include <vtkSmartPointer.h>
+#include <vtkProperty.h>
+#include <vtkUnstructuredGrid.h>
+#include <vtkPointData.h>
+#include <vtkCellData.h>
+#include <vtkDataSetMapper.h>
+#include <vtkPOpenFOAMReader.h>
+#include <vtkMultiBlockDataSet.h>
+#include <vtkRenderWindow.h>
+#include <vtkRenderWindowInteractor.h>
+#include <vtkRenderer.h>
+
+#include <vtkTestUtilities.h>
+#include <vtkRegressionTestImage.h>
+
+int TestPOpenFOAMReader(int argc, char* argv[])
+{
+  // Read file name.
+  char* filename =
+    vtkTestUtilities::ExpandDataFileName(argc, argv,
+                                         "Data/OpenFOAM/cavity/cavity.foam");
+
+  // Read the file
+  vtkSmartPointer<vtkPOpenFOAMReader> reader =
+    vtkSmartPointer<vtkPOpenFOAMReader>::New();
+  reader->SetFileName(filename);
+  delete [] filename;
+  reader->Update();
+  reader->SetTimeValue(.5);
+//  reader->CreateCellToPointOn();
+  reader->ReadZonesOn();
+  reader->Update();
+  reader->Print(std::cout);
+  reader->GetOutput()->Print(std::cout);
+  reader->GetOutput()->GetBlock(0)->Print(std::cout);
+
+  // 1) Default array settings
+  int numberOfCellArrays = reader->GetNumberOfCellArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfCellArrays; ++i)
+    {
+    const char * name = reader->GetCellArrayName(i);
+    std::cout << "  Cell Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetCellArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  int numberOfPointArrays = reader->GetNumberOfPointArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfPointArrays; ++i)
+    {
+    const char * name = reader->GetPointArrayName(i);
+    std::cout << "  Point Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetPointArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  int numberOfLagrangianArrays = reader->GetNumberOfLagrangianArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfLagrangianArrays; ++i)
+    {
+    const char * name = reader->GetLagrangianArrayName(i);
+    std::cout << "  Lagrangian Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetLagrangianArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  int numberOfPatchArrays = reader->GetNumberOfPatchArrays();
+  std::cout << "----- Default array settings" << std::endl;
+  for (int i = 0; i < numberOfPatchArrays; ++i)
+    {
+    const char * name = reader->GetPatchArrayName(i);
+    std::cout << "  Patch Array: " << i
+              << " is named " << name
+              << " and is "
+              << (reader->GetPatchArrayStatus(name) ? "Enabled" : "Disabled")
+              << std::endl;
+    }
+
+  vtkUnstructuredGrid *block0 = vtkUnstructuredGrid::SafeDownCast(reader->GetOutput()->GetBlock(0));
+  block0->GetCellData()->SetActiveScalars("p");
+  std::cout << "Scalar range: "
+            << block0->GetCellData()->GetScalars()->GetRange()[0] << ", "
+            << block0->GetCellData()->GetScalars()->GetRange()[1] << std::endl;
+
+  // Visualize
+  vtkSmartPointer<vtkDataSetMapper> mapper =
+    vtkSmartPointer<vtkDataSetMapper>::New();
+  mapper->SetInputData(block0);
+  mapper->SetScalarRange(block0->GetScalarRange());
+
+  vtkSmartPointer<vtkActor> actor =
+    vtkSmartPointer<vtkActor>::New();
+  actor->SetMapper(mapper);
+
+  vtkSmartPointer<vtkRenderer> renderer =
+    vtkSmartPointer<vtkRenderer>::New();
+  vtkSmartPointer<vtkRenderWindow> renderWindow =
+    vtkSmartPointer<vtkRenderWindow>::New();
+  renderWindow->AddRenderer(renderer);
+  vtkSmartPointer<vtkRenderWindowInteractor> renderWindowInteractor =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  renderWindowInteractor->SetRenderWindow(renderWindow);
+
+  renderer->AddActor(actor);
+  renderer->SetBackground(.2, .4, .6);
+
+  renderWindow->Render();
+
+  int retVal = vtkRegressionTestImage( renderWindow );
+  if ( retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    renderWindowInteractor->Start();
+    }
+
+  return !retVal;
+
+}
diff --git a/IO/Parallel/Testing/Data/Baseline/PSLACReaderLinear.png.md5 b/IO/Parallel/Testing/Data/Baseline/PSLACReaderLinear.png.md5
new file mode 100644
index 0000000..23fe2d9
--- /dev/null
+++ b/IO/Parallel/Testing/Data/Baseline/PSLACReaderLinear.png.md5
@@ -0,0 +1 @@
+da1728f49eee0bdf0f34361ca7a7fa6d
diff --git a/IO/Parallel/Testing/Data/Baseline/PSLACReaderQuadratic.png.md5 b/IO/Parallel/Testing/Data/Baseline/PSLACReaderQuadratic.png.md5
new file mode 100644
index 0000000..d1969a2
--- /dev/null
+++ b/IO/Parallel/Testing/Data/Baseline/PSLACReaderQuadratic.png.md5
@@ -0,0 +1 @@
+e0ae19e87f5f6578e3714ece985f0c80
diff --git a/IO/Parallel/Testing/Data/Baseline/TestPDataSetReaderGrid.png.md5 b/IO/Parallel/Testing/Data/Baseline/TestPDataSetReaderGrid.png.md5
new file mode 100644
index 0000000..fb260f7
--- /dev/null
+++ b/IO/Parallel/Testing/Data/Baseline/TestPDataSetReaderGrid.png.md5
@@ -0,0 +1 @@
+767cbb90be533e3e3ccefb62d1c962f4
diff --git a/IO/Parallel/Testing/Data/Baseline/TestPImageWriter.png.md5 b/IO/Parallel/Testing/Data/Baseline/TestPImageWriter.png.md5
new file mode 100644
index 0000000..2c765b5
--- /dev/null
+++ b/IO/Parallel/Testing/Data/Baseline/TestPImageWriter.png.md5
@@ -0,0 +1 @@
+d12ac791877712d3586c007a2169c008
diff --git a/IO/Parallel/Testing/Data/Baseline/TestPOpenFOAMReader.png.md5 b/IO/Parallel/Testing/Data/Baseline/TestPOpenFOAMReader.png.md5
new file mode 100644
index 0000000..565e62c
--- /dev/null
+++ b/IO/Parallel/Testing/Data/Baseline/TestPOpenFOAMReader.png.md5
@@ -0,0 +1 @@
+420ec407305c636946d9730b1c010a2e
diff --git a/IO/Parallel/Testing/Python/CMakeLists.txt b/IO/Parallel/Testing/Python/CMakeLists.txt
index f36384b..9c000cf 100644
--- a/IO/Parallel/Testing/Python/CMakeLists.txt
+++ b/IO/Parallel/Testing/Python/CMakeLists.txt
@@ -1,2 +1,2 @@
-add_test_python(TestPDataSetReaderGrid.py Parallel)
-add_test_python(TestPImageWriter.py Parallel)
+vtk_add_test_python(TestPDataSetReaderGrid.py)
+vtk_add_test_python(TestPImageWriter.py)
diff --git a/IO/Parallel/Testing/Tcl/CMakeLists.txt b/IO/Parallel/Testing/Tcl/CMakeLists.txt
index 0223a84..d3b327f 100644
--- a/IO/Parallel/Testing/Tcl/CMakeLists.txt
+++ b/IO/Parallel/Testing/Tcl/CMakeLists.txt
@@ -1,4 +1,2 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestPDataSetReaderGrid Parallel)
-  add_test_tcl(TestPImageWriter Parallel)
-endif()
+vtk_add_test_tcl(TestPDataSetReaderGrid)
+vtk_add_test_tcl(TestPImageWriter)
diff --git a/IO/Parallel/module.cmake b/IO/Parallel/module.cmake
index c5341e6..1d52297 100644
--- a/IO/Parallel/module.cmake
+++ b/IO/Parallel/module.cmake
@@ -6,7 +6,10 @@ vtk_module(vtkIOParallel
     vtkFiltersParallel
     vtkIONetCDF
     vtkIOXML
+    vtkIOImage
+  PRIVATE_DEPENDS
     vtkexodusII
+    vtknetcdf
   TEST_DEPENDS
     vtkParallelMPI
     vtkRenderingParallel
diff --git a/IO/Parallel/vtkXMLPUniformGridAMRWriter.cxx b/IO/Parallel/vtkXMLPUniformGridAMRWriter.cxx
index a888bcf..3a36601 100644
--- a/IO/Parallel/vtkXMLPUniformGridAMRWriter.cxx
+++ b/IO/Parallel/vtkXMLPUniformGridAMRWriter.cxx
@@ -16,7 +16,7 @@
 
 #include "vtkMultiProcessController.h"
 #include "vtkObjectFactory.h"
-#include "assert.h"
+#include <cassert>
 
 //----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkXMLPUniformGridAMRWriter);
diff --git a/IO/ParallelExodus/Testing/Cxx/CMakeLists.txt b/IO/ParallelExodus/Testing/Cxx/CMakeLists.txt
index 3d95110..8176c7d 100644
--- a/IO/ParallelExodus/Testing/Cxx/CMakeLists.txt
+++ b/IO/ParallelExodus/Testing/Cxx/CMakeLists.txt
@@ -1 +1,2 @@
-vtk_tests(TestExodusImplicitArrays.cxx DATADIR ${VTK_DATA_ROOT})
+vtk_add_test_cxx(TestExodusImplicitArrays.cxx NO_VALID)
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/ParallelExodus/module.cmake b/IO/ParallelExodus/module.cmake
index 974c6f2..320cd7f 100644
--- a/IO/ParallelExodus/module.cmake
+++ b/IO/ParallelExodus/module.cmake
@@ -4,6 +4,8 @@ vtk_module(vtkIOParallelExodus
   DEPENDS
     vtkParallelCore
     vtkIOExodus
+  PRIVATE_DEPENDS
+    vtksys
     vtkexodusII
   TEST_DEPENDS
     vtkTestingRendering
diff --git a/IO/ParallelExodus/vtkPExodusIIReader.cxx b/IO/ParallelExodus/vtkPExodusIIReader.cxx
index c66db36..227f72a 100644
--- a/IO/ParallelExodus/vtkPExodusIIReader.cxx
+++ b/IO/ParallelExodus/vtkPExodusIIReader.cxx
@@ -286,8 +286,7 @@ int vtkPExodusIIReader::RequestInformation(
       char* nm =
         new char[strlen( this->FilePattern ) + strlen( this->FilePrefix ) + 20];
       sprintf( nm, this->FilePattern, this->FilePrefix, this->FileRange[0] );
-      if ( this->FileName )
-        delete [] this->FileName;
+      delete [] this->FileName;
       this->FileName = nm;
       //this->Superclass::SetFileName( nm ); // XXX Bad set
       //delete [] nm;
@@ -915,10 +914,8 @@ int vtkPExodusIIReader::DeterminePattern( const char* file )
     //this->SetFilePattern( pattern ); // XXX Bad set
     //this->SetFilePrefix( file ); // XXX Bad set
     //this->SetFileRange( min, max ); // XXX Bad set
-    if ( this->FilePattern )
-      delete [] this->FilePattern;
-    if ( this->FilePrefix )
-      delete [] this->FilePrefix;
+    delete [] this->FilePattern;
+    delete [] this->FilePrefix;
     this->FilePattern = vtksys::SystemTools::DuplicateString( pattern );
     this->FilePrefix = vtksys::SystemTools::DuplicateString( file );
     this->FileRange[0] = min;
@@ -1008,10 +1005,8 @@ int vtkPExodusIIReader::DeterminePattern( const char* file )
   //this->SetFilePattern( pattern ); // XXX Bad set
   //this->SetFilePrefix( prefix ); // XXX Bad set
   //delete [] prefix;
-  if ( this->FilePattern )
-    delete [] this->FilePattern;
-  if ( this->FilePrefix )
-    delete [] this->FilePrefix;
+  delete [] this->FilePattern;
+  delete [] this->FilePrefix;
   this->FilePattern = vtksys::SystemTools::DuplicateString( pattern );
   this->FilePrefix = vtksys::SystemTools::DuplicateString(prefix.c_str());
 
@@ -1604,10 +1599,8 @@ void vtkPExodusIIReader::Broadcast( vtkMultiProcessController* ctrl )
     else
       {
       std::vector<char> tmp;
-      if ( this->FilePattern )
-        delete [] this->FilePattern;
-      if ( this->FilePrefix )
-        delete [] this->FilePrefix;
+      delete [] this->FilePattern;
+      delete [] this->FilePrefix;
       //this->SetFilePattern( BroadcastRecvString( ctrl, tmp ) ? &tmp[0] : 0 ); // XXX Bad set
       //this->SetFilePrefix(  BroadcastRecvString( ctrl, tmp ) ? &tmp[0] : 0 ); // XXX Bad set
       this->FilePattern = BroadcastRecvString( ctrl, tmp ) ? vtksys::SystemTools::DuplicateString( &tmp[0] ) : 0;
diff --git a/IO/ParallelLSDyna/Testing/Cxx/CMakeLists.txt b/IO/ParallelLSDyna/Testing/Cxx/CMakeLists.txt
index 1d736e7..2c97253 100644
--- a/IO/ParallelLSDyna/Testing/Cxx/CMakeLists.txt
+++ b/IO/ParallelLSDyna/Testing/Cxx/CMakeLists.txt
@@ -1,3 +1,8 @@
-if (VTK_LARGE_DATA_ROOT)
-  add_test_mpi(PLSDynaReader.cxx DATADIR ${VTK_LARGE_DATA_ROOT})
+if(VTK_USE_LARGE_DATA)
+  # Tell ExternalData to fetch test input at build time.
+  ExternalData_Expand_Arguments(VTKData _
+    "DATA{${VTK_TEST_INPUT_DIR}/LSDyna/hemi.draw/,REGEX:.*}"
+    )
+
+  vtk_add_test_mpi(PLSDynaReader.cxx TESTING_DATA)
 endif()
diff --git a/IO/ParallelLSDyna/Testing/Data/Baseline/PLSDynaReader.png.md5 b/IO/ParallelLSDyna/Testing/Data/Baseline/PLSDynaReader.png.md5
new file mode 100644
index 0000000..bb3dfdb
--- /dev/null
+++ b/IO/ParallelLSDyna/Testing/Data/Baseline/PLSDynaReader.png.md5
@@ -0,0 +1 @@
+4e96103835f3a401d328436a508ce33c
diff --git a/IO/ParallelNetCDF/CMakeLists.txt b/IO/ParallelNetCDF/CMakeLists.txt
index b7ce035..2bf4516 100644
--- a/IO/ParallelNetCDF/CMakeLists.txt
+++ b/IO/ParallelNetCDF/CMakeLists.txt
@@ -1,11 +1,8 @@
-find_package(MPI)
-include_directories(${MPI_INCLUDE_PATH})
-add_definitionS("-DMPICH_IGNORE_CXX_SEEK")
+include(vtkMPI)
 
 set(Module_SRCS
   vtkPNetCDFPOPReader.cxx
   )
 
 vtk_module_library(vtkIOParallelNetCDF ${Module_SRCS})
-
-target_link_libraries(vtkIOParallelNetCDF vtkNetCDF_cxx)
+vtk_mpi_link(vtkIOParallelNetCDF)
diff --git a/IO/ParallelNetCDF/Testing/Cxx/CMakeLists.txt b/IO/ParallelNetCDF/Testing/Cxx/CMakeLists.txt
index 7d10257..8d8fb46 100644
--- a/IO/ParallelNetCDF/Testing/Cxx/CMakeLists.txt
+++ b/IO/ParallelNetCDF/Testing/Cxx/CMakeLists.txt
@@ -1,15 +1,12 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestPNetCDFPOPReader.cxx
+if(VTK_USE_LARGE_DATA)
+  # Tell ExternalData to fetch test input at build time.
+  ExternalData_Expand_Arguments(VTKData _
+    "DATA{${VTK_TEST_INPUT_DIR}/NetCDF/,REGEX:.*}"
+    )
 
-  EXTRA_INCLUDE vtkTestDriver.h
-)
+  vtk_add_test_cxx(
+    TestPNetCDFPOPReader.cxx
+    )
+endif()
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-if(VTK_LARGE_DATA_ROOT)
-  add_test(NAME vtkIOParallelNetCDFCxx-TestPNetCDFPOPReader
-    COMMAND ${vtk-module}CxxTests TestPNetCDFPOPReader
-    -D ${VTK_LARGE_DATA_ROOT}
-    -T ${VTK_TEST_OUTPUT_DIR}
-    -V Baseline/IO/TestNetCDFPOPReader.png)
-endif(VTK_LARGE_DATA_ROOT)
\ No newline at end of file
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/ParallelNetCDF/Testing/Cxx/TestPNetCDFPOPReader.cxx b/IO/ParallelNetCDF/Testing/Cxx/TestPNetCDFPOPReader.cxx
index 785f1b6..1d2a803 100644
--- a/IO/ParallelNetCDF/Testing/Cxx/TestPNetCDFPOPReader.cxx
+++ b/IO/ParallelNetCDF/Testing/Cxx/TestPNetCDFPOPReader.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 // .NAME Test of vtkPNetCDFPOPReader
 // .SECTION Description
-// Tests the vtkPNetCDFPOPReader.  Requires VTKLargeData to be set.
+// Tests the vtkPNetCDFPOPReader.
 
 #include "vtkPNetCDFPOPReader.h"
 
diff --git a/IO/ParallelNetCDF/Testing/Data/Baseline/TestPNetCDFPOPReader.png.md5 b/IO/ParallelNetCDF/Testing/Data/Baseline/TestPNetCDFPOPReader.png.md5
new file mode 100644
index 0000000..8f11d78
--- /dev/null
+++ b/IO/ParallelNetCDF/Testing/Data/Baseline/TestPNetCDFPOPReader.png.md5
@@ -0,0 +1 @@
+a5242241ce452fffe15d3e58602aa9c3
diff --git a/IO/ParallelNetCDF/module.cmake b/IO/ParallelNetCDF/module.cmake
index 0652865..7dbdd06 100644
--- a/IO/ParallelNetCDF/module.cmake
+++ b/IO/ParallelNetCDF/module.cmake
@@ -4,7 +4,7 @@ vtk_module(vtkIOParallelNetCDF
   DEPENDS
     vtkCommonCore
     vtkParallelMPI
-  COMPILE_DEPENDS
+  PRIVATE_DEPENDS
     vtknetcdf
   TEST_DEPENDS
     vtkCommonExecutionModel
diff --git a/IO/PostgreSQL/CMakeLists.txt b/IO/PostgreSQL/CMakeLists.txt
index fac22f5..d554a7c 100644
--- a/IO/PostgreSQL/CMakeLists.txt
+++ b/IO/PostgreSQL/CMakeLists.txt
@@ -15,4 +15,4 @@ include_directories(${PostgreSQL_INCLUDE_DIRS})
 
 vtk_module_library(vtkIOPostgreSQL ${PostgreSQL_SRCS})
 
-target_link_libraries(vtkIOPostgreSQL ${PostgreSQL_LIBRARY})
+target_link_libraries(vtkIOPostgreSQL LINK_PRIVATE ${PostgreSQL_LIBRARY})
diff --git a/IO/PostgreSQL/Testing/Cxx/CMakeLists.txt b/IO/PostgreSQL/Testing/Cxx/CMakeLists.txt
index edfe985..37dc917 100644
--- a/IO/PostgreSQL/Testing/Cxx/CMakeLists.txt
+++ b/IO/PostgreSQL/Testing/Cxx/CMakeLists.txt
@@ -1,34 +1,18 @@
 if(VTK_PSQL_TEST_URL)
-
   configure_file(${CMAKE_CURRENT_SOURCE_DIR}/vtkIOPostgresSQLTestingCxxConfigure.h.in
   ${CMAKE_CURRENT_BINARY_DIR}/vtkIOPostgresSQLTestingCxxConfigure.h)
 
   include_directories(${CMAKE_CURRENT_BINARY_DIR})
 
-  create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-    TestPostgreSQLDatabase
-    TestPostgreSQLTableReadWrite.cxx
-    EXTRA_INCLUDE vtkTestDriver.h
-  )
-
-  vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-  set(TestsToRun ${Tests})
-  list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-  add_test(NAME ${vtk-module}Cxx-TestPostgreSQLDatabase
-           COMMAND ${vtk-module}CxxTests TestPostgreSQLDatabase)
-
-  if(VTK_DATA_ROOT)
-    if(WIN32)
-      add_test(NAME ${vtk-module}Cxx-TestPostgreSQLTableReadWrite
-               COMMAND ${vtk-module}CxxTests TestPostgreSQLTableReadWrite
-                       ${VTK_DATA_ROOT}/Baseline/IO/simple_table_win.vtk)
-    else()
-      add_test(NAME ${vtk-module}Cxx-TestPostgreSQLTableReadWrite
-               COMMAND ${vtk-module}CxxTests TestPostgreSQLTableReadWrite
-                       ${VTK_DATA_ROOT}/Baseline/IO/simple_table.vtk)
-    endif()
+  vtk_add_test_cxx(TestPostgreSQLDatabase.cxx NO_DATA NO_VALID)
+  if(WIN32)
+    set(TestPostgreSQLTableReadWrite_ARGS
+      DATA{../Data/Input/simple_table_win.vtk})
+  else()
+    set(TestPostgreSQLTableReadWrite_ARGS
+      DATA{../Data/Input/simple_table.vtk})
   endif()
+  vtk_add_test_cxx(TestPostgreSQLTableReadWrite.cxx NO_DATA NO_VALID)
 
+  vtk_test_cxx_executable(${vtk-module}CxxTests)
 endif()
diff --git a/IO/PostgreSQL/Testing/Data/Input/simple_table.vtk.md5 b/IO/PostgreSQL/Testing/Data/Input/simple_table.vtk.md5
new file mode 100644
index 0000000..ee95258
--- /dev/null
+++ b/IO/PostgreSQL/Testing/Data/Input/simple_table.vtk.md5
@@ -0,0 +1 @@
+7fea0586c39ab4442ac43970cc78fedc
diff --git a/IO/PostgreSQL/Testing/Data/Input/simple_table_win.vtk.md5 b/IO/PostgreSQL/Testing/Data/Input/simple_table_win.vtk.md5
new file mode 100644
index 0000000..e313f80
--- /dev/null
+++ b/IO/PostgreSQL/Testing/Data/Input/simple_table_win.vtk.md5
@@ -0,0 +1 @@
+be6f6306020f7cad635170ab61169044
diff --git a/IO/PostgreSQL/module.cmake b/IO/PostgreSQL/module.cmake
index c1e80b9..e40e76c 100644
--- a/IO/PostgreSQL/module.cmake
+++ b/IO/PostgreSQL/module.cmake
@@ -1,6 +1,8 @@
 vtk_module(vtkIOPostgreSQL
   DEPENDS
     vtkCommonDataModel
+  PRIVATE_DEPENDS
+    vtksys
   IMPLEMENTS
     vtkIOSQL
   TEST_DEPENDS
diff --git a/IO/PostgreSQL/vtkPostgreSQLDatabase.cxx b/IO/PostgreSQL/vtkPostgreSQLDatabase.cxx
index af3388c..98cef51 100644
--- a/IO/PostgreSQL/vtkPostgreSQLDatabase.cxx
+++ b/IO/PostgreSQL/vtkPostgreSQLDatabase.cxx
@@ -34,7 +34,7 @@
 
 #include <libpq-fe.h>
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkPostgreSQLDatabase);
 
diff --git a/IO/SQL/Testing/Cxx/CMakeLists.txt b/IO/SQL/Testing/Cxx/CMakeLists.txt
index 2e2aed0..49fd184 100644
--- a/IO/SQL/Testing/Cxx/CMakeLists.txt
+++ b/IO/SQL/Testing/Cxx/CMakeLists.txt
@@ -1,38 +1,13 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(NO_VALID
   TestSQLDatabaseSchema.cxx
   TestSQLiteDatabase.cxx
-  TestSQLiteTableReadWrite.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-list(REMOVE_ITEM TestsToRun TestSQLiteTableReadWrite.cxx)
-
-if(VTK_DATA_ROOT)
-  add_test(NAME ${vtk-module}Cxx-SQLiteTableReadWrite
-    COMMAND ${vtk-module}CxxTests TestSQLiteTableReadWrite
-      ${VTK_DATA_ROOT}/Baseline/IO/simple_table.vtk
   )
-  set_tests_properties(vtkIOSQLCxx-SQLiteTableReadWrite PROPERTIES RUN_SERIAL 1)
-endif()
 
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
+set(TestSQLiteTableReadWrite_ARGS DATA{../Data/Input/simple_table.vtk})
+vtk_add_test_cxx(TestSQLiteTableReadWrite.cxx NO_DATA NO_VALID NO_OUTPUT)
+set_tests_properties(vtkIOSQLCxx-TestSQLiteTableReadWrite PROPERTIES RUN_SERIAL 1)
 
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
 
 # Run these serial to avoid intermittent test failures on machines
 # that do parallel testing
diff --git a/IO/SQL/Testing/Data/Input/simple_table.vtk.md5 b/IO/SQL/Testing/Data/Input/simple_table.vtk.md5
new file mode 100644
index 0000000..ee95258
--- /dev/null
+++ b/IO/SQL/Testing/Data/Input/simple_table.vtk.md5
@@ -0,0 +1 @@
+7fea0586c39ab4442ac43970cc78fedc
diff --git a/IO/SQL/module.cmake b/IO/SQL/module.cmake
index 15a04dc..69ef379 100644
--- a/IO/SQL/module.cmake
+++ b/IO/SQL/module.cmake
@@ -3,6 +3,8 @@ vtk_module(vtkIOSQL
     StandAlone
   DEPENDS
     vtkIOCore
+  PRIVATE_DEPENDS
+    vtksys
     vtksqlite # We should consider splitting this into a module.
   TEST_DEPENDS
     vtkIOLegacy
diff --git a/IO/SQL/vtkSQLiteQuery.cxx b/IO/SQL/vtkSQLiteQuery.cxx
index 36c2b1b..614f47b 100644
--- a/IO/SQL/vtkSQLiteQuery.cxx
+++ b/IO/SQL/vtkSQLiteQuery.cxx
@@ -27,7 +27,7 @@
 
 #include <vtksqlite/vtk_sqlite3.h>
 
-#include <assert.h>
+#include <cassert>
 
 #include <vtksys/ios/sstream>
 #include <vtksys/stl/vector>
diff --git a/IO/VPIC/vtkVPICReader.cxx b/IO/VPIC/vtkVPICReader.cxx
index 54876a6..abb5c03 100644
--- a/IO/VPIC/vtkVPICReader.cxx
+++ b/IO/VPIC/vtkVPICReader.cxx
@@ -106,16 +106,11 @@ vtkVPICReader::~vtkVPICReader()
     }
   this->PointDataArraySelection->Delete();
 
-  if (this->vpicData)
-    delete this->vpicData;
-  if (this->VariableName)
-    delete [] this->VariableName;
-  if (this->VariableStruct)
-    delete [] this->VariableStruct;
-  if (this->TimeSteps)
-    delete [] this->TimeSteps;
-  if (this->dataLoaded)
-    delete [] this->dataLoaded;
+  delete this->vpicData;
+  delete [] this->VariableName;
+  delete [] this->VariableStruct;
+  delete [] this->TimeSteps;
+  delete [] this->dataLoaded;
 
   if (this->exchanger)
     delete this->exchanger;
@@ -351,8 +346,7 @@ int vtkVPICReader::RequestInformation(
       int decomposition[DIMENSION];
       this->vpicData->getDecomposition(decomposition);
 
-      if (this->exchanger)
-        delete this->exchanger;
+      delete this->exchanger;
 
       this->exchanger = new GridExchange
         (this->Rank, this->TotalRank, decomposition,
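
The vtkVPICReader destructor above mixes plain delete (for the single vpicData and exchanger objects) with delete [] (for the arrays); both forms are no-ops on a null pointer, but they are not interchangeable for a non-null one. A tiny illustration with made-up types:

    struct Grid { int cells; };

    int main()
    {
      Grid* single  = new Grid();      // one object -> release with delete
      double* steps = new double[8];   // an array   -> release with delete []

      delete single;                   // correct pairing
      delete [] steps;                 // correct pairing

      Grid* none = 0;
      delete none;                     // deleting a null pointer does nothing
      return 0;
    }

Pairing new with delete [] (or new[] with delete) is undefined behavior, which is why the hunk keeps the exact form used at each allocation site.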
diff --git a/IO/Video/CMakeLists.txt b/IO/Video/CMakeLists.txt
index d2d0fb6..4bec4d0 100644
--- a/IO/Video/CMakeLists.txt
+++ b/IO/Video/CMakeLists.txt
@@ -18,5 +18,5 @@ vtk_module_library(vtkIOVideo ${Module_SRCS})
 
 
 if(WIN32 AND VTK_USE_VIDEO_FOR_WINDOWS)
-  target_link_libraries(vtkIOVideo vfw32)
+  target_link_libraries(vtkIOVideo LINK_PRIVATE vfw32)
 endif()
diff --git a/IO/Video/module.cmake b/IO/Video/module.cmake
index 11aa678..70c171a 100644
--- a/IO/Video/module.cmake
+++ b/IO/Video/module.cmake
@@ -3,4 +3,6 @@ vtk_module(vtkIOVideo
     StandAlone
   DEPENDS
     vtkCommonExecutionModel
+  PRIVATE_DEPENDS
+    vtksys
   )
diff --git a/IO/XML/Testing/Cxx/CMakeLists.txt b/IO/XML/Testing/Cxx/CMakeLists.txt
index 38268a9..57aad56 100644
--- a/IO/XML/Testing/Cxx/CMakeLists.txt
+++ b/IO/XML/Testing/Cxx/CMakeLists.txt
@@ -1,39 +1,14 @@
-if(VTK_DATA_ROOT)
-  set (NEEDS_DATA
-    TestAMRXMLIO.cxx
-    TestXMLHierarchicalBoxDataFileConverter.cxx
-    TestXMLUnstructuredGridReader.cxx
-    )
-endif()
-
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${NEEDS_DATA}
+vtk_add_test_cxx(
+  TestAMRXMLIO.cxx,NO_VALID
+  TestHyperOctreeIO.cxx
+  TestXMLHierarchicalBoxDataFileConverter.cxx,NO_VALID
+  TestXMLUnstructuredGridReader.cxx
+  )
+set(TestXML_ARGS "DATA{${VTK_TEST_INPUT_DIR}/sample.xml}")
+vtk_add_test_cxx(TestXML.cxx NO_DATA NO_VALID NO_OUTPUT)
+
+vtk_add_test_cxx(NO_VALID
   TestDataObjectXMLIO.cxx
-  TestXML.cxx
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-list(REMOVE_ITEM TestsToRun TestXML.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/IO/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+  )
 
-if(VTK_DATA_ROOT)
-  add_test(NAME ${vtk-module}Cxx-XML
-    COMMAND ${vtk-module}CxxTests TestXML ${VTK_DATA_ROOT}/Data/sample.xml)
-endif()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/IO/XML/Testing/Data/Baseline/TestHyperOctreeIO.png.md5 b/IO/XML/Testing/Data/Baseline/TestHyperOctreeIO.png.md5
new file mode 100644
index 0000000..3ab3d58
--- /dev/null
+++ b/IO/XML/Testing/Data/Baseline/TestHyperOctreeIO.png.md5
@@ -0,0 +1 @@
+ea647f390fc0181782bc1ae3fdb9038e
diff --git a/IO/XML/Testing/Data/Baseline/TestXMLImageDataIO.png.md5 b/IO/XML/Testing/Data/Baseline/TestXMLImageDataIO.png.md5
new file mode 100644
index 0000000..6ba88bf
--- /dev/null
+++ b/IO/XML/Testing/Data/Baseline/TestXMLImageDataIO.png.md5
@@ -0,0 +1 @@
+644270dba7a2c0c57cb1d3bf8456e790
diff --git a/IO/XML/Testing/Data/Baseline/TestXMLPolyDataIO.png.md5 b/IO/XML/Testing/Data/Baseline/TestXMLPolyDataIO.png.md5
new file mode 100644
index 0000000..f0c5579
--- /dev/null
+++ b/IO/XML/Testing/Data/Baseline/TestXMLPolyDataIO.png.md5
@@ -0,0 +1 @@
+ab5718f5e12b2d737c9f458deb509eba
diff --git a/IO/XML/Testing/Data/Baseline/TestXMLRectilinearGridIO.png.md5 b/IO/XML/Testing/Data/Baseline/TestXMLRectilinearGridIO.png.md5
new file mode 100644
index 0000000..fcd03a4
--- /dev/null
+++ b/IO/XML/Testing/Data/Baseline/TestXMLRectilinearGridIO.png.md5
@@ -0,0 +1 @@
+a57d6b4826beac168f91d72c4bb931d6
diff --git a/IO/XML/Testing/Data/Baseline/TestXMLStructuredGridIO.png.md5 b/IO/XML/Testing/Data/Baseline/TestXMLStructuredGridIO.png.md5
new file mode 100644
index 0000000..13addf9
--- /dev/null
+++ b/IO/XML/Testing/Data/Baseline/TestXMLStructuredGridIO.png.md5
@@ -0,0 +1 @@
+38bd7c320cee29ebf5eb284b6e3afd1a
diff --git a/IO/XML/Testing/Data/Baseline/TestXMLUnstructuredGridIO.png.md5 b/IO/XML/Testing/Data/Baseline/TestXMLUnstructuredGridIO.png.md5
new file mode 100644
index 0000000..dbbb127
--- /dev/null
+++ b/IO/XML/Testing/Data/Baseline/TestXMLUnstructuredGridIO.png.md5
@@ -0,0 +1 @@
+1c760dc6e9665f773359d0ac3bfb0d1a
diff --git a/IO/XML/Testing/Data/Baseline/TestXMLUnstructuredGridReader.png.md5 b/IO/XML/Testing/Data/Baseline/TestXMLUnstructuredGridReader.png.md5
new file mode 100644
index 0000000..065ab54
--- /dev/null
+++ b/IO/XML/Testing/Data/Baseline/TestXMLUnstructuredGridReader.png.md5
@@ -0,0 +1 @@
+19bdb8c25f847bb4e0750ca5d1d22329
diff --git a/IO/XML/Testing/Python/CMakeLists.txt b/IO/XML/Testing/Python/CMakeLists.txt
index 3856ba8..cd37de9 100644
--- a/IO/XML/Testing/Python/CMakeLists.txt
+++ b/IO/XML/Testing/Python/CMakeLists.txt
@@ -1,8 +1,6 @@
-if (VTK_DATA_ROOT)
-  add_test_python1(TestEmptyXML.py)
-  add_test_python(TestXMLImageDataIO.py IO)
-  add_test_python(TestXMLPolyDataIO.py IO)
-  add_test_python(TestXMLRectilinearGridIO.py IO)
-  add_test_python(TestXMLStructuredGridIO.py IO)
-  add_test_python(TestXMLUnstructuredGridIO.py IO)
-endif()
+vtk_add_test_python(TestEmptyXML.py NO_DATA NO_VALID NO_RT)
+vtk_add_test_python(TestXMLImageDataIO.py)
+vtk_add_test_python(TestXMLPolyDataIO.py)
+vtk_add_test_python(TestXMLRectilinearGridIO.py)
+vtk_add_test_python(TestXMLStructuredGridIO.py)
+vtk_add_test_python(TestXMLUnstructuredGridIO.py)
diff --git a/IO/XML/Testing/Tcl/CMakeLists.txt b/IO/XML/Testing/Tcl/CMakeLists.txt
index 5ad4ae9..bd91c09 100644
--- a/IO/XML/Testing/Tcl/CMakeLists.txt
+++ b/IO/XML/Testing/Tcl/CMakeLists.txt
@@ -1,9 +1,7 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestXMLImageDataIO IO)
-  add_test_tcl(TestXMLRectilinearGridIO IO)
-  add_test_tcl(TestEmptyXML IO)
-  add_test_tcl(TestXMLStructuredGridIO IO)
-endif()
+vtk_add_test_tcl(TestXMLImageDataIO)
+vtk_add_test_tcl(TestXMLRectilinearGridIO)
+vtk_add_test_tcl(TestEmptyXML NO_DATA NO_VALID)
+vtk_add_test_tcl(TestXMLStructuredGridIO)
 
 #    TestXMLUnstructuredGridIO depends on Parallel
 #    TestXMLPolyDataIO depends on Parallel
diff --git a/IO/XML/module.cmake b/IO/XML/module.cmake
index 574b579..0e30e30 100644
--- a/IO/XML/module.cmake
+++ b/IO/XML/module.cmake
@@ -4,9 +4,12 @@ vtk_module(vtkIOXML
   DEPENDS
     vtkIOXMLParser
     vtkIOGeometry
+  PRIVATE_DEPENDS
+    vtksys
   TEST_DEPENDS
     vtkFiltersAMR
     vtkFiltersCore
+    vtkFiltersHyperTree
     vtkFiltersSources
     vtkImagingSources
     vtkInfovisCore
diff --git a/IO/XML/vtkXMLDataReader.cxx b/IO/XML/vtkXMLDataReader.cxx
index e618783..a3c2d8e 100644
--- a/IO/XML/vtkXMLDataReader.cxx
+++ b/IO/XML/vtkXMLDataReader.cxx
@@ -27,7 +27,7 @@
 #include "vtkInformation.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 
-#include "assert.h"
+#include <cassert>
 
 
 //----------------------------------------------------------------------------
diff --git a/IO/XML/vtkXMLGenericDataObjectReader.cxx b/IO/XML/vtkXMLGenericDataObjectReader.cxx
index 1fa64fe..cace64b 100644
--- a/IO/XML/vtkXMLGenericDataObjectReader.cxx
+++ b/IO/XML/vtkXMLGenericDataObjectReader.cxx
@@ -137,11 +137,13 @@ int vtkXMLGenericDataObjectReader::ReadOutputType(const char *name,
         parallel=true;
         return VTK_STRUCTURED_GRID;
         }
-      if(fileDataType.compare("UnstructuredGrid")==0)
+      if(fileDataType.compare("UnstructuredGrid")==0 ||
+         fileDataType.compare("UnstructuredGridBase")==0)
         {
         return VTK_UNSTRUCTURED_GRID;
         }
-      if(fileDataType.compare("PUnstructuredGrid")==0)
+      if(fileDataType.compare("PUnstructuredGrid")==0 ||
+         fileDataType.compare("PUnstructuredGridBase")==0)
         {
         parallel=true;
         return VTK_UNSTRUCTURED_GRID;
diff --git a/IO/XML/vtkXMLOffsetsManager.h b/IO/XML/vtkXMLOffsetsManager.h
index d3940ae..8836d23 100644
--- a/IO/XML/vtkXMLOffsetsManager.h
+++ b/IO/XML/vtkXMLOffsetsManager.h
@@ -45,7 +45,7 @@
 
 #include "vtkSystemIncludes.h"
 #include <vector>
-#include <assert.h>
+#include <cassert>
 
 //----------------------------------------------------------------------------
 class OffsetsManager
@@ -56,9 +56,6 @@ public:
     {
       this->LastMTime = static_cast<unsigned long>(-1); //almost invalid state
     }
-  ~OffsetsManager()
-    {
-    }
   void Allocate(int numTimeStep)
     {
     assert( numTimeStep > 0);
diff --git a/IO/XML/vtkXMLPDataWriter.cxx b/IO/XML/vtkXMLPDataWriter.cxx
index cfbea61..e41ed5c 100644
--- a/IO/XML/vtkXMLPDataWriter.cxx
+++ b/IO/XML/vtkXMLPDataWriter.cxx
@@ -45,10 +45,10 @@ vtkXMLPDataWriter::vtkXMLPDataWriter()
 //----------------------------------------------------------------------------
 vtkXMLPDataWriter::~vtkXMLPDataWriter()
 {
-  if(this->PathName) { delete [] this->PathName; }
-  if(this->FileNameBase) { delete [] this->FileNameBase; }
-  if(this->FileNameExtension) { delete [] this->FileNameExtension; }
-  if(this->PieceFileNameExtension) { delete [] this->PieceFileNameExtension; }
+  delete [] this->PathName;
+  delete [] this->FileNameBase;
+  delete [] this->FileNameExtension;
+  delete [] this->PieceFileNameExtension;
   this->ProgressObserver->Delete();
 }
 
@@ -234,8 +234,7 @@ void vtkXMLPDataWriter::SplitFileName()
 
   // "begin" now points at the beginning of the file name.
   // Look for the first "." to pull off the longest extension.
-  if(this->FileNameExtension)
-    { delete [] this->FileNameExtension; this->FileNameExtension = 0; }
+  delete [] this->FileNameExtension; this->FileNameExtension = 0;
   for(s=begin; s != end; ++s) { if(*s == '.') { break; } }
   if(s < end)
     {
diff --git a/IO/XML/vtkXMLPStructuredGridWriter.h b/IO/XML/vtkXMLPStructuredGridWriter.h
index 627f89e..e192fd3 100644
--- a/IO/XML/vtkXMLPStructuredGridWriter.h
+++ b/IO/XML/vtkXMLPStructuredGridWriter.h
@@ -45,6 +45,10 @@ public:
   vtkStructuredGrid* GetInput();
   //ETX
 
+  // Description:
+  // Get the default file extension for files written by this writer.
+  const char* GetDefaultFileExtension();
+
 protected:
   vtkXMLPStructuredGridWriter();
   ~vtkXMLPStructuredGridWriter();
@@ -53,7 +57,6 @@ protected:
   virtual int FillInputPortInformation(int port, vtkInformation* info);
 
   const char* GetDataSetName();
-  const char* GetDefaultFileExtension();
   vtkXMLStructuredDataWriter* CreateStructuredPieceWriter();
   void WritePData(vtkIndent indent);
 
diff --git a/IO/XML/vtkXMLPUnstructuredGridWriter.cxx b/IO/XML/vtkXMLPUnstructuredGridWriter.cxx
index 4888033..1b42ed2 100644
--- a/IO/XML/vtkXMLPUnstructuredGridWriter.cxx
+++ b/IO/XML/vtkXMLPUnstructuredGridWriter.cxx
@@ -38,9 +38,9 @@ void vtkXMLPUnstructuredGridWriter::PrintSelf(ostream& os, vtkIndent indent)
 }
 
 //----------------------------------------------------------------------------
-vtkUnstructuredGrid* vtkXMLPUnstructuredGridWriter::GetInput()
+vtkUnstructuredGridBase* vtkXMLPUnstructuredGridWriter::GetInput()
 {
-  return static_cast<vtkUnstructuredGrid*>(this->Superclass::GetInput());
+  return static_cast<vtkUnstructuredGridBase*>(this->Superclass::GetInput());
 }
 
 //----------------------------------------------------------------------------
@@ -69,6 +69,6 @@ vtkXMLPUnstructuredGridWriter::CreateUnstructuredPieceWriter()
 int vtkXMLPUnstructuredGridWriter::FillInputPortInformation(
   int vtkNotUsed(port), vtkInformation* info)
 {
-  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkUnstructuredGrid");
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkUnstructuredGridBase");
   return 1;
 }
diff --git a/IO/XML/vtkXMLPUnstructuredGridWriter.h b/IO/XML/vtkXMLPUnstructuredGridWriter.h
index f9caf2c..d94f6e3 100644
--- a/IO/XML/vtkXMLPUnstructuredGridWriter.h
+++ b/IO/XML/vtkXMLPUnstructuredGridWriter.h
@@ -30,7 +30,7 @@
 #include "vtkIOXMLModule.h" // For export macro
 #include "vtkXMLPUnstructuredDataWriter.h"
 
-class vtkUnstructuredGrid;
+class vtkUnstructuredGridBase;
 
 class VTKIOXML_EXPORT vtkXMLPUnstructuredGridWriter : public vtkXMLPUnstructuredDataWriter
 {
@@ -42,7 +42,7 @@ public:
   //BTX
   // Description:
   // Get/Set the writer's input.
-  vtkUnstructuredGrid* GetInput();
+  vtkUnstructuredGridBase* GetInput();
   //ETX
 
   // Description:
diff --git a/IO/XML/vtkXMLPolyDataReader.cxx b/IO/XML/vtkXMLPolyDataReader.cxx
index 73fd7e4..940e25c 100644
--- a/IO/XML/vtkXMLPolyDataReader.cxx
+++ b/IO/XML/vtkXMLPolyDataReader.cxx
@@ -22,7 +22,7 @@
 #include "vtkInformation.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkXMLPolyDataReader);
 
diff --git a/IO/XML/vtkXMLReader.cxx b/IO/XML/vtkXMLReader.cxx
index 3a44ea6..3da5200 100644
--- a/IO/XML/vtkXMLReader.cxx
+++ b/IO/XML/vtkXMLReader.cxx
@@ -34,7 +34,7 @@
 
 #include <vtksys/ios/sstream>
 #include <sys/stat.h>
-#include <assert.h>
+#include <cassert>
 #include <locale> // C++ locale
 
 //-----------------------------------------------------------------------------
@@ -394,7 +394,7 @@ int vtkXMLReader
     // this->ReadXMLInformation()
     int numTimesteps = this->GetNumberOfTimeSteps();
     this->TimeStepRange[0] = 0;
-    this->TimeStepRange[1] = numTimesteps-1;
+    this->TimeStepRange[1] = (numTimesteps > 0 ? numTimesteps-1 : 0);
     if (numTimesteps != 0)
       {
       double* timeSteps = new double[numTimesteps];
@@ -1192,7 +1192,7 @@ void vtkXMLReader::GetProgressRange(float* range)
 }
 
 //----------------------------------------------------------------------------
-void vtkXMLReader::SetProgressRange(float* range, int curStep, int numSteps)
+void vtkXMLReader::SetProgressRange(const float range[2], int curStep, int numSteps)
 {
   float stepSize = (range[1] - range[0])/numSteps;
   this->ProgressRange[0] = range[0] + stepSize*curStep;
@@ -1201,8 +1201,8 @@ void vtkXMLReader::SetProgressRange(float* range, int curStep, int numSteps)
 }
 
 //----------------------------------------------------------------------------
-void vtkXMLReader::SetProgressRange(float* range, int curStep,
-                                    float* fractions)
+void vtkXMLReader::SetProgressRange(const float range[2], int curStep,
+                                    const float* fractions)
 {
   float width = range[1] - range[0];
   this->ProgressRange[0] = range[0] + fractions[curStep]*width;
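
The SetProgressRange overloads above now take the parent range as const float[2]; what they compute is plain interval subdivision. A standalone sketch of that arithmetic, written as a free function rather than the VTK member:

    #include <cstdio>

    // Sub-range covered by step curStep out of numSteps equal steps of range.
    void subRange(const float range[2], int curStep, int numSteps, float out[2])
    {
      float stepSize = (range[1] - range[0]) / numSteps;
      out[0] = range[0] + stepSize * curStep;
      out[1] = range[0] + stepSize * (curStep + 1);
    }

    int main()
    {
      const float whole[2] = { 0.25f, 0.75f };
      float piece[2];
      subRange(whole, 1, 4, piece);                    // second of four steps
      std::printf("[%g, %g]\n", piece[0], piece[1]);   // prints [0.375, 0.5]
      return 0;
    }

The overload taking a fractions array works the same way, except the step boundaries come from the caller-supplied fractions instead of being equally spaced.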
diff --git a/IO/XML/vtkXMLReader.h b/IO/XML/vtkXMLReader.h
index 5af149c..81b12dd 100644
--- a/IO/XML/vtkXMLReader.h
+++ b/IO/XML/vtkXMLReader.h
@@ -233,8 +233,8 @@ protected:
 
   // incrementally fine-tuned progress updates.
   virtual void GetProgressRange(float* range);
-  virtual void SetProgressRange(float* range, int curStep, int numSteps);
-  virtual void SetProgressRange(float* range, int curStep, float* fractions);
+  virtual void SetProgressRange(const float range[2], int curStep, int numSteps);
+  virtual void SetProgressRange(const float range[2], int curStep, const float* fractions);
   virtual void UpdateProgressDiscrete(float progress);
   float ProgressRange[2];
 
diff --git a/IO/XML/vtkXMLUniformGridAMRWriter.cxx b/IO/XML/vtkXMLUniformGridAMRWriter.cxx
index c06e1b9..ce79cf5 100644
--- a/IO/XML/vtkXMLUniformGridAMRWriter.cxx
+++ b/IO/XML/vtkXMLUniformGridAMRWriter.cxx
@@ -23,7 +23,7 @@
 #include "vtkUniformGrid.h"
 #include "vtkXMLDataElement.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkXMLUniformGridAMRWriter);
 //----------------------------------------------------------------------------
diff --git a/IO/XML/vtkXMLUnstructuredDataReader.cxx b/IO/XML/vtkXMLUnstructuredDataReader.cxx
index 270120f..c57115c 100644
--- a/IO/XML/vtkXMLUnstructuredDataReader.cxx
+++ b/IO/XML/vtkXMLUnstructuredDataReader.cxx
@@ -23,7 +23,7 @@
 #include "vtkInformation.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 
-#include <assert.h>
+#include <cassert>
 
 
 //----------------------------------------------------------------------------
diff --git a/IO/XML/vtkXMLUnstructuredDataWriter.cxx b/IO/XML/vtkXMLUnstructuredDataWriter.cxx
index 84e2056..5703fa5 100644
--- a/IO/XML/vtkXMLUnstructuredDataWriter.cxx
+++ b/IO/XML/vtkXMLUnstructuredDataWriter.cxx
@@ -16,6 +16,7 @@
 
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkDataArray.h"
 #include "vtkDataCompressor.h"
 #include "vtkDataSetAttributes.h"
@@ -33,7 +34,7 @@
 #include "vtkXMLOffsetsManager.h"
 #undef  vtkXMLOffsetsManager_DoNotInclude
 
-#include <assert.h>
+#include <cassert>
 
 
 //----------------------------------------------------------------------------
@@ -555,6 +556,28 @@ void vtkXMLUnstructuredDataWriter::WriteAppendedPieceData(int index)
 }
 
 //----------------------------------------------------------------------------
+void vtkXMLUnstructuredDataWriter::WriteCellsInline(
+    const char *name, vtkCellIterator *cellIter, vtkIdType numCells,
+    vtkIdType cellSizeEstimate, vtkIndent indent)
+{
+  this->ConvertCells(cellIter, numCells, cellSizeEstimate);
+
+  // Faces are not supported via this method.
+  this->Faces->SetNumberOfTuples(0);
+  this->FaceOffsets->SetNumberOfTuples(0);
+
+  vtkNew<vtkUnsignedCharArray> types;
+  types->Allocate(numCells);
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
+    {
+    types->InsertNextValue(static_cast<unsigned char>(cellIter->GetCellType()));
+    }
+
+  this->WriteCellsInlineWorker(name, types.GetPointer(), indent);
+}
+
+//----------------------------------------------------------------------------
 void vtkXMLUnstructuredDataWriter::WriteCellsInline(const char* name,
                                                     vtkCellArray* cells,
                                                     vtkDataArray* types,
@@ -573,6 +596,13 @@ void vtkXMLUnstructuredDataWriter::WriteCellsInline(const char* name,
   this->ConvertCells(cells);
   this->ConvertFaces(faces, faceOffsets);
 
+  this->WriteCellsInlineWorker(name, types, indent);
+}
+
+//----------------------------------------------------------------------------
+void vtkXMLUnstructuredDataWriter::WriteCellsInlineWorker(
+    const char *name, vtkDataArray *types, vtkIndent indent)
+{
   ostream& os = *(this->Stream);
   os << indent << "<" << name << ">\n";
 
@@ -693,6 +723,22 @@ void vtkXMLUnstructuredDataWriter::WriteCellsAppended(const char* name,
 }
 
 //----------------------------------------------------------------------------
+void vtkXMLUnstructuredDataWriter::WriteCellsAppended(
+    const char *name, vtkCellIterator *cellIter, vtkIdType numCells,
+    vtkIndent indent, OffsetsManagerGroup *cellsManager)
+{
+  vtkNew<vtkUnsignedCharArray> types;
+  types->Allocate(numCells);
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
+    {
+    types->InsertNextValue(static_cast<unsigned char>(cellIter->GetCellType()));
+    }
+
+  this->WriteCellsAppended(name, types.GetPointer(), indent, cellsManager);
+}
+
+//----------------------------------------------------------------------------
 void
 vtkXMLUnstructuredDataWriter::WriteCellsAppendedData(vtkCellArray* cells,
                                                      vtkDataArray* types,
@@ -703,6 +749,30 @@ vtkXMLUnstructuredDataWriter::WriteCellsAppendedData(vtkCellArray* cells,
 }
 
 //----------------------------------------------------------------------------
+void vtkXMLUnstructuredDataWriter::WriteCellsAppendedData(
+    vtkCellIterator *cellIter, vtkIdType numCells, vtkIdType cellSizeEstimate,
+    int timestep, OffsetsManagerGroup *cellsManager)
+{
+  this->ConvertCells(cellIter, numCells, cellSizeEstimate);
+
+  // Faces are not supported by this method:
+  this->Faces->SetNumberOfTuples(0);
+  this->FaceOffsets->SetNumberOfTuples(0);
+
+  vtkNew<vtkUnsignedCharArray> types;
+  types->Allocate(this->CellOffsets->GetNumberOfTuples() + 1);
+
+  for(cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+      cellIter->GoToNextCell())
+    {
+    types->InsertNextValue(static_cast<unsigned char>(cellIter->GetCellType()));
+    }
+
+  this->WriteCellsAppendedDataWorker(types.GetPointer(), timestep,
+                                     cellsManager);
+}
+
+//----------------------------------------------------------------------------
 void
 vtkXMLUnstructuredDataWriter::WriteCellsAppendedData(vtkCellArray* cells,
                                                      vtkDataArray* types,
@@ -717,7 +787,13 @@ vtkXMLUnstructuredDataWriter::WriteCellsAppendedData(vtkCellArray* cells,
     }
 
   this->ConvertFaces(faces, faceOffsets);
+  this->WriteCellsAppendedDataWorker(types, timestep, cellsManager);
+}
 
+//----------------------------------------------------------------------------
+void vtkXMLUnstructuredDataWriter::WriteCellsAppendedDataWorker(
+    vtkDataArray *types, int timestep, OffsetsManagerGroup *cellsManager)
+{
   // Split progress by cell connectivity, offset, and type arrays.
   float progressRange[5] = {0,0,0,0,0};
   this->GetProgressRange(progressRange);
@@ -766,7 +842,31 @@ vtkXMLUnstructuredDataWriter::WriteCellsAppendedData(vtkCellArray* cells,
                                         "offset" );
         }
       }
+  }
+}
+
+//----------------------------------------------------------------------------
+void vtkXMLUnstructuredDataWriter::ConvertCells(
+    vtkCellIterator *cellIter, vtkIdType numCells, vtkIdType cellSizeEstimate)
+{
+  this->CellPoints->Allocate(numCells * cellSizeEstimate);
+  this->CellOffsets->Allocate(numCells);
+
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
+    {
+    vtkIdType *begin = cellIter->GetPointIds()->GetPointer(0);
+    vtkIdType *end = begin + cellIter->GetNumberOfPoints();
+    while (begin != end)
+      {
+      this->CellPoints->InsertNextValue(*begin++);
+      }
+
+    this->CellOffsets->InsertNextValue(this->CellPoints->GetNumberOfTuples());
     }
+
+  this->CellPoints->Squeeze();
+  this->CellOffsets->Squeeze();
 }
 
 //----------------------------------------------------------------------------
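
The new ConvertCells(vtkCellIterator*, ...) overload above visits each cell once, appends its point ids to CellPoints, and records the running total in CellOffsets. An STL-only sketch of the same connectivity/offsets layout (illustrative, not the VTK implementation):

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    int main()
    {
      // Two cells: a triangle (3 point ids) and a quad (4 point ids).
      int tri[]  = { 0, 1, 2 };
      int quad[] = { 2, 3, 4, 5 };
      std::vector<std::vector<int> > cells;
      cells.push_back(std::vector<int>(tri,  tri  + 3));
      cells.push_back(std::vector<int>(quad, quad + 4));

      std::vector<int> connectivity;   // all point ids, back to back
      std::vector<int> offsets;        // cumulative point count after each cell

      for (std::size_t c = 0; c < cells.size(); ++c)
        {
        connectivity.insert(connectivity.end(), cells[c].begin(), cells[c].end());
        offsets.push_back(static_cast<int>(connectivity.size()));
        }

      // connectivity = 0 1 2 2 3 4 5, offsets = 3 7: cell c spans
      // connectivity[offsets[c-1] .. offsets[c]-1], with offsets[-1] taken as 0.
      std::printf("%lu ids, %lu offsets\n",
                  static_cast<unsigned long>(connectivity.size()),
                  static_cast<unsigned long>(offsets.size()));
      return 0;
    }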
diff --git a/IO/XML/vtkXMLUnstructuredDataWriter.h b/IO/XML/vtkXMLUnstructuredDataWriter.h
index e347e74..361066b 100644
--- a/IO/XML/vtkXMLUnstructuredDataWriter.h
+++ b/IO/XML/vtkXMLUnstructuredDataWriter.h
@@ -25,6 +25,7 @@
 
 class vtkPointSet;
 class vtkCellArray;
+class vtkCellIterator;
 class vtkDataArray;
 class vtkIdTypeArray;
 class vtkUnstructuredGrid;
@@ -81,6 +82,10 @@ protected:
   virtual void WriteAppendedPiece(int index, vtkIndent indent);
   virtual void WriteAppendedPieceData(int index);
 
+  void WriteCellsInline(const char* name, vtkCellIterator *cellIter,
+                        vtkIdType numCells, vtkIdType cellSizeEstimate,
+                        vtkIndent indent);
+
   void WriteCellsInline(const char* name, vtkCellArray* cells,
                         vtkDataArray* types, vtkIndent indent);
 
@@ -89,17 +94,34 @@ protected:
                         vtkDataArray* types, vtkIdTypeArray* faces,
                         vtkIdTypeArray* faceOffsets, vtkIndent indent);
 
+  void WriteCellsInlineWorker(const char* name, vtkDataArray *types,
+                              vtkIndent indent);
+
   void WriteCellsAppended(const char* name, vtkDataArray* types,
                           vtkIndent indent, OffsetsManagerGroup *cellsManager);
 
+  void WriteCellsAppended(const char* name, vtkCellIterator *cellIter,
+                          vtkIdType numCells, vtkIndent indent,
+                          OffsetsManagerGroup *cellsManager);
+
   void WriteCellsAppendedData(vtkCellArray* cells, vtkDataArray* types,
                               int timestep, OffsetsManagerGroup *cellsManager);
 
+  void WriteCellsAppendedData(vtkCellIterator* cellIter, vtkIdType numCells,
+                              vtkIdType cellSizeEstimate, int timestep,
+                              OffsetsManagerGroup *cellsManager);
+
   // New API with face information for polyhedron cell support.
   void WriteCellsAppendedData(vtkCellArray* cells, vtkDataArray* types,
                               vtkIdTypeArray* faces,vtkIdTypeArray* faceOffsets,
                               int timestep, OffsetsManagerGroup *cellsManager);
 
+  void WriteCellsAppendedDataWorker(vtkDataArray* types, int timestep,
+                                    OffsetsManagerGroup *cellsManager);
+
+  void ConvertCells(vtkCellIterator* cellIter, vtkIdType numCells,
+                    vtkIdType cellSizeEstimate);
+
   void ConvertCells(vtkCellArray* cells);
 
   // For polyhedron support, conversion results are stored in Faces and FaceOffsets
diff --git a/IO/XML/vtkXMLUnstructuredGridReader.cxx b/IO/XML/vtkXMLUnstructuredGridReader.cxx
index e84fa08..cc089c2 100644
--- a/IO/XML/vtkXMLUnstructuredGridReader.cxx
+++ b/IO/XML/vtkXMLUnstructuredGridReader.cxx
@@ -23,7 +23,7 @@
 #include "vtkInformation.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkXMLUnstructuredGridReader);
 
diff --git a/IO/XML/vtkXMLUnstructuredGridWriter.cxx b/IO/XML/vtkXMLUnstructuredGridWriter.cxx
index a194665..4a54265 100644
--- a/IO/XML/vtkXMLUnstructuredGridWriter.cxx
+++ b/IO/XML/vtkXMLUnstructuredGridWriter.cxx
@@ -16,6 +16,7 @@
 
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkErrorCode.h"
 #include "vtkInformation.h"
 #include "vtkObjectFactory.h"
@@ -26,7 +27,7 @@
 #include "vtkXMLOffsetsManager.h"
 #undef vtkXMLOffsetsManager_DoNotInclude
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkXMLUnstructuredGridWriter);
 
@@ -49,9 +50,9 @@ void vtkXMLUnstructuredGridWriter::PrintSelf(ostream& os, vtkIndent indent)
 }
 
 //----------------------------------------------------------------------------
-vtkUnstructuredGrid* vtkXMLUnstructuredGridWriter::GetInput()
+vtkUnstructuredGridBase* vtkXMLUnstructuredGridWriter::GetInput()
 {
-  return static_cast<vtkUnstructuredGrid*>(this->Superclass::GetInput());
+  return static_cast<vtkUnstructuredGridBase*>(this->Superclass::GetInput());
 }
 
 //----------------------------------------------------------------------------
@@ -75,14 +76,14 @@ void vtkXMLUnstructuredGridWriter::WriteInlinePieceAttributes()
     return;
     }
 
-  vtkUnstructuredGrid* input = this->GetInput();
+  vtkUnstructuredGridBase* input = this->GetInput();
   this->WriteScalarAttribute("NumberOfCells", input->GetNumberOfCells());
 }
 
 //----------------------------------------------------------------------------
 void vtkXMLUnstructuredGridWriter::WriteInlinePiece(vtkIndent indent)
 {
-  vtkUnstructuredGrid* input = this->GetInput();
+  vtkUnstructuredGridBase* input = this->GetInput();
 
   // Split progress range by the approximate fraction of data written
   // by each step in this method.
@@ -105,11 +106,19 @@ void vtkXMLUnstructuredGridWriter::WriteInlinePiece(vtkIndent indent)
   this->SetProgressRange(progressRange, 1, fractions);
 
   // Write the cell specifications.
-  this->WriteCellsInline("Cells", input->GetCells(),
-                         input->GetCellTypesArray(),
-                         input->GetFaces(),
-                         input->GetFaceLocations(),
-                         indent);
+  if (vtkUnstructuredGrid *grid = vtkUnstructuredGrid::SafeDownCast(input))
+    {
+    // This is a bit more efficient and avoids iteration over all cells.
+    this->WriteCellsInline("Cells", grid->GetCells(), grid->GetCellTypesArray(),
+                           grid->GetFaces(), grid->GetFaceLocations(), indent);
+    }
+  else
+    {
+    vtkCellIterator *cellIter = input->NewCellIterator();
+    this->WriteCellsInline("Cells", cellIter, input->GetNumberOfCells(),
+                           input->GetMaxCellSize(), indent);
+    cellIter->Delete();
+    }
 }
 
 //----------------------------------------------------------------------------
@@ -147,22 +156,32 @@ void vtkXMLUnstructuredGridWriter::WriteAppendedPieceAttributes(int index)
 void vtkXMLUnstructuredGridWriter::WriteAppendedPiece(int index,
                                                       vtkIndent indent)
 {
-  vtkUnstructuredGrid* input = this->GetInput();
+  vtkUnstructuredGridBase* input = this->GetInput();
   this->Superclass::WriteAppendedPiece(index, indent);
   if (this->ErrorCode == vtkErrorCode::OutOfDiskSpaceError)
     {
     return;
     }
 
-  this->WriteCellsAppended("Cells", input->GetCellTypesArray(),
-                           indent, &this->CellsOM->GetPiece(index));
+  if (vtkUnstructuredGrid *grid = vtkUnstructuredGrid::SafeDownCast(input))
+    {
+    this->WriteCellsAppended("Cells", grid->GetCellTypesArray(),
+                             indent, &this->CellsOM->GetPiece(index));
+    }
+  else
+    {
+    vtkCellIterator *cellIter = input->NewCellIterator();
+    this->WriteCellsAppended("Cells", cellIter, input->GetNumberOfCells(),
+                             indent, &this->CellsOM->GetPiece(index));
+    cellIter->Delete();
+    }
 }
 
 //----------------------------------------------------------------------------
 void vtkXMLUnstructuredGridWriter::WriteAppendedPieceData(int index)
 {
   ostream& os = *(this->Stream);
-  vtkUnstructuredGrid* input = this->GetInput();
+  vtkUnstructuredGridBase* input = this->GetInput();
 
   std::streampos returnPosition = os.tellp();
   os.seekp(std::streampos(this->NumberOfCellsPositions[index]));
@@ -194,10 +213,22 @@ void vtkXMLUnstructuredGridWriter::WriteAppendedPieceData(int index)
   this->SetProgressRange(progressRange, 1, fractions);
 
   // Write the cell specification arrays.
-  this->WriteCellsAppendedData( input->GetCells(),
-    input->GetCellTypesArray(), input->GetFaces(),
-    input->GetFaceLocations(), this->CurrentTimeIndex,
-    &this->CellsOM->GetPiece(index));
+  if (vtkUnstructuredGrid *grid = vtkUnstructuredGrid::SafeDownCast(input))
+    {
+    this->WriteCellsAppendedData(grid->GetCells(), grid->GetCellTypesArray(),
+                                 grid->GetFaces(), grid->GetFaceLocations(),
+                                 this->CurrentTimeIndex,
+                                 &this->CellsOM->GetPiece(index));
+    }
+  else
+    {
+    vtkCellIterator *cellIter = input->NewCellIterator();
+    this->WriteCellsAppendedData(cellIter, input->GetNumberOfCells(),
+                                 input->GetMaxCellSize(),
+                                 this->CurrentTimeIndex,
+                                 &this->CellsOM->GetPiece(index));
+    cellIter->Delete();
+    }
 }
 
 //----------------------------------------------------------------------------
@@ -209,7 +240,7 @@ vtkIdType vtkXMLUnstructuredGridWriter::GetNumberOfInputCells()
 //----------------------------------------------------------------------------
 void vtkXMLUnstructuredGridWriter::CalculateSuperclassFraction(float* fractions)
 {
-  vtkUnstructuredGrid* input = this->GetInput();
+  vtkUnstructuredGridBase* input = this->GetInput();
 
   // The superclass will write point/cell data and point specifications.
   int pdArrays = input->GetPointData()->GetNumberOfArrays();
@@ -219,16 +250,30 @@ void vtkXMLUnstructuredGridWriter::CalculateSuperclassFraction(float* fractions)
   vtkIdType pointsSize = this->GetNumberOfInputPoints();
 
   // This class will write cell specifications.
-  vtkIdType connectSize;
-  if(input->GetCells()==0)
+  vtkIdType connectSize = 0;
+  if (vtkUnstructuredGrid *grid = vtkUnstructuredGrid::SafeDownCast(input))
     {
-    connectSize=0;
+    if (grid->GetCells() == 0)
+      {
+      connectSize = 0;
+      }
+    else
+      {
+      connectSize = (grid->GetCells()->GetData()->GetNumberOfTuples() -
+                     grid->GetNumberOfCells());
+      }
     }
   else
     {
-    connectSize = (input->GetCells()->GetData()->GetNumberOfTuples() -
-                   input->GetNumberOfCells());
+    vtkCellIterator *cellIter = input->NewCellIterator();
+    for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+         cellIter->GoToNextCell())
+      {
+      connectSize += cellIter->GetNumberOfPoints();
+      }
+    cellIter->Delete();
     }
+
   vtkIdType offsetSize = input->GetNumberOfCells();
   vtkIdType typesSize = input->GetNumberOfCells();
 
@@ -246,6 +291,6 @@ void vtkXMLUnstructuredGridWriter::CalculateSuperclassFraction(float* fractions)
 int vtkXMLUnstructuredGridWriter::FillInputPortInformation(
   int vtkNotUsed(port), vtkInformation* info)
 {
-  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkUnstructuredGrid");
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkUnstructuredGridBase");
   return 1;
 }
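
The writer above first tries vtkUnstructuredGrid::SafeDownCast and only falls back to the slower cell-iterator path for other vtkUnstructuredGridBase subclasses. A minimal sketch of that fast-path-with-fallback idiom, using dynamic_cast and made-up grid types in place of the VTK classes:

    #include <iostream>

    struct GridBase { virtual ~GridBase() {} };
    struct ConcreteGrid : GridBase {};   // stands in for vtkUnstructuredGrid
    struct MappedGrid   : GridBase {};   // any other GridBase subclass

    void write(GridBase* input)
    {
      if (ConcreteGrid* grid = dynamic_cast<ConcreteGrid*>(input))
        {
        // Fast path: the concrete layout exposes its arrays directly.
        std::cout << "direct array path (" << grid << ")\n";
        }
      else
        {
        // Generic path: iterate cells through the abstract interface.
        std::cout << "cell iterator path\n";
        }
    }

    int main()
    {
      ConcreteGrid a;
      MappedGrid b;
      write(&a);   // direct array path
      write(&b);   // cell iterator path
      return 0;
    }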
diff --git a/IO/XML/vtkXMLUnstructuredGridWriter.h b/IO/XML/vtkXMLUnstructuredGridWriter.h
index 8b797af..c78c367 100644
--- a/IO/XML/vtkXMLUnstructuredGridWriter.h
+++ b/IO/XML/vtkXMLUnstructuredGridWriter.h
@@ -31,7 +31,7 @@
 #include "vtkXMLUnstructuredDataWriter.h"
 
 
-class vtkUnstructuredGrid;
+class vtkUnstructuredGridBase;
 
 class VTKIOXML_EXPORT vtkXMLUnstructuredGridWriter : public vtkXMLUnstructuredDataWriter
 {
@@ -43,7 +43,7 @@ public:
   //BTX
   // Description:
   // Get/Set the writer's input.
-  vtkUnstructuredGrid* GetInput();
+  vtkUnstructuredGridBase* GetInput();
   //ETX
 
   // Description:
diff --git a/IO/XML/vtkXMLWriter.cxx b/IO/XML/vtkXMLWriter.cxx
index 97469fd..163520b 100644
--- a/IO/XML/vtkXMLWriter.cxx
+++ b/IO/XML/vtkXMLWriter.cxx
@@ -20,6 +20,7 @@
 #include "vtkCellData.h"
 #include "vtkCommand.h"
 #include "vtkDataArray.h"
+#include "vtkDataArrayIteratorMacro.h"
 #include "vtkDataSet.h"
 #include "vtkErrorCode.h"
 #include "vtkInformation.h"
@@ -45,7 +46,7 @@
 #include <vtksys/auto_ptr.hxx>
 #include <vtksys/ios/sstream>
 
-#include <assert.h>
+#include <cassert>
 #include <string>
 
 #if !defined(_WIN32) || defined(__CYGWIN__)
@@ -87,18 +88,22 @@ public:
 };
 
 //----------------------------------------------------------------------------
-template <class iterT>
+// Specialize for cases where IterType is ValueType* (common case for
+// vtkDataArrayTemplate subclasses). The last arg is to help less-robust
+// compilers decide between the various overloads by making an unfavorable
+// int-to-long conversion in the IterType overload, making this next
+// overload more favorable when the iterator is a ValueType*.
+template <class ValueType>
 int vtkXMLWriterWriteBinaryDataBlocks(vtkXMLWriter* writer,
-  iterT* iter,
-  int wordType, size_t memWordSize, size_t outWordSize)
+  ValueType* iter, int wordType, size_t memWordSize, size_t outWordSize,
+  size_t numWords, int)
 {
   // generic implementation for fixed component length arrays.
-  size_t numWords = iter->GetNumberOfValues();
   size_t blockWords = writer->GetBlockSize()/outWordSize;
   size_t memBlockSize = blockWords*memWordSize;
 
   // Prepare a pointer and counter to move through the data.
-  unsigned char* ptr = reinterpret_cast<unsigned char*>(iter->GetTuple(0));
+  unsigned char* ptr = reinterpret_cast<unsigned char*>(iter);
   size_t wordsLeft = numWords;
 
   // Do the complete blocks.
@@ -129,10 +134,74 @@ int vtkXMLWriterWriteBinaryDataBlocks(vtkXMLWriter* writer,
 }
 
 //----------------------------------------------------------------------------
-VTK_TEMPLATE_SPECIALIZE
+// Specialize for cases where IterType is some other type with iterator
+// semantics (e.g. vtkMappedDataArray iterators):
+template <class ValueType, class IterType>
 int vtkXMLWriterWriteBinaryDataBlocks(vtkXMLWriter* writer,
-  vtkArrayIteratorTemplate<vtkStdString>* iter,
-  int wordType, size_t vtkNotUsed(memWordSize), size_t outWordSize)
+  IterType iter, int wordType, size_t memWordSize, size_t outWordSize,
+  size_t numWords, long)
+{
+  // generic implementation for fixed component length arrays.
+  size_t blockWords = writer->GetBlockSize()/outWordSize;
+
+  // Prepare a buffer to move through the data.
+  std::vector<unsigned char> buffer(blockWords * memWordSize);
+  size_t wordsLeft = numWords;
+
+  if (buffer.empty())
+    {
+    // No data -- bail here, since the calls to buffer[0] below will segfault.
+    return 1;
+    }
+
+  // Do the complete blocks.
+  vtkXMLWriterHelper::SetProgressPartial(writer, 0);
+  int result = 1;
+  while(result && (wordsLeft >= blockWords))
+    {
+    // Copy data to contiguous buffer:
+    IterType blockEnd = iter + blockWords;
+    ValueType* bufferIter = reinterpret_cast<ValueType*>(&buffer[0]);
+    while (iter != blockEnd)
+      {
+      *bufferIter++ = *iter++;
+      }
+
+    if(!vtkXMLWriterHelper::WriteBinaryDataBlock(writer, &buffer[0], blockWords,
+                                                 wordType))
+      {
+      result = 0;
+      }
+    wordsLeft -= blockWords;
+    vtkXMLWriterHelper::SetProgressPartial(writer,
+                                           float(numWords-wordsLeft)/numWords);
+    }
+
+  // Do the last partial block if any.
+  if(result && (wordsLeft > 0))
+    {
+    // Copy data to contiguous buffer:
+    IterType blockEnd = iter + wordsLeft;
+    ValueType* bufferIter = reinterpret_cast<ValueType*>(&buffer[0]);
+    while (iter != blockEnd)
+      {
+      *bufferIter++ = *iter++;
+      }
+
+    if(!vtkXMLWriterHelper::WriteBinaryDataBlock(writer, &buffer[0], wordsLeft,
+                                                 wordType))
+      {
+      result = 0;
+      }
+    }
+  vtkXMLWriterHelper::SetProgressPartial(writer, 1);
+  return result;
+}
+
+//----------------------------------------------------------------------------
+static int vtkXMLWriterWriteBinaryDataBlocks(
+           vtkXMLWriter* writer, vtkArrayIteratorTemplate<vtkStdString>* iter,
+           int wordType, size_t outWordSize, size_t numStrings, int)
 {
   vtkXMLWriterHelper::SetProgressPartial(writer, 0);
   vtkStdString::value_type* allocated_buffer = 0;
@@ -156,7 +225,6 @@ int vtkXMLWriterWriteBinaryDataBlocks(vtkXMLWriter* writer,
   // For string arrays, writing as binary requires that the strings are written
   // out into a contiguous block. This is essential since the compressor can
   // only compress complete blocks of data.
-  size_t numStrings = iter->GetNumberOfValues();
   size_t maxCharsPerBlock = writer->GetBlockSize() / outWordSize;
 
   size_t index = 0; // index in string array.
@@ -1120,19 +1188,38 @@ int vtkXMLWriter::WriteBinaryDataInternal(vtkAbstractArray* a)
       this->ByteSwapBuffer = new unsigned char[this->BlockSize];
       }
     }
-  int ret;;
-  vtkArrayIterator* iter = a->NewIterator();
+  int ret;
+
+  size_t numValues = static_cast<size_t>(a->GetNumberOfComponents() *
+                                         a->GetNumberOfTuples());
   switch (wordType)
     {
-    vtkArrayIteratorTemplateMacro(
-      ret = vtkXMLWriterWriteBinaryDataBlocks(this,
-        static_cast<VTK_TT*>(iter),
-        wordType, memWordSize, outWordSize));
-  default:
-    vtkWarningMacro("Cannot write binary data of type : " << wordType);
-    ret = 0;
+    vtkDataArrayIteratorMacro(a,
+      ret = vtkXMLWriterWriteBinaryDataBlocks<vtkDAValueType>(
+        this, vtkDABegin, wordType, memWordSize, outWordSize, numValues, 1)
+      );
+    case VTK_STRING:
+      {
+      vtkArrayIterator *aiter = a->NewIterator();
+      vtkArrayIteratorTemplate<vtkStdString> *iter =
+          vtkArrayIteratorTemplate<vtkStdString>::SafeDownCast(aiter);
+      if (iter)
+        {
+        ret = vtkXMLWriterWriteBinaryDataBlocks(
+              this, iter, wordType, outWordSize, numValues, 1);
+        }
+      else
+        {
+        vtkWarningMacro("Unsupported iterator for data type : " << wordType);
+        ret = 0;
+        }
+      aiter->Delete();
+      }
+      break;
+    default:
+      vtkWarningMacro("Cannot write binary data of type : " << wordType);
+      ret = 0;
     }
-  iter->Delete();
 
   // Free the byte swap buffer if it was allocated.
   if(this->ByteSwapBuffer && !this->Int32IdTypeBuffer)
@@ -2904,14 +2991,14 @@ void vtkXMLWriter::DestroyStringArray(int numStrings, char** strings)
 }
 
 //----------------------------------------------------------------------------
-void vtkXMLWriter::GetProgressRange(float* range)
+void vtkXMLWriter::GetProgressRange(float range[2])
 {
   range[0] = this->ProgressRange[0];
   range[1] = this->ProgressRange[1];
 }
 
 //----------------------------------------------------------------------------
-void vtkXMLWriter::SetProgressRange(float* range, int curStep, int numSteps)
+void vtkXMLWriter::SetProgressRange(const float range[2], int curStep, int numSteps)
 {
   float stepSize = (range[1] - range[0])/numSteps;
   this->ProgressRange[0] = range[0] + stepSize*curStep;
@@ -2920,8 +3007,8 @@ void vtkXMLWriter::SetProgressRange(float* range, int curStep, int numSteps)
 }
 
 //----------------------------------------------------------------------------
-void vtkXMLWriter::SetProgressRange(float* range, int curStep,
-                                    float* fractions)
+void vtkXMLWriter::SetProgressRange(const float range[2], int curStep,
+                                    const float* fractions)
 {
   float width = range[1] - range[0];
   this->ProgressRange[0] = range[0] + fractions[curStep]*width;
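
The comment added in the vtkXMLWriter.cxx hunk above describes an overload-resolution trick: both block-writing templates end in a dummy tag parameter (int versus long) and every call passes the int literal 1, so when the iterator really is a ValueType* the pointer overload is the better match, while any other iterator type can only bind to the generic overload. A self-contained sketch of the same dispatch with illustrative names (writeBlocks stands in for vtkXMLWriterWriteBinaryDataBlocks):

    #include <cstddef>
    #include <deque>
    #include <iostream>
    #include <vector>

    // Preferred when the iterator is really a ValueType* (contiguous storage).
    template <class ValueType>
    void writeBlocks(ValueType* iter, std::size_t n, int /*tag*/)
    {
      std::cout << "pointer fast path, n=" << n << ", first=" << *iter << "\n";
    }

    // Fallback for arbitrary iterators (e.g. mapped or strided arrays).
    template <class ValueType, class IterType>
    void writeBlocks(IterType iter, std::size_t n, long /*tag*/)
    {
      std::cout << "generic iterator path, n=" << n << ", first=" << *iter << "\n";
    }

    int main()
    {
      std::vector<float> v(4, 1.0f);
      std::deque<float>  d(4, 2.0f);

      // The literal 1 is an int: for a raw pointer both overloads are viable,
      // but the pointer one matches the tag exactly while the generic one needs
      // an int-to-long conversion, so the pointer overload wins.
      writeBlocks<float>(&v[0], v.size(), 1);      // pointer fast path
      writeBlocks<float>(d.begin(), d.size(), 1);  // only the generic one is viable
      return 0;
    }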
diff --git a/IO/XML/vtkXMLWriter.h b/IO/XML/vtkXMLWriter.h
index 3454332..19b49b9 100644
--- a/IO/XML/vtkXMLWriter.h
+++ b/IO/XML/vtkXMLWriter.h
@@ -420,9 +420,9 @@ protected:
 
   // The current range over which progress is moving.  This allows for
   // incrementally fine-tuned progress updates.
-  virtual void GetProgressRange(float* range);
-  virtual void SetProgressRange(float* range, int curStep, int numSteps);
-  virtual void SetProgressRange(float* range, int curStep, float* fractions);
+  virtual void GetProgressRange(float range[2]);
+  virtual void SetProgressRange(const float range[2], int curStep, int numSteps);
+  virtual void SetProgressRange(const float range[2], int curStep, const float* fractions);
   virtual void SetProgressPartial(float fraction);
   virtual void UpdateProgressDiscrete(float progress);
   float ProgressRange[2];
diff --git a/IO/XMLParser/module.cmake b/IO/XMLParser/module.cmake
index a2937f6..e487316 100644
--- a/IO/XMLParser/module.cmake
+++ b/IO/XMLParser/module.cmake
@@ -6,5 +6,6 @@ vtk_module(vtkIOXMLParser
     vtkCommonSystem
     vtkCommonMisc
     vtkIOCore
+  PRIVATE_DEPENDS
     vtkexpat
   )
diff --git a/IO/XMLParser/vtkXMLDataParser.cxx b/IO/XMLParser/vtkXMLDataParser.cxx
index 3b75f1d..380554f 100644
--- a/IO/XMLParser/vtkXMLDataParser.cxx
+++ b/IO/XMLParser/vtkXMLDataParser.cxx
@@ -81,8 +81,8 @@ vtkXMLDataParser::~vtkXMLDataParser()
   delete [] this->OpenElements;
   this->InlineDataStream->Delete();
   this->AppendedDataStream->Delete();
-  if(this->BlockCompressedSizes) { delete [] this->BlockCompressedSizes; }
-  if(this->BlockStartOffsets) { delete [] this->BlockStartOffsets; }
+  delete [] this->BlockCompressedSizes;
+  delete [] this->BlockStartOffsets;
   this->SetCompressor(0);
   if(this->AsciiDataBuffer) { this->FreeAsciiBuffer(); }
 }
diff --git a/IO/XMLParser/vtkXMLUtilities.h b/IO/XMLParser/vtkXMLUtilities.h
index 2a09f98..030f8e2 100644
--- a/IO/XMLParser/vtkXMLUtilities.h
+++ b/IO/XMLParser/vtkXMLUtilities.h
@@ -127,8 +127,8 @@ public:
   static void UnFactorElements(vtkXMLDataElement *tree);
 
 protected:
-  vtkXMLUtilities() {};
-  ~vtkXMLUtilities() {};
+  vtkXMLUtilities() {}
+  ~vtkXMLUtilities() {}
 
   static int FactorElementsInternal(vtkXMLDataElement *tree,
                                     vtkXMLDataElement *root,
diff --git a/IO/Xdmf2/module.cmake b/IO/Xdmf2/module.cmake
index b3520c1..3c44818 100644
--- a/IO/Xdmf2/module.cmake
+++ b/IO/Xdmf2/module.cmake
@@ -8,6 +8,8 @@ vtk_module(vtkIOXdmf2
     vtkFiltersExtraction
     vtkIOLegacy
     vtkIOXML
+  PRIVATE_DEPENDS
+    vtksys
     vtkxdmf2
   TEST_DEPENDS
     vtkFiltersGeneral
diff --git a/IO/Xdmf2/vtkXdmfHeavyData.cxx b/IO/Xdmf2/vtkXdmfHeavyData.cxx
index b802f75..b346e2c 100644
--- a/IO/Xdmf2/vtkXdmfHeavyData.cxx
+++ b/IO/Xdmf2/vtkXdmfHeavyData.cxx
@@ -41,7 +41,7 @@
 #include "vtkXdmfReaderInternal.h"
 
 #include <deque>
-#include <assert.h>
+#include <cassert>
 
 static void vtkScaleExtents(int in_exts[6], int out_exts[6], int stride[3])
 {
diff --git a/IO/Xdmf2/vtkXdmfReaderInternal.h b/IO/Xdmf2/vtkXdmfReaderInternal.h
index 69760b7..eb0c584 100644
--- a/IO/Xdmf2/vtkXdmfReaderInternal.h
+++ b/IO/Xdmf2/vtkXdmfReaderInternal.h
@@ -48,7 +48,7 @@
 #include <set>
 #include <map>
 #include <vtksys/SystemTools.hxx>
-#include <assert.h>
+#include <cassert>
 #include <functional>
 #include <algorithm>
 #include <vtksys/ios/sstream>
diff --git a/Imaging/Color/vtkImageHSIToRGB.cxx b/Imaging/Color/vtkImageHSIToRGB.cxx
index c9b175d..651ff0b 100644
--- a/Imaging/Color/vtkImageHSIToRGB.cxx
+++ b/Imaging/Color/vtkImageHSIToRGB.cxx
@@ -49,7 +49,7 @@ void vtkImageHSIToRGBExecute(vtkImageHSIToRGB *self,
   // find the region to loop over
   int maxC = inData->GetNumberOfScalarComponents()-1;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Color/vtkImageHSIToRGB.h b/Imaging/Color/vtkImageHSIToRGB.h
index 416c40a..8209cd0 100644
--- a/Imaging/Color/vtkImageHSIToRGB.h
+++ b/Imaging/Color/vtkImageHSIToRGB.h
@@ -45,7 +45,7 @@ public:
 
 protected:
   vtkImageHSIToRGB();
-  ~vtkImageHSIToRGB() {};
+  ~vtkImageHSIToRGB() {}
 
   double Maximum;
 
diff --git a/Imaging/Color/vtkImageHSVToRGB.cxx b/Imaging/Color/vtkImageHSVToRGB.cxx
index 54b594d..349b06b 100644
--- a/Imaging/Color/vtkImageHSVToRGB.cxx
+++ b/Imaging/Color/vtkImageHSVToRGB.cxx
@@ -47,7 +47,7 @@ void vtkImageHSVToRGBExecute(vtkImageHSVToRGB *self,
   // find the region to loop over
   int maxC = inData->GetNumberOfScalarComponents()-1;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Color/vtkImageHSVToRGB.h b/Imaging/Color/vtkImageHSVToRGB.h
index c890751..92b6895 100644
--- a/Imaging/Color/vtkImageHSVToRGB.h
+++ b/Imaging/Color/vtkImageHSVToRGB.h
@@ -46,7 +46,7 @@ public:
 
 protected:
   vtkImageHSVToRGB();
-  ~vtkImageHSVToRGB() {};
+  ~vtkImageHSVToRGB() {}
 
   double Maximum;
 
diff --git a/Imaging/Color/vtkImageLuminance.cxx b/Imaging/Color/vtkImageLuminance.cxx
index bca485c..8fcad86 100644
--- a/Imaging/Color/vtkImageLuminance.cxx
+++ b/Imaging/Color/vtkImageLuminance.cxx
@@ -57,7 +57,7 @@ void vtkImageLuminanceExecute(vtkImageLuminance *self, vtkImageData *inData,
   vtkImageProgressIterator<T> outIt(outData, outExt, self, id);
   float luminance;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Color/vtkImageLuminance.h b/Imaging/Color/vtkImageLuminance.h
index 40db2fb..7ffba6f 100644
--- a/Imaging/Color/vtkImageLuminance.h
+++ b/Imaging/Color/vtkImageLuminance.h
@@ -31,7 +31,7 @@ public:
 
 protected:
   vtkImageLuminance();
-  ~vtkImageLuminance() {};
+  ~vtkImageLuminance() {}
 
   virtual int RequestInformation (vtkInformation *, vtkInformationVector**,
                                   vtkInformationVector *);
diff --git a/Imaging/Color/vtkImageMapToRGBA.h b/Imaging/Color/vtkImageMapToRGBA.h
index f804a3e..c124437 100644
--- a/Imaging/Color/vtkImageMapToRGBA.h
+++ b/Imaging/Color/vtkImageMapToRGBA.h
@@ -34,8 +34,8 @@ public:
   vtkTypeMacro(vtkImageMapToRGBA,vtkImageMapToColors);
 
 protected:
-  vtkImageMapToRGBA() {};
-  ~vtkImageMapToRGBA() {};
+  vtkImageMapToRGBA() {}
+  ~vtkImageMapToRGBA() {}
 private:
   vtkImageMapToRGBA(const vtkImageMapToRGBA&);  // Not implemented.
   void operator=(const vtkImageMapToRGBA&);  // Not implemented.
diff --git a/Imaging/Color/vtkImageQuantizeRGBToIndex.cxx b/Imaging/Color/vtkImageQuantizeRGBToIndex.cxx
index b58a969..889faeb 100644
--- a/Imaging/Color/vtkImageQuantizeRGBToIndex.cxx
+++ b/Imaging/Color/vtkImageQuantizeRGBToIndex.cxx
@@ -40,11 +40,11 @@ public:
       this->Bounds[4] = 0; this->Bounds[5] = 256; };
 
   ~vtkColorQuantizeNode()
-    { if ( this->Histogram[0] ) { delete []this->Histogram[0]; }
-      if ( this->Histogram[1] ) { delete []this->Histogram[1]; }
-      if ( this->Histogram[2] ) { delete []this->Histogram[2]; }
-      if ( this->Child1 ) { delete this->Child1; }
-      if ( this->Child2 ) { delete this->Child2; } };
+    { delete []this->Histogram[0];
+      delete []this->Histogram[1];
+      delete []this->Histogram[2];
+      delete this->Child1;
+      delete this->Child2; };
 
   void SetImageExtent( int v[6] )
     { memcpy( this->ImageExtent, v, 6*sizeof(int) ); };
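The destructor simplification in the vtkImageQuantizeRGBToIndex hunk above relies on the C++ guarantee that applying delete (or delete[]) to a null pointer is a no-op, which makes the explicit null checks redundant. A minimal sketch of the same idiom, with hypothetical names rather than the VTK class itself:

    struct Node
    {
      double *Histogram;   // may never be allocated
      Node   *Child;

      Node() : Histogram(0), Child(0) {}

      ~Node()
      {
        delete [] this->Histogram;  // deleting a null pointer is well-defined and does nothing
        delete this->Child;
      }
    };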
diff --git a/Imaging/Color/vtkImageRGBToHSI.cxx b/Imaging/Color/vtkImageRGBToHSI.cxx
index 1345c02..e9fdb29 100644
--- a/Imaging/Color/vtkImageRGBToHSI.cxx
+++ b/Imaging/Color/vtkImageRGBToHSI.cxx
@@ -49,7 +49,7 @@ void vtkImageRGBToHSIExecute(vtkImageRGBToHSI *self,
   // find the region to loop over
   maxC = inData->GetNumberOfScalarComponents()-1;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Color/vtkImageRGBToHSI.h b/Imaging/Color/vtkImageRGBToHSI.h
index 29e17f5..4187fd1 100644
--- a/Imaging/Color/vtkImageRGBToHSI.h
+++ b/Imaging/Color/vtkImageRGBToHSI.h
@@ -40,7 +40,7 @@ public:
 
 protected:
   vtkImageRGBToHSI();
-  ~vtkImageRGBToHSI() {};
+  ~vtkImageRGBToHSI() {}
 
   double Maximum;
 
diff --git a/Imaging/Color/vtkImageRGBToHSV.cxx b/Imaging/Color/vtkImageRGBToHSV.cxx
index 9133471..6e3c6f8 100644
--- a/Imaging/Color/vtkImageRGBToHSV.cxx
+++ b/Imaging/Color/vtkImageRGBToHSV.cxx
@@ -46,7 +46,7 @@ void vtkImageRGBToHSVExecute(vtkImageRGBToHSV *self,
   // find the region to loop over
   maxC = inData->GetNumberOfScalarComponents()-1;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Color/vtkImageRGBToHSV.h b/Imaging/Color/vtkImageRGBToHSV.h
index 03f132d..569f3bd 100644
--- a/Imaging/Color/vtkImageRGBToHSV.h
+++ b/Imaging/Color/vtkImageRGBToHSV.h
@@ -43,7 +43,7 @@ public:
 
 protected:
   vtkImageRGBToHSV();
-  ~vtkImageRGBToHSV() {};
+  ~vtkImageRGBToHSV() {}
 
   double Maximum;
 
diff --git a/Imaging/Core/Testing/Cxx/CMakeLists.txt b/Imaging/Core/Testing/Cxx/CMakeLists.txt
index 57b86e2..88f325b 100644
--- a/Imaging/Core/Testing/Cxx/CMakeLists.txt
+++ b/Imaging/Core/Testing/Cxx/CMakeLists.txt
@@ -1,57 +1,39 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestImageStencilData.cxx
-  # FastSplatter.cxx # todo (unsatistfied deps)
-  ImageAccumulate.cxx
-  # ImageAutoRange.cxx # todo (unsatistfied deps)
+vtk_add_test_cxx(
+  FastSplatter.cxx
+  ImageAccumulate.cxx,NO_VALID
+  ImageAutoRange.cxx
   ImageBSplineCoefficients.cxx
-  # ImageHistogram.cxx # todo (unsatistfied deps)
-  # ImageHistogramStatistics.cxx # todo (unsatistfied deps)
-  # ImageResize.cxx # todo (unsatistfied deps)
-  # ImageResize3D.cxx # todo (unsatistfied deps)
-  # ImageResizeCropping.cxx # todo (unsatistfied deps)
-  ImageWeightedSum.cxx
-  # ImportExport.cxx # todo (unsatistfied deps)
-  TestUpdateExtentReset.cxx
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-list(REMOVE_ITEM TestsToRun TestImageStencilData.cxx)
+  ImageHistogram.cxx
+  ImageHistogramStatistics.cxx,NO_VALID
+  ImageResize.cxx
+  ImageResize3D.cxx
+  ImageResizeCropping.cxx
+  ImageWeightedSum.cxx,NO_VALID
+  ImportExport.cxx,NO_VALID # todo (unsatistfied deps)
+  TestUpdateExtentReset.cxx,NO_VALID
+  )
 
 # The stencil test is special
-if(VTK_DATA_ROOT)
-  add_test(NAME ${vtk-module}Cxx-AddStencilData
-    COMMAND ${vtk-module}CxxTests TestImageStencilData 1
-      -D ${VTK_DATA_ROOT}
-      -T ${VTK_TEST_OUTPUT_DIR}
-      -V Baseline/Hybrid/TestAddStencilData.png)
-  add_test(NAME ${vtk-module}Cxx-SubtractStencilData
-    COMMAND ${vtk-module}CxxTests TestImageStencilData 2
-      -D ${VTK_DATA_ROOT}
-      -T ${VTK_TEST_OUTPUT_DIR}
-      -V Baseline/Hybrid/TestSubtractStencilData.png)
-  add_test(NAME ${vtk-module}Cxx-ClipStencilData
-    COMMAND ${vtk-module}CxxTests TestImageStencilData 3
-      -D ${VTK_DATA_ROOT}
-      -T ${VTK_TEST_OUTPUT_DIR}
-      -V Baseline/Hybrid/TestClipStencilData.png)
-endif()
+set_property(DIRECTORY APPEND PROPERTY VTK_TEST_CXX_SOURCES
+  TestImageStencilData.cxx
+  )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-AddStencilData
+  COMMAND ${vtk-module}CxxTests TestImageStencilData 1
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V DATA{../Data/Baseline/TestAddStencilData.png,:})
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SubtractStencilData
+  COMMAND ${vtk-module}CxxTests TestImageStencilData 2
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V DATA{../Data/Baseline/TestSubtractStencilData.png,:})
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-ClipStencilData
+  COMMAND ${vtk-module}CxxTests TestImageStencilData 3
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V DATA{../Data/Baseline/TestClipStencilData.png,:})
 
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Imaging/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Imaging/Core/Testing/Cxx/FastSplatter.cxx b/Imaging/Core/Testing/Cxx/FastSplatter.cxx
index 29f615a..9909e88 100644
--- a/Imaging/Core/Testing/Cxx/FastSplatter.cxx
+++ b/Imaging/Core/Testing/Cxx/FastSplatter.cxx
@@ -19,8 +19,6 @@
 #include "vtkRenderWindow.h"
 #include "vtkRenderWindowInteractor.h"
 
-#include "vtkRegressionTestImage.h"
-
 #include "vtkSmartPointer.h"
 #define VTK_CREATE(type, name) \
   vtkSmartPointer<type> name = vtkSmartPointer<type>::New()
@@ -29,7 +27,7 @@
 
 const int SPLAT_IMAGE_SIZE = 100;
 
-int FastSplatter(int argc, char *argv[])
+int FastSplatter(int, char *[])
 {
   // For the purposes of this example we'll build the splat image by
   // hand.
@@ -118,14 +116,10 @@ int FastSplatter(int argc, char *argv[])
   ImageViewer->Render();
   ImageViewer->GetRenderer()->ResetCamera();
 
-  int retVal = vtkRegressionTestImage(ImageViewer->GetRenderWindow());
-  if (retVal == vtkRegressionTester::DO_INTERACTOR)
-    {
-    ImageViewer->Render();
-    iren->Start();
-    retVal = vtkRegressionTester::PASSED;
-    }
+  iren->Initialize();
+  ImageViewer->Render();
+  iren->Start();
 
-  return !retVal;
+  return EXIT_SUCCESS;
 }
 
diff --git a/Imaging/Core/Testing/Cxx/ImageAccumulate.cxx b/Imaging/Core/Testing/Cxx/ImageAccumulate.cxx
index 10645ad..97185a0 100644
--- a/Imaging/Core/Testing/Cxx/ImageAccumulate.cxx
+++ b/Imaging/Core/Testing/Cxx/ImageAccumulate.cxx
@@ -19,7 +19,6 @@
 #include "vtkRenderWindowInteractor.h"
 
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 #include <math.h>
 
diff --git a/Imaging/Core/Testing/Cxx/ImageAutoRange.cxx b/Imaging/Core/Testing/Cxx/ImageAutoRange.cxx
index 9da5524..6f9214f 100644
--- a/Imaging/Core/Testing/Cxx/ImageAutoRange.cxx
+++ b/Imaging/Core/Testing/Cxx/ImageAutoRange.cxx
@@ -17,6 +17,8 @@
 // The command line arguments are:
 // -I        => run in interactive mode
 
+#include "vtkSmartPointer.h"
+
 #include "vtkRenderWindowInteractor.h"
 #include "vtkInteractorStyleImage.h"
 #include "vtkRenderWindow.h"
@@ -30,19 +32,20 @@
 #include "vtkImageHistogramStatistics.h"
 
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 int ImageAutoRange(int argc, char *argv[])
 {
-  vtkRenderWindowInteractor *iren = vtkRenderWindowInteractor::New();
-  vtkInteractorStyle *style = vtkInteractorStyleImage::New();
-  vtkRenderWindow *renWin = vtkRenderWindow::New();
+  vtkSmartPointer<vtkRenderWindowInteractor> iren =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  vtkSmartPointer<vtkInteractorStyle> style =
+    vtkSmartPointer<vtkInteractorStyle>::New();
+  vtkSmartPointer<vtkRenderWindow> renWin =
+    vtkSmartPointer<vtkRenderWindow>::New();
   iren->SetRenderWindow(renWin);
   iren->SetInteractorStyle(style);
-  renWin->Delete();
-  style->Delete();
 
-  vtkPNGReader *reader = vtkPNGReader::New();
+  vtkSmartPointer<vtkPNGReader> reader =
+    vtkSmartPointer<vtkPNGReader>::New();
 
   char* fname = vtkTestUtilities::ExpandDataFileName(
     argc, argv, "Data/fullhead15.png");
@@ -50,7 +53,8 @@ int ImageAutoRange(int argc, char *argv[])
   reader->SetFileName(fname);
   delete[] fname;
 
-  vtkImageHistogramStatistics *statistics = vtkImageHistogramStatistics::New();
+  vtkSmartPointer<vtkImageHistogramStatistics> statistics =
+    vtkSmartPointer<vtkImageHistogramStatistics>::New();
   statistics->SetInputConnection(reader->GetOutputPort());
   statistics->GenerateHistogramImageOff();
   statistics->Update();
@@ -66,15 +70,16 @@ int ImageAutoRange(int argc, char *argv[])
 
   for (int i = 0; i < 2; i++)
     {
-    vtkRenderer *renderer = vtkRenderer::New();
+    vtkSmartPointer<vtkRenderer> renderer =
+      vtkSmartPointer<vtkRenderer>::New();
     vtkCamera *camera = renderer->GetActiveCamera();
     renderer->SetBackground(0.0,0.0,0.0);
     renderer->SetViewport(0.5*(i&1), 0.0,
                           0.5 + 0.5*(i&1), 1.0);
     renWin->AddRenderer(renderer);
-    renderer->Delete();
 
-    vtkImageSliceMapper *imageMapper = vtkImageSliceMapper::New();
+    vtkSmartPointer<vtkImageSliceMapper> imageMapper =
+      vtkSmartPointer<vtkImageSliceMapper>::New();
     imageMapper->SetInputConnection(reader->GetOutputPort());
 
     double *bounds = imageMapper->GetBounds();
@@ -90,9 +95,9 @@ int ImageAutoRange(int argc, char *argv[])
     camera->ParallelProjectionOn();
     camera->SetParallelScale(128);
 
-    vtkImageSlice *image = vtkImageSlice::New();
+    vtkSmartPointer<vtkImageSlice> image =
+      vtkSmartPointer<vtkImageSlice>::New();
     image->SetMapper(imageMapper);
-    imageMapper->Delete();
     renderer->AddViewProp(image);
 
     if ((i & 1) == 0)
@@ -106,21 +111,13 @@ int ImageAutoRange(int argc, char *argv[])
       image->GetProperty()->SetColorLevel(0.5*(autorange[0] + autorange[1]));
       }
 
-    image->Delete();
     }
 
   renWin->SetSize(512,256);
 
+  iren->Initialize();
   renWin->Render();
-  int retVal = vtkRegressionTestImage( renWin );
-  if ( retVal == vtkRegressionTester::DO_INTERACTOR )
-    {
-    iren->Start();
-    }
-  iren->Delete();
-
-  statistics->Delete();
-  reader->Delete();
+  iren->Start();
 
-  return !retVal;
+  return EXIT_SUCCESS;
 }
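The test conversions above (ImageAutoRange and several that follow) all apply the same pattern: raw New()/Delete() pairs become vtkSmartPointer<T>, which releases its reference when the handle goes out of scope, so the trailing Delete() calls can be dropped. A minimal sketch of the idiom, assuming only vtkSmartPointer and vtkRenderWindow as already used in the patch (hypothetical function name):

    #include "vtkSmartPointer.h"
    #include "vtkRenderWindow.h"

    void Example()
    {
      // Reference-counted handle; no matching Delete() is needed.
      vtkSmartPointer<vtkRenderWindow> renWin =
        vtkSmartPointer<vtkRenderWindow>::New();
      renWin->SetSize(512, 256);
    }   // renWin releases its reference here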
diff --git a/Imaging/Core/Testing/Cxx/ImageBSplineCoefficients.cxx b/Imaging/Core/Testing/Cxx/ImageBSplineCoefficients.cxx
index 01da350..0501e8a 100644
--- a/Imaging/Core/Testing/Cxx/ImageBSplineCoefficients.cxx
+++ b/Imaging/Core/Testing/Cxx/ImageBSplineCoefficients.cxx
@@ -17,6 +17,8 @@
 // The command line arguments are:
 // -I        => run in interactive mode
 
+#include "vtkSmartPointer.h"
+
 #include "vtkRenderWindowInteractor.h"
 #include "vtkInteractorStyleImage.h"
 #include "vtkRenderWindow.h"
@@ -31,20 +33,21 @@
 #include "vtkImageBSplineCoefficients.h"
 
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 int ImageBSplineCoefficients(int argc, char *argv[])
 {
-  int retVal = vtkTesting::PASSED;
-  vtkRenderWindowInteractor *iren = vtkRenderWindowInteractor::New();
-  vtkInteractorStyle *style = vtkInteractorStyleImage::New();
-  vtkRenderWindow *renWin = vtkRenderWindow::New();
+  int retVal = EXIT_SUCCESS;
+  vtkSmartPointer<vtkRenderWindowInteractor> iren =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  vtkSmartPointer<vtkInteractorStyle> style =
+    vtkSmartPointer<vtkInteractorStyle>::New();
+  vtkSmartPointer<vtkRenderWindow> renWin =
+    vtkSmartPointer<vtkRenderWindow>::New();
   iren->SetRenderWindow(renWin);
   iren->SetInteractorStyle(style);
-  renWin->Delete();
-  style->Delete();
 
-  vtkPNGReader *reader = vtkPNGReader::New();
+  vtkSmartPointer<vtkPNGReader> reader =
+    vtkSmartPointer<vtkPNGReader>::New();
 
   char* fname = vtkTestUtilities::ExpandDataFileName(
     argc, argv, "Data/fullhead15.png");
@@ -59,13 +62,13 @@ int ImageBSplineCoefficients(int argc, char *argv[])
   reader->GetOutput()->GetOrigin(origin);
   reader->GetOutput()->GetSpacing(spacing);
 
-  vtkImageBSplineCoefficients *coeffs =
-    vtkImageBSplineCoefficients::New();
+  vtkSmartPointer<vtkImageBSplineCoefficients> coeffs =
+    vtkSmartPointer<vtkImageBSplineCoefficients>::New();
   coeffs->SetInputConnection(reader->GetOutputPort());
   coeffs->Update();
 
-  vtkImageBSplineInterpolator *interp =
-    vtkImageBSplineInterpolator::New();
+  vtkSmartPointer<vtkImageBSplineInterpolator> interp =
+    vtkSmartPointer<vtkImageBSplineInterpolator>::New();
 
   double points[20][3] = {
     { 84.75451, 130.78060, 0.0 },
@@ -143,26 +146,26 @@ int ImageBSplineCoefficients(int argc, char *argv[])
           cerr << "Bad interpolation, error is " << e << " k = " << k
                << " degree = " << j << " mode = " << mode << "\n";
           cerr << v0 << " " << v1 << "\n";
-          retVal = vtkTesting::FAILED;
+          retVal = EXIT_FAILURE;
           }
         }
       }
 
     interp->ReleaseData();
     }
-  interp->Delete();
 
   for (int i = 0; i < 2; i++)
     {
-    vtkRenderer *renderer = vtkRenderer::New();
+    vtkSmartPointer<vtkRenderer> renderer =
+      vtkSmartPointer<vtkRenderer>::New();
     vtkCamera *camera = renderer->GetActiveCamera();
     renderer->SetBackground(0.0,0.0,0.0);
     renderer->SetViewport(0.5*(i&1), 0.0,
                           0.5 + 0.5*(i&1), 1.0);
     renWin->AddRenderer(renderer);
-    renderer->Delete();
 
-    vtkImageSliceMapper *imageMapper = vtkImageSliceMapper::New();
+    vtkSmartPointer<vtkImageSliceMapper> imageMapper =
+      vtkSmartPointer<vtkImageSliceMapper>::New();
     if (i == 0)
       {
       imageMapper->SetInputConnection(reader->GetOutputPort());
@@ -186,33 +189,21 @@ int ImageBSplineCoefficients(int argc, char *argv[])
     camera->ParallelProjectionOn();
     camera->SetParallelScale(128);
 
-    vtkImageSlice *image = vtkImageSlice::New();
+    vtkSmartPointer<vtkImageSlice> image =
+      vtkSmartPointer<vtkImageSlice>::New();
     image->SetMapper(imageMapper);
-    imageMapper->Delete();
     renderer->AddViewProp(image);
 
     image->GetProperty()->SetColorWindow(range[1] - range[0]);
     image->GetProperty()->SetColorLevel(0.5*(range[0] + range[1]));
 
-    image->Delete();
     }
 
   renWin->SetSize(512,256);
 
+  iren->Initialize();
   renWin->Render();
-  int retTmp = vtkRegressionTestImage( renWin );
-  if ( retTmp == vtkRegressionTester::DO_INTERACTOR )
-    {
-    iren->Start();
-    }
-  else if ( retTmp != vtkTesting::PASSED)
-    {
-    retVal = retTmp;
-    }
-  iren->Delete();
-
-  coeffs->Delete();
-  reader->Delete();
+  iren->Start();
 
-  return !retVal;
+  return retVal;
 }
diff --git a/Imaging/Core/Testing/Cxx/ImageHistogram.cxx b/Imaging/Core/Testing/Cxx/ImageHistogram.cxx
index be9d9bd..fee2988 100644
--- a/Imaging/Core/Testing/Cxx/ImageHistogram.cxx
+++ b/Imaging/Core/Testing/Cxx/ImageHistogram.cxx
@@ -17,6 +17,7 @@
 // The command line arguments are:
 // -I        => run in interactive mode
 
+#include "vtkNew.h"
 #include "vtkRenderWindowInteractor.h"
 #include "vtkInteractorStyleImage.h"
 #include "vtkRenderWindow.h"
@@ -30,19 +31,16 @@
 #include "vtkImageHistogram.h"
 
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 int ImageHistogram(int argc, char *argv[])
 {
-  vtkRenderWindowInteractor *iren = vtkRenderWindowInteractor::New();
-  vtkInteractorStyle *style = vtkInteractorStyleImage::New();
-  vtkRenderWindow *renWin = vtkRenderWindow::New();
-  iren->SetRenderWindow(renWin);
-  iren->SetInteractorStyle(style);
-  renWin->Delete();
-  style->Delete();
+  vtkNew<vtkRenderWindowInteractor> iren;
+  vtkNew<vtkInteractorStyle> style;
+  vtkNew<vtkRenderWindow> renWin;
+  iren->SetRenderWindow(renWin.GetPointer());
+  iren->SetInteractorStyle(style.GetPointer());
 
-  vtkPNGReader *reader = vtkPNGReader::New();
+  vtkNew<vtkPNGReader> reader;
 
   char* fname = vtkTestUtilities::ExpandDataFileName(
     argc, argv, "Data/fullhead15.png");
@@ -50,7 +48,7 @@ int ImageHistogram(int argc, char *argv[])
   reader->SetFileName(fname);
   delete[] fname;
 
-  vtkImageHistogram *histogram = vtkImageHistogram::New();
+  vtkNew<vtkImageHistogram> histogram;
   histogram->SetInputConnection(reader->GetOutputPort());
   histogram->GenerateHistogramImageOn();
   histogram->SetHistogramImageSize(256,256);
@@ -65,15 +63,14 @@ int ImageHistogram(int argc, char *argv[])
 
   for (int i = 0; i < 2; i++)
     {
-    vtkRenderer *renderer = vtkRenderer::New();
+    vtkNew<vtkRenderer> renderer;
     vtkCamera *camera = renderer->GetActiveCamera();
     renderer->SetBackground(0.0,0.0,0.0);
     renderer->SetViewport(0.5*(i&1), 0.0,
                           0.5 + 0.5*(i&1), 1.0);
-    renWin->AddRenderer(renderer);
-    renderer->Delete();
+    renWin->AddRenderer(renderer.GetPointer());
 
-    vtkImageSliceMapper *imageMapper = vtkImageSliceMapper::New();
+    vtkNew<vtkImageSliceMapper> imageMapper;
     if ((i & 1) == 0)
       {
       imageMapper->SetInputConnection(reader->GetOutputPort());
@@ -97,10 +94,10 @@ int ImageHistogram(int argc, char *argv[])
     camera->ParallelProjectionOn();
     camera->SetParallelScale(128);
 
-    vtkImageSlice *image = vtkImageSlice::New();
-    image->SetMapper(imageMapper);
-    imageMapper->Delete();
-    renderer->AddViewProp(image);
+    vtkNew<vtkImageSlice> image;
+    image->SetMapper(imageMapper.GetPointer());
+
+    renderer->AddViewProp(image.GetPointer());
 
     if ((i & 1) == 0)
       {
@@ -113,22 +110,13 @@ int ImageHistogram(int argc, char *argv[])
       image->GetProperty()->SetColorWindow(255.0);
       image->GetProperty()->SetColorLevel(127.5);
       }
-
-    image->Delete();
     }
 
   renWin->SetSize(512,256);
 
+  iren->Initialize();
   renWin->Render();
-  int retVal = vtkRegressionTestImage( renWin );
-  if ( retVal == vtkRegressionTester::DO_INTERACTOR )
-    {
-    iren->Start();
-    }
-  iren->Delete();
-
-  histogram->Delete();
-  reader->Delete();
+  iren->Start();
 
-  return !retVal;
+  return EXIT_SUCCESS;
 }
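ImageHistogram above uses vtkNew rather than vtkSmartPointer: vtkNew<T> constructs the object immediately and deletes it when the handle goes out of scope, but in this VTK version it does not convert implicitly to a raw pointer, hence the explicit GetPointer() calls in the hunk. A minimal sketch of that usage (hypothetical function name, not from the patch):

    #include "vtkNew.h"
    #include "vtkRenderWindow.h"
    #include "vtkRenderWindowInteractor.h"

    void Example()
    {
      vtkNew<vtkRenderWindowInteractor> iren;      // allocated on construction
      vtkNew<vtkRenderWindow> renWin;
      iren->SetRenderWindow(renWin.GetPointer());  // pass the raw pointer explicitly
    }   // both objects are deleted when the vtkNew handles go out of scope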
diff --git a/Imaging/Core/Testing/Cxx/ImageResize.cxx b/Imaging/Core/Testing/Cxx/ImageResize.cxx
index a3a3de1..a2350e0 100644
--- a/Imaging/Core/Testing/Cxx/ImageResize.cxx
+++ b/Imaging/Core/Testing/Cxx/ImageResize.cxx
@@ -17,6 +17,8 @@
 // The command line arguments are:
 // -I        => run in interactive mode
 
+#include "vtkSmartPointer.h"
+
 #include "vtkRenderWindowInteractor.h"
 #include "vtkInteractorStyleImage.h"
 #include "vtkRenderWindow.h"
@@ -30,19 +32,20 @@
 #include "vtkImageResize.h"
 
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 int ImageResize(int argc, char *argv[])
 {
-  vtkRenderWindowInteractor *iren = vtkRenderWindowInteractor::New();
-  vtkInteractorStyle *style = vtkInteractorStyleImage::New();
-  vtkRenderWindow *renWin = vtkRenderWindow::New();
+  vtkSmartPointer<vtkRenderWindowInteractor> iren =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  vtkSmartPointer<vtkInteractorStyle> style =
+    vtkSmartPointer<vtkInteractorStyle>::New();
+  vtkSmartPointer<vtkRenderWindow> renWin =
+    vtkSmartPointer<vtkRenderWindow>::New();
   iren->SetRenderWindow(renWin);
   iren->SetInteractorStyle(style);
-  renWin->Delete();
-  style->Delete();
 
-  vtkPNGReader *reader = vtkPNGReader::New();
+  vtkSmartPointer<vtkPNGReader> reader =
+    vtkSmartPointer<vtkPNGReader>::New();
 
   char* fname = vtkTestUtilities::ExpandDataFileName(
     argc, argv, "Data/fullhead15.png");
@@ -54,11 +57,13 @@ int ImageResize(int argc, char *argv[])
 
   for (int i = 0; i < 4; i++)
     {
-    vtkImageResize *resize = vtkImageResize::New();
+    vtkSmartPointer<vtkImageResize> resize =
+      vtkSmartPointer<vtkImageResize>::New();
     resize->SetInputConnection(reader->GetOutputPort());
     resize->SetOutputDimensions(64, 64, 1);
 
-    vtkImageSliceMapper *imageMapper = vtkImageSliceMapper::New();
+    vtkSmartPointer<vtkImageSliceMapper> imageMapper =
+      vtkSmartPointer<vtkImageSliceMapper>::New();
     imageMapper->SetInputConnection(resize->GetOutputPort());
     imageMapper->BorderOn();
 
@@ -80,14 +85,16 @@ int ImageResize(int argc, char *argv[])
       resize->InterpolateOn();
       }
 
-    vtkImageSlice *image = vtkImageSlice::New();
+    vtkSmartPointer<vtkImageSlice> image =
+      vtkSmartPointer<vtkImageSlice>::New();
     image->SetMapper(imageMapper);
 
     image->GetProperty()->SetColorWindow(range[1] - range[0]);
     image->GetProperty()->SetColorLevel(0.5*(range[0] + range[1]));
     image->GetProperty()->SetInterpolationTypeToNearest();
 
-    vtkRenderer *renderer = vtkRenderer::New();
+    vtkSmartPointer<vtkRenderer> renderer =
+      vtkSmartPointer<vtkRenderer>::New();
     renderer->AddViewProp(image);
     renderer->SetBackground(0.0,0.0,0.0);
     renderer->SetViewport(0.5*(i&1), 0.25*(i&2),
@@ -109,23 +116,14 @@ int ImageResize(int argc, char *argv[])
     camera->ParallelProjectionOn();
     camera->SetParallelScale(128);
 
-    imageMapper->Delete();
-    renderer->Delete();
-    image->Delete();
-    resize->Delete();
     }
 
   renWin->SetSize(512,512);
 
+  iren->Initialize();
   renWin->Render();
-  int retVal = vtkRegressionTestImage( renWin );
-  if ( retVal == vtkRegressionTester::DO_INTERACTOR )
-    {
-    iren->Start();
-    }
-  iren->Delete();
 
-  reader->Delete();
+  iren->Start();
 
-  return !retVal;
+  return EXIT_SUCCESS;
 }
diff --git a/Imaging/Core/Testing/Cxx/ImageResize3D.cxx b/Imaging/Core/Testing/Cxx/ImageResize3D.cxx
index 33cc22c..6d62c50 100644
--- a/Imaging/Core/Testing/Cxx/ImageResize3D.cxx
+++ b/Imaging/Core/Testing/Cxx/ImageResize3D.cxx
@@ -17,6 +17,8 @@
 // The command line arguments are:
 // -I        => run in interactive mode
 
+#include "vtkSmartPointer.h"
+
 #include "vtkRenderWindowInteractor.h"
 #include "vtkInteractorStyleImage.h"
 #include "vtkRenderWindow.h"
@@ -30,23 +32,24 @@
 #include "vtkImageResize.h"
 
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 int ImageResize3D(int argc, char *argv[])
 {
-  vtkRenderWindowInteractor *iren = vtkRenderWindowInteractor::New();
-  vtkInteractorStyleImage *style = vtkInteractorStyleImage::New();
+  vtkSmartPointer<vtkRenderWindowInteractor> iren =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  vtkSmartPointer<vtkInteractorStyleImage> style =
+    vtkSmartPointer<vtkInteractorStyleImage>::New();
   style->SetInteractionModeToImageSlicing();
-  vtkRenderWindow *renWin = vtkRenderWindow::New();
+  vtkSmartPointer<vtkRenderWindow> renWin =
+    vtkSmartPointer<vtkRenderWindow>::New();
   iren->SetRenderWindow(renWin);
   iren->SetInteractorStyle(style);
-  renWin->Delete();
-  style->Delete();
 
   char* fname =
     vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/headsq/quarter");
 
-  vtkImageReader2 *reader = vtkImageReader2::New();
+  vtkSmartPointer<vtkImageReader2> reader =
+    vtkSmartPointer<vtkImageReader2>::New();
   reader->SetDataByteOrderToLittleEndian();
   reader->SetDataExtent(0,63,0,63,1,93);
   reader->SetDataSpacing(3.2, 3.2, 1.5);
@@ -54,14 +57,16 @@ int ImageResize3D(int argc, char *argv[])
 
   delete [] fname;
 
-  vtkImageResize *resize = vtkImageResize::New();
+  vtkSmartPointer<vtkImageResize> resize =
+    vtkSmartPointer<vtkImageResize>::New();
   resize->SetInputConnection(reader->GetOutputPort());
   resize->SetResizeMethodToOutputSpacing();
   resize->SetOutputSpacing(0.80, 0.80, 1.5);
   resize->InterpolateOn();
   resize->Update();
 
-  vtkImageResize *resize2 = vtkImageResize::New();
+  vtkSmartPointer<vtkImageResize> resize2 =
+    vtkSmartPointer<vtkImageResize>::New();
   resize2->SetInputConnection(reader->GetOutputPort());
   resize2->SetResizeMethodToMagnificationFactors();
   resize2->SetMagnificationFactors(4, 4, 1);
@@ -71,7 +76,8 @@ int ImageResize3D(int argc, char *argv[])
 
   for (int i = 0; i < 4; i++)
     {
-    vtkImageSliceMapper *imageMapper = vtkImageSliceMapper::New();
+    vtkSmartPointer<vtkImageSliceMapper> imageMapper =
+      vtkSmartPointer<vtkImageSliceMapper>::New();
     if (i < 3)
       {
       imageMapper->SetInputConnection(resize->GetOutputPort());
@@ -83,14 +89,16 @@ int ImageResize3D(int argc, char *argv[])
     imageMapper->SetOrientation(i % 3);
     imageMapper->SliceAtFocalPointOn();
 
-    vtkImageSlice *image = vtkImageSlice::New();
+    vtkSmartPointer<vtkImageSlice> image =
+      vtkSmartPointer<vtkImageSlice>::New();
     image->SetMapper(imageMapper);
 
     image->GetProperty()->SetColorWindow(range[1] - range[0]);
     image->GetProperty()->SetColorLevel(0.5*(range[0] + range[1]));
     image->GetProperty()->SetInterpolationTypeToNearest();
 
-    vtkRenderer *renderer = vtkRenderer::New();
+    vtkSmartPointer<vtkRenderer> renderer =
+      vtkSmartPointer<vtkRenderer>::New();
     renderer->AddViewProp(image);
     renderer->SetBackground(0.0,0.0,0.0);
     renderer->SetViewport(0.5*(i&1), 0.25*(i&2),
@@ -119,24 +127,13 @@ int ImageResize3D(int argc, char *argv[])
     camera->ParallelProjectionOn();
     camera->SetParallelScale(0.8*128);
 
-    imageMapper->Delete();
-    renderer->Delete();
-    image->Delete();
     }
 
   renWin->SetSize(512,512);
 
+  iren->Initialize();
   renWin->Render();
-  int retVal = vtkRegressionTestImage( renWin );
-  if ( retVal == vtkRegressionTester::DO_INTERACTOR )
-    {
-    iren->Start();
-    }
-  iren->Delete();
-
-  resize->Delete();
-  resize2->Delete();
-  reader->Delete();
+  iren->Start();
 
-  return !retVal;
+  return EXIT_SUCCESS;
 }
diff --git a/Imaging/Core/Testing/Cxx/ImageResizeCropping.cxx b/Imaging/Core/Testing/Cxx/ImageResizeCropping.cxx
index 906e31e..6fd80e8 100644
--- a/Imaging/Core/Testing/Cxx/ImageResizeCropping.cxx
+++ b/Imaging/Core/Testing/Cxx/ImageResizeCropping.cxx
@@ -17,6 +17,8 @@
 // The command line arguments are:
 // -I        => run in interactive mode
 
+#include "vtkSmartPointer.h"
+
 #include "vtkRenderWindowInteractor.h"
 #include "vtkInteractorStyleImage.h"
 #include "vtkRenderWindow.h"
@@ -34,20 +36,21 @@
 #include "vtkImageResize.h"
 
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 int ImageResizeCropping(int argc, char *argv[])
 {
-  vtkRenderWindowInteractor *iren = vtkRenderWindowInteractor::New();
-  vtkInteractorStyle *style = vtkInteractorStyleImage::New();
-  vtkRenderWindow *renWin = vtkRenderWindow::New();
+  vtkSmartPointer<vtkRenderWindowInteractor> iren =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
+  vtkSmartPointer<vtkInteractorStyle> style =
+    vtkSmartPointer<vtkInteractorStyle>::New();
+  vtkSmartPointer<vtkRenderWindow> renWin =
+    vtkSmartPointer<vtkRenderWindow>::New();
   renWin->SetMultiSamples(0);
   iren->SetRenderWindow(renWin);
   iren->SetInteractorStyle(style);
-  renWin->Delete();
-  style->Delete();
 
-  vtkTIFFReader *reader = vtkTIFFReader::New();
+  vtkSmartPointer<vtkTIFFReader> reader =
+    vtkSmartPointer<vtkTIFFReader>::New();
 
   char* fname = vtkTestUtilities::ExpandDataFileName(
     argc, argv, "Data/beach.tif");
@@ -64,19 +67,23 @@ int ImageResizeCropping(int argc, char *argv[])
     { 9.5, 149.5, 199.5, 49.5, 0, 0 },
   };
 
-  vtkOutlineSource *outline = vtkOutlineSource::New();
+  vtkSmartPointer<vtkOutlineSource> outline =
+    vtkSmartPointer<vtkOutlineSource>::New();
   outline->SetBounds(10, 149, 50, 199, -1, 1);
 
-  vtkDataSetMapper *mapper = vtkDataSetMapper::New();
+  vtkSmartPointer<vtkDataSetMapper> mapper =
+    vtkSmartPointer<vtkDataSetMapper>::New();
   mapper->SetInputConnection(outline->GetOutputPort());
 
-  vtkActor *actor = vtkActor::New();
+  vtkSmartPointer<vtkActor> actor =
+    vtkSmartPointer<vtkActor>::New();
   actor->SetMapper(mapper);
   actor->GetProperty()->SetColor(1.0, 0.0, 0.0);
 
   for (int i = 0; i < 4; i++)
     {
-    vtkImageResize *resize = vtkImageResize::New();
+    vtkSmartPointer<vtkImageResize> resize =
+      vtkSmartPointer<vtkImageResize>::New();
     resize->SetNumberOfThreads(1);
     resize->SetInputConnection(reader->GetOutputPort());
     resize->SetOutputDimensions(256, 256, 1);
@@ -86,7 +93,8 @@ int ImageResizeCropping(int argc, char *argv[])
       resize->SetCroppingRegion(cropping[i]);
       }
 
-    vtkImageSliceMapper *imageMapper = vtkImageSliceMapper::New();
+    vtkSmartPointer<vtkImageSliceMapper> imageMapper =
+      vtkSmartPointer<vtkImageSliceMapper>::New();
     imageMapper->SetInputConnection(resize->GetOutputPort());
 
     if ((i & 2) == 2)
@@ -95,13 +103,15 @@ int ImageResizeCropping(int argc, char *argv[])
       imageMapper->BorderOn();
       }
 
-    vtkImageSlice *image = vtkImageSlice::New();
+    vtkSmartPointer<vtkImageSlice> image =
+      vtkSmartPointer<vtkImageSlice>::New();
     image->SetMapper(imageMapper);
 
     image->GetProperty()->SetColorWindow(range[1] - range[0]);
     image->GetProperty()->SetColorLevel(0.5*(range[0] + range[1]));
 
-    vtkRenderer *renderer = vtkRenderer::New();
+    vtkSmartPointer<vtkRenderer> renderer =
+      vtkSmartPointer<vtkRenderer>::New();
     renderer->AddViewProp(image);
     if (i == 0)
       {
@@ -122,26 +132,13 @@ int ImageResizeCropping(int argc, char *argv[])
     camera->ParallelProjectionOn();
     camera->SetParallelScale(100);
 
-    imageMapper->Delete();
-    renderer->Delete();
-    image->Delete();
-    resize->Delete();
     }
 
   renWin->SetSize(512,512);
 
+  iren->Initialize();
   renWin->Render();
-  int retVal = vtkRegressionTestImage( renWin );
-  if ( retVal == vtkRegressionTester::DO_INTERACTOR )
-    {
-    iren->Start();
-    }
-  iren->Delete();
-
-  reader->Delete();
-  outline->Delete();
-  mapper->Delete();
-  actor->Delete();
+  iren->Start();
 
-  return !retVal;
+  return EXIT_SUCCESS;
 }
diff --git a/Imaging/Core/Testing/Cxx/ImageWeightedSum.cxx b/Imaging/Core/Testing/Cxx/ImageWeightedSum.cxx
index 4ebd6f8..e8f8041 100644
--- a/Imaging/Core/Testing/Cxx/ImageWeightedSum.cxx
+++ b/Imaging/Core/Testing/Cxx/ImageWeightedSum.cxx
@@ -12,6 +12,8 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
+#include "vtkSmartPointer.h"
+
 #include "vtkImageReader.h"
 #include "vtkImageWeightedSum.h"
 #include "vtkDoubleArray.h"
@@ -21,7 +23,6 @@
 #include "vtkStructuredPointsWriter.h"
 
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 int ImageWeightedSum(int argc, char *argv[])
 {
@@ -29,7 +30,8 @@ int ImageWeightedSum(int argc, char *argv[])
   char* fname =
     vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/headsq/quarter");
 
-  vtkImageReader *reader = vtkImageReader::New();
+  vtkSmartPointer<vtkImageReader> reader =
+    vtkSmartPointer<vtkImageReader>::New();
   reader->SetDataByteOrderToLittleEndian();
   reader->SetDataExtent(0,63,0,63,1,93);
   reader->SetDataSpacing(3.2, 3.2, 1.5);
@@ -39,7 +41,8 @@ int ImageWeightedSum(int argc, char *argv[])
   delete [] fname;
 
   // Test when weight is equal to 0
-  vtkImageWeightedSum *sum = vtkImageWeightedSum::New();
+  vtkSmartPointer<vtkImageWeightedSum> sum =
+    vtkSmartPointer<vtkImageWeightedSum>::New();
   sum->SetWeight(0,0.);
   sum->AddInputConnection( reader->GetOutputPort() );
   sum->Update();
@@ -52,7 +55,8 @@ int ImageWeightedSum(int argc, char *argv[])
     }
 
   // Set dummy values
-  vtkDoubleArray *weights = vtkDoubleArray::New();
+  vtkSmartPointer<vtkDoubleArray> weights =
+    vtkSmartPointer<vtkDoubleArray>::New();
   weights->SetNumberOfTuples(5);
   weights->SetValue(0, 10.0);
   weights->SetValue(1, 20.0);
@@ -70,7 +74,8 @@ int ImageWeightedSum(int argc, char *argv[])
   sum->AddInputConnection( reader->GetOutputPort() );
 
   // Subtract the original image
-  vtkImageMathematics *math = vtkImageMathematics::New();
+  vtkSmartPointer<vtkImageMathematics> math =
+    vtkSmartPointer<vtkImageMathematics>::New();
   math->SetOperationToSubtract();
   math->SetInputConnection( 0, reader->GetOutputPort() );
   math->SetInputConnection( 1, sum->GetOutputPort() );
@@ -85,7 +90,8 @@ int ImageWeightedSum(int argc, char *argv[])
 
   // Get scalar range:
   reader->GetOutput()->GetScalarRange( range );
-  vtkImageShiftScale *shift = vtkImageShiftScale::New();
+  vtkSmartPointer<vtkImageShiftScale> shift =
+    vtkSmartPointer<vtkImageShiftScale>::New();
   shift->SetInputConnection( reader->GetOutputPort() );
   shift->SetScale( 1./(range[1]-range[0]));
   shift->SetShift( -range[0] );
@@ -101,21 +107,6 @@ int ImageWeightedSum(int argc, char *argv[])
 
   math->SetInputConnection( 0, shift->GetOutputPort() );
   math->SetInputConnection( 1, sum->GetOutputPort() );
-  //math->Update();
-
-  //math->GetOutput()->GetScalarRange( range );
-  //if( range[0] != 0 || range[1] != 0 )
-  //  {
-  //  cerr << "Range2: " << range[0] << "," << range[1] << endl;
-  //  rval++;
-  //  }
-
-  // Cleanup
-  reader->Delete();
-  weights->Delete();
-  sum->Delete();
-  math->Delete();
-  shift->Delete();
 
   return rval;
 }
diff --git a/Imaging/Core/Testing/Cxx/ImportExport.cxx b/Imaging/Core/Testing/Cxx/ImportExport.cxx
index 136b748..cc88437 100644
--- a/Imaging/Core/Testing/Cxx/ImportExport.cxx
+++ b/Imaging/Core/Testing/Cxx/ImportExport.cxx
@@ -12,6 +12,8 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
+#include "vtkSmartPointer.h"
+
 #include "vtkImageViewer.h"
 #include "vtkImageReader.h"
 #include "vtkImageImport.h"
@@ -28,7 +30,8 @@ int ImportExport( int argc, char *argv[] )
 
   char* fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/headsq/quarter");
 
-  vtkImageReader *reader = vtkImageReader::New();
+  vtkSmartPointer<vtkImageReader> reader =
+    vtkSmartPointer<vtkImageReader>::New();
   reader->SetDataByteOrderToLittleEndian();
   reader->SetDataExtent(0,63,0,63,1,93);
   reader->SetFilePrefix(fname);
@@ -36,7 +39,8 @@ int ImportExport( int argc, char *argv[] )
   delete [] fname;
 
   // create exporter
-  vtkImageExport *exporter = vtkImageExport::New();
+  vtkSmartPointer<vtkImageExport> exporter =
+    vtkSmartPointer<vtkImageExport>::New();
   exporter->SetInputConnection(reader->GetOutputPort());
   exporter->ImageLowerLeftOn();
 
@@ -73,14 +77,16 @@ int ImportExport( int argc, char *argv[] )
     }
 
   // create an importer to read the data back in
-  vtkImageImport *importer = vtkImageImport::New();
+  vtkSmartPointer<vtkImageImport> importer =
+    vtkSmartPointer<vtkImageImport>::New();
   importer->SetWholeExtent(1,dimensions[0],1,dimensions[1],1,dimensions[2]);
   importer->SetDataExtentToWholeExtent();
   importer->SetDataScalarTypeToShort();
   importer->SetImportVoidPointer(data);
   importer->SetScalarArrayName("importedScalars");
 
-  vtkImageViewer *viewer = vtkImageViewer::New();
+  vtkSmartPointer<vtkImageViewer> viewer =
+    vtkSmartPointer<vtkImageViewer>::New();
   viewer->SetInputConnection(importer->GetOutputPort());
   viewer->SetZSlice(45);
   viewer->SetColorWindow(2000);
@@ -90,16 +96,7 @@ int ImportExport( int argc, char *argv[] )
 
   int retVal = vtkRegressionTestImage( viewer->GetRenderWindow() );
 
-  viewer->Delete();
-  importer->Delete();
-  exporter->Delete();
-  reader->Delete();
-
   delete [] data;
 
   return !retVal;
 }
-
-
-
-
diff --git a/Imaging/Core/Testing/Data/Baseline/ContinuousClose2D.png.md5 b/Imaging/Core/Testing/Data/Baseline/ContinuousClose2D.png.md5
new file mode 100644
index 0000000..2b537b1
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ContinuousClose2D.png.md5
@@ -0,0 +1 @@
+bfa50b46be4196147ddaabd0efa030c2
diff --git a/Imaging/Core/Testing/Data/Baseline/FastSplatter.png.md5 b/Imaging/Core/Testing/Data/Baseline/FastSplatter.png.md5
new file mode 100644
index 0000000..a037c6c
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/FastSplatter.png.md5
@@ -0,0 +1 @@
+1bdc1cd2e3593b881682499c9d8ad9f8
diff --git a/Imaging/Core/Testing/Data/Baseline/IdealHighPass.png.md5 b/Imaging/Core/Testing/Data/Baseline/IdealHighPass.png.md5
new file mode 100644
index 0000000..5f2e553
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/IdealHighPass.png.md5
@@ -0,0 +1 @@
+fdf525dba36096aea489a9d17c226f92
diff --git a/Imaging/Core/Testing/Data/Baseline/ImageAutoRange.png.md5 b/Imaging/Core/Testing/Data/Baseline/ImageAutoRange.png.md5
new file mode 100644
index 0000000..9ce3dd0
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ImageAutoRange.png.md5
@@ -0,0 +1 @@
+41a8e63d0b33522f86fb205a2920c205
diff --git a/Imaging/Core/Testing/Data/Baseline/ImageBSplineCoefficients.png.md5 b/Imaging/Core/Testing/Data/Baseline/ImageBSplineCoefficients.png.md5
new file mode 100644
index 0000000..60e0086
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ImageBSplineCoefficients.png.md5
@@ -0,0 +1 @@
+86d9371466148f47b1fcade564e6d47b
diff --git a/Imaging/Core/Testing/Data/Baseline/ImageHistogram.png.md5 b/Imaging/Core/Testing/Data/Baseline/ImageHistogram.png.md5
new file mode 100644
index 0000000..be2c163
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ImageHistogram.png.md5
@@ -0,0 +1 @@
+cc4340e1d809b2f03a425df320e38e3c
diff --git a/Imaging/Core/Testing/Data/Baseline/ImageResize.png.md5 b/Imaging/Core/Testing/Data/Baseline/ImageResize.png.md5
new file mode 100644
index 0000000..477bb61
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ImageResize.png.md5
@@ -0,0 +1 @@
+f4acc3950502d99dab7d85438540a8a1
diff --git a/Imaging/Core/Testing/Data/Baseline/ImageResize3D.png.md5 b/Imaging/Core/Testing/Data/Baseline/ImageResize3D.png.md5
new file mode 100644
index 0000000..e1d3d77
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ImageResize3D.png.md5
@@ -0,0 +1 @@
+a0dd877394f3220c0c70b5ad063bc885
diff --git a/Imaging/Core/Testing/Data/Baseline/ImageResizeCropping.png.md5 b/Imaging/Core/Testing/Data/Baseline/ImageResizeCropping.png.md5
new file mode 100644
index 0000000..5a94b37
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ImageResizeCropping.png.md5
@@ -0,0 +1 @@
+38834abf4bacd1ff455141cdf95e4bf9
diff --git a/Imaging/Core/Testing/Data/Baseline/LaplacianEdgeEnhance.png.md5 b/Imaging/Core/Testing/Data/Baseline/LaplacianEdgeEnhance.png.md5
new file mode 100644
index 0000000..4537baf
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/LaplacianEdgeEnhance.png.md5
@@ -0,0 +1 @@
+f0e6a53fc184f4255b13a81b849df376
diff --git a/Imaging/Core/Testing/Data/Baseline/MultipleUpdateExtents.png.md5 b/Imaging/Core/Testing/Data/Baseline/MultipleUpdateExtents.png.md5
new file mode 100644
index 0000000..7af65ba
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/MultipleUpdateExtents.png.md5
@@ -0,0 +1 @@
+7449bb9c0601d7b37521ba4733b16452
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceBSpline.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceBSpline.png.md5
new file mode 100644
index 0000000..143dedf
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceBSpline.png.md5
@@ -0,0 +1 @@
+35e3f0dfbe971e71624a0db7e30a9837
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceColorWrap.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceColorWrap.png.md5
new file mode 100644
index 0000000..a1e1f93
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceColorWrap.png.md5
@@ -0,0 +1 @@
+3e067a7952198a581bfe22f49a3a2c48
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceInformationInput.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceInformationInput.png.md5
new file mode 100644
index 0000000..ae95f37
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceInformationInput.png.md5
@@ -0,0 +1 @@
+46fabb733d6c83a8898c7170a2ae53c7
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceInterpolationModes.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceInterpolationModes.png.md5
new file mode 100644
index 0000000..33796e2
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceInterpolationModes.png.md5
@@ -0,0 +1 @@
+8dc6946ce80a2dc57796a9cd6c0e0d29
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceInterpolationOblique.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceInterpolationOblique.png.md5
new file mode 100644
index 0000000..8f9122d
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceInterpolationOblique.png.md5
@@ -0,0 +1 @@
+7d1ec38b05201674817b0c35826f6849
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceLanczosKaiser.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceLanczosKaiser.png.md5
new file mode 100644
index 0000000..489f834
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceLanczosKaiser.png.md5
@@ -0,0 +1 @@
+07ea4a8330642208aaf1acec35fbca7d
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceMirrorOblique.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceMirrorOblique.png.md5
new file mode 100644
index 0000000..1a7bf0d
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceMirrorOblique.png.md5
@@ -0,0 +1 @@
+967e434ff10021be61e41dab37c46e2a
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceMirrorPad.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceMirrorPad.png.md5
new file mode 100644
index 0000000..5ded4db
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceMirrorPad.png.md5
@@ -0,0 +1 @@
+d02d3df4e64f3cfb63b9c505a3a9ef23
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceOptimizationOff.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceOptimizationOff.png.md5
new file mode 100644
index 0000000..023eb00
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceOptimizationOff.png.md5
@@ -0,0 +1 @@
+b9d8f34905585602c8126f88d7984219
diff --git a/Imaging/Core/Testing/Data/Baseline/ReslicePermutations.png.md5 b/Imaging/Core/Testing/Data/Baseline/ReslicePermutations.png.md5
new file mode 100644
index 0000000..11f1803
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ReslicePermutations.png.md5
@@ -0,0 +1 @@
+71632cd65a655c6f84dfa9e15887f2a4
diff --git a/Imaging/Core/Testing/Data/Baseline/ReslicePermuteResample.png.md5 b/Imaging/Core/Testing/Data/Baseline/ReslicePermuteResample.png.md5
new file mode 100644
index 0000000..873e4a9
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ReslicePermuteResample.png.md5
@@ -0,0 +1 @@
+64df85c3d4a5b1a29296755358abfb19
diff --git a/Imaging/Core/Testing/Data/Baseline/ReslicePermuteSlab.png.md5 b/Imaging/Core/Testing/Data/Baseline/ReslicePermuteSlab.png.md5
new file mode 100644
index 0000000..97c828c
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ReslicePermuteSlab.png.md5
@@ -0,0 +1 @@
+66a6bb23c7a914967fc8419bb6ba5cdc
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceSlabModes.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceSlabModes.png.md5
new file mode 100644
index 0000000..8ad2ff2
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceSlabModes.png.md5
@@ -0,0 +1 @@
+8e20880f13ae8ee8b3bf6ab6d5b2107f
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceToColors.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceToColors.png.md5
new file mode 100644
index 0000000..76cdc84
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceToColors.png.md5
@@ -0,0 +1 @@
+8cc4a57820c5ae4bf9e94517517fc97c
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceWrapOblique.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceWrapOblique.png.md5
new file mode 100644
index 0000000..41d6e66
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceWrapOblique.png.md5
@@ -0,0 +1 @@
+927197c5ca39aa19416a759de992dd72
diff --git a/Imaging/Core/Testing/Data/Baseline/ResliceWrapPad.png.md5 b/Imaging/Core/Testing/Data/Baseline/ResliceWrapPad.png.md5
new file mode 100644
index 0000000..7df6684
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/ResliceWrapPad.png.md5
@@ -0,0 +1 @@
+a2a341034856edea3e95c0beabcf6cdb
diff --git a/Imaging/Core/Testing/Data/Baseline/Spectrum.png.md5 b/Imaging/Core/Testing/Data/Baseline/Spectrum.png.md5
new file mode 100644
index 0000000..88d035a
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/Spectrum.png.md5
@@ -0,0 +1 @@
+3f3910d68aeb93268bfadbce8ecded1c
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAccumulate.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAccumulate.png.md5
new file mode 100644
index 0000000..81a05ee
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAccumulate.png.md5
@@ -0,0 +1 @@
+a242172779fddaf2c97896b326894a9e
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAddStencilData.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAddStencilData.png.md5
new file mode 100644
index 0000000..6eeedf0
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAddStencilData.png.md5
@@ -0,0 +1 @@
+31a61ff10d62d655bd1bfba0483f0ec1
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAllBlends.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAllBlends.png.md5
new file mode 100644
index 0000000..3ff951e
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAllBlends.png.md5
@@ -0,0 +1 @@
+d4b5b444445c0b0268f12991d7385845
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAllBlendsFloat.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAllBlendsFloat.png.md5
new file mode 100644
index 0000000..19c9391
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAllBlendsFloat.png.md5
@@ -0,0 +1 @@
+cd91cab6283e6e56f76925a3bd81a865
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAllFlips.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAllFlips.png.md5
new file mode 100644
index 0000000..40b4981
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAllFlips.png.md5
@@ -0,0 +1 @@
+c8278f9ae4098d4d1ac8e95d0346f032
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAllLogic.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAllLogic.png.md5
new file mode 100644
index 0000000..ef67062
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAllLogic.png.md5
@@ -0,0 +1 @@
+d0d9c19b8b92f8321165fb9978bb955e
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAllMaskBits.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAllMaskBits.png.md5
new file mode 100644
index 0000000..7bf7aa0
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAllMaskBits.png.md5
@@ -0,0 +1 @@
+dc21c713d1478ef7ed740a5ebce1eaf0
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAllMathematics.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAllMathematics.png.md5
new file mode 100644
index 0000000..25ccbe2
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAllMathematics.png.md5
@@ -0,0 +1 @@
+0daae224652ce9150c865f3f8582f21d
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAllShrinks.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAllShrinks.png.md5
new file mode 100644
index 0000000..18e6a4c
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAllShrinks.png.md5
@@ -0,0 +1 @@
+8e85359444aa0814d1a58acd4c58b6e9
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAnisotropicDiffusion2D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAnisotropicDiffusion2D.png.md5
new file mode 100644
index 0000000..03a49b6
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAnisotropicDiffusion2D.png.md5
@@ -0,0 +1 @@
+7894d132d7bbe444c7171d0c22ca8d88
diff --git a/Imaging/Core/Testing/Data/Baseline/TestAnisotropicDiffusion3D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestAnisotropicDiffusion3D.png.md5
new file mode 100644
index 0000000..e7ef472
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestAnisotropicDiffusion3D.png.md5
@@ -0,0 +1 @@
+80499b514222140eb0e6f430917921bc
diff --git a/Imaging/Core/Testing/Data/Baseline/TestBlendStencil.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestBlendStencil.png.md5
new file mode 100644
index 0000000..f2e783f
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestBlendStencil.png.md5
@@ -0,0 +1 @@
+42a4531622c701c68b3caf0c308eee82
diff --git a/Imaging/Core/Testing/Data/Baseline/TestButterworthHighPass.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestButterworthHighPass.png.md5
new file mode 100644
index 0000000..65296ed
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestButterworthHighPass.png.md5
@@ -0,0 +1 @@
+8b69290514e3608e5d9890e903587e49
diff --git a/Imaging/Core/Testing/Data/Baseline/TestButterworthLowPass.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestButterworthLowPass.png.md5
new file mode 100644
index 0000000..588ee9c
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestButterworthLowPass.png.md5
@@ -0,0 +1 @@
+73f7b28a9be4f5784f40dc22b18e650d
diff --git a/Imaging/Core/Testing/Data/Baseline/TestCache.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestCache.png.md5
new file mode 100644
index 0000000..6c24aa9
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestCache.png.md5
@@ -0,0 +1 @@
+8ea9994c2872b57916751ac8d351d1d1
diff --git a/Imaging/Core/Testing/Data/Baseline/TestChangeInformation.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestChangeInformation.png.md5
new file mode 100644
index 0000000..c857e7e
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestChangeInformation.png.md5
@@ -0,0 +1 @@
+9ed3caf89169467d02b5cf2260f72193
diff --git a/Imaging/Core/Testing/Data/Baseline/TestCheckerboard.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestCheckerboard.png.md5
new file mode 100644
index 0000000..ba5aaeb
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestCheckerboard.png.md5
@@ -0,0 +1 @@
+e0bd774aadb2287a831c8363cacbe305
diff --git a/Imaging/Core/Testing/Data/Baseline/TestCityBlockDistance.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestCityBlockDistance.png.md5
new file mode 100644
index 0000000..a297e55
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestCityBlockDistance.png.md5
@@ -0,0 +1 @@
+4fbdbf13992c4481dae5756053cdc9a4
diff --git a/Imaging/Core/Testing/Data/Baseline/TestClipStencilData.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestClipStencilData.png.md5
new file mode 100644
index 0000000..5dad799
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestClipStencilData.png.md5
@@ -0,0 +1 @@
+e722c13df0807505c8912b1f974ac5e1
diff --git a/Imaging/Core/Testing/Data/Baseline/TestConvolve.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestConvolve.png.md5
new file mode 100644
index 0000000..ef4cbc3
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestConvolve.png.md5
@@ -0,0 +1 @@
+84ecbf8c3c1fcd65d03635b0d4e277d3
diff --git a/Imaging/Core/Testing/Data/Baseline/TestCorrelation.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestCorrelation.png.md5
new file mode 100644
index 0000000..02bfe1e
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestCorrelation.png.md5
@@ -0,0 +1 @@
+6289e1699687e987a8b766e63870d697
diff --git a/Imaging/Core/Testing/Data/Baseline/TestDivergence.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestDivergence.png.md5
new file mode 100644
index 0000000..14532c5
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestDivergence.png.md5
@@ -0,0 +1 @@
+2c6f8e8271e0b08cb9447d83171684c1
diff --git a/Imaging/Core/Testing/Data/Baseline/TestDotProduct.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestDotProduct.png.md5
new file mode 100644
index 0000000..6c5ebcb
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestDotProduct.png.md5
@@ -0,0 +1 @@
+c67c7f14fa39b74e1be90155c1741c6d
diff --git a/Imaging/Core/Testing/Data/Baseline/TestEuclideanDistance.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestEuclideanDistance.png.md5
new file mode 100644
index 0000000..fb200d4
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestEuclideanDistance.png.md5
@@ -0,0 +1 @@
+eb8307529a525c6420216bf951b1406d
diff --git a/Imaging/Core/Testing/Data/Baseline/TestEuclideanDistanceCached.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestEuclideanDistanceCached.png.md5
new file mode 100644
index 0000000..fb200d4
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestEuclideanDistanceCached.png.md5
@@ -0,0 +1 @@
+eb8307529a525c6420216bf951b1406d
diff --git a/Imaging/Core/Testing/Data/Baseline/TestEuclideanToPolar.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestEuclideanToPolar.png.md5
new file mode 100644
index 0000000..1a1fe2a
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestEuclideanToPolar.png.md5
@@ -0,0 +1 @@
+ba54f00796ea88d5bdc5b0d38f60774c
diff --git a/Imaging/Core/Testing/Data/Baseline/TestExtractVOI.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestExtractVOI.png.md5
new file mode 100644
index 0000000..4f3afab
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestExtractVOI.png.md5
@@ -0,0 +1 @@
+ec32ffd2c2171515d9122b046f6f1fed
diff --git a/Imaging/Core/Testing/Data/Baseline/TestFFTCorrelation.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestFFTCorrelation.png.md5
new file mode 100644
index 0000000..2020747
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestFFTCorrelation.png.md5
@@ -0,0 +1 @@
+8d948153ed4060222933fe4f49ff7dd5
diff --git a/Imaging/Core/Testing/Data/Baseline/TestGradientMagnitude.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestGradientMagnitude.png.md5
new file mode 100644
index 0000000..97fb88a
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestGradientMagnitude.png.md5
@@ -0,0 +1 @@
+199e417433b93c0d12181ecf11864f65
diff --git a/Imaging/Core/Testing/Data/Baseline/TestGradientMagnitude2.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestGradientMagnitude2.png.md5
new file mode 100644
index 0000000..b52a84f
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestGradientMagnitude2.png.md5
@@ -0,0 +1 @@
+f7963ed9642f3b2cbfb3a74923543526
diff --git a/Imaging/Core/Testing/Data/Baseline/TestHSIToRGB.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestHSIToRGB.png.md5
new file mode 100644
index 0000000..04dc122
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestHSIToRGB.png.md5
@@ -0,0 +1 @@
+23d6dbe3975f9d6a52f974745b6e5e6a
diff --git a/Imaging/Core/Testing/Data/Baseline/TestHSVToRGB.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestHSVToRGB.png.md5
new file mode 100644
index 0000000..a9cb32f
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestHSVToRGB.png.md5
@@ -0,0 +1 @@
+6f3896ded10fb26b740aa3f3374593f1
diff --git a/Imaging/Core/Testing/Data/Baseline/TestHybridMedian2D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestHybridMedian2D.png.md5
new file mode 100644
index 0000000..9807ce3
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestHybridMedian2D.png.md5
@@ -0,0 +1 @@
+fd1b7ad555871fea13e0f2784e5c5ced
diff --git a/Imaging/Core/Testing/Data/Baseline/TestIdealLowPass.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestIdealLowPass.png.md5
new file mode 100644
index 0000000..ad63fb8
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestIdealLowPass.png.md5
@@ -0,0 +1 @@
+7a288087272cc0f4b29fe1033be33675
diff --git a/Imaging/Core/Testing/Data/Baseline/TestImageCanvas.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestImageCanvas.png.md5
new file mode 100644
index 0000000..1bae78e
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestImageCanvas.png.md5
@@ -0,0 +1 @@
+b5643bc053df53ebf734933fd43fc76a
diff --git a/Imaging/Core/Testing/Data/Baseline/TestImageProjection.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestImageProjection.png.md5
new file mode 100644
index 0000000..3b04c59
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestImageProjection.png.md5
@@ -0,0 +1 @@
+bddc97fa2aa5c833bb8395faa9b2033a
diff --git a/Imaging/Core/Testing/Data/Baseline/TestImageThresholdConnectivity.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestImageThresholdConnectivity.png.md5
new file mode 100644
index 0000000..7199ef7
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestImageThresholdConnectivity.png.md5
@@ -0,0 +1 @@
+0599e74f15284437028d4ec8cda5609d
diff --git a/Imaging/Core/Testing/Data/Baseline/TestImageWeightedSum.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestImageWeightedSum.png.md5
new file mode 100644
index 0000000..4997844
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestImageWeightedSum.png.md5
@@ -0,0 +1 @@
+e563697641f29b0a50328cb75aa2f435
diff --git a/Imaging/Core/Testing/Data/Baseline/TestInPlaceFilter.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestInPlaceFilter.png.md5
new file mode 100644
index 0000000..171c833
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestInPlaceFilter.png.md5
@@ -0,0 +1 @@
+937a09d544749c9014ea937129566d29
diff --git a/Imaging/Core/Testing/Data/Baseline/TestIslandRemoval2D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestIslandRemoval2D.png.md5
new file mode 100644
index 0000000..3933759
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestIslandRemoval2D.png.md5
@@ -0,0 +1 @@
+a89c59f47aa3b84b8cf5dfd1c94c5f64
diff --git a/Imaging/Core/Testing/Data/Baseline/TestLassoStencil.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestLassoStencil.png.md5
new file mode 100644
index 0000000..cc3f51e
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestLassoStencil.png.md5
@@ -0,0 +1 @@
+f717f40693829093eed2f04081cbbba6
diff --git a/Imaging/Core/Testing/Data/Baseline/TestMapToRGBABlockStreaming.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestMapToRGBABlockStreaming.png.md5
new file mode 100644
index 0000000..36206f5
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestMapToRGBABlockStreaming.png.md5
@@ -0,0 +1 @@
+e4547b848f159f14f4da62818b799e63
diff --git a/Imaging/Core/Testing/Data/Baseline/TestMapToWindowLevelColors.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestMapToWindowLevelColors.png.md5
new file mode 100644
index 0000000..fa88e15
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestMapToWindowLevelColors.png.md5
@@ -0,0 +1 @@
+0cf6f70029f1da0ebc2838066ace6137
diff --git a/Imaging/Core/Testing/Data/Baseline/TestMapToWindowLevelColors2.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestMapToWindowLevelColors2.png.md5
new file mode 100644
index 0000000..e62d922
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestMapToWindowLevelColors2.png.md5
@@ -0,0 +1 @@
+c7dc53c974518b47c88d3b7ab37d61dc
diff --git a/Imaging/Core/Testing/Data/Baseline/TestMask2.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestMask2.png.md5
new file mode 100644
index 0000000..f493fdb
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestMask2.png.md5
@@ -0,0 +1 @@
+91d6b34e38555ccd73fc347e9158f448
diff --git a/Imaging/Core/Testing/Data/Baseline/TestMedian3D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestMedian3D.png.md5
new file mode 100644
index 0000000..8613a62
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestMedian3D.png.md5
@@ -0,0 +1 @@
+bc77137dcd831d50495ffbae5070f995
diff --git a/Imaging/Core/Testing/Data/Baseline/TestNormalize.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestNormalize.png.md5
new file mode 100644
index 0000000..6625a81
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestNormalize.png.md5
@@ -0,0 +1 @@
+e6347ec288c28032a062bac5e91e3ac7
diff --git a/Imaging/Core/Testing/Data/Baseline/TestOpenClose3D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestOpenClose3D.png.md5
new file mode 100644
index 0000000..8af8097
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestOpenClose3D.png.md5
@@ -0,0 +1 @@
+4d831274bedd406d70079c11faa97aef
diff --git a/Imaging/Core/Testing/Data/Baseline/TestPermute.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestPermute.png.md5
new file mode 100644
index 0000000..857ec64
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestPermute.png.md5
@@ -0,0 +1 @@
+12154c18c8ec74b204609bbf1205dd83
diff --git a/Imaging/Core/Testing/Data/Baseline/TestQuantizeTo16Colors.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestQuantizeTo16Colors.png.md5
new file mode 100644
index 0000000..b019e18
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestQuantizeTo16Colors.png.md5
@@ -0,0 +1 @@
+6e3d9b7095b5f9a4c75585ee0581b58c
diff --git a/Imaging/Core/Testing/Data/Baseline/TestROIStencil.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestROIStencil.png.md5
new file mode 100644
index 0000000..a5b3ea3
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestROIStencil.png.md5
@@ -0,0 +1 @@
+d4c116b7368147c423c104583c04bddb
diff --git a/Imaging/Core/Testing/Data/Baseline/TestRange3D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestRange3D.png.md5
new file mode 100644
index 0000000..a2f5617
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestRange3D.png.md5
@@ -0,0 +1 @@
+1e7fceb9233f368386e779a528766b02
diff --git a/Imaging/Core/Testing/Data/Baseline/TestResample.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestResample.png.md5
new file mode 100644
index 0000000..57fbb25
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestResample.png.md5
@@ -0,0 +1 @@
+491284256392d1db2f1ff9d9c9def325
diff --git a/Imaging/Core/Testing/Data/Baseline/TestSeparableFilter.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestSeparableFilter.png.md5
new file mode 100644
index 0000000..a45d5c8
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestSeparableFilter.png.md5
@@ -0,0 +1 @@
+62d8cbbe1a7f4852c5d7f4745ccfce0d
diff --git a/Imaging/Core/Testing/Data/Baseline/TestShiftScale.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestShiftScale.png.md5
new file mode 100644
index 0000000..237abec
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestShiftScale.png.md5
@@ -0,0 +1 @@
+c641f32731e8f4a615400381f48ca81d
diff --git a/Imaging/Core/Testing/Data/Baseline/TestShiftScale2.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestShiftScale2.png.md5
new file mode 100644
index 0000000..7932e26
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestShiftScale2.png.md5
@@ -0,0 +1 @@
+b7af121a9f57a65ac8a103d863d6af62
diff --git a/Imaging/Core/Testing/Data/Baseline/TestSimpleImageExample.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestSimpleImageExample.png.md5
new file mode 100644
index 0000000..532bb59
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestSimpleImageExample.png.md5
@@ -0,0 +1 @@
+53f93094bf821bcdbca2d5d09f76ad46
diff --git a/Imaging/Core/Testing/Data/Baseline/TestSkeleton2D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestSkeleton2D.png.md5
new file mode 100644
index 0000000..74763ab
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestSkeleton2D.png.md5
@@ -0,0 +1 @@
+f7ebf4c0f1c47a3a5b2ee4fe6c21b448
diff --git a/Imaging/Core/Testing/Data/Baseline/TestSobel2D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestSobel2D.png.md5
new file mode 100644
index 0000000..a30f8e5
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestSobel2D.png.md5
@@ -0,0 +1 @@
+ac30f85b82402031b40e7e7f72a2530c
diff --git a/Imaging/Core/Testing/Data/Baseline/TestSobel3D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestSobel3D.png.md5
new file mode 100644
index 0000000..4ef0b14
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestSobel3D.png.md5
@@ -0,0 +1 @@
+bb76e56014ecafb0f909396e71619790
diff --git a/Imaging/Core/Testing/Data/Baseline/TestStencilToImage.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestStencilToImage.png.md5
new file mode 100644
index 0000000..79f0d63
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestStencilToImage.png.md5
@@ -0,0 +1 @@
+d436b16ecc3e952c3f5808c29ba0e367
diff --git a/Imaging/Core/Testing/Data/Baseline/TestStencilWithFunction.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestStencilWithFunction.png.md5
new file mode 100644
index 0000000..3f814c5
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestStencilWithFunction.png.md5
@@ -0,0 +1 @@
+f65de891671e13689d750675d8de9d68
diff --git a/Imaging/Core/Testing/Data/Baseline/TestStencilWithImage.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestStencilWithImage.png.md5
new file mode 100644
index 0000000..10ac431
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestStencilWithImage.png.md5
@@ -0,0 +1 @@
+08354463771c23050ac92a90e62f4478
diff --git a/Imaging/Core/Testing/Data/Baseline/TestSubtractStencilData.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestSubtractStencilData.png.md5
new file mode 100644
index 0000000..02dab61
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestSubtractStencilData.png.md5
@@ -0,0 +1 @@
+215cf119d0d0e6c6f22c82dbcb18e243
diff --git a/Imaging/Core/Testing/Data/Baseline/TestThreshold.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestThreshold.png.md5
new file mode 100644
index 0000000..46a9649
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestThreshold.png.md5
@@ -0,0 +1 @@
+bb7e4485f2fd309416d7ead0a9d4723f
diff --git a/Imaging/Core/Testing/Data/Baseline/TestVariance3D.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestVariance3D.png.md5
new file mode 100644
index 0000000..5737b87
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestVariance3D.png.md5
@@ -0,0 +1 @@
+84717a7f8729d6f1634c22661c49f016
diff --git a/Imaging/Core/Testing/Data/Baseline/TestWipe.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestWipe.png.md5
new file mode 100644
index 0000000..736b801
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestWipe.png.md5
@@ -0,0 +1 @@
+0fa0d383b4206bd44d2c8d46e50c0d37
diff --git a/Imaging/Core/Testing/Data/Baseline/TestWrapPad.png.md5 b/Imaging/Core/Testing/Data/Baseline/TestWrapPad.png.md5
new file mode 100644
index 0000000..4cd4fca
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/TestWrapPad.png.md5
@@ -0,0 +1 @@
+4c65c1410e6142dcd03ff6f205c1860c
diff --git a/Imaging/Core/Testing/Data/Baseline/imageMCAll.png.md5 b/Imaging/Core/Testing/Data/Baseline/imageMCAll.png.md5
new file mode 100644
index 0000000..2a3daad
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/imageMCAll.png.md5
@@ -0,0 +1 @@
+e44e02443eca110a0691522cb9af30ce
diff --git a/Imaging/Core/Testing/Data/Baseline/reconstructSurface.png.md5 b/Imaging/Core/Testing/Data/Baseline/reconstructSurface.png.md5
new file mode 100644
index 0000000..903a74a
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/reconstructSurface.png.md5
@@ -0,0 +1 @@
+85a2f9d156d3bc4d67d9a4009a8d5ebf
diff --git a/Imaging/Core/Testing/Data/Baseline/resampledTexture.png.md5 b/Imaging/Core/Testing/Data/Baseline/resampledTexture.png.md5
new file mode 100644
index 0000000..79e8aae
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/resampledTexture.png.md5
@@ -0,0 +1 @@
+7c46a7ca7473fdd39c52468005269e1c
diff --git a/Imaging/Core/Testing/Data/Baseline/voxelModel.png.md5 b/Imaging/Core/Testing/Data/Baseline/voxelModel.png.md5
new file mode 100644
index 0000000..87c06ff
--- /dev/null
+++ b/Imaging/Core/Testing/Data/Baseline/voxelModel.png.md5
@@ -0,0 +1 @@
+d7f2831ab5559018e708d192be429181
diff --git a/Imaging/Core/Testing/Python/CMakeLists.txt b/Imaging/Core/Testing/Python/CMakeLists.txt
index 4adaf4b..a03a337 100644
--- a/Imaging/Core/Testing/Python/CMakeLists.txt
+++ b/Imaging/Core/Testing/Python/CMakeLists.txt
@@ -1,94 +1,91 @@
-add_test_python(ContinuousClose2D.py Imaging)
-add_test_python(LaplacianEdgeEnhance.py Imaging)
-add_test_python(MultipleUpdateExtents.py Imaging)
-add_test_python(ResliceBSpline.py Imaging)
-add_test_python(ResliceColorWrap.py Imaging)
-add_test_python(ResliceInformationInput.py Imaging)
-add_test_python(ResliceInterpolationModes.py Imaging)
-add_test_python(ResliceInterpolationOblique.py Imaging)
-add_test_python(ResliceLanczosKaiser.py Imaging)
-add_test_python(ResliceMirrorOblique.py Imaging)
-add_test_python(ResliceMirrorPad.py Imaging)
-add_test_python(ResliceOptimizationOff.py Imaging)
-add_test_python(ReslicePermutations.py Imaging)
-add_test_python(ReslicePermuteResample.py Imaging)
-add_test_python(ReslicePermuteSlab.py Imaging)
-add_test_python(ResliceSlabModes.py Imaging)
-add_test_python(ResliceToColors.py Imaging)
-add_test_python(ResliceWrapOblique.py Imaging)
-add_test_python(ResliceWrapPad.py Imaging)
-add_test_python(Spectrum.py Imaging)
-add_test_python(TestAccumulate.py Imaging)
-add_test_python(TestAllFlips.py Imaging)
-add_test_python(TestAnisotropicDiffusion2D.py Imaging)
-add_test_python(TestAnisotropicDiffusion3D.py Imaging)
-add_test_python(TestButterworthHighPass.py Imaging)
-add_test_python(TestButterworthLowPass.py Imaging)
-add_test_python(TestCache.py Imaging)
-add_test_python(TestChangeInformation.py Imaging)
-add_test_python(TestCheckerboard.py Imaging)
-add_test_python(TestCityBlockDistance.py Imaging)
-add_test_python(TestConvolve.py Imaging)
-add_test_python(TestCorrelation.py Imaging)
-add_test_python(TestDivergence.py Imaging)
-add_test_python(TestDotProduct.py Imaging)
-add_test_python(TestEuclideanDistance.py Imaging)
-add_test_python(TestEuclideanDistanceCached.py Imaging)
-add_test_python(TestEuclideanToPolar.py Imaging)
-add_test_python(TestFFTCorrelation.py Imaging)
-add_test_python(TestGradientMagnitude.py Imaging)
-add_test_python(TestGradientMagnitude2.py Imaging)
-add_test_python(TestHSIToRGB.py Imaging)
-add_test_python(TestHSVToRGB.py Imaging)
-add_test_python(TestHybridMedian2D.py Imaging)
-add_test_python(TestIdealLowPass.py Imaging)
-add_test_python(TestImageProjection.py Imaging)
-add_test_python(TestIslandRemoval2D.py Imaging)
-add_test_python(TestLassoStencil.py Imaging)
-add_test_python(TestMapToRGBABlockStreaming.py Imaging)
-add_test_python(TestMapToWindowLevelColors.py Imaging)
-add_test_python(TestMapToWindowLevelColors2.py Imaging)
-add_test_python(TestMask2.py Imaging)
-add_test_python(TestMedian3D.py Imaging)
-add_test_python(TestNormalize.py Imaging)
-add_test_python(TestOpenClose3D.py Imaging)
-add_test_python(TestPermute.py Imaging)
-add_test_python(TestQuantizeTo16Colors.py Imaging)
-add_test_python(TestROIStencil.py Imaging)
-add_test_python(TestRange3D.py Imaging)
-add_test_python(TestResample.py Imaging)
-add_test_python(TestSeparableFilter.py Imaging)
-add_test_python(TestShiftScale.py Imaging)
-add_test_python(TestShiftScale2.py Imaging)
-add_test_python(TestSimpleImageExample.py Imaging)
-add_test_python(TestSkeleton2D.py Imaging)
-add_test_python(TestSobel3D.py Imaging)
-add_test_python(TestStencilToImage.py Imaging)
-add_test_python(TestStencilWithFunction.py Imaging)
-add_test_python(TestStencilWithImage.py Imaging)
-add_test_python(TestVariance3D.py Imaging)
-add_test_python(TestWrapPad.py Imaging)
-add_test_python(resampledTexture.py Imaging)
-add_test_python(voxelModel.py Imaging)
-add_test_python(TestExtractVOI.py Parallel)
-add_test_python(TestImageWeightedSum.py Imaging)
-# add_test_python(WindowLevelInterface.py Imaging) # TCL test not being made as well !!!
-
-if (VTK_DATA_ROOT)
-   add_test_python(IdealHighPass.py Imaging)
-   add_test_python(TestImageCanvas.py Imaging)
-   add_test_python(TestInPlaceFilter.py Imaging)
-   add_test_python(TestSobel2D.py Imaging)
-   add_test_python(TestBlendStencil.py Imaging)
-   add_test_python1(imageMCAll Baseline/Graphics)
-   add_test_python1(TestAllBlends Baseline/Imaging)
-   add_test_python1(TestAllBlendsFloat Baseline/Imaging)
-   add_test_python1(TestAllLogic Baseline/Imaging)
-   add_test_python1(TestAllMaskBits Baseline/Imaging)
-   add_test_python1(TestAllMathematics Baseline/Imaging)
-   add_test_python1(TestAllShrinks Baseline/Imaging)
-   add_test_python1(TestImageThresholdConnectivity Baseline/Imaging)
-   add_test_python1(TestThreshold Baseline/Imaging)
-   add_test_python1(TestWipe Baseline/Imaging)
-   add_test_python1(reconstructSurface Baseline/Imaging)
-endif()
+vtk_add_test_python(ContinuousClose2D.py)
+vtk_add_test_python(IdealHighPass.py)
+vtk_add_test_python(LaplacianEdgeEnhance.py)
+vtk_add_test_python(MultipleUpdateExtents.py)
+vtk_add_test_python(ResliceBSpline.py)
+vtk_add_test_python(ResliceColorWrap.py)
+vtk_add_test_python(ResliceInformationInput.py)
+vtk_add_test_python(ResliceInterpolationModes.py)
+vtk_add_test_python(ResliceInterpolationOblique.py)
+vtk_add_test_python(ResliceLanczosKaiser.py)
+vtk_add_test_python(ResliceMirrorOblique.py)
+vtk_add_test_python(ResliceMirrorPad.py)
+vtk_add_test_python(ResliceOptimizationOff.py)
+vtk_add_test_python(ReslicePermutations.py)
+vtk_add_test_python(ReslicePermuteResample.py)
+vtk_add_test_python(ReslicePermuteSlab.py)
+vtk_add_test_python(ResliceSlabModes.py)
+vtk_add_test_python(ResliceToColors.py)
+vtk_add_test_python(ResliceWrapOblique.py)
+vtk_add_test_python(ResliceWrapPad.py)
+vtk_add_test_python(Spectrum.py)
+vtk_add_test_python(TestAccumulate.py)
+vtk_add_test_python(TestAllFlips.py)
+vtk_add_test_python(TestAnisotropicDiffusion2D.py)
+vtk_add_test_python(TestAnisotropicDiffusion3D.py)
+vtk_add_test_python(TestButterworthHighPass.py)
+vtk_add_test_python(TestButterworthLowPass.py)
+vtk_add_test_python(TestCache.py)
+vtk_add_test_python(TestChangeInformation.py)
+vtk_add_test_python(TestCheckerboard.py)
+vtk_add_test_python(TestCityBlockDistance.py)
+vtk_add_test_python(TestConvolve.py)
+vtk_add_test_python(TestCorrelation.py)
+vtk_add_test_python(TestDivergence.py)
+vtk_add_test_python(TestDotProduct.py)
+vtk_add_test_python(TestEuclideanDistance.py)
+vtk_add_test_python(TestEuclideanDistanceCached.py)
+vtk_add_test_python(TestEuclideanToPolar.py)
+vtk_add_test_python(TestFFTCorrelation.py)
+vtk_add_test_python(TestGradientMagnitude.py)
+vtk_add_test_python(TestGradientMagnitude2.py)
+vtk_add_test_python(TestHSIToRGB.py)
+vtk_add_test_python(TestHSVToRGB.py)
+vtk_add_test_python(TestHybridMedian2D.py)
+vtk_add_test_python(TestIdealLowPass.py)
+vtk_add_test_python(TestImageCanvas.py)
+vtk_add_test_python(TestImageProjection.py)
+vtk_add_test_python(TestInPlaceFilter.py)
+vtk_add_test_python(TestIslandRemoval2D.py)
+vtk_add_test_python(TestLassoStencil.py)
+vtk_add_test_python(TestMapToRGBABlockStreaming.py)
+vtk_add_test_python(TestMapToWindowLevelColors.py)
+vtk_add_test_python(TestMapToWindowLevelColors2.py)
+vtk_add_test_python(TestMask2.py)
+vtk_add_test_python(TestMedian3D.py)
+vtk_add_test_python(TestNormalize.py)
+vtk_add_test_python(TestOpenClose3D.py)
+vtk_add_test_python(TestPermute.py)
+vtk_add_test_python(TestQuantizeTo16Colors.py)
+vtk_add_test_python(TestROIStencil.py)
+vtk_add_test_python(TestRange3D.py)
+vtk_add_test_python(TestResample.py)
+vtk_add_test_python(TestSeparableFilter.py)
+vtk_add_test_python(TestShiftScale.py)
+vtk_add_test_python(TestShiftScale2.py)
+vtk_add_test_python(TestSimpleImageExample.py)
+vtk_add_test_python(TestSkeleton2D.py)
+vtk_add_test_python(TestSobel2D.py)
+vtk_add_test_python(TestSobel3D.py)
+vtk_add_test_python(TestStencilToImage.py)
+vtk_add_test_python(TestStencilWithFunction.py)
+vtk_add_test_python(TestStencilWithImage.py)
+vtk_add_test_python(TestVariance3D.py)
+vtk_add_test_python(TestWrapPad.py)
+vtk_add_test_python(resampledTexture.py)
+vtk_add_test_python(voxelModel.py)
+vtk_add_test_python(TestBlendStencil.py)
+vtk_add_test_python(TestExtractVOI.py)
+vtk_add_test_python(TestImageWeightedSum.py)
+# vtk_add_test_python(WindowLevelInterface.py) # TCL test not being made as well !!!
+vtk_add_test_python(imageMCAll NO_RT)
+vtk_add_test_python(TestAllBlends NO_RT)
+vtk_add_test_python(TestAllBlendsFloat NO_RT)
+vtk_add_test_python(TestAllLogic NO_RT)
+vtk_add_test_python(TestAllMaskBits NO_RT)
+vtk_add_test_python(TestAllMathematics NO_RT)
+vtk_add_test_python(TestAllShrinks NO_RT)
+vtk_add_test_python(TestImageThresholdConnectivity NO_RT)
+vtk_add_test_python(TestThreshold NO_RT)
+vtk_add_test_python(TestWipe NO_RT)
+vtk_add_test_python(reconstructSurface NO_RT)
diff --git a/Imaging/Core/Testing/Python/TestAllBlends.py b/Imaging/Core/Testing/Python/TestAllBlends.py
index adbc599..04d321d 100755
--- a/Imaging/Core/Testing/Python/TestAllBlends.py
+++ b/Imaging/Core/Testing/Python/TestAllBlends.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestAllBlends.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/TestAllBlendsFloat.py b/Imaging/Core/Testing/Python/TestAllBlendsFloat.py
index 66d9444..719af7e 100755
--- a/Imaging/Core/Testing/Python/TestAllBlendsFloat.py
+++ b/Imaging/Core/Testing/Python/TestAllBlendsFloat.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestAllBlendsFloat.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/TestAllLogic.py b/Imaging/Core/Testing/Python/TestAllLogic.py
index 1126f1f..7863a4c 100755
--- a/Imaging/Core/Testing/Python/TestAllLogic.py
+++ b/Imaging/Core/Testing/Python/TestAllLogic.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestAllLogic.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/TestAllMaskBits.py b/Imaging/Core/Testing/Python/TestAllMaskBits.py
index dc770d3..094a3e6 100755
--- a/Imaging/Core/Testing/Python/TestAllMaskBits.py
+++ b/Imaging/Core/Testing/Python/TestAllMaskBits.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestAllMaskBits.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/TestAllMathematics.py b/Imaging/Core/Testing/Python/TestAllMathematics.py
index a48ee8a..ec5dfb0 100755
--- a/Imaging/Core/Testing/Python/TestAllMathematics.py
+++ b/Imaging/Core/Testing/Python/TestAllMathematics.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestAllMathematics.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/TestAllShrinks.py b/Imaging/Core/Testing/Python/TestAllShrinks.py
index fa7b22f..c1bab7d 100755
--- a/Imaging/Core/Testing/Python/TestAllShrinks.py
+++ b/Imaging/Core/Testing/Python/TestAllShrinks.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestAllShrinks.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/TestImageThresholdConnectivity.py b/Imaging/Core/Testing/Python/TestImageThresholdConnectivity.py
index 0f9be62..7742cf4 100755
--- a/Imaging/Core/Testing/Python/TestImageThresholdConnectivity.py
+++ b/Imaging/Core/Testing/Python/TestImageThresholdConnectivity.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestImageThresholdConnectivity.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/TestThreshold.py b/Imaging/Core/Testing/Python/TestThreshold.py
index 6f19f28..f5a9a45 100755
--- a/Imaging/Core/Testing/Python/TestThreshold.py
+++ b/Imaging/Core/Testing/Python/TestThreshold.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestThreshold.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/TestWipe.py b/Imaging/Core/Testing/Python/TestWipe.py
index c4fe0d8..1ecdec0 100755
--- a/Imaging/Core/Testing/Python/TestWipe.py
+++ b/Imaging/Core/Testing/Python/TestWipe.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestWipe.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Imaging
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/imageMCAll.py b/Imaging/Core/Testing/Python/imageMCAll.py
index 21c4d77..c4051b4 100755
--- a/Imaging/Core/Testing/Python/imageMCAll.py
+++ b/Imaging/Core/Testing/Python/imageMCAll.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython imageMCAll.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Python/reconstructSurface.py b/Imaging/Core/Testing/Python/reconstructSurface.py
index f9d091f..fd9c6b8 100755
--- a/Imaging/Core/Testing/Python/reconstructSurface.py
+++ b/Imaging/Core/Testing/Python/reconstructSurface.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython reconstructSurface.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Graphics
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Imaging/Core/Testing/Tcl/CMakeLists.txt b/Imaging/Core/Testing/Tcl/CMakeLists.txt
index 73644df..bd2799d 100644
--- a/Imaging/Core/Testing/Tcl/CMakeLists.txt
+++ b/Imaging/Core/Testing/Tcl/CMakeLists.txt
@@ -1,93 +1,90 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(ContinuousClose2D Imaging)
-  add_test_tcl(IdealHighPass Imaging)
-  add_test_tcl(LaplacianEdgeEnhance Imaging)
-  add_test_tcl(MultipleUpdateExtents Imaging)
-  add_test_tcl(ResliceBSpline Imaging)
-  add_test_tcl(ResliceColorWrap Imaging)
-  add_test_tcl(ResliceInformationInput Imaging)
-  add_test_tcl(ResliceInterpolationModes Imaging)
-  add_test_tcl(ResliceInterpolationOblique Imaging)
-  add_test_tcl(ResliceLanczosKaiser Imaging)
-  add_test_tcl(ResliceMirrorOblique Imaging)
-  add_test_tcl(ResliceMirrorPad Imaging)
-  add_test_tcl(ResliceOptimizationOff Imaging)
-  add_test_tcl(ReslicePermutations Imaging)
-  add_test_tcl(ReslicePermuteResample Imaging)
-  add_test_tcl(ReslicePermuteSlab Imaging)
-  add_test_tcl(ResliceSlabModes Imaging)
-  add_test_tcl(ResliceToColors Imaging)
-  add_test_tcl(ResliceWrapOblique Imaging)
-  add_test_tcl(ResliceWrapPad Imaging)
-  add_test_tcl(Spectrum Imaging)
-  add_test_tcl(TestAccumulate Imaging)
-  add_test_tcl(TestAllBlends Imaging)
-  add_test_tcl(TestAllBlendsFloat Imaging)
-  add_test_tcl(TestAllFlips Imaging)
-  add_test_tcl(TestAllLogic Imaging)
-  add_test_tcl(TestAllMaskBits Imaging)
-  add_test_tcl(TestAllShrinks Imaging)
-  add_test_tcl(TestAnisotropicDiffusion2D Imaging)
-  add_test_tcl(TestAnisotropicDiffusion3D Imaging)
-  add_test_tcl(TestBlendStencil Imaging)
-  add_test_tcl(TestButterworthHighPass Imaging)
-  add_test_tcl(TestButterworthLowPass Imaging)
-  add_test_tcl(TestCache Imaging)
-  add_test_tcl(TestChangeInformation Imaging)
-  add_test_tcl(TestCityBlockDistance Imaging)
-  add_test_tcl(TestDivergence Imaging)
-  add_test_tcl(TestDotProduct Imaging)
-  add_test_tcl(TestEuclideanDistance Imaging)
-  add_test_tcl(TestEuclideanDistanceCached Imaging)
-  add_test_tcl(TestGradientMagnitude Imaging)
-  add_test_tcl(TestGradientMagnitude2)
-  add_test_tcl(TestIdealLowPass Imaging)
-  add_test_tcl(TestImageCanvas Imaging)
-  add_test_tcl(TestImageProjection Imaging)
-  add_test_tcl(TestImageThresholdConnectivity Imaging)
-  add_test_tcl(TestImageWeightedSum Imaging)
-  add_test_tcl(TestInPlaceFilter Imaging)
-  add_test_tcl(TestIslandRemoval2D Imaging)
-  add_test_tcl(TestLassoStencil Imaging)
-  add_test_tcl(TestMapToRGBABlockStreaming Imaging)
-  add_test_tcl(TestMapToWindowLevelColors2 Imaging)
-  add_test_tcl(TestMask2 Imaging)
-  add_test_tcl(TestMedian3D Imaging)
-  add_test_tcl(TestOpenClose3D Imaging)
-  add_test_tcl(TestPermute Imaging)
-  add_test_tcl(TestQuantizeTo16Colors Imaging)
-  add_test_tcl(TestRange3D Imaging)
-  add_test_tcl(TestResample Imaging)
-  add_test_tcl(TestROIStencil Imaging)
-  add_test_tcl(TestSeparableFilter Imaging)
-  add_test_tcl(TestShiftScale Imaging)
-  add_test_tcl(TestShiftScale2 Imaging)
-  add_test_tcl(TestSimpleImageExample Imaging)
-  add_test_tcl(TestSobel2D Imaging)
-  add_test_tcl(TestSobel3D Imaging)
-  add_test_tcl(TestStencilWithFunction Imaging)
-  add_test_tcl(TestStencilWithImage Imaging)
-  add_test_tcl(TestThreshold Imaging)
-  add_test_tcl(TestVariance3D Imaging)
-  add_test_tcl(TestWrapPad Imaging)
-  add_test_tcl(reconstructSurface Imaging)
-  add_test_tcl(resampledTexture Imaging)
-  add_test_tcl(imageMCAll Graphics)
-endif()
-
-add_test_tcl(TestAllMathematics Imaging)
-add_test_tcl(TestCheckerboard Imaging)
-add_test_tcl(TestConvolve Imaging)
-add_test_tcl(TestCorrelation Imaging)
-add_test_tcl(TestEuclideanToPolar Imaging)
-add_test_tcl(TestFFTCorrelation Imaging)
-add_test_tcl(TestHSIToRGB Imaging)
-add_test_tcl(TestHSVToRGB Imaging)
-add_test_tcl(TestHybridMedian2D Imaging)
-add_test_tcl(TestMapToWindowLevelColors Imaging)
-add_test_tcl(TestNormalize Imaging)
-add_test_tcl(TestSkeleton2D Imaging)
-add_test_tcl(TestStencilToImage Imaging)
-add_test_tcl(TestWipe Imaging)
-add_test_tcl(voxelModel Imaging)
-add_test_tcl(TestExtractVOI Parallel)
+vtk_add_test_tcl(ContinuousClose2D)
+vtk_add_test_tcl(IdealHighPass)
+vtk_add_test_tcl(LaplacianEdgeEnhance)
+vtk_add_test_tcl(MultipleUpdateExtents)
+vtk_add_test_tcl(ResliceBSpline)
+vtk_add_test_tcl(ResliceColorWrap)
+vtk_add_test_tcl(ResliceInformationInput)
+vtk_add_test_tcl(ResliceInterpolationModes)
+vtk_add_test_tcl(ResliceInterpolationOblique)
+vtk_add_test_tcl(ResliceLanczosKaiser)
+vtk_add_test_tcl(ResliceMirrorOblique)
+vtk_add_test_tcl(ResliceMirrorPad)
+vtk_add_test_tcl(ResliceOptimizationOff)
+vtk_add_test_tcl(ReslicePermutations)
+vtk_add_test_tcl(ReslicePermuteResample)
+vtk_add_test_tcl(ReslicePermuteSlab)
+vtk_add_test_tcl(ResliceSlabModes)
+vtk_add_test_tcl(ResliceToColors)
+vtk_add_test_tcl(ResliceWrapOblique)
+vtk_add_test_tcl(ResliceWrapPad)
+vtk_add_test_tcl(Spectrum)
+vtk_add_test_tcl(TestAccumulate)
+vtk_add_test_tcl(TestAllBlends)
+vtk_add_test_tcl(TestAllBlendsFloat)
+vtk_add_test_tcl(TestAllFlips)
+vtk_add_test_tcl(TestAllLogic)
+vtk_add_test_tcl(TestAllMaskBits)
+vtk_add_test_tcl(TestAllShrinks)
+vtk_add_test_tcl(TestAnisotropicDiffusion2D)
+vtk_add_test_tcl(TestAnisotropicDiffusion3D)
+vtk_add_test_tcl(TestBlendStencil)
+vtk_add_test_tcl(TestButterworthHighPass)
+vtk_add_test_tcl(TestButterworthLowPass)
+vtk_add_test_tcl(TestCache)
+vtk_add_test_tcl(TestChangeInformation)
+vtk_add_test_tcl(TestCityBlockDistance)
+vtk_add_test_tcl(TestDivergence)
+vtk_add_test_tcl(TestDotProduct)
+vtk_add_test_tcl(TestEuclideanDistance)
+vtk_add_test_tcl(TestEuclideanDistanceCached)
+vtk_add_test_tcl(TestGradientMagnitude)
+vtk_add_test_tcl(TestGradientMagnitude2)
+vtk_add_test_tcl(TestIdealLowPass)
+vtk_add_test_tcl(TestImageCanvas)
+vtk_add_test_tcl(TestImageProjection)
+vtk_add_test_tcl(TestImageThresholdConnectivity)
+vtk_add_test_tcl(TestImageWeightedSum)
+vtk_add_test_tcl(TestInPlaceFilter)
+vtk_add_test_tcl(TestIslandRemoval2D)
+vtk_add_test_tcl(TestLassoStencil)
+vtk_add_test_tcl(TestMapToRGBABlockStreaming)
+vtk_add_test_tcl(TestMapToWindowLevelColors2)
+vtk_add_test_tcl(TestMask2)
+vtk_add_test_tcl(TestMedian3D)
+vtk_add_test_tcl(TestOpenClose3D)
+vtk_add_test_tcl(TestPermute)
+vtk_add_test_tcl(TestQuantizeTo16Colors)
+vtk_add_test_tcl(TestRange3D)
+vtk_add_test_tcl(TestResample)
+vtk_add_test_tcl(TestROIStencil)
+vtk_add_test_tcl(TestSeparableFilter)
+vtk_add_test_tcl(TestShiftScale)
+vtk_add_test_tcl(TestShiftScale2)
+vtk_add_test_tcl(TestSimpleImageExample)
+vtk_add_test_tcl(TestSobel2D)
+vtk_add_test_tcl(TestSobel3D)
+vtk_add_test_tcl(TestStencilWithFunction)
+vtk_add_test_tcl(TestStencilWithImage)
+vtk_add_test_tcl(TestThreshold)
+vtk_add_test_tcl(TestVariance3D)
+vtk_add_test_tcl(TestWrapPad)
+vtk_add_test_tcl(reconstructSurface)
+vtk_add_test_tcl(resampledTexture)
+vtk_add_test_tcl(imageMCAll)
+vtk_add_test_tcl(TestAllMathematics)
+vtk_add_test_tcl(TestCheckerboard)
+vtk_add_test_tcl(TestConvolve)
+vtk_add_test_tcl(TestCorrelation)
+vtk_add_test_tcl(TestEuclideanToPolar)
+vtk_add_test_tcl(TestFFTCorrelation)
+vtk_add_test_tcl(TestHSIToRGB)
+vtk_add_test_tcl(TestHSVToRGB)
+vtk_add_test_tcl(TestHybridMedian2D)
+vtk_add_test_tcl(TestMapToWindowLevelColors)
+vtk_add_test_tcl(TestNormalize)
+vtk_add_test_tcl(TestSkeleton2D)
+vtk_add_test_tcl(TestStencilToImage)
+vtk_add_test_tcl(TestWipe)
+vtk_add_test_tcl(voxelModel)
+vtk_add_test_tcl(TestExtractVOI)
diff --git a/Imaging/Core/vtkExtractVOI.h b/Imaging/Core/vtkExtractVOI.h
index 264ccc4..b4538e4 100644
--- a/Imaging/Core/vtkExtractVOI.h
+++ b/Imaging/Core/vtkExtractVOI.h
@@ -66,7 +66,7 @@ public:
 
 protected:
   vtkExtractVOI();
-  ~vtkExtractVOI() {};
+  ~vtkExtractVOI() {}
 
   virtual int RequestUpdateExtent(vtkInformation*,
                                   vtkInformationVector**,
diff --git a/Imaging/Core/vtkImageAppendComponents.cxx b/Imaging/Core/vtkImageAppendComponents.cxx
index 4c28893..31ead94 100644
--- a/Imaging/Core/vtkImageAppendComponents.cxx
+++ b/Imaging/Core/vtkImageAppendComponents.cxx
@@ -70,7 +70,7 @@ vtkDataObject *vtkImageAppendComponents::GetInput(int idx)
 }
 
 //----------------------------------------------------------------------------
-// This method tells the ouput it will have more components
+// This method tells the output it will have more components
 int vtkImageAppendComponents::RequestInformation (
   vtkInformation * vtkNotUsed(request),
   vtkInformationVector **inputVector,
@@ -113,7 +113,7 @@ void vtkImageAppendComponentsExecute(vtkImageAppendComponents *self,
   int numSkip = outData->GetNumberOfScalarComponents() - numIn;
   int i;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Core/vtkImageAppendComponents.h b/Imaging/Core/vtkImageAppendComponents.h
index 036da66..c2d25c8 100644
--- a/Imaging/Core/vtkImageAppendComponents.h
+++ b/Imaging/Core/vtkImageAppendComponents.h
@@ -62,8 +62,8 @@ public:
   int GetNumberOfInputs() { return this->GetNumberOfInputConnections(0); };
 
 protected:
-  vtkImageAppendComponents() {};
-  ~vtkImageAppendComponents() {};
+  vtkImageAppendComponents() {}
+  ~vtkImageAppendComponents() {}
 
   virtual int RequestInformation (vtkInformation *, vtkInformationVector **,
                                   vtkInformationVector *);
diff --git a/Imaging/Core/vtkImageBSplineInternals.h b/Imaging/Core/vtkImageBSplineInternals.h
index 0b508bc..6248dcd 100644
--- a/Imaging/Core/vtkImageBSplineInternals.h
+++ b/Imaging/Core/vtkImageBSplineInternals.h
@@ -72,8 +72,8 @@ public:
     double x, double y, double z, long degree, long border);
 
 protected:
-  vtkImageBSplineInternals() {};
-  ~vtkImageBSplineInternals() {};
+  vtkImageBSplineInternals() {}
+  ~vtkImageBSplineInternals() {}
 
   static double InitialCausalCoefficient(
     double data[], long size, long border, double pole, double tol);
diff --git a/Imaging/Core/vtkImageBlend.cxx b/Imaging/Core/vtkImageBlend.cxx
index 47a879c..0fb4373 100644
--- a/Imaging/Core/vtkImageBlend.cxx
+++ b/Imaging/Core/vtkImageBlend.cxx
@@ -547,8 +547,8 @@ void vtkImageBlendExecuteChar(vtkImageBlend *self, int extent[6],
 //----------------------------------------------------------------------------
 // This function simply does a copy (for the first input)
 //----------------------------------------------------------------------------
-void vtkImageBlendCopyData(vtkImageData *inData, vtkImageData *outData,
-                           int *ext)
+static void vtkImageBlendCopyData(vtkImageData *inData, vtkImageData *outData,
+                                  int *ext)
 {
   int idxY, idxZ, maxY, maxZ;
   vtkIdType inIncX, inIncY, inIncZ;
diff --git a/Imaging/Core/vtkImageCast.cxx b/Imaging/Core/vtkImageCast.cxx
index 52224e2..a0a3dd0 100644
--- a/Imaging/Core/vtkImageCast.cxx
+++ b/Imaging/Core/vtkImageCast.cxx
@@ -65,7 +65,7 @@ void vtkImageCastExecute(vtkImageCast *self,
   typeMax = outData->GetScalarTypeMax();
   clamp = self->GetClampOverflow();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     IT* inSI = inIt.BeginSpan();
diff --git a/Imaging/Core/vtkImageCast.h b/Imaging/Core/vtkImageCast.h
index e81e426..854ca3c 100644
--- a/Imaging/Core/vtkImageCast.h
+++ b/Imaging/Core/vtkImageCast.h
@@ -72,7 +72,7 @@ public:
 
 protected:
   vtkImageCast();
-  ~vtkImageCast() {};
+  ~vtkImageCast() {}
 
   int ClampOverflow;
   int OutputScalarType;
diff --git a/Imaging/Core/vtkImageClip.h b/Imaging/Core/vtkImageClip.h
index 7e43d51..30d0ceb 100644
--- a/Imaging/Core/vtkImageClip.h
+++ b/Imaging/Core/vtkImageClip.h
@@ -61,7 +61,7 @@ public:
 
 protected:
   vtkImageClip();
-  ~vtkImageClip() {};
+  ~vtkImageClip() {}
 
   // Time when OutputImageExtent was computed.
   vtkTimeStamp CTime;
diff --git a/Imaging/Core/vtkImageConstantPad.cxx b/Imaging/Core/vtkImageConstantPad.cxx
index 7560820..95ff835 100644
--- a/Imaging/Core/vtkImageConstantPad.cxx
+++ b/Imaging/Core/vtkImageConstantPad.cxx
@@ -65,7 +65,7 @@ void vtkImageConstantPadExecute(vtkImageConstantPad *self,
   inData->GetContinuousIncrements(inExt, inIncX, inIncY, inIncZ);
   outData->GetContinuousIncrements(outExt, outIncX, outIncY, outIncZ);
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = outExt[4]; idxZ <= outExt[5]; idxZ++)
     {
     state3 = (idxZ < inExt[4] || idxZ > inExt[5]);
diff --git a/Imaging/Core/vtkImageConstantPad.h b/Imaging/Core/vtkImageConstantPad.h
index 1f66940..709c893 100644
--- a/Imaging/Core/vtkImageConstantPad.h
+++ b/Imaging/Core/vtkImageConstantPad.h
@@ -44,7 +44,7 @@ public:
 
 protected:
   vtkImageConstantPad();
-  ~vtkImageConstantPad() {};
+  ~vtkImageConstantPad() {}
 
   double Constant;
 
diff --git a/Imaging/Core/vtkImageDecomposeFilter.h b/Imaging/Core/vtkImageDecomposeFilter.h
index a447e70..c26adee 100644
--- a/Imaging/Core/vtkImageDecomposeFilter.h
+++ b/Imaging/Core/vtkImageDecomposeFilter.h
@@ -51,7 +51,7 @@ public:
 
 protected:
   vtkImageDecomposeFilter();
-  ~vtkImageDecomposeFilter() {};
+  ~vtkImageDecomposeFilter() {}
 
   int Dimensionality;
 
diff --git a/Imaging/Core/vtkImageDifference.h b/Imaging/Core/vtkImageDifference.h
index 83336cf..312f9a3 100644
--- a/Imaging/Core/vtkImageDifference.h
+++ b/Imaging/Core/vtkImageDifference.h
@@ -92,7 +92,7 @@ public:
 
 protected:
   vtkImageDifference();
-  ~vtkImageDifference() {};
+  ~vtkImageDifference() {}
 
   double ErrorPerThread[VTK_MAX_THREADS];
   double ThresholdedErrorPerThread[VTK_MAX_THREADS];
diff --git a/Imaging/Core/vtkImageExtractComponents.h b/Imaging/Core/vtkImageExtractComponents.h
index 3314b95..eb66ae0 100644
--- a/Imaging/Core/vtkImageExtractComponents.h
+++ b/Imaging/Core/vtkImageExtractComponents.h
@@ -48,7 +48,7 @@ public:
 
 protected:
   vtkImageExtractComponents();
-  ~vtkImageExtractComponents() {};
+  ~vtkImageExtractComponents() {}
 
   int NumberOfComponents;
   int Components[3];
diff --git a/Imaging/Core/vtkImageFlip.h b/Imaging/Core/vtkImageFlip.h
index 4418399..e16fa53 100644
--- a/Imaging/Core/vtkImageFlip.h
+++ b/Imaging/Core/vtkImageFlip.h
@@ -73,7 +73,7 @@ public:
 
 protected:
   vtkImageFlip();
-  ~vtkImageFlip() {};
+  ~vtkImageFlip() {}
 
   virtual int RequestInformation(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
 
diff --git a/Imaging/Core/vtkImageMagnify.cxx b/Imaging/Core/vtkImageMagnify.cxx
index 3991fcd..703f8e6 100644
--- a/Imaging/Core/vtkImageMagnify.cxx
+++ b/Imaging/Core/vtkImageMagnify.cxx
@@ -162,7 +162,7 @@ void vtkImageMagnifyExecute(vtkImageMagnify *self,
   inMaxZ = inExt[5];
   inData->GetExtent(idxC, inMaxX, idxC, inMaxY, idxC, inMaxZ);
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxC = 0; idxC < maxC; idxC++)
     {
     inPtrZ = inPtr + idxC;
diff --git a/Imaging/Core/vtkImageMagnify.h b/Imaging/Core/vtkImageMagnify.h
index c05c4c8..02dc8d6 100644
--- a/Imaging/Core/vtkImageMagnify.h
+++ b/Imaging/Core/vtkImageMagnify.h
@@ -48,7 +48,7 @@ public:
 
 protected:
   vtkImageMagnify();
-  ~vtkImageMagnify() {};
+  ~vtkImageMagnify() {}
 
   int MagnificationFactors[3];
   int Interpolate;
diff --git a/Imaging/Core/vtkImageMask.cxx b/Imaging/Core/vtkImageMask.cxx
index 23e2a8f..fa3deb5 100644
--- a/Imaging/Core/vtkImageMask.cxx
+++ b/Imaging/Core/vtkImageMask.cxx
@@ -136,7 +136,7 @@ void vtkImageMaskExecute(vtkImageMask *self, int ext[6],
   target = static_cast<unsigned long>(num2*num1/50.0);
   target++;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idx2 = 0; idx2 < num2; ++idx2)
     {
     for (idx1 = 0; !self->AbortExecute && idx1 < num1; ++idx1)
diff --git a/Imaging/Core/vtkImageMirrorPad.cxx b/Imaging/Core/vtkImageMirrorPad.cxx
index a0642c3..9e902de 100644
--- a/Imaging/Core/vtkImageMirrorPad.cxx
+++ b/Imaging/Core/vtkImageMirrorPad.cxx
@@ -106,7 +106,7 @@ void vtkImageMirrorPadExecute(vtkImageMirrorPad *self,
     }
   inPtr = static_cast<T *>(inData->GetScalarPointer(inIdxStart[0], inIdxStart[1], inIdxStart[2]));
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   inPtrZ = inPtr;
   inIdx[2] = inIdxStart[2];
   inInc[2] = inIncStart[2];
diff --git a/Imaging/Core/vtkImageMirrorPad.h b/Imaging/Core/vtkImageMirrorPad.h
index 0e07c1f..a19e4af 100644
--- a/Imaging/Core/vtkImageMirrorPad.h
+++ b/Imaging/Core/vtkImageMirrorPad.h
@@ -32,8 +32,8 @@ public:
   vtkTypeMacro(vtkImageMirrorPad,vtkImagePadFilter);
 
 protected:
-  vtkImageMirrorPad() {};
-  ~vtkImageMirrorPad() {};
+  vtkImageMirrorPad() {}
+  ~vtkImageMirrorPad() {}
 
   void ComputeInputUpdateExtent(int inExt[6], int outExt[6], int wExt[6]);
   void ThreadedRequestData (vtkInformation* request,
diff --git a/Imaging/Core/vtkImagePadFilter.h b/Imaging/Core/vtkImagePadFilter.h
index e08c576..abe5599 100644
--- a/Imaging/Core/vtkImagePadFilter.h
+++ b/Imaging/Core/vtkImagePadFilter.h
@@ -48,7 +48,7 @@ public:
 
 protected:
   vtkImagePadFilter();
-  ~vtkImagePadFilter() {};
+  ~vtkImagePadFilter() {}
 
   int OutputWholeExtent[6];
   int OutputNumberOfScalarComponents;
diff --git a/Imaging/Core/vtkImagePermute.h b/Imaging/Core/vtkImagePermute.h
index 7c6e708..8bb5faf 100644
--- a/Imaging/Core/vtkImagePermute.h
+++ b/Imaging/Core/vtkImagePermute.h
@@ -44,7 +44,7 @@ public:
 
 protected:
   vtkImagePermute();
-  ~vtkImagePermute() {};
+  ~vtkImagePermute() {}
 
   int FilteredAxes[3];
 
diff --git a/Imaging/Core/vtkImageResample.h b/Imaging/Core/vtkImageResample.h
index a939d46..700fbd3 100644
--- a/Imaging/Core/vtkImageResample.h
+++ b/Imaging/Core/vtkImageResample.h
@@ -55,7 +55,7 @@ public:
 
 protected:
   vtkImageResample();
-  ~vtkImageResample() {};
+  ~vtkImageResample() {}
 
   double MagnificationFactors[3];
   double OutputSpacing[3];
diff --git a/Imaging/Core/vtkImageShrink3D.h b/Imaging/Core/vtkImageShrink3D.h
index e01acd6..8207624 100644
--- a/Imaging/Core/vtkImageShrink3D.h
+++ b/Imaging/Core/vtkImageShrink3D.h
@@ -69,7 +69,7 @@ public:
 
 protected:
   vtkImageShrink3D();
-  ~vtkImageShrink3D() {};
+  ~vtkImageShrink3D() {}
 
   int ShrinkFactors[3];
   int Shift[3];
diff --git a/Imaging/Core/vtkImageThreshold.h b/Imaging/Core/vtkImageThreshold.h
index b05b2fe..e46e8ff 100644
--- a/Imaging/Core/vtkImageThreshold.h
+++ b/Imaging/Core/vtkImageThreshold.h
@@ -101,7 +101,7 @@ public:
 
 protected:
   vtkImageThreshold();
-  ~vtkImageThreshold() {};
+  ~vtkImageThreshold() {}
 
   double UpperThreshold;
   double LowerThreshold;
diff --git a/Imaging/Core/vtkImageTranslateExtent.h b/Imaging/Core/vtkImageTranslateExtent.h
index f21bc66..e23591e 100644
--- a/Imaging/Core/vtkImageTranslateExtent.h
+++ b/Imaging/Core/vtkImageTranslateExtent.h
@@ -37,7 +37,7 @@ public:
 
 protected:
   vtkImageTranslateExtent();
-  ~vtkImageTranslateExtent() {};
+  ~vtkImageTranslateExtent() {}
 
   int Translation[3];
 
diff --git a/Imaging/Core/vtkImageWrapPad.h b/Imaging/Core/vtkImageWrapPad.h
index b6cf96e..c6623ef 100644
--- a/Imaging/Core/vtkImageWrapPad.h
+++ b/Imaging/Core/vtkImageWrapPad.h
@@ -37,8 +37,8 @@ public:
   vtkTypeMacro(vtkImageWrapPad,vtkImagePadFilter);
 
 protected:
-  vtkImageWrapPad() {};
-  ~vtkImageWrapPad() {};
+  vtkImageWrapPad() {}
+  ~vtkImageWrapPad() {}
 
   void ComputeInputUpdateExtent (int inExt[6], int outExt[6], int wExt[6]);
   void ThreadedRequestData (vtkInformation* request,
diff --git a/Imaging/Core/vtkRTAnalyticSource.cxx b/Imaging/Core/vtkRTAnalyticSource.cxx
index 6b1d06a..5b7c9fd 100644
--- a/Imaging/Core/vtkRTAnalyticSource.cxx
+++ b/Imaging/Core/vtkRTAnalyticSource.cxx
@@ -172,7 +172,7 @@ void vtkRTAnalyticSource::ExecuteDataWithInformation(vtkDataObject *output,
   target = static_cast<unsigned long>((maxZ+1)*(maxY+1)/50.0);
   target++;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   temp2 = 1.0 / (2.0 * this->StandardDeviation * this->StandardDeviation);
 
   double x, y, z;
diff --git a/Imaging/Fourier/module.cmake b/Imaging/Fourier/module.cmake
index e4bec9e..f6ecb4a 100644
--- a/Imaging/Fourier/module.cmake
+++ b/Imaging/Fourier/module.cmake
@@ -4,4 +4,6 @@ vtk_module(vtkImagingFourier
     StandAlone
   DEPENDS
     vtkImagingCore
+  PRIVATE_DEPENDS
+    vtksys
   )
diff --git a/Imaging/Fourier/vtkImageButterworthHighPass.h b/Imaging/Fourier/vtkImageButterworthHighPass.h
index 450e825..a4711e1 100644
--- a/Imaging/Fourier/vtkImageButterworthHighPass.h
+++ b/Imaging/Fourier/vtkImageButterworthHighPass.h
@@ -60,7 +60,7 @@ public:
 
 protected:
   vtkImageButterworthHighPass();
-  ~vtkImageButterworthHighPass() {};
+  ~vtkImageButterworthHighPass() {}
 
   int Order;
   double CutOff[3];
diff --git a/Imaging/Fourier/vtkImageButterworthLowPass.h b/Imaging/Fourier/vtkImageButterworthLowPass.h
index 222a7ea..7cb4ba8 100644
--- a/Imaging/Fourier/vtkImageButterworthLowPass.h
+++ b/Imaging/Fourier/vtkImageButterworthLowPass.h
@@ -61,7 +61,7 @@ public:
 
 protected:
   vtkImageButterworthLowPass();
-  ~vtkImageButterworthLowPass() {};
+  ~vtkImageButterworthLowPass() {}
 
   int Order;
   double CutOff[3];
diff --git a/Imaging/Fourier/vtkImageFFT.h b/Imaging/Fourier/vtkImageFFT.h
index 37d3bdc..e22f84a 100644
--- a/Imaging/Fourier/vtkImageFFT.h
+++ b/Imaging/Fourier/vtkImageFFT.h
@@ -50,8 +50,8 @@ public:
                   int num, int total);
 
 protected:
-  vtkImageFFT() {};
-  ~vtkImageFFT() {};
+  vtkImageFFT() {}
+  ~vtkImageFFT() {}
 
   virtual int IterativeRequestInformation(vtkInformation* in,
                                           vtkInformation* out);
diff --git a/Imaging/Fourier/vtkImageFourierCenter.h b/Imaging/Fourier/vtkImageFourierCenter.h
index 9834407..128d0bc 100644
--- a/Imaging/Fourier/vtkImageFourierCenter.h
+++ b/Imaging/Fourier/vtkImageFourierCenter.h
@@ -35,7 +35,7 @@ public:
 
 protected:
   vtkImageFourierCenter();
-  ~vtkImageFourierCenter() {};
+  ~vtkImageFourierCenter() {}
 
   virtual int IterativeRequestUpdateExtent(vtkInformation* in,
                                            vtkInformation* out);
diff --git a/Imaging/Fourier/vtkImageFourierFilter.h b/Imaging/Fourier/vtkImageFourierFilter.h
index a6bc5b4..ca8b403 100644
--- a/Imaging/Fourier/vtkImageFourierFilter.h
+++ b/Imaging/Fourier/vtkImageFourierFilter.h
@@ -109,8 +109,8 @@ public:
   //ETX
 
 protected:
-  vtkImageFourierFilter() {};
-  ~vtkImageFourierFilter() {};
+  vtkImageFourierFilter() {}
+  ~vtkImageFourierFilter() {}
 
   //BTX
   void ExecuteFftStep2(vtkImageComplex *p_in, vtkImageComplex *p_out,
diff --git a/Imaging/Fourier/vtkImageIdealHighPass.h b/Imaging/Fourier/vtkImageIdealHighPass.h
index a0ca850..d6d196d 100644
--- a/Imaging/Fourier/vtkImageIdealHighPass.h
+++ b/Imaging/Fourier/vtkImageIdealHighPass.h
@@ -56,7 +56,7 @@ public:
 
 protected:
   vtkImageIdealHighPass();
-  ~vtkImageIdealHighPass() {};
+  ~vtkImageIdealHighPass() {}
 
   double CutOff[3];
 
diff --git a/Imaging/Fourier/vtkImageIdealLowPass.h b/Imaging/Fourier/vtkImageIdealLowPass.h
index 1093a2e..e298be1 100644
--- a/Imaging/Fourier/vtkImageIdealLowPass.h
+++ b/Imaging/Fourier/vtkImageIdealLowPass.h
@@ -57,7 +57,7 @@ public:
 
 protected:
   vtkImageIdealLowPass();
-  ~vtkImageIdealLowPass() {};
+  ~vtkImageIdealLowPass() {}
 
   double CutOff[3];
 
diff --git a/Imaging/Fourier/vtkImageRFFT.cxx b/Imaging/Fourier/vtkImageRFFT.cxx
index 4650a94..61bf477 100644
--- a/Imaging/Fourier/vtkImageRFFT.cxx
+++ b/Imaging/Fourier/vtkImageRFFT.cxx
@@ -33,9 +33,10 @@ int vtkImageRFFT::IterativeRequestInformation(
   return 1;
 }
 
-void vtkImageRFFTInternalRequestUpdateExtent(int *inExt, int *outExt,
-                                             int *wExt,
-                                             int iteration)
+static void vtkImageRFFTInternalRequestUpdateExtent(int *inExt,
+                                                    const int *outExt,
+                                                    const int *wExt,
+                                                    int iteration)
 {
   memcpy(inExt, outExt, 6 * sizeof(int));
   inExt[iteration*2] = wExt[iteration*2];
diff --git a/Imaging/Fourier/vtkImageRFFT.h b/Imaging/Fourier/vtkImageRFFT.h
index 756e386..d50c72c 100644
--- a/Imaging/Fourier/vtkImageRFFT.h
+++ b/Imaging/Fourier/vtkImageRFFT.h
@@ -56,8 +56,8 @@ public:
                   int num, int total);
 
 protected:
-  vtkImageRFFT() {};
-  ~vtkImageRFFT() {};
+  vtkImageRFFT() {}
+  ~vtkImageRFFT() {}
 
   virtual int IterativeRequestInformation(vtkInformation* in,
                                           vtkInformation* out);
diff --git a/Imaging/General/vtkImageAnisotropicDiffusion2D.h b/Imaging/General/vtkImageAnisotropicDiffusion2D.h
index c2d71e9..b51978a 100644
--- a/Imaging/General/vtkImageAnisotropicDiffusion2D.h
+++ b/Imaging/General/vtkImageAnisotropicDiffusion2D.h
@@ -94,7 +94,7 @@ public:
 
 protected:
   vtkImageAnisotropicDiffusion2D();
-  ~vtkImageAnisotropicDiffusion2D() {};
+  ~vtkImageAnisotropicDiffusion2D() {}
 
   int NumberOfIterations;
   double DiffusionThreshold;
diff --git a/Imaging/General/vtkImageAnisotropicDiffusion3D.h b/Imaging/General/vtkImageAnisotropicDiffusion3D.h
index ffbe15c..b2a0fbd 100644
--- a/Imaging/General/vtkImageAnisotropicDiffusion3D.h
+++ b/Imaging/General/vtkImageAnisotropicDiffusion3D.h
@@ -95,7 +95,7 @@ public:
 
 protected:
   vtkImageAnisotropicDiffusion3D();
-  ~vtkImageAnisotropicDiffusion3D() {};
+  ~vtkImageAnisotropicDiffusion3D() {}
 
   int NumberOfIterations;
   double DiffusionThreshold;
diff --git a/Imaging/General/vtkImageCheckerboard.h b/Imaging/General/vtkImageCheckerboard.h
index 0148afa..dd9c038 100644
--- a/Imaging/General/vtkImageCheckerboard.h
+++ b/Imaging/General/vtkImageCheckerboard.h
@@ -46,7 +46,7 @@ public:
 
 protected:
   vtkImageCheckerboard();
-  ~vtkImageCheckerboard() {};
+  ~vtkImageCheckerboard() {}
 
   virtual void ThreadedRequestData(vtkInformation *request,
                                    vtkInformationVector **inputVector,
diff --git a/Imaging/General/vtkImageCityBlockDistance.h b/Imaging/General/vtkImageCityBlockDistance.h
index ff9ff3f..2528551 100644
--- a/Imaging/General/vtkImageCityBlockDistance.h
+++ b/Imaging/General/vtkImageCityBlockDistance.h
@@ -41,7 +41,7 @@ public:
 
 protected:
   vtkImageCityBlockDistance();
-  ~vtkImageCityBlockDistance() {};
+  ~vtkImageCityBlockDistance() {}
 
   virtual int IterativeRequestUpdateExtent(vtkInformation* in,
                                            vtkInformation* out);
diff --git a/Imaging/General/vtkImageCorrelation.cxx b/Imaging/General/vtkImageCorrelation.cxx
index 8435934..69eea79 100644
--- a/Imaging/General/vtkImageCorrelation.cxx
+++ b/Imaging/General/vtkImageCorrelation.cxx
@@ -134,7 +134,7 @@ void vtkImageCorrelationExecute(vtkImageCorrelation *self,
   maxIY = wExtent[3] - outExt[2];
   maxIX = wExtent[1] - outExt[0];
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     // how much of kernel to use
diff --git a/Imaging/General/vtkImageCorrelation.h b/Imaging/General/vtkImageCorrelation.h
index a30416d..cfb767f 100644
--- a/Imaging/General/vtkImageCorrelation.h
+++ b/Imaging/General/vtkImageCorrelation.h
@@ -52,7 +52,7 @@ public:
 
 protected:
   vtkImageCorrelation();
-  ~vtkImageCorrelation() {};
+  ~vtkImageCorrelation() {}
 
   int Dimensionality;
   virtual int RequestInformation (vtkInformation *,
diff --git a/Imaging/General/vtkImageEuclideanToPolar.cxx b/Imaging/General/vtkImageEuclideanToPolar.cxx
index df371f2..290a317 100644
--- a/Imaging/General/vtkImageEuclideanToPolar.cxx
+++ b/Imaging/General/vtkImageEuclideanToPolar.cxx
@@ -47,7 +47,7 @@ void vtkImageEuclideanToPolarExecute(vtkImageEuclideanToPolar *self,
   // find the region to loop over
   int maxC = inData->GetNumberOfScalarComponents();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/General/vtkImageEuclideanToPolar.h b/Imaging/General/vtkImageEuclideanToPolar.h
index be2e50d..9eb5830 100644
--- a/Imaging/General/vtkImageEuclideanToPolar.h
+++ b/Imaging/General/vtkImageEuclideanToPolar.h
@@ -41,7 +41,7 @@ public:
 
 protected:
   vtkImageEuclideanToPolar();
-  ~vtkImageEuclideanToPolar() {};
+  ~vtkImageEuclideanToPolar() {}
 
   double ThetaMaximum;
 
diff --git a/Imaging/General/vtkImageGradient.cxx b/Imaging/General/vtkImageGradient.cxx
index 68f5d6c..c572119 100644
--- a/Imaging/General/vtkImageGradient.cxx
+++ b/Imaging/General/vtkImageGradient.cxx
@@ -182,7 +182,7 @@ void vtkImageGradientExecute(vtkImageGradient *self,
            (outExt[2]-inExt[2])*inIncs[1] +
            (outExt[4]-inExt[4])*inIncs[2];
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     useZMin = ((idxZ + outExt[4]) <= wholeExtent[4]) ? 0 : -inIncs[2];
diff --git a/Imaging/General/vtkImageGradient.h b/Imaging/General/vtkImageGradient.h
index 0a88677..b6465b8 100644
--- a/Imaging/General/vtkImageGradient.h
+++ b/Imaging/General/vtkImageGradient.h
@@ -49,7 +49,7 @@ public:
 
 protected:
   vtkImageGradient();
-  ~vtkImageGradient() {};
+  ~vtkImageGradient() {}
 
   int HandleBoundaries;
   int Dimensionality;
diff --git a/Imaging/General/vtkImageGradientMagnitude.cxx b/Imaging/General/vtkImageGradientMagnitude.cxx
index 53c0f29..79158a8 100644
--- a/Imaging/General/vtkImageGradientMagnitude.cxx
+++ b/Imaging/General/vtkImageGradientMagnitude.cxx
@@ -178,7 +178,7 @@ void vtkImageGradientMagnitudeExecute(vtkImageGradientMagnitude *self,
            (outExt[2]-inExt[2])*inIncs[1] +
            (outExt[4]-inExt[4])*inIncs[2];
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     useZMin = ((idxZ + outExt[4]) <= wholeExtent[4]) ? 0 : -inIncs[2];
diff --git a/Imaging/General/vtkImageGradientMagnitude.h b/Imaging/General/vtkImageGradientMagnitude.h
index 577913a..422dbe2 100644
--- a/Imaging/General/vtkImageGradientMagnitude.h
+++ b/Imaging/General/vtkImageGradientMagnitude.h
@@ -50,7 +50,7 @@ public:
 
 protected:
   vtkImageGradientMagnitude();
-  ~vtkImageGradientMagnitude() {};
+  ~vtkImageGradientMagnitude() {}
 
   int HandleBoundaries;
   int Dimensionality;
diff --git a/Imaging/General/vtkImageHybridMedian2D.h b/Imaging/General/vtkImageHybridMedian2D.h
index 299e713..027686a 100644
--- a/Imaging/General/vtkImageHybridMedian2D.h
+++ b/Imaging/General/vtkImageHybridMedian2D.h
@@ -38,7 +38,7 @@ public:
 
 protected:
   vtkImageHybridMedian2D();
-  ~vtkImageHybridMedian2D() {};
+  ~vtkImageHybridMedian2D() {}
 
   void ThreadedRequestData(vtkInformation *request,
                            vtkInformationVector **inputVector,
diff --git a/Imaging/General/vtkImageLaplacian.cxx b/Imaging/General/vtkImageLaplacian.cxx
index a108ea6..1694b0a 100644
--- a/Imaging/General/vtkImageLaplacian.cxx
+++ b/Imaging/General/vtkImageLaplacian.cxx
@@ -130,7 +130,7 @@ void vtkImageLaplacianExecute(vtkImageLaplacian *self,
   inIncs = inData->GetIncrements();
   wholeExtent = inData->GetExtent();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     useZMin = ((idxZ + outExt[4]) <= wholeExtent[4]) ? 0 : -inIncs[2];
diff --git a/Imaging/General/vtkImageLaplacian.h b/Imaging/General/vtkImageLaplacian.h
index d1a2457..87520dd 100644
--- a/Imaging/General/vtkImageLaplacian.h
+++ b/Imaging/General/vtkImageLaplacian.h
@@ -44,7 +44,7 @@ public:
 
 protected:
   vtkImageLaplacian();
-  ~vtkImageLaplacian() {};
+  ~vtkImageLaplacian() {}
 
   int Dimensionality;
 
diff --git a/Imaging/General/vtkImageNormalize.cxx b/Imaging/General/vtkImageNormalize.cxx
index 75d5919..245d17e 100644
--- a/Imaging/General/vtkImageNormalize.cxx
+++ b/Imaging/General/vtkImageNormalize.cxx
@@ -63,7 +63,7 @@ void vtkImageNormalizeExecute(vtkImageNormalize *self,
   // find the region to loop over
   maxC = inData->GetNumberOfScalarComponents();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/General/vtkImageNormalize.h b/Imaging/General/vtkImageNormalize.h
index db152ac..c1ca38d 100644
--- a/Imaging/General/vtkImageNormalize.h
+++ b/Imaging/General/vtkImageNormalize.h
@@ -34,7 +34,7 @@ public:
 
 protected:
   vtkImageNormalize();
-  ~vtkImageNormalize() {};
+  ~vtkImageNormalize() {}
 
   virtual int RequestInformation (vtkInformation *, vtkInformationVector**, vtkInformationVector *);
 
diff --git a/Imaging/General/vtkImageSlabReslice.h b/Imaging/General/vtkImageSlabReslice.h
index 2aedfc9..1e60b2f 100644
--- a/Imaging/General/vtkImageSlabReslice.h
+++ b/Imaging/General/vtkImageSlabReslice.h
@@ -46,9 +46,11 @@
 #include "vtkImageReslice.h"
 
 // For backwards compatibility
+#ifndef VTK_LEGACY_REMOVE
 #define VTK_IMAGESLAB_BLEND_MIN VTK_IMAGE_SLAB_MIN
 #define VTK_IMAGESLAB_BLEND_MAX VTK_IMAGE_SLAB_MAX
 #define VTK_IMAGESLAB_BLEND_MEAN VTK_IMAGE_SLAB_MEAN
+#endif
 
 class VTKIMAGINGGENERAL_EXPORT vtkImageSlabReslice : public vtkImageReslice
 {
diff --git a/Imaging/General/vtkImageSobel2D.h b/Imaging/General/vtkImageSobel2D.h
index 735ff2a..d739ebd 100644
--- a/Imaging/General/vtkImageSobel2D.h
+++ b/Imaging/General/vtkImageSobel2D.h
@@ -35,7 +35,7 @@ public:
 
 protected:
   vtkImageSobel2D();
-  ~vtkImageSobel2D() {};
+  ~vtkImageSobel2D() {}
 
   void ThreadedRequestData(vtkInformation *request,
                            vtkInformationVector **inputVector,
diff --git a/Imaging/General/vtkImageSobel3D.h b/Imaging/General/vtkImageSobel3D.h
index 2e424dd..37f0e67 100644
--- a/Imaging/General/vtkImageSobel3D.h
+++ b/Imaging/General/vtkImageSobel3D.h
@@ -38,7 +38,7 @@ public:
 
 protected:
   vtkImageSobel3D();
-  ~vtkImageSobel3D() {};
+  ~vtkImageSobel3D() {}
 
   void ThreadedRequestData(vtkInformation *request,
                            vtkInformationVector **inputVector,
diff --git a/Imaging/General/vtkImageSpatialAlgorithm.h b/Imaging/General/vtkImageSpatialAlgorithm.h
index 51f7866..7866e1a 100644
--- a/Imaging/General/vtkImageSpatialAlgorithm.h
+++ b/Imaging/General/vtkImageSpatialAlgorithm.h
@@ -46,7 +46,7 @@ public:
 
 protected:
   vtkImageSpatialAlgorithm();
-  ~vtkImageSpatialAlgorithm() {};
+  ~vtkImageSpatialAlgorithm() {}
 
   int   KernelSize[3];
   int   KernelMiddle[3];      // Index of kernel origin
diff --git a/Imaging/General/vtkSimpleImageFilterExample.h b/Imaging/General/vtkSimpleImageFilterExample.h
index 6f01393..bd38149 100644
--- a/Imaging/General/vtkSimpleImageFilterExample.h
+++ b/Imaging/General/vtkSimpleImageFilterExample.h
@@ -34,8 +34,8 @@ public:
 
 protected:
 
-  vtkSimpleImageFilterExample() {};
-  ~vtkSimpleImageFilterExample() {};
+  vtkSimpleImageFilterExample() {}
+  ~vtkSimpleImageFilterExample() {}
 
   virtual void SimpleExecute(vtkImageData* input, vtkImageData* output);
 private:
diff --git a/Imaging/Hybrid/Testing/Cxx/CMakeLists.txt b/Imaging/Hybrid/Testing/Cxx/CMakeLists.txt
index c9fb8d5..8baf9ba 100644
--- a/Imaging/Hybrid/Testing/Cxx/CMakeLists.txt
+++ b/Imaging/Hybrid/Testing/Cxx/CMakeLists.txt
@@ -1,16 +1,2 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestSampleFunction.cxx
-
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName})
-endforeach()
+vtk_add_test_cxx(TestSampleFunction.cxx NO_DATA NO_VALID NO_OUTPUT)
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Imaging/Hybrid/Testing/Data/Baseline/genHead.png.md5 b/Imaging/Hybrid/Testing/Data/Baseline/genHead.png.md5
new file mode 100644
index 0000000..720b4c6
--- /dev/null
+++ b/Imaging/Hybrid/Testing/Data/Baseline/genHead.png.md5
@@ -0,0 +1 @@
+e82986908c6c53362f13929ad2700f70
diff --git a/Imaging/Hybrid/Testing/Data/Baseline/iceCream.png.md5 b/Imaging/Hybrid/Testing/Data/Baseline/iceCream.png.md5
new file mode 100644
index 0000000..ac154aa
--- /dev/null
+++ b/Imaging/Hybrid/Testing/Data/Baseline/iceCream.png.md5
@@ -0,0 +1 @@
+59ecaa15a4da3f90479de6a2baf11d12
diff --git a/Imaging/Hybrid/Testing/Data/Baseline/shepards.png.md5 b/Imaging/Hybrid/Testing/Data/Baseline/shepards.png.md5
new file mode 100644
index 0000000..2d6e528
--- /dev/null
+++ b/Imaging/Hybrid/Testing/Data/Baseline/shepards.png.md5
@@ -0,0 +1 @@
+89e32bcf61772e4c2195439bfaed1af9
diff --git a/Imaging/Hybrid/Testing/Data/Baseline/triangularTexture.png.md5 b/Imaging/Hybrid/Testing/Data/Baseline/triangularTexture.png.md5
new file mode 100644
index 0000000..406c7bb
--- /dev/null
+++ b/Imaging/Hybrid/Testing/Data/Baseline/triangularTexture.png.md5
@@ -0,0 +1 @@
+73e4e1b8e566ed7f22cd7485542195f2
diff --git a/Imaging/Hybrid/Testing/Python/CMakeLists.txt b/Imaging/Hybrid/Testing/Python/CMakeLists.txt
index 97d2309..1a9db21 100644
--- a/Imaging/Hybrid/Testing/Python/CMakeLists.txt
+++ b/Imaging/Hybrid/Testing/Python/CMakeLists.txt
@@ -1,7 +1,4 @@
-add_test_python(iceCream.py Graphics)
-add_test_python(shepards.py Graphics)
-add_test_python(triangularTexture.py Graphics)
-
-if (VTK_DATA_ROOT)
-  add_test_python(genHead.py Graphics)
-endif()
+vtk_add_test_python(genHead.py)
+vtk_add_test_python(iceCream.py)
+vtk_add_test_python(shepards.py)
+vtk_add_test_python(triangularTexture.py)
diff --git a/Imaging/Hybrid/Testing/Tcl/CMakeLists.txt b/Imaging/Hybrid/Testing/Tcl/CMakeLists.txt
index 099e299..3fcd5f3 100644
--- a/Imaging/Hybrid/Testing/Tcl/CMakeLists.txt
+++ b/Imaging/Hybrid/Testing/Tcl/CMakeLists.txt
@@ -1,8 +1,4 @@
-# Tests with test images in Baseline/Graphics
-#
-if(VTK_DATA_ROOT)
-  add_test_tcl(genHead Graphics)
-endif()
-add_test_tcl(iceCream Graphics)
-add_test_tcl(shepards Graphics)
-add_test_tcl(triangularTexture Graphics)
+vtk_add_test_tcl(genHead)
+vtk_add_test_tcl(iceCream)
+vtk_add_test_tcl(shepards)
+vtk_add_test_tcl(triangularTexture)
diff --git a/Imaging/Hybrid/vtkBooleanTexture.h b/Imaging/Hybrid/vtkBooleanTexture.h
index 2e07bee..b1b42fc 100644
--- a/Imaging/Hybrid/vtkBooleanTexture.h
+++ b/Imaging/Hybrid/vtkBooleanTexture.h
@@ -114,7 +114,7 @@ public:
 
 protected:
   vtkBooleanTexture();
-  ~vtkBooleanTexture() {};
+  ~vtkBooleanTexture() {}
 
   virtual int RequestInformation (vtkInformation *, vtkInformationVector**, vtkInformationVector *);
   virtual void ExecuteDataWithInformation(vtkDataObject *data, vtkInformation* outInfo);
diff --git a/Imaging/Hybrid/vtkGaussianSplatter.h b/Imaging/Hybrid/vtkGaussianSplatter.h
index 522208d..380d41d 100644
--- a/Imaging/Hybrid/vtkGaussianSplatter.h
+++ b/Imaging/Hybrid/vtkGaussianSplatter.h
@@ -188,7 +188,7 @@ public:
 
 protected:
   vtkGaussianSplatter();
-  ~vtkGaussianSplatter() {};
+  ~vtkGaussianSplatter() {}
 
   virtual int FillInputPortInformation(int port, vtkInformation* info);
   virtual int RequestInformation (vtkInformation *,
diff --git a/Imaging/Hybrid/vtkImageCursor3D.h b/Imaging/Hybrid/vtkImageCursor3D.h
index f5e0d42..7e191aa 100644
--- a/Imaging/Hybrid/vtkImageCursor3D.h
+++ b/Imaging/Hybrid/vtkImageCursor3D.h
@@ -49,7 +49,7 @@ public:
 
 protected:
   vtkImageCursor3D();
-  ~vtkImageCursor3D() {};
+  ~vtkImageCursor3D() {}
 
   double CursorPosition[3];
   double CursorValue;
diff --git a/Imaging/Hybrid/vtkImageRectilinearWipe.cxx b/Imaging/Hybrid/vtkImageRectilinearWipe.cxx
index d2dc9ef..3937d21 100644
--- a/Imaging/Hybrid/vtkImageRectilinearWipe.cxx
+++ b/Imaging/Hybrid/vtkImageRectilinearWipe.cxx
@@ -94,7 +94,7 @@ void vtkImageRectilinearWipeExecute2(vtkImageRectilinearWipe *self,
 
 //----------------------------------------------------------------------------
 // This function adjusts the extents of the wipe to the output extents.
-int vtkImageRectilinearWipeClampExtents(int wipeExt[6], int outExt[6])
+static int vtkImageRectilinearWipeClampExtents(int wipeExt[6], int outExt[6])
 {
   int status = 1;
 
diff --git a/Imaging/Hybrid/vtkImageRectilinearWipe.h b/Imaging/Hybrid/vtkImageRectilinearWipe.h
index b90923d..e0c550b 100644
--- a/Imaging/Hybrid/vtkImageRectilinearWipe.h
+++ b/Imaging/Hybrid/vtkImageRectilinearWipe.h
@@ -115,7 +115,7 @@ public:
 
 protected:
   vtkImageRectilinearWipe();
-  ~vtkImageRectilinearWipe() {};
+  ~vtkImageRectilinearWipe() {}
 
   virtual void ThreadedRequestData(vtkInformation *request,
                                    vtkInformationVector **inputVector,
diff --git a/Imaging/Hybrid/vtkPointLoad.h b/Imaging/Hybrid/vtkPointLoad.h
index 1bf9f8f..68b2dcd 100644
--- a/Imaging/Hybrid/vtkPointLoad.h
+++ b/Imaging/Hybrid/vtkPointLoad.h
@@ -71,14 +71,14 @@ public:
   // Description:
   // Turn on/off computation of effective stress scalar. These methods do
   // nothing. The effective stress is always computed.
-  void SetComputeEffectiveStress(int) {};
+  void SetComputeEffectiveStress(int) {}
   int GetComputeEffectiveStress() {return 1;};
-  void ComputeEffectiveStressOn() {};
-  void ComputeEffectiveStressOff() {};
+  void ComputeEffectiveStressOn() {}
+  void ComputeEffectiveStressOff() {}
 
 protected:
   vtkPointLoad();
-  ~vtkPointLoad() {};
+  ~vtkPointLoad() {}
 
   virtual int RequestInformation (vtkInformation *,
                                    vtkInformationVector **,
diff --git a/Imaging/Hybrid/vtkSampleFunction.cxx b/Imaging/Hybrid/vtkSampleFunction.cxx
index d08f074..7057e40 100644
--- a/Imaging/Hybrid/vtkSampleFunction.cxx
+++ b/Imaging/Hybrid/vtkSampleFunction.cxx
@@ -97,7 +97,7 @@ void vtkSampleFunction::SetSampleDimensions(int dim[3])
 }
 
 // Set the bounds of the model
-void vtkSampleFunction::SetModelBounds(double bounds[6])
+void vtkSampleFunction::SetModelBounds(const double bounds[6])
 {
   this->SetModelBounds(bounds[0], bounds[1],
                        bounds[2], bounds[3],
diff --git a/Imaging/Hybrid/vtkSampleFunction.h b/Imaging/Hybrid/vtkSampleFunction.h
index c2ff41c..ae83455 100644
--- a/Imaging/Hybrid/vtkSampleFunction.h
+++ b/Imaging/Hybrid/vtkSampleFunction.h
@@ -92,7 +92,7 @@ public:
   // Description:
   // Specify the region in space over which the sampling occurs. The
   // bounds is specified as (xMin,xMax, yMin,yMax, zMin,zMax).
-  void SetModelBounds(double bounds[6]);
+  void SetModelBounds(const double bounds[6]);
   void SetModelBounds(double xMin, double xMax,
                       double yMin, double yMax,
                       double zMin, double zMax);
diff --git a/Imaging/Hybrid/vtkShepardMethod.h b/Imaging/Hybrid/vtkShepardMethod.h
index 5a7fe5f..2eab3a1 100644
--- a/Imaging/Hybrid/vtkShepardMethod.h
+++ b/Imaging/Hybrid/vtkShepardMethod.h
@@ -88,7 +88,7 @@ public:
 
 protected:
   vtkShepardMethod();
-  ~vtkShepardMethod() {};
+  ~vtkShepardMethod() {}
 
   virtual int RequestInformation (vtkInformation *,
                                   vtkInformationVector **,
diff --git a/Imaging/Hybrid/vtkSurfaceReconstructionFilter.h b/Imaging/Hybrid/vtkSurfaceReconstructionFilter.h
index 5deb62f..72090c3 100644
--- a/Imaging/Hybrid/vtkSurfaceReconstructionFilter.h
+++ b/Imaging/Hybrid/vtkSurfaceReconstructionFilter.h
@@ -56,7 +56,7 @@ public:
 
 protected:
   vtkSurfaceReconstructionFilter();
-  ~vtkSurfaceReconstructionFilter() {};
+  ~vtkSurfaceReconstructionFilter() {}
 
   virtual int RequestInformation (vtkInformation *,
                                   vtkInformationVector **,
diff --git a/Imaging/Hybrid/vtkTriangularTexture.h b/Imaging/Hybrid/vtkTriangularTexture.h
index bff0c2f..e09a226 100644
--- a/Imaging/Hybrid/vtkTriangularTexture.h
+++ b/Imaging/Hybrid/vtkTriangularTexture.h
@@ -67,7 +67,7 @@ public:
 
 protected:
   vtkTriangularTexture();
-  ~vtkTriangularTexture() {};
+  ~vtkTriangularTexture() {}
 
   virtual int RequestInformation (vtkInformation *, vtkInformationVector**, vtkInformationVector *);
   virtual void ExecuteDataWithInformation(vtkDataObject *data, vtkInformation *outInfo);
diff --git a/Imaging/Hybrid/vtkVoxelModeller.cxx b/Imaging/Hybrid/vtkVoxelModeller.cxx
index ddcf80f..c959ba0 100644
--- a/Imaging/Hybrid/vtkVoxelModeller.cxx
+++ b/Imaging/Hybrid/vtkVoxelModeller.cxx
@@ -53,7 +53,7 @@ vtkVoxelModeller::vtkVoxelModeller()
 }
 
 // Specify the position in space to perform the voxelization.
-void vtkVoxelModeller::SetModelBounds(double bounds[6])
+void vtkVoxelModeller::SetModelBounds(const double bounds[6])
 {
   vtkVoxelModeller::SetModelBounds(bounds[0], bounds[1], bounds[2], bounds[3],
                                    bounds[4], bounds[5]);
diff --git a/Imaging/Hybrid/vtkVoxelModeller.h b/Imaging/Hybrid/vtkVoxelModeller.h
index b2d641b..0aa3200 100644
--- a/Imaging/Hybrid/vtkVoxelModeller.h
+++ b/Imaging/Hybrid/vtkVoxelModeller.h
@@ -65,7 +65,7 @@ public:
   // Description:
   // Specify the position in space to perform the voxelization.
   // Default is (0, 0, 0, 0, 0, 0)
-  void SetModelBounds(double bounds[6]);
+  void SetModelBounds(const double bounds[6]);
   void SetModelBounds(double xmin, double xmax, double ymin, double ymax, double zmin, double zmax);
   vtkGetVectorMacro(ModelBounds,double,6);
 
@@ -106,7 +106,7 @@ public:
   vtkGetMacro(BackgroundValue, double);
 protected:
   vtkVoxelModeller();
-  ~vtkVoxelModeller() {};
+  ~vtkVoxelModeller() {}
 
   virtual int RequestInformation (vtkInformation *,
                                   vtkInformationVector **,
diff --git a/Imaging/Math/vtkImageDivergence.cxx b/Imaging/Math/vtkImageDivergence.cxx
index 55b3a92..bbff956 100644
--- a/Imaging/Math/vtkImageDivergence.cxx
+++ b/Imaging/Math/vtkImageDivergence.cxx
@@ -156,7 +156,7 @@ void vtkImageDivergenceExecute(vtkImageDivergence *self,
   inIncs = inData->GetIncrements();
   wholeExtent = inData->GetExtent();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     useMin[2] = ((idxZ + outExt[4]) <= wholeExtent[4]) ? 0 : -inIncs[2];
diff --git a/Imaging/Math/vtkImageDivergence.h b/Imaging/Math/vtkImageDivergence.h
index 087d71e..cba48f7 100644
--- a/Imaging/Math/vtkImageDivergence.h
+++ b/Imaging/Math/vtkImageDivergence.h
@@ -35,7 +35,7 @@ public:
 
 protected:
   vtkImageDivergence();
-  ~vtkImageDivergence() {};
+  ~vtkImageDivergence() {}
 
   virtual int RequestUpdateExtent(vtkInformation*,
                                   vtkInformationVector**,
diff --git a/Imaging/Math/vtkImageDotProduct.cxx b/Imaging/Math/vtkImageDotProduct.cxx
index b6e4a3b..2d9d74b 100644
--- a/Imaging/Math/vtkImageDotProduct.cxx
+++ b/Imaging/Math/vtkImageDotProduct.cxx
@@ -61,7 +61,7 @@ void vtkImageDotProductExecute(vtkImageDotProduct *self,
   int maxC = in1Data->GetNumberOfScalarComponents();
   int idxC;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI1 = inIt1.BeginSpan();
diff --git a/Imaging/Math/vtkImageDotProduct.h b/Imaging/Math/vtkImageDotProduct.h
index d92cdb8..9fb787a 100644
--- a/Imaging/Math/vtkImageDotProduct.h
+++ b/Imaging/Math/vtkImageDotProduct.h
@@ -38,7 +38,7 @@ public:
 
 protected:
   vtkImageDotProduct();
-  ~vtkImageDotProduct() {};
+  ~vtkImageDotProduct() {}
 
   virtual int RequestInformation (vtkInformation *,
                                   vtkInformationVector **,
diff --git a/Imaging/Math/vtkImageLogarithmicScale.cxx b/Imaging/Math/vtkImageLogarithmicScale.cxx
index 49c17c1..d584325 100644
--- a/Imaging/Math/vtkImageLogarithmicScale.cxx
+++ b/Imaging/Math/vtkImageLogarithmicScale.cxx
@@ -45,7 +45,7 @@ void vtkImageLogarithmicScaleExecute(vtkImageLogarithmicScale *self,
 
   c = self->GetConstant();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Math/vtkImageLogarithmicScale.h b/Imaging/Math/vtkImageLogarithmicScale.h
index 5899c03..368133b 100644
--- a/Imaging/Math/vtkImageLogarithmicScale.h
+++ b/Imaging/Math/vtkImageLogarithmicScale.h
@@ -41,7 +41,7 @@ public:
 
 protected:
   vtkImageLogarithmicScale();
-  ~vtkImageLogarithmicScale() {};
+  ~vtkImageLogarithmicScale() {}
 
   double Constant;
 
diff --git a/Imaging/Math/vtkImageLogic.cxx b/Imaging/Math/vtkImageLogic.cxx
index 06df075..0954bea 100644
--- a/Imaging/Math/vtkImageLogic.cxx
+++ b/Imaging/Math/vtkImageLogic.cxx
@@ -48,7 +48,7 @@ void vtkImageLogicExecute1(vtkImageLogic *self, vtkImageData *inData,
   T trueValue = static_cast<T>(self->GetOutputTrueValue());
   int op = self->GetOperation();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
@@ -108,7 +108,7 @@ void vtkImageLogicExecute2(vtkImageLogic *self, vtkImageData *in1Data,
   T trueValue = static_cast<T>(self->GetOutputTrueValue());
   int op = self->GetOperation();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI1 = inIt1.BeginSpan();
diff --git a/Imaging/Math/vtkImageLogic.h b/Imaging/Math/vtkImageLogic.h
index caddc27..003f8f3 100644
--- a/Imaging/Math/vtkImageLogic.h
+++ b/Imaging/Math/vtkImageLogic.h
@@ -70,7 +70,7 @@ public:
 
 protected:
   vtkImageLogic();
-  ~vtkImageLogic() {};
+  ~vtkImageLogic() {}
 
   int Operation;
   double OutputTrueValue;
diff --git a/Imaging/Math/vtkImageMagnitude.cxx b/Imaging/Math/vtkImageMagnitude.cxx
index 23b3d0a..d899746 100644
--- a/Imaging/Math/vtkImageMagnitude.cxx
+++ b/Imaging/Math/vtkImageMagnitude.cxx
@@ -62,7 +62,7 @@ void vtkImageMagnitudeExecute(vtkImageMagnitude *self,
   int maxC = inData->GetNumberOfScalarComponents();
   int idxC;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Math/vtkImageMagnitude.h b/Imaging/Math/vtkImageMagnitude.h
index 9c63f3c..d2d9f99 100644
--- a/Imaging/Math/vtkImageMagnitude.h
+++ b/Imaging/Math/vtkImageMagnitude.h
@@ -32,7 +32,7 @@ public:
 
 protected:
   vtkImageMagnitude();
-  ~vtkImageMagnitude() {};
+  ~vtkImageMagnitude() {}
 
   virtual int RequestInformation (vtkInformation *, vtkInformationVector**,
                                   vtkInformationVector *);
diff --git a/Imaging/Math/vtkImageMaskBits.cxx b/Imaging/Math/vtkImageMaskBits.cxx
index 1745ff0..0511d8a 100644
--- a/Imaging/Math/vtkImageMaskBits.cxx
+++ b/Imaging/Math/vtkImageMaskBits.cxx
@@ -56,7 +56,7 @@ void vtkImageMaskBitsExecute(vtkImageMaskBits *self,
   masks = self->GetMasks();
   operation = self->GetOperation();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     T* inSI = inIt.BeginSpan();
diff --git a/Imaging/Math/vtkImageMaskBits.h b/Imaging/Math/vtkImageMaskBits.h
index 9fec09f..ee14ffe 100644
--- a/Imaging/Math/vtkImageMaskBits.h
+++ b/Imaging/Math/vtkImageMaskBits.h
@@ -56,7 +56,7 @@ public:
 
 protected:
   vtkImageMaskBits();
-  ~vtkImageMaskBits() {};
+  ~vtkImageMaskBits() {}
 
   void ThreadedExecute (vtkImageData *inData, vtkImageData *outData,
                        int ext[6], int id);
diff --git a/Imaging/Math/vtkImageMathematics.cxx b/Imaging/Math/vtkImageMathematics.cxx
index 4cd4d20..cbe172f 100644
--- a/Imaging/Math/vtkImageMathematics.cxx
+++ b/Imaging/Math/vtkImageMathematics.cxx
@@ -273,7 +273,7 @@ void vtkImageMathematicsExecute2(vtkImageMathematics *self,
   in2Data->GetContinuousIncrements(outExt, in2IncX, in2IncY, in2IncZ);
   outData->GetContinuousIncrements(outExt, outIncX, outIncY, outIncZ);
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     for (idxY = 0; !self->AbortExecute && idxY <= maxY; idxY++)
diff --git a/Imaging/Math/vtkImageMathematics.h b/Imaging/Math/vtkImageMathematics.h
index 3eb418d..9ef099e 100644
--- a/Imaging/Math/vtkImageMathematics.h
+++ b/Imaging/Math/vtkImageMathematics.h
@@ -183,7 +183,7 @@ public:
 
 protected:
   vtkImageMathematics();
-  ~vtkImageMathematics() {};
+  ~vtkImageMathematics() {}
 
   int Operation;
   double ConstantK;
diff --git a/Imaging/Math/vtkImageWeightedSum.cxx b/Imaging/Math/vtkImageWeightedSum.cxx
index 74aa647..288ed1c 100644
--- a/Imaging/Math/vtkImageWeightedSum.cxx
+++ b/Imaging/Math/vtkImageWeightedSum.cxx
@@ -100,7 +100,7 @@ void vtkImageWeightedSumExecute(vtkImageWeightedSum *self,
     {
     inIts[i].Initialize(inDatas[i], outExt);
     }
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     for(int j=0; j < numInputs; ++j)
diff --git a/Imaging/Morphological/vtkImageIslandRemoval2D.h b/Imaging/Morphological/vtkImageIslandRemoval2D.h
index 10ca0fa..7868410 100644
--- a/Imaging/Morphological/vtkImageIslandRemoval2D.h
+++ b/Imaging/Morphological/vtkImageIslandRemoval2D.h
@@ -68,7 +68,7 @@ public:
 
 protected:
   vtkImageIslandRemoval2D();
-  ~vtkImageIslandRemoval2D() {};
+  ~vtkImageIslandRemoval2D() {}
 
   int AreaThreshold;
   int SquareNeighborhood;
diff --git a/Imaging/Morphological/vtkImageNonMaximumSuppression.cxx b/Imaging/Morphological/vtkImageNonMaximumSuppression.cxx
index d61eecf..5ef709f 100644
--- a/Imaging/Morphological/vtkImageNonMaximumSuppression.cxx
+++ b/Imaging/Morphological/vtkImageNonMaximumSuppression.cxx
@@ -164,7 +164,7 @@ void vtkImageNonMaximumSuppressionExecute(vtkImageNonMaximumSuppression *self,
   // Gradient is computed with data spacing (world coordinates)
   ratio = in2Data->GetSpacing();
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     useZMin = ((idxZ + outExt[4]) <= wholeExtent[4]) ? 0 : -inIncs[2];
diff --git a/Imaging/Morphological/vtkImageNonMaximumSuppression.h b/Imaging/Morphological/vtkImageNonMaximumSuppression.h
index f755c3b..9491935 100644
--- a/Imaging/Morphological/vtkImageNonMaximumSuppression.h
+++ b/Imaging/Morphological/vtkImageNonMaximumSuppression.h
@@ -58,7 +58,7 @@ public:
 
 protected:
   vtkImageNonMaximumSuppression();
-  ~vtkImageNonMaximumSuppression() {};
+  ~vtkImageNonMaximumSuppression() {}
 
   int HandleBoundaries;
   int Dimensionality;
diff --git a/Imaging/Morphological/vtkImageSkeleton2D.h b/Imaging/Morphological/vtkImageSkeleton2D.h
index 9678912..984ab88 100644
--- a/Imaging/Morphological/vtkImageSkeleton2D.h
+++ b/Imaging/Morphological/vtkImageSkeleton2D.h
@@ -51,7 +51,7 @@ public:
 
 protected:
   vtkImageSkeleton2D();
-  ~vtkImageSkeleton2D() {};
+  ~vtkImageSkeleton2D() {}
 
   int Prune;
 
diff --git a/Imaging/Morphological/vtkImageThresholdConnectivity.cxx b/Imaging/Morphological/vtkImageThresholdConnectivity.cxx
index 68d6954..a6b23f5 100644
--- a/Imaging/Morphological/vtkImageThresholdConnectivity.cxx
+++ b/Imaging/Morphological/vtkImageThresholdConnectivity.cxx
@@ -286,7 +286,7 @@ void vtkImageThresholdConnectivityValues(
 }
 
 //----------------------------------------------------------------------------
-void vtkImageThresholdConnectivityApplyStencil(
+static void vtkImageThresholdConnectivityApplyStencil(
   vtkImageData *maskData, vtkImageStencilData *stencil, int extent[6])
 {
   vtkImageStencilIterator<unsigned char> iter(maskData, stencil, extent);
diff --git a/Imaging/Sources/vtkImageGaussianSource.cxx b/Imaging/Sources/vtkImageGaussianSource.cxx
index e0b7320..8b46531 100644
--- a/Imaging/Sources/vtkImageGaussianSource.cxx
+++ b/Imaging/Sources/vtkImageGaussianSource.cxx
@@ -141,7 +141,7 @@ int vtkImageGaussianSource::RequestData(
   target = static_cast<unsigned long>((maxZ+1)*(maxY+1)/50.0);
   target++;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   temp2 = 1.0 / (2.0 * this->StandardDeviation * this->StandardDeviation);
 
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
diff --git a/Imaging/Sources/vtkImageGaussianSource.h b/Imaging/Sources/vtkImageGaussianSource.h
index d79582b..ba8b98c 100644
--- a/Imaging/Sources/vtkImageGaussianSource.h
+++ b/Imaging/Sources/vtkImageGaussianSource.h
@@ -53,7 +53,7 @@ public:
 
 protected:
   vtkImageGaussianSource();
-  ~vtkImageGaussianSource() {};
+  ~vtkImageGaussianSource() {}
 
   double StandardDeviation;
   int WholeExtent[6];
diff --git a/Imaging/Sources/vtkImageGridSource.cxx b/Imaging/Sources/vtkImageGridSource.cxx
index f9ef945..826fe25 100644
--- a/Imaging/Sources/vtkImageGridSource.cxx
+++ b/Imaging/Sources/vtkImageGridSource.cxx
@@ -92,7 +92,7 @@ void vtkImageGridSourceExecute(vtkImageGridSource *self,
                                       (outExt[3]-outExt[2]+1)/50.0);
   target++;
 
-  // Loop through ouput pixel
+  // Loop through output pixel
   for (idxZ = outExt[4]; idxZ <= outExt[5]; idxZ++)
     {
     if (gridSpacing[2])
diff --git a/Imaging/Sources/vtkImageGridSource.h b/Imaging/Sources/vtkImageGridSource.h
index cfa76d8..0dd6907 100644
--- a/Imaging/Sources/vtkImageGridSource.h
+++ b/Imaging/Sources/vtkImageGridSource.h
@@ -84,7 +84,7 @@ public:
 
 protected:
   vtkImageGridSource();
-  ~vtkImageGridSource() {};
+  ~vtkImageGridSource() {}
 
   int GridSpacing[3];
   int GridOrigin[3];
diff --git a/Imaging/Sources/vtkImageNoiseSource.cxx b/Imaging/Sources/vtkImageNoiseSource.cxx
index 72c2139..aa97644 100644
--- a/Imaging/Sources/vtkImageNoiseSource.cxx
+++ b/Imaging/Sources/vtkImageNoiseSource.cxx
@@ -107,7 +107,7 @@ void vtkImageNoiseSource::ExecuteDataWithInformation(vtkDataObject *output,
 
   vtkImageProgressIterator<double> outIt(data, data->GetExtent(), this, 0);
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIt.IsAtEnd())
     {
     double* outSI = outIt.BeginSpan();
diff --git a/Imaging/Sources/vtkImageNoiseSource.h b/Imaging/Sources/vtkImageNoiseSource.h
index fba1703..62e9c6f 100644
--- a/Imaging/Sources/vtkImageNoiseSource.h
+++ b/Imaging/Sources/vtkImageNoiseSource.h
@@ -55,7 +55,7 @@ public:
 
 protected:
   vtkImageNoiseSource();
-  ~vtkImageNoiseSource() {};
+  ~vtkImageNoiseSource() {}
 
   double Minimum;
   double Maximum;
diff --git a/Imaging/Sources/vtkImageSinusoidSource.cxx b/Imaging/Sources/vtkImageSinusoidSource.cxx
index d82157a..eab5036 100644
--- a/Imaging/Sources/vtkImageSinusoidSource.cxx
+++ b/Imaging/Sources/vtkImageSinusoidSource.cxx
@@ -174,7 +174,7 @@ void vtkImageSinusoidSource::ExecuteDataWithInformation(vtkDataObject *output,
   target = static_cast<unsigned long>((maxZ+1)*(maxY+1)/50.0);
   target++;
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   for (idxZ = 0; idxZ <= maxZ; idxZ++)
     {
     zContrib = this->Direction[2] * (idxZ + outExt[4]);
diff --git a/Imaging/Sources/vtkImageSinusoidSource.h b/Imaging/Sources/vtkImageSinusoidSource.h
index ccea280..ae03233 100644
--- a/Imaging/Sources/vtkImageSinusoidSource.h
+++ b/Imaging/Sources/vtkImageSinusoidSource.h
@@ -60,7 +60,7 @@ public:
 
 protected:
   vtkImageSinusoidSource();
-  ~vtkImageSinusoidSource() {};
+  ~vtkImageSinusoidSource() {}
 
   int WholeExtent[6];
   double Direction[3];
diff --git a/Imaging/Stencil/vtkImageStencilToImage.cxx b/Imaging/Stencil/vtkImageStencilToImage.cxx
index 648e68a..e7e2be7 100644
--- a/Imaging/Stencil/vtkImageStencilToImage.cxx
+++ b/Imaging/Stencil/vtkImageStencilToImage.cxx
@@ -103,7 +103,7 @@ void vtkImageStencilToImageExecute(
 
   vtkImageStencilIterator<T> outIter(outData, stencil, outExt, self, id);
 
-  // Loop through ouput pixels
+  // Loop through output pixels
   while (!outIter.IsAtEnd())
     {
     T* outPtr = outIter.BeginSpan();
diff --git a/Infovis/Boost/Testing/Cxx/CMakeLists.txt b/Infovis/Boost/Testing/Cxx/CMakeLists.txt
index 77cf49d..34b4e06 100644
--- a/Infovis/Boost/Testing/Cxx/CMakeLists.txt
+++ b/Infovis/Boost/Testing/Cxx/CMakeLists.txt
@@ -6,6 +6,6 @@ include_directories(${Boost_INCLUDE_DIRS})
 vtk_module_test_executable(TestVariantSerialization TestVariantSerialization.cxx
   )
 
-target_link_libraries(TestVariantSerialization ${Boost_SERIALIZATION_LIBRARY})
+target_link_libraries(TestVariantSerialization LINK_PRIVATE ${Boost_SERIALIZATION_LIBRARY})
 
 add_test(NAME ${vtk-module}-TestVariantSerialization COMMAND TestVariantSerialization)
diff --git a/Infovis/BoostGraphAlgorithms/Testing/Cxx/CMakeLists.txt b/Infovis/BoostGraphAlgorithms/Testing/Cxx/CMakeLists.txt
index f1bab5a..811ad72 100644
--- a/Infovis/BoostGraphAlgorithms/Testing/Cxx/CMakeLists.txt
+++ b/Infovis/BoostGraphAlgorithms/Testing/Cxx/CMakeLists.txt
@@ -1,36 +1,16 @@
 find_package(Boost REQUIRED)
 include_directories(${Boost_INCLUDE_DIRS})
 
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   # BoostArrayLogWeighting.cxx    # todo (unsatisfied deps)
-  BoostArrayRandomSparseArraySource.cxx
-  TestBoostAdapter.cxx
+  BoostArrayRandomSparseArraySource.cxx,NO_VALID
+  TestBoostAdapter.cxx,NO_VALID
   TestBoostAlgorithms.cxx
-  TestBoostBetweennessClustering.cxx
+  TestBoostBetweennessClustering.cxx,NO_VALID
   # TestBoostBrandesCentrality.cxx # todo (unsatisfied deps)
   TestBoostDividedEdgeBundling.cxx
-  TestBoostExtractLargestComponent.cxx
-  TestBoostSplitTableField.cxx
+  TestBoostExtractLargestComponent.cxx,NO_VALID
+  TestBoostSplitTableField.cxx,NO_VALID
+  )
 
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Infovis/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Infovis/BoostGraphAlgorithms/Testing/Cxx/TestBoostAdapter.cxx b/Infovis/BoostGraphAlgorithms/Testing/Cxx/TestBoostAdapter.cxx
index d238a86..f2f3205 100644
--- a/Infovis/BoostGraphAlgorithms/Testing/Cxx/TestBoostAdapter.cxx
+++ b/Infovis/BoostGraphAlgorithms/Testing/Cxx/TestBoostAdapter.cxx
@@ -65,10 +65,10 @@ void TestTraversal(Graph g, int repeat, int& vtkNotUsed(errors))
   int count = 0;
   for (int r = 0; r < repeat; r++)
     {
-    for (tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
+    for (boost::tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
       {
       typename graph_traits<Graph>::out_edge_iterator oi, oiEnd;
-      tie(oi, oiEnd) = out_edges(*vi, g);
+      boost::tie(oi, oiEnd) = out_edges(*vi, g);
       count++;
       }
     }
@@ -85,10 +85,10 @@ void TestTraversal(Graph g, int repeat, int& vtkNotUsed(errors))
   count = 0;
   for (int r = 0; r < repeat; r++)
     {
-    for (tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
+    for (boost::tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
       {
       typename graph_traits<Graph>::out_edge_iterator oi, oiEnd;
-      for (tie(oi, oiEnd) = out_edges(*vi, g); oi != oiEnd; ++oi)
+      for (boost::tie(oi, oiEnd) = out_edges(*vi, g); oi != oiEnd; ++oi)
         {
         count++;
         }
@@ -105,10 +105,10 @@ void TestTraversal(Graph g, int repeat, int& vtkNotUsed(errors))
     {
     edge_vec.clear();
     vert_vec.clear();
-    for (tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
+    for (boost::tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
       {
       typename graph_traits<Graph>::out_edge_iterator oi, oiEnd;
-      for (tie(oi, oiEnd) = out_edges(*vi, g); oi != oiEnd; ++oi)
+      for (boost::tie(oi, oiEnd) = out_edges(*vi, g); oi != oiEnd; ++oi)
         {
         edge_vec.push_back(e);
         vert_vec.push_back(v);
@@ -127,10 +127,10 @@ void TestTraversal(Graph g, int repeat, int& vtkNotUsed(errors))
     {
     edge_vec.clear();
     vert_vec.clear();
-    for (tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
+    for (boost::tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
       {
       typename graph_traits<Graph>::out_edge_iterator oi, oiEnd;
-      for (tie(oi, oiEnd) = out_edges(*vi, g); oi != oiEnd; ++oi)
+      for (boost::tie(oi, oiEnd) = out_edges(*vi, g); oi != oiEnd; ++oi)
         {
         Edge e1 = *oi;
         edge_vec.push_back(e1);
@@ -150,10 +150,10 @@ void TestTraversal(Graph g, int repeat, int& vtkNotUsed(errors))
     {
     edge_vec.clear();
     vert_vec.clear();
-    for (tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
+    for (boost::tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
       {
       typename graph_traits<Graph>::out_edge_iterator oi, oiEnd;
-      for (tie(oi, oiEnd) = out_edges(*vi, g); oi != oiEnd; ++oi)
+      for (boost::tie(oi, oiEnd) = out_edges(*vi, g); oi != oiEnd; ++oi)
         {
         Edge e1 = *oi;
         edge_vec.push_back(e1);
@@ -197,7 +197,7 @@ void TestGraph(Graph g, vtkIdType numVertices, vtkIdType numEdges, int repeat, i
     errors++;
     }
 
-  for (tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
+  for (boost::tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
     {
     graphVerts.push_back(*vi);
     }
@@ -220,7 +220,7 @@ void TestGraph(Graph g, vtkIdType numVertices, vtkIdType numEdges, int repeat, i
     }
 
   typename graph_traits<Graph>::edge_iterator ei, eiEnd;
-  for (tie(ei, eiEnd) = edges(g); ei != eiEnd; ++ei)
+  for (boost::tie(ei, eiEnd) = edges(g); ei != eiEnd; ++ei)
     {
     graphEdges.push_back(*ei);
     }
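
For reference only (not part of the upstream patch): the hunks above qualify every unpacking call as boost::tie when iterating BGL vertex and edge ranges. A minimal standalone sketch of that same pattern, using a hypothetical boost::adjacency_list rather than the VTK adapter, would be:

// Reference sketch, not part of the patch: boost::tie is used fully
// qualified to unpack the iterator pair returned by vertices(), which is
// the pattern applied throughout TestBoostAdapter.cxx above.
#include <boost/graph/adjacency_list.hpp>
#include <boost/tuple/tuple.hpp>

int main()
{
  typedef boost::adjacency_list<> Graph;
  Graph g(3);
  add_edge(0, 1, g);
  add_edge(1, 2, g);

  boost::graph_traits<Graph>::vertex_iterator vi, viEnd;
  int count = 0;
  for (boost::tie(vi, viEnd) = vertices(g); vi != viEnd; ++vi)
    {
    ++count;  // visit each vertex once
    }
  return (count == 3) ? 0 : 1;
}
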
diff --git a/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostAlgorithms.png.md5 b/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostAlgorithms.png.md5
new file mode 100644
index 0000000..e54afd3
--- /dev/null
+++ b/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostAlgorithms.png.md5
@@ -0,0 +1 @@
+4f89185ff954e7d0035579237d856e21
diff --git a/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostAlgorithms_1.png.md5 b/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostAlgorithms_1.png.md5
new file mode 100644
index 0000000..2a2f777
--- /dev/null
+++ b/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostAlgorithms_1.png.md5
@@ -0,0 +1 @@
+bb00054446eef953fc547828428ce306
diff --git a/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostDividedEdgeBundling.png.md5 b/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostDividedEdgeBundling.png.md5
new file mode 100644
index 0000000..419bbd8
--- /dev/null
+++ b/Infovis/BoostGraphAlgorithms/Testing/Data/Baseline/TestBoostDividedEdgeBundling.png.md5
@@ -0,0 +1 @@
+3ced2929b620409623086a4c4463427d
diff --git a/Infovis/BoostGraphAlgorithms/vtkBoostBetweennessClustering.cxx b/Infovis/BoostGraphAlgorithms/vtkBoostBetweennessClustering.cxx
index 3d858b7..cf58f66 100644
--- a/Infovis/BoostGraphAlgorithms/vtkBoostBetweennessClustering.cxx
+++ b/Infovis/BoostGraphAlgorithms/vtkBoostBetweennessClustering.cxx
@@ -80,8 +80,6 @@ namespace boost
       centrality_type;
     typedef typename graph_traits<MutableGraph>::edge_iterator edge_iterator;
     typedef typename graph_traits<MutableGraph>::edge_descriptor edge_descriptor;
-    typedef typename graph_traits<MutableGraph>::vertices_size_type
-      vertices_size_type;
 
     if (has_no_edges(g)) return;
 
diff --git a/Infovis/BoostGraphAlgorithms/vtkBoostGraphAdapter.h b/Infovis/BoostGraphAlgorithms/vtkBoostGraphAdapter.h
index 52b1a90..39757e7 100644
--- a/Infovis/BoostGraphAlgorithms/vtkBoostGraphAdapter.h
+++ b/Infovis/BoostGraphAlgorithms/vtkBoostGraphAdapter.h
@@ -131,6 +131,12 @@ namespace boost {
   {
     arr->InsertVariantValue(key, value);
   }
+#if defined(_MSC_VER)
+  namespace detail {
+       using ::boost::get;
+       using ::boost::put;
+  }
+#endif
 }
 
 #include <vtksys/stl/utility> // STL Header
diff --git a/Infovis/Core/CMakeLists.txt b/Infovis/Core/CMakeLists.txt
index 55972a6..98cad7f 100644
--- a/Infovis/Core/CMakeLists.txt
+++ b/Infovis/Core/CMakeLists.txt
@@ -21,6 +21,7 @@ set(Module_SRCS
   vtkPipelineGraphSource.cxx
   vtkPruneTreeFilter.cxx
   vtkRandomGraphSource.cxx
+  vtkReduceTable.cxx
   vtkRemoveIsolatedVertices.cxx
   vtkSparseArrayToTable.cxx
   vtkStreamGraph.cxx
diff --git a/Infovis/Core/Testing/Cxx/CMakeLists.txt b/Infovis/Core/Testing/Cxx/CMakeLists.txt
index 635dfe5..fdd5ffe 100644
--- a/Infovis/Core/Testing/Cxx/CMakeLists.txt
+++ b/Infovis/Core/Testing/Cxx/CMakeLists.txt
@@ -1,49 +1,31 @@
-if(VTK_DATA_ROOT)
-  set (NEEDS_DATA TestStringToNumeric.cxx TestMergeTables.cxx)
-endif()
+set (NEEDS_DATA
+  TestStringToNumeric.cxx,NO_VALID
+  TestMergeTables.cxx,NO_VALID
+  )
 
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   ${NEEDS_DATA}
-  ArraySparseArrayToTable.cxx
-  ArrayTableToSparseArray.cxx
-  ArrayToTable.cxx
-  ArrayTransposeMatrix.cxx
+  ArraySparseArrayToTable.cxx,NO_VALID
+  ArrayTableToSparseArray.cxx,NO_VALID
+  ArrayToTable.cxx,NO_VALID
+  ArrayTransposeMatrix.cxx,NO_VALID
   # TestArrayNorm.cxx             # todo (unsatisfied deps)
-  TestCollapseVerticesByArray.cxx
+  TestCollapseVerticesByArray.cxx,NO_VALID
   # TestConvexHull2D.cxx          # todo (unsatisfied deps)
-  TestDataObjectToTable.cxx
-  TestExtractSelectedTree.cxx
-  TestExtractSelectedGraph.cxx
+  TestDataObjectToTable.cxx,NO_VALID
+  TestExtractSelectedTree.cxx,NO_VALID
+  TestExtractSelectedGraph.cxx,NO_VALID
   TestGraphAlgorithms.cxx
-  TestMergeGraphs.cxx
+  TestMergeGraphs.cxx,NO_VALID
   TestPruneTreeFilter.cxx
-  TestRandomGraphSource.cxx
-  TestRemoveIsolatedVertices.cxx
-  TestStreamGraph.cxx
-  TestTableToArray.cxx
-  TestThresholdTable.cxx
-  TestTreeDifferenceFilter.cxx
+  TestRandomGraphSource.cxx,NO_VALID
+  TestReduceTable.cxx,NO_VALID
+  TestRemoveIsolatedVertices.cxx,NO_VALID
+  TestStreamGraph.cxx,NO_VALID
+  TestTableToArray.cxx,NO_VALID
+  TestThresholdTable.cxx,NO_VALID
+  TestTreeDifferenceFilter.cxx,NO_VALID
   # TestTimePoint.cxx             # deprecated dep
+  )
 
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Infovis/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Infovis/Core/Testing/Cxx/TestMergeGraphs.cxx b/Infovis/Core/Testing/Cxx/TestMergeGraphs.cxx
index 8f32389..fd95e40 100644
--- a/Infovis/Core/Testing/Cxx/TestMergeGraphs.cxx
+++ b/Infovis/Core/Testing/Cxx/TestMergeGraphs.cxx
@@ -77,35 +77,35 @@ bool CheckTable(vtkTable* expected, vtkTable* output)
   return ok;
 }
 
-std::string vert_data1[][3] = {
+const std::string vert_data1[][3] = {
   { "id", "arr1", "arr2" },
   { "v1", "a"   , "d" },
   { "v2", "b"   , "e" },
   { "v3", "c"   , "f" }
 };
 
-std::string vert_data2[][3] = {
+const std::string vert_data2[][3] = {
   { "id", "arr2", "arr3" },
   { "v2", "g"   , "j" },
   { "v3", "h"   , "k" },
   { "v4", "i"   , "l" }
 };
 
-std::string edge_data1[][4] = {
+const std::string edge_data1[][4] = {
   { "id", "src", "tgt", "extra" },
   { "e1", "v1" , "v2" , "m" },
   { "e2", "v2" , "v3" , "n" },
   { "e3", "v3" , "v1" , "o" }
 };
 
-std::string edge_data2[][3] = {
+const std::string edge_data2[][3] = {
   { "id", "src", "tgt" },
   { "e4", "v2" , "v3" },
   { "e5", "v3" , "v4" },
   { "e6", "v4" , "v2" }
 };
 
-std::string expected_vert_data[][3] = {
+const std::string expected_vert_data[][3] = {
   { "id", "arr1", "arr2" },
   { "v1", "a", "d" },
   { "v2", "b", "e" },
@@ -113,7 +113,7 @@ std::string expected_vert_data[][3] = {
   { "v4", "" , "i" }
 };
 
-std::string expected_edge_data[][4] = {
+const std::string expected_edge_data[][4] = {
   { "id", "src", "tgt", "extra" },
   { "e1", "v1" , "v2" , "m" },
   { "e2", "v2" , "v3" , "n" },
diff --git a/Infovis/Core/Testing/Cxx/TestReduceTable.cxx b/Infovis/Core/Testing/Cxx/TestReduceTable.cxx
new file mode 100644
index 0000000..87edf83
--- /dev/null
+++ b/Infovis/Core/Testing/Cxx/TestReduceTable.cxx
@@ -0,0 +1,127 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestReduceTable.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkReduceTable.h"
+
+#include "vtkIntArray.h"
+#include "vtkDoubleArray.h"
+#include "vtkNew.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+
+//----------------------------------------------------------------------------
+int TestReduceTable(int, char*[])
+{
+  vtkNew<vtkTable> table;
+
+  vtkNew<vtkStringArray> indexColumn;
+  indexColumn->SetNumberOfTuples(6);
+  indexColumn->SetValue(0,  "a");
+  indexColumn->SetValue(1,  "b");
+  indexColumn->SetValue(2,  "b");
+  indexColumn->SetValue(3,  "c");
+  indexColumn->SetValue(4,  "c");
+  indexColumn->SetValue(5,  "c");
+
+  vtkNew<vtkDoubleArray> meanColumn;
+  meanColumn->SetNumberOfTuples(6);
+  meanColumn->SetValue(0, 1.0);
+  meanColumn->SetValue(1, 1.0);
+  meanColumn->SetValue(2, 3.0);
+  meanColumn->SetValue(3, 1.0);
+  meanColumn->SetValue(4, 3.0);
+  meanColumn->SetValue(5, 5.0);
+
+  vtkNew<vtkIntArray> medianColumn;
+  medianColumn->SetNumberOfTuples(6);
+  medianColumn->SetValue(0, 2);
+  medianColumn->SetValue(1, 3);
+  medianColumn->SetValue(2, 5);
+  medianColumn->SetValue(3, 4);
+  medianColumn->SetValue(4, 6);
+  medianColumn->SetValue(5, 20);
+
+  vtkNew<vtkStringArray> modeColumn;
+  modeColumn->SetNumberOfTuples(6);
+  modeColumn->SetValue(0, "a");
+  modeColumn->SetValue(1, "b");
+  modeColumn->SetValue(2, "b");
+  modeColumn->SetValue(3, "c");
+  modeColumn->SetValue(4, "c");
+  modeColumn->SetValue(5, "d");
+
+  table->AddColumn(indexColumn.GetPointer());
+  table->AddColumn(meanColumn.GetPointer());
+  table->AddColumn(medianColumn.GetPointer());
+  table->AddColumn(modeColumn.GetPointer());
+
+  vtkNew<vtkReduceTable> filter;
+  filter->SetInputData(0, table.GetPointer());
+  filter->SetIndexColumn(0);
+  filter->SetReductionMethodForColumn(1, vtkReduceTable::MEAN);
+  filter->SetReductionMethodForColumn(2, vtkReduceTable::MEDIAN);
+  filter->SetReductionMethodForColumn(3, vtkReduceTable::MODE);
+  filter->Update();
+
+  vtkTable *output = filter->GetOutput();
+
+  if (output->GetValue(0, 1) != 1)
+    {
+    cout << "ERROR: incorrect value encountered at (0, 1)" << endl;
+    return EXIT_FAILURE;
+    }
+  if (output->GetValue(1, 1) != 2)
+    {
+    cout << "ERROR: incorrect value encountered at (1, 1)" << endl;
+    return EXIT_FAILURE;
+    }
+  if (output->GetValue(2, 1) != 3)
+    {
+    cout << "ERROR: incorrect value encountered at (2, 1)" << endl;
+    return EXIT_FAILURE;
+    }
+  if (output->GetValue(0, 2) != 2)
+    {
+    cout << "ERROR: incorrect value encountered at (0, 2)" << endl;
+    return EXIT_FAILURE;
+    }
+  if (output->GetValue(1, 2) != 4)
+    {
+    cout << "ERROR: incorrect value encountered at (1, 2)" << endl;
+    return EXIT_FAILURE;
+    }
+  if (output->GetValue(2, 2) != 6)
+    {
+    cout << "ERROR: incorrect value encountered at (2, 2)" << endl;
+    return EXIT_FAILURE;
+    }
+  if (output->GetValue(0, 3) != "a")
+    {
+    cout << "ERROR: incorrect value encountered at (0, 3)" << endl;
+    return EXIT_FAILURE;
+    }
+  if (output->GetValue(1, 3) != "b")
+    {
+    cout << "ERROR: incorrect value encountered at (1, 3)" << endl;
+    return EXIT_FAILURE;
+    }
+  if (output->GetValue(2, 3) != "c")
+    {
+    cout << "ERROR: incorrect value encountered at (2, 3)" << endl;
+    return EXIT_FAILURE;
+    }
+
+  return EXIT_SUCCESS;
+}
diff --git a/Infovis/Core/Testing/Data/Baseline/TestGraphAlgorithms.png.md5 b/Infovis/Core/Testing/Data/Baseline/TestGraphAlgorithms.png.md5
new file mode 100644
index 0000000..595e6eb
--- /dev/null
+++ b/Infovis/Core/Testing/Data/Baseline/TestGraphAlgorithms.png.md5
@@ -0,0 +1 @@
+8a4cbc80b25c35ae746ed97da55d2b1a
diff --git a/Infovis/Core/Testing/Data/Baseline/TestPruneTreeFilter.png.md5 b/Infovis/Core/Testing/Data/Baseline/TestPruneTreeFilter.png.md5
new file mode 100644
index 0000000..d10e4d6
--- /dev/null
+++ b/Infovis/Core/Testing/Data/Baseline/TestPruneTreeFilter.png.md5
@@ -0,0 +1 @@
+dbc04de9361aba1386d7d8b281d445f4
diff --git a/Infovis/Core/vtkAdjacencyMatrixToEdgeTable.cxx b/Infovis/Core/vtkAdjacencyMatrixToEdgeTable.cxx
index f08d41b..ef41064 100644
--- a/Infovis/Core/vtkAdjacencyMatrixToEdgeTable.cxx
+++ b/Infovis/Core/vtkAdjacencyMatrixToEdgeTable.cxx
@@ -31,6 +31,7 @@
 #include "vtkSmartPointer.h"
 #include "vtkTable.h"
 
+#include <algorithm>
 #include <vtksys/stl/map>
 #include <vtksys/stl/functional>
 
diff --git a/Infovis/Core/vtkCollapseVerticesByArray.cxx b/Infovis/Core/vtkCollapseVerticesByArray.cxx
index d53f536..dee3114 100644
--- a/Infovis/Core/vtkCollapseVerticesByArray.cxx
+++ b/Infovis/Core/vtkCollapseVerticesByArray.cxx
@@ -195,8 +195,6 @@ vtkGraph* vtkCollapseVerticesByArray::Create(vtkGraph* inGraph)
                                               vtkEdgeListIteratorRefPtr;
   typedef vtkSmartPointer<vtkVertexListIterator>
                                               vtkVertexListIteratorRefPtr;
-  typedef vtkSmartPointer<vtkStringArray>     vtkStringArrayRefPtr;
-  typedef vtkSmartPointer<vtkDoubleArray>     vtkDoubleArrayRefPtr;
   typedef vtkSmartPointer<vtkIntArray>        vtkIntArrayRefPtr;
   typedef std::pair<vtkVariant, vtkIdType> NameIdPair;
 
diff --git a/Infovis/Core/vtkEdgeCenters.h b/Infovis/Core/vtkEdgeCenters.h
index 38766e0..43abd73 100644
--- a/Infovis/Core/vtkEdgeCenters.h
+++ b/Infovis/Core/vtkEdgeCenters.h
@@ -53,7 +53,7 @@ public:
 
 protected:
   vtkEdgeCenters();
-  ~vtkEdgeCenters() {};
+  ~vtkEdgeCenters() {}
 
   virtual int RequestData(vtkInformation *, vtkInformationVector **, vtkInformationVector *);
   virtual int FillInputPortInformation(int port, vtkInformation *info);
diff --git a/Infovis/Core/vtkPruneTreeFilter.cxx b/Infovis/Core/vtkPruneTreeFilter.cxx
index c110b17..4704a1f 100644
--- a/Infovis/Core/vtkPruneTreeFilter.cxx
+++ b/Infovis/Core/vtkPruneTreeFilter.cxx
@@ -84,6 +84,9 @@ int vtkPruneTreeFilter::RequestData(
   builderVertexData->CopyAllocate(inputVertexData);
   builderEdgeData->CopyAllocate(inputEdgeData);
 
+  // Copy field data
+  builder->GetFieldData()->DeepCopy(inputTree->GetFieldData());
+
   // Build a copy of the tree, skipping the parent vertex to remove.
   vtksys_stl::vector< vtksys_stl::pair<vtkIdType, vtkIdType> > vertStack;
   if (inputTree->GetRoot() != this->ParentVertex)
diff --git a/Infovis/Core/vtkReduceTable.cxx b/Infovis/Core/vtkReduceTable.cxx
new file mode 100644
index 0000000..bd00193
--- /dev/null
+++ b/Infovis/Core/vtkReduceTable.cxx
@@ -0,0 +1,334 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkReduceTable.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkReduceTable.h"
+
+#include "vtkAbstractArray.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkObjectFactory.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
+#include "vtkTable.h"
+
+#include <algorithm>
+
+vtkStandardNewMacro(vtkReduceTable);
+//---------------------------------------------------------------------------
+vtkReduceTable::vtkReduceTable()
+{
+  this->SetNumberOfInputPorts(1);
+  this->SetNumberOfOutputPorts(1);
+  this->IndexColumn = -1;
+  this->NumericalReductionMethod = vtkReduceTable::MEAN;
+  this->NonNumericalReductionMethod = vtkReduceTable::MODE;
+}
+
+//---------------------------------------------------------------------------
+vtkReduceTable::~vtkReduceTable()
+{
+}
+
+//---------------------------------------------------------------------------
+int vtkReduceTable::RequestData(
+  vtkInformation*,
+  vtkInformationVector** inputVector,
+  vtkInformationVector* outputVector)
+{
+  if (this->IndexColumn == -1)
+    {
+    vtkWarningMacro(<< "Index column not set");
+    return 1;
+    }
+
+  // Get input table
+  vtkInformation* inputInfo = inputVector[0]->GetInformationObject(0);
+  vtkTable* input = vtkTable::SafeDownCast(
+    inputInfo->Get(vtkDataObject::DATA_OBJECT()));
+
+  if (this->IndexColumn < 0 ||
+      this->IndexColumn > input->GetNumberOfColumns() - 1)
+    {
+    vtkWarningMacro(<< "Index column exceeds bounds of input table");
+    return 1;
+    }
+
+  // Get output table
+  vtkInformation* outInfo = outputVector->GetInformationObject(0);
+  vtkTable* output = vtkTable::SafeDownCast(
+    outInfo->Get(vtkDataObject::DATA_OBJECT()));
+
+  this->InitializeOutputTable(input, output);
+  this->AccumulateIndexValues(input);
+
+  // set the number of rows in the output table
+  output->SetNumberOfRows(static_cast<vtkIdType>(this->IndexValues.size()));
+
+  this->PopulateIndexColumn(output);
+
+  // populate the data columns of the output table
+  for (vtkIdType col = 0; col < output->GetNumberOfColumns(); ++col)
+    {
+    if (col == this->IndexColumn)
+      {
+      continue;
+      }
+
+    this->PopulateDataColumn(input, output, col);
+
+    }
+
+  // Clean up pipeline information
+  int piece = -1;
+  int npieces = -1;
+  if (outInfo->Has(
+        vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER()))
+    {
+    piece = outInfo->Get(
+      vtkStreamingDemandDrivenPipeline::UPDATE_PIECE_NUMBER());
+    npieces = outInfo->Get(
+      vtkStreamingDemandDrivenPipeline::UPDATE_NUMBER_OF_PIECES());
+    }
+  output->GetInformation()->Set(vtkDataObject::DATA_NUMBER_OF_PIECES(), npieces);
+  output->GetInformation()->Set(vtkDataObject::DATA_PIECE_NUMBER(), piece);
+
+  return 1;
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::InitializeOutputTable(vtkTable *input, vtkTable *output)
+{
+  output->DeepCopy(input);
+  for (vtkIdType row = output->GetNumberOfRows() - 1; row > -1; --row)
+    {
+    output->RemoveRow(row);
+    }
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::AccumulateIndexValues(vtkTable *input)
+{
+  for (vtkIdType row = 0; row < input->GetNumberOfRows(); ++row)
+    {
+    vtkVariant value = input->GetValue(row, this->IndexColumn);
+    this->IndexValues.insert(value);
+    std::map<vtkVariant, std::vector<vtkIdType> >::iterator itr =
+      this->NewRowToOldRowsMap.find(value);
+    if (itr == this->NewRowToOldRowsMap.end())
+      {
+      std::vector<vtkIdType> v;
+      v.push_back(row);
+      this->NewRowToOldRowsMap[value] = v;
+      }
+    else
+      {
+      itr->second.push_back(row);
+      }
+    }
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::PopulateIndexColumn(vtkTable *output)
+{
+  vtkIdType row = 0;
+  for (std::set<vtkVariant>::iterator itr = this->IndexValues.begin();
+       itr != this->IndexValues.end(); ++itr)
+    {
+    output->SetValue(row, this->IndexColumn, *itr);
+    ++row;
+    }
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::PopulateDataColumn(vtkTable *input, vtkTable *output,
+                                        vtkIdType col)
+{
+  int reductionMethod = 0;
+
+  // check if this column has a reduction method
+  int columnSpecificMethod = this->GetReductionMethodForColumn(col);
+  if (columnSpecificMethod != -1)
+    {
+    reductionMethod = columnSpecificMethod;
+    }
+  else
+    {
+    // determine whether this column contains numerical data or not.
+    if (input->GetValue(0, col).IsNumeric())
+      {
+      reductionMethod = this->NumericalReductionMethod;
+      }
+    else
+      {
+      reductionMethod = this->NonNumericalReductionMethod;
+      }
+    }
+
+  for (vtkIdType row = 0; row < output->GetNumberOfRows(); ++row)
+    {
+    // look up the cells in the input table that should be represented by
+    // this cell in the output table
+    vtkVariant indexValue = output->GetValue(row, this->IndexColumn);
+    std::vector<vtkIdType> oldRows = this->NewRowToOldRowsMap[indexValue];
+
+    // special case: one-to-one mapping between input table and output table
+    // (no collapse necessary)
+    if (oldRows.size() == 1)
+      {
+      output->SetValue(row, col,
+        input->GetValue(this->NewRowToOldRowsMap[indexValue].at(0), col));
+      continue;
+      }
+
+    // otherwise, combine them appropriately & store the value in the
+    // output table
+    switch (reductionMethod)
+      {
+      case vtkReduceTable::MODE:
+        this->ReduceValuesToMode(input, output, row, col, oldRows);
+        break;
+      case vtkReduceTable::MEDIAN:
+        this->ReduceValuesToMedian(input, output, row, col, oldRows);
+        break;
+      case vtkReduceTable::MEAN:
+      default:
+        this->ReduceValuesToMean(input, output, row, col, oldRows);
+        break;
+      }
+    }
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::ReduceValuesToMean(vtkTable *input, vtkTable *output,
+                                        vtkIdType row, vtkIdType col,
+                                        std::vector<vtkIdType> oldRows)
+{
+  if (!input->GetValue(0, col).IsNumeric())
+    {
+    vtkErrorMacro(<< "Mean is unsupported for non-numerical data");
+    return;
+    }
+
+  double mean = 0.0;
+  for (std::vector<vtkIdType>::iterator itr = oldRows.begin();
+       itr != oldRows.end(); ++itr)
+    {
+    mean += input->GetValue(*itr, col).ToDouble();
+    }
+  mean /= oldRows.size();
+  output->SetValue(row, col, vtkVariant(mean));
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::ReduceValuesToMedian(vtkTable *input, vtkTable *output,
+                                          vtkIdType row, vtkIdType col,
+                                          std::vector<vtkIdType> oldRows)
+{
+  if (!input->GetValue(0, col).IsNumeric())
+    {
+    vtkErrorMacro(<< "Median is unsupported for non-numerical data");
+    return;
+    }
+
+  // generate a vector of values
+  std::vector<double> values;
+  for (std::vector<vtkIdType>::iterator itr = oldRows.begin();
+       itr != oldRows.end(); ++itr)
+    {
+    values.push_back(input->GetValue(*itr, col).ToDouble());
+    }
+
+  // sort it
+  std::sort(values.begin(), values.end());
+
+  // get the median and store it in the output table
+  if (values.size() % 2 == 1)
+    {
+    output->SetValue(row, col,
+      vtkVariant( values.at( (values.size() - 1) / 2 ) )
+    );
+    }
+  else
+    {
+    double d1 = values.at( (values.size() - 1) / 2 );
+    double d2 = values.at( values.size() / 2 );
+    output->SetValue(row, col, vtkVariant( (d1 + d2) / 2.0 ));
+    }
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::ReduceValuesToMode(vtkTable *input, vtkTable *output,
+                                        vtkIdType row, vtkIdType col,
+                                        std::vector<vtkIdType> oldRows)
+{
+  // setup a map to determine how frequently each value appears
+  std::map<vtkVariant, int> modeMap;
+  std::map<vtkVariant, int>::iterator mapItr;
+  for (std::vector<vtkIdType>::iterator vectorItr = oldRows.begin();
+       vectorItr != oldRows.end(); ++vectorItr)
+    {
+    vtkVariant v = input->GetValue(*vectorItr, col);
+    mapItr = modeMap.find(v);
+    if (mapItr == modeMap.end())
+      {
+      modeMap[v] = 1;
+      }
+    else
+      {
+      mapItr->second += 1;
+      }
+    }
+
+  // use our map to find the mode & store it in the output table
+  int maxCount = -1;
+  vtkVariant mode;
+  for (mapItr = modeMap.begin(); mapItr != modeMap.end(); ++mapItr)
+    {
+    if (mapItr->second > maxCount)
+      {
+      mode = mapItr->first;
+      maxCount = mapItr->second;
+      }
+    }
+  output->SetValue(row, col, mode);
+}
+
+//---------------------------------------------------------------------------
+int vtkReduceTable::GetReductionMethodForColumn(vtkIdType col)
+{
+  std::map<vtkIdType, int>::iterator itr =
+    this->ColumnReductionMethods.find(col);
+  if (itr != this->ColumnReductionMethods.end())
+    {
+    return itr->second;
+    }
+  return -1;
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::SetReductionMethodForColumn(vtkIdType col, int method)
+{
+  this->ColumnReductionMethods[col] = method;
+}
+
+//---------------------------------------------------------------------------
+void vtkReduceTable::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "IndexColumn: " << this->IndexColumn << endl;
+  os << indent << "NumericalReductionMethod: "
+     << this->NumericalReductionMethod << endl;
+  os << indent << "NonNumericalReductionMethod: "
+     << this->NonNumericalReductionMethod << endl;
+}
diff --git a/Infovis/Core/vtkReduceTable.h b/Infovis/Core/vtkReduceTable.h
new file mode 100644
index 0000000..5a77fa8
--- /dev/null
+++ b/Infovis/Core/vtkReduceTable.h
@@ -0,0 +1,156 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkReduceTable.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkReduceTable - combine some of the rows of a table
+//
+// .SECTION Description
+// Collapses the rows of the input table so that one particular
+// column (the IndexColumn) does not contain any duplicate values.
+// Thus the output table will have the same columns as the input
+// table, but potentially fewer rows.  One example use of this
+// class would be to generate a summary table from a table of
+// observations.
+// When two or more rows of the input table share a value in the
+// IndexColumn, the values from these rows will be combined on a
+// column-by-column basis.  By default, such numerical values will be
+// reduced to their mean, and non-numerical values will be reduced to
+// their mode.  This default behavior can be changed by calling
+// SetNumericalReductionMethod() or SetNonNumericalReductionMethod().
+// You can also specify the reduction method to use for a particular
+// column by calling SetReductionMethodForColumn().
+
+#ifndef __vtkReduceTable_h
+#define __vtkReduceTable_h
+
+#include "vtkInfovisCoreModule.h" // For export macro
+#include "vtkTableAlgorithm.h"
+
+#include <map>                   // For ivar
+#include <set>                   // For ivar
+#include <vector>                // For ivar
+
+class vtkVariant;
+
+class VTKINFOVISCORE_EXPORT vtkReduceTable : public vtkTableAlgorithm
+{
+public:
+  static vtkReduceTable* New();
+  vtkTypeMacro(vtkReduceTable,vtkTableAlgorithm);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Get/Set the column that will be used to reduce the input table.
+  // Any rows sharing a value in this column will be collapsed into
+  // a single row in the output table.
+  vtkGetMacro(IndexColumn, vtkIdType);
+  vtkSetMacro(IndexColumn, vtkIdType);
+
+  // Description:
+  // Get/Set the method that should be used to combine numerical
+  // values.
+  vtkGetMacro(NumericalReductionMethod, int);
+  vtkSetMacro(NumericalReductionMethod, int);
+
+  // Description:
+  // Get/Set the method that should be used to combine non-numerical
+  // values.
+  vtkGetMacro(NonNumericalReductionMethod, int);
+  vtkSetMacro(NonNumericalReductionMethod, int);
+
+  // Description:
+  // Get the method that should be used to combine the values within
+  // the specified column.  Returns -1 if no method has been set for
+  // this particular column.
+  int GetReductionMethodForColumn(vtkIdType col);
+
+  // Description:
+  // Set the method that should be used to combine the values within
+  // the specified column.
+  void SetReductionMethodForColumn(vtkIdType col, int method);
+
+  //BTX
+  // Description:
+  // Enum for methods of reduction
+  enum
+    {
+    MEAN,
+    MEDIAN,
+    MODE
+    };
+  //ETX
+
+protected:
+  vtkReduceTable();
+  ~vtkReduceTable();
+
+  int RequestData(
+    vtkInformation*,
+    vtkInformationVector**,
+    vtkInformationVector*);
+
+  // Description:
+  // Initialize the output table to have the same types of columns as
+  // the input table, but no rows.
+  void InitializeOutputTable(vtkTable *input, vtkTable *output);
+
+  // Description:
+  // Find the distinct values in the input table's index column.
+  // This function also populates the mapping of new row IDs to old row IDs.
+  void AccumulateIndexValues(vtkTable *input);
+
+  // Description:
+  // Populate the index column of the output table.
+  void PopulateIndexColumn(vtkTable *output);
+
+  // Description:
+  // Populate a non-index column of the output table.  This involves
+  // potentially combining multiple values from the input table into
+  // a single value for the output table.
+  void PopulateDataColumn(vtkTable *input, vtkTable *output, vtkIdType col);
+
+  // Description:
+  // Find the mean of a series of values from the input table
+  // and store it in the output table.
+  void ReduceValuesToMean(vtkTable *input, vtkTable *output,
+                          vtkIdType row, vtkIdType col,
+                          std::vector<vtkIdType> oldRows);
+
+  // Description:
+  // Find the median of a series of values from the input table
+  // and store it in the output table.
+  void ReduceValuesToMedian(vtkTable *input, vtkTable *output,
+                            vtkIdType row, vtkIdType col,
+                            std::vector<vtkIdType> oldRows);
+
+  // Description:
+  // Find the mode of a series of values from the input table
+  // and store it in the output table.
+  void ReduceValuesToMode(vtkTable *input, vtkTable *output,
+                          vtkIdType row, vtkIdType col,
+                          std::vector<vtkIdType> oldRows);
+
+  vtkIdType IndexColumn;
+  std::set<vtkVariant> IndexValues;
+  std::map<vtkVariant, std::vector<vtkIdType> > NewRowToOldRowsMap;
+  std::map<vtkIdType, int> ColumnReductionMethods;
+
+  int NumericalReductionMethod;
+  int NonNumericalReductionMethod;
+
+private:
+  vtkReduceTable(const vtkReduceTable&); // Not implemented
+  void operator=(const vtkReduceTable&);   // Not implemented
+};
+
+#endif
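
    The class description above is prose only, so a minimal usage sketch follows.
    It is illustrative rather than part of the imported sources: the "id"/"value"
    column names and the main() driver are invented for the example, and only the
    vtkReduceTable API declared above plus standard VTK 6 table and array classes
    (vtkTable, vtkIntArray, vtkDoubleArray, vtkSmartPointer) are assumed.

    #include "vtkDoubleArray.h"
    #include "vtkIntArray.h"
    #include "vtkReduceTable.h"
    #include "vtkSmartPointer.h"
    #include "vtkTable.h"

    int main()
    {
      // Build a four-row observation table: two rows share id 1, two share id 2.
      vtkSmartPointer<vtkIntArray> id = vtkSmartPointer<vtkIntArray>::New();
      id->SetName("id");
      vtkSmartPointer<vtkDoubleArray> value =
        vtkSmartPointer<vtkDoubleArray>::New();
      value->SetName("value");
      const int ids[4] = { 1, 1, 2, 2 };
      const double values[4] = { 1.0, 3.0, 10.0, 20.0 };
      for (int i = 0; i < 4; ++i)
        {
        id->InsertNextValue(ids[i]);
        value->InsertNextValue(values[i]);
        }
      vtkSmartPointer<vtkTable> table = vtkSmartPointer<vtkTable>::New();
      table->AddColumn(id);
      table->AddColumn(value);

      // Collapse rows that share a value in column 0 ("id").  Numerical columns
      // are reduced to their MEAN by default; column 1 is overridden to MEDIAN
      // here only to show the per-column API.
      vtkSmartPointer<vtkReduceTable> reduce =
        vtkSmartPointer<vtkReduceTable>::New();
      reduce->SetInputData(table);
      reduce->SetIndexColumn(0);
      reduce->SetReductionMethodForColumn(1, vtkReduceTable::MEDIAN);
      reduce->Update();

      // The output has two rows (ids 1 and 2); the reduced "value" column
      // holds 2.0 and 15.0.
      reduce->GetOutput()->Dump();
      return 0;
    }

    For this data the MEDIAN override and the MEAN default give the same collapsed
    values (2.0 for id 1, 15.0 for id 2), since each group holds exactly two entries.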
diff --git a/Infovis/Core/vtkTreeDifferenceFilter.cxx b/Infovis/Core/vtkTreeDifferenceFilter.cxx
index 0bf87ac..218b41b 100644
--- a/Infovis/Core/vtkTreeDifferenceFilter.cxx
+++ b/Infovis/Core/vtkTreeDifferenceFilter.cxx
@@ -160,6 +160,7 @@ bool vtkTreeDifferenceFilter::GenerateMapping(vtkTree *tree1, vtkTree *tree2)
 
   vtkIdType root1 = tree1->GetRoot();
   vtkIdType root2 = tree2->GetRoot();
+  this->VertexMap[root1] = root2;
 
   vtkIdType edgeId1 = -1;
   vtkIdType edgeId2 = -1;
diff --git a/Infovis/Layout/Testing/Cxx/CMakeLists.txt b/Infovis/Layout/Testing/Cxx/CMakeLists.txt
index 8e24655..a86d2ca 100644
--- a/Infovis/Layout/Testing/Cxx/CMakeLists.txt
+++ b/Infovis/Layout/Testing/Cxx/CMakeLists.txt
@@ -1,31 +1,11 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
+vtk_add_test_cxx(
   # TestAssignCoordinates.cxx # todo (unsatisfied deps)
-  # TestChacoGraphReader.cxx # todo (unsatisfied deps)
+  TestChacoGraphReader.cxx
   TestCirclePackLayoutStrategy.cxx
   # TestCosmicTreeLayoutStrategy.cxx # todo (unsatisfied deps)
-  TestGraphLayoutStrategy.cxx
-  TestIncrementalForceLayout.cxx
+  TestGraphLayoutStrategy.cxx,NO_VALID
+  TestIncrementalForceLayout.cxx,NO_VALID
   TestTreeMapLayoutStrategy.cxx
+  )
 
-  EXTRA_INCLUDE vtkTestDriver.h
-)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Infovis/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Infovis/Layout/Testing/Data/Baseline/TestChacoGraphReader.png.md5 b/Infovis/Layout/Testing/Data/Baseline/TestChacoGraphReader.png.md5
new file mode 100644
index 0000000..46fa5d1
--- /dev/null
+++ b/Infovis/Layout/Testing/Data/Baseline/TestChacoGraphReader.png.md5
@@ -0,0 +1 @@
+b009c5de088db628876eec042eeb8f6c
diff --git a/Infovis/Layout/Testing/Data/Baseline/TestCirclePackLayoutStrategy.png.md5 b/Infovis/Layout/Testing/Data/Baseline/TestCirclePackLayoutStrategy.png.md5
new file mode 100644
index 0000000..5a460b8
--- /dev/null
+++ b/Infovis/Layout/Testing/Data/Baseline/TestCirclePackLayoutStrategy.png.md5
@@ -0,0 +1 @@
+ddb5588c910ab9e5427f626d62f9a888
diff --git a/Infovis/Layout/Testing/Data/Baseline/TestCirclePackLayoutStrategy_1.png.md5 b/Infovis/Layout/Testing/Data/Baseline/TestCirclePackLayoutStrategy_1.png.md5
new file mode 100644
index 0000000..8a98def
--- /dev/null
+++ b/Infovis/Layout/Testing/Data/Baseline/TestCirclePackLayoutStrategy_1.png.md5
@@ -0,0 +1 @@
+6a340ead7be32b2eb6dabf01de15a8a4
diff --git a/Infovis/Layout/Testing/Data/Baseline/TestTreeMapLayoutStrategy.png.md5 b/Infovis/Layout/Testing/Data/Baseline/TestTreeMapLayoutStrategy.png.md5
new file mode 100644
index 0000000..5032ea2
--- /dev/null
+++ b/Infovis/Layout/Testing/Data/Baseline/TestTreeMapLayoutStrategy.png.md5
@@ -0,0 +1 @@
+bc565bd719118ea162b2a04f8ebeccfa
diff --git a/Infovis/Layout/vtkAttributeClustering2DLayoutStrategy.cxx b/Infovis/Layout/vtkAttributeClustering2DLayoutStrategy.cxx
index ee4e38c..52fb9a2 100644
--- a/Infovis/Layout/vtkAttributeClustering2DLayoutStrategy.cxx
+++ b/Infovis/Layout/vtkAttributeClustering2DLayoutStrategy.cxx
@@ -116,7 +116,7 @@ void vtkAttributeClustering2DLayoutStrategy::SetVertexAttribute(const char* att)
   // except for the call to Initialize at the end :)
   if ( this->VertexAttribute == NULL && att == NULL) { return;}
   if ( this->VertexAttribute && att && (!strcmp(this->VertexAttribute,att))) { return;}
-  if (this->VertexAttribute) { delete [] this->VertexAttribute; }
+  delete [] this->VertexAttribute;
   if (att)
     {
     size_t n = strlen(att) + 1;
diff --git a/Infovis/Layout/vtkEdgeLayoutStrategy.h b/Infovis/Layout/vtkEdgeLayoutStrategy.h
index 0726a41..50f9537 100644
--- a/Infovis/Layout/vtkEdgeLayoutStrategy.h
+++ b/Infovis/Layout/vtkEdgeLayoutStrategy.h
@@ -44,7 +44,7 @@ public:
   // This method allows the layout strategy to
   // do initialization of data structures
   // or whatever else it might want to do.
-  virtual void Initialize() {};
+  virtual void Initialize() {}
 
   // Description:
   // This is the layout method where the graph that was
diff --git a/Infovis/Layout/vtkForceDirectedLayoutStrategy.cxx b/Infovis/Layout/vtkForceDirectedLayoutStrategy.cxx
index 7a51f6c..44cff35 100644
--- a/Infovis/Layout/vtkForceDirectedLayoutStrategy.cxx
+++ b/Infovis/Layout/vtkForceDirectedLayoutStrategy.cxx
@@ -110,8 +110,8 @@ void vtkForceDirectedLayoutStrategy::Initialize()
       }
     }
 
-  if (this->v) delete[] this->v;
-  if (this->e) delete[] this->e;
+  delete[] this->v;
+  delete[] this->e;
   this->v = new vtkLayoutVertex[numVertices];
   this->e = new vtkLayoutEdge[numEdges];
 
diff --git a/Infovis/Layout/vtkGraphLayoutStrategy.cxx b/Infovis/Layout/vtkGraphLayoutStrategy.cxx
index e73d0b2..56868f3 100644
--- a/Infovis/Layout/vtkGraphLayoutStrategy.cxx
+++ b/Infovis/Layout/vtkGraphLayoutStrategy.cxx
@@ -78,7 +78,7 @@ void vtkGraphLayoutStrategy::SetEdgeWeightField(const char* weights)
   // except for the call to Initialize at the end :)
   if ( this->EdgeWeightField == NULL && weights == NULL) { return;}
   if ( this->EdgeWeightField && weights && (!strcmp(this->EdgeWeightField,weights))) { return;}
-  if (this->EdgeWeightField) { delete [] this->EdgeWeightField; }
+  delete [] this->EdgeWeightField;
   if (weights)
     {
     size_t n = strlen(weights) + 1;
diff --git a/Infovis/Layout/vtkGraphLayoutStrategy.h b/Infovis/Layout/vtkGraphLayoutStrategy.h
index 388abb8..6eb18e5 100644
--- a/Infovis/Layout/vtkGraphLayoutStrategy.h
+++ b/Infovis/Layout/vtkGraphLayoutStrategy.h
@@ -53,7 +53,7 @@ public:
   // This method allows the layout strategy to
   // do initialization of data structures
   // or whatever else it might want to do.
-  virtual void Initialize() {};
+  virtual void Initialize() {}
 
   // Description:
   // This is the layout method where the graph that was
diff --git a/Infovis/Layout/vtkIncrementalForceLayout.cxx b/Infovis/Layout/vtkIncrementalForceLayout.cxx
index d8c5ed1..dfa449f 100644
--- a/Infovis/Layout/vtkIncrementalForceLayout.cxx
+++ b/Infovis/Layout/vtkIncrementalForceLayout.cxx
@@ -11,6 +11,7 @@
 #include "vtkVariant.h"
 #include "vtkVector.h"
 
+#include <algorithm>
 #include <utility>
 #include <vector>
 
diff --git a/Infovis/Layout/vtkPerturbCoincidentVertices.cxx b/Infovis/Layout/vtkPerturbCoincidentVertices.cxx
index 79dc535..e47a6ca 100644
--- a/Infovis/Layout/vtkPerturbCoincidentVertices.cxx
+++ b/Infovis/Layout/vtkPerturbCoincidentVertices.cxx
@@ -197,7 +197,6 @@ struct Coord
     this->coord[0] = src[0];
     this->coord[1] = src[1];
     }
-  ~Coord() {}
 
    static double distance(Coord x,Coord y)
      {
diff --git a/Infovis/Parallel/CMakeLists.txt b/Infovis/Parallel/CMakeLists.txt
index b8fd203..092762e 100644
--- a/Infovis/Parallel/CMakeLists.txt
+++ b/Infovis/Parallel/CMakeLists.txt
@@ -18,4 +18,4 @@ set(Module_SRCS
   )
 
 vtk_module_library(vtkInfovisParallel ${Module_SRCS})
-target_link_libraries(vtkInfovisParallel ${Boost_LIBRARIES})
+target_link_libraries(vtkInfovisParallel LINK_PRIVATE ${Boost_LIBRARIES})
diff --git a/Infovis/Parallel/Testing/Cxx/CMakeLists.txt b/Infovis/Parallel/Testing/Cxx/CMakeLists.txt
index e1760c6..a314887 100644
--- a/Infovis/Parallel/Testing/Cxx/CMakeLists.txt
+++ b/Infovis/Parallel/Testing/Cxx/CMakeLists.txt
@@ -1,12 +1,12 @@
-add_test_mpi(PBGLNamedVertexGraph.cxx)
-add_test_mpi(PBGLRandomGraph.cxx)
-add_test_mpi(TestPBGLAlgorithms.cxx)
-add_test_mpi(TestPBGLCollapseGraph.cxx)
-add_test_mpi(TestPBGLCollectGraph.cxx)
-add_test_mpi(TestPBGLEdgesPedigrees.cxx)
-add_test_mpi(TestPBGLGraphSQLReader.cxx)
-add_test_mpi(TestPBGLGraphSQLReaderFile.cxx)
-add_test_mpi(TestPBGLPedigrees.cxx)
-add_test_mpi(TestPBGLPipeline.cxx)
-add_test_mpi(TestPRMATGraphSource.cxx)
-add_test_mpi(TestPRandomGraphSource.cxx)
+vtk_add_test_mpi(PBGLNamedVertexGraph.cxx)
+vtk_add_test_mpi(PBGLRandomGraph.cxx)
+vtk_add_test_mpi(TestPBGLAlgorithms.cxx)
+vtk_add_test_mpi(TestPBGLCollapseGraph.cxx)
+vtk_add_test_mpi(TestPBGLCollectGraph.cxx)
+vtk_add_test_mpi(TestPBGLEdgesPedigrees.cxx)
+vtk_add_test_mpi(TestPBGLGraphSQLReader.cxx)
+vtk_add_test_mpi(TestPBGLGraphSQLReaderFile.cxx)
+vtk_add_test_mpi(TestPBGLPedigrees.cxx)
+vtk_add_test_mpi(TestPBGLPipeline.cxx)
+vtk_add_test_mpi(TestPRMATGraphSource.cxx)
+vtk_add_test_mpi(TestPRandomGraphSource.cxx)
diff --git a/Infovis/Parallel/Testing/Cxx/TestPBGLAlgorithms.cxx b/Infovis/Parallel/Testing/Cxx/TestPBGLAlgorithms.cxx
index 3651321..eef78c2 100644
--- a/Infovis/Parallel/Testing/Cxx/TestPBGLAlgorithms.cxx
+++ b/Infovis/Parallel/Testing/Cxx/TestPBGLAlgorithms.cxx
@@ -38,7 +38,7 @@
 #include <vtksys/stl/vector>
 
 #include <stdlib.h>
-#include <assert.h>
+#include <cassert>
 
 using std::pair;
 using std::vector;
diff --git a/Infovis/Parallel/Testing/Cxx/TestPBGLGraphSQLReaderFile.cxx b/Infovis/Parallel/Testing/Cxx/TestPBGLGraphSQLReaderFile.cxx
index 7d8bcab..93ce734 100644
--- a/Infovis/Parallel/Testing/Cxx/TestPBGLGraphSQLReaderFile.cxx
+++ b/Infovis/Parallel/Testing/Cxx/TestPBGLGraphSQLReaderFile.cxx
@@ -55,7 +55,7 @@
 #include <boost/mpi/timer.hpp>
 #include <boost/lexical_cast.hpp>
 
-#include <assert.h>
+#include <cassert>
 
 //------------------------------------------------------------------------------
 // Definitions
diff --git a/Infovis/Parallel/vtkPBGLDistributedGraphHelper.cxx b/Infovis/Parallel/vtkPBGLDistributedGraphHelper.cxx
index a036093..1e524d1 100644
--- a/Infovis/Parallel/vtkPBGLDistributedGraphHelper.cxx
+++ b/Infovis/Parallel/vtkPBGLDistributedGraphHelper.cxx
@@ -19,7 +19,7 @@
  */
 #include "vtkPBGLDistributedGraphHelper.h"
 
-#include "assert.h"
+#include <cassert>
 #include "vtkGraph.h"
 #include "vtkGraphInternals.h"
 #include "vtkInformation.h"
diff --git a/Interaction/Style/Testing/Data/Baseline/TestFlyTo.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestFlyTo.png.md5
new file mode 100644
index 0000000..861ef69
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestFlyTo.png.md5
@@ -0,0 +1 @@
+e8aa7a819b247cbb51196c8bb6dd5381
diff --git a/Interaction/Style/Testing/Data/Baseline/TestInteractorStyleTerrain.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestInteractorStyleTerrain.png.md5
new file mode 100644
index 0000000..38f5c76
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestInteractorStyleTerrain.png.md5
@@ -0,0 +1 @@
+ab9bb8db07f516fe3dbd6aeff1b3774b
diff --git a/Interaction/Style/Testing/Data/Baseline/TestInteractorStyleTerrain_1.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestInteractorStyleTerrain_1.png.md5
new file mode 100644
index 0000000..5827295
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestInteractorStyleTerrain_1.png.md5
@@ -0,0 +1 @@
+00213752268a1a0a497406db0e4d8a63
diff --git a/Interaction/Style/Testing/Data/Baseline/TestStyleBaseSpike.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestStyleBaseSpike.png.md5
new file mode 100644
index 0000000..2312df7
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestStyleBaseSpike.png.md5
@@ -0,0 +1 @@
+b55f0dffd352cd5417612febc6638505
diff --git a/Interaction/Style/Testing/Data/Baseline/TestStyleJoystickActor.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestStyleJoystickActor.png.md5
new file mode 100644
index 0000000..0e87a49
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestStyleJoystickActor.png.md5
@@ -0,0 +1 @@
+585d9a74fd3b66d3d7adcfcaf16c0db4
diff --git a/Interaction/Style/Testing/Data/Baseline/TestStyleJoystickCamera.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestStyleJoystickCamera.png.md5
new file mode 100644
index 0000000..3917fce
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestStyleJoystickCamera.png.md5
@@ -0,0 +1 @@
+980ee5a624ca70d70e341a110aceb022
diff --git a/Interaction/Style/Testing/Data/Baseline/TestStyleRubberBandZoom.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestStyleRubberBandZoom.png.md5
new file mode 100644
index 0000000..2c9ff04
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestStyleRubberBandZoom.png.md5
@@ -0,0 +1 @@
+6502119ea064949962952793e03199ed
diff --git a/Interaction/Style/Testing/Data/Baseline/TestStyleTerrain.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestStyleTerrain.png.md5
new file mode 100644
index 0000000..24a0745
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestStyleTerrain.png.md5
@@ -0,0 +1 @@
+fd1cb7d17c0ab97174ad1be6377b1717
diff --git a/Interaction/Style/Testing/Data/Baseline/TestStyleTrackballActor.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestStyleTrackballActor.png.md5
new file mode 100644
index 0000000..313c872
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestStyleTrackballActor.png.md5
@@ -0,0 +1 @@
+53ab4852deb3b8daa9d3d82c467998b1
diff --git a/Interaction/Style/Testing/Data/Baseline/TestStyleTrackballCamera.png.md5 b/Interaction/Style/Testing/Data/Baseline/TestStyleTrackballCamera.png.md5
new file mode 100644
index 0000000..14b562d
--- /dev/null
+++ b/Interaction/Style/Testing/Data/Baseline/TestStyleTrackballCamera.png.md5
@@ -0,0 +1 @@
+595fa01f82211d45d35ceec89a8c6e8f
diff --git a/Interaction/Style/Testing/Python/CMakeLists.txt b/Interaction/Style/Testing/Python/CMakeLists.txt
index e59bbf3..6f7d0bf 100644
--- a/Interaction/Style/Testing/Python/CMakeLists.txt
+++ b/Interaction/Style/Testing/Python/CMakeLists.txt
@@ -1,14 +1,11 @@
 if(NOT VTK_OPENGL_HAS_OSMESA)
-  add_test_python(TestFlyTo.py Rendering)
-
-  if (VTK_DATA_ROOT)
-    add_test_python(TestStyleRubberBandZoom.py Rendering)
-    add_test_python(TestInteractorStyleTerrain.py Rendering)
-    add_test_python1(TestStyleBaseSpike.py Baseline/Rendering)
-    add_test_python1(TestStyleJoystickActor.py Baseline/Rendering)
-    add_test_python1(TestStyleJoystickCamera.py Baseline/Rendering)
-    add_test_python1(TestStyleTerrain.py Baseline/Rendering)
-    add_test_python1(TestStyleTrackballActor.py Baseline/Rendering)
-    add_test_python1(TestStyleTrackballCamera.py Baseline/Rendering)
-  endif()
+  vtk_add_test_python(TestFlyTo.py)
+  vtk_add_test_python(TestStyleRubberBandZoom.py)
+  vtk_add_test_python(TestInteractorStyleTerrain.py)
+  vtk_add_test_python(TestStyleBaseSpike.py NO_RT)
+  vtk_add_test_python(TestStyleJoystickActor.py NO_RT)
+  vtk_add_test_python(TestStyleJoystickCamera.py NO_RT)
+  vtk_add_test_python(TestStyleTerrain.py NO_RT)
+  vtk_add_test_python(TestStyleTrackballActor.py NO_RT)
+  vtk_add_test_python(TestStyleTrackballCamera.py NO_RT)
 endif()
diff --git a/Interaction/Style/Testing/Python/TestStyleBaseSpike.py b/Interaction/Style/Testing/Python/TestStyleBaseSpike.py
index 1d4a3f3..2d3d268 100755
--- a/Interaction/Style/Testing/Python/TestStyleBaseSpike.py
+++ b/Interaction/Style/Testing/Python/TestStyleBaseSpike.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestStyleBaseSpike.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Interaction/Style/Testing/Python/TestStyleJoystickActor.py b/Interaction/Style/Testing/Python/TestStyleJoystickActor.py
index 4bcb7b9..b1e848e 100755
--- a/Interaction/Style/Testing/Python/TestStyleJoystickActor.py
+++ b/Interaction/Style/Testing/Python/TestStyleJoystickActor.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestStyleJoystickActor.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Interaction/Style/Testing/Python/TestStyleJoystickCamera.py b/Interaction/Style/Testing/Python/TestStyleJoystickCamera.py
index d4fbd48..b4f96cc 100755
--- a/Interaction/Style/Testing/Python/TestStyleJoystickCamera.py
+++ b/Interaction/Style/Testing/Python/TestStyleJoystickCamera.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestStyleJoystickActor.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Interaction/Style/Testing/Python/TestStyleTerrain.py b/Interaction/Style/Testing/Python/TestStyleTerrain.py
index 2d51bd3..4740966 100755
--- a/Interaction/Style/Testing/Python/TestStyleTerrain.py
+++ b/Interaction/Style/Testing/Python/TestStyleTerrain.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestStyleTerrain.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Interaction/Style/Testing/Python/TestStyleTrackballActor.py b/Interaction/Style/Testing/Python/TestStyleTrackballActor.py
index 0306257..b605934 100755
--- a/Interaction/Style/Testing/Python/TestStyleTrackballActor.py
+++ b/Interaction/Style/Testing/Python/TestStyleTrackballActor.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestStyleTrackballActor.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Interaction/Style/Testing/Python/TestStyleTrackballCamera.py b/Interaction/Style/Testing/Python/TestStyleTrackballCamera.py
index ec1a577..d3579e3 100755
--- a/Interaction/Style/Testing/Python/TestStyleTrackballCamera.py
+++ b/Interaction/Style/Testing/Python/TestStyleTrackballCamera.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestStyleTrackballCamera.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Interaction/Style/Testing/Tcl/CMakeLists.txt b/Interaction/Style/Testing/Tcl/CMakeLists.txt
index 20a1582..28d786c 100644
--- a/Interaction/Style/Testing/Tcl/CMakeLists.txt
+++ b/Interaction/Style/Testing/Tcl/CMakeLists.txt
@@ -1,16 +1,14 @@
-if(VTK_DATA_ROOT)
-  if(NOT VTK_OPENGL_HAS_OSMESA)
-    add_test_tcl(TestInteractorStyleTerrain Rendering)
-    add_test_tcl(TestStyleRubberBandZoom Rendering)
-  endif()
+if(NOT VTK_OPENGL_HAS_OSMESA)
+  vtk_add_test_tcl(TestInteractorStyleTerrain)
+  vtk_add_test_tcl(TestStyleRubberBandZoom)
 endif()
 
 if(NOT VTK_OPENGL_HAS_OSMESA)
-  add_test_tcl(TestStyleJoystickCamera Rendering)
-  add_test_tcl(TestFlyTo Rendering)
-  add_test_tcl(TestStyleTerrain Rendering)
-  add_test_tcl(TestStyleTrackballActor Rendering)
-  add_test_tcl(TestStyleBaseSpike Rendering)
-  add_test_tcl(TestStyleTrackballCamera Rendering)
-  add_test_tcl(TestStyleJoystickActor Rendering)
+  vtk_add_test_tcl(TestStyleJoystickCamera)
+  vtk_add_test_tcl(TestFlyTo)
+  vtk_add_test_tcl(TestStyleTerrain)
+  vtk_add_test_tcl(TestStyleTrackballActor)
+  vtk_add_test_tcl(TestStyleBaseSpike)
+  vtk_add_test_tcl(TestStyleTrackballCamera)
+  vtk_add_test_tcl(TestStyleJoystickActor)
 endif()
diff --git a/Interaction/Style/module.cmake b/Interaction/Style/module.cmake
index b5afcd4..9803b32 100644
--- a/Interaction/Style/module.cmake
+++ b/Interaction/Style/module.cmake
@@ -3,4 +3,7 @@ vtk_module(vtkInteractionStyle
     Rendering
   IMPLEMENTS
     vtkRenderingCore
+  PRIVATE_DEPENDS
+    vtkFiltersSources
+    vtkFiltersExtraction
   )
diff --git a/Interaction/Style/vtkInteractorStyleImage.cxx b/Interaction/Style/vtkInteractorStyleImage.cxx
index 2e03022..66c127a 100644
--- a/Interaction/Style/vtkInteractorStyleImage.cxx
+++ b/Interaction/Style/vtkInteractorStyleImage.cxx
@@ -482,7 +482,12 @@ void vtkInteractorStyleImage::WindowLevel()
   this->WindowLevelCurrentPosition[0] = rwi->GetEventPosition()[0];
   this->WindowLevelCurrentPosition[1] = rwi->GetEventPosition()[1];
 
-  if (this->CurrentImageProperty)
+  if (this->HandleObservers &&
+      this->HasObserver(vtkCommand::WindowLevelEvent))
+    {
+    this->InvokeEvent(vtkCommand::WindowLevelEvent, this);
+    }
+  else if (this->CurrentImageProperty)
     {
     int *size = this->CurrentRenderer->GetSize();
 
@@ -541,10 +546,6 @@ void vtkInteractorStyleImage::WindowLevel()
 
     this->Interactor->Render();
     }
-  else
-    {
-    this->InvokeEvent(vtkCommand::WindowLevelEvent, this);
-    }
 }
 
 //----------------------------------------------------------------------------
diff --git a/Interaction/Widgets/Testing/Cxx/BoxWidget.cxx b/Interaction/Widgets/Testing/Cxx/BoxWidget.cxx
index e76e099..e578eb3 100644
--- a/Interaction/Widgets/Testing/Cxx/BoxWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/BoxWidget.cxx
@@ -45,7 +45,7 @@ public:
   vtkActor     *Actor;
 };
 
-char BoxWidgetEventLog[] =
+const char BoxWidgetEventLog[] =
   "# StreamVersion 1\n"
   "CharEvent 187 242 0 0 105 1 i\n"
   "KeyReleaseEvent 187 242 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/BoxWidget2.cxx b/Interaction/Widgets/Testing/Cxx/BoxWidget2.cxx
index c3a6bd3..ecbd7be 100644
--- a/Interaction/Widgets/Testing/Cxx/BoxWidget2.cxx
+++ b/Interaction/Widgets/Testing/Cxx/BoxWidget2.cxx
@@ -49,7 +49,7 @@ public:
   vtkActor     *Actor;
 };
 
-char BoxWidgetEventLog2[] =
+const char BoxWidgetEventLog2[] =
   "# StreamVersion 1\n"
   "CharEvent 187 242 0 0 105 1 i\n"
   "KeyReleaseEvent 187 242 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/CMakeLists.txt b/Interaction/Widgets/Testing/Cxx/CMakeLists.txt
index 8798fdb..7bb7942 100644
--- a/Interaction/Widgets/Testing/Cxx/CMakeLists.txt
+++ b/Interaction/Widgets/Testing/Cxx/CMakeLists.txt
@@ -1,4 +1,5 @@
-set(MyTests
+set(TerrainPolylineEditor_ARGS -ProjectionMode 2 -HeightOffset 100)
+vtk_add_test_cxx(
   BoxWidget.cxx
   BoxWidget2.cxx
   # ImagePlaneWidget.cxx # todo (unsatisfied deps)
@@ -40,10 +41,11 @@ set(MyTests
   TestPlaybackWidget.cxx
   TestPointHandleRepresentation3D.cxx
   TestProgrammaticPlacement.cxx
-  TestProp3DButtonRepresentation.cxx
+  TestProp3DButtonRepresentation.cxx,NO_VALID
   TestRectilinearWipeWidget.cxx
   # TestResliceCursorWidget2.cxx # todo (unsatisfied deps)
   # TestResliceCursorWidget3.cxx # todo (unsatisfied deps)
+  TestScalarBarWidget.cxx
   TestSeedWidget.cxx
   TestSeedWidget2.cxx
   TestSeedWidgetNonUniformRepresentations.cxx
@@ -51,6 +53,8 @@ set(MyTests
   TestSliderWidget2D.cxx
   # TestSplineWidget.cxx # todo (unsatisfied deps)
   TestTextWidget.cxx
+  )
+vtk_add_test_cxx(NO_VALID
   vtkAngleRepresentation2DTest1.cxx
   vtkAngleRepresentation3DTest1.cxx
   vtkAngleWidgetTest1.cxx
@@ -70,71 +74,32 @@ set(MyTests
   vtkSplineWidget2Test1.cxx
   vtkTextRepresentationTest1.cxx
   vtkTextWidgetTest1.cxx
-)
+  )
 
 # Tests with data
-if(VTK_DATA_ROOT)
-  set(MyTests
-    ${MyTests}
-    TestDijkstraGraphGeodesicPath.cxx
-    TestLineWidget.cxx
-    TestLineWidget2.cxx
-    TestPlaneWidget.cxx
-    TestPointWidget.cxx
-    TestPolyPlane.cxx
-    TestPolygonalHandleRepresentations.cxx
-    TestPolygonalRepresentationHandleWidget.cxx
-    TestPolygonalSurfaceConstrainedDistanceWidget.cxx
-    TestScalarBarWidget.cxx
-    TestSphereHandleWidget.cxx
-    TestSurfaceConstrainedHandleWidget.cxx
-    TestTensorProbeWidget.cxx
-    )
-endif()
+vtk_add_test_cxx(
+  TestDijkstraGraphGeodesicPath.cxx
+  TestLineWidget.cxx
+  TestLineWidget2.cxx
+  TestPlaneWidget.cxx
+  TestPointWidget.cxx
+  TestPolyPlane.cxx,NO_VALID
+  TestPolygonalHandleRepresentations.cxx
+  TestPolygonalRepresentationHandleWidget.cxx
+  TestPolygonalSurfaceConstrainedDistanceWidget.cxx
+  TestSphereHandleWidget.cxx
+  TestSurfaceConstrainedHandleWidget.cxx
+  TestTensorProbeWidget.cxx
+  )
 
 if(NOT VTK_OPENGL_HAS_OSMESA)
-  set(MyTests
-    ${MyTests} TestPickingManagerWidgets.cxx)
-endif()
-
-
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-list(REMOVE_ITEM TestsToRun TerrainPolylineEditor.cxx)
-
-if(VTK_DATA_ROOT)
-  add_test(NAME ${vtk-module}Cxx-TerrainPolylineEditor
-    COMMAND ${vtk-module}CxxTests TerrainPolylineEditor
-      -ProjectionMode 2
-      -HeightOffset 100
-      -D ${VTK_DATA_ROOT}
-      -T ${VTK_TEST_OUTPUT_DIR}
-      -V Baseline/Widgets/TerrainPolylineEditor.png)
+  vtk_add_test_cxx(TestPickingManagerWidgets.cxx)
 endif()
 
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Widgets/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
 
-if (VTK_DATA_ROOT)
-  set_tests_properties(${vtk-module}Cxx-TerrainPolylineEditor
-    PROPERTIES TIMEOUT 90)
-endif()
+set_tests_properties(${vtk-module}Cxx-TerrainPolylineEditor
+  PROPERTIES TIMEOUT 90)
 
 if(NOT VTK_OPENGL_HAS_OSMESA)
   set_tests_properties(${vtk-module}Cxx-TestPickingManagerSeedWidget
diff --git a/Interaction/Widgets/Testing/Cxx/TerrainPolylineEditor.cxx b/Interaction/Widgets/Testing/Cxx/TerrainPolylineEditor.cxx
index ad7dea8..eb46eb5 100644
--- a/Interaction/Widgets/Testing/Cxx/TerrainPolylineEditor.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TerrainPolylineEditor.cxx
@@ -40,7 +40,7 @@
 #include "vtkInteractorEventRecorder.h"
 #include "vtkTestUtilities.h"
 
-char TerrainPolylineEditorLog[] =
+const char TerrainPolylineEditorLog[] =
 "# StreamVersion 1\n"
 "EnterEvent 522 259 0 0 0 0 0 i\n"
 "MouseMoveEvent 446 277 0 0 0 0 0 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestAngleWidget2D.cxx b/Interaction/Widgets/Testing/Cxx/TestAngleWidget2D.cxx
index 1803464..312a952 100644
--- a/Interaction/Widgets/Testing/Cxx/TestAngleWidget2D.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestAngleWidget2D.cxx
@@ -34,7 +34,7 @@
 #include "vtkProperty2D.h"
 #include "vtkTesting.h"
 
-char TestAngleWidget2DEventLog[] =
+const char TestAngleWidget2DEventLog[] =
   "# StreamVersion 1\n"
   "EnterEvent 185 1 0 0 0 0 0\n"
   "MouseMoveEvent 179 19 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestAngleWidget3D.cxx b/Interaction/Widgets/Testing/Cxx/TestAngleWidget3D.cxx
index 9eec05e..4e222a9 100644
--- a/Interaction/Widgets/Testing/Cxx/TestAngleWidget3D.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestAngleWidget3D.cxx
@@ -36,7 +36,7 @@
 #include "vtkProperty2D.h"
 #include "vtkTesting.h"
 
-char TestAngleWidget3DEventLog[] =
+const char TestAngleWidget3DEventLog[] =
 "# StreamVersion 1\n"
 "RenderEvent 0 0 0 0 0 0 0\n"
 "EnterEvent 2 184 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestCellCentersPointPlacer.cxx b/Interaction/Widgets/Testing/Cxx/TestCellCentersPointPlacer.cxx
index eebc33b..847713d 100644
--- a/Interaction/Widgets/Testing/Cxx/TestCellCentersPointPlacer.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestCellCentersPointPlacer.cxx
@@ -54,7 +54,7 @@
 #include <vector>
 
 //---------------------------------------------------------------------------
-char TestCellCentersPointPlacerEventLog[] =
+const char TestCellCentersPointPlacerEventLog[] =
 "# StreamVersion 1\n"
 "EnterEvent 384 226 0 0 0 0 0\n"
 "MouseMoveEvent 384 226 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestDijkstraImageGeodesicPath.cxx b/Interaction/Widgets/Testing/Cxx/TestDijkstraImageGeodesicPath.cxx
index 9166ffd..9b41fa9 100644
--- a/Interaction/Widgets/Testing/Cxx/TestDijkstraImageGeodesicPath.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestDijkstraImageGeodesicPath.cxx
@@ -36,7 +36,7 @@
 #include "vtkRenderWindowInteractor.h"
 #include "vtkTestUtilities.h"
 
-char TestDijkstraImageGeodesicPathLog[] =
+const char TestDijkstraImageGeodesicPathLog[] =
   "# StreamVersion 1 i\n"
   "RenderEvent 0 0 0 0 0 0 0 i\n"
   "EnterEvent 399 96 0 0 0 0 0 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestDistanceWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestDistanceWidget.cxx
index 6d21569..add5b65 100644
--- a/Interaction/Widgets/Testing/Cxx/TestDistanceWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestDistanceWidget.cxx
@@ -37,7 +37,7 @@
 #define VTK_CREATE(type, name) \
   vtkSmartPointer<type> name = vtkSmartPointer<type>::New()
 
-char TestDistanceWidgetEventLog[] =
+const char TestDistanceWidgetEventLog[] =
 "# StreamVersion 1\n"
 "RenderEvent 0 0 0 0 0 0 0\n"
 "EnterEvent 292 123 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestDistanceWidget3D.cxx b/Interaction/Widgets/Testing/Cxx/TestDistanceWidget3D.cxx
index e57abbd..0fa653b 100644
--- a/Interaction/Widgets/Testing/Cxx/TestDistanceWidget3D.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestDistanceWidget3D.cxx
@@ -40,7 +40,7 @@
 #define VTK_CREATE(type, name) \
   vtkSmartPointer<type> name = vtkSmartPointer<type>::New()
 
-char TestDistanceWidget3DEventLog[] =
+const char TestDistanceWidget3DEventLog[] =
 "# StreamVersion 1\n"
 "RenderEvent 0 0 0 0 0 0 0\n"
 "EnterEvent 292 123 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestFixedSizeHandleRepresentation3D.cxx b/Interaction/Widgets/Testing/Cxx/TestFixedSizeHandleRepresentation3D.cxx
index 26a4f57..23014a9 100644
--- a/Interaction/Widgets/Testing/Cxx/TestFixedSizeHandleRepresentation3D.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestFixedSizeHandleRepresentation3D.cxx
@@ -39,7 +39,7 @@
 #include "vtkTestUtilities.h"
 #include "vtkTesting.h"
 
-char TestFixedSizeHandleRepresentation3DLog[] =
+const char TestFixedSizeHandleRepresentation3DLog[] =
 "# StreamVersion 1\n"
 "RenderEvent 0 0 0 0 0 0 0\n"
 "EnterEvent 156 11 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestImageTracerWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestImageTracerWidget.cxx
index b7bf628..eca863b 100644
--- a/Interaction/Widgets/Testing/Cxx/TestImageTracerWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestImageTracerWidget.cxx
@@ -42,7 +42,7 @@
 
 #include "vtkTestUtilities.h"
 
-char ImageTracerWidgetEventLog[] =
+const char ImageTracerWidgetEventLog[] =
   "# StreamVersion 1\n"
   "MouseMoveEvent 322 145 0 0 0 0  b\n"
   "LeftButtonPressEvent 322 145 0 0 0 0  b\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget.cxx
index 8090c8a..f4316c1 100644
--- a/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget.cxx
@@ -32,7 +32,7 @@
 #include "vtkRenderer.h"
 #include "vtkSphereSource.h"
 
-char eventLog[] =
+const char eventLog[] =
   "# StreamVersion 1\n"
   "CharEvent 108 202 0 0 105 1 i\n"
   "KeyReleaseEvent 108 202 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget2.cxx b/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget2.cxx
index 00fc400..927d72f 100644
--- a/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget2.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget2.cxx
@@ -33,7 +33,7 @@
 #include "vtkRenderer.h"
 #include "vtkSphereSource.h"
 
-char eventLog2[] =
+const char eventLog2[] =
   "# StreamVersion 1\n"
   "CharEvent 108 202 0 0 105 1 i\n"
   "KeyReleaseEvent 108 202 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget2LockNormalToCamera.cxx b/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget2LockNormalToCamera.cxx
index 25ba6a5..de08e29 100644
--- a/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget2LockNormalToCamera.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestImplicitPlaneWidget2LockNormalToCamera.cxx
@@ -36,7 +36,7 @@
 #include "vtkSmartPointer.h"
 #include "vtkSphereSource.h"
 
-char eventLog2LockNormalToCamera[] =
+const char eventLog2LockNormalToCamera[] =
   "# StreamVersion 1\n"
   "CharEvent 108 202 0 0 105 1 i\n"
   "KeyReleaseEvent 108 202 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestLineWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestLineWidget.cxx
index 5b42f54..2bc235f 100644
--- a/Interaction/Widgets/Testing/Cxx/TestLineWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestLineWidget.cxx
@@ -34,7 +34,7 @@
 
 #include "vtkTestUtilities.h"
 
-char TestLineWidgetEventLog[] =
+const char TestLineWidgetEventLog[] =
   "# StreamVersion 1\n"
   "CharEvent 185 179 0 0 105 1 i\n"
   "KeyReleaseEvent 185 179 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestLineWidget2.cxx b/Interaction/Widgets/Testing/Cxx/TestLineWidget2.cxx
index 72551a4..2031768 100644
--- a/Interaction/Widgets/Testing/Cxx/TestLineWidget2.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestLineWidget2.cxx
@@ -35,7 +35,7 @@
 
 #include "vtkTestUtilities.h"
 
-char TestLineWidget2EventLog[] =
+const char TestLineWidget2EventLog[] =
   "# StreamVersion 1\n"
   "CharEvent 185 179 0 0 105 1 i\n"
   "KeyReleaseEvent 185 179 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestLogoWidgetAlphaBlending.cxx b/Interaction/Widgets/Testing/Cxx/TestLogoWidgetAlphaBlending.cxx
index af983cf..9526158 100644
--- a/Interaction/Widgets/Testing/Cxx/TestLogoWidgetAlphaBlending.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestLogoWidgetAlphaBlending.cxx
@@ -48,7 +48,7 @@ int TestLogoWidgetAlphaBlending( int argc, char *argv[] )
   vtkSmartPointer<vtkRenderWindow> renWin =
     vtkSmartPointer<vtkRenderWindow>::New();
   renWin->AddRenderer(ren1);
-  renWin->SetMultiSamples(1);
+  renWin->SetMultiSamples(0);
   renWin->SetAlphaBitPlanes(1);
 
   ren1->SetUseDepthPeeling(0);
diff --git a/Interaction/Widgets/Testing/Cxx/TestLogoWidgetDepthPeeling.cxx b/Interaction/Widgets/Testing/Cxx/TestLogoWidgetDepthPeeling.cxx
index ffdb5d4..f0a4239 100644
--- a/Interaction/Widgets/Testing/Cxx/TestLogoWidgetDepthPeeling.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestLogoWidgetDepthPeeling.cxx
@@ -47,7 +47,7 @@ int TestLogoWidgetDepthPeeling( int argc, char *argv[] )
   vtkSmartPointer<vtkRenderWindow> renWin =
     vtkSmartPointer<vtkRenderWindow>::New();
   renWin->AddRenderer(ren1);
-  renWin->SetMultiSamples(1);
+  renWin->SetMultiSamples(0);
   renWin->SetAlphaBitPlanes(1);
 
   ren1->SetUseDepthPeeling(1);
diff --git a/Interaction/Widgets/Testing/Cxx/TestOrientationMarkerWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestOrientationMarkerWidget.cxx
index cf72f05..60042d7 100644
--- a/Interaction/Widgets/Testing/Cxx/TestOrientationMarkerWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestOrientationMarkerWidget.cxx
@@ -38,7 +38,7 @@
 #include "vtkTransformPolyDataFilter.h"
 #include "vtkTubeFilter.h"
 
-char TestOMWidgetEventLog[] =
+const char TestOMWidgetEventLog[] =
   "# StreamVersion 1\n"
   "CharEvent 215 191 0 0 98 1 b\n"
   "KeyReleaseEvent 215 191 0 0 98 1 b\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestPickingManagerSeedWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestPickingManagerSeedWidget.cxx
index 8d4992b..2e3e0c0 100644
--- a/Interaction/Widgets/Testing/Cxx/TestPickingManagerSeedWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestPickingManagerSeedWidget.cxx
@@ -73,7 +73,7 @@
 #include <fstream>
 #include <list>
 
-char eventLogTestPickingManagerSeedWidget[] =
+const char eventLogTestPickingManagerSeedWidget[] =
   "# StreamVersion 1 \n"
   "EnterEvent 570 160 0 0 0 0 0 i\n"
   "MouseMoveEvent 570 160 0 0 0 0 0 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestPlaneWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestPlaneWidget.cxx
index 991c4c5..17e56c9 100644
--- a/Interaction/Widgets/Testing/Cxx/TestPlaneWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestPlaneWidget.cxx
@@ -34,7 +34,7 @@
 
 #include "vtkTestUtilities.h"
 
-char TPWeventLog[] =
+const char TPWeventLog[] =
   "# StreamVersion 1\n"
   "CharEvent 192 232 0 0 105 1 i\n"
   "KeyReleaseEvent 192 230 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestPointWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestPointWidget.cxx
index bdb4b09..965e53f 100644
--- a/Interaction/Widgets/Testing/Cxx/TestPointWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestPointWidget.cxx
@@ -34,7 +34,7 @@
 
 #include "vtkTestUtilities.h"
 
-char PointWidgetEventLog[] =
+const char PointWidgetEventLog[] =
   "# StreamVersion 1\n"
   "CharEvent 204 169 0 0 105 1 i\n"
   "KeyReleaseEvent 204 169 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestPolygonalSurfaceConstrainedDistanceWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestPolygonalSurfaceConstrainedDistanceWidget.cxx
index 013729e..5ae01c5 100644
--- a/Interaction/Widgets/Testing/Cxx/TestPolygonalSurfaceConstrainedDistanceWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestPolygonalSurfaceConstrainedDistanceWidget.cxx
@@ -55,7 +55,7 @@
 #include "vtkTestUtilities.h"
 #include "vtkTesting.h"
 
-char TestPolygonalSurfaceConstrainedDistanceWidgetLog[] =
+const char TestPolygonalSurfaceConstrainedDistanceWidgetLog[] =
   "# StreamVersion 1 i\n"
   "RenderEvent 0 0 0 0 0 0 0 i\n"
   "EnterEvent 293 1 0 0 0 0 0 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestScalarBarWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestScalarBarWidget.cxx
index 95abe19..9db7d83 100644
--- a/Interaction/Widgets/Testing/Cxx/TestScalarBarWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestScalarBarWidget.cxx
@@ -29,7 +29,7 @@
 
 #include "vtkTestUtilities.h"
 
-char TSBWeventLog[] =
+const char TSBWeventLog[] =
   "# StreamVersion 1\n"
   "CharEvent 153 168 0 0 105 1 i\n"
   "KeyReleaseEvent 153 168 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestSeedWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestSeedWidget.cxx
index 14f61b1..448bc9a 100644
--- a/Interaction/Widgets/Testing/Cxx/TestSeedWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestSeedWidget.cxx
@@ -38,7 +38,7 @@
 #include "vtkInteractorStyleTrackballCamera.h"
 
 
-char TestSeedWidgetEventLog[] =
+const char TestSeedWidgetEventLog[] =
   "# StreamVersion 1 i\n"
   "CharEvent 185 179 0 0 105 1 i\n"
   "KeyReleaseEvent 185 179 0 0 105 1 i\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestSliderWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestSliderWidget.cxx
index 453de83..f26c9e9 100644
--- a/Interaction/Widgets/Testing/Cxx/TestSliderWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestSliderWidget.cxx
@@ -41,7 +41,7 @@
 #include "vtkWidgetEvent.h"
 
 
-char TestSliderWidgetEventLog[] =
+const char TestSliderWidgetEventLog[] =
   "# StreamVersion 1\n"
   "EnterEvent 294 33 0 0 0 0 0\n"
   "MouseMoveEvent 286 41 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestSliderWidget2D.cxx b/Interaction/Widgets/Testing/Cxx/TestSliderWidget2D.cxx
index 52df86b..2168910 100644
--- a/Interaction/Widgets/Testing/Cxx/TestSliderWidget2D.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestSliderWidget2D.cxx
@@ -37,7 +37,7 @@
 #include "vtkWidgetEventTranslator.h"
 #include "vtkWidgetEvent.h"
 
-char TestSliderWidget2DEventLog[] =
+const char TestSliderWidget2DEventLog[] =
   "# StreamVersion 1\n"
   "EnterEvent 285 73 0 0 0 0 0\n"
   "MouseMoveEvent 265 83 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Cxx/TestSphereHandleWidget.cxx b/Interaction/Widgets/Testing/Cxx/TestSphereHandleWidget.cxx
index 30a8583..1c88b20 100644
--- a/Interaction/Widgets/Testing/Cxx/TestSphereHandleWidget.cxx
+++ b/Interaction/Widgets/Testing/Cxx/TestSphereHandleWidget.cxx
@@ -46,7 +46,7 @@
 #include "vtkDataSetWriter.h"
 #include "vtkMultiThreader.h"
 
-char TestSphereHandleWidgetEventLog[] =
+const char TestSphereHandleWidgetEventLog[] =
 "# StreamVersion 1\n"
 "MouseMoveEvent 181 152 0 0 0 0 0\n"
 "MouseMoveEvent 180 151 0 0 0 0 0\n"
diff --git a/Interaction/Widgets/Testing/Data/Baseline/BoxWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/BoxWidget.png.md5
new file mode 100644
index 0000000..aa04ffa
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/BoxWidget.png.md5
@@ -0,0 +1 @@
+0a37be24dd7d76a13ec64aff4132360e
diff --git a/Interaction/Widgets/Testing/Data/Baseline/BoxWidget2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/BoxWidget2.png.md5
new file mode 100644
index 0000000..52dba5b
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/BoxWidget2.png.md5
@@ -0,0 +1 @@
+47842c04f9b85afa5d65cc1e09313b15
diff --git a/Interaction/Widgets/Testing/Data/Baseline/BoxWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/BoxWidget_1.png.md5
new file mode 100644
index 0000000..1c3ca16
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/BoxWidget_1.png.md5
@@ -0,0 +1 @@
+43dd811476be3d77c70f1d8d792cb289
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TerrainPolylineEditor.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TerrainPolylineEditor.png.md5
new file mode 100644
index 0000000..138a62b
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TerrainPolylineEditor.png.md5
@@ -0,0 +1 @@
+a5f746404e48bd4cf8ee2eb705b19aad
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestAffineWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestAffineWidget.png.md5
new file mode 100644
index 0000000..01fb252
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestAffineWidget.png.md5
@@ -0,0 +1 @@
+808574137c553a91bf65a10803e4398c
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget2D.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget2D.png.md5
new file mode 100644
index 0000000..7bfdcf0
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget2D.png.md5
@@ -0,0 +1 @@
+f37cf0dc97d9e6dc1e9e67986f1539c5
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget2D_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget2D_1.png.md5
new file mode 100644
index 0000000..edb3267
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget2D_1.png.md5
@@ -0,0 +1 @@
+9d9f05f4cf3f8437728452af3f0bffad
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget3D.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget3D.png.md5
new file mode 100644
index 0000000..e3b9d8d
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestAngleWidget3D.png.md5
@@ -0,0 +1 @@
+7142b384f12b550e1c4351fb711839bc
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestBalloonWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestBalloonWidget.png.md5
new file mode 100644
index 0000000..e986e61
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestBalloonWidget.png.md5
@@ -0,0 +1 @@
+cb0b197f13207002139d334e21100ecf
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestBiDimensionalWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestBiDimensionalWidget.png.md5
new file mode 100644
index 0000000..7063b61
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestBiDimensionalWidget.png.md5
@@ -0,0 +1 @@
+818ea30b9841f4ba4d13f8ccbe4ba776
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestBorderWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestBorderWidget.png.md5
new file mode 100644
index 0000000..822bdb2
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestBorderWidget.png.md5
@@ -0,0 +1 @@
+25f789695f5dc6431bbc219b27cb9b8f
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestBorderWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestBorderWidget_1.png.md5
new file mode 100644
index 0000000..ad2d736
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestBorderWidget_1.png.md5
@@ -0,0 +1 @@
+7ad5d05f6a756f2751730188911322cd
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestBoxWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestBoxWidget.png.md5
new file mode 100644
index 0000000..21c5bb5
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestBoxWidget.png.md5
@@ -0,0 +1 @@
+0e84d1b38286d110e0b46e8470f8fbfb
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestCameraWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestCameraWidget.png.md5
new file mode 100644
index 0000000..8464680
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestCameraWidget.png.md5
@@ -0,0 +1 @@
+2b91cb708a0ca0bb0c77c7424b842c4f
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestCellCentersPointPlacer.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestCellCentersPointPlacer.png.md5
new file mode 100644
index 0000000..05ad7c7
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestCellCentersPointPlacer.png.md5
@@ -0,0 +1 @@
+de5399415d68f614b583f5dd2f9673b6
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestCenteredSliderWidget2D.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestCenteredSliderWidget2D.png.md5
new file mode 100644
index 0000000..2a005d9
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestCenteredSliderWidget2D.png.md5
@@ -0,0 +1 @@
+b3ad3e229d587d113ab640aac7a9ead0
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestCheckerboardWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestCheckerboardWidget.png.md5
new file mode 100644
index 0000000..2ce7e51
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestCheckerboardWidget.png.md5
@@ -0,0 +1 @@
+507d43366068abd231f2143a7b4a98a6
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestConstrainedHandleWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestConstrainedHandleWidget.png.md5
new file mode 100644
index 0000000..62c1369
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestConstrainedHandleWidget.png.md5
@@ -0,0 +1 @@
+5e5bf5e5b046ba97b8fa44c2292daaae
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestContourWidget2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestContourWidget2.png.md5
new file mode 100644
index 0000000..2ecdd58
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestContourWidget2.png.md5
@@ -0,0 +1 @@
+89ea66c4ea21caa7f97aaedbca3765fe
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraGraphGeodesicPath.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraGraphGeodesicPath.png.md5
new file mode 100644
index 0000000..84eddd9
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraGraphGeodesicPath.png.md5
@@ -0,0 +1 @@
+1752ce1889c5e7cd9da9adecc3a8ee9c
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraGraphGeodesicPath_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraGraphGeodesicPath_1.png.md5
new file mode 100644
index 0000000..05386b5
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraGraphGeodesicPath_1.png.md5
@@ -0,0 +1 @@
+93a3db60ace7e7fc6e54c278011312f6
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraImageGeodesicPath.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraImageGeodesicPath.png.md5
new file mode 100644
index 0000000..293dc5c
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestDijkstraImageGeodesicPath.png.md5
@@ -0,0 +1 @@
+a6ac612dd5ffb3c68ca1b5f8aac46a27
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget.png.md5
new file mode 100644
index 0000000..2532744
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget.png.md5
@@ -0,0 +1 @@
+eb691fb94fbf1f023d4b49b05aae05b6
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget3D.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget3D.png.md5
new file mode 100644
index 0000000..53dff7f
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget3D.png.md5
@@ -0,0 +1 @@
+6aa148750b527364b6b9e3cbdb5cf7ba
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget_1.png.md5
new file mode 100644
index 0000000..c6507bd
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestDistanceWidget_1.png.md5
@@ -0,0 +1 @@
+a612f86679fb3c12417e976d6a29603a
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestFixedSizeHandleRepresentation3D.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestFixedSizeHandleRepresentation3D.png.md5
new file mode 100644
index 0000000..a776154
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestFixedSizeHandleRepresentation3D.png.md5
@@ -0,0 +1 @@
+46bf6d76b10fc427d623c4b970fe8259
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestFocalPlaneContour.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestFocalPlaneContour.png.md5
new file mode 100644
index 0000000..1b26dd8
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestFocalPlaneContour.png.md5
@@ -0,0 +1 @@
+42110da9a34590fe70acd85b8d46e74e
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget.png.md5
new file mode 100644
index 0000000..709b80a
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget.png.md5
@@ -0,0 +1 @@
+2311d4ce9fa5e7a6ae3d78627c857b83
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget2D.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget2D.png.md5
new file mode 100644
index 0000000..e9b53dd
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget2D.png.md5
@@ -0,0 +1 @@
+e36d0753bcf97b5026bdff7fb87d1677
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget_1.png.md5
new file mode 100644
index 0000000..6b460c0
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget_1.png.md5
@@ -0,0 +1 @@
+da83f456aed06bf41316b21d75ec69dd
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget_2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget_2.png.md5
new file mode 100644
index 0000000..db4a761
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestHandleWidget_2.png.md5
@@ -0,0 +1 @@
+bd7aa0187854ec74f5d2c4d4ac6be1a0
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImagePlaneWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImagePlaneWidget.png.md5
new file mode 100644
index 0000000..32e07ad
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImagePlaneWidget.png.md5
@@ -0,0 +1 @@
+b77bd813c58867b979a5a7cc8b3be0ce
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget.png.md5
new file mode 100644
index 0000000..b9870d2
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget.png.md5
@@ -0,0 +1 @@
+f6c734dd3143b26ba09db9f03d10dcdc
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget_1.png.md5
new file mode 100644
index 0000000..a442712
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget_1.png.md5
@@ -0,0 +1 @@
+97ad8161a25f9736dbb95f94dbf1ca14
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget_2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget_2.png.md5
new file mode 100644
index 0000000..a8cea48
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImageTracerWidget_2.png.md5
@@ -0,0 +1 @@
+3b094610cbd251c6eaaa48d376a68f0c
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget.png.md5
new file mode 100644
index 0000000..c58dabe
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget.png.md5
@@ -0,0 +1 @@
+0fc0f8d8f089b2fbe8562f2a1239dd21
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2.png.md5
new file mode 100644
index 0000000..1a51bb6
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2.png.md5
@@ -0,0 +1 @@
+b33767ff2b92fbb639c33133f53b548c
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2LockNormalToCamera.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2LockNormalToCamera.png.md5
new file mode 100644
index 0000000..959bb98
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2LockNormalToCamera.png.md5
@@ -0,0 +1 @@
+62625eed92918516a6596dcd8831a6dd
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2LockNormalToCamera_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2LockNormalToCamera_1.png.md5
new file mode 100644
index 0000000..b8cfe8a
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2LockNormalToCamera_1.png.md5
@@ -0,0 +1 @@
+cfaecd21b492f6e0c92f82eb87f94416
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2_1.png.md5
new file mode 100644
index 0000000..ea75282
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2_1.png.md5
@@ -0,0 +1 @@
+3e33e75ad36453ee78d01142c9ccec3a
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2_2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2_2.png.md5
new file mode 100644
index 0000000..fefa66f
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2_2.png.md5
@@ -0,0 +1 @@
+4785e6d25c9f85c19dee002b8c1db8e5
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b.png.md5
new file mode 100644
index 0000000..f2a9868
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b.png.md5
@@ -0,0 +1 @@
+7fd7f89c7ff81c8ac401f26c5c8eeceb
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b_1.png.md5
new file mode 100644
index 0000000..9c6481c
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b_1.png.md5
@@ -0,0 +1 @@
+3fe73f5be2cc171741ef5913b19fb49d
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b_2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b_2.png.md5
new file mode 100644
index 0000000..aeb159e
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestImplicitPlaneWidget2b_2.png.md5
@@ -0,0 +1 @@
+25cf6cf775c6e88fb63028a29ab0fee1
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestInteractorEventRecorder.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestInteractorEventRecorder.png.md5
new file mode 100644
index 0000000..6865190
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestInteractorEventRecorder.png.md5
@@ -0,0 +1 @@
+c3e2001962afbcc48ede32fa28a5fafb
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestLineWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestLineWidget.png.md5
new file mode 100644
index 0000000..c395bb1
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestLineWidget.png.md5
@@ -0,0 +1 @@
+061614e52ffe7e846a3974207e4f280b
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestLineWidget2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestLineWidget2.png.md5
new file mode 100644
index 0000000..b436b4e
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestLineWidget2.png.md5
@@ -0,0 +1 @@
+cffffef2f90660a81040872519fbc03d
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidget.png.md5
new file mode 100644
index 0000000..62241e7
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidget.png.md5
@@ -0,0 +1 @@
+d60066713c4143a19c3fbffc5ae5014e
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidgetAlphaBlending.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidgetAlphaBlending.png.md5
new file mode 100644
index 0000000..ff60521
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidgetAlphaBlending.png.md5
@@ -0,0 +1 @@
+929ddcd0b8987acb5319c7ac5f4118e7
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidgetDepthPeeling.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidgetDepthPeeling.png.md5
new file mode 100644
index 0000000..414523d
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestLogoWidgetDepthPeeling.png.md5
@@ -0,0 +1 @@
+65a4baf0bf8a68db0189d3b255acea15
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget.png.md5
new file mode 100644
index 0000000..03b0008
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget.png.md5
@@ -0,0 +1 @@
+94d32f424b5a027673e4859b99d1678e
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget_1.png.md5
new file mode 100644
index 0000000..20ddd9c
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget_1.png.md5
@@ -0,0 +1 @@
+64010b1b6a33a242078580b74cc996c9
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget_2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget_2.png.md5
new file mode 100644
index 0000000..ab5d87c
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestOrientationMarkerWidget_2.png.md5
@@ -0,0 +1 @@
+fcd38ef29c5831f0e7a7253eddce5373
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestParallelopipedWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestParallelopipedWidget.png.md5
new file mode 100644
index 0000000..a6b8578
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestParallelopipedWidget.png.md5
@@ -0,0 +1 @@
+5f803c9ed07b324a88e29abbb3c934d2
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPickingManagerSeedWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPickingManagerSeedWidget.png.md5
new file mode 100644
index 0000000..33bfc23
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPickingManagerSeedWidget.png.md5
@@ -0,0 +1 @@
+6bda33182058d22c7bf04d727eec1d2a
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPickingManagerWidgets.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPickingManagerWidgets.png.md5
new file mode 100644
index 0000000..78eaf3b
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPickingManagerWidgets.png.md5
@@ -0,0 +1 @@
+9d40e61849ee27bfc1612034bab4f21f
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPlaneWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPlaneWidget.png.md5
new file mode 100644
index 0000000..43f1b52
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPlaneWidget.png.md5
@@ -0,0 +1 @@
+0259211e7c31a62bb3fd5b28f000845e
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPlaybackWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPlaybackWidget.png.md5
new file mode 100644
index 0000000..7d0bc93
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPlaybackWidget.png.md5
@@ -0,0 +1 @@
+430c6a5da7ce2437bdeefcb6eb100859
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPointHandleRepresentation3D.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPointHandleRepresentation3D.png.md5
new file mode 100644
index 0000000..ba00bef
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPointHandleRepresentation3D.png.md5
@@ -0,0 +1 @@
+03a4aff3b6a677a4e980f8802a137749
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPointWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPointWidget.png.md5
new file mode 100644
index 0000000..02fbfb0
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPointWidget.png.md5
@@ -0,0 +1 @@
+3c80b177c19a49f60be61066a4ef1ef6
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPointWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPointWidget_1.png.md5
new file mode 100644
index 0000000..f7d2f80
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPointWidget_1.png.md5
@@ -0,0 +1 @@
+06ffcda8564a9471bfe44ad2e11d3c53
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalHandleRepresentations.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalHandleRepresentations.png.md5
new file mode 100644
index 0000000..6395fc8
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalHandleRepresentations.png.md5
@@ -0,0 +1 @@
+763c9f063662c99dd5ee03ac2763c3e9
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalRepresentationHandleWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalRepresentationHandleWidget.png.md5
new file mode 100644
index 0000000..8835b1b
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalRepresentationHandleWidget.png.md5
@@ -0,0 +1 @@
+6c025c874cf7c6f3426679bff44c3e72
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalSurfaceConstrainedDistanceWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalSurfaceConstrainedDistanceWidget.png.md5
new file mode 100644
index 0000000..a40a3d6
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestPolygonalSurfaceConstrainedDistanceWidget.png.md5
@@ -0,0 +1 @@
+3431cff01cd79d118e8aa61dfc2a1147
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestProgrammaticPlacement.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestProgrammaticPlacement.png.md5
new file mode 100644
index 0000000..fa77463
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestProgrammaticPlacement.png.md5
@@ -0,0 +1 @@
+03f4dcf2961b937d4aa2fd7e5289461f
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestRectilinearWipeWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestRectilinearWipeWidget.png.md5
new file mode 100644
index 0000000..59f70f8
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestRectilinearWipeWidget.png.md5
@@ -0,0 +1 @@
+272ff485196c33c326ff2a0526aaf37d
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestRectilinearWipeWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestRectilinearWipeWidget_1.png.md5
new file mode 100644
index 0000000..764d269
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestRectilinearWipeWidget_1.png.md5
@@ -0,0 +1 @@
+77cda860be96ae2fd2108d70ad8b0b1d
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestScalarBarWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestScalarBarWidget.png.md5
new file mode 100644
index 0000000..ffefb09
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestScalarBarWidget.png.md5
@@ -0,0 +1 @@
+c5a7a5bf34ab101cfcc1df899fb7a76f
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidget.png.md5
new file mode 100644
index 0000000..f81a6d5
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidget.png.md5
@@ -0,0 +1 @@
+faf2bb6808f87f095546f1878be601c6
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidget2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidget2.png.md5
new file mode 100644
index 0000000..5677031
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidget2.png.md5
@@ -0,0 +1 @@
+d53b7d419225f188491daa3a810b811b
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations.png.md5
new file mode 100644
index 0000000..8b8b99f
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations.png.md5
@@ -0,0 +1 @@
+d8e25e660bc2fd6d937994d48daf59d4
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations_1.png.md5
new file mode 100644
index 0000000..79e9a04
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations_1.png.md5
@@ -0,0 +1 @@
+3b48a7ae7534616d2d336094a6bdfb6e
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations_2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations_2.png.md5
new file mode 100644
index 0000000..eeda6d3
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSeedWidgetNonUniformRepresentations_2.png.md5
@@ -0,0 +1 @@
+80e0cecaaf85ee29a18a943751502ef0
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget.png.md5
new file mode 100644
index 0000000..28596dd
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget.png.md5
@@ -0,0 +1 @@
+4c9bb3cc6ba469fc1d2969e9a5e5032b
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget2D.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget2D.png.md5
new file mode 100644
index 0000000..c7ca966
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget2D.png.md5
@@ -0,0 +1 @@
+bbbe89d21f52ce0646e620de5a55722e
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget2D_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget2D_1.png.md5
new file mode 100644
index 0000000..0f0e0fe
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSliderWidget2D_1.png.md5
@@ -0,0 +1 @@
+4f5580f0ba97b35308338f67317090b1
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSphereHandleWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSphereHandleWidget.png.md5
new file mode 100644
index 0000000..cbf8228
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSphereHandleWidget.png.md5
@@ -0,0 +1 @@
+88ed961e207c022fe6391a7334f667ad
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget.png.md5
new file mode 100644
index 0000000..62cbb02
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget.png.md5
@@ -0,0 +1 @@
+190416acc5e5b4433c1ed6c8b8a8ddd9
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_1.png.md5
new file mode 100644
index 0000000..9c9edc4
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_1.png.md5
@@ -0,0 +1 @@
+cc899a8042f4f5ee1d2b64dae88115e3
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_2.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_2.png.md5
new file mode 100644
index 0000000..d0db896
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_2.png.md5
@@ -0,0 +1 @@
+fcf2037a960b6d1aa6e9d9ea1e7f5ee6
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_3.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_3.png.md5
new file mode 100644
index 0000000..cead665
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSphereWidget_3.png.md5
@@ -0,0 +1 @@
+ee260182653dc69c6f396651e0740ca3
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSurfaceConstrainedHandleWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSurfaceConstrainedHandleWidget.png.md5
new file mode 100644
index 0000000..e83f732
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSurfaceConstrainedHandleWidget.png.md5
@@ -0,0 +1 @@
+182d3195561b7c749e980e94f7b81ee8
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestSurfaceConstrainedHandleWidget_1.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestSurfaceConstrainedHandleWidget_1.png.md5
new file mode 100644
index 0000000..95d7e60
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestSurfaceConstrainedHandleWidget_1.png.md5
@@ -0,0 +1 @@
+bb2709f4dec32950cfeec35f850b0d2a
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestTensorProbeWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestTensorProbeWidget.png.md5
new file mode 100644
index 0000000..9597060
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestTensorProbeWidget.png.md5
@@ -0,0 +1 @@
+04fe6d06cae2407f92c9bad051957cc9
diff --git a/Interaction/Widgets/Testing/Data/Baseline/TestTextWidget.png.md5 b/Interaction/Widgets/Testing/Data/Baseline/TestTextWidget.png.md5
new file mode 100644
index 0000000..53b3a59
--- /dev/null
+++ b/Interaction/Widgets/Testing/Data/Baseline/TestTextWidget.png.md5
@@ -0,0 +1 @@
+0a2ac843ff94fccc108dfef68568c88c
diff --git a/Interaction/Widgets/Testing/Python/CMakeLists.txt b/Interaction/Widgets/Testing/Python/CMakeLists.txt
index d83762f..65ca0e8 100644
--- a/Interaction/Widgets/Testing/Python/CMakeLists.txt
+++ b/Interaction/Widgets/Testing/Python/CMakeLists.txt
@@ -1,9 +1,7 @@
 if(NOT VTK_OPENGL_HAS_OSMESA)
-  add_test_python(TestImagePlaneWidget.py Widgets)
-  if (VTK_DATA_ROOT)
-    add_test_python1(TestBoxWidget.py Baseline/Widgets)
-    add_test_python1(TestInteractorEventRecorder.py Baseline/Widgets)
-    add_test_python1(TestSphereWidget.py Baseline/Widgets)
-    add_test_python1(TestTextWidget.py Baseline/Widgets)
-  endif()
+  vtk_add_test_python(TestImagePlaneWidget.py)
+  vtk_add_test_python(TestBoxWidget.py NO_RT)
+  vtk_add_test_python(TestInteractorEventRecorder.py NO_RT)
+  vtk_add_test_python(TestSphereWidget.py NO_RT)
+  vtk_add_test_python(TestTextWidget.py NO_RT)
 endif()
diff --git a/Interaction/Widgets/Testing/Python/TestBoxWidget.py b/Interaction/Widgets/Testing/Python/TestBoxWidget.py
index abcbfc2..7c94828 100755
--- a/Interaction/Widgets/Testing/Python/TestBoxWidget.py
+++ b/Interaction/Widgets/Testing/Python/TestBoxWidget.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestBoxWidget.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Widgets
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Interaction/Widgets/Testing/Python/TestImagePlaneWidget.py b/Interaction/Widgets/Testing/Python/TestImagePlaneWidget.py
index 6fdd073..98ad20c 100755
--- a/Interaction/Widgets/Testing/Python/TestImagePlaneWidget.py
+++ b/Interaction/Widgets/Testing/Python/TestImagePlaneWidget.py
@@ -1,9 +1,5 @@
 #!/usr/bin/env python
 
-# Run this test like so:
-# $ vtkpython TestImagePlaneWidget.py  -D $VTK_DATA_ROOT \
-#   -B $VTK_DATA_ROOT/Baseline/Widgets/
-#
 # $ vtkpython TestImagePlaneWidget.py --help
 # provides more details on other options.
 
diff --git a/Interaction/Widgets/Testing/Python/TestInteractorEventRecorder.py b/Interaction/Widgets/Testing/Python/TestInteractorEventRecorder.py
index f1b2722..8ebc387 100755
--- a/Interaction/Widgets/Testing/Python/TestInteractorEventRecorder.py
+++ b/Interaction/Widgets/Testing/Python/TestInteractorEventRecorder.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestBoxWidget.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Widgets
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Interaction/Widgets/Testing/Python/TestSphereWidget.py b/Interaction/Widgets/Testing/Python/TestSphereWidget.py
index 814d1ad..95ffd1d 100755
--- a/Interaction/Widgets/Testing/Python/TestSphereWidget.py
+++ b/Interaction/Widgets/Testing/Python/TestSphereWidget.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestSphereWidget.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Widgets
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Interaction/Widgets/Testing/Python/TestTextWidget.py b/Interaction/Widgets/Testing/Python/TestTextWidget.py
index 71083d5..0b188b4 100755
--- a/Interaction/Widgets/Testing/Python/TestTextWidget.py
+++ b/Interaction/Widgets/Testing/Python/TestTextWidget.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestTextWidget.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Widgets
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Interaction/Widgets/Testing/Tcl/CMakeLists.txt b/Interaction/Widgets/Testing/Tcl/CMakeLists.txt
index 85608dc..074dc98 100644
--- a/Interaction/Widgets/Testing/Tcl/CMakeLists.txt
+++ b/Interaction/Widgets/Testing/Tcl/CMakeLists.txt
@@ -1,9 +1,6 @@
 if(NOT VTK_OPENGL_HAS_OSMESA)
-  if(VTK_DATA_ROOT)
-    add_test_tcl(TestInteractorEventRecorder Widgets)
-    add_test_tcl(TestSphereWidget Widgets)
-  endif()
-
-  add_test_tcl(TestBoxWidget Widgets)
-  add_test_tcl(TestTextWidget Widgets)
+  vtk_add_test_tcl(TestInteractorEventRecorder)
+  vtk_add_test_tcl(TestSphereWidget)
+  vtk_add_test_tcl(TestBoxWidget)
+  vtk_add_test_tcl(TestTextWidget)
 endif()
diff --git a/Interaction/Widgets/vtkAffineRepresentation2D.h b/Interaction/Widgets/vtkAffineRepresentation2D.h
index 0edc748..51c1aac 100644
--- a/Interaction/Widgets/vtkAffineRepresentation2D.h
+++ b/Interaction/Widgets/vtkAffineRepresentation2D.h
@@ -83,7 +83,7 @@ public:
   // Specify the origin of the widget (in world coordinates). The origin
   // is the point where the widget places itself. Note that rotations and
   // scaling occurs around the origin.
-  void SetOrigin(double o[3]) {this->SetOrigin(o[0],o[1],o[2]);}
+  void SetOrigin(const double o[3]) {this->SetOrigin(o[0],o[1],o[2]);}
   void SetOrigin(double ox, double oy, double oz);
   vtkGetVector3Macro(Origin,double);
 
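[Not part of the patch: a two-line sketch of what the const-qualified SetOrigin overload above makes possible; the coordinate values and function name are illustrative only.]

    #include "vtkAffineRepresentation2D.h"

    void SketchSetOrigin()
    {
      // A read-only point can now be passed directly, without a const_cast.
      const double origin[3] = { 0.5, 0.5, 0.0 };
      vtkAffineRepresentation2D *rep = vtkAffineRepresentation2D::New();
      rep->SetOrigin(origin);
      rep->Delete();
    }
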
diff --git a/Interaction/Widgets/vtkBalloonWidget.cxx b/Interaction/Widgets/vtkBalloonWidget.cxx
index 1f042ee..67b1a01 100644
--- a/Interaction/Widgets/vtkBalloonWidget.cxx
+++ b/Interaction/Widgets/vtkBalloonWidget.cxx
@@ -40,6 +40,14 @@ struct vtkBalloon
   vtkImageData *Image;
 
   vtkBalloon() : Text(), Image(0) {}
+  vtkBalloon(const vtkBalloon &balloon) :
+    Text(balloon.Text), Image(balloon.Image)
+    {
+      if ( this->Image )
+        {
+        this->Image->Register(NULL);
+        }
+    }
   vtkBalloon(vtkStdString *str, vtkImageData *img)
     {
       this->Text = *str;
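[Not part of the patch: the copy constructor added above keeps the optional image's reference count balanced when vtkBalloon entries are copied inside the widget's internal map. A minimal sketch of the public API that exercises it; the interactor and actor arguments are assumptions, and teardown is omitted.]

    #include "vtkActor.h"
    #include "vtkBalloonWidget.h"
    #include "vtkRenderWindowInteractor.h"

    void SketchBalloon(vtkRenderWindowInteractor *iren, vtkActor *sphereActor)
    {
      vtkBalloonWidget *balloon = vtkBalloonWidget::New();
      balloon->SetInteractor(iren);
      // Text-only balloon; the image pointer may be NULL.
      balloon->AddBalloon(sphereActor, "A sphere", NULL);
      balloon->EnabledOn();
      // Ownership and cleanup are left out of this sketch.
    }
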
diff --git a/Interaction/Widgets/vtkBorderRepresentation.cxx b/Interaction/Widgets/vtkBorderRepresentation.cxx
index 43199d6..76e5621 100644
--- a/Interaction/Widgets/vtkBorderRepresentation.cxx
+++ b/Interaction/Widgets/vtkBorderRepresentation.cxx
@@ -25,6 +25,7 @@
 #include "vtkWindow.h"
 #include "vtkObjectFactory.h"
 
+#include <cassert>
 
 vtkStandardNewMacro(vtkBorderRepresentation);
 
@@ -34,7 +35,8 @@ vtkBorderRepresentation::vtkBorderRepresentation()
 {
   this->InteractionState = vtkBorderRepresentation::Outside;
 
-  this->ShowBorder = BORDER_ON;
+  this->ShowVerticalBorder = BORDER_ON;
+  this->ShowHorizontalBorder = BORDER_ON;
   this->ProportionalResize = 0;
   this->Tolerance = 3;
   this->SelectionPoint[0] = this->SelectionPoint[1] = 0.0;
@@ -109,6 +111,33 @@ vtkBorderRepresentation::~vtkBorderRepresentation()
 }
 
 //-------------------------------------------------------------------------
+void vtkBorderRepresentation::SetShowBorder(int border)
+{
+  this->SetShowVerticalBorder(border);
+  this->SetShowHorizontalBorder(border);
+  this->UpdateShowBorder();
+}
+
+//-------------------------------------------------------------------------
+int vtkBorderRepresentation::GetShowBorderMinValue()
+{
+  return BORDER_OFF;
+}
+
+//-------------------------------------------------------------------------
+int vtkBorderRepresentation::GetShowBorderMaxValue()
+{
+  return BORDER_ACTIVE;
+}
+
+//-------------------------------------------------------------------------
+int vtkBorderRepresentation::GetShowBorder()
+{
+  return this->GetShowVerticalBorder() != BORDER_OFF ?
+    this->GetShowVerticalBorder() : this->GetShowHorizontalBorder();
+}
+
+//-------------------------------------------------------------------------
 void vtkBorderRepresentation::StartWidgetInteraction(double eventPos[2])
 {
   this->StartEventPosition[0] = eventPos[0];
@@ -275,40 +304,34 @@ int vtkBorderRepresentation::ComputeInteractionState(int X, int Y, int vtkNotUse
   if ( X < (pos1[0]-this->Tolerance) || (pos2[0]+this->Tolerance) < X ||
        Y < (pos1[1]-this->Tolerance) || (pos2[1]+this->Tolerance) < Y )
     {
-    if ( this->ShowBorder != BORDER_ON )
-      {
-      this->BWActor->VisibilityOff();
-      }
     this->InteractionState = vtkBorderRepresentation::Outside;
     }
 
   else // we are on the boundary or inside the border
     {
-    if ( this->ShowBorder != BORDER_OFF )
-      {
-      this->BWActor->VisibilityOn();
-      }
-
     // Now check for proximinity to edges and points
     int e0 = (Y >= (pos1[1] - this->Tolerance) && Y <= (pos1[1] + this->Tolerance));
     int e1 = (X >= (pos2[0] - this->Tolerance) && X <= (pos2[0] + this->Tolerance));
     int e2 = (Y >= (pos2[1] - this->Tolerance) && Y <= (pos2[1] + this->Tolerance));
     int e3 = (X >= (pos1[0] - this->Tolerance) && X <= (pos1[0] + this->Tolerance));
 
-    // Points
-    if ( e0 && e1 )
+    int adjustHorizontalEdges = (this->ShowHorizontalBorder != BORDER_OFF);
+    int adjustVerticalEdges = (this->ShowVerticalBorder != BORDER_OFF);
+    int adjustPoints = (adjustHorizontalEdges && adjustVerticalEdges);
+
+    if ( e0 && e1 && adjustPoints )
       {
       this->InteractionState = vtkBorderRepresentation::AdjustingP1;
       }
-    else if ( e1 && e2 )
+    else if ( e1 && e2 && adjustPoints)
       {
       this->InteractionState = vtkBorderRepresentation::AdjustingP2;
       }
-    else if ( e2 && e3 )
+    else if ( e2 && e3 && adjustPoints )
       {
       this->InteractionState = vtkBorderRepresentation::AdjustingP3;
       }
-    else if ( e3 && e0 )
+    else if ( e3 && e0 && adjustPoints )
       {
       this->InteractionState = vtkBorderRepresentation::AdjustingP0;
       }
@@ -316,19 +339,19 @@ int vtkBorderRepresentation::ComputeInteractionState(int X, int Y, int vtkNotUse
     // Edges
     else if ( e0 || e1 || e2 || e3 )
       {
-      if ( e0 )
+      if ( e0 && adjustHorizontalEdges )
         {
         this->InteractionState = vtkBorderRepresentation::AdjustingE0;
         }
-      else if ( e1 )
+      else if ( e1 && adjustVerticalEdges )
         {
         this->InteractionState = vtkBorderRepresentation::AdjustingE1;
         }
-      else if ( e2 )
+      else if ( e2 && adjustHorizontalEdges )
         {
         this->InteractionState = vtkBorderRepresentation::AdjustingE2;
         }
-      else if ( e3 )
+      else if ( e3 && adjustVerticalEdges )
         {
         this->InteractionState = vtkBorderRepresentation::AdjustingE3;
         }
@@ -350,10 +373,100 @@ int vtkBorderRepresentation::ComputeInteractionState(int X, int Y, int vtkNotUse
         }
       }
     }//else inside or on border
+  this->UpdateShowBorder();
 
   return this->InteractionState;
 }
 
+//-------------------------------------------------------------------------
+void vtkBorderRepresentation::UpdateShowBorder()
+{
+  enum{
+    NoBorder = 0x00,
+    VerticalBorder = 0x01,
+    HorizontalBorder = 0x02,
+    AllBorders = VerticalBorder | HorizontalBorder
+  };
+  int currentBorder = NoBorder;
+  switch (this->BWPolyData->GetLines()->GetNumberOfCells())
+    {
+    case 1:
+      currentBorder = AllBorders;
+      break;
+    case 2:
+      {
+      vtkIdType npts = 0;
+      vtkIdType* pts = 0;
+      this->BWPolyData->GetLines()->GetCell(0, npts, pts);
+      assert(npts == 2);
+      currentBorder = (pts[0] == 0 ? HorizontalBorder : VerticalBorder);
+      break;
+      }
+    case 0:
+    default: // not supported
+      currentBorder = NoBorder;
+      break;
+    }
+  int newBorder = NoBorder;
+  if (this->ShowVerticalBorder == this->ShowHorizontalBorder)
+    {
+    newBorder =
+      (this->ShowVerticalBorder == BORDER_ON ||
+       (this->ShowVerticalBorder == BORDER_ACTIVE &&
+        this->InteractionState != vtkBorderRepresentation::Outside)) ? AllBorders : NoBorder;
+    }
+  else
+    {
+    newBorder = newBorder |
+      ((this->ShowVerticalBorder == BORDER_ON ||
+       (this->ShowVerticalBorder == BORDER_ACTIVE &&
+        this->InteractionState != vtkBorderRepresentation::Outside)) ? VerticalBorder: NoBorder);
+    newBorder = newBorder |
+      ((this->ShowHorizontalBorder == BORDER_ON ||
+        (this->ShowHorizontalBorder == BORDER_ACTIVE &&
+         this->InteractionState != vtkBorderRepresentation::Outside)) ? HorizontalBorder: NoBorder);
+    }
+  bool visible = (newBorder != NoBorder);
+  if (currentBorder != newBorder &&
+      visible)
+    {
+    vtkCellArray *outline = vtkCellArray::New();
+    switch (newBorder)
+      {
+      case AllBorders:
+        outline->InsertNextCell(5);
+        outline->InsertCellPoint(0);
+        outline->InsertCellPoint(1);
+        outline->InsertCellPoint(2);
+        outline->InsertCellPoint(3);
+        outline->InsertCellPoint(0);
+        break;
+      case VerticalBorder:
+        outline->InsertNextCell(2);
+        outline->InsertCellPoint(1);
+        outline->InsertCellPoint(2);
+        outline->InsertNextCell(2);
+        outline->InsertCellPoint(3);
+        outline->InsertCellPoint(0);
+        break;
+      case HorizontalBorder:
+        outline->InsertNextCell(2);
+        outline->InsertCellPoint(0);
+        outline->InsertCellPoint(1);
+        outline->InsertNextCell(2);
+        outline->InsertCellPoint(2);
+        outline->InsertCellPoint(3);
+        break;
+      default:
+        break;
+      }
+    this->BWPolyData->SetLines(outline);
+    outline->Delete();
+    this->BWPolyData->Modified();
+    this->Modified();
+    }
+  this->BWActor->SetVisibility(visible);
+}
 
 //-------------------------------------------------------------------------
 void vtkBorderRepresentation::BuildRepresentation()
@@ -460,16 +573,30 @@ void vtkBorderRepresentation::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os,indent);
 
-  os << indent << "Show Border: ";
-  if ( this->ShowBorder == BORDER_OFF)
+  os << indent << "Show Vertical Border: ";
+  if ( this->ShowVerticalBorder == BORDER_OFF)
+    {
+    os << "Off\n";
+    }
+  else if ( this->ShowVerticalBorder == BORDER_ON)
+    {
+    os << "On\n";
+    }
+  else //if ( this->ShowVerticalBorder == BORDER_ACTIVE)
+    {
+    os << "Active\n";
+    }
+
+  os << indent << "Show Horizontal Border: ";
+  if ( this->ShowHorizontalBorder == BORDER_OFF)
     {
     os << "Off\n";
     }
-  else if ( this->ShowBorder == BORDER_ON)
+  else if ( this->ShowHorizontalBorder == BORDER_ON)
     {
     os << "On\n";
     }
-  else //if ( this->ShowBorder == BORDER_ACTIVE)
+  else //if ( this->ShowHorizontalBorder == BORDER_ACTIVE)
     {
     os << "Active\n";
     }
diff --git a/Interaction/Widgets/vtkBorderRepresentation.h b/Interaction/Widgets/vtkBorderRepresentation.h
index cd1a44a..58c4346 100644
--- a/Interaction/Widgets/vtkBorderRepresentation.h
+++ b/Interaction/Widgets/vtkBorderRepresentation.h
@@ -82,13 +82,31 @@ public:
   // border will never appear.  If ShowBorder is "active" then the border
   // will appear when the mouse pointer enters the region bounded by the
   // border widget.
-  vtkSetClampMacro(ShowBorder,int,BORDER_OFF,BORDER_ACTIVE);
-  vtkGetMacro(ShowBorder,int);
+  // This method is provided as a convenience to set both the horizontal
+  // and vertical borders.
+  // BORDER_ON by default.
+  // See Also: SetShowHorizontalBorder(), SetShowVerticalBorder()
+  virtual void SetShowBorder(int border);
+  virtual int GetShowBorderMinValue();
+  virtual int GetShowBorderMaxValue();
+  virtual int GetShowBorder();
   void SetShowBorderToOff() {this->SetShowBorder(BORDER_OFF);}
   void SetShowBorderToOn() {this->SetShowBorder(BORDER_ON);}
   void SetShowBorderToActive() {this->SetShowBorder(BORDER_ACTIVE);}
 
   // Description:
+  // Specify when and if the vertical border should appear.
+  // See Also: SetShowBorder(), SetShowHorizontalBorder()
+  vtkSetClampMacro(ShowVerticalBorder,int,BORDER_OFF,BORDER_ACTIVE);
+  vtkGetMacro(ShowVerticalBorder,int);
+
+  // Description:
+  // Specify when and if the horizontal border should appear.
+  // See Also: SetShowBorder(), SetShowVerticalBorder()
+  vtkSetClampMacro(ShowHorizontalBorder,int,BORDER_OFF,BORDER_ACTIVE);
+  vtkGetMacro(ShowHorizontalBorder,int);
+
+  // Description:
   // Specify the properties of the border.
   vtkGetObjectMacro(BorderProperty,vtkProperty2D);
 
@@ -174,7 +192,8 @@ protected:
   ~vtkBorderRepresentation();
 
   // Ivars
-  int           ShowBorder;
+  int           ShowVerticalBorder;
+  int           ShowHorizontalBorder;
   vtkProperty2D *BorderProperty;
   int           ProportionalResize;
   int           Tolerance;
@@ -190,6 +209,11 @@ protected:
   int Negotiated;
   virtual void NegotiateLayout();
 
+  // Update the border visibility based on InteractionState.
+  // See Also: SetShowVerticalBorder(), SetShowHorizontalBorder(),
+  // ComputeInteractionState()
+  virtual void UpdateShowBorder();
+
   // Keep track of start position when moving border
   double StartPosition[2];
 
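[Not part of the patch: a short usage sketch of the split border API declared above, assuming an already configured vtkBorderWidget named "widget"; the chosen modes are arbitrary.]

    #include "vtkBorderRepresentation.h"
    #include "vtkBorderWidget.h"

    void SketchSplitBorders(vtkBorderWidget *widget)
    {
      vtkBorderRepresentation *rep = vtkBorderRepresentation::New();
      rep->SetShowVerticalBorder(vtkBorderRepresentation::BORDER_ACTIVE);  // drawn only on hover
      rep->SetShowHorizontalBorder(vtkBorderRepresentation::BORDER_OFF);   // never drawn
      // SetShowBorder(...) still exists and now forwards to both setters.
      widget->SetRepresentation(rep);
      rep->Delete();
    }
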
diff --git a/Interaction/Widgets/vtkBorderWidget.cxx b/Interaction/Widgets/vtkBorderWidget.cxx
index 86f3f12..53d1221 100644
--- a/Interaction/Widgets/vtkBorderWidget.cxx
+++ b/Interaction/Widgets/vtkBorderWidget.cxx
@@ -221,16 +221,19 @@ void vtkBorderWidget::MoveAction(vtkAbstractWidget *w)
     int stateAfter = self->WidgetRep->GetInteractionState();
     self->SetCursor(stateAfter);
 
+    vtkBorderRepresentation* borderRepresentation =
+      reinterpret_cast<vtkBorderRepresentation*>(self->WidgetRep);
     if ( self->Selectable || stateAfter != vtkBorderRepresentation::Inside )
       {
-      reinterpret_cast<vtkBorderRepresentation*>(self->WidgetRep)->MovingOff();
+      borderRepresentation->MovingOff();
       }
     else
       {
-      reinterpret_cast<vtkBorderRepresentation*>(self->WidgetRep)->MovingOn();
+      borderRepresentation->MovingOn();
       }
 
-    if ( reinterpret_cast<vtkBorderRepresentation*>(self->WidgetRep)->GetShowBorder() == vtkBorderRepresentation::BORDER_ACTIVE &&
+    if ( (borderRepresentation->GetShowVerticalBorder() == vtkBorderRepresentation::BORDER_ACTIVE ||
+          borderRepresentation->GetShowHorizontalBorder() == vtkBorderRepresentation::BORDER_ACTIVE) &&
          stateBefore != stateAfter &&
          (stateBefore == vtkBorderRepresentation::Outside || stateAfter == vtkBorderRepresentation::Outside) )
       {
diff --git a/Interaction/Widgets/vtkCameraRepresentation.cxx b/Interaction/Widgets/vtkCameraRepresentation.cxx
index bf75532..bce39e4 100644
--- a/Interaction/Widgets/vtkCameraRepresentation.cxx
+++ b/Interaction/Widgets/vtkCameraRepresentation.cxx
@@ -48,7 +48,7 @@ vtkCameraRepresentation::vtkCameraRepresentation()
   this->Position2Coordinate->SetValue(0.04*size[0], 0.04*size[1]);
   this->ProportionalResize = 1;
   this->Moving = 1;
-  this->ShowBorder = vtkBorderRepresentation::BORDER_ON;
+  this->SetShowBorder(vtkBorderRepresentation::BORDER_ON);
 
   // Create the geometry in canonical coordinates
   this->Points = vtkPoints::New();
diff --git a/Interaction/Widgets/vtkCaptionRepresentation.cxx b/Interaction/Widgets/vtkCaptionRepresentation.cxx
index ee14642..faa349d 100644
--- a/Interaction/Widgets/vtkCaptionRepresentation.cxx
+++ b/Interaction/Widgets/vtkCaptionRepresentation.cxx
@@ -57,7 +57,7 @@ vtkCaptionRepresentation::vtkCaptionRepresentation()
   this->CaptionActor2D->SetLeaderGlyphConnection(
     this->CaptionGlyph->GetOutputPort());
 
-  this->ShowBorder = vtkBorderRepresentation::BORDER_OFF;
+  this->SetShowBorder(vtkBorderRepresentation::BORDER_OFF);
   this->FontFactor = 1.0;
 }
 
diff --git a/Interaction/Widgets/vtkContinuousValueWidgetRepresentation.h b/Interaction/Widgets/vtkContinuousValueWidgetRepresentation.h
index 5665c46..a2920c1 100644
--- a/Interaction/Widgets/vtkContinuousValueWidgetRepresentation.h
+++ b/Interaction/Widgets/vtkContinuousValueWidgetRepresentation.h
@@ -46,7 +46,7 @@ public:
   // assumes that the parameter bounds[6] specifies the location in display
   // space where the widget should be placed.
   virtual void PlaceWidget(double bounds[6]);
-  virtual void BuildRepresentation() {};
+  virtual void BuildRepresentation() {}
   virtual void StartWidgetInteraction(double eventPos[2]) = 0;
   virtual void WidgetInteraction(double eventPos[2]) = 0;
 //  virtual void Highlight(int);
diff --git a/Interaction/Widgets/vtkLogoRepresentation.cxx b/Interaction/Widgets/vtkLogoRepresentation.cxx
index 70b8a8a..1f5a7f4 100644
--- a/Interaction/Widgets/vtkLogoRepresentation.cxx
+++ b/Interaction/Widgets/vtkLogoRepresentation.cxx
@@ -80,7 +80,7 @@ vtkLogoRepresentation::vtkLogoRepresentation()
   this->Position2Coordinate->SetValue(0.04*size[0], 0.04*size[1]);
   this->ProportionalResize = 1;
   this->Moving = 1;
-  this->ShowBorder = vtkBorderRepresentation::BORDER_ACTIVE;
+  this->SetShowBorder(vtkBorderRepresentation::BORDER_ACTIVE);
   this->PositionCoordinate->SetValue(0.9, 0.025);
   this->Position2Coordinate->SetValue(0.075, 0.075);
 }
diff --git a/Interaction/Widgets/vtkPlaybackRepresentation.cxx b/Interaction/Widgets/vtkPlaybackRepresentation.cxx
index 007b760..e7cf91e 100644
--- a/Interaction/Widgets/vtkPlaybackRepresentation.cxx
+++ b/Interaction/Widgets/vtkPlaybackRepresentation.cxx
@@ -40,7 +40,7 @@ vtkPlaybackRepresentation::vtkPlaybackRepresentation()
   this->Position2Coordinate->SetValue(0.04*size[0], 0.04*size[1]);
   this->ProportionalResize = 1;
   this->Moving = 1;
-  this->ShowBorder = vtkBorderRepresentation::BORDER_ON;
+  this->SetShowBorder(vtkBorderRepresentation::BORDER_ON);
 
   // Create the geometry in canonical coordinates
   this->Points = vtkPoints::New();
diff --git a/Interaction/Widgets/vtkPolygonalHandleRepresentation3D.h b/Interaction/Widgets/vtkPolygonalHandleRepresentation3D.h
index f2f0b12..7556d06 100644
--- a/Interaction/Widgets/vtkPolygonalHandleRepresentation3D.h
+++ b/Interaction/Widgets/vtkPolygonalHandleRepresentation3D.h
@@ -55,7 +55,7 @@ public:
 
 protected:
   vtkPolygonalHandleRepresentation3D();
-  ~vtkPolygonalHandleRepresentation3D() {};
+  ~vtkPolygonalHandleRepresentation3D() {}
 
   double Offset[3];
 
diff --git a/Interaction/Widgets/vtkScalarBarRepresentation.cxx b/Interaction/Widgets/vtkScalarBarRepresentation.cxx
index 564be34..e60da8e 100644
--- a/Interaction/Widgets/vtkScalarBarRepresentation.cxx
+++ b/Interaction/Widgets/vtkScalarBarRepresentation.cxx
@@ -30,6 +30,8 @@
 #include "vtkSmartPointer.h"
 #include "vtkTextProperty.h"
 
+#include <algorithm>
+
 //=============================================================================
 vtkStandardNewMacro(vtkScalarBarRepresentation);
 //-----------------------------------------------------------------------------
@@ -43,8 +45,7 @@ vtkScalarBarRepresentation::vtkScalarBarRepresentation()
   this->SetScalarBarActor(actor);
   actor->Delete();
 
-  this->ShowBorder = vtkBorderRepresentation::BORDER_ACTIVE;
-  this->BWActor->VisibilityOff();
+  this->SetShowBorder(vtkBorderRepresentation::BORDER_ACTIVE);
 }
 
 //-----------------------------------------------------------------------------
@@ -156,7 +157,10 @@ void vtkScalarBarRepresentation::WidgetInteraction(double eventPos[2])
     this->PositionCoordinate->SetValue(par1[0],par1[1]);
     this->Position2Coordinate->SetValue(par2[0] - par1[0], par2[1] - par1[1]);
 
+    std::swap(this->ShowHorizontalBorder, this->ShowVerticalBorder);
+
     this->Modified();
+    this->UpdateShowBorder();
     this->BuildRepresentation();
   }
 }
diff --git a/Interaction/Widgets/vtkTextRepresentation.cxx b/Interaction/Widgets/vtkTextRepresentation.cxx
index 6b5f8dc..f423a0d 100644
--- a/Interaction/Widgets/vtkTextRepresentation.cxx
+++ b/Interaction/Widgets/vtkTextRepresentation.cxx
@@ -62,7 +62,7 @@ vtkTextRepresentation::vtkTextRepresentation()
   this->TextActor = vtkTextActor::New();
   this->InitializeTextActor();
 
-  this->ShowBorder = vtkBorderRepresentation::BORDER_ACTIVE;
+  this->SetShowBorder(vtkBorderRepresentation::BORDER_ACTIVE);
   this->BWActor->VisibilityOff();
   this->WindowLocation = AnyLocation;
 }
diff --git a/Parallel/Core/Testing/Cxx/CMakeLists.txt b/Parallel/Core/Testing/Cxx/CMakeLists.txt
index ae65b5e..2746d4d 100644
--- a/Parallel/Core/Testing/Cxx/CMakeLists.txt
+++ b/Parallel/Core/Testing/Cxx/CMakeLists.txt
@@ -1 +1,2 @@
-vtk_tests(TestFieldDataSerialization.cxx)
+vtk_add_test_cxx(TestFieldDataSerialization.cxx NO_DATA NO_VALID NO_OUTPUT)
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Parallel/Core/Testing/Data/Baseline/TestPolyDataPieces.png.md5 b/Parallel/Core/Testing/Data/Baseline/TestPolyDataPieces.png.md5
new file mode 100644
index 0000000..0b653ec
--- /dev/null
+++ b/Parallel/Core/Testing/Data/Baseline/TestPolyDataPieces.png.md5
@@ -0,0 +1 @@
+ff62bf9a6fd1abe05dc3d186a209fa5b
diff --git a/Parallel/Core/Testing/Data/Baseline/TestUnstructuredPieces.png.md5 b/Parallel/Core/Testing/Data/Baseline/TestUnstructuredPieces.png.md5
new file mode 100644
index 0000000..1f9e693
--- /dev/null
+++ b/Parallel/Core/Testing/Data/Baseline/TestUnstructuredPieces.png.md5
@@ -0,0 +1 @@
+3a8ed2be273ca5838080b2efd208e1e5
diff --git a/Parallel/Core/Testing/Python/CMakeLists.txt b/Parallel/Core/Testing/Python/CMakeLists.txt
index 8944595..3439273 100644
--- a/Parallel/Core/Testing/Python/CMakeLists.txt
+++ b/Parallel/Core/Testing/Python/CMakeLists.txt
@@ -1,4 +1,2 @@
-add_test_python(TestPolyDataPieces.py Parallel)
-if (VTK_DATA_ROOT)
-  add_test_python(TestUnstructuredPieces.py Parallel)
-endif()
+vtk_add_test_python(TestPolyDataPieces.py)
+vtk_add_test_python(TestUnstructuredPieces.py)
diff --git a/Parallel/Core/Testing/Tcl/CMakeLists.txt b/Parallel/Core/Testing/Tcl/CMakeLists.txt
index 21eff2f..83f8704 100644
--- a/Parallel/Core/Testing/Tcl/CMakeLists.txt
+++ b/Parallel/Core/Testing/Tcl/CMakeLists.txt
@@ -1,4 +1,2 @@
-add_test_tcl(TestPolyDataPieces.tcl Parallel)
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestUnstructuredPieces.tcl Parallel)
-endif()
+vtk_add_test_tcl(TestPolyDataPieces.tcl)
+vtk_add_test_tcl(TestUnstructuredPieces.tcl)
diff --git a/Parallel/Core/module.cmake b/Parallel/Core/module.cmake
index 3b02430..7668742 100644
--- a/Parallel/Core/module.cmake
+++ b/Parallel/Core/module.cmake
@@ -7,6 +7,8 @@ vtk_module(vtkParallelCore
     # of vtkIOLegacy off but still satisfy API dependcy.
     vtkCommonCore
     vtkIOLegacy
+  PRIVATE_DEPENDS
+    vtksys
   COMPILE_DEPENDS
     vtkUtilitiesHashSource
   TEST_DEPENDS
diff --git a/Parallel/Core/vtkCommunicator.cxx b/Parallel/Core/vtkCommunicator.cxx
index d29bd3a..378ad21 100644
--- a/Parallel/Core/vtkCommunicator.cxx
+++ b/Parallel/Core/vtkCommunicator.cxx
@@ -27,7 +27,6 @@
 #include "vtkIdTypeArray.h"
 #include "vtkImageData.h"
 #include "vtkIntArray.h"
-#include "vtkMultiBlockDataSet.h"
 #include "vtkMultiProcessController.h"
 #include "vtkMultiProcessStream.h"
 #include "vtkRectilinearGrid.h"
@@ -207,40 +206,12 @@ int vtkCommunicator::Send(vtkDataObject* data, int remoteHandle,
     case VTK_TABLE:
     case VTK_TREE:
     case VTK_UNSTRUCTURED_GRID:
-      return this->SendElementalDataObject(data, remoteHandle, tag);
-
-    //for composite types send type, structure, and then iterate
-    //over the internal dataobjects, sending each one (recursively)
     case VTK_MULTIBLOCK_DATA_SET:
-      return this->SendMultiBlockDataSet(
-        vtkMultiBlockDataSet::SafeDownCast(data), remoteHandle, tag);
-    }
-}
-
-//----------------------------------------------------------------------------
-int vtkCommunicator::SendMultiBlockDataSet(vtkMultiBlockDataSet* mbds,
-  int remoteHandle, int tag)
-{
-  int returnCode = 1;
-  int numblocks = static_cast<int>(mbds->GetNumberOfBlocks());
-
-  // Tell the receiver the number of blocks to expect.
-  returnCode = this->Send(&numblocks, 1, remoteHandle, tag);
-  for (int cc=0; (cc < numblocks) && returnCode; cc++)
-    {
-    vtkDataObject* block = mbds->GetBlock(cc);
-    int dataType = (block? block->GetDataObjectType() : -1);
-    returnCode = returnCode && this->Send(&dataType, 1, remoteHandle, tag);
-    if (block)
-      {
-      // Now, send the actual block data.
-      returnCode = returnCode && this->Send(block, remoteHandle, tag);
-      }
+    case VTK_UNIFORM_GRID_AMR:
+      return this->SendElementalDataObject(data, remoteHandle, tag);
     }
-  return returnCode;
 }
 
-
 //----------------------------------------------------------------------------
 int vtkCommunicator::SendElementalDataObject(
   vtkDataObject* data, int remoteHandle,
@@ -429,39 +400,10 @@ int vtkCommunicator::ReceiveDataObject(vtkDataObject* data, int remoteHandle,
     case VTK_TABLE:
     case VTK_TREE:
     case VTK_UNSTRUCTURED_GRID:
-      return this->ReceiveElementalDataObject(data, remoteHandle, tag);
-
-    //for composite types receive type, structure, and then iterate
-    //over the internal dataobjects, receiving each recursively as needed
     case VTK_MULTIBLOCK_DATA_SET:
-      return this->ReceiveMultiBlockDataSet(
-        vtkMultiBlockDataSet::SafeDownCast(data), remoteHandle, tag);
-    }
-}
-
-//----------------------------------------------------------------------------
-int vtkCommunicator::ReceiveMultiBlockDataSet(
-  vtkMultiBlockDataSet* mbds, int remoteHandle, int tag)
-{
-  int returnCode = 1;
-
-  int numblocks = 0;
-  returnCode = this->Receive(&numblocks, 1, remoteHandle, tag);
-  mbds->SetNumberOfBlocks(numblocks);
-  for (int cc=0; (cc < numblocks) && returnCode; cc++)
-    {
-    int dataType = 0;
-    returnCode = returnCode && this->Receive(&dataType, 1, remoteHandle, tag);
-    if (dataType != -1) // 0 is a valid data type :).
-      {
-      vtkDataObject* dObj = vtkDataObjectTypes::NewDataObject(dataType);
-      returnCode = returnCode && this->Receive(dObj, remoteHandle, tag);
-      mbds->SetBlock(cc, dObj);
-      dObj->Delete();
-      }
+    case VTK_UNIFORM_GRID_AMR:
+      return this->ReceiveElementalDataObject(data, remoteHandle, tag);
     }
-
-  return returnCode;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Parallel/Core/vtkCommunicator.h b/Parallel/Core/vtkCommunicator.h
index 3da6cb9..a7385a8 100644
--- a/Parallel/Core/vtkCommunicator.h
+++ b/Parallel/Core/vtkCommunicator.h
@@ -845,7 +845,6 @@ protected:
 
   // Internal methods called by Send/Receive(vtkDataObject *... ) above.
   int SendElementalDataObject(vtkDataObject* data, int remoteHandle, int tag);
-  int SendMultiBlockDataSet(vtkMultiBlockDataSet* data, int remoteHandle, int tag);
   int ReceiveDataObject(vtkDataObject* data,
                         int remoteHandle, int tag, int type=-1);
   int ReceiveElementalDataObject(vtkDataObject* data,
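[Not part of the patch: a sketch of the call pattern affected by the hunks above, where composite data such as vtkMultiBlockDataSet now travels through the same elemental Send/Receive path as plain data sets. The process ids and tag value are assumptions made for the example.]

    #include "vtkMultiBlockDataSet.h"
    #include "vtkMultiProcessController.h"

    void SketchSendMultiBlock(vtkMultiBlockDataSet *mb)
    {
      vtkMultiProcessController *ctrl = vtkMultiProcessController::GetGlobalController();
      const int tag = 1234;
      if (ctrl->GetLocalProcessId() == 0)
        {
        ctrl->Send(mb, 1, tag);        // shipped as one elemental data object
        }
      else if (ctrl->GetLocalProcessId() == 1)
        {
        vtkMultiBlockDataSet *received = vtkMultiBlockDataSet::New();
        ctrl->Receive(received, 0, tag);
        received->Delete();
        }
    }
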
diff --git a/Parallel/Core/vtkFieldDataSerializer.cxx b/Parallel/Core/vtkFieldDataSerializer.cxx
index abf1aa0..7107b78 100644
--- a/Parallel/Core/vtkFieldDataSerializer.cxx
+++ b/Parallel/Core/vtkFieldDataSerializer.cxx
@@ -331,6 +331,10 @@ void vtkFieldDataSerializer::SerializeDataArray(
      case VTK_INT:
        bytestream.Push(static_cast<int*>(dataArray->GetVoidPointer(0)),size);
        break;
+     case VTK_ID_TYPE:
+       bytestream.Push(
+           static_cast<vtkIdType*>(dataArray->GetVoidPointer(0)),size);
+       break;
      default:
        assert("ERROR: cannot serialize data of given type" && false);
        cerr << "Canot serialize data of type="
@@ -392,6 +396,7 @@ void vtkFieldDataSerializer::DeserializeDataArray(
   std::string name;
 
   bytestream >> dataType >> numTuples >> numComp >> name;
+  assert("pre: numComp >= 1" && (numComp >= 1) );
 
   // STEP 1: Construct vtkDataArray object
   dataArray = vtkDataArray::CreateDataArray( dataType );
@@ -401,46 +406,33 @@ void vtkFieldDataSerializer::DeserializeDataArray(
 
   // STEP 2: Extract raw data to vtkDataArray
   // TODO: Add more cases for more datatypes here (?)
-  unsigned int size = 0;
+  unsigned int size = numTuples*numComp;
+  void* rawPtr = dataArray->GetVoidPointer(0);
+  assert("pre: raw pointer is NULL!" && (rawPtr != NULL) );
   switch( dataType )
     {
     case VTK_FLOAT:
       {
-      float *data = NULL;
+      float* data = static_cast<float*>(rawPtr);
       bytestream.Pop(data,size);
-      assert("pre: deserialized raw data array is NULL" && (data != NULL) );
-
-      float *dataArrayPtr = static_cast<float*>(dataArray->GetVoidPointer(0));
-      assert("pre: data array pointer is NULL!" && (dataArrayPtr != NULL) );
-
-      std::memcpy(dataArrayPtr,data,size*sizeof(float));
-      delete [] data;
       }
       break;
     case VTK_DOUBLE:
       {
-      double *data = NULL;
+      double *data = static_cast<double*>(rawPtr);
       bytestream.Pop(data,size);
-      assert("pre: deserialized raw data array is NULL" && (data != NULL) );
-
-      double *dataArrayPtr = static_cast<double*>(dataArray->GetVoidPointer(0));
-      assert("pre: data array pointer is NULL!" && (dataArrayPtr != NULL) );
-
-      std::memcpy(dataArrayPtr,data,size*sizeof(double));
-      delete [] data;
       }
       break;
     case VTK_INT:
       {
-      int *data = NULL;
+      int *data = static_cast<int*>(rawPtr);
+      bytestream.Pop(data,size);
+      }
+      break;
+    case VTK_ID_TYPE:
+      {
+      vtkIdType* data = static_cast<vtkIdType*>(rawPtr);
       bytestream.Pop(data,size);
-      assert("pre: deserialized raw data array is NULL" && (data != NULL) );
-
-      int *dataArrayPtr = static_cast<int*>(dataArray->GetVoidPointer(0));
-      assert("pre: data array pointer is NULL!" && (dataArrayPtr != NULL) );
-
-      std::memcpy(dataArrayPtr,data,size*sizeof(int));
-      delete [] data;
       }
       break;
     default:
diff --git a/Parallel/Core/vtkMultiProcessStream.cxx b/Parallel/Core/vtkMultiProcessStream.cxx
index e2ded2f..443a7f4 100644
--- a/Parallel/Core/vtkMultiProcessStream.cxx
+++ b/Parallel/Core/vtkMultiProcessStream.cxx
@@ -17,7 +17,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkSocketCommunicator.h" // for vtkSwap8 and vtkSwap4 macros.
 #include <deque>
-#include <assert.h>
+#include <cassert>
 
 class vtkMultiProcessStream::vtkInternals
 {
@@ -271,19 +271,26 @@ void vtkMultiProcessStream::Pop(double*& array, unsigned int& size)
           this->Internals->Data.front()==vtkInternals::double_value);
   this->Internals->Data.pop_front();
 
-  if( array != NULL )
+  if( array == NULL )
     {
-    delete [] array;
-    }
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int));
 
-  // Get the size of the array
-  this->Internals->Pop(
-      reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int));
-  assert( "pre: size > 0" && (size > 0) );
+    // Allocate array
+    array = new double[ size ];
+    assert( "ERROR: cannot allocate array" && (array != NULL) );
+    }
+  else
+    {
+    unsigned int sz;
 
-  // Allocate array
-  array = new double[ size ];
-  assert( "ERROR: cannot allocate array" && (array != NULL) );
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&sz), sizeof(unsigned int));
+    assert("ERROR: input array size does not match size of data" &&
+            (sz==size) );
+    }
 
   // Pop the array data
   this->Internals->Pop(
@@ -297,18 +304,26 @@ void vtkMultiProcessStream::Pop(float*& array, unsigned int& size)
           this->Internals->Data.front()==vtkInternals::float_value);
   this->Internals->Data.pop_front();
 
-  if( array != NULL )
+  if( array == NULL )
     {
-    delete [] array;
-    }
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
 
-  // Get the size of the array
-  this->Internals->Pop(
-      reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
+    // Allocate array
+    array = new float[ size ];
+    assert( "ERROR: cannot allocate array" && (array != NULL) );
+    }
+  else
+    {
+    unsigned int sz;
 
-  // Allocate array
-  array = new float[ size ];
-  assert( "ERROR: cannot allocate array" && (array != NULL) );
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&sz), sizeof(unsigned int));
+    assert("ERROR: input array size does not match size of data" &&
+                (sz==size) );
+    }
 
   // Pop the array data
   this->Internals->Pop(
@@ -322,18 +337,26 @@ void vtkMultiProcessStream::Pop(int*& array, unsigned int& size)
            this->Internals->Data.front()==vtkInternals::int32_value );
   this->Internals->Data.pop_front();
 
-  if( array != NULL )
+  if( array == NULL )
     {
-    delete [] array;
-    }
+    // Get the size of the array
+    this->Internals->Pop(
+       reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
 
-  // Get the size of the array
-  this->Internals->Pop(
-      reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
+    // Allocate the array
+    array = new int[ size ];
+    assert( "ERROR: cannot allocate array" && (array != NULL) );
+    }
+  else
+    {
+    unsigned int sz;
 
-  // Allocate the array
-  array = new int[ size ];
-  assert( "ERROR: cannot allocate array" && (array != NULL) );
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&sz), sizeof(unsigned int));
+    assert("ERROR: input array size does not match size of data" &&
+                (sz==size) );
+    }
 
   // Pop the array data
   this->Internals->Pop(
@@ -347,18 +370,26 @@ void vtkMultiProcessStream::Pop(char*& array, unsigned int& size)
           this->Internals->Data.front()==vtkInternals::char_value );
   this->Internals->Data.pop_front();
 
-  if( array != NULL )
+  if( array == NULL )
     {
-    delete [] array;
-    }
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
 
-  // Get the size of the array
-  this->Internals->Pop(
-      reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
+    // Allocate the array
+    array = new char[ size ];
+    assert( "ERROR: cannot allocate array" && (array != NULL) );
+    }
+  else
+    {
+    unsigned int sz;
 
-  // Allocate the array
-  array = new char[ size ];
-  assert( "ERROR: cannot allocate array" && (array != NULL) );
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&sz), sizeof(unsigned int));
+    assert("ERROR: input array size does not match size of data" &&
+                (sz==size) );
+    }
 
   // Pop the array data
   this->Internals->Pop(
@@ -372,18 +403,26 @@ void vtkMultiProcessStream::Pop(unsigned int*& array, unsigned int& size )
           this->Internals->Data.front()==vtkInternals::uint32_value );
   this->Internals->Data.pop_front();
 
-  if( array != NULL )
+  if( array == NULL )
     {
-    delete [] array;
-    }
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
 
-  // Get the size of the array
-  this->Internals->Pop(
-      reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
+    // Allocate the array
+    array = new unsigned int[ size ];
+    assert( "ERROR: cannot allocate array" && (array != NULL) );
+    }
+  else
+    {
+    unsigned int sz;
 
-  // Allocate the array
-  array = new unsigned int[ size ];
-  assert( "ERROR: cannot allocate array" && (array != NULL) );
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&sz), sizeof(unsigned int));
+    assert("ERROR: input array size does not match size of data" &&
+                (sz==size) );
+    }
 
   // Pop the array data
   this->Internals->Pop(
@@ -397,18 +436,26 @@ void vtkMultiProcessStream::Pop(unsigned char*& array, unsigned int& size )
           this->Internals->Data.front()==vtkInternals::uchar_value );
   this->Internals->Data.pop_front();
 
-  if( array != NULL )
+  if( array == NULL )
     {
-    delete [] array;
-    }
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
 
-  // Get the size of the array
-  this->Internals->Pop(
-      reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
+    // Allocate the array
+    array = new unsigned char[ size ];
+    assert( "ERROR: cannot allocate array" && (array != NULL) );
+    }
+  else
+    {
+    unsigned int sz;
 
-  // Allocate the array
-  array = new unsigned char[ size ];
-  assert( "ERROR: cannot allocate array" && (array != NULL) );
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&sz), sizeof(unsigned int));
+    assert("ERROR: input array size does not match size of data" &&
+                (sz==size) );
+    }
 
   // Pop the array data
   this->Internals->Pop( array, size );
@@ -421,18 +468,26 @@ void vtkMultiProcessStream::Pop(vtkTypeInt64*& array, unsigned int& size )
           this->Internals->Data.front()==vtkInternals::int64_value );
   this->Internals->Data.pop_front();
 
-  if( array != NULL )
+  if( array == NULL )
     {
-    delete [] array;
-    }
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
 
-  // Get the size of the array
-  this->Internals->Pop(
-      reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
+    // Allocate the array
+    array = new vtkTypeInt64[ size ];
+    assert( "ERROR: cannot allocate array" && (array != NULL) );
+    }
+  else
+    {
+    unsigned int sz;
 
-  // Allocate the array
-  array = new vtkTypeInt64[ size ];
-  assert( "ERROR: cannot allocate array" && (array != NULL) );
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&sz), sizeof(unsigned int));
+    assert("ERROR: input array size does not match size of data" &&
+                (sz==size) );
+    }
 
   // Pop the array data
   this->Internals->Pop(
@@ -446,18 +501,26 @@ void vtkMultiProcessStream::Pop(vtkTypeUInt64*& array, unsigned int& size )
           this->Internals->Data.front()==vtkInternals::uint64_value );
   this->Internals->Data.pop_front();
 
-  if( array != NULL )
+  if( array == NULL )
     {
-    delete [] array;
-    }
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
 
-  // Get the size of the array
-  this->Internals->Pop(
-      reinterpret_cast<unsigned char*>(&size), sizeof(unsigned int) );
+    // Allocate the array
+    array = new vtkTypeUInt64[ size ];
+    assert( "ERROR: cannot allocate array" && (array != NULL) );
+    }
+  else
+    {
+    unsigned int sz;
 
-  // Allocate the array
-  array = new vtkTypeUInt64[ size ];
-  assert( "ERROR: cannot allocate array" && (array != NULL) );
+    // Get the size of the array
+    this->Internals->Pop(
+        reinterpret_cast<unsigned char*>(&sz), sizeof(unsigned int));
+    assert("ERROR: input array size does not match size of data" &&
+                (sz==size) );
+    }
 
   // Pop the array data
   this->Internals->Pop(
diff --git a/Parallel/Core/vtkMultiProcessStream.h b/Parallel/Core/vtkMultiProcessStream.h
index 8df8afd..a9f76a4 100644
--- a/Parallel/Core/vtkMultiProcessStream.h
+++ b/Parallel/Core/vtkMultiProcessStream.h
@@ -78,7 +78,10 @@ public:
 
   // Description:
   // Remove-array-to-stream methods. Removes from the head of the stream.
-  // Note: the user must call delete on the array after processing it.
+  // Note: If the input array is NULL, the array is allocated internally and
+  // the calling application is responsible for properly de-allocating it.
+  // If the input array is not NULL, its size is expected to match the size
+  // of the data in the stream, and this method simply fills in the data.
   void Pop(double*& array, unsigned int& size);
   void Pop(float*& array, unsigned int& size);
   void Pop(int*& array, unsigned int& size);
@@ -98,6 +101,12 @@ public:
   int Size();
 
   // Description:
+  // Returns the size of the raw data returned by GetRawData. This
+  // includes 1 byte to store the endian type.
+  int RawSize()
+    {return(this->Size()+1);}
+
+  // Description:
   // Returns true iff the stream is empty.
   bool Empty();
 
diff --git a/Parallel/Core/vtkSocketCommunicator.cxx b/Parallel/Core/vtkSocketCommunicator.cxx
index b58f53e..5c42fbe 100644
--- a/Parallel/Core/vtkSocketCommunicator.cxx
+++ b/Parallel/Core/vtkSocketCommunicator.cxx
@@ -21,7 +21,7 @@
 #include "vtkSocketController.h"
 #include "vtkStdString.h"
 #include "vtkTypeTraits.h"
-#include <assert.h>
+#include <cassert>
 
 #include <algorithm>
 #include <vector>
diff --git a/Parallel/Core/vtkSocketController.h b/Parallel/Core/vtkSocketController.h
index b313489..42209db 100644
--- a/Parallel/Core/vtkSocketController.h
+++ b/Parallel/Core/vtkSocketController.h
@@ -61,20 +61,20 @@ public:
 
   // Description:
   // Does not apply to sockets. Does nothing.
-  void Finalize() {};
-  void Finalize(int) {};
+  void Finalize() {}
+  void Finalize(int) {}
 
   // Description:
   //  Does not apply to sockets. Does nothing.
-  void SingleMethodExecute() {};
+  void SingleMethodExecute() {}
 
   // Description:
   //  Does not apply to sockets.  Does nothing.
-  void MultipleMethodExecute() {};
+  void MultipleMethodExecute() {}
 
   // Description:
   //  Does not apply to sockets. Does nothing.
-  void CreateOutputWindow() {};
+  void CreateOutputWindow() {}
 
   // Description:
   // Wait for connection on a given port, forwarded
diff --git a/Parallel/MPI/CMakeLists.txt b/Parallel/MPI/CMakeLists.txt
index 004383a..ca7c400 100644
--- a/Parallel/MPI/CMakeLists.txt
+++ b/Parallel/MPI/CMakeLists.txt
@@ -1,5 +1,4 @@
-find_package(MPI REQUIRED)
-mark_as_advanced(MPI_LIBRARY MPI_EXTRA_LIBRARY)
+include(vtkMPI)
 set(vtkParallelMPI_SYSTEM_INCLUDE_DIRS ${MPI_C_INCLUDE_PATH})
 
 include(vtkTestingMPISupport)
@@ -8,6 +7,7 @@ include(vtkTestingMacros)
 set(Module_SRCS
   vtkMPICommunicator.cxx
   vtkMPIController.cxx
+  vtkMPIUtilities.cxx
   # vtkMPIEventLog.cxx # Deprecated
   vtkMPI.h
   )
@@ -19,6 +19,7 @@ set_source_files_properties(
 
 set_source_files_properties(
   vtkMPI
+  vtkMPIUtilities
   WRAP_EXCLUDE
   )
 
@@ -29,7 +30,4 @@ add_definitions("-DMPICH_IGNORE_CXX_SEEK")
 set(${vtk-module}_EXPORT_CODE "#define VTK_USE_MPI")
 
 vtk_module_library(vtkParallelMPI ${Module_SRCS})
-target_link_libraries(vtkParallelMPI ${MPI_C_LIBRARIES})
-if (MPI_CXX_LIBRARIES)
-  target_link_libraries(vtkParallelMPI ${MPI_CXX_LIBRARIES})
-endif()
+vtk_mpi_link(vtkParallelMPI)
diff --git a/Parallel/MPI/Testing/Cxx/CMakeLists.txt b/Parallel/MPI/Testing/Cxx/CMakeLists.txt
index 84a8850..fc676af 100644
--- a/Parallel/MPI/Testing/Cxx/CMakeLists.txt
+++ b/Parallel/MPI/Testing/Cxx/CMakeLists.txt
@@ -1,9 +1,15 @@
-add_test_mpi(TestDistributedDataShadowMapPass.cxx)
-add_test_mpi(TestNonBlockingCommunication.cxx)
-add_test_mpi(GenericCommunicator.cxx)
-add_test_mpi(MPIController.cxx ExerciseMultiProcessController.cxx)
-add_test_mpi(TestProcess.cxx)
+include(vtkMPI)
 
-if (VTK_DATA_ROOT)
-  add_test_mpi(TestPProbe.cxx DATADIR ${VTK_DATA_ROOT})
-endif()
+vtk_add_test_mpi(TestDistributedDataShadowMapPass.cxx)
+vtk_add_test_mpi(TestNonBlockingCommunication.cxx)
+vtk_add_test_mpi(GenericCommunicator.cxx)
+vtk_add_test_mpi(MPIController.cxx ExerciseMultiProcessController.cxx)
+vtk_add_test_mpi(TestProcess.cxx)
+vtk_add_test_mpi(TestPProbe.cxx TESTING_DATA)
+
+vtk_mpi_link(TestDistributedDataShadowMapPass)
+vtk_mpi_link(TestNonBlockingCommunication)
+vtk_mpi_link(GenericCommunicator)
+vtk_mpi_link(MPIController)
+vtk_mpi_link(TestProcess)
+vtk_mpi_link(TestPProbe)
diff --git a/Parallel/MPI/Testing/Cxx/TestDistributedDataShadowMapPass.cxx b/Parallel/MPI/Testing/Cxx/TestDistributedDataShadowMapPass.cxx
index 5fea07d..45e5053 100644
--- a/Parallel/MPI/Testing/Cxx/TestDistributedDataShadowMapPass.cxx
+++ b/Parallel/MPI/Testing/Cxx/TestDistributedDataShadowMapPass.cxx
@@ -66,7 +66,7 @@
 #include "vtkProperty.h"
 #include "vtkLight.h"
 #include "vtkLightCollection.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkFrustumSource.h"
 #include "vtkPlanes.h"
@@ -136,7 +136,6 @@ void MyProcess::Execute()
     }
 
   vtkRenderWindow *renWin = prm->MakeRenderWindow();
-  renWin->SetReportGraphicErrors(true);
   renWin->SetMultiSamples(0);
 
   renWin->SetAlphaBitPlanes(1);
diff --git a/Parallel/MPI/Testing/Data/Baseline/TestPProbe.png.md5 b/Parallel/MPI/Testing/Data/Baseline/TestPProbe.png.md5
new file mode 100644
index 0000000..3bfbf5b
--- /dev/null
+++ b/Parallel/MPI/Testing/Data/Baseline/TestPProbe.png.md5
@@ -0,0 +1 @@
+8771102da26734133081ded8e1888421
diff --git a/Parallel/MPI/Testing/Tcl/CMakeLists.txt b/Parallel/MPI/Testing/Tcl/CMakeLists.txt
index 8d37d76..997c643 100644
--- a/Parallel/MPI/Testing/Tcl/CMakeLists.txt
+++ b/Parallel/MPI/Testing/Tcl/CMakeLists.txt
@@ -1,21 +1,18 @@
-# Tests with test images in Baseline/Parallel
-#
 unset(tests)
-if(VTK_DATA_ROOT)
-  set(tests
-    TestPProbe
+set(tests
+  TestPProbe
   )
-endif()
 
 foreach( tfile ${tests})
-  add_test(NAME ${vtk-module}Tcl-${tfile} COMMAND
+  ExternalData_add_test(VTKData
+    NAME ${vtk-module}Tcl-${tfile} COMMAND
     ${VTK_MPIRUN_EXE} ${VTK_MPI_PRENUMPROC_FLAGS} ${VTK_MPI_NUMPROC_FLAG} 2 ${VTK_MPI_PREFLAGS}
     $<TARGET_FILE:pvtk>
     ${vtkTestingRendering_SOURCE_DIR}/prtImageTest.tcl
     ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.tcl
-    -D ${VTK_DATA_ROOT}
+    -D ${VTK_TEST_DATA_DIR}
     -T ${VTK_TEST_OUTPUT_DIR}
-    -V Baseline/Parallel/${tfile}.png
+    -V DATA{../Data/Baseline/${tfile}.png}
     -A ${VTK_SOURCE_DIR}/Wrapping/Tcl
     ${VTK_MPI_POSTFLAGS}
    )
diff --git a/Parallel/MPI/vtkMPICommunicator.cxx b/Parallel/MPI/vtkMPICommunicator.cxx
index 8e3daa5..dd66d3a 100644
--- a/Parallel/MPI/vtkMPICommunicator.cxx
+++ b/Parallel/MPI/vtkMPICommunicator.cxx
@@ -29,7 +29,7 @@
   vtkSmartPointer<type> name = vtkSmartPointer<type>::New()
 
 #include <vector>
-#include <assert.h>
+#include <cassert>
 
 static inline void  vtkMPICommunicatorDebugBarrier(MPI_Comm* handle)
 {
diff --git a/Parallel/MPI/vtkMPIUtilities.cxx b/Parallel/MPI/vtkMPIUtilities.cxx
new file mode 100644
index 0000000..fe54e73
--- /dev/null
+++ b/Parallel/MPI/vtkMPIUtilities.cxx
@@ -0,0 +1,115 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMPIUtilities.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkMPIUtilities.h"
+
+// VTK includes
+#include "vtkMPICommunicator.h"
+#include "vtkMPIController.h"
+
+// C/C++ includes
+#include <cassert>
+#include <cstdarg>
+#include <cstdio>
+
+namespace vtkMPIUtilities
+{
+
+
+void Printf(vtkMPIController* comm, const char* format, ...)
+{
+  // Sanity checks
+  assert("pre: MPI controller is NULL!" && (comm != NULL) );
+  assert("pre: format argument is NULL!" && (format != NULL) );
+
+  if( comm->GetLocalProcessId() == 0 )
+    {
+    va_list argptr;
+    va_start(argptr,format);
+    vprintf(format,argptr);
+    fflush(stdout);
+    va_end(argptr);
+    }
+
+  comm->Barrier();
+}
+
+//------------------------------------------------------------------------------
+void SynchronizedPrintf(vtkMPIController* comm, const char* format, ...)
+{
+  // Sanity checks
+  assert("pre: MPI controller is NULL!" && (comm != NULL) );
+  assert("pre: format argument is NULL!" && (format != NULL) );
+
+  int rank     = comm->GetLocalProcessId();
+  int numRanks = comm->GetNumberOfProcesses();
+
+
+  vtkMPICommunicator::Request rqst;
+  int* nullmsg = NULL;
+
+  if(rank == 0)
+    {
+    // STEP 0: print message
+    printf("[%d]: ", rank);
+    fflush(stdout);
+
+    va_list argptr;
+    va_start(argptr,format);
+    vprintf(format,argptr);
+    fflush(stdout);
+    va_end(argptr);
+
+    // STEP 1: signal next process (if any) to print
+    if( numRanks > 1)
+      {
+      comm->NoBlockSend(nullmsg,0,rank+1,0,rqst);
+      } // END if
+    } // END first rank
+  else if( rank == numRanks-1 )
+    {
+    // STEP 0: Block until previous process completes
+    comm->Receive(nullmsg,0,rank-1,0);
+
+    // STEP 1: print message
+    printf("[%d]: ", rank);
+
+    va_list argptr;
+    va_start(argptr,format);
+    vprintf(format,argptr);
+    fflush(stdout);
+    va_end(argptr);
+    } // END last rank
+  else
+    {
+    // STEP 0: Block until previous process completes
+    comm->Receive(nullmsg,0,rank-1,0);
+
+    // STEP 1: print message
+    printf("[%d]: ", rank);
+
+    va_list argptr;
+    va_start(argptr,format);
+    vprintf(format,argptr);
+    fflush(stdout);
+    va_end(argptr);
+
+    // STEP 2: signal next process to print
+    comm->NoBlockSend(nullmsg,0,rank+1,0,rqst);
+    }
+
+  comm->Barrier();
+}
+
+} // END namespace vtkMPIUtilities
diff --git a/Parallel/MPI/vtkMPIUtilities.h b/Parallel/MPI/vtkMPIUtilities.h
new file mode 100644
index 0000000..0c6229e
--- /dev/null
+++ b/Parallel/MPI/vtkMPIUtilities.h
@@ -0,0 +1,46 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMPIUtilities.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __vtkMPIUtilities_h
+#define __vtkMPIUtilities_h
+
+#include "vtkParallelMPIModule.h" // For export macro
+
+// Forward declarations
+class vtkMPIController;
+
+namespace vtkMPIUtilities
+{
+
+// Description:
+// Rank 0 prints the user-supplied formatted message to stdout.
+// This method works just like printf but requires an additional
+// argument to specify the MPI controller for the application.
+// NOTE: This is a collective operation, all ranks in the given communicator
+// must call this method.
+VTKPARALLELMPI_EXPORT
+void Printf(vtkMPIController* comm, const char* format, ...);
+
+// Description:
+// Each rank, r_0 to r_{N-1}, prints the formatted message to stdout in
+// rank order. That is, r_i prints the supplied message right after r_{i-1}.
+// NOTE: This is a collective operation, all ranks in the given communicator
+// must call this method.
+VTKPARALLELMPI_EXPORT
+void SynchronizedPrintf(vtkMPIController* comm, const char* format, ...);
+
+} // END namespace vtkMPIUtilities
+
+#endif // __vtkMPIUtilities_h
+// VTK-HeaderTest-Exclude: vtkMPIUtilities.h
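The two helpers declared above are collective: every rank of the controller must make the call, and output is either restricted to rank 0 (Printf) or serialized in rank order (SynchronizedPrintf). A minimal, hypothetical usage sketch; the controller setup is the usual vtkMPIController pattern and is not part of this patch:

    #include "vtkMPIController.h"
    #include "vtkMPIUtilities.h"
    #include "vtkNew.h"

    int main(int argc, char* argv[])
    {
      vtkNew<vtkMPIController> controller;
      controller->Initialize(&argc, &argv);

      // Printed once, by rank 0 only.
      vtkMPIUtilities::Printf(controller.GetPointer(),
                              "Running on %d ranks\n",
                              controller->GetNumberOfProcesses());

      // Printed by every rank, one rank at a time, in rank order.
      vtkMPIUtilities::SynchronizedPrintf(controller.GetPointer(),
                                          "local work done\n");

      controller->Finalize();
      return 0;
    }
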
diff --git a/Rendering/Annotation/CMakeLists.txt b/Rendering/Annotation/CMakeLists.txt
index 79e63c9..c43a1c4 100644
--- a/Rendering/Annotation/CMakeLists.txt
+++ b/Rendering/Annotation/CMakeLists.txt
@@ -15,6 +15,7 @@ set(Module_SRCS
   vtkLegendScaleActor.cxx
   vtkPieChartActor.cxx
   vtkPolarAxesActor.cxx
+  vtkProp3DAxisFollower.cxx
   vtkScalarBarActor.cxx
   vtkSpiderPlotActor.cxx
   vtkXYPlotActor.cxx
diff --git a/Rendering/Annotation/Testing/Cxx/CMakeLists.txt b/Rendering/Annotation/Testing/Cxx/CMakeLists.txt
index 4df6c2b..be8be1c 100644
--- a/Rendering/Annotation/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/Annotation/Testing/Cxx/CMakeLists.txt
@@ -1,5 +1,5 @@
-set(MyTests
-  # TestAxisActor3D.cxx # todo (may not be in vtk in the first place)
+vtk_add_test_cxx(
+  TestAxisActor3D.cxx
   TestBarChartActor.cxx
   TestCaptionActor2D.cxx
   TestCubeAxes2DMode.cxx
@@ -25,26 +25,6 @@ set(MyTests
   TestCubeAxesInnerGridClosest.cxx
   TestCubeAxesInnerGridFurthest.cxx
   TestXYPlotActor.cxx
-)
+  )
 
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Hybrid/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Rendering/Annotation/Testing/Cxx/TestAxisActor3D.cxx b/Rendering/Annotation/Testing/Cxx/TestAxisActor3D.cxx
index a9fd14d..254a7f2 100644
--- a/Rendering/Annotation/Testing/Cxx/TestAxisActor3D.cxx
+++ b/Rendering/Annotation/Testing/Cxx/TestAxisActor3D.cxx
@@ -12,7 +12,9 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
-// This tests the spider plot capabilities in VTK.
+#include "vtkSphereSource.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkActor.h"
 #include "vtkAxisActor.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
@@ -21,53 +23,73 @@
 #include "vtkStringArray.h"
 #include "vtkSmartPointer.h"
 #include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
 
 //----------------------------------------------------------------------------
-int TestAxisActor3D( int argc, char * argv [] )
+int TestAxisActor3D( int vtkNotUsed(argc), char * vtkNotUsed(argv) [] )
 {
   // Create the axis actor
-  vtkSmartPointer<vtkAxisActor> axis = vtkSmartPointer<vtkAxisActor>::New();
+  vtkSmartPointer<vtkAxisActor> axis =
+    vtkSmartPointer<vtkAxisActor>::New();
   axis->SetPoint1(0,0,0);
-  axis->SetPoint2(1,1,1);
+  axis->SetPoint2(1,1,0);
   axis->SetBounds(0,1,0,0,0,0);
   axis->SetTickLocationToBoth();
   axis->SetAxisTypeToX();
   axis->SetTitle("1.0");
+  axis->SetTitleScale(0.5);
+  axis->SetTitleVisibility(1);
   axis->SetMajorTickSize(0.01);
   axis->SetRange(0,1);
-  vtkSmartPointer<vtkStringArray> labels = vtkSmartPointer<vtkStringArray>::New();
-  labels->SetNumberOfTuples(3);
+
+  vtkSmartPointer<vtkStringArray> labels =
+    vtkSmartPointer<vtkStringArray>::New();
+  labels->SetNumberOfTuples(1);
   labels->SetValue(0,"X");
-  labels->SetValue(1,"Y");
-  labels->SetValue(2,"Z");
-//  axis->SetLabels(labels);
-  axis->SetLabelScale(0.01);
-  axis->SetTitleScale(0.02);
+
+  axis->SetLabels(labels);
+  axis->SetLabelScale(.2);
   axis->MinorTicksVisibleOff();
-  axis->SetDeltaMajor(0.1);
+  axis->SetDeltaMajor(0,.1);
+  axis->SetCalculateTitleOffset(0);
+  axis->SetCalculateLabelOffset(0);
+  axis->Print(std::cout);
+
+  vtkSmartPointer<vtkSphereSource> source =
+    vtkSmartPointer<vtkSphereSource>::New();
+  source->SetCenter(1,1,1);
+  vtkSmartPointer<vtkPolyDataMapper> mapper =
+    vtkSmartPointer<vtkPolyDataMapper>::New();
+  mapper->SetInputConnection(source->GetOutputPort());
+
+  vtkSmartPointer<vtkActor> actor =
+    vtkSmartPointer<vtkActor>::New();
+  actor->SetMapper(mapper);
 
   // Create the RenderWindow, Renderer and both Actors
-  vtkSmartPointer<vtkRenderer> ren1 = vtkRenderer::New();
-  vtkSmartPointer<vtkRenderWindow> renWin = vtkRenderWindow::New();
+  vtkSmartPointer<vtkRenderer> ren1 =
+    vtkSmartPointer<vtkRenderer>::New();
+  vtkSmartPointer<vtkRenderWindow> renWin =
+    vtkSmartPointer<vtkRenderWindow>::New();
   renWin->AddRenderer(ren1);
-  vtkSmartPointer<vtkRenderWindowInteractor> iren = vtkRenderWindowInteractor::New();
+  vtkSmartPointer<vtkRenderWindowInteractor> iren =
+    vtkSmartPointer<vtkRenderWindowInteractor>::New();
   iren->SetRenderWindow(renWin);
 
   axis->SetCamera(ren1->GetActiveCamera());
 
+  ren1->AddActor(actor);
   ren1->AddActor(axis);
-  ren1->SetBackground(0,0,0);
+
+  ren1->SetBackground(.3, .4, .5);
   renWin->SetSize(500,200);
+  ren1->ResetCamera();
+  ren1->ResetCameraClippingRange();
 
   // render the image
+  iren->Initialize();
   renWin->Render();
 
-  int retVal = vtkRegressionTestImage( renWin );
-  if ( retVal == vtkRegressionTester::DO_INTERACTOR)
-    {
-    iren->Start();
-    }
+  iren->Start();
 
-  return !retVal;
+  return EXIT_SUCCESS;
 }
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestAxisActor3D.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestAxisActor3D.png.md5
new file mode 100644
index 0000000..ea8cf94
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestAxisActor3D.png.md5
@@ -0,0 +1 @@
+4c2eb306867d637866e1d0c3a1c55d27
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestBarChartActor.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestBarChartActor.png.md5
new file mode 100644
index 0000000..898f121
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestBarChartActor.png.md5
@@ -0,0 +1 @@
+04429ee8d697b00be9b658c152cc34a7
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor.png.md5
new file mode 100644
index 0000000..3b372b3
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor.png.md5
@@ -0,0 +1 @@
+a84ace11ef6ec63994f9be321a24b5f9
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor2D.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor2D.png.md5
new file mode 100644
index 0000000..e90232a
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor2D.png.md5
@@ -0,0 +1 @@
+8da4ee7c48f341511c0e61a4e851b8c9
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor_1.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor_1.png.md5
new file mode 100644
index 0000000..bb356d3
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCaptionActor_1.png.md5
@@ -0,0 +1 @@
+cef3b676f5b613333f0299bef1304b2d
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxes2DMode.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxes2DMode.png.md5
new file mode 100644
index 0000000..4dbc9b3
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxes2DMode.png.md5
@@ -0,0 +1 @@
+691dadad521a920ac166ef97da4564ab
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxes3.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxes3.png.md5
new file mode 100644
index 0000000..b0457ad
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxes3.png.md5
@@ -0,0 +1 @@
+c700c7b40176fa188b0da816b8bbb0b9
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridAll.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridAll.png.md5
new file mode 100644
index 0000000..3ad6c60
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridAll.png.md5
@@ -0,0 +1 @@
+4b94e1b2064e6c640a6f5a5fe0cba742
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridClosest.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridClosest.png.md5
new file mode 100644
index 0000000..47597e1
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridClosest.png.md5
@@ -0,0 +1 @@
+1ffc8ccafbd05df7a7c94fb88762451e
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridFurthest.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridFurthest.png.md5
new file mode 100644
index 0000000..23fcda5
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesInnerGridFurthest.png.md5
@@ -0,0 +1 @@
+dc5a3945e333a404261b6c52e706a695
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesIntersectionPoint.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesIntersectionPoint.png.md5
new file mode 100644
index 0000000..46b2d19
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesIntersectionPoint.png.md5
@@ -0,0 +1 @@
+96f82f92b8d31e9db70fed13819c2f9e
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesOrientedBoundingBox.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesOrientedBoundingBox.png.md5
new file mode 100644
index 0000000..3ad6c60
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesOrientedBoundingBox.png.md5
@@ -0,0 +1 @@
+4b94e1b2064e6c640a6f5a5fe0cba742
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithGridLines.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithGridLines.png.md5
new file mode 100644
index 0000000..36b497c
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithGridLines.png.md5
@@ -0,0 +1 @@
+c89f95753df6e4268f83855a5e865735
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerGrids.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerGrids.png.md5
new file mode 100644
index 0000000..58b28c1
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerGrids.png.md5
@@ -0,0 +1 @@
+5116f11b9146375df2489884229bded0
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerPolys.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerPolys.png.md5
new file mode 100644
index 0000000..74aeb69
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerPolys.png.md5
@@ -0,0 +1 @@
+eca7de481f3bd888bc6ef851547e1c5d
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerPolys_1.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerPolys_1.png.md5
new file mode 100644
index 0000000..f6b9694
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXInnerPolys_1.png.md5
@@ -0,0 +1 @@
+1a3263fb4ddef12f1523042cc06905e9
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXLines.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXLines.png.md5
new file mode 100644
index 0000000..e6a5936
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXLines.png.md5
@@ -0,0 +1 @@
+62510648f6422c433b439f9b4900a7b2
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXLines_1.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXLines_1.png.md5
new file mode 100644
index 0000000..63860ba
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithXLines_1.png.md5
@@ -0,0 +1 @@
+8013507c5529ab828bce33cd1c475d88
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerGrids.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerGrids.png.md5
new file mode 100644
index 0000000..b758a76
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerGrids.png.md5
@@ -0,0 +1 @@
+6ac833a66966e30c3e3786e366ed3709
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerPolys.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerPolys.png.md5
new file mode 100644
index 0000000..a5aca09
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerPolys.png.md5
@@ -0,0 +1 @@
+0833e886b5e31fb541f196e0368bdca9
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerPolys_1.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerPolys_1.png.md5
new file mode 100644
index 0000000..7661bb5
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYInnerPolys_1.png.md5
@@ -0,0 +1 @@
+9f40e20a14aa377464f954e896d0fe77
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYLines.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYLines.png.md5
new file mode 100644
index 0000000..4a2624f
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYLines.png.md5
@@ -0,0 +1 @@
+ab717ea746594380e68d787f6508c22d
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYLines_1.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYLines_1.png.md5
new file mode 100644
index 0000000..a5ed0b7
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithYLines_1.png.md5
@@ -0,0 +1 @@
+a72e76899218d5d6272fc4bd2ab362ba
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZInnerGrids.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZInnerGrids.png.md5
new file mode 100644
index 0000000..ed41599
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZInnerGrids.png.md5
@@ -0,0 +1 @@
+d1e3d067d33b3316c0efa76e847ab41e
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZInnerPolys.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZInnerPolys.png.md5
new file mode 100644
index 0000000..3fa47e5
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZInnerPolys.png.md5
@@ -0,0 +1 @@
+a9220b5d85ad43ce5d265ae10e9d763a
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZLines.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZLines.png.md5
new file mode 100644
index 0000000..5a648c5
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestCubeAxesWithZLines.png.md5
@@ -0,0 +1 @@
+ae7e743aebeb584e14778a045376b0f8
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestLegendBoxActor.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestLegendBoxActor.png.md5
new file mode 100644
index 0000000..eb55064
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestLegendBoxActor.png.md5
@@ -0,0 +1 @@
+4cd8dd4e342d6a9f11d6230c6279bb5d
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestLegendScaleActor.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestLegendScaleActor.png.md5
new file mode 100644
index 0000000..3987701
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestLegendScaleActor.png.md5
@@ -0,0 +1 @@
+95c7d56f81fcc353b075455692b34966
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestPieChartActor.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestPieChartActor.png.md5
new file mode 100644
index 0000000..43491af
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestPieChartActor.png.md5
@@ -0,0 +1 @@
+e6ad191d6bcccba8e581e33543086147
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestPolarAxes.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestPolarAxes.png.md5
new file mode 100644
index 0000000..fc0e4fd
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestPolarAxes.png.md5
@@ -0,0 +1 @@
+d64f6a0f7e410f8f512a52d497dceb94
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestSpiderPlotActor.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestSpiderPlotActor.png.md5
new file mode 100644
index 0000000..131022b
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestSpiderPlotActor.png.md5
@@ -0,0 +1 @@
+ead5faed95820ec7c4bf16d60b12b9f3
diff --git a/Rendering/Annotation/Testing/Data/Baseline/TestXYPlotActor.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/TestXYPlotActor.png.md5
new file mode 100644
index 0000000..e5b8679
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/TestXYPlotActor.png.md5
@@ -0,0 +1 @@
+52f3303605ebb8343367b1427bf1d91b
diff --git a/Rendering/Annotation/Testing/Data/Baseline/bore.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/bore.png.md5
new file mode 100644
index 0000000..e1770f6
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/bore.png.md5
@@ -0,0 +1 @@
+0638648d695e535739442f1a22727f7e
diff --git a/Rendering/Annotation/Testing/Data/Baseline/cubeAxes.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/cubeAxes.png.md5
new file mode 100644
index 0000000..93ab8b6
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/cubeAxes.png.md5
@@ -0,0 +1 @@
+6b4583336635f57c04ba32972b11f63b
diff --git a/Rendering/Annotation/Testing/Data/Baseline/cubeAxes2.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/cubeAxes2.png.md5
new file mode 100644
index 0000000..e31f303
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/cubeAxes2.png.md5
@@ -0,0 +1 @@
+53dc189b6649f24afaaf77e94906350e
diff --git a/Rendering/Annotation/Testing/Data/Baseline/cubeAxes3.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/cubeAxes3.png.md5
new file mode 100644
index 0000000..6988dde
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/cubeAxes3.png.md5
@@ -0,0 +1 @@
+4b2f163f87d9efaec21bc7951109aee6
diff --git a/Rendering/Annotation/Testing/Data/Baseline/xyPlot.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/xyPlot.png.md5
new file mode 100644
index 0000000..ce1fea3
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/xyPlot.png.md5
@@ -0,0 +1 @@
+196e4ea3291b715f85476b4c5a956b9f
diff --git a/Rendering/Annotation/Testing/Data/Baseline/xyPlot2.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/xyPlot2.png.md5
new file mode 100644
index 0000000..6d76aea
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/xyPlot2.png.md5
@@ -0,0 +1 @@
+4a158d59253c6015ca93f0ff651106ca
diff --git a/Rendering/Annotation/Testing/Data/Baseline/xyPlot3.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/xyPlot3.png.md5
new file mode 100644
index 0000000..a0bfd2f
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/xyPlot3.png.md5
@@ -0,0 +1 @@
+04770d5576f14ea141682ac0cf58537a
diff --git a/Rendering/Annotation/Testing/Data/Baseline/xyPlot4.png.md5 b/Rendering/Annotation/Testing/Data/Baseline/xyPlot4.png.md5
new file mode 100644
index 0000000..6d76aea
--- /dev/null
+++ b/Rendering/Annotation/Testing/Data/Baseline/xyPlot4.png.md5
@@ -0,0 +1 @@
+4a158d59253c6015ca93f0ff651106ca
diff --git a/Rendering/Annotation/Testing/Python/CMakeLists.txt b/Rendering/Annotation/Testing/Python/CMakeLists.txt
index 9ca1798..ba2e8b9 100644
--- a/Rendering/Annotation/Testing/Python/CMakeLists.txt
+++ b/Rendering/Annotation/Testing/Python/CMakeLists.txt
@@ -1,12 +1,9 @@
-add_test_python(TestCaptionActor.py Hybrid)
-add_test_python(bore.py Hybrid)
-add_test_python(cubeAxes.py Hybrid)
-add_test_python(cubeAxes2.py Hybrid)
-add_test_python(cubeAxes3.py Hybrid)
-
-if (VTK_DATA_ROOT)
-  add_test_python(xyPlot.py Hybrid)
-  add_test_python(xyPlot2.py Hybrid)
-  add_test_python(xyPlot3.py Hybrid)
-  add_test_python(xyPlot4.py Hybrid)
-endif()
+vtk_add_test_python(TestCaptionActor.py)
+vtk_add_test_python(bore.py)
+vtk_add_test_python(cubeAxes.py)
+vtk_add_test_python(xyPlot.py)
+vtk_add_test_python(xyPlot2.py)
+vtk_add_test_python(xyPlot3.py)
+vtk_add_test_python(xyPlot4.py)
+vtk_add_test_python(cubeAxes2.py)
+vtk_add_test_python(cubeAxes3.py)
diff --git a/Rendering/Annotation/Testing/Tcl/CMakeLists.txt b/Rendering/Annotation/Testing/Tcl/CMakeLists.txt
index 168274a..6372e6e 100644
--- a/Rendering/Annotation/Testing/Tcl/CMakeLists.txt
+++ b/Rendering/Annotation/Testing/Tcl/CMakeLists.txt
@@ -1,12 +1,9 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(bore Hybrid)
-  add_test_tcl(cubeAxes Hybrid)
-  add_test_tcl(cubeAxes2 Hybrid)
-  add_test_tcl(cubeAxes3 Hybrid)
-  add_test_tcl(xyPlot Hybrid)
-  add_test_tcl(xyPlot2 Hybrid)
-  add_test_tcl(xyPlot3 Hybrid)
-  add_test_tcl(xyPlot4 Hybrid)
-endif()
-
-add_test_tcl(TestCaptionActor Hybrid)
+vtk_add_test_tcl(bore)
+vtk_add_test_tcl(cubeAxes)
+vtk_add_test_tcl(cubeAxes2)
+vtk_add_test_tcl(cubeAxes3)
+vtk_add_test_tcl(xyPlot)
+vtk_add_test_tcl(xyPlot2)
+vtk_add_test_tcl(xyPlot3)
+vtk_add_test_tcl(xyPlot4)
+vtk_add_test_tcl(TestCaptionActor)
diff --git a/Rendering/Annotation/module.cmake b/Rendering/Annotation/module.cmake
index 68e6135..ddfde07 100644
--- a/Rendering/Annotation/module.cmake
+++ b/Rendering/Annotation/module.cmake
@@ -4,6 +4,8 @@ vtk_module(vtkRenderingAnnotation
   DEPENDS
     vtkRenderingFreeType
     vtkImagingColor
+  PRIVATE_DEPENDS
+    vtkFiltersSources
   TEST_DEPENDS
     vtkIOGeometry
     vtkRenderingFreeTypeOpenGL
diff --git a/Rendering/Annotation/vtkAxisActor.cxx b/Rendering/Annotation/vtkAxisActor.cxx
index 6b31cfd..abf8a08 100644
--- a/Rendering/Annotation/vtkAxisActor.cxx
+++ b/Rendering/Annotation/vtkAxisActor.cxx
@@ -24,10 +24,12 @@
 #include "vtkObjectFactory.h"
 #include "vtkPolyData.h"
 #include "vtkPolyDataMapper.h"
+#include "vtkProp3DAxisFollower.h"
 #include "vtkProperty.h"
 #include "vtkProperty2D.h"
 #include "vtkStringArray.h"
 #include "vtkTextActor.h"
+#include "vtkTextActor3D.h"
 #include "vtkTextProperty.h"
 #include "vtkVectorText.h"
 #include "vtkViewport.h"
@@ -66,31 +68,46 @@ vtkAxisActor::vtkAxisActor()
   this->Bounds[0] = this->Bounds[2] = this->Bounds[4] = -1;
   this->Bounds[1] = this->Bounds[3] = this->Bounds[5] = 1;
 
+  this->UseTextActor3D = 0;
   this->LabelFormat = new char[8];
   sprintf(this->LabelFormat, "%s", "%-#6.3g");
 
   this->TitleTextProperty = vtkTextProperty::New();
   this->TitleTextProperty->SetColor(0.,0.,0.);
   this->TitleTextProperty->SetFontFamilyToArial();
+  this->TitleTextProperty->SetFontSize(18.);
+  this->TitleTextProperty->SetVerticalJustificationToCentered();
+  this->TitleTextProperty->SetJustificationToCentered();
 
   this->TitleVector = vtkVectorText::New();
   this->TitleMapper = vtkPolyDataMapper::New();
   this->TitleMapper->SetInputConnection(
     this->TitleVector->GetOutputPort());
   this->TitleActor = vtkAxisFollower::New();
+  this->TitleActor->SetAxis(this);
   this->TitleActor->SetMapper(this->TitleMapper);
   this->TitleActor->SetEnableDistanceLOD(0);
+  this->TitleProp3D = vtkProp3DAxisFollower::New();
+  this->TitleProp3D->SetAxis(this);
+  this->TitleProp3D->SetEnableDistanceLOD(0);
+  this->TitleActor3D = vtkTextActor3D::New();
+  this->TitleProp3D->SetProp3D(this->TitleActor3D);
   this->TitleActor2D = vtkTextActor::New();
 
   this->NumberOfLabelsBuilt = 0;
   this->LabelVectors = NULL;
   this->LabelMappers = NULL;
   this->LabelActors = NULL;
+  this->LabelProps3D = NULL;
+  this->LabelActors3D = NULL;
   this->LabelActors2D = NULL;
 
   this->LabelTextProperty = vtkTextProperty::New();
   this->LabelTextProperty->SetColor(0.,0.,0.);
   this->LabelTextProperty->SetFontFamilyToArial();
+  this->LabelTextProperty->SetFontSize(14.);
+  this->LabelTextProperty->SetVerticalJustificationToBottom();
+  this->LabelTextProperty->SetJustificationToLeft();
 
   this->AxisLines = vtkPolyData::New();
   this->AxisLinesMapper = vtkPolyDataMapper::New();
@@ -219,11 +236,8 @@ vtkAxisActor::~vtkAxisActor()
     this->Point2Coordinate = NULL;
     }
 
-  if (this->LabelFormat)
-    {
-    delete [] this->LabelFormat;
-    this->LabelFormat = NULL;
-    }
+  delete [] this->LabelFormat;
+  this->LabelFormat = NULL;
 
   if (this->TitleVector)
     {
@@ -240,17 +254,18 @@ vtkAxisActor::~vtkAxisActor()
     this->TitleActor->Delete();
     this->TitleActor = NULL;
     }
+  this->TitleProp3D->Delete();
+  this->TitleProp3D = NULL;
+  this->TitleActor3D->Delete();
+  this->TitleActor3D = NULL;
   if (this->TitleActor2D)
     {
     this->TitleActor2D->Delete();
     this->TitleActor2D = NULL;
     }
 
-  if (this->Title)
-    {
-    delete [] this->Title;
-    this->Title = NULL;
-    }
+  delete [] this->Title;
+  this->Title = NULL;
 
   if (this->TitleTextProperty)
     {
@@ -265,16 +280,22 @@ vtkAxisActor::~vtkAxisActor()
       this->LabelVectors[i]->Delete();
       this->LabelMappers[i]->Delete();
       this->LabelActors[i]->Delete();
+      this->LabelProps3D[i]->Delete();
+      this->LabelActors3D[i]->Delete();
       this->LabelActors2D[i]->Delete();
       }
     this->NumberOfLabelsBuilt = 0;
     delete [] this->LabelVectors;
     delete [] this->LabelMappers;
     delete [] this->LabelActors;
+    delete [] this->LabelProps3D;
+    delete [] this->LabelActors3D;
     delete [] this->LabelActors2D;
     this->LabelVectors = NULL;
     this->LabelMappers = NULL;
     this->LabelActors = NULL;
+    this->LabelProps3D = NULL;
+    this->LabelActors3D = NULL;
     this->LabelActors2D = NULL;
     }
   if (this->LabelTextProperty)
@@ -378,10 +399,14 @@ vtkAxisActor::~vtkAxisActor()
 void vtkAxisActor::ReleaseGraphicsResources(vtkWindow *win)
 {
   this->TitleActor->ReleaseGraphicsResources(win);
+  this->TitleProp3D->ReleaseGraphicsResources(win);
+  this->TitleActor3D->ReleaseGraphicsResources(win);
   this->TitleActor2D->ReleaseGraphicsResources(win);
   for (int i=0; i < this->NumberOfLabelsBuilt; i++)
     {
     this->LabelActors[i]->ReleaseGraphicsResources(win);
+    this->LabelProps3D[i]->ReleaseGraphicsResources(win);
+    this->LabelActors3D[i]->ReleaseGraphicsResources(win);
     this->LabelActors2D[i]->ReleaseGraphicsResources(win);
     }
   this->AxisLinesActor->ReleaseGraphicsResources(win);
@@ -408,13 +433,17 @@ int vtkAxisActor::RenderOpaqueGeometry(vtkViewport *viewport)
       }
     if (this->Title != NULL && this->Title[0] != 0 && this->TitleVisibility)
       {
-      if (this->Use2DMode == 0)
+      if (this->Use2DMode)
         {
-        renderedSomething += this->TitleActor->RenderOpaqueGeometry(viewport);
+        renderedSomething += this->TitleActor2D->RenderOpaqueGeometry(viewport);
+        }
+      else if (this->UseTextActor3D)
+        {
+        renderedSomething += this->TitleProp3D->RenderOpaqueGeometry(viewport);
         }
       else
         {
-        renderedSomething += this->TitleActor2D->RenderOpaqueGeometry(viewport);
+        renderedSomething += this->TitleActor->RenderOpaqueGeometry(viewport);
         }
       }
     if (this->AxisVisibility || this->TickVisibility)
@@ -433,15 +462,20 @@ int vtkAxisActor::RenderOpaqueGeometry(vtkViewport *viewport)
       {
       for (i=0; i<this->NumberOfLabelsBuilt; i++)
         {
-        if (this->Use2DMode == 0)
+        if (this->Use2DMode)
           {
           renderedSomething +=
-            this->LabelActors[i]->RenderOpaqueGeometry(viewport);
+            this->LabelActors2D[i]->RenderOpaqueGeometry(viewport);
+          }
+        else if (this->UseTextActor3D)
+          {
+          renderedSomething +=
+            this->LabelActors3D[i]->RenderOpaqueGeometry(viewport);
           }
         else
           {
           renderedSomething +=
-            this->LabelActors2D[i]->RenderOpaqueGeometry(viewport);
+            this->LabelActors[i]->RenderOpaqueGeometry(viewport);
           }
         }
       }
@@ -476,8 +510,43 @@ int vtkAxisActor::RenderTranslucentPolygonalGeometry(vtkViewport *viewport)
       {
       renderedSomething += this->GridpolysActor->RenderTranslucentPolygonalGeometry(viewport);
       }
+    if (this->Title != NULL && this->Title[0] != 0 && this->TitleVisibility)
+      {
+      if (this->Use2DMode)
+        {
+        renderedSomething += this->TitleActor2D->RenderTranslucentPolygonalGeometry(viewport);
+        }
+      else if (this->UseTextActor3D)
+        {
+        renderedSomething += this->TitleProp3D->RenderTranslucentPolygonalGeometry(viewport);
+        }
+      else
+        {
+        renderedSomething += this->TitleActor->RenderTranslucentPolygonalGeometry(viewport);
+        }
+      }
+    if (this->LabelVisibility)
+      {
+      for (int i=0; i<this->NumberOfLabelsBuilt; i++)
+        {
+        if (this->Use2DMode)
+          {
+          renderedSomething +=
+            this->LabelActors2D[i]->RenderTranslucentPolygonalGeometry(viewport);
+          }
+        else if (this->UseTextActor3D)
+          {
+          renderedSomething +=
+            this->LabelProps3D[i]->RenderTranslucentPolygonalGeometry(viewport);
+          }
+        else
+          {
+          renderedSomething +=
+            this->LabelActors[i]->RenderTranslucentPolygonalGeometry(viewport);
+          }
+        }
+      }
     }
-
   return renderedSomething;
 }
 
@@ -491,18 +560,34 @@ int vtkAxisActor::RenderOverlay(vtkViewport *viewport)
   // Everything is built, just have to render
   if (!this->AxisHasZeroLength && !this->DrawGridlinesOnly)
     {
-    if( this->Use2DMode == 1 )
+    if (this->Use2DMode)
       {
       renderedSomething += this->TitleActor2D->RenderOverlay(viewport);
       }
+    else if (this->UseTextActor3D)
+      {
+      renderedSomething += this->TitleProp3D->RenderOverlay(viewport);
+      }
+    else
+      {
+      renderedSomething += this->TitleActor->RenderOverlay(viewport);
+      }
     if (this->LabelVisibility)
       {
       for (i=0; i<this->NumberOfLabelsBuilt; i++)
         {
-        if (this->Use2DMode == 1)
+        if (this->Use2DMode)
           {
           renderedSomething += this->LabelActors2D[i]->RenderOverlay(viewport);
           }
+        else if (this->UseTextActor3D)
+          {
+          renderedSomething += this->LabelProps3D[i]->RenderOverlay(viewport);
+          }
+        else
+          {
+          renderedSomething += this->LabelActors[i]->RenderOverlay(viewport);
+          }
         }
       }
     }
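The render methods above now dispatch between three text paths: the 2D vtkTextActor overlay (Use2DMode), the new vtkTextActor3D driven through a vtkProp3DAxisFollower (UseTextActor3D), and the original vtkVectorText/vtkAxisFollower geometry. A minimal sketch of selecting the new path, assuming the header adds the usual Set/Get accessors for both flags (they are not shown in these hunks):

    #include "vtkAxisActor.h"
    #include "vtkRenderer.h"

    void UseTextActor3DPath(vtkAxisActor* axis, vtkRenderer* renderer)
    {
      axis->SetCamera(renderer->GetActiveCamera());
      axis->SetUse2DMode(0);        // skip the vtkTextActor overlay path
      axis->SetUseTextActor3D(1);   // title/labels via vtkTextActor3D followers
      renderer->AddActor(axis);
    }
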
@@ -525,6 +610,13 @@ int vtkAxisActor::HasTranslucentPolygonalGeometry()
           return 1;
           }
         }
+      else if (this->UseTextActor3D)
+        {
+        if (this->TitleProp3D->HasTranslucentPolygonalGeometry())
+          {
+          return 1;
+          }
+        }
       else
         {
         if (this->TitleActor->HasTranslucentPolygonalGeometry())
@@ -546,6 +638,17 @@ int vtkAxisActor::HasTranslucentPolygonalGeometry()
             } // end if
           } // end for
         } // end 2D
+      else if (this->UseTextActor3D)
+        {
+        for (int i = 0; i < this->NumberOfLabelsBuilt; ++i)
+          {
+          //if (this->LabelActors3D[i]->HasTranslucentPolygonalGeometry())
+          if (this->LabelProps3D[i]->HasTranslucentPolygonalGeometry())
+            {
+            return 1;
+            } // end if
+          } // end for
+        } // end 3D
       else
         {
         for (int i = 0; i < this->NumberOfLabelsBuilt; ++i)
@@ -625,6 +728,11 @@ void vtkAxisActor::BuildAxis(vtkViewport *viewport, bool force)
       //this->AxisLinesActor->SetProperty(this->GetProperty());
     this->TitleActor->SetProperty(this->GetProperty());
     this->TitleActor->GetProperty()->SetColor(this->TitleTextProperty->GetColor());
+    this->TitleActor->GetProperty()->SetOpacity(this->TitleTextProperty->GetOpacity());
+    if (this->UseTextActor3D)
+      {
+      this->TitleActor3D->GetTextProperty()->ShallowCopy(this->TitleTextProperty);
+      }
     }
 
   //
@@ -679,12 +787,31 @@ vtkAxisActor::BuildLabels(vtkViewport *viewport, bool force)
   for (int i = 0; i < this->NumberOfLabelsBuilt; i++)
     {
     this->LabelActors[i]->SetCamera(this->Camera);
+    this->LabelProps3D[i]->SetCamera(this->Camera);
     this->LabelActors[i]->GetProperty()->SetColor(this->LabelTextProperty->GetColor());
+    this->LabelActors[i]->GetProperty()->SetOpacity(this->LabelTextProperty->GetOpacity());
     this->LabelActors[i]->SetOrientation(0., 0., this->LabelTextProperty->GetOrientation());
+    this->LabelProps3D[i]->SetOrientation(0., 0., this->LabelTextProperty->GetOrientation());
+
+    if (this->UseTextActor3D)
+      {
+      this->LabelActors3D[i]->GetTextProperty()->ShallowCopy(this->LabelTextProperty);
 
+      double labelActorsBounds[6];
+      this->LabelActors[i]->GetMapper()->GetBounds(labelActorsBounds);
+      const double labelActorsWidth = (labelActorsBounds[1] - labelActorsBounds[0]);
+
+      int labelActors3DBounds[4];
+      this->LabelActors3D[i]->GetBoundingBox(labelActors3DBounds);
+      const double labelActors3DWidth =
+        static_cast<double>(labelActors3DBounds[1] - labelActors3DBounds[0]);
+
+      this->LabelActors3D[i]->SetScale( labelActorsWidth / labelActors3DWidth );
+      }
     if(!this->GetCalculateLabelOffset())
       {
       this->LabelActors[i]->SetAutoCenter(1);
+      this->LabelProps3D[i]->SetAutoCenter(1);
       }
     }
 
@@ -775,9 +902,11 @@ void vtkAxisActor::SetLabelPositions(vtkViewport *viewport, bool force)
 
       double delta  = 0.5 * ((bounds[1] - bounds[0]) * labelSin + (bounds[3] - bounds[2]) * labelCos);
       this->LabelActors[i]->SetScreenOffset(this->LabelOffset + (delta) * this->ScreenSize);
+      this->LabelProps3D[i]->SetScreenOffset(this->LabelOffset + (delta) * this->ScreenSize);
       }
 
     this->LabelActors[i]->SetPosition(pos[0], pos[1], pos[2]);
+    this->LabelProps3D[i]->SetPosition(pos[0], pos[1], pos[2]);
     }
 }
 
@@ -793,10 +922,8 @@ vtkAxisActor::BuildLabels2D(vtkViewport *viewport, bool force)
   for (int i = 0; i < this->NumberOfLabelsBuilt; i++)
     {
     this->LabelActors2D[i]->GetProperty()->SetColor(this->LabelTextProperty->GetColor());
-    this->LabelActors2D[i]->GetProperty()->SetOpacity(1);
-    this->LabelActors2D[i]->GetTextProperty()->SetFontSize(14);
-    this->LabelActors2D[i]->GetTextProperty()->SetVerticalJustificationToBottom();
-    this->LabelActors2D[i]->GetTextProperty()->SetJustificationToLeft();
+    this->LabelActors2D[i]->GetProperty()->SetOpacity(this->LabelTextProperty->GetOpacity());
+    this->LabelActors2D[i]->GetTextProperty()->ShallowCopy(this->LabelTextProperty);
     }
 
   this->NeedBuild2D = this->BoundsDisplayCoordinateChanged(viewport);
@@ -953,14 +1080,33 @@ void vtkAxisActor::BuildTitle(bool force)
     }
 
   this->TitleVector->SetText(this->Title);
+  this->TitleActor3D->SetInput( this->Title );
 
   this->TitleActor->GetProperty()->SetColor(this->TitleTextProperty->GetColor());
   this->TitleActor->SetCamera(this->Camera);
-  this->TitleActor->SetPosition(p2[0], p2[1], p2[2]);
+  this->TitleProp3D->SetCamera(this->Camera);
   this->TitleActor->GetMapper()->GetBounds(titleBounds);
+
   if(!this->GetCalculateTitleOffset())
     {
     this->TitleActor->SetAutoCenter(1);
+    this->TitleProp3D->SetAutoCenter(1);
+    }
+
+  if (this->UseTextActor3D)
+    {
+    double titleActorBounds[6];
+    this->TitleActor->GetMapper()->GetBounds(titleActorBounds);
+    const double titleActorWidth = (titleActorBounds[1] - titleActorBounds[0]);
+
+    int titleActor3DBounds[4];
+    this->TitleActor3D->GetBoundingBox(titleActor3DBounds);
+    const double titleActor3DWidth =
+      static_cast<double>(titleActor3DBounds[1] - titleActor3DBounds[0]);
+
+    // Convert from font coordinate system to world coordinate system:
+    this->TitleActor3D->SetScale(
+      titleActorWidth / titleActor3DWidth);
     }
 
   center[0] = p1[0] + (p2[0] - p1[0]) / 2.0;
@@ -978,6 +1124,8 @@ void vtkAxisActor::BuildTitle(bool force)
     {
     this->TitleActor->SetScreenOffset(this->TitleOffset +
       this->LabelOffset + this->ScreenSize * (maxHeight + halfTitleHeight));
+    this->TitleProp3D->SetScreenOffset(this->TitleOffset +
+      this->LabelOffset + this->ScreenSize * (maxHeight + halfTitleHeight));
     }
 
   pos[0] = center[0];
@@ -985,6 +1133,7 @@ void vtkAxisActor::BuildTitle(bool force)
   pos[2] = center[2];
 
   this->TitleActor->SetPosition(pos[0], pos[1], pos[2]);
+  this->TitleProp3D->SetPosition(pos[0], pos[1], pos[2]);
 }
 
 // **********************************************************************
@@ -1002,10 +1151,8 @@ vtkAxisActor::BuildTitle2D(vtkViewport *viewport, bool force)
   // for textactor instead of follower
   this->TitleActor2D->SetInput( this->TitleVector->GetText() );
   this->TitleActor2D->GetProperty()->SetColor( this->TitleTextProperty->GetColor() );
-  this->TitleActor2D->GetProperty()->SetOpacity(1);
-  this->TitleActor2D->GetTextProperty()->SetFontSize(18);
-  this->TitleActor2D->GetTextProperty()->SetVerticalJustificationToCentered();
-  this->TitleActor2D->GetTextProperty()->SetJustificationToCentered();
+  this->TitleActor2D->GetProperty()->SetOpacity( this->TitleTextProperty->GetOpacity() );
+  this->TitleActor2D->GetTextProperty()->ShallowCopy(this->TitleTextProperty);
 
   if (this->AxisType == VTK_AXIS_TYPE_Y)
     {
@@ -1111,6 +1258,7 @@ void vtkAxisActor::PrintSelf(ostream& os, vtkIndent indent)
      << this->Range[0] << ", "
      << this->Range[1] << ")\n";
 
+  os << indent << "UseTextActor3D: " << this->UseTextActor3D << "\n";
   os << indent << "Label Format: " << this->LabelFormat << "\n";
 
   os << indent << "Axis Visibility: "
@@ -1240,17 +1388,23 @@ void vtkAxisActor::SetLabels(vtkStringArray *labels)
         this->LabelVectors[i]->Delete();
         this->LabelMappers[i]->Delete();
         this->LabelActors[i]->Delete();
+        this->LabelProps3D[i]->Delete();
+        this->LabelActors3D[i]->Delete();
         this->LabelActors2D[i]->Delete();
         }
       delete [] this->LabelVectors;
       delete [] this->LabelMappers;
       delete [] this->LabelActors;
+      delete [] this->LabelProps3D;
+      delete [] this->LabelActors3D;
       delete [] this->LabelActors2D;
       }
 
     this->LabelVectors = new vtkVectorText * [numLabels];
     this->LabelMappers = new vtkPolyDataMapper * [numLabels];
     this->LabelActors  = new vtkAxisFollower * [numLabels];
+    this->LabelProps3D = new vtkProp3DAxisFollower * [numLabels];
+    this->LabelActors3D = new vtkTextActor3D * [numLabels];
     this->LabelActors2D = new vtkTextActor * [numLabels];
 
     for (i = 0; i < numLabels; i++)
@@ -1260,11 +1414,18 @@ void vtkAxisActor::SetLabels(vtkStringArray *labels)
       this->LabelMappers[i]->SetInputConnection(
         this->LabelVectors[i]->GetOutputPort());
       this->LabelActors[i] = vtkAxisFollower::New();
+      this->LabelActors[i]->SetAxis(this);
       this->LabelActors[i]->SetMapper(this->LabelMappers[i]);
       this->LabelActors[i]->SetEnableDistanceLOD(0);
       this->LabelActors[i]->GetProperty()->SetAmbient(1.);
       this->LabelActors[i]->GetProperty()->SetDiffuse(0.);
       this->LabelActors[i]->GetProperty()->SetColor(this->LabelTextProperty->GetColor());
+      this->LabelActors[i]->GetProperty()->SetOpacity(this->LabelTextProperty->GetOpacity());
+      this->LabelProps3D[i] = vtkProp3DAxisFollower::New();
+      this->LabelProps3D[i]->SetAxis(this);
+      this->LabelProps3D[i]->SetEnableDistanceLOD(0);
+      this->LabelActors3D[i] = vtkTextActor3D::New();
+      this->LabelProps3D[i]->SetProp3D(this->LabelActors3D[i]);
       this->LabelActors2D[i] = vtkTextActor::New();
       }
     }
@@ -1275,6 +1436,7 @@ void vtkAxisActor::SetLabels(vtkStringArray *labels)
   for (i = 0; i < numLabels; i++)
     {
     this->LabelVectors[i]->SetText(labels->GetValue(i).c_str());
+    this->LabelActors3D[i]->SetInput(this->LabelVectors[i]->GetText());
     this->LabelActors2D[i]->SetInput(this->LabelVectors[i]->GetText());
     }
   this->NumberOfLabelsBuilt = numLabels;
@@ -1419,7 +1581,7 @@ bool vtkAxisActor::TickVisibilityChanged()
 // Set the bounds for this actor to use.  Sets timestamp BoundsModified.
 // *********************************************************************
 void
-vtkAxisActor::SetBounds(double b[6])
+vtkAxisActor::SetBounds(const double b[6])
 {
   if ((this->Bounds[0] != b[0]) ||
       (this->Bounds[1] != b[1]) ||
@@ -1492,10 +1654,17 @@ double vtkAxisActor::ComputeMaxLabelLength(const double vtkNotUsed(center)[3])
   double maxYSize = 0;
   for (int i = 0; i < this->NumberOfLabelsBuilt; i++)
     {
-    this->LabelActors[i]->SetCamera(this->Camera);
-    this->LabelActors[i]->SetProperty(newProp);
-    this->LabelActors[i]->GetMapper()->GetBounds(bounds);
-    this->LabelActors[i]->GetProperty()->SetColor(this->LabelTextProperty->GetColor());
+    if (this->UseTextActor3D)
+      {
+      this->LabelProps3D[i]->SetCamera(this->Camera);
+      this->LabelActors3D[i]->GetBounds(bounds);
+      }
+    else
+      {
+      this->LabelActors[i]->SetCamera(this->Camera);
+      this->LabelActors[i]->SetProperty(newProp);
+      this->LabelActors[i]->GetMapper()->GetBounds(bounds);
+      }
     xsize = bounds[1] - bounds[0];
     ysize = bounds[3] - bounds[2];
     maxXSize = (xsize > maxXSize ? xsize : maxXSize);
@@ -1514,13 +1683,21 @@ double vtkAxisActor::ComputeTitleLength(const double vtkNotUsed(center)[3])
   double xsize, ysize;
   double length;
 
-  this->TitleVector->SetText(this->Title);
-  this->TitleActor->SetCamera(this->Camera);
-  vtkProperty * newProp = this->NewTitleProperty();
-  this->TitleActor->SetProperty(newProp);
-  newProp->Delete();
-  this->TitleActor->GetMapper()->GetBounds(bounds);
-  this->TitleActor->GetProperty()->SetColor(this->TitleTextProperty->GetColor());
+  if (this->UseTextActor3D)
+    {
+    this->TitleActor3D->SetInput(this->Title);
+    this->TitleProp3D->SetCamera(this->Camera);
+    this->TitleActor3D->GetBounds(bounds);
+    }
+  else
+    {
+    this->TitleVector->SetText(this->Title);
+    this->TitleActor->SetCamera(this->Camera);
+    vtkProperty * newProp = this->NewTitleProperty();
+    this->TitleActor->SetProperty(newProp);
+    newProp->Delete();
+    this->TitleActor->GetMapper()->GetBounds(bounds);
+    }
   xsize = bounds[1] - bounds[0];
   ysize = bounds[3] - bounds[2];
   length = sqrt(xsize*xsize + ysize*ysize);
@@ -1533,14 +1710,22 @@ void vtkAxisActor::SetLabelScale(const double s)
 {
   for (int i=0; i < this->NumberOfLabelsBuilt; i++)
     {
-    this->LabelActors[i]->SetScale(s);
+    this->SetLabelScale(i, s);
     }
 }
 
 // *********************************************************************
+void vtkAxisActor::SetLabelScale(int label, const double s)
+{
+  this->LabelActors[label]->SetScale(s);
+  this->LabelProps3D[label]->SetScale(s);
+}
+
+// *********************************************************************
 void vtkAxisActor::SetTitleScale(const double s)
 {
   this->TitleActor->SetScale(s);
+  this->TitleProp3D->SetScale(s);
 }
 
 // *********************************************************************
@@ -1554,10 +1739,7 @@ void vtkAxisActor::SetTitle(const char *t)
     {
     return;
     }
-  if (this->Title)
-    {
-    delete [] this->Title;
-    }
+  delete [] this->Title;
   if (t)
     {
     this->Title = new char[strlen(t)+1];
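The UseTextActor3D branch added to BuildTitle() above sizes the new vtkTextActor3D by matching its pixel-space bounding box to the world-space width of the vector-text follower it replaces. A minimal standalone sketch of that font-to-world conversion, using illustrative names (titleFollower, title3D) and an extra zero-width guard not present in the hunk above:

    double followerBounds[6];
    titleFollower->GetMapper()->GetBounds(followerBounds);          // world units
    const double worldWidth = followerBounds[1] - followerBounds[0];

    int textBounds[4];
    title3D->GetBoundingBox(textBounds);                            // font (pixel) units
    const double fontWidth =
      static_cast<double>(textBounds[1] - textBounds[0]);

    if (fontWidth > 0.0)
      {
      title3D->SetScale(worldWidth / fontWidth);                    // font -> world
      }
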
diff --git a/Rendering/Annotation/vtkAxisActor.h b/Rendering/Annotation/vtkAxisActor.h
index 6b51280..613ecae 100644
--- a/Rendering/Annotation/vtkAxisActor.h
+++ b/Rendering/Annotation/vtkAxisActor.h
@@ -77,9 +77,11 @@ class vtkFollower;
 class vtkPoints;
 class vtkPolyData;
 class vtkPolyDataMapper;
+class vtkProp3DAxisFollower;
 class vtkProperty2D;
 class vtkStringArray;
 class vtkTextActor;
+class vtkTextActor3D;
 class vtkTextProperty;
 class vtkVectorText;
 
@@ -117,7 +119,7 @@ class VTKRENDERINGANNOTATION_EXPORT vtkAxisActor : public vtkActor
 
   // Description:
   // Set or get the bounds for this Actor as (Xmin,Xmax,Ymin,Ymax,Zmin,Zmax).
-  void   SetBounds(double bounds[6]);
+  void   SetBounds(const double bounds[6]);
   void   SetBounds(double xmin, double xmax, double ymin, double ymax, double zmin, double zmax);
   double *GetBounds(void);
   void   GetBounds(double bounds[6]);
@@ -128,12 +130,20 @@ class VTKRENDERINGANNOTATION_EXPORT vtkAxisActor : public vtkActor
   vtkGetStringMacro(LabelFormat);
 
   // Description:
+  // Render text as polygons (vtkVectorText) or as sprites (vtkTextActor3D).
+  // In 2D mode, the value is ignored and text is rendered as vtkTextActor.
+  // False (0) by default.
+  // See Also:
+  // GetUse2DMode(), SetUse2DMode()
+  vtkSetMacro(UseTextActor3D, int);
+  vtkGetMacro(UseTextActor3D, int);
+
+  // Description:
   // Set/Get the flag that controls whether the minor ticks are visible.
   vtkSetMacro(MinorTicksVisible, int);
   vtkGetMacro(MinorTicksVisible, int);
   vtkBooleanMacro(MinorTicksVisible, int);
 
-
   // Description:
   // Set/Get the title of the axis actor,
   void SetTitle(const char *t);
@@ -304,8 +314,9 @@ class VTKRENDERINGANNOTATION_EXPORT vtkAxisActor : public vtkActor
   double ComputeMaxLabelLength(const double [3]);
   double ComputeTitleLength(const double [3]);
 //ETX
-  void SetLabelScale(const double);
-  void SetTitleScale(const double);
+  void SetLabelScale(const double scale);
+  void SetLabelScale(int labelIndex, const double scale);
+  void SetTitleScale(const double scale);
 
   // Description:
   // Set/Get the starting position for minor and major tick points,
@@ -355,6 +366,18 @@ class VTKRENDERINGANNOTATION_EXPORT vtkAxisActor : public vtkActor
     {
     return this->LabelActors;
     }
+
+  // Description:
+  // Get the title follower responsible for drawing
+  // the title text.
+  vtkGetObjectMacro(TitleProp3D,  vtkProp3DAxisFollower);
+
+  // Description:
+  // Get the label followers responsible for drawing the label text.
+  inline vtkProp3DAxisFollower** GetLabelProps3D()
+    {
+    return this->LabelProps3D;
+    }
 //ETX
 
   // Description:
@@ -433,6 +456,7 @@ class VTKRENDERINGANNOTATION_EXPORT vtkAxisActor : public vtkActor
   double  Range[2];
   double  LastRange[2];
   char  *LabelFormat;
+  int    UseTextActor3D;
   int    NumberOfLabelsBuilt;
   int    MinorTicksVisible;
   int    LastMinorTicksVisible;
@@ -524,12 +548,16 @@ class VTKRENDERINGANNOTATION_EXPORT vtkAxisActor : public vtkActor
   vtkPolyDataMapper *TitleMapper;
   vtkAxisFollower   *TitleActor;
   vtkTextActor      *TitleActor2D;
+  vtkProp3DAxisFollower *TitleProp3D;
+  vtkTextActor3D    *TitleActor3D;
   vtkTextProperty   *TitleTextProperty;
 
   vtkVectorText     **LabelVectors;
   vtkPolyDataMapper **LabelMappers;
   vtkAxisFollower   **LabelActors;
+  vtkProp3DAxisFollower **LabelProps3D;
   vtkTextActor      **LabelActors2D;
+  vtkTextActor3D    **LabelActors3D;
   vtkTextProperty    *LabelTextProperty;
 
   vtkPolyData        *AxisLines;
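From the caller's side, the new vtkAxisActor API above is one flag plus two accessors. A hedged usage sketch (axis endpoints, range, and renderer wiring omitted):

    vtkAxisActor *axis = vtkAxisActor::New();
    axis->SetUseTextActor3D(1);                         // title/labels rendered via vtkTextActor3D
    vtkProp3DAxisFollower *titleProp = axis->GetTitleProp3D();
    titleProp->SetScreenOffset(20.0);                   // same offset API as vtkAxisFollower
    // ... configure Point1/Point2, camera, range, then add to a renderer ...
    axis->Delete();
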
diff --git a/Rendering/Annotation/vtkAxisActor2D.cxx b/Rendering/Annotation/vtkAxisActor2D.cxx
index 9e2f81d..a404917 100644
--- a/Rendering/Annotation/vtkAxisActor2D.cxx
+++ b/Rendering/Annotation/vtkAxisActor2D.cxx
@@ -115,20 +115,14 @@ vtkAxisActor2D::vtkAxisActor2D()
 //----------------------------------------------------------------------------
 vtkAxisActor2D::~vtkAxisActor2D()
 {
-  if (this->LabelFormat)
-    {
-    delete [] this->LabelFormat;
-    this->LabelFormat = NULL;
-    }
+  delete [] this->LabelFormat;
+  this->LabelFormat = NULL;
 
   this->TitleMapper->Delete();
   this->TitleActor->Delete();
 
-  if (this->Title)
-    {
-    delete [] this->Title;
-    this->Title = NULL;
-    }
+  delete [] this->Title;
+  this->Title = NULL;
 
   if (this->LabelMappers != NULL )
     {
@@ -394,7 +388,7 @@ void vtkAxisActor2D::BuildAxis(vtkViewport *viewport)
   double *xp1, *xp2, len=0.0;
   if ( this->SizeFontRelativeToAxis )
     {
-    xp1 = this->PositionCoordinate->GetComputedDoubleDisplayValue(viewport);
+    xp1 = this->PositionCoordinate->GetComputedDoubleViewportValue(viewport);
     xp2 = this->Position2Coordinate->GetComputedDoubleViewportValue(viewport);
     len = sqrt((xp2[0]-xp1[0])*(xp2[0]-xp1[0]) + (xp2[1]-xp1[1])*(xp2[1]-xp1[1]));
     }
diff --git a/Rendering/Annotation/vtkAxisFollower.cxx b/Rendering/Annotation/vtkAxisFollower.cxx
index f5b21bc..b81899a 100644
--- a/Rendering/Annotation/vtkAxisFollower.cxx
+++ b/Rendering/Annotation/vtkAxisFollower.cxx
@@ -584,14 +584,11 @@ void vtkAxisFollower::Render(vtkRenderer *ren)
   this->ComputeTransformMatrix(ren);
   this->Device->SetUserMatrix(this->Matrix);
 
+  this->SetVisibility(this->VisibleAtCurrentViewAngle);
   if(this->VisibleAtCurrentViewAngle)
     {
     this->Device->Render(ren,this->Mapper);
     }
-  else
-    {
-    this->SetVisibility(this->VisibleAtCurrentViewAngle);
-    }
 }
 
 //----------------------------------------------------------------------
diff --git a/Rendering/Annotation/vtkAxisFollower.h b/Rendering/Annotation/vtkAxisFollower.h
index 3d18652..15f9d58 100644
--- a/Rendering/Annotation/vtkAxisFollower.h
+++ b/Rendering/Annotation/vtkAxisFollower.h
@@ -160,7 +160,7 @@ private:
  void operator =(const vtkAxisFollower&);  // Not implemented.
 
  // hide the two parameter Render() method from the user and the compiler.
- virtual void Render(vtkRenderer *, vtkMapper *) {};
+ virtual void Render(vtkRenderer *, vtkMapper *) {}
 
  //Internal matrices to avoid New/Delete for performance reasons
  vtkMatrix4x4 *InternalMatrix;
diff --git a/Rendering/Annotation/vtkBarChartActor.cxx b/Rendering/Annotation/vtkBarChartActor.cxx
index f5428be..61f9695 100644
--- a/Rendering/Annotation/vtkBarChartActor.cxx
+++ b/Rendering/Annotation/vtkBarChartActor.cxx
@@ -135,11 +135,8 @@ vtkBarChartActor::~vtkBarChartActor()
     this->Input = NULL;
     }
 
-  if (this->Title)
-    {
-    delete [] this->Title;
-    this->Title = NULL;
-    }
+  delete [] this->Title;
+  this->Title = NULL;
 
   delete this->Labels;
   this->SetLabelTextProperty(NULL);
@@ -156,10 +153,7 @@ vtkBarChartActor::~vtkBarChartActor()
   this->TitleActor = NULL;
 
   this->YAxis->Delete();
-  if ( this->YTitle )
-    {
-    delete [] this->YTitle;
-    }
+  delete [] this->YTitle;
 
   this->PlotData->Delete();
   this->PlotMapper->Delete();
@@ -184,11 +178,8 @@ void vtkBarChartActor::Initialize()
     }
 
   this->N = 0;
-  if ( this->Heights )
-    {
-    delete [] this->Heights;
-    this->Heights = NULL;
-    }
+  delete [] this->Heights;
+  this->Heights = NULL;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/Annotation/vtkCornerAnnotation.cxx b/Rendering/Annotation/vtkCornerAnnotation.cxx
index 0e18542..c0565b8 100644
--- a/Rendering/Annotation/vtkCornerAnnotation.cxx
+++ b/Rendering/Annotation/vtkCornerAnnotation.cxx
@@ -358,6 +358,28 @@ int vtkCornerAnnotation::RenderOverlay(vtkViewport *viewport)
   return 1;
 }
 
+namespace {
+// Ported from old vtkTextMapper implementation
+int GetNumberOfLines(const char *str)
+{
+  if (str == NULL || *str == '\0')
+    {
+    return 0;
+    }
+
+  int result = 1;
+  while (str != NULL)
+    {
+    if ((str = strstr(str, "\n")) != NULL)
+      {
+      result++;
+      str++; // Skip '\n'
+      }
+    }
+  return result;
+}
+}
+
 //----------------------------------------------------------------------------
 int vtkCornerAnnotation::RenderOpaqueGeometry(vtkViewport *viewport)
 {
@@ -495,13 +517,11 @@ int vtkCornerAnnotation::RenderOpaqueGeometry(vtkViewport *viewport)
 
       int max_width = (width_01 > width_23) ? width_01 : width_23;
 
-      int num_lines_02 =
-        this->TextMapper[0]->GetNumberOfLines() +
-        this->TextMapper[2]->GetNumberOfLines();
+      int num_lines_02 = GetNumberOfLines(this->TextMapper[0]->GetInput())
+          + GetNumberOfLines(this->TextMapper[2]->GetInput());
 
-      int num_lines_13 =
-        this->TextMapper[1]->GetNumberOfLines() +
-        this->TextMapper[3]->GetNumberOfLines();
+      int num_lines_13 = GetNumberOfLines(this->TextMapper[1]->GetInput())
+          + GetNumberOfLines(this->TextMapper[3]->GetInput());
 
       int line_max_02 = (int)(vSize[1] * this->MaximumLineHeight) *
         (num_lines_02 ? num_lines_02 : 1);
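The file-local GetNumberOfLines() helper above stands in for the removed vtkTextMapper::GetNumberOfLines(): it returns 0 for null or empty input and otherwise 1 plus the number of '\n' separators. A few hypothetical inputs and the values the helper would return:

    GetNumberOfLines(NULL);         // 0
    GetNumberOfLines("");           // 0
    GetNumberOfLines("one line");   // 1
    GetNumberOfLines("a\nb\nc");    // 3
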
diff --git a/Rendering/Annotation/vtkCubeAxesActor.cxx b/Rendering/Annotation/vtkCubeAxesActor.cxx
index 906a0fb..f22bdd0 100644
--- a/Rendering/Annotation/vtkCubeAxesActor.cxx
+++ b/Rendering/Annotation/vtkCubeAxesActor.cxx
@@ -23,6 +23,7 @@
 #include "vtkFollower.h"
 #include "vtkMath.h"
 #include "vtkObjectFactory.h"
+#include "vtkProp3DAxisFollower.h"
 #include "vtkProperty.h"
 #include "vtkStringArray.h"
 #include "vtkTextProperty.h"
@@ -81,10 +82,16 @@ vtkCubeAxesActor::vtkCubeAxesActor() : vtkActor()
     this->TitleTextProperty[i] = vtkTextProperty::New();
     this->TitleTextProperty[i]->SetColor(1.,1.,1.);
     this->TitleTextProperty[i]->SetFontFamilyToArial();
+    this->TitleTextProperty[i]->SetFontSize(18.);
+    this->TitleTextProperty[i]->SetVerticalJustificationToCentered();
+    this->TitleTextProperty[i]->SetJustificationToCentered();
 
     this->LabelTextProperty[i] = vtkTextProperty::New();
     this->LabelTextProperty[i]->SetColor(1.,1.,1.);
     this->LabelTextProperty[i]->SetFontFamilyToArial();
+    this->LabelTextProperty[i]->SetFontSize(14.);
+    this->LabelTextProperty[i]->SetVerticalJustificationToBottom();
+    this->LabelTextProperty[i]->SetJustificationToLeft();
     }
 
   // Axis lines
@@ -174,25 +181,37 @@ vtkCubeAxesActor::vtkCubeAxesActor() : vtkActor()
 
     // Pass information to axes followers.
     vtkAxisFollower* follower = this->XAxes[i]->GetTitleActor();
-    follower->SetAxis(this->XAxes[i]);
     follower->SetEnableDistanceLOD( this->EnableDistanceLOD );
     follower->SetDistanceLODThreshold( this->DistanceLODThreshold );
     follower->SetEnableViewAngleLOD( this->EnableViewAngleLOD );
     follower->SetViewAngleLODThreshold( this->ViewAngleLODThreshold );
+    vtkProp3DAxisFollower* axisFollower = this->XAxes[i]->GetTitleProp3D();
+    axisFollower->SetEnableDistanceLOD( this->EnableDistanceLOD );
+    axisFollower->SetDistanceLODThreshold( this->DistanceLODThreshold );
+    axisFollower->SetEnableViewAngleLOD( this->EnableViewAngleLOD );
+    axisFollower->SetViewAngleLODThreshold( this->ViewAngleLODThreshold );
 
     follower = this->YAxes[i]->GetTitleActor();
-    follower->SetAxis(this->YAxes[i]);
     follower->SetEnableDistanceLOD( this->EnableDistanceLOD );
     follower->SetDistanceLODThreshold( this->DistanceLODThreshold );
     follower->SetEnableViewAngleLOD( this->EnableViewAngleLOD );
     follower->SetViewAngleLODThreshold( this->ViewAngleLODThreshold );
+    axisFollower = this->YAxes[i]->GetTitleProp3D();
+    axisFollower->SetEnableDistanceLOD( this->EnableDistanceLOD );
+    axisFollower->SetDistanceLODThreshold( this->DistanceLODThreshold );
+    axisFollower->SetEnableViewAngleLOD( this->EnableViewAngleLOD );
+    axisFollower->SetViewAngleLODThreshold( this->ViewAngleLODThreshold );
 
     follower = this->ZAxes[i]->GetTitleActor();
-    follower->SetAxis(this->ZAxes[i]);
     follower->SetEnableDistanceLOD( this->EnableDistanceLOD );
     follower->SetDistanceLODThreshold( this->DistanceLODThreshold );
     follower->SetEnableViewAngleLOD( this->EnableViewAngleLOD );
     follower->SetViewAngleLODThreshold( this->ViewAngleLODThreshold );
+    axisFollower = this->ZAxes[i]->GetTitleProp3D();
+    axisFollower->SetEnableDistanceLOD( this->EnableDistanceLOD );
+    axisFollower->SetDistanceLODThreshold( this->DistanceLODThreshold );
+    axisFollower->SetEnableViewAngleLOD( this->EnableViewAngleLOD );
+    axisFollower->SetViewAngleLODThreshold( this->ViewAngleLODThreshold );
     }
 
   this->XTitle = new char[7];
@@ -319,6 +338,24 @@ vtkCubeAxesActor::vtkCubeAxesActor() : vtkActor()
   this->TitleScale = -1.0;
 }
 
+// *************************************************************************
+void vtkCubeAxesActor::SetUseTextActor3D( int val )
+{
+  for( int i = 0 ; i < NUMBER_OF_ALIGNED_AXIS ; ++ i )
+    {
+    this->XAxes[i]->SetUseTextActor3D( val );
+    this->YAxes[i]->SetUseTextActor3D( val );
+    this->ZAxes[i]->SetUseTextActor3D( val );
+    }
+}
+
+// *************************************************************************
+int vtkCubeAxesActor::GetUseTextActor3D()
+{
+  // It is assumed that all axes have the same value
+  return this->XAxes[0]->GetUseTextActor3D();
+}
+
 void vtkCubeAxesActor::SetUse2DMode( int val )
 {
   for( int i = 0 ; i < NUMBER_OF_ALIGNED_AXIS ; ++ i )
@@ -444,71 +481,41 @@ vtkCubeAxesActor::~vtkCubeAxesActor()
     this->LabelTextProperty[i] = NULL;
     }
 
-  if (this->XLabelFormat)
-    {
-    delete [] this->XLabelFormat;
-    this->XLabelFormat = NULL;
-    }
+  delete [] this->XLabelFormat;
+  this->XLabelFormat = NULL;
 
-  if (this->YLabelFormat)
-    {
-    delete [] this->YLabelFormat;
-    this->YLabelFormat = NULL;
-    }
+  delete [] this->YLabelFormat;
+  this->YLabelFormat = NULL;
 
-  if (this->ZLabelFormat)
-    {
-    delete [] this->ZLabelFormat;
-    this->ZLabelFormat = NULL;
-    }
+  delete [] this->ZLabelFormat;
+  this->ZLabelFormat = NULL;
 
-  if (this->XTitle)
-    {
-    delete [] this->XTitle;
-    this->XTitle = NULL;
-    }
-  if (this->YTitle)
-    {
-    delete [] this->YTitle;
-    this->YTitle = NULL;
-    }
-  if (this->ZTitle)
-    {
-    delete [] this->ZTitle;
-    this->ZTitle = NULL;
-    }
+  delete [] this->XTitle;
+  this->XTitle = NULL;
 
-  if (this->XUnits)
-    {
-    delete [] this->XUnits;
-    this->XUnits = NULL;
-    }
-  if (this->YUnits)
-    {
-    delete [] this->YUnits;
-    this->YUnits = NULL;
-    }
-  if (this->ZUnits)
-    {
-    delete [] this->ZUnits;
-    this->ZUnits = NULL;
-    }
+  delete [] this->YTitle;
+  this->YTitle = NULL;
 
-  if (this->ActualXLabel)
-    {
-    delete [] this->ActualXLabel;
-    this->ActualXLabel = NULL;
-    }
-  if (this->ActualYLabel)
-    {
-    delete [] this->ActualYLabel;
-    this->ActualYLabel = NULL;
-    }
-  if (this->ActualZLabel)
-    {
-    delete [] this->ActualZLabel;
-    this->ActualZLabel = NULL;
-    }
+  delete [] this->ZTitle;
+  this->ZTitle = NULL;
+
+  delete [] this->XUnits;
+  this->XUnits = NULL;
+
+  delete [] this->YUnits;
+  this->YUnits = NULL;
+
+  delete [] this->ZUnits;
+  this->ZUnits = NULL;
+
+  delete [] this->ActualXLabel;
+  this->ActualXLabel = NULL;
+
+  delete [] this->ActualYLabel;
+  this->ActualYLabel = NULL;
+
+  delete [] this->ActualZLabel;
+  this->ActualZLabel = NULL;
 }
 
 // *************************************************************************
@@ -2102,7 +2109,7 @@ void vtkCubeAxesActor::AutoScale(vtkViewport *viewport, vtkAxisActor *axis[NUMBE
                           this->ScreenSize,
                           labelActors[j]->GetPosition());
 
-      labelActors[j]->SetScale(newLabelScale);
+      axis[i]->SetLabelScale(j, newLabelScale);
       }
     }
 }
@@ -2327,35 +2334,23 @@ vtkTextProperty* vtkCubeAxesActor::GetLabelTextProperty(int axis)
 // ****************************************************************************
 //  Set axes and screen size of the labels.
 // ****************************************************************************
-void vtkCubeAxesActor::UpdateLabels(vtkAxisActor **axis, int index)
+void vtkCubeAxesActor::UpdateLabels(vtkAxisActor **axis, int vtkNotUsed(index))
   {
   for (int i = 0; i < NUMBER_OF_ALIGNED_AXIS; i++)
     {
     int numberOfLabelsBuild = axis[i]->GetNumberOfLabelsBuilt();
     vtkAxisFollower **labelActors = axis[i]->GetLabelActors();
+    vtkProp3DAxisFollower **labelProps = axis[i]->GetLabelProps3D();
     for(int k=0; k < numberOfLabelsBuild; ++k)
       {
-      if(index == 0)
-        {
-        labelActors[k]->SetAxis(this->XAxes[i]);
-        }
-      else if(index == 1)
-        {
-        labelActors[k]->SetAxis(this->YAxes[i]);
-        }
-      else if(index == 2)
-        {
-        labelActors[k]->SetAxis(this->ZAxes[i]);
-        }
-      else
-        {
-        // Do nothing.
-        }
-
       labelActors[k]->SetEnableDistanceLOD( this->EnableDistanceLOD );
       labelActors[k]->SetDistanceLODThreshold( this->DistanceLODThreshold );
       labelActors[k]->SetEnableViewAngleLOD( this->EnableViewAngleLOD );
       labelActors[k]->SetViewAngleLODThreshold( this->ViewAngleLODThreshold );
+      labelProps[k]->SetEnableDistanceLOD( this->EnableDistanceLOD );
+      labelProps[k]->SetDistanceLODThreshold( this->DistanceLODThreshold );
+      labelProps[k]->SetEnableViewAngleLOD( this->EnableViewAngleLOD );
+      labelProps[k]->SetViewAngleLODThreshold( this->ViewAngleLODThreshold );
       }
     }
   }
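Because every axis now carries a vtkProp3DAxisFollower alongside its vtkAxisFollower, LOD settings must be pushed to both, as the constructor and UpdateLabels() above do. A sketch of the same pattern outside vtkCubeAxesActor, assuming an existing vtkAxisActor* named axis:

    vtkAxisFollower       *titleFollower = axis->GetTitleActor();
    vtkProp3DAxisFollower *titleProp3D   = axis->GetTitleProp3D();

    titleFollower->SetEnableDistanceLOD(1);
    titleFollower->SetDistanceLODThreshold(0.9);
    titleProp3D->SetEnableDistanceLOD(1);
    titleProp3D->SetDistanceLODThreshold(0.9);
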
diff --git a/Rendering/Annotation/vtkCubeAxesActor.h b/Rendering/Annotation/vtkCubeAxesActor.h
index 8e5ca2c..8227d70 100644
--- a/Rendering/Annotation/vtkCubeAxesActor.h
+++ b/Rendering/Annotation/vtkCubeAxesActor.h
@@ -395,6 +395,13 @@ public:
   void SetLabelScaling(bool, int, int, int);
 
   // Description:
+  // Enable or disable the use of vtkTextActor3D for titles and labels.
+  // See Also:
+  // vtkAxisActor::SetUseTextActor3D(), vtkAxisActor::GetUseTextActor3D()
+  void SetUseTextActor3D( int val );
+  int GetUseTextActor3D();
+
+  // Description:
   // Get/Set 2D mode
   // NB: Use vtkTextActor for titles in 2D instead of vtkAxisFollower
   void SetUse2DMode( int val );
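For vtkCubeAxesActor the new flag is a simple fan-out to all of the internal vtkAxisActor instances. A hedged end-to-end sketch, assuming an existing renderer and an actor named dataActor whose bounds drive the axes:

    vtkCubeAxesActor *cubeAxes = vtkCubeAxesActor::New();
    cubeAxes->SetCamera(renderer->GetActiveCamera());
    cubeAxes->SetBounds(dataActor->GetBounds());
    cubeAxes->SetUseTextActor3D(1);      // forwarded to every internal vtkAxisActor
    renderer->AddActor(cubeAxes);
    cubeAxes->Delete();
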
diff --git a/Rendering/Annotation/vtkCubeAxesActor2D.cxx b/Rendering/Annotation/vtkCubeAxesActor2D.cxx
index fd29a29..fac11b1 100644
--- a/Rendering/Annotation/vtkCubeAxesActor2D.cxx
+++ b/Rendering/Annotation/vtkCubeAxesActor2D.cxx
@@ -156,24 +156,12 @@ vtkCubeAxesActor2D::~vtkCubeAxesActor2D()
   this->YAxis->Delete();
   this->ZAxis->Delete();
 
-  if (this->LabelFormat)
-    {
-    delete [] this->LabelFormat;
-    this->LabelFormat = NULL;
-    }
+  delete [] this->LabelFormat;
+  this->LabelFormat = NULL;
 
-  if ( this->XLabel )
-    {
-    delete [] this->XLabel;
-    }
-  if ( this->YLabel )
-    {
-    delete [] this->YLabel;
-    }
-  if ( this->ZLabel )
-    {
-    delete [] this->ZLabel;
-    }
+  delete [] this->XLabel;
+  delete [] this->YLabel;
+  delete [] this->ZLabel;
 
   this->SetAxisLabelTextProperty(NULL);
   this->SetAxisTitleTextProperty(NULL);
diff --git a/Rendering/Annotation/vtkLeaderActor2D.cxx b/Rendering/Annotation/vtkLeaderActor2D.cxx
index 50ea3a0..1edbe15 100644
--- a/Rendering/Annotation/vtkLeaderActor2D.cxx
+++ b/Rendering/Annotation/vtkLeaderActor2D.cxx
@@ -100,16 +100,11 @@ vtkLeaderActor2D::~vtkLeaderActor2D()
   this->LabelMapper->Delete();
   this->LabelActor->Delete();
 
-  if (this->Label)
-    {
-    delete [] this->Label;
-    this->Label = NULL;
-    }
-  if (this->LabelFormat)
-    {
-    delete [] this->LabelFormat;
-    this->LabelFormat = NULL;
-    }
+  delete [] this->Label;
+  this->Label = NULL;
+
+  delete [] this->LabelFormat;
+  this->LabelFormat = NULL;
 
   this->LeaderPoints->Delete();
   this->LeaderLines->Delete();
diff --git a/Rendering/Annotation/vtkParallelCoordinatesActor.cxx b/Rendering/Annotation/vtkParallelCoordinatesActor.cxx
index 2bebcbc..45da8a0 100644
--- a/Rendering/Annotation/vtkParallelCoordinatesActor.cxx
+++ b/Rendering/Annotation/vtkParallelCoordinatesActor.cxx
@@ -118,17 +118,11 @@ vtkParallelCoordinatesActor::~vtkParallelCoordinatesActor()
   this->PlotMapper->Delete();
   this->PlotActor->Delete();
 
-  if (this->Title)
-    {
-    delete [] this->Title;
-    this->Title = NULL;
-    }
+  delete [] this->Title;
+  this->Title = NULL;
 
-  if (this->LabelFormat)
-    {
-    delete [] this->LabelFormat;
-    this->LabelFormat = NULL;
-    }
+  delete [] this->LabelFormat;
+  this->LabelFormat = NULL;
 
   this->SetLabelTextProperty(NULL);
   this->SetTitleTextProperty(NULL);
diff --git a/Rendering/Annotation/vtkPieChartActor.cxx b/Rendering/Annotation/vtkPieChartActor.cxx
index bda7921..6c45514 100644
--- a/Rendering/Annotation/vtkPieChartActor.cxx
+++ b/Rendering/Annotation/vtkPieChartActor.cxx
@@ -142,11 +142,8 @@ vtkPieChartActor::~vtkPieChartActor()
   this->ConnectionHolder->Delete();
   this->ConnectionHolder = 0;
 
-  if (this->Title)
-    {
-    delete [] this->Title;
-    this->Title = NULL;
-    }
+  delete [] this->Title;
+  this->Title = NULL;
 
   delete this->Labels;
   this->SetLabelTextProperty(NULL);
@@ -211,10 +208,7 @@ void vtkPieChartActor::Initialize()
 
   this->N = 0;
   this->Total = 0.0;
-  if ( this->Fractions )
-    {
-    delete [] this->Fractions;
-    }
+  delete [] this->Fractions;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/Annotation/vtkPolarAxesActor.cxx b/Rendering/Annotation/vtkPolarAxesActor.cxx
index fab0596..9898505 100644
--- a/Rendering/Annotation/vtkPolarAxesActor.cxx
+++ b/Rendering/Annotation/vtkPolarAxesActor.cxx
@@ -245,17 +245,11 @@ vtkPolarAxesActor::~vtkPolarAxesActor()
     this->RadialAxesProperty->Delete();
     }
 
-  if ( this->PolarLabelFormat )
-    {
-    delete [] this->PolarLabelFormat;
-    this->PolarLabelFormat = NULL;
-    }
+  delete [] this->PolarLabelFormat;
+  this->PolarLabelFormat = NULL;
 
-  if ( this->PolarAxisTitle )
-    {
-    delete [] this->PolarAxisTitle;
-    this->PolarAxisTitle = NULL;
-    }
+  delete [] this->PolarAxisTitle;
+  this->PolarAxisTitle = NULL;
 
   if ( this->PolarAxisTitleTextProperty )
     {
diff --git a/Rendering/Annotation/vtkProp3DAxisFollower.cxx b/Rendering/Annotation/vtkProp3DAxisFollower.cxx
new file mode 100644
index 0000000..13e4698
--- /dev/null
+++ b/Rendering/Annotation/vtkProp3DAxisFollower.cxx
@@ -0,0 +1,593 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkProp3DAxisFollower.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkProp3DAxisFollower.h"
+
+#include "vtkAxisActor.h"
+#include "vtkBoundingBox.h"
+#include "vtkCamera.h"
+#include "vtkCoordinate.h"
+#include "vtkMath.h"
+#include "vtkMatrix4x4.h"
+#include "vtkObjectFactory.h"
+#include "vtkProperty.h"
+#include "vtkViewport.h"
+#include "vtkTexture.h"
+#include "vtkTransform.h"
+
+#include <math.h>
+
+vtkStandardNewMacro(vtkProp3DAxisFollower);
+
+// List of vectors per axis (depending on which one needs to be
+// followed).
+// Order here is X, Y, and Z.
+// Set of two axis aligned vectors that would define the Y vector.
+// Order is MINMIN, MINMAX, MAXMAX, MAXMIN
+namespace
+{
+  const double AxisAlignedY[3][4][2][3] =
+  {
+    { {{0.0,  1.0, 0.0}, {0.0, 0.0,  1.0}},
+      {{0.0,  1.0, 0.0}, {0.0, 0.0, -1.0}},
+      {{0.0, -1.0, 0.0}, {0.0, 0.0, -1.0}},
+      {{0.0, -1.0, 0.0}, {0.0, 0.0,  1.0}}
+    },
+    {
+      {{ 1.0, 0.0, 0.0}, {0.0, 0.0,  1.0}},
+      {{ 1.0, 0.0, 0.0}, {0.0, 0.0, -1.0}},
+      {{-1.0, 0.0, 0.0}, {0.0, 0.0, -1.0}},
+      {{-1.0, 0.0, 0.0}, {0.0, 0.0,  1.0}}
+    },
+    {
+      {{ 1.0, 0.0, 0.0},  {0.0,  1.0, 0.0}},
+      {{ 1.0, 0.0, 0.0},  {0.0, -1.0, 0.0}},
+      {{-1.0, 0.0, 0.0},  {0.0, -1.0, 0.0}},
+      {{-1.0, 0.0, 0.0},  {0.0,  1.0, 0.0}}
+    }
+  };
+}
+//----------------------------------------------------------------------
+// Creates a follower with no camera set
+vtkProp3DAxisFollower::vtkProp3DAxisFollower()
+{
+  this->AutoCenter                = 1;
+
+  this->EnableDistanceLOD         = 0;
+  this->DistanceLODThreshold      = 0.80;
+
+  this->EnableViewAngleLOD        = 1;
+  this->ViewAngleLODThreshold     = 0.34;
+
+  this->ScreenOffset              = 10.0;
+
+  this->Axis                      = NULL;
+  this->Viewport                  = NULL;
+
+  this->TextUpsideDown            = -1;
+  this->VisibleAtCurrentViewAngle = -1;
+}
+
+//----------------------------------------------------------------------
+vtkProp3DAxisFollower::~vtkProp3DAxisFollower()
+{
+}
+
+//----------------------------------------------------------------------
+void vtkProp3DAxisFollower::SetAxis(vtkAxisActor *axis)
+{
+  if(!axis)
+    {
+    vtkErrorMacro("Invalid or NULL axis\n");
+    return;
+    }
+
+  if(this->Axis != axis)
+    {
+    // \NOTE: Don't increment the ref count of axis as it could lead to
+    // circular references.
+    this->Axis = axis;
+    this->Modified();
+    }
+}
+
+//----------------------------------------------------------------------
+vtkAxisActor* vtkProp3DAxisFollower::GetAxis()
+{
+  return this->Axis.GetPointer();
+}
+
+
+//----------------------------------------------------------------------
+void vtkProp3DAxisFollower::SetViewport(vtkViewport* vp)
+{
+  if(this->Viewport != vp)
+    {
+    // \NOTE: Don't increment the ref count of vtkViewport as it could lead to
+    // circular references.
+    this->Viewport = vp;
+    this->Modified();
+    }
+}
+
+//----------------------------------------------------------------------
+vtkViewport* vtkProp3DAxisFollower::GetViewport()
+{
+  return this->Viewport.GetPointer();
+}
+
+//----------------------------------------------------------------------------
+void vtkProp3DAxisFollower::CalculateOrthogonalVectors(double rX[3], double rY[3],
+  double rZ[3], vtkAxisActor *axis, double *dop, vtkViewport *viewport)
+{
+  if (!rX || !rY || !rZ)
+    {
+    vtkErrorMacro("Invalid or NULL direction vectors\n");
+    return;
+    }
+
+  if (!axis)
+    {
+    vtkErrorMacro("Invalid or NULL axis\n");
+    return;
+    }
+
+  if (!dop)
+    {
+    vtkErrorMacro("Invalid or NULL direction of projection vector\n");
+    return;
+    }
+
+  if (!viewport)
+    {
+    vtkErrorMacro("Invalid or NULL renderer\n");
+    return;
+    }
+
+  vtkMatrix4x4* cameraMatrix = this->Camera->GetViewTransformMatrix();
+
+  vtkCoordinate *c1Axis =  axis->GetPoint1Coordinate();
+  vtkCoordinate *c2Axis =  axis->GetPoint2Coordinate();
+  double *axisPt1 = c1Axis->GetComputedWorldValue(viewport);
+  double *axisPt2 = c2Axis->GetComputedWorldValue(viewport);
+
+  rX[0] = axisPt2[0] - axisPt1[0];
+  rX[1] = axisPt2[1] - axisPt1[1];
+  rX[2] = axisPt2[2] - axisPt1[2];
+  vtkMath::Normalize(rX);
+
+  if (rX[0] != dop[0] || rX[1] != dop[1] || rX[2] != dop[2])
+    {
+    // Get Y
+    vtkMath::Cross(rX, dop, rY);
+    vtkMath::Normalize(rY);
+
+    // Get Z
+    vtkMath::Cross(rX, rY, rZ);
+    vtkMath::Normalize(rZ);
+    }
+  else
+    {
+    vtkMath::Perpendiculars(rX, rY, rZ, 0.);
+    }
+  double a[3], b[3];
+
+  // Need homogeneous points.
+  double homoPt1[4] = {axisPt1[0], axisPt1[1], axisPt1[2], 1.0};
+  double homoPt2[4] = {axisPt2[0], axisPt2[1], axisPt2[2], 1.0};
+
+  double *viewCoordinatePt1 = cameraMatrix->MultiplyDoublePoint(homoPt1);
+  a[0] = viewCoordinatePt1[0];
+  a[1] = viewCoordinatePt1[1];
+  a[2] = viewCoordinatePt1[2];
+
+  double *viewCoordinatePt2 = cameraMatrix->MultiplyDoublePoint(homoPt2);
+  b[0] = viewCoordinatePt2[0];
+  b[1] = viewCoordinatePt2[1];
+  b[2] = viewCoordinatePt2[2];
+
+  // If the text is upside down, we make a 180 rotation to keep it readable.
+  if(this->IsTextUpsideDown(a, b))
+    {
+    this->TextUpsideDown = 1;
+    rX[0] = -rX[0];
+    rX[1] = -rX[1];
+    rX[2] = -rX[2];
+    rZ[0] = -rZ[0];
+    rZ[1] = -rZ[1];
+    rZ[2] = -rZ[2];
+    }
+  else
+    {
+    this->TextUpsideDown = 0;
+    }
+}
+
+//----------------------------------------------------------------------------
+double vtkProp3DAxisFollower::AutoScale(vtkViewport *viewport, vtkCamera *camera,
+                                        double screenSize, double position[3])
+{
+  double newScale = 0.0;
+
+  if(!viewport)
+    {
+    std::cerr << "Invalid or NULL viewport \n";
+    return newScale;
+    }
+
+  if(!camera)
+    {
+    std::cerr << "Invalid or NULL camera \n";
+    return newScale;
+    }
+
+  if(!position)
+    {
+    std::cerr << "Invalid or NULL position \n";
+    return newScale;
+    }
+
+  double factor = 1;
+  if (viewport->GetSize()[1] > 0)
+    {
+    factor = 2.0 * screenSize
+      * tan(vtkMath::RadiansFromDegrees(camera->GetViewAngle()/2.0))
+      / viewport->GetSize()[1];
+    }
+
+  double dist = sqrt(
+    vtkMath::Distance2BetweenPoints(position,
+                                    camera->GetPosition()));
+  newScale = factor * dist;
+
+  return newScale;
+}
+
+//----------------------------------------------------------------------------
+void vtkProp3DAxisFollower::ComputeMatrix()
+{
+  if(!this->Axis)
+    {
+    vtkErrorMacro("ERROR: Invalid axis\n");
+    return;
+    }
+
+  if (this->EnableDistanceLOD && !this->TestDistanceVisibility())
+    {
+    this->SetVisibility(0);
+    return;
+    }
+
+  // Check whether or not we need to rebuild the matrix.
+  if ( this->GetMTime() > this->MatrixMTime ||
+       (this->Camera && this->Camera->GetMTime() > this->MatrixMTime) )
+    {
+    this->GetOrientation();
+    this->Transform->Push();
+    this->Transform->Identity();
+    this->Transform->PostMultiply();
+    this->Transform->GetMatrix(this->Matrix);
+
+    double pivotPoint[3] =
+    {
+      this->Origin[0],
+      this->Origin[1],
+      this->Origin[2]
+    };
+
+    if(this->AutoCenter)
+      {
+      // Don't apply the user matrix when retrieving the center.
+      this->Device->SetUserMatrix(NULL);
+
+      double* center = this->Device->GetCenter();
+      pivotPoint[0] = center[0];
+      pivotPoint[1] = center[1];
+      pivotPoint[2] = center[2];
+      }
+
+    // Move pivot point to origin
+    this->Transform->Translate(-pivotPoint[0],
+                               -pivotPoint[1],
+                               -pivotPoint[2]);
+    // Scale
+    this->Transform->Scale(this->Scale[0],
+                           this->Scale[1],
+                           this->Scale[2]);
+
+    // Rotate
+    this->Transform->RotateY(this->Orientation[1]);
+    this->Transform->RotateX(this->Orientation[0]);
+    this->Transform->RotateZ(this->Orientation[2]);
+
+    double translation[3] = {0.0, 0.0, 0.0};
+    if (this->Axis)
+      {
+      vtkMatrix4x4 *matrix = this->InternalMatrix;
+      matrix->Identity();
+      double rX[3], rY[3], rZ[3];
+
+      this->ComputeRotationAndTranlation(this->Viewport, translation, rX, rY, rZ, this->Axis);
+
+      vtkMath::Normalize(rX);
+      vtkMath::Normalize(rY);
+      vtkMath::Normalize(rZ);
+
+      matrix->Element[0][0] = rX[0];
+      matrix->Element[1][0] = rX[1];
+      matrix->Element[2][0] = rX[2];
+      matrix->Element[0][1] = rY[0];
+      matrix->Element[1][1] = rY[1];
+      matrix->Element[2][1] = rY[2];
+      matrix->Element[0][2] = rZ[0];
+      matrix->Element[1][2] = rZ[1];
+      matrix->Element[2][2] = rZ[2];
+
+      this->Transform->Concatenate(matrix);
+      }
+
+    this->Transform->Translate(this->Origin[0] + this->Position[0] + translation[0],
+                               this->Origin[1] + this->Position[1] + translation[1],
+                               this->Origin[2] + this->Position[2] + translation[2]);
+
+    // Apply user defined matrix last if there is one
+    if (this->UserMatrix)
+      {
+      this->Transform->Concatenate(this->UserMatrix);
+      }
+
+    this->Transform->PreMultiply();
+    this->Transform->GetMatrix(this->Matrix);
+    this->MatrixMTime.Modified();
+    this->Transform->Pop();
+    }
+
+  this->SetVisibility(this->VisibleAtCurrentViewAngle);
+}
+
+//-----------------------------------------------------------------------------
+void vtkProp3DAxisFollower
+::ComputeRotationAndTranlation(vtkViewport* viewport, double translation[3],
+                               double rX[3], double rY[3], double rZ[3],
+                               vtkAxisActor *axis)
+{
+  double autoScaleFactor =
+    this->AutoScale(viewport, this->Camera, this->ScreenOffset, this->Position);
+
+  double dop[3];
+  this->Camera->GetDirectionOfProjection(dop);
+  vtkMath::Normalize(dop);
+
+  this->CalculateOrthogonalVectors(rX, rY, rZ, axis, dop, this->Viewport);
+
+  double dotVal = vtkMath::Dot(rZ, dop);
+
+  double origRy[3] = {0.0, 0.0, 0.0};
+
+  origRy[0] = rY[0];
+  origRy[1] = rY[1];
+  origRy[2] = rY[2];
+
+  // NOTE: The idea here is that dotVal will be positive
+  // only when the projection direction is aligned with our z direction,
+  // and when that happens it means that our Y is inverted.
+  if(dotVal > 0)
+    {
+    rY[0] = -rY[0];
+    rY[1] = -rY[1];
+    rY[2] = -rY[2];
+    }
+
+  // Check visibility at current view angle.
+  if(this->EnableViewAngleLOD)
+    {
+    this->ExecuteViewAngleVisibility(rZ);
+    }
+
+  // Since we have already stored all the possible geometry-aligned Y axes,
+  // we compare our vertical vector with these vectors, and if it aligns we
+  // translate in the opposite direction.
+  int axisPosition = this->Axis->GetAxisPosition();
+
+  double dotVal1 = vtkMath::Dot(AxisAlignedY[this->Axis->GetAxisType()][axisPosition][0], origRy) ;
+  double dotVal2 = vtkMath::Dot(AxisAlignedY[this->Axis->GetAxisType()][axisPosition][1], origRy) ;
+
+  if(fabs(dotVal1) > fabs(dotVal2))
+    {
+    int sign = (dotVal1 > 0 ? -1 : 1);
+
+    translation[0] =  origRy[0] * autoScaleFactor * sign;
+    translation[1] =  origRy[1] * autoScaleFactor * sign;
+    translation[2] =  origRy[2] * autoScaleFactor * sign;
+    }
+  else
+    {
+    int sign = (dotVal2 > 0 ? -1 : 1);
+
+    translation[0] =  origRy[0] * autoScaleFactor * sign;
+    translation[1] =  origRy[1] * autoScaleFactor * sign;
+    translation[2] =  origRy[2] * autoScaleFactor * sign;
+    }
+}
+
+//----------------------------------------------------------------------
+void vtkProp3DAxisFollower::ComputerAutoCenterTranslation(
+  const double& vtkNotUsed(autoScaleFactor), double translation[3])
+{
+  if(!translation)
+    {
+    vtkErrorMacro("ERROR: Invalid or NULL translation\n");
+    return;
+    }
+
+  double *bounds = this->GetProp3D()->GetBounds();
+
+  // Offset by half of width.
+  double halfWidth  = (bounds[1] - bounds[0]) * 0.5 * this->Scale[0];
+
+  if(this->TextUpsideDown == 1)
+    {
+    halfWidth  = -halfWidth;
+    }
+
+  if(this->Axis->GetAxisType() == VTK_AXIS_TYPE_X)
+    {
+    translation[0] = translation[0] - halfWidth;
+    }
+  else if(this->Axis->GetAxisType() == VTK_AXIS_TYPE_Y)
+    {
+    translation[1] = translation[1] - halfWidth;
+    }
+  else if(this->Axis->GetAxisType() == VTK_AXIS_TYPE_Z)
+    {
+    translation[2] = translation[2] - halfWidth;
+    }
+  else
+    {
+    // Do nothing.
+    }
+
+  return;
+}
+
+//----------------------------------------------------------------------
+int vtkProp3DAxisFollower::TestDistanceVisibility()
+{
+  if(!this->Camera->GetParallelProjection())
+    {
+    double cameraClippingRange[2];
+
+    this->Camera->GetClippingRange(cameraClippingRange);
+
+    // We are considering the far clip plane for evaluation. In certain
+    // odd conditions it might not work.
+    const double maxVisibleDistanceFromCamera = this->DistanceLODThreshold * (cameraClippingRange[1]);
+
+    double dist = sqrt(vtkMath::Distance2BetweenPoints(this->Camera->GetPosition(),
+                                                       this->Position));
+
+    if(dist > maxVisibleDistanceFromCamera)
+      {
+      // Need to make sure we are not looking at a flat axis and therefore should enable it anyway
+      if(this->Axis)
+        {
+        vtkBoundingBox bbox(this->Axis->GetBounds());
+        return (bbox.GetDiagonalLength() > (cameraClippingRange[1] - cameraClippingRange[0])) ? 1 : 0;
+        }
+      return 0;
+      }
+    else
+      {
+      return 1;
+      }
+    }
+  else
+    {
+    return 1;
+    }
+}
+
+//----------------------------------------------------------------------
+void vtkProp3DAxisFollower::ExecuteViewAngleVisibility(double normal[3])
+{
+  if(!normal)
+    {
+    vtkErrorMacro("ERROR: Invalid or NULL normal\n");
+    return;
+    }
+
+  double *cameraPos = this->Camera->GetPosition();
+  double  dir[3] = {this->Position[0] - cameraPos[0],
+                    this->Position[1] - cameraPos[1],
+                    this->Position[2] - cameraPos[2]};
+  vtkMath::Normalize(dir);
+  double dotDir = vtkMath::Dot(dir, normal);
+  if( fabs(dotDir) < this->ViewAngleLODThreshold )
+    {
+    this->VisibleAtCurrentViewAngle = 0;
+    }
+  else
+    {
+    this->VisibleAtCurrentViewAngle = 1;
+    }
+}
+
+//----------------------------------------------------------------------
+void vtkProp3DAxisFollower::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+
+  os << indent << "AutoCenter: ("  << this->AutoCenter   << ")\n";
+  os << indent << "EnableDistanceLOD: ("   << this->EnableDistanceLOD    << ")\n";
+  os << indent << "DistanceLODThreshold: ("   << this->DistanceLODThreshold    << ")\n";
+  os << indent << "EnableViewAngleLOD: ("   << this->EnableViewAngleLOD    << ")\n";
+  os << indent << "ViewAngleLODThreshold: ("   << this->ViewAngleLODThreshold    << ")\n";
+  os << indent << "ScreenOffset: ("<< this->ScreenOffset << ")\n";
+
+  if ( this->Axis )
+    {
+    os << indent << "Axis: (" << this->Axis << ")\n";
+    }
+  else
+    {
+    os << indent << "Axis: (none)\n";
+    }
+}
+
+//----------------------------------------------------------------------
+void vtkProp3DAxisFollower::ShallowCopy(vtkProp *prop)
+{
+  vtkProp3DAxisFollower *f = vtkProp3DAxisFollower::SafeDownCast(prop);
+  if ( f != NULL )
+    {
+    this->SetAutoCenter(f->GetAutoCenter());
+    this->SetEnableDistanceLOD(f->GetEnableDistanceLOD());
+    this->SetDistanceLODThreshold(f->GetDistanceLODThreshold());
+    this->SetEnableViewAngleLOD(f->GetEnableViewAngleLOD());
+    this->SetViewAngleLODThreshold(f->GetViewAngleLODThreshold());
+    this->SetScreenOffset(f->GetScreenOffset());
+    this->SetAxis(f->GetAxis());
+    }
+
+  // Now do superclass
+  this->Superclass::ShallowCopy(prop);
+}
+
+//----------------------------------------------------------------------
+bool vtkProp3DAxisFollower::IsTextUpsideDown( double* a, double* b )
+{
+  double angle = vtkMath::RadiansFromDegrees(this->Orientation[2]);
+  return (b[0] - a[0]) * cos(angle) - (b[1] - a[1]) * sin(angle) < 0;
+}
+
+//----------------------------------------------------------------------
+int vtkProp3DAxisFollower::RenderOpaqueGeometry(vtkViewport *viewport)
+{
+  this->SetViewport(viewport);
+  return this->Superclass::RenderOpaqueGeometry(viewport);
+}
+
+//----------------------------------------------------------------------
+int vtkProp3DAxisFollower::RenderTranslucentPolygonalGeometry(vtkViewport *viewport)
+{
+  this->SetViewport(viewport);
+  return this->Superclass::RenderTranslucentPolygonalGeometry(viewport);
+}
+
+//----------------------------------------------------------------------
+int vtkProp3DAxisFollower::RenderVolumetricGeometry(vtkViewport *viewport)
+{
+  this->SetViewport(viewport);
+  return this->Superclass::RenderVolumetricGeometry(viewport);
+}
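The static AutoScale() above converts a desired on-screen size into a world-space scale: factor = 2 * screenSize * tan(viewAngle / 2) / viewportHeightInPixels, and the result is factor times the distance from the camera to the given position. A hedged call-site sketch, with renderer and worldPos standing in for a real viewport and label position:

    double worldPos[3] = {0.0, 0.0, 0.0};
    double scale = vtkProp3DAxisFollower::AutoScale(
      renderer, renderer->GetActiveCamera(), 10.0 /* target size in pixels */, worldPos);
    // 'scale' is the world-space size that spans roughly 10 pixels at worldPos.
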
diff --git a/Rendering/Annotation/vtkProp3DAxisFollower.h b/Rendering/Annotation/vtkProp3DAxisFollower.h
new file mode 100644
index 0000000..8e57bc7
--- /dev/null
+++ b/Rendering/Annotation/vtkProp3DAxisFollower.h
@@ -0,0 +1,165 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkProp3DAxisFollower.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkProp3DAxisFollower - a subclass of vtkProp3DFollower that ensures
+// that data is always parallel to the axis defined by a vtkAxisActor.
+// .SECTION Description
+// vtkProp3DAxisFollower is a subclass of vtkProp3DFollower that always follows
+// its specified axis. More specifically it will not change its position or
+// scale, but it will continually update its orientation so that it is aligned
+// with the axis and facing the camera at an angle that provides maximum visibility.
+// This is typically used for text labels in 3D plots.
+// .SECTION see also
+// vtkFollower vtkAxisFollower vtkProp3DFollower
+
+#ifndef __vtkProp3DAxisFollower_h
+#define __vtkProp3DAxisFollower_h
+
+#include "vtkRenderingAnnotationModule.h" // For export macro
+#include "vtkProp3DFollower.h"
+#include "vtkWeakPointer.h" // For vtkWeakPointer
+
+class vtkAxisActor;
+class vtkViewport;
+
+class VTKRENDERINGANNOTATION_EXPORT vtkProp3DAxisFollower
+  : public vtkProp3DFollower
+{
+ public:
+  // Description:
+  // Creates a follower with no camera set.
+  static vtkProp3DAxisFollower *New();
+
+  // Description:
+  // Standard VTK methods for type and printing.
+  vtkTypeMacro(vtkProp3DAxisFollower,vtkProp3DFollower);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+ // Description:
+ // Set axis that needs to be followed.
+ virtual void SetAxis(vtkAxisActor*);
+ virtual vtkAxisActor* GetAxis();
+
+ // Description:
+ // Set/Get state of auto center mode where additional
+ // translation will be added to make sure the underlying
+ // geometry has its pivot point at the center of its bounds.
+ vtkSetMacro(AutoCenter, int);
+ vtkGetMacro(AutoCenter, int);
+ vtkBooleanMacro(AutoCenter, int);
+
+ // Description:
+ // Enable / disable use of distance based LOD. If enabled the actor
+ // will not be visible at a certain distance from the camera.
+ // Default is false.
+ vtkSetMacro(EnableDistanceLOD, int);
+ vtkGetMacro(EnableDistanceLOD, int);
+
+ // Description:
+ // Set the distance LOD threshold (0.0 - 1.0). This determines at what fraction
+ // of the camera's far clip range the actor is no longer visible.
+ // Default is 0.80.
+ vtkSetClampMacro(DistanceLODThreshold, double, 0.0, 1.0);
+ vtkGetMacro(DistanceLODThreshold, double);
+
+ // Description:
+ // Enable / disable use of view angle based LOD. If enabled the actor
+ // will not be visible at a certain view angle.
+ // Default is true.
+ vtkSetMacro(EnableViewAngleLOD, int);
+ vtkGetMacro(EnableViewAngleLOD, int);
+
+ // Description:
+ // Set the view angle LOD threshold (0.0 - 1.0). This determines at what view
+ // angle to the camera the geometry is no longer visible.
+ // Default is 0.34.
+ vtkSetClampMacro(ViewAngleLODThreshold, double, 0.0, 1.0);
+ vtkGetMacro(ViewAngleLODThreshold, double);
+
+ // Description:
+ // Set/Get the desired screen offset from the axis.
+ vtkSetMacro(ScreenOffset, double);
+ vtkGetMacro(ScreenOffset, double);
+
+  // Description:
+  // Generate the matrix based on ivars. This method overrides its superclass's
+  // ComputeMatrix() method due to the special vtkProp3DAxisFollower matrix operations.
+  virtual void ComputeMatrix();
+
+  // Description:
+  // Shallow copy of a follower. Overloads the virtual vtkProp method.
+  void ShallowCopy(vtkProp *prop);
+
+ // Description:
+ // Calculate the scale factor needed to maintain the same apparent size of an
+ // object on the screen.
+ static double AutoScale(vtkViewport *viewport, vtkCamera * camera,
+                         double screenSize, double position[3]);
+
+  // Description:
+  // This causes the actor to be rendered. It in turn will render the actor's
+  // property, texture map and then mapper. If a property hasn't been
+  // assigned, then the actor will create one automatically.
+  virtual int RenderOpaqueGeometry(vtkViewport *viewport);
+  virtual int RenderTranslucentPolygonalGeometry(vtkViewport *viewport);
+  virtual int RenderVolumetricGeometry(vtkViewport *viewport);
+
+  virtual void SetViewport(vtkViewport* viewport);
+  virtual vtkViewport* GetViewport();
+
+protected:
+  vtkProp3DAxisFollower();
+  ~vtkProp3DAxisFollower();
+
+ void CalculateOrthogonalVectors(double Rx[3], double Ry[3], double Rz[3],
+                                 vtkAxisActor *axis1, double *dop,
+                                 vtkViewport *ren);
+
+
+ void ComputeRotationAndTranlation(vtkViewport *ren, double translation[3],
+                                   double Rx[3], double Ry[3], double Rz[3],
+                                   vtkAxisActor *axis);
+
+ // \NOTE: Not used as of now.
+ void ComputerAutoCenterTranslation(const double& autoScaleFactor,
+                                    double translation[3]);
+
+
+  int  TestDistanceVisibility();
+  void ExecuteViewAngleVisibility(double normal[3]);
+
+  bool IsTextUpsideDown(double* a, double* b);
+
+  int          AutoCenter;
+
+  int          EnableDistanceLOD;
+  double       DistanceLODThreshold;
+
+  int          EnableViewAngleLOD;
+  double       ViewAngleLODThreshold;
+
+  double       ScreenOffset;
+
+  vtkWeakPointer<vtkAxisActor> Axis;
+  vtkWeakPointer<vtkViewport> Viewport;
+private:
+  vtkProp3DAxisFollower(const vtkProp3DAxisFollower&);  // Not implemented.
+  void operator=(const vtkProp3DAxisFollower&);  // Not implemented.
+
+  int TextUpsideDown;
+  int VisibleAtCurrentViewAngle;
+
+};
+
+#endif
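vtkAxisActor drives these followers internally, but the class can also be wired up by hand. A minimal sketch, assuming an existing axisActor and renderer:

    vtkTextActor3D *label = vtkTextActor3D::New();
    label->SetInput("Y Axis");

    vtkProp3DAxisFollower *follower = vtkProp3DAxisFollower::New();
    follower->SetProp3D(label);                         // prop kept aligned with the axis
    follower->SetAxis(axisActor);                       // held as a weak pointer, not ref-counted
    follower->SetCamera(renderer->GetActiveCamera());
    follower->SetScreenOffset(15.0);
    renderer->AddActor(follower);

    follower->Delete();
    label->Delete();
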
diff --git a/Rendering/Annotation/vtkScalarBarActor.cxx b/Rendering/Annotation/vtkScalarBarActor.cxx
index 545361c..3eaf7b1 100644
--- a/Rendering/Annotation/vtkScalarBarActor.cxx
+++ b/Rendering/Annotation/vtkScalarBarActor.cxx
@@ -18,6 +18,7 @@
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
 #include "vtkColor.h"
+#include "vtkCoordinate.h"
 #include "vtkFloatArray.h"
 #include "vtkPointData.h"
 #include "vtkImageData.h"
@@ -27,6 +28,7 @@
 #include "vtkPolyDataMapper2D.h"
 #include "vtkProperty2D.h"
 #include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
 #include "vtkScalarsToColors.h"
 #include "vtkSmartPointer.h"
 #include "vtkTextActor.h"
@@ -80,6 +82,7 @@ vtkScalarBarActor::vtkScalarBarActor()
   this->Orientation = VTK_ORIENT_VERTICAL;
   this->Title = NULL;
   this->ComponentTitle = NULL;
+  this->VerticalTitleSeparation = 0;
 
   this->LabelTextProperty = vtkTextProperty::New();
   this->LabelTextProperty->SetFontSize(12);
@@ -233,6 +236,7 @@ vtkScalarBarActor::vtkScalarBarActor()
     ->SetReferenceCoordinate(this->PositionCoordinate);
 
   this->DrawColorBar = true;
+  this->DrawTickLabels = true;
 }
 
 //----------------------------------------------------------------------------
@@ -263,16 +267,29 @@ void vtkScalarBarActor::ReleaseGraphicsResources(vtkWindow* win)
   this->P->AnnotationLeadersActor->ReleaseGraphicsResources(win);
   this->BackgroundActor->ReleaseGraphicsResources(win);
   this->FrameActor->ReleaseGraphicsResources(win);
+  this->Texture->ReleaseGraphicsResources(win);
+}
+
+//----------------------------------------------------------------------------
+void vtkScalarBarActor::GetScalarBarRect(int rect[4], vtkViewport* viewport)
+{
+  vtkCoordinate *origin = this->ScalarBarActor->GetPositionCoordinate();
+  int * vpPos = origin->GetComputedViewportValue(viewport);
+  rect[0] = vpPos[0];
+  rect[1] = vpPos[1];
+
+  double *bounds = this->ScalarBar->GetBounds();
+  rect[0] += static_cast<int>(bounds[0] + 0.5);
+  rect[1] += static_cast<int>(bounds[2] + 0.5);
+  rect[2] = static_cast<int>(bounds[1] - bounds[0] + 0.5);
+  rect[3] = static_cast<int>(bounds[3] - bounds[2] + 0.5);
 }
 
 //----------------------------------------------------------------------------
 vtkScalarBarActor::~vtkScalarBarActor()
 {
-  if (this->LabelFormat)
-    {
-    delete [] this->LabelFormat;
-    this->LabelFormat = NULL;
-    }
+  delete [] this->LabelFormat;
+  this->LabelFormat = NULL;
 
   this->TitleActor->Delete();
   this->SetNanAnnotation(NULL);
@@ -293,17 +310,11 @@ vtkScalarBarActor::~vtkScalarBarActor()
   this->P->AnnotationLeadersMapper->Delete();
   this->P->AnnotationLeadersActor->Delete();
 
-  if (this->Title)
-    {
-    delete [] this->Title;
-    this->Title = NULL;
-    }
+  delete [] this->Title;
+  this->Title = NULL;
 
-  if ( this->ComponentTitle )
-    {
-    delete [] this->ComponentTitle;
-    this->ComponentTitle = NULL;
-    }
+  delete [] this->ComponentTitle;
+  this->ComponentTitle = NULL;
 
   this->SetLookupTable(NULL);
   this->SetLabelTextProperty(NULL);
@@ -327,6 +338,19 @@ int vtkScalarBarActor::RenderOverlay(vtkViewport* viewport)
 {
   int renderedSomething = 0;
 
+  // Is the viewport's RenderWindow capturing GL2PS-special props? We'll need
+  // to handle this specially to get the texture to show up right.
+  if (vtkRenderer *renderer = vtkRenderer::SafeDownCast(viewport))
+    {
+    if (vtkRenderWindow *renderWindow = renderer->GetRenderWindow())
+      {
+      if (renderWindow->GetCapturingGL2PSSpecialProps())
+        {
+        renderer->CaptureGL2PSSpecialProp(this);
+        }
+      }
+    }
+
   // Everything is built, just have to render
   if (this->DrawBackground)
     {
@@ -348,13 +372,16 @@ int vtkScalarBarActor::RenderOverlay(vtkViewport* viewport)
       renderedSomething += this->ScalarBarActor->RenderOverlay(viewport);
       }
 
-    vtkScalarBarActorInternal::ActorVec::iterator it;
-    for (
-      it = this->P->TextActors.begin();
-      it != this->P->TextActors.end();
-      ++it)
+    if (this->DrawTickLabels)
       {
-      renderedSomething += (*it)->RenderOverlay(viewport);
+      vtkScalarBarActorInternal::ActorVec::iterator it;
+      for (
+           it = this->P->TextActors.begin();
+           it != this->P->TextActors.end();
+           ++it)
+        {
+        renderedSomething += (*it)->RenderOverlay(viewport);
+        }
       }
     }
   else if (this->DrawColorBar)
@@ -621,6 +648,8 @@ void vtkScalarBarActor::PrintSelf(ostream& os, vtkIndent indent)
     << this->AnnotationLeaderPadding << endl;
   os << indent << "AnnotationTextScaling: "
     << this->AnnotationTextScaling << endl;
+  os << indent << "VerticalTitleSeparation: "
+    << VerticalTitleSeparation << endl;
 
   os << indent << "DrawBackground: " << this->DrawBackground << "\n";
   os << indent << "Background Property:\n";
@@ -950,7 +979,8 @@ void vtkScalarBarActor::ComputeScalarBarLength()
 {
   this->P->ScalarBarBox.Size[1] =
     this->Orientation == VTK_ORIENT_VERTICAL ?
-      this->P->Frame.Size[1] - this->P->TitleBox.Size[1] :
+      this->P->Frame.Size[1] - this->P->TitleBox.Size[1] -
+        this->VerticalTitleSeparation :
       this->P->Frame.Size[1];
 
   // The scalar bar does not include the Nan Swatch.
@@ -1048,7 +1078,8 @@ void vtkScalarBarActor::LayoutTicks()
       // Tick height could be adjusted if title text is
       // lowered by box constraints, but we won't bother:
       this->P->TickBox.Size[1] = this->P->Frame.Size[1] -
-        this->P->TitleBox.Size[1] - 3 * this->TextPad;
+        this->P->TitleBox.Size[1] - 3 * this->TextPad -
+        this->VerticalTitleSeparation;
       // Tick box height also reduced by NaN swatch size, if present:
       if (this->DrawNanAnnotation)
         {
diff --git a/Rendering/Annotation/vtkScalarBarActor.h b/Rendering/Annotation/vtkScalarBarActor.h
index 569caaa..ce287be 100644
--- a/Rendering/Annotation/vtkScalarBarActor.h
+++ b/Rendering/Annotation/vtkScalarBarActor.h
@@ -99,6 +99,12 @@ public:
   virtual void ReleaseGraphicsResources(vtkWindow*);
 
   // Description:
+  // Fills rect with the dimensions of the scalar bar in viewport coordinates.
+  // Only the color bar is considered -- text labels are not considered.
+  // rect is {xmin, ymin, width, height}
+  virtual void GetScalarBarRect(int rect[4], vtkViewport* viewport);
+
+  // Description:
   // Set/Get the lookup table to use. The lookup table specifies the number
   // of colors to use in the table (if not overridden), the scalar range,
   // and any annotated values.
@@ -275,6 +281,12 @@ public:
   vtkBooleanMacro(DrawColorBar, int);
 
   // Description:
+  // Set/Get whether the tick labels should be drawn. Default is on.
+  vtkSetMacro(DrawTickLabels, int);
+  vtkGetMacro(DrawTickLabels, int);
+  vtkBooleanMacro(DrawTickLabels, int);
+
+  // Description:
   // Set/Get the background property.
   virtual void SetBackgroundProperty(vtkProperty2D* p);
   vtkGetObjectMacro(BackgroundProperty,vtkProperty2D);
@@ -291,6 +303,13 @@ public:
   vtkSetMacro(TextPad,int);
 
   // Description:
+  // Set/get the margin, in pixels, between the title and the bar
+  // when the \a Orientation is vertical.
+  // The default is 0 pixels.
+  vtkGetMacro(VerticalTitleSeparation,int);
+  vtkSetMacro(VerticalTitleSeparation,int);
+
+  // Description:
   // Set/get the thickness of the color bar relative to the widget frame.
   // The default is 0.375 and must always be in the range ]0, 1[.
   vtkGetMacro(BarRatio,double);
@@ -476,6 +495,7 @@ protected:
   int DrawBackground; // off by default
   int DrawFrame; // off by default
   int DrawColorBar; // on by default
+  int DrawTickLabels; // on by default
   int DrawAnnotations;
   int DrawNanAnnotation;
   int AnnotationTextScaling; // off by default
@@ -493,6 +513,7 @@ protected:
   int MaximumWidthInPixels;
   int MaximumHeightInPixels;
   int TextPad;
+  int VerticalTitleSeparation;
   double BarRatio;
   double TitleRatio;
   //@}
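
The accessors declared above come from standard VTK set/get macros, so they are driven like any other actor property. Below is a minimal sketch of configuring the new options (DrawTickLabels, VerticalTitleSeparation, GetScalarBarRect); the lookup-table range, title text, and 25-pixel separation are illustrative values only.

#include "vtkLookupTable.h"
#include "vtkNew.h"
#include "vtkScalarBarActor.h"
#include "vtkViewport.h"

// Sketch: a vertical scalar bar that hides its tick labels and pushes the
// title 25 pixels away from the bar, then queries the bar's viewport rect.
void ConfigureScalarBar(vtkScalarBarActor* bar, vtkViewport* viewport)   // hypothetical helper
{
  vtkNew<vtkLookupTable> lut;
  lut->SetRange(0.0, 100.0);
  lut->Build();

  bar->SetLookupTable(lut.GetPointer());
  bar->SetTitle("Temperature");
  bar->SetOrientationToVertical();
  bar->DrawTickLabelsOff();                // from vtkBooleanMacro(DrawTickLabels, int)
  bar->SetVerticalTitleSeparation(25);     // margin between title and bar, in pixels

  int rect[4];                             // {xmin, ymin, width, height}
  bar->GetScalarBarRect(rect, viewport);   // color bar only, labels excluded
}
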
diff --git a/Rendering/Annotation/vtkSpiderPlotActor.cxx b/Rendering/Annotation/vtkSpiderPlotActor.cxx
index 1ba6975..a2dc69e 100644
--- a/Rendering/Annotation/vtkSpiderPlotActor.cxx
+++ b/Rendering/Annotation/vtkSpiderPlotActor.cxx
@@ -155,11 +155,8 @@ vtkSpiderPlotActor::~vtkSpiderPlotActor()
   this->ConnectionHolder->Delete();
   this->ConnectionHolder = 0;
 
-  if (this->Title)
-    {
-    delete [] this->Title;
-    this->Title = NULL;
-    }
+  delete [] this->Title;
+  this->Title = NULL;
 
   delete this->Labels;
   delete this->Ranges;
diff --git a/Rendering/Annotation/vtkXYPlotActor.cxx b/Rendering/Annotation/vtkXYPlotActor.cxx
index 5c6d281..e6300fd 100644
--- a/Rendering/Annotation/vtkXYPlotActor.cxx
+++ b/Rendering/Annotation/vtkXYPlotActor.cxx
@@ -2821,10 +2821,7 @@ void vtkXYPlotActor::SetXLabelFormat( const char* _arg )
     return;
     }
 
-  if ( this->XLabelFormat )
-    {
-    delete [] this->XLabelFormat;
-    }
+  delete [] this->XLabelFormat;
 
   if (_arg )
     {
@@ -2854,10 +2851,7 @@ void vtkXYPlotActor::SetYLabelFormat( const char* _arg )
     return;
     }
 
-  if ( this->YLabelFormat )
-    {
-    delete [] this->YLabelFormat;
-    }
+  delete [] this->YLabelFormat;
 
   if (_arg )
     {
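
The destructor and setter clean-ups in the hunks above all rely on the same language guarantee: delete [] (like plain delete) is defined to do nothing when applied to a null pointer, so the if (ptr) guards they remove were redundant. A small stand-alone illustration, independent of VTK:

#include <cstddef>

int main()
{
  char* label = new char[16];

  delete [] label;   // frees the array
  label = NULL;

  delete [] label;   // well-defined no-op: deleting a null pointer is always safe
  return 0;
}
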
diff --git a/Rendering/Cg/vtkCgShader.cxx b/Rendering/Cg/vtkCgShader.cxx
deleted file mode 100644
index b349ae0..0000000
--- a/Rendering/Cg/vtkCgShader.cxx
+++ /dev/null
@@ -1,511 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkCgShader.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-#include "vtkCgShader.h"
-
-#include "vtkActor.h"
-#include "vtkCamera.h"
-#include "vtkLightCollection.h"
-#include "vtkLight.h"
-#include "vtkMatrix4x4.h"
-#include "vtkObjectFactory.h"
-#include "vtkOpenGL.h"
-#include "vtkOpenGLTexture.h"
-#include "vtkProperty.h"
-#include "vtkRenderer.h"
-#include "vtkWindow.h"
-#include "vtkXMLDataElement.h"
-#include "vtkXMLShader.h"
-
-#include <Cg/cg.h>
-#include <Cg/cgGL.h>
-
-#include <string>
-#include <vector>
-#include <map>
-
-#define CG_UNIFORM_DOUBLE_AS_FLOAT 1
-
-//-----------------------------------------------------------------------------
-class CgStateMatrixMap
-{
-  public:
-
-  CgStateMatrixMap()
-    {
-    // Define CGGLenums for mapping strings from xml file
-    this->StateMap["CG_GL_MATRIX_IDENTITY"] = CG_GL_MATRIX_IDENTITY;
-    this->StateMap["CG_GL_MATRIX_TRANSPOSE"] = CG_GL_MATRIX_TRANSPOSE;
-    this->StateMap["CG_GL_MATRIX_INVERSE"] = CG_GL_MATRIX_INVERSE;
-    this->StateMap["CG_GL_MATRIX_INVERSE_TRANSPOSE"] = CG_GL_MATRIX_INVERSE_TRANSPOSE;
-    this->StateMap["CG_GL_MODELVIEW_MATRIX"] = CG_GL_MODELVIEW_MATRIX;
-    this->StateMap["CG_GL_PROJECTION_MATRIX"] = CG_GL_PROJECTION_MATRIX;
-    this->StateMap["CG_GL_TEXTURE_MATRIX"] = CG_GL_TEXTURE_MATRIX;
-    this->StateMap["CG_GL_MODELVIEW_PROJECTION_MATRIX"] = CG_GL_MODELVIEW_PROJECTION_MATRIX;
-    this->StateMap["CG_GL_VERTEX"] = CG_GL_VERTEX;
-    this->StateMap["CG_GL_FRAGMENT"] = CG_GL_FRAGMENT;
-    }
-  ~CgStateMatrixMap()
-    {
-    // STL containers free their own memory; their destructors are called
-    // automatically when this class is destroyed.
-    }
-
-  bool HaveCGGLenum( std::string name )
-    {
-    if( this->StateMap.find(name) == this->StateMap.end() )
-      {
-      return 0;
-      }
-    return 1;
-    }
-
-  bool HaveCGGLenum( const char* name )
-    {
-    if(!name)
-      {
-      return 0;
-      }
-    std::string Name = name;
-    return this->HaveCGGLenum(Name);
-    }
-
-  CGGLenum GetCGGLenum( std::string name )
-    {
-    return this->StateMap[ name ];
-    }
-  CGGLenum GetCGGLenum( const char* name )
-    {
-    std::string Name = name;
-    return this->GetCGGLenum( Name );
-    }
-
-  protected:
-  private:
-    std::map< std::string, CGGLenum > StateMap;
-};
-
-//-----------------------------------------------------------------------------
-static vtkCgShader *CurrentShader;
-extern "C" {
-  static void ErrorCallback(void)
-    {
-    CurrentShader->ReportError();
-    }
-}
-
-//-----------------------------------------------------------------------------
-class vtkCgShaderInternals
-{
-public:
-  CGprofile Profile;
-  CGcontext Context;
-  CGprogram Program;
-  CGerror LastError;
-  CgStateMatrixMap StateMatrixMap;
-
-  vtkCgShaderInternals()
-    {
-    this->LastError = CG_NO_ERROR;
-    this->Context = 0;
-    }
-
-  CGparameter GetUniformParameter(const char* name)
-    {
-    if(!name)
-      {
-      vtkGenericWarningMacro( "NULL uniform shader parameter name.");
-      return NULL;
-      }
-
-    if( cgIsProgram(this->Program) != GL_TRUE )
-      {
-      vtkGenericWarningMacro( "NULL shader program.");
-      return NULL;
-      }
-
-    CGparameter p = cgGetNamedParameter( this->Program, name );
-    if( (cgIsParameter(p)!=CG_TRUE) || (p==NULL) )
-      {
-      vtkGenericWarningMacro( "No parameter named: " << name << endl );
-      return NULL;
-      }
-    return p;
-    }
-};
-
-//-----------------------------------------------------------------------------
-vtkStandardNewMacro(vtkCgShader);
-
-//-----------------------------------------------------------------------------
-vtkCgShader::vtkCgShader()
-{
-  this->Internals = new vtkCgShaderInternals;
-}
-
-//-----------------------------------------------------------------------------
-vtkCgShader::~vtkCgShader()
-{
-  this->ReleaseGraphicsResources(NULL);
-  delete this->Internals;
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::ReleaseGraphicsResources(vtkWindow* window)
-{
-  if (window &&
-    window->GetMapped() &&
-    cgIsContext(this->Internals->Context))
-    {
-    // This will also destroy any programs contained in the context.
-    cgDestroyContext(this->Internals->Context);
-    }
-  this->Internals->Context = 0;
-}
-
-//-----------------------------------------------------------------------------
-int vtkCgShader::Compile()
-{
-  if (!this->XMLShader || this->Internals->LastError != CG_NO_ERROR)
-    {
-    return 0;
-    }
-
-  if (!this->XMLShader->GetCode())
-    {
-    vtkErrorMacro("Shader doesn't have any code!");
-    this->Internals->LastError = CG_INVALID_PROGRAM_HANDLE_ERROR;
-    return 0;
-    }
-
-  // If we already have a compiled program, grab the
-  // correct context and profile and return control.
-  if( cgIsProgram(this->Internals->Program) == GL_TRUE )
-    {
-    if( cgGLIsProgramLoaded(this->Internals->Program) == GL_TRUE )
-      {
-      this->Internals->Profile = cgGetProgramProfile( this->Internals->Program );
-      this->Internals->Context = cgGetProgramContext( this->Internals->Program );
-      return 1;
-      }
-    }
-
-  // Get a valid profile
-  if( cgGLIsProfileSupported(this->Internals->Profile) == CG_FALSE )
-    {
-    switch(this->XMLShader->GetScope())
-      {
-    case vtkXMLShader::SCOPE_VERTEX:
-      this->Internals->Profile = cgGLGetLatestProfile(CG_GL_VERTEX);
-      break;
-
-    case vtkXMLShader::SCOPE_FRAGMENT:
-      this->Internals->Profile = cgGLGetLatestProfile(CG_GL_FRAGMENT);
-      break;
-
-    default:
-      vtkErrorMacro("Unsupported scope!");
-      this->Internals->LastError = CG_UNKNOWN_PROFILE_ERROR;
-      return 0;
-      }
-    }
-
-  // Get a valid context
-  if( cgIsContext(this->Internals->Context) == CG_FALSE )
-    {
-    this->Internals->Context = cgCreateContext();
-    }
-
-  ::CurrentShader = this;
-  cgSetErrorCallback(ErrorCallback);
-  this->Internals->LastError = CG_NO_ERROR;
-
-  const char* source_string = this->XMLShader->GetCode();
-
-  // Although Cg can create a shader from the file directly,
-  // vtkXMLShader does not provide an interface to obtain the
-  // filename (to keep the interface simple and clear).
-  // So we always provide the contents of the file.
-
-  if( cgIsContext(this->Internals->Context) == CG_TRUE
-    && cgGLIsProfileSupported( this->Internals->Profile ) == CG_TRUE
-    && source_string)
-    {
-    this->Internals->Program = cgCreateProgram( this->Internals->Context,
-      CG_SOURCE,
-      source_string,
-      this->Internals->Profile,
-      this->XMLShader->GetEntry(),
-      this->XMLShader->GetArgs());
-    }
-
-  if( cgIsProgram( this->Internals->Program ) == CG_TRUE )
-    {
-    cgGLLoadProgram(this->Internals->Program);
-    }
-  else
-    {
-    vtkErrorMacro("Failed to create Cg program.");
-    return 0;
-    }
-  if (this->Internals->LastError != CG_NO_ERROR)
-    {
-    vtkErrorMacro("Error occurred during Shader compile.");
-    return 0;
-    }
-  return 1;
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::Bind()
-{
-  // Bind shader to hardware
-  if(cgIsProgram( this->Internals->Program ) == CG_TRUE)
-    {
-    cgGLEnableProfile(this->Internals->Profile);
-    cgGLBindProgram(this->Internals->Program);
-    }
-}
-
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::Unbind()
-{
-  if(cgIsProgram( this->Internals->Program ) == CG_TRUE)
-    {
-    cgGLUnbindProgram(this->Internals->Profile);
-    cgGLDisableProfile(this->Internals->Profile);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::ReportError()
-{
-  CGerror error = cgGetError();
-  this->Internals->LastError = error;
-  vtkErrorMacro( << cgGetErrorString(error) );
-  if( error == CG_COMPILER_ERROR )
-    {
-    vtkErrorMacro( << cgGetLastListing(this->Internals->Context) );
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::SetUniformParameter(const char* name, int numValues,
-  const int* values)
-{
-  double* dvalues = new double[numValues];
-  for (int i=0; i < numValues; i++)
-    {
-    dvalues[i] = static_cast<double>(values[i]);
-    }
-
-  this->SetUniformParameter(name, numValues, dvalues);
-  delete[] dvalues;
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::SetUniformParameter(const char* name, int numValues, const float* value)
-{
-  CGparameter param = this->Internals->GetUniformParameter(name);
-  if (!param)
-    {
-    return;
-    }
-  switch (numValues)
-    {
-  case 1:
-    cgGLSetParameter1fv(param, value);
-    break;
-  case 2:
-    cgGLSetParameter2fv(param, value);
-    break;
-  case 3:
-    cgGLSetParameter3fv(param, value);
-    break;
-  case 4:
-    cgGLSetParameter4fv(param, value);
-    break;
-  default:
-    vtkErrorMacro("Number of values not supported : " << numValues);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::SetUniformParameter(const char* name, int numValues, const double* value)
-{
-  CGparameter param = this->Internals->GetUniformParameter(name);
-  if (!param)
-    {
-    return;
-    }
-  switch (numValues)
-    {
-  case 1:
-    cgGLSetParameter1dv(param, value);
-    break;
-  case 2:
-    cgGLSetParameter2dv(param, value);
-    break;
-  case 3:
-    cgGLSetParameter3dv(param, value);
-    break;
-  case 4:
-    cgGLSetParameter4dv(param, value);
-    break;
-  default:
-    vtkErrorMacro("Number of values not supported : " << numValues);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::SetMatrixParameter(const char* name, int , int order,
-  const float* value)
-{
-  CGparameter param = this->Internals->GetUniformParameter(name);
-  if (!param)
-    {
-    return;
-    }
-  if (order == vtkShader::RowMajor)
-    {
-    cgGLSetMatrixParameterfr(param, value);
-    }
-  else
-    {
-    cgGLSetMatrixParameterfc(param, value);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::SetMatrixParameter(const char* name, int , int order,
-  const double* value)
-{
-  CGparameter param = this->Internals->GetUniformParameter(name);
-  if (!param)
-    {
-    return;
-    }
-  if (order == vtkShader::RowMajor)
-    {
-    cgGLSetMatrixParameterdr(param, value);
-    }
-  else
-    {
-    cgGLSetMatrixParameterdc(param, value);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::SetMatrixParameter(const char* name, const char* state_matrix_type,
-  const char* transform_type)
-{
-  if (!state_matrix_type )
-    {
-    vtkErrorMacro( "state_matrix Type not specified!" );
-    return;
-    }
-  if (!transform_type )
-    {
-    transform_type = "CG_GL_MATRIX_IDENTITY";
-    }
-  CGparameter param = this->Internals->GetUniformParameter(name);
-  if (!param)
-    {
-    return;
-    }
-
-  if( this->Internals->StateMatrixMap.HaveCGGLenum(state_matrix_type) &&
-      this->Internals->StateMatrixMap.HaveCGGLenum(transform_type) )
-    {
-    cgGLSetStateMatrixParameter(param,
-      this->Internals->StateMatrixMap.GetCGGLenum(state_matrix_type),
-      this->Internals->StateMatrixMap.GetCGGLenum(transform_type));
-    }
-  else
-    {
-    vtkErrorMacro( "Can't find state matrix valuse or xforms for : " <<
-                   name << ", " << state_matrix_type << ", " << transform_type << endl );
-    exit(0);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::SetSamplerParameter(const char* name, vtkTexture* texture,int)
-{
-  CGparameter param = this->Internals->GetUniformParameter(name);
-  if (!param)
-    {
-    return;
-    }
-  vtkOpenGLTexture* glTexture = vtkOpenGLTexture::SafeDownCast(texture);
-  if (glTexture)
-    {
-    cgGLSetTextureParameter(param, glTexture->GetIndex());
-    cgGLEnableTextureParameter(param);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::PassShaderVariables(vtkActor* actor, vtkRenderer* renderer)
-{
-  bool push_transform = (actor->GetIsIdentity() == 0);
-  if (push_transform)
-    {
-    double *mat = actor->GetMatrix()->Element[0];
-    double mat2[16];
-    mat2[0] = mat[0];
-    mat2[1] = mat[4];
-    mat2[2] = mat[8];
-    mat2[3] = mat[12];
-    mat2[4] = mat[1];
-    mat2[5] = mat[5];
-    mat2[6] = mat[9];
-    mat2[7] = mat[13];
-    mat2[8] = mat[2];
-    mat2[9] = mat[6];
-    mat2[10] = mat[10];
-    mat2[11] = mat[14];
-    mat2[12] = mat[3];
-    mat2[13] = mat[7];
-    mat2[14] = mat[11];
-    mat2[15] = mat[15];
-
-    // insert model transformation
-    glMatrixMode( GL_MODELVIEW );
-    glPushMatrix();
-    glMultMatrixd(mat2);
-    }
-  this->Superclass::PassShaderVariables(actor, renderer);
-  if (push_transform)
-    {
-    glMatrixMode(GL_MODELVIEW);
-    glPopMatrix();
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkCgShader::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
diff --git a/Rendering/Cg/vtkCgShader.h b/Rendering/Cg/vtkCgShader.h
deleted file mode 100644
index 2c6da16..0000000
--- a/Rendering/Cg/vtkCgShader.h
+++ /dev/null
@@ -1,172 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkCgShader.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-// .NAME vtkCgShader - Cg Shader
-// .SECTION Description
-// vtkCgShader is the only class that interfaces directly with the Cg
-// libraries. Once it has a valid shader described by a vtkXMLDataElement
-// it will create, compile, install, and initialize the parameters of a Cg
-// hardware shader.
-//
-// .Section Supported Shader Types:
-//
-// Basic Types:
-// uniform float
-// uniform double
-//
-// Vector Types:
-// uniform float{1|2|3|4}
-// uniform double{1|2|3|4}
-//
-// Matrix Types:
-// uniform float{1|2|3|4}x{1|2|3|4}
-// uniform double{1|2|3|4}x{1|2|3|4}
-//
-// NOTE: In the above, 'double' and 'float' refer to the application's
-// representation, the hardware shader must define all of the above types
-// as 'uniform float'.
-//
-// State Matrix Parameters:
-// uniform float4x4
-// with the following Cg-defined settings:
-//  CG_GL_MATRIX_IDENTITY
-//  CG_GL_MATRIX_TRANSPOSE
-//  CG_GL_MATRIX_INVERSE
-//  CG_GL_MATRIX_INVERSE_TRANSPOSE
-//  CG_GL_MODELVIEW_MATRIX
-//  CG_GL_PROJECTION_MATRIX
-//  CG_GL_TEXTURE_MATRIX
-//  CG_GL_MODELVIEW_PROJECTION_MATRIX
-//
-// Texture Samplers:
-// sample1D - Not tested
-// sample2D
-// sample3D - Not supported by VTK
-// sampleRECT - Not supported by VTK
-// sampleCUBE - Not supported by VTK
-//
-// User-Defined:
-// uniform struct
-//
-// VTK-Specific Hardware Data Structures:
-// vtkProperty
-// vtkLight
-// vtkCamera
-// See vtkSNL/Rendering/Shaders/vtkProperty.cg
-// See vtkSNL/Rendering/Shaders/vtkLight.cg
-// See vtkSNL/Rendering/Shaders/vtkCamera.cg
-//
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkCgShader_h
-#define __vtkCgShader_h
-
-#include "vtkShader.h"
-
-class vtkActor;
-class vtkCamera;
-class vtkCgShaderInternals;
-class vtkLight;
-class vtkProperty;
-class vtkRenderer;
-
-// manages all shaders defined in the XML file
-// especially the part about sending things to the card
-class VTK_RENDERING_EXPORT vtkCgShader : public vtkShader
-{
-public:
-  static vtkCgShader *New();
-  vtkTypeMacro(vtkCgShader, vtkShader);
-  void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Called to compile the shader code.
-  // The vtkShaderProgram calls this method only when
-  // vtkShader::IsCompiled() returns false.
-  // The subclasses must only compile the code in this method.
-  // Returns if the compile was successful.
-  virtual int Compile();
-
-  // Description:
-  // In this method the shader can enable/bind itself. This is applicable
-  // only to Cg, since in GLSL, individual shaders in a program can't be
-  // enabled/bound.
-  virtual void  Bind();
-
-  // Description:
-  // Called to unbind the shader. As with Bind(), this is only applicable
-  // to Cg.
-  virtual void Unbind();
-
-  // Description:
-  // Internal method, don't call directly. Called by the Cg error callback
-  // to report Cg errors.
-  void ReportError();
-
-  // Description:
-  // Release any graphics resources that are being consumed by this actor.
-  // The parameter window could be used to determine which graphic
-  // resources to release.
-  void ReleaseGraphicsResources(vtkWindow *);
-
-
-  // Description:
-  // Called to pass VTK actor/property/light values and other
-  // Shader variables over to the shader. This is called by the ShaderProgram
-  // during each render. We override this method for Cg shaders, since for Cg shaders,
-  // we need to ensure that the actor transformations are pushed before
-  // state matrix uniform variables are bound.
-  virtual void PassShaderVariables(vtkActor* actor, vtkRenderer* ren);
-//BTX
-protected:
-  vtkCgShader();
-  ~vtkCgShader();
-
-  // Description:
-  // Equivalent to cgGLSetParameter and glUniform.
-  virtual void SetUniformParameter(const char* name, int numValues, const int* value) ;
-  virtual void SetUniformParameter(const char* name, int numValues, const float* value) ;
-  virtual void SetUniformParameter(const char* name, int numValues, const double* value);
-
-  // Description:
-  // Equivalent to cgGLSetMatrixParameterfc and glUniformMatrix.
-  virtual void SetMatrixParameter(const char* name, int numValues,
-    int order, const float* value);
-  virtual void SetMatrixParameter(const char* name, int numValues,
-    int order, const double* value);
-  virtual void SetMatrixParameter(const char* name, const char* state_matrix_type,
-    const char* transform_type);
-
-  virtual void SetSamplerParameter(const char* name, vtkTexture* texture,
-                                   int);
-
-  friend class vtkCgShaderDeviceAdapter;
-private:
-  vtkCgShader(const vtkCgShader&); // Not Implemented
-  void operator=(const vtkCgShader&); // Not Implemented
-
-  vtkCgShaderInternals* Internals;
-//ETX
-};
-#endif //_vtkCgShader_h
diff --git a/Rendering/Cg/vtkCgShaderDeviceAdapter.cxx b/Rendering/Cg/vtkCgShaderDeviceAdapter.cxx
deleted file mode 100644
index 9e63a96..0000000
--- a/Rendering/Cg/vtkCgShaderDeviceAdapter.cxx
+++ /dev/null
@@ -1,114 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkCgShaderDeviceAdapter.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkCgShaderDeviceAdapter.h"
-
-#include "vtkObjectFactory.h"
-#include "vtkSmartPointer.h"
-#include "vtkShaderProgram.h"
-#include "vtkCgShader.h"
-#include "vtkCollectionIterator.h"
-#include "vtkXMLShader.h"
-
-class vtkCgShaderDeviceAdapter::vtkInternal
-{
-public:
-  vtkSmartPointer<vtkCgShader> VertexShader;
-};
-
-vtkStandardNewMacro(vtkCgShaderDeviceAdapter);
-//----------------------------------------------------------------------------
-vtkCgShaderDeviceAdapter::vtkCgShaderDeviceAdapter()
-{
-  this->Internal = new vtkInternal();
-}
-
-//----------------------------------------------------------------------------
-vtkCgShaderDeviceAdapter::~vtkCgShaderDeviceAdapter()
-{
-  delete this->Internal;
-}
-
-//----------------------------------------------------------------------------
-void vtkCgShaderDeviceAdapter::PrepareForRender()
-{
-  // locate the vertex CgShader which can accept varying parameters.
-  vtkCollectionIterator* shaderIter = this->ShaderProgram->NewShaderIterator();
-
-  for (shaderIter->InitTraversal(); !shaderIter->IsDoneWithTraversal();
-    shaderIter->GoToNextItem())
-    {
-    vtkCgShader* shader = vtkCgShader::SafeDownCast(
-      shaderIter->GetCurrentObject());
-    if (shader && shader->GetScope() == vtkXMLShader::SCOPE_VERTEX)
-      {
-      this->Internal->VertexShader = shader;
-      break;
-      }
-    }
-  shaderIter->Delete();
-}
-
-template <class T>
-void vtkCgShaderDeviceAdapterSendAttributeInternal(vtkCgShaderDeviceAdapter* self,
-  const char* attrname, int components, const T* attribute, unsigned long offset)
-{
-  double converted_value[4];
-  for (int cc=0; cc < 4 && cc < components; cc++)
-    {
-    converted_value[cc] = static_cast<double>((attribute+offset)[cc]);
-    }
-  self->SendAttributeInternal(attrname, components, converted_value);
-}
-
-VTK_TEMPLATE_SPECIALIZE
-void vtkCgShaderDeviceAdapterSendAttributeInternal(vtkCgShaderDeviceAdapter* self,
-  const char* attrname, int components, const float* attribute, unsigned long offset)
-{
-  self->SendAttributeInternal(attrname, components, (attribute+offset));
-}
-
-//----------------------------------------------------------------------------
-void vtkCgShaderDeviceAdapter::SendAttributeInternal(
-  const char* attrname, int components, const double* data)
-{
-  this->Internal->VertexShader->SetUniformParameter(attrname, components, data);
-}
-
-//----------------------------------------------------------------------------
-void vtkCgShaderDeviceAdapter::SendAttributeInternal(
-  const char* attrname, int components, const float* data)
-{
-  this->Internal->VertexShader->SetUniformParameter(attrname, components, data);
-}
-
-//----------------------------------------------------------------------------
-void vtkCgShaderDeviceAdapter::SendAttribute(const char* attrname,
-  int components, int type,
-  const void* attribute, unsigned long offset/*=0*/)
-{
-  switch (type)
-    {
-    vtkTemplateMacro(
-      vtkCgShaderDeviceAdapterSendAttributeInternal(this,
-        attrname, components, static_cast<const VTK_TT*>(attribute), offset));
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkCgShaderDeviceAdapter::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
-
diff --git a/Rendering/Cg/vtkCgShaderDeviceAdapter.h b/Rendering/Cg/vtkCgShaderDeviceAdapter.h
deleted file mode 100644
index 8ef6c51..0000000
--- a/Rendering/Cg/vtkCgShaderDeviceAdapter.h
+++ /dev/null
@@ -1,75 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkCgShaderDeviceAdapter.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkCgShaderDeviceAdapter - adapter to pass generic vertex attributes
-// to the rendering pipeline to be used in a Cg shader.
-// .SECTION Description
-// vtkShaderDeviceAdapter subclass for Cg.
-// .SECTION Thanks
-// Support for generic vertex attributes in VTK was contributed in
-// collaboration with Stephane Ploix at EDF.
-
-#ifndef __vtkCgShaderDeviceAdapter_h
-#define __vtkCgShaderDeviceAdapter_h
-
-#include "vtkShaderDeviceAdapter.h"
-
-class VTK_RENDERING_EXPORT vtkCgShaderDeviceAdapter : public vtkShaderDeviceAdapter
-{
-public:
-  static vtkCgShaderDeviceAdapter* New();
-  vtkTypeMacro(vtkCgShaderDeviceAdapter, vtkShaderDeviceAdapter);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // This method is called before rendering. This gives the shader device
-  // adapter an opportunity to collect information, such as attribute indices
-  // that it will need while rendering.
-  virtual void PrepareForRender();
-
-  // Description:
-  // Sends a single attribute to the graphics card.
-  // The attrname parameter identifies the name of attribute.
-  // The components parameter gives the number of
-  // components in the attribute.  In general, components must be between
-  // 1-4, but a rendering system may impose even more constraints.  The
-  // type parameter is a VTK type enumeration (VTK_FLOAT, VTK_INT, etc.).
-  // Again, a rendering system may not support all types for all
-  // attributes.  The attribute parameter is the actual data for the
-  // attribute.
-  // If offset is specified, it is added to the \c attribute pointer after
-  // it has been cast to the proper type.
-  virtual void SendAttribute(const char* attrname,
-    int components, int type,
-    const void* attribute, unsigned long offset=0);
-
-//BTX
-  void SendAttributeInternal(const char* attrname, int components, const double*);
-  void SendAttributeInternal(const char* attrname, int components, const float*);
-protected:
-  vtkCgShaderDeviceAdapter();
-  ~vtkCgShaderDeviceAdapter();
-
-private:
-  vtkCgShaderDeviceAdapter(const vtkCgShaderDeviceAdapter&); // Not implemented.
-  void operator=(const vtkCgShaderDeviceAdapter&); // Not implemented.
-
-  class vtkInternal;
-  vtkInternal* Internal;
-//ETX
-};
-
-#endif
-
-
diff --git a/Rendering/Cg/vtkCgShaderProgram.cxx b/Rendering/Cg/vtkCgShaderProgram.cxx
deleted file mode 100644
index 661e016..0000000
--- a/Rendering/Cg/vtkCgShaderProgram.cxx
+++ /dev/null
@@ -1,81 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkCgShaderProgram.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-#include "vtkCgShaderProgram.h"
-
-#include "vtkActor.h"
-#include "vtkCgShaderDeviceAdapter.h"
-#include "vtkCgShader.h"
-#include "vtkCollectionIterator.h"
-#include "vtkObjectFactory.h"
-#include "vtkRenderer.h"
-#include "vtkXMLMaterialReader.h"
-
-
-vtkStandardNewMacro(vtkCgShaderProgram);
-
-//----------------------------------------------------------------------------
-vtkCgShaderProgram::vtkCgShaderProgram()
-{
-  vtkCgShaderDeviceAdapter* da = vtkCgShaderDeviceAdapter::New();
-  this->SetShaderDeviceAdapter(da);
-  da->Delete();
-}
-
-//----------------------------------------------------------------------------
-vtkCgShaderProgram::~vtkCgShaderProgram()
-{
-  this->SetShaderDeviceAdapter(0);
-}
-
-//----------------------------------------------------------------------------
-vtkShader* vtkCgShaderProgram::NewShader()
-{
-  return vtkCgShader::New();
-}
-
-//----------------------------------------------------------------------------
-void vtkCgShaderProgram::Render(vtkActor *actor, vtkRenderer *renderer )
-{
-  // Cg requires no communication between vertex and fragment shaders;
-  // each can be installed in hardware independently. There's really
-  // nothing for vtkCgShaderProgram to do but delegate all shader mechanics
-  // tasks to its vertex and fragment shaders.
-  vtkCollectionIterator* iter = this->ShaderCollectionIterator;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-    iter->GoToNextItem())
-    {
-    vtkShader* shader = vtkShader::SafeDownCast(iter->GetCurrentObject());
-    if (shader->Compile())
-      {
-      shader->PassShaderVariables(actor, renderer);
-      shader->Bind();
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkCgShaderProgram::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
diff --git a/Rendering/Cg/vtkCgShaderProgram.h b/Rendering/Cg/vtkCgShaderProgram.h
deleted file mode 100644
index 6d62aaf..0000000
--- a/Rendering/Cg/vtkCgShaderProgram.h
+++ /dev/null
@@ -1,77 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkCgShaderProgram.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-// .NAME vtkCgShaderProgram - Cg Shader Program
-// .SECTION Description
-// vtkCgShaderProgram allows vtkShaderProperty (later vtkProperty)
-// to treat a vertex/fragment shader pair as a single unit for the purpose
-// of setting their common material library and encapsulating shader operation:
-// shader installation and variable initialization.
-//
-// Since the interface between Cg shaders is only resolved at runtime (shader
-// runtime that is, after they've been installed on the card), Cg does not
-// have the concept of a shader Program. This class simply delegates shader
-// program functions to its delegate vtkCgShaders.
-//
-// .Section See Also:
-// vtkShaderBase, vtkShader, vtkCgShader, vtkShaderProgram
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkCgShaderProgram_h
-#define __vtkCgShaderProgram_h
-
-#include "vtkShaderProgram.h"
-
-class vtkActor;
-class vtkRenderer;
-
-class VTK_RENDERING_EXPORT vtkCgShaderProgram : public vtkShaderProgram
-{
-public:
-  vtkTypeMacro(vtkCgShaderProgram, vtkShaderProgram);
-  static vtkCgShaderProgram *New();
-  void PrintSelf(ostream &os, vtkIndent indent);
-
-  // .Description:
-  // Take shader from its source (file and/or string) and load, compile, and
-  // install in hardware. Also, initialize uniform variables originating from
-  // the following sources: XML material file, vtkProperty, vtkLight,
-  // vtkCamera, and application-specified uniform variables.
-  //
-  // Delegates to vtkShader.
-  virtual void Render(vtkActor*, vtkRenderer*);
-
-protected:
-  vtkCgShaderProgram();
-  ~vtkCgShaderProgram();
-
-  // Description:
-  // Creates and returns a new vtkCgShader.
-  virtual vtkShader* NewShader();
-private:
-  vtkCgShaderProgram(const vtkCgShaderProgram&); // Not Implemented
-  void operator=(const vtkCgShaderProgram&); // Not Implemented
-};
-#endif //__vtkCgShaderProgram_h
diff --git a/Rendering/Context2D/module.cmake b/Rendering/Context2D/module.cmake
index 370f56a..0291e75 100644
--- a/Rendering/Context2D/module.cmake
+++ b/Rendering/Context2D/module.cmake
@@ -3,10 +3,11 @@ vtk_module(vtkRenderingContext2D
   GROUPS
     Rendering
   DEPENDS
+    vtkRenderingCore
+  PRIVATE_DEPENDS
     vtkCommonDataModel
     vtkCommonMath
     vtkCommonTransforms
-    vtkRenderingCore
     vtkRenderingOpenGL
     vtkRenderingFreeType
   )
diff --git a/Rendering/Context2D/vtkAbstractContextItem.h b/Rendering/Context2D/vtkAbstractContextItem.h
index bea3ea0..251f29d 100644
--- a/Rendering/Context2D/vtkAbstractContextItem.h
+++ b/Rendering/Context2D/vtkAbstractContextItem.h
@@ -53,7 +53,7 @@ public:
   // Description:
   // Paint the children of the item, should be called whenever the children
   // need to be rendered.
-  bool PaintChildren(vtkContext2D *painter);
+  virtual bool PaintChildren(vtkContext2D *painter);
 
   // Description:
   // Release graphics resources held by the item. The default implementation
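
Making PaintChildren() virtual lets container items customize how their children are traversed and drawn. A hedged sketch of what that enables; the subclass name and the logging behaviour are hypothetical, not part of VTK.

#include "vtkContext2D.h"
#include "vtkContextItem.h"
#include "vtkObjectFactory.h"

// Hypothetical group item that reports how many children it is about to
// paint, then defers to the default traversal. Only possible now that
// PaintChildren() is declared virtual in vtkAbstractContextItem.
class vtkLoggingGroupItem : public vtkContextItem
{
public:
  static vtkLoggingGroupItem* New();
  vtkTypeMacro(vtkLoggingGroupItem, vtkContextItem);

  virtual bool PaintChildren(vtkContext2D* painter)
    {
    cout << "Painting " << this->GetNumberOfItems() << " child items" << endl;
    return this->Superclass::PaintChildren(painter);
    }
};
vtkStandardNewMacro(vtkLoggingGroupItem);
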
diff --git a/Rendering/Context2D/vtkContext2D.cxx b/Rendering/Context2D/vtkContext2D.cxx
index a311d3a..4f87440 100644
--- a/Rendering/Context2D/vtkContext2D.cxx
+++ b/Rendering/Context2D/vtkContext2D.cxx
@@ -655,6 +655,50 @@ void vtkContext2D::ComputeStringBounds(const char* string,
 }
 
 //-----------------------------------------------------------------------------
+int vtkContext2D::ComputeFontSizeForBoundedString(const vtkStdString &string,
+                                                  float width, float height)
+{
+  int orientation = this->GetTextProp()->GetOrientation();
+  this->GetTextProp()->SetOrientation(0.0);
+
+  float stringBounds[4];
+  int currentFontSize = this->GetTextProp()->GetFontSize();
+  this->ComputeStringBounds(string, stringBounds);
+
+  // font size is too big
+  if (stringBounds[2] > width || stringBounds[3] > height)
+    {
+    while (stringBounds[2] > width || stringBounds[3] > height)
+      {
+      --currentFontSize;
+      this->GetTextProp()->SetFontSize(currentFontSize);
+      this->ComputeStringBounds(string, stringBounds);
+      if (currentFontSize < 0)
+        {
+        this->GetTextProp()->SetFontSize(0);
+        return 0;
+        }
+      }
+    }
+
+  // font size is too small
+  else
+    {
+      while (stringBounds[2] < width && stringBounds[3] < height)
+      {
+      ++currentFontSize;
+      this->GetTextProp()->SetFontSize(currentFontSize);
+      this->ComputeStringBounds(string, stringBounds);
+      }
+    --currentFontSize;
+    this->GetTextProp()->SetFontSize(currentFontSize);
+    }
+
+  this->GetTextProp()->SetOrientation(orientation);
+  return currentFontSize;
+}
+
+//-----------------------------------------------------------------------------
 void vtkContext2D::DrawMathTextString(vtkPoints2D *point,
                                       const vtkStdString &string)
 {
diff --git a/Rendering/Context2D/vtkContext2D.h b/Rendering/Context2D/vtkContext2D.h
index 2451874..cbb3db8 100644
--- a/Rendering/Context2D/vtkContext2D.h
+++ b/Rendering/Context2D/vtkContext2D.h
@@ -299,7 +299,6 @@ public:
   // supplied bounds variable, the first two elements are the bottom corner of
   // the string, and the second two elements are the width and height of the
   // bounding box.
-  // NOTE: This function does not take account of the text rotation.
   void ComputeStringBounds(const vtkStdString &string, vtkPoints2D *bounds);
   void ComputeStringBounds(const vtkStdString &string, float bounds[4]);
   void ComputeStringBounds(const vtkUnicodeString &string, vtkPoints2D *bounds);
@@ -308,6 +307,14 @@ public:
   void ComputeStringBounds(const char* string, float bounds[4]);
 
   // Description:
+  // Calculate the largest possible font size where the supplied string will fit
+  // within the specified bounds.  In addition to being returned, this font size
+  // is also used to update the vtkTextProperty used by this object.
+  // NOTE: text rotation is ignored for the purposes of this function.
+  int ComputeFontSizeForBoundedString(const vtkStdString &string, float width,
+                                      float height);
+
+  // Description:
   // Draw a MathText formatted equation to the screen. See
   // http://matplotlib.sourceforge.net/users/mathtext.html for more information.
   // MathText requires matplotlib and python, and the vtkMatplotlib module must
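
The new ComputeFontSizeForBoundedString() both returns the fitted size and updates the painter's active vtkTextProperty, so it is normally called right before DrawString(). A minimal sketch follows, intended to run inside a vtkContextItem::Paint() override; the helper name, label text, and 200x40 box are illustrative.

#include "vtkContext2D.h"
#include "vtkStdString.h"

// Sketch: fit a label into a 200x40 pixel box, then draw it. The call
// shrinks or grows the painter's current font size as a side effect.
void DrawFittedLabel(vtkContext2D* painter)   // hypothetical helper
{
  vtkStdString label("Pressure (kPa)");
  int fontSize =
    painter->ComputeFontSizeForBoundedString(label, 200.0f, 40.0f);
  if (fontSize > 0)   // 0 means the string cannot fit at any usable size
    {
    painter->DrawString(10.0f, 10.0f, label);
    }
}
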
diff --git a/Rendering/Context2D/vtkContextBufferId.cxx b/Rendering/Context2D/vtkContextBufferId.cxx
index 8b756af..dcac999 100644
--- a/Rendering/Context2D/vtkContextBufferId.cxx
+++ b/Rendering/Context2D/vtkContextBufferId.cxx
@@ -19,6 +19,7 @@
 #include <cassert>
 #include "vtkObjectFactory.h"
 #include "vtkgl.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkContextBufferId);
 
@@ -71,6 +72,8 @@ void vtkContextBufferId::SetValues(int srcXmin,
 {
   assert("pre: is_allocated" && this->IsAllocated());
 
+  vtkOpenGLClearErrorMacro();
+
   GLint savedReadBuffer;
   glGetIntegerv(GL_READ_BUFFER,&savedReadBuffer);
 
@@ -110,6 +113,8 @@ void vtkContextBufferId::SetValues(int srcXmin,
     }
 
   delete[] rgb;
+
+  vtkOpenGLCheckErrorMacro("failed after SetValues");
 }
 
 // ----------------------------------------------------------------------------
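
The same clear/check pairing added to SetValues() above is applied to most of the GL entry points touched in this import: vtkOpenGLClearErrorMacro() discards stale error state on entry, and vtkOpenGLCheckErrorMacro() reports anything raised in between, naming the call site. A minimal sketch of the convention; the helper class and the GL call are placeholders, and the example derives from vtkObject on the assumption that the check macro reports through vtkErrorMacro.

#include "vtkObject.h"
#include "vtkObjectFactory.h"
#include "vtkOpenGL.h"
#include "vtkOpenGLError.h"

// Hypothetical vtkObject subclass illustrating the error-bracketing pattern.
class vtkMyGLHelper : public vtkObject
{
public:
  static vtkMyGLHelper* New();
  vtkTypeMacro(vtkMyGLHelper, vtkObject);

  void DrawSomething()
    {
    // Discard errors left by earlier, unrelated GL calls so the check below
    // only reports failures raised inside this method.
    vtkOpenGLClearErrorMacro();

    glDisable(GL_DEPTH_TEST);
    // ... more GL work ...

    vtkOpenGLCheckErrorMacro("failed after DrawSomething");
    }
};
vtkStandardNewMacro(vtkMyGLHelper);
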
diff --git a/Rendering/Context2D/vtkContextDevice2D.cxx b/Rendering/Context2D/vtkContextDevice2D.cxx
index e0512ab..83aca53 100644
--- a/Rendering/Context2D/vtkContextDevice2D.cxx
+++ b/Rendering/Context2D/vtkContextDevice2D.cxx
@@ -23,7 +23,7 @@
 
 
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 vtkContextDevice2D::vtkContextDevice2D()
 {
diff --git a/Rendering/Context2D/vtkContextMouseEvent.h b/Rendering/Context2D/vtkContextMouseEvent.h
index 4660bf3..7454475 100644
--- a/Rendering/Context2D/vtkContextMouseEvent.h
+++ b/Rendering/Context2D/vtkContextMouseEvent.h
@@ -53,10 +53,6 @@ public:
   {
   }
 
-  ~vtkContextMouseEvent()
-  {
-  }
-
   // Description:
   // Set the interactor for the mouse event.
   void SetInteractor(vtkRenderWindowInteractor *interactor)
diff --git a/Rendering/Context2D/vtkContextScene.cxx b/Rendering/Context2D/vtkContextScene.cxx
index 6af3def..2eab31a 100644
--- a/Rendering/Context2D/vtkContextScene.cxx
+++ b/Rendering/Context2D/vtkContextScene.cxx
@@ -36,7 +36,7 @@
 #include "vtkOpenGLRenderWindow.h"
 
 // My STL containers
-#include <assert.h>
+#include <cassert>
 
 //-----------------------------------------------------------------------------
 // Minimal storage class for STL containers etc.
diff --git a/Rendering/Context2D/vtkMarkerUtilities.cxx b/Rendering/Context2D/vtkMarkerUtilities.cxx
index 9d587b4..47a2f72 100644
--- a/Rendering/Context2D/vtkMarkerUtilities.cxx
+++ b/Rendering/Context2D/vtkMarkerUtilities.cxx
@@ -17,6 +17,8 @@
 
 #include "vtkImageData.h"
 
+#include <algorithm>
+
 //-----------------------------------------------------------------------------
 vtkMarkerUtilities::vtkMarkerUtilities()
 {
diff --git a/Rendering/Context2D/vtkOpenGL2ContextDevice2D.cxx b/Rendering/Context2D/vtkOpenGL2ContextDevice2D.cxx
index 83dbbe5..199bdb7 100644
--- a/Rendering/Context2D/vtkOpenGL2ContextDevice2D.cxx
+++ b/Rendering/Context2D/vtkOpenGL2ContextDevice2D.cxx
@@ -40,6 +40,7 @@
 #include "vtkOpenGLExtensionManager.h"
 #include "vtkShaderProgram2.h"
 #include "vtkgl.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkObjectFactory.h"
 
@@ -55,25 +56,25 @@ bool vtkOpenGL2ContextDevice2D::IsSupported(vtkViewport *viewport)
   vtkOpenGLRenderer *gl = vtkOpenGLRenderer::SafeDownCast(viewport);
   if (gl)
     {
-    vtkOpenGLRenderWindow *win =
-        vtkOpenGLRenderWindow::SafeDownCast(gl->GetRenderWindow());
-    vtkOpenGLExtensionManager *man = win->GetExtensionManager();
-    if (man->ExtensionSupported("GL_VERSION_2_0"))
-      {
-      supported = true;
-      }
-    }
+    vtkOpenGLRenderWindow *context =
+      vtkOpenGLRenderWindow::SafeDownCast(gl->GetRenderWindow());
 
-  if (supported)
-    {
-    // Workaround for a bug in mesa - support for non-power of two textures is
-    // poor at best. Disable, and use power of two textures for mesa rendering.
-    const char *gl_version =
-      reinterpret_cast<const char *>(glGetString(GL_VERSION));
-    const char *mesa_version = strstr(gl_version, "Mesa");
-    if (mesa_version != 0)
+    vtkOpenGLExtensionManager *extensions
+      = context->GetExtensionManager();
+
+    bool ogl_support
+      = extensions->ExtensionSupported("GL_VERSION_2_0")==1;
+
+    // NPOT textures work in OS Mesa > 8.0.0
+    // Mesa's other renderers need to be validated individually
+    bool driver_support
+       = (!extensions->DriverIsMesa()
+      || (extensions->DriverGLRendererIsOSMesa()
+      && extensions->DriverVersionAtLeast(8)));
+
+    if ( ogl_support && driver_support )
       {
-      supported = false;
+      supported = true;
       }
     }
 
@@ -96,6 +97,7 @@ void vtkOpenGL2ContextDevice2D::DrawPointSprites(vtkImageData *sprite,
                                                  unsigned char *colors,
                                                  int nc_comps)
 {
+  vtkOpenGLClearErrorMacro();
   if (points && n > 0)
     {
     this->SetPointSize(this->Pen->GetWidth());
@@ -132,12 +134,14 @@ void vtkOpenGL2ContextDevice2D::DrawPointSprites(vtkImageData *sprite,
     {
     vtkWarningMacro(<< "Points supplied without a valid image or pointer.");
     }
+  vtkOpenGLCheckErrorMacro("failed after DrawPointSprites");
 }
 
 //-----------------------------------------------------------------------------
 void vtkOpenGL2ContextDevice2D::DrawImage(float p[2], float scale,
                                          vtkImageData *image)
 {
+  vtkOpenGLClearErrorMacro();
   this->SetTexture(image);
   this->Storage->Texture->Render(this->Renderer);
   int *extent = image->GetExtent();
@@ -162,12 +166,14 @@ void vtkOpenGL2ContextDevice2D::DrawImage(float p[2], float scale,
 
   this->Storage->Texture->PostRender(this->Renderer);
   glDisable(GL_TEXTURE_2D);
+  vtkOpenGLCheckErrorMacro("failed after DrawImage");
 }
 
 //-----------------------------------------------------------------------------
 void vtkOpenGL2ContextDevice2D::DrawImage(const vtkRectf& pos,
                                          vtkImageData *image)
 {
+  vtkOpenGLClearErrorMacro();
   GLuint index = this->Storage->TextureFromImage(image);
 //  this->SetTexture(image);
 //  this->Storage->Texture->Render(this->Renderer);
@@ -193,6 +199,7 @@ void vtkOpenGL2ContextDevice2D::DrawImage(const vtkRectf& pos,
 //  this->Storage->Texture->PostRender(this->Renderer);
   glDisable(GL_TEXTURE_2D);
   glDeleteTextures(1, &index);
+  vtkOpenGLCheckErrorMacro("failed after DrawImage");
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/Context2D/vtkOpenGLContextBufferId.cxx b/Rendering/Context2D/vtkOpenGLContextBufferId.cxx
index 03887e6..72779eb 100644
--- a/Rendering/Context2D/vtkOpenGLContextBufferId.cxx
+++ b/Rendering/Context2D/vtkOpenGLContextBufferId.cxx
@@ -22,6 +22,7 @@
 #include "vtkTextureObject.h"
 #include "vtkgl.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkOpenGLContextBufferId);
 
@@ -118,6 +119,8 @@ vtkIdType vtkOpenGLContextBufferId::GetPickedItem(int x, int y)
 {
   assert("pre: is_allocated" && this->IsAllocated());
 
+  vtkOpenGLClearErrorMacro();
+
   vtkIdType result=-1;
   if(x<0 || x>=this->Width)
     {
@@ -220,6 +223,9 @@ vtkIdType vtkOpenGLContextBufferId::GetPickedItem(int x, int y)
     }
 
   assert("post: valid_result" && result>=-1 );
+
+  vtkOpenGLCheckErrorMacro("failed after GetPickedItem");
+
   return result;
 }
 
diff --git a/Rendering/Context2D/vtkOpenGLContextDevice2D.cxx b/Rendering/Context2D/vtkOpenGLContextDevice2D.cxx
index 5c23f97..3026c74 100644
--- a/Rendering/Context2D/vtkOpenGLContextDevice2D.cxx
+++ b/Rendering/Context2D/vtkOpenGLContextDevice2D.cxx
@@ -43,6 +43,7 @@
 #include "vtkOpenGLExtensionManager.h"
 #include "vtkShaderProgram2.h"
 #include "vtkgl.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkObjectFactory.h"
 #include "vtkContextBufferId.h"
@@ -82,6 +83,7 @@ vtkOpenGLContextDevice2D::~vtkOpenGLContextDevice2D()
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::Begin(vtkViewport* viewport)
 {
+  vtkOpenGLClearErrorMacro();
   // Need the actual pixel size of the viewport - ask OpenGL.
   GLint vp[4];
   glGetIntegerv(GL_VIEWPORT, vp);
@@ -139,6 +141,7 @@ void vtkOpenGLContextDevice2D::Begin(vtkViewport* viewport)
   glBindTexture(GL_TEXTURE_2D, 0);
 
   this->InRender = true;
+  vtkOpenGLCheckErrorMacro("failed after Begin");
 }
 
 //-----------------------------------------------------------------------------
@@ -149,6 +152,8 @@ void vtkOpenGLContextDevice2D::End()
     return;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   // push a 2D matrix on the stack
   glMatrixMode(GL_PROJECTION);
   glPopMatrix();
@@ -168,6 +173,8 @@ void vtkOpenGLContextDevice2D::End()
 
   this->RenderWindow = NULL;
   this->InRender = false;
+
+  vtkOpenGLCheckErrorMacro("failed after End");
 }
 
 // ----------------------------------------------------------------------------
@@ -177,6 +184,8 @@ void vtkOpenGLContextDevice2D::BufferIdModeBegin(
   assert("pre: not_yet" && !this->GetBufferIdMode());
   assert("pre: bufferId_exists" && bufferId!=0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->BufferId=bufferId;
 
   // Save OpenGL state.
@@ -205,6 +214,8 @@ void vtkOpenGLContextDevice2D::BufferIdModeBegin(
   glDisable(GL_DEPTH_TEST);
   glDisable(GL_BLEND);
 
+  vtkOpenGLCheckErrorMacro("failed after BufferIdModeBegin");
+
   assert("post: started" && this->GetBufferIdMode());
 }
 
@@ -213,6 +224,8 @@ void vtkOpenGLContextDevice2D::BufferIdModeEnd()
 {
   assert("pre: started" && this->GetBufferIdMode());
 
+  vtkOpenGLClearErrorMacro();
+
   // Assume the renderer has been set previously during rendering (see Begin())
   int lowerLeft[2];
   int usize, vsize;
@@ -229,6 +242,9 @@ void vtkOpenGLContextDevice2D::BufferIdModeEnd()
   this->Storage->RestoreGLState(true);
 
   this->BufferId=0;
+
+  vtkOpenGLCheckErrorMacro("failed after BufferIdModeEnd");
+
   assert("post: done" && !this->GetBufferIdMode());
 }
 
@@ -239,6 +255,8 @@ void vtkOpenGLContextDevice2D::DrawPoly(float *f, int n, unsigned char *colors,
   assert("f must be non-null" && f != NULL);
   assert("n must be greater than 0" && n > 0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->SetLineType(this->Pen->GetLineType());
   this->SetLineWidth(this->Pen->GetWidth());
 
@@ -263,12 +281,16 @@ void vtkOpenGLContextDevice2D::DrawPoly(float *f, int n, unsigned char *colors,
   // Restore line type and width.
   this->SetLineType(vtkPen::SOLID_LINE);
   this->SetLineWidth(1.0f);
+
+  vtkOpenGLCheckErrorMacro("failed after DrawPoly");
 }
 
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::DrawPoints(float *f, int n, unsigned char *c,
                                           int nc)
 {
+  vtkOpenGLClearErrorMacro();
+
   if (f && n > 0)
     {
     this->SetPointSize(this->Pen->GetWidth());
@@ -294,6 +316,8 @@ void vtkOpenGLContextDevice2D::DrawPoints(float *f, int n, unsigned char *c,
     {
     vtkWarningMacro(<< "Points supplied that were not of type float.");
     }
+
+  vtkOpenGLCheckErrorMacro("failed after DrawPoints");
 }
 
 //-----------------------------------------------------------------------------
@@ -302,6 +326,7 @@ void vtkOpenGLContextDevice2D::DrawPointSprites(vtkImageData *sprite,
                                                 unsigned char *colors,
                                                 int nc_comps)
 {
+  vtkOpenGLClearErrorMacro();
   if (points && n > 0)
     {
     this->SetPointSize(this->Pen->GetWidth());
@@ -386,6 +411,7 @@ void vtkOpenGLContextDevice2D::DrawPointSprites(vtkImageData *sprite,
     {
     vtkWarningMacro(<< "Points supplied without a valid image or pointer.");
     }
+  vtkOpenGLCheckErrorMacro("failed after DrawPointSprites");
 }
 
 //-----------------------------------------------------------------------------
@@ -402,6 +428,7 @@ void vtkOpenGLContextDevice2D::DrawMarkers(int shape, bool highlight,
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::DrawQuad(float *f, int n)
 {
+  vtkOpenGLClearErrorMacro();
   if (!f || n <= 0)
     {
     vtkWarningMacro(<< "Points supplied that were not of type float.");
@@ -429,11 +456,13 @@ void vtkOpenGLContextDevice2D::DrawQuad(float *f, int n)
     glDisable(GL_TEXTURE_2D);
     delete [] texCoord;
     }
+  vtkOpenGLCheckErrorMacro("failed after DrawQuad");
 }
 
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::DrawQuadStrip(float *f, int n)
 {
+  vtkOpenGLClearErrorMacro();
   if (!f || n <= 0)
     {
     vtkWarningMacro(<< "Points supplied that were not of type float.");
@@ -461,10 +490,12 @@ void vtkOpenGLContextDevice2D::DrawQuadStrip(float *f, int n)
     glDisable(GL_TEXTURE_2D);
     delete [] texCoord;
     }
+  vtkOpenGLCheckErrorMacro("failed after DrawQuadStrip");
 }
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::DrawPolygon(float *f, int n)
 {
+  vtkOpenGLClearErrorMacro();
   if (!f || n <= 0)
     {
     vtkWarningMacro(<< "Points supplied that were not of type float.");
@@ -492,6 +523,7 @@ void vtkOpenGLContextDevice2D::DrawPolygon(float *f, int n)
     glDisable(GL_TEXTURE_2D);
     delete [] texCoord;
     }
+  vtkOpenGLCheckErrorMacro("failed after DrawPolygon");
 }
 
 //-----------------------------------------------------------------------------
@@ -514,6 +546,8 @@ void vtkOpenGLContextDevice2D::DrawEllipseWedge(float x, float y, float outRx,
     return;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   int iterations=this->GetNumberOfArcIterations(outRx,outRy,startAngle,
                                                 stopAngle);
 
@@ -556,6 +590,8 @@ void vtkOpenGLContextDevice2D::DrawEllipseWedge(float x, float y, float outRx,
   glDisableClientState(GL_VERTEX_ARRAY);
 
   delete[] p;
+
+  vtkOpenGLCheckErrorMacro("failed after DrawEllipseWedge");
 }
 
 // ----------------------------------------------------------------------------
@@ -571,6 +607,9 @@ void vtkOpenGLContextDevice2D::DrawEllipticArc(float x, float y, float rX,
     // we make sure maxRadius will never be null.
     return;
     }
+
+  vtkOpenGLClearErrorMacro();
+
   int iterations = this->GetNumberOfArcIterations(rX, rY, startAngle, stopAngle);
 
   float *p = new float[2*(iterations+1)];
@@ -605,6 +644,8 @@ void vtkOpenGLContextDevice2D::DrawEllipticArc(float x, float y, float rX,
   this->SetLineWidth(1.0f);
 
   delete[] p;
+
+  vtkOpenGLCheckErrorMacro("failed after DrawEllipseArc");
 }
 
 // ----------------------------------------------------------------------------
@@ -758,6 +799,22 @@ void vtkOpenGLContextDevice2D::AlignText(double orientation, float width,
 void vtkOpenGLContextDevice2D::DrawString(float *point,
                                           const vtkStdString &string)
 {
+  this->DrawString(point, vtkUnicodeString::from_utf8(string));
+}
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLContextDevice2D::ComputeStringBounds(const vtkStdString &string,
+                                                   float bounds[4])
+{
+  this->ComputeStringBounds(vtkUnicodeString::from_utf8(string), bounds);
+}
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLContextDevice2D::DrawString(float *point,
+                                          const vtkUnicodeString &string)
+{
+  vtkOpenGLClearErrorMacro();
+
   GLfloat mv[16];
   glGetFloatv(GL_MODELVIEW_MATRIX, mv);
   float xScale = mv[0];
@@ -767,9 +824,9 @@ void vtkOpenGLContextDevice2D::DrawString(float *point,
                 std::floor(point[1] * yScale) / yScale };
 
   // Cache rendered text strings
-  vtkTextureImageCache<TextPropertyKey>::CacheData &cache =
-    this->Storage->TextTextureCache.GetCacheData(
-      TextPropertyKey(this->TextProp, string));
+  vtkTextureImageCache<UTF16TextPropertyKey>::CacheData &cache =
+      this->Storage->TextTextureCache.GetCacheData(
+        UTF16TextPropertyKey(this->TextProp, string));
   vtkImageData* image = cache.ImageData;
   if (image->GetNumberOfPoints() == 0 && image->GetNumberOfCells() == 0)
     {
@@ -817,47 +874,8 @@ void vtkOpenGLContextDevice2D::DrawString(float *point,
 
   texture->PostRender(this->Renderer);
   glDisable(GL_TEXTURE_2D);
-}
 
-//-----------------------------------------------------------------------------
-void vtkOpenGLContextDevice2D::ComputeStringBounds(const vtkStdString &string,
-                                                   float bounds[4])
-{
-  vtkVector2i box = this->TextRenderer->GetBounds(this->TextProp, string);
-  // Check for invalid bounding box
-  if (box[0] == VTK_INT_MIN || box[0] == VTK_INT_MAX ||
-      box[1] == VTK_INT_MIN || box[1] == VTK_INT_MAX)
-    {
-    bounds[0] = static_cast<float>(0);
-    bounds[1] = static_cast<float>(0);
-    bounds[2] = static_cast<float>(0);
-    bounds[3] = static_cast<float>(0);
-    return;
-    }
-  GLfloat mv[16];
-  glGetFloatv(GL_MODELVIEW_MATRIX, mv);
-  float xScale = mv[0];
-  float yScale = mv[5];
-  bounds[0] = static_cast<float>(0);
-  bounds[1] = static_cast<float>(0);
-  bounds[2] = static_cast<float>(box.GetX() / xScale);
-  bounds[3] = static_cast<float>(box.GetY() / yScale);
-}
-
-//-----------------------------------------------------------------------------
-void vtkOpenGLContextDevice2D::DrawString(float *point,
-                                          const vtkUnicodeString &string)
-{
-  int p[] = { static_cast<int>(point[0]),
-              static_cast<int>(point[1]) };
-
-  //TextRenderer draws in window, not viewport coords
-  p[0]+=this->Storage->Offset.GetX();
-  p[1]+=this->Storage->Offset.GetY();
-  vtkImageData *data = vtkImageData::New();
-  this->TextRenderer->RenderString(this->TextProp, string, data);
-  this->DrawImage(point, 1.0, data);
-  data->Delete();
+  vtkOpenGLCheckErrorMacro("failed after DrawString");
 }
 
 //-----------------------------------------------------------------------------
@@ -898,12 +916,14 @@ void vtkOpenGLContextDevice2D::DrawMathTextString(float point[2],
     return;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   float p[] = { std::floor(point[0]), std::floor(point[1]) };
 
   // Cache rendered text strings
-  vtkTextureImageCache<TextPropertyKey>::CacheData &cache =
+  vtkTextureImageCache<UTF8TextPropertyKey>::CacheData &cache =
     this->Storage->MathTextTextureCache.GetCacheData(
-      TextPropertyKey(this->TextProp, string));
+      UTF8TextPropertyKey(this->TextProp, string));
   vtkImageData* image = cache.ImageData;
   if (image->GetNumberOfPoints() == 0 && image->GetNumberOfCells() == 0)
     {
@@ -957,12 +977,16 @@ void vtkOpenGLContextDevice2D::DrawMathTextString(float point[2],
 
   texture->PostRender(this->Renderer);
   glDisable(GL_TEXTURE_2D);
+
+  vtkOpenGLCheckErrorMacro("failed after DrawMathTexString");
 }
 
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::DrawImage(float p[2], float scale,
                                          vtkImageData *image)
 {
+  vtkOpenGLClearErrorMacro();
+
   this->SetTexture(image);
   this->Storage->Texture->Render(this->Renderer);
   int *extent = image->GetExtent();
@@ -987,12 +1011,16 @@ void vtkOpenGLContextDevice2D::DrawImage(float p[2], float scale,
 
   this->Storage->Texture->PostRender(this->Renderer);
   glDisable(GL_TEXTURE_2D);
+
+  vtkOpenGLCheckErrorMacro("failed after DrawImage");
 }
 
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::DrawImage(const vtkRectf& pos,
                                          vtkImageData *image)
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkVector2f tex(1.0, 1.0);
   GLuint index = 0;
   if (this->Storage->PowerOfTwoTextures)
@@ -1027,6 +1055,8 @@ void vtkOpenGLContextDevice2D::DrawImage(const vtkRectf& pos,
 //  this->Storage->Texture->PostRender(this->Renderer);
   glDisable(GL_TEXTURE_2D);
   glDeleteTextures(1, &index);
+
+  vtkOpenGLCheckErrorMacro("failed after DrawImage");
 }
 
 //-----------------------------------------------------------------------------
@@ -1199,15 +1229,19 @@ void vtkOpenGLContextDevice2D::GetMatrix(vtkMatrix3x3 *m)
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::PushMatrix()
 {
+  vtkOpenGLClearErrorMacro();
   glMatrixMode( GL_MODELVIEW );
   glPushMatrix();
+  vtkOpenGLCheckErrorMacro("failed after PushMatrix");
 }
 
 //-----------------------------------------------------------------------------
 void vtkOpenGLContextDevice2D::PopMatrix()
 {
+  vtkOpenGLClearErrorMacro();
   glMatrixMode( GL_MODELVIEW );
   glPopMatrix();
+  vtkOpenGLCheckErrorMacro("failed after PopMatrix");
 }
 
 //-----------------------------------------------------------------------------
@@ -1318,12 +1352,10 @@ bool vtkOpenGLContextDevice2D::LoadExtensions(vtkOpenGLExtensionManager *m)
     this->Storage->GLSL = false;
     }
 
-  // Workaround for a bug in mesa - support for non-power of two textures is
-  // poor at best. Disable, and use power of two textures for mesa rendering.
-  const char *gl_version =
-    reinterpret_cast<const char *>(glGetString(GL_VERSION));
-  const char *mesa_version = strstr(gl_version, "Mesa");
-  if (mesa_version != 0)
+  // Disable NPOT textures for Mesa drivers;
+  // NPOT textures work in OS Mesa >= 8.0.0.
+  if ( m->DriverIsMesa()
+     && !(m->DriverGLRendererIsOSMesa() && m->DriverVersionAtLeast(8)))
     {
     this->Storage->PowerOfTwoTextures = true;
     this->TextRenderer->SetScaleToPowerOfTwo(true);
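
The hunks above all follow one bracketing pattern: clear any stale GL error state on entry and report whatever the body raised on exit. A minimal sketch of that pattern, assuming the vtkOpenGLError.h macros behave as their names suggest; the device class and method below are hypothetical, not part of this patch:

    #include "vtkOpenGLError.h"

    void vtkMyContextDevice2D::DrawSomething(float *f, int n)
    {
      vtkOpenGLClearErrorMacro();  // drop errors left over from earlier GL calls
      if (!f || n <= 0)
        {
        vtkWarningMacro(<< "Points supplied that were not of type float.");
        return;
        }
      // ... the glVertexPointer()/glDrawArrays() work goes here ...
      vtkOpenGLCheckErrorMacro("failed after DrawSomething");  // report only what this body raised
    }
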
diff --git a/Rendering/Context2D/vtkOpenGLContextDevice2DPrivate.h b/Rendering/Context2D/vtkOpenGLContextDevice2DPrivate.h
index b4688b4..438b1cb 100644
--- a/Rendering/Context2D/vtkOpenGLContextDevice2DPrivate.h
+++ b/Rendering/Context2D/vtkOpenGLContextDevice2DPrivate.h
@@ -35,6 +35,9 @@
 #include "vtkColor.h"
 #include "vtkTextProperty.h"
 #include "vtkFreeTypeTools.h"
+#include "vtkStdString.h"
+#include "vtkUnicodeString.h"
+
 #include <algorithm>
 #include <list>
 #include <utility>
@@ -162,6 +165,7 @@ typename vtkTextureImageCache<Key>::CacheData& vtkTextureImageCache<Key>
 // .NAME TextPropertyKey - unique key for a vtkTextProperty and text
 // .SECTION Description
 // Uniquely describe a pair of vtkTextProperty and text.
+template <class StringType>
 struct TextPropertyKey
 {
   // Description:
@@ -175,7 +179,7 @@ struct TextPropertyKey
 
   // Description:
   // Creates a TextPropertyKey.
-  TextPropertyKey(vtkTextProperty* textProperty, const vtkStdString& text)
+  TextPropertyKey(vtkTextProperty* textProperty, const StringType& text)
   {
     this->TextPropertyId = GetIdFromTextProperty(textProperty);
     this->FontSize = textProperty->GetFontSize();
@@ -206,9 +210,12 @@ struct TextPropertyKey
   vtkColor4ub Color;
   // States in the function not to use more than 32 bits - int works fine here.
   unsigned int TextPropertyId;
-  vtkStdString Text;
+  StringType Text;
 };
 
+typedef TextPropertyKey<vtkStdString> UTF8TextPropertyKey;
+typedef TextPropertyKey<vtkUnicodeString> UTF16TextPropertyKey;
+
 class vtkOpenGLContextDevice2D::Private
 {
 public:
@@ -394,12 +401,12 @@ public:
           {
           if (i < size[0] && j < size[1])
             {
-            dataPtr[i * newImg[0] * bytesPerPixel + j * bytesPerPixel + k] =
-                origPtr[i * size[0] * bytesPerPixel + j * bytesPerPixel + k];
+            dataPtr[i * bytesPerPixel + j * newImg[0] * bytesPerPixel + k] =
+                origPtr[i * bytesPerPixel + j * size[0] * bytesPerPixel + k];
             }
           else
             {
-            dataPtr[i * newImg[0] * bytesPerPixel + j * bytesPerPixel + k] =
+            dataPtr[i * bytesPerPixel + j * newImg[0] * bytesPerPixel + k] =
                 k == 3 ? 0 : 255;
             }
           }
@@ -502,8 +509,8 @@ public:
   // Description:
   // Cache for text images. Generating texture for strings is expensive,
   // we cache the textures here for a faster reuse.
-  mutable vtkTextureImageCache<TextPropertyKey> TextTextureCache;
-  mutable vtkTextureImageCache<TextPropertyKey> MathTextTextureCache;
+  mutable vtkTextureImageCache<UTF16TextPropertyKey> TextTextureCache;
+  mutable vtkTextureImageCache<UTF8TextPropertyKey> MathTextTextureCache;
 };
 
 #endif // VTKOPENGLCONTEXTDEVICE2DPRIVATE_H
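
With TextPropertyKey now templated on the string type, the two texture caches above no longer share a key type: DrawString keys its textures with UTF-16 vtkUnicodeStrings while the MathText path keys with UTF-8 vtkStdStrings. A short sketch of the two lookups, assuming GetCacheData works as declared earlier in this header; the literal strings are placeholders:

    // UTF-16 keyed cache used by DrawString()
    vtkTextureImageCache<UTF16TextPropertyKey>::CacheData &text =
      this->Storage->TextTextureCache.GetCacheData(
        UTF16TextPropertyKey(this->TextProp, vtkUnicodeString::from_utf8("label")));

    // UTF-8 keyed cache used by DrawMathTextString()
    vtkTextureImageCache<UTF8TextPropertyKey>::CacheData &math =
      this->Storage->MathTextTextureCache.GetCacheData(
        UTF8TextPropertyKey(this->TextProp, vtkStdString("$x^2$")));
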
diff --git a/Rendering/Context2D/vtkOpenGLContextDevice3D.cxx b/Rendering/Context2D/vtkOpenGLContextDevice3D.cxx
index e279db9..635a3cc 100644
--- a/Rendering/Context2D/vtkOpenGLContextDevice3D.cxx
+++ b/Rendering/Context2D/vtkOpenGLContextDevice3D.cxx
@@ -20,9 +20,9 @@
 
 #include "vtkMatrix4x4.h"
 #include "vtkOpenGLRenderer.h"
-#include "vtkOpenGLRenderWindow.h"
 #include "vtkOpenGLExtensionManager.h"
 #include "vtkgl.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkObjectFactory.h"
 
@@ -63,6 +63,7 @@ public:
       {
       glDisable(capability);
       }
+    vtkOpenGLStaticCheckErrorMacro("failed after SetGLCapability");
   }
 
   void Transpose(double *in, double *transposed)
@@ -148,6 +149,8 @@ void vtkOpenGLContextDevice3D::DrawPoly(const float *verts, int n,
   assert("verts must be non-null" && verts != NULL);
   assert("n must be greater than 0" && n > 0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->EnableDepthBuffer();
 
   this->Storage->SetLineType(this->Pen->GetLineType());
@@ -172,6 +175,8 @@ void vtkOpenGLContextDevice3D::DrawPoly(const float *verts, int n,
     }
 
   this->DisableDepthBuffer();
+
+  vtkOpenGLCheckErrorMacro("failed after DrawPoly");
 }
 
 void vtkOpenGLContextDevice3D::DrawPoints(const float *verts, int n,
@@ -180,6 +185,8 @@ void vtkOpenGLContextDevice3D::DrawPoints(const float *verts, int n,
   assert("verts must be non-null" && verts != NULL);
   assert("n must be greater than 0" && n > 0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->EnableDepthBuffer();
 
   glPointSize(this->Pen->GetWidth());
@@ -202,6 +209,8 @@ void vtkOpenGLContextDevice3D::DrawPoints(const float *verts, int n,
     }
 
   this->DisableDepthBuffer();
+
+  vtkOpenGLCheckErrorMacro("failed DrawPoints");
 }
 
 void vtkOpenGLContextDevice3D::DrawTriangleMesh(const float *mesh, int n,
@@ -211,6 +220,8 @@ void vtkOpenGLContextDevice3D::DrawTriangleMesh(const float *mesh, int n,
   assert("mesh must be non-null" && mesh != NULL);
   assert("n must be greater than 0" && n > 0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->EnableDepthBuffer();
 
   glPointSize(this->Pen->GetWidth());
@@ -233,6 +244,8 @@ void vtkOpenGLContextDevice3D::DrawTriangleMesh(const float *mesh, int n,
     }
 
   this->DisableDepthBuffer();
+
+  vtkOpenGLCheckErrorMacro("failed after DrawTriangleMesh");
 }
 
 void vtkOpenGLContextDevice3D::ApplyPen(vtkPen *pen)
@@ -276,12 +289,14 @@ void vtkOpenGLContextDevice3D::PushMatrix()
 {
   glMatrixMode(GL_MODELVIEW);
   glPushMatrix();
+  vtkOpenGLCheckErrorMacro("failed after PushMatrix");
 }
 
 void vtkOpenGLContextDevice3D::PopMatrix()
 {
   glMatrixMode(GL_MODELVIEW);
   glPopMatrix();
+  vtkOpenGLCheckErrorMacro("failed after PopMatrix");
 }
 
 void vtkOpenGLContextDevice3D::SetClipping(const vtkRecti &rect)
@@ -327,12 +342,14 @@ void vtkOpenGLContextDevice3D::EnableClippingPlane(int i, double *planeEquation)
   GLenum clipPlaneId = static_cast<GLenum>(GL_CLIP_PLANE0+i);
   glEnable(clipPlaneId);
   glClipPlane(clipPlaneId, planeEquation);
+  vtkOpenGLCheckErrorMacro("failed after EnableClippingPlane");
 }
 
 void vtkOpenGLContextDevice3D::DisableClippingPlane(int i)
 {
   GLenum clipPlaneId = static_cast<GLenum>(GL_CLIP_PLANE0+i);
   glDisable(clipPlaneId);
+  vtkOpenGLCheckErrorMacro("failed after DisableClippingPlane");
 }
 
 void vtkOpenGLContextDevice3D::EnableDepthBuffer()
diff --git a/Rendering/Context2D/vtkTooltipItem.cxx b/Rendering/Context2D/vtkTooltipItem.cxx
index c65aa6f..206ef80 100644
--- a/Rendering/Context2D/vtkTooltipItem.cxx
+++ b/Rendering/Context2D/vtkTooltipItem.cxx
@@ -22,6 +22,7 @@
 #include "vtkTextProperty.h"
 #include "vtkTransform2D.h"
 
+#include "vtkNew.h"
 #include "vtkStdString.h"
 #include "vtksys/ios/sstream"
 
@@ -100,6 +101,14 @@ bool vtkTooltipItem::Paint(vtkContext2D *painter)
     return false;
     }
 
+  // save painter settings
+  vtkNew<vtkPen> previousPen;
+  previousPen->DeepCopy(painter->GetPen());
+  vtkNew<vtkBrush> previousBrush;
+  previousBrush->DeepCopy(painter->GetBrush());
+  vtkNew<vtkTextProperty> previousTextProp;
+  previousTextProp->ShallowCopy(painter->GetTextProp());
+
   painter->ApplyPen(this->Pen);
   painter->ApplyBrush(this->Brush);
   painter->ApplyTextProp(this->TextProperties);
@@ -129,6 +138,11 @@ bool vtkTooltipItem::Paint(vtkContext2D *painter)
   painter->DrawRect(bounds[0].GetX(), bounds[0].GetY(), bounds[1].GetX(), bounds[1].GetY());
   painter->DrawString(bounds[0].GetX()+5/scale[0], bounds[0].GetY()+3/scale[1], this->Text);
 
+  // restore painter settings
+  painter->ApplyPen(previousPen.GetPointer());
+  painter->ApplyBrush(previousBrush.GetPointer());
+  painter->ApplyTextProp(previousTextProp.GetPointer());
+
   return true;
 }
 
diff --git a/Rendering/Core/CMakeLists.txt b/Rendering/Core/CMakeLists.txt
index d9e9fcc..a670a72 100644
--- a/Rendering/Core/CMakeLists.txt
+++ b/Rendering/Core/CMakeLists.txt
@@ -8,6 +8,7 @@ set(Module_SRCS
   vtkActorCollection.cxx
   vtkActor.cxx
   vtkAssembly.cxx
+  vtkBackgroundColorMonitor.cxx
   vtkCameraActor.cxx
   vtkCamera.cxx
   vtkCameraInterpolator.cxx
@@ -60,7 +61,6 @@ set(Module_SRCS
   vtkMapper2D.cxx
   vtkMapperCollection.cxx
   vtkMapper.cxx
-  vtkMaterialLibrary.cxx
   vtkObserverMediator.cxx
   vtkPainter.cxx
   vtkPainterDeviceAdapter.cxx
@@ -78,7 +78,7 @@ set(Module_SRCS
   vtkPropCollection.cxx
   vtkProp.cxx
   vtkProperty2D.cxx
-  vtkProperty.cxx # needs a load of image readers - IOImage, IOXML
+  vtkProperty.cxx
   vtkRendererCollection.cxx
   vtkRenderer.cxx
   vtkRendererDelegate.cxx
@@ -89,11 +89,7 @@ set(Module_SRCS
   vtkRepresentationPainter.cxx
   vtkScalarsToColorsPainter.cxx
   vtkSelectVisiblePoints.cxx
-  vtkShader.cxx # vtkXMLShader
-  vtkShaderCodeLibrary.cxx
-  vtkShaderDeviceAdapter.cxx
   vtkShaderDeviceAdapter2.cxx
-  vtkShaderProgram.cxx # vtkXMLShaderProgram
   vtkStandardPolyDataPainter.cxx
   vtkTexture.cxx
   vtkTexturedActor2D.cxx
@@ -107,10 +103,6 @@ set(Module_SRCS
   vtkVolumeCollection.cxx
   vtkVolume.cxx
   vtkVolumeProperty.cxx
-  vtkXMLMaterial.cxx
-  vtkXMLMaterialParser.cxx
-  vtkXMLMaterialReader.cxx
-  vtkXMLShader.cxx
   vtkWindowLevelLookupTable.cxx
   vtkWindowToImageFilter.cxx
 
@@ -163,6 +155,7 @@ set_source_files_properties(
   vtkAbstractPicker
   vtkAbstractPropPicker
   vtkCuller
+  vtkHardwareSelector
   vtkImageMapper3D
   vtkInteractorObserver
   vtkMapper
diff --git a/Rendering/Core/Testing/Cxx/CMakeLists.txt b/Rendering/Core/Testing/Cxx/CMakeLists.txt
index 367025a..e2ab90a 100644
--- a/Rendering/Core/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/Core/Testing/Cxx/CMakeLists.txt
@@ -1,28 +1,9 @@
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  TestPickingManager.cxx
+vtk_add_test_cxx(
+  TestPickingManager.cxx,NO_VALID
   TestSplitViewportStereoHorizontal.cxx
   # otherLookupTable.cxx             # fixme
   # otherLookupTableWithEnabling.cxx # fixme
   # TestHierarchicalBoxPipeline.cxx  # legacy code
-  EXTRA_INCLUDE vtkTestDriver.h
-)
+  )
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Rendering/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests)
diff --git a/Rendering/Core/Testing/Cxx/TestViewDependentErrorMetric.cxx b/Rendering/Core/Testing/Cxx/TestViewDependentErrorMetric.cxx
index 1944ce8..b78273b 100644
--- a/Rendering/Core/Testing/Cxx/TestViewDependentErrorMetric.cxx
+++ b/Rendering/Core/Testing/Cxx/TestViewDependentErrorMetric.cxx
@@ -38,7 +38,7 @@
 #include "vtkGenericDataSetTessellator.h"
 #include "vtkGenericCellTessellator.h"
 #include "vtkGenericSubdivisionErrorMetric.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkLookupTable.h"
 #include "vtkDataSetMapper.h"
 #include "vtkLabeledDataMapper.h"
diff --git a/Rendering/Core/Testing/Data/Baseline/CamBlur.png.md5 b/Rendering/Core/Testing/Data/Baseline/CamBlur.png.md5
new file mode 100644
index 0000000..4db5d1a
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/CamBlur.png.md5
@@ -0,0 +1 @@
+b19eaecead8692924e6bb99c5566d441
diff --git a/Rendering/Core/Testing/Data/Baseline/ImageActor.png.md5 b/Rendering/Core/Testing/Data/Baseline/ImageActor.png.md5
new file mode 100644
index 0000000..7f42315
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/ImageActor.png.md5
@@ -0,0 +1 @@
+45e0721536f532b60918b14e0a6c7093
diff --git a/Rendering/Core/Testing/Data/Baseline/ImageActorStressed.png.md5 b/Rendering/Core/Testing/Data/Baseline/ImageActorStressed.png.md5
new file mode 100644
index 0000000..1730b73
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/ImageActorStressed.png.md5
@@ -0,0 +1 @@
+a0c109e601186c098d774160f47ac078
diff --git a/Rendering/Core/Testing/Data/Baseline/NoLightGeneration.png.md5 b/Rendering/Core/Testing/Data/Baseline/NoLightGeneration.png.md5
new file mode 100644
index 0000000..922affc
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/NoLightGeneration.png.md5
@@ -0,0 +1 @@
+1183edaf8a6c2f7f4cda8f42ff0a5f19
diff --git a/Rendering/Core/Testing/Data/Baseline/PickerWithLocator.png.md5 b/Rendering/Core/Testing/Data/Baseline/PickerWithLocator.png.md5
new file mode 100644
index 0000000..2ed6719
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/PickerWithLocator.png.md5
@@ -0,0 +1 @@
+cb5d2aaba4f2048403d19087d12334b5
diff --git a/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllPolygons.png.md5 b/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllPolygons.png.md5
new file mode 100644
index 0000000..7158bea
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllPolygons.png.md5
@@ -0,0 +1 @@
+b5f33379266e6e71a2a496d33d8683cb
diff --git a/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllPolygons_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllPolygons_1.png.md5
new file mode 100644
index 0000000..48723a1
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllPolygons_1.png.md5
@@ -0,0 +1 @@
+8bb79ec70f030d9d265e8823563e6be6
diff --git a/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllWireframe.png.md5 b/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllWireframe.png.md5
new file mode 100644
index 0000000..a24ff5e
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllWireframe.png.md5
@@ -0,0 +1 @@
+8693f207aeda5e176d1c5bc62d0cf258
diff --git a/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllWireframe_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllWireframe_1.png.md5
new file mode 100644
index 0000000..2e13f71
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/PolyDataMapperAllWireframe_1.png.md5
@@ -0,0 +1 @@
+ef79d25e219833b9991483b461d3a325
diff --git a/Rendering/Core/Testing/Data/Baseline/ScalarBar.png.md5 b/Rendering/Core/Testing/Data/Baseline/ScalarBar.png.md5
new file mode 100644
index 0000000..462c09e
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/ScalarBar.png.md5
@@ -0,0 +1 @@
+46f93d0b315951354cd082e0a28f1165
diff --git a/Rendering/Core/Testing/Data/Baseline/SurfacePickerWithTexture.png.md5 b/Rendering/Core/Testing/Data/Baseline/SurfacePickerWithTexture.png.md5
new file mode 100644
index 0000000..0b88be6
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/SurfacePickerWithTexture.png.md5
@@ -0,0 +1 @@
+8e749f2dc2b8b8913640ae1461a3345a
diff --git a/Rendering/Core/Testing/Data/Baseline/SurfacePickerWithTexture_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/SurfacePickerWithTexture_1.png.md5
new file mode 100644
index 0000000..6b4dd1a
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/SurfacePickerWithTexture_1.png.md5
@@ -0,0 +1 @@
+eaee8c8a7d3f42d8eee5da9fa6fede52
diff --git a/Rendering/Core/Testing/Data/Baseline/TestCameraInterpolator.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestCameraInterpolator.png.md5
new file mode 100644
index 0000000..ad49fd8
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestCameraInterpolator.png.md5
@@ -0,0 +1 @@
+111d621bbf336a059a48f44ec828b6a5
diff --git a/Rendering/Core/Testing/Data/Baseline/TestGenericVertexAttributesGLSL.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestGenericVertexAttributesGLSL.png.md5
new file mode 100644
index 0000000..f015351
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestGenericVertexAttributesGLSL.png.md5
@@ -0,0 +1 @@
+6dfd30ab53720596468963dd991c0018
diff --git a/Rendering/Core/Testing/Data/Baseline/TestGenericVertexAttributesGLSL_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestGenericVertexAttributesGLSL_1.png.md5
new file mode 100644
index 0000000..08b35c2
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestGenericVertexAttributesGLSL_1.png.md5
@@ -0,0 +1 @@
+359f2229ab687880a997133d3e900b27
diff --git a/Rendering/Core/Testing/Data/Baseline/TestLeaderActor2D.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestLeaderActor2D.png.md5
new file mode 100644
index 0000000..6a8c959
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestLeaderActor2D.png.md5
@@ -0,0 +1 @@
+849acc13369267fe650459c6e74ac407
diff --git a/Rendering/Core/Testing/Data/Baseline/TestOnePoint.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestOnePoint.png.md5
new file mode 100644
index 0000000..be08cd0
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestOnePoint.png.md5
@@ -0,0 +1 @@
+a7294cb578a2d374246b4c0a48e36c1b
diff --git a/Rendering/Core/Testing/Data/Baseline/TestOpacity2.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestOpacity2.png.md5
new file mode 100644
index 0000000..e2e6138
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestOpacity2.png.md5
@@ -0,0 +1 @@
+969c1b03d4d4a450f61ea3897caf9dd4
diff --git a/Rendering/Core/Testing/Data/Baseline/TestOpacity2_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestOpacity2_1.png.md5
new file mode 100644
index 0000000..1312ce2
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestOpacity2_1.png.md5
@@ -0,0 +1 @@
+ad89e145afef6cafa0651cf6ea8306d5
diff --git a/Rendering/Core/Testing/Data/Baseline/TestOpacity2_2.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestOpacity2_2.png.md5
new file mode 100644
index 0000000..024cdd7
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestOpacity2_2.png.md5
@@ -0,0 +1 @@
+1b77aface6652194cd699260a1e4c861
diff --git a/Rendering/Core/Testing/Data/Baseline/TestOpacity2_3.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestOpacity2_3.png.md5
new file mode 100644
index 0000000..d181e9d
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestOpacity2_3.png.md5
@@ -0,0 +1 @@
+8638ac00e657af777c638a06f1955eef
diff --git a/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors.png.md5
new file mode 100644
index 0000000..d9d01f2
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors.png.md5
@@ -0,0 +1 @@
+76014e40671e4f6ef9a4d670e7671b71
diff --git a/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors_1.png.md5
new file mode 100644
index 0000000..d6a7cc7
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors_1.png.md5
@@ -0,0 +1 @@
+4feb0481f15a78b2ede70fe273e89877
diff --git a/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors_2.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors_2.png.md5
new file mode 100644
index 0000000..4a53dea
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestOpacityVectors_2.png.md5
@@ -0,0 +1 @@
+ba0e172ccb85fd647b6c13e4ff3ed1d7
diff --git a/Rendering/Core/Testing/Data/Baseline/TestRotate.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestRotate.png.md5
new file mode 100644
index 0000000..75e5286
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestRotate.png.md5
@@ -0,0 +1 @@
+10631f9cf5d4d269075a97d6f3d97194
diff --git a/Rendering/Core/Testing/Data/Baseline/TestSplitViewportStereoHorizontal.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestSplitViewportStereoHorizontal.png.md5
new file mode 100644
index 0000000..b52c06c
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestSplitViewportStereoHorizontal.png.md5
@@ -0,0 +1 @@
+7568610dd71146f4ad5c1db611e8c8b7
diff --git a/Rendering/Core/Testing/Data/Baseline/TestTransformCoordinateSystems.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestTransformCoordinateSystems.png.md5
new file mode 100644
index 0000000..d89fb01
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestTransformCoordinateSystems.png.md5
@@ -0,0 +1 @@
+f616b369492a9891759c3c94d55bfbfe
diff --git a/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator.png.md5
new file mode 100644
index 0000000..d46ed04
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator.png.md5
@@ -0,0 +1 @@
+561468f6a3c53eb62d985bfde9c4564a
diff --git a/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator_1.png.md5
new file mode 100644
index 0000000..ba9ba96
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator_1.png.md5
@@ -0,0 +1 @@
+1d3909fb38deeade04c1d65a83c573da
diff --git a/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator_2.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator_2.png.md5
new file mode 100644
index 0000000..119a153
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestTransformInterpolator_2.png.md5
@@ -0,0 +1 @@
+bcacec46677093bae63f9cfcf31f5068
diff --git a/Rendering/Core/Testing/Data/Baseline/TestTriangleStripCellColor.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestTriangleStripCellColor.png.md5
new file mode 100644
index 0000000..f17c074
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestTriangleStripCellColor.png.md5
@@ -0,0 +1 @@
+f4d77e5dd4aeefa42fbd25172ca0e173
diff --git a/Rendering/Core/Testing/Data/Baseline/TestWindowToImageTransparency.png.md5 b/Rendering/Core/Testing/Data/Baseline/TestWindowToImageTransparency.png.md5
new file mode 100644
index 0000000..c141979
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TestWindowToImageTransparency.png.md5
@@ -0,0 +1 @@
+24938693d3a0fb312d15aa3612bae8e3
diff --git a/Rendering/Core/Testing/Data/Baseline/TexturedSphere.png.md5 b/Rendering/Core/Testing/Data/Baseline/TexturedSphere.png.md5
new file mode 100644
index 0000000..1ae33d2
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TexturedSphere.png.md5
@@ -0,0 +1 @@
+befa1a11bc71a62c7ef596dcd7355c66
diff --git a/Rendering/Core/Testing/Data/Baseline/TexturedSphere_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/TexturedSphere_1.png.md5
new file mode 100644
index 0000000..58f83d6
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TexturedSphere_1.png.md5
@@ -0,0 +1 @@
+de189095084cc541a94bfd660ceeb116
diff --git a/Rendering/Core/Testing/Data/Baseline/TransformConcatenation.png.md5 b/Rendering/Core/Testing/Data/Baseline/TransformConcatenation.png.md5
new file mode 100644
index 0000000..dd8f47c
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TransformConcatenation.png.md5
@@ -0,0 +1 @@
+214558a7b35223bc7a94104869fb0a1b
diff --git a/Rendering/Core/Testing/Data/Baseline/TransformCoverage.png.md5 b/Rendering/Core/Testing/Data/Baseline/TransformCoverage.png.md5
new file mode 100644
index 0000000..bcb4e1d
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TransformCoverage.png.md5
@@ -0,0 +1 @@
+a3c5bde91b52d6945ce7d111cefd22db
diff --git a/Rendering/Core/Testing/Data/Baseline/TransformCoverage_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/TransformCoverage_1.png.md5
new file mode 100644
index 0000000..782d2fc
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TransformCoverage_1.png.md5
@@ -0,0 +1 @@
+5a11d8af00aa0eb819c1825ed0fc9f7b
diff --git a/Rendering/Core/Testing/Data/Baseline/TransformImageData.png.md5 b/Rendering/Core/Testing/Data/Baseline/TransformImageData.png.md5
new file mode 100644
index 0000000..230e6b6
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TransformImageData.png.md5
@@ -0,0 +1 @@
+6b673a646159dd4fa8fa6694e9909885
diff --git a/Rendering/Core/Testing/Data/Baseline/TransformPolyData.png.md5 b/Rendering/Core/Testing/Data/Baseline/TransformPolyData.png.md5
new file mode 100644
index 0000000..e56cdf3
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/TransformPolyData.png.md5
@@ -0,0 +1 @@
+b6540f895f3e9a70d3e165fb209da42d
diff --git a/Rendering/Core/Testing/Data/Baseline/assembly.png.md5 b/Rendering/Core/Testing/Data/Baseline/assembly.png.md5
new file mode 100644
index 0000000..dbf8219
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/assembly.png.md5
@@ -0,0 +1 @@
+649617fa476a6112b10fd8bf964799f1
diff --git a/Rendering/Core/Testing/Data/Baseline/cameraWarpedCone.png.md5 b/Rendering/Core/Testing/Data/Baseline/cameraWarpedCone.png.md5
new file mode 100644
index 0000000..e6f1598
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/cameraWarpedCone.png.md5
@@ -0,0 +1 @@
+ad0a3200e4ea04feca7d7a572a039910
diff --git a/Rendering/Core/Testing/Data/Baseline/cells.png.md5 b/Rendering/Core/Testing/Data/Baseline/cells.png.md5
new file mode 100644
index 0000000..42fd62b
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/cells.png.md5
@@ -0,0 +1 @@
+961d03a790ec660a21e02244f74575b1
diff --git a/Rendering/Core/Testing/Data/Baseline/cowHair.png.md5 b/Rendering/Core/Testing/Data/Baseline/cowHair.png.md5
new file mode 100644
index 0000000..6351a73
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/cowHair.png.md5
@@ -0,0 +1 @@
+748285f1a71d13c76d8629f870d0a6d9
diff --git a/Rendering/Core/Testing/Data/Baseline/cowHair2.png.md5 b/Rendering/Core/Testing/Data/Baseline/cowHair2.png.md5
new file mode 100644
index 0000000..d3efa51
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/cowHair2.png.md5
@@ -0,0 +1 @@
+34f6a0b7cadb6be32a9f6329b4e72d99
diff --git a/Rendering/Core/Testing/Data/Baseline/pickCells.png.md5 b/Rendering/Core/Testing/Data/Baseline/pickCells.png.md5
new file mode 100644
index 0000000..1860630
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/pickCells.png.md5
@@ -0,0 +1 @@
+ca8e3eb18098e6ba96a6fdc278718136
diff --git a/Rendering/Core/Testing/Data/Baseline/propAssembly.png.md5 b/Rendering/Core/Testing/Data/Baseline/propAssembly.png.md5
new file mode 100644
index 0000000..e8359c5
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/propAssembly.png.md5
@@ -0,0 +1 @@
+d96e4e1df758b1598a72e0c8d31dd04a
diff --git a/Rendering/Core/Testing/Data/Baseline/rendererSource.png.md5 b/Rendering/Core/Testing/Data/Baseline/rendererSource.png.md5
new file mode 100644
index 0000000..9478f72
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/rendererSource.png.md5
@@ -0,0 +1 @@
+e8a0ae027ca6a3a6ba5ca942d1ff0043
diff --git a/Rendering/Core/Testing/Data/Baseline/rendererSource_1.png.md5 b/Rendering/Core/Testing/Data/Baseline/rendererSource_1.png.md5
new file mode 100644
index 0000000..e46f6ef
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/rendererSource_1.png.md5
@@ -0,0 +1 @@
+b993a88c5e9ff708cfae13f551b171c8
diff --git a/Rendering/Core/Testing/Data/Baseline/rotations.png.md5 b/Rendering/Core/Testing/Data/Baseline/rotations.png.md5
new file mode 100644
index 0000000..75a79af
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/rotations.png.md5
@@ -0,0 +1 @@
+dd78f2723d461b90cc40fb4407d2b14d
diff --git a/Rendering/Core/Testing/Data/Baseline/stereoDresdenMace.png.md5 b/Rendering/Core/Testing/Data/Baseline/stereoDresdenMace.png.md5
new file mode 100644
index 0000000..124ecb8
--- /dev/null
+++ b/Rendering/Core/Testing/Data/Baseline/stereoDresdenMace.png.md5
@@ -0,0 +1 @@
+5e4b4f3321d66095d1defaa9b98b09f8
diff --git a/Rendering/Core/Testing/Python/CMakeLists.txt b/Rendering/Core/Testing/Python/CMakeLists.txt
index 4d38d8b..3cc7901 100644
--- a/Rendering/Core/Testing/Python/CMakeLists.txt
+++ b/Rendering/Core/Testing/Python/CMakeLists.txt
@@ -1,48 +1,42 @@
-add_test_python(CamBlur.py Rendering)
-add_test_python(ImageActorStressed.py Rendering)
-add_test_python(NoLightGeneration.py Graphics)
-add_test_python(PolyDataMapperAllPolygons.py Graphics)
-add_test_python(PolyDataMapperAllWireframe.py Graphics)
-add_test_python(ScalarBar.py Rendering)
-add_test_python(TestCameraInterpolator.py Graphics)
-add_test_python(TestMapperLUT.py)
-add_test_python(TestOnePoint.py Rendering)
-# add_test_python(TestParallelCoordinates.py Rendering) # disabled in TCL too
-add_test_python(TestRotate.py Graphics)
-add_test_python(TestTransformInterpolator.py Graphics)
-add_test_python(TestTriangleStripCellColor.py Rendering)
-add_test_python(TransformConcatenation.py Graphics)
-add_test_python(TransformCoverage.py Graphics)
-add_test_python(TransformImageData.py Graphics)
-add_test_python(TransformPolyData.py Graphics)
-add_test_python(assembly.py Rendering)
-add_test_python(cameraWarpedCone.py Graphics)
-add_test_python(cowHair.py Graphics)
-add_test_python(cowHair2.py Graphics)
-add_test_python(pickCells.py Rendering)
-add_test_python(propAssembly.py Rendering)
-add_test_python(rotations.py Rendering)
-add_test_python(stereoDresdenMace.py Graphics)
-add_test_python(PickerWithLocator.py Rendering)
-add_test_python(SurfacePickerWithTexture.py Rendering)
-add_test_python(TestGenericVertexAttributesGLSL.py Rendering)
-add_test_python(TestLeaderActor2D.py Rendering)
-add_test_python(TestOpacity2.py Rendering)
-add_test_python(TestTransformCoordinateSystems.py Graphics)
-add_test_python(TestWindowToImageTransparency.py Rendering)
-add_test_python(TexturedSphere.py Rendering)
-
-if(VTK_USE_CG_SHADERS)
-  add_test_python(TestGenericVertexAttributesCg.py Rendering)
-  add_test_python(TestCgShader.py Rendering)
-endif()
+vtk_add_test_python(CamBlur.py)
+vtk_add_test_python(ImageActor.py)
+vtk_add_test_python(ImageActorStressed.py)
+vtk_add_test_python(NoLightGeneration.py)
+vtk_add_test_python(PolyDataMapperAllPolygons.py)
+vtk_add_test_python(PolyDataMapperAllWireframe.py)
+vtk_add_test_python(ScalarBar.py)
+vtk_add_test_python(TestCameraInterpolator.py)
+vtk_add_test_python(TestMapperLUT.py NO_DATA NO_VALID)
+vtk_add_test_python(TestOnePoint.py)
+# vtk_add_test_python(TestParallelCoordinates.py) # disabled in TCL too
+vtk_add_test_python(TestRotate.py)
+vtk_add_test_python(TestTransformInterpolator.py)
+vtk_add_test_python(TestTriangleStripCellColor.py)
+vtk_add_test_python(TransformConcatenation.py)
+vtk_add_test_python(TransformCoverage.py)
+vtk_add_test_python(TransformImageData.py)
+vtk_add_test_python(TransformPolyData.py)
+vtk_add_test_python(assembly.py)
+vtk_add_test_python(cameraWarpedCone.py)
+vtk_add_test_python(cowHair.py)
+vtk_add_test_python(cowHair2.py)
+vtk_add_test_python(pickCells.py)
+vtk_add_test_python(propAssembly.py)
+vtk_add_test_python(rendererSource.py)
+vtk_add_test_python(rotations.py)
+vtk_add_test_python(stereoDresdenMace.py)
+vtk_add_test_python(PickerWithLocator.py)
+vtk_add_test_python(SurfacePickerWithTexture.py)
+# The XML shader code was removed; this test needs rewriting for the new API.
+#vtk_add_test_python(TestGenericVertexAttributesGLSL.py)
+vtk_add_test_python(TestLeaderActor2D.py)
+vtk_add_test_python(TestOpacity2.py)
+vtk_add_test_python(TestOpacityVectors.py)
+vtk_add_test_python(TestTransformCoordinateSystems.py)
+vtk_add_test_python(TestWindowToImageTransparency.py)
+vtk_add_test_python(TexturedSphere.py)
+vtk_add_test_python(cells.py NO_RT)
 
 if(VTK_USE_GLSL_SHADERS)
-  add_test_python(TestGLSLShader.py Rendering)
-endif()
-
-if (VTK_DATA_ROOT)
-  add_test_python(ImageActor.py Rendering)
-  add_test_python(rendererSource.py Rendering)
-  add_test_python1(cells.py Baseline/Rendering)
+  vtk_add_test_python(TestGLSLShader.py)
 endif()
diff --git a/Rendering/Core/Testing/Python/TestCgShader.py b/Rendering/Core/Testing/Python/TestCgShader.py
deleted file mode 100755
index f70facb..0000000
--- a/Rendering/Core/Testing/Python/TestCgShader.py
+++ /dev/null
@@ -1,29 +0,0 @@
-#!/usr/bin/env python
-
-renWin = vtk.vtkRenderWindow()
-iren = vtk.vtkRenderWindowInteractor()
-iren.SetRenderWindow(renWin)
-renderer = vtk.vtkRenderer()
-renWin.AddRenderer(renderer)
-src1 = vtk.vtkSphereSource()
-src1.SetRadius(5)
-src1.SetPhiResolution(20)
-src1.SetThetaResolution(20)
-mapper = vtk.vtkPolyDataMapper()
-mapper.SetInputConnection(src1.GetOutputPort())
-actor = vtk.vtkActor()
-actor.SetMapper(mapper)
-# Load the material. Here, we are loading a material
-# defined in the Vtk Library. One can also specify
-# a filename to a material description xml.
-actor.GetProperty().LoadMaterial("CgTwisted")
-# Turn shading on. Otherwise, shaders are not used.
-actor.GetProperty().ShadingOn()
-# Pass a shader variable need by CgTwisted.
-actor.GetProperty().AddShaderVariable("Rate",1.0)
-renderer.AddActor(actor)
-renWin.Render()
-renderer.GetActiveCamera().Azimuth(-50)
-renderer.GetActiveCamera().Roll(70)
-renWin.Render()
-# --- end of script --
diff --git a/Rendering/Core/Testing/Python/TestGenericVertexAttributesCg.py b/Rendering/Core/Testing/Python/TestGenericVertexAttributesCg.py
deleted file mode 100755
index 926c16c..0000000
--- a/Rendering/Core/Testing/Python/TestGenericVertexAttributesCg.py
+++ /dev/null
@@ -1,84 +0,0 @@
-#!/usr/bin/env python
-
-# tests the support to pass generic vertex attributes to be used in Cg shaders.
-xmlMaterial = '<?xml version="1.0" encoding="UTF-8"?> \
-<Material name="GenericAttributes1"> \
-  <Shader \
-    scope="Vertex" \
-    name="VertexShader" \
-    location="Inline" \
-    language="Cg" \
-    entry="main"> \
-      <MatrixUniform name="ModelViewProj" \
-        type="State" \
-        number_of_elements="2" \
-        value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY" /> \
-      <MatrixUniform name="ModelViewIT" \
-        type="State" \
-        number_of_elements="2" \
-        value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE" /> \
- \
-      struct appin \
-      { \
-          float4 Position : POSITION; \
-          float3 Normal   : NORMAL; \
-      }; \
- \
-      // define outputs from vertex shader \
-      struct vertout \
-      { \
-          float4 HPosition : POSITION; \
-          float4 Color0    : COLOR0; \
-      }; \
- \
-      vertout main(appin IN, \
-                   uniform float4x4 ModelViewProj, \
-                   uniform float4x4 ModelViewIT) \
-      { \
-        vertout OUT; \
- \
-        // transform vertex position into homogenous clip-space \
-        OUT.HPosition = mul(ModelViewProj, IN.Position); \
- \
-        OUT.Color0.xyz = normalize(IN.Normal); \
-        OUT.Color0.a = 1.0; \
-        return OUT; \
-      } \
-  </Shader> \
-</Material> \
-'
-
-renWin = vtk.vtkRenderWindow()
-iren = vtk.vtkRenderWindowInteractor()
-iren.SetRenderWindow(renWin)
-renderer = vtk.vtkRenderer()
-renWin.AddRenderer(renderer)
-src1 = vtk.vtkSphereSource()
-src1.SetRadius(5)
-src1.SetPhiResolution(20)
-src1.SetThetaResolution(20)
-randomVectors = vtk.vtkBrownianPoints()
-randomVectors.SetMinimumSpeed(0)
-randomVectors.SetMaximumSpeed(1)
-randomVectors.SetInputConnection(src1.GetOutputPort())
-mapper = vtk.vtkPolyDataMapper()
-mapper.SetInputConnection(randomVectors.GetOutputPort())
-actor = vtk.vtkActor()
-actor.SetMapper(mapper)
-# Load the material. Here, we are loading a material
-# defined in the Vtk Library. One can also specify
-# a filename to a material description xml.
-actor.GetProperty().LoadMaterialFromString(xmlMaterial)
-# Set red color to show if shading fails.
-actor.GetProperty().SetColor(1.0,0,0)
-# Turn shading on. Otherwise, shaders are not used.
-actor.GetProperty().ShadingOn()
-# Map PointData.BrownianVectors (all 3 components) to IN.Normal
-mapper.MapDataArrayToVertexAttribute("IN.Normal","BrownianVectors",0,-1)
-renderer.AddActor(actor)
-renderer.SetBackground(0.5,0.5,0.5)
-renWin.Render()
-renderer.GetActiveCamera().Azimuth(-50)
-renderer.GetActiveCamera().Roll(70)
-renWin.Render()
-# --- end of script --
diff --git a/Rendering/Core/Testing/Python/TestOpacityVectors.py b/Rendering/Core/Testing/Python/TestOpacityVectors.py
new file mode 100755
index 0000000..1c75571
--- /dev/null
+++ b/Rendering/Core/Testing/Python/TestOpacityVectors.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+import vtk
+
+def SetRandomSeed(caller, eventId):
+    #print "Restart random number generator"
+    raMath = vtk.vtkMath()
+    raMath.RandomSeed(6)
+
+
+def SphereActor(lut, interpolateBeforeMapping):
+    ss = vtk.vtkSphereSource()
+    if interpolateBeforeMapping:
+        ss.SetCenter(-1, 0, 0)
+
+    bp = vtk.vtkBrownianPoints()
+    bp.SetInputConnection(ss.GetOutputPort())
+    bp.AddObserver(vtk.vtkCommand.EndEvent, SetRandomSeed)
+
+    pm = vtk.vtkPolyDataMapper()
+    pm.SetInputConnection(bp.GetOutputPort())
+    pm.SetScalarModeToUsePointFieldData()
+    pm.SelectColorArray("BrownianVectors")
+    pm.SetLookupTable(lut)
+    pm.SetInterpolateScalarsBeforeMapping(interpolateBeforeMapping)
+
+    a = vtk.vtkActor()
+    a.SetMapper(pm)
+    return a
+
+def ColorTransferFunction():
+    opacityTransfer = vtk.vtkPiecewiseFunction()
+    opacityTransfer.AddPoint(0,0)
+    opacityTransfer.AddPoint(0.6,0)
+    opacityTransfer.AddPoint(1,1)
+
+    lut = vtk.vtkDiscretizableColorTransferFunction()
+    lut.SetColorSpaceToDiverging()
+    lut.AddRGBPoint(0.0, 0.23, 0.299, 0.754)
+    lut.AddRGBPoint(1.0, 0.706, 0.016, 0.150)
+    lut.SetVectorModeToMagnitude()
+    lut.SetRange(0, 1)
+    lut.SetScalarOpacityFunction(opacityTransfer)
+    lut.EnableOpacityMappingOn()
+    return lut
+
+
+renWin = vtk.vtkRenderWindow()
+renWin.SetSize(300, 300)
+# enable depth peeling
+renWin.AlphaBitPlanesOn()
+renWin.SetMultiSamples(0)
+
+ren = vtk.vtkRenderer()
+ren.SetBackground(0, 0, 0)
+# enable depth peeling
+ren.UseDepthPeelingOn()
+ren.SetMaximumNumberOfPeels(4)
+renWin.AddRenderer(ren)
+
+iren = vtk.vtkRenderWindowInteractor()
+iren.SetRenderWindow(renWin)
+
+# Force a starting random value
+SetRandomSeed(0, 0)
+
+lut = ColorTransferFunction()
+ren.AddActor(SphereActor(lut, 0))
+ren.AddActor(SphereActor(lut, 1))
+
+renWin.Render()
+#iren.Start()
diff --git a/Rendering/Core/Testing/Python/cells.py b/Rendering/Core/Testing/Python/cells.py
index b84c847..61abec9 100755
--- a/Rendering/Core/Testing/Python/cells.py
+++ b/Rendering/Core/Testing/Python/cells.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython cells.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import tempfile
 import os
 import vtk
diff --git a/Rendering/Core/Testing/Tcl/CMakeLists.txt b/Rendering/Core/Testing/Tcl/CMakeLists.txt
index 671001e..3255fe8 100644
--- a/Rendering/Core/Testing/Tcl/CMakeLists.txt
+++ b/Rendering/Core/Testing/Tcl/CMakeLists.txt
@@ -1,36 +1,34 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(ImageActor Rendering)
-  add_test_tcl(PickerWithLocator Rendering)
-  add_test_tcl(PolyDataMapperAllPolygons Graphics)
-  add_test_tcl(PolyDataMapperAllWireframe Graphics)
-  add_test_tcl(SurfacePickerWithTexture Rendering)
-  add_test_tcl(TestCameraInterpolator Graphics)
-  add_test_tcl(TestOpacity2 Rendering)
-  add_test_tcl(TexturedSphere Rendering)
-  add_test_tcl(cells Rendering)
-  add_test_tcl(cowHair2 Graphics)
-  add_test_tcl(cowHair Graphics)
-  add_test_tcl(rendererSource Rendering)
-  add_test_tcl(rotations Rendering)
-endif()
+vtk_add_test_tcl(ImageActor)
+vtk_add_test_tcl(PickerWithLocator)
+vtk_add_test_tcl(PolyDataMapperAllPolygons)
+vtk_add_test_tcl(PolyDataMapperAllWireframe)
+vtk_add_test_tcl(SurfacePickerWithTexture)
+vtk_add_test_tcl(TestCameraInterpolator)
+vtk_add_test_tcl(TestOpacity2)
+vtk_add_test_tcl(TexturedSphere)
+vtk_add_test_tcl(cells)
+vtk_add_test_tcl(cowHair2)
+vtk_add_test_tcl(cowHair)
+vtk_add_test_tcl(rendererSource)
+vtk_add_test_tcl(rotations)
 
-add_test_tcl(assembly Rendering)
-add_test_tcl(CamBlur Rendering)
-add_test_tcl(ImageActorStressed Rendering)
-add_test_tcl(ScalarBar Rendering)
-add_test_tcl(TestLeaderActor2D Rendering)
-add_test_tcl(TestOnePoint Rendering)
-add_test_tcl(TestTriangleStripCellColor Rendering)
-add_test_tcl(TestWindowToImageTransparency Rendering)
-add_test_tcl(pickCells Rendering)
-add_test_tcl(propAssembly Rendering)
+vtk_add_test_tcl(assembly)
+vtk_add_test_tcl(CamBlur)
+vtk_add_test_tcl(ImageActorStressed)
+vtk_add_test_tcl(ScalarBar)
+vtk_add_test_tcl(TestLeaderActor2D)
+vtk_add_test_tcl(TestOnePoint)
+vtk_add_test_tcl(TestTriangleStripCellColor)
+vtk_add_test_tcl(TestWindowToImageTransparency)
+vtk_add_test_tcl(pickCells)
+vtk_add_test_tcl(propAssembly)
 
-add_test_tcl(NoLightGeneration Graphics)
-add_test_tcl(TestRotate Graphics)
-add_test_tcl(TestTransformCoordinateSystems Graphics)
-add_test_tcl(TestTransformInterpolator Graphics)
-add_test_tcl(TransformConcatenation Graphics)
-add_test_tcl(TransformCoverage Graphics)
-add_test_tcl(TransformImageData Graphics)
-add_test_tcl(TransformPolyData Graphics)
-add_test_tcl(stereoDresdenMace Graphics)
+vtk_add_test_tcl(NoLightGeneration)
+vtk_add_test_tcl(TestRotate)
+vtk_add_test_tcl(TestTransformCoordinateSystems)
+vtk_add_test_tcl(TestTransformInterpolator)
+vtk_add_test_tcl(TransformConcatenation)
+vtk_add_test_tcl(TransformCoverage)
+vtk_add_test_tcl(TransformImageData)
+vtk_add_test_tcl(TransformPolyData)
+vtk_add_test_tcl(stereoDresdenMace)
diff --git a/Rendering/Core/module.cmake b/Rendering/Core/module.cmake
index 89493fe..52246a8 100644
--- a/Rendering/Core/module.cmake
+++ b/Rendering/Core/module.cmake
@@ -4,17 +4,14 @@ vtk_module(vtkRenderingCore
   DEPENDS
     vtkCommonExecutionModel
     vtkCommonTransforms
+  PRIVATE_DEPENDS
     vtkFiltersSources
-    vtkFiltersGeneral
     vtkFiltersGeometry
     vtkFiltersExtraction
-    vtkIOImage
-    vtkIOXMLParser
-  COMPILE_DEPENDS
-    vtkUtilitiesMaterialLibrary
+    vtksys
   TEST_DEPENDS
     vtkIOXML
     vtkTestingCore
     vtkTestingRendering
-    vtkRenderingContext2D
+    vtkRenderingOpenGL
   )
diff --git a/Rendering/Core/vtkAbstractPropPicker.cxx b/Rendering/Core/vtkAbstractPropPicker.cxx
index 8bc6cf7..9953c7e 100644
--- a/Rendering/Core/vtkAbstractPropPicker.cxx
+++ b/Rendering/Core/vtkAbstractPropPicker.cxx
@@ -154,13 +154,3 @@ void vtkAbstractPropPicker::PrintSelf(ostream& os, vtkIndent indent)
     os << indent << "Path: (none)" << endl;
     }
 }
-
-//----------------------------------------------------------------------------
-
-// Disable warnings about qualifiers on return types.
-#if defined(_COMPILER_VERSION)
-# pragma set woff 3303
-#endif
-#if defined(__INTEL_COMPILER)
-# pragma warning (disable:858)
-#endif
diff --git a/Rendering/Core/vtkAbstractPropPicker.h b/Rendering/Core/vtkAbstractPropPicker.h
index f69db28..99895e7 100644
--- a/Rendering/Core/vtkAbstractPropPicker.h
+++ b/Rendering/Core/vtkAbstractPropPicker.h
@@ -129,23 +129,6 @@ public:
   // vtkPropAssembly.)
   virtual vtkPropAssembly *GetPropAssembly();
 
-// Disable warnings about qualifiers on return types.
-#if defined(_COMPILER_VERSION)
-# pragma set woff 3303
-#endif
-#if defined(__INTEL_COMPILER)
-# pragma warning (push)
-# pragma warning (disable:858)
-#endif
-
-// Reset disabled warning about qualifiers on return types.
-#if defined(__INTEL_COMPILER)
-# pragma warning (pop)
-#endif
-#if defined(_COMPILER_VERSION)
-# pragma reset woff 3303
-#endif
-
 protected:
   vtkAbstractPropPicker();
   ~vtkAbstractPropPicker();
diff --git a/Rendering/Core/vtkAbstractVolumeMapper.h b/Rendering/Core/vtkAbstractVolumeMapper.h
index 834b50f..2a0b650 100644
--- a/Rendering/Core/vtkAbstractVolumeMapper.h
+++ b/Rendering/Core/vtkAbstractVolumeMapper.h
@@ -111,7 +111,7 @@ public:
   // Release any graphics resources that are being consumed by this mapper.
   // The parameter window could be used to determine which graphic
   // resources to release.
-  virtual void ReleaseGraphicsResources(vtkWindow *) {};
+  virtual void ReleaseGraphicsResources(vtkWindow *) {}
 
 //ETX
 
diff --git a/Rendering/Core/vtkActor2D.cxx b/Rendering/Core/vtkActor2D.cxx
index 96ce1be..7067fc8 100644
--- a/Rendering/Core/vtkActor2D.cxx
+++ b/Rendering/Core/vtkActor2D.cxx
@@ -96,7 +96,8 @@ int vtkActor2D::RenderOverlay(vtkViewport* viewport)
       if (renderWindow->GetCapturingGL2PSSpecialProps())
         {
         if (this->IsA("vtkTextActor") ||
-            (this->Mapper && this->Mapper->IsA("vtkTextMapper")))
+            (this->Mapper && (this->Mapper->IsA("vtkTextMapper") ||
+                              this->Mapper->IsA("vtkLabeledDataMapper"))))
           {
           renderer->CaptureGL2PSSpecialProp(this);
           }
diff --git a/Rendering/Core/vtkActor2DCollection.h b/Rendering/Core/vtkActor2DCollection.h
index a6d217e..683d6ec 100644
--- a/Rendering/Core/vtkActor2DCollection.h
+++ b/Rendering/Core/vtkActor2DCollection.h
@@ -79,7 +79,7 @@ class VTKRENDERINGCORE_EXPORT vtkActor2DCollection : public vtkPropCollection
   //ETX
 
 protected:
-  vtkActor2DCollection() {};
+  vtkActor2DCollection() {}
   ~vtkActor2DCollection();
 
   virtual void DeleteElement(vtkCollectionElement *);
diff --git a/Rendering/Core/vtkActorCollection.h b/Rendering/Core/vtkActorCollection.h
index 5feb01f..89e870a 100644
--- a/Rendering/Core/vtkActorCollection.h
+++ b/Rendering/Core/vtkActorCollection.h
@@ -69,8 +69,8 @@ public:
   //ETX
 
 protected:
-  vtkActorCollection() {};
-  ~vtkActorCollection() {};
+  vtkActorCollection() {}
+  ~vtkActorCollection() {}
 
 
 private:
diff --git a/Rendering/Core/vtkAssemblyPaths.h b/Rendering/Core/vtkAssemblyPaths.h
index b2d5ec4..04eab93 100644
--- a/Rendering/Core/vtkAssemblyPaths.h
+++ b/Rendering/Core/vtkAssemblyPaths.h
@@ -68,8 +68,8 @@ public:
   //ETX
 
 protected:
-  vtkAssemblyPaths() {};
-  ~vtkAssemblyPaths() {};
+  vtkAssemblyPaths() {}
+  ~vtkAssemblyPaths() {}
 
 private:
   // hide the standard AddItem from the user and the compiler.
diff --git a/Rendering/Core/vtkBackgroundColorMonitor.cxx b/Rendering/Core/vtkBackgroundColorMonitor.cxx
new file mode 100644
index 0000000..84c50ff
--- /dev/null
+++ b/Rendering/Core/vtkBackgroundColorMonitor.cxx
@@ -0,0 +1,95 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkBackgroundColorMonitor
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkBackgroundColorMonitor.h"
+#include "vtkObjectFactory.h"
+#include "vtkRenderer.h"
+#include <cstring>
+#include <cmath>
+
+//-----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkBackgroundColorMonitor)
+
+//-----------------------------------------------------------------------------
+vtkBackgroundColorMonitor::vtkBackgroundColorMonitor()
+  :
+    UpTime(0),
+    Gradient(false)
+{
+  memset(this->Color1,0,3*sizeof(double));
+  memset(this->Color2,0,3*sizeof(double));
+}
+
+//-----------------------------------------------------------------------------
+bool vtkBackgroundColorMonitor::StateChanged(vtkRenderer *ren)
+{
+  unsigned int oldUpTime = this->UpTime;
+  this->Update(ren);
+  if (oldUpTime != this->UpTime)
+    {
+    return true;
+    }
+  return false;
+}
+
+//-----------------------------------------------------------------------------
+void vtkBackgroundColorMonitor::Update(vtkRenderer *ren)
+{
+  // update colors
+  double *color1 = ren->GetBackground();
+  double *color2 = ren->GetBackground2();
+  bool changed = false;
+  for (int i=0; i<3; ++i)
+    {
+    if ( (this->Color1[i] != color1[i])
+      || (this->Color2[i] != color2[i]) )
+      {
+      changed=true;
+      }
+    this->Color1[i] = color1[i];
+    this->Color2[i] = color2[i];
+    }
+  // update gradient flag
+  bool grad = ren->GetGradientBackground();
+  if ( this->Gradient != grad )
+    {
+    changed = true;
+    }
+  this->Gradient = grad;
+  // update mtime
+  if (changed)
+    {
+    this->UpTime += 1;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkBackgroundColorMonitor::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+  os << indent << "Gradient=" << this->Gradient << endl;
+  os << indent << "Color1=";
+  for (int q=0; q<3; ++q)
+    {
+     os << this->Color1[q] << " ";
+    }
+  os << endl;
+  os << indent << "Color2=";
+  for (int q=0; q<3; ++q)
+    {
+     os << this->Color2[q] << " ";
+    }
+  os << endl;
+  os << indent << "UpTime=" << this->UpTime << endl;
+}
diff --git a/Rendering/Core/vtkBackgroundColorMonitor.h b/Rendering/Core/vtkBackgroundColorMonitor.h
new file mode 100644
index 0000000..d64e9ff
--- /dev/null
+++ b/Rendering/Core/vtkBackgroundColorMonitor.h
@@ -0,0 +1,73 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkBackgroundColorMonitor
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkBackgroundColorMonitor -- A helper for painters that
+// tracks state of background color(s).
+//
+// .SECTION Description
+// vtkBackgroundColorMonitor is a helper for painters that tracks
+// the state of the background color(s). A painter can use it to
+// skip expensive processing that is only needed when the
+// background color changes. The class queries the VTK renderer
+// rather than the OpenGL state so that VTK's gradient background
+// is supported as well.
+//
+// This class is not intended to be shared; each object should use
+// its own instance, and it is intended to be called once per
+// render.
+
+#ifndef __vtkBackgroundColorMonitor_H
+#define __vtkBackgroundColorMonitor_H
+
+#include "vtkRenderingCoreModule.h" // for export macro
+#include "vtkObject.h"
+
+class vtkRenderer;
+
+class VTKRENDERINGCORE_EXPORT vtkBackgroundColorMonitor : public vtkObject
+{
+public:
+  static vtkBackgroundColorMonitor* New();
+  vtkTypeMacro(vtkBackgroundColorMonitor, vtkObject);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Fetches the current background color state and
+  // updates the internal copies of the data. Returns
+  // true if any of the tracked colors or modes have
+  // changed. Typically this is the only function a
+  // user needs to call.
+  bool StateChanged(vtkRenderer *ren);
+
+  // Description:
+  // Update the internal state if anything changed. Note,
+  // this is done automatically in StateChanged.
+  void Update(vtkRenderer *ren);
+
+protected:
+  vtkBackgroundColorMonitor();
+  ~vtkBackgroundColorMonitor(){}
+
+private:
+  unsigned int UpTime;
+  bool Gradient;
+  double Color1[3];
+  double Color2[3];
+
+private:
+ vtkBackgroundColorMonitor(const vtkBackgroundColorMonitor&); // Not implemented
+ void operator=(const vtkBackgroundColorMonitor&); // Not implemented
+};
+
+#endif
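
The new vtkBackgroundColorMonitor above is meant to let a painter skip work that
only has to be redone when the renderer's background changes. A minimal usage
sketch, assuming a hypothetical painter-side member and an expensive setup step
(neither is part of this patch); only StateChanged()/Update() come from the
class above:

    #include "vtkBackgroundColorMonitor.h"
    #include "vtkNew.h"
    #include "vtkRenderer.h"

    struct MyPainterState
    {
      // One monitor per painter; the header above notes it is not
      // intended to be shared between objects.
      vtkNew<vtkBackgroundColorMonitor> BackgroundMonitor;

      void PrepareForRender(vtkRenderer *ren)
      {
        // StateChanged() refreshes the monitor's copies of Background,
        // Background2 and the gradient flag, and reports whether any changed.
        if (this->BackgroundMonitor->StateChanged(ren))
          {
          this->BackgroundDependentSetup(ren); // hypothetical expensive step
          }
      }

      void BackgroundDependentSetup(vtkRenderer *) { /* expensive work */ }
    };
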
diff --git a/Rendering/Core/vtkCellPicker.cxx b/Rendering/Core/vtkCellPicker.cxx
index 46aad41..e82ec13 100644
--- a/Rendering/Core/vtkCellPicker.cxx
+++ b/Rendering/Core/vtkCellPicker.cxx
@@ -1011,7 +1011,7 @@ double vtkCellPicker::IntersectImageWithLine(const double p1[3],
       }
 
     this->ResetPickInfo();
-    this->Mapper = 0;
+    this->Mapper = imageMapper;
     this->DataSet = data;
 
     // Compute all the pick values
diff --git a/Rendering/Core/vtkChooserPainter.cxx b/Rendering/Core/vtkChooserPainter.cxx
index 6e1cfcd..e03ea70 100644
--- a/Rendering/Core/vtkChooserPainter.cxx
+++ b/Rendering/Core/vtkChooserPainter.cxx
@@ -47,7 +47,7 @@ vtkChooserPainter::vtkChooserPainter()
   this->UseLinesPainterForWireframes = 0;
 #if defined(__APPLE__) && (defined(VTK_USE_CARBON) || defined(VTK_USE_COCOA))
   /*
-   * On some apples, glPolygonMode(*,GL_LINE) does not render anything
+   * On some Macs, glPolygonMode(*,GL_LINE) does not render anything
    * for polys. To fix this, we use the GL_LINE_LOOP to render the polygons.
    */
   this->UseLinesPainterForWireframes = 1;
diff --git a/Rendering/Core/vtkColorTransferFunction.cxx b/Rendering/Core/vtkColorTransferFunction.cxx
index ad79dc1..61e6a1b 100644
--- a/Rendering/Core/vtkColorTransferFunction.cxx
+++ b/Rendering/Core/vtkColorTransferFunction.cxx
@@ -257,11 +257,8 @@ vtkColorTransferFunction::~vtkColorTransferFunction()
 {
   delete [] this->Table;
 
-  if ( this->Function )
-    {
-    delete [] this->Function;
-    this->Function = NULL;
-    }
+  delete [] this->Function;
+  this->Function = NULL;
 
   for(unsigned int i=0;i<this->Internal->Nodes.size();i++)
     {
@@ -284,11 +281,8 @@ double *vtkColorTransferFunction::GetDataPointer()
 {
   int size = static_cast<int>(this->Internal->Nodes.size());
 
-  if ( this->Function )
-    {
-    delete [] this->Function;
-    this->Function = NULL;
-    }
+  delete [] this->Function;
+  this->Function = NULL;
 
   if ( size > 0 )
     {
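
The two hunks above drop the null checks around delete [] because C++ defines
delete and delete [] on a null pointer as a no-op, so the guarded and unguarded
forms are equivalent. A tiny standalone illustration of that guarantee:

    // delete [] on a null pointer is well defined and does nothing, so
    // `if (ptr) { delete [] ptr; }` is just extra noise.
    void Example()
    {
      double *function = 0; // NULL
      delete [] function;   // well-defined no-op in C++
      function = 0;         // keep it null for the next allocation
    }
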
diff --git a/Rendering/Core/vtkColorTransferFunction.h b/Rendering/Core/vtkColorTransferFunction.h
index b6ea015..6f02cd7 100644
--- a/Rendering/Core/vtkColorTransferFunction.h
+++ b/Rendering/Core/vtkColorTransferFunction.h
@@ -244,7 +244,7 @@ protected:
   // Description:
   // Set the range of scalars being mapped. The set has no functionality
   // in this subclass of vtkScalarsToColors.
-  virtual void SetRange(double, double) {};
+  virtual void SetRange(double, double) {}
   void SetRange(double rng[2]) {this->SetRange(rng[0],rng[1]);};
 
   // Internal method to sort the vector and update the
diff --git a/Rendering/Core/vtkCompositePainter.cxx b/Rendering/Core/vtkCompositePainter.cxx
index 303db35..9f7dff9 100644
--- a/Rendering/Core/vtkCompositePainter.cxx
+++ b/Rendering/Core/vtkCompositePainter.cxx
@@ -30,7 +30,7 @@
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
 
-#include <assert.h>
+#include <cassert>
 
 // Return NULL if no override is supplied.
 vtkAbstractObjectFactoryNewMacro(vtkCompositePainter)
diff --git a/Rendering/Core/vtkDiscretizableColorTransferFunction.cxx b/Rendering/Core/vtkDiscretizableColorTransferFunction.cxx
index 2f55f52..0bda12b 100644
--- a/Rendering/Core/vtkDiscretizableColorTransferFunction.cxx
+++ b/Rendering/Core/vtkDiscretizableColorTransferFunction.cxx
@@ -19,12 +19,23 @@
 #include "vtkMath.h"
 #include "vtkObjectFactory.h"
 #include "vtkPiecewiseFunction.h"
+#include "vtkTemplateAliasMacro.h"
+#include "vtkTuple.h"
 
 #include <vector>
 
+class vtkDiscretizableColorTransferFunction::vtkInternals
+{
+public:
+  std::vector<vtkTuple<double, 3> > IndexedColors;
+};
+
 vtkStandardNewMacro(vtkDiscretizableColorTransferFunction);
+vtkCxxSetObjectMacro(vtkDiscretizableColorTransferFunction,
+  ScalarOpacityFunction, vtkPiecewiseFunction);
 //-----------------------------------------------------------------------------
 vtkDiscretizableColorTransferFunction::vtkDiscretizableColorTransferFunction()
+  : Internals(new vtkInternals())
 {
   this->LookupTable = vtkLookupTable::New();
 
@@ -47,13 +58,93 @@ vtkDiscretizableColorTransferFunction::~vtkDiscretizableColorTransferFunction()
   this->SetScalarOpacityFunction(NULL);
   this->LookupTable->Delete();
   delete [] this->Data;
+
+  delete this->Internals;
+  this->Internals = NULL;
 }
 
 //-----------------------------------------------------------------------------
-struct vtkDiscretizableColorTransferFunctionNode
+unsigned long vtkDiscretizableColorTransferFunction::GetMTime()
 {
-  double Value[6];
-};
+  unsigned long mtime = this->Superclass::GetMTime();
+  if (this->ScalarOpacityFunction)
+    {
+    unsigned long somtime = this->ScalarOpacityFunction->GetMTime();
+    mtime = somtime > mtime? somtime : mtime;
+    }
+  if (this->LookupTable)
+    {
+    unsigned ltmtime = this->LookupTable->GetMTime();
+    mtime = ltmtime > mtime? ltmtime : mtime;
+    }
+
+  return mtime;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDiscretizableColorTransferFunction::SetNumberOfIndexedColors(
+  unsigned int count)
+{
+  if (static_cast<unsigned int>(this->Internals->IndexedColors.size()) != count)
+    {
+    this->Internals->IndexedColors.resize(count);
+    this->Modified();
+    }
+}
+
+//-----------------------------------------------------------------------------
+unsigned int vtkDiscretizableColorTransferFunction::GetNumberOfIndexedColors()
+{
+  return static_cast<unsigned int>(this->Internals->IndexedColors.size());
+}
+
+//-----------------------------------------------------------------------------
+void vtkDiscretizableColorTransferFunction::SetIndexedColor(
+  unsigned int index, double r, double g, double b)
+{
+  if (static_cast<unsigned int>(this->Internals->IndexedColors.size()) <= index)
+    {
+    // resize and fill all new colors with the same color as specified.
+    size_t old_size = this->Internals->IndexedColors.size();
+    size_t new_size = static_cast<size_t>(index+1);
+    this->Internals->IndexedColors.resize(new_size);
+
+    for (size_t cc = old_size; cc < new_size; cc++)
+      {
+      double *data = this->Internals->IndexedColors[cc].GetData();
+      data[0] = r;
+      data[1] = g;
+      data[2] = b;
+      }
+
+    this->Modified();
+    }
+  else if (this->Internals->IndexedColors[index].GetData()[0] != r ||
+    this->Internals->IndexedColors[index].GetData()[1] != g ||
+    this->Internals->IndexedColors[index].GetData()[2] != b )
+    {
+    // color has changed, change it.
+    double *data = this->Internals->IndexedColors[index].GetData();
+    data[0] = r;
+    data[1] = g;
+    data[2] = b;
+
+    this->Modified();
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkDiscretizableColorTransferFunction::GetIndexedColor(vtkIdType i, double rgba[4])
+{
+  if (this->IndexedLookup || this->Discretize)
+    {
+    this->LookupTable->GetIndexedColor(i, rgba);
+    }
+  else
+    {
+    this->Superclass::GetIndexedColor(i, rgba);
+    }
+}
 
 //-----------------------------------------------------------------------------
 void vtkDiscretizableColorTransferFunction::SetUseLogScale(int useLogScale)
@@ -77,14 +168,6 @@ void vtkDiscretizableColorTransferFunction::SetUseLogScale(int useLogScale)
 }
 
 //-----------------------------------------------------------------------------
-void vtkDiscretizableColorTransferFunction::SetNumberOfValues(vtkIdType number)
-{
-  this->NumberOfValues = number;
-  this->LookupTable->SetNumberOfTableValues(number);
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
 int vtkDiscretizableColorTransferFunction::IsOpaque()
 {
   return !this->EnableOpacityMapping;
@@ -95,28 +178,55 @@ void vtkDiscretizableColorTransferFunction::Build()
 {
   this->Superclass::Build();
 
+  if (this->BuildTime > this->GetMTime())
+    {
+    // no need to rebuild anything.
+    return;
+    }
+
   this->LookupTable->SetVectorMode(this->VectorMode);
   this->LookupTable->SetVectorComponent(this->VectorComponent);
   this->LookupTable->SetIndexedLookup(this->IndexedLookup);
-  this->LookupTable->SetAnnotations( this->AnnotatedValues, this->Annotations );
 
-  if ( this->IndexedLookup )
+  // This is essential since otherwise the LookupTable doesn't update the
+  // annotations map. That's a bug in the implementation of
+  // vtkScalarsToColors::SetAnnotations(..,..);
+  this->LookupTable->SetAnnotations(NULL, NULL);
+  this->LookupTable->SetAnnotations(this->AnnotatedValues, this->Annotations);
+
+  if (this->IndexedLookup)
     {
-    int nv = this->GetSize();
-    this->LookupTable->SetNumberOfTableValues( nv );
-    double nodeVal[6];
-    for ( int i = 0; i < nv; ++ i )
+    if (this->GetNumberOfIndexedColors() > 0)
       {
-      this->GetNodeValue( i, nodeVal );
-      nodeVal[4] = 1.;
-      this->LookupTable->SetTableValue( i, &nodeVal[1] );
+      // Use the specified indexed-colors.
+      vtkIdType count = this->GetNumberOfAnnotatedValues();
+      this->LookupTable->SetNumberOfTableValues(count);
+      for (size_t cc=0; cc < this->Internals->IndexedColors.size() &&
+                        cc < static_cast<size_t>(count); cc++)
+        {
+        double rgba[4];
+        rgba[0] = this->Internals->IndexedColors[cc].GetData()[0];
+        rgba[1] = this->Internals->IndexedColors[cc].GetData()[1];
+        rgba[2] = this->Internals->IndexedColors[cc].GetData()[2];
+        rgba[3] = 1.0;
+        this->LookupTable->SetTableValue(static_cast<int>(cc), rgba);
+        }
+      }
+    else
+      {
+      // old logic for backwards compatibility.
+      int nv = this->GetSize();
+      this->LookupTable->SetNumberOfTableValues( nv );
+      double nodeVal[6];
+      for ( int i = 0; i < nv; ++ i )
+        {
+        this->GetNodeValue( i, nodeVal );
+        nodeVal[4] = 1.;
+        this->LookupTable->SetTableValue( i, &nodeVal[1] );
+        }
       }
-    return;
     }
-
-  if (this->Discretize && (this->GetMTime() > this->BuildTime ||
-      (this->ScalarOpacityFunction.GetPointer() &&
-       this->ScalarOpacityFunction->GetMTime() > this->BuildTime)))
+  else if (this->Discretize)
     {
     // Do not omit the LookupTable->SetNumberOfTableValues call:
     // WritePointer does not update the NumberOfColors ivar.
@@ -127,7 +237,7 @@ void vtkDiscretizableColorTransferFunction::Build()
     double range[2];
     this->GetRange(range);
     bool logRangeValid = true;
-    if(this->UseLogScale)
+    if (this->UseLogScale)
       {
       logRangeValid = range[0] > 0.0 || range[1] < 0.0;
       if(!logRangeValid && this->LookupTable->GetScale() == VTK_SCALE_LOG10)
@@ -153,9 +263,9 @@ void vtkDiscretizableColorTransferFunction::Build()
       lut_ptr[4*cc+3] = 255;
       }
     delete [] table;
-
-    this->BuildTime.Modified();
     }
+
+  this->BuildTime.Modified();
 }
 
 //-----------------------------------------------------------------------------
@@ -177,26 +287,7 @@ void vtkDiscretizableColorTransferFunction::SetNanColor(
 unsigned char* vtkDiscretizableColorTransferFunction::MapValue(double v)
 {
   this->Build();
-  if ( this->IndexedLookup )
-    {
-    vtkIdType idx = this->GetAnnotatedValueIndex( v );
-    if ( idx < 0 || this->GetSize() == 0 )
-      {
-      return this->Superclass::MapValue( vtkMath::Nan() );
-      }
-    double nodeValue[6];
-    this->GetNodeValue( idx % this->GetSize(), nodeValue );
-    this->UnsignedCharRGBAValue[0] =
-      static_cast<unsigned char>(255.0*nodeValue[1] + 0.5);
-    this->UnsignedCharRGBAValue[1] =
-      static_cast<unsigned char>(255.0*nodeValue[2] + 0.5);
-    this->UnsignedCharRGBAValue[2] =
-      static_cast<unsigned char>(255.0*nodeValue[3] + 0.5);
-    this->UnsignedCharRGBAValue[3] = 255;
-    return this->UnsignedCharRGBAValue;
-    }
-
-  if (this->Discretize)
+  if (this->Discretize || this->IndexedLookup)
     {
     return this->LookupTable->MapValue(v);
     }
@@ -208,13 +299,14 @@ unsigned char* vtkDiscretizableColorTransferFunction::MapValue(double v)
 void vtkDiscretizableColorTransferFunction::GetColor(double v, double rgb[3])
 {
   this->Build();
-  if (this->Discretize)
+  if (this->Discretize || this->IndexedLookup)
     {
     this->LookupTable->GetColor(v, rgb);
-    return;
     }
-
-  this->Superclass::GetColor(v, rgb);
+  else
+    {
+    this->Superclass::GetColor(v, rgb);
+    }
 }
 
 //-----------------------------------------------------------------------------
@@ -231,41 +323,120 @@ double vtkDiscretizableColorTransferFunction::GetOpacity(double v)
 }
 
 //-----------------------------------------------------------------------------
-vtkUnsignedCharArray* vtkDiscretizableColorTransferFunction::MapScalars(vtkDataArray *scalars,
-  int colorMode, int component)
+vtkUnsignedCharArray* vtkDiscretizableColorTransferFunction::MapScalars(
+  vtkDataArray *scalars, int colorMode, int component)
 {
-  if ( this->IndexedLookup )
-    {
-    return this->Superclass::MapScalars( scalars, colorMode, component );
-    }
-
   this->Build();
 
   bool scalars_are_mapped = !(colorMode == VTK_COLOR_MODE_DEFAULT) &&
                              vtkUnsignedCharArray::SafeDownCast(scalars);
 
-  vtkUnsignedCharArray *colors = this->Discretize ?
+  vtkUnsignedCharArray *colors = (this->Discretize || this->IndexedLookup) ?
     this->LookupTable->MapScalars(scalars, colorMode, component):
     this->Superclass::MapScalars(scalars, colorMode, component);
 
   // calculate alpha values
-  if(colors &&
+  if (colors &&
      colors->GetNumberOfComponents() == 4 &&
-     !scalars_are_mapped && !this->IndexedLookup &&
+     !scalars_are_mapped &&
+     !this->IndexedLookup && //  we don't change alpha for IndexedLookup.
      this->EnableOpacityMapping &&
      this->ScalarOpacityFunction.GetPointer())
     {
-    for(vtkIdType i = 0; i < scalars->GetNumberOfTuples(); i++)
+    MapDataArrayToOpacity(scalars, component, colors);
+    }
+  return colors;
+}
+
+template<typename T>
+struct VectorComponentGetter
+{
+  double Get(
+    T* scalars, int component, int numberOfComponents, vtkIdType tuple)
+  {
+    double value = *(scalars + tuple * numberOfComponents + component);
+    return value;
+  }
+};
+
+template<typename T>
+struct VectorMagnitudeGetter
+{
+  double Get(
+    T* scalars, int component, int numberOfComponents, vtkIdType tuple)
+  {
+    (void)component;
+    double v = 0.0;
+    for (int j = 0; j < numberOfComponents; ++j)
       {
-      double value = scalars->GetTuple1(i);
-      double alpha = this->ScalarOpacityFunction->GetValue(value);
-      colors->SetValue(4*i+3, static_cast<unsigned char>(alpha * 255.0 + 0.5));
+      double u = *(scalars + tuple * numberOfComponents + j);
+      v += u * u;
       }
+    v = sqrt (v);
+    return v;
+  }
+};
+
+template<typename T, typename VectorGetter>
+void vtkDiscretizableColorTransferFunction::MapVectorToOpacity (
+  VectorGetter getter, T* scalars, int component,
+  int numberOfComponents, vtkIdType numberOfTuples, unsigned char* colors)
+{
+  for(vtkIdType i = 0; i < numberOfTuples; i++)
+    {
+    double value = getter.Get (scalars, component, numberOfComponents, i);
+    double alpha = this->ScalarOpacityFunction->GetValue(value);
+    *(colors + i * 4 + 3) = static_cast<unsigned char>(alpha * 255.0 + 0.5);
     }
+}
 
-  return colors;
+template<template<class> class VectorGetter>
+void vtkDiscretizableColorTransferFunction::AllTypesMapVectorToOpacity (
+  int scalarType,
+  void* scalarPtr, int component,
+  int numberOfComponents, vtkIdType numberOfTuples, unsigned char* colorPtr)
+{
+  switch (scalarType)
+    {
+    vtkTemplateAliasMacro(
+      MapVectorToOpacity(
+        VectorGetter<VTK_TT>(),
+        static_cast<VTK_TT*>(scalarPtr), component, numberOfComponents,
+        numberOfTuples, colorPtr));
+    }
 }
 
+void vtkDiscretizableColorTransferFunction::MapDataArrayToOpacity(
+  vtkDataArray *scalars, int component, vtkUnsignedCharArray* colors)
+{
+  int scalarType = scalars->GetDataType ();
+  void* scalarPtr = scalars->GetVoidPointer(0);
+  unsigned char* colorPtr = static_cast<unsigned char*> (
+    colors->GetVoidPointer(0));
+  int numberOfComponents = scalars->GetNumberOfComponents ();
+  vtkIdType numberOfTuples = scalars->GetNumberOfTuples ();
+  if (component >= numberOfComponents)
+    {
+    vtkWarningMacro(
+      << "Clamping component: " << component
+      << " to numberOfComponents - 1: " << (numberOfComponents - 1));
+    component = numberOfComponents - 1;
+    }
+  if (component < 0)
+    {
+    AllTypesMapVectorToOpacity<VectorMagnitudeGetter> (
+      scalarType, scalarPtr,
+      component, numberOfComponents, numberOfTuples, colorPtr);
+    }
+  else
+    {
+    AllTypesMapVectorToOpacity<VectorComponentGetter> (
+      scalarType, scalarPtr,
+      component, numberOfComponents, numberOfTuples, colorPtr);
+    }
+}
+
+
 //-----------------------------------------------------------------------------
 double* vtkDiscretizableColorTransferFunction::GetRGBPoints()
 {
@@ -300,41 +471,12 @@ vtkIdType vtkDiscretizableColorTransferFunction::GetNumberOfAvailableColors()
 }
 
 //----------------------------------------------------------------------------
-void vtkDiscretizableColorTransferFunction::SetScalarOpacityFunction(vtkPiecewiseFunction *function)
-{
-  if(this->ScalarOpacityFunction != function)
-    {
-    if (this->ScalarOpacityFunction &&
-      this->ScalarOpacityFunctionObserverId > 0)
-      {
-      this->ScalarOpacityFunction->RemoveObserver(this->ScalarOpacityFunctionObserverId);
-      this->ScalarOpacityFunctionObserverId = 0;
-      }
-    this->ScalarOpacityFunction = function;
-    if (function)
-      {
-      this->ScalarOpacityFunctionObserverId =
-        function->AddObserver(vtkCommand::ModifiedEvent,
-          this,
-          &vtkDiscretizableColorTransferFunction::ScalarOpacityFunctionModified);
-      }
-    this->Modified();
-    }
-}
-
-//----------------------------------------------------------------------------
 vtkPiecewiseFunction* vtkDiscretizableColorTransferFunction::GetScalarOpacityFunction() const
 {
   return this->ScalarOpacityFunction;
 }
 
 //-----------------------------------------------------------------------------
-void vtkDiscretizableColorTransferFunction::ScalarOpacityFunctionModified()
-{
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
 void vtkDiscretizableColorTransferFunction::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os, indent);
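
The getter templates added above reduce each tuple to one scalar before it is
passed to ScalarOpacityFunction->GetValue(): a fixed component when component
>= 0 (VectorComponentGetter), or the Euclidean magnitude when component < 0
(VectorMagnitudeGetter). A small sketch of that reduction for a 3-component
tuple, not part of the patch itself:

    #include <cmath>

    // Value fed to the opacity function for one tuple t = (t0, t1, t2).
    static double ReduceTuple(const double t[3], int component)
    {
      if (component >= 0)
        {
        return t[component]; // VectorComponentGetter behavior
        }
      // VectorMagnitudeGetter behavior
      return std::sqrt(t[0]*t[0] + t[1]*t[1] + t[2]*t[2]);
    }

The resulting alpha is then written into every fourth byte of the color array,
exactly as MapVectorToOpacity does above:
colors[4*i + 3] = (unsigned char)(alpha * 255.0 + 0.5).
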
diff --git a/Rendering/Core/vtkDiscretizableColorTransferFunction.h b/Rendering/Core/vtkDiscretizableColorTransferFunction.h
index b1c72ca..ba787e8 100644
--- a/Rendering/Core/vtkDiscretizableColorTransferFunction.h
+++ b/Rendering/Core/vtkDiscretizableColorTransferFunction.h
@@ -16,23 +16,26 @@
 // vtkLookupTable.
 // .SECTION Description
 // This is a cross between a vtkColorTransferFunction and a vtkLookupTable
-// selectively combiniting the functionality of both.
-// NOTE: One must call Build() after making any changes to the points
-// in the ColorTransferFunction to ensure that the discrete and non-discrete
-// version match up.
+// selectively combining the functionality of both. This class is a
+// vtkColorTransferFunction allowing users to specify the RGB control points
+// that control the color transfer function. At the same time, by setting
+// \a Discretize to 1 (true), one can force the transfer function to only have
+// \a NumberOfValues discrete colors.
 //
-// This class behaves differently depending on how \a IndexedLookup is set.
-// When true, vtkLookupTable enters a mode for representing categorical color maps.
-// By setting \a IndexedLookup to true, you indicate that the annotated
-// values are the only valid values for which entries in the color table
-// should be returned. The colors in the lookup \a Table are assigned
+// When \a IndexedLookup is true, this class behaves differently. The annotated
+// values are considered to be the only valid values for which entries in the
+// color table should be returned. The colors for annotated values are those
+// specified using \a SetIndexedColor. Typically, there must be at least as many
+// indexed colors specified as there are annotations. For backwards compatibility,
+// if no indexed colors are specified, the colors in the lookup \a Table are assigned
 // to annotated values by taking the modulus of their index in the list
-// of annotations. \a IndexedLookup changes the behavior of \a GetIndex,
-// which in turn changes the way \a MapScalars behaves;
-// when \a IndexedLookup is true, \a MapScalars will search for
-// scalar values in \a AnnotatedValues and use the resulting index to
-// determine the color. If a scalar value is not present in \a AnnotatedValues,
+// of annotations. If a scalar value is not present in \a AnnotatedValues,
 // then \a NanColor will be used.
+//
+// NOTE: One must call Build() after making any changes to the points
+// in the ColorTransferFunction to ensure that the discrete and non-discrete
+// versions match up.
+
 
 #ifndef __vtkDiscretizableColorTransferFunction_h
 #define __vtkDiscretizableColorTransferFunction_h
@@ -55,6 +58,36 @@ public:
   int IsOpaque();
 
   // Description:
+  // Add colors to use when \a IndexedLookup is true.
+  // \a SetIndexedColor() will automatically call
+  // SetNumberOfIndexedColors(index+1) if the current number of indexed colors
+  // is not sufficient for the specified index; all new entries will be
+  // initialized with the rgb values passed to this call.
+  void SetIndexedColor(unsigned int index, const double rgb[3])
+    { this->SetIndexedColor(index, rgb[0], rgb[1], rgb[2]); }
+  void SetIndexedColor(unsigned int index, double r, double g, double b);
+
+  /** Get the "indexed color" assigned to an index.
+   *
+   * The index is used in \a IndexedLookup mode to assign colors to annotations (in the order
+   * the annotations were set).
+   * Subclasses must implement this and interpret how to treat the index.
+   * vtkLookupTable simply returns GetTableValue(\a index % \a this->GetNumberOfTableValues()).
+   * vtkColorTransferFunction returns the color associated with node \a index % \a this->GetSize().
+   *
+   * Note that implementations *must* set the opacity (alpha) component of the color, even if they
+   * do not provide opacity values in their colormaps. In that case, alpha = 1 should be used.
+   */
+  virtual void GetIndexedColor(vtkIdType i, double rgba[4]);
+
+  // Description:
+  // Set the number of indexed colors. These are used when IndexedLookup is
+  // true. If no indexed colors are specified, for backwards compatibility,
+  // this class reverts to using the RGBPoints for colors.
+  void SetNumberOfIndexedColors(unsigned int count);
+  unsigned int GetNumberOfIndexedColors();
+
+  // Description:
   // Generate discretized lookup table, if applicable.
   // This method must be called after changes to the ColorTransferFunction
   // otherwise the discretized version will be inconsistent with the
@@ -80,7 +113,7 @@ public:
   // Set the number of values i.e. colors to be generated in the
   // discrete lookup table. This has no effect if Discretize is off.
   // The default is 256.
-  void SetNumberOfValues(vtkIdType number);
+  vtkSetMacro(NumberOfValues, vtkIdType);
   vtkGetMacro(NumberOfValues, vtkIdType);
 
   // Description:
@@ -158,14 +191,14 @@ public:
   vtkGetMacro(EnableOpacityMapping, bool)
   vtkBooleanMacro(EnableOpacityMapping, bool)
 
+  // Description:
+  // Overridden to include the ScalarOpacityFunction's MTime.
+  virtual unsigned long GetMTime();
+
 protected:
   vtkDiscretizableColorTransferFunction();
   ~vtkDiscretizableColorTransferFunction();
 
-  // Description:
-  // Called when ScalarOpacityFunction is modified.
-  void ScalarOpacityFunctionModified();
-
   int Discretize;
   int UseLogScale;
 
@@ -178,13 +211,28 @@ protected:
   vtkSmartPointer<vtkPiecewiseFunction> ScalarOpacityFunction;
   unsigned long ScalarOpacityFunctionObserverId;
 
+  void MapDataArrayToOpacity(
+    vtkDataArray *scalars, int component, vtkUnsignedCharArray* colors);
+
 private:
   vtkDiscretizableColorTransferFunction(const vtkDiscretizableColorTransferFunction&); // Not implemented.
   void operator=(const vtkDiscretizableColorTransferFunction&); // Not implemented.
+  template<typename T, typename VectorGetter>
+    void MapVectorToOpacity (
+      VectorGetter getter, T* scalars, int component,
+      int numberOfComponents, vtkIdType numberOfTuples, unsigned char* colors);
+  template<template<class> class VectorGetter>
+    void AllTypesMapVectorToOpacity (
+      int scalarType,
+      void* scalarsPtr, int component,
+      int numberOfComponents, vtkIdType numberOfTuples, unsigned char* colors);
+
 
   // Pointer used by GetRGBPoints().
   double* Data;
+
+  class vtkInternals;
+  vtkInternals* Internals;
 };
 
 #endif
-
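
The header above documents the new indexed-color path: when IndexedLookup is on
and indexed colors have been set, Build() copies them into the internal lookup
table instead of deriving colors from the RGB control points. A minimal usage
sketch; the annotation values, labels and colors are made-up example data:

    #include "vtkDiscretizableColorTransferFunction.h"
    #include "vtkNew.h"
    #include "vtkVariant.h"

    void SetupCategoricalColors()
    {
      vtkNew<vtkDiscretizableColorTransferFunction> ctf;
      ctf->SetIndexedLookup(1);

      // Annotate two categorical values (example data).
      ctf->SetAnnotation(vtkVariant(0), "background");
      ctf->SetAnnotation(vtkVariant(1), "tissue");

      // One indexed color per annotation; Build() pushes these into the
      // lookup table used by MapValue()/GetColor()/MapScalars().
      ctf->SetIndexedColor(0, 0.0, 0.0, 0.0);
      ctf->SetIndexedColor(1, 0.8, 0.4, 0.4);
      ctf->Build();
    }
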
diff --git a/Rendering/Core/vtkFollower.h b/Rendering/Core/vtkFollower.h
index 84d03e9..bf8254f 100644
--- a/Rendering/Core/vtkFollower.h
+++ b/Rendering/Core/vtkFollower.h
@@ -24,7 +24,7 @@
 // 90 degrees.
 
 // .SECTION see also
-// vtkActor vtkCamera
+// vtkActor vtkCamera vtkAxisFollower vtkProp3DFollower
 
 #ifndef __vtkFollower_h
 #define __vtkFollower_h
diff --git a/Rendering/Core/vtkFrustumCoverageCuller.h b/Rendering/Core/vtkFrustumCoverageCuller.h
index 1a29519..649c206 100644
--- a/Rendering/Core/vtkFrustumCoverageCuller.h
+++ b/Rendering/Core/vtkFrustumCoverageCuller.h
@@ -89,7 +89,7 @@ public:
 
 protected:
   vtkFrustumCoverageCuller();
-  ~vtkFrustumCoverageCuller() {};
+  ~vtkFrustumCoverageCuller() {}
 
   double       MinimumCoverage;
   double       MaximumCoverage;
diff --git a/Rendering/Core/vtkGPUInfoList.cxx b/Rendering/Core/vtkGPUInfoList.cxx
index 3a4b077..ae31c26 100644
--- a/Rendering/Core/vtkGPUInfoList.cxx
+++ b/Rendering/Core/vtkGPUInfoList.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkGPUInfoList.h"
 
-#include <assert.h>
+#include <cassert>
 #include "vtkGPUInfo.h"
 #include "vtkGraphicsFactory.h"
 
diff --git a/Rendering/Core/vtkGenericVertexAttributeMapping.cxx b/Rendering/Core/vtkGenericVertexAttributeMapping.cxx
index edc3d9b..97e85e9 100644
--- a/Rendering/Core/vtkGenericVertexAttributeMapping.cxx
+++ b/Rendering/Core/vtkGenericVertexAttributeMapping.cxx
@@ -86,7 +86,7 @@ void vtkGenericVertexAttributeMapping::AddMapping(
   if (this->RemoveMapping(attributeName.str().c_str()))
     {
     vtkWarningMacro("Replacing existing mapping for attribute "
-      << attributeName);
+      << attributeName.str().c_str());
     }
 
   vtkInternal::vtkInfo info;
diff --git a/Rendering/Core/vtkGlyph3DMapper.cxx b/Rendering/Core/vtkGlyph3DMapper.cxx
index 0cbbc8a..3b8a962 100644
--- a/Rendering/Core/vtkGlyph3DMapper.cxx
+++ b/Rendering/Core/vtkGlyph3DMapper.cxx
@@ -35,7 +35,7 @@
 #include "vtkTransform.h"
 #include "vtkTrivialProducer.h"
 
-#include <assert.h>
+#include <cassert>
 #include <vector>
 
 // Return NULL if no override is supplied.
diff --git a/Rendering/Core/vtkHardwareSelector.cxx b/Rendering/Core/vtkHardwareSelector.cxx
index 74bab67..93edc14 100644
--- a/Rendering/Core/vtkHardwareSelector.cxx
+++ b/Rendering/Core/vtkHardwareSelector.cxx
@@ -73,11 +73,10 @@ public:
   // Ids for props that were hit.
   std::set<int> HitProps;
   std::map<int, vtkSmartPointer<vtkProp> > Props;
+
+  // state that's managed through the renderer
   double OriginalBackground[3];
   bool OriginalGradient;
-  int OriginalMultisample;
-  int OriginalLighting;
-  int OriginalBlending;
 
   typedef std::map<PixelInformation, std::set<vtkIdType>,
     PixelInformationComparator> MapOfAttributeIds;
@@ -177,8 +176,12 @@ public:
 
 };
 
-vtkStandardNewMacro(vtkHardwareSelector);
+//----------------------------------------------------------------------------
+vtkAbstractObjectFactoryNewMacro(vtkHardwareSelector);
+
+//----------------------------------------------------------------------------
 vtkCxxSetObjectMacro(vtkHardwareSelector, Renderer, vtkRenderer);
+
 //----------------------------------------------------------------------------
 vtkHardwareSelector::vtkHardwareSelector()
 {
@@ -365,16 +368,11 @@ void vtkHardwareSelector::BeginRenderProp()
     {
     return;
     }
-  // Ensure that blending/lighting/multisampling is off.
-  vtkPainterDeviceAdapter* device = this->Renderer->GetRenderWindow()->
-    GetPainterDeviceAdapter();
-  this->Internals->OriginalMultisample = device->QueryMultisampling();
-  this->Internals->OriginalLighting = device->QueryLighting();
-  this->Internals->OriginalBlending = device->QueryBlending();
 
-  device->MakeMultisampling(0);
-  device->MakeLighting(0);
-  device->MakeBlending(0);
+  // device specific prep
+  vtkRenderWindow *renWin = this->Renderer->GetRenderWindow();
+  this->BeginRenderProp(renWin);
+
   //cout << "In BeginRenderProp" << endl;
   //glFinish();
   if (this->CurrentPass == ACTOR_PASS)
@@ -390,7 +388,7 @@ void vtkHardwareSelector::BeginRenderProp()
     // Since 0 is reserved for nothing selected, we offset propid by 1.
     propid = propid + 1;
     vtkHardwareSelector::Convert(propid, color);
-    this->Renderer->GetRenderWindow()->GetPainterDeviceAdapter()->SendAttribute(
+    renWin->GetPainterDeviceAdapter()->SendAttribute(
       vtkDataSetAttributes::SCALARS, 3, VTK_FLOAT, color);
     }
   else if (this->CurrentPass == PROCESS_PASS)
@@ -398,13 +396,13 @@ void vtkHardwareSelector::BeginRenderProp()
     float color[3];
     // Since 0 is reserved for nothing selected, we offset propid by 1.
     vtkHardwareSelector::Convert(this->ProcessID + 1, color);
-    this->Renderer->GetRenderWindow()->GetPainterDeviceAdapter()->SendAttribute(
+    renWin->GetPainterDeviceAdapter()->SendAttribute(
       vtkDataSetAttributes::SCALARS, 3, VTK_FLOAT, color);
     }
   else
     {
     float color[3] = {0, 0, 0};
-    this->Renderer->GetRenderWindow()->GetPainterDeviceAdapter()->SendAttribute(
+    renWin->GetPainterDeviceAdapter()->SendAttribute(
       vtkDataSetAttributes::SCALARS, 3, VTK_FLOAT, color);
     }
 }
@@ -415,15 +413,15 @@ void vtkHardwareSelector::EndRenderProp()
   if (this->InPropRender)
     {
     this->InPropRender--;
+
     if (this->InPropRender != 0)
       {
       return;
       }
-    vtkPainterDeviceAdapter* device = this->Renderer->GetRenderWindow()->
-      GetPainterDeviceAdapter();
-    device->MakeMultisampling(this->Internals->OriginalMultisample);
-    device->MakeLighting(this->Internals->OriginalLighting);
-    device->MakeBlending(this->Internals->OriginalBlending);
+
+    // device specific cleanup
+    vtkRenderWindow *renWin = this->Renderer->GetRenderWindow();
+    this->EndRenderProp(renWin);
     }
 }
 
diff --git a/Rendering/Core/vtkHardwareSelector.h b/Rendering/Core/vtkHardwareSelector.h
index b7c4ef2..2e32890 100644
--- a/Rendering/Core/vtkHardwareSelector.h
+++ b/Rendering/Core/vtkHardwareSelector.h
@@ -62,6 +62,7 @@
 #include "vtkObject.h"
 
 class vtkRenderer;
+class vtkRenderWindow;
 class vtkSelection;
 class vtkProp;
 class vtkTextureObject;
@@ -94,7 +95,7 @@ public:
 
   // Description:
   // Get/Set the renderer to perform the selection on.
-  void SetRenderer(vtkRenderer*);
+  virtual void SetRenderer(vtkRenderer*);
   vtkGetObjectMacro(Renderer, vtkRenderer);
 
   // Description:
@@ -176,8 +177,8 @@ public:
   // Description:
   // Called by the mapper (vtkHardwareSelectionPolyDataPainter) before and after
   // rendering each prop.
-  void BeginRenderProp();
-  void EndRenderProp();
+  virtual void BeginRenderProp();
+  virtual void EndRenderProp();
 
   // Description:
   // Get/Set the process id. If process id < 0 (default -1), then the
@@ -233,11 +234,16 @@ protected:
   vtkHardwareSelector();
   ~vtkHardwareSelector();
 
+  // Called internally before and after each prop is rendered
+  // for device specific configuration/preparation etc.
+  virtual void BeginRenderProp(vtkRenderWindow *) = 0;
+  virtual void EndRenderProp(vtkRenderWindow *) = 0;
+
   static void Convert(int id, float tcoord[3])
     {
-      tcoord[0] = static_cast<float>((id & 0xff)/255.0);
-      tcoord[1] = static_cast<float>(((id & 0xff00) >> 8)/255.0);
-      tcoord[2] = static_cast<float>(((id & 0xff0000) >> 16)/255.0);
+    tcoord[0] = static_cast<float>((id & 0xff)/255.0);
+    tcoord[1] = static_cast<float>(((id & 0xff00) >> 8)/255.0);
+    tcoord[2] = static_cast<float>(((id & 0xff0000) >> 16)/255.0);
     }
 
   int Convert(unsigned long offset, unsigned char* pb)
@@ -246,7 +252,6 @@ protected:
       {
       return 0;
       }
-
     offset = offset * 3;
     unsigned char rgb[3];
     rgb[0] = pb[offset];
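
The Convert() helpers shown above pack the low 24 bits of an id into an RGB
color and read an 8-bit-per-channel pixel back into an id. A standalone
round-trip sketch of the same encoding, not part of the patch:

    // Encode an id as normalized RGB and decode an RGB pixel back into
    // the id. Since 0 is reserved for "nothing selected", callers offset
    // ids by 1 before encoding, as BeginRenderProp does above.
    static void IdToColor(int id, float tcoord[3])
    {
      tcoord[0] = static_cast<float>((id & 0xff) / 255.0);
      tcoord[1] = static_cast<float>(((id & 0xff00) >> 8) / 255.0);
      tcoord[2] = static_cast<float>(((id & 0xff0000) >> 16) / 255.0);
    }

    static int ColorToId(const unsigned char rgb[3])
    {
      return rgb[0] | (rgb[1] << 8) | (rgb[2] << 16);
    }
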
diff --git a/Rendering/Core/vtkImageActor.cxx b/Rendering/Core/vtkImageActor.cxx
index 9e7215d..5a06935 100644
--- a/Rendering/Core/vtkImageActor.cxx
+++ b/Rendering/Core/vtkImageActor.cxx
@@ -29,12 +29,12 @@ vtkStandardNewMacro(vtkImageActor);
 //----------------------------------------------------------------------------
 vtkImageActor::vtkImageActor()
 {
-  this->DisplayExtent[0] = -1;
-  this->DisplayExtent[1] = 0;
+  this->DisplayExtent[0] = 0;
+  this->DisplayExtent[1] = -1;
   this->DisplayExtent[2] = 0;
-  this->DisplayExtent[3] = 0;
+  this->DisplayExtent[3] = -1;
   this->DisplayExtent[4] = 0;
-  this->DisplayExtent[5] = 0;
+  this->DisplayExtent[5] = -1;
 
   vtkMath::UninitializeBounds(this->DisplayBounds);
 
@@ -208,7 +208,7 @@ void vtkImageActor::SetDisplayExtent(int extent[6])
     {
     if (this->Mapper && this->Mapper->IsA("vtkImageSliceMapper"))
       {
-      if (this->DisplayExtent[0] != -1)
+      if (this->DisplayExtent[0] <= this->DisplayExtent[1])
         {
         static_cast<vtkImageSliceMapper *>(this->Mapper)->CroppingOn();
         static_cast<vtkImageSliceMapper *>(this->Mapper)->
@@ -284,7 +284,7 @@ double *vtkImageActor::GetDisplayBounds()
   // if the display extent has not been set, use first slice
   extent[5] = extent[4];
 
-  if (this->DisplayExtent[0] != -1)
+  if (this->DisplayExtent[0] <= this->DisplayExtent[1])
     {
     extent[0] = this->DisplayExtent[0];
     extent[1] = this->DisplayExtent[1];
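
The hunks above replace the old DisplayExtent[0] != -1 sentinel with the usual
VTK convention that an extent is unset when its minimum exceeds its maximum;
the constructor now initializes all three axes that way. A tiny sketch of the
check being used, not part of the patch:

    // The display extent counts as set only when the first axis is
    // non-empty (min <= max).
    static bool DisplayExtentIsSet(const int displayExtent[6])
    {
      return displayExtent[0] <= displayExtent[1];
    }
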
diff --git a/Rendering/Core/vtkInteractorEventRecorder.cxx b/Rendering/Core/vtkInteractorEventRecorder.cxx
index 0cf7b91..144d850 100644
--- a/Rendering/Core/vtkInteractorEventRecorder.cxx
+++ b/Rendering/Core/vtkInteractorEventRecorder.cxx
@@ -52,10 +52,7 @@ vtkInteractorEventRecorder::~vtkInteractorEventRecorder()
 {
   this->SetInteractor(0);
 
-  if ( this->FileName )
-    {
-    delete [] this->FileName;
-    }
+  delete [] this->FileName;
 
   if ( this->InputStream )
     {
@@ -64,17 +61,11 @@ vtkInteractorEventRecorder::~vtkInteractorEventRecorder()
     this->InputStream = NULL;
     }
 
-  if ( this->OutputStream )
-    {
-    delete this->OutputStream;
-    this->OutputStream = NULL;
-    }
+  delete this->OutputStream;
+  this->OutputStream = NULL;
 
-  if ( this->InputString )
-    {
-    delete [] this->InputString;
-    this->InputString = NULL;
-    }
+  delete [] this->InputString;
+  this->InputString = NULL;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/Core/vtkInteractorObserver.h b/Rendering/Core/vtkInteractorObserver.h
index 3d40692..d524782 100644
--- a/Rendering/Core/vtkInteractorObserver.h
+++ b/Rendering/Core/vtkInteractorObserver.h
@@ -66,7 +66,7 @@ public:
   // observers; disabling it removes the observers. Prior to enabling the
   // vtkInteractorObserver you must set the render window interactor (via
   // SetInteractor()). Initial value is 0.
-  virtual void SetEnabled(int) {};
+  virtual void SetEnabled(int) {}
   int GetEnabled() {return this->Enabled;}
   void EnabledOn() {this->SetEnabled(1);}
   void EnabledOff() {this->SetEnabled(0);}
diff --git a/Rendering/Core/vtkInteractorStyle.cxx b/Rendering/Core/vtkInteractorStyle.cxx
index 89162fd..78200e1 100644
--- a/Rendering/Core/vtkInteractorStyle.cxx
+++ b/Rendering/Core/vtkInteractorStyle.cxx
@@ -60,8 +60,11 @@ vtkInteractorStyle::vtkInteractorStyle()
   this->OutlineActor        = NULL;
   this->OutlineMapper       = vtkPolyDataMapper::New();
 
-  this->OutlineMapper->SetInputConnection(
-    this->Outline->GetOutputPort());
+  if(this->OutlineMapper && this->Outline)
+    {
+    this->OutlineMapper->SetInputConnection(
+      this->Outline->GetOutputPort());
+    }
 
   this->PickedRenderer      = NULL;
   this->CurrentProp         = NULL;
diff --git a/Rendering/Core/vtkInteractorStyle.h b/Rendering/Core/vtkInteractorStyle.h
index 5c1d9fc..9c07613 100644
--- a/Rendering/Core/vtkInteractorStyle.h
+++ b/Rendering/Core/vtkInteractorStyle.h
@@ -189,15 +189,15 @@ public:
 
   // Description:
   // Generic event bindings can be overridden in subclasses
-  virtual void OnMouseMove() {};
-  virtual void OnLeftButtonDown() {};
-  virtual void OnLeftButtonUp() {};
-  virtual void OnMiddleButtonDown() {};
-  virtual void OnMiddleButtonUp() {};
-  virtual void OnRightButtonDown() {};
-  virtual void OnRightButtonUp() {};
-  virtual void OnMouseWheelForward() {};
-  virtual void OnMouseWheelBackward() {};
+  virtual void OnMouseMove() {}
+  virtual void OnLeftButtonDown() {}
+  virtual void OnLeftButtonUp() {}
+  virtual void OnMiddleButtonDown() {}
+  virtual void OnMiddleButtonUp() {}
+  virtual void OnRightButtonDown() {}
+  virtual void OnRightButtonUp() {}
+  virtual void OnMouseWheelForward() {}
+  virtual void OnMouseWheelBackward() {}
 
   // Description:
   // OnChar is triggered when an ASCII key is pressed. Some basic key presses
@@ -207,29 +207,29 @@ public:
   // OnKeyDown is triggered by pressing any key (identical to OnKeyPress()).
   // An empty implementation is provided. The behavior of this function should
   // be specified in the subclass.
-  virtual void OnKeyDown() {};
+  virtual void OnKeyDown() {}
 
   // OnKeyUp is triggered by releaseing any key (identical to OnKeyRelease()).
   // An empty implementation is provided. The behavior of this function should
   // be specified in the subclass.
-  virtual void OnKeyUp() {};
+  virtual void OnKeyUp() {}
 
   // OnKeyPress is triggered by pressing any key (identical to OnKeyDown()).
   // An empty implementation is provided. The behavior of this function should
   // be specified in the subclass.
-  virtual void OnKeyPress() {};
+  virtual void OnKeyPress() {}
 
   // OnKeyRelease is triggered by pressing any key (identical to OnKeyUp()).
   // An empty implementation is provided. The behavior of this function should
   // be specified in the subclass.
-  virtual void OnKeyRelease() {};
+  virtual void OnKeyRelease() {}
 
   // Description:
   // These are more esoteric events, but are useful in some cases.
-  virtual void OnExpose() {};
-  virtual void OnConfigure() {};
-  virtual void OnEnter() {};
-  virtual void OnLeave() {};
+  virtual void OnExpose() {}
+  virtual void OnConfigure() {}
+  virtual void OnEnter() {}
+  virtual void OnLeave() {}
 
   // Description:
   // OnTimer calls Rotate, Rotate etc which should be overridden by
@@ -241,12 +241,12 @@ public:
   // are overridden in subclasses to perform the correct motion. Since
   // they might be called from OnTimer, they do not have mouse coord parameters
   // (use interactor's GetEventPosition and GetLastEventPosition)
-  virtual void Rotate() {};
-  virtual void Spin() {};
-  virtual void Pan() {};
-  virtual void Dolly() {};
-  virtual void Zoom() {};
-  virtual void UniformScale() {};
+  virtual void Rotate() {}
+  virtual void Spin() {}
+  virtual void Pan() {}
+  virtual void Dolly() {}
+  virtual void Zoom() {}
+  virtual void UniformScale() {}
 
   // Description:
   // utility routines used by state changes
diff --git a/Rendering/Core/vtkLight.h b/Rendering/Core/vtkLight.h
index c9d620d..84f494a 100644
--- a/Rendering/Core/vtkLight.h
+++ b/Rendering/Core/vtkLight.h
@@ -76,7 +76,7 @@ public:
   // will load its data into the graphics system in response to this method
   // invocation. The actual loading is performed by a vtkLightDevice
   // subclass, which will get created automatically.
-  virtual void Render(vtkRenderer *, int) {};
+  virtual void Render(vtkRenderer *, int) {}
 
   // Description:
   // Set/Get the color of the light. It is possible to set the ambient,
@@ -90,7 +90,7 @@ public:
   vtkSetVector3Macro(SpecularColor,double);
   vtkGetVectorMacro(SpecularColor,double,3);
   void SetColor(double, double, double);
-  void SetColor(double a[3]) { this->SetColor(a[0], a[1], a[2]); }
+  void SetColor(const double a[3]) { this->SetColor(a[0], a[1], a[2]); }
 
   // Description:
   // Set/Get the position of the light.
@@ -100,7 +100,7 @@ public:
   // vtkGetTransformedPosition() instead of vtkGetPosition().
   vtkSetVector3Macro(Position,double);
   vtkGetVectorMacro(Position,double,3);
-  void SetPosition(float *a) {this->SetPosition(a[0],a[1],a[2]);};
+  void SetPosition(const float *a) {this->SetPosition(a[0],a[1],a[2]);};
 
   // Description:
   // Set/Get the point at which the light is shining.
@@ -110,7 +110,7 @@ public:
   // vtkGetTransformedFocalPoint() instead of vtkGetFocalPoint().
   vtkSetVector3Macro(FocalPoint,double);
   vtkGetVectorMacro(FocalPoint,double,3);
-  void SetFocalPoint(float *a) {this->SetFocalPoint(a[0],a[1],a[2]);};
+  void SetFocalPoint(const float *a) {this->SetFocalPoint(a[0],a[1],a[2]);};
 
   // Description:
   // Set/Get the brightness of the light (from one to zero).
@@ -176,7 +176,7 @@ public:
   // Angles are given in degrees.  If the light is a
   // positional light, it is made directional instead.
   void SetDirectionAngle(double elevation, double azimuth);
-  void SetDirectionAngle(double ang[2]) {
+  void SetDirectionAngle(const double ang[2]) {
     this->SetDirectionAngle(ang[0], ang[1]); };
 
   // Description:
diff --git a/Rendering/Core/vtkLightActor.cxx b/Rendering/Core/vtkLightActor.cxx
index 055662c..72eb844 100644
--- a/Rendering/Core/vtkLightActor.cxx
+++ b/Rendering/Core/vtkLightActor.cxx
@@ -276,8 +276,14 @@ void vtkLightActor::UpdateViewProps()
     }
   else
     {
-    this->ConeActor->SetMapper(0);
-    this->FrustumActor->SetCamera(0);
+    if(this->ConeActor)
+      {
+      this->ConeActor->SetMapper(0);
+      }
+    if(this->FrustumActor)
+      {
+      this->FrustumActor->SetCamera(0);
+      }
     vtkErrorMacro(<< "not a spotlight.");
     return;
     }
diff --git a/Rendering/Core/vtkLookupTableWithEnabling.cxx b/Rendering/Core/vtkLookupTableWithEnabling.cxx
index 7302c9f..42ec3cd 100644
--- a/Rendering/Core/vtkLookupTableWithEnabling.cxx
+++ b/Rendering/Core/vtkLookupTableWithEnabling.cxx
@@ -17,7 +17,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkMath.h"
 #include "vtkVariant.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkLookupTableWithEnabling);
 
diff --git a/Rendering/Core/vtkMaterialLibrary.cxx b/Rendering/Core/vtkMaterialLibrary.cxx
deleted file mode 100644
index cc29aa1..0000000
--- a/Rendering/Core/vtkMaterialLibrary.cxx
+++ /dev/null
@@ -1,74 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkMaterialLibrary.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkMaterialLibrary.h"
-
-#include "vtkObjectFactory.h"
-#include "vtkMaterialLibraryMacro.h"
-
-#ifndef vtkMaterialLibraryMacro
-  #define vtkMaterialLibraryMacro(name) \
-    vtkGenericWarningMacro("VTK is not built with shading support." \
-      "No materials are available.");
-#endif
-
-vtkStandardNewMacro(vtkMaterialLibrary);
-//-----------------------------------------------------------------------------
-vtkMaterialLibrary::vtkMaterialLibrary()
-{
-}
-
-//-----------------------------------------------------------------------------
-vtkMaterialLibrary::~vtkMaterialLibrary()
-{
-}
-
-//-----------------------------------------------------------------------------
-char* vtkMaterialLibrary::GetMaterial(const char* name)
-{
-  if (!name || !*name)
-    {
-    return 0;
-    }
-
-  // CMake sets VTK_SHADER_CODE_LIBRARY_CHUNK to be the
-  // chunk of code that does name comparisons and
-  // call appropriate method from the vtk*ShaderLibrary.
-  vtkMaterialLibraryMacro(name)
-  return 0;
-}
-
-//-----------------------------------------------------------------------------
-const char** vtkMaterialLibrary::GetListOfMaterialNames()
-{
-  // defined in vtkMaterialLibraryMacro.h
-  return ::ListOfMaterialNames;
-}
-
-//-----------------------------------------------------------------------------
-unsigned int vtkMaterialLibrary::GetNumberOfMaterials()
-{
-  const char** names = vtkMaterialLibrary::GetListOfMaterialNames();
-  unsigned int cc ;
-  for (cc=0; names[cc]; cc++)
-    {
-    }
-  return cc;
-}
-
-//-----------------------------------------------------------------------------
-void vtkMaterialLibrary::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
diff --git a/Rendering/Core/vtkMaterialLibrary.h b/Rendering/Core/vtkMaterialLibrary.h
deleted file mode 100644
index 56b2f99..0000000
--- a/Rendering/Core/vtkMaterialLibrary.h
+++ /dev/null
@@ -1,61 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkMaterialLibrary.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkMaterialLibrary - Library for Materials
-// .SECTION Description
-// This class provides the Material XMLs.
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-#ifndef __vtkMaterialLibrary_h
-#define __vtkMaterialLibrary_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkObject.h"
-
-class VTKRENDERINGCORE_EXPORT vtkMaterialLibrary : public vtkObject
-{
-public:
-  static vtkMaterialLibrary* New();
-  vtkTypeMacro(vtkMaterialLibrary, vtkObject);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Obtain the code for the shader with given name.
-  // Note that Cg shader names are prefixed with CG and
-  // GLSL shader names are prefixed with GLSL.
-  // This method allocates memory. It's the responsibility
-  // of the caller to free this memory.
-  static char* GetMaterial(const char* name);
-
-  // Description:
-  // Returns an array of pointers to char strings that are
-  // the names of the materials provided by the library.
-  // The end of the array is marked by a null pointer.
-  static const char** GetListOfMaterialNames();
-
-  // Description:
-  // Returns the number of materials provided by the library.
-  static unsigned int GetNumberOfMaterials();
-protected:
-  vtkMaterialLibrary();
-  ~vtkMaterialLibrary();
-
-private:
-  vtkMaterialLibrary(const vtkMaterialLibrary&); // Not implemented.
-  void operator=(const vtkMaterialLibrary&); // Not implemented.
-};
-
-#endif
-
diff --git a/Rendering/Core/vtkPainterDeviceAdapter.h b/Rendering/Core/vtkPainterDeviceAdapter.h
index 18da12c..74d82ae 100644
--- a/Rendering/Core/vtkPainterDeviceAdapter.h
+++ b/Rendering/Core/vtkPainterDeviceAdapter.h
@@ -158,29 +158,43 @@ public:
   // vtkRenderer.
   virtual int Compatible(vtkRenderer *renderer) = 0;
 
+#ifndef VTK_LEGACY_REMOVE
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Turns lighting on and off.
   virtual void MakeLighting(int mode) = 0;
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Returns current lighting setting.
   virtual int QueryLighting() = 0;
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Turns antialiasing on and off.
   virtual void MakeMultisampling(int mode) = 0;
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Returns current antialiasing setting.
   virtual int QueryMultisampling() = 0;
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Turns blending on and off.
   virtual void MakeBlending(int mode) = 0;
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Returns current blending setting.
   virtual int QueryBlending() = 0;
+#endif
 
   // Description:
   // Turns emphasis of vertices on or off for vertex selection.
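
The deprecation notes above say that code needing direct access to OpenGL state
should manage it locally instead of going through the painter device adapter.
A sketch of what that local management could look like on a fixed-function
OpenGL code path; the struct name is hypothetical and the GL calls assume an
OpenGL-based subclass (nothing here is provided by this patch):

    #include "vtkOpenGL.h" // pulls in the GL headers for an OpenGL build

    // Save, override and restore the state the deprecated Make*/Query*
    // methods used to toggle. GL_MULTISAMPLE requires OpenGL 1.3+.
    struct ScopedSelectionGLState
    {
      GLboolean Lighting, Blending, Multisample;

      ScopedSelectionGLState()
      {
        this->Lighting = glIsEnabled(GL_LIGHTING);
        this->Blending = glIsEnabled(GL_BLEND);
        this->Multisample = glIsEnabled(GL_MULTISAMPLE);
        glDisable(GL_LIGHTING);
        glDisable(GL_BLEND);
        glDisable(GL_MULTISAMPLE);
      }

      ~ScopedSelectionGLState()
      {
        if (this->Lighting)    { glEnable(GL_LIGHTING); }
        if (this->Blending)    { glEnable(GL_BLEND); }
        if (this->Multisample) { glEnable(GL_MULTISAMPLE); }
      }
    };
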
diff --git a/Rendering/Core/vtkPainterPolyDataMapper.cxx b/Rendering/Core/vtkPainterPolyDataMapper.cxx
index 2532956..fe64c26 100644
--- a/Rendering/Core/vtkPainterPolyDataMapper.cxx
+++ b/Rendering/Core/vtkPainterPolyDataMapper.cxx
@@ -373,6 +373,12 @@ void vtkPainterPolyDataMapper::RenderPiece(vtkRenderer* ren, vtkActor* act)
 //-------------------------------------------------------------------------
 void vtkPainterPolyDataMapper::ComputeBounds()
 {
+  if (!this->GetInput())
+    {
+    vtkMath::UninitializeBounds(this->Bounds);
+    return;
+    }
+
   this->GetInput()->GetBounds(this->Bounds);
 
   // if the mapper has a painter, update the bounds in the painter
diff --git a/Rendering/Core/vtkPointPicker.h b/Rendering/Core/vtkPointPicker.h
index 40c1896..8394bc9 100644
--- a/Rendering/Core/vtkPointPicker.h
+++ b/Rendering/Core/vtkPointPicker.h
@@ -46,7 +46,7 @@ public:
 
 protected:
   vtkPointPicker();
-  ~vtkPointPicker() {};
+  ~vtkPointPicker() {}
 
   vtkIdType PointId; //picked point
 
diff --git a/Rendering/Core/vtkPrimitivePainter.cxx b/Rendering/Core/vtkPrimitivePainter.cxx
index 7a607cd..f58ed1f 100644
--- a/Rendering/Core/vtkPrimitivePainter.cxx
+++ b/Rendering/Core/vtkPrimitivePainter.cxx
@@ -30,8 +30,6 @@
 #include "vtkProperty.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
-#include "vtkShaderDeviceAdapter.h"
-#include "vtkShaderProgram.h"
 #include "vtkTimerLog.h"
 #include "vtkUnsignedCharArray.h"
 #include "vtkShaderDeviceAdapter2.h"
@@ -295,15 +293,10 @@ void vtkPrimitivePainter::RenderInternal(vtkRenderer* renderer,
     vtkErrorMacro("No actor");
     }
 
-  vtkShaderDeviceAdapter *shaderDevice = NULL;
   vtkShaderDeviceAdapter2 *shaderDevice2 = NULL;
 
   if (prop->GetShading())
     {
-    if (prop->GetShaderProgram())
-      {
-      shaderDevice = prop->GetShaderProgram()->GetShaderDeviceAdapter();
-      }
     shaderDevice2 = prop->GetShaderDeviceAdapter2();
     }
 
@@ -315,11 +308,6 @@ void vtkPrimitivePainter::RenderInternal(vtkRenderer* renderer,
         this->GetInformation()->Get(SHADER_DEVICE_ADAPTOR()));
     }
 
-  if (shaderDevice && this->GenericVertexAttributes)
-    {
-    idx |= VTK_PDM_GENERIC_VERTEX_ATTRIBUTES;
-    }
-
   if (shaderDevice2 && this->GenericVertexAttributes)
     {
     idx |= VTK_PDM_GENERIC_VERTEX_ATTRIBUTES;
diff --git a/Rendering/Core/vtkProp.cxx b/Rendering/Core/vtkProp.cxx
index 983918c..0823a79 100644
--- a/Rendering/Core/vtkProp.cxx
+++ b/Rendering/Core/vtkProp.cxx
@@ -19,7 +19,7 @@
 #include "vtkInformation.h"
 #include "vtkInformationIterator.h"
 #include "vtkInformationKey.h"
-#include <assert.h>
+#include <cassert>
 
 vtkCxxSetObjectMacro(vtkProp,PropertyKeys,vtkInformation);
 
@@ -53,10 +53,8 @@ vtkProp::~vtkProp()
     {
     this->Paths->Delete();
     }
-  if (this->Consumers)
-    {
-    delete [] this->Consumers;
-    }
+
+  delete [] this->Consumers;
 
   if(this->PropertyKeys!=0)
     {
diff --git a/Rendering/Core/vtkProp3D.h b/Rendering/Core/vtkProp3D.h
index 3261a71..1b5caf0 100644
--- a/Rendering/Core/vtkProp3D.h
+++ b/Rendering/Core/vtkProp3D.h
@@ -91,7 +91,7 @@ public:
         this->IsIdentity = 0;
         }
     };
-  virtual void SetOrigin(double pos[3])
+  virtual void SetOrigin(const double pos[3])
     { this->SetOrigin(pos[0], pos[1], pos[2]); }
   vtkGetVectorMacro(Origin, double, 3);
 
diff --git a/Rendering/Core/vtkProp3DCollection.h b/Rendering/Core/vtkProp3DCollection.h
index b2c00ae..ff0dbf8 100644
--- a/Rendering/Core/vtkProp3DCollection.h
+++ b/Rendering/Core/vtkProp3DCollection.h
@@ -58,8 +58,8 @@ public:
   //ETX
 
 protected:
-  vtkProp3DCollection() {};
-  ~vtkProp3DCollection() {};
+  vtkProp3DCollection() {}
+  ~vtkProp3DCollection() {}
 
 
 private:
diff --git a/Rendering/Core/vtkProp3DFollower.cxx b/Rendering/Core/vtkProp3DFollower.cxx
index 748ee61..6ec8dda 100644
--- a/Rendering/Core/vtkProp3DFollower.cxx
+++ b/Rendering/Core/vtkProp3DFollower.cxx
@@ -48,7 +48,7 @@ vtkProp3DFollower::~vtkProp3DFollower()
     this->Camera->UnRegister(this);
     }
 
-  if (this->Camera)
+  if (this->Device)
     {
     this->Device->Delete();
     }
@@ -230,12 +230,12 @@ int vtkProp3DFollower::RenderOpaqueGeometry(vtkViewport *vp)
     this->ComputeMatrix();
     this->Device->SetUserMatrix(this->Matrix);
 
-    return this->Device->RenderOpaqueGeometry(vp);
-    }
-  else
-    {
-    return 0;
+    if (this->GetVisibility())
+      {
+      return this->Device->RenderOpaqueGeometry(vp);
+      }
     }
+  return 0;
 }
 
 //-----------------------------------------------------------------------------
@@ -245,12 +245,12 @@ int vtkProp3DFollower::RenderTranslucentPolygonalGeometry(vtkViewport *vp)
     {
     this->ComputeMatrix();
     this->Device->SetUserMatrix(this->Matrix);
-    return this->Device->RenderTranslucentPolygonalGeometry(vp);
-    }
-  else
-    {
-    return 0;
+    if (this->GetVisibility())
+      {
+      return this->Device->RenderTranslucentPolygonalGeometry(vp);
+      }
     }
+  return 0;
 }
 
 //----------------------------------------------------------------------
@@ -260,12 +260,12 @@ int vtkProp3DFollower::RenderVolumetricGeometry(vtkViewport *vp)
     {
     this->ComputeMatrix();
     this->Device->SetUserMatrix(this->Matrix);
-    return this->Device->RenderVolumetricGeometry(vp);
-    }
-  else
-    {
-    return 0;
+    if (this->GetVisibility())
+      {
+      return this->Device->RenderVolumetricGeometry(vp);
+      }
     }
+  return 0;
 }
 
 //----------------------------------------------------------------------
diff --git a/Rendering/Core/vtkProp3DFollower.h b/Rendering/Core/vtkProp3DFollower.h
index 36868d6..bdddc83 100644
--- a/Rendering/Core/vtkProp3DFollower.h
+++ b/Rendering/Core/vtkProp3DFollower.h
@@ -26,7 +26,7 @@
 // degrees.
 
 // .SECTION see also
-// vtkFollower vtkProp3D vtkActor vtkCamera
+// vtkFollower vtkProp3D vtkCamera vtkProp3DAxisFollower
 
 #ifndef __vtkProp3DFollower_h
 #define __vtkProp3DFollower_h
@@ -103,12 +103,12 @@ protected:
   vtkCamera *Camera;
   vtkProp3D  *Device;
 
+  //Internal matrices to avoid New/Delete for performance reasons
+  vtkMatrix4x4 *InternalMatrix;
+
 private:
   vtkProp3DFollower(const vtkProp3DFollower&);  // Not implemented.
   void operator=(const vtkProp3DFollower&);  // Not implemented.
-
-  //Internal matrices to avoid New/Delete for performance reasons
-  vtkMatrix4x4 *InternalMatrix;
 };
 
 #endif
diff --git a/Rendering/Core/vtkPropCollection.h b/Rendering/Core/vtkPropCollection.h
index 7bf734b..6af3ab7 100644
--- a/Rendering/Core/vtkPropCollection.h
+++ b/Rendering/Core/vtkPropCollection.h
@@ -63,8 +63,8 @@ class VTKRENDERINGCORE_EXPORT vtkPropCollection : public vtkCollection
   //ETX
 
 protected:
-  vtkPropCollection() {};
-  ~vtkPropCollection() {};
+  vtkPropCollection() {}
+  ~vtkPropCollection() {}
 
 
 private:
diff --git a/Rendering/Core/vtkProperty.cxx b/Rendering/Core/vtkProperty.cxx
index 2ef71ea..a01318c 100644
--- a/Rendering/Core/vtkProperty.cxx
+++ b/Rendering/Core/vtkProperty.cxx
@@ -16,19 +16,11 @@
 
 #include "vtkActor.h"
 #include "vtkObjectFactory.h"
-#include "vtkImageData.h"
-#include "vtkImageReader2.h"
-#include "vtkImageReader2Factory.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
-#include "vtkShaderProgram.h"
 #include "vtkSmartPointer.h"
 #include "vtkStdString.h"
 #include "vtkTexture.h"
-#include "vtkXMLDataElement.h"
-#include "vtkXMLMaterial.h"
-#include "vtkXMLMaterialParser.h"
-#include "vtkXMLShader.h"
 
 #include <stdlib.h>
 #include <vtksys/ios/sstream>
@@ -48,121 +40,11 @@ public:
   MapOfTextureNames TextureNames;
 };
 
-vtkCxxSetObjectMacro(vtkProperty, ShaderProgram, vtkShaderProgram);
 //----------------------------------------------------------------------------
 // Return NULL if no override is supplied.
 vtkAbstractObjectFactoryNewMacro(vtkProperty)
-//----------------------------------------------------------------------------
 
 // Construct object with object color, ambient color, diffuse color,
-
-enum IVarEnum {
-  IVarNone = 0,
-  IVarColor,
-  IVarAmbientColor,
-  IVarDiffuseColor,
-  IVarSpecularColor,
-  IVarEdgeColor,
-  IVarAmbient,
-  IVarDiffuse,
-  IVarSpecular,
-  IVarSpecularPower,
-  IVarOpacity,
-
-  IVarPointSize,
-  IVarLineWidth,
-
-  IVarLineStipplePattern,
-  IVarLineStippleRepeatFactor,
-  IVarInterpolation,
-  IVarRepresentation,
-  IVarEdgeVisibility,
-  IVarBackfaceCulling,
-  IVarFrontfaceCulling
-};
-
-static IVarEnum XMLMemberToIvar(const char* name)
-{
-  if (!strcmp(name, "Color"))
-    {
-    return IVarColor;
-    }
-  if (!strcmp(name, "AmbientColor"))
-    {
-    return IVarAmbientColor;
-    }
-  if (!strcmp(name, "DiffuseColor"))
-    {
-    return IVarDiffuseColor;
-    }
-  if (!strcmp(name, "SpecularColor"))
-    {
-    return IVarSpecularColor;
-    }
-  if (!strcmp(name, "EdgeColor"))
-    {
-    return IVarEdgeColor;
-    }
-  if (!strcmp(name, "Ambient"))
-    {
-    return IVarAmbient;
-    }
-  if (!strcmp(name, "Diffuse"))
-    {
-    return IVarDiffuse;
-    }
-  if (!strcmp(name, "Specular"))
-    {
-    return IVarSpecular;
-    }
-  if (!strcmp(name, "SpecularPower"))
-    {
-    return IVarSpecularPower;
-    }
-  if (!strcmp(name, "Opacity"))
-    {
-    return IVarOpacity;
-    }
-  if (!strcmp(name, "PointSize"))
-    {
-    return IVarPointSize;
-    }
-  if (!strcmp(name, "LineWidth"))
-    {
-    return IVarLineWidth;
-    }
-  if (!strcmp(name, "LineStipplePattern"))
-    {
-    return IVarLineStipplePattern;
-    }
-  if (!strcmp(name, "LineStippleRepeatFactor"))
-    {
-    return IVarLineStippleRepeatFactor;
-    }
-  if (!strcmp(name, "Interpolation"))
-    {
-    return IVarInterpolation;
-    }
-  if (!strcmp(name, "Representation"))
-    {
-    return IVarRepresentation;
-    }
-  if (!strcmp(name, "EdgeVisibility"))
-    {
-    return IVarEdgeVisibility;
-    }
-  if (!strcmp(name, "BackfaceCulling"))
-    {
-    return IVarBackfaceCulling;
-    }
-  if (!strcmp(name, "FrontfaceCulling"))
-    {
-    return IVarFrontfaceCulling;
-    }
-  return IVarNone;
-}
-
-
 // specular color, and edge color white; ambient coefficient=0; diffuse
 // coefficient=0; specular coefficient=0; specular power=1; Gouraud shading;
 // and surface representation. Backface and frontface culling are off.
@@ -206,8 +88,6 @@ vtkProperty::vtkProperty()
   this->Lighting = true;
 
   this->Shading = 0;
-  this->ShaderProgram = 0;
-  this->Material = 0;
   this->MaterialName = 0;
   this->Internals = new vtkPropertyInternals;
 }
@@ -215,12 +95,6 @@ vtkProperty::vtkProperty()
 //----------------------------------------------------------------------------
 vtkProperty::~vtkProperty()
 {
-  if (this->Material)
-    {
-    this->Material->UnRegister(this);
-    this->Material = 0;
-    }
-  this->SetShaderProgram(0);
   this->SetMaterialName(0);
   delete this->Internals;
 }
@@ -251,7 +125,6 @@ void vtkProperty::DeepCopy(vtkProperty *p)
     this->SetLineStipplePattern(p->GetLineStipplePattern());
     this->SetLineStippleRepeatFactor(p->GetLineStippleRepeatFactor());
     this->SetShading(p->GetShading());
-    this->LoadMaterial(p->GetMaterial());
 
     this->RemoveAllTextures();
     vtkPropertyInternals::MapOfTextures::iterator iter =
@@ -486,407 +359,7 @@ int vtkProperty::GetTextureUnit(const char* name)
 }
 
 //----------------------------------------------------------------------------
-void vtkProperty::LoadMaterial(const char* name)
-{
-  this->SetMaterialName(0);
-  if( !name || strlen(name) == 0)
-    {
-    this->LoadMaterial(static_cast<vtkXMLMaterial*>(0));
-    return;
-    }
-
-  // vtkXMLMaterial::CreateInstance using library/absolute path/repository
-  // in that order.
-  vtkXMLMaterial* material = vtkXMLMaterial::CreateInstance(name);
-  if (!material)
-    {
-    vtkErrorMacro("Failed to create Material : " << name);
-    return;
-    }
-  this->LoadMaterial(material);
-  material->Delete();
-  return;
-}
-
-//----------------------------------------------------------------------------
-void vtkProperty::LoadMaterialFromString(const char* materialxml)
-{
-  this->SetMaterialName(0);
-  if (!materialxml)
-    {
-    this->LoadMaterial(static_cast<vtkXMLMaterial*>(0));
-    return;
-    }
-  vtkXMLMaterialParser* parser = vtkXMLMaterialParser::New();
-  vtkXMLMaterial* material = vtkXMLMaterial::New();
-  parser->SetMaterial(material);
-  parser->Parse(materialxml);
-  parser->Delete();
-  this->LoadMaterial(material);
-  material->Delete();
-}
-
-// ----------------------------------------------------------------------------
-// Description:
-// Read this->Material from new style shaders.
-// Default implementation is empty.
-void vtkProperty::ReadFrameworkMaterial()
-{
-  // empty. See vtkOpenGLProperty.
-}
-
-//----------------------------------------------------------------------------
-void vtkProperty::LoadMaterial(vtkXMLMaterial* material)
-{
-  this->SetMaterialName(0);
-  vtkSetObjectBodyMacro(Material, vtkXMLMaterial, material);
-  if (this->Material)
-    {
-    this->SetMaterialName(this->Material->GetRootElement()->GetAttribute("name"));
-    this->LoadProperty();
-    this->LoadTextures();
-    int lang = this->Material->GetShaderLanguage();
-    int style = this->Material->GetShaderStyle();
-
-    if (style == 2) // TODO: use a constant instead of a literal
-      {
-      if (lang == vtkXMLShader::LANGUAGE_GLSL)
-        {
-        // ready-for-multipass
-        this->ReadFrameworkMaterial();
-//        vtkShader2Collection *shaders=vtkShader2Collection::New();
-//        this->SetShaderCollection(shaders);
-//        shaders->Delete();
-        }
-      else
-        {
-        vtkErrorMacro(<<"style 2 is only supported with GLSL. Failed to setup the shader.");
-        this->SetShaderProgram(0); // failed to create shaders.
-        }
-      }
-    else
-      {
-      vtkShaderProgram* shader = vtkShaderProgram::CreateShaderProgram(lang);
-      if (shader)
-        {
-        this->SetShaderProgram(shader);
-        shader->Delete();
-        this->ShaderProgram->SetMaterial(this->Material);
-        this->ShaderProgram->ReadMaterial();
-        }
-      // Some materials may have no shaders and only set ivars for vtkProperty.
-      else if ((material->GetNumberOfVertexShaders() != 0) ||
-               (material->GetNumberOfFragmentShaders() != 0))
-        {
-        vtkErrorMacro("Failed to setup the shader.");
-        this->SetShaderProgram(0); // failed to create shaders.
-        }
-      }
-    }
-  else
-    {
-    this->SetShaderProgram(0);
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkProperty::LoadProperty()
-{
-  vtkXMLDataElement* elem = this->Material->GetProperty();
-  if (elem == NULL )
-    {
-    return;
-    }
-
-  int numNested = elem->GetNumberOfNestedElements();
-
-  // Each element is a child node of <Property />
-  for (int iElem = 0; iElem < numNested; iElem++)
-    {
-    vtkXMLDataElement* currElement = elem->GetNestedElement(iElem);
-    const char* tagname = currElement->GetName();
-
-    if (!strcmp(tagname, "Member"))
-      {
-      this->LoadMember(currElement);
-      }
-    else
-      {
-      vtkErrorMacro("Unknown tag name '" << tagname << "'");
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkProperty::LoadTextures()
-{
-  int numTextures = this->Material->GetNumberOfTextures();
-  for (int i = 0; i < numTextures; i++)
-    {
-    this->LoadTexture(this->Material->GetTexture(i));
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkProperty::LoadMember(vtkXMLDataElement* elem)
-{
-  const char* name = elem->GetAttribute("name");
-  if (!name)
-    {
-    vtkErrorMacro("Element missing required attribute 'name'");
-    return;
-    }
-
-  if (!elem->GetAttribute("value"))
-    {
-    vtkErrorMacro("Element with name=" << name << " missing required attribute "
-      "'value'");
-    return;
-    }
-  int number_of_elements;
-  int* pint = 0;
-  double* pdouble = 0;
-  float* pfloat = 0;
-  int success = 0;
-
-  IVarEnum member = XMLMemberToIvar( name );
-
-  // Sort to find the correct number of ivar values
-  if ( member == IVarColor ||
-       member == IVarAmbientColor||
-       member == IVarDiffuseColor||
-       member == IVarSpecularColor||
-       member == IVarEdgeColor )
-    {
-    number_of_elements = 3;
-    }
-  else if ( member == IVarAmbient ||
-            member == IVarDiffuse ||
-            member == IVarSpecular ||
-            member == IVarSpecularPower ||
-            member == IVarSpecularColor||
-            member == IVarOpacity ||
-            member == IVarPointSize ||
-            member == IVarLineWidth ||
-            member == IVarLineStipplePattern ||
-            member == IVarLineStippleRepeatFactor ||
-            member == IVarInterpolation ||
-            member == IVarRepresentation ||
-            member == IVarEdgeVisibility ||
-            member == IVarBackfaceCulling ||
-            member == IVarFrontfaceCulling )
-    {
-    number_of_elements = 1;
-    }
-  else
-    {
-    vtkErrorMacro("Invalid name='" << name);
-    return;
-    }
-
-
-
-  if ( (member == IVarColor) ||
-       (member == IVarAmbientColor) ||
-       (member == IVarDiffuseColor) ||
-       (member == IVarSpecularColor) ||
-       (member == IVarEdgeColor) ||
-       (member == IVarAmbient) ||
-       (member == IVarDiffuse) ||
-       (member == IVarSpecular) ||
-       (member == IVarSpecularPower) ||
-       (member == IVarOpacity) )
-    {
-    pdouble = new double[number_of_elements];
-    success = elem->GetVectorAttribute("value", number_of_elements, pdouble);
-    }
-  else if( (member == IVarPointSize) ||
-           (member == IVarLineWidth) )
-    {
-    pfloat = new float[number_of_elements];
-    success = elem->GetVectorAttribute("value", number_of_elements, pfloat);
-    }
-  else if ( (member == IVarLineStipplePattern) ||
-            (member == IVarLineStippleRepeatFactor) ||
-            (member == IVarInterpolation) ||
-            (member == IVarRepresentation) ||
-            (member == IVarEdgeVisibility) ||
-            (member == IVarBackfaceCulling) ||
-            (member == IVarFrontfaceCulling) )
-    {
-    pint = new int[number_of_elements];
-    success = elem->GetVectorAttribute( "value", number_of_elements, pint);
-    }
-  else
-    {
-    vtkErrorMacro("Invalid name='" << name);
-    return;
-    }
-
-  if (!success)
-    {
-    vtkErrorMacro("Error reading 'value' for name=" << name);
-    delete []pdouble;
-    delete []pfloat;
-    delete []pint;
-    return;
-    }
-
-  if (pdouble)
-    {
-    if (member == IVarColor)
-      {
-      this->SetColor(pdouble);
-      }
-    else if (member == IVarAmbientColor)
-      {
-      this->SetAmbientColor(pdouble);
-      }
-    else if (member == IVarDiffuseColor)
-      {
-      this->SetDiffuseColor(pdouble);
-      }
-    else if (member == IVarSpecularColor)
-      {
-      this->SetSpecularColor(pdouble);
-      }
-    else if (member == IVarEdgeColor)
-      {
-      this->SetEdgeColor(pdouble);
-      }
-    else if (member == IVarAmbient)
-      {
-      this->SetAmbient(*pdouble);
-      }
-    else if (member == IVarDiffuse)
-      {
-      this->SetDiffuse(*pdouble);
-      }
-    else if (member == IVarSpecular)
-      {
-      this->SetSpecular(*pdouble);
-      }
-    else if (member == IVarSpecularPower)
-      {
-      this->SetSpecularPower(*pdouble);
-      }
-    else if (member == IVarOpacity)
-      {
-      this->SetOpacity(*pdouble);
-      }
-    }
-  else if (pfloat)
-    {
-    if (member == IVarPointSize)
-      {
-      this->SetPointSize(*pfloat);
-      }
-    else if (member == IVarLineWidth)
-      {
-      this->SetLineWidth(*pfloat);
-      }
-    }
-  else if (pint)
-    {
-    if (member == IVarLineStipplePattern)
-      {
-      this->SetLineStipplePattern(*pint);
-      }
-    else if (member == IVarLineStippleRepeatFactor)
-      {
-      this->SetLineStippleRepeatFactor(*pint);
-      }
-    else if (member == IVarInterpolation)
-      {
-      this->SetInterpolation(*pint);
-      }
-    else if (member == IVarRepresentation)
-      {
-      this->SetRepresentation(*pint);
-      }
-    else if (member == IVarEdgeVisibility)
-      {
-      this->SetEdgeVisibility(*pint);
-      }
-    else if (member == IVarBackfaceCulling)
-      {
-      this->SetBackfaceCulling(*pint);
-      }
-    else if (member == IVarFrontfaceCulling)
-      {
-      this->SetFrontfaceCulling(*pint);
-      }
-    }
-
-  delete [] pdouble;
-  delete [] pfloat;
-  delete [] pint;
-}
-
-//----------------------------------------------------------------------------
-void vtkProperty::LoadTexture(vtkXMLDataElement* elem )
-{
-  const char* name = elem->GetAttribute("name");
-  if (!name)
-    {
-    vtkErrorMacro("Missing required attribute 'name'");
-    return;
-    }
-
-  const char* type = elem->GetAttribute("type");
-  if (!type)
-    {
-    vtkErrorMacro("Missing required attribute 'type' "
-      "for element with name=" << name);
-    return;
-    }
-
-  const char* location = elem->GetAttribute("location");
-  if (!location)
-    {
-    vtkErrorMacro("Missing required attribute 'location'"
-      "for element with name=" << name);
-    return;
-    }
-
-  char* filename = vtkXMLShader::LocateFile(location);
-
-  vtkImageReader2* reader =
-    vtkImageReader2Factory::CreateImageReader2(filename);
-
-  if (!reader)
-    {
-    vtkErrorMacro("Invalid format for element with name="
-      << name);
-    return;
-    }
-
-  if (filename)
-    {
-    reader->SetFileName(filename);
-    vtkTexture* t = vtkTexture::New();
-    t->SetInputConnection(reader->GetOutputPort());
-    t->InterpolateOn();
-    this->SetTexture(name, t);
-    t->Delete();
-    }
-  else
-    {
-    vtkErrorMacro("Failed to locate texture file " << location);
-    }
-
-  reader->Delete();
-  delete [] filename;
-}
-
-//----------------------------------------------------------------------------
-void vtkProperty::LoadPerlineNoise(vtkXMLDataElement* )
-{
-  vtkWarningMacro("Perlin Noise support not complete yet!");
-}
-
-//----------------------------------------------------------------------------
-void vtkProperty::Render(vtkActor* actor, vtkRenderer* renderer)
+void vtkProperty::Render(vtkActor*, vtkRenderer* renderer)
 {
   // The subclass will have rendered the property already;
   // this class just handles the shading.
@@ -896,68 +369,36 @@ void vtkProperty::Render(vtkActor* actor, vtkRenderer* renderer)
     // nothing to do when rendering for hardware selection.
     return;
     }
-
-  if (this->ShaderProgram && this->GetShading())
-    {
-    vtkDebugMacro("Attempting to use Shaders");
-
-    this->ShaderProgram->Render(actor, renderer);
-    }
 }
 
 //----------------------------------------------------------------------------
-void vtkProperty::PostRender(vtkActor* actor, vtkRenderer* renderer)
+void vtkProperty::PostRender(vtkActor*, vtkRenderer* renderer)
 {
   if (renderer->GetSelector())
     {
     // nothing to do when rendering for hardware selection.
     return;
     }
-
-  if (this->ShaderProgram && this->Shading)
-    {
-    this->ShaderProgram->PostRender(actor, renderer);
-    }
 }
 
 //----------------------------------------------------------------------------
-void vtkProperty::AddShaderVariable(const char* name, int numVars, int* x)
+void vtkProperty::AddShaderVariable(const char*, int, int*)
 {
-  if (!this->ShaderProgram)
-    {
-    return;
-    }
-  this->ShaderProgram->AddShaderVariable(name, numVars, x);
 }
 
 //----------------------------------------------------------------------------
-void vtkProperty::AddShaderVariable(const char* name, int numVars, float* x)
+void vtkProperty::AddShaderVariable(const char*, int, float*)
 {
-  if (!this->ShaderProgram)
-    {
-    return;
-    }
-  this->ShaderProgram->AddShaderVariable(name, numVars, x);
 }
 
 //----------------------------------------------------------------------------
-void vtkProperty::AddShaderVariable(const char* name, int numVars, double* x)
+void vtkProperty::AddShaderVariable(const char*, int, double*)
 {
-  if (!this->ShaderProgram)
-    {
-    return;
-    }
-  this->ShaderProgram->AddShaderVariable(name, numVars, x);
 }
 
 //-----------------------------------------------------------------------------
-void vtkProperty::ReleaseGraphicsResources(vtkWindow *win)
+void vtkProperty::ReleaseGraphicsResources(vtkWindow *)
 {
-  if (this->ShaderProgram)
-    {
-    this->ShaderProgram->ReleaseGraphicsResources(win);
-    }
-
   // vtkOpenGLRenderer releases texture resources, so we don't need to release
   // them here.
 }
@@ -1019,27 +460,6 @@ void vtkProperty::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "Shading: "
     << (this->Shading? "On" : "Off") << endl;
 
-  os << indent << "Material: " ;
-  if (this->Material)
-    {
-    os << endl;
-    this->Material->PrintSelf(os, indent.GetNextIndent());
-    }
-  else
-    {
-    os << "(none)" << endl;
-    }
  os << indent << "MaterialName: " <<
    (this->MaterialName? this->MaterialName:"(none)") << endl;
-
-  os << indent << "ShaderProgram: ";
-  if (this->ShaderProgram)
-    {
-    os << endl;
-    this->ShaderProgram->PrintSelf(os, indent.GetNextIndent());
-    }
-  else
-    {
-    os << "(none)" << endl;
-    }
 }
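
With the XML-material and shader-program plumbing stripped out of vtkProperty.cxx, the class is reduced to its fixed, ivar-based appearance state. A short sketch of the API that survives this patch (plain vtkProperty setters; the actor is assumed to come from an existing pipeline):

    #include <vtkActor.h>
    #include <vtkProperty.h>

    void ConfigureAppearance(vtkActor* actor)
    {
      vtkProperty* prop = actor->GetProperty();
      prop->SetColor(0.8, 0.2, 0.2);    // also seeds the ambient/diffuse/specular colors
      prop->SetOpacity(0.5);
      prop->SetSpecular(0.4);
      prop->SetSpecularPower(20.0);
      prop->SetInterpolationToPhong();
      prop->EdgeVisibilityOn();
      // LoadMaterial()/LoadMaterialFromString() and the ShaderProgram hooks
      // no longer exist after this change.
    }
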
diff --git a/Rendering/Core/vtkProperty.h b/Rendering/Core/vtkProperty.h
index 0542a7b..68e45ed 100644
--- a/Rendering/Core/vtkProperty.h
+++ b/Rendering/Core/vtkProperty.h
@@ -232,28 +232,10 @@ public:
   vtkBooleanMacro(FrontfaceCulling, int);
 
   // Description:
-  // Get the material representation used for shading. The material will be used
-  // only when shading is enabled.
-  vtkGetObjectMacro(Material, vtkXMLMaterial);
-
-  // Description:
   // Returns the name of the material currently loaded, if any.
   vtkGetStringMacro(MaterialName);
 
   // Description:
-  // Load the material. The material can be the name of a
-  // built-on material or the filename for a VTK material XML description.
-  void LoadMaterial(const char* name);
-
-  // Description:
-  // Load the material given the contents of the material file.
-  void LoadMaterialFromString(const char* materialxml);
-
-  // Description:
-  // Load the material given the material representation.
-  void LoadMaterial(vtkXMLMaterial*);
-
-  // Description:
   // Enable/Disable shading. When shading is enabled, the
   // Material must be set.
   vtkSetMacro(Shading, int);
@@ -261,11 +243,6 @@ public:
   vtkBooleanMacro(Shading, int);
 
   // Description:
-  // Get the Shader program. If Material is not set/or not loaded properly,
-  // this will return null.
-  vtkGetObjectMacro(ShaderProgram, vtkShaderProgram);
-
-  // Description:
   // Get the vtkShaderDeviceAdapter2 if set, returns null otherwise.
   virtual vtkShaderDeviceAdapter2* GetShaderDeviceAdapter2()
     { return NULL; }
@@ -384,14 +361,6 @@ protected:
     double diffuse, const double diffuse_color[3],
     double specular, const double specular_color[3]);
 
-  // Description:
-  // Load property iVar values from the Material XML.
-  void LoadProperty();
-  void LoadTextures();
-  void LoadTexture(vtkXMLDataElement*);
-  void LoadPerlineNoise(vtkXMLDataElement*);
-  void LoadMember(vtkXMLDataElement*);
-
   double Color[3];
   double AmbientColor[3];
   double DiffuseColor[3];
@@ -418,21 +387,7 @@ protected:
   char* MaterialName;
   vtkSetStringMacro(MaterialName);
 
-  vtkShaderProgram* ShaderProgram;
-  void SetShaderProgram(vtkShaderProgram*);
-
-  vtkXMLMaterial* Material; // TODO: I wonder if this reference needs to be maintained.
-
-  // Description:
-  // Read this->Material from new style shaders.
-  // Default implementation is empty.
-  virtual void ReadFrameworkMaterial();
-
 //BTX
-  // These friends are provided only for the time being
-  // till we device a graceful way of loading texturing for GLSL.
-  friend class vtkGLSLShaderProgram;
-  friend class vtkShader;
   // FIXME:
   // Don't use these methods. They will be removed. They are provided only
   // for the time-being.
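
Since the LoadMaterial() declarations disappear from vtkProperty.h, downstream code that still calls them needs a version guard if it must build against both older VTK releases and this 6.1 tree. A hedged sketch (the helper name is hypothetical; VTK_MAJOR_VERSION/VTK_MINOR_VERSION come from vtkVersion.h):

    #include <vtkVersion.h>
    #include <vtkProperty.h>

    // Hypothetical compatibility shim for callers of the old XML-material path.
    void MaybeLoadMaterial(vtkProperty* prop, const char* materialName)
    {
    #if VTK_MAJOR_VERSION < 6 || (VTK_MAJOR_VERSION == 6 && VTK_MINOR_VERSION < 1)
      prop->LoadMaterial(materialName);   // still declared before this patch
    #else
      (void)prop; (void)materialName;     // no direct replacement in 6.1
    #endif
    }
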
diff --git a/Rendering/Core/vtkRenderWindow.cxx b/Rendering/Core/vtkRenderWindow.cxx
index ed8aa2a..3751354 100644
--- a/Rendering/Core/vtkRenderWindow.cxx
+++ b/Rendering/Core/vtkRenderWindow.cxx
@@ -80,7 +80,9 @@ vtkRenderWindow::vtkRenderWindow()
   this->AnaglyphColorMask[0] = 4;  // red
   this->AnaglyphColorMask[1] = 3;  // cyan
   this->PainterDeviceAdapter = vtkPainterDeviceAdapter::New();
-  this->ReportGraphicErrors=0; // false
+#ifndef VTK_LEGACY_REMOVE
+  this->ReportGraphicErrors = 0; // false
+#endif
   this->AbortCheckTime = 0.0;
   this->CapturingGL2PSSpecialProps = 0;
 
@@ -94,24 +96,16 @@ vtkRenderWindow::~vtkRenderWindow()
 {
   this->SetInteractor(NULL);
 
-  if (this->AccumulationBuffer)
-    {
-    delete [] this->AccumulationBuffer;
-    this->AccumulationBuffer = NULL;
-    this->AccumulationBufferSize = 0;
-    }
-  if (this->ResultFrame)
-    {
-    delete [] this->ResultFrame;
-    this->ResultFrame = NULL;
-    }
+  delete [] this->AccumulationBuffer;
+  this->AccumulationBuffer = NULL;
+  this->AccumulationBufferSize = 0;
+
+  delete [] this->ResultFrame;
+  this->ResultFrame = NULL;
 
   for (int i = 0; i < 2; ++i)
     {
-    if (this->ConstantFDOffsets[i])
-      {
-      delete [] this->ConstantFDOffsets[i];
-      }
+    delete [] this->ConstantFDOffsets[i];
     this->ConstantFDOffsets[i] = NULL;
     }
 
@@ -178,11 +172,9 @@ void vtkRenderWindow::SetFDFrames(int fdFrames)
 
     for (int i = 0; i < 2; i++)
       {
-      if (this->ConstantFDOffsets[i])
-        {
-        delete [] this->ConstantFDOffsets[i];
-        }
+      delete [] this->ConstantFDOffsets[i];
       this->ConstantFDOffsets[i] = NULL;
+
       if (this->FDFrames > 0)
         {
         this->ConstantFDOffsets[i] = new double[this->FDFrames];
@@ -473,11 +465,8 @@ void vtkRenderWindow::Render()
     this->CopyResultFrame();
     }
 
-  if (this->ResultFrame)
-    {
-    delete [] this->ResultFrame;
-    this->ResultFrame = NULL;
-    }
+  delete [] this->ResultFrame;
+  this->ResultFrame = NULL;
 
   this->InRender = 0;
   this->InvokeEvent(vtkCommand::EndEvent,NULL);
@@ -912,11 +901,55 @@ void vtkRenderWindow::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "MultiSamples: " << this->MultiSamples << "\n";
   os << indent << "StencilCapable: " <<
     (this->StencilCapable ? "True" : "False") << endl;
+#ifndef VTK_LEGACY_REMOVE
   os << indent << "ReportGraphicErrors: "
      << (this->ReportGraphicErrors ? "On" : "Off")<< "\n";
+#endif
+}
+
+#ifndef VTK_LEGACY_REMOVE
+//----------------------------------------------------------------------------
+void vtkRenderWindow::SetReportGraphicErrors(int val)
+{
+  VTK_LEGACY_BODY(vtkRenderWindow::SetReportGraphicErrors, "VTK 6.1");
+  if (this->ReportGraphicErrors != val)
+    {
+    this->ReportGraphicErrors = val;
+    this->Modified();
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkRenderWindow::SetReportGraphicErrorsOn()
+{
+  VTK_LEGACY_BODY(vtkRenderWindow::SetReportGraphicErrorsOn, "VTK 6.1");
+  if (this->ReportGraphicErrors == 0)
+    {
+    this->ReportGraphicErrors = 1;
+    this->Modified();
+    }
 }
 
 //----------------------------------------------------------------------------
+void vtkRenderWindow::SetReportGraphicErrorsOff()
+{
+  VTK_LEGACY_BODY(vtkRenderWindow::SetReportGraphicErrorsOff, "VTK 6.1");
+  if (this->ReportGraphicErrors != 0)
+    {
+    this->ReportGraphicErrors = 0;
+    this->Modified();
+    }
+}
+
+//----------------------------------------------------------------------------
+int vtkRenderWindow::GetReportGraphicErrors()
+{
+  VTK_LEGACY_BODY(vtkRenderWindow::GetReportGraphicErrors, "VTK 6.1");
+  return this->ReportGraphicErrors;
+}
+#endif
+
+//----------------------------------------------------------------------------
 // Update the system, if needed, due to stereo rendering. For some stereo
 // methods, subclasses might need to switch some hardware settings here.
 void vtkRenderWindow::StereoUpdate(void)
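
Several hunks above (and the similar ones in vtkRenderWindowInteractor.cxx and vtkRenderer.cxx below) drop the null checks around delete []. That is safe because deleting a null pointer is a well-defined no-op in C++, so the guards were redundant. A two-line illustration:

    #include <cstddef>

    int main()
    {
      double* resultFrame = NULL;   // never allocated, like an unused buffer
      delete [] resultFrame;        // deleting a null pointer does nothing
      resultFrame = NULL;           // keep the pointer in a known state, as the patch does
      return 0;
    }
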
diff --git a/Rendering/Core/vtkRenderWindow.h b/Rendering/Core/vtkRenderWindow.h
index 80946b7..1aa92bd 100644
--- a/Rendering/Core/vtkRenderWindow.h
+++ b/Rendering/Core/vtkRenderWindow.h
@@ -73,26 +73,23 @@ class vtkUnsignedCharArray;
 #define VTK_CURSOR_HAND      9
 #define VTK_CURSOR_CROSSHAIR 10
 
-// Description:
-// This macro is used to print error message coming from the graphic library
-// (OpenGL for instance) used to actually implement the rendering algorithms.
-// It is only active in debug mode and has no cost in release mode.
-// In debug mode, it reports errors only if flag ReportGraphicError is true
-// on the render window (initial value is false).
-// Signature is:
-// void vtkGraphicErrorMacro(vtkRenderWindow *renderWindow,const char *message)
-#ifdef NDEBUG
-# define vtkGraphicErrorMacro(renderWindow,message)
+#ifndef VTK_LEGACY_REMOVE
+// This macro should not be used; see vtkOpenGLError.h for
+// GL error handling functions and macros.
+#if defined NDEBUG
+# define vtkGraphicErrorMacro(renderWindow,message)   \
+  renderWindow->CheckGraphicError();
 #else
-# define vtkGraphicErrorMacro(renderWindow,message)                     \
-  if(renderWindow->GetReportGraphicErrors())                            \
-    {                                                                   \
-    renderWindow->CheckGraphicError();                                  \
-    if(renderWindow->HasGraphicError())                                 \
-      {                                                                 \
-      vtkErrorMacro(<<message<<" "<<renderWindow->GetLastGraphicErrorString()); \
-      }                                                                 \
+# define vtkGraphicErrorMacro(renderWindow,message)   \
+  renderWindow->CheckGraphicError();                  \
+  if ( renderWindow->GetReportGraphicErrors()         \
+    && renderWindow->HasGraphicError() )              \
+    {                                                 \
+    vtkErrorMacro(                                    \
+      << message << " "                               \
+      << renderWindow->GetLastGraphicErrorString());  \
     }
+# endif
 #endif
 
 class VTKRENDERINGCORE_EXPORT vtkRenderWindow : public vtkWindow
@@ -180,7 +177,7 @@ public:
   // corner).
   virtual void HideCursor() = 0;
   virtual void ShowCursor() = 0;
-  virtual void SetCursorPosition(int , int ) {};
+  virtual void SetCursorPosition(int , int ) {}
 
   // Description:
   // Change the shape of the cursor.
@@ -518,10 +515,17 @@ public:
   virtual bool IsCurrent()=0;
 
   // Description:
+  // Test if the window has a valid drawable. This is
+  // currently only an issue on Mac OS X Cocoa where rendering
+  // to an invalid drawable causes all OpenGL calls to fail
+  // with "invalid framebuffer operation".
+  virtual bool IsDrawable(){ return true; }
+
+  // Description:
   // If called, allow MakeCurrent() to skip cache-check when called.
   // MakeCurrent() reverts to original behavior of cache-checking
   // on the next render.
-  virtual void SetForceMakeCurrent() {};
+  virtual void SetForceMakeCurrent() {}
 
   // Description:
   // Get report of capabilities for the render window
@@ -562,25 +566,22 @@ public:
   vtkBooleanMacro(StencilCapable, int);
 
   // Description:
-  // Turn on/off report of graphic errors. Initial value is false (off).
-  // This flag is used by vtkGraphicErrorMacro.
-  vtkSetMacro(ReportGraphicErrors,int);
-  vtkGetMacro(ReportGraphicErrors,int);
-  vtkBooleanMacro(ReportGraphicErrors,int);
-
-  // Description:
-  // Update graphic error status, regardless of ReportGraphicErrors flag.
-  // It means this method can be used in any context and is not restricted to
-  // debug mode.
-  virtual void CheckGraphicError()=0;
+  // @deprecated Replaced by the CMake variable
+  // VTK_REPORT_OPENGL_ERRORS; error reporting is now
+  // enabled or disabled at compile time.
+  VTK_LEGACY(void SetReportGraphicErrors(int val));
+  VTK_LEGACY(void SetReportGraphicErrorsOn());
+  VTK_LEGACY(void SetReportGraphicErrorsOff());
+  VTK_LEGACY(int GetReportGraphicErrors());
 
+#ifndef VTK_LEGACY_REMOVE
   // Description:
-  // Return the last graphic error status. Initial value is false.
-  virtual int HasGraphicError()=0;
-
-  // Description:
-  // Return a string matching the last graphic error status.
-  virtual const char *GetLastGraphicErrorString()=0;
+  // @deprecated Replaced by
+  // vtkOpenGLCheckErrorMacro
+  virtual void CheckGraphicError() = 0;
+  virtual int HasGraphicError() = 0;
+  virtual const char *GetLastGraphicErrorString() = 0;
+#endif
 
 protected:
   vtkRenderWindow();
@@ -629,10 +630,13 @@ protected:
   int StencilCapable;
   int CapturingGL2PSSpecialProps;
 
+#ifndef VTK_LEGACY_REMOVE
   // Description:
-  // Boolean flag telling if errors from the graphic library have to be
-  // reported by vtkGraphicErrorMacro. Initial value is false (off).
+  // @deprecated Replaced by the CMake variable
+  // VTK_REPORT_OPENGL_ERRORS; error reporting is now
+  // enabled or disabled at compile time.
   int ReportGraphicErrors;
+#endif
 
   // Description:
   // The universal time since the last abort check occurred.
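
The header changes above route the ReportGraphicErrors API through VTK's legacy machinery: VTK_LEGACY() keeps the declarations in normal builds and drops them when VTK_LEGACY_REMOVE is defined, while VTK_LEGACY_BODY() in the .cxx file emits a deprecation warning at run time. A simplified, standalone sketch of that pattern (illustrative macros only, not VTK's actual definitions):

    #include <cstdio>

    #ifndef MY_LEGACY_REMOVE
    # define MY_LEGACY(method) method
    # define MY_LEGACY_BODY(method, version) \
        std::fprintf(stderr, #method " was deprecated for %s\n", version)
    #else
    # define MY_LEGACY(method)
    #endif

    class Window
    {
    public:
      MY_LEGACY(void SetReportGraphicErrors(int val));
      int ReportGraphicErrors;
    };

    #ifndef MY_LEGACY_REMOVE
    void Window::SetReportGraphicErrors(int val)
    {
      MY_LEGACY_BODY(Window::SetReportGraphicErrors, "VTK 6.1");
      this->ReportGraphicErrors = val;
    }
    #endif

    int main()
    {
      Window w;
      w.SetReportGraphicErrors(1);   // still compiles by default, warns at run time
      return 0;
    }
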
diff --git a/Rendering/Core/vtkRenderWindowInteractor.cxx b/Rendering/Core/vtkRenderWindowInteractor.cxx
index 6597485..fdeae0f 100644
--- a/Rendering/Core/vtkRenderWindowInteractor.cxx
+++ b/Rendering/Core/vtkRenderWindowInteractor.cxx
@@ -133,10 +133,7 @@ vtkRenderWindowInteractor::~vtkRenderWindowInteractor()
     {
     this->Picker->UnRegister(this);
     }
-  if ( this->KeySym )
-    {
-    delete [] this->KeySym;
-    }
+  delete [] this->KeySym;
   if ( this->ObserverMediator)
     {
     this->ObserverMediator->Delete();
diff --git a/Rendering/Core/vtkRenderer.cxx b/Rendering/Core/vtkRenderer.cxx
index 7d30802..fb918cb 100644
--- a/Rendering/Core/vtkRenderer.cxx
+++ b/Rendering/Core/vtkRenderer.cxx
@@ -139,10 +139,7 @@ vtkRenderer::~vtkRenderer()
     this->CreatedLight = NULL;
     }
 
-  if (this->BackingImage)
-    {
-    delete [] this->BackingImage;
-    }
+  delete [] this->BackingImage;
 
   this->Actors->Delete();
   this->Actors = NULL;
@@ -308,18 +305,12 @@ void vtkRenderer::Render(void)
 
   // Clean up the space we allocated before. If the PropArray exists,
   // they all should exist
-  if ( this->PropArray)
-    {
-    delete [] this->PropArray;
-    this->PropArray                = NULL;
-    }
+  delete [] this->PropArray;
+  this->PropArray = NULL;
 
   if (this->BackingStore)
     {
-    if (this->BackingImage)
-      {
-      delete [] this->BackingImage;
-      }
+    delete [] this->BackingImage;
 
     int rx1, ry1, rx2, ry2;
 
@@ -969,6 +960,10 @@ void vtkRenderer::ResetCamera(double bounds[6])
     return;
     }
 
+  // Reset the perspective zoom factors; otherwise subsequent zooms will cause
+  // the view angle to become very small and lead to bad depth sorting.
+  this->ActiveCamera->SetViewAngle(30.0);
+
   this->ExpandBounds(bounds, this->ActiveCamera->GetModelTransformMatrix());
 
   center[0] = (bounds[0] + bounds[1])/2.0;
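
The new lines in vtkRenderer::ResetCamera() force the active camera's view angle back to 30 degrees before the view is refit, so code that relies on a custom perspective angle now has to reapply it after a reset. A small workaround sketch (assumes an already-configured renderer):

    #include <vtkRenderer.h>
    #include <vtkCamera.h>

    void ResetViewKeepingAngle(vtkRenderer* renderer)
    {
      vtkCamera* camera = renderer->GetActiveCamera();
      double angle = camera->GetViewAngle();
      renderer->ResetCamera();        // since this patch, also calls SetViewAngle(30.0)
      camera->SetViewAngle(angle);    // restore the caller-chosen angle
    }
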
diff --git a/Rendering/Core/vtkRenderer.h b/Rendering/Core/vtkRenderer.h
index f5bb931..f1c3109 100644
--- a/Rendering/Core/vtkRenderer.h
+++ b/Rendering/Core/vtkRenderer.h
@@ -248,7 +248,7 @@ public:
 
   // Description:
   // Clear the image to the background color.
-  virtual void Clear() {};
+  virtual void Clear() {}
 
   // Description:
   // Returns the number of visible actors.
diff --git a/Rendering/Core/vtkShader.cxx b/Rendering/Core/vtkShader.cxx
deleted file mode 100644
index 9c7ba61..0000000
--- a/Rendering/Core/vtkShader.cxx
+++ /dev/null
@@ -1,1235 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShader.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-
-#include "vtkShader.h"
-#include <vtkObjectFactory.h>
-
-#include "vtkActor.h"
-#include "vtkCamera.h"
-#include "vtkCollectionIterator.h"
-#include "vtkLight.h"
-#include "vtkLightCollection.h"
-#include "vtkProperty.h"
-#include "vtkRenderer.h"
-#include "vtkTimeStamp.h"
-#include "vtkXMLDataElement.h"
-#include "vtkXMLShader.h"
-
-#include <string>
-#include <vector>
-#include <vtksys/SystemTools.hxx>
-
-//-----------------------------------------------------------------------------
-// Helper method.
-static inline int vtkShaderGetType(const char* type)
-{
-  if (!type)
-    {
-    return 0;
-    }
-  if (strcmp(type,"double")==0 ||
-      strcmp(type,"double1")==0 ||
-      strcmp(type,"double2")==0 ||
-      strcmp(type,"double3")==0 ||
-      strcmp(type,"double4")==0 )
-    {
-    return VTK_DOUBLE;
-    }
-
-  // XML attributes should reflect native shader types
-  if (strcmp(type, "float")==0  ||
-      strcmp(type, "float1")==0 ||
-      strcmp(type, "float2")==0 ||
-      strcmp(type, "float3")==0 ||
-      strcmp(type, "float4")==0 ||
-
-      strcmp(type, "vec1")==0 ||
-      strcmp(type, "vec2")==0 ||
-      strcmp(type, "vec3")==0 ||
-      strcmp(type, "vec4")==0 ||
-
-      strcmp(type, "mat2")==0 ||
-      strcmp(type, "mat3")==0 ||
-      strcmp(type, "mat4")==0  )
-    {
-    return VTK_FLOAT;
-    }
-  if (strcmp(type, "int")==0 ||
-      strcmp(type, "ivec2")==0 ||
-      strcmp(type, "ivec3")==0 ||
-      strcmp(type, "ivec4")==0 )
-    {
-    return VTK_INT;
-    }
-  return 0;
-}
-
-//-----------------------------------------------------------------------------
-class vtkShaderUniformVariable
-{
-public:
-  vtkShaderUniformVariable()
-    : Name(),
-      NumberOfValues(0),
-      Type(0),
-      IntValues(NULL),
-      FloatValues(NULL),
-      DoubleValues(NULL)
-    {
-    }
-
-  vtkShaderUniformVariable(const char* name, int num, const int* val)
-    : Name(),
-      NumberOfValues(0),
-      Type(0),
-      IntValues(NULL),
-      FloatValues(NULL),
-      DoubleValues(NULL)
-    {
-    this->SetName(name);
-    this->NumberOfValues = num;
-    this->Type = VTK_INT;
-    this->IntValues = new int[num];
-    for (int i=0; i < num; i++)
-      {
-      this->IntValues[i] = val[i];
-      }
-    }
-
-  vtkShaderUniformVariable(const char* name, int num, const double* val)
-    : Name(),
-      NumberOfValues(0),
-      Type(0),
-      IntValues(NULL),
-      FloatValues(NULL),
-      DoubleValues(NULL)
-    {
-    this->SetName(name);
-    this->NumberOfValues = num;
-    this->Type = VTK_DOUBLE;
-    this->DoubleValues = new double[num];
-    for (int i=0; i < num; i++)
-      {
-      this->DoubleValues[i] = val[i];
-      }
-    }
-
-  vtkShaderUniformVariable(const char* name, int num, const float* val)
-    : Name(),
-      NumberOfValues(0),
-      Type(0),
-      IntValues(NULL),
-      FloatValues(NULL),
-      DoubleValues(NULL)
-    {
-    this->SetName(name);
-    this->NumberOfValues = num;
-    this->Type = VTK_FLOAT;
-    this->FloatValues = new float[num];
-    for (int i=0; i < num; i++)
-      {
-      this->FloatValues[i] = val[i];
-      }
-    }
-
-  // A copy constructor is required to use a class as
-  // a map value.
-  vtkShaderUniformVariable(const vtkShaderUniformVariable& x)
-    {
-    this->SetName( x.GetName() );
-    this->NumberOfValues = x.GetNumberOfValues();
-    this->Type = x.GetType();
-    this->IntValues = NULL;
-    this->FloatValues = NULL;
-    this->DoubleValues = NULL;
-    if ( (this->Type == VTK_INT) && (this->NumberOfValues > 0) )
-      {
-      this->IntValues = new int[this->NumberOfValues];
-      x.GetValue( this->IntValues );
-      }
-    else if ( (this->Type == VTK_FLOAT) && (this->NumberOfValues > 0) )
-      {
-      this->FloatValues = new float[this->NumberOfValues];
-      x.GetValue( this->FloatValues );
-      }
-    else if ( (this->Type == VTK_DOUBLE) && (this->NumberOfValues > 0) )
-      {
-      this->DoubleValues = new double[this->NumberOfValues];
-      x.GetValue( this->DoubleValues );
-      }
-    }
-
-  // Don't allow the default assignment operator to copy pointers
-  // that might be made invalid later if the original objects move,
-  // for instance, in a map operation.
-  void operator=(const vtkShaderUniformVariable& x)
-    {
-    this->SetName( x.GetName() );
-    this->NumberOfValues = x.GetNumberOfValues();
-    this->Type = x.GetType();
-
-    if (this->IntValues)
-      {
-      delete [] this->IntValues;
-      this->IntValues = NULL;
-      }
-    if (this->FloatValues)
-      {
-      delete [] this->FloatValues;
-      this->FloatValues = NULL;
-      }
-    if (this->DoubleValues)
-      {
-      delete [] this->DoubleValues;
-      this->DoubleValues = NULL;
-      }
-
-    if ( (this->Type == VTK_INT) && (this->NumberOfValues > 0) )
-      {
-      this->IntValues = new int[this->NumberOfValues];
-      x.GetValue( this->IntValues );
-      }
-    else if ( (this->Type == VTK_FLOAT) && (this->NumberOfValues > 0) )
-      {
-      this->FloatValues = new float[this->NumberOfValues];
-      x.GetValue( this->FloatValues );
-      }
-    else if ( (this->Type == VTK_DOUBLE) && (this->NumberOfValues > 0) )
-      {
-      this->DoubleValues = new double[this->NumberOfValues];
-      x.GetValue( this->DoubleValues );
-      }
-    }
-
-
-
-  int GetType() const { return this->Type; }
-  int GetNumberOfValues() const { return this->NumberOfValues; }
-
-  int GetValue(int *a) const
-    {
-    if( (this->Type == VTK_INT) && this->IntValues )
-      {
-      for (int i=0; i < this->NumberOfValues; i++)
-        {
-        a[i] = this->IntValues[i];
-        }
-      return 1;
-      }
-    return 0;
-    }
-
-  int GetValue(float *a) const
-    {
-    if( (this->Type == VTK_FLOAT) && this->FloatValues )
-      {
-      for (int i=0; i < this->NumberOfValues; i++)
-        {
-        a[i] = this->FloatValues[i];
-        }
-      return 1;
-      }
-    return 0;
-    }
-
-  int GetValue(double *a) const
-    {
-    if( (this->Type == VTK_DOUBLE) && this->DoubleValues )
-      {
-      for (int i=0; i < this->NumberOfValues; i++)
-        {
-        a[i] = this->DoubleValues[i];
-        }
-      return 1;
-      }
-    return 0;
-    }
-
-
-
-  ~vtkShaderUniformVariable()
-    {
-    if (this->IntValues)
-      {
-      delete [] this->IntValues;
-      this->IntValues = NULL;
-      }
-    if (this->FloatValues)
-      {
-      delete [] this->FloatValues;
-      this->FloatValues = NULL;
-      }
-    if (this->DoubleValues)
-      {
-      delete [] this->DoubleValues;
-      this->DoubleValues = NULL;
-      }
-    }
-
-  void Print(ostream& os, vtkIndent indent)
-    {
-    int i;
-    os << indent << "Name: " << ((this->GetName())? this->GetName() : "(none)") << endl;
-    os << indent << "NumberOfValues: " << this->NumberOfValues;
-    switch (this->Type)
-      {
-    case VTK_INT:
-      os << indent << "Type: int" << endl;
-      os << indent << "Values: " ;
-      for (i=0; i < this->NumberOfValues; i++)
-        {
-        os << this->IntValues[i] << " ";
-        }
-      os << endl;
-      break;
-    case VTK_DOUBLE:
-      os << indent << "Type: double" << endl;
-      os << indent << "Values: " ;
-      for (i=0; i < this->NumberOfValues; i++)
-        {
-        os << this->DoubleValues[i] << " ";
-        }
-      os << endl;
-      break;
-    case VTK_FLOAT:
-      os << indent << "Type: float" << endl;
-      os << indent << "Values: " ;
-      for (i=0; i < this->NumberOfValues; i++)
-        {
-        os << this->FloatValues[i] << " ";
-        }
-      os << endl;
-      break;
-      }
-    }
-
-  const char* GetName() const
-    {
-    return this->Name.c_str();
-    }
-  void SetName(const char* name)
-    {
-    if (name)
-      {
-      this->Name = name;
-      }
-    }
-private:
-  std::string Name;
-  int NumberOfValues;
-  int Type;
-  int* IntValues;
-  float* FloatValues;
-  double* DoubleValues;
-};
-
-
-//-----------------------------------------------------------------------------
-class vtkShaderInternals
-{
-public:
-  std::map<std::string, vtkShaderUniformVariable> UniformVariables;
-};
-
-//-----------------------------------------------------------------------------
-vtkCxxSetObjectMacro(vtkShader, XMLShader, vtkXMLShader);
-//-----------------------------------------------------------------------------
-vtkShader::vtkShader()
-{
-  this->XMLShader = 0;
-  this->Internals = new vtkShaderInternals;
-}
-
-//-----------------------------------------------------------------------------
-vtkShader::~vtkShader()
-{
-  this->SetXMLShader(0);
-  delete this->Internals;
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::PassShaderVariables(vtkActor* actor, vtkRenderer* renderer)
-{
-  if( !this->XMLShader )
-    {
-    return;
-    }
-
-  if( !this->XMLShader->GetRootElement() )
-    {
-    return;
-    }
-
-  this->SetShaderParameters(actor, renderer, this->XMLShader->GetRootElement());
-  this->PassShaderVariablesTime.Modified();
-}
-
-//-----------------------------------------------------------------------------
-int vtkShader::HasShaderVariable(const char* name)
-{
-  if (!name)
-    {
-    return 0;
-    }
-  if (this->Internals->UniformVariables.find(name) !=
-      this->Internals->UniformVariables.end())
-    {
-    return 1;
-    }
-  return 0;
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::AddShaderVariable(const char* name, int num_of_elements,
-  const int * values)
-{
-  if (!name || num_of_elements <= 0 || !values)
-    {
-    return;
-    }
-  this->Internals->UniformVariables[name] = vtkShaderUniformVariable(
-    name, num_of_elements, values);
-
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::AddShaderVariable(const char* name, int num_of_elements,
-  const float* values)
-{
-  if (!name || num_of_elements <= 0 || !values)
-    {
-    return;
-    }
-  this->Internals->UniformVariables[name] = vtkShaderUniformVariable(
-    name, num_of_elements, values);
-  this->Modified();
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::AddShaderVariable(const char* name, int num_of_elements,
-  const double* values)
-{
-  if (!name || num_of_elements <= 0 || !values)
-    {
-    vtkWarningMacro("Need more info to build a Shader Variable!");
-    return;
-    }
-  this->Internals->UniformVariables[name] = vtkShaderUniformVariable(
-    name, num_of_elements, values);
-  this->Modified();
-}
-
-
-//-----------------------------------------------------------------------------
-int vtkShader::GetShaderVariableSize(const char* name)
-{
-  if (!this->HasShaderVariable(name))
-    {
-    return 0;
-    }
-  return this->Internals->UniformVariables[name].GetNumberOfValues();
-}
-
-
-//-----------------------------------------------------------------------------
-int vtkShader::GetShaderVariableType(const char* name)
-{
-  if (!this->HasShaderVariable(name))
-    {
-    return 0;
-    }
-  return this->Internals->UniformVariables[name].GetType();
-}
-
-
-//-----------------------------------------------------------------------------
-int vtkShader::GetShaderVariable(const char* name, int *values)
-{
-  if (!this->HasShaderVariable(name))
-    {
-    return 0;
-    }
-  return this->Internals->UniformVariables[name].GetValue(values);
-}
-
-//-----------------------------------------------------------------------------
-int vtkShader::GetShaderVariable(const char* name, float *values)
-{
-  if (!this->HasShaderVariable(name))
-    {
-    return 0;
-    }
-  return this->Internals->UniformVariables[name].GetValue(values);
-}
-
-//-----------------------------------------------------------------------------
-int vtkShader::GetShaderVariable(const char* name, double *values)
-{
-  if (!this->HasShaderVariable(name))
-    {
-    return 0;
-    }
-  return this->Internals->UniformVariables[name].GetValue(values);
-}
-
-//-----------------------------------------------------------------------------
-// Set all children elements of start elements
-void vtkShader::SetShaderParameters(vtkActor* actor, vtkRenderer* renderer,
-  vtkXMLDataElement* root)
-{
-  if(root==NULL)
-    {
-    return;
-    }
-
-
-  int max = root->GetNumberOfNestedElements();
-  for (int i=0; i < max; i++)
-    {
-    vtkXMLDataElement* elem = root->GetNestedElement(i);
-    // Decide what to do with the elem element.
-    const char* name = elem->GetAttribute("name");
-    if (!name)
-      {
-      vtkErrorMacro("Uniform parameter missing required attribute 'name' " << *elem);
-      continue;
-      }
-
-    const char* tagname = elem->GetName();
-    if (!tagname)
-      {
-      vtkErrorMacro("Unexpected error. XML element has no tag name!");
-      continue;
-      }
-
-    if (strcmp(tagname, "Uniform") == 0)
-      {
-      this->SetUniformParameter(actor, renderer, elem);
-      }
-    else if (strcmp(tagname, "CameraUniform") == 0)
-      {
-      this->SetCameraParameter(actor, renderer, elem);
-      }
-    else if (strcmp(tagname, "LightUniform") == 0)
-      {
-      this->SetLightParameter(actor, renderer, elem);
-      }
-    else if (strcmp(tagname, "MatrixUniform") == 0)
-      {
-      this->SetMatrixParameter(actor, renderer, elem);
-      }
-    else if (strcmp(tagname, "PropertyUniform") == 0)
-      {
-      this->SetPropertyParameter(actor, renderer, elem);
-      }
-    else if (strcmp(tagname, "SamplerUniform") == 0)
-      {
-      this->SetSamplerParameter(actor, renderer, elem);
-      }
-    else if (strcmp(tagname, "ApplicationUniform") == 0)
-      {
-      this->SetApplicationParameter(elem);
-      }
-    else
-      {
-      vtkErrorMacro("Invalid tag: " << tagname);
-      }
-    }
-}
-
-
-void vtkShader::SetUniformParameter(vtkActor* , vtkRenderer* ,
-                                    vtkXMLDataElement* elem)
-  {
-    if (this->GetMTime() < this->PassShaderVariablesTime)
-      {
-      return; // no need to update.
-      }
-    const char* name = elem->GetAttribute("name");
-    const char* ctype = elem->GetAttribute("type");
-    const char* cvalue = elem->GetAttribute("value");
-
-    if (!ctype)
-      {
-      vtkErrorMacro("Missing required attribute 'type' on name=" << name);
-      return;
-      }
-
-    int number_of_elements = 0;
-    if (!elem->GetScalarAttribute("number_of_elements", number_of_elements))
-      {
-      vtkErrorMacro("Missing required attribute 'number_of_elements' " << name );
-      return;
-      }
-
-    if (number_of_elements <= 0)
-      {
-      vtkErrorMacro("'number_of_elements' cannot be " << number_of_elements);
-      return;
-      }
-
-    if (!cvalue && !this->HasShaderVariable(name))
-      {
-      vtkErrorMacro("Variable '" << name << "' doesn't have a value specified in the XML"
-          << " nor as a Shader Variable.");
-      return;
-      }
-
-    int type = vtkShaderGetType(ctype);
-    if (!cvalue && type != this->GetShaderVariableType(name))
-      {
-      vtkErrorMacro("Parameter type mismatch: " << name);
-      return;
-      }
-
-    if (!cvalue && number_of_elements != this->GetShaderVariableSize(name))
-      {
-      vtkErrorMacro("Parameter size mismatch: " << name);
-      return;
-      }
-
-    switch (type)
-      {
-      case VTK_INT:
-        {
-        int *v = new int [number_of_elements];
-        if  ((cvalue && elem->GetVectorAttribute("value", number_of_elements, v))
-          ||(!cvalue && this->GetShaderVariable(name, v)))
-          {
-          this->SetUniformParameter(name, number_of_elements, v);
-          }
-        else
-          {
-          vtkErrorMacro("Failed to set uniform variable : " << name);
-          }
-        delete []v;
-        }
-      break;
-
-    case VTK_FLOAT:
-      {
-      float *v = new float [number_of_elements];
-      if  ((cvalue && elem->GetVectorAttribute("value", number_of_elements, v))
-          ||(!cvalue && this->GetShaderVariable(name, v)))
-          {
-          this->SetUniformParameter(name, number_of_elements, v);
-          }
-      else
-        {
-        vtkErrorMacro("Failed to set uniform variable : " << name);
-        }
-      delete []v;
-      }
-      break;
-
-    case VTK_DOUBLE:
-      {
-      double *v = new double[number_of_elements];
-      if  ((cvalue && elem->GetVectorAttribute("value", number_of_elements, v))
-          ||(!cvalue && this->GetShaderVariable(name, v)))
-        {
-        this->SetUniformParameter(name, number_of_elements, v);
-        }
-      else
-        {
-        vtkErrorMacro("Failed to set uniform variable : " << name);
-        }
-        delete []v;
-      }
-      break;
-    default:
-      vtkErrorMacro("Invalid type: " << ctype);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::SetCameraParameter(vtkActor* , vtkRenderer* ren,
-  vtkXMLDataElement* elem)
-{
-  vtkCamera* camera = ren->GetActiveCamera();
-  if (this->GetMTime() < this->PassShaderVariablesTime &&
-    camera->GetMTime() < this->PassShaderVariablesTime)
-    {
-    return; // no need to update.
-    }
-  const char* name = elem->GetAttribute("name");
-  const char* value = elem->GetAttribute("value");
-
-  if (!name)
-    {
-    vtkErrorMacro("Missing required attribute 'name' on name=");
-    return;
-    }
-
-  if (!value)
-    {
-    vtkErrorMacro("Missing required attribute 'value' on name=" << name);
-    return;
-    }
-
-  double *x = 0;
-  if (strcmp(value, "FocalPoint")==0)
-    {
-    x = camera->GetFocalPoint();
-    this->SetUniformParameter(name, 3, x);
-    }
-  else if (strcmp(value, "Position")==0)
-    {
-    x = camera->GetPosition();
-    this->SetUniformParameter(name, 3, x);
-    }
-  else if (strcmp(value, "ViewUp")==0)
-    {
-    x = camera->GetViewUp();
-    this->SetUniformParameter(name, 3, x);
-    }
-  else if (strcmp(value, "DirectionOfProjection")==0)
-    {
-    x = camera->GetDirectionOfProjection();
-    this->SetUniformParameter(name, 3, x);
-    }
-  else if (strcmp(value, "ViewPlaneNormal") == 0)
-    {
-    x = camera->GetViewPlaneNormal();
-    this->SetUniformParameter(name, 3, x);
-    }
-  else if (strcmp(value, "ViewShear") == 0)
-    {
-    x = camera->GetViewShear();
-    this->SetUniformParameter(name, 3, x);
-    }
-  else if (strcmp(value, "WindowCenter") == 0)
-    {
-    x = camera->GetWindowCenter();
-    this->SetUniformParameter(name, 2, x);
-    }
-  else if (strcmp(value, "ClippingRange") == 0)
-    {
-    x = camera->GetClippingRange();
-    this->SetUniformParameter(name, 2, x);
-    }
-  else if (strcmp(value, "ViewAngle") == 0)
-    {
-    double c = camera->GetViewAngle();
-    this->SetUniformParameter(name, 1, &c);
-    }
-  else if (strcmp(value, "EyeAngle") == 0)
-    {
-    double c = camera->GetEyeAngle();
-    this->SetUniformParameter(name, 1, &c);
-    }
-  else if (strcmp(value, "ParallelScale") == 0)
-    {
-    double c = camera->GetParallelScale();
-    this->SetUniformParameter(name, 1, &c);
-    }
-  else if (strcmp(value, "Thickness") == 0)
-    {
-    double c = camera->GetThickness();
-    this->SetUniformParameter(name, 1, &c);
-    }
-  else if (strcmp(value, "Distance") == 0)
-    {
-    double c = camera->GetDistance();
-    this->SetUniformParameter(name, 1, &c);
-    }
-  else if (strcmp(value, "FocalDisk") == 0)
-    {
-    double c = camera->GetFocalDisk();
-    this->SetUniformParameter(name, 1, &c);
-    }
-  else if (strcmp(value, "ParallelProjection") == 0)
-    {
-    double c = camera->GetParallelProjection();
-    this->SetUniformParameter(name, 1, &c);
-    }
-  else if (strcmp(value, "UseHorizontalViewAngle") == 0)
-    {
-    double c = camera->GetUseHorizontalViewAngle();
-    this->SetUniformParameter(name, 1, &c);
-    }
-  else
-    {
-    vtkErrorMacro("Invalid camera property " << value);
-    }
-}
-
-
-//-----------------------------------------------------------------------------
-void vtkShader::SetPropertyParameter(vtkActor* actor, vtkRenderer* ,
-  vtkXMLDataElement* elem)
-{
-  vtkProperty* property = actor->GetProperty();
-  if (property->GetMTime() < this->PassShaderVariablesTime)
-    {
-    // no need to update.
-    return;
-    }
-  const char* name = elem->GetAttribute("name");
-  if (!name)
-    {
-    vtkErrorMacro("Missing required attribute 'name'");
-    return;
-    }
-
-  const char* value = elem->GetAttribute("value");
-  if (!value)
-    {
-    vtkErrorMacro("Missing required attribute 'value' on name=" << name);
-    return;
-    }
-
-  if( strcmp(value,"Color")==0 )
-    {
-    this->SetUniformParameter(name, 3, property->GetColor());
-    }
-  else if( strcmp(value,"AmbientColor")==0 )
-    {
-    this->SetUniformParameter(name, 3, property->GetAmbientColor());
-    }
-  else if( strcmp(value,"DiffuseColor")==0 )
-    {
-    this->SetUniformParameter(name, 3, property->GetDiffuseColor());
-    }
-  else if( strcmp(value,"SpecularColor")==0 )
-    {
-    this->SetUniformParameter(name, 3, property->GetSpecularColor());
-    }
-  else if( strcmp(value,"EdgeColor")==0 )
-    {
-    this->SetUniformParameter(name, 3, property->GetEdgeColor());
-    }
-  else if( strcmp(value,"Ambient")==0 )
-    {
-    double v = property->GetAmbient();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"Diffuse")==0 )
-    {
-    double v = property->GetDiffuse();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"Specular")==0 )
-    {
-    double v = property->GetSpecular();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"SpecularPower")==0 )
-    {
-    double v = property->GetSpecularPower();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"Opacity")==0 )
-    {
-    double v = property->GetOpacity();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"PointSize")==0 )
-    {
-    double v = property->GetPointSize();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"LineWidth")==0 )
-    {
-    double v = property->GetLineWidth();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"LineStipplePattern")==0 )
-    {
-    int v = property->GetLineStipplePattern();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"LineStippleRepeatFactor")==0 )
-    {
-    int v = property->GetLineStippleRepeatFactor();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"Interpolation")==0 )
-    {
-    int v = property->GetInterpolation();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"Representation")==0 )
-    {
-    int v = property->GetRepresentation();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"EdgeVisibility")==0 )
-    {
-    int v = property->GetEdgeVisibility();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"BackfaceCulling")==0 )
-    {
-    int v = property->GetBackfaceCulling();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"FrontfaceCulling")==0 )
-    {
-    int v = property->GetFrontfaceCulling();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if( strcmp(value,"MTime")==0 )
-    {
-    double mtime = static_cast<double>(property->GetMTime());
-    this->SetUniformParameter(name, 1, &mtime);
-    }
-  else
-    {
-    vtkErrorMacro("Invalid property name for vtkProperty " << value);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::SetLightParameter(vtkActor* , vtkRenderer* renderer,
-  vtkXMLDataElement* elem)
-{
-  const char* name = elem->GetAttribute("name");
-  const char* value = elem->GetAttribute("value");
-  if (!value)
-    {
-    vtkErrorMacro("Missing required attribute 'value'.");
-    return;
-    }
-  int lightid;
-  if (!elem->GetScalarAttribute("light_id",lightid))
-    {
-    lightid = 0;
-    }
-
-  vtkLightCollection* lights = renderer->GetLights();
-
-  // If the number of lights is requested then we don't need to locate the light
-  if (strcmp(value, "NumberOfLights") == 0)
-    {
-    int v = lights->GetNumberOfItems();
-    this->SetUniformParameter(name, 1, &v);
-    return;
-    }
-
-  vtkLight* light = 0;
-  vtkCollectionIterator *iter = lights->NewIterator();
-  int id = 0;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem(), id++)
-    {
-    if (id == lightid)
-      {
-      light = vtkLight::SafeDownCast(iter->GetCurrentObject());
-      break;
-      }
-    }
-  iter->Delete();
-
-  if (!light)
-    {
-    vtkErrorMacro("Failed to locate light with id " << lightid);
-    return;
-    }
-
-  if (lights->GetMTime() < this->PassShaderVariablesTime &&
-    light->GetMTime() < this->PassShaderVariablesTime)
-    {
-    // no need to update.
-    return;
-    }
-
-  if (strcmp(value, "Position") == 0)
-    {
-    this->SetUniformParameter(name, 3, light->GetPosition());
-    }
-  else if (strcmp(value, "FocalPoint") == 0)
-    {
-    this->SetUniformParameter(name, 3, light->GetFocalPoint());
-    }
-  else if (strcmp(value, "AmbientColor") == 0)
-    {
-    this->SetUniformParameter(name, 3, light->GetAmbientColor());
-    }
-  else if (strcmp(value, "DiffuseColor") == 0)
-    {
-    this->SetUniformParameter(name, 3, light->GetDiffuseColor());
-    }
-  else if (strcmp(value, "SpecularColor") == 0)
-    {
-    this->SetUniformParameter(name, 3, light->GetSpecularColor());
-    }
-  else if (strcmp(value, "AttenuationValues") == 0)
-    {
-    this->SetUniformParameter(name, 3, light->GetAttenuationValues());
-    }
-  else if (strcmp(value, "Intensity") == 0)
-    {
-    double v = light->GetIntensity();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if (strcmp(value, "Exponent") == 0)
-    {
-    double v = light->GetExponent();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if (strcmp(value, "ConeAngle") == 0)
-    {
-    double v = light->GetConeAngle();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if (strcmp(value, "Switch") == 0)
-    {
-    int v = light->GetSwitch();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if (strcmp(value, "Positional") == 0)
-    {
-    int v = light->GetPositional();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else if (strcmp(value, "LightType") == 0)
-    {
-    int v = light->GetLightType();
-    this->SetUniformParameter(name, 1, &v);
-    }
-  else
-    {
-    vtkErrorMacro("Invalid light property: " << value);
-    }
-
-}
-
-
-//-----------------------------------------------------------------------------
-// FIXME: Cg allows non-square matrices to be set as program parameters; that
-// should be reflected here as well, but I'm not sure just how.
-void vtkShader::SetMatrixParameter(vtkActor* , vtkRenderer* ,
-  vtkXMLDataElement* elem)
-{
-  const char* name = elem->GetAttribute("name");
-  const char* type = elem->GetAttribute("type");
-  if (!type)
-    {
-    vtkErrorMacro("Missing required attribute 'type' for name=" << name);
-    return;
-    }
-
-  // TODO: for starters, matrices can't be set as Shader Variables.
-  // Matrices CAN be set as shader variables, specifically, they can
-  // be used as uniform variables to both fragment and vertex programs.
-  const char* cvalue = elem->GetAttribute("value");
-  if (!cvalue)
-    {
-    vtkErrorMacro("Missing required attribute 'value' for name=" << name);
-    return;
-    }
-  int number_of_elements;
-  if (!elem->GetScalarAttribute("number_of_elements", number_of_elements) ||
-    number_of_elements <= 0)
-    {
-    vtkErrorMacro("Invalid number_of_elements on name=" << name);
-    return;
-    }
-
-  int order = vtkShader::RowMajor;
-  const char* corder = elem->GetAttribute("order");
-  if (corder && strcmp(corder, "ColumnMajor") == 0)
-    {
-    order = vtkShader::ColumnMajor;
-    }
-
-  // FIXME : 'State' is only meaningful in a Cg context, so it should be in
-  // vtkCgShader and not in vtkShader
-  if (strcmp(type, "State") == 0)
-    {
-    std::vector<std::string> args;
-    vtksys::SystemTools::Split(cvalue, args, ' ');
-    if (args.size() != static_cast<unsigned int>(number_of_elements))
-      {
-      vtkErrorMacro("Mismatch in number_of_elements and actual values!");
-      return;
-      }
-
-    const char* state_matix_type = args[0].c_str();
-    const char* transform_type = (number_of_elements > 1)?
-      args[1].c_str() : 0;
-    this->SetMatrixParameter(name, state_matix_type, transform_type);
-    }
-  else
-    {
-
-    if ( (strcmp(type, "float")==0) ||
-         (strcmp(type,"mat2")==0) ||
-         (strcmp(type,"mat3")==0) ||
-         (strcmp(type,"mat4")==0) )
-      {
-      float *v = new float[number_of_elements];
-      if (elem->GetVectorAttribute("value",number_of_elements, v))
-        {
-        this->SetMatrixParameter(name, number_of_elements, order, v);
-        }
-      else
-        {
-        vtkErrorMacro("Failed to obtain value for name=" << name);
-        }
-      delete [] v;
-      }
-    else if (strcmp(type, "double") == 0)
-      {
-      double *v = new double[number_of_elements];
-      if (elem->GetVectorAttribute("value",number_of_elements, v))
-        {
-        this->SetMatrixParameter(name, number_of_elements, order, v);
-        }
-      else
-        {
-        vtkErrorMacro("Failed to obtain value for name=" << name);
-        }
-      delete [] v;
-      }
-    else
-      {
-      vtkErrorMacro("Invalid 'type'='" << type << "' for name=" << name);
-      }
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::SetSamplerParameter(vtkActor* act, vtkRenderer*,
-  vtkXMLDataElement* elem)
-{
-  const char* name = elem->GetAttribute("name");
-  const char* value = elem->GetAttribute("value");
-  if (!value)
-    {
-    vtkErrorMacro("Missing required attribute 'value' on element "
-      "with name=" << name);
-    return;
-    }
-
-
-  vtkTexture* texture = act->GetProperty()->GetTexture(value);
-
-  if (!texture)
-    {
-    vtkErrorMacro("Property does have texture with name=" << value);
-    return;
-    }
-
-  int texture_unit = act->GetProperty()->GetTextureUnit(value);
-  this->SetSamplerParameter(name, texture, texture_unit);
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::SetApplicationParameter(vtkXMLDataElement* elem)
-{
-  // 'name' is the variable name in the hardware shader program.
-  const char* name = elem->GetAttribute("name");
-  if (!name)
-    {
-    vtkErrorMacro("Missing required attribute 'name' on element.");
-    return;
-    }
-
-  // 'value' is the variable name in the application.
-  const char* value = elem->GetAttribute("value");
-  if (!value)
-    {
-    value = name;
-    }
-
-  // check to see if the application has set a variable named 'value'
-  // If it exists, set it as a uniform parameter
-  if( this->HasShaderVariable(value) )
-    {
-    vtkShaderUniformVariable var = this->Internals->UniformVariables.find(value)->second;
-    if( var.GetType() == VTK_INT )
-      {
-      std::vector<int> x(4,0);
-      if( var.GetValue(&x[0])==1 )
-        {
-        this->SetUniformParameter( name,
-                                   var.GetNumberOfValues(),
-                                   &x[0]);
-        }
-      }
-    else if( var.GetType() == VTK_FLOAT )
-      {
-      std::vector<float> x(4,0.0);
-      if( var.GetValue(&x[0])==1 )
-        {
-        this->SetUniformParameter( name,
-                                   var.GetNumberOfValues(),
-                                   &x[0]);
-        }
-      }
-    else if( var.GetType() == VTK_DOUBLE )
-      {
-      std::vector<double> x(4,0.0);
-      if( var.GetValue(&x[0])==1 )
-        {
-        this->SetUniformParameter( name,
-                                   var.GetNumberOfValues(),
-                                   &x[0]);
-        }
-      }
-    }
-  else
-    {
-    vtkErrorMacro("Shader requires application variable " << name
-      << " which is missing.");
-    }
-}
-
-//-----------------------------------------------------------------------------
-int vtkShader::GetScope()
-{
-  return (this->XMLShader? this->XMLShader->GetScope() : vtkXMLShader::SCOPE_NONE);
-}
-
-//-----------------------------------------------------------------------------
-void vtkShader::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-
-  os << indent << "Number of Shader Variables: "
-    << this->Internals->UniformVariables.size() << endl;
-
-  std::map<std::string, vtkShaderUniformVariable>::iterator iter;
-  for (iter = this->Internals->UniformVariables.begin();
-    iter != this->Internals->UniformVariables.end(); ++iter)
-    {
-    os << indent << "ShaderVariable: " << endl;
-    iter->second.Print(os, indent.GetNextIndent());
-    }
-
-  os << indent << "XMLShader: ";
-  if (this->XMLShader)
-    {
-    os << endl;
-    this->XMLShader->PrintSelf(os, indent.GetNextIndent());
-    }
-  else
-    {
-    os << "(none)" << endl;
-    }
-
-
-}
diff --git a/Rendering/Core/vtkShader.h b/Rendering/Core/vtkShader.h
deleted file mode 100644
index 2e2a911..0000000
--- a/Rendering/Core/vtkShader.h
+++ /dev/null
@@ -1,224 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShader.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-// .NAME vtkShader
-// .SECTION Description
-// vtkShader is a base class for interfacing VTK to hardware shader
-// libraries. vtkShader interprets a vtkXMLDataElement that describes a
-// particular shader. Descendants of this class inherit this functionality and
-// additionally interface to specific shader libraries like NVidia's Cg and
-// OpenGL 2.0 (GLSL) to perform operations on individual shaders.
-//
-// During each render, the vtkShaderProgram calls Compile(),
-// PassShaderVariables() and Bind(), and after the actor has been rendered,
-// calls Unbind(), in that order.
-// .SECTION See Also
-// vtkCgShader vtkGLSLShader
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkShader_h
-#define __vtkShader_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkObject.h"
-
-class vtkActor;
-class vtkCamera;
-class vtkLight;
-class vtkProperty;
-class vtkRenderer;
-class vtkShaderInternals;
-class vtkTexture;
-class vtkWindow;
-class vtkXMLDataElement;
-class vtkXMLShader;
-
-class VTKRENDERINGCORE_EXPORT vtkShader : public vtkObject
-{
-public:
-  vtkTypeMacro(vtkShader, vtkObject);
-  virtual void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Description:
-  // Called to compile the shader code.
-  // The subclasses must only compile the code in this method.
-  // Returns whether the compile was successful.
-  // Subclasses should compile the code only if it was not
-  // already compiled.
-  virtual int Compile() =0;
-
-  // Description:
-  // Called to pass VTK actor/property/light values and other
-  // Shader variables over to the shader. This is called by the ShaderProgram
-  // during each render.
-  virtual void PassShaderVariables(vtkActor* actor, vtkRenderer* ren);
-
-  // Description:
-  // In this method the shader can enable/bind itself. This is applicable
-  // only to Cg, since in GLSL, individual shaders in a program can't be
-  // enabled/bound.
-  virtual void  Bind() { }
-
-  // Description:
-  // Called to unbind the shader. As with Bind(), this is only applicable
-  // to Cg.
-  virtual void Unbind() { }
-
-  // Description:
-  // Release any graphics resources that are being consumed by this actor.
-  // The parameter window could be used to determine which graphic
-  // resources to release.
-  virtual void ReleaseGraphicsResources(vtkWindow *) { }
-
-  // Description:
-  // Get/Set the XMLShader representation for this shader.
-  // A shader is not valid without an XMLShader.
-  void SetXMLShader(vtkXMLShader*);
-  vtkGetObjectMacro(XMLShader, vtkXMLShader);
-
-  // Description:
-  // Indicates if a variable by the given name exists.
-  int HasShaderVariable(const char* name);
-
-  // Description:
-  // Methods to add shader variables to this shader.
-  // The shader variable type must match that declared in
-  // the Material XML; otherwise, the variable is not made available
-  // to the shader.
-  void AddShaderVariable(const char* name, int num_of_elements,
-    const int *values);
-  void AddShaderVariable(const char* name, int num_of_elements,
-    const float *values);
-  void AddShaderVariable(const char* name, int num_of_elements,
-    const double *values);
-
-  // Description:
-  // Get the number of elements in a shader variable. Returns 0 if
-  // the shader variable cannot be found.
-  int GetShaderVariableSize(const char* name);
-
-  // Description:
-  // Returns the type of a Shader variable with the given name.
-  // Return 0 on error.
-  int GetShaderVariableType(const char* name);
-
-  // Description:
-  // Methods to get the value of shader variables with the given name.
-  // Values must be at least the size of the shader variable (obtained
-  // by GetShaderVariableSize()). Returns whether the operation was successful.
-  int GetShaderVariable(const char* name, int* values);
-  int GetShaderVariable(const char* name, float* values);
-  int GetShaderVariable(const char* name, double* values);
-
-  // Description:
-  // Returns the scope of the shader i.e. if it's a vertex or fragment shader.
-  // (vtkXMLShader::SCOPE_VERTEX or vtkXMLShader::SCOPE_FRAGMENT).
-  int GetScope();
-protected:
-  vtkShader();
-  ~vtkShader();
-
-  vtkXMLShader* XMLShader;
-  vtkShaderInternals* Internals;
-
-  //BTX
-  enum MatrixOrders
-    {
-    RowMajor,
-    ColumnMajor
-    };
-  //ETX
-
-  // Description:
-  // Runs through the XML element children to locate uniform
-  // variable elements and process them.
-  virtual void SetShaderParameters(vtkActor*, vtkRenderer*,
-                                   vtkXMLDataElement*);
-
-
-  // Description:
-  // Processes <Uniform /> elements.
-  void SetUniformParameter(vtkActor*, vtkRenderer*, vtkXMLDataElement*);
-
-  // Description:
-  // Processes <CameraUniform />
-  void SetCameraParameter(vtkActor*, vtkRenderer*, vtkXMLDataElement*);
-
-  // Description:
-  // Processes <PropertyUniform />
-  void SetPropertyParameter(vtkActor*, vtkRenderer*, vtkXMLDataElement*);
-
-  // Description:
-  // Processes <LightUniform />
-  void SetLightParameter(vtkActor*, vtkRenderer*, vtkXMLDataElement*);
-
-  // Description:
-  // Process <MatrixUniform />
-  void SetMatrixParameter(vtkActor*, vtkRenderer*, vtkXMLDataElement*);
-
-  // Description:
-  // Process <SamplerUniform />
-  void SetSamplerParameter(vtkActor*, vtkRenderer*, vtkXMLDataElement*);
-
-  // Description:
-  // Process <ApplicationUniform />
-  void SetApplicationParameter(vtkXMLDataElement*);
-
-  // Description:
-  // Equivalent to cgGLSetParameter and glUniform.
-  // Subclasses must override these and perform GLSL or Cg calls.
-  virtual void SetUniformParameter(const char* name, int numValues,
-    const int* value) =0;
-  virtual void SetUniformParameter(const char* name, int numValues,
-    const float* value)=0;
-  virtual void SetUniformParameter(const char* name, int numValues,
-    const double* value)=0;
-
-  // Description:
-  // Equivalent to cgGLSetMatrixParameterfc and glUniformMatrix.
-  // Subclasses must override these and perform GLSL or Cg calls.
-  virtual void SetMatrixParameter(const char* name, int numValues,
-    int order, const float* value)=0;
-  virtual void SetMatrixParameter(const char* name, int numValues,
-    int order, const double* value)=0;
-  virtual void SetMatrixParameter(const char* name, const char* state_matix_type,
-    const char* transform_type)=0;
-
-
-  // Description:
-  // Establishes the given texture as the uniform sampler to perform lookups on.
-  // The textureIndex argument corresponds to the indices of the textures in a
-  // vtkProperty.  Subclasses may have to cast the texture to vtkOpenGLTexture to
-  // obtain the GLuint for this texture.  Subclasses must override these
-  // and perform GLSL or Cg calls.
-  virtual void SetSamplerParameter(const char* name, vtkTexture* texture,
-                                   int textureIndex)=0;
-
-  vtkTimeStamp PassShaderVariablesTime;
-private:
-  vtkShader(const vtkShader&); // Not Implemented
-  void operator=(const vtkShader&); // Not Implemented
-};
-#endif //__vtkShader_h
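
For context, the per-render contract described in the class comment above (Compile, PassShaderVariables, Bind, render the actor, Unbind) amounts to roughly the following. This is only a sketch: 'RenderActorWithShader' is a hypothetical helper, not VTK API, and 'shader' must be a concrete subclass instance (Cg or GLSL) obtained from a shader program.

#include "vtkActor.h"
#include "vtkRenderer.h"
#include "vtkShader.h"

// Hypothetical helper illustrating the sequence that vtkShaderProgram drives
// for each delegate shader during a render.
void RenderActorWithShader(vtkShader* shader, vtkActor* actor, vtkRenderer* ren)
{
  if (!shader->Compile())                    // subclasses compile only once
    {
    return;                                  // compilation failed
    }
  shader->PassShaderVariables(actor, ren);   // push actor/property/light state
  shader->Bind();                            // meaningful for Cg, no-op for GLSL
  // ... the mapper renders the actor here ...
  shader->Unbind();                          // called after the actor is drawn
}
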
diff --git a/Rendering/Core/vtkShaderCodeLibrary.cxx b/Rendering/Core/vtkShaderCodeLibrary.cxx
deleted file mode 100644
index afe5adb..0000000
--- a/Rendering/Core/vtkShaderCodeLibrary.cxx
+++ /dev/null
@@ -1,115 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShaderCodeLibrary.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkShaderCodeLibrary.h"
-
-#include "vtkObjectFactory.h"
-#include "vtkShaderCodeLibraryMacro.h"
-
-#ifndef vtkShaderCodeLibraryMacro
-  #define vtkShaderCodeLibraryMacro(name) \
-    vtkGenericWarningMacro("VTK is not built with shading support." \
-      "No shaders are available.");
-#endif
-
-
-#include <map>
-#include <string>
-#include <vtksys/SystemTools.hxx>
-
-class vtkShaderCodeLibrary::vtkInternal
-{
-public:
-  std::map<std::string, std::string> Codes;
-  const char* GetShaderCode(const char* name)
-    {
-    std::map<std::string, std::string>::iterator iter;
-    iter = this->Codes.find(name);
-    if (iter != this->Codes.end())
-      {
-      return iter->second.c_str();
-      }
-    return NULL;
-    }
-};
-
-vtkShaderCodeLibrary::vtkInternal* vtkShaderCodeLibrary::Internal = 0;
-
-vtkShaderCodeLibrary::vtkInternalCleanup vtkShaderCodeLibrary::Cleanup;
-vtkShaderCodeLibrary::vtkInternalCleanup::~vtkInternalCleanup()
-{
-  delete vtkShaderCodeLibrary::Internal;
-  vtkShaderCodeLibrary::Internal = 0;
-}
-
-
-vtkStandardNewMacro(vtkShaderCodeLibrary);
-//-----------------------------------------------------------------------------
-vtkShaderCodeLibrary::vtkShaderCodeLibrary()
-{
-}
-
-//-----------------------------------------------------------------------------
-vtkShaderCodeLibrary::~vtkShaderCodeLibrary()
-{
-}
-
-//-----------------------------------------------------------------------------
-char* vtkShaderCodeLibrary::GetShaderCode(const char* name)
-{
-  if (!name || !*name)
-    {
-    return 0;
-    }
-
-  if (vtkShaderCodeLibrary::Internal)
-    {
-    const char* code = vtkShaderCodeLibrary::Internal->GetShaderCode(name);
-    if (code)
-      {
-      return vtksys::SystemTools::DuplicateString(code);
-      }
-    }
-
-  // CMake sets VTK_SHADER_CODE_LIBRARY_CHUNK to be the
-  // chunk of code that does name comparisons and
-  // calls the appropriate method from the vtk*ShaderLibrary.
-  vtkShaderCodeLibraryMacro(name)
-  return 0;
-}
-
-//-----------------------------------------------------------------------------
-const char** vtkShaderCodeLibrary::GetListOfShaderCodeNames()
-{
-  return ::ListOfShaderNames;
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderCodeLibrary::RegisterShaderCode(const char* name, const char* code)
-{
-  if (name && code)
-    {
-    if (!vtkShaderCodeLibrary::Internal)
-      {
-      vtkShaderCodeLibrary::Internal = new vtkShaderCodeLibrary::vtkInternal();
-      }
-    vtkShaderCodeLibrary::Internal->Codes[name] = code;
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderCodeLibrary::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
diff --git a/Rendering/Core/vtkShaderCodeLibrary.h b/Rendering/Core/vtkShaderCodeLibrary.h
deleted file mode 100644
index 884c14f..0000000
--- a/Rendering/Core/vtkShaderCodeLibrary.h
+++ /dev/null
@@ -1,80 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShaderCodeLibrary.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkShaderCodeLibrary - Library for Hardware Shaders.
-// .SECTION Description
-// This class provides the hardware shader code.
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkShaderCodeLibrary_h
-#define __vtkShaderCodeLibrary_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkObject.h"
-
-class VTKRENDERINGCORE_EXPORT vtkShaderCodeLibrary : public vtkObject
-{
-public:
-  static vtkShaderCodeLibrary* New();
-  vtkTypeMacro(vtkShaderCodeLibrary, vtkObject);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Obtain the code for the shader with given name.
-  // Note that Cg shader names are prefixed with CG and
-  // GLSL shader names are prefixed with GLSL.
-  // This method allocates memory. It's the responsibility
-  // of the caller to free this memory.
-  static char* GetShaderCode(const char* name);
-
-  // Description:
-  // Returns an array of pointers to char strings that are
-  // the names of the shader codes provided by the library.
-  // The end of the array is marked by a null pointer.
-  static const char** GetListOfShaderCodeNames();
-
-  // Description:
-  // Provides for registering shader code. This overrides the compiled in shader
-  // codes.
-  static void RegisterShaderCode(const char* name, const char* code);
-//BTX
-protected:
-  vtkShaderCodeLibrary();
-  ~vtkShaderCodeLibrary();
-
-private:
-  vtkShaderCodeLibrary(const vtkShaderCodeLibrary&); // Not implemented.
-  void operator=(const vtkShaderCodeLibrary&); // Not implemented.
-
-  // vtkInternalCleanup is used to destroy Internal ptr when the application
-  // exits.
-  class vtkInternalCleanup
-    {
-  public:
-    vtkInternalCleanup() {}
-    ~vtkInternalCleanup();
-    };
-
-  friend class vtkInternalCleanup;
-  static vtkInternalCleanup Cleanup;
-
-  // vtkInternal is used to maintain user registered shader codes.
-  class vtkInternal;
-  static vtkInternal* Internal;
-//ETX
-};
-
-#endif
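
The library removed above is a static registry keyed by shader name. A minimal usage sketch follows; the shader name and source string are purely illustrative, not shaders shipped with VTK.

#include "vtkShaderCodeLibrary.h"

void RegisterAndFetchExample()
{
  const char* source = "void propFuncFS() {}";   // illustrative GLSL source
  vtkShaderCodeLibrary::RegisterShaderCode("MyGLSLShader", source);

  // GetShaderCode() duplicates the string; the caller owns the copy.
  char* code = vtkShaderCodeLibrary::GetShaderCode("MyGLSLShader");
  if (code)
    {
    // ... hand the code to a shader ...
    delete [] code;
    }
}
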
diff --git a/Rendering/Core/vtkShaderDeviceAdapter.cxx b/Rendering/Core/vtkShaderDeviceAdapter.cxx
deleted file mode 100644
index a385cd0..0000000
--- a/Rendering/Core/vtkShaderDeviceAdapter.cxx
+++ /dev/null
@@ -1,38 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShaderDeviceAdapter.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkShaderDeviceAdapter.h"
-
-#include "vtkObjectFactory.h"
-#include "vtkShaderProgram.h"
-
-
-//---------------------------------------------------------------------------
-vtkShaderDeviceAdapter::vtkShaderDeviceAdapter()
-{
-  this->ShaderProgram = 0;
-}
-
-//---------------------------------------------------------------------------
-vtkShaderDeviceAdapter::~vtkShaderDeviceAdapter()
-{
-  this->SetShaderProgram(0);
-}
-
-//---------------------------------------------------------------------------
-void vtkShaderDeviceAdapter::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-  os << indent << "ShaderProgram: " << this->ShaderProgram << endl;
-}
diff --git a/Rendering/Core/vtkShaderDeviceAdapter.h b/Rendering/Core/vtkShaderDeviceAdapter.h
deleted file mode 100644
index b062d46..0000000
--- a/Rendering/Core/vtkShaderDeviceAdapter.h
+++ /dev/null
@@ -1,81 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShaderDeviceAdapter.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkShaderDeviceAdapter - an adapter to pass generic vertex attributes
-// to the rendering pipeline.
-// .SECTION Description
-// This class is an adapter used to pass generic vertex attributes to the
-// rendering pipeline.
-// Since the mechanism for doing so changes based on the shading language
-// used, this class merely defines the API and subclasses provide the
-// implementations for the specific shading languages, currently Cg and
-// GLSL.
-// .SECTION Thanks
-// Support for generic vertex attributes in VTK was contributed in
-// collaboration with Stephane Ploix at EDF.
-
-#ifndef __vtkShaderDeviceAdapter_h
-#define __vtkShaderDeviceAdapter_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkObject.h"
-
-class vtkShaderProgram;
-
-class VTKRENDERINGCORE_EXPORT vtkShaderDeviceAdapter : public vtkObject
-{
-public:
-  vtkTypeMacro(vtkShaderDeviceAdapter, vtkObject);
-  virtual void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Description:
-  // Sends a single attribute to the graphics card.
-  // The attrname parameter identifies the name of the attribute.
-  // The components parameter gives the number of
-  // components in the attribute.  In general, components must be between
-  // 1 and 4, but a rendering system may impose even more constraints.  The
-  // type parameter is a VTK type enumeration (VTK_FLOAT, VTK_INT, etc.).
-  // Again, a rendering system may not support all types for all
-  // attributes.  The attribute parameter is the actual data for the
-  // attribute.
-  // If offset is specified, it is added to the attribute pointer after it
-  // has been cast to the proper type.
-  virtual void SendAttribute(const char* attrname,
-    int components, int type,
-    const void* attribute, unsigned long offset=0) = 0;
-
-  // Description:
-  // Set the shader program which is being updated by this device adapter.
-  // The shader program is not reference counted to avoid reference loops.
-  void SetShaderProgram(vtkShaderProgram* pgm)
-    { this->ShaderProgram = pgm; }
-  vtkGetObjectMacro(ShaderProgram, vtkShaderProgram);
-
-  // Description:
-  // This method is called before rendering. This gives the shader device
-  // adapter an opportunity to collect information, such as attribute indices
-  // that it will need while rendering.
-  virtual void PrepareForRender() = 0;
-protected:
-  vtkShaderDeviceAdapter();
-  ~vtkShaderDeviceAdapter();
-
-  vtkShaderProgram* ShaderProgram;
-
-private:
-  vtkShaderDeviceAdapter(const vtkShaderDeviceAdapter&); // Not implemented
-  void operator=(const vtkShaderDeviceAdapter&); // Not implemented
-};
-
-#endif
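
As a rough sketch of how this adapter was driven (mirroring, not reproducing, the painter code later in this patch): the adapter comes from the property's shader program, is prepared before drawing, and then receives one generic attribute per vertex inside the draw loop. The attribute name "MyAttribute" and the 'tuple' pointer below are hypothetical.

#include "vtkProperty.h"
#include "vtkShaderDeviceAdapter.h"
#include "vtkShaderProgram.h"

void SendGenericAttribute(vtkProperty* property, const float tuple[3])
{
  vtkShaderProgram* program = property->GetShaderProgram();
  vtkShaderDeviceAdapter* adapter =
    program ? program->GetShaderDeviceAdapter() : 0;
  if (!adapter)
    {
    return;
    }
  adapter->PrepareForRender();           // lets the adapter cache attribute indices
  // Per vertex, typically between glBegin()/glEnd():
  adapter->SendAttribute("MyAttribute",  // name mapped in the material XML
                         3, VTK_FLOAT,   // three float components
                         tuple);         // pointer to the attribute data
}
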
diff --git a/Rendering/Core/vtkShaderDeviceAdapter2.cxx b/Rendering/Core/vtkShaderDeviceAdapter2.cxx
index f520407..047e185 100644
--- a/Rendering/Core/vtkShaderDeviceAdapter2.cxx
+++ b/Rendering/Core/vtkShaderDeviceAdapter2.cxx
@@ -15,7 +15,6 @@
 #include "vtkShaderDeviceAdapter2.h"
 
 #include "vtkObjectFactory.h"
-#include "vtkShaderProgram.h"
 
 //---------------------------------------------------------------------------
 vtkShaderDeviceAdapter2::vtkShaderDeviceAdapter2()
diff --git a/Rendering/Core/vtkShaderProgram.cxx b/Rendering/Core/vtkShaderProgram.cxx
deleted file mode 100644
index 76100bf..0000000
--- a/Rendering/Core/vtkShaderProgram.cxx
+++ /dev/null
@@ -1,260 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShaderProgram.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-/*
- * Copyright 2004 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-#include "vtkShaderProgram.h"
-
-#include "vtkActor.h"
-#include "vtkCollection.h"
-#include "vtkCollectionIterator.h"
-#include "vtkInstantiator.h"
-#include "vtkObjectFactory.h"
-#include "vtkRenderer.h"
-#include "vtkRenderWindow.h"
-#include "vtkShader.h"
-#include "vtkToolkits.h" // for VTK_USE_*_SHADERS
-#include "vtkShaderDeviceAdapter.h"
-#include "vtkXMLDataElement.h"
-#include "vtkXMLMaterial.h"
-#include "vtkXMLShader.h"
-
-vtkCxxSetObjectMacro(vtkShaderProgram, Material, vtkXMLMaterial);
-//-----------------------------------------------------------------------------
-vtkShaderProgram::vtkShaderProgram()
-{
-  this->Material= 0;
-  this->ShaderCollection = vtkCollection::New();
-  this->ShaderCollectionIterator = this->ShaderCollection->NewIterator();
-
-  this->GLExtensionsLoaded = 0;
-  this->ShaderDeviceAdapter = NULL;
-}
-
-//-----------------------------------------------------------------------------
-vtkShaderProgram::~vtkShaderProgram()
-{
-  this->SetShaderDeviceAdapter(0);
-  this->SetMaterial(0);
-  this->ShaderCollection->Delete();
-  this->ShaderCollectionIterator->Delete();
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::ReleaseGraphicsResources(vtkWindow *w)
-{
-  for (this->ShaderCollectionIterator->InitTraversal();
-    !this->ShaderCollectionIterator->IsDoneWithTraversal();
-    this->ShaderCollectionIterator->GoToNextItem())
-    {
-    vtkShader* shader = vtkShader::SafeDownCast(
-      this->ShaderCollectionIterator->GetCurrentObject());
-    shader->ReleaseGraphicsResources(w);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::SetShaderDeviceAdapter(vtkShaderDeviceAdapter* adapter)
-{
-  if (this->ShaderDeviceAdapter)
-    {
-    this->ShaderDeviceAdapter->SetShaderProgram(0);
-    }
-  vtkSetObjectBodyMacro(ShaderDeviceAdapter, vtkShaderDeviceAdapter, adapter);
-  if (this->ShaderDeviceAdapter)
-    {
-    this->ShaderDeviceAdapter->SetShaderProgram(this);
-    }
-}
-
-//-----------------------------------------------------------------------------
-vtkCollectionIterator* vtkShaderProgram::NewShaderIterator()
-{
-  return this->ShaderCollection->NewIterator();
-}
-
-//-----------------------------------------------------------------------------
-// conditional build - only include shaders that exist, based on the
-// user-selected build options for shader types.
-vtkShaderProgram* vtkShaderProgram::CreateShaderProgram(int shaderType)
-{
-  vtkShaderProgram* retval = 0;
-  switch ( shaderType )
-    {
-  case vtkXMLShader::LANGUAGE_CG:
-    retval = vtkShaderProgram::SafeDownCast( vtkInstantiator::CreateInstance( "vtkCgShaderProgram" ) );
-    if ( ! retval )
-      {
-      vtkGenericWarningMacro("Cg shaders not supported.");
-      }
-    break;
-  case vtkXMLShader::LANGUAGE_GLSL:
-    retval = vtkShaderProgram::SafeDownCast( vtkInstantiator::CreateInstance( "vtkGLSLShaderProgram" ) );
-    if ( ! retval )
-      {
-      vtkGenericWarningMacro("GLSL shaders not supported.");
-      }
-    break;
-  default:
-    vtkGenericWarningMacro( "Unknown shader type " << shaderType );
-    break;
-    }
-  return retval;
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::ReadMaterial()
-{
-  if (this->ShaderCollection->GetNumberOfItems() != 0)
-    {
-    vtkErrorMacro("ReadMaterial() can only be called on a clean ShaderProgram."
-      "This shader program already has some shaders loaded.");
-    return;
-    }
-
-  if (!this->Material)
-    {
-    vtkErrorMacro("No Material set to read.");
-    return;
-    }
-
-  int cc;
-  int max = this->Material->GetNumberOfVertexShaders();
-  for (cc=0; cc < max; cc++)
-    {
-    vtkShader* shader = this->NewShader();
-    shader->SetXMLShader(this->Material->GetVertexShader(cc));
-    this->AddShader(shader);
-    shader->Delete();
-    }
-  vtkDebugMacro(<< max << " Vertex shaders added.");
-
-  max = this->Material->GetNumberOfFragmentShaders();
-  for (cc=0; cc < max; cc++)
-    {
-    vtkShader* shader = this->NewShader();
-    shader->SetXMLShader(this->Material->GetFragmentShader(cc));
-    this->AddShader(shader);
-    shader->Delete();
-    }
-  vtkDebugMacro(<< max << " Fragment shaders added.");
-}
-
-//-----------------------------------------------------------------------------
-int vtkShaderProgram::AddShader(vtkShader* shader)
-{
-  int index = this->GetNumberOfShaders();
-  this->ShaderCollection->AddItem(shader);
-  return index;
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::RemoveShader(vtkShader* shader)
-{
-  this->ShaderCollection->RemoveItem(shader);
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::RemoveShader(int index)
-{
-  this->ShaderCollection->RemoveItem(index);
-}
-
-//-----------------------------------------------------------------------------
-int vtkShaderProgram::GetNumberOfShaders()
-{
-  return this->ShaderCollection->GetNumberOfItems();
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::AddShaderVariable(const char* name, int numVars, int* x)
-{
-  vtkCollectionIterator* iter = this->ShaderCollectionIterator;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-    iter->GoToNextItem())
-    {
-    vtkShader* shader = vtkShader::SafeDownCast(iter->GetCurrentObject());
-    shader->AddShaderVariable(name, numVars, x);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::AddShaderVariable(const char* name, int numVars, float* x)
-{
-  vtkCollectionIterator* iter = this->ShaderCollectionIterator;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-    iter->GoToNextItem())
-    {
-    vtkShader* shader = vtkShader::SafeDownCast(iter->GetCurrentObject());
-    shader->AddShaderVariable(name, numVars, x);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::AddShaderVariable(const char* name, int numVars, double* x)
-{
-  vtkCollectionIterator* iter = this->ShaderCollectionIterator;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-    iter->GoToNextItem())
-    {
-    vtkShader* shader = vtkShader::SafeDownCast(iter->GetCurrentObject());
-    shader->AddShaderVariable(name, numVars, x);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::PostRender(vtkActor*, vtkRenderer*)
-{
-  vtkCollectionIterator* iter = this->ShaderCollectionIterator;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-    iter->GoToNextItem())
-    {
-    vtkShader* shader = vtkShader::SafeDownCast(iter->GetCurrentObject());
-    shader->Unbind();
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkShaderProgram::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-  os << indent << "Material: ";
-  if (this->Material)
-    {
-    os << endl;
-    this->Material->PrintSelf(os, indent.GetNextIndent());
-    }
-  else
-    {
-    os << "(none)" << endl;
-    }
-
-  vtkCollectionIterator* iter = this->ShaderCollectionIterator;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-    iter->GoToNextItem())
-    {
-    vtkShader* shader = vtkShader::SafeDownCast(iter->GetCurrentObject());
-    os << indent << "Shader: " << endl;
-    shader->PrintSelf(os, indent.GetNextIndent());
-    }
-  os << indent << "ShaderDeviceAdapter: " << this->ShaderDeviceAdapter << endl;
-}
diff --git a/Rendering/Core/vtkShaderProgram.h b/Rendering/Core/vtkShaderProgram.h
deleted file mode 100644
index a2b05fa..0000000
--- a/Rendering/Core/vtkShaderProgram.h
+++ /dev/null
@@ -1,168 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShaderProgram.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2004 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-// .NAME vtkShaderProgram
-// .SECTION Description
-// vtkShaderProgram is a superclass for managing Hardware Shaders
-// defined in the XML Material file and interfacing VTK to those shaders.
-// Its concrete descendants are responsible for installing vertex and
-// fragment programs to the graphics hardware.
-//
-// .SECTION Shader Operations are shader library operations that are performed
-// on individual shaders, that is, without consideration of the partner shader.
-//
-// .SECTION Program Operations are shader library operations that treat the
-// vertex and fragment shader as a single unit.
-//
-// .SECTION Design
-// This class is a Strategy pattern for 'Program' operations, which treat
-// vertex/fragment shader pairs as a single 'Program', as required by some
-// shader libraries (GLSL). Typically, 'Shader' operations are delegated
-// to instances of vtkShader (managed by descendants of this class)
-// while 'Program' operations are handled by descendants of this class,
-// vtkCgShaderProgram, vtkGLSLShaderProgram.
-//
-// .SECTION See Also
-// vtkCgShaderProgram, vtkGLSLShaderProgram
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkShaderProgram_h
-#define __vtkShaderProgram_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkObject.h"
-
-class vtkActor;
-class vtkCollection;
-class vtkCollectionIterator;
-class vtkRenderer;
-class vtkRenderWindow;
-class vtkShader;
-class vtkWindow;
-class vtkXMLMaterial;
-class vtkShaderDeviceAdapter;
-
-// manages all shaders defined in the XML file
-// especially the part about sending things to the card
-class VTKRENDERINGCORE_EXPORT vtkShaderProgram : public vtkObject
-{
-public:
-  vtkTypeMacro(vtkShaderProgram, vtkObject);
-  void PrintSelf(ostream &os, vtkIndent indent);
-
-  // .Description:
-  // Accessors for the Material.
-  vtkGetObjectMacro( Material, vtkXMLMaterial);
-  virtual void SetMaterial( vtkXMLMaterial* );
-
-  // .Description:
-  // Add shaders. Returns the index of the shader.
-  int AddShader(vtkShader* shader);
-
-  // Description:
-  // Remove a shader at the given index.
-  void RemoveShader(int index);
-
-  // Description:
-  // Removes the given shader.
-  void RemoveShader(vtkShader* shader);
-
-  // Description:
-  // Returns a new iterator to iterate over the shaders.
-  vtkCollectionIterator* NewShaderIterator();
-
-  // Description:
-  // Returns the number of shaders available in this
-  // shader program.
-  int GetNumberOfShaders();
-
-  // .Description
-  // This static function creates a concrete shader program of a specific type.
-  // This is used to create a program for the language specified in the XML file.
-  static vtkShaderProgram* CreateShaderProgram( int type );
-
-  // .Description
-  // Read the material file to get necessary shader info. Synchronize with
-  // delegate shaders.
-  virtual void ReadMaterial();
-
-  // .Description
-  // Load, compile, install and initialize shaders. These operations may
-  // be delegated to the shaders themselves or handled in descendants of
-  // this class.
-  virtual void Render( vtkActor*, vtkRenderer* )=0;
-
-  // Description:
-  // Provide values to initialize shader variables. This is a conduit to initialize
-  // shader variables that change over time, useful for animation, gui widget inputs,
-  // etc.
-  // name - hardware name of the uniform variable
-  // numVars - number of variables being set
-  // x - values
-  virtual void AddShaderVariable(const char* name, int numVars, int* x);
-  virtual void AddShaderVariable(const char* name, int numVars, float* x);
-  virtual void AddShaderVariable(const char* name, int numVars, double* x);
-
-  // Description:
-  // Called to unload the shaders after the actor has been rendered.
-  virtual void PostRender(vtkActor*, vtkRenderer*);
-
-  // Description:
-  // Release any graphics resources that are being consumed by this actor.
-  // The parameter window could be used to determine which graphic
-  // resources to release.
-  virtual void ReleaseGraphicsResources(vtkWindow *);
-
-  // Description:
-  // Get the vtkShaderDeviceAdapter which can be used to execute this
-  // shader program.
-  vtkGetObjectMacro(ShaderDeviceAdapter, vtkShaderDeviceAdapter);
-
-protected:
-  vtkShaderProgram();
-  ~vtkShaderProgram();
-
-  vtkXMLMaterial* Material;
-  vtkCollection* ShaderCollection;
-  vtkCollectionIterator* ShaderCollectionIterator;
-
-  vtkSetMacro(GLExtensionsLoaded, int);
-  vtkGetMacro(GLExtensionsLoaded, int);
-  int GLExtensionsLoaded;
-  virtual void LoadExtensions(vtkRenderWindow*) {}
-
-  // Subclasses must set the shader device adapter of the right type.
-  void SetShaderDeviceAdapter(vtkShaderDeviceAdapter*);
-
-  // Description:
-  // Must be overloaded by subclasses to create the shader of appropriate type.
-  virtual vtkShader* NewShader() =0;
-private:
-  vtkShaderProgram(const vtkShaderProgram&); // Not Implemented
-  void operator=(const vtkShaderProgram&); // Not Implemented
-
-  vtkShaderDeviceAdapter* ShaderDeviceAdapter;
-};
-#endif //__vtkShaderProgram_h
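
Putting the pieces together, the typical lifecycle of the program class removed above looks roughly like the sketch below; a GLSL-capable build is assumed, the uniform name "AppTime" is illustrative, and 'material', 'actor' and 'renderer' are assumed to exist in the caller.

#include "vtkActor.h"
#include "vtkRenderer.h"
#include "vtkShaderProgram.h"
#include "vtkXMLMaterial.h"
#include "vtkXMLShader.h"

void UseShaderProgram(vtkXMLMaterial* material, vtkActor* actor, vtkRenderer* renderer)
{
  vtkShaderProgram* program =
    vtkShaderProgram::CreateShaderProgram(vtkXMLShader::LANGUAGE_GLSL);
  if (!program)
    {
    return;                                     // GLSL support not compiled in
    }
  program->SetMaterial(material);               // XML description of the shaders
  program->ReadMaterial();                      // one delegate vtkShader per XML shader
  float t = 0.5f;
  program->AddShaderVariable("AppTime", 1, &t); // application-supplied uniform
  program->Render(actor, renderer);             // compile/install, done by the subclass
  // ... the actor is rendered ...
  program->PostRender(actor, renderer);         // unbinds the delegate shaders
  program->Delete();
}
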
diff --git a/Rendering/Core/vtkStandardPolyDataPainter.cxx b/Rendering/Core/vtkStandardPolyDataPainter.cxx
index c303394..8b33079 100644
--- a/Rendering/Core/vtkStandardPolyDataPainter.cxx
+++ b/Rendering/Core/vtkStandardPolyDataPainter.cxx
@@ -38,9 +38,7 @@
 #include "vtkProperty.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
-#include "vtkShaderDeviceAdapter.h"
 #include "vtkShaderDeviceAdapter2.h"
-#include "vtkShaderProgram.h"
 #include "vtkSmartPointer.h"
 #include "vtkTimerLog.h"
 #include "vtkTriangle.h"
@@ -119,7 +117,6 @@ void vtkStandardPolyDataPainter::ProcessInformation(vtkInformation* info)
 
 //-----------------------------------------------------------------------------
 void vtkStandardPolyDataPainter::UpdateGenericAttributesCache(
-  vtkShaderDeviceAdapter* shaderDevice,
   vtkShaderDeviceAdapter2* shaderDevice2)
 {
   if (this->Internal->Mappings)
@@ -167,10 +164,6 @@ void vtkStandardPolyDataPainter::UpdateGenericAttributesCache(
         // This caches the attribute index.
         // This is essential since we don't want to call GetAttributeLocation in
         // glBegin()/glEnd().
-        if(shaderDevice)
-          {
-          shaderDevice->SendAttribute(vertexAttributeName, 0, 0, 0, 0);
-          }
         if(shaderDevice2)
           {
           shaderDevice2->SendAttribute(vertexAttributeName, 0, 0, 0, 0);
@@ -203,24 +196,14 @@ void vtkStandardPolyDataPainter::RenderInternal(vtkRenderer* renderer,
   vtkProperty* property = actor->GetProperty();
   vtkIdType startCell = 0;
   int interpolation = property->GetInterpolation();
-  vtkShaderDeviceAdapter* shaderDevice=0;
   vtkShaderDeviceAdapter2* shaderDevice2=0;
   this->Internal->PointAttributesCache.clear();
   this->Internal->CellAttributesCache.clear();
   if(property->GetShading())
     {
-    if(property->GetShaderProgram())
-      {
-      // Preprocess the generic vertex attributes that we need to pass to the
-      // shader.
-      shaderDevice = property->GetShaderProgram()->GetShaderDeviceAdapter();
-      }
-    else
-      {
-      // Preprocess the generic vertex attributes that we need to pass to the
-      // shader.
-      shaderDevice2 = property->GetShaderDeviceAdapter2();
-      }
+    // Preprocess the generic vertex attributes that we need to pass to the
+    // shader.
+    shaderDevice2 = property->GetShaderDeviceAdapter2();
     }
 
   if(!shaderDevice2)
@@ -231,29 +214,24 @@ void vtkStandardPolyDataPainter::RenderInternal(vtkRenderer* renderer,
         this->GetInformation()->Get(SHADER_DEVICE_ADAPTOR()));
     }
 
-  if(shaderDevice)
-    {
-    shaderDevice->PrepareForRender();
-    }
   if(shaderDevice2)
     {
     shaderDevice2->PrepareForRender();
     }
 
-  this->UpdateGenericAttributesCache(shaderDevice,shaderDevice2);
-
+  this->UpdateGenericAttributesCache(shaderDevice2);
 
   if (typeflags & vtkPainter::VERTS)
     {
     this->DrawCells(VTK_POLY_VERTEX, pd->GetVerts(), startCell,
-                    shaderDevice, shaderDevice2, renderer, 0, interpolation);
+                    shaderDevice2, renderer, 0, interpolation);
     }
 
   startCell += pd->GetNumberOfVerts();
   if (typeflags & vtkPainter::LINES)
     {
     this->DrawCells(VTK_POLY_LINE, pd->GetLines(), startCell,
-                    shaderDevice, shaderDevice2, renderer, 0, interpolation);
+                    shaderDevice2, renderer, 0, interpolation);
     }
 
   startCell += pd->GetNumberOfLines();
@@ -263,14 +241,14 @@ void vtkStandardPolyDataPainter::RenderInternal(vtkRenderer* renderer,
     if (property->GetRepresentation() == VTK_WIREFRAME)
       {
       this->DrawCells(VTK_TETRA, pd->GetPolys(), startCell,
-                      shaderDevice, shaderDevice2, renderer,
+                      shaderDevice2, renderer,
                       this->BuildNormals, interpolation);
       }
     else
 #endif
       {
       this->DrawCells(VTK_POLYGON, pd->GetPolys(), startCell,
-                      shaderDevice, shaderDevice2, renderer,
+                      shaderDevice2, renderer,
                       this->BuildNormals, interpolation);
       }
     }
@@ -279,7 +257,7 @@ void vtkStandardPolyDataPainter::RenderInternal(vtkRenderer* renderer,
   if (typeflags & vtkPainter::STRIPS)
     {
     this->DrawCells(VTK_TRIANGLE_STRIP, pd->GetStrips(), startCell,
-      shaderDevice, shaderDevice2, renderer, this->BuildNormals,
+                    shaderDevice2, renderer, this->BuildNormals,
                     interpolation);
     }
 
@@ -301,7 +279,6 @@ void vtkStandardPolyDataPainter::DrawCells(
   int mode,
   vtkCellArray *connectivity,
   vtkIdType startCellId,
-  vtkShaderDeviceAdapter *shaderDevice,
   vtkShaderDeviceAdapter2 *shaderDevice2,
   vtkRenderer *renderer,
   int buildnormals,
@@ -422,26 +399,9 @@ void vtkStandardPolyDataPainter::DrawCells(
       fielddata_cellId++;
       }
 
-    // Send generic attributes associated with the cell. Shaders style 1.
-    vtkInternal::InfoVector::iterator gaIter = this->Internal->CellAttributesCache.begin();
-    for (; shaderDevice && gaIter != this->Internal->CellAttributesCache.end(); ++gaIter)
-      {
-      vtkDataArray* a = gaIter->Array;
-      unsigned int mappingsIndex = gaIter->MappingsIndex;
-      int numc = a->GetNumberOfComponents();
-      int siComp = this->Internal->Mappings->GetComponent(mappingsIndex);
-      // if siComp==-1, then all components of the array are sent,
-      // otherwise only the chosen component is sent.
-      shaderDevice->SendAttribute(
-        this->Internal->Mappings->GetAttributeName(mappingsIndex),
-        (siComp>=0)? 1: numc,
-        a->GetDataType(),
-        (siComp>=0) ? a->GetVoidPointer(numc*cellId+siComp) :
-        a->GetVoidPointer(numc*cellId));
-      }
-
     // Send generic attributes associated with the cell. Shaders style 2.
-    gaIter = this->Internal->CellAttributesCache.begin();
+    vtkInternal::InfoVector::iterator gaIter =
+        this->Internal->CellAttributesCache.begin();
     for (; shaderDevice2 && gaIter != this->Internal->CellAttributesCache.end(); ++gaIter)
       {
       vtkDataArray* a = gaIter->Array;
@@ -515,26 +475,6 @@ void vtkStandardPolyDataPainter::DrawCells(
           VTK_DOUBLE, polyNorm);
         }
 
-      // Send generic attributes associated with the point. Shader style 1.
-      gaIter = this->Internal->PointAttributesCache.begin();
-      for (; shaderDevice && gaIter !=
-        this->Internal->PointAttributesCache.end(); ++gaIter)
-        {
-        vtkDataArray* a = gaIter->Array;
-        unsigned int mappingsIndex = gaIter->MappingsIndex;
-        int numc = a->GetNumberOfComponents();
-        int siComp = this->Internal->Mappings->GetComponent(mappingsIndex);
-        // if siComp==-1, then all components of the array are sent,
-        // otherwise only the chosen component is sent.
-        shaderDevice->SendAttribute(
-          this->Internal->Mappings->GetAttributeName(mappingsIndex),
-          (siComp>=0)? 1: numc,
-          a->GetDataType(),
-          (siComp>=0) ? a->GetVoidPointer(numc*pointId+siComp) :
-          a->GetVoidPointer(numc*pointId));
-        }
-
-
       // Send generic attributes associated with the point. Shader style 2.
       gaIter = this->Internal->PointAttributesCache.begin();
       for (; shaderDevice2 && gaIter !=
diff --git a/Rendering/Core/vtkStandardPolyDataPainter.h b/Rendering/Core/vtkStandardPolyDataPainter.h
index adaa579..5e3fdde 100644
--- a/Rendering/Core/vtkStandardPolyDataPainter.h
+++ b/Rendering/Core/vtkStandardPolyDataPainter.h
@@ -45,7 +45,6 @@ class vtkCellArray;
 class vtkDataArray;
 class vtkPointData;
 class vtkPoints;
-class vtkShaderDeviceAdapter;
 class vtkShaderDeviceAdapter2;
 
 class VTKRENDERINGCORE_EXPORT vtkStandardPolyDataPainter : public vtkPolyDataPainter
@@ -69,7 +68,6 @@ protected:
 
   void DrawCells(int mode, vtkCellArray *connectivity,
                  vtkIdType startCellId,
-                 vtkShaderDeviceAdapter *shaderDevice,
                  vtkShaderDeviceAdapter2 *shaderDevice2,
                  vtkRenderer *renderer,
                  int buildnormals, int interpolation);
@@ -79,8 +77,7 @@ protected:
   // since the last time this method was called.
   virtual void ProcessInformation(vtkInformation*);
 
-  void UpdateGenericAttributesCache(vtkShaderDeviceAdapter *shaderDevice,
-                                    vtkShaderDeviceAdapter2 *shaderDevice2);
+  void UpdateGenericAttributesCache(vtkShaderDeviceAdapter2 *shaderDevice2);
 
   vtkIdType TotalCells;
 private:
diff --git a/Rendering/Core/vtkTDxInteractorStyleCamera.cxx b/Rendering/Core/vtkTDxInteractorStyleCamera.cxx
index e0d6ff7..3711915 100644
--- a/Rendering/Core/vtkTDxInteractorStyleCamera.cxx
+++ b/Rendering/Core/vtkTDxInteractorStyleCamera.cxx
@@ -16,7 +16,7 @@
 #include "vtkTDxInteractorStyleCamera.h"
 
 #include "vtkTransform.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkCamera.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
diff --git a/Rendering/Core/vtkTextMapper.cxx b/Rendering/Core/vtkTextMapper.cxx
index 803f34e..ca9d35c 100644
--- a/Rendering/Core/vtkTextMapper.cxx
+++ b/Rendering/Core/vtkTextMapper.cxx
@@ -14,11 +14,25 @@
 =========================================================================*/
 #include "vtkTextMapper.h"
 
-#include "vtkTextProperty.h"
+#include "vtkActor2D.h"
+#include "vtkCellArray.h"
+#include "vtkFloatArray.h"
+#include "vtkImageData.h"
 #include "vtkObjectFactory.h"
+#include "vtkPointData.h"
+#include "vtkPoints.h"
+#include "vtkPolyData.h"
+#include "vtkPolyDataMapper2D.h"
+#include "vtkRenderer.h"
+#include "vtkStdString.h"
+#include "vtkTextProperty.h"
+#include "vtkTextRenderer.h"
+#include "vtkTexture.h"
+
+#include <algorithm>
 
 //----------------------------------------------------------------------------
-vtkAbstractObjectFactoryNewMacro(vtkTextMapper)
+vtkObjectFactoryNewMacro(vtkTextMapper)
 //----------------------------------------------------------------------------
 vtkCxxSetObjectMacro(vtkTextMapper,TextProperty,vtkTextProperty);
 
@@ -27,14 +41,38 @@ vtkCxxSetObjectMacro(vtkTextMapper,TextProperty,vtkTextProperty);
 vtkTextMapper::vtkTextMapper()
 {
   this->Input = NULL;
-  // consistent Register/unregister
   this->TextProperty = NULL;
-  this->SetTextProperty(vtkTextProperty::New());
-  this->TextProperty->Delete();
 
-  this->TextLines = NULL;
-  this->NumberOfLines = 0;
-  this->NumberOfLinesAllocated = 0;
+  vtkNew<vtkTextProperty> tprop;
+  this->SetTextProperty(tprop.GetPointer());
+
+  this->Points->SetNumberOfPoints(4);
+  this->Points->SetPoint(0, 0., 0., 0.);
+  this->Points->SetPoint(1, 0., 0., 0.);
+  this->Points->SetPoint(2, 0., 0., 0.);
+  this->Points->SetPoint(3, 0., 0., 0.);
+  this->PolyData->SetPoints(this->Points.GetPointer());
+
+  vtkNew<vtkCellArray> quad;
+  quad->InsertNextCell(4);
+  quad->InsertCellPoint(0);
+  quad->InsertCellPoint(1);
+  quad->InsertCellPoint(2);
+  quad->InsertCellPoint(3);
+  this->PolyData->SetPolys(quad.GetPointer());
+
+  vtkNew<vtkFloatArray> tcoords;
+  tcoords->SetNumberOfComponents(2);
+  tcoords->SetNumberOfTuples(4);
+  tcoords->SetTuple2(0, 0., 0.);
+  tcoords->SetTuple2(1, 0., 0.);
+  tcoords->SetTuple2(2, 0., 0.);
+  tcoords->SetTuple2(3, 0., 0.);
+  this->PolyData->GetPointData()->SetTCoords(tcoords.GetPointer());
+  this->Mapper->SetInputData(this->PolyData.GetPointer());
+
+  this->Texture->SetInputData(this->Image.GetPointer());
+  this->TextDims[0] = this->TextDims[1] = 0;
 }
 
 //----------------------------------------------------------------------------
@@ -50,21 +88,7 @@ void vtkTextMapper::ShallowCopy(vtkTextMapper *tm)
 //----------------------------------------------------------------------------
 vtkTextMapper::~vtkTextMapper()
 {
-  if (this->Input)
-    {
-    delete [] this->Input;
-    this->Input = NULL;
-    }
-
-  if (this->TextLines != NULL)
-    {
-    for (int i=0; i < this->NumberOfLinesAllocated; i++)
-      {
-      this->TextLines[i]->Delete();
-      }
-    delete [] this->TextLines;
-    }
-
+  delete [] this->Input;
   this->SetTextProperty(NULL);
 }
 
@@ -84,7 +108,30 @@ void vtkTextMapper::PrintSelf(ostream& os, vtkIndent indent)
     }
 
   os << indent << "Input: " << (this->Input ? this->Input : "(none)") << "\n";
-  os << indent << "NumberOfLines: " << this->NumberOfLines << "\n";
+
+  os << indent << "TextDims: "
+     << this->TextDims[0] << ", " << this->TextDims[1] << "\n";
+
+  os << indent << "CoordsTime: " << this->CoordsTime.GetMTime() << "\n";
+  os << indent << "TCoordsTime: " << this->TCoordsTime.GetMTime() << "\n";
+  os << indent << "Image:\n";
+  this->Image->PrintSelf(os, indent.GetNextIndent());
+  os << indent << "Points:\n";
+  this->Points->PrintSelf(os, indent.GetNextIndent());
+  os << indent << "PolyData:\n";
+  this->PolyData->PrintSelf(os, indent.GetNextIndent());
+  os << indent << "Mapper:\n";
+  this->Mapper->PrintSelf(os, indent.GetNextIndent());
+  os << indent << "Texture:\n";
+  this->Texture->PrintSelf(os, indent.GetNextIndent());
+}
+
+//----------------------------------------------------------------------------
+void vtkTextMapper::GetSize(vtkViewport *, int size[])
+{
+  UpdateImage();
+  size[0] = this->TextDims[0];
+  size[1] = this->TextDims[1];
 }
 
 //----------------------------------------------------------------------------
@@ -294,79 +341,54 @@ int vtkTextMapper::SetMultipleRelativeFontSize(vtkViewport *viewport,
   return fontSize;
 }
 
-
 //----------------------------------------------------------------------------
-// Parse the input and create multiple text mappers if multiple lines
-// (delimited by \n) are specified.
-void vtkTextMapper::SetInput(const char *input)
+void vtkTextMapper::RenderOverlay(vtkViewport *viewport, vtkActor2D *actor)
 {
-  if ( this->Input && input && (!strcmp(this->Input,input)))
+  vtkDebugMacro(<<"RenderOverlay called");
+  if (this->Input && this->Input[0])
     {
-    return;
-    }
-  if (this->Input)
-    {
-    delete [] this->Input;
-    }
-  if (input)
-    {
-    this->Input = new char[strlen(input)+1];
-    strcpy(this->Input,input);
-    }
-  else
-    {
-    this->Input = NULL;
-    }
-  this->Modified();
-
-  int numLines = this->GetNumberOfLines(input);
-
-  if ( numLines <= 1) // a line with no "\n"
-    {
-    this->NumberOfLines = numLines;
+    this->UpdateImage();
+    this->UpdateQuad(actor);
+    if (vtkRenderer *ren = vtkRenderer::SafeDownCast(viewport))
+      {
+      vtkDebugMacro(<<"Texture::Render called");
+      this->Texture->Render(ren);
+      }
     }
 
-  else //multiple lines
-    {
-    char *line;
-    int i;
+  vtkDebugMacro(<<"PolyData::RenderOverlay called");
+  this->Mapper->RenderOverlay(viewport, actor);
 
-    if ( numLines > this->NumberOfLinesAllocated )
-      {
-      // delete old stuff
-      if ( this->TextLines )
-        {
-        for (i=0; i < this->NumberOfLinesAllocated; i++)
-          {
-          this->TextLines[i]->Delete();
-          }
-        delete [] this->TextLines;
-        }
+  vtkDebugMacro(<<"Superclass::RenderOverlay called");
+  this->Superclass::RenderOverlay(viewport, actor);
+}
 
-      // allocate new text mappers
-      this->NumberOfLinesAllocated = numLines;
-      this->TextLines = new vtkTextMapper *[numLines];
-      for (i=0; i < numLines; i++)
-        {
-        this->TextLines[i] = vtkTextMapper::New();
-        }
-      } //if we need to reallocate
+//----------------------------------------------------------------------------
+void vtkTextMapper::ReleaseGraphicsResources(vtkWindow *win)
+{
+  this->Superclass::ReleaseGraphicsResources(win);
+  this->Texture->ReleaseGraphicsResources(win);
+}
 
-    // set the input strings
-    this->NumberOfLines = numLines;
-    for (i=0; i < this->NumberOfLines; i++)
-      {
-      line = this->NextLine(input, i);
-      this->TextLines[i]->SetInput( line );
-      delete [] line;
-      }
-    }
+//----------------------------------------------------------------------------
+unsigned long vtkTextMapper::GetMTime()
+{
+  unsigned long result = this->Superclass::GetMTime();
+  result = std::max(result, this->CoordsTime.GetMTime());
+  result = std::max(result, this->Image->GetMTime());
+  result = std::max(result, this->Points->GetMTime());
+  result = std::max(result, this->PolyData->GetMTime());
+  result = std::max(result, this->Mapper->GetMTime());
+  result = std::max(result, this->Texture->GetMTime());
+  return result;
 }
 
 //----------------------------------------------------------------------------
 // Determine the number of lines in the Input string (delimited by "\n").
+#ifndef VTK_LEGACY_REMOVE
 int vtkTextMapper::GetNumberOfLines(const char *input)
 {
+  VTK_LEGACY_BODY(vtkTextMapper::GetNumberOfLines, "VTK 6.0")
   if ( input == NULL || input[0] == '\0')
     {
     return 0;
@@ -386,100 +408,168 @@ int vtkTextMapper::GetNumberOfLines(const char *input)
 
   return numLines;
 }
+#endif // VTK_LEGACY_REMOVE
 
-//----------------------------------------------------------------------------
-// Get the next \n delimited line. Returns a string that
-// must be freed by the calling function.
-char *vtkTextMapper::NextLine(const char *input, int lineNum)
+//------------------------------------------------------------------------------
+namespace {
+// Given an Actor2D position coordinate (viewport, bottom left corner of actor)
+// and image dimensions, adjust the position to reflect the supplied alignment.
+void AdjustOrigin(int hAlign, int vAlign, int origin[2], const int dims[2])
 {
-  const char *ptr, *ptrEnd;
-  int strLen;
-  char *line;
-
-  ptr = input;
-  for (int i=0; i != lineNum; i++)
+  switch (hAlign)
     {
-    ptr = strstr(ptr,"\n");
-    ptr++;
+    default:
+    case VTK_TEXT_LEFT:
+      break;
+    case VTK_TEXT_CENTERED:
+      origin[0] -= dims[0] / 2;
+      break;
+    case VTK_TEXT_RIGHT:
+      origin[0] -= dims[0];
+      break;
     }
-  ptrEnd = strstr(ptr,"\n");
-  if ( ptrEnd == NULL )
+
+  switch (vAlign)
     {
-    ptrEnd = strchr(ptr, '\0');
+    default:
+    case VTK_TEXT_TOP:
+      origin[1] -= dims[1];
+      break;
+    case VTK_TEXT_CENTERED:
+      origin[1] -= dims[1] / 2;
+      break;
+    case VTK_TEXT_BOTTOM:
+      break;
     }
-
-  strLen = ptrEnd - ptr;
-  line = new char[strLen+1];
-  strncpy(line, ptr, strLen);
-  line[strLen] = '\0';
-
-  return line;
+}
 }
 
 //----------------------------------------------------------------------------
-// Get the size of a multi-line text string
-void vtkTextMapper::GetMultiLineSize(vtkViewport* viewport, int size[2])
+void vtkTextMapper::UpdateQuad(vtkActor2D *actor)
 {
-  int i;
-  int lineSize[2];
+  vtkDebugMacro(<<"UpdateQuad called");
 
-  vtkTextProperty *tprop = this->GetTextProperty();
-  if (!tprop)
+  // Ensure that the image is up to date.
+  UpdateImage();
+
+  // Update texture coordinates:
+  if (this->Image->GetMTime() > this->TCoordsTime)
     {
-    vtkErrorMacro(<<"Need text property to get multiline size of mapper");
-    size[0] = size[1] = 0;
-    return;
+    int dims[3];
+    this->Image->GetDimensions(dims);
+
+    // Add a fudge factor to the texture coordinates to prevent the top
+    // row of pixels from being truncated on some systems. The coordinates
+    // are calculated to be centered on a texel and trim the padding from the
+    // image. (padding is often added to create textures that have power-of-two
+    // dimensions)
+    float tw = static_cast<float>(this->TextDims[0]);
+    float th = static_cast<float>(this->TextDims[1]);
+    float iw = static_cast<float>(dims[0]);
+    float ih = static_cast<float>(dims[1]);
+    float tcXMin = 1.f / (2.f * iw);
+    float tcYMin = 1.f / (2.f * ih);
+    float tcXMax = std::min(1.0f,
+                            (((2.f * tw - 1.f) / (2.f)) + 0.000001f) / iw);
+    float tcYMax = std::min(1.0f,
+                            (((2.f * th - 1.f) / (2.f)) + 0.000001f) / ih);
+    if (vtkFloatArray *tc =
+        vtkFloatArray::SafeDownCast(
+          this->PolyData->GetPointData()->GetTCoords()))
+      {
+      vtkDebugMacro(<<"Setting tcoords: xmin, xmax, ymin, ymax: "
+                    << tcXMin << ", " << tcXMax << ", "
+                    << tcYMin << ", " << tcYMax);
+      tc->Reset();
+      tc->InsertNextValue(tcXMin);
+      tc->InsertNextValue(tcYMin);
+
+      tc->InsertNextValue(tcXMin);
+      tc->InsertNextValue(tcYMax);
+
+      tc->InsertNextValue(tcXMax);
+      tc->InsertNextValue(tcYMax);
+
+      tc->InsertNextValue(tcXMax);
+      tc->InsertNextValue(tcYMin);
+
+      this->TCoordsTime.Modified();
+      }
+    else
+      {
+      vtkErrorMacro(<<"Invalid texture coordinate array type.");
+      }
     }
 
-  lineSize[0] = lineSize[1] = size[0] = size[1] = 0;
-  for ( i=0; i < this->NumberOfLines; i++ )
+  if (this->CoordsTime < actor->GetMTime() ||
+      this->CoordsTime < this->TextProperty->GetMTime())
     {
-    this->TextLines[i]->GetTextProperty()->ShallowCopy(tprop);
-    this->TextLines[i]->GetSize(viewport, lineSize);
-    size[0] = (lineSize[0] > size[0] ? lineSize[0] : size[0]);
-    size[1] = (lineSize[1] > size[1] ? lineSize[1] : size[1]);
+    int pos[2] = { 0, 0 };
+    AdjustOrigin(this->TextProperty->GetJustification(),
+                 this->TextProperty->GetVerticalJustification(),
+                 pos, this->TextDims);
+
+    double x = static_cast<double>(pos[0]);
+    double y = static_cast<double>(pos[1]);
+    double w = static_cast<double>(this->TextDims[0]);
+    double h = static_cast<double>(this->TextDims[1]);
+
+    this->Points->Reset();
+    this->Points->InsertNextPoint(x, y, 0.);
+    this->Points->InsertNextPoint(x, y + h, 0.);
+    this->Points->InsertNextPoint(x + w, y + h, 0.);
+    this->Points->InsertNextPoint(x + w, y, 0.);
+    this->CoordsTime.Modified();
     }
-
-  // add in the line spacing
-  this->LineSize = size[1];
-  size[1] = static_cast<int>(
-    size[1] * (1.0 + (this->NumberOfLines - 1) * tprop->GetLineSpacing()));
 }
 
 //----------------------------------------------------------------------------
-void vtkTextMapper::RenderOverlayMultipleLines(vtkViewport *viewport,
-                                               vtkActor2D *actor)
+void vtkTextMapper::UpdateImage()
 {
-  float offset = 0.0f;
-  int size[2];
-  // make sure LineSize is up to date
-  this->GetMultiLineSize(viewport,size);
-
-  vtkTextProperty *tprop = this->GetTextProperty();
-  if (!tprop)
+  vtkDebugMacro(<<"UpdateImage called");
+  if (this->MTime > this->Image->GetMTime() ||
+      this->TextProperty->GetMTime() > this->Image->GetMTime())
     {
-    vtkErrorMacro(<<"Need text property to render multiple lines of mapper");
-    return;
+    vtkTextRenderer *tren = vtkTextRenderer::GetInstance();
+    if (tren)
+      {
+      if (!tren->RenderString(this->TextProperty, this->Input,
+                              this->Image.GetPointer(), this->TextDims))
+        {
+        vtkErrorMacro(<<"Texture generation failed.");
+        }
+      vtkDebugMacro(<< "Text rendered to " << this->TextDims[0] << ", "
+                    << this->TextDims[1] << " buffer.");
+      }
+    else
+      {
+      vtkErrorMacro(<<"Could not locate vtkTextRenderer object.");
+      }
     }
+}
 
-  switch (tprop->GetVerticalJustification())
+#ifndef VTK_LEGACY_REMOVE
+int vtkTextMapper::GetNumberOfLines()
+{
+  VTK_LEGACY_BODY(vtkTextMapper::GetNumberOfLines, "VTK 6.0")
+  const char *input = this->Input;
+  if ( input == NULL || input[0] == '\0')
     {
-    case VTK_TEXT_TOP:
-      offset = 0.0f;
-      break;
-    case VTK_TEXT_CENTERED:
-      offset = (-this->NumberOfLines + 1.0f) / 2.0f;
-      break;
-    case VTK_TEXT_BOTTOM:
-      offset = -this->NumberOfLines + 1.0f;
-      break;
+    return 0;
     }
 
-  for (int lineNum=0; lineNum < this->NumberOfLines; lineNum++)
+  int numLines=1;
+  const char *ptr = input;
+
+  while ( ptr != NULL )
     {
-    this->TextLines[lineNum]->GetTextProperty()->ShallowCopy(tprop);
-    this->TextLines[lineNum]->GetTextProperty()->SetLineOffset
-      (tprop->GetLineOffset() + static_cast<int>(this->LineSize * (lineNum + offset) * tprop->GetLineSpacing()));
-    this->TextLines[lineNum]->RenderOverlay(viewport,actor);
+    if ( (ptr=strstr(ptr,"\n")) != NULL )
+      {
+      numLines++;
+      ptr++; //skip over \n
+      }
     }
+
+  return numLines;
 }
+#endif // VTK_LEGACY_REMOVE
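[Editor's note] The half-texel texture-coordinate arithmetic in the UpdateQuad() hunk above is easier to follow with concrete numbers. The snippet below only reproduces that arithmetic outside of VTK; the 100x20 text block padded into a 128x32 image is an invented illustration value, not something produced by the patch.

    #include <algorithm>
    #include <cstdio>

    int main()
    {
      const float tw = 100.f, th = 20.f; // TextDims: pixels covered by the rendered text
      const float iw = 128.f, ih = 32.f; // padded (power-of-two) image dimensions

      // Sample from the center of the first texel ...
      const float tcXMin = 1.f / (2.f * iw);
      const float tcYMin = 1.f / (2.f * ih);
      // ... to the center of the last text texel, clamped to [0, 1].
      const float tcXMax = std::min(1.0f, (((2.f * tw - 1.f) / 2.f) + 0.000001f) / iw);
      const float tcYMax = std::min(1.0f, (((2.f * th - 1.f) / 2.f) + 0.000001f) / ih);

      // Prints x: [0.003906, 0.777344]  y: [0.015625, 0.609375]
      std::printf("x: [%f, %f]  y: [%f, %f]\n", tcXMin, tcXMax, tcYMin, tcYMax);
      return 0;
    }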
diff --git a/Rendering/Core/vtkTextMapper.h b/Rendering/Core/vtkTextMapper.h
index 6238f78..7586616 100644
--- a/Rendering/Core/vtkTextMapper.h
+++ b/Rendering/Core/vtkTextMapper.h
@@ -19,9 +19,16 @@
 // vtkRenderer.
 //
 // To use vtkTextMapper, specify an input text string.
-
+//
 // .SECTION See Also
-// vtkMapper2D vtkActor2D vtkLegendBoxActor vtkCaptionActor2D vtkVectorText vtkTextProperty
+// vtkActor2D vtkTextActor vtkTextActor3D vtkTextProperty vtkTextRenderer
+//
+// .SECTION Note
+// This class will be overridden by the older vtkOpenGLFreeTypeTextMapper when
+// the vtkRenderingFreeTypeOpenGL library is linked into the executable. That
+// class provides legacy support for regression testing, but lacks many of the
+// newer features provided by this implementation (such as unicode and MathText
+// strings). Do not link with that library if such features are needed.
 
 #ifndef __vtkTextMapper_h
 #define __vtkTextMapper_h
@@ -29,8 +36,16 @@
 #include "vtkRenderingCoreModule.h" // For export macro
 #include "vtkMapper2D.h"
 
+#include "vtkNew.h" // For vtkNew
+
 class vtkActor2D;
+class vtkImageData;
+class vtkPoints;
+class vtkPolyData;
+class vtkPolyDataMapper2D;
 class vtkTextProperty;
+class vtkTexture;
+class vtkTimeStamp;
 class vtkViewport;
 
 class VTKRENDERINGCORE_EXPORT vtkTextMapper : public vtkMapper2D
@@ -46,15 +61,14 @@ public:
   // Description:
   // Return the size[2]/width/height of the rectangle required to draw this
   // mapper (in pixels).
-  virtual void GetSize(vtkViewport*, int size[2]) {size[0]=size[0];}
+  virtual void GetSize(vtkViewport*, int size[2]);
   virtual int GetWidth(vtkViewport*v);
   virtual int GetHeight(vtkViewport*v);
 
   // Description:
-  // Set the input text string to the mapper.  The mapper recognizes "\n"
-  // as a carriage return/linefeed (line separator).
-  virtual void SetInput(const char *inputString);
-  vtkGetStringMacro(Input);
+  // The input text string to the mapper.
+  vtkSetStringMacro(Input)
+  vtkGetStringMacro(Input)
 
   // Description:
   // Set/Get the text property.
@@ -66,13 +80,16 @@ public:
   void ShallowCopy(vtkTextMapper *tm);
 
   // Description:
-  // Determine the number of lines in the input string (delimited by "\n").
-  int  GetNumberOfLines(const char *input);
+  // Determine the number of lines in the input string.
+  // @deprecated This is a legacy method that was used in an older
+  // implementation, and may be removed in the future.
+  VTK_LEGACY(int GetNumberOfLines(const char *input));
 
   // Description:
-  // Get the number of lines in the input string (the method GetNumberOfLines(char*)
-  // must have been previously called for the return value to be valid).
-  vtkGetMacro(NumberOfLines,int);
+  // Get the number of lines in this mapper's input.
+  // @deprecated This is a legacy method that was used in an older
+  // implementation, and may be removed in the future.
+  VTK_LEGACY(int GetNumberOfLines());
 
   // Description:
   // Set and return the font size (in points) required to make this mapper fit
@@ -106,8 +123,13 @@ public:
 
   // Description:
   // Get the available system font size matching a font size.
-  virtual int GetSystemFontSize(int size)
-    { return size; }
+  // @deprecated This is a legacy method that was used in an older
+  // implementation, and may be removed in the future.
+  VTK_LEGACY(virtual int GetSystemFontSize(int size));
+
+  void RenderOverlay(vtkViewport *, vtkActor2D *);
+  void ReleaseGraphicsResources(vtkWindow *);
+  unsigned long GetMTime();
 
 protected:
   vtkTextMapper();
@@ -116,21 +138,32 @@ protected:
   char* Input;
   vtkTextProperty *TextProperty;
 
-  int  LineSize;
-  int  NumberOfLines;
-  int  NumberOfLinesAllocated;
-
-  vtkTextMapper **TextLines;
-
-  // These functions are used to parse, process, and render multiple lines
-  char *NextLine(const char *input, int lineNum);
-  void GetMultiLineSize(vtkViewport* viewport, int size[2]);
-  void RenderOverlayMultipleLines(vtkViewport *viewport, vtkActor2D *actor);
-
 private:
   vtkTextMapper(const vtkTextMapper&);  // Not implemented.
   void operator=(const vtkTextMapper&);  // Not implemented.
+
+  void UpdateQuad(vtkActor2D *actor);
+  void UpdateImage();
+
+  int TextDims[2];
+
+  vtkTimeStamp CoordsTime;
+  vtkTimeStamp TCoordsTime;
+  vtkNew<vtkImageData> Image;
+  vtkNew<vtkPoints> Points;
+  vtkNew<vtkPolyData> PolyData;
+  vtkNew<vtkPolyDataMapper2D> Mapper;
+  vtkNew<vtkTexture> Texture;
 };
 
+
+#ifndef VTK_LEGACY_REMOVE
+inline int vtkTextMapper::GetSystemFontSize(int size)
+{
+  VTK_LEGACY_BODY(vtkTextMapper::GetSystemFontSize, "VTK 6.0")
+  return size;
+}
+#endif // VTK_LEGACY_REMOVE
+
 #endif
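[Editor's note] For callers, the net effect of the header changes above is that vtkTextMapper is now driven entirely through SetInput(), its text property, and a vtkActor2D. A minimal usage sketch, assuming a VTK 6 build with the usual rendering and FreeType modules linked and auto-initialized; the string, font size, and position are illustration values only.

    #include <vtkActor2D.h>
    #include <vtkNew.h>
    #include <vtkRenderWindow.h>
    #include <vtkRenderer.h>
    #include <vtkTextMapper.h>
    #include <vtkTextProperty.h>

    int main()
    {
      vtkNew<vtkTextMapper> mapper;
      mapper->SetInput("Hello, VTK 6");            // raw string handed to vtkTextRenderer
      mapper->GetTextProperty()->SetFontSize(18);
      mapper->GetTextProperty()->SetJustificationToCentered();

      vtkNew<vtkActor2D> actor;
      actor->SetMapper(mapper.GetPointer());
      actor->SetPosition(150, 100);                // anchor point of the text in the viewport

      vtkNew<vtkRenderer> renderer;
      renderer->AddActor2D(actor.GetPointer());

      vtkNew<vtkRenderWindow> window;
      window->AddRenderer(renderer.GetPointer());
      window->Render();
      return 0;
    }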
 
diff --git a/Rendering/Core/vtkTextProperty.cxx b/Rendering/Core/vtkTextProperty.cxx
index b778e0b..ab77e52 100644
--- a/Rendering/Core/vtkTextProperty.cxx
+++ b/Rendering/Core/vtkTextProperty.cxx
@@ -27,6 +27,7 @@ vtkTextProperty::vtkTextProperty()
   this->Opacity  = 1.0;
 
   this->FontFamilyAsString = 0;
+  this->FontFile = NULL;
   this->SetFontFamilyAsString( "Arial" );
   this->FontSize = 12;
 
@@ -48,6 +49,7 @@ vtkTextProperty::vtkTextProperty()
 vtkTextProperty::~vtkTextProperty()
 {
   this->SetFontFamilyAsString(NULL);
+  this->SetFontFile(NULL);
 }
 
 //----------------------------------------------------------------------------
@@ -62,6 +64,7 @@ void vtkTextProperty::ShallowCopy(vtkTextProperty *tprop)
   this->SetOpacity(tprop->GetOpacity());
 
   this->SetFontFamilyAsString(tprop->GetFontFamilyAsString());
+  this->SetFontFile(tprop->GetFontFile());
   this->SetFontSize(tprop->GetFontSize());
 
   this->SetBold(tprop->GetBold());
@@ -105,6 +108,8 @@ void vtkTextProperty::PrintSelf(ostream& os, vtkIndent indent)
 
   os << indent << "FontFamilyAsString: "
      << (this->FontFamilyAsString ? this->FontFamilyAsString : "(null)") << endl;
+  os << indent << "FontFile: "
+     << (this->FontFile ? this->FontFile : "(null)") << endl;
   os << indent << "FontSize: " << this->FontSize << "\n";
 
   os << indent << "Bold: " << (this->Bold ? "On\n" : "Off\n");
diff --git a/Rendering/Core/vtkTextProperty.h b/Rendering/Core/vtkTextProperty.h
index f2fc628..e692601 100644
--- a/Rendering/Core/vtkTextProperty.h
+++ b/Rendering/Core/vtkTextProperty.h
@@ -51,6 +51,9 @@ public:
 
   // Description:
   // Set/Get the font family. Supports legacy three font family system.
+  // If the symbolic constant VTK_FONT_FILE is returned by GetFontFamily(), the
+  // string returned by GetFontFile() must be an absolute filepath
+  // to a local FreeType compatible font.
   vtkGetStringMacro(FontFamilyAsString);
   vtkSetStringMacro(FontFamilyAsString);
   void SetFontFamily(int t);
@@ -63,6 +66,13 @@ public:
   static const char *GetFontFamilyAsString( int f );
 
   // Description:
+  // The absolute filepath to a local file containing a freetype-readable font
+  // if GetFontFamily() returns VTK_FONT_FILE. The result is undefined for other
+  // values of GetFontFamily().
+  vtkGetStringMacro(FontFile)
+  vtkSetStringMacro(FontFile)
+
+  // Description:
   // Set/Get the font size (in points).
   vtkSetClampMacro(FontSize,int,0,VTK_INT_MAX);
   vtkGetMacro(FontSize, int);
@@ -148,6 +158,7 @@ protected:
   double Color[3];
   double Opacity;
   char* FontFamilyAsString;
+  char* FontFile;
   int   FontSize;
   int   Bold;
   int   Italic;
@@ -178,6 +189,10 @@ inline const char *vtkTextProperty::GetFontFamilyAsString( int f )
     {
     return "Times";
     }
+  else if ( f == VTK_FONT_FILE )
+    {
+    return "File";
+    }
   return "Unknown";
 }
 
@@ -215,6 +230,10 @@ inline int vtkTextProperty::GetFontFamilyFromString( const char *f )
     {
     return VTK_TIMES;
     }
+  else if ( strcmp( f, GetFontFamilyAsString( VTK_FONT_FILE) ) == 0 )
+    {
+    return VTK_FONT_FILE;
+    }
   return VTK_UNKNOWN_FONT;
 }
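[Editor's note] The new FontFile string only takes effect when the font family is VTK_FONT_FILE, so both have to be set together. A minimal sketch, assuming a FreeType-readable font exists at the (placeholder) path below:

    #include <vtkTextProperty.h>

    void UseCustomFont(vtkTextProperty *tprop)
    {
      // Switch away from the three built-in families ...
      tprop->SetFontFamily(VTK_FONT_FILE);
      // ... and point FontFile at an absolute path to a FreeType-compatible font.
      // The path is a placeholder for illustration, not a file shipped with VTK.
      tprop->SetFontFile("/path/to/SomeFont.ttf");
    }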
 
diff --git a/Rendering/Core/vtkTextRenderer.h b/Rendering/Core/vtkTextRenderer.h
index 0c4b519..787f2fd 100644
--- a/Rendering/Core/vtkTextRenderer.h
+++ b/Rendering/Core/vtkTextRenderer.h
@@ -58,6 +58,10 @@ class VTKRENDERINGCORE_EXPORT vtkTextRendererCleanup
 public:
   vtkTextRendererCleanup();
   ~vtkTextRendererCleanup();
+
+private:
+  vtkTextRendererCleanup(const vtkTextRendererCleanup& other); // no copy constructor
+  vtkTextRendererCleanup& operator=(const vtkTextRendererCleanup& rhs); // no copy assignment
 };
 
 class VTKRENDERINGCORE_EXPORT vtkTextRenderer: public vtkObject
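[Editor's note] Beyond the copy protection added above, vtkTextRenderer is the backend the rewritten vtkTextMapper now calls into (see the UpdateImage() hunk earlier). A small sketch of that call pattern, assuming a build where a concrete text-rendering backend (e.g. the FreeType module) is registered so GetInstance() does not return NULL:

    #include <vtkImageData.h>
    #include <vtkNew.h>
    #include <vtkTextProperty.h>
    #include <vtkTextRenderer.h>

    bool RenderLabel(const char *text, vtkImageData *image, int textDims[2])
    {
      vtkTextRenderer *tren = vtkTextRenderer::GetInstance();
      if (!tren)
        {
        return false;                   // no backend registered
        }
      vtkNew<vtkTextProperty> tprop;
      tprop->SetFontSize(16);
      // Rasterizes 'text' into 'image' and reports the covered width/height.
      return tren->RenderString(tprop.GetPointer(), text, image, textDims);
    }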
diff --git a/Rendering/Core/vtkViewDependentErrorMetric.cxx b/Rendering/Core/vtkViewDependentErrorMetric.cxx
index 7223df3..90c6db2 100644
--- a/Rendering/Core/vtkViewDependentErrorMetric.cxx
+++ b/Rendering/Core/vtkViewDependentErrorMetric.cxx
@@ -20,7 +20,7 @@
 #include "vtkGenericAdaptorCell.h"
 #include "vtkGenericDataSet.h"
 #include "vtkMath.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkCoordinate.h"
 #include "vtkViewport.h"
 
diff --git a/Rendering/Core/vtkViewport.h b/Rendering/Core/vtkViewport.h
index c276d46..ece20ff 100644
--- a/Rendering/Core/vtkViewport.h
+++ b/Rendering/Core/vtkViewport.h
@@ -183,13 +183,13 @@ public:
   virtual void NormalizedDisplayToViewport(double &x, double &y);
   virtual void ViewportToNormalizedViewport(double &u, double &v);
   virtual void NormalizedViewportToView(double &x, double &y, double &z);
-  virtual void ViewToWorld(double &, double &, double &) {};
+  virtual void ViewToWorld(double &, double &, double &) {}
   virtual void DisplayToLocalDisplay(double &x, double &y);
   virtual void NormalizedDisplayToDisplay(double &u, double &v);
   virtual void ViewportToNormalizedDisplay(double &x, double &y);
   virtual void NormalizedViewportToViewport(double &u, double &v);
   virtual void ViewToNormalizedViewport(double &x, double &y, double &z);
-  virtual void WorldToView(double &, double &, double &) {};
+  virtual void WorldToView(double &, double &, double &) {}
 
   // Description:
   // Get the size and origin of the viewport in display coordinates. Note:
diff --git a/Rendering/Core/vtkVolume.cxx b/Rendering/Core/vtkVolume.cxx
index 3f035be..ae89809 100644
--- a/Rendering/Core/vtkVolume.cxx
+++ b/Rendering/Core/vtkVolume.cxx
@@ -66,25 +66,10 @@ vtkVolume::~vtkVolume()
 
   for ( int i = 0; i < VTK_MAX_VRCOMP; i++ )
     {
-    if ( this->ScalarOpacityArray[i] )
-      {
-      delete [] this->ScalarOpacityArray[i];
-      }
-
-    if ( this->RGBArray[i] )
-      {
-      delete [] this->RGBArray[i];
-      }
-
-    if ( this->GrayArray[i] )
-      {
-      delete [] this->GrayArray[i];
-      }
-
-    if ( this->CorrectedScalarOpacityArray[i] )
-      {
-      delete [] this->CorrectedScalarOpacityArray[i];
-      }
+    delete [] this->ScalarOpacityArray[i];
+    delete [] this->RGBArray[i];
+    delete [] this->GrayArray[i];
+    delete [] this->CorrectedScalarOpacityArray[i];
     }
 }
 
@@ -570,26 +555,17 @@ void vtkVolume::UpdateTransferFunctions( vtkRenderer *vtkNotUsed(ren) )
     // opacity
     if ( arraySize != this->ArraySize )
       {
-      if ( this->ScalarOpacityArray[c] )
-        {
-        delete [] this->ScalarOpacityArray[c];
-        this->ScalarOpacityArray[c] = NULL;
-        }
-      if ( this->CorrectedScalarOpacityArray[c] )
-        {
-        delete [] this->CorrectedScalarOpacityArray[c];
-        this->CorrectedScalarOpacityArray[c] = NULL;
-        }
-      if ( this->GrayArray[c] )
-        {
-        delete [] this->GrayArray[c];
-        this->GrayArray[c] = NULL;
-        }
-      if ( this->RGBArray[c] )
-        {
-        delete [] this->RGBArray[c];
-        this->RGBArray[c] = NULL;
-        }
+      delete [] this->ScalarOpacityArray[c];
+      this->ScalarOpacityArray[c] = NULL;
+
+      delete [] this->CorrectedScalarOpacityArray[c];
+      this->CorrectedScalarOpacityArray[c] = NULL;
+
+      delete [] this->GrayArray[c];
+      this->GrayArray[c] = NULL;
+
+      delete [] this->RGBArray[c];
+      this->RGBArray[c] = NULL;
 
       // Allocate these two because we know we need them
       this->ScalarOpacityArray[c] = new float[arraySize];
@@ -603,11 +579,9 @@ void vtkVolume::UpdateTransferFunctions( vtkRenderer *vtkNotUsed(ren) )
     // Free the rgb array if there is one.
     if ( colorChannels == 1 )
       {
-      if ( this->RGBArray[c] )
-        {
-        delete [] this->RGBArray[c];
-        this->RGBArray[c] = NULL;
-        }
+      delete [] this->RGBArray[c];
+      this->RGBArray[c] = NULL;
+
       if ( !this->GrayArray[c] )
         {
         this->GrayArray[c] = new float[arraySize];
@@ -618,11 +592,9 @@ void vtkVolume::UpdateTransferFunctions( vtkRenderer *vtkNotUsed(ren) )
     // Free the gray array if there is one.
     if ( colorChannels == 3 )
       {
-      if ( this->GrayArray[c] )
-        {
-        delete [] this->GrayArray[c];
-        this->GrayArray[c] = NULL;
-        }
+      delete [] this->GrayArray[c];
+      this->GrayArray[c] = NULL;
+
       if ( !this->RGBArray[c] )
         {
         this->RGBArray[c] = new float[3*arraySize];
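[Editor's note] The vtkVolume cleanups above do not change behaviour: applying delete [] to a null pointer is already a no-op in C++, so the surrounding null checks were redundant. A tiny standalone illustration of the guarantee being relied on:

    int main()
    {
      float *scalarOpacity = 0;   // mirrors a transfer-function array that was never allocated
      delete [] scalarOpacity;    // well-defined: deleting a null pointer does nothing
      return 0;
    }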
diff --git a/Rendering/Core/vtkWindowLevelLookupTable.h b/Rendering/Core/vtkWindowLevelLookupTable.h
index b550b1b..e2ddfa6 100644
--- a/Rendering/Core/vtkWindowLevelLookupTable.h
+++ b/Rendering/Core/vtkWindowLevelLookupTable.h
@@ -90,7 +90,7 @@ public:
 
 protected:
   vtkWindowLevelLookupTable(int sze=256, int ext=256);
-  ~vtkWindowLevelLookupTable() {};
+  ~vtkWindowLevelLookupTable() {}
 
   double Window;
   double Level;
diff --git a/Rendering/Core/vtkWorldPointPicker.h b/Rendering/Core/vtkWorldPointPicker.h
index 9debaf1..0d314f7 100644
--- a/Rendering/Core/vtkWorldPointPicker.h
+++ b/Rendering/Core/vtkWorldPointPicker.h
@@ -48,7 +48,7 @@ public:
 
 protected:
   vtkWorldPointPicker ();
-  ~vtkWorldPointPicker() {};
+  ~vtkWorldPointPicker() {}
 
 private:
   vtkWorldPointPicker(const vtkWorldPointPicker&);  // Not implemented.
diff --git a/Rendering/Core/vtkXMLMaterial.cxx b/Rendering/Core/vtkXMLMaterial.cxx
deleted file mode 100644
index 0c93ba6..0000000
--- a/Rendering/Core/vtkXMLMaterial.cxx
+++ /dev/null
@@ -1,313 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkXMLMaterial.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "vtkXMLMaterial.h"
-
-#include "vtkMaterialLibrary.h"
-#include "vtkObjectFactory.h"
-#include "vtkSmartPointer.h"
-#include "vtkXMLDataElement.h"
-#include "vtkXMLMaterialParser.h"
-#include "vtkXMLShader.h"
-
-#include <vector>
-#include <assert.h>
-
-class vtkXMLMaterialInternals
-{
-public:
-  typedef std::vector<vtkXMLDataElement*> VectorOfElements;
-  typedef std::vector<vtkSmartPointer<vtkXMLShader> > VectorOfShaders;
-  VectorOfElements Properties;
-  VectorOfShaders VertexShaders;
-  VectorOfShaders FragmentShaders;
-  VectorOfElements Textures;
-  void Initialize()
-    {
-    this->Properties.clear();
-    this->VertexShaders.clear();
-    this->FragmentShaders.clear();
-    this->Textures.clear();
-    }
-};
-
-vtkStandardNewMacro(vtkXMLMaterial);
-
-//-----------------------------------------------------------------------------
-vtkXMLMaterial::vtkXMLMaterial()
-{
-  this->RootElement = 0;
-  this->Internals = new vtkXMLMaterialInternals;
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLMaterial::~vtkXMLMaterial()
-{
-  this->SetRootElement(0);
-  delete this->Internals;
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLMaterial* vtkXMLMaterial::CreateInstance(const char* name)
-{
-  if (!name)
-    {
-    return 0;
-    }
-
-  vtkXMLMaterialParser* parser = vtkXMLMaterialParser::New();
-  vtkXMLMaterial* material = vtkXMLMaterial::New();
-  parser->SetMaterial(material);
-
-  // First, look for material library files.
-  // Then, look for Repository files.
-
-  char* xml = vtkMaterialLibrary::GetMaterial(name);
-  if (xml)
-    {
-    parser->Parse(xml);
-    parser->Delete();
-    delete [] xml;
-    return material;
-    }
-
-  char* filename = vtkXMLShader::LocateFile(name);
-  if (filename)
-    {
-    parser->SetFileName( filename );
-    delete [] filename;
-    parser->Parse();
-    parser->Delete();
-    return material;
-    }
-
-  parser->Delete();
-  material->Delete();
-  return NULL;
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterial::SetRootElement(vtkXMLDataElement* root)
-{
-  this->Internals->Initialize();
-
-  vtkSetObjectBodyMacro(RootElement, vtkXMLDataElement, root);
-  if (this->RootElement)
-    {
-    // Update the internal data structure to
-    // avoid repeated searches.
-    int numElems = this->RootElement->GetNumberOfNestedElements();
-    for (int i=0; i<numElems; i++)
-      {
-      vtkXMLDataElement* elem = this->RootElement->GetNestedElement(i);
-      const char* name = elem->GetName();
-      if (!name)
-        {
-        continue;
-        }
-      if (strcmp(name, "Property") == 0)
-        {
-        this->Internals->Properties.push_back(elem);
-        }
-      else if (strcmp(name, "Shader") == 0)
-        {
-        vtkXMLShader* shader = vtkXMLShader::New();
-        shader->SetRootElement(elem);
-
-        switch (shader->GetScope())
-          {
-        case vtkXMLShader::SCOPE_VERTEX:
-          this->Internals->VertexShaders.push_back(shader);
-          break;
-        case vtkXMLShader::SCOPE_FRAGMENT:
-          this->Internals->FragmentShaders.push_back(shader);
-          break;
-        default:
-          vtkErrorMacro("Invalid scope for shader: " << shader->GetName());
-          }
-
-        shader->Delete();
-        }
-      else if (strcmp(name, "Texture") == 0)
-        {
-        this->Internals->Textures.push_back(elem);
-        }
-      }
-    }
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterial::GetNumberOfProperties()
-{
-  return static_cast<int>(this->Internals->Properties.size());
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterial::GetNumberOfTextures()
-{
-  return static_cast<int>(this->Internals->Textures.size());
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterial::GetNumberOfVertexShaders()
-{
-  return static_cast<int>(this->Internals->VertexShaders.size());
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterial::GetNumberOfFragmentShaders()
-{
-  return static_cast<int>(this->Internals->FragmentShaders.size());
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLDataElement* vtkXMLMaterial::GetProperty(int id)
-{
-  if (id < this->GetNumberOfProperties())
-    {
-    return this->Internals->Properties[id];
-    }
-  return NULL;
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLDataElement* vtkXMLMaterial::GetTexture(int index)
-{
-  if (index < this->GetNumberOfTextures())
-    {
-    return this->Internals->Textures[index];
-    }
-  return NULL;
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLShader* vtkXMLMaterial::GetVertexShader(int id)
-{
-  if (id < this->GetNumberOfVertexShaders())
-    {
-    return this->Internals->VertexShaders[id].GetPointer();
-    }
-  return NULL;
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLShader* vtkXMLMaterial::GetFragmentShader(int id)
-{
-  if (id < this->GetNumberOfFragmentShaders())
-    {
-    return this->Internals->FragmentShaders[id].GetPointer();
-    }
-  return NULL;
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterial::GetShaderLanguage()
-{
-  if( this->GetVertexShader() && this->GetFragmentShader() )
-    {
-    int vLang = this->GetVertexShader()->GetLanguage();
-    int fLang = this->GetFragmentShader()->GetLanguage();
-
-    if (vLang == fLang)
-      {
-      return this->GetVertexShader()->GetLanguage();
-      }
-    else if(vLang != vtkXMLShader::LANGUAGE_NONE &&
-      fLang == vtkXMLShader::LANGUAGE_NONE)
-      {
-      return this->GetVertexShader()->GetLanguage();
-      }
-    else if(vLang == vtkXMLShader::LANGUAGE_NONE &&
-      fLang != vtkXMLShader::LANGUAGE_NONE)
-      {
-      return this->GetFragmentShader()->GetLanguage();
-      }
-    else
-      {
-      return vtkXMLShader::LANGUAGE_MIXED;
-      }
-    }
-  else if (this->GetVertexShader())
-    {
-    return this->GetVertexShader()->GetLanguage();
-    }
-  else if (this->GetFragmentShader())
-    {
-    return this->GetFragmentShader()->GetLanguage();
-    }
-  return vtkXMLShader::LANGUAGE_NONE;
-}
-
-// ----------------------------------------------------------------------------
-// Description:
-// Get the style the shaders.
-// \post valid_result: result==1 || result==2
-int vtkXMLMaterial::GetShaderStyle()
-{
-  int result = 1;
-  if(this->GetShaderLanguage() == vtkXMLShader::LANGUAGE_GLSL)
-    {
-    int vStyle = 0;
-    if (this->GetVertexShader())
-      {
-      vStyle = this->GetVertexShader()->GetStyle();
-      }
-    int fStyle = 0;
-    if (this->GetFragmentShader())
-      {
-      fStyle=this->GetFragmentShader()->GetStyle();
-      }
-    if (vStyle!=0 && fStyle!=0 && vStyle!=fStyle)
-      {
-      vtkErrorMacro(<<"vertex shader and fragment shader style differ.");
-      }
-    else
-      {
-      if (vStyle!=0)
-        {
-        result = vStyle;
-        }
-      else
-        {
-        result = fStyle;
-        }
-      }
-    }
-
-  assert("post: valid_result" && (result==1 || result==2) );
-  return result;
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterial::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-  os << indent << "Number of Properties: " << this->GetNumberOfProperties()
-    << endl;
-  os << indent << "Number of Vertex Shaders: "
-    << this->GetNumberOfVertexShaders() << endl;
-  os << indent << "Number of Fragment Shaders: "
-    << this->GetNumberOfFragmentShaders() << endl;
-  os << indent << "RootElement: ";
-  if (this->RootElement)
-    {
-    os << endl;
-    this->RootElement->PrintSelf(os, indent.GetNextIndent());
-    }
-  else
-    {
-    os << "(null)" << endl;
-    }
-}
diff --git a/Rendering/Core/vtkXMLMaterial.h b/Rendering/Core/vtkXMLMaterial.h
deleted file mode 100644
index dae401d..0000000
--- a/Rendering/Core/vtkXMLMaterial.h
+++ /dev/null
@@ -1,109 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkXMLMaterial.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkXMLMaterial - encapsulates a VTK Material description.
-// .SECTION Description
-// vtkXMLMaterial encapsulates VTK Material description. It keeps a pointer
-// to vtkXMLDataElement that defines the material and provides
-// access to Shaders/Properties defined in it.
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkXMLMaterial_h
-#define __vtkXMLMaterial_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkObject.h"
-
-class vtkXMLDataElement;
-class vtkXMLMaterialInternals;
-class vtkXMLShader;
-
-class VTKRENDERINGCORE_EXPORT vtkXMLMaterial : public vtkObject
-{
-public:
-  static vtkXMLMaterial* New();
-  vtkTypeMacro(vtkXMLMaterial, vtkObject);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Create a new instance. It searches for the material
-  // using the following order: first, check the MaterialLibrary; second,
-  // treat the name as an absolute path and try to locate it; third,
-  // search the Material repository. Returns null is it fails to
-  // locate the material.
-  static vtkXMLMaterial* CreateInstance(const char* name);
-
-  // Description:
-  // Get number of elements of type Property.
-  int GetNumberOfProperties();
-
-  // Description:
-  // Get number of elements of type Texture.
-  int GetNumberOfTextures();
-
-  // Description:
-  // Get number of Vertex shaders.
-  int GetNumberOfVertexShaders();
-
-  // Description:
-  // Get number of fragment shaders.
-  int GetNumberOfFragmentShaders();
-
-  // Description:
-  // Get the ith vtkXMLDataElement of type <Property />.
-  vtkXMLDataElement* GetProperty(int id=0);
-
-  // Description:
-  // Get the ith vtkXMLDataElement of type <Texture />.
-  vtkXMLDataElement* GetTexture(int id=0);
-
-  // Description:
-  // Get the ith vtkXMLDataElement of type <VertexShader />.
-  vtkXMLShader* GetVertexShader(int id=0);
-
-  // Description:
-  // Get the ith vtkXMLDataElement of type <FragmentShader />.
-  vtkXMLShader* GetFragmentShader(int id=0);
-
-  // Description:
-  // Get/Set the XML root element that describes this material.
-  vtkGetObjectMacro(RootElement, vtkXMLDataElement);
-  void SetRootElement(vtkXMLDataElement*);
-
-  // Description:
-  // Get the Language used by the shaders in this Material.
-  // The Language of a vtkXMLMaterial is based on the Language of it's
-  // shaders.
-  int GetShaderLanguage();
-
-  // Description:
-  // Get the style the shaders.
-  // \post valid_result: result==1 || result==2
-  int GetShaderStyle();
-
-protected:
-  vtkXMLMaterial();
-  ~vtkXMLMaterial();
-
-  vtkXMLDataElement* RootElement;
-  vtkXMLMaterialInternals* Internals;
-private:
-  vtkXMLMaterial(const vtkXMLMaterial&); // Not implemented.
-  void operator=(const vtkXMLMaterial&); // Not implemented.
-};
-
-#endif
-
diff --git a/Rendering/Core/vtkXMLMaterialParser.cxx b/Rendering/Core/vtkXMLMaterialParser.cxx
deleted file mode 100644
index 6b9f04d..0000000
--- a/Rendering/Core/vtkXMLMaterialParser.cxx
+++ /dev/null
@@ -1,152 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkXMLMaterialParser.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-#include "vtkXMLMaterialParser.h"
-
-#include "vtkXMLMaterial.h"
-#include "vtkObjectFactory.h"
-#include "vtkSmartPointer.h"
-#include "vtkXMLDataElement.h"
-
-#include "vtkXMLUtilities.h"
-
-#include <vector>
-
-
-//-----------------------------------------------------------------------------
-class vtkXMLMaterialParserInternals
-{
-public:
-  typedef std::vector<vtkSmartPointer<vtkXMLDataElement> > VectorOfElements;
-  VectorOfElements Stack;
-};
-
-//-----------------------------------------------------------------------------
-vtkStandardNewMacro(vtkXMLMaterialParser);
-vtkCxxSetObjectMacro(vtkXMLMaterialParser, Material, vtkXMLMaterial);
-
-//-----------------------------------------------------------------------------
-vtkXMLMaterialParser::vtkXMLMaterialParser()
-{
-  this->Material = vtkXMLMaterial::New();
-  this->Material->Register(this);
-  this->Material->Delete();
-  this->Internals = new vtkXMLMaterialParserInternals;
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLMaterialParser::~vtkXMLMaterialParser()
-{
-  delete this->Internals;
-  this->SetMaterial(0);
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterialParser::Parse(const char* str)
-{
-  return this->Superclass::Parse(str);
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterialParser::Parse(const char* str, unsigned int length)
-{
-  return this->Superclass::Parse(str, length);
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterialParser::Parse()
-{
-  this->Internals->Stack.clear();
-  return this->Superclass::Parse();
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLMaterialParser::InitializeParser()
-{
-  int ret = this->Superclass::InitializeParser();
-  if (ret)
-    {
-    this->Internals->Stack.clear();
-    }
-  return ret;
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterialParser::StartElement(const char* name, const char** atts)
-{
-  vtkXMLDataElement* element = vtkXMLDataElement::New();
-  element->SetName(name);
-  element->SetXMLByteIndex(this->GetXMLByteIndex());
-  vtkXMLUtilities::ReadElementFromAttributeArray(element, atts, VTK_ENCODING_NONE);
-  const char* id = element->GetAttribute("id");
-  if (id)
-    {
-    element->SetId(id);
-    }
-  this->Internals->Stack.push_back(element);
-  element->Delete();
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterialParser::EndElement(const char* vtkNotUsed(name))
-{
-  vtkXMLDataElement* finished = this->Internals->Stack.back().GetPointer();
-  int prev_pos = static_cast<int>(this->Internals->Stack.size()) - 2;
-  if (prev_pos >= 0)
-    {
-    this->Internals->Stack[prev_pos].GetPointer()->AddNestedElement(finished);
-    }
-  else
-    {
-    this->Material->SetRootElement(finished);
-    }
-
-  this->Internals->Stack.pop_back();
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterialParser::CharacterDataHandler( const char* inData, int inLength )
-{
-  if (this->Internals->Stack.size() > 0)
-    {
-    vtkXMLDataElement* elem = this->Internals->Stack.back().GetPointer();
-    elem->AddCharacterData(inData, inLength);
-    }
-  /*
-  // this wont happen as the XML parser will flag it as an error.
-  else
-    {
-    vtkErrorMacro("Character data not enclosed in XML tags");
-    }
-  */
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterialParser::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-  os << indent << "Material: " ;
-  this->Material->PrintSelf(os, indent.GetNextIndent());
-}
-
diff --git a/Rendering/Core/vtkXMLMaterialParser.h b/Rendering/Core/vtkXMLMaterialParser.h
deleted file mode 100644
index 1482367..0000000
--- a/Rendering/Core/vtkXMLMaterialParser.h
+++ /dev/null
@@ -1,123 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkXMLMaterialParser.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-/*
- * Copyright 2004 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-// .NAME vtkXMLMaterialParser - Parses VTK Material file
-//
-// .SECTION Description
-// vtkXMLMaterialParser parses a VTK Material file and provides that file's
-// description of a number of vertex and fragment shaders along with data
-// values specified for data members of vtkProperty. This material is to be
-// applied to an actor through it's vtkProperty and augments VTK's concept
-// of a vtkProperty to include explicitly include vertex and fragment shaders
-// and parameter settings for those shaders. This effectively makes reflectance
-// models and other shaders  a material property. If no shaders are specified
-// VTK should default to standard rendering.
-//
-// .SECTION Design
-// vtkXMLMaterialParser provides access to 3 distinct types of first-level
-// vtkXMLDataElements that describe a VTK material. These elements are as
-// follows:
-//
-// vtkProperty - describe values for vtkProperty data members
-//
-// vtkVertexShader - a vertex shader and enough information to
-// install it into the hardware rendering pipeline including values for
-// specific shader parameters and structures.
-//
-// vtkFragmentShader - a fragment shader and enough information to
-// install it into the hardware rendering pipeline including values for
-// specific shader parameters and structures.
-//
-// The design of the material file closely follows that of vtk's xml
-// descriptions of it's data sets. This allows use of the very handy
-// vtkXMLDataElement which provides easy access to an xml element's
-// attribute values. Inlined data is currently not handled.
-//
-// Ideally this class would be a Facade to a DOM parser, but VTK only
-// provides access to expat, a SAX parser. Other vtk classes that parse
-// xml files are tuned to read vtkDataSets and don't provide the functionality
-// to handle generic xml data. As such they are of little use here.
-//
-// This class may be extended for better data  handling or may become a
-// Facade to a DOM parser should on become part of the VTK code base.
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkXMLMaterialParser_h
-#define __vtkXMLMaterialParser_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkXMLParser.h"
-
-class vtkXMLMaterial;
-class vtkXMLMaterialParserInternals;
-
-class VTKRENDERINGCORE_EXPORT vtkXMLMaterialParser : public vtkXMLParser
-{
-public:
-  static vtkXMLMaterialParser *New();
-  vtkTypeMacro(vtkXMLMaterialParser,vtkXMLParser);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Set/Get the vtkXMLMaterial representation of the parsed material.
-  vtkGetObjectMacro(Material, vtkXMLMaterial);
-  void SetMaterial(vtkXMLMaterial*);
-
-  // Description:
-  // Overridden to initialize the internal structures before
-  // the parsing begins.
-  virtual int Parse();
-  virtual int Parse(const char* inputString);
-  virtual int Parse(const char* inputString, unsigned int length);
-
-  // Description:
-  // Overridden to clean up internal structures before the chunk-parsing
-  // begins.
-  virtual int InitializeParser();
-protected:
-  vtkXMLMaterialParser();
-  ~vtkXMLMaterialParser();
-
-  // Description:
-  // Event for handling the start of an element
-  virtual void StartElement(const char* name, const char** atts);
-
-  // Description:
-  // Event for handling the end of an element
-  virtual void EndElement(const char*);
-
-  // Description:
-  // Handle character data, not yet implemented
-  virtual void CharacterDataHandler( const char* data, int length );
-
-  vtkXMLMaterial* Material;
-  vtkXMLMaterialParserInternals* Internals;
-
-private:
-  vtkXMLMaterialParser(const vtkXMLMaterialParser&); // Not implemented
-  void operator=(const vtkXMLMaterialParser&); // Not implemented
-};
-#endif
diff --git a/Rendering/Core/vtkXMLMaterialReader.cxx b/Rendering/Core/vtkXMLMaterialReader.cxx
deleted file mode 100644
index 0872174..0000000
--- a/Rendering/Core/vtkXMLMaterialReader.cxx
+++ /dev/null
@@ -1,112 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkXMLMaterialReader.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkXMLMaterialReader.h"
-
-#include "vtkObjectFactory.h"
-#include "vtkXMLDataElement.h"
-#include "vtkXMLMaterial.h"
-#include "vtkXMLMaterialParser.h"
-
-vtkStandardNewMacro( vtkXMLMaterialReader );
-//-----------------------------------------------------------------------------
-vtkXMLMaterialReader::vtkXMLMaterialReader()
-  :
-  FileName(NULL),
-  XMLParser(NULL)
-{
-  this->CreateXMLParser();
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLMaterialReader::~vtkXMLMaterialReader()
-{
-  this->SetFileName(NULL);
-  this->DestroyXMLParser();
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterialReader::CreateXMLParser()
-{
-  if( this->XMLParser)
-    {
-    vtkErrorMacro("vtkXMLMaterialReader::CreateXMLParser() called with \
-      an existent XMLParser.");
-    this->DestroyXMLParser();
-    }
-  this->XMLParser = vtkXMLMaterialParser::New();
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterialReader::DestroyXMLParser()
-{
-  if(!this->XMLParser)
-    {
-    vtkErrorMacro("DestroyXMLParser() called with no current XMLParser.");
-    return;
-    }
-  this->XMLParser->Delete();
-  this->XMLParser = 0;
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLMaterial* vtkXMLMaterialReader::GetMaterial()
-{
-  if (this->XMLParser)
-    {
-    return this->XMLParser->GetMaterial();
-    }
-  return 0;
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterialReader::ReadMaterial()
-{
-  if( this->GetFileName() == NULL )
-    {
-    vtkErrorMacro( "No material file specified in vtkXMLMaterialReader." );
-    }
-
-  if (this->ParseTime < this->MTime )
-    {
-    if( this->XMLParser )
-      {
-      this->XMLParser->SetFileName( this->GetFileName() );
-      this->XMLParser->Parse();
-      this->ParseTime.Modified();
-      }
-    else
-      {
-      vtkErrorMacro( "Cannot read the material file without a Parser." );
-      }
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLMaterialReader::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-  os << indent << "XMLParser: " ;
-  if (this->XMLParser)
-    {
-    os << endl;
-    this->XMLParser->PrintSelf(os, indent.GetNextIndent());
-    }
-  else
-    {
-    os << "(none)" << endl;
-    }
-  os << indent << "FileName: " <<
-    (this->FileName? this->FileName : "(null)") << endl;
-}
diff --git a/Rendering/Core/vtkXMLMaterialReader.h b/Rendering/Core/vtkXMLMaterialReader.h
deleted file mode 100644
index 3b3d0f0..0000000
--- a/Rendering/Core/vtkXMLMaterialReader.h
+++ /dev/null
@@ -1,92 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkXMLMaterialReader.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkXMLMaterialReader - Provide access to elements in Material files
-//
-// .SECTION Description
-// vtkXMLMaterialReader provides access to three types of vtkXMLDataElement
-// found in XML Material Files. This class sorts them by type and integer
-// id from 0-N for N elements of a specific type starting with the first
-// instance found.
-//
-// .SECTION Design
-// This class is basically a Facade for vtkXMLMaterialParser. Currently
-// functionality is to only provide access to vtkXMLDataElements but further
-// extensions may return higher level data structures.
-//
-// Having both an vtkXMLMaterialParser and a vtkXMLMaterialReader is consistent with
-// VTK's design for handling xml file and provides for future flexibility, that is
-// better data handlers and interfacing with a DOM xml parser.
-//
-// vtkProperty - defines values for some or all data members of vtkProperty
-//
-// vtkVertexShader - defines vertex shaders
-//
-// vtkFragmentShader - defines fragment shaders
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkXMLMaterialReader_h
-#define __vtkXMLMaterialReader_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkObject.h"
-
-class vtkXMLDataElement;
-class vtkXMLMaterial;
-class vtkXMLMaterialParser;
-
-class VTKRENDERINGCORE_EXPORT vtkXMLMaterialReader : public vtkObject
-{
-public:
-  vtkTypeMacro(vtkXMLMaterialReader,vtkObject);
-  void PrintSelf(ostream& os, vtkIndent indent);
-  static vtkXMLMaterialReader* New();
-
-  // Description:
-  // Set and get file name.
-  vtkSetStringMacro(FileName);
-  vtkGetStringMacro(FileName);
-
-  // Description:
-  // Read the material file referred to in FileName.
-  // If the Reader hasn't changed since the last ReadMaterial(),
-  // it does not read the file again.
-  void ReadMaterial();
-
-  // Description:
-  // Get the Material representation read by the reader.
-  vtkXMLMaterial* GetMaterial();
-protected:
-  vtkXMLMaterialReader();
-  ~vtkXMLMaterialReader();
-
-  // Description:
-  // Create a vtkXMLParser to read the file
-  virtual void CreateXMLParser();
-
-  // Description:
-  // Destroys the vtkXMLParser.
-  virtual void DestroyXMLParser();
-
-  char* FileName;
-  vtkXMLMaterialParser* XMLParser;
-  vtkTimeStamp ParseTime;
-
-private:
-  vtkXMLMaterialReader(const vtkXMLMaterialReader&);  // Not implemented.
-  void operator=(const vtkXMLMaterialReader&);  // Not implemented.
-};
-#endif
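
For readers following the removal: the deleted reader was a thin facade whose entire public surface appears in the header above (SetFileName, ReadMaterial, GetMaterial). A minimal usage sketch reconstructed from those declarations only; the path "materials/earth.xml" and the inspection comment are illustrative, not anything shipped with VTK:

    #include "vtkXMLMaterial.h"        // declares the material description returned below
    #include "vtkXMLMaterialReader.h"

    void LoadMaterialSketch()
    {
      vtkXMLMaterialReader* reader = vtkXMLMaterialReader::New();
      reader->SetFileName("materials/earth.xml");  // illustrative path
      reader->ReadMaterial();                      // skips the parse if nothing changed
      if (vtkXMLMaterial* material = reader->GetMaterial())
        {
        (void)material;  // vtkProperty / vtkVertexShader / vtkFragmentShader elements live here
        }
      reader->Delete();
    }
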
diff --git a/Rendering/Core/vtkXMLShader.cxx b/Rendering/Core/vtkXMLShader.cxx
deleted file mode 100644
index 183aa15..0000000
--- a/Rendering/Core/vtkXMLShader.cxx
+++ /dev/null
@@ -1,447 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkXMLShader.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkXMLShader.h"
-
-#include "vtkObjectFactory.h"
-#include "vtkShaderCodeLibrary.h"
-#include "vtkToolkits.h" // for VTK_MATERIALS_DIRS.
-#include "vtkXMLDataElement.h"
-
-#include <vtksys/SystemTools.hxx>
-#include <assert.h>
-
-vtkStandardNewMacro(vtkXMLShader);
-vtkCxxSetObjectMacro(vtkXMLShader, SourceLibraryElement, vtkXMLDataElement);
-//-----------------------------------------------------------------------------
-vtkXMLShader::vtkXMLShader()
-  : Code(NULL),
-    RootElement(NULL),
-    SourceLibraryElement(NULL),
-    Args(NULL)
-{
-}
-
-//-----------------------------------------------------------------------------
-vtkXMLShader::~vtkXMLShader()
-{
-  if (this->RootElement)
-    {
-    this->RootElement->UnRegister(this);
-    this->RootElement = 0;
-    }
-  this->SetSourceLibraryElement(0);
-  this->SetCode(0);
-  this->CleanupArgs();
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLShader::SetRootElement(vtkXMLDataElement* root)
-{
-  vtkSetObjectBodyMacro(RootElement, vtkXMLDataElement, root);
-  this->SetCode(0);
-  this->SetSourceLibraryElement(0); // release the SourceLibrary element.
-  // Determine if this shader description uses a library.
-  if (this->RootElement)
-    {
-    switch (this->GetLocation())
-      {
-    case vtkXMLShader::LOCATION_LIBRARY:
-        {
-        const char* name = this->RootElement->GetAttribute("name");
-        this->Code = vtkShaderCodeLibrary::GetShaderCode(name);
-        // TODO: the library should be XML enclosed.
-        // For now, it's not.
-        if (!this->Code)
-          {
-          vtkErrorMacro("Failed to locate library " << name);
-          return;
-          }
-        }
-      break;
-    case vtkXMLShader::LOCATION_FILE:
-        {
-        const char* filename = this->RootElement->GetAttribute("location");
-        char* fullpath = this->LocateFile(filename);
-        if (!fullpath)
-          {
-          vtkErrorMacro("Failed to locate file " << filename);
-          return;
-          }
-        this->ReadCodeFromFile(fullpath);
-        delete [] fullpath;
-        }
-      break;
-      }
-    }
-}
-
-//-----------------------------------------------------------------------------
-// Note that this method allocates a new string which must be deleted by
-// the caller.
-char* vtkXMLShader::LocateFile(const char* filename)
-{
-  if(!filename)
-    {
-    return NULL;
-    }
-
-  // if filename is absolute path, return the same.
-  if (vtksys::SystemTools::FileExists(filename))
-    {
-    return vtksys::SystemTools::DuplicateString(filename);
-    }
-
-  // Fetch any runtime defined user paths for materials
-  std::vector<std::string> paths;
-  std::string userpaths;
-  vtksys::SystemTools::GetEnv("USER_MATERIALS_DIRS", userpaths);
-  if (userpaths.size()>0)
-    {
-    vtksys::SystemTools::Split(userpaths.c_str(), paths, ';');
-    }
-
-#ifdef VTK_MATERIALS_DIRS
-  // search through the default paths to locate the file.
-  vtksys::SystemTools::Split(VTK_MATERIALS_DIRS, paths, ';');
-#endif
-  for (unsigned int i =0; i < paths.size(); i++)
-    {
-    std::string path = paths[i];
-    if (path.size() == 0)
-      {
-      continue;
-      }
-    vtksys::SystemTools::ConvertToUnixSlashes(path);
-    if (path[path.size()-1] != '/')
-      {
-      path += "/";
-      }
-    path += filename;
-    if (vtksys::SystemTools::FileExists(path.c_str()))
-      {
-      return vtksys::SystemTools::DuplicateString(path.c_str());
-      }
-    }
-  return NULL;
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLShader::ReadCodeFromFile(const char* filepath)
-{
-  // Delete the existing code first. If 'filepath' doesn't exist,
-  // default to standard rendering.
-  if (this->Code)
-    {
-    delete [] this->Code;
-    this->Code = 0;
-    }
-
-  ifstream ifp;
-  ifp.open(filepath, ios::binary);
-  if (!ifp)
-    {
-    vtkErrorMacro("Failed to open file " << filepath);
-    return;
-    }
-
-  // determine the length of the file.
-  long length;
-  ifp.seekg(0, ios::end);
-  length = ifp.tellg();
-  ifp.seekg(0, ios::beg);
-
-  // Allocate for the file and the null terminator.
-  this->Code = new char[length+1];
-  ifp.read(this->Code, length);
-
-  // See how many characters were actually read. On Windows, CRLF line endings
-  // are read as a single char, so the number of read bytes will be fewer than
-  // the number of bytes reported in the file size query above.
-  long charsRead = ifp.gcount();
-  ifp.close();
-
-   // Null terminate the string so GL doesn't get confused.
-  this->Code[charsRead] = '\0';
-}
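
The deleted ReadCodeFromFile above sizes a heap buffer from tellg(), reads the file, and null-terminates at the number of characters gcount() reports. A shorter sketch of the same read-whole-file idea using std::string; this is an illustrative alternative, not the code being removed:

    #include <fstream>
    #include <iterator>
    #include <string>

    // Illustrative alternative: read the whole shader source into a std::string,
    // which stays NUL-safe when later handed on via c_str().
    std::string ReadShaderSource(const char* path)
    {
      std::ifstream in(path, std::ios::binary);
      return std::string(std::istreambuf_iterator<char>(in),
                         std::istreambuf_iterator<char>());
    }
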
-//-----------------------------------------------------------------------------
-int vtkXMLShader::GetLanguage()
-{
-  if (this->RootElement)
-    {
-    const char* language = this->RootElement->GetAttribute("language");
-    if (!language)
-      {
-      vtkErrorMacro("Shader description missing Language attribute.");
-      }
-    else if (strcmp(language, "Cg") == 0)
-      {
-      return vtkXMLShader::LANGUAGE_CG;
-      }
-    else if (strcmp(language, "GLSL") == 0)
-      {
-      return vtkXMLShader::LANGUAGE_GLSL;
-      }
-    }
-  return vtkXMLShader::LANGUAGE_NONE;
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLShader::GetScope()
-{
-  if (this->RootElement)
-    {
-    const char* scope = this->RootElement->GetAttribute("scope");
-    if (!scope)
-      {
-      vtkErrorMacro("Shader description missing \"scope\" attribute.");
-      }
-    else if (strcmp(scope, "Vertex") == 0)
-      {
-      return vtkXMLShader::SCOPE_VERTEX;
-      }
-    else if (strcmp(scope, "Fragment") == 0)
-      {
-      return vtkXMLShader::SCOPE_FRAGMENT;
-      }
-    }
-  return vtkXMLShader::SCOPE_NONE;
-}
-
-//-----------------------------------------------------------------------------
-int vtkXMLShader::GetLocation()
-{
-  if (this->RootElement)
-    {
-    const char* loc= this->RootElement->GetAttribute("location");
-    if (!loc)
-      {
-      vtkErrorMacro("Shader description missing 'location' attribute.");
-      }
-    else if (strcmp(loc, "Inline") == 0)
-      {
-      return vtkXMLShader::LOCATION_INLINE;
-      }
-    else if (strcmp(loc, "Library") == 0)
-      {
-      return vtkXMLShader::LOCATION_LIBRARY;
-      }
-    else
-      {
-      // assume it's a filename.
-      return vtkXMLShader::LOCATION_FILE;
-      }
-    }
-  return vtkXMLShader::LOCATION_NONE;
-}
-
-// ----------------------------------------------------------------------------
-// \post valid_result: result==1 || result==2
-int vtkXMLShader::GetStyle()
-{
-  int result=1;
-  if(this->RootElement)
-    {
-    const char *loc=this->RootElement->GetAttribute("style");
-    if(loc==0)
-      {
-      // fine. this attribute is optional.
-      }
-    else
-      {
-      if(strcmp(loc,"1")==0)
-        {
-        // fine. default value.
-        }
-      else
-        {
-        if(strcmp(loc,"2")==0)
-          {
-          result=2; // new style
-          }
-        else
-          {
-          vtkErrorMacro(<<"style number not supported. Expect 1 or 2. We force it to be 1.");
-          }
-        }
-      }
-    }
-
-  assert("post valid_result" && (result==1 || result==2) );
-  return result;
-}
-
-//-----------------------------------------------------------------------------
-const char* vtkXMLShader::GetName()
-{
-  return (this->RootElement)? this->RootElement->GetAttribute("name") : 0;
-}
-
-//-----------------------------------------------------------------------------
-const char* vtkXMLShader::GetEntry()
-{
-  return (this->RootElement)? this->RootElement->GetAttribute("entry") : 0;
-}
-
-//-----------------------------------------------------------------------------
-const char** vtkXMLShader::GetArgs()
-{
-  this->CleanupArgs();
-  if (!this->RootElement || !this->RootElement->GetAttribute("args"))
-    {
-    return 0;
-    }
-
-  std::vector<std::string> args;
-  vtksys::SystemTools::Split(this->RootElement->GetAttribute("args"), args, ' ');
-
-  int i;
-  int size = static_cast<int>(args.size());
-  if (size == 0)
-    {
-    return 0;
-    }
-  this->Args = new char*[size+1];
-  for (i=0; i < size; i++)
-    {
-    this->Args[i] = vtksys::SystemTools::DuplicateString(args[i].c_str());
-    }
-  this->Args[size] = 0;
-  return const_cast<const char**>(this->Args);
-}
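
The deleted GetArgs above packages the space-separated "args" attribute as an argv-style array: one duplicated C string per token plus a terminating null pointer, which is the sentinel CleanupArgs walks to. A standalone sketch of that packaging pattern; std::strcpy stands in for vtksys::SystemTools::DuplicateString:

    #include <cstring>
    #include <string>
    #include <vector>

    // Build a null-terminated char** from tokens; the caller deletes each entry and
    // then the array itself, mirroring the CleanupArgs loop above.
    char** BuildArgArray(const std::vector<std::string>& tokens)
    {
      char** args = new char*[tokens.size() + 1];
      for (size_t i = 0; i < tokens.size(); ++i)
        {
        args[i] = new char[tokens[i].size() + 1];
        std::strcpy(args[i], tokens[i].c_str());
        }
      args[tokens.size()] = 0;  // sentinel marking the end of the array
      return args;
    }
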
-
-//-----------------------------------------------------------------------------
-const char* vtkXMLShader::GetCode()
-{
-  switch(this->GetLocation())
-    {
-  case vtkXMLShader::LOCATION_INLINE:
-    return this->RootElement->GetCharacterData();
-    break;
-  case vtkXMLShader::LOCATION_LIBRARY:
-    // until the ShaderCode library starts providing XMLs, we just return the code.
-    return this->Code;
-    break;
-  case vtkXMLShader::LOCATION_FILE:
-    return this->Code;
-    break;
-    }
-  return 0;
-}
-
-
-//-----------------------------------------------------------------------------
-void vtkXMLShader::CleanupArgs()
-{
-  if (this->Args)
-    {
-    char** a = this->Args;
-    while (*a)
-      {
-      delete [] (*a);
-      a++;
-      }
-    delete [] this->Args;
-    this->Args = 0;
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkXMLShader::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-  os << indent << "Name: " << (this->GetName()? this->GetName() : "(none)")
-                                                                    << endl;
-  os << indent << "Scope: ";
-  switch(this->GetScope())
-    {
-  case SCOPE_NONE:
-    os << "None";
-    break;
-  case SCOPE_MIXED:
-    os << "Mixed";
-    break;
-  case SCOPE_VERTEX:
-    os << "Vertex";
-    break;
-  case SCOPE_FRAGMENT:
-    os << "Fragment";
-    break;
-    }
-  os << endl;
-
-  os << indent << "Language: ";
-  switch (this->GetLanguage())
-    {
-  case LANGUAGE_NONE:
-    os << "None";
-    break;
-  case LANGUAGE_MIXED:
-    os << "Mixed";
-    break;
-  case LANGUAGE_CG:
-    os << "Cg";
-    break;
-  case LANGUAGE_GLSL:
-    os << "GLSL";
-    }
-  os << endl;
-
-  os << indent << "Location: ";
-  switch (this->GetLocation())
-    {
-  case LOCATION_NONE:
-    os << "None";
-    break;
-  case LOCATION_INLINE:
-    os << "Inline";
-    break;
-  case LOCATION_FILE:
-    os << "(loaded from a source file)";
-    break;
-  case LOCATION_LIBRARY:
-    os << "Library";
-    break;
-    }
-  os << endl;
-
-  os << indent << "Entry: "
-    <<  (this->GetEntry()? this->GetEntry() : "(none)") << endl;
-  os << indent << "Args: ";
-  const char** args = this->GetArgs();
-  if (!args)
-    {
-    os << "(none)" << endl;
-    }
-  else
-    {
-    while (*args)
-      {
-      os << indent << *args << " ";
-      args++;
-      }
-    os << endl;
-    }
-
-  os << indent << "RootElement: ";
-  if (this->RootElement)
-    {
-    os << endl;
-    this->RootElement->PrintSelf(os, indent.GetNextIndent());
-    }
-  else
-    {
-    os << "(none)" << endl;
-    }
-}
diff --git a/Rendering/Core/vtkXMLShader.h b/Rendering/Core/vtkXMLShader.h
deleted file mode 100644
index 545b131..0000000
--- a/Rendering/Core/vtkXMLShader.h
+++ /dev/null
@@ -1,137 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkXMLShader.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkXMLShader - encapsulates a Shader XML description.
-// .SECTION Description
-// vtkXMLShader encapsulates the XML description for a Shader.
-// It provides convenient access to various attributes/properties
-// of a shader.
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkXMLShader_h
-#define __vtkXMLShader_h
-
-#include "vtkRenderingCoreModule.h" // For export macro
-#include "vtkObject.h"
-
-class vtkXMLDataElement;
-
-class VTKRENDERINGCORE_EXPORT vtkXMLShader : public vtkObject
-{
-public:
-  static vtkXMLShader* New();
-  vtkTypeMacro(vtkXMLShader, vtkObject);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Get/Set the XML root element that describes this shader.
-  vtkGetObjectMacro(RootElement, vtkXMLDataElement);
-  void SetRootElement(vtkXMLDataElement*);
-
-  // Description:
-  // Returns the shader's language as defined in the XML description.
-  int GetLanguage();
-
-  // Description:
-  // Returns the type of the shader as defined in the XML description.
-  int GetScope();
-
-  // Description:
-  // Returns the location of the shader as defined in the XML description.
-  int GetLocation();
-
-  // Description:
-  // Returns the style of the shader as optionally defined in the XML
-  // description. If not present, default style is 1. "style=2" means it is
-  // a shader without a main(). In style 2, the "main" function for the vertex
-  // shader part is void propFuncVS(void), the main function for the fragment
-  // shader part is void propFuncFS(). This is useful when combining a shader
-  // at the actor level and a shader defined at the renderer level, like
-  // the depth peeling pass.
-  // \post valid_result: result==1 || result==2
-  int GetStyle();
-
-  // Description:
-  // Get the name of the Shader.
-  const char* GetName();
-
-  // Description:
-  // Get the entry point to the shader code as defined in the XML.
-  const char* GetEntry();
-
-  // Description:
-  // Get the shader code.
-  const char* GetCode();
-
-  // Description:
-  // Returns a null-terminated array of pointers to the space-separated Args
-  // defined in the XML description.
-  const char** GetArgs();
-
-  // Description:
-  // Searches the file in the VTK_MATERIALS_DIRS.
-  // Note that this allocates new memory for the string.
-  // The caller must delete it.
-  static char* LocateFile(const char* filename);
-
-//BTX
-  enum LanguageCodes
-    {
-    LANGUAGE_NONE=0,
-    LANGUAGE_MIXED,
-    LANGUAGE_CG,
-    LANGUAGE_GLSL
-    };
-
-  enum ScopeCodes
-    {
-    SCOPE_NONE=0,
-    SCOPE_MIXED,
-    SCOPE_VERTEX,
-    SCOPE_FRAGMENT
-    };
-
-  enum LocationCodes
-    {
-    LOCATION_NONE=0,
-    LOCATION_INLINE,
-    LOCATION_FILE,
-    LOCATION_LIBRARY
-    };
-//ETX
-protected:
-  vtkXMLShader();
-  ~vtkXMLShader();
-
-  // Reads the file and fills it in this->Code.
-  void ReadCodeFromFile(const char* fullpath);
-
-  char* Code; // cache for the code.
-  vtkSetStringMacro(Code);
-
-  vtkXMLDataElement* RootElement;
-  vtkXMLDataElement* SourceLibraryElement;
-  void SetSourceLibraryElement(vtkXMLDataElement*);
-
-  char** Args;
-  void CleanupArgs();
-private:
-  vtkXMLShader(const vtkXMLShader&); // Not implemented.
-  void operator=(const vtkXMLShader&); // Not implemented.
-};
-
-#endif
-
diff --git a/Rendering/FreeType/Testing/Cxx/CMakeLists.txt b/Rendering/FreeType/Testing/Cxx/CMakeLists.txt
index b6fe064..b21c5c6 100644
--- a/Rendering/FreeType/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/FreeType/Testing/Cxx/CMakeLists.txt
@@ -1,34 +1,29 @@
-# Check that matplotlib is available:
-execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "import matplotlib"
-  RESULT_VARIABLE IMPORT_MATPLOTLIB_EXITCODE
-  OUTPUT_VARIABLE IMPORT_MATPLOTLIB_OUTPUT
-  ERROR_VARIABLE IMPORT_MATPLOTLIB_ERROR
-  )
-set(MATPLOTLIB_FOUND FALSE)
-if(${IMPORT_MATPLOTLIB_EXITCODE} EQUAL 0)
-  set(MATPLOTLIB_FOUND TRUE)
-endif()
+# Check that matplotlib is available
+include(FindPythonModules)
+find_python_module(matplotlib MATPLOTLIB_FOUND)
 
 if(NOT MATPLOTLIB_FOUND)
   message(STATUS "Matplotlib not found! MathText rendering will not be available until it is installed. Disabling tests.")
 endif()
 
 # add tests that do not require data or produce vector output
-set(MyTests
-  TestMathTextFreeTypeTextRendererNoMath.cxx
-  )
+set(TestMathTextFreeTypeTextRendererNoMath_ARGS
+  DATA{../Data/Fonts/DejaVuSans.ttf}
+)
+vtk_add_test_cxx(TestMathTextFreeTypeTextRendererNoMath.cxx)
 
-if(VTK_DATA_ROOT)
-  # add tests that require data
-  set(MyTests ${MyTests}
-    TestFTStringToPath.cxx
-    )
-endif()
+set(TestFreeTypeTextMapperNoMath_ARGS DATA{../Data/Fonts/DejaVuSans.ttf})
+vtk_add_test_cxx(TestFreeTypeTextMapperNoMath.cxx)
+
+# add tests that require data
+vtk_add_test_cxx(TestFTStringToPath.cxx)
 
 if(MATPLOTLIB_FOUND)
-  set(MyTests ${MyTests}
-    TestMathTextFreeTypeTextRenderer.cxx
-    )
+  set(TestMathTextFreeTypeTextRenderer_ARGS DATA{../Data/Fonts/DejaVuSans.ttf})
+  vtk_add_test_cxx(TestMathTextFreeTypeTextRenderer.cxx)
+
+  set(TestFreeTypeTextMapper_ARGS DATA{../Data/Fonts/DejaVuSans.ttf})
+  vtk_add_test_cxx(TestFreeTypeTextMapper.cxx)
 endif()
 
 # This benchmark takes a while to run and can't fail, so disable it by default:
@@ -38,33 +33,7 @@ option(VTK_BUILD_FREETYPE_BENCHMARK
 mark_as_advanced(VTK_BUILD_FREETYPE_BENCHMARK)
 
 if(VTK_BUILD_FREETYPE_BENCHMARK)
-  set(MyTests ${MyTests} BenchmarkFreeTypeRendering.cxx)
+  vtk_add_test_cxx(BenchmarkFreeTypeRendering.cxx NO_VALID)
 endif()
 
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    if(${${TName}Error})
-      set(_error_threshold ${${TName}Error})
-    else()
-      set(_error_threshold 10)
-    endif()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Rendering/${TName}.png
-        -E ${_error_threshold})
-  else()
-    add_test(NAME FreeType-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Rendering/FreeType/Testing/Cxx/TestFreeTypeTextMapper.cxx b/Rendering/FreeType/Testing/Cxx/TestFreeTypeTextMapper.cxx
new file mode 100644
index 0000000..96b9273
--- /dev/null
+++ b/Rendering/FreeType/Testing/Cxx/TestFreeTypeTextMapper.cxx
@@ -0,0 +1,265 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestFreeTypeTextMapper.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkTextMapper.h"
+
+#include "vtkActor2D.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkOverrideInformation.h"
+#include "vtkOverrideInformationCollection.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkStdString.h"
+#include "vtkTextProperty.h"
+
+//----------------------------------------------------------------------------
+int TestFreeTypeTextMapper(int argc, char *argv[])
+{
+  if (argc < 2)
+    {
+    cerr << "Missing font filename." << endl;
+    return EXIT_FAILURE;
+    }
+
+  vtkStdString unicodeFontFile(argv[1]);
+
+  // Remove any override to the class to ensure that the actual vtkTextMapper
+  // class is being tested:
+  vtkNew<vtkOverrideInformationCollection> overrides;
+  vtkObjectFactory::GetOverrideInformation("vtkTextMapper",
+                                           overrides.GetPointer());
+  overrides->InitTraversal();
+  while (vtkOverrideInformation *override = overrides->GetNextItem())
+    {
+    if (vtkObjectFactory *factory = override->GetObjectFactory())
+      {
+      vtkObjectFactory::UnRegisterFactory(factory);
+      }
+    }
+
+  vtkNew<vtkTextMapper> nameChecker;
+  if (vtkStdString(nameChecker->GetClassName()) != "vtkTextMapper")
+    {
+    cerr << "Needed a vtkTextMapper instance, got "
+         << nameChecker->GetClassName() << " instead!" << endl;
+    return EXIT_FAILURE;
+    }
+
+  vtkStdString str = "Sample multiline\ntext rendered\nusing FreeTypeTools.";
+
+  vtkNew<vtkTextMapper> mapper1;
+  vtkNew<vtkActor2D> actor1;
+  actor1->SetMapper(mapper1.GetPointer());
+  mapper1->GetTextProperty()->SetFontSize(20);
+  mapper1->GetTextProperty()->SetColor(1.0, 0.0, 0.0);
+  mapper1->GetTextProperty()->SetJustificationToLeft();
+  mapper1->GetTextProperty()->SetVerticalJustificationToTop();
+  mapper1->GetTextProperty()->SetFontFamilyToTimes();
+  mapper1->SetInput(str.c_str());
+  actor1->SetPosition(10, 590);
+
+  vtkNew<vtkTextMapper> mapper2;
+  vtkNew<vtkActor2D> actor2;
+  actor2->SetMapper(mapper2.GetPointer());
+  mapper2->GetTextProperty()->SetFontSize(20);
+  mapper2->GetTextProperty()->SetColor(0.0, 1.0, 0.0);
+  mapper2->GetTextProperty()->SetJustificationToRight();
+  mapper2->GetTextProperty()->SetVerticalJustificationToTop();
+  mapper2->GetTextProperty()->SetFontFamilyToCourier();
+  mapper2->SetInput(str.c_str());
+  actor2->SetPosition(590, 590);
+
+  vtkNew<vtkTextMapper> mapper3;
+  vtkNew<vtkActor2D> actor3;
+  actor3->SetMapper(mapper3.GetPointer());
+  mapper3->GetTextProperty()->SetFontSize(20);
+  mapper3->GetTextProperty()->SetColor(0.0, 0.0, 1.0);
+  mapper3->GetTextProperty()->SetJustificationToLeft();
+  mapper3->GetTextProperty()->SetVerticalJustificationToBottom();
+  mapper3->GetTextProperty()->SetItalic(1);
+  mapper3->SetInput(str.c_str());
+  actor3->SetPosition(10, 10);
+
+  vtkNew<vtkTextMapper> mapper4;
+  vtkNew<vtkActor2D> actor4;
+  actor4->SetMapper(mapper4.GetPointer());
+  mapper4->GetTextProperty()->SetFontSize(20);
+  mapper4->GetTextProperty()->SetColor(0.3, 0.4, 0.5);
+  mapper4->GetTextProperty()->SetJustificationToRight();
+  mapper4->GetTextProperty()->SetVerticalJustificationToBottom();
+  mapper4->GetTextProperty()->SetBold(1);
+  mapper4->GetTextProperty()->SetShadow(1);
+  mapper4->GetTextProperty()->SetShadowOffset(-3, 2);
+  mapper4->SetInput(str.c_str());
+  actor4->SetPosition(590, 10);
+
+  vtkNew<vtkTextMapper> mapper5;
+  vtkNew<vtkActor2D> actor5;
+  actor5->SetMapper(mapper5.GetPointer());
+  mapper5->GetTextProperty()->SetFontSize(20);
+  mapper5->GetTextProperty()->SetColor(1.0, 1.0, 0.0);
+  mapper5->GetTextProperty()->SetJustificationToCentered();
+  mapper5->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper5->GetTextProperty()->SetBold(1);
+  mapper5->GetTextProperty()->SetItalic(1);
+  mapper5->GetTextProperty()->SetShadow(1);
+  mapper5->GetTextProperty()->SetShadowOffset(5, -8);
+  mapper5->SetInput(str.c_str());
+  actor5->SetPosition(300, 300);
+
+  vtkNew<vtkTextMapper> mapper6;
+  vtkNew<vtkActor2D> actor6;
+  actor6->SetMapper(mapper6.GetPointer());
+  mapper6->GetTextProperty()->SetFontSize(16);
+  mapper6->GetTextProperty()->SetColor(1.0, 0.5, 0.2);
+  mapper6->GetTextProperty()->SetJustificationToCentered();
+  mapper6->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper6->GetTextProperty()->SetOrientation(45);
+  mapper6->SetInput(str.c_str());
+  actor6->SetPosition(300, 450);
+
+  vtkNew<vtkTextMapper> mapper7;
+  vtkNew<vtkActor2D> actor7;
+  actor7->SetMapper(mapper7.GetPointer());
+  mapper7->GetTextProperty()->SetFontSize(16);
+  mapper7->GetTextProperty()->SetColor(0.5, 0.2, 1.0);
+  mapper7->GetTextProperty()->SetJustificationToLeft();
+  mapper7->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper7->GetTextProperty()->SetOrientation(45);
+  mapper7->SetInput(str.c_str());
+  actor7->SetPosition(100, 200);
+
+  vtkNew<vtkTextMapper> mapper8;
+  vtkNew<vtkActor2D> actor8;
+  actor8->SetMapper(mapper8.GetPointer());
+  mapper8->GetTextProperty()->SetFontSize(16);
+  mapper8->GetTextProperty()->SetColor(0.8, 1.0, 0.3);
+  mapper8->GetTextProperty()->SetJustificationToRight();
+  mapper8->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper8->GetTextProperty()->SetOrientation(45);
+  mapper8->SetInput(str.c_str());
+  actor8->SetPosition(500, 200);
+
+  // Mathtext tests
+
+  // Test that escaped "$" are passed through to freetype:
+  vtkNew<vtkTextMapper> mapper9;
+  vtkNew<vtkActor2D> actor9;
+  actor9->SetMapper(mapper9.GetPointer());
+  mapper9->GetTextProperty()->SetFontSize(12);
+  mapper9->GetTextProperty()->SetColor(0.2, 0.5, 1.0);
+  mapper9->SetInput("Escaped dollar signs:\n\\$10, \\$20");
+  actor9->SetPosition(100, 450);
+
+  vtkNew<vtkTextMapper> mapper10;
+  vtkNew<vtkActor2D> actor10;
+  actor10->SetMapper(mapper10.GetPointer());
+  mapper10->GetTextProperty()->SetFontSize(16);
+  mapper10->GetTextProperty()->SetColor(0.5, 0.2, 1.0);
+  mapper10->GetTextProperty()->SetJustificationToRight();
+  mapper10->GetTextProperty()->SetOrientation(45);
+  mapper10->SetInput("Test MathText $\\int_0^\\infty\\frac{2\\pi}"
+                     "{x - \\frac{z}{4}}\\,dx$");
+  actor10->SetPosition(590, 300);
+
+  // Invalid latex markup -- should fallback to freetype.
+  vtkNew<vtkTextMapper> mapper11;
+  vtkNew<vtkActor2D> actor11;
+  actor11->SetMapper(mapper11.GetPointer());
+  mapper11->GetTextProperty()->SetFontSize(15);
+  mapper11->GetTextProperty()->SetColor(1.0, 0.5, 0.2);
+  mapper11->SetInput("Test FreeType fallback:\n$\\asdf$");
+  actor11->SetPosition(10, 350);
+
+  // Both $...$ and \\$
+  vtkNew<vtkTextMapper> mapper12;
+  vtkNew<vtkActor2D> actor12;
+  actor12->SetMapper(mapper12.GetPointer());
+  mapper12->GetTextProperty()->SetFontSize(18);
+  mapper12->GetTextProperty()->SetColor(0.0, 1.0, 0.7);
+  mapper12->SetInput("Test MathText '\\$' $\\$\\sqrt[3]{8}$");
+  actor12->SetPosition(10, 300);
+
+  // $...$ without other text.
+  vtkNew<vtkTextMapper> mapper13;
+  vtkNew<vtkActor2D> actor13;
+  actor13->SetMapper(mapper13.GetPointer());
+  mapper13->GetTextProperty()->SetFontSize(18);
+  mapper13->GetTextProperty()->SetColor(0.2, 1.0, 1.0);
+  mapper13->SetInput("$A = \\pi r^2$");
+  actor13->SetPosition(10, 250);
+
+  // Numbers, using Courier; text that gets 'cut off'
+  vtkNew<vtkTextMapper> mapper14;
+  vtkNew<vtkActor2D> actor14;
+  actor14->SetMapper(mapper14.GetPointer());
+  mapper14->GetTextProperty()->SetFontSize(21);
+  mapper14->GetTextProperty()->SetColor(1.0, 0.0, 0.0);
+  mapper14->GetTextProperty()->SetJustificationToCentered();
+  mapper14->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper14->GetTextProperty()->SetBold(1);
+  mapper14->GetTextProperty()->SetItalic(1);
+  mapper14->GetTextProperty()->SetFontFamilyToCourier();
+  mapper14->SetInput("4.0");
+  actor14->SetPosition(500, 400);
+
+  // UTF-8 freetype handling:
+  vtkNew<vtkTextMapper> mapper15;
+  vtkNew<vtkActor2D> actor15;
+  actor15->SetMapper(mapper15.GetPointer());
+  mapper15->GetTextProperty()->SetFontFile(unicodeFontFile.c_str());
+  mapper15->GetTextProperty()->SetFontFamily(VTK_FONT_FILE);
+  mapper15->GetTextProperty()->SetJustificationToCentered();
+  mapper15->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper15->GetTextProperty()->SetFontSize(18);
+  mapper15->GetTextProperty()->SetColor(0.0, 1.0, 0.7);
+  mapper15->SetInput("UTF-8 FreeType: \xce\xa8\xd2\x94\xd2\x96\xd1\x84\xd2\xbe");
+  actor15->SetPosition(300, 110);
+
+  // Boring rendering setup....
+
+  vtkNew<vtkRenderer> ren;
+  ren->SetBackground(0.1, 0.1, 0.1);
+  vtkNew<vtkRenderWindow> win;
+  win->SetSize(600, 600);
+  win->AddRenderer(ren.GetPointer());
+  vtkNew<vtkRenderWindowInteractor> iren;
+  iren->SetRenderWindow(win.GetPointer());
+
+  ren->AddActor(actor1.GetPointer());
+  ren->AddActor(actor2.GetPointer());
+  ren->AddActor(actor3.GetPointer());
+  ren->AddActor(actor4.GetPointer());
+  ren->AddActor(actor5.GetPointer());
+  ren->AddActor(actor6.GetPointer());
+  ren->AddActor(actor7.GetPointer());
+  ren->AddActor(actor8.GetPointer());
+  ren->AddActor(actor9.GetPointer());
+  ren->AddActor(actor10.GetPointer());
+  ren->AddActor(actor11.GetPointer());
+  ren->AddActor(actor12.GetPointer());
+  ren->AddActor(actor13.GetPointer());
+  ren->AddActor(actor14.GetPointer());
+  ren->AddActor(actor15.GetPointer());
+
+  win->SetMultiSamples(0);
+  win->Render();
+  win->GetInteractor()->Initialize();
+  win->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/Rendering/FreeType/Testing/Cxx/TestFreeTypeTextMapperNoMath.cxx b/Rendering/FreeType/Testing/Cxx/TestFreeTypeTextMapperNoMath.cxx
new file mode 100644
index 0000000..c040428
--- /dev/null
+++ b/Rendering/FreeType/Testing/Cxx/TestFreeTypeTextMapperNoMath.cxx
@@ -0,0 +1,211 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestFreeTypeTextMapperNoMath.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkTextMapper.h"
+
+#include "vtkActor2D.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkOverrideInformation.h"
+#include "vtkOverrideInformationCollection.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkStdString.h"
+#include "vtkTextProperty.h"
+
+//----------------------------------------------------------------------------
+int TestFreeTypeTextMapperNoMath(int argc, char *argv[])
+{
+  if (argc < 2)
+    {
+    cerr << "Missing font filename." << endl;
+    return EXIT_FAILURE;
+    }
+
+  vtkStdString unicodeFontFile(argv[1]);
+
+  // Remove any override to the class to ensure that the actual vtkTextMapper
+  // class is being tested:
+  vtkNew<vtkOverrideInformationCollection> overrides;
+  vtkObjectFactory::GetOverrideInformation("vtkTextMapper",
+                                           overrides.GetPointer());
+  overrides->InitTraversal();
+  while (vtkOverrideInformation *override = overrides->GetNextItem())
+    {
+    if (vtkObjectFactory *factory = override->GetObjectFactory())
+      {
+      vtkObjectFactory::UnRegisterFactory(factory);
+      }
+    }
+
+  vtkNew<vtkTextMapper> nameChecker;
+  if (vtkStdString(nameChecker->GetClassName()) != "vtkTextMapper")
+    {
+    cerr << "Needed a vtkTextMapper instance, got "
+         << nameChecker->GetClassName() << " instead!" << endl;
+    return EXIT_FAILURE;
+    }
+
+  vtkStdString str = "Sample multiline\ntext rendered\nusing FreeTypeTools.";
+
+  vtkNew<vtkTextMapper> mapper1;
+  vtkNew<vtkActor2D> actor1;
+  actor1->SetMapper(mapper1.GetPointer());
+  mapper1->GetTextProperty()->SetFontSize(20);
+  mapper1->GetTextProperty()->SetColor(1.0, 0.0, 0.0);
+  mapper1->GetTextProperty()->SetJustificationToLeft();
+  mapper1->GetTextProperty()->SetVerticalJustificationToTop();
+  mapper1->GetTextProperty()->SetFontFamilyToTimes();
+  mapper1->SetInput(str.c_str());
+  actor1->SetPosition(10, 590);
+
+  vtkNew<vtkTextMapper> mapper2;
+  vtkNew<vtkActor2D> actor2;
+  actor2->SetMapper(mapper2.GetPointer());
+  mapper2->GetTextProperty()->SetFontSize(20);
+  mapper2->GetTextProperty()->SetColor(0.0, 1.0, 0.0);
+  mapper2->GetTextProperty()->SetJustificationToRight();
+  mapper2->GetTextProperty()->SetVerticalJustificationToTop();
+  mapper2->GetTextProperty()->SetFontFamilyToCourier();
+  mapper2->SetInput(str.c_str());
+  actor2->SetPosition(590, 590);
+
+  vtkNew<vtkTextMapper> mapper3;
+  vtkNew<vtkActor2D> actor3;
+  actor3->SetMapper(mapper3.GetPointer());
+  mapper3->GetTextProperty()->SetFontSize(20);
+  mapper3->GetTextProperty()->SetColor(0.0, 0.0, 1.0);
+  mapper3->GetTextProperty()->SetJustificationToLeft();
+  mapper3->GetTextProperty()->SetVerticalJustificationToBottom();
+  mapper3->GetTextProperty()->SetItalic(1);
+  mapper3->SetInput(str.c_str());
+  actor3->SetPosition(10, 10);
+
+  vtkNew<vtkTextMapper> mapper4;
+  vtkNew<vtkActor2D> actor4;
+  actor4->SetMapper(mapper4.GetPointer());
+  mapper4->GetTextProperty()->SetFontSize(20);
+  mapper4->GetTextProperty()->SetColor(0.3, 0.4, 0.5);
+  mapper4->GetTextProperty()->SetJustificationToRight();
+  mapper4->GetTextProperty()->SetVerticalJustificationToBottom();
+  mapper4->GetTextProperty()->SetBold(1);
+  mapper4->GetTextProperty()->SetShadow(1);
+  mapper4->GetTextProperty()->SetShadowOffset(-3, 2);
+  mapper4->SetInput(str.c_str());
+  actor4->SetPosition(590, 10);
+
+  vtkNew<vtkTextMapper> mapper5;
+  vtkNew<vtkActor2D> actor5;
+  actor5->SetMapper(mapper5.GetPointer());
+  mapper5->GetTextProperty()->SetFontSize(20);
+  mapper5->GetTextProperty()->SetColor(1.0, 1.0, 0.0);
+  mapper5->GetTextProperty()->SetJustificationToCentered();
+  mapper5->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper5->GetTextProperty()->SetBold(1);
+  mapper5->GetTextProperty()->SetItalic(1);
+  mapper5->GetTextProperty()->SetShadow(1);
+  mapper5->GetTextProperty()->SetShadowOffset(5, -8);
+  mapper5->SetInput(str.c_str());
+  actor5->SetPosition(300, 300);
+
+  vtkNew<vtkTextMapper> mapper6;
+  vtkNew<vtkActor2D> actor6;
+  actor6->SetMapper(mapper6.GetPointer());
+  mapper6->GetTextProperty()->SetFontSize(16);
+  mapper6->GetTextProperty()->SetColor(1.0, 0.5, 0.2);
+  mapper6->GetTextProperty()->SetJustificationToCentered();
+  mapper6->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper6->GetTextProperty()->SetOrientation(45);
+  mapper6->SetInput(str.c_str());
+  actor6->SetPosition(300, 450);
+
+  vtkNew<vtkTextMapper> mapper7;
+  vtkNew<vtkActor2D> actor7;
+  actor7->SetMapper(mapper7.GetPointer());
+  mapper7->GetTextProperty()->SetFontSize(16);
+  mapper7->GetTextProperty()->SetColor(0.5, 0.2, 1.0);
+  mapper7->GetTextProperty()->SetJustificationToLeft();
+  mapper7->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper7->GetTextProperty()->SetOrientation(45);
+  mapper7->SetInput(str.c_str());
+  actor7->SetPosition(100, 200);
+
+  vtkNew<vtkTextMapper> mapper8;
+  vtkNew<vtkActor2D> actor8;
+  actor8->SetMapper(mapper8.GetPointer());
+  mapper8->GetTextProperty()->SetFontSize(16);
+  mapper8->GetTextProperty()->SetColor(0.8, 1.0, 0.3);
+  mapper8->GetTextProperty()->SetJustificationToRight();
+  mapper8->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper8->GetTextProperty()->SetOrientation(45);
+  mapper8->SetInput(str.c_str());
+  actor8->SetPosition(500, 200);
+
+  // Numbers, using Courier; text that gets 'cut off'
+  vtkNew<vtkTextMapper> mapper9;
+  vtkNew<vtkActor2D> actor9;
+  actor9->SetMapper(mapper9.GetPointer());
+  mapper9->GetTextProperty()->SetFontSize(21);
+  mapper9->GetTextProperty()->SetColor(1.0, 0.0, 0.0);
+  mapper9->GetTextProperty()->SetJustificationToCentered();
+  mapper9->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper9->GetTextProperty()->SetBold(1);
+  mapper9->GetTextProperty()->SetItalic(1);
+  mapper9->GetTextProperty()->SetFontFamilyToCourier();
+  mapper9->SetInput("4.0");
+  actor9->SetPosition(500, 400);
+
+  // UTF-8 freetype handling:
+  vtkNew<vtkTextMapper> mapper10;
+  vtkNew<vtkActor2D> actor10;
+  actor10->SetMapper(mapper10.GetPointer());
+  mapper10->GetTextProperty()->SetFontFile(unicodeFontFile.c_str());
+  mapper10->GetTextProperty()->SetFontFamily(VTK_FONT_FILE);
+  mapper10->GetTextProperty()->SetJustificationToCentered();
+  mapper10->GetTextProperty()->SetVerticalJustificationToCentered();
+  mapper10->GetTextProperty()->SetFontSize(18);
+  mapper10->GetTextProperty()->SetColor(0.0, 1.0, 0.7);
+  mapper10->SetInput("UTF-8 FreeType: \xce\xa8\xd2\x94\xd2\x96\xd1\x84\xd2\xbe");
+  actor10->SetPosition(300, 110);
+
+  // Boring rendering setup....
+
+  vtkNew<vtkRenderer> ren;
+  ren->SetBackground(0.1, 0.1, 0.1);
+  vtkNew<vtkRenderWindow> win;
+  win->SetSize(600, 600);
+  win->AddRenderer(ren.GetPointer());
+  vtkNew<vtkRenderWindowInteractor> iren;
+  iren->SetRenderWindow(win.GetPointer());
+
+  ren->AddActor(actor1.GetPointer());
+  ren->AddActor(actor2.GetPointer());
+  ren->AddActor(actor3.GetPointer());
+  ren->AddActor(actor4.GetPointer());
+  ren->AddActor(actor5.GetPointer());
+  ren->AddActor(actor6.GetPointer());
+  ren->AddActor(actor7.GetPointer());
+  ren->AddActor(actor8.GetPointer());
+  ren->AddActor(actor9.GetPointer());
+  ren->AddActor(actor10.GetPointer());
+
+  win->SetMultiSamples(0);
+  win->Render();
+  win->GetInteractor()->Initialize();
+  win->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/Rendering/FreeType/Testing/Cxx/TestMathTextFreeTypeTextRenderer.cxx b/Rendering/FreeType/Testing/Cxx/TestMathTextFreeTypeTextRenderer.cxx
index 9de64e3..82289e0 100644
--- a/Rendering/FreeType/Testing/Cxx/TestMathTextFreeTypeTextRenderer.cxx
+++ b/Rendering/FreeType/Testing/Cxx/TestMathTextFreeTypeTextRenderer.cxx
@@ -24,10 +24,19 @@
 #include "vtkTextProperty.h"
 
 #include <iostream>
+#include <string>
 
 //----------------------------------------------------------------------------
-int TestMathTextFreeTypeTextRenderer(int , char *[])
+int TestMathTextFreeTypeTextRenderer(int argc, char *argv[])
 {
+  if (argc < 2)
+    {
+    cerr << "Missing font filename." << endl;
+    return EXIT_FAILURE;
+    }
+
+  std::string unicodeFontFile(argv[1]);
+
   vtkNew<vtkTextRenderer> tren;
   if (tren.GetPointer() == NULL)
     {
@@ -172,6 +181,17 @@ int TestMathTextFreeTypeTextRenderer(int , char *[])
   actor14->SetInput("4.0");
   actor14->SetPosition(500, 400);
 
+  // UTF-8 freetype handling:
+  vtkNew<vtkTextActor> actor15;
+  actor15->GetTextProperty()->SetFontFamily(VTK_FONT_FILE);
+  actor15->GetTextProperty()->SetFontFile(unicodeFontFile.c_str());
+  actor15->GetTextProperty()->SetJustificationToCentered();
+  actor15->GetTextProperty()->SetVerticalJustificationToCentered();
+  actor15->GetTextProperty()->SetFontSize(18);
+  actor15->GetTextProperty()->SetColor(0.0, 1.0, 0.7);
+  actor15->SetInput("UTF-8 FreeType: \xce\xa8\xd2\x94\xd2\x96\xd1\x84\xd2\xbe");
+  actor15->SetPosition(300, 110);
+
   // Boring rendering setup....
 
   vtkNew<vtkRenderer> ren;
@@ -196,6 +216,7 @@ int TestMathTextFreeTypeTextRenderer(int , char *[])
   ren->AddActor(actor12.GetPointer());
   ren->AddActor(actor13.GetPointer());
   ren->AddActor(actor14.GetPointer());
+  ren->AddActor(actor15.GetPointer());
 
   win->SetMultiSamples(0);
   win->Render();
diff --git a/Rendering/FreeType/Testing/Cxx/TestMathTextFreeTypeTextRendererNoMath.cxx b/Rendering/FreeType/Testing/Cxx/TestMathTextFreeTypeTextRendererNoMath.cxx
index 58771df..0c54676 100644
--- a/Rendering/FreeType/Testing/Cxx/TestMathTextFreeTypeTextRendererNoMath.cxx
+++ b/Rendering/FreeType/Testing/Cxx/TestMathTextFreeTypeTextRendererNoMath.cxx
@@ -24,10 +24,19 @@
 #include "vtkTextProperty.h"
 
 #include <iostream>
+#include <string>
 
 //----------------------------------------------------------------------------
-int TestMathTextFreeTypeTextRendererNoMath(int , char *[])
+int TestMathTextFreeTypeTextRendererNoMath(int argc, char *argv[])
 {
+  if (argc < 2)
+    {
+    cerr << "Missing font filename." << endl;
+    return EXIT_FAILURE;
+    }
+
+  std::string unicodeFontFile(argv[1]);
+
   vtkNew<vtkTextRenderer> tren;
   if (tren.GetPointer() == NULL)
     {
@@ -133,6 +142,17 @@ int TestMathTextFreeTypeTextRendererNoMath(int , char *[])
   actor9->SetInput("4.0");
   actor9->SetPosition(500, 400);
 
+  // UTF-8 freetype handling:
+  vtkNew<vtkTextActor> actor10;
+  actor10->GetTextProperty()->SetFontFamily(VTK_FONT_FILE);
+  actor10->GetTextProperty()->SetFontFile(unicodeFontFile.c_str());
+  actor10->GetTextProperty()->SetJustificationToCentered();
+  actor10->GetTextProperty()->SetVerticalJustificationToCentered();
+  actor10->GetTextProperty()->SetFontSize(18);
+  actor10->GetTextProperty()->SetColor(0.0, 1.0, 0.7);
+  actor10->SetInput("UTF-8 FreeType: \xce\xa8\xd2\x94\xd2\x96\xd1\x84\xd2\xbe");
+  actor10->SetPosition(300, 110);
+
   // Boring rendering setup....
 
   vtkNew<vtkRenderer> ren;
@@ -152,6 +172,7 @@ int TestMathTextFreeTypeTextRendererNoMath(int , char *[])
   ren->AddActor(actor7.GetPointer());
   ren->AddActor(actor8.GetPointer());
   ren->AddActor(actor9.GetPointer());
+  ren->AddActor(actor10.GetPointer());
 
   win->SetMultiSamples(0);
   win->Render();
diff --git a/Rendering/FreeType/Testing/Data/Baseline/OverlayTextOnImage.png.md5 b/Rendering/FreeType/Testing/Data/Baseline/OverlayTextOnImage.png.md5
new file mode 100644
index 0000000..5e43195
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Baseline/OverlayTextOnImage.png.md5
@@ -0,0 +1 @@
+bc24a92b1b399a114e9cea45f49d29b0
diff --git a/Rendering/FreeType/Testing/Data/Baseline/TestFTStringToPath.png.md5 b/Rendering/FreeType/Testing/Data/Baseline/TestFTStringToPath.png.md5
new file mode 100644
index 0000000..a453db7
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Baseline/TestFTStringToPath.png.md5
@@ -0,0 +1 @@
+584aa6d7079fad86d9f8a48a58529e2e
diff --git a/Rendering/FreeType/Testing/Data/Baseline/TestFreeTypeTextMapper.png.md5 b/Rendering/FreeType/Testing/Data/Baseline/TestFreeTypeTextMapper.png.md5
new file mode 100644
index 0000000..c17e879
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Baseline/TestFreeTypeTextMapper.png.md5
@@ -0,0 +1 @@
+4676cc01fdf9d501f114207a19a673da
diff --git a/Rendering/FreeType/Testing/Data/Baseline/TestFreeTypeTextMapperNoMath.png.md5 b/Rendering/FreeType/Testing/Data/Baseline/TestFreeTypeTextMapperNoMath.png.md5
new file mode 100644
index 0000000..8832aa0
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Baseline/TestFreeTypeTextMapperNoMath.png.md5
@@ -0,0 +1 @@
+89b1b495e8404eab965eaa38e7b8588d
diff --git a/Rendering/FreeType/Testing/Data/Baseline/TestMathTextFreeTypeTextRenderer.png.md5 b/Rendering/FreeType/Testing/Data/Baseline/TestMathTextFreeTypeTextRenderer.png.md5
new file mode 100644
index 0000000..75fbbaf
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Baseline/TestMathTextFreeTypeTextRenderer.png.md5
@@ -0,0 +1 @@
+669acfe307b60ad48ffb9b4332a45a6f
diff --git a/Rendering/FreeType/Testing/Data/Baseline/TestMathTextFreeTypeTextRendererNoMath.png.md5 b/Rendering/FreeType/Testing/Data/Baseline/TestMathTextFreeTypeTextRendererNoMath.png.md5
new file mode 100644
index 0000000..2ea6bf7
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Baseline/TestMathTextFreeTypeTextRendererNoMath.png.md5
@@ -0,0 +1 @@
+953c107f02146513e0b17bbc90d869b9
diff --git a/Rendering/FreeType/Testing/Data/Baseline/multiLineText.png.md5 b/Rendering/FreeType/Testing/Data/Baseline/multiLineText.png.md5
new file mode 100644
index 0000000..dfeccd3
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Baseline/multiLineText.png.md5
@@ -0,0 +1 @@
+ef344564467fe2f0f4b6ccd8562c5d4b
diff --git a/Rendering/FreeType/Testing/Data/Baseline/text.png.md5 b/Rendering/FreeType/Testing/Data/Baseline/text.png.md5
new file mode 100644
index 0000000..3388c6b
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Baseline/text.png.md5
@@ -0,0 +1 @@
+ef0672d1749de3ad4effefd2ad3dad1f
diff --git a/Rendering/FreeType/Testing/Data/Fonts/DejaVuSans.ttf.md5 b/Rendering/FreeType/Testing/Data/Fonts/DejaVuSans.ttf.md5
new file mode 100644
index 0000000..09b8dd3
--- /dev/null
+++ b/Rendering/FreeType/Testing/Data/Fonts/DejaVuSans.ttf.md5
@@ -0,0 +1 @@
+eccb7a74720fc377b60d6b2110530fd9
diff --git a/Rendering/FreeType/Testing/Python/CMakeLists.txt b/Rendering/FreeType/Testing/Python/CMakeLists.txt
index 6041519..e93fba7 100644
--- a/Rendering/FreeType/Testing/Python/CMakeLists.txt
+++ b/Rendering/FreeType/Testing/Python/CMakeLists.txt
@@ -1,3 +1,3 @@
-add_test_python(OverlayTextOnImage.py Hybrid)
-add_test_python(multiLineText.py Rendering)
-add_test_python(text.py Hybrid)
+vtk_add_test_python(OverlayTextOnImage.py)
+vtk_add_test_python(multiLineText.py)
+vtk_add_test_python(text.py)
diff --git a/Rendering/FreeType/Testing/Tcl/CMakeLists.txt b/Rendering/FreeType/Testing/Tcl/CMakeLists.txt
index 8268922..91a6f2a 100644
--- a/Rendering/FreeType/Testing/Tcl/CMakeLists.txt
+++ b/Rendering/FreeType/Testing/Tcl/CMakeLists.txt
@@ -1,3 +1,3 @@
-add_test_tcl(OverlayTextOnImage Hybrid)
-add_test_tcl(text Hybrid)
-add_test_tcl(multiLineText Rendering)
+vtk_add_test_tcl(OverlayTextOnImage)
+vtk_add_test_tcl(text)
+vtk_add_test_tcl(multiLineText)
diff --git a/Rendering/FreeType/vtkFreeTypeTools.cxx b/Rendering/FreeType/vtkFreeTypeTools.cxx
index 1baab9f..d86cc74 100644
--- a/Rendering/FreeType/vtkFreeTypeTools.cxx
+++ b/Rendering/FreeType/vtkFreeTypeTools.cxx
@@ -536,7 +536,11 @@ void vtkFreeTypeTools::MapTextPropertyToId(vtkTextProperty *tprop,
   int bits = 1;
 
   // The font family is hashed into 16 bits (= 17 bits so far)
-  *id |= vtkFreeTypeTools::HashString(tprop->GetFontFamilyAsString()) << bits;
+  vtkTypeUInt16 familyHash =
+      vtkFreeTypeTools::HashString(tprop->GetFontFamily() != VTK_FONT_FILE
+                                   ? tprop->GetFontFamilyAsString()
+                                   : tprop->GetFontFile());
+  *id |= familyHash << bits;
   bits += 16;
 
   // Bold is in 1 bit (= 18 bits so far)
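
The hunk above hashes either the font family name or, when the family is VTK_FONT_FILE, the font file path into the 16-bit slot of the text-property id. A standalone sketch of the pack-a-16-bit-hash-at-a-bit-offset idea; C++11 std::hash and the plain uint64_t id are stand-ins for vtkFreeTypeTools::HashString and the real id type:

    #include <cstdint>
    #include <functional>
    #include <string>

    // Fold a string hash down to 16 bits and OR it into the id at the current bit
    // offset, then advance the offset, as MapTextPropertyToId does above.
    inline void PackFamilyHash(uint64_t& id, int& bits, const std::string& familyOrFile)
    {
      uint16_t h = static_cast<uint16_t>(std::hash<std::string>()(familyOrFile) & 0xFFFF);
      id |= static_cast<uint64_t>(h) << bits;
      bits += 16;  // the family/file hash occupies 16 bits of the id
    }
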
@@ -862,6 +866,23 @@ bool vtkFreeTypeTools::LookupFace(vtkTextProperty *tprop, FT_Library lib,
           " unavailable. Substituting Arial.");
     family = VTK_ARIAL;
     }
+  else if (family == VTK_FONT_FILE)
+    {
+    vtkDebugWithObjectMacro(tprop,
+                            << "Attempting to load font from file: "
+                            << tprop->GetFontFile());
+
+    if (FT_New_Face(lib, tprop->GetFontFile(), 0, face) == 0)
+      {
+      return true;
+      }
+
+    vtkDebugWithObjectMacro(
+          tprop,
+          << "Error loading font from file '" << tprop->GetFontFile()
+          << "'. Falling back to Arial.");
+    family = VTK_ARIAL;
+    }
 
   FT_Long length = EmbeddedFonts
     [family][tprop->GetBold()][tprop->GetItalic()].length;
@@ -1938,6 +1959,7 @@ void vtkFreeTypeTools::GetLineMetrics(T begin, T end, MetaData &metaData,
       {
       // FIXME: do something more elegant here.
       // We should render an empty rectangle to adhere to the specs...
+      vtkDebugMacro(<<"Unrecognized character: " << *begin);
       continue;
       }
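
The second hunk teaches LookupFace about VTK_FONT_FILE: try FT_New_Face on the user-supplied path and fall back to the embedded Arial faces only when that fails. A minimal FreeType-only sketch of that try-then-fall-back shape; LoadEmbeddedFallback is a placeholder for VTK's FT_New_Memory_Face path:

    #include <ft2build.h>
    #include FT_FREETYPE_H

    bool LoadEmbeddedFallback(FT_Library lib, FT_Face* face);  // placeholder, see above

    // Prefer the font file named by the text property; FT_New_Face returns 0 on success.
    bool LookupFaceSketch(FT_Library lib, const char* fontFile, FT_Face* face)
    {
      if (fontFile && FT_New_Face(lib, fontFile, 0, face) == 0)
        {
        return true;
        }
      return LoadEmbeddedFallback(lib, face);  // e.g. the embedded Arial face, as in the hunk
    }
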
 
diff --git a/Rendering/FreeType/vtkMathTextFreeTypeTextRenderer.cxx b/Rendering/FreeType/vtkMathTextFreeTypeTextRenderer.cxx
index 9ae6ebf..5b99ad3 100644
--- a/Rendering/FreeType/vtkMathTextFreeTypeTextRenderer.cxx
+++ b/Rendering/FreeType/vtkMathTextFreeTypeTextRenderer.cxx
@@ -94,7 +94,9 @@ bool vtkMathTextFreeTypeTextRenderer::GetBoundingBoxInternal(
       {
       vtkStdString cleanString(str);
       this->CleanUpFreeTypeEscapes(cleanString);
-      return this->FreeTypeTools->GetBoundingBox(tprop, cleanString, bbox);
+      // Interpret string as UTF-8, use the UTF-16 GetBoundingBox overload:
+      return this->FreeTypeTools->GetBoundingBox(
+            tprop, vtkUnicodeString::from_utf8(cleanString), bbox);
       }
     default:
       vtkDebugMacro("Unrecognized backend requested: " << backend);
@@ -199,8 +201,9 @@ bool vtkMathTextFreeTypeTextRenderer::RenderStringInternal(
       {
       vtkStdString cleanString(str);
       this->CleanUpFreeTypeEscapes(cleanString);
-      return this->FreeTypeTools->RenderString(tprop, cleanString, data,
-                                               textDims);
+      // Interpret string as UTF-8, use the UTF-16 RenderString overload:
+      return this->FreeTypeTools->RenderString(
+            tprop, vtkUnicodeString::from_utf8(cleanString), data, textDims);
       }
     default:
       vtkDebugMacro("Unrecognized backend requested: " << backend);
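
Both hunks route the FreeType backend through vtkUnicodeString, so 8-bit input is now interpreted as UTF-8 rather than Latin-1, matching the vtkTextActor.h documentation change further down. The conversion itself is a single call; a sketch assuming only the const char* overload of from_utf8:

    #include "vtkStdString.h"
    #include "vtkUnicodeString.h"

    // Interpret the cleaned 8-bit string as UTF-8 before handing it to FreeType.
    vtkUnicodeString ToUnicode(const vtkStdString& cleanString)
    {
      return vtkUnicodeString::from_utf8(cleanString.c_str());
    }
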
diff --git a/Rendering/FreeType/vtkMathTextUtilities.cxx b/Rendering/FreeType/vtkMathTextUtilities.cxx
index 7f7f27e..4667f59 100644
--- a/Rendering/FreeType/vtkMathTextUtilities.cxx
+++ b/Rendering/FreeType/vtkMathTextUtilities.cxx
@@ -27,6 +27,8 @@
 #include "vtkDebugLeaks.h"
 #endif
 
+#include <algorithm>
+
 //----------------------------------------------------------------------------
 vtkInstantiatorNewMacro(vtkMathTextUtilities)
 
diff --git a/Rendering/FreeType/vtkMathTextUtilities.h b/Rendering/FreeType/vtkMathTextUtilities.h
index 1e0a9c4..6b83817 100644
--- a/Rendering/FreeType/vtkMathTextUtilities.h
+++ b/Rendering/FreeType/vtkMathTextUtilities.h
@@ -19,7 +19,7 @@
 // vtkMatplotlib module).
 
 #ifndef __vtkMathTextUtilities_h
-#define __vtkMathTypeUtilities_h
+#define __vtkMathTextUtilities_h
 
 #include "vtkRenderingFreeTypeModule.h" // For export macro
 #include "vtkObject.h"
@@ -38,6 +38,10 @@ class VTKRENDERINGFREETYPE_EXPORT vtkMathTextUtilitiesCleanup
 public:
   vtkMathTextUtilitiesCleanup();
   ~vtkMathTextUtilitiesCleanup();
+
+private:
+  vtkMathTextUtilitiesCleanup(const vtkMathTextUtilitiesCleanup& other); // no copy constructor
+  vtkMathTextUtilitiesCleanup& operator=(const vtkMathTextUtilitiesCleanup& rhs); // no copy assignment
 };
 
 class VTKRENDERINGFREETYPE_EXPORT vtkMathTextUtilities : public vtkObject
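
The two private declarations added to vtkMathTextUtilitiesCleanup are the usual pre-C++11 way of making a class non-copyable: declare the copy constructor and copy assignment private and never define them. The idiom in isolation:

    // Copies fail to compile outside the class (private) and fail to link inside it
    // (never defined); C++11 would spell this "= delete".
    class NonCopyable
    {
    public:
      NonCopyable() {}
    private:
      NonCopyable(const NonCopyable&);             // intentionally not implemented
      NonCopyable& operator=(const NonCopyable&);  // intentionally not implemented
    };
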
diff --git a/Rendering/FreeType/vtkTextActor.cxx b/Rendering/FreeType/vtkTextActor.cxx
index 0670666..a3243d3 100644
--- a/Rendering/FreeType/vtkTextActor.cxx
+++ b/Rendering/FreeType/vtkTextActor.cxx
@@ -30,6 +30,8 @@
 #include "vtkTextRenderer.h"
 #include "vtkRenderer.h"
 
+#include <algorithm>
+
 vtkStandardNewMacro(vtkTextActor);
 vtkCxxSetObjectMacro(vtkTextActor,Texture,vtkTexture);
 
diff --git a/Rendering/FreeType/vtkTextActor.h b/Rendering/FreeType/vtkTextActor.h
index bb75fbc..eeffe1e 100644
--- a/Rendering/FreeType/vtkTextActor.h
+++ b/Rendering/FreeType/vtkTextActor.h
@@ -68,7 +68,7 @@ public:
   // Description:
   // Set the text string to be displayed. "\n" is recognized
   // as a carriage return/linefeed (line separator).
-  // The characters must be in the ISO-8859-1 encoding.
+  // The characters must be in the UTF-8 encoding.
   // Convenience method to the underlying mapper
   void SetInput(const char *inputString);
   char *GetInput();
diff --git a/Rendering/FreeType/vtkTextActor3D.cxx b/Rendering/FreeType/vtkTextActor3D.cxx
index 0343409..54a85ae 100644
--- a/Rendering/FreeType/vtkTextActor3D.cxx
+++ b/Rendering/FreeType/vtkTextActor3D.cxx
@@ -90,7 +90,14 @@ double* vtkTextActor3D::GetBounds()
     // that we haven't rendered yet, so we have to make sure our bounds
     // are up to date so that we don't get culled.
     this->UpdateImageActor();
-    return this->ImageActor->GetBounds();
+    double* bounds = this->ImageActor->GetBounds();
+    this->Bounds[0] = bounds[0];
+    this->Bounds[1] = bounds[1];
+    this->Bounds[2] = bounds[2];
+    this->Bounds[3] = bounds[3];
+    this->Bounds[4] = bounds[4];
+    this->Bounds[5] = bounds[5];
+    return bounds;
     }
 
   return NULL;
@@ -257,11 +264,15 @@ int vtkTextActor3D::UpdateImageActor()
       return 0;
       }
 
+    int bbox[4];
+    this->GetBoundingBox(bbox);
+
     // Associate the image data (should be up to date now) to the image actor
     if (this->ImageActor)
       {
       this->ImageActor->SetInputData(this->ImageData);
-      this->ImageActor->SetDisplayExtent(this->ImageData->GetExtent());
+      this->ImageActor->SetDisplayExtent(
+        bbox[0], bbox[1], bbox[2], bbox[3], 0, 0);
       }
 
     } // if (this->GetMTime() ...
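
GetBounds now caches the image actor's bounds in this->Bounds before returning them, so the prop reports usable bounds even when queried before a render, and UpdateImageActor clamps the display extent to the FreeType bounding box rather than the whole image extent. The six assignments could equally be written with std::copy; an illustrative equivalent:

    #include <algorithm>

    // Equivalent to the six explicit assignments in the hunk above: copy
    // xmin, xmax, ymin, ymax, zmin, zmax into the cached member.
    inline void CacheBounds(const double* actorBounds, double cachedBounds[6])
    {
      std::copy(actorBounds, actorBounds + 6, cachedBounds);
    }
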
diff --git a/Rendering/FreeType/vtkTextActor3D.h b/Rendering/FreeType/vtkTextActor3D.h
index 661be0c..ed46aed 100644
--- a/Rendering/FreeType/vtkTextActor3D.h
+++ b/Rendering/FreeType/vtkTextActor3D.h
@@ -64,8 +64,8 @@ public:
 
   // Description:
   // Get the bounds for this Prop3D as (Xmin,Xmax,Ymin,Ymax,Zmin,Zmax).
-  // These are the padded-to-power-of-two texture bounds.
   virtual double *GetBounds();
+  void GetBounds(double bounds[6]) {this->vtkProp3D::GetBounds( bounds );}
 
   // Description:
   // Get the Freetype-derived real bounding box for the given vtkTextProperty
diff --git a/Rendering/FreeTypeFontConfig/CMakeLists.txt b/Rendering/FreeTypeFontConfig/CMakeLists.txt
index 6bf15a5..4be7f10 100644
--- a/Rendering/FreeTypeFontConfig/CMakeLists.txt
+++ b/Rendering/FreeTypeFontConfig/CMakeLists.txt
@@ -40,4 +40,4 @@ configure_file(${VTK_CMAKE_DIR}/vtkObjectFactory.cxx.in
 
 vtk_module_library(${vtk-module} ${Module_SRCS})
 
-target_link_libraries(${vtk-module} ${FONTCONFIG_LIBRARIES})
+target_link_libraries(${vtk-module} LINK_PRIVATE ${FONTCONFIG_LIBRARIES})
diff --git a/Rendering/FreeTypeFontConfig/Testing/Cxx/CMakeLists.txt b/Rendering/FreeTypeFontConfig/Testing/Cxx/CMakeLists.txt
index 4594f44..32d50a8 100644
--- a/Rendering/FreeTypeFontConfig/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/FreeTypeFontConfig/Testing/Cxx/CMakeLists.txt
@@ -1,32 +1,2 @@
-# add tests that do not require data
-set(MyTests
-  TestSystemFontRendering
-  )
-
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    if(${${TName}Error})
-      set(_error_threshold ${${TName}Error})
-    else()
-      set(_error_threshold 10)
-    endif()
-    add_test(NAME Charts-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Rendering/${TName}.png
-        -E ${_error_threshold})
-  else()
-    add_test(NAME Charts-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_add_test_cxx(TestSystemFontRendering.cxx)
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Rendering/FreeTypeFontConfig/Testing/Cxx/TestSystemFontRendering.cxx b/Rendering/FreeTypeFontConfig/Testing/Cxx/TestSystemFontRendering.cxx
index df44fab..f7c8d4d 100644
--- a/Rendering/FreeTypeFontConfig/Testing/Cxx/TestSystemFontRendering.cxx
+++ b/Rendering/FreeTypeFontConfig/Testing/Cxx/TestSystemFontRendering.cxx
@@ -44,7 +44,7 @@ int TestSystemFontRendering( int, char * [] )
   // Set up a 2D context view, context test object and add it to the scene
   vtkNew<vtkContextView> view;
   view->GetRenderer()->SetBackground(1.0, 1.0, 1.0);
-  view->GetRenderWindow()->SetSize(520, 360);
+  view->GetRenderWindow()->SetSize(580, 360);
   vtkNew<SystemFontRenderTest> test;
   view->GetScene()->AddItem(test.GetPointer());
 
@@ -72,69 +72,72 @@ bool SystemFontRenderTest::Paint(vtkContext2D *painter)
 
   painter->GetTextProp()->SetFontFamilyToArial();
 
+  const char *testString =
+      "ABCDEFGHIJKLMNOPQRSTUVWXYZ\xce\xb1\xce\xb2\xce\xb3\xce\xb4";
+
   y -= 30;
   painter->GetTextProp()->SetBold(false);
   painter->GetTextProp()->SetItalic(false);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(false);
   painter->GetTextProp()->SetItalic(true);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(true);
   painter->GetTextProp()->SetItalic(false);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(true);
   painter->GetTextProp()->SetItalic(true);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   painter->GetTextProp()->SetFontFamilyToTimes();
 
   y -= 30;
   painter->GetTextProp()->SetBold(false);
   painter->GetTextProp()->SetItalic(false);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(false);
   painter->GetTextProp()->SetItalic(true);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(true);
   painter->GetTextProp()->SetItalic(false);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(true);
   painter->GetTextProp()->SetItalic(true);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   painter->GetTextProp()->SetFontFamilyToCourier();
 
   y -= 30;
   painter->GetTextProp()->SetBold(false);
   painter->GetTextProp()->SetItalic(false);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(false);
   painter->GetTextProp()->SetItalic(true);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(true);
   painter->GetTextProp()->SetItalic(false);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   y -= 30;
   painter->GetTextProp()->SetBold(true);
   painter->GetTextProp()->SetItalic(true);
-  painter->DrawString(5, y, "ABCDEFGHIJKLMNOPQRSTUVWXYZ");
+  painter->DrawString(5, y, testString);
 
   return true;
 }
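The "\xce\xb1" through "\xce\xb4" escapes appended to the test string above are the two-byte UTF-8 encodings of Greek alpha through delta (U+03B1..U+03B4), which is why the test now exercises non-ASCII glyph coverage. A minimal standalone sketch of that encoding rule (not part of the test itself):

#include <cstdio>

int main()
{
  // Code points in U+0080..U+07FF encode as two UTF-8 bytes:
  // 0xC0 | (cp >> 6) followed by 0x80 | (cp & 0x3F).
  for (unsigned cp = 0x03B1; cp <= 0x03B4; ++cp)   // alpha, beta, gamma, delta
    {
    unsigned byte1 = 0xC0 | (cp >> 6);             // 0xCE for all four letters
    unsigned byte2 = 0x80 | (cp & 0x3F);           // 0xB1 .. 0xB4
    std::printf("U+%04X -> \\x%02x\\x%02x\n", cp, byte1, byte2);
    }
  return 0;
}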
diff --git a/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering.png.md5 b/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering.png.md5
new file mode 100644
index 0000000..7ed67b1
--- /dev/null
+++ b/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering.png.md5
@@ -0,0 +1 @@
+4fe53d21608f32ed71048f350b230f6a
diff --git a/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering_1.png.md5 b/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering_1.png.md5
new file mode 100644
index 0000000..4178d2a
--- /dev/null
+++ b/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering_1.png.md5
@@ -0,0 +1 @@
+67787844d6ddeb1aaed8abc9c1b6501a
diff --git a/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering_2.png.md5 b/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering_2.png.md5
new file mode 100644
index 0000000..80dcbb8
--- /dev/null
+++ b/Rendering/FreeTypeFontConfig/Testing/Data/Baseline/TestSystemFontRendering_2.png.md5
@@ -0,0 +1 @@
+390085c7145df1e1ed03150d632e49ce
diff --git a/Rendering/FreeTypeFontConfig/vtkFontConfigFreeTypeTools.cxx b/Rendering/FreeTypeFontConfig/vtkFontConfigFreeTypeTools.cxx
index 1a51452..63cdbfe 100644
--- a/Rendering/FreeTypeFontConfig/vtkFontConfigFreeTypeTools.cxx
+++ b/Rendering/FreeTypeFontConfig/vtkFontConfigFreeTypeTools.cxx
@@ -46,8 +46,9 @@ vtkFontConfigFreeTypeToolsFaceRequester(FTC_FaceID face_id,
       vtkSmartPointer<vtkTextProperty>::New();
   self->MapIdToTextProperty(reinterpret_cast<intptr_t>(face_id), tprop);
 
-  bool faceIsSet = self->GetForceCompiledFonts() ?
-        false : self->LookupFaceFontConfig(tprop, lib, face);
+  bool faceIsSet =
+      self->GetForceCompiledFonts() || tprop->GetFontFamily() == VTK_FONT_FILE
+      ? false : self->LookupFaceFontConfig(tprop, lib, face);
 
   // Fall back to compiled fonts if lookup fails/compiled fonts are forced:
   if (!faceIsSet)
@@ -126,6 +127,11 @@ bool vtkFontConfigFreeTypeTools::LookupFaceFontConfig(vtkTextProperty *tprop,
   FcPatternAddInteger(pattern, FC_SLANT, slant);
   FcPatternAddBool(pattern, FC_SCALABLE, true);
 
+  // Prefer fonts that cover at least the Greek characters:
+  FcCharSet *charSet = FcCharSetCreate();
+  FcCharSetAddChar(charSet, static_cast<FcChar32>(948)); // lowercase delta
+  FcPatternAddCharSet(pattern, FC_CHARSET, charSet);
+
   // Replace common font names, e.g. arial, times, etc -> sans, serif, etc
   FcConfigSubstitute(NULL, pattern, FcMatchPattern);
 
@@ -140,7 +146,9 @@ bool vtkFontConfigFreeTypeTools::LookupFaceFontConfig(vtkTextProperty *tprop,
   if (!fontMatches || fontMatches->nfont == 0)
     {
     if (fontMatches)
+      {
       FcFontSetDestroy(fontMatches);
+      }
     return false;
     }
 
@@ -151,17 +159,29 @@ bool vtkFontConfigFreeTypeTools::LookupFaceFontConfig(vtkTextProperty *tprop,
     {
     match = fontMatches->fonts[i];
 
+    // Ensure that the match is scalable
     FcBool isScalable;
-    FcPatternGetBool(match, FC_SCALABLE, 0, &isScalable);
-    if (!isScalable)
+    if (FcPatternGetBool(match, FC_SCALABLE, 0, &isScalable) != FcResultMatch ||
+        !isScalable)
+      {
+      continue;
+      }
+
+    FcCharSet *currentFontCharSet;
+    if (FcPatternGetCharSet(match, FC_CHARSET, 0, &currentFontCharSet)
+        != FcResultMatch ||
+        FcCharSetIntersectCount(charSet, currentFontCharSet) == 0)
+      {
       continue;
-    else
-      break;
+      }
+
+    break;
     }
 
   if (!match)
     {
     FcFontSetDestroy(fontMatches);
+    FcCharSetDestroy(charSet);
     return false;
     }
 
@@ -173,14 +193,22 @@ bool vtkFontConfigFreeTypeTools::LookupFaceFontConfig(vtkTextProperty *tprop,
   FT_Error error = FT_New_Face(lib, reinterpret_cast<const char*>(filename), 0,
                                face);
 
-  if (error)
+  if (!error)
     {
-    FcFontSetDestroy(fontMatches);
-    return false;
+    vtkDebugWithObjectMacro(vtkFreeTypeTools::GetInstance(),
+                            <<"Loading system font: "
+                            << reinterpret_cast<const char*>(filename));
     }
 
+  FcCharSetDestroy(charSet);
+  charSet = NULL;
   FcFontSetDestroy(fontMatches);
   fontMatches = NULL;
 
+  if (error)
+    {
+    return false;
+    }
+
   return true;
 }
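The charset handling added above is plain fontconfig usage: attach an FcCharSet containing lowercase delta to the pattern so fonts covering Greek are preferred, then skip non-scalable matches. A minimal standalone sketch of the same lookup outside VTK (the "sans" family and the pkg-config compile line are illustrative assumptions):

// Build with: g++ sketch.cxx $(pkg-config --cflags --libs fontconfig)
#include <fontconfig/fontconfig.h>
#include <cstdio>

int main()
{
  FcInit();
  FcPattern *pattern = FcPatternCreate();
  FcPatternAddString(pattern, FC_FAMILY,
                     reinterpret_cast<const FcChar8 *>("sans"));
  FcPatternAddBool(pattern, FC_SCALABLE, FcTrue);

  // Prefer fonts whose charset contains Greek lowercase delta (U+03B4):
  FcCharSet *charSet = FcCharSetCreate();
  FcCharSetAddChar(charSet, 0x03B4);
  FcPatternAddCharSet(pattern, FC_CHARSET, charSet);

  FcConfigSubstitute(NULL, pattern, FcMatchPattern);
  FcDefaultSubstitute(pattern);

  FcResult result;
  FcPattern *match = FcFontMatch(NULL, pattern, &result);
  if (match)
    {
    FcChar8 *file = NULL;
    if (FcPatternGetString(match, FC_FILE, 0, &file) == FcResultMatch)
      {
      std::printf("Matched font file: %s\n", file);
      }
    FcPatternDestroy(match);
    }

  FcCharSetDestroy(charSet);
  FcPatternDestroy(pattern);
  FcFini();
  return 0;
}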
diff --git a/Rendering/FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.cxx b/Rendering/FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.cxx
index a456414..6654692 100644
--- a/Rendering/FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.cxx
+++ b/Rendering/FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.cxx
@@ -20,6 +20,7 @@
 #include "vtkTextProperty.h"
 #include "vtkViewport.h"
 #include "vtkWindow.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkFreeTypeUtilities.h"
 #include "vtkftglConfig.h"
@@ -32,6 +33,27 @@
 using namespace ftgl;
 #endif
 
+namespace {
+inline int GetNumberOfLinesImpl(const char *str)
+{
+  if (str == NULL || *str == '\0')
+    {
+    return 0;
+    }
+
+  int result = 1;
+  while (str != NULL)
+    {
+    if ((str = strstr(str, "\n")) != NULL)
+      {
+      result++;
+      str++; // Skip '\n'
+      }
+    }
+  return result;
+}
+}
+
 //----------------------------------------------------------------------------
 // Print debug info
 
@@ -48,11 +70,23 @@ vtkOpenGLFreeTypeTextMapper::vtkOpenGLFreeTypeTextMapper()
 {
   this->LastSize[0] = 0;
   this->LastSize[1] = 0;
+  this->TextLines = NULL;
+  this->NumberOfLines = 0;
+  this->NumberOfLinesAllocated = 0;
 }
 
 //----------------------------------------------------------------------------
 vtkOpenGLFreeTypeTextMapper::~vtkOpenGLFreeTypeTextMapper()
 {
+  if (this->TextLines != NULL)
+    {
+    for (int i=0; i < this->NumberOfLinesAllocated; i++)
+      {
+      this->TextLines[i]->Delete();
+      }
+    delete [] this->TextLines;
+    }
+
   if (this->LastWindow)
     {
     this->ReleaseGraphicsResources(this->LastWindow);
@@ -60,7 +94,104 @@ vtkOpenGLFreeTypeTextMapper::~vtkOpenGLFreeTypeTextMapper()
 }
 
 //----------------------------------------------------------------------------
-void vtkOpenGLFreeTypeTextMapper::ReleaseGraphicsResources(vtkWindow *vtkNotUsed(win))
+char *vtkOpenGLFreeTypeTextMapper::NextLine(const char *input, int lineNum)
+{
+  const char *ptr, *ptrEnd;
+  int strLen;
+  char *line;
+
+  ptr = input;
+  for (int i=0; i != lineNum; i++)
+    {
+    ptr = strstr(ptr,"\n");
+    ptr++;
+    }
+  ptrEnd = strstr(ptr,"\n");
+  if ( ptrEnd == NULL )
+    {
+    ptrEnd = strchr(ptr, '\0');
+    }
+
+  strLen = ptrEnd - ptr;
+  line = new char[strLen+1];
+  strncpy(line, ptr, strLen);
+  line[strLen] = '\0';
+
+  return line;
+}
+
+//----------------------------------------------------------------------------
+void vtkOpenGLFreeTypeTextMapper::GetMultiLineSize(vtkViewport *viewport,
+                                                   int size[])
+{
+  int i;
+  int lineSize[2];
+
+  vtkTextProperty *tprop = this->GetTextProperty();
+  if (!tprop)
+    {
+    vtkErrorMacro(<<"Need text property to get multiline size of mapper");
+    size[0] = size[1] = 0;
+    return;
+    }
+
+  lineSize[0] = lineSize[1] = size[0] = size[1] = 0;
+  for ( i=0; i < this->NumberOfLines; i++ )
+    {
+    this->TextLines[i]->GetTextProperty()->ShallowCopy(tprop);
+    this->TextLines[i]->GetSize(viewport, lineSize);
+    size[0] = (lineSize[0] > size[0] ? lineSize[0] : size[0]);
+    size[1] = (lineSize[1] > size[1] ? lineSize[1] : size[1]);
+    }
+
+  // add in the line spacing
+  this->LineSize = size[1];
+  size[1] = static_cast<int>(
+    size[1] * (1.0 + (this->NumberOfLines - 1) * tprop->GetLineSpacing()));
+}
+
+//----------------------------------------------------------------------------
+void vtkOpenGLFreeTypeTextMapper::RenderOverlayMultipleLines(
+    vtkViewport *viewport, vtkActor2D *actor)
+{
+  float offset = 0.0f;
+  int size[2];
+  // make sure LineSize is up to date
+  this->GetMultiLineSize(viewport,size);
+
+  vtkTextProperty *tprop = this->GetTextProperty();
+  if (!tprop)
+    {
+    vtkErrorMacro(<<"Need text property to render multiple lines of mapper");
+    return;
+    }
+
+  switch (tprop->GetVerticalJustification())
+    {
+    case VTK_TEXT_TOP:
+      offset = 0.0f;
+      break;
+    case VTK_TEXT_CENTERED:
+      offset = (-this->NumberOfLines + 1.0f) / 2.0f;
+      break;
+    case VTK_TEXT_BOTTOM:
+      offset = -this->NumberOfLines + 1.0f;
+      break;
+    }
+
+  for (int lineNum=0; lineNum < this->NumberOfLines; lineNum++)
+    {
+    this->TextLines[lineNum]->GetTextProperty()->ShallowCopy(tprop);
+    this->TextLines[lineNum]->GetTextProperty()->SetLineOffset
+      (tprop->GetLineOffset() +
+       static_cast<int>(this->LineSize * (lineNum + offset)
+                        * tprop->GetLineSpacing()));
+    this->TextLines[lineNum]->RenderOverlay(viewport,actor);
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkOpenGLFreeTypeTextMapper::ReleaseGraphicsResources(vtkWindow *)
 {
 #if VTK_FTTM_DEBUG
     printf("vtkOpenGLFreeTypeTextMapper::ReleaseGraphicsResources\n");
@@ -179,6 +310,8 @@ void vtkOpenGLFreeTypeTextMapper::RenderOverlay(vtkViewport* viewport,
     return;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   // Get the window information for display
 
   vtkWindow* window = viewport->GetVTKWindow();
@@ -406,10 +539,76 @@ void vtkOpenGLFreeTypeTextMapper::RenderOverlay(vtkViewport* viewport,
     glEnable(GL_LIGHTING);
     }
   glDepthFunc(depthFunc);
+
+  vtkOpenGLCheckErrorMacro("failed after RenderOverlay");
 }
 
 //----------------------------------------------------------------------------
 void vtkOpenGLFreeTypeTextMapper::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os,indent);
+  os << indent << "NumberOfLines: " << this->NumberOfLines << "\n";
+}
+
+//----------------------------------------------------------------------------
+void vtkOpenGLFreeTypeTextMapper::SetInput(const char *input)
+{
+  if ( this->Input && input && (!strcmp(this->Input,input)))
+    {
+    return;
+    }
+  delete [] this->Input;
+  if (input)
+    {
+    this->Input = new char[strlen(input)+1];
+    strcpy(this->Input,input);
+    }
+  else
+    {
+    this->Input = NULL;
+    }
+  this->Modified();
+
+  int numLines = GetNumberOfLinesImpl(input);
+
+  if ( numLines <= 1) // a line with no "\n"
+    {
+    this->NumberOfLines = numLines;
+    }
+
+  else //multiple lines
+    {
+    char *line;
+    int i;
+
+    if ( numLines > this->NumberOfLinesAllocated )
+      {
+      // delete old stuff
+      if ( this->TextLines )
+        {
+        for (i=0; i < this->NumberOfLinesAllocated; i++)
+          {
+          this->TextLines[i]->Delete();
+          }
+        delete [] this->TextLines;
+        }
+
+      // allocate new text mappers
+      this->NumberOfLinesAllocated = numLines;
+      this->TextLines = new vtkTextMapper *[numLines];
+      for (i=0; i < numLines; i++)
+        {
+        this->TextLines[i] = vtkTextMapper::New();
+        }
+      } //if we need to reallocate
+
+    // set the input strings
+    this->NumberOfLines = numLines;
+    for (i=0; i < this->NumberOfLines; i++)
+      {
+      line = this->NextLine(input, i);
+      this->TextLines[i]->SetInput( line );
+      delete [] line;
+      }
+    }
 }
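The multi-line support added above (GetNumberOfLinesImpl, NextLine, SetInput) splits the input string on '\n' and drives one vtkTextMapper per line. A minimal sketch of the same split expressed with std::string instead of raw pointer arithmetic (SplitLines is a hypothetical helper, not part of VTK):

#include <iostream>
#include <string>
#include <vector>

// Mirrors NextLine()/GetNumberOfLinesImpl() above: an empty input yields zero
// lines, otherwise one entry per '\n'-separated line.
static std::vector<std::string> SplitLines(const std::string &input)
{
  std::vector<std::string> lines;
  if (input.empty())
    {
    return lines;
    }
  std::string::size_type start = 0;
  while (true)
    {
    std::string::size_type newline = input.find('\n', start);
    if (newline == std::string::npos)
      {
      lines.push_back(input.substr(start));              // last (or only) line
      break;
      }
    lines.push_back(input.substr(start, newline - start));
    start = newline + 1;                                  // skip the '\n'
    }
  return lines;
}

int main()
{
  std::vector<std::string> lines = SplitLines("first\nsecond\nthird");
  std::cout << lines.size() << " lines\n";                // prints "3 lines"
  for (std::vector<std::string>::size_type i = 0; i < lines.size(); ++i)
    {
    std::cout << i << ": " << lines[i] << "\n";
    }
  return 0;
}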
diff --git a/Rendering/FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.h b/Rendering/FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.h
index 7833853..2d49dd1 100644
--- a/Rendering/FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.h
+++ b/Rendering/FreeTypeOpenGL/vtkOpenGLFreeTypeTextMapper.h
@@ -35,6 +35,8 @@ public:
   static vtkOpenGLFreeTypeTextMapper *New();
   virtual void PrintSelf(ostream& os, vtkIndent indent);
 
+  void SetInput(const char *inputString);
+
   // Description:
   // Actually draw the text.
   void RenderOverlay(vtkViewport* viewport, vtkActor2D* actor);
@@ -57,6 +59,16 @@ protected:
   vtkTimeStamp  SizeBuildTime;
   int LastSize[2];
   int LastLargestDescender;
+  int LineSize;
+  int NumberOfLines;
+  int NumberOfLinesAllocated;
+  vtkTextMapper **TextLines;
+
+  // Description:
+  // These functions are used to parse, process, and render multiple lines
+  char *NextLine(const char *input, int lineNum);
+  void GetMultiLineSize(vtkViewport* viewport, int size[2]);
+  void RenderOverlayMultipleLines(vtkViewport *viewport, vtkActor2D *actor);
 
 private:
   vtkOpenGLFreeTypeTextMapper(const vtkOpenGLFreeTypeTextMapper&);  // Not implemented.
diff --git a/Rendering/GL2PS/Testing/Cxx/CMakeLists.txt b/Rendering/GL2PS/Testing/Cxx/CMakeLists.txt
index 6fe74ce..f224ba9 100644
--- a/Rendering/GL2PS/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/GL2PS/Testing/Cxx/CMakeLists.txt
@@ -1,8 +1,5 @@
 # Compile the PNGCompare test, which is used to validate PNG images (read from
 # disk, not generated in VTK). The CMake/RasterizePostScript script can be used
 # to create a png image from GL2PS output.
-set(MyTests PNGCompare.cxx)
-
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
+set_property(DIRECTORY APPEND PROPERTY VTK_TEST_CXX_SOURCES PNGCompare.cxx)
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Rendering/GL2PS/module.cmake b/Rendering/GL2PS/module.cmake
index f7ff9d6..dcef707 100644
--- a/Rendering/GL2PS/module.cmake
+++ b/Rendering/GL2PS/module.cmake
@@ -2,7 +2,9 @@ vtk_module(vtkRenderingGL2PS
   TCL_NAME vtkRenderingGLtoPS
   DEPENDS
     vtkRenderingContext2D
+  PRIVATE_DEPENDS
     vtkRenderingOpenGL
+    vtkRenderingFreeType
     vtkgl2ps
   TEST_DEPENDS
     vtkTestingRendering
diff --git a/Rendering/GL2PS/vtkGL2PSContextDevice2D.cxx b/Rendering/GL2PS/vtkGL2PSContextDevice2D.cxx
index 2d8a2c6..4e39aa8 100644
--- a/Rendering/GL2PS/vtkGL2PSContextDevice2D.cxx
+++ b/Rendering/GL2PS/vtkGL2PSContextDevice2D.cxx
@@ -30,6 +30,8 @@
 
 #include "vtk_gl2ps.h"
 
+#include <sstream>
+
 //-----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkGL2PSContextDevice2D)
 
@@ -134,8 +136,6 @@ void vtkGL2PSContextDevice2D::DrawPolygon(float *points, int n)
     }
 }
 
-
-
 //-----------------------------------------------------------------------------
 void vtkGL2PSContextDevice2D::DrawEllipseWedge(float x, float y,
                                                float outRx, float outRy,
@@ -149,7 +149,7 @@ void vtkGL2PSContextDevice2D::DrawEllipseWedge(float x, float y,
 
   // The path implementation can't handle start/stop angles. Defer to the
   // superclass in this case.
-  if (fabs(startAngle) > 1e-5 || fabs(stopAngle - 360.0) > 1e-5)
+  if (std::fabs(startAngle) > 1e-5f || std::fabs(stopAngle - 360.0f) > 1e-5f)
     {
     this->Superclass::DrawEllipseWedge(x, y, outRx, outRy, inRx, inRy,
                                        startAngle, stopAngle);
@@ -158,13 +158,23 @@ void vtkGL2PSContextDevice2D::DrawEllipseWedge(float x, float y,
   vtkNew<vtkPath> path;
   this->AddEllipseToPath(path.GetPointer(), 0.f, 0.f, outRx, outRy, false);
   this->AddEllipseToPath(path.GetPointer(), 0.f, 0.f, inRx, inRy, true);
-  this->TransformPath(path.GetPointer());
 
-  double origin[3] = {x, y, 0.f};
+  std::stringstream label;
+  label << "vtkGL2PSContextDevice2D::DrawEllipseWedge("
+        << x << ", " << y << ", " << outRx << ", " << outRy << ", "
+        << inRx << ", " << inRy << ", " << startAngle << ", " << stopAngle
+        << ") path:";
+
   unsigned char color[4];
   this->Brush->GetColor(color);
 
-  vtkGL2PSUtilities::DrawPath(path.GetPointer(), origin, origin, color);
+  double rasterPos[3] = {static_cast<double>(x), static_cast<double>(y), 0.};
+
+  this->TransformPoint(x, y);
+  double windowPos[3] = {static_cast<double>(x), static_cast<double>(y), 0.};
+
+  vtkGL2PSUtilities::DrawPath(path.GetPointer(), rasterPos, windowPos, color,
+                              NULL, 0.0, -1.f, label.str().c_str());
 }
 
 //-----------------------------------------------------------------------------
@@ -194,14 +204,27 @@ void vtkGL2PSContextDevice2D::DrawEllipticArc(float x, float y,
   // Fill
   unsigned char fillColor[4];
   this->Brush->GetColor(fillColor);
-  vtkGL2PSUtilities::DrawPath(path.GetPointer(), origin, origin, fillColor);
+
+  std::stringstream label;
+  label << "vtkGL2PSContextDevice2D::DrawEllipticArc("
+        << x << ", " << y << ", " << rx << ", " << ry << ", "
+        << startAngle << ", " << stopAngle << ") fill:";
+
+  vtkGL2PSUtilities::DrawPath(path.GetPointer(), origin, origin, fillColor,
+                              NULL, 0.0, -1.f, label.str().c_str());
 
   // and stroke
   unsigned char strokeColor[4];
   this->Pen->GetColor(strokeColor);
   float strokeWidth = this->Pen->GetWidth();
+
+  label.str("");
+  label.clear();
+  label << "vtkGL2PSContextDevice2D::DrawEllipticArc("
+        << x << ", " << y << ", " << rx << ", " << ry << ", "
+        << startAngle << ", " << stopAngle << ") stroke:";
   vtkGL2PSUtilities::DrawPath(path.GetPointer(), origin, origin, strokeColor,
-                              NULL, 0.0, strokeWidth);
+                              NULL, 0.0, strokeWidth, label.str().c_str());
 }
 
 //-----------------------------------------------------------------------------
@@ -220,6 +243,7 @@ void vtkGL2PSContextDevice2D::DrawString(float *point,
   vtkGL2PSUtilities::DrawString(string.utf8_str(), this->TextProp, p);
 }
 
+//-----------------------------------------------------------------------------
 void vtkGL2PSContextDevice2D::DrawMathTextString(float apoint[],
                                                  const vtkStdString &string)
 {
@@ -256,7 +280,8 @@ void vtkGL2PSContextDevice2D::DrawMathTextString(float apoint[],
   this->TransformPath(path.GetPointer());
 
   vtkGL2PSUtilities::DrawPath(path.GetPointer(), origin, origin, color, NULL,
-                              rotateAngle);
+                              rotateAngle, -1.f,
+                              ("Pathified string: " + string).c_str());
 }
 
 //-----------------------------------------------------------------------------
@@ -300,13 +325,17 @@ void vtkGL2PSContextDevice2D::DrawCrossMarkers(bool highlight, float *points,
                                                int n, unsigned char *colors,
                                                int nc_comps)
 {
-  float delta = this->GetPen()->GetWidth() * 0.475;
-
   float oldWidth = this->Pen->GetWidth();
   unsigned char oldColor[4];
   this->Pen->GetColor(oldColor);
   int oldLineType = this->Pen->GetLineType();
 
+  float halfWidth = oldWidth * 0.5f;
+  float deltaX = halfWidth;
+  float deltaY = halfWidth;
+
+  this->TransformSize(deltaX, deltaY);
+
   if (highlight)
     {
     this->Pen->SetWidth(1.5);
@@ -345,17 +374,17 @@ void vtkGL2PSContextDevice2D::DrawCrossMarkers(bool highlight, float *points,
       }
 
     // The first line of the cross:
-    curLine[0] = point[0] + delta;
-    curLine[1] = point[1] + delta;
-    curLine[2] = point[0] - delta;
-    curLine[3] = point[1] - delta;
+    curLine[0] = point[0] + deltaX;
+    curLine[1] = point[1] + deltaY;
+    curLine[2] = point[0] - deltaX;
+    curLine[3] = point[1] - deltaY;
     this->DrawPoly(curLine, 2);
 
     // And the second:
-    curLine[0] = point[0] + delta;
-    curLine[1] = point[1] - delta;
-    curLine[2] = point[0] - delta;
-    curLine[3] = point[1] + delta;
+    curLine[0] = point[0] + deltaX;
+    curLine[1] = point[1] - deltaY;
+    curLine[2] = point[0] - deltaX;
+    curLine[3] = point[1] + deltaY;
     this->DrawPoly(curLine, 2);
     }
 
@@ -369,13 +398,17 @@ void vtkGL2PSContextDevice2D::DrawPlusMarkers(bool highlight, float *points,
                                               int n, unsigned char *colors,
                                               int nc_comps)
 {
-  float delta = this->GetPen()->GetWidth() * 0.475;
-
   float oldWidth = this->Pen->GetWidth();
   unsigned char oldColor[4];
   this->Pen->GetColor(oldColor);
   int oldLineType = this->Pen->GetLineType();
 
+  float halfWidth = oldWidth * 0.5f;
+  float deltaX = halfWidth;
+  float deltaY = halfWidth;
+
+  this->TransformSize(deltaX, deltaY);
+
   if (highlight)
     {
     this->Pen->SetWidth(1.5);
@@ -414,17 +447,17 @@ void vtkGL2PSContextDevice2D::DrawPlusMarkers(bool highlight, float *points,
       }
 
     // The first line of the plus:
-    curLine[0] = point[0] - delta;
+    curLine[0] = point[0] - deltaX;
     curLine[1] = point[1];
-    curLine[2] = point[0] + delta;
+    curLine[2] = point[0] + deltaX;
     curLine[3] = point[1];
     this->DrawPoly(curLine, 2);
 
     // And the second:
     curLine[0] = point[0];
-    curLine[1] = point[1] - delta;
+    curLine[1] = point[1] - deltaY;
     curLine[2] = point[0];
-    curLine[3] = point[1] + delta;
+    curLine[3] = point[1] + deltaY;
     this->DrawPoly(curLine, 2);
     }
 
@@ -439,13 +472,17 @@ void vtkGL2PSContextDevice2D::DrawSquareMarkers(bool /*highlight*/,
                                                 int n, unsigned char *colors,
                                                 int nc_comps)
 {
-  float delta = this->GetPen()->GetWidth() * 0.475;
-
   unsigned char oldColor[4];
   this->Brush->GetColor(oldColor);
 
   this->Brush->SetColor(this->Pen->GetColor());
 
+  float halfWidth = this->GetPen()->GetWidth() * 0.5f;
+  float deltaX = halfWidth;
+  float deltaY = halfWidth;
+
+  this->TransformSize(deltaX, deltaY);
+
   float quad[8];
   unsigned char color[4];
   for (int i = 0; i < n; ++i)
@@ -473,12 +510,12 @@ void vtkGL2PSContextDevice2D::DrawSquareMarkers(bool /*highlight*/,
       this->Brush->SetColor(color);
       }
 
-    quad[0] = point[0] - delta;
-    quad[1] = point[1] - delta;
-    quad[2] = point[0] + delta;
+    quad[0] = point[0] - deltaX;
+    quad[1] = point[1] - deltaY;
+    quad[2] = point[0] + deltaX;
     quad[3] = quad[1];
     quad[4] = quad[2];
-    quad[5] = point[1] + delta;
+    quad[5] = point[1] + deltaY;
     quad[6] = quad[0];
     quad[7] = quad[5];
 
@@ -540,13 +577,17 @@ void vtkGL2PSContextDevice2D::DrawDiamondMarkers(bool /*highlight*/,
                                                  int n, unsigned char *colors,
                                                  int nc_comps)
 {
-  float delta = this->GetPen()->GetWidth() * 0.475;
-
   unsigned char oldColor[4];
   this->Brush->GetColor(oldColor);
 
   this->Brush->SetColor(this->Pen->GetColor());
 
+  float halfWidth = this->GetPen()->GetWidth() * 0.5f;
+  float deltaX = halfWidth;
+  float deltaY = halfWidth;
+
+  this->TransformSize(deltaX, deltaY);
+
   float quad[8];
   unsigned char color[4];
   for (int i = 0; i < n; ++i)
@@ -574,14 +615,14 @@ void vtkGL2PSContextDevice2D::DrawDiamondMarkers(bool /*highlight*/,
       this->Brush->SetColor(color);
       }
 
-    quad[0] = point[0] - delta;
+    quad[0] = point[0] - deltaX;
     quad[1] = point[1];
     quad[2] = point[0];
-    quad[3] = point[1] - delta;
-    quad[4] = point[0] + delta;
+    quad[3] = point[1] - deltaY;
+    quad[4] = point[0] + deltaX;
     quad[5] = point[1];
     quad[6] = point[0];
-    quad[7] = point[1] + delta;
+    quad[7] = point[1] + deltaY;
 
     this->DrawQuad(quad,4);
     }
@@ -642,7 +683,7 @@ void vtkGL2PSContextDevice2D::AddEllipseToPath(vtkPath *path, float x, float y,
 }
 
 //-----------------------------------------------------------------------------
-void vtkGL2PSContextDevice2D::TransformPath(vtkPath *path)
+void vtkGL2PSContextDevice2D::TransformPath(vtkPath *path) const
 {
   // Transform the path with the modelview matrix:
   float modelview[16];
@@ -663,6 +704,28 @@ void vtkGL2PSContextDevice2D::TransformPath(vtkPath *path)
 }
 
 //-----------------------------------------------------------------------------
+void vtkGL2PSContextDevice2D::TransformPoint(float &x, float &y) const
+{
+  float modelview[16];
+  glGetFloatv(GL_MODELVIEW_MATRIX, modelview);
+
+  float inX = x;
+  float inY = y;
+  x = modelview[0] * inX + modelview[4] * inY + modelview[12];
+  y = modelview[1] * inX + modelview[5] * inY + modelview[13];
+}
+
+//-----------------------------------------------------------------------------
+void vtkGL2PSContextDevice2D::TransformSize(float &dx, float &dy) const
+{
+  float modelview[16];
+  glGetFloatv(GL_MODELVIEW_MATRIX, modelview);
+
+  dx /= modelview[0];
+  dy /= modelview[5];
+}
+
+//-----------------------------------------------------------------------------
 void vtkGL2PSContextDevice2D::PrintSelf(ostream &os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os, indent);
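TransformPoint() and TransformSize() above read the OpenGL modelview matrix, which glGetFloatv returns in column-major order, and apply only its 2D scale/translation part. A minimal GL-free sketch of the same arithmetic (TransformPoint2D/TransformSize2D are hypothetical stand-ins for the member functions):

#include <iostream>

// For a column-major 4x4 matrix m and point (x, y, 0, 1):
// x' = m[0]*x + m[4]*y + m[12],  y' = m[1]*x + m[5]*y + m[13].
static void TransformPoint2D(const float m[16], float &x, float &y)
{
  float inX = x;
  float inY = y;
  x = m[0] * inX + m[4] * inY + m[12];
  y = m[1] * inX + m[5] * inY + m[13];
}

// Convert a pixel-sized delta to data units by undoing the diagonal scale.
static void TransformSize2D(const float m[16], float &dx, float &dy)
{
  dx /= m[0];
  dy /= m[5];
}

int main()
{
  // Column-major modelview: scale by (2, 3), translate by (10, 20).
  float m[16] = {2, 0, 0, 0,
                 0, 3, 0, 0,
                 0, 0, 1, 0,
                 10, 20, 0, 1};

  float x = 1.f, y = 1.f;
  TransformPoint2D(m, x, y);
  std::cout << "point -> (" << x << ", " << y << ")\n";   // (12, 23)

  float dx = 6.f, dy = 6.f;
  TransformSize2D(m, dx, dy);
  std::cout << "size  -> (" << dx << ", " << dy << ")\n"; // (3, 2)
  return 0;
}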
diff --git a/Rendering/GL2PS/vtkGL2PSContextDevice2D.h b/Rendering/GL2PS/vtkGL2PSContextDevice2D.h
index 2540458..c6b2250 100644
--- a/Rendering/GL2PS/vtkGL2PSContextDevice2D.h
+++ b/Rendering/GL2PS/vtkGL2PSContextDevice2D.h
@@ -163,8 +163,15 @@ private:
                           unsigned char *colors, int nc_comps);
   void AddEllipseToPath(vtkPath *path, float x, float y, float rx, float ry,
                         bool reverse);
+
   // Transform the path using the current modelview matrix.
-  void TransformPath(vtkPath *path);
+  void TransformPath(vtkPath *path) const;
+
+  // Transform the 2D point using the current modelview matrix.
+  void TransformPoint(float &x, float &y) const;
+
+  // Transform the width and height from pixels to data units.
+  void TransformSize(float &dx, float &dy) const;
 };
 
 #endif //__vtkGL2PSContextDevice2D_h
diff --git a/Rendering/GL2PS/vtkGL2PSUtilities.cxx b/Rendering/GL2PS/vtkGL2PSUtilities.cxx
index 7e14284..685b288 100644
--- a/Rendering/GL2PS/vtkGL2PSUtilities.cxx
+++ b/Rendering/GL2PS/vtkGL2PSUtilities.cxx
@@ -94,7 +94,9 @@ void vtkGL2PSUtilities::DrawString(const char *str,
     double devicePos[3] = {pos[0], pos[1], pos[2]};
     vtkGL2PSUtilities::ProjectPoint(devicePos);
 
-    vtkGL2PSUtilities::DrawPath(path.GetPointer(), pos, devicePos, rgba);
+    vtkGL2PSUtilities::DrawPath(path.GetPointer(), pos, devicePos, rgba, NULL,
+                                0.0, -1.f, (std::string("Pathified string: ")
+                                            + str).c_str());
     }
 }
 
@@ -236,35 +238,36 @@ int vtkGL2PSUtilities::TextPropertyToGL2PSAlignment(vtkTextProperty *tprop)
 
 void vtkGL2PSUtilities::Draw3DPath(vtkPath *path, vtkMatrix4x4 *actorMatrix,
                                    double rasterPos[3],
-                                   unsigned char actorColor[4])
+                                   unsigned char actorColor[4],
+                                   const char *label)
 {
   double translation[2] = {0.0, 0.0};
   vtkNew<vtkPath> projPath;
   projPath->DeepCopy(path);
   vtkGL2PSUtilities::ProjectPoints(projPath->GetPoints(), actorMatrix);
   vtkGL2PSUtilities::DrawPath(projPath.GetPointer(), rasterPos, translation,
-                              actorColor);
+                              actorColor, NULL, 0.0, -1.f, label);
 }
 
 void vtkGL2PSUtilities::DrawPath(vtkPath *path, double rasterPos[3],
                                  double windowPos[2], unsigned char rgba[4],
                                  double scale[2], double rotateAngle,
-                                 float strokeWidth)
+                                 float strokeWidth, const char *label)
 {
   switch (gl2psGetFileFormat())
     {
     case GL2PS_PS:
     case GL2PS_EPS:
       vtkGL2PSUtilities::DrawPathPS(path, rasterPos, windowPos, rgba, scale,
-                                    rotateAngle, strokeWidth);
+                                    rotateAngle, strokeWidth, label);
       break;
     case GL2PS_SVG:
       vtkGL2PSUtilities::DrawPathSVG(path, rasterPos, windowPos, rgba, scale,
-                                     rotateAngle, strokeWidth);
+                                     rotateAngle, strokeWidth, label);
       break;
     case GL2PS_PDF:
       vtkGL2PSUtilities::DrawPathPDF(path, rasterPos, windowPos, rgba, scale,
-                                     rotateAngle, strokeWidth);
+                                     rotateAngle, strokeWidth, label);
       break;
     default:
       break;
@@ -302,7 +305,7 @@ void vtkGL2PSUtilities::FinishExport()
 void vtkGL2PSUtilities::DrawPathPS(vtkPath *path, double rasterPos[3],
                                    double windowPos[2], unsigned char rgba[4],
                                    double scale[2], double rotateAngle,
-                                   float strokeWidth)
+                                   float strokeWidth, const char *label)
 {
   vtkFloatArray *points =
       vtkFloatArray::SafeDownCast(path->GetPoints()->GetData());
@@ -334,6 +337,10 @@ void vtkGL2PSUtilities::DrawPathPS(vtkPath *path, double rasterPos[3],
   int *codeBegin = code;
 #endif
   int *codeEnd = code + codes->GetNumberOfTuples();
+  if (label != NULL && label[0] != '\0')
+    {
+    out << "% " << label << endl;
+    }
   out << "gsave" << endl;
   out << "initmatrix" << endl;
   out << windowPos[0] << " " << windowPos[1] << " translate" << endl;
@@ -435,7 +442,8 @@ void vtkGL2PSUtilities::DrawPathPS(vtkPath *path, double rasterPos[3],
 void vtkGL2PSUtilities::DrawPathPDF(vtkPath *path, double rasterPos[3],
                                     double windowPos[2], unsigned char rgba[4],
                                     double scale[2], double rotateAngle,
-                                    float strokeWidth)
+                                    float strokeWidth,
+                                    const char *)
 {
   vtkFloatArray *points =
       vtkFloatArray::SafeDownCast(path->GetPoints()->GetData());
@@ -583,7 +591,7 @@ void vtkGL2PSUtilities::DrawPathPDF(vtkPath *path, double rasterPos[3],
 void vtkGL2PSUtilities::DrawPathSVG(vtkPath *path, double rasterPos[3],
                                     double windowPos[2], unsigned char rgba[4],
                                     double scale[2], double rotateAngle,
-                                    float strokeWidth)
+                                    float strokeWidth, const char *label)
 {
   vtkFloatArray *points =
       vtkFloatArray::SafeDownCast(path->GetPoints()->GetData());
@@ -625,6 +633,12 @@ void vtkGL2PSUtilities::DrawPathSVG(vtkPath *path, double rasterPos[3],
   float *ptBegin = pt;
   int *codeBegin = code;
 #endif
+
+  if (label != NULL && label[0] != '\0')
+    {
+    out << "<!-- " << label << " -->" << endl;
+    }
+
   int *codeEnd = code + codes->GetNumberOfTuples();
   out << "<g transform=\"" << endl
       << "     translate(" << windowPos[0] << " "
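DrawPathPS() and DrawPathSVG() above emit the new label argument as a backend-appropriate comment, while DrawPathPDF() ignores it. A minimal sketch of that dispatch pattern (EmitLabel and the OutputFormat enum are hypothetical, not the VTK/GL2PS API):

#include <cstddef>
#include <iostream>
#include <sstream>

enum OutputFormat { FORMAT_PS, FORMAT_SVG, FORMAT_PDF };

// Write the label as a "%" comment for PostScript/EPS, an XML comment for
// SVG, and nothing for PDF, mirroring the behavior added above.
static void EmitLabel(std::ostream &out, OutputFormat format, const char *label)
{
  if (label == NULL || label[0] == '\0')
    {
    return;                                  // nothing to annotate
    }
  switch (format)
    {
    case FORMAT_PS:
      out << "% " << label << "\n";          // PostScript comment
      break;
    case FORMAT_SVG:
      out << "<!-- " << label << " -->\n";   // XML comment
      break;
    case FORMAT_PDF:
    default:
      break;                                 // PDF backend drops the label
    }
}

int main()
{
  std::ostringstream ps, svg;
  EmitLabel(ps, FORMAT_PS, "DrawEllipseWedge path:");
  EmitLabel(svg, FORMAT_SVG, "Pathified string: hello");
  std::cout << ps.str() << svg.str();
  return 0;
}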
diff --git a/Rendering/GL2PS/vtkGL2PSUtilities.h b/Rendering/GL2PS/vtkGL2PSUtilities.h
index 623bb61..8c86358 100644
--- a/Rendering/GL2PS/vtkGL2PSUtilities.h
+++ b/Rendering/GL2PS/vtkGL2PSUtilities.h
@@ -63,9 +63,11 @@ public:
 
   // Description:
   // Transform the path using the actor's matrix and current GL state, then
-  // draw it to GL2PS.
+  // draw it to GL2PS. The label string is inserted into the GL2PS output at the
+  // beginning of the path specification as a comment on supported backends.
   static void Draw3DPath(vtkPath *path, vtkMatrix4x4 *actorMatrix,
-                         double rasterPos[3], unsigned char actorColor[4]);
+                         double rasterPos[3], unsigned char actorColor[4],
+                         const char *label = NULL);
   // Description:
   // Generate PS, EPS, or SVG markup from a vtkPath object, and then inject it
   // into the output using the gl2psSpecial command. The path is translated
@@ -74,9 +76,12 @@ public:
   // and determines clipping and depth. If scale is NULL, no scaling is done.
   // If strokeWidth is positive, the path will be stroked with the indicated
   // width. If zero or negative, the path will be filled (default).
+  // The label string is inserted into the GL2PS output at the beginning of the
+  // path specification as a comment on supported backends.
   static void DrawPath(vtkPath *path, double rasterPos[3], double windowPos[2],
                        unsigned char rgba[4], double scale[2] = NULL,
-                       double rotateAngle = 0.0, float strokeWidth = -1);
+                       double rotateAngle = 0.0, float strokeWidth = -1,
+                       const char *label = NULL);
 
   // Description:
   // Get whether all text will be exported as paths.
@@ -118,15 +123,15 @@ protected:
   static void DrawPathPS(vtkPath *path, double rasterPos[3],
                          double windowPos[2], unsigned char rgba[4],
                          double scale[2] = NULL, double rotateAngle = 0.0,
-                         float strokeWidth = -1);
+                         float strokeWidth = -1, const char *label = NULL);
   static void DrawPathPDF(vtkPath *path, double rasterPos[3],
                           double windowPos[2], unsigned char rgba[4],
                           double scale[2] = NULL, double rotateAngle = 0.0,
-                          float strokeWidth = -1);
+                          float strokeWidth = -1, const char *label = NULL);
   static void DrawPathSVG(vtkPath *path, double rasterPos[3],
                           double windowPos[2], unsigned char rgba[4],
                           double scale[2] = NULL, double rotateAngle = 0.0,
-                          float strokeWidth = -1);
+                          float strokeWidth = -1, const char *label = NULL);
 
   vtkGL2PSUtilities() {}
   ~vtkGL2PSUtilities() {}
diff --git a/Rendering/HybridOpenGL/CMakeLists.txt b/Rendering/HybridOpenGL/CMakeLists.txt
deleted file mode 100644
index 1b72090..0000000
--- a/Rendering/HybridOpenGL/CMakeLists.txt
+++ /dev/null
@@ -1,34 +0,0 @@
-set(Module_SRCS
-  vtkImageDataLIC2D.cxx
-  vtkImageDataLIC2DExtentTranslator.cxx
-  vtkStructuredGridLIC2D.cxx
-  vtkSurfaceLICDefaultPainter.cxx
-  vtkSurfaceLICPainter.cxx
-  )
-
-set(shader_files
-  vtkStructuredGridLIC2D_fs.glsl
-  vtkSurfaceLICPainter_fs1.glsl
-  vtkSurfaceLICPainter_fs2.glsl
-  vtkSurfaceLICPainter_vs1.glsl
-  )
-
-unset(shader_h_files)
-foreach(file ${shader_files})
-  get_filename_component(file_we ${file} NAME_WE)
-  set(src  ${CMAKE_CURRENT_SOURCE_DIR}/${file})
-  set(res  ${CMAKE_CURRENT_BINARY_DIR}/${file_we}.cxx)
-  set(resh ${CMAKE_CURRENT_BINARY_DIR}/${file_we}.h)
-  list(APPEND shader_h_files ${resh})
-  add_custom_command(
-    OUTPUT ${res} ${resh}
-    DEPENDS ${src} vtkEncodeString
-    COMMAND vtkEncodeString
-    ARGS ${res} ${src} ${file_we}
-      --build-header VTKRENDERINGHYBRIDOPENGL_EXPORT vtkRenderingHybridOpenGLModule.h
-    )
-  list(APPEND Module_SRCS ${res})
-  set_source_files_properties(${file_we} WRAP_EXCLUDE)
-endforeach()
-
-vtk_module_library(${vtk-module} ${Module_SRCS})
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/CMakeLists.txt b/Rendering/HybridOpenGL/Testing/Cxx/CMakeLists.txt
deleted file mode 100644
index b4e2bd6..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/CMakeLists.txt
+++ /dev/null
@@ -1,43 +0,0 @@
-set(MyTests "")
-
-if(NOT APPLE)
-  # This is from the old build system - not working on Apple platform.
-  list(APPEND MyTests
-    TestStructuredGridLIC2DXSlice.cxx
-    TestStructuredGridLIC2DYSlice.cxx
-    TestStructuredGridLIC2DZSlice.cxx
-    )
-  if(VTK_DATA_ROOT)
-    list(APPEND MyTests
-      TestSurfaceLIC.cxx
-      TestImageDataLIC2D.cxx # This was segfaulting before modularization (and is now).
-      )
-  endif()
-endif()
-
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    set(_basedir Hybrid)
-    if("${TName}" STREQUAL "TestSurfaceLIC")
-      set(_basedir Rendering)
-    endif()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/${_basedir}/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/TestImageDataLIC2D.cxx b/Rendering/HybridOpenGL/Testing/Cxx/TestImageDataLIC2D.cxx
deleted file mode 100644
index 0bdd944..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/TestImageDataLIC2D.cxx
+++ /dev/null
@@ -1,48 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestImageDataLIC2D.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "TestImageDataLIC2D.h"
-#include <vtksys/SystemTools.hxx>
-
-int TestImageDataLIC2D(int argc, char* argv[])
-{
-  char* fname =
-    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/SurfaceVectors.vtk");
-  std::string filename = fname;
-  filename = "--data=" + filename;
-  delete [] fname;
-
-  fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/noise.png");
-  std::string noise = fname;
-  noise = "--noise=" + noise;
-  delete [] fname;
-
-  char** new_argv = new char*[argc+10];
-  for (int cc=0; cc < argc; cc++)
-    {
-    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
-    }
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(noise.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--mag=5");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--partitions=5");
-  int status = ImageDataLIC2D(argc, new_argv);
-  for (int kk=0; kk < argc; kk++)
-    {
-    delete [] new_argv[kk];
-    }
-  delete [] new_argv;
-  return status;
-}
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/TestImageDataLIC2D.h b/Rendering/HybridOpenGL/Testing/Cxx/TestImageDataLIC2D.h
deleted file mode 100644
index f2ff151..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/TestImageDataLIC2D.h
+++ /dev/null
@@ -1,297 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestImageDataLIC2D.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#ifndef  __TestImageDataLIC2D_h
-#define  __TestImageDataLIC2D_h
-
-#include "vtkGenericDataObjectReader.h"
-#include "vtkImageDataLIC2D.h"
-#include "vtkImageData.h"
-#include "vtkImageIterator.h"
-#include "vtkImagePermute.h"
-#include "vtkImageShiftScale.h"
-#include "vtkPNGReader.h"
-#include "vtkPNGWriter.h"
-#include "vtkProbeFilter.h"
-#include "vtkRenderWindow.h"
-#include "vtkSmartPointer.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-#include "vtkStructuredData.h"
-#include "vtkTimerLog.h"
-#include "vtkUnstructuredGrid.h"
-#include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
-#include "vtkTesting.h"
-#include <vtksys/CommandLineArguments.hxx>
-#include "vtkImageMapToColors.h"
-#include "vtkLookupTable.h"
-#include "vtkTrivialProducer.h"
-
-#define CREATE_NEW(var, class) vtkSmartPointer<class> var = vtkSmartPointer<class>::New();
-
-
-//-----------------------------------------------------------------------------
-void Merge(vtkImageData* dest, vtkImageData* src)
-{
-  if (!src || !dest)
-    {
-    return;
-    }
-
-
-  if (src->GetScalarType() != dest->GetScalarType())
-    {
-    cout << src->GetScalarTypeAsString() << ", " << dest->GetScalarTypeAsString() << endl;
-    abort();
-    }
-
-  vtkImageIterator<unsigned char> inIt(src, src->GetExtent());
-  int outextent[6];
-  src->GetExtent(outextent);
-
-  vtkImageIterator<unsigned char> outIt(dest, outextent);
-
-  while (!outIt.IsAtEnd() && !inIt.IsAtEnd())
-    {
-    unsigned char* spanOut = outIt.BeginSpan();
-    unsigned char* spanIn = inIt.BeginSpan();
-    unsigned char* outSpanEnd = outIt.EndSpan();
-    unsigned char* inSpanEnd = inIt.EndSpan();
-    if (outSpanEnd != spanOut && inSpanEnd != spanIn)
-      {
-      size_t minO = outSpanEnd - spanOut;
-      size_t minI = inSpanEnd - spanIn;
-      memcpy(spanOut, spanIn, (minO < minI)? minO : minI);
-      }
-    inIt.NextSpan();
-    outIt.NextSpan();
-    }
-}
-
-// Example demonstrating use of vtkImageDataLIC2D filter.
-// Typical usage:
-// ./bin/ImageDataLIC2D --data=<vtk file> --output=<png file>
-int ImageDataLIC2D(int argc, char* argv[])
-{
-  std::string filename;
-  std::string noise_filename;
-  int resolution = 10;
-  int magnification = 1;
-  std::string outputpath;
-  int num_partitions = 1;
-  int num_steps = 40;
-
-  vtksys::CommandLineArguments arg;
-  arg.StoreUnusedArguments(1);
-  arg.Initialize(argc, argv);
-
-  typedef vtksys::CommandLineArguments argT;
-  arg.AddArgument("--data", argT::EQUAL_ARGUMENT, &filename,
-    "(required) Enter dataset to load (currently only *.vtk files are supported");
-  arg.AddArgument("--res", argT::EQUAL_ARGUMENT, &resolution,
-    "(optional: default 10) Number of sample per unit distance");
-  arg.AddArgument("--mag", argT::EQUAL_ARGUMENT, &magnification,
-    "(optional: default 1) Magnification");
-  arg.AddArgument("--output", argT::EQUAL_ARGUMENT, &outputpath,
-    "(optional) Output png image");
-  arg.AddArgument("--partitions", argT::EQUAL_ARGUMENT, &num_partitions,
-    "(optional: default 1) Number of partitions");
-  arg.AddArgument("--num-steps", argT::EQUAL_ARGUMENT, &num_steps,
-    "(optional: default 40) Number of steps in each direction");
-  arg.AddArgument("--noise", argT::EQUAL_ARGUMENT, &noise_filename,
-    "(optional) Specify the filename to a png image file to use as the noise texture.");
-
-  if (!arg.Parse() || filename=="")
-    {
-    cerr << "Problem parsing arguments." << endl;
-    cerr << arg.GetHelp() << endl;
-    return 1;
-    }
-
-  if (magnification < 1)
-    {
-    cout << "WARNING: Magnification \'" << magnification  << "\' is invalid."
-      " Forcing a magnification of 1.";
-    magnification = 1;
-    }
-
-  if (num_steps < 1)
-    {
-    cout << "WARNING: Number of steps cannot be less than 1. Forcing 10.";
-    num_steps = 10;
-    }
-
-  CREATE_NEW(reader,vtkGenericDataObjectReader);
-  reader->SetFileName(filename.c_str());
-  reader->Update();
-
-  double bounds[6];
-  vtkDataSet::SafeDownCast(reader->GetOutput())->GetBounds(bounds);
-
-  // If 3D use XY slice, otherwise use non-trivial slice.
-  int dataDesc = VTK_XY_PLANE;
-  if (bounds[0] == bounds[1])
-    {
-    dataDesc = VTK_YZ_PLANE;
-    }
-  else if (bounds[2] == bounds[3])
-    {
-    dataDesc = VTK_XZ_PLANE;
-    }
-  else if (bounds[4] == bounds[5])
-    {
-    dataDesc = VTK_XY_PLANE;
-    }
-
-  CREATE_NEW(probeData,vtkImageData);
-  probeData->SetOrigin(bounds[0], bounds[2], bounds[4]);
-  int width = 0;
-  int height = 0;
-  switch (dataDesc)
-    {
-  case VTK_XY_PLANE:
-    width = static_cast<int>(ceil((bounds[1]-bounds[0]) * resolution));
-    height = static_cast<int>(ceil((bounds[3]-bounds[2]) * resolution));
-    probeData->SetDimensions(width, height, 1);
-    probeData->SetSpacing(
-      (bounds[1]-bounds[0])/double(width), (bounds[3]-bounds[2])/double(height), 1);
-    break;
-
-  case VTK_YZ_PLANE:
-    width = static_cast<int>(ceil((bounds[3]-bounds[2]) * resolution));
-    height = static_cast<int>(ceil((bounds[5]-bounds[4]) * resolution));
-    probeData->SetDimensions(1, width, height);
-    probeData->SetSpacing(
-      1, (bounds[3]-bounds[2])/double(width), (bounds[5]-bounds[4])/double(height));
-    break;
-
-  case VTK_XZ_PLANE:
-    width = static_cast<int>(ceil((bounds[1]-bounds[0]) * resolution));
-    height = static_cast<int>(ceil((bounds[5]-bounds[4]) * resolution));
-    probeData->SetDimensions(width, 1, height);
-    probeData->SetSpacing(
-      (bounds[1]-bounds[0])/double(width), 1, (bounds[5]-bounds[4])/double(height));
-    break;
-    }
-
-  CREATE_NEW(probe,vtkProbeFilter);
-  probe->SetSourceConnection(reader->GetOutputPort());
-  probe->SetInputData(probeData);
-  probe->Update();
-
-  CREATE_NEW(renWin, vtkRenderWindow);
-  renWin->Render();
-
-  CREATE_NEW(output, vtkImageData);
-  output->SetDimensions(width * magnification, height * magnification, 1);
-  output->SetSpacing(probeData->GetSpacing());
-  output->SetOrigin(probeData->GetOrigin());
-  output->AllocateScalars(VTK_UNSIGNED_CHAR, 3);
-
-  CREATE_NEW( filter, vtkImageDataLIC2D );
-  if (  filter->SetContext( renWin ) == 0  )
-    {
-    cout << "Required OpenGL extensions / GPU not supported." << endl;
-    return 0;
-    }
-
-  filter->SetInputConnection(0, probe->GetOutputPort(0));
-
-  if (noise_filename != "")
-    {
-    CREATE_NEW(pngReader,vtkPNGReader);
-    pngReader->SetFileName(noise_filename.c_str());
-    filter->SetInputConnection(1, pngReader->GetOutputPort(0));
-    }
-
-  filter->SetSteps(num_steps);
-  filter->SetStepSize(0.8/magnification);
-  filter->SetMagnification(magnification);
-  filter->UpdateInformation();
-
-  int original_extents[6];
-  probeData->GetExtent(original_extents);
-
-  for (int kk=0; kk < num_partitions; kk++)
-    {
-    filter->SetUpdateExtent(0, kk, num_partitions, 0);
-
-    vtkTimerLog* timer = vtkTimerLog::New();
-    timer->StartTimer();
-    filter->Update();
-    if ( filter->GetFBOSuccess() == 0 ||
-         filter->GetLICSuccess() == 0 )
-      {
-      timer->Delete();
-      timer  = NULL;
-      return 0;
-      }
-    timer->StopTimer();
-
-    //cout << "Time: " << timer->GetElapsedTime() << endl;
-    timer->Delete();
-
-    CREATE_NEW(clone, vtkImageData);
-    clone->ShallowCopy(filter->GetOutput());
-
-    // input is double between 0.0 and 1.0. Cast it between [0, 255].
-    CREATE_NEW(caster, vtkImageShiftScale);
-    caster->SetInputData(clone);
-    caster->SetShift(0.0);
-    caster->SetScale(255.0);
-    caster->SetOutputScalarTypeToUnsignedChar();
-
-    CREATE_NEW(permuter, vtkImagePermute);
-    permuter->SetInputConnection(caster->GetOutputPort());
-    switch (dataDesc)
-      {
-    case VTK_XY_PLANE:
-      permuter->SetFilteredAxes(0, 1, 2);
-      break;
-
-    case VTK_YZ_PLANE:
-      permuter->SetFilteredAxes(1, 2, 0);
-      break;
-
-    case VTK_XZ_PLANE:
-      permuter->SetFilteredAxes(0, 2, 1);
-      break;
-      }
-    permuter->Update();
-    ::Merge(output, permuter->GetOutput());
-    }
-
-  CREATE_NEW(tester, vtkTesting);
-  for (int cc=0; cc < argc; cc++)
-    {
-    tester->AddArgument(argv[cc]);
-    }
-
-  if (outputpath != "")
-    {
-    CREATE_NEW(writer, vtkPNGWriter);
-    writer->SetFileName(outputpath.c_str());
-    writer->SetInputData(output);
-    writer->Write();
-    }
-
-  CREATE_NEW(tp, vtkTrivialProducer);
-  tp->SetOutput(output);
-  int retVal = (!tester->IsValidImageSpecified() ||
-    (tester->RegressionTest(tp, 10) == vtkTesting::PASSED))? /*success*/ 0 : /*failure*/ 1;
-  return retVal;
-}
-
-#endif
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DSlice.h b/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DSlice.h
deleted file mode 100644
index 6f5deb2..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DSlice.h
+++ /dev/null
@@ -1,304 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestStructuredGridLIC2DSlice.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#ifndef  __TestStructuredGridLIC2DSlice_h
-#define  __TestStructuredGridLIC2DSlice_h
-
-#include "vtkStructuredGridLIC2D.h"
-#include "vtkPNGReader.h"
-#include "vtkXMLStructuredGridReader.h"
-#include "vtkExtractGrid.h"
-#include "vtkRenderWindowInteractor.h"
-#include "vtkRenderWindow.h"
-#include "vtkRenderer.h"
-#include "vtkActor.h"
-#include "vtkTexture.h"
-#include "vtkDataSetMapper.h"
-#include "vtkTestUtilities.h"
-#include "vtkRegressionTestImage.h"
-#include "vtkPNGWriter.h"
-#include "vtkImageShiftScale.h"
-#include "vtkImageData.h"
-#include "vtkPointData.h"
-#include "vtkProperty.h"
-#include "vtkPolyDataMapper.h"
-#include "vtkDataSetSurfaceFilter.h"
-#include "vtkImageExtractComponents.h"
-#include "vtkTestUtilities.h"
-#include "vtkCamera.h"
-#include "vtkSmartPointer.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-#include "vtkTimerLog.h"
-#include <vtksys/CommandLineArguments.hxx>
-#include <string>
-
-enum { STRUCTURED_GRID_LIC2D_SLICE_DEMO = 0, STRUCTURED_GRID_LIC2D_SLICE_TEST = 1 };
-static int    RenderingMode = STRUCTURED_GRID_LIC2D_SLICE_TEST;
-static double ZoomFactor    = 2.8;
-
-#define CREATE_NEW(var, class) vtkSmartPointer<class> var = vtkSmartPointer<class>::New();
-inline  int CLAMP(int a, int min, int max)
-{
-  a = (a < min)? min : a;
-  a = (a > max)? max : a;
-  return a;
-}
-
-static int StructuredGridLIC2DSlice(int argc, char* argv[])
-{
-  std::string filename;
-  std::string noise_filename;
-  int resolution = 10;
-  int magnification = 1;
-  std::string outputpath;
-  int num_partitions = 1;
-  int num_steps = 40;
-  int slice_dir = 2; // 0 == X, 1 == Y, 2 == Z
-  int slice = 0;
-
-  vtksys::CommandLineArguments arg;
-  arg.StoreUnusedArguments(1);
-  arg.Initialize(argc, argv);
-
-  typedef vtksys::CommandLineArguments argT;
-  arg.AddArgument("--data", argT::EQUAL_ARGUMENT, &filename,
-    "(required) Enter dataset to load (currently only *.vts files are supported");
-  arg.AddArgument("--res", argT::EQUAL_ARGUMENT, &resolution,
-    "(optional: default 10) Number of sample per unit distance");
-  arg.AddArgument("--mag", argT::EQUAL_ARGUMENT, &magnification,
-    "(optional: default 1) Magnification");
-  arg.AddArgument("--output", argT::EQUAL_ARGUMENT, &outputpath,
-    "(optional) Output png image");
-  arg.AddArgument("--partitions", argT::EQUAL_ARGUMENT, &num_partitions,
-    "(optional: default 1) Number of partitions");
-  arg.AddArgument("--num-steps", argT::EQUAL_ARGUMENT, &num_steps,
-    "(optional: default 40) Number of steps in each direction");
-  arg.AddArgument("--noise", argT::EQUAL_ARGUMENT, &noise_filename,
-    "(optional) Specify the filename to a png image file to use as the noise texture");
-  arg.AddArgument("--slice", argT::EQUAL_ARGUMENT, &slice,
-    "(optional: default 0) The slice number in the direction specified by slice-dir. "
-    "This is ignored for 2D data.");
-  arg.AddArgument("--slice-dir", argT::EQUAL_ARGUMENT, &slice_dir,
-    "(optional: default 2 (Z slices)) The slice direction: 0 for X slices, 1 for Y slices and 2 for Z slices. "
-    "This is ignored for 2D data.");
-
-  if (!arg.Parse() || filename=="")
-    {
-    cerr << "Problem parsing arguments." << endl;
-    cerr << arg.GetHelp() << endl;
-    return 1;
-    }
-
-  if (magnification < 1)
-    {
-    cout << "WARNING: Magnification \'" << magnification  << "\' is invalid."
-      " Forcing a magnification of 1.";
-    magnification = 1;
-    }
-
-  if (num_steps < 1)
-    {
-    cout << "WARNING: Number of steps cannot be less than 1. Forcing 10.";
-    num_steps = 10;
-    }
-
-  if (slice_dir < 0 || slice_dir > 2)
-    {
-    cout << "WARNING: Invalid slice-dir (" <<slice_dir<<"). Forcing Z slices";
-    slice_dir = 2;
-    }
-
-  CREATE_NEW(reader, vtkXMLStructuredGridReader);
-  reader->SetFileName(filename.c_str());
-  reader->Update();
-
-  int dataDesc = VTK_XY_PLANE;
-  switch(slice_dir)
-    {
-  case 0:
-    dataDesc = VTK_YZ_PLANE;
-    break;
-  case 1:
-    dataDesc = VTK_XZ_PLANE;
-    break;
-  case 2:
-  default:
-    dataDesc = VTK_XY_PLANE;
-    }
-
-  int extent[6];
-  int voi[6];
-  reader->GetOutput()->GetExtent(extent);
-  memcpy(voi, extent, 6*sizeof(int));
-
-  // If data is 2D, then override the slice-dir
-  if (extent[0] == extent[1])
-    {
-    dataDesc = VTK_YZ_PLANE;
-    slice = 0;
-    }
-  else if (extent[2] == extent[3])
-    {
-    dataDesc = VTK_XZ_PLANE;
-    slice = 0;
-    }
-  else if (extent[4] == extent[5])
-    {
-    dataDesc = VTK_XY_PLANE;
-    slice = 0;
-    }
-  else
-    {
-    switch (dataDesc)
-      {
-    case VTK_XY_PLANE:
-      voi[4] = voi[5] = CLAMP(extent[4]+slice, extent[4], extent[5]);
-      break;
-
-    case VTK_YZ_PLANE:
-      voi[0] = voi[1] = CLAMP(extent[0]+slice, extent[0], extent[1]);
-      break;
-
-    case VTK_XZ_PLANE:
-      voi[2] = voi[3] = CLAMP(extent[2]+slice, extent[2], extent[3]);
-      break;
-      }
-    }
-
-  CREATE_NEW(extractVOI, vtkExtractGrid);
-  extractVOI->SetInputConnection(reader->GetOutputPort());
-  extractVOI->SetVOI(voi);
-
-  CREATE_NEW(renWin, vtkRenderWindow);
-  CREATE_NEW(renderer, vtkRenderer);
-  renWin->AddRenderer(renderer);
-  CREATE_NEW(iren, vtkRenderWindowInteractor);
-  iren->SetRenderWindow(renWin);
-
-  renWin->Render();
-
-  CREATE_NEW(filter, vtkStructuredGridLIC2D);
-  if (  filter->SetContext( renWin ) == 0  )
-    {
-    cout << "Required OpenGL extensions / GPU not supported." << endl;
-    return 0;
-    }
-
-  filter->SetInputConnection(extractVOI->GetOutputPort());
-
-  if (noise_filename != "")
-    {
-    CREATE_NEW(pngReader, vtkPNGReader);
-    pngReader->SetFileName(noise_filename.c_str());
-    filter->SetInputConnection(1, pngReader->GetOutputPort(0));
-    }
-
-  filter->SetSteps(num_steps);
-  filter->SetStepSize(0.01/magnification);
-  filter->SetMagnification(magnification);
-
-  for (int kk=0; kk < num_partitions; kk++)
-    {
-    filter->SetUpdateExtent(0, kk, num_partitions, 0);
-
-    cout << "*****************" << endl;
-    filter->Update();
-    if (  filter->GetFBOSuccess() == 0 ||
-          filter->GetLICSuccess() == 0  )
-      {
-      return 0;
-      }
-
-    CREATE_NEW(clone, vtkImageData);
-    clone->ShallowCopy(filter->GetOutputDataObject(1));
-
-    double range[2];
-    clone->GetPointData()->GetScalars()->GetRange(range);
-    CREATE_NEW(caster, vtkImageShiftScale);
-    caster->SetInputData(clone);
-    caster->SetOutputScalarTypeToUnsignedChar();
-    caster->SetShift(-range[0]);
-    caster->SetScale(255.0/(range[1]-range[0]));
-    caster->Update();
-
-
-    //vtkPNGWriter* writer = vtkPNGWriter::New();
-    //writer->SetFileName("/tmp/lic.png");
-    //writer->SetInputConnection(caster->GetOutputPort());
-    //writer->Write();
-    //writer->Delete();
-
-    CREATE_NEW(texture, vtkTexture);
-    texture->SetInputConnection(caster->GetOutputPort());
-
-    CREATE_NEW(clone2, vtkStructuredGrid);
-    clone2->ShallowCopy(filter->GetOutput(0));
-
-    CREATE_NEW(surfaceFilter, vtkDataSetSurfaceFilter);
-    surfaceFilter->SetInputData(clone2);
-
-    CREATE_NEW(mapper, vtkPolyDataMapper);
-    mapper->SetInputConnection(surfaceFilter->GetOutputPort());
-    mapper->SetScalarVisibility(0);
-
-    CREATE_NEW(actor, vtkActor);
-    actor->SetMapper(mapper);
-    actor->SetTexture(texture);
-
-    renderer->AddActor(actor);
-    }
-
-  CREATE_NEW(tester, vtkTesting);
-  for (int cc=0; cc < argc; cc++)
-    {
-    tester->AddArgument(argv[cc]);
-    }
-  tester->SetRenderWindow(renWin);
-
-  renderer->SetBackground(0.2,0.1,0.2);
-  renderer->ResetCamera();
-  renderer->GetActiveCamera()->Zoom( ZoomFactor );
-
-  if ( RenderingMode == STRUCTURED_GRID_LIC2D_SLICE_TEST )
-    {
-    switch (dataDesc)
-      {
-      case VTK_YZ_PLANE:
-        renderer->GetActiveCamera()->Azimuth(90);
-        break;
-      case VTK_XZ_PLANE:
-        renderer->GetActiveCamera()->Elevation(90);
-        break;
-      }
-    }
-
-  renWin->Render();
-  int reply = (!tester->IsValidImageSpecified() ||
-    (tester->RegressionTest(10) == vtkTesting::PASSED))? /*success*/ 0 : /*failure*/ 1;
-
-  if (tester->IsInteractiveModeSpecified())
-    {
-    iren->Start();
-    }
-
-  if ( RenderingMode != STRUCTURED_GRID_LIC2D_SLICE_TEST )
-    {
-    iren->Start();
-    }
-
-  return reply;
-}
-
-#endif
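For reference, the slice selection in StructuredGridLIC2DSlice() above collapses one axis of the volume of interest to the requested slice, clamped into the data's extent. A minimal standalone sketch of that logic — ClampValue and SelectSliceVOI are illustrative names, standing in for the CLAMP macro and inline code used above:

    // Collapse one axis of a structured extent to a single, clamped slice.
    // sliceDir: 0 -> X slices (YZ plane), 1 -> Y slices (XZ plane), 2 -> Z slices (XY plane).
    static int ClampValue(int value, int lo, int hi)
    {
      return value < lo ? lo : (value > hi ? hi : value);
    }

    static void SelectSliceVOI(const int extent[6], int sliceDir, int slice, int voi[6])
    {
      for (int i = 0; i < 6; ++i)
        {
        voi[i] = extent[i];
        }
      int lo = extent[2 * sliceDir];
      int hi = extent[2 * sliceDir + 1];
      voi[2 * sliceDir] = voi[2 * sliceDir + 1] = ClampValue(lo + slice, lo, hi);
    }
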
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DXSlice.cxx b/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DXSlice.cxx
deleted file mode 100644
index c7bb9a7..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DXSlice.cxx
+++ /dev/null
@@ -1,56 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestStructuredGridLIC2DXSlice.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "TestStructuredGridLIC2DSlice.h"
-#include <vtksys/SystemTools.hxx>
-
-int TestStructuredGridLIC2DXSlice(int argc, char* argv[])
-{
-  ZoomFactor    = 3.0;
-  RenderingMode = STRUCTURED_GRID_LIC2D_SLICE_TEST;
-
-  char* fname =
-    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/timestep_0_15.vts");
-
-  std::string filename = fname;
-  filename = "--data=" + filename;
-  delete [] fname;
-
-  fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/noise.png");
-  std::string noise = fname;
-  noise = "--noise=" + noise;
-  delete [] fname;
-
-  char** new_argv = new char*[argc+10];
-  for (int cc=0; cc < argc; cc++)
-    {
-    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
-    }
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(noise.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--mag=8");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--partitions=1");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--num-steps=100");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice-dir=0");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice=98");
-  int status = ::StructuredGridLIC2DSlice(argc, new_argv);
-  for (int kk=0; kk < argc; kk++)
-    {
-    delete [] new_argv[kk];
-    }
-  delete [] new_argv;
-
-  return status;
-}
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DYSlice.cxx b/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DYSlice.cxx
deleted file mode 100644
index 24a625c..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DYSlice.cxx
+++ /dev/null
@@ -1,55 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestStructuredGridLIC2DYSlice.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "TestStructuredGridLIC2DSlice.h"
-#include <vtksys/SystemTools.hxx>
-
-int TestStructuredGridLIC2DYSlice(int argc, char* argv[])
-{
-  ZoomFactor    = 3.0;
-  RenderingMode = STRUCTURED_GRID_LIC2D_SLICE_TEST;
-
-  char* fname =
-    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/timestep_0_15.vts");
-
-  std::string filename = fname;
-  filename = "--data=" + filename;
-  delete [] fname;
-
-  fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/noise.png");
-  std::string noise = fname;
-  noise = "--noise=" + noise;
-  delete [] fname;
-
-  char** new_argv = new char*[argc+10];
-  for (int cc=0; cc < argc; cc++)
-    {
-    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
-    }
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(noise.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--mag=8");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--partitions=1");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--num-steps=100");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice-dir=1");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice=0");
-  int status = ::StructuredGridLIC2DSlice(argc, new_argv);
-  for (int kk=0; kk < argc; kk++)
-    {
-    delete [] new_argv[kk];
-    }
-  delete [] new_argv;
-  return status;
-}
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DZSlice.cxx b/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DZSlice.cxx
deleted file mode 100644
index 3761abd..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/TestStructuredGridLIC2DZSlice.cxx
+++ /dev/null
@@ -1,55 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestStructuredGridLIC2DZSlice.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "TestStructuredGridLIC2DSlice.h"
-#include <vtksys/SystemTools.hxx>
-
-int TestStructuredGridLIC2DZSlice(int argc, char* argv[])
-{
-  ZoomFactor    = 3.8;
-  RenderingMode = STRUCTURED_GRID_LIC2D_SLICE_TEST;
-
-  char* fname =
-    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/timestep_0_15.vts");
-
-  std::string filename = fname;
-  filename = "--data=" + filename;
-  delete [] fname;
-
-  fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/noise.png");
-  std::string noise = fname;
-  noise = "--noise=" + noise;
-  delete [] fname;
-
-  char** new_argv = new char*[argc+10];
-  for (int cc=0; cc < argc; cc++)
-    {
-    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
-    }
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(noise.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--mag=8");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--partitions=1");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--num-steps=100");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice-dir=2");
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice=10");
-  int status = ::StructuredGridLIC2DSlice(argc, new_argv);
-  for (int kk=0; kk < argc; kk++)
-    {
-    delete [] new_argv[kk];
-    }
-  delete [] new_argv;
-  return status;
-}
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/TestSurfaceLIC.cxx b/Rendering/HybridOpenGL/Testing/Cxx/TestSurfaceLIC.cxx
deleted file mode 100644
index 0f20b65..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/TestSurfaceLIC.cxx
+++ /dev/null
@@ -1,46 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestSurfaceLIC.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "TestSurfaceLIC.h"
-
-#include <vtksys/SystemTools.hxx>
-//#define VTK_CREATE_NEW(var, class) vtkSmartPointer<class> var = vtkSmartPointer<class>::New();
-
-int TestSurfaceLIC(int argc, char* argv[])
-{
-  char* fname =
-    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/disk_out_ref_surface.vtp");
-  std::string filename = fname;
-  filename = "--data=" + filename;
-  std::string vectors = "--vectors=V";
-  delete []fname;
-
-  char** new_argv = new char*[argc+2];
-  for (int cc=0; cc < argc; cc++)
-    {
-    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
-    }
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
-  new_argv[argc++] = vtksys::SystemTools::DuplicateString(vectors.c_str());
-
-  RenderingMode = SURFACE_LIC_TEST;
-  int status = SurfaceLIC(argc, new_argv);
-
-  for (int kk=0; kk < argc; kk++)
-    {
-    delete [] new_argv[kk];
-    }
-  delete [] new_argv;
-  return status;
-}
diff --git a/Rendering/HybridOpenGL/Testing/Cxx/TestSurfaceLIC.h b/Rendering/HybridOpenGL/Testing/Cxx/TestSurfaceLIC.h
deleted file mode 100644
index 98f99da..0000000
--- a/Rendering/HybridOpenGL/Testing/Cxx/TestSurfaceLIC.h
+++ /dev/null
@@ -1,216 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestSurfaceLIC.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#ifndef __TestSurfaceLIC_h
-#define __TestSurfaceLIC_h
-
-#include "vtkCamera.h"
-#include "vtkCellData.h"
-#include "vtkDataSetSurfaceFilter.h"
-#include "vtkGenericDataObjectReader.h"
-#include "vtkSurfaceLICPainter.h"
-#include "vtkObjectFactory.h"
-#include "vtkPainterPolyDataMapper.h"
-#include "vtkPointData.h"
-#include "vtkPolyData.h"
-#include "vtkRegressionTestImage.h"
-#include "vtkRenderer.h"
-#include "vtkRenderWindow.h"
-#include "vtkRenderWindowInteractor.h"
-#include "vtkSmartPointer.h"
-#include "vtkTestUtilities.h"
-#include "vtkXMLPolyDataReader.h"
-
-#include <vtksys/CommandLineArguments.hxx>
-#include <vtksys/SystemTools.hxx>
-#include <vector>
-#include <string>
-
-#define VTK_CREATE_NEW(var, class) vtkSmartPointer<class> var = vtkSmartPointer<class>::New();
-
-// This example demonstrates the use of vtkSurfaceLICPainter for rendering
-// geometry with LIC on the surface.
-
-enum { SURFACE_LIC_DEMO = 0, SURFACE_LIC_TEST = 1 };
-
-int RenderingMode = SURFACE_LIC_TEST;
-
-int SurfaceLIC( int argc, char * argv[] )
-{
-  std::string filename;
-  int num_steps = 40;
-  double step_size = 0.4;
-  double lic_intensity = 0.8;
-  //std::string color_by;
-  std::string vectors;
-
-  vtksys::CommandLineArguments arg;
-  arg.StoreUnusedArguments(1);
-  arg.Initialize(argc, argv);
-
-  // Fill up accepted arguments.
-  typedef vtksys::CommandLineArguments argT;
-
-  arg.AddArgument("--data", argT::EQUAL_ARGUMENT, &filename,
-    "(required) Enter dataset to load (currently only *.[vtk|vtp] files are supported");
-  arg.AddArgument("--num-steps", argT::EQUAL_ARGUMENT, &num_steps,
-    "(optional: default 40) Number of steps in each direction");
-  arg.AddArgument("--step-size", argT::EQUAL_ARGUMENT, &step_size,
-    "(optional: default 0.4) Step size in pixels");
-  arg.AddArgument("--lic-intensity", argT::EQUAL_ARGUMENT, &lic_intensity,
-    "(optional: default 0.8) Contribution of LIC in the final image [1.0 == max contribution]");
-  //arg.AddArgument("--color-by", argT::EQUAL_ARGUMENT, &color_by,
-  //  "(optional: default active scalars) Name of the array to color by");
-  arg.AddArgument("--vectors", argT::EQUAL_ARGUMENT, &vectors,
-    "(optional: default active point vectors) Name of the vector field array");
-
-  if (!arg.Parse() || filename == "")
-    {
-    cerr << "Usage: " << endl;
-    cerr << arg.GetHelp() << endl;
-    return 1;
-    }
-
-  vtkSmartPointer<vtkPolyData> polydata;
-  std::string ext = vtksys::SystemTools::GetFilenameExtension(filename);
-  if (ext == ".vtk")
-    {
-    vtkGenericDataObjectReader* reader = vtkGenericDataObjectReader::New();
-    reader->SetFileName(filename.c_str());
-
-    vtkDataSetSurfaceFilter* surface = vtkDataSetSurfaceFilter::New();
-    surface->SetInputConnection(reader->GetOutputPort());
-    surface->Update();
-
-    polydata = surface->GetOutput();
-
-    reader->Delete();
-    surface->Delete();
-    }
-  else if (ext == ".vtp")
-    {
-    vtkXMLPolyDataReader* reader = vtkXMLPolyDataReader::New();
-    reader->SetFileName(filename.c_str());
-    reader->Update();
-    polydata = reader->GetOutput();
-    reader->Delete();
-    }
-  else
-    {
-    cerr << "Error: Unknown extension: '" << ext << "'"<< endl;
-    return 1;
-    }
-
-  if (!polydata || polydata->GetNumberOfPoints() == 0)
-    {
-    cerr << "Error reading file: '" << filename.c_str() << "'" << endl;
-    return 1;
-    }
-
-  // Set up the render window, renderer, interactor.
-  VTK_CREATE_NEW(renWin, vtkRenderWindow);
-  VTK_CREATE_NEW(renderer, vtkRenderer);
-  VTK_CREATE_NEW(iren, vtkRenderWindowInteractor);
-  renWin->SetReportGraphicErrors(1);
-  renWin->AddRenderer(renderer);
-  renWin->SetSize(300,300);
-  iren->SetRenderWindow(renWin);
-  renWin->Render();
-  if (!vtkSurfaceLICPainter::IsSupported(renWin))
-    {
-    cout << "WARNING: The rendering context does not support required "
-      "extensions." << endl;
-    return 0;
-    }
-
-  // Create a mapper and insert the vtkSurfaceLICPainter painter into the
-  // painter chain. This is essential since the entire logic of performing the
-  // LIC is present in the vtkSurfaceLICPainter.
-  VTK_CREATE_NEW(mapper, vtkPainterPolyDataMapper);
-  VTK_CREATE_NEW(painter, vtkSurfaceLICPainter);
-  painter->SetDelegatePainter(mapper->GetPainter());
-  mapper->SetPainter(painter);
-
-  // If user chose a vector field, select it.
-  if (vectors != "")
-    {
-    painter->SetInputArrayToProcess(
-      vtkDataObject::FIELD_ASSOCIATION_POINTS_THEN_CELLS,
-      vectors.c_str());
-    }
-  else if (!polydata->GetPointData()->GetVectors() &&
-    !polydata->GetCellData()->GetVectors())
-    {
-    cerr << "ERROR: No active vectors are available." << endl<<
-            "       Please select the vectors array using '--vectors'" << endl;
-    return 1;
-    }
-
-  // Pass parameters.
-  painter->SetLICIntensity(lic_intensity);
-  painter->SetNumberOfSteps(num_steps);
-  painter->SetStepSize(step_size);
-
-  // Set the mapper input
-  mapper->SetInputData(polydata);
-
-  VTK_CREATE_NEW(actor, vtkActor);
-  actor->SetMapper(mapper);
-  renderer->AddActor(actor);
-  renderer->SetBackground(0.3, 0.3, 0.3);
-
-  if ( RenderingMode )
-    {
-    // Code used for regression testing.
-    renderer->GetActiveCamera()->SetFocalPoint(-1.88, -0.98, -1.04);
-    renderer->GetActiveCamera()->SetPosition(13.64, 4.27, -31.59);
-    renderer->GetActiveCamera()->SetViewAngle(30);
-    renderer->GetActiveCamera()->SetViewUp(0.41, 0.83, 0.35);
-    renderer->ResetCamera();
-    renWin->Render();
-    if (  painter->GetLICSuccess() == 0 ||
-          painter->GetRenderingPreparationSuccess() == 0 )
-      {
-      return 0;
-      }
-
-    int retVal = vtkTesting::Test(argc, argv, renWin, 75);
-    if (retVal == vtkRegressionTester::DO_INTERACTOR)
-      {
-      iren->Start();
-      }
-
-    if ((retVal == vtkTesting::PASSED) || (retVal == vtkTesting::DO_INTERACTOR))
-      {
-      return 0;
-      }
-    // failed.
-    return 1;
-    }
-  else
-    {
-    renderer->ResetCamera();
-    renWin->Render();
-    if (  painter->GetLICSuccess() == 0 ||
-          painter->GetRenderingPreparationSuccess() == 0 )
-      {
-      return 0;
-      }
-    iren->Start();
-    }
-  // interactive run; report success.
-  return 0;
-}
-
-#endif
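The core pattern demonstrated by SurfaceLIC() above is inserting vtkSurfaceLICPainter into the mapper's painter chain; a minimal sketch of just those calls, using the same parameter defaults as the command-line help above. SetUpSurfaceLIC is an illustrative helper name, and reader, renderer, and interactor setup are omitted:

    #include "vtkPainterPolyDataMapper.h"
    #include "vtkSmartPointer.h"
    #include "vtkSurfaceLICPainter.h"

    void SetUpSurfaceLIC(vtkPainterPolyDataMapper* mapper)
    {
      vtkSmartPointer<vtkSurfaceLICPainter> licPainter =
        vtkSmartPointer<vtkSurfaceLICPainter>::New();
      // Keep the mapper's original painter as the delegate so geometry handling
      // stays where it was, and make the LIC painter the head of the chain.
      licPainter->SetDelegatePainter(mapper->GetPainter());
      mapper->SetPainter(licPainter);
      licPainter->SetNumberOfSteps(40);  // integration steps
      licPainter->SetStepSize(0.4);      // in pixels, per the --step-size help above
      licPainter->SetLICIntensity(0.8);  // contribution of LIC to the final image
    }
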
diff --git a/Rendering/HybridOpenGL/module.cmake b/Rendering/HybridOpenGL/module.cmake
deleted file mode 100644
index a6a17b0..0000000
--- a/Rendering/HybridOpenGL/module.cmake
+++ /dev/null
@@ -1,15 +0,0 @@
-vtk_module(vtkRenderingHybridOpenGL
-  GROUPS
-    Rendering
-  DEPENDS
-    vtkIOLegacy
-    vtkImagingSources
-    vtkImagingHybrid
-    vtkRenderingOpenGL
-  TEST_DEPENDS
-    vtkIOLegacy
-    vtkIOXML
-    vtkTestingCore
-    vtkTestingRendering
-    vtkInteractionStyle
-  )
diff --git a/Rendering/HybridOpenGL/vtkImageDataLIC2D.cxx b/Rendering/HybridOpenGL/vtkImageDataLIC2D.cxx
deleted file mode 100644
index e3b0203..0000000
--- a/Rendering/HybridOpenGL/vtkImageDataLIC2D.cxx
+++ /dev/null
@@ -1,613 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkImageDataLIC2D.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkImageDataLIC2D.h"
-
-#include "vtkFloatArray.h"
-#include "vtkImageData.h"
-#include "vtkImageNoiseSource.h"
-#include "vtkInformation.h"
-#include "vtkInformationVector.h"
-#include "vtkDataTransferHelper.h"
-#include "vtkImageDataLIC2DExtentTranslator.h"
-#include "vtkLineIntegralConvolution2D.h"
-#include "vtkStructuredExtent.h"
-#include "vtkTextureObject.h"
-#include "vtkObjectFactory.h"
-#include "vtkOpenGLExtensionManager.h"
-#include "vtkOpenGLRenderWindow.h"
-#include "vtkPointData.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-#include "vtkStructuredData.h"
-#include "vtkFrameBufferObject.h"
-
-#include "vtkgl.h"
-
-#define PRINTEXTENT(ext) \
-  ext[0] << ", " << ext[1] << ", " << ext[2] << ", " << ext[3] << ", " << ext[4] << ", " << ext[5]
-
-vtkStandardNewMacro(vtkImageDataLIC2D);
-//----------------------------------------------------------------------------
-vtkImageDataLIC2D::vtkImageDataLIC2D()
-{
-  this->Context    = 0;
-  this->Steps      = 20;
-  this->StepSize   = 1.0;
-  this->FBOSuccess = 0;
-  this->LICSuccess = 0;
-  this->Magnification = 1;
-
-  this->NoiseSource = vtkImageNoiseSource::New();
-  this->NoiseSource->SetWholeExtent(0, 127, 0, 127, 0, 0);
-  this->NoiseSource->SetMinimum(0.0);
-  this->NoiseSource->SetMaximum(1.0);
-
-  this->SetNumberOfInputPorts(2);
-  // by default process active point vectors
-  this->SetInputArrayToProcess(0, 0, 0, vtkDataObject::FIELD_ASSOCIATION_POINTS,
-    vtkDataSetAttributes::VECTORS);
-
-  this->OwnWindow = false;
-  this->OpenGLExtensionsSupported    = 0;
-  this->ARBColorBufferFloatSupported = false;
-}
-
-//----------------------------------------------------------------------------
-vtkImageDataLIC2D::~vtkImageDataLIC2D()
-{
-  this->NoiseSource->Delete();
-  this->SetContext( NULL );
-}
-
-//----------------------------------------------------------------------------
-int vtkImageDataLIC2D::SetContext( vtkRenderWindow * context )
-{
-  if ( this->Context == context )
-    {
-    return this->OpenGLExtensionsSupported;
-    }
-
-  if ( this->Context && this->OwnWindow )
-    {
-    this->Context->Delete();
-    this->Context = NULL;
-    }
-  this->OwnWindow = false;
-
-  vtkOpenGLRenderWindow * openGLRenWin =
-  vtkOpenGLRenderWindow::SafeDownCast( context );
-  this->Context = openGLRenWin;
-
-  if ( openGLRenWin )
-    {
-    openGLRenWin->Render();
-    openGLRenWin->MakeCurrent();
-    vtkOpenGLExtensionManager * mgr = openGLRenWin->GetExtensionManager();
-
-    // optional for texture objects.
-    mgr->LoadSupportedExtension( "GL_EXT_texture_integer" );
-
-    //this->ARBColorBufferFloatSupported =
-    //  mgr->LoadSupportedExtension("GL_ARB_color_buffer_float");
-
-    if (  !mgr->LoadSupportedExtension( "GL_VERSION_1_3" ) ||
-          !mgr->LoadSupportedExtension( "GL_VERSION_1_2" ) ||
-          !mgr->LoadSupportedExtension( "GL_VERSION_2_0" ) ||
-          !mgr->LoadSupportedExtension( "GL_ARB_texture_float" ) ||
-          !mgr->LoadSupportedExtension( "GL_ARB_texture_non_power_of_two" )
-       )
-      {
-      vtkErrorMacro( "Required OpenGL extensions not supported." );
-      mgr = NULL;
-      this->Context = 0;
-      openGLRenWin  = NULL;
-      return 0;
-      }
-
-    mgr = NULL;
-    }
-
-  openGLRenWin = NULL;
-  this->Modified();
-
-  this->OpenGLExtensionsSupported = 1;
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-vtkRenderWindow* vtkImageDataLIC2D::GetContext()
-{
-  return this->Context;
-}
-
-//----------------------------------------------------------------------------
-// Description:
-// Fill the input port information objects for this algorithm.  This
-// is invoked by the first call to GetInputPortInformation for each
-// port so subclasses can specify what they can handle.
-// Redefined from the superclass.
-int vtkImageDataLIC2D::FillInputPortInformation(int port,
-                                          vtkInformation *info)
-{
-  if (!this->Superclass::FillInputPortInformation(port, info))
-    {
-    return 0;
-    }
-
-  if (port==1)
-    {
-    info->Set(vtkAlgorithm::INPUT_IS_OPTIONAL(), 1);
-    }
-
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-void vtkImageDataLIC2D::TranslateInputExtent(const int* inExt,
-  const int* inWholeExtent, int *resultExtent)
-{
-  for (int axis = 0; axis < 3; axis++)
-    {
-    int whole_dim = (inWholeExtent[axis*2+1] - inWholeExtent[axis*2]) + 1;
-    int dim = (inExt[axis*2+1] - inExt[axis*2]) + 1;
-
-    // Scale the output extent
-    int min = inExt[axis*2] * this->Magnification;
-    int max = min + (whole_dim>1? (dim * this->Magnification - 1) : 0);
-
-    resultExtent[axis*2] = min;
-    resultExtent[axis*2+1] = max;
-    }
-}
-
-//----------------------------------------------------------------------------
-// We need to report output extent after taking into consideration the
-// magnification.
-int vtkImageDataLIC2D::RequestInformation(vtkInformation* vtkNotUsed(request),
-  vtkInformationVector** inputVector,
-  vtkInformationVector* outputVector)
-{
-  int ext[6];
-  int wholeExtent[6];
-  double spacing[3];
-
-  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
-  vtkInformation *outInfo = outputVector->GetInformationObject(0);
-
-  inInfo->Get(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), wholeExtent);
-  inInfo->Get(vtkDataObject::SPACING(), spacing);
-  vtkDebugMacro( << "Input WHOLE_EXTENT: " << PRINTEXTENT( wholeExtent ) << endl );
-  this->TranslateInputExtent(wholeExtent, wholeExtent, ext);
-
-  for (int axis = 0; axis < 3; axis++)
-    {
-    // Change the data spacing
-    spacing[axis] /= this->Magnification;
-    }
-  vtkDebugMacro( << "WHOLE_EXTENT: " << PRINTEXTENT( ext ) << endl );
-
-  outInfo->Set(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), ext, 6);
-  outInfo->Set(vtkDataObject::SPACING(), spacing, 3);
-
-  // Setup ExtentTranslator
-  vtkImageDataLIC2DExtentTranslator* extTranslator =
-    vtkImageDataLIC2DExtentTranslator::SafeDownCast(
-      vtkStreamingDemandDrivenPipeline::GetExtentTranslator(outInfo));
-
-  if (!extTranslator)
-    {
-    extTranslator = vtkImageDataLIC2DExtentTranslator::New();
-    vtkStreamingDemandDrivenPipeline::SetExtentTranslator(outInfo, extTranslator);
-    extTranslator->Delete();
-    }
-  extTranslator->SetAlgorithm(this);
-  extTranslator->SetInputWholeExtent(wholeExtent);
-  extTranslator->SetInputExtentTranslator(
-    vtkExtentTranslator::SafeDownCast(
-    inInfo->Get(vtkStreamingDemandDrivenPipeline::EXTENT_TRANSLATOR())));
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-int vtkImageDataLIC2D::RequestUpdateExtent (
-  vtkInformation * vtkNotUsed(request),
-  vtkInformationVector **inputVector,
-  vtkInformationVector *outputVector)
-{
-  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
-  vtkInformation *outInfo = outputVector->GetInformationObject(0);
-
-  // Tell the vector field input the extents that we need from it.
-  // The downstream request needs to be downsized based on the Magnification.
-  int ext[6];
-  outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), ext);
-
-  vtkDebugMacro( << "Requested UPDATE_EXTENT: " <<  PRINTEXTENT( ext ) << endl );
-  for (int axis = 0; axis < 3; axis++)
-    {
-    int wholeMin = ext[axis*2];
-    int wholeMax = ext[axis*2+1];
-
-    // Scale the output extent
-    wholeMin = wholeMin / this->Magnification;
-    wholeMax = wholeMax / this->Magnification;
-
-    ext[axis*2] = wholeMin;
-    ext[axis*2+1] = wholeMax;
-    }
-  vtkDebugMacro( << "UPDATE_EXTENT: " <<  PRINTEXTENT( ext ) << endl );
-
-  inInfo->Set(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), ext, 6);
-
-  inInfo = inputVector[1]->GetInformationObject(0);
-  if (inInfo)
-    {
-    // always request the whole noise image.
-    inInfo->Set(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(),
-      inInfo->Get(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT()),
-      6);
-    }
-
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-int vtkImageDataLIC2D::RequestData(
-  vtkInformation        * vtkNotUsed(request),
-  vtkInformationVector ** inputVector,
-  vtkInformationVector  * outputVector )
-{
-  vtkInformation * inInfo= inputVector[0]->GetInformationObject(0);
-  vtkImageData   * input = vtkImageData::SafeDownCast(
-    inInfo->Get(vtkDataObject::DATA_OBJECT()));
-
-  // Check if the input image is a 2D image (not 0D, not 1D, not 3D)
-  int dims[3];
-  input->GetDimensions(dims);
-
-  int dataDescription = vtkStructuredData::GetDataDescription(dims);
-  if (  vtkStructuredData::GetDataDimension( dataDescription ) != 2  )
-    {
-    vtkErrorMacro( "Input is not a 2D image." );
-    input  = NULL;
-    inInfo = NULL;
-    return 0;
-    }
-
-  vtkIdType numPoints = input->GetNumberOfPoints();
-  vtkSmartPointer<vtkDataArray> inVectors =
-    this->GetInputArrayToProcess( 0, inputVector );
-
-  if ( inVectors.GetPointer() == 0 )
-    {
-    vtkErrorMacro( "No input vectors selected. "
-                   "Vectors are required for line integral convolution." );
-    input  = NULL;
-    inInfo = NULL;
-    return 0;
-    }
-
-  if ( inVectors->GetNumberOfTuples() != numPoints )
-    {
-    vtkErrorMacro( "Only point vectors are supported." );
-    input  = NULL;
-    inInfo = NULL;
-    return 0;
-    }
-
-  if ( !this->Context )
-    {
-    vtkRenderWindow * renWin = vtkRenderWindow::New();
-    if (  this->SetContext( renWin ) == 0  )
-      {
-      renWin->Delete();
-      renWin = NULL;
-      input  = NULL;
-      inInfo = NULL;
-      return 0;
-      }
-
-    renWin = NULL; // will be released via this->Context
-    this->OwnWindow = true;
-    }
-
-  this->Context->MakeCurrent();
-  this->Context->SetReportGraphicErrors(1);
-
-  // Noise.
-  vtkInformation *noiseInfo = inputVector[1]->GetInformationObject(0);
-  vtkImageData *noise=0;
-  if (noiseInfo==0)
-    {
-    this->NoiseSource->Update();
-    noise = this->NoiseSource->GetOutput();
-    }
-  else
-    {
-    noise = vtkImageData::SafeDownCast(
-      noiseInfo->Get(vtkDataObject::DATA_OBJECT()));
-
-    if (noise->GetPointData()==0)
-      {
-      vtkErrorMacro("Provided noise does not have point data.");
-      return 0;
-      }
-    if (noise->GetPointData()->GetScalars()==0)
-      {
-      vtkErrorMacro("Provided noise does not have scalars on point data.");
-      return 0;
-      }
-    }
-
-  int firstComponent;
-  int secondComponent;
-  switch (dataDescription)
-    {
-  case VTK_XY_PLANE:
-    firstComponent = 0;
-    secondComponent = 1;
-    break;
-
-  case VTK_YZ_PLANE:
-    firstComponent = 1;
-    secondComponent = 2;
-    break;
-
-  case VTK_XZ_PLANE:
-    firstComponent = 0;
-    secondComponent = 2;
-    break;
-
-  default:
-    vtkErrorMacro("Sanity check failed. This filter can only handle 2D inputs.");
-    return 0;
-    }
-
-  double *spacing = input->GetSpacing();
-  // this->StepSize is specified in units of the cell length, while
-  // vtkLineIntegralConvolution2D expects the step size in normalized image
-  // space, hence we do the conversion.
-  double cellLength = sqrt(spacing[firstComponent] * spacing[firstComponent] +
-    spacing[secondComponent] * spacing[secondComponent]);
-
-  double w = spacing[firstComponent] * dims[firstComponent];
-  double h = spacing[secondComponent] * dims[secondComponent];
-  double normalizationFactor = sqrt(w*w + h*h);
-  double stepSize = this->StepSize * cellLength / normalizationFactor;
-  vtkDebugMacro( << "** StepSize (Normalized Image Space): " << stepSize << endl );
-
-  vtkLineIntegralConvolution2D * internal=vtkLineIntegralConvolution2D::New();
-  if (  !internal->IsSupported( this->Context )  )
-    {
-    internal->Delete();
-    internal   = NULL;
-    input      = NULL;
-    noise      = NULL;
-    inInfo     = NULL;
-    spacing    = NULL;
-    noiseInfo  = NULL;
-
-    this->LICSuccess = 0;
-    return 0;
-    }
-  internal->SetNumberOfSteps(this->Steps);
-  internal->SetLICStepSize(stepSize);
-  internal->SetComponentIds(firstComponent,secondComponent);
-  internal->SetGridSpacings(spacing[firstComponent], spacing[secondComponent]);
-  internal->SetMagnification(this->Magnification);
-
-  vtkDataTransferHelper *vectorBus = vtkDataTransferHelper::New();
-  vectorBus->SetContext(this->Context);
-  vectorBus->SetCPUExtent(input->GetExtent());
-  vectorBus->SetGPUExtent(input->GetExtent());
-  //  vectorBus->SetTextureExtent(input->GetExtent());
-  vectorBus->SetArray(inVectors);
-
-  vtkDataTransferHelper *noiseBus = vtkDataTransferHelper::New();
-  noiseBus->SetContext(this->Context);
-  noiseBus->SetCPUExtent(noise->GetExtent());
-  noiseBus->SetGPUExtent(noise->GetExtent());
-  //  noiseBus->SetTextureExtent(noise->GetExtent());
-  noiseBus->SetArray(noise->GetPointData()->GetScalars());
-
-  // For uploading float textures without clamping, we create an FBO with a
-  // float color buffer.
-  // Vector field in image space.
-  vtkTextureObject *tempBuffer = vtkTextureObject::New();
-  tempBuffer->SetContext(this->Context);
-  tempBuffer->Create2D(128, 128, 3, VTK_FLOAT, false);
-
-  vtkFrameBufferObject *fbo = vtkFrameBufferObject::New();
-  fbo->SetContext(this->Context);
-  fbo->SetColorBuffer(0, tempBuffer);
-  fbo->SetNumberOfRenderTargets(1);
-  fbo->SetActiveBuffer(0);
-  if (  !fbo->Start( 128, 128, false )  )
-    {
-    fbo->Delete();
-    internal->Delete();
-    noiseBus->Delete();
-    vectorBus->Delete();
-    tempBuffer->Delete();
-    fbo        = NULL;
-    internal   = NULL;
-    noiseBus   = NULL;
-    vectorBus  = NULL;
-    tempBuffer = NULL;
-
-    input      = NULL;
-    noise      = NULL;
-    inInfo     = NULL;
-    spacing    = NULL;
-    noiseInfo  = NULL;
-
-    this->FBOSuccess = 0;
-    return 0;
-    }
-  this->FBOSuccess = 1;
-  tempBuffer->Delete();
-  tempBuffer = NULL;
-
-  vtkgl::ActiveTexture(vtkgl::TEXTURE0);
-  vectorBus->Upload(0,0);
-  vectorBus->GetTexture()->Bind();
-  glTexParameteri(vectorBus->GetTexture()->GetTarget(),GL_TEXTURE_WRAP_S,
-                  GL_CLAMP);
-  glTexParameteri(vectorBus->GetTexture()->GetTarget(),GL_TEXTURE_WRAP_T,
-                  GL_CLAMP);
-  glTexParameteri(vectorBus->GetTexture()->GetTarget(), vtkgl::TEXTURE_WRAP_R,
-                  GL_CLAMP);
-  glTexParameteri(vectorBus->GetTexture()->GetTarget(), GL_TEXTURE_MIN_FILTER,
-                  GL_LINEAR);
-  glTexParameteri(vectorBus->GetTexture()->GetTarget(), GL_TEXTURE_MAG_FILTER,
-                  GL_LINEAR);
-
-  internal->SetVectorField(vectorBus->GetTexture());
-
-  vtkgl::ActiveTexture(vtkgl::TEXTURE1);
-  noiseBus->Upload(0,0);
-  noiseBus->GetTexture()->Bind();
-  glTexParameteri(noiseBus->GetTexture()->GetTarget(),GL_TEXTURE_WRAP_S,
-                  GL_REPEAT);
-  glTexParameteri(noiseBus->GetTexture()->GetTarget(),GL_TEXTURE_WRAP_T,
-                  GL_REPEAT);
-  glTexParameteri(noiseBus->GetTexture()->GetTarget(),vtkgl::TEXTURE_WRAP_R,
-                  GL_REPEAT);
-  glTexParameteri(noiseBus->GetTexture()->GetTarget(),GL_TEXTURE_MIN_FILTER,
-                  GL_NEAREST);
-  glTexParameteri(noiseBus->GetTexture()->GetTarget(),GL_TEXTURE_MAG_FILTER,
-                  GL_NEAREST);
-  internal->SetNoise(noiseBus->GetTexture());
-  fbo->Delete();
-  fbo = NULL;
-
-  int inputRequestedExtent[6];
-  inInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), inputRequestedExtent);
-  // transform inputRequestedExtent relative to the input's actual extent since the
-  // vtkLineIntegralConvolution2D needs extents relative to the input vector
-  // field.
-  vtkStructuredExtent::Transform(inputRequestedExtent, input->GetExtent());
-  int licextent[4];
-  switch (dataDescription)
-    {
-  case VTK_XY_PLANE:
-    licextent[0] = inputRequestedExtent[0];
-    licextent[1] = inputRequestedExtent[1];
-    licextent[2] = inputRequestedExtent[2];
-    licextent[3] = inputRequestedExtent[3];
-    break;
-
-  case VTK_YZ_PLANE:
-    licextent[0] = inputRequestedExtent[2];
-    licextent[1] = inputRequestedExtent[3];
-    licextent[2] = inputRequestedExtent[4];
-    licextent[3] = inputRequestedExtent[5];
-    break;
-
-  case VTK_XZ_PLANE:
-    licextent[0] = inputRequestedExtent[0];
-    licextent[1] = inputRequestedExtent[1];
-    licextent[2] = inputRequestedExtent[4];
-    licextent[3] = inputRequestedExtent[5];
-    break;
-    }
-
-  if (  internal->Execute(licextent) == 0  )
-    {
-    internal->Delete();
-    noiseBus->Delete();
-    vectorBus->Delete();
-    internal   = NULL;
-    noiseBus   = NULL;
-    vectorBus  = NULL;
-
-    input      = NULL;
-    noise      = NULL;
-    inInfo     = NULL;
-    spacing    = NULL;
-    noiseInfo  = NULL;
-
-    this->LICSuccess = 0;
-    return 0;
-    }
-  this->LICSuccess = 1;
-
-  glFlush(); // breakpoint for debugging.
-
-  vtkInformation *outInfo = outputVector->GetInformationObject(0);
-  vtkImageData *output = vtkImageData::SafeDownCast(
-    outInfo->Get(vtkDataObject::DATA_OBJECT()));
-
-  vtkDataTransferHelper *outputBus=vtkDataTransferHelper::New();
-  outputBus->SetContext(this->Context);
-
-  vtkTextureObject* licTexture = internal->GetLIC();
-  int gpuExtent[6];
-  inInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), gpuExtent);
-  gpuExtent[0] *= this->Magnification;
-  gpuExtent[2] *= this->Magnification;
-  gpuExtent[4] *= this->Magnification;
-  switch (dataDescription)
-    {
-  case VTK_XY_PLANE:
-    gpuExtent[1] = gpuExtent[0] + licTexture->GetWidth() - 1;
-    gpuExtent[3] = gpuExtent[2] + licTexture->GetHeight() - 1;
-    break;
-
-  case VTK_YZ_PLANE:
-    gpuExtent[3] = gpuExtent[2] + licTexture->GetWidth() - 1;
-    gpuExtent[5] = gpuExtent[4] + licTexture->GetHeight() -1;
-    break;
-
-  case VTK_XZ_PLANE:
-    gpuExtent[1] = gpuExtent[0] + licTexture->GetWidth() -1;
-    gpuExtent[5] = gpuExtent[4] + licTexture->GetHeight() - 1;
-    break;
-
-    }
-  vtkDebugMacro( << "GPU Extent: " << PRINTEXTENT( gpuExtent ) << endl );
-  // It is possible that the GPU extent is larger than what the output expects,
-  // hence we allocate the output using the GPU extent and then crop it.
-
-  output->SetExtent(gpuExtent);
-  output->AllocateScalars(VTK_FLOAT, 3);
-  outputBus->SetCPUExtent(gpuExtent);
-  outputBus->SetGPUExtent(gpuExtent);
-  outputBus->SetTexture(internal->GetLIC());
-  outputBus->SetArray(output->GetPointData()->GetScalars());
-  output->GetPointData()->GetScalars()->SetName("LIC");
-  outputBus->Download();
-  outputBus->Delete();
-  internal->Delete();
-  vectorBus->Delete();
-  noiseBus->Delete();
-
-  // Ensures that the output extent is exactly the same as what was asked for.
-  output->Crop(outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT()));
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-void vtkImageDataLIC2D::PrintSelf( ostream & os, vtkIndent indent )
-{
-  this->Superclass::PrintSelf( os, indent );
-
-  os << indent << "Steps: "         << this->Steps          << "\n";
-  os << indent << "StepSize: "      << this->StepSize       << "\n";
-  os << indent << "FBOSuccess: "    << this->FBOSuccess     << "\n";
-  os << indent << "LICSuccess: "    << this->LICSuccess     << "\n";
-  os << indent << "Magnification: " << this->Magnification  << "\n";
-  os << indent << "OpenGLExtensionsSupported: "
-               << this->OpenGLExtensionsSupported << "\n";
-}
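The only non-obvious arithmetic in RequestData() above is the StepSize conversion from cell-length units to the normalized image space expected by vtkLineIntegralConvolution2D. A standalone sketch of that conversion — NormalizeStepSize is an illustrative name, and the two in-plane axes are assumed to be already selected:

    #include <cmath>

    // StepSize is given in units of the cell diagonal; the LIC filter wants it
    // relative to the image diagonal (normalized image space).
    double NormalizeStepSize(double stepSizeInCells,
                             double sx, double sy,  // spacing along the in-plane axes
                             int nx, int ny)        // dimensions along the in-plane axes
    {
      double cellLength = std::sqrt(sx * sx + sy * sy);  // cell diagonal
      double w = sx * nx;                                // image width
      double h = sy * ny;                                // image height
      return stepSizeInCells * cellLength / std::sqrt(w * w + h * h);
    }
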
diff --git a/Rendering/HybridOpenGL/vtkImageDataLIC2D.h b/Rendering/HybridOpenGL/vtkImageDataLIC2D.h
deleted file mode 100644
index dcb6b92..0000000
--- a/Rendering/HybridOpenGL/vtkImageDataLIC2D.h
+++ /dev/null
@@ -1,155 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkImageDataLIC2D.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkImageDataLIC2D
-//
-// .SECTION Description
-//  GPU implementation of a Line Integral Convolution, a technique for
-//  imaging vector fields.
-//
-//  The input on port 0 is a vtkImageData with the extents of a 2D image.
-//  It needs a vector field on its point data.
-//  Port 1 is a special port for a customized noise input. It is optional.
-//  If no noise input is present, the filter uses vtkImageNoiseSource to
-//  generate a 128x128 noise texture. Even if non-power-of-two textures are
-//  supported, giving a power-of-two noise image may result in faster
-//  execution on the GPU.
-//  This filter only works on point vectors. One can use a
-//  vtkCellDataToPointData filter to convert cell vectors to point vectors.
-//
-// .SECTION Required OpenGL Extensions
-//  GL_ARB_texture_non_power_of_two
-//  GL_VERSION_2_0
-//  GL_ARB_texture_float
-//  GL_ARB_draw_buffers
-//  GL_EXT_framebuffer_object
-//  GL_ARB_pixel_buffer_object
-//
-// .SECTION see also
-//  vtkImageAlgorithm vtkImageNoiseSource
-
-#ifndef __vtkImageDataLIC2D_h
-#define __vtkImageDataLIC2D_h
-
-#include "vtkRenderingHybridOpenGLModule.h" // For export macro
-#include "vtkImageAlgorithm.h"
-#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
-
-class vtkRenderWindow;
-class vtkImageNoiseSource;
-
-class VTKRENDERINGHYBRIDOPENGL_EXPORT vtkImageDataLIC2D : public vtkImageAlgorithm
-{
-public:
-  static vtkImageDataLIC2D* New();
-  vtkTypeMacro(vtkImageDataLIC2D, vtkImageAlgorithm);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Get/Set the context. Context must be a vtkOpenGLRenderWindow.
-  // This does not increase the reference count of the
-  // context to avoid reference loops.
-  // SetContext() may raise an error if the OpenGL context does not support the
-  // required OpenGL extensions. Returns 0 upon failure and 1 upon success.
-  int SetContext( vtkRenderWindow * context );
-  vtkRenderWindow * GetContext();
-
-  // Description:
-  // Number of steps. Initial value is 20.
-  // class invariant: Steps>0.
-  // In terms of visual quality, the greater the better.
-  vtkSetMacro(Steps,int);
-  vtkGetMacro(Steps,int);
-
-  // Description:
-  // Step size.
-  // Specify the step size as a unit of the cell length of the input vector
-  // field. Cell length is the length of the diagonal of a cell.
-  // Initial value is 1.0.
-  // class invariant: StepSize>0.0.
-  // In terms of visual quality, the smaller the better.
-  // The type for the interface is double as the VTK interface is double,
-  // but the GPU only supports float. This value will be converted to
-  // float in the execution of the algorithm.
-  vtkSetClampMacro(StepSize, double, 0.0, 100.0);
-  vtkGetMacro(StepSize, double);
-
-  // Description:
-  // The magnification factor. Default is 1.
-  vtkSetClampMacro(Magnification, int, 1, VTK_INT_MAX);
-  vtkGetMacro(Magnification, int);
-
-  // Description:
-  // Check if the required OpenGL extensions / GPU are supported.
-  vtkGetMacro( OpenGLExtensionsSupported, int );
-
-  // Description:
-  // Check if FBO is started properly.
-  int  GetFBOSuccess() { return this->FBOSuccess; }
-
-  // Description:
-  // Check if LIC runs properly.
-  int  GetLICSuccess() { return this->LICSuccess; }
-
-
-  void TranslateInputExtent(const int* inExt,
-    const int* inWholeExtent, int *outExt);
-
-//BTX
-protected:
-  vtkImageDataLIC2D();
-  ~vtkImageDataLIC2D();
-
-  virtual int RequestInformation(vtkInformation *,
-    vtkInformationVector **, vtkInformationVector *);
-
-  // Description:
-  // Fill the input port information objects for this algorithm.  This
-  // is invoked by the first call to GetInputPortInformation for each
-  // port so subclasses can specify what they can handle.
-  // Redefined from the superclass.
-  virtual int FillInputPortInformation(int port,
-                                       vtkInformation *info);
-
-  int RequestUpdateExtent (vtkInformation * vtkNotUsed(request),
-                           vtkInformationVector **inputVector,
-                           vtkInformationVector *vtkNotUsed( outputVector ));
-
-  // Description:
-  // This is called by the superclass.
-  // This is the method you should override.
-  virtual int RequestData(vtkInformation *request,
-                          vtkInformationVector **inputVector,
-                          vtkInformationVector *outputVector);
-
-  int    Steps;
-  double StepSize;
-  int    Magnification;
-  int    OpenGLExtensionsSupported;
-  vtkWeakPointer<vtkRenderWindow> Context;
-  vtkImageNoiseSource* NoiseSource;
-
-  bool ARBColorBufferFloatSupported;
-  bool OwnWindow;
-  int  FBOSuccess;
-  int  LICSuccess;
-
-private:
-  vtkImageDataLIC2D(const vtkImageDataLIC2D&); // Not implemented.
-  void operator=(const vtkImageDataLIC2D&); // Not implemented.
-//ETX
-};
-
-#endif
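Based on the API documented above, a hedged usage sketch of this filter: ComputeLIC and vectorsOn2DImage are placeholder names, the render window choice is arbitrary, and only the documented return values and success flags are checked:

    #include "vtkImageData.h"
    #include "vtkImageDataLIC2D.h"
    #include "vtkRenderWindow.h"
    #include "vtkSmartPointer.h"

    vtkSmartPointer<vtkImageData> ComputeLIC(vtkImageData* vectorsOn2DImage)
    {
      vtkSmartPointer<vtkRenderWindow> renWin =
        vtkSmartPointer<vtkRenderWindow>::New();
      vtkSmartPointer<vtkImageDataLIC2D> lic =
        vtkSmartPointer<vtkImageDataLIC2D>::New();
      if (lic->SetContext(renWin) == 0)
        {
        return NULL;  // required OpenGL extensions / GPU not supported
        }
      lic->SetInputData(vectorsOn2DImage);  // 2D extent with point vectors
      lic->SetSteps(40);         // more steps, better visual quality
      lic->SetStepSize(1.0);     // in units of the cell diagonal
      lic->SetMagnification(2);  // output extent and spacing scale accordingly
      lic->Update();
      if (lic->GetFBOSuccess() == 0 || lic->GetLICSuccess() == 0)
        {
        return NULL;  // FBO setup or the LIC pass failed on this context
        }
      return lic->GetOutput();
    }
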
diff --git a/Rendering/HybridOpenGL/vtkImageDataLIC2DExtentTranslator.cxx b/Rendering/HybridOpenGL/vtkImageDataLIC2DExtentTranslator.cxx
deleted file mode 100644
index 8c4ff3b..0000000
--- a/Rendering/HybridOpenGL/vtkImageDataLIC2DExtentTranslator.cxx
+++ /dev/null
@@ -1,93 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkImageDataLIC2DExtentTranslator.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkImageDataLIC2DExtentTranslator.h"
-
-#include "vtkObjectFactory.h"
-#include "vtkImageDataLIC2D.h"
-
-vtkStandardNewMacro(vtkImageDataLIC2DExtentTranslator);
-vtkCxxSetObjectMacro(vtkImageDataLIC2DExtentTranslator, InputExtentTranslator, vtkExtentTranslator);
-//----------------------------------------------------------------------------
-vtkImageDataLIC2DExtentTranslator::vtkImageDataLIC2DExtentTranslator()
-{
-  this->Algorithm = 0;
-  this->InputExtentTranslator = 0;
-  this->InputWholeExtent[0] =
-    this->InputWholeExtent[1] =
-    this->InputWholeExtent[2] =
-    this->InputWholeExtent[3] =
-    this->InputWholeExtent[4] =
-    this->InputWholeExtent[5] = 0;
-}
-
-//----------------------------------------------------------------------------
-vtkImageDataLIC2DExtentTranslator::~vtkImageDataLIC2DExtentTranslator()
-{
-  this->SetInputExtentTranslator(0);
-}
-
-//----------------------------------------------------------------------------
-void vtkImageDataLIC2DExtentTranslator::SetAlgorithm(
-  vtkImageDataLIC2D* alg)
-{
-  if (this->Algorithm.GetPointer() != alg)
-    {
-    this->Algorithm = alg;
-    this->Modified();
-    }
-}
-
-//----------------------------------------------------------------------------
-vtkImageDataLIC2D* vtkImageDataLIC2DExtentTranslator::GetAlgorithm()
-{
-  return this->Algorithm.GetPointer();
-}
-
-//----------------------------------------------------------------------------
-int vtkImageDataLIC2DExtentTranslator::PieceToExtentThreadSafe(int piece, int numPieces,
-                                     int ghostLevel, int *wholeExtent,
-                                     int *resultExtent, int splitMode,
-                                     int byPoints)
-{
-  if (!this->Algorithm)
-    {
-    return this->Superclass::PieceToExtentThreadSafe(piece, numPieces, ghostLevel, wholeExtent,
-      resultExtent, splitMode, byPoints);
-    }
-
-  // Let the input extent translator do the translation.
-  int inExt[6];
-  this->InputExtentTranslator->PieceToExtentThreadSafe(piece, numPieces,
-    ghostLevel, this->InputWholeExtent, inExt, splitMode, byPoints);
-  this->Algorithm->TranslateInputExtent(inExt, this->InputWholeExtent, resultExtent);
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-void vtkImageDataLIC2DExtentTranslator::PrintSelf( ostream & os, vtkIndent indent )
-{
-  this->Superclass::PrintSelf( os, indent );
-
-  os << indent << "Algorithm: "               << this->Algorithm << endl;
-  os << indent << "InputWholeExtent: ("
-               << this->InputWholeExtent[0]   << ", "
-               << this->InputWholeExtent[1]   << ", "
-               << this->InputWholeExtent[2]   << ", "
-               << this->InputWholeExtent[3]   << ", "
-               << this->InputWholeExtent[4]   << ", "
-               << this->InputWholeExtent[5]   << ")" << endl;
-  os << indent << "InputExtentTranslator: "
-               << this->InputExtentTranslator << endl;
-}
diff --git a/Rendering/HybridOpenGL/vtkImageDataLIC2DExtentTranslator.h b/Rendering/HybridOpenGL/vtkImageDataLIC2DExtentTranslator.h
deleted file mode 100644
index 074a016..0000000
--- a/Rendering/HybridOpenGL/vtkImageDataLIC2DExtentTranslator.h
+++ /dev/null
@@ -1,66 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkImageDataLIC2DExtentTranslator.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkImageDataLIC2DExtentTranslator
-// .SECTION Description
-
-#ifndef __vtkImageDataLIC2DExtentTranslator_h
-#define __vtkImageDataLIC2DExtentTranslator_h
-
-#include "vtkRenderingHybridOpenGLModule.h" // For export macro
-#include "vtkExtentTranslator.h"
-#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
-
-class vtkImageDataLIC2D;
-
-class VTKRENDERINGHYBRIDOPENGL_EXPORT vtkImageDataLIC2DExtentTranslator
-  : public vtkExtentTranslator
-{
-public:
-  static vtkImageDataLIC2DExtentTranslator* New();
-  vtkTypeMacro(vtkImageDataLIC2DExtentTranslator, vtkExtentTranslator);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Set the vtkImageDataLIC2D algorithm for which this extent translator is
-  // being used.
-  void SetAlgorithm(vtkImageDataLIC2D*);
-  vtkImageDataLIC2D* GetAlgorithm();
-
-  void SetInputExtentTranslator(vtkExtentTranslator*);
-  vtkGetObjectMacro(InputExtentTranslator, vtkExtentTranslator);
-
-  vtkSetVector6Macro(InputWholeExtent, int);
-  vtkGetVector6Macro(InputWholeExtent, int);
-
-  virtual int PieceToExtentThreadSafe(int piece, int numPieces,
-                                     int ghostLevel, int *wholeExtent,
-                                     int *resultExtent, int splitMode,
-                                     int byPoints);
-
-//BTX
-protected:
-  vtkImageDataLIC2DExtentTranslator();
-  ~vtkImageDataLIC2DExtentTranslator();
-
-  int InputWholeExtent[6];
-  vtkExtentTranslator* InputExtentTranslator;
-  vtkWeakPointer<vtkImageDataLIC2D> Algorithm;
-private:
-  vtkImageDataLIC2DExtentTranslator(const vtkImageDataLIC2DExtentTranslator&); // Not implemented.
-  void operator=(const vtkImageDataLIC2DExtentTranslator&); // Not implemented.
-//ETX
-};
-
-#endif
diff --git a/Rendering/HybridOpenGL/vtkStructuredGridLIC2D.cxx b/Rendering/HybridOpenGL/vtkStructuredGridLIC2D.cxx
deleted file mode 100644
index 3ba71ec..0000000
--- a/Rendering/HybridOpenGL/vtkStructuredGridLIC2D.cxx
+++ /dev/null
@@ -1,817 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkStructuredGridLIC2D.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkStructuredGridLIC2D.h"
-
-#include "vtkFloatArray.h"
-#include "vtkImageData.h"
-#include "vtkImageNoiseSource.h"
-#include "vtkInformation.h"
-#include "vtkInformationVector.h"
-#include "vtkDataTransferHelper.h"
-#include "vtkFrameBufferObject.h"
-#include "vtkLineIntegralConvolution2D.h"
-#include "vtkShaderProgram2.h"
-#include "vtkShader2.h"
-#include "vtkShader2Collection.h"
-#include "vtkUniformVariables.h"
-#include "vtkStructuredExtent.h"
-#include "vtkTextureObject.h"
-#include "vtkObjectFactory.h"
-#include "vtkOpenGLExtensionManager.h"
-#include "vtkOpenGLRenderWindow.h"
-#include "vtkPointData.h"
-#include "vtkStreamingDemandDrivenPipeline.h"
-
-#include <assert.h>
-#include "vtkgl.h"
-
-extern const char *vtkStructuredGridLIC2D_fs;
-
-#define PRINTEXTENT(ext) \
-  ext[0] << ", " << ext[1] << ", " << ext[2] << ", " << ext[3] << ", " << ext[4] << ", " << ext[5]
-
-vtkStandardNewMacro(vtkStructuredGridLIC2D);
-//----------------------------------------------------------------------------
-vtkStructuredGridLIC2D::vtkStructuredGridLIC2D()
-{
-  this->Context = 0;
-  this->Steps=1;
-  this->StepSize=1.0;
-  this->Magnification=1;
-  this->SetNumberOfInputPorts(2);
-  this->SetNumberOfOutputPorts(2);
-  this->OwnWindow  = false;
-  this->FBOSuccess = 0;
-  this->LICSuccess = 0;
-  this->OpenGLExtensionsSupported = 0;
-
-  this->NoiseSource = vtkImageNoiseSource::New();
-  this->NoiseSource->SetWholeExtent(0, 127, 0, 127, 0, 0);
-  this->NoiseSource->SetMinimum(0.0);
-  this->NoiseSource->SetMaximum(1.0);
-}
-
-//----------------------------------------------------------------------------
-vtkStructuredGridLIC2D::~vtkStructuredGridLIC2D()
-{
-  this->NoiseSource->Delete();
-  this->SetContext( NULL );
-}
-
-//----------------------------------------------------------------------------
-vtkRenderWindow* vtkStructuredGridLIC2D::GetContext()
-{
-  return this->Context;
-}
-
-//----------------------------------------------------------------------------
-int vtkStructuredGridLIC2D::SetContext( vtkRenderWindow * context )
-{
-  if ( this->Context == context )
-    {
-    return this->OpenGLExtensionsSupported;
-    }
-
-  if ( this->Context && this->OwnWindow )
-    {
-    this->Context->Delete();
-    this->Context = NULL;
-    }
-  this->OwnWindow = false;
-
-
-  vtkOpenGLRenderWindow * openGLRenWin =
-  vtkOpenGLRenderWindow::SafeDownCast( context );
-  this->Context = openGLRenWin;
-
-  if ( openGLRenWin )
-    {
-    vtkOpenGLExtensionManager * mgr = openGLRenWin->GetExtensionManager();
-
-    // optional for texture objects.
-    mgr->LoadSupportedExtension( "GL_EXT_texture_integer" );
-
-    if (  !mgr->LoadSupportedExtension( "GL_VERSION_1_3" ) ||
-          !mgr->LoadSupportedExtension( "GL_VERSION_1_2" ) ||
-          !mgr->LoadSupportedExtension( "GL_VERSION_2_0" ) ||
-          !mgr->LoadSupportedExtension( "GL_ARB_texture_float" ) ||
-          !mgr->LoadSupportedExtension( "GL_ARB_color_buffer_float" ) ||
-          !mgr->LoadSupportedExtension( "GL_ARB_texture_non_power_of_two" )
-       )
-      {
-      vtkErrorMacro( "Required OpenGL extensions not supported." );
-      mgr = NULL;
-      this->Context = 0;
-      openGLRenWin  = NULL;
-      return 0;
-      }
-
-    mgr = NULL;
-    }
-
-  openGLRenWin = NULL;
-  this->Modified();
-
-  this->OpenGLExtensionsSupported = 1;
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-// Description:
-// Fill the input port information objects for this algorithm.  This
-// is invoked by the first call to GetInputPortInformation for each
-// port so subclasses can specify what they can handle.
-// Redefined from the superclass.
-int vtkStructuredGridLIC2D::FillInputPortInformation
-  ( int port, vtkInformation * info )
-{
-  if ( port == 0 )
-    {
-    info->Set( vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkStructuredGrid" );
-    info->Set( vtkAlgorithm::INPUT_IS_REPEATABLE(), 0 );
-    info->Set( vtkAlgorithm::INPUT_IS_OPTIONAL(),   0 );
-    }
-  else
-    {
-    info->Set( vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkImageData" );
-    info->Set( vtkAlgorithm::INPUT_IS_REPEATABLE(), 0 );
-    info->Set( vtkAlgorithm::INPUT_IS_OPTIONAL(),   1 );
-    }
-
-  return 1;
-}
-
-// ----------------------------------------------------------------------------
-// Description:
-// Fill the output port information objects for this algorithm.
-// This is invoked by the first call to GetOutputPortInformation for
-// each port so subclasses can specify what they can handle.
-// Redefined from the superclass.
-int vtkStructuredGridLIC2D::FillOutputPortInformation
-  ( int port, vtkInformation * info )
-{
-  if ( port == 0 )
-    {
-    // input+texcoords
-    info->Set( vtkDataObject::DATA_TYPE_NAME(), "vtkStructuredGrid" );
-    }
-  else
-    {
-    // LIC texture
-    info->Set( vtkDataObject::DATA_TYPE_NAME(), "vtkImageData" );
-    }
-
-  return 1;
-}
-//----------------------------------------------------------------------------
-// We need to report output extent after taking into consideration the
-// magnification.
-int vtkStructuredGridLIC2D::RequestInformation(
-  vtkInformation        * vtkNotUsed(request),
-  vtkInformationVector ** inputVector,
-  vtkInformationVector  * outputVector )
-{
-  int ext[6];
-  double spacing[3];
-
-  vtkInformation * inInfo  = inputVector[0]->GetInformationObject( 0 );
-  vtkInformation * outInfo = outputVector->GetInformationObject( 1 );
-
-  inInfo->Get( vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), ext );
-
-  spacing[0]=1.0;
-  spacing[1]=1.0;
-  spacing[2]=1.0;
-
-  for (int axis = 0; axis < 3; axis++)
-    {
-    int wholeMin = ext[axis*2];
-    int wholeMax = ext[axis*2+1];
-    int dimension = wholeMax - wholeMin + 1;
-
-    // Scale the output extent
-    wholeMin = static_cast<int>(  ceil( static_cast<double>
-                                        ( wholeMin * this->Magnification )
-                                      )
-                               );
-    wholeMax = ( dimension != 1 )
-               ? wholeMin + static_cast<int>
-                 (   floor(  static_cast<double>
-                             ( dimension * this->Magnification )
-                          )
-                 ) -1
-               : wholeMin;
-
-    ext[ axis * 2     ] = wholeMin;
-    ext[ axis * 2 + 1 ] = wholeMax;
-    }
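-
-  // Worked example of the scaling above: with Magnification == 2 and an input
-  // whole extent of [0, 99] along an axis, wholeMin stays 0 and wholeMax
-  // becomes 0 + floor(100 * 2) - 1 == 199, so the reported output extent
-  // along that axis is [0, 199].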
-
-  vtkDebugMacro( << "request info whole ext = " << PRINTEXTENT( ext ) << endl );
-
-  outInfo->Set( vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), ext, 6 );
-  outInfo->Set( vtkDataObject::SPACING(), spacing, 3 );
-
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-int vtkStructuredGridLIC2D::RequestUpdateExtent (
-  vtkInformation * vtkNotUsed(request),
-  vtkInformationVector **inputVector,
-  vtkInformationVector *outputVector)
-{
-  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
-  vtkInformation *outInfo = outputVector->GetInformationObject(1);
-
-
-  // Tell the vector field input the extents that we need from it.
-  // The downstream request needs to be downsized based on the Magnification.
-  int ext[6];
-  outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), ext);
-
-  vtkDebugMacro( << "request update extent, update ext = "
-                 << PRINTEXTENT( ext ) << endl );
-
-  for (int axis = 0; axis < 3; axis++)
-    {
-    int wholeMin = ext[axis*2];
-    int wholeMax = ext[axis*2+1];
-    int dimension = wholeMax - wholeMin + 1;
-
-    // Scale the output extent
-    wholeMin = static_cast<int>(
-      ceil( static_cast<double>( wholeMin ) / this->Magnification ) );
-    wholeMax = ( dimension != 1 )
-               ? wholeMin + static_cast<int>( floor( static_cast<double>
-                 ( dimension ) / this->Magnification ) ) - 1
-               : wholeMin;
-
-    ext[axis*2] = wholeMin;
-    ext[axis*2+1] = wholeMax;
-    }
-  vtkDebugMacro( << "UPDATE_EXTENT: " << ext[0] << ", " << ext[1] << ", "
-                 << ext[2] << ", "    << ext[3] << ", " << ext[4] << ", "
-                 << ext[5] << endl );
-  inInfo->Set(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), ext, 6);
-
-  vtkDebugMacro( << "request update extent, update ext2 = "
-                 << PRINTEXTENT( ext ) << endl );
-
-
-  if(inputVector[1]!=0 && inputVector[1]->GetInformationObject(0) != NULL) // optional input
-    {
-    inInfo = inputVector[1]->GetInformationObject(0);
-    // always request the whole extent
-    inInfo->Set(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(),
-                inInfo->Get(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT()),
-                6);
-    }
-
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-// Stolen from vtkImageAlgorithm. Should be in vtkStructuredGridAlgorithm.
-void vtkStructuredGridLIC2D::AllocateOutputData(vtkDataObject *output,
-                                                vtkInformation *outInfo)
-{
-  // set the extent to be the update extent
-  vtkStructuredGrid *out = vtkStructuredGrid::SafeDownCast(output);
-  if (out)
-    {
-    out->SetExtent(outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT()));
-    this->AllocateScalars(out, outInfo);
-    }
-  else
-    {
-    vtkImageData *out2 = vtkImageData::SafeDownCast(output);
-    if (out2)
-      {
-      out2->SetExtent(outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT()));
-      out2->AllocateScalars(VTK_FLOAT, 3);
-      }
-    }
-}
-
-//----------------------------------------------------------------------------
-// Stolen from vtkImageData. Should be in vtkStructuredGrid.
-void vtkStructuredGridLIC2D::AllocateScalars(vtkStructuredGrid *sg,
-                                             vtkInformation *outInfo)
-{
-  int newType = VTK_DOUBLE;
-  int newNumComp = 1;
-
-  vtkInformation *scalarInfo = vtkDataObject::GetActiveFieldInformation(
-    outInfo,
-    vtkDataObject::FIELD_ASSOCIATION_POINTS, vtkDataSetAttributes::SCALARS);
-  if (scalarInfo)
-    {
-    newType = scalarInfo->Get( vtkDataObject::FIELD_ARRAY_TYPE() );
-    if ( scalarInfo->Has(vtkDataObject::FIELD_NUMBER_OF_COMPONENTS()) )
-      {
-      newNumComp = scalarInfo->Get( vtkDataObject::FIELD_NUMBER_OF_COMPONENTS() );
-      }
-    }
-
-  vtkDataArray *scalars;
-
-  // if the scalar type has not been set then we have a problem
-  if (newType == VTK_VOID)
-    {
-    vtkErrorMacro("Attempt to allocate scalars before scalar type was set!.");
-    return;
-    }
-
-  const int* extent = sg->GetExtent();
-  // Use vtkIdType to avoid overflow on large images
-  vtkIdType dims[3];
-  dims[0] = extent[1] - extent[0] + 1;
-  dims[1] = extent[3] - extent[2] + 1;
-  dims[2] = extent[5] - extent[4] + 1;
-  vtkIdType imageSize = dims[0]*dims[1]*dims[2];
-
-  // if we currently have scalars then just adjust the size
-  scalars = sg->GetPointData()->GetScalars();
-  if (scalars && scalars->GetDataType() == newType
-      && scalars->GetReferenceCount() == 1)
-    {
-    scalars->SetNumberOfComponents(newNumComp);
-    scalars->SetNumberOfTuples(imageSize);
-    // Mark modified since the execute method will be modifying the
-    // scalars directly.
-    scalars->Modified();
-    return;
-    }
-
-  // allocate the new scalars
-  scalars = vtkDataArray::CreateDataArray(newType);
-  scalars->SetNumberOfComponents(newNumComp);
-
-  // allocate enough memory
-  scalars->SetNumberOfTuples(imageSize);
-
-  sg->GetPointData()->SetScalars(scalars);
-  scalars->Delete();
-}
-
-
-//----------------------------------------------------------------------------
-int vtkStructuredGridLIC2D::RequestData(
-  vtkInformation        * vtkNotUsed(request),
-  vtkInformationVector ** inputVector,
-  vtkInformationVector  * outputVector )
-{
-  // 3 passes:
-  // pass 1: render to compute the transformed vector field for the points.
-  // pass 2: perform LIC with the new vector field. This has to happen in a
-  // different pass than computation of the transformed vector.
-  // pass 3: render structured slice quads with correct texture coordinates
-  // and apply the LIC texture to them.
-
-  vtkInformation    * inInfo = inputVector[0]->GetInformationObject(0);
-  vtkStructuredGrid * input  = vtkStructuredGrid::SafeDownCast
-                   (  inInfo->Get( vtkDataObject::DATA_OBJECT() )  );
-
-  int inputRequestedExtent[6];
-  inInfo->Get( vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(),
-               inputRequestedExtent );
-
-  // Check if the input image is a 2D image (not 0D, not 1D, not 3D)
-  int dims[3];
-  //  input->GetDimensions(dims);
-
-  vtkStructuredExtent::GetDimensions( inputRequestedExtent, dims );
-
-  vtkDebugMacro( << "dims = " << dims[0] << " "
-                 << dims[1] << " " << dims[2] << endl );
-  vtkDebugMacro( << "requested ext = " << inputRequestedExtent[0] << " "
-                 << inputRequestedExtent[1] << " " << inputRequestedExtent[2]
-                 << " " << inputRequestedExtent[3] << " "
-                 << inputRequestedExtent[4] << " "
-                 << inputRequestedExtent[5] << endl );
-
-  if(   !( dims[0]==1 && dims[1]>1 && dims[2]>1 )
-     && !( dims[1]==1 && dims[0]>1 && dims[2]>1 )
-     && !( dims[2]==1 && dims[0]>1 && dims[1]>1 )
-    )
-    {
-    vtkErrorMacro( << "input is not a 2D image." << endl );
-    input  = NULL;
-    inInfo = NULL;
-    return 0;
-    }
-  if( input->GetPointData() == 0 )
-    {
-    vtkErrorMacro( << "input does not have point data." );
-    input  = NULL;
-    inInfo = NULL;
-    return 0;
-    }
-  if( input->GetPointData()->GetVectors() == 0 )
-    {
-    vtkErrorMacro( << "input does not vectors on point data." );
-    input  = NULL;
-    inInfo = NULL;
-    return 0;
-    }
-
-  if ( !this->Context )
-    {
-    vtkRenderWindow * renWin = vtkRenderWindow::New();
-    if (  this->SetContext( renWin ) == 0  )
-      {
-      renWin->Delete();
-      renWin = NULL;
-      input  = NULL;
-      inInfo = NULL;
-      return 0;
-      }
-
-    renWin = NULL; // to be released via this->context
-    this->OwnWindow = true;
-    }
-  this->Context->SetReportGraphicErrors(1);
-
-
-  vtkInformation    * outInfo = outputVector->GetInformationObject(0);
-  vtkStructuredGrid * output  = vtkStructuredGrid::SafeDownCast(
-    outInfo->Get(vtkDataObject::DATA_OBJECT()));
-  this->AllocateOutputData(output, outInfo);
-  output->ShallowCopy(input);
-
-  vtkInformation * outInfoTexture = outputVector->GetInformationObject(1);
-  vtkImageData   * outputTexture  = vtkImageData::SafeDownCast(
-    outInfoTexture->Get(vtkDataObject::DATA_OBJECT()));
-  this->AllocateOutputData(outputTexture, outInfoTexture);
-
-  // Noise.
-  vtkInformation *noiseInfo = inputVector[1]->GetInformationObject(0);
-  vtkImageData *noise=0;
-  if(noiseInfo==0)
-    {
-    this->NoiseSource->Update();
-    noise = this->NoiseSource->GetOutput();
-    }
-  else
-    {
-    noise=vtkImageData::SafeDownCast(
-      noiseInfo->Get(vtkDataObject::DATA_OBJECT()));
-
-    if(noise->GetPointData()==0)
-      {
-      vtkErrorMacro(<<"provided noise does not have point data.");
-      return 0;
-      }
-    if(noise->GetPointData()->GetScalars()==0)
-      {
-      vtkErrorMacro(<<"provided noise does not have scalars on point data.");
-      return 0;
-      }
-    }
-
-  int width;
-  int height;
-  int firstComponent;
-  int secondComponent;
-  int slice;
-  if(dims[0]==1)
-    {
-    vtkDebugMacro( << "x" << endl );
-    firstComponent = 1;
-    secondComponent = 2;
-    slice=0;
-    }
-  else
-    {
-    if(dims[1]==1)
-      {
-      vtkDebugMacro( << "y" << endl );
-      firstComponent = 0;
-      secondComponent = 2;
-      slice=1;
-      }
-    else
-      {
-      vtkDebugMacro( << "z" << endl );
-      firstComponent = 0;
-      secondComponent = 1;
-      slice=2;
-      }
-    }
-
-  width=dims[firstComponent];
-  height=dims[secondComponent];
-
-  vtkDebugMacro( << "w = " << width << " h = " << height << endl );
-
-  vtkDataTransferHelper * vectorFieldBus=vtkDataTransferHelper::New();
-  vectorFieldBus->SetContext(this->Context);
-  vectorFieldBus->SetCPUExtent(inputRequestedExtent); // input->GetExtent());
-  vectorFieldBus->SetGPUExtent(inputRequestedExtent); // input->GetExtent());
-  //  vectorFieldBus->SetTextureExtent(input->GetExtent());
-  vectorFieldBus->SetArray(input->GetPointData()->GetVectors());
-
-  vtkDataTransferHelper * pointBus=vtkDataTransferHelper::New();
-  pointBus->SetContext(this->Context);
-  pointBus->SetCPUExtent(inputRequestedExtent); // input->GetExtent());
-  pointBus->SetGPUExtent(inputRequestedExtent); // input->GetExtent());
-  //  pointBus->SetTextureExtent(input->GetExtent());
-  pointBus->SetArray(input->GetPoints()->GetData());
-
-  vtkOpenGLExtensionManager * mgr = vtkOpenGLExtensionManager::New();
-  mgr->SetRenderWindow(this->Context);
-
-  // Vector field in image space.
-  vtkTextureObject * vector2=vtkTextureObject::New();
-  vector2->SetContext(this->Context);
-  vector2->Create2D(width,height,3,VTK_FLOAT,false);
-
-  vtkDebugMacro( << "Vector field in image space (target) textureId = "
-                 << vector2->GetHandle() << endl );
-
-  vtkFrameBufferObject *fbo=vtkFrameBufferObject::New();
-  fbo->SetContext(this->Context);
-  fbo->SetColorBuffer(0,vector2);
-  fbo->SetNumberOfRenderTargets(1);
-  fbo->SetActiveBuffer(0);
-
-  if (  !fbo->Start( width, height, false )  )
-    {
-    mgr->Delete();
-    fbo->Delete();
-    vector2->Delete();
-    pointBus->Delete();
-    vectorFieldBus->Delete();
-
-    mgr = NULL;
-    fbo = NULL;
-    vector2  = NULL;
-    pointBus = NULL;
-    vectorFieldBus   = NULL;
-
-    noise   = NULL;
-    input   = NULL;
-    inInfo  = NULL;
-    output  = NULL;
-    outInfo = NULL;
-    noiseInfo = NULL;
-    outputTexture  = NULL;
-    outInfoTexture = NULL;
-
-    this->FBOSuccess = 0;
-    return 0;
-    }
-  this->FBOSuccess = 1;
-
-  vtkShaderProgram2 *pgm=vtkShaderProgram2::New();
-  pgm->SetContext(static_cast<vtkOpenGLRenderWindow *>(this->Context.GetPointer()));
-
-  vtkShader2 *shader=vtkShader2::New();
-  shader->SetType(VTK_SHADER_TYPE_FRAGMENT);
-  shader->SetSourceCode(vtkStructuredGridLIC2D_fs);
-  shader->SetContext(pgm->GetContext());
-  pgm->GetShaders()->AddItem(shader);
-  shader->Delete();
-  shader = NULL;
-
-  pgm->Build();
-  if(pgm->GetLastBuildStatus()!=VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
-    {
-    vtkErrorMacro("shader build error.");
-    return 0;
-    }
-
-  int value=0;
-  pgm->GetUniformVariables()->SetUniformi("texPoints",1,&value);
-  value=1;
-  pgm->GetUniformVariables()->SetUniformi("texVectorField",1,&value);
-  float fvalues[3];
-  fvalues[0]=static_cast<float>(dims[0]);
-  fvalues[1]=static_cast<float>(dims[1]);
-  fvalues[2]=static_cast<float>(dims[2]);
-  pgm->GetUniformVariables()->SetUniformf("uDimensions",3,fvalues);
-  value=slice;
-  pgm->GetUniformVariables()->SetUniformi("uSlice",1,&value);
-
-  vtkgl::ActiveTexture(vtkgl::TEXTURE0);
-  pointBus->Upload(0,0);
-  vtkTextureObject *points=pointBus->GetTexture();
-  points->Bind();
-  glTexParameteri(points->GetTarget(),GL_TEXTURE_WRAP_S,
-                  GL_CLAMP);
-  glTexParameteri(points->GetTarget(),GL_TEXTURE_WRAP_T,
-                  GL_CLAMP);
-  glTexParameteri(points->GetTarget(), vtkgl::TEXTURE_WRAP_R,
-                  GL_CLAMP);
-  glTexParameteri(points->GetTarget(), GL_TEXTURE_MIN_FILTER,
-                  GL_NEAREST);
-  glTexParameteri(points->GetTarget(), GL_TEXTURE_MAG_FILTER,
-                  GL_NEAREST);
-
-  vtkDebugMacro( << "points on texture unit 0, textureId == "
-                 << points->GetHandle() << endl );
-
-  vtkgl::ActiveTexture(vtkgl::TEXTURE1);
-  vectorFieldBus->Upload(0,0);
-  vtkTextureObject *vectorField=vectorFieldBus->GetTexture();
-  vectorField->Bind();
-  glTexParameteri(vectorField->GetTarget(),GL_TEXTURE_WRAP_S,
-      GL_CLAMP);
-  glTexParameteri(vectorField->GetTarget(),GL_TEXTURE_WRAP_T,
-      GL_CLAMP);
-  glTexParameteri(vectorField->GetTarget(), vtkgl::TEXTURE_WRAP_R,
-      GL_CLAMP);
-  glTexParameteri(vectorField->GetTarget(), GL_TEXTURE_MIN_FILTER,
-      GL_NEAREST);
-  glTexParameteri(vectorField->GetTarget(), GL_TEXTURE_MAG_FILTER,
-      GL_NEAREST);
-
-  vtkDebugMacro( << "vector field on texture unit 1, textureId == "
-                 << vectorField->GetHandle() << endl );
-
-  pgm->Use();
-
-  if(!pgm->IsValid())
-    {
-    vtkErrorMacro(<<" validation of the program failed: "<<pgm->GetLastValidateLog());
-    }
-
-  vtkDebugMacro( << "glFinish before rendering quad" << endl );
-  glFinish(); // debug: GL break point
-  vtkGraphicErrorMacro(this->Context,"error1");
-  fbo->RenderQuad(0, width-1,0,height-1);
-  vtkGraphicErrorMacro(this->Context,"error2");
-
-  vtkDebugMacro( << "glFinish after rendering quad" << endl );
-  glFinish(); // debug: GL break point
-
-  pgm->Restore();
-
-  vtkLineIntegralConvolution2D *internal=vtkLineIntegralConvolution2D::New();
-  if (  !internal->IsSupported( this->Context )  )
-    {
-    pgm->ReleaseGraphicsResources();
-
-    pgm->Delete();
-    mgr->Delete();
-    fbo->Delete();
-    vector2->Delete();
-    internal->Delete();
-    pointBus->Delete();
-    vectorFieldBus->Delete();
-
-    pgm = NULL;
-    mgr = NULL;
-    fbo = NULL;
-    vector2  = NULL;
-    internal = NULL;
-    pointBus = NULL;
-    vectorFieldBus = NULL;
-
-    noise   = NULL;
-    input   = NULL;
-    inInfo  = NULL;
-    points  = NULL;
-    output  = NULL;
-    outInfo = NULL;
-    noiseInfo   = NULL;
-    vectorField = NULL;
-    outputTexture  = NULL;
-    outInfoTexture = NULL;
-
-    this->LICSuccess = 0;
-    return 0;
-    }
-
-  internal->SetNumberOfSteps(this->Steps);
-  internal->SetLICStepSize(this->StepSize);
-  internal->SetMagnification(this->Magnification);
-  internal->SetComponentIds(firstComponent,secondComponent);
-
-  vtkDataTransferHelper *noiseBus=vtkDataTransferHelper::New();
-  noiseBus->SetContext(this->Context);
-  noiseBus->SetCPUExtent(noise->GetExtent());
-  noiseBus->SetGPUExtent(noise->GetExtent());
-  //  noiseBus->SetTextureExtent(noise->GetExtent());
-  noiseBus->SetArray(noise->GetPointData()->GetScalars());
-  noiseBus->Upload(0,0);
-
-  internal->SetVectorField(vector2);
-  internal->SetNoise(noiseBus->GetTexture());
-
-  if (  !internal->Execute()  )
-    {
-    pgm->ReleaseGraphicsResources();
-
-    pgm->Delete();
-    mgr->Delete();
-    fbo->Delete();
-    vector2->Delete();
-    internal->Delete();
-    pointBus->Delete();
-    noiseBus->Delete();
-    vectorFieldBus->Delete();
-
-    pgm = NULL;
-    mgr = NULL;
-    fbo = NULL;
-    vector2  = NULL;
-    internal = NULL;
-    pointBus = NULL;
-    noiseBus = NULL;
-    vectorFieldBus = NULL;
-
-    noise   = NULL;
-    input   = NULL;
-    inInfo  = NULL;
-    points  = NULL;
-    output  = NULL;
-    outInfo = NULL;
-    noiseInfo   = NULL;
-    vectorField = NULL;
-    outputTexture  = NULL;
-    outInfoTexture = NULL;
-
-    this->LICSuccess = 0;
-    return 0;
-    }
-  this->LICSuccess = 1;
-
-  vtkDataTransferHelper *outputBus=vtkDataTransferHelper::New();
-  outputBus->SetContext(this->Context);
-  outputBus->SetCPUExtent(outputTexture->GetExtent());
-  outputBus->SetGPUExtent(outputTexture->GetExtent());
-  //  outputBus->SetTextureExtent(outputTexture->GetExtent());
-  outputBus->SetTexture(internal->GetLIC());
-  outputBus->SetArray(outputTexture->GetPointData()->GetScalars());
-  outputBus->Download();
-  outputBus->Delete();
-
-  // Pass three. Generate texture coordinates. Software.
-  vtkFloatArray *tcoords=vtkFloatArray::New();
-  tcoords->SetNumberOfComponents(2);
-  tcoords->SetNumberOfTuples(dims[0]*dims[1]*dims[2]);
-  output->GetPointData()->SetTCoords(tcoords);
-  tcoords->Delete();
-
-  double ddim[3];
-  ddim[0]=static_cast<double>(dims[0]-1);
-  ddim[1]=static_cast<double>(dims[1]-1);
-  ddim[2]=static_cast<double>(dims[2]-1);
-
-  int tz=0;
-  while(tz<dims[slice])
-    {
-    int ty=0;
-    while(ty<dims[secondComponent])
-      {
-      int tx=0;
-      while(tx<dims[firstComponent])
-        {
-        tcoords->SetTuple2(
-          (tz*dims[secondComponent]+ty)*dims[firstComponent]+tx,
-          tx/ddim[firstComponent], ty/ddim[secondComponent]);
-        ++tx;
-        }
-      ++ty;
-      }
-    ++tz;
-    }
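-
-  // Worked example of the indexing above: for a z slice (slice == 2) with
-  // dims == (3, 2, 1), the point (tx, ty) == (2, 1) is stored at linear index
-  // 1*3 + 2 == 5 and gets the texture coordinate (2/2, 1/1) == (1, 1).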
-
-  internal->Delete();
-  noiseBus->Delete();
-  vectorFieldBus->Delete();
-  pointBus->Delete();
-  mgr->Delete();
-  vector2->Delete();
-  fbo->Delete();
-  pgm->ReleaseGraphicsResources();
-  pgm->Delete();
-
-  // Make sure the active texture is back to texture0 for the part of the
-  // pipeline using OpenGL 1.1 (texture on actor)
-  vtkgl::ActiveTexture(vtkgl::TEXTURE0);
-
-  return 1;
-}
-
-//----------------------------------------------------------------------------
-void vtkStructuredGridLIC2D::PrintSelf( ostream & os, vtkIndent indent )
-{
-  this->Superclass::PrintSelf( os, indent );
-
-  os << indent << "Steps: "         << this->Steps          << "\n";
-  os << indent << "StepSize: "      << this->StepSize       << "\n";
-  os << indent << "FBOSuccess: "    << this->FBOSuccess     << "\n";
-  os << indent << "LICSuccess: "    << this->LICSuccess     << "\n";
-  os << indent << "Magnification: " << this->Magnification  << "\n";
-  os << indent << "OpenGLExtensionsSupported: "
-               << this->OpenGLExtensionsSupported << "\n";
-}
diff --git a/Rendering/HybridOpenGL/vtkStructuredGridLIC2D.h b/Rendering/HybridOpenGL/vtkStructuredGridLIC2D.h
deleted file mode 100644
index aaceb82..0000000
--- a/Rendering/HybridOpenGL/vtkStructuredGridLIC2D.h
+++ /dev/null
@@ -1,166 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkStructuredGridLIC2D.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkStructuredGridLIC2D
-// .SECTION Description
-// GPU implementation of Line Integral Convolution (LIC), a technique for
-// imaging vector fields.
-//
-// The input on port 0 is a 2D vtkStructuredGrid. It needs a vector field
-// on its point data.
-// Port 1 is an optional port for a customized noise input. If it is not
-// connected, noise is generated by the filter. Even if non-power-of-two
-// textures are supported, providing a power-of-two image may result in
-// faster execution on the GPU.
-//
-// Please refer to Forssell, L. K., “Visualizing flow over curvilinear grid
-// surfaces using line integral convolution”, Visualization 94 Conference
-// Proceedings, pages 240-247, IEEE Computer Society, 1994 for details of the
-// algorithm.
-//
-// .SECTION Required OpenGL Extensions
-// GL_ARB_texture_non_power_of_two
-// GL_VERSION_2_0
-// GL_ARB_texture_float
-// GL_ARB_draw_buffers
-// GL_EXT_framebuffer_object
-// GL_ARB_pixel_buffer_object
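-//
-// Illustrative usage sketch (not taken from the original documentation);
-// "gridWithVectors" stands for any upstream source producing a 2D
-// vtkStructuredGrid with vectors on its point data:
-//
-//   vtkStructuredGridLIC2D * lic = vtkStructuredGridLIC2D::New();
-//   lic->SetInputConnection( 0, gridWithVectors->GetOutputPort() );
-//   lic->SetSteps( 40 );      // more steps, better visual quality
-//   lic->SetStepSize( 0.5 );  // smaller steps, better visual quality
-//   lic->Update();
-//   // output port 0: input grid plus texture coordinates
-//   // output port 1: the computed LIC texture (vtkImageData)
-//   lic->Delete();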
-#ifndef __vtkStructuredGridLIC2D_h
-#define __vtkStructuredGridLIC2D_h
-
-#include "vtkRenderingHybridOpenGLModule.h" // For export macro
-#include "vtkStructuredGridAlgorithm.h"
-#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
-
-class vtkRenderWindow;
-class vtkImageNoiseSource;
-
-class VTKRENDERINGHYBRIDOPENGL_EXPORT vtkStructuredGridLIC2D
-  : public vtkStructuredGridAlgorithm
-{
-public:
-  static vtkStructuredGridLIC2D* New();
-  vtkTypeMacro(vtkStructuredGridLIC2D, vtkStructuredGridAlgorithm);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Get/Set the context. Context must be a vtkOpenGLRenderWindow.
-  // This does not increase the reference count of the
-  // context to avoid reference loops.
-  // SetContext() may raise an error if the OpenGL context does not support the
-  // required OpenGL extensions. Returns 0 upon failure and 1 upon success.
-  int SetContext( vtkRenderWindow * context );
-  vtkRenderWindow * GetContext();
-
-  // Description:
-  // Number of steps. Initial value is 1.
-  // class invariant: Steps>0.
-  // In terms of visual quality, the greater the better.
-  vtkSetMacro(Steps,int);
-  vtkGetMacro(Steps,int);
-
-  // Description:
-  // Step size.
-  // WE ARE NOT SURE YET about the space where we define the step.
-  // If the image data has different spacing in each dimension, it
-  // is an issue.
-  // Initial value is 1.0.
-  // class invariant: StepSize>0.0.
-  // In terms of visual quality, the smaller the better.
-  // The type for the interface is double because the VTK interface uses
-  // double, but the GPU only supports float. This value is converted to
-  // float during the execution of the algorithm.
-  vtkSetMacro(StepSize,double);
-  vtkGetMacro(StepSize,double);
-
-  // Description:
-  // Get/Set the magnification factor. Default is 1.
-  vtkSetClampMacro(Magnification, int, 1, VTK_INT_MAX);
-  vtkGetMacro(Magnification, int);
-
-  // Description:
-  // Check if the required OpenGL extensions / GPU are supported.
-  vtkGetMacro( OpenGLExtensionsSupported, int );
-
-  // Description:
-  // Check if FBO is started properly.
-  int   GetFBOSuccess() { return this->FBOSuccess; }
-
-  // Description:
-  // Check if LIC runs properly.
-  int   GetLICSuccess() { return this->LICSuccess; }
-
-//BTX
-protected:
-  vtkStructuredGridLIC2D();
-  ~vtkStructuredGridLIC2D();
-
-  // Description:
-  // Fill the input port information objects for this algorithm.  This
-  // is invoked by the first call to GetInputPortInformation for each
-  // port so subclasses can specify what they can handle.
-  // Redefined from the superclass.
-  virtual int FillInputPortInformation(int port,
-                                       vtkInformation *info);
-
-  // Description:
-  // Fill the output port information objects for this algorithm.
-  // This is invoked by the first call to GetOutputPortInformation for
-  // each port so subclasses can specify what they can handle.
-  // Redefined from the superclass.
-  virtual int FillOutputPortInformation(int port,
-                                        vtkInformation *info);
-
-  virtual int RequestInformation(vtkInformation *request,
-         vtkInformationVector **inputVector,
-         vtkInformationVector *outputVector);
-
-  int RequestUpdateExtent (vtkInformation *request,
-                           vtkInformationVector **inputVector,
-                           vtkInformationVector *outputVector);
-
-  // Description:
-  // Stolen from vtkImageAlgorithm. Should be in vtkStructuredGridAlgorithm.
-  void AllocateOutputData(vtkDataObject *output,
-                          vtkInformation *outInfo);
-
-  // Description:
-  // Stolen from vtkImageData. Should be in vtkStructuredGrid.
-  void AllocateScalars(vtkStructuredGrid *sg, vtkInformation *outInfo);
-
-  // Description:
-  // This is called by the superclass.
-  // This is the method you should override.
-  virtual int RequestData(vtkInformation *request,
-                          vtkInformationVector **inputVector,
-                          vtkInformationVector *outputVector);
-
-  int    Steps;
-  double StepSize;
-  int    Magnification;
-  int    OpenGLExtensionsSupported;
-  vtkWeakPointer<vtkRenderWindow> Context;
-
-  vtkImageNoiseSource* NoiseSource;
-  bool   OwnWindow;
-  int    FBOSuccess;
-  int    LICSuccess;
-
-private:
-  vtkStructuredGridLIC2D(const vtkStructuredGridLIC2D&); // Not implemented.
-  void operator=(const vtkStructuredGridLIC2D&); // Not implemented.
-//ETX
-};
-
-#endif
diff --git a/Rendering/HybridOpenGL/vtkStructuredGridLIC2D_fs.glsl b/Rendering/HybridOpenGL/vtkStructuredGridLIC2D_fs.glsl
deleted file mode 100644
index efe2e9f..0000000
--- a/Rendering/HybridOpenGL/vtkStructuredGridLIC2D_fs.glsl
+++ /dev/null
@@ -1,179 +0,0 @@
-//=========================================================================
-//
-//  Program:   Visualization Toolkit
-//  Module:    vtkStructuredGridLIC2D_fs.glsl
-//
-//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-//  All rights reserved.
-//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-//
-//     This software is distributed WITHOUT ANY WARRANTY; without even
-//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-//     PURPOSE.  See the above copyright notice for more information.
-//
-//=========================================================================
-
-// Filename: vtkStructuredGridLIC2D_fs.glsl
-// Filename is useful when using gldb-gui
-
-#version 120 // because of transpose()
-
-
-/*
-For an input structured grid, this computes the inverse Jacobian at each point.
-
-Algorithm:
-* PASS ONE
-* * render to compute the transformed vector field for the points.
-* PASS TWO
-* * perform LIC with the new vector field. This has to happen in a different
-*   pass than computation of the transformed vector.
-* PASS THREE
-* * Render structured slice quads with correct texture coordinates and apply
-*   the LIC texture to them.
-*/
-
-uniform sampler2D texPoints;  // point coordinates
-uniform sampler2D texVectorField; // vector field.
-uniform vec3 uDimensions;     // structured dimensions; initially == (width, height, 1)
-
-uniform int uSlice; // 0,1,2
-
-ivec3 getIJK(vec3 ninjnk, vec3 dims)
-{
-  return ivec3(floor(ninjnk*(dims-1.0)+vec3(0.5, 0.5, 0.5)));
-}
-
-vec3 getVector(ivec3 ijk, vec3 dims, sampler2D field)
-{
-  // Originally the k component was ignored because dims == (width, height, 1);
-  // that assumption no longer holds, so uSlice selects which two components
-  // index the 2D texture.
-  vec3 rcoord = vec3(ijk)/max(vec3(1.0), dims-1.0);
-  vec2 tcoord;
-
-  if(uSlice==0)
-   {
-    tcoord.xy=rcoord.yz;
-   }
-  else
-  {
-   if(uSlice==1)
-    {
-     tcoord.xy=rcoord.xz;
-    }
-    else
-    {
-     tcoord.xy=rcoord.xy;
-    }
-  }
-
-  return texture2D(field, tcoord).xyz;
-}
-
-float determinant(mat3 m)
-{
-  // Expand the determinant along the first row.
-return m[0][0]*(m[2][2]*m[1][1] - m[2][1]*m[1][2])
-     - m[1][0]*(m[2][2]*m[0][1] - m[2][1]*m[0][2])
-     + m[2][0]*(m[1][2]*m[0][1] - m[1][1]*m[0][2]);
-}
-
-mat3 inverse(mat3 mm, float det)
-{
-  mat3 m=transpose(mm);
-
-  mat3 adjM = mat3(
-    m[2][2]*m[1][1]-m[2][1]*m[1][2], -(m[2][2]*m[0][1]-m[2][1]*m[0][2]),  m[1][2]*m[0][1]-m[1][1]*m[0][2],
-  -(m[2][2]*m[1][0]-m[2][0]*m[1][2]),  m[2][2]*m[0][0]-m[2][0]*m[0][2], -(m[1][2]*m[0][0]-m[1][0]*m[0][2]),
-    m[2][1]*m[1][0]-m[2][0]*m[1][1], -(m[2][1]*m[0][0]-m[2][0]*m[0][1]),  m[1][1]*m[0][0]-m[1][0]*m[0][1]
-  );
-
-  return adjM/det;
-}
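-
-// Illustrative note: the function above uses the classical adjugate identity
-// inverse(M) == adjugate(M) / det(M); each entry of adjM is a signed 2x2
-// sub-determinant (cofactor) of the transposed input matrix.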
-
-mat3 jacobian(ivec3 ijk, vec3 dims, sampler2D tex)
-{
-  // Jacobian is estimated with a central finite difference technique.
-
-  // get point coordinates at (i, j, k),
-  //  vec3 pts_I_J_K  = getVector(ijk, dims, tex);
-
-  //(i-1, j, k), (i+1, j, k)
-  vec3 pts_IM1_J_K = getVector(ivec3(ijk.x-1, ijk.yz), dims, tex);
-  vec3 pts_I1_J_K = getVector(ivec3(ijk.x+1, ijk.yz), dims, tex);
-
-  //   (i, j-1, k), (i, j+1, k)
-  vec3 pts_I_JM1_K = getVector(ivec3(ijk.x, ijk.y-1, ijk.z), dims, tex);
-  vec3 pts_I_J1_K = getVector(ivec3(ijk.x, ijk.y+1, ijk.z), dims, tex);
-
-  // (i, j, k-1), (i, j, k+1).
-  vec3 pts_I_J_KM1 = getVector(ivec3(ijk.xy, ijk.z-1), dims, tex);
-  vec3 pts_I_J_K1 = getVector(ivec3(ijk.xy, ijk.z+1), dims, tex);
-
-  vec3 col1 = 0.5*(pts_I1_J_K - pts_IM1_J_K);
-  vec3 col2 = 0.5*(pts_I_J1_K - pts_I_JM1_K);
-  vec3 col3 = 0.5*(pts_I_J_K1 - pts_I_J_KM1);
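-
-  // Illustrative note: each column above is a central difference, e.g.
-  //   d(x,y,z)/di ~= ( p(i+1,j,k) - p(i-1,j,k) ) / 2,
-  // with the spacing between neighbouring structured indices taken as 1.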
-
- if(uSlice==0)
-  {
-    col1[0]=1.0;
-  }
- else
-  {
-     if(uSlice==1)
-      {
-      col2[1]=1.0;
-      }
-     else
-      {
-      col3[2]=1.0;
-      }
-  }
-
-  /*
-  Jacobian is given by
-  | dx/di, dx/dj, dx/dk |
-  | dy/di, dy/dj, dy/dk |
-  | dz/di, dz/dj, dz/dk |
-     where  d == partial derivative
-  */
-
-  mat3 J = mat3(col1, col2, col3);
-  return J;
-}
-
-void main(void)
-{
-  // determine the structured coordinate for the current location.
-  vec3 tcoord;
-  if(uSlice==0)
-  {
-   tcoord=vec3(0,gl_TexCoord[0].st);
-  }
-  else
-  {
-   if(uSlice==1)
-    {
-     tcoord=vec3(gl_TexCoord[0].s,0,gl_TexCoord[0].t);
-    }
-   else
-    {
-     tcoord=vec3(gl_TexCoord[0].st, 0);
-    }
-  }
-
-
-  ivec3 ijk = getIJK(tcoord, uDimensions);
-
-  // compute partial derivative for X.
-  mat3 J = jacobian(ijk, uDimensions, texPoints);
-
-  // compute inverse of J.
-  vec3 vector = getVector(ijk, uDimensions, texVectorField);
-  float detJ=determinant(J);
-  mat3 invJ = inverse(J,detJ);
-  gl_FragData[0] = vec4(invJ*vector, 1.0);
-//gl_FragData[0] = vec4(vector, 1.0);
-//    gl_FragData[0] = vec4(detJ);
-//      gl_FragData[0] = vec4(J[2],1.0);
-}
diff --git a/Rendering/HybridOpenGL/vtkSurfaceLICDefaultPainter.cxx b/Rendering/HybridOpenGL/vtkSurfaceLICDefaultPainter.cxx
deleted file mode 100644
index 4602253..0000000
--- a/Rendering/HybridOpenGL/vtkSurfaceLICDefaultPainter.cxx
+++ /dev/null
@@ -1,60 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkSurfaceLICDefaultPainter.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkSurfaceLICDefaultPainter.h"
-
-#include "vtkGarbageCollector.h"
-#include "vtkSurfaceLICPainter.h"
-#include "vtkObjectFactory.h"
-#include "vtkClipPlanesPainter.h"
-
-vtkStandardNewMacro(vtkSurfaceLICDefaultPainter);
-vtkCxxSetObjectMacro(vtkSurfaceLICDefaultPainter, SurfaceLICPainter, vtkSurfaceLICPainter);
-//----------------------------------------------------------------------------
-vtkSurfaceLICDefaultPainter::vtkSurfaceLICDefaultPainter()
-{
-  this->SurfaceLICPainter = vtkSurfaceLICPainter::New();
-}
-
-//----------------------------------------------------------------------------
-vtkSurfaceLICDefaultPainter::~vtkSurfaceLICDefaultPainter()
-{
-  this->SetSurfaceLICPainter(0);
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICDefaultPainter::BuildPainterChain()
-{
-  this->Superclass::BuildPainterChain();
-
-  // Now insert the SurfaceLICPainter before the display-list painter.
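-  // Illustrative note: after the two calls below the chain reads
-  //   ... -> ClipPlanesPainter -> SurfaceLICPainter -> former delegate -> ...
-  // i.e. the LIC painter is spliced in directly after the clip-planes painter.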
-  vtkPainter* prevPainter = this->GetClipPlanesPainter();
-  this->SurfaceLICPainter->SetDelegatePainter(prevPainter->GetDelegatePainter());
-  prevPainter->SetDelegatePainter(this->SurfaceLICPainter);
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICDefaultPainter::ReportReferences(vtkGarbageCollector *collector)
-{
-  this->Superclass::ReportReferences(collector);
-  vtkGarbageCollectorReport(collector, this->SurfaceLICPainter,
-    "SurfaceLICPainter");
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICDefaultPainter::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-  os << indent << "SurfaceLICPainter: " << this->SurfaceLICPainter << endl;
-}
diff --git a/Rendering/HybridOpenGL/vtkSurfaceLICDefaultPainter.h b/Rendering/HybridOpenGL/vtkSurfaceLICDefaultPainter.h
deleted file mode 100644
index 0aa3c1b..0000000
--- a/Rendering/HybridOpenGL/vtkSurfaceLICDefaultPainter.h
+++ /dev/null
@@ -1,68 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkSurfaceLICDefaultPainter.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkSurfaceLICDefaultPainter - vtkDefaultPainter replacement that
-//  inserts the vtkSurfaceLICPainter at the correct position in the painter
-//  chain.
-//
-// .SECTION Description
-//  vtkSurfaceLICDefaultPainter is a vtkDefaultPainter replacement
-//  that inserts the vtkSurfaceLICPainter at the correct position in the painter
-//  chain.
-//
-// .SECTION See Also
-//  vtkDefaultPainter vtkSurfaceLICPainter
-
-#ifndef __vtkSurfaceLICDefaultPainter_h
-#define __vtkSurfaceLICDefaultPainter_h
-
-#include "vtkRenderingHybridOpenGLModule.h" // For export macro
-#include "vtkDefaultPainter.h"
-
-class vtkSurfaceLICPainter;
-
-class VTKRENDERINGHYBRIDOPENGL_EXPORT vtkSurfaceLICDefaultPainter
-  : public vtkDefaultPainter
-{
-public:
-  static vtkSurfaceLICDefaultPainter* New();
-  vtkTypeMacro(vtkSurfaceLICDefaultPainter, vtkDefaultPainter);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Get/Set the Surface LIC painter.
-  void SetSurfaceLICPainter(vtkSurfaceLICPainter*);
-  vtkGetObjectMacro(SurfaceLICPainter, vtkSurfaceLICPainter);
-
-//BTX
-protected:
-  vtkSurfaceLICDefaultPainter();
-  ~vtkSurfaceLICDefaultPainter();
-
-  // Description:
-  // Sets up the painter chain.
-  virtual void BuildPainterChain();
-
-  // Description:
-  // Take part in garbage collection.
-  virtual void ReportReferences(vtkGarbageCollector *collector);
-
-  vtkSurfaceLICPainter* SurfaceLICPainter;
-private:
-  vtkSurfaceLICDefaultPainter(const vtkSurfaceLICDefaultPainter&); // Not implemented.
-  void operator=(const vtkSurfaceLICDefaultPainter&); // Not implemented.
-//ETX
-};
-
-#endif
diff --git a/Rendering/HybridOpenGL/vtkSurfaceLICPainter.cxx b/Rendering/HybridOpenGL/vtkSurfaceLICPainter.cxx
deleted file mode 100644
index fbda2f0..0000000
--- a/Rendering/HybridOpenGL/vtkSurfaceLICPainter.cxx
+++ /dev/null
@@ -1,937 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkSurfaceLICPainter.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkSurfaceLICPainter.h"
-
-#include "vtkBase64Utilities.h"
-#include "vtkBoundingBox.h"
-#include "vtkCellData.h"
-#include "vtkColorMaterialHelper.h"
-#include "vtkCompositeDataIterator.h"
-#include "vtkCompositeDataSet.h"
-#include "vtkDataTransferHelper.h"
-#include "vtkFrameBufferObject.h"
-#include "vtkGarbageCollector.h"
-#include "vtkGenericDataObjectReader.h"
-#include "vtkImageData.h"
-#include "vtkLightingHelper.h"
-#include "vtkLineIntegralConvolution2D.h"
-#include "vtkMatrix4x4.h"
-#include "vtkMath.h"
-#include "vtkNoise200x200.h"
-#include "vtkObjectFactory.h"
-#include "vtkOpenGLRenderWindow.h"
-#include "vtkPointData.h"
-#include "vtkPolyData.h"
-#include "vtkProperty.h"
-#include "vtkRenderer.h"
-#include "vtkShader2Collection.h"
-#include "vtkShader2.h"
-#include "vtkShaderProgram2.h"
-#include "vtkSmartPointer.h"
-#include "vtkTextureObject.h"
-#include "vtkTimerLog.h"
-#include "vtkUniformVariables.h"
-#include "vtkWeakPointer.h"
-
-#include <assert.h>
-#include "vtkgl.h"
-#include <string>
-
-#define vtkGetIndex(r,c)    (c*4+r)
-extern const char* vtkSurfaceLICPainter_fs1;
-extern const char* vtkSurfaceLICPainter_vs1;
-extern const char* vtkSurfaceLICPainter_fs2;
-
-inline double vtkClamp(double val, const double& min, const double& max)
-{
-  val = (val < min)? min : val;
-  val = (val > max)? max : val;
-  return val;
-}
-
-class vtkSurfaceLICPainter::vtkInternals
-{
-public:
-  vtkWeakPointer<vtkOpenGLRenderWindow> LastRenderWindow;
-  int LastViewportSize[2];
-
-  // Extent relative to the viewport origin.
-  unsigned int ViewportExtent[4];
-
-  vtkSmartPointer<vtkFrameBufferObject> FBO;
-  vtkSmartPointer<vtkTextureObject> VelocityImage;
-  vtkSmartPointer<vtkTextureObject> GeometryImage;
-  vtkSmartPointer<vtkTextureObject> NoiseImage;
-  vtkSmartPointer<vtkShaderProgram2> PassOne;
-  vtkSmartPointer<vtkShaderProgram2> PassTwo;
-  vtkSmartPointer<vtkLightingHelper> LightingHelper;
-  vtkSmartPointer<vtkColorMaterialHelper> ColorMaterialHelper;
-  vtkSmartPointer<vtkImageData> Noise;
-
-  int FieldAssociation;
-  int FieldAttributeType;
-  std::string FieldName;
-  bool FieldNameSet;
-
-  // Some internal flags.
-  bool HasVectors;
-
-  vtkInternals()
-    {
-    this->LastViewportSize[0] = this->LastViewportSize[1] = 0;
-    this->HasVectors = false;
-    this->FieldNameSet = false;
-    this->FieldAttributeType = 0;
-    this->FieldAssociation = 0;
-    this->LightingHelper = vtkSmartPointer<vtkLightingHelper>::New();
-    this->ColorMaterialHelper = vtkSmartPointer<vtkColorMaterialHelper>::New();
-    }
-
-  void ClearTextures()
-    {
-    this->VelocityImage = 0;
-    this->GeometryImage = 0;
-    this->NoiseImage = 0;
-    if (this->FBO)
-      {
-      this->FBO->RemoveAllColorBuffers();
-      }
-    }
-
-  void ClearGraphicsResources()
-    {
-    this->ClearTextures();
-    this->FBO = 0;
-    this->VelocityImage = 0;
-    this->GeometryImage = 0;
-    this->NoiseImage = 0;
-    if(this->PassOne!=0)
-      {
-      this->PassOne->ReleaseGraphicsResources();
-      this->PassOne = 0;
-      }
-    if(this->PassTwo!=0)
-      {
-      this->PassTwo->ReleaseGraphicsResources();
-      this->PassTwo = 0;
-      }
-    this->LightingHelper->Initialize(0,VTK_SHADER_TYPE_VERTEX);
-    this->ColorMaterialHelper->Initialize(0);
-    }
-};
-
-vtkStandardNewMacro(vtkSurfaceLICPainter);
-//----------------------------------------------------------------------------
-vtkSurfaceLICPainter::vtkSurfaceLICPainter()
-{
-  this->Internals     = new vtkInternals();
-  this->Output        = 0;
-  this->Enable        = 1;
-  this->StepSize      = 1;
-  this->EnhancedLIC   = 1;
-  this->LICIntensity  = 0.8;
-  this->NumberOfSteps = 20;
-  this->LICSuccess    = 0;
-  this->RenderingPreparationSuccess = 0;
-
-  this->SetInputArrayToProcess(vtkDataObject::FIELD_ASSOCIATION_POINTS_THEN_CELLS,
-    vtkDataSetAttributes::VECTORS);
-}
-
-//----------------------------------------------------------------------------
-vtkSurfaceLICPainter::~vtkSurfaceLICPainter()
-{
-  this->ReleaseGraphicsResources(this->Internals->LastRenderWindow);
-  delete this->Internals;
-
-  if (this->Output)
-    {
-    this->Output->Delete();
-    this->Output = 0;
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICPainter::SetInputArrayToProcess(int fieldAssociation,
-  const char* name)
-{
-  if (this->Internals->FieldAssociation != fieldAssociation ||
-    !this->Internals->FieldNameSet ||
-    this->Internals->FieldName != name)
-    {
-    this->Internals->FieldAssociation = fieldAssociation;
-    this->Internals->FieldName = name;
-    this->Internals->FieldNameSet = true;
-    this->Modified();
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICPainter::SetInputArrayToProcess(int fieldAssociation,
-  int fieldAttributeType)
-{
-  if (this->Internals->FieldAssociation != fieldAssociation ||
-    this->Internals->FieldNameSet ||
-    this->Internals->FieldAttributeType != fieldAttributeType)
-    {
-    this->Internals->FieldAssociation = fieldAssociation;
-    this->Internals->FieldNameSet = false;
-    this->Internals->FieldAttributeType = fieldAttributeType;
-    this->Modified();
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICPainter::ReleaseGraphicsResources(vtkWindow* win)
-{
-  this->Internals->ClearGraphicsResources();
-  this->Internals->LastRenderWindow = 0;
-
-  this->Superclass::ReleaseGraphicsResources(win);
-}
-
-static vtkImageData* vtkGetNoiseResource()
-{
-  std::string base64string;
-  for (unsigned int cc=0; cc < file_noise200x200_vtk_nb_sections; cc++)
-    {
-      base64string += reinterpret_cast<const char*>(file_noise200x200_vtk_sections[cc]);
-    }
-
-  unsigned char* binaryInput = new unsigned char[file_noise200x200_vtk_decoded_length + 10];
-  unsigned long binarylength = vtkBase64Utilities::Decode(
-    reinterpret_cast<const unsigned char*>(base64string.c_str()), static_cast<unsigned long>(base64string.length()),
-    binaryInput);
-  assert("check valid_length" && binarylength == file_noise200x200_vtk_decoded_length);
-
-  vtkGenericDataObjectReader* reader = vtkGenericDataObjectReader::New();
-  reader->ReadFromInputStringOn();
-  reader->SetBinaryInputString(reinterpret_cast<char*>(binaryInput), static_cast<int>(binarylength));
-  reader->Update();
-
-  vtkImageData* data = vtkImageData::New();
-  data->ShallowCopy(reader->GetOutput());
-
-  delete [] binaryInput;
-  reader->Delete();
-  return data;
-}
-
-//----------------------------------------------------------------------------
-bool vtkSurfaceLICPainter::CanRenderLIC
-   ( vtkRenderer * vtkNotUsed(renderer), vtkActor * actor )
-{
-  return ( this->Enable && this->Internals->HasVectors &&
-           actor->GetProperty()->GetRepresentation() == VTK_SURFACE );
-}
-
-//----------------------------------------------------------------------------
-bool vtkSurfaceLICPainter::IsSupported( vtkRenderWindow * renWin )
-{
-  return (  vtkDataTransferHelper::IsSupported( renWin ) &&
-            vtkLineIntegralConvolution2D::IsSupported( renWin )  );
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICPainter::PrepareForRendering
-   ( vtkRenderer * renderer, vtkActor * actor )
-{
-  if ( !this->PrepareOutput() )
-    {
-    this->RenderingPreparationSuccess = 0;
-    return;
-    }
-
-  if (  !this->CanRenderLIC( renderer, actor )  )
-    {
-    this->ReleaseGraphicsResources( this->Internals->LastRenderWindow );
-    this->Superclass::PrepareForRendering( renderer, actor );
-    this->RenderingPreparationSuccess = 0;
-    return;
-    }
-
-  vtkOpenGLRenderWindow * renWin = vtkOpenGLRenderWindow::SafeDownCast
-                                   ( renderer->GetRenderWindow() );
-
-  if (  !this->IsSupported( renWin )  )
-    {
-    this->RenderingPreparationSuccess = 0;
-    renWin = NULL;
-    return;
-    }
-
-  if ( !this->Internals->Noise )
-    {
-    vtkImageData * noise = ::vtkGetNoiseResource();
-    this->Internals->Noise = noise;
-    noise->Delete();
-    noise = NULL;
-    }
-
-  if ( this->Internals->LastRenderWindow &&
-       this->Internals->LastRenderWindow != renWin )
-    {
-    // Cleanup all graphics resources associated with the old render window.
-    this->ReleaseGraphicsResources( this->Internals->LastRenderWindow );
-    }
-
-  this->Internals->LastRenderWindow = renWin;
-
-  // we get the view port size (not the renderwindow size).
-  int viewsize[2], vieworigin[2];
-  renderer->GetTiledSizeAndOrigin( &viewsize[0],   &viewsize[1],
-                                   &vieworigin[0], &vieworigin[1] );
-
-  if ( this->Internals->LastViewportSize[0] != viewsize[0] ||
-       this->Internals->LastViewportSize[1] != viewsize[1] )
-    {
-    // View size has changed, we need to re-generate the textures.
-    this->Internals->ClearTextures();
-    }
-
-  this->Internals->LastViewportSize[0] = viewsize[0];
-  this->Internals->LastViewportSize[1] = viewsize[1];
-
-  if ( !this->Internals->FBO )
-    {
-    vtkFrameBufferObject * fbo = vtkFrameBufferObject::New();
-    fbo->SetContext( renWin );
-    fbo->SetNumberOfRenderTargets( 2 );
-    unsigned int activeTargets[]=  { 0, 1 };
-    fbo->SetActiveBuffers( 2, activeTargets );
-    this->Internals->FBO = fbo;
-    fbo->Delete();
-    fbo = NULL;
-    }
-
-  if ( !this->Internals->GeometryImage )
-    {
-    vtkTextureObject * geometryImage = vtkTextureObject::New();
-    geometryImage->SetContext( renWin );
-    geometryImage->Create2D( viewsize[0], viewsize[1], 4, VTK_FLOAT, false );
-    this->Internals->GeometryImage = geometryImage;
-    geometryImage->Delete();
-    geometryImage = NULL;
-    }
-  this->Internals->FBO->SetColorBuffer( 0, this->Internals->GeometryImage );
-
-  if ( !this->Internals->VelocityImage )
-    {
-    vtkTextureObject * velocityImage = vtkTextureObject::New();
-    velocityImage->SetContext( renWin );
-    velocityImage->Create2D( viewsize[0], viewsize[1], 4, VTK_FLOAT, false );
-                  // (r,g) == surface vector in image space
-                  // (b) == depth.
-                  // a == unused.
-    this->Internals->VelocityImage = velocityImage;
-    velocityImage->Delete();
-    velocityImage = NULL;
-    }
-  this->Internals->FBO->SetColorBuffer( 1, this->Internals->VelocityImage );
-
-  if ( !this->Internals->PassOne )
-    {
-    vtkShaderProgram2 * pgmPass1 = vtkShaderProgram2::New();
-    pgmPass1->SetContext( renWin );
-
-    vtkShader2 * s1 = vtkShader2::New();
-    s1->SetSourceCode( vtkSurfaceLICPainter_vs1 );
-    s1->SetType( VTK_SHADER_TYPE_VERTEX );
-    s1->SetContext( pgmPass1->GetContext() );
-
-    vtkShader2 * s2 = vtkShader2::New();
-    s2->SetSourceCode( vtkSurfaceLICPainter_fs1 );
-    s2->SetType( VTK_SHADER_TYPE_FRAGMENT );
-    s2->SetContext( pgmPass1->GetContext() );
-
-    pgmPass1->GetShaders()->AddItem( s1 );
-    pgmPass1->GetShaders()->AddItem( s2 );
-    s1->Delete();
-    s2->Delete();
-    s1 = NULL;
-    s2 = NULL;
-
-    this->Internals->LightingHelper->Initialize
-                                     ( pgmPass1, VTK_SHADER_TYPE_VERTEX );
-    this->Internals->ColorMaterialHelper->Initialize( pgmPass1 );
-    this->Internals->PassOne = pgmPass1;
-    pgmPass1->Delete();
-    pgmPass1 = NULL;
-    }
-
-  if ( !this->Internals->NoiseImage )
-    {
-    vtkDataTransferHelper * noiseBus=vtkDataTransferHelper::New();
-    noiseBus->SetContext( renWin );
-    noiseBus->SetCPUExtent( this->Internals->Noise->GetExtent() );
-    noiseBus->SetGPUExtent( this->Internals->Noise->GetExtent() );
-    noiseBus->SetTextureExtent( this->Internals->Noise->GetExtent() );
-    noiseBus->SetArray( this->Internals->Noise->GetPointData()->GetScalars() );
-    noiseBus->Upload( 0, 0 );
-    this->Internals->NoiseImage = noiseBus->GetTexture();
-    noiseBus->Delete();
-    noiseBus = NULL;
-
-    vtkTextureObject * tex = this->Internals->NoiseImage;
-    tex->Bind();
-    glTexParameteri( tex->GetTarget(), GL_TEXTURE_WRAP_S,     GL_CLAMP   );
-    glTexParameteri( tex->GetTarget(), GL_TEXTURE_WRAP_T,     GL_CLAMP   );
-    glTexParameteri( tex->GetTarget(), vtkgl::TEXTURE_WRAP_R, GL_CLAMP   );
-    glTexParameteri( tex->GetTarget(), GL_TEXTURE_MIN_FILTER, GL_NEAREST );
-    glTexParameteri( tex->GetTarget(), GL_TEXTURE_MAG_FILTER, GL_NEAREST );
-    tex->UnBind();
-    tex = NULL;
-    }
-
-  if ( !this->Internals->PassTwo )
-    {
-    vtkShaderProgram2 * pgmPass2 = vtkShaderProgram2::New();
-    pgmPass2->SetContext( renWin );
-
-    vtkShader2 * s3 = vtkShader2::New();
-    s3->SetSourceCode( vtkSurfaceLICPainter_fs2 );
-    s3->SetType( VTK_SHADER_TYPE_FRAGMENT );
-    s3->SetContext( pgmPass2->GetContext() );
-    pgmPass2->GetShaders()->AddItem( s3 );
-    s3->Delete();
-    s3 = NULL;
-
-    this->Internals->PassTwo = pgmPass2;
-    pgmPass2->Delete();
-    pgmPass2 = NULL;
-    }
-
-  // Now compute the bounds of the pixels that this dataset is going to occupy
-  // on the screen.
-
-  double bounds[6];
-  this->GetBounds(this->GetInput(), bounds);
-  double worldPoints[8][4];
-  worldPoints[0][0] = bounds[0];
-  worldPoints[0][1] = bounds[2];
-  worldPoints[0][2] = bounds[4];
-  worldPoints[0][3] = 0;
-
-  worldPoints[1][0] = bounds[1];
-  worldPoints[1][1] = bounds[2];
-  worldPoints[1][2] = bounds[4];
-  worldPoints[1][3] = 0;
-
-  worldPoints[2][0] = bounds[1];
-  worldPoints[2][1] = bounds[3];
-  worldPoints[2][2] = bounds[4];
-  worldPoints[2][3] = 0;
-
-  worldPoints[3][0] = bounds[0];
-  worldPoints[3][1] = bounds[3];
-  worldPoints[3][2] = bounds[4];
-  worldPoints[3][3] = 0;
-
-  worldPoints[4][0] = bounds[0];
-  worldPoints[4][1] = bounds[2];
-  worldPoints[4][2] = bounds[5];
-  worldPoints[4][3] = 0;
-
-  worldPoints[5][0] = bounds[1];
-  worldPoints[5][1] = bounds[2];
-  worldPoints[5][2] = bounds[5];
-  worldPoints[5][3] = 0;
-
-  worldPoints[6][0] = bounds[1];
-  worldPoints[6][1] = bounds[3];
-  worldPoints[6][2] = bounds[5];
-  worldPoints[6][3] = 0;
-
-  worldPoints[7][0] = bounds[0];
-  worldPoints[7][1] = bounds[3];
-  worldPoints[7][2] = bounds[5];
-  worldPoints[7][3] = 0;
-
-  // We need to use the matrices provided by OpenGL since renderers such as
-  // vtkIceTRenderer change the matrices on the fly without updating the
-  // vtkCamera transforms.
-  GLdouble projection[16];
-  GLdouble modelview[16];
-  GLdouble transform[16];
-  glGetDoublev( GL_PROJECTION_MATRIX, projection );
-  glGetDoublev( GL_MODELVIEW_MATRIX,  modelview  );
-  for ( int c = 0; c < 4; c ++ )
-    {
-    for ( int r = 0; r < 4; r ++ )
-      {
-      transform[ c * 4 + r ] =
-          projection[ vtkGetIndex( r, 0 ) ] * modelview[ vtkGetIndex( 0, c ) ]
-        + projection[ vtkGetIndex( r, 1 ) ] * modelview[ vtkGetIndex( 1, c ) ]
-        + projection[ vtkGetIndex( r, 2 ) ] * modelview[ vtkGetIndex( 2, c ) ]
-        + projection[ vtkGetIndex( r, 3 ) ] * modelview[ vtkGetIndex( 3, c ) ];
-      }
-    }
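-
-  // Illustrative note: the loop above forms transform = projection * modelview
-  // in OpenGL's column-major layout (transform[c*4+r] == sum_k P[r][k]*M[k][c]),
-  // so applying "transform" below maps the bounding-box corners directly to
-  // clip coordinates before the perspective division.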
-
-  vtkBoundingBox box;
-  for (int kk = 0; kk < 8; kk ++ )
-    {
-    double x = worldPoints[kk][0];
-    double y = worldPoints[kk][1];
-    double z = worldPoints[kk][2];
-    double view[4];
-    view[0] = x * transform[vtkGetIndex(0,0)] + y * transform[vtkGetIndex(0,1)] +
-              z * transform[vtkGetIndex(0,2)] + transform[vtkGetIndex(0,3)];
-    view[1] = x * transform[vtkGetIndex(1,0)] + y * transform[vtkGetIndex(1,1)] +
-              z * transform[vtkGetIndex(1,2)] + transform[vtkGetIndex(1,3)];
-    view[2] = x * transform[vtkGetIndex(2,0)] + y * transform[vtkGetIndex(2,1)] +
-              z * transform[vtkGetIndex(2,2)] + transform[vtkGetIndex(2,3)];
-    view[3] = x * transform[vtkGetIndex(3,0)] + y * transform[vtkGetIndex(3,1)] +
-              z * transform[vtkGetIndex(3,2)] + transform[vtkGetIndex(3,3)];
-
-    if (view[3] != 0.0)
-      {
-      view[0] = view[0]/view[3];
-      view[1] = view[1]/view[3];
-      view[2] = view[2]/view[3];
-      }
-    double displayPt[2];
-    displayPt[0] = ( view[0] + 1.0 ) * viewsize[0] / 2.0/* + vieworigin[0]*/;
-    displayPt[1] = ( view[1] + 1.0 ) * viewsize[1] / 2.0/* + vieworigin[1]*/;
-    box.AddPoint(
-      vtkClamp( displayPt[0]/*-vieworigin[0]*/, 0.0, viewsize[0] - 1.0 ),
-      vtkClamp( displayPt[1]/*-vieworigin[1]*/, 0.0, viewsize[1] - 1.0 ), 0.0 );
-    }
-
-  this->Internals->ViewportExtent[0] =
-        static_cast<unsigned int>( box.GetMinPoint()[0] );
-  this->Internals->ViewportExtent[1] =
-        static_cast<unsigned int>( box.GetMaxPoint()[0] );
-  this->Internals->ViewportExtent[2] =
-        static_cast<unsigned int>( box.GetMinPoint()[1] );
-  this->Internals->ViewportExtent[3] =
-        static_cast<unsigned int>( box.GetMaxPoint()[1] );
-
-  vtkDebugMacro( << "ViewportExtent: " << this->Internals->ViewportExtent[0]
-                 << ", " << this->Internals->ViewportExtent[1]
-                 << ", " << this->Internals->ViewportExtent[2]
-                 << ", " << this->Internals->ViewportExtent[3] << endl );
-
-  this->Superclass::PrepareForRendering( renderer, actor );
-
-  this->RenderingPreparationSuccess = 1;
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICPainter::RenderInternal
-   ( vtkRenderer * renderer,  vtkActor * actor,
-     unsigned long typeflags, bool forceCompileOnly )
-{
-  if (  !this->RenderingPreparationSuccess  ||
-        !this->CanRenderLIC( renderer, actor )  )
-    {
-    this->Superclass::RenderInternal
-        ( renderer, actor, typeflags, forceCompileOnly );
-    return;
-    }
-
-  vtkTimerLog * timer = vtkTimerLog::New();
-  timer->StartTimer();
-
-  // Save context state to be able to restore.
-  glPushAttrib(GL_ALL_ATTRIB_BITS);
-  // save model-view/projection matrices.
-  glMatrixMode(GL_PROJECTION);
-  glPushMatrix();
-  glMatrixMode(GL_MODELVIEW);
-  glPushMatrix();
-
-  // TODO: eventually we'll add code to generate the LIC only if the camera
-  // position has changed or the input dataset has changed. Currently, we always
-  // rebuild the LIC.
-
-  // * PASS ONE
-  //   * Render geometry
-  //   * Outputs:
-  //      - shaded geometry rendering -- used to combine with the final LIC
-  //        image.
-  //      - "velocity image"
-  //      - "depth mask" - when putting pixes back into the original scene, we
-  //        need to ensure that the depth values match the original rendering.
-  //      - model-view and projection matrices.
-  // * PASS THREE to N
-  //   * Render Quad covering the image-space bounds of the rendered geometry
-  //     and perform LIC
-  // * PASS (N+1)
-  //   * Combine shaded geometry rendering with LIC image and put it back into
-  //   the actual render window.
-
-  vtkOpenGLRenderWindow * renWin = vtkOpenGLRenderWindow::SafeDownCast
-                                   ( renderer->GetRenderWindow() );
-
-  // Get the viewport size (not the render window size).
-  int viewsize[2], vieworigin[2];
-  renderer->GetTiledSizeAndOrigin(&viewsize[0], &viewsize[1], &vieworigin[0], &vieworigin[1]);
-
-  glViewport(0, 0, viewsize[0], viewsize[1]);
-  // Set clear color to black in case user has set some background color.
-  glClearColor(0.0, 0.0, 0.0, 0.0);
-
-  // Set the scissor to the area covered by the data.
-  glEnable(GL_SCISSOR_TEST);
-  glScissor(this->Internals->ViewportExtent[0],
-    this->Internals->ViewportExtent[2],
-    this->Internals->ViewportExtent[1]-this->Internals->ViewportExtent[0]+1,
-    this->Internals->ViewportExtent[3]-this->Internals->ViewportExtent[2]+1);
-
-  if (  !this->Internals->FBO
-             ->StartNonOrtho( viewsize[0], viewsize[1], false )  )
-    {
-    timer->Delete();
-    timer  = NULL;
-    renWin = NULL;
-    this->LICSuccess = 0;
-    return;
-    }
-
-  glClear(GL_DEPTH_BUFFER_BIT|GL_COLOR_BUFFER_BIT);
-
-  this->Internals->ColorMaterialHelper->PrepareForRendering();
-  this->Internals->LightingHelper->PrepareForRendering();
-
-  this->Internals->PassOne->Build();
-  if(this->Internals->PassOne->GetLastBuildStatus()!=
-     VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
-    {
-    vtkErrorMacro("Pass One failed.");
-    abort();
-    }
-  this->Internals->PassOne->Use();
-  if(!this->Internals->PassOne->IsValid())
-    {
-    vtkErrorMacro(<<" validation of the program failed: "<<this->Internals->PassOne->GetLastValidateLog());
-    }
-
-  this->Internals->ColorMaterialHelper->Render();
-
-  this->Superclass::RenderInternal(renderer, actor, typeflags,
-                                   forceCompileOnly);
-  glFinish();
-  this->Internals->PassOne->Restore();
-  this->Internals->FBO->UnBind();
-
-  renWin->MakeCurrent();
-
-  unsigned int licSize[2] = {
-    this->Internals->ViewportExtent[1]-this->Internals->ViewportExtent[0]+1,
-    this->Internals->ViewportExtent[3]-this->Internals->ViewportExtent[2]+1 };
-
-  // vtkLineIntegralConvolution2D needs step size in normalized image space, so we
-  // convert this->StepSize to normalized space.
-  // (assuming 1 pixel is a unit square):
-  double stepsize = this->StepSize * sqrt(2.0) /
-                    sqrt(  static_cast< double > ( licSize[0] * licSize[0] +
-                                                   licSize[1] * licSize[1]
-                                                 )
-                        );
-  vtkLineIntegralConvolution2D * licer = vtkLineIntegralConvolution2D::New();
-  if (  !licer->IsSupported( renWin )  )
-    {
-    licer->Delete();
-    timer->Delete();
-    licer  = NULL;
-    timer  = NULL;
-    renWin = NULL;
-    this->LICSuccess = 0;
-    return;
-    }
-
-  licer->SetNumberOfSteps( this->NumberOfSteps );
-  licer->SetLICStepSize( stepsize );
-  licer->SetEnhancedLIC( this->EnhancedLIC );
-  licer->SetLICForSurface( 1 );
-  licer->SetNoise( this->Internals->NoiseImage );
-  licer->SetVectorField( this->Internals->VelocityImage );
-  licer->SetComponentIds( 0, 1 );
-  if (  !licer->Execute( this->Internals->ViewportExtent )  )
-    {
-    licer->Delete();
-    timer->Delete();
-    licer  = NULL;
-    timer  = NULL;
-    renWin = NULL;
-    this->LICSuccess = 0;
-    return;
-    }
-  this->LICSuccess = 1;
-
-  vtkSmartPointer<vtkTextureObject> lic = licer->GetLIC();
-  licer->Delete();
-
-  glFinish();
-
-  // * Now render the LIC onto the scene.
-  renWin->MakeCurrent();
-
-  this->Internals->PassTwo->Build();
-  if(this->Internals->PassTwo->GetLastBuildStatus()!=
-     VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
-    {
-    vtkErrorMacro("Pass Two failed.");
-    abort();
-    }
-  this->Internals->PassTwo->Use();
-
-  vtkgl::ActiveTexture(vtkgl::TEXTURE0);
-  lic->Bind();
-  int value=0;
-  this->Internals->PassTwo->GetUniformVariables()->SetUniformi("texLIC",1,&value);
-
-  vtkgl::ActiveTexture(vtkgl::TEXTURE1);
-  this->Internals->GeometryImage->Bind();
-
-  value=1;
-  this->Internals->PassTwo->GetUniformVariables()->SetUniformi("texGeometry",1,&value);
-  vtkgl::ActiveTexture(vtkgl::TEXTURE2);
-  this->Internals->VelocityImage->Bind();
-
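-  // The velocity image doubles as the depth source: pass one stores
-  // gl_FragCoord.z in its third component, which pass two reads as "texDepth".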
-  value=2;
-  this->Internals->PassTwo->GetUniformVariables()->SetUniformi("texDepth",1,&value);
-
-  float fvalue=static_cast<float>(this->LICIntensity);
-  this->Internals->PassTwo->GetUniformVariables()->SetUniformf("uLICIntensity",1,&fvalue);
-
-  // vtkLineIntegralConvolution2D changed the matrices to an orthographic view
-  // over the extents we provided. Now we want the view to be orthographic
-  // over the full viewport.
-  glMatrixMode(GL_PROJECTION);
-  glLoadIdentity();
-  glOrtho(0.0, viewsize[0], 0.0, viewsize[1], -1, 1);
-  glMatrixMode(GL_MODELVIEW);
-  glLoadIdentity();
-  glViewport(vieworigin[0], vieworigin[1], viewsize[0], viewsize[1]);
-  glScissor(vieworigin[0], vieworigin[1], viewsize[0], viewsize[1]);
-
-  // vtkFrameBufferObject disables the depth test, so we need to re-enable it.
-  glEnable(GL_DEPTH_TEST);
-  glEnable(GL_SCISSOR_TEST);
-
-  this->Internals->PassTwo->Use();
-  if(!this->Internals->PassTwo->IsValid())
-    {
-    vtkErrorMacro(<<" validation of the program failed: "<<this->Internals->PassTwo->GetLastValidateLog());
-    }
-
-  glBegin(GL_QUADS);
-  glTexCoord2f(0.0, 0.0);
-  vtkgl::MultiTexCoord2f(vtkgl::TEXTURE1,
-                         static_cast<GLfloat>(this->Internals->ViewportExtent[0]/double(viewsize[0])),
-                         static_cast<GLfloat>(this->Internals->ViewportExtent[2]/double(viewsize[1])));
-  glVertex2f(static_cast<GLfloat>(this->Internals->ViewportExtent[0]),
-             static_cast<GLfloat>(this->Internals->ViewportExtent[2]));
-
-  glTexCoord2f(1.0, 0.0);
-  vtkgl::MultiTexCoord2f(vtkgl::TEXTURE1,
-                         static_cast<GLfloat>(this->Internals->ViewportExtent[1]/double(viewsize[0])),
-                         static_cast<GLfloat>(this->Internals->ViewportExtent[2]/double(viewsize[1])));
-  glVertex2f(static_cast<GLfloat>(this->Internals->ViewportExtent[1]),
-             static_cast<GLfloat>(this->Internals->ViewportExtent[2]));
-
-  glTexCoord2f(1.0, 1.0);
-  vtkgl::MultiTexCoord2f(vtkgl::TEXTURE1,
-                         static_cast<GLfloat>(this->Internals->ViewportExtent[1]/double(viewsize[0])),
-                         static_cast<GLfloat>(this->Internals->ViewportExtent[3]/double(viewsize[1])));
-  glVertex2f(static_cast<GLfloat>(this->Internals->ViewportExtent[1]),
-             static_cast<GLfloat>(this->Internals->ViewportExtent[3]));
-
-  glTexCoord2f(0.0, 1.0);
-  vtkgl::MultiTexCoord2f(vtkgl::TEXTURE1,
-                         static_cast<GLfloat>(this->Internals->ViewportExtent[0]/double(viewsize[0])),
-                         static_cast<GLfloat>(this->Internals->ViewportExtent[3]/double(viewsize[1])));
-  glVertex2f(static_cast<GLfloat>(this->Internals->ViewportExtent[0]),
-             static_cast<GLfloat>(this->Internals->ViewportExtent[3]));
-  glEnd();
-
-  lic = 0;
-  this->Internals->PassTwo->Restore();
-
-  // Essential to restore the context to what it was before we started messing
-  // with it.
-  glMatrixMode(GL_MODELVIEW);
-  glPopMatrix();
-  glMatrixMode(GL_PROJECTION);
-  glPopMatrix();
-
-  // Pop the attributes.
-  glPopAttrib();
-
-  timer->StopTimer();
-  vtkDebugMacro( << "Elapsed: " << timer->GetElapsedTime() << endl );
-  timer->Delete();
-}
-
-//-----------------------------------------------------------------------------
-void vtkSurfaceLICPainter::ReportReferences(vtkGarbageCollector *collector)
-{
-  this->Superclass::ReportReferences(collector);
-
-  vtkGarbageCollectorReport(collector, this->Output, "Output PolyData");
-}
-
-//----------------------------------------------------------------------------
-vtkDataObject* vtkSurfaceLICPainter::GetOutput()
-{
-  if (this->Enable)
-    {
-    return this->Output;
-    }
-
-  return this->Superclass::GetOutput();
-}
-
-//----------------------------------------------------------------------------
-bool vtkSurfaceLICPainter::PrepareOutput()
-{
-  if ( !this->Enable )
-    {
-    // Don't bother doing any work, we are simply passing the input as the
-    // output.
-    return false;
-    }
-
-  // TODO: Handle composite datasets.
-  vtkDataObject* input = this->GetInput();
-
-  if (  !this->Output ||
-        !this->Output->IsA( input->GetClassName() ) ||
-       ( this->Output->GetMTime() < this->GetMTime( ) ) ||
-       ( this->Output->GetMTime() < input->GetMTime() )
-     )
-    {
-    this->Internals->HasVectors = true;
-    if ( this->Output )
-      {
-      this->Output->Delete();
-      this->Output = 0;
-      }
-
-    vtkDataObject* output = input->NewInstance();
-    output->ShallowCopy( input );
-
-    bool found_some_vectors = false;
-    vtkDataSet* ds = vtkDataSet::SafeDownCast(output);
-    if (ds)
-      {
-      found_some_vectors = this->FixTCoords(ds);
-      }
-    vtkCompositeDataSet* cd = vtkCompositeDataSet::SafeDownCast(output);
-    if (cd)
-      {
-      vtkCompositeDataIterator* iter = cd->NewIterator();
-      for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
-        {
-        ds = vtkDataSet::SafeDownCast(iter->GetCurrentDataObject());
-        if (ds)
-          {
-          found_some_vectors = this->FixTCoords(ds) || found_some_vectors;
-          }
-        }
-      iter->Delete();
-      }
-
-    if (!found_some_vectors)
-      {
-      vtkErrorMacro( "No vectors available." );
-      this->Internals->HasVectors = false;
-      }
-
-    this->Output = output;
-    this->Output->Modified();
-    output = NULL;
-    }
-
-  input = NULL;
-  return this->Internals->HasVectors;
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICPainter::GetBounds(vtkDataObject* dobj, double bounds[6])
-{
-  vtkMath::UninitializeBounds(bounds);
-  vtkDataSet* ds = vtkDataSet::SafeDownCast(dobj);
-  if (ds)
-    {
-    ds->GetBounds(bounds);
-    return;
-    }
-
-  vtkCompositeDataSet* cd = vtkCompositeDataSet::SafeDownCast(dobj);
-  if (cd)
-    {
-    vtkBoundingBox bbox;
-    vtkCompositeDataIterator* iter = cd->NewIterator();
-    for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
-      {
-      ds = vtkDataSet::SafeDownCast(iter->GetCurrentDataObject());
-      if (ds)
-        {
-        ds->GetBounds(bounds);
-        bbox.AddBounds(bounds);
-        }
-      }
-    iter->Delete();
-    bbox.GetBounds(bounds);
-    }
-}
-
-//----------------------------------------------------------------------------
-bool vtkSurfaceLICPainter::FixTCoords(vtkDataSet* ds)
-{
-  bool cell_data;
-  vtkDataArray * vectors = NULL;
-  if ( this->Internals->FieldNameSet )
-    {
-    vectors = vtkDataArray::SafeDownCast(this->GetInputArrayToProcess(
-      this->Internals->FieldAssociation,
-      this->Internals->FieldName.c_str(), ds, &cell_data));
-    }
-  else
-    {
-    vectors = vtkDataArray::SafeDownCast(this->GetInputArrayToProcess(
-      this->Internals->FieldAssociation,
-      this->Internals->FieldAttributeType, ds, &cell_data));
-    }
-  if (vectors)
-    {
-    if (cell_data)
-      {
-      ds->GetCellData()->SetTCoords(vectors);
-      }
-    else
-      {
-      ds->GetPointData()->SetTCoords(vectors);
-      }
-    }
-
-  return vectors != NULL;
-}
-
-//----------------------------------------------------------------------------
-void vtkSurfaceLICPainter::PrintSelf( ostream & os, vtkIndent indent )
-{
-  this->Superclass::PrintSelf( os, indent );
-
-  os << indent << "Enable: "        << this->Enable        << endl;
-  os << indent << "StepSize: "      << this->StepSize      << endl;
-  os << indent << "EnhancedLIC: "   << this->EnhancedLIC   << endl;
-  os << indent << "LICIntensity: "  << this->LICIntensity  << endl;
-  os << indent << "NumberOfSteps: " << this->NumberOfSteps << endl;
-  os << indent << "RenderingPreparationSuccess: "
-               << this->RenderingPreparationSuccess << endl;
-}
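
    The step-size conversion in RenderInternal() above maps this->StepSize,
    given in pixels, into normalized image space by scaling with the length of
    one pixel's diagonal relative to the diagonal of the LIC viewport. A
    minimal standalone sketch of that arithmetic (the helper name
    NormalizedStepSize is illustrative, not a VTK API):

        #include <cmath>

        // stepSizeInPixels scaled by (pixel diagonal) / (LIC viewport diagonal)
        double NormalizedStepSize(double stepSizeInPixels,
                                  unsigned int licWidth, unsigned int licHeight)
        {
          double diag = std::sqrt(static_cast<double>(licWidth) * licWidth +
                                  static_cast<double>(licHeight) * licHeight);
          return stepSizeInPixels * std::sqrt(2.0) / diag;
        }

        // e.g. a 1-pixel step on a 640x480 LIC extent:
        // 1.0 * sqrt(2) / sqrt(640*640 + 480*480) = 1.41421 / 800 ~= 0.00177
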
diff --git a/Rendering/HybridOpenGL/vtkSurfaceLICPainter.h b/Rendering/HybridOpenGL/vtkSurfaceLICPainter.h
deleted file mode 100644
index 6b8b8b7..0000000
--- a/Rendering/HybridOpenGL/vtkSurfaceLICPainter.h
+++ /dev/null
@@ -1,167 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkSurfaceLICPainter.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkSurfaceLICPainter - painter that performs LIC on the surface of
-//  arbitrary geometry.
-//
-// .SECTION Description
-//  vtkSurfaceLICPainter performs LIC on the surface of arbitrary
-//  geometry. Point vectors are used as the vector field for generating the LIC.
-//  The implementation is based on "Image Space Based Visualization of Unsteady
-//  Flow on Surfaces" by Laramee, Jobard and Hauser, which appeared in the
-//  proceedings of IEEE Visualization '03, pages 131-138.
-
-#ifndef __vtkSurfaceLICPainter_h
-#define __vtkSurfaceLICPainter_h
-
-#include "vtkRenderingHybridOpenGLModule.h" // For export macro
-#include "vtkPainter.h"
-
-class vtkRenderWindow;
-
-class VTKRENDERINGHYBRIDOPENGL_EXPORT vtkSurfaceLICPainter : public vtkPainter
-{
-public:
-  static vtkSurfaceLICPainter* New();
-  vtkTypeMacro(vtkSurfaceLICPainter, vtkPainter);
-  void PrintSelf(ostream& os, vtkIndent indent);
-
-  // Description:
-  // Release any graphics resources that are being consumed by this mapper.
-  // The parameter window could be used to determine which graphic
-  // resources to release. In this case, releases the display lists.
-  virtual void ReleaseGraphicsResources(vtkWindow *);
-
-  // Description:
-  // Get the output data object from this painter.
-  // Overridden to pass the input point (or cell) vectors as the tcoords to
-  // the delegate painters. This is required by the internal GLSL shader
-  // programs used for generating LIC.
-  virtual vtkDataObject* GetOutput();
-
-  // Description:
-  // Enable/Disable this painter.
-  vtkSetMacro(Enable, int);
-  vtkGetMacro(Enable, int);
-  vtkBooleanMacro(Enable, int);
-
-  // Description:
-  // Set the vectors to be used for applying LIC. By default point vectors are
-  // used. Arguments are the same as those passed to
-  // vtkAlgorithm::SetInputArrayToProcess except the first 3 arguments i.e. idx,
-  // port, connection.
-  void SetInputArrayToProcess(int fieldAssociation, const char *name);
-  void SetInputArrayToProcess(int fieldAssociation, int fieldAttributeType);
-
-  // Description:
-  // Enable/Disable enhanced LIC that improves image quality by increasing
-  // inter-streamline contrast while suppressing artifacts. Enhanced LIC
-  // performs two passes of LIC, with a 3x3 Laplacian high-pass filter in
-  // between that processes the output of pass #1 LIC and forwards the result
-  // as the input 'noise' to pass #2 LIC. This flag is automatically turned
-  // off during user interaction.
-  vtkSetMacro( EnhancedLIC, int );
-  vtkGetMacro( EnhancedLIC, int );
-  vtkBooleanMacro( EnhancedLIC, int );
-
-  // Description:
-  // Get/Set the number of integration steps in each direction.
-  vtkSetMacro(NumberOfSteps, int);
-  vtkGetMacro(NumberOfSteps, int);
-
-  // Description:
-  // Get/Set the step size (in pixels).
-  vtkSetMacro(StepSize, double);
-  vtkGetMacro(StepSize, double);
-
-  // Description:
-  // Control the contribution of the LIC in the final output image.
-  // 0.0 produces the same result as disabling LIC altogether, while 1.0 shows
-  // the LIC result alone.
-  vtkSetClampMacro(LICIntensity, double, 0.0, 1.0);
-  vtkGetMacro(LICIntensity, double);
-
-  // Description:
-  // Check if PrepareForRendering passes.
-  int GetRenderingPreparationSuccess()
-      { return this->RenderingPreparationSuccess; }
-
-  // Description:
-  // Check if the LIC process runs properly.
-  int GetLICSuccess() { return this->LICSuccess; }
-
-  // Description:
-  // Returns true if the rendering context supports the extensions needed by
-  // this painter.
-  static bool IsSupported(vtkRenderWindow*);
-//BTX
-protected:
-  vtkSurfaceLICPainter();
-  ~vtkSurfaceLICPainter();
-
-  // Description:
-  // Computes data bounds.
-  void GetBounds(vtkDataObject* data, double bounds[6]);
-
-  // Description:
-  // Take part in garbage collection.
-  virtual void ReportReferences(vtkGarbageCollector *collector);
-
-  // Description:
-  // Some subclasses may need to do some preprocessing
-  // before the actual rendering can be done, e.g. building an efficient
-  // representation for the data. This should be done here.
-  // This method gets called after ProcessInformation()
-  // but before RenderInternal().
-  virtual void PrepareForRendering(vtkRenderer*, vtkActor*);
-
-  // Description:
-  // Performs the actual rendering. Subclasses may override this method.
-  // The default implementation merely calls Render on the DelegatePainter,
-  // if any. When RenderInternal() is called, it is assured that the
-  // DelegatePainter is in sync with this painter i.e. UpdateDelegatePainter()
-  // has been called.
-  virtual void RenderInternal(vtkRenderer* renderer, vtkActor* actor,
-                              unsigned long typeflags, bool forceCompileOnly);
-
-  // Description:
-  // Prepares output data. Returns true if vectors are available.
-  bool PrepareOutput();
-  bool FixTCoords(vtkDataSet* ds);
-
-  // Description:
-  // Returns true when rendering LIC is possible.
-  bool CanRenderLIC(vtkRenderer*, vtkActor*);
-
-  // Unit is a pixel length.
-  int    NumberOfSteps;
-  double StepSize;
-
-  int    Enable;
-  int    EnhancedLIC;
-  int    RenderingPreparationSuccess;
-  int    LICSuccess;
-  double LICIntensity;
-
-private:
-  vtkSurfaceLICPainter(const vtkSurfaceLICPainter&); // Not implemented.
-  void operator=(const vtkSurfaceLICPainter&); // Not implemented.
-
-  vtkDataObject* Output;
-  class vtkInternals;
-  vtkInternals* Internals;
-//ETX
-};
-
-#endif
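
    The header above lists the painter's public knobs (Enable, EnhancedLIC,
    NumberOfSteps, StepSize, LICIntensity, SetInputArrayToProcess). A minimal
    configuration sketch using only those declared setters; the parameter
    values are borrowed from the LIC tests further below, and attaching the
    painter to a painter-based polydata mapper is left out:

        #include "vtkSurfaceLICPainter.h"
        #include "vtkDataObject.h"
        #include "vtkRenderWindow.h"

        void ConfigureSurfaceLIC(vtkRenderWindow* renWin)
        {
          if (!vtkSurfaceLICPainter::IsSupported(renWin))
            {
            return; // required OpenGL extensions are missing
            }
          vtkSurfaceLICPainter* painter = vtkSurfaceLICPainter::New();
          painter->EnableOn();
          painter->SetNumberOfSteps(40);  // integration steps in each direction
          painter->SetStepSize(0.4);      // in pixels
          painter->SetEnhancedLIC(1);     // two-pass LIC with high-pass filter
          painter->SetLICIntensity(0.8);  // 80% LIC, 20% shaded geometry
          // use the point-data array named "V" as the vector field
          painter->SetInputArrayToProcess(
            vtkDataObject::FIELD_ASSOCIATION_POINTS, "V");
          // ... attach to a painter-based polydata mapper, then render ...
          painter->Delete();
        }
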
diff --git a/Rendering/HybridOpenGL/vtkSurfaceLICPainter_fs1.glsl b/Rendering/HybridOpenGL/vtkSurfaceLICPainter_fs1.glsl
deleted file mode 100644
index 5c0b019..0000000
--- a/Rendering/HybridOpenGL/vtkSurfaceLICPainter_fs1.glsl
+++ /dev/null
@@ -1,30 +0,0 @@
-//=========================================================================
-//
-//  Program:   Visualization Toolkit
-//  Module:    vtkSurfaceLICPainter_fs1.glsl
-//
-//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-//  All rights reserved.
-//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-//
-//     This software is distributed WITHOUT ANY WARRANTY; without even
-//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-//     PURPOSE.  See the above copyright notice for more information.
-//
-//=========================================================================
-
-// Filename: vtkSurfaceLICPainter_fs1.glsl
-// Filename is useful when using gldb-gui
-
-#version 110
-
-#extension GL_ARB_draw_buffers : enable
-
-varying vec2 vProjectedVF;
-varying vec4 vColor;
-
-void main()
-{
-  gl_FragData[0] = clamp(vColor, vec4(0.0, 0.0, 0.0, 0.0), vec4(1.0, 1.0, 1.0, 1.0));
-  gl_FragData[1] = vec4(vProjectedVF.x, vProjectedVF.y, gl_FragCoord.z, 1.0);
-}
diff --git a/Rendering/HybridOpenGL/vtkSurfaceLICPainter_fs2.glsl b/Rendering/HybridOpenGL/vtkSurfaceLICPainter_fs2.glsl
deleted file mode 100644
index 91c81df..0000000
--- a/Rendering/HybridOpenGL/vtkSurfaceLICPainter_fs2.glsl
+++ /dev/null
@@ -1,58 +0,0 @@
-//=========================================================================
-//
-//  Program:   Visualization Toolkit
-//  Module:    vtkSurfaceLICPainter_fs2.glsl
-//
-//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-//  All rights reserved.
-//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-//
-//     This software is distributed WITHOUT ANY WARRANTY; without even
-//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-//     PURPOSE.  See the above copyright notice for more information.
-//
-//=========================================================================
-
-// Filename: vtkSurfaceLICPainter_fs2.glsl
-// Filename is useful when using gldb-gui
-
-#version 110
-
-uniform sampler2D texLIC;
-uniform sampler2D texGeometry;
-uniform sampler2D texDepth;
-uniform float     uLICIntensity;
-
-vec3    texMasker = vec3( -1.0, -1.0, -1.0 ); // for zero-vector fragments
-
-void main()
-{
-  float fragDepth = texture2D( texDepth,    gl_TexCoord[1].st ).b;
-
-  if ( fragDepth == 0.0 )
-    {
-    discard;
-    }
-
-  vec3  licTexVal = texture2D( texLIC,      gl_TexCoord[0].st ).rgb;
-  vec4  geomColor = texture2D( texGeometry, gl_TexCoord[1].st );
-
-  // In pass #1 LIC (providing a low-quality image during user interaction)
-  // or pass #2 LIC (providing an improved image when there is no user
-  // interaction), both in vtkLineIntegralConvolution2D_fs1, any fragment where
-  // the surface vector is zero is assigned a masking texture value
-  // vec3( -1.0, -1.0, -1.0 ). Such fragments need to be made totally
-  // transparent to show the underlying geometry surface.
-  bvec3 isMaskVal = equal( licTexVal, texMasker );
-  int   rejectLIC = int(  all( isMaskVal )  );
-
-  vec4  tempColor = vec4(   (  licTexVal     *         uLICIntensity +
-                               geomColor.xyz * ( 1.0 - uLICIntensity )
-                            ), geomColor.a
-                        );
-  tempColor = float( 1 - rejectLIC ) * tempColor +
-              float(     rejectLIC ) * geomColor;
-
-  gl_FragColor = tempColor;
-  gl_FragDepth = fragDepth;
-}
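
    The fragment shader above blends the LIC value with the shaded geometry
    color and falls back to the geometry color wherever the LIC texture
    carries the (-1, -1, -1) mask. The same arithmetic as a small CPU-side
    sketch (function name and types are illustrative):

        #include <array>

        std::array<double, 4> BlendLIC(const std::array<double, 3>& lic,
                                       const std::array<double, 4>& geom,
                                       double licIntensity)
        {
          if (lic[0] == -1.0 && lic[1] == -1.0 && lic[2] == -1.0)
            {
            return geom; // zero-vector fragment: show the surface color only
            }
          return { lic[0] * licIntensity + geom[0] * (1.0 - licIntensity),
                   lic[1] * licIntensity + geom[1] * (1.0 - licIntensity),
                   lic[2] * licIntensity + geom[2] * (1.0 - licIntensity),
                   geom[3] }; // keep the geometry alpha
        }
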
diff --git a/Rendering/HybridOpenGL/vtkSurfaceLICPainter_vs1.glsl b/Rendering/HybridOpenGL/vtkSurfaceLICPainter_vs1.glsl
deleted file mode 100644
index 3b04a0f..0000000
--- a/Rendering/HybridOpenGL/vtkSurfaceLICPainter_vs1.glsl
+++ /dev/null
@@ -1,51 +0,0 @@
-//=========================================================================
-//
-//  Program:   Visualization Toolkit
-//  Module:    vtkSurfaceLICPainter_vs1.glsl
-//
-//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-//  All rights reserved.
-//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-//
-//     This software is distributed WITHOUT ANY WARRANTY; without even
-//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-//     PURPOSE.  See the above copyright notice for more information.
-//
-//=========================================================================
-// Filename: vtkSurfaceLICPainter_vs1.glsl
-// Filename is useful when using gldb-gui
-
-#version 120
-varying vec4 vColor;
-varying vec2 vProjectedVF;
-
-// from vtkColorMaterialHelper
-gl_MaterialParameters getMaterialParameters();
-
-// from vtkLightingHelper
-vec4 singleColor(gl_MaterialParameters m,
-  vec3 surfacePosEyeCoords, vec3 n);
-
-// Projects "vector" onto the surface.
-vec3 projectOnSurface(vec3 vector)
-{
-  vec3 normal = normalize(gl_Normal);
-  float k = dot(normal, vector);
-  return (vector - (k*normal));
-}
-
-vec4 colorFrontFace()
-{
- vec4 heyeCoords = gl_ModelViewMatrix*gl_Vertex;
- vec3 eyeCoords = heyeCoords.xyz/heyeCoords.w;
- vec3 n = normalize(gl_NormalMatrix*gl_Normal);
- return singleColor(getMaterialParameters(),eyeCoords,n);
-}
-
-void main()
-{
-  vec3 vf = projectOnSurface(gl_MultiTexCoord0.stp);
-  vProjectedVF = (gl_NormalMatrix * vf).xy;
-  vColor = colorFrontFace();
-  gl_Position = ftransform();
-}
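
    projectOnSurface() above removes the component of the vector along the
    surface normal, leaving the tangential part that drives the LIC. The same
    projection written out in plain C++ (helper name is illustrative):

        // vt = v - (n . v) * n, assuming n is a unit normal
        void ProjectOnSurface(const double n[3], const double v[3], double vt[3])
        {
          double k = n[0]*v[0] + n[1]*v[1] + n[2]*v[2];
          vt[0] = v[0] - k*n[0];
          vt[1] = v[1] - k*n[1];
          vt[2] = v[2] - k*n[2];
        }
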
diff --git a/Rendering/Image/vtkImageSliceCollection.h b/Rendering/Image/vtkImageSliceCollection.h
index 5bac85d..0d0be89 100644
--- a/Rendering/Image/vtkImageSliceCollection.h
+++ b/Rendering/Image/vtkImageSliceCollection.h
@@ -64,7 +64,7 @@ class VTKRENDERINGIMAGE_EXPORT vtkImageSliceCollection : public vtkPropCollectio
   vtkImageSlice *GetNextItem() { return this->GetNextImage(); }
 
 protected:
-  vtkImageSliceCollection() {};
+  vtkImageSliceCollection() {}
   ~vtkImageSliceCollection();
 
   virtual void DeleteElement(vtkCollectionElement *);
diff --git a/Rendering/LIC/CMakeLists.txt b/Rendering/LIC/CMakeLists.txt
new file mode 100644
index 0000000..782a408
--- /dev/null
+++ b/Rendering/LIC/CMakeLists.txt
@@ -0,0 +1,66 @@
+set(Module_SRCS
+  vtkImageDataLIC2D.cxx
+  vtkImageDataLIC2DExtentTranslator.cxx
+  vtkLineIntegralConvolution2D.cxx
+  vtkPixelTransfer.cxx
+  vtkStructuredGridLIC2D.cxx
+  vtkSurfaceLICComposite.cxx
+  vtkSurfaceLICDefaultPainter.cxx
+  vtkSurfaceLICPainter.cxx
+  vtkTextureIO.cxx
+  )
+
+set_source_files_properties(
+  vtkLineIntegralConvolution2D.cxx
+  vtkPainterCommunicator.cxx
+  vtkPixelTransfer.cxx
+  vtkSurfaceLICComposite.cxx
+  vtkTextureIO.cxx
+  WRAP_EXCLUDE
+  )
+
+if (VTK_RENDERINGPARALLELLIC_SURFACELICPAINTER_TIMER)
+  add_definitions("-DvtkSurfaceLICPainterTIME")
+endif()
+if (VTK_RENDERINGPARALLELLIC_LINEINTEGRALCONVLOLUTION2D_TIMER)
+  add_definitions("-DvtkLineIntegralConvolution2DTIME")
+endif()
+
+set(shader_files
+  vtkLineIntegralConvolution2D_VT.glsl
+  vtkLineIntegralConvolution2D_LIC0.glsl
+  vtkLineIntegralConvolution2D_LICI.glsl
+  vtkLineIntegralConvolution2D_LICN.glsl
+  vtkLineIntegralConvolution2D_EE.glsl
+  vtkLineIntegralConvolution2D_CE.glsl
+  vtkLineIntegralConvolution2D_AAH.glsl
+  vtkLineIntegralConvolution2D_AAV.glsl
+  vtkStructuredGridLIC2D_fs.glsl
+  vtkSurfaceLICPainter_GeomVs.glsl
+  vtkSurfaceLICPainter_GeomFs.glsl
+  vtkSurfaceLICPainter_SC.glsl
+  vtkSurfaceLICPainter_CE.glsl
+  vtkSurfaceLICPainter_DCpy.glsl
+  )
+
+unset(shader_h_files)
+foreach(file ${shader_files})
+  get_filename_component(file_we ${file} NAME_WE)
+  set(src ${CMAKE_CURRENT_SOURCE_DIR}/${file})
+  set(res ${CMAKE_CURRENT_BINARY_DIR}/${file_we}.cxx)
+  set(resh ${CMAKE_CURRENT_BINARY_DIR}/${file_we}.h)
+  list(APPEND shader_h_files ${resh})
+  add_custom_command(
+    OUTPUT ${res} ${resh}
+    DEPENDS ${src} vtkEncodeString
+    COMMAND vtkEncodeString
+    ARGS ${res} ${src} ${file_we}
+    --build-header VTKRENDERINGLIC_EXPORT vtkRenderingLICModule.h
+    )
+  list(APPEND Module_SRCS ${res})
+  set_source_files_properties(${file_we} WRAP_EXCLUDE)
+endforeach()
+
+vtk_module_library(${vtk-module} ${Module_SRCS})
+
+include_directories(${OPENGL_INCLUDE_DIR})
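
    The loop above runs vtkEncodeString on each .glsl file so the shader source
    can be compiled into the module as a C string. The generated pair is
    roughly of this shape (an illustrative sketch, not verbatim tool output;
    the symbol name follows the ${file_we} convention, and the real header is
    additionally decorated with VTKRENDERINGLIC_EXPORT):

        // vtkSurfaceLICPainter_SC.h (generated)
        #ifndef __vtkSurfaceLICPainter_SC_h
        #define __vtkSurfaceLICPainter_SC_h
        extern const char* vtkSurfaceLICPainter_SC; // GLSL source as one string
        #endif

        // vtkSurfaceLICPainter_SC.cxx (generated)
        const char* vtkSurfaceLICPainter_SC =
          "// ...escaped contents of vtkSurfaceLICPainter_SC.glsl...\n";
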
diff --git a/Rendering/LIC/Testing/Cxx/CMakeLists.txt b/Rendering/LIC/Testing/Cxx/CMakeLists.txt
new file mode 100644
index 0000000..bdfdd97
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/CMakeLists.txt
@@ -0,0 +1,375 @@
+# basic tests on the legacy filters
+# exercises line integral convolution 2d
+vtk_add_test_cxx(
+  TestImageDataLIC2D.cxx
+  TestStructuredGridLIC2DXSlice.cxx
+  TestStructuredGridLIC2DYSlice.cxx
+  TestStructuredGridLIC2DZSlice.cxx
+  )
+# surface lic painter tests
+# get decent coverage by calling
+# varying arguments to the following
+# test exec. to get unique test names
+# avoid vtk_add_test_cxx and add the exec
+# and its test cases manually
+set_property(
+  DIRECTORY APPEND PROPERTY
+  VTK_TEST_CXX_SOURCES
+  TestSurfaceLIC.cxx
+  )
+vtk_test_cxx_executable(
+  ${vtk-module}CxxTests
+  RENDERING_FACTORY
+  vtkStructuredGridLIC2DTestDriver.cxx
+  vtkSurfaceLICTestDriver.cxx
+  )
+# surface lic on a curved surface
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedDefaults
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedDefaults.png,:}"
+    --step-size=0.4
+    --num-steps=40
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedBlended
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedBlended.png,:}"
+    --step-size=0.4
+    --num-steps=40
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --enhance-contrast=1
+    --lic-intensity=0.8
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedMapped
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMapped.png,:}"
+    --step-size=0.4
+    --num-steps=40
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --enhance-contrast=4
+    --color-mode=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedEnhancedVectorNormalizeOff
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedEnhancedVectorNormalizeOff.png,:}"
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --camera-config=1
+    --vectors=V
+    --normalize-vectors=0
+    --step-size=0.5
+    --num-steps=800
+    --enhance-contrast=1
+    --color-mode=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedBlendedSmallGrain
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedBlendedSmallGrain.png,:}"
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --step-size=0.4
+    --num-steps=40
+    --generate-noise-texture=1
+    --noise-texture-size=200
+    --noise-grain-size=1
+    --number-of-noise-levels=1024
+    --noise-gen-seed=1
+    --enhance-contrast=1
+    --lic-intensity=0.8
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedMappedSmallGrain
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMappedSmallGrain.png,:}"
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --step-size=0.4
+    --num-steps=40
+    --generate-noise-texture=1
+    --noise-texture-size=200
+    --noise-grain-size=1
+    --number-of-noise-levels=1024
+    --enhance-contrast=4
+    --color-mode=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedMappedSmallVectorNormalizeOff
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMappedSmallVectorNormalizeOff.png,:}"
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --camera-config=1
+    --vectors=V
+    --normalize-vectors=0
+    --step-size=0.5
+    --num-steps=800
+    --generate-noise-texture=1
+    --noise-texture-size=200
+    --noise-grain-size=1
+    --number-of-noise-levels=1024
+    --enhance-contrast=1
+    --low-lic-contrast-enhancement-factor=0.05
+    --color-mode=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedDefaultsColor
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedDefaultsColor.png,:}"
+    --step-size=0.4
+    --num-steps=40
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --color-by-mag=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedColorBlendedSmallGrain
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorBlendedSmallGrain.png,:}"
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --step-size=0.4
+    --num-steps=40
+    --generate-noise-texture=1
+    --noise-texture-size=200
+    --noise-grain-size=1
+    --number-of-noise-levels=1024
+    --noise-gen-seed=1
+    --enhance-contrast=1
+    --color-by-mag=1
+    --lic-intensity=0.6
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedColorMappedSmallGrain
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorMappedSmallGrain.png,:}"
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --step-size=0.4
+    --num-steps=40
+    --generate-noise-texture=1
+    --noise-texture-size=200
+    --noise-grain-size=1
+    --number-of-noise-levels=1024
+    --enhance-contrast=4
+    --color-by-mag=1
+    --color-mode=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedColorBlendedSmallGrainMask
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorBlendedSmallGrainMask.png,:}"
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --step-size=0.4
+    --num-steps=40
+    --generate-noise-texture=1
+    --noise-texture-size=200
+    --noise-grain-size=1
+    --number-of-noise-levels=1024
+    --noise-gen-seed=1
+    --enhance-contrast=1
+    --color-by-mag=1
+    --lic-intensity=0.6
+    --mask-intensity=0.2
+    --mask-color 1.0 1.0 1.0
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedColorMappedSmallGrainMask
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorMappedSmallGrainMask.png,:}"
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --step-size=0.4
+    --num-steps=40
+    --generate-noise-texture=1
+    --noise-texture-size=200
+    --noise-grain-size=1
+    --number-of-noise-levels=1024
+    --enhance-contrast=1
+    --high-lic-contrast-enhancement-factor=0.1
+    --color-by-mag=1
+    --color-mode=1
+    --map-mode-bias=0.05
+    --mask-intensity=0.2
+    --mask-color 1.0 1.0 1.0
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICCurvedContrastEnhancedSmallGrainMask
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedSmallGrainMask.png,:}"
+    --camera-config=1
+    --data=${VTK_TEST_DATA_DIR}/Data/disk_out_ref_surface.vtp
+    --vectors=V
+    --normalize-vectors=0
+    --step-size=0.5
+    --num-steps=800
+    --generate-noise-texture=1
+    --noise-texture-size=200
+    --noise-grain-size=1
+    --number-of-noise-levels=1024
+    --enhance-contrast=1
+    --low-lic-contrast-enhancement-factor=0.05
+    --color-mode=1
+    --map-mode-bias=0.05
+    --mask-intensity=0.2
+    )
+# surface lic on a slice
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICPlanarDefaults
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICPlanarDefaults.png,:}"
+    --data=${VTK_TEST_DATA_DIR}/Data/SurfaceVectors.vtk
+    --camera-config=3
+    --vectors=V
+    --step-size=1
+    --num-steps=40
+    --lic-intensity=0.8
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICPlanarContrastEnhanced
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICPlanarContrastEnhanced.png,:}"
+    --data=${VTK_TEST_DATA_DIR}/Data/SurfaceVectors.vtk
+    --camera-config=3
+    --vectors=V
+    --step-size=1
+    --num-steps=40
+    --enhance-contrast=1
+    --color-mode=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICPlanarVectorNormalizeOff
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOff.png,:}"
+    --data=${VTK_TEST_DATA_DIR}/Data/SurfaceVectors.vtk
+    --camera-config=3
+    --vectors=V
+    --normalize-vectors=0
+    --step-size=0.5
+    --num-steps=2000
+    --generate-noise-texture=1
+    --noise-texture-size=128
+    --noise-grain-size=2
+    --number-of-noise-levels=256
+    --noise-gen-seed=12367
+    --enhance-contrast=1
+    --color-by-mag=1
+    --color-mode=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICPlanarVectorNormalizeOffMediumGrainUniform
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOffMediumGrainUniform.png,:}"
+    --data=${VTK_TEST_DATA_DIR}/Data/SurfaceVectors.vtk
+    --camera-config=3
+    --vectors=V
+    --enhanced-lic=0
+    --step-size=0.25
+    --num-steps=40
+    --generate-noise-texture=1
+    --noise-type=1
+    --noise-texture-size=128
+    --noise-grain-size=2
+    --impulse-noise-prob=0.04
+    --enhance-contrast=4
+    --high-color-contrast-enhancement-factor=0.2
+    --color-by-mag=1
+    --color-mode=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICPlanarVectorNormalizeOffMediumGrainPerlin
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOffMediumGrainPerlin.png,:}"
+    --data=${VTK_TEST_DATA_DIR}/Data/SurfaceVectors.vtk
+    --camera-config=3
+    --vectors=V
+    --normalize-vectors=0
+    --step-size=2
+    --num-steps=400
+    --generate-noise-texture=1
+    --noise-type=2
+    --noise-texture-size=256
+    --noise-grain-size=8
+    --max-noise-value=0.8
+    --enhance-contrast=4
+    --color-by-mag=1
+    --color-mode=1
+    --anti-alias=1
+    )
+ExternalData_add_test(VTKData
+  NAME ${vtk-module}Cxx-SurfaceLICMultiBlockContrastEnhancedPerlin
+  COMMAND ${vtk-module}CxxTests TestSurfaceLIC
+    -D ${VTK_TEST_DATA_DIR}
+    -T ${VTK_TEST_OUTPUT_DIR}
+    -V "DATA{${${vtk-module}_SOURCE_DIR}/Testing/Data/Baseline/TestSurfaceLICMultiBlockContrastEnhancedPerlin.png,:}"
+    --data=${VTK_TEST_DATA_DIR}/Data/ex-blow_5.vtm
+    --camera-config=4
+    --vectors=DISPLACEMENT_
+    --step-size=0.1
+    --num-steps=50
+    --generate-noise-texture=1
+    --noise-type=2
+    --noise-texture-size=128
+    --noise-grain-size=2
+    --number-of-noise-levels=1024
+    --noise-gen-seed=1
+    --enhance-contrast=4
+    --color-by-mag=1
+    --color-mode=1
+    --map-mode-bias=0.1
+    --mask-on-surface=1
+    --mask-intensity=0.41
+    --mask-color 0 0 0
+    )
diff --git a/Rendering/LIC/Testing/Cxx/TestImageDataLIC2D.cxx b/Rendering/LIC/Testing/Cxx/TestImageDataLIC2D.cxx
new file mode 100644
index 0000000..f5475ee
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/TestImageDataLIC2D.cxx
@@ -0,0 +1,401 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestImageDataLIC2D.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "TestImageDataLIC2D.h"
+
+#include "vtkGenericDataObjectReader.h"
+#include "vtkImageDataLIC2D.h"
+#include "vtkPixelExtent.h"
+#include "vtkPixelTransfer.h"
+#include "vtkImageData.h"
+#include "vtkPointData.h"
+#include "vtkUnsignedCharArray.h"
+#include "vtkFloatArray.h"
+#include "vtkImageIterator.h"
+#include "vtkImagePermute.h"
+#include "vtkImageShiftScale.h"
+#include "vtkPNGReader.h"
+#include "vtkPNGWriter.h"
+#include "vtkDataSetWriter.h"
+#include "vtkProbeFilter.h"
+#include "vtkRenderWindow.h"
+#include "vtkSmartPointer.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
+#include "vtkStructuredData.h"
+#include "vtkTimerLog.h"
+#include "vtkUnstructuredGrid.h"
+#include "vtkTestUtilities.h"
+#include "vtkRegressionTestImage.h"
+#include "vtkTesting.h"
+#include <vtksys/CommandLineArguments.hxx>
+#include "vtkImageMapToColors.h"
+#include "vtkLookupTable.h"
+#include "vtkTrivialProducer.h"
+#include <vtksys/SystemTools.hxx>
+#include <sstream>
+using std::ostringstream;
+
+//-----------------------------------------------------------------------------
+int TestImageDataLIC2D(int argc, char* argv[])
+{
+  char* fname =
+    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/SurfaceVectors.vtk");
+  std::string filename = fname;
+  filename = "--data=" + filename;
+  delete [] fname;
+
+  fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/noise.png");
+  std::string noise = fname;
+  noise = "--noise=" + noise;
+  delete [] fname;
+
+  char** new_argv = new char*[argc+10];
+  for (int cc=0; cc < argc; cc++)
+    {
+    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
+    }
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString(noise.c_str());
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--mag=5");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--partitions=5");
+  int status = ImageDataLIC2D(argc, new_argv);
+  for (int kk=0; kk < argc; kk++)
+    {
+    delete [] new_argv[kk];
+    }
+  delete [] new_argv;
+  return status;
+}
+
+// Example demonstrating use of vtkImageDataLIC2D filter.
+// Typical usage:
+// ./bin/ImageDataLIC2D --data=<vtk file> --output=<png file>
+//-----------------------------------------------------------------------------
+int ImageDataLIC2D(int argc, char* argv[])
+{
+  std::string filename;
+  std::string noise_filename;
+  int resolution = 10;
+  int magnification = 1;
+  std::string outputpath;
+  int num_partitions = 1;
+  int num_steps = 40;
+
+  vtksys::CommandLineArguments arg;
+  arg.StoreUnusedArguments(1);
+  arg.Initialize(argc, argv);
+
+  typedef vtksys::CommandLineArguments argT;
+  arg.AddArgument("--data", argT::EQUAL_ARGUMENT, &filename,
+    "(required) Enter dataset to load (currently only *.vtk files are supported");
+  arg.AddArgument("--res", argT::EQUAL_ARGUMENT, &resolution,
+    "(optional: default 10) Number of sample per unit distance");
+  arg.AddArgument("--mag", argT::EQUAL_ARGUMENT, &magnification,
+    "(optional: default 1) Magnification");
+  arg.AddArgument("--output", argT::EQUAL_ARGUMENT, &outputpath,
+    "(optional) Output png image");
+  arg.AddArgument("--partitions", argT::EQUAL_ARGUMENT, &num_partitions,
+    "(optional: default 1) Number of partitions");
+  arg.AddArgument("--num-steps", argT::EQUAL_ARGUMENT, &num_steps,
+    "(optional: default 40) Number of steps in each direction");
+  arg.AddArgument("--noise", argT::EQUAL_ARGUMENT, &noise_filename,
+    "(optional) Specify the filename to a png image file to use as the noise texture.");
+
+  if (!arg.Parse() || filename=="")
+    {
+    cerr << "Problem parsing arguments." << endl;
+    cerr << arg.GetHelp() << endl;
+    return -1;
+    }
+
+  if (magnification < 1)
+    {
+    cerr << "WARNING: Magnification cannot be less than 1. Using 1" << endl;
+    magnification = 1;
+    }
+
+  if (num_steps < 0)
+    {
+    cerr << "WARNING: Number of steps cannot be less than 0. Forcing 0." << endl;
+    num_steps = 0;
+    }
+
+  // set up test helper
+  vtkSmartPointer<vtkTesting> tester
+    = vtkSmartPointer<vtkTesting>::New();
+
+  for (int cc=0; cc < argc; cc++)
+    {
+    tester->AddArgument(argv[cc]);
+    }
+  if (!tester->IsValidImageSpecified())
+    {
+    cerr << "ERROR: Valid image not specified." << endl;
+    return -2;
+    }
+
+  // load noise
+  vtkSmartPointer<vtkImageData> noise;
+  if (noise_filename != "")
+    {
+    vtkSmartPointer<vtkPNGReader> pngReader
+      = vtkSmartPointer<vtkPNGReader>::New();
+
+    pngReader->SetFileName(noise_filename.c_str());
+    pngReader->Update();
+
+    noise = pngReader->GetOutput();
+
+    vtkUnsignedCharArray *cVals
+      = vtkUnsignedCharArray::SafeDownCast(noise->GetPointData()->GetScalars());
+    if (!cVals)
+      {
+      cerr << "Error: expected unsigned chars, test fails" << endl;
+      return 1;
+      }
+
+    unsigned char *pCVals = cVals->GetPointer(0);
+    vtkIdType cTups = cVals->GetNumberOfTuples();
+
+    vtkFloatArray *fVals = vtkFloatArray::New();
+    fVals->SetNumberOfComponents(2);
+    fVals->SetNumberOfTuples(cTups);
+    fVals->SetName("noise");
+    float *pFVals = fVals->GetPointer(0);
+
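+    // normalize the 8-bit noise samples to floats in [0, 1]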
+    size_t nVals = 2*cTups;
+    for (size_t i=0; i<nVals; ++i)
+      {
+      pFVals[i] = pCVals[i]/255.0;
+      }
+
+    noise->GetPointData()->RemoveArray(0);
+    noise->GetPointData()->SetScalars(fVals);
+    fVals->Delete();
+    }
+
+  // load vectors
+  vtkSmartPointer<vtkGenericDataObjectReader> reader
+    = vtkSmartPointer<vtkGenericDataObjectReader>::New();
+
+  reader->SetFileName(filename.c_str());
+  reader->Update();
+
+  vtkDataSet *dataset = vtkDataSet::SafeDownCast(reader->GetOutput());
+  if (!dataset)
+    {
+    cerr << "Error: expected dataset, test fails" << endl;
+    return 1;
+    }
+  double bounds[6];
+  dataset->GetBounds(bounds);
+
+  // Use the XY slice for 3D data; for planar data pick the non-degenerate plane.
+  int dataDesc = VTK_XY_PLANE;
+  if (bounds[0] == bounds[1])
+    {
+    dataDesc = VTK_YZ_PLANE;
+    }
+  else if (bounds[2] == bounds[3])
+    {
+    dataDesc = VTK_XZ_PLANE;
+    }
+  else if (bounds[4] == bounds[5])
+    {
+    dataDesc = VTK_XY_PLANE;
+    }
+
+  int comp[3] = {0,1,2};
+  switch (dataDesc)
+    {
+  case VTK_XY_PLANE:
+    comp[0] = 0;
+    comp[1] = 1;
+    comp[2] = 2;
+    break;
+
+  case VTK_YZ_PLANE:
+    comp[0] = 1;
+    comp[1] = 2;
+    comp[2] = 0;
+    break;
+
+  case VTK_XZ_PLANE:
+    comp[0] = 0;
+    comp[1] = 2;
+    comp[2] = 1;
+    break;
+    }
+
+  int  width  = static_cast<int>(ceil((bounds[2*comp[0]+1]-bounds[2*comp[0]]) * resolution));
+  int  height = static_cast<int>(ceil((bounds[2*comp[1]+1]-bounds[2*comp[1]]) * resolution));
+
+  int dims[3];
+  dims[comp[0]] = width;
+  dims[comp[1]] = height;
+  dims[comp[2]] = 1;
+
+  double spacing[3];
+  spacing[comp[0]] = (bounds[2*comp[0]+1]-bounds[2*comp[0]])/double(width);
+  spacing[comp[1]] = (bounds[2*comp[1]+1]-bounds[2*comp[1]])/double(height);
+  spacing[comp[2]] = 1.0;
+
+  double origin[3] = {bounds[0], bounds[2], bounds[4]};
+
+  int outWidth = magnification*width;
+  int outHeight = magnification*height;
+
+  double outSpacing[3];
+  outSpacing[0] = spacing[comp[0]]/magnification;
+  outSpacing[1] = spacing[comp[1]]/magnification;
+  outSpacing[2] = 1.0;
+
+  // convert input dataset to an image data
+  vtkSmartPointer<vtkImageData> probeData
+    = vtkSmartPointer<vtkImageData>::New();
+
+  probeData->SetOrigin(origin);
+  probeData->SetDimensions(dims);
+  probeData->SetSpacing(spacing);
+
+  vtkSmartPointer<vtkProbeFilter> probe
+    = vtkSmartPointer<vtkProbeFilter>::New();
+
+  probe->SetSourceConnection(reader->GetOutputPort());
+  probe->SetInputData(probeData);
+  probe->Update();
+  probeData = NULL;
+
+  // create and initialize a rendering context
+  vtkSmartPointer<vtkRenderWindow> renWin
+    = vtkSmartPointer<vtkRenderWindow>::New();
+  renWin->Render();
+
+  // create and initialize the image lic'er
+  vtkSmartPointer<vtkImageDataLIC2D> filter
+    = vtkSmartPointer<vtkImageDataLIC2D>::New();
+
+  if (filter->SetContext( renWin ) == 0)
+    {
+    cerr << "WARNING: Required OpenGL not supported, test passes." << endl;
+    return 0;
+    }
+  filter->SetSteps(num_steps);
+  filter->SetStepSize(0.8/magnification);
+  filter->SetMagnification(magnification);
+  filter->SetInputConnection(0, probe->GetOutputPort(0));
+  if ( noise ) filter->SetInputData(1, noise);
+  filter->UpdateInformation();
+  noise = NULL;
+
+  // array to hold the results
+  vtkPixelExtent licDataExt(outWidth, outHeight);
+
+  size_t licDataSize = licDataExt.Size();
+
+  vtkSmartPointer<vtkFloatArray> licData
+    = vtkSmartPointer<vtkFloatArray>::New();
+
+  licData->SetNumberOfComponents(3);
+  licData->SetNumberOfTuples(licDataSize);
+
+  // for each piece in the partitioned dataset compute the LIC and
+  // copy it into the output.
+  for (int kk=0; kk < num_partitions; kk++)
+    {
+    filter->SetUpdateExtent(0, kk, num_partitions, 0);
+    filter->Update();
+
+    vtkImageData *licPieceDataSet = filter->GetOutput();
+    vtkDataArray *licPiece = licPieceDataSet->GetPointData()->GetScalars();
+
+    int tmp[6];
+    licPieceDataSet->GetExtent(tmp);
+
+    vtkPixelExtent licPieceExt(
+            tmp[2*comp[0]],
+            tmp[2*comp[0]+1],
+            tmp[2*comp[1]],
+            tmp[2*comp[1]+1]);
+
+    vtkPixelTransfer::Blit(
+            licPieceExt,
+            licPieceExt,
+            licDataExt,
+            licPieceExt,
+            licPiece->GetNumberOfComponents(),
+            licPiece->GetDataType(),
+            licPiece->GetVoidPointer(0),
+            licData->GetNumberOfComponents(),
+            licData->GetDataType(),
+            licData->GetVoidPointer(0));
+    }
+  probe = NULL;
+  filter = NULL;
+  renWin = NULL;
+
+  // convert from float to unsigned char for the png
+  vtkSmartPointer<vtkUnsignedCharArray> licPng
+    = vtkSmartPointer<vtkUnsignedCharArray>::New();
+
+  licPng->SetNumberOfComponents(3);
+  licPng->SetNumberOfTuples(licDataSize);
+  unsigned char *pPng = licPng->GetPointer(0);
+  float *pData = licData->GetPointer(0);
+  size_t n = 3*licDataSize;
+  for (size_t i=0; i<n; ++i)
+    {
+    pPng[i] = pData[i]*255.0f;
+    }
+  licData = NULL;
+
+  // wrap the result into an image data for the png writer
+  vtkSmartPointer<vtkImageData> pngDataSet
+    = vtkSmartPointer<vtkImageData>::New();
+
+  pngDataSet->SetDimensions(outWidth, outHeight, 1);
+  pngDataSet->SetSpacing(outSpacing);
+  pngDataSet->SetOrigin(origin);
+  pngDataSet->GetPointData()->SetScalars(licPng);
+  licPng = NULL;
+
+  // save a png
+  if (outputpath != "")
+    {
+    vtkSmartPointer<vtkPNGWriter> writer
+      = vtkSmartPointer<vtkPNGWriter>::New();
+
+    writer->SetFileName(outputpath.c_str());
+    writer->SetInputData(pngDataSet);
+    writer->Write();
+    writer = NULL;
+    }
+
+  // run the test
+  vtkSmartPointer<vtkTrivialProducer> tp
+    = vtkSmartPointer<vtkTrivialProducer>::New();
+
+  tp->SetOutput(pngDataSet);
+  int retVal =
+    (tester->RegressionTest(tp, 10) == vtkTesting::PASSED)? 0 : -4;
+  if (retVal)
+    {
+    cerr << "ERROR: test failed." << endl;
+    }
+
+  tp = NULL;
+  pngDataSet = NULL;
+
+  return retVal;
+}
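
    The probe-image sizing above takes --res samples per unit of dataset
    extent and divides the extent by the resulting sample count to get the
    spacing. A worked example with assumed numbers (resolution 10, a
    6.3 x 4.1 slice):

        #include <cmath>
        #include <cstdio>

        int main()
        {
          const double bounds[4] = { 0.0, 6.3, 0.0, 4.1 }; // hypothetical extent
          const int resolution = 10;                       // samples per unit
          int width  = static_cast<int>(std::ceil((bounds[1] - bounds[0]) * resolution));
          int height = static_cast<int>(std::ceil((bounds[3] - bounds[2]) * resolution));
          double dx = (bounds[1] - bounds[0]) / double(width);  // 6.3 / 63 = 0.1
          double dy = (bounds[3] - bounds[2]) / double(height); // 4.1 / 41 = 0.1
          std::printf("%d x %d samples, spacing %g x %g\n", width, height, dx, dy);
          return 0;
        }
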
diff --git a/Rendering/LIC/Testing/Cxx/TestImageDataLIC2D.h b/Rendering/LIC/Testing/Cxx/TestImageDataLIC2D.h
new file mode 100644
index 0000000..903aaa7
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/TestImageDataLIC2D.h
@@ -0,0 +1,24 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestImageDataLIC2D.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#ifndef  __TestImageDataLIC2D_h
+#define  __TestImageDataLIC2D_h
+
+// Example demonstrating use of vtkImageDataLIC2D filter.
+// Typical usage:
+// ./bin/ImageDataLIC2D --data=<vtk file> --output=<png file>
+int ImageDataLIC2D(int argc, char* argv[]);
+
+#endif
diff --git a/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DXSlice.cxx b/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DXSlice.cxx
new file mode 100644
index 0000000..dec1e9c
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DXSlice.cxx
@@ -0,0 +1,55 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestStructuredGridLIC2DXSlice.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkStructuredGridLIC2DTestDriver.h"
+#include "vtkTestUtilities.h"
+#include "vtksys/SystemTools.hxx"
+
+int TestStructuredGridLIC2DXSlice(int argc, char* argv[])
+{
+  char* fname =
+    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/timestep_0_15.vts");
+
+  std::string filename = fname;
+  filename = "--data=" + filename;
+  delete [] fname;
+
+  fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/noise.png");
+  std::string noise = fname;
+  noise = "--noise=" + noise;
+  delete [] fname;
+
+  char** new_argv = new char*[argc+12];
+  for (int cc=0; cc < argc; cc++)
+    {
+    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
+    }
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString(noise.c_str());
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--mag=8");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--partitions=1");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--num-steps=100");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice-dir=0");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice=98");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--zoom-factor=3.0");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--test-mode=1");
+  int status = vtkStructuredGridLIC2DTestDriver(argc, new_argv);
+  for (int kk=0; kk < argc; kk++)
+    {
+    delete [] new_argv[kk];
+    }
+  delete [] new_argv;
+
+  return status;
+}
diff --git a/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DYSlice.cxx b/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DYSlice.cxx
new file mode 100644
index 0000000..ab89d61
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DYSlice.cxx
@@ -0,0 +1,54 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestStructuredGridLIC2DYSlice.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkStructuredGridLIC2DTestDriver.h"
+#include "vtkTestUtilities.h"
+#include "vtksys/SystemTools.hxx"
+
+int TestStructuredGridLIC2DYSlice(int argc, char* argv[])
+{
+  char* fname =
+    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/timestep_0_15.vts");
+
+  std::string filename = fname;
+  filename = "--data=" + filename;
+  delete [] fname;
+
+  fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/noise.png");
+  std::string noise = fname;
+  noise = "--noise=" + noise;
+  delete [] fname;
+
+  char** new_argv = new char*[argc+12];
+  for (int cc=0; cc < argc; cc++)
+    {
+    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
+    }
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString(noise.c_str());
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--mag=8");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--partitions=1");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--num-steps=100");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice-dir=1");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice=0");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--zoom-factor=3.0");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--test-mode=1");
+  int status = vtkStructuredGridLIC2DTestDriver(argc, new_argv);
+  for (int kk=0; kk < argc; kk++)
+    {
+    delete [] new_argv[kk];
+    }
+  delete [] new_argv;
+  return status;
+}
diff --git a/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DZSlice.cxx b/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DZSlice.cxx
new file mode 100644
index 0000000..98e9656
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/TestStructuredGridLIC2DZSlice.cxx
@@ -0,0 +1,54 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestStructuredGridLIC2DZSlice.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkStructuredGridLIC2DTestDriver.h"
+#include "vtkTestUtilities.h"
+#include "vtksys/SystemTools.hxx"
+
+int TestStructuredGridLIC2DZSlice(int argc, char* argv[])
+{
+  char* fname =
+    vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/timestep_0_15.vts");
+
+  std::string filename = fname;
+  filename = "--data=" + filename;
+  delete [] fname;
+
+  fname = vtkTestUtilities::ExpandDataFileName(argc, argv, "Data/noise.png");
+  std::string noise = fname;
+  noise = "--noise=" + noise;
+  delete [] fname;
+
+  char** new_argv = new char*[argc+12];
+  for (int cc=0; cc < argc; cc++)
+    {
+    new_argv[cc] = vtksys::SystemTools::DuplicateString(argv[cc]);
+    }
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString(filename.c_str());
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString(noise.c_str());
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--mag=8");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--partitions=1");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--num-steps=100");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice-dir=2");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--slice=10");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--zoom-factor=3.8");
+  new_argv[argc++] = vtksys::SystemTools::DuplicateString("--test-mode=1");
+  int status = vtkStructuredGridLIC2DTestDriver(argc, new_argv);
+  for (int kk=0; kk < argc; kk++)
+    {
+    delete [] new_argv[kk];
+    }
+  delete [] new_argv;
+  return status;
+}
diff --git a/Rendering/LIC/Testing/Cxx/TestSurfaceLIC.cxx b/Rendering/LIC/Testing/Cxx/TestSurfaceLIC.cxx
new file mode 100644
index 0000000..9bbd069
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/TestSurfaceLIC.cxx
@@ -0,0 +1,252 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSurfaceLIC.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkObjectFactory.h"
+#include "vtkCellData.h"
+#include "vtkDataSetSurfaceFilter.h"
+#include "vtkPointData.h"
+#include "vtkPolyData.h"
+#include "vtkSmartPointer.h"
+#include "vtkXMLPolyDataReader.h"
+#include "vtkXMLMultiBlockDataReader.h"
+#include "vtkGenericDataObjectReader.h"
+#include "vtkCompositeDataPipeline.h"
+#include "vtkCompositeDataSet.h"
+#include "vtkSurfaceLICTestDriver.h"
+
+#include <vtksys/CommandLineArguments.hxx>
+#include <vtksys/SystemTools.hxx>
+#include <vector>
+#include <string>
+
+// Description:
+// Serial regression test: parse the command line, build the
+// pipeline, and call the driver.
+int TestSurfaceLIC(int argc, char* argv[])
+{
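+  // Install a composite-data-aware executive as the default so that the
+  // multi-block (.vtm) code path below works through the standard pipeline.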
+  vtkCompositeDataPipeline* prototype = vtkCompositeDataPipeline::New();
+  vtkAlgorithm::SetDefaultExecutivePrototype(prototype);
+  prototype->Delete();
+
+  std::string filename;
+  int num_steps = 40;
+  double step_size = 0.4;
+  int enhanced_lic = 1;
+  int normalize_vectors = 1;
+  int camera_config = 1;
+  int generate_noise_texture = 0;
+  int noise_type = 1;
+  int noise_texture_size = 200;
+  int noise_grain_size = 2;
+  double min_noise_value = 0.0;
+  double max_noise_value = 1.0;
+  int number_of_noise_levels = 256;
+  double impulse_noise_prob = 1.0;
+  double impulse_noise_bg_value = 0.0;
+  int noise_gen_seed = 1;
+  int enhance_contrast = 0;
+  double low_lic_contrast_enhancement_factor = 0.0;
+  double high_lic_contrast_enhancement_factor = 0.0;
+  double low_color_contrast_enhancement_factor = 0.0;
+  double high_color_contrast_enhancement_factor = 0.0;
+  int anti_alias = 0;
+  int color_mode = 0;
+  double lic_intensity = 0.8;
+  double map_mode_bias = 0.0;
+  int color_by_mag = 0;
+  int mask_on_surface = 0;
+  double mask_threshold = 0.0;
+  double mask_intensity = 0.0;
+  std::vector<double> mask_color_rgb;
+  std::string vectors;
+
+  vtksys::CommandLineArguments arg;
+  arg.StoreUnusedArguments(1);
+  arg.Initialize(argc, argv);
+
+  // Fill up accepted arguments.
+  typedef vtksys::CommandLineArguments argT;
+
+  arg.AddArgument("--data", argT::EQUAL_ARGUMENT, &filename,
+    "(required) Enter dataset to load (currently only *.[vtk|vtp] files are supported");
+  arg.AddArgument("--num-steps", argT::EQUAL_ARGUMENT, &num_steps,
+    "(optional: default 40) Number of steps in each direction");
+  arg.AddArgument("--step-size", argT::EQUAL_ARGUMENT, &step_size,
+    "(optional: default 0.4) Step size in pixels");
+  arg.AddArgument("--enhanced-lic", argT::EQUAL_ARGUMENT, &enhanced_lic,
+    "(optional: default 1) Enable enhanced algoruthm");
+  arg.AddArgument("--color-by-mag", argT::EQUAL_ARGUMENT, &color_by_mag,
+    "(optional: default 0) when set compute the magnitude of the vector and color the lic by this");
+  arg.AddArgument("--vectors", argT::EQUAL_ARGUMENT, &vectors,
+    "(optional: default active point vectors) Name of the vector field array");
+  arg.AddArgument("--normalize-vectors", argT::EQUAL_ARGUMENT, &normalize_vectors,
+    "(optional: default 1) Normalize vectors during integration");
+  arg.AddArgument("--generate-noise-texture", argT::EQUAL_ARGUMENT, &generate_noise_texture,
+    "(optional: default 0) Generate noise texture (if not generate use pickeled 200x200 noise texture.");
+  arg.AddArgument("--noise-type", argT::EQUAL_ARGUMENT, &noise_type,
+    "(optional: default 1) statistical distribution for noise generator, 0=Uniform, 1=Gaussian. 2=perlin");
+  arg.AddArgument("--noise-texture-size", argT::EQUAL_ARGUMENT, &noise_texture_size,
+    "(optional: default 200) side of the square texture in pixels");
+  arg.AddArgument("--noise-grain-size", argT::EQUAL_ARGUMENT, &noise_grain_size,
+    "(optional: default 1) number of pixels each noise value occupies");
+  arg.AddArgument("--min-noise-value", argT::EQUAL_ARGUMENT, &min_noise_value,
+    "(optional: default 0) darkest color noise can contain");
+  arg.AddArgument("--max-noise-value", argT::EQUAL_ARGUMENT, &max_noise_value,
+    "(optional: default 1) lightest color noise can contain");
+  arg.AddArgument("--number-of-noise-levels", argT::EQUAL_ARGUMENT, &number_of_noise_levels,
+    "(optional: default 256) number of gray scale colors");
+  arg.AddArgument("--impulse-noise-prob", argT::EQUAL_ARGUMENT, &impulse_noise_prob,
+    "(optional: default 1.0) probabilty a pixel will conatin a noise value");
+  arg.AddArgument("--impulse-noise-bg-val", argT::EQUAL_ARGUMENT, &impulse_noise_bg_value,
+    "(optional: default 1.0) bg_valabilty a pixel will conatin a noise value");
+  arg.AddArgument("--noise-gen-seed", argT::EQUAL_ARGUMENT, &noise_gen_seed,
+    "(optional: default 1) set the seed to the random number generator");
+  arg.AddArgument("--enhance-contrast", argT::EQUAL_ARGUMENT, &enhance_contrast,
+    "(optional: default 0) Nomralize colors after each pass");
+  arg.AddArgument("--low-lic-contrast-enhancement-factor", argT::EQUAL_ARGUMENT, &low_lic_contrast_enhancement_factor,
+    "(optional: default 0) lower normalization factor 0 is the min");
+  arg.AddArgument("--high-lic-contrast-enhancement-factor", argT::EQUAL_ARGUMENT, &high_lic_contrast_enhancement_factor,
+    "(optional: default 1) upper normalization factor, 0 is the max");
+  arg.AddArgument("--low-color-contrast-enhancement-factor", argT::EQUAL_ARGUMENT, &low_color_contrast_enhancement_factor,
+    "(optional: default 0) lower normalization factor 0 is the min");
+  arg.AddArgument("--high-color-contrast-enhancement-factor", argT::EQUAL_ARGUMENT, &high_color_contrast_enhancement_factor,
+    "(optional: default 1) upper normalization factor, 0 is the max");
+  arg.AddArgument("--anti-alias", argT::EQUAL_ARGUMENT, &anti_alias,
+    "(optional: default 0) apply anti-aliasing pass after lic to remove jagged artifacts");
+  arg.AddArgument("--color-mode", argT::EQUAL_ARGUMENT, &color_mode,
+    "(optional: default 0) choose color blending algorithm 0==blending 1==mapping");
+  arg.AddArgument("--lic-intensity", argT::EQUAL_ARGUMENT, &lic_intensity,
+    "(optional: default 0.8) Contribution of LIC in the final image [1.0 == max contribution]");
+  arg.AddArgument("--map-mode-bias", argT::EQUAL_ARGUMENT, &map_mode_bias,
+    "(optional: default 0.8) Contribution of LIC in the final image [1.0 == max contribution]");
+  arg.AddArgument("--mask-on-surface", argT::EQUAL_ARGUMENT, &mask_on_surface,
+    "(optional: default 0) Mask criteria is computed on surface-projected vectors");
+  arg.AddArgument("--mask-threshold", argT::EQUAL_ARGUMENT, &mask_threshold,
+    "(optional: default 0.0) Mask fragment if |V| < threshold");
+  arg.AddArgument("--mask-intensity", argT::EQUAL_ARGUMENT, &mask_intensity,
+    "(optional: default 0.0) blending factor for masked fragments");
+  arg.AddArgument("--mask-color", argT::MULTI_ARGUMENT, &mask_color_rgb,
+    "(optional: default pink=1.0 0.0 0.84705) mask color");
+  arg.AddArgument("--camera-config", argT::EQUAL_ARGUMENT, &camera_config,
+    "(optional: default 1) use a preset camera configuration");
+
+  if (!arg.Parse() || filename == "")
+    {
+    cerr << "Usage: " << endl;
+    cerr << arg.GetHelp() << endl;
+    return 1;
+    }
+
+  if (mask_color_rgb.size() == 0)
+    {
+    // something bright for the default.
+    mask_color_rgb.resize(3);
+    mask_color_rgb[0] = 1.0;
+    mask_color_rgb[1] = 0.0;
+    mask_color_rgb[2] = 0.84705;
+    }
+
+  vtkSmartPointer<vtkDataObject> dataObj;
+  std::string ext = vtksys::SystemTools::GetFilenameExtension(filename);
+  if (ext == ".vtk")
+    {
+    vtkGenericDataObjectReader* reader = vtkGenericDataObjectReader::New();
+    reader->SetFileName(filename.c_str());
+
+    vtkDataSetSurfaceFilter* surface = vtkDataSetSurfaceFilter::New();
+    surface->SetInputConnection(reader->GetOutputPort());
+    surface->Update();
+
+    dataObj = surface->GetOutputDataObject(0);
+
+    reader->Delete();
+    surface->Delete();
+    }
+  else
+  if (ext == ".vtp")
+    {
+    vtkXMLPolyDataReader* reader = vtkXMLPolyDataReader::New();
+    reader->SetFileName(filename.c_str());
+    reader->Update();
+    dataObj = reader->GetOutputDataObject(0);
+    reader->Delete();
+    }
+  else
+  if (ext == ".vtm")
+    {
+    vtkXMLMultiBlockDataReader* reader = vtkXMLMultiBlockDataReader::New();
+    reader->SetFileName(filename.c_str());
+
+    vtkDataSetSurfaceFilter* surface = vtkDataSetSurfaceFilter::New();
+    surface->SetInputConnection(reader->GetOutputPort());
+    surface->Update();
+
+    dataObj = surface->GetOutputDataObject(0);
+
+    reader->Delete();
+    surface->Delete();
+    }
+  else
+    {
+    cerr << "Error: Unknown extension: '" << ext << "'"<< endl;
+    vtkAlgorithm::SetDefaultExecutivePrototype(NULL);
+    return 1;
+    }
+
+  if (!dataObj) // || dataObj->GetNumberOfElements(vtkDataObject::POINT) == 0)
+    {
+    cerr << "Error reading file: '" << filename.c_str() << "'" << endl;
+    vtkAlgorithm::SetDefaultExecutivePrototype(NULL);
+    return 1;
+    }
+
+  int status = vtkSurfaceLICTestDriver(
+        argc,
+        argv,
+        dataObj,
+        num_steps,
+        step_size,
+        enhanced_lic,
+        normalize_vectors,
+        camera_config,
+        generate_noise_texture,
+        noise_type,
+        noise_texture_size,
+        noise_grain_size,
+        min_noise_value,
+        max_noise_value,
+        number_of_noise_levels,
+        impulse_noise_prob,
+        impulse_noise_bg_value,
+        noise_gen_seed,
+        enhance_contrast,
+        low_lic_contrast_enhancement_factor,
+        high_lic_contrast_enhancement_factor,
+        low_color_contrast_enhancement_factor,
+        high_color_contrast_enhancement_factor,
+        anti_alias,
+        color_mode,
+        lic_intensity,
+        map_mode_bias,
+        color_by_mag,
+        mask_on_surface,
+        mask_threshold,
+        mask_intensity,
+        mask_color_rgb,
+        vectors);
+
+  vtkAlgorithm::SetDefaultExecutivePrototype(NULL);
+
+  return status;
+}
diff --git a/Rendering/LIC/Testing/Cxx/TestSurfaceLIC.h b/Rendering/LIC/Testing/Cxx/TestSurfaceLIC.h
new file mode 100644
index 0000000..e16c6c2
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/TestSurfaceLIC.h
@@ -0,0 +1,20 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSurfaceLIC.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __TestSurfaceLIC_h
+#define __TestSurfaceLIC_h
+
+int TestSurfaceLIC(int argc, char* argv[]);
+
+#endif
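For reference, a minimal sketch of invoking the TestSurfaceLIC entry point declared above; the file path and array name are placeholders, and note that the driver exits with an error unless --vectors is supplied even though the help text only marks --data as required:

int TestSurfaceLIC(int argc, char* argv[]); // declared in TestSurfaceLIC.h above

int RunSurfaceLICSketch()
{
  // Placeholder file and array names; --vectors is effectively required.
  const char* args[] =
    {
    "TestSurfaceLIC",
    "--data=/path/to/surface.vtp",
    "--vectors=V",
    "--color-by-mag=1"
    };
  // The argument strings are assumed to be read-only by the driver,
  // so dropping const here is only for the argv signature.
  return TestSurfaceLIC(4, const_cast<char**>(args));
}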
diff --git a/Rendering/LIC/Testing/Cxx/vtkStructuredGridLIC2DTestDriver.cxx b/Rendering/LIC/Testing/Cxx/vtkStructuredGridLIC2DTestDriver.cxx
new file mode 100644
index 0000000..fb632cf
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/vtkStructuredGridLIC2DTestDriver.cxx
@@ -0,0 +1,335 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestStructuredGridLIC2DSlice.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkStructuredGridLIC2D.h"
+#include "vtkPNGReader.h"
+#include "vtkXMLStructuredGridReader.h"
+#include "vtkExtractGrid.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderer.h"
+#include "vtkActor.h"
+#include "vtkTexture.h"
+#include "vtkDataSetMapper.h"
+#include "vtkTestUtilities.h"
+#include "vtkRegressionTestImage.h"
+#include "vtkPNGWriter.h"
+#include "vtkImageShiftScale.h"
+#include "vtkImageData.h"
+#include "vtkPointData.h"
+#include "vtkProperty.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkDataSetSurfaceFilter.h"
+#include "vtkImageExtractComponents.h"
+#include "vtkTestUtilities.h"
+#include "vtkCamera.h"
+#include "vtkSmartPointer.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
+#include "vtkTimerLog.h"
+#include <vtksys/CommandLineArguments.hxx>
+#include <string>
+
+// --------------------------------------------------------------------------
+static inline
+int CLAMP(int a, int low, int high)
+{
+  a = (a < low)? low : a;
+  a = (a > high)? high : a;
+  return a;
+}
+
+// --------------------------------------------------------------------------
+int vtkStructuredGridLIC2DTestDriver(int argc, char* argv[])
+{
+  std::string filename;
+  std::string noise_filename;
+  int resolution = 10;
+  int magnification = 1;
+  std::string outputpath;
+  int num_partitions = 1;
+  int num_steps = 40;
+  int slice_dir = 2; // 0 == X, 1 == Y, 2 == Z
+  int slice = 0;
+  int test_mode = 0;
+  double zoom_factor = 2.8;
+
+  vtksys::CommandLineArguments arg;
+  arg.StoreUnusedArguments(1);
+  arg.Initialize(argc, argv);
+
+  typedef vtksys::CommandLineArguments argT;
+  arg.AddArgument("--data", argT::EQUAL_ARGUMENT, &filename,
+    "(required) Enter dataset to load (currently only *.vts files are supported");
+  arg.AddArgument("--res", argT::EQUAL_ARGUMENT, &resolution,
+    "(optional: default 10) Number of sample per unit distance");
+  arg.AddArgument("--mag", argT::EQUAL_ARGUMENT, &magnification,
+    "(optional: default 1) Magnification");
+  arg.AddArgument("--output", argT::EQUAL_ARGUMENT, &outputpath,
+    "(optional) Output png image");
+  arg.AddArgument("--partitions", argT::EQUAL_ARGUMENT, &num_partitions,
+    "(optional: default 1) Number of partitions");
+  arg.AddArgument("--num-steps", argT::EQUAL_ARGUMENT, &num_steps,
+    "(optional: default 40) Number of steps in each direction");
+  arg.AddArgument("--noise", argT::EQUAL_ARGUMENT, &noise_filename,
+    "(optional) Specify the filename to a png image file to use as the noise texture");
+  arg.AddArgument("--slice", argT::EQUAL_ARGUMENT, &slice,
+    "(optional: default 0) The slice number in the direction specified by slice-dir. "
+    "This is ignored for 2D data.");
+  arg.AddArgument("--slice-dir", argT::EQUAL_ARGUMENT, &slice_dir,
+    "(optional: default 2 (Z slices)) The slice direction: 0 for X slices, 1 for Y slices and 2 for Z slices. "
+    "This is ignored for 2D data.");
+  arg.AddArgument("--test-mode", argT::EQUAL_ARGUMENT, &test_mode,
+    "(optional: default 0) run as ctest or demo");
+  arg.AddArgument("--zoom-factor", argT::EQUAL_ARGUMENT, &zoom_factor,
+    "(optional: default 2.8) set camera zoom");
+
+  if (!arg.Parse() || filename=="")
+    {
+    cerr << "Problem parsing arguments." << endl;
+    cerr << arg.GetHelp() << endl;
+    return 1;
+    }
+
+  if (magnification < 1)
+    {
+    cout << "WARNING: Magnification \'" << magnification  << "\' is invalid."
+      " Forcing a magnification of 1.";
+    magnification = 1;
+    }
+
+  if (num_steps < 1)
+    {
+    cout << "WARNING: Number of steps cannot be less than 1. Forcing 10.";
+    num_steps = 10;
+    }
+
+  if (slice_dir < 0 || slice_dir > 2)
+    {
+    cout << "WARNING: Invalid slice-dir (" <<slice_dir<<"). Forcing Z slices";
+    slice_dir = 2;
+    }
+
+  vtkSmartPointer<vtkXMLStructuredGridReader> reader
+    = vtkSmartPointer<vtkXMLStructuredGridReader>::New();
+
+  reader->SetFileName(filename.c_str());
+  reader->Update();
+
+  int dataDesc = VTK_XY_PLANE;
+  switch(slice_dir)
+    {
+  case 0:
+    dataDesc = VTK_YZ_PLANE;
+    break;
+  case 1:
+    dataDesc = VTK_XZ_PLANE;
+    break;
+  case 2:
+  default:
+    dataDesc = VTK_XY_PLANE;
+    }
+
+  int extent[6];
+  int voi[6];
+  reader->GetOutput()->GetExtent(extent);
+  memcpy(voi, extent, 6*sizeof(int));
+
+  // If data is 2D, then override the slice-dir
+  if (extent[0] == extent[1])
+    {
+    dataDesc = VTK_YZ_PLANE;
+    slice = 0;
+    }
+  else if (extent[2] == extent[3])
+    {
+    dataDesc = VTK_XZ_PLANE;
+    slice = 0;
+    }
+  else if (extent[4] == extent[5])
+    {
+    dataDesc = VTK_XY_PLANE;
+    slice = 0;
+    }
+  else
+    {
+    switch (dataDesc)
+      {
+    case VTK_XY_PLANE:
+      voi[4] = voi[5] = CLAMP(extent[4]+slice, extent[4], extent[5]);
+      break;
+
+    case VTK_YZ_PLANE:
+      voi[0] = voi[1] = CLAMP(extent[0]+slice, extent[0], extent[1]);
+      break;
+
+    case VTK_XZ_PLANE:
+      voi[2] = voi[3] = CLAMP(extent[2]+slice, extent[2], extent[3]);
+      break;
+      }
+    }
+
+  vtkSmartPointer<vtkExtractGrid> extractVOI
+    = vtkSmartPointer<vtkExtractGrid>::New();
+
+  extractVOI->SetInputConnection(reader->GetOutputPort());
+  extractVOI->SetVOI(voi);
+
+  vtkSmartPointer<vtkRenderWindow> renWin
+    = vtkSmartPointer<vtkRenderWindow>::New();
+
+  vtkSmartPointer<vtkRenderer> renderer
+    = vtkSmartPointer<vtkRenderer>::New();
+
+  vtkSmartPointer<vtkRenderWindowInteractor> iren
+    = vtkSmartPointer<vtkRenderWindowInteractor>::New();
+
+  renWin->AddRenderer(renderer);
+  iren->SetRenderWindow(renWin);
+
+  renWin->Render();
+
+  vtkSmartPointer<vtkStructuredGridLIC2D> filter
+    = vtkSmartPointer<vtkStructuredGridLIC2D>::New();
+
+  if (  filter->SetContext( renWin ) == 0  )
+    {
+    cout << "Required OpenGL extensions / GPU not supported." << endl;
+    return 0;
+    }
+
+  filter->SetInputConnection(extractVOI->GetOutputPort());
+
+  if (noise_filename != "")
+    {
+    vtkSmartPointer<vtkPNGReader> pngReader
+      = vtkSmartPointer<vtkPNGReader>::New();
+
+    pngReader->SetFileName(noise_filename.c_str());
+    filter->SetInputConnection(1, pngReader->GetOutputPort(0));
+    }
+
+  filter->SetSteps(num_steps);
+  filter->SetStepSize(0.01/magnification);
+  filter->SetMagnification(magnification);
+
+  for (int kk=0; kk < num_partitions; kk++)
+    {
+    filter->SetUpdateExtent(0, kk, num_partitions, 0);
+
+    cout << "*****************" << endl;
+    filter->Update();
+    if (  filter->GetFBOSuccess() == 0 ||
+          filter->GetLICSuccess() == 0  )
+      {
+      return 0;
+      }
+
+    vtkSmartPointer<vtkImageData> clone
+      = vtkSmartPointer<vtkImageData>::New();
+
+    clone->ShallowCopy(filter->GetOutputDataObject(1));
+
+    double range[2];
+    clone->GetPointData()->GetScalars()->GetRange(range);
+
+    vtkSmartPointer<vtkImageShiftScale> caster
+      = vtkSmartPointer<vtkImageShiftScale>::New();
+
+    caster->SetInputData(clone);
+    caster->SetOutputScalarTypeToUnsignedChar();
+    caster->SetShift(-range[0]);
+    caster->SetScale(255.0/(range[1]-range[0]));
+    caster->Update();
+
+    //vtkPNGWriter* writer = vtkPNGWriter::New();
+    //writer->SetFileName("/tmp/lic.png");
+    //writer->SetInputConnection(caster->GetOutputPort());
+    //writer->Write();
+    //writer->Delete();
+
+    vtkSmartPointer<vtkTexture> texture
+      = vtkSmartPointer<vtkTexture>::New();
+
+    texture->SetInputConnection(caster->GetOutputPort());
+
+    vtkSmartPointer<vtkStructuredGrid> clone2
+      = vtkSmartPointer<vtkStructuredGrid>::New();
+
+    clone2->ShallowCopy(filter->GetOutput(0));
+
+    vtkSmartPointer<vtkDataSetSurfaceFilter> surfaceFilter
+      = vtkSmartPointer<vtkDataSetSurfaceFilter>::New();
+
+    surfaceFilter->SetInputData(clone2);
+
+    vtkSmartPointer<vtkPolyDataMapper> mapper
+      = vtkSmartPointer<vtkPolyDataMapper>::New();
+
+    mapper->SetInputConnection(surfaceFilter->GetOutputPort());
+    mapper->SetScalarVisibility(0);
+
+
+    vtkSmartPointer<vtkActor> actor
+      = vtkSmartPointer<vtkActor>::New();
+
+    actor->SetMapper(mapper);
+    actor->SetTexture(texture);
+
+    renderer->AddActor(actor);
+    }
+
+  vtkSmartPointer<vtkTesting> tester
+    = vtkSmartPointer<vtkTesting>::New();
+
+  for (int cc=0; cc < argc; cc++)
+    {
+    tester->AddArgument(argv[cc]);
+    }
+  tester->SetRenderWindow(renWin);
+
+  renderer->SetBackground(0.2,0.1,0.2);
+  renderer->ResetCamera();
+  renderer->GetActiveCamera()->Zoom( zoom_factor );
+
+  if ( test_mode )
+    {
+    switch (dataDesc)
+      {
+      case VTK_YZ_PLANE:
+        renderer->GetActiveCamera()->Azimuth(90);
+        break;
+      case VTK_XZ_PLANE:
+        renderer->GetActiveCamera()->Elevation(90);
+        break;
+      }
+    }
+
+ renWin->Render();
+ int reply = (!tester->IsValidImageSpecified() ||
+   (tester->RegressionTest(10) == vtkTesting::PASSED))? /*success*/ 0 : /*failure*/ 1;
+
+ if ( tester->IsInteractiveModeSpecified() || !test_mode )
+   {
+   iren->Start();
+   }
+
+ return reply;
+}
+
+// --------------------------------------------------------------------------
+int StructuredGridLIC2DDemo(int argc, char* argv[])
+{
+  return vtkStructuredGridLIC2DTestDriver(argc, argv);
+}
diff --git a/Rendering/LIC/Testing/Cxx/vtkStructuredGridLIC2DTestDriver.h b/Rendering/LIC/Testing/Cxx/vtkStructuredGridLIC2DTestDriver.h
new file mode 100644
index 0000000..fdda712
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/vtkStructuredGridLIC2DTestDriver.h
@@ -0,0 +1,26 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestStructuredGridLIC2DSlice.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef  __vtkStructuredGridLIC2DTestDriver_h
+#define  __vtkStructuredGridLIC2DTestDriver_h
+
+// Description:
+// For demo.
+int StructuredGridLIC2DDemo(int argc, char* argv[]);
+
+// Description:
+// For ctests
+int vtkStructuredGridLIC2DTestDriver(int argc, char* argv[]);
+
+#endif
diff --git a/Rendering/LIC/Testing/Cxx/vtkSurfaceLICTestDriver.cxx b/Rendering/LIC/Testing/Cxx/vtkSurfaceLICTestDriver.cxx
new file mode 100644
index 0000000..32d1544
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/vtkSurfaceLICTestDriver.cxx
@@ -0,0 +1,406 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSurfaceLIC.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkObjectFactory.h"
+#include "vtkSmartPointer.h"
+#include "vtkPointData.h"
+#include "vtkCellData.h"
+#include "vtkPolyData.h"
+#include "vtkDataObject.h"
+#include "vtkDataSet.h"
+#include "vtkCompositeDataSet.h"
+#include "vtkCompositeDataIterator.h"
+#include "vtkTestUtilities.h"
+#include "vtkCamera.h"
+#include "vtkColorTransferFunction.h"
+#include "vtkSurfaceLICPainter.h"
+#include "vtkSurfaceLICDefaultPainter.h"
+#include "vtkPainterPolyDataMapper.h"
+#include "vtkRegressionTestImage.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkCompositeDataPipeline.h"
+#include "vtkCompositePolyDataMapper2.h"
+#include "vtkgl.h"
+
+#include <vector>
+#include <string>
+
+#ifndef vtkFloatingPointTemplateMacro
+#define vtkFloatingPointTemplateMacro(call) \
+  vtkTemplateMacroCase(VTK_DOUBLE, double, call); \
+  vtkTemplateMacroCase(VTK_FLOAT, float, call);
+#endif
+
+// Helper to compute range
+static
+void Range(vtkDataArray *S, double *range)
+{
+  double Srange[2];
+  S->GetRange(Srange);
+  range[0] = Srange[0] < range[0] ? Srange[0] : range[0];
+  range[1] = Srange[1] > range[1] ? Srange[1] : range[1];
+}
+
+// helper to compute magnitude
+static
+vtkDataArray *Magnitude(vtkDataArray *V)
+{
+  vtkIdType nTups = V->GetNumberOfTuples();
+  vtkIdType nComps = V->GetNumberOfComponents();
+  vtkDataArray *magV = V->NewInstance();
+  magV->SetNumberOfTuples(nTups);
+  switch (V->GetDataType())
+    {
+    vtkFloatingPointTemplateMacro(
+      VTK_TT *pV = (VTK_TT*)V->GetVoidPointer(0);
+      VTK_TT *pMagV = (VTK_TT*)magV->GetVoidPointer(0);
+      for (vtkIdType i=0; i<nTups; ++i)
+        {
+        VTK_TT mag = VTK_TT(0);
+        for (vtkIdType j=0; j<nComps; ++j)
+          {
+          VTK_TT v = pV[i*nComps+j];
+          mag += v*v;
+          }
+        pMagV[i] = sqrt(mag);
+        }
+      );
+  default:
+    cerr << "ERROR: vectors must be float or double" << endl;
+    break;
+    }
+  return magV;
+}
+
+// Compute the magnitude of the named point vector array and return it;
+// the caller adds it to the dataset and accumulates the scalar range.
+static
+vtkDataArray *Magnitude(vtkDataSet *ds, std::string &vectors)
+{
+  vtkDataArray *V = NULL;
+  V = ds->GetPointData()->GetArray(vectors.c_str());
+  if (V == NULL)
+    {
+    cerr << "ERROR: point vectors " << vectors << " not found" << endl;
+    return NULL;
+    }
+  vtkDataArray *magV = Magnitude(V);
+  std::string magVName = "mag"+vectors;
+  magV->SetName(magVName.c_str());
+  return magV;
+}
+
+// This example demonstrates the use of vtkSurfaceLICPainter for rendering
+// geometry with LIC on the surface.
+int vtkSurfaceLICTestDriver(
+      int argc,
+      char **argv,
+      vtkDataObject *dataObj,
+      int num_steps,
+      double step_size,
+      int enhanced_lic,
+      int normalize_vectors,
+      int camera_config,
+      int generate_noise_texture,
+      int noise_type,
+      int noise_texture_size,
+      int noise_grain_size,
+      double min_noise_value,
+      double max_noise_value,
+      int number_of_noise_levels,
+      double impulse_noise_prob,
+      double impulse_noise_bg_value,
+      int noise_gen_seed,
+      int enhance_contrast,
+      double low_lic_contrast_enhancement_factor,
+      double high_lic_contrast_enhancement_factor,
+      double low_color_contrast_enhancement_factor,
+      double high_color_contrast_enhancement_factor,
+      int anti_alias,
+      int color_mode,
+      double lic_intensity,
+      double map_mode_bias,
+      int color_by_mag,
+      int mask_on_surface,
+      double mask_threshold,
+      double mask_intensity,
+      std::vector<double> &mask_color_rgb,
+      std::string &vectors)
+{
+  // Set up the render window, renderer, interactor.
+  vtkSmartPointer<vtkRenderWindow> renWin
+    = vtkSmartPointer<vtkRenderWindow>::New();
+
+  vtkSmartPointer<vtkRenderer> renderer
+    = vtkSmartPointer<vtkRenderer>::New();
+
+  vtkSmartPointer<vtkRenderWindowInteractor> iren
+    = vtkSmartPointer<vtkRenderWindowInteractor>::New();
+
+  renWin->AddRenderer(renderer);
+  iren->SetRenderWindow(renWin);
+
+  if (camera_config == 1)
+    {
+    renWin->SetSize(300, 300);
+    }
+  else
+  if (camera_config == 2)
+    {
+    renWin->SetSize(300, 270);
+    }
+  else
+  if (camera_config == 3)
+    {
+    renWin->SetSize(400, 340);
+    }
+  else
+  if (camera_config == 4)
+    {
+    renWin->SetSize(364, 256);
+    }
+  renWin->Render();
+
+  if (!vtkSurfaceLICPainter::IsSupported(renWin))
+    {
+    cerr
+      << "WARNING: The rendering context does not support required extensions."
+      << endl;
+    dataObj = NULL;
+    renWin = NULL;
+    renderer = NULL;
+    iren = NULL;
+    vtkAlgorithm::SetDefaultExecutivePrototype(NULL);
+    return 0;
+    }
+
+  // Create a mapper and insert the vtkSurfaceLICPainter painter into the
+// painter chain. This is essential since the entire logic for performing the
+// LIC lives in vtkSurfaceLICPainter.
+
+  vtkSmartPointer<vtkCompositePolyDataMapper2> mapper
+    = vtkSmartPointer<vtkCompositePolyDataMapper2>::New();
+
+  vtkSmartPointer<vtkSurfaceLICDefaultPainter> defPainter
+    = vtkSmartPointer<vtkSurfaceLICDefaultPainter>::New();
+
+  defPainter->SetDelegatePainter(mapper->GetPainter()->GetDelegatePainter());
+  mapper->SetPainter(defPainter);
+  vtkSurfaceLICPainter *painter = defPainter->GetSurfaceLICPainter();
+  defPainter = NULL;
+
+  // print details of the test
+  // convenient for debugging failed
+  // tests on remote dashboards.
+  const char *vtkGLVersion = reinterpret_cast<const char*>(glGetString(GL_VERSION));
+  const char *vtkGLVendor = reinterpret_cast<const char*>(glGetString(GL_VENDOR));
+  const char *vtkGLRenderer = reinterpret_cast<const char*>(glGetString(GL_RENDERER));
+  const char *vtkLICClass = painter->GetClassName();
+  std::string details
+    = std::string("\n\n====================================================================\n")
+    + std::string("VTK Painter:\n")
+    + std::string("    ") + std::string(vtkLICClass) + std::string("\n")
+    + std::string("OpenGL:\n")
+    + std::string("    ") + std::string(vtkGLVersion?vtkGLVersion:"unknown") + std::string("\n")
+    + std::string("    ") + std::string(vtkGLRenderer?vtkGLRenderer:"unknown") + std::string("\n")
+    + std::string("    ") + std::string(vtkGLVendor?vtkGLVendor:"unknown") + std::string("\n")
+    + std::string("====================================================================\n\n\n");
+  cerr << details << endl;
+
+  // If user chose a vector field, select it.
+  if (vectors != "")
+    {
+    painter->SetInputArrayToProcess(
+      vtkDataObject::FIELD_ASSOCIATION_POINTS_THEN_CELLS,
+      vectors.c_str());
+    }
+  else
+    {
+    cerr << "ERROR: vectors must be set using --vectors." << endl;
+    return 1;
+    }
+
+  // Set the mapper input
+  mapper->SetInputDataObject(dataObj);
+
+  if (color_by_mag)
+    {
+    if ( vectors.empty() )
+      {
+      cerr << "ERROR: color by mag requires using --vectors." << endl;
+      vtkAlgorithm::SetDefaultExecutivePrototype(NULL);
+      return 1;
+      }
+
+    const char *magVName = NULL;
+    double range[2] = {VTK_FLOAT_MAX, -VTK_FLOAT_MAX};
+    vtkCompositeDataSet *cd = dynamic_cast<vtkCompositeDataSet*>(dataObj);
+    if (cd)
+      {
+      vtkCompositeDataIterator* iter = cd->NewIterator();
+      for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
+        {
+        vtkDataSet* ds = dynamic_cast<vtkDataSet*>(iter->GetCurrentDataObject());
+        if (ds && ds->GetNumberOfCells())
+          {
+          vtkDataArray *magV = Magnitude(ds, vectors);
+          magVName = magV->GetName();
+          Range(magV, range);
+          ds->GetPointData()->SetScalars(magV);
+          magV->Delete();
+          }
+        }
+      iter->Delete();
+      }
+    vtkDataSet* ds = dynamic_cast<vtkDataSet*>(dataObj);
+    if (ds && ds->GetNumberOfCells())
+      {
+      vtkDataArray *magV = Magnitude(ds, vectors);
+      magVName = magV->GetName();
+      Range(magV, range);
+      ds->GetPointData()->SetScalars(magV);
+      magV->Delete();
+      }
+    if (!magVName)
+      {
+      cerr << "ERROR: color by mag could not generate magV." << endl;
+      vtkAlgorithm::SetDefaultExecutivePrototype(NULL);
+      return 1;
+      }
+    vtkColorTransferFunction *lut = vtkColorTransferFunction::New();
+    lut->SetColorSpaceToRGB();
+    lut->AddRGBPoint(range[0], 0.0, 0.0, 1.0);
+    lut->AddRGBPoint(range[1], 1.0, 0.0, 0.0);
+    lut->SetColorSpaceToDiverging();
+    lut->Build();
+    mapper->SetLookupTable(lut);
+    mapper->SetScalarModeToUsePointData();
+    mapper->SetScalarVisibility(1);
+    mapper->SelectColorArray(magVName);
+    mapper->SetUseLookupTableScalarRange(1);
+    mapper->SetScalarMode(VTK_SCALAR_MODE_USE_POINT_FIELD_DATA);
+    lut->Delete();
+    }
+  dataObj = NULL;
+
+  // Pass parameters.
+  painter->SetNumberOfSteps(num_steps);
+  painter->SetStepSize(step_size);
+  painter->SetEnhancedLIC(enhanced_lic);
+  painter->SetGenerateNoiseTexture(generate_noise_texture);
+  painter->SetNoiseType(noise_type);
+  painter->SetNormalizeVectors(normalize_vectors);
+  painter->SetNoiseTextureSize(noise_texture_size);
+  painter->SetNoiseGrainSize(noise_grain_size);
+  painter->SetMinNoiseValue(min_noise_value);
+  painter->SetMaxNoiseValue(max_noise_value);
+  painter->SetNumberOfNoiseLevels(number_of_noise_levels);
+  painter->SetImpulseNoiseProbability(impulse_noise_prob);
+  painter->SetImpulseNoiseBackgroundValue(impulse_noise_bg_value);
+  painter->SetNoiseGeneratorSeed(noise_gen_seed);
+  painter->SetEnhanceContrast(enhance_contrast);
+  painter->SetLowLICContrastEnhancementFactor(low_lic_contrast_enhancement_factor);
+  painter->SetHighLICContrastEnhancementFactor(high_lic_contrast_enhancement_factor);
+  painter->SetLowColorContrastEnhancementFactor(low_color_contrast_enhancement_factor);
+  painter->SetHighColorContrastEnhancementFactor(high_color_contrast_enhancement_factor);
+  painter->SetAntiAlias(anti_alias);
+  painter->SetColorMode(color_mode);
+  painter->SetLICIntensity(lic_intensity);
+  painter->SetMapModeBias(map_mode_bias);
+  painter->SetMaskOnSurface(mask_on_surface);
+  painter->SetMaskThreshold(mask_threshold);
+  painter->SetMaskIntensity(mask_intensity);
+  painter->SetMaskColor(&mask_color_rgb[0]);
+
+  vtkSmartPointer<vtkActor> actor
+    = vtkSmartPointer<vtkActor>::New();
+
+  actor->SetMapper(mapper);
+  renderer->AddActor(actor);
+  renderer->SetBackground(0.3, 0.3, 0.3);
+  mapper = NULL;
+  actor = NULL;
+
+  vtkCamera *camera = renderer->GetActiveCamera();
+
+  if (camera_config == 1)
+    {
+    renWin->SetSize(300, 300);
+    renderer->SetBackground(0.3216, 0.3412, 0.4314);
+    renderer->SetBackground2(0.0, 0.0, 0.1647);
+    renderer->GradientBackgroundOn();
+    camera->SetFocalPoint(-1.88, -0.98, -1.04);
+    camera->SetPosition(13.64, 4.27, -31.59);
+    camera->SetViewAngle(30);
+    camera->SetViewUp(0.41, 0.83, 0.35);
+    renderer->ResetCamera();
+    }
+  else
+  if (camera_config == 2)
+    {
+    renWin->SetSize(300, 270);
+    camera->SetFocalPoint(0.0, 0.0, 0.0);
+    camera->SetPosition(1.0, 0.0, 0.0);
+    camera->SetViewAngle(30);
+    camera->SetViewUp(0.0, 0.0, 1.0);
+    renderer->ResetCamera();
+    camera->Zoom(1.2);
+    }
+  else
+  if (camera_config == 3)
+    {
+    renWin->SetSize(400, 340);
+    camera->SetFocalPoint(0.0, 0.0, 0.0);
+    camera->SetPosition(1.0, 0.0, 0.0);
+    camera->SetViewAngle(30);
+    camera->SetViewUp(0.0, 0.0, 1.0);
+    renderer->ResetCamera();
+    camera->Zoom(1.4);
+    }
+  else
+  if (camera_config == 4)
+    {
+    renWin->SetSize(364, 256);
+    renderer->SetBackground(0.3216, 0.3412, 0.4314);
+    renderer->SetBackground2(0.0, 0.0, 0.1647);
+    renderer->GradientBackgroundOn();
+    camera->SetFocalPoint(-30.3, 15.2, 7.0);
+    camera->SetPosition(64.7, 3.2, -14.0);
+    camera->SetViewAngle(30);
+    camera->SetViewUp(0.25, 0.5, 0.8);
+    //renderer->ResetCamera();
+    camera->Zoom(1.09);
+    }
+
+  int retVal = vtkTesting::Test(argc, argv, renWin, 75);
+  if (retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    renWin->Render();
+    iren->Start();
+    }
+
+  renderer = NULL;
+  renWin = NULL;
+  iren = NULL;
+
+  if ((retVal == vtkTesting::PASSED) || (retVal == vtkTesting::DO_INTERACTOR))
+    {
+    return 0;
+    }
+  // test failed.
+  return 1;
+}
diff --git a/Rendering/LIC/Testing/Cxx/vtkSurfaceLICTestDriver.h b/Rendering/LIC/Testing/Cxx/vtkSurfaceLICTestDriver.h
new file mode 100644
index 0000000..4f985fb
--- /dev/null
+++ b/Rendering/LIC/Testing/Cxx/vtkSurfaceLICTestDriver.h
@@ -0,0 +1,61 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestSurfaceLIC.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __vtkTestSurfaceLICDriver_h
+#define __vtkTestSurfaceLICDriver_h
+
+#include "vtkSystemIncludes.h" // include it first
+#include <vector> // for vector
+#include <string> // for string
+class vtkDataObject;
+
+// Description:
+// Internal test driver. Used by serial and parallel
+// ctests.
+int vtkSurfaceLICTestDriver(
+      int argc,
+      char **argv,
+      vtkDataObject *dataObj,
+      int num_steps,
+      double step_size,
+      int enhanced_lic,
+      int normalize_vectors,
+      int camera_config,
+      int generate_noise_texture,
+      int noise_type,
+      int noise_texture_size,
+      int noise_grain_size,
+      double min_noise_value,
+      double max_noise_value,
+      int number_of_noise_levels,
+      double impulse_noise_prob,
+      double impulse_noise_bg_value,
+      int noise_gen_seed,
+      int enhance_contrast,
+      double low_lic_contrast_enhancement_factor,
+      double high_lic_contrast_enhancement_factor,
+      double low_color_contrast_enhancement_factor,
+      double high_color_contrast_enhancement_factor,
+      int anti_alias,
+      int color_mode,
+      double lic_intensity,
+      double map_mode_bias,
+      int color_by_mag,
+      int mask_on_surface,
+      double mask_threshold,
+      double mask_intensity,
+      std::vector<double> &mask_color_rgb,
+      std::string &vectors);
+
+#endif
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestImageDataLIC2D.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestImageDataLIC2D.png.md5
new file mode 100644
index 0000000..3db4708
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestImageDataLIC2D.png.md5
@@ -0,0 +1 @@
+b5d78214f60aec7a0e059ed01a4e5a5c
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestImageDataLIC2D_1.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestImageDataLIC2D_1.png.md5
new file mode 100644
index 0000000..435c78e
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestImageDataLIC2D_1.png.md5
@@ -0,0 +1 @@
+bee54a2489a7eed433e09f29c4261f9c
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DXSlice.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DXSlice.png.md5
new file mode 100644
index 0000000..ac24bb7
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DXSlice.png.md5
@@ -0,0 +1 @@
+5ceb976e5743f95132389ca08a30e515
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DXSlice_1.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DXSlice_1.png.md5
new file mode 100644
index 0000000..d0be177
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DXSlice_1.png.md5
@@ -0,0 +1 @@
+96be4970872f8595d7015602eb326bf0
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DYSlice.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DYSlice.png.md5
new file mode 100644
index 0000000..bc3a315
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DYSlice.png.md5
@@ -0,0 +1 @@
+6ee6222ebd80b1db7a7bcc3bb3ef7bdd
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DYSlice_1.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DYSlice_1.png.md5
new file mode 100644
index 0000000..cbec01a
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DYSlice_1.png.md5
@@ -0,0 +1 @@
+9057700a8a346aebd86fcd195a16a9a1
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DZSlice.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DZSlice.png.md5
new file mode 100644
index 0000000..34a2bea
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DZSlice.png.md5
@@ -0,0 +1 @@
+90414545b69a53daf2637b1b96e1d00d
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DZSlice_1.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DZSlice_1.png.md5
new file mode 100644
index 0000000..8a755e5
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestStructuredGridLIC2DZSlice_1.png.md5
@@ -0,0 +1 @@
+8680abc81a0c377cb1120aa42e922904
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLIC.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLIC.png.md5
new file mode 100644
index 0000000..a0ef54e
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLIC.png.md5
@@ -0,0 +1 @@
+b695e5e37173ba0be139358cce86cc8c
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedBlended.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedBlended.png.md5
new file mode 100644
index 0000000..b914d12
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedBlended.png.md5
@@ -0,0 +1 @@
+310ec71930a4f24c5549fa8e5358e60c
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedBlendedSmallGrain.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedBlendedSmallGrain.png.md5
new file mode 100644
index 0000000..4aa371b
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedBlendedSmallGrain.png.md5
@@ -0,0 +1 @@
+14553c1976a6c7e9a66e87a6a464bc4a
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorBlendedSmallGrain.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorBlendedSmallGrain.png.md5
new file mode 100644
index 0000000..d02ba67
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorBlendedSmallGrain.png.md5
@@ -0,0 +1 @@
+3b671f3a6c61bf9c71a5d7ae1102c137
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorBlendedSmallGrainMask.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorBlendedSmallGrainMask.png.md5
new file mode 100644
index 0000000..602bd9c
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorBlendedSmallGrainMask.png.md5
@@ -0,0 +1 @@
+7bc97e0a1a700a70fad3f9850756635e
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorMappedSmallGrain.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorMappedSmallGrain.png.md5
new file mode 100644
index 0000000..879563c
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorMappedSmallGrain.png.md5
@@ -0,0 +1 @@
+c2cf1ae2a87068a6d26df666e1e919c4
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorMappedSmallGrainMask.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorMappedSmallGrainMask.png.md5
new file mode 100644
index 0000000..36cd913
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedColorMappedSmallGrainMask.png.md5
@@ -0,0 +1 @@
+799d172be203b70d9c6c0e0ad6bf110d
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMapped.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMapped.png.md5
new file mode 100644
index 0000000..ce37e7e
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMapped.png.md5
@@ -0,0 +1 @@
+86a47d000665ef0e7275b26f21eef5f9
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMappedSmallGrain.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMappedSmallGrain.png.md5
new file mode 100644
index 0000000..878a1db
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMappedSmallGrain.png.md5
@@ -0,0 +1 @@
+b17c538e6c671f1e9cc9a9c565ba931b
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMappedSmallVectorNormalizeOff.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMappedSmallVectorNormalizeOff.png.md5
new file mode 100644
index 0000000..754526f
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedMappedSmallVectorNormalizeOff.png.md5
@@ -0,0 +1 @@
+b055348f2ae6b27d2d753b9aa555f49a
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedSmallGrainMask.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedSmallGrainMask.png.md5
new file mode 100644
index 0000000..62b8903
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedContrastEnhancedSmallGrainMask.png.md5
@@ -0,0 +1 @@
+3a770efafe36fec811c9b40b0c3eb048
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedDefaults.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedDefaults.png.md5
new file mode 100644
index 0000000..9b2bb7a
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedDefaults.png.md5
@@ -0,0 +1 @@
+3514b7e59d35f3939085b4f29fcdd7ae
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedDefaultsColor.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedDefaultsColor.png.md5
new file mode 100644
index 0000000..f5f7cc5
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedDefaultsColor.png.md5
@@ -0,0 +1 @@
+29d06a9306e93df947830dc6d10d1ebf
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedEnhancedVectorNormalizeOff.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedEnhancedVectorNormalizeOff.png.md5
new file mode 100644
index 0000000..e8e410b
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICCurvedEnhancedVectorNormalizeOff.png.md5
@@ -0,0 +1 @@
+1970b150e894caa03048cb0804e51dae
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICMultiBlockContrastEnhancedPerlin.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICMultiBlockContrastEnhancedPerlin.png.md5
new file mode 100644
index 0000000..20ec37f
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICMultiBlockContrastEnhancedPerlin.png.md5
@@ -0,0 +1 @@
+c342466c49bda926be580f61563aaec6
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarContrastEnhanced.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarContrastEnhanced.png.md5
new file mode 100644
index 0000000..390431c
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarContrastEnhanced.png.md5
@@ -0,0 +1 @@
+1a691893f8446d6244173fdfdd8032b8
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarDefaults.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarDefaults.png.md5
new file mode 100644
index 0000000..4ca0bc3
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarDefaults.png.md5
@@ -0,0 +1 @@
+4f7101a16563c5ce602b13087c3b55b4
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOff.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOff.png.md5
new file mode 100644
index 0000000..c5c2db9
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOff.png.md5
@@ -0,0 +1 @@
+3e3b4201043fb5a0bb35390666d78631
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOffMediumGrainPerlin.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOffMediumGrainPerlin.png.md5
new file mode 100644
index 0000000..d4fb4a6
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOffMediumGrainPerlin.png.md5
@@ -0,0 +1 @@
+af520199da81350d2d967138398cea39
diff --git a/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOffMediumGrainUniform.png.md5 b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOffMediumGrainUniform.png.md5
new file mode 100644
index 0000000..2f36c7b
--- /dev/null
+++ b/Rendering/LIC/Testing/Data/Baseline/TestSurfaceLICPlanarVectorNormalizeOffMediumGrainUniform.png.md5
@@ -0,0 +1 @@
+df79f16867defc8db21c0ecfb545eaf3
diff --git a/Rendering/LIC/module.cmake b/Rendering/LIC/module.cmake
new file mode 100644
index 0000000..b336ed5
--- /dev/null
+++ b/Rendering/LIC/module.cmake
@@ -0,0 +1,15 @@
+vtk_module(vtkRenderingLIC
+  GROUPS
+    Rendering
+  DEPENDS
+    vtkIOXML
+    vtkIOLegacy
+    vtkImagingSources
+    vtkRenderingOpenGL
+  PRIVATE_DEPENDS
+    vtksys
+  TEST_DEPENDS
+    vtkTestingCore
+    vtkTestingRendering
+    vtkInteractionStyle
+  )
diff --git a/Rendering/LIC/vtkImageDataLIC2D.cxx b/Rendering/LIC/vtkImageDataLIC2D.cxx
new file mode 100644
index 0000000..c21cde2
--- /dev/null
+++ b/Rendering/LIC/vtkImageDataLIC2D.cxx
@@ -0,0 +1,722 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkImageDataLIC2D.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkImageDataLIC2D.h"
+
+#include "vtkStructuredData.h"
+#include "vtkPointData.h"
+#include "vtkCellData.h"
+#include "vtkFloatArray.h"
+#include "vtkUnsignedCharArray.h"
+#include "vtkImageData.h"
+#include "vtkImageNoiseSource.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkStructuredExtent.h"
+#include "vtkObjectFactory.h"
+#include "vtkImageDataLIC2DExtentTranslator.h"
+#include "vtkLineIntegralConvolution2D.h"
+#include "vtkFrameBufferObject2.h"
+#include "vtkRenderbuffer.h"
+#include "vtkPixelBufferObject.h"
+#include "vtkTextureObject.h"
+#include "vtkPixelExtent.h"
+#include "vtkPixelTransfer.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
+#include "vtkSmartPointer.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkShader2Collection.h"
+#include "vtkShaderProgram2.h"
+#include "vtkShader2.h"
+#include "vtkOpenGLError.h"
+
+#include <deque>
+using std::deque;
+
+#include "vtkgl.h"
+
+#define vtkImageDataLIC2DDEBUG 0
+#if (vtkImageDataLIC2DDEBUG >= 1)
+#include "vtkTextureWriter.h"
+#endif
+
+#define PRINTEXTENT(ext) \
+  ext[0] << ", " << ext[1] << ", " << ext[2] << ", " << ext[3] << ", " << ext[4] << ", " << ext[5]
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkImageDataLIC2D);
+
+//----------------------------------------------------------------------------
+vtkImageDataLIC2D::vtkImageDataLIC2D()
+{
+  this->Context = NULL;
+  this->OwnWindow = false;
+  this->OpenGLExtensionsSupported = 0;
+
+  this->MagShader = NULL;
+
+  this->Steps = 20;
+  this->StepSize = 1.0;
+  this->Magnification = 1;
+
+  this->NoiseSource = vtkImageNoiseSource::New();
+  this->NoiseSource->SetWholeExtent(0, 127, 0, 127, 0, 0);
+  this->NoiseSource->SetMinimum(0.0);
+  this->NoiseSource->SetMaximum(1.0);
+
+  this->SetNumberOfInputPorts(2);
+
+  // by default process active point vectors
+  this->SetInputArrayToProcess(
+        0,
+        0,
+        0,
+        vtkDataObject::FIELD_ASSOCIATION_POINTS,
+        vtkDataSetAttributes::VECTORS);
+
+}
+
+//----------------------------------------------------------------------------
+vtkImageDataLIC2D::~vtkImageDataLIC2D()
+{
+  this->NoiseSource->Delete();
+  this->SetContext(NULL);
+}
+
+//----------------------------------------------------------------------------
+int vtkImageDataLIC2D::SetContext(vtkRenderWindow * renWin)
+{
+  if (this->Context == renWin)
+    {
+    return this->OpenGLExtensionsSupported;
+    }
+
+  if (this->Context && this->OwnWindow)
+    {
+    this->Context->Delete();
+    }
+  this->Modified();
+  this->Context = NULL;
+  this->OwnWindow = false;
+  this->OpenGLExtensionsSupported = 0;
+  if (this->MagShader)
+    {
+    this->MagShader->Delete();
+    this->MagShader = NULL;
+    }
+
+  vtkOpenGLRenderWindow *context = vtkOpenGLRenderWindow::SafeDownCast(renWin);
+  if (context)
+    {
+    context->Render();
+    context->MakeCurrent();
+
+    bool featureSupport
+      = vtkLineIntegralConvolution2D::IsSupported(context)
+      && vtkPixelBufferObject::IsSupported(context)
+      && vtkFrameBufferObject2::IsSupported(context)
+      && vtkRenderbuffer::IsSupported(context)
+      && vtkTextureObject::IsSupported(context);
+
+    vtkOpenGLExtensionManager *manager = context->GetExtensionManager();
+
+    bool driverSupport
+       = !manager->DriverGLRendererIsOSMesa()
+       || manager->GetIgnoreDriverBugs("OSMesa bug");
+
+    if (!featureSupport || !driverSupport)
+      {
+      vtkErrorMacro("Required OpenGL extensions not supported.");
+      return 0;
+      }
+
+    this->OpenGLExtensionsSupported = 1;
+    this->Context = context;
+
+    vtkShaderProgram2 *prog = vtkShaderProgram2::New();
+    prog->SetContext(context);
+
+    vtkShader2 *tmp = vtkShader2::New();
+    tmp->SetContext(context);
+    tmp->SetType(VTK_SHADER_TYPE_FRAGMENT);
+    tmp->SetSourceCode(
+        "uniform sampler2D texVectors; "
+        "void main() "
+        "  { gl_FragData[0] = texture2D( texVectors, gl_TexCoord[0].st ); }"
+        );
+    prog->GetShaders()->AddItem(tmp);
+    tmp->Delete();
+
+    prog->Build();
+    if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+      {
+      vtkErrorMacro("failed to build the magnification fragment shader");
+      return 0;
+      }
+    this->MagShader = prog;
+    return 1;
+    }
+
+  return 0;
+}
+
+//----------------------------------------------------------------------------
+vtkRenderWindow* vtkImageDataLIC2D::GetContext()
+{
+  return this->Context;
+}
+
+//----------------------------------------------------------------------------
+int vtkImageDataLIC2D::FillInputPortInformation(int port, vtkInformation *info)
+{
+  if (!this->Superclass::FillInputPortInformation(port, info))
+    {
+    return 0;
+    }
+
+  if (port == 1)
+    {
+    info->Set(vtkAlgorithm::INPUT_IS_OPTIONAL(), 1);
+    }
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
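+// Map an extent given at the input resolution to the corresponding
+// magnified output extent: axes that are flat in the whole extent (the
+// planar direction) pass through unchanged, while each in-plane axis
+// [i0, i1] becomes [i0*Magnification, (i1+1)*Magnification - 1].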
+void vtkImageDataLIC2D::TranslateInputExtent(
+        const int* inExt,
+        const int* inWholeExt,
+        int *resultExt)
+{
+  int nPlanar = 0;
+  for (int q=0; q<3; ++q)
+    {
+    int qq = 2*q;
+    if (inWholeExt[qq] == inWholeExt[qq+1])
+      {
+      resultExt[qq] = inExt[qq];
+      resultExt[qq+1] = inExt[qq];
+      nPlanar += 1;
+      }
+    else
+      {
+      resultExt[qq] = inExt[qq] * this->Magnification;
+      resultExt[qq+1] = (inExt[qq+1] + 1) * this->Magnification - 1;
+      }
+    }
+  if (nPlanar != 1)
+    {
+    vtkErrorMacro("Non-planar dataset");
+    }
+}
+
+//----------------------------------------------------------------------------
+int vtkImageDataLIC2D::RequestInformation(
+      vtkInformation* vtkNotUsed(request),
+      vtkInformationVector** inputVector,
+      vtkInformationVector* outputVector)
+{
+  int ext[6];
+  int wholeExtent[6];
+  double spacing[3];
+
+  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
+  vtkInformation *outInfo = outputVector->GetInformationObject(0);
+
+  inInfo->Get(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), wholeExtent);
+  inInfo->Get(vtkDataObject::SPACING(), spacing);
+  vtkDebugMacro( << "Input WHOLE_EXTENT: " << PRINTEXTENT( wholeExtent ) << endl );
+  this->TranslateInputExtent(wholeExtent, wholeExtent, ext);
+
+  for (int axis = 0; axis < 3; axis++)
+    {
+    // Change the data spacing
+    spacing[axis] /= this->Magnification;
+    }
+  vtkDebugMacro( << "WHOLE_EXTENT: " << PRINTEXTENT( ext ) << endl );
+
+  outInfo->Set(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), ext, 6);
+  outInfo->Set(vtkDataObject::SPACING(), spacing, 3);
+
+  // Setup ExtentTranslator
+  vtkImageDataLIC2DExtentTranslator* extTranslator =
+    vtkImageDataLIC2DExtentTranslator::SafeDownCast(
+      vtkStreamingDemandDrivenPipeline::GetExtentTranslator(outInfo));
+
+  if (!extTranslator)
+    {
+    extTranslator = vtkImageDataLIC2DExtentTranslator::New();
+    vtkStreamingDemandDrivenPipeline::SetExtentTranslator(outInfo, extTranslator);
+    extTranslator->Delete();
+    }
+
+  extTranslator->SetAlgorithm(this);
+  extTranslator->SetInputWholeExtent(wholeExtent);
+  extTranslator->SetInputExtentTranslator(
+    vtkExtentTranslator::SafeDownCast(
+    inInfo->Get(vtkStreamingDemandDrivenPipeline::EXTENT_TRANSLATOR())));
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+int vtkImageDataLIC2D::RequestUpdateExtent (
+      vtkInformation * vtkNotUsed(request),
+      vtkInformationVector **inputVector,
+      vtkInformationVector *outputVector)
+{
+  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
+  vtkInformation *outInfo = outputVector->GetInformationObject(0);
+
+  // Tell the vector field input the extents that we need from it.
+  // The downstream request needs to be downsized based on the Magnification.
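+  // For example, with Magnification 2 a requested output extent of
+  // (0,199, 0,99, 0,0) maps to the input extent (0,99, 0,49, 0,0);
+  // the integer division truncates toward zero.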
+  int ext[6];
+  outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), ext);
+
+  vtkDebugMacro( << "Requested UPDATE_EXTENT: " <<  PRINTEXTENT( ext ) << endl );
+  for (int axis = 0; axis < 3; axis++)
+    {
+    int wholeMin = ext[axis*2];
+    int wholeMax = ext[axis*2+1];
+
+    // Scale the requested output extent down to the input resolution
+    wholeMin = wholeMin / this->Magnification;
+    wholeMax = wholeMax / this->Magnification;
+
+    ext[axis*2] = wholeMin;
+    ext[axis*2+1] = wholeMax;
+    }
+  vtkDebugMacro( << "UPDATE_EXTENT: " <<  PRINTEXTENT( ext ) << endl );
+
+  inInfo->Set(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), ext, 6);
+
+  inInfo = inputVector[1]->GetInformationObject(0);
+  if (inInfo)
+    {
+    // always request the whole noise image.
+    inInfo->Set(
+        vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(),
+        inInfo->Get(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT()),
+        6);
+    }
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+int vtkImageDataLIC2D::RequestData(
+      vtkInformation  *vtkNotUsed(request),
+      vtkInformationVector **inputVector,
+      vtkInformationVector *outputVector)
+{
+  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
+
+  vtkImageData *input
+     = vtkImageData::SafeDownCast(inInfo->Get(vtkDataObject::DATA_OBJECT()));
+  if ( !input )
+    {
+    vtkErrorMacro("Empty input");
+    return 0;
+    }
+
+  int dims[3];
+  input->GetDimensions(dims);
+
+  int dataDescription = vtkStructuredData::GetDataDescription(dims);
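+  // dataDescription identifies the plane the 2D image lies in
+  // (VTK_XY_PLANE, VTK_YZ_PLANE, or VTK_XZ_PLANE) and is used below to
+  // select the two in-plane vector components.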
+
+  if (  vtkStructuredData::GetDataDimension( dataDescription ) != 2  )
+    {
+    vtkErrorMacro( "Input is not a 2D image." );
+    return 0;
+    }
+
+  vtkIdType numPoints = input->GetNumberOfPoints();
+  vtkDataArray *inVectors = this->GetInputArrayToProcess(0, inputVector);
+  if ( !inVectors )
+    {
+    vtkErrorMacro("Vectors are required for line integral convolution.");
+    return 0;
+    }
+
+  if ( inVectors->GetNumberOfTuples() != numPoints )
+    {
+    vtkErrorMacro( "Only point vectors are supported." );
+    return 0;
+    }
+
+  if ( !this->Context )
+    {
+    vtkRenderWindow * renWin = vtkRenderWindow::New();
+    if ( this->SetContext(renWin) == 0 )
+      {
+      vtkErrorMacro("Missing required OpenGL extensions");
+      renWin->Delete();
+      return 0;
+      }
+    this->OwnWindow = true;
+    }
+
+  this->Context->MakeCurrent();
+  vtkOpenGLClearErrorMacro();
+
+  // Noise.
+  vtkInformation *noiseInfo = inputVector[1]->GetInformationObject(0);
+  vtkImageData *noise = NULL;
+  if ( noiseInfo )
+    {
+    noise
+      = vtkImageData::SafeDownCast(noiseInfo->Get(vtkDataObject::DATA_OBJECT()));
+    if ( !noise )
+      {
+      vtkErrorMacro(
+        "Invalid noise dataset on input. "
+        "Default noise dataset is used.");
+      }
+
+    if ( noise
+      && ( (noise->GetPointData()==0)
+        || (noise->GetPointData()->GetScalars()==0) ) )
+      {
+      vtkErrorMacro(
+        "Noise dataset missing point data scalars. "
+        "Default noise dataset is used.");
+      noise = NULL;
+      }
+
+    if ( noise )
+      {
+      double noiseRange[2];
+      vtkDataArray *inVals = noise->GetPointData()->GetScalars();
+      inVals->GetRange(noiseRange);
+      if ( (noiseRange[0] < 0.0) || (noiseRange[1] > 1.0) )
+        {
+        vtkErrorMacro(
+          "Noise dataset has values out of range 0.0 to 1.0. "
+          "Default noise dataset is used.");
+        noise = NULL;
+        }
+      }
+    }
+
+  if ( !noise )
+    {
+    this->NoiseSource->Update();
+    noise = this->NoiseSource->GetOutput();
+    }
+
+  int comp[3] = {0, 0, 0};
+  switch (dataDescription)
+    {
+  case VTK_XY_PLANE:
+    comp[0] = 0;
+    comp[1] = 1;
+    comp[2] = 2;
+    break;
+
+  case VTK_YZ_PLANE:
+    comp[0] = 1;
+    comp[1] = 2;
+    comp[2] = 0;
+    break;
+
+  case VTK_XZ_PLANE:
+    comp[0] = 0;
+    comp[1] = 2;
+    comp[2] = 1;
+    break;
+    }
+
+  // size of output
+  int magDims[3];
+  magDims[0] = this->Magnification*dims[0];
+  magDims[1] = this->Magnification*dims[1];
+  magDims[2] = this->Magnification*dims[2];
+
+  // send vector data to a texture
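+  // vtkPixelTransfer::Blit converts the input vector array (3 components
+  // of any VTK scalar type) into a 4-component float buffer covering the
+  // 2D plane; the buffer is staged in a pixel buffer object and used to
+  // create the vector texture below.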
+  int inputExt[6];
+  input->GetExtent(inputExt);
+
+  vtkPixelExtent inVectorExtent(dims[comp[0]], dims[comp[1]]);
+
+  vtkPixelBufferObject *vecPBO = vtkPixelBufferObject::New();
+  vecPBO->SetContext(this->Context);
+
+  vtkPixelTransfer::Blit(
+        inVectorExtent,
+        inVectorExtent,
+        inVectorExtent,
+        inVectorExtent,
+        3,
+        inVectors->GetDataType(),
+        inVectors->GetVoidPointer(0),
+        4,
+        VTK_FLOAT,
+        vecPBO->MapUnpackedBuffer(
+        VTK_FLOAT,
+        static_cast<unsigned int>(inVectorExtent.Size()),
+        4));
+
+  vecPBO->UnmapUnpackedBuffer();
+
+  vtkTextureObject *vectorTex = vtkTextureObject::New();
+  vectorTex->SetContext(this->Context);
+  vectorTex->Create2D(dims[comp[0]], dims[comp[1]], 4, vecPBO, false);
+  vtkLineIntegralConvolution2D::SetVectorTexParameters(vectorTex);
+
+  vecPBO->Delete();
+
+  #if (vtkImageDataLIC2DDEBUG >= 1)
+  vtkTextureWriter::WriteTexture(
+          "idlic2d_vectors.vtk",
+          vectorTex);
+  #endif
+
+  // magnify vectors
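+  // When Magnification > 1 the vector field is resampled to the magnified
+  // size by drawing a screen-aligned quad into an FBO whose color
+  // attachment is the magnified vector texture, sampling the original
+  // vectors through the magnification fragment shader bound on TEXTURE0.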
+  vtkPixelExtent magVectorExtent(magDims[comp[0]], magDims[comp[1]]);
+  int magVectorSize[2];
+  magVectorExtent.Size(magVectorSize);
+
+  vtkTextureObject *magVectorTex = vectorTex;
+  if (this->Magnification > 1)
+    {
+    this->MagShader->UseProgram();
+    this->MagShader->SetUniformi("texVectors", 0);
+
+    magVectorTex = vtkTextureObject::New();
+    magVectorTex->SetContext(this->Context);
+    magVectorTex->Create2D(magVectorSize[0], magVectorSize[1], 4, VTK_FLOAT, false);
+    vtkLineIntegralConvolution2D::SetVectorTexParameters(magVectorTex);
+
+    vtkFrameBufferObject2 *drawFbo = vtkFrameBufferObject2::New();
+    drawFbo->SetContext(this->Context);
+    drawFbo->SaveCurrentBindings();
+    drawFbo->Bind(vtkgl::FRAMEBUFFER_EXT);
+    drawFbo->AddColorAttachment(vtkgl::FRAMEBUFFER_EXT, 0U, magVectorTex);
+    drawFbo->ActivateDrawBuffer(0U);
+    //drawFbo->AddColorAttachment(vtkgl::FRAMEBUFFER_EXT, 0U, vectorTex);
+    //drawFbo->ActivateReadBuffer(0U);
+    drawFbo->CheckFrameBufferStatus(vtkgl::FRAMEBUFFER_EXT);
+    drawFbo->InitializeViewport(magVectorSize[0], magVectorSize[1]);
+
+    glClearColor(0.0, 0.0, 0.0, 0.0);
+    glClear(GL_COLOR_BUFFER_BIT);
+
+    vectorTex->Activate(vtkgl::TEXTURE0);
+
+    glBegin(GL_QUADS);
+    vtkgl::MultiTexCoord2f(vtkgl::TEXTURE0, 0.0f, 0.0f);
+    glVertex2f(0.0f, 0.0f);
+    vtkgl::MultiTexCoord2f(vtkgl::TEXTURE0, 1.0f, 0.0f);
+    glVertex2f((float)magVectorSize[0]+1.0f, 0.0f);
+    vtkgl::MultiTexCoord2f(vtkgl::TEXTURE0, 1.0f, 1.0f);
+    glVertex2f((float)magVectorSize[0]+1.0f, (float)magVectorSize[1]+1.0f);
+    vtkgl::MultiTexCoord2f(vtkgl::TEXTURE0, 0.0f, 1.0f);
+    glVertex2f(0.0f, (float)magVectorSize[1]+1.0f);
+    glEnd();
+
+    vectorTex->UnBind();
+    vectorTex->Delete();
+
+    drawFbo->UnBind(vtkgl::FRAMEBUFFER_EXT);
+    drawFbo->Delete();
+
+    this->MagShader->Restore();
+    }
+
+  #if (vtkImageDataLIC2DDEBUG >= 1)
+  vtkTextureWriter::WriteTexture(
+          "idlic2d_magvectors.vtk",
+          magVectorTex);
+  #endif
+
+  // send noise data to a texture
+  vtkDataArray * inNoise = noise->GetPointData()->GetScalars();
+
+  vtkPixelExtent noiseExt(noise->GetExtent());
+
+  vtkPixelBufferObject *noisePBO = vtkPixelBufferObject::New();
+  noisePBO->SetContext(this->Context);
+
+  vtkPixelTransfer::Blit(
+        noiseExt,
+        2,
+        inNoise->GetDataType(),
+        inNoise->GetVoidPointer(0),
+        VTK_FLOAT,
+        noisePBO->MapUnpackedBuffer(
+        VTK_FLOAT,
+        static_cast<unsigned int>(noiseExt.Size()),
+        2));
+
+  noisePBO->UnmapUnpackedBuffer();
+
+  int noiseTexSize[2];
+  noiseExt.Size(noiseTexSize);
+
+  vtkTextureObject *noiseTex = vtkTextureObject::New();
+  noiseTex->SetContext(this->Context);
+  noiseTex->Create2D(noiseTexSize[0], noiseTexSize[1], 2, noisePBO, false);
+
+  noisePBO->Delete();
+
+  #if (vtkImageDataLIC2DDEBUG >= 1)
+  vtkTextureWriter::WriteTexture(
+          "idlic2d_noise.vtk",
+          noiseTex);
+  #endif
+
+  // step size conversion to normalized image space
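+  // The user-specified StepSize is given in units of the cell diagonal;
+  // it is rescaled to normalized image space as
+  //   stepSize = StepSize * cellDiagonal / imageDiagonal
+  // with both diagonals computed from the magnification-adjusted spacing.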
+  // copy the spacing so the input dataset is not modified below
+  double spacing[3];
+  input->GetSpacing(spacing);
+  spacing[comp[0]] /= this->Magnification;
+  spacing[comp[1]] /= this->Magnification;
+
+  double cellLength
+    = sqrt(spacing[comp[0]]*spacing[comp[0]]+spacing[comp[1]]*spacing[comp[1]]);
+
+  double w = spacing[comp[0]]*dims[comp[0]];
+  double h = spacing[comp[1]]*dims[comp[1]];
+  double normalizationFactor = sqrt(w*w+h*h);
+  double stepSize = this->StepSize*cellLength/normalizationFactor;
+
+  // compute the LIC
+  int updateExt[6];
+  inInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), updateExt);
+
+  int magUpdateExt[6];
+  magUpdateExt[2*comp[0]] = updateExt[2*comp[0]] * this->Magnification;
+  magUpdateExt[2*comp[1]] = updateExt[2*comp[1]] * this->Magnification;
+  magUpdateExt[2*comp[2]] = updateExt[2*comp[2]];
+  magUpdateExt[2*comp[0]+1] = (updateExt[2*comp[0]+1] + 1) * this->Magnification - 1;
+  magUpdateExt[2*comp[1]+1] = (updateExt[2*comp[1]+1] + 1) * this->Magnification - 1;
+  magUpdateExt[2*comp[2]+1] = updateExt[2*comp[2]+1];
+
+  vtkPixelExtent magLicExtent(
+        magUpdateExt[2*comp[0]],
+        magUpdateExt[2*comp[0]+1],
+        magUpdateExt[2*comp[1]],
+        magUpdateExt[2*comp[1]+1]);
+
+  // add ghosts
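+  // Guard (ghost) pixels are added around the LIC extent so that
+  // streamlines leaving the local extent still read valid vector and
+  // noise data. The guard size estimates how far Steps integration steps
+  // of StepSize can travel (rk4fac), doubled for the second
+  // edge-enhanced LIC pass.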
+  double rk4fac = 3.0;
+  int nGhosts = this->Steps*this->StepSize*rk4fac;
+  nGhosts = nGhosts < 1 ? 1 : nGhosts;
+  nGhosts *= 2; // for second ee lic pass
+
+  vtkPixelExtent magLicGuardExtent(magLicExtent);
+  magLicGuardExtent.Grow(nGhosts);
+  magLicGuardExtent &= magVectorExtent;
+
+  vtkLineIntegralConvolution2D *LICer = vtkLineIntegralConvolution2D::New();
+  LICer->SetContext(this->Context);
+  LICer->SetNumberOfSteps(this->Steps);
+  LICer->SetStepSize(stepSize);
+  LICer->SetComponentIds(comp[0], comp[1]);
+  //LICer->SetGridSpacings(spacing[comp[0]], spacing[comp[1]]);
+
+  deque<vtkPixelExtent> magLicExtents(1, magLicExtent);
+  deque<vtkPixelExtent> magLicGuardExtents(1, magLicGuardExtent);
+
+  vtkTextureObject *licTex
+     = LICer->Execute(
+            magVectorExtent,
+            magLicGuardExtents,
+            magLicExtents,
+            magVectorTex,
+            NULL,
+            noiseTex);
+
+  LICer->Delete();
+  noiseTex->Delete();
+  magVectorTex->Delete();
+
+  if ( !licTex )
+    {
+    vtkErrorMacro("Failed to compute LIC");
+    return 0;
+    }
+
+  #if (vtkImageDataLIC2DDEBUG >= 1)
+  vtkTextureWriter::WriteTexture(
+          "idlic2d_lic.vtk",
+          licTex);
+  #endif
+
+  // transfer lic from texture to vtk array
+  vtkIdType nOutTups = magLicExtent.Size();
+  vtkFloatArray *licOut = vtkFloatArray::New();
+  licOut->SetNumberOfComponents(3);
+  licOut->SetNumberOfTuples(nOutTups);
+  licOut->SetName("LIC");
+
+  vtkPixelBufferObject *licPBO = licTex->Download();
+
+  vtkPixelTransfer::Blit<float, float>(
+        magVectorExtent,
+        magLicExtent,
+        magLicExtent,
+        magLicExtent,
+        4,
+        (float*)licPBO->MapPackedBuffer(),
+        3,
+        licOut->GetPointer(0));
+
+  licPBO->UnmapPackedBuffer();
+  licPBO->Delete();
+  licTex->Delete();
+
+  // mask and convert to 3-component grayscale
+  float *pLicOut = licOut->GetPointer(0);
+  for (vtkIdType i=0; i<nOutTups; ++i)
+    {
+    float lic = pLicOut[3*i];
+    float mask = pLicOut[3*i+1];
+    if ( mask )
+      {
+      pLicOut[3*i+1] = pLicOut[3*i+2] = pLicOut[3*i] = 0.0f;
+      }
+    else
+      {
+      pLicOut[3*i+1] = pLicOut[3*i+2] = lic;
+      }
+    }
+
+  // setup output
+  vtkInformation *outInfo = outputVector->GetInformationObject(0);
+  vtkImageData *output
+    = vtkImageData::SafeDownCast(outInfo->Get(vtkDataObject::DATA_OBJECT()));
+  if (!output)
+    {
+    vtkErrorMacro("Empty output");
+    return 1;
+    }
+
+  output->SetExtent(magUpdateExt);
+  output->SetSpacing(spacing);
+  output->GetPointData()->SetScalars(licOut);
+  licOut->Delete();
+
+  // Ensures that the output extent is exactly the same as what was asked for.
+  //output->Crop(outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT()));
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+void vtkImageDataLIC2D::PrintSelf( ostream & os, vtkIndent indent )
+{
+  this->Superclass::PrintSelf( os, indent );
+
+  os << indent << "Steps: "         << this->Steps          << "\n";
+  os << indent << "StepSize: "      << this->StepSize       << "\n";
+  os << indent << "Magnification: " << this->Magnification  << "\n";
+  os << indent << "OpenGLExtensionsSupported: "
+               << this->OpenGLExtensionsSupported << "\n";
+}
diff --git a/Rendering/LIC/vtkImageDataLIC2D.h b/Rendering/LIC/vtkImageDataLIC2D.h
new file mode 100644
index 0000000..11214c6
--- /dev/null
+++ b/Rendering/LIC/vtkImageDataLIC2D.h
@@ -0,0 +1,141 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkImageDataLIC2D.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkImageDataLIC2D
+//
+// .SECTION Description
+//  GPU implementation of Line Integral Convolution (LIC), a technique for
+//  imaging vector fields.
+//
+//  The input on port 0 is a vtkImageData with the extents of a 2D image. It
+//  needs a vector field on its point data; this filter only works on point
+//  vectors. Use a vtkCellDataToPointData filter to convert cell vectors to
+//  point vectors if necessary.
+//
+//  Port 1 is an optional port for a customized noise input. If no noise
+//  input is specified, the filter uses a vtkImageNoiseSource to generate a
+//  128x128 noise texture.
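+//
+//  A minimal usage sketch (illustrative only; "reader" and "renWin" are
+//  placeholder names for an upstream 2D image source with point vectors
+//  and an existing OpenGL render window, not part of this class):
+//
+//    vtkImageDataLIC2D *lic = vtkImageDataLIC2D::New();
+//    lic->SetContext(renWin);
+//    lic->SetInputConnection(reader->GetOutputPort());
+//    lic->SetSteps(40);
+//    lic->SetStepSize(1.0);
+//    lic->SetMagnification(2);
+//    lic->Update();
+//    vtkImageData *licImage = lic->GetOutput();
+//    lic->Delete();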
+//
+// .SECTION see also
+//  vtkSurfaceLICPainter vtkLineIntegralConvolution2D
+
+#ifndef __vtkImageDataLIC2D_h
+#define __vtkImageDataLIC2D_h
+
+#include "vtkRenderingLICModule.h" // For export macro
+#include "vtkImageAlgorithm.h"
+#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
+
+class vtkRenderWindow;
+class vtkImageNoiseSource;
+class vtkShaderProgram2;
+
+class VTKRENDERINGLIC_EXPORT vtkImageDataLIC2D : public vtkImageAlgorithm
+{
+public:
+  static vtkImageDataLIC2D* New();
+  vtkTypeMacro(vtkImageDataLIC2D, vtkImageAlgorithm);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Get/Set the context. Context must be a vtkOpenGLRenderWindow.
+  // This does not increase the reference count of the
+  // context to avoid reference loops.
+  // SetContext() may raise an error if the OpenGL context does not support the
+  // required OpenGL extensions. Returns 0 upon failure and 1 upon success.
+  int SetContext( vtkRenderWindow * context );
+  vtkRenderWindow * GetContext();
+
+  // Description:
+  // Number of steps. Initial value is 20.
+  // class invariant: Steps>0.
+  // In terms of visual quality, the greater the better.
+  vtkSetMacro(Steps,int);
+  vtkGetMacro(Steps,int);
+
+  // Description:
+  // Step size.
+  // Specify the step size as a unit of the cell length of the input vector
+  // field. Cell length is the length of the diagonal of a cell.
+  // Initial value is 1.0.
+  // class invariant: StepSize>0.0.
+  // In terms of visual quality, the smaller the better.
+  // The type for the interface is double because the VTK interface uses
+  // double, but the GPU only supports float. This value will be converted
+  // to float during execution of the algorithm.
+  vtkSetMacro(StepSize, double);
+  vtkGetMacro(StepSize, double);
+
+  // Description:
+  // The magnification factor. Default is 1.
+  vtkSetMacro(Magnification, int);
+  vtkGetMacro(Magnification, int);
+
+  // Description:
+  // Check if the required OpenGL extensions / GPU are supported.
+  vtkGetMacro(OpenGLExtensionsSupported, int);
+
+  void TranslateInputExtent(
+        const int* inExt,
+        const int* inWholeExtent,
+        int *outExt);
+
+//BTX
+protected:
+  vtkImageDataLIC2D();
+  ~vtkImageDataLIC2D();
+
+  virtual int RequestInformation(vtkInformation *,
+    vtkInformationVector **, vtkInformationVector *);
+
+  // Description:
+  // Fill the input port information objects for this algorithm.  This
+  // is invoked by the first call to GetInputPortInformation for each
+  // port so subclasses can specify what they can handle.
+  // Redefined from the superclass.
+  virtual int FillInputPortInformation(int port,
+                                       vtkInformation *info);
+
+  int RequestUpdateExtent (vtkInformation * vtkNotUsed(request),
+                           vtkInformationVector **inputVector,
+                           vtkInformationVector *vtkNotUsed( outputVector ));
+
+  // Description:
+  // This is called by the superclass.
+  // This is the method you should override.
+  virtual int RequestData(vtkInformation *request,
+                          vtkInformationVector **inputVector,
+                          vtkInformationVector *outputVector);
+
+  vtkWeakPointer<vtkRenderWindow> Context;
+  bool OwnWindow;
+  int OpenGLExtensionsSupported;
+
+  vtkShaderProgram2 *MagShader;
+
+  vtkImageNoiseSource* NoiseSource;
+
+  int Steps;
+  double StepSize;
+  int Magnification;
+
+
+
+private:
+  vtkImageDataLIC2D(const vtkImageDataLIC2D&); // Not implemented.
+  void operator=(const vtkImageDataLIC2D&); // Not implemented.
+//ETX
+};
+
+#endif
diff --git a/Rendering/LIC/vtkImageDataLIC2DExtentTranslator.cxx b/Rendering/LIC/vtkImageDataLIC2DExtentTranslator.cxx
new file mode 100644
index 0000000..2ba97a5
--- /dev/null
+++ b/Rendering/LIC/vtkImageDataLIC2DExtentTranslator.cxx
@@ -0,0 +1,121 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkImageDataLIC2DExtentTranslator.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkImageDataLIC2DExtentTranslator.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkImageDataLIC2D.h"
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkImageDataLIC2DExtentTranslator);
+
+//----------------------------------------------------------------------------
+vtkCxxSetObjectMacro(
+      vtkImageDataLIC2DExtentTranslator,
+      InputExtentTranslator,
+      vtkExtentTranslator);
+
+//----------------------------------------------------------------------------
+vtkImageDataLIC2DExtentTranslator::vtkImageDataLIC2DExtentTranslator()
+{
+  this->Algorithm = 0;
+  this->InputExtentTranslator = 0;
+  this->InputWholeExtent[0] =
+  this->InputWholeExtent[1] =
+  this->InputWholeExtent[2] =
+  this->InputWholeExtent[3] =
+  this->InputWholeExtent[4] =
+  this->InputWholeExtent[5] = 0;
+}
+
+//----------------------------------------------------------------------------
+vtkImageDataLIC2DExtentTranslator::~vtkImageDataLIC2DExtentTranslator()
+{
+  this->SetInputExtentTranslator(0);
+}
+
+//----------------------------------------------------------------------------
+void vtkImageDataLIC2DExtentTranslator::SetAlgorithm(
+  vtkImageDataLIC2D* alg)
+{
+  if (this->Algorithm.GetPointer() != alg)
+    {
+    this->Algorithm = alg;
+    this->Modified();
+    }
+}
+
+//----------------------------------------------------------------------------
+vtkImageDataLIC2D* vtkImageDataLIC2DExtentTranslator::GetAlgorithm()
+{
+  return this->Algorithm.GetPointer();
+}
+
+//----------------------------------------------------------------------------
+int vtkImageDataLIC2DExtentTranslator::PieceToExtentThreadSafe(
+      int piece,
+      int numPieces,
+      int ghostLevel,
+      int *wholeExtent,
+      int *resultExtent,
+      int splitMode,
+      int byPoints)
+{
+  if (!this->Algorithm)
+    {
+    return this->Superclass::PieceToExtentThreadSafe(
+          piece,
+          numPieces,
+          ghostLevel,
+          wholeExtent,
+          resultExtent,
+          splitMode,
+          byPoints);
+    }
+
+  // Let the input extent translator do the translation.
+  int inExt[6];
+  this->InputExtentTranslator->PieceToExtentThreadSafe(
+            piece,
+            numPieces,
+            ghostLevel,
+            this->InputWholeExtent,
+            inExt,
+            splitMode,
+            byPoints);
+
+  this->Algorithm->TranslateInputExtent(
+            inExt,
+            this->InputWholeExtent,
+            resultExtent);
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+void vtkImageDataLIC2DExtentTranslator::PrintSelf( ostream & os, vtkIndent indent )
+{
+  this->Superclass::PrintSelf( os, indent );
+
+  os << indent << "Algorithm: "               << this->Algorithm << endl;
+  os << indent << "InputWholeExtent: ("
+               << this->InputWholeExtent[0]   << ", "
+               << this->InputWholeExtent[1]   << ", "
+               << this->InputWholeExtent[2]   << ", "
+               << this->InputWholeExtent[3]   << ", "
+               << this->InputWholeExtent[4]   << ", "
+               << this->InputWholeExtent[5]   << ")" << endl;
+  os << indent << "InputExtentTranslator: "
+               << this->InputExtentTranslator << endl;
+}
diff --git a/Rendering/LIC/vtkImageDataLIC2DExtentTranslator.h b/Rendering/LIC/vtkImageDataLIC2DExtentTranslator.h
new file mode 100644
index 0000000..e869bb4
--- /dev/null
+++ b/Rendering/LIC/vtkImageDataLIC2DExtentTranslator.h
@@ -0,0 +1,74 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkImageDataLIC2DExtentTranslator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkImageDataLIC2DExtentTranslator
+// .SECTION Description
+// This is needed because vtkImageDataLIC2D produces larger output
+// extents when a magnification factor is set. This class calls back into
+// vtkImageDataLIC2D to do the translation.
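+// For example, with a Magnification of 2 an input piece extent of
+// (0,63, 0,63, 0,0) in the XY plane is translated to the output extent
+// (0,127, 0,127, 0,0); the flat axis is passed through unchanged.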
+
+#ifndef __vtkImageDataLIC2DExtentTranslator_h
+#define __vtkImageDataLIC2DExtentTranslator_h
+
+#include "vtkRenderingLICModule.h" // For export macro
+#include "vtkExtentTranslator.h"
+#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
+
+class vtkImageDataLIC2D;
+
+class VTKRENDERINGLIC_EXPORT vtkImageDataLIC2DExtentTranslator
+          : public vtkExtentTranslator
+{
+public:
+  static vtkImageDataLIC2DExtentTranslator* New();
+  vtkTypeMacro(vtkImageDataLIC2DExtentTranslator, vtkExtentTranslator);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Set the vtkImageDataLIC2D algorithm for which this extent translator is
+  // being used. vtkImageDataLIC2D will be called to make the translation,
+  // which is dependent on the magnification factor.
+  void SetAlgorithm(vtkImageDataLIC2D*);
+  vtkImageDataLIC2D* GetAlgorithm();
+
+  void SetInputExtentTranslator(vtkExtentTranslator*);
+  vtkGetObjectMacro(InputExtentTranslator, vtkExtentTranslator);
+
+  vtkSetVector6Macro(InputWholeExtent, int);
+  vtkGetVector6Macro(InputWholeExtent, int);
+
+  virtual int PieceToExtentThreadSafe(
+        int piece,
+        int numPieces,
+        int ghostLevel,
+        int *wholeExtent,
+        int *resultExtent,
+        int splitMode,
+        int byPoints);
+
+//BTX
+protected:
+  vtkImageDataLIC2DExtentTranslator();
+  ~vtkImageDataLIC2DExtentTranslator();
+
+  int InputWholeExtent[6];
+  vtkExtentTranslator* InputExtentTranslator;
+  vtkWeakPointer<vtkImageDataLIC2D> Algorithm;
+private:
+  vtkImageDataLIC2DExtentTranslator(const vtkImageDataLIC2DExtentTranslator&); // Not implemented.
+  void operator=(const vtkImageDataLIC2DExtentTranslator&); // Not implemented.
+//ETX
+};
+
+#endif
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D.cxx b/Rendering/LIC/vtkLineIntegralConvolution2D.cxx
new file mode 100644
index 0000000..1e5a356
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D.cxx
@@ -0,0 +1,2156 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkLineIntegralConvolution2D.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkLineIntegralConvolution2D.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkShader2.h"
+#include "vtkShaderProgram2.h"
+#include "vtkUniformVariables.h"
+#include "vtkShader2Collection.h"
+#include "vtkTextureObject.h"
+#include "vtkPixelBufferObject.h"
+#include "vtkFrameBufferObject2.h"
+#include "vtkPixelExtent.h"
+#include "vtkPainterCommunicator.h"
+#include "vtkFloatArray.h"
+#include "vtkTimerLog.h"
+#include "vtkMath.h"
+#include "vtkgl.h"
+#include "vtkOpenGLError.h"
+
+#include <vector>
+#include <string>
+#include <algorithm>
+
+using std::deque;
+using std::vector;
+using std::string;
+
+// Enable stream min/max computations. Streaming is accomplished
+// via PBO+glReadPixels to read just the regions we are updating.
+// Without streaming, PBO+glGetTexImage is used to read back the entire
+// screen-sized texture, of which (in parallel) we are updating only
+// a small part.
+#define STREAMING_MIN_MAX
+
+// If you don't explicitly bind to 0 before swapping on some
+// systems (Intel HD4000), things get wacky. NVIDIA devices
+// are fine without this.
+#define NOT_NVIDIA
+
+// Here we have to set up the active textures *before* calling UseProgram;
+// this looks like a bug in the Intel driver.
+// Intel GL 4.0.0 - Build 9.17.10.2932 GLSL 4.00 - Build 9.17.10.2932
+#define INTEL_BUG
+
+// if non-zero, write intermediate results to disk
+// for debugging (1: results, 2: +steps, 3: +fbo status)
+#define vtkLineIntegralConvolution2DDEBUG 0
+#if (vtkLineIntegralConvolution2DDEBUG >= 1)
+#include "vtkTextureIO.h"
+#include <sstream>
+using std::ostringstream;
+//----------------------------------------------------------------------------
+static
+string mpifn(int rank, const char *fn)
+{
+  ostringstream oss;
+  oss << rank << "_" << fn;
+  return oss.str();
+}
+#endif
+#if vtkLineIntegralConvolution2DDEBUG >= 3
+#define vtkLICCheckFrameBufferStatusMacro(mode) vtkCheckFrameBufferStatusMacro(mode)
+#else
+#define vtkLICCheckFrameBufferStatusMacro(mode)
+#endif
+
+// shader sources
+extern const char *vtkLineIntegralConvolution2D_VT;   // normalized image space transform
+extern const char *vtkLineIntegralConvolution2D_LIC0; // initialization for lic
+extern const char *vtkLineIntegralConvolution2D_LICI; // compute i'th lic step
+extern const char *vtkLineIntegralConvolution2D_LICN; // finalize lic
+extern const char *vtkLineIntegralConvolution2D_EE;   // Laplace edge-enhance
+extern const char *vtkLineIntegralConvolution2D_CE;   // contrast enhance
+extern const char *vtkLineIntegralConvolution2D_AAH;  // horizontal part of anti-alias filter
+extern const char *vtkLineIntegralConvolution2D_AAV;  // vertical part of anti-alias filter
+
+#if defined(NDEBUG) || (vtkLineIntegralConvolution2DDEBUG < 3)
+# define DEBUG3CheckFrameBufferStatusMacro(mode)
+#else
+# define DEBUG3CheckFrameBufferStatusMacro(mode) \
+    vtkStaticCheckFrameBufferStatusMacro(mode)
+#endif
+
+/// vtkLICPingPongBufferManager -- gpgpu buffer manager
+/**
+Helper that manages state for the ping-pong buffer strategy
+employed during LIC integration. This class encapsulates all
+of the knowledge of our use of the FBO and texture units. Care
+is taken to avoid feedback loops.
+*/
+class vtkLICPingPongBufferManager
+{
+public:
+  vtkLICPingPongBufferManager(
+      vtkFrameBufferObject2 *fbo,
+      unsigned int *bufSize,
+      vtkTextureObject *vectorTexture,
+      vtkTextureObject *maskVectorTexture,
+      vtkTextureObject *noiseTexture,
+      int doEEPass,
+      int doVTPass)
+    {
+    this->VectorTexture = vectorTexture;
+    this->MaskVectorTexture = maskVectorTexture;
+    this->NoiseTexture = noiseTexture;
+
+    // allocate buffers
+    vtkRenderWindow *context = fbo->GetContext();
+    this->LICTexture0 = this->AllocateLICBuffer(context, bufSize);
+    this->SeedTexture0 = this->AllocateLICBuffer(context, bufSize);
+    this->LICTexture1 = this->AllocateLICBuffer(context, bufSize);
+    this->SeedTexture1 = this->AllocateLICBuffer(context, bufSize);
+    this->EETexture = doEEPass ? this->AllocateNoiseBuffer(context, bufSize) : NULL;
+    this->ImageVectorTexture = doVTPass ? this->AllocateVectorBuffer(context, bufSize) : NULL;
+
+    this->DettachBuffers();
+
+    // setup pairs for buffer ping-pong
+    this->PingTextures[0] = this->LICTexture0->GetHandle();
+    this->PingTextures[1] = this->SeedTexture0->GetHandle();
+
+    this->PongTextures[0] = this->LICTexture1->GetHandle();
+    this->PongTextures[1] = this->SeedTexture1->GetHandle();
+
+    this->Textures[0] = this->PingTextures;
+    this->Textures[1] = this->PongTextures;
+
+    this->ReadIndex = 0;
+
+    #if vtkLineIntegralConvolution2DDEBUG >= 3
+    this->Print(cerr);
+    #endif
+    }
+
+  ~vtkLICPingPongBufferManager()
+    {
+    // free buffers
+    this->LICTexture0->Delete();
+    this->SeedTexture0->Delete();
+    this->LICTexture1->Delete();
+    this->SeedTexture1->Delete();
+    if (this->EETexture)
+      {
+      this->EETexture->Delete();
+      }
+    if (this->ImageVectorTexture)
+      {
+      this->ImageVectorTexture->Delete();
+      }
+    }
+
+  // Description:
+  // Get the unit/unit id for the given texture
+  // Here is how we use texture units.
+  //   name         | unit
+  //   -------------+--------
+  //   vectors      | 0
+  //   mask vectors | 0/1
+  //   noise        | 2
+  //   lic          | 3
+  //   seeds        | 4
+  int GetVectorTextureUnit(){ return 0; }
+  int GetMaskVectorTextureUnit(){ return this->MaskVectorUnit; }
+  int GetNoiseTextureUnit(){ return 2; }
+  int GetLICTextureUnit(){ return 3; }
+  int GetSeedTextureUnit(){ return 4; }
+
+  // Description:
+  // Switch input and output buffers
+  void Swap(){ this->ReadIndex = 1 - this->ReadIndex; }
+
+  // Description:
+  // Get the last output (assumes a swap has been done).
+  vtkTextureObject *GetLastLICBuffer()
+    {
+    return this->ReadIndex == 0 ? this->LICTexture0 : this->LICTexture1;
+    }
+
+  // Description:
+  // Get the last output (assumes a swap has been done).
+  vtkTextureObject *GetLastSeedBuffer()
+    {
+    return this->ReadIndex == 0 ? this->SeedTexture0 : this->SeedTexture1;
+    }
+
+  // Description:
+  // Get the current write buffer (the output of the pass in progress).
+  vtkTextureObject *GetLICBuffer()
+    {
+    return 1-this->ReadIndex == 0 ? this->LICTexture0 : this->LICTexture1;
+    }
+
+  // Description:
+  // Get the current seed write buffer (the output of the pass in progress).
+  vtkTextureObject *GetSeedBuffer()
+    {
+    return 1-this->ReadIndex == 0 ? this->SeedTexture0 : this->SeedTexture1;
+    }
+
+  // Description:
+  // Clear all the buffers used for writing.
+  void ClearBuffers(
+        vtkFrameBufferObject2 *fbo,
+        const vtkPixelExtent &viewExt,
+        const deque<vtkPixelExtent> &extents,
+        int clearEETex = 0)
+    {
+    //attach
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U, this->LICTexture0);
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 1U, this->SeedTexture0);
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 2U, this->LICTexture1);
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 3U, this->SeedTexture1);
+    unsigned int num = 4U;
+    if (clearEETex)
+      {
+      fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 4U, this->EETexture);
+      num = 5U;
+      }
+    fbo->ActivateDrawBuffers(num);
+    DEBUG3CheckFrameBufferStatusMacro(vtkgl::DRAW_FRAMEBUFFER_EXT);
+
+    // clear the parts of the screen which we will modify
+    // initially mask all fragments
+    glClearColor(0.0, 1.0, 0.0, 0.0);
+    #if 0
+    glClear(GL_COLOR_BUFFER_BIT);
+    #else
+    glEnable(GL_SCISSOR_TEST);
+    size_t nBlocks = extents.size();
+    for (size_t e=0; e<nBlocks; ++e)
+      {
+      vtkPixelExtent ext = extents[e];
+      // add halo for linear filtering
+      // since linear filtering requires at most
+      // 4 pixels, clearing an extra 4 here
+      // ensures we never access uninitialized
+      // memory.
+      ext.Grow(4);
+      ext &= viewExt;
+
+      unsigned int extSize[2];
+      ext.Size(extSize);
+
+      glScissor(ext[0], ext[2], extSize[0], extSize[1]);
+      glClear(GL_COLOR_BUFFER_BIT);
+      }
+    glDisable(GL_SCISSOR_TEST);
+    #endif
+    // detach
+    fbo->RemoveTexColorAttachments(vtkgl::DRAW_FRAMEBUFFER_EXT, num);
+    fbo->DeactivateDrawBuffers();
+    }
+
+  // Description:
+  // Clear the given buffer
+  void ClearBuffer(
+        vtkFrameBufferObject2 *fbo,
+        vtkTextureObject *tex,
+        const vtkPixelExtent &viewExt,
+        const deque<vtkPixelExtent> &extents)
+    {
+    //attach
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U, tex);
+    fbo->ActivateDrawBuffers(1);
+    DEBUG3CheckFrameBufferStatusMacro(vtkgl::DRAW_FRAMEBUFFER_EXT);
+
+    // clear the parts of the screen which we will modify
+    // initially mask all fragments
+    glClearColor(0.0, 1.0, 0.0, 0.0);
+    #if 0
+    glClear(GL_COLOR_BUFFER_BIT);
+    #else
+    glEnable(GL_SCISSOR_TEST);
+    size_t nBlocks = extents.size();
+    for (size_t e=0; e<nBlocks; ++e)
+      {
+      vtkPixelExtent ext = extents[e];
+      // add halo for linear filtering
+      // since linear filtering requires at most
+      // 4 pixels, clearing an extra 4 here
+      // ensures we never access uninitialized
+      // memory.
+      ext.Grow(4);
+      ext &= viewExt;
+
+      unsigned int extSize[2];
+      ext.Size(extSize);
+
+      glScissor(ext[0], ext[2], extSize[0], extSize[1]);
+      glClear(GL_COLOR_BUFFER_BIT);
+      }
+    glDisable(GL_SCISSOR_TEST);
+    #endif
+    // detach
+    fbo->RemoveTexColorAttachments(vtkgl::DRAW_FRAMEBUFFER_EXT, 1);
+    fbo->DeactivateDrawBuffers();
+    }
+
+  // Description:
+  // Activates the input textures. These are read only.
+  void AttachVectorTextures()
+    {
+    // vector
+    vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+    if (this->ImageVectorTexture)
+      {
+      glBindTexture( GL_TEXTURE_2D, this->ImageVectorTexture->GetHandle());
+      }
+    else
+      {
+      glBindTexture( GL_TEXTURE_2D, this->VectorTexture->GetHandle());
+      }
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+
+    // mask vectors (optional)
+    vtkgl::ActiveTexture(vtkgl::TEXTURE1);
+    if (this->MaskVectorTexture)
+      {
+      glBindTexture(GL_TEXTURE_2D, this->MaskVectorTexture->GetHandle());
+      vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+      this->MaskVectorUnit = 1;
+      }
+    else
+      {
+      glBindTexture(GL_TEXTURE_2D, 0);
+      this->MaskVectorUnit = 0;
+      }
+    }
+
+  // Description:
+  // Deactivates the input vector textures.
+  void DettachVectorTextures()
+    {
+    vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, 0);
+
+    vtkgl::ActiveTexture(vtkgl::TEXTURE1);
+    glBindTexture(GL_TEXTURE_2D, 0);
+    }
+
+  // Description:
+  // Activate the read only noise texture. It's active for
+  // the entirety of each LIC pass.
+  void AttachNoiseTexture(int LICPassNum = 0)
+    {
+    switch (LICPassNum)
+      {
+      case 0:
+        this->NoiseTexture->Activate(vtkgl::TEXTURE2);
+        break;
+      case 1:
+        this->EETexture->Activate(vtkgl::TEXTURE2);
+        break;
+      }
+    }
+
+  // Description:
+  // Deactivate the input noise texture.
+  void DettachNoiseTexture()
+    {
+    vtkgl::ActiveTexture(vtkgl::TEXTURE2);
+    glBindTexture(GL_TEXTURE_2D, 0);
+    }
+
+  // Description:
+  // Setup read/write from/to the active lic/seed buffer texture pair
+  // for LIC pass.
+  void AttachLICBuffers()
+    {
+    unsigned int *readTex = this->Textures[this->ReadIndex];
+    vtkgl::ActiveTexture(vtkgl::TEXTURE3);
+    glBindTexture(GL_TEXTURE_2D, readTex[0]);
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+
+    vtkgl::ActiveTexture(vtkgl::TEXTURE4);
+    glBindTexture(GL_TEXTURE_2D, readTex[1]);
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+
+    unsigned int *writeTex = this->Textures[1-this->ReadIndex];
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT0,
+          GL_TEXTURE_2D,
+          writeTex[0],
+          0);
+    vtkOpenGLStaticCheckErrorMacro("failed at glFramebuffereadTexture2D");
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT1,
+          GL_TEXTURE_2D,
+          writeTex[1],
+          0);
+    vtkOpenGLStaticCheckErrorMacro("failed at glFramebuffereadTexture2D");
+
+    GLenum atts[2] = {
+          vtkgl::COLOR_ATTACHMENT0,
+          vtkgl::COLOR_ATTACHMENT1
+          };
+    vtkgl::DrawBuffers(2, atts);
+    vtkOpenGLStaticCheckErrorMacro("failed at glDrawBuffers");
+
+    DEBUG3CheckFrameBufferStatusMacro(vtkgl::DRAW_FRAMEBUFFER_EXT);
+    }
+
+  // Description:
+  // Remove input/output buffers used for computing the LIC.
+  void DettachLICBuffers()
+    {
+    vtkgl::ActiveTexture(vtkgl::TEXTURE3);
+    glBindTexture(GL_TEXTURE_2D, 0);
+
+    vtkgl::ActiveTexture(vtkgl::TEXTURE4);
+    glBindTexture(GL_TEXTURE_2D, 0);
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT0,
+          GL_TEXTURE_2D,
+          0U,
+          0);
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT1,
+          GL_TEXTURE_2D,
+          0U,
+          0);
+
+    GLenum atts[1] = {GL_NONE};
+    vtkgl::DrawBuffers(1, atts);
+    vtkOpenGLStaticCheckErrorMacro("failed at glDrawBuffers");
+    }
+
+  // Description:
+  // Attach read/write buffers for transform pass.
+  void AttachImageVectorBuffer()
+    {
+    vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, this->VectorTexture->GetHandle());
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT0,
+          GL_TEXTURE_2D,
+          this->ImageVectorTexture->GetHandle(),
+          0);
+    vtkOpenGLStaticCheckErrorMacro("failed at glFramebufferTexture2D");
+
+    GLenum atts[1] = {vtkgl::COLOR_ATTACHMENT0};
+    vtkgl::DrawBuffers(1, atts);
+    vtkOpenGLStaticCheckErrorMacro("failed at glDrawBuffers");
+
+    DEBUG3CheckFrameBufferStatusMacro(vtkgl::DRAW_FRAMEBUFFER_EXT);
+    }
+
+  // Description:
+  // Detach the read/write buffers used for the transform pass.
+  void DettachImageVectorBuffer()
+    {
+    vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, 0);
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT0,
+          GL_TEXTURE_2D,
+          0U,
+          0);
+
+    GLenum atts[1] = {GL_NONE};
+    vtkgl::DrawBuffers(1, atts);
+    vtkOpenGLStaticCheckErrorMacro("failed at glDrawBuffers");
+    }
+
+  // Description:
+  // Attach read/write buffers for EE pass.
+  void AttachEEBuffer()
+    {
+    unsigned int *readTex = this->Textures[this->ReadIndex];
+    vtkgl::ActiveTexture(vtkgl::TEXTURE3);
+    glBindTexture(GL_TEXTURE_2D, readTex[0]);
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT0,
+          GL_TEXTURE_2D,
+          this->EETexture->GetHandle(),
+          0);
+    vtkOpenGLStaticCheckErrorMacro("failed at glFramebufferTexture2D");
+
+    GLenum atts[1] = {vtkgl::COLOR_ATTACHMENT0};
+    vtkgl::DrawBuffers(1, atts);
+    vtkOpenGLStaticCheckErrorMacro("failed at glDrawBuffers");
+
+    DEBUG3CheckFrameBufferStatusMacro(vtkgl::DRAW_FRAMEBUFFER_EXT);
+    }
+
+  // Description:
+  // Detach the read/write buffers used for the EE pass.
+  void DettachEEBuffer()
+    {
+    vtkgl::ActiveTexture(vtkgl::TEXTURE3);
+    glBindTexture(GL_TEXTURE_2D, 0);
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT0,
+          GL_TEXTURE_2D,
+          0U,
+          0);
+
+    GLenum atts[1] = {GL_NONE};
+    vtkgl::DrawBuffers(1, atts);
+    vtkOpenGLStaticCheckErrorMacro("failed at glDrawBuffers");
+    }
+
+  // Description:
+  // Deactivates and removes all read/write buffers that were in
+  // use during the run, restoring a pristine FBO/texture unit state.
+  void DettachBuffers()
+    {
+    vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+    glBindTexture(GL_TEXTURE_2D, 0U);
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+    vtkgl::ActiveTexture(vtkgl::TEXTURE1);
+    glBindTexture(GL_TEXTURE_2D, 0U);
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+    vtkgl::ActiveTexture(vtkgl::TEXTURE2);
+    glBindTexture(GL_TEXTURE_2D, 0U);
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+    vtkgl::ActiveTexture(vtkgl::TEXTURE3);
+    glBindTexture(GL_TEXTURE_2D, 0U);
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+    vtkgl::ActiveTexture(vtkgl::TEXTURE4);
+    glBindTexture(GL_TEXTURE_2D, 0U);
+    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT0,
+          GL_TEXTURE_2D,
+          0U,
+          0);
+    vtkOpenGLStaticCheckErrorMacro("failed at glFramebufferTexture2D");
+
+    vtkgl::FramebufferTexture2DEXT(
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          vtkgl::COLOR_ATTACHMENT1,
+          GL_TEXTURE_2D,
+          0U,
+          0);
+    vtkOpenGLStaticCheckErrorMacro("failed at glFramebufferTexture2D");
+
+    GLenum none = GL_NONE;
+    vtkgl::DrawBuffers(1, &none);
+    vtkOpenGLStaticCheckErrorMacro("failed at glDrawBuffers");
+    }
+
+  // Description:
+  // Get the read/write ids
+  int GetReadIndex(){ return this->ReadIndex; }
+  int GetWriteIndex(){ return 1 - this->ReadIndex; }
+
+  // Description:
+  // Allocate a texture of the given size,
+  // with parameters for LIC lookups.
+  vtkTextureObject *AllocateLICBuffer(
+        vtkRenderWindow *context,
+        unsigned int texSize[2])
+    {
+    float border[4] = {0.0f, 1.0f, 0.0f, 0.0f};
+    return this->AllocateBuffer(
+          context,
+          texSize,
+          vtkTextureObject::Nearest,
+          vtkTextureObject::ClampToBorder,
+          border);
+    }
+
+  // Description:
+  // Allocate a texture of the given size,
+  // with parameters for noise lookups.
+  vtkTextureObject *AllocateNoiseBuffer(
+        vtkRenderWindow *context,
+        unsigned int texSize[2])
+    {
+    float border[4] = {0.0f, 0.0f, 0.0f, 0.0f};
+    return this->AllocateBuffer(
+          context,
+          texSize,
+          vtkTextureObject::Nearest,
+          vtkTextureObject::ClampToEdge,
+          border);
+    }
+
+  // Description:
+  // Allocate a texture of the given size,
+  // with parameters for vector lookups.
+  vtkTextureObject *AllocateVectorBuffer(
+        vtkRenderWindow *context,
+        unsigned int texSize[2])
+    {
+    float border[4] = {0.0f, 0.0f, 0.0f, 0.0f};
+    return this->AllocateBuffer(
+          context,
+          texSize,
+          vtkTextureObject::Linear,
+          vtkTextureObject::ClampToBorder,
+          border);
+    }
+
+  // Description:
+  // Allocate a texture of the given size.
+  vtkTextureObject *AllocateBuffer(
+        vtkRenderWindow *context,
+        unsigned int texSize[2],
+        int filter,
+        int wrapping,
+        float *borderColor)
+    {
+    vtkTextureObject *tex = vtkTextureObject::New();
+    tex->SetContext(context);
+    tex->SetBaseLevel(0);
+    tex->SetMaxLevel(0);
+    tex->SetBorderColor(borderColor);
+    tex->SetWrapS(wrapping);
+    tex->SetWrapT(wrapping);
+    tex->SetMinificationFilter(filter);  // no guard pixels
+    tex->SetMagnificationFilter(filter); // no guard pixels
+    tex->Create2D(texSize[0], texSize[1], 4, VTK_FLOAT, false);
+    tex->SetAutoParameters(0);
+    return tex;
+    }
+
+  // Description:
+  // Render screen aligned quad. Texture coordinates are
+  // always assigned on TEXTURE0; this is hardcoded in the
+  // shaders.
+  void RenderQuad(
+          float computeBoundsPt0[2],
+          float computeBoundsPt1[2],
+          vtkPixelExtent computeExtent)
+    {
+    float computeBounds[4] = {
+          computeBoundsPt0[0], computeBoundsPt1[0],
+          computeBoundsPt0[1], computeBoundsPt1[1]
+          };
+     this->RenderQuad(computeBounds, computeExtent);
+    }
+
+  // Description:
+  // Render screen aligned quad. Texture coordinates are
+  // always assigned on TEXTURE0; this is hardcoded in the
+  // shaders.
+  void RenderQuad(
+          float computeBounds[4],
+          vtkPixelExtent computeExtent)
+    {
+    int quadPtIds[8] = {0,2, 1,2, 1,3, 0,3};
+
+    float quadBounds[4];
+    computeExtent.CellToNode();
+    computeExtent.GetData(quadBounds);
+
+    glBegin(GL_QUADS);
+    for (int q=0; q<4; ++q)
+      {
+      int qq = 2*q;
+
+      vtkgl::MultiTexCoord2f(
+            vtkgl::TEXTURE0,
+            computeBounds[quadPtIds[qq]],
+            computeBounds[quadPtIds[qq+1]]);
+
+      glVertex2f(
+            quadBounds[quadPtIds[qq]],
+            quadBounds[quadPtIds[qq+1]]);
+      }
+    glEnd();
+    vtkOpenGLStaticCheckErrorMacro("failed at render quad");
+    }
+
+  #if (vtkLineIntegralConvolution2DDEBUG >= 1)
+  // Description:
+  // Write the last output buffers to disk (assumes a swap has
+  // already been done)
+  void WriteBuffers(
+      int rank,
+      const char *licFileName,
+      const char *seedFileName,
+      const deque<vtkPixelExtent>& exts)
+    {
+    if (licFileName)
+      {
+      vtkTextureIO::Write(
+              mpifn(rank, licFileName),
+              this->GetLastLICBuffer(),
+              exts);
+      }
+    if (seedFileName)
+      {
+      vtkTextureIO::Write(
+              mpifn(rank, seedFileName),
+              this->GetLastSeedBuffer(),
+              exts);
+      }
+    }
+  void WriteEEBuffer(int rank, const deque<vtkPixelExtent> &exts)
+    {
+    vtkTextureIO::Write(
+          mpifn(rank,"lic2d_ee.vtm"),
+          this->EETexture,
+          exts);
+    }
+  void WriteImageVectorBuffer(int rank, const deque<vtkPixelExtent> &exts)
+    {
+    vtkTextureIO::Write(
+          mpifn(rank,"lic2d_ivec.vtm"),
+          this->ImageVectorTexture,
+          exts);
+    }
+  void WriteInputs(int rank, const deque<vtkPixelExtent>& exts)
+    {
+    vtkTextureIO::Write(
+              mpifn(rank,"lic2d_vec.vtm"),
+              this->VectorTexture,
+              exts);
+    if (this->MaskVectorTexture)
+      {
+      vtkTextureIO::Write(
+                mpifn(rank,"lic2d_mask.vtm"),
+                this->MaskVectorTexture,
+                exts);
+      }
+    vtkTextureIO::Write(
+              mpifn(rank,"lic2d_noise.vtk"),
+              this->NoiseTexture);
+    }
+
+  // Description:
+  // Print current state to the given stream
+  void Print(ostream &os)
+    {
+    os
+      << "Vectors = " << this->VectorTexture->GetHandle() << endl
+      << "ImageVectors = " << this->ImageVectorTexture->GetHandle() << endl
+      << "MaskVectors = " << (this->MaskVectorTexture ? this->MaskVectorTexture->GetHandle() : 0U) << endl
+      << "Noise = " << this->NoiseTexture->GetHandle() << endl
+      << "EE = " << (this->EETexture ? this->EETexture->GetHandle() : 0U) << endl
+      << "LIC0 = " << this->LICTexture0->GetHandle() << endl
+      << "Seed0 = " << this->SeedTexture0->GetHandle() << endl
+      << "LIC1 = " << this->LICTexture1->GetHandle() << endl
+      << "Seed1 = " << this->SeedTexture1->GetHandle() << endl
+      << "ReadIndex=" << this->ReadIndex << endl
+      << "PingTextures[0]=" << this->Textures[0][0] << ", " << this->Textures[0][1] << endl
+      << "PongTextures[1]=" << this->Textures[1][0] << ", " << this->Textures[1][1] << endl;
+    }
+  #endif
+
+private:
+  vtkTextureObject *VectorTexture;
+  vtkTextureObject *ImageVectorTexture;
+  vtkTextureObject *MaskVectorTexture;
+  vtkTextureObject *NoiseTexture;
+  vtkTextureObject *EETexture;
+  vtkTextureObject *LICTexture0;
+  vtkTextureObject *SeedTexture0;
+  vtkTextureObject *LICTexture1;
+  vtkTextureObject *SeedTexture1;
+  int MaskVectorUnit;
+
+  int  ReadIndex;
+  unsigned int PingTextures[2];
+  unsigned int PongTextures[2];
+  unsigned int *Textures[2];
+};
+
+namespace vtkLineIntegralConvolution2DUtil
+{
+/**
+GLSL shader code for selecting vector components.
+*/
+string GetComponentSelectionProgram(int *compIds)
+{
+  // the component swizzles go at string positions 45 and 46
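+  // e.g. for compIds = {0, 1} the returned source reads
+  //   "vec2 getSelectedComponents(vec4 V){ return V.xy; }"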
+  string srcCode("vec2 getSelectedComponents(vec4 V){ return V.$$; }");
+  const char *compNames = "xyzw";
+  srcCode[45] = compNames[compIds[0]];
+  srcCode[46] = compNames[compIds[1]];
+  return srcCode;
+}
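
For illustration, a minimal standalone sketch of the swizzle patching performed by GetComponentSelectionProgram (editorial; not part of the upstream file, and the helper name MakeComponentSelectionSource is hypothetical). Offsets 45 and 46 index the two '$' placeholders in the template, so component ids {0, 2} yield "vec2 getSelectedComponents(vec4 V){ return V.xz; }".

    #include <cassert>
    #include <iostream>
    #include <string>

    // Hypothetical standalone copy of the swizzle generator, kept here only to
    // show the effect of patching the template at character offsets 45 and 46.
    static std::string MakeComponentSelectionSource(const int compIds[2])
    {
      std::string src("vec2 getSelectedComponents(vec4 V){ return V.$$; }");
      const char *compNames = "xyzw";
      src[45] = compNames[compIds[0]];
      src[46] = compNames[compIds[1]];
      return src;
    }

    int main()
    {
      const int ids[2] = {0, 2}; // select the x and z components of the vector
      std::string src = MakeComponentSelectionSource(ids);
      assert(src == "vec2 getSelectedComponents(vec4 V){ return V.xz; }");
      std::cout << src << std::endl;
      return 0;
    }
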
+
+/*
+Shader code for looking up vectors
+*/
+const char *GetVectorLookupProgram(int normalize)
+{
+  // lookup the vector and normalize
+  const char *getNormVecSrc = " \
+    uniform sampler2D texVectors;\n \
+    vec2 getVector( vec2 vectc )\n \
+      {\n \
+      vec2 V = texture2D( texVectors, vectc ).xy;\n \
+      // normalize if |V| not 0\n \
+      float lenV = length( V );\n \
+      if ( lenV > 1.0e-8 )\n \
+        {\n \
+        return V/lenV;\n \
+        }\n \
+      else\n \
+        {\n \
+        return vec2( 0.0, 0.0 );\n \
+        }\n \
+      }\n \
+    ";
+
+   // lookup the vector
+   const char *getVecSrc = " \
+    uniform sampler2D texVectors;\n \
+    vec2 getVector( vec2 vectc )\n \
+      {\n \
+      return texture2D( texVectors, vectc ).xy;\n \
+      }\n \
+    ";
+
+  if ( normalize )
+    {
+    return getNormVecSrc;
+    }
+  return getVecSrc;
+}
+
+// Description:
+// find min/max of unmasked fragments across all regions
+// download the entire screen then search each region
+void FindMinMax(
+      vtkTextureObject *tex,
+      const deque<vtkPixelExtent> &extents,
+      float &min,
+      float &max)
+{
+  // download entire screen
+  int size0 = tex->GetWidth();
+  vtkPixelBufferObject *colors = tex->Download();
+  float *pColors = static_cast<float*>(colors->MapPackedBuffer());
+  // search each region
+  size_t nExtents = extents.size();
+  for (size_t q=0; q<nExtents; ++q)
+    {
+    const vtkPixelExtent &extent = extents[q];
+    for (int j=extent[2]; j<=extent[3]; ++j)
+      {
+      for (int i=extent[0]; i<=extent[1]; ++i)
+        {
+        int id = 4*(j*size0+i);
+        bool masked = pColors[id+1] != 0.0f;
+        bool ceskip = pColors[id+2] != 0.0f;
+        if ( !masked && !ceskip )
+          {
+          float color = pColors[id];
+          min = min > color ? color : min;
+          max = max < color ? color : max;
+          }
+        }
+      }
+    }
+  colors->UnmapPackedBuffer();
+  colors->Delete();
+  #if  vtkLineIntegralConvolution2DDEBUG>=1
+  cerr << "min=" << min << " max=" << max << endl;
+  #endif
+}
+
+// Description:
+// find min/max of unmasked fragments across all regions
+// download and search each region individually
+void StreamingFindMinMax(
+      vtkFrameBufferObject2 *fbo,
+      vtkTextureObject *tex,
+      const deque<vtkPixelExtent> &extents,
+      float &min,
+      float &max)
+{
+  size_t nExtents = extents.size();
+  // initiate download of each region
+  fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U, tex);
+  fbo->AddColorAttachment(vtkgl::READ_FRAMEBUFFER_EXT, 0U, tex);
+  fbo->ActivateDrawBuffer(0U);
+  fbo->ActivateReadBuffer(0U);
+  fbo->CheckFrameBufferStatus(vtkgl::FRAMEBUFFER_EXT);
+  vector<vtkPixelBufferObject*> pbos(nExtents, NULL);
+  for (size_t q=0; q<nExtents; ++q)
+    {
+    pbos[q] = fbo->Download(
+          const_cast<int*>(extents[q].GetData()),
+          VTK_FLOAT,
+          4,
+          GL_FLOAT,
+          GL_RGBA);
+    }
+  fbo->DeactivateDrawBuffers();
+  fbo->DeactivateReadBuffer();
+  fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U);
+  fbo->RemoveTexColorAttachment(vtkgl::READ_FRAMEBUFFER_EXT, 0U);
+  // search each region
+  for (size_t q=0; q<nExtents; ++q)
+    {
+    vtkPixelBufferObject *&pbo = pbos[q];
+    float *pColors = (float*)pbo->MapPackedBuffer();
+
+    size_t n = extents[q].Size();
+    for (size_t i = 0; i<n; ++i)
+      {
+      bool masked = pColors[4*i+1] != 0.0f;
+      bool ceskip = pColors[4*i+2] != 0.0f;
+      if ( !masked && !ceskip )
+        {
+        float color = pColors[4*i];
+        min = min > color ? color : min;
+        max = max < color ? color : max;
+        }
+      }
+
+    pbo->UnmapPackedBuffer();
+    pbo->Delete();
+    pbo = NULL;
+    }
+  pbos.clear();
+  #if  vtkLineIntegralConvolution2DDEBUG >= 1
+  cerr << "min=" << min << " max=" << max << endl;
+  #endif
+}
+
+};
+using namespace vtkLineIntegralConvolution2DUtil;
+
+// ----------------------------------------------------------------------------
+vtkObjectFactoryNewMacro(vtkLineIntegralConvolution2D);
+
+// ----------------------------------------------------------------------------
+vtkLineIntegralConvolution2D::vtkLineIntegralConvolution2D()
+{
+  this->Comm = NULL;
+
+  this->Context = NULL;
+  this->FBO = vtkFrameBufferObject2::New();
+
+  this->ShadersNeedBuild = 1;
+  this->VTShader = NULL;
+  this->LIC0Shader = NULL;
+  this->LICIShader = NULL;
+  this->LICNShader = NULL;
+  this->EEShader = NULL;
+  this->CEShader = NULL;
+  this->AAHShader = NULL;
+  this->AAVShader = NULL;
+
+  this->StepSize = 0.01;
+  this->NumberOfSteps = 1;
+  this->NormalizeVectors = 1;
+  this->ComponentIds[0] = 0;
+  this->ComponentIds[1] = 1;
+
+  this->EnhancedLIC = 1;
+
+  this->EnhanceContrast = 0;
+  this->LowContrastEnhancementFactor = 0.0;
+  this->HighContrastEnhancementFactor = 0.0;
+  this->AntiAlias = 0;
+  this->MaskThreshold = 0.0;
+
+  this->TransformVectors = 1;
+}
+
+// ----------------------------------------------------------------------------
+vtkLineIntegralConvolution2D::~vtkLineIntegralConvolution2D()
+{
+  if (this->Comm)
+    {
+    delete this->Comm;
+    this->Comm = NULL;
+    }
+  this->SetContext(NULL);
+  this->SetVTShader(NULL);
+  this->SetLIC0Shader(NULL);
+  this->SetLICIShader(NULL);
+  this->SetLICNShader(NULL);
+  this->SetEEShader(NULL);
+  this->SetCEShader(NULL);
+  this->SetAAHShader(NULL);
+  this->SetAAVShader(NULL);
+  this->FBO->Delete();
+}
+
+// ----------------------------------------------------------------------------
+vtkPainterCommunicator *vtkLineIntegralConvolution2D::GetCommunicator()
+{
+  if (this->Comm == NULL)
+    {
+    this->Comm = new vtkPainterCommunicator;
+    }
+  return this->Comm;
+}
+
+// ----------------------------------------------------------------------------
+vtkRenderWindow *vtkLineIntegralConvolution2D::GetContext()
+{
+  return this->Context;
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetContext(vtkRenderWindow *renWin)
+{
+  if (this->Context == renWin)
+    {
+    return;
+    }
+
+  this->Context = renWin;
+  this->ShadersNeedBuild = 1;
+  this->FBO->SetContext(renWin);
+  this->Modified();
+
+  if (renWin && !this->IsSupported(renWin))
+    {
+    vtkErrorMacro("The required OpenGL extensions are not present");
+    }
+}
+
+// ----------------------------------------------------------------------------
+bool vtkLineIntegralConvolution2D::IsSupported(vtkRenderWindow *renWin)
+{
+  vtkOpenGLRenderWindow *context = vtkOpenGLRenderWindow::SafeDownCast(renWin);
+  if (!context)
+    {
+    return false;
+    }
+
+#if defined(__APPLE__) || defined(_WIN32)
+  vtkOpenGLExtensionManager *manager = context->GetExtensionManager();
+#endif
+#if defined(__APPLE__)
+  if (manager->DriverIsNvidia() && manager->DriverVersionIs(1,6))
+    {
+    // Mac OSX 10.6 GLSL doesn't support array initializer
+    return false;
+    }
+#endif
+#if defined(_WIN32)
+  if ( manager->DriverIsIntel() && manager->DriverGLRendererHas("HD Graphics")
+    && !manager->GetIgnoreDriverBugs("Intel HD 2k,3k,4k incorrect results") )
+    {
+    // Intel drivers produce close but not pixel for pixel identical
+    // results. Windows: yes. Linux: untested. Mac: no.
+    return false;
+    }
+#endif
+
+  return vtkTextureObject::IsSupported(renWin, true, false, false)
+     && vtkFrameBufferObject2::IsSupported(renWin)
+     && vtkShaderProgram2::IsSupported(renWin)
+     && vtkPixelBufferObject::IsSupported(renWin);
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetNoiseTexParameters(vtkTextureObject * tex)
+{
+  tex->SetBaseLevel(0);
+  tex->SetMaxLevel(0);
+  tex->SetWrapS(vtkTextureObject::Repeat);
+  tex->SetWrapT(vtkTextureObject::Repeat);
+  tex->SetMinificationFilter(vtkTextureObject::Nearest);
+  tex->SetMagnificationFilter(vtkTextureObject::Nearest);
+  // note: as a side effect this sets the parameters
+  // that are needed here.
+  tex->Bind();
+  tex->UnBind();
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetVectorTexParameters(vtkTextureObject *tex)
+{
+  tex->SetBaseLevel(0);
+  tex->SetMaxLevel(0);
+  tex->SetWrapS(vtkTextureObject::ClampToBorder);
+  tex->SetWrapT(vtkTextureObject::ClampToBorder);
+  tex->SetBorderColor(0.0, 0.0, 0.0, 0.0);
+  tex->SetMinificationFilter(vtkTextureObject::Linear);
+  tex->SetMagnificationFilter(vtkTextureObject::Linear);
+  // note: as a side effect this sets the parameters
+  // that are needed here.
+  tex->Bind();
+  tex->UnBind();
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetComponentIds(int c0, int c1)
+{
+  if ((this->ComponentIds[0] == c0) && (this->ComponentIds[1] == c1))
+    {
+    return;
+    }
+  this->ComponentIds[0] = c0;
+  this->ComponentIds[1] = c1;
+  this->ShadersNeedBuild = 1;
+  this->Modified();
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetTransformVectors(int val)
+{
+  val = val < 0 ? 0 : val;
+  val = val > 1 ? 1 : val;
+  if (this->TransformVectors == val)
+    {
+    return;
+    }
+  this->TransformVectors = val;
+  this->ShadersNeedBuild = 1;
+  this->Modified();
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetNormalizeVectors(int val)
+{
+  val = val < 0 ? 0 : val;
+  val = val > 1 ? 1 : val;
+  if (this->NormalizeVectors == val)
+    {
+    return;
+    }
+  this->NormalizeVectors = val;
+  this->ShadersNeedBuild = 1;
+  this->Modified();
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetVTShader(vtkShaderProgram2 * prog)
+{
+  vtkSetObjectBodyMacro(VTShader, vtkShaderProgram2, prog);
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetLIC0Shader(vtkShaderProgram2 * prog)
+{
+  vtkSetObjectBodyMacro(LIC0Shader, vtkShaderProgram2, prog);
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetLICIShader(vtkShaderProgram2 * prog)
+{
+  vtkSetObjectBodyMacro(LICIShader, vtkShaderProgram2, prog);
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetLICNShader(vtkShaderProgram2 * prog)
+{
+  vtkSetObjectBodyMacro(LICNShader, vtkShaderProgram2, prog);
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetEEShader(vtkShaderProgram2 * prog)
+{
+  vtkSetObjectBodyMacro(EEShader, vtkShaderProgram2, prog);
+}
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetCEShader(vtkShaderProgram2 * prog)
+{
+  vtkSetObjectBodyMacro(CEShader, vtkShaderProgram2, prog);
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetAAHShader(vtkShaderProgram2 * prog)
+{
+  vtkSetObjectBodyMacro(AAHShader, vtkShaderProgram2, prog);
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::SetAAVShader(vtkShaderProgram2 * prog)
+{
+  vtkSetObjectBodyMacro(AAVShader, vtkShaderProgram2, prog);
+}
+
+// ----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::BuildShaders()
+{
+  // normalized image space transform shader
+  vtkShaderProgram2 *prog = vtkShaderProgram2::New();
+  prog->SetContext(this->Context);
+
+  string selectCompsSrc
+   = GetComponentSelectionProgram(this->ComponentIds);
+
+  vtkShader2 *selectComps = vtkShader2::New();
+  selectComps->SetContext(this->Context);
+  selectComps->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  selectComps->SetSourceCode(selectCompsSrc.c_str());
+  prog->GetShaders()->AddItem(selectComps);
+  selectComps->Delete();
+
+  vtkShader2 *glslVT = vtkShader2::New();
+  glslVT->SetContext(this->Context);
+  glslVT->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  glslVT->SetSourceCode(vtkLineIntegralConvolution2D_VT);
+  prog->GetShaders()->AddItem(glslVT);
+  glslVT->Delete();
+
+  prog->Build();
+  if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("failed to build the VT fragment shader");
+    }
+  else
+    {
+    this->SetVTShader(prog);
+    }
+  prog->Delete();
+
+  // LIC0 shader
+  prog = vtkShaderProgram2::New();
+  prog->SetContext(this->Context);
+
+  vtkShader2 *glslLIC0 = vtkShader2::New();
+  glslLIC0->SetContext(this->Context);
+  glslLIC0->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  glslLIC0->SetSourceCode(vtkLineIntegralConvolution2D_LIC0);
+  prog->GetShaders()->AddItem(glslLIC0);
+  glslLIC0->Delete();
+
+  prog->Build();
+  if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("failed to build the LIC0 fragment shader");
+    }
+  else
+    {
+    this->SetLIC0Shader(prog);
+    }
+  prog->Delete();
+
+  // LICI shader
+  prog = vtkShaderProgram2::New();
+  prog->SetContext(this->Context);
+
+  vtkShader2 *getVectors = vtkShader2::New();
+  getVectors->SetContext(this->Context);
+  getVectors->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  getVectors->SetSourceCode(GetVectorLookupProgram(this->NormalizeVectors));
+  prog->GetShaders()->AddItem(getVectors);
+  getVectors->Delete();
+
+  vtkShader2 *glslLICI = vtkShader2::New();
+  glslLICI->SetContext(this->Context);
+  glslLICI->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  glslLICI->SetSourceCode(vtkLineIntegralConvolution2D_LICI);
+  prog->GetShaders()->AddItem(glslLICI);
+  glslLICI->Delete();
+
+  prog->Build();
+  if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("failed to build the LICI fragment shader");
+    }
+  else
+    {
+    this->SetLICIShader(prog);
+    }
+  prog->Delete();
+
+  // LICN shader
+  prog = vtkShaderProgram2::New();
+  prog->SetContext(this->Context);
+
+  vtkShader2 *glslLICN = vtkShader2::New();
+  glslLICN->SetContext(this->Context);
+  glslLICN->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  glslLICN->SetSourceCode(vtkLineIntegralConvolution2D_LICN);
+  prog->GetShaders()->AddItem(glslLICN);
+  glslLICN->Delete();
+
+  prog->Build();
+  if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("failed to build the LICN fragment shader");
+    }
+  else
+    {
+    this->SetLICNShader(prog);
+    }
+  prog->Delete();
+
+  // Edge Enhancement(EE) shader
+  prog = vtkShaderProgram2::New();
+  prog->SetContext(this->Context);
+
+  vtkShader2 *glslEE = vtkShader2::New();
+  glslEE->SetContext(this->Context);
+  glslEE->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  glslEE->SetSourceCode(vtkLineIntegralConvolution2D_EE);
+  prog->GetShaders()->AddItem(glslEE);
+  glslEE->Delete();
+
+  prog->Build();
+  if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("failed to build the EE fragment shader");
+    }
+  else
+    {
+    this->SetEEShader(prog);
+    }
+  prog->Delete();
+
+  // Contrast Enhancement(CE) shader
+  prog = vtkShaderProgram2::New();
+  prog->SetContext(this->Context);
+
+  vtkShader2 *glslCE = vtkShader2::New();
+  glslCE->SetContext(this->Context);
+  glslCE->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  glslCE->SetSourceCode(vtkLineIntegralConvolution2D_CE);
+  prog->GetShaders()->AddItem(glslCE);
+  glslCE->Delete();
+
+  prog->Build();
+  if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("failed to build the CE fragment shader");
+    }
+  else
+    {
+    this->SetCEShader(prog);
+    }
+  prog->Delete();
+
+  // Anti-Alias(AA) shader
+  prog = vtkShaderProgram2::New();
+  prog->SetContext(this->Context);
+
+  vtkShader2 *glslAAH = vtkShader2::New();
+  glslAAH->SetContext(this->Context);
+  glslAAH->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  glslAAH->SetSourceCode(vtkLineIntegralConvolution2D_AAH);
+  prog->GetShaders()->AddItem(glslAAH);
+  glslAAH->Delete();
+
+  prog->Build();
+  if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("failed to build the AAH fragment shader");
+    }
+  else
+    {
+    this->SetAAHShader(prog);
+    }
+  prog->Delete();
+
+  prog = vtkShaderProgram2::New();
+  prog->SetContext(this->Context);
+
+  vtkShader2 *glslAAV = vtkShader2::New();
+  glslAAV->SetContext(this->Context);
+  glslAAV->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  glslAAV->SetSourceCode(vtkLineIntegralConvolution2D_AAV);
+  prog->GetShaders()->AddItem(glslAAV);
+  glslAAV->Delete();
+
+  prog->Build();
+  if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("failed to build the AAV fragment shader");
+    }
+  else
+    {
+    this->SetAAVShader(prog);
+    }
+  prog->Delete();
+}
+
+// ----------------------------------------------------------------------------
+vtkTextureObject *vtkLineIntegralConvolution2D::Execute(
+        vtkTextureObject *vectorTex,
+        vtkTextureObject *noiseTex)
+{
+  // execute over the entire vector field; no guard pixels are present, so
+  // parallel results will be incorrect.
+
+  vtkPixelExtent vectorTexExtent(
+        vectorTex->GetWidth(),
+        vectorTex->GetHeight());
+
+  return this->Execute(
+        vectorTexExtent.GetData(),
+        vectorTex,
+        noiseTex);
+}
+
+// ----------------------------------------------------------------------------
+vtkTextureObject *vtkLineIntegralConvolution2D::Execute(
+      const int ext[4],
+      vtkTextureObject *vectorTex,
+      vtkTextureObject *noiseTex)
+{
+  // execute over a subset of the input texture; no guard pixels are present, so
+  // composite data and parallel results will be incorrect.
+
+  this->SetVectorTexParameters(vectorTex);
+  this->SetNoiseTexParameters(noiseTex);
+
+  vtkPixelExtent vectorTexExtent(vectorTex->GetWidth(), vectorTex->GetHeight());
+  vtkPixelExtent vectorExtent(ext);
+  vtkPixelExtent licExtent(ext);
+  vtkPixelExtent outputTexExtent(ext);
+  vtkPixelExtent outputExtent(ext);
+
+  deque<vtkPixelExtent> vectorExtents;
+  vectorExtents.push_back(vectorExtent);
+
+  deque<vtkPixelExtent> licExtents;
+  licExtents.push_back(licExtent);
+
+  unsigned int licSize[2];
+  licExtent.Size(licSize);
+
+  return this->Execute(
+        vectorTexExtent,
+        vectorExtents,
+        licExtents,
+        vectorTex,
+        NULL,
+        noiseTex);
+}
+
+// ----------------------------------------------------------------------------
+vtkTextureObject *vtkLineIntegralConvolution2D::Execute(
+      const vtkPixelExtent &inputTexExtent,              // screen space extent of the input texture
+      const deque<vtkPixelExtent> &vectorExtents,        // disjoint set describing vector extents
+      const deque<vtkPixelExtent> &licExtents,           // disjoint set describing desired lic extents
+      vtkTextureObject *vectorTex,
+      vtkTextureObject *maskVectorTex,
+      vtkTextureObject *noiseTex)
+{
+  // validate inputs, internal state, etc...
+  if (!this->Context)
+    {
+    vtkErrorMacro("invalid this->Context");
+    return NULL;
+    }
+  if (this->NumberOfSteps < 0)
+    {
+    vtkErrorMacro("Number of integration steps must not be negative.");
+    return NULL;
+    }
+  if (this->StepSize < 0.0)
+    {
+    vtkErrorMacro("Streamline integration step size must not be negative.");
+    return NULL;
+    }
+  if (vectorTex->GetComponents() < 2)
+    {
+    vtkErrorMacro("VectorField must have at least 2 components.");
+    return NULL;
+    }
+
+  #if defined(vtkLineIntegralConvolution2DTIME) && !defined(vtkSurfaceLICPainterTIME)
+  this->StartTimerEvent("vtkLineIntegralConvolution::Execute");
+  #elif defined(USE_VTK_TIMER)
+  vtkSmartPointer<vtkTimerLog> timer = vtkSmartPointer<vtkTimerLog>::New();
+  timer->StartTimer();
+  #endif
+
+  // initialize shaders
+  if (this->ShadersNeedBuild)
+    {
+    this->BuildShaders();
+    this->ShadersNeedBuild = 0;
+    }
+
+  // input
+  unsigned int inputTexSize[2];
+  inputTexExtent.Size(inputTexSize);
+
+  float dx = 1.0f/((float)inputTexSize[0]);
+  float dy = 1.0f/((float)inputTexSize[1]);
+
+  // texture coordinates and bounds for compute regions
+  unsigned int computeTexSize[2];
+  inputTexExtent.Size(computeTexSize);
+
+  // at slight expense to the serial case,
+  // compute LIC, EE, and AA over the entire vector
+  // extents (this is why it's critical that they
+  // are disjoint). this allows us to forgo expensive
+  // halo exchanges when running in parallel.
+  size_t nComputeExtents1 = vectorExtents.size();
+  const deque<vtkPixelExtent> &computeExtents1 = vectorExtents;
+
+  size_t nbds = 4*nComputeExtents1;
+  vector<float> computeBounds1(nbds, 0.0f);
+  for (size_t i=0; i<nComputeExtents1; ++i)
+    {
+    const vtkPixelExtent &computeExtent1 = computeExtents1[i];
+    float *bound = &computeBounds1[4*i];
+    bound[0] = ((float)computeExtent1[0])/((float)inputTexSize[0]);
+    bound[1] = ((float)computeExtent1[1]+1.0f)/((float)inputTexSize[0]);
+    bound[2] = ((float)computeExtent1[2])/((float)inputTexSize[1]);
+    bound[3] = ((float)computeExtent1[3]+1.0f)/((float)inputTexSize[1]);
+    }
+
+  // for CE only compute on the valid extents
+  // because there is bleeding at the extent
+  // edges that would result in incorrect scaling
+  // if it were used.
+  size_t nComputeExtents2 = licExtents.size();
+  const deque<vtkPixelExtent> &computeExtents2 = licExtents;
+
+  nbds = 4*nComputeExtents2;
+  vector<float> computeBounds2(nbds, 0.0f);
+
+  for (size_t i=0; i<nComputeExtents2; ++i)
+    {
+    const vtkPixelExtent &computeExtent2 = computeExtents2[i];
+    float *bound = &computeBounds2[4*i];
+    bound[0] = ((float)computeExtent2[0])/((float)inputTexSize[0]);
+    bound[1] = ((float)computeExtent2[1]+1.0f)/((float)inputTexSize[0]);
+    bound[2] = ((float)computeExtent2[2])/((float)inputTexSize[1]);
+    bound[3] = ((float)computeExtent2[3]+1.0f)/((float)inputTexSize[1]);
+    }
+
+  // during integration the texture coordinates for
+  // the noise lookup are computed using the vector
+  // texture coordinate. this ensures that on any
+  // rank we get the same noise value
+  unsigned int noiseTexSize[2] = {
+        noiseTex->GetWidth(),
+        noiseTex->GetHeight()
+        };
+
+  vtkPixelExtent noiseExtent(noiseTexSize[0], noiseTexSize[1]);
+
+  float noiseBoundsPt1[2];
+  noiseBoundsPt1[0] = ((float)noiseTexSize[0]+1.0f)/((float)inputTexSize[0]);
+  noiseBoundsPt1[1] = ((float)noiseTexSize[1]+1.0f)/((float)inputTexSize[1]);
+
+  // bind our fbo
+  this->FBO->SaveCurrentBindings();
+  this->FBO->Bind(vtkgl::FRAMEBUFFER_EXT);
+  this->FBO->InitializeViewport(computeTexSize[0], computeTexSize[1]);
+
+  // initialize the buffer manager. Textures are assigned
+  // and bound to individual units. These textures and units
+  // are active and bound for the remainder of this execution.
+  vtkLICPingPongBufferManager bufs(
+        this->FBO,
+        computeTexSize,
+        vectorTex,
+        maskVectorTex,
+        noiseTex,
+        this->EnhancedLIC,
+        this->TransformVectors);
+
+  #if  vtkLineIntegralConvolution2DDEBUG >= 1
+  int rank = this->GetCommunicator()->GetRank();
+  #endif
+  #if vtkLineIntegralConvolution2DDEBUG >= 3
+  bufs.WriteInputs(rank, vectorExtents);
+  #endif
+
+  if (this->TransformVectors)
+    {
+    // ------------------------------------------- begin normalized image space transform
+    #if defined(vtkLineIntegralConvolution2DTIME)
+    this->StartTimerEvent("vtkLineIntegralConvolution::TransformVectors");
+    #endif
+
+    this->VTShader->UseProgram();
+    this->VTShader->SetUniformi("texVectors", bufs.GetVectorTextureUnit());
+    this->VTShader->SetUniform2ft("uTexSize", inputTexSize);
+
+    bufs.AttachImageVectorBuffer();
+    // essential to initialize the entire buffer
+    glClearColor(0.0f, 0.0f, 0.0f, 0.0f);
+    glClear(GL_COLOR_BUFFER_BIT);
+    size_t nVectorExtents = vectorExtents.size();
+    for (size_t q=0; q<nVectorExtents; ++q)
+      {
+      bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+      }
+    bufs.DettachImageVectorBuffer();
+
+    this->VTShader->UnuseProgram();
+
+    #if (vtkLineIntegralConvolution2DDEBUG >= 2)
+    bufs.WriteImageVectorBuffer(rank, vectorExtents);
+    #endif
+
+    #if defined(vtkLineIntegralConvolution2DTIME)
+    this->EndTimerEvent("vtkLineIntegralConvolution::TransformVectors");
+    #endif
+    // ------------------------------------------- end normalized image space transform
+    }
+
+  // --------------------------------------------- begin first-pass LIC
+  #if defined(vtkLineIntegralConvolution2DTIME)
+  this->StartTimerEvent("vtkLineIntegralConvolution::Integrate1");
+  #endif
+
+  //
+  // initialize convolution and seeds
+  //
+  bufs.ClearBuffers(this->FBO, inputTexExtent, vectorExtents, this->EnhancedLIC);
+  bufs.AttachVectorTextures();
+  bufs.AttachNoiseTexture(0);
+
+  this->LIC0Shader->UseProgram();
+  this->LIC0Shader->SetUniformi("uStepNo", 0);
+  this->LIC0Shader->SetUniformi("uPassNo", 0);
+  this->LIC0Shader->SetUniformf("uMaskThreshold", this->MaskThreshold);
+  this->LIC0Shader->SetUniform2f("uNoiseBoundsPt1", noiseBoundsPt1);
+  this->LIC0Shader->SetUniformi("texMaskVectors", bufs.GetMaskVectorTextureUnit());
+  this->LIC0Shader->SetUniformi("texNoise", bufs.GetNoiseTextureUnit());
+  this->LIC0Shader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+
+  bufs.AttachLICBuffers();
+  for (size_t q=0; q<nComputeExtents1; ++q)
+    {
+    bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+    }
+  bufs.DettachLICBuffers();
+  bufs.Swap();
+
+  this->LIC0Shader->UnuseProgram();
+
+  #if (vtkLineIntegralConvolution2DDEBUG >= 2)
+  bufs.WriteBuffers(rank,"lic2d_lic0b_a.vtm","lic2d_lic0b_s.vtm",computeExtents1);
+  #endif
+
+  //
+  // backward LIC
+  //
+  this->LICIShader->UseProgram();
+  this->LICIShader->SetUniformi("uPassNo", 0);
+  this->LICIShader->SetUniformf("uStepSize", -this->StepSize);
+  this->LICIShader->SetUniform2f("uNoiseBoundsPt1", noiseBoundsPt1);
+  this->LICIShader->SetUniformi("texVectors", bufs.GetVectorTextureUnit());
+  this->LICIShader->SetUniformi("texNoise", bufs.GetNoiseTextureUnit());
+  this->LICIShader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+  this->LICIShader->SetUniformi("texSeedPts", bufs.GetSeedTextureUnit());
+
+  int stepNum = 0;
+  for (int stepIdx=0; stepIdx<this->NumberOfSteps; ++stepIdx, ++stepNum)
+    {
+    bufs.AttachLICBuffers();
+    for (size_t q=0; q<nComputeExtents1; ++q)
+      {
+      bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+      }
+    bufs.DettachLICBuffers();
+    bufs.Swap();
+    }
+  this->LICIShader->UnuseProgram();
+
+  #if (vtkLineIntegralConvolution2DDEBUG >= 2)
+  bufs.WriteBuffers(rank,"lic2d_licib_a.vtm", "lic2d_licib_s.vtm", computeExtents1);
+  #endif
+
+  //
+  // initialize seeds
+  //
+  this->LIC0Shader->UseProgram();
+  this->LIC0Shader->SetUniformi("uStepNo", 1);
+
+  bufs.AttachLICBuffers();
+  for (size_t q=0; q<nComputeExtents1; ++q)
+    {
+    bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+    }
+  bufs.DettachLICBuffers();
+  bufs.Swap();
+
+  this->LIC0Shader->UnuseProgram();
+
+  #if (vtkLineIntegralConvolution2DDEBUG >= 2)
+  bufs.WriteBuffers(rank,"lic2d_lic0f_a.vtm", "lic2d_lic0f_s.vtm", computeExtents1);
+  #endif
+
+  //
+  // forward LIC
+  //
+  this->LICIShader->UseProgram();
+  this->LICIShader->SetUniformf("uStepSize", this->StepSize);
+
+  for (int stepIdx=0; stepIdx<this->NumberOfSteps; ++stepIdx, ++stepNum)
+    {
+    bufs.AttachLICBuffers();
+    for (size_t q=0; q<nComputeExtents1; ++q)
+      {
+      bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+      }
+    bufs.DettachLICBuffers();
+    bufs.Swap();
+    }
+
+  this->LICIShader->UnuseProgram();
+
+  #if (vtkLineIntegralConvolution2DDEBUG >= 2)
+  bufs.WriteBuffers(rank,"lic2d_licif_a.vtm", "lic2d_licif_s.vtm", computeExtents1);
+  #endif
+
+  //
+  // finalize LIC
+  //
+  this->LICNShader->UseProgram();
+  this->LICNShader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+
+  bufs.AttachLICBuffers();
+  for (size_t q=0; q<nComputeExtents1; ++q)
+    {
+    bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+    }
+  bufs.DettachBuffers();
+  bufs.Swap();
+
+  this->LICNShader->UnuseProgram();
+
+  #if (vtkLineIntegralConvolution2DDEBUG >= 1)
+  bufs.WriteBuffers(rank,"lic2d_licn_a.vtm", "lic2d_licn_s.vtm", computeExtents1);
+  #endif
+
+  #if defined(vtkLineIntegralConvolution2DTIME)
+  this->EndTimerEvent("vtkLineIntegralConvolution::Integrate1");
+  #endif
+
+  // ----------------------------------------------- end first-pass LIC
+  if (this->EnhancedLIC)
+    {
+    if (this->EnhanceContrast == ENHANCE_CONTRAST_ON)
+      {
+      // ------------------------------------------- begin contrast enhance
+      #if defined(vtkLineIntegralConvolution2DTIME) || defined(vtkSurfaceLICPainterTIME)
+      this->StartTimerEvent("vtkLineIntegralConvolution::ContrastEnhance1");
+      #endif
+
+      vtkPainterCommunicator *comm = this->GetCommunicator();
+
+      // find the min and max only on the valid extents
+      // because there will be bleeding at the edges.
+      float grayMin = VTK_FLOAT_MAX;
+      float grayMax = -VTK_FLOAT_MAX;
+      float grayMaxMinDiff = VTK_FLOAT_MAX;
+      vtkTextureObject *licTex = bufs.GetLastLICBuffer();
+      #ifdef STREAMING_MIN_MAX
+      StreamingFindMinMax(this->FBO, licTex, computeExtents2, grayMin, grayMax);
+      #else
+      FindMinMax(licTex, computeExtents2, grayMin, grayMax);
+      #endif
+
+      if ( computeExtents2.size()
+        && ((grayMax <= grayMin) || (grayMax > 1.0f) || (grayMin < 0.0f)) )
+        {
+        vtkErrorMacro(
+          << comm->GetRank()
+          << " : Invalid color range " << grayMin << ", " << grayMax
+          << ". Normalization pass skipped");
+        grayMin = 0.0;
+        grayMax = 1.0;
+        }
+
+      // in parallel use a reduction to obtain the image
+      // wide min/max
+      this->GetGlobalMinMax(comm, grayMin, grayMax);
+
+      // it's critical to update on the entire extent to
+      // ensure correct values in the guard pixels because
+      // we don't do a halo exchange
+      grayMaxMinDiff = grayMax-grayMin;
+
+      this->CEShader->UseProgram();
+      this->CEShader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+      this->CEShader->SetUniformf("uMin", grayMin );
+      this->CEShader->SetUniformf("uMaxMinDiff", grayMaxMinDiff);
+
+      bufs.AttachLICBuffers();
+      for (size_t q=0; q<nComputeExtents1; ++q)
+        {
+        bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+        }
+      bufs.DettachLICBuffers();
+      bufs.Swap();
+
+      this->CEShader->UnuseProgram();
+
+      #if (vtkLineIntegralConvolution2DDEBUG >= 1)
+      bufs.WriteBuffers(rank,"lic2d_1ce.vtm", NULL, computeExtents1);
+      #endif
+
+      #if defined(vtkLineIntegralConvolution2DTIME) || defined(vtkSurfaceLICPainterTIME)
+      this->EndTimerEvent("vtkLineIntegralConvolution::ContrastEnhance1");
+      #endif
+      // --------------------------------------------- end contrast enhance
+      }
+
+    // --------------------------------------------- begin high-pass filtering
+    #if defined(vtkLineIntegralConvolution2DTIME)
+    this->StartTimerEvent("vtkLineIntegralConvolution::EdgeEnhance");
+    #endif
+
+    #ifdef INTEL_BUG
+    bufs.AttachEEBuffer();
+    #endif
+
+    this->EEShader->UseProgram();
+    this->EEShader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+    this->EEShader->SetUniformf("uDx", dx);
+    this->EEShader->SetUniformf("uDy", dy);
+
+    #ifndef INTEL_BUG
+    bufs.AttachEEBuffer();
+    #endif
+    for (size_t q=0; q<nComputeExtents1; ++q)
+      {
+      bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+      }
+    bufs.DettachEEBuffer();
+
+    this->EEShader->UnuseProgram();
+
+    #if (vtkLineIntegralConvolution2DDEBUG >= 1)
+    bufs.WriteEEBuffer(rank, computeExtents1);
+    #endif
+
+    #if defined(vtkLineIntegralConvolution2DTIME)
+    this->EndTimerEvent("vtkLineIntegralConvolution::EdgeEnhance");
+    #endif
+    // --------------------------------------------- end high-pass filtering
+
+    // --------------------------------------------- begin second-pass LIC
+    #if defined(vtkLineIntegralConvolution2DTIME)
+    this->StartTimerEvent("vtkLineIntegralConvolution::Integrate2");
+    #endif
+
+    // in pass 2 LIC is computed by convolving the edge-enhanced result of pass 1
+    // rather than noise. This gives the result a nice smooth look; since the
+    // input is fairly smooth, fewer steps are needed.
+
+    // clear the buffers
+    bufs.DettachBuffers();
+    bufs.ClearBuffers(this->FBO, inputTexExtent, vectorExtents, /*clearEE=*/0);
+    bufs.AttachVectorTextures();
+    bufs.AttachNoiseTexture(1);
+
+    //
+    // initialize convolution and seeds
+    //
+    this->LIC0Shader->UseProgram();
+    this->LIC0Shader->SetUniformi("uStepNo", 0);
+    this->LIC0Shader->SetUniformi("uPassNo", 1);
+
+    bufs.AttachLICBuffers();
+    for (size_t q=0; q<nComputeExtents1; ++q)
+      {
+      bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+      }
+    bufs.DettachLICBuffers();
+    bufs.Swap();
+
+    this->LIC0Shader->UnuseProgram();
+
+    #if (vtkLineIntegralConvolution2DDEBUG >= 2)
+    bufs.WriteBuffers(rank,"lic2d_elic0b_a.vtm", "lic2d_elic0b_s.vtm", computeExtents1);
+    #endif
+
+    //
+    // backward LIC
+    //
+    this->LICIShader->UseProgram();
+    this->LICIShader->SetUniformi("uPassNo", 1);
+    this->LICIShader->SetUniformf("uStepSize", -this->StepSize);
+
+    int nSteps = this->NumberOfSteps/2;
+    stepNum = 0;
+    for (int stepIdx=0; stepIdx<nSteps; ++stepIdx, ++stepNum)
+      {
+      bufs.AttachLICBuffers();
+      for (size_t q=0; q<nComputeExtents1; ++q)
+        {
+        bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+        }
+      bufs.DettachLICBuffers();
+      bufs.Swap();
+      }
+
+    this->LICIShader->UnuseProgram();
+
+    #if (vtkLineIntegralConvolution2DDEBUG >=2 )
+    bufs.WriteBuffers(rank,"lic2d_elicib_a.vtm", "lic2d_elicib_s.vtm",computeExtents1);
+    #endif
+
+    //
+    // initialize seeds
+    //
+    this->LIC0Shader->UseProgram();
+    this->LIC0Shader->SetUniformi("uStepNo", 1);
+
+    bufs.AttachLICBuffers();
+    for (size_t q=0; q<nComputeExtents1; ++q)
+      {
+      bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+      }
+    bufs.DettachLICBuffers();
+    bufs.Swap();
+
+    this->LIC0Shader->UnuseProgram();
+
+    #if (vtkLineIntegralConvolution2DDEBUG >= 2)
+    bufs.WriteBuffers(rank,"lic2d_elic0f_a.vtm", "lic2d_elic0f_s.vtm",computeExtents1);
+    #endif
+
+    //
+    // forward LIC
+    //
+    this->LICIShader->UseProgram();
+    this->LICIShader->SetUniformf("uStepSize", this->StepSize);
+
+    for (int stepIdx=0; stepIdx<nSteps; ++stepIdx, ++stepNum)
+      {
+      bufs.AttachLICBuffers();
+      for (size_t q=0; q<nComputeExtents1; ++q)
+        {
+        bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+        }
+      bufs.DettachLICBuffers();
+      bufs.Swap();
+      }
+
+    this->LICIShader->UnuseProgram();
+
+    #if (vtkLineIntegralConvolution2DDEBUG >= 2)
+    bufs.WriteBuffers(rank,"lic2d_elicif_a.vtm", "lic2d_elicif_s.vtm",computeExtents1);
+    #endif
+
+    //
+    // finalize LIC
+    //
+    this->LICNShader->UseProgram();
+    this->LICNShader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+
+    bufs.AttachLICBuffers();
+    for (size_t q=0; q<nComputeExtents1; ++q)
+      {
+      bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+      }
+    bufs.DettachLICBuffers();
+    bufs.Swap();
+
+    this->LICNShader->UnuseProgram();
+
+    #if (vtkLineIntegralConvolution2DDEBUG >= 1)
+    bufs.WriteBuffers(rank,"lic2d_elicn_a.vtm", "lic2d_elicn_s.vtm",computeExtents1);
+    #endif
+
+    #if defined(vtkLineIntegralConvolution2DTIME)
+    this->EndTimerEvent("vtkLineIntegralConvolution::Integrate2");
+    #endif
+    // --------------------------------------------- end second-pass LIC
+    }
+
+  if (this->AntiAlias)
+    {
+    // --------------------------------------------- begin anti-alias
+
+    #if defined(vtkLineIntegralConvolution2DTIME)
+    this->StartTimerEvent("vtkLineIntegralConvolution::AntiAlias");
+    #endif
+
+    this->AAHShader->UseProgram();
+    this->AAHShader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+    this->AAHShader->SetUniformf("uDx", dx);
+    this->AAHShader->UnuseProgram();
+
+    this->AAVShader->UseProgram();
+    this->AAVShader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+    this->AAVShader->SetUniformf("uDy", dy);
+    this->AAVShader->UnuseProgram();
+
+    // it's ok to compute over just the valid extent;
+    // we don't care here if guard pixels are smoothed.
+    // however, computing over the entire extent avoids
+    // bleeding at the edges when multiple passes are
+    // requested.
+    for (int i=0; i<this->AntiAlias; ++i)
+      {
+      // horizontal pass
+      this->AAHShader->UseProgram();
+      bufs.AttachLICBuffers();
+      for (size_t q=0; q<nComputeExtents1; ++q)
+        {
+        bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+        }
+      bufs.DettachLICBuffers();
+      bufs.Swap();
+      this->AAHShader->UnuseProgram();
+
+      // vertical pass
+      this->AAVShader->UseProgram();
+      bufs.AttachLICBuffers();
+      for (size_t q=0; q<nComputeExtents1; ++q)
+        {
+        bufs.RenderQuad(&computeBounds1[4*q], computeExtents1[q]);
+        }
+      bufs.DettachLICBuffers();
+      bufs.Swap();
+      this->AAVShader->UnuseProgram();
+      }
+
+    #if (vtkLineIntegralConvolution2DDEBUG >= 1)
+    bufs.WriteBuffers(rank,"lic2d_aa.vtm", NULL, computeExtents1);
+    #endif
+
+    #if defined(vtkLineIntegralConvolution2DTIME)
+    this->EndTimerEvent("vtkLineIntegralConvolution::AntiAlias");
+    #endif
+    // --------------------------------------------- end anti-alias
+    }
+
+  if (this->EnhanceContrast)
+    {
+    // ------------------------------------------- begin contrast enhance
+    #if defined(vtkLineIntegralConvolution2DTIME) || defined(vtkSurfaceLICPainterTIME)
+    this->StartTimerEvent("vtkLineIntegralConvolution::ContrastEnhance2");
+    #endif
+
+    vtkPainterCommunicator *comm = this->GetCommunicator();
+
+    // the final contrast enhancement should
+    // be applied only to the valid extents
+    float grayMin = VTK_FLOAT_MAX;
+    float grayMax = -VTK_FLOAT_MAX;
+    float grayMaxMinDiff = 1.0f;
+
+    vtkTextureObject *licTex = bufs.GetLastLICBuffer();
+    #ifdef STREAMING_MIN_MAX
+    StreamingFindMinMax(this->FBO, licTex, computeExtents2, grayMin, grayMax);
+    #else
+    FindMinMax(licTex, computeExtents2, grayMin, grayMax);
+    #endif
+
+    if ( computeExtents2.size()
+      && ((grayMax <= grayMin) || (grayMax > 1.0f) || (grayMin < 0.0f)) )
+      {
+      vtkErrorMacro(
+        << comm->GetRank()
+        << " : Invalid intensity range " << grayMin << ", " << grayMax
+        << " for contrast enhancement");
+      grayMin = 0.0;
+      grayMax = 1.0;
+      }
+
+    // in parallel use a reduction to obtain the image
+    // wide min/max
+    this->GetGlobalMinMax(comm, grayMin, grayMax);
+
+    // select M and m as a fraction of the range.
+    grayMaxMinDiff = grayMax-grayMin;
+    grayMin += grayMaxMinDiff*this->LowContrastEnhancementFactor;
+    grayMax -= grayMaxMinDiff*this->HighContrastEnhancementFactor;
+    grayMaxMinDiff = grayMax-grayMin;
+
+    this->CEShader->UseProgram();
+    this->CEShader->SetUniformi("texLIC", bufs.GetLICTextureUnit());
+    this->CEShader->SetUniformf("uMin", grayMin );
+    this->CEShader->SetUniformf("uMaxMinDiff", grayMaxMinDiff);
+
+    bufs.AttachLICBuffers();
+    for (size_t q=0; q<nComputeExtents2; ++q)
+      {
+      bufs.RenderQuad(&computeBounds2[4*q], computeExtents2[q]);
+      }
+    bufs.DettachLICBuffers();
+    bufs.Swap();
+
+    this->CEShader->UnuseProgram();
+
+    #if (vtkLineIntegralConvolution2DDEBUG >= 1)
+    bufs.WriteBuffers(rank,"lic2d_2ce.vtm", NULL, computeExtents2);
+    #endif
+
+    #if defined(vtkLineIntegralConvolution2DTIME) || defined(vtkSurfaceLICPainterTIME)
+    this->EndTimerEvent("vtkLineIntegralConvolution::ContrastEnhance2");
+    #endif
+
+    // --------------------------------------------- end contrast enhance
+    }
+
+  bufs.DettachBuffers();
+  this->FBO->UnBind(vtkgl::FRAMEBUFFER_EXT);
+
+  vtkTextureObject *outputTex = bufs.GetLastLICBuffer();
+  outputTex->Register(0);
+
+  #if defined(vtkLineIntegralConvolution2DTIME) && !defined(vtkSurfaceLICPainterTIME)
+  this->EndTimerEvent("vtkLineIntegralConvolution::Execute");
+  #elif defined(USE_VTK_TIMER)
+  timer->StopTimer();
+  #endif
+
+  return outputTex;
+}
+
+//-----------------------------------------------------------------------------
+void vtkLineIntegralConvolution2D::PrintSelf(ostream & os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os
+    << indent << "Comm=" << this->Comm << endl
+    << indent << "Context=" << this->Context << endl
+    << indent << "FBO=" << this->FBO << endl
+    << indent << "ShadersNeedBuild=" << this->ShadersNeedBuild << endl
+    << indent << "VTShader=" << this->VTShader << endl
+    << indent << "LIC0Shader=" << this->LIC0Shader << endl
+    << indent << "LICIShader=" << this->LICIShader << endl
+    << indent << "LICNShader=" << this->LICNShader << endl
+    << indent << "EEShader=" << this->EEShader << endl
+    << indent << "CEShader=" << this->CEShader << endl
+    << indent << "AAHShader=" << this->AAHShader << endl
+    << indent << "AAVShader=" << this->AAVShader << endl
+    << indent << "NumberOfSteps=" << this->NumberOfSteps << endl
+    << indent << "StepSize=" << this->StepSize << endl
+    << indent << "EnhancedLIC=" << this->EnhancedLIC << endl
+    << indent << "EnhanceContrast=" << this->EnhanceContrast << endl
+    << indent << "LowContrastEnhancementFactor=" << this->LowContrastEnhancementFactor << endl
+    << indent << "HighContrastEnhancementFactor=" << this->HighContrastEnhancementFactor << endl
+    << indent << "AntiAlias=" << this->AntiAlias << endl
+    << indent << "MaskThreshold=" << this->MaskThreshold << endl
+    << indent << "TransformVectors=" << this->TransformVectors << endl
+    << indent << "NormalizeVectors=" << this->NormalizeVectors << endl
+    << indent << "ComponentIds=" << this->ComponentIds[0] << ", " << this->ComponentIds[1] << endl;
+}
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D.h b/Rendering/LIC/vtkLineIntegralConvolution2D.h
new file mode 100644
index 0000000..1c2e010
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D.h
@@ -0,0 +1,396 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkLineIntegralConvolution2D.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkLineIntegralConvolution2D - GPU-based implementation of Line
+//  Integral Convolution (LIC)
+//
+// .SECTION Description
+//  This class resorts to GLSL to implement GPU-based Line Integral Convolution
+//  (LIC) for visualizing a 2D vector field that may be obtained by projecting
+//  an original 3D vector field onto a surface (such that the resulting 2D
+//  vector at each grid point on the surface is tangential to the local normal,
+//  as done in vtkSurfaceLICPainter).
+//
+//  As an image-based technique, 2D LIC works by (1) integrating a bidirectional
+//  streamline from the center of each pixel (of the LIC output image), (2)
+//  locating the pixels along / hit by this streamline as the correlated pixels
+//  of the starting pixel (seed point / pixel), (3) indexing a (usually white)
+//  noise texture (another input to LIC, in addition to the 2D vector field,
+//  usually with the same size as that of the 2D vector field) to determine the
+//  values (colors) of these pixels (the starting and the correlated pixels),
+//  typically through bi-linear interpolation, and (4) performing convolution
+//  (weighted averaging) on these values, by adopting a low-pass filter (such
+//  as box, ramp, and Hanning kernels), to obtain the result value (color) that
+//  is then assigned to the seed pixel.
+//
+//  The GLSL-based GPU implementation herein maps the aforementioned pipeline to
+//  fragment shaders and a box kernel is employed. Both the white noise and the
+//  vector field are provided to the GPU as texture objects (supported by the
+//  multi-texturing capability). In addition, there are four texture objects
+//  (color buffers) allocated to constitute two pairs that work in a ping-pong
+//  fashion, with one as the read buffers and the other as the write / render
+//  targets. Maintained by a frame buffer object (GL_EXT_framebuffer_object),
+//  each pair employs one buffer to store the current (dynamically updated)
+//  position (by means of the texture coordinate that keeps being warped by the
+//  underlying vector) of the (virtual) particle initially released from each
+//  fragment while using the other buffer to store the current (dynamically
+//  updated too) accumulated texture value that each seed fragment (before the
+//  'mesh' is warped) collects. Given NumberOfSteps integration steps in each
+//  direction, a total of (2 * NumberOfSteps + 1) fragments (including the seed
+//  fragment) are convolved and each contributes 1 / (2 * NumberOfSteps + 1) of
+//  the associated texture value to fulfill the box filter (a CPU sketch of this
+//  convolution follows this comment block).
+//
+//  One pass of LIC (basic LIC) tends to produce low-contrast / blurred images and
+//  vtkLineIntegralConvolution2D provides an option for creating enhanced LIC
+//  images. Enhanced LIC improves image quality by increasing inter-streamline
+//  contrast while suppressing artifacts. It performs two passes of LIC, with a
+//  3x3 Laplacian high-pass filter in between that processes the output of pass
+//  #1 LIC and forwards the result as the input 'noise' to pass #2 LIC.
+//
+//  vtkLineIntegralConvolution2D applies masking to zero-vector fragments so
+//  that un-filtered white noise areas are made totally transparent by class
+//  vtkSurfaceLICPainter to show the underlying geometry surface.
+//
+//  The convolution process tends to decrease both contrast and dynamic range,
+//  sometimes leading to dull dark images. In order to counteract this, optional
+//  contrast enhancement stages have been added. These increase the dynamic range and
+//  contrast and sharpen streaking patterns that emerge from the LIC process.
+//
+//  Under some circumstances, typically depending on the contrast and dynamic
+//  range and graininess of the noise texture, jagged or pixelated patterns emerge
+//  in the LIC. These can be reduced by enabling the optional anti-aliasing pass.
+//
+//  The internal pipeline is as follows, with optional stages denoted by ();
+//  nested optional stages depend on their parent stage.
+//  <pre>
+//   noise texture
+//           |
+//           [ LIC ((CE) HPF LIC) (AA) (CE) ]
+//           |                              |
+//  vector field                       LIC'd image
+// </pre>
+//  where LIC is the LIC stage, HPF is the high-pass filter stage, CE is the
+//  contrast enhancement stage, and AA is the antialias stage.
+//
+// .SECTION See Also
+//  vtkSurfaceLICPainter vtkImageDataLIC2D vtkStructuredGridLIC2D
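
To make the box-kernel convolution described above concrete, here is a small CPU sketch (editorial; it is not part of this header, and sampleVector/sampleNoise are hypothetical stand-ins for the texture2D lookups done in the fragment shaders). Each pixel traces NumberOfSteps steps backward and forward and averages the samples with equal weight 1/(2*NumberOfSteps+1).

    #include <functional>
    #include <iostream>

    // CPU analogue of the per-fragment LIC integration, under the assumptions
    // stated above; coordinates are normalized texture coordinates in [0,1]^2.
    float LICPixel(float tcx, float tcy, int numberOfSteps, float stepSize,
                   const std::function<void(float, float, float &, float &)> &sampleVector,
                   const std::function<float(float, float)> &sampleNoise)
    {
      float sum = sampleNoise(tcx, tcy);            // the seed fragment
      for (int dir = -1; dir <= 1; dir += 2)        // backward, then forward
        {
        float x = tcx, y = tcy;
        for (int i = 0; i < numberOfSteps; ++i)
          {
          float vx, vy;
          sampleVector(x, y, vx, vy);               // (normalized) vector lookup
          x += dir * stepSize * vx;                 // warp the texture coordinate
          y += dir * stepSize * vy;
          sum += sampleNoise(x, y);                 // accumulate along the streamline
          }
        }
      // box filter: each of the 2*numberOfSteps+1 samples contributes equally
      return sum / static_cast<float>(2 * numberOfSteps + 1);
    }

    int main()
    {
      // trivial test field: constant vector (1,0) and a horizontal noise ramp
      auto vec = [](float, float, float &vx, float &vy) { vx = 1.0f; vy = 0.0f; };
      auto noise = [](float x, float) { return x; };
      // the symmetric streamline averages the ramp back to the seed value 0.5
      std::cout << LICPixel(0.5f, 0.5f, 10, 0.01f, vec, noise) << std::endl;
      return 0;
    }
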
+
+#ifndef __vtkLineIntegralConvolution2D_h
+#define __vtkLineIntegralConvolution2D_h
+
+#include "vtkObject.h"
+#include "vtkWeakPointer.h" // for ren context
+#include "vtkRenderingLICModule.h" // for export macro
+#include <deque> // for deque
+
+class vtkRenderWindow;
+class vtkTextureObject;
+class vtkPixelExtent;
+class vtkShaderProgram2;
+class vtkFrameBufferObject2;
+class vtkPainterCommunicator;
+
+class VTKRENDERINGLIC_EXPORT vtkLineIntegralConvolution2D : public vtkObject
+{
+public:
+  static vtkLineIntegralConvolution2D *New();
+  vtkTypeMacro(vtkLineIntegralConvolution2D, vtkObject);
+  void PrintSelf(ostream & os, vtkIndent indent);
+
+  // Description:
+  // Returns true if the context supports the required extensions.
+  static bool IsSupported(vtkRenderWindow * renWin);
+
+  // Description:
+  // Set/Get the rendering context. A reference is not explicitly held,
+  // thus a reference to the context must be held externally.
+  void SetContext(vtkRenderWindow *context);
+  vtkRenderWindow *GetContext();
+
+  // Description:
+  // EnhancedLIC means compute the LIC twice, with the second pass using
+  // the edge-enhanced result of the first pass as its noise texture. Edge
+  // enhancement is done by a simple Laplacian convolution.
+  vtkSetClampMacro(EnhancedLIC, int, 0, 1);
+  vtkGetMacro(EnhancedLIC, int);
+  vtkBooleanMacro(EnhancedLIC, int);
+
+  // Description:
+  // Enable/Disable contrast and dynamic range correction stages. Stage 1 is applied
+  // on the input to the high-pass filter when the high-pass filter is enabled and
+  // skipped otherwise. Stage 2, when enabled is the final stage in the internal
+  // pipeline. Both stages are implemented by a histogram stretching of the gray scale
+  // colors in the LIC'd image as follows:
+  //
+  //     c = (c-m)/(M-m)
+  //
+  // where, c is the fragment color, m is the color value to map to 0, M is the
+  // color value to map to 1. The default values of m and M are the min and max
+  // over all fragments.
+  //
+  // This increases the dynamic range and contrast in the LIC'd image, both of which
+  //  are naturally attenuated by the LI convolution process.
+  //
+  //  ENHANCE_CONTRAST_OFF  -- don't enhance contrast
+  //  ENHANCE_CONTRAST_ON   -- enhance high-pass input and final stage output
+  //
+  // This feature is disabled by default.
+  enum {
+    ENHANCE_CONTRAST_OFF=0,
+    ENHANCE_CONTRAST_ON=1};
+  vtkSetClampMacro(EnhanceContrast, int, 0, 2);
+  vtkGetMacro(EnhanceContrast, int);
+  vtkBooleanMacro(EnhanceContrast, int);
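
As a reading aid (editorial, not upstream code), a minimal sketch of the stretch c = (c-m)/(M-m); the clamp to [0,1] is an assumption of the sketch, and m and M default to the fragment min and max as noted above.

    #include <algorithm>
    #include <cassert>
    #include <cmath>

    // hypothetical helper mirroring the histogram stretch described above
    inline float StretchGray(float c, float m, float M)
    {
      float stretched = (c - m) / (M - m);
      return std::min(1.0f, std::max(0.0f, stretched)); // clamp is sketch-only
    }

    int main()
    {
      // with m = 0.2 and M = 0.8: 0.2 -> 0.0, 0.5 -> 0.5, 0.8 -> 1.0
      assert(std::fabs(StretchGray(0.5f, 0.2f, 0.8f) - 0.5f) < 1e-6f);
      return 0;
    }
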
+
+  // Description:
+  // This feature is used to fine tune the contrast enhancement. Values are provided
+  // indicating the fraction of the range to adjust m and M by during contrast enhancement
+  // histogram stretching.  M and m are the intensity/lightness values that map to 1 and 0.
+  // (see EnhanceContrast for an explanation of the mapping procedure). m and M are computed
+  // using the factors as follows:
+  //
+  //     m = min(C) + mFactor * (max(C) - min(C))
+  //     M = max(C) - MFactor * (max(C) - min(C))
+  //
+  // the default values for mFactor and MFactor are 0 which result in
+  // m = min(C), M = max(C), where C is all of the colors in the image. Adjusting
+  // mFactor and MFactor above zero provides a means to control the saturation of
+  // the normalization. These settings only affect the final normalization; the
+  // normalization that occurs on the input to the high-pass filter always uses
+  // the min and max.
+  vtkSetClampMacro(LowContrastEnhancementFactor, double, 0.0, 1.0);
+  vtkGetMacro(LowContrastEnhancementFactor, double);
+  vtkSetClampMacro(HighContrastEnhancementFactor, double, 0.0, 1.0);
+  vtkGetMacro(HighContrastEnhancementFactor, double);
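
Continuing the sketch above (editorial; the numbers are arbitrary), a worked example of the factor-adjusted endpoints using the same sign convention as the implementation, which moves m up and M down into the interior of the observed range.

    #include <iostream>

    int main()
    {
      float minC = 0.1f, maxC = 0.9f;          // observed gray range
      float lowFactor = 0.25f, highFactor = 0.25f;
      float range = maxC - minC;               // 0.8
      float m = minC + lowFactor * range;      // 0.3: values below this saturate to 0
      float M = maxC - highFactor * range;     // 0.7: values above this saturate to 1
      std::cout << "m=" << m << " M=" << M << std::endl;
      return 0;
    }
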
+
+  // Description:
+  // Enable/Disable the anti-aliasing pass. This optional pass (disabled by
+  // default) can be enabled to reduce jagged patterns in the final LIC image.
+  // Values greater than 0 control the number of iterations; one is typically
+  // sufficient.
+  vtkSetClampMacro(AntiAlias, int, 0, VTK_INT_MAX);
+  vtkGetMacro(AntiAlias, int);
+  vtkBooleanMacro(AntiAlias, int);
+
+  // Description:
+  // Number of streamline integration steps (initial value is 1).
+  // In terms of visual quality, the greater (within some range) the better.
+  vtkSetClampMacro(NumberOfSteps, int, 0, VTK_INT_MAX);
+  vtkGetMacro(NumberOfSteps, int);
+
+  // Description:
+  // Get/Set the streamline integration step size (0.01 by default). This is
+  // the length of each step in normalized image space i.e. in range [0, FLOAT_MAX].
+  // In terms of visual quality, the smaller the better. The interface type is
+  // double, as is the VTK convention, but the GPU only supports float; the
+  // value is therefore converted to float when the algorithm executes.
+  vtkSetClampMacro(StepSize, double, 0.0, VTK_FLOAT_MAX);
+  vtkGetMacro(StepSize, double);
+
+  // Description:
+  // If VectorField has >= 3 components, we must choose which 2 components
+  // form the (X, Y) components for the vector field. Must be in the range
+  // [0, 3].
+  void SetComponentIds(int c0, int c1);
+  void SetComponentIds(int c[2]){ this->SetComponentIds(c[0], c[1]); }
+  vtkGetVector2Macro(ComponentIds, int);
+
+  // Description:
+  // Set the max noise value for use during LIC integration normalization.
+  // The integration normalization factor is the max noise value times the
+  // number of steps taken. The default value is 1.
+  vtkSetClampMacro(MaxNoiseValue, double, 0.0, 1.0);
+  vtkGetMacro(MaxNoiseValue, double);
+
+  // Description:
+  // This class performs LIC in the normalized image space. Hence, by default
+  // it transforms the input vectors to the normalized image space (using the
+  // GridSpacings and input vector field dimensions). Set this to 0 to disable
+  // transformation if the vectors are already transformed.
+  void SetTransformVectors(int val);
+  vtkGetMacro(TransformVectors, int);
+
+  // Description:
+  // Set/Get the spacing in each dimension of the plane on which the vector
+  // field is defined. This class performs LIC in the normalized image space
+  // and hence generally it needs to transform the input vector field (given
+  // in physical space) to the normalized image space. The Spacing is needed
+  // to determine the transform. Default is (1.0, 1.0). It is possible to
+  // disable vector transformation by setting TransformVectors to 0.
+  //vtkSetVector2Macro(GridSpacings, double);
+  //vtkGetVector2Macro(GridSpacings, double);
+
+  // Description:
+  // Normalize vectors during integration. When set (the default) the input vector field
+  // is normalized during integration, and each integration occurs over the same arc length.
+  // When not set, each integration occurs over an arc length proportional to the field
+  // magnitude, as is customary in traditional numerical methods. See "Imaging Vector
+  // Fields Using Line Integral Convolution" for an example where normalization is used.
+  // See "Image Space Based Visualization of Unsteady Flow on Surfaces" for an example
+  // where no normalization is used.
+  void SetNormalizeVectors(int val);
+  vtkGetMacro(NormalizeVectors, int);
+
+  // Description:
+  // The MaskThreshold controls blanking of the LIC texture. For fragments with
+  // |V|<threshold the LIC fragment is not rendered. The default value is 0.0.
+  //
+  // For surface LIC, MaskThreshold units are in the original vector space. For image
+  // LIC, be aware that the vector field is transformed to image space while the mask
+  // threshold is not; therefore the mask threshold must be specified in image space
+  // units.
+  vtkSetClampMacro(MaskThreshold, double, -1.0, VTK_FLOAT_MAX);
+  vtkGetMacro(MaskThreshold, double);
+
+
+  // Description:
+  // Compute the LIC on the entire vector field texture.
+  vtkTextureObject *Execute(
+        vtkTextureObject *vectorTex,
+        vtkTextureObject *noiseTex);
+
+  // Description:
+  // Compute the LIC on the indicated subset of the vector field
+  // texture.
+  vtkTextureObject *Execute(
+        const int extent[4],
+        vtkTextureObject *vectorTex,
+        vtkTextureObject *noiseTex);
+
+  //BTX
+  // Description:
+  // Compute LIC over the desired subset of the input texture. The
+  // result is copied into the desired subset of the provided output
+  // texture.
+  //
+  // inputTexExtent  : screen space extent of the input texture
+  // vectorExtent    : part of the input extent that has valid vectors
+  // licExtent       : part of the input extent to compute on
+  // outputTexExtent : screen space extent of the output texture
+  // outputExtent    : part of the output texture to store the result
+  //
+  vtkTextureObject *Execute(
+        const vtkPixelExtent &inputTexExtent,
+        const std::deque<vtkPixelExtent> &vectorExtent,
+        const std::deque<vtkPixelExtent> &licExtent,
+        vtkTextureObject *vectorTex,
+        vtkTextureObject *maskVectorTex,
+        vtkTextureObject *noiseTex);
+  //ETX
+
+  // Description:
+  // Convenience functions to ensure that the input textures are
+  // configured correctly.
+  static
+  void SetVectorTexParameters(vtkTextureObject *vectors);
+
+  static
+  void SetNoiseTexParameters(vtkTextureObject *noise);
+
+  //BTX
+  // Description:
+  // Set the communicator to use during parallel operation.
+  // The communicator will not be duplicated or reference
+  // counted for performance reasons, thus the caller should
+  // hold/manage a reference to the communicator during use
+  // of the LIC object.
+  virtual void SetCommunicator(vtkPainterCommunicator *){}
+  virtual vtkPainterCommunicator *GetCommunicator();
+
+  // Description:
+  // For parallel operation, find the global min/max.
+  // The min/max arguments are in/out.
+  virtual void GetGlobalMinMax(
+        vtkPainterCommunicator*,
+        float&,
+        float&) {}
+  //ETX
+
+  // Description:
+  // Methods used for parallel benchmarks. Use cmake to define
+  // vtkLineIntegralConvolution2DTIME to enable benchmarks.
+  // During each update timing information is stored; it can
+  // be written to disk by calling WriteLog.
+  virtual void WriteTimerLog(const char *){}
+
+protected:
+  vtkLineIntegralConvolution2D();
+  virtual ~vtkLineIntegralConvolution2D();
+
+  void SetVTShader(vtkShaderProgram2 *prog);
+  void SetLIC0Shader(vtkShaderProgram2 *prog);
+  void SetLICIShader(vtkShaderProgram2 *prog);
+  void SetLICNShader(vtkShaderProgram2 *prog);
+  void SetEEShader(vtkShaderProgram2 *prog);
+  void SetCEShader(vtkShaderProgram2 *prog);
+  void SetAAHShader(vtkShaderProgram2 *prog);
+  void SetAAVShader(vtkShaderProgram2 *prog);
+
+  void BuildShaders();
+
+  void RenderQuad(
+        float computeBounds[4],
+        vtkPixelExtent computeExtent);
+
+  vtkTextureObject *AllocateBuffer(unsigned int texSize[2]);
+
+  // Description:
+  // Convenience functions to ensure that the input textures are
+  // configured correctly.
+  void SetNoise2TexParameters(vtkTextureObject *noise);
+
+  // Description:
+  // Methods used for parallel benchmarks. Use cmake to define
+  // vtkSurfaceLICPainterTIME to enable benchmarks. During each
+  // update timing information is stored; it can be written to
+  // disk by calling WriteLog (defined in vtkSurfaceLICPainter).
+  virtual void StartTimerEvent(const char *){}
+  virtual void EndTimerEvent(const char *){}
+
+protected:
+  vtkPainterCommunicator *Comm;
+
+  vtkWeakPointer<vtkRenderWindow> Context;
+  vtkFrameBufferObject2 *FBO;
+
+  int ShadersNeedBuild;
+  vtkShaderProgram2 *VTShader;
+  vtkShaderProgram2 *LIC0Shader;
+  vtkShaderProgram2 *LICIShader;
+  vtkShaderProgram2 *LICNShader;
+  vtkShaderProgram2 *EEShader;
+  vtkShaderProgram2 *CEShader;
+  vtkShaderProgram2 *AAHShader;
+  vtkShaderProgram2 *AAVShader;
+
+  int     NumberOfSteps;
+  double  StepSize;
+  int     EnhancedLIC;
+  int     EnhanceContrast;
+  double  LowContrastEnhancementFactor;
+  double  HighContrastEnhancementFactor;
+  int     AntiAlias;
+  int     NoiseTextureLookupCompatibilityMode;
+  double  MaskThreshold;
+  int     TransformVectors;
+  int     NormalizeVectors;
+  int     ComponentIds[2];
+  double  MaxNoiseValue;
+
+private:
+  vtkLineIntegralConvolution2D(const vtkLineIntegralConvolution2D &); // Not implemented.
+  void operator = (const vtkLineIntegralConvolution2D &);             // Not implemented.
+};
+
+#endif
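
The Execute() overloads declared above are exercised later in this same patch by
vtkStructuredGridLIC2D::RequestData. As a rough, hypothetical sketch of the calling
sequence (method names are taken from the declarations above and from that later
usage; the context, the two textures, and the numeric parameter values are
placeholders, not part of this commit):

    // Sketch only: run LIC over a whole vector-field texture.
    #include "vtkLineIntegralConvolution2D.h"
    #include "vtkRenderWindow.h"
    #include "vtkTextureObject.h"

    vtkTextureObject *ComputeLIC(vtkRenderWindow *context,
                                 vtkTextureObject *vecTex,
                                 vtkTextureObject *noiseTex)
    {
      vtkLineIntegralConvolution2D *lic = vtkLineIntegralConvolution2D::New();
      lic->SetContext(context);        // OpenGL context the shaders run in
      lic->SetNumberOfSteps(40);       // integration steps (placeholder value)
      lic->SetStepSize(0.01);          // step size in normalized image space
      lic->SetComponentIds(0, 1);      // which two vector components form (X, Y)

      // make sure the textures carry the parameters the shaders expect
      vtkLineIntegralConvolution2D::SetVectorTexParameters(vecTex);
      vtkLineIntegralConvolution2D::SetNoiseTexParameters(noiseTex);

      // compute the LIC over the entire vector field texture
      vtkTextureObject *licTex = lic->Execute(vecTex, noiseTex);
      lic->Delete();
      return licTex;                   // caller releases the returned texture
    }

The same pattern, with per-filter parameters, appears in vtkStructuredGridLIC2D.cxx
further down in this patch.
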
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_AA.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_AA.glsl
new file mode 100644
index 0000000..4bcea8a
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_AA.glsl
@@ -0,0 +1,88 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_AA.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// Anti-alias stage in vtkLineIntegralConvolution2D
+
+#version 120
+
+uniform sampler2D texLIC;         // inout texture
+uniform vec2      uLICTexSize;    // input texture size
+uniform vec4      uComputeBounds; // valid region of texture
+
+// fragment size
+float tcDx = 1.0 / uLICTexSize.x;
+float tcDy = 1.0 / uLICTexSize.y;
+
+// 3x3 Gaussian kernel
+float K[9] = float[9](
+  0.0191724, 0.100120, 0.0191724,
+  0.1001200, 0.522831, 0.1001200,
+  0.0191724, 0.100120, 0.0191724
+  );
+
+// tex lictc neighbor offsets
+vec2 fragDx[9] = vec2[9](
+  vec2(-tcDx,  tcDy),  vec2(0.0,  tcDy),  vec2(tcDx,  tcDy),
+  vec2(-tcDx,  0.0 ),  vec2(0.0,  0.0 ),  vec2(tcDx,  0.0 ),
+  vec2(-tcDx, -tcDy),  vec2(0.0, -tcDy),  vec2(tcDx, -tcDy)
+  );
+
+// valid domain (because no ghost fragments)
+vec2 validBox[2] = vec2[2](
+  vec2(      tcDx,       tcDy),
+  vec2(1.0 - tcDx, 1.0 - tcDy)
+  );
+
+// valid domain (because no ghost fragments)
+vec2 computeBounds0 = vec2(uComputeBounds[0] + tcDx, uComputeBounds[2] + tcDy);
+vec2 computeBounds1 = vec2(uComputeBounds[1] - tcDx, uComputeBounds[3] - tcDy);
+
+// determine if the fragment is outside the valid region
+bool OutOfBounds( vec2 tc )
+{
+  return any(lessThan(tc, computeBounds0))
+   || any(greaterThan(tc, computeBounds1));
+}
+
+// determine if the fragment was masked
+bool Masked(float val)
+{
+  return val == 1.0;
+}
+
+
+
+void main( void )
+{
+  vec2 lictc = gl_TexCoord[0].st;
+  vec4 lic;
+  bool dontUse = false;
+  float conv = 0.0;
+  for (int i=0; i<9; ++i)
+    {
+    vec2 tc = lictc + fragDx[i];
+    lic = texture2D( texLIC, tc );
+    dontUse = dontUse || OutOfBounds(tc) || Masked(lic.g);
+    conv = conv + K[i] * lic.r;
+    }
+
+  lic = texture2D( texLIC, lictc );
+
+  conv
+    = ( 1.0 - float( dontUse ) ) * conv
+    + float( dontUse ) * lic.r;
+
+  gl_FragData[0] = vec4( conv, lic.g, 0.0, 1.0 );
+}
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_AAH.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_AAH.glsl
new file mode 100644
index 0000000..a0efc96
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_AAH.glsl
@@ -0,0 +1,56 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_AAH.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// Anti-alias stage in vtkLineIntegralConvolution2D
+// horizontal pass of a Gaussian convolution
+
+#version 120 // for arrays
+
+uniform sampler2D texLIC; // input texture
+uniform float     uDx;    // fragment size
+
+// neighbor offsets
+vec2 fragDx[3] = vec2[3](vec2(-uDx,0.0), vec2(0.0,0.0), vec2(uDx,0.0));
+
+// factored 3x3 Gaussian kernel
+// K^T*K = G
+float K[3] = float[3](0.141421356, 0.707106781, 0.141421356);
+
+// determine if the fragment was masked
+bool Masked(float val){ return val != 0.0; }
+
+void main(void)
+{
+  vec2 lictc = gl_TexCoord[0].st;
+  vec4 lic[3];
+  bool dontUse = false;
+  float conv = 0.0;
+  for (int i=0; i<3; ++i)
+    {
+    vec2 tc = lictc + fragDx[i];
+    lic[i] = texture2D(texLIC, tc);
+    dontUse = dontUse || Masked(lic[i].g);
+    conv = conv + K[i] * lic[i].r;
+    }
+  // output is (conv, mask, skip, 1)
+  if (dontUse)
+    {
+    gl_FragData[0] = vec4(lic[1].rg, 1.0, 1.0);
+    }
+  else
+    {
+    gl_FragData[0] = vec4(conv, lic[1].gb, 1.0);
+    }
+}
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_AAV.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_AAV.glsl
new file mode 100644
index 0000000..1349f96
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_AAV.glsl
@@ -0,0 +1,56 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_AAV.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// Anti-alias stage in vtkLineIntegralConvolution2D
+// vertical pass of a Gaussian convolution
+
+#version 120 // for arrays
+
+uniform sampler2D texLIC; // input texture
+uniform float     uDy;    // fragment size
+
+// neighbor offsets
+vec2 fragDy[3] = vec2[3](vec2(0.0,-uDy), vec2(0.0,0.0), vec2(0.0,uDy));
+
+// factored 3x3 Gaussian kernel
+// K^T*K = G
+float K[3] = float[3](0.141421356, 0.707106781, 0.141421356);
+
+// determine if the fragment was masked
+bool Masked(float val){ return val != 0.0; }
+
+void main(void)
+{
+  vec2 lictc = gl_TexCoord[0].st;
+  vec4 lic[3];
+  bool dontUse = false;
+  float conv = 0.0;
+  for (int i=0; i<3; ++i)
+    {
+    vec2 tc = lictc + fragDy[i];
+    lic[i] = texture2D(texLIC, tc);
+    dontUse = dontUse || Masked(lic[i].g);
+    conv = conv + K[i] * lic[i].r;
+    }
+  // output is (conv, mask, skip, 1)
+  if (dontUse)
+    {
+    gl_FragData[0] = vec4(lic[1].rg, 1.0, 1.0);
+    }
+  else
+    {
+    gl_FragData[0] = vec4(conv, lic[1].gb, 1.0);
+    }
+}
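
The AAH/AAV pair above implements the same smoothing as the single-pass AA shader,
but as two separable 1-D passes. The "K^T*K = G" note refers to that separability:
taking the outer product of the 1-D kernel with itself,

    K^T K =
    \begin{pmatrix} 0.1414 \\ 0.7071 \\ 0.1414 \end{pmatrix}
    \begin{pmatrix} 0.1414 & 0.7071 & 0.1414 \end{pmatrix}
    \approx
    \begin{pmatrix}
      0.02 & 0.10 & 0.02 \\
      0.10 & 0.50 & 0.10 \\
      0.02 & 0.10 & 0.02
    \end{pmatrix},

which closely approximates (though is not exactly equal to) the normalized 3x3
kernel hard-coded in vtkLineIntegralConvolution2D_AA.glsl
(0.0191724, 0.100120, 0.522831).
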
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_CE.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_CE.glsl
new file mode 100644
index 0000000..7fff360
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_CE.glsl
@@ -0,0 +1,38 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_CE.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// gray scale contrast enhance stage implemented via histogram stretching
+// if the min and max are tweaked it can generate out-of-range values
+// these will be clamped in 0 to 1
+
+#version 110
+
+uniform sampler2D texLIC;  // most recent lic pass
+uniform float uMin;        // min gray scale color value
+uniform float uMaxMinDiff; // max-min
+
+void main( void )
+{
+  vec4 lic = texture2D(texLIC, gl_TexCoord[0].st);
+  if (lic.g!=0.0)
+    {
+    gl_FragData[0] = lic;
+    }
+  else
+    {
+    float CElic = clamp((lic.r - uMin)/uMaxMinDiff, 0.0, 1.0);
+    gl_FragData[0] = vec4(CElic, lic.gb, 1.0);
+    }
+}
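
In formula form, the contrast-enhancement stage above is a simple histogram stretch
of the LIC gray value L using the bounds passed in through uMin and uMaxMinDiff:

    L' = \mathrm{clamp}\left( \frac{L - L_{\min}}{L_{\max} - L_{\min}},\; 0,\; 1 \right),

with masked fragments (lic.g != 0) passed through unchanged.
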
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_EE.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_EE.glsl
new file mode 100644
index 0000000..1b4fcb0
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_EE.glsl
@@ -0,0 +1,73 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_EE.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// high-pass filter stage employed by vtkLineIntegralConvolution2D
+// between LIC pass 1 and LIC pass 2. The filtered LIC pass 1 output
+// becomes the noise for pass 2.
+
+#version 120 // for arrays
+
+uniform sampler2D texLIC; // most recent lic pass
+uniform float     uDx;    // fragment size
+uniform float     uDy;    // fragment size
+
+// kernel for simple laplace edge enhancement.
+// p=Laplace(p)+p
+float K[9] = float[9](
+  -1.0, -1.0, -1.0,
+  -1.0,  9.0, -1.0,
+  -1.0, -1.0, -1.0
+  );
+
+// tex coord neighbor offsets
+vec2 fragDx[9] = vec2[9](
+  vec2(-uDx, uDy), vec2(0.0, uDy), vec2(uDx, uDy),
+  vec2(-uDx, 0.0), vec2(0.0, 0.0), vec2(uDx, 0.0),
+  vec2(-uDx,-uDy), vec2(0.0,-uDy), vec2(uDx,-uDy)
+  );
+
+// determine if the fragment was masked
+bool Masked(float val) { return val != 0.0; }
+
+void main(void)
+{
+  vec2 lictc = gl_TexCoord[0].st;
+
+  // compute the convolution, but don't use the convolved values if
+  // any fragments on the stencil are masked. Fragments outside
+  // the valid domain are masked during initialization, and the
+  // texture wrap parameters are clamp-to-border with a border
+  // color that carries the masked flag.
+  float conv = 0.0;
+  bool dontUse = false;
+  for (int i=0; i<9; ++i)
+    {
+    vec2 tc = lictc + fragDx[i];
+    vec4 lic = texture2D(texLIC, tc);
+    dontUse = dontUse || Masked(lic.g);
+    conv = conv + K[i] * lic.r;
+    }
+
+  if (dontUse)
+    {
+    conv = texture2D(texLIC, lictc).r;
+    }
+  else
+    {
+    conv = clamp(conv, 0.0, 1.0);
+    }
+
+  gl_FragData[0] = vec4(conv, 0.0, 0.0, 1.0);
+}
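
The stencil used above is the standard Laplacian sharpening kernel; it can be read
as the identity plus a (sign-flipped) 8-neighbor Laplacian, so the coefficients sum
to 1, flat regions pass through unchanged, and edges are amplified, matching the
"p = Laplace(p) + p" note in the shader:

    \begin{pmatrix} -1 & -1 & -1 \\ -1 & 9 & -1 \\ -1 & -1 & -1 \end{pmatrix}
    =
    \begin{pmatrix} 0 & 0 & 0 \\ 0 & 1 & 0 \\ 0 & 0 & 0 \end{pmatrix}
    +
    \begin{pmatrix} -1 & -1 & -1 \\ -1 & 8 & -1 \\ -1 & -1 & -1 \end{pmatrix}
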
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_LIC0.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_LIC0.glsl
new file mode 100644
index 0000000..1f3563b
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_LIC0.glsl
@@ -0,0 +1,89 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_LIC0.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+/**
+This shader initializes the convolution for the LIC computation.
+*/
+
+#version 110
+
+uniform sampler2D texMaskVectors;
+uniform sampler2D texNoise;
+uniform sampler2D texLIC;
+
+uniform int   uStepNo;         // in step 0 initialize lic and seeds, else just seeds
+uniform int   uPassNo;         // in pass 1 hpf of pass 0 is convolved.
+uniform float uMaskThreshold;  // if |V| < uMaskThreshold render transparent
+uniform vec2  uNoiseBoundsPt1; // tc of upper right pt of noise texture
+
+// convert from vector coordinate space to noise coordinate space.
+// the noise texture is tiled across the *whole* domain
+vec2 VectorTCToNoiseTC(vec2 vectc)
+{
+  return vectc/uNoiseBoundsPt1;
+}
+
+// get the texture coordinate used to look up the noise value.
+// this depends on the pass number.
+vec2 getNoiseTC(vec2 vectc)
+{
+  // in pass 1 : convert from vector tc to noise tc
+  // in pass 2 : use vector tc
+  if (uPassNo == 0)
+    {
+    return VectorTCToNoiseTC(vectc);
+    }
+  else
+    {
+    return vectc;
+    }
+}
+
+// look up noise value at the given location. The location
+// is supplied in vector texture coordinates, hence the
+// need to convert to noise texture coordinates.
+float getNoise(vec2 vectc)
+{
+  return texture2D(texNoise, getNoiseTC(vectc)).r;
+}
+
+void main(void)
+{
+  vec2 vectc = gl_TexCoord[0].st;
+
+  // lic => (convolution, mask, 0, step count)
+  if (uStepNo == 0)
+    {
+    float maskCriteria = length(texture2D(texMaskVectors, vectc).xyz);
+    float maskFlag;
+    if (maskCriteria <= uMaskThreshold)
+      {
+      maskFlag = 1.0;
+      }
+    else
+      {
+      maskFlag = 0.0;
+      }
+    float noise = getNoise(vectc);
+    gl_FragData[0] = vec4(noise, maskFlag, 0.0, 1.0);
+    }
+  else
+    {
+    gl_FragData[0] = texture2D(texLIC, vectc);
+    }
+
+  // initial seed
+  gl_FragData[1] = vec4(vectc, 0.0, 1.0);
+}
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_LICI.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_LICI.glsl
new file mode 100644
index 0000000..37ae1a9
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_LICI.glsl
@@ -0,0 +1,124 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_LICI.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+#version 110
+
+uniform sampler2D  texVectors;
+uniform sampler2D  texNoise;
+uniform sampler2D  texLIC;
+uniform sampler2D  texSeedPts;
+
+uniform int   uPassNo;          // in pass 1 hpf of pass 0 is convolved.
+uniform float uStepSize;        // step size in parametric space
+
+uniform vec2  uNoiseBoundsPt1;  // tc of upper right pt of noise texture
+
+// Lookup the vector at the given point. Value will be normalized if
+// normalization is enabled.  See vtkLineIntegralConvolution2D.cxx for
+// implementation
+vec2 getVector(vec2 vectc);
+
+// convert from vector coordinate space to noise coordinate space.
+// the noise texture is tiled across the whole domain
+vec2 VectorTCToNoiseTC(vec2 vectc)
+{
+  return vectc/uNoiseBoundsPt1;
+}
+
+// get the texture coordinate used to look up the noise value.
+// in pass 1 the noise texture is repeatedly tiled across
+// the computational domain.
+vec2 getNoiseTC(vec2 tc)
+{
+  if (uPassNo == 0)
+    {
+    return VectorTCToNoiseTC(tc);
+    }
+  else
+    {
+    return tc;
+    }
+}
+
+// look up noise value at the given location. The location
+// is supplied in vector texture coordinates, hence the need
+// to convert to either noise or lic texture coordinates in
+// pass 1 and 2 respectively.
+float getNoise(vec2 vectc)
+{
+  return texture2D(texNoise, getNoiseTC(vectc)).r;
+}
+
+// fourth-order Runge-Kutta streamline integration.
+// no bounds checks are made, therefore it's essential
+// to have the entire texture initialized to 0, the wrap
+// mode set to clamp-to-border, and the border color 0.
+// a flag is set if the step was taken; keeping
+// an accurate step count is necessary to prevent
+// boundary artifacts. Don't count the step if
+// all vector lookups are identically 0, which is
+// a proxy for "stepped outside the valid domain".
+vec2 rk4(vec2 pt0, float dt, out bool count)
+{
+  count=true;
+  float dtHalf = dt * 0.5;
+  vec2 pt1;
+
+  vec2 v0 = getVector(pt0);
+  pt1 = pt0 + v0 * dtHalf;
+
+  vec2 v1 = getVector(pt1);
+  pt1 = pt0 + v1 * dtHalf;
+
+  vec2 v2 = getVector(pt1);
+  pt1 = pt0 + v2 * dt;
+
+  vec2 v3 = getVector(pt1);
+  vec2 vSum = v0 + v1 + v1 + v2 + v2 + v3;
+
+  if (vSum == vec2(0.0,0.0))
+    {
+    count = false;
+    }
+
+  pt1 = pt0 + (vSum) * (dt * (1.0/6.0));
+
+ return pt1;
+}
+
+void main(void)
+{
+  vec2 lictc = gl_TexCoord[0].st;
+  vec4 lic = texture2D(texLIC, lictc);
+  vec2 pt0 = texture2D(texSeedPts, lictc).st;
+
+  bool count;
+  vec2 pt1 = rk4(pt0, uStepSize, count);
+
+  if (count)
+    {
+    // accumulate lic step
+    // (lic, mask, 0, step count)
+    float noise = getNoise(pt1);
+    gl_FragData[0] = vec4(lic.r + noise, lic.g, 0.0, lic.a + 1.0);
+    gl_FragData[1] = vec4(pt1, 0.0, 1.0);
+    }
+  else
+    {
+    // keep existing values
+    gl_FragData[0] = lic;
+    gl_FragData[1] = vec4(pt0, 0.0, 1.0);
+    }
+}
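
The rk4() routine above is the classic fourth-order Runge-Kutta update for the
(optionally normalized) vector field v; in the notation of the code,

    v_0 = v(p_0),
    v_1 = v(p_0 + \tfrac{\Delta t}{2} v_0),
    v_2 = v(p_0 + \tfrac{\Delta t}{2} v_1),
    v_3 = v(p_0 + \Delta t\, v_2),
    p_1 = p_0 + \tfrac{\Delta t}{6} (v_0 + 2 v_1 + 2 v_2 + v_3),

and the step is not counted (count = false) when all four samples are identically
zero, i.e. when the streamline has left the valid domain.
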
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_LICN.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_LICN.glsl
new file mode 100644
index 0000000..e9c2c9f
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_LICN.glsl
@@ -0,0 +1,32 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_LICN.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+/**
+This shader finalizes the convolution for the LIC computation
+by applying the normalization, e.g. if a box kernel is used the
+normalization factor is the number of steps taken.
+*/
+
+#version 110
+
+uniform sampler2D texLIC;
+
+void main(void)
+{
+  vec4 conv = texture2D(texLIC, gl_TexCoord[0].st);
+  conv.r = conv.r/conv.a;
+  // lic => (convolution, mask, 0, 1)
+  gl_FragData[0] = vec4(conv.rg , 0.0, 1.0);
+}
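
Since the LIC0 stage seeds the red channel with the noise value at the seed point
and each LICI step adds one more noise sample while incrementing the alpha channel,
the division conv.r / conv.a above realizes the box-kernel LIC average

    L(x) = \frac{1}{N} \sum_{i} \mathrm{noise}\big(\sigma_x(s_i)\big),

where \sigma_x is the streamline seeded at x and N is the number of samples
accumulated along it (the seed sample plus one per counted integration step).
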
diff --git a/Rendering/LIC/vtkLineIntegralConvolution2D_VT.glsl b/Rendering/LIC/vtkLineIntegralConvolution2D_VT.glsl
new file mode 100644
index 0000000..4d97d7e
--- /dev/null
+++ b/Rendering/LIC/vtkLineIntegralConvolution2D_VT.glsl
@@ -0,0 +1,33 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkLineIntegralConvolution2D_VT.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// move the vector field to normalized image space
+// pre-processing for vtkLineIntegralConvolution2D
+
+#version 110
+
+uniform sampler2D texVectors; // input texture
+uniform vec2      uTexSize;   // size of texture
+
+// select vector components.
+// see vtkLineIntegralConvolution2D.cxx for implementation
+vec2 getSelectedComponents(vec4 V);
+
+void main(void)
+{
+  vec2 V = getSelectedComponents(texture2D(texVectors, gl_TexCoord[0].st));
+  V = V/uTexSize;
+  gl_FragData[0] = vec4(V, 0.0, 1.0);
+}
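
The division by uTexSize above is the normalized-image-space transform described
under TransformVectors in vtkLineIntegralConvolution2D.h earlier in this patch:
with (c_0, c_1) the selected ComponentIds and (W, H) = uTexSize, the shader outputs

    \hat{V} = \left( \frac{V_{c_0}}{W},\; \frac{V_{c_1}}{H} \right),

i.e. the chosen two components of the vector field expressed in normalized image
space.
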
diff --git a/Rendering/LIC/vtkPainterCommunicator.h b/Rendering/LIC/vtkPainterCommunicator.h
new file mode 100644
index 0000000..19d5e4d
--- /dev/null
+++ b/Rendering/LIC/vtkPainterCommunicator.h
@@ -0,0 +1,72 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPainterCommunicator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPainterCommunicator -- A communicator containing only
+// ranks that will execute a painter chain.
+//
+// .SECTION Description
+// A communicator that can safely be used inside a painter.
+// A simple container holding a handle to an MPI communicator.
+// This API is sufficient to allow for control flow with/without
+// MPI. The parallel parts of the code should use the derived
+// class vtkPPainterCommunicator.
+#ifndef __vtkPainterCommunicator_h
+#define __vtkPainterCommunicator_h
+
+#include "vtkRenderingLICModule.h" // for export macro
+
+class VTKRENDERINGLIC_EXPORT vtkPainterCommunicator
+{
+public:
+  vtkPainterCommunicator(){}
+  virtual ~vtkPainterCommunicator(){}
+
+  // Description:
+  // Copy and assignment operators. Both use Copy internally
+  // and do not take ownership.
+  vtkPainterCommunicator(const vtkPainterCommunicator &other)
+    { this->Copy(&other, false); }
+
+  vtkPainterCommunicator &operator=(const vtkPainterCommunicator &other)
+    { this->Copy(&other, false); return *this; }
+
+  // Description:
+  // Copy the communicator. The flag indicates if ownership
+  // should be assumed. The owner is responsible for freeing
+  // the communicator.
+  virtual void Copy(const vtkPainterCommunicator *, bool){}
+
+  // Description:
+  // Duplicate the communicator.
+  virtual void Duplicate(const vtkPainterCommunicator *){}
+
+  // Description:
+  // Query MPI about the communicator.
+  virtual int GetRank(){ return 0; }
+  virtual int GetSize(){ return 1; }
+  virtual bool GetIsNull(){ return false; }
+
+  // Description:
+  // Query MPI about the world communicator.
+  virtual int GetWorldRank(){ return 0; }
+  virtual int GetWorldSize(){ return 1; }
+
+  // Description:
+  // Query MPI about its state.
+  virtual bool GetMPIInitialized(){ return false; }
+  virtual bool GetMPIFinalized(){ return true; }
+};
+
+#endif
+// VTK-HeaderTest-Exclude: vtkPainterCommunicator.h
diff --git a/Rendering/LIC/vtkPixelTransfer.cxx b/Rendering/LIC/vtkPixelTransfer.cxx
new file mode 100644
index 0000000..d49e70d
--- /dev/null
+++ b/Rendering/LIC/vtkPixelTransfer.cxx
@@ -0,0 +1,32 @@
+#include "vtkPixelTransfer.h"
+
+//-----------------------------------------------------------------------------
+int vtkPixelTransfer::Blit(
+       const vtkPixelExtent &srcWholeExt,
+       const vtkPixelExtent &srcExt,
+       const vtkPixelExtent &destWholeExt,
+       const vtkPixelExtent &destExt,
+       int nSrcComps,
+       int srcType,
+       void *srcData,
+       int nDestComps,
+       int destType,
+       void *destData)
+{
+  // first layer of dispatch
+  switch(srcType)
+    {
+    vtkTemplateMacro(
+        return vtkPixelTransfer::Blit(
+            srcWholeExt,
+            srcExt,
+            destWholeExt,
+            destExt,
+            nSrcComps,
+            (VTK_TT*)srcData,
+            nDestComps,
+            destType,
+            destData));
+    }
+  return 0;
+}
diff --git a/Rendering/LIC/vtkPixelTransfer.h b/Rendering/LIC/vtkPixelTransfer.h
new file mode 100644
index 0000000..833eb61
--- /dev/null
+++ b/Rendering/LIC/vtkPixelTransfer.h
@@ -0,0 +1,233 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPixelTransfer.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPixelTransfer -- For movement of pixel data described by
+// pixel extents
+//
+// .SECTION Description
+// Class to handle non-contiguous transfers of data described
+// by pixel extents within a process. For transferring data between
+// processes see vtkPPixelTransfer.
+//
+// .SECTION See also
+// vtkPixelExtent vtkPPixelTransfer
+
+#ifndef __vtkPixelTransfer_h
+#define __vtkPixelTransfer_h
+
+#include "vtkRenderingLICModule.h" // for export
+#include "vtkSetGet.h" // for macros
+#include "vtkPixelExtent.h" // for pixel extent
+#include <cstring> // for memcpy
+
+class VTKRENDERINGLIC_EXPORT vtkPixelTransfer
+{
+public:
+  vtkPixelTransfer(){}
+  ~vtkPixelTransfer(){}
+
+  // Description:
+  // For memory to memory transfers. Convenience API for working
+  // with the VTK type enum rather than C data types, over a single simple extent.
+  static
+  int Blit(
+         const vtkPixelExtent &ext,
+         int nComps,
+         int srcType,
+         void *srcData,
+         int destType,
+         void *destData);
+
+  // Description:
+  // For memory to memory transfers. Convenience API for working
+  // with the VTK type enum rather than C data types.
+  static
+  int Blit(
+         const vtkPixelExtent &srcWhole,
+         const vtkPixelExtent &srcSubset,
+         const vtkPixelExtent &destWhole,
+         const vtkPixelExtent &destSubset,
+         int nSrcComps,
+         int srcType,
+         void *srcData,
+         int nDestComps,
+         int destType,
+         void *destData);
+
+  // Description:
+  // for local memory to memory transfers
+  template<typename SOURCE_TYPE, typename DEST_TYPE>
+  static
+  int Blit(
+         const vtkPixelExtent &srcWhole,
+         const vtkPixelExtent &srcSubset,
+         const vtkPixelExtent &destWhole,
+         const vtkPixelExtent &destSubset,
+         int nSrcComps,
+         SOURCE_TYPE *srcData,
+         int nDestComps,
+         DEST_TYPE *destData);
+
+private:
+  // dispatch helper for the VTK data type enum
+  template<typename SOURCE_TYPE>
+  static
+  int Blit(
+         const vtkPixelExtent &srcWhole,
+         const vtkPixelExtent &srcSubset,
+         const vtkPixelExtent &destWhole,
+         const vtkPixelExtent &destSubset,
+         int nSrcComps,
+         SOURCE_TYPE *srcData,
+         int nDestComps,
+         int destType,
+         void *destData);
+};
+
+//-----------------------------------------------------------------------------
+inline
+int vtkPixelTransfer::Blit(
+         const vtkPixelExtent &ext,
+         int nComps,
+         int srcType,
+         void *srcData,
+         int destType,
+         void *destData)
+{
+  return vtkPixelTransfer::Blit(
+        ext,
+        ext,
+        ext,
+        ext,
+        nComps,
+        srcType,
+        srcData,
+        nComps,
+        destType,
+        destData);
+}
+
+
+//-----------------------------------------------------------------------------
+template<typename SOURCE_TYPE>
+int vtkPixelTransfer::Blit(
+       const vtkPixelExtent &srcWholeExt,
+       const vtkPixelExtent &srcExt,
+       const vtkPixelExtent &destWholeExt,
+       const vtkPixelExtent &destExt,
+       int nSrcComps,
+       SOURCE_TYPE *srcData,
+       int nDestComps,
+       int destType,
+       void *destData)
+{
+  // second layer of dispatch
+  switch(destType)
+    {
+    vtkTemplateMacro(
+        return vtkPixelTransfer::Blit(
+            srcWholeExt,
+            srcExt,
+            destWholeExt,
+            destExt,
+            nSrcComps,
+            srcData,
+            nDestComps,
+            (VTK_TT*)destData););
+    }
+  return 0;
+}
+
+//-----------------------------------------------------------------------------
+template<typename SOURCE_TYPE, typename DEST_TYPE>
+int vtkPixelTransfer::Blit(
+       const vtkPixelExtent &srcWholeExt,
+       const vtkPixelExtent &srcSubset,
+       const vtkPixelExtent &destWholeExt,
+       const vtkPixelExtent &destSubset,
+       int nSrcComps,
+       SOURCE_TYPE *srcData,
+       int nDestComps,
+       DEST_TYPE *destData)
+{
+  if ( (srcData == NULL) || (destData == NULL) )
+    {
+    return -1;
+    }
+  if ( (srcWholeExt == srcSubset)
+    && (destWholeExt == destSubset)
+    && (nSrcComps == nDestComps) )
+    {
+    // buffers are contiguous
+    size_t n = srcWholeExt.Size()*nSrcComps;
+    for (size_t i=0; i<n; ++i)
+      {
+      destData[i] = static_cast<DEST_TYPE>(srcData[i]);
+      }
+    }
+  else
+    {
+    // buffers are not contiguous
+    int tmp[2];
+
+    // get the dimensions of the arrays
+    srcWholeExt.Size(tmp);
+    int swnx = tmp[0];
+
+    destWholeExt.Size(tmp);
+    int dwnx = tmp[0];
+
+    // move from logical extent to memory extent
+    vtkPixelExtent srcExt(srcSubset);
+    srcExt.Shift(srcWholeExt);
+
+    vtkPixelExtent destExt(destSubset);
+    destExt.Shift(destWholeExt);
+
+    // get size of sub-set to copy (it's the same in src and dest)
+    int nxny[2];
+    srcExt.Size(nxny);
+
+    // use smaller ncomps for loop index to avoid reading/writing
+    // invalid mem
+    int nCopyComps = nSrcComps < nDestComps ? nSrcComps : nDestComps;
+
+    for (int j=0; j<nxny[1]; ++j)
+      {
+      int sjj = swnx*(srcExt[2]+j)+srcExt[0];
+      int djj = dwnx*(destExt[2]+j)+destExt[0];
+      for (int i=0; i<nxny[0]; ++i)
+        {
+        int sidx = nSrcComps*(sjj+i);
+        int didx = nDestComps*(djj+i);
+        // copy values from source
+        for (int p=0; p<nCopyComps; ++p)
+          {
+          destData[didx+p] = static_cast<DEST_TYPE>(srcData[sidx+p]);
+          }
+        // ensure all dest comps are initialized
+        for (int p=nCopyComps; p<nDestComps; ++p)
+          {
+          destData[didx+p] = static_cast<DEST_TYPE>(0);
+          }
+        }
+      }
+    }
+  return 0;
+}
+
+ostream &operator<<(ostream &os, const vtkPixelTransfer &gt);
+
+#endif
+// VTK-HeaderTest-Exclude: vtkPixelTransfer.h
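
A small, hypothetical example of the typed Blit() overload declared above; it
mirrors the call made later in vtkStructuredGridLIC2D::RequestData, copying a
4-component float buffer into a 3-component one over the same extent (the extra
source component is simply dropped, per the nCopyComps logic above). The function
name and buffers are illustrative only:

    #include "vtkPixelExtent.h"
    #include "vtkPixelTransfer.h"

    // Copy an RGBA float image into a tightly packed RGB float image.
    void CopyRGBAtoRGB(float *rgba, float *rgb, int width, int height)
    {
      // whole extent and subset are identical, so both buffers are
      // traversed in full; only the first 3 of 4 components are copied.
      vtkPixelExtent ext(width, height);
      vtkPixelTransfer::Blit<float, float>(
            ext, ext,    // source whole extent / subset
            ext, ext,    // destination whole extent / subset
            4, rgba,     // 4 source components
            3, rgb);     // 3 destination components
    }

vtkStructuredGridLIC2D.cxx below uses exactly this form to move the downloaded LIC
texture into a 3-component vtkFloatArray.
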
diff --git a/Rendering/LIC/vtkStructuredGridLIC2D.cxx b/Rendering/LIC/vtkStructuredGridLIC2D.cxx
new file mode 100644
index 0000000..f516ae4
--- /dev/null
+++ b/Rendering/LIC/vtkStructuredGridLIC2D.cxx
@@ -0,0 +1,851 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkStructuredGridLIC2D.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkStructuredGridLIC2D.h"
+
+#include "vtkFloatArray.h"
+#include "vtkImageData.h"
+#include "vtkImageNoiseSource.h"
+#include "vtkInformation.h"
+#include "vtkInformationVector.h"
+#include "vtkDataTransferHelper.h"
+#include "vtkFrameBufferObject.h"
+#include "vtkLineIntegralConvolution2D.h"
+#include "vtkShaderProgram2.h"
+#include "vtkShader2.h"
+#include "vtkShader2Collection.h"
+#include "vtkUniformVariables.h"
+#include "vtkStructuredExtent.h"
+#include "vtkTextureObject.h"
+#include "vtkObjectFactory.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkPointData.h"
+#include "vtkStreamingDemandDrivenPipeline.h"
+#include "vtkOpenGLError.h"
+
+#include "vtkPixelExtent.h"
+#include "vtkPixelTransfer.h"
+#include "vtkPixelBufferObject.h"
+
+#include <cassert>
+#include "vtkgl.h"
+
+extern const char *vtkStructuredGridLIC2D_fs;
+
+#define PRINTEXTENT(ext) \
+  ext[0] << ", " << ext[1] << ", " << ext[2] << ", " << ext[3] << ", " << ext[4] << ", " << ext[5]
+
+vtkStandardNewMacro(vtkStructuredGridLIC2D);
+//----------------------------------------------------------------------------
+vtkStructuredGridLIC2D::vtkStructuredGridLIC2D()
+{
+  this->Context = 0;
+  this->Steps = 1;
+  this->StepSize = 1.0;
+  this->Magnification = 1;
+  this->SetNumberOfInputPorts(2);
+  this->SetNumberOfOutputPorts(2);
+  this->OwnWindow  = false;
+  this->FBOSuccess = 0;
+  this->LICSuccess = 0;
+  this->OpenGLExtensionsSupported = 0;
+
+  this->NoiseSource = vtkImageNoiseSource::New();
+  this->NoiseSource->SetWholeExtent(0, 127, 0, 127, 0, 0);
+  this->NoiseSource->SetMinimum(0.0);
+  this->NoiseSource->SetMaximum(1.0);
+}
+
+//----------------------------------------------------------------------------
+vtkStructuredGridLIC2D::~vtkStructuredGridLIC2D()
+{
+  this->NoiseSource->Delete();
+  this->SetContext( NULL );
+}
+
+//----------------------------------------------------------------------------
+vtkRenderWindow* vtkStructuredGridLIC2D::GetContext()
+{
+  return this->Context;
+}
+
+//----------------------------------------------------------------------------
+int vtkStructuredGridLIC2D::SetContext( vtkRenderWindow * context )
+{
+  if ( this->Context == context )
+    {
+    return this->OpenGLExtensionsSupported;
+    }
+
+  if ( this->Context && this->OwnWindow )
+    {
+    this->Context->Delete();
+    this->Context = NULL;
+    }
+  this->OwnWindow = false;
+
+
+  vtkOpenGLRenderWindow * openGLRenWin =
+  vtkOpenGLRenderWindow::SafeDownCast( context );
+  this->Context = openGLRenWin;
+
+  if ( openGLRenWin )
+    {
+    vtkOpenGLExtensionManager * mgr = openGLRenWin->GetExtensionManager();
+
+    // optional for texture objects.
+    mgr->LoadSupportedExtension( "GL_EXT_texture_integer" );
+
+    if (  !mgr->LoadSupportedExtension( "GL_VERSION_1_3" ) ||
+          !mgr->LoadSupportedExtension( "GL_VERSION_1_2" ) ||
+          !mgr->LoadSupportedExtension( "GL_VERSION_2_0" ) ||
+          !mgr->LoadSupportedExtension( "GL_ARB_texture_float" ) ||
+          !mgr->LoadSupportedExtension( "GL_ARB_color_buffer_float" ) ||
+          !mgr->LoadSupportedExtension( "GL_ARB_texture_non_power_of_two" )
+       )
+      {
+      vtkErrorMacro( "Required OpenGL extensions not supported." );
+      mgr = NULL;
+      this->Context = 0;
+      openGLRenWin  = NULL;
+      return 0;
+      }
+
+    mgr = NULL;
+    }
+
+  openGLRenWin = NULL;
+  this->Modified();
+
+  this->OpenGLExtensionsSupported = 1;
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+// Description:
+// Fill the input port information objects for this algorithm.  This
+// is invoked by the first call to GetInputPortInformation for each
+// port so subclasses can specify what they can handle.
+// Redefined from the superclass.
+int vtkStructuredGridLIC2D::FillInputPortInformation
+  ( int port, vtkInformation * info )
+{
+  if ( port == 0 )
+    {
+    info->Set( vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkStructuredGrid" );
+    info->Set( vtkAlgorithm::INPUT_IS_REPEATABLE(), 0 );
+    info->Set( vtkAlgorithm::INPUT_IS_OPTIONAL(),   0 );
+    }
+  else
+    {
+    info->Set( vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkImageData" );
+    info->Set( vtkAlgorithm::INPUT_IS_REPEATABLE(), 0 );
+    info->Set( vtkAlgorithm::INPUT_IS_OPTIONAL(),   1 );
+    }
+
+  return 1;
+}
+
+// ----------------------------------------------------------------------------
+// Description:
+// Fill the output port information objects for this algorithm.
+// This is invoked by the first call to GetOutputPortInformation for
+// each port so subclasses can specify what they can handle.
+// Redefined from the superclass.
+int vtkStructuredGridLIC2D::FillOutputPortInformation
+  ( int port, vtkInformation * info )
+{
+  if ( port == 0 )
+    {
+    // input+texcoords
+    info->Set( vtkDataObject::DATA_TYPE_NAME(), "vtkStructuredGrid" );
+    }
+  else
+    {
+    // LIC texture
+    info->Set( vtkDataObject::DATA_TYPE_NAME(), "vtkImageData" );
+    }
+
+  return 1;
+}
+//----------------------------------------------------------------------------
+// We need to report output extent after taking into consideration the
+// magnification.
+int vtkStructuredGridLIC2D::RequestInformation(
+  vtkInformation        * vtkNotUsed(request),
+  vtkInformationVector ** inputVector,
+  vtkInformationVector  * outputVector )
+{
+  int ext[6];
+  double spacing[3];
+
+  vtkInformation * inInfo  = inputVector[0]->GetInformationObject( 0 );
+  vtkInformation * outInfo = outputVector->GetInformationObject( 1 );
+
+  inInfo->Get( vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), ext );
+
+  spacing[0] = 1.0;
+  spacing[1] = 1.0;
+  spacing[2] = 1.0;
+
+  for (int axis = 0; axis < 3; axis++)
+    {
+    int wholeMin = ext[axis*2];
+    int wholeMax = ext[axis*2+1];
+    int dimension = wholeMax - wholeMin + 1;
+
+    // Scale the output extent
+    wholeMin = static_cast<int>(  ceil( static_cast<double>
+                                        ( wholeMin * this->Magnification )
+                                      )
+                               );
+    wholeMax = ( dimension != 1 )
+               ? wholeMin + static_cast<int>
+                 (   floor(  static_cast<double>
+                             ( dimension * this->Magnification )
+                          )
+                 ) -1
+               : wholeMin;
+
+    ext[ axis * 2     ] = wholeMin;
+    ext[ axis * 2 + 1 ] = wholeMax;
+    }
+
+  vtkDebugMacro( << "request info whole ext = " << PRINTEXTENT( ext ) << endl );
+
+  outInfo->Set( vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT(), ext, 6 );
+  outInfo->Set( vtkDataObject::SPACING(), spacing, 3 );
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+int vtkStructuredGridLIC2D::RequestUpdateExtent (
+  vtkInformation * vtkNotUsed(request),
+  vtkInformationVector **inputVector,
+  vtkInformationVector *outputVector)
+{
+  vtkInformation *inInfo = inputVector[0]->GetInformationObject(0);
+  vtkInformation *outInfo = outputVector->GetInformationObject(1);
+
+
+  // Tell the vector field input the extents that we need from it.
+  // The downstream request needs to be downsized based on the Magnification.
+  int ext[6];
+  outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), ext);
+
+  vtkDebugMacro( << "request update extent, update ext = "
+                 << PRINTEXTENT( ext ) << endl );
+
+  for (int axis = 0; axis < 3; axis++)
+    {
+    int wholeMin = ext[axis*2];
+    int wholeMax = ext[axis*2+1];
+    int dimension = wholeMax - wholeMin + 1;
+
+    // Scale the output extent
+    wholeMin = static_cast<int>(ceil(static_cast<double>(wholeMin / this->Magnification)));
+    wholeMax = dimension != 1
+               ? wholeMin + static_cast<int>(floor(static_cast<double>(dimension / this->Magnification))) - 1
+               : wholeMin;
+
+    ext[axis*2] = wholeMin;
+    ext[axis*2+1] = wholeMax;
+    }
+  vtkDebugMacro( << "UPDATE_EXTENT: " << ext[0] << ", " << ext[1] << ", "
+                 << ext[2] << ", "    << ext[3] << ", " << ext[4] << ", "
+                 << ext[5] << endl );
+  inInfo->Set(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(), ext, 6);
+
+  vtkDebugMacro( << "request update extent, update ext2 = "
+                 << PRINTEXTENT( ext ) << endl );
+
+
+  if(inputVector[1]!=0 && inputVector[1]->GetInformationObject(0) != NULL) // optional input
+    {
+    inInfo = inputVector[1]->GetInformationObject(0);
+    // always request the whole extent
+    inInfo->Set(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(),
+                inInfo->Get(vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT()),
+                6);
+    }
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+// Stolen from vtkImageAlgorithm. Should be in vtkStructuredGridAlgorithm.
+void vtkStructuredGridLIC2D::AllocateOutputData(vtkDataObject *output,
+                                                vtkInformation *outInfo)
+{
+  // set the extent to be the update extent
+  vtkStructuredGrid *out = vtkStructuredGrid::SafeDownCast(output);
+  if (out)
+    {
+    out->SetExtent(outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT()));
+    }
+  else
+    {
+    vtkImageData *out2 = vtkImageData::SafeDownCast(output);
+    if (out2)
+      {
+      out2->SetExtent(outInfo->Get(vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT()));
+      }
+    }
+}
+
+//----------------------------------------------------------------------------
+// Stolen from vtkImageData. Should be in vtkStructuredGrid.
+void vtkStructuredGridLIC2D::AllocateScalars(vtkStructuredGrid *sg,
+                                             vtkInformation *outInfo)
+{
+  int newType = VTK_DOUBLE;
+  int newNumComp = 1;
+
+  vtkInformation *scalarInfo = vtkDataObject::GetActiveFieldInformation(
+    outInfo,
+    vtkDataObject::FIELD_ASSOCIATION_POINTS, vtkDataSetAttributes::SCALARS);
+  if (scalarInfo)
+    {
+    newType = scalarInfo->Get( vtkDataObject::FIELD_ARRAY_TYPE() );
+    if ( scalarInfo->Has(vtkDataObject::FIELD_NUMBER_OF_COMPONENTS()) )
+      {
+      newNumComp = scalarInfo->Get( vtkDataObject::FIELD_NUMBER_OF_COMPONENTS() );
+      }
+    }
+
+  vtkDataArray *scalars;
+
+  // if the scalar type has not been set then we have a problem
+  if (newType == VTK_VOID)
+    {
+    vtkErrorMacro("Attempt to allocate scalars before scalar type was set!.");
+    return;
+    }
+
+  const int* extent = sg->GetExtent();
+  // Use vtkIdType to avoid overflow on large images
+  vtkIdType dims[3];
+  dims[0] = extent[1] - extent[0] + 1;
+  dims[1] = extent[3] - extent[2] + 1;
+  dims[2] = extent[5] - extent[4] + 1;
+  vtkIdType imageSize = dims[0]*dims[1]*dims[2];
+
+  // if we currently have scalars then just adjust the size
+  scalars = sg->GetPointData()->GetScalars();
+  if (scalars && scalars->GetDataType() == newType
+      && scalars->GetReferenceCount() == 1)
+    {
+    scalars->SetNumberOfComponents(newNumComp);
+    scalars->SetNumberOfTuples(imageSize);
+    // Since the execute method will be modifying the scalars
+    // directly.
+    scalars->Modified();
+    return;
+    }
+
+  // allocate the new scalars
+  scalars = vtkDataArray::CreateDataArray(newType);
+  scalars->SetNumberOfComponents(newNumComp);
+
+  // allocate enough memory
+  scalars->SetNumberOfTuples(imageSize);
+
+  sg->GetPointData()->SetScalars(scalars);
+  scalars->Delete();
+}
+
+
+//----------------------------------------------------------------------------
+int vtkStructuredGridLIC2D::RequestData(
+  vtkInformation        * vtkNotUsed(request),
+  vtkInformationVector ** inputVector,
+  vtkInformationVector  * outputVector )
+{
+  // 3 passes:
+  // pass 1: render to compute the transformed vector field for the points.
+  // pass 2: perform LIC with the new vector field. This has to happen in a
+  // different pass than computation of the transformed vector.
+  // pass 3: Render structured slice quads with the correct texture
+  // coordinates and apply the LIC texture to them.
+
+  vtkInformation    * inInfo = inputVector[0]->GetInformationObject(0);
+  vtkStructuredGrid * input  = vtkStructuredGrid::SafeDownCast
+                   (  inInfo->Get( vtkDataObject::DATA_OBJECT() )  );
+
+  int inputRequestedExtent[6];
+  inInfo->Get(
+        vtkStreamingDemandDrivenPipeline::UPDATE_EXTENT(),
+        inputRequestedExtent);
+
+  // Check if the input image is a 2D image (not 0D, not 1D, not 3D)
+  int dims[3];
+  vtkStructuredExtent::GetDimensions( inputRequestedExtent, dims );
+
+  vtkDebugMacro( << "dims = " << dims[0] << " "
+                 << dims[1] << " " << dims[2] << endl );
+  vtkDebugMacro( << "requested ext = " << inputRequestedExtent[0] << " "
+                 << inputRequestedExtent[1] << " " << inputRequestedExtent[2]
+                 << " " << inputRequestedExtent[3] << " "
+                 << inputRequestedExtent[4] << " "
+                 << inputRequestedExtent[5] << endl );
+
+  if(   !( (dims[0] == 1) && (dims[1] > 1) && (dims[2] > 1) )
+     && !( (dims[1] == 1) && (dims[0] > 1) && (dims[2] > 1) )
+     && !( (dims[2] == 1) && (dims[0] > 1) && (dims[1] > 1) )
+    )
+    {
+    vtkErrorMacro( << "input is not a 2D image." << endl );
+    input  = NULL;
+    inInfo = NULL;
+    return 0;
+    }
+  if( input->GetPointData() == 0 )
+    {
+    vtkErrorMacro( << "input does not have point data." );
+    input  = NULL;
+    inInfo = NULL;
+    return 0;
+    }
+  if( input->GetPointData()->GetVectors() == 0 )
+    {
+    vtkErrorMacro( << "input does not vectors on point data." );
+    input  = NULL;
+    inInfo = NULL;
+    return 0;
+    }
+
+  if ( !this->Context )
+    {
+    vtkRenderWindow * renWin = vtkRenderWindow::New();
+    if (  this->SetContext( renWin ) == 0  )
+      {
+      vtkErrorMacro("Invalid render window");
+      renWin->Delete();
+      renWin = NULL;
+      input  = NULL;
+      inInfo = NULL;
+      return 0;
+      }
+
+    renWin = NULL; // to be released via this->context
+    this->OwnWindow = true;
+    }
+
+  vtkInformation    * outInfo = outputVector->GetInformationObject(0);
+  vtkStructuredGrid * output  = vtkStructuredGrid::SafeDownCast(
+    outInfo->Get(vtkDataObject::DATA_OBJECT()));
+  this->AllocateOutputData(output, outInfo);
+  output->ShallowCopy(input);
+
+  vtkInformation * outInfoTexture = outputVector->GetInformationObject(1);
+  vtkImageData   * outputTexture  = vtkImageData::SafeDownCast(
+    outInfoTexture->Get(vtkDataObject::DATA_OBJECT()));
+  this->AllocateOutputData(outputTexture, outInfoTexture);
+
+  // Noise.
+  vtkInformation *noiseInfo = inputVector[1]->GetInformationObject(0);
+  vtkImageData *noise = 0;
+  if (noiseInfo == 0)
+    {
+    this->NoiseSource->Update();
+    noise = this->NoiseSource->GetOutput();
+    }
+  else
+    {
+    noise = vtkImageData::SafeDownCast(
+      noiseInfo->Get(vtkDataObject::DATA_OBJECT()));
+
+    if(noise->GetPointData()==0)
+      {
+      vtkErrorMacro(<<"provided noise does not have point data.");
+      return 0;
+      }
+    if(noise->GetPointData()->GetScalars()==0)
+      {
+      vtkErrorMacro(<<"provided noise does not have scalars on point data.");
+      return 0;
+      }
+    }
+
+  vtkOpenGLClearErrorMacro();
+
+  int width;
+  int height;
+  int firstComponent;
+  int secondComponent;
+  int slice;
+  if (dims[0] == 1)
+    {
+    vtkDebugMacro( << "x" << endl );
+    firstComponent = 1;
+    secondComponent = 2;
+    slice = 0;
+    }
+  else
+    {
+    if (dims[1] == 1)
+      {
+      vtkDebugMacro( << "y" << endl );
+      firstComponent = 0;
+      secondComponent = 2;
+      slice = 1;
+      }
+    else
+      {
+      vtkDebugMacro( << "z" << endl );
+      firstComponent = 0;
+      secondComponent = 1;
+      slice = 2;
+      }
+    }
+
+  width = dims[firstComponent];
+  height = dims[secondComponent];
+
+  vtkDebugMacro( << "w = " << width << " h = " << height << endl );
+
+  vtkDataTransferHelper * vectorFieldBus = vtkDataTransferHelper::New();
+  vectorFieldBus->SetContext(this->Context);
+  vectorFieldBus->SetCPUExtent(inputRequestedExtent); // input->GetExtent());
+  vectorFieldBus->SetGPUExtent(inputRequestedExtent); // input->GetExtent());
+  //  vectorFieldBus->SetTextureExtent(input->GetExtent());
+  vectorFieldBus->SetArray(input->GetPointData()->GetVectors());
+
+  vtkDataTransferHelper * pointBus = vtkDataTransferHelper::New();
+  pointBus->SetContext(this->Context);
+  pointBus->SetCPUExtent(inputRequestedExtent); // input->GetExtent());
+  pointBus->SetGPUExtent(inputRequestedExtent); // input->GetExtent());
+  //  pointBus->SetTextureExtent(input->GetExtent());
+  pointBus->SetArray(input->GetPoints()->GetData());
+
+  vtkOpenGLExtensionManager * mgr = vtkOpenGLExtensionManager::New();
+  mgr->SetRenderWindow(this->Context);
+
+  // Vector field in image space.
+  int magWidth = this->Magnification*width;
+  int magHeight = this->Magnification*height;
+
+  vtkTextureObject * vector2 = vtkTextureObject::New();
+  vector2->SetContext(this->Context);
+  vector2->Create2D(magWidth,magHeight,3,VTK_FLOAT,false);
+
+  vtkDebugMacro( << "Vector field in image space (target) textureId = "
+                 << vector2->GetHandle() << endl );
+
+  vtkFrameBufferObject *fbo = vtkFrameBufferObject::New();
+  fbo->SetContext(this->Context);
+  fbo->SetColorBuffer(0,vector2);
+  fbo->SetNumberOfRenderTargets(1);
+  fbo->SetActiveBuffer(0);
+
+  // TODO --
+  // step size is incorrect here
+  // guard pixels are needed for parallel operations
+
+  if (  !fbo->Start( magWidth, magHeight, false )  )
+    {
+    mgr->Delete();
+    fbo->Delete();
+    vector2->Delete();
+    pointBus->Delete();
+    vectorFieldBus->Delete();
+
+    mgr = NULL;
+    fbo = NULL;
+    vector2  = NULL;
+    pointBus = NULL;
+    vectorFieldBus   = NULL;
+
+    noise   = NULL;
+    input   = NULL;
+    inInfo  = NULL;
+    output  = NULL;
+    outInfo = NULL;
+    noiseInfo = NULL;
+    outputTexture  = NULL;
+    outInfoTexture = NULL;
+
+    this->FBOSuccess = 0;
+    return 0;
+    }
+  this->FBOSuccess = 1;
+
+  vtkShaderProgram2 *pgm = vtkShaderProgram2::New();
+  pgm->SetContext(static_cast<vtkOpenGLRenderWindow *>(this->Context.GetPointer()));
+
+  vtkShader2 *shader = vtkShader2::New();
+  shader->SetType(VTK_SHADER_TYPE_FRAGMENT);
+  shader->SetSourceCode(vtkStructuredGridLIC2D_fs);
+  shader->SetContext(pgm->GetContext());
+  pgm->GetShaders()->AddItem(shader);
+  shader->Delete();
+  shader = NULL;
+
+  pgm->Build();
+  if(pgm->GetLastBuildStatus()!=VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+    {
+    vtkErrorMacro("shader build error.");
+    return 0;
+    }
+
+  int value = 0;
+  pgm->GetUniformVariables()->SetUniformi("texPoints",1,&value);
+  value = 1;
+  pgm->GetUniformVariables()->SetUniformi("texVectorField",1,&value);
+  float fvalues[3];
+  fvalues[0] = static_cast<float>(dims[0]);
+  fvalues[1] = static_cast<float>(dims[1]);
+  fvalues[2] = static_cast<float>(dims[2]);
+  pgm->GetUniformVariables()->SetUniformf("uDimensions",3,fvalues);
+  value = slice;
+  pgm->GetUniformVariables()->SetUniformi("uSlice",1,&slice);
+
+  vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+  pointBus->Upload(0,0);
+  vtkTextureObject *points = pointBus->GetTexture();
+  points->Bind();
+  glTexParameteri(points->GetTarget(),GL_TEXTURE_WRAP_S, GL_CLAMP);
+  glTexParameteri(points->GetTarget(),GL_TEXTURE_WRAP_T, GL_CLAMP);
+  glTexParameteri(points->GetTarget(), vtkgl::TEXTURE_WRAP_R, GL_CLAMP);
+  glTexParameteri(points->GetTarget(), GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameteri(points->GetTarget(), GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+
+  vtkDebugMacro( << "points on texture unit 0, textureId == "
+                 << points->GetHandle() << endl );
+
+  vtkgl::ActiveTexture(vtkgl::TEXTURE1);
+  vectorFieldBus->Upload(0,0);
+  vtkTextureObject *vectorField = vectorFieldBus->GetTexture();
+  vectorField->Bind();
+  glTexParameteri(vectorField->GetTarget(),GL_TEXTURE_WRAP_S, GL_CLAMP);
+  glTexParameteri(vectorField->GetTarget(),GL_TEXTURE_WRAP_T, GL_CLAMP);
+  glTexParameteri(vectorField->GetTarget(), vtkgl::TEXTURE_WRAP_R, GL_CLAMP);
+  glTexParameteri(vectorField->GetTarget(), GL_TEXTURE_MIN_FILTER, GL_NEAREST);
+  glTexParameteri(vectorField->GetTarget(), GL_TEXTURE_MAG_FILTER, GL_NEAREST);
+
+  vtkDebugMacro( << "vector field on texture unit 1, textureId == "
+                 << vectorField->GetHandle() << endl );
+
+  pgm->Use();
+  if(!pgm->IsValid())
+    {
+    vtkErrorMacro(<<" validation of the program failed: "<<pgm->GetLastValidateLog());
+    }
+  vtkOpenGLCheckErrorMacro("failed during config");
+
+  vtkDebugMacro( << "glFinish before rendering quad" << endl );
+
+  fbo->RenderQuad(0, magWidth-1,0, magHeight-1);
+  vtkOpenGLCheckErrorMacro("StructuredGridLIC2D projection fialed");
+
+  vtkDebugMacro( << "glFinish after rendering quad" << endl );
+
+  pgm->Restore();
+
+  vtkLineIntegralConvolution2D *internal = vtkLineIntegralConvolution2D::New();
+  if (  !internal->IsSupported( this->Context )  )
+    {
+    pgm->ReleaseGraphicsResources();
+
+    pgm->Delete();
+    mgr->Delete();
+    fbo->Delete();
+    vector2->Delete();
+    internal->Delete();
+    pointBus->Delete();
+    vectorFieldBus->Delete();
+
+    pgm = NULL;
+    mgr = NULL;
+    fbo = NULL;
+    vector2  = NULL;
+    internal = NULL;
+    pointBus = NULL;
+    vectorFieldBus = NULL;
+
+    noise   = NULL;
+    input   = NULL;
+    inInfo  = NULL;
+    points  = NULL;
+    output  = NULL;
+    outInfo = NULL;
+    noiseInfo   = NULL;
+    vectorField = NULL;
+    outputTexture  = NULL;
+    outInfoTexture = NULL;
+
+    this->LICSuccess = 0;
+    return 0;
+    }
+
+  internal->SetContext(this->Context);
+  internal->SetNumberOfSteps(this->Steps);
+  internal->SetStepSize(this->StepSize);
+  internal->SetComponentIds(firstComponent,secondComponent);
+
+  vtkDataTransferHelper *noiseBus = vtkDataTransferHelper::New();
+  noiseBus->SetContext(this->Context);
+  noiseBus->SetCPUExtent(noise->GetExtent());
+  noiseBus->SetGPUExtent(noise->GetExtent());
+  //  noiseBus->SetTextureExtent(noise->GetExtent());
+  noiseBus->SetArray(noise->GetPointData()->GetScalars());
+  noiseBus->Upload(0,0);
+
+  vtkTextureObject *licTex = internal->Execute(vector2, noiseBus->GetTexture());
+  if (licTex == NULL)
+    {
+    pgm->ReleaseGraphicsResources();
+
+    pgm->Delete();
+    mgr->Delete();
+    fbo->Delete();
+    vector2->Delete();
+    internal->Delete();
+    pointBus->Delete();
+    noiseBus->Delete();
+    vectorFieldBus->Delete();
+
+    pgm = NULL;
+    mgr = NULL;
+    fbo = NULL;
+    vector2 = NULL;
+    internal = NULL;
+    pointBus = NULL;
+    noiseBus = NULL;
+    vectorFieldBus = NULL;
+
+    noise = NULL;
+    input = NULL;
+    inInfo = NULL;
+    points = NULL;
+    output = NULL;
+    outInfo = NULL;
+    noiseInfo = NULL;
+    vectorField = NULL;
+    outputTexture = NULL;
+    outInfoTexture = NULL;
+
+    this->LICSuccess = 0;
+    return 0;
+    }
+  this->LICSuccess = 1;
+
+  // transfer lic from texture to vtk array
+  vtkPixelExtent magLicExtent(magWidth, magHeight);
+  vtkIdType nOutTups = magLicExtent.Size();
+
+  vtkFloatArray *licOut = vtkFloatArray::New();
+  licOut->SetNumberOfComponents(3);
+  licOut->SetNumberOfTuples(nOutTups);
+  licOut->SetName("LIC");
+
+  vtkPixelBufferObject *licPBO = licTex->Download();
+
+  vtkPixelTransfer::Blit<float,float>(
+        magLicExtent,
+        magLicExtent,
+        magLicExtent,
+        magLicExtent,
+        4,
+        (float*)licPBO->MapPackedBuffer(),
+        3,
+        licOut->GetPointer(0));
+
+  licPBO->UnmapPackedBuffer();
+  licPBO->Delete();
+  licTex->Delete();
+
+  // mask and convert to gray scale 3 components
+  float *pLicOut = licOut->GetPointer(0);
+  for (vtkIdType i=0; i<nOutTups; ++i)
+    {
+    float lic = pLicOut[3*i];
+    float mask = pLicOut[3*i+1];
+    if ( mask )
+      {
+      pLicOut[3*i+1] = pLicOut[3*i+2] = pLicOut[3*i] = 0.0f;
+      }
+    else
+      {
+      pLicOut[3*i+1] = pLicOut[3*i+2] = lic;
+      }
+    }
+
+  outputTexture->GetPointData()->SetScalars(licOut);
+  licOut->Delete();
+
+  // Pass three. Generate texture coordinates. Software.
+  vtkFloatArray *tcoords = vtkFloatArray::New();
+  tcoords->SetNumberOfComponents(2);
+  tcoords->SetNumberOfTuples(dims[0]*dims[1]*dims[2]);
+  output->GetPointData()->SetTCoords(tcoords);
+  tcoords->Delete();
+
+  double ddim[3];
+  ddim[0] = static_cast<double>(dims[0]-1);
+  ddim[1] = static_cast<double>(dims[1]-1);
+  ddim[2] = static_cast<double>(dims[2]-1);
+
+  int tz = 0;
+  while(tz < dims[slice])
+    {
+    int ty = 0;
+    while(ty < dims[secondComponent])
+      {
+      int tx = 0;
+      while(tx < dims[firstComponent])
+        {
+        tcoords->SetTuple2(
+              (tz*dims[secondComponent]+ty)*dims[firstComponent]+tx,
+              tx/ddim[firstComponent],
+              ty/ddim[secondComponent]);
+        ++tx;
+        }
+      ++ty;
+      }
+    ++tz;
+    }
+
+  internal->Delete();
+  noiseBus->Delete();
+  vectorFieldBus->Delete();
+  pointBus->Delete();
+  mgr->Delete();
+  vector2->Delete();
+  fbo->Delete();
+  pgm->ReleaseGraphicsResources();
+  pgm->Delete();
+
+  // Make sure the active texture is back to texture0 for the part of the
+  // pipeline using OpenGL 1.1 (texture on actor)
+  vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+
+  vtkOpenGLCheckErrorMacro("failed after RequestData");
+
+  return 1;
+}
+
+//----------------------------------------------------------------------------
+void vtkStructuredGridLIC2D::PrintSelf( ostream & os, vtkIndent indent )
+{
+  this->Superclass::PrintSelf( os, indent );
+
+  os << indent << "Steps: " << this->Steps << "\n";
+  os << indent << "StepSize: " << this->StepSize << "\n";
+  os << indent << "FBOSuccess: " << this->FBOSuccess << "\n";
+  os << indent << "LICSuccess: " << this->LICSuccess << "\n";
+  os << indent << "Magnification: " << this->Magnification << "\n";
+  os << indent << "OpenGLExtensionsSupported: "
+               << this->OpenGLExtensionsSupported << "\n";
+}
diff --git a/Rendering/LIC/vtkStructuredGridLIC2D.h b/Rendering/LIC/vtkStructuredGridLIC2D.h
new file mode 100644
index 0000000..912bdd6
--- /dev/null
+++ b/Rendering/LIC/vtkStructuredGridLIC2D.h
@@ -0,0 +1,166 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkStructuredGridLIC2D.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkStructuredGridLIC2D
+// .SECTION Description
+// GPU implementation of a Line Integral Convolution, a technique for
+// imaging vector fields.
+//
+// The input on port 0 is a 2D vtkStructuredGrid. It needs
+// a vector field on point data.
+// Port 1 is an optional port for customized noise input.
+// If not present, noise is generated by the filter. Even if non-power-of-two
+// textures are supported, giving a power-of-two image may result in faster
+// execution on the GPU.
+//
+// Please refer to Forssell, L. K., “Visualizing flow over curvilinear grid
+// surfaces using line integral convolution”, Visualization 94 Conference
+// Proceedings, pages 240-247, IEEE Computer Society, 1994 for details of the
+// algorithm.
+//
+// .SECTION Required OpenGL Extensions
+// GL_ARB_texture_non_power_of_two
+// GL_VERSION_2_0
+// GL_ARB_texture_float
+// GL_ARB_draw_buffers
+// GL_EXT_framebuffer_object
+// GL_ARB_pixel_buffer_object
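+//
+// .SECTION Description
+// A minimal usage sketch (editor's note, not part of the upstream docs); it
+// assumes an existing vtkRenderWindow named renWin and a vtkStructuredGrid
+// named grid carrying point vectors:
+//
+//   vtkStructuredGridLIC2D *lic = vtkStructuredGridLIC2D::New();
+//   if (lic->SetContext(renWin) && lic->GetOpenGLExtensionsSupported())
+//     {
+//     lic->SetInputData(0, grid);  // port 0: 2D structured grid with vectors
+//     lic->SetSteps(40);           // more steps, better visual quality
+//     lic->SetStepSize(0.01);      // smaller steps, better visual quality
+//     lic->SetMagnification(2);
+//     lic->Update();
+//     // check GetFBOSuccess() and GetLICSuccess() before using the outputs
+//     // (port 0: grid with texture coordinates, port 1: LIC image).
+//     }
+//   lic->Delete();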
+#ifndef __vtkStructuredGridLIC2D_h
+#define __vtkStructuredGridLIC2D_h
+
+#include "vtkRenderingLICModule.h" // For export macro
+#include "vtkStructuredGridAlgorithm.h"
+#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
+
+class vtkRenderWindow;
+class vtkImageNoiseSource;
+
+class VTKRENDERINGLIC_EXPORT vtkStructuredGridLIC2D
+  : public vtkStructuredGridAlgorithm
+{
+public:
+  static vtkStructuredGridLIC2D* New();
+  vtkTypeMacro(vtkStructuredGridLIC2D, vtkStructuredGridAlgorithm);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Get/Set the context. Context must be a vtkOpenGLRenderWindow.
+  // This does not increase the reference count of the
+  // context to avoid reference loops.
+  // SetContext() may raise an error if the OpenGL context does not support the
+  // required OpenGL extensions. Returns 0 upon failure and 1 upon success.
+  int SetContext( vtkRenderWindow * context );
+  vtkRenderWindow * GetContext();
+
+  // Description:
+  // Number of steps. Initial value is 1.
+  // class invariant: Steps>0.
+  // In terms of visual quality, the greater the better.
+  vtkSetMacro(Steps,int);
+  vtkGetMacro(Steps,int);
+
+  // Description:
+  // Step size.
+  // WE ARE NOT SURE YET about the space where we define the step.
+  // If the image data has different spacing in each dimension, it
+  // is an issue.
+  // Initial value is 1.0.
+  // class invariant: StepSize>0.0.
+  // In terms of visual quality, the smaller the better.
+  // The type for the interface is double because the VTK interface is double,
+  // but the GPU only supports float. This value will be converted to
+  // float in the execution of the algorithm.
+  vtkSetMacro(StepSize,double);
+  vtkGetMacro(StepSize,double);
+
+  // Description:
+  // The magnification factor. Default is 1.
+  vtkSetClampMacro(Magnification, int, 1, VTK_INT_MAX);
+  vtkGetMacro(Magnification, int);
+
+  // Description:
+  // Check if the required OpenGL extensions / GPU are supported.
+  vtkGetMacro( OpenGLExtensionsSupported, int );
+
+  // Description:
+  // Check if FBO is started properly.
+  int   GetFBOSuccess() { return this->FBOSuccess; }
+
+  // Description:
+  // Check if LIC runs properly.
+  int   GetLICSuccess() { return this->LICSuccess; }
+
+//BTX
+protected:
+  vtkStructuredGridLIC2D();
+  ~vtkStructuredGridLIC2D();
+
+  // Description:
+  // Fill the input port information objects for this algorithm.  This
+  // is invoked by the first call to GetInputPortInformation for each
+  // port so subclasses can specify what they can handle.
+  // Redefined from the superclass.
+  virtual int FillInputPortInformation(int port,
+                                       vtkInformation *info);
+
+  // Description:
+  // Fill the output port information objects for this algorithm.
+  // This is invoked by the first call to GetOutputPortInformation for
+  // each port so subclasses can specify what they can handle.
+  // Redefined from the superclass.
+  virtual int FillOutputPortInformation(int port,
+                                        vtkInformation *info);
+
+  virtual int RequestInformation(vtkInformation *request,
+         vtkInformationVector **inputVector,
+         vtkInformationVector *outputVector);
+
+  int RequestUpdateExtent (vtkInformation *request,
+                           vtkInformationVector **inputVector,
+                           vtkInformationVector *outputVector);
+
+  // Description:
+  // Stolen from vtkImageAlgorithm. Should be in vtkStructuredGridAlgorithm.
+  void AllocateOutputData(vtkDataObject *output,
+                          vtkInformation *outInfo);
+
+  // Description:
+  // Stolen from vtkImageData. Should be in vtkStructuredGrid.
+  void AllocateScalars(vtkStructuredGrid *sg, vtkInformation *outInfo);
+
+  // Description:
+  // This is called by the superclass.
+  // This is the method you should override.
+  virtual int RequestData(vtkInformation *request,
+                          vtkInformationVector **inputVector,
+                          vtkInformationVector *outputVector);
+
+  int    Steps;
+  double StepSize;
+  int    Magnification;
+  int    OpenGLExtensionsSupported;
+  vtkWeakPointer<vtkRenderWindow> Context;
+
+  vtkImageNoiseSource* NoiseSource;
+  bool   OwnWindow;
+  int    FBOSuccess;
+  int    LICSuccess;
+
+private:
+  vtkStructuredGridLIC2D(const vtkStructuredGridLIC2D&); // Not implemented.
+  void operator=(const vtkStructuredGridLIC2D&); // Not implemented.
+//ETX
+};
+
+#endif
diff --git a/Rendering/LIC/vtkStructuredGridLIC2D_fs.glsl b/Rendering/LIC/vtkStructuredGridLIC2D_fs.glsl
new file mode 100644
index 0000000..cfdc4a4
--- /dev/null
+++ b/Rendering/LIC/vtkStructuredGridLIC2D_fs.glsl
@@ -0,0 +1,158 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkStructuredGridLIC2D_fs.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// Filename: vtkStructuredGridLIC2D_fs.glsl
+// Filename is useful when using gldb-gui
+
+#version 120 // because of transpose()
+
+/*
+For an input structured grid, this computes the inverse Jacobian for each point.
+*/
+
+uniform sampler2D texPoints;  // point coordinates
+uniform sampler2D texVectorField; // vector field.
+uniform vec3 uDimensions;     // structured dimensions; initially == (width, height, 1)
+
+uniform int uSlice; // 0,1,2
+
+ivec3 getIJK(vec3 ninjnk, vec3 dims)
+{
+  return ivec3(floor(ninjnk * (dims - 1.0) + vec3(0.5, 0.5, 0.5)));
+}
+
+vec3 getVector(ivec3 ijk, vec3 dims, sampler2D field)
+{
+  // the k component was originally ignored (when dims == (width, height, 1)),
+  // but not any more.
+  vec3 rcoord = vec3(ijk) / max(vec3(1.0), dims - 1.0);
+  vec2 tcoord;
+
+  if(uSlice == 0)
+   {
+    tcoord.xy = rcoord.yz;
+   }
+  else
+  {
+   if(uSlice == 1)
+    {
+     tcoord.xy = rcoord.xz;
+    }
+    else
+    {
+     tcoord.xy = rcoord.xy;
+    }
+  }
+
+  return texture2D(field, tcoord).xyz;
+}
+
+float vtkDeterminant(mat3 m)
+{
+  // develop determinant along first row.
+  return m[0][0]*(m[2][2]*m[1][1] - m[2][1]*m[1][2])
+       - m[1][0]*(m[2][2]*m[0][1] - m[2][1]*m[0][2])
+       + m[2][0]*(m[1][2]*m[0][1] - m[1][1]*m[0][2]);
+}
+
+mat3 vtkInverse(mat3 mm, float det)
+{
+  mat3 m=transpose(mm);
+
+  mat3 adjM = mat3(
+    m[2][2]*m[1][1]-m[2][1]*m[1][2], -(m[2][2]*m[0][1]-m[2][1]*m[0][2]),  m[1][2]*m[0][1]-m[1][1]*m[0][2],
+  -(m[2][2]*m[1][0]-m[2][0]*m[1][2]),  m[2][2]*m[0][0]-m[2][0]*m[0][2], -(m[1][2]*m[0][0]-m[1][0]*m[0][2]),
+    m[2][1]*m[1][0]-m[2][0]*m[1][1], -(m[2][1]*m[0][0]-m[2][0]*m[0][1]),  m[1][1]*m[0][0]-m[1][0]*m[0][1]
+  );
+
+  return adjM / det;
+}
+
+mat3 vtkJacobian(ivec3 ijk, vec3 dims, sampler2D tex)
+{
+  // Jacobian is estimated with a central finite difference technique.
+  /*
+  | dx/di, dx/dj, dx/dk |
+  | dy/di, dy/dj, dy/dk |
+  | dz/di, dz/dj, dz/dk |
+  */
+
+  //(i-1, j, k), (i+1, j, k)
+  vec3 pts_IM1_J_K = getVector(ivec3(ijk.x-1, ijk.yz), dims, tex);
+  vec3 pts_I1_J_K = getVector(ivec3(ijk.x+1, ijk.yz), dims, tex);
+
+  //   (i, j-1, k), (i, j+1, k)
+  vec3 pts_I_JM1_K = getVector(ivec3(ijk.x, ijk.y-1, ijk.z), dims, tex);
+  vec3 pts_I_J1_K = getVector(ivec3(ijk.x, ijk.y+1, ijk.z), dims, tex);
+
+  // (i, j, k-1), (i, j, k+1).
+  vec3 pts_I_J_KM1 = getVector(ivec3(ijk.xy, ijk.z-1), dims, tex);
+  vec3 pts_I_J_K1 = getVector(ivec3(ijk.xy, ijk.z+1), dims, tex);
+
+  vec3 col1 = 0.5 * (pts_I1_J_K - pts_IM1_J_K);
+  vec3 col2 = 0.5 * (pts_I_J1_K - pts_I_JM1_K);
+  vec3 col3 = 0.5 * (pts_I_J_K1 - pts_I_J_KM1);
+
+ if(uSlice == 0)
+  {
+    col1[0] = 1.0;
+  }
+ else
+  {
+     if(uSlice == 1)
+      {
+      col2[1] = 1.0;
+      }
+     else
+      {
+      col3[2] = 1.0;
+      }
+  }
+
+  mat3 J = mat3(col1, col2, col3);
+  return J;
+}
+
+void main(void)
+{
+  // determine the structured coordinate for the current location.
+  vec3 tcoord;
+  if (uSlice == 0)
+  {
+   tcoord = vec3(0, gl_TexCoord[0].st);
+  }
+  else
+  {
+   if (uSlice == 1)
+    {
+     tcoord = vec3(gl_TexCoord[0].s, 0, gl_TexCoord[0].t);
+    }
+   else
+    {
+     tcoord = vec3(gl_TexCoord[0].st, 0);
+    }
+  }
+
+  ivec3 ijk = getIJK(tcoord, uDimensions);
+
+  // compute partial derivative for X.
+  mat3 J = vtkJacobian(ijk, uDimensions, texPoints);
+
+  // compute inverse of J.
+  vec3 vector = getVector(ijk, uDimensions, texVectorField);
+  float detJ = vtkDeterminant(J);
+  mat3 invJ = vtkInverse(J, detJ);
+  gl_FragData[0] = vec4(invJ * vector, 1.0);
+}
diff --git a/Rendering/LIC/vtkSurfaceLICComposite.cxx b/Rendering/LIC/vtkSurfaceLICComposite.cxx
new file mode 100644
index 0000000..0764ebf
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICComposite.cxx
@@ -0,0 +1,425 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSurfaceLICComposite.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSurfaceLICComposite.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkPixelExtent.h"
+#include "vtkPixelExtentIO.h"
+
+#include <algorithm>
+
+using std::deque;
+using std::vector;
+
+// Enable debug output
+// 0 -- off
+// 1 -- dump extents
+// 2 -- all
+#define vtkSurfaceLICCompositeDEBUG 0
+
+//-----------------------------------------------------------------------------
+vtkObjectFactoryNewMacro(vtkSurfaceLICComposite);
+
+// ----------------------------------------------------------------------------
+vtkSurfaceLICComposite::vtkSurfaceLICComposite()
+        :
+     Pass(0),
+     WindowExt(),
+     BlockExts(),
+     CompositeExt(),
+     GuardExt(),
+     DisjointGuardExt(),
+     Strategy(COMPOSITE_AUTO),
+     StepSize(0),
+     NumberOfSteps(0),
+     NormalizeVectors(1),
+     NumberOfGuardLevels(1)
+{}
+
+// ----------------------------------------------------------------------------
+vtkSurfaceLICComposite::~vtkSurfaceLICComposite()
+{}
+
+// ----------------------------------------------------------------------------
+void vtkSurfaceLICComposite::Initialize(
+        const vtkPixelExtent &winExt,
+        const deque<vtkPixelExtent> &blockExts,
+        int strategy,
+        double stepSize,
+        int nSteps,
+        int normalizeVectors,
+        int enhancedLIC,
+        int antialias)
+{
+  this->Pass = 0;
+  this->WindowExt = winExt;
+  this->BlockExts = blockExts;
+  this->CompositeExt.clear();
+  this->GuardExt.clear();
+  this->DisjointGuardExt.clear();
+  this->Strategy = strategy;
+  this->StepSize = stepSize;
+  this->NumberOfSteps = nSteps;
+  this->NormalizeVectors = normalizeVectors;
+  // TODO -- FIXME
+  // type of NumberOfGuardLevels should be float. The change is
+  // fairly involved and needs to be thoroughly tested. Note: too
+  // few guard pixels and you get an incorrect result; too many
+  // and you destroy performance and scaling. Until this is resolved,
+  // the following quiets dashboard warnings and keeps
+  // the existing, well-tested behavior.
+  this->NumberOfGuardLevels = 1;
+  //this->NumberOfGuardLevels = enhancedLIC ? 1.5 : 1;
+  this->NumberOfEEGuardPixels = enhancedLIC ? 1 : 0;
+  this->NumberOfAAGuardPixels = 2*antialias;
+}
+
+// ----------------------------------------------------------------------------
+int vtkSurfaceLICComposite::VectorMax(
+    const deque<vtkPixelExtent> &exts,
+    float *vectors,
+    vector<float> &vMax)
+{
+  #if vtkSurfaceLICCompositeDEBUG >= 2
+  cerr << "=====vtkSurfaceLICComposite::VectorMax" << endl;
+  #endif
+
+  // find the max on each extent
+  size_t nBlocks = exts.size();
+  vector<float> tmpMax(nBlocks, 0.0f);
+  for (size_t b=0; b<nBlocks; ++b)
+    {
+    tmpMax[b] = this->VectorMax(exts[b], vectors);
+    }
+
+  // use larger of this extent and its neighbors
+  vMax.resize(nBlocks, 0.0f);
+  for (size_t a=0; a<nBlocks; ++a)
+    {
+    vtkPixelExtent extA = exts[a];
+    extA.Grow(1);
+    for (size_t b=0; b<nBlocks; ++b)
+      {
+      vtkPixelExtent extB = exts[b];
+      extB&=extA;
+
+      // it's a neighbor (or self); use the larger of ours and theirs
+      if (!extB.Empty())
+        {
+        vMax[a] = vMax[a] < tmpMax[b] ? tmpMax[b] : vMax[a];
+        }
+      }
+    }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+float vtkSurfaceLICComposite::VectorMax(
+    const vtkPixelExtent &ext,
+    float *vectors)
+{
+  #if vtkSurfaceLICCompositeDEBUG >= 2
+  cerr << "=====vtkSurfaceLICComposite::VectorMax" << endl;
+  #endif
+
+  int nx[2];
+  this->WindowExt.Size(nx);
+
+  // find the max over this region
+  // scaling by 1/nx since that's what LIC'er does.
+  float eMax = 0.0;
+  for (int j=ext[2]; j<=ext[3]; ++j)
+    {
+    int idx = 4*(nx[0]*j+ext[0]);
+    for (int i=ext[0]; i<=ext[1]; ++i, idx+=4)
+      {
+      float eMag = 0.0;
+      for (int c=0; c<2; ++c)
+        {
+        float eVec = vectors[idx+c]/static_cast<float>(nx[c]);
+        eMag += eVec*eVec;
+        }
+      eMag = sqrt(eMag);
+      eMax = eMax < eMag ? eMag : eMax;
+      }
+    }
+
+  return eMax;
+}
+
+// ----------------------------------------------------------------------------
+int vtkSurfaceLICComposite::MakeDecompDisjoint(
+     const deque<vtkPixelExtent> &in,
+     deque<vtkPixelExtent> &out,
+     float *vectors)
+{
+  #if vtkSurfaceLICCompositeDEBUG >= 2
+  cerr << "=====vtkSurfaceLICComposite::MakeDecompDisjoint" << endl;
+  #endif
+
+  // serial implementation
+
+  // sort by size
+  deque<vtkPixelExtent> tmpIn(in);
+  sort(tmpIn.begin(), tmpIn.end());
+
+  // from largest to smallest, make it disjoint
+  // to others. This order has the best chance of
+  // leaving each rank with some data.
+  deque<vtkPixelExtent> tmpOut0;
+
+  this->MakeDecompDisjoint(tmpIn, tmpOut0);
+
+  // minimize and remove empty extents.
+  int nx[2];
+  this->WindowExt.Size(nx);
+  while(!tmpOut0.empty())
+    {
+    vtkPixelExtent outExt = tmpOut0.back();
+    tmpOut0.pop_back();
+
+    GetPixelBounds(vectors, nx[0], outExt);
+    if (!outExt.Empty())
+      {
+      out.push_back(outExt);
+      }
+    }
+
+  /*
+  // merge compatible extents
+  vtkPixelExtent::Merge(tmpOut0);
+  */
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkSurfaceLICComposite::MakeDecompDisjoint(
+     deque<vtkPixelExtent> &in,
+     deque<vtkPixelExtent> &out)
+{
+  while (!in.empty())
+    {
+    // for each element
+    deque<vtkPixelExtent> tmpOut(1, in.back());
+    in.pop_back();
+
+    // subtract other elements
+    // to make it disjoint
+    size_t ns = in.size();
+    for (size_t se=0; se<ns; ++se)
+      {
+      vtkPixelExtent &selem = in[se];
+      deque<vtkPixelExtent> tmpOut2;
+      size_t nl = tmpOut.size();
+      for (size_t le=0; le<nl; ++le)
+        {
+        vtkPixelExtent &lelem = tmpOut[le];
+        vtkPixelExtent::Subtract(lelem, selem, tmpOut2);
+        }
+      tmpOut = tmpOut2;
+      }
+
+    // append new disjoint elements
+    out.insert(out.end(), tmpOut.begin(), tmpOut.end());
+    }
+
+  return 0;
+}
+
+// TODO -- this is needed in part because our step size is incorrect
+// due to the anisotropic (in aspect ratio) transform to texture
+// space. See how we transform the step size in the surface LIC painter.
+// Also, there is bleeding at the edges, so you do need a bit of extra
+// padding.
+// ----------------------------------------------------------------------------
+float vtkSurfaceLICComposite::GetFudgeFactor(int nx[2])
+{
+  float aspect = float(nx[0])/float(nx[1]);
+  float fudge
+    = (aspect > 4.0f) ? 3.0f :
+      (aspect > 1.0f) ? (2.0f/3.0f)*aspect + (5.0f/6.0f) :
+      (aspect < 0.25) ? 3.0f :
+      (aspect < 1.0f) ? (-8.0f/3.0f)*aspect + (25.0f/6.0f) :
+      1.5f;
+  return fudge;
+}
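+
+// For example (editor's illustration, numbers are hypothetical): a 1920x1080
+// window has aspect = 1920/1080 ~= 1.78, so fudge = (2/3)*1.78 + 5/6 ~= 2.0;
+// AddGuardPixels below then scales the guard arc
+// StepSize*NumberOfSteps*NumberOfGuardLevels by roughly 2.0.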
+
+// ----------------------------------------------------------------------------
+int vtkSurfaceLICComposite::AddGuardPixels(
+      const deque<vtkPixelExtent> &exts,
+      deque<vtkPixelExtent> &guardExts,
+      deque<vtkPixelExtent> &disjointGuardExts,
+      float *vectors)
+{
+  #if vtkSurfaceLICCompositeDEBUG >= 2
+  cerr << "=====vtkSurfaceLICComposite::AddGuardPixles" << endl;
+  #endif
+
+  int nx[2];
+  this->WindowExt.Size(nx);
+  float fudge = this->GetFudgeFactor(nx);
+  float arc
+    = this->StepSize*this->NumberOfSteps*this->NumberOfGuardLevels*fudge;
+
+  if (this->NormalizeVectors)
+    {
+    // when normalizing, velocity is always 1, so all extents have the
+    // same number of guard cells.
+    int ng
+      = static_cast<int>(arc)
+      + this->NumberOfEEGuardPixels
+      + this->NumberOfAAGuardPixels;
+    ng = ng < 2 ? 2 : ng;
+    //cerr << "ng=" << ng << endl;
+    deque<vtkPixelExtent> tmpExts(exts);
+    size_t nExts = tmpExts.size();
+    // add guard pixels
+    for (size_t b=0; b<nExts; ++b)
+      {
+      tmpExts[b].Grow(ng);
+      tmpExts[b]&=this->DataSetExt;
+      }
+    guardExts=tmpExts;
+    // make sure it's disjoint
+    disjointGuardExts.clear();
+    this->MakeDecompDisjoint(tmpExts, disjointGuardExts);
+    }
+  else
+    {
+    // when not normalizing during integration we need max(V) on the LIC
+    // decomp. Each domain has the potential to require a unique number
+    // of guard cells.
+    vector<float> vectorMax;
+    this->VectorMax(
+            exts,
+            vectors,
+            vectorMax);
+    //cerr << "ng=";
+    deque<vtkPixelExtent> tmpExts(exts);
+    size_t nExts = tmpExts.size();
+    // add guard pixels
+    for (size_t b=0; b<nExts; ++b)
+      {
+      int ng
+        = static_cast<int>(vectorMax[b]*arc)
+        + this->NumberOfEEGuardPixels
+        + this->NumberOfAAGuardPixels;
+      ng = ng<2 ? 2 : ng;
+      //cerr << " " << ng;
+      tmpExts[b].Grow(ng);
+      tmpExts[b]&=this->DataSetExt;
+      }
+    guardExts=tmpExts;
+    //cerr << endl;
+    // make sure it's disjoint
+    disjointGuardExts.clear();
+    this->MakeDecompDisjoint(tmpExts, disjointGuardExts);
+    }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+void vtkSurfaceLICComposite::GetPixelBounds(
+      float *rgba,
+      int ni,
+      vtkPixelExtent &ext)
+{
+  vtkPixelExtent text;
+  for (int j=ext[2]; j<=ext[3]; ++j)
+    {
+    for (int i=ext[0]; i<=ext[1]; ++i)
+      {
+      if (rgba[4*(j*ni+i)+3] > 0.0f)
+        {
+        text[0] = text[0] > i ? i : text[0];
+        text[1] = text[1] < i ? i : text[1];
+        text[2] = text[2] > j ? j : text[2];
+        text[3] = text[3] < j ? j : text[3];
+        }
+      }
+    }
+  ext = text;
+}
+
+// ----------------------------------------------------------------------------
+int vtkSurfaceLICComposite::InitializeCompositeExtents(float *vectors)
+{
+  // determine screen bounds of all blocks
+  size_t nBlocks = this->BlockExts.size();
+  for (size_t b = 0; b<nBlocks; ++b)
+    {
+    this->DataSetExt |= this->BlockExts[b];
+    }
+
+  // Make all of the input block extents disjoint so that
+  // LIC is computed once per pixel.
+  this->MakeDecompDisjoint(this->BlockExts, this->CompositeExt, vectors);
+
+  // add guard cells to the new decomp to prevent artifacts
+  this->AddGuardPixels(
+        this->CompositeExt,
+        this->GuardExt,
+        this->DisjointGuardExt,
+        vectors);
+
+  #if vtkSurfaceLICCompositeDEBUG >= 1
+  vtkPixelExtentIO::Write(0, "SerViewExtent.vtk", this->WindowExt);
+  vtkPixelExtentIO::Write(0, "SerGeometryDecomp.vtk", this->BlockExts);
+  vtkPixelExtentIO::Write(0, "SerLICDecomp.vtk", this->CompositeExt);
+  vtkPixelExtentIO::Write(0, "SerLICDecompGuard.vtk", this->GuardExt);
+  vtkPixelExtentIO::Write(0, "SerLICDecompDisjointGuard.vtk", this->DisjointGuardExt);
+  #endif
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+void vtkSurfaceLICComposite::PrintSelf(ostream &os, vtkIndent indent)
+{
+  vtkObject::PrintSelf(os, indent);
+  os << *this << endl;
+}
+
+// ****************************************************************************
+ostream &operator<<(ostream &os, vtkSurfaceLICComposite &ss)
+{
+  os << "winExt=" << ss.WindowExt << endl;
+  os << "blockExts=" << endl;
+  size_t nExts = ss.BlockExts.size();
+  for (size_t i=0; i<nExts; ++i)
+    {
+    os << "  " << ss.BlockExts[i] << endl;
+    }
+  os << "compositeExts=" << endl;
+  nExts = ss.CompositeExt.size();
+  for (size_t i=0; i<nExts; ++i)
+    {
+    os << ss.CompositeExt[i] << endl;
+    }
+  os << "guardExts=" << endl;
+  for (size_t i=0; i<nExts; ++i)
+    {
+    os << ss.GuardExt[i] << endl;
+    }
+  os << "disjointGuardExts=" << endl;
+  for (size_t i=0; i<nExts; ++i)
+    {
+    os << ss.DisjointGuardExt[i] << endl;
+    }
+  return os;
+}
diff --git a/Rendering/LIC/vtkSurfaceLICComposite.h b/Rendering/LIC/vtkSurfaceLICComposite.h
new file mode 100644
index 0000000..a2aec7b
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICComposite.h
@@ -0,0 +1,270 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSurfaceLICComposite.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSurfaceLICComposite -- To move data during parallel surface LIC
+// .SECTION Description
+// This class decomposes the image space and shuffles image space
+// data onto the new decomposition with the necessary guard cells
+// to prevent artifacts at the decomposition boundaries. After the
+// image LIC is computed on the new decomposition, this class will
+// un-shuffle the computed LIC back onto the original decomposition.
+
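+// A minimal serial-use sketch (editor's illustration, not upstream docs); the
+// window extent winExt, the block extents blockExts, and the screen-space
+// vector buffer vectors are assumed to exist already:
+//
+//   vtkSurfaceLICComposite *comp = vtkSurfaceLICComposite::New();
+//   comp->Initialize(winExt, blockExts,
+//                    vtkSurfaceLICComposite::COMPOSITE_AUTO,
+//                    0.01, 40, 1, 0, 0);
+//   comp->InitializeCompositeExtents(vectors); // serial: disjoint + guard
+//   const std::deque<vtkPixelExtent> &licExts = comp->GetCompositeExtents();
+//   comp->Delete();
+//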
+#ifndef __vtkSurfaceLICComposite_h
+#define __vtkSurfaceLICComposite_h
+
+#include "vtkObject.h"
+#include "vtkRenderingLICModule.h" // for export macro
+#include "vtkPixelExtent.h" // for pixel extent
+#include <deque> // for deque
+#include <vector> // for vector
+
+class vtkFloatArray;
+class vtkOpenGLRenderWindow;
+class vtkPainterCommunicator;
+class vtkTextureObject;
+
+class VTKRENDERINGLIC_EXPORT vtkSurfaceLICComposite : public vtkObject
+{
+public:
+  static vtkSurfaceLICComposite *New();
+  vtkTypeMacro(vtkSurfaceLICComposite, vtkObject);
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Initialize the object based on the following description of the
+  // blocks projected onto the render window. winExt describes the
+  // window size, blockExts describes each block's extent in window
+  // coords. stepSize is the window coordinate integration step size.
+  // When an in-place strategy is used, compositing happens on the original extents.
+  void Initialize(
+        const vtkPixelExtent &winExt,
+        const std::deque<vtkPixelExtent> &blockExts,
+        int strategy,
+        double stepSize,
+        int nSteps,
+        int normalizeVectors,
+        int enhancedLIC,
+        int antialias);
+
+  // Description:
+  // Control the screen space decomposition. The available modes are:
+  //
+  // INPLACE
+  //   use the block decomp. This may result in LIC being computed
+  //   many times for the same pixels and an excessive amount of
+  //   IPC during compositing if any of the block extents cover
+  //   or intersect a number of block extents. The input data
+  //   needs to be shuffled but not unshuffled since for overlapping
+  //   regions LIC is computed by all processes that overlap.
+  //   If there is very little overlap between block extents
+  //   then this method is superior since no unshuffle is needed.
+  //
+  // INPLACE_DISJOINT
+  //   use a disjoint version of the block decomp. This will leave
+  //   non-overlapping data in place, reassigning overlapping regions
+  //   so that LIC is computed once for each pixel on the screen.
+  //   An unshuffle step is needed to move data in overlapping regions to all
+  //   processes that overlap.
+  //
+  // BALANCED
+  //   move to a new decomp where each rank gets an equal number
+  //   of pixels. This ensures the best load balancing during LIC
+  //   and that LIC is computed once for each pixel. In the worst
+  //   case each pixel will be shuffled and unshuffled.
+  //
+  // AUTO
+  //   Use a heuristic to select the mode.
+  enum {
+    COMPOSITE_INPLACE=0,
+    COMPOSITE_INPLACE_DISJOINT,
+    COMPOSITE_BALANCED,
+    COMPOSITE_AUTO
+    };
+  void SetStrategy(int val){ this->Strategy = val; }
+  int GetStrategy(){ return this->Strategy; }
+
+  // Description:
+  // Get the number of new extents assigned to this rank after
+  // the decomposition.
+  int GetNumberOfCompositeExtents() const
+    { return static_cast<int>(this->CompositeExt.size()); }
+
+  // Description:
+  // Get the extent of the domain over which to compute the LIC. This can
+  // be queried only after the Composite takes place.
+  const vtkPixelExtent &GetGuardExtent(int i=0) const
+    { return this->GuardExt[i]; }
+
+  const std::deque<vtkPixelExtent> &GetGuardExtents() const
+    { return this->GuardExt; }
+
+  // Description:
+  // Get the extent of the domain over which to compute the LIC. This can
+  // be queried only after the Composite takes place.
+  const vtkPixelExtent &GetDisjointGuardExtent(int i=0) const
+    { return this->DisjointGuardExt[i]; }
+
+  const std::deque<vtkPixelExtent> &GetDisjointGuardExtents() const
+    { return this->DisjointGuardExt; }
+
+  // Description:
+  // Get the extent of the domain over which to compute the LIC. This can
+  // be queried only after the Composite takes place.
+  const vtkPixelExtent &GetCompositeExtent(int i=0) const
+    { return this->CompositeExt[i]; }
+
+  const std::deque<vtkPixelExtent> &GetCompositeExtents() const
+    { return this->CompositeExt; }
+
+  // Description:
+  // Get the whole dataset extent (all blocks).
+  const vtkPixelExtent &GetDataSetExtent() const
+    { return this->DataSetExt; }
+
+  // Description:
+  // Get the whole window extent.
+  const vtkPixelExtent &GetWindowExtent() const
+    { return this->WindowExt; }
+
+  // Description:
+  // Set up for a serial run, makes the decomp disjoint and adds
+  // requisite guard pixels.
+  int InitializeCompositeExtents(float *vectors);
+
+  // Description:
+  // Set the rendering context. Must be set prior to use. The reference is not
+  // held, so the caller must ensure the render window is not destroyed during
+  // use.
+  virtual void SetContext(vtkOpenGLRenderWindow *){}
+  virtual vtkOpenGLRenderWindow *GetContext(){ return NULL; }
+
+  // Description:
+  // Set the communicator for parallel communication. A duplicate
+  // is not made. It is up to the caller to manage the life of
+  // the communicator such that it is around while this class
+  // needs it and is released after.
+  virtual void SetCommunicator(vtkPainterCommunicator*){}
+
+  // Description:
+  // Set the communicator to the default communicator
+  virtual void RestoreDefaultCommunicator(){}
+
+  // Description:
+  // Build programs to move data to the new decomp
+  // In parallel THIS IS A COLLECTIVE OPERATION
+  virtual int BuildProgram(float*){ return -1; }
+
+  // Description:
+  // Move a single buffer from the geometry decomp to the LIC decomp.
+  // THIS IS A COLLECTIVE OPERATION
+  virtual int Gather(void *, int, int, vtkTextureObject *&)
+    { return -1; }
+
+  // Description:
+  // Move a single buffer from the LIC decomp to the geometry decomp
+  // In parallel THIS IS A COLLECTIVE OPERATION
+  virtual int Scatter(void *, int, int, vtkTextureObject *&)
+    { return -1; }
+
+  // Description:
+  // Make a decomposition disjoint with respect to itself. Extents are
+  // removed from the input array and disjoint extents are appended onto
+  // the output array. This is a local operation.
+  static
+  int MakeDecompDisjoint(
+        std::deque<vtkPixelExtent> &in,
+        std::deque<vtkPixelExtent> &out);
+
+protected:
+  vtkSurfaceLICComposite();
+  ~vtkSurfaceLICComposite();
+
+  // Description:
+  // For a serial run. Make a decomposition disjoint. Sorts extents and
+  // processes largest to smallest, repeatedly subtracting smaller
+  // remaining blocks from the largest remaining. Each extent in the
+  // new disjoint set is shrunk to tightly bound the vector data;
+  // extents with empty vectors are removed. This is a local operation
+  // since the vector field is local.
+  int MakeDecompDisjoint(
+        const std::deque<vtkPixelExtent> &in,
+        std::deque<vtkPixelExtent> &out,
+        float *vectors);
+
+  // Description:
+  // Compute max(V) on the given extent.
+  float VectorMax(
+        const vtkPixelExtent &ext,
+        float *vectors);
+
+  // Description:
+  // Compute max(V) on a set of extents. Neighboring extents are
+  // included in the computation.
+  int VectorMax(
+        const std::deque<vtkPixelExtent> &exts,
+        float *vectors,
+        std::vector<float> &vMax);
+
+  // Description:
+  // Add guard pixels (Serial run)
+  int AddGuardPixels(
+      const std::deque<vtkPixelExtent> &exts,
+      std::deque<vtkPixelExtent> &guardExts,
+      std::deque<vtkPixelExtent> &disjointGuardExts,
+      float *vectors);
+
+  // Description:
+  // shrink pixel extent based on non-zero alpha channel values
+  void GetPixelBounds(
+      float *rgba,
+      int ni,
+      vtkPixelExtent &ext);
+
+  // Description:
+  // factor for determining extra padding for guard pixels.
+  // depends on window aspect ratio because of anisotropic
+  // transform to texture space. see note in implementation.
+  float GetFudgeFactor(int nx[2]);
+
+protected:
+  int Pass;                                    // id for mpi tagging
+
+  vtkPixelExtent WindowExt;                    // screen extent (screen size)
+  vtkPixelExtent DataSetExt;                   // screen extent of the dataset
+  std::deque<vtkPixelExtent> BlockExts;        // screen extents of blocks
+
+  std::deque<vtkPixelExtent> CompositeExt;     // screen extents after decomp
+  std::deque<vtkPixelExtent> GuardExt;         // screen extents w/ guard cells
+  std::deque<vtkPixelExtent> DisjointGuardExt; // screen extents w/ guard cells
+
+  int Strategy;                                // control for parallel composite
+
+  double StepSize;                             // window coordinates step size
+  int NumberOfSteps;                           // number of integration steps
+  int NormalizeVectors;                        // does the integrator normalize
+  int NumberOfGuardLevels;                     // 1.5 if enhanced LIC, 1 otherwise
+  int NumberOfEEGuardPixels;                   // 1 if enhanced LIC, 0 otherwise
+  int NumberOfAAGuardPixels;                   // n antialias passes
+
+private:
+  vtkSurfaceLICComposite(const vtkSurfaceLICComposite&); // Not implemented
+  void operator=(const vtkSurfaceLICComposite&); // Not implemented
+
+  friend
+  ostream &operator<<(ostream &os, vtkSurfaceLICComposite &ss);
+};
+
+ostream &operator<<(ostream &os, vtkSurfaceLICComposite &ss);
+
+#endif
diff --git a/Rendering/LIC/vtkSurfaceLICDefaultPainter.cxx b/Rendering/LIC/vtkSurfaceLICDefaultPainter.cxx
new file mode 100644
index 0000000..e090022
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICDefaultPainter.cxx
@@ -0,0 +1,114 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSurfaceLICDefaultPainter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSurfaceLICDefaultPainter.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkGarbageCollector.h"
+#include "vtkSurfaceLICPainter.h"
+#include "vtkCoincidentTopologyResolutionPainter.h"
+#include "vtkClipPlanesPainter.h"
+
+//#define vtkSurfaceLICDefaultPainterDEBUG
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkSurfaceLICDefaultPainter);
+
+//----------------------------------------------------------------------------
+vtkCxxSetObjectMacro(
+      vtkSurfaceLICDefaultPainter,
+      SurfaceLICPainter,
+      vtkSurfaceLICPainter);
+
+//----------------------------------------------------------------------------
+vtkSurfaceLICDefaultPainter::vtkSurfaceLICDefaultPainter()
+{
+  this->SurfaceLICPainter = vtkSurfaceLICPainter::New();
+}
+
+//----------------------------------------------------------------------------
+vtkSurfaceLICDefaultPainter::~vtkSurfaceLICDefaultPainter()
+{
+  this->SetSurfaceLICPainter(NULL);
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICDefaultPainter::BuildPainterChain()
+{
+  this->Superclass::BuildPainterChain();
+
+  vtkPainter *prevPainter = this->GetClipPlanesPainter();
+  vtkPainter *nextPainter = prevPainter->GetDelegatePainter();
+
+  prevPainter->SetDelegatePainter(this->SurfaceLICPainter);
+  this->SurfaceLICPainter->SetDelegatePainter(nextPainter);
+
+  #if defined (vtkSurfaceLICDefaultPainterDEBUG)
+  cerr << "SurfaceLIC Default Painter Chain:" << endl;
+  vtkPainter *painter = this->vtkPainter::GetDelegatePainter();
+  while (painter)
+    {
+    cerr << painter->GetClassName() << "->";
+    painter = painter->GetDelegatePainter();
+    }
+  cerr << "NULL" << endl;
+  #endif
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICDefaultPainter::ReportReferences(
+        vtkGarbageCollector *collector)
+{
+  this->Superclass::ReportReferences(collector);
+  vtkGarbageCollectorReport(
+        collector,
+        this->SurfaceLICPainter,
+        "SurfaceLICPainter");
+}
+
+//-------------------------------------------------------------------------
+void vtkSurfaceLICDefaultPainter::UpdateBounds(double bounds[6])
+{
+  // need the superclass to start with the first painter in the chain
+  vtkPainter *painter = this->Superclass::GetDelegatePainter();
+  if (painter)
+    {
+    // delegate the task of updating the bounds
+    painter->UpdateBounds(bounds);
+    }
+  else
+    {
+    // no painter in the chain. let's build the chain if needed.
+    if (this->ChainBuildTime < this->MTime)
+      {
+      // build the chain of painters
+      this->BuildPainterChain();
+      this->ChainBuildTime.Modified();
+      }
+    // try again to get the first painter in the chain
+    painter = this->Superclass::GetDelegatePainter();
+    if (painter)
+      {
+      //delegate the task of updating the bounds
+      painter->UpdateBounds(bounds);
+      }
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICDefaultPainter::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "SurfaceLICPainter: " << this->SurfaceLICPainter << endl;
+}
diff --git a/Rendering/LIC/vtkSurfaceLICDefaultPainter.h b/Rendering/LIC/vtkSurfaceLICDefaultPainter.h
new file mode 100644
index 0000000..d10b344
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICDefaultPainter.h
@@ -0,0 +1,74 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSurfaceLICDefaultPainter.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSurfaceLICDefaultPainter - vtkDefaultPainter replacement that
+//  inserts the vtkSurfaceLICPainter at the correct position in the painter
+//  chain.
+//
+// .SECTION Description
+//  vtkSurfaceLICDefaultPainter is a vtkDefaultPainter replacement
+//  that inserts the vtkSurfaceLICPainter at the correct position in the painter
+//  chain.
+//
+// .SECTION See Also
+//  vtkDefaultPainter vtkSurfaceLICPainter
+
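+// A minimal installation sketch (editor's illustration, not upstream docs);
+// it assumes an existing vtkPainterPolyDataMapper named mapper:
+//
+//   vtkSurfaceLICDefaultPainter *licPainter = vtkSurfaceLICDefaultPainter::New();
+//   licPainter->SetDelegatePainter(mapper->GetPainter()->GetDelegatePainter());
+//   mapper->SetPainter(licPainter);  // painter chain is rebuilt on next render
+//   // configure LIC via licPainter->GetSurfaceLICPainter() as needed
+//   licPainter->Delete();
+//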
+#ifndef __vtkSurfaceLICDefaultPainter_h
+#define __vtkSurfaceLICDefaultPainter_h
+
+#include "vtkRenderingLICModule.h" // For export macro
+#include "vtkDefaultPainter.h"
+
+class vtkSurfaceLICPainter;
+
+class VTKRENDERINGLIC_EXPORT vtkSurfaceLICDefaultPainter
+  : public vtkDefaultPainter
+{
+public:
+  static vtkSurfaceLICDefaultPainter* New();
+  vtkTypeMacro(vtkSurfaceLICDefaultPainter, vtkDefaultPainter);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Get/Set the Surface LIC painter.
+  void SetSurfaceLICPainter(vtkSurfaceLICPainter*);
+  vtkGetObjectMacro(SurfaceLICPainter, vtkSurfaceLICPainter);
+
+//BTX
+protected:
+  vtkSurfaceLICDefaultPainter();
+  ~vtkSurfaceLICDefaultPainter();
+
+  // Description:
+  // Set up the painter chain.
+  virtual void BuildPainterChain();
+
+  // Description:
+  // Take part in garbage collection.
+  virtual void ReportReferences(vtkGarbageCollector *collector);
+
+  // Description:
+  // Override.
+  virtual void UpdateBounds(double bounds[6]);
+
+protected:
+  vtkSurfaceLICPainter* SurfaceLICPainter;
+
+private:
+  vtkSurfaceLICDefaultPainter(const vtkSurfaceLICDefaultPainter&); // Not implemented.
+  void operator=(const vtkSurfaceLICDefaultPainter&); // Not implemented.
+//ETX
+};
+
+#endif
diff --git a/Rendering/LIC/vtkSurfaceLICPainter.cxx b/Rendering/LIC/vtkSurfaceLICPainter.cxx
new file mode 100644
index 0000000..01d54f1
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICPainter.cxx
@@ -0,0 +1,3442 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSurfaceLICPainter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSurfaceLICPainter.h"
+
+#include "vtkInformation.h"
+#include "vtkScalarsToColors.h"
+#include "vtkScalarsToColorsPainter.h"
+
+#include "vtkPainterCommunicator.h"
+#include "vtkSurfaceLICComposite.h"
+#include "vtkBase64Utilities.h"
+#include "vtkBoundingBox.h"
+#include "vtkCellData.h"
+#include "vtkColorMaterialHelper.h"
+#include "vtkCompositeDataIterator.h"
+#include "vtkCompositeDataSet.h"
+#include "vtkFrameBufferObject2.h"
+#include "vtkRenderbuffer.h"
+#include "vtkPixelBufferObject.h"
+#include "vtkPixelExtent.h"
+#include "vtkGarbageCollector.h"
+#include "vtkGenericDataObjectReader.h"
+#include "vtkImageData.h"
+#include "vtkLightingHelper.h"
+#include "vtkLineIntegralConvolution2D.h"
+#include "vtkMatrix4x4.h"
+#include "vtkMath.h"
+#include "vtkMinimalStandardRandomSequence.h"
+#include "vtkNoise200x200.h"
+#include "vtkObjectFactory.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLModelViewProjectionMonitor.h"
+#include "vtkOpenGLLightMonitor.h"
+#include "vtkBackgroundColorMonitor.h"
+#include "vtkPointData.h"
+#include "vtkPolyData.h"
+#include "vtkProperty.h"
+#include "vtkRenderer.h"
+#include "vtkCamera.h"
+#include "vtkShader2Collection.h"
+#include "vtkShader2.h"
+#include "vtkShaderProgram2.h"
+#include "vtkSmartPointer.h"
+#include "vtkTextureObject.h"
+#include "vtkUniformVariables.h"
+#include "vtkWeakPointer.h"
+#include "vtkUnsignedCharArray.h"
+#include "vtkFloatArray.h"
+#include "vtkgl.h"
+#include "vtkOpenGLError.h"
+
+#include <cassert>
+#include <cstring>
+#include <algorithm>
+#include <limits>
+#include <vector>
+#include <deque>
+#include <cstdlib>
+
+using std::vector;
+using std::deque;
+using std::string;
+
+typedef vtkLineIntegralConvolution2D vtkLIC2D;
+
+// use the parallel timer for benchmarks and scaling;
+// if not defined, vtkTimerLog is used.
+// #define vtkSurfaceLICPainterTIME
+#if !defined(vtkSurfaceLICPainterTIME)
+#include "vtkTimerLog.h"
+#endif
+
+// write intermediate results to disk for debugging
+#define vtkSurfaceLICPainterDEBUG 0
+#if vtkSurfaceLICPainterDEBUG >= 2
+#include "vtkTextureIO.h"
+#include <sstream>
+using std::ostringstream;
+//----------------------------------------------------------------------------
+static
+string mpifn(vtkPainterCommunicator *comm, const char *fn)
+{
+  ostringstream oss;
+  oss << comm->GetRank() << "_" << fn;
+  return oss.str();
+}
+#endif
+
+// Enable streamed min/max computations. Streaming is accomplished
+// via PBO+glReadPixels to read just the regions we are updating.
+// Without streaming, PBO+glGetTexImage is used to transfer the entire
+// screen-sized texture, of which (in parallel) we are updating only
+// a small part.
+#define STREAMING_MIN_MAX
+
+// store depths in a texture. If not defined, a renderbuffer object is used.
+// NOTE: this must be on because of a slight difference in how
+// texture filtering is implemented by OS Mesa.
+#define USE_DEPTH_TEXTURE
+
+extern const char* vtkSurfaceLICPainter_GeomFs;
+extern const char* vtkSurfaceLICPainter_GeomVs;
+extern const char* vtkSurfaceLICPainter_SC;
+extern const char* vtkSurfaceLICPainter_CE;
+extern const char* vtkSurfaceLICPainter_DCpy;
+
+namespace vtkSurfaceLICPainterUtil
+{
+
+inline
+double vtkClamp(double val, const double& min, const double& max)
+{
+  val = (val < min)? min : val;
+  val = (val > max)? max : val;
+  return val;
+}
+
+// Description
+// find min/max of unmasked fragments across all regions
+// download the entire screen then search each region
+void FindMinMax(
+      vtkTextureObject *tex,
+      deque<vtkPixelExtent> &blockExts,
+      float &min,
+      float &max)
+{
+  // download entire screen
+  vtkPixelBufferObject *pbo = tex->Download();
+  float *pHSLColors = static_cast<float*>(pbo->MapPackedBuffer());
+  // search regions
+  int size0 = tex->GetWidth();
+  size_t nBlocks = blockExts.size();
+  for (size_t e=0; e<nBlocks; ++e)
+    {
+    const vtkPixelExtent &blockExt = blockExts[e];
+    for (int j=blockExt[2]; j<=blockExt[3]; ++j)
+      {
+      for (int i=blockExt[0]; i<=blockExt[1]; ++i)
+        {
+        size_t id = 4*(size0*j+i);
+        if (pHSLColors[id+3] != 0.0f)
+          {
+          float L = pHSLColors[id+2];
+          min = min > L ? L : min;
+          max = max < L ? L : max;
+          }
+        }
+      }
+    }
+  pbo->UnmapPackedBuffer();
+  pbo->Delete();
+  #if  vtkSurfaceLICPainterDEBUG >= 1
+  cerr << "min=" << min << " max=" << max << endl;
+  #endif
+}
+
+// Description
+// find min/max of unmasked fragments across all regions
+// download and search each region individually
+void StreamingFindMinMax(
+      vtkFrameBufferObject2 *fbo,
+      deque<vtkPixelExtent> &blockExts,
+      float &min,
+      float &max)
+{
+  size_t nBlocks = blockExts.size();
+  // initiate download
+  fbo->ActivateReadBuffer(1U);
+  vtkStaticCheckFrameBufferStatusMacro(vtkgl::FRAMEBUFFER_EXT);
+  vector<vtkPixelBufferObject*> pbos(nBlocks, NULL);
+  for (size_t e=0; e<nBlocks; ++e)
+    {
+    pbos[e] = fbo->Download(
+          blockExts[e].GetData(),
+          VTK_FLOAT,
+          4,
+          GL_FLOAT,
+          GL_RGBA);
+    }
+  fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U);
+  fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 1U);
+  fbo->DeactivateDrawBuffers();
+  fbo->DeactivateReadBuffer();
+  // map search and release each region
+  for (size_t e=0; e<nBlocks; ++e)
+    {
+    vtkPixelBufferObject *&pbo = pbos[e];
+    float *pColors = (float*)pbo->MapPackedBuffer();
+
+    size_t n = blockExts[e].Size();
+    for (size_t i = 0; i<n; ++i)
+      {
+      if (pColors[4*i+3] != 0.0f)
+        {
+        float L = pColors[4*i+2];
+        min = min > L ? L : min;
+        max = max < L ? L : max;
+        }
+      }
+    pbo->UnmapPackedBuffer();
+    pbo->Delete();
+    pbo = NULL;
+    }
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr << "min=" << min << " max=" << max << endl;
+  #endif
+}
+
+/**
+integer log base 2
+*/
+int ilog2(unsigned int n)
+{
+  if (n == 0)
+    {
+    return -1;
+    }
+  unsigned int r = 0;
+  while ((n >>= 1))
+    {
+    r += 1;
+    }
+  return r;
+}
+
+/**
+An interface to a random number generator. We can't use
+the C stdlib since we're not guaranteed to get consistent
+sequences across platforms or library versions, and that
+would prevent consistent output during regression tests.
+*/
+class RandomNumberGeneratorInterface
+{
+public:
+  RandomNumberGeneratorInterface()
+    {
+    this->RNG = vtkMinimalStandardRandomSequence::New();
+    }
+
+  ~RandomNumberGeneratorInterface()
+    {
+    this->RNG->Delete();
+    }
+
+  /**
+  Seed the random number generator
+  */
+  void SetSeed(int seedVal)
+    {
+    #if 0
+    srand(seedVal);
+    #else
+    this->RNG->SetSeed(seedVal);
+    #endif
+    }
+
+  /**
+  Get a random number in the range of 0 to 1.
+  */
+  double GetRandomNumber()
+  {
+    #if 0
+    double val = static_cast<double>(rand())/RAND_MAX;
+    #else
+    double val = this->RNG->GetValue();
+    this->RNG->Next();
+    #endif
+    return val;
+  }
+
+private:
+  void operator=(const RandomNumberGeneratorInterface &); // not implemented
+  RandomNumberGeneratorInterface(const RandomNumberGeneratorInterface &); // not implemented
+
+private:
+  vtkMinimalStandardRandomSequence *RNG;
+};
+
+/**
+2D Noise Generator. Generates arrays for use as a noise texture
+in the LIC algorithm. Can generate noise with uniform or Gaussian
+distributions, with a desired number of noise levels, and a
+desired frequency (f < 1 is impulse noise).
+*/
+class RandomNoise2D
+{
+public:
+  RandomNoise2D(){}
+
+  // Description:
+  // Generate a patch of random gray scale values along with an
+  // alpha channel (in vtk array format). The data should be
+  // deleted by later calling DeleteValues. Grain size and sideLen
+  // may be modified to match the noise generator requirements,
+  // returned arrays will be sized accordingly.
+  //
+  // type              - UNIFORM=0, GAUSSIAN=1, PERLIN=2
+  // sideLen           - side length of square patch in pixels (in/out)
+  // grainSize         - grain size of noise values in pixels (in/out)
+  // nLevels           - number of noise intensity levels
+  // minNoiseVal       - set the min for noise pixels (position distribution)
+  // maxNoiseVal       - set the max for noise pixels (position distribution)
+  // impulseProb       - probability of impulse noise, 1 touches every pixel
+  // impulseBgNoiseVal - set the background color for impulse noise
+  // seed              - seed for random number generator
+  enum {
+    UNIFORM = 0,
+    GAUSSIAN = 1,
+    PERLIN = 2
+    };
+  float *Generate(
+        int type,
+        int &sideLen,
+        int &grainSize,
+        float minNoiseVal,
+        float maxNoiseVal,
+        int nLevels,
+        double impulseProb,
+        float impulseBgNoiseVal,
+        int seed);
+
+  // Description
+  // Delete the passed in array of values.
+  void DeleteValues(unsigned char *vals){ free(vals); }
+
+private:
+  // Description:
+  // Generate noise with a uniform distribution.
+  float *GenerateUniform(
+        int sideLen,
+        int grainSize,
+        float minNoiseVal,
+        float maxNoiseVal,
+        int nLevels,
+        double impulseProb,
+        float impulseBgNoiseVal,
+        int seed);
+
+  // Description:
+  // Generate noise with a Gaussian distribution.
+  float *GenerateGaussian(
+        int sideLen,
+        int grainSize,
+        float minNoiseVal,
+        float maxNoiseVal,
+        int nLevels,
+        double impulseProb,
+        float impulseBgNoiseVal,
+        int seed);
+
+  // Description:
+  // Generate Perlin noise with a Gaussian distribution.
+  float *GeneratePerlin(
+        int sideLen,
+        int grainSize,
+        float minNoiseVal,
+        float maxNoiseVal,
+        int nLevels,
+        double impulseProb,
+        float impulseBgNoiseVal,
+        int seed);
+
+  // Description:
+  // A way of controlling the probability (from 0.0 to 1.0) that you
+  // generate values. Returns 1 if you should generate a value.
+  // For example, this is used to control the frequency of impulse
+  // noise.
+  int ShouldGenerateValue(double prob);
+
+  // Description:
+  // Get a valid length for the side of the patch and the grain size in pixels,
+  // given a desired patch side length and a grain size. This ensures that all
+  // grains are the same size.
+  void GetValidDimensionAndGrainSize(int type, int &dim, int &grainSize);
+
+private:
+  RandomNumberGeneratorInterface ValueGen;
+  RandomNumberGeneratorInterface ProbGen;
+};
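+
+// An illustrative call (editor's sketch; the values are arbitrary): generate a
+// 128x128 uniform-noise patch with 2x2 grains and full impulse coverage, then
+// free it through the generator when done:
+//
+//   RandomNoise2D noiseGen;
+//   int sideLen = 128, grainSize = 2;
+//   float *vals = noiseGen.Generate(RandomNoise2D::UNIFORM, sideLen, grainSize,
+//                                   0.0f, 1.0f, 256, 1.0, 0.0f, 12367);
+//   // ... upload vals as the LIC noise texture ...
+//   noiseGen.DeleteValues((unsigned char*)vals); // cast matches DeleteValues()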
+
+//-----------------------------------------------------------------------------
+void RandomNoise2D::GetValidDimensionAndGrainSize(int type, int &sideLen, int &grainSize)
+{
+  // for Perlin noise, both side length and grain size need to be powers of 2
+  if (type == PERLIN)
+    {
+    sideLen = 1 << ilog2(sideLen);
+    grainSize = 1 << ilog2(grainSize);
+    }
+
+  // grains can't be larger than the patch
+  if (sideLen < grainSize)
+    {
+    sideLen = grainSize;
+    }
+
+  // generate noise with a given grain size on the patch
+  if (sideLen % grainSize)
+    {
+    // grainSize is not an even divisor of sideLen, adjust sideLen to
+    // next larger even divisor
+    sideLen = grainSize * (sideLen/grainSize + 1);
+    }
+}
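+
+// For example (assuming ilog2, used above, is a floor-log2 helper
+// defined earlier in this file): a request for sideLen=200, grainSize=3
+// with a UNIFORM or GAUSSIAN type is adjusted to sideLen=201 so that
+// the grain evenly divides the side, while with PERLIN both values are
+// first snapped to powers of two (200 -> 128, 3 -> 2).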
+
+//-----------------------------------------------------------------------------
+int RandomNoise2D::ShouldGenerateValue(double prob)
+{
+  if (this->ProbGen.GetRandomNumber() > (1.0 - prob))
+    {
+    return 1;
+    }
+  return 0;
+}
+
+//-----------------------------------------------------------------------------
+float *RandomNoise2D::Generate(
+      int type,
+      int &sideLen,
+      int &grainSize,
+      float minNoiseVal,
+      float maxNoiseVal,
+      int nLevels,
+      double impulseProb,
+      float impulseBgNoiseVal,
+      int seed)
+{
+  this->GetValidDimensionAndGrainSize(type, sideLen, grainSize);
+
+  switch (type)
+    {
+    case GAUSSIAN:
+      return this->GenerateGaussian(
+            sideLen,
+            grainSize,
+            minNoiseVal,
+            maxNoiseVal,
+            nLevels,
+            impulseProb,
+            impulseBgNoiseVal,
+            seed);
+      break;
+    case UNIFORM:
+      return this->GenerateUniform(
+            sideLen,
+            grainSize,
+            minNoiseVal,
+            maxNoiseVal,
+            nLevels,
+            impulseProb,
+            impulseBgNoiseVal,
+            seed);
+      break;
+    case PERLIN:
+      return this->GeneratePerlin(
+            sideLen,
+            grainSize,
+            minNoiseVal,
+            maxNoiseVal,
+            nLevels,
+            impulseProb,
+            impulseBgNoiseVal,
+            seed);
+      break;
+    }
+  return NULL;
+}
+
+//-----------------------------------------------------------------------------
+float *RandomNoise2D::GenerateUniform(
+      int sideLen,
+      int grainSize,
+      float minNoiseVal,
+      float maxNoiseVal,
+      int nLevels,
+      double impulseProb,
+      float impulseBgNoiseVal,
+      int seed)
+{
+  // generate a patch of single pixel random values
+  // with a uniform distribution and fixed number of levels
+  nLevels = nLevels < 1 ? 1 : nLevels;
+  int maxLevel = nLevels-1;
+  float delta = 1.0f/maxLevel;
+  minNoiseVal = minNoiseVal < 0.0f ? 0.0f : minNoiseVal;
+  maxNoiseVal = maxNoiseVal > 1.0f ? 1.0f : maxNoiseVal;
+  float noiseRange = maxNoiseVal - minNoiseVal;
+  impulseProb = impulseProb < 0.0 ? 0.0 : impulseProb;
+  impulseProb = impulseProb > 1.0 ? 1.0 : impulseProb;
+  impulseBgNoiseVal = impulseBgNoiseVal < 0.0f ? 0.0f : impulseBgNoiseVal;
+  impulseBgNoiseVal = impulseBgNoiseVal > 1.0f ? 1.0f : impulseBgNoiseVal;
+  this->ValueGen.SetSeed(seed);
+  this->ProbGen.SetSeed(seed);
+  const int sdim = sideLen/grainSize;
+  const int sdim2 = sdim*sdim;
+  float *rvals=(float*)malloc(sdim2*sizeof(float));
+  for (int i=0; i<sdim2; ++i)
+    {
+    rvals[i] = impulseBgNoiseVal;
+    }
+  for (int j=0; j<sdim; ++j)
+    {
+     for (int i=0; i<sdim; ++i)
+       {
+       int idx=j*sdim+i;
+
+       if ((impulseProb == 1.0) || this->ShouldGenerateValue(impulseProb))
+         {
+         int l = static_cast<int>(this->ValueGen.GetRandomNumber()*nLevels);
+         l = l > maxLevel ? maxLevel : l; // needed for 1.0
+         rvals[idx] = nLevels == 1 ? maxNoiseVal : minNoiseVal + (l*delta) * noiseRange;
+         }
+       }
+    }
+
+  // map single pixel random values onto a patch of values of
+  // the requested grain size
+  const int ncomp = 2;
+  const int dim2 = sideLen*sideLen;
+  const int ntup = ncomp*dim2;
+  float *noise = (float*)malloc(ntup*sizeof(float));
+  for (int j=0; j<sideLen; ++j)
+    {
+     for (int i=0; i<sideLen; ++i)
+       {
+       int idx=ncomp*(j*sideLen+i);
+
+       int ii = i/grainSize;
+       int jj = j/grainSize;
+       int iidx = jj*sdim+ii;
+
+       noise[idx] = rvals[iidx];
+       noise[idx+1] = 1.0f; // alpha
+       }
+    }
+  free(rvals);
+
+  return noise;
+}
+
+//-----------------------------------------------------------------------------
+float *RandomNoise2D::GenerateGaussian(
+      int sideLen,
+      int grainSize,
+      float minNoiseVal,
+      float maxNoiseVal,
+      int nLevels,
+      double impulseProb,
+      float impulseBgNoiseVal,
+      int seed)
+{
+  // the distribution becomes Gaussian as N goes to infinity
+  const int N = 2048;
+
+  // generate a patch of single pixel random values
+  // with a Gaussian distribution
+  impulseProb = impulseProb < 0.0 ? 0.0 : impulseProb;
+  impulseProb = impulseProb > 1.0 ? 1.0 : impulseProb;
+  impulseBgNoiseVal = impulseBgNoiseVal < 0.0f ? 0.0f : impulseBgNoiseVal;
+  impulseBgNoiseVal = impulseBgNoiseVal > 1.0f ? 1.0f : impulseBgNoiseVal;
+  this->ValueGen.SetSeed(seed);
+  this->ProbGen.SetSeed(seed);
+  const int sdim = sideLen/grainSize;
+  const int sdim2 = sdim*sdim;
+  float *rvals = (float*)malloc(sdim2*sizeof(float));
+  for (int i=0; i<sdim2; ++i)
+    {
+    rvals[i] = 0.0f;
+    }
+  for (int j=0; j<sdim; ++j)
+    {
+    for (int i=0; i<sdim; ++i)
+      {
+      int idx = j*sdim+i;
+
+      if ((impulseProb == 1.0) || this->ShouldGenerateValue(impulseProb))
+        {
+        double val = 0.0;
+        for (int q=0; q<N; ++q)
+          {
+          val += this->ValueGen.GetRandomNumber();
+          }
+        rvals[idx] = static_cast<float>(val);
+        }
+      }
+    }
+
+  // normalize the noise field onto 0 to 1 and restrict it to the
+  // requested number of levels
+  // first find the min/max
+  float minVal = static_cast<float>(N+1);
+  float maxVal = 0.0f;
+  for (int i=0; i<sdim2; ++i)
+    {
+    // for impulseProb < 1 the background is 0 but pixels that are
+    // touched have a much larger value; after normalization the
+    // Gaussian distribution would be compressed and localized near 1.
+    // We fix this by ignoring zero values.
+    minVal = impulseProb == 1.0 ?
+            (rvals[i] < minVal ? rvals[i] : minVal) :
+            (rvals[i] < minVal && rvals[i] > 0.0f ? rvals[i] : minVal);
+
+    maxVal = rvals[i]>maxVal ? rvals[i] : maxVal;
+    }
+  float maxMinDiff = maxVal-minVal;
+  // because we ignore zero when impulseProb<1 we have to be careful
+  // here so that we can support one noise level.
+  minVal = maxMinDiff == 0.0f ? 0.0f : minVal;
+  maxMinDiff = maxMinDiff == 0.0f ? (maxVal == 0.0f ? 1.0f : maxVal) : maxMinDiff;
+
+  nLevels = nLevels < 1 ? 1 : nLevels;
+  int maxLevel = nLevels-1;
+  float delta = 1.0f/maxLevel;
+  minNoiseVal = minNoiseVal < 0.0f ? 0.0f : minNoiseVal;
+  maxNoiseVal = maxNoiseVal > 1.0f ? 1.0f : maxNoiseVal;
+  float noiseRange = maxNoiseVal - minNoiseVal;
+  for (int i=0; i<sdim2; ++i)
+    {
+    // normalize
+    float val = rvals[i] < minVal ? rvals[i] : (rvals[i] - minVal)/maxMinDiff;
+    // restrict
+    int l = static_cast<int>(val*nLevels);
+    l = l > maxLevel ? maxLevel : l;
+    rvals[i]
+       = rvals[i] < minVal ? impulseBgNoiseVal
+       : nLevels == 1 ? maxNoiseVal : minNoiseVal + (l*delta) * noiseRange;
+    }
+
+  // map single pixel random values onto a patch of values of
+  // the requested grain size
+  const int ncomp = 2;
+  const int dim2 = sideLen*sideLen;
+  const int ntup = ncomp*dim2;
+  float *noise = (float*)malloc(ntup*sizeof(float));
+  for (int j=0; j<sideLen; ++j)
+    {
+     for (int i=0; i<sideLen; ++i)
+       {
+       int idx = ncomp*(j*sideLen+i);
+
+       int ii = i/grainSize;
+       int jj = j/grainSize;
+       int iidx = jj*sdim+ii;
+
+       noise[idx] = rvals[iidx];
+       noise[idx+1] = 1.0; // alpha
+       }
+    }
+  free(rvals);
+
+  return noise;
+}
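+
+// (Assuming ValueGen.GetRandomNumber() returns uniform samples on
+// [0,1), the per-grain sum of N=2048 samples above is, by the central
+// limit theorem, approximately normal with mean N/2 and variance N/12,
+// which is why the result is effectively Gaussian before the
+// normalization step.)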
+
+//-----------------------------------------------------------------------------
+float *RandomNoise2D::GeneratePerlin(
+      int sideLen,
+      int grainSize,
+      float minNoiseVal,
+      float maxNoiseVal,
+      int nLevels,
+      double impulseProb,
+      float impulseBgNoiseVal,
+      int seed)
+{
+  // note: requires power of 2 sideLen, and sideLen > grainSize
+  const int ncomp = 2;
+  const int dim2 = sideLen*sideLen;
+  const int ntup = ncomp*dim2;
+  float *noise = static_cast<float*>(malloc(ntup*sizeof(float)));
+  for (int i=0; i<ntup; i+=2)
+    {
+    noise[i  ] = 0.0f;
+    noise[i+1] = 1.0f; // alpha channel
+    }
+
+  impulseProb = impulseProb < 0.0 ? 0.0 : impulseProb;
+  impulseProb = impulseProb > 1.0 ? 1.0 : impulseProb;
+  impulseBgNoiseVal = impulseBgNoiseVal < 0.0f ? 0.0f : impulseBgNoiseVal;
+  impulseBgNoiseVal = impulseBgNoiseVal > 1.0f ? 1.0f : impulseBgNoiseVal;
+  minNoiseVal = minNoiseVal < 0.0f ? 0.0f : minNoiseVal;
+  maxNoiseVal = maxNoiseVal > 1.0f ? 1.0f : maxNoiseVal;
+
+  //int nIter = ilog2(static_cast<unsigned int>(sideLen-1<nLevels ? sideLen-1 : nLevels));
+  int nIter = ilog2(static_cast<unsigned int>(grainSize));
+  for (int w=0; w<nIter; ++w)
+    {
+    // reduce range with grain size
+    float levelNoiseMin = 0.0f;
+    float levelNoiseMax = 0.1f + 0.9f/static_cast<float>(1<<(nIter-1-w));
+    //float levelNoiseMax = 1.0f - levelNoiseMin;
+    // generate a level of noise
+    int levelGrainSize = 1<<w;
+    float *levelNoise = GenerateGaussian(
+          sideLen,
+          levelGrainSize,
+          levelNoiseMin,
+          levelNoiseMax,
+          nLevels,
+          impulseProb,
+          impulseBgNoiseVal,
+          seed);
+    /*// smooth
+    int nsp = w;
+    for (int k=0; k<nsp; ++k)
+      {
+      for (int j=0; j<sideLen; ++j)
+        {
+         for (int i=0; i<sideLen; ++i)
+           {
+           float K[9] = {
+             0.0191724, 0.100120, 0.0191724,
+             0.1001200, 0.522831, 0.1001200,
+             0.0191724, 0.100120, 0.0191724
+             };
+           float val=0.0;
+           for (int q=0; q<3; ++q)
+             {
+             for (int p=0; p<3; ++p)
+               {
+               int ii = i+p-1;
+               ii = ii < 0 ? i : ii;
+               ii = ii >= sideLen ? i : ii;
+               int jj = j+q-1;
+               jj = jj < 0 ? j : jj;
+               jj = jj >= sideLen ? j : jj;
+               int idx = 2*(sideLen*jj+ii);
+               val += levelNoise[idx]*K[q*3+p];
+               }
+             }
+           levelNoise[2*(sideLen*j+i)] = val;
+           }
+        }
+      }*/
+    // accumulate
+    for (int i=0; i<ntup; i+=2)
+      {
+      noise[i] += levelNoise[i];
+      }
+    free(levelNoise);
+    }
+  // normalize
+  float minVal = static_cast<float>(nIter+1);
+  float maxVal = 0.0f;
+  for (int i=0; i<ntup; i+=2)
+    {
+    float val = noise[i];
+    minVal = val<minVal ? val : minVal;
+    maxVal = val>maxVal ? val : maxVal;
+    }
+  float maxMinDiff = maxVal - minVal;
+  if ( maxMinDiff <= 0.0f )
+    {
+    maxMinDiff = 1.0f;
+    minVal = 0.0f;
+    }
+  for (int i=0; i<ntup; i+=2)
+    {
+    noise[i] = (noise[i] - minVal) / maxMinDiff;
+    }
+  return noise;
+}
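+
+// Note on the returned layout: all three generators above produce a
+// 2-component interleaved array, with the gray value for pixel (i,j)
+// at 2*(j*sideLen+i) and its alpha at 2*(j*sideLen+i)+1, which is what
+// GetNoiseDataSet below wraps in a 2-component vtkFloatArray.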
+
+/**
+Load a predefined texture that has been "pickled" in a string.
+This texture is 200x200 pixels, has a Gaussian distribution, and
+intensities ranging between 0 and 206. This is the texture that
+is used when GenerateNoiseTexture is disabled.
+*/
+vtkImageData *vtkGetNoiseResource()
+{
+  std::string base64string;
+  for (unsigned int cc=0; cc < file_noise200x200_vtk_nb_sections; cc++)
+    {
+    base64string += reinterpret_cast<const char*>(file_noise200x200_vtk_sections[cc]);
+    }
+
+  unsigned char* binaryInput
+     = new unsigned char[file_noise200x200_vtk_decoded_length + 10];
+
+  unsigned long binarylength = vtkBase64Utilities::Decode(
+        reinterpret_cast<const unsigned char*>(base64string.c_str()),
+        static_cast<unsigned long>(base64string.length()),
+        binaryInput);
+
+  assert("check valid_length"
+    && (binarylength == file_noise200x200_vtk_decoded_length));
+
+  vtkGenericDataObjectReader* reader = vtkGenericDataObjectReader::New();
+  reader->ReadFromInputStringOn();
+
+  reader->SetBinaryInputString(
+        reinterpret_cast<char*>(binaryInput),
+        static_cast<int>(binarylength));
+
+  reader->Update();
+  vtkImageData* data = vtkImageData::New();
+  data->ShallowCopy(reader->GetOutput());
+
+  delete [] binaryInput;
+  reader->Delete();
+  return data;
+}
+
+};
+using namespace vtkSurfaceLICPainterUtil;
+
+/**
+Internal data
+*/
+class vtkSurfaceLICPainter::vtkInternals
+{
+public:
+  vtkSmartPointer<vtkOpenGLLightMonitor> LightMonitor[vtkLightingHelper::VTK_MAX_LIGHTS];
+  vtkSmartPointer<vtkOpenGLModelViewProjectionMonitor> ViewMonitor;
+  vtkSmartPointer<vtkBackgroundColorMonitor> BGMonitor;
+
+  vtkWeakPointer<vtkOpenGLRenderWindow> Context;
+  bool GLSupport;
+  int Viewsize[2];
+  long long LastInputDataSetMTime;
+  long long LastPropertyMTime;
+  long long LastLUTMTime;
+
+  deque<vtkPixelExtent> BlockExts;
+  vtkPixelExtent DataSetExt;
+
+  bool ContextNeedsUpdate;
+  bool OutputDataNeedsUpdate;
+  bool CommunicatorNeedsUpdate;
+  bool GeometryNeedsUpdate;
+  bool GatherNeedsUpdate;
+  bool LICNeedsUpdate;
+  bool ColorNeedsUpdate;
+
+  vtkPainterCommunicator *Communicator;
+
+  #ifdef USE_DEPTH_TEXTURE
+  vtkSmartPointer<vtkTextureObject> DepthImage;
+  #else
+  vtkSmartPointer<vtkRenderbuffer> DepthImage;
+  #endif
+  vtkSmartPointer<vtkTextureObject> GeometryImage;
+  vtkSmartPointer<vtkTextureObject> VectorImage;
+  vtkSmartPointer<vtkTextureObject> CompositeVectorImage;
+  vtkSmartPointer<vtkTextureObject> MaskVectorImage;
+  vtkSmartPointer<vtkTextureObject> CompositeMaskVectorImage;
+  vtkSmartPointer<vtkTextureObject> NoiseImage;
+  vtkSmartPointer<vtkTextureObject> LICImage;
+  vtkSmartPointer<vtkTextureObject> RGBColorImage;
+  vtkSmartPointer<vtkTextureObject> HSLColorImage;
+  vtkSmartPointer<vtkImageData> Noise;
+
+  vtkSmartPointer<vtkFrameBufferObject2> FBO;
+
+  vtkSmartPointer<vtkShaderProgram2> RenderGeometryPass;
+  vtkSmartPointer<vtkShaderProgram2> ColorPass;
+  vtkSmartPointer<vtkShaderProgram2> ColorEnhancePass;
+  vtkSmartPointer<vtkShaderProgram2> CopyPass;
+  vtkSmartPointer<vtkLightingHelper> LightingHelper;
+  vtkSmartPointer<vtkColorMaterialHelper> ColorMaterialHelper;
+
+  vtkSmartPointer<vtkSurfaceLICComposite> Compositor;
+  vtkSmartPointer<vtkLineIntegralConvolution2D> LICer;
+
+  int FieldAssociation;
+  int FieldAttributeType;
+  std::string FieldName;
+  bool FieldNameSet;
+  bool HasVectors;
+
+  // Description:
+  // Constructor
+  vtkInternals()
+    {
+    const int nLights = vtkLightingHelper::VTK_MAX_LIGHTS;
+    for (int i=0; i<nLights; ++i)
+      {
+      this->LightMonitor[i] = vtkSmartPointer<vtkOpenGLLightMonitor>::New();
+      this->LightMonitor[i]->SetLightId(i);
+      }
+
+    this->ViewMonitor = vtkSmartPointer<vtkOpenGLModelViewProjectionMonitor>::New();
+    this->BGMonitor = vtkSmartPointer<vtkBackgroundColorMonitor>::New();
+
+    this->Viewsize[0] = this->Viewsize[1] = 0;
+    this->LastInputDataSetMTime = 0;
+    this->LastPropertyMTime = 0;
+    this->LastLUTMTime = 0;
+    this->GLSupport = false;
+
+    this->ContextNeedsUpdate = true;
+    this->OutputDataNeedsUpdate = true;
+    this->CommunicatorNeedsUpdate = true;
+    this->GeometryNeedsUpdate = true;
+    this->LICNeedsUpdate = true;
+    this->GatherNeedsUpdate = true;
+    this->ColorNeedsUpdate = true;
+
+    this->Communicator = new vtkPainterCommunicator;
+
+    this->HasVectors = false;
+    this->FieldNameSet = false;
+    this->FieldAttributeType = 0;
+    this->FieldAssociation = 0;
+
+    this->LightingHelper = vtkSmartPointer<vtkLightingHelper>::New();
+    this->ColorMaterialHelper = vtkSmartPointer<vtkColorMaterialHelper>::New();
+    }
+
+  // Description:
+  // Destructor
+  ~vtkInternals()
+    {
+    this->ClearGraphicsResources();
+
+    const int nLights = vtkLightingHelper::VTK_MAX_LIGHTS;
+    for (int i=0; i<nLights; ++i)
+      {
+      this->LightMonitor[i] = NULL;
+      }
+    this->ViewMonitor = NULL;
+    this->BGMonitor = NULL;
+
+    this->LightingHelper = NULL;
+    this->ColorMaterialHelper = NULL;
+
+    if (this->Communicator)
+      {
+      delete this->Communicator;
+      }
+    }
+
+  // Description:
+  // Check for OpenGL support
+  static bool IsSupported(vtkOpenGLRenderWindow *context)
+    {
+    if (context == NULL)
+      {
+      vtkGenericWarningMacro("OpenGL render window required");
+      return false;
+      }
+
+    bool lic2d = vtkLineIntegralConvolution2D::IsSupported(context);
+
+    bool floatFormats
+      = vtkTextureObject::IsSupported(context, true, true, false);
+
+    bool renderbuffer = true;
+    #if !defined(USE_DEPTH_TEXTURE)
+    renderbuffer = vtkRenderbuffer::IsSupported(context);
+    #endif
+
+    bool support = lic2d && floatFormats && renderbuffer;
+
+    if (!support)
+      {
+      vtkOpenGLExtensionManager *manager = context->GetExtensionManager();
+      vtkGenericWarningMacro(
+        << "SurfaceLIC is not supported" << endl
+        << context->GetClassName() << endl
+        << manager->GetDriverGLVendor() << endl
+        << manager->GetDriverGLVersion() << endl
+        << manager->GetDriverGLRenderer() << endl
+        << "LIC support = " << lic2d << endl
+        << "floating point texture formats = " << floatFormats << endl
+        << "render buffers = " << renderbuffer);
+      return false;
+      }
+    return true;
+    }
+
+  // Description:
+  // Free textures and shader programs we're holding a reference to.
+  void ClearGraphicsResources()
+    {
+    this->ClearTextures();
+
+    this->RenderGeometryPass = NULL;
+    this->ColorPass = NULL;
+    this->ColorEnhancePass = NULL;
+    this->CopyPass = NULL;
+
+    this->Compositor = NULL;
+    this->LICer = NULL;
+    this->FBO = NULL;
+
+    this->LightingHelper->Initialize(0, VTK_SHADER_TYPE_VERTEX);
+    this->ColorMaterialHelper->Initialize(0);
+    }
+
+  // Description:
+  // Free textures we're holding a reference to.
+  void ClearTextures()
+    {
+    this->DepthImage = NULL;
+    this->GeometryImage = NULL;
+    this->VectorImage = NULL;
+    this->MaskVectorImage = NULL;
+    this->CompositeVectorImage = NULL;
+    this->CompositeMaskVectorImage = NULL;
+    this->NoiseImage = NULL;
+    this->LICImage = NULL;
+    this->RGBColorImage = NULL;
+    this->HSLColorImage = NULL;
+    }
+
+  // Description:
+  // Allocate textures.
+  void AllocateTextures(
+        vtkRenderWindow *context,
+        int *viewsize)
+    {
+    this->AllocateDepthTexture(context, viewsize, this->DepthImage);
+    this->AllocateTexture(context, viewsize, this->GeometryImage, vtkTextureObject::Nearest);
+    this->AllocateTexture(context, viewsize, this->VectorImage, vtkTextureObject::Linear);
+    this->AllocateTexture(context, viewsize, this->MaskVectorImage, vtkTextureObject::Linear);
+    this->AllocateTexture(context, viewsize, this->CompositeVectorImage, vtkTextureObject::Linear);
+    this->AllocateTexture(context, viewsize, this->CompositeMaskVectorImage, vtkTextureObject::Linear);
+    this->AllocateTexture(context, viewsize, this->LICImage, vtkTextureObject::Nearest);
+    this->AllocateTexture(context, viewsize, this->RGBColorImage, vtkTextureObject::Nearest);
+    this->AllocateTexture(context, viewsize, this->HSLColorImage, vtkTextureObject::Nearest);
+    }
+
+  // Description:
+  // Allocate a screen-size texture and store it in the given smart pointer.
+  void AllocateTexture(
+        vtkRenderWindow *context,
+        int *viewsize,
+        vtkSmartPointer<vtkTextureObject> &tex,
+        int filter = vtkTextureObject::Nearest)
+    {
+    if ( !tex )
+      {
+      vtkTextureObject * newTex = vtkTextureObject::New();
+      newTex->SetContext(context);
+      newTex->SetBaseLevel(0);
+      newTex->SetMaxLevel(0);
+      newTex->SetWrapS(vtkTextureObject::ClampToEdge);
+      newTex->SetWrapT(vtkTextureObject::ClampToEdge);
+      newTex->SetMinificationFilter(filter);
+      newTex->SetMagnificationFilter(filter);
+      newTex->SetBorderColor(0.0f, 0.0f, 0.0f, 0.0f);
+      newTex->Create2D(viewsize[0], viewsize[1], 4, VTK_FLOAT, false);
+      newTex->SetAutoParameters(0);
+      tex = newTex;
+      newTex->Delete();
+      }
+    }
+
+  // Description:
+  // Allocate a screen-size depth texture or renderbuffer and store it in the given smart pointer.
+  #ifdef USE_DEPTH_TEXTURE
+  void AllocateDepthTexture(
+        vtkRenderWindow *context,
+        int *viewsize,
+        vtkSmartPointer<vtkTextureObject> &tex)
+    {
+    if ( !tex )
+      {
+      vtkTextureObject * newTex = vtkTextureObject::New();
+      newTex->SetContext(context);
+      newTex->AllocateDepth(viewsize[0], viewsize[1], vtkTextureObject::Float32);
+      newTex->SetAutoParameters(0);
+      tex = newTex;
+      newTex->Delete();
+      }
+    }
+  #else
+  void AllocateDepthTexture(
+        vtkRenderWindow *context,
+        int *viewsize,
+        vtkSmartPointer<vtkRenderbuffer> &buf)
+    {
+    if ( !buf )
+      {
+      vtkRenderbuffer * newBuf = vtkRenderbuffer::New();
+      newBuf->SetContext(context);
+      newBuf->CreateDepthAttachment(viewsize[0], viewsize[1]);
+      buf = newBuf;
+      newBuf->Delete();
+      }
+    }
+  #endif
+
+  // Description:
+  // After LIC has been computed, reset/clean the internal state.
+  void Updated()
+    {
+    this->ContextNeedsUpdate = false;
+    this->OutputDataNeedsUpdate = false;
+    this->CommunicatorNeedsUpdate = false;
+    this->GeometryNeedsUpdate = false;
+    this->GatherNeedsUpdate = false;
+    this->LICNeedsUpdate = false;
+    this->ColorNeedsUpdate = false;
+    }
+
+  // Description:
+  // Force all stages to re-execute. Necessary if the
+  // context or communicator changes.
+  void UpdateAll()
+    {
+    this->ContextNeedsUpdate = true;
+    this->OutputDataNeedsUpdate = true;
+    this->CommunicatorNeedsUpdate = true;
+    this->GeometryNeedsUpdate = true;
+    this->GatherNeedsUpdate = true;
+    this->LICNeedsUpdate = true;
+    this->ColorNeedsUpdate = true;
+    }
+
+  // Description:
+  // Convert viewport to texture coordinates
+  void ViewportQuadTextureCoords(GLfloat *tcoords)
+    {
+    tcoords[0] = tcoords[2] = 0.0f;
+    tcoords[1] = tcoords[3] = 1.0f;
+    }
+
+  // Description:
+  // Convert a viewport to a bounding box and its texture coordinates for a
+  // screen size texture.
+  void ViewportQuadPoints(const vtkPixelExtent &viewportExt, GLfloat *quadpts)
+    {
+    viewportExt.GetData(quadpts);
+    }
+
+  // Description:
+  // Convert a viewport to a bounding box and its texture coordinates for a
+  // screen size texture.
+  void ViewportQuadTextureCoords(
+        const vtkPixelExtent &viewExt,
+        const vtkPixelExtent &viewportExt,
+        GLfloat *tcoords)
+    {
+    GLfloat viewsize[2];
+    viewExt.Size(viewsize);
+
+    // cell to node
+    vtkPixelExtent next(viewportExt);
+    next.CellToNode();
+    next.GetData(tcoords);
+
+    tcoords[0] = tcoords[0]/viewsize[0];
+    tcoords[1] = tcoords[1]/viewsize[0];
+    tcoords[2] = tcoords[2]/viewsize[1];
+    tcoords[3] = tcoords[3]/viewsize[1];
+    }
+
+  // Description:
+  // Convert the entire view to a bounding box and its texture coordinates for
+  // a screen size texture.
+  void ViewQuadPoints(GLfloat *quadpts)
+    {
+    quadpts[0] = quadpts[2] = 0.0f;
+    quadpts[1] = quadpts[3] = 1.0f;
+    }
+
+  // Description:
+  // Convert the entire view to a bounding box and its texture coordinates for
+  // a screen size texture.
+  void ViewQuadTextureCoords(GLfloat *tcoords)
+    {
+    tcoords[0] = tcoords[2] = 0.0f;
+    tcoords[1] = tcoords[3] = 1.0f;
+    }
+
+  // Description:
+  // Render a quad (to trigger a shader to run)
+  void RenderQuad(
+        const vtkPixelExtent &viewExt,
+        const vtkPixelExtent &viewportExt,
+        int nTexUnits)
+    {
+    // cell to node
+    vtkPixelExtent next(viewportExt);
+    next.CellToNode();
+
+    GLfloat quadPts[4];
+    next.GetData(quadPts);
+
+    GLfloat quadTCoords[4];
+    this->ViewportQuadTextureCoords(viewExt, viewportExt, quadTCoords);
+
+    int ids[8] = {0,2, 1,2, 1,3, 0,3};
+
+    glBegin(GL_QUADS);
+    for (int q=0; q<4; ++q)
+      {
+      int qq = 2*q;
+      for (int i=0; i<nTexUnits; ++i)
+        {
+        GLenum texUnit = vtkgl::TEXTURE0+i;
+        vtkgl::MultiTexCoord2f(texUnit, quadTCoords[ids[qq]], quadTCoords[ids[qq+1]]);
+        }
+      glVertex2f(quadPts[ids[qq]], quadPts[ids[qq+1]]);
+      }
+    glEnd();
+    }
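+
+  // In RenderQuad above, ids indexes the extent data, which follows
+  // the [x0, x1, y0, y1] convention used elsewhere in this class, so
+  // the corners are emitted counter-clockwise as (x0,y0), (x1,y0),
+  // (x1,y1), (x0,y1) with matching coordinates on every texture unit.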
+
+  // Description:
+  // Test to see if any lighting parameters have changed since the
+  // last call to RenderInternal.
+  bool LightingChanged()
+    {
+    bool anyChanged = false;
+    const int nLights = vtkLightingHelper::VTK_MAX_LIGHTS;
+    for (int i=0; i<nLights; ++i) // must look at all
+      {
+      if ( this->LightMonitor[i]->StateChanged() )
+        {
+        anyChanged = true;
+        }
+      }
+    return anyChanged;
+    }
+
+  // Description:
+  // Test to see if any model view related parameters have changed
+  // since the last call to RenderInternal.
+  bool ViewChanged()
+    {
+    return this->ViewMonitor->StateChanged();
+    }
+
+  // Description:
+  // Test to see if the background colors or mode have changed since
+  // the last call to RenderInternal.
+  bool BackgroundChanged(vtkRenderer *ren)
+    {
+    return this->BGMonitor->StateChanged(ren);
+    }
+
+  // Description:
+  // Compute the flat index into a 4x4 column-major (OpenGL ordered) matrix.
+  inline
+  int idx(int row, int col) { return 4*col+row; }
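+  // For a column-major 4x4 matrix, as returned by glGetDoublev below,
+  // element (row,col) is stored at 4*col+row; e.g. idx(1,2) == 9.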
+
+  // Description:
+  // Given an axis-aligned bounding box in normalized device
+  // coordinates, test for view frustum visibility. If all points
+  // are outside one of the view frustum planes then this box is
+  // not visible. We might get a false positive where more than
+  // one clip plane intersects the box.
+  bool VisibilityTest(double ndcBBox[24])
+    {
+    // check all points in the direction d
+    // at the same time.
+    for (int d=0; d<3; ++d)
+      {
+      if (((ndcBBox[     d] < -1.0)
+        && (ndcBBox[3  + d] < -1.0)
+        && (ndcBBox[6  + d] < -1.0)
+        && (ndcBBox[9  + d] < -1.0)
+        && (ndcBBox[12 + d] < -1.0)
+        && (ndcBBox[15 + d] < -1.0)
+        && (ndcBBox[18 + d] < -1.0)
+        && (ndcBBox[21 + d] < -1.0))
+        ||((ndcBBox[     d] > 1.0)
+        && (ndcBBox[3  + d] > 1.0)
+        && (ndcBBox[6  + d] > 1.0)
+        && (ndcBBox[9  + d] > 1.0)
+        && (ndcBBox[12 + d] > 1.0)
+        && (ndcBBox[15 + d] > 1.0)
+        && (ndcBBox[18 + d] > 1.0)
+        && (ndcBBox[21 + d] > 1.0)) )
+        {
+        return false;
+        }
+      }
+    return true;
+    }
+
+  // Description:
+  // Given world space bounds, compute bounding boxes in clip and
+  // normalized device coordinates and perform a view frustum
+  // visibility test. Return true if the bounds are visible; if so
+  // the passed in extent object is initialized with the
+  // corresponding screen space extents.
+  bool ProjectBounds(
+          double PMV[16],
+          int viewsize[2],
+          double bounds[6],
+          vtkPixelExtent &screenExt)
+    {
+    // this is how to get the 8 corners of a bounding
+    // box from the VTK bounds
+    int bbIds[24] = {
+          0,2,4,
+          1,2,4,
+          1,3,4,
+          0,3,4,
+          0,2,5,
+          1,2,5,
+          1,3,5,
+          0,3,5
+          };
+
+    // normalized device coordinate bounding box
+    double ndcBBox[24];
+    for (int q = 0; q<8; ++q)
+      {
+      int qq = 3*q;
+      // bounding box corner
+      double wx = bounds[bbIds[qq  ]];
+      double wy = bounds[bbIds[qq+1]];
+      double wz = bounds[bbIds[qq+2]];
+      // to clip coordinates
+      ndcBBox[qq  ] = wx * PMV[idx(0,0)] + wy * PMV[idx(0,1)] + wz * PMV[idx(0,2)] + PMV[idx(0,3)];
+      ndcBBox[qq+1] = wx * PMV[idx(1,0)] + wy * PMV[idx(1,1)] + wz * PMV[idx(1,2)] + PMV[idx(1,3)];
+      ndcBBox[qq+2] = wx * PMV[idx(2,0)] + wy * PMV[idx(2,1)] + wz * PMV[idx(2,2)] + PMV[idx(2,3)];
+      double ndcw   = wx * PMV[idx(3,0)] + wy * PMV[idx(3,1)] + wz * PMV[idx(3,2)] + PMV[idx(3,3)];
+
+      // TODO
+      // if the point is past the near clipping plane
+      // we need to do something more robust. this ensures
+      // the correct result but it's inefficient
+      if (ndcw < 0.0)
+        {
+        screenExt = vtkPixelExtent(viewsize[0], viewsize[1]);
+        //cerr << "W<0!!!!!!!!!!!!!" << endl;
+        return true;
+        }
+
+      // to normalized device coordinates
+      ndcw = (ndcw == 0.0 ? 1.0 : 1.0/ndcw);
+      ndcBBox[qq  ] *= ndcw;
+      ndcBBox[qq+1] *= ndcw;
+      ndcBBox[qq+2] *= ndcw;
+      }
+
+    // compute screen extent only if the object
+    // is inside the view frustum.
+    if (VisibilityTest(ndcBBox))
+      {
+      // these bounds are visible. compute screen
+      // space extents
+      double vx  = viewsize[0] - 1.0;
+      double vy  = viewsize[1] - 1.0;
+      double vx2 = viewsize[0] * 0.5;
+      double vy2 = viewsize[1] * 0.5;
+      vtkBoundingBox box;
+      for (int q=0; q<8; ++q)
+        {
+        int qq = 3*q;
+        double sx = (ndcBBox[qq  ] + 1.0) * vx2;
+        double sy = (ndcBBox[qq+1] + 1.0) * vy2;
+        box.AddPoint(
+          vtkClamp(sx, 0.0, vx),
+          vtkClamp(sy, 0.0, vy),
+          0.0);
+        }
+      // to screen extent
+      const double *s0 = box.GetMinPoint();
+      const double *s1 = box.GetMaxPoint();
+      screenExt[0] = static_cast<int>(s0[0]);
+      screenExt[1] = static_cast<int>(s1[0]);
+      screenExt[2] = static_cast<int>(s0[1]);
+      screenExt[3] = static_cast<int>(s1[1]);
+      return true;
+      }
+
+    // these bounds aren't visible
+    return false;
+    }
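+
+  // As a concrete example of the NDC-to-screen mapping above: with
+  // viewsize[0] == 800, an NDC x of -1 maps to pixel 0, an NDC x of 0
+  // maps to pixel 400, and an NDC x of +1 maps to 800, which the clamp
+  // then limits to 799.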
+
+  // Description:
+  // Compute screen space extents for each block in the input
+  // dataset and for the entire dataset. Only visible blocks
+  // are used in the computations.
+  int ProjectBounds(
+        vtkDataObject *dobj,
+        int viewsize[2],
+        vtkPixelExtent &dataExt,
+        deque<vtkPixelExtent> &blockExts)
+    {
+    // get the modelview projection matrix
+    GLdouble P[16];
+    GLdouble MV[16];
+    GLdouble PMV[16];
+    glGetDoublev(GL_PROJECTION_MATRIX, P);
+    glGetDoublev(GL_MODELVIEW_MATRIX, MV);
+    for ( int c = 0; c < 4; c ++ )
+      {
+      for ( int r = 0; r < 4; r ++ )
+        {
+        PMV[c*4+r]
+          = P[idx(r,0)] * MV[idx(0,c)]
+          + P[idx(r,1)] * MV[idx(1,c)]
+          + P[idx(r,2)] * MV[idx(2,c)]
+          + P[idx(r,3)] * MV[idx(3,c)];
+        }
+      }
+    // dataset case
+    vtkDataSet* ds = dynamic_cast<vtkDataSet*>(dobj);
+    if (ds && ds->GetNumberOfCells())
+      {
+      double bounds[6];
+      ds->GetBounds(bounds);
+      if ( vtkBoundingBox::IsValid(bounds)
+        && this->ProjectBounds(PMV, viewsize, bounds, dataExt) )
+        {
+        // the dataset is visible
+        // add its extent
+        blockExts.push_back(dataExt);
+        return 1;
+        }
+      //cerr << "ds " << ds << " not visible " << endl;
+      return 0;
+      }
+    // composite dataset case
+    vtkCompositeDataSet* cd = dynamic_cast<vtkCompositeDataSet*>(dobj);
+    if (cd)
+      {
+      // process each block's bounds
+      vtkBoundingBox bbox;
+      vtkCompositeDataIterator* iter = cd->NewIterator();
+      for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
+        {
+        ds = dynamic_cast<vtkDataSet*>(iter->GetCurrentDataObject());
+        if (ds && ds->GetNumberOfCells())
+          {
+          double bounds[6];
+          ds->GetBounds(bounds);
+          vtkPixelExtent screenExt;
+          if ( vtkBoundingBox::IsValid(bounds)
+            && this->ProjectBounds(PMV, viewsize, bounds, screenExt) )
+            {
+            // this block is visible
+            // save its screen extent
+            // and accumulate its bounds
+            blockExts.push_back(screenExt);
+            bbox.AddBounds(bounds);
+            }
+          //else { cerr << "leaf " << ds << " not visible " << endl << endl;}
+          }
+        }
+      iter->Delete();
+      // process accumulated dataset bounds
+      double bounds[6];
+      bbox.GetBounds(bounds);
+      if ( vtkBoundingBox::IsValid(bounds)
+        && this->ProjectBounds(PMV, viewsize, bounds, dataExt) )
+        {
+        return 1;
+        }
+      return 0;
+      }
+    //cerr << "ds " << ds << " no cells " << endl;
+    return 0;
+    }
+
+  // Description:
+  // Shrink an extent to tightly bound non-zero values
+  void GetPixelBounds(float *rgba, int ni, vtkPixelExtent &ext)
+    {
+    vtkPixelExtent text;
+    for (int j=ext[2]; j<=ext[3]; ++j)
+      {
+      for (int i=ext[0]; i<=ext[1]; ++i)
+        {
+        if (rgba[4*(j*ni+i)+3] > 0.0f)
+          {
+          text[0] = text[0] > i ? i : text[0];
+          text[1] = text[1] < i ? i : text[1];
+          text[2] = text[2] > j ? j : text[2];
+          text[3] = text[3] < j ? j : text[3];
+          }
+        }
+      }
+    ext = text;
+    }
+
+  // Description:
+  // Shrink a set of extents to tightly bound non-zero values,
+  // culling any extent that becomes empty.
+  void GetPixelBounds(float *rgba, int ni, deque<vtkPixelExtent> &blockExts)
+    {
+    vector<vtkPixelExtent> tmpExts(blockExts.begin(),blockExts.end());
+    blockExts.clear();
+    size_t nBlocks = tmpExts.size();
+    for (size_t b=0; b<nBlocks; ++b)
+      {
+      vtkPixelExtent &tmpExt = tmpExts[b];
+      GetPixelBounds(rgba, ni, tmpExt);
+      if (!tmpExt.Empty())
+        {
+        blockExts.push_back(tmpExt);
+        }
+      }
+    }
+};
+
+//----------------------------------------------------------------------------
+vtkObjectFactoryNewMacro(vtkSurfaceLICPainter);
+
+//----------------------------------------------------------------------------
+vtkSurfaceLICPainter::vtkSurfaceLICPainter()
+{
+  this->Internals = new vtkInternals();
+  this->Output = 0;
+
+  this->Enable = 1;
+  this->AlwaysUpdate = 0;
+
+  this->StepSize = 1;
+  this->NumberOfSteps = 20;
+  this->NormalizeVectors = 1;
+
+  this->EnhancedLIC = 1;
+
+  this->EnhanceContrast = 0;
+  this->LowLICContrastEnhancementFactor = 0.0;
+  this->HighLICContrastEnhancementFactor = 0.0;
+  this->LowColorContrastEnhancementFactor = 0.0;
+  this->HighColorContrastEnhancementFactor = 0.0;
+  this->AntiAlias = 0;
+  this->ColorMode = COLOR_MODE_BLEND;
+  this->LICIntensity = 0.8;
+  this->MapModeBias = 0.0;
+
+  this->GenerateNoiseTexture = 0;
+  this->NoiseType = NOISE_TYPE_GAUSSIAN;
+  this->NoiseTextureSize = 200;
+  this->MinNoiseValue = 0.0;
+  this->MaxNoiseValue = 0.8;
+  this->NoiseGrainSize = 1;
+  this->NumberOfNoiseLevels = 256;
+  this->ImpulseNoiseProbability = 1.0;
+  this->ImpulseNoiseBackgroundValue = 0.0;
+  this->NoiseGeneratorSeed = 1;
+
+  this->MaskOnSurface = 0;
+  this->MaskThreshold = 0.0;
+  this->MaskIntensity = 0.0;
+  this->MaskColor[0] = 0.5;
+  this->MaskColor[1] = 0.5;
+  this->MaskColor[2] = 0.5;
+
+  this->CompositeStrategy = COMPOSITE_AUTO;
+
+  this->SetInputArrayToProcess(
+        vtkDataObject::FIELD_ASSOCIATION_POINTS_THEN_CELLS,
+        vtkDataSetAttributes::VECTORS);
+}
+
+//----------------------------------------------------------------------------
+vtkSurfaceLICPainter::~vtkSurfaceLICPainter()
+{
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr << "=====vtkSurfaceLICPainter::~vtkSurfaceLICPainter" << endl;
+  #endif
+
+  this->ReleaseGraphicsResources(this->Internals->Context);
+  delete this->Internals;
+
+  if (this->Output)
+    {
+    this->Output->Delete();
+    this->Output = 0;
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::SetInputArrayToProcess(
+      int fieldAssociation,
+      const char* name)
+{
+  if ( !this->Internals->FieldNameSet
+    || (this->Internals->FieldAssociation != fieldAssociation)
+    || (this->Internals->FieldName != name) )
+    {
+    this->Internals->FieldAssociation = fieldAssociation;
+    this->Internals->FieldName = name;
+    this->Internals->FieldNameSet = true;
+    this->Internals->HasVectors = false;
+    this->Internals->UpdateAll();
+    this->Modified();
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::SetInputArrayToProcess(
+      int fieldAssociation,
+      int fieldAttributeType)
+{
+  if ( (this->Internals->FieldAssociation != fieldAssociation)
+    || (this->Internals->FieldAttributeType != fieldAttributeType)
+    || this->Internals->FieldNameSet )
+    {
+    this->Internals->FieldAssociation = fieldAssociation;
+    this->Internals->FieldAttributeType = fieldAttributeType;
+    this->Internals->FieldNameSet = false;
+    this->Internals->HasVectors = false;
+    this->Internals->UpdateAll();
+    this->Modified();
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::ReleaseGraphicsResources(vtkWindow* win)
+{
+  this->Internals->ClearGraphicsResources();
+  this->Internals->Context = NULL;
+  if (this->Output)
+    {
+    this->Output->Delete();
+    this->Output = NULL;
+    }
+  this->Superclass::ReleaseGraphicsResources(win);
+}
+
+//----------------------------------------------------------------------------
+#define vtkSetMonitoredParameterMacro(_name, _type, _code)  \
+void vtkSurfaceLICPainter::Set##_name (_type val)           \
+{                                                           \
+  if (val == this->_name)                                   \
+    {                                                       \
+    return;                                                 \
+    }                                                       \
+  _code                                                     \
+  this->_name = val;                                        \
+  this->Modified();                                         \
+}
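+// For reference, a use of this macro such as
+//   vtkSetMonitoredParameterMacro(StepSize, double,
+//     this->Internals->GatherNeedsUpdate = true;
+//     this->Internals->LICNeedsUpdate = true;)
+// expands to a conventional setter:
+//   void vtkSurfaceLICPainter::SetStepSize(double val)
+//   {
+//     if (val == this->StepSize) { return; }
+//     this->Internals->GatherNeedsUpdate = true;
+//     this->Internals->LICNeedsUpdate = true;
+//     this->StepSize = val;
+//     this->Modified();
+//   }
+// i.e. it skips work when the value is unchanged, flags the affected
+// stages, stores the value, and marks the painter modified.
+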
+// output dataset
+vtkSetMonitoredParameterMacro(
+      Enable,
+      int,
+      this->Internals->OutputDataNeedsUpdate = true;)
+// lic
+vtkSetMonitoredParameterMacro(
+      GenerateNoiseTexture,
+      int,
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      NoiseType,
+      int,
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      NoiseTextureSize,
+      int,
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      NoiseGrainSize,
+      int,
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      MinNoiseValue,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      MaxNoiseValue,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      NumberOfNoiseLevels,
+      int,
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      ImpulseNoiseProbability,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      ImpulseNoiseBackgroundValue,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      NoiseGeneratorSeed,
+      int,
+      this->Internals->Noise = NULL;
+      this->Internals->NoiseImage = NULL;
+      this->Internals->LICNeedsUpdate = true;)
+
+// compositor
+vtkSetMonitoredParameterMacro(
+      CompositeStrategy,
+      int,
+      this->Internals->GatherNeedsUpdate = true;)
+
+// lic/compositor
+vtkSetMonitoredParameterMacro(
+      NumberOfSteps,
+      int,
+      this->Internals->GatherNeedsUpdate = true;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      StepSize,
+      double,
+      this->Internals->GatherNeedsUpdate = true;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      NormalizeVectors,
+      int,
+      val = val < 0 ? 0 : val;
+      val = val > 1 ? 1 : val;
+      this->Internals->GatherNeedsUpdate = true;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      MaskThreshold,
+      double,
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      EnhancedLIC,
+      int,
+      this->Internals->GatherNeedsUpdate = true;
+      this->Internals->LICNeedsUpdate = true;)
+
+// lic
+vtkSetMonitoredParameterMacro(
+      LowLICContrastEnhancementFactor,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      HighLICContrastEnhancementFactor,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->LICNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      AntiAlias,
+      int,
+      val = val < 0 ? 0 : val;
+      this->Internals->GatherNeedsUpdate = true;
+      this->Internals->LICNeedsUpdate = true;)
+
+// geometry
+vtkSetMonitoredParameterMacro(
+      MaskOnSurface,
+      int,
+      val = val < 0 ? 0 : val;
+      val = val > 1 ? 1 : val;
+      this->Internals->GeometryNeedsUpdate = true;)
+
+// colors
+vtkSetMonitoredParameterMacro(
+      ColorMode,
+      int,
+      this->Internals->ColorNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      LICIntensity,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->ColorNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      MaskIntensity,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->ColorNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      MapModeBias,
+      double,
+      val = val <-1.0 ? -1.0 : val;
+      val = val > 1.0 ?  1.0 : val;
+      this->Internals->ColorNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      LowColorContrastEnhancementFactor,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->ColorNeedsUpdate = true;)
+
+vtkSetMonitoredParameterMacro(
+      HighColorContrastEnhancementFactor,
+      double,
+      val = val < 0.0 ? 0.0 : val;
+      val = val > 1.0 ? 1.0 : val;
+      this->Internals->ColorNeedsUpdate = true;)
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::SetMaskColor(double *val)
+{
+  double rgb[3];
+  for (int q=0; q<3; ++q)
+    {
+    rgb[q] = val[q];
+    rgb[q] = rgb[q] < 0.0 ? 0.0 : rgb[q];
+    rgb[q] = rgb[q] > 1.0 ? 1.0 : rgb[q];
+    }
+  if ( (rgb[0] == this->MaskColor[0])
+    && (rgb[1] == this->MaskColor[1])
+    && (rgb[2] == this->MaskColor[2]) )
+    {
+    return;
+    }
+  for (int q=0; q<3; ++q)
+    {
+    this->MaskColor[q] = rgb[q];
+    }
+  this->Internals->ColorNeedsUpdate = true;
+  this->Modified();
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::SetEnhanceContrast(int val)
+{
+  val = val < ENHANCE_CONTRAST_OFF ? ENHANCE_CONTRAST_OFF : val;
+  val = val > ENHANCE_CONTRAST_BOTH ? ENHANCE_CONTRAST_BOTH : val;
+  if (val == this->EnhanceContrast)
+    {
+    return;
+    }
+
+  switch ( this->EnhanceContrast )
+    {
+    case ENHANCE_CONTRAST_OFF:
+      switch ( val )
+        {
+        case ENHANCE_CONTRAST_LIC:
+        case ENHANCE_CONTRAST_BOTH:
+          this->Internals->LICNeedsUpdate = true;
+          break;
+        case ENHANCE_CONTRAST_COLOR:
+          this->Internals->ColorNeedsUpdate = true;
+          break;
+        }
+      break;
+
+    case ENHANCE_CONTRAST_LIC:
+      switch ( val )
+        {
+        case ENHANCE_CONTRAST_OFF:
+        case ENHANCE_CONTRAST_COLOR:
+          this->Internals->LICNeedsUpdate = true;
+          break;
+        case ENHANCE_CONTRAST_BOTH:
+          this->Internals->ColorNeedsUpdate = true;
+          break;
+        }
+      break;
+
+    case ENHANCE_CONTRAST_COLOR:
+      switch ( val )
+        {
+        case ENHANCE_CONTRAST_LIC:
+        case ENHANCE_CONTRAST_BOTH:
+          this->Internals->LICNeedsUpdate = true;
+          break;
+        case ENHANCE_CONTRAST_OFF:
+          this->Internals->ColorNeedsUpdate = true;
+          break;
+        }
+      break;
+
+    case ENHANCE_CONTRAST_BOTH:
+      switch ( val )
+        {
+        case ENHANCE_CONTRAST_OFF:
+          this->Internals->LICNeedsUpdate = true;
+          break;
+        case ENHANCE_CONTRAST_COLOR:
+          this->Internals->LICNeedsUpdate = true;
+        case ENHANCE_CONTRAST_LIC:
+          this->Internals->ColorNeedsUpdate = true;
+          break;
+        }
+      break;
+    }
+
+  this->EnhanceContrast = val;
+  this->Modified();
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::SetNoiseDataSet(vtkImageData *data)
+{
+  if (data == this->Internals->Noise)
+    {
+    return;
+    }
+  this->Internals->Noise = data;
+  this->Internals->NoiseImage = NULL;
+  this->Modified();
+}
+
+//----------------------------------------------------------------------------
+vtkImageData *vtkSurfaceLICPainter::GetNoiseDataSet()
+{
+  if (this->Internals->Noise == NULL)
+    {
+    vtkImageData *noise = NULL;
+    if ( this->GenerateNoiseTexture )
+      {
+      // report potential issues
+      if ( this->NoiseGrainSize >= this->NoiseTextureSize )
+        {
+        vtkErrorMacro(
+          "NoiseGrainSize must be smaller than NoiseTextureSize");
+        }
+      if ( this->MinNoiseValue >= this->MaxNoiseValue )
+        {
+        vtkErrorMacro(
+          "MinNoiseValue must be smaller than MaxNoiseValue");
+        }
+      if ( (this->ImpulseNoiseProbability == 1.0)
+        && (this->NumberOfNoiseLevels < 2) )
+        {
+        vtkErrorMacro(
+          "NumberOfNoiseLevels must be greater than 1 "
+          "when not generating impulse noise");
+        }
+
+      // generate a custom noise texture based on the
+      // current settings.
+      int noiseTextureSize = this->NoiseTextureSize;
+      int noiseGrainSize = this->NoiseGrainSize;
+      RandomNoise2D noiseGen;
+      float *noiseValues = noiseGen.Generate(
+            this->NoiseType,
+            noiseTextureSize,
+            noiseGrainSize,
+            static_cast<float>(this->MinNoiseValue),
+            static_cast<float>(this->MaxNoiseValue),
+            this->NumberOfNoiseLevels,
+            this->ImpulseNoiseProbability,
+            static_cast<float>(this->ImpulseNoiseBackgroundValue),
+            this->NoiseGeneratorSeed);
+      if ( noiseValues == NULL )
+        {
+        vtkErrorMacro("Failed to generate noise.");
+        }
+
+      vtkFloatArray *noiseArray = vtkFloatArray::New();
+      noiseArray->SetNumberOfComponents(2);
+      noiseArray->SetName("noise");
+      vtkIdType arraySize = 2*noiseTextureSize*noiseTextureSize;
+      noiseArray->SetArray(noiseValues, arraySize, 0);
+
+      noise = vtkImageData::New();
+      noise->SetSpacing(1.0, 1.0, 1.0);
+      noise->SetOrigin(0.0, 0.0, 0.0);
+      noise->SetDimensions(noiseTextureSize, noiseTextureSize, 1);
+      noise->GetPointData()->SetScalars(noiseArray);
+
+      noiseArray->Delete();
+      }
+    else
+      {
+      // load a predefined noise texture.
+      noise = vtkGetNoiseResource();
+      }
+
+    this->Internals->Noise = noise;
+    this->Internals->NoiseImage = NULL;
+    noise->Delete();
+    noise = NULL;
+    }
+
+  return this->Internals->Noise;
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::UpdateNoiseImage(vtkRenderWindow *renWin)
+{
+  vtkImageData *noiseDataSet = this->GetNoiseDataSet();
+
+  int ext[6];
+  noiseDataSet->GetExtent(ext);
+  unsigned int dataWidth = ext[1]-ext[0]+1;
+  unsigned int dataHeight = ext[3]-ext[2]+1;
+
+  vtkDataArray *noiseArray = noiseDataSet->GetPointData()->GetScalars();
+  int dataType = noiseArray->GetDataType();
+  void *data = noiseArray->GetVoidPointer(0);
+  int dataComps = noiseArray->GetNumberOfComponents();
+  unsigned int dataSize = noiseArray->GetNumberOfTuples()*dataComps;
+
+  vtkPixelBufferObject *pbo = vtkPixelBufferObject::New();
+  pbo->SetContext(renWin);
+  pbo->Upload1D(dataType, data, dataSize, 1, 0);
+
+  vtkTextureObject *tex = vtkTextureObject::New();
+  tex->SetContext(renWin);
+  tex->SetBaseLevel(0);
+  tex->SetMaxLevel(0);
+  tex->SetWrapS(vtkTextureObject::Repeat);
+  tex->SetWrapT(vtkTextureObject::Repeat);
+  tex->SetMinificationFilter(vtkTextureObject::Nearest);
+  tex->SetMagnificationFilter(vtkTextureObject::Nearest);
+  tex->Create2D(dataWidth, dataHeight, dataComps, pbo, false);
+  tex->SetAutoParameters(0);
+  pbo->Delete();
+
+  this->Internals->NoiseImage = tex;
+  tex->Delete();
+}
+
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::IsSupported(vtkRenderWindow *renWin)
+{
+  vtkOpenGLRenderWindow *context
+    = vtkOpenGLRenderWindow::SafeDownCast(renWin);
+
+  return vtkInternals::IsSupported(context);
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::CanRenderSurfaceLIC(vtkActor *actor, int typeFlags)
+{
+  // check the render context for GL feature support
+  // note this also handles non-OpenGL render windows
+  if ( this->Internals->ContextNeedsUpdate
+    && !vtkSurfaceLICPainter::IsSupported(this->Internals->Context) )
+    {
+    vtkErrorMacro("SurfaceLIC is not supported");
+    return false;
+    }
+
+  bool canRender = false;
+
+  // check for common situations where surface lic
+  // isn't computed.
+  GLint polyMode[2];
+  glGetIntegerv(GL_POLYGON_MODE, polyMode);
+
+  int rep = actor->GetProperty()->GetRepresentation();
+
+  if ( this->Enable
+    && this->Internals->HasVectors
+    && (rep == VTK_SURFACE)
+    && (typeFlags & (vtkPainter::POLYS|vtkPainter::STRIPS))
+    && (polyMode[0] == GL_FILL) // should I be checking backface mode too?
+    && glIsEnabled(GL_LIGHTING) )
+    {
+    canRender = true;
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " CanRender " << canRender << endl;
+  #endif
+
+  return canRender;
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::InitializeResources()
+{
+  bool initialized = true;
+
+  // noise image
+  if (!this->Internals->NoiseImage)
+    {
+    initialized = false;
+
+    this->UpdateNoiseImage(this->Internals->Context);
+    }
+
+  // compositor for parallel operation
+  if (!this->Internals->Compositor)
+    {
+    this->Internals->UpdateAll();
+    vtkSurfaceLICComposite *compositor = vtkSurfaceLICComposite::New();
+    compositor->SetContext(this->Internals->Context);
+    this->Internals->Compositor = compositor;
+    compositor->Delete();
+    }
+
+  // image LIC
+  if (!this->Internals->LICer)
+    {
+    initialized = false;
+
+    vtkLineIntegralConvolution2D *LICer = vtkLineIntegralConvolution2D::New();
+    LICer->SetContext(this->Internals->Context);
+    this->Internals->LICer = LICer;
+    LICer->Delete();
+    }
+
+  // frame buffers
+  if (!this->Internals->FBO)
+    {
+    initialized = false;
+
+    vtkFrameBufferObject2 * fbo = vtkFrameBufferObject2::New();
+    fbo->SetContext(this->Internals->Context);
+    this->Internals->FBO = fbo;
+    fbo->Delete();
+    }
+
+  // load shader codes
+  if (!this->Internals->RenderGeometryPass)
+    {
+    initialized = false;
+
+    vtkShaderProgram2 * prog = vtkShaderProgram2::New();
+    prog->SetContext(this->Internals->Context);
+
+    vtkShader2 *s = vtkShader2::New();
+    s->SetSourceCode(vtkSurfaceLICPainter_GeomVs);
+    s->SetType(VTK_SHADER_TYPE_VERTEX);
+    s->SetContext(this->Internals->Context);
+
+    vtkShader2 *s2 = vtkShader2::New();
+    s2->SetSourceCode(vtkSurfaceLICPainter_GeomFs);
+    s2->SetType(VTK_SHADER_TYPE_FRAGMENT);
+    s2->SetContext(this->Internals->Context);
+
+    prog->GetShaders()->AddItem(s);
+    prog->GetShaders()->AddItem(s2);
+    s->Delete();
+    s2->Delete();
+
+    this->Internals->LightingHelper->Initialize(prog, VTK_SHADER_TYPE_VERTEX);
+    this->Internals->ColorMaterialHelper->Initialize(prog);
+
+    prog->Build();
+    if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+      {
+      vtkErrorMacro("geometry shader failed to build.");
+      }
+
+    this->Internals->RenderGeometryPass = prog;
+    prog->Delete();
+    }
+
+  if (!this->Internals->ColorPass)
+    {
+    initialized = false;
+
+    vtkShaderProgram2 *prog = vtkShaderProgram2::New();
+    prog->SetContext(this->Internals->Context);
+
+    vtkShader2 *s = vtkShader2::New();
+    s->SetSourceCode(vtkSurfaceLICPainter_SC);
+    s->SetType(VTK_SHADER_TYPE_FRAGMENT);
+    s->SetContext(this->Internals->Context);
+    prog->GetShaders()->AddItem(s);
+    s->Delete();
+
+    prog->Build();
+    if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+      {
+      vtkErrorMacro("scalar color shader failed to build.");
+      }
+
+    this->Internals->ColorPass = prog;
+    prog->Delete();
+    }
+
+  if (!this->Internals->ColorEnhancePass)
+    {
+    initialized = false;
+
+    vtkShaderProgram2 *prog = vtkShaderProgram2::New();
+    prog->SetContext(this->Internals->Context);
+
+    vtkShader2 *s = vtkShader2::New();
+    s->SetSourceCode(vtkSurfaceLICPainter_CE);
+    s->SetType(VTK_SHADER_TYPE_FRAGMENT);
+    s->SetContext(this->Internals->Context);
+    prog->GetShaders()->AddItem(s);
+    s->Delete();
+
+    prog->Build();
+    if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+      {
+      vtkErrorMacro("color contrast enhance shader failed to build.");
+      }
+
+    this->Internals->ColorEnhancePass = prog;
+    prog->Delete();
+    }
+
+  if (!this->Internals->CopyPass)
+    {
+    initialized = false;
+
+    vtkShaderProgram2 *prog = vtkShaderProgram2::New();
+    prog->SetContext(this->Internals->Context);
+
+    vtkShader2 *s = vtkShader2::New();
+    s->SetSourceCode(vtkSurfaceLICPainter_DCpy);
+    s->SetType(VTK_SHADER_TYPE_FRAGMENT);
+    s->SetContext(this->Internals->Context);
+    prog->GetShaders()->AddItem(s);
+    s->Delete();
+
+    prog->Build();
+    if (prog->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
+      {
+      vtkErrorMacro("color contrast enhance shader failed to build.");
+      }
+
+    this->Internals->CopyPass = prog;
+    prog->Delete();
+    }
+
+  // if any of the above were not already initialized
+  // then execute all stages
+  if (!initialized)
+    {
+    this->Internals->UpdateAll();
+    }
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::NeedToColorLIC()
+{
+  if ( this->Internals->ColorNeedsUpdate
+    || this->Internals->LICNeedsUpdate
+    || this->Internals->GatherNeedsUpdate
+    || this->Internals->GeometryNeedsUpdate
+    || this->Internals->CommunicatorNeedsUpdate
+    || this->Internals->OutputDataNeedsUpdate
+    || this->Internals->ContextNeedsUpdate
+    || this->AlwaysUpdate )
+    {
+    this->Internals->ColorNeedsUpdate = true;
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " NeedToColorLIC " << this->Internals->ColorNeedsUpdate << endl;
+  #endif
+  return this->Internals->ColorNeedsUpdate;
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::NeedToComputeLIC()
+{
+  if ( this->Internals->LICNeedsUpdate
+    || this->Internals->GatherNeedsUpdate
+    || this->Internals->GeometryNeedsUpdate
+    || this->Internals->CommunicatorNeedsUpdate
+    || this->Internals->OutputDataNeedsUpdate
+    || this->Internals->ContextNeedsUpdate
+    || this->AlwaysUpdate )
+    {
+    this->Internals->LICNeedsUpdate = true;
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " NeedToComputeLIC " << this->Internals->LICNeedsUpdate << endl;
+  #endif
+  return this->Internals->LICNeedsUpdate;
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::NeedToGatherVectors()
+{
+  if ( this->Internals->GatherNeedsUpdate
+    || this->Internals->GeometryNeedsUpdate
+    || this->Internals->OutputDataNeedsUpdate
+    || this->Internals->CommunicatorNeedsUpdate
+    || this->Internals->ContextNeedsUpdate
+    || this->AlwaysUpdate )
+    {
+    this->Internals->GatherNeedsUpdate = true;
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " NeedToGatherVectors "
+    << this->Internals->GatherNeedsUpdate << endl;
+  #endif
+  return this->Internals->GatherNeedsUpdate;
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::NeedToRenderGeometry(
+      vtkRenderer *renderer,
+      vtkActor *actor)
+{
+  // view changed or
+  // user modifiable parameters
+  if ( this->Internals->GeometryNeedsUpdate
+    || this->Internals->CommunicatorNeedsUpdate
+    || this->Internals->OutputDataNeedsUpdate
+    || this->Internals->ContextNeedsUpdate
+    || this->AlwaysUpdate )
+    {
+    this->Internals->GeometryNeedsUpdate = true;
+    }
+
+  // lights changed
+  if ( this->Internals->LightingChanged() )
+    {
+    this->Internals->GeometryNeedsUpdate = true;
+    }
+
+  // props changed
+  long long propMTime = actor->GetProperty()->GetMTime();
+  if ( this->Internals->LastPropertyMTime != propMTime )
+    {
+    this->Internals->LastPropertyMTime = propMTime;
+    this->Internals->GeometryNeedsUpdate = true;
+    }
+
+  // background colors changed
+  if (this->Internals->BackgroundChanged(renderer))
+    {
+    this->Internals->GeometryNeedsUpdate = true;
+    this->Internals->ColorNeedsUpdate = true;
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " NeedToUpdateGeometry "
+    << this->Internals->GeometryNeedsUpdate << endl;
+  #endif
+  return this->Internals->GeometryNeedsUpdate;
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::NeedToUpdateCommunicator()
+{
+  // no communicator or externally modified parameters
+  if ( this->Internals->CommunicatorNeedsUpdate
+    || this->Internals->ContextNeedsUpdate
+    || this->Internals->OutputDataNeedsUpdate
+    || !this->Internals->Communicator
+    || this->AlwaysUpdate )
+    {
+    this->Internals->CommunicatorNeedsUpdate = true;
+    this->Internals->UpdateAll();
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " NeedToUpdateCommunicator "
+    << this->Internals->CommunicatorNeedsUpdate << endl;
+  #endif
+
+  return this->Internals->CommunicatorNeedsUpdate;
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::NeedToUpdateOutputData()
+{
+  vtkDataObject *input = this->GetInput();
+  // input dataset changed
+  long long inputMTime = input->GetMTime();
+  if ( (this->Internals->LastInputDataSetMTime < inputMTime)
+    || !this->Output
+    || this->AlwaysUpdate)
+    {
+    this->Internals->LastInputDataSetMTime = inputMTime;
+    this->Internals->UpdateAll();
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " NeedToUpdateOutputData " << this->Internals->OutputDataNeedsUpdate << endl;
+  #endif
+  return this->Internals->OutputDataNeedsUpdate;
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::ValidateContext(vtkRenderer *renderer)
+{
+  bool modified = false;
+
+  vtkOpenGLRenderWindow *context
+    = vtkOpenGLRenderWindow::SafeDownCast(renderer->GetRenderWindow());
+
+  // context changed
+  if (this->Internals->Context != context)
+    {
+    modified = true;
+    if (this->Internals->Context)
+      {
+      this->ReleaseGraphicsResources(this->Internals->Context);
+      }
+    this->Internals->Context = context;
+    }
+
+  // viewport size changed
+  int viewsize[2];
+  renderer->GetTiledSize(&viewsize[0], &viewsize[1]);
+  if ( this->Internals->Viewsize[0] != viewsize[0]
+    || this->Internals->Viewsize[1] != viewsize[1] )
+    {
+    modified = true;
+
+    // update view size
+    this->Internals->Viewsize[0] = viewsize[0];
+    this->Internals->Viewsize[1] = viewsize[1];
+
+    // resize textures
+    this->Internals->ClearTextures();
+    this->Internals->AllocateTextures(context, viewsize);
+    }
+
+  // view changed
+  if (this->Internals->ViewChanged())
+    {
+    modified = true;
+    }
+
+  // if anything changed execute all stages
+  if (modified)
+    {
+    this->Internals->UpdateAll();
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " NeedToUpdatContext " << modified << endl;
+  #endif
+}
+
+//----------------------------------------------------------------------------
+vtkPainterCommunicator *vtkSurfaceLICPainter::CreateCommunicator(int)
+{
+  return new vtkPainterCommunicator;
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::CreateCommunicator()
+{
+  // compute screen space pixel extent of local blocks and
+  // union of local blocks. only blocks that pass view frustum
+  // visibility test are used in the computation.
+
+  vtkDataObject *input = this->GetInput();
+
+  this->Internals->DataSetExt.Clear();
+  this->Internals->BlockExts.clear();
+
+  int includeRank = this->Internals->ProjectBounds(
+          input,
+          this->Internals->Viewsize,
+          this->Internals->DataSetExt,
+          this->Internals->BlockExts);
+
+  if (this->Internals->Communicator)
+    {
+    delete this->Internals->Communicator;
+    this->Internals->Communicator = NULL;
+    }
+
+  this->Internals->Communicator = this->CreateCommunicator(includeRank);
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " is rendering " << includeRank << endl;
+  #endif
+}
+
+//-----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::ProcessInformation(vtkInformation* info)
+{
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  bool LUTNeedsUpdate = false;
+  #endif
+
+  // detect when the LUT has been modified
+  if (info->Has(vtkScalarsToColorsPainter::LOOKUP_TABLE()))
+    {
+    vtkObjectBase *lutObj = info->Get(vtkScalarsToColorsPainter::LOOKUP_TABLE());
+    vtkScalarsToColors *lut = vtkScalarsToColors::SafeDownCast(lutObj);
+    long long lutMTime;
+    if (lut && ((lutMTime = lut->GetMTime()) > this->Internals->LastLUTMTime))
+      {
+      this->Internals->LastLUTMTime = lutMTime;
+      this->Internals->UpdateAll();
+      #if vtkSurfaceLICPainterDEBUG >= 1
+      LUTNeedsUpdate = true;
+      #endif
+      }
+    }
+
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " NeedToUpdateLUT " << LUTNeedsUpdate << endl;
+  #endif
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::SetUpdateAll()
+{
+  this->Internals->UpdateAll();
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::RenderInternal(
+        vtkRenderer *renderer,
+        vtkActor *actor,
+        unsigned long typeflags,
+        bool forceCompileOnly)
+{
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr
+    << this->Internals->Communicator->GetWorldRank()
+    << " ===== " << this->GetClassName() << "::RenderInternal" << endl;
+  #endif
+
+  #ifdef vtkSurfaceLICPainterTIME
+  this->StartTimerEvent("vtkSurfaceLICPainter::RenderInternal");
+  #else
+  vtkSmartPointer<vtkTimerLog> timer = vtkSmartPointer<vtkTimerLog>::New();
+  timer->StartTimer();
+  #endif
+
+  vtkOpenGLClearErrorMacro();
+
+  this->ValidateContext(renderer);
+
+  if (this->NeedToUpdateOutputData())
+    {
+    // if the input data has changed we need to
+    // reload vector attributes and recompute
+    // all, but only if the output is valid.
+    this->PrepareOutput();
+    }
+
+  if (this->NeedToUpdateCommunicator())
+    {
+    #ifdef vtkSurfaceLICPainterTIME
+    this->StartTimerEvent("vtkSurfaceLICPainter::CreateCommunicator");
+    #endif
+    // create a communicator that contains only ranks
+    // that have visible data. In parallel this is a
+    // collective operation across all ranks. In
+    // serial this is a no-op.
+    this->CreateCommunicator();
+    #ifdef vtkSurfaceLICPainterTIME
+    this->EndTimerEvent("vtkSurfaceLICPainter::CreateCommunicator");
+    #endif
+    }
+  vtkPainterCommunicator *comm = this->Internals->Communicator;
+
+  if (comm->GetIsNull())
+    {
+    // other ranks may have some visible data but we
+    // have none and should not participate further
+    #ifdef vtkSurfaceLICPainterTIME
+    this->EndTimerEvent("vtkSurfaceLICPainter::RenderInternal");
+    #endif
+    return;
+    }
+
+  if (!this->CanRenderSurfaceLIC(actor, typeflags))
+    {
+    // we've determined that there's no work for us, or that the
+    // requisite opengl extensions are not available. pass control on
+    // to delegate renderer and return.
+    this->Superclass::RenderInternal(renderer, actor, typeflags, forceCompileOnly);
+    #ifdef vtkSurfaceLICPainterTIME
+    this->EndTimerEvent("vtkSurfaceLICPainter::RenderInternal");
+    #endif
+    return;
+    }
+
+  // allocate rendering resources, initialize or update
+  // textures and shaders.
+  this->InitializeResources();
+
+  // Save context and matrix state to be able to restore.
+  glPushAttrib(GL_ALL_ATTRIB_BITS);
+  glMatrixMode(GL_PROJECTION);
+  glPushMatrix();
+  glMatrixMode(GL_MODELVIEW);
+  glPushMatrix();
+
+  vtkPixelExtent viewExt(
+        this->Internals->Viewsize[0],
+        this->Internals->Viewsize[1]);
+
+  // save the active fbo and its draw buffer
+  int prevDrawBuf = 0;
+  glGetIntegerv(GL_DRAW_BUFFER, &prevDrawBuf);
+
+  int prevFbo = 0;
+  glGetIntegerv(vtkgl::DRAW_FRAMEBUFFER_BINDING_EXT, &prevFbo);
+
+  // ------------------------------------------- render geometry, project vectors onto screen, etc
+  if (this->NeedToRenderGeometry(renderer, actor))
+    {
+    #ifdef vtkSurfaceLICPainterTIME
+    this->StartTimerEvent("vtkSurfaceLICPainter::RenderGeometry");
+    #endif
+
+    // setup our fbo
+    vtkFrameBufferObject2 *fbo = this->Internals->FBO;
+    fbo->SaveCurrentBindings();
+    fbo->Bind(vtkgl::FRAMEBUFFER_EXT);
+    fbo->AddDepthAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, this->Internals->DepthImage);
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U, this->Internals->GeometryImage);
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 1U, this->Internals->VectorImage);
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 2U, this->Internals->MaskVectorImage);
+    fbo->ActivateDrawBuffers(3);
+    vtkCheckFrameBufferStatusMacro(vtkgl::FRAMEBUFFER_EXT);
+
+    // clear internal color and depth buffers
+    // the LIC'er requires *all* fragments in the vector
+    // texture to be initialized to 0
+    glDisable(GL_BLEND);
+    glEnable(GL_DEPTH_TEST);
+    glDisable(GL_SCISSOR_TEST);
+    glClearColor(0.0, 0.0, 0.0, 0.0);
+    glClear(GL_DEPTH_BUFFER_BIT|GL_COLOR_BUFFER_BIT);
+
+    // setup projection shader
+    vtkShaderProgram2 *geometryPass = this->Internals->RenderGeometryPass;
+
+    vtkUniformVariables *uniforms = geometryPass->GetUniformVariables();
+    uniforms->SetUniformft("uMaskOnSurface", this->MaskOnSurface);
+
+    this->Internals->LightingHelper->EncodeLightState();
+    this->Internals->ColorMaterialHelper->SetUniformVariables();
+
+    // render geometry through delegate chain. not looping over blocks
+    // here since composite dataset painter is in the chain.
+    geometryPass->Use();
+
+    typeflags &= (vtkPainter::POLYS|vtkPainter::STRIPS);
+    this->Superclass::RenderInternal(renderer, actor, typeflags, forceCompileOnly);
+
+    geometryPass->Restore();
+
+    fbo->RemoveRenDepthAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT);
+    fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U);
+    fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 1U);
+    fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 2U);
+    fbo->DeactivateDrawBuffers();
+    fbo->UnBind(vtkgl::FRAMEBUFFER_EXT);
+
+    #ifdef vtkSurfaceLICPainterTIME
+    this->EndTimerEvent("vtkSurfaceLICPainter::RenderGeometry");
+    #endif
+    #if vtkSurfaceLICPainterDEBUG >= 2
+    vtkTextureIO::Write(
+          mpifn(comm,"slicp_geometry_image.vtm"),
+          this->Internals->GeometryImage,
+          this->Internals->BlockExts);
+    vtkTextureIO::Write(
+          mpifn(comm,"slicp_vector_image.vtm"),
+          this->Internals->VectorImage,
+          this->Internals->BlockExts);
+    vtkTextureIO::Write(
+          mpifn(comm,"slicp_mask_vector_image.vtm"),
+          this->Internals->MaskVectorImage,
+          this->Internals->BlockExts);
+    #if defined(USE_DEPTH_TEXTURE)
+    vtkTextureIO::Write(
+          mpifn(comm,"slicp_depth_image.vtm"),
+          this->Internals->DepthImage,
+          this->Internals->BlockExts);
+    #endif
+    #endif
+    }
+
+  // --------------------------------------------- composite vectors for parallel LIC
+  if (this->NeedToGatherVectors())
+    {
+    #ifdef vtkSurfaceLICPainterTIME
+    this->StartTimerEvent("vtkSurfaceLICPainter::GatherVectors");
+    #endif
+
+    // get tight screen space bounds to reduce communication/computation
+    vtkPixelBufferObject *vecPBO = this->Internals->VectorImage->Download();
+    void *pVecPBO = vecPBO->MapPackedBuffer();
+
+    this->Internals->GetPixelBounds(
+            (float*)pVecPBO,
+            this->Internals->Viewsize[0],
+            this->Internals->BlockExts);
+
+    // initialize compositor
+    this->Internals->Compositor->Initialize(
+          viewExt,
+          this->Internals->BlockExts,
+          this->CompositeStrategy,
+          this->StepSize,
+          this->NumberOfSteps,
+          this->NormalizeVectors,
+          this->EnhancedLIC,
+          this->AntiAlias);
+
+    if (comm->GetMPIInitialized())
+      {
+      // parallel run
+      // need to use the communicator provided by the rendering engine
+      this->Internals->Compositor->SetCommunicator(comm);
+
+      // build compositing program and set up the screen space decomp
+      // with guard pixels
+      int iErr = 0;
+      iErr = this->Internals->Compositor->BuildProgram((float*)pVecPBO);
+      if (iErr)
+        {
+        vtkErrorMacro("Failed to construct program, reason " << iErr);
+        }
+
+      // composite vectors
+      vtkTextureObject *compositeVectors = this->Internals->CompositeVectorImage;
+      iErr = this->Internals->Compositor->Gather(
+              pVecPBO,
+              VTK_FLOAT,
+              4,
+              compositeVectors);
+      if (iErr)
+        {
+        vtkErrorMacro("Failed to composite vectors, reason  " << iErr);
+        }
+
+      // composite mask vectors
+      vtkTextureObject *compositeMaskVectors = this->Internals->CompositeMaskVectorImage;
+      vtkPixelBufferObject *maskVecPBO = this->Internals->MaskVectorImage->Download();
+      void *pMaskVecPBO = maskVecPBO->MapPackedBuffer();
+      iErr = this->Internals->Compositor->Gather(
+              pMaskVecPBO,
+              VTK_FLOAT,
+              4,
+              compositeMaskVectors);
+      if (iErr)
+        {
+        vtkErrorMacro("Failed to composite mask vectors, reason " << iErr);
+        }
+      maskVecPBO->UnmapPackedBuffer();
+      maskVecPBO->Delete();
+
+      // restore the default communicator
+      this->Internals->Compositor->RestoreDefaultCommunicator();
+
+      #if vtkSurfaceLICPainterDEBUG >= 2
+      vtkTextureIO::Write(
+             mpifn(comm,"slicp_new_vector_image.vtm"),
+             this->Internals->CompositeVectorImage,
+             this->Internals->Compositor->GetDisjointGuardExtents());
+
+      vtkTextureIO::Write(
+             mpifn(comm,"slicp_new_mask_vector_image.vtm"),
+             this->Internals->CompositeMaskVectorImage,
+             this->Internals->Compositor->GetDisjointGuardExtents());
+      #endif
+      }
+    else
+      {
+      // serial run
+      // make the decomposition disjoint and add guard pixels
+      this->Internals->Compositor->InitializeCompositeExtents((float*)pVecPBO);
+
+      // use the lic decomp from here on out, in serial we have this
+      // flexibility because we don't need to worry about ordered compositing
+      // or IceT's scissor boxes
+      this->Internals->BlockExts
+         = this->Internals->Compositor->GetCompositeExtents();
+
+      // pass through without compositing
+      this->Internals->CompositeVectorImage = this->Internals->VectorImage;
+      this->Internals->CompositeMaskVectorImage = this->Internals->MaskVectorImage;
+      }
+
+    vecPBO->UnmapPackedBuffer();
+    vecPBO->Delete();
+
+    #ifdef vtkSurfaceLICPainterTIME
+    this->EndTimerEvent("vtkSurfaceLICPainter::GatherVectors");
+    #endif
+    }
+
+  // ------------------------------------------- LIC on screen
+  if ( this->NeedToComputeLIC() )
+    {
+    #if vtkSurfaceLICPainterDEBUG >= 2
+    ostringstream oss;
+    if ( this->GenerateNoiseTexture )
+      {
+      const char *noiseType[3]={"unif","gauss","perl"};
+      oss
+       << "slicp_noise_"
+       << noiseType[this->NoiseType]
+       << "_size_" << this->NoiseTextureSize
+       << "_grain_" << this->NoiseGrainSize
+       << "_minval_" << this->MinNoiseValue
+       << "_maxval_" << this->MaxNoiseValue
+       << "_nlevels_" << this->NumberOfNoiseLevels
+       << "_impulseprob_" << this->ImpulseNoiseProbability
+       << "_impulseprob_" << this->ImpulseNoiseBackgroundValue
+       << ".vtk";
+      }
+    else
+      {
+      oss << "slicp_noise_default.vtk";
+      }
+    vtkTextureIO::Write(
+          mpifn(comm, oss.str().c_str()),
+          this->Internals->NoiseImage);
+    #endif
+    #ifdef vtkSurfaceLICPainterTIME
+    this->StartTimerEvent("vtkSurfaceLICPainter::ComputeLIC");
+    #endif
+
+    // TODO -- this means that the step size is a function
+    // of aspect ratio, which is pretty insane...
+    // convert from window units to texture units
+    // this isn't correct since there's no way to account
+    // for anisotropy in the transform to texture space
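+    // (e.g., for a 1000x500 viewport tcScale is (0.001, 0.002), so a 1 pixel
+    // StepSize becomes sqrt(0.001^2 + 0.002^2) ~= 0.00224 texture units;
+    // the numbers are illustrative only)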
+    double tcScale[2] = {
+          1.0/this->Internals->Viewsize[0],
+          1.0/this->Internals->Viewsize[1]};
+
+    double stepSize
+      = this->StepSize*sqrt(tcScale[0]*tcScale[0]+tcScale[1]*tcScale[1]);
+
+    stepSize = stepSize <= 0.0 ? 1.0e-10 : stepSize;
+
+    // configure image lic
+    vtkLineIntegralConvolution2D *LICer = this->Internals->LICer;
+
+    LICer->SetStepSize(stepSize);
+    LICer->SetNumberOfSteps(this->NumberOfSteps);
+    LICer->SetEnhancedLIC(this->EnhancedLIC);
+    switch (this->EnhanceContrast)
+      {
+      case ENHANCE_CONTRAST_LIC:
+      case ENHANCE_CONTRAST_BOTH:
+        LICer->SetEnhanceContrast(vtkLIC2D::ENHANCE_CONTRAST_ON);
+        break;
+      default:
+        LICer->SetEnhanceContrast(vtkLIC2D::ENHANCE_CONTRAST_OFF);
+      }
+    LICer->SetLowContrastEnhancementFactor(this->LowLICContrastEnhancementFactor);
+    LICer->SetHighContrastEnhancementFactor(this->HighLICContrastEnhancementFactor);
+    LICer->SetAntiAlias(this->AntiAlias);
+    LICer->SetComponentIds(0, 1);
+    LICer->SetNormalizeVectors(this->NormalizeVectors);
+    LICer->SetMaskThreshold(this->MaskThreshold);
+    LICer->SetCommunicator(comm);
+
+    // loop over composited extents
+    const deque<vtkPixelExtent> &compositeExts
+      = this->Internals->Compositor->GetCompositeExtents();
+
+    const deque<vtkPixelExtent> &disjointGuardExts
+      = this->Internals->Compositor->GetDisjointGuardExtents();
+
+    this->Internals->LICImage.TakeReference(
+         LICer->Execute(
+              viewExt,            // screen extent
+              disjointGuardExts,  // disjoint extent of valid vectors
+              compositeExts,      // disjoint extent where lic is needed
+              this->Internals->CompositeVectorImage,
+              this->Internals->CompositeMaskVectorImage,
+              this->Internals->NoiseImage));
+
+    if (!this->Internals->LICImage)
+      {
+      vtkErrorMacro("Failed to compute image LIC");
+      return;
+      }
+
+    #ifdef vtkSurfaceLICPainterTIME
+    this->EndTimerEvent("vtkSurfaceLICPainter::ComputeLIC");
+    #endif
+    #if vtkSurfaceLICPainterDEBUG >= 2
+    vtkTextureIO::Write(
+          mpifn(comm,"slicp_lic.vtm"),
+          this->Internals->LICImage,
+          compositeExts);
+    #endif
+
+    // ------------------------------------------- move from LIC decomp back to geometry decomp
+    if ( comm->GetMPIInitialized()
+      && (this->Internals->Compositor->GetStrategy()!=COMPOSITE_INPLACE ) )
+      {
+      #ifdef vtkSurfaceLICPainterTIME
+      this->StartTimerEvent("vtkSurfaceLICPainter::ScatterLIC");
+      #endif
+
+      // parallel run
+      // need to use the communicator provided by the rendering engine
+      this->Internals->Compositor->SetCommunicator(comm);
+
+      vtkPixelBufferObject *licPBO = this->Internals->LICImage->Download();
+      void *pLicPBO = licPBO->MapPackedBuffer();
+      vtkTextureObject *newLicImage = NULL;
+      int iErr = this->Internals->Compositor->Scatter(pLicPBO, VTK_FLOAT, 4, newLicImage);
+      if (iErr)
+        {
+        vtkErrorMacro("Failed to scatter lic");
+        }
+      licPBO->UnmapPackedBuffer();
+      licPBO->Delete();
+      this->Internals->LICImage = NULL;
+      this->Internals->LICImage = newLicImage;
+      newLicImage->Delete();
+
+      // restore the default communicator
+      this->Internals->Compositor->RestoreDefaultCommunicator();
+
+      #ifdef vtkSurfaceLICPainterTIME
+      this->EndTimerEvent("vtkSurfaceLICPainter::ScatterLIC");
+      #endif
+      #if vtkSurfaceLICPainterDEBUG >= 2
+      vtkTextureIO::Write(
+            mpifn(comm,"slicp_new_lic.vtm"),
+            this->Internals->LICImage,
+            this->Internals->BlockExts);
+      #endif
+      }
+    }
+
+  // ------------------------------------------- combine scalar colors + LIC
+  if ( this->NeedToColorLIC() )
+    {
+    #ifdef vtkSurfaceLICPainterTIME
+    this->StartTimerEvent("vtkSurfaceLICPainter::ColorLIC");
+    #endif
+    vtkFrameBufferObject2 *fbo = this->Internals->FBO;
+    fbo->SaveCurrentBindings();
+    fbo->Bind(vtkgl::FRAMEBUFFER_EXT);
+    fbo->InitializeViewport(this->Internals->Viewsize[0], this->Internals->Viewsize[1]);
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U, this->Internals->RGBColorImage);
+    fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 1U, this->Internals->HSLColorImage);
+    fbo->ActivateDrawBuffers(2U);
+    vtkCheckFrameBufferStatusMacro(vtkgl::FRAMEBUFFER_EXT);
+
+    #if 0
+    glDisable(GL_SCISSOR_TEST);
+    glClearColor(0.0, 0.0, 0.0, 0.0);
+    glClear(GL_COLOR_BUFFER_BIT);
+    #else
+    // clear the parts of the screen which we will modify
+    glEnable(GL_SCISSOR_TEST);
+    glClearColor(0.0, 0.0, 0.0, 0.0);
+    size_t nBlocks = this->Internals->BlockExts.size();
+    for (size_t e=0; e<nBlocks; ++e)
+      {
+      vtkPixelExtent ext = this->Internals->BlockExts[e];
+      ext.Grow(2); // halo for linear filtering
+      ext &= viewExt;
+
+      unsigned int extSize[2];
+      ext.Size(extSize);
+
+      glScissor(ext[0], ext[2], extSize[0], extSize[1]);
+      glClear(GL_COLOR_BUFFER_BIT);
+      }
+    glDisable(GL_SCISSOR_TEST);
+    #endif
+
+    this->Internals->VectorImage->Activate(vtkgl::TEXTURE0);
+    this->Internals->GeometryImage->Activate(vtkgl::TEXTURE1);
+    this->Internals->LICImage->Activate(vtkgl::TEXTURE2);
+
+    vtkShaderProgram2 *colorPass = this->Internals->ColorPass;
+    vtkUniformVariables *uniforms = colorPass->GetUniformVariables();
+    uniforms->SetUniformit("texVectors", 0);
+    uniforms->SetUniformit("texGeomColors", 1);
+    uniforms->SetUniformit("texLIC", 2);
+    uniforms->SetUniformit("uScalarColorMode", this->ColorMode);
+    uniforms->SetUniformft("uLICIntensity", this->LICIntensity);
+    uniforms->SetUniformft("uMapBias", this->MapModeBias);
+    uniforms->SetUniformft("uMaskIntensity", this->MaskIntensity);
+    uniforms->SetUniformft("uMaskColor", 3, this->MaskColor);
+    colorPass->Use();
+
+    for (size_t e=0; e<nBlocks; ++e)
+      {
+      this->Internals->RenderQuad(viewExt, this->Internals->BlockExts[e], 1);
+      }
+
+    colorPass->Restore();
+
+    this->Internals->VectorImage->Deactivate(vtkgl::TEXTURE0);
+    this->Internals->GeometryImage->Deactivate(vtkgl::TEXTURE1);
+    this->Internals->LICImage->Deactivate(vtkgl::TEXTURE2);
+
+    #ifdef vtkSurfaceLICPainterTIME
+    this->EndTimerEvent("vtkSurfaceLICPainter::ColorLIC");
+    #endif
+
+    // --------------------------------------------- color contrast enhance
+    if ( ( this->EnhanceContrast == ENHANCE_CONTRAST_COLOR )
+      || ( this->EnhanceContrast == ENHANCE_CONTRAST_BOTH ) )
+      {
+      #if vtkSurfaceLICPainterDEBUG >= 2
+      vtkTextureIO::Write(
+            mpifn(comm,"slic_color_rgb_in.vtm"),
+            this->Internals->RGBColorImage,
+            this->Internals->BlockExts);
+      vtkTextureIO::Write(
+            mpifn(comm,"slic_color_hsl_in.vtm"),
+            this->Internals->HSLColorImage,
+            this->Internals->BlockExts);
+      #endif
+      #ifdef vtkSurfaceLICPainterTIME
+      this->StartTimerEvent("vtkSurfaceLICPainter::ContrastEnhance");
+      #endif
+
+      // find min/max lightness value for color contrast enhancement.
+      float LMin = VTK_FLOAT_MAX;
+      float LMax = -VTK_FLOAT_MAX;
+      float LMaxMinDiff = VTK_FLOAT_MAX;
+
+      #ifdef STREAMING_MIN_MAX
+      StreamingFindMinMax(fbo, this->Internals->BlockExts, LMin, LMax);
+      #else
+      FindMinMax(
+            this->Internals->HSLColorImage,
+            this->Internals->BlockExts,
+            LMin,
+            LMax);
+      #endif
+
+      if ( this->Internals->BlockExts.size()
+        && ((LMax <= LMin) || (LMin < 0.0f) || (LMax > 1.0f)) )
+        {
+        vtkErrorMacro(
+          << comm->GetRank()
+          << ": Invalid  range " << LMin << ", " << LMax
+          << " for color contrast enhancement");
+        LMin = 0.0;
+        LMax = 1.0;
+        LMaxMinDiff = 1.0;
+        }
+
+      // global collective reduction for parallel operation
+      this->GetGlobalMinMax(comm, LMin, LMax);
+
+      // set M and m as a fraction of the range.
+      LMaxMinDiff = LMax-LMin;
+      LMin += LMaxMinDiff*this->LowColorContrastEnhancementFactor;
+      LMax -= LMaxMinDiff*this->HighColorContrastEnhancementFactor;
+      LMaxMinDiff = LMax-LMin;
+
+      // normalize shader
+      fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U, this->Internals->RGBColorImage);
+      fbo->ActivateDrawBuffer(0U);
+      vtkCheckFrameBufferStatusMacro(vtkgl::DRAW_FRAMEBUFFER_EXT);
+
+      this->Internals->GeometryImage->Activate(vtkgl::TEXTURE0);
+      this->Internals->HSLColorImage->Activate(vtkgl::TEXTURE1);
+      this->Internals->LICImage->Activate(vtkgl::TEXTURE2);
+
+      vtkShaderProgram2 *colorEnhancePass = this->Internals->ColorEnhancePass;
+      uniforms = colorEnhancePass->GetUniformVariables();
+      uniforms->SetUniformit("texGeomColors", 0);
+      uniforms->SetUniformit("texHSLColors", 1);
+      uniforms->SetUniformit("texLIC", 2);
+      uniforms->SetUniformft("uLMin", LMin);
+      uniforms->SetUniformft("uLMaxMinDiff", LMaxMinDiff);
+      colorEnhancePass->Use();
+
+      for (size_t e=0; e<nBlocks; ++e)
+        {
+        this->Internals->RenderQuad(viewExt, this->Internals->BlockExts[e], 1);
+        }
+
+      colorEnhancePass->Restore();
+
+      this->Internals->GeometryImage->Deactivate(vtkgl::TEXTURE0);
+      this->Internals->HSLColorImage->Deactivate(vtkgl::TEXTURE1);
+      this->Internals->LICImage->Deactivate(vtkgl::TEXTURE2);
+
+      fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U);
+      fbo->DeactivateDrawBuffers();
+
+      #ifdef vtkSurfaceLICPainterTIME
+      this->EndTimerEvent("vtkSurfaceLICPainter::ContrastEnhance");
+      #endif
+      }
+    else
+      {
+      fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U);
+      fbo->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 1U);
+      fbo->DeactivateDrawBuffers();
+      }
+
+    fbo->UnBind(vtkgl::FRAMEBUFFER_EXT);
+
+    #if vtkSurfaceLICPainterDEBUG >= 2
+    vtkTextureIO::Write(
+           mpifn(comm,"slicp_new_rgb.vtm"),
+           this->Internals->RGBColorImage,
+           this->Internals->BlockExts);
+    #endif
+    }
+
+  // ----------------------------------------------- depth test and copy to screen
+  #ifdef vtkSurfaceLICPainterTIME
+  this->StartTimerEvent("vtkSurfaceLICPainter::DepthCopy");
+  #endif
+  vtkgl::BindFramebufferEXT(vtkgl::FRAMEBUFFER_EXT, prevFbo);
+  glDrawBuffer(prevDrawBuf);
+  vtkFrameBufferObject2::InitializeViewport(
+        this->Internals->Viewsize[0],
+        this->Internals->Viewsize[1]);
+  glEnable(GL_DEPTH_TEST);
+
+  this->Internals->DepthImage->Activate(vtkgl::TEXTURE0);
+  this->Internals->RGBColorImage->Activate(vtkgl::TEXTURE1);
+
+  vtkShaderProgram2 *copyPass = this->Internals->CopyPass;
+  vtkUniformVariables *uniforms = copyPass->GetUniformVariables();
+  uniforms->SetUniformit("texDepth", 0);
+  uniforms->SetUniformit("texRGBColors", 1);
+  copyPass->Use();
+
+  size_t nBlocks = this->Internals->BlockExts.size();
+  for (size_t e=0; e<nBlocks; ++e)
+    {
+    this->Internals->RenderQuad(viewExt, this->Internals->BlockExts[e], 1);
+    }
+
+  copyPass->Restore();
+
+  this->Internals->DepthImage->Deactivate(vtkgl::TEXTURE0);
+  this->Internals->RGBColorImage->Deactivate(vtkgl::TEXTURE1);
+
+  #ifdef vtkSurfaceLICPainterTIME
+  this->EndTimerEvent("vtkSurfaceLICPainter::DepthCopy");
+  #endif
+
+  //
+  this->Internals->Updated();
+
+  // Essential to restore the context to what it was before we started messing
+  // with it.
+  glMatrixMode(GL_MODELVIEW);
+  glPopMatrix();
+  glMatrixMode(GL_PROJECTION);
+  glPopMatrix();
+  glPopAttrib();
+
+  // clear opengl error flags and be absolutely certain that nothing failed.
+  vtkOpenGLCheckErrorMacro("failed during surface lic painter");
+
+  #ifdef vtkSurfaceLICPainterTIME
+  this->EndTimerEvent("vtkSurfaceLICPainter::RenderInternal");
+  #else
+  timer->StopTimer();
+  #endif
+}
+
+//-----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::ReportReferences(vtkGarbageCollector *collector)
+{
+  this->Superclass::ReportReferences(collector);
+
+  vtkGarbageCollectorReport(collector, this->Output, "Output PolyData");
+}
+
+//----------------------------------------------------------------------------
+vtkDataObject* vtkSurfaceLICPainter::GetOutput()
+{
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr << "=====vtkSurfaceLICPainter::GetOutput" << endl;
+  #endif
+
+  if (this->Enable && this->Output)
+    {
+    return this->Output;
+    }
+  return this->GetInput();
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::PrepareOutput()
+{
+  vtkDataObject* input = this->GetInput();
+  if ((input == NULL) || !this->Enable)
+    {
+    if (this->Output)
+      {
+      this->Output->Delete();
+      this->Output = NULL;
+      this->Internals->HasVectors = false;
+      }
+    return false;
+    }
+
+  if (this->Internals->OutputDataNeedsUpdate)
+    {
+    if (this->Output)
+      {
+      this->Output->Delete();
+      this->Output = NULL;
+      }
+
+    this->Output = input->NewInstance();
+    this->Output->ShallowCopy(input);
+    this->Internals->HasVectors = false;
+    }
+
+  if (!this->Internals->HasVectors)
+    {
+    this->Internals->HasVectors = this->VectorsToTCoords(this->Output);
+    }
+
+  return this->Internals->HasVectors;
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::VectorsToTCoords(vtkDataObject *dataObj)
+{
+  bool hasVectors = false;
+
+  vtkCompositeDataSet *cd = vtkCompositeDataSet::SafeDownCast(dataObj);
+  if (cd)
+    {
+    vtkCompositeDataIterator* iter = cd->NewIterator();
+    for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
+      {
+      vtkDataSet* ds = vtkDataSet::SafeDownCast(iter->GetCurrentDataObject());
+      if (ds && ds->GetNumberOfCells())
+        {
+        this->ClearTCoords(ds);
+        hasVectors |= this->VectorsToTCoords(ds);
+        }
+      }
+    iter->Delete();
+    return hasVectors;
+    }
+
+  vtkDataSet* ds = vtkDataSet::SafeDownCast(dataObj);
+  if (ds && ds->GetNumberOfCells())
+    {
+    this->ClearTCoords(ds);
+    hasVectors |= this->VectorsToTCoords(ds);
+    }
+
+  if ( hasVectors )
+    {
+    // force downstream updates (display lists, etc)
+    this->Output->Modified();
+    }
+
+  return hasVectors;
+}
+
+//----------------------------------------------------------------------------
+bool vtkSurfaceLICPainter::VectorsToTCoords(vtkDataSet *data)
+{
+  // don't use SafeDownCast here for rendering performance
+  vtkDataArray *vectors = NULL;
+  bool hasCellVectors = false;
+
+  if (this->Internals->FieldNameSet)
+    {
+    vectors
+       = vtkDataArray::SafeDownCast(
+            this->GetInputArrayToProcess(
+            this->Internals->FieldAssociation,
+            this->Internals->FieldName.c_str(),
+            data,
+            &hasCellVectors));
+    }
+  else
+    {
+    vectors
+       = vtkDataArray::SafeDownCast(
+            this->GetInputArrayToProcess(
+            this->Internals->FieldAssociation,
+            this->Internals->FieldAttributeType,
+            data,
+            &hasCellVectors));
+    }
+
+  if ( vectors == NULL )
+    {
+    return false;
+    }
+
+  vtkDataSetAttributes *atts = NULL;
+  if ( hasCellVectors )
+    {
+    atts = data->GetCellData();
+    }
+  else
+    {
+    atts = data->GetPointData();
+    }
+
+  int id = -1;
+  int nArrays = atts->GetNumberOfArrays();
+  for (int i=0; i<nArrays; ++i)
+    {
+    if ( atts->GetArray(i) == vectors )
+      {
+      id = i;
+      break;
+      }
+    }
+  atts->SetActiveAttribute(id, vtkDataSetAttributes::TCOORDS);
+  return true;
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::ClearTCoords(vtkDataSet *data)
+{
+  data->GetCellData()->SetActiveAttribute(-1, vtkDataSetAttributes::TCOORDS);
+  data->GetPointData()->SetActiveAttribute(-1, vtkDataSetAttributes::TCOORDS);
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::GetBounds(vtkDataObject* dobj, double bounds[6])
+{
+  #if vtkSurfaceLICPainterDEBUG >= 1
+  cerr << "=====vtkSurfaceLICPainter::GetBounds" << endl;
+  #endif
+  // don't use SafeDownCast here for rendering performance
+
+  vtkMath::UninitializeBounds(bounds);
+  vtkDataSet* ds = vtkDataSet::SafeDownCast(dobj);
+  if (ds)
+    {
+    ds->GetBounds(bounds);
+    return;
+    }
+
+  vtkCompositeDataSet* cd = vtkCompositeDataSet::SafeDownCast(dobj);
+  if (cd)
+    {
+    vtkBoundingBox bbox;
+    vtkCompositeDataIterator* iter = cd->NewIterator();
+    for (iter->InitTraversal(); !iter->IsDoneWithTraversal(); iter->GoToNextItem())
+      {
+      ds = vtkDataSet::SafeDownCast(iter->GetCurrentDataObject());
+      if (ds && ds->GetNumberOfCells())
+        {
+        ds->GetBounds(bounds);
+        bbox.AddBounds(bounds);
+        }
+      }
+    iter->Delete();
+    bbox.GetBounds(bounds);
+    return;
+    }
+
+  vtkErrorMacro("unsupported dataset " << dobj->GetClassName());
+}
+
+//----------------------------------------------------------------------------
+void vtkSurfaceLICPainter::PrintSelf(ostream & os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os
+    << indent << "NumberOfSteps=" << this->NumberOfSteps << endl
+    << indent << "StepSize=" << this->StepSize << endl
+    << indent << "NormalizeVectors=" << this->NormalizeVectors << endl
+    << indent << "EnhancedLIC=" << this->EnhancedLIC << endl
+    << indent << "EnhanceContrast=" << this->EnhanceContrast << endl
+    << indent << "LowLICContrastEnhancementFactor=" << this->LowLICContrastEnhancementFactor << endl
+    << indent << "HighLICContrastEnhancementFactor=" << this->HighLICContrastEnhancementFactor << endl
+    << indent << "LowColorContrastEnhancementFactor=" << this->LowColorContrastEnhancementFactor << endl
+    << indent << "HighColorContrastEnhancementFactor=" << this->HighColorContrastEnhancementFactor << endl
+    << indent << "AntiAlias=" << this->AntiAlias << endl
+    << indent << "MaskOnSurface=" << this->MaskOnSurface << endl
+    << indent << "MaskThreshold=" << this->MaskThreshold << endl
+    << indent << "MaskIntensity=" << this->MaskIntensity << endl
+    << indent << "MaskColor=" << this->MaskColor[0] << ", " << this->MaskColor[1] << ", " << this->MaskColor[2] << endl
+    << indent << "ColorMode=" << this->ColorMode << endl
+    << indent << "LICIntensity=" << this->LICIntensity << endl
+    << indent << "MapModeBias=" << this->MapModeBias << endl
+    << indent << "GenerateNoiseTexture=" << this->GenerateNoiseTexture << endl
+    << indent << "NoiseType=" << this->NoiseType << endl
+    << indent << "NoiseTextureSize=" << this->NoiseTextureSize << endl
+    << indent << "NoiseGrainSize=" << this->NoiseGrainSize << endl
+    << indent << "MinNoiseValue=" << this->MinNoiseValue << endl
+    << indent << "MaxNoiseValue=" << this->MaxNoiseValue << endl
+    << indent << "NumberOfNoiseLevels=" << this->NumberOfNoiseLevels << endl
+    << indent << "ImpulseNoiseProbablity=" << this->ImpulseNoiseProbability << endl
+    << indent << "ImpulseNoiseBackgroundValue=" << this->ImpulseNoiseBackgroundValue << endl
+    << indent << "NoiseGeneratorSeed=" << this->NoiseGeneratorSeed << endl
+    << indent << "AlwaysUpdate=" << this->AlwaysUpdate << endl
+    << indent << "Enable=" << this->Enable << endl
+    << indent << "CompositeStrategy=" << this->CompositeStrategy << endl;
+}
diff --git a/Rendering/LIC/vtkSurfaceLICPainter.h b/Rendering/LIC/vtkSurfaceLICPainter.h
new file mode 100644
index 0000000..fba532a
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICPainter.h
@@ -0,0 +1,532 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSurfaceLICPainter.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkSurfaceLICPainter - painter that performs LIC on the surface of
+//  arbitrary geometry.
+//
+// .SECTION Description
+//  vtkSurfaceLICPainter performs LIC on the surface of arbitrary
+//  geometry. Point vectors are used as the vector field for generating the LIC.
+//  The implementation was originally based on "Image Space Based Visualization
+//  of Unsteady Flow on Surfaces" by Laramee, Jobard and Hauser, which appeared
+//  in the proceedings of IEEE Visualization '03, pages 131-138.
+//
+//  Internal pipeline:
+// <pre>
+// noise
+//     |
+//     [ PROJ (GAT) (COMP) LIC2D (SCAT) SHADE (CCE) DEP]
+//     |                                               |
+// vectors                                         surface LIC
+// </pre>
+// PROJ  - project vectors onto surface
+// GAT   - gather data for compositing and guard pixel generation (parallel only)
+// COMP  - composite gathered data
+// LIC2D - line integral convolution, see vtkLineIntegralConvolution2D.
+// SCAT  - scatter result (parallel only, not all compositors use it)
+// SHADE - combine LIC and scalar colors
+// CCE   - color contrast enhancement (optional)
+// DEP   - depth test and copy to back buffer
+//
+// The result of each stage is cached in a texture so that during interaction
+// a stage may be skipped if the user has not modified its parameters or input
+// data.
+//
+// The parallel parts of algorithm are implemented in vtkPSurfaceLICPainter.
+// Note that for MPI enabled builds this class will be automatically created
+// by the object factory.
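+//
+// A minimal configuration sketch; the array name "velocity" and the numeric
+// values here are arbitrary examples, and wiring the painter into a mapper's
+// painter chain is omitted:
+// <pre>
+//   vtkSurfaceLICPainter *lic = vtkSurfaceLICPainter::New();
+//   lic->SetInputArrayToProcess(vtkDataObject::FIELD_ASSOCIATION_POINTS, "velocity");
+//   lic->SetNumberOfSteps(40);
+//   lic->SetStepSize(0.4);
+//   lic->SetEnhanceContrast(vtkSurfaceLICPainter::ENHANCE_CONTRAST_LIC);
+//   lic->SetColorMode(vtkSurfaceLICPainter::COLOR_MODE_BLEND);
+//   lic->SetLICIntensity(0.8);
+//   lic->Delete();
+// </pre>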
+//
+// .SECTION See also
+// vtkSurfaceLICDefaultPainter vtkLineIntegralConvolution2D
+#ifndef __vtkSurfaceLICPainter_h
+#define __vtkSurfaceLICPainter_h
+
+#include "vtkRenderingLICModule.h" // For export macro
+#include "vtkPainter.h"
+
+class vtkRenderWindow;
+class vtkRenderer;
+class vtkActor;
+class vtkImageData;
+class vtkDataObject;
+class vtkDataArray;
+class vtkPainterCommunicator;
+
+class VTKRENDERINGLIC_EXPORT vtkSurfaceLICPainter : public vtkPainter
+{
+public:
+  static vtkSurfaceLICPainter* New();
+  vtkTypeMacro(vtkSurfaceLICPainter, vtkPainter);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Release any graphics resources that are being consumed by this mapper.
+  // The parameter window could be used to determine which graphic
+  // resources to release. In this case, releases the display lists.
+  virtual void ReleaseGraphicsResources(vtkWindow * win);
+
+  // Description:
+  // Get the output data object from this painter.
+  // Overridden to pass the input point (or cell) vectors as the tcoords to
+  // the delegate painters. This is required by the internal GLSL shader
+  // programs used for generating LIC.
+  virtual vtkDataObject* GetOutput();
+
+  // Description:
+  // Enable/Disable this painter.
+  void SetEnable(int val);
+  vtkGetMacro(Enable, int);
+  void SetEnableOn(){ this->SetEnable(1); }
+  void SetEnableOff(){ this->SetEnable(0); }
+
+  // Description:
+  // Set the vectors to be used for applying LIC. By default point vectors are
+  // used. Arguments are same as those passed to
+  // vtkAlgorithm::SetInputArrayToProcess except the first 3 arguments i.e. idx,
+  // port, connection.
+  void SetInputArrayToProcess(int fieldAssociation, const char *name);
+  void SetInputArrayToProcess(int fieldAssociation, int fieldAttributeType);
+
+  // Description:
+  // Get/Set the number of integration steps in each direction.
+  void SetNumberOfSteps(int val);
+  vtkGetMacro(NumberOfSteps, int);
+
+  // Description:
+  // Get/Set the step size (in pixels).
+  void SetStepSize(double val);
+  vtkGetMacro(StepSize, double);
+
+  // Description:
+  // Normalize vectors during integration. When set (the default), the
+  // input vector field is normalized during integration, and each
+  // integration occurs over the same arc length. When not set, each
+  // integration occurs over an arc length proportional to the field
+  // magnitude, as is customary in traditional numerical methods. See
+  // "Imaging Vector Fields Using Line Integral Convolution" for an
+  // example where normalization is used. See "Image Space Based
+  // Visualization of Unsteady Flow on Surfaces" for an example
+  // where no normalization is used.
+  void SetNormalizeVectors(int val);
+  vtkBooleanMacro(NormalizeVectors, int);
+  vtkGetMacro(NormalizeVectors, int);
+
+  // Description:
+  // When set MaskOnSurface computes |V| for use in the fragment masking
+  // tests on the surface. When not set the original un-projected
+  // un-transformed |V| is used.
+  void SetMaskOnSurface(int val);
+  vtkBooleanMacro(MaskOnSurface, int);
+  vtkGetMacro(MaskOnSurface, int);
+
+  // Description:
+  // The MaskThreshold controls the rendering of fragments in stagnant
+  // regions of flow. In these regions the LIC noise texture will be masked,
+  // where |V| < MaskThreshold is satisfied. The masking process blends
+  // the MaskColor with the scalar color of the surface proportional to
+  // MaskIntensity. See MaskIntensity for more information on the blending
+  // algorithm. This blending gives one control over the masking process
+  // so that masked fragments may be: highlighted (by setting a unique
+  // mask color and mask intensity > 0), made invisible with or without
+  // passing the un-convolved noise texture (by setting mask intensity 0),
+  // or made to blend into the LIC.
+  //
+  // MaskThreshold units are in the original vector space. Note that the
+  // threshold can be applied to the original vector field or to the surface
+  // projected vector field. See MaskOnSurface.
+  void SetMaskThreshold(double val);
+  vtkGetMacro(MaskThreshold, double);
+
+  // Description:
+  // The MaskColor is used on masked fragments. The default of (0.5, 0.5, 0.5)
+  // makes the masked fragments look similar to the LIC'd fragments. The mask
+  // color is applied only when MaskIntensity > 0.
+  void SetMaskColor(double *val);
+  void SetMaskColor(double r, double g, double b)
+    { double rgb[3]={r,g,b}; this->SetMaskColor(rgb); }
+  vtkGetVector3Macro(MaskColor, double);
+
+  // Description:
+  // The MaskIntensity controls the blending of the mask color and the geometry
+  // color. The color of masked fragments is given by:
+  //
+  //     c = maskColor * maskIntensity + geomColor * (1 - maskIntensity)
+  //
+  // The default value of 0.0 results in the geometry color being used.
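+  //
+  // For example (illustrative values only): with MaskColor = (0.5, 0.5, 0.5),
+  // MaskIntensity = 0.4 and a geometry color of (1.0, 0.0, 0.0), a masked
+  // fragment is shaded (0.5,0.5,0.5)*0.4 + (1.0,0.0,0.0)*0.6 = (0.8, 0.2, 0.2).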
+  void SetMaskIntensity(double val);
+  vtkGetMacro(MaskIntensity, double);
+
+  // Description:
+  // EnhancedLIC means the LIC is computed twice, with the second pass using
+  // the edge-enhanced result of the first pass as its noise texture. Edge
+  // enhancement is done by a simple Laplace convolution.
+  void SetEnhancedLIC(int val);
+  vtkGetMacro(EnhancedLIC, int);
+  vtkBooleanMacro(EnhancedLIC, int);
+
+  // Description:
+  // Enable/Disable contrast and dynamic range correction stages. Contrast
+  // enhancement can be enabled during LIC computations (see
+  // vtkLineIntegralConvolution2D) and after the scalar colors have been
+  // combined with the LIC.
+  //
+  // The best approach for using this feature is to enable LIC enhancement,
+  // and only enable COLOR enhancement if the image is too dark or dull.
+  //
+  // Both stages are implemented by a histogram stretching algorithm. During
+  // LIC stages the contrast enhancement is applied to gray scale LIC image.
+  // During the scalar coloring stage the contrast enhancement is applied to
+  // the lightness channel of the color image in HSL color space. The
+  // histogram stretching is implemented as follows:
+  //
+  //     L = (L-m)/(M-m)
+  //
+  // where, L is the fragment intensity/lightness, m is the intensity/lightness
+  // to map to 0, M is the intensity/lightness to map to 1. The default values
+  // of m and M are the min and max taken over all fragments.
+  //
+  // This increases the dynamic range and contrast in the LIC'd image, both of
+  // which are naturally attenuated by the convolution process.
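+  //
+  // For example (illustrative values only): if the fragment lightness spans
+  // [m, M] = [0.2, 0.8], a fragment with L = 0.5 is remapped to
+  // (0.5 - 0.2)/(0.8 - 0.2) = 0.5, while L = 0.8 is remapped to 1.0.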
+  //
+  // Values
+  //
+  //   ENHANCE_CONTRAST_OFF   -- don't enhance LIC or scalar colors
+  //   ENHANCE_CONTRAST_LIC   -- enhance in LIC high-pass input and output
+  //   ENHANCE_CONTRAST_COLOR -- enhance after scalars are combined with LIC
+  //   ENHANCE_CONTRAST_BOTH  -- enhance in LIC stages and after scalar colors
+  //
+  // This feature is disabled by default.
+  enum {
+    ENHANCE_CONTRAST_OFF=0,
+    ENHANCE_CONTRAST_LIC=1,
+    ENHANCE_CONTRAST_COLOR=3,
+    ENHANCE_CONTRAST_BOTH=4
+    };
+  void SetEnhanceContrast(int val);
+  vtkGetMacro(EnhanceContrast, int);
+
+  // Description:
+  // This feature is used to fine tune the contrast enhancement. There are two
+  // modes, AUTOMATIC and MANUAL. In AUTOMATIC mode values are provided
+  // indicating the fraction of the range to adjust M and m by during contrast
+  // enhancement histogram stretching. M and m are the intensity/lightness
+  // values that map to 1 and 0 (see EnhanceContrast for an explanation of the
+  // mapping procedure). m and M are computed using the factors as follows:
+  //
+  //     m = min(C) + mFactor * (max(C) - min(C))
+  //     M = max(C) - MFactor * (max(C) - min(C))
+  //
+  // The default values for mFactor and MFactor are 0, which results in
+  // m = min(C), M = max(C), taken over the entire image. Setting mFactor and
+  // MFactor above or below zero provides control over the saturation/
+  // de-saturation during contrast enhancement.
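+  //
+  // For example (illustrative values only): with min(C) = 0.1, max(C) = 0.9
+  // and mFactor = MFactor = 0.1, m = 0.1 + 0.1*0.8 = 0.18 and
+  // M = 0.9 - 0.1*0.8 = 0.82, so the stretch saturates lightness values
+  // below 0.18 and above 0.82.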
+  vtkGetMacro(LowLICContrastEnhancementFactor, double);
+  vtkGetMacro(HighLICContrastEnhancementFactor, double);
+  void SetLowLICContrastEnhancementFactor(double val);
+  void SetHighLICContrastEnhancementFactor(double val);
+  //
+  vtkGetMacro(LowColorContrastEnhancementFactor, double);
+  vtkGetMacro(HighColorContrastEnhancementFactor, double);
+  void SetLowColorContrastEnhancementFactor(double val);
+  void SetHighColorContrastEnhancementFactor(double val);
+
+  // Description:
+  // Enable/Disable the anti-aliasing pass. This optional pass (disabled by
+  // default) can be enabled to reduce jagged patterns in the final LIC image.
+  // Values greater than 0 control the number of iterations, 1 is typically
+  // sufficient.
+  void SetAntiAlias(int val);
+  vtkBooleanMacro(AntiAlias, int);
+  vtkGetMacro(AntiAlias, int);
+
+  // Description:
+  // Set/Get the color mode. The color mode controls how scalar colors are
+  // combined with the LIC in the final image. The BLEND mode combines scalar
+  // colors with LIC intensities with proportional blending controlled by the
+  // LICIntensity parameter. The MAP mode combines scalar colors with LIC
+  // by multiplying the lightness of the color's HSL representation.
+  //
+  // The default is COLOR_MODE_BLEND.
+  enum {
+    COLOR_MODE_BLEND=0,
+    COLOR_MODE_MAP
+  };
+  void SetColorMode(int val);
+  vtkGetMacro(ColorMode, int);
+
+  // Description:
+  // Factor used when blend mode is set to COLOR_MODE_BLEND. This controls the
+  // contribution of the LIC in the final output image as follows:
+  //
+  //   c = LIC * LICIntensity + scalar * (1 - LICIntensity);
+  //
+  // 0.0 produces same result as disabling LIC altogether, while 1.0 implies
+  // show LIC result alone.
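+  //
+  // For example (illustrative values only): with LICIntensity = 0.8, a LIC
+  // gray value of 0.3 and a scalar color channel of 0.9 combine to
+  // 0.3*0.8 + 0.9*0.2 = 0.42.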
+  void SetLICIntensity(double val);
+  vtkGetMacro(LICIntensity, double);
+
+  // Description:
+  // Factor used when blend mode is set to COLOR_MODE_MAP. This adds a bias to
+  // the LIC image. The purpose of this is to adjust the brightness when a
+  // brighter image is desired. The default of 0.0 results in no change. Values
+  // greater than 0.0 will brighten the image while values less than 0.0 darken
+  // the image.
+  void SetMapModeBias(double val);
+  vtkGetMacro(MapModeBias, double);
+
+  // Description:
+  // Set the data containing a noise array as active scalars. The active
+  // scalars array will be converted into a texture for use as noise in the
+  // LIC process. Noise datasets are expected to be gray scale.
+  void SetNoiseDataSet(vtkImageData *data);
+  vtkImageData *GetNoiseDataSet();
+
+  // Description:
+  // Set/Get the noise texture source. When not set, the default 200x200 white
+  // noise texture is used (see VTKData/Data/Data/noise.png). When set, a noise
+  // texture is generated based on the following parameters:
+  //
+  //    NoiseType               - select noise type. Gaussian, Uniform, etc
+  //    NoiseTextureSize        - number of pixels in square noise texture(side)
+  //    NoiseGrainSize          - number of pixels each noise value spans(side)
+  //    MinNoiseValue           - minimum noise color >=0 && < MaxNoiseValue
+  //    MaxNoiseValue           - maximum noise color <=1 && > MinNoiseValue
+  //    NumberOfNoiseLevels     - number of discrete noise colors
+  //    ImpulseNoiseProbability - impulse noise is generated when < 1
+  //    ImpulseNoiseBackgroundValue  - the background color for untouched pixels
+  //    NoiseGeneratorSeed      - seed the random number generators
+  //
+  // Changing the noise texture gives one greater control over the look of the
+  // final image. The default is 0 which results in the use of a static 200x200
+  // Gaussian noise texture. See VTKData/Data/Data/noise.png.
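+  //
+  // A generated noise texture might be configured like the following sketch,
+  // where lic is a pointer to this painter and the values are arbitrary
+  // examples:
+  // <pre>
+  //   lic->SetGenerateNoiseTexture(1);
+  //   lic->SetNoiseType(vtkSurfaceLICPainter::NOISE_TYPE_GAUSSIAN);
+  //   lic->SetNoiseTextureSize(128);
+  //   lic->SetNoiseGrainSize(2);
+  //   lic->SetMinNoiseValue(0.0);
+  //   lic->SetMaxNoiseValue(0.8);
+  //   lic->SetNoiseGeneratorSeed(12345);
+  // </pre>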
+  void SetGenerateNoiseTexture(int shouldGenerate);
+  vtkGetMacro(GenerateNoiseTexture, int);
+
+  // Description:
+  // Select the statistical distribution of randomly generated noise values.
+  // With uniform noise there is greater control over the range of values
+  // in the noise texture. The Default is NOISE_TYPE_GAUSSIAN.
+  enum {
+    NOISE_TYPE_UNIFORM=0,
+    NOISE_TYPE_GAUSSIAN=1,
+    NOISE_TYPE_PERLIN=2
+    };
+  void SetNoiseType(int type);
+  vtkGetMacro(NoiseType, int);
+
+  // Description:
+  // Set/Get the side length in pixels of the noise texture. The texture will
+  // be length^2 pixels in area.
+  void SetNoiseTextureSize(int length);
+  vtkGetMacro(NoiseTextureSize, int);
+
+  // Description:
+  // Set/Get the side length in pixels of the noise values in the noise texture.
+  // Each noise value will be length^2 pixels in area.
+  void SetNoiseGrainSize(int val);
+  vtkGetMacro(NoiseGrainSize, int);
+
+  // Description:
+  // Set/Get the minimum and maximum gray scale values that the generated noise
+  // can take on. The generated noise will be in the range of MinNoiseValue to
+  // MaxNoiseValue. Values are clamped within 0 to 1. MinNoiseValue must be
+  // less than MaxNoiseValue.
+  void SetMinNoiseValue(double val);
+  void SetMaxNoiseValue(double val);
+  vtkGetMacro(MinNoiseValue, double);
+  vtkGetMacro(MaxNoiseValue, double);
+
+  // Description:
+  // Set/Get the number of discrete values a noise pixel may take on. Default
+  // 1024.
+  void SetNumberOfNoiseLevels(int val);
+  vtkGetMacro(NumberOfNoiseLevels, int);
+
+  // Description:
+  // Control the density of the noise. A value of 1.0 produces uniform random
+  // noise, while values < 1.0 produce impulse noise with the given probability.
+  void SetImpulseNoiseProbability(double val);
+  vtkGetMacro(ImpulseNoiseProbability, double);
+
+  // Description:
+  // The color to use for untouched pixels when impulse noise probability < 1.
+  void SetImpulseNoiseBackgroundValue(double val);
+  vtkGetMacro(ImpulseNoiseBackgroundValue, double);
+
+  // Description:
+  // Set/Get the seed value used by the random number generator.
+  void SetNoiseGeneratorSeed(int val);
+  vtkGetMacro(NoiseGeneratorSeed, int);
+
+  // Description:
+  // Control the screen space decomposition where LIC is computed.
+  enum {
+    COMPOSITE_INPLACE=0,
+    COMPOSITE_INPLACE_DISJOINT=1,
+    COMPOSITE_BALANCED=2,
+    COMPOSITE_AUTO=3
+    };
+  void SetCompositeStrategy(int val);
+  vtkGetMacro(CompositeStrategy, int);
+
+  // Description:
+  // Returns true if the rendering context supports extensions needed by this
+  // painter.
+  static bool IsSupported(vtkRenderWindow *context);
+
+  // Description:
+  // Methods used for parallel benchmarks. Use cmake to define
+  // vtkSurfaceLICPainterTIME to enable benchmarks. During each
+  // update timing information is stored; it can be written to
+  // disk by calling WriteTimerLog.
+  virtual void WriteTimerLog(const char *){}
+
+protected:
+  vtkSurfaceLICPainter();
+  ~vtkSurfaceLICPainter();
+
+  // Description:
+  // Called before RenderInternal() if the Information has been changed
+  // since the last time this method was called. We use this to detect
+  // when LUT has changed.
+  virtual void ProcessInformation(vtkInformation* info);
+
+  // Description:
+  // Get the min/max across all ranks. min/max are in/out.
+  // In serial operation this is a no-op, in parallel it
+  // is a global collective reduction.
+  virtual void GetGlobalMinMax(vtkPainterCommunicator*, float&, float&){}
+
+  // Description:
+  // Methods used for parallel benchmarks. Use cmake to define
+  // vtkSurfaceLICPainterTIME to enable benchmarks. During each
+  // update timing information is stored; it can be written to
+  // disk by calling WriteTimerLog.
+  virtual void StartTimerEvent(const char *){}
+  virtual void EndTimerEvent(const char *){}
+
+  // Description:
+  // Creates a new communicator with or without the calling process,
+  // as indicated by the passed-in flag: if the flag is non-zero the calling
+  // process is included in the new communicator. In parallel this call is an
+  // MPI collective on the world communicator. In serial this is a no-op.
+  virtual vtkPainterCommunicator *CreateCommunicator(int);
+
+  // Description:
+  // Creates a new communicator for internal use based on this
+  // rank's visible data.
+  void CreateCommunicator();
+
+  // Description:
+  // Computes data bounds.
+  void GetBounds(vtkDataObject* data, double bounds[6]);
+
+  // Description:
+  // Take part in garbage collection.
+  virtual void ReportReferences(vtkGarbageCollector *collector);
+
+  // Description:
+  // Updates the noise texture, downsampling by the requested sample rate.
+  void UpdateNoiseImage(vtkRenderWindow *renWin);
+
+  // Description:
+  // Performs the actual rendering. Subclasses may override this method.
+  // The default implementation merely calls Render on the DelegatePainter,
+  // if any. When RenderInternal() is called, it is assured that the
+  // DelegatePainter is in sync with this painter, i.e. UpdateDelegatePainter()
+  // has been called.
+  virtual void RenderInternal(
+        vtkRenderer* renderer,
+        vtkActor* actor,
+        unsigned long typeflags,
+        bool forceCompileOnly);
+
+
+  // Description:
+  // Look for changes that would trigger stage updates
+  void ValidateContext(vtkRenderer *renderer);
+
+  // Description:
+  // Return false if stage can be skipped
+  bool NeedToUpdateOutputData();
+  virtual bool NeedToUpdateCommunicator();
+  bool NeedToRenderGeometry(vtkRenderer *renderer, vtkActor *actor);
+  bool NeedToGatherVectors();
+  bool NeedToComputeLIC();
+  bool NeedToColorLIC();
+  void SetUpdateAll();
+
+  // Description:
+  // Resource allocators.
+  bool PrepareOutput();
+  void InitializeResources();
+
+  // Description:
+  // Set tcoords from the vectors.
+  bool VectorsToTCoords(vtkDataObject *dataObj);
+  bool VectorsToTCoords(vtkDataSet *dataObj);
+  void ClearTCoords(vtkDataSet *data);
+
+  // Description:
+  // Returns true when rendering LIC is possible.
+  bool CanRenderSurfaceLIC(vtkActor *actor, int typeflags);
+
+protected:
+  // Unit is a pixel length.
+  int     NumberOfSteps;
+  double  StepSize;
+  int     NormalizeVectors;
+
+  int     EnhancedLIC;
+  int     EnhanceContrast;
+  double  LowLICContrastEnhancementFactor;
+  double  HighLICContrastEnhancementFactor;
+  double  LowColorContrastEnhancementFactor;
+  double  HighColorContrastEnhancementFactor;
+  int     AntiAlias;
+
+  int     MaskOnSurface;
+  double  MaskThreshold;
+  double  MaskIntensity;
+  double  MaskColor[3];
+
+  int     ColorMode;
+  double  LICIntensity;
+  double  MapModeBias;
+
+  int     GenerateNoiseTexture;
+  int     NoiseType;
+  int     NoiseTextureSize;
+  int     NoiseGrainSize;
+  double  MinNoiseValue;
+  double  MaxNoiseValue;
+  int     NumberOfNoiseLevels;
+  double  ImpulseNoiseProbability;
+  double  ImpulseNoiseBackgroundValue;
+  int     NoiseGeneratorSeed;
+
+  int     AlwaysUpdate;
+  int     Enable;
+  int     CompositeStrategy;
+
+  vtkDataObject* Output;
+  class vtkInternals;
+  vtkInternals* Internals;
+
+private:
+  vtkSurfaceLICPainter(const vtkSurfaceLICPainter&); // Not implemented.
+  void operator=(const vtkSurfaceLICPainter&); // Not implemented.
+};
+
+#endif
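
    For orientation, here is a minimal configuration sketch using only the
    setters and enums declared in the header above; the painter instance and
    pipeline wiring are assumed from the standard VTK painter chain, and the
    values are illustrative rather than defaults.

      // sketch.cxx -- hypothetical helper, not part of this commit
      #include "vtkSurfaceLICPainter.h"

      void ConfigureLICNoise(vtkSurfaceLICPainter *licPainter)
      {
        // request a 200^2 pixel Perlin noise texture built from 4x4 pixel grains
        licPainter->SetNoiseType(vtkSurfaceLICPainter::NOISE_TYPE_PERLIN);
        licPainter->SetNoiseTextureSize(200);
        licPainter->SetNoiseGrainSize(4);

        // clamp the generated gray levels and quantize them to 256 steps
        licPainter->SetMinNoiseValue(0.1);
        licPainter->SetMaxNoiseValue(0.9);
        licPainter->SetNumberOfNoiseLevels(256);

        // sparse impulse noise over a black background, with a reproducible seed
        licPainter->SetImpulseNoiseProbability(0.5);
        licPainter->SetImpulseNoiseBackgroundValue(0.0);
        licPainter->SetNoiseGeneratorSeed(12345);

        // let the painter choose the screen-space compositing strategy
        licPainter->SetCompositeStrategy(vtkSurfaceLICPainter::COMPOSITE_AUTO);
      }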
diff --git a/Rendering/LIC/vtkSurfaceLICPainter_CE.glsl b/Rendering/LIC/vtkSurfaceLICPainter_CE.glsl
new file mode 100644
index 0000000..9c27c8a
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICPainter_CE.glsl
@@ -0,0 +1,105 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkSurfaceLICPainter_CE.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// Color contrast enhancement stage implemented via histogram stretching
+// on the lightness channel. If the min and max are tweaked it can generate
+// out-of-range values; these will be clamped to the range 0 to 1.
+
+#version 110
+
+uniform sampler2D texGeomColors; // scalars + lighting
+uniform sampler2D texLIC;        // image lic, mask
+uniform sampler2D texHSLColors;  // hsla colors
+
+uniform float     uLMin;         // min lightness over all fragments
+uniform float     uLMaxMinDiff;  // max - min lightness over all fragments
+
+/**
+Helper for HSL to RGB conversion.
+*/
+float Util(float v1, float v2, float vH)
+{
+  if (vH < 0.0)
+    vH += 1.0;
+
+  if (vH > 1.0)
+     vH -= 1.0;
+
+  if ((6.0 * vH) < 1.0)
+    return (v1 + (v2 - v1) * 6.0 * vH);
+
+  if ((2.0 * vH) < 1.0)
+    return (v2);
+
+  if ((3.0 * vH) < 2.0)
+    return (v1 + (v2 - v1) * ((2.0 / 3.0) - vH) * 6.0);
+
+  return v1;
+}
+
+/**
+Convert from HSL space into RGB space.
+*/
+vec3 HSLToRGB(vec3 HSL)
+{
+  vec3 RGB;
+  if (HSL.y == 0.0)
+    {
+    // Gray
+    RGB.r = HSL.z;
+    RGB.g = HSL.z;
+    RGB.b = HSL.z;
+    }
+  else
+    {
+    // Chromatic
+    float v2;
+    if (HSL.z < 0.5)
+      v2 = HSL.z * (1.0 + HSL.y);
+    else
+      v2 = (HSL.z + HSL.y) - (HSL.y * HSL.z);
+
+    float v1 = 2.0 * HSL.z - v2;
+
+    RGB.r = Util(v1, v2, HSL.x + (1.0 / 3.0));
+    RGB.g = Util(v1, v2, HSL.x);
+    RGB.b = Util(v1, v2, HSL.x - (1.0 / 3.0));
+    }
+
+  return RGB.rgb;
+}
+
+void main()
+{
+  // look up hsl color, mask
+  vec4 fragColor = texture2D(texHSLColors, gl_TexCoord[0].st);
+
+  // don't modify masked fragments (masked => lic.g==1)
+  vec4 lic = texture2D(texLIC, gl_TexCoord[0].st);
+  if (lic.g==0.0)
+    {
+    // normalize lightness channel
+    fragColor.z = clamp((fragColor.z - uLMin)/uLMaxMinDiff, 0.0, 1.0);
+    }
+
+  // back into rgb space
+  fragColor.rgb = HSLToRGB(fragColor.xyz);
+
+  // add alpha
+  vec4 geomColor = texture2D(texGeomColors, gl_TexCoord[0].st);
+  fragColor.a = geomColor.a;
+
+  gl_FragData[0] = fragColor;
+}
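
    For reference, the normalization main() applies to the lightness channel
    of unmasked fragments is a standard min/max histogram stretch, clamped to
    the unit interval:

      $L' = \operatorname{clamp}\!\left(\frac{L - L_{\min}}{L_{\max} - L_{\min}},\ 0,\ 1\right)$

    where $L_{\min}$ corresponds to the uLMin uniform and
    $L_{\max} - L_{\min}$ to uLMaxMinDiff, both gathered over all contributing
    fragments.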
diff --git a/Rendering/LIC/vtkSurfaceLICPainter_DCpy.glsl b/Rendering/LIC/vtkSurfaceLICPainter_DCpy.glsl
new file mode 100644
index 0000000..d000ae5
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICPainter_DCpy.glsl
@@ -0,0 +1,38 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkSurfaceLICPainter_DCpy.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// This shader copies fragments and depths to the output buffer
+
+#version 110
+
+uniform sampler2D texDepth;     // z values from vertex shader
+uniform sampler2D texRGBColors; // final rgb LIC colors
+
+void main()
+{
+  vec2 tc = gl_TexCoord[0].st;
+  gl_FragDepth = texture2D(texDepth, tc).x;
+  gl_FragColor = texture2D(texRGBColors, tc);
+
+  // Since we render a screen-aligned quad,
+  // we're going to be writing fragments
+  // not touched by the original geometry;
+  // it's critical not to modify those
+  // fragments.
+  if (gl_FragDepth == 1.0)
+    {
+    discard;
+    }
+}
diff --git a/Rendering/LIC/vtkSurfaceLICPainter_GeomFs.glsl b/Rendering/LIC/vtkSurfaceLICPainter_GeomFs.glsl
new file mode 100644
index 0000000..ecbd7ab
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICPainter_GeomFs.glsl
@@ -0,0 +1,32 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkSurfaceLICPainter_fs1.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+#version 110
+
+varying vec4 vColor;
+varying vec2 vProjectedVF;
+varying vec3 vMaskCriteria;
+
+void main()
+{
+  // save the depth for parallel use:
+  // 1. it identifies local fragments after compositing
+  // 2. it's used in compositing
+  float depth = gl_FragCoord.z;
+
+  gl_FragData[0] = clamp(vColor, vec4(0,0,0,0), vec4(1,1,1,1));       // colors => scalars + lighting
+  gl_FragData[1] = vec4(vProjectedVF.x, vProjectedVF.y, 0.0 , depth); // projected vectors
+  gl_FragData[2] = vec4(vMaskCriteria, depth);                        // vectors for fragment masking
+}
diff --git a/Rendering/LIC/vtkSurfaceLICPainter_GeomVs.glsl b/Rendering/LIC/vtkSurfaceLICPainter_GeomVs.glsl
new file mode 100644
index 0000000..9904592
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICPainter_GeomVs.glsl
@@ -0,0 +1,78 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkSurfaceLICPainter_vs1.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+#version 120
+
+varying vec4 vColor;
+varying vec2 vProjectedVF;
+varying vec3 vMaskCriteria;
+
+// 0/1; when 1, V is projected onto the surface for the |V| computation.
+uniform float uMaskOnSurface;
+
+
+// from vtkColorMaterialHelper
+gl_MaterialParameters getMaterialParameters();
+
+// from vtkLightingHelper
+vec4 singleColor(gl_MaterialParameters m, vec3 surfacePosEyeCoords, vec3 n);
+
+/**
+Compute the vertex color.
+*/
+vec4 colorFrontFace()
+{
+  vec4 heyeCoords = gl_ModelViewMatrix * gl_Vertex;
+  vec3 eyeCoords = heyeCoords.xyz / heyeCoords.w;
+  vec3 n = normalize(gl_NormalMatrix * gl_Normal);
+  return singleColor(getMaterialParameters(), eyeCoords,n);
+}
+
+/**
+Project "vector" onto the surface.
+*/
+vec3 projectOnSurface(vec3 vector)
+{
+  vec3 normal = normalize(gl_Normal);
+  float k = dot(normal, vector);
+  return (vector - (k*normal));
+}
+
+
+/**
+Get the fragment mask criteria. Fragment masking should be applied according to
+the original vector field and in those units. If it is not, masked fragments
+will not match pseudocolor plots.
+*/
+vec3 getMaskCriteria( vec3 vector )
+{
+  if (uMaskOnSurface == 0)
+    {
+    return vector;
+    }
+  else
+    {
+    return projectOnSurface(vector);
+    }
+}
+
+void main()
+{
+  vec3 vf = projectOnSurface(gl_MultiTexCoord0.stp);
+  vProjectedVF = (gl_NormalMatrix * vf).xy;
+  vMaskCriteria = getMaskCriteria(gl_MultiTexCoord0.stp);
+  vColor = colorFrontFace();
+  gl_Position = ftransform();
+}
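
    For reference, projectOnSurface() above removes the component of the
    vector field along the normalized surface normal, i.e.

      $\mathbf{v}_{\parallel} = \mathbf{v} - (\hat{\mathbf{n}} \cdot \mathbf{v})\,\hat{\mathbf{n}}$

    while getMaskCriteria() deliberately skips this projection when
    uMaskOnSurface is 0, so that masking is evaluated against the original,
    unprojected vector field.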
diff --git a/Rendering/LIC/vtkSurfaceLICPainter_SC.glsl b/Rendering/LIC/vtkSurfaceLICPainter_SC.glsl
new file mode 100644
index 0000000..c1ee02a
--- /dev/null
+++ b/Rendering/LIC/vtkSurfaceLICPainter_SC.glsl
@@ -0,0 +1,202 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkSurfaceLICPainter_fs2.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// This shader combines surface geometry, LIC, and scalar colors.
+
+#version 110
+
+uniform sampler2D texVectors;       // vectors, depth
+uniform sampler2D texGeomColors;    // scalar colors + lighting
+uniform sampler2D texLIC;           // image lic
+uniform int       uScalarColorMode; // select between blend, and map shader
+uniform float     uLICIntensity;    // blend shader: blending factor for lic'd colors
+uniform float     uMapBias;         // map shader: adjust the brightness of the result
+uniform float     uMaskIntensity;   // blending factor for mask color
+uniform vec3      uMaskColor;       // color for the masked out fragments
+
+/**
+Convert from RGB color space into HSL colorspace.
+*/
+vec3 RGBToHSL(vec3 RGB)
+{
+  vec3 HSL = vec3(0.0, 0.0, 0.0);
+
+  float RGBMin = min(min(RGB.r, RGB.g), RGB.b);
+  float RGBMax = max(max(RGB.r, RGB.g), RGB.b);
+  float RGBMaxMinDiff = RGBMax - RGBMin;
+
+  HSL.z = (RGBMax + RGBMin) / 2.0;
+
+  if (RGBMaxMinDiff == 0.0)
+    {
+    // Gray scale
+    HSL.x = 0.0;
+    HSL.y = 0.0;
+    }
+  else
+    {
+    // Color
+    if (HSL.z < 0.5)
+      HSL.y = RGBMaxMinDiff / (RGBMax + RGBMin);
+    else
+      HSL.y = RGBMaxMinDiff / (2.0 - RGBMax - RGBMin);
+
+    float dR
+      = (((RGBMax - RGB.r) / 6.0) + (RGBMaxMinDiff / 2.0)) / RGBMaxMinDiff;
+    float dG
+      = (((RGBMax - RGB.g) / 6.0) + (RGBMaxMinDiff / 2.0)) / RGBMaxMinDiff;
+    float dB
+      = (((RGBMax - RGB.b) / 6.0) + (RGBMaxMinDiff / 2.0)) / RGBMaxMinDiff;
+
+    if (RGB.r == RGBMax)
+      HSL.x = dB - dG;
+    else
+    if (RGB.g == RGBMax)
+      HSL.x = (1.0 / 3.0) + dR - dB;
+    else
+    if (RGB.b == RGBMax)
+      HSL.x = (2.0 / 3.0) + dG - dR;
+
+    if (HSL.x < 0.0)
+      HSL.x += 1.0;
+
+    if (HSL.x > 1.0)
+      HSL.x -= 1.0;
+    }
+
+  return HSL;
+}
+
+/**
+Helper for HSL to RGB conversion.
+*/
+float Util(float v1, float v2, float vH)
+{
+  if (vH < 0.0)
+    vH += 1.0;
+
+  if (vH > 1.0)
+     vH -= 1.0;
+
+  if ((6.0 * vH) < 1.0)
+    return (v1 + (v2 - v1) * 6.0 * vH);
+
+  if ((2.0 * vH) < 1.0)
+    return (v2);
+
+  if ((3.0 * vH) < 2.0)
+    return (v1 + (v2 - v1) * ((2.0 / 3.0) - vH) * 6.0);
+
+  return v1;
+}
+
+/**
+Convert from HSL space into RGB space.
+*/
+vec3 HSLToRGB(vec3 HSL)
+{
+  vec3 RGB;
+  if (HSL.y == 0.0)
+    {
+    // Gray
+    RGB.r = HSL.z;
+    RGB.g = HSL.z;
+    RGB.b = HSL.z;
+    }
+  else
+    {
+    // Chromatic
+    float v2;
+    if (HSL.z < 0.5)
+      v2 = HSL.z * (1.0 + HSL.y);
+    else
+      v2 = (HSL.z + HSL.y) - (HSL.y * HSL.z);
+
+    float v1 = 2.0 * HSL.z - v2;
+
+    RGB.r = Util(v1, v2, HSL.x + (1.0 / 3.0));
+    RGB.g = Util(v1, v2, HSL.x);
+    RGB.b = Util(v1, v2, HSL.x - (1.0 / 3.0));
+    }
+
+  return RGB.rgb;
+}
+
+void main()
+{
+  vec4 lic = texture2D(texLIC, gl_TexCoord[0].st);
+  vec4 geomColor = texture2D(texGeomColors, gl_TexCoord[0].st);
+
+  // depth is used to determine which fragments belong to us
+  // and which we can change
+  float depth = texture2D(texVectors, gl_TexCoord[0].st).a;
+
+  vec3 fragColorRGB;
+  float valid;
+  if (depth > 1.0e-3)
+    {
+    // we own it
+    // shade LIC'ed geometry, or apply mask
+    if (lic.g!=0.0)
+      {
+      // it's masked
+      // apply fragment mask
+      fragColorRGB = uMaskIntensity * uMaskColor + (1.0 - uMaskIntensity) * geomColor.rgb;
+      valid = 0.0;
+      }
+    else
+      {
+      if (uScalarColorMode==0)
+        {
+        // blend with scalars
+        fragColorRGB = lic.rrr * uLICIntensity + geomColor.rgb * (1.0 - uLICIntensity);
+        }
+      else
+        {
+        // multiply with scalars
+        fragColorRGB = geomColor.rgb * clamp((uMapBias + lic.r), 0.0, 1.0);
+        }
+      if (lic.b != 0.0)
+        {
+        // didn't have the required guard pixels;
+        // don't consider it in min/max estimation
+        // for histogram stretching
+        valid = 0.0;
+        }
+      else
+        {
+        // ok to use in min/max estimates for histogram
+        // stretching
+        valid = 1.0;
+        }
+      }
+    }
+  else
+    {
+    // we don't own it
+    // pass through scalars
+    fragColorRGB = geomColor.rgb;
+    valid = 0.0;
+    }
+
+  // if no further stages this texture is
+  // copied to the screen
+  gl_FragData[0] = vec4(fragColorRGB, geomColor.a);
+
+  // if further stages, move to hsl space for contrast
+  // enhancement. Encoding validity saves moving a texture to the CPU.
+  vec3 fragColorHSL = RGBToHSL(fragColorRGB);
+  gl_FragData[1] = vec4(fragColorHSL, valid);
+}
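
    In summary, for fragments owned by this rank the shader above produces,
    depending on the mode (symbols follow the uniforms declared above):

      masked:          $C = m\,C_{\text{mask}} + (1 - m)\,C_{\text{geom}}$,  with $m$ = uMaskIntensity
      blend (mode 0):  $C = \alpha\,I_{\text{LIC}} + (1 - \alpha)\,C_{\text{geom}}$,  with $\alpha$ = uLICIntensity
      multiply (map):  $C = C_{\text{geom}}\,\operatorname{clamp}(b + I_{\text{LIC}},\,0,\,1)$,  with $b$ = uMapBias

    Fragments not owned by this rank pass the geometry color through unchanged.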
diff --git a/Rendering/LIC/vtkTextureIO.cxx b/Rendering/LIC/vtkTextureIO.cxx
new file mode 100644
index 0000000..cee6233
--- /dev/null
+++ b/Rendering/LIC/vtkTextureIO.cxx
@@ -0,0 +1,172 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTextureIO.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkTextureIO.h"
+
+#include "vtkTextureObject.h"
+#include "vtkPixelBufferObject.h"
+#include "vtkMultiBlockDataSet.h"
+#include "vtkImageData.h"
+#include "vtkPointData.h"
+#include "vtkCellData.h"
+#include "vtkFloatArray.h"
+#include "vtkDataSetWriter.h"
+#include "vtkXMLMultiBlockDataWriter.h"
+#include "vtkPixelExtent.h"
+#include "vtkPixelTransfer.h"
+#include <cstddef>
+#include <deque>
+#include <sstream>
+
+using std::string;
+using std::deque;
+using std::ostringstream;
+
+//----------------------------------------------------------------------------
+static
+vtkFloatArray *DownloadTexture(
+        vtkTextureObject *texture,
+        const unsigned int *sub)
+{
+  int tt = texture->GetDataType();
+  unsigned int tw = texture->GetWidth();
+  unsigned int th = texture->GetHeight();
+  unsigned int tnc = texture->GetComponents();
+
+  vtkPixelExtent texExt(0U, tw-1U, 0U, th-1U);
+  vtkPixelExtent subExt(texExt);
+  if (sub)
+    {
+    subExt.SetData(sub);
+    }
+
+  vtkFloatArray *ta = vtkFloatArray::New();
+  ta->SetNumberOfComponents(tnc);
+  ta->SetNumberOfTuples(subExt.Size());
+  ta->SetName("tex");
+  float *pTa = ta->GetPointer(0);
+
+  vtkPixelBufferObject *pbo = texture->Download();
+
+  vtkPixelTransfer::Blit(
+        texExt,
+        subExt,
+        subExt,
+        subExt,
+        tnc,
+        tt,
+        pbo->MapPackedBuffer(),
+        tnc,
+        VTK_FLOAT,
+        pTa);
+
+  pbo->UnmapPackedBuffer();
+  pbo->Delete();
+
+  return ta;
+}
+
+//----------------------------------------------------------------------------
+void vtkTextureIO::Write(
+        const char *filename,
+        vtkTextureObject *texture,
+        const unsigned int *subset,
+        const double *origin)
+{
+  unsigned int tw = texture->GetWidth();
+  unsigned int th = texture->GetHeight();
+
+  vtkPixelExtent subExt(0U, tw-1U, 0U, th-1U);
+  if (subset)
+    {
+    subExt.SetData(subset);
+    }
+
+  int dataExt[6]={0,0, 0,0, 0,0};
+  subExt.CellToNode();
+  subExt.GetData(dataExt);
+
+  double dataOrigin[6]={0,0, 0,0, 0,0};
+  if (origin)
+    {
+    dataOrigin[0] = origin[0];
+    dataOrigin[1] = origin[1];
+    }
+
+  vtkFloatArray *ta = DownloadTexture(texture, subset);
+
+  vtkImageData *id = vtkImageData::New();
+  id->SetExtent(dataExt);
+  id->SetOrigin(dataOrigin);
+  id->GetCellData()->AddArray(ta);
+  ta->Delete();
+
+  vtkDataSetWriter *w = vtkDataSetWriter::New();
+  w->SetFileName(filename);
+  w->SetInputData(id);
+  w->Write();
+
+  id->Delete();
+  w->Delete();
+}
+
+//----------------------------------------------------------------------------
+void vtkTextureIO::Write(
+        const char *filename,
+        vtkTextureObject *texture,
+        const deque<vtkPixelExtent> &exts,
+        const double *origin)
+{
+  int n = static_cast<int>(exts.size());
+  if (n == 0)
+    {
+    //vtkGenericWarningMacro("Empty extents nothing will be written");
+    return;
+    }
+  vtkMultiBlockDataSet *mb = vtkMultiBlockDataSet::New();
+  for (int i=0; i<n; ++i)
+    {
+    vtkPixelExtent ext = exts[i];
+    if (ext.Empty()) continue;
+
+    vtkFloatArray *ta = DownloadTexture(texture, ext.GetDataU());
+
+    int dataExt[6]={0,0,0,0,0,0};
+    ext.CellToNode();
+    ext.GetData(dataExt);
+
+    double dataOrigin[6]={0,0,0,0,0,0};
+    if (origin)
+      {
+      dataOrigin[0] = origin[0];
+      dataOrigin[1] = origin[1];
+      }
+
+    vtkImageData *id = vtkImageData::New();
+    id->SetExtent(dataExt);
+    id->SetOrigin(dataOrigin);
+    id->GetCellData()->AddArray(ta);
+    ta->Delete();
+
+    mb->SetBlock(i, id);
+    id->Delete();
+    }
+
+  vtkXMLMultiBlockDataWriter *w = vtkXMLMultiBlockDataWriter::New();
+  w->SetFileName(filename);
+  w->SetInputData(mb);
+  w->Write();
+  w->Delete();
+  mb->Delete();
+}
diff --git a/Rendering/LIC/vtkTextureIO.h b/Rendering/LIC/vtkTextureIO.h
new file mode 100644
index 0000000..a71662c
--- /dev/null
+++ b/Rendering/LIC/vtkTextureIO.h
@@ -0,0 +1,86 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTextureIO.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkTextureIO -- I/O routines for vtkTextureObject
+// .SECTION Description
+// A small collection of I/O routines that write vtkTextureObject
+// to disk for debugging.
+#ifndef __vtkTextureIO_h
+#define __vtkTextureIO_h
+
+#include "vtkRenderingLICModule.h" // for export
+#include "vtkPixelExtent.h" // for pixel extent
+
+// included vtkSystemIncludes in vtkPixelExtent
+#include <cstddef> // for NULL
+#include <string> // for string
+#include <deque> // for deque
+
+class vtkTextureObject;
+
+class VTKRENDERINGLIC_EXPORT vtkTextureIO
+{
+public:
+  // Description:
+  // Write to disk as image data with subset (optional) at dataset origin (optional).
+  static void Write(
+          const char *filename,
+          vtkTextureObject *texture,
+          const unsigned int *subset=NULL,
+          const double *origin=NULL);
+
+  // Description:
+  // Write to disk as image data with subset (optional) at dataset origin (optional).
+  static void Write(
+          std::string filename,
+          vtkTextureObject *texture,
+          const unsigned int *subset=NULL,
+          const double *origin=NULL)
+      {
+      Write(filename.c_str(), texture, subset, origin);
+      }
+
+  // Description:
+  // Write to disk as image data with subset (optional) at dataset origin (optional).
+  static void Write(
+          std::string filename,
+          vtkTextureObject *texture,
+          const vtkPixelExtent &subset,
+          const double *origin=NULL)
+      {
+      Write(filename.c_str(), texture, subset.GetDataU(), origin);
+      }
+
+  // Description:
+  // Write list of subsets to disk as multiblock image data at dataset origin (optional).
+  static void Write(
+          const char *filename,
+          vtkTextureObject *texture,
+          const std::deque<vtkPixelExtent> &exts,
+          const double *origin=NULL);
+
+  // Description:
+  // Write list of subsets to disk as multiblock image data at dataset origin (optional).
+  static void Write(
+          std::string filename,
+          vtkTextureObject *texture,
+          const std::deque<vtkPixelExtent> &exts,
+          const double *origin=NULL)
+      {
+      Write(filename.c_str(),texture,exts,origin);
+      }
+};
+
+#endif
+// VTK-HeaderTest-Exclude: vtkTextureIO.h
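
    A hypothetical debugging sketch using the overloads declared above; the
    texture object and per-block extents are assumed to come from the LIC
    pipeline, and the file names are illustrative only.

      #include "vtkTextureIO.h"
      #include "vtkPixelExtent.h"
      #include <deque>

      void DumpLICTextures(vtkTextureObject *licTexture,
                           const std::deque<vtkPixelExtent> &blockExts)
      {
        // whole texture written as legacy image data (cell array named "tex")
        vtkTextureIO::Write("lic_debug.vtk", licTexture);

        // per-block subsets written as a multiblock dataset via the XML writer
        vtkTextureIO::Write("lic_blocks.vtm", licTexture, blockExts);
      }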
diff --git a/Rendering/LOD/Testing/Data/Baseline/TestQuadricLODActor.png.md5 b/Rendering/LOD/Testing/Data/Baseline/TestQuadricLODActor.png.md5
new file mode 100644
index 0000000..2bce17f
--- /dev/null
+++ b/Rendering/LOD/Testing/Data/Baseline/TestQuadricLODActor.png.md5
@@ -0,0 +1 @@
+824af40f73075ad68a2defbae933524f
diff --git a/Rendering/LOD/Testing/Python/CMakeLists.txt b/Rendering/LOD/Testing/Python/CMakeLists.txt
index bbc108b..2650423 100644
--- a/Rendering/LOD/Testing/Python/CMakeLists.txt
+++ b/Rendering/LOD/Testing/Python/CMakeLists.txt
@@ -1 +1 @@
-add_test_python(TestQuadricLODActor.py Rendering)
+vtk_add_test_python(TestQuadricLODActor.py)
diff --git a/Rendering/LOD/Testing/Tcl/CMakeLists.txt b/Rendering/LOD/Testing/Tcl/CMakeLists.txt
index b8a3048..bd6f467 100644
--- a/Rendering/LOD/Testing/Tcl/CMakeLists.txt
+++ b/Rendering/LOD/Testing/Tcl/CMakeLists.txt
@@ -1 +1 @@
-add_test_tcl(TestQuadricLODActor Hybrid)
+vtk_add_test_tcl(TestQuadricLODActor)
diff --git a/Rendering/LOD/vtkLODActor.cxx b/Rendering/LOD/vtkLODActor.cxx
index 50be2ef..08af6aa 100644
--- a/Rendering/LOD/vtkLODActor.cxx
+++ b/Rendering/LOD/vtkLODActor.cxx
@@ -355,23 +355,25 @@ void vtkLODActor::UpdateOwnLODs()
 // (number two and three)
 void vtkLODActor::DeleteOwnLODs()
 {
-  if (!this->MediumMapper)
+  // remove the mappers from the LOD collection
+  if (this->LowMapper)
     {
-    return;
+    this->LODMappers->RemoveItem(this->LowMapper);
+    this->LowMapper->Delete();
+    this->LowMapper = NULL;
     }
 
-  // remove the mappers from the LOD collection
-  this->LODMappers->RemoveItem(this->LowMapper);
-  this->LODMappers->RemoveItem(this->MediumMapper);
+  if (this->MediumMapper)
+    {
+    this->LODMappers->RemoveItem(this->MediumMapper);
+    this->MediumMapper->Delete();
+    this->MediumMapper = NULL;
+    }
 
   // delete the filters used to create the LODs ...
   // The NULL check should not be necessary, but for sanity ...
   this->SetLowResFilter(NULL);
   this->SetMediumResFilter(NULL);
-  this->LowMapper->Delete();
-  this->LowMapper = NULL;
-  this->MediumMapper->Delete();
-  this->MediumMapper = NULL;
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/Label/Testing/Data/Baseline/labeledContours.png.md5 b/Rendering/Label/Testing/Data/Baseline/labeledContours.png.md5
new file mode 100644
index 0000000..de32b25
--- /dev/null
+++ b/Rendering/Label/Testing/Data/Baseline/labeledContours.png.md5
@@ -0,0 +1 @@
+8ff6bc1479ad9c62fcab0e7b2e419700
diff --git a/Rendering/Label/Testing/Data/Baseline/labeledContours_1.png.md5 b/Rendering/Label/Testing/Data/Baseline/labeledContours_1.png.md5
new file mode 100644
index 0000000..fbe8b0b
--- /dev/null
+++ b/Rendering/Label/Testing/Data/Baseline/labeledContours_1.png.md5
@@ -0,0 +1 @@
+29e09c177d842057b9c97b27d1831c84
diff --git a/Rendering/Label/Testing/Data/Baseline/labeledContours_2.png.md5 b/Rendering/Label/Testing/Data/Baseline/labeledContours_2.png.md5
new file mode 100644
index 0000000..5c842cd
--- /dev/null
+++ b/Rendering/Label/Testing/Data/Baseline/labeledContours_2.png.md5
@@ -0,0 +1 @@
+76299d6924fee495ec32ced46967addf
diff --git a/Rendering/Label/Testing/Data/Baseline/labeledContours_3.png.md5 b/Rendering/Label/Testing/Data/Baseline/labeledContours_3.png.md5
new file mode 100644
index 0000000..7d002bb
--- /dev/null
+++ b/Rendering/Label/Testing/Data/Baseline/labeledContours_3.png.md5
@@ -0,0 +1 @@
+14c5dee3dc8f2b9eac89f5d8e273d128
diff --git a/Rendering/Label/Testing/Data/Baseline/labeledMesh.png.md5 b/Rendering/Label/Testing/Data/Baseline/labeledMesh.png.md5
new file mode 100644
index 0000000..927afdb
--- /dev/null
+++ b/Rendering/Label/Testing/Data/Baseline/labeledMesh.png.md5
@@ -0,0 +1 @@
+7341fffbf881c8d65d48f925106d6d59
diff --git a/Rendering/Label/Testing/Data/Baseline/labeledMesh_1.png.md5 b/Rendering/Label/Testing/Data/Baseline/labeledMesh_1.png.md5
new file mode 100644
index 0000000..b3c0593
--- /dev/null
+++ b/Rendering/Label/Testing/Data/Baseline/labeledMesh_1.png.md5
@@ -0,0 +1 @@
+0fe4cb4a42ccc7e6818914441ef722f4
diff --git a/Rendering/Label/Testing/Python/CMakeLists.txt b/Rendering/Label/Testing/Python/CMakeLists.txt
index 12cef23..3bc760a 100644
--- a/Rendering/Label/Testing/Python/CMakeLists.txt
+++ b/Rendering/Label/Testing/Python/CMakeLists.txt
@@ -1,2 +1,2 @@
-add_test_python(labeledContours.py Rendering)
-add_test_python(labeledMesh.py Rendering)
+vtk_add_test_python(labeledContours.py)
+vtk_add_test_python(labeledMesh.py)
diff --git a/Rendering/Label/Testing/Tcl/CMakeLists.txt b/Rendering/Label/Testing/Tcl/CMakeLists.txt
index cad1fc2..5d96d4d 100644
--- a/Rendering/Label/Testing/Tcl/CMakeLists.txt
+++ b/Rendering/Label/Testing/Tcl/CMakeLists.txt
@@ -1,5 +1,2 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(labeledContours Rendering)
-endif()
-
-add_test_tcl(labeledMesh Rendering)
+vtk_add_test_tcl(labeledContours)
+vtk_add_test_tcl(labeledMesh)
diff --git a/Rendering/Label/module.cmake b/Rendering/Label/module.cmake
index 75d8646..50a142d 100644
--- a/Rendering/Label/module.cmake
+++ b/Rendering/Label/module.cmake
@@ -3,4 +3,6 @@ vtk_module(vtkRenderingLabel
     Rendering
   DEPENDS
     vtkRenderingFreeType
+  PRIVATE_DEPENDS
+    vtkFiltersExtraction
   )
diff --git a/Rendering/Label/vtkLabeledDataMapper.cxx b/Rendering/Label/vtkLabeledDataMapper.cxx
index 2b46bd4..574fb0e 100644
--- a/Rendering/Label/vtkLabeledDataMapper.cxx
+++ b/Rendering/Label/vtkLabeledDataMapper.cxx
@@ -792,6 +792,13 @@ vtkLabeledDataMapper::GetMTime()
 }
 
 // ----------------------------------------------------------------------
+const char *vtkLabeledDataMapper::GetLabelText(int label)
+{
+  assert("label index range" && label >= 0 && label < this->NumberOfLabels);
+  return this->TextMappers[label]->GetInput();
+}
+
+// ----------------------------------------------------------------------
 void
 vtkLabeledDataMapper::SetFieldDataName(const char *arrayName)
 {
@@ -801,7 +808,7 @@ vtkLabeledDataMapper::SetFieldDataName(const char *arrayName)
 
   if ( this->FieldDataName == NULL && arrayName == NULL) { return; }
   if ( this->FieldDataName && arrayName && (!strcmp(this->FieldDataName,arrayName))) { return;}
-  if (this->FieldDataName) { delete [] this->FieldDataName; }
+  delete [] this->FieldDataName;
   if (arrayName)
     {
     this->FieldDataName = new char[strlen(arrayName)+1];
diff --git a/Rendering/Label/vtkLabeledDataMapper.h b/Rendering/Label/vtkLabeledDataMapper.h
index 7e34d65..6139f0c 100644
--- a/Rendering/Label/vtkLabeledDataMapper.h
+++ b/Rendering/Label/vtkLabeledDataMapper.h
@@ -46,6 +46,8 @@
 #include "vtkRenderingLabelModule.h" // For export macro
 #include "vtkMapper2D.h"
 
+#include <cassert> // For assert macro
+
 class vtkDataObject;
 class vtkDataSet;
 class vtkTextMapper;
@@ -180,6 +182,24 @@ public:
   // Return the modified time for this object.
   virtual unsigned long GetMTime();
 
+  // Description:
+  // Return the number of labels rendered by the mapper.
+  vtkGetMacro(NumberOfLabels, int)
+
+  // Description:
+  // Return the position of the requested label.
+  void GetLabelPosition(int label, double pos[3])
+  {
+    assert("label index range" && label >= 0 && label < this->NumberOfLabels);
+    pos[0] = this->LabelPositions[3 * label];
+    pos[1] = this->LabelPositions[3 * label + 1];
+    pos[2] = this->LabelPositions[3 * label + 2];
+  }
+
+  // Description:
+  // Return the text for the requested label.
+  const char *GetLabelText(int label);
+
 protected:
   vtkLabeledDataMapper();
   ~vtkLabeledDataMapper();
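
    A hypothetical sketch showing how the new label accessors might be used to
    dump what the mapper laid out; the mapper is assumed to have already been
    rendered so that its labels are built.

      #include "vtkLabeledDataMapper.h"
      #include <cstdio>

      void PrintLabels(vtkLabeledDataMapper *mapper)
      {
        double pos[3];
        for (int i = 0; i < mapper->GetNumberOfLabels(); ++i)
          {
          // position and text of the i'th label placed by the mapper
          mapper->GetLabelPosition(i, pos);
          std::printf("label %d: \"%s\" at (%g, %g, %g)\n",
                      i, mapper->GetLabelText(i), pos[0], pos[1], pos[2]);
          }
      }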
diff --git a/Rendering/Matplotlib/Testing/Cxx/CMakeLists.txt b/Rendering/Matplotlib/Testing/Cxx/CMakeLists.txt
index 52f47b8..fcda855 100644
--- a/Rendering/Matplotlib/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/Matplotlib/Testing/Cxx/CMakeLists.txt
@@ -1,20 +1,15 @@
-# Check that matplotlib is available:
-execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "import matplotlib"
-  RESULT_VARIABLE IMPORT_MATPLOTLIB_EXITCODE
-  OUTPUT_VARIABLE IMPORT_MATPLOTLIB_OUTPUT
-  ERROR_VARIABLE IMPORT_MATPLOTLIB_ERROR
-  )
-set(MATPLOTLIB_FOUND FALSE)
-if(${IMPORT_MATPLOTLIB_EXITCODE} EQUAL 0)
-  set(MATPLOTLIB_FOUND TRUE)
-endif()
+include(vtkGhostscript)
+
+# Check that matplotlib is available
+include(FindPythonModules)
+find_python_module(matplotlib MATPLOTLIB_FOUND)
 
 if(NOT MATPLOTLIB_FOUND)
   message(STATUS "Matplotlib not found! MathText rendering will not be available until it is installed. Disabling tests.")
 endif()
 
 if(MATPLOTLIB_FOUND)
-  if(VTK_GHOSTSCRIPT_EXECUTABLE AND VTK_DATA_ROOT)
+  if(VTK_GHOSTSCRIPT_EXECUTABLE)
     set(GL2PSTests
       TestGL2PSMathTextActor.cxx
       TestGL2PSMathTextActor3D.cxx
@@ -23,48 +18,21 @@ if(MATPLOTLIB_FOUND)
     )
   endif()
 
-  if(VTK_DATA_ROOT)
-    # add tests that require data
-    set(MyTests
-      TestContextMathTextImage.cxx
-      TestMathTextActor.cxx
-      TestMathTextActor3D.cxx
-      TestRenderString.cxx
-      TestStringToPath.cxx
-      TestIndexedLookupScalarBar.cxx
-      TestScalarBarCombinatorics.cxx
-      ${GL2PSTests}
-      )
-  endif()
+  # add tests that require data
+  vtk_add_test_cxx(
+    TestContextMathTextImage.cxx
+    TestMathTextActor.cxx
+    TestMathTextActor3D.cxx
+    TestRenderString.cxx
+    TestStringToPath.cxx
+    TestIndexedLookupScalarBar.cxx
+    TestScalarBarCombinatorics.cxx
+    )
 endif()
 
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
+vtk_add_test_cxx(${GL2PSTests})
 
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    if(${${TName}Error})
-      set(_error_threshold ${${TName}Error})
-    else()
-      set(_error_threshold 10)
-    endif()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Rendering/${TName}.png
-        -E ${_error_threshold})
-  else()
-    add_test(NAME MathText-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
 
 # The GL2PS tests produce postscript output, which cannot be compared using
 # the built-in image regression framework. The tests added above will create the
@@ -90,11 +58,12 @@ if(GL2PSTests)
           "${VTK_TEST_OUTPUT_DIR}/${TName}.ps"
     )
     # Image diff rasterized png with baseline
-    add_test(NAME ${vtk-module}Cxx-${TName}-VerifyRasterizedPNG
+    ExternalData_add_test(VTKData
+      NAME ${vtk-module}Cxx-${TName}-VerifyRasterizedPNG
       COMMAND vtkRenderingGL2PSCxxTests PNGCompare
-        -D ${VTK_DATA_ROOT}
+        -D ${VTK_TEST_DATA_DIR}
         -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Rendering/${TName}-rasterRef.png
+        -V DATA{../Data/Baseline/${TName}-rasterRef.png,:}
         --test-file ${VTK_TEST_OUTPUT_DIR}/${TName}-raster.png
     )
     set_tests_properties("${vtk-module}Cxx-${TName}-VerifyRasterizedPNG"
diff --git a/Rendering/Matplotlib/Testing/Cxx/TestScalarBarCombinatorics.cxx b/Rendering/Matplotlib/Testing/Cxx/TestScalarBarCombinatorics.cxx
index 95cc08f..64cabd3 100644
--- a/Rendering/Matplotlib/Testing/Cxx/TestScalarBarCombinatorics.cxx
+++ b/Rendering/Matplotlib/Testing/Cxx/TestScalarBarCombinatorics.cxx
@@ -51,15 +51,16 @@ struct vtkScalarBarTestCondition
   double Position2[2];
   int ProcessEvents;
   int Enabled;
+  int VTitleSeparation;
 } conditions[] = {
-  {"$T_1$", VTK_ORIENT_HORIZONTAL, vtkScalarBarActor::PrecedeScalarBar, 1, 1, 1, 0, {0.000, 0.015}, {0.400, 0.135}, 1, 1},
-  {"$T_2$", VTK_ORIENT_HORIZONTAL, vtkScalarBarActor::PrecedeScalarBar, 1, 0, 1, 1, {0.000, 0.230}, {0.400, 0.146}, 1, 1},
-  {"$T_3$", VTK_ORIENT_HORIZONTAL, vtkScalarBarActor::SucceedScalarBar, 1, 1, 1, 1, {0.000, 0.850}, {0.630, 0.154}, 1, 1},
-  {"$T_4$", VTK_ORIENT_VERTICAL,   vtkScalarBarActor::PrecedeScalarBar, 1, 1, 1, 0, {0.799, 0.032}, {0.061, 0.794}, 1, 1},
-  {"$T_5$", VTK_ORIENT_VERTICAL,   vtkScalarBarActor::PrecedeScalarBar, 1, 0, 1, 1, {0.893, 0.036}, {0.052, 0.752}, 1, 1},
-  {"$T_6$", VTK_ORIENT_VERTICAL,   vtkScalarBarActor::SucceedScalarBar, 1, 1, 1, 1, {0.792, 0.081}, {0.061, 0.617}, 1, 1},
-  {"$T_7$", VTK_ORIENT_VERTICAL,   vtkScalarBarActor::SucceedScalarBar, 1, 1, 0, 0, {0.646, 0.061}, {0.084, 0.714}, 1, 1},
-  {"$T_8$", VTK_ORIENT_HORIZONTAL, vtkScalarBarActor::SucceedScalarBar, 0, 1, 0, 1, {0.076, 0.535}, {0.313, 0.225}, 1, 1},
+  {"$T_1$", VTK_ORIENT_HORIZONTAL, vtkScalarBarActor::PrecedeScalarBar, 1, 1, 1, 0, {0.000, 0.015}, {0.400, 0.135}, 1, 1, 0},
+  {"$T_2$", VTK_ORIENT_HORIZONTAL, vtkScalarBarActor::PrecedeScalarBar, 1, 0, 1, 1, {0.000, 0.230}, {0.400, 0.146}, 1, 1, 0},
+  {"$T_3$", VTK_ORIENT_HORIZONTAL, vtkScalarBarActor::SucceedScalarBar, 1, 1, 1, 1, {0.000, 0.850}, {0.630, 0.154}, 1, 1, 5},
+  {"$T_4$", VTK_ORIENT_VERTICAL,   vtkScalarBarActor::PrecedeScalarBar, 1, 1, 1, 0, {0.799, 0.032}, {0.061, 0.794}, 1, 1, 5},
+  {"$T_5$", VTK_ORIENT_VERTICAL,   vtkScalarBarActor::PrecedeScalarBar, 1, 0, 1, 1, {0.893, 0.036}, {0.052, 0.752}, 1, 1, 0},
+  {"$T_6$", VTK_ORIENT_VERTICAL,   vtkScalarBarActor::SucceedScalarBar, 1, 1, 1, 1, {0.792, 0.081}, {0.061, 0.617}, 1, 1, 0},
+  {"$T_7$", VTK_ORIENT_VERTICAL,   vtkScalarBarActor::SucceedScalarBar, 1, 1, 0, 0, {0.646, 0.061}, {0.084, 0.714}, 1, 1, 0},
+  {"$T_8$", VTK_ORIENT_HORIZONTAL, vtkScalarBarActor::SucceedScalarBar, 0, 1, 0, 1, {0.076, 0.535}, {0.313, 0.225}, 1, 1, 0},
 };
 
 static vtkSmartPointer<vtkScalarBarActor> CreateScalarBar(
@@ -75,6 +76,7 @@ static vtkSmartPointer<vtkScalarBarActor> CreateScalarBar(
   sba->SetFixedAnnotationLeaderLineColor(cond.FixedAnnotationLeaderLineColor);
   sba->SetPosition(cond.Position[0], cond.Position[1]);
   sba->SetPosition2(cond.Position2[0], cond.Position2[1]);
+  sba->SetVerticalTitleSeparation(cond.VTitleSeparation);
   ren->AddActor(sba.GetPointer());
   return sba.GetPointer();
 }
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestContextMathTextImage.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestContextMathTextImage.png.md5
new file mode 100644
index 0000000..032aff8
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestContextMathTextImage.png.md5
@@ -0,0 +1 @@
+6a9f9f0da2b87650d8212438cfdc700c
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestContextMathTextImage_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestContextMathTextImage_1.png.md5
new file mode 100644
index 0000000..2452f87
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestContextMathTextImage_1.png.md5
@@ -0,0 +1 @@
+0eeb45376bad768844cda4c3f05904ca
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor-rasterRef.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor-rasterRef.png.md5
new file mode 100644
index 0000000..0ecdf63
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor-rasterRef.png.md5
@@ -0,0 +1 @@
+e126e01f33f0da04a62456cb1d595146
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor.png.md5
new file mode 100644
index 0000000..b872b56
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor.png.md5
@@ -0,0 +1 @@
+ee4bb8bc1d67fc22e235dbf7d4d52144
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D-rasterRef.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D-rasterRef.png.md5
new file mode 100644
index 0000000..27dc545
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D-rasterRef.png.md5
@@ -0,0 +1 @@
+f0bb03f6d4c7ce1f78a2e217ce39ff6a
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D.png.md5
new file mode 100644
index 0000000..7bdd138
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D.png.md5
@@ -0,0 +1 @@
+e209ad82039d1efdb0692b5b2a4794c9
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_1.png.md5
new file mode 100644
index 0000000..60e85d4
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_1.png.md5
@@ -0,0 +1 @@
+5f3f7d1e57b11fe2e876fa67c4045535
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_2.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_2.png.md5
new file mode 100644
index 0000000..f52cfbf
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_2.png.md5
@@ -0,0 +1 @@
+e14e2c18cc9118e33fb1ed8ec28b8fbb
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_3.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_3.png.md5
new file mode 100644
index 0000000..c8283ea
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor3D_3.png.md5
@@ -0,0 +1 @@
+e7951897cdd2e064aa26b47243ea1828
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor_1.png.md5
new file mode 100644
index 0000000..8e234d0
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextActor_1.png.md5
@@ -0,0 +1 @@
+b5653930f8437e0a777f968249f1ff1a
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput-rasterRef.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput-rasterRef.png.md5
new file mode 100644
index 0000000..e4c52d4
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput-rasterRef.png.md5
@@ -0,0 +1 @@
+f0aed2560f46d3940353e54090df9972
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput-rasterRef_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput-rasterRef_1.png.md5
new file mode 100644
index 0000000..6514005
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput-rasterRef_1.png.md5
@@ -0,0 +1 @@
+caf895781b1dd0674d56a414eca7c376
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput.png.md5
new file mode 100644
index 0000000..b57927d
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput.png.md5
@@ -0,0 +1 @@
+e06b8945fd3f5915205b6db3143627a9
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput_1.png.md5
new file mode 100644
index 0000000..4ec84d3
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextOutput_1.png.md5
@@ -0,0 +1 @@
+6320b4ae772781779c1a6de6a0292119
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling-rasterRef.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling-rasterRef.png.md5
new file mode 100644
index 0000000..90de431
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling-rasterRef.png.md5
@@ -0,0 +1 @@
+13d6b649388b234678eecc99a5ee4efb
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling.png.md5
new file mode 100644
index 0000000..e6613bb
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling.png.md5
@@ -0,0 +1 @@
+b93bee603f5cfacd8d660d60caf827ce
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling_1.png.md5
new file mode 100644
index 0000000..4d11f14
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestGL2PSMathTextScaling_1.png.md5
@@ -0,0 +1 @@
+1fe22de9a8ea880c55454272c2fed2c0
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar.png.md5
new file mode 100644
index 0000000..caf1e66
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar.png.md5
@@ -0,0 +1 @@
+d15f8d0ac2baf33a995218c352656abf
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar_1.png.md5
new file mode 100644
index 0000000..97cbbf4
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar_1.png.md5
@@ -0,0 +1 @@
+f0ff8f57c6f904df0e9bb88d5b098bfa
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar_2.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar_2.png.md5
new file mode 100644
index 0000000..1a9b015
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestIndexedLookupScalarBar_2.png.md5
@@ -0,0 +1 @@
+fe1729f054e624e8f57edaf674581287
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor.png.md5
new file mode 100644
index 0000000..e6d225a
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor.png.md5
@@ -0,0 +1 @@
+562c5ec2dc61c3329169b24704196cf7
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D.png.md5
new file mode 100644
index 0000000..7bdd138
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D.png.md5
@@ -0,0 +1 @@
+e209ad82039d1efdb0692b5b2a4794c9
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D_1.png.md5
new file mode 100644
index 0000000..60e85d4
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D_1.png.md5
@@ -0,0 +1 @@
+5f3f7d1e57b11fe2e876fa67c4045535
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D_2.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D_2.png.md5
new file mode 100644
index 0000000..d22bda5
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor3D_2.png.md5
@@ -0,0 +1 @@
+0bcbf6df9b4a1c0470d941e88300a2f8
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor_1.png.md5
new file mode 100644
index 0000000..8e234d0
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestMathTextActor_1.png.md5
@@ -0,0 +1 @@
+b5653930f8437e0a777f968249f1ff1a
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString.png.md5
new file mode 100644
index 0000000..639e029
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString.png.md5
@@ -0,0 +1 @@
+7e40598b9a72231a24cb5c4c3e7c28f3
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString_1.png.md5
new file mode 100644
index 0000000..288bff3
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString_1.png.md5
@@ -0,0 +1 @@
+26ab6aed69fd76ae6d0f4bf3da2c3b9c
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString_2.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString_2.png.md5
new file mode 100644
index 0000000..25b3184
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestRenderString_2.png.md5
@@ -0,0 +1 @@
+028b68b8ecad3ea321a49a59d57b85fa
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestScalarBarCombinatorics.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestScalarBarCombinatorics.png.md5
new file mode 100644
index 0000000..aebe0ce
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestScalarBarCombinatorics.png.md5
@@ -0,0 +1 @@
+54352f8a446dcabcb49b296a7795b0ab
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestStringToPath.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestStringToPath.png.md5
new file mode 100644
index 0000000..19aba05
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestStringToPath.png.md5
@@ -0,0 +1 @@
+53d703912c58eb98f7bb58f618056aba
diff --git a/Rendering/Matplotlib/Testing/Data/Baseline/TestStringToPath_1.png.md5 b/Rendering/Matplotlib/Testing/Data/Baseline/TestStringToPath_1.png.md5
new file mode 100644
index 0000000..be2f103
--- /dev/null
+++ b/Rendering/Matplotlib/Testing/Data/Baseline/TestStringToPath_1.png.md5
@@ -0,0 +1 @@
+ad06f114a6cdfe2a2732820f13f129aa
diff --git a/Rendering/OpenGL/CMakeLists.txt b/Rendering/OpenGL/CMakeLists.txt
index c2ae0c0..9db17b9 100644
--- a/Rendering/OpenGL/CMakeLists.txt
+++ b/Rendering/OpenGL/CMakeLists.txt
@@ -47,12 +47,13 @@ set(Module_SRCS
   vtkDefaultPass.cxx
   vtkDepthPeelingPass.cxx
   vtkFrameBufferObject.cxx
+  vtkFrameBufferObject2.cxx
+  vtkGLSLShaderDeviceAdapter2.cxx
   vtkGaussianBlurPass.cxx
   vtkGenericOpenGLRenderWindow.cxx
   vtkImageProcessingPass.cxx
   vtkLightingHelper.cxx
   vtkLightsPass.cxx
-  vtkLineIntegralConvolution2D.cxx
   vtkOpaquePass.cxx
   vtkOpenGLActor.cxx
   vtkOpenGLCamera.cxx
@@ -63,29 +64,34 @@ set(Module_SRCS
   vtkOpenGLExtensionManager.cxx
   vtkOpenGLGL2PSHelper.cxx
   vtkOpenGLGlyph3DMapper.cxx
+  vtkOpenGLHardwareSelector.cxx
   vtkOpenGLHardwareSupport.cxx
   vtkOpenGLImageMapper.cxx
   vtkOpenGLImageSliceMapper.cxx
   vtkOpenGLLight.cxx
+  vtkOpenGLLightMonitor.cxx
   vtkOpenGLLightingPainter.cxx
+  vtkOpenGLModelViewProjectionMonitor.cxx
   vtkOpenGLPainterDeviceAdapter.cxx
-  vtkOpenGLPolyDataMapper2D.cxx
   vtkOpenGLPolyDataMapper.cxx
+  vtkOpenGLPolyDataMapper2D.cxx
   vtkOpenGLProperty.cxx
-  vtkOpenGLRenderer.cxx
   vtkOpenGLRenderWindow.cxx
+  vtkOpenGLRenderer.cxx
   vtkOpenGLRepresentationPainter.cxx
   vtkOpenGLScalarsToColorsPainter.cxx
   vtkOpenGLState.cxx
   vtkOpenGLTexture.cxx
   vtkOverlayPass.cxx
   vtkPixelBufferObject.cxx
-  vtkRenderPassCollection.cxx
   vtkRenderPass.cxx
+  vtkRenderPassCollection.cxx
   vtkRenderState.cxx
+  vtkRenderbuffer.cxx
   vtkSequencePass.cxx
-  vtkShader2Collection.cxx
   vtkShader2.cxx
+  vtkShader2Collection.cxx
   vtkShaderProgram2.cxx
   vtkShadowMapBakerPass.cxx
   vtkShadowMapPass.cxx
@@ -96,10 +102,6 @@ set(Module_SRCS
   vtkUniformVariables.cxx
   vtkVolumetricPass.cxx
   ${CMAKE_CURRENT_BINARY_DIR}/vtkgl.cxx
-  vtkGLSLShaderDeviceAdapter.cxx
-  vtkGLSLShaderDeviceAdapter2.cxx
-  vtkGLSLShader.cxx
-  vtkGLSLShaderProgram.cxx
   ${CMAKE_CURRENT_BINARY_DIR}/vtkRenderingOpenGLObjectFactory.cxx
   )
 
@@ -110,6 +112,7 @@ set(${vtk-module}_HDRS
   ${CMAKE_CURRENT_BINARY_DIR}/vtkTDxConfigure.h
   ${CMAKE_CURRENT_BINARY_DIR}/vtkRenderingOpenGLConfigure.h
   ${CMAKE_CURRENT_BINARY_DIR}/vtkgl.h
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkOpenGLError.h
   )
 
 set_source_files_properties(
@@ -232,9 +235,6 @@ set(shader_files
   vtkGaussianBlurPassShader_fs.glsl
   vtkLighting_s.glsl # s not vs or fs because it can be used in both.
   vtkLightingHelper_s.glsl # s not vs or fs because it can be used in both.
-  vtkLineIntegralConvolution2D_fs.glsl
-  vtkLineIntegralConvolution2D_fs1.glsl
-  vtkLineIntegralConvolution2D_fs2.glsl
   vtkOpenGLRenderer_PeelingFS.glsl
   vtkOpenGLPropertyDefaultPropFunc_fs.glsl
   vtkOpenGLPropertyDefaultPropFunc_vs.glsl
@@ -278,6 +278,7 @@ set(opengl_overrides
   CompositePainter
   DisplayListPainter
   Glyph3DMapper
+  HardwareSelector
   ImageSliceMapper
   Light
   LightingPainter
@@ -426,16 +427,39 @@ set(${vtk-module}_EXPORT_CODE "#include \"vtkRenderingOpenGLConfigure.h\"")
 # automatically.
 set(${vtk-module}_INCLUDE_DIRS "${OPENGL_INCLUDE_DIR}")
 
+# Detect and clear OpenGL errors. If this is not done, other classes/user code
+# may detect VTK's OpenGL errors and vice versa.
+set(VTK_REPORT_OPENGL_ERRORS ON CACHE BOOL "Enable OpenGL error check and report")
+mark_as_advanced(VTK_REPORT_OPENGL_ERRORS)
+configure_file(
+    ${CMAKE_CURRENT_SOURCE_DIR}/vtkOpenGLError.h.in
+    ${CMAKE_CURRENT_BINARY_DIR}/vtkOpenGLError.h)
+
+# For testing new driver releases
+set(VTK_IGNORE_GLDRIVER_BUGS
+    OFF CACHE BOOL
+   "Enable buggy OpenGL drivers for testing.")
+mark_as_advanced(VTK_IGNORE_GLDRIVER_BUGS)
+if (VTK_IGNORE_GLDRIVER_BUGS)
+  set_property(SOURCE
+     vtkOpenGLExtensionManager.cxx
+     APPEND PROPERTY
+     COMPILE_DEFINITIONS
+     VTK_IGNORE_GLDRIVER_BUGS)
+endif()
+
 vtk_module_library(vtkRenderingOpenGL ${Module_SRCS})
-target_link_libraries(vtkRenderingOpenGL ${OPENGL_LIBRARIES} ${extra_libs})
+target_link_libraries(vtkRenderingOpenGL
+  LINK_PUBLIC ${OPENGL_LIBRARIES} ${extra_libs})
 if(VTK_USE_X)
-  target_link_libraries(vtkRenderingOpenGL ${X11_LIBRARIES} ${X11_Xt_LIB})
+  target_link_libraries(vtkRenderingOpenGL
+    LINK_PUBLIC ${X11_LIBRARIES} ${X11_Xt_LIB})
 elseif(VTK_USE_COCOA)
-  target_link_libraries(vtkRenderingOpenGL "-framework Cocoa")
+  target_link_libraries(vtkRenderingOpenGL LINK_PUBLIC "-framework Cocoa")
 elseif(VTK_USE_CARBON)
-  target_link_libraries(vtkRenderingOpenGL "-framework Carbon")
+  target_link_libraries(vtkRenderingOpenGL LINK_PUBLIC "-framework Carbon")
 endif()
 
 if(VTK_OPENGL_HAS_OSMESA)
-  target_link_libraries(vtkRenderingOpenGL ${OSMESA_LIBRARY})
+  target_link_libraries(vtkRenderingOpenGL LINK_PUBLIC ${OSMESA_LIBRARY})
 endif()
diff --git a/Rendering/OpenGL/Testing/Cxx/CMakeLists.txt b/Rendering/OpenGL/Testing/Cxx/CMakeLists.txt
index 8aa951e..09680f5 100644
--- a/Rendering/OpenGL/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/OpenGL/Testing/Cxx/CMakeLists.txt
@@ -1,4 +1,4 @@
-SET(RenderingTests
+vtk_add_test_cxx(NO_DATA NO_VALID
   otherCoordinate.cxx
   TestPriorityStreaming.cxx
   )
@@ -9,51 +9,63 @@ INCLUDE_DIRECTORIES(
 )
 
 # For tests that actually use a vtkRenderWindow
-SET(RenderingTestsWithArguments
+vtk_add_test_cxx(
   LoadOpenGLExtension.cxx
   TestActorLightingFlag.cxx
   TestAnimationScene.cxx
   TestBackfaceCulling.cxx
   TestBlurAndSobelPasses.cxx
   TestDynamic2DLabelMapper.cxx
-  TestFBO.cxx
+  TestFBO.cxx,NO_VALID
   TestFollowerPicking.cxx
   TestGaussianBlurPass.cxx
   TestGlyph3DMapper.cxx
   TestGlyph3DMapperMasking.cxx
   TestGlyph3DMapperOrientationArray.cxx
   TestGlyph3DMapperPicking.cxx
-  TestGPUInfo.cxx
+  TestGPUInfo.cxx,NO_VALID
   TestGradientBackground.cxx
   TestHomogeneousTransformOfActor.cxx
+  TestImageResliceMapperAlpha.cxx
+  TestImageResliceMapperBackground.cxx
+  TestImageResliceMapperInterpolation.cxx
+  TestImageResliceMapperOffAxis.cxx
+  TestImageResliceMapperOrient3D.cxx
+  TestImageResliceMapperSlab.cxx
+  TestImageSliceMapperAlpha.cxx
+  TestImageSliceMapperBackground.cxx
+  TestImageSliceMapperOrient2D.cxx
+  TestImageSliceMapperOrient3D.cxx
+  TestImageSliceMapperInterpolation.cxx
   TestImageStack.cxx
-  TestInteractorStyleImageProperty.cxx
-  TestInteractorTimers.cxx
+  TestInteractorStyleImageProperty.cxx,NO_VALID
+  TestInteractorTimers.cxx,NO_VALID
   TestLabelPlacer.cxx
   TestLabelPlacer2D.cxx
   TestLabelPlacerCoincidentPoints.cxx
   TestLabelPlacementMapper2D.cxx
   TestLabelPlacementMapperCoincidentPoints.cxx
   TestLightActor.cxx
-  TestManyActors.cxx
+  TestLODActor.cxx,NO_VALID
+  TestManyActors.cxx,NO_VALID
+  TestMonitors.cxx,NO_VALID
   TestOffAxisStereo.cxx
   TestOrderedTriangulator.cxx
   TestOpacity.cxx
   TestOpenGLPolyDataMapper.cxx
   TestOSConeCxx.cxx
   TestPolygonSelection.cxx
-  TestPOVExporter.cxx
+  TestPOVExporter.cxx,NO_VALID
   TestResetCameraVerticalAspectRatio.cxx
   TestResetCameraVerticalAspectRatioParallel.cxx
   TestSetImageOrientation.cxx
   TestSobelGradientMagnitudePass.cxx
-  TestShadowMapPass.cxx
   TestTextActorAlphaBlending.cxx
   TestTextActorDepthPeeling.cxx
   TestTextActor3DAlphaBlending.cxx
   TestTextActor3DDepthPeeling.cxx
   TestTexturedBackground.cxx
-  TestTextureSize.cxx
+  TestTextureSize.cxx,NO_VALID
   TestTDx.cxx
   TestTilingCxx.cxx
   TestTransformCoordinateUseDouble.cxx
@@ -63,132 +75,73 @@ SET(RenderingTestsWithArguments
   TestTranslucentLUTTextureAlphaBlending.cxx
   TestTranslucentLUTTextureDepthPeeling.cxx
   )
-
-if(APPLE)
-  list(REMOVE_ITEM RenderingTestsWithArguments TestShadowMapPass.cxx)
+if(NOT APPLE)
+  vtk_add_test_cxx(TestShadowMapPass.cxx)
 endif()
 
 if(WIN32 AND NOT VTK_USE_X)
-  set(RenderingTestsWithArguments
-    ${RenderingTestsWithArguments}
-    TestWin32OpenGLRenderWindow.cxx
-    )
+  vtk_add_test_cxx(TestWin32OpenGLRenderWindow.cxx NO_VALID)
 endif()
 
 include_directories(${OPENGL_INCLUDE_DIR})
 
-if(VTK_DATA_ROOT)
-  set(RenderingTestsWithArguments
-    ${RenderingTestsWithArguments}
-    TestAreaSelections.cxx
-    TestGlyph3DMapperArrow.cxx
-    TestLODActor.cxx
-    TestMultiblockDisplayProperties.cxx
-    TestMultiTexturing.cxx
-    TestMultiTexturingTransform.cxx
-    TestImageResliceMapperAlpha.cxx
-    TestImageResliceMapperBackground.cxx
-    TestImageResliceMapperBorder.cxx
-    TestImageResliceMapperInterpolation.cxx
-    TestImageResliceMapperOffAxis.cxx
-    TestImageResliceMapperOrient3D.cxx
-    TestImageResliceMapperSlab.cxx
-    TestImageSliceMapperAlpha.cxx
-    TestImageSliceMapperBackground.cxx
-    TestImageSliceMapperBorder.cxx
-    TestImageSliceMapperOrient2D.cxx
-    TestImageSliceMapperOrient3D.cxx
-    TestImageSliceMapperInterpolation.cxx
-    TestScalarBar.cxx
-    TestScenePicker.cxx
-    TestTextureRGBA.cxx
-    TestTextureRGBADepthPeeling.cxx
-    TestTranslucentImageActorAlphaBlending.cxx
-    TestTranslucentImageActorDepthPeeling.cxx
-    TestTStripsColorsTCoords.cxx
-    TestTStripsNormalsColorsTCoords.cxx
-    TestTStripsNormalsTCoords.cxx
-    TestTStripsTCoords.cxx
-    )
-endif()
-
-if(VTK_USE_DISPLAY AND VTK_DATA_ROOT)
-  set(RenderingTestsWithArguments
-    ${RenderingTestsWithArguments}
-    RenderNonFinite.cxx
-    SurfacePlusEdges.cxx
-    TestLabelPlacementMapper.cxx
-    )
-endif()
+vtk_add_test_cxx(
+  TestAreaSelections.cxx
+  TestGlyph3DMapperArrow.cxx
+  TestMultiblockDisplayProperties.cxx
+  TestMultiTexturing.cxx
+  TestMultiTexturingTransform.cxx
+  TestImageSliceMapperBorder.cxx
+  TestImageResliceMapperBorder.cxx
+  TestScalarBar.cxx
+  TestScenePicker.cxx,NO_VALID
+  TestTextureRGBA.cxx
+  TestTextureRGBADepthPeeling.cxx
+  TestTranslucentImageActorAlphaBlending.cxx
+  TestTranslucentImageActorDepthPeeling.cxx
+  TestTStripsColorsTCoords.cxx
+  TestTStripsNormalsColorsTCoords.cxx
+  TestTStripsNormalsTCoords.cxx
+  TestTStripsTCoords.cxx
+  RenderNonFinite.cxx
+  SurfacePlusEdges.cxx
+  TestLabelPlacementMapper.cxx
+  )
 
-set(VTK_USE_GLSL_SHADERS TRUE)
+set(VTK_USE_GLSL_SHADERS FALSE)
 if(VTK_USE_GLSL_SHADERS)
   # Tests testing GLSL Shaders.
-  set(RenderingTestsWithArguments
-    ${RenderingTestsWithArguments}
+  vtk_add_test_cxx(
     TestGenericVertexAttributesGLSLCxx.cxx
     TestGenericVertexAttributesGLSLAlphaBlending.cxx
     TestGenericVertexAttributesGLSLDepthPeelingPass.cxx
     )
 endif()
 
-create_test_sourcelist(Tests ${vtk-module}CxxTests.cxx
-  ${RenderingTests};${RenderingTestsWithArguments}
-  EXTRA_INCLUDE vtkTestDriver.h
-  )
+vtk_test_cxx_executable(${vtk-module}CxxTests)
 
 set(TEST_FBO_IMPLEMENTATION_EXE
   TestFBOImplementation)
 configure_file(${CMAKE_CURRENT_SOURCE_DIR}/TestFBOInclude.h.in
   ${CMAKE_CURRENT_BINARY_DIR}/TestFBOInclude.h
-  @ONLY IMMEDIATE ESCAPE_QUOTES)
+  @ONLY ESCAPE_QUOTES)
 
 # This is used by TestFBO. Using a separate process makes it possible to
 # prevent issues such as segfaulting OpenGL implementations from affecting
 # the test.
 vtk_module_test_executable(TestFBOImplementation
   TestFBOImplementation.cxx)
-target_link_libraries(TestFBOImplementation
+target_link_libraries(TestFBOImplementation LINK_PRIVATE
   vtkRenderingOpenGL ${OPENGL_gl_LIBRARY})
 
 if(VTK_OPENGL_HAS_OSMESA AND OSMESA_LIBRARY)
-  target_link_libraries(TestFBOImplementation ${OSMESA_LIBRARY})
+  target_link_libraries(TestFBOImplementation LINK_PRIVATE ${OSMESA_LIBRARY})
 endif()
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
 if(VTK_OPENGL_HAS_OSMESA AND OSMESA_LIBRARY)
-  target_link_libraries(${vtk-module}CxxTests ${OSMESA_LIBRARY})
+  target_link_libraries(${vtk-module}CxxTests LINK_PRIVATE ${OSMESA_LIBRARY})
 endif()
 
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun ${vtk-module}CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${RenderingTests})
-  get_filename_component(TName ${test} NAME_WE)
-  add_test(NAME ${vtk-module}Cxx-${TName}
-    COMMAND ${vtk-module}CxxTests ${TName}
-      -T ${VTK_TEST_OUTPUT_DIR}
-    )
-endforeach()
-
-foreach(test ${RenderingTestsWithArguments})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/Rendering/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -T ${VTK_TEST_OUTPUT_DIR}
-      )
-  endif()
-endforeach()
-
 # Add other odd tests or executables
 foreach(exe
   TimeRenderer
@@ -202,6 +155,7 @@ foreach(exe
     vtkInteractionStyle
     vtkImagingSources
     vtkImagingGeneral
+    vtkFiltersSources
     vtkIOCore
     ${OPENGL_gl_LIBRARY})
   if(APPLE)
@@ -221,4 +175,4 @@ endif()
 set_tests_properties(${vtk-module}Cxx-TestOpacity
   PROPERTIES TIMEOUT 70)
 set_tests_properties(${vtk-module}Cxx-TestTextureSize
-  PROPERTIES TIMEOUT 90)
+  PROPERTIES TIMEOUT 300)
diff --git a/Rendering/OpenGL/Testing/Cxx/LoadOpenGLExtension.cxx b/Rendering/OpenGL/Testing/Cxx/LoadOpenGLExtension.cxx
index f044c00..bf200d8 100644
--- a/Rendering/OpenGL/Testing/Cxx/LoadOpenGLExtension.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/LoadOpenGLExtension.cxx
@@ -40,6 +40,7 @@
 #include "vtkRegressionTestImage.h"
 #include "vtkOpenGLExtensionManager.h"
 #include "vtkgl.h"
+#include "vtkOpenGLError.h"
 #include "vtkTextActor.h"
 #include "vtkTextProperty.h"
 
@@ -61,6 +62,8 @@ static void ImageCallback(vtkObject *__renwin, unsigned long, void *, void *)
     }
   inImageCallback = 1;
 
+  vtkOpenGLClearErrorMacro();
+
   cout << "In ImageCallback" << endl;
 
   vtkRenderWindow *renwin = static_cast<vtkRenderWindow *>(__renwin);
@@ -84,6 +87,8 @@ static void ImageCallback(vtkObject *__renwin, unsigned long, void *, void *)
   renwin->SwapBuffersOff();
 
   inImageCallback = 0;
+
+  vtkOpenGLStaticCheckErrorMacro("failed after ImageCallback");
 }
 
 int LoadOpenGLExtension(int argc, char *argv[])
@@ -118,6 +123,25 @@ int LoadOpenGLExtension(int argc, char *argv[])
   cout << "GL_VERSION: " << (gl_version ? gl_version : "(null)") << endl;
   cout << "GL_RENDERER: " << (gl_renderer ? gl_renderer : "(null)") << endl;
 
+  extensions->Update();
+  cout
+    << endl
+    << "DriverGLVersion = " << extensions->GetDriverGLVersion() << endl
+    << "DriverGLVendor = " << extensions->GetDriverGLVendor() << endl
+    << "DriverGLRenderer = " << extensions->GetDriverGLRenderer() << endl
+    << "DriverGLVersionMajor = " << extensions->GetDriverGLVersionMajor() << endl
+    << "DriverGLVersionMinor = " << extensions->GetDriverGLVersionMinor() << endl
+    << "DriverGLVersionPatch = " << extensions->GetDriverGLVersionPatch() << endl
+    << "DriverVersionMajor = " << extensions->GetDriverVersionMajor() << endl
+    << "DriverVersionMinor = " << extensions->GetDriverVersionMinor() << endl
+    << "DriverVersionPatch = " << extensions->GetDriverVersionPatch() << endl
+    << "DriverIsATI = " << extensions->DriverIsATI() << endl
+    << "DriverIsNvidia = " << extensions->DriverIsNvidia() << endl
+    << "DriverIsIntel = " << extensions->DriverIsIntel() << endl
+    << "DriverIsMesa = " << extensions->DriverIsMesa() << endl
+    << "DriverGLRendererIsOSMesa = " << extensions->DriverGLRendererIsOSMesa() << endl
+    << "DriverIsMicrosoft = " << extensions->DriverIsMicrosoft() << endl;
+
   cout << endl;
   renwin->Print(cout);
 
@@ -242,6 +266,8 @@ int LoadOpenGLExtension(int argc, char *argv[])
                                vtkgl::CONVOLUTION_BORDER_MODE,
                                  vtkgl::REPLICATE_BORDER);
 
+    vtkOpenGLStaticCheckErrorMacro("failed after setting up convolution");
+
     image = vtkUnsignedCharArray::New();
     vtkCallbackCommand *cbc = vtkCallbackCommand::New();
     cbc->SetCallback(ImageCallback);
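
The vtkOpenGLClearErrorMacro / vtkOpenGLStaticCheckErrorMacro pair added in
the hunks above follows a simple bracketing pattern; a minimal sketch,
assuming only vtkOpenGLError.h, vtkgl.h and a current OpenGL context (the
DrawSomething helper is hypothetical):

  #include "vtkOpenGLError.h" // vtkOpenGLClearErrorMacro, vtkOpenGLStaticCheckErrorMacro
  #include "vtkgl.h"          // pulls in the OpenGL headers

  // Discard any stale GL error before the calls of interest, then report
  // only the errors those calls raise, mirroring ImageCallback above.
  static void DrawSomething()
  {
    vtkOpenGLClearErrorMacro();

    glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
    glClear(GL_COLOR_BUFFER_BIT);

    vtkOpenGLStaticCheckErrorMacro("failed after DrawSomething");
  }
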
diff --git a/Rendering/OpenGL/Testing/Cxx/TestFBOImplementation.cxx b/Rendering/OpenGL/Testing/Cxx/TestFBOImplementation.cxx
index e9acd32..6777e87 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestFBOImplementation.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestFBOImplementation.cxx
@@ -31,13 +31,13 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkRegressionTestImage.h"
 #include "vtkOpenGLExtensionManager.h"
 #include "vtkgl.h"
-#include <assert.h>
+#include <cassert>
 
-bool ARB_texture_rectangle_supported=false;
-bool depth_texture_supported=false; // OpenGL 1.4 or GL_ARB_depth_texture
-bool srgb_texture_supported=false; // OpenGL 2.1 or GL_EXT_texture_sRGB
-bool float_texture_supported=false; // GL_ARB_texture_float
-bool integer_texture_supported=false; // GL_EXT_texture_integer (GeForce 8)
+static bool ARB_texture_rectangle_supported=false;
+static bool depth_texture_supported=false; // OpenGL 1.4 or GL_ARB_depth_texture
+static bool srgb_texture_supported=false; // OpenGL 2.1 or GL_EXT_texture_sRGB
+static bool float_texture_supported=false; // GL_ARB_texture_float
+static bool integer_texture_supported=false; // GL_EXT_texture_integer (GeForce 8)
 
 // ----------------------------------------------------------------------------
 // Description:
@@ -1176,194 +1176,116 @@ void QueryTexture3D()
 
 }
 
-int textureSizes[2][2]={{64,32}, // spec says min of max is 64.
-                        {63,32}};
-
-int textureFormat[13]={GL_COLOR_INDEX,
-                       GL_STENCIL_INDEX,
-                       GL_DEPTH_COMPONENT,
-                       GL_RED,
-                       GL_GREEN,
-                       GL_BLUE,
-                       GL_ALPHA,
-                       GL_RGB,
-                       GL_RGBA,
-                       vtkgl::BGR,
-                       vtkgl::BGRA,
-                       GL_LUMINANCE,
-                       GL_LUMINANCE_ALPHA};
-
-int textureBaseInternalFormats[7]={GL_ALPHA,
-                                   GL_DEPTH_COMPONENT,
-                                   GL_LUMINANCE,
-                                   GL_LUMINANCE_ALPHA,
-                                   GL_INTENSITY,
-                                   GL_RGB,
-                                   GL_RGBA};
-
-int textureSizedInternalFormats[87]={GL_ALPHA4,
-                                     GL_ALPHA8,
-                                     GL_ALPHA12,
-                                     GL_ALPHA16,
-                                     vtkgl::DEPTH_COMPONENT16, //4
-                                     vtkgl::DEPTH_COMPONENT24, //5
-                                     vtkgl::DEPTH_COMPONENT32, //6
-                                     GL_LUMINANCE4,
-                                     GL_LUMINANCE8,
-                                     GL_LUMINANCE12,
-                                     GL_LUMINANCE16,
-                                     GL_LUMINANCE4_ALPHA4,
-                                     GL_LUMINANCE6_ALPHA2,
-                                     GL_LUMINANCE8_ALPHA8,
-                                     GL_LUMINANCE12_ALPHA4,
-                                     GL_LUMINANCE12_ALPHA12,
-                                     GL_LUMINANCE16_ALPHA16,
-                                     GL_INTENSITY4,
-                                     GL_INTENSITY8,
-                                     GL_INTENSITY12,
-                                     GL_INTENSITY16,
-                                     GL_R3_G3_B2,
-                                     GL_RGB4,
-                                     GL_RGB5,
-                                     GL_RGB8,
-                                     GL_RGB10,
-                                     GL_RGB12,
-                                     GL_RGB16,
-                                     GL_RGBA2,
-                                     GL_RGBA4,
-                                     GL_RGB5_A1,
-                                     GL_RGBA8,
-                                     GL_RGB10_A2,
-                                     GL_RGBA12,
-                                     GL_RGBA16,
-                                     vtkgl::SRGB8, //35
-                                     vtkgl::SRGB8_ALPHA8,
-                                     vtkgl::SLUMINANCE8,
-                                     vtkgl::SLUMINANCE8_ALPHA8, // idx=38,count=39
-                                     vtkgl::RGBA32F_ARB,
-                                     vtkgl::RGB32F_ARB,
-                                     vtkgl::ALPHA32F_ARB,
-                                     vtkgl::INTENSITY32F_ARB,
-                                     vtkgl::LUMINANCE32F_ARB,
-                                     vtkgl::LUMINANCE_ALPHA32F_ARB,
-                                     vtkgl::RGBA16F_ARB,
-                                     vtkgl::RGB16F_ARB,
-                                     vtkgl::ALPHA16F_ARB,
-                                     vtkgl::INTENSITY16F_ARB,
-                                     vtkgl::LUMINANCE16F_ARB,
-                                     vtkgl::LUMINANCE_ALPHA16F_ARB,// i=50,c=51
-                                     vtkgl::ALPHA8I_EXT,
-                                     vtkgl::ALPHA8UI_EXT,
-                                     vtkgl::ALPHA16I_EXT,
-                                     vtkgl::ALPHA16UI_EXT,
-                                     vtkgl::ALPHA32I_EXT,
-                                     vtkgl::ALPHA32UI_EXT,
-                                     vtkgl::LUMINANCE8I_EXT,
-                                     vtkgl::LUMINANCE8UI_EXT,
-                                     vtkgl::LUMINANCE16I_EXT,
-                                     vtkgl::LUMINANCE16UI_EXT,
-                                     vtkgl::LUMINANCE32I_EXT,
-                                     vtkgl::LUMINANCE32UI_EXT,
-                                     vtkgl::LUMINANCE_ALPHA8I_EXT,
-                                     vtkgl::LUMINANCE_ALPHA8UI_EXT,
-                                     vtkgl::LUMINANCE_ALPHA16I_EXT,
-                                     vtkgl::LUMINANCE_ALPHA16UI_EXT,
-                                     vtkgl::LUMINANCE_ALPHA32I_EXT,
-                                     vtkgl::LUMINANCE_ALPHA32UI_EXT,
-                                     vtkgl::INTENSITY8I_EXT,
-                                     vtkgl::INTENSITY8UI_EXT,
-                                     vtkgl::INTENSITY16I_EXT,
-                                     vtkgl::INTENSITY16UI_EXT,
-                                     vtkgl::INTENSITY32I_EXT,
-                                     vtkgl::INTENSITY32UI_EXT,
-                                     vtkgl::RGB8I_EXT,
-                                     vtkgl::RGB8UI_EXT,
-                                     vtkgl::RGB16I_EXT,
-                                     vtkgl::RGB16UI_EXT,
-                                     vtkgl::RGB32I_EXT,
-                                     vtkgl::RGB32UI_EXT,
-                                     vtkgl::RGBA8I_EXT,
-                                     vtkgl::RGBA8UI_EXT,
-                                     vtkgl::RGBA16I_EXT,
-                                     vtkgl::RGBA16UI_EXT,
-                                     vtkgl::RGBA32I_EXT,
-                                     vtkgl::RGBA32UI_EXT}; // i=86, c=87
+const int textureSizes[2][2]={{64,32}, // spec says min of max is 64.
+                              {63,32}};
+
+const int textureSizedInternalFormats[87]={GL_ALPHA4,
+                                           GL_ALPHA8,
+                                           GL_ALPHA12,
+                                           GL_ALPHA16,
+                                           vtkgl::DEPTH_COMPONENT16, //4
+                                           vtkgl::DEPTH_COMPONENT24, //5
+                                           vtkgl::DEPTH_COMPONENT32, //6
+                                           GL_LUMINANCE4,
+                                           GL_LUMINANCE8,
+                                           GL_LUMINANCE12,
+                                           GL_LUMINANCE16,
+                                           GL_LUMINANCE4_ALPHA4,
+                                           GL_LUMINANCE6_ALPHA2,
+                                           GL_LUMINANCE8_ALPHA8,
+                                           GL_LUMINANCE12_ALPHA4,
+                                           GL_LUMINANCE12_ALPHA12,
+                                           GL_LUMINANCE16_ALPHA16,
+                                           GL_INTENSITY4,
+                                           GL_INTENSITY8,
+                                           GL_INTENSITY12,
+                                           GL_INTENSITY16,
+                                           GL_R3_G3_B2,
+                                           GL_RGB4,
+                                           GL_RGB5,
+                                           GL_RGB8,
+                                           GL_RGB10,
+                                           GL_RGB12,
+                                           GL_RGB16,
+                                           GL_RGBA2,
+                                           GL_RGBA4,
+                                           GL_RGB5_A1,
+                                           GL_RGBA8,
+                                           GL_RGB10_A2,
+                                           GL_RGBA12,
+                                           GL_RGBA16,
+                                           vtkgl::SRGB8, //35
+                                           vtkgl::SRGB8_ALPHA8,
+                                           vtkgl::SLUMINANCE8,
+                                           vtkgl::SLUMINANCE8_ALPHA8, // idx=38,count=39
+                                           vtkgl::RGBA32F_ARB,
+                                           vtkgl::RGB32F_ARB,
+                                           vtkgl::ALPHA32F_ARB,
+                                           vtkgl::INTENSITY32F_ARB,
+                                           vtkgl::LUMINANCE32F_ARB,
+                                           vtkgl::LUMINANCE_ALPHA32F_ARB,
+                                           vtkgl::RGBA16F_ARB,
+                                           vtkgl::RGB16F_ARB,
+                                           vtkgl::ALPHA16F_ARB,
+                                           vtkgl::INTENSITY16F_ARB,
+                                           vtkgl::LUMINANCE16F_ARB,
+                                           vtkgl::LUMINANCE_ALPHA16F_ARB,// i=50,c=51
+                                           vtkgl::ALPHA8I_EXT,
+                                           vtkgl::ALPHA8UI_EXT,
+                                           vtkgl::ALPHA16I_EXT,
+                                           vtkgl::ALPHA16UI_EXT,
+                                           vtkgl::ALPHA32I_EXT,
+                                           vtkgl::ALPHA32UI_EXT,
+                                           vtkgl::LUMINANCE8I_EXT,
+                                           vtkgl::LUMINANCE8UI_EXT,
+                                           vtkgl::LUMINANCE16I_EXT,
+                                           vtkgl::LUMINANCE16UI_EXT,
+                                           vtkgl::LUMINANCE32I_EXT,
+                                           vtkgl::LUMINANCE32UI_EXT,
+                                           vtkgl::LUMINANCE_ALPHA8I_EXT,
+                                           vtkgl::LUMINANCE_ALPHA8UI_EXT,
+                                           vtkgl::LUMINANCE_ALPHA16I_EXT,
+                                           vtkgl::LUMINANCE_ALPHA16UI_EXT,
+                                           vtkgl::LUMINANCE_ALPHA32I_EXT,
+                                           vtkgl::LUMINANCE_ALPHA32UI_EXT,
+                                           vtkgl::INTENSITY8I_EXT,
+                                           vtkgl::INTENSITY8UI_EXT,
+                                           vtkgl::INTENSITY16I_EXT,
+                                           vtkgl::INTENSITY16UI_EXT,
+                                           vtkgl::INTENSITY32I_EXT,
+                                           vtkgl::INTENSITY32UI_EXT,
+                                           vtkgl::RGB8I_EXT,
+                                           vtkgl::RGB8UI_EXT,
+                                           vtkgl::RGB16I_EXT,
+                                           vtkgl::RGB16UI_EXT,
+                                           vtkgl::RGB32I_EXT,
+                                           vtkgl::RGB32UI_EXT,
+                                           vtkgl::RGBA8I_EXT,
+                                           vtkgl::RGBA8UI_EXT,
+                                           vtkgl::RGBA16I_EXT,
+                                           vtkgl::RGBA16UI_EXT,
+                                           vtkgl::RGBA32I_EXT,
+                                           vtkgl::RGBA32UI_EXT}; // i=86, c=87
 
 const int NumberOftextureSizedInternalFormats=87;
 
-int textureFormats[23]={GL_COLOR_INDEX,
-                        GL_STENCIL_INDEX,
-                        GL_DEPTH_COMPONENT,
-                        GL_RED,
-                        GL_GREEN,
-                        GL_BLUE,
-                        GL_ALPHA,
-                        GL_RGB,
-                        GL_RGBA,
-                        vtkgl::BGR,
-                        vtkgl::BGRA,
-                        GL_LUMINANCE,
-                        GL_LUMINANCE_ALPHA,
-                        vtkgl::RED_INTEGER_EXT,
-                        vtkgl::GREEN_INTEGER_EXT,
-                        vtkgl::BLUE_INTEGER_EXT,
-                        vtkgl::ALPHA_INTEGER_EXT,
-                        vtkgl::RGB_INTEGER_EXT,
-                        vtkgl::RGBA_INTEGER_EXT,
-                        vtkgl::BGR_INTEGER_EXT,
-                        vtkgl::BGRA_INTEGER_EXT,
-                        vtkgl::LUMINANCE_INTEGER_EXT,
-                        vtkgl::LUMINANCE_ALPHA_INTEGER_EXT};
+const GLenum textureTarget[2]={GL_TEXTURE_2D,
+                               vtkgl::TEXTURE_RECTANGLE_ARB};
 
-#if 0
-int textureFormat[7]={GL_ALPHA,
-                      GL_DEPTH_COMPONENT,
-                      GL_LUMINANCE,
-                      GL_LUMINANCE_ALPHA,
-                      GL_INTENSITY,
-                      GL_RGB,
-                      GL_RGBA //,
-};
-#endif
+const int textureProxyTarget[2]={GL_PROXY_TEXTURE_2D,
+                                 vtkgl::PROXY_TEXTURE_RECTANGLE_ARB};
 
-int textureType[]={GL_UNSIGNED_BYTE,
-                   GL_BITMAP,
-                   GL_BYTE,
-                   GL_UNSIGNED_SHORT,
-                   GL_SHORT,
-                   GL_UNSIGNED_INT,
-                   GL_INT,
-                   GL_FLOAT,
-                   vtkgl::UNSIGNED_BYTE_3_3_2,
-                   vtkgl::UNSIGNED_BYTE_2_3_3_REV,
-                   vtkgl::UNSIGNED_SHORT_5_6_5,
-                   vtkgl::UNSIGNED_SHORT_5_6_5_REV,
-                   vtkgl::UNSIGNED_SHORT_4_4_4_4,
-                   vtkgl::UNSIGNED_SHORT_4_4_4_4_REV,
-                   vtkgl::UNSIGNED_SHORT_5_5_5_1,
-                   vtkgl::UNSIGNED_SHORT_1_5_5_5_REV,
-                   vtkgl::UNSIGNED_INT_8_8_8_8,
-                   vtkgl::UNSIGNED_INT_8_8_8_8_REV,
-                   vtkgl::UNSIGNED_INT_10_10_10_2,
-                   vtkgl::UNSIGNED_INT_2_10_10_10_REV};
-
-GLenum textureTarget[2]={GL_TEXTURE_2D,
-                         vtkgl::TEXTURE_RECTANGLE_ARB};
-
-int textureProxyTarget[2]={GL_PROXY_TEXTURE_2D,
-                           vtkgl::PROXY_TEXTURE_RECTANGLE_ARB};
-
-int textureMinMag[2]={GL_NEAREST,GL_LINEAR};
+const int textureMinMag[2]={GL_NEAREST,GL_LINEAR};
 
 // OpenGL 1.2: vtkgl::CLAMP_TO_EDGE
 // OpenGL 1.3: vtkgl::CLAMP_TO_BORDER
 // OpenGL 1.4: vtkgl::MIRRORED_REPEAT
 
-int textureWrap[5]={GL_CLAMP,
-                    GL_REPEAT,
-                    vtkgl::CLAMP_TO_EDGE,
-                    vtkgl::CLAMP_TO_BORDER,
-                    vtkgl::MIRRORED_REPEAT};
+const int textureWrap[5]={GL_CLAMP,
+                          GL_REPEAT,
+                          vtkgl::CLAMP_TO_EDGE,
+                          vtkgl::CLAMP_TO_BORDER,
+                          vtkgl::MIRRORED_REPEAT};
 
 // GL_ARB_color_buffer_float
 // GL_ARB_half_float_pixel
@@ -2223,7 +2145,7 @@ void TestVisual(int multiSample,
   renwin->Delete();
 }
 
-int windowSize[2]={512,511};
+const int windowSize[2]={512,511};
 
 int main(int vtkNotUsed(argc), char *vtkNotUsed(argv)[])
 {
diff --git a/Rendering/OpenGL/Testing/Cxx/TestFollowerPicking.cxx b/Rendering/OpenGL/Testing/Cxx/TestFollowerPicking.cxx
index 8d7ad8e..8601857 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestFollowerPicking.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestFollowerPicking.cxx
@@ -64,7 +64,7 @@ public:
 };
 
 // -----------------------------------------------------------------------
-char PickFollowerLog[] =
+const char PickFollowerLog[] =
   "# StreamVersion 1\n"
   "RenderEvent 0 0 0 0 0 0 0\n"
   "EnterEvent 285 289 0 0 0 0 0\n"
diff --git a/Rendering/OpenGL/Testing/Cxx/TestGaussianBlurPass.cxx b/Rendering/OpenGL/Testing/Cxx/TestGaussianBlurPass.cxx
index 0e79541..52d6394 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestGaussianBlurPass.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestGaussianBlurPass.cxx
@@ -52,7 +52,7 @@
 
 // Make sure to have a valid OpenGL context current on the calling thread
 // before calling it. Defined in TestTranslucentLUTDepthPeelingPass.cxx.
-bool MesaHasVTKBug8135();
+bool MesaHasVTKBug8135(vtkRenderWindow *);
 
 int TestGaussianBlurPass(int argc, char* argv[])
 {
@@ -176,7 +176,7 @@ int TestGaussianBlurPass(int argc, char* argv[])
   renWin->Render();
 
   int retVal;
-  if(MesaHasVTKBug8135())
+  if(MesaHasVTKBug8135(renWin))
     {
     // Mesa will crash if version<7.3
     cout<<"This version of Mesa would crash. Skip the test."<<endl;
diff --git a/Rendering/OpenGL/Testing/Cxx/TestGenericVertexAttributesGLSLAlphaBlending.cxx b/Rendering/OpenGL/Testing/Cxx/TestGenericVertexAttributesGLSLAlphaBlending.cxx
index bb72bd4..5b0a159 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestGenericVertexAttributesGLSLAlphaBlending.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestGenericVertexAttributesGLSLAlphaBlending.cxx
@@ -34,7 +34,7 @@
 
 // Make sure to have a valid OpenGL context current on the calling thread
 // before calling it. Defined in TestTranslucentLUTDepthPeelingPass.cxx.
-bool MesaHasVTKBug8135();
+bool MesaHasVTKBug8135(vtkRenderWindow *);
 
 int TestGenericVertexAttributesGLSLAlphaBlending(int argc, char *argv[])
 {
@@ -94,7 +94,7 @@ int TestGenericVertexAttributesGLSLAlphaBlending(int argc, char *argv[])
   renWin->Render();
 
   int retVal;
-  if(MesaHasVTKBug8135())
+  if(MesaHasVTKBug8135(renWin))
     {
     // Mesa will crash if version<7.3
     cout<<"This version of Mesa would crash. Skip the test."<<endl;
diff --git a/Rendering/OpenGL/Testing/Cxx/TestGenericVertexAttributesGLSLDepthPeelingPass.cxx b/Rendering/OpenGL/Testing/Cxx/TestGenericVertexAttributesGLSLDepthPeelingPass.cxx
index 60ac52d..8c876a5 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestGenericVertexAttributesGLSLDepthPeelingPass.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestGenericVertexAttributesGLSLDepthPeelingPass.cxx
@@ -46,7 +46,7 @@
 
 // Make sure to have a valid OpenGL context current on the calling thread
 // before calling it. Defined in TestTranslucentLUTDepthPeelingPass.cxx.
-bool MesaHasVTKBug8135();
+bool MesaHasVTKBug8135(vtkRenderWindow *);
 
 int TestGenericVertexAttributesGLSLDepthPeelingPass(int argc, char *argv[])
 {
@@ -149,7 +149,7 @@ int TestGenericVertexAttributesGLSLDepthPeelingPass(int argc, char *argv[])
   renWin->Render();
 
   int retVal;
-  if(MesaHasVTKBug8135())
+  if(MesaHasVTKBug8135(renWin))
     {
     // Mesa will crash if version<7.3
     cout<<"This version of Mesa would crash. Skip the test."<<endl;
diff --git a/Rendering/OpenGL/Testing/Cxx/TestGlyph3DMapperPicking.cxx b/Rendering/OpenGL/Testing/Cxx/TestGlyph3DMapperPicking.cxx
index 9588b91..4e16d83 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestGlyph3DMapperPicking.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestGlyph3DMapperPicking.cxx
@@ -43,7 +43,7 @@
 #include "vtkGlyph3DMapper.h"
 #include "vtkSelection.h"
 #include "vtkSelectionNode.h"
-#include <assert.h>
+#include <cassert>
 
 vtkRenderer *renderer = NULL;
 
diff --git a/Rendering/OpenGL/Testing/Cxx/TestLightActor.cxx b/Rendering/OpenGL/Testing/Cxx/TestLightActor.cxx
index e000eae..d7ca9c6 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestLightActor.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestLightActor.cxx
@@ -54,7 +54,7 @@
 #include "vtkProperty.h"
 #include "vtkLight.h"
 #include "vtkLightCollection.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkFrustumSource.h"
 #include "vtkPlanes.h"
diff --git a/Rendering/OpenGL/Testing/Cxx/TestMonitors.cxx b/Rendering/OpenGL/Testing/Cxx/TestMonitors.cxx
new file mode 100644
index 0000000..a68623b
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Cxx/TestMonitors.cxx
@@ -0,0 +1,261 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestMonitors.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkSmartPointer.h"
+#include "vtkSphereSource.h"
+#include "vtkPolyDataMapper.h"
+#include "vtkActor.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkProperty.h"
+#include "vtkCamera.h"
+#include "vtkLight.h"
+#include "vtkOpenGLLightMonitor.h"
+#include "vtkOpenGLModelViewProjectionMonitor.h"
+#include "vtkBackgroundColorMonitor.h"
+#include "vtkLightingHelper.h"
+
+int TestMonitors(int argc, char* argv[])
+{
+  (void)argc;
+  (void)argv;
+
+  cerr << "CTEST_FULL_OUTPUT (Avoid ctest truncation of output)" << endl << endl;
+
+  // initialize a scene with a bunch of spheres
+  vtkSmartPointer<vtkSphereSource> sphere =
+    vtkSmartPointer<vtkSphereSource>::New();
+  sphere->SetThetaResolution(100);
+  sphere->SetPhiResolution(50);
+
+  vtkSmartPointer<vtkPolyDataMapper> sphereMapper =
+    vtkSmartPointer<vtkPolyDataMapper>::New();
+  sphereMapper->SetInputConnection(sphere->GetOutputPort());
+
+  vtkSmartPointer<vtkActor> sphere1 =
+    vtkSmartPointer<vtkActor>::New();
+  sphere1->SetMapper(sphereMapper);
+  sphere1->GetProperty()->SetColor(1,0,0);
+  sphere1->GetProperty()->SetAmbient(0.3);
+  sphere1->GetProperty()->SetDiffuse(0.0);
+  sphere1->GetProperty()->SetSpecular(1.0);
+  sphere1->GetProperty()->SetSpecularPower(5.0);
+
+  vtkSmartPointer<vtkActor> sphere2 =
+    vtkSmartPointer<vtkActor>::New();
+  sphere2->SetMapper(sphereMapper);
+  sphere2->GetProperty()->SetColor(1,0,0);
+  sphere2->GetProperty()->SetAmbient(0.3);
+  sphere2->GetProperty()->SetDiffuse(0.0);
+  sphere2->GetProperty()->SetSpecular(1.0);
+  sphere2->GetProperty()->SetSpecularPower(10.0);
+  sphere2->AddPosition(1.25,0,0);
+
+  vtkSmartPointer<vtkActor> sphere3 =
+    vtkSmartPointer<vtkActor>::New();
+  sphere3->SetMapper(sphereMapper);
+  sphere3->GetProperty()->SetColor(1,0,0);
+  sphere3->GetProperty()->SetAmbient(0.3);
+  sphere3->GetProperty()->SetDiffuse(0.0);
+  sphere3->GetProperty()->SetSpecular(1.0);
+  sphere3->GetProperty()->SetSpecularPower(20.0);
+  sphere3->AddPosition(2.5,0,0);
+
+  vtkSmartPointer<vtkActor> sphere4 =
+    vtkSmartPointer<vtkActor>::New();
+  sphere4->SetMapper(sphereMapper);
+  sphere4->GetProperty()->SetColor(1,0,0);
+  sphere4->GetProperty()->SetAmbient(0.3);
+  sphere4->GetProperty()->SetDiffuse(0.0);
+  sphere4->GetProperty()->SetSpecular(1.0);
+  sphere4->GetProperty()->SetSpecularPower(40.0);
+  sphere4->AddPosition(3.75,0,0);
+
+  vtkSmartPointer<vtkActor> sphere5 =
+    vtkSmartPointer<vtkActor>::New();
+  sphere5->SetMapper(sphereMapper);
+  sphere5->GetProperty()->SetColor(1,0,0);
+  sphere5->GetProperty()->SetAmbient(0.3);
+  sphere5->GetProperty()->SetDiffuse(0.0);
+  sphere5->GetProperty()->SetSpecular(0.5);
+  sphere5->GetProperty()->SetSpecularPower(5.0);
+  sphere5->AddPosition(0.0,1.25,0);
+
+  vtkSmartPointer<vtkActor> sphere6 =
+    vtkSmartPointer<vtkActor>::New();
+  sphere6->SetMapper(sphereMapper);
+  sphere6->GetProperty()->SetColor(1,0,0);
+  sphere6->GetProperty()->SetAmbient(0.3);
+  sphere6->GetProperty()->SetDiffuse(0.0);
+  sphere6->GetProperty()->SetSpecular(0.5);
+  sphere6->GetProperty()->SetSpecularPower(10.0);
+  sphere6->AddPosition(1.25,1.25,0);
+
+  vtkSmartPointer<vtkActor> sphere7 =
+    vtkSmartPointer<vtkActor>::New();
+  sphere7->SetMapper(sphereMapper);
+  sphere7->GetProperty()->SetColor(1,0,0);
+  sphere7->GetProperty()->SetAmbient(0.3);
+  sphere7->GetProperty()->SetDiffuse(0.0);
+  sphere7->GetProperty()->SetSpecular(0.5);
+  sphere7->GetProperty()->SetSpecularPower(20.0);
+  sphere7->AddPosition(2.5,1.25,0);
+
+  vtkSmartPointer<vtkActor> sphere8 =
+    vtkSmartPointer<vtkActor>::New();
+  sphere8->SetMapper(sphereMapper);
+  sphere8->GetProperty()->SetColor(1,0,0);
+  sphere8->GetProperty()->SetAmbient(0.3);
+  sphere8->GetProperty()->SetDiffuse(0.0);
+  sphere8->GetProperty()->SetSpecular(0.5);
+  sphere8->GetProperty()->SetSpecularPower(40.0);
+  sphere8->AddPosition(3.75,1.25,0);
+
+  vtkSmartPointer<vtkRenderer> ren1 =
+    vtkSmartPointer<vtkRenderer>::New();
+  vtkSmartPointer<vtkRenderWindow> renWin =
+    vtkSmartPointer<vtkRenderWindow>::New();
+  renWin->AddRenderer(ren1);
+
+  ren1->AddActor(sphere1);
+  ren1->AddActor(sphere2);
+  ren1->AddActor(sphere3);
+  ren1->AddActor(sphere4);
+  ren1->AddActor(sphere5);
+  ren1->AddActor(sphere6);
+  ren1->AddActor(sphere7);
+  ren1->AddActor(sphere8);
+
+  vtkSmartPointer<vtkLight> light =
+    vtkSmartPointer<vtkLight>::New();
+  ren1->AddLight(light);
+
+  vtkSmartPointer<vtkLight> light2 =
+    vtkSmartPointer<vtkLight>::New();
+  ren1->AddLight(light2);
+
+  renWin->SetSize(400, 200);
+
+  // push all of VTK state to OpenGL.
+  renWin->Render();
+
+  // now create the monitors
+  // for lighting
+  // for gl modelview and projection matrices
+  // for the background color
+  // and initialize them from current state
+  cerr
+    << "Lights" << endl
+    << "================" << endl;
+  const int nLights = vtkLightingHelper::VTK_MAX_LIGHTS;
+  vtkSmartPointer<vtkOpenGLLightMonitor> lightMonitor[nLights];
+  for (int i=0; i<nLights; ++i)
+   {
+   lightMonitor[i] = vtkSmartPointer<vtkOpenGLLightMonitor>::New();
+   lightMonitor[i]->SetLightId(i);
+   lightMonitor[i]->Update();
+   lightMonitor[i]->Print(cerr);
+   }
+
+  cerr
+    << "Matrices" << endl
+    << "================" << endl;
+
+  vtkSmartPointer<vtkOpenGLModelViewProjectionMonitor> matrixMonitor
+    = vtkSmartPointer<vtkOpenGLModelViewProjectionMonitor>::New();
+
+  matrixMonitor->Update();
+  matrixMonitor->Print(cerr);
+
+  cerr
+    << "Background Color" << endl
+    << "================" << endl;
+
+  vtkSmartPointer<vtkBackgroundColorMonitor> backgroundColorMonitor
+    = vtkSmartPointer<vtkBackgroundColorMonitor>::New();
+
+  backgroundColorMonitor->Update(ren1);
+  backgroundColorMonitor->Print(cerr);
+
+  // update the scene so that lights, background color
+  // and modelview projection matrices are modified
+  ren1->SetBackground2(0.1, 0.2, 0.4);
+  ren1->SetGradientBackground(1);
+
+  light->SetFocalPoint(1.875,0.6125,0);
+  light->SetPosition(0.875,1.6125,1);
+
+  light2->SetFocalPoint(-100,-100,-100);
+  light2->SetPosition(100,100,100);
+
+  ren1->GetActiveCamera()->SetFocalPoint(0,0,0);
+  ren1->GetActiveCamera()->SetPosition(0,0,1);
+  ren1->GetActiveCamera()->SetViewUp(0,1,0);
+  ren1->GetActiveCamera()->ParallelProjectionOn();
+  ren1->ResetCamera();
+  ren1->GetActiveCamera()->SetParallelScale(1.5);
+
+  // push all of VTK state to OpenGL.
+  renWin->Render();
+
+  // verify that we can detect the changes
+  // in lighting
+  // in the modelview and projection matrices
+  // in the background color
+  // all should have been updated
+  cerr
+    << "Lights" << endl
+    << "================" << endl;
+  bool lightsChanged = false;
+  for (int i=0; i<nLights; ++i)
+   {
+   if (lightMonitor[i]->StateChanged())
+     {
+     cerr << "this light was changed..." << endl;
+     lightsChanged = true;
+     }
+   lightMonitor[i]->Print(cerr);
+   }
+
+  cerr
+    << "Matrices" << endl
+    << "================" << endl;
+
+  bool matricesChanged = matrixMonitor->StateChanged();
+  matrixMonitor->Print(cerr);
+
+  cerr
+    << "Background Color" << endl
+    << "================" << endl;
+
+  bool colorChanged = backgroundColorMonitor->StateChanged(ren1);
+  backgroundColorMonitor->Print(cerr);
+
+  cerr
+    << "Test results" << endl
+    << "================" << endl
+    << "detected lights changed..." << (lightsChanged?"yes":"no") << endl
+    << "detected matrices changed..." << (matricesChanged?"yes":"no") << endl
+    << "detected background color changed..." << (colorChanged?"yes":"no") << endl
+    << endl;
+
+  if (!(lightsChanged && matricesChanged && colorChanged))
+    {
+    cerr << "Test fails" << endl;
+    return 1;
+    }
+
+  cerr << "Test passes" << endl;
+  return 0;
+}
diff --git a/Rendering/OpenGL/Testing/Cxx/TestShadowMapPass.cxx b/Rendering/OpenGL/Testing/Cxx/TestShadowMapPass.cxx
index 67f7dee..58405f6 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestShadowMapPass.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestShadowMapPass.cxx
@@ -59,7 +59,7 @@
 #include "vtkProperty.h"
 #include "vtkLight.h"
 #include "vtkLightCollection.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkFrustumSource.h"
 #include "vtkPlanes.h"
diff --git a/Rendering/OpenGL/Testing/Cxx/TestTranslucentLUTDepthPeelingPass.cxx b/Rendering/OpenGL/Testing/Cxx/TestTranslucentLUTDepthPeelingPass.cxx
index f149ac1..9d5b1cf 100644
--- a/Rendering/OpenGL/Testing/Cxx/TestTranslucentLUTDepthPeelingPass.cxx
+++ b/Rendering/OpenGL/Testing/Cxx/TestTranslucentLUTDepthPeelingPass.cxx
@@ -26,6 +26,8 @@
 #include "vtkRenderWindowInteractor.h"
 #include "vtkRenderWindow.h"
 #include "vtkOpenGLRenderer.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLExtensionManager.h"
 #include "vtkActor.h"
 
 #include "vtkImageSinusoidSource.h"
@@ -50,29 +52,15 @@
 
 // Make sure to have a valid OpenGL context current on the calling thread
 // before calling it.
-bool MesaHasVTKBug8135()
+bool MesaHasVTKBug8135(vtkRenderWindow *renwin)
 {
-  // GL_VENDOR cannot be used because it can be "Brian Paul" or "Mesa project"
-  // GL_RENDERER cannot be used because it can be "Software Rasterizer" or
-  // "Mesa X11"
-  // GL_VERSION is more robust. It has things like "2.0 Mesa 7.0.4" or
-  // "2.1 Mesa 7.2" or "2.1 Mesa 7.3-devel"
-
-  bool result=false;
-  const char *gl_version=
-    reinterpret_cast<const char *>(glGetString(GL_VERSION));
-  const char *mesa_version=strstr(gl_version,"Mesa");
-
-  if(mesa_version!=0)
-    {
-    int mesa_major=0;
-    int mesa_minor=0;
-    if(sscanf(mesa_version,"Mesa %d.%d",&mesa_major, &mesa_minor)>=2)
-      {
-      result=mesa_major<7 || (mesa_major==7 && mesa_minor<3);
-      }
-    }
-  return result;
+  vtkOpenGLRenderWindow *context
+    = vtkOpenGLRenderWindow::SafeDownCast(renwin);
+
+  vtkOpenGLExtensionManager *extmgr
+    = context->GetExtensionManager();
+
+  return (extmgr->DriverIsMesa() && !extmgr->DriverVersionAtLeast(7,3));
 }
 
 int TestTranslucentLUTDepthPeelingPass(int argc, char* argv[])
@@ -170,7 +158,7 @@ int TestTranslucentLUTDepthPeelingPass(int argc, char* argv[])
   renWin->Render();
 
   int retVal;
-  if(MesaHasVTKBug8135())
+  if(MesaHasVTKBug8135(renWin))
     {
     // Mesa will crash if version<7.3
     cout<<"This version of Mesa would crash. Skip the test."<<endl;
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/LoadOpenGLExtension.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/LoadOpenGLExtension.png.md5
new file mode 100644
index 0000000..4654977
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/LoadOpenGLExtension.png.md5
@@ -0,0 +1 @@
+500c5b4b4015c0ca383e02acab089134
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/LoadOpenGLExtension_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/LoadOpenGLExtension_1.png.md5
new file mode 100644
index 0000000..5665cfb
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/LoadOpenGLExtension_1.png.md5
@@ -0,0 +1 @@
+5f1e2517430e25fad2a43a69292563cf
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/RenderNonFinite.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/RenderNonFinite.png.md5
new file mode 100644
index 0000000..42c9652
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/RenderNonFinite.png.md5
@@ -0,0 +1 @@
+9eba1123cd54a1dda1b1eeb46c0eb997
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/SurfacePlusEdges.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/SurfacePlusEdges.png.md5
new file mode 100644
index 0000000..670a104
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/SurfacePlusEdges.png.md5
@@ -0,0 +1 @@
+4f54ac111e4af4d443b2a2a877bce502
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestActorLightingFlag.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestActorLightingFlag.png.md5
new file mode 100644
index 0000000..e752cf2
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestActorLightingFlag.png.md5
@@ -0,0 +1 @@
+3eebb58de822edbf2568e73ed9d3e7a7
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestAnimationScene.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestAnimationScene.png.md5
new file mode 100644
index 0000000..4785e4e
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestAnimationScene.png.md5
@@ -0,0 +1 @@
+688936fa214527429a9c28228202a038
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestAreaSelections.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestAreaSelections.png.md5
new file mode 100644
index 0000000..48e9780
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestAreaSelections.png.md5
@@ -0,0 +1 @@
+5b8df0c9a12363520ebc6dc0968d3330
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestAreaSelections_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestAreaSelections_1.png.md5
new file mode 100644
index 0000000..245d904
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestAreaSelections_1.png.md5
@@ -0,0 +1 @@
+38c193f9baaab16cd688b568fab047c4
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestBackfaceCulling.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestBackfaceCulling.png.md5
new file mode 100644
index 0000000..88c6a31
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestBackfaceCulling.png.md5
@@ -0,0 +1 @@
+27129e67d23fc014bd6dd96e5e63b6cb
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestBlurAndSobelPasses.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestBlurAndSobelPasses.png.md5
new file mode 100644
index 0000000..2b28049
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestBlurAndSobelPasses.png.md5
@@ -0,0 +1 @@
+daa0e5ee312fd39d47781a2f5e3f006f
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestBlurAndSobelPasses_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestBlurAndSobelPasses_1.png.md5
new file mode 100644
index 0000000..e2010eb
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestBlurAndSobelPasses_1.png.md5
@@ -0,0 +1 @@
+0ff84568341e2a6836960742364714dd
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestDynamic2DLabelMapper.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestDynamic2DLabelMapper.png.md5
new file mode 100644
index 0000000..b21242b
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestDynamic2DLabelMapper.png.md5
@@ -0,0 +1 @@
+a70002d1dcab9f5203d8bb70b53d74dc
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestFollowerPicking.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestFollowerPicking.png.md5
new file mode 100644
index 0000000..88d2cab
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestFollowerPicking.png.md5
@@ -0,0 +1 @@
+2834eabe16478c9940c0a57ecf12bc17
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextMapper.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextMapper.png.md5
new file mode 100644
index 0000000..495114e
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextMapper.png.md5
@@ -0,0 +1 @@
+874303bb41f009f382912854c5751da7
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextMapperBigger.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextMapperBigger.png.md5
new file mode 100644
index 0000000..b9cbb97
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextMapperBigger.png.md5
@@ -0,0 +1 @@
+6f382afc015d037ad8f46d8381a48387
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextOverlay.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextOverlay.png.md5
new file mode 100644
index 0000000..0629d56
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestFreetypeTextOverlay.png.md5
@@ -0,0 +1 @@
+11d717e53ba64c7079f01e9df304ca14
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass.png.md5
new file mode 100644
index 0000000..07dbc7a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass.png.md5
@@ -0,0 +1 @@
+6b9c21571db71c156776832332193f82
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass_1.png.md5
new file mode 100644
index 0000000..41bea8f
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass_1.png.md5
@@ -0,0 +1 @@
+9484e78f306f5b22c4edb28da53e475c
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass_2.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass_2.png.md5
new file mode 100644
index 0000000..8cb745a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGaussianBlurPass_2.png.md5
@@ -0,0 +1 @@
+35751778e1c10c391051ec851ac809e7
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLAlphaBlending.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLAlphaBlending.png.md5
new file mode 100644
index 0000000..af958a2
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLAlphaBlending.png.md5
@@ -0,0 +1 @@
+cccd0f5c14f32095e07480252f31b8ee
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLAlphaBlending_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLAlphaBlending_1.png.md5
new file mode 100644
index 0000000..fc5bdb0
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLAlphaBlending_1.png.md5
@@ -0,0 +1 @@
+4abffe2645536d4d629aebc088c7058c
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLCxx.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLCxx.png.md5
new file mode 100644
index 0000000..930ba8a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLCxx.png.md5
@@ -0,0 +1 @@
+619f1a520e39198bb6430fdc4a104b80
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLCxx_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLCxx_1.png.md5
new file mode 100644
index 0000000..88661e6
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLCxx_1.png.md5
@@ -0,0 +1 @@
+e74a66d7267d1d289a909b75a127e81d
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass.png.md5
new file mode 100644
index 0000000..e8762e9
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass.png.md5
@@ -0,0 +1 @@
+656f979985a4d14aae16624e5d0f921f
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass_1.png.md5
new file mode 100644
index 0000000..88661e6
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass_1.png.md5
@@ -0,0 +1 @@
+e74a66d7267d1d289a909b75a127e81d
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass_2.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass_2.png.md5
new file mode 100644
index 0000000..fc5bdb0
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGenericVertexAttributesGLSLDepthPeelingPass_2.png.md5
@@ -0,0 +1 @@
+4abffe2645536d4d629aebc088c7058c
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapper.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapper.png.md5
new file mode 100644
index 0000000..b6ff415
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapper.png.md5
@@ -0,0 +1 @@
+c50cc7017b9307315367d6f4341f036e
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperArrow.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperArrow.png.md5
new file mode 100644
index 0000000..1175b19
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperArrow.png.md5
@@ -0,0 +1 @@
+fd9a0777ab20578407779f0a4112f819
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperMasking.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperMasking.png.md5
new file mode 100644
index 0000000..c59cf6a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperMasking.png.md5
@@ -0,0 +1 @@
+5ff4b2aad517b00ea986065999bf3644
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperMasking_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperMasking_1.png.md5
new file mode 100644
index 0000000..7706c9f
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperMasking_1.png.md5
@@ -0,0 +1 @@
+a10e1d55c3a73fab07fb92b923e25fc1
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperOrientationArray.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperOrientationArray.png.md5
new file mode 100644
index 0000000..1ef0ea0
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperOrientationArray.png.md5
@@ -0,0 +1 @@
+4549bf553048c24682e52878d4846fdc
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperOrientationArray_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperOrientationArray_1.png.md5
new file mode 100644
index 0000000..63e912d
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperOrientationArray_1.png.md5
@@ -0,0 +1 @@
+a6711321ec31124281a35ff82bcca26e
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperPicking.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperPicking.png.md5
new file mode 100644
index 0000000..a8a8877
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGlyph3DMapperPicking.png.md5
@@ -0,0 +1 @@
+109445d5f26623d202f5ff5c0a09c231
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestGradientBackground.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestGradientBackground.png.md5
new file mode 100644
index 0000000..0f293e9
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestGradientBackground.png.md5
@@ -0,0 +1 @@
+e3af9c15572535ec5cf1dd99bd41f755
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestHomogeneousTransformOfActor.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestHomogeneousTransformOfActor.png.md5
new file mode 100644
index 0000000..3cfeff7
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestHomogeneousTransformOfActor.png.md5
@@ -0,0 +1 @@
+e6ed81c870ac798c97c2b44939c08b87
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperAlpha.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperAlpha.png.md5
new file mode 100644
index 0000000..b52ca38
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperAlpha.png.md5
@@ -0,0 +1 @@
+ee790a0f40deb33e208c265e457eaf51
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperBackground.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperBackground.png.md5
new file mode 100644
index 0000000..d9b8349
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperBackground.png.md5
@@ -0,0 +1 @@
+d8b945becbf0daa1ae591a204e6fb4cd
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperBorder.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperBorder.png.md5
new file mode 100644
index 0000000..7b8ff2e
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperBorder.png.md5
@@ -0,0 +1 @@
+d993b816b83dd003a34de9e72d451a52
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperInterpolation.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperInterpolation.png.md5
new file mode 100644
index 0000000..a38ec23
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperInterpolation.png.md5
@@ -0,0 +1 @@
+212978e536851c0855d377adfbb2e977
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperOffAxis.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperOffAxis.png.md5
new file mode 100644
index 0000000..90d3825
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperOffAxis.png.md5
@@ -0,0 +1 @@
+38f074fe0b435955c841b1c64785d59a
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperOrient3D.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperOrient3D.png.md5
new file mode 100644
index 0000000..bc3a71f
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperOrient3D.png.md5
@@ -0,0 +1 @@
+8bad1696ca9f01a393df7cc176ed7ea9
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperSlab.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperSlab.png.md5
new file mode 100644
index 0000000..16e41f3
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageResliceMapperSlab.png.md5
@@ -0,0 +1 @@
+bf970ec0a3332c26c23597fa4b4fe6c9
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperAlpha.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperAlpha.png.md5
new file mode 100644
index 0000000..ce4aabe
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperAlpha.png.md5
@@ -0,0 +1 @@
+eb47298ae7a3777d635b34c7054b1305
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBackground.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBackground.png.md5
new file mode 100644
index 0000000..9784937
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBackground.png.md5
@@ -0,0 +1 @@
+5b7cf00ecf5acf17279a095b930c38eb
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBackground_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBackground_1.png.md5
new file mode 100644
index 0000000..003ebdd
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBackground_1.png.md5
@@ -0,0 +1 @@
+ea2159a9fc663a222c8a75dc691b82e3
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBorder.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBorder.png.md5
new file mode 100644
index 0000000..ba0ba8a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBorder.png.md5
@@ -0,0 +1 @@
+a903c954335ddb71767d239fae4bf2a3
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBorder_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBorder_1.png.md5
new file mode 100644
index 0000000..66e74a8
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperBorder_1.png.md5
@@ -0,0 +1 @@
+8b8bf03e123975fbd574d766f3d8eb75
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperInterpolation.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperInterpolation.png.md5
new file mode 100644
index 0000000..9bbe73c
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperInterpolation.png.md5
@@ -0,0 +1 @@
+23d1a8c2d958ec9f052aeef4eab70781
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperOrient2D.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperOrient2D.png.md5
new file mode 100644
index 0000000..02c2e3b
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperOrient2D.png.md5
@@ -0,0 +1 @@
+3678bafbf6601d2efb766bfada63896a
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperOrient3D.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperOrient3D.png.md5
new file mode 100644
index 0000000..d6622cc
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageSliceMapperOrient3D.png.md5
@@ -0,0 +1 @@
+b67a616006970c3e75b1b431a8622ae0
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageStack.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageStack.png.md5
new file mode 100644
index 0000000..36b62d3
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageStack.png.md5
@@ -0,0 +1 @@
+aa2e855074312f1436eff5fcb18246d2
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestImageStack_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestImageStack_1.png.md5
new file mode 100644
index 0000000..7a3fdaf
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestImageStack_1.png.md5
@@ -0,0 +1 @@
+76d6c7cf8c72eeb9c9c156371b893c0f
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper.png.md5
new file mode 100644
index 0000000..c731055
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper.png.md5
@@ -0,0 +1 @@
+b15bd511937c9883c40cd294d562f711
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D.png.md5
new file mode 100644
index 0000000..52a384b
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D.png.md5
@@ -0,0 +1 @@
+66bfd9aad028454673fed6cecc8b60d9
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D_1.png.md5
new file mode 100644
index 0000000..5a310c6
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D_1.png.md5
@@ -0,0 +1 @@
+2dbb2230b3e4bf4ba0f287353385aef4
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D_2.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D_2.png.md5
new file mode 100644
index 0000000..7dde8b3
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapper2D_2.png.md5
@@ -0,0 +1 @@
+b696541bf7ca5d3dfe59925ed5b7c5dd
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapperCoincidentPoints.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapperCoincidentPoints.png.md5
new file mode 100644
index 0000000..1bc2157
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacementMapperCoincidentPoints.png.md5
@@ -0,0 +1 @@
+c86e972d3a446440bc18ec40ccd8d90d
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer.png.md5
new file mode 100644
index 0000000..c731055
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer.png.md5
@@ -0,0 +1 @@
+b15bd511937c9883c40cd294d562f711
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D.png.md5
new file mode 100644
index 0000000..0201db9
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D.png.md5
@@ -0,0 +1 @@
+621078c459e6e868e0fa9eb2a4d14e33
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_1.png.md5
new file mode 100644
index 0000000..8c7292c
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_1.png.md5
@@ -0,0 +1 @@
+e939fcca815a468e7de6fae39b7deae0
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_2.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_2.png.md5
new file mode 100644
index 0000000..a127b34
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_2.png.md5
@@ -0,0 +1 @@
+5420097ab3f139bc99ba5d018c4d1ac1
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_3.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_3.png.md5
new file mode 100644
index 0000000..6254d54
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacer2D_3.png.md5
@@ -0,0 +1 @@
+adf5268606c22f8664e9bcbfdb267eca
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacerCoincidentPoints.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacerCoincidentPoints.png.md5
new file mode 100644
index 0000000..1bc2157
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLabelPlacerCoincidentPoints.png.md5
@@ -0,0 +1 @@
+c86e972d3a446440bc18ec40ccd8d90d
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestLightActor.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestLightActor.png.md5
new file mode 100644
index 0000000..0cea81e
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestLightActor.png.md5
@@ -0,0 +1 @@
+05a22a7d9b84bd815d7ef4ed97a976db
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturing.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturing.png.md5
new file mode 100644
index 0000000..b3b2f41
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturing.png.md5
@@ -0,0 +1 @@
+02dccf93e9fafd3acd444a39acdb69e5
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturingTransform.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturingTransform.png.md5
new file mode 100644
index 0000000..13952da
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturingTransform.png.md5
@@ -0,0 +1 @@
+d7766297d387ab74fda394dced7f3d0f
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturingTransform_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturingTransform_1.png.md5
new file mode 100644
index 0000000..8517b7a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturingTransform_1.png.md5
@@ -0,0 +1 @@
+1c11d0190e554749698b6618449e76f5
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturing_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturing_1.png.md5
new file mode 100644
index 0000000..8517b7a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiTexturing_1.png.md5
@@ -0,0 +1 @@
+1c11d0190e554749698b6618449e76f5
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestMultiblockDisplayProperties.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiblockDisplayProperties.png.md5
new file mode 100644
index 0000000..2561081
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiblockDisplayProperties.png.md5
@@ -0,0 +1 @@
+dfd4f8b96509d34ea5f114bc0d9f528e
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestMultiblockDisplayProperties_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiblockDisplayProperties_1.png.md5
new file mode 100644
index 0000000..bfd3089
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestMultiblockDisplayProperties_1.png.md5
@@ -0,0 +1 @@
+7476efd0b5e7d624f145cd1de87b5154
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestOSConeCxx.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestOSConeCxx.png.md5
new file mode 100644
index 0000000..e50c113
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestOSConeCxx.png.md5
@@ -0,0 +1 @@
+e150e7c166498c3a6d6a056fa17a05d8
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestOffAxisStereo.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestOffAxisStereo.png.md5
new file mode 100644
index 0000000..1b2c939
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestOffAxisStereo.png.md5
@@ -0,0 +1 @@
+880fd95c8a3b633cff19d4540355cf4d
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestOpacity.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestOpacity.png.md5
new file mode 100644
index 0000000..0a1bd06
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestOpacity.png.md5
@@ -0,0 +1 @@
+8982890ee90ae656fde5136d7262e507
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestOpacity_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestOpacity_1.png.md5
new file mode 100644
index 0000000..8550a67
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestOpacity_1.png.md5
@@ -0,0 +1 @@
+ab367f3fd28ca824368b832731711749
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestOpenGLPolyDataMapper.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestOpenGLPolyDataMapper.png.md5
new file mode 100644
index 0000000..84991d2
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestOpenGLPolyDataMapper.png.md5
@@ -0,0 +1 @@
+c814ae4fc767fb151b64615ef4aca6f3
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestOrderedTriangulator.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestOrderedTriangulator.png.md5
new file mode 100644
index 0000000..ba54094
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestOrderedTriangulator.png.md5
@@ -0,0 +1 @@
+2b2fa89a1a1e5bc25a609f697d4efc30
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestPolygonSelection.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestPolygonSelection.png.md5
new file mode 100644
index 0000000..93cea90
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestPolygonSelection.png.md5
@@ -0,0 +1 @@
+81f237e4e44102cc12095a903ee86d92
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestResetCameraVerticalAspectRatio.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestResetCameraVerticalAspectRatio.png.md5
new file mode 100644
index 0000000..3093b50
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestResetCameraVerticalAspectRatio.png.md5
@@ -0,0 +1 @@
+1a2cd276dae98b56a56f5ea5c11c20c3
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestResetCameraVerticalAspectRatioParallel.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestResetCameraVerticalAspectRatioParallel.png.md5
new file mode 100644
index 0000000..f060d7f
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestResetCameraVerticalAspectRatioParallel.png.md5
@@ -0,0 +1 @@
+bb751674c90f3ba1bf67b548a60da99f
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar.png.md5
new file mode 100644
index 0000000..a2f18d0
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar.png.md5
@@ -0,0 +1 @@
+e553913d5caa95e2bbc6cd66846ad5b4
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_1.png.md5
new file mode 100644
index 0000000..2ae1d0a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_1.png.md5
@@ -0,0 +1 @@
+44656c0c28888839c39da968df5ad357
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_2.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_2.png.md5
new file mode 100644
index 0000000..37fd060
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_2.png.md5
@@ -0,0 +1 @@
+5860f74f4b5ea200234047a6af9ce1d6
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_3.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_3.png.md5
new file mode 100644
index 0000000..a89fe93
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestScalarBar_3.png.md5
@@ -0,0 +1 @@
+89fbc99acfaca8809ff2eecc05aad5eb
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestSetImageOrientation.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestSetImageOrientation.png.md5
new file mode 100644
index 0000000..297ade9
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestSetImageOrientation.png.md5
@@ -0,0 +1 @@
+7cb733a338ca9827ace0b6f5f0269620
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestShadowMapPass.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestShadowMapPass.png.md5
new file mode 100644
index 0000000..9237c05
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestShadowMapPass.png.md5
@@ -0,0 +1 @@
+2b3210219a16536099997e747d0f88e0
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestShadowMapPass_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestShadowMapPass_1.png.md5
new file mode 100644
index 0000000..0cea81e
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestShadowMapPass_1.png.md5
@@ -0,0 +1 @@
+05a22a7d9b84bd815d7ef4ed97a976db
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestSobelGradientMagnitudePass.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestSobelGradientMagnitudePass.png.md5
new file mode 100644
index 0000000..810a8a5
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestSobelGradientMagnitudePass.png.md5
@@ -0,0 +1 @@
+d75e957d5b01b0153d391d2451f162d8
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestSobelGradientMagnitudePass_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestSobelGradientMagnitudePass_1.png.md5
new file mode 100644
index 0000000..e2010eb
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestSobelGradientMagnitudePass_1.png.md5
@@ -0,0 +1 @@
+0ff84568341e2a6836960742364714dd
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTDx.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTDx.png.md5
new file mode 100644
index 0000000..1092705
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTDx.png.md5
@@ -0,0 +1 @@
+a7867d6bea13a808aa0d70baf0588aa2
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsColorsTCoords.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsColorsTCoords.png.md5
new file mode 100644
index 0000000..314014b
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsColorsTCoords.png.md5
@@ -0,0 +1 @@
+657a63075ee8bc836208d0773ba0f027
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsNormalsColorsTCoords.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsNormalsColorsTCoords.png.md5
new file mode 100644
index 0000000..314014b
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsNormalsColorsTCoords.png.md5
@@ -0,0 +1 @@
+657a63075ee8bc836208d0773ba0f027
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsNormalsTCoords.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsNormalsTCoords.png.md5
new file mode 100644
index 0000000..d6e4d9a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsNormalsTCoords.png.md5
@@ -0,0 +1 @@
+e30af68f40e61eb9e034e7f783024756
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsTCoords.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsTCoords.png.md5
new file mode 100644
index 0000000..d6e4d9a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTStripsTCoords.png.md5
@@ -0,0 +1 @@
+e30af68f40e61eb9e034e7f783024756
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTextActor3DAlphaBlending.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTextActor3DAlphaBlending.png.md5
new file mode 100644
index 0000000..a1a07b8
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTextActor3DAlphaBlending.png.md5
@@ -0,0 +1 @@
+81c1f7e7b43cfae5378cd97f2e2310a4
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTextActor3DDepthPeeling.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTextActor3DDepthPeeling.png.md5
new file mode 100644
index 0000000..a593173
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTextActor3DDepthPeeling.png.md5
@@ -0,0 +1 @@
+cabec47663c0ba7564cb7a6cfbd2bed9
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTextActorAlphaBlending.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTextActorAlphaBlending.png.md5
new file mode 100644
index 0000000..a5014b6
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTextActorAlphaBlending.png.md5
@@ -0,0 +1 @@
+a3c6ec6c8e5dea39bd217d4a1f8cbcad
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTextActorDepthPeeling.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTextActorDepthPeeling.png.md5
new file mode 100644
index 0000000..c4dd371
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTextActorDepthPeeling.png.md5
@@ -0,0 +1 @@
+91884691205c3eeaa97ef6243f4c9f66
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTextureRGBA.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTextureRGBA.png.md5
new file mode 100644
index 0000000..2eb5593
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTextureRGBA.png.md5
@@ -0,0 +1 @@
+428ddd8f41b2dd8b39389ebac31d2361
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTextureRGBADepthPeeling.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTextureRGBADepthPeeling.png.md5
new file mode 100644
index 0000000..e948ddd
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTextureRGBADepthPeeling.png.md5
@@ -0,0 +1 @@
+a9091a943f5448a49c6d2573e31cc8df
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTexturedBackground.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTexturedBackground.png.md5
new file mode 100644
index 0000000..e692119
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTexturedBackground.png.md5
@@ -0,0 +1 @@
+8dfd61d05f17d5732b5c7f3691a3bd3c
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTilingCxx.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTilingCxx.png.md5
new file mode 100644
index 0000000..59f49a5
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTilingCxx.png.md5
@@ -0,0 +1 @@
+5eb5bd7c308aeaf01473aa235fe1d335
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTransformCoordinateUseDouble.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTransformCoordinateUseDouble.png.md5
new file mode 100644
index 0000000..d936169
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTransformCoordinateUseDouble.png.md5
@@ -0,0 +1 @@
+3e299c8036ed2e0d6bc5da21e0e15028
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentImageActorAlphaBlending.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentImageActorAlphaBlending.png.md5
new file mode 100644
index 0000000..996a2ee
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentImageActorAlphaBlending.png.md5
@@ -0,0 +1 @@
+726090666db0b526d0a4bbcaea4849be
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentImageActorDepthPeeling.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentImageActorDepthPeeling.png.md5
new file mode 100644
index 0000000..61c6753
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentImageActorDepthPeeling.png.md5
@@ -0,0 +1 @@
+9dc29c0aa88069b86405c465ed702931
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTAlphaBlending.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTAlphaBlending.png.md5
new file mode 100644
index 0000000..ce6dd3d
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTAlphaBlending.png.md5
@@ -0,0 +1 @@
+8687dc14fab46f34e86c8da88aa1dc0e
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeeling.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeeling.png.md5
new file mode 100644
index 0000000..6a1988d
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeeling.png.md5
@@ -0,0 +1 @@
+10c9250a465612d862567df55bf3bcd4
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeelingPass.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeelingPass.png.md5
new file mode 100644
index 0000000..5205aa9
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeelingPass.png.md5
@@ -0,0 +1 @@
+1924298190ccbff1c29a6f35cec2a53c
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeelingPass_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeelingPass_1.png.md5
new file mode 100644
index 0000000..a5b543e
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeelingPass_1.png.md5
@@ -0,0 +1 @@
+008fbed9a3fc0eb258af3a00ce511cce
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeeling_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeeling_1.png.md5
new file mode 100644
index 0000000..ce6dd3d
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTDepthPeeling_1.png.md5
@@ -0,0 +1 @@
+8687dc14fab46f34e86c8da88aa1dc0e
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureAlphaBlending.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureAlphaBlending.png.md5
new file mode 100644
index 0000000..4693b0a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureAlphaBlending.png.md5
@@ -0,0 +1 @@
+fddadbb60be2365ba326eadaf6b94855
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureDepthPeeling.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureDepthPeeling.png.md5
new file mode 100644
index 0000000..61cf71d
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureDepthPeeling.png.md5
@@ -0,0 +1 @@
+96451a12f5f96a1e086b55327d2c1a49
diff --git a/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureDepthPeeling_1.png.md5 b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureDepthPeeling_1.png.md5
new file mode 100644
index 0000000..4693b0a
--- /dev/null
+++ b/Rendering/OpenGL/Testing/Data/Baseline/TestTranslucentLUTTextureDepthPeeling_1.png.md5
@@ -0,0 +1 @@
+fddadbb60be2365ba326eadaf6b94855
diff --git a/Rendering/OpenGL/Testing/Python/CMakeLists.txt b/Rendering/OpenGL/Testing/Python/CMakeLists.txt
index ee8a9e5..edf0141 100644
--- a/Rendering/OpenGL/Testing/Python/CMakeLists.txt
+++ b/Rendering/OpenGL/Testing/Python/CMakeLists.txt
@@ -1,6 +1,3 @@
-add_test_python(TestFreetypeTextOverlay.py Rendering)
-add_test_python(TestFreetypeTextMapperBigger.py Rendering)
-
-if (VTK_DATA_ROOT)
-  add_test_python1(TestFreetypeTextMapper.py Baseline/Rendering)
-endif()
+vtk_add_test_python(TestFreetypeTextOverlay.py)
+vtk_add_test_python(TestFreetypeTextMapper.py NO_RT)
+vtk_add_test_python(TestFreetypeTextMapperBigger.py)
diff --git a/Rendering/OpenGL/Testing/Python/TestFreetypeTextMapper.py b/Rendering/OpenGL/Testing/Python/TestFreetypeTextMapper.py
index 2425600..b494d64 100755
--- a/Rendering/OpenGL/Testing/Python/TestFreetypeTextMapper.py
+++ b/Rendering/OpenGL/Testing/Python/TestFreetypeTextMapper.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestFreetypeTextMapper.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Rendering/OpenGL/Testing/Python/TestFreetypeTextMapperBigger.py b/Rendering/OpenGL/Testing/Python/TestFreetypeTextMapperBigger.py
index d240c96..34ae2db 100755
--- a/Rendering/OpenGL/Testing/Python/TestFreetypeTextMapperBigger.py
+++ b/Rendering/OpenGL/Testing/Python/TestFreetypeTextMapperBigger.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestFreetypeTextMapper.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Rendering/OpenGL/Testing/Tcl/CMakeLists.txt b/Rendering/OpenGL/Testing/Tcl/CMakeLists.txt
index 9e86750..ac35b1b 100644
--- a/Rendering/OpenGL/Testing/Tcl/CMakeLists.txt
+++ b/Rendering/OpenGL/Testing/Tcl/CMakeLists.txt
@@ -1,3 +1,3 @@
-add_test_tcl(TestFreetypeTextMapperBigger Rendering)
-add_test_tcl(TestFreetypeTextMapper Rendering)
-add_test_tcl(TestFreetypeTextOverlay Rendering)
+vtk_add_test_tcl(TestFreetypeTextMapperBigger)
+vtk_add_test_tcl(TestFreetypeTextMapper)
+vtk_add_test_tcl(TestFreetypeTextOverlay)
diff --git a/Rendering/OpenGL/module.cmake b/Rendering/OpenGL/module.cmake
index 0717283..5b933fc 100644
--- a/Rendering/OpenGL/module.cmake
+++ b/Rendering/OpenGL/module.cmake
@@ -3,9 +3,10 @@ vtk_module(vtkRenderingOpenGL
     Rendering
   IMPLEMENTS
     vtkRenderingCore
-  DEPENDS
+  PRIVATE_DEPENDS
     # These are likely to be removed soon - split Rendering/OpenGL further.
     vtkImagingHybrid # For vtkSampleFunction
+    vtksys
   COMPILE_DEPENDS
     vtkParseOGLExt
     vtkUtilitiesEncodeString
@@ -22,5 +23,6 @@ vtk_module(vtkRenderingOpenGL
     vtkImagingGeneral
     vtkImagingSources
     vtkFiltersProgrammable
+    vtkFiltersSources
     vtkRenderingAnnotation
   )
diff --git a/Rendering/OpenGL/vtkCameraPass.cxx b/Rendering/OpenGL/vtkCameraPass.cxx
index e526173..fb5aabc 100644
--- a/Rendering/OpenGL/vtkCameraPass.cxx
+++ b/Rendering/OpenGL/vtkCameraPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkCameraPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkOpenGLRenderer.h"
 #include "vtkgl.h"
@@ -24,6 +24,7 @@
 #include "vtkgluPickMatrix.h"
 #include "vtkCamera.h"
 #include "vtkFrameBufferObject.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkCameraPass);
 vtkCxxSetObjectMacro(vtkCameraPass,DelegatePass,vtkRenderPass);
@@ -79,6 +80,8 @@ void vtkCameraPass::Render(const vtkRenderState *s)
 {
   assert("pre: s_exists" && s!=0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->NumberOfRenderedProps=0;
 
   vtkRenderer *ren=s->GetRenderer();
@@ -254,6 +257,7 @@ void vtkCameraPass::Render(const vtkRenderState *s)
   matrix->Delete();
 
   // Done with camera initialization. The delegate can be called.
+  vtkOpenGLCheckErrorMacro("failed after camera initialization");
 
   if(this->DelegatePass!=0)
     {
@@ -265,6 +269,7 @@ void vtkCameraPass::Render(const vtkRenderState *s)
     {
     vtkWarningMacro(<<" no delegate.");
     }
+  vtkOpenGLCheckErrorMacro("failed after delegate pass");
 
   // Restore changed context.
   glViewport(saved_viewport[0], saved_viewport[1], saved_viewport[2],
@@ -284,6 +289,8 @@ void vtkCameraPass::Render(const vtkRenderState *s)
   glMatrixMode(GL_PROJECTION);
   glLoadMatrixf(saved_projection_matrix);
   glMatrixMode(saved_matrix_mode);
+
+  vtkOpenGLCheckErrorMacro("failed after restore context");
 }
 
 // ----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkCarbonRenderWindow.cxx b/Rendering/OpenGL/vtkCarbonRenderWindow.cxx
index e58a5ee..05cadda 100644
--- a/Rendering/OpenGL/vtkCarbonRenderWindow.cxx
+++ b/Rendering/OpenGL/vtkCarbonRenderWindow.cxx
@@ -232,7 +232,6 @@ vtkCarbonRenderWindow::vtkCarbonRenderWindow()
   this->Internal = new vtkCarbonRenderWindowInternal(this);
   this->ApplicationInitialized = 0;
   this->ContextId = 0;
-  this->MultiSamples = 0;
   this->WindowId = 0;
   this->ParentId = 0;
   this->RootWindow = 0;
diff --git a/Rendering/OpenGL/vtkClearZPass.cxx b/Rendering/OpenGL/vtkClearZPass.cxx
index bce1c24..b587655 100644
--- a/Rendering/OpenGL/vtkClearZPass.cxx
+++ b/Rendering/OpenGL/vtkClearZPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkClearZPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkRenderer.h"
 #include "vtkgl.h"
diff --git a/Rendering/OpenGL/vtkCocoaRenderWindow.h b/Rendering/OpenGL/vtkCocoaRenderWindow.h
index fe52506..538b055 100644
--- a/Rendering/OpenGL/vtkCocoaRenderWindow.h
+++ b/Rendering/OpenGL/vtkCocoaRenderWindow.h
@@ -174,6 +174,13 @@ public:
   virtual bool IsCurrent();
 
   // Description:
+  // Test if the window has a valid drawable. This is
+  // currently only an issue on Mac OS X Cocoa, where rendering
+  // to an invalid drawable causes all OpenGL calls to fail
+  // with "invalid framebuffer operation".
+  virtual bool IsDrawable();
+
+  // Description:
   // Update this window's OpenGL context, e.g. when the window is resized.
   void UpdateContext();
 
diff --git a/Rendering/OpenGL/vtkCocoaRenderWindow.mm b/Rendering/OpenGL/vtkCocoaRenderWindow.mm
index 5d3bf87..52b59c2 100644
--- a/Rendering/OpenGL/vtkCocoaRenderWindow.mm
+++ b/Rendering/OpenGL/vtkCocoaRenderWindow.mm
@@ -56,7 +56,6 @@ vtkCocoaRenderWindow::vtkCocoaRenderWindow()
 
   this->WindowCreated = 0;
   this->ViewCreated = 0;
-  this->MultiSamples = 8;
   this->SetWindowName("Visualization Toolkit - Cocoa");
   this->CursorHidden = 0;
   this->ForceMakeCurrent = 0;
@@ -232,6 +231,22 @@ bool vtkCocoaRenderWindow::IsCurrent()
   return result;
 }
 
+//----------------------------------------------------------------------------
+bool vtkCocoaRenderWindow::IsDrawable()
+{
+  // The window must be initialized first,
+  // otherwise this always evaluates to false.
+  this->Initialize();
+
+  // first check that window is valid
+  NSView *theView = (NSView*)this->GetWindowId();
+  bool win = [[theView window] windowNumber] > 0;
+
+  // then check that the drawable is valid
+  NSOpenGLContext *context = (NSOpenGLContext *)this->GetContextId();
+  bool ok = [context view] != nil;
+  return win && ok;
+}
 
 //----------------------------------------------------------------------------
 void vtkCocoaRenderWindow::UpdateContext()
@@ -712,19 +727,58 @@ void vtkCocoaRenderWindow::CreateAWindow()
 //----------------------------------------------------------------------------
 void vtkCocoaRenderWindow::CreateGLContext()
 {
-  NSOpenGLPixelFormatAttribute attribs[] =
+  // keep trying to get different pixelFormats until successful
+  NSOpenGLPixelFormat* pixelFormat = nil;
+  while (pixelFormat == nil)
     {
-      NSOpenGLPFAAccelerated,
-      NSOpenGLPFADepthSize,
-      (NSOpenGLPixelFormatAttribute)32,
-      (this->DoubleBuffer != 0) ?
-        (NSOpenGLPixelFormatAttribute)NSOpenGLPFADoubleBuffer :
-        (NSOpenGLPixelFormatAttribute)nil,
-      (NSOpenGLPixelFormatAttribute)nil
-    };
-
-  NSOpenGLPixelFormat* pixelFormat = [[[NSOpenGLPixelFormat alloc]
-                                      initWithAttributes:attribs] autorelease];
+    int i = 0;
+    NSOpenGLPixelFormatAttribute attribs[20];
+
+    attribs[i++] = NSOpenGLPFAAccelerated;
+    attribs[i++] = NSOpenGLPFADepthSize;
+    attribs[i++] = (NSOpenGLPixelFormatAttribute)32;
+
+    if (this->MultiSamples != 0)
+      {
+      attribs[i++] = NSOpenGLPFASampleBuffers;
+      attribs[i++] = (NSOpenGLPixelFormatAttribute)1;
+      attribs[i++] = NSOpenGLPFASamples;
+      attribs[i++] = (NSOpenGLPixelFormatAttribute)(this->MultiSamples);
+      attribs[i++] = NSOpenGLPFAMultisample;
+      }
+
+    if (this->DoubleBuffer != 0)
+      {
+      attribs[i++] = NSOpenGLPFADoubleBuffer;
+      }
+
+    attribs[i++] = (NSOpenGLPixelFormatAttribute)0;
+
+    // make sure the size of the array was not exceeded
+    assert(sizeof(NSOpenGLPixelFormatAttribute)*i < sizeof(attribs));
+
+    pixelFormat = [[[NSOpenGLPixelFormat alloc]
+                   initWithAttributes:attribs] autorelease];
+
+    if (pixelFormat == nil)
+      {
+      if (this->MultiSamples == 0)
+        {
+        // after trying with no multisamples, we are done
+        break;
+        }
+      else if (this->MultiSamples < 4)
+        {
+        // next time try with no multisamples
+        this->MultiSamples = 0;
+        }
+      else
+        {
+        this->MultiSamples /= 2;
+        }
+      }
+    }
+
   NSOpenGLContext* context = [[[NSOpenGLContext alloc]
                               initWithFormat:pixelFormat
                                 shareContext:nil] autorelease];
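
The CreateGLContext() rewrite above keeps retrying with progressively weaker
pixel-format attributes (halving MultiSamples and finally dropping it) rather
than failing outright. A minimal usage sketch under that behaviour; the calls
SetMultiSamples() and Render() are existing vtkRenderWindow API, and the
value 8 is only illustrative:

  #include "vtkSmartPointer.h"
  #include "vtkCocoaRenderWindow.h"

  int main(int, char *[])
  {
    // Request 8x multisampling; if the driver rejects the pixel format,
    // CreateGLContext() silently falls back to 4, 2, and finally 0 samples.
    vtkSmartPointer<vtkCocoaRenderWindow> renWin =
      vtkSmartPointer<vtkCocoaRenderWindow>::New();
    renWin->SetMultiSamples(8);
    renWin->Render();
    return 0;
  }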
diff --git a/Rendering/OpenGL/vtkCocoaRenderWindowInteractor.mm b/Rendering/OpenGL/vtkCocoaRenderWindowInteractor.mm
index 64550ed..fb308dc 100644
--- a/Rendering/OpenGL/vtkCocoaRenderWindowInteractor.mm
+++ b/Rendering/OpenGL/vtkCocoaRenderWindowInteractor.mm
@@ -205,8 +205,9 @@ static vtkEarlyCocoaSetup * gEarlyCocoaSetup = new vtkEarlyCocoaSetup();
 //----------------------------------------------------------------------------
 - (void)start
 {
-  // Retrieve the NSWindow.
+  // Retrieve the NSWindow and the NSView.
   NSWindow *win = nil;
+  NSView *view = nil;
   if (_renWin != NULL)
     {
     win = reinterpret_cast<NSWindow *>(_renWin->GetRootWindow());
@@ -221,6 +222,17 @@ static vtkEarlyCocoaSetup * gEarlyCocoaSetup = new vtkEarlyCocoaSetup();
                                name:NSWindowWillCloseNotification
                              object:win];
       }
+
+    view = reinterpret_cast<NSView *>(_renWin->GetWindowId());
+
+    if (view != nil)
+      {
+      // Receive notifications of frame rectangle changes.
+      NSNotificationCenter *nc = [NSNotificationCenter defaultCenter];
+      [nc addObserver:self selector:@selector(viewFrameChanged:)
+                               name:NSViewFrameDidChangeNotification
+                             object:view];
+      }
     }
 
   // Start the NSApplication's run loop
@@ -231,11 +243,19 @@ static vtkEarlyCocoaSetup * gEarlyCocoaSetup = new vtkEarlyCocoaSetup();
 //----------------------------------------------------------------------------
 - (void)stop
 {
-  // Retrieve the NSWindow.
+  // Retrieve the NSWindow and the NSView.
   NSWindow *win = nil;
+  NSView *view = nil;
   if (_renWin != NULL)
     {
     win = reinterpret_cast<NSWindow *>(_renWin->GetRootWindow());
+    view = reinterpret_cast<NSView *>(_renWin->GetWindowId());
+    if (view != nil)
+      {
+      NSNotificationCenter *nc = [NSNotificationCenter defaultCenter];
+      [nc removeObserver:self name:NSViewFrameDidChangeNotification
+                            object:view];
+      }
     }
 
   // Close the window, removing it from the screen and releasing it
@@ -278,6 +298,40 @@ static vtkEarlyCocoaSetup * gEarlyCocoaSetup = new vtkEarlyCocoaSetup();
     }
 }
 
+//----------------------------------------------------------------------------
+- (void)viewFrameChanged:(NSNotification *)aNotification
+{
+  (void)aNotification;
+
+  // Retrieve the NSView and the Interactor.
+  NSView *view = nil;
+  vtkRenderWindowInteractor *interactor = NULL;
+  if (_renWin != NULL)
+    {
+    view = reinterpret_cast<NSView *>(_renWin->GetWindowId());
+    interactor = _renWin->GetInteractor();
+    }
+
+  if (view == nil || interactor == NULL || !interactor->GetEnabled())
+    {
+    return;
+    }
+
+  // Get the frame size, send ConfigureEvent from the Interactor.
+  NSRect frameRect = [view frame];
+  int width = (int)round(NSWidth(frameRect));
+  int height = (int)round(NSHeight(frameRect));
+
+  int size[2];
+  interactor->GetSize(size);
+
+  if (width != size[0] || height != size[1])
+    {
+    interactor->UpdateSize(width, height);
+    interactor->InvokeEvent(vtkCommand::ConfigureEvent, NULL);
+    }
+}
+
 @end
 
 //----------------------------------------------------------------------------
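
The new viewFrameChanged: handler above turns NSView frame changes into a VTK
ConfigureEvent. A hypothetical observer sketch, assuming only the standard
vtkCallbackCommand / AddObserver API (the OnConfigure name is made up for
illustration):

  #include "vtkCallbackCommand.h"
  #include "vtkCommand.h"
  #include "vtkObject.h"
  #include "vtkRenderWindowInteractor.h"
  #include <iostream>

  static void OnConfigure(vtkObject *caller, unsigned long, void *, void *)
  {
    // Called whenever the interactor reports a resize (ConfigureEvent).
    vtkRenderWindowInteractor *iren =
      vtkRenderWindowInteractor::SafeDownCast(caller);
    int *size = iren->GetSize();
    std::cout << "view resized to " << size[0] << "x" << size[1] << std::endl;
  }

  // Given an interactor 'iren' attached to a vtkCocoaRenderWindow:
  //   vtkCallbackCommand *cb = vtkCallbackCommand::New();
  //   cb->SetCallback(OnConfigure);
  //   iren->AddObserver(vtkCommand::ConfigureEvent, cb);
  //   cb->Delete();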
diff --git a/Rendering/OpenGL/vtkColorMaterialHelper.cxx b/Rendering/OpenGL/vtkColorMaterialHelper.cxx
index 32c041c..edbe1c2 100644
--- a/Rendering/OpenGL/vtkColorMaterialHelper.cxx
+++ b/Rendering/OpenGL/vtkColorMaterialHelper.cxx
@@ -23,8 +23,12 @@
 
 extern const char * vtkColorMaterialHelper_vs;
 
+//----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkColorMaterialHelper);
+
+//----------------------------------------------------------------------------
 vtkCxxSetObjectMacro(vtkColorMaterialHelper, Shader, vtkShaderProgram2);
+
 //----------------------------------------------------------------------------
 vtkColorMaterialHelper::vtkColorMaterialHelper()
 {
@@ -54,14 +58,17 @@ void vtkColorMaterialHelper::Initialize(vtkShaderProgram2* pgm)
       }
     }
 }
+
 //----------------------------------------------------------------------------
 void vtkColorMaterialHelper::PrepareForRendering()
 {
+  #ifndef NDEBUG
   if (!this->Shader)
     {
     vtkErrorMacro("Please Initialize() before calling PrepareForRendering().");
     return ;
     }
+  #endif
 
   this->Mode = vtkColorMaterialHelper::DISABLED;
   if (glIsEnabled(GL_COLOR_MATERIAL))
@@ -96,17 +103,26 @@ void vtkColorMaterialHelper::PrepareForRendering()
 //----------------------------------------------------------------------------
 void vtkColorMaterialHelper::Render()
 {
+  #ifndef NDEBUG
   if (!this->Shader)
     {
     vtkErrorMacro("Please Initialize() before calling Render().");
     return;
     }
+  #endif
 
   int value=this->Mode;
   this->Shader->GetUniformVariables()->SetUniformi("vtkColorMaterialHelper_Mode",1,&value);
 }
 
 //----------------------------------------------------------------------------
+void vtkColorMaterialHelper::SetUniformVariables()
+{
+  this->PrepareForRendering(); // initialize this from the current GL state
+  this->Render();              // send the captured state as uniforms
+}
+
+//----------------------------------------------------------------------------
 void vtkColorMaterialHelper::PrintSelf(ostream& os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os, indent);
diff --git a/Rendering/OpenGL/vtkColorMaterialHelper.h b/Rendering/OpenGL/vtkColorMaterialHelper.h
index 4f47423..e9a911f 100644
--- a/Rendering/OpenGL/vtkColorMaterialHelper.h
+++ b/Rendering/OpenGL/vtkColorMaterialHelper.h
@@ -44,13 +44,20 @@ public:
   //ETX
 
   // Description:
-  // Prepares the shader i.e. reads color material parameters state from OpenGL.
-  // This must be called before the shader is bound.
+  // Capture current OpenGL state and initialize uniform variables
+  // used by the helper shader.
+  void SetUniformVariables();
+
+  // Description:
+  // Captures current OpenGL state.
+  // DEPRECATED (use SetUniformVariables instead)
   void PrepareForRendering();
 
   // Description:
-  // Uploads any uniforms needed. This must be called only
-  // after the shader has been bound, but before rendering the geometry.
+  // Initializes uniform variables with the last captured
+  // OpenGL state.
+  // Nothing is rendered; this only sets uniforms.
+  // DEPRECATED: Use SetUniformVariables instead.
   void Render();
 
 //BTX
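
With the header change above, the PrepareForRendering()/Render() pair is kept
only for compatibility and new callers are expected to use the single
SetUniformVariables() entry point. A minimal sketch, assuming a hypothetical
'helper' pointer that was already Initialize()d with its shader program:

  // old two-step pattern (still works, now deprecated)
  helper->PrepareForRendering();
  helper->Render();

  // new single call: captures the GL color-material state and sets the uniforms
  helper->SetUniformVariables();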
diff --git a/Rendering/OpenGL/vtkColorMaterialHelper_vs.glsl b/Rendering/OpenGL/vtkColorMaterialHelper_vs.glsl
index 91ea88d..3f2d369 100644
--- a/Rendering/OpenGL/vtkColorMaterialHelper_vs.glsl
+++ b/Rendering/OpenGL/vtkColorMaterialHelper_vs.glsl
@@ -12,7 +12,6 @@
 //     PURPOSE.  See the above copyright notice for more information.
 //
 //=========================================================================
-// Id: Id
 
 #version 110
 
@@ -26,11 +25,6 @@ uniform int vtkColorMaterialHelper_Mode;
 
 gl_MaterialParameters getMaterialParameters()
 {
-  if (vtkColorMaterialHelper_Mode == 0)
-    {
-    return gl_FrontMaterial;
-    }
-
   gl_MaterialParameters materialParams = gl_FrontMaterial;
   if (vtkColorMaterialHelper_Mode == VTK_GL_AMBIENT)
     {
diff --git a/Rendering/OpenGL/vtkCoreGraphicsGPUInfoList.cxx b/Rendering/OpenGL/vtkCoreGraphicsGPUInfoList.cxx
index a4aa3a0..85efa16 100644
--- a/Rendering/OpenGL/vtkCoreGraphicsGPUInfoList.cxx
+++ b/Rendering/OpenGL/vtkCoreGraphicsGPUInfoList.cxx
@@ -17,7 +17,7 @@
 #include "vtkGPUInfoListArray.h"
 #include "vtkObjectFactory.h"
 
-#include <assert.h>
+#include <cassert>
 #include <ApplicationServices/ApplicationServices.h>
 
 vtkStandardNewMacro(vtkCoreGraphicsGPUInfoList);
diff --git a/Rendering/OpenGL/vtkDataTransferHelper.cxx b/Rendering/OpenGL/vtkDataTransferHelper.cxx
index 3e0cd61..77a9664 100644
--- a/Rendering/OpenGL/vtkDataTransferHelper.cxx
+++ b/Rendering/OpenGL/vtkDataTransferHelper.cxx
@@ -22,7 +22,7 @@
 #include "vtkOpenGLRenderWindow.h"
 #include "vtkSmartPointer.h"
 #include "vtkStructuredData.h"
-#include <assert.h>
+#include <cassert>
 
 static void vtkGetDimensions(int extents[6], int dims[3])
 {
diff --git a/Rendering/OpenGL/vtkDefaultPass.cxx b/Rendering/OpenGL/vtkDefaultPass.cxx
index c560d8a..4dec722 100644
--- a/Rendering/OpenGL/vtkDefaultPass.cxx
+++ b/Rendering/OpenGL/vtkDefaultPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkDefaultPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkProp.h"
 #include "vtkRenderer.h"
diff --git a/Rendering/OpenGL/vtkDepthPeelingPass.cxx b/Rendering/OpenGL/vtkDepthPeelingPass.cxx
index 8f88074..59d570d 100644
--- a/Rendering/OpenGL/vtkDepthPeelingPass.cxx
+++ b/Rendering/OpenGL/vtkDepthPeelingPass.cxx
@@ -15,7 +15,7 @@ PURPOSE.  See the above copyright notice for more information.
 
 #include "vtkDepthPeelingPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkProp.h"
 #include "vtkRenderer.h"
@@ -29,6 +29,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkShader2Collection.h"
 #include "vtkUniformVariables.h"
 #include "vtkTextureUnitManager.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkDepthPeelingPass);
 vtkCxxSetObjectMacro(vtkDepthPeelingPass,TranslucentPass,vtkRenderPass);
@@ -158,8 +159,13 @@ void vtkDepthPeelingPass::Render(const vtkRenderState *s)
       return; // nothing to render.
     }
 
-  this->CheckSupport(static_cast<vtkOpenGLRenderWindow *>(
-                       s->GetRenderer()->GetRenderWindow()));
+  // check driver support
+  vtkOpenGLRenderWindow *context
+    = vtkOpenGLRenderWindow::SafeDownCast(s->GetRenderer()->GetRenderWindow());
+
+  this->CheckSupport(context);
+
+  vtkOpenGLExtensionManager *extensions = context->GetExtensionManager();
 
   if(!this->IsSupported)
     {
@@ -170,6 +176,8 @@ void vtkDepthPeelingPass::Render(const vtkRenderState *s)
       return;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   // Depth peeling.
   vtkRenderer *r=s->GetRenderer();
 
@@ -200,6 +208,90 @@ void vtkDepthPeelingPass::Render(const vtkRenderState *s)
     {
       this->DepthFormat=vtkgl::DEPTH_COMPONENT24_ARB;
     }
+
+  // NOTE: this class expects the texture unit to already be selected;
+  // don't change it.
+  //vtkgl::ActiveTexture(vtkgl::TEXTURE1);
+
+  // Check whether the GPU supports a viewport-sized texture in
+  // the formats we will use. If not, we should fall back
+  // to alpha blending.
+
+  // This check is really an anachronism, since modern GPUs
+  // typically support full-screen-sized textures in all the
+  // common formats.
+
+  GLint depthTexSupport = 1;
+  GLint colorTexSupport = 1;
+
+  if ( extensions->DriverIsATI()
+     && !extensions->GetIgnoreDriverBugs("ATI proxy query bug.") )
+    {
+    // The ATI Radeon HD drivers currently choke on the proxy
+    // query, but depth peeling has been confirmed to work. For
+    // those drivers, fall back on the weaker max texture size
+    // check.
+    GLint maxTexSize = 0;
+    glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTexSize);
+    if ( (this->ViewportWidth > maxTexSize)
+      || (this->ViewportHeight > maxTexSize) )
+      {
+      depthTexSupport = 0;
+      colorTexSupport = 0;
+      }
+    }
+  else
+    {
+    // Not a buggy ATI driver, so it's OK to make the proxy query.
+    GLuint proxyQueryTex = 0;
+    glGenTextures(1, &proxyQueryTex);
+    glBindTexture(vtkgl::TEXTURE_RECTANGLE_ARB, proxyQueryTex);
+
+    // support for depth buffer format
+    glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0, this->DepthFormat,
+                 this->ViewportWidth, this->ViewportHeight, 0,
+                 GL_DEPTH_COMPONENT, GL_UNSIGNED_INT,0);
+
+    glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0,
+                             GL_TEXTURE_WIDTH, &depthTexSupport);
+
+    // support for color buffer
+    glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
+                 this->ViewportWidth, this->ViewportHeight, 0, GL_RGBA,
+                 GL_UNSIGNED_BYTE, 0);
+
+    glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0,
+                             GL_TEXTURE_WIDTH,&colorTexSupport);
+
+    glBindTexture(vtkgl::TEXTURE_RECTANGLE_ARB, 0);
+    glDeleteTextures(1, &proxyQueryTex);
+    }
+
+  if (!(depthTexSupport && colorTexSupport))
+    {
+    // The GPU does not support a viewport-sized texture in this format.
+    // Use the alpha blending technique instead.
+    vtkWarningMacro(
+        << "The GPU supports "
+        << this->ViewportWidth << "x"
+        << this->ViewportHeight << " texture: depth buffer "
+        << (depthTexSupport?"yes":"no") << ", color buffer "
+        << (colorTexSupport?"yes":"no"));
+
+    this->LastRenderingUsedDepthPeeling = false;
+    vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+    this->TranslucentPass->Render(s);
+    this->NumberOfRenderedProps
+      = this->TranslucentPass->GetNumberOfRenderedProps();
+    return;
+    }
+
+  // This has to be set before the call to UpdateTranslucentPolygonalGeometry()
+  // because UpdateTranslucentPolygonalGeometry() will eventually call
+  // vtkOpenGLActor::Render(), which uses this flag.
+  this->LastRenderingUsedDepthPeeling = true;
+  this->SetLastRenderingUsedDepthPeeling(s->GetRenderer(), true);
+
     // 1. Grab the RGBAZ of the opaque layer.
     GLuint opaqueLayerZ=0;
     GLuint opaqueLayerRgba=0;
@@ -226,24 +318,6 @@ void vtkDepthPeelingPass::Render(const vtkRenderState *s)
                     GL_LESS);
 
     // Allocate memory
-    glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,this->DepthFormat,
-                 this->ViewportWidth,this->ViewportHeight,
-                 0,GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, 0);
-    GLint width;
-    glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,
-                             GL_TEXTURE_WIDTH,&width);
-    if(width==0)
-      {
-      vtkDebugMacro(<<"not enough GPU RAM for opaque z");
-      // not enough GPU RAM. Do alpha blending technique instead
-      glDeleteTextures(1,&opaqueLayerRgba);
-      glDeleteTextures(1,&opaqueLayerZ);
-      this->LastRenderingUsedDepthPeeling=false;
-      vtkgl::ActiveTexture(vtkgl::TEXTURE0 );
-      this->TranslucentPass->Render(s);
-      this->NumberOfRenderedProps=this->TranslucentPass->GetNumberOfRenderedProps();
-      return;
-      }
     glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB,0,this->DepthFormat,
                  this->ViewportWidth,this->ViewportHeight, 0,
                  GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, 0);
@@ -258,33 +332,10 @@ void vtkDepthPeelingPass::Render(const vtkRenderState *s)
     glTexParameteri(vtkgl::TEXTURE_RECTANGLE_ARB,GL_TEXTURE_MAG_FILTER,
                     GL_NEAREST);
     // Allocate memory
-    glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
-                 this->ViewportWidth,this->ViewportHeight,
-                 0,GL_RGBA, GL_UNSIGNED_BYTE, 0);
-    glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,
-                             GL_TEXTURE_WIDTH,&width);
-    if(width==0)
-      {
-      vtkDebugMacro(<<"not enough GPU RAM for opaque rgba");
-      // not enough GPU RAM. Do alpha blending technique instead
-      glDeleteTextures(1,&opaqueLayerRgba);
-      glDeleteTextures(1,&opaqueLayerZ);
-      this->LastRenderingUsedDepthPeeling=false;
-      vtkgl::ActiveTexture(vtkgl::TEXTURE0 );
-      this->TranslucentPass->Render(s);
-      this->NumberOfRenderedProps=this->TranslucentPass->GetNumberOfRenderedProps();
-      return;
-      }
-
-    // Have to be set before a call to UpdateTranslucentPolygonalGeometry()
-    // because UpdateTranslucentPolygonalGeometry() will eventually call
-    // vtkOpenGLActor::Render() that uses this flag.
-    this->LastRenderingUsedDepthPeeling=true;
-    this->SetLastRenderingUsedDepthPeeling(s->GetRenderer(),true);
-
     glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
                  this->ViewportWidth,this->ViewportHeight, 0, GL_RGBA,
                  GL_UNSIGNED_BYTE, 0);
+
     // Grab the rgba-buffer
     glCopyTexSubImage2D(vtkgl::TEXTURE_RECTANGLE_ARB, 0, 0, 0, this->ViewportX,
                         this->ViewportY,this->ViewportWidth,
@@ -348,8 +399,7 @@ void vtkDepthPeelingPass::Render(const vtkRenderState *s)
 
     if(l>1) // some higher layer, we allocated some tex unit in RenderPeel()
       {
-      vtkTextureUnitManager *m=
-        this->Prog->GetContext()->GetTextureUnitManager();
+      vtkTextureUnitManager *m=context->GetTextureUnitManager();
       m->Free(this->ShadowTexUnit);
       m->Free(this->OpaqueShadowTexUnit);
       this->ShadowTexUnit=-1;
@@ -460,18 +510,22 @@ void vtkDepthPeelingPass::Render(const vtkRenderState *s)
     glDeleteTextures(1,&opaqueLayerRgba);
     glDeleteTextures(1,&opaqueLayerZ);
     this->NumberOfRenderedProps=this->TranslucentPass->GetNumberOfRenderedProps();
+
+    vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 // ----------------------------------------------------------------------------
 void vtkDepthPeelingPass::CheckSupport(vtkOpenGLRenderWindow *w)
 {
   assert("pre: w_exists" && w!=0);
+  vtkOpenGLClearErrorMacro();
+
+  vtkOpenGLExtensionManager *extensions=w->GetExtensionManager();
 
   if(!this->IsChecked || w->GetContextCreationTime()>this->CheckTime)
     {
       this->IsChecked=true;
       this->CheckTime.Modified();
-      vtkOpenGLExtensionManager *extensions=w->GetExtensionManager();
 
       bool supports_GL_1_3=extensions->ExtensionSupported("GL_VERSION_1_3")==1;
       bool supports_GL_1_4=extensions->ExtensionSupported("GL_VERSION_1_4")==1;
@@ -545,6 +599,20 @@ void vtkDepthPeelingPass::CheckSupport(vtkOpenGLRenderWindow *w)
       glGetIntegerv(GL_ALPHA_BITS, &alphaBits);
       bool supportsAtLeast8AlphaBits=alphaBits>=8;
 
+      // Force alpha blending on known-buggy drivers:
+      // Mesa does not support true linking of shaders (VTK bug 8135)
+      // and Mesa 7.2 just crashes during the try-compile.
+      // With OS Mesa 9.1.4 some tests fail.
+      // ATI Radeon HD XXXXX on Windows chokes on PROXY_TEXTURE_RECTANGLE_ARB
+      // memory queries; however, if those are not used, all the tests pass.
+      // ATI Radeon HD on Mac OS X handles PROXY_TEXTURE_RECTANGLE_ARB fine, but
+      // TestTranslucentLUTDepthPeeling fails. So leave it disabled on Apple.
+      int driver_support
+        = (!(extensions->DriverIsATI()
+        && (extensions->GetDriverGLVersionMajor() < 3))
+        && !extensions->DriverIsMesa())
+        || extensions->GetIgnoreDriverBugs("ATI and Mesa depth peeling bugs");
+
       this->IsSupported =
         supports_depth_texture &&
         supports_shadow &&
@@ -557,7 +625,8 @@ void vtkDepthPeelingPass::CheckSupport(vtkOpenGLRenderWindow *w)
         supports_multitexture &&
         supports_GL_ARB_texture_rectangle &&
         supports_edge_clamp &&
-        supportsAtLeast8AlphaBits;
+        supportsAtLeast8AlphaBits &&
+        driver_support;
 
       if(this->IsSupported)
         {
@@ -601,6 +670,31 @@ void vtkDepthPeelingPass::CheckSupport(vtkOpenGLRenderWindow *w)
           }
 
         extensions->LoadExtension("GL_ARB_texture_rectangle");
+
+        // Some OpenGL implementations such as ATI
+        // claim to support both GLSL and GL_ARB_texture_rectangle but
+        // don't actually support sampler2DRectShadow in GLSL code.
+        // Others (like Mesa) claim to support shaders but don't actually
+        // support true linking of shaders (and declaration of functions).
+        // To test that, we compile the shader; if it fails, we don't use
+        // depth peeling.
+        GLuint shader =
+          vtkgl::CreateShader(vtkgl::FRAGMENT_SHADER);
+        vtkgl::ShaderSource(
+          shader, 1,
+          const_cast<const char **>(&vtkDepthPeeling_fs), 0);
+        vtkgl::CompileShader(shader);
+        GLint params;
+        vtkgl::GetShaderiv(shader,vtkgl::COMPILE_STATUS,
+                           &params);
+        this->IsSupported = params==GL_TRUE;
+        vtkgl::DeleteShader(shader);
+        if(!this->IsSupported)
+          {
+          vtkDebugMacro("this OpenGL implementation does not support "
+                        "GL_ARB_texture_rectangle in GLSL code or does "
+                        "not support true linking of shaders.");
+          }
         }
       else
         {
@@ -649,56 +743,13 @@ void vtkDepthPeelingPass::CheckSupport(vtkOpenGLRenderWindow *w)
           {
           vtkDebugMacro(<<"at least 8 alpha bits is not supported");
           }
-        }
-
-      if(this->IsSupported)
-        {
-        // Some OpenGL implementations are buggy so depth peeling does not
-        // work:
-        //  - ATI
-        //  - Mesa git does not support true linking of shaders (VTK bug 8135)
-        //    and Mesa 7.2 just crashes during the try-compile.
-        // Do alpha blending always.
-        const char* gl_renderer =
-          reinterpret_cast<const char *>(glGetString(GL_RENDERER));
-        int isATI = strstr(gl_renderer, "ATI") != 0;
-
-        bool isMesa=strstr(gl_renderer, "Mesa") != 0;
-
-        if(isMesa || isATI)
-          {
-          this->IsSupported = false;
-          }
-        }
-
-      if(this->IsSupported)
-        {
-        // Some OpenGL implementations such as ATI
-        // claim to support both GLSL and GL_ARB_texture_rectangle but
-        // don't actually support sampler2DRectShadow in a GLSL code.
-        // Others (like Mesa) claim to support shaders but don't actually
-        // support true linking of shaders (and declaration of functions).
-        // To test that, we compile the shader, if it fails, we don't use
-        // deph peeling
-        GLuint shader =
-          vtkgl::CreateShader(vtkgl::FRAGMENT_SHADER);
-        vtkgl::ShaderSource(
-          shader, 1,
-          const_cast<const char **>(&vtkDepthPeeling_fs), 0);
-        vtkgl::CompileShader(shader);
-        GLint params;
-        vtkgl::GetShaderiv(shader,vtkgl::COMPILE_STATUS,
-                           &params);
-        this->IsSupported = params==GL_TRUE;
-        vtkgl::DeleteShader(shader);
-        if(!this->IsSupported)
+        if (!driver_support)
           {
-          vtkDebugMacro("this OpenGL implementation does not support "
-                        "GL_ARB_texture_rectangle in GLSL code or does"
-                        "not support true linking of shaders.");
+          vtkDebugMacro(<<"buggy driver (Mesa or ATI)");
           }
         }
     }
+  vtkOpenGLClearErrorMacro();
 }
 
 // ----------------------------------------------------------------------------
@@ -731,6 +782,7 @@ void vtkDepthPeelingPass::CheckCompilation(
       vtkErrorMacro(<<"no log");
       }
     }
+  vtkOpenGLClearErrorMacro();
 }
 
 // ----------------------------------------------------------------------------
@@ -746,6 +798,8 @@ int vtkDepthPeelingPass::RenderPeel(const vtkRenderState *s,
   assert("pre: s_exists" && s!=0);
   assert("pre: positive_layer" && layer>=0);
 
+  vtkOpenGLClearErrorMacro();
+
   GLbitfield mask=GL_COLOR_BUFFER_BIT;
   if(layer>0)
     {
@@ -766,9 +820,11 @@ int vtkDepthPeelingPass::RenderPeel(const vtkRenderState *s,
     {
     if(layer==1)
       {
-    // allocate texture units.
-      vtkTextureUnitManager *m=
-        this->Prog->GetContext()->GetTextureUnitManager();
+      // allocate texture units.
+      vtkOpenGLRenderWindow *context
+        = vtkOpenGLRenderWindow::SafeDownCast(this->Prog->GetContext());
+
+      vtkTextureUnitManager *m = context->GetTextureUnitManager();
 
       // Avoid using texture unit 0 because the glBindTexture call's
       // below must specify unique active textures. If texture unit 0
@@ -828,7 +884,6 @@ int vtkDepthPeelingPass::RenderPeel(const vtkRenderState *s,
     oRenderer->SetShaderProgram(0);
     }
 
-  GLint width;
 //  vtkgl::ActiveTexture(vtkgl::TEXTURE0+this->ShadowTexUnit);
   if(layer==0)
     {
@@ -854,19 +909,8 @@ int vtkDepthPeelingPass::RenderPeel(const vtkRenderState *s,
                       vtkgl::TEXTURE_COMPARE_FUNC,
                       GL_GREATER);
 
-      // Allocate memory
-      glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,this->DepthFormat,
-                   this->ViewportWidth,this->ViewportHeight,
-                   0,GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, 0);
-      glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,
-                               GL_TEXTURE_WIDTH,&width);
-      if(width==0)
-        {
-        // not enough GPU RAM. Use alpha blending technique instead
-        glDeleteTextures(1,&transparentLayerZ);
-        this->TransparentLayerZ=0;
-        return 0;
-        }
+      // Allocate memory. It was verified above that the GPU
+      // supports a texture of this size and format.
       glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB,0,this->DepthFormat,
                    this->ViewportWidth,this->ViewportHeight, 0,
                    GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, 0);
@@ -893,19 +937,8 @@ int vtkDepthPeelingPass::RenderPeel(const vtkRenderState *s,
     glTexParameteri(vtkgl::TEXTURE_RECTANGLE_ARB,GL_TEXTURE_MAG_FILTER,
                     GL_NEAREST);
 
-    // Allocate memory
-    glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
-                 this->ViewportWidth,this->ViewportHeight,
-                 0,GL_RGBA, GL_UNSIGNED_BYTE, 0);
-    glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,
-                             GL_TEXTURE_WIDTH,&width);
-    if(width==0)
-      {
-      // not enough GPU RAM. Do alpha blending technique instead
-      glDeleteTextures(1,&rgba);
-      return 0;
-      }
-
+    // Allocate memory. It was verified above that the GPU
+    // supports a texture of this size and format.
     glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
                  this->ViewportWidth,this->ViewportHeight, 0, GL_RGBA,
                  GL_UNSIGNED_BYTE, 0);
@@ -915,10 +948,12 @@ int vtkDepthPeelingPass::RenderPeel(const vtkRenderState *s,
                         this->ViewportY,this->ViewportWidth,
                         this->ViewportHeight);
     this->LayerList->List.push_back(rgba);
+    vtkOpenGLCheckErrorMacro("failed after RenderPeel");
     return 1;
     }
   else
     {
+    vtkOpenGLCheckErrorMacro("failed after RenderPeel");
     return 0;
     }
 }
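
The hunks above replace ad-hoc checks with a uniform error-reporting pattern from vtkOpenGLError.h: clear any stale GL error on entry, then check after the GL work. A minimal sketch of that pattern, for illustration only; the helper name DrawSomething is made up, and only macros already used in this diff are assumed:

    #include "vtkOpenGL.h"      // GL headers
    #include "vtkOpenGLError.h" // vtkOpenGLClearErrorMacro, vtkOpenGLStaticCheckErrorMacro

    static void DrawSomething()
    {
      // Discard errors left by code that ran earlier so the check below
      // reports only failures raised by the calls in this function.
      vtkOpenGLClearErrorMacro();

      glClear(GL_COLOR_BUFFER_BIT); // ...the GL calls being instrumented...

      // Static variant: reports without needing a vtkObject instance.
      vtkOpenGLStaticCheckErrorMacro("failed after DrawSomething");
    }
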
diff --git a/Rendering/OpenGL/vtkDirectXGPUInfoList.cxx b/Rendering/OpenGL/vtkDirectXGPUInfoList.cxx
index 323c676..7946246 100644
--- a/Rendering/OpenGL/vtkDirectXGPUInfoList.cxx
+++ b/Rendering/OpenGL/vtkDirectXGPUInfoList.cxx
@@ -18,7 +18,7 @@
 #include "vtkGPUInfoListArray.h"
 
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 // DirectX, DXGI api
 #include <dxgi.h>
diff --git a/Rendering/OpenGL/vtkDummyGPUInfoList.cxx b/Rendering/OpenGL/vtkDummyGPUInfoList.cxx
index 71c1f20..4585b34 100644
--- a/Rendering/OpenGL/vtkDummyGPUInfoList.cxx
+++ b/Rendering/OpenGL/vtkDummyGPUInfoList.cxx
@@ -18,7 +18,7 @@
 #include "vtkGPUInfoListArray.h"
 
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkDummyGPUInfoList);
 
diff --git a/Rendering/OpenGL/vtkFrameBufferObject.cxx b/Rendering/OpenGL/vtkFrameBufferObject.cxx
index 96e8969..5e6e1e0 100644
--- a/Rendering/OpenGL/vtkFrameBufferObject.cxx
+++ b/Rendering/OpenGL/vtkFrameBufferObject.cxx
@@ -14,108 +14,134 @@
 =========================================================================*/
 #include "vtkFrameBufferObject.h"
 
-#include "vtkTextureObject.h"
 #include "vtkObjectFactory.h"
-#include "vtkOpenGLExtensionManager.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkTextureObject.h"
+#include "vtkRenderbuffer.h"
+#include "vtkPixelBufferObject.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkgl.h"
-#include <assert.h>
+
+#include <cassert>
+#include <vector>
+using std::vector;
 
 // #define VTK_FBO_DEBUG // display info on RenderQuad()
 
+//----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkFrameBufferObject);
+
 //----------------------------------------------------------------------------
 vtkFrameBufferObject::vtkFrameBufferObject()
 {
-  this->FBOIndex = 0;
+  this->ColorBuffersDirty = true;
   this->DepthBufferNeeded = true;
+  this->FBOIndex = 0;
+  this->PreviousFBOIndex = -1;
   this->DepthBuffer = 0;
-  this->NumberOfRenderTargets = 1;
   this->LastSize[0] = this->LastSize[1] = -1;
   this->SetActiveBuffer(0);
-  this->PreviousFBOIndex=-1; // -1 Bind hasn't been called yet.
+  this->NumberOfRenderTargets = 1;
 }
 
 //----------------------------------------------------------------------------
 vtkFrameBufferObject::~vtkFrameBufferObject()
 {
-  if(this->Context!=0)
-    {
-      this->DestroyFBO();
-    this->DestroyBuffers();
-    this->DestroyColorBuffers();
-    }
+  this->DestroyFBO();
+  this->DestroyDepthBuffer();
+  this->DestroyColorBuffers();
 }
 
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject::CreateFBO()
+{
+  this->FBOIndex=0;
+  GLuint temp;
+  vtkgl::GenFramebuffersEXT(1,&temp);
+  vtkOpenGLCheckErrorMacro("failed at glGenFramebuffers");
+  this->FBOIndex=temp;
+}
 
 //----------------------------------------------------------------------------
-  // Description:
-  // Returns if the context supports the required extensions.
-bool vtkFrameBufferObject::IsSupported(vtkRenderWindow *win)
+void vtkFrameBufferObject::DestroyFBO()
 {
-  // FBO passes make sense only with hardware acceleration, and Mesa has
-  // insufficient support for VTK's separate compilation units anyway.
-  if(const char* gl_version =
-     reinterpret_cast<const char*>(glGetString(GL_VERSION)))
+  // Because we don't hold a reference to the render
+  // context we don't have any control over when it is
+  // destroyed. In fact it may be destroyed before
+  // we are (e.g. with smart pointers), in which case
+  // we should do nothing.
+  if (this->Context && (this->FBOIndex!=0))
     {
-    if(strstr(gl_version, "Mesa"))
-      {
-      return false;
-      }
+    GLuint fbo=static_cast<GLuint>(this->FBOIndex);
+    vtkgl::DeleteFramebuffersEXT(1,&fbo);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteFramebuffers");
+    this->FBOIndex=0;
     }
+}
 
+//----------------------------------------------------------------------------
+bool vtkFrameBufferObject::IsSupported(vtkRenderWindow *win)
+{
   vtkOpenGLRenderWindow *renWin=vtkOpenGLRenderWindow::SafeDownCast(win);
-  if(renWin!=0)
+  if (renWin)
     {
-      vtkOpenGLExtensionManager *mgr=renWin->GetExtensionManager();
+    vtkOpenGLExtensionManager *mgr=renWin->GetExtensionManager();
 
-      bool gl12=mgr->ExtensionSupported("GL_VERSION_1_2")==1;
-      bool gl14=mgr->ExtensionSupported("GL_VERSION_1_4")==1;
-      bool gl15=mgr->ExtensionSupported("GL_VERSION_1_5")==1;
-      bool gl20=mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+    bool gl12 = mgr->ExtensionSupported("GL_VERSION_1_2")==1;
+    bool tex3D = gl12 || mgr->ExtensionSupported("GL_EXT_texture3D");
 
-      bool tex3D=gl12 || mgr->ExtensionSupported("GL_EXT_texture3D");
+    bool gl14 = mgr->ExtensionSupported("GL_VERSION_1_4")==1;
+    bool depthTex = gl14 || mgr->ExtensionSupported("GL_ARB_depth_texture")==1;
 
-      bool depthTexture24=gl14 ||
-        mgr->ExtensionSupported("GL_ARB_depth_texture");
+    bool gl20 = mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+    bool drawBufs = gl20 || mgr->ExtensionSupported("GL_ARB_draw_buffers")==1;
 
-      bool occlusion=gl15 ||
-        mgr->ExtensionSupported("GL_ARB_occlusion_query");
+    bool fbo = mgr->ExtensionSupported("GL_EXT_framebuffer_object")==1;
+    bool fboBlit = mgr->ExtensionSupported("GL_EXT_framebuffer_blit")==1;
 
-      bool drawbuffers=gl20 || mgr->ExtensionSupported("GL_ARB_draw_buffers");
+    // On Mesa 8.0.4 reporting OpenGL 1.4 with renderer
+    // "Mesa DRI Intel(R) 945GME" shader fails to compile
+    // "gl_FragData[1] = ..." with the error
+    //  0:46(15): error: array index must be < 1
 
-      bool fbo=mgr->ExtensionSupported("GL_EXT_framebuffer_object")==1;
+    // Mesa 7 with renderer "Software Rasterizer"
+    // has a bug in GL_ARB_draw_buffers that leaves the FBO
+    // perpetually incomplete.
+    bool driver
+      = !(mgr->DriverIsMesa()
+        && (mgr->DriverGLVersionIs(1,4)
+        || (mgr->DriverVersionIs(7)
+        && (mgr->DriverGLRendererIs("Software Rasterizer")
+        || mgr->DriverGLRendererIs("Mesa X11")))));
 
-      return tex3D && depthTexture24 && occlusion && drawbuffers && fbo;
+    return tex3D && depthTex && drawBufs && fbo && fboBlit && driver;
     }
   return false;
 }
 
 //----------------------------------------------------------------------------
-bool vtkFrameBufferObject::LoadRequiredExtensions(
-                                              vtkOpenGLExtensionManager*mgr)
+bool vtkFrameBufferObject::LoadRequiredExtensions(vtkRenderWindow *win)
 {
-  // Load extensions using vtkOpenGLExtensionManager
-
-  bool gl12=mgr->ExtensionSupported("GL_VERSION_1_2")==1;
-  bool gl14=mgr->ExtensionSupported("GL_VERSION_1_4")==1;
-  bool gl15=mgr->ExtensionSupported("GL_VERSION_1_5")==1;
-  bool gl20=mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+  vtkOpenGLRenderWindow *oglRenWin
+    = vtkOpenGLRenderWindow::SafeDownCast(win);
 
-  bool tex3D=gl12 || mgr->ExtensionSupported("GL_EXT_texture3D");
+  vtkOpenGLExtensionManager *mgr = oglRenWin->GetExtensionManager();
 
-  bool depthTexture24=gl14 ||
-    mgr->ExtensionSupported("GL_ARB_depth_texture");
+  bool gl12 = mgr->ExtensionSupported("GL_VERSION_1_2")==1;
+  bool tex3D = gl12 || mgr->ExtensionSupported("GL_EXT_texture3D");
 
-  bool occlusion=gl15 ||
-    mgr->ExtensionSupported("GL_ARB_occlusion_query");
+  bool gl14 = mgr->ExtensionSupported("GL_VERSION_1_4")==1;
+  bool depthTex = gl14 || mgr->ExtensionSupported("GL_ARB_depth_texture")==1;
 
-  bool drawbuffers=gl20 || mgr->ExtensionSupported("GL_ARB_draw_buffers");
+  bool gl20 = mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+  bool drawBufs = gl20 || mgr->ExtensionSupported("GL_ARB_draw_buffers");
 
-  bool fbo=mgr->ExtensionSupported("GL_EXT_framebuffer_object")==1;
+  bool fbo = mgr->ExtensionSupported("GL_EXT_framebuffer_object")==1;
+  bool fboBlit = mgr->ExtensionSupported("GL_EXT_framebuffer_blit")==1;
 
-  bool supported=tex3D && depthTexture24 && occlusion && drawbuffers && fbo;
+  bool supported = tex3D && depthTex && drawBufs && fbo && fboBlit;
 
   if(supported)
     {
@@ -137,15 +163,6 @@ bool vtkFrameBufferObject::LoadRequiredExtensions(
       mgr->LoadCorePromotedExtension("GL_ARB_depth_texture");
       }
 
-    if(gl15)
-      {
-      mgr->LoadSupportedExtension("GL_VERSION_1_5");
-      }
-    else
-      {
-      mgr->LoadCorePromotedExtension("GL_ARB_occlusion_query");
-      }
-
     if(gl20)
       {
       mgr->LoadSupportedExtension("GL_VERSION_2_0");
@@ -154,8 +171,8 @@ bool vtkFrameBufferObject::LoadRequiredExtensions(
       {
       mgr->LoadCorePromotedExtension("GL_ARB_draw_buffers");
       }
-
     mgr->LoadSupportedExtension("GL_EXT_framebuffer_object");
+    mgr->LoadSupportedExtension("GL_EXT_framebuffer_blit");
     }
 
   return supported;
@@ -164,30 +181,35 @@ bool vtkFrameBufferObject::LoadRequiredExtensions(
 //----------------------------------------------------------------------------
 void vtkFrameBufferObject::SetContext(vtkRenderWindow *renWin)
 {
-  if(this->Context==renWin)
+  // avoid pointless re-assignment
+  if (this->Context==renWin)
     {
     return;
     }
-
-  if(this->Context!=0)
+  // free previous resources
+  this->DestroyDepthBuffer();
+  this->DestroyColorBuffers();
+  this->DestroyFBO();
+  this->Context = NULL;
+  this->Modified();
+  // all done if assigned null
+  if (!renWin)
     {
-    this->DestroyFBO();
-    this->DestroyBuffers();
-    this->DestroyColorBuffers();
+    return;
     }
-
-  vtkOpenGLRenderWindow *openGLRenWin=
-    vtkOpenGLRenderWindow::SafeDownCast(renWin);
-  this->Context=openGLRenWin;
-  if(openGLRenWin!=0)
+  // check for support
+  vtkOpenGLRenderWindow *context
+    = vtkOpenGLRenderWindow::SafeDownCast(renWin);
+  if ( !context
+    || !this->LoadRequiredExtensions(renWin))
     {
-    if (!this->LoadRequiredExtensions(openGLRenWin->GetExtensionManager()))
-      {
-      this->Context=0;
-      vtkErrorMacro("Required OpenGL extensions not supported by the context.");
-      }
+    vtkErrorMacro("Context does not support the required extensions");
+    return;
     }
-  this->Modified();
+  // initialize
+  this->Context=renWin;
+  this->Context->MakeCurrent();
+  this->CreateFBO();
 }
 
 //----------------------------------------------------------------------------
@@ -201,23 +223,15 @@ bool vtkFrameBufferObject::StartNonOrtho(int width,
                                          int height,
                                          bool shaderSupportsTextureInt)
 {
-  this->Context->MakeCurrent();
-  if(this->FBOIndex==0)
-    {
-      this->CreateFBO();
-    }
-
   this->Bind();
 
-  // this->CheckFrameBufferStatus();
-
   // If width/height changed since last render, we need to resize the
   // buffers.
   if (this->LastSize[0] != width || this->LastSize[1] != height ||
     (this->DepthBuffer && !this->DepthBufferNeeded) ||
     (this->DepthBufferNeeded && !this->DepthBuffer))
     {
-    this->DestroyBuffers();
+    this->DestroyDepthBuffer();
     this->DestroyColorBuffers();
     }
 
@@ -225,27 +239,31 @@ bool vtkFrameBufferObject::StartNonOrtho(int width,
     || this->ColorBuffersDirty
     || this->DepthBufferNeeded)
     {
-    this->CreateBuffers(width, height);
-    //    this->CheckFrameBufferStatus();
-    this->CreateColorBuffers(width, height,shaderSupportsTextureInt);
+    this->CreateDepthBuffer(
+          width,
+          height,
+          vtkgl::DRAW_FRAMEBUFFER_EXT);
+
+    this->CreateColorBuffers(
+          width,
+          height,
+          vtkgl::DRAW_FRAMEBUFFER_EXT,
+          shaderSupportsTextureInt);
     }
 
   this->LastSize[0] = width;
   this->LastSize[1] = height;
 
   this->ActivateBuffers();
-  // we cannot check the FBO status before calling this->ActivateBuffers()
-  // because the draw buffer status is part of
-  // the FBO status.
-  // Note this is because we are using FBO through the EXT extension.
-  // This is not true with the ARB extension (which we are not using here
-  // as it exists only on OpenGL 3.0 drivers)
 
   GLenum status = vtkgl::CheckFramebufferStatusEXT(vtkgl::FRAMEBUFFER_EXT);
   if (status != vtkgl::FRAMEBUFFER_COMPLETE_EXT)
     {
     vtkErrorMacro("Frame buffer object was not initialized correctly.");
-    this->CheckFrameBufferStatus();
+    this->CheckFrameBufferStatus(vtkgl::FRAMEBUFFER_EXT);
+    this->DisplayFrameBufferAttachments();
+    this->DisplayDrawBuffers();
+    this->DisplayReadBuffer();
     return false;
     }
 
@@ -278,10 +296,22 @@ bool vtkFrameBufferObject::Start(int width,
   glMatrixMode(GL_MODELVIEW);
   glLoadIdentity();
   glViewport(0, 0, width, height);
+
   return true;
 }
 
 //----------------------------------------------------------------------------
+void vtkFrameBufferObject::SetActiveBuffers(int num, unsigned int indices[])
+{
+  this->ActiveBuffers.clear();
+  for (int cc=0; cc < num; cc++)
+    {
+    this->ActiveBuffers.push_back(indices[cc]);
+    }
+  this->Modified();
+}
+
+//----------------------------------------------------------------------------
 void vtkFrameBufferObject::ActivateBuffers()
 {
   GLint maxbuffers;
@@ -297,6 +327,8 @@ void vtkFrameBufferObject::ActivateBuffers()
     }
 
   vtkgl::DrawBuffers(count, buffers);
+  vtkOpenGLCheckErrorMacro("failed at glDrawBuffers");
+
   delete[] buffers;
 }
 
@@ -324,86 +356,75 @@ void vtkFrameBufferObject::UnBind()
 }
 
 //----------------------------------------------------------------------------
-void vtkFrameBufferObject::SetActiveBuffers(int num,
-                                            unsigned int indices[])
+void vtkFrameBufferObject::CreateDepthBuffer(
+        int width,
+        int height,
+        unsigned int mode)
 {
-  this->ActiveBuffers.clear();
-  for (int cc=0; cc < num; cc++)
-    {
-    this->ActiveBuffers.push_back(indices[cc]);
-    }
-  this->Modified();
-}
-
-//----------------------------------------------------------------------------
-void vtkFrameBufferObject::CreateFBO()
-{
-  this->FBOIndex=0;
-  GLuint temp;
-  vtkgl::GenFramebuffersEXT(1,&temp);
-  this->FBOIndex=temp;
-}
-
-//----------------------------------------------------------------------------
-void vtkFrameBufferObject::DestroyFBO()
-{
-  if(this->FBOIndex!=0)
-    {
-    GLuint fbo=static_cast<GLuint>(this->FBOIndex);
-    vtkgl::DeleteFramebuffersEXT(1,&fbo);
-    this->FBOIndex=0;
-    }
-}
-
-//----------------------------------------------------------------------------
-void vtkFrameBufferObject::CreateBuffers(int width, int height)
-{
-  // Create render buffers which are independent of render targets.
-  this->DestroyBuffers();
+  this->DestroyDepthBuffer();
 
   if (this->UserDepthBuffer)
     {
     // Attach the depth buffer to the FBO.
-    vtkgl::FramebufferTexture2DEXT(vtkgl::FRAMEBUFFER_EXT,
-                                   vtkgl::DEPTH_ATTACHMENT_EXT,
-                                   GL_TEXTURE_2D,
-                                   this->UserDepthBuffer->GetHandle(), 0);
+    vtkgl::FramebufferTexture2DEXT(
+          (GLenum)mode,
+          vtkgl::DEPTH_ATTACHMENT_EXT,
+          GL_TEXTURE_2D,
+          this->UserDepthBuffer->GetHandle(),
+          0);
+
+    vtkOpenGLCheckErrorMacro("failed at glFramebufferTexture2D");
     }
   else
+  if (this->DepthBufferNeeded)
     {
-    if (!this->DepthBufferNeeded)
-      {
-      return;
-      }
-
+    // Create render buffers which are independent of render targets.
     GLuint temp;
     vtkgl::GenRenderbuffersEXT(1, &temp);
+    vtkOpenGLCheckErrorMacro("failed at glGenRenderbuffers");
+
     this->DepthBuffer = temp;
     vtkgl::BindRenderbufferEXT(vtkgl::RENDERBUFFER_EXT, this->DepthBuffer);
+    vtkOpenGLCheckErrorMacro("failed at glBindRenderbuffer");
 
     // Assign storage to this depth buffer.
-    vtkgl::RenderbufferStorageEXT(vtkgl::RENDERBUFFER_EXT,
-      vtkgl::DEPTH_COMPONENT24, width, height);
+    vtkgl::RenderbufferStorageEXT(
+          vtkgl::RENDERBUFFER_EXT,
+          vtkgl::DEPTH_COMPONENT24,
+          width,
+          height);
+
+    vtkOpenGLCheckErrorMacro("failed at glRenderbufferStorage");
+
     // Attach the depth buffer to the FBO.
-    vtkgl::FramebufferRenderbufferEXT(vtkgl::FRAMEBUFFER_EXT,
-      vtkgl::DEPTH_ATTACHMENT_EXT,
-      vtkgl::RENDERBUFFER_EXT, this->DepthBuffer);
+    vtkgl::FramebufferRenderbufferEXT(
+          (GLenum)mode,
+          vtkgl::DEPTH_ATTACHMENT_EXT,
+          vtkgl::RENDERBUFFER_EXT,
+          this->DepthBuffer);
+
+    vtkOpenGLCheckErrorMacro("failed at glFramebufferRenderbuffer");
     }
 }
 
 //----------------------------------------------------------------------------
-void vtkFrameBufferObject::DestroyBuffers()
+void vtkFrameBufferObject::DestroyDepthBuffer()
 {
-  if(this->DepthBuffer!=0)
+  // Because we don't hold a reference to the render
+  // context we don't have any control over when it is
+  // destroyed. In fact it may be destroyed before
+  // we are (e.g. with smart pointers), in which case
+  // we should do nothing.
+  if(this->Context && this->DepthBuffer)
     {
     GLuint temp = static_cast<GLuint>(this->DepthBuffer);
     vtkgl::DeleteRenderbuffersEXT(1, &temp);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteRenderbuffers");
     this->DepthBuffer = 0;
     }
 }
 
 //----------------------------------------------------------------------------
-// Destroy color buffers
 void vtkFrameBufferObject::DestroyColorBuffers()
 {
   this->ColorBuffers.clear();
@@ -412,34 +433,26 @@ void vtkFrameBufferObject::DestroyColorBuffers()
 
 //----------------------------------------------------------------------------
 void vtkFrameBufferObject::CreateColorBuffers(
-  int iwidth,
-  int iheight,
-  bool shaderSupportsTextureInt)
+    int iwidth,
+    int iheight,
+    unsigned int mode,
+    bool shaderSupportsTextureInt)
 {
   unsigned int width = static_cast<unsigned int>(iwidth);
   unsigned int height = static_cast <unsigned int>(iheight);
 
+  unsigned int nUserColorBuffers
+    = static_cast<unsigned int>(this->UserColorBuffers.size());
+
   this->ColorBuffers.resize(this->NumberOfRenderTargets);
   unsigned int cc;
-  for (cc=0;
-       cc < this->NumberOfRenderTargets && cc < this->UserColorBuffers.size();
-       cc++)
+  for (cc=0; cc<this->NumberOfRenderTargets && cc<nUserColorBuffers; cc++)
     {
     vtkTextureObject *userBuffer=this->UserColorBuffers[cc];
     if (userBuffer)
       {
-      if (userBuffer->GetNumberOfDimensions() != 2)
-        {
-        vtkWarningMacro("Skipping color buffer at index " << cc
-          << " due to dimension mismatch.");
-        continue;
-        }
-      if (userBuffer->GetWidth() != width || userBuffer->GetHeight() != height)
-        {
-        vtkWarningMacro("Skipping color buffer at index " << cc
-          << " due to size mismatch.");
-        continue;
-        }
+      assert(userBuffer->GetWidth()==width);
+      assert(userBuffer->GetHeight()==height);
       this->ColorBuffers[cc] = this->UserColorBuffers[cc];
       }
     }
@@ -449,89 +462,103 @@ void vtkFrameBufferObject::CreateColorBuffers(
     vtkSmartPointer<vtkTextureObject> colorBuffer = this->ColorBuffers[cc];
     if (!colorBuffer)
       {
+      // create a new color buffer for the user.
       colorBuffer = vtkSmartPointer<vtkTextureObject>::New();
       colorBuffer->SetContext(this->Context);
       colorBuffer->SetMinificationFilter(vtkTextureObject::Nearest);
       colorBuffer->SetLinearMagnification(false);
       colorBuffer->SetWrapS(vtkTextureObject::Clamp);
       colorBuffer->SetWrapT(vtkTextureObject::Clamp);
-      if (!colorBuffer->Create2D(width, height, 4, VTK_UNSIGNED_CHAR,
-            shaderSupportsTextureInt))
+      if (!colorBuffer->Create2D(
+                width,
+                height,
+                4,
+                VTK_UNSIGNED_CHAR,
+                shaderSupportsTextureInt))
         {
         vtkErrorMacro("Failed to create texture for color buffer.");
         return;
         }
       }
-    //    colorBuffer->Bind(); // useless and actually error-prone.
+
+    // attach the buffer
     if (colorBuffer->GetNumberOfDimensions() == 2)
       {
-      vtkgl::FramebufferTexture2DEXT(vtkgl::FRAMEBUFFER_EXT,
-        vtkgl::COLOR_ATTACHMENT0_EXT+cc,
-        GL_TEXTURE_2D, colorBuffer->GetHandle(), 0);
-      vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+      vtkgl::FramebufferTexture2DEXT(
+            (GLenum)mode,
+            vtkgl::COLOR_ATTACHMENT0_EXT+cc,
+            GL_TEXTURE_2D,
+            colorBuffer->GetHandle(),
+            0);
+
+      vtkOpenGLCheckErrorMacro("failed at glFramebufferTexture2D");
       }
-    else if (colorBuffer->GetNumberOfDimensions() == 3)
+    else
+    if (colorBuffer->GetNumberOfDimensions() == 3)
       {
-      unsigned int zSlice = this->UserZSlices[cc];
-      if (zSlice >= static_cast<unsigned int>(colorBuffer->GetDepth()))
-        {
-        vtkErrorMacro("Invalid zSlice " << zSlice << ". Using 0.");
-        zSlice = 0;
-        }
-      vtkgl::FramebufferTexture3DEXT(vtkgl::FRAMEBUFFER_EXT,
-        vtkgl::COLOR_ATTACHMENT0_EXT+cc,
-        vtkgl::TEXTURE_3D, colorBuffer->GetHandle(), 0, zSlice);
-      vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+      assert(this->UserZSlices[cc]<colorBuffer->GetDepth());
+      vtkgl::FramebufferTexture3DEXT(
+            (GLenum)mode,
+            vtkgl::COLOR_ATTACHMENT0_EXT+cc,
+            vtkgl::TEXTURE_3D,
+            colorBuffer->GetHandle(),
+            0,
+            this->UserZSlices[cc]);
+
+      vtkOpenGLCheckErrorMacro("failed at glFramebufferTexture3D");
       }
     this->ColorBuffers[cc] = colorBuffer;
     }
 
+  // unbind the remainder
   unsigned int attachments=this->GetMaximumNumberOfRenderTargets();
   while(cc<attachments)
     {
-    vtkgl::FramebufferRenderbufferEXT(vtkgl::FRAMEBUFFER_EXT,
-                                      vtkgl::COLOR_ATTACHMENT0_EXT+cc,
-                                      vtkgl::RENDERBUFFER_EXT,0);
+    vtkgl::FramebufferRenderbufferEXT(
+          (GLenum)mode,
+          vtkgl::COLOR_ATTACHMENT0_EXT+cc,
+          vtkgl::RENDERBUFFER_EXT,
+          0);
+
+    vtkOpenGLCheckErrorMacro("failed at glFramebufferRenderbuffer");
     ++cc;
     }
+
+  // color buffers are allocated and attached
   this->ColorBuffersDirty = false;
 }
 
+
 //----------------------------------------------------------------------------
 unsigned int vtkFrameBufferObject::GetMaximumNumberOfActiveTargets()
 {
-  if (!this->Context)
+  unsigned int result = 0;
+  if (this->Context)
     {
-    return 0;
+    GLint maxbuffers;
+    glGetIntegerv(vtkgl::MAX_DRAW_BUFFERS, &maxbuffers);
+    result = static_cast<unsigned int>(maxbuffers);
     }
-  GLint maxbuffers;
-  glGetIntegerv(vtkgl::MAX_DRAW_BUFFERS, &maxbuffers);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
-  return static_cast<unsigned int>(maxbuffers);
+  return result;
 }
 
 //----------------------------------------------------------------------------
 unsigned int vtkFrameBufferObject::GetMaximumNumberOfRenderTargets()
 {
-  if (!this->Context)
+  unsigned int result = 0;
+  if (this->Context)
     {
-    return 0;
+    GLint maxColorAttachments;
+    glGetIntegerv(vtkgl::MAX_COLOR_ATTACHMENTS_EXT,&maxColorAttachments);
+    result = static_cast<unsigned int>(maxColorAttachments);
     }
-
-  GLint maxColorAttachments;
-  glGetIntegerv(vtkgl::MAX_COLOR_ATTACHMENTS_EXT,&maxColorAttachments);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
-  return static_cast<unsigned int>(maxColorAttachments);
+  return result;
 }
 
 //----------------------------------------------------------------------------
 void vtkFrameBufferObject::SetNumberOfRenderTargets(unsigned int num)
 {
-  if (num == 0)
-    {
-    vtkErrorMacro("NumberOfRenderTargets must be >= 1");
-    return;
-    }
+  assert(num>0);
   this->NumberOfRenderTargets = num;
   this->ColorBuffersDirty = true;
 }
@@ -553,15 +580,16 @@ void vtkFrameBufferObject::RemoveDepthBuffer()
 }
 
 //----------------------------------------------------------------------------
-void vtkFrameBufferObject::SetColorBuffer(unsigned int index,
-  vtkTextureObject* tex, unsigned int zslice/*=0*/)
+void vtkFrameBufferObject::SetColorBuffer(
+        unsigned int index,
+        vtkTextureObject* tex,
+        unsigned int zslice/*=0*/)
 {
   if (this->UserColorBuffers.size() <= index)
     {
     this->UserColorBuffers.resize(index+1);
     this->UserZSlices.resize(index+1);
     }
-
   if (this->UserColorBuffers[index] != tex ||
     this->UserZSlices[index] != zslice)
     {
@@ -574,11 +602,8 @@ void vtkFrameBufferObject::SetColorBuffer(unsigned int index,
 //----------------------------------------------------------------------------
 vtkTextureObject* vtkFrameBufferObject::GetColorBuffer(unsigned int index)
 {
-  if (this->UserColorBuffers.size() > index)
-    {
-    return this->UserColorBuffers[index];
-    }
-  return 0;
+  assert(this->UserColorBuffers.size()>index);
+  return this->UserColorBuffers[index];
 }
 
 //----------------------------------------------------------------------------
@@ -602,59 +627,12 @@ void vtkFrameBufferObject::RemoveAllColorBuffers()
 
 // ----------------------------------------------------------------------------
 // Description:
-// Display the status of the current framebuffer on the standard output.
-void vtkFrameBufferObject::CheckFrameBufferStatus()
-{
-  GLenum status;
-  status = vtkgl::CheckFramebufferStatusEXT(vtkgl::FRAMEBUFFER_EXT);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
-  switch(status)
-    {
-    case 0:
-      cout << "call to vtkgl::CheckFramebufferStatusEXT generates an error."
-           << endl;
-      break;
-    case vtkgl::FRAMEBUFFER_COMPLETE_EXT:
-//      cout<<"framebuffer is complete"<<endl;
-      break;
-    case vtkgl::FRAMEBUFFER_UNSUPPORTED_EXT:
-      cout << "framebuffer is unsupported" << endl;
-      break;
-    case vtkgl::FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT:
-      cout << "framebuffer has an attachment error"<<endl;
-      break;
-    case vtkgl::FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT:
-      cout << "framebuffer has a missing attachment"<<endl;
-      break;
-    case vtkgl::FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT:
-      cout << "framebuffer has bad dimensions"<<endl;
-      break;
-    case vtkgl::FRAMEBUFFER_INCOMPLETE_FORMATS_EXT:
-      cout << "framebuffer has bad formats"<<endl;
-      break;
-    case vtkgl::FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT:
-      cout << "framebuffer has bad draw buffer"<<endl;
-      break;
-    case vtkgl::FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT:
-      cout << "framebuffer has bad read buffer"<<endl;
-      break;
-    default:
-      cout << "Unknown framebuffer status=0x" << hex<< status << dec << endl;
-    }
-  // DO NOT REMOVE THE FOLLOWING COMMENTED LINES. FOR DEBUGGING PURPOSE.
-  this->DisplayFrameBufferAttachments();
-  this->DisplayDrawBuffers();
-  this->DisplayReadBuffer();
-}
-
-// ----------------------------------------------------------------------------
-// Description:
 // Display all the attachments of the current framebuffer object.
 void vtkFrameBufferObject::DisplayFrameBufferAttachments()
 {
   GLint framebufferBinding;
   glGetIntegerv(vtkgl::FRAMEBUFFER_BINDING_EXT,&framebufferBinding);
-  vtkGraphicErrorMacro(this->Context,"after getting FRAMEBUFFER_BINDING_EXT");
+  vtkOpenGLCheckErrorMacro("after getting FRAMEBUFFER_BINDING_EXT");
   if(framebufferBinding==0)
     {
     cout<<"Current framebuffer is bind to the system one"<<endl;
@@ -666,7 +644,7 @@ void vtkFrameBufferObject::DisplayFrameBufferAttachments()
 
     GLint maxColorAttachments;
     glGetIntegerv(vtkgl::MAX_COLOR_ATTACHMENTS_EXT,&maxColorAttachments);
-    vtkGraphicErrorMacro(this->Context,"after getting MAX_COLOR_ATTACHMENTS_EXT");
+    vtkOpenGLCheckErrorMacro("after getting MAX_COLOR_ATTACHMENTS_EXT");
     int i=0;
     while(i<maxColorAttachments)
       {
@@ -694,7 +672,7 @@ void vtkFrameBufferObject::DisplayFrameBufferAttachment(
     vtkgl::FRAMEBUFFER_EXT,attachment,
     vtkgl::FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT,&params);
 
-  vtkGraphicErrorMacro(this->Context,"after getting FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT");
+  vtkOpenGLCheckErrorMacro("after getting FRAMEBUFFER_ATTACHMENT_OBJECT_TYPE_EXT");
 
   switch(params)
     {
@@ -705,17 +683,17 @@ void vtkFrameBufferObject::DisplayFrameBufferAttachment(
       vtkgl::GetFramebufferAttachmentParameterivEXT(
         vtkgl::FRAMEBUFFER_EXT,attachment,
         vtkgl::FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT,&params);
-       vtkGraphicErrorMacro(this->Context,"after getting FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT");
+       vtkOpenGLCheckErrorMacro("after getting FRAMEBUFFER_ATTACHMENT_OBJECT_NAME_EXT");
       cout<<" this attachment is a texture with name: "<<params<<endl;
       vtkgl::GetFramebufferAttachmentParameterivEXT(
         vtkgl::FRAMEBUFFER_EXT,attachment,
         vtkgl::FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT,&params);
-      vtkGraphicErrorMacro(this->Context,"after getting FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT");
+      vtkOpenGLCheckErrorMacro("after getting FRAMEBUFFER_ATTACHMENT_TEXTURE_LEVEL_EXT");
       cout<<" its mipmap level is: "<<params<<endl;
       vtkgl::GetFramebufferAttachmentParameterivEXT(
         vtkgl::FRAMEBUFFER_EXT,attachment,
         vtkgl::FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT,&params);
-      vtkGraphicErrorMacro(this->Context,"after getting FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT");
+      vtkOpenGLCheckErrorMacro("after getting FRAMEBUFFER_ATTACHMENT_TEXTURE_CUBE_MAP_FACE_EXT");
       if(params==0)
         {
         cout<<" this is not a cube map texture."<<endl;
@@ -729,7 +707,7 @@ void vtkFrameBufferObject::DisplayFrameBufferAttachment(
          vtkgl::FRAMEBUFFER_EXT,attachment,
          vtkgl::FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT,&params);
 
-       vtkGraphicErrorMacro(this->Context,"after getting FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT");
+       vtkOpenGLCheckErrorMacro("after getting FRAMEBUFFER_ATTACHMENT_TEXTURE_3D_ZOFFSET_EXT");
       if(params==0)
         {
         cout<<" this is not 3D texture."<<endl;
@@ -767,7 +745,7 @@ void vtkFrameBufferObject::DisplayFrameBufferAttachment(
         &params);
 //      this->PrintError("after getting RENDERBUFFER_INTERNAL_FORMAT_EXT");
 
-      cout<<" renderbuffer internal format=0x"<< hex<<params<<dec<<endl;
+      cout<<" renderbuffer internal format=0x"<< std::hex<<params<<std::dec<<endl;
 
       vtkgl::GetRenderbufferParameterivEXT(vtkgl::RENDERBUFFER_EXT,
                                            vtkgl::RENDERBUFFER_RED_SIZE_EXT,
@@ -883,8 +861,8 @@ void vtkFrameBufferObject::DisplayBuffer(int value)
       else
         {
         cout << "invalid aux buffer: " << b << ", upper limit is "
-             << (ivalue-1) << ", raw value is 0x" << hex << (GL_AUX0+b)
-             << dec;
+             << (ivalue-1) << ", raw value is 0x" << std::hex << (GL_AUX0+b)
+             << std::dec;
         }
       }
     else
@@ -922,7 +900,7 @@ void vtkFrameBufferObject::DisplayBuffer(int value)
           cout << "GL_FRONT_AND_BACK";
           break;
         default:
-          cout << "unknown 0x" << hex << value << dec;
+          cout << "unknown 0x" << std::hex << value << std::dec;
           break;
         }
       }
@@ -930,10 +908,7 @@ void vtkFrameBufferObject::DisplayBuffer(int value)
 }
 
 // ---------------------------------------------------------------------------
-void vtkFrameBufferObject::RenderQuad(int minX,
-                                         int maxX,
-                                         int minY,
-                                         int maxY)
+void vtkFrameBufferObject::RenderQuad(int minX, int maxX, int minY, int maxY)
 {
   assert("pre positive_minX" && minX>=0);
   assert("pre increasing_x" && minX<=maxX);
@@ -997,3 +972,54 @@ void vtkFrameBufferObject::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "NumberOfRenderTargets:" << this->NumberOfRenderTargets
      << endl;
 }
+
+// Description:
+// Common switch for parsing fbo status return.
+#define vtkFBOStrErrorMacro(status, str, ok) \
+  ok = false; \
+  switch(status) \
+    { \
+    case vtkgl::FRAMEBUFFER_COMPLETE_EXT: \
+      str = "FBO complete"; \
+      ok = true; \
+      break; \
+    case vtkgl::FRAMEBUFFER_UNSUPPORTED_EXT: \
+      str = "FRAMEBUFFER_UNSUPPORTED"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_ATTACHMENT"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_DIMENSIONS"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_FORMATS_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_FORMATS"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_READ_BUFFER"; \
+      break; \
+    default: \
+      str = "Unknown status"; \
+    }
+
+// ----------------------------------------------------------------------------
+int vtkFrameBufferObject::CheckFrameBufferStatus(unsigned int mode)
+{
+  bool ok;
+  const char *desc = "error";
+  GLenum status = vtkgl::CheckFramebufferStatusEXT((GLenum)mode);
+  vtkOpenGLCheckErrorMacro("failed at glCheckFramebufferStatus");
+  vtkFBOStrErrorMacro(status, desc, ok);
+  if (!ok)
+    {
+    vtkErrorMacro("The framebuffer is incomplete : " << desc);
+    return 0;
+    }
+  return 1;
+}
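
For reference, a short sketch (not part of the patch) of how the now-public CheckFrameBufferStatus(mode) is meant to be called after attachments change, mirroring the call StartNonOrtho() makes above; the ValidateFBO helper is made up:

    #include "vtkFrameBufferObject.h"
    #include "vtkgl.h" // vtkgl::FRAMEBUFFER_EXT

    // Returns true when the currently attached buffers form a complete
    // framebuffer; on failure the method has already reported the reason.
    static bool ValidateFBO(vtkFrameBufferObject *fbo)
    {
      fbo->Bind();
      bool complete = fbo->CheckFrameBufferStatus(vtkgl::FRAMEBUFFER_EXT) == 1;
      fbo->UnBind();
      return complete;
    }
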
diff --git a/Rendering/OpenGL/vtkFrameBufferObject.h b/Rendering/OpenGL/vtkFrameBufferObject.h
index 92c44ba..101fb4f 100644
--- a/Rendering/OpenGL/vtkFrameBufferObject.h
+++ b/Rendering/OpenGL/vtkFrameBufferObject.h
@@ -17,13 +17,16 @@
 // .SECTION Description
 // Encapsulates an OpenGL Frame Buffer Object.
 // For use by vtkOpenGLFBORenderWindow, not to be used directly.
+// Use vtkFrameBufferObject2 instead.
 // .SECTION Caveats
 // DON'T PLAY WITH IT YET.
+// .SECTION See Also
+// vtkFrameBufferObject2, vtkRenderbufferObject
 #ifndef __vtkFrameBufferObject_h
 #define __vtkFrameBufferObject_h
 
-#include "vtkRenderingOpenGLModule.h" // For export macro
 #include "vtkObject.h"
+#include "vtkRenderingOpenGLModule.h" // For export macro
 #include "vtkSmartPointer.h" // needed for vtkSmartPointer.
 #include "vtkWeakPointer.h" // needed for vtkWeakPointer.
 //BTX
@@ -32,7 +35,10 @@
 
 class vtkRenderWindow;
 class vtkTextureObject;
+class vtkRenderbuffer;
+class vtkPixelBufferObject;
 class vtkOpenGLExtensionManager;
+class vtkOpenGLRenderWindow;
 
 class VTKRENDERINGOPENGL_EXPORT vtkFrameBufferObject : public vtkObject
 {
@@ -58,12 +64,8 @@ public:
   // Note that this does not clear the render buffers i.e. no glClear() calls
   // are made by either of these methods. It's up to the caller to clear the
   // buffers if needed.
-  bool Start(int width,
-             int height,
-             bool shaderSupportsTextureInt);
-  bool StartNonOrtho(int width,
-                     int height,
-                     bool shaderSupportsTextureInt);
+  bool Start(int width, int height, bool shaderSupportsTextureInt);
+  bool StartNonOrtho(int width, int height, bool shaderSupportsTextureInt);
 
   // Description:
   // Renders a quad at the given location with pixel coordinates. This method
@@ -74,44 +76,46 @@ public:
   // \pre positive_minY: minY>=0
   // \pre increasing_y: minY<=maxY
   // \pre valid_maxY: maxY<LastSize[1]
-  void RenderQuad(int minX,
-                  int maxX,
-                  int minY,
-                  int maxY);
+  void RenderQuad(int minX, int maxX, int minY, int maxY);
 
   // Description:
-  // Save the current framebuffer and make the frame buffer active.
-  // Multiple calls to Bind has no effect.
+  // Make the draw frame buffer active (uses FRAMEBUFFER).
   void Bind();
 
   // Description:
-  // Restore the framebuffer saved with the call to Bind().
-  // Multiple calls to UnBind has no effect.
+  // Restore the previous draw framebuffer if saved, else
+  // bind the default buffer.
   void UnBind();
 
   // Description:
-  // Choose the buffer to render into.
+  // Choose the buffers to render into.
   void SetActiveBuffer(unsigned int index)
     {
-      this->SetActiveBuffers(1, &index);
+    this->SetActiveBuffers(1, &index);
     }
 
   // Description:
-  // Choose the buffer to render into.
-  // This is available only if the GL_ARB_draw_buffers extension is supported
-  // by the card.
-  void SetActiveBuffers(int numbuffers,
-                        unsigned int indices[]);
+  // User provided color buffers are attached by index
+  // to color attachments. This command lets you select which
+  // attachments are written to. See SetColorBuffer.
+  // This call overwrites the previous list of active
+  // buffers.
+  void SetActiveBuffers(int numbuffers, unsigned int indices[]);
 
+  // Description:
+  // Insert a color buffer into the list of available color buffers.
+  // 0 to NumberOfRenderTargets of these are attached to color attachments
+  // by index. See SetActiveBuffers to select them for writing.
   // All user specified texture objects must match the FBO dimensions
   // and must have been created by the time Start() gets called.
   // If texture is a 3D texture, zslice identifies the zslice that will be
   // attached to the color buffer.
   // .SECTION Caveat
   // Currently, 1D textures are not supported.
-  void SetColorBuffer(unsigned int index,
-                      vtkTextureObject *texture,
-                      unsigned int zslice=0);
+  void SetColorBuffer(
+        unsigned int index,
+        vtkTextureObject *texture,
+        unsigned int zslice=0);
 
   vtkTextureObject *GetColorBuffer(unsigned int index);
   void RemoveColorBuffer(unsigned int index);
@@ -130,6 +134,10 @@ public:
 
   // Description:
   // Set/Get the number of render targets to render into at once.
+  // Textures (user supplied or generated internally) are attached
+  // to color attachments 0 to NumberOfRenderTargets. You can use
+  // SetActiveBuffer to specify which of these are actually written to.
+  // If zero then all of the user provided color buffers are used.
   void SetNumberOfRenderTargets(unsigned int);
   vtkGetMacro(NumberOfRenderTargets,unsigned int);
 
@@ -151,13 +159,48 @@ public:
 
   // Description:
   // Returns if the context supports the required extensions.
+  // Extensions will be loaded when the context is set.
   static bool IsSupported(vtkRenderWindow *renWin);
 
+  // Description:
+  // Validate the current FBO configuration (attachments, formats, etc.)
+  // and print any detected errors via vtkErrorMacro.
+  int CheckFrameBufferStatus(unsigned int mode);
+
 //BTX
 protected:
   // Description:
-  // Display the status of the current framebuffer on the standard output.
-  void CheckFrameBufferStatus();
+  // Load all necessary extensions.
+  static
+  bool LoadRequiredExtensions(vtkRenderWindow *renWin);
+
+  // gen buffer (occurs when context is set)
+  void CreateFBO();
+
+  // delete buffer (occurs during destruction or context switch)
+  void DestroyFBO();
+
+  // create texture or renderbuffer and attach
+  // if user provided a texture just use that
+  // mode specifies DRAW or READ
+  void CreateDepthBuffer(int width, int height, unsigned int mode);
+
+  // create textures for each target and attach them;
+  // if the user provided textures use those. If the user
+  // provides any then they need to provide them all.
+  // mode specifies DRAW or READ
+  void CreateColorBuffers(
+        int width,
+        int height,
+        unsigned int mode,
+        bool shaderSupportsTextureInt);
+
+  // detach and delete our reference(s)
+  void DestroyDepthBuffer();
+  void DestroyColorBuffers();
+
+  // glDrawBuffers
+  void ActivateBuffers();
 
   // Description:
   // Display all the attachments of the current framebuffer object.
@@ -187,38 +230,17 @@ protected:
   bool DepthBufferNeeded;
   bool ColorBuffersDirty;
   unsigned int FBOIndex;
-  int PreviousFBOIndex; // -1: no previous FBO
+  int PreviousFBOIndex;
   unsigned int DepthBuffer;
-
   unsigned int NumberOfRenderTargets;
-  // TODO: add support for stencil buffer.
-
   int LastSize[2];
-
-  void CreateFBO();
-  void DestroyFBO();
-  void Create(int width,
-              int height);
-  void CreateBuffers(int width,
-                     int height);
-  void CreateColorBuffers(int width,
-                          int height,
-                          bool shaderSupportsTextureInt);
-  void Destroy();
-  void DestroyBuffers();
-  void DestroyColorBuffers();
-  void ActivateBuffers();
-
-  // Description:
-  // Load all necessary extensions.
-  bool LoadRequiredExtensions(vtkOpenGLExtensionManager *manager);
-
   std::vector<unsigned int> UserZSlices;
   std::vector<vtkSmartPointer<vtkTextureObject> > UserColorBuffers;
   std::vector<vtkSmartPointer<vtkTextureObject> > ColorBuffers;
   std::vector<unsigned int> ActiveBuffers;
   vtkSmartPointer<vtkTextureObject> UserDepthBuffer;
   bool DepthBufferDirty;
+
 private:
   vtkFrameBufferObject(const vtkFrameBufferObject&); // Not implemented.
   void operator=(const vtkFrameBufferObject&); // Not implemented.
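
Taken together, the reworked class declared above is driven roughly as follows. A minimal render-to-texture sketch, for illustration only; it uses only methods shown in this diff, and renWin, width and height are assumed inputs:

    #include "vtkFrameBufferObject.h"
    #include "vtkOpenGLRenderWindow.h"
    #include "vtkSmartPointer.h"
    #include "vtkTextureObject.h"
    #include "vtkType.h" // VTK_UNSIGNED_CHAR

    void RenderToTexture(vtkOpenGLRenderWindow *renWin, int width, int height)
    {
      if (!vtkFrameBufferObject::IsSupported(renWin))
        {
        return; // fall back to an on-screen path
        }

      // Color target the FBO writes into; mirrors what CreateColorBuffers()
      // builds internally when the user supplies no texture.
      vtkSmartPointer<vtkTextureObject> color =
        vtkSmartPointer<vtkTextureObject>::New();
      color->SetContext(renWin);
      color->Create2D(width, height, 4, VTK_UNSIGNED_CHAR, false);

      vtkSmartPointer<vtkFrameBufferObject> fbo =
        vtkSmartPointer<vtkFrameBufferObject>::New();
      fbo->SetContext(renWin);               // loads extensions, creates the FBO
      fbo->SetNumberOfRenderTargets(1);
      fbo->SetColorBuffer(0, color);
      fbo->SetActiveBuffer(0);

      if (fbo->Start(width, height, false))  // binds, attaches, validates
        {
        // ...issue GL draw calls here; the output lands in "color"...
        fbo->UnBind();
        }
    }
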
diff --git a/Rendering/OpenGL/vtkFrameBufferObject2.cxx b/Rendering/OpenGL/vtkFrameBufferObject2.cxx
new file mode 100644
index 0000000..84a5ca4
--- /dev/null
+++ b/Rendering/OpenGL/vtkFrameBufferObject2.cxx
@@ -0,0 +1,728 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkFrameBufferObject2.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkFrameBufferObject2.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkTextureObject.h"
+#include "vtkRenderbuffer.h"
+#include "vtkPixelBufferObject.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLError.h"
+
+#include "vtkgl.h"
+
+#include <cassert>
+#include <vector>
+using std::vector;
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkFrameBufferObject2);
+
+//----------------------------------------------------------------------------
+vtkFrameBufferObject2::vtkFrameBufferObject2()
+{
+  this->FBOIndex = 0;
+  this->PreviousDrawFBO = 0;
+  this->PreviousReadFBO = 0;
+  this->PreviousDrawBuffer = GL_NONE;
+  this->PreviousReadBuffer = GL_NONE;
+}
+
+//----------------------------------------------------------------------------
+vtkFrameBufferObject2::~vtkFrameBufferObject2()
+{
+  this->DestroyFBO();
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::CreateFBO()
+{
+  this->FBOIndex=0;
+  GLuint temp;
+  vtkgl::GenFramebuffersEXT(1,&temp);
+  vtkOpenGLCheckErrorMacro("failed at glGenFramebuffers");
+  this->FBOIndex=temp;
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::DestroyFBO()
+{
+  // Because we don't hold a reference to the render
+  // context we don't have any control over when it is
+  // destroyed. In fact it may be destroyed before
+  // we are (e.g. with smart pointers), in which case
+  // we should do nothing.
+  if (this->Context && (this->FBOIndex!=0))
+    {
+    GLuint fbo=static_cast<GLuint>(this->FBOIndex);
+    vtkgl::DeleteFramebuffersEXT(1,&fbo);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteFramebuffers");
+    this->FBOIndex=0;
+    }
+}
+
+//----------------------------------------------------------------------------
+bool vtkFrameBufferObject2::IsSupported(vtkRenderWindow *win)
+{
+  vtkOpenGLRenderWindow *renWin=vtkOpenGLRenderWindow::SafeDownCast(win);
+  if(renWin!=0)
+    {
+    vtkOpenGLExtensionManager *mgr=renWin->GetExtensionManager();
+
+    bool gl12 = mgr->ExtensionSupported("GL_VERSION_1_2")==1;
+    bool tex3D = gl12 || mgr->ExtensionSupported("GL_EXT_texture3D");
+
+    bool gl14 = mgr->ExtensionSupported("GL_VERSION_1_4")==1;
+    bool depthTex = gl14 || mgr->ExtensionSupported("GL_ARB_depth_texture")==1;
+
+    bool gl20 = mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+    bool drawBufs = gl20 || mgr->ExtensionSupported("GL_ARB_draw_buffers")==1;
+
+    bool fbo = mgr->ExtensionSupported("GL_EXT_framebuffer_object")==1;
+    bool fboBlit = mgr->ExtensionSupported("GL_EXT_framebuffer_blit")==1;
+
+    return tex3D && depthTex && drawBufs && fbo && fboBlit;
+    }
+  return false;
+}
+
+//----------------------------------------------------------------------------
+bool vtkFrameBufferObject2::LoadRequiredExtensions(vtkRenderWindow *win)
+{
+  vtkOpenGLRenderWindow *oglRenWin
+    = dynamic_cast<vtkOpenGLRenderWindow*>(win);
+
+  vtkOpenGLExtensionManager *mgr = oglRenWin->GetExtensionManager();
+
+  bool gl12 = mgr->ExtensionSupported("GL_VERSION_1_2")==1;
+  bool tex3D = gl12 || mgr->ExtensionSupported("GL_EXT_texture3D");
+
+  bool gl14 = mgr->ExtensionSupported("GL_VERSION_1_4")==1;
+  bool depthTex = gl14 || mgr->ExtensionSupported("GL_ARB_depth_texture")==1;
+
+  bool gl20 = mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+  bool drawBufs = gl20 || mgr->ExtensionSupported("GL_ARB_draw_buffers");
+
+  bool fbo = mgr->ExtensionSupported("GL_EXT_framebuffer_object")==1;
+  bool fboBlit = mgr->ExtensionSupported("GL_EXT_framebuffer_blit")==1;
+
+  bool supported = tex3D && depthTex && drawBufs && fbo && fboBlit;
+
+  if(supported)
+    {
+    if(gl12)
+      {
+      mgr->LoadSupportedExtension("GL_VERSION_1_2");
+      }
+    else
+      {
+      mgr->LoadCorePromotedExtension("GL_EXT_texture3D");
+      }
+
+    if(gl14)
+      {
+      mgr->LoadSupportedExtension("GL_VERSION_1_4");
+      }
+    else
+      {
+      mgr->LoadCorePromotedExtension("GL_ARB_depth_texture");
+      }
+
+    if(gl20)
+      {
+      mgr->LoadSupportedExtension("GL_VERSION_2_0");
+      }
+    else
+      {
+      mgr->LoadCorePromotedExtension("GL_ARB_draw_buffers");
+      }
+
+    mgr->LoadSupportedExtension("GL_EXT_framebuffer_object");
+    mgr->LoadSupportedExtension("GL_EXT_framebuffer_blit");
+    }
+
+  return supported;
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::SetContext(vtkRenderWindow *renWin)
+{
+  // avoid pointless re-assignment
+  if (this->Context==renWin)
+    {
+    return;
+    }
+  // free previous resources
+  this->DestroyFBO();
+  this->Context = NULL;
+  this->Modified();
+  // all done if assigned null
+  if (!renWin)
+    {
+    return;
+    }
+  // check for support
+  vtkOpenGLRenderWindow *context
+    = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
+  if ( !context
+    || !this->LoadRequiredExtensions(renWin))
+    {
+    vtkErrorMacro("Context does not support the required extensions");
+    return;
+    }
+  // initialize
+  this->Context=renWin;
+  this->Context->MakeCurrent();
+  this->CreateFBO();
+}
+
+//----------------------------------------------------------------------------
+vtkRenderWindow *vtkFrameBufferObject2::GetContext()
+{
+  return this->Context;
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::SaveCurrentBindings()
+{
+  glGetIntegerv(vtkgl::DRAW_FRAMEBUFFER_BINDING_EXT, (int*)&this->PreviousDrawFBO);
+  glGetIntegerv(vtkgl::READ_FRAMEBUFFER_BINDING_EXT, (int*)&this->PreviousReadFBO);
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::SaveCurrentBuffers()
+{
+  glGetIntegerv(GL_DRAW_BUFFER, (int*)&this->PreviousDrawBuffer);
+  glGetIntegerv(GL_READ_BUFFER, (int*)&this->PreviousReadBuffer);
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::RestorePreviousBuffers(unsigned int mode)
+{
+  switch((GLenum)mode)
+    {
+    case vtkgl::FRAMEBUFFER_EXT:
+      glDrawBuffer((GLenum)this->PreviousDrawBuffer);
+      vtkOpenGLCheckErrorMacro("failed at glDrawBuffer");
+
+      glReadBuffer((GLenum)this->PreviousReadBuffer);
+      vtkOpenGLCheckErrorMacro("failed at glReadBuffer");
+      break;
+
+    case vtkgl::DRAW_FRAMEBUFFER_EXT:
+      glDrawBuffer((GLenum)this->PreviousDrawBuffer);
+      vtkOpenGLCheckErrorMacro("failed at glDrawBuffer");
+      break;
+
+    case vtkgl::READ_FRAMEBUFFER_EXT:
+      glReadBuffer((GLenum)this->PreviousReadBuffer);
+      vtkOpenGLCheckErrorMacro("failed at glReadBuffer");
+      break;
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::Bind(unsigned int mode)
+{
+  assert(this->FBOIndex!=0); // need to call glGenFramebuffers first
+
+  // need to ensure that binding is established *every* time because
+  // if other code binds over us then all of our subsequent calls
+  // will affect that fbo, not ours.
+  vtkgl::BindFramebufferEXT((GLenum)mode, this->FBOIndex);
+  vtkOpenGLCheckErrorMacro("failed at glBindFramebuffer");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::UnBind(unsigned int mode)
+{
+  assert(this->FBOIndex!=0); // need to GenFramebuffers first
+
+  bool drawing
+    =  ((GLenum)mode)==vtkgl::DRAW_FRAMEBUFFER_EXT
+    || ((GLenum)mode)==vtkgl::FRAMEBUFFER_EXT;
+
+  GLuint prevFbo
+    = (drawing ? this->PreviousDrawFBO : this->PreviousReadFBO);
+
+  vtkgl::BindFramebufferEXT((GLenum)mode, prevFbo);
+  vtkOpenGLCheckErrorMacro("failed at glBindFramebuffer");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::ActivateDrawBuffers(unsigned int *ids, int num)
+{
+  assert(num<17); // a practical limit, increase if needed
+  GLenum colorAtts[16];
+  for (int i=0; i<num; ++i)
+    {
+    colorAtts[i] = vtkgl::COLOR_ATTACHMENT0 + ids[i];
+    }
+  vtkgl::DrawBuffers(num, &colorAtts[0]);
+  vtkOpenGLCheckErrorMacro("failed at glDrawBuffers");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::ActivateDrawBuffers(unsigned int num)
+{
+  assert(num<17); // a practical limit, increase if needed
+  GLenum colorAtts[16];
+  for (unsigned int i=0; i<num; ++i)
+    {
+    colorAtts[i] = vtkgl::COLOR_ATTACHMENT0 + i;
+    }
+  vtkgl::DrawBuffers(num, &colorAtts[0]);
+  vtkOpenGLCheckErrorMacro("failed at glDrawBuffers");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::DeactivateDrawBuffers()
+{
+  GLenum att = GL_NONE;
+  vtkgl::DrawBuffers(1, &att);
+  vtkOpenGLCheckErrorMacro("failed at glDrawBuffers(GL_NONE)");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::ActivateDrawBuffer(
+      unsigned int colorAtt)
+{
+  colorAtt += vtkgl::COLOR_ATTACHMENT0;
+  vtkgl::DrawBuffers(1, &colorAtt);
+  vtkOpenGLCheckErrorMacro("failed at glDrawBuffers");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::ActivateReadBuffer(
+      unsigned int colorAtt)
+{
+  colorAtt += vtkgl::COLOR_ATTACHMENT0;
+  glReadBuffer((GLenum)colorAtt);
+  vtkOpenGLCheckErrorMacro("failed at glReadBuffer");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::DeactivateReadBuffer()
+{
+  glReadBuffer(GL_NONE);
+  vtkOpenGLCheckErrorMacro("failed at glReadBuffer(GL_NONE)");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::AddTexColorAttachment(
+        unsigned int mode,
+        unsigned int i,
+        unsigned int handle)
+{
+  vtkgl::FramebufferTexture2DEXT(
+        (GLenum)mode,
+        vtkgl::COLOR_ATTACHMENT0_EXT+i,
+        GL_TEXTURE_2D,
+        handle,
+        0);
+  vtkOpenGLCheckErrorMacro("failed at glFramebufferTexture2D");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::RemoveTexColorAttachments(
+      unsigned int mode,
+      unsigned int num)
+{
+  for (unsigned int i=0; i<num; ++i)
+    {
+    this->AddTexColorAttachment(mode, i, 0U);
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::AddColorAttachment(
+        unsigned int mode,
+        unsigned int i,
+        vtkTextureObject* tex)
+{
+  unsigned int handle = (tex==NULL) ? 0 : tex->GetHandle();
+  this->AddTexColorAttachment(mode,i,handle);
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::AddRenColorAttachment(
+        unsigned int mode,
+        unsigned int i,
+        unsigned int handle)
+{
+  vtkgl::FramebufferRenderbufferEXT(
+        (GLenum)mode,
+        vtkgl::COLOR_ATTACHMENT0_EXT+i,
+        vtkgl::RENDERBUFFER,
+        handle);
+  vtkOpenGLCheckErrorMacro("failed at glFramebufferRenderbuffer");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::AddColorAttachment(
+        unsigned int mode,
+        unsigned int i,
+        vtkRenderbuffer* renbuf)
+{
+  unsigned int handle = (renbuf==NULL) ? 0 : renbuf->GetHandle();
+  this->AddRenColorAttachment(mode, i, handle);
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::RemoveRenColorAttachments(
+      unsigned int mode,
+      unsigned int num)
+{
+  for (unsigned int i=0; i<num; ++i)
+    {
+    this->AddRenColorAttachment(mode, i, 0U);
+    }
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::AddTexDepthAttachment(
+        unsigned int mode,
+        unsigned int handle)
+{
+  vtkgl::FramebufferTexture2DEXT(
+        (GLenum)mode,
+        vtkgl::DEPTH_ATTACHMENT,
+        GL_TEXTURE_2D,
+        handle,
+        0);
+  vtkOpenGLCheckErrorMacro("failed at glFramebufferTexture2D");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::AddDepthAttachment(
+        unsigned int mode,
+        vtkTextureObject* tex)
+{
+  unsigned int handle = (tex==NULL) ? 0 : tex->GetHandle();
+  this->AddTexDepthAttachment(mode,handle);
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::AddRenDepthAttachment(
+        unsigned int mode,
+        unsigned int handle)
+{
+  vtkgl::FramebufferRenderbufferEXT(
+        (GLenum)mode,
+        vtkgl::DEPTH_ATTACHMENT,
+        vtkgl::RENDERBUFFER,
+        handle);
+  vtkOpenGLCheckErrorMacro("failed at glFramebufferRenderbuffer");
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::AddDepthAttachment(
+        unsigned int mode,
+        vtkRenderbuffer* renbuf)
+{
+  unsigned int handle = (renbuf==NULL) ? 0 : renbuf->GetHandle();
+  this->AddRenDepthAttachment(mode, handle);
+}
+
+//----------------------------------------------------------------------------
+void vtkFrameBufferObject2::InitializeViewport(int width, int height)
+{
+  glDisable(GL_ALPHA_TEST);
+  glDisable(GL_BLEND);
+  glDisable(GL_DEPTH_TEST);
+  glDisable(GL_LIGHTING);
+  glDisable(GL_SCISSOR_TEST);
+
+  // Viewport transformation for 1:1 'pixel=texel=data' mapping.
+  // Note this is not enough for a 1:1 mapping, because depending on the
+  // primitive displayed (point, line, polygon), the rasterization rules
+  // are different.
+  glMatrixMode(GL_PROJECTION);
+  glLoadIdentity();
+  glOrtho(0.0, width, 0.0, height, -1, 1);
+  glMatrixMode(GL_MODELVIEW);
+  glLoadIdentity();
+  glViewport(0, 0, width, height);
+
+  vtkOpenGLStaticCheckErrorMacro("failed after InitializeViewport");
+}
+
+//----------------------------------------------------------------------------
+int vtkFrameBufferObject2::Blit(
+        int srcExt[4],
+        int destExt[4],
+        unsigned int bits,
+        unsigned int mapping)
+{
+  vtkgl::BlitFramebufferEXT(
+        (GLint)srcExt[0],
+        (GLint)srcExt[2],
+        (GLint)srcExt[1],
+        (GLint)srcExt[3],
+        (GLint)destExt[0],
+        (GLint)destExt[2],
+        (GLint)destExt[1],
+        (GLint)destExt[3],
+        (GLbitfield)bits,
+        (GLenum)mapping);
+
+  vtkOpenGLStaticCheckErrorMacro("failed at glBlitFramebuffer");
+
+  return 1;
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelBufferObject *vtkFrameBufferObject2::DownloadDepth(
+      int extent[4],
+      int vtkType)
+{
+  assert(this->Context);
+
+  return this->Download(
+      extent,
+      vtkType,
+      1,
+      this->GetOpenGLType(vtkType),
+      GL_DEPTH_COMPONENT);
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelBufferObject *vtkFrameBufferObject2::DownloadColor4(
+      int extent[4],
+      int vtkType)
+{
+  assert(this->Context);
+
+  return this->Download(
+      extent,
+      vtkType,
+      4,
+      this->GetOpenGLType(vtkType),
+      GL_RGBA);
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelBufferObject *vtkFrameBufferObject2::DownloadColor3(
+      int extent[4],
+      int vtkType)
+{
+  assert(this->Context);
+
+  return this->Download(
+      extent,
+      vtkType,
+      3,
+      this->GetOpenGLType(vtkType),
+      GL_RGB);
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelBufferObject *vtkFrameBufferObject2::DownloadColor1(
+      int extent[4],
+      int vtkType,
+      int channel)
+{
+  assert(this->Context);
+  GLenum oglChannel = 0;
+  switch (channel)
+    {
+    case 0:
+      oglChannel = GL_RED;
+      break;
+    case 1:
+      oglChannel = GL_GREEN;
+      break;
+    case 2:
+      oglChannel = GL_BLUE;
+      break;
+    default:
+      vtkErrorMacro("Inavlid channel");
+      return NULL;
+    }
+
+  return this->Download(
+      extent,
+      vtkType,
+      1,
+      this->GetOpenGLType(vtkType),
+      oglChannel);
+}
+
+//-----------------------------------------------------------------------------
+vtkPixelBufferObject *vtkFrameBufferObject2::Download(
+      int extent[4],
+      int vtkType,
+      int nComps,
+      int oglType,
+      int oglFormat)
+{
+  vtkPixelBufferObject *pbo = vtkPixelBufferObject::New();
+  pbo->SetContext(this->Context);
+
+  this->Download(
+        extent,
+        vtkType,
+        nComps,
+        oglType,
+        oglFormat,
+        pbo);
+
+  return pbo;
+}
+//-----------------------------------------------------------------------------
+void vtkFrameBufferObject2::Download(
+      int extent[4],
+      int vtkType,
+      int nComps,
+      int oglType,
+      int oglFormat,
+      vtkPixelBufferObject *pbo)
+{
+  unsigned int extentSize[2] = {
+        static_cast<unsigned int>(extent[1] - extent[0] + 1),
+        static_cast<unsigned int>(extent[3] - extent[2] + 1)
+        };
+
+  unsigned int nTups = extentSize[0]*extentSize[1];
+
+  pbo->Allocate(
+        vtkType,
+        nTups,
+        nComps,
+        vtkPixelBufferObject::PACKED_BUFFER);
+
+  pbo->Bind(vtkPixelBufferObject::PACKED_BUFFER);
+
+  glPixelStorei(GL_PACK_ALIGNMENT, 1);
+  glReadPixels(
+        extent[0],
+        extent[2],
+        extentSize[0],
+        extentSize[1],
+        oglFormat,
+        oglType,
+        NULL);
+
+  vtkOpenGLStaticCheckErrorMacro("failed at glReadPixels");
+
+  pbo->UnBind();
+}
+
+//-----------------------------------------------------------------------------
+int vtkFrameBufferObject2::GetOpenGLType(int vtkType)
+{
+  // convert vtk type to open gl type
+  int oglType = 0;
+  switch (vtkType)
+    {
+    case VTK_FLOAT:
+      oglType = GL_FLOAT;
+      break;
+    case VTK_INT:
+      oglType = GL_INT;
+      break;
+    case VTK_UNSIGNED_INT:
+      oglType = GL_UNSIGNED_INT;
+      break;
+    case VTK_CHAR:
+      oglType = GL_BYTE;
+      break;
+    case VTK_UNSIGNED_CHAR:
+      oglType = GL_UNSIGNED_BYTE;
+      break;
+    default:
+      vtkErrorMacro("Unsupported type");
+      return 0;
+    }
+  return oglType;
+}
+
+// Description:
+// Common switch for parsing fbo status return.
+#define vtkFBOStrErrorMacro(status, str, ok) \
+  ok = false; \
+  switch(status) \
+    { \
+    case vtkgl::FRAMEBUFFER_COMPLETE_EXT: \
+      str = "FBO complete"; \
+      ok = true; \
+      break; \
+    case vtkgl::FRAMEBUFFER_UNSUPPORTED_EXT: \
+      str = "FRAMEBUFFER_UNSUPPORTED"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_ATTACHMENT_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_ATTACHMENT"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_MISSING_ATTACHMENT"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_DIMENSIONS_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_DIMENSIONS"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_FORMATS_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_FORMATS"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_DRAW_BUFFER"; \
+      break; \
+    case vtkgl::FRAMEBUFFER_INCOMPLETE_READ_BUFFER_EXT: \
+      str = "FRAMEBUFFER_INCOMPLETE_READ_BUFFER"; \
+      break; \
+    default: \
+      str = "Unknown status"; \
+    }
+
+// ----------------------------------------------------------------------------
+bool vtkFrameBufferObject2::GetFrameBufferStatus(
+      unsigned int mode,
+      const char *&desc)
+{
+  bool ok;
+  GLenum status = vtkgl::CheckFramebufferStatusEXT((GLenum)mode);
+  vtkFBOStrErrorMacro(status, desc, ok);
+  return ok;
+}
+
+// ----------------------------------------------------------------------------
+int vtkFrameBufferObject2::CheckFrameBufferStatus(unsigned int mode)
+{
+  bool ok;
+  const char *desc = "error";
+  GLenum status = vtkgl::CheckFramebufferStatusEXT((GLenum)mode);
+  vtkOpenGLCheckErrorMacro("failed at glCheckFramebufferStatus");
+  vtkFBOStrErrorMacro(status, desc, ok);
+  if (!ok)
+    {
+    vtkErrorMacro("The framebuffer is incomplete : " << desc);
+    return 0;
+    }
+  return 1;
+}
+
+// ----------------------------------------------------------------------------
+void vtkFrameBufferObject2::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+
+  os
+    << indent << "Context=" << this->Context << endl
+    << indent << "FBOIndex=" << this->FBOIndex << endl
+    << indent << "PreviousDrawFBO=" << this->PreviousDrawFBO << endl
+    << indent << "PreviousReadFBO=" << this->PreviousReadFBO << endl
+    << indent << "PreviousDrawBuffer=" << this->PreviousDrawBuffer << endl
+    << indent << "PreviousReadBuffer=" << this->PreviousReadBuffer << endl
+    << endl;
+}
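
As a point of reference, the read-back path implemented above (Download, DownloadColor4, GetOpenGLType) can be driven roughly as in the following sketch. It is illustrative only: renWin, width and height stand for objects the caller already owns, and the FBO is assumed to have its attachments configured as in the class documentation in the header below.

  // Hypothetical read-back of color attachment 0 into a pixel buffer object.
  vtkFrameBufferObject2 *fbo = vtkFrameBufferObject2::New();
  fbo->SetContext(renWin);                    // renWin: an existing render window
  fbo->SaveCurrentBindings();
  fbo->Bind(vtkgl::FRAMEBUFFER_EXT);
  fbo->ActivateReadBuffer(0U);                // read from COLOR_ATTACHMENT0

  int extent[4] = {0, width - 1, 0, height - 1};  // {x0, x1, y0, y1}, as Download() expects
  vtkPixelBufferObject *pbo = fbo->DownloadColor4(extent, VTK_FLOAT);  // RGBA floats

  // ... map or transfer the PBO here ...

  pbo->Delete();                              // the caller owns the returned PBO
  fbo->DeactivateReadBuffer();
  fbo->UnBind(vtkgl::FRAMEBUFFER_EXT);
  fbo->Delete();
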
diff --git a/Rendering/OpenGL/vtkFrameBufferObject2.h b/Rendering/OpenGL/vtkFrameBufferObject2.h
new file mode 100644
index 0000000..85cc3a2
--- /dev/null
+++ b/Rendering/OpenGL/vtkFrameBufferObject2.h
@@ -0,0 +1,316 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkFrameBufferObject2.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkFrameBufferObject2 - Interface to OpenGL framebuffer object.
+// .SECTION Description
+// A light and efficient interface to an OpenGL Frame Buffer Object.
+// Use is very similar to calling OpenGL directly, but as a vtkObject
+// it may be safely stored, shared, or passed around. It supports FBO Blit
+// and transfer to Pixel Buffer Object.
+//
+// Typical use case:
+//\code{.cpp}
+// vtkFrameBufferObject2 *fbo = this->Internals->FBO;
+// fbo->SaveCurrentBindings();
+// fbo->Bind(vtkgl::FRAMEBUFFER_EXT);
+// fbo->AddDepthAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, depthBuffer);
+// fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U, colorTex1);
+// fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 1U, colorTex2);
+// fbo->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 2U, colorTex3);
+// fbo->ActivateDrawBuffers(3);
+// vtkCheckFrameBufferStatusMacro(vtkgl::FRAMEBUFFER_EXT);
+//
+// ...
+//
+// fbo->UnBind(vtkgl::FRAMEBUFFER_EXT);
+//\endcode
+//
+// .SECTION See Also
+// vtkRenderbuffer, vtkPixelBufferObject
+
+#ifndef __vtkFrameBufferObject2_h
+#define __vtkFrameBufferObject2_h
+
+#include "vtkObject.h"
+#include "vtkRenderingOpenGLModule.h" // For export macro
+#include "vtkSmartPointer.h" // needed for vtkSmartPointer.
+#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
+
+// Description:
+// A variant of vtkErrorMacro that is used to verify framebuffer
+// object completeness. It's provided so that reporting may include
+// the file and line number of the offending code. In release mode
+// the macro does nothing.
+#ifdef NDEBUG
+# define vtkCheckFrameBufferStatusMacro(mode)
+# define vtkStaticCheckFrameBufferStatusMacro(mode)
+#else
+# define vtkCheckFrameBufferStatusMacroImpl(macro, mode)           \
+{                                                                  \
+const char *eStr;                                                  \
+bool ok = vtkFrameBufferObject2::GetFrameBufferStatus(mode, eStr); \
+if (!ok)                                                           \
+  {                                                                \
+  macro(                                                           \
+    << "OpenGL ERROR. The FBO is incomplete : " << eStr);          \
+  }                                                                \
+ }
+# define vtkCheckFrameBufferStatusMacro(mode) \
+    vtkCheckFrameBufferStatusMacroImpl(vtkErrorMacro, mode)
+# define vtkStaticCheckFrameBufferStatusMacro(mode) \
+    vtkCheckFrameBufferStatusMacroImpl(vtkGenericWarningMacro, mode)
+#endif
+
+class vtkRenderWindow;
+class vtkTextureObject;
+class vtkRenderbuffer;
+class vtkPixelBufferObject;
+class vtkOpenGLExtensionManager;
+class vtkOpenGLRenderWindow;
+
+class VTKRENDERINGOPENGL_EXPORT vtkFrameBufferObject2 : public vtkObject
+{
+public:
+  static vtkFrameBufferObject2* New();
+  vtkTypeMacro(vtkFrameBufferObject2, vtkObject);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Get/Set the context. Context must be a vtkOpenGLRenderWindow.
+  // This does not increase the reference count of the
+  // context to avoid reference loops.
+  // SetContext() may raise an error if the OpenGL context does not support the
+  // required OpenGL extensions.
+  void SetContext(vtkRenderWindow *context);
+  vtkRenderWindow *GetContext();
+
+  // Description:
+  // Returns true if the context supports the required extensions.
+  // Extensions will be loaded when the context is set.
+  static bool IsSupported(vtkRenderWindow *renWin);
+
+  // Description:
+  // Bind FBO to FRAMEBUFFER,  DRAW_FRAMEBUFFER or READ_FRAMEBUFFER
+  // The current binding is not saved, nor restored. (see glBindFramebuffer)
+  // This method can be used to prepare for FBO Blit or buffer ping-pong.
+  // Low level api.
+  void Bind(unsigned int mode);
+
+  // Description:
+  // Bind saved FBO (see SaveCurrentBindings) for DRAW or READ (see glBindFramebuffer).
+  // If no bindings were saved, bind to the default FBO.
+  // Low level api.
+  void UnBind(unsigned int mode);
+
+  // Description:
+  // Store the current framebuffer bindings. If this method
+  // is called then UnBind will restore the saved value according
+  // to its mode (DRAW_FRAMEBUFFER, READ_FRAMEBUFFER, FRAMEBUFFER).
+  // Low level api
+  void SaveCurrentBindings();
+
+  // Description:
+  // Store the current draw and read buffers. When restored
+  // only the buffers matching mode are modified.
+  // DRAW_FRAMEBUFFER -> glDrawBuffer
+  // READ_FRAMEBUFFER -> glReadBuffer
+  // FRAMEBUFFER -> both
+  void SaveCurrentBuffers();
+  void RestorePreviousBuffers(unsigned int mode);
+
+  // Description:
+  // Directly assign/remove a texture to color attachments.
+  void AddColorAttachment(
+        unsigned int mode,
+        unsigned int attId,
+        vtkTextureObject* tex);
+
+  void AddTexColorAttachment(
+        unsigned int mode,
+        unsigned int attId,
+        unsigned int handle);
+
+  void RemoveTexColorAttachments(unsigned int mode, unsigned int num);
+  void RemoveTexColorAttachment(unsigned int mode, unsigned int attId)
+    { this->AddTexColorAttachment(mode, attId, 0U); }
+
+  // Description:
+  // Directly assign/remove a renderbuffer to color attachments.
+  void AddColorAttachment(
+        unsigned int mode,
+        unsigned int attId,
+        vtkRenderbuffer* tex);
+
+  void AddRenColorAttachment(
+        unsigned int mode,
+        unsigned int attId,
+        unsigned int handle);
+
+  void RemoveRenColorAttachments(unsigned int mode, unsigned int num);
+  void RemoveRenColorAttachment(unsigned int mode, unsigned int attId)
+    { this->AddRenColorAttachment(mode, attId, 0U); }
+
+  // Description:
+  // Directly assign/remove a texture/renderbuffer to depth attachments.
+  void AddDepthAttachment(unsigned int mode, vtkTextureObject* tex);
+  void AddTexDepthAttachment(unsigned int mode, unsigned int handle);
+  void RemoveTexDepthAttachment(unsigned int mode)
+    { this->AddTexDepthAttachment(mode, 0U); }
+
+  // Description:
+  // Directly assign/remove a renderbuffer to depth attachments.
+  void AddDepthAttachment(unsigned int mode, vtkRenderbuffer* tex);
+  void AddRenDepthAttachment(unsigned int mode, unsigned int handle);
+  void RemoveRenDepthAttachment(unsigned int mode)
+    { this->AddRenDepthAttachment(mode, 0U); }
+
+  // Description:
+  // Select a single specific draw or read buffer (zero based)
+  void ActivateDrawBuffer(unsigned int id);
+  void ActivateReadBuffer(unsigned int id);
+  void DeactivateReadBuffer();
+
+  // Description:
+  // Select n consecutive write attachments.
+  // Low level api.
+  void ActivateDrawBuffers(unsigned int n);
+  void ActivateDrawBuffers(unsigned int *ids, int n);
+  void DeactivateDrawBuffers();
+
+  // Description:
+  // Set up ortho viewport with scissor, lighting, blend, and depth
+  // disabled. The method affects the currently bound FBO. The method is
+  // static so that it may be used on the default FBO without an instance.
+  // Low level api.
+  static
+  void InitializeViewport(int width, int height);
+
+  // Description:
+  // Validate the current FBO configuration (attachments, formats, etc.)
+  // and print detected errors via vtkErrorMacro.
+  // Low level api.
+  int CheckFrameBufferStatus(unsigned int mode);
+
+  // Description:
+  // Validate the current FBO configuration (attachments, formats, etc.)
+  // and return false if the FBO is incomplete. Assigns desc a string
+  // literal describing the status.
+  // Low level api.
+  static
+  bool GetFrameBufferStatus(
+        unsigned int mode,
+        const char *&desc);
+
+  // Description:
+  // Copy from the currently bound READ FBO to the currently
+  // bound DRAW FBO. The method is static so that one doesn't
+  // need to create an instance when transferring between attachments
+  // in the default FBO.
+  static
+  int Blit(
+        int srcExt[4],
+        int destExt[4],
+        unsigned int bits,
+        unsigned int mapping);
+
+  // Description:
+  // Download data from the read color attachment of the currently
+  // bound FBO into the returned PBO. The PBO must be Delete'd when
+  // you are finished with it. The number of components in the
+  // PBO is the same as in the name of the specific download function.
+  // When downloading a single color channel, the channel must be
+  // identified by index: 0->red, 1->green, 2->blue.
+  vtkPixelBufferObject *DownloadColor1(
+        int extent[4],
+        int vtkType,
+        int channel);
+
+  vtkPixelBufferObject *DownloadColor3(
+        int extent[4],
+        int vtkType);
+
+  vtkPixelBufferObject *DownloadColor4(
+        int extent[4],
+        int vtkType);
+
+  // Description:
+  // Download data from the depth attachment of the currently
+  // bound FBO. The returned PBO must be Delete'd by the caller.
+  // The returned PBO has one component.
+  vtkPixelBufferObject *DownloadDepth(
+        int extent[4],
+        int vtkType);
+
+  // Description:
+  // Download data from the read buffer of the current FBO. These
+  // are low level methods. In the static variant a PBO must be
+  // passed in since we don't have access to a context. The static
+  // method is provided so that one may download from the default
+  // FBO.
+  vtkPixelBufferObject *Download(
+        int extent[4],
+        int vtkType,
+        int nComps,
+        int oglType,
+        int oglFormat);
+
+  static
+  void Download(
+        int extent[4],
+        int vtkType,
+        int nComps,
+        int oglType,
+        int oglFormat,
+        vtkPixelBufferObject *pbo);
+
+//BTX
+protected:
+  // Description:
+  // Load all necessary extensions.
+  static
+  bool LoadRequiredExtensions(vtkRenderWindow *renWin);
+
+  // gen buffer (occurs when context is set)
+  void CreateFBO();
+
+  // delete buffer (occurs during destruction or context switch)
+  void DestroyFBO();
+
+
+  // Description:
+  // Given a vtk type get a compatible open gl type.
+  int GetOpenGLType(int vtkType);
+
+  vtkFrameBufferObject2();
+  ~vtkFrameBufferObject2();
+
+  vtkWeakPointer<vtkRenderWindow> Context;
+
+  unsigned int FBOIndex;
+  unsigned int PreviousDrawFBO;
+  unsigned int PreviousReadFBO;
+  unsigned int DepthBuffer;
+  unsigned int PreviousDrawBuffer;
+  unsigned int PreviousReadBuffer;
+
+private:
+  vtkFrameBufferObject2(const vtkFrameBufferObject2&); // Not implemented.
+  void operator=(const vtkFrameBufferObject2&); // Not implemented.
+
+  friend class vtkRenderbuffer; // needs access to LoadRequiredExtensions
+//ETX
+};
+
+#endif
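
The Blit() and status-check declarations above suggest the following sequence. This is a minimal sketch: srcFbo and dstFbo are assumed to be fully populated vtkFrameBufferObject2 instances owned by the caller, and the vtkgl READ_/DRAW_FRAMEBUFFER_EXT enums are assumed to be available as the per-method comments indicate.

  // Hypothetical copy of color attachment 0 from srcFbo to dstFbo using the
  // static Blit() declared above.
  srcFbo->SaveCurrentBindings();
  dstFbo->SaveCurrentBindings();
  srcFbo->Bind(vtkgl::READ_FRAMEBUFFER_EXT);
  dstFbo->Bind(vtkgl::DRAW_FRAMEBUFFER_EXT);
  srcFbo->ActivateReadBuffer(0U);
  dstFbo->ActivateDrawBuffer(0U);
  vtkStaticCheckFrameBufferStatusMacro(vtkgl::DRAW_FRAMEBUFFER_EXT);

  int ext[4] = {0, width, 0, height};         // {x0, x1, y0, y1}: the extent layout Blit() expects
  vtkFrameBufferObject2::Blit(ext, ext, GL_COLOR_BUFFER_BIT, GL_NEAREST);

  srcFbo->UnBind(vtkgl::READ_FRAMEBUFFER_EXT);
  dstFbo->UnBind(vtkgl::DRAW_FRAMEBUFFER_EXT);
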
diff --git a/Rendering/OpenGL/vtkGLSLShader.cxx b/Rendering/OpenGL/vtkGLSLShader.cxx
deleted file mode 100644
index 11d427c..0000000
--- a/Rendering/OpenGL/vtkGLSLShader.cxx
+++ /dev/null
@@ -1,521 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkGLSLShader.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-#include "vtkGLSLShader.h"
-
-#include "vtkActor.h"
-#include "vtkCamera.h"
-#include "vtkLight.h"
-#include "vtkLightCollection.h"
-#include "vtkObjectFactory.h"
-#include "vtkOpenGLTexture.h"
-#include "vtkProperty.h"
-#include "vtkRenderer.h"
-#include "vtkXMLDataElement.h"
-#include "vtkXMLShader.h"
-#include "vtkWindow.h"
-
-#include <vtkgl.h>
-//#include <GL/glu.h>
-
-#include <sys/types.h>
-#include <string>
-#include <vector>
-
-
-#if 1
-#define GLSLprintOpenGLError() GLSLprintOglError(__FILE__, __LINE__)
-static int GLSLprintOglError(const char *vtkNotUsed(file),
-                             int vtkNotUsed(line))
-{
-  //Returns 1 if an OpenGL error occurred, 0 otherwise.
-  GLenum glErr;
-  int    retCode = 0;
-
-  glErr = glGetError();
-  while (glErr != GL_NO_ERROR)
-    {
-    //printf("glError in file %s @ line %d: %s\n", file, line, gluErrorString(glErr));
-    cout << "Error!" << endl;
-    retCode = 1;
-    glErr = glGetError();
-    }
-  return retCode;
-}
-#endif
-
-static void printLogInfo(GLuint shader, const char* filename)
-{
-#if 1
-  int isShader = vtkgl::IsShader(shader);
-  if (isShader)
-    {
-    cout << "GLSL Shader." << endl;
-    }
-  else
-    {
-    cout << "Not a GLSL Shader!!!." << endl;
-    return;
-    }
-
-  // Check scope
-  GLint type;
-  vtkgl::GetShaderiv(shader, vtkgl::SHADER_TYPE, &type);
-  // I know.  Technically if OpenGL 2.0 is not supported I should be checking
-  // against VERTEX_SHADER_ARB and FRAGMENT_SHADER_ARB, but the respective
-  // specifications for each has them set to the same value.
-  if( type == static_cast<GLint>(vtkgl::VERTEX_SHADER) )
-    {
-    cout << "GLSL Vertex Shader." << endl;
-    }
-  else if( type == static_cast<GLint>(vtkgl::FRAGMENT_SHADER) )
-    {
-    cout << "GLSL Fragment Shader." << endl;
-    }
-  else
-    {
-    cout << "Not a GLSL Shader???" << endl;
-    }
-
-  GLint compiled = 0;
-  GLsizei maxLength = 0;
-  vtkgl::GetShaderiv(shader, vtkgl::COMPILE_STATUS, &compiled);
-  vtkgl::GetShaderiv(shader, vtkgl::INFO_LOG_LENGTH, &maxLength);
-
-  vtkgl::GLchar* info = new vtkgl::GLchar[maxLength];
-  GLsizei charsWritten = 0;
-
-  vtkgl::GetShaderInfoLog( shader, maxLength, &charsWritten, info );
-
-  cout << "Compiled Status: " << compiled << endl;
-  if( info )
-    {
-    cout << "Log message: " << filename << endl
-         << static_cast<char *>(info) << endl;
-    }
-
-  GLSLprintOpenGLError();
-#endif
-}
-
-
-#if 0
-static void printAttributeInfo(GLuint program,
-                               const char* vtkNotUsed(filename))
-{
-  // print all uniform attributes
-  GLint numAttrs;
-  vtkgl::GetProgramiv( program, vtkgl::ACTIVE_UNIFORMS, &numAttrs);
-  if( numAttrs == GL_INVALID_VALUE )
-    {
-    cout << "GL_INVALID_VALUE for number of attributes." << endl;
-    }
-  else if( numAttrs == GL_INVALID_OPERATION )
-    {
-    cout << "GL_INVALID_OPERATION for number of attributes." << endl;
-    }
-  else if( numAttrs == GL_INVALID_ENUM )
-    {
-    cout << "GL_INVALID_ENUM for number of attributes." << endl;
-    }
-  else if( numAttrs == GL_INVALID_OPERATION )
-    {
-    cout << "GL_INVALID_OPERATION for number of attributes." << endl;
-    }
-  else
-    {
-    cout << numAttrs << " Uniform parameters:" << endl;
-    }
-
-  GLint maxLength;
-  vtkgl::GetProgramiv( program, vtkgl::ACTIVE_UNIFORM_MAX_LENGTH, &maxLength);
-  GLint id;
-  for( id=0; id<numAttrs; id++ )
-    {
-    vtkgl::GLchar *name = new vtkgl::GLchar[maxLength];
-    GLint length;
-    GLint size;
-    GLenum type;
-    vtkgl::GetActiveUniform( program, id, maxLength, &length, &size, &type,
-                             name);
-    if( name )
-      {
-      cout << "\t" << (char *)name << endl;
-      }
-    delete[] name;
-    }
-  cout << endl;
-}
-#endif
-
-//-----------------------------------------------------------------------------
-vtkStandardNewMacro(vtkGLSLShader);
-
-//-----------------------------------------------------------------------------
-vtkGLSLShader::vtkGLSLShader()
-{
-  this->Shader = 0;
-  this->Program = 0;
-}
-
-//-----------------------------------------------------------------------------
-vtkGLSLShader::~vtkGLSLShader()
-{
-  this->ReleaseGraphicsResources(0);
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader::ReleaseGraphicsResources(vtkWindow *win)
-{
-  if (win && win->GetMapped() && this->IsShader())
-    {
-    vtkgl::DeleteShader(this->Shader);
-    this->Shader = 0;
-    }
-}
-
-//-----------------------------------------------------------------------------
-int vtkGLSLShader::IsCompiled()
-{
-  GLint value = 0;
-  if( this->IsShader() )
-    {
-    vtkgl::GetShaderiv( static_cast<GLuint>(this->Shader),
-                        vtkgl::COMPILE_STATUS,
-                        &value );
-    }
-  return value==1;
-}
-
-//-----------------------------------------------------------------------------
-int vtkGLSLShader::IsShader()
-{
-  return this->Shader &&
-    vtkgl::IsShader( static_cast<GLuint>(this->Shader) ) == GL_TRUE;
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader::LoadShader()
-{
-  // if we have a shader, don't create a new one
-  if( !this->IsShader() )
-    {
-    // create an empty shader object
-    switch (this->XMLShader->GetScope())
-      {
-      case vtkXMLShader::SCOPE_VERTEX:
-        this->Shader = vtkgl::CreateShader( vtkgl::VERTEX_SHADER );
-        break;
-      case vtkXMLShader::SCOPE_FRAGMENT:
-        this->Shader = vtkgl::CreateShader( vtkgl::FRAGMENT_SHADER );
-        break;
-      }
-    }
-}
-
-//-----------------------------------------------------------------------------
-int vtkGLSLShader::Compile()
-{
-  if (this->IsCompiled())
-    {
-    return 1;
-    }
-
-  // Later, an XMLShader may not be necessary if the source is set by the
-  // application.
-  // For now, we need an XMLShader
-  if (!this->XMLShader)
-    {
-    return 0;
-    }
-
-  if (!this->XMLShader->GetCode())
-    {
-    vtkErrorMacro("Shader doesn't have any code!");
-    return 0;
-
-    }
-
-  if (this->IsCompiled())
-    {
-    return 1;
-    }
-
-  // create a shader context if needed.
-  this->LoadShader();
-
-  if( !this->IsShader() )
-    {
-    vtkErrorMacro( "Shader not loaded!!!" << endl );
-    if( this->Shader && this->XMLShader->GetName() )
-      {
-      printLogInfo(static_cast<GLuint>(this->Shader),
-                   this->XMLShader->GetName());
-      }
-    return 0;
-    }
-
-  // if we have the source available, try to load it
-  // Load the shader as a single string seems to work best
-  const vtkgl::GLchar* source =
-    static_cast<const vtkgl::GLchar*>(this->XMLShader->GetCode());
-
-  // Since the entire shader is sent to GL as a single string, the number of
-  // lines (second argument) is '1'.
-  vtkgl::ShaderSource( static_cast<GLuint>(this->Shader), 1, &source, NULL );
-
-  // make sure the source has been loaded
-  // print an error log if the shader is not compiled
-  vtkgl::CompileShader(static_cast<GLuint>(this->Shader));
-
-  if( !this->IsCompiled() )
-    {
-    vtkErrorMacro( "Shader not compiled!!!" << endl );
-    if( this->Shader && this->XMLShader->GetName() )
-      {
-      printLogInfo(static_cast<GLuint>(this->Shader),
-                   this->XMLShader->GetName());
-      }
-    return 0;
-    }
-  return 1;
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader::SetUniformParameter(const char* name,
-                                        int numValues,
-                                        const int* values)
-{
-  if( !this->IsShader() )
-    {
-    return;
-    }
-  while (glGetError() != GL_NO_ERROR)
-    {
-    vtkErrorMacro(<< "Found unchecked OpenGL error.");
-    }
-  GLint loc = static_cast<GLint>(this->GetUniformLocation(name));
-  if (loc == -1)
-    {
-    return;
-    }
-  const GLint *v = reinterpret_cast<const GLint *>(values);
-  switch(numValues)
-    {
-    case 1:
-      vtkgl::Uniform1iv(loc, 1, v);
-      break;
-    case 2:
-      vtkgl::Uniform2iv(loc, 1, v);
-      break;
-    case 3:
-      vtkgl::Uniform3iv(loc, 1, v);
-      break;
-    case 4:
-      vtkgl::Uniform4iv(loc, 1, v);
-      break;
-    default:
-      vtkErrorMacro("Number of values not supported: " << numValues);
-    }
-  while (glGetError() != GL_NO_ERROR)
-    {
-    vtkErrorMacro(<< "OpenGL error when setting uniform variable int["
-                  << numValues << "] " << name << ".\n"
-                  << "Perhaps there is a type mismatch.");
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader::SetUniformParameter(const char* name,
-                                        int numValues,
-                                        const float* values)
-{
-  if( !this->IsShader() )
-    {
-    return;
-    }
-  while (glGetError() != GL_NO_ERROR)
-    {
-    vtkErrorMacro(<< "Found unchecked OpenGL error.");
-    }
-  GLint loc = static_cast<GLint>(this->GetUniformLocation(name));
-  if (loc == -1)
-    {
-    return;
-    }
-  switch(numValues)
-    {
-    case 1:
-      vtkgl::Uniform1fv(loc, 1, values);
-      break;
-    case 2:
-      vtkgl::Uniform2fv(loc, 1, values);
-      break;
-    case 3:
-      vtkgl::Uniform3fv(loc, 1, values);
-      break;
-    case 4:
-      vtkgl::Uniform4fv(loc, 1, values);
-      break;
-    default:
-      vtkErrorMacro("Number of values not supported: " << numValues);
-    }
-  while (glGetError() != GL_NO_ERROR)
-    {
-    vtkErrorMacro(<< "OpenGL error when setting uniform variable int["
-                  << numValues << "] " << name << ".\n"
-                  << "Perhaps there is a type mismatch.");
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader::SetUniformParameter(const char* name,
-                                        int numValues,
-                                        const double* values)
-{
-  if( !this->IsShader() )
-    {
-    return;
-    }
-  float* fvalues = new float [numValues];
-
-  for (int i=0; i<numValues; i++)
-    {
-    fvalues[i] = static_cast<float>(values[i]);
-    }
-  this->SetUniformParameter(name, numValues, fvalues);
-  delete []fvalues;
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader:: SetMatrixParameter(const char* name,
-                                        int numValues,
-                                        int order, const float* value)
-{
-  if( !this->IsShader() )
-    {
-    return;
-    }
-  int transpose = order == vtkShader::RowMajor;
-
-  GLint loc = static_cast<GLint>(this->GetUniformLocation(name));
-  if (loc == -1)
-    {
-    return;
-    }
-
-  switch (numValues)
-    {
-    case 2*2:
-      vtkgl::UniformMatrix2fv(loc, 1, transpose, value);
-      break;
-    case 3*3:
-      vtkgl::UniformMatrix3fv(loc, 1, transpose, value);
-      break;
-    case 4*4:
-      vtkgl::UniformMatrix4fv(loc, 1, transpose, value);
-      break;
-    default:
-      vtkErrorMacro("Number of values not supported: " << numValues);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader:: SetMatrixParameter(const char* name,
-                                        int numValues,
-                                        int order,
-                                        const double* value)
-{
-  if( !this->IsShader() )
-    {
-    return;
-    }
-  float *v = new float[numValues];
-  for (int i=0; i < numValues; i++)
-    {
-    v[i] = static_cast<float>(value[i]);
-    }
-  this->SetMatrixParameter(name, numValues, order, v);
-  delete []v;
-}
-
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader::SetMatrixParameter(const char*,
-                                       const char*,
-                                       const char*)
-{
-  if( !this->IsShader() )
-    {
-    return;
-    }
-  vtkErrorMacro("GLSL does not support any system matrices!");
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader::SetSamplerParameter(const char* name,
-                                        vtkTexture* ,
-                                        int textureIndex)
-{
-  if( !this->IsShader() )
-    {
-    return;
-    }
-  this->SetUniformParameter(name, 1, &textureIndex);
-}
-
-//-----------------------------------------------------------------------------
-int vtkGLSLShader::GetUniformLocation(const char* name)
-{
-  if( !this->IsShader() )
-    {
-    return -1;
-    }
-  if( !name )
-    {
-    vtkErrorMacro( "NULL uniform shader parameter name.");
-    return -1;
-    }
-
-  if( vtkgl::IsProgram(this->GetProgram())!=GL_TRUE)
-    {
-    vtkErrorMacro( "NULL shader program.");
-    return -1;
-    }
-
-  int location;
-  location = vtkgl::GetUniformLocation( this->GetProgram(), name );
-  if( location == -1 )
-    {
-    vtkErrorMacro( "No such shader parameter. " << name );
-    }
-  return location;
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShader::PrintSelf(ostream& os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-
-  os << indent << "Program: " << this->Program << endl;
-}
diff --git a/Rendering/OpenGL/vtkGLSLShader.h b/Rendering/OpenGL/vtkGLSLShader.h
deleted file mode 100644
index 722d122..0000000
--- a/Rendering/OpenGL/vtkGLSLShader.h
+++ /dev/null
@@ -1,151 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkGLSLShader.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-// .NAME vtkGLSLShader - GLSL Shader
-// .SECTION Description
-// vtkGLSLShader is a concrete class that creates and compiles hardware
-// shaders written in the OpenGL Shadering Language (GLSL, OpenGL2.0).
-// While step linking a vertex and a fragment shader is performed by
-// vtkGLSLShaderProgram, all shader parameters are initialized in this
-// class.
-//
-// .Section vtkOpenGLExtensionManager
-// All OpenGL calls are made through vtkOpenGLExtensionManager.
-//
-// .Section Supported Basic Shader Types:
-//
-// Scalar Types
-// uniform float
-// uniform int
-// uniform int -- boolean scalar not yet tested
-//
-// Vector Types:
-// uniform vec{2|3|4}
-// uniform ivec{2|3|4}
-// uniform bvec{2|3|4} -- boolean vector not yet tested
-//
-// Matrix Types:
-// uniform mat{2|3|4}
-//
-// Texture Samplers:
-// sample1D -- Not yet implemented in this cless.
-// sample2D -- Not yet implemented in this class.
-// sample3D -- Not yet implemented in this class.
-// sampler1DShadow -- Not yet implemented in this class.
-// sampler1DShadow -- Not yet implemented in this class.
-//
-// User-Defined structures:
-// uniform struct
-//  NOTE: these must be defined and declared  outside of the 'main' shader
-//  function.
-//
-//
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkGLSLShader_h
-#define __vtkGLSLShader_h
-
-#include "vtkRenderingOpenGLModule.h" // For export macro
-#include "vtkShader.h"
-
-class vtkActor;
-class vtkRenderer;
-class vtkProperty;
-class vtkLight;
-class vtkCamera;
-class vtkRenderWindow;
-
-// Manages all shaders defined in the XML file
-// especially the part about sending things to the card
-class VTKRENDERINGOPENGL_EXPORT vtkGLSLShader : public vtkShader
-{
-public:
-  static vtkGLSLShader *New();
-  vtkTypeMacro(vtkGLSLShader, vtkShader);
-  void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Description:
-  // Called to compile the shader code.
-  // The subclasses must only compile the code in this method.
-  // Returns if the compile was successful.
-  // Subclasses should compile the code only if it was not
-  // already compiled.
-  virtual int Compile();
-
-  // Description:
-  // The vtkGLSLShaderProgram needs the shader handle for attaching.
-  unsigned int GetHandle() { return this->Shader; }
-
-  // Description:
-  // The Shader needs the id of the ShaderProgram
-  // to obtain uniform variable locations. This is set
-  // by vtkGLSLShaderProgram.
-  vtkSetMacro( Program, unsigned int );
-  vtkGetMacro( Program, unsigned int );
-
-  // Description:
-  // Release any graphics resources that are being consumed by this actor.
-  // The parameter window could be used to determine which graphic
-  // resources to release.
-  virtual void ReleaseGraphicsResources(vtkWindow *);
-protected:
-  vtkGLSLShader();
-  virtual ~vtkGLSLShader();
-
-  // These are GLuints.
-  unsigned int Program;
-  unsigned int Shader;
-
-  int IsShader();
-  int IsCompiled();
-
-  // Description:
-  // Create an empty Shader context.
-  void LoadShader();
-
-  // Description:
-  // Equivalent to cgGLSetParameter and glUniform.
-  virtual void SetUniformParameter(const char* name, int numValues, const int* value);
-  virtual void SetUniformParameter(const char* name, int numValues, const float* value);
-  virtual void SetUniformParameter(const char* name, int numValues, const double* value);
-
-  // Description:
-  // Equivalent to cgGLSetMatrixParameterfc and glUniformMatrix.
-  virtual void SetMatrixParameter(const char* name, int numValues,
-    int order, const float* value);
-  virtual void SetMatrixParameter(const char* name, int numValues,
-    int order, const double* value);
-  virtual void SetMatrixParameter(const char* name, const char* state_matix_type,
-    const char* transform_type);
-
-  virtual void SetSamplerParameter(const char* name, vtkTexture* texture,
-                                   int textureIndex);
-private:
-  vtkGLSLShader(const vtkGLSLShader&); // Not Implemented
-  void operator=(const vtkGLSLShader&); // Not Implemented
-
-  int GetUniformLocation( const char* name );
-};
-#endif //__vtkGLSLShader_h
diff --git a/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter.cxx b/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter.cxx
deleted file mode 100644
index 17f9335..0000000
--- a/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter.cxx
+++ /dev/null
@@ -1,294 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkGLSLShaderDeviceAdapter.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "vtkGLSLShaderDeviceAdapter.h"
-
-#include "vtkCollection.h"
-#include "vtkDataObject.h"
-#include "vtkDataSetAttributes.h"
-#include "vtkGLSLShaderProgram.h"
-#include "vtkObjectFactory.h"
-
-#include <map>
-#include <string>
-
-#include "vtkgl.h"
-
-class vtkGLSLShaderDeviceAdapter::vtkInternal
-{
-public:
-  typedef std::map<std::string, int> MapOfStringToInt;
-  MapOfStringToInt AttributeIndicesCache;
-};
-
-vtkStandardNewMacro(vtkGLSLShaderDeviceAdapter);
-
-#define GLSL_SHADER_DEVICE_ADAPTER(msg) \
-  /* cout << __LINE__ << " vtkGLSLShaderDeviceAdapter " << msg << endl; */
-
-//---------------------------------------------------------------------------
-vtkGLSLShaderDeviceAdapter::vtkGLSLShaderDeviceAdapter()
-{
-  GLSL_SHADER_DEVICE_ADAPTER("constructor");
-  this->Internal = new vtkInternal();
-}
-
-//---------------------------------------------------------------------------
-vtkGLSLShaderDeviceAdapter::~vtkGLSLShaderDeviceAdapter()
-{
-  GLSL_SHADER_DEVICE_ADAPTER("destructor");
-  delete this->Internal;
-}
-
-//---------------------------------------------------------------------------
-static inline GLenum VTK2SignedOpenGLType(int type)
-{
-  switch (type)
-    {
-#if VTK_SIZEOF_CHAR == 1
-    case VTK_CHAR:              return GL_BYTE;
-    case VTK_UNSIGNED_CHAR:     return GL_BYTE;
-#elif VTK_SIZE_OF_CHAR == 2
-    case VTK_CHAR:              return GL_SHORT;
-    case VTK_UNSIGNED CHAR:     return GL_SHORT;
-#endif
-
-#if VTK_SIZEOF_SHORT == 1
-    case VTK_SHORT:             return GL_BYTE;
-    case VTK_UNSIGNED_SHORT:    return GL_BYTE;
-#elif VTK_SIZEOF_SHORT == 2
-    case VTK_SHORT:             return GL_SHORT;
-    case VTK_UNSIGNED_SHORT:    return GL_SHORT;
-#elif VTK_SIZEOF_SHORT == 4
-    case VTK_SHORT:             return GL_INT;
-    case VTK_UNSIGNED_SHORT:    return GL_INT;
-#endif
-
-#if VTK_SIZEOF_INT == 2
-    case VTK_INT:               return GL_SHORT;
-    case VTK_UNSIGNED_INT:      return GL_SHORT;
-#elif VTK_SIZEOF_INT == 4
-    case VTK_INT:               return GL_INT;
-    case VTK_UNSIGNED_INT:      return GL_INT;
-#endif
-
-#if VTK_SIZEOF_ID_TYPE == 4
-    case VTK_ID_TYPE:           return GL_INT;
-#endif
-
-#if VTK_SIZEOF_LONG == 4
-    case VTK_LONG:              return GL_INT;
-    case VTK_UNSIGNED_LONG:     return GL_INT;
-#endif
-
-#if VTK_SIZEOF_FLOAT == 4
-    case VTK_FLOAT:             return GL_FLOAT;
-#elif VTK_SIZEOF_FLOAT == 8
-    case VTK_FLOAT:             return GL_DOUBLE;
-#endif
-
-#if VTK_SIZEOF_DOUBLE == 4
-    case VTK_DOUBLE:            return GL_FLOAT;
-#elif VTK_SIZEOF_DOUBLE == 8
-    case VTK_DOUBLE:            return GL_DOUBLE;
-#endif
-
-    default:                    return GL_FALSE;
-    }
-}
-
-//---------------------------------------------------------------------------
-int vtkGLSLShaderDeviceAdapter::GetAttributeLocation(const char *attributeName)
-{
-  vtkGLSLShaderProgram* glslProgram =
-    vtkGLSLShaderProgram::SafeDownCast(this->ShaderProgram);
-  if (glslProgram && glslProgram->GetProgram())
-    {
-    GLSL_SHADER_DEVICE_ADAPTER(
-      "GetAttributeLocation Program " << glslProgram->GetProgram());
-    return vtkgl::GetAttribLocation(glslProgram->GetProgram(), attributeName);
-    }
-  return -1;
-}
-
-//---------------------------------------------------------------------------
-void vtkGLSLShaderDeviceAdapter::PrepareForRender()
-{
-  this->Internal->AttributeIndicesCache.clear();
-}
-
-//---------------------------------------------------------------------------
-void vtkGLSLShaderDeviceAdapter::SendAttribute(const char *attrname,
-                                               int components,
-                                               int type,
-                                               const void *attribute,
-                                               unsigned long offset)
-{
-  int index;
-  vtkInternal::MapOfStringToInt::iterator iter =
-    this->Internal->AttributeIndicesCache.find(attrname);
-  if (iter == this->Internal->AttributeIndicesCache.end())
-    {
-    index = this->GetAttributeLocation(attrname);
-    if (index < 0)
-      {
-      // failed.
-      return;
-      }
-    this->Internal->AttributeIndicesCache[attrname]=index;
-    }
-  else
-    {
-    index = iter->second;
-    }
-
-  if (!attribute)
-    {
-    return;
-    }
-
-  if (components <=0 || components > 4)
-    {
-    vtkErrorMacro(<< components<< " components not supported.");
-    return;
-    }
-
-  if (index >= 0)
-    {
-    switch (VTK2SignedOpenGLType(type))
-      {
-    case GL_SHORT:
-      switch (components)
-        {
-        case 1:
-          vtkgl::VertexAttrib1sv(index, static_cast<const GLshort *>(attribute)
-                                 + offset);
-          break;
-        case 2:
-          vtkgl::VertexAttrib2sv(index, static_cast<const GLshort *>(attribute)
-                                 + offset);
-          break;
-        case 3:
-          vtkgl::VertexAttrib3sv(index, static_cast<const GLshort *>(attribute)
-                                 + offset);
-          break;
-        case 4:
-          vtkgl::VertexAttrib4sv(index, static_cast<const GLshort *>(attribute)
-                                 + offset);
-          break;
-        }
-      break;
-      case GL_FLOAT:
-        switch(components)
-          {
-          case 1:
-            GLSL_SHADER_DEVICE_ADAPTER( "SENDING " << components << " ATTRIBUTES "
-                                        << static_cast<const float*>(attribute)[offset] );
-            break;
-          case 2:
-            GLSL_SHADER_DEVICE_ADAPTER( "SENDING " << components << " ATTRIBUTES "
-                                        << static_cast<const float*>(attribute)[offset] << " "
-                                        << static_cast<const float*>(attribute)[offset+1] );
-            break;
-          case 3:
-            GLSL_SHADER_DEVICE_ADAPTER( "SENDING " << components << " ATTRIBUTES "
-                                        << static_cast<const float*>(attribute)[offset] << " "
-                                        << static_cast<const float*>(attribute)[offset+1] << " "
-                                        << static_cast<const float*>(attribute)[offset+2] );
-            break;
-          case 4:
-            GLSL_SHADER_DEVICE_ADAPTER( "SENDING " << components << " ATTRIBUTES "
-                                        << static_cast<const float*>(attribute)[offset] << " "
-                                        << static_cast<const float*>(attribute)[offset+1] << " "
-                                        << static_cast<const float*>(attribute)[offset+2] << " "
-                                        << static_cast<const float*>(attribute)[offset+3] );
-            break;
-          default:
-            GLSL_SHADER_DEVICE_ADAPTER( "SENDING " << components << " ATTRIBUTES "
-                                        << static_cast<const float*>(attribute)[offset] << " UNSUPPORTED NUMBER OF COMPONENTS");
-          }
-        switch (components)
-          {
-          case 1:
-            vtkgl::VertexAttrib1fv(index,
-                                   static_cast<const GLfloat *>(attribute)
-                                   + offset);
-            break;
-          case 2:
-            vtkgl::VertexAttrib2fv(index,
-                                   static_cast<const GLfloat *>(attribute)
-                                   + offset);
-            break;
-          case 3:
-            vtkgl::VertexAttrib3fv(index,
-                                   static_cast<const GLfloat *>(attribute)
-                                   + offset);
-            break;
-          case 4:
-            vtkgl::VertexAttrib4fv(index,
-                                   static_cast<const GLfloat *>(attribute)
-                                   + offset);
-            break;
-          }
-        break;
-      case GL_DOUBLE:
-        if(components == 3)
-          {
-          GLSL_SHADER_DEVICE_ADAPTER("SendingAttribute index " << index << " ["
-                                     << static_cast<const GLdouble *>(attribute)[offset] << " "
-                                     << static_cast<const GLdouble *>(attribute)[offset+1] << " "
-                                     << static_cast<const GLdouble *>(attribute)[offset+2] << "]");
-          }
-        switch (components)
-          {
-          case 1:
-            vtkgl::VertexAttrib1dv(index,
-                                   static_cast<const GLdouble *>(attribute)
-                                   + offset);
-            break;
-          case 2:
-            vtkgl::VertexAttrib2dv(index,
-                                   static_cast<const GLdouble *>(attribute)
-                                   + offset);
-            break;
-          case 3:
-            vtkgl::VertexAttrib3dv(index,
-                                   static_cast<const GLdouble *>(attribute)
-                                   + offset);
-            break;
-          case 4:
-            vtkgl::VertexAttrib4dv(index,
-                                   static_cast<const GLdouble *>(attribute)
-                                   + offset);
-            break;
-          }
-        break;
-      default:
-        vtkErrorMacro("Unsupported type for vertex attribute: " << type);
-        return;
-      }
-    }
-  else
-    {
-    vtkErrorMacro("Unsupported attribute index: " << index);
-    }
-  return;
-};
-
-//---------------------------------------------------------------------------
-void vtkGLSLShaderDeviceAdapter::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
diff --git a/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter.h b/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter.h
deleted file mode 100644
index 8bcfe9b..0000000
--- a/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter.h
+++ /dev/null
@@ -1,83 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkGLSLShaderDeviceAdapter.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkGLSLShaderDeviceAdapter - adapter to pass generic vertex
-// attributes to the rendering pipeline to be used in a GLSL shader.
-// .SECTION Description
-// vtkShaderDeviceAdapter subclass for GLSL.
-// .SECTION Thanks
-// Support for generic vertex attributes in VTK was contributed in
-// collaboration with Stephane Ploix at EDF.
-
-#ifndef __vtkGLSLShaderDeviceAdapter_h
-#define __vtkGLSLShaderDeviceAdapter_h
-
-#include "vtkRenderingOpenGLModule.h" // For export macro
-#include "vtkShaderDeviceAdapter.h"
-
-class VTKRENDERINGOPENGL_EXPORT vtkGLSLShaderDeviceAdapter :
-  public vtkShaderDeviceAdapter
-{
-public:
-  vtkTypeMacro(vtkGLSLShaderDeviceAdapter, vtkShaderDeviceAdapter);
-  static vtkGLSLShaderDeviceAdapter *New();
-  virtual void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Descrition:
-  // This method is called before rendering. This gives the shader device
-  // adapter an opportunity to collect information, such as attribute indices
-  // that it will need while rendering.
-  virtual void PrepareForRender();
-
-  // Description:
-  // Sends a single attribute to the graphics card.
-  // The attrname parameter identifies the name of attribute.
-  // The components parameter gives the number of
-  // components in the attribute.  In general, components must be between
-  // 1-4, but a rendering system may impose even more constraints.  The
-  // type parameter is a VTK type enumeration (VTK_FLOAT, VTK_INT, etc.).
-  // Again, a rendering system may not support all types for all
-  // attributes.  The attribute parameter is the actual data for the
-  // attribute.
-  // If offset is specified, it is added to attribute pointer \c after
-  // it has been casted to the proper type.
-  // If attribute is NULL, the OpenGL ID for the attribute will simply be
-  // cached.
-  virtual void SendAttribute(const char* attrname,
-                             int components,
-                             int type,
-                             const void *attribute,
-                             unsigned long offset=0);
-
-//BTX
-protected:
-  vtkGLSLShaderDeviceAdapter();
-  ~vtkGLSLShaderDeviceAdapter();
-
-  // Description:
-  int GetAttributeLocation(const char* attrName);
-
-  friend class vtkGLSLShaderProgram;
-
-private:
-  vtkGLSLShaderDeviceAdapter(const vtkGLSLShaderDeviceAdapter&);
-  // Not implemented
-  void operator=(const vtkGLSLShaderDeviceAdapter&); // Not implemented
-
-  class vtkInternal;
-  vtkInternal* Internal;
-//ETX
-};
-
-#endif
diff --git a/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter2.cxx b/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter2.cxx
index d81b437..3df41d8 100644
--- a/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter2.cxx
+++ b/Rendering/OpenGL/vtkGLSLShaderDeviceAdapter2.cxx
@@ -20,6 +20,7 @@
 #include "vtkDataSetAttributes.h"
 #include "vtkObjectFactory.h"
 #include "vtkShaderProgram2.h"
+#include "vtkOpenGLError.h"
 
 #include <map>
 #include <string>
@@ -128,6 +129,8 @@ void vtkGLSLShaderDeviceAdapter2::SendAttribute(const char *attrname,
                                                const void *attribute,
                                                unsigned long offset)
 {
+  vtkOpenGLClearErrorMacro();
+
   int index;
   vtkInternal::MapOfStringToInt::iterator iter =
     this->Internal->AttributeIndicesCache.find(attrname);
@@ -276,6 +279,8 @@ void vtkGLSLShaderDeviceAdapter2::SendAttribute(const char *attrname,
     {
     vtkErrorMacro("Unsupported attribute index: " << index);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after SendAttribute");
   return;
 };
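
The change above brackets SendAttribute() with vtkOpenGLClearErrorMacro and vtkOpenGLCheckErrorMacro from vtkOpenGLError.h. A minimal sketch of that clear-then-check pattern, with MyAdapter and UploadUniforms as invented names, assuming the class derives from vtkObject so the check macro can report through vtkErrorMacro:

  #include "vtkOpenGLError.h"

  void MyAdapter::UploadUniforms()
  {
    // Discard any errors left over from earlier GL callers so that the
    // check at the end reports only failures caused by this method.
    vtkOpenGLClearErrorMacro();

    // ... vtkgl::Uniform* / glBindTexture calls go here ...

    // Report any GL error raised between the clear and this point.
    vtkOpenGLCheckErrorMacro("failed after UploadUniforms");
  }
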
 
diff --git a/Rendering/OpenGL/vtkGLSLShaderProgram.cxx b/Rendering/OpenGL/vtkGLSLShaderProgram.cxx
deleted file mode 100644
index 4fa134c..0000000
--- a/Rendering/OpenGL/vtkGLSLShaderProgram.cxx
+++ /dev/null
@@ -1,403 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkGLSLShaderProgram.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-
-#include "vtkGLSLShaderProgram.h"
-
-#include "vtkActor.h"
-#include "vtkCollectionIterator.h"
-#include "vtkGLSLShaderDeviceAdapter.h"
-#include "vtkGLSLShader.h"
-#include "vtkObjectFactory.h"
-#include "vtkOpenGLExtensionManager.h"
-#include "vtkProperty.h"
-#include "vtkRenderer.h"
-#include "vtkTexture.h"
-#include "vtkXMLDataElement.h"
-#include "vtkWindow.h"
-
-#include <vector>
-#include <string>
-// GL/glu.h is needed for the error reporting, this should be removed
-// after the initial development phase.
-//#include <GL/glu.h>
-#include <vtkgl.h>
-
-#if 0
-#define printOpenGLError() printOglError(__FILE__, __LINE__)
-static int printOglError(char *vtkNotUsed(file), int vtkNotUsed(line))
-  {
-  // Returns 1 if an OpenGL error occurred, 0 otherwise.
-  //
-  GLenum glErr;
-  int    retCode = 0;
-
-  glErr = glGetError();
-  while (glErr != GL_NO_ERROR)
-    {
-    //printf("glError in file %s @ line %d: %s\n", file, line, gluErrorString(glErr));
-    cout << "Error in GLSLShaderProgram" << endl;
-    retCode = 1;
-    glErr = glGetError();
-    }
-  return retCode;
-  }
-#endif
-
-//-----------------------------------------------------------------------------
-vtkStandardNewMacro(vtkGLSLShaderProgram);
-
-//-----------------------------------------------------------------------------
-vtkGLSLShaderProgram::vtkGLSLShaderProgram()
-  : Program(0),
-    Info(NULL)
-{
-  vtkGLSLShaderDeviceAdapter* adapter = vtkGLSLShaderDeviceAdapter::New();
-  this->SetShaderDeviceAdapter(adapter);
-  adapter->Delete();
-}
-
-//-----------------------------------------------------------------------------
-vtkGLSLShaderProgram::~vtkGLSLShaderProgram()
-{
-  this->SetShaderDeviceAdapter(0);
-}
-
-
-//-----------------------------------------------------------------------------
-vtkShader* vtkGLSLShaderProgram::NewShader()
-{
-  return vtkGLSLShader::New();
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShaderProgram::ReleaseGraphicsResources(vtkWindow* w)
-{
-  if (w && w->GetMapped() && this->IsProgram())
-    {
-    vtkgl::DeleteProgram(this->Program);
-    }
-  this->Program = 0;
-  this->Superclass::ReleaseGraphicsResources(w);
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShaderProgram::Link()
-{
-}
-
-//-----------------------------------------------------------------------------
-int vtkGLSLShaderProgram::IsProgram()
-{
-  return this->Program &&
-    vtkgl::IsProgram( static_cast<GLuint>(this->Program) ) == GL_TRUE;
-}
-
-//-----------------------------------------------------------------------------
-int vtkGLSLShaderProgram::IsLinked()
-{
-  if(!this->IsProgram())
-    {
-    return false;
-    }
-  GLint value = 0;
-  vtkgl::GetProgramiv(static_cast<GLuint>(this->Program),
-                      vtkgl::LINK_STATUS, &value);
-  return value==1;
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShaderProgram::GetProgramInfo()
-{
-  if (!this->Program)
-    {
-    return;
-    }
-
-  std::string infoString;
-  if(this->IsProgram())
-    {
-    infoString += "GLSL Program. \n";
-    }
-  else
-    {
-    this->SetInfo("Not a GLSL Program. \n");
-    return;
-    }
-
-  // is this Program linked?
-  infoString += "Linked Status: ";
-  char linkedStr[256];
-  sprintf( linkedStr, "%d", this->IsLinked() );
-  infoString += linkedStr;
-  infoString += "\n";
-
-  // how many objects are attached?
-  GLint numObjects = 0;
-  vtkgl::GetProgramiv(static_cast<GLuint>(this->Program),
-                      vtkgl::ATTACHED_SHADERS, &numObjects);
-
-  char numStr[256];
-  sprintf( numStr, "%d", static_cast<int>(numObjects) );
-  infoString += "Number of attached objects: ";
-  infoString += numStr;
-  infoString += "\n";
-
-
-  // Anything in the info log?
-  GLint maxLength = 0;
-  vtkgl::GetProgramiv(static_cast<GLuint>(this->Program),
-                      vtkgl::INFO_LOG_LENGTH, &maxLength);
-
-  vtkgl::GLchar* info = new vtkgl::GLchar[maxLength];
-
-  GLsizei charsWritten;
-  vtkgl::GetProgramInfoLog( static_cast<GLuint>(this->Program), maxLength,
-                            &charsWritten, info );
-  if( info )
-    {
-    infoString += static_cast<char*>(info);
-    infoString += "\n";
-    }
-
-  if( infoString.empty() )
-    {
-    this->SetInfo( "No Program Info." );
-    }
-  else
-    {
-    this->SetInfo( infoString.c_str() );
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShaderProgram::GetInfoLog()
-{
-  // Anything in the info log?
-  int infologLength = 0;
-  int charsWritten  = 0;
-  vtkgl::GLchar *infoLog = NULL;
-
-  vtkgl::GetProgramiv(static_cast<GLuint>(this->Program),
-                      vtkgl::INFO_LOG_LENGTH,
-                      reinterpret_cast<GLint*>(&infologLength));
-
-  if(infologLength > 0)
-    {
-    infoLog = new vtkgl::GLchar[infologLength];
-    if(infoLog == NULL)
-      {
-      printf("ERROR: Could not allocate InfoLog buffer\n");
-      return;
-      }
-    vtkgl::GetProgramInfoLog(static_cast<GLuint>(this->Program),
-                             infologLength,
-                             reinterpret_cast<GLsizei*>(&charsWritten),
-                             infoLog);
-    this->SetInfo( infoLog );
-    delete [] infoLog;
-    }
-  else
-    {
-    this->SetInfo( "No Log Info." );
-    }
-}
-
-//-----------------------------------------------------------------------------
-int vtkGLSLShaderProgram::IsAttached(vtkGLSLShader* glslshader)
-{
-  unsigned int handle = glslshader->GetHandle();
-  int attached = 0;
-  // find out what's attached
-  GLint numObjects = 0;
-  GLint writtenObjects = 0;
-  vtkgl::GetProgramiv(static_cast<GLuint>(this->Program),
-                      vtkgl::ATTACHED_SHADERS, &numObjects);
-
-  std::vector<GLuint> attachedObjects(numObjects);
-  if( numObjects > 0 )
-    {
-    vtkgl::GetAttachedShaders(static_cast<GLuint>(this->Program), numObjects,
-                              &writtenObjects, &attachedObjects[0]);
-    }
-
-  std::vector<GLuint>::iterator it = attachedObjects.begin();
-  std::vector<GLuint>::iterator itEnd = attachedObjects.end();
-  while( it != itEnd )
-    {
-    if( static_cast<GLuint>(handle) == *it )
-      {
-      attached = 1;
-      }
-    it++;
-    }
-  return attached;
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShaderProgram::LoadExtensions( vtkRenderWindow* renWin )
-{
-  if(this->GetGLExtensionsLoaded())
-    {
-    return;
-    }
-
-  // Load extensions using vtkOpenGLExtensionManager
-  vtkOpenGLExtensionManager *extensions = vtkOpenGLExtensionManager::New();
-  // How can I get access to the vtkRenderWindow from here?
-  extensions->SetRenderWindow( renWin );
-  if(extensions->ExtensionSupported("GL_VERSION_2_0")
-     && extensions->ExtensionSupported("GL_VERSION_1_3") )
-    {
-    extensions->LoadExtension("GL_VERSION_2_0");
-    extensions->LoadExtension("GL_VERSION_1_3");
-    this->SetGLExtensionsLoaded(1);
-    }
-  else if (extensions->ExtensionSupported("GL_VERSION_1_3")
-           && extensions->ExtensionSupported("GL_ARB_shading_language_100")
-           && extensions->ExtensionSupported("GL_ARB_shader_objects")
-           && extensions->ExtensionSupported("GL_ARB_vertex_shader")
-           && extensions->ExtensionSupported("GL_ARB_fragment_shader") )
-    {
-    // Support older drivers that implement GLSL but not all of OpenGL 2.0.
-    extensions->LoadExtension("GL_VERSION_1_3");
-    extensions->LoadCorePromotedExtension("GL_ARB_shading_language_100");
-    extensions->LoadCorePromotedExtension("GL_ARB_shader_objects");
-    extensions->LoadCorePromotedExtension("GL_ARB_vertex_shader");
-    extensions->LoadCorePromotedExtension("GL_ARB_fragment_shader");
-    this->SetGLExtensionsLoaded(1);
-    }
-  else
-    {
-    vtkErrorMacro( "Required extension (GL_VERSION_2_0) is not supported." )
-    this->SetGLExtensionsLoaded(0);
-    }
-  extensions->Delete();
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShaderProgram::Render(vtkActor *actor, vtkRenderer *renderer)
-{
-  this->LoadExtensions( renderer->GetRenderWindow() );
-  if (!this->GetGLExtensionsLoaded())
-    {
-    return;
-    }
-
-  // Get a gl identifier for the shader program if we don't already have one.
-  if(!this->IsProgram())
-    {
-    this->Program = static_cast<unsigned int>(vtkgl::CreateProgram());
-    }
-
-  if(!this->IsProgram())
-    {
-    vtkErrorMacro( "Not able to create a GLSL Program!!!" << endl );
-    return;
-    }
-
-  vtkCollectionIterator* iter = this->ShaderCollectionIterator;
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-    iter->GoToNextItem())
-    {
-    vtkGLSLShader* shader = vtkGLSLShader::SafeDownCast(
-      iter->GetCurrentObject());
-
-    if (!shader)
-      {
-      vtkErrorMacro("GLSL Shader program cannot contain a non-GLSL shader.");
-      continue;
-      }
-
-    if (shader->Compile())
-      {
-      if (!this->IsAttached(shader))
-        {
-        vtkgl::AttachShader(static_cast<GLuint>(this->Program),
-                            shader->GetHandle());
-        }
-      }
-    }
-
-  if( !this->IsLinked() )
-    {
-    // if either a vertex or a fragment program is attached (or both)
-    // link the program.
-    GLint numObjects = 0;
-    vtkgl::GetProgramiv(static_cast<GLuint>(this->Program),
-                        vtkgl::ATTACHED_SHADERS, &numObjects);
-    if (numObjects>0)
-      {
-      vtkgl::LinkProgram(static_cast<GLuint>(this->Program));
-      if (!this->IsLinked())
-        {
-        this->GetInfoLog();
-        vtkErrorMacro(<< "Failed to link GLSL program:\n"
-                      << this->Info);
-        }
-      }
-    }
-
-  if( this->IsLinked() )
-    {
-    // check to see if this is the active program
-    vtkgl::UseProgram(static_cast<GLuint>(this->Program));
-    }
-
-  // handle attributes and uniform variables
-  // uniform variables
-  for (iter->InitTraversal(); !iter->IsDoneWithTraversal();
-    iter->GoToNextItem())
-    {
-    vtkGLSLShader* shader = vtkGLSLShader::SafeDownCast(
-      iter->GetCurrentObject());
-    if (!shader)
-      {
-      // no need to flag error...already marked.
-      continue;
-      }
-    shader->SetProgram(this->Program);
-    shader->PassShaderVariables(actor, renderer);
-    }
-}
-//-----------------------------------------------------------------------------
-void vtkGLSLShaderProgram::PostRender(vtkActor*, vtkRenderer*)
-{
-  if (!this->GetGLExtensionsLoaded())
-    {
-    return;
-    }
-
-  if (this->IsProgram())
-    {
-    // this unloads the shader program.
-    vtkgl::UseProgram(0);
-    }
-}
-
-//-----------------------------------------------------------------------------
-void vtkGLSLShaderProgram::PrintSelf(ostream &os, vtkIndent indent)
-{
-  this->Superclass::PrintSelf(os, indent);
-}
-
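
The removed Render() method above drives a create/attach/link/use sequence through the
vtkgl:: wrappers; a condensed sketch of the same flow against the equivalent plain
OpenGL 2.0 entry points, assuming vertexShaderHandle and fragmentShaderHandle are
already-compiled shader objects:

    GLuint program = glCreateProgram();
    glAttachShader(program, vertexShaderHandle);
    glAttachShader(program, fragmentShaderHandle);
    glLinkProgram(program);

    GLint linked = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &linked);
    if (!linked)
      {
      // mirrors GetInfoLog(): fetch and report the link log
      char log[1024];
      GLsizei written = 0;
      glGetProgramInfoLog(program, sizeof(log), &written, log);
      }
    else
      {
      glUseProgram(program);   // PostRender() later unbinds with glUseProgram(0)
      }
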
diff --git a/Rendering/OpenGL/vtkGLSLShaderProgram.h b/Rendering/OpenGL/vtkGLSLShaderProgram.h
deleted file mode 100644
index 01eb15a..0000000
--- a/Rendering/OpenGL/vtkGLSLShaderProgram.h
+++ /dev/null
@@ -1,101 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkGLSLShaderProgram.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-/*
- * Copyright 2003 Sandia Corporation.
- * Under the terms of Contract DE-AC04-94AL85000, there is a non-exclusive
- * license for use of this work by or on behalf of the
- * U.S. Government. Redistribution and use in source and binary forms, with
- * or without modification, are permitted provided that this Notice and any
- * statement of authorship are reproduced on all copies.
- */
-
-// .NAME vtkGLSLShaderProgram - GLSL Shader Program
-// .SECTION Description
-// vtkGLSLShaderProgram is a concrete implementation of vtkShaderProgram.
-// Its main function is to 'Link' a vertex and a fragment shader together
-// and install them into the rendering pipeline using OpenGL 2.0 calls.
-//
-// Initialization of shader parameters is delegated to instances of vtkShader
-// (vtkGLSLShader in this case).
-// .SECTION Thanks
-// Shader support in VTK includes key contributions by Gary Templet at
-// Sandia National Labs.
-
-#ifndef __vtkGLSLShaderProgram_h
-#define __vtkGLSLShaderProgram_h
-
-#include "vtkRenderingOpenGLModule.h" // For export macro
-#include "vtkShaderProgram.h"
-
-class vtkGLSLShader;
-class vtkXMLDataElement;
-class vtkRenderWindow;
-
-class VTKRENDERINGOPENGL_EXPORT vtkGLSLShaderProgram : public vtkShaderProgram
-{
-public:
-  vtkTypeMacro(vtkGLSLShaderProgram, vtkShaderProgram);
-  static vtkGLSLShaderProgram *New();
-  void PrintSelf(ostream &os, vtkIndent indent);
-
-  // Description:
-  // Takes the shaders from their source (file and/or string); loads, compiles,
-  // and installs them in hardware. Also initializes uniform variables
-  // originating from the following sources: XML material file, vtkProperty,
-  // vtkLight, vtkCamera, and application-specified uniform variables.
-  virtual void Render(vtkActor *actor, vtkRenderer *renderer);
-
-  // Description:
-  // Called to unload the shaders after the actor has been rendered.
-  virtual void PostRender(vtkActor*, vtkRenderer*);
-
-  // Description:
-  // Release any graphics resources that are being consumed by this actor.
-  // The parameter window could be used to determine which graphic
-  // resources to release.
-  virtual void ReleaseGraphicsResources(vtkWindow *);
-
-  // Description:
-  // Return the OpenGL program id.
-  unsigned int GetProgram()
-    {return this->Program;}
-
-protected:
-  vtkGLSLShaderProgram();
-  virtual ~vtkGLSLShaderProgram();
-
-  // Description:
-  // Creates and returns a new vtkGLSLShader.
-  virtual vtkShader* NewShader();
-
-  virtual void LoadExtensions(vtkRenderWindow*);
-
-  unsigned int Program;
-  int IsProgram();
-  int IsLinked();
-  int IsAttached(vtkGLSLShader* shader);
-  virtual void Link();
-
-  vtkSetStringMacro( Info );
-  vtkGetStringMacro( Info );
-  char* Info;
-  void GetProgramInfo();
-  void GetInfoLog();
-
-private:
-  vtkGLSLShaderProgram(const vtkGLSLShaderProgram&); // Not Implemented
-  void operator=(const vtkGLSLShaderProgram&); // Not Implemented
-};
-#endif //__vtkGLSLShaderProgram_h
diff --git a/Rendering/OpenGL/vtkGaussianBlurPass.cxx b/Rendering/OpenGL/vtkGaussianBlurPass.cxx
index ef86c16..a1c5281 100644
--- a/Rendering/OpenGL/vtkGaussianBlurPass.cxx
+++ b/Rendering/OpenGL/vtkGaussianBlurPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkGaussianBlurPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkRenderer.h"
 #include "vtkgl.h"
@@ -27,6 +27,7 @@
 #include "vtkUniformVariables.h"
 #include "vtkOpenGLRenderWindow.h"
 #include "vtkTextureUnitManager.h"
+#include "vtkOpenGLError.h"
 
 // to be able to dump intermediate passes into png files for debugging.
 // only for vtkGaussianBlurPass developers.
@@ -91,6 +92,8 @@ void vtkGaussianBlurPass::Render(const vtkRenderState *s)
 {
   assert("pre: s_exists" && s!=0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->NumberOfRenderedProps=0;
 
   vtkRenderer *r=s->GetRenderer();
@@ -458,6 +461,8 @@ void vtkGaussianBlurPass::Render(const vtkRenderState *s)
     {
     vtkWarningMacro(<<" no delegate.");
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 // ----------------------------------------------------------------------------
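
The vtkOpenGLClearErrorMacro / vtkOpenGLCheckErrorMacro bracketing added here recurs
throughout this patch; a minimal sketch of the intended pattern in a GL-touching
method (the class name and GL call are illustrative only):

    void vtkMyPass::Render(const vtkRenderState *s)
    {
      vtkOpenGLClearErrorMacro();        // discard stale errors left by earlier code

      // ... issue the OpenGL calls for this pass ...
      glClear(GL_COLOR_BUFFER_BIT);

      vtkOpenGLCheckErrorMacro("failed after Render");  // report anything raised above
    }
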
diff --git a/Rendering/OpenGL/vtkGenericOpenGLRenderWindow.cxx b/Rendering/OpenGL/vtkGenericOpenGLRenderWindow.cxx
index cd22a7c..9367874 100644
--- a/Rendering/OpenGL/vtkGenericOpenGLRenderWindow.cxx
+++ b/Rendering/OpenGL/vtkGenericOpenGLRenderWindow.cxx
@@ -17,12 +17,17 @@
 #include "vtkObjectFactory.h"
 #include "vtkRendererCollection.h"
 #include "vtkOpenGLRenderer.h"
+#include "vtkOpenGLRenderWindow.h"
 #include "vtkCommand.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkGenericOpenGLRenderWindow);
 
 vtkGenericOpenGLRenderWindow::vtkGenericOpenGLRenderWindow()
 {
+  this->DirectStatus = 0;
+  this->CurrentStatus = false;
+  this->SupportsOpenGLStatus = 0;
 }
 
 vtkGenericOpenGLRenderWindow::~vtkGenericOpenGLRenderWindow()
@@ -101,27 +106,26 @@ void vtkGenericOpenGLRenderWindow::MakeCurrent()
 
 bool vtkGenericOpenGLRenderWindow::IsCurrent()
 {
-  bool current = 0;
-  this->InvokeEvent(vtkCommand::WindowIsCurrentEvent, &current);
-  return current;
+  this->InvokeEvent(vtkCommand::WindowIsCurrentEvent, &this->CurrentStatus);
+  return this->CurrentStatus;
 }
 
 int vtkGenericOpenGLRenderWindow::SupportsOpenGL()
 {
-  int supports_ogl = 0;
-  this->InvokeEvent(vtkCommand::WindowSupportsOpenGLEvent, &supports_ogl);
-  return supports_ogl;
+  this->InvokeEvent(vtkCommand::WindowSupportsOpenGLEvent, &this->SupportsOpenGLStatus);
+  return this->SupportsOpenGLStatus;
 }
 
 int vtkGenericOpenGLRenderWindow::IsDirect()
 {
-  int is_direct = 0;
-  this->InvokeEvent(vtkCommand::WindowIsDirectEvent, &is_direct);
-  return is_direct;
+  this->InvokeEvent(vtkCommand::WindowIsDirectEvent, &this->DirectStatus);
+  return this->DirectStatus;
 }
 
 void vtkGenericOpenGLRenderWindow::PushState()
 {
+  vtkOpenGLClearErrorMacro();
+
   glPushClientAttrib(GL_CLIENT_ALL_ATTRIB_BITS);
   glPushAttrib(GL_ALL_ATTRIB_BITS);
 
@@ -131,10 +135,14 @@ void vtkGenericOpenGLRenderWindow::PushState()
   glMatrixMode(GL_MODELVIEW);
   glPushMatrix();
   glLoadIdentity();
+
+  vtkOpenGLCheckErrorMacro("failed after PushState");
 }
 
 void vtkGenericOpenGLRenderWindow::PopState()
 {
+  vtkOpenGLClearErrorMacro();
+
   glMatrixMode(GL_PROJECTION);
   glPopMatrix();
   glMatrixMode(GL_MODELVIEW);
@@ -142,6 +150,8 @@ void vtkGenericOpenGLRenderWindow::PopState()
 
   glPopClientAttrib();
   glPopAttrib();
+
+  vtkOpenGLCheckErrorMacro("failed after PopState");
 }
 
 
@@ -236,3 +246,18 @@ void vtkGenericOpenGLRenderWindow::CreateAWindow()
 void vtkGenericOpenGLRenderWindow::DestroyWindow()
 {
 }
+
+void vtkGenericOpenGLRenderWindow::SetIsDirect(int newValue)
+{
+  this->DirectStatus = newValue;
+}
+
+void vtkGenericOpenGLRenderWindow::SetSupportsOpenGL(int newValue)
+{
+  this->SupportsOpenGLStatus = newValue;
+}
+
+void vtkGenericOpenGLRenderWindow::SetIsCurrent(bool newValue)
+{
+  this->CurrentStatus = newValue;
+}
diff --git a/Rendering/OpenGL/vtkGenericOpenGLRenderWindow.h b/Rendering/OpenGL/vtkGenericOpenGLRenderWindow.h
index d17b4f2..37acbde 100644
--- a/Rendering/OpenGL/vtkGenericOpenGLRenderWindow.h
+++ b/Rendering/OpenGL/vtkGenericOpenGLRenderWindow.h
@@ -105,7 +105,17 @@ public:
   void DestroyWindow();
   // }@
 
+  // Description:
+  // Allows the state to be updated from within an observer callback without
+  // changing the data argument or the MTime.
+  void SetIsDirect(int newValue);
+  void SetSupportsOpenGL(int newValue);
+  void SetIsCurrent(bool newValue);
+
 protected:
+  int DirectStatus;
+  int SupportsOpenGLStatus;
+  bool CurrentStatus;
 
 private:
   vtkGenericOpenGLRenderWindow(const vtkGenericOpenGLRenderWindow&);  // Not implemented.
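
The SetIsDirect / SetSupportsOpenGL / SetIsCurrent methods added above let the embedding
application answer the Window*Event queries from an observer. A minimal sketch, assuming
hostContextIsCurrent() is the host toolkit's own "is the GL context current" test and
renderWindow is a vtkGenericOpenGLRenderWindow instance:

    static void OnWindowIsCurrent(vtkObject* caller, unsigned long,
                                  void* /*clientData*/, void* /*callData*/)
    {
      vtkGenericOpenGLRenderWindow* renWin =
        vtkGenericOpenGLRenderWindow::SafeDownCast(caller);
      if (renWin)
        {
        renWin->SetIsCurrent(hostContextIsCurrent());
        }
    }

    // Registration, e.g. at window-setup time:
    vtkCallbackCommand* cb = vtkCallbackCommand::New();
    cb->SetCallback(OnWindowIsCurrent);
    renderWindow->AddObserver(vtkCommand::WindowIsCurrentEvent, cb);
    cb->Delete();
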
diff --git a/Rendering/OpenGL/vtkImageProcessingPass.cxx b/Rendering/OpenGL/vtkImageProcessingPass.cxx
index 7041ecc..2ce916a 100644
--- a/Rendering/OpenGL/vtkImageProcessingPass.cxx
+++ b/Rendering/OpenGL/vtkImageProcessingPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkImageProcessingPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkRenderer.h"
 #include "vtkgl.h"
diff --git a/Rendering/OpenGL/vtkLightingHelper.cxx b/Rendering/OpenGL/vtkLightingHelper.cxx
index 3f6edad..7c9bf84 100644
--- a/Rendering/OpenGL/vtkLightingHelper.cxx
+++ b/Rendering/OpenGL/vtkLightingHelper.cxx
@@ -15,14 +15,19 @@
 #include "vtkLightingHelper.h"
 
 #include "vtkObjectFactory.h"
-#include "vtkgl.h"
 #include "vtkShaderProgram2.h"
 #include "vtkShader2Collection.h"
+#include "vtkgl.h"
+#include "vtkOpenGLError.h"
 
 extern const char * vtkLightingHelper_s;
 
+//----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkLightingHelper);
+
+//----------------------------------------------------------------------------
 vtkCxxSetObjectMacro(vtkLightingHelper, Shader, vtkShaderProgram2);
+
 //----------------------------------------------------------------------------
 vtkLightingHelper::vtkLightingHelper()
 {
@@ -55,9 +60,9 @@ void vtkLightingHelper::Initialize(vtkShaderProgram2* pgm,
 }
 
 //----------------------------------------------------------------------------
-#define VTK_MAX_LIGHTS 8
 void vtkLightingHelper::PrepareForRendering()
 {
+  #ifndef NDEBUG
   GLint ivalue;
   glGetIntegerv(vtkgl::CURRENT_PROGRAM, &ivalue);
   if (ivalue != 0)
@@ -65,6 +70,9 @@ void vtkLightingHelper::PrepareForRendering()
     vtkErrorMacro("PrepareForRendering() cannot be called after a shader program has been bound.");
     return;
     }
+  #endif
+
+  vtkOpenGLClearErrorMacro();
 
   for (int cc=0; cc < VTK_MAX_LIGHTS; cc++)
     {
@@ -83,6 +91,8 @@ void vtkLightingHelper::PrepareForRendering()
       }
     glLightfv(GL_LIGHT0 + cc, GL_DIFFUSE, lightDiffuse);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after enabling lights");
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkLightingHelper.h b/Rendering/OpenGL/vtkLightingHelper.h
index 580f614..3857fd0 100644
--- a/Rendering/OpenGL/vtkLightingHelper.h
+++ b/Rendering/OpenGL/vtkLightingHelper.h
@@ -39,18 +39,21 @@ public:
   vtkTypeMacro(vtkLightingHelper, vtkObject);
   void PrintSelf(ostream& os, vtkIndent indent);
 
+  enum {
+    VTK_MAX_LIGHTS=8
+  };
+
   // Description:
   // Get/Set the shader program to which we want to add the lighting kernels.
   // mode = VTK_SHADER_TYPE_VERTEX or VTK_SHADER_TYPE_FRAGMENT
   // depending on whether the vertex lighting or fragment lighting is to be
   // used.
-  void Initialize(vtkShaderProgram2 *shader,
-                  vtkShader2Type mode);
+  void Initialize(vtkShaderProgram2 *shader, vtkShader2Type mode);
   vtkGetObjectMacro(Shader, vtkShaderProgram2);
 
   // Description:
-  // Updates any lighting specific information needed.
-  // This must be called before the shader program is bound.
+  // Encodes light state in diffuse component 3, where the shader looks for it.
+  void EncodeLightState(){ this->PrepareForRendering(); }
   void PrepareForRendering();
 
 //BTX
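
The EncodeLightState() comment above is terse; the idea is that the enabled/disabled
state of each of the VTK_MAX_LIGHTS fixed-function lights is written into component 3
of its GL_DIFFUSE color so the GLSL lighting code can test it. A minimal sketch of
that encoding, assuming a fixed-function GL context:

    for (int cc = 0; cc < vtkLightingHelper::VTK_MAX_LIGHTS; ++cc)
      {
      GLfloat diffuse[4];
      glGetLightfv(GL_LIGHT0 + cc, GL_DIFFUSE, diffuse);
      // component 3 carries the on/off flag consumed by the lighting shader
      diffuse[3] = glIsEnabled(GL_LIGHT0 + cc) ? 1.0f : 0.0f;
      glLightfv(GL_LIGHT0 + cc, GL_DIFFUSE, diffuse);
      }
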
diff --git a/Rendering/OpenGL/vtkLightsPass.cxx b/Rendering/OpenGL/vtkLightsPass.cxx
index 7c87b14..4c03035 100644
--- a/Rendering/OpenGL/vtkLightsPass.cxx
+++ b/Rendering/OpenGL/vtkLightsPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkLightsPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkRenderer.h"
 #include "vtkgl.h"
diff --git a/Rendering/OpenGL/vtkLineIntegralConvolution2D.cxx b/Rendering/OpenGL/vtkLineIntegralConvolution2D.cxx
deleted file mode 100644
index fcbc43f..0000000
--- a/Rendering/OpenGL/vtkLineIntegralConvolution2D.cxx
+++ /dev/null
@@ -1,1002 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkLineIntegralConvolution2D.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#include "vtkLineIntegralConvolution2D.h"
-
-#include "vtkShader2.h"
-#include "vtkTextureObject.h"
-#include "vtkShaderProgram2.h"
-#include "vtkUniformVariables.h"
-#include "vtkShader2Collection.h"
-#include "vtkFrameBufferObject.h"
-#include "vtkOpenGLExtensionManager.h"
-
-#include "vtkMath.h"
-#include "vtkTimerLog.h"
-#include "vtkObjectFactory.h"
-#include "vtkOpenGLRenderWindow.h"
-
-#include <string>
-
-extern const char *vtkLineIntegralConvolution2D_fs;
-extern const char *vtkLineIntegralConvolution2D_fs1;
-extern const char *vtkLineIntegralConvolution2D_fs2;
-
-#include "vtkgl.h"
-
-static const char * vtkLineIntegralConvolution2DCode =
-// $,$ are replaced with [x,y,z,w]
-"vec2 getSelectedComponents(vec4 color)"
-"{"
-"  return color.$$;"
-"}";
-
-//#define VTK_LICDEBUGON
-
-#ifdef VTK_LICDEBUGON
-#define vtkLICDebug(x) cout << x << endl;
-#else
-#define vtkLICDebug(x)
-#endif
-
-vtkStandardNewMacro( vtkLineIntegralConvolution2D );
-
-// Given the coordinate range of the vector texture, that of the resulting
-// LIC texture, and the size of the output image, this function invokes the
-// GLSL vertex and fragment shaders by rendering a single quad
-//
-// vTCoords[4]:   a sub-region of the input vector field that is determined
-//                by the view projection
-//
-// licTCoords[4]: the resulting LIC texture; its whole [ 0.0, 1.0 ] x [ 0.0, 1.0 ]
-//                range is always rendered, even though it physically matches
-//                only a sub-region of the input vector field
-//
-// width and height: the size (in number of pixels) of the output image
-//
-static void vtkRenderQuad( double vTCoords[4], double licTCoords[4],
-                    unsigned int width, unsigned int height )
-{
-  // glTexCoord2f( tcoordx, tcoordy )
-  // == vtkgl::MultiTexCoord2f( vtkgl::TEXTURE0, tcoordx, tcoordy )
-
-  glBegin( GL_QUADS );
-
-    // lower left
-    vtkgl::MultiTexCoord2f(  vtkgl::TEXTURE0,
-                             static_cast<GLfloat>( licTCoords[0] ),
-                             static_cast<GLfloat>( licTCoords[2] ) );
-    vtkgl::MultiTexCoord2f(  vtkgl::TEXTURE1,
-                             static_cast< GLfloat >( vTCoords[0] ),
-                             static_cast< GLfloat >( vTCoords[2] )  );
-    glVertex2f( 0, 0 );
-
-    // lower right
-    vtkgl::MultiTexCoord2f(  vtkgl::TEXTURE0,
-                             static_cast< GLfloat >( licTCoords[1] ),
-                             static_cast< GLfloat >( licTCoords[2] )  );
-    vtkgl::MultiTexCoord2f(  vtkgl::TEXTURE1,
-                             static_cast< GLfloat >( vTCoords[1] ),
-                             static_cast< GLfloat >( vTCoords[2] )  );
-    glVertex2f(  static_cast< GLfloat >( width ),  0  );
-
-    // upper right
-    vtkgl::MultiTexCoord2f(  vtkgl::TEXTURE0,
-                             static_cast< GLfloat >( licTCoords[1] ),
-                             static_cast< GLfloat >( licTCoords[3] )  );
-    vtkgl::MultiTexCoord2f(  vtkgl::TEXTURE1,
-                             static_cast< GLfloat >( vTCoords[1] ),
-                             static_cast< GLfloat >( vTCoords[3] )  );
-    glVertex2f(  static_cast< GLfloat >( width  ),
-                 static_cast< GLfloat >( height )  );
-
-    // upper left
-    vtkgl::MultiTexCoord2f(  vtkgl::TEXTURE0,
-                             static_cast< GLfloat >( licTCoords[0] ),
-                             static_cast< GLfloat >( licTCoords[3] )  );
-    vtkgl::MultiTexCoord2f(  vtkgl::TEXTURE1,
-                             static_cast< GLfloat >( vTCoords[0] ),
-                             static_cast< GLfloat >( vTCoords[3] )  );
-    glVertex2f(  0,  static_cast< GLfloat >( height )  );
-
-
-
-  glEnd();
-}
-
-#define RENDERQUAD vtkRenderQuad( vTCoords, licTCoords, outWidth, outHeight );
-
-// ----------------------------------------------------------------------------
-vtkLineIntegralConvolution2D::vtkLineIntegralConvolution2D()
-{
-  this->LIC   = NULL;
-  this->Noise = NULL;
-  this->VectorField = NULL;
-
-  this->VectorShift   = 0.00;
-  this->VectorScale   = 1.00;
-  this->LICStepSize   = 0.01;
-  this->NumberOfSteps = 1;
-
-  this->GridSpacings[0]  = 1.0;
-  this->GridSpacings[1]  = 1.0;
-  this->ComponentIds[0]  = 0;
-  this->ComponentIds[1]  = 1;
-
-  this->EnhancedLIC      = 1;
-  this->LICForSurface    = 0;
-  this->Magnification    = 1;
-  this->TransformVectors = 1;
-}
-
-// ----------------------------------------------------------------------------
-vtkLineIntegralConvolution2D::~vtkLineIntegralConvolution2D()
-{
-  if ( this->LIC )
-    {
-    this->LIC->Delete();
-    this->LIC = NULL;
-    }
-
-  if ( this->Noise )
-    {
-    this->Noise->Delete();
-    this->Noise = NULL;
-   }
-
-  if ( this->VectorField )
-    {
-    this->VectorField->Delete();
-    this->VectorField = NULL;
-     }
-}
-
-// ----------------------------------------------------------------------------
-void vtkLineIntegralConvolution2D::SetLIC( vtkTextureObject * lic )
-{
-  vtkSetObjectBodyMacro( LIC, vtkTextureObject, lic );
-}
-
-// ----------------------------------------------------------------------------
-void vtkLineIntegralConvolution2D::SetNoise( vtkTextureObject * noise )
-{
-  vtkSetObjectBodyMacro( Noise, vtkTextureObject, noise );
-}
-
-// ----------------------------------------------------------------------------
-void vtkLineIntegralConvolution2D::SetVectorField
-  ( vtkTextureObject * vectorField )
-{
-  vtkSetObjectBodyMacro( VectorField, vtkTextureObject, vectorField );
-}
-
-// ----------------------------------------------------------------------------
-int vtkLineIntegralConvolution2D::Execute()
-{
-  unsigned int extent[4] = { 0, 0, 0, 0 };
-  extent[1] = this->VectorField->GetWidth()  - 1;
-  extent[3] = this->VectorField->GetHeight() - 1;
-
-  return this->Execute( extent );
-}
-
-// ----------------------------------------------------------------------------
-int vtkLineIntegralConvolution2D::Execute( int extent[4] )
-{
-  unsigned int uiExtent[4];
-
-  for ( int i = 0; i < 4; i ++ )
-    {
-    if ( extent[i] < 0 )
-      {
-      vtkErrorMacro( "Invalid input extent." );
-      return 0;
-      }
-
-    uiExtent[i] = static_cast< unsigned int >( extent[i] );
-    }
-
-  return this->Execute( uiExtent );
-}
-
-// ----------------------------------------------------------------------------
-// checks if the context supports the required extensions
-bool vtkLineIntegralConvolution2D::IsSupported(vtkRenderWindow *renWin)
-{
-  vtkOpenGLRenderWindow *w=static_cast<vtkOpenGLRenderWindow *>(renWin);
-
-  // As we cannot figure out more accurately why the LIC algorithm does not
-  // work on an OpenGL 2.1/DX9 GPU, we discriminate an OpenGL 3.0/DX10 GPU
-  // (like an NVIDIA GeForce 8) from an OpenGL 2.1/DX9 GPU (like an NVIDIA
-  // GeForce 6) by testing for geometry shader support, even if we are not
-  // using any geometry shader in the LIC algorithm.
-
-  vtkOpenGLExtensionManager *e=w->GetExtensionManager();
-  bool supportGS=e->ExtensionSupported("GL_VERSION_3_0")==1 ||
-    e->ExtensionSupported("GL_ARB_geometry_shader4")==1 ||
-    e->ExtensionSupported("GL_EXT_geometry_shader4")==1;
-
-  return supportGS && vtkTextureObject::IsSupported(renWin) &&
-    vtkFrameBufferObject::IsSupported(renWin) &&
-    vtkShaderProgram2::IsSupported(w);
-}
-
-// ----------------------------------------------------------------------------
-int vtkLineIntegralConvolution2D::Execute( unsigned int extent[4] )
-{
-  // check the number of steps and step size
-  if ( this->NumberOfSteps <= 0 )
-    {
-      vtkErrorMacro( "Number of integration steps should be positive." );
-      return 0;
-    }
-
-  if ( this->LICStepSize <= 0.0 )
-    {
-      vtkErrorMacro( "Streamline integration step size should be positive." );
-      return 0;
-    }
-
-  vtkTimerLog * timer = vtkTimerLog::New();
-  timer->StartTimer();
-
-  int components[2];
-  components[0] = this->ComponentIds[0];
-  components[1] = this->ComponentIds[1];
-
-  if ( this->VectorField->GetComponents() < 2 )
-    {
-    vtkErrorMacro( "VectorField must have at least 2 components." );
-    timer->Delete();
-    timer = NULL;
-    return 0;
-    }
-
-  // check the number of vector components
-  if ( this->VectorField->GetComponents() == 2 )
-    {
-    // for 2 component textures (LA texture)
-    components[0] = 0;
-    components[1] = 3;
-    }
-
-  // given the two specified vector-component IDs, modify the source code of
-  // the associated fragment shader such that the shader program can extract
-  // the two target components from each 3D vector
-  const char componentNames[] = { 'x', 0x0, 'y', 0x0, 'z', 0x0, 'w', 0x0 };
-  std::string   additionalKernel = ::vtkLineIntegralConvolution2DCode;
-  additionalKernel.replace(  additionalKernel.find( '$' ),  1,
-                            &componentNames[ 2 * components[0] ]  );
-  additionalKernel.replace(  additionalKernel.find( '$' ),  1,
-                            &componentNames[ 2 * components[1] ]  );
-
-  // size of the vector field (in number of pixels)
-  unsigned int inWidth  = this->VectorField->GetWidth();
-  unsigned int inHeight = this->VectorField->GetHeight();
-
-  // Compute the transform for the vector field. This is a 2x2 diagonal matrix.
-  // Hence, we only pass the non-NULL diagonal values.
-  double vectorTransform[2] = { 1.0, 1.0 };
-  if ( this->TransformVectors )
-    {
-    vectorTransform[0] = 1.0 / ( inWidth  * this->GridSpacings[0] );
-    vectorTransform[1] = 1.0 / ( inHeight * this->GridSpacings[1] );
-    }
-  vtkLICDebug( "vectorTransform: " << vectorTransform[0] << ", "
-                                   << vectorTransform[1] );
-
-  // size of the output LIC image
-  unsigned int outWidth  = ( extent[1] - extent[0] + 1 )
-                           * static_cast<unsigned int>( this->Magnification );
-  unsigned int outHeight = ( extent[3] - extent[2] + 1 )
-                           * static_cast<unsigned int>( this->Magnification );
-
-  // a sub-region of the input vector field that is determined by projection
-  double   vTCoords[4];
-  vTCoords[0] = extent[0] / static_cast<double>( inWidth  - 1 ); // xmin
-  vTCoords[1] = extent[1] / static_cast<double>( inWidth  - 1 ); // xmax
-  vTCoords[2] = extent[2] / static_cast<double>( inHeight - 1 ); // ymin
-  vTCoords[3] = extent[3] / static_cast<double>( inHeight - 1 ); // ymax
-
-  // the resulting LIC texture; its whole [ 0.0, 1.0 ] x [ 0.0, 1.0 ] range is
-  // always rendered, even though it physically matches only a sub-region of
-  // the input vector field
-  double licTCoords[4] = { 0.0, 1.0, 0.0, 1.0 };
-
-  // obtain the rendering context
-  vtkOpenGLRenderWindow * context =
-  vtkOpenGLRenderWindow::SafeDownCast( this->VectorField->GetContext() );
-  if (  !context->GetExtensionManager()->
-                  LoadSupportedExtension( "GL_VERSION_1_3" )  )
-    {
-    vtkErrorMacro( "the required GL_VERSION_1_3 missing" );
-    timer->Delete();
-    timer   = NULL;
-    context = NULL;
-    return 0;
-    }
-
-  // pair #0: a 2D texture that stores the positions where particles released
-  // from the fragments (streamline centers) 'currently' are during integration.
-  // Note that this texture is indexed for regular / non-center streamline
-  // points only because the fragments' texture coordinates themselves are just
-  // the initial positions of the streamline centers.
-  // ( r, g ) == ( s, t ) tcoords; ( b ) == not-used.
-  vtkTextureObject * tcords0 = vtkTextureObject::New();
-  tcords0->SetContext( context );
-  tcords0->Create2D( outWidth, outHeight, 3, VTK_FLOAT, false );
-  vtkLICDebug( "texture object tcords0 Id = " << tcords0->GetHandle() );
-
-  // pair #0: a 2D texture that stores the intermediate accumulated texture
-  // values (r, g, b) for the fragments (and it is the output texture upon the
-  // completion of the entire LIC process)
-  vtkTextureObject * licTex0 = vtkTextureObject::New();
-  licTex0->SetContext( context );
-  licTex0->Create2D( outWidth, outHeight, 3, VTK_FLOAT, false );
-  vtkLICDebug( "texture object licTex0 Id = " << licTex0->GetHandle() );
-
-  // pair #1: a 2D texture that stores the positions where particles released
-  // from the fragments (streamline centers) 'currently' are during integration.
-  // Note that this texture is indexed for regular / non-center streamline
-  // points only because the fragments' texture coordinates themselves are just
-  // the initial positions of the streamline centers.
-  // ( r, g ) == ( s, t ) tcoords; ( b ) == not-used.
-  vtkTextureObject * tcords1 = vtkTextureObject::New();
-  tcords1->SetContext( context );
-  tcords1->Create2D( outWidth, outHeight, 3, VTK_FLOAT, false );
-  vtkLICDebug( "texture object tcords1 Id = " << tcords1->GetHandle() );
-
-  // pair #1: a 2D texture that stores the intermediate accumulated texture
-  // values (r, g, b) for the fragments (and it is the output texture upon the
-  // completion of the entire LIC process)
-  vtkTextureObject * licTex1 = vtkTextureObject::New();
-  licTex1->SetContext( context );
-  licTex1->Create2D( outWidth, outHeight, 3, VTK_FLOAT, false );
-  vtkLICDebug( "texture object licTex1 Id = " << licTex1->GetHandle() );
-
-  // a 2D texture that stores the output of the high-pass filtering (invoked
-  // when enhanced LIC is desired)
-  vtkTextureObject * lhpfTex = vtkTextureObject::New();
-  lhpfTex->SetContext( context );
-  lhpfTex->Create2D( outWidth, outHeight, 3, VTK_FLOAT, false );
-  vtkLICDebug( "texture object lhpfTex Id = " << lhpfTex->GetHandle() );
-
-  // frame buffer object that maintains multiple color buffers (texture objects)
-  vtkFrameBufferObject * frameBufs = vtkFrameBufferObject::New();
-  frameBufs->SetDepthBufferNeeded( false );
-  frameBufs->SetContext( context );
-  frameBufs->SetColorBuffer( 0, licTex0 );
-  frameBufs->SetColorBuffer( 1, tcords0 );
-  frameBufs->SetColorBuffer( 2, licTex1 );
-  frameBufs->SetColorBuffer( 3, tcords1 );
-  frameBufs->SetColorBuffer( 4, lhpfTex );
-  frameBufs->SetNumberOfRenderTargets( 5 );
-
-  // the four color buffers (texture objects) constitute two pairs (licTex0 with
-  // tcords0 and licTex1 with tcords1), which work in a ping-pong fashion, with
-  // one pair as the read texture objects, via
-  //     vtkgl::ActiveTexture( vtkgl::TEXTURE2 );
-  //     frameBufs->GetColorBuffer( pairX[0] )->Bind();
-  //     vtkgl::ActiveTexture( vtkgl::TEXTURE3 );
-  //     frameBufs->GetColorBuffer( pairX[1] )->Bind();
-  //
-  //     (note the input vector field and noise texture serve
-  //     as vtkgl::TEXTURE0 and vtkgl::TEXTURE1, respectively)
-  //
-  // and the other pair as the write / render textures / targets, via
-  //     frameBufs->SetActiveBuffers( 2, pairY )
-  unsigned int   pair0[2] = { 0, 1 };
-  unsigned int   pair1[2] = { 2, 3 };
-  unsigned int * pairs[2] = { pair0, pair1 };
-
-  // create a shader program invoking the fragment shaders
-  vtkShaderProgram2 * shaderProg = vtkShaderProgram2::New();
-  shaderProg->SetContext( context );
-  context = NULL;
-
-  // load the supporting fragment shader that contains utility functions
-  vtkShader2 * utilities = vtkShader2::New();
-  utilities->SetContext( shaderProg->GetContext() );
-  utilities->SetType( VTK_SHADER_TYPE_FRAGMENT );
-  utilities->SetSourceCode( vtkLineIntegralConvolution2D_fs );
-  shaderProg->GetShaders()->AddItem( utilities );
-  utilities->Delete();
-
-  // load the supporting fragment shader program that tells which two
-  // components are needed from each 3D vector
-  vtkShader2 * selectComps = vtkShader2::New();
-  selectComps->SetContext( shaderProg->GetContext() );
-  selectComps->SetType( VTK_SHADER_TYPE_FRAGMENT );
-  selectComps->SetSourceCode( additionalKernel.c_str() );
-  shaderProg->GetShaders()->AddItem( selectComps );
-  selectComps->Delete();
-
-  // load the fragment shader program that implements the LIC process
-  vtkShader2 * glslFS1 = vtkShader2::New();
-  glslFS1->SetContext( shaderProg->GetContext() );
-  glslFS1->SetType( VTK_SHADER_TYPE_FRAGMENT );
-  glslFS1->SetSourceCode( vtkLineIntegralConvolution2D_fs1 );
-
-  // load the fragment shader program that implements high-pass filtering
-  vtkShader2 * glslFS2 = vtkShader2::New();
-  glslFS2->SetContext( shaderProg->GetContext() );
-  glslFS2->SetType( VTK_SHADER_TYPE_FRAGMENT );
-  glslFS2->SetSourceCode( vtkLineIntegralConvolution2D_fs2 );
-
-  // build the LIC fragment shader
-  vtkLICDebug( "building the LIC fragment shader (pass #1)" );
-  shaderProg->GetShaders()->AddItem( glslFS1 );
-  shaderProg->Build();
-  if ( shaderProg->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED )
-    {
-    vtkErrorMacro( "error with building the LIC fragment shader (pass #1)" );
-    return 0;
-    }
-  vtkLICDebug( "the LIC fragment shader (pass #1) built" );
-
-  // input texture #0: the vector field, bound as TEXTURE0
-  vtkgl::ActiveTexture( vtkgl::TEXTURE0 );
-  this->VectorField->Bind();
-  glTexParameteri( this->VectorField->GetTarget(),
-                   GL_TEXTURE_MIN_FILTER, GL_LINEAR );
-  glTexParameteri( this->VectorField->GetTarget(),
-                   GL_TEXTURE_MAG_FILTER, GL_LINEAR );
-  vtkLICDebug( "texture object vectorField Id="
-                  << this->VectorField->GetHandle() );
-
-  // input texture #1: the noise texture, bound as TEXTURE1
-  vtkgl::ActiveTexture( vtkgl::TEXTURE1 );
-  this->Noise->Bind();
-  vtkLICDebug( "texture object Noise Id = " << this->Noise->GetHandle() );
-
-  // determine the noise scale factor that allows for the use of a noise texture
-  // smaller than the input vector field and the output image
-  double noiseScale[2] = { 1.0, 1.0 };
-  noiseScale[0] = this->Magnification * this->VectorField->GetWidth() /
-                  static_cast<double>( this->Noise->GetWidth() );
-  noiseScale[1] = this->Magnification * this->VectorField->GetHeight() /
-                  static_cast<double>( this->Noise->GetHeight() );
-  vtkLICDebug( "noiseScale: " << noiseScale[0] << ", " << noiseScale[1] );
-
-  // set the parameters for the LIC fragment shader
-  int   value;
-  float fvalues[2];
-  value = this->LICForSurface;
-  shaderProg->GetUniformVariables()->SetUniformi( "uSurfaced", 1, &value  );
-  value = 1 - this->EnhancedLIC; // it is the last pass if EnhancedLIC is OFF
-  shaderProg->GetUniformVariables()->SetUniformi( "uLastPass",  1,&value );
-  value = 0;
-  shaderProg->GetUniformVariables()->SetUniformi( "uMaskType", 1, &value  );
-  value = this->NumberOfSteps;
-  shaderProg->GetUniformVariables()->SetUniformi( "uNumSteps", 1, &value  );
-  fvalues[0] = static_cast<float>( this->LICStepSize );
-  shaderProg->GetUniformVariables()->SetUniformf( "uStepSize", 1, fvalues );
-  fvalues[0] = static_cast<float>( this->VectorShift );
-  fvalues[1] = static_cast<float>( this->VectorScale );
-  shaderProg->GetUniformVariables()->SetUniformf
-                                     ( "uVectorShiftScale", 2, fvalues );
-  fvalues[0] = static_cast<float>( noiseScale[0] );
-  fvalues[1] = static_cast<float>( noiseScale[1] );
-  shaderProg->GetUniformVariables()->SetUniformf
-                                     ( "uNoise2VecScaling", 2, fvalues );
-  fvalues[0] = static_cast<float>( vectorTransform[0] );
-  fvalues[1] = static_cast<float>( vectorTransform[1] );
-  shaderProg->GetUniformVariables()->SetUniformf
-                                     ( "uVectorTransform2", 2, fvalues );
-
-  float vtCordRange[4];
-  vtCordRange[0] = static_cast<float> ( vTCoords[0] );
-  vtCordRange[1] = static_cast<float> ( vTCoords[1] );
-  vtCordRange[2] = static_cast<float> ( vTCoords[2] );
-  vtCordRange[3] = static_cast<float> ( vTCoords[3] );
-  shaderProg->GetUniformVariables()->SetUniformf
-                                     ( "uVTCordRenderBBox", 4, vtCordRange );
-
-  value = 0;
-  shaderProg->GetUniformVariables()->SetUniformi
-                                     ( "uNTCordShiftScale", 1, &value );
-
-  // Declare the first two input texture objects of the fragment shader by
-  // specifying their names referenced in the shader.
-  // These two texture objects correspond to this->VectorField (bound to
-  // vtkgl::TEXTURE0) and this->Noise (bound to vtkgl::TEXTURE1), respectively.
-  value = 0;
-  shaderProg->GetUniformVariables()->SetUniformi( "texVectorField", 1, &value );
-  value = 1;
-  shaderProg->GetUniformVariables()->SetUniformi( "texNoise", 1, &value );
-
-  // Declare the last two input texture objects of the fragment shader by
-  // specifying their names referenced in the shader.
-  // Note that these two texture objects are dynamically determined and bound
-  // (to vtkgl::TEXTURE2 and vtkgl::TEXTURE3, respectively, below) as the two
-  // pairs of color buffers (tcords0 with licTex0 and tcords1 with licTex1)
-  // work in a ping-pong manner during the LIC process.
-  value = 2;
-  shaderProg->GetUniformVariables()->SetUniformi( "texLIC", 1, &value );
-  value = 3;
-  shaderProg->GetUniformVariables()->SetUniformi( "texTCoords", 1,&value );
-
-  shaderProg->Use();
-
-  int            readIndex = 0; // index of the pair used as the read buffers
-  unsigned int * readBuffs = NULL;
-  unsigned int * writeBufs = NULL;
-  for ( int direction = 0; direction < 2; direction ++ )
-    {
-    // NOTE: this->NumberOfSteps + 1 is used below because the streamline center
-    //       point is actually visited two times (due to the outer loop),
-    //       one per integration direction. Thus ( this->NumberOfSteps + 1 ) *
-    //       2 visits access ( this->NumberOfSteps + 1 ) * 2 - 1 = 2 * this->
-    //       NumberOfSteps + 1 unique streamline points.
-    //
-    //       The associated fragment shader addresses this issue by asking
-    //       each center-visit to contribute half the texture value.
-    for ( int stepIdx = 0; stepIdx < this->NumberOfSteps + 1; stepIdx ++ )
-      {
-      // determine the pair of color buffers, among the four of the frame
-      // buffer object, used as the input and the one used as the output
-      readIndex = ( stepIdx % 2 );
-      readBuffs = pairs[     readIndex ];
-      writeBufs = pairs[ 1 - readIndex ];
-
-      // specify the 2D texture that stores the intermediate accumulated
-      // texture values (r, g, b) for the fragments
-      vtkgl::ActiveTexture( vtkgl::TEXTURE2 );
-      vtkTextureObject * accumLIC = frameBufs->GetColorBuffer( readBuffs[0] );
-      accumLIC->Bind();
-      vtkLICDebug( "accumLIC: " << accumLIC->GetHandle() );
-      accumLIC = NULL;
-
-      // Specify the 2D texture that stores the positions where particles
-      // released from the fragments (streamline centers) 'currently' are.
-      // Note this texture is indexed for regular / non-center streamline
-      // points only because the fragments' texture coordinates themselves
-      // are just the initial positions of the streamline centers.
-      // ( r, g ) == ( s, t ) tcoords; ( b ) == not-used.
-      vtkgl::ActiveTexture( vtkgl::TEXTURE3 );
-      vtkTextureObject * dynaTcords = frameBufs->GetColorBuffer( readBuffs[1] );
-      dynaTcords->Bind();
-      vtkLICDebug( "dynaTcords: " << dynaTcords->GetHandle() );
-      dynaTcords = NULL;
-
-      // specify the pair of texture objects as the render targets
-      frameBufs->SetActiveBuffers( 2, writeBufs );
-      if (  !frameBufs->Start( outWidth, outHeight, false )  )
-        {
-        shaderProg->GetShaders()->RemoveItem( glslFS1 );
-        shaderProg->GetShaders()->RemoveItem( utilities );
-        shaderProg->GetShaders()->RemoveItem( selectComps );
-
-        glslFS1->ReleaseGraphicsResources();
-        glslFS2->ReleaseGraphicsResources();
-        utilities->ReleaseGraphicsResources();
-        selectComps->ReleaseGraphicsResources();
-        shaderProg->ReleaseGraphicsResources();
-        glslFS1->Delete();
-        glslFS2->Delete();
-        shaderProg->Delete();
-
-        frameBufs->Delete();
-        tcords0->Delete();
-        tcords1->Delete();
-        licTex0->Delete();
-        licTex1->Delete();
-        lhpfTex->Delete();
-        timer->Delete();
-
-        glslFS1    = NULL;
-        glslFS2    = NULL;
-        utilities  = NULL;
-        selectComps= NULL;
-        shaderProg = NULL;
-        frameBufs  = NULL;
-        tcords0    = NULL;
-        tcords1    = NULL;
-        licTex0    = NULL;
-        licTex1    = NULL;
-        lhpfTex    = NULL;
-        timer      = NULL;
-
-        readBuffs  = NULL;
-        writeBufs  = NULL;
-        pairs[0]   = NULL;
-        pairs[1]   = NULL;
-
-        return 0;
-        }
-      vtkLICDebug( "active render buffers Ids: " << writeBufs[0] << ", "
-                      << writeBufs[1] << " for step #" << stepIdx );
-
-      // streamline integration direction: negative (-1) and positive (1)
-      value = ( direction << 1 ) - 1;
-      shaderProg->GetUniformVariables()->SetUniformi( "uStepSign", 1, &value );
-
-      // step type (0, 1, 2)
-      // 0: first access to the streamline center point
-      // 1: access to a regular / non-center streamline point
-      // 2: second access to the streamline center point
-      //    (due to a change in the streamline integration direction)
-      value = 1 + ( !stepIdx ) * (  ( direction << 1 ) - 1  );
-      shaderProg->GetUniformVariables()->SetUniformi( "uStepType", 1, &value );
-
-      // zero-vector fragment masking
-      // 0: retain the white noise texture value by storing the negated version
-      // 1: export ( -1.0, -1.0, -1.0, -1.0 ) for use by vtkSurfaceLICPainter
-      //    to make this LIC fragment totally transparent to show the underlying
-      //    geometry surface
-      //
-      // a zero-vector fragment is always masked with ( -1.0, -1.0, -1.0, -1.0)
-      // IF we need a basic LIC image (instead of an improved one) for display
-      value = int(  ( direction == 1 ) && ( stepIdx == this->NumberOfSteps ) &&
-                    ( this->EnhancedLIC == 0 )
-                 );
-      shaderProg->GetUniformVariables()->SetUniformi( "uMaskType", 1, &value );
-
-      shaderProg->SendUniforms(); // force resending uniforms
-      if( !shaderProg->IsValid() )
-        {
-        vtkErrorMacro( << " validation of the program failed: "
-                       << shaderProg->GetLastValidateLog() );
-        }
-
-      RENDERQUAD
-      }
-    }
-
-  if ( this->EnhancedLIC )
-    {
-    // --------------------------------------------- begin high-pass filtering
-    // perform Laplacian high-pass filtering using a fragment shader
-    shaderProg->Restore();
-    vtkLICDebug( "unbinding the LIC fragment shader (pass #1) ... " );
-
-    shaderProg->GetShaders()->RemoveItem( glslFS1 );
-    shaderProg->GetShaders()->AddItem( glslFS2 );
-
-    vtkLICDebug( "building the high-pass filtering shader ... " );
-    shaderProg->Build();
-    if( shaderProg->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED )
-      {
-      vtkErrorMacro( "error with bulding the high-pass filtering shader" );
-      return 0;
-      }
-
-    // set parameters for the high-pass filtering shader and declare the only one
-    // input texture by specifying its name referenced in the shader
-    value = 0;
-    float licWidth  = static_cast<float>( outWidth  );
-    float licHeight = static_cast<float>( outHeight );
-    shaderProg->GetUniformVariables()->SetUniformi( "licTexture", 1, &value     );
-    shaderProg->GetUniformVariables()->SetUniformf( "uLicTexWid", 1, &licWidth  );
-    shaderProg->GetUniformVariables()->SetUniformf( "uLicTexHgt", 1, &licHeight );
-
-    // determine the read and write / render textures for the high-pass filter
-    unsigned int filterReadIdx = writeBufs[0]; // the output of pass #1 LIC
-    unsigned int filterWriteId = 4;            // texture object lhpfTex
-
-    // bind the input texture to the filter
-    vtkgl::ActiveTexture( vtkgl::TEXTURE0 );
-    vtkTextureObject * licImage = frameBufs->GetColorBuffer( filterReadIdx );
-    licImage->Bind();
-    licImage = NULL;
-
-    // set the output texture of the filter as the active one of the FBO
-    vtkLICDebug( "active render buffer Id: " << filterWriteId );
-    frameBufs->SetActiveBuffers( 1, &filterWriteId );
-    if (  !frameBufs->Start( outWidth, outHeight, false )  )
-      {
-      shaderProg->GetShaders()->RemoveItem( glslFS2 );
-      shaderProg->GetShaders()->RemoveItem( utilities );
-      shaderProg->GetShaders()->RemoveItem( selectComps );
-
-      glslFS1->ReleaseGraphicsResources();
-      glslFS2->ReleaseGraphicsResources();
-      utilities->ReleaseGraphicsResources();
-      selectComps->ReleaseGraphicsResources();
-      shaderProg->ReleaseGraphicsResources();
-      glslFS1->Delete();
-      glslFS2->Delete();
-      shaderProg->Delete();
-
-      frameBufs->Delete();
-      tcords0->Delete();
-      tcords1->Delete();
-      licTex0->Delete();
-      licTex1->Delete();
-      lhpfTex->Delete();
-      timer->Delete();
-
-      glslFS1    = NULL;
-      glslFS2    = NULL;
-      utilities  = NULL;
-      selectComps= NULL;
-      shaderProg = NULL;
-      frameBufs  = NULL;
-      tcords0    = NULL;
-      tcords1    = NULL;
-      licTex0    = NULL;
-      licTex1    = NULL;
-      lhpfTex    = NULL;
-      timer      = NULL;
-
-      readBuffs  = NULL;
-      writeBufs  = NULL;
-      pairs[0]   = NULL;
-      pairs[1]   = NULL;
-
-      return 0;
-      }
-
-    shaderProg->Use();
-    if( !shaderProg->IsValid() )
-      {
-      vtkErrorMacro( "error validating the high-pass filtering shader "
-                     << shaderProg->GetLastValidateLog() );
-      }
-
-    // invoke the high-pass filter by rendering the quad
-    RENDERQUAD
-    // --------------------------------------------- end  high-pass  filtering
-
-
-    // --------------------------------------------- begin   second-pass   LIC
-    shaderProg->Restore();
-    vtkLICDebug( "unbinding the high-pass filtering shader ... " );
-
-    shaderProg->GetShaders()->RemoveItem( glslFS2 );
-    shaderProg->GetShaders()->AddItem( glslFS1 );
-
-    vtkLICDebug( "building the LIC fragment shader (pass #2) ... " );
-    shaderProg->Build();
-    if( shaderProg->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED )
-      {
-      vtkErrorMacro( "error with bulding the LIC fragment shader (pass #2)" );
-      return 0;
-      }
-
-    // this is the last pass of LIC (for non-surface LIC, make sure the output
-    // pixel values are all positive since neither high-pass filtering nor
-    // geometry-LIC compositing is performed at all)
-    value = 1;
-    shaderProg->GetUniformVariables()->SetUniformi( "uLastPass",  1,&value );
-
-    // As pass #1 LIC has constructed the basic flow pattern (the tangential
-    // flow streaks have been curved 'out') and then the high-pass filter has
-    // even enhanced it, pass #2 LIC can save some integration steps and instead
-    // is focused on smoothing away those noisy components (those excessively
-    // contrasted fragments).
-    int lic2Steps = this->NumberOfSteps / 2;
-    shaderProg->GetUniformVariables()->SetUniformi( "uNumSteps", 1, &lic2Steps );
-
-    // When the output of pass #1 LIC is high-pass filtered and then forwarded
-    // to pass #2 LIC as the input 'noise', the size of this 'noise' texture
-    // (uVTCordRenderBBox) is equal to the current extent of the vector
-    // field (vTCoords[4]) times this->Magnification. Since noiseScale (or
-    // uNoise2VecScaling) involves this->Magnification and hence the value of
-    // uNoise2VecScaling for pass #2 LIC is just vec2(1.0, 1.0) AS LONG AS we
-    // take this 'noise' texture as an extent (uVTCordRenderBBox = vTCoords[4])
-    // of the virtual full 'noise' texture (for which the out-of-extent part
-    // is just not defined / provided by the output of the high-pass filter ---
-    // 'virtual'). To compensate for the effect of the 'extent', the vector
-    // field-based noise texture coordinate needs to be shifted and scaled in
-    // vtkLineIntegralConvolution2D_fs.glsl::getNoiseColor() to index this
-    // 'noise' texture (an extent of the virtual full 'noise' texture) properly.
-    fvalues[0] = 1.0;
-    fvalues[1] = 1.0;
-    shaderProg->GetUniformVariables()->SetUniformf( "uNoise2VecScaling", 2, fvalues );
-    value = 1;
-    shaderProg->GetUniformVariables()->SetUniformi( "uNTCordShiftScale", 1, &value );
-    shaderProg->SendUniforms();
-
-    // bind the vector field as an input texture
-    vtkgl::ActiveTexture( vtkgl::TEXTURE0 );
-    this->VectorField->Bind();
-
-    // replace the original white noise texture with a new 'noise' texture (the
-    // output generated by high-pass filtering pass #1 LIC image) and bind it
-    vtkgl::ActiveTexture( vtkgl::TEXTURE1 );
-    this->Noise->UnBind();
-    vtkTextureObject * tempTex = frameBufs->GetColorBuffer( 4 );
-    tempTex->Bind();
-    tempTex = NULL;
-
-    shaderProg->Use();
-
-    for ( int direction = 0; direction < 2; direction ++ )
-      {
-      // NOTE: lic2Steps + 1 is used below because the streamline center point
-      //       is actually visited two times (due to the outer loop),  one per
-      //       integration direction. Thus ( lic2Steps + 1 ) * 2 visits access
-      //       ( lic2Steps + 1 ) * 2 - 1 = 2 * lic2Steps + 1 unique streamline
-      //       points.
-      //
-      //       The associated fragment shader addresses this issue by asking
-      //       each center-visit to contribute half the texture value.
-      for ( int stepIdx = 0; stepIdx < lic2Steps + 1; stepIdx ++ )
-        {
-        // determine the pair of color buffers, among the four of the frame
-        // buffer object, used as the input and the one used as the output
-        readIndex = ( stepIdx % 2 );
-        readBuffs = pairs[     readIndex ];
-        writeBufs = pairs[ 1 - readIndex ];
-
-        // specify the 2D texture that stores the intermediate accumulated
-        // texture values (r, g, b) for the fragments and bind it as an input
-        vtkgl::ActiveTexture( vtkgl::TEXTURE2 );
-        vtkTextureObject * accumLIC = frameBufs->GetColorBuffer( readBuffs[0] );
-        accumLIC->Bind();
-        vtkLICDebug( "accumLIC: " << accumLIC->GetHandle() );
-        accumLIC = NULL;
-
-        // Specify the 2D texture that stores the positions where particles
-        // released from the fragments (streamline centers) 'currently' are.
-        // Note this texture is indexed for regular / non-center streamline
-        // points only because the fragments' texture coordinates themselves
-        // are just the initial positions of the streamline centers.
-        // (r, g) == (s, t) tcoords; (b) == not-used.
-        vtkgl::ActiveTexture( vtkgl::TEXTURE3 );
-        vtkTextureObject * dynaTcords = frameBufs->GetColorBuffer( readBuffs[1] );
-        dynaTcords->Bind();
-        vtkLICDebug( "dynaTcords: " << dynaTcords->GetHandle() );
-        dynaTcords = NULL;
-
-        // specify the pair of texture objects as the render targets
-        frameBufs->SetActiveBuffers( 2, writeBufs );
-        if (  !frameBufs->Start( outWidth, outHeight, false )  )
-          {
-          shaderProg->GetShaders()->RemoveItem( glslFS1 );
-          shaderProg->GetShaders()->RemoveItem( utilities );
-          shaderProg->GetShaders()->RemoveItem( selectComps );
-
-          glslFS1->ReleaseGraphicsResources();
-          glslFS2->ReleaseGraphicsResources();
-          utilities->ReleaseGraphicsResources();
-          selectComps->ReleaseGraphicsResources();
-          shaderProg->ReleaseGraphicsResources();
-          glslFS1->Delete();
-          glslFS2->Delete();
-          shaderProg->Delete();
-
-          frameBufs->Delete();
-          tcords0->Delete();
-          tcords1->Delete();
-          licTex0->Delete();
-          licTex1->Delete();
-          lhpfTex->Delete();
-          timer->Delete();
-
-          glslFS1    = NULL;
-          glslFS2    = NULL;
-          utilities  = NULL;
-          selectComps= NULL;
-          shaderProg = NULL;
-          frameBufs  = NULL;
-          tcords0    = NULL;
-          tcords1    = NULL;
-          licTex0    = NULL;
-          licTex1    = NULL;
-          lhpfTex    = NULL;
-          timer      = NULL;
-
-          readBuffs  = NULL;
-          writeBufs  = NULL;
-          pairs[0]   = NULL;
-          pairs[1]   = NULL;
-
-          return 0;
-          }
-        vtkLICDebug( "active render buffers Ids: " << writeBufs[0] << ", "
-                        << writeBufs[1] << " for step #" << stepIdx );
-
-        // streamline integration direction: negative (-1) and positive (1)
-        value = ( direction << 1 ) - 1;
-        shaderProg->GetUniformVariables()->SetUniformi( "uStepSign", 1, &value );
-
-        // step type (0, 1, 2)
-        // 0: first access to the streamline center point
-        // 1: access to a regular / non-center streamline point
-        // 2: second access to the streamline center point
-        //    (due to a change in the streamline integration direction)
-        value = 1 + ( !stepIdx ) * (  ( direction << 1 ) - 1  );
-        shaderProg->GetUniformVariables()->SetUniformi( "uStepType", 1, &value );
-
-        // zero-vector fragment masking
-        // 0: retain the white noise texture value by storing the negated version
-        // 1: export ( -1.0, -1.0, -1.0, -1.0 ) for use by vtkSurfaceLICPainter
-        //    to make this LIC fragment totally transparent to show the underlying
-        //    geometry surface
-        value = int(  ( direction == 1 ) && ( stepIdx == lic2Steps )  );
-        shaderProg->GetUniformVariables()->SetUniformi( "uMaskType", 1, &value );
-
-        shaderProg->SendUniforms(); // force resending uniforms
-        if ( !shaderProg->IsValid() )
-          {
-          vtkErrorMacro( << " validation of the program failed: "
-                       << shaderProg->GetLastValidateLog() );
-          }
-
-        RENDERQUAD
-        }
-      }
-    // --------------------------------------------- end    second-pass    LIC
-    }
-
-
-  glFinish();
-  timer->StopTimer();
-  shaderProg->Restore();
-  vtkLICDebug( "Exec Time: " <<  timer->GetElapsedTime() );
-  timer->Delete();
-  timer = NULL;
-
-
-  // obtain the LIC image, either basic LIC or enhanced LIC
-  this->LIC = frameBufs->GetColorBuffer( writeBufs[0] ); // accept one licTex
-  frameBufs->GetColorBuffer( readBuffs[0] )->Delete();   // free other licTex
-
-
-  // memory deallocation (NOTE: do not deallocate licTex0 and licTex1 below
-  // since one is deallocated above and the other is deallocated via this->
-  // LIC upon the destruction of this class)
-  glslFS1->ReleaseGraphicsResources();
-  glslFS2->ReleaseGraphicsResources();
-  utilities->ReleaseGraphicsResources();
-  selectComps->ReleaseGraphicsResources();
-  shaderProg->ReleaseGraphicsResources();
-  glslFS1->Delete();
-  glslFS2->Delete();
-  shaderProg->Delete();
-  frameBufs->Delete();
-  tcords0->Delete();
-  tcords1->Delete();
-  lhpfTex->Delete();
-  glslFS1    = NULL;
-  glslFS2    = NULL;
-  utilities  = NULL;
-  selectComps= NULL;
-  shaderProg = NULL;
-  frameBufs  = NULL;
-  tcords0    = NULL;
-  tcords1    = NULL;
-  licTex0    = NULL;
-  licTex1    = NULL;
-  lhpfTex    = NULL;
-  readBuffs  = NULL;
-  writeBufs  = NULL;
-
-  return 1;
-}
-
-//-----------------------------------------------------------------------------
-void vtkLineIntegralConvolution2D::PrintSelf( ostream & os, vtkIndent indent )
-{
-  this->Superclass::PrintSelf( os, indent );
-
-  os << indent << "LIC: "              << this->LIC              << "\n";
-  os << indent << "Noise: "            << this->Noise            << "\n";
-  os << indent << "VectorField: "      << this->VectorField      << "\n";
-
-  os << indent << "EnahncedLIC: "      << this->EnhancedLIC      << "\n";
-  os << indent << "LICStepSize: "      << this->LICStepSize      << "\n";
-  os << indent << "VectorShift: "      << this->VectorShift      << "\n";
-  os << indent << "VectorScale: "      << this->VectorScale      << "\n";
-  os << indent << "Magnification: "    << this->Magnification    << "\n";
-  os << indent << "NumberOfSteps: "    << this->NumberOfSteps    << "\n";
-  os << indent << "ComponentIds: "     << this->ComponentIds[0]  << ", "
-                                       << this->ComponentIds[1]  << "\n";
-  os << indent << "GridSpacings: "     << this->GridSpacings[0]  << ", "
-                                       << this->GridSpacings[1]  << "\n";
-  os << indent << "LICForSurface: "    << this->LICForSurface    << "\n";
-  os << indent << "TransformVectors: " << this->TransformVectors << "\n";
-}
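
For reference, a minimal standalone sketch of the bookkeeping used by the
removed Execute() loop above: the direction-to-sign mapping, the uStepType
encoding, and the ping-pong selection of the read / write color-buffer pairs.
The buffer ids and the step count below are hypothetical example values.

    #include <cstdio>

    int main()
    {
      // hypothetical color-buffer ids forming the two ping-pong pairs
      unsigned int pair0[2] = { 0, 1 };
      unsigned int pair1[2] = { 2, 3 };
      unsigned int *pairs[2] = { pair0, pair1 };

      int lic2Steps = 3; // example value

      for ( int direction = 0; direction < 2; direction ++ )
        {
        // direction 0 -> -1 (backward), direction 1 -> +1 (forward)
        int stepSign = ( direction << 1 ) - 1;

        for ( int stepIdx = 0; stepIdx < lic2Steps + 1; stepIdx ++ )
          {
          // 0: first visit of the streamline center, 1: regular point,
          // 2: second visit of the center (after the direction flip)
          int stepType = 1 + ( !stepIdx ) * (  ( direction << 1 ) - 1  );

          // ping-pong: even steps read one pair and write the other, odd steps swap
          int readIndex = stepIdx % 2;
          unsigned int *readBuffs = pairs[     readIndex ];
          unsigned int *writeBufs = pairs[ 1 - readIndex ];

          printf( "sign=%+d type=%d read={%u,%u} write={%u,%u}\n",
                  stepSign, stepType, readBuffs[0], readBuffs[1],
                  writeBufs[0], writeBufs[1] );
          }
        }
      return 0;
    }
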
diff --git a/Rendering/OpenGL/vtkLineIntegralConvolution2D.h b/Rendering/OpenGL/vtkLineIntegralConvolution2D.h
deleted file mode 100644
index 0777d49..0000000
--- a/Rendering/OpenGL/vtkLineIntegralConvolution2D.h
+++ /dev/null
@@ -1,225 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkLineIntegralConvolution2D.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-// .NAME vtkLineIntegralConvolution2D - GPU-based implementation of Line
-//  Integral Convolution (LIC)
-//
-// .SECTION Description
-//  This class resorts to GLSL to implement GPU-based Line Integral Convolution
-//  (LIC) for visualizing a 2D vector field that may be obtained by projecting
-//  an original 3D vector field onto a surface (such that the resulting 2D
-//  vector at each grid point is tangent to the surface, i.e. perpendicular to
-//  the local normal, as done in vtkSurfaceLICPainter).
-//
-//  As an image-based technique, 2D LIC works by (1) integrating a bidirectional
-//  streamline from the center of each pixel (of the LIC output image), (2)
-//  locating the pixels along / hit by this streamline as the correlated pixels
-//  of the starting pixel (seed point / pixel), (3) indexing a (usually white)
-//  noise texture (another input to LIC, in addition to the 2D vector field,
-//  usually with the same size as that of the 2D vector field) to determine the
-//  values (colors) of these pixels (the starting and the correlated pixels),
-//  typically through bi-linear interpolation, and (4) performing convolution
-//  (weighted averaging) on these values, by adopting a low-pass filter (such
-//  as box, ramp, and Hanning kernels), to obtain the result value (color) that
-//  is then assigned to the seed pixel.
-//
-//  The GLSL-based GPU implementation herein maps the aforementioned pipeline to
-//  fragment shaders and a box kernel is employed. Both the white noise and the
-//  vector field are provided to the GPU as texture objects (supported by the
-//  multi-texturing capability). In addition, there are four texture objects
-//  (color buffers) allocated to constitute two pairs that work in a ping-pong
-//  fashion, with one as the read buffers and the other as the write / render
-//  targets. Maintained by a frame buffer object (GL_EXT_framebuffer_object),
-//  each pair employs one buffer to store the current (dynamically updated)
-//  position (by means of the texture coordinate that keeps being warped by the
-//  underlying vector) of the (virtual) particle initially released from each
-//  fragment while using the other buffer to store the current (dynamically
-//  updated too) accumulated texture value that each seed fragment (before the
-//  'mesh' is warped) collects. Given NumberOfSteps integration steps in each
-//  direction, a total of (2 * NumberOfSteps + 1) fragments (including the
-//  seed fragment) are convolved and each contributes 1 / (2 * NumberOfSteps
-//  + 1) of the associated texture value to fulfill the box filter.
-//
-//  One pass of LIC (basic LIC) tends to produce low-contrast / blurred images and
-//  vtkLineIntegralConvolution2D provides an option for creating enhanced LIC
-//  images. Enhanced LIC improves image quality by increasing inter-streamline
-//  contrast while suppressing artifacts. It performs two passes of LIC, with a
-//  3x3 Laplacian high-pass filter in between that processes the output of pass
-//  #1 LIC and forwards the result as the input 'noise' to pass #2 LIC. Enhanced
-//  LIC automatically degenerates to basic LIC during user interaction.
-//
-//  vtkLineIntegralConvolution2D applies masking to zero-vector fragments so
-//  that un-filtered white noise areas are made totally transparent by class
-//  vtkSurfaceLICPainter to show the underlying geometry surface.
-//
-// .SECTION Required OpenGL Extensions
-//  GL_ARB_texture_non_power_of_two
-//  GL_VERSION_2_0
-//  GL_ARB_texture_float
-//  GL_ARB_draw_buffers
-//  GL_EXT_framebuffer_object
-//
-// .SECTION See Also
-//  vtkSurfaceLICPainter vtkImageDataLIC2D vtkStructuredGridLIC2D
-
-#ifndef __vtkLineIntegralConvolution2D_h
-#define __vtkLineIntegralConvolution2D_h
-
-#include "vtkRenderingOpenGLModule.h" // For export macro
-#include "vtkObject.h"
-
-class vtkRenderWindow;
-class vtkTextureObject;
-
-class VTKRENDERINGOPENGL_EXPORT vtkLineIntegralConvolution2D : public vtkObject
-{
-public:
-
-  static vtkLineIntegralConvolution2D * New();
-  vtkTypeMacro( vtkLineIntegralConvolution2D, vtkObject );
-  void PrintSelf( ostream & os, vtkIndent indent );
-
-  // Description:
-  // Enable/Disable enhanced LIC that improves image quality by increasing
-  // inter-streamline contrast while suppressing artifacts. Enhanced LIC
-  // performs two passes of LIC, with a 3x3 Laplacian high-pass filter in
-  // between that processes the output of pass #1 LIC and forwards the result
-  // as the input 'noise' to pass #2 LIC. This flag is automatically turned
-  // off during user interaction.
-  vtkSetMacro( EnhancedLIC, int );
-  vtkGetMacro( EnhancedLIC, int );
-  vtkBooleanMacro( EnhancedLIC, int );
-
-  // Description:
-  // Enable/Disable LICForSurface, for which the LIC texture is composited
-  // with the underlying geometry.
-  vtkSetMacro( LICForSurface, int );
-  vtkGetMacro( LICForSurface, int );
-  vtkBooleanMacro( LICForSurface, int );
-
-  // Description:
-  // Number of streamline integration steps (initial value is 1).
-  // In terms of visual quality, the greater (within some range) the better.
-  vtkSetMacro( NumberOfSteps, int );
-  vtkGetMacro( NumberOfSteps, int );
-
-  // Description:
-  // Get/Set the streamline integration step size (0.01 by default). This is
-  // the length of each step in normalized image space, i.e. in range [0, 1].
-  // In terms of visual quality, the smaller the better. The interface type is
-  // double for consistency with the rest of VTK, but the GPU only supports
-  // float, so the value is converted to float when the algorithm executes.
-  vtkSetClampMacro( LICStepSize, double, 0.0, 1.0 );
-  vtkGetMacro( LICStepSize, double );
-
-  // Description:
-  // Set/Get the input white noise texture (initial value is NULL).
-  void SetNoise( vtkTextureObject * noise );
-  vtkGetObjectMacro( Noise, vtkTextureObject );
-
-  // Description:
-  // Set/Get the vector field (initial value is NULL).
-  void SetVectorField( vtkTextureObject * vectorField );
-  vtkGetObjectMacro( VectorField, vtkTextureObject );
-
-  // Description:
-  // If VectorField has >= 3 components, we must choose which 2 components
-  // form the (X, Y) components for the vector field. Must be in the range
-  // [0, 3].
-  vtkSetVector2Macro( ComponentIds, int );
-  vtkGetVector2Macro( ComponentIds, int );
-
-  // Description:
-  // Set/Get the spacing in each dimension of the plane on which the vector
-  // field is defined. This class performs LIC in the normalized image space
-  // and hence generally it needs to transform the input vector field (given
-  // in physical space) to the normalized image space. The Spacing is needed
-  // to determine the transform. Default is (1.0, 1.0). It is possible to
-  // disable vector transformation by setting TransformVectors to 0.
-  vtkSetVector2Macro( GridSpacings, double );
-  vtkGetVector2Macro( GridSpacings, double );
-
-  // Description:
-  // This class performs LIC in the normalized image space. Hence, by default
-  // it transforms the input vectors to the normalized image space (using the
-  // GridSpacings and input vector field dimensions). Set this to 0 to disable
-  // transformation if the vectors are already transformed.
-  vtkSetClampMacro( TransformVectors, int, 0, 1 );
-  vtkBooleanMacro( TransformVectors, int );
-  vtkGetMacro( TransformVectors, int );
-
-  // Description:
-  // Get/Set the magnification factor (default is 1).
-  vtkSetClampMacro( Magnification, int, 1, VTK_INT_MAX );
-  vtkGetMacro( Magnification, int );
-
-  // Description:
-  // On machines where the vector field texture is clamped between [0,1], one
-  // can specify the shift/scale factor used to convert the original vector
-  // field to lie in the clamped range. Default is (0.0, 1.0).
-  void SetVectorShiftScale( double shift, double scale )
-    {
-    this->VectorShift = shift;
-    this->VectorScale = scale;
-    this->Modified();
-    }
-
-  // Description:
-  // Returns whether the context supports the required extensions.
-  static bool IsSupported( vtkRenderWindow * renWin );
-
-  // Description:
-  // Perform the LIC and obtain the LIC texture. Return 1 if no error.
-  int Execute();
-
-  // Description:
-  // Same as Execute() except that the LIC operation is performed only on a
-  // window (given by the \c extent) in the input VectorField. The \c extent
-  // is relative to the input VectorField. The output LIC image will be of
-  // the size specified by extent.
-  int Execute( unsigned int extent[4] );
-  int Execute( int extent[4] );
-
-  // Description:
-  // LIC texture (initial value is NULL) set by Execute().
-  void SetLIC( vtkTextureObject * lic );
-  vtkGetObjectMacro( LIC, vtkTextureObject );
-
-protected:
-   vtkLineIntegralConvolution2D();
-  ~vtkLineIntegralConvolution2D();
-
-  int     Magnification;
-  int     NumberOfSteps;
-  int     LICForSurface;
-  int     EnhancedLIC;
-  double  LICStepSize;
-  double  VectorShift;
-  double  VectorScale;
-
-  int     TransformVectors;
-  int     ComponentIds[2];
-  double  GridSpacings[2];
-
-  vtkTextureObject * VectorField;
-  vtkTextureObject * Noise;
-  vtkTextureObject * LIC;
-
-private:
-  vtkLineIntegralConvolution2D( const vtkLineIntegralConvolution2D & ); // Not implemented.
-  void operator = ( const vtkLineIntegralConvolution2D & );             // Not implemented.
-//ETX
-};
-
-#endif
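
For reference, a rough usage sketch of the class declared in the header
removed above, assuming a current render window and vtkTextureObject
instances that have already been uploaded; the 40-step count is an arbitrary
example value, not a recommended default.

    #include "vtkLineIntegralConvolution2D.h"
    #include "vtkRenderWindow.h"
    #include "vtkTextureObject.h"

    // Returns the convolved LIC texture, or NULL when LIC is unsupported
    // or the execution fails.
    vtkTextureObject *RunBasicLIC( vtkRenderWindow *renWin,
                                   vtkTextureObject *vectorTex,
                                   vtkTextureObject *noiseTex )
    {
      if ( !vtkLineIntegralConvolution2D::IsSupported( renWin ) )
        {
        return NULL;
        }

      vtkLineIntegralConvolution2D *lic = vtkLineIntegralConvolution2D::New();
      lic->SetVectorField( vectorTex ); // bound as TEXTURE0 by the shaders
      lic->SetNoise( noiseTex );        // bound as TEXTURE1 by the shaders
      lic->SetNumberOfSteps( 40 );      // integration steps per direction
      lic->SetLICStepSize( 0.01 );      // step length in normalized image space
      lic->SetEnhancedLIC( 1 );         // two LIC passes + high-pass filter

      vtkTextureObject *result = NULL;
      if ( lic->Execute() )
        {
        result = lic->GetLIC();
        result->Register( NULL );       // keep the texture past lic->Delete()
        }
      lic->Delete();
      return result;
    }
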
diff --git a/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs.glsl b/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs.glsl
deleted file mode 100644
index 28710bf..0000000
--- a/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs.glsl
+++ /dev/null
@@ -1,149 +0,0 @@
-//=========================================================================
-//
-//  Program:   Visualization Toolkit
-//  Module:    vtkLineIntegralConvolution2D_fs.glsl
-//
-//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-//  All rights reserved.
-//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-//
-//     This software is distributed WITHOUT ANY WARRANTY; without even
-//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-//     PURPOSE.  See the above copyright notice for more information.
-//
-//=========================================================================
-
-// Filename: vtkLineIntegralConvolution2D_fs.glsl
-// Filename is useful when using gldb-gui
-
-// Provides a set of methods that the shaders can use.
-
-#version 110
-
-uniform sampler2D texVectorField; // TEXTURE0
-uniform sampler2D texNoise;       // TEXTURE1
-
-uniform vec2 uNoise2VecScaling;   // scale = vector / noise
-uniform vec2 uVectorTransform2;
-uniform vec2 uVectorShiftScale;
-uniform vec4 uVTCordRenderBBox;   // Bounding box of vector texture coordinates
-uniform int  uNTCordShiftScale;   // to shift and scale noise texture coordinates
-                                  // when the output of pass #1 LIC is high-pass
-                                  // filtered and taken as the input 'noise' of
-                                  // pass #2 LIC
-vec2         noiseTexCordShift = vec2( -uVTCordRenderBBox.x, -uVTCordRenderBBox.z );
-vec2         noiseTexCordScale =
-                         vec2(  1.0 / ( uVTCordRenderBBox.y - uVTCordRenderBBox.x ),
-                                1.0 / ( uVTCordRenderBBox.w - uVTCordRenderBBox.z )
-                             );   // the texture coordinate scale factor
-
-vec2         miniVectorTCoords = vec2(  uVTCordRenderBBox.x,  uVTCordRenderBBox.z );
-vec2         maxiVectorTCoords = vec2(  uVTCordRenderBBox.y,  uVTCordRenderBBox.w );
-
-// the range (of the vector field) that a single copy of the noise texture (note
-// that the output of pass #1 LIC, after high-pass filtering, is just an extent /
-// sub-range of the virtual full noise texture) covers --- the reciprocal of
-// uNoise2VecScaling
-vec2         NoiseTexOccupancy = vec2( 1.0, 1.0 ) / uNoise2VecScaling;
-
-// to save division
-float        vcScaleReciprocal = 1.0 / uVectorShiftScale.y;
-float        rungeKutta_1Sixth = 1.0 / 6.0;  // for rk4
-
-// Define prototype.
-// This function is compiled in to select the two components that form
-// the surface vector (see vtkLineIntegralConvolution2D.cxx for the
-// actual code).
-vec2 getSelectedComponents(vec4 color);
-
-// Given a vector field based coordinate tcords, this function returns
-// the vector in "Normalized Image" space.
-vec2 getVector( vec2 tcords )
-{
-  vec4 color  = texture2D( texVectorField, tcords );
-  vec2 vector = getSelectedComponents( color );
-
-  // since the forward transformation is y = ( x + shift ) * scale,
-  // now we perform backward transformation x = y / scale - shift
-  // to obtain the original vector
-  // note: vcScaleReciprocal = 1.0 / uVectorShiftScale.y
-  vector = ( vector * vcScaleReciprocal ) - uVectorShiftScale.x;
-  return vector * uVectorTransform2;
-}
-
-// get the normalized vector at a given point
-// note that direct use of the built-in function normalize( vec2 ) causes
-// problems as it fails to handle zero-length vectors (division by zero)
-vec2 getNormalizedVector( vec2 tcoord )
-{
-  vec2   vector = getVector( tcoord );
-  float  vecLen = length( vector );
-  vec2   retVec = ( vecLen == 0.0 ) ? vec2( 0.0, 0.0 ) : ( vector / vecLen );
-
-  // in case of an invalid vector texture coordinate
-  bvec2  beLess = lessThan   ( tcoord, miniVectorTCoords );
-  bvec2  greatr = greaterThan( tcoord, maxiVectorTCoords );
-  int    error0 = int(  any( beLess )  );
-  int    error1 = int(  any( greatr )  );
-  int    errors = ( error0 + error1  + 1 ) / 2;
-
-  return retVec * float( 1 - errors );
-}
-
-// second-order Runge-Kutta (midpoint) streamline integration
-vec2 rk2( vec2 point0, float fStep0 )
-{
-  vec2   vectr0 = getNormalizedVector(  point0                              );
-  vec2   vectr1 = getNormalizedVector(  point0 + vectr0 * ( fStep0 * 0.5 )  );
-  return point0 + vectr1 * fStep0;
-}
-
-// fourth-order Runge-Kutta streamline integration
-vec2 rk4( vec2 point0, float fStep0 )
-{
-  float  dtHalf = fStep0 * 0.5;
-  vec2   vectr0 = getNormalizedVector( point0                   );
-  vec2   vectr1 = getNormalizedVector( point0 + vectr0 * dtHalf );
-  vec2   vectr2 = getNormalizedVector( point0 + vectr1 * dtHalf );
-  vec2   vectr3 = getNormalizedVector( point0 + vectr2 * fStep0 );
-  return (  point0 + ( vectr0 + vectr1 + vectr1 + vectr2 + vectr2 + vectr3 )
-                   * ( fStep0 * rungeKutta_1Sixth )  );
-}
-
-// given a vector field-based texture coordinate vectrTCord, this function
-// accesses the noise texture (with a different size from that of the vector
-// field for pass #1 LIC, or the same size for pass #2 LIC) to locate the
-// target value
-vec3 getNoiseColor( vec2 vectrTCord )
-{
-  // 'mod' tells the position (still vector field based) to which the current
-  // fractional copy of the noise texture needs to be mapped (after possibly
-  // several full copies) and this position is then transformed to the noise
-  // texture space --- noiseTCord
-  vec2 noiseTCord = mod( vectrTCord, NoiseTexOccupancy ) * uNoise2VecScaling;
-
-  // When the output of pass #1 LIC is high-pass filtered and then taken
-  // to pass #2 LIC as the input 'noise', the size of this 'noise' texture
-  // (uVTCordRenderBBox) is equal to the current extent of the vector
-  // field x this->Magnification (see vtkLineIntegralConvolution2D.cxx).
-  // Since uNoise2VecScaling involves this->Magnification, the
-  // value of uNoise2VecScaling for pass #2 LIC is just vec2(1.0, 1.0) AS
-  // LONG AS we take this 'noise' texture as an extent (uVTCordRenderBBox)
-  // of the virtual full 'noise' texture (for which the out-of-extent part
-  // is just not defined / provided --- 'virtual'). To compensate for the
-  // concept of this 'extent', the INITIAL (since uNoise2VecScaling is 1.0
-  // by 1.0 above) vector field-based noise texture coordinate noiseTCord
-  // needs to be shifted and scaled below to index this 'noise' texture (an
-  // extent of the virtual full 'noise' texture) properly.
-  vec2 tempTCoord = ( noiseTCord + noiseTexCordShift ) * noiseTexCordScale;
-  noiseTCord = noiseTCord * float( 1 - uNTCordShiftScale ) +
-               tempTCoord * float(     uNTCordShiftScale );
-
-  // Given the 200 x 200 white noise (VTKData\Data\Data\noise.png) currently
-  // in use, half is actually used below (by multiplying the tcoord by 0.5)
-  // for better image quality.
-  noiseTCord = noiseTCord * (   float( uNTCordShiftScale + 1 ) * 0.5   );
-
-  // now given a noise texture based coordinate, return the value
-  return texture2D( texNoise, noiseTCord ).rgb;
-}
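
The rk4() routine in the shader above is a standard fourth-order Runge-Kutta
step applied to the normalized vector field. A minimal CPU-side analogue, with
a hypothetical sampleNormalizedVector() standing in for getNormalizedVector()
and a simple rotational field as example input:

    #include <cmath>
    #include <cstdio>

    struct Vec2 { float x, y; };

    static Vec2 add( Vec2 a, Vec2 b ) { Vec2 r = { a.x + b.x, a.y + b.y }; return r; }
    static Vec2 scale( Vec2 a, float s ) { Vec2 r = { a.x * s, a.y * s }; return r; }

    // stands in for getNormalizedVector(); here a simple rotational field,
    // with the same zero-length guard as the shader
    static Vec2 sampleNormalizedVector( Vec2 p )
    {
      Vec2 v = { -p.y, p.x };
      float len = std::sqrt( v.x * v.x + v.y * v.y );
      Vec2 zero = { 0.0f, 0.0f };
      return ( len == 0.0f ) ? zero : scale( v, 1.0f / len );
    }

    // one fourth-order Runge-Kutta step, mirroring rk4() in the shader
    static Vec2 rk4( Vec2 p0, float h )
    {
      Vec2 k1 = sampleNormalizedVector( p0 );
      Vec2 k2 = sampleNormalizedVector( add( p0, scale( k1, h * 0.5f ) ) );
      Vec2 k3 = sampleNormalizedVector( add( p0, scale( k2, h * 0.5f ) ) );
      Vec2 k4 = sampleNormalizedVector( add( p0, scale( k3, h ) ) );
      Vec2 sum = add( add( k1, scale( k2, 2.0f ) ), add( scale( k3, 2.0f ), k4 ) );
      return add( p0, scale( sum, h / 6.0f ) );
    }

    int main()
    {
      Vec2 p = { 1.0f, 0.0f };
      for ( int i = 0; i < 4; ++i )
        {
        p = rk4( p, 0.01f );
        printf( "%f %f\n", p.x, p.y );
        }
      return 0;
    }
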
diff --git a/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs1.glsl b/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs1.glsl
deleted file mode 100644
index 6b684b0..0000000
--- a/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs1.glsl
+++ /dev/null
@@ -1,151 +0,0 @@
-//=========================================================================
-//
-//  Program:   Visualization Toolkit
-//  Module:    vtkLineIntegralConvolution2D_fs1.glsl
-//
-//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-//  All rights reserved.
-//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-//
-//     This software is distributed WITHOUT ANY WARRANTY; without even
-//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-//     PURPOSE.  See the above copyright notice for more information.
-//
-//=========================================================================
-
-// Filename: vtkLineIntegralConvolution2D_fs1.glsl
-// Filename is useful when using gldb-gui
-
-#version 110
-
-#extension GL_ARB_draw_buffers : enable
-
-// four input texture objects
-uniform sampler2D texVectorField;    // TEXTURE0
-uniform sampler2D texNoise;          // TEXTURE1
-uniform sampler2D texLIC;            // TEXTURE2
-uniform sampler2D texTCoords;        // TEXTURE3
-
-// step type
-// 0: first access to the streamline center point
-// 1: access to a regular / non-center streamline point
-// 2: second access to the streamline center point
-//    (due to a change in the streamline integration direction)
-//
-// Texture texTCoords is indexed only for type #1 (i.e., bReset = 0).
-// It is NOT indexed for type #0 and type #2 (i.e., bReset = 1); for
-// those, the original texture coordinate (prior to any integration)
-// is used directly instead.
-//
-// Type #0 and type #2 each contribute half the texture value since
-// they access the same streamline point.
-//
-// The accumulation texture texLIC is NOT accessed for type #0 because
-// nothing has been accumulated upon the first integration step.
-uniform int   uStepType;
-uniform int   uSurfaced;             // is surfaceLIC (0 / 1)?
-uniform int   uLastPass;             // is the last pass of LIC (0 / 1)?
-uniform int   uNumSteps;             // number of steps in each direction.
-uniform int   uStepSign;             // +1: forward;    -1: backward.
-uniform float uStepSize;             // step size in parametric space
-
-// two modes for masking the texture value of a zero-vector fragment
-// 0: retain the white noise texture value by storing the negated version
-// 1: export ( -1.0, -1.0, -1.0, -1.0 ) for use by vtkSurfaceLICPainter
-//    to make this LIC fragment totally transparent to show the underlying
-//    geometry surface
-uniform int   uMaskType;
-
-float   normalizer = 1.0 / float( 2 * uNumSteps + 1 ); // for normalization
-
-// functions defined in vtkLineIntegralConvolution2D_fs.glsl
-vec2 rk2( vec2 xy, float h );
-vec2 rk4( vec2 xy, float h );
-vec2 getVector( vec2 tcords );
-vec3 getNoiseColor( vec2 tcoord );
-
-
-void main( void )
-{
-  vec2 vector = getVector( gl_TexCoord[1].st );
-
-  // ==== for surfaceLIC ====
-  // Any fragment where the vector is zero needs to be assigned a mask
-  // texture value, either vec4( -1.0, -1.0, -1.0, -1.0 ) or the negated
-  // version of the white noise texture value. The former is exploited by
-  // vtkSurfaceLICPainter to make this LIC fragment totally transparent
-  // to show the underlying geometry surface while the latter is used by the
-  // high-pass filter (vtkLineIntegralConvolution2D_fs2, invoked between
-  // two LIC passes that are employed for improved image quality) to ignore
-  // such fragments. Otherwise the output of the high-pass filter (taking
-  // pass #1's output as the input) would contain high-frequency noise while
-  // the (pass #2) LIC process requires white noise from the zero-vector area.
-  //
-  // ==== for non-surfaceLIC ====
-  // Any fragment where the vector is zero is assigned the negated version
-  // of the white noise texture value UNLESS this is the last pass of LIC
-  // (a non-last pass is followed by a high-pass filter, which uses the
-  // negation to recognize such fragments). If this is the last pass, the
-  // fragment simply takes the white noise value as the output.
-  if (    all(   equal(  vector,  vec2( 0.0, 0.0 )  )   )    )
-    {
-    if ( uSurfaced == 1 )
-      {
-      gl_FragData[0] = vec4(  ( -1.0 ) * getNoiseColor( gl_TexCoord[1].st ),
-                              ( -1.0 )  )             * float( 1 - uMaskType ) +
-                       vec4( -1.0, -1.0, -1.0, -1.0 ) * float(     uMaskType );
-      gl_FragData[1] = vec4( -1.0, -1.0, -1.0, -1.0 );
-      }
-    else
-      {
-      float   fscale = float( uLastPass + uLastPass - 1 );
-      gl_FragData[0] = vec4(  fscale * getNoiseColor( gl_TexCoord[1].st ),
-                              fscale  );
-      gl_FragData[1] = vec4(  fscale,  fscale,  fscale,  fscale  );
-      }
-
-    return;
-    }
-
-  // determine if the texture coordinate needs to be reset
-  // bReset = 0: texture texTCoords needs to be indexed to obtain the coordinate
-  //             for a regular / non-center streamline point.
-  // bReset = 1: the original texture coordinate (prior to any integration) is
-  //             used for the streamline center point.
-  int  bReset = 1 - (  ( uStepType + 1 ) / 2  ) * ( 1 - uStepType / 2 );
-
-  // obtain the actual texture coordinate
-  vec2 tcord0;
-  if(bReset==1)
-    {
-    tcord0=gl_TexCoord[1].st;
-    }
-  else
-    {
-    tcord0=texture2D( texTCoords, gl_TexCoord[0].st ).rg;
-    }
-
-  // normalize the contribution of this streamline point to the center and ask
-  // the streamline center to contribute half the texture value per visit (the
-  // streamline center is accessed two times)
-  vec3 color0 = (  1.0  -  float( bReset )  *  0.5  ) *
-                (  getNoiseColor( tcord0 )  *  normalizer  );
-
-  // integration to locate the next streamline point
-  vec2 tcord1 = rk4(  tcord0,  float( uStepSign ) * uStepSize  );
-
-  // access the accumulation texture to obtain the summed texture value that will
-  // be eventually assigned to the streamline center (in fact, no accumulation is
-  // accessed and used for type #0 --- the first access to the center)
-  // NOTE: upon the first access to the center, the accumulation texture may
-  // (at least on some platforms) contain invalid (NaN) values, so accessing
-  // the initial accumulation texture can cause problems.
-  vec3 accumu = vec3( 0.0, 0.0, 0.0 );
-  if ( uStepType > 0 )
-    {
-    accumu = texture2D( texLIC, gl_TexCoord[0].st ).rgb;
-    }
-
-  gl_FragData[0] = vec4( color0 + accumu, 1.0 );
-  gl_FragData[1] = vec4( tcord1.s, tcord1.t, 0.0, 1.0 );
-}
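
The uStepType / bReset arithmetic above maps step types 0 and 2 (the two
visits of the streamline center) to bReset = 1 and type 1 to bReset = 0;
together with the 0.5 center weighting and the 1 / (2 * uNumSteps + 1)
normalizer, the box-kernel weights of one streamline sum to one. A small
check (numSteps is an arbitrary example value):

    #include <cstdio>

    int main()
    {
      const int numSteps = 10;                        // example value for uNumSteps
      const double normalizer = 1.0 / ( 2 * numSteps + 1 );

      // bReset as computed in the shader: 1 for types 0 and 2, 0 for type 1
      for ( int stepType = 0; stepType <= 2; ++stepType )
        {
        int bReset = 1 - (  ( stepType + 1 ) / 2  ) * ( 1 - stepType / 2 );
        printf( "uStepType=%d -> bReset=%d\n", stepType, bReset );
        }

      // total weight: 2*numSteps regular points at full weight plus the center
      // visited twice at half weight, each scaled by the normalizer
      double total = 2 * numSteps * normalizer + 2 * ( 0.5 * normalizer );
      printf( "sum of box-kernel weights = %f\n", total ); // prints 1.000000
      return 0;
    }
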
diff --git a/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs2.glsl b/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs2.glsl
deleted file mode 100644
index 10692d7..0000000
--- a/Rendering/OpenGL/vtkLineIntegralConvolution2D_fs2.glsl
+++ /dev/null
@@ -1,111 +0,0 @@
-//=========================================================================
-//
-//  Program:   Visualization Toolkit
-//  Module:    vtkLineIntegralConvolution2D_fs2.glsl
-//
-//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-//  All rights reserved.
-//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-//
-//     This software is distributed WITHOUT ANY WARRANTY; without even
-//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-//     PURPOSE.  See the above copyright notice for more information.
-//
-//=========================================================================
-
-// Filename: vtkLineIntegralConvolution2D_fs2.glsl
-// Filename is useful when using gldb-gui
-
-#version 110
-
-#extension GL_ARB_draw_buffers : enable
-
-uniform sampler2D licTexture;
-uniform float     uLicTexWid; // texture width
-uniform float     uLicTexHgt; // texture height
-
-// shift to the neighboring fragment
-float tcordxDelt = 1.0 / uLicTexWid;
-float tcordyDelt = 1.0 / uLicTexHgt;
-
-// the 8 surrounding fragments accessed by the 3x3 Laplacian matrix
-// -1 -1 -1
-// -1  9 -1
-// -1 -1 -1
-vec2  cordShift0 = vec2( -tcordxDelt,  tcordyDelt );
-vec2  cordShift1 = vec2(  0.0,         tcordyDelt );
-vec2  cordShift2 = vec2(  tcordxDelt,  tcordyDelt );
-
-vec2  cordShift3 = vec2( -tcordxDelt,  0.0        );
-vec2  cordShift4 = vec2(  tcordxDelt,  0.0        );
-
-vec2  cordShift5 = vec2( -tcordxDelt, -tcordyDelt );
-vec2  cordShift6 = vec2(  0.0,        -tcordyDelt );
-vec2  cordShift7 = vec2(  tcordxDelt, -tcordyDelt );
-
-// used for handling exceptions
-vec2  miniTCoord = vec2(       tcordxDelt,       tcordyDelt );
-vec2  maxiTCoord = vec2( 1.0 - tcordxDelt, 1.0 - tcordyDelt );
-vec4  miniTexVal = vec4( 0.0, 0.0, 0.0, 0.0 );
-vec4  maxiTexVal = vec4( 1.0, 1.0, 1.0, 1.0 );
-
-// perform a 3x3 Laplacian high-pass filter on the input image
-void main( void )
-{
-  int   bException;
-  vec4  outputValu;
-  vec4  fragTexVal = texture2D( licTexture, gl_TexCoord[0].st );
-
-  // In pass #1 LIC (vtkLineIntegralConvolution2D_fs1), any fragment where
-  // the vector is zero is assigned a negative texture value (by negating
-  // the associated input noise texture value). High-pass filtering is skipped
-  // for this fragment in order to pass the original input noise value forward
-  // to pass #2 LIC. The line below checks whether this is such a fragment.
-  bvec4 exception0 = lessThan( fragTexVal, miniTexVal );
-
-  // checks if this fragment has 8 valid surrounding fragments (in tcoords)
-  bvec2 exception1 = lessThan   ( gl_TexCoord[0].st, miniTCoord );
-  bvec2 exception2 = greaterThan( gl_TexCoord[0].st, maxiTCoord );
-
-  // perform high-pass filtering
-  outputValu  = fragTexVal * 9.0;
-  outputValu -= texture2D( licTexture, gl_TexCoord[0].st + cordShift0 ) +
-                texture2D( licTexture, gl_TexCoord[0].st + cordShift1 ) +
-                texture2D( licTexture, gl_TexCoord[0].st + cordShift2 ) +
-
-                texture2D( licTexture, gl_TexCoord[0].st + cordShift3 ) +
-                texture2D( licTexture, gl_TexCoord[0].st + cordShift4 ) +
-
-                texture2D( licTexture, gl_TexCoord[0].st + cordShift5 ) +
-                texture2D( licTexture, gl_TexCoord[0].st + cordShift6 ) +
-                texture2D( licTexture, gl_TexCoord[0].st + cordShift7 );
-
-  // Checks if high-pass filtering produces out-of-range texture values
-  // that might incur artifacts near the interface between the valid flow
-  // areas and zero-vector areas. In case of such a filtering result, the
-  // initial texture value (from the output of pass #1 LIC) is simply
-  // adopted to suppress artifacts as much as possible.
-  bvec4 exception3 = lessThan   ( outputValu, miniTexVal );
-  bvec4 exception4 = greaterThan( outputValu, maxiTexVal );
-  bException = int(  any( exception3 )  ) + int(  any( exception4 )  );
-  outputValu = fragTexVal * float(     bException ) +
-               outputValu * float( 1 - bException );
-
-  // In case of any invalid surrounding fragment, high-pass filtering is
-  // skipped and the initial texture value (from the output of pass #1 LIC)
-  // is employed instead.
-  bException = int(  any( exception1 )  ) + int(  any( exception2 )  );
-  bException = ( bException + 1 ) / 2;
-  outputValu = fragTexVal * float(     bException ) +
-               outputValu * float( 1 - bException );
-
-  // In case of a zero-vector fragment, the negative texture value (the noise
-  // texture value stored in the output of pass #1 LIC) is negated again below
-  // to restore the positive noise texture value that is then forwarded to pass
-  // #2 LIC as the input noise.
-  bException = int(  any( exception0 )  );
-  outputValu = fragTexVal * float( 0 - bException ) +
-               outputValu * float( 1 - bException );
-
-  gl_FragData[0]= outputValu;
-}
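
A minimal CPU-side sketch of the same 3x3 high-pass kernel (center weight 9,
all neighbors -1) with the out-of-range fallback used by the shader above.
The 4x4 input values are arbitrary example data, and the border handling is
simplified to keeping the original value:

    #include <cstdio>

    int main()
    {
      const int W = 4, H = 4;
      // small example grayscale image in [0, 1]
      float src[H][W] = { { 0.2f, 0.4f, 0.4f, 0.2f },
                          { 0.4f, 0.8f, 0.8f, 0.4f },
                          { 0.4f, 0.8f, 0.8f, 0.4f },
                          { 0.2f, 0.4f, 0.4f, 0.2f } };
      float dst[H][W];

      for ( int y = 0; y < H; ++y )
        {
        for ( int x = 0; x < W; ++x )
          {
          // border fragments keep their original value (no full neighborhood)
          if ( x == 0 || y == 0 || x == W - 1 || y == H - 1 )
            {
            dst[y][x] = src[y][x];
            continue;
            }
          float v = 9.0f * src[y][x];
          for ( int dy = -1; dy <= 1; ++dy )
            {
            for ( int dx = -1; dx <= 1; ++dx )
              {
              if ( dx || dy )
                {
                v -= src[y + dy][x + dx];
                }
              }
            }
          // exception handling: fall back to the unfiltered value when the
          // filtered result leaves the [0, 1] range, as the shader does
          dst[y][x] = ( v < 0.0f || v > 1.0f ) ? src[y][x] : v;
          }
        }

      for ( int y = 0; y < H; ++y )
        {
        for ( int x = 0; x < W; ++x )
          {
          printf( "%5.2f ", dst[y][x] );
          }
        printf( "\n" );
        }
      return 0;
    }
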
diff --git a/Rendering/OpenGL/vtkOSOpenGLRenderWindow.cxx b/Rendering/OpenGL/vtkOSOpenGLRenderWindow.cxx
index 24c46f8..cc09760 100644
--- a/Rendering/OpenGL/vtkOSOpenGLRenderWindow.cxx
+++ b/Rendering/OpenGL/vtkOSOpenGLRenderWindow.cxx
@@ -285,6 +285,7 @@ void vtkOSOpenGLRenderWindow::Finalize (void)
 // Change the window to fill the entire screen.
 void vtkOSOpenGLRenderWindow::SetFullScreen(int arg)
 {
+  (void)arg;
   this->Modified();
 }
 
@@ -438,10 +439,12 @@ void vtkOSOpenGLRenderWindow::SetParentInfo(char *info)
 
 void vtkOSOpenGLRenderWindow::SetWindowId(void *arg)
 {
+  (void)arg;
 //   this->SetWindowId((Window)arg);
 }
 void vtkOSOpenGLRenderWindow::SetParentId(void *arg)
 {
+  (void)arg;
 //   this->SetParentId((Window)arg);
 }
 
@@ -501,6 +504,7 @@ void vtkOSOpenGLRenderWindow::SetWindowName(const char * cname)
 
 void vtkOSOpenGLRenderWindow::SetNextWindowId(void *arg)
 {
+  (void)arg;
 //   this->SetNextWindowId((Window)arg);
 }
 
diff --git a/Rendering/OpenGL/vtkOpaquePass.cxx b/Rendering/OpenGL/vtkOpaquePass.cxx
index b572263..f974c60 100644
--- a/Rendering/OpenGL/vtkOpaquePass.cxx
+++ b/Rendering/OpenGL/vtkOpaquePass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkOpaquePass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkOpaquePass);
 
diff --git a/Rendering/OpenGL/vtkOpenGLActor.cxx b/Rendering/OpenGL/vtkOpenGLActor.cxx
index 308cf3c..0aad7f4 100644
--- a/Rendering/OpenGL/vtkOpenGLActor.cxx
+++ b/Rendering/OpenGL/vtkOpenGLActor.cxx
@@ -19,6 +19,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkOpenGLRenderer.h"
 #include "vtkProperty.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkOpenGL.h"
 #include <math.h>
@@ -28,6 +29,8 @@ vtkStandardNewMacro(vtkOpenGLActor);
 // Actual actor render method.
 void vtkOpenGLActor::Render(vtkRenderer *ren, vtkMapper *mapper)
 {
+  vtkOpenGLClearErrorMacro();
+
   // get opacity
   double opacity = this->GetProperty()->GetOpacity();
   if (opacity == 1.0)
@@ -87,6 +90,8 @@ void vtkOpenGLActor::Render(vtkRenderer *ren, vtkMapper *mapper)
     {
     glDepthMask(GL_TRUE);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkOpenGLCamera.cxx b/Rendering/OpenGL/vtkOpenGLCamera.cxx
index e29ef01..a1a2d65 100644
--- a/Rendering/OpenGL/vtkOpenGLCamera.cxx
+++ b/Rendering/OpenGL/vtkOpenGLCamera.cxx
@@ -19,6 +19,7 @@
 #include "vtkOpenGLRenderer.h"
 #include "vtkOutputWindow.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 #include "vtkgluPickMatrix.h"
 
 #include "vtkOpenGL.h"
@@ -30,6 +31,8 @@ vtkStandardNewMacro(vtkOpenGLCamera);
 // Implement base class method.
 void vtkOpenGLCamera::Render(vtkRenderer *ren)
 {
+  vtkOpenGLClearErrorMacro();
+
   double aspect[2];
   int  lowerLeft[2];
   int usize, vsize;
@@ -160,11 +163,15 @@ void vtkOpenGLCamera::Render(vtkRenderer *ren)
     }
 
   matrix->Delete();
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 //----------------------------------------------------------------------------
 void vtkOpenGLCamera::UpdateViewport(vtkRenderer *ren)
 {
+  vtkOpenGLClearErrorMacro();
+
   int lowerLeft[2];
   int usize, vsize;
   ren->GetTiledSizeAndOrigin(&usize, &vsize, lowerLeft, lowerLeft+1);
@@ -172,6 +179,8 @@ void vtkOpenGLCamera::UpdateViewport(vtkRenderer *ren)
   glViewport(lowerLeft[0], lowerLeft[1], usize, vsize);
   glEnable(GL_SCISSOR_TEST);
   glScissor(lowerLeft[0], lowerLeft[1], usize, vsize);
+
+  vtkOpenGLCheckErrorMacro("failed after UpdateViewport");
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkOpenGLClipPlanesPainter.cxx b/Rendering/OpenGL/vtkOpenGLClipPlanesPainter.cxx
index 89905cd..cc8e7ec 100644
--- a/Rendering/OpenGL/vtkOpenGLClipPlanesPainter.cxx
+++ b/Rendering/OpenGL/vtkOpenGLClipPlanesPainter.cxx
@@ -20,8 +20,9 @@
 #include "vtkObjectFactory.h"
 #include "vtkPlane.h"
 #include "vtkPlaneCollection.h"
-
+#include "vtkOpenGLError.h"
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkOpenGLClipPlanesPainter);
 
@@ -41,6 +42,8 @@ void vtkOpenGLClipPlanesPainter::RenderInternal(vtkRenderer *renderer,
                                                 unsigned long typeflags,
                                                 bool forceCompileOnly)
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkPlaneCollection *clipPlanes;
   vtkPlane *plane;
   int i, numClipPlanes;
@@ -101,6 +104,8 @@ void vtkOpenGLClipPlanesPainter::RenderInternal(vtkRenderer *renderer,
     {
     glDisable(static_cast<GLenum>(GL_CLIP_PLANE0+i));
     }
+
+  vtkOpenGLCheckErrorMacro("failed after RenderInternal");
 }
 
 //-----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkOpenGLCoincidentTopologyResolutionPainter.cxx b/Rendering/OpenGL/vtkOpenGLCoincidentTopologyResolutionPainter.cxx
index f426711..4eef514 100644
--- a/Rendering/OpenGL/vtkOpenGLCoincidentTopologyResolutionPainter.cxx
+++ b/Rendering/OpenGL/vtkOpenGLCoincidentTopologyResolutionPainter.cxx
@@ -23,6 +23,7 @@
 #include "vtkRenderer.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkOpenGLCoincidentTopologyResolutionPainter);
 
@@ -70,6 +71,7 @@ void vtkOpenGLCoincidentTopologyResolutionPainter::RenderPolygonOffset(
   bool forceCompileOnly)
   {
 #ifdef GL_VERSION_1_1
+  vtkOpenGLClearErrorMacro();
   if (this->OffsetFaces)
     {
     glEnable(GL_POLYGON_OFFSET_FILL);
@@ -95,6 +97,7 @@ void vtkOpenGLCoincidentTopologyResolutionPainter::RenderPolygonOffset(
     glDisable(GL_POLYGON_OFFSET_LINE);
     glDisable(GL_POLYGON_OFFSET_POINT);
     }
+  vtkOpenGLCheckErrorMacro("failed after RenderPolygonOffset");
 #endif
 }
 
@@ -103,6 +106,8 @@ void vtkOpenGLCoincidentTopologyResolutionPainter::RenderShiftZBuffer(
   vtkRenderer *renderer, vtkActor *actor, unsigned long typeflags,
   bool forceCompileOnly)
 {
+  vtkOpenGLClearErrorMacro();
+
   // Get the flags for each type of primitive.  Polygons can be drawn
   // as vertices or lines rather than filled, so check the property and
   // OpenGL flags to try to determine which one we are doing.
@@ -140,6 +145,7 @@ void vtkOpenGLCoincidentTopologyResolutionPainter::RenderShiftZBuffer(
       glMatrixMode(GL_PROJECTION);
       glPushMatrix();
       glTranslated(0.0, 0.0, 2.0*this->ZShift*(range[1]-range[0]));
+      vtkOpenGLCheckErrorMacro("failed after setup");
       this->Superclass::RenderInternal(renderer, actor, vertFlags,
                                        forceCompileOnly);
       glMatrixMode(GL_PROJECTION);
@@ -150,6 +156,7 @@ void vtkOpenGLCoincidentTopologyResolutionPainter::RenderShiftZBuffer(
       glMatrixMode(GL_PROJECTION);
       glPushMatrix();
       glTranslated(0.0, 0.0, this->ZShift*(range[1]-range[0]));
+      vtkOpenGLCheckErrorMacro("failed after setup");
       this->Superclass::RenderInternal(renderer, actor, lineFlags,
                                        forceCompileOnly);
       glMatrixMode(GL_PROJECTION);
@@ -166,6 +173,7 @@ void vtkOpenGLCoincidentTopologyResolutionPainter::RenderShiftZBuffer(
     this->Superclass::RenderInternal(renderer, actor, typeflags,
                                      forceCompileOnly);
     }
+  vtkOpenGLCheckErrorMacro("failed after RenderShiftZBuffer");
 }
 
 //-----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkOpenGLDisplayListPainter.cxx b/Rendering/OpenGL/vtkOpenGLDisplayListPainter.cxx
index 5deb55e..4081681 100644
--- a/Rendering/OpenGL/vtkOpenGLDisplayListPainter.cxx
+++ b/Rendering/OpenGL/vtkOpenGLDisplayListPainter.cxx
@@ -24,6 +24,7 @@
 #include "vtkTimerLog.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 #include <map>
 
@@ -40,6 +41,7 @@ public:
 
   void ReleaseAllLists()
     {
+    vtkOpenGLClearErrorMacro();
     DisplayListMapType::iterator iter;
     for (iter = this->DisplayListMap.begin(); iter != this->DisplayListMap.end();
       iter++)
@@ -47,16 +49,19 @@ public:
       glDeleteLists(iter->second, 1);
       }
     this->DisplayListMap.clear();
+    vtkOpenGLStaticCheckErrorMacro("failed after ReleaseAllLists");
     }
 
   void ReleaseList(unsigned long key)
     {
+    vtkOpenGLClearErrorMacro();
     DisplayListMapType::iterator iter = this->DisplayListMap.find(key);
     if (iter != this->DisplayListMap.end())
       {
       glDeleteLists(iter->second, 1);
       this->DisplayListMap.erase(iter);
       }
+    vtkOpenGLStaticCheckErrorMacro("failed after ReleaseList");
     }
 
   void UpdateBuildTime()
@@ -66,7 +71,6 @@ public:
       this->BuildTime.Modified();
       }
     }
-
 };
 
 //-----------------------------------------------------------------------------
@@ -105,6 +109,8 @@ void vtkOpenGLDisplayListPainter::RenderInternal(vtkRenderer *renderer,
                                                  unsigned long typeflags,
                                                  bool forceCompileOnly)
 {
+  vtkOpenGLClearErrorMacro();
+
   // if active render window has changed, then release the old display lists on
   // the old window, if the old window is still valid.
   if (this->LastWindow &&
@@ -178,6 +184,8 @@ void vtkOpenGLDisplayListPainter::RenderInternal(vtkRenderer *renderer,
     this->Timer->StopTimer();
     this->TimeToDraw += this->Timer->GetElapsedTime();
     }
+
+  vtkOpenGLCheckErrorMacro("failed after RenderInternal");
 }
 
 //-----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkOpenGLError.h.in b/Rendering/OpenGL/vtkOpenGLError.h.in
new file mode 100644
index 0000000..009ffe0
--- /dev/null
+++ b/Rendering/OpenGL/vtkOpenGLError.h.in
@@ -0,0 +1,252 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkOpenGLError.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __vtkOpenGLError_h
+#define __vtkOpenGLError_h
+
+#include "vtkgl.h"
+#include "vtkSetGet.h"
+
+#cmakedefine VTK_REPORT_OPENGL_ERRORS
+
+// Description:
+// The following functions can be used to detect and report, and/or
+// silently clear OpenGL error flags. These are not intended to be
+// used directly; instead, use the following macros.
+//
+// vtkOpenGLClearErrorMacro() -- Silently clear OpenGL error flags.
+//
+// vtkOpenGLCheckErrorMacro(message) -- Check and clear OpenGL's error
+// flags. Report errors detected via vtkErrorMacro.
+//
+// vtkOpenGLStaticCheckErrorMacro(message) -- Check and clear OpenGL's
+// error flags. Report errors detected via vtkGenericWarningMacro.
+// This may be used in static methods and outside of vtkObjects.
+//
+// The intended usage pattern is to 1) call vtkOpenGLClearErrorMacro
+// at the top of, and 2) vtkOpenGLCheckErrorMacro at the bottom of
+// methods that make OpenGL calls.
+//
+// By calling vtkOpenGLClearErrorMacro at the top of a method that
+// makes OpenGL calls, you isolate the code and prevent it from
+// detecting any preceding errors. By calling vtkOpenGLCheckErrorMacro
+// at the bottom of the method you clear the error flags and report
+// any errors that have occurred in the method where they occurred.
+//
+// The macros may be completely disabled via the CMake variable
+// VTK_REPORT_OPENGL_ERRORS. Note that in that case error flags are
+// never cleared, so if an error occurs the flags will remain dirty,
+// making it impossible for anyone else to use them reliably. Please
+// don't disable them without a good reason.
+
+
+
+// Description:
+// Convert an OpenGL error code into a descriptive
+// string.
+inline
+const char *vtkOpenGLStrError(unsigned int code)
+{
+  switch(static_cast<GLenum>(code))
+    {
+    case GL_NO_ERROR:
+      return "No error";
+      break;
+    case GL_INVALID_ENUM:
+      return "Invalid enum";
+      break;
+    case GL_INVALID_VALUE:
+      return "Invalid value";
+      break;
+    case GL_INVALID_OPERATION:
+      return "Invalid operation";
+      break;
+    case GL_STACK_OVERFLOW:
+      return "Stack overflow";
+      break;
+    case GL_STACK_UNDERFLOW:
+      return "Stack underflow";
+      break;
+    case GL_OUT_OF_MEMORY:
+      return "Out of memory";
+      break;
+    case vtkgl::TABLE_TOO_LARGE:
+      return "Table too large";
+      break;
+    case vtkgl::INVALID_FRAMEBUFFER_OPERATION_EXT:
+      return "Invalid framebuffer operation";
+      break;
+    case vtkgl::TEXTURE_TOO_LARGE_EXT:
+      return "Texture too large";
+      break;
+    }
+  return "Unknown error";
+}
+
+// Description:
+// Check for OpenGL errors. Error status is queried until
+// OpenGL reports no errors. The list of errors and their
+// descriptions are returned in the user-supplied arrays.
+// The user passes the size of the arrays as the first argument.
+// Error flags are still cleared even if the user arrays hold
+// fewer entries than the number of errors.
+#if defined(VTK_REPORT_OPENGL_ERRORS)
+inline
+int vtkGetOpenGLErrors(
+      int maxNum,
+      unsigned int *errCode,
+      const char **errDesc)
+{
+  int i=0;
+  GLenum code = glGetError();
+  if (i<maxNum)
+    {
+    errCode[i] = static_cast<unsigned int>(code);
+    errDesc[i] = vtkOpenGLStrError(code);
+    }
+  while (code!=GL_NO_ERROR)
+    {
+    i+=1;
+    code = glGetError();
+    if (i<maxNum)
+      {
+      errCode[i] = static_cast<unsigned int>(code);
+      errDesc[i] = vtkOpenGLStrError(code);
+      }
+    }
+  return i;
+}
+#else
+inline
+int vtkGetOpenGLErrors(
+      int maxNum,
+      unsigned int *errCode,
+      const char **errDesc)
+{
+  (void)maxNum;
+  (void)errCode;
+  (void)errDesc;
+  return 0;
+}
+#endif
+
+// Description:
+// Send a set of errors collected by vtkGetOpenGLErrors
+// to the given stream. The number of errors is obtained
+// from the return value of vtkGetOpenGLErrors, while the
+// max errors argument gives the size of the error arrays.
+#if defined(VTK_REPORT_OPENGL_ERRORS)
+inline
+void vtkPrintOpenGLErrors(
+      ostream &os,
+      int maxErrors,
+      int numErrors,
+      unsigned int *errCode,
+      const char **errDesc)
+{
+  os << numErrors << " OpenGL errors detected" << endl;
+  for (int i=0; (i<numErrors)&&(i<maxErrors); ++i)
+    {
+    os << "  " <<  i << " : (" << errCode[i] << ") " << errDesc[i] << endl;
+    }
+  if (numErrors>maxErrors)
+    {
+    os
+      << "More than " << maxErrors
+      << " detected! The remainder are not reported"
+      << endl;
+    }
+}
+#else
+inline
+void vtkPrintOpenGLErrors(
+      ostream &os,
+      int maxErrors,
+      int numErrors,
+      unsigned int *errCode,
+      const char **errDesc)
+{
+  (void)os;
+  (void)maxErrors;
+  (void)numErrors;
+  (void)errCode;
+  (void)errDesc;
+}
+#endif
+
+// Description:
+// Clear OpenGL's error flags.
+#if defined(VTK_REPORT_OPENGL_ERRORS)
+inline
+void vtkClearOpenGLErrors()
+{
+  while (glGetError()!=GL_NO_ERROR){;}
+}
+#else
+inline
+void vtkClearOpenGLErrors(){}
+#endif
+
+#if !defined(VTK_REPORT_OPENGL_ERRORS)
+# define vtkOpenGLClearErrorMacro()
+# define vtkOpenGLCheckErrorMacro(message)
+# define vtkOpenGLStaticCheckErrorMacro(message)
+#else
+# define vtkOpenGLClearErrorMacro() vtkClearOpenGLErrors();
+# include <sstream> // for error macro
+# define vtkOpenGLCheckErrorMacroImpl(ostr, message) \
+{                                                    \
+  const int maxErrors = 16;                          \
+  unsigned int errCode[maxErrors] = {0};             \
+  const char *errDesc[maxErrors] = {NULL};           \
+                                                     \
+  int numErrors                                      \
+    = vtkGetOpenGLErrors(                            \
+        maxErrors,                                   \
+        errCode,                                     \
+        errDesc);                                    \
+                                                     \
+  if (numErrors)                                     \
+    {                                                \
+    std::ostringstream oss;                          \
+    vtkPrintOpenGLErrors(                            \
+          oss,                                       \
+          maxErrors,                                 \
+          numErrors,                                 \
+          errCode,                                   \
+          errDesc);                                  \
+                                                     \
+    ostr(<< message << " " << oss.str().c_str());    \
+    }                                                \
+}
+# define vtkOpenGLCheckErrorMacro(message) \
+  vtkOpenGLCheckErrorMacroImpl(vtkErrorMacro, message)
+# define vtkOpenGLStaticCheckErrorMacro(message) \
+  vtkOpenGLCheckErrorMacroImpl(vtkGenericWarningMacro, message)
+#endif
+
+// Use these macros for fine-grained error checking during
+// debugging. They are removed in release (NDEBUG) builds.
+#ifdef NDEBUG
+# define vtkOpenGLDebugClearErrorMacro()
+# define vtkOpenGLDebugCheckErrorMacro(message)
+#else
+# define vtkOpenGLDebugClearErrorMacro() \
+  vtkOpenGLClearErrorMacro()
+# define vtkOpenGLDebugCheckErrorMacro(message) \
+  vtkOpenGLStaticCheckErrorMacro(message)
+#endif
+
+#endif
+// VTK-HeaderTest-Exclude: vtkOpenGLError.h
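
A minimal sketch of the usage pattern this header documents, matching the way
the surrounding hunks instrument Render() methods; the free function and its
GL calls are hypothetical, and vtkOpenGLStaticCheckErrorMacro is used because
the code is not inside a vtkObject method (a vtkObject method would use
vtkOpenGLCheckErrorMacro instead):

    #include "vtkOpenGL.h"
    #include "vtkOpenGLError.h"

    // isolate this function's GL calls, then report anything that went wrong
    void DrawSomething()
    {
      vtkOpenGLClearErrorMacro();  // drop any pre-existing error flags

      glEnable( GL_DEPTH_TEST );   // the function's actual GL work goes here
      glDepthMask( GL_TRUE );

      vtkOpenGLStaticCheckErrorMacro( "failed after DrawSomething" );
    }
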
diff --git a/Rendering/OpenGL/vtkOpenGLExtensionManager.cxx b/Rendering/OpenGL/vtkOpenGLExtensionManager.cxx
index f8b764e..fc83874 100644
--- a/Rendering/OpenGL/vtkOpenGLExtensionManager.cxx
+++ b/Rendering/OpenGL/vtkOpenGLExtensionManager.cxx
@@ -28,10 +28,11 @@
 #include "vtkObjectFactory.h"
 
 #include <string.h>
-
 #include <string>
-
-#include <assert.h>
+#include <cassert>
+#include <sstream>
+using std::istringstream;
+using std::string;
 
 #ifdef VTK_DEFINE_GLX_GET_PROC_ADDRESS_PROTOTYPE
 extern "C" vtkglX::__GLXextFuncPtr glXGetProcAddressARB(const GLubyte *);
@@ -39,7 +40,6 @@ extern "C" vtkglX::__GLXextFuncPtr glXGetProcAddressARB(const GLubyte *);
 
 #ifdef VTK_USE_VTK_DYNAMIC_LOADER
 #include "vtkDynamicLoader.h"
-#include <string>
 #include <list>
 #endif
 
@@ -54,7 +54,52 @@ extern "C" vtkglX::__GLXextFuncPtr glXGetProcAddressARB(const GLubyte *);
 // GLU is currently not linked in VTK.  We do not support it here.
 #define GLU_SUPPORTED   0
 
-vtkStandardNewMacro(vtkOpenGLExtensionManager);
+// ensure we never use a null pointer to
+// a string.
+#define safes(arg) (arg?((const char *)arg):"")
+
+namespace
+{
+// helper to locate a token (a substring delimited by ' ' or '\n'),
+// e.g. searching for "ati" should not succeed on "corporation"
+bool FindToken(const string &str, string token)
+{
+  string tmp;
+  istringstream iss(str);
+  while (iss.good())
+    {
+    iss >> tmp;
+    if (tmp==token)
+      {
+      return true;
+      }
+    }
+  return false;
+}
+// convert a string in one of the forms "#.#.#", "#.#", or "#" to
+// major, minor, and patch version numbers; returns the count parsed.
+int StringToVersion(string ver, int &major, int &minor, int &patch)
+{
+  char dot;
+  istringstream iss(ver);
+  major=0;
+  minor=0;
+  patch=0;
+  if (!(iss >> major))
+    {
+    return 0;
+    }
+  if (!((iss >> dot) && (iss >> minor)))
+    {
+    return 1;
+    }
+  if (!((iss >> dot) && (iss >> patch)))
+    {
+    return 2;
+    }
+  return 3;
+}
+}
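The behavior of these two helpers is easiest to see with concrete inputs; the sketch below is illustrative only and assumes the helpers are visible from the calling code. The vendor and version strings are taken from the examples quoted later in this file:

#include <cassert>
#include <string>

static void ExerciseHelpers()
{
  // token matching is whole-word: "ati" is not a token of the NVIDIA
  // vendor string, while "ATI" is a token of the ATI vendor string
  assert(!FindToken("NVIDIA Corporation", "ati"));
  assert( FindToken("ATI Technologies Inc.", "ATI"));

  // StringToVersion reports how many fields it parsed (1, 2, or 3)
  int major = 0, minor = 0, patch = 0;
  assert(StringToVersion("3.3.0", major, minor, patch) == 3 && major == 3);
  assert(StringToVersion("2.1", major, minor, patch) == 2 && patch == 0);
}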
 
 namespace vtkgl
 {
@@ -70,15 +115,36 @@ int LoadAsARBExtension(const char *name,
                        vtkOpenGLExtensionManager *manager);
 }
 
+
+// ----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkOpenGLExtensionManager);
+
+// ----------------------------------------------------------------------------
 vtkOpenGLExtensionManager::vtkOpenGLExtensionManager()
 {
   this->OwnRenderWindow = 0;
   this->RenderWindow = NULL;
   this->ExtensionsString = NULL;
-
+  this->DriverGLVersion = "";
+  this->DriverGLVersionMajor = 1;
+  this->DriverGLVersionMinor = 1;
+  this->DriverGLVersionPatch = 0;
+  this->DriverGLVendor = "";
+  this->DriverGLRenderer = "";
+  this->DriverVersionMajor = 0;
+  this->DriverVersionMinor = 0;
+  this->DriverVersionPatch = 0;
+  this->DriverGLVendorId = DRIVER_VENDOR_UNKNOWN;
+  this->IgnoreDriverBugs
+#if defined(VTK_IGNORE_GLDRIVER_BUGS)
+   = true;
+#else
+   = false;
+#endif
   this->Modified();
 }
 
+// ----------------------------------------------------------------------------
 vtkOpenGLExtensionManager::~vtkOpenGLExtensionManager()
 {
   this->SetRenderWindow(NULL);
@@ -86,6 +152,7 @@ vtkOpenGLExtensionManager::~vtkOpenGLExtensionManager()
   this->ExtensionsString = 0;
 }
 
+// ----------------------------------------------------------------------------
 void vtkOpenGLExtensionManager::PrintSelf(ostream &os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os, indent);
@@ -96,11 +163,13 @@ void vtkOpenGLExtensionManager::PrintSelf(ostream &os, vtkIndent indent)
      << (this->ExtensionsString ? this->ExtensionsString : "(NULL)") << endl;
 }
 
+// ----------------------------------------------------------------------------
 vtkRenderWindow* vtkOpenGLExtensionManager::GetRenderWindow()
 {
   return this->RenderWindow;
 }
 
+// ----------------------------------------------------------------------------
 void vtkOpenGLExtensionManager::SetRenderWindow(vtkRenderWindow *renwin)
 {
   if (renwin == this->RenderWindow)
@@ -117,9 +186,315 @@ void vtkOpenGLExtensionManager::SetRenderWindow(vtkRenderWindow *renwin)
   vtkDebugMacro("Setting RenderWindow to " << renwin);
   this->OwnRenderWindow = 0;
   this->RenderWindow = renwin;
+  this->DriverGLVersion = "";
+  this->DriverGLVersionMajor = 1;
+  this->DriverGLVersionMinor = 1;
+  this->DriverGLVersionPatch = 0;
+  this->DriverGLVendor = "";
+  this->DriverGLVendorId = DRIVER_VENDOR_UNKNOWN;
+  this->DriverGLRenderer = "";
+  this->DriverVersionMajor = 0;
+  this->DriverVersionMinor = 0;
+  this->DriverVersionPatch = 0;
+  this->IgnoreDriverBugs
+#if defined(VTK_IGNORE_GLDRIVER_BUGS)
+   = true;
+#else
+   = false;
+#endif
   this->Modified();
 }
 
+/*
+a few examples of the info that's provided
+by glGetString that can be used to identify
+specific drivers/cards on various platforms
+
+linux
+GL_VENDOR: NVIDIA Corporation
+GL_VERSION: 3.3.0 NVIDIA 319.23
+GL_RENDERER: Quadro FX 580/PCIe/SSE2
+
+GL_VENDOR: NVIDIA Corporation
+GL_VERSION: 4.2.0 NVIDIA 304.88
+GL_RENDERER: GeForce GTX 480/PCIe/SSE2
+
+GL_VENDOR: VMware, Inc.
+GL_VERSION: 2.1 Mesa 9.2.0 (git-062317d)
+GL_RENDERER: Gallium 0.4 on llvmpipe (LLVM 3.2, 128 bits)
+
+GL_VENDOR: Brian Paul
+GL_VERSION: 2.1 Mesa 8.0.5
+GL_RENDERER: Mesa OffScreen
+
+GL_VENDOR: Tungsten Graphics, Inc
+GL_VERSION: 1.4 Mesa 8.0.4
+GL_RENDERER: Mesa DRI Intel(R) 945GME
+
+GL_VENDOR: Brian Paul
+GL_VERSION: 2.1 Mesa 7.10.3
+GL_RENDERER: Mesa OffScreen
+
+GL_VENDOR: Brian Paul
+GL_VERSION: 2.1 Mesa 9.2.0-devel
+GL_RENDERER: Mesa X11
+
+
+apple
+GL_VENDOR: NVIDIA Corporation
+GL_VERSION: 2.1 NVIDIA-7.32.12
+GL_RENDERER: NVIDIA GeForce 320M OpenGL Engine
+
+GL_VENDOR: ATI Technologies Inc.
+GL_VERSION: 2.1 ATI-1.0.29
+GL_RENDERER: AMD Radeon HD 6750M OpenGL Engine
+
+GL_VENDOR: Intel Inc.
+GL_VERSION: 2.1 APPLE-8.0.61
+GL_RENDERER: Intel HD Graphics 3000 OpenGL Engine
+
+GL_VENDOR: ATI Technologies Inc.
+GL_VERSION: 2.0 ATI-1.5.48
+GL_RENDERER: ATI Radeon 9600 XT OpenGL Engine
+
+
+windows
+GL_VENDOR: Intel
+GL_VERSION: 3.3.0 - Build 8.15.10.2712
+GL_RENDERER: Intel(R) HD Graphics 4000
+
+GL_VENDOR: Microsoft Corporation
+GL_VERSION: 1.1.0
+GL_RENDERER: GDI Generic
+
+GL_VENDOR: NVIDIA Corporation
+GL_VERSION: 3.3.0
+GL_RENDERER: GeForce GTS 250M/PCIe/SSE2
+
+GL_VENDOR: ATI Technologies Inc.
+GL_VERSION: 4.2.11631 Compatibility Profile Context
+GL_RENDERER: AMD Radeon(TM) HD 7670M
+*/
+
+// ----------------------------------------------------------------------------
+void vtkOpenGLExtensionManager::InitializeDriverInformation()
+{
+  // collect some info about the driver
+  // for use in fine-grained feature
+  // validation; see the examples above.
+  if (this->RenderWindow)
+    {
+    this->DriverGLVendor = safes(glGetString(GL_VENDOR));
+    this->DriverGLVersion = safes(glGetString(GL_VERSION));
+    this->DriverGLRenderer = safes(glGetString(GL_RENDERER));
+
+    // attempt to detect driver gl version
+    const char *numbers="0123456789.";
+    size_t glVerStart = this->DriverGLVersion.find_first_of(numbers);
+    size_t glVerEnd = this->DriverGLVersion.find_first_not_of(numbers, glVerStart);
+    if (glVerEnd==string::npos)
+      {
+      glVerEnd = this->DriverGLVersion.size();
+      }
+    if (glVerStart!=string::npos)
+      {
+      // driver gl version found
+      StringToVersion(
+            this->DriverGLVersion.substr(glVerStart,glVerEnd-glVerStart),
+            this->DriverGLVersionMajor,
+            this->DriverGLVersionMinor,
+            this->DriverGLVersionPatch);
+      // attempt to detect driver version
+      // windows sometimes doesn't have it.
+      size_t driverVerStart = this->DriverGLVersion.find_first_of(numbers, glVerEnd+1);
+      size_t driverVerEnd = this->DriverGLVersion.find_first_not_of(numbers, driverVerStart);
+      if (driverVerEnd==string::npos)
+        {
+        driverVerEnd = this->DriverGLVersion.size();
+        }
+      if (driverVerStart!=string::npos)
+        {
+        // driver version found
+        StringToVersion(
+              this->DriverGLVersion.substr(driverVerStart,driverVerEnd-driverVerStart),
+              this->DriverVersionMajor,
+              this->DriverVersionMinor,
+              this->DriverVersionPatch);
+        }
+      }
+
+    // attempt to identify driver vendor
+    if ( FindToken(this->DriverGLVendor, "ATI")
+      || FindToken(this->DriverGLVendor, "AMD") )
+      {
+      this->DriverGLVendorId = DRIVER_VENDOR_ATI;
+      }
+    else if (FindToken(this->DriverGLVendor, "NVIDIA"))
+      {
+      this->DriverGLVendorId = DRIVER_VENDOR_NVIDIA;
+      }
+    else if (FindToken(this->DriverGLVendor, "Intel"))
+      {
+      this->DriverGLVendorId = DRIVER_VENDOR_INTEL;
+      }
+    // Mesa's use of the vendor field is all over the map
+    // but they consistently use version field.
+    else if (FindToken(this->DriverGLVersion, "Mesa"))
+      {
+      this->DriverGLVendorId = DRIVER_VENDOR_MESA;
+      }
+    else if (FindToken(this->DriverGLVendor, "Microsoft"))
+      {
+      this->DriverGLVendorId = DRIVER_VENDOR_MICROSOFT;
+      }
+    else
+      {
+      this->DriverGLVendorId = DRIVER_VENDOR_UNKNOWN;
+      }
+    }
+}
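A sketch of the fine-grained validation this information enables; the extension name and the Mesa version cutoff below are hypothetical examples rather than checks taken from this commit, and GetExtensionManager() is the render-window accessor used elsewhere in this patch:

#include "vtkOpenGLExtensionManager.h"
#include "vtkOpenGLRenderWindow.h"

static bool CanUseFloatTextures(vtkOpenGLRenderWindow *renWin)
{
  vtkOpenGLExtensionManager *mgr = renWin->GetExtensionManager();

  // first the generic capability test
  if (!mgr->ExtensionSupported("GL_ARB_texture_float"))
    {
    return false;
    }

  // then a driver-specific refinement, e.g. distrust old Mesa releases
  if (mgr->DriverIsMesa() && !mgr->DriverVersionAtLeast(9, 0))
    {
    return false;
    }

  return true;
}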
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverIsATI()
+{
+  return this->DriverGLVendorId == DRIVER_VENDOR_ATI;
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverIsNvidia()
+{
+  return this->DriverGLVendorId == DRIVER_VENDOR_NVIDIA;
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverIsIntel()
+{
+  return this->DriverGLVendorId == DRIVER_VENDOR_INTEL;
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverIsMesa()
+{
+  return this->DriverGLVendorId  == DRIVER_VENDOR_MESA;
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverIsMicrosoft()
+{
+  return this->DriverGLVendorId == DRIVER_VENDOR_MICROSOFT;
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverVersionIs(
+      int major,
+      int minor,
+      int patch)
+{
+  return (this->DriverVersionMajor==major)
+   && (this->DriverVersionMinor==minor)
+   && (this->DriverVersionPatch==patch);
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverVersionIs(int major, int minor)
+{
+  return (this->DriverVersionMajor==major)
+   && (this->DriverVersionMinor==minor);
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverVersionIs(int major)
+{
+  return (this->DriverVersionMajor==major);
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverVersionAtLeast(
+      int major,
+      int minor,
+      int patch)
+{
+  return (this->DriverVersionMajor>major)
+   || ((this->DriverVersionMajor==major)
+   && ((this->DriverVersionMinor>minor)
+   || ((this->DriverVersionMinor==minor)
+   && (this->DriverVersionPatch>=patch))));
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverVersionAtLeast(int major, int minor)
+{
+  return (this->DriverVersionMajor>major)
+   || ((this->DriverVersionMajor==major)
+   && (this->DriverVersionMinor>=minor));
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverVersionAtLeast(int major)
+{
+  return (this->DriverVersionMajor>=major);
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverGLVersionIs(
+      int major,
+      int minor,
+      int patch)
+{
+  return (this->DriverGLVersionMajor==major)
+   && (this->DriverGLVersionMinor==minor)
+   && (this->DriverGLVersionPatch==patch);
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverGLVersionIs(int major, int minor)
+{
+  return (this->DriverGLVersionMajor==major)
+   && (this->DriverGLVersionMinor==minor);
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverGLRendererIs(const char *str)
+{
+  return this->DriverGLRenderer==str;
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverGLRendererHas(const char *str)
+{
+  return this->DriverGLRenderer.find(str)!=string::npos;
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverGLRendererHasToken(const char *str)
+{
+  return FindToken(this->DriverGLRenderer, str);
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::DriverGLRendererIsOSMesa()
+{
+  // check the type of render window because the
+  // OffScreen token is not used with the OS Mesa
+  // llvmpipe state tracker
+  return
+    this->DriverIsMesa()
+    && this->RenderWindow->IsA("vtkOSOpenGLRenderWindow");
+}
+
+// ----------------------------------------------------------------------------
+bool vtkOpenGLExtensionManager::GetIgnoreDriverBugs(const char *description)
+{
+  if (this->IgnoreDriverBugs)
+    {
+    vtkWarningMacro(
+      << "Ignoring OpenGL driver bug: " << description);
+    return true;
+    }
+  return false;
+}
+
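Per the header documentation added below, the intended pattern is for a workaround to consult GetIgnoreDriverBugs() so the fast path can be re-enabled when evaluating a new driver release. A hedged sketch; the renderer substring and the bug itself are hypothetical:

#include "vtkOpenGLExtensionManager.h"

static bool UseFastReadbackPath(vtkOpenGLExtensionManager *mgr)
{
  // skip the fast path on a (hypothetical) buggy driver, unless the user
  // set IgnoreDriverBugs; in that case GetIgnoreDriverBugs() logs the
  // description as a warning and returns true, re-enabling the fast path
  if ( mgr->DriverIsATI()
    && mgr->DriverGLRendererHas("FireGL")
    && !mgr->GetIgnoreDriverBugs("FireGL framebuffer readback is broken") )
    {
    return false;
    }
  return true;
}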
+// ----------------------------------------------------------------------------
 void vtkOpenGLExtensionManager::Update()
 {
   if (this->BuildTime > this->MTime)
@@ -133,10 +508,12 @@ void vtkOpenGLExtensionManager::Update()
   this->ExtensionsString = 0;
 
   this->ReadOpenGLExtensions();
+  this->InitializeDriverInformation();
 
   this->BuildTime.Modified();
 }
 
+// ----------------------------------------------------------------------------
 int vtkOpenGLExtensionManager::ExtensionSupported(const char *name)
 {
   this->Update();
@@ -163,83 +540,77 @@ int vtkOpenGLExtensionManager::ExtensionSupported(const char *name)
     p += n;
     }
 
-  const char *gl_renderer =
-    reinterpret_cast<const char *>(glGetString(GL_RENDERER));
-
-  const char *gl_version=
-    reinterpret_cast<const char *>(glGetString(GL_VERSION));
-
-  const char *gl_vendor=
-    reinterpret_cast<const char *>(glGetString(GL_VENDOR));
-
-  const char *mesa_version = strstr(gl_version, "Mesa");
+  if (result == 0)
+    {
+    // the requested feature is not supported
+    return 0;
+    }
 
-  // Woraround for a nVidia bug in indirect/remote rendering mode (ssh -X)
+  // Workaround for a nVidia bug in indirect/remote rendering mode (ssh -X)
   // The version returned is not the one actually supported.
   // For example, the version returned is greater than or equal to 2.1
   // but PBOs (which are core in 2.1) are not actually supported.
   // In this case, force the version to be 1.1 (minimal). Anything above
   // will be requested only through extensions.
   // See ParaView bug
-  if (result && !this->RenderWindow->IsDirect() && !mesa_version)
+  if ( (strncmp(name,"GL_VERSION_",11) == 0)
+     && !this->RenderWindow->IsDirect() && !this->DriverGLRendererIsOSMesa() )
     {
-    if (result && strncmp(name, "GL_VERSION_", 11) == 0)
-      {
-      // whatever is the OpenGL version, return false.
-      // (nobody asks for GL_VERSION_1_1)
-      result = 0;
-      }
+    // whatever is the OpenGL version, return false.
+    // (nobody asks for GL_VERSION_1_1)
+    return 0;
     }
 
-  // Workaround for a bug on Mac PowerPC G5 with nVidia GeForce FX 5200
-  // Mac OS 10.3.9 and driver 1.5 NVIDIA-1.3.42. It reports it supports
-  // OpenGL>=1.4 but querying for glPointParameteri and glPointParameteriv
-  // return null pointers. So it does not actually supports fully OpenGL 1.4.
-  // It will make this method return false with "GL_VERSION_1_4" and true
-  // with "GL_VERSION_1_5".
-  if (result && strcmp(name, "GL_VERSION_1_4") == 0)
+  if (strcmp(name, "GL_VERSION_1_4") == 0)
     {
-    result = this->GetProcAddress("glPointParameteri")!=0 &&
-      this->GetProcAddress("glPointParameteriv")!=0;
-    }
+    // Workaround for a bug on Mac PowerPC G5 with nVidia GeForce FX 5200
+    // Mac OS 10.3.9 and driver 1.5 NVIDIA-1.3.42. It reports it supports
+    // OpenGL>=1.4 but querying for glPointParameteri and glPointParameteriv
+    // return null pointers, so it does not actually fully support OpenGL 1.4.
+    // It will make this method return false with "GL_VERSION_1_4" and true
+    // with "GL_VERSION_1_5".
+    if ( (this->GetProcAddress("glPointParameteri") == 0)
+      || (this->GetProcAddress("glPointParameteriv") == 0) )
+      {
+      return 0;
+      }
 
-  // Workaround for a bug on renderer string="Quadro4 900 XGL/AGP/SSE2"
-  // version string="1.5.8 NVIDIA 96.43.01" or "1.5.6 NVIDIA 87.56"
-  // The driver reports it supports 1.5 but the 1.4 core promoted extension
-  // GL_EXT_blend_func_separate is implemented in software (poor performance).
-  // All the NV2x chipsets are probably affected. NV2x chipsets are used
-  // in GeForce4 and Quadro4.
-  // It will make this method return false with "GL_VERSION_1_4" and true
-  // with "GL_VERSION_1_5".
-  if (result && strcmp(name, "GL_VERSION_1_4") == 0)
-    {
-    result = strstr(gl_renderer,"Quadro4")==0 &&
-      strstr(gl_renderer,"GeForce4") == 0;
+    // Workaround for a bug on renderer string="Quadro4 900 XGL/AGP/SSE2"
+    // version string="1.5.8 NVIDIA 96.43.01" or "1.5.6 NVIDIA 87.56"
+    // The driver reports it supports 1.5 but the 1.4 core promoted extension
+    // GL_EXT_blend_func_separate is implemented in software (poor performance).
+    // All the NV2x chipsets are probably affected. NV2x chipsets are used
+    // in GeForce4 and Quadro4.
+    // It will make this method return false with "GL_VERSION_1_4" and true
+    // with "GL_VERSION_1_5".
+    if ( result && (this->DriverGLRendererHasToken("Quadro4")
+      || this->DriverGLRendererHasToken("GeForce4")) )
+      {
+      return 0;
+      }
     }
 
   // Workaround for a bug on renderer string="ATI Radeon X1600 OpenGL Engine"
   // version string="2.0 ATI-1.4.58" vendor string="ATI Technologies Inc."
   // It happens on a Apple iMac Intel Core Duo (early 2006) with Mac OS X
   // 10.4.11 (Tiger) and an ATI Radeon X1600 128MB.
+
   // The driver reports it supports 2.0 (where GL_ARB_texture_non_power_of_two
   // extension has been promoted to core) and that it supports extension
   // GL_ARB_texture_non_power_of_two. Reality is that non power of two
   // textures just don't work in this OS/driver/card.
+
+  // It will make this method return false with "GL_VERSION_2_0" and true
   // with "GL_VERSION_2_1".
   // It will make this method return false with
   // "GL_ARB_texture_non_power_of_two".
-  if (result && strcmp(name, "GL_VERSION_2_0") == 0)
+  if ( this->DriverIsATI()
+    && this->DriverVersionIs(1,4,58) && this->DriverGLVersionIs(2,0)
+    && this->DriverGLRendererIs("ATI Radeon X1600 OpenGL Engine")
+    && ((strcmp(name,"GL_VERSION_2_0") == 0)
+    || (strcmp(name, "GL_ARB_texture_non_power_of_two") == 0)))
     {
-    result=!(strcmp(gl_renderer,"ATI Radeon X1600 OpenGL Engine")==0 &&
-             strcmp(gl_version,"2.0 ATI-1.4.58")==0 &&
-             strcmp(gl_vendor,"ATI Technologies Inc.")==0);
-    }
-  if (result && strcmp(name, "GL_ARB_texture_non_power_of_two") == 0)
-    {
-    result=!(strcmp(gl_renderer,"ATI Radeon X1600 OpenGL Engine")==0 &&
-             strcmp(gl_version,"2.0 ATI-1.4.58")==0 &&
-             strcmp(gl_vendor,"ATI Technologies Inc.")==0);
+    return 0;
     }
 
   // Workaround for a bug in Mesa 7.7 with separate specular color. The
@@ -247,28 +618,21 @@ int vtkOpenGLExtensionManager::ExtensionSupported(const char *name)
   // Mesa prior to version 7.10. If the user is requesting the separate
   // specular color extension and the renderer is mesa and the mesa version
   // is less than 7.10 we report that the platform does not support it.
-  if (result && strcmp(name, "GL_EXT_separate_specular_color") == 0)
+  if ( (strcmp(name,"GL_EXT_separate_specular_color") == 0)
+     && this->DriverIsMesa() && !this->DriverVersionAtLeast(7,10) )
     {
-    if (mesa_version)
-      {
-      int mesa_major = 0;
-      int mesa_minor = 0;
-      int mesa_patch = 0;
-      if (sscanf(mesa_version,
-                "Mesa %d.%d.%d",
-                &mesa_major,
-                &mesa_minor,
-                &mesa_patch) >= 2)
-        {
-        if (mesa_major < 7 || (mesa_major == 7 && mesa_minor < 10))
-          {
-          result = 0;
-          }
-        }
-      }
+    return 0;
     }
 
-  return result;
+  // Workaround for a bug in OS Mesa's pre 8.0 FBO implementation.
+  if ( (strcmp(name,"GL_EXT_framebuffer_object")==0)
+    && this->DriverGLRendererIsOSMesa() && !this->DriverVersionAtLeast(8) )
+    {
+    return 0;
+    }
+
+  // feature is supported
+  return 1;
 }
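For context, the usual caller-side pattern that funnels through ExtensionSupported() is sketched below; the GL version requested is only an example, and LoadExtension() is the existing extension-manager call for loading what was reported as supported:

#include "vtkOpenGLExtensionManager.h"
#include "vtkOpenGLRenderWindow.h"

static bool LoadOpenGL21(vtkOpenGLRenderWindow *renWin)
{
  vtkOpenGLExtensionManager *mgr = renWin->GetExtensionManager();

  // ExtensionSupported() applies all of the workarounds above, so a
  // "yes" here already accounts for the known driver quirks
  if (!mgr->ExtensionSupported("GL_VERSION_2_1"))
    {
    return false;
    }
  mgr->LoadExtension("GL_VERSION_2_1");
  return true;
}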
 
 vtkOpenGLExtensionManagerFunctionPointer
@@ -300,6 +664,10 @@ vtkOpenGLExtensionManager::GetProcAddress(const char *fname)
     }
 #endif //VTK_USE_APPLE_LOADER
 
+#ifdef VTK_USE_OSMESA_GET_PROC_ADDRESS
+  return reinterpret_cast<vtkOpenGLExtensionManagerFunctionPointer>(OSMesaGetProcAddress(fname));
+#endif
+
 #ifdef VTK_USE_X
  #ifdef VTK_USE_GLX_GET_PROC_ADDRESS
   // In a perfect world, it should be
@@ -753,7 +1121,7 @@ int vtkOpenGLExtensionManager::SafeLoadExtension(const char *name)
 
 // Those two functions are part of OpenGL2.0 but don't have direct
 // translation in the GL_ARB_shader_objects extension
-GLboolean IsProgramFromARBToPromoted(GLuint program)
+static GLboolean IsProgramFromARBToPromoted(GLuint program)
 {
   GLint param;
   // in this case, vtkgl::GetProgramiv has been initialized with the pointer to
@@ -763,7 +1131,7 @@ GLboolean IsProgramFromARBToPromoted(GLuint program)
   return param == static_cast<GLint>(vtkgl::PROGRAM_OBJECT_ARB);
 }
 
-GLboolean IsShaderFromARBToPromoted(GLuint shader)
+static GLboolean IsShaderFromARBToPromoted(GLuint shader)
 {
   GLint param;
   // in this case, vtkgl::GetShaderiv has been initialized with the pointer to
diff --git a/Rendering/OpenGL/vtkOpenGLExtensionManager.h b/Rendering/OpenGL/vtkOpenGLExtensionManager.h
index 0d19e0a..6b85079 100644
--- a/Rendering/OpenGL/vtkOpenGLExtensionManager.h
+++ b/Rendering/OpenGL/vtkOpenGLExtensionManager.h
@@ -124,6 +124,7 @@
 
 #include "vtkObject.h"
 #include "vtkWeakPointer.h" // needed for vtkWeakPointer.
+#include <string> // needed for std::string
 
 class vtkRenderWindow;
 
@@ -261,17 +262,120 @@ public:
   // Similar to LoadCorePromotedExtension().
   // It loads an EXT extension into the pointers of its ARB equivalent.
   virtual void LoadAsARBExtension(const char *name);
+
+  // Description:
+  // Return the driver's version parts. This may be used for
+  // fine-grained feature testing.
+  virtual int GetDriverVersionMajor(){ return this->DriverVersionMajor; }
+  virtual int GetDriverVersionMinor(){ return this->DriverVersionMinor; }
+  virtual int GetDriverVersionPatch(){ return this->DriverVersionPatch; }
+
+  // Description:
+  // Get GL API version that the driver provides. This is
+  // often different than the GL version that VTK recognizes
+  // so only use this for identifying a specific driver.
+  virtual int GetDriverGLVersionMajor(){ return this->DriverGLVersionMajor; }
+  virtual int GetDriverGLVersionMinor(){ return this->DriverGLVersionMinor; }
+  virtual int GetDriverGLVersionPatch(){ return this->DriverGLVersionPatch; }
+
+  // Description:
+  // Tests for common implementors of rendering drivers. This may be used for
+  // fine-grained feature testing. Note: DriverIsMesa succeeds for OS Mesa;
+  // use DriverGLRendererIsOSMesa to differentiate.
+  virtual bool DriverIsATI();
+  virtual bool DriverIsNvidia();
+  virtual bool DriverIsIntel();
+  virtual bool DriverIsMesa();
+  virtual bool DriverIsMicrosoft();
+
+  // Description:
+  // Test for a specific driver version.
+  virtual bool DriverVersionIs(int major);
+  virtual bool DriverVersionIs(int major, int minor);
+  virtual bool DriverVersionIs(int major, int minor, int patch);
+
+  // Description:
+  // Test for driver version greater than or equal
+  // to the named version.
+  virtual bool DriverVersionAtLeast(int major);
+  virtual bool DriverVersionAtLeast(int major, int minor);
+  virtual bool DriverVersionAtLeast(int major, int minor, int patch);
+
+  // Description:
+  // Test for the driver's GL version as reported in
+  // its GL_VERSION string. This is intended for driver
+  // identification only; use ExtensionSupported
+  // to test for VTK support of a specific GL version.
+  virtual bool DriverGLVersionIs(int major, int minor, int patch);
+  virtual bool DriverGLVersionIs(int major, int minor);
+
+  // Description:
+  // Test for a specific renderer. This could be used
+  // in some cases to identify the graphics card or
+  // specific driver. Use HasToken to prevent false
+  // matches, e.g. to avoid GeForce4 matching GeForce400.
+  virtual bool DriverGLRendererIs(const char *str);
+  virtual bool DriverGLRendererHas(const char *str);
+  virtual bool DriverGLRendererHasToken(const char *str);
+
+  // Description:
+  // Test for Mesa's offscreen renderer.
+  virtual bool DriverGLRendererIsOSMesa();
+
+  // Description:
+  // Get the OpenGL version, vendor and renderer strings. These can
+  // be used to identify a specific driver.
+  virtual const char *GetDriverGLVendor(){ return this->DriverGLVendor.c_str(); }
+  virtual const char *GetDriverGLVersion(){ return this->DriverGLVersion.c_str(); }
+  virtual const char *GetDriverGLRenderer(){ return this->DriverGLRenderer.c_str(); }
+
+  // Description:
+  // When set, known driver bugs are ignored during driver feature
+  // detection. This is used to evaluate the status of a new driver
+  // release to see if the bugs have been fixed. The function takes
+  // a description argument which is sent to VTK's warning stream
+  // when the ignore flag is set. This makes the test output searchable
+  // for tests which have problems with certain drivers. The CMake
+  // variable VTK_IGNORE_GLDRIVER_BUGS can be used to set this at
+  // build time. Default OFF.
+  bool GetIgnoreDriverBugs(const char *description);
+  vtkSetMacro(IgnoreDriverBugs, bool);
+  vtkBooleanMacro(IgnoreDriverBugs, bool);
+
 //BTX
 protected:
   vtkOpenGLExtensionManager();
   virtual ~vtkOpenGLExtensionManager();
 
-
   int OwnRenderWindow;
   char *ExtensionsString;
 
   vtkTimeStamp BuildTime;
 
+  // driver specific info
+  std::string DriverGLVersion;
+  int DriverGLVersionMajor;
+  int DriverGLVersionMinor;
+  int DriverGLVersionPatch;
+  std::string DriverGLVendor;
+  std::string DriverGLRenderer;
+  int DriverVersionMajor;
+  int DriverVersionMinor;
+  int DriverVersionPatch;
+  enum DriverGLVendorIdType
+    {
+    DRIVER_VENDOR_UNKNOWN=0,
+    DRIVER_VENDOR_ATI,
+    DRIVER_VENDOR_NVIDIA,
+    DRIVER_VENDOR_INTEL,
+    DRIVER_VENDOR_MESA,
+    DRIVER_VENDOR_MICROSOFT
+    };
+  DriverGLVendorIdType DriverGLVendorId;
+  bool IgnoreDriverBugs;
+
+  virtual void InitializeDriverInformation();
+
   virtual void ReadOpenGLExtensions();
 
   // Description:
diff --git a/Rendering/OpenGL/vtkOpenGLGlyph3DMapper.cxx b/Rendering/OpenGL/vtkOpenGLGlyph3DMapper.cxx
index 4b0d412..fb66fb4 100644
--- a/Rendering/OpenGL/vtkOpenGLGlyph3DMapper.cxx
+++ b/Rendering/OpenGL/vtkOpenGLGlyph3DMapper.cxx
@@ -40,8 +40,9 @@
 #include "vtkTimerLog.h"
 #include "vtkTransform.h"
 #include "vtkHardwareSelectionPolyDataPainter.h"
+#include "vtkOpenGLError.h"
 
-#include <assert.h>
+#include <cassert>
 #include <vector>
 #include "vtkgl.h"
 
@@ -166,6 +167,8 @@ void vtkOpenGLGlyph3DMapper::CopyInformationToSubMapper(
 // as each frame is rendered.
 void vtkOpenGLGlyph3DMapper::Render(vtkRenderer *ren, vtkActor *actor)
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkHardwareSelector* selector = ren->GetSelector();
   bool selecting_points = selector && (selector->GetFieldAssociation() ==
     vtkDataObject::FIELD_ASSOCIATION_POINTS);
@@ -349,6 +352,8 @@ void vtkOpenGLGlyph3DMapper::Render(vtkRenderer *ren, vtkActor *actor)
     selector->EndRenderProp();
     }
 
+  vtkOpenGLCheckErrorMacro("Failed after Render");
+
   this->UpdateProgress(1.0);
 }
 
@@ -363,6 +368,8 @@ void vtkOpenGLGlyph3DMapper::Render(
     return;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   vtkHardwareSelector* selector = ren->GetSelector();
   bool selecting_points = selector && (selector->GetFieldAssociation() ==
     vtkDataObject::FIELD_ASSOCIATION_POINTS);
@@ -618,6 +625,8 @@ void vtkOpenGLGlyph3DMapper::Render(
     // restore the blend function
     glPopAttrib();
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 // ---------------------------------------------------------------------------
@@ -649,6 +658,7 @@ void vtkOpenGLGlyph3DMapper::ReleaseList()
     {
     glDeleteLists(this->DisplayListId,1);
     this->DisplayListId = 0;
+    vtkOpenGLCheckErrorMacro("failed after ReleaseList");
     }
 }
 
diff --git a/Rendering/OpenGL/vtkOpenGLHardwareSelector.cxx b/Rendering/OpenGL/vtkOpenGLHardwareSelector.cxx
new file mode 100644
index 0000000..eab8e54
--- /dev/null
+++ b/Rendering/OpenGL/vtkOpenGLHardwareSelector.cxx
@@ -0,0 +1,217 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkOpenGLHardwareSelector.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkOpenGLHardwareSelector.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkRenderWindow.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLExtensionManager.h"
+
+#include "vtkgl.h"
+#include "vtkOpenGLError.h"
+
+// Description:
+// Internal state and helper methods.
+class vtkOpenGLHardwareSelector::vtkInternals
+{
+public:
+  vtkOpenGLRenderWindow *Context;
+  bool MultisampleSupport;
+  bool OriginalMultisample;
+  bool OriginalLighting;
+  bool OriginalBlending;
+
+  vtkInternals() :
+    Context(NULL),
+    MultisampleSupport(false),
+    OriginalMultisample(false),
+    OriginalLighting(false),
+    OriginalBlending(false)
+    {}
+
+  // Description:
+  // Set the rendering context and load the required
+  // extensions.
+  void SetContext(vtkRenderWindow *context)
+    {
+    if (this->Context != context)
+      {
+      this->MultisampleSupport = false;
+      this->Context = vtkOpenGLRenderWindow::SafeDownCast(context);
+      if (this->Context)
+        {
+        vtkOpenGLExtensionManager *manager
+           = this->Context->GetExtensionManager();
+
+        // don't need any of the functions so don't bother
+        // to load the extension, but do make sure enums are
+        // defined.
+        this->MultisampleSupport
+          = manager->ExtensionSupported("GL_ARB_multisample")==1;
+        }
+      }
+    }
+
+  // Description:
+  // Enable/disable lighting
+  void EnableLighting(bool mode)
+    {
+    if (mode)
+      {
+      glEnable(GL_LIGHTING);
+      }
+    else
+      {
+      glDisable(GL_LIGHTING);
+      }
+    }
+
+  // Description:
+  // Check if lighting is enabled.
+  bool QueryLighting()
+    {
+    if (glIsEnabled(GL_LIGHTING))
+      {
+      return true;
+      }
+    else
+      {
+      return false;
+      }
+    }
+
+  // Description:
+  // Enable/disable multisampling.
+  void EnableMultisampling(bool mode)
+    {
+    if (this->MultisampleSupport)
+      {
+      if (mode)
+        {
+        glEnable(vtkgl::MULTISAMPLE);
+        }
+      else
+        {
+        glDisable(vtkgl::MULTISAMPLE);
+        }
+      }
+    }
+
+  // Description:
+  // Check if multisample is enabled.
+  bool QueryMultisampling()
+    {
+    if (this->MultisampleSupport && glIsEnabled(vtkgl::MULTISAMPLE))
+      {
+      return true;
+      }
+    else
+      {
+      return false;
+      }
+    }
+
+  // Description:
+  // Enable/Disable blending
+  void EnableBlending(bool mode)
+    {
+    if (mode)
+      {
+      glEnable(GL_BLEND);
+      }
+    else
+      {
+      glDisable(GL_BLEND);
+      }
+    }
+
+  // Description:
+  // Check if blending is enabled.
+  bool QueryBlending()
+    {
+    if (glIsEnabled(GL_BLEND))
+      {
+      return true;
+      }
+    else
+      {
+      return false;
+      }
+    }
+};
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkOpenGLHardwareSelector);
+
+//----------------------------------------------------------------------------
+vtkOpenGLHardwareSelector::vtkOpenGLHardwareSelector()
+{
+  #ifdef vtkOpenGLHardwareSelectorDEBUG
+  cerr << "=====vtkOpenGLHardwareSelector::vtkOpenGLHardwareSelector" << endl;
+  #endif
+  this->Internals = new vtkInternals;
+}
+
+//----------------------------------------------------------------------------
+vtkOpenGLHardwareSelector::~vtkOpenGLHardwareSelector()
+{
+  #ifdef vtkOpenGLHardwareSelectorDEBUG
+  cerr << "=====vtkOpenGLHardwareSelector::~vtkOpenGLHardwareSelector" << endl;
+  #endif
+  delete this->Internals;
+}
+
+//----------------------------------------------------------------------------
+void vtkOpenGLHardwareSelector::BeginRenderProp(vtkRenderWindow *context)
+{
+  #ifdef vtkOpenGLHardwareSelectorDEBUG
+  cerr << "=====vtkOpenGLHardwareSelector::BeginRenderProp" << endl;
+  #endif
+
+  this->Internals->SetContext(context);
+
+  // Disable multisample, lighting, and blending.
+  this->Internals->OriginalMultisample = this->Internals->QueryMultisampling();
+  this->Internals->EnableMultisampling(false);
+
+  this->Internals->OriginalLighting = this->Internals->QueryLighting();
+  this->Internals->EnableLighting(false);
+
+  this->Internals->OriginalBlending = this->Internals->QueryBlending();
+  this->Internals->EnableBlending(false);
+}
+
+//----------------------------------------------------------------------------
+void vtkOpenGLHardwareSelector::EndRenderProp(vtkRenderWindow *)
+{
+  #ifdef vtkOpenGLHardwareSelectorDEBUG
+  cerr << "=====vtkOpenGLHardwareSelector::EndRenderProp" << endl;
+  #endif
+
+  // Restore multisample, lighting, and blending.
+  this->Internals->EnableMultisampling(this->Internals->OriginalMultisample);
+  this->Internals->EnableLighting(this->Internals->OriginalLighting);
+  this->Internals->EnableBlending(this->Internals->OriginalBlending);
+}
+
+//----------------------------------------------------------------------------
+void vtkOpenGLHardwareSelector::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os
+   << indent << "MultisampleSupport: "
+   << this->Internals->MultisampleSupport
+   << endl;
+}
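The vtkInternals class above is private to this file, but the save/disable/restore idea it implements for multisample, lighting and blending is easy to show on a single raw GL flag. A sketch only; the selection-pass body is a placeholder:

#include "vtkOpenGL.h"

static void RenderSelectionPass()
{
  // remember whether blending was on, then turn it off so the flat
  // id colors written during selection are not altered by blending
  GLboolean hadBlend = glIsEnabled(GL_BLEND);
  glDisable(GL_BLEND);

  // ... render props with flat id colors here (hypothetical) ...

  // restore the caller's state afterwards
  if (hadBlend)
    {
    glEnable(GL_BLEND);
    }
}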
diff --git a/Rendering/OpenGL/vtkOpenGLHardwareSelector.h b/Rendering/OpenGL/vtkOpenGLHardwareSelector.h
new file mode 100644
index 0000000..35a0122
--- /dev/null
+++ b/Rendering/OpenGL/vtkOpenGLHardwareSelector.h
@@ -0,0 +1,64 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkOpenGLHardwareSelector.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkOpenGLHardwareSelector - implements the device specific code of
+//  vtkHardwareSelector.
+//
+// .SECTION Description
+// Implements the device specific code of vtkHardwareSelector.
+//
+// .SECTION See Also
+// vtkHardwareSelector
+
+#ifndef __vtkOpenGLHardwareSelector_h
+#define __vtkOpenGLHardwareSelector_h
+
+#include "vtkRenderingOpenGLModule.h" // For export macro
+#include "vtkHardwareSelector.h"
+
+class VTKRENDERINGOPENGL_EXPORT vtkOpenGLHardwareSelector : public vtkHardwareSelector
+{
+public:
+  static vtkOpenGLHardwareSelector* New();
+  vtkTypeMacro(vtkOpenGLHardwareSelector, vtkHardwareSelector);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Called by the mapper (vtkHardwareSelectionPolyDataPainter) before and after
+  // rendering each prop.
+  virtual void BeginRenderProp()
+    { this->vtkHardwareSelector::BeginRenderProp(); }
+
+  virtual void EndRenderProp()
+    { this->vtkHardwareSelector::EndRenderProp(); }
+
+protected:
+  vtkOpenGLHardwareSelector();
+  virtual ~vtkOpenGLHardwareSelector();
+
+  // Called internally before and after each prop is rendered
+  // for device specific configuration/preparation etc.
+  virtual void BeginRenderProp(vtkRenderWindow *);
+  virtual void EndRenderProp(vtkRenderWindow *);
+
+  // for internal state
+  class vtkInternals;
+  vtkInternals* Internals;
+
+private:
+  vtkOpenGLHardwareSelector(const vtkOpenGLHardwareSelector&); // Not implemented.
+  void operator=(const vtkOpenGLHardwareSelector&); // Not implemented.
+};
+
+#endif
diff --git a/Rendering/OpenGL/vtkOpenGLImageMapper.cxx b/Rendering/OpenGL/vtkOpenGLImageMapper.cxx
index 8b60062..9d854e9 100644
--- a/Rendering/OpenGL/vtkOpenGLImageMapper.cxx
+++ b/Rendering/OpenGL/vtkOpenGLImageMapper.cxx
@@ -25,6 +25,7 @@
 #include "vtkgluPickMatrix.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 #include <limits.h>
 
 vtkStandardNewMacro(vtkOpenGLImageMapper);
@@ -93,6 +94,8 @@ void vtkOpenGLImageMapperRenderDouble(vtkOpenGLImageMapper *self, vtkImageData *
                                       T *dataPtr, double shift, double scale,
                                       int *actorPos, int *actorPos2, int front, int *vsize)
 {
+  vtkOpenGLClearErrorMacro();
+
   int inMin0 = self->DisplayExtent[0];
   int inMax0 = self->DisplayExtent[1];
   int inMin1 = self->DisplayExtent[2];
@@ -204,6 +207,8 @@ void vtkOpenGLImageMapperRenderDouble(vtkOpenGLImageMapper *self, vtkImageData *
     glPixelZoom(1.0, 1.0);
     }
   delete [] newPtr;
+
+  vtkOpenGLStaticCheckErrorMacro("failed after ImageMapperRenderDouble");
 }
 
 //---------------------------------------------------------------
@@ -218,6 +223,8 @@ void vtkOpenGLImageMapperRenderShort(vtkOpenGLImageMapper *self, vtkImageData *d
                                      int *actorPos, int *actorPos2, int front,
                                      int *vsize)
 {
+  vtkOpenGLClearErrorMacro();
+
   int inMin0 = self->DisplayExtent[0];
   int inMax0 = self->DisplayExtent[1];
   int inMin1 = self->DisplayExtent[2];
@@ -350,6 +357,8 @@ void vtkOpenGLImageMapperRenderShort(vtkOpenGLImageMapper *self, vtkImageData *d
     glPixelZoom(1.0, 1.0);
     }
   delete [] newPtr;
+
+  vtkOpenGLStaticCheckErrorMacro("failed after ImageMapperRenderShort");
 }
 
 //---------------------------------------------------------------
@@ -360,6 +369,8 @@ void vtkOpenGLImageMapperRenderChar(vtkOpenGLImageMapper *self, vtkImageData *da
                                     T *dataPtr, int *actorPos, int *actorPos2,
                                     int front, int *vsize)
 {
+  vtkOpenGLClearErrorMacro();
+
   int inMin0 = self->DisplayExtent[0];
   int inMax0 = self->DisplayExtent[1];
   int inMin1 = self->DisplayExtent[2];
@@ -493,6 +504,8 @@ void vtkOpenGLImageMapperRenderChar(vtkOpenGLImageMapper *self, vtkImageData *da
     }
 
   glPixelStorei( GL_UNPACK_ROW_LENGTH, 0);
+
+  vtkOpenGLStaticCheckErrorMacro("failed after ImageMapperRenderChar");
 }
 
 //----------------------------------------------------------------------------
@@ -514,10 +527,10 @@ void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
                                    actorPos, actorPos2, front, vsize);
 }
 
-void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
-                                char* dataPtr, double shift, double scale,
-                                int *actorPos, int *actorPos2, int front, int *vsize,
-                                int)
+static void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
+                                       char* dataPtr, double shift, double scale,
+                                       int *actorPos, int *actorPos2, int front, int *vsize,
+                                       int)
 {
   if(shift == 0.0 && scale == 1.0)
     {
@@ -530,10 +543,11 @@ void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
                                     actorPos, actorPos2, front, vsize);
     }
 }
-void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
-                                unsigned char* dataPtr, double shift, double scale,
-                                int *actorPos, int *actorPos2, int front, int *vsize,
-                                int)
+
+static void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
+                                       unsigned char* dataPtr, double shift, double scale,
+                                       int *actorPos, int *actorPos2, int front, int *vsize,
+                                       int)
 {
   if(shift == 0.0 && scale == 1.0)
     {
@@ -546,10 +560,11 @@ void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
                                     actorPos, actorPos2, front, vsize);
     }
 }
-void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
-                                signed char* dataPtr, double shift, double scale,
-                                int *actorPos, int *actorPos2, int front, int *vsize,
-                                int)
+
+static void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
+                                       signed char* dataPtr, double shift, double scale,
+                                       int *actorPos, int *actorPos2, int front, int *vsize,
+                                       int)
 {
   if(shift == 0.0 && scale == 1.0)
     {
@@ -563,19 +578,19 @@ void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
     }
 }
 
-void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
-                                short* dataPtr, double shift, double scale,
-                                int *actorPos, int *actorPos2, int front, int *vsize,
-                                int)
+static void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
+                                       short* dataPtr, double shift, double scale,
+                                       int *actorPos, int *actorPos2, int front, int *vsize,
+                                       int)
 {
   vtkOpenGLImageMapperRenderShort(self, data, dataPtr, shift, scale,
                                   actorPos, actorPos2, front, vsize);
 }
 
-void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
-                                unsigned short* dataPtr, double shift, double scale,
-                                int *actorPos, int *actorPos2, int front, int *vsize,
-                                int)
+static void vtkOpenGLImageMapperRender(vtkOpenGLImageMapper *self, vtkImageData *data,
+                                       unsigned short* dataPtr, double shift, double scale,
+                                       int *actorPos, int *actorPos2, int front, int *vsize,
+                                       int)
 {
   vtkOpenGLImageMapperRenderShort(self, data, dataPtr, shift, scale,
                                   actorPos, actorPos2, front, vsize);
@@ -601,6 +616,8 @@ void vtkOpenGLImageMapper::RenderData(vtkViewport* viewport,
   // data updates since the render started.
   window->MakeCurrent();
 
+  vtkOpenGLClearErrorMacro();
+
   shift = this->GetColorShift();
   scale = this->GetColorScale();
 
@@ -658,6 +675,7 @@ void vtkOpenGLImageMapper::RenderData(vtkViewport* viewport,
     glMatrixMode( GL_MODELVIEW);
     glPopMatrix();
     glEnable( GL_LIGHTING);
+    vtkOpenGLCheckErrorMacro("failed after RenderData");
     return;
     }
 
@@ -686,6 +704,8 @@ void vtkOpenGLImageMapper::RenderData(vtkViewport* viewport,
 #if defined(sparc) && defined(GL_VERSION_1_1)
   glEnable(GL_BLEND);
 #endif
+
+  vtkOpenGLCheckErrorMacro("failed after RenderData");
 }
 
 void vtkOpenGLImageMapper::PrintSelf(ostream& os, vtkIndent indent)
diff --git a/Rendering/OpenGL/vtkOpenGLImageSliceMapper.cxx b/Rendering/OpenGL/vtkOpenGLImageSliceMapper.cxx
index 64e609d..b5b18e3 100644
--- a/Rendering/OpenGL/vtkOpenGLImageSliceMapper.cxx
+++ b/Rendering/OpenGL/vtkOpenGLImageSliceMapper.cxx
@@ -39,6 +39,7 @@
 #include <math.h>
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 #include "vtkgl.h" // vtkgl namespace
 
 vtkStandardNewMacro(vtkOpenGLImageSliceMapper);
@@ -84,6 +85,7 @@ void vtkOpenGLImageSliceMapper::ReleaseGraphicsResources(vtkWindow *renWin)
   if (this->TextureIndex && renWin && renWin->GetMapped())
     {
     static_cast<vtkRenderWindow *>(renWin)->MakeCurrent();
+    vtkOpenGLClearErrorMacro();
 #ifdef GL_VERSION_1_1
     // free any textures
     if (glIsTexture(this->TextureIndex))
@@ -122,6 +124,7 @@ void vtkOpenGLImageSliceMapper::ReleaseGraphicsResources(vtkWindow *renWin)
     this->TextureSize[0] = 0;
     this->TextureSize[1] = 0;
     this->TextureBytesPerPixel = 1;
+    vtkOpenGLCheckErrorMacro("failed after ReleaseGraphicsResources");
     }
   this->TextureIndex = 0;
   this->BackgroundTextureIndex = 0;
@@ -217,6 +220,8 @@ void vtkOpenGLImageSliceMapper::RenderTexturedPolygon(
     reuseTexture = false;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   // get information about the image
   int xdim, ydim; // orientation of texture wrt input image
   vtkImageSliceMapper::GetDimensionIndices(this->Orientation, xdim, ydim);
@@ -501,6 +506,8 @@ void vtkOpenGLImageSliceMapper::RenderTexturedPolygon(
     {
     glDisable(vtkgl::FRAGMENT_PROGRAM_ARB);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after RenderTexturedPolygon");
 }
 
 //----------------------------------------------------------------------------
@@ -508,6 +515,8 @@ void vtkOpenGLImageSliceMapper::RenderTexturedPolygon(
 void vtkOpenGLImageSliceMapper::RenderPolygon(
   vtkPoints *points, const int extent[6], bool textured)
 {
+  vtkOpenGLClearErrorMacro();
+
   static const int stripOrder[4] = { 3, 0, 2, 1 };
   static const double normals[3][3] =
     { { 1.0, 0.0, 0.0 }, { 0.0, -1.0, 0.0 }, { 0.0, 0.0, 1.0 } };
@@ -561,6 +570,8 @@ void vtkOpenGLImageSliceMapper::RenderPolygon(
       }
     glEnd();
     }
+
+  vtkOpenGLCheckErrorMacro("failed after RenderPolygon");
 }
 
 //----------------------------------------------------------------------------
@@ -569,6 +580,8 @@ void vtkOpenGLImageSliceMapper::RenderPolygon(
 void vtkOpenGLImageSliceMapper::RenderBackground(
   vtkPoints *points, const int extent[6], bool textured)
 {
+  vtkOpenGLClearErrorMacro();
+
   static double borderThickness = 1e6;
   static double normals[3][3] =
     { { 1.0, 0.0, 0.0 }, { 0.0, -1.0, 0.0 }, { 0.0, 0.0, 1.0 } };
@@ -695,12 +708,15 @@ void vtkOpenGLImageSliceMapper::RenderBackground(
       }
     glEnd();
     }
+  vtkOpenGLCheckErrorMacro("failed after RenderBackground");
 }
 
 //----------------------------------------------------------------------------
 void vtkOpenGLImageSliceMapper::BindFragmentProgram(
   vtkRenderer *ren, vtkImageProperty *property)
 {
+  vtkOpenGLClearErrorMacro();
+
   int xdim, ydim, zdim; // orientation of texture wrt input image
   vtkImageSliceMapper::GetDimensionIndices(this->Orientation, xdim, ydim);
   zdim = 3 - xdim - ydim; // they sum to three
@@ -768,6 +784,8 @@ void vtkOpenGLImageSliceMapper::BindFragmentProgram(
   vtkgl::ProgramLocalParameter4fARB(vtkgl::FRAGMENT_PROGRAM_ARB, 4,
     static_cast<float>(mat[4]), static_cast<float>(mat[5]),
     static_cast<float>(mat[6]), static_cast<float>(mat[7]));
+
+  vtkOpenGLCheckErrorMacro("failed after BindFragmentProgram");
 }
 
 //----------------------------------------------------------------------------
@@ -926,6 +944,8 @@ void vtkOpenGLImageSliceMapper::ComputeTextureSize(
 // Determine if a given texture size is supported by the video card
 bool vtkOpenGLImageSliceMapper::TextureSizeOK(const int size[2])
 {
+  vtkOpenGLClearErrorMacro();
+
 #ifdef GL_VERSION_1_1
   // First ask OpenGL what the max texture size is
   GLint maxSize;
@@ -942,6 +962,8 @@ bool vtkOpenGLImageSliceMapper::TextureSizeOK(const int size[2])
   glGetTexLevelParameteriv(GL_PROXY_TEXTURE_2D, 0, GL_TEXTURE_WIDTH,
                            &params);
 
+  vtkOpenGLCheckErrorMacro("failed after TextureSizeOK");
+
   // if it does fit, we will render it later
   return (params == 0 ? 0 : 1);
 #else
@@ -955,6 +977,8 @@ bool vtkOpenGLImageSliceMapper::TextureSizeOK(const int size[2])
 // Set the modelview transform and load the texture
 void vtkOpenGLImageSliceMapper::Render(vtkRenderer *ren, vtkImageSlice *prop)
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkOpenGLRenderWindow *renWin =
     vtkOpenGLRenderWindow::SafeDownCast(ren->GetRenderWindow());
 
@@ -1106,6 +1130,8 @@ void vtkOpenGLImageSliceMapper::Render(vtkRenderer *ren, vtkImageSlice *prop)
     {
     this->TimeToDraw = 0.0001;
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkOpenGLLight.cxx b/Rendering/OpenGL/vtkOpenGLLight.cxx
index 798d701..9801f26 100644
--- a/Rendering/OpenGL/vtkOpenGLLight.cxx
+++ b/Rendering/OpenGL/vtkOpenGLLight.cxx
@@ -17,6 +17,7 @@
 #include "vtkOpenGLRenderer.h"
 #include "vtkObjectFactory.h"
 #include "vtkMatrix4x4.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkOpenGL.h"
 
@@ -27,6 +28,7 @@ vtkStandardNewMacro(vtkOpenGLLight);
 // Implement base class method.
 void vtkOpenGLLight::Render(vtkRenderer *vtkNotUsed(ren), int light_index)
 {
+  vtkOpenGLClearErrorMacro();
   float color[4];
   float info[4];
 
@@ -112,6 +114,8 @@ void vtkOpenGLLight::Render(vtkRenderer *vtkNotUsed(ren), int light_index)
     {
     glPopMatrix();
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkOpenGLLightMonitor.cxx b/Rendering/OpenGL/vtkOpenGLLightMonitor.cxx
new file mode 100644
index 0000000..85f6a96
--- /dev/null
+++ b/Rendering/OpenGL/vtkOpenGLLightMonitor.cxx
@@ -0,0 +1,199 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkOpenGLLightMonitor
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkOpenGLLightMonitor.h"
+#include "vtkObjectFactory.h"
+#include "vtkgl.h"
+#include <cstring>
+#include <cmath>
+
+//-----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkOpenGLLightMonitor)
+
+//-----------------------------------------------------------------------------
+vtkOpenGLLightMonitor *vtkOpenGLLightMonitor::New(int lightId)
+{
+  vtkOpenGLLightMonitor *mon = vtkOpenGLLightMonitor::New();
+  mon->SetLightId(lightId);
+  return mon;
+}
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLLightMonitor::Initialize()
+{
+  this->Enabled = 0;
+  memset(this->Ambient,0,4*sizeof(float));
+  memset(this->Diffuse,0,4*sizeof(float));
+  memset(this->Specular,0,4*sizeof(float));
+  memset(this->Position,0,4*sizeof(float));
+  memset(this->SpotDirection,0,3*sizeof(float));
+  this->SpotExponent = 0.0f;
+  this->SpotCutoff = 0.0f;
+  memset(this->Attenuation,0,3*sizeof(float));
+  this->UpTime = 0;
+}
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLLightMonitor::SetEnabled(int val)
+{
+  if (this->Enabled == val)
+    {
+    return;
+    }
+  this->Enabled = val;
+  this->UpTime += 1;
+}
+
+//-----------------------------------------------------------------------------
+#define vtkOpenGLLightMonitorSetMacro(_name)      \
+void vtkOpenGLLightMonitor::Set##_name(float val) \
+{                                           \
+  if ( fabs ( this->_name - val ) < 1e-5f ) \
+    {                                       \
+    return;                                 \
+    }                                       \
+  this->_name = val;                        \
+  this->UpTime += 1;                        \
+}
+vtkOpenGLLightMonitorSetMacro(SpotExponent)
+vtkOpenGLLightMonitorSetMacro(SpotCutoff)
+
+//-----------------------------------------------------------------------------
+#define vtkOpenGLLightMonitorSetVectorMacro(_name, _n)   \
+void vtkOpenGLLightMonitor::Set##_name(float *val)       \
+{                                                  \
+  int changed = 0;                                 \
+  for (int i=0; i<_n; ++i)                         \
+    {                                              \
+    if ( fabs( val[i] - this->_name[i] ) > 1e-5f ) \
+      {                                            \
+      changed=1;                                   \
+      this->_name[i] = val[i];                     \
+      }                                            \
+    }                                              \
+  if ( changed )                                   \
+    {                                              \
+    this->UpTime += 1;                             \
+    }                                              \
+}
+vtkOpenGLLightMonitorSetVectorMacro(Ambient, 4)
+vtkOpenGLLightMonitorSetVectorMacro(Diffuse, 4)
+vtkOpenGLLightMonitorSetVectorMacro(Specular, 4)
+vtkOpenGLLightMonitorSetVectorMacro(Position, 4)
+vtkOpenGLLightMonitorSetVectorMacro(SpotDirection, 3)
+vtkOpenGLLightMonitorSetVectorMacro(Attenuation, 3)
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLLightMonitor::Update()
+{
+  float param[4];
+  GLenum light = (GLenum)GL_LIGHT0+this->LightId;
+
+  if (glIsEnabled(light))
+    {
+    this->SetEnabled(1);
+    }
+  else
+    {
+    this->SetEnabled(0);
+    }
+
+  if (this->Enabled)
+    {
+    glGetLightfv(light, GL_AMBIENT, param);
+    this->SetAmbient(param);
+
+    glGetLightfv(light, GL_DIFFUSE, param);
+    this->SetDiffuse(param);
+
+    glGetLightfv(light, GL_SPECULAR, param);
+    this->SetSpecular(param);
+
+    glGetLightfv(light, GL_POSITION, param);
+    this->SetPosition(param);
+
+    glGetLightfv(light, GL_SPOT_DIRECTION, param);
+    this->SetSpotDirection(param);
+
+    glGetLightfv(light, GL_SPOT_EXPONENT, param);
+    this->SetSpotExponent(param[0]);
+
+    glGetLightfv(light, GL_SPOT_CUTOFF, param);
+    this->SetSpotCutoff(param[0]);
+
+    glGetLightfv(light, GL_CONSTANT_ATTENUATION, param);
+    glGetLightfv(light, GL_LINEAR_ATTENUATION, param+1);
+    glGetLightfv(light, GL_QUADRATIC_ATTENUATION, param+2);
+    this->SetAttenuation(param);
+    }
+}
+
+//-----------------------------------------------------------------------------
+bool vtkOpenGLLightMonitor::StateChanged()
+{
+  if (!glIsEnabled(GL_LIGHTING))
+    {
+    return false;
+    }
+  long long lastUpTime = this->UpTime;
+  this->Update();
+  return (lastUpTime != this->UpTime);
+}
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLLightMonitor::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+  os << indent << "LightId=" << this->LightId << endl;
+  os << indent << "Enabled=" << this->Enabled << endl;
+  os << indent << "Ambient=";
+  for (int q=0; q<4; ++q)
+    {
+    os << indent << this->Ambient[q] << " ";
+    }
+  os << endl;
+  os << indent << "Diffuse=";
+  for (int q=0; q<4; ++q)
+    {
+    os << indent << this->Diffuse[q] << " ";
+    }
+  os << endl;
+  os << indent << "Specular=";
+  for (int q=0; q<4; ++q)
+    {
+    os << this->Specular[q] << " ";
+    }
+  os << endl;
+  os << indent << "Position=";
+  for (int q=0; q<4; ++q)
+    {
+    os << this->Position[q] << " ";
+    }
+  os << endl;
+  os << indent << "SpotDirection=";
+  for (int q=0; q<3; ++q)
+    {
+    os << this->SpotDirection[q] << " ";
+    }
+  os << endl;
+  os << indent << "SpotExponent=" << this->SpotExponent << endl;
+  os << indent << "SpotCutoff=" << this->SpotCutoff << endl;
+  os << indent << "Attenuation=";
+  for (int q=0; q<3; ++q)
+    {
+    os << indent << this->Attenuation[q] << " ";
+    }
+  os << endl;
+  os << indent << "UpTime=" << this->UpTime << endl;
+}
diff --git a/Rendering/OpenGL/vtkOpenGLLightMonitor.h b/Rendering/OpenGL/vtkOpenGLLightMonitor.h
new file mode 100644
index 0000000..cc458eb
--- /dev/null
+++ b/Rendering/OpenGL/vtkOpenGLLightMonitor.h
@@ -0,0 +1,104 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkOpenGLLightMonitor
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkOpenGLLightMonitor -- A helper for painters that
+// tracks the state of an OpenGL light.
+//
+// .SECTION Description:
+// vtkOpenGLLightMonitor -- A helper for painters that
+// tracks the state of an OpenGL light. A Painter could use this
+// to skip expensive processing that is only needed when
+// lights change.
+//
+// This is not intended to be shared. Each object should use its
+// own instance of this class. It is intended to be called once
+// per render.
+
+#ifndef __vtkOpenGLLightMonitor_H
+#define __vtkOpenGLLightMonitor_H
+
+#include "vtkRenderingOpenGLModule.h" // for export macro
+#include "vtkObject.h"
+
+class VTKRENDERINGOPENGL_EXPORT vtkOpenGLLightMonitor : public vtkObject
+{
+public:
+  static vtkOpenGLLightMonitor* New();
+  static vtkOpenGLLightMonitor *New(int lightId);
+  vtkTypeMacro(vtkOpenGLLightMonitor, vtkObject);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Set/Get the light id of the OpenGL light to track. The
+  // light id must be set prior to use. Default value 0.
+  vtkSetMacro(LightId, int);
+  vtkGetMacro(LightId, int);
+
+  // Description:
+  // Fetches the current GL state and updates the
+  // internal copies of the data. Returns true if
+  // the tracked OpenGL light has changed.
+  // Typically this is the only function a user needs
+  // to call.
+  bool StateChanged();
+
+  // Description:
+  // Fetch and save the OpenGL light state. Note that
+  // this is done automatically in StateChanged.
+  void Update();
+
+  //BTX
+  // Description:
+  // Setters for internal state.
+  void SetEnabled(int val);
+  void SetAmbient(float *val);
+  void SetDiffuse(float *val);
+  void SetSpecular(float *val);
+  void SetPosition(float *val);
+  void SetSpotDirection(float *val);
+  void SetSpotExponent(float val);
+  void SetSpotCutoff(float val);
+  void SetAttenuation(float *val);
+  //ETX
+
+private:
+  vtkOpenGLLightMonitor(int lightId) : LightId(lightId), UpTime(0)
+  { this->Initialize(); }
+
+  vtkOpenGLLightMonitor() : LightId(0), UpTime(0)
+  { this->Initialize(); }
+
+  ~vtkOpenGLLightMonitor(){}
+
+  void Initialize();
+
+private:
+  int LightId;
+  int Enabled;
+  float Ambient[4];
+  float Diffuse[4];
+  float Specular[4];
+  float Position[4];
+  float SpotDirection[3];
+  float SpotExponent;
+  float SpotCutoff;
+  float Attenuation[3];
+  long long UpTime;
+
+private:
+  vtkOpenGLLightMonitor(const vtkOpenGLLightMonitor &); // Not implemented
+  void operator=(const vtkOpenGLLightMonitor &); // Not implemented
+};
+
+#endif
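// A minimal sketch of the intended call pattern for vtkOpenGLLightMonitor,
// assuming a hypothetical helper that caches light-dependent state. The names
// LightCache and RebuildLightDependentState are illustrative, not VTK API;
// only the monitor calls mirror the header above.
#include "vtkOpenGLLightMonitor.h"

class LightCache
{
public:
  LightCache()
    {
    for (int i = 0; i < 8; ++i)
      {
      this->Monitors[i] = vtkOpenGLLightMonitor::New();
      this->Monitors[i]->SetLightId(i);
      }
    }

  ~LightCache()
    {
    for (int i = 0; i < 8; ++i)
      {
      this->Monitors[i]->Delete();
      }
    }

  // Call once per render, with a current OpenGL context.
  void Update()
    {
    bool changed = false;
    for (int i = 0; i < 8; ++i)
      {
      // StateChanged() refreshes the monitor's copy of GL_LIGHT0+i and
      // returns true only when a tracked value actually differs.
      if (this->Monitors[i]->StateChanged())
        {
        changed = true;
        }
      }
    if (changed)
      {
      // UpTime only advances on real changes, so stable lights skip this.
      this->RebuildLightDependentState();
      }
    }

private:
  void RebuildLightDependentState() { /* expensive, light-dependent work */ }
  vtkOpenGLLightMonitor *Monitors[8];
};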
diff --git a/Rendering/OpenGL/vtkOpenGLModelViewProjectionMonitor.cxx b/Rendering/OpenGL/vtkOpenGLModelViewProjectionMonitor.cxx
new file mode 100644
index 0000000..b418e4f
--- /dev/null
+++ b/Rendering/OpenGL/vtkOpenGLModelViewProjectionMonitor.cxx
@@ -0,0 +1,88 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkOpenGLModelViewProjectionMonitor
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkOpenGLModelViewProjectionMonitor.h"
+#include "vtkObjectFactory.h"
+#include "vtkgl.h"
+#include <cstring>
+#include <cmath>
+
+//-----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkOpenGLModelViewProjectionMonitor)
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLModelViewProjectionMonitor::Initialize()
+{
+  memset(this->Projection,0,16*sizeof(float));
+  memset(this->ModelView,0,16*sizeof(float));
+}
+
+//-----------------------------------------------------------------------------
+#define vtkOpenGLModelViewProjectionMonitorSetVectorMacro(_name, _n) \
+void vtkOpenGLModelViewProjectionMonitor::Set##_name(float *val)     \
+{                                                    \
+  int changed = 0;                                   \
+  for (int i=0; i<_n; ++i)                           \
+    {                                                \
+    if ( fabs( val[i] - this->_name[i] ) > 1e-5f )   \
+      {                                              \
+      changed=1;                                     \
+      this->_name[i] = val[i];                       \
+      }                                              \
+    }                                                \
+  if ( changed )                                     \
+    {                                                \
+    this->UpTime += 1;                               \
+    }                                                \
+}
+vtkOpenGLModelViewProjectionMonitorSetVectorMacro(Projection, 16)
+vtkOpenGLModelViewProjectionMonitorSetVectorMacro(ModelView, 16)
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLModelViewProjectionMonitor::Update()
+{
+  GLfloat matrix[16];
+  glGetFloatv(GL_PROJECTION_MATRIX, matrix);
+  this->SetProjection(matrix);
+
+  glGetFloatv(GL_MODELVIEW_MATRIX,  matrix);
+  this->SetModelView(matrix);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkOpenGLModelViewProjectionMonitor::StateChanged()
+{
+  long long oldUpTime = this->UpTime;
+  this->Update();
+  return (this->UpTime != oldUpTime);
+}
+
+//-----------------------------------------------------------------------------
+void vtkOpenGLModelViewProjectionMonitor::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+  os << indent << "Projection=";
+  for (int q=0; q<16; ++q)
+    {
+    os << this->Projection[q] << " ";
+    }
+  os << endl;
+  os << indent << "ModelView=";
+  for (int q=0; q<16; ++q)
+    {
+    os << this->ModelView[q] << " ";
+    }
+  os << endl;
+  os << indent << "UpTime=" << this->UpTime << endl;
+}
diff --git a/Rendering/OpenGL/vtkOpenGLModelViewProjectionMonitor.h b/Rendering/OpenGL/vtkOpenGLModelViewProjectionMonitor.h
new file mode 100644
index 0000000..dc54cb5
--- /dev/null
+++ b/Rendering/OpenGL/vtkOpenGLModelViewProjectionMonitor.h
@@ -0,0 +1,79 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkOpenGLModelViewProjectionMonitor
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkOpenGLModelViewProjectionMonitor -- A helper for painters that
+// tracks state of OpenGL model-view and projection matrices.
+//
+// .SECTION Description:
+// vtkOpenGLModelViewProjectionMonitor -- A helper for painters that
+// tracks state of OpenGL model-view and projection matrices. A Painter
+// could use this to skip expensive processing that is only needed when
+// the model-view or projection matrices change.
+//
+// This is not intended to be shared. Each object should use its
+// own instance of this class. It is intended to be called once
+// per render.
+
+#ifndef __vtkOpenGLModelViewProjectionMonitor_H
+#define __vtkOpenGLModelViewProjectionMonitor_H
+
+#include "vtkRenderingOpenGLModule.h" // for export macro
+#include "vtkObject.h"
+
+class VTKRENDERINGOPENGL_EXPORT vtkOpenGLModelViewProjectionMonitor : public vtkObject
+{
+public:
+  static vtkOpenGLModelViewProjectionMonitor* New();
+  vtkTypeMacro(vtkOpenGLModelViewProjectionMonitor, vtkObject);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Fetches the current GL state and updates the
+  // internal copies of the data. Returns true if
+  // any of the tracked OpenGL matrices have changed.
+  // Typically this is the only function a user needs
+  // to call.
+  bool StateChanged();
+
+  // Description:
+  // Fetch and store the OpenGL model-view and projection matrices.
+  // Note that this is done automatically in StateChanged.
+  void Update();
+
+  //BTX
+  // Description:
+  // Set the matrix data.
+  void SetProjection(float *val);
+  void SetModelView(float *val);
+  //ETX
+
+protected:
+  vtkOpenGLModelViewProjectionMonitor() : UpTime(0)
+  { this->Initialize(); }
+
+  ~vtkOpenGLModelViewProjectionMonitor(){}
+
+  void Initialize();
+
+private:
+  float Projection[16];
+  float ModelView[16];
+  long long UpTime;
+
+private:
+  vtkOpenGLModelViewProjectionMonitor(const vtkOpenGLModelViewProjectionMonitor&); // Not implemented
+  void operator=(const vtkOpenGLModelViewProjectionMonitor &); // Not implemented
+};
+
+#endif
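// A compact sketch of the same call pattern for the matrix monitor, assuming
// a hypothetical owner; RecomputeMatrixDependentState is an illustrative
// placeholder, not VTK API.
#include "vtkOpenGLModelViewProjectionMonitor.h"

struct MatrixCache
{
  MatrixCache()
    : Monitor(vtkOpenGLModelViewProjectionMonitor::New()) {}

  ~MatrixCache()
    { this->Monitor->Delete(); }

  // Call once per render, with a current OpenGL context.
  void Refresh()
    {
    // StateChanged() re-reads GL_MODELVIEW_MATRIX and GL_PROJECTION_MATRIX;
    // the setters above bump UpTime only when a component moves by more
    // than 1e-5, so unchanged frames skip the recompute entirely.
    if (this->Monitor->StateChanged())
      {
      this->RecomputeMatrixDependentState();
      }
    }

  void RecomputeMatrixDependentState() { /* expensive matrix-dependent work */ }

  vtkOpenGLModelViewProjectionMonitor *Monitor;
};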
diff --git a/Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.cxx b/Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.cxx
index 9665177..855b7c8 100644
--- a/Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.cxx
+++ b/Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.cxx
@@ -27,13 +27,17 @@
 #include "vtkDataSetAttributes.h"
 #include "vtkObjectFactory.h"
 #include "vtkOpenGLGL2PSHelper.h"
+#include "vtkOpenGLRenderWindow.h"
+#ifndef VTK_LEGACY_REMOVE
 #include "vtkOpenGLExtensionManager.h"
+#endif
 #include "vtkRenderer.h"
 #include "vtkgl.h"
 
 #include <algorithm>
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkOpenGLPainterDeviceAdapter);
 
@@ -844,15 +848,29 @@ void vtkOpenGLPainterDeviceAdapter::DrawElements(int mode, vtkIdType count,
 }
 
 //-----------------------------------------------------------------------------
-
 int vtkOpenGLPainterDeviceAdapter::Compatible(vtkRenderer *renderer)
 {
-  return renderer->IsA("vtkOpenGLRenderer");
+  vtkOpenGLRenderWindow *context
+    = vtkOpenGLRenderWindow::SafeDownCast(renderer->GetRenderWindow());
+  if (!context)
+    {
+    return false;
+    }
+#ifndef VTK_LEGACY_REMOVE
+  vtkOpenGLExtensionManager *manager = context->GetExtensionManager();
+  if (!manager->ExtensionSupported("GL_ARB_multisample"))
+    {
+    return false;
+    }
+#endif
+  return true;
 }
 
+#ifndef VTK_LEGACY_REMOVE
 //-----------------------------------------------------------------------------
 void vtkOpenGLPainterDeviceAdapter::MakeLighting(int mode)
 {
+  VTK_LEGACY_BODY(vtkOpenGLPainterDeviceAdapter::MakeLighting, "VTK 6.1");
   if (mode)
     {
     glEnable(GL_LIGHTING);
@@ -866,6 +884,7 @@ void vtkOpenGLPainterDeviceAdapter::MakeLighting(int mode)
 //-----------------------------------------------------------------------------
 int vtkOpenGLPainterDeviceAdapter::QueryLighting()
 {
+  VTK_LEGACY_BODY(vtkOpenGLPainterDeviceAdapter::QueryLighting, "VTK 6.1");
   if (glIsEnabled(GL_LIGHTING))
     {
     return 1;
@@ -879,6 +898,7 @@ int vtkOpenGLPainterDeviceAdapter::QueryLighting()
 //-----------------------------------------------------------------------------
 void vtkOpenGLPainterDeviceAdapter::MakeMultisampling(int mode)
 {
+  VTK_LEGACY_BODY(vtkOpenGLPainterDeviceAdapter::MakeMultisampling, "VTK 6.1");
   if (mode)
     {
     glEnable(vtkgl::MULTISAMPLE);
@@ -892,6 +912,7 @@ void vtkOpenGLPainterDeviceAdapter::MakeMultisampling(int mode)
 //-----------------------------------------------------------------------------
 int vtkOpenGLPainterDeviceAdapter::QueryMultisampling()
 {
+  VTK_LEGACY_BODY(vtkOpenGLPainterDeviceAdapter::QueryMultisampling, "VTK 6.1");
   if (glIsEnabled(vtkgl::MULTISAMPLE))
     {
     return 1;
@@ -905,6 +926,7 @@ int vtkOpenGLPainterDeviceAdapter::QueryMultisampling()
 //-----------------------------------------------------------------------------
 void vtkOpenGLPainterDeviceAdapter::MakeBlending(int mode)
 {
+  VTK_LEGACY_BODY(vtkOpenGLPainterDeviceAdapter::MakeBlending, "VTK 6.1");
   if (mode)
     {
     glEnable(GL_BLEND);
@@ -918,6 +940,7 @@ void vtkOpenGLPainterDeviceAdapter::MakeBlending(int mode)
 //-----------------------------------------------------------------------------
 int vtkOpenGLPainterDeviceAdapter::QueryBlending()
 {
+  VTK_LEGACY_BODY(vtkOpenGLPainterDeviceAdapter::QueryBlending, "VTK 6.1");
   if (glIsEnabled(GL_BLEND))
     {
     return 1;
@@ -927,6 +950,7 @@ int vtkOpenGLPainterDeviceAdapter::QueryBlending()
     return 0;
     }
 }
+#endif
 
 //-----------------------------------------------------------------------------
 void vtkOpenGLPainterDeviceAdapter::MakeVertexEmphasis(bool mode)
@@ -953,6 +977,7 @@ void vtkOpenGLPainterDeviceAdapter::MakeVertexEmphasis(bool mode)
     glDepthRange(this->RangeNear, this->RangeFar);
     glDepthMask(GL_TRUE);
     }
+  vtkOpenGLCheckErrorMacro("failed after MakeVertexEmphasis");
 }
 
 //-----------------------------------------------------------------------------
@@ -967,6 +992,7 @@ void vtkOpenGLPainterDeviceAdapter::WriteStencil(vtkIdType value)
       }
     glStencilFunc(GL_ALWAYS, static_cast<GLint>(value), this->MaxStencil);
     glStencilOp(GL_KEEP, GL_KEEP, GL_REPLACE);
+    vtkOpenGLCheckErrorMacro("failed after WriteStencil");
     }
 }
 
@@ -978,6 +1004,7 @@ void vtkOpenGLPainterDeviceAdapter::TestStencil(vtkIdType value)
     value = value % this->MaxStencil + 1;
     glStencilFunc(GL_EQUAL, static_cast<GLint>(value), this->MaxStencil);
     glStencilOp(GL_KEEP, GL_KEEP, GL_KEEP);
+    vtkOpenGLCheckErrorMacro("failed after TestStencil");
     }
 }
 
@@ -995,6 +1022,7 @@ void vtkOpenGLPainterDeviceAdapter::Stencil(int on)
     {
     glDisable(GL_STENCIL_TEST);
     }
+  vtkOpenGLCheckErrorMacro("failed after Stencil");
 }
 
 
diff --git a/Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.h b/Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.h
index a147aa2..3b4cb9b 100644
--- a/Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.h
+++ b/Rendering/OpenGL/vtkOpenGLPainterDeviceAdapter.h
@@ -105,28 +105,40 @@ public:
   virtual int Compatible(vtkRenderer *renderer);
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Turns lighting on and off.
-  virtual void MakeLighting(int mode);
+  VTK_LEGACY(virtual void MakeLighting(int mode));
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Returns current lighting setting.
-  virtual int QueryLighting();
+  VTK_LEGACY(virtual int QueryLighting());
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Turns antialiasing on and off.
-  virtual void MakeMultisampling(int mode);
+  VTK_LEGACY(virtual void MakeMultisampling(int mode));
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Returns current antialiasing setting.
-  virtual int QueryMultisampling();
+  VTK_LEGACY(virtual int QueryMultisampling());
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Turns blending on and off.
-  virtual void MakeBlending(int mode);
+  VTK_LEGACY(virtual void MakeBlending(int mode));
 
   // Description:
+  // @deprecated Code that needs direct access to OpenGL state should
+  // manage it locally.
   // Returns current blending setting.
-  virtual int QueryBlending();
+  VTK_LEGACY(virtual int QueryBlending());
 
   // Description:
   // Turns emphasis of vertices on or off for vertex selection.
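// A short sketch of the "manage it locally" guidance above: instead of going
// through the deprecated MakeLighting/QueryLighting wrappers, code can save,
// change, and restore the GL state it touches directly. DrawUnlitOverlay is a
// hypothetical helper, not VTK API.
#include "vtkOpenGL.h"

static void DrawUnlitOverlay()
{
  // remember the current state rather than querying the device adapter
  GLboolean lightingWasOn = glIsEnabled(GL_LIGHTING);

  glDisable(GL_LIGHTING);
  // ... draw geometry that must not be lit ...

  // restore exactly what was there before
  if (lightingWasOn)
    {
    glEnable(GL_LIGHTING);
    }
}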
diff --git a/Rendering/OpenGL/vtkOpenGLPolyDataMapper.cxx b/Rendering/OpenGL/vtkOpenGLPolyDataMapper.cxx
index bf187e5..1a0636d 100644
--- a/Rendering/OpenGL/vtkOpenGLPolyDataMapper.cxx
+++ b/Rendering/OpenGL/vtkOpenGLPolyDataMapper.cxx
@@ -28,11 +28,13 @@
 #include "vtkProperty.h"
 #include "vtkTimerLog.h"
 #include "vtkTriangle.h"
-#include "vtkOpenGLRenderWindow.h"
 #include "vtkOpenGLTexture.h"
 #include "vtkImageData.h"
+#include "vtkWindow.h"
+#include "vtkRenderWindow.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 #include <math.h>
 
@@ -84,6 +86,7 @@ void vtkOpenGLPolyDataMapper::ReleaseGraphicsResources(vtkWindow *win)
     {
     win->MakeCurrent();
     glDeleteLists(this->ListId, 1);
+    vtkOpenGLCheckErrorMacro("failed after glDeleteLists");
     }
   this->ListId = 0;
   this->LastWindow = NULL;
@@ -99,6 +102,8 @@ void vtkOpenGLPolyDataMapper::ReleaseGraphicsResources(vtkWindow *win)
 //
 void vtkOpenGLPolyDataMapper::RenderPiece(vtkRenderer *ren, vtkActor *act)
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkPolyData *input= this->GetInput();
 
   // make sure that we've been properly initialized
@@ -271,6 +276,8 @@ void vtkOpenGLPolyDataMapper::RenderPiece(vtkRenderer *ren, vtkActor *act)
     GLenum clipPlaneId = static_cast<GLenum>(GL_CLIP_PLANE0 + c);
     glDisable(clipPlaneId);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after RenderPiece");
 }
 
 //
@@ -1067,6 +1074,8 @@ void vtkOpenGLPolyDataMapper::DrawPolygons(int idx,
                                          vtkCellArray *ca,
                                          vtkRenderer *ren)
 {
+  vtkOpenGLClearErrorMacro();
+
   void *voidPoints = p->GetVoidPointer(0);
   void *voidNormals = 0;
   void *voidTCoords = 0;
@@ -1310,6 +1319,7 @@ void vtkOpenGLPolyDataMapper::DrawPolygons(int idx,
       }
     }
     }
+  vtkOpenGLCheckErrorMacro("failed after DrawPolygons");
 }
 
 // fix refs here
@@ -1345,6 +1355,7 @@ void vtkOpenGLPolyDataMapper::DrawTStrips(int idx,
                                         vtkCellArray *ca,
                                         vtkRenderer *ren)
 {
+  vtkOpenGLClearErrorMacro();
   void *voidPoints = p->GetVoidPointer(0);
   void *voidNormals = 0;
   void *voidTCoords = 0;
@@ -1528,6 +1539,7 @@ void vtkOpenGLPolyDataMapper::DrawTStrips(int idx,
       }
     }
     }
+  vtkOpenGLCheckErrorMacro("failed after DrawTStrips");
 }
 
 static void vtkOpenGLPolyDataMapperDrawTStripLines(int idx,
@@ -1541,6 +1553,7 @@ static void vtkOpenGLPolyDataMapperDrawTStripLines(int idx,
                                             vtkCellArray *ca,
                                             vtkRenderer *ren)
 {
+  vtkOpenGLClearErrorMacro();
   void *voidPoints = p->GetVoidPointer(0);
   void *voidNormals = 0;
   void *voidTCoords = 0;
@@ -1769,11 +1782,13 @@ static void vtkOpenGLPolyDataMapperDrawTStripLines(int idx,
       }
     }
     }
+  vtkOpenGLStaticCheckErrorMacro("failed after DrawTStripLines");
 }
 
 // Draw method for OpenGL.
 int vtkOpenGLPolyDataMapper::Draw(vtkRenderer *aren, vtkActor *act)
 {
+  vtkOpenGLClearErrorMacro();
   vtkOpenGLRenderer *ren = static_cast<vtkOpenGLRenderer *>(aren);
   int rep, interpolation;
   float tran;
@@ -2083,6 +2098,7 @@ int vtkOpenGLPolyDataMapper::Draw(vtkRenderer *aren, vtkActor *act)
       }
     }
 
+  vtkOpenGLCheckErrorMacro("failed after Draw");
   this->UpdateProgress(1.0);
   return noAbort;
 }
diff --git a/Rendering/OpenGL/vtkOpenGLPolyDataMapper2D.cxx b/Rendering/OpenGL/vtkOpenGLPolyDataMapper2D.cxx
index 09035c2..b875d27 100644
--- a/Rendering/OpenGL/vtkOpenGLPolyDataMapper2D.cxx
+++ b/Rendering/OpenGL/vtkOpenGLPolyDataMapper2D.cxx
@@ -29,7 +29,7 @@
 #include "vtkViewport.h"
 #include "vtkWindow.h"
 #include "vtkgluPickMatrix.h"
-
+#include "vtkOpenGLError.h"
 #include <math.h>
 
 vtkStandardNewMacro(vtkOpenGLPolyDataMapper2D);
@@ -37,6 +37,7 @@ vtkStandardNewMacro(vtkOpenGLPolyDataMapper2D);
 void vtkOpenGLPolyDataMapper2D::RenderOverlay(vtkViewport* viewport,
                                               vtkActor2D* actor)
 {
+  vtkOpenGLClearErrorMacro();
   int            numPts;
   vtkPolyData    *input=static_cast<vtkPolyData *>(this->GetInput());
   int            j;
@@ -108,6 +109,7 @@ void vtkOpenGLPolyDataMapper2D::RenderOverlay(vtkViewport* viewport,
   int size[2];
   size[0] = viewport->GetSize()[0];
   size[1] = viewport->GetSize()[1];
+
   double *vport = viewport->GetViewport();
   int* actorPos =
     actor->GetPositionCoordinate()->GetComputedViewportValue(viewport);
@@ -213,16 +215,31 @@ void vtkOpenGLPolyDataMapper2D::RenderOverlay(vtkViewport* viewport,
   int yoff = static_cast<int>(actorPos[1] - (visVP[1] - vport[1])*
                               winSize[1]);
 
+  // set ortho projection
+  float left = -xoff;
+  float right = -xoff + size[0];
+  float bottom = -yoff;
+  float top = -yoff + size[1];
+
+  // it's an error to call glOrtho with
+  // either left==right or top==bottom
+  if (left==right)
+    {
+    right = left + 1.0;
+    }
+  if (bottom==top)
+    {
+    top = bottom + 1.0;
+    }
+
   if (actor->GetProperty()->GetDisplayLocation() ==
        VTK_FOREGROUND_LOCATION)
     {
-    glOrtho(-xoff,-xoff + size[0],
-            -yoff, -yoff +size[1], 0, 1);
+    glOrtho(left, right, bottom, top, 0, 1);
     }
   else
     {
-    glOrtho(-xoff,-xoff + size[0],
-            -yoff, -yoff + size[1], -1, 0);
+    glOrtho(left, right, bottom, top, -1, 0);
     }
 
   // Clipping plane stuff
@@ -422,6 +439,7 @@ void vtkOpenGLPolyDataMapper2D::RenderOverlay(vtkViewport* viewport,
   // Turn it back on in case we've turned it off
   glDepthMask(GL_TRUE);
   glDisable(GL_TEXTURE_2D);
+  vtkOpenGLCheckErrorMacro("failed after RenderOverlay");
 }
 
 //----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkOpenGLProperty.cxx b/Rendering/OpenGL/vtkOpenGLProperty.cxx
index 4013288..5f0e137 100644
--- a/Rendering/OpenGL/vtkOpenGLProperty.cxx
+++ b/Rendering/OpenGL/vtkOpenGLProperty.cxx
@@ -30,13 +30,12 @@
 #include "vtkUniformVariables.h"
 #include "vtkShader2Collection.h"
 #include "vtkTextureUnitManager.h"
-#include "vtkOpenGLRenderWindow.h"
-#include "vtkXMLMaterial.h"
-#include "vtkXMLShader.h"
 #include "vtkGLSLShaderDeviceAdapter2.h"
 #include "vtkOpenGLPainterDeviceAdapter.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 
-#include <assert.h>
+#include <cassert>
 
 namespace
 {
@@ -187,15 +186,17 @@ bool vtkOpenGLProperty::RenderShaders(vtkActor* vtkNotUsed(anActor), vtkRenderer
     vtkErrorMacro("the vtkOpenGLProperty need a vtkOpenGLRenderer to render.");
     return false;
     }
-  vtkOpenGLRenderWindow* context = vtkOpenGLRenderWindow::SafeDownCast(
-      ren->GetRenderWindow());
+  vtkOpenGLRenderWindow *context
+    = vtkOpenGLRenderWindow::SafeDownCast(ren->GetRenderWindow());
+
   vtkShaderProgram2* prog = oRenderer->GetShaderProgram();
   if (prog)
     {
     assert("check: prog is initialized" && prog->GetContext() == context);
     }
 
-  bool useShaders = false;
+  vtkOpenGLClearErrorMacro();
+
   vtkShaderProgram2 *propProg;
   if (this->Shading)
     {
@@ -205,18 +206,25 @@ bool vtkOpenGLProperty::RenderShaders(vtkActor* vtkNotUsed(anActor), vtkRenderer
     {
     propProg = 0;
     }
+
+  bool useShaders = false;
   if (prog || propProg)
     {
-    useShaders = vtkShaderProgram2::IsSupported(context);
-    if (useShaders)
+    bool shader_support = vtkShaderProgram2::IsSupported(context);
+
+    // Mesa doesn't support separate compilation units.
+    // OS Mesa: some tests fail
+    // with version 9.1.4.
+    vtkOpenGLExtensionManager *extensions = context->GetExtensionManager();
+
+    bool driver_support
+      = !extensions->DriverIsMesa()
+      || extensions->GetIgnoreDriverBugs(
+        "Mesa support for separate compilation units");
+
+    if (shader_support && driver_support)
       {
-      const char *gl_renderer =
-        reinterpret_cast<const char *>(glGetString(GL_RENDERER));
-      if (strstr(gl_renderer, "Mesa") != 0)
-        {
-        useShaders = false;
-        vtkErrorMacro(<<"Mesa does not support separate compilation units.");
-        }
+      useShaders = true;
       }
     else
       {
@@ -634,12 +642,17 @@ bool vtkOpenGLProperty::RenderTextures(vtkActor*, vtkRenderer* ren,
       vtkgl::ActiveTexture(vtkgl::TEXTURE0);
       }
     }
+
+  vtkOpenGLCheckErrorMacro("failed after RenderTextures");
+
   return (numTextures > 0);
 }
 
 //-----------------------------------------------------------------------------
 void vtkOpenGLProperty::PostRender(vtkActor *actor, vtkRenderer *renderer)
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkOpenGLRenderer *oRenderer = static_cast<vtkOpenGLRenderer *>(renderer);
   vtkShaderProgram2 *prog = oRenderer->GetShaderProgram();
 
@@ -699,6 +712,8 @@ void vtkOpenGLProperty::PostRender(vtkActor *actor, vtkRenderer *renderer)
       vtkgl::ActiveTexture(vtkgl::TEXTURE0);
       }
     }
+
+  vtkOpenGLCheckErrorMacro("failed after PostRender");
 }
 
 //-----------------------------------------------------------------------------
@@ -743,56 +758,6 @@ void vtkOpenGLProperty::LoadMultiTexturingExtensions(vtkRenderer* ren)
     }
 }
 
-// ----------------------------------------------------------------------------
-// Description:
-// Read this->Material from new style shaders.
-void vtkOpenGLProperty::ReadFrameworkMaterial()
-{
-  vtkShaderProgram2* prog = vtkShaderProgram2::New();
-  this->SetPropProgram(prog);
-  prog->Delete();
-
-  if (!this->Material)
-    {
-    vtkErrorMacro("No Material set to read.");
-    return;
-    }
-
-  int cc;
-  int max = this->Material->GetNumberOfVertexShaders();
-  for (cc=0; cc < max; cc++)
-    {
-    vtkShader2 *shader = vtkShader2::New();
-    vtkXMLShader *XMLshader = this->Material->GetVertexShader(cc);
-
-    shader->SetType(VTK_SHADER_TYPE_VERTEX);
-    shader->SetSourceCode(XMLshader->GetCode());
-
-    // there is no uniform in the example
-//    vtkUniformVariables *var=shader->GetUniformVariables();
-//    var->
-
-    prog->GetShaders()->AddItem(shader);
-    shader->Delete();
-    }
-  vtkDebugMacro(<< max << " Vertex shaders added.");
-
-  max = this->Material->GetNumberOfFragmentShaders();
-  for (cc=0; cc < max; cc++)
-    {
-    vtkShader2 *shader = vtkShader2::New();
-    vtkXMLShader *XMLshader = this->Material->GetFragmentShader(cc);
-
-    shader->SetType(VTK_SHADER_TYPE_FRAGMENT);
-    shader->SetSourceCode(XMLshader->GetCode());
-
-    prog->GetShaders()->AddItem(shader);
-    shader->Delete();
-    }
-  vtkDebugMacro(<< max << " Fragment shaders added.");
-
-}
-
 //-----------------------------------------------------------------------------
 void vtkOpenGLProperty::ReleaseGraphicsResources(vtkWindow *win)
 {
@@ -800,6 +765,7 @@ void vtkOpenGLProperty::ReleaseGraphicsResources(vtkWindow *win)
   int numTextures = this->GetNumberOfTextures();
   if (win && win->GetMapped() && numTextures > 0 && vtkgl::ActiveTexture)
     {
+    vtkOpenGLClearErrorMacro();
     GLint numSupportedTextures;
     glGetIntegerv(vtkgl::MAX_TEXTURE_UNITS, &numSupportedTextures);
     for (int i = 0; i < numTextures; i++)
@@ -819,6 +785,7 @@ void vtkOpenGLProperty::ReleaseGraphicsResources(vtkWindow *win)
       this->GetTextureAtIndex(i)->ReleaseGraphicsResources(win);
       }
     vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+    vtkOpenGLCheckErrorMacro("failed during ReleaseGraphicsResources");
     }
   else if (numTextures > 0 && vtkgl::ActiveTexture)
     {
diff --git a/Rendering/OpenGL/vtkOpenGLProperty.h b/Rendering/OpenGL/vtkOpenGLProperty.h
index f0d4d68..4741fc6 100644
--- a/Rendering/OpenGL/vtkOpenGLProperty.h
+++ b/Rendering/OpenGL/vtkOpenGLProperty.h
@@ -114,10 +114,6 @@ protected:
   // Load OpenGL extensions for multi texturing.
   void LoadMultiTexturingExtensions(vtkRenderer* ren);
 
-  // Description:
-  // Read this->Material from new style shaders.
-  virtual void ReadFrameworkMaterial();
-
   // Owned. Result of merging the shader program of the renderer
   // and the PropProgram.
   vtkShaderProgram2 *CachedShaderProgram2;
diff --git a/Rendering/OpenGL/vtkOpenGLRenderWindow.cxx b/Rendering/OpenGL/vtkOpenGLRenderWindow.cxx
index 4a9c659..46c7aa0 100644
--- a/Rendering/OpenGL/vtkOpenGLRenderWindow.cxx
+++ b/Rendering/OpenGL/vtkOpenGLRenderWindow.cxx
@@ -13,7 +13,7 @@
 
 =========================================================================*/
 #include "vtkOpenGLRenderWindow.h"
-#include "assert.h"
+#include <cassert>
 #include "vtkFloatArray.h"
 #include "vtkgl.h"
 #include "vtkIdList.h"
@@ -26,28 +26,40 @@
 #include "vtkOpenGLPolyDataMapper.h"
 #include "vtkOpenGLProperty.h"
 #include "vtkOpenGLRenderer.h"
+#include "vtkOpenGLError.h"
 #include "vtkOpenGLTexture.h"
 #include "vtkUnsignedCharArray.h"
 #include "vtkTextureUnitManager.h"
+#include "vtkStdString.h"
+#include <sstream>
+using std::ostringstream;
 
 vtkCxxSetObjectMacro(vtkOpenGLRenderWindow, ExtensionManager, vtkOpenGLExtensionManager);
 vtkCxxSetObjectMacro(vtkOpenGLRenderWindow, HardwareSupport, vtkOpenGLHardwareSupport);
 vtkCxxSetObjectMacro(vtkOpenGLRenderWindow, TextureUnitManager, vtkTextureUnitManager);
 
 // Initialize static member that controls global maximum number of multisamples
+// (off by default on Apple because it causes problems on some Mac models).
+#if defined(__APPLE__)
+static int vtkOpenGLRenderWindowGlobalMaximumNumberOfMultiSamples = 0;
+#else
 static int vtkOpenGLRenderWindowGlobalMaximumNumberOfMultiSamples = 8;
+#endif
 
+// ----------------------------------------------------------------------------
 void vtkOpenGLRenderWindow::SetGlobalMaximumNumberOfMultiSamples(int val)
 {
   if (val == vtkOpenGLRenderWindowGlobalMaximumNumberOfMultiSamples) return;
   vtkOpenGLRenderWindowGlobalMaximumNumberOfMultiSamples = val;
 }
 
+// ----------------------------------------------------------------------------
 int vtkOpenGLRenderWindow::GetGlobalMaximumNumberOfMultiSamples()
 {
   return vtkOpenGLRenderWindowGlobalMaximumNumberOfMultiSamples;
 }
 
+// ----------------------------------------------------------------------------
 vtkOpenGLRenderWindow::vtkOpenGLRenderWindow()
 {
   this->ExtensionManager = NULL;
@@ -56,8 +68,7 @@ vtkOpenGLRenderWindow::vtkOpenGLRenderWindow()
 
   this->MultiSamples = vtkOpenGLRenderWindowGlobalMaximumNumberOfMultiSamples;
   this->TextureResourceIds = vtkIdList::New();
-  if ( this->WindowName )
-    delete [] this->WindowName;
+  delete [] this->WindowName;
   this->WindowName = new char[strlen("Visualization Toolkit - OpenGL")+1];
   strcpy( this->WindowName, "Visualization Toolkit - OpenGL" );
 
@@ -70,12 +81,15 @@ vtkOpenGLRenderWindow::vtkOpenGLRenderWindow()
   this->BackBuffer=static_cast<unsigned int>(GL_BACK);
   this->FrontBuffer=static_cast<unsigned int>(GL_FRONT);
 
+#ifndef VTK_LEGACY_REMOVE
   this->LastGraphicError=static_cast<unsigned int>(GL_NO_ERROR);
+#endif
 
   this->OwnContext=1;
 }
 
 // free up memory & close the window
+// ----------------------------------------------------------------------------
 vtkOpenGLRenderWindow::~vtkOpenGLRenderWindow()
 {
   this->TextureResourceIds->Delete();
@@ -1595,13 +1609,15 @@ int vtkOpenGLRenderWindow::CreateHardwareOffScreenWindow(int width, int height)
   int supports_GL_EXT_framebuffer_object=
     extensions->ExtensionSupported("GL_EXT_framebuffer_object");
 
+  // TODO: Mesa 6.5.1 is from 2006; verify that this is still an issue
+  // with newer releases.
   // We skip it if you use Mesa. Even if the VTK offscreen test passes (OSCone)
   // with Mesa, all the Paraview batch test are failing (Mesa 6.5.1 or CVS)
   // After too much time spent to investigate this case, we just skip it.
-  const GLubyte *openglRenderer=glGetString(GL_RENDERER);
-  const char *substring=strstr(reinterpret_cast<const char *>(openglRenderer),
-                               "Mesa");
-  int isMesa=substring!=0;
+  int isMesa
+    = extensions->DriverGLRendererHas("Mesa")
+    && !extensions->GetIgnoreDriverBugs("Mesa 6.5.1 pvbatch offscreen bug");
+
   int supports_texture_non_power_of_two=
     extensions->ExtensionSupported("GL_VERSION_2_0") ||
     extensions->ExtensionSupported("GL_ARB_texture_non_power_of_two");
@@ -1868,29 +1884,25 @@ void vtkOpenGLRenderWindow::DestroyHardwareOffScreenWindow()
   assert("post: destroyed" && !this->OffScreenUseFrameBuffer);
 }
 
-// ----------------------------------------------------------------------------
-// Description:
-// Update graphic error status, regardless of ReportGraphicErrors flag.
-// It means this method can be used in any context and is not restricted to
-// debug mode.
+#ifndef VTK_LEGACY_REMOVE
+//----------------------------------------------------------------------------
 void vtkOpenGLRenderWindow::CheckGraphicError()
 {
+  VTK_LEGACY_BODY(vtkRenderWindow::CheckGraphicError, "VTK 6.1");
   this->LastGraphicError=static_cast<unsigned int>(glGetError());
 }
 
-// ----------------------------------------------------------------------------
-// Description:
-// Return the last graphic error status. Initial value is false.
+//----------------------------------------------------------------------------
 int vtkOpenGLRenderWindow::HasGraphicError()
 {
+  VTK_LEGACY_BODY(vtkRenderWindow::HasGraphicError, "VTK 6.1");
   return static_cast<GLenum>(this->LastGraphicError)!=GL_NO_ERROR;
 }
 
-// ----------------------------------------------------------------------------
-// Description:
-// Return a string matching the last graphic error status.
+//----------------------------------------------------------------------------
 const char *vtkOpenGLRenderWindow::GetLastGraphicErrorString()
 {
+  VTK_LEGACY_BODY(vtkRenderWindow::GetLastGraphicErrorString, "VTK 6.1");
   const char *result;
   switch(static_cast<GLenum>(this->LastGraphicError))
     {
@@ -1933,7 +1945,7 @@ const char *vtkOpenGLRenderWindow::GetLastGraphicErrorString()
     }
   return result;
 }
-
+#endif
 
 // ----------------------------------------------------------------------------
 // Description:
diff --git a/Rendering/OpenGL/vtkOpenGLRenderWindow.h b/Rendering/OpenGL/vtkOpenGLRenderWindow.h
index 2165674..8197df3 100644
--- a/Rendering/OpenGL/vtkOpenGLRenderWindow.h
+++ b/Rendering/OpenGL/vtkOpenGLRenderWindow.h
@@ -24,13 +24,13 @@
 
 #include "vtkRenderingOpenGLModule.h" // For export macro
 #include "vtkRenderWindow.h"
-
 #include "vtkOpenGL.h" // Needed for GLuint.
 
 class vtkIdList;
 class vtkOpenGLExtensionManager;
 class vtkOpenGLHardwareSupport;
 class vtkTextureUnitManager;
+class vtkStdString;
 
 class VTKRENDERINGOPENGL_EXPORT vtkOpenGLRenderWindow : public vtkRenderWindow
 {
@@ -160,18 +160,11 @@ public:
   unsigned int GetFrontBuffer();
 
   // Description:
-  // Update graphic error status, regardless of ReportGraphicErrors flag.
-  // It means this method can be used in any context and is not restricted to
-  // debug mode.
-  virtual void CheckGraphicError();
-
-  // Description:
-  // Return the last graphic error status. Initial value is false.
-  virtual int HasGraphicError();
-
-  // Description:
-  // Return a string matching the last graphic error status.
-  virtual const char *GetLastGraphicErrorString();
+  // @deprecated Replaced by
+  // vtkOpenGLCheckErrorMacro
+  VTK_LEGACY(virtual void CheckGraphicError());
+  VTK_LEGACY(virtual int HasGraphicError());
+  VTK_LEGACY(virtual const char *GetLastGraphicErrorString());
 
   // Description:
   // Get the time when the OpenGL context was created.
@@ -257,8 +250,12 @@ protected:
   unsigned int FrontBuffer;
   unsigned int BackBuffer;
 
-  // Actual type is GLenum. Last value returned by glGetError().
+#ifndef VTK_LEGACY_REMOVE
+  // Description:
+  // @deprecated Replaced by
+  // vtkOpenGLCheckErrorMacro
   unsigned int LastGraphicError;
+#endif
 
   // Description:
   // Flag telling if the context has been created here or was inherited.
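// A minimal sketch of the replacement pattern used throughout this patch:
// clear the GL error flag on entering a GL code path, then check on exit.
// Member functions pair vtkOpenGLClearErrorMacro with vtkOpenGLCheckErrorMacro;
// the static helper vtkOpenGLPolyDataMapperDrawTStripLines above uses the
// Static variant instead. DrawQuadOutline is a hypothetical helper, not VTK API.
#include "vtkOpenGL.h"
#include "vtkOpenGLError.h"

static void DrawQuadOutline()
{
  vtkOpenGLClearErrorMacro();   // discard any stale error left by earlier code

  glBegin(GL_LINE_LOOP);        // the actual GL work
  glVertex2f(0.0f, 0.0f);
  glVertex2f(1.0f, 0.0f);
  glVertex2f(1.0f, 1.0f);
  glVertex2f(0.0f, 1.0f);
  glEnd();

  vtkOpenGLStaticCheckErrorMacro("failed after DrawQuadOutline");
}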
diff --git a/Rendering/OpenGL/vtkOpenGLRenderer.cxx b/Rendering/OpenGL/vtkOpenGLRenderer.cxx
index 1e59ff8..6075928 100644
--- a/Rendering/OpenGL/vtkOpenGLRenderer.cxx
+++ b/Rendering/OpenGL/vtkOpenGLRenderer.cxx
@@ -21,6 +21,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkOpenGLLight.h"
 #include "vtkOpenGLProperty.h"
 #include "vtkRenderWindow.h"
+#include "vtkOpenGLRenderWindow.h"
 #include "vtkOpenGLExtensionManager.h"
 #include "vtkgl.h" // vtkgl namespace
 #include "vtkImageImport.h"
@@ -31,11 +32,12 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkRenderState.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkShaderProgram2.h"
 
 #include <math.h>
-#include <assert.h>
+#include <cassert>
 #include <list>
 
 class vtkGLPickInfo
@@ -89,6 +91,8 @@ vtkOpenGLRenderer::vtkOpenGLRenderer()
 // into graphics pipeline.
 void vtkOpenGLRenderer::ClearLights (void)
 {
+  vtkOpenGLClearErrorMacro();
+
   short curLight;
   float Info[4];
 
@@ -118,11 +122,15 @@ void vtkOpenGLRenderer::ClearLights (void)
     }
 
   this->NumberOfLightsBound = 0;
+
+  vtkOpenGLCheckErrorMacro("failed after ClearLights");
 }
 
 // Ask lights to load themselves into graphics pipeline.
 int vtkOpenGLRenderer::UpdateLights ()
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkLight *light;
   short curLight;
   float status;
@@ -180,6 +188,9 @@ int vtkOpenGLRenderer::UpdateLights ()
 
   glPopMatrix();
   glEnable(GL_LIGHTING);
+
+  vtkOpenGLCheckErrorMacro("failed after UpdateLights");
+
   return count;
 }
 
@@ -190,6 +201,7 @@ int vtkOpenGLRenderer::UpdateLights ()
 int vtkOpenGLRenderer::GetUseTextureUniformVariable()
 {
   GLint result=vtkgl::GetUniformLocation(this->ProgramShader,"useTexture");
+  vtkOpenGLCheckErrorMacro("failed at glGetUniformLocation");
   if(result==-1)
     {
     vtkErrorMacro(<<"useTexture is not a uniform variable");
@@ -204,6 +216,7 @@ int vtkOpenGLRenderer::GetUseTextureUniformVariable()
 int vtkOpenGLRenderer::GetTextureUniformVariable()
 {
   GLint result=vtkgl::GetUniformLocation(this->ProgramShader,"texture");
+  vtkOpenGLCheckErrorMacro("failed at glGetUniformLocation");
   if(result==-1)
     {
     vtkErrorMacro(<<"texture is not a uniform variable");
@@ -242,6 +255,7 @@ void vtkOpenGLRenderer::DeviceRender(void)
     // other windows might get rendered since the last time
     // a MakeCurrent was called.
     this->RenderWindow->MakeCurrent();
+    vtkOpenGLClearErrorMacro();
 
     // standard render method
     this->ClearLights();
@@ -257,7 +271,15 @@ void vtkOpenGLRenderer::DeviceRender(void)
 
     // clean up the model view matrix set up by the camera
     glMatrixMode(GL_MODELVIEW);
-    glPopMatrix();
+
+    GLint mvDepth;
+    glGetIntegerv(GL_MODELVIEW_STACK_DEPTH, &mvDepth);
+    if (mvDepth>1)
+      {
+      glPopMatrix();
+      }
+
+    vtkOpenGLCheckErrorMacro("failed after DeviceRender");
     }
 
   vtkTimerLog::MarkEndEvent("OpenGL Dev Render");
@@ -271,13 +293,24 @@ void vtkOpenGLRenderer::DeviceRender(void)
 // override this method.
 void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
 {
+  vtkOpenGLClearErrorMacro();
+
+  vtkOpenGLExtensionManager *extensions = NULL;
+
   if(this->UseDepthPeeling)
     {
+    vtkOpenGLRenderWindow *context
+      = vtkOpenGLRenderWindow::SafeDownCast(this->RenderWindow);
+    if (!context)
+      {
+      vtkErrorMacro("OpenGL render window is required.")
+      return;
+      }
+    extensions = context->GetExtensionManager();
+
     if(!this->DepthPeelingIsSupportedChecked)
       {
       this->DepthPeelingIsSupportedChecked=1;
-      vtkOpenGLExtensionManager *extensions=vtkOpenGLExtensionManager::New();
-      extensions->SetRenderWindow(this->RenderWindow);
 
       int supports_GL_1_3=extensions->ExtensionSupported("GL_VERSION_1_3");
       int supports_GL_1_4=extensions->ExtensionSupported("GL_VERSION_1_4");
@@ -351,6 +384,26 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
       glGetIntegerv(GL_ALPHA_BITS, &alphaBits);
       int supportsAtLeast8AlphaBits=alphaBits>=8;
 
+      // Mesa with the "Offscreen" renderer (i.e. OS Mesa) supports depth peeling
+      // in all versions tested: 7.10, 8.0, 8.0.5, 9.0.3, 9.1.3, 9.1.4, 9.1.5.
+      // With Mesa version 7 and the "Software Rasterizer" renderer, all
+      // Opacity/Translucent ctests fail with depth peeling.
+      // With Mesa 9.2.0 and the Gallium llvmpipe renderer, some of these tests fail.
+      // Mesa 8 with the GMA945 renderer supports depth peeling.
+      // ATI Radeon HD XXXXX on Windows chokes on PROXY_TEXTURE_RECTANGLE_ARB
+      // memory queries; however, if those are not used all the tests pass.
+      // ATI Radeon HD on Mac OS X handles PROXY_TEXTURE_RECTANGLE_ARB fine, but
+      // TestTranslucentLUTDepthPeeling fails, so leave it disabled on Apple.
+      int driver_support
+        = (!(extensions->DriverIsATI()
+        && (extensions->GetDriverGLVersionMajor() < 3))
+        && (!extensions->DriverIsMesa()
+        || extensions->DriverGLRendererHas("Offscreen")
+        || (extensions->DriverVersionAtLeast(6,5,3)
+        && !extensions->DriverGLRendererIs("Software Rasterizer")
+        && !extensions->DriverGLRendererHasToken("llvmpipe"))))
+        || extensions->GetIgnoreDriverBugs("ATI/Mesa depth peeling bug.");
+
       this->DepthPeelingIsSupported =
         supports_depth_texture &&
         supports_shadow &&
@@ -363,7 +416,8 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
         supports_multitexture &&
         supports_GL_ARB_texture_rectangle &&
         supports_edge_clamp &&
-        supportsAtLeast8AlphaBits;
+        supportsAtLeast8AlphaBits &&
+        driver_support;
 
       if(this->DepthPeelingIsSupported)
         {
@@ -407,6 +461,28 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
           }
 
         extensions->LoadExtension("GL_ARB_texture_rectangle");
+
+        // Some OpenGL implementations such as Mesa or ATI
+        // claim to support both GLSL and GL_ARB_texture_rectangle but
+        // don't actually support sampler2DRectShadow in GLSL code.
+        // To test that, we compile the shader; if it fails, we don't
+        // use depth peeling.
+        GLuint shader =
+          vtkgl::CreateShader(vtkgl::FRAGMENT_SHADER);
+        vtkgl::ShaderSource(
+          shader, 1,
+          const_cast<const char **>(&vtkOpenGLRenderer_PeelingFS), 0);
+        vtkgl::CompileShader(shader);
+        GLint params;
+        vtkgl::GetShaderiv(shader,vtkgl::COMPILE_STATUS,
+                           &params);
+        this->DepthPeelingIsSupported = params==GL_TRUE;
+        vtkgl::DeleteShader(shader);
+        if(!this->DepthPeelingIsSupported)
+          {
+          vtkDebugMacro("this OpenGL implementation does not support "
+                        "GL_ARB_texture_rectangle in GLSL code");
+          }
         }
       else
         {
@@ -455,69 +531,9 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
           {
           vtkDebugMacro(<<"at least 8 alpha bits is not supported");
           }
-        }
-      extensions->Delete();
-
-      if(this->DepthPeelingIsSupported)
-        {
-        // Some OpenGL implementations such as Mesa or ATI
-        // claim to support both GLSL and GL_ARB_texture_rectangle but
-        // don't actually support sampler2DRectShadow in a GLSL code.
-        // To test that, we compile the shader, if it fails, we don't use
-        // deph peeling
-        GLuint shader =
-          vtkgl::CreateShader(vtkgl::FRAGMENT_SHADER);
-        vtkgl::ShaderSource(
-          shader, 1,
-          const_cast<const char **>(&vtkOpenGLRenderer_PeelingFS), 0);
-        vtkgl::CompileShader(shader);
-        GLint params;
-        vtkgl::GetShaderiv(shader,vtkgl::COMPILE_STATUS,
-                           &params);
-        this->DepthPeelingIsSupported = params==GL_TRUE;
-        vtkgl::DeleteShader(shader);
-        if(!this->DepthPeelingIsSupported)
-          {
-          vtkDebugMacro("this OpenGL implementation does not support "
-                        "GL_ARB_texture_rectangle in GLSL code");
-          }
-        }
-      if(this->DepthPeelingIsSupported)
-        {
-        // Some OpenGL implementations are buggy so depth peeling does not work:
-        //  - ATI
-        //  - Mesa 6.5.2 and lower
-        // Do alpha blending always.
-        const char* gl_renderer =
-          reinterpret_cast<const char *>(glGetString(GL_RENDERER));
-        int isATI = strstr(gl_renderer, "ATI") != 0;
-
-        const char* gl_version =
-          reinterpret_cast<const char *>(glGetString(GL_VERSION));
-        if(const char* mesa_version = strstr(gl_version, "Mesa"))
-          {
-          // Mesa versions 6.5.3 and higher work.  Versions much lower
-          // than 6.5.2 do not report support for the extensions to
-          // get this far.  Therefore if parsing of the version fails
-          // just assume it is a higher version that changed the
-          // format of the version string.
-          int mesa_major = 0;
-          int mesa_minor = 0;
-          int mesa_patch = 0;
-          if(sscanf(mesa_version, "Mesa %d.%d.%d",
-                    &mesa_major, &mesa_minor, &mesa_patch) >= 2)
-            {
-            if(mesa_major  < 6 ||
-               (mesa_major == 6 && mesa_major  < 5) ||
-               (mesa_major == 6 && mesa_minor == 5 && mesa_patch < 3))
-              {
-              this->DepthPeelingIsSupported = 0;
-              }
-            }
-          }
-        else if(isATI)
+        if (!driver_support)
           {
-          this->DepthPeelingIsSupported = 0;
+          vtkDebugMacro(<<"buggy driver (Mesa < 6.5.3 or ATI)");
           }
         }
       }
@@ -548,6 +564,80 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
       {
       this->DepthFormat=vtkgl::DEPTH_COMPONENT24_ARB;
       }
+
+    vtkgl::ActiveTexture(vtkgl::TEXTURE1);
+
+    // check if the GPU supports a viewport sized texture in
+    // the formats we will use. If not then we should fallback
+    // to alpha blending.
+
+    // This check is really an anachronism since modern GPUs
+    // typically support full-screen-sized textures in all the
+    // common formats.
+
+    GLint depthTexSupport = 1;
+    GLint colorTexSupport = 1;
+
+    if ( extensions->DriverIsATI()
+       && !extensions->GetIgnoreDriverBugs("ATI proxy query bug.") )
+      {
+      // The ATI Radeon HD drivers currently choke on the proxy
+      // query, but depth peeling has been confirmed to work. For
+      // those drivers, fall back on the weaker max texture size
+      // check.
+      GLint maxTexSize = 0;
+      glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxTexSize);
+      if ( (this->ViewportWidth > maxTexSize)
+        || (this->ViewportHeight > maxTexSize) )
+        {
+        depthTexSupport = 0;
+        colorTexSupport = 0;
+        }
+      }
+    else
+      {
+      // Not a buggy ATI driver, it's OK to make the proxy query.
+      GLuint proxyQueryTex = 0;
+      glGenTextures(1, &proxyQueryTex);
+      glBindTexture(vtkgl::TEXTURE_RECTANGLE_ARB, proxyQueryTex);
+
+      // support for depth buffer format
+      glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0, this->DepthFormat,
+                   this->ViewportWidth, this->ViewportHeight, 0,
+                   GL_DEPTH_COMPONENT, GL_UNSIGNED_INT,0);
+
+      glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0,
+                               GL_TEXTURE_WIDTH, &depthTexSupport);
+
+      // support for color buffer
+      glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
+                   this->ViewportWidth, this->ViewportHeight, 0, GL_RGBA,
+                   GL_UNSIGNED_BYTE, 0);
+
+      glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0,
+                               GL_TEXTURE_WIDTH,&colorTexSupport);
+
+      glBindTexture(vtkgl::TEXTURE_RECTANGLE_ARB, 0);
+      glDeleteTextures(1, &proxyQueryTex);
+      }
+
+    if (!(depthTexSupport && colorTexSupport))
+      {
+      // The GPU does not support a viewport-sized texture in these formats.
+      // Fall back to the alpha blending technique instead.
+      vtkWarningMacro(
+          << "GPU support for a "
+          << this->ViewportWidth << "x"
+          << this->ViewportHeight << " texture: depth buffer "
+          << (depthTexSupport?"yes":"no") << ", color buffer "
+          << (colorTexSupport?"yes":"no"));
+
+      vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+      this->LastRenderingUsedDepthPeeling = 0;
+      this->UpdateTranslucentPolygonalGeometry();
+      return;
+      }
+
     // 1. Grab the RGBAZ of the opaque layer.
     GLuint opaqueLayerZ=0;
     GLuint opaqueLayerRgba=0;
@@ -556,7 +646,6 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
 
     glGenTextures(1,&opaqueLayerRgba);
     // opaque z format
-    vtkgl::ActiveTexture(vtkgl::TEXTURE1 );
     glBindTexture(vtkgl::TEXTURE_RECTANGLE_ARB,opaqueLayerZ);
     glTexParameteri(vtkgl::TEXTURE_RECTANGLE_ARB,GL_TEXTURE_MIN_FILTER,
                     GL_NEAREST);
@@ -574,62 +663,32 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
                     GL_LESS);
 
     // Allocate memory
-    glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,this->DepthFormat,
-                 this->ViewportWidth,this->ViewportHeight,
-                 0,GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, 0);
-    GLint width;
-    glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,
-                             GL_TEXTURE_WIDTH,&width);
-    if(width==0)
-      {
-      vtkDebugMacro(<<"not enough GPU RAM for opaque z");
-      // not enough GPU RAM. Do alpha blending technique instead
-      glDeleteTextures(1,&opaqueLayerRgba);
-      glDeleteTextures(1,&opaqueLayerZ);
-      this->LastRenderingUsedDepthPeeling=0;
-      vtkgl::ActiveTexture(vtkgl::TEXTURE0 );
-      this->UpdateTranslucentPolygonalGeometry();
-      return;
-      }
     glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB,0,this->DepthFormat,
                  this->ViewportWidth,this->ViewportHeight, 0,
                  GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, 0);
+
     // Grab the z-buffer
     glCopyTexSubImage2D(vtkgl::TEXTURE_RECTANGLE_ARB, 0, 0, 0, this->ViewportX,
                         this->ViewportY,this->ViewportWidth,
                         this->ViewportHeight);
+
     glBindTexture(vtkgl::TEXTURE_RECTANGLE_ARB,opaqueLayerRgba);
     // opaque rgba format
     glTexParameteri(vtkgl::TEXTURE_RECTANGLE_ARB,GL_TEXTURE_MIN_FILTER,
                     GL_NEAREST);
     glTexParameteri(vtkgl::TEXTURE_RECTANGLE_ARB,GL_TEXTURE_MAG_FILTER,
                     GL_NEAREST);
+
     // Allocate memory
-    glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
-                 this->ViewportWidth,this->ViewportHeight,
-                 0,GL_RGBA, GL_UNSIGNED_BYTE, 0);
-    glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,
-                             GL_TEXTURE_WIDTH,&width);
-    if(width==0)
-      {
-      vtkDebugMacro(<<"not enough GPU RAM for opaque rgba");
-      // not enough GPU RAM. Do alpha blending technique instead
-      glDeleteTextures(1,&opaqueLayerRgba);
-      glDeleteTextures(1,&opaqueLayerZ);
-      this->LastRenderingUsedDepthPeeling=0;
-      vtkgl::ActiveTexture(vtkgl::TEXTURE0 );
-      this->UpdateTranslucentPolygonalGeometry();
-      return;
-      }
+    glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
+                 this->ViewportWidth,this->ViewportHeight, 0, GL_RGBA,
+                 GL_UNSIGNED_BYTE, 0);
 
     // Have to be set before a call to UpdateTranslucentPolygonalGeometry()
     // because UpdateTranslucentPolygonalGeometry() will eventually call
     // vtkOpenGLActor::Render() that uses this flag.
     this->LastRenderingUsedDepthPeeling=1;
 
-    glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
-                 this->ViewportWidth,this->ViewportHeight, 0, GL_RGBA,
-                 GL_UNSIGNED_BYTE, 0);
     // Grab the rgba-buffer
     glCopyTexSubImage2D(vtkgl::TEXTURE_RECTANGLE_ARB, 0, 0, 0, this->ViewportX,
                         this->ViewportY,this->ViewportWidth,
@@ -787,6 +846,8 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
     glDeleteTextures(1,&opaqueLayerRgba);
     glDeleteTextures(1,&opaqueLayerZ);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after DeviceRenderTranslucentPolygonalGeometry");
 }
 
 // ----------------------------------------------------------------------------
@@ -795,6 +856,7 @@ void vtkOpenGLRenderer::DeviceRenderTranslucentPolygonalGeometry()
 void vtkOpenGLRenderer::CheckCompilation(
   unsigned int fragmentShader)
 {
+  vtkOpenGLClearErrorMacro();
   GLuint fs=static_cast<GLuint>(fragmentShader);
   GLint params;
   vtkgl::GetShaderiv(fs,vtkgl::COMPILE_STATUS,&params);
@@ -819,6 +881,7 @@ void vtkOpenGLRenderer::CheckCompilation(
       vtkErrorMacro(<<"no log");
       }
     }
+  vtkOpenGLCheckErrorMacro("failed after CheckCompilation");
 }
 
 // ----------------------------------------------------------------------------
@@ -831,6 +894,8 @@ int vtkOpenGLRenderer::RenderPeel(int layer)
 {
   assert("pre: positive_layer" && layer>=0);
 
+  vtkOpenGLClearErrorMacro();
+
   GLbitfield mask=GL_COLOR_BUFFER_BIT;
   if(layer>0)
     {
@@ -935,7 +1000,6 @@ int vtkOpenGLRenderer::RenderPeel(int layer)
     vtkgl::UseProgram(0);
     }
 
-  GLint width;
   vtkgl::ActiveTexture(vtkgl::TEXTURE1 );
   if(layer==0)
     {
@@ -961,19 +1025,8 @@ int vtkOpenGLRenderer::RenderPeel(int layer)
                       vtkgl::TEXTURE_COMPARE_FUNC,
                       GL_GREATER);
 
-      // Allocate memory
-      glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,this->DepthFormat,
-                   this->ViewportWidth,this->ViewportHeight,
-                   0,GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, 0);
-      glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,
-                               GL_TEXTURE_WIDTH,&width);
-      if(width==0)
-        {
-        // not enough GPU RAM. Use alpha blending technique instead
-        glDeleteTextures(1,&transparentLayerZ);
-        this->TransparentLayerZ=0;
-        return 0;
-        }
+      // Allocate memory. Note: GPU support for this
+      // texture format was verified above.
       glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB,0,this->DepthFormat,
                    this->ViewportWidth,this->ViewportHeight, 0,
                    GL_DEPTH_COMPONENT, GL_UNSIGNED_INT, 0);
@@ -1000,19 +1053,8 @@ int vtkOpenGLRenderer::RenderPeel(int layer)
     glTexParameteri(vtkgl::TEXTURE_RECTANGLE_ARB,GL_TEXTURE_MAG_FILTER,
                     GL_NEAREST);
 
-    // Allocate memory
-    glTexImage2D(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
-                 this->ViewportWidth,this->ViewportHeight,
-                 0,GL_RGBA, GL_UNSIGNED_BYTE, 0);
-    glGetTexLevelParameteriv(vtkgl::PROXY_TEXTURE_RECTANGLE_ARB,0,
-                             GL_TEXTURE_WIDTH,&width);
-    if(width==0)
-      {
-      // not enough GPU RAM. Do alpha blending technique instead
-      glDeleteTextures(1,&rgba);
-      return 0;
-      }
-
+    // Allocate memory. Note: GPU support for this
+    // texture format was verified above.
     glTexImage2D(vtkgl::TEXTURE_RECTANGLE_ARB, 0, GL_RGBA8,
                  this->ViewportWidth,this->ViewportHeight, 0, GL_RGBA,
                  GL_UNSIGNED_BYTE, 0);
@@ -1023,10 +1065,12 @@ int vtkOpenGLRenderer::RenderPeel(int layer)
                         this->ViewportHeight);
     this->LayerList->List.push_back(rgba);
 
+    vtkOpenGLCheckErrorMacro("failed after RenderPeel");
     return 1;
     }
   else
     {
+    vtkOpenGLCheckErrorMacro("failed after RenderPeel");
     return 0;
     }
 }
@@ -1056,6 +1100,8 @@ void vtkOpenGLRenderer::PrintSelf(ostream& os, vtkIndent indent)
 
 void vtkOpenGLRenderer::Clear(void)
 {
+  vtkOpenGLClearErrorMacro();
+
   GLbitfield  clear_mask = 0;
 
   if (! this->Transparent())
@@ -1153,10 +1199,12 @@ void vtkOpenGLRenderer::Clear(void)
     glPopMatrix();
     glPopAttrib();
     }
+  vtkOpenGLCheckErrorMacro("failed after Clear");
 }
 
 void vtkOpenGLRenderer::StartPick(unsigned int pickFromSize)
 {
+  vtkOpenGLClearErrorMacro();
 
   int bufferSize = pickFromSize * 4;
 
@@ -1178,6 +1226,8 @@ void vtkOpenGLRenderer::StartPick(unsigned int pickFromSize)
   // initialize the pick names and add a 0 name, for no pick
   glInitNames();
   glPushName(0);
+
+  vtkOpenGLCheckErrorMacro("failed after StartPick");
 }
 
 void vtkOpenGLRenderer::ReleaseGraphicsResources(vtkWindow *w)
@@ -1191,6 +1241,7 @@ void vtkOpenGLRenderer::ReleaseGraphicsResources(vtkWindow *w)
 void vtkOpenGLRenderer::UpdatePickId()
 {
   glLoadName(this->CurrentPickId++);
+  vtkOpenGLCheckErrorMacro("failed after UpdatePick");
 }
 
 
@@ -1201,6 +1252,7 @@ void vtkOpenGLRenderer::DevicePickRender()
   // other windows might get rendered since the last time
   // a MakeCurrent was called.
   this->RenderWindow->MakeCurrent();
+  vtkOpenGLClearErrorMacro();
 
   // standard render method
   this->ClearLights();
@@ -1217,6 +1269,7 @@ void vtkOpenGLRenderer::DevicePickRender()
   // clean up the model view matrix set up by the camera
   glMatrixMode(GL_MODELVIEW);
   glPopMatrix();
+  vtkOpenGLCheckErrorMacro("failed after DevicePickRender");
 }
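
The hunks above all follow the same bracketing discipline: clear the GL error flag on entry and report any error raised before returning, so failures are attributed to the method that caused them rather than to whatever happens to call glGetError() next. A minimal sketch of that pattern, assuming the class derives from vtkObject so the macro's internal vtkErrorMacro resolves (the class and method names are illustrative only, not part of VTK):

    #include "vtkOpenGL.h"
    #include "vtkOpenGLError.h" // vtkOpenGLClearErrorMacro / vtkOpenGLCheckErrorMacro

    void vtkMyGLHelper::Render()
    {
      // discard any error left over from code that ran before this method
      vtkOpenGLClearErrorMacro();

      // ... the GL work belonging to this method ...
      glClear(GL_COLOR_BUFFER_BIT);

      // report, via vtkErrorMacro, if any call above raised a GL error
      vtkOpenGLCheckErrorMacro("failed after Render");
    }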
 
 
diff --git a/Rendering/OpenGL/vtkOpenGLRepresentationPainter.cxx b/Rendering/OpenGL/vtkOpenGLRepresentationPainter.cxx
index fc0b78e..c8ed615 100644
--- a/Rendering/OpenGL/vtkOpenGLRepresentationPainter.cxx
+++ b/Rendering/OpenGL/vtkOpenGLRepresentationPainter.cxx
@@ -23,6 +23,7 @@
 #include "vtkProperty.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkOpenGLRepresentationPainter);
 
@@ -42,6 +43,8 @@ void vtkOpenGLRepresentationPainter::RenderInternal(vtkRenderer *renderer,
                                                     unsigned long typeflags,
                                                     bool forceCompileOnly)
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkProperty* prop = actor->GetProperty();
   int rep = prop->GetRepresentation();
   int reset_needed = 0;
@@ -122,6 +125,7 @@ void vtkOpenGLRepresentationPainter::RenderInternal(vtkRenderer *renderer,
 
     glPopAttrib(); //(GL_CURRENT_BIT|GL_LIGHTING|GL_ENABLE_BIT)
     }
+  vtkOpenGLCheckErrorMacro("failed after RenderInternal");
 }
 //-----------------------------------------------------------------------------
 void vtkOpenGLRepresentationPainter::PrintSelf(ostream& os, vtkIndent indent)
diff --git a/Rendering/OpenGL/vtkOpenGLScalarsToColorsPainter.cxx b/Rendering/OpenGL/vtkOpenGLScalarsToColorsPainter.cxx
index 1762fff..4acc6e5 100644
--- a/Rendering/OpenGL/vtkOpenGLScalarsToColorsPainter.cxx
+++ b/Rendering/OpenGL/vtkOpenGLScalarsToColorsPainter.cxx
@@ -31,6 +31,7 @@
 #include "vtkRenderWindow.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 #include "vtkOpenGLExtensionManager.h"
 
 #include "vtkgl.h" // vtkgl namespace
@@ -103,6 +104,8 @@ void vtkOpenGLScalarsToColorsPainter::RenderInternal(vtkRenderer *renderer,
                                                      unsigned long typeflags,
                                                      bool forceCompileOnly)
 {
+  vtkOpenGLClearErrorMacro();
+
   // If we have not yet set the alpha bit planes, do it based on the
   // render window so we're not querying GL in the middle of render.
   if (this->AlphaBitPlanes < 0)
@@ -231,11 +234,18 @@ void vtkOpenGLScalarsToColorsPainter::RenderInternal(vtkRenderer *renderer,
 
   this->Superclass::RenderInternal(renderer, actor, typeflags, forceCompileOnly);
 
+  if (this->InterpolateScalarsBeforeMapping && this->SupportsSeparateSpecularColor)
+    {
+    glLightModeli(vtkgl::LIGHT_MODEL_COLOR_CONTROL, vtkgl::SINGLE_COLOR);
+    glDisable(vtkgl::COLOR_SUM);
+    }
+
   if (pre_multiplied_by_alpha || this->InterpolateScalarsBeforeMapping)
     {
     // restore the blend function & lights
     glPopAttrib();
     }
+  vtkOpenGLCheckErrorMacro("failed after RenderInternal");
 }
 
 //-----------------------------------------------------------------------------
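
The added block only restores GL state; the matching enable is issued earlier in RenderInternal, outside this hunk. A hedged sketch of the symmetric pair under the same two conditions, where the first half is assumed from the usual separate-specular setup and is not part of this patch:

    if (this->InterpolateScalarsBeforeMapping && this->SupportsSeparateSpecularColor)
      {
      // assumed setup earlier in RenderInternal: add specular after texturing
      glLightModeli(vtkgl::LIGHT_MODEL_COLOR_CONTROL, vtkgl::SEPARATE_SPECULAR_COLOR);
      glEnable(vtkgl::COLOR_SUM);
      }

    // ... superclass renders the textured, scalar-colored geometry ...

    if (this->InterpolateScalarsBeforeMapping && this->SupportsSeparateSpecularColor)
      {
      // restore added by this patch
      glLightModeli(vtkgl::LIGHT_MODEL_COLOR_CONTROL, vtkgl::SINGLE_COLOR);
      glDisable(vtkgl::COLOR_SUM);
      }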
diff --git a/Rendering/OpenGL/vtkOpenGLState.cxx b/Rendering/OpenGL/vtkOpenGLState.cxx
index 690e565..8c23246 100644
--- a/Rendering/OpenGL/vtkOpenGLState.cxx
+++ b/Rendering/OpenGL/vtkOpenGLState.cxx
@@ -60,6 +60,7 @@ void vtkOpenGLState::Update()
   GLint ivalues[4];
 
   this->ErrorCode = glGetError(); // this change the state..
+
   // Texture environment
   // Has to be restored to this value.
   glGetIntegerv(vtkgl::ACTIVE_TEXTURE, ivalues);
diff --git a/Rendering/OpenGL/vtkOpenGLTexture.cxx b/Rendering/OpenGL/vtkOpenGLTexture.cxx
index 6e34297..1a22953 100644
--- a/Rendering/OpenGL/vtkOpenGLTexture.cxx
+++ b/Rendering/OpenGL/vtkOpenGLTexture.cxx
@@ -27,14 +27,16 @@
 #include "vtkTransform.h"
 #include "vtkPixelBufferObject.h"
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 #include "vtkgl.h" // vtkgl namespace
 
 #include <math.h>
 
+
+// ----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkOpenGLTexture);
 
 // ----------------------------------------------------------------------------
-// Initializes an instance, generates a unique index.
 vtkOpenGLTexture::vtkOpenGLTexture()
 {
   this->Index = 0;
@@ -66,11 +68,13 @@ void vtkOpenGLTexture::Initialize(vtkRenderer* vtkNotUsed(ren))
 
 // ----------------------------------------------------------------------------
 // Release the graphics resources used by this texture.
-void vtkOpenGLTexture::ReleaseGraphicsResources(vtkWindow *renWin)
+void vtkOpenGLTexture::ReleaseGraphicsResources(vtkWindow *win)
 {
-  if (this->Index && renWin && renWin->GetMapped())
+  if (this->Index && win && win->GetMapped())
     {
-    static_cast<vtkRenderWindow *>(renWin)->MakeCurrent();
+    vtkRenderWindow *renWin = dynamic_cast<vtkRenderWindow *>(win);
+    renWin->MakeCurrent();
+    vtkOpenGLClearErrorMacro();
 #ifdef GL_VERSION_1_1
     // free any textures
     if (glIsTexture(static_cast<GLuint>(this->Index)))
@@ -87,7 +91,9 @@ void vtkOpenGLTexture::ReleaseGraphicsResources(vtkWindow *renWin)
       glDeleteLists(this->Index,1);
       }
 #endif
+    vtkOpenGLCheckErrorMacro("failed after ReleaseGraphicsResources");
     }
+
   this->Index = 0;
   this->RenderWindow = NULL;
   this->CheckedHardwareSupport = false;
@@ -109,7 +115,6 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
   vtkImageData *input = this->GetInput();
 
   this->Initialize(ren);
-
   // Need to reload the texture.
   // There used to be a check on the render window's mtime, but
   // this is too broad of a check (e.g. it would cause all textures
@@ -119,6 +124,8 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
   vtkOpenGLRenderWindow* renWin =
     static_cast<vtkOpenGLRenderWindow*>(ren->GetRenderWindow());
 
+  vtkOpenGLClearErrorMacro();
+
   if (this->BlendingMode != VTK_TEXTURE_BLENDING_MODE_NONE
      && vtkgl::ActiveTexture)
     {
@@ -168,6 +175,7 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
         glTexEnvf(GL_TEXTURE_ENV, vtkgl::COMBINE_ALPHA, GL_ADD);
         }
       }
+    vtkOpenGLCheckErrorMacro("failed at glTexEnvf");
     }
 
   if (this->GetMTime() > this->LoadTime.GetMTime() ||
@@ -193,6 +201,15 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
       return;
       }
 
+    // free any old display lists (from the old context)
+    // make the new context current before we mess with opengl
+    if (this->RenderWindow)
+      {
+      this->ReleaseGraphicsResources(this->RenderWindow);
+      }
+    this->RenderWindow = renWin;
+    this->RenderWindow->MakeCurrent();
+
     // get some info
     input->GetDimensions(size);
 
@@ -247,7 +264,6 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
         }
       }
 
-
     if (!this->CheckedHardwareSupport)
       {
       vtkOpenGLExtensionManager *m = renWin->GetExtensionManager();
@@ -262,6 +278,7 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
 
     GLint maxDimGL;
     glGetIntegerv(GL_MAX_TEXTURE_SIZE, &maxDimGL);
+    vtkOpenGLCheckErrorMacro("failed at glGetIntegerv");
     // if larger than permitted by the graphics library then must resample
     bool resampleNeeded=xsize > maxDimGL || ysize > maxDimGL;
     if(resampleNeeded)
@@ -288,28 +305,22 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
         resultData = dataPtr;
         }
 
-    // free any old display lists (from the old context)
-    if (this->RenderWindow)
-      {
-      this->ReleaseGraphicsResources(this->RenderWindow);
-      }
-
-     this->RenderWindow = ren->GetRenderWindow();
-
-    // make the new context current before we mess with opengl
-    this->RenderWindow->MakeCurrent();
 
     // define a display list for this texture
     // get a unique display list id
 
 #ifdef GL_VERSION_1_1
     glGenTextures(1, &tempIndex);
+    vtkOpenGLCheckErrorMacro("failed at glGenTextures");
     this->Index = static_cast<long>(tempIndex);
     glBindTexture(GL_TEXTURE_2D, this->Index);
+    vtkOpenGLCheckErrorMacro("failed at glBindTexture");
 #else
     this->Index = glGenLists(1);
     glDeleteLists(static_cast<GLuint>(this->Index), static_cast<GLsizei>(0));
+    vtkOpenGLCheckErrorMacro("failed at glDeleteLists");
     glNewList(static_cast<GLuint>(this->Index), GL_COMPILE);
+    vtkOpenGLCheckErrorMacro("failed at glNewList");
 #endif
     //seg fault protection for those wackos that don't use an
     //opengl render window
@@ -321,10 +332,8 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
 
     if (this->Interpolate)
       {
-      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
-                      GL_LINEAR);
-      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER,
-                      GL_LINEAR);
+      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+      glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
       }
     else
       {
@@ -343,10 +352,8 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
            (manager->ExtensionSupported("GL_VERSION_1_2") ||
             manager->ExtensionSupported("GL_EXT_texture_edge_clamp")))
         {
-        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,
-                        vtkgl::CLAMP_TO_EDGE);
-        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,
-                        vtkgl::CLAMP_TO_EDGE);
+        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, vtkgl::CLAMP_TO_EDGE);
+        glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, vtkgl::CLAMP_TO_EDGE);
         }
       else
         {
@@ -354,6 +361,7 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
         glTexParameterf(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
         }
       }
+    vtkOpenGLCheckErrorMacro("failed at glTexParameterf");
     int internalFormat = bytesPerPixel;
     switch (bytesPerPixel)
       {
@@ -398,13 +406,13 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
       vtkIdType increments[2];
       increments[0] = 0;
       increments[1] = 0;
-      this->PBO->Upload2D(VTK_UNSIGNED_CHAR, resultData, dims, bytesPerPixel,
-        increments);
+      this->PBO->Upload2D(VTK_UNSIGNED_CHAR, resultData, dims, bytesPerPixel, increments);
       // non-blocking call
       this->PBO->Bind(vtkPixelBufferObject::UNPACKED_BUFFER);
       glTexImage2D(GL_TEXTURE_2D, 0, internalFormat,
                    xsize, ysize, 0, format,
                    GL_UNSIGNED_BYTE, 0);
+      vtkOpenGLCheckErrorMacro("failed at glTexImage2D");
       this->PBO->UnBind();
       }
     else
@@ -413,10 +421,11 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
       glTexImage2D(GL_TEXTURE_2D, 0 , internalFormat,
                    xsize, ysize, 0, format, GL_UNSIGNED_BYTE,
                    static_cast<const GLvoid *>(resultData));
-
+      vtkOpenGLCheckErrorMacro("failed at glTexImage2D");
       }
 #ifndef GL_VERSION_1_1
     glEndList ();
+    vtkOpenGLCheckErrorMacro("failed at glEndList");
 #endif
     // modify the load time to the current time
     this->LoadTime.Modified();
@@ -432,8 +441,10 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
   // execute the display list that uses creates the texture
 #ifdef GL_VERSION_1_1
   glBindTexture(GL_TEXTURE_2D, this->Index);
+  vtkOpenGLCheckErrorMacro("failed at glBindTexture");
 #else
   glCallList(this->Index);
+  vtkOpenGLCheckErrorMacro("failed at glCallList");
 #endif
 
   // don't accept fragments if they have zero opacity. this will stop the
@@ -479,15 +490,19 @@ void vtkOpenGLTexture::Load(vtkRenderer *ren)
     vtkgl::Uniform1i(uUseTexture, 1);
     vtkgl::Uniform1i(uTexture, 0); // active texture 0
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Load");
 }
 
 // ----------------------------------------------------------------------------
-void vtkOpenGLTexture::PostRender(vtkRenderer *vtkNotUsed(ren))
+void vtkOpenGLTexture::PostRender(vtkRenderer *ren)
 {
+  (void)ren;
   if (this->GetInput() && this->PremultipliedAlpha)
     {
     // restore the blend function
     glPopAttrib();
+    vtkOpenGLCheckErrorMacro("failed after PostRender");
     }
 }
 
@@ -522,6 +537,8 @@ unsigned char *vtkOpenGLTexture::ResampleToPowerOfTwo(int &xs,
   unsigned char *tptr, *p, *p1, *p2, *p3, *p4;
   int jOffset, iIdx, jIdx;
   double pcoords[3], rm, sm, w0, w1, w2, w3;
+  int yInIncr = xs;
+  int xInIncr = 1;
 
   int xsize = FindPowerOfTwo(xs);
   int ysize = FindPowerOfTwo(ys);
@@ -550,7 +567,15 @@ unsigned char *vtkOpenGLTexture::ResampleToPowerOfTwo(int &xs,
     jIdx = static_cast<int>(pcoords[1]);
     if (jIdx >= (ys-1)) //make sure to interpolate correctly at edge
       {
-      jIdx = ys - 2;
+      if (ys == 1)
+        {
+        jIdx = 0;
+        yInIncr = 0;
+        }
+      else
+        {
+        jIdx = ys - 2;
+        }
       pcoords[1] = 1.0;
       }
     else
@@ -566,7 +591,15 @@ unsigned char *vtkOpenGLTexture::ResampleToPowerOfTwo(int &xs,
       iIdx = static_cast<int>(pcoords[0]);
       if (iIdx >= (xs-1))
         {
-        iIdx = xs - 2;
+        if (xs == 1)
+          {
+          iIdx = 0;
+          xInIncr = 0;
+          }
+        else
+          {
+          iIdx = xs - 2;
+          }
         pcoords[0] = 1.0;
         }
       else
@@ -577,9 +610,9 @@ unsigned char *vtkOpenGLTexture::ResampleToPowerOfTwo(int &xs,
 
       // Get pointers to 4 surrounding pixels
       p1 = dptr + bpp*(iIdx + jOffset);
-      p2 = p1 + bpp;
-      p3 = p1 + bpp*xs;
-      p4 = p3 + bpp;
+      p2 = p1 + bpp*xInIncr;
+      p3 = p1 + bpp*yInIncr;
+      p4 = p3 + bpp*xInIncr;
 
       // Compute interpolation weights interpolate components
       w0 = rm*sm;
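
The xInIncr/yInIncr guard above keeps the four-neighbor lookup from stepping past the end of a 1-pixel-wide or 1-pixel-tall input. A standalone sketch of the same clamping idea, using hypothetical names and no VTK types:

    #include <cstddef>

    // Return the four bilinear neighbors of (iIdx, jIdx) in an xs-by-ys image
    // with bpp bytes per pixel. When a dimension is 1 the "next" neighbor
    // collapses onto the same pixel instead of running off the buffer.
    static void GetBilinearNeighbors(const unsigned char *dptr,
                                     int xs, int ys, int bpp,
                                     int iIdx, int jIdx,
                                     const unsigned char *p[4])
    {
      const int xInIncr = (xs == 1) ? 0 : 1;  // step to the next column, or stay
      const int yInIncr = (ys == 1) ? 0 : xs; // step to the next row, or stay
      p[0] = dptr + static_cast<std::size_t>(bpp) * (iIdx + jIdx * xs);
      p[1] = p[0] + static_cast<std::size_t>(bpp) * xInIncr;
      p[2] = p[0] + static_cast<std::size_t>(bpp) * yInIncr;
      p[3] = p[2] + static_cast<std::size_t>(bpp) * xInIncr;
    }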
diff --git a/Rendering/OpenGL/vtkOverlayPass.cxx b/Rendering/OpenGL/vtkOverlayPass.cxx
index 83bc4f5..483f906 100644
--- a/Rendering/OpenGL/vtkOverlayPass.cxx
+++ b/Rendering/OpenGL/vtkOverlayPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkOverlayPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkOverlayPass);
 
diff --git a/Rendering/OpenGL/vtkPixelBufferObject.cxx b/Rendering/OpenGL/vtkPixelBufferObject.cxx
index 7562236..8026455 100644
--- a/Rendering/OpenGL/vtkPixelBufferObject.cxx
+++ b/Rendering/OpenGL/vtkPixelBufferObject.cxx
@@ -21,6 +21,7 @@
 
 #include "vtkgl.h"
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 //#define VTK_PBO_DEBUG
 //#define VTK_PBO_TIMING
@@ -29,6 +30,8 @@
 #include "vtkTimerLog.h"
 #endif
 
+#include <cassert>
+
 // Mapping from Usage values to OpenGL values.
 
 static const GLenum OpenGLBufferObjectUsage[9]=
@@ -57,31 +60,74 @@ static const char *BufferObjectUsageAsString[9]=
   "DynamicCopy"
 };
 
+// access modes
+const GLenum OpenGLBufferObjectAccess[2]=
+{
+  vtkgl::WRITE_ONLY,
+  vtkgl::READ_ONLY
+};
+
+// targets
+const GLenum OpenGLBufferObjectTarget[2]=
+{
+  vtkgl::PIXEL_UNPACK_BUFFER_ARB,
+  vtkgl::PIXEL_PACK_BUFFER_ARB
+};
+
+
 #ifdef  VTK_PBO_DEBUG
 #include <pthread.h> // for debugging with MPI, pthread_self()
 #endif
 
+// doubles are converted to float behind the
+// scenes, so report sizeof(float) for double
+template< class T >
+class vtksizeof
+{
+public:
+  static int GetSize() { return sizeof(T); }
+};
+
+template<>
+class vtksizeof< double >
+{
+public:
+  static int GetSize() { return sizeof(float); }
+};
+
+static int vtkGetSize(int type)
+{
+  switch (type)
+    {
+    vtkTemplateMacro(
+      return ::vtksizeof<VTK_TT>::GetSize();
+      );
+    }
+  return 0;
+}
+
+//----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkPixelBufferObject);
+
 //----------------------------------------------------------------------------
 vtkPixelBufferObject::vtkPixelBufferObject()
 {
   this->Handle = 0;
-  this->Context = 0;
+  this->Context = NULL;
   this->BufferTarget = 0;
-  this->Size=0;
-  this->Type=VTK_UNSIGNED_CHAR;
-  this->Usage=StaticDraw;
+  this->Components = 0;
+  this->Size = 0;
+  this->Type = VTK_UNSIGNED_CHAR;
+  this->Usage = StaticDraw;
 }
 
 //----------------------------------------------------------------------------
 vtkPixelBufferObject::~vtkPixelBufferObject()
 {
-  this->SetContext(0);
+  this->DestroyBuffer();
 }
 
 //----------------------------------------------------------------------------
-// Description:
-// Returns if the context supports the required extensions.
 bool vtkPixelBufferObject::IsSupported(vtkRenderWindow* win)
 {
   vtkOpenGLRenderWindow* renWin = vtkOpenGLRenderWindow::SafeDownCast(win);
@@ -103,9 +149,15 @@ bool vtkPixelBufferObject::IsSupported(vtkRenderWindow* win)
 }
 
 //----------------------------------------------------------------------------
-bool vtkPixelBufferObject::LoadRequiredExtensions(
-  vtkOpenGLExtensionManager* mgr)
+bool vtkPixelBufferObject::LoadRequiredExtensions(vtkRenderWindow *renWin)
 {
+  vtkOpenGLRenderWindow* context =
+    vtkOpenGLRenderWindow::SafeDownCast(renWin);
+
+  if ( !context ) return false;
+
+  vtkOpenGLExtensionManager* mgr = context->GetExtensionManager();
+
   bool gl15=mgr->ExtensionSupported("GL_VERSION_1_5")==1;
   bool gl21=mgr->ExtensionSupported("GL_VERSION_2_1")==1;
 
@@ -135,26 +187,32 @@ bool vtkPixelBufferObject::LoadRequiredExtensions(
 //----------------------------------------------------------------------------
 void vtkPixelBufferObject::SetContext(vtkRenderWindow* renWin)
 {
-  if (this->Context == renWin)
+  // avoid pointless re-assignment
+  if (this->Context==renWin)
     {
     return;
     }
-
+  // free resource allocations
   this->DestroyBuffer();
-
-  vtkOpenGLRenderWindow* openGLRenWin =
+  this->Context = NULL;
+  this->Modified();
+  // all done if assigned null
+  if (!renWin)
+    {
+    return;
+    }
+  // check for support
+  vtkOpenGLRenderWindow* context =
     vtkOpenGLRenderWindow::SafeDownCast(renWin);
-  this->Context = openGLRenWin;
-  if (openGLRenWin)
+  if ( !context
+    || !this->LoadRequiredExtensions(renWin) )
     {
-    if (!this->LoadRequiredExtensions(openGLRenWin->GetExtensionManager()))
-      {
-      this->Context = 0;
-      vtkErrorMacro("Required OpenGL extensions not supported by the context.");
-      }
+    vtkErrorMacro("Required OpenGL extensions not supported by the context.");
+    return;
     }
-
-  this->Modified();
+  // update context
+  this->Context = renWin;
+  this->Context->MakeCurrent();
 }
 
 //----------------------------------------------------------------------------
@@ -164,13 +222,15 @@ vtkRenderWindow* vtkPixelBufferObject::GetContext()
 }
 
 //----------------------------------------------------------------------------
+void vtkPixelBufferObject::SetSize(unsigned int nTups, int nComps)
+{
+  this->Size = nTups*nComps;
+}
+
+//----------------------------------------------------------------------------
 void vtkPixelBufferObject::Bind(BufferType type)
 {
-  if (!this->Context)
-    {
-    vtkErrorMacro("No context specified. Cannot Bind.");
-    return;
-    }
+  assert(this->Context);
 
   this->CreateBuffer();
 
@@ -192,16 +252,17 @@ void vtkPixelBufferObject::Bind(BufferType type)
     }
   this->BufferTarget = target;
   vtkgl::BindBuffer(static_cast<GLenum>(this->BufferTarget), this->Handle);
-  vtkGraphicErrorMacro(this->Context,"after BindBuffer");
+  vtkOpenGLCheckErrorMacro("failed at glBindBuffer");
 }
 
 //----------------------------------------------------------------------------
 void vtkPixelBufferObject::UnBind()
 {
-  if (this->Context && this->Handle && this->BufferTarget)
+  assert(this->Context);
+  if (this->Handle && this->BufferTarget)
     {
     vtkgl::BindBuffer(this->BufferTarget, 0);
-    vtkGraphicErrorMacro(this->Context,"after BindBuffer");
+    vtkOpenGLCheckErrorMacro("failed at glBindBuffer(0)");
     this->BufferTarget = 0;
     }
 }
@@ -209,12 +270,11 @@ void vtkPixelBufferObject::UnBind()
 //----------------------------------------------------------------------------
 void vtkPixelBufferObject::CreateBuffer()
 {
-  this->Context->MakeCurrent();
   if (!this->Handle)
     {
     GLuint ioBuf;
     vtkgl::GenBuffers(1, &ioBuf);
-    vtkGraphicErrorMacro(this->Context,"after GenBuffers");
+    vtkOpenGLCheckErrorMacro("failed at glGenBuffers");
     this->Handle = ioBuf;
     }
 }
@@ -222,39 +282,20 @@ void vtkPixelBufferObject::CreateBuffer()
 //----------------------------------------------------------------------------
 void vtkPixelBufferObject::DestroyBuffer()
 {
+  // because we don't hold a reference to the render
+  // context we have no control over when it is
+  // destroyed. In fact it may be destroyed before
+  // we are (e.g. with smart pointers), in which case
+  // we should do nothing.
   if (this->Context && this->Handle)
     {
     GLuint ioBuf = static_cast<GLuint>(this->Handle);
     vtkgl::DeleteBuffers(1, &ioBuf);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteBuffers");
     }
   this->Handle = 0;
 }
 
-template< class T >
-class vtksizeof
-{
-public:
-  static int GetSize() { return sizeof(T); }
-};
-
-template<>
-class vtksizeof< double >
-{
-public:
-  static int GetSize() { return sizeof(float); }
-};
-
-static int vtkGetSize(int type)
-{
-  switch (type)
-    {
-    vtkTemplateMacro(
-      return ::vtksizeof<VTK_TT>::GetSize();
-      );
-    }
-  return 0;
-}
-
 //----------------------------------------------------------------------------
 template <class T>
 class vtkUpload3D
@@ -390,6 +431,106 @@ public:
 };
 
 //----------------------------------------------------------------------------
+void *vtkPixelBufferObject::MapBuffer(
+        unsigned int nbytes,
+        BufferType mode)
+{
+  // from vtk to opengl enums
+  GLenum target = OpenGLBufferObjectTarget[mode];
+  GLenum access = OpenGLBufferObjectAccess[mode];
+  GLenum usage = OpenGLBufferObjectUsage[mode];
+  GLuint size = static_cast<GLuint>(nbytes);
+  GLuint ioBuf = static_cast<GLuint>(this->Handle);
+
+  if (!ioBuf)
+    {
+    vtkgl::GenBuffers(1, &ioBuf);
+    vtkOpenGLCheckErrorMacro("failed at glGenBuffers");
+    this->Handle = static_cast<unsigned int>(ioBuf);
+    }
+  this->BufferTarget = 0;
+
+  // pointer to the mapped memory
+  vtkgl::BindBuffer(target, ioBuf);
+  vtkOpenGLCheckErrorMacro("failed at glBindBuffer");
+
+  vtkgl::BufferData(target, size, NULL, usage);
+  vtkOpenGLCheckErrorMacro("failed at glBufferData");
+
+  void *pPBO = vtkgl::MapBuffer(target, access);
+  vtkOpenGLCheckErrorMacro("failed at glMapBuffer");
+
+  vtkgl::BindBuffer(target, 0);
+
+  return pPBO;
+}
+
+//----------------------------------------------------------------------------
+void *vtkPixelBufferObject::MapBuffer(
+        int type,
+        unsigned int numtuples,
+        int comps,
+        BufferType mode)
+{
+  // record the requested layout (elements of 'type')
+  this->Size = numtuples*comps;
+  this->Type = type;
+  this->Components = comps;
+  unsigned int size = ::vtkGetSize(type)*this->Size;
+
+  return this->MapBuffer(size, mode);
+}
+
+//----------------------------------------------------------------------------
+void *vtkPixelBufferObject::MapBuffer(BufferType mode)
+{
+  // from vtk to opengl enum
+  GLuint ioBuf = static_cast<GLuint>(this->Handle);
+  if (!ioBuf)
+    {
+    vtkErrorMacro("Uninitialized object");
+    return NULL;
+    }
+  GLenum target = OpenGLBufferObjectTarget[mode];
+  GLenum access = OpenGLBufferObjectAccess[mode];
+
+  // pointer to the mapped memory
+  vtkgl::BindBuffer(target, ioBuf);
+  vtkOpenGLCheckErrorMacro("failed at glBindBuffer");
+
+  void *pPBO = vtkgl::MapBuffer(target, access);
+  vtkOpenGLCheckErrorMacro("failed at glMapBuffer");
+
+  vtkgl::BindBuffer(target, 0);
+  vtkOpenGLCheckErrorMacro("failed at glBindBuffer(0)");
+
+  this->BufferTarget = 0;
+
+  return pPBO;
+}
+
+//----------------------------------------------------------------------------
+void vtkPixelBufferObject::UnmapBuffer(BufferType mode)
+{
+  GLuint ioBuf = static_cast<GLuint>(this->Handle);
+  if (!ioBuf)
+    {
+    vtkErrorMacro("Uninitialized object");
+    return;
+    }
+  GLenum target = OpenGLBufferObjectTarget[mode];
+
+  vtkgl::BindBuffer(target, ioBuf);
+  vtkOpenGLCheckErrorMacro("failed at glBindBuffer");
+
+  vtkgl::UnmapBuffer(target);
+  vtkOpenGLCheckErrorMacro("failed at glUnmapBuffer");
+
+  vtkgl::BindBuffer(target, 0);
+  vtkOpenGLCheckErrorMacro("failed at glBindBuffer(0)");
+}
+
+//----------------------------------------------------------------------------
 bool vtkPixelBufferObject::Upload3D(
   int type, void* data,
   unsigned int dims[3],
@@ -402,16 +543,9 @@ bool vtkPixelBufferObject::Upload3D(
   vtkTimerLog *timer=vtkTimerLog::New();
   timer->StartTimer();
 #endif
-
-  if (!this->Context)
-    {
-    vtkErrorMacro("No context specified. Cannot upload data.");
-    return false;
-    }
+  assert(this->Context);
 
   this->CreateBuffer();
-
-//  this->Bind(vtkPixelBufferObject::PACKED_BUFFER);
   this->Bind(vtkPixelBufferObject::UNPACKED_BUFFER);
 
   unsigned int size;
@@ -425,6 +559,7 @@ bool vtkPixelBufferObject::Upload3D(
     size = dims[0]*dims[1]*dims[2]*static_cast<unsigned int>(components);
     }
 
+  this->Components = numComponents;
 
   if(data!=0)
     {
@@ -438,7 +573,7 @@ bool vtkPixelBufferObject::Upload3D(
   vtkgl::BufferData(this->BufferTarget,
                     size*static_cast<unsigned int>(::vtkGetSize(type)),
                     NULL,OpenGLBufferObjectUsage[this->Usage]);
-  vtkGraphicErrorMacro(this->Context,"");
+  vtkOpenGLCheckErrorMacro("failed at glBufferData");
   this->Type = type;
   if (this->Type == VTK_DOUBLE)
     {
@@ -446,50 +581,10 @@ bool vtkPixelBufferObject::Upload3D(
     }
   this->Size = size;
 
-#ifdef  VTK_PBO_DEBUG
-  GLint value;
-  glGetIntegerv(vtkgl::PIXEL_UNPACK_BUFFER_BINDING,&value);
-
-  cout << pthread_self() << "this->Handle=" << this->Handle << " pixel unpack buffer=" << value << endl;
-  glGetIntegerv(vtkgl::PIXEL_PACK_BUFFER_BINDING,&value);
-
-  cout << pthread_self() << "this->Handle=" << this->Handle << " pixel pack buffer=" << value << endl;
-
-  vtkgl::GetBufferParameteriv(vtkgl::PIXEL_PACK_BUFFER,vtkgl::BUFFER_MAPPED,&value);
-
-
-  cout << pthread_self() << "this->Handle=" << this->Handle << " packed buffer is";
-  if(value==GL_TRUE)
-    {
-    cout << " mapped." << endl;
-    }
-  else
-    {
-    cout << " not mapped." << endl;
-    }
-
-  vtkgl::GetBufferParameteriv(this->BufferTarget,vtkgl::BUFFER_MAPPED,&value);
-
-  cout << pthread_self() << "this->Handle=" << this->Handle << " buffer target is";
-  if(value==GL_TRUE)
-    {
-    cout << " mapped." << endl;
-    }
-  else
-    {
-    cout << " not mapped." << endl;
-    }
-#endif
   if (data)
     {
-#ifdef  VTK_PBO_DEBUG
-    cout << pthread_self() << "this->Handle=" << this->Handle  << " mapping" << endl;
-#endif
     void* ioMem = vtkgl::MapBuffer(this->BufferTarget, vtkgl::WRITE_ONLY);
-#ifdef  VTK_PBO_DEBUG
-    cout << pthread_self() << "this->Handle=" << this->Handle  << " mapped: ioMem=" << ioMem << endl;
-#endif
-    vtkGraphicErrorMacro(this->Context,"");
+    vtkOpenGLCheckErrorMacro("");
     switch (type)
       {
       vtkTemplateMacro(
@@ -499,19 +594,11 @@ bool vtkPixelBufferObject::Upload3D(
                                         components,componentList);
         );
       default:
-#ifdef  VTK_PBO_DEBUG
-        cout << pthread_self() << "this->Handle=" << this->Handle  << " WTF" << endl;
-#endif
+        vtkErrorMacro("unsupported vtk type");
         return false;
       }
-#ifdef  VTK_PBO_DEBUG
-    cout << pthread_self() << "this->Handle=" << this->Handle  << " unmapping" << endl;
-#endif
     vtkgl::UnmapBuffer(this->BufferTarget);
-#ifdef  VTK_PBO_DEBUG
-    cout << pthread_self() << "this->Handle=" << this->Handle  << " unmapped" << endl;
-#endif
-    vtkGraphicErrorMacro(this->Context,"");
+    vtkOpenGLCheckErrorMacro("failed at glUnmapBuffer");
     }
 
   this->UnBind();
@@ -525,38 +612,64 @@ bool vtkPixelBufferObject::Upload3D(
 }
 
 //----------------------------------------------------------------------------
-// Description:
-// Allocate the memory
-void vtkPixelBufferObject::Allocate(unsigned int size,
-                                    int type)
+void vtkPixelBufferObject::Allocate(
+        int type,
+        unsigned int numtuples,
+        int comps,
+        BufferType mode)
+{
+  assert(this->Context);
+
+  // from vtk to opengl enums
+  this->Size = numtuples*comps;
+  this->Type = type;
+  this->Components = comps;
+  unsigned int size = ::vtkGetSize(type)*this->Size;
+
+  this->Allocate(size, mode);
+}
+
+//----------------------------------------------------------------------------
+void vtkPixelBufferObject::Allocate(
+        unsigned int nbytes,
+        BufferType mode)
 {
-  if(this->Context!=0)
+  assert(this->Context);
+
+  // from vtk to opengl enums
+  GLenum target = OpenGLBufferObjectTarget[mode];
+  GLenum usage = OpenGLBufferObjectUsage[mode];
+  GLuint size = static_cast<GLuint>(nbytes);
+  GLuint ioBuf = static_cast<GLuint>(this->Handle);
+
+  if (!ioBuf)
     {
-    if(this->Size!=size)
-      {
-      this->Size=size;
-      this->Bind(vtkPixelBufferObject::PACKED_BUFFER);
-      vtkgl::BufferData(this->BufferTarget,size,NULL,
-                        OpenGLBufferObjectUsage[this->Usage]);
-      this->UnBind();
-      }
-    this->Type=type;
-    if (this->Type == VTK_DOUBLE)
-      {
-      this->Type = VTK_FLOAT;
-      }
+    vtkgl::GenBuffers(1, &ioBuf);
+    vtkOpenGLCheckErrorMacro("failed at glGenBuffers");
+    this->Handle = static_cast<unsigned int>(ioBuf);
     }
+  this->BufferTarget = 0;
+
+  vtkgl::BindBuffer(target, ioBuf);
+  vtkOpenGLCheckErrorMacro("failed at glBindBuffer");
+
+  vtkgl::BufferData(target, size, NULL, usage);
+  vtkOpenGLCheckErrorMacro("failed at glBufferData");
+
+  vtkgl::BindBuffer(target, 0);
 }
 
+
 //----------------------------------------------------------------------------
 void vtkPixelBufferObject::ReleaseMemory()
 {
-  if (this->Context && this->Handle)
-    {
-    this->Bind(vtkPixelBufferObject::PACKED_BUFFER);
-    vtkgl::BufferData(this->BufferTarget, 0, NULL, vtkgl::STREAM_DRAW);
-    this->Size = 0;
-    }
+  assert(this->Context);
+  assert(this->Handle);
+
+  this->Bind(vtkPixelBufferObject::PACKED_BUFFER);
+  vtkgl::BufferData(this->BufferTarget, 0, NULL, vtkgl::STREAM_DRAW);
+  vtkOpenGLCheckErrorMacro("failed at glBufferData");
+  this->Size = 0;
 }
 
 // ----------------------------------------------------------------------------
@@ -630,7 +743,9 @@ bool vtkPixelBufferObject::Download3D(
   vtkTimerLog *timer=vtkTimerLog::New();
   timer->StartTimer();
 #endif
-  if (!this->Handle || !this->Context)
+  assert(this->Context);
+
+  if (!this->Handle)
     {
     vtkErrorMacro("No GPU data available.");
     return false;
@@ -645,58 +760,9 @@ bool vtkPixelBufferObject::Download3D(
   this->Bind(vtkPixelBufferObject::PACKED_BUFFER);
 
 
-#ifdef  VTK_PBO_DEBUG
-  GLint value;
-  glGetIntegerv(vtkgl::PIXEL_UNPACK_BUFFER_BINDING,&value);
-
-  cout << pthread_self() << "d this->Handle=" << this->Handle << " pixel unpack buffer=" << value << endl;
-  glGetIntegerv(vtkgl::PIXEL_PACK_BUFFER_BINDING,&value);
-
-  cout << pthread_self() << "d this->Handle=" << this->Handle << " pixel pack buffer=" << value << endl;
-
-  vtkgl::GetBufferParameteriv(vtkgl::PIXEL_PACK_BUFFER,vtkgl::BUFFER_MAPPED,&value);
-
-
-  cout << pthread_self() << "d this->Handle=" << this->Handle << " packed buffer is";
-  if(value==GL_TRUE)
-    {
-    cout << " mapped." << endl;
-    }
-  else
-    {
-    cout << " not mapped." << endl;
-    }
-
-  vtkgl::GetBufferParameteriv(this->BufferTarget,vtkgl::BUFFER_MAPPED,&value);
-
-  cout << pthread_self() << "d this->Handle=" << this->Handle << " buffer target is";
-  if(value==GL_TRUE)
-    {
-    cout << " mapped." << endl;
-    }
-  else
-    {
-    cout << " not mapped." << endl;
-    }
-
-
-
-  cout << pthread_self() << "d this->Handle=" << this->Handle  << " mapping" << endl;
-#endif
   void* ioMem = vtkgl::MapBuffer(this->BufferTarget, vtkgl::READ_ONLY);
-#ifdef  VTK_PBO_DEBUG
-  cout << pthread_self() << "d this->Handle=" << this->Handle  << " mapped: ioMem=" << ioMem << endl;
-#endif
-  vtkGraphicErrorMacro(this->Context,"after MapBuffer");
-#ifdef  VTK_PBO_DEBUG
-  cout << pthread_self() << "d type="<< type << endl;
-  cout << pthread_self() << "d this->Type="<< this->Type << endl;
-#endif
+  vtkOpenGLCheckErrorMacro("failed at glMapBuffer");
 
-#ifdef  VTK_PBO_DEBUG
-  cout << pthread_self() << "d2 type="<< type << endl;
-  cout << pthread_self() << "d2 this->Type="<< this->Type << endl;
-#endif
   switch (type)
     {
     vtkTemplateMacro(
@@ -704,19 +770,11 @@ bool vtkPixelBufferObject::Download3D(
       ::vtkDownload3DSpe(this->Type,ioMem,odata,dims,numcomps,increments);
       );
     default:
-#ifdef  VTK_PBO_DEBUG
-      cout << pthread_self() << "d this->Handle=" << this->Handle  << " WTF" << endl;
-#endif
+      vtkErrorMacro("unsupported vtk type");
       return false;
     }
-#ifdef  VTK_PBO_DEBUG
-  cout << pthread_self() << "d this->Handle=" << this->Handle  << " unmapping" << endl;
-#endif
   vtkgl::UnmapBuffer(this->BufferTarget);
-#ifdef VTK_PBO_DEBUG
-  cout << pthread_self() << "d this->Handle=" << this->Handle  << " unmapped" << endl;
-#endif
-  vtkGraphicErrorMacro(this->Context,"after UnmapBuffer");
+  vtkOpenGLCheckErrorMacro("failed at glUnmapBuffer");
   this->UnBind();
 
 #ifdef VTK_PBO_TIMING
@@ -729,7 +787,6 @@ bool vtkPixelBufferObject::Download3D(
   return true;
 }
 
-
 //----------------------------------------------------------------------------
 void vtkPixelBufferObject::PrintSelf(ostream& os, vtkIndent indent)
 {
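
A minimal usage sketch of the mapping API added above; the render window, tuple count, and fill value are placeholders and error handling is elided:

    #include "vtkNew.h"
    #include "vtkPixelBufferObject.h"
    #include "vtkRenderWindow.h"

    void FillPBO(vtkRenderWindow *renWin)
    {
      vtkNew<vtkPixelBufferObject> pbo;
      pbo->SetContext(renWin); // loads extensions, makes the context current

      const unsigned int numTuples = 256;
      const int numComps = 4;

      // allocate and map UNPACKED (application -> PBO) storage for RGBA floats
      float *data = static_cast<float *>(
        pbo->MapUnpackedBuffer(VTK_FLOAT, numTuples, numComps));

      for (unsigned int i = 0; i < numTuples * numComps; ++i)
        {
        data[i] = 1.0f; // the application writes the mapped memory directly
        }

      // hand the buffer back to OpenGL; it can now source glTexImage2D etc.
      pbo->UnmapUnpackedBuffer();
    }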
diff --git a/Rendering/OpenGL/vtkPixelBufferObject.h b/Rendering/OpenGL/vtkPixelBufferObject.h
index 2e365ad..ff1dfe7 100644
--- a/Rendering/OpenGL/vtkPixelBufferObject.h
+++ b/Rendering/OpenGL/vtkPixelBufferObject.h
@@ -14,21 +14,24 @@
 =========================================================================*/
 // .NAME vtkPixelBufferObject - abstracts an OpenGL pixel buffer object.
 // .SECTION Description
-// Provides low-level access to GPU memory. Used to pass raw data to GPU.
-// The data is uploaded into a pixel buffer.
+// Provides low-level access to PBO mapped memory. Used to transfer raw data
+// between PBO mapped memory and the application. Once data is transferred to
+// the PBO it can then be transferred to the GPU (e.g. texture memory). Data may
+// be uploaded from the application into a pixel buffer or downloaded from the
+// pixel buffer to the application. The vtkTextureObject is used to transfer
+// data between the PBO and texture memory on the GPU.
 // .SECTION See Also
 // OpenGL Pixel Buffer Object Extension Spec (ARB_pixel_buffer_object):
 // http://www.opengl.org/registry/specs/ARB/pixel_buffer_object.txt
 // .SECTION Caveats
-// Since most GPUs don't support double format all double data is converted to
+// Since most GPUs don't support a double format, all double data is converted to
 // float and then uploaded.
-// DON'T PLAY WITH IT YET.
 
 #ifndef __vtkPixelBufferObject_h
 #define __vtkPixelBufferObject_h
 
-#include "vtkRenderingOpenGLModule.h" // For export macro
 #include "vtkObject.h"
+#include "vtkRenderingOpenGLModule.h" // For export macro
 #include "vtkWeakPointer.h" // needed for vtkWeakPointer.
 
 class vtkRenderWindow;
@@ -88,10 +91,10 @@ public:
   vtkSetMacro(Usage,int);
 
   // Description:
-  // Upload data to GPU.
+  // Upload data to PBO mapped memory.
   // The input data can be freed after this call.
   // The data ptr is treated as an 1D array with the given number of tuples and
-  // given number of components in each tuple to be copied to the GPU. increment
+  // given number of components in each tuple to be copied to the PBO mapped memory. increment
   // is the offset added after the last component in each tuple is transferred.
   // Look at the documentation for ContinuousIncrements in vtkImageData for
   // details about how increments are specified.
@@ -110,7 +113,7 @@ public:
     }
 
   // Description:
-  // Update data to GPU sourcing it from a 2D array.
+  // Update data to PBO mapped memory, sourcing it from a 2D array.
   // The input data can be freed after this call.
   // The data ptr is treated as a 2D array with increments indicating how to
   // iterate over the data.
@@ -133,7 +136,7 @@ public:
     }
 
   // Description:
-  // Update data to GPU sourcing it from a 3D array.
+  // Update data to PBO mapped memory, sourcing it from a 3D array.
   // The input data can be freed after this call.
   // The data ptr is treated as a 3D array with increments indicating how to
   // iterate over the data.
@@ -146,15 +149,23 @@ public:
                 int *componentList);
 
   // Description:
-  // Get the type with which the data is loaded into the GPU.
+  // Get the type with which the data is loaded into PBO mapped memory.
   // eg. VTK_FLOAT for float32, VTK_CHAR for byte, VTK_UNSIGNED_CHAR for
   // unsigned byte etc.
   vtkGetMacro(Type, int);
+  vtkSetMacro(Type, int);
+
+  // Description:
+  // Get the number of components used to initialize the buffer.
+  vtkGetMacro(Components, int);
+  vtkSetMacro(Components, int);
 
   // Description:
-  // Get the size of the data loaded into the GPU. Size is in the number of
-  // elements of the uploaded Type.
+  // Get the size of the data loaded into the PBO mapped memory. Size is
+  // the number of elements of the uploaded Type.
   vtkGetMacro(Size, unsigned int);
+  vtkSetMacro(Size, unsigned int);
+  void SetSize(unsigned int nTups, int nComps);
 
   // Description:
   // Get the openGL buffer handle.
@@ -207,7 +218,7 @@ public:
     int numcomps, vtkIdType increments[3]);
 
   // Description:
-  // For wrapping.
+  // Convenience methods for binding.
   void BindToPackedBuffer()
     { this->Bind(PACKED_BUFFER); }
 
@@ -218,12 +229,42 @@ public:
   // Inactivate the buffer.
   void UnBind();
 
+  // Description:
+  // Convenience api for mapping buffers to app address space.
+  // See also MapBuffer.
+  void *MapPackedBuffer()
+    { return this->MapBuffer(PACKED_BUFFER); }
+
+  void *MapPackedBuffer(int type, unsigned int numtuples, int comps)
+    { return this->MapBuffer(type, numtuples, comps, PACKED_BUFFER); }
+
+  void *MapPackedBuffer(unsigned int numbytes)
+    { return this->MapBuffer(numbytes, PACKED_BUFFER); }
+
+  void *MapUnpackedBuffer()
+    { return this->MapBuffer(UNPACKED_BUFFER); }
+
+  void *MapUnpackedBuffer(int type, unsigned int numtuples, int comps)
+    { return this->MapBuffer(type, numtuples, comps, UNPACKED_BUFFER); }
+
+  void *MapUnpackedBuffer(unsigned int numbytes)
+    { return this->MapBuffer(numbytes, UNPACKED_BUFFER); }
+
+  // Description:
+  // Convenience api for unmapping buffers from app address space.
+  // See also UnmapBuffer.
+  void UnmapUnpackedBuffer()
+    { this->UnmapBuffer(UNPACKED_BUFFER); }
+
+  void UnmapPackedBuffer()
+    { this->UnmapBuffer(PACKED_BUFFER); }
+
 //BTX
-  // We can't use just PACKED because this is a cygwin macro defined as
-  // __attribute__((packed))
+  // PACKED_BUFFER for download APP<-PBO
+  // UNPACKED_BUFFER for upload APP->PBO
   enum BufferType{
-    PACKED_BUFFER,
-    UNPACKED_BUFFER
+    UNPACKED_BUFFER=0,
+    PACKED_BUFFER
   };
 
   // Description:
@@ -231,9 +272,32 @@ public:
   void Bind(BufferType buffer);
 
   // Description:
-  // Allocate the memory. size is in number of bytes. type is a VTK type.
-  void Allocate(unsigned int size,
-                int type);
+  // Map the buffer to our address space. Returns a pointer to the mapped memory
+  // for read/write access. If type, tuples and components are specified, new
+  // buffer data will be allocated; otherwise the current allocation is mapped.
+  // When finished call UnmapBuffer.
+  void *MapBuffer(int type, unsigned int numtuples, int comps, BufferType mode);
+  void *MapBuffer(unsigned int numbytes, BufferType mode);
+  void *MapBuffer(BufferType mode);
+
+  // Description:
+  // Un-map the buffer from our address space; OpenGL can then use/reclaim the
+  // buffer contents.
+  void UnmapBuffer(BufferType mode);
+
+  // Description:
+  // Allocate PACKED/UNPACKED memory to hold numTuples*numComponents of vtkType.
+  void Allocate(
+        int vtkType,
+        unsigned int numtuples,
+        int comps,
+        BufferType mode);
+
+  // Description:
+  // Allocate PACKED/UNPACKED memory to hold nBytes of data.
+  void Allocate(
+        unsigned int nbytes,
+        BufferType mode);
 
   // Description:
   // Release the memory allocated without destroying the PBO handle.
@@ -241,6 +305,7 @@ public:
 
   // Description:
   // Returns if the context supports the required extensions.
+  // Extensions will be loaded when the context is set.
   static bool IsSupported(vtkRenderWindow* renWin);
 
 //ETX
@@ -252,7 +317,7 @@ protected:
   // Description:
   // Loads all required OpenGL extensions. Must be called every time a new
   // context is set.
-  bool LoadRequiredExtensions(vtkOpenGLExtensionManager* mgr);
+  bool LoadRequiredExtensions(vtkRenderWindow* renWin);
 
   // Description:
   // Create the pixel buffer object.
@@ -265,6 +330,7 @@ protected:
   int Usage;
   unsigned int BufferTarget; // GLenum
   int Type;
+  int Components;
   unsigned int Size;
   vtkWeakPointer<vtkRenderWindow> Context;
   unsigned int Handle;
@@ -275,5 +341,3 @@ private:
 };
 
 #endif
-
-
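
The enum reorder above is not cosmetic: BufferType values are used directly as indices into the OpenGLBufferObjectTarget and OpenGLBufferObjectAccess tables added to vtkPixelBufferObject.cxx, so UNPACKED_BUFFER must be 0 and PACKED_BUFFER must be 1. A reduced sketch of that coupling, with the table contents copied from the .cxx hunk above:

    #include "vtkgl.h"

    enum BufferType { UNPACKED_BUFFER = 0, PACKED_BUFFER };

    // index 0 = upload path (app -> PBO), index 1 = download path (PBO -> app)
    static const GLenum OpenGLBufferObjectTarget[2] =
    {
      vtkgl::PIXEL_UNPACK_BUFFER_ARB,
      vtkgl::PIXEL_PACK_BUFFER_ARB
    };

    static const GLenum OpenGLBufferObjectAccess[2] =
    {
      vtkgl::WRITE_ONLY, // uploads are written by the application
      vtkgl::READ_ONLY   // downloads are read by the application
    };

    static GLenum TargetFor(BufferType mode)
    {
      // valid only because the enum values are 0 and 1 in this order
      return OpenGLBufferObjectTarget[mode];
    }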
diff --git a/Rendering/OpenGL/vtkRenderPass.cxx b/Rendering/OpenGL/vtkRenderPass.cxx
index 7c14f9c..8f75ca6 100644
--- a/Rendering/OpenGL/vtkRenderPass.cxx
+++ b/Rendering/OpenGL/vtkRenderPass.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 
 #include "vtkRenderPass.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderer.h"
 #include "vtkOpenGLRenderer.h"
 
diff --git a/Rendering/OpenGL/vtkRenderState.cxx b/Rendering/OpenGL/vtkRenderState.cxx
index 525155a..d0e1788 100644
--- a/Rendering/OpenGL/vtkRenderState.cxx
+++ b/Rendering/OpenGL/vtkRenderState.cxx
@@ -13,7 +13,7 @@
 
 =========================================================================*/
 #include "vtkRenderState.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderer.h"
 #include "vtkFrameBufferObject.h"
 
diff --git a/Rendering/OpenGL/vtkRenderbuffer.cxx b/Rendering/OpenGL/vtkRenderbuffer.cxx
new file mode 100644
index 0000000..40e8b24
--- /dev/null
+++ b/Rendering/OpenGL/vtkRenderbuffer.cxx
@@ -0,0 +1,215 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkRenderbuffer.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkRenderbuffer.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkFrameBufferObject2.h" // for LoadRequiredExtension
+#include "vtkgl.h"
+#include "vtkOpenGLError.h"
+
+#include <cassert>
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkRenderbuffer);
+
+//----------------------------------------------------------------------------
+vtkRenderbuffer::vtkRenderbuffer()
+{
+  this->Context = NULL;
+  this->Handle = 0U;
+  this->DepthBufferFloat = 0;
+}
+
+//----------------------------------------------------------------------------
+vtkRenderbuffer::~vtkRenderbuffer()
+{
+  this->Free();
+}
+
+//----------------------------------------------------------------------------
+bool vtkRenderbuffer::IsSupported(vtkRenderWindow *win)
+{
+  bool supported = false;
+
+  vtkOpenGLRenderWindow *glwin = dynamic_cast<vtkOpenGLRenderWindow*>(win);
+  if (glwin)
+    {
+    vtkOpenGLExtensionManager *mgr = glwin->GetExtensionManager();
+
+    bool floatTex = mgr->ExtensionSupported("GL_ARB_texture_float")==1;
+    //bool floatDepth = mgr->ExtensionSupported("GL_ARB_depth_buffer_float")==1;
+    bool floatDepth = true;
+    bool fbo = vtkFrameBufferObject2::IsSupported(win);
+
+    supported = floatTex && floatDepth && fbo;
+    }
+
+  return supported;
+}
+
+//----------------------------------------------------------------------------
+bool vtkRenderbuffer::LoadRequiredExtensions(vtkRenderWindow *win)
+{
+  bool supported = false;
+
+  vtkOpenGLRenderWindow *glwin = dynamic_cast<vtkOpenGLRenderWindow*>(win);
+  if (glwin)
+    {
+    vtkOpenGLExtensionManager *mgr = glwin->GetExtensionManager();
+
+    bool floatTex = mgr->ExtensionSupported("GL_ARB_texture_float")==1;
+    bool fbo = vtkFrameBufferObject2::IsSupported(win);
+
+    supported = floatTex && fbo;
+
+    if (supported)
+      {
+      // no functions to load for floatTex
+
+      // we'll use floating point depth buffers if they are
+      // available
+      this->DepthBufferFloat
+         = mgr->ExtensionSupported("GL_ARB_depth_buffer_float");
+      if (this->DepthBufferFloat)
+        {
+        mgr->LoadSupportedExtension("GL_ARB_depth_buffer_float");
+        }
+
+      // the rest is part of the FBO extension; defer to that
+      // class to leverage its cross-platform extension
+      // loading gymnastics
+      vtkFrameBufferObject2::LoadRequiredExtensions(win);
+      }
+    }
+
+  return supported;
+}
+
+//----------------------------------------------------------------------------
+void vtkRenderbuffer::Alloc()
+{
+  vtkgl::GenRenderbuffersEXT(1, &this->Handle);
+  vtkOpenGLCheckErrorMacro("failed at glGenRenderbuffers");
+}
+
+//----------------------------------------------------------------------------
+void vtkRenderbuffer::Free()
+{
+  // because we don't hold a reference to the render
+  // context we have no control over when it is
+  // destroyed. In fact it may be destroyed before
+  // we are (e.g. with smart pointers), in which case
+  // we should do nothing.
+  if (this->Context && this->Handle)
+    {
+    vtkgl::DeleteRenderbuffersEXT(1, &this->Handle);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteRenderBuffers");
+    }
+}
+
+//----------------------------------------------------------------------------
+vtkRenderWindow *vtkRenderbuffer::GetContext()
+{
+  return this->Context;
+}
+
+//----------------------------------------------------------------------------
+void vtkRenderbuffer::SetContext(vtkRenderWindow *renWin)
+{
+  // avoid pointless re-assignment
+  if (this->Context==renWin){ return; }
+
+  // free previous resources
+  this->Free();
+  this->Context = NULL;
+  this->DepthBufferFloat = 0;
+  this->Modified();
+
+  // check for supported context
+  vtkOpenGLRenderWindow *context = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
+  if ( !context
+    || !this->LoadRequiredExtensions(renWin) )
+    {
+    vtkErrorMacro("Unsupported render context");
+    return;
+    }
+
+  // allocate new fbo
+  this->Context=renWin;
+  this->Context->MakeCurrent();
+  this->Alloc();
+}
+//----------------------------------------------------------------------------
+int vtkRenderbuffer::CreateColorAttachment(
+      unsigned int width,
+      unsigned int height)
+{
+  assert(this->Context);
+  return this->Create(vtkgl::RGBA32F, width, height);
+}
+
+//----------------------------------------------------------------------------
+int vtkRenderbuffer::CreateDepthAttachment(
+      unsigned int width,
+      unsigned int height)
+{
+  assert(this->Context);
+
+  // typically DEPTH_COMPONENT will end up being a 32 bit floating
+  // point format; however, that is not guaranteed and does not
+  // seem to be the case with Mesa, hence the need to explicitly
+  // request a float format when possible.
+  if (this->DepthBufferFloat)
+    {
+    return this->Create(
+          vtkgl::DEPTH_COMPONENT32F,
+          width,
+          height);
+    }
+
+  return this->Create(
+        GL_DEPTH_COMPONENT,
+        width,
+        height);
+}
+
+//----------------------------------------------------------------------------
+int vtkRenderbuffer::Create(
+      unsigned int format,
+      unsigned int width,
+      unsigned int height)
+{
+  assert(this->Context);
+
+  vtkgl::BindRenderbufferEXT(vtkgl::RENDERBUFFER, (GLuint)this->Handle);
+  vtkOpenGLCheckErrorMacro("failed at glBindRenderBuffer");
+
+  vtkgl::RenderbufferStorageEXT(vtkgl::RENDERBUFFER, (GLenum)format, width, height);
+  vtkOpenGLCheckErrorMacro("failed at glRenderbufferStorage");
+
+  return 1;
+}
+
+// ----------------------------------------------------------------------------
+void vtkRenderbuffer::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+
+  os
+    << indent << "Handle=" << this->Handle << endl
+    << indent << "Context=" << this->Context << endl;
+}
diff --git a/Rendering/OpenGL/vtkRenderbuffer.h b/Rendering/OpenGL/vtkRenderbuffer.h
new file mode 100644
index 0000000..85359c2
--- /dev/null
+++ b/Rendering/OpenGL/vtkRenderbuffer.h
@@ -0,0 +1,94 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkRenderbuffer.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkRenderbuffer - Storage for FBOs
+// .SECTION Description
+// Lightweight API to OpenGL Framebuffer Object EXT renderbuffers.
+#ifndef __vtkRenderbuffer_h
+#define __vtkRenderbuffer_h
+
+#include "vtkObject.h"
+#include "vtkRenderingOpenGLModule.h" // for export macro
+#include "vtkWeakPointer.h" // for render context
+
+class vtkRenderWindow;
+class vtkTextureObject;
+
+class VTKRENDERINGOPENGL_EXPORT vtkRenderbuffer : public vtkObject
+{
+public:
+  static vtkRenderbuffer* New();
+  vtkTypeMacro(vtkRenderbuffer, vtkObject);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Returns if the context supports the required extensions.
+  // Extensions will be loaded when the context is set.
+  static bool IsSupported(vtkRenderWindow *renWin);
+
+  // Description:
+  // Get the name of the buffer for use in OpenGL code.
+  vtkGetMacro(Handle, unsigned int);
+
+  // Description:
+  // Setting the context has the side effect of loading the required
+  // OpenGL extensions and allocating an OpenGL name (handle) that is
+  // released when the object is destroyed. NOTE: the reference count
+  // of the passed in object is not incremented. The context must be
+  // set prior to other use.
+  void SetContext(vtkRenderWindow *win);
+  vtkRenderWindow* GetContext();
+
+  // Description:
+  // Sets up an RGBA32F renderbuffer for use as a color attachment.
+  int CreateColorAttachment(
+        unsigned int width,
+        unsigned int height);
+
+  // Description:
+  // Sets up a DEPTH renderbuffer for use as a depth attachment.
+  int CreateDepthAttachment(
+        unsigned int width,
+        unsigned int height);
+
+  // Description:
+  // Sets up a renderbuffer with the given internal format
+  // (see the OpenGL documentation for valid values).
+  int Create(
+        unsigned int format,
+        unsigned int width,
+        unsigned int height);
+
+protected:
+  vtkRenderbuffer();
+  ~vtkRenderbuffer();
+
+  bool LoadRequiredExtensions(vtkRenderWindow *renWin);
+  void Alloc();
+  void Free();
+
+  int DepthBufferFloat;
+
+private:
+  unsigned int Handle;
+  vtkWeakPointer<vtkRenderWindow> Context;
+
+private:
+  vtkRenderbuffer(const vtkRenderbuffer&); // Not implemented.
+  void operator=(const vtkRenderbuffer&); // Not implemented.
+};
+
+#endif
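
A brief sketch of driving the new class; the 512x512 size is a placeholder, and attaching the resulting handle to an FBO is left to vtkFrameBufferObject2, whose API is outside this hunk:

    #include "vtkNew.h"
    #include "vtkRenderWindow.h"
    #include "vtkRenderbuffer.h"

    void MakeDepthRenderbuffer(vtkRenderWindow *renWin)
    {
      if (!vtkRenderbuffer::IsSupported(renWin))
        {
        return; // context lacks FBO / float texture support
        }

      vtkNew<vtkRenderbuffer> depth;
      depth->SetContext(renWin);              // loads extensions, allocates the GL name
      depth->CreateDepthAttachment(512, 512); // DEPTH_COMPONENT32F when the driver allows

      unsigned int handle = depth->GetHandle(); // renderbuffer name for the FBO attachment
      (void)handle;
    }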
diff --git a/Rendering/OpenGL/vtkRenderingOpenGLConfigure.h.in b/Rendering/OpenGL/vtkRenderingOpenGLConfigure.h.in
index ac21327..329f8d9 100644
--- a/Rendering/OpenGL/vtkRenderingOpenGLConfigure.h.in
+++ b/Rendering/OpenGL/vtkRenderingOpenGLConfigure.h.in
@@ -29,7 +29,6 @@
 #cmakedefine VTK_OPENGL_HAS_OSMESA
 #cmakedefine VTK_USE_OFFSCREEN
 
-#cmakedefine VTK_USE_CG_SHADERS
 #cmakedefine VTK_USE_GLSL_SHADERS
 
 #cmakedefine VTK_MATERIALS_DIRS
@@ -38,9 +37,6 @@
 #  define VTK_MATERIALS_DIRS "@VTK_MATERIALS_DIRS@"
 #endif
 
-/* Should VTK use the display?  */
-#cmakedefine VTK_USE_DISPLAY
-
 /* Options for GPUInfo */
 #cmakedefine VTK_USE_DIRECTX
 #cmakedefine VTK_USE_CORE_GRAPHICS
diff --git a/Rendering/OpenGL/vtkSequencePass.cxx b/Rendering/OpenGL/vtkSequencePass.cxx
index 6a70529..69f7ddf 100644
--- a/Rendering/OpenGL/vtkSequencePass.cxx
+++ b/Rendering/OpenGL/vtkSequencePass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkSequencePass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderPassCollection.h"
 
 vtkStandardNewMacro(vtkSequencePass);
diff --git a/Rendering/OpenGL/vtkShader2.cxx b/Rendering/OpenGL/vtkShader2.cxx
index 3bcb444..90e7662 100644
--- a/Rendering/OpenGL/vtkShader2.cxx
+++ b/Rendering/OpenGL/vtkShader2.cxx
@@ -14,11 +14,12 @@
 =========================================================================*/
 #include "vtkShader2.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include <vtkgl.h>
 #include "vtkUniformVariables.h"
 #include "vtkOpenGLRenderWindow.h"
 #include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLError.h"
 
 static GLenum vtkShaderTypeVTKToGL[5] = {
   vtkgl::VERTEX_SHADER, // VTK_SHADER_TYPE_VERTEX=0
@@ -38,6 +39,8 @@ static const char *TypeAsStringArray[5] = {
 
 //-----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkShader2);
+
+//-----------------------------------------------------------------------------
 vtkCxxSetObjectMacro(vtkShader2,UniformVariables,vtkUniformVariables);
 
 // ----------------------------------------------------------------------------
@@ -50,7 +53,7 @@ vtkShader2::vtkShader2()
   this->Type = VTK_SHADER_TYPE_VERTEX;
 
   // OpenGL part
-  this->Context = 0;
+  this->Context = NULL;
   this->Id = 0;
   this->ExtensionsLoaded = false;
   this->SupportGeometryShader = false;
@@ -68,20 +71,16 @@ vtkShader2::vtkShader2()
 // ----------------------------------------------------------------------------
 void vtkShader2::ReleaseGraphicsResources()
 {
-  if (this->Context)
+  // because we don't hold a reference to the render
+  // context we have no control over when it is
+  // destroyed. In fact it may be destroyed before
+  // we are (e.g. with smart pointers), in which case
+  // we should do nothing.
+  if (this->Context && (this->Id != 0))
     {
-    if (this->Id !=0)
-      {
-      vtkgl::DeleteShader(this->Id);
-      this->Id = 0;
-      }
-    }
-  else
-    {
-    if (this->Id != 0)
-      {
-      vtkErrorMacro(<<" no context but some OpenGL resource has not been deleted.");
-      }
+    vtkgl::DeleteShader(this->Id);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteShader");
+    this->Id = 0;
     }
 }
 
@@ -90,6 +89,9 @@ void vtkShader2::ReleaseGraphicsResources()
 // Destructor. Delete SourceCode if any.
 vtkShader2::~vtkShader2()
 {
+  // explicitly release resources
+  this->ReleaseGraphicsResources();
+
   delete[] this->SourceCode;
   delete[] this->LastCompileLog;
 
@@ -97,17 +99,15 @@ vtkShader2::~vtkShader2()
     {
     this->UniformVariables->Delete();
     }
-
-  if (this->Id != 0)
-    {
-    vtkErrorMacro(<<"a vtkShader2 object is being deleted before ReleaseGraphicsResources() has been called.");
-    }
 }
 
 //----------------------------------------------------------------------------
-bool vtkShader2::IsSupported(vtkOpenGLRenderWindow *context)
+bool vtkShader2::IsSupported(vtkRenderWindow *renWin)
 {
-  assert("pre: context_exists" && context!=0);
+  vtkOpenGLRenderWindow *context
+    = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
+
+  if (!context) return false;
 
   vtkOpenGLExtensionManager *e = context->GetExtensionManager();
   return e->ExtensionSupported("GL_VERSION_2_0") ||
@@ -118,17 +118,23 @@ bool vtkShader2::IsSupported(vtkOpenGLRenderWindow *context)
 }
 
 //----------------------------------------------------------------------------
-bool vtkShader2::LoadExtensions(vtkOpenGLRenderWindow *context)
+bool vtkShader2::LoadRequiredExtensions(vtkRenderWindow *renWin)
 {
-  assert("pre: context_exists" && context!=0);
+  vtkOpenGLRenderWindow *context
+    = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
+
+  this->ExtensionsLoaded = false;
+  this->SupportGeometryShader = false;
+
+  if (!context) return false;
 
   vtkOpenGLExtensionManager *e = context->GetExtensionManager();
 
-  bool result = false;
+
   if (e->ExtensionSupported("GL_VERSION_2_0"))
     {
     e->LoadExtension("GL_VERSION_2_0");
-    result = true;
+    this->ExtensionsLoaded = true;
     }
   else
     {
@@ -141,46 +147,71 @@ bool vtkShader2::LoadExtensions(vtkOpenGLRenderWindow *context)
       e->LoadCorePromotedExtension("GL_ARB_shader_objects");
       e->LoadCorePromotedExtension("GL_ARB_vertex_shader");
       e->LoadCorePromotedExtension("GL_ARB_fragment_shader");
-      result = true;
+      this->ExtensionsLoaded = true;
       }
     }
-  return result;
+
+  if (this->ExtensionsLoaded)
+    {
+    bool supportGeometryShaderARB
+      = e->ExtensionSupported("GL_ARB_geometry_shader4") == 1;
+
+    this->SupportGeometryShader
+      = supportGeometryShaderARB
+      || e->ExtensionSupported("GL_EXT_geometry_shader4") == 1;
+
+    if (this->SupportGeometryShader)
+      {
+      if (supportGeometryShaderARB)
+        {
+        e->LoadExtension("GL_ARB_geometry_shader4");
+        }
+      else
+        {
+        e->LoadAsARBExtension("GL_EXT_geometry_shader4");
+        }
+      }
+    }
+
+  return this->ExtensionsLoaded;
 }
 
 // ----------------------------------------------------------------------------
-void vtkShader2::SetContext(vtkOpenGLRenderWindow *context)
+vtkRenderWindow *vtkShader2::GetContext()
 {
-  if (this->Context == context)
+  return this->Context;
+}
+
+// ----------------------------------------------------------------------------
+void vtkShader2::SetContext(vtkRenderWindow *renWin)
+{
+  // avoid pointless reassignment
+  if (this->Context == renWin)
     {
     return;
     }
-
+  // free resources
   this->ReleaseGraphicsResources();
-  this->Context = context;
-  if (this->Context)
+  this->Context = NULL;
+  this->Modified();
+  // all done if assigned null
+  if (!renWin)
     {
-    this->ExtensionsLoaded=this->LoadExtensions(this->Context);
-    if (this->ExtensionsLoaded)
-      {
-      vtkOpenGLExtensionManager *e = this->Context->GetExtensionManager();
-      bool supportGeometryShaderARB =
-        e->ExtensionSupported("GL_ARB_geometry_shader4") == 1;
-      this->SupportGeometryShader = supportGeometryShaderARB
-        || e->ExtensionSupported("GL_EXT_geometry_shader4") == 1;
-      if (this->SupportGeometryShader)
-        {
-        if (supportGeometryShaderARB)
-          {
-          e->LoadExtension("GL_ARB_geometry_shader4");
-          }
-        else
-          {
-          e->LoadAsARBExtension("GL_EXT_geometry_shader4");
-          }
-        }
-      }
+    return;
     }
-  this->Modified();
+  // check for support
+  vtkOpenGLRenderWindow *context
+    = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
+
+  if ( !context
+    || !this->LoadRequiredExtensions(renWin) )
+    {
+    vtkErrorMacro("The context does not support the required extensions");
+    return;
+    }
+  // initialize
+  this->Context = renWin;
+  this->Context->MakeCurrent();
 }
 
 //-----------------------------------------------------------------------------
@@ -192,6 +223,7 @@ void vtkShader2::SetContext(vtkOpenGLRenderWindow *context)
 void vtkShader2::Compile()
 {
   assert("pre: SourceCode_exists" && this->SourceCode != 0);
+  vtkOpenGLClearErrorMacro();
 
   if(this->Id == 0 || this->LastCompileTime < this->MTime)
     {
@@ -245,6 +277,8 @@ void vtkShader2::Compile()
     vtkgl::GetShaderInfoLog(shaderId,value, 0, this->LastCompileLog);
     this->LastCompileTime.Modified();
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Compile");
 }
 
 //-----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkShader2.h b/Rendering/OpenGL/vtkShader2.h
index b5bd21a..d9d7723 100644
--- a/Rendering/OpenGL/vtkShader2.h
+++ b/Rendering/OpenGL/vtkShader2.h
@@ -36,6 +36,7 @@
 #ifndef __vtkShader2_h
 #define __vtkShader2_h
 
+#include "vtkWeakPointer.h" // for ren context
 #include "vtkRenderingOpenGLModule.h" // For export macro
 #include "vtkObject.h"
 
@@ -49,7 +50,7 @@ enum vtkShader2Type
   VTK_SHADER_TYPE_FRAGMENT = 2
 };
 
-class vtkOpenGLRenderWindow;
+class vtkRenderWindow;
 class vtkUniformVariables;
 
 class VTKRENDERINGOPENGL_EXPORT vtkShader2 : public vtkObject
@@ -61,8 +62,8 @@ public:
 
   // Description:
   // Returns if the context supports the required extensions.
-  static bool IsSupported(vtkOpenGLRenderWindow *context);
-  static bool LoadExtensions(vtkOpenGLRenderWindow *context);
+  // Extensions are loaded when the context is set.
+  static bool IsSupported(vtkRenderWindow *context);
 
   // Description:
   // String containing the shader source code. Reminder SetString makes a copy
@@ -116,8 +117,8 @@ public:
   // context to avoid reference loops.
   // SetContext() may raise an error if the OpenGL context does not support the
   // required OpenGL extensions.
-  void SetContext(vtkOpenGLRenderWindow *context);
-  vtkGetObjectMacro(Context,vtkOpenGLRenderWindow);
+  void SetContext(vtkRenderWindow *context);
+  vtkRenderWindow *GetContext();
 
   // Description:
   // Release OpenGL resource (shader id).
@@ -142,6 +143,10 @@ protected:
   // Destructor. Delete SourceCode and LastCompileLog if any.
   virtual ~vtkShader2();
 
+  // Description:
+  // Load the required OpenGL extensions.
+  bool LoadRequiredExtensions(vtkRenderWindow *context);
+
   char *SourceCode;
   int Type;
 
@@ -153,7 +158,8 @@ protected:
 
   vtkTimeStamp LastCompileTime;
   vtkUniformVariables *UniformVariables; // Initial value is an empty list
-  vtkOpenGLRenderWindow *Context;
+
+  vtkWeakPointer<vtkRenderWindow> Context;
 
   bool ExtensionsLoaded;
   bool SupportGeometryShader;
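
A minimal usage sketch for the reworked vtkShader2 context API above, assuming a live OpenGL-capable render window; SetType() and SetSourceCode() are assumed accessors for the class's existing Type and SourceCode members, and the fragment source is purely illustrative.

#include "vtkRenderWindow.h"
#include "vtkShader2.h"

// Compile a trivial fragment shader against a render window using the
// vtkRenderWindow-based SetContext()/IsSupported() introduced above.
bool CompileTrivialFragmentShader(vtkRenderWindow *renWin)
{
  // IsSupported() now accepts any vtkRenderWindow and down-casts internally.
  if (!vtkShader2::IsSupported(renWin))
    {
    return false;
    }

  vtkShader2 *shader = vtkShader2::New();
  shader->SetType(VTK_SHADER_TYPE_FRAGMENT);
  shader->SetSourceCode(
    "void main(void) { gl_FragColor = vec4(1.0, 0.0, 0.0, 1.0); }");

  // SetContext() loads the required extensions and only keeps a weak
  // reference to the window.
  shader->SetContext(renWin);
  shader->Compile();
  bool ok = shader->GetLastCompileStatus();

  // ReleaseGraphicsResources() is now called from the destructor, so
  // Delete() no longer warns about a leaked shader id.
  shader->Delete();
  return ok;
}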
diff --git a/Rendering/OpenGL/vtkShader2Collection.cxx b/Rendering/OpenGL/vtkShader2Collection.cxx
index ad203c0..c463fb4 100644
--- a/Rendering/OpenGL/vtkShader2Collection.cxx
+++ b/Rendering/OpenGL/vtkShader2Collection.cxx
@@ -15,7 +15,7 @@
 #include "vtkShader2Collection.h"
 #include "vtkObjectFactory.h"
 #include "vtkShader2.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkShader2Collection);
 
diff --git a/Rendering/OpenGL/vtkShaderProgram2.cxx b/Rendering/OpenGL/vtkShaderProgram2.cxx
index ddb9439..d736da4 100644
--- a/Rendering/OpenGL/vtkShaderProgram2.cxx
+++ b/Rendering/OpenGL/vtkShaderProgram2.cxx
@@ -22,10 +22,11 @@
 #include "vtkUniformVariables.h"
 
 #include "vtkgl.h"
+#include "vtkOpenGLError.h"
 
 #include <vector>
 #include <vtksys/ios/sstream>
-#include <assert.h>
+#include <cassert>
 #include "vtkStdString.h"
 
 static GLenum vtkGeometryTypeInVTKToGL[5] = {
@@ -50,13 +51,16 @@ static GLenum vtkGeometryTypeOutVTKToGL[3] = {
   GL_TRIANGLE_STRIP, // VTK_GEOMETRY_SHADER_OUT_TYPE_TRIANGLE_STRIP=2
 };
 
+//----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkShaderProgram2);
+
+//----------------------------------------------------------------------------
 vtkCxxSetObjectMacro(vtkShaderProgram2, UniformVariables, vtkUniformVariables);
 
 //----------------------------------------------------------------------------
 vtkShaderProgram2::vtkShaderProgram2()
 {
-  this->Context = 0;
+  this->Context = NULL;
   this->ExtensionsLoaded = false;
 
   this->Id = 0;
@@ -80,9 +84,6 @@ vtkShaderProgram2::vtkShaderProgram2()
   this->LastValidateLog = new char[this->LastValidateLogCapacity];
   this->LastValidateLog[0] = '\0'; // empty string
 
-//  this->Context = 0;
-//  this->GeometryShadersSupported = false;
-
   this->UniformVariables = vtkUniformVariables::New(); // empty list
   this->PrintErrors = true;
 }
@@ -90,51 +91,47 @@ vtkShaderProgram2::vtkShaderProgram2()
 //-----------------------------------------------------------------------------
 void vtkShaderProgram2::ReleaseGraphicsResources()
 {
-  if (this->Context)
+  // Because we don't hold a reference to the render
+  // context, we have no control over when it is
+  // destroyed. In fact it may be destroyed before
+  // we are (e.g. by smart pointers), in which case we
+  // should do nothing.
+  this->Shaders->ReleaseGraphicsResources();
+  if (this->Context && (this->Id != 0))
     {
-    if (this->Id != 0)
-      {
-      vtkgl::DeleteProgram(this->Id);
-      this->Id = 0;
-      }
-    this->LastBuildStatus = VTK_SHADER_PROGRAM2_COMPILE_FAILED;
-    this->Shaders->ReleaseGraphicsResources();
-    }
-  else
-    {
-    if (this->Id != 0)
-      {
-      vtkErrorMacro(<<" no context but some OpenGL resource has not been deleted.");
-      }
+    vtkgl::DeleteProgram(this->Id);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteProgram");
+    this->Id = 0;
     }
+  this->LastBuildStatus = VTK_SHADER_PROGRAM2_COMPILE_FAILED;
 }
 
 //----------------------------------------------------------------------------
 vtkShaderProgram2::~vtkShaderProgram2()
 {
+  this->ReleaseGraphicsResources();
+
   delete[] this->LastLinkLog;
   delete[] this->LastValidateLog;
 
   if (this->UniformVariables)
     {
     this->UniformVariables->Delete();
-    this->UniformVariables = 0;
-    }
-  if (this->Id != 0)
-    {
-    vtkErrorMacro(<<"a vtkShaderProgram2 object is being deleted before ReleaseGraphicsResources() has been called.");
     }
+
   if (this->Shaders)
     {
     this->Shaders->Delete();
-    this->Shaders = 0;
     }
 }
 
 //----------------------------------------------------------------------------
-bool vtkShaderProgram2::IsSupported(vtkOpenGLRenderWindow *context)
+bool vtkShaderProgram2::IsSupported(vtkRenderWindow *renWin)
 {
-  assert("pre: context_exists" && context);
+  vtkOpenGLRenderWindow *context
+    = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
+
+  if (!context) return false;
 
   vtkOpenGLExtensionManager *e = context->GetExtensionManager();
 
@@ -151,9 +148,14 @@ bool vtkShaderProgram2::IsSupported(vtkOpenGLRenderWindow *context)
 }
 
 //----------------------------------------------------------------------------
-bool vtkShaderProgram2::LoadExtensions(vtkOpenGLRenderWindow *context)
+bool vtkShaderProgram2::LoadRequiredExtensions(vtkRenderWindow *renWin)
 {
-  assert("pre: context_exists" && context);
+  vtkOpenGLRenderWindow *context
+    = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
+
+  this->ExtensionsLoaded = false;
+
+  if (!context) return false;
 
   vtkOpenGLExtensionManager *e = context->GetExtensionManager();
 
@@ -166,9 +168,9 @@ bool vtkShaderProgram2::LoadExtensions(vtkOpenGLRenderWindow *context)
     e->ExtensionSupported("GL_ARB_vertex_shader") &&
     e->ExtensionSupported("GL_ARB_fragment_shader"));
 
-  bool result = (multiTexture && glsl);
+  this->ExtensionsLoaded = (multiTexture && glsl);
 
-  if (result)
+  if (this->ExtensionsLoaded)
     {
     if (gl13)
       {
@@ -191,22 +193,45 @@ bool vtkShaderProgram2::LoadExtensions(vtkOpenGLRenderWindow *context)
       }
     }
 
-  return result;
+  return this->ExtensionsLoaded;
 }
 
 // ----------------------------------------------------------------------------
-void vtkShaderProgram2::SetContext(vtkOpenGLRenderWindow *context)
+vtkRenderWindow *vtkShaderProgram2::GetContext()
 {
-  if (this->Context != context)
+  return this->Context;
+}
+
+// ----------------------------------------------------------------------------
+void vtkShaderProgram2::SetContext(vtkRenderWindow *renWin)
+{
+  // avoid pointless reassignment
+  if (this->Context == renWin)
     {
-    this->ReleaseGraphicsResources();
-    this->Context = context;
-    if (this->Context)
-      {
-      this->ExtensionsLoaded = this->LoadExtensions(this->Context);
-      }
-    this->Modified();
+    return;
+    }
+  // free resources
+  this->ReleaseGraphicsResources();
+  this->Context = NULL;
+  this->Modified();
+  // all done if assigned null
+  if (!renWin)
+    {
+    return;
     }
+  // check for support
+  vtkOpenGLRenderWindow *context
+    = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
+
+  if ( !context
+    || !this->LoadRequiredExtensions(renWin) )
+    {
+    vtkErrorMacro("The context does not support the required extensions.");
+    return;
+    }
+  // initialize
+  this->Context = renWin;
+  this->Context->MakeCurrent();
 }
 
 // ----------------------------------------------------------------------------
@@ -303,10 +328,6 @@ bool vtkShaderProgram2::DisplayListUnderCreationInCompileMode()
 }
 
 // ----------------------------------------------------------------------------
-// Description:
-// Use the shader program.
-// It saves the current shader program or fixed-pipeline in use.
-// It also set the uniform variables.
 void vtkShaderProgram2::Use()
 {
   assert("pre: context_is_set" && this->Context);
@@ -326,6 +347,7 @@ void vtkShaderProgram2::Use()
       // don't look at current program, don't save it, don't restore it
       // later.
       vtkgl::UseProgram(progId);
+      vtkOpenGLCheckErrorMacro("failed at glUseProgram");
       }
     else
       {
@@ -334,12 +356,15 @@ void vtkShaderProgram2::Use()
       if (static_cast<GLuint>(value) != progId)
         {
         this->SavedId = static_cast<unsigned int>(value);
+        #ifndef NDEBUG
         if (this->SavedId != 0)
           {
           vtkWarningMacro(<<"another program was used (id=" << this->SavedId
                           <<"), our id is" << progId << ".");
           }
+        #endif
         vtkgl::UseProgram(progId);
+        vtkOpenGLCheckErrorMacro("failed at glUseProgram");
         }
       assert("check: in_use" && this->IsUsed());
       }
@@ -364,18 +389,13 @@ void vtkShaderProgram2::Restore()
     {
     GLint value;
     glGetIntegerv(vtkgl::CURRENT_PROGRAM, &value);
-    if (static_cast<GLuint>(value) == static_cast<GLuint>(this->Id))
+    if (static_cast<GLuint>(value) != static_cast<GLuint>(this->Id))
       {
-      vtkgl::UseProgram(static_cast<GLuint>(this->SavedId));
-      this->SavedId = 0;
-      }
-    else
-      {
-      vtkWarningMacro(<<"cannot restore because the program in use (id="
-                      << value <<
-                      ") is not the id of the vtkShaderProgram2 object (id="
-                      << this->Id << ").");
+      vtkErrorMacro("Restore failed because the porgram is not in use");
       }
+    vtkgl::UseProgram(static_cast<GLuint>(this->SavedId));
+    vtkOpenGLCheckErrorMacro("failed at glUseProgram");
+    this->SavedId = 0;
     }
 }
 
@@ -407,27 +427,29 @@ void vtkShaderProgram2::Build()
     if (progId == 0)
       {
       progId = vtkgl::CreateProgram();
+      vtkOpenGLCheckErrorMacro("failed at glCreateProgram");
       if (progId == 0)
         {
-        vtkErrorMacro(<<"fatal error (bad current OpenGL context?, extension not supported?).");
+        vtkErrorMacro("failed to create program");
         return;
         }
+
       this->Id = static_cast<unsigned int>(progId);
       }
     // Detach all previous shaders (some may have disappeared
     // from this->Shaders)
     GLint numberOfAttachedShaders;
-    vtkgl::GetProgramiv(progId, vtkgl::ATTACHED_SHADERS,
-      &numberOfAttachedShaders);
+    vtkgl::GetProgramiv(progId, vtkgl::ATTACHED_SHADERS, &numberOfAttachedShaders);
     if (numberOfAttachedShaders > 0)
       {
       GLuint *attachedShaders = new GLuint[numberOfAttachedShaders];
-      vtkgl::GetAttachedShaders(progId,numberOfAttachedShaders,0,
-                                attachedShaders);
+      vtkgl::GetAttachedShaders(progId,numberOfAttachedShaders,0, attachedShaders);
       int i = 0;
       while(i < numberOfAttachedShaders)
         {
         vtkgl::DetachShader(progId, attachedShaders[i]);
+        vtkOpenGLCheckErrorMacro("failed at glDettachShader");
+
         ++i;
         }
       delete[] attachedShaders;
@@ -446,13 +468,17 @@ void vtkShaderProgram2::Build()
       if (s->GetLastCompileStatus())
         {
         vtkgl::AttachShader(progId, static_cast<GLuint>(s->GetId()));
+        vtkOpenGLCheckErrorMacro("failed at glAttachShader");
         }
       else
         {
         compileDone = false;
         if (this->PrintErrors)
           {
-          vtkErrorMacro(<<" a shader failed to compile. Its log is:\n" << s->GetLastCompileLog() << "\n. Its source code is:\n" << s->GetSourceCode());
+          vtkErrorMacro(
+            <<" a shader failed to compile. Its log is:\n"
+            << s->GetLastCompileLog() << "\n. Its source code is:\n"
+            << s->GetSourceCode());
           }
         }
       s = this->Shaders->GetNextShader();
@@ -474,6 +500,8 @@ void vtkShaderProgram2::Build()
         }
 
       vtkgl::LinkProgram(progId);
+      vtkOpenGLCheckErrorMacro("failed at glLinkProgram");
+
       GLint value;
       vtkgl::GetProgramiv(progId, vtkgl::LINK_STATUS ,&value);
       if (value == GL_TRUE)
@@ -558,20 +586,23 @@ void vtkShaderProgram2::SendUniforms()
         }
       }
 
-    GLuint progId = static_cast<GLuint>(this->Id);
+    // NOTE -- a given uniform variable's state is held and set in
+    // two different places, in the program and in the shader. If not
+    // used carefully this is both inefficient and error prone. However,
+    // it's done this way to simplify support for arbitrary collections
+    // of shaders.
 
     this->Shaders->InitTraversal();
     s = this->Shaders->GetNextShader();
-    const char *name;
-    GLint uniformId;
     while (s)
       {
       list = s->GetUniformVariables();
       list->Start();
       while(!list->IsAtEnd())
         {
-        name = list->GetCurrentName();
-        uniformId = vtkgl::GetUniformLocation(progId,name);
+        const char *name = list->GetCurrentName();
+        GLint uniformId = this->GetUniformLocationInternal(name);
+        vtkOpenGLCheckErrorMacro("failed at glGetUniformLocation");
         if (uniformId != -1)
           {
           // -1 means is not an active uniform
@@ -589,8 +620,8 @@ void vtkShaderProgram2::SendUniforms()
     list->Start();
     while (!list->IsAtEnd())
       {
-      name = list->GetCurrentName();
-      uniformId = vtkgl::GetUniformLocation(progId,name);
+      const char *name = list->GetCurrentName();
+      GLint uniformId = this->GetUniformLocationInternal(name);
       if (uniformId != -1)
         {
         // -1 means is not an active uniform
@@ -608,6 +639,63 @@ void vtkShaderProgram2::SendUniforms()
 }
 
 // ----------------------------------------------------------------------------
+int vtkShaderProgram2::GetUniformLocationInternal(const char *name)
+{
+  assert(this->Id!=0);
+  assert(name!=NULL);
+  int loc = (int)vtkgl::GetUniformLocation((GLuint)this->Id, name);
+  vtkOpenGLCheckErrorMacro("failed at glGetUniformLocation");
+  return loc;
+}
+
+// ----------------------------------------------------------------------------
+int vtkShaderProgram2::GetUniformLocation(const char *name)
+{
+  assert(this->Id!=0);
+  assert(name!=NULL);
+  int loc = (int)vtkgl::GetUniformLocation((GLuint)this->Id, name);
+  vtkOpenGLCheckErrorMacro("failed at glGetUniformLocation");
+  #ifndef NDEBUG
+  if (loc==-1)
+    { vtkErrorMacro("no variable named " << name); }
+  #endif
+  return loc;
+}
+
+// ----------------------------------------------------------------------------
+#define vtkSetUniformAPIMacro(ctype, num, letter) \
+void vtkShaderProgram2::SetUniform##num##letter(int loc, ctype *val) \
+{ \
+  assert(loc!=-1); \
+  vtkgl::Uniform##num##letter##v(loc, 1, val); \
+  vtkOpenGLCheckErrorMacro("failed at glUniform" #num #letter "v"); \
+}
+vtkSetUniformAPIMacro(float, 1, f);
+vtkSetUniformAPIMacro(float, 2, f);
+vtkSetUniformAPIMacro(float, 3, f);
+vtkSetUniformAPIMacro(float, 4, f);
+vtkSetUniformAPIMacro(int, 1, i);
+vtkSetUniformAPIMacro(int, 2, i);
+vtkSetUniformAPIMacro(int, 3, i);
+vtkSetUniformAPIMacro(int, 4, i);
+
+// ----------------------------------------------------------------------------
+void vtkShaderProgram2::UseProgram()
+{
+  assert(this->Context);
+  vtkgl::UseProgram((GLuint)this->Id);
+  vtkOpenGLCheckErrorMacro("failed at glUseProgram");
+}
+
+// ----------------------------------------------------------------------------
+void vtkShaderProgram2::UnuseProgram()
+{
+  assert(this->Context);
+  vtkgl::UseProgram((GLuint)0U);
+  vtkOpenGLCheckErrorMacro("failed at glUseProgram");
+}
+
+// ----------------------------------------------------------------------------
 // Description:
 // Introspection. Return the list of active uniform variables of the program.
 void vtkShaderProgram2::PrintActiveUniformVariables(
@@ -992,7 +1080,9 @@ int vtkShaderProgram2::GetAttributeLocation(const char *name)
   assert("pre: name_exists" && name);
   assert("pre: built" &&
          this->LastBuildStatus == VTK_SHADER_PROGRAM2_LINK_SUCCEEDED);
-  return vtkgl::GetAttribLocation(this->Id, name);
+  int loc = vtkgl::GetAttribLocation(this->Id, name);
+  vtkOpenGLCheckErrorMacro("glGetAttribLocation");
+  return loc;
 }
 
 //----------------------------------------------------------------------------
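
A minimal sketch of the build-and-use cycle touched by the changes above, assuming a shader whose source is already set and a live OpenGL context; GetShaders(), vtkShader2Collection::AddItem() and GetLastBuildStatus() are assumed accessors for the members shown in this diff.

#include "vtkRenderWindow.h"
#include "vtkShader2.h"
#include "vtkShader2Collection.h"
#include "vtkShaderProgram2.h"

// Build and use a one-shader program through the vtkRenderWindow-based
// SetContext() and the Build()/Use()/Restore() cycle shown above.
bool RenderWithProgram(vtkRenderWindow *renWin, vtkShader2 *fragmentShader)
{
  if (!vtkShaderProgram2::IsSupported(renWin))
    {
    return false;
    }

  vtkShaderProgram2 *program = vtkShaderProgram2::New();
  program->SetContext(renWin);

  fragmentShader->SetContext(renWin);
  program->GetShaders()->AddItem(fragmentShader);

  // Build() compiles the attached shaders and links the program, with the
  // GL error checks added above around glAttachShader and glLinkProgram.
  program->Build();
  if (program->GetLastBuildStatus() != VTK_SHADER_PROGRAM2_LINK_SUCCEEDED)
    {
    program->Delete();
    return false;
    }

  program->Use();      // binds the program and sends the uniforms
  // ... issue draw calls here ...
  program->Restore();  // restores the previously bound program

  program->Delete();
  return true;
}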
diff --git a/Rendering/OpenGL/vtkShaderProgram2.h b/Rendering/OpenGL/vtkShaderProgram2.h
index 35ac272..71ea1c3 100644
--- a/Rendering/OpenGL/vtkShaderProgram2.h
+++ b/Rendering/OpenGL/vtkShaderProgram2.h
@@ -22,14 +22,13 @@
 #ifndef __vtkShaderProgram2_h
 #define __vtkShaderProgram2_h
 
-#include "vtkRenderingOpenGLModule.h" // For export macro
+#include <cassert> // for templated functions
+#include "vtkRenderingOpenGLModule.h" // for export macro
+#include "vtkWeakPointer.h" // for weak ptr to rendering context
 #include "vtkObject.h"
-#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
 
-class vtkWindow;
-class vtkOpenGLRenderWindow;
+class vtkRenderWindow;
 class vtkOpenGLExtensionManager;
-class vtkShaderProgram2Uniforms; // internal
 class vtkShader2Collection;
 class vtkUniformVariables;
 
@@ -69,8 +68,8 @@ public:
 
   // Description:
   // Returns if the context supports the required extensions.
-  static bool IsSupported(vtkOpenGLRenderWindow *context);
-  static bool LoadExtensions(vtkOpenGLRenderWindow *context);
+  // Extensions are loaded when the context is set.
+  static bool IsSupported(vtkRenderWindow *context);
 
   // Description:
   // Tell if vtkErrorMacro should be called when there is a build error or not.
@@ -87,8 +86,8 @@ public:
   // context to avoid reference loops.
   // SetContext() may raise an error if the OpenGL context does not support the
   // required OpenGL extensions.
-  void SetContext(vtkOpenGLRenderWindow *context);
-  vtkGetObjectMacro(Context,vtkOpenGLRenderWindow);
+  void SetContext(vtkRenderWindow *context);
+  vtkRenderWindow *GetContext();
 
   // Description:
   // The list of shaders. Initially, the list is empty.
@@ -166,7 +165,8 @@ public:
   // Description:
   // Use the shader program.
   // It saves the current shader program or fixed-pipeline in use.
-  // It also set the uniform variables.
+  // As a side effect it also sets the uniform variables. If you don't
+  // want that, see UseProgram.
   // \pre context_is_set: this->GetContext()!=0
   // \pre current_context_matches: this->GetContext()->IsCurrent()
   void Use();
@@ -186,6 +186,12 @@ public:
   void RestoreFixedPipeline();
 
   // Description:
+  // Simple direct use of the program, without side effects and with
+  // error checking. The Unuse version restores the default program.
+  void UseProgram();
+  void UnuseProgram();
+
+  // Description:
   // Tells if the last build: failed during compilation of one of the
   // shader, fails during link of the program or succeeded to link the
   // program.
@@ -278,18 +284,112 @@ public:
   vtkSetMacro(GeometryTypeOut,int);
   vtkGetMacro(GeometryTypeOut,int);
 
+  /*
+  Low level API --
+  this is provided as a way to avoid some of the overhead in this
+  class's implementation of SendUniforms. Use the following API if
+  performance is a concern (e.g. uniforms are set per primitive), or
+  if uniform management is not needed (e.g. variables are already
+  managed in other vtkObjects).
+  */
+
+  // Description:
+  // Get a uniform's location.
+  // Low level API
+  int GetUniformLocation(const char *name);
+
+  // Description:
+  // Set a uniform value directly. The driving use case for this API
+  // is modifying a uniform per primitive in a loop. In that case
+  // we need a minimal implementation that passes the value directly to
+  // the driver. It is an error to specify an invalid location.
+  // Low level API
+  void SetUniformf(const char *name, float val)
+    { this->SetUniform1f(name, &val); }
+  void SetUniform1f(const char *name, float *val)
+    { this->SetUniform1f(this->GetUniformLocation(name), val); }
+  void SetUniform2f(const char *name, float *val)
+    { this->SetUniform2f(this->GetUniformLocation(name), val); }
+  void SetUniform3f(const char *name, float *val)
+    { this->SetUniform3f(this->GetUniformLocation(name), val); }
+  void SetUniform4f(const char *name, float *val)
+    { this->SetUniform4f(this->GetUniformLocation(name), val); }
+
+  void SetUniformi(const char *name, int val)
+    { this->SetUniform1i(name, &val); }
+  void SetUniform1i(const char *name, int *val)
+    { this->SetUniform1i(this->GetUniformLocation(name), val); }
+  void SetUniform2i(const char *name, int *val)
+    { this->SetUniform2i(this->GetUniformLocation(name), val); }
+  void SetUniform3i(const char *name, int *val)
+    { this->SetUniform3i(this->GetUniformLocation(name), val); }
+  void SetUniform4i(const char *name, int *val)
+    { this->SetUniform4i(this->GetUniformLocation(name), val); }
+
+  void SetUniformf(int loc, float val)
+    { this->SetUniform1f(loc, &val); }
+  void SetUniform1f(int loc, float *val);
+  void SetUniform2f(int loc, float *val);
+  void SetUniform3f(int loc, float *val);
+  void SetUniform4f(int loc, float *val);
+
+  void SetUniformi(int loc, int val)
+    { this->SetUniform1i(loc, &val); }
+  void SetUniform1i(int loc, int *val);
+  void SetUniform2i(int loc, int *val);
+  void SetUniform3i(int loc, int *val);
+  void SetUniform4i(int loc, int *val);
+
+  // Description:
+  // Convenience methods that copy/convert to a supported type. Typically
+  // this is needed because VTK stores data in an internal format (e.g. double)
+  // that is not directly supported.
+  template<typename T> void SetUniform1it(const char *name, T *value);
+  template<typename T> void SetUniform2it(const char *name, T *value);
+  template<typename T> void SetUniform3it(const char *name, T *value);
+  template<typename T> void SetUniform4it(const char *name, T *value);
+
+  template<typename T> void SetUniform1ft(const char *name, T *value);
+  template<typename T> void SetUniform2ft(const char *name, T *value);
+  template<typename T> void SetUniform3ft(const char *name, T *value);
+  template<typename T> void SetUniform4ft(const char *name, T *value);
+
+  template<typename T> void SetUniform1it(int loc, T *value);
+  template<typename T> void SetUniform2it(int loc, T *value);
+  template<typename T> void SetUniform3it(int loc, T *value);
+  template<typename T> void SetUniform4it(int loc, T *value);
+
+  template<typename T> void SetUniform1ft(int loc, T *value);
+  template<typename T> void SetUniform2ft(int loc, T *value);
+  template<typename T> void SetUniform3ft(int loc, T *value);
+  template<typename T> void SetUniform4ft(int loc, T *value);
+
 protected:
   vtkShaderProgram2();
   virtual ~vtkShaderProgram2();
 
+  // Description:
+  // Load the required OpenGL extensions.
+  bool LoadRequiredExtensions(vtkRenderWindow *context);
+
+  // Description:
+  // Get the location of a uniform without
+  // caring whether it really exists. This is used
+  // because this class attempts to set *all*
+  // uniforms it knows about via the associated
+  // vtkUniformVariables on *all* shaders it manages,
+  // regardless of whether a given uniform actually
+  // belongs to a given shader.
+  int GetUniformLocationInternal(const char *name);
+
   unsigned int Id; // actually GLuint. Initial value is 0.
   unsigned int SavedId;
 
   vtkTimeStamp LastLinkTime;
   vtkTimeStamp LastSendUniformsTime;
 
-  vtkShaderProgram2Uniforms *Uniforms;
   vtkShader2Collection *Shaders;
+  vtkUniformVariables *UniformVariables;
 
   int LastBuildStatus; // Initial value is VTK_SHADER_PROGRAM2_COMPILE_FAILED
 
@@ -299,11 +399,10 @@ protected:
   char *LastValidateLog; // Initial value is the empty string ""='\0'
   size_t LastValidateLogCapacity; // Initial value is 8.
 
-  vtkUniformVariables *UniformVariables; // Initial values is an empty list
 
-  bool PrintErrors; // use vtkErrorMacro ?
+  bool PrintErrors;
 
-  vtkOpenGLRenderWindow *Context;
+  vtkWeakPointer<vtkRenderWindow> Context;
   bool ExtensionsLoaded;
 
   int GeometryTypeIn;
@@ -315,4 +414,39 @@ private:
   void operator=(const vtkShaderProgram2&); // Not implemented.
 };
 
+// ----------------------------------------------------------------------------
+//BTX
+#define vtkShaderProgram2SetUniformCopyCastMacro(toLetter, toType, num) \
+template<typename fromType> \
+void vtkShaderProgram2::SetUniform##num##toLetter##t(const char *name, fromType *fvalues) \
+{ \
+  toType tvalues[num]; \
+  for (int i=0; i<num; ++i) \
+    { \
+    tvalues[i] = static_cast<toType>(fvalues[i]); \
+    } \
+  this->SetUniform##num##toLetter(name, tvalues); \
+} \
+template<typename fromType> \
+void vtkShaderProgram2::SetUniform##num##toLetter##t(int location, fromType *fvalues) \
+{ \
+  assert(location!=-1); \
+  toType tvalues[num]; \
+  for (int i=0; i<num; ++i) \
+    { \
+    tvalues[i] = static_cast<toType>(fvalues[i]); \
+    } \
+  this->SetUniform##num##toLetter(location, tvalues); \
+}
+vtkShaderProgram2SetUniformCopyCastMacro(f, float, 1)
+vtkShaderProgram2SetUniformCopyCastMacro(f, float, 2)
+vtkShaderProgram2SetUniformCopyCastMacro(f, float, 3)
+vtkShaderProgram2SetUniformCopyCastMacro(f, float, 4)
+vtkShaderProgram2SetUniformCopyCastMacro(i, int, 1)
+vtkShaderProgram2SetUniformCopyCastMacro(i, int, 2)
+vtkShaderProgram2SetUniformCopyCastMacro(i, int, 3)
+vtkShaderProgram2SetUniformCopyCastMacro(i, int, 4)
+//ETX
+
+
 #endif
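
A minimal sketch of the low-level uniform path declared above, assuming a program that has already been built and linked and whose context is current; the uniform name "primitiveColor" is purely illustrative.

#include "vtkShaderProgram2.h"

// Set a per-primitive color through the low-level API, bypassing the
// vtkUniformVariables bookkeeping used by Use()/SendUniforms().
void DrawWithPerPrimitiveColor(vtkShaderProgram2 *program,
                               double (*colors)[3], int nPrimitives)
{
  // Resolve the location once; GetUniformLocation() reports an error in
  // debug builds when the name is not an active uniform.
  int colorLoc = program->GetUniformLocation("primitiveColor");

  // UseProgram()/UnuseProgram() bind the program directly, without the
  // save/restore and uniform upload performed by Use()/Restore().
  program->UseProgram();
  for (int i = 0; i < nPrimitives; ++i)
    {
    // SetUniform3ft() copies and casts the doubles to float before
    // forwarding to glUniform3fv.
    program->SetUniform3ft(colorLoc, colors[i]);
    // ... issue the draw call for primitive i here ...
    }
  program->UnuseProgram();
}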
diff --git a/Rendering/OpenGL/vtkShadowMapBakerPass.cxx b/Rendering/OpenGL/vtkShadowMapBakerPass.cxx
index 338fbf8..c9672b8 100644
--- a/Rendering/OpenGL/vtkShadowMapBakerPass.cxx
+++ b/Rendering/OpenGL/vtkShadowMapBakerPass.cxx
@@ -24,6 +24,7 @@
 #include "vtkTextureObject.h"
 #include "vtkShaderProgram2.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 #include "vtkInformationIntegerKey.h"
 #include "vtkMath.h"
 
@@ -252,6 +253,8 @@ void vtkShadowMapBakerPass::Render(const vtkRenderState *s)
 {
   assert("pre: s_exists" && s!=0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->NumberOfRenderedProps=0;
   this->HasShadows=false;
 
@@ -676,6 +679,8 @@ void vtkShadowMapBakerPass::Render(const vtkRenderState *s)
     {
     vtkWarningMacro(<<" no delegate.");
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 // ----------------------------------------------------------------------------
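
The vtkOpenGLClearErrorMacro()/vtkOpenGLCheckErrorMacro() pairs added to these passes follow one bracketing pattern: clear stale GL error state on entry, then check after the GL work so a failure is attributed to the right call. A minimal sketch of that pattern, assuming a hypothetical vtkObject subclass (the macros report through vtkErrorMacro, so they are used inside vtkObject methods); the texture cleanup is only illustrative.

#include "vtkObject.h"
#include "vtkObjectFactory.h"
#include "vtkOpenGLError.h"
#include "vtkgl.h"

// Illustrative helper showing the clear/check bracketing used by the
// render passes above.
class vtkGLCleanupHelper : public vtkObject
{
public:
  static vtkGLCleanupHelper *New();
  vtkTypeMacro(vtkGLCleanupHelper, vtkObject);

  void ReleaseTexture(unsigned int &textureId)
    {
    // Flush any error left by earlier code so the checks below only
    // report failures from this block.
    vtkOpenGLClearErrorMacro();

    if (textureId != 0)
      {
      GLuint tex = static_cast<GLuint>(textureId);
      glDeleteTextures(1, &tex);
      vtkOpenGLCheckErrorMacro("failed at glDeleteTextures");
      textureId = 0;
      }

    // Final check after all GL work in this method.
    vtkOpenGLCheckErrorMacro("failed after ReleaseTexture");
    }

protected:
  vtkGLCleanupHelper() {}
  ~vtkGLCleanupHelper() {}
};
vtkStandardNewMacro(vtkGLCleanupHelper);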
diff --git a/Rendering/OpenGL/vtkShadowMapPass.cxx b/Rendering/OpenGL/vtkShadowMapPass.cxx
index 31d812c..f703244 100644
--- a/Rendering/OpenGL/vtkShadowMapPass.cxx
+++ b/Rendering/OpenGL/vtkShadowMapPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkShadowMapPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 #include "vtkRenderState.h"
 #include "vtkOpenGLRenderer.h"
@@ -27,6 +27,7 @@
 #include "vtkShader2Collection.h"
 #include "vtkUniformVariables.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 #include "vtkTextureUnitManager.h"
 #include "vtkInformationIntegerKey.h"
 #include "vtkMath.h"
@@ -165,6 +166,8 @@ void vtkShadowMapPass::Render(const vtkRenderState *s)
 {
   assert("pre: s_exists" && s!=0);
 
+  vtkOpenGLClearErrorMacro();
+
   this->NumberOfRenderedProps=0;
 
   vtkOpenGLRenderer *r=static_cast<vtkOpenGLRenderer *>(s->GetRenderer());
@@ -666,6 +669,8 @@ void vtkShadowMapPass::Render(const vtkRenderState *s)
     {
     vtkWarningMacro(<<" no ShadowMapBakerPass or no OpaquePass on the ShadowMapBakerPass.");
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 // ----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkSobelGradientMagnitudePass.cxx b/Rendering/OpenGL/vtkSobelGradientMagnitudePass.cxx
index f43e2a0..a8eb49f 100644
--- a/Rendering/OpenGL/vtkSobelGradientMagnitudePass.cxx
+++ b/Rendering/OpenGL/vtkSobelGradientMagnitudePass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkSobelGradientMagnitudePass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkRenderer.h"
 #include "vtkgl.h"
@@ -26,6 +26,8 @@
 #include "vtkShader2Collection.h"
 #include "vtkUniformVariables.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLError.h"
 #include "vtkTextureUnitManager.h"
 
 // to be able to dump intermediate passes into png files for debugging.
@@ -102,42 +104,35 @@ void vtkSobelGradientMagnitudePass::Render(const vtkRenderState *s)
 
   if(this->DelegatePass!=0)
     {
-     vtkRenderer *r=s->GetRenderer();
-
-     // Test for Hardware support. If not supported, just render the delegate.
-     bool supported=vtkFrameBufferObject::IsSupported(r->GetRenderWindow());
-
-     if(!supported)
-       {
-       vtkErrorMacro("FBOs are not supported by the context. Cannot detect edges on the image.");
-       }
-     if(supported)
-       {
-       supported=vtkTextureObject::IsSupported(r->GetRenderWindow());
-       if(!supported)
-         {
-         vtkErrorMacro("Texture Objects are not supported by the context. Cannot detect edges on the image.");
-         }
-       }
-
-     if(supported)
-       {
-       supported=
-         vtkShaderProgram2::IsSupported(static_cast<vtkOpenGLRenderWindow *>(
-                                          r->GetRenderWindow()));
-       if(!supported)
-         {
-         vtkErrorMacro("GLSL is not supported by the context. Cannot detect edges on the image.");
-         }
-       }
-
-     if(!supported)
-       {
-       this->DelegatePass->Render(s);
-       this->NumberOfRenderedProps+=
-         this->DelegatePass->GetNumberOfRenderedProps();
-       return;
-       }
+    vtkRenderer *renderer = s->GetRenderer();
+
+    vtkOpenGLRenderWindow *context
+      = vtkOpenGLRenderWindow::SafeDownCast(renderer->GetRenderWindow());
+
+    // Test for Hardware support. If not supported, just render the delegate.
+    bool fbo_support = vtkFrameBufferObject::IsSupported(context)!=0;
+    bool texture_support = vtkTextureObject::IsSupported(context)!=0;
+    bool shader_support = vtkShaderProgram2::IsSupported(context)!=0;
+
+    bool supported
+      = fbo_support && texture_support && shader_support;
+
+    if (!supported)
+      {
+      vtkErrorMacro(
+        << "The required extensions are not supported."
+        << " fbo_support=" << fbo_support
+        << " texture_support=" << texture_support
+        << " shader_support=" << shader_support);
+
+      this->DelegatePass->Render(s);
+      this->NumberOfRenderedProps
+        += this->DelegatePass->GetNumberOfRenderedProps();
+
+      return;
+      }
+
+    vtkOpenGLClearErrorMacro();
 
     GLint savedDrawBuffer;
     glGetIntegerv(GL_DRAW_BUFFER,&savedDrawBuffer);
@@ -159,13 +154,13 @@ void vtkSobelGradientMagnitudePass::Render(const vtkRenderState *s)
     if(this->Pass1==0)
       {
       this->Pass1=vtkTextureObject::New();
-      this->Pass1->SetContext(r->GetRenderWindow());
+      this->Pass1->SetContext(context);
       }
 
     if(this->FrameBufferObject==0)
       {
       this->FrameBufferObject=vtkFrameBufferObject::New();
-      this->FrameBufferObject->SetContext(r->GetRenderWindow());
+      this->FrameBufferObject->SetContext(context);
       }
 
     this->RenderDelegate(s,width,height,w,h,this->FrameBufferObject,
@@ -296,8 +291,7 @@ void vtkSobelGradientMagnitudePass::Render(const vtkRenderState *s)
       }
 
     vtkUniformVariables *var=this->Program1->GetUniformVariables();
-    vtkTextureUnitManager *tu=
-      static_cast<vtkOpenGLRenderWindow *>(r->GetRenderWindow())->GetTextureUnitManager();
+    vtkTextureUnitManager *tu=context->GetTextureUnitManager();
 
     int sourceId=tu->Allocate();
     vtkgl::ActiveTexture(vtkgl::TEXTURE0+sourceId);
@@ -307,7 +301,7 @@ void vtkSobelGradientMagnitudePass::Render(const vtkRenderState *s)
     var->SetUniformi("source",1,&sourceId);
 
     float fvalue=static_cast<float>(1.0/w);
-    var->SetUniformf("step",1,&fvalue);
+    var->SetUniformf("stepSize",1,&fvalue);
 
     this->Program1->Use();
 
@@ -459,7 +453,7 @@ void vtkSobelGradientMagnitudePass::Render(const vtkRenderState *s)
     var->SetUniformi("gy1",1,&id1);
 
     fvalue=static_cast<float>(1.0/h);
-    var->SetUniformf("step",1,&fvalue);
+    var->SetUniformf("stepSize",1,&fvalue);
 
     this->Program2->Use();
 
@@ -512,6 +506,8 @@ void vtkSobelGradientMagnitudePass::Render(const vtkRenderState *s)
     {
     vtkWarningMacro(<<" no delegate.");
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 // ----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkSobelGradientMagnitudePassShader1_fs.glsl b/Rendering/OpenGL/vtkSobelGradientMagnitudePassShader1_fs.glsl
index 144db58..f6f6180 100644
--- a/Rendering/OpenGL/vtkSobelGradientMagnitudePassShader1_fs.glsl
+++ b/Rendering/OpenGL/vtkSobelGradientMagnitudePassShader1_fs.glsl
@@ -24,12 +24,12 @@
 #extension GL_ARB_draw_buffers : enable
 
 uniform sampler2D source;
-uniform float step; // 1/W
+uniform float stepSize; // 1/W
 
 void main(void)
 {
   vec2 tcoord=gl_TexCoord[0].st;
-  vec2 offset=vec2(step,0.0);
+  vec2 offset=vec2(stepSize,0.0);
   vec4 t1=texture2D(source,tcoord-offset);
   vec4 t2=texture2D(source,tcoord);
   vec4 t3=texture2D(source,tcoord+offset);
diff --git a/Rendering/OpenGL/vtkSobelGradientMagnitudePassShader2_fs.glsl b/Rendering/OpenGL/vtkSobelGradientMagnitudePassShader2_fs.glsl
index ba366de..a5ca96e 100644
--- a/Rendering/OpenGL/vtkSobelGradientMagnitudePassShader2_fs.glsl
+++ b/Rendering/OpenGL/vtkSobelGradientMagnitudePassShader2_fs.glsl
@@ -19,12 +19,12 @@
 
 uniform sampler2D gx1;
 uniform sampler2D gy1;
-uniform float step; // 1/H
+uniform float stepSize; // 1/H
 
 void main(void)
 {
   vec2 tcoord=gl_TexCoord[0].st;
-  vec2 offset=vec2(0.0,step);
+  vec2 offset=vec2(0.0,stepSize);
 
   // Gx
 
diff --git a/Rendering/OpenGL/vtkTDxDevice.cxx b/Rendering/OpenGL/vtkTDxDevice.cxx
index 2862234..ef269f5 100644
--- a/Rendering/OpenGL/vtkTDxDevice.cxx
+++ b/Rendering/OpenGL/vtkTDxDevice.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 #include "vtkTDxDevice.h"
 
-#include <assert.h>
+#include <cassert>
 
 
 // ----------------------------------------------------------------------------
diff --git a/Rendering/OpenGL/vtkTDxInteractorStyleGeo.cxx b/Rendering/OpenGL/vtkTDxInteractorStyleGeo.cxx
index 851c748..7cb9e3b 100644
--- a/Rendering/OpenGL/vtkTDxInteractorStyleGeo.cxx
+++ b/Rendering/OpenGL/vtkTDxInteractorStyleGeo.cxx
@@ -16,7 +16,7 @@
 #include "vtkTDxInteractorStyleGeo.h"
 
 #include "vtkTransform.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkCamera.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
diff --git a/Rendering/OpenGL/vtkTDxMacDevice.cxx b/Rendering/OpenGL/vtkTDxMacDevice.cxx
index c21d501..6d56b9a 100644
--- a/Rendering/OpenGL/vtkTDxMacDevice.cxx
+++ b/Rendering/OpenGL/vtkTDxMacDevice.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 #include "vtkTDxMacDevice.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkTDxMotionEventInfo.h"
 #include "vtkCommand.h"
diff --git a/Rendering/OpenGL/vtkTDxUnixDevice.cxx b/Rendering/OpenGL/vtkTDxUnixDevice.cxx
index 680ce7c..6d04420 100644
--- a/Rendering/OpenGL/vtkTDxUnixDevice.cxx
+++ b/Rendering/OpenGL/vtkTDxUnixDevice.cxx
@@ -14,7 +14,7 @@
 =========================================================================*/
 #include "vtkTDxUnixDevice.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include <X11/Xlib.h> // Needed for X types used in the public interface
 // Display *DisplayId; // Actually a "Display *" but we cannot include Xlib.h
diff --git a/Rendering/OpenGL/vtkTDxWinDevice.cxx b/Rendering/OpenGL/vtkTDxWinDevice.cxx
index 8fba3f0..fb580e7 100644
--- a/Rendering/OpenGL/vtkTDxWinDevice.cxx
+++ b/Rendering/OpenGL/vtkTDxWinDevice.cxx
@@ -27,7 +27,7 @@
 // Most of the code is derived from the SDK with sample code
 // Cube3dPolling.cpp from archive Cube3Dpolling.zip from 3DConnexion.
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkTDxMotionEventInfo.h"
 #include "vtkCommand.h"
diff --git a/Rendering/OpenGL/vtkTextureObject.cxx b/Rendering/OpenGL/vtkTextureObject.cxx
index 206c43d..7cd79da 100644
--- a/Rendering/OpenGL/vtkTextureObject.cxx
+++ b/Rendering/OpenGL/vtkTextureObject.cxx
@@ -14,13 +14,14 @@
 =========================================================================*/
 #include "vtkTextureObject.h"
 
-#include "vtkPixelBufferObject.h"
 #include "vtkObjectFactory.h"
+#include "vtkPixelBufferObject.h"
 #include "vtkOpenGLExtensionManager.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkgl.h"
-#include <assert.h>
+#include <cassert>
 
 //#define VTK_TO_DEBUG
 //#define VTK_TO_TIMING
@@ -103,7 +104,14 @@ static GLint OpenGLMinFilter[6]=
   GL_LINEAR_MIPMAP_LINEAR
 };
 
-static const char *MinFilterAsString[6]=
+// Mapping MagnificationFilter values to OpenGL values.
+static GLint OpenGLMagFilter[6]=
+{
+  GL_NEAREST,
+  GL_LINEAR
+};
+
+static const char *MinMagFilterAsString[6]=
 {
   "Nearest",
   "Linear",
@@ -119,63 +127,93 @@ static GLenum OpenGLDepthInternalFormat[5]=
   vtkgl::DEPTH_COMPONENT16,
   vtkgl::DEPTH_COMPONENT24,
   vtkgl::DEPTH_COMPONENT32,
-  vtkgl::DEPTH_COMPONENT32F,
+  vtkgl::DEPTH_COMPONENT32F
+};
+
+static GLenum OpenGLDepthInternalFormatType[5]=
+{
+  GL_UNSIGNED_INT,
+  GL_UNSIGNED_INT,
+  GL_UNSIGNED_INT,
+  GL_UNSIGNED_INT,
+  GL_FLOAT
+};
+
+/*
+static const char *DepthInternalFormatFilterAsString[6]=
+{
+  "Native",
+  "Fixed16",
+  "Fixed24",
+  "Fixed32",
+  "Float32"
 };
+*/
 
+//----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkTextureObject);
+
 //----------------------------------------------------------------------------
 vtkTextureObject::vtkTextureObject()
 {
-  this->Context = 0;
+  this->Context = NULL;
   this->Handle = 0;
   this->NumberOfDimensions = 0;
-  this->Target =0;
+  this->Target = 0;
   this->Format = 0;
   this->Type = 0;
   this->Components = 0;
-  this->Width=this->Height=this->Depth=0;
-  this->SupportsTextureInteger=false;
-  this->SupportsTextureFloat=false;
-
-  this->WrapS=Repeat;
-  this->WrapT=Repeat;
-  this->WrapR=Repeat;
-  this->MinificationFilter=Nearest;
-  this->LinearMagnification=false;
-  this->BorderColor[0]=0.0f;
-  this->BorderColor[1]=0.0f;
-  this->BorderColor[2]=0.0f;
-  this->BorderColor[3]=0.0f;
-  this->Priority=1.0f;
-  this->MinLOD=-1000.0f;
-  this->MaxLOD=1000.0f;
-  this->BaseLevel=0;
-  this->MaxLevel=1000;
-
-  this->DepthTextureCompare=false;
-  this->DepthTextureCompareFunction=Lequal;
-  this->DepthTextureMode=Luminance;
-
-  this->GenerateMipmap=false;
+  this->Width = 0;
+  this->Height = 0;
+  this->Depth = 0;
+  this->RequireTextureInteger = false;
+  this->SupportsTextureInteger = false;
+  this->RequireTextureFloat = false;
+  this->SupportsTextureFloat = false;
+  this->RequireDepthBufferFloat = false;
+  this->SupportsDepthBufferFloat = false;
+  this->AutoParameters = 1;
+  this->WrapS = Repeat;
+  this->WrapT = Repeat;
+  this->WrapR = Repeat;
+  this->MinificationFilter = Nearest;
+  this->MagnificationFilter = Nearest;
+  this->LinearMagnification = false;
+  this->BorderColor[0] = 0.0f;
+  this->BorderColor[1] = 0.0f;
+  this->BorderColor[2] = 0.0f;
+  this->BorderColor[3] = 0.0f;
+  this->Priority = 1.0f;
+  this->MinLOD = -1000.0f;
+  this->MaxLOD = 1000.0f;
+  this->BaseLevel = 0;
+  this->MaxLevel = 0;
+  this->DepthTextureCompare = false;
+  this->DepthTextureCompareFunction = Lequal;
+  this->DepthTextureMode = Luminance;
+  this->GenerateMipmap = false;
 }
 
 //----------------------------------------------------------------------------
 vtkTextureObject::~vtkTextureObject()
 {
-  this->SetContext(0);
+  this->DestroyTexture();
 }
 
 //----------------------------------------------------------------------------
-bool vtkTextureObject::IsSupported(vtkRenderWindow* win)
+bool vtkTextureObject::IsSupported(vtkRenderWindow* win,
+      bool requireTexFloat,
+      bool requireDepthFloat,
+      bool requireTexInt)
 {
   vtkOpenGLRenderWindow* renWin = vtkOpenGLRenderWindow::SafeDownCast(win);
   if (renWin)
     {
     vtkOpenGLExtensionManager* mgr = renWin->GetExtensionManager();
 
-    bool gl12=mgr->ExtensionSupported("GL_VERSION_1_2")==1;
-    bool gl13=mgr->ExtensionSupported("GL_VERSION_1_3")==1;
-    bool gl20=mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+    bool gl12 = mgr->ExtensionSupported("GL_VERSION_1_2")==1;
+    bool gl13 = mgr->ExtensionSupported("GL_VERSION_1_3")==1;
+    bool gl20 = mgr->ExtensionSupported("GL_VERSION_2_0")==1;
 
     bool npot=gl20 ||
       mgr->ExtensionSupported("GL_ARB_texture_non_power_of_two");
@@ -183,34 +221,72 @@ bool vtkTextureObject::IsSupported(vtkRenderWindow* win)
     bool tex3D=gl12 || mgr->ExtensionSupported("GL_EXT_texture3D");
     bool multi=gl13 || mgr->ExtensionSupported("GL_ARB_multitexture");
 
-    return npot && tex3D && multi;
+    bool texFloat = true;
+    if (requireTexFloat)
+      {
+      texFloat = mgr->ExtensionSupported("GL_ARB_texture_float")==1;
+      }
+
+    bool depthFloat = true;
+    if (requireDepthFloat)
+      {
+      depthFloat = mgr->ExtensionSupported("GL_ARB_depth_buffer_float")==1;
+      }
+
+    bool texInt = true;
+    if (requireTexInt)
+      {
+      texInt = mgr->ExtensionSupported("GL_EXT_texture_integer")==1;
+      }
+
+    return npot && tex3D && multi && texFloat && depthFloat && texInt;
     }
   return false;
 }
 
 //----------------------------------------------------------------------------
-bool vtkTextureObject::LoadRequiredExtensions(vtkOpenGLExtensionManager* mgr)
+bool vtkTextureObject::LoadRequiredExtensions(vtkRenderWindow *renWin)
 {
-  // Optional extension, requires GeForce8
-  this->SupportsTextureInteger =
-    mgr->LoadSupportedExtension("GL_EXT_texture_integer") != 0;
+  vtkOpenGLRenderWindow *context = vtkOpenGLRenderWindow::SafeDownCast(renWin);
+  if (!context)
+    {
+    return false;
+    }
+
+  vtkOpenGLExtensionManager* mgr = context->GetExtensionManager();
+
+  bool gl12 = mgr->ExtensionSupported("GL_VERSION_1_2")==1;
+  bool gl13 = mgr->ExtensionSupported("GL_VERSION_1_3")==1;
+  bool gl20 = mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+
+  bool npot = (gl20 ||
+    mgr->ExtensionSupported("GL_ARB_texture_non_power_of_two"));
+
+  bool tex3D = (gl12 || mgr->ExtensionSupported("GL_EXT_texture3D"));
+  bool multi = (gl13 || mgr->ExtensionSupported("GL_ARB_multitexture"));
+
+  this->SupportsTextureInteger
+    = mgr->LoadSupportedExtension("GL_EXT_texture_integer")==1;
+
+  bool texInt
+    = (!this->RequireTextureInteger || this->SupportsTextureInteger);
 
-  this->SupportsTextureFloat=
-    mgr->ExtensionSupported("GL_ARB_texture_float")==1;
+  this->SupportsTextureFloat
+    = mgr->ExtensionSupported("GL_ARB_texture_float")==1;
 
-  bool gl12=mgr->ExtensionSupported("GL_VERSION_1_2")==1;
-  bool gl13=mgr->ExtensionSupported("GL_VERSION_1_3")==1;
-  bool gl20=mgr->ExtensionSupported("GL_VERSION_2_0")==1;
+  bool texFloat
+    = (!this->RequireTextureFloat || this->SupportsTextureFloat);
 
-  bool npot=gl20 ||
-    mgr->ExtensionSupported("GL_ARB_texture_non_power_of_two");
+  this->SupportsDepthBufferFloat
+    = mgr->ExtensionSupported("GL_ARB_depth_buffer_float")==1;
 
-  bool tex3D=gl12 || mgr->ExtensionSupported("GL_EXT_texture3D");
-  bool multi=gl13 || mgr->ExtensionSupported("GL_ARB_multitexture");
+  bool depthFloat
+    = (!this->RequireDepthBufferFloat || this->SupportsDepthBufferFloat);
 
-  bool supported=npot && tex3D && multi;
+  bool supported
+    = npot && tex3D && multi && texInt && texFloat && depthFloat;
 
-  if(supported)
+  if (supported)
     {
     // tex3D
     if(gl12)
@@ -230,118 +306,148 @@ bool vtkTextureObject::LoadRequiredExtensions(vtkOpenGLExtensionManager* mgr)
       {
       mgr->LoadCorePromotedExtension("GL_ARB_multitexture");
       }
-    // npot does not provide new functions, nothing to do.
-    // texture_float does not provide new functions, nothing to do.
+    // nothing to load for GL_ARB_texture_non_power_of_two,
+    // GL_ARB_texture_float and GL_ARB_depth_buffer_float,
+    // which only define constants; only constants from
+    // GL_EXT_texture_integer are used.
     }
+
   return supported;
 }
 
 //----------------------------------------------------------------------------
 void vtkTextureObject::SetContext(vtkRenderWindow* renWin)
 {
+  // avoid pointless reassignment
   if (this->Context == renWin)
     {
     return;
     }
-
+  // free previous resources
   this->DestroyTexture();
+  this->Context = NULL;
+  this->Modified();
+  // all done if assigned null
+  if (!renWin)
+    {
+    return;
+    }
+  // check for support
+  vtkOpenGLRenderWindow *context
+     = dynamic_cast<vtkOpenGLRenderWindow*>(renWin);
 
-  vtkOpenGLRenderWindow* openGLRenWin = vtkOpenGLRenderWindow::SafeDownCast(renWin);
-  this->Context = openGLRenWin;
-  if (openGLRenWin)
+  if ( !context
+    || !this->LoadRequiredExtensions(renWin) )
     {
-    if (!this->LoadRequiredExtensions(openGLRenWin->GetExtensionManager()))
-      {
-      this->Context = 0;
-      vtkErrorMacro("Required OpenGL extensions not supported by the context.");
-      }
+    vtkErrorMacro("Required OpenGL extensions not supported by the context.");
+    return;
     }
-  this->Modified();
+  // initialize
+  this->Context = renWin;
+  this->Context->MakeCurrent();
 }
 
 //----------------------------------------------------------------------------
 vtkRenderWindow* vtkTextureObject::GetContext()
 {
-  return this->Context;
+  return static_cast<vtkRenderWindow*>(this->Context);
 }
 
 //----------------------------------------------------------------------------
 void vtkTextureObject::DestroyTexture()
 {
+  // Because we don't hold a reference to the render
+  // context, we have no control over when it is
+  // destroyed. In fact it may be destroyed before
+  // we are (e.g. by smart pointers), in which case we
+  // should do nothing.
   if (this->Context && this->Handle)
     {
     GLuint tex = this->Handle;
     glDeleteTextures(1, &tex);
-    vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
-    this->Handle=0;
+    vtkOpenGLCheckErrorMacro("failed at glDeleteTexture");
     }
+  this->Handle = 0;
   this->NumberOfDimensions = 0;
   this->Target =0;
   this->Format = 0;
   this->Type = 0;
   this->Components = 0;
-  this->Width=this->Height=this->Depth=0;
+  this->Width = this->Height = this->Depth = 0;
 }
 
 //----------------------------------------------------------------------------
 void vtkTextureObject::CreateTexture()
 {
-  if (this->Context && !this->Handle)
+  assert(this->Context);
+
+  // reuse the existing handle if we have one
+  if (!this->Handle)
     {
     GLuint tex=0;
     glGenTextures(1, &tex);
-    vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+    vtkOpenGLCheckErrorMacro("failed at glGenTextures");
     this->Handle=tex;
 
     if (this->Target)
       {
       glBindTexture(this->Target, this->Handle);
-      vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
-      // NVidia drivers has some initialization bug. min_filter and
-      // mag_filter has to explicitly initialized even if the OpenGL spec
-      // states there is a default value.
+      vtkOpenGLCheckErrorMacro("failed at glBindTexture");
+
+      // See: http://www.opengl.org/wiki/Common_Mistakes#Creating_a_complete_texture
+      // Turn off the mipmap filter or set the base and max levels
+      // correctly; here both are done.
       glTexParameteri(this->Target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
-      vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
       glTexParameteri(this->Target, GL_TEXTURE_MAG_FILTER, GL_NEAREST);
-      vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
 
       glTexParameteri(this->Target, GL_TEXTURE_WRAP_S, GL_CLAMP);
-      vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
       glTexParameteri(this->Target, GL_TEXTURE_WRAP_T, GL_CLAMP);
-      vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+
+      glTexParameteri(GL_TEXTURE_2D, vtkgl::TEXTURE_BASE_LEVEL, 0);
+      glTexParameteri(GL_TEXTURE_2D, vtkgl::TEXTURE_MAX_LEVEL, 0);
 
       glBindTexture(this->Target, 0);
-      vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
       }
     }
 }
 
+//---------------------------------------------------------------------------
+void vtkTextureObject::Activate(unsigned int texUnit)
+{
+  vtkgl::ActiveTexture(static_cast<GLenum>(texUnit));
+  this->Bind();
+}
+
+//---------------------------------------------------------------------------
+void vtkTextureObject::Deactivate(unsigned int texUnit)
+{
+  vtkgl::ActiveTexture(static_cast<GLenum>(texUnit));
+  this->UnBind();
+}
+
 //----------------------------------------------------------------------------
 void vtkTextureObject::Bind()
 {
-  if (this->Context && this->Handle)
+  assert(this->Context);
+  assert(this->Handle);
+
+  glBindTexture(this->Target, this->Handle);
+  vtkOpenGLCheckErrorMacro("failed at glBindTexture");
+
+  if (this->AutoParameters && (this->GetMTime()>this->SendParametersTime))
     {
-    glBindTexture(this->Target, this->Handle);
     this->SendParameters();
-    vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
     }
 }
 
-
 //----------------------------------------------------------------------------
 void vtkTextureObject::UnBind()
 {
-  if (this->Context && this->Handle)
-    {
-    glBindTexture(this->Target, 0);
-    vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
-    }
+  glBindTexture(this->Target, 0);
+  vtkOpenGLCheckErrorMacro("failed at glBindTexture(0)");
 }
 
 //----------------------------------------------------------------------------
-// Description:
-// Tells if the texture object is bound to the active texture image unit.
-// (a texture object can be bound to multiple texture image unit).
 bool vtkTextureObject::IsBound()
 {
   bool result=false;
@@ -371,59 +477,63 @@ bool vtkTextureObject::IsBound()
 }
 
 //----------------------------------------------------------------------------
-// Description:
-// Send all the texture object parameters to the hardware if not done yet.
-// \pre is_bound: IsBound()
 void vtkTextureObject::SendParameters()
 {
   assert("pre: is_bound" && this->IsBound());
 
-  if(this->GetMTime()>this->SendParametersTime)
-    {
-    glTexParameteri(this->Target,GL_TEXTURE_WRAP_S, OpenGLWrap[this->WrapS]);
-    glTexParameteri(this->Target,GL_TEXTURE_WRAP_T,OpenGLWrap[this->WrapT]);
-    glTexParameteri(this->Target,vtkgl::TEXTURE_WRAP_R,
-                    OpenGLWrap[this->WrapR]);
-
-    glTexParameteri(this->Target,GL_TEXTURE_MIN_FILTER,
-                    OpenGLMinFilter[this->MinificationFilter]);
-    if(this->LinearMagnification)
-      {
-      glTexParameteri(this->Target,GL_TEXTURE_MAG_FILTER,GL_LINEAR);
-      }
-    else
-      {
-      glTexParameteri(this->Target,GL_TEXTURE_MAG_FILTER,GL_NEAREST);
-      }
+  glTexParameteri(this->Target,GL_TEXTURE_WRAP_S, OpenGLWrap[this->WrapS]);
+  glTexParameteri(this->Target,GL_TEXTURE_WRAP_T,OpenGLWrap[this->WrapT]);
 
-    glTexParameterfv(this->Target,GL_TEXTURE_BORDER_COLOR,this->BorderColor);
+  glTexParameteri(
+        this->Target,
+        vtkgl::TEXTURE_WRAP_R,
+        OpenGLWrap[this->WrapR]);
 
-    glTexParameterf(this->Target,GL_TEXTURE_PRIORITY,this->Priority);
-    glTexParameterf(this->Target,vtkgl::TEXTURE_MIN_LOD,this->MinLOD);
-    glTexParameterf(this->Target,vtkgl::TEXTURE_MAX_LOD,this->MaxLOD);
-    glTexParameteri(this->Target,vtkgl::TEXTURE_BASE_LEVEL,this->BaseLevel);
-    glTexParameteri(this->Target,vtkgl::TEXTURE_MAX_LEVEL,this->MaxLevel);
+  glTexParameteri(
+        this->Target,
+        GL_TEXTURE_MIN_FILTER,
+        OpenGLMinFilter[this->MinificationFilter]);
 
-    glTexParameteri(this->Target,vtkgl::DEPTH_TEXTURE_MODE,
-                    OpenGLDepthTextureMode[this->DepthTextureMode]);
+  glTexParameteri(
+        this->Target,
+        GL_TEXTURE_MAG_FILTER,
+        OpenGLMagFilter[this->MagnificationFilter]);
 
-    if(DepthTextureCompare)
-      {
-      glTexParameteri(this->Target,vtkgl::TEXTURE_COMPARE_MODE,
-                      vtkgl::COMPARE_R_TO_TEXTURE);
-      }
-    else
-      {
-      glTexParameteri(this->Target,vtkgl::TEXTURE_COMPARE_MODE,
-                      GL_NONE);
-      }
+  glTexParameterfv(this->Target,GL_TEXTURE_BORDER_COLOR,this->BorderColor);
 
-    glTexParameteri(this->Target,vtkgl::TEXTURE_COMPARE_FUNC,
-                    OpenGLDepthTextureCompareFunction[this->DepthTextureCompareFunction]
-      );
+  glTexParameterf(this->Target,GL_TEXTURE_PRIORITY,this->Priority);
+  glTexParameterf(this->Target,vtkgl::TEXTURE_MIN_LOD,this->MinLOD);
+  glTexParameterf(this->Target,vtkgl::TEXTURE_MAX_LOD,this->MaxLOD);
+  glTexParameteri(this->Target,vtkgl::TEXTURE_BASE_LEVEL,this->BaseLevel);
+  glTexParameteri(this->Target,vtkgl::TEXTURE_MAX_LEVEL,this->MaxLevel);
 
-    this->SendParametersTime.Modified();
+  glTexParameteri(
+        this->Target,
+        vtkgl::DEPTH_TEXTURE_MODE,
+        OpenGLDepthTextureMode[this->DepthTextureMode]);
+
+  if(DepthTextureCompare)
+    {
+    glTexParameteri(
+          this->Target,
+          vtkgl::TEXTURE_COMPARE_MODE,
+          vtkgl::COMPARE_R_TO_TEXTURE);
     }
+  else
+    {
+    glTexParameteri(
+          this->Target,
+          vtkgl::TEXTURE_COMPARE_MODE,
+          GL_NONE);
+    }
+
+  glTexParameteri(
+        this->Target,
+        vtkgl::TEXTURE_COMPARE_FUNC,
+        OpenGLDepthTextureCompareFunction[this->DepthTextureCompareFunction]);
+
+  vtkOpenGLCheckErrorMacro("failed after SendParameters");
+  this->SendParametersTime.Modified();
 }
 
 //----------------------------------------------------------------------------
@@ -850,17 +960,8 @@ bool vtkTextureObject::Create1D(int numComps,
   vtkTimerLog *timer=vtkTimerLog::New();
   timer->StartTimer();
 #endif
-  if (!this->Context)
-    {
-    vtkErrorMacro("No context specified. Cannot create texture.");
-    return false;
-    }
-
-  if (pbo->GetContext() != this->Context)
-    {
-    vtkErrorMacro("Context mismatch. Cannot load data.");
-    return false;
-    }
+  assert(this->Context);
+  assert(pbo->GetContext() == this->Context);
 
   GLenum target = GL_TEXTURE_1D;
 
@@ -888,8 +989,7 @@ bool vtkTextureObject::Create1D(int numComps,
   this->Bind();
 
   pbo->Bind(vtkPixelBufferObject::UNPACKED_BUFFER);
-  //vtkgl::ClampColorARB(vtkgl::CLAMP_FRAGMENT_COLOR_ARB, GL_FALSE);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+
   // Source texture data from the PBO.
   glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
   glTexImage1D(target, 0, static_cast<GLint>(internalFormat),
@@ -897,7 +997,7 @@ bool vtkTextureObject::Create1D(int numComps,
                                     static_cast<unsigned int>(numComps)),
                0, format,
                type, BUFFER_OFFSET(0));
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+  vtkOpenGLCheckErrorMacro("failed at glTexImage1D");
   pbo->UnBind();
   this->UnBind();
 
@@ -913,7 +1013,7 @@ bool vtkTextureObject::Create1D(int numComps,
   timer->StopTimer();
   double time=timer->GetElapsedTime();
   timer->Delete();
-  cout<<"upload PBO to 1D texture time="<<time<<" seconds."<<endl;
+  cerr<<"upload PBO to 1D texture time="<<time<<" seconds."<<endl;
 #endif
   return true;
 }
@@ -927,38 +1027,28 @@ bool vtkTextureObject::Create2D(unsigned int width, unsigned int height,
   vtkTimerLog *timer=vtkTimerLog::New();
   timer->StartTimer();
 #endif
-  if (!this->Context)
-    {
-    vtkErrorMacro("No context specified. Cannot create texture.");
-    return false;
-    }
-
-  if (pbo->GetContext() != this->Context)
-    {
-    vtkErrorMacro("Context mismatch. Cannot load data.");
-    return false;
-    }
+  assert(this->Context);
+  assert(pbo->GetContext() == this->Context);
 
-  if (pbo->GetSize() != width*height*static_cast<unsigned int>(numComps))
+  if (pbo->GetSize() < width*height*static_cast<unsigned int>(numComps))
     {
     vtkErrorMacro("PBO size must match texture size.");
     return false;
     }
 
-  GLenum target = GL_TEXTURE_2D;
-
   // Now, determine texture parameters using the information from the pbo.
-
   // * internalFormat depends on number of components and the data type.
-  GLenum internalFormat =this->GetInternalFormat(pbo->GetType(), numComps,
-                                                 shaderSupportsTextureInt);
-
   // * format depends on the number of components.
-  GLenum format = this->GetFormat(pbo->GetType(), numComps,
-                                 shaderSupportsTextureInt);
-
   // * type if the data type in the pbo
-  GLenum type = ::vtkGetType(pbo->GetType());
+
+  int vtktype = pbo->GetType();
+  GLenum type = ::vtkGetType(vtktype);
+
+  GLenum internalFormat
+    = this->GetInternalFormat(vtktype, numComps, shaderSupportsTextureInt);
+
+  GLenum format
+    = this->GetFormat(vtktype, numComps, shaderSupportsTextureInt);
 
   if (!internalFormat || !format || !type)
     {
@@ -966,19 +1056,28 @@ bool vtkTextureObject::Create2D(unsigned int width, unsigned int height,
     return false;
     }
 
+  GLenum target = GL_TEXTURE_2D;
   this->Target = target;
   this->CreateTexture();
   this->Bind();
 
-  pbo->Bind(vtkPixelBufferObject::UNPACKED_BUFFER);
-  //vtkgl::ClampColorARB(vtkgl::CLAMP_FRAGMENT_COLOR_ARB, GL_FALSE);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
   // Source texture data from the PBO.
+  pbo->Bind(vtkPixelBufferObject::UNPACKED_BUFFER);
   glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
-  glTexImage2D(target, 0, static_cast<GLint>(internalFormat),
-               static_cast<GLsizei>(width), static_cast<GLsizei>(height), 0,
-               format, type, BUFFER_OFFSET(0));
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+
+  glTexImage2D(
+        target,
+        0,
+        internalFormat,
+        static_cast<GLsizei>(width),
+        static_cast<GLsizei>(height),
+        0,
+        format,
+        type,
+        BUFFER_OFFSET(0));
+
+  vtkOpenGLCheckErrorMacro("failed at glTexImage2D");
+
   pbo->UnBind();
   this->UnBind();
 
@@ -995,7 +1094,7 @@ bool vtkTextureObject::Create2D(unsigned int width, unsigned int height,
   timer->StopTimer();
   double time=timer->GetElapsedTime();
   timer->Delete();
-  cout<<"upload PBO to 2D texture time="<<time<<" seconds."<<endl;
+  cerr<<"upload PBO to 2D texture time="<<time<<" seconds."<<endl;
 #endif
   return true;
 }
@@ -1011,18 +1110,17 @@ bool vtkTextureObject::CreateDepth(unsigned int width,
   assert("pre: context_exists" && this->GetContext()!=0);
   assert("pre: pbo_context_exists" && pbo->GetContext()!=0);
   assert("pre: context_match" && this->GetContext()==pbo->GetContext());
-
-#ifdef VTK_TO_DEBUG
-  cout << "pbo size=" << pbo->GetSize() << endl;
-  cout << "width=" << width << endl;
-  cout << "height=" << height << endl;
-  cout << "width*height=" << width*height << endl;
-#endif
-
   assert("pre: sizes_match" && pbo->GetSize()==width*height);
   assert("pre: valid_internalFormat" && internalFormat>=0
          && internalFormat<NumberOfDepthFormats);
 
+#ifdef VTK_TO_DEBUG
+  cerr << "pbo size=" << pbo->GetSize() << endl;
+  cerr << "width=" << width << endl;
+  cerr << "height=" << height << endl;
+  cerr << "width*height=" << width*height << endl;
+#endif
+
   GLenum inFormat=OpenGLDepthInternalFormat[internalFormat];
   GLenum type=::vtkGetType(pbo->GetType());
 
@@ -1039,13 +1137,13 @@ bool vtkTextureObject::CreateDepth(unsigned int width,
   this->Bind();
 
   pbo->Bind(vtkPixelBufferObject::UNPACKED_BUFFER);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+
   // Source texture data from the PBO.
   glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
   glTexImage2D(this->Target, 0, static_cast<GLint>(inFormat),
                static_cast<GLsizei>(width), static_cast<GLsizei>(height), 0,
                this->Format, this->Type, BUFFER_OFFSET(0));
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+  vtkOpenGLCheckErrorMacro("failed at glTexImage2D");
   pbo->UnBind();
   this->UnBind();
   return true;
@@ -1065,9 +1163,9 @@ bool vtkTextureObject::CreateDepthFromRaw(unsigned int width,
   assert("pre: raw_exists" && raw!=0);
 
 #ifdef VTK_TO_DEBUG
-  cout << "width=" << width << endl;
-  cout << "height=" << height << endl;
-  cout << "width*height=" << width*height << endl;
+  cerr << "width=" << width << endl;
+  cerr << "height=" << height << endl;
+  cerr << "width*height=" << width*height << endl;
 #endif
 
   assert("pre: valid_internalFormat" && internalFormat>=0
@@ -1092,13 +1190,12 @@ bool vtkTextureObject::CreateDepthFromRaw(unsigned int width,
   glTexImage2D(this->Target, 0, static_cast<GLint>(inFormat),
                static_cast<GLsizei>(width), static_cast<GLsizei>(height), 0,
                this->Format, this->Type,raw);
+  vtkOpenGLCheckErrorMacro("failed at glTexImage2D");
   this->UnBind();
   return true;
 }
 
 // ----------------------------------------------------------------------------
-// Description:
-// Create a 2D depth texture but does not initialize its values.
 bool vtkTextureObject::AllocateDepth(unsigned int width,unsigned int height,
                                      int internalFormat)
 {
@@ -1106,10 +1203,10 @@ bool vtkTextureObject::AllocateDepth(unsigned int width,unsigned int height,
   assert("pre: valid_internalFormat" && internalFormat>=0
          && internalFormat<NumberOfDepthFormats);
 
-  GLenum inFormat=OpenGLDepthInternalFormat[internalFormat];
   this->Target=GL_TEXTURE_2D;
   this->Format=GL_DEPTH_COMPONENT;
-  this->Type=GL_UNSIGNED_BYTE; // it does not matter.
+  // try to match the VTK type to the internal format
+  this->Type=OpenGLDepthInternalFormatType[internalFormat];
   this->Width=width;
   this->Height=height;
   this->Depth=1;
@@ -1119,27 +1216,31 @@ bool vtkTextureObject::AllocateDepth(unsigned int width,unsigned int height,
   this->CreateTexture();
   this->Bind();
 
-  glTexImage2D(this->Target, 0, static_cast<GLint>(inFormat),
-               static_cast<GLsizei>(width), static_cast<GLsizei>(height), 0,
-               this->Format, this->Type,0);
+  GLenum inFormat=OpenGLDepthInternalFormat[internalFormat];
+  glTexImage2D(
+          this->Target,
+          0,
+          static_cast<GLint>(inFormat),
+          static_cast<GLsizei>(width),
+          static_cast<GLsizei>(height),
+          0,
+          this->Format,
+          this->Type,
+          0);
+
+  vtkOpenGLCheckErrorMacro("failed at glTexImage2D");
+
   this->UnBind();
   return true;
 }
 
 // ----------------------------------------------------------------------------
-// Description:
-// Create a 1D color texture but does not initialize its values.
-// Internal format is deduced from numComps and vtkType.
 bool vtkTextureObject::Allocate1D(unsigned int width, int numComps,
                                   int vtkType)
 {
-  this->Target=GL_TEXTURE_1D;
+  assert(this->Context);
 
-  if(this->Context==0)
-    {
-    vtkErrorMacro("No context specified. Cannot create texture.");
-    return false;
-    }
+  this->Target=GL_TEXTURE_1D;
   GLenum internalFormat = this->GetInternalFormat(vtkType, numComps,
                                                   false);
 
@@ -1160,6 +1261,7 @@ bool vtkTextureObject::Allocate1D(unsigned int width, int numComps,
   this->Bind();
   glTexImage1D(this->Target, 0, static_cast<GLint>(internalFormat),
                static_cast<GLsizei>(width),0, format, type,0);
+  vtkOpenGLCheckErrorMacro("failed at glTexImage1D");
   this->UnBind();
   return true;
 }
@@ -1171,14 +1273,10 @@ bool vtkTextureObject::Allocate1D(unsigned int width, int numComps,
 bool vtkTextureObject::Allocate2D(unsigned int width,unsigned int height,
                                   int numComps,int vtkType)
 {
-  this->Target=GL_TEXTURE_2D;
+  assert(this->Context);
 
+  this->Target=GL_TEXTURE_2D;
 
-  if(this->Context==0)
-    {
-    vtkErrorMacro("No context specified. Cannot create texture.");
-    return false;
-    }
   GLenum internalFormat = this->GetInternalFormat(vtkType, numComps,
                                                   false);
 
@@ -1200,6 +1298,7 @@ bool vtkTextureObject::Allocate2D(unsigned int width,unsigned int height,
   glTexImage2D(this->Target, 0, static_cast<GLint>(internalFormat),
                static_cast<GLsizei>(width), static_cast<GLsizei>(height),
                0, format, type,0);
+  vtkOpenGLCheckErrorMacro("failed at glTexImage2D");
   this->UnBind();
   return true;
 }
@@ -1240,6 +1339,7 @@ bool vtkTextureObject::Allocate3D(unsigned int width,unsigned int height,
   vtkgl::TexImage3D(this->Target, 0, static_cast<GLint>(internalFormat),
                     static_cast<GLsizei>(width), static_cast<GLsizei>(height),
                     static_cast<GLsizei>(depth), 0, format, type,0);
+  vtkOpenGLCheckErrorMacro("failed at glTexImage3D");
   this->UnBind();
   return true;
 }
@@ -1254,17 +1354,8 @@ bool vtkTextureObject::Create3D(unsigned int width, unsigned int height,
   vtkTimerLog *timer=vtkTimerLog::New();
   timer->StartTimer();
 #endif
-  if (!this->Context)
-    {
-    vtkErrorMacro("No context specified. Cannot create texture.");
-    return false;
-    }
-
-  if (pbo->GetContext() != this->Context)
-    {
-    vtkErrorMacro("Context mismatch. Cannot load data.");
-    return false;
-    }
+  assert(this->Context);
+  assert(this->Context == pbo->GetContext());
 
   if (pbo->GetSize() != width*height*depth*static_cast<unsigned int>(numComps))
     {
@@ -1298,14 +1389,15 @@ bool vtkTextureObject::Create3D(unsigned int width, unsigned int height,
   this->Bind();
 
   pbo->Bind(vtkPixelBufferObject::UNPACKED_BUFFER);
-  //vtkgl::ClampColorARB(vtkgl::CLAMP_FRAGMENT_COLOR_ARB, GL_FALSE);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+
   // Source texture data from the PBO.
   vtkgl::TexImage3D(target, 0, static_cast<GLint>(internalFormat),
                     static_cast<GLsizei>(width), static_cast<GLsizei>(height),
                     static_cast<GLsizei>(depth), 0, format, type,
                     BUFFER_OFFSET(0));
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+
+  vtkOpenGLCheckErrorMacro("failed at glTexImage3D");
+
   pbo->UnBind();
   this->UnBind();
 
@@ -1322,7 +1414,7 @@ bool vtkTextureObject::Create3D(unsigned int width, unsigned int height,
   timer->StopTimer();
   double time=timer->GetElapsedTime();
   timer->Delete();
-  cout<<"upload PBO to 3D texture time="<<time<<" seconds."<<endl;
+  cerr<<"upload PBO to 3D texture time="<<time<<" seconds."<<endl;
 #endif
   return true;
 }
@@ -1334,11 +1426,8 @@ vtkPixelBufferObject* vtkTextureObject::Download()
   vtkTimerLog *timer=vtkTimerLog::New();
   timer->StartTimer();
 #endif
-  if (!this->Context || !this->Handle)
-    {
-    vtkErrorMacro("Texture must be created before downloading.");
-    return 0;
-    }
+  assert(this->Context);
+  assert(this->Handle);
 
   vtkPixelBufferObject* pbo = vtkPixelBufferObject::New();
   pbo->SetContext(this->Context);
@@ -1364,15 +1453,17 @@ vtkPixelBufferObject* vtkTextureObject::Download()
   pbo->Bind(vtkPixelBufferObject::PACKED_BUFFER);
   this->Bind();
   glGetTexImage(this->Target, 0, this->Format, this->Type, BUFFER_OFFSET(0));
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+  vtkOpenGLCheckErrorMacro("failed at glGetTexImage");
   this->UnBind();
   pbo->UnBind();
 
+  pbo->SetComponents(this->Components);
+
 #ifdef VTK_TO_TIMING
   timer->StopTimer();
   double time=timer->GetElapsedTime();
   timer->Delete();
-  cout<<"download texture to PBO, time="<<time<<" seconds."<<endl;
+  cerr<<"download texture to PBO, time="<<time<<" seconds."<<endl;
 #endif
 
   return pbo;
@@ -1383,11 +1474,7 @@ bool vtkTextureObject::Create2D(unsigned int width, unsigned int height,
                                 int numComps, int vtktype,
                                 bool shaderSupportsTextureInt)
 {
-  if (!this->Context)
-    {
-    vtkErrorMacro("No context specified. Cannot create texture.");
-    return false;
-    }
+  assert(this->Context);
 
   GLenum target = GL_TEXTURE_2D;
 
@@ -1413,13 +1500,11 @@ bool vtkTextureObject::Create2D(unsigned int width, unsigned int height,
   this->CreateTexture();
   this->Bind();
 
-  //vtkgl::ClampColorARB(vtkgl::CLAMP_FRAGMENT_COLOR_ARB, GL_FALSE);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
   // Allocate space for texture, don't upload any data.
   glTexImage2D(target, 0, static_cast<GLint>(internalFormat),
                static_cast<GLsizei>(width), static_cast<GLsizei>(height),
                0, format, type, NULL);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+  vtkOpenGLCheckErrorMacro("failed at glTexImage2D");
   this->UnBind();
 
   this->Target = target;
@@ -1439,11 +1524,7 @@ bool vtkTextureObject::Create3D(unsigned int width, unsigned int height,
                                 int numComps, int vtktype,
                                 bool shaderSupportsTextureInt)
 {
-  if (!this->Context)
-    {
-    vtkErrorMacro("No context specified. Cannot create texture.");
-    return false;
-    }
+  assert(this->Context);
 
   GLenum target = vtkgl::TEXTURE_3D;
 
@@ -1469,14 +1550,12 @@ bool vtkTextureObject::Create3D(unsigned int width, unsigned int height,
   this->CreateTexture();
   this->Bind();
 
-  //vtkgl::ClampColorARB(vtkgl::CLAMP_FRAGMENT_COLOR_ARB, GL_FALSE);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
   // Allocate space for texture, don't upload any data.
   glPixelStorei(GL_UNPACK_ALIGNMENT, 1);
   vtkgl::TexImage3D(target, 0, static_cast<GLint>(internalFormat),
                     static_cast<GLsizei>(width), static_cast<GLsizei>(height),
                     static_cast<GLsizei>(depth), 0, format, type, NULL);
-  vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__");
+  vtkOpenGLCheckErrorMacro("failed at glTexImage3D");
   this->UnBind();
 
   this->Target = target;
@@ -1515,6 +1594,8 @@ void vtkTextureObject::CopyToFrameBuffer(int srcXmin,
   assert("pre: x_fit" && dstXmin+(srcXmax-srcXmin)<width);
   assert("pre: y_fit" && dstYmin+(srcYmax-srcYmin)<height);
 
+  vtkOpenGLClearErrorMacro();
+
   glMatrixMode(GL_PROJECTION);
   glPushMatrix();
   glLoadIdentity();
@@ -1570,6 +1651,8 @@ void vtkTextureObject::CopyToFrameBuffer(int srcXmin,
   glPopMatrix();
   glMatrixMode(GL_MODELVIEW);
   glPopMatrix();
+
+  vtkOpenGLCheckErrorMacro("failed after CopyToFrameBuffer");
 }
 
 //----------------------------------------------------------------------------
@@ -1594,6 +1677,7 @@ void vtkTextureObject::CopyFromFrameBuffer(int srcXmin,
   this->Bind();
   glCopyTexSubImage2D(this->Target,0,dstXmin,dstYmin,srcXmin,srcYmin,width,
                       height);
+  vtkOpenGLCheckErrorMacro("failed at glCopyTexSubImage2D");
   this->UnBind();
 }
 
@@ -1632,7 +1716,10 @@ void vtkTextureObject::PrintSelf(ostream& os, vtkIndent indent)
   os << indent << "WrapR: " << WrapAsString[this->WrapR] << endl;
 
   os << indent << "MinificationFilter: "
-     << MinFilterAsString[this->MinificationFilter] << endl;
+     << MinMagFilterAsString[this->MinificationFilter] << endl;
+
+  os << indent << "MagnificationFilter: "
+     << MinMagFilterAsString[this->MagnificationFilter] << endl;
 
   os << indent << "LinearMagnification: " << this->LinearMagnification << endl;
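For reference, the hunks above consistently replace the old
vtkGraphicErrorMacro(this->Context,"__FILE__ __LINE__") calls with the
macros from vtkOpenGLError.h. A minimal sketch of that pattern in a
free-function context (the helper name and the particular GL calls below
are illustrative, not part of the patch):

  #include "vtkOpenGLError.h" // clear/check error macros
  #include "vtkOpenGL.h"      // GL types and entry points

  static void UploadTextureSketch(GLenum target, GLuint handle)
  {
    vtkOpenGLClearErrorMacro();   // drop any error state left by earlier code
    glBindTexture(target, handle);
    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
    glTexParameteri(target, GL_TEXTURE_MIN_FILTER, GL_NEAREST);
    vtkOpenGLStaticCheckErrorMacro("failed at glTexParameteri");
    glBindTexture(target, 0);
    vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture(0)");
  }

Inside vtkObject subclasses the non-static vtkOpenGLCheckErrorMacro is used
instead, as in the vtkTextureObject hunks above.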
 
diff --git a/Rendering/OpenGL/vtkTextureObject.h b/Rendering/OpenGL/vtkTextureObject.h
index b092795..df250fa 100644
--- a/Rendering/OpenGL/vtkTextureObject.h
+++ b/Rendering/OpenGL/vtkTextureObject.h
@@ -24,9 +24,12 @@
 
 #include "vtkRenderingOpenGLModule.h" // For export macro
 #include "vtkObject.h"
-#include "vtkWeakPointer.h" // needed for vtkWeakPointer.
+#include "vtkWeakPointer.h" // for render context
 
+class vtkFloatArray;
+class vtkTextureObject;
 class vtkRenderWindow;
+class vtkOpenGLRenderWindow;
 class vtkOpenGLExtensionManager;
 class vtkPixelBufferObject;
 
@@ -113,6 +116,8 @@ public:
   vtkGetMacro(Height, unsigned int);
   vtkGetMacro(Depth, unsigned int);
   vtkGetMacro(Components, int);
+  unsigned int GetTuples()
+  { return this->Width*this->Height*this->Depth; }
 
   vtkGetMacro(NumberOfDimensions, int);
 
@@ -126,19 +131,30 @@ public:
 
   // Description:
   // Activate the texture. The texture must have been created using Create().
+  // A side effect is that texture parameters are sent.
   // RenderWindow must be set before calling this.
   void Bind();
   void UnBind();
 
   // Description:
+  // Set the active texture unit and bind this texture (via Bind()).
+  void Activate(unsigned int texUnit);
+  void Deactivate(unsigned int texUnit);
+
+
+  // Description:
   // Tells if the texture object is bound to the active texture image unit.
   // (a texture object can be bound to multiple texture image unit).
   bool IsBound();
 
   // Description:
   // Send all the texture object parameters to the hardware if not done yet.
+  // Parameters are automatically sent as a side effect of Bind. Disable
+  // this by setting AutoParameters to 0.
   // \pre is_bound: IsBound()
   void SendParameters();
+  vtkSetMacro(AutoParameters, int);
+  vtkGetMacro(AutoParameters, int);
 
   // Description:
   // Create a 1D texture using the PBO.
@@ -241,6 +257,33 @@ public:
                          bool shaderSupportsTextureInt);
 
   // Description:
+  // Optionally require support for floating point depth buffer
+  // formats. If supported, the extensions will be loaded; however,
+  // loading will fail if the extension is required but not
+  // available.
+  vtkSetMacro(RequireDepthBufferFloat, bool);
+  vtkGetMacro(RequireDepthBufferFloat, bool);
+  vtkGetMacro(SupportsDepthBufferFloat, bool);
+
+  // Description:
+  // Optionally require support for floating point texture
+  // formats. If supported, the extensions will be loaded; however,
+  // loading will fail if the extension is required but not
+  // available.
+  vtkSetMacro(RequireTextureFloat,bool);
+  vtkGetMacro(RequireTextureFloat,bool);
+  vtkGetMacro(SupportsTextureFloat,bool);
+
+  // Description:
+  // Optionally require support for integer texture
+  // formats. If supported, the extensions will be loaded; however,
+  // loading will fail if the extension is required but not
+  // available.
+  vtkSetMacro(RequireTextureInteger,bool);
+  vtkGetMacro(RequireTextureInteger,bool);
+  vtkGetMacro(SupportsTextureInteger,bool);
+
+  // Description:
   // Wrap mode for the first texture coordinate "s"
   // Valid values are:
   // - Clamp
@@ -292,14 +335,27 @@ public:
   vtkSetMacro(MinificationFilter,int);
 
   // Description:
+  // Magnification filter mode.
+  // Valid values are:
+  // - Nearest
+  // - Linear
+  // Initial value is Nearest
+  vtkGetMacro(MagnificationFilter,int);
+  vtkSetMacro(MagnificationFilter,int);
+
+  // Description:
   // Tells if the magnification mode is linear (true) or nearest (false).
   // Initial value is false (initial value in OpenGL spec is true).
-  vtkGetMacro(LinearMagnification,bool);
-  vtkSetMacro(LinearMagnification,bool);
+  void SetLinearMagnification(bool val)
+  { this->SetMagnificationFilter(val?Linear:Nearest); }
+
+  bool GetLinearMagnification()
+  { return this->MagnificationFilter==Linear; }
 
   // Description:
-  // Border Color (RGBA). Each component is in [0.0f,1.0f].
-  // Initial value is (0.0f,0.0f,0.0f,0.0f), as in OpenGL spec.
+  // Border Color (RGBA). The components can be any valid float value,
+  // if the GPU supports it. Initial value is (0.0f,0.0f,0.0f,0.0f),
+  // as in the OpenGL spec.
   vtkSetVector4Macro(BorderColor,float);
   vtkGetVector4Macro(BorderColor,float);
 
@@ -389,8 +445,19 @@ public:
   vtkSetMacro(GenerateMipmap,bool);
 
   // Description:
-  // Returns if the context supports the required extensions.
-  static bool IsSupported(vtkRenderWindow* renWin);
+  // Returns whether the context supports the required extensions. If flags
+  // for optional extensions are set, the test fails when support
+  // for them is not found.
+  static bool IsSupported(
+        vtkRenderWindow* renWin,
+        bool requireTexFloat,
+        bool requireDepthFloat,
+        bool requireTexInt);
+
+  // Description:
+  // Check for feature support, without any optional features.
+  static bool IsSupported(vtkRenderWindow* renWin)
+    { return vtkTextureObject::IsSupported(renWin, false, false, false); }
 
   // Description:
   // Copy a sub-part of the texture (src) in the current framebuffer
@@ -445,6 +512,7 @@ public:
                            int height);
 
 
+
 //BTX
 protected:
   vtkTextureObject();
@@ -452,7 +520,7 @@ protected:
 
   // Description:
   // Load all necessary extensions.
-  bool LoadRequiredExtensions(vtkOpenGLExtensionManager*);
+  bool LoadRequiredExtensions(vtkRenderWindow *renWin);
 
   // Description:
   // Creates a texture handle if not already created.
@@ -474,13 +542,18 @@ protected:
 
   vtkWeakPointer<vtkRenderWindow> Context;
   unsigned int Handle;
+  bool RequireTextureInteger;
   bool SupportsTextureInteger;
+  bool RequireTextureFloat;
   bool SupportsTextureFloat;
+  bool RequireDepthBufferFloat;
+  bool SupportsDepthBufferFloat;
 
   int WrapS;
   int WrapT;
   int WrapR;
   int MinificationFilter;
+  int MagnificationFilter;
   bool LinearMagnification;
   float BorderColor[4];
 
@@ -497,6 +570,7 @@ protected:
 
   bool GenerateMipmap;
 
+  int AutoParameters;
   vtkTimeStamp SendParametersTime;
 
 private:
@@ -506,5 +580,3 @@ private:
 };
 
 #endif
-
-
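A short usage sketch of the vtkTextureObject API added above: the
IsSupported overload with optional-feature flags, AutoParameters, linear
magnification, and Activate/Deactivate. The render window variable renWin,
the texture size, and the texture unit chosen here are placeholders, not
part of the patch:

  // Assumes renWin is a vtkRenderWindow whose OpenGL context is current.
  #include "vtkTextureObject.h"
  #include "vtkgl.h" // vtkgl::TEXTURE0

  if (vtkTextureObject::IsSupported(renWin,
                                    /*requireTexFloat=*/true,
                                    /*requireDepthFloat=*/false,
                                    /*requireTexInt=*/false))
    {
    vtkTextureObject *tex = vtkTextureObject::New();
    tex->SetContext(renWin);
    tex->SetLinearMagnification(true);     // forwards to SetMagnificationFilter(Linear)
    tex->SetAutoParameters(1);             // parameters re-sent by Bind() when modified
    tex->Allocate2D(256, 256, 4, VTK_UNSIGNED_CHAR);
    tex->Activate(vtkgl::TEXTURE0);        // select the unit and bind
    // ... render using the texture ...
    tex->Deactivate(vtkgl::TEXTURE0);
    tex->Delete();
    }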
diff --git a/Rendering/OpenGL/vtkTextureUnitManager.cxx b/Rendering/OpenGL/vtkTextureUnitManager.cxx
index efd9be6..74f2da6 100644
--- a/Rendering/OpenGL/vtkTextureUnitManager.cxx
+++ b/Rendering/OpenGL/vtkTextureUnitManager.cxx
@@ -19,7 +19,7 @@
 #include "vtkOpenGLRenderWindow.h"
 #include "vtkOpenGLHardwareSupport.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkTextureUnitManager);
 
diff --git a/Rendering/OpenGL/vtkTranslucentPass.cxx b/Rendering/OpenGL/vtkTranslucentPass.cxx
index e33ab85..c79023d 100644
--- a/Rendering/OpenGL/vtkTranslucentPass.cxx
+++ b/Rendering/OpenGL/vtkTranslucentPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkTranslucentPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkTranslucentPass);
 
diff --git a/Rendering/OpenGL/vtkUniformVariables.cxx b/Rendering/OpenGL/vtkUniformVariables.cxx
index 03671da..62251a0 100644
--- a/Rendering/OpenGL/vtkUniformVariables.cxx
+++ b/Rendering/OpenGL/vtkUniformVariables.cxx
@@ -15,8 +15,9 @@
 
 #include "vtkUniformVariables.h"
 #include "vtkgl.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkObjectFactory.h"
+#include "vtkOpenGLError.h"
 
 #include <vtksys/stl/map>
 
@@ -168,6 +169,7 @@ public:
                            this->Values[2],this->Values[3]);
           break;
         }
+      vtkOpenGLStaticCheckErrorMacro("failed at glUniform*i");
     }
 
   virtual void PrintSelf(ostream &os, vtkIndent indent)
@@ -260,6 +262,7 @@ public:
                            this->Values[2],this->Values[3]);
           break;
         }
+      vtkOpenGLStaticCheckErrorMacro("failed at glUniform*f");
     }
 
   virtual void PrintSelf(ostream &os, vtkIndent indent)
@@ -375,6 +378,7 @@ public:
           vtkgl::Uniform4iv(location,this->ArraySize,this->Values);
           break;
         }
+      vtkOpenGLStaticCheckErrorMacro("failed at glUniform*iv");
     }
 
   virtual void PrintSelf(ostream &os, vtkIndent indent)
@@ -480,6 +484,7 @@ public:
           vtkgl::Uniform4fv(location,this->ArraySize,this->Values);
           break;
         }
+      vtkOpenGLStaticCheckErrorMacro("failed at glUniform*fv");
     }
 
   virtual void PrintSelf(ostream &os, vtkIndent indent)
@@ -628,6 +633,7 @@ public:
             }
           break;
         }
+      vtkOpenGLStaticCheckErrorMacro("failed at glUniformMatrix*fv");
     }
 
   virtual void PrintSelf(ostream &os, vtkIndent indent)
diff --git a/Rendering/OpenGL/vtkUniformVariables.h b/Rendering/OpenGL/vtkUniformVariables.h
index 27591fc..4f4956a 100644
--- a/Rendering/OpenGL/vtkUniformVariables.h
+++ b/Rendering/OpenGL/vtkUniformVariables.h
@@ -45,6 +45,17 @@ public:
                    int numberOfComponents,
                    int *value);
 
+  //BTX
+  template<typename T>
+  void SetUniformit(const char *name,
+                   int numberOfComponents,
+                   T *value);
+
+  template<typename T>
+  void SetUniformit(const char *name, T value)
+  { this->SetUniformit(name, 1, &value); }
+  //ETX
+
   // Description:
   // Set a float uniform variable.
   // \pre name_exists: name!=0
@@ -54,6 +65,17 @@ public:
                    int numberOfComponents,
                    float *value);
 
+  //BTX
+  template<typename T>
+  void SetUniformft(const char *name,
+                   int numberOfComponents,
+                   T *value);
+
+  template<typename T>
+  void SetUniformft(const char *name, T value)
+  { this->SetUniformft(name, 1, &value); }
+  //ETX
+
   // Description:
   // Set an array of integer uniform variables.
   // The array `value' is of size `numberOfElements'*`numberOfComponents.'.
@@ -149,4 +171,35 @@ private:
 
   vtkUniformVariablesMap *Map;
 };
+
+//BTX
+// ----------------------------------------------------------------------------
+template<typename T>
+void vtkUniformVariables::SetUniformit(const char *name,
+                   int numberOfComponents,
+                   T *value)
+{
+  int ivalues[4];
+  for (int i=0; i<numberOfComponents; ++i)
+    {
+    ivalues[i] = static_cast<int>(value[i]);
+    }
+  this->SetUniformi(name, numberOfComponents, ivalues);
+}
+
+// ----------------------------------------------------------------------------
+template<typename T>
+void vtkUniformVariables::SetUniformft(const char *name,
+                   int numberOfComponents,
+                   T *value)
+{
+  float fvalues[4];
+  for (int i=0; i<numberOfComponents; ++i)
+    {
+    fvalues[i] = static_cast<float>(value[i]);
+    }
+  this->SetUniformf(name, numberOfComponents, fvalues);
+}
+//ETX
+
 #endif
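A brief usage sketch for the SetUniformit/SetUniformft templates added
above; they cast caller-side values to the int/float storage expected by
SetUniformi/SetUniformf. The uniform names used here are illustrative:

  #include "vtkUniformVariables.h"

  vtkUniformVariables *uniforms = vtkUniformVariables::New();

  bool enabled = true;
  uniforms->SetUniformit("uEnabled", enabled);   // bool -> int uniform

  double scale[3] = {0.5, 1.0, 2.0};
  uniforms->SetUniformft("uScale", 3, scale);    // double[3] -> float uniform

  uniforms->Delete();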
diff --git a/Rendering/OpenGL/vtkVolumetricPass.cxx b/Rendering/OpenGL/vtkVolumetricPass.cxx
index 479cb93..4cc0fdc 100644
--- a/Rendering/OpenGL/vtkVolumetricPass.cxx
+++ b/Rendering/OpenGL/vtkVolumetricPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkVolumetricPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkVolumetricPass);
 
diff --git a/Rendering/OpenGL/vtkWin32OpenGLRenderWindow.cxx b/Rendering/OpenGL/vtkWin32OpenGLRenderWindow.cxx
index 4b3e6f0..6ca0c0b 100644
--- a/Rendering/OpenGL/vtkWin32OpenGLRenderWindow.cxx
+++ b/Rendering/OpenGL/vtkWin32OpenGLRenderWindow.cxx
@@ -24,6 +24,8 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkOpenGLProperty.h"
 #include "vtkOpenGLRenderer.h"
 #include "vtkOpenGLTexture.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 #include "vtkRendererCollection.h"
 #include "vtkWin32RenderWindowInteractor.h"
 
@@ -31,6 +33,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include <vtksys/ios/sstream>
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 #include "vtkgl.h"
 
 vtkStandardNewMacro(vtkWin32OpenGLRenderWindow);
@@ -42,7 +45,6 @@ vtkWin32OpenGLRenderWindow::vtkWin32OpenGLRenderWindow()
   this->ApplicationInstance =  NULL;
   this->Palette = NULL;
   this->ContextId = 0;
-  this->MultiSamples = 8;
   this->WindowId = 0;
   this->ParentId = 0;
   this->NextWindowId = 0;
@@ -88,6 +90,7 @@ void vtkWin32OpenGLRenderWindow::Clean()
   if (this->OwnContext && this->ContextId)
     {
     this->MakeCurrent();
+    vtkOpenGLClearErrorMacro();
 
      /* first delete all the old lights */
     for (short cur_light = GL_LIGHT0; cur_light < GL_LIGHT0+VTK_MAX_LIGHTS; cur_light++)
@@ -112,6 +115,7 @@ void vtkWin32OpenGLRenderWindow::Clean()
         }
 #endif
       }
+    vtkOpenGLCheckErrorMacro("failed in Clean");
 
     this->CleanUpRenderers();
 
@@ -149,7 +153,6 @@ void vtkWin32OpenGLRenderWindow::CleanUpRenderers()
        (ren = this->Renderers->GetNextRenderer(rsit));)
     {
     ren->SetRenderWindow(NULL);
-    ren->SetRenderWindow(this);
     }
 }
 
@@ -645,7 +648,7 @@ void vtkWin32OpenGLRenderWindow::SetupPixelFormat(HDC hDC, DWORD dwFlags,
   wglMakeCurrent(tempDC, 0);
   wglDeleteContext(tempContext);
   ReleaseDC(tempId, tempDC);
-  ::DestroyWindow(tempId);
+  ::DestroyWindow(tempId); // Windows API
 
   // If we got a valid pixel format in the process, we are done.
   // Otherwise, we use the old approach of using ChoosePixelFormat.
@@ -1144,7 +1147,7 @@ void vtkWin32OpenGLRenderWindow::DestroyWindow()
         vtkSetWindowLong(this->WindowId,sizeof(vtkLONG),(vtkLONG)0);
         if(this->OwnWindow)
           {
-          ::DestroyWindow(this->WindowId);
+          ::DestroyWindow(this->WindowId); // Windows API
           this->WindowId=0;
           }
         }
@@ -1522,7 +1525,14 @@ void vtkWin32OpenGLRenderWindow::CreateOffScreenDC(HBITMAP hbmp, HDC aHdc)
   SelectObject(this->MemoryHdc, this->MemoryBuffer);
 
   // Renderers will need to redraw anything cached in display lists
-  this->CleanUpRenderers();
+  vtkRenderer *ren;
+  vtkCollectionSimpleIterator rsit;
+  for (this->Renderers->InitTraversal(rsit);
+       (ren = this->Renderers->GetNextRenderer(rsit));)
+    {
+    ren->SetRenderWindow(NULL);
+    ren->SetRenderWindow(this);
+    }
 
   // adjust settings for renderwindow
   this->Mapped =0;
@@ -1617,7 +1627,15 @@ void vtkWin32OpenGLRenderWindow::ResumeScreenRendering(void)
   if(this->ContextId!=0)
     {
       this->MakeCurrent();
-      this->CleanUpRenderers();
+      // Renderers will need to redraw anything cached in display lists
+      vtkRenderer *ren;
+      vtkCollectionSimpleIterator rsit;
+      for (this->Renderers->InitTraversal(rsit);
+           (ren = this->Renderers->GetNextRenderer(rsit));)
+        {
+        ren->SetRenderWindow(NULL);
+        ren->SetRenderWindow(this);
+        }
     }
 
   this->Mapped = this->ScreenMapped;
diff --git a/Rendering/OpenGL/vtkXGPUInfoList.cxx b/Rendering/OpenGL/vtkXGPUInfoList.cxx
index 12277a7..9e4d062 100644
--- a/Rendering/OpenGL/vtkXGPUInfoList.cxx
+++ b/Rendering/OpenGL/vtkXGPUInfoList.cxx
@@ -17,7 +17,7 @@
 #include "vtkGPUInfoListArray.h"
 
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 
 #include <X11/Xlib.h> // Display structure, XOpenDisplay(), XScreenCount()
 
diff --git a/Rendering/OpenGL/vtkXOpenGLRenderWindow.cxx b/Rendering/OpenGL/vtkXOpenGLRenderWindow.cxx
index 3fcbf5b..997857f 100644
--- a/Rendering/OpenGL/vtkXOpenGLRenderWindow.cxx
+++ b/Rendering/OpenGL/vtkXOpenGLRenderWindow.cxx
@@ -319,8 +319,8 @@ XVisualInfo *vtkXOpenGLRenderWindow::GetDesiredVisualInfo()
     if (this->DisplayId == NULL)
       {
       vtkErrorMacro(<< "bad X server connection. DISPLAY="
-        << vtksys::SystemTools::GetEnv("DISPLAY") << "\n");
-      return NULL;
+        << vtksys::SystemTools::GetEnv("DISPLAY") << ". Aborting.\n");
+      abort();
       }
 
     this->OwnDisplay = 1;
@@ -528,7 +528,8 @@ void vtkXOpenGLRenderWindow::CreateAWindow()
     if (this->DisplayId == NULL)
       {
       vtkErrorMacro(<< "bad X server connection. DISPLAY="
-        << vtksys::SystemTools::GetEnv("DISPLAY") << "\n");
+        << vtksys::SystemTools::GetEnv("DISPLAY") << ". Aborting.\n");
+      abort();
       }
     this->OwnDisplay = 1;
     }
@@ -833,7 +834,8 @@ void vtkXOpenGLRenderWindow::CreateOffScreenWindow(int width, int height)
         if (this->DisplayId == NULL)
           {
           vtkErrorMacro(<< "bad X server connection. DISPLAY="
-            << vtksys::SystemTools::GetEnv("DISPLAY") << "\n");
+            << vtksys::SystemTools::GetEnv("DISPLAY") << ". Aborting.\n");
+          abort();
           }
         this->OwnDisplay = 1;
         }
@@ -1462,7 +1464,8 @@ int *vtkXOpenGLRenderWindow::GetScreenSize()
     if (this->DisplayId == NULL)
       {
       vtkErrorMacro(<< "bad X server connection. DISPLAY="
-        << vtksys::SystemTools::GetEnv("DISPLAY") << "\n");
+        << vtksys::SystemTools::GetEnv("DISPLAY") << ". Aborting.\n");
+      abort();
       }
     else
       {
@@ -1585,7 +1588,8 @@ void vtkXOpenGLRenderWindow::SetWindowInfo(char *info)
     if (this->DisplayId == NULL)
       {
       vtkErrorMacro(<< "bad X server connection. DISPLAY="
-        << vtksys::SystemTools::GetEnv("DISPLAY") << "\n");
+        << vtksys::SystemTools::GetEnv("DISPLAY") << ". Aborting.\n");
+      abort();
       }
     else
       {
@@ -1619,7 +1623,8 @@ void vtkXOpenGLRenderWindow::SetParentInfo(char *info)
     if (this->DisplayId == NULL)
       {
       vtkErrorMacro(<< "bad X server connection. DISPLAY="
-        << vtksys::SystemTools::GetEnv("DISPLAY") << "\n");
+        << vtksys::SystemTools::GetEnv("DISPLAY") << ". Aborting.\n");
+      abort();
       }
     else
       {
diff --git a/Rendering/Parallel/Testing/Cxx/CMakeLists.txt b/Rendering/Parallel/Testing/Cxx/CMakeLists.txt
index 64737c9..65222cd 100644
--- a/Rendering/Parallel/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/Parallel/Testing/Cxx/CMakeLists.txt
@@ -1,6 +1,14 @@
-vtk_tests(BASELINEDIR Parallel PrmMagnify.cxx)
+include(vtkMPI)
+
+vtk_add_test_cxx(PrmMagnify.cxx)
+vtk_test_cxx_executable(${vtk-module}CxxTests)
 vtk_module_test_executable(TestClientServerRendering TestClientServerRendering.cxx)
-add_test_mpi(TestDistributedDataCompositeZPass.cxx DATADIR ${VTK_DATA_ROOT})
-add_test_mpi(TestPCompositeZPass.cxx DATADIR ${VTK_DATA_ROOT})
-add_test_mpi(TestPShadowMapPass.cxx DATADIR ${VTK_DATA_ROOT})
-add_test_mpi(TestParallelRendering.cxx)
+vtk_add_test_mpi(TestDistributedDataCompositeZPass.cxx TESTING_DATA)
+vtk_add_test_mpi(TestPCompositeZPass.cxx TESTING_DATA)
+vtk_add_test_mpi(TestPShadowMapPass.cxx TESTING_DATA)
+vtk_add_test_mpi(TestParallelRendering.cxx)
+
+vtk_mpi_link(TestDistributedDataCompositeZPass)
+vtk_mpi_link(TestPCompositeZPass)
+vtk_mpi_link(TestPShadowMapPass)
+vtk_mpi_link(TestParallelRendering)
diff --git a/Rendering/Parallel/Testing/Cxx/TestDistributedDataCompositeZPass.cxx b/Rendering/Parallel/Testing/Cxx/TestDistributedDataCompositeZPass.cxx
index 9d82e35..7266d0d 100644
--- a/Rendering/Parallel/Testing/Cxx/TestDistributedDataCompositeZPass.cxx
+++ b/Rendering/Parallel/Testing/Cxx/TestDistributedDataCompositeZPass.cxx
@@ -66,7 +66,7 @@
 #include "vtkProperty.h"
 #include "vtkLight.h"
 #include "vtkLightCollection.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkFrustumSource.h"
 #include "vtkPlanes.h"
@@ -137,7 +137,6 @@ void MyProcess::Execute()
     }
 
   vtkRenderWindow *renWin = prm->MakeRenderWindow();
-  renWin->SetReportGraphicErrors(true);
   renWin->SetMultiSamples(0);
 
   renWin->SetAlphaBitPlanes(1);
diff --git a/Rendering/Parallel/Testing/Cxx/TestPCompositeZPass.cxx b/Rendering/Parallel/Testing/Cxx/TestPCompositeZPass.cxx
index 766dacb..7d9095e 100644
--- a/Rendering/Parallel/Testing/Cxx/TestPCompositeZPass.cxx
+++ b/Rendering/Parallel/Testing/Cxx/TestPCompositeZPass.cxx
@@ -66,7 +66,7 @@
 #include "vtkProperty.h"
 #include "vtkLight.h"
 #include "vtkLightCollection.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkFrustumSource.h"
 #include "vtkPlanes.h"
@@ -137,7 +137,6 @@ void MyProcess::Execute()
     }
 
   vtkRenderWindow *renWin = prm->MakeRenderWindow();
-  renWin->SetReportGraphicErrors(true);
   renWin->SetMultiSamples(0);
 
   renWin->SetAlphaBitPlanes(1);
diff --git a/Rendering/Parallel/Testing/Cxx/TestPShadowMapPass.cxx b/Rendering/Parallel/Testing/Cxx/TestPShadowMapPass.cxx
index d90be9e..d3ee675 100644
--- a/Rendering/Parallel/Testing/Cxx/TestPShadowMapPass.cxx
+++ b/Rendering/Parallel/Testing/Cxx/TestPShadowMapPass.cxx
@@ -66,7 +66,7 @@
 #include "vtkProperty.h"
 #include "vtkLight.h"
 #include "vtkLightCollection.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkMath.h"
 #include "vtkFrustumSource.h"
 #include "vtkPlanes.h"
@@ -133,7 +133,6 @@ void MyProcess::Execute()
     }
 
   vtkRenderWindow *renWin = prm->MakeRenderWindow();
-  renWin->SetReportGraphicErrors(true);
   renWin->SetMultiSamples(0);
 
   renWin->SetAlphaBitPlanes(1);
diff --git a/Rendering/Parallel/Testing/Data/Baseline/PrmMagnify.png.md5 b/Rendering/Parallel/Testing/Data/Baseline/PrmMagnify.png.md5
new file mode 100644
index 0000000..d4676e0
--- /dev/null
+++ b/Rendering/Parallel/Testing/Data/Baseline/PrmMagnify.png.md5
@@ -0,0 +1 @@
+376c33c24f4a3cf6bb7eced5c4ec33c3
diff --git a/Rendering/Parallel/Testing/Data/Baseline/TestDistributedDataCompositeZPass.png.md5 b/Rendering/Parallel/Testing/Data/Baseline/TestDistributedDataCompositeZPass.png.md5
new file mode 100644
index 0000000..4c4dd74
--- /dev/null
+++ b/Rendering/Parallel/Testing/Data/Baseline/TestDistributedDataCompositeZPass.png.md5
@@ -0,0 +1 @@
+8a23cdc6ce7985a0a112e5bf6d725dbf
diff --git a/Rendering/Parallel/Testing/Data/Baseline/TestPCompositeZPass.png.md5 b/Rendering/Parallel/Testing/Data/Baseline/TestPCompositeZPass.png.md5
new file mode 100644
index 0000000..0778602
--- /dev/null
+++ b/Rendering/Parallel/Testing/Data/Baseline/TestPCompositeZPass.png.md5
@@ -0,0 +1 @@
+d90f244f47f9f75f707d6e633e65cd80
diff --git a/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass.png.md5 b/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass.png.md5
new file mode 100644
index 0000000..61decfe
--- /dev/null
+++ b/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass.png.md5
@@ -0,0 +1 @@
+4a086ad4635e7756e172b5e005bb0998
diff --git a/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass_1.png.md5 b/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass_1.png.md5
new file mode 100644
index 0000000..b208f02
--- /dev/null
+++ b/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass_1.png.md5
@@ -0,0 +1 @@
+59c27dd4eb2b935d4a544fb6bb8007c6
diff --git a/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass_2.png.md5 b/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass_2.png.md5
new file mode 100644
index 0000000..3b6bc29
--- /dev/null
+++ b/Rendering/Parallel/Testing/Data/Baseline/TestPShadowMapPass_2.png.md5
@@ -0,0 +1 @@
+5ca22421ab721cabf9d2440bb70bd701
diff --git a/Rendering/Parallel/module.cmake b/Rendering/Parallel/module.cmake
index 2419a7a..dc42df9 100644
--- a/Rendering/Parallel/module.cmake
+++ b/Rendering/Parallel/module.cmake
@@ -3,6 +3,8 @@ vtk_module(vtkRenderingParallel
     vtkParallelCore
     vtkFiltersParallel
     vtkRenderingOpenGL
+  PRIVATE_DEPENDS
+    vtkIOImage
   TEST_DEPENDS
     vtkParallelMPI
     vtkFiltersParallelMPI
diff --git a/Rendering/Parallel/vtkClientServerSynchronizedRenderers.cxx b/Rendering/Parallel/vtkClientServerSynchronizedRenderers.cxx
index fec445f..37014fb 100644
--- a/Rendering/Parallel/vtkClientServerSynchronizedRenderers.cxx
+++ b/Rendering/Parallel/vtkClientServerSynchronizedRenderers.cxx
@@ -17,7 +17,7 @@
 #include "vtkObjectFactory.h"
 #include "vtkMultiProcessController.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkClientServerSynchronizedRenderers);
 //----------------------------------------------------------------------------
diff --git a/Rendering/Parallel/vtkCompositeRGBAPass.cxx b/Rendering/Parallel/vtkCompositeRGBAPass.cxx
index 682b820..bd6f559 100644
--- a/Rendering/Parallel/vtkCompositeRGBAPass.cxx
+++ b/Rendering/Parallel/vtkCompositeRGBAPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkCompositeRGBAPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkOpenGLRenderer.h"
 #include "vtkgl.h"
@@ -26,6 +26,7 @@
 #include "vtkShader2Collection.h"
 #include "vtkUniformVariables.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLExtensionManager.h"
 #include "vtkTextureUnitManager.h"
 
 // to be able to dump intermediate result into png files for debugging.
@@ -127,8 +128,14 @@ void vtkCompositeRGBAPass::PrintSelf(ostream& os, vtkIndent indent)
 // ----------------------------------------------------------------------------
 bool vtkCompositeRGBAPass::IsSupported(vtkOpenGLRenderWindow *context)
 {
-  return vtkFrameBufferObject::IsSupported(context)
-    && vtkTextureObject::IsSupported(context);
+  vtkOpenGLExtensionManager *extmgr = context->GetExtensionManager();
+
+  bool fbo_support=vtkFrameBufferObject::IsSupported(context);
+  bool texture_support
+     =  vtkTextureObject::IsSupported(context)
+       && (extmgr->ExtensionSupported("GL_ARB_texture_float")==1);
+
+  return fbo_support && texture_support;
 }
 
 // ----------------------------------------------------------------------------
@@ -162,27 +169,19 @@ void vtkCompositeRGBAPass::Render(const vtkRenderState *s)
 
   const int VTK_COMPOSITE_RGBA_PASS_MESSAGE_GATHER=201;
 
-  vtkOpenGLRenderer *r=static_cast<vtkOpenGLRenderer *>(s->GetRenderer());
-  vtkOpenGLRenderWindow *context=static_cast<vtkOpenGLRenderWindow *>(
-    r->GetRenderWindow());
+  vtkOpenGLRenderer *r
+    = static_cast<vtkOpenGLRenderer *>(s->GetRenderer());
 
-  // Test for Hardware support. If not supported, return.
-  bool supported=vtkFrameBufferObject::IsSupported(context);
+  vtkOpenGLRenderWindow *context
+    = static_cast<vtkOpenGLRenderWindow *>(r->GetRenderWindow());
 
-  if(!supported)
+  if (!this->IsSupported(context))
     {
-    vtkErrorMacro("FBOs are not supported by the context. Cannot perform rgba-compositing.");
+    vtkErrorMacro(
+      << "Missing required OpenGL extensions. "
+      << "Cannot perform rgba-compositing.");
     return;
     }
-  if(supported)
-    {
-    supported=vtkTextureObject::IsSupported(context);
-    if(!supported)
-      {
-      vtkErrorMacro("Texture Objects are not supported by the context. Cannot perform rgba-compositing.");
-      return;
-      }
-    }
 
 #ifdef VTK_COMPOSITE_RGBAPASS_DEBUG
   vtkOpenGLState *state=new vtkOpenGLState(context);
@@ -204,8 +203,8 @@ void vtkCompositeRGBAPass::Render(const vtkRenderState *s)
     h=size[1];
     }
 
-  unsigned int byteSize=static_cast<unsigned int>(w*h*4)
-    *static_cast<unsigned int>(sizeof(float));
+  int numComps = 4;
+  unsigned int numTups = w*h;
 
   // pbo arguments.
   unsigned int dims[2];
@@ -270,10 +269,13 @@ void vtkCompositeRGBAPass::Render(const vtkRenderState *s)
     // for debugging only.
 
     // Framebuffer to PBO
-    this->PBO->Allocate(byteSize);
-    cout << "after pbo allocate." << endl;
+    this->PBO->Allocate(
+          VTK_FLOAT,
+          numTups,
+          numComps,
+          vtkPixelBufferObject::PACKED_BUFFER);
+
     this->PBO->Bind(vtkPixelBufferObject::PACKED_BUFFER);
-    cout << "after pbo bind." << endl;
     glReadPixels(0,0,w,h,GL_RGBA,GL_FLOAT,
                  static_cast<GLfloat *>(NULL));
     cout << "after readpixel." << endl;
@@ -438,7 +440,12 @@ void vtkCompositeRGBAPass::Render(const vtkRenderState *s)
     // for debugging only.
 
     // Framebuffer to PBO
-    this->PBO->Allocate(byteSize);
+    this->PBO->Allocate(
+          VTK_FLOAT,
+          numTups,
+          numComps,
+          vtkPixelBufferObject::PACKED_BUFFER);
+
     this->PBO->Bind(vtkPixelBufferObject::PACKED_BUFFER);
     glReadPixels(0,0,w,h,GL_RGBA,GL_FLOAT,
                  static_cast<GLfloat *>(NULL));
@@ -496,7 +503,12 @@ void vtkCompositeRGBAPass::Render(const vtkRenderState *s)
     // send rgba-buffer
 
     // framebuffer to PBO.
-    this->PBO->Allocate(byteSize,VTK_FLOAT);
+    this->PBO->Allocate(
+          VTK_FLOAT,
+          numTups,
+          numComps,
+          vtkPixelBufferObject::PACKED_BUFFER);
+
     this->PBO->Bind(vtkPixelBufferObject::PACKED_BUFFER);
     glReadPixels(0,0,w,h,GL_RGBA,GL_FLOAT,
                  static_cast<GLfloat *>(NULL));
diff --git a/Rendering/Parallel/vtkCompositeZPass.cxx b/Rendering/Parallel/vtkCompositeZPass.cxx
index 4c740a9..2bfc6fe 100644
--- a/Rendering/Parallel/vtkCompositeZPass.cxx
+++ b/Rendering/Parallel/vtkCompositeZPass.cxx
@@ -15,7 +15,7 @@
 
 #include "vtkCompositeZPass.h"
 #include "vtkObjectFactory.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkRenderState.h"
 #include "vtkOpenGLRenderer.h"
 #include "vtkgl.h"
@@ -197,8 +197,7 @@ void vtkCompositeZPass::Render(const vtkRenderState *s)
     h=size[1];
     }
 
-  unsigned int byteSize=static_cast<unsigned int>(w*h)
-    *static_cast<unsigned int>(sizeof(float));
+  unsigned int numTups = static_cast<unsigned int>(w*h);
 
   // pbo arguments.
   unsigned int dims[2];
@@ -262,7 +261,12 @@ void vtkCompositeZPass::Render(const vtkRenderState *s)
     // for debugging only.
 
     // Framebuffer to PBO
-    this->PBO->Allocate(byteSize,VTK_FLOAT);
+    this->PBO->Allocate(
+          VTK_FLOAT,
+          numTups,
+          1,
+          vtkPixelBufferObject::PACKED_BUFFER);
+
     this->PBO->Bind(vtkPixelBufferObject::PACKED_BUFFER);
     glReadPixels(0,0,w,h,GL_DEPTH_COMPONENT,GL_FLOAT,
                  static_cast<GLfloat *>(NULL));
@@ -518,9 +522,12 @@ void vtkCompositeZPass::Render(const vtkRenderState *s)
     // Send the final z-buffer from the framebuffer to a PBO
     // TODO
 
+    this->PBO->Allocate(
+          VTK_FLOAT,
+          numTups,
+          1,
+          vtkPixelBufferObject::PACKED_BUFFER);
 
-
-    this->PBO->Allocate(byteSize,VTK_FLOAT);
     this->PBO->Bind(vtkPixelBufferObject::PACKED_BUFFER);
     glReadPixels(0,0,w,h,GL_DEPTH_COMPONENT,GL_FLOAT,
                  static_cast<GLfloat *>(NULL));
@@ -586,7 +593,12 @@ void vtkCompositeZPass::Render(const vtkRenderState *s)
     // 2. receive final z-buffer and copy it
 
     // framebuffer to PBO.
-    this->PBO->Allocate(byteSize,VTK_FLOAT);
+    this->PBO->Allocate(
+          VTK_FLOAT,
+          numTups,
+          1,
+          vtkPixelBufferObject::PACKED_BUFFER);
+
     this->PBO->Bind(vtkPixelBufferObject::PACKED_BUFFER);
     glReadPixels(0,0,w,h,GL_DEPTH_COMPONENT,GL_FLOAT,
                  static_cast<GLfloat *>(NULL));
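The vtkCompositeRGBAPass and vtkCompositeZPass hunks above switch from the
old byte-count form of vtkPixelBufferObject::Allocate(byteSize, VTK_FLOAT)
to the typed form taking (vtkType, numTuples, numComps, mode). A minimal
sketch of reading back a depth buffer with the new signature; renWin and
the viewport size are placeholders:

  #include "vtkPixelBufferObject.h"
  #include "vtkOpenGL.h" // glReadPixels, GL_DEPTH_COMPONENT, GL_FLOAT

  int w = 512, h = 512;
  vtkPixelBufferObject *pbo = vtkPixelBufferObject::New();
  pbo->SetContext(renWin);             // render window with a current context

  pbo->Allocate(VTK_FLOAT,
                w*h,                   // number of tuples
                1,                     // components per tuple (depth only)
                vtkPixelBufferObject::PACKED_BUFFER);

  pbo->Bind(vtkPixelBufferObject::PACKED_BUFFER);
  glReadPixels(0, 0, w, h, GL_DEPTH_COMPONENT, GL_FLOAT,
               static_cast<GLfloat *>(NULL));
  pbo->UnBind();
  pbo->Delete();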
diff --git a/Rendering/Parallel/vtkPHardwareSelector.cxx b/Rendering/Parallel/vtkPHardwareSelector.cxx
index ac9cd1b..bb1c20b 100644
--- a/Rendering/Parallel/vtkPHardwareSelector.cxx
+++ b/Rendering/Parallel/vtkPHardwareSelector.cxx
@@ -38,7 +38,9 @@ public:
   vtkPHardwareSelector* Target;
 };
 
+//----------------------------------------------------------------------------
 vtkStandardNewMacro(vtkPHardwareSelector);
+
 //----------------------------------------------------------------------------
 vtkPHardwareSelector::vtkPHardwareSelector()
 {
@@ -92,7 +94,6 @@ void vtkPHardwareSelector::StartRender()
 //----------------------------------------------------------------------------
 void vtkPHardwareSelector::EndRender()
 {
-
   this->CurrentPass++;
   for (; this->CurrentPass < MAX_KNOWN_PASS; this->CurrentPass++)
     {
diff --git a/Rendering/Parallel/vtkPHardwareSelector.h b/Rendering/Parallel/vtkPHardwareSelector.h
index bb8d074..c26be53 100644
--- a/Rendering/Parallel/vtkPHardwareSelector.h
+++ b/Rendering/Parallel/vtkPHardwareSelector.h
@@ -28,13 +28,13 @@
 #define __vtkPHardwareSelector_h
 
 #include "vtkRenderingParallelModule.h" // For export macro
-#include "vtkHardwareSelector.h"
+#include "vtkOpenGLHardwareSelector.h"
 
-class VTKRENDERINGPARALLEL_EXPORT vtkPHardwareSelector : public vtkHardwareSelector
+class VTKRENDERINGPARALLEL_EXPORT vtkPHardwareSelector : public vtkOpenGLHardwareSelector
 {
 public:
   static vtkPHardwareSelector* New();
-  vtkTypeMacro(vtkPHardwareSelector, vtkHardwareSelector);
+  vtkTypeMacro(vtkPHardwareSelector, vtkOpenGLHardwareSelector);
   void PrintSelf(ostream& os, vtkIndent indent);
 
   // Description:
diff --git a/Rendering/Parallel/vtkParallelRenderManager.h b/Rendering/Parallel/vtkParallelRenderManager.h
index 8db3f29..b083a20 100644
--- a/Rendering/Parallel/vtkParallelRenderManager.h
+++ b/Rendering/Parallel/vtkParallelRenderManager.h
@@ -455,8 +455,8 @@ protected:
   // bufferred sending of information over.
   virtual void SendWindowInformation() {}
   virtual void ReceiveWindowInformation() {}
-  virtual void SendRendererInformation(vtkRenderer *) {};
-  virtual void ReceiveRendererInformation(vtkRenderer *) {};
+  virtual void SendRendererInformation(vtkRenderer *) {}
+  virtual void ReceiveRendererInformation(vtkRenderer *) {}
 
   // Description:
   // Subclass should override these methods (instead of
diff --git a/Rendering/Parallel/vtkSynchronizedRenderers.cxx b/Rendering/Parallel/vtkSynchronizedRenderers.cxx
index ce06419..0ff3423 100644
--- a/Rendering/Parallel/vtkSynchronizedRenderers.cxx
+++ b/Rendering/Parallel/vtkSynchronizedRenderers.cxx
@@ -28,9 +28,10 @@
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
 #include "vtkOpenGLRenderer.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkgl.h"
-#include <assert.h>
+#include <cassert>
 
 
 //----------------------------------------------------------------------------
@@ -680,6 +681,8 @@ bool vtkSynchronizedRenderers::vtkRawImage::PushToFrameBuffer()
     return false;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   glPushAttrib(GL_ENABLE_BIT | GL_TRANSFORM_BIT| GL_TEXTURE_BIT);
   glMatrixMode(GL_MODELVIEW);
   glPushMatrix();
@@ -744,6 +747,8 @@ bool vtkSynchronizedRenderers::vtkRawImage::PushToFrameBuffer()
   glMatrixMode(GL_MODELVIEW);
   glPopMatrix();
   glPopAttrib();
+
+  vtkOpenGLStaticCheckErrorMacro("failed after PushToFrameBuffer");
   return true;
 }
 
diff --git a/Rendering/ParallelLIC/CMakeLists.txt b/Rendering/ParallelLIC/CMakeLists.txt
new file mode 100644
index 0000000..ef5d8e6
--- /dev/null
+++ b/Rendering/ParallelLIC/CMakeLists.txt
@@ -0,0 +1,102 @@
+set(Module_SRCS
+  vtkMPIPixelTT.cxx
+  vtkParallelTimer.cxx
+  vtkPPixelTransfer.cxx
+  vtkPLineIntegralConvolution2D.cxx
+  vtkPPainterCommunicator.cxx
+  vtkPSurfaceLICComposite.cxx
+  vtkPSurfaceLICPainter.cxx
+  ${CMAKE_CURRENT_BINARY_DIR}/vtkRenderingParallelLICObjectFactory.cxx
+  )
+
+set_source_files_properties(
+  vtkMPIPixelTT.cxx
+  vtkPPixelTransfer.cxx
+  vtkPLineIntegralConvolution2D.cxx
+  vtkPPainterCommunicator.cxx
+  vtkPSurfaceLICComposite.cxx
+  WRAP_EXCLUDE
+  )
+
+option(
+  VTK_RENDERINGPARALLELLIC_SURFACELICPAINTER_TIMER
+  "enable parallel timers for the surface lic painter"
+  OFF
+  )
+mark_as_advanced(VTK_RENDERINGPARALLELLIC_SURFACELICPAINTER_TIMER)
+option(
+  VTK_RENDERINGPARALLELLIC_LINEINTEGRALCONVLOLUTION2D_TIMER
+  "enable parallel timers for the 2d line integral convolution"
+  OFF
+  )
+mark_as_advanced(VTK_RENDERINGPARALLELLIC_LINEINTEGRALCONVLOLUTION2D_TIMER)
+if (VTK_RENDERINGPARALLELLIC_SURFACELICPAINTER_TIMER)
+  add_definitions("-DvtkSurfaceLICPainterTIME")
+endif()
+if (VTK_RENDERINGPARALLELLIC_LINEINTEGRALCONVLOLUTION2D_TIMER)
+  add_definitions("-DvtkLineIntegralConvolution2DTIME")
+endif()
+
+set(shader_files
+  vtkPSurfaceLICComposite_Comp.glsl
+  )
+
+unset(shader_h_files)
+foreach(file ${shader_files})
+  get_filename_component(file_we ${file} NAME_WE)
+  set(src ${CMAKE_CURRENT_SOURCE_DIR}/${file})
+  set(res ${CMAKE_CURRENT_BINARY_DIR}/${file_we}.cxx)
+  set(resh ${CMAKE_CURRENT_BINARY_DIR}/${file_we}.h)
+  list(APPEND shader_h_files ${resh})
+  add_custom_command(
+    OUTPUT ${res} ${resh}
+    DEPENDS ${src} vtkEncodeString
+    COMMAND vtkEncodeString
+    ARGS ${res} ${src} ${file_we}
+    --build-header
+    VTKRENDERINGPARALLELLIC_EXPORT
+    vtkRenderingParallelLICModule.h
+    )
+  list(APPEND Module_SRCS ${res})
+  set_source_files_properties(${file_we} WRAP_EXCLUDE)
+endforeach()
+
+# Now we need to generate the object factory for this module. Create a list of
+# overrides, and then generate the class that overrides those classes.
+macro(vtk_add_override base override)
+  list(APPEND vtk_module_overrides ${base})
+  set(vtk_module_${base}_override ${override})
+endmacro()
+vtk_add_override(vtkLineIntegralConvolution2D vtkPLineIntegralConvolution2D)
+vtk_add_override(vtkSurfaceLICComposite vtkPSurfaceLICComposite)
+vtk_add_override(vtkSurfaceLICPainter vtkPSurfaceLICPainter)
+
+# Now we iterate and create that class file...
+foreach(_class ${vtk_module_overrides})
+  set(_override ${vtk_module_${_class}_override})
+  set(_vtk_override_includes "${_vtk_override_includes}
+#include \"${_override}.h\"")
+  set(_vtk_override_creates "${_vtk_override_creates}
+VTK_CREATE_CREATE_FUNCTION(${_override})")
+  set(_vtk_override_do "${_vtk_override_do}
+  this->RegisterOverride(\"${_class}\",
+                         \"${_override}\",
+                         \"Override for ${vtk-module} module\", 1,
+                         vtkObjectFactoryCreate${_override});")
+endforeach()
+
+# Now let's create the object factory classes
+string(TOUPPER ${vtk-module} VTK-MODULE)
+configure_file(
+  ${VTK_CMAKE_DIR}/vtkObjectFactory.h.in
+  ${CMAKE_CURRENT_BINARY_DIR}/${vtk-module}ObjectFactory.h
+  )
+configure_file(
+  ${VTK_CMAKE_DIR}/vtkObjectFactory.cxx.in
+  ${CMAKE_CURRENT_BINARY_DIR}/${vtk-module}ObjectFactory.cxx
+  )
+
+include(vtkMPI)
+
+vtk_module_library(${vtk-module} ${Module_SRCS})
+vtk_mpi_link(${vtk-module})
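For context, the override loop above is what makes the parallel classes replace their serial counterparts at runtime: the strings it builds are substituted into VTK's vtkObjectFactory.h.in/.cxx.in templates, and the configured factory calls RegisterOverride once per vtk_add_override entry. A rough, hypothetical C++ sketch of what the generated factory amounts to (only the vtkPLineIntegralConvolution2D override is shown; the real file is generated, not hand-written):

  // Illustrative sketch only -- the actual source comes from configuring
  // vtkObjectFactory.cxx.in with the strings assembled in the loop above.
  #include "vtkObjectFactory.h"
  #include "vtkVersion.h"
  #include "vtkPLineIntegralConvolution2D.h"

  VTK_CREATE_CREATE_FUNCTION(vtkPLineIntegralConvolution2D)

  class vtkRenderingParallelLICObjectFactory : public vtkObjectFactory
  {
  public:
    vtkRenderingParallelLICObjectFactory()
      {
      // one RegisterOverride() per vtk_add_override() call above
      this->RegisterOverride("vtkLineIntegralConvolution2D",
                             "vtkPLineIntegralConvolution2D",
                             "Override for vtkRenderingParallelLIC module", 1,
                             vtkObjectFactoryCreatevtkPLineIntegralConvolution2D);
      }
    virtual const char* GetVTKSourceVersion() { return VTK_SOURCE_VERSION; }
    virtual const char* GetDescription() { return "Parallel LIC overrides (sketch)"; }
  };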
diff --git a/Rendering/ParallelLIC/module.cmake b/Rendering/ParallelLIC/module.cmake
new file mode 100644
index 0000000..ec80c75
--- /dev/null
+++ b/Rendering/ParallelLIC/module.cmake
@@ -0,0 +1,8 @@
+vtk_module(vtkRenderingParallelLIC
+  IMPLEMENTS
+    vtkRenderingLIC
+  DEPENDS
+    vtkRenderingOpenGL
+    vtkParallelMPI
+    vtkIOLegacy
+  )
diff --git a/Rendering/ParallelLIC/vtkMPIPixelTT.cxx b/Rendering/ParallelLIC/vtkMPIPixelTT.cxx
new file mode 100644
index 0000000..289195f
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkMPIPixelTT.cxx
@@ -0,0 +1,43 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMPIPixelTT.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkMPIPixelTT.h"
+
+#define vtkMPIPixelTTMacro2(_ctype, _mpiEnum, _vtkEnum) \
+MPI_Datatype vtkMPIPixelTT<_ctype>::MPIType = _mpiEnum; \
+int vtkMPIPixelTT<_ctype>::VTKType = _vtkEnum;
+
+vtkMPIPixelTTMacro2(void, MPI_BYTE, VTK_VOID)
+vtkMPIPixelTTMacro2(char, MPI_CHAR, VTK_CHAR)
+vtkMPIPixelTTMacro2(signed char, MPI_CHAR, VTK_SIGNED_CHAR)
+vtkMPIPixelTTMacro2(unsigned char, MPI_UNSIGNED_CHAR, VTK_UNSIGNED_CHAR)
+vtkMPIPixelTTMacro2(short, MPI_SHORT, VTK_SHORT)
+vtkMPIPixelTTMacro2(unsigned short, MPI_UNSIGNED_SHORT, VTK_UNSIGNED_SHORT)
+vtkMPIPixelTTMacro2(int, MPI_INT, VTK_INT)
+vtkMPIPixelTTMacro2(unsigned int, MPI_UNSIGNED, VTK_UNSIGNED_INT)
+vtkMPIPixelTTMacro2(long, MPI_LONG, VTK_LONG)
+vtkMPIPixelTTMacro2(unsigned long, MPI_UNSIGNED_LONG, VTK_UNSIGNED_LONG)
+vtkMPIPixelTTMacro2(float, MPI_FLOAT, VTK_FLOAT)
+vtkMPIPixelTTMacro2(double, MPI_DOUBLE, VTK_DOUBLE)
+//vtkMPIPixelTTMacro2(vtkIdType, MPI_LONG_LONG, VTK_IDTYPE)
+#ifdef VTK_TYPE_USE_LONG_LONG
+vtkMPIPixelTTMacro2(long long, MPI_LONG_LONG, VTK_LONG_LONG)
+vtkMPIPixelTTMacro2(unsigned long long, MPI_UNSIGNED_LONG_LONG, VTK_UNSIGNED_LONG_LONG)
+#endif
+#ifdef VTK_TYPE_USE___INT64
+vtkMPIPixelTTMacro2(__int64, MPI_LONG_LONG, VTK___INT64)
+# ifdef VTK_TYPE_CONVERT_UI64_TO_DOUBLE
+vtkMPIPixelTTMacro2(unsigned __int64, MPI_UNSIGNED_LONG_LONG, VTK_UNSIGNED___INT64)
+# endif
+#endif
diff --git a/Rendering/ParallelLIC/vtkMPIPixelTT.h b/Rendering/ParallelLIC/vtkMPIPixelTT.h
new file mode 100644
index 0000000..f19d700
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkMPIPixelTT.h
@@ -0,0 +1,61 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMPIPixelTT.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __vtkMPIPixelTT_h
+#define __vtkMPIPixelTT_h
+
+#include "vtkType.h" // for vtk types
+#include "vtkMPI.h"
+
+// Description:
+// Traits class for converting from vtk data type enum
+// to the appropriate C or MPI datatype.
+template<typename T> class vtkMPIPixelTT;
+
+//BTX
+#define vtkMPIPixelTTMacro1(_ctype) \
+template<> \
+class vtkMPIPixelTT<_ctype> \
+{ \
+public: \
+  static MPI_Datatype MPIType; \
+  static int VTKType; \
+};
+
+vtkMPIPixelTTMacro1(void)
+vtkMPIPixelTTMacro1(char)
+vtkMPIPixelTTMacro1(signed char)
+vtkMPIPixelTTMacro1(unsigned char)
+vtkMPIPixelTTMacro1(short)
+vtkMPIPixelTTMacro1(unsigned short)
+vtkMPIPixelTTMacro1(int)
+vtkMPIPixelTTMacro1(unsigned int)
+vtkMPIPixelTTMacro1(long)
+vtkMPIPixelTTMacro1(unsigned long)
+vtkMPIPixelTTMacro1(float)
+vtkMPIPixelTTMacro1(double)
+//vtkMPIPixelTTMacro1(vtkIdType)
+#ifdef VTK_TYPE_USE_LONG_LONG
+vtkMPIPixelTTMacro1(long long)
+vtkMPIPixelTTMacro1(unsigned long long)
+#endif
+#ifdef VTK_TYPE_USE___INT64
+vtkMPIPixelTTMacro1(__int64)
+# ifdef VTK_TYPE_CONVERT_UI64_TO_DOUBLE
+vtkMPIPixelTTMacro1(unsigned __int64)
+# endif
+#endif
+//ETX
+#endif
+// VTK-HeaderTest-Exclude: vtkMPIPixelTT.h
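The specializations above give templated communication code a compile-time map from a C type to its MPI datatype and VTK type enum. A minimal usage sketch (the SendPixels helper below is hypothetical, not part of the module):

  #include "vtkMPIPixelTT.h"

  // hypothetical helper: send n pixel values of type T to destRank.
  template <typename T>
  int SendPixels(MPI_Comm comm, int destRank, int tag, T *data, int n)
  {
    // vtkMPIPixelTT<T>::MPIType resolves to MPI_FLOAT, MPI_INT, etc.,
    // per the specializations instantiated in vtkMPIPixelTT.cxx.
    return MPI_Send(data, n, vtkMPIPixelTT<T>::MPIType, destRank, tag, comm);
  }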
diff --git a/Rendering/ParallelLIC/vtkMPIPixelView.h b/Rendering/ParallelLIC/vtkMPIPixelView.h
new file mode 100644
index 0000000..6dc081c
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkMPIPixelView.h
@@ -0,0 +1,108 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkMPIPixelView.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkMPIPixelView -- Templated helper function for creating
+// MPI datatypes that describe a vtkPixelExtent.
+
+#ifndef __vtkMPIPixelView_h
+#define __vtkMPIPixelView_h
+
+#include "vtkPixelExtent.h" // for pixel extent
+#include "vtkMPI.h" // for mpi
+#include "vtkMPIPixelTT.h" // for type traits
+#include <iostream> // for cerr
+
+//-----------------------------------------------------------------------------
+template<typename T>
+int vtkMPIPixelViewNew(
+      const vtkPixelExtent &domain,
+      const vtkPixelExtent &decomp,
+      int nComps,
+      MPI_Datatype &view)
+{
+  #ifndef NDEBUG
+  int mpiOk=0;
+  MPI_Initialized(&mpiOk);
+  if (!mpiOk)
+    {
+    std::cerr << "This class requires the MPI runtime." << std::endl;
+    return -1;
+    }
+  #endif
+
+  int iErr;
+
+  MPI_Datatype nativeType;
+  iErr=MPI_Type_contiguous(
+        nComps,
+        vtkMPIPixelTT<T>::MPIType,
+        &nativeType);
+  if (iErr)
+    {
+    return -2;
+    }
+
+  int domainDims[2];
+  domain.Size(domainDims);
+
+  int domainStart[2];
+  domain.GetStartIndex(domainStart);
+
+  int decompDims[2];
+  decomp.Size(decompDims);
+
+  int decompStart[2];
+  decomp.GetStartIndex(decompStart, domainStart);
+
+  // use a contiguous type when possible.
+  if (domain==decomp)
+    {
+    unsigned long long nCells=decomp.Size();
+    iErr=MPI_Type_contiguous((int)nCells, nativeType, &view);
+    if (iErr)
+      {
+      MPI_Type_free(&nativeType);
+      return -3;
+      }
+    }
+  else
+    {
+    iErr=MPI_Type_create_subarray(
+        2,
+        domainDims,
+        decompDims,
+        decompStart,
+        MPI_ORDER_FORTRAN,
+        nativeType,
+        &view);
+    if (iErr)
+      {
+      MPI_Type_free(&nativeType);
+      return -4;
+      }
+    }
+  iErr=MPI_Type_commit(&view);
+  if (iErr)
+    {
+    MPI_Type_free(&nativeType);
+    return -5;
+    }
+
+  MPI_Type_free(&nativeType);
+
+  return 0;
+}
+
+#endif
+// VTK-HeaderTest-Exclude: vtkMPIPixelView.h
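A short usage sketch, assuming MPI is already initialized and the whole-image and tile extents have been built elsewhere (the SendTile helper and the choice of 4 components are hypothetical):

  #include "vtkMPIPixelView.h"

  // hypothetical: post a non-blocking send of a 4-component float tile
  // described by the subarray view built by vtkMPIPixelViewNew.
  int SendTile(MPI_Comm comm, int destRank, int tag,
        const vtkPixelExtent &whole, const vtkPixelExtent &tile,
        float *data, MPI_Request &req)
  {
    MPI_Datatype view;
    if (vtkMPIPixelViewNew<float>(whole, tile, 4, view))
      {
      return -1; // subarray construction failed
      }
    int iErr = MPI_Isend(data, 1, view, destRank, tag, comm, &req);
    // MPI only marks the type for deallocation; the pending send completes.
    MPI_Type_free(&view);
    return iErr ? -2 : 0;
  }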
diff --git a/Rendering/ParallelLIC/vtkPLineIntegralConvolution2D.cxx b/Rendering/ParallelLIC/vtkPLineIntegralConvolution2D.cxx
new file mode 100644
index 0000000..d71d9e1
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPLineIntegralConvolution2D.cxx
@@ -0,0 +1,128 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPLineIntegralConvolution2D.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkPLineIntegralConvolution2D.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkPainterCommunicator.h"
+#include "vtkPPainterCommunicator.h"
+#include "vtkParallelTimer.h"
+#include "vtkMPI.h"
+
+// ----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkPLineIntegralConvolution2D);
+
+// ----------------------------------------------------------------------------
+vtkPLineIntegralConvolution2D::vtkPLineIntegralConvolution2D()
+{
+  this->Comm = new vtkPPainterCommunicator;
+}
+
+// ----------------------------------------------------------------------------
+vtkPLineIntegralConvolution2D::~vtkPLineIntegralConvolution2D()
+{}
+
+// ----------------------------------------------------------------------------
+void vtkPLineIntegralConvolution2D::SetCommunicator(vtkPainterCommunicator *comm)
+{
+  this->Comm->Copy(comm, false);
+}
+
+// ----------------------------------------------------------------------------
+vtkPainterCommunicator *vtkPLineIntegralConvolution2D::GetCommunicator()
+{
+  return this->Comm;
+}
+
+// ----------------------------------------------------------------------------
+void vtkPLineIntegralConvolution2D::GetGlobalMinMax(
+      vtkPainterCommunicator *painterComm,
+      float &min,
+      float &max)
+{
+  vtkPPainterCommunicator *pPainterComm
+    = dynamic_cast<vtkPPainterCommunicator*>(painterComm);
+
+  if (pPainterComm->GetMPIInitialized())
+    {
+    MPI_Comm comm = *((MPI_Comm*)pPainterComm->GetCommunicator());
+
+    MPI_Allreduce(
+          MPI_IN_PLACE,
+          &min,
+          1,
+          MPI_FLOAT,
+          MPI_MIN,
+          comm);
+
+    MPI_Allreduce(
+          MPI_IN_PLACE,
+          &max,
+          1,
+          MPI_FLOAT,
+          MPI_MAX,
+          comm);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkPLineIntegralConvolution2D::StartTimerEvent(const char *event)
+{
+#if defined(vtkLineIntegralConvolution2DTIME) || defined(vtkSurfaceLICPainterTIME)
+  vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+  log->StartEvent(event);
+#else
+  (void)event;
+#endif
+}
+
+//-----------------------------------------------------------------------------
+void vtkPLineIntegralConvolution2D::EndTimerEvent(const char *event)
+{
+#if defined(vtkLineIntegralConvolution2DTIME) || defined(vtkSurfaceLICPainterTIME)
+  vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+  log->EndEvent(event);
+#else
+  (void)event;
+#endif
+}
+
+//----------------------------------------------------------------------------
+void vtkPLineIntegralConvolution2D::WriteTimerLog(const char *fileName)
+{
+#ifdef vtkLineIntegralConvolution2DTIME
+  std::string fname = fileName?fileName:"";
+  if (fname==this->LogFileName)
+    {
+    return;
+    }
+  this->LogFileName = fname;
+  if (!fname.empty())
+    {
+    vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+    log->SetFileName(fname.c_str());
+    log->Update();
+    log->Write();
+    }
+#else
+  (void)fileName;
+#endif
+}
+
+//-----------------------------------------------------------------------------
+void vtkPLineIntegralConvolution2D::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "LogFileName=" << this->LogFileName << endl;
+}
diff --git a/Rendering/ParallelLIC/vtkPLineIntegralConvolution2D.h b/Rendering/ParallelLIC/vtkPLineIntegralConvolution2D.h
new file mode 100644
index 0000000..0593353
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPLineIntegralConvolution2D.h
@@ -0,0 +1,88 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPLineIntegralConvolution2D.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPLineIntegralConvolution2D - parallel part of GPU-based
+// implementation of Line Integral Convolution (LIC)
+//
+// .SECTION Description
+// Implements the parallel parts of the algorithm.
+//
+// .SECTION See Also
+//  vtkLineIntegralConvolution2D
+
+#ifndef __vtkPLineIntegralConvolution2D_h
+#define __vtkPLineIntegralConvolution2D_h
+
+#include "vtkLineIntegralConvolution2D.h"
+#include "vtkRenderingParallelLICModule.h" // for export macro
+#include <string> // for string
+
+class vtkPainterCommunicator;
+class vtkPPainterCommunicator;
+
+class VTKRENDERINGPARALLELLIC_EXPORT vtkPLineIntegralConvolution2D : public vtkLineIntegralConvolution2D
+{
+public:
+  static vtkPLineIntegralConvolution2D *New();
+  vtkTypeMacro(vtkPLineIntegralConvolution2D, vtkLineIntegralConvolution2D);
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  //BTX
+  // Description:
+  // Set the communicator to use during parallel operation.
+  // The communicator will not be duplicated or reference
+  // counted for performance reasons; thus the caller should
+  // hold/manage a reference to the communicator during use
+  // of the LIC object.
+  virtual void SetCommunicator(vtkPainterCommunicator *);
+  virtual vtkPainterCommunicator *GetCommunicator();
+
+  // Description:
+  // For parallel operation, find global min/max
+  // min/max are in/out.
+  virtual void GetGlobalMinMax(
+        vtkPainterCommunicator *comm,
+        float &min,
+        float &max);
+  //ETX
+
+  // Description:
+  // Methods used for parallel benchmarks. Use cmake to define
+  // vtkLineIntegralConvolution2DTIME to enable benchmarks.
+  // During each update timing information is stored; it can
+  // be written to disk by calling WriteTimerLog.
+  virtual void WriteTimerLog(const char *fileName);
+
+protected:
+  vtkPLineIntegralConvolution2D();
+  virtual ~vtkPLineIntegralConvolution2D();
+
+  // Description:
+  // Methods used for parallel benchmarks. Use cmake to define
+  // vtkSurfaceLICPainterTIME to enable benchmarks. During each
+  // update timing information is stored; it can be written to
+  // disk by calling WriteTimerLog. Note: some of the timings are
+  // enabled by the surface LIC painter.
+  virtual void StartTimerEvent(const char *name);
+  virtual void EndTimerEvent(const char *name);
+
+private:
+  std::string LogFileName;
+
+private:
+  vtkPLineIntegralConvolution2D(const vtkPLineIntegralConvolution2D &); // Not implemented.
+  void operator=(const vtkPLineIntegralConvolution2D &); // Not implemented.
+};
+
+#endif
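The parallel part of the min/max computation is a pair of in-place all-reduces (see GetGlobalMinMax in the .cxx above): every rank passes its local extremum and receives the image-wide value. Distilled into a stand-alone sketch (the GlobalMinMax function is hypothetical; min/max are in/out, as in the class API):

  #include "vtkMPI.h"

  // hypothetical sketch of the collective used by GetGlobalMinMax.
  void GlobalMinMax(MPI_Comm comm, float &min, float &max)
  {
    // MPI_IN_PLACE lets each rank use the same buffer as send and receive.
    MPI_Allreduce(MPI_IN_PLACE, &min, 1, MPI_FLOAT, MPI_MIN, comm);
    MPI_Allreduce(MPI_IN_PLACE, &max, 1, MPI_FLOAT, MPI_MAX, comm);
  }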
diff --git a/Rendering/ParallelLIC/vtkPPainterCommunicator.cxx b/Rendering/ParallelLIC/vtkPPainterCommunicator.cxx
new file mode 100644
index 0000000..8f59d5b
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPPainterCommunicator.cxx
@@ -0,0 +1,343 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPPainterCommunicator.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkPPainterCommunicator.h"
+
+#include "vtkMPI.h"
+#include "vtkMultiProcessController.h"
+#include "vtkMPIController.h"
+#include "vtkMPICommunicator.h"
+
+#include <vector>
+
+using std::vector;
+
+// use PImpl to avoid MPI types in public API.
+class vtkPPainterCommunicatorInternals
+{
+public:
+  vtkPPainterCommunicatorInternals()
+      :
+  Ownership(false),
+  Communicator(MPI_COMM_WORLD)
+  {}
+
+  ~vtkPPainterCommunicatorInternals();
+
+  // Description:
+  // Set the communicator; by default ownership is not taken.
+  void SetCommunicator(MPI_Comm comm, bool ownership=false);
+
+  // Description:
+  // Duplicate the communicator; ownership of the new
+  // communicator is always taken.
+  void DuplicateCommunicator(MPI_Comm comm);
+
+  bool Ownership;
+  MPI_Comm Communicator;
+};
+
+//-----------------------------------------------------------------------------
+vtkPPainterCommunicatorInternals::~vtkPPainterCommunicatorInternals()
+{
+  this->SetCommunicator(MPI_COMM_NULL);
+}
+
+//-----------------------------------------------------------------------------
+void vtkPPainterCommunicatorInternals::SetCommunicator(
+      MPI_Comm comm,
+      bool ownership)
+{
+  // avoid unnecessary operations
+  if (this->Communicator == comm)
+    {
+    return;
+    }
+  // do nothing without mpi
+  if ( vtkPPainterCommunicator::MPIInitialized()
+    && !vtkPPainterCommunicator::MPIFinalized() )
+    {
+    // release the old communicator if it's ours
+    if ( this->Ownership
+      && (this->Communicator != MPI_COMM_NULL)
+      && (this->Communicator != MPI_COMM_WORLD) )
+      {
+      MPI_Comm_free(&this->Communicator);
+      }
+    }
+  // assign
+  this->Ownership = ownership;
+  this->Communicator = comm;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPPainterCommunicatorInternals::DuplicateCommunicator(MPI_Comm comm)
+{
+  // avoid unnecessary operations
+  if (this->Communicator == comm)
+    {
+    return;
+    }
+  // handle no mpi gracefully
+  if ( !vtkPPainterCommunicator::MPIInitialized()
+    || vtkPPainterCommunicator::MPIFinalized() )
+    {
+    this->Ownership = false;
+    this->Communicator = comm;
+    return;
+    }
+  // release the old communicator if it's ours
+  this->SetCommunicator(MPI_COMM_NULL);
+  if (comm != MPI_COMM_NULL)
+    {
+    // duplicate
+    this->Ownership = true;
+    MPI_Comm_dup(comm, &this->Communicator);
+    }
+}
+
+
+//-----------------------------------------------------------------------------
+vtkPPainterCommunicator::vtkPPainterCommunicator()
+{
+  this->Internals = new ::vtkPPainterCommunicatorInternals;
+}
+
+//-----------------------------------------------------------------------------
+vtkPPainterCommunicator::~vtkPPainterCommunicator()
+{
+  delete this->Internals;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPPainterCommunicator::Copy(
+      const vtkPainterCommunicator *other,
+      bool ownership)
+{
+  const vtkPPainterCommunicator *pOther
+    = dynamic_cast<const vtkPPainterCommunicator*>(other);
+
+  if (pOther && (pOther != this))
+    {
+    this->Internals->SetCommunicator(
+          pOther->Internals->Communicator,
+          ownership);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkPPainterCommunicator::Duplicate(const vtkPainterCommunicator *comm)
+{
+  const vtkPPainterCommunicator *pcomm
+     = dynamic_cast<const vtkPPainterCommunicator*>(comm);
+
+  if (pcomm)
+    {
+    this->Internals->DuplicateCommunicator(pcomm->Internals->Communicator);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkPPainterCommunicator::SetCommunicator(vtkMPICommunicatorOpaqueComm *comm)
+{
+  this->Internals->SetCommunicator(*comm->GetHandle());
+}
+
+//-----------------------------------------------------------------------------
+void vtkPPainterCommunicator::GetCommunicator(vtkMPICommunicatorOpaqueComm *comm)
+{
+  *comm = &this->Internals->Communicator;
+}
+
+//-----------------------------------------------------------------------------
+void *vtkPPainterCommunicator::GetCommunicator()
+{
+  return &this->Internals->Communicator;
+}
+
+//-----------------------------------------------------------------------------
+int vtkPPainterCommunicator::GetRank()
+{
+  if (!this->MPIInitialized() || this->MPIFinalized())
+    {
+    return 0;
+    }
+  int rank;
+  MPI_Comm_rank(this->Internals->Communicator, &rank);
+  return rank;
+}
+
+//-----------------------------------------------------------------------------
+int vtkPPainterCommunicator::GetSize()
+{
+  if (!this->MPIInitialized() || this->MPIFinalized())
+    {
+    return 1;
+    }
+  int size;
+  MPI_Comm_size(this->Internals->Communicator, &size);
+  return size;
+}
+
+//-----------------------------------------------------------------------------
+int vtkPPainterCommunicator::GetWorldRank()
+{
+  if (!this->MPIInitialized() || this->MPIFinalized())
+    {
+    return 0;
+    }
+  int rank;
+  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
+  return rank;
+}
+
+//-----------------------------------------------------------------------------
+int vtkPPainterCommunicator::GetWorldSize()
+{
+  if (!this->MPIInitialized() || this->MPIFinalized())
+    {
+    return 1;
+    }
+  int size;
+  MPI_Comm_size(MPI_COMM_WORLD, &size);
+  return size;
+}
+
+// ----------------------------------------------------------------------------
+vtkMPICommunicatorOpaqueComm *vtkPPainterCommunicator::GetGlobalCommunicator()
+{
+  static vtkMPICommunicatorOpaqueComm *globalComm = NULL;
+  if (!globalComm)
+    {
+    if (vtkPPainterCommunicator::MPIInitialized())
+      {
+      vtkMultiProcessController *controller = vtkMultiProcessController::GetGlobalController();
+
+      vtkMPIController *mpiController;
+      vtkMPICommunicator *mpiCommunicator;
+
+      if ( (mpiController = vtkMPIController::SafeDownCast(controller))
+        && (mpiCommunicator = vtkMPICommunicator::SafeDownCast(controller->GetCommunicator())) )
+        {
+        globalComm = new vtkMPICommunicatorOpaqueComm(*mpiCommunicator->GetMPIComm());
+        }
+      else
+        {
+        vtkGenericWarningMacro("MPI is required for parallel operations.");
+        }
+      }
+    }
+  return globalComm;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPPainterCommunicator::MPIInitialized()
+{
+  int initialized;
+  MPI_Initialized(&initialized);
+  return initialized == 1;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPPainterCommunicator::MPIFinalized()
+{
+  int finished;
+  MPI_Finalized(&finished);
+  return finished == 1;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkPPainterCommunicator::GetIsNull()
+{
+  return this->Internals->Communicator == MPI_COMM_NULL;
+}
+
+//-----------------------------------------------------------------------------
+void vtkPPainterCommunicator::SubsetCommunicator(
+      vtkMPICommunicatorOpaqueComm *comm,
+      int include)
+{
+  #if defined(vtkPPainterCommunicatorDEBUG)
+  cerr
+    << "=====vtkPPainterCommunicator::SubsetCommunicator" << endl
+    << "creating communicator " << (include?"with":"WITHOUT")
+    << this->GetWorldRank() << endl;
+  #endif
+
+  if (this->MPIInitialized() && !this->MPIFinalized())
+    {
+    MPI_Comm defaultComm = *((MPI_Comm*)comm->GetHandle());
+
+    // exchange include status
+    // make list of active ranks
+    int worldSize = 0;
+    MPI_Comm_size(defaultComm, &worldSize);
+
+    vector<int> included(worldSize,0);
+    MPI_Allgather(
+          &include,
+          1,
+          MPI_INT,
+          &included[0],
+          1,
+          MPI_INT,
+          defaultComm);
+
+    vector<int> activeRanks;
+    activeRanks.reserve(worldSize);
+    for (int i=0; i<worldSize; ++i)
+      {
+      if (included[i] != 0)
+        {
+        activeRanks.push_back(i);
+        }
+      }
+
+    int nActive = (int)activeRanks.size();
+    if (nActive==0)
+      {
+      // no active ranks
+      // no rendering will occur so no communicator
+      // is needed
+      this->Internals->SetCommunicator(MPI_COMM_NULL);
+      }
+    else
+    if (nActive==worldSize)
+      {
+      // all ranks are active
+      // use the default communicator.
+      this->Internals->SetCommunicator(defaultComm);
+      }
+    else
+      {
+      // a subset of the ranks are active
+      // make a new communicator
+      MPI_Group wholeGroup;
+      MPI_Comm_group(defaultComm, &wholeGroup);
+
+      MPI_Group activeGroup;
+      MPI_Group_incl(
+            wholeGroup,
+            nActive,
+            &activeRanks[0],
+            &activeGroup);
+
+      MPI_Comm subsetComm;
+      MPI_Comm_create(defaultComm, activeGroup, &subsetComm);
+      MPI_Group_free(&activeGroup);
+
+      this->Internals->SetCommunicator(subsetComm, true);
+      }
+    }
+}
diff --git a/Rendering/ParallelLIC/vtkPPainterCommunicator.h b/Rendering/ParallelLIC/vtkPPainterCommunicator.h
new file mode 100644
index 0000000..0859cba
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPPainterCommunicator.h
@@ -0,0 +1,101 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPPainterCommunicator.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPPainterCommunicator -- A communicator containing only
+// ranks that will execute a painter chain.
+//
+// .SECTION Description
+// A communicator that can safely be used inside a painter.
+// A simple container holding an MPI communicator. The simple API
+// is sufficient to allow serial code (no MPI available) to steer
+// execution.
+#ifndef __vtkPPainterCommunicator_h
+#define __vtkPPainterCommunicator_h
+
+#include "vtkPainterCommunicator.h"
+#include "vtkRenderingParallelLICModule.h" // for export macro
+
+class vtkPPainterCommunicatorInternals;
+class vtkMPICommunicatorOpaqueComm;
+
+class VTKRENDERINGPARALLELLIC_EXPORT vtkPPainterCommunicator : public vtkPainterCommunicator
+{
+public:
+  vtkPPainterCommunicator();
+  virtual ~vtkPPainterCommunicator();
+
+  // Description:
+  // Copy constructor and assignment operator.
+  vtkPPainterCommunicator(const vtkPPainterCommunicator &other) : vtkPainterCommunicator(other)
+    { this->Copy(&other, false); }
+
+  vtkPPainterCommunicator &operator=(const vtkPPainterCommunicator &other)
+    { this->Copy(&other, false); return *this; }
+
+  // Description:
+  // Copy the communicator.
+  virtual void Copy(const vtkPainterCommunicator *other, bool ownership);
+
+  // Description:
+  // Duplicate the communicator.
+  virtual void Duplicate(const vtkPainterCommunicator *other);
+
+  // Description:
+  // Query MPI for information about the communicator.
+  virtual int GetRank();
+  virtual int GetSize();
+  virtual bool GetIsNull();
+
+  // Description:
+  // Query MPI for information about the world communicator.
+  virtual int GetWorldRank();
+  virtual int GetWorldSize();
+
+  // Description:
+  // Query MPI state.
+  virtual bool GetMPIInitialized(){ return this->MPIInitialized(); }
+  virtual bool GetMPIFinalized(){ return this->MPIFinalized(); }
+
+  static bool MPIInitialized();
+  static bool MPIFinalized();
+
+  // Description:
+  // Set/Get the communicator. Ownership is not assumed,
+  // thus the caller must keep the communicator alive while
+  // this class is in use and free the communicator when
+  // finished.
+  void SetCommunicator(vtkMPICommunicatorOpaqueComm *comm);
+  void GetCommunicator(vtkMPICommunicatorOpaqueComm *comm);
+  void *GetCommunicator();
+
+  // Description:
+  // Creates a new communicator with/without the calling process,
+  // as indicated by the passed-in flag; if not 0 the calling process
+  // is included in the new communicator. The new communicator is
+  // accessed via GetCommunicator. In parallel this call is mpi
+  // collective on the world communicator. In serial this is a no-op.
+  void SubsetCommunicator(vtkMPICommunicatorOpaqueComm *comm, int include);
+
+  // Description:
+  // Get VTK's world communicator. Returns a null communicator if
+  // MPI was not yet initialized.
+  static vtkMPICommunicatorOpaqueComm *GetGlobalCommunicator();
+
+private:
+  // PImpl for MPI datatypes
+  vtkPPainterCommunicatorInternals *Internals;
+};
+
+#endif
+// VTK-HeaderTest-Exclude: vtkPPainterCommunicator.h
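A hedged sketch of the intended workflow: each rank reports collectively whether it has data to render, and SubsetCommunicator builds a communicator containing only the active ranks (the MakeRenderComm helper and the hasLocalData flag are hypothetical):

  #include "vtkPPainterCommunicator.h"

  // hypothetical: restrict a painter communicator to the ranks that
  // actually have geometry to render.
  void MakeRenderComm(vtkPPainterCommunicator &comm, bool hasLocalData)
  {
    vtkMPICommunicatorOpaqueComm *world
      = vtkPPainterCommunicator::GetGlobalCommunicator();
    if (world)
      {
      // MPI collective over the world communicator; if MPI is not
      // initialized GetGlobalCommunicator() returns NULL and this is skipped.
      comm.SubsetCommunicator(world, hasLocalData ? 1 : 0);
      }
    // ranks excluded from the subset get a null communicator and can
    // skip rendering entirely.
    if (!comm.GetIsNull())
      {
      // ... hand &comm to the painter / LIC classes here ...
      }
  }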
diff --git a/Rendering/ParallelLIC/vtkPPixelTransfer.cxx b/Rendering/ParallelLIC/vtkPPixelTransfer.cxx
new file mode 100644
index 0000000..525443e
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPPixelTransfer.cxx
@@ -0,0 +1,84 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPPixelTransfer.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkPPixelTransfer.h"
+using std::ostream;
+using std::vector;
+using std::deque;
+
+//*****************************************************************************
+ostream &operator<<(ostream &os, const vtkPPixelTransfer &pt)
+{
+  os
+    << "[" << pt.GetSourceRank() << "]"
+    << " "  << pt.GetSourceWholeExtent()
+    << " "  << pt.GetSourceExtent()
+    << " -> "
+    << "[" << pt.GetDestinationRank() << "]"
+    << " "  << pt.GetDestinationWholeExtent()
+    << " "  << pt.GetDestinationExtent();
+  return os;
+}
+
+//-----------------------------------------------------------------------------
+int vtkPPixelTransfer::Execute(
+       MPI_Comm comm,
+       int rank,
+       int nComps,
+       int srcType,
+       void *srcData,
+       int destType,
+       void *destData,
+       vector<MPI_Request> &reqs,
+       deque<MPI_Datatype> &types,
+       int tag)
+{
+  // first layer of dispatch
+  switch(srcType)
+    {
+    vtkTemplateMacro(
+        return this->Execute(
+            comm,
+            rank,
+            nComps,
+            (VTK_TT*)srcData,
+            destType,
+            destData,
+            reqs,
+            types,
+            tag););
+    }
+  return 0;
+}
+
+//-----------------------------------------------------------------------------
+int vtkPPixelTransfer::Blit(
+         int nComps,
+         int srcType,
+         void *srcData,
+         int destType,
+         void *destData)
+{
+  return vtkPixelTransfer::Blit(
+        this->SrcWholeExt,
+        this->SrcExt,
+        this->DestWholeExt,
+        this->DestExt,
+        nComps,
+        srcType,
+        srcData,
+        nComps,
+        destType,
+        destData);
+}
diff --git a/Rendering/ParallelLIC/vtkPPixelTransfer.h b/Rendering/ParallelLIC/vtkPPixelTransfer.h
new file mode 100644
index 0000000..e1fab67
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPPixelTransfer.h
@@ -0,0 +1,524 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPPixelTransfer.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPPixelTransfer -- For movement of pixel data described by extents
+// .SECTION Description
+// class to handle inter-process communication of pixel data from
+// non-contiguous regions of a shared index space. For example copying
+// a subset of one image to a subset of another. The class can be used
+// for purely local (no MPI) non-contiguous data transfers by setting
+// the source and destination ranks to the same id. In that case
+// memcpy is used.
+//
+// .SECTION See also
+// vtkPixelExtent
+
+#ifndef __vtkPPixelTransfer_h
+#define __vtkPPixelTransfer_h
+
+#include "vtkPixelTransfer.h"
+#include "vtkRenderingParallelLICModule.h" // for export
+#include "vtkSetGet.h" // for macros
+#include "vtkPixelExtent.h" // for pixel extent
+#include "vtkMPI.h" // for mpi
+#include "vtkMPIPixelTT.h" // for type traits
+#include "vtkMPIPixelView.h" // for mpi subarrays
+
+// included vtkSystemIncludes.h in the base class.
+#include <iostream> // for ostream
+#include <vector> // for vector
+#include <cstring> // for memcpy
+
+// #define vtkPPixelTransferDEBUG
+
+class VTKRENDERINGPARALLELLIC_EXPORT vtkPPixelTransfer : public vtkPixelTransfer
+{
+public:
+  vtkPPixelTransfer()
+      :
+    SrcRank(0),
+    DestRank(0),
+    UseBlockingSend(0),
+    UseBlockingRecv(0)
+    {}
+
+  // Description:
+  // Initialize a transaction from sub extent of source to sub extent
+  // of dest, where the subsets are different.
+  vtkPPixelTransfer(
+        int srcRank,
+        const vtkPixelExtent &srcWholeExt,
+        const vtkPixelExtent &srcExt,
+        int destRank,
+        const vtkPixelExtent &destWholeExt,
+        const vtkPixelExtent &destExt,
+        int id=0)
+        :
+    Id(id),
+    SrcRank(srcRank),
+    SrcWholeExt(srcWholeExt),
+    SrcExt(srcExt),
+    DestRank(destRank),
+    DestWholeExt(destWholeExt),
+    DestExt(destExt),
+    UseBlockingSend(0),
+    UseBlockingRecv(0)
+    {}
+
+  // Description:
+  // Initialize a transaction from sub extent of source to sub extent
+  // of dest, where the subsets are the same.
+  vtkPPixelTransfer(
+        int srcRank,
+        const vtkPixelExtent &srcWholeExt,
+        const vtkPixelExtent &targetExt,
+        int destRank,
+        const vtkPixelExtent &destWholeExt,
+        int id)
+        :
+    Id(id),
+    SrcRank(srcRank),
+    SrcWholeExt(srcWholeExt),
+    SrcExt(targetExt),
+    DestRank(destRank),
+    DestWholeExt(destWholeExt),
+    DestExt(targetExt),
+    UseBlockingSend(0),
+    UseBlockingRecv(0)
+    {}
+
+  // Description:
+  // Initialize a transaction from sub extent of source to sub extent
+  // of dest, where both the whole extents and the subsets are the same.
+  vtkPPixelTransfer(
+        int srcRank,
+        int destRank,
+        const vtkPixelExtent &wholeExt,
+        const vtkPixelExtent &targetExt,
+        int id=0)
+        :
+    Id(id),
+    SrcRank(srcRank),
+    SrcWholeExt(wholeExt),
+    SrcExt(targetExt),
+    DestRank(destRank),
+    DestWholeExt(wholeExt),
+    DestExt(targetExt),
+    UseBlockingSend(0),
+    UseBlockingRecv(0)
+    {}
+
+  // Description:
+  // Initialize a transaction from sub extent of source to sub extent
+  // of dest, where the whole extents and the subsets are all the same extent.
+  vtkPPixelTransfer(
+        int srcRank,
+        int destRank,
+        const vtkPixelExtent &ext,
+        int id=0)
+        :
+    Id(id),
+    SrcRank(srcRank),
+    SrcWholeExt(ext),
+    SrcExt(ext),
+    DestRank(destRank),
+    DestWholeExt(ext),
+    DestExt(ext),
+    UseBlockingSend(0),
+    UseBlockingRecv(0)
+    {}
+
+  // Description:
+  // Initialize a transaction from whole extent of source to whole extent
+  // of dest, where source and destination have different whole extents.
+  vtkPPixelTransfer(
+        int srcRank,
+        const vtkPixelExtent &srcWholeExt,
+        int destRank,
+        const vtkPixelExtent &destWholeExt,
+        int id=0)
+        :
+    Id(id),
+    SrcRank(srcRank),
+    SrcWholeExt(srcWholeExt),
+    SrcExt(srcWholeExt),
+    DestRank(destRank),
+    DestWholeExt(destWholeExt),
+    DestExt(destWholeExt),
+    UseBlockingSend(0),
+    UseBlockingRecv(0)
+    {}
+
+  // Description:
+  // Initialize a transaction from sub extent of source to sub extent
+  // of dest, where the subsets are different. This is a local
+  // operation; there will be no communication.
+  vtkPPixelTransfer(
+        const vtkPixelExtent &srcWholeExt,
+        const vtkPixelExtent &srcExt,
+        const vtkPixelExtent &destWholeExt,
+        const vtkPixelExtent &destExt)
+        :
+    Id(0),
+    SrcRank(0),
+    SrcWholeExt(srcWholeExt),
+    SrcExt(srcExt),
+    DestRank(0),
+    DestWholeExt(destWholeExt),
+    DestExt(destExt),
+    UseBlockingSend(0),
+    UseBlockingRecv(0)
+    {}
+
+  ~vtkPPixelTransfer(){}
+
+  // Description:
+  // Set/Get the MPI rank of source and destination
+  // processes.
+  void SetSourceRank(int rank)
+  { this->SrcRank=rank; }
+
+  int GetSourceRank() const
+  { return this->SrcRank; }
+
+  void SetDestinationRank(int rank)
+  { this->DestRank=rank; }
+
+  int GetDestinationRank() const
+  { return this->DestRank; }
+
+  // Description:
+  // Tests to determine a given rank's role in this transaction.
+  // If both Sender and Receiver are true then the operation
+  // is local and no mpi calls are made.
+  bool Sender(int rank) const { return (this->SrcRank == rank); }
+  bool Receiver(int rank) const { return (this->DestRank == rank); }
+  bool Local(int rank) const
+  { return (this->Sender(rank) && this->Receiver(rank)); }
+
+  // Description:
+  // Set/Get the source extent. This is the extent of the
+  // array that data will be copied from.
+  void SetSourceWholeExtent(vtkPixelExtent &srcExt)
+  { this->SrcWholeExt=srcExt; }
+
+  vtkPixelExtent &GetSourceWholeExtent()
+  { return this->SrcWholeExt; }
+
+  const vtkPixelExtent &GetSourceWholeExtent() const
+  { return this->SrcWholeExt; }
+
+  // Description:
+  // Set/Get the source extent. This is the subset extent in the
+  // array that data will be copied from.
+  void SetSourceExtent(vtkPixelExtent &srcExt)
+  { this->SrcExt=srcExt; }
+
+  vtkPixelExtent &GetSourceExtent()
+  { return this->SrcExt; }
+
+  const vtkPixelExtent &GetSourceExtent() const
+  { return this->SrcExt; }
+
+  // Description:
+  // Set/get the destination extent. This is the extent
+  // of the array that will receive the data.
+  void SetDestinationWholeExtent(vtkPixelExtent &destExt)
+  { this->DestWholeExt=destExt; }
+
+  vtkPixelExtent &GetDestinationWholeExtent()
+  { return this->DestWholeExt; }
+
+  const vtkPixelExtent &GetDestinationWholeExtent() const
+  { return this->DestWholeExt; }
+
+  // Description:
+  // Set/get the destination extent. This is the subset extent
+  // in the array that will receive the data.
+  void SetDestinationExtent(vtkPixelExtent &destExt)
+  { this->DestExt=destExt; }
+
+  vtkPixelExtent &GetDestinationExtent()
+  { return this->DestExt; }
+
+  const vtkPixelExtent &GetDestinationExtent() const
+  { return this->DestExt; }
+
+  // Description:
+  // Set/get the transaction id.
+  void SetTransactionId(int id)
+  { this->Id=id; }
+
+  int GetTransactionId() const
+  { return this->Id; }
+
+  // Description:
+  // Enable/disable non-blocking communication.
+  void SetUseBlockingSend(int val)
+  { this->UseBlockingSend=val; }
+
+  int GetUseBlockingSend() const
+  { return this->UseBlockingSend; }
+
+  void SetUseBlockingRecv(int val)
+  { this->UseBlockingRecv=val; }
+
+  int GetUseBlockingRecv() const
+  { return this->UseBlockingRecv; }
+
+  // Description:
+  // Transfer data from source to destination.
+  template<typename SOURCE_TYPE, typename DEST_TYPE>
+  int Execute(
+        MPI_Comm comm,
+        int rank,
+        int nComps,
+        SOURCE_TYPE *srcData,
+        DEST_TYPE *destData,
+        std::vector<MPI_Request> &reqs,
+        std::deque<MPI_Datatype> &types,
+        int tag);
+
+  // Description:
+  // Transfer data from source to destination. Convenience for working
+  // with the vtk data type enum rather than C types.
+  int Execute(
+        MPI_Comm comm,
+        int rank,
+        int nComps,
+        int srcType,
+        void *srcData,
+        int destType,
+        void *destData,
+        std::vector<MPI_Request> &reqs,
+        std::deque<MPI_Datatype> &types,
+        int tag);
+
+  // Description:
+  // Block transfer for local memory to memory transfers, without using mpi.
+  int Blit(
+         int nComps,
+         int srcType,
+         void *srcData,
+         int destType,
+         void *destData);
+
+private:
+  // dispatch helper for vtk data type enum
+  template<typename SOURCE_TYPE>
+  int Execute(
+        MPI_Comm comm,
+        int rank,
+        int nComps,
+        SOURCE_TYPE *srcData,
+        int destType,
+        void *destData,
+        std::vector<MPI_Request> &reqs,
+        std::deque<MPI_Datatype> &types,
+        int tag);
+
+private:
+  int Id;                      // transaction id
+  int SrcRank;                 // rank who owns source memory
+  vtkPixelExtent SrcWholeExt;  // source extent
+  vtkPixelExtent SrcExt;       // source subset to transfer
+  int DestRank;                // rank who owns destination memory
+  vtkPixelExtent DestWholeExt; // destination extent
+  vtkPixelExtent DestExt;      // destination subset
+  int UseBlockingSend;         // controls for non-blocking comm
+  int UseBlockingRecv;
+};
+
+//-----------------------------------------------------------------------------
+template<typename SOURCE_TYPE>
+int vtkPPixelTransfer::Execute(
+       MPI_Comm comm,
+       int rank,
+       int nComps,
+       SOURCE_TYPE *srcData,
+       int destType,
+       void *destData,
+       std::vector<MPI_Request> &reqs,
+       std::deque<MPI_Datatype> &types,
+       int tag)
+{
+  // second layer of dispatch
+  switch(destType)
+    {
+    vtkTemplateMacro(
+        return this->Execute(
+            comm,
+            rank,
+            nComps,
+            srcData,
+            (VTK_TT*)destData,
+            reqs,
+            types,
+            tag));
+    }
+  return 0;
+}
+
+//-----------------------------------------------------------------------------
+template<typename SOURCE_TYPE, typename DEST_TYPE>
+int vtkPPixelTransfer::Execute(
+       MPI_Comm comm,
+       int rank,
+       int nComps,
+       SOURCE_TYPE *srcData,
+       DEST_TYPE *destData,
+       std::vector<MPI_Request> &reqs,
+       std::deque<MPI_Datatype> &types,
+       int tag)
+{
+  int iErr = 0;
+  if ((comm == MPI_COMM_NULL) || (this->Local(rank)))
+    {
+    // transaction is local, bypass mpi in favor of memcpy
+    return vtkPixelTransfer::Blit(
+            this->SrcWholeExt,
+            this->SrcExt,
+            this->DestWholeExt,
+            this->DestExt,
+            nComps,
+            srcData,
+            nComps,
+            destData);
+    }
+
+  if (rank == this->DestRank)
+    {
+    // use mpi to receive the data
+    if (destData == NULL)
+      {
+      return -1;
+      }
+
+    MPI_Datatype subarray;
+    iErr = vtkMPIPixelViewNew<DEST_TYPE>(
+          this->DestWholeExt,
+          this->DestExt,
+          nComps,
+          subarray);
+    if (iErr)
+      {
+      return -4;
+      }
+
+    if (this->UseBlockingRecv)
+      {
+      MPI_Status stat;
+      iErr = MPI_Recv(
+            destData,
+            1,
+            subarray,
+            this->SrcRank,
+            tag,
+            comm,
+            &stat);
+      }
+    else
+      {
+      reqs.push_back(MPI_REQUEST_NULL);
+      iErr = MPI_Irecv(
+            destData,
+            1,
+            subarray,
+            this->SrcRank,
+            tag,
+            comm,
+            &reqs.back());
+      }
+
+    #define HOLD_RECV_TYPES
+    #ifdef HOLD_RECV_TYPES
+    types.push_back(subarray);
+    #else
+    MPI_Type_free(&subarray);
+    #endif
+
+    if (iErr)
+      {
+      return -5;
+      }
+    }
+
+  if (rank == this->SrcRank)
+    {
+    // use mpi to send the data
+    if (srcData == NULL)
+      {
+      return -1;
+      }
+
+    MPI_Datatype subarray;
+    iErr = vtkMPIPixelViewNew<SOURCE_TYPE>(
+          this->SrcWholeExt,
+          this->SrcExt,
+          nComps,
+          subarray);
+    if (iErr)
+      {
+      return -2;
+      }
+
+    if (this->UseBlockingSend)
+      {
+      iErr = MPI_Ssend(
+            srcData,
+            1,
+            subarray,
+            this->DestRank,
+            tag,
+            comm);
+      }
+    else
+      {
+      MPI_Request req;
+      iErr = MPI_Isend(
+            srcData,
+            1,
+            subarray,
+            this->DestRank,
+            tag,
+            comm,
+            &req);
+      #define SAVE_SEND_REQS
+      #ifdef SAVE_SEND_REQS
+      reqs.push_back(req);
+      #else
+      MPI_Request_free(&req);
+      #endif
+      }
+
+    #define HOLD_SEND_TYPES
+    #ifdef HOLD_SEND_TYPES
+    types.push_back(subarray);
+    #else
+    MPI_Type_free(&subarray);
+    #endif
+
+    if (iErr)
+      {
+      return -3;
+      }
+    }
+
+  return iErr;
+}
+
+ostream &operator<<(std::ostream &os, const vtkPPixelTransfer &gt);
+
+#endif
+// VTK-HeaderTest-Exclude: vtkPPixelTransfer.h
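A hedged end-to-end sketch tying the pieces together: rank srcRank owns a whole image and rank destRank receives one tile of it into its own, differently shaped buffer. The vectors/deques mirror the Execute() signature above; MoveTile and the tag value 101 are hypothetical:

  #include "vtkPPixelTransfer.h"
  #include <vector>
  #include <deque>

  // hypothetical: move one single-component float tile between ranks using
  // the non-blocking path, then wait and release the held MPI datatypes.
  int MoveTile(MPI_Comm comm, int rank, int srcRank, int destRank,
        const vtkPixelExtent &srcWhole, const vtkPixelExtent &destWhole,
        const vtkPixelExtent &tile, float *srcData, float *destData)
  {
    vtkPPixelTransfer tx(srcRank, srcWhole, tile, destRank, destWhole, 0);
    std::vector<MPI_Request> reqs;
    std::deque<MPI_Datatype> types;

    int iErr = tx.Execute(comm, rank, 1, srcData, destData, reqs, types, 101);
    if (iErr)
      {
      return iErr;
      }
    if (!reqs.empty())
      {
      MPI_Waitall((int)reqs.size(), &reqs[0], MPI_STATUSES_IGNORE);
      }
    // release the subarray types held for the duration of the transfer.
    for (size_t i = 0; i < types.size(); ++i)
      {
      MPI_Type_free(&types[i]);
      }
    return 0;
  }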
diff --git a/Rendering/ParallelLIC/vtkPSurfaceLICComposite.cxx b/Rendering/ParallelLIC/vtkPSurfaceLICComposite.cxx
new file mode 100644
index 0000000..0767d34
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPSurfaceLICComposite.cxx
@@ -0,0 +1,1857 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPSurfaceLICComposite.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkPSurfaceLICComposite.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkPixelExtent.h"
+#include "vtkPPixelTransfer.h"
+#include "vtkPainterCommunicator.h"
+#include "vtkPPainterCommunicator.h"
+#include "vtkRenderWindow.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkTextureObject.h"
+#include "vtkPixelBufferObject.h"
+#include "vtkFrameBufferObject2.h"
+#include "vtkRenderbuffer.h"
+#include "vtkShader2.h"
+#include "vtkShaderProgram2.h"
+#include "vtkUniformVariables.h"
+#include "vtkShader2Collection.h"
+#include "vtkOpenGLExtensionManager.h"
+#include "vtkgl.h"
+#include "vtkMPI.h"
+
+#include <list>
+#include <deque>
+#include <vector>
+#include <utility>
+#include <algorithm>
+#include <cstddef>
+
+using std::list;
+using std::deque;
+using std::vector;
+using std::pair;
+
+// use parallel timer for benchmarks and scaling
+// if not defined vtkTimerLog is used.
+// #define vtkSurfaceLICPainterTIME
+#if defined(vtkSurfaceLICPainterTIME)
+#include "vtkParallelTimer.h"
+#endif
+
+// Enable debug output.
+// 1 decomp extents, 2 +intermediate compositing steps
+#define vtkPSurfaceLICCompositeDEBUG 0
+#if vtkPSurfaceLICCompositeDEBUG>=1
+#include "vtkPixelExtentIO.h"
+#endif
+#if vtkPSurfaceLICCompositeDEBUG>=2
+#include "vtkTextureIO.h"
+#include <sstream>
+using std::ostringstream;
+using std::string;
+//----------------------------------------------------------------------------
+static
+string mpifn(int rank, const char *fn)
+{
+  ostringstream oss;
+  oss << rank << "_" << fn;
+  return oss.str();
+}
+#endif
+
+// use PBO's for MPI communication.
+
+#define PBO_RECV_BUFFERS
+
+// isolate this class's communications.
+// this is a non-scalable operation so
+// only use it for debugging.
+
+// #define DUPLICATE_COMMUNICATOR
+
+// compositing shader
+extern const char *vtkPSurfaceLICComposite_Comp;
+
+// ***************************************************************************
+static
+int maxNumPasses(){ return 100; }
+
+// ***************************************************************************
+static
+int encodeTag(int id, int tagBase)
+{
+  return maxNumPasses()*(id+1)+tagBase;
+}
+
+// ***************************************************************************
+static
+int decodeTag(int tag, int tagBase)
+{
+  return (tag-tagBase)/maxNumPasses() - 1;
+}
+
+// to sort rank/extent pairs by extent size
+// ***************************************************************************
+static
+bool operator<(
+      const pair<int, vtkPixelExtent> &l,
+      const pair<int, vtkPixelExtent> &r)
+{
+  return l.second<r.second;
+}
+
+// On Windows our callback must use the same calling convention
+// as the MPI library. Currently this is only an issue with
+// MS MPI, which uses __stdcall/__fastcall; others use __cdecl,
+// which matches VTK's defaults.
+#ifndef MPIAPI
+#define MPIAPI
+#endif
+// for parallel union of extents
+// ***************************************************************************
+static void MPIAPI
+vtkPixelExtentUnion(void *in, void *out, int *len, MPI_Datatype *type)
+{
+  (void)type; // known to be MPI_INT
+  int n = *len/4;
+  for (int i=0; i<n; ++i)
+    {
+    int ii = 4*i;
+    vtkPixelExtent lhs(((int*)in)+ii);
+    vtkPixelExtent rhs(((int*)out)+ii);
+    rhs |= lhs;
+    rhs.GetData(((int*)out)+ii);
+    }
+}
+
+// Description:
+// Container for our custom MPI_Op's
+class vtkPPixelExtentOps
+{
+public:
+  vtkPPixelExtentOps() : Union(MPI_OP_NULL) {}
+  ~vtkPPixelExtentOps();
+
+  // Description:
+  // Create/Delete the custom operations. If these
+  // methods are used before MPI is initialized or after
+  // MPI is finalized they have no effect.
+  void CreateOps();
+  void DeleteOps();
+
+  // Description:
+  // Get the operator for performing parallel
+  // unions.
+  MPI_Op GetUnion(){ return this->Union; }
+
+private:
+  MPI_Op Union;
+};
+
+// ---------------------------------------------------------------------------
+vtkPPixelExtentOps::~vtkPPixelExtentOps()
+{
+  this->DeleteOps();
+}
+
+// ---------------------------------------------------------------------------
+void vtkPPixelExtentOps::CreateOps()
+{
+  if ( (this->Union == MPI_OP_NULL)
+    && vtkPPainterCommunicator::MPIInitialized() )
+    {
+    MPI_Op_create(vtkPixelExtentUnion, 1, &this->Union);
+    }
+}
+
+// ---------------------------------------------------------------------------
+void vtkPPixelExtentOps::DeleteOps()
+{
+  if ( (this->Union != MPI_OP_NULL)
+     && vtkPPainterCommunicator::MPIInitialized()
+     && !vtkPPainterCommunicator::MPIFinalized() )
+    {
+    MPI_Op_free(&this->Union);
+    }
+}
+
+// ****************************************************************************
+void MPITypeFree(deque<MPI_Datatype> &types)
+{
+  size_t n = types.size();
+  for (size_t i=0; i<n; ++i)
+    {
+    MPI_Type_free(&types[i]);
+    }
+}
+
+// ****************************************************************************
+static
+size_t Size(deque< deque<vtkPixelExtent> > exts)
+{
+  size_t np = 0;
+  size_t nr = exts.size();
+  for (size_t r=0; r<nr; ++r)
+    {
+    const deque<vtkPixelExtent> &rexts = exts[r];
+    size_t ne = rexts.size();
+    for (size_t e=0; e<ne; ++e)
+      {
+      np += rexts[e].Size();
+      }
+    }
+  return np;
+}
+
+#if vtkPSurfaceLICCompositeDEBUG>=1 || defined(vtkSurfaceLICPainterTIME)
+// ****************************************************************************
+static
+int NumberOfExtents(deque< deque<vtkPixelExtent> > exts)
+{
+  size_t ne = 0;
+  size_t nr = exts.size();
+  for (size_t r=0; r<nr; ++r)
+    {
+    ne += exts[r].size();
+    }
+  return static_cast<int>(ne);
+}
+#endif
+
+#if vtkPSurfaceLICCompositeDEBUG>0
+// ****************************************************************************
+static
+ostream &operator<<(ostream &os, const vector<float> &vf)
+{
+  size_t n = vf.size();
+  if (n)
+    {
+    os << vf[0];
+    }
+  for (size_t i=1; i<n; ++i)
+    {
+    os << ", " << vf[i];
+    }
+  return os;
+}
+
+// ****************************************************************************
+static
+ostream &operator<<(ostream &os, const vector<vector<float> >  &vvf)
+{
+  size_t n = vvf.size();
+  for (size_t i=0; i<n; ++i)
+    {
+    os << i << " = {" << vvf[i] << "}" << endl;
+    }
+  return os;
+}
+#endif
+
+#if vtkPSurfaceLICCompositeDEBUG>=2
+// ****************************************************************************
+static
+int ScanMPIStatusForError(vector<MPI_Status> &stat)
+{
+  int nStats = stat.size();
+  for (int q=0; q<nStats; ++q)
+    {
+    int ierr = stat[q].MPI_ERROR;
+    if ((ierr != MPI_SUCCESS) && (ierr != MPI_ERR_PENDING))
+      {
+      char eStr[MPI_MAX_ERROR_STRING] = {'\0'};
+      int eStrLen = 0;
+      MPI_Error_string(ierr, eStr, &eStrLen);
+      cerr
+        << "transaction for request " << q << " failed." << endl
+        << eStr << endl
+        << endl;
+      return -1;
+      }
+    }
+  return 0;
+}
+#endif
+
+
+//-----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkPSurfaceLICComposite);
+
+// ----------------------------------------------------------------------------
+vtkPSurfaceLICComposite::vtkPSurfaceLICComposite()
+        :
+     vtkSurfaceLICComposite(),
+     PainterComm(NULL),
+     PixelOps(NULL),
+     CommRank(0),
+     CommSize(1),
+     Context(NULL),
+     FBO(NULL),
+     CompositeShader(NULL)
+{
+  this->PainterComm = new vtkPPainterCommunicator;
+  this->PixelOps = new vtkPPixelExtentOps;
+}
+
+// ----------------------------------------------------------------------------
+vtkPSurfaceLICComposite::~vtkPSurfaceLICComposite()
+{
+  delete this->PainterComm;
+  delete this->PixelOps;
+  if (this->CompositeShader)
+    {
+    this->CompositeShader->Delete();
+    }
+  if (this->FBO)
+    {
+    this->FBO->Delete();
+    }
+}
+
+// ----------------------------------------------------------------------------
+void vtkPSurfaceLICComposite::SetCommunicator(vtkPainterCommunicator *comm)
+{
+  #if DUPLICATE_COMMUNICATOR
+  this->PainterComm->Duplicate(comm);
+  #else
+  this->PainterComm->Copy(comm, false);
+  #endif
+  this->CommRank = this->PainterComm->GetRank();
+  this->CommSize = this->PainterComm->GetSize();
+  // do this here since we know that
+  // mpi is initialized by now.
+  this->PixelOps->CreateOps();
+}
+
+// ----------------------------------------------------------------------------
+void vtkPSurfaceLICComposite::SetContext(vtkOpenGLRenderWindow *rwin)
+{
+  if (this->Context == rwin)
+    {
+    return;
+    }
+  this->Context = rwin;
+
+  // free the existing shader and fbo
+  if ( this->CompositeShader )
+    {
+    this->CompositeShader->Delete();
+    this->CompositeShader = NULL;
+    }
+
+  if ( this->FBO )
+    {
+    this->FBO->Delete();
+    this->FBO = NULL;
+    }
+
+  if ( this->Context )
+    {
+    // load, compile, and link the shader
+    vtkShader2 *compositeShaderSrc = vtkShader2::New();
+    compositeShaderSrc->SetContext(this->Context);
+    compositeShaderSrc->SetType(VTK_SHADER_TYPE_FRAGMENT);
+    compositeShaderSrc->SetSourceCode(vtkPSurfaceLICComposite_Comp);
+
+    this->CompositeShader = vtkShaderProgram2::New();
+    this->CompositeShader->SetContext(this->Context);
+    this->CompositeShader->GetShaders()->AddItem(compositeShaderSrc);
+    this->CompositeShader->Build();
+
+    compositeShaderSrc->Delete();
+
+    // set up an FBO for rendering
+    this->FBO = vtkFrameBufferObject2::New();
+    this->FBO->SetContext(this->Context);
+    }
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::AllGatherExtents(
+        const deque<vtkPixelExtent> &localExts,
+        deque<deque<vtkPixelExtent> >&remoteExts,
+        vtkPixelExtent &dataSetExt)
+{
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "=====vtkPSurfaceLICComposite::AllGatherExtents" << endl;
+  #endif
+
+  // serialize the local extents
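+  // each vtkPixelExtent serializes to 4 ints in the send buffer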
+  int nLocal = static_cast<int>(localExts.size());
+  int localSize = 4*nLocal;
+  int *sendBuf = static_cast<int*>(malloc(localSize*sizeof(int)));
+  for (int i=0; i<nLocal; ++i)
+    {
+    localExts[i].GetData(sendBuf+4*i);
+    }
+
+  // share local extent counts
+  MPI_Comm comm = *(static_cast<MPI_Comm*>(this->PainterComm->GetCommunicator()));
+  int *nRemote = static_cast<int*>(malloc(this->CommSize*sizeof(int)));
+
+  MPI_Allgather(
+        &nLocal,
+        1,
+        MPI_INT,
+        nRemote,
+        1,
+        MPI_INT,
+        comm);
+
+  // allocate a buffer to receive the remote extents
+  int *recvCounts = static_cast<int*>(malloc(this->CommSize*sizeof(int)));
+  int *recvDispls = static_cast<int*>(malloc(this->CommSize*sizeof(int)));
+  int bufSize = 0;
+  for (int i=0; i<this->CommSize; ++i)
+    {
+    int n = 4*nRemote[i];
+    recvCounts[i] = n;
+    recvDispls[i] = bufSize;
+    bufSize += n;
+    }
+  int *recvBuf = static_cast<int*>(malloc(bufSize*sizeof(int)));
+
+  // collect remote extents
+  MPI_Allgatherv(
+        sendBuf,
+        localSize,
+        MPI_INT,
+        recvBuf,
+        recvCounts,
+        recvDispls,
+        MPI_INT,
+        comm);
+
+  // de-serialize the set of extents
+  dataSetExt.Clear();
+  remoteExts.resize(this->CommSize);
+  for (int i=0; i<this->CommSize; ++i)
+    {
+    int nRemt = recvCounts[i]/4;
+    remoteExts[i].resize(nRemt);
+
+    int *pBuf = recvBuf+recvDispls[i];
+
+    for (int j=0; j<nRemt; ++j)
+      {
+      vtkPixelExtent &remoteExt = remoteExts[i][j];
+      remoteExt.SetData(pBuf+4*j);
+      dataSetExt |= remoteExt;
+      }
+    }
+
+  free(sendBuf);
+  free(nRemote);
+  free(recvCounts);
+  free(recvDispls);
+  free(recvBuf);
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::AllReduceVectorMax(
+    const deque<vtkPixelExtent> &originalExts, // local data
+    const deque<deque<vtkPixelExtent> > &newExts, // all composited regions
+    float *vectors,
+    vector<vector<float> > &vectorMax)
+{
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "=====vtkPSurfaceLICComposite::AllReduceVectorMax" << endl;
+  #endif
+
+  // vector data is currently on the original decomp (m blocks for n ranks).
+  // for each of the p new blocks in the new decomp (p blocks for n ranks)
+  // each rank computes the max over its local overlap, and a reduction
+  // produces the true value.
+  size_t nOriginal = originalExts.size();
+  MPI_Comm comm = *(static_cast<MPI_Comm*>(this->PainterComm->GetCommunicator()));
+  vector<vector<float> > tmpMax(this->CommSize);
+  for (int r=0; r<this->CommSize; ++r)
+    {
+    // check the intersection of each new extent with each
+    // original extent. data for the original extents is local.
+    size_t nNew = newExts[r].size();
+    tmpMax[r].resize(nNew, -VTK_FLOAT_MAX);
+    for (size_t n=0; n<nNew; ++n)
+      {
+      const vtkPixelExtent &newExt = newExts[r][n];
+      float eMax = -VTK_FLOAT_MAX;
+      for (size_t o=0; o<nOriginal; ++o)
+        {
+        vtkPixelExtent intExt(originalExts[o]);
+        intExt &= newExt;
+        if (!intExt.Empty())
+          {
+          float oMax = this->VectorMax(intExt, vectors);
+          eMax = eMax<oMax ? oMax : eMax;
+          }
+        }
+
+      MPI_Allreduce(
+            MPI_IN_PLACE,
+            &eMax,
+            1,
+            MPI_FLOAT,
+            MPI_MAX,
+            comm);
+
+      tmpMax[r][n] = eMax;
+      }
+    }
+
+  // since integration runs into other blocks' data, use the max of the
+  // block and its neighbors for the guard cell size computation
+  vectorMax.resize(this->CommSize);
+  for (int r=0; r<this->CommSize; ++r)
+    {
+    size_t nNew = newExts[r].size();
+    vectorMax[r].resize(nNew);
+    for (size_t n=0; n<nNew; ++n)
+      {
+      vtkPixelExtent newExt = newExts[r][n];
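+      // grow by one pixel so abutting blocks intersect below and count as neighbors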
+      newExt.Grow(1);
+
+      float eMax = tmpMax[r][n];
+
+      // find neighbors
+      for (int R=0; R<this->CommSize; ++R)
+        {
+        size_t NNew = newExts[R].size();
+        for (size_t N=0; N<NNew; ++N)
+          {
+          vtkPixelExtent intExt(newExts[R][N]);
+          intExt &= newExt;
+
+          if (!intExt.Empty())
+            {
+            // this is a neighbor (or self); take the larger of ours
+            // and theirs
+            float nMax = tmpMax[R][N];
+            eMax = eMax<nMax ? nMax : eMax;
+            }
+          }
+        }
+
+      vectorMax[r][n] = eMax;
+      }
+    }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::DecomposeExtent(
+      vtkPixelExtent &in,
+      int nPieces,
+      list<vtkPixelExtent> &out)
+{
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "=====vtkPSurfaceLICComposite::DecomposeWindowExtent" << endl;
+  #endif
+
+  int res[3];
+  in.Size(res);
+
+  int nPasses[2] = {0,0};
+  int maxPasses[2] = {res[0]/2, res[1]/2};
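+  // cap the number of split passes per direction (enough to reach unit size extents)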
+
+  out.push_back(in);
+
+  list<vtkPixelExtent> splitExts;
+
+  int dir=0;
+  while(1)
+    {
+    // stop when we have enough pieces or all extents in out have unit size
+    int nExts = static_cast<int>(out.size());
+    if ( (nExts >= nPieces)
+     || ((nPasses[0] > maxPasses[0]) && (nPasses[1] > maxPasses[1])) )
+      {
+      break;
+      }
+
+    for (int i=0; i<nExts; ++i)
+      {
+      int nExtsTotal = static_cast<int>(out.size() + splitExts.size());
+      if (nExtsTotal >= nPieces)
+        {
+        break;
+        }
+
+      // split this ext into two
+      vtkPixelExtent ext = out.back();
+      out.pop_back();
+
+      vtkPixelExtent newExt = ext.Split(dir);
+
+      splitExts.push_back(ext);
+
+      if (!newExt.Empty())
+        {
+        splitExts.push_back(newExt);
+        }
+      }
+
+    // transfer the split extents to the head so that
+    // they are split again only after the others.
+    out.insert(out.begin(), splitExts.begin(), splitExts.end());
+    splitExts.clear();
+
+    nPasses[dir] += 1;
+
+    // alternate splitting direction
+    dir = (dir + 1) % 2;
+    if (nPasses[dir] > maxPasses[dir])
+      {
+      dir = (dir + 1) % 2;
+      }
+    }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::DecomposeScreenExtent(
+      deque< deque<vtkPixelExtent> >&newExts,
+      float *vectors)
+{
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "=====vtkPSurfaceLICComposite::DecomposeWindowExtent" << endl;
+  #endif
+
+  // TODO -- the balanced compositor is not finished. details
+  // below.
+  (void)vectors;
+
+  // use 128x128 extents
+  int dataSetSize[2];
+  this->DataSetExt.Size(dataSetSize);
+
+  int ni = dataSetSize[0]/128;
+  ni = ni<1 ? 1 : ni;
+
+  int nj = dataSetSize[1]/128;
+  nj = nj<1 ? 1 : nj;
+
+  int nPieces = ni*nj;
+  nPieces = nPieces<this->CommSize ? this->CommSize : nPieces;
+
+  // decompose
+  list<vtkPixelExtent> tmpOut0;
+  this->DecomposeExtent(this->DataSetExt, nPieces, tmpOut0);
+
+  // make the assignment to ranks
+  int nPer = nPieces/this->CommSize;
+  int nLarge = nPieces%this->CommSize;
+
+  deque<deque<vtkPixelExtent> > tmpOut1;
+  tmpOut1.resize(this->CommSize);
+
+  int N = static_cast<int>(tmpOut0.size());
+  list<vtkPixelExtent>::iterator it = tmpOut0.begin();
+
+  for (int r=0; r<this->CommSize; ++r)
+    {
+    int n = nPer;
+    if (r < nLarge)
+      {
+      ++n;
+      }
+    for (int i=0; (i<n) && (N>0); ++i,--N,++it)
+      {
+      tmpOut1[r].push_back(*it);
+      }
+    }
+
+  // TODO -- we need to implement some sort of load
+  // balancing here.
+  // compute tight extents and assign to ranks based on weight
+  // and location
+  newExts = tmpOut1;
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::MakeDecompLocallyDisjoint(
+     const deque< deque< vtkPixelExtent> > &in,
+     deque< deque< vtkPixelExtent> > &out)
+{
+  size_t nr = in.size();
+  out.clear();
+  out.resize(nr);
+  for (size_t r=0; r<nr; ++r)
+    {
+    deque<vtkPixelExtent> tmp(in[r]);
+    this->MakeDecompDisjoint(tmp, out[r]);
+    }
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::MakeDecompDisjoint(
+     const deque< deque< vtkPixelExtent> > &in,
+     deque< deque< vtkPixelExtent> > &out,
+     float *vectors)
+{
+  // flatten
+  deque<pair<int, vtkPixelExtent> > tmpIn;
+  for (int r=0; r<this->CommSize; ++r)
+    {
+    const deque<vtkPixelExtent> &blocks = in[r];
+    size_t nBlocks = blocks.size();
+    for (size_t b=0; b<nBlocks; ++b)
+      {
+      pair<int, vtkPixelExtent> elem(r, blocks[b]);
+      tmpIn.push_back(elem);
+      }
+    }
+  // sort by size
+  sort(tmpIn.begin(), tmpIn.end());
+
+  // from largest to smallest, make each extent disjoint
+  // from the others
+  deque<pair<int, vtkPixelExtent> > tmpOut0;
+
+  while ( !tmpIn.empty() )
+    {
+    // largest element
+    int rank = tmpIn.back().first;
+    deque<vtkPixelExtent> tmpOut1(1,tmpIn.back().second);
+
+    tmpIn.pop_back();
+
+    // subtract smaller elements
+    size_t ns = tmpIn.size();
+    for (size_t se=0; se<ns; ++se)
+      {
+      vtkPixelExtent &selem = tmpIn[se].second;
+      deque<vtkPixelExtent> tmpOut2;
+      size_t nl = tmpOut1.size();
+      for (size_t le=0; le<nl; ++le)
+        {
+        vtkPixelExtent &lelem = tmpOut1[le];
+        vtkPixelExtent::Subtract(lelem, selem, tmpOut2);
+        }
+      tmpOut1 = tmpOut2;
+      }
+
+    // move to output
+    size_t nn = tmpOut1.size();
+    for (size_t ne=0; ne<nn; ++ne)
+      {
+      pair<int, vtkPixelExtent> elem(rank, tmpOut1[ne]);
+      tmpOut0.push_back(elem);
+      }
+    }
+
+  // reduce communication and compositing overhead by
+  // shrinking the new set of extents to tightly bound the
+  // data on its new/future layout.
+  int nx[2];
+  this->WindowExt.Size(nx);
+
+  const deque<vtkPixelExtent> &inR = in[this->CommRank];
+  size_t ni = inR.size();
+
+  deque<pair<int, vtkPixelExtent> > tmpOut1(tmpOut0);
+  size_t ne = tmpOut1.size();
+  for (size_t e=0; e<ne; ++e)
+    {
+    vtkPixelExtent &newExt = tmpOut1[e].second;
+    vtkPixelExtent tightExt;
+    for (size_t i=0; i<ni; ++i)
+      {
+      vtkPixelExtent inExt(inR[i]);
+      inExt &= newExt;
+      if (!inExt.Empty())
+        {
+        GetPixelBounds(vectors, nx[0], inExt);
+        tightExt |= inExt; // accumulate the contrib from local data
+        }
+      }
+    newExt = tightExt;
+    }
+
+  // accumulate contrib from remote data
+  size_t remSize = 4*ne;
+  vector<int> rem(remSize);
+  int *pRem = &rem[0];
+  for (size_t e=0; e<ne; ++e, pRem+=4)
+    {
+    tmpOut1[e].second.GetData(pRem);
+    }
+  MPI_Comm comm = *(static_cast<MPI_Comm*>(this->PainterComm->GetCommunicator()));
+  MPI_Op parUnion = this->PixelOps->GetUnion();
+  MPI_Allreduce(
+        MPI_IN_PLACE,
+        &rem[0],
+        (int)remSize,
+        MPI_INT,
+        parUnion,
+        comm);
+
+  // move from flat order back to rank indexed order and remove
+  // empty extents
+  pRem = &rem[0];
+  out.resize(this->CommSize);
+  for (size_t e=0; e<ne; ++e, pRem+=4)
+    {
+    int r = tmpOut1[e].first;
+    vtkPixelExtent ext(pRem);
+    if (!ext.Empty())
+      {
+      out[r].push_back(ext);
+      }
+    }
+
+  // merge compatible extents
+  for (int r=0; r<this->CommSize; ++r)
+    {
+    vtkPixelExtent::Merge(out[r]);
+    }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::AddGuardPixels(
+      const deque<deque<vtkPixelExtent> > &exts,
+      deque<deque<vtkPixelExtent> > &guardExts,
+      deque<deque<vtkPixelExtent> > &disjointGuardExts,
+      float *vectors)
+{
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "=====vtkPSurfaceLICComposite::AddGuardPixels" << endl;
+  #endif
+  #ifdef vtkSurfaceLICPainterTIME
+  vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+  #endif
+
+  guardExts.resize(this->CommSize);
+  disjointGuardExts.resize(this->CommSize);
+
+  int nx[2];
+  this->WindowExt.Size(nx);
+  float fudge = this->GetFudgeFactor(nx);
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << " fudge=" << fudge << endl;
+  #endif
+
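+  // upper-bound estimate (in pixels) of how far the LIC integration
+  // can reach past a block; used to size the guard regions below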
+  float arc
+    = this->StepSize*this->NumberOfSteps*this->NumberOfGuardLevels*fudge;
+
+  if (this->NormalizeVectors)
+    {
+    // when normalizing, velocity is always 1, so all extents have the
+    // same number of guard cells.
+    int ng
+      = static_cast<int>(arc)
+      + this->NumberOfEEGuardPixels
+      + this->NumberOfAAGuardPixels;
+    ng = ng<2 ? 2 : ng;
+    #ifdef vtkSurfaceLICPainterTIME
+    log->GetHeader() << "ng=" << ng << "\n";
+    #endif
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    cerr << "ng=" << ng << endl;
+    #endif
+    for (int r=0; r<this->CommSize; ++r)
+      {
+      deque<vtkPixelExtent> tmpExts(exts[r]);
+      int nExts = static_cast<int>(tmpExts.size());
+      // add guard pixels
+      for (int b=0; b<nExts; ++b)
+        {
+        tmpExts[b].Grow(ng);
+        tmpExts[b] &= this->DataSetExt;
+        }
+      guardExts[r] = tmpExts;
+      // make sure it's disjoint
+      disjointGuardExts[r].clear();
+      this->MakeDecompDisjoint(tmpExts, disjointGuardExts[r]);
+      }
+    }
+  else
+    {
+    // when not normalizing during integration we need max(V) on the LIC
+    // decomp. Each domain has the potential to require a unique number
+    // of guard cells.
+    vector<vector<float> > vectorMax;
+    this->AllReduceVectorMax(
+            this->BlockExts,
+            exts,
+            vectors,
+            vectorMax);
+
+    #ifdef vtkSurfaceLICPainterTIME
+    log->GetHeader() << "ng=";
+    #endif
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    cerr << "ng=";
+    #endif
+    for (int r=0; r<this->CommSize; ++r)
+      {
+      deque<vtkPixelExtent> tmpExts(exts[r]);
+      size_t nExts = tmpExts.size();
+      for (size_t b=0; b<nExts; ++b)
+        {
+        int ng
+          = static_cast<int>(vectorMax[r][b]*arc)
+          + this->NumberOfEEGuardPixels
+          + this->NumberOfAAGuardPixels;
+        ng = ng<2 ? 2 : ng;
+        #ifdef vtkSurfaceLICPainterTIME
+        log->GetHeader() << " " << ng;
+        #endif
+        #if vtkPSurfaceLICCompositeDEBUG>=2
+        cerr << "  " << ng;
+        #endif
+        tmpExts[b].Grow(ng);
+        tmpExts[b] &= this->DataSetExt;
+        }
+      guardExts[r] = tmpExts;
+      // make sure it's disjoint
+      disjointGuardExts[r].clear();
+      this->MakeDecompDisjoint(tmpExts, disjointGuardExts[r]);
+      }
+    #ifdef vtkSurfaceLICPainterTIME
+    log->GetHeader() << "\n";
+    #endif
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    cerr << endl;
+    #endif
+    }
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+double vtkPSurfaceLICComposite::EstimateCommunicationCost(
+      const deque<deque<vtkPixelExtent> > &srcExts,
+      const deque<deque<vtkPixelExtent> > &destExts)
+{
+  // compute the number of off-rank overlapping pixels, this is
+  // the number of pixels that need to be communicated. This is
+  // not the number of pixels to be composited since some of those
+  // may be on-rank.
+
+  size_t total = 0;
+  size_t overlap = 0;
+
+  for (int sr=0; sr<this->CommSize; ++sr)
+    {
+    size_t nse = srcExts[sr].size();
+    for (size_t se=0; se<nse; ++se)
+      {
+      const vtkPixelExtent &srcExt = srcExts[sr][se];
+      total += srcExt.Size(); // count all pixels in the total
+
+      for (int dr=0; dr<this->CommSize; ++dr)
+        {
+        // only off-rank overlap incurs comm cost
+        if (sr == dr)
+          {
+          continue;
+          }
+
+        size_t nde = destExts[dr].size();
+        for (size_t de=0; de<nde; ++de)
+          {
+          vtkPixelExtent destExt = destExts[dr][de];
+          destExt &= srcExt;
+          if (!destExt.Empty())
+            {
+            overlap += destExt.Size(); // cost is number of overlap pixels
+            }
+          }
+        }
+      }
+    }
+
+  return (static_cast<double>(overlap))/(static_cast<double>(total));
+}
+
+// ----------------------------------------------------------------------------
+double vtkPSurfaceLICComposite::EstimateDecompEfficiency(
+      const deque< deque<vtkPixelExtent> > &exts,
+      const deque< deque<vtkPixelExtent> > &guardExts)
+{
+  // number of pixels in the domain decomp
+  double ne = static_cast<double>(Size(exts));
+  double nge = static_cast<double>(Size(guardExts));
+
+  // efficiency is the ratio of valid pixels
+  // to guard pixels
+  return ne/fabs(ne - nge);
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::BuildProgram(float *vectors)
+{
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "=====vtkPSurfaceLICComposite::BuildProgram" << endl;
+  #endif
+
+  #ifdef vtkSurfaceLICPainterTIME
+  vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+  #endif
+
+  // gather current geometry extents, compute the whole extent
+  deque<deque<vtkPixelExtent> >allBlockExts;
+  this->AllGatherExtents(
+        this->BlockExts,
+        allBlockExts,
+        this->DataSetExt);
+
+  if (this->Strategy == COMPOSITE_AUTO)
+    {
+    double commCost = this->EstimateCommunicationCost(allBlockExts, allBlockExts);
+    #ifdef vtkSurfaceLICPainterTIME
+    log->GetHeader() << "in-place comm cost=" << commCost << "\n";
+    #endif
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    cerr << "in-place comm cost=" << commCost << endl;
+    #endif
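+    // choose in-place compositing when 30% or fewer of the pixels
+    // would move off-rank, otherwise use the disjoint variant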
+    if (commCost <= 0.3)
+      {
+      this->Strategy = COMPOSITE_INPLACE;
+      #ifdef vtkSurfaceLICPainterTIME
+      log->GetHeader() << "using in-place composite\n";
+      #endif
+      #if vtkPSurfaceLICCompositeDEBUG>=2
+      cerr << "using in-place composite" << endl;
+      #endif
+      }
+    else
+      {
+      this->Strategy = COMPOSITE_INPLACE_DISJOINT;
+      #ifdef vtkSurfaceLICPainterTIME
+      log->GetHeader() << "using disjoint composite\n";
+      #endif
+      #if vtkPSurfaceLICCompositeDEBUG>=2
+      cerr << "using disjoint composite" << endl;
+      #endif
+      }
+    }
+
+  // decompose the screen
+  deque< deque<vtkPixelExtent> > newExts;
+  switch (this->Strategy)
+    {
+    case COMPOSITE_INPLACE:
+      // make it locally disjoint to avoid redundant computation
+      this->MakeDecompLocallyDisjoint(allBlockExts, newExts);
+      break;
+
+    case COMPOSITE_INPLACE_DISJOINT:
+      this->MakeDecompDisjoint(allBlockExts, newExts, vectors);
+      break;
+
+    case COMPOSITE_BALANCED:
+      this->DecomposeScreenExtent(newExts, vectors);
+      break;
+
+    default:
+      return -1;
+    }
+
+  #if defined(vtkSurfaceLICPainterTIME) || vtkPSurfaceLICCompositeDEBUG>=2
+  double commCost = this->EstimateCommunicationCost(allBlockExts, newExts);
+  #endif
+  #ifdef vtkSurfaceLICPainterTIME
+  log->GetHeader() << "actual comm cost=" << commCost << "\n";
+  #endif
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "actual comm cost=" << commCost << endl;
+  #endif
+
+  // save the local decomp
+  // it's the valid region as no guard pixels were added
+  this->CompositeExt = newExts[this->CommRank];
+
+  int id=0;
+  this->ScatterProgram.clear();
+  if (this->Strategy != COMPOSITE_INPLACE)
+    {
+    // construct program describing communication patterns that are
+    // required to move data to geometry decomp from the new lic
+    // decomp after LIC
+    for (int srcRank=0; srcRank<this->CommSize; ++srcRank)
+      {
+      deque<vtkPixelExtent> &srcBlocks = newExts[srcRank];
+      int nSrcBlocks = static_cast<int>(srcBlocks.size());
+
+      for (int sb=0; sb<nSrcBlocks; ++sb)
+        {
+        const vtkPixelExtent &srcExt = srcBlocks[sb];
+
+        for (int destRank=0; destRank<this->CommSize; ++destRank)
+          {
+          int nBlocks = static_cast<int>(allBlockExts[destRank].size());
+          for (int b=0; b<nBlocks; ++b)
+            {
+            const vtkPixelExtent &destExt = allBlockExts[destRank][b];
+
+            vtkPixelExtent sharedExt(destExt);
+            sharedExt &= srcExt;
+
+            if (!sharedExt.Empty())
+              {
+              this->ScatterProgram.push_back(
+                    vtkPPixelTransfer(
+                          srcRank,
+                          this->WindowExt,
+                          sharedExt,
+                          destRank,
+                          this->WindowExt,
+                          sharedExt,
+                          id));
+              }
+            id += 1;
+            }
+          }
+        }
+      }
+    }
+
+  #if vtkPSurfaceLICCompositeDEBUG>=1
+  vtkPixelExtentIO::Write(this->CommRank, "ViewExtent.vtk", this->WindowExt);
+  vtkPixelExtentIO::Write(this->CommRank, "GeometryDecomp.vtk", allBlockExts);
+  vtkPixelExtentIO::Write(this->CommRank, "LICDecomp.vtk", newExts);
+  #endif
+
+  // add guard cells to the new decomp that prevent artifacts
+  deque<deque<vtkPixelExtent> > guardExts;
+  deque<deque<vtkPixelExtent> > disjointGuardExts;
+  this->AddGuardPixels(newExts, guardExts, disjointGuardExts, vectors);
+
+  #if vtkPSurfaceLICCompositeDEBUG>=1
+  vtkPixelExtentIO::Write(this->CommRank, "LICDecompGuard.vtk", guardExts);
+  vtkPixelExtentIO::Write(this->CommRank, "LICDisjointDecompGuard.vtk", disjointGuardExts);
+  #endif
+
+  #if defined(vtkSurfaceLICPainterTIME) || vtkPSurfaceLICCompositeDEBUG>=2
+  double efficiency = this->EstimateDecompEfficiency(newExts, disjointGuardExts);
+  size_t nNewExts = NumberOfExtents(newExts);
+  #endif
+  #if defined(vtkSurfaceLICPainterTIME)
+  log->GetHeader()
+    << "decompEfficiency=" << efficiency << "\n"
+    << "numberOfExtents=" << nNewExts << "\n";
+  #endif
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr
+    << "decompEfficiency=" << efficiency << endl
+    << "numberOfExtents=" << nNewExts << endl;
+  #endif
+
+  // save the local decomp with guard cells
+  this->GuardExt = guardExts[this->CommRank];
+  this->DisjointGuardExt = disjointGuardExts[this->CommRank];
+
+  // construct program describing communication patterns that are
+  // required to move data from the geometry decomp to the new
+  // disjoint decomp containing guard pixels
+  this->GatherProgram.clear();
+  id=0;
+  for (int destRank=0; destRank<this->CommSize; ++destRank)
+    {
+    deque<vtkPixelExtent> &destBlocks = disjointGuardExts[destRank];
+    int nDestBlocks = static_cast<int>(destBlocks.size());
+
+    for (int db=0; db<nDestBlocks; ++db)
+      {
+      const vtkPixelExtent &destExt = destBlocks[db];
+
+      for (int srcRank=0; srcRank<this->CommSize; ++srcRank)
+        {
+        int nBlocks = static_cast<int>(allBlockExts[srcRank].size());
+        for (int b=0; b<nBlocks; ++b)
+          {
+          const vtkPixelExtent &srcExt = allBlockExts[srcRank][b];
+
+          vtkPixelExtent sharedExt(destExt);
+          sharedExt &= srcExt;
+
+          if (!sharedExt.Empty())
+            {
+            // to move vectors for the LIC decomp
+            // into a contiguous recv buffer
+            this->GatherProgram.push_back(
+                  vtkPPixelTransfer(
+                        srcRank,
+                        this->WindowExt,
+                        sharedExt,
+                        destRank,
+                        sharedExt, // dest ext
+                        sharedExt,
+                        id));
+            }
+
+          id += 1;
+          }
+        }
+      }
+    }
+
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << *this << endl;
+  #endif
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::Gather(
+        void *pSendPBO,
+        int dataType,
+        int nComps,
+        vtkTextureObject *&newImage)
+{
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "=====vtkPSurfaceLICComposite::Composite" << endl;
+  #endif
+
+  // two pipelines depending on whether this process receives or sends
+  //
+  // send:
+  // tex -> pbo -> mpi_send
+  //
+  // recv:
+  // mpi_recv -> pbo -> tex -> composite shader -> fbo
+
+  // pass id is encoded into the mpi tag for non-blocking comm
+  this->Pass += 1;
+
+  // validate inputs
+  if (this->Pass >= maxNumPasses())
+    {
+    return -1;
+    }
+  if (pSendPBO == NULL)
+    {
+    return -2;
+    }
+  if (this->Context == NULL)
+    {
+    return -3;
+    }
+  if (this->CompositeShader == NULL)
+    {
+    return -4;
+    }
+
+  // get the size of the array datatype
+  int dataTypeSize = 0;
+  switch (dataType)
+    {
+    vtkTemplateMacro(dataTypeSize = sizeof(VTK_TT););
+    default:
+      return -5;
+    }
+
+  // initiate non-blocking comm
+  MPI_Comm comm = *(static_cast<MPI_Comm*>(this->PainterComm->GetCommunicator()));
+  int nTransactions = static_cast<int>(this->GatherProgram.size());
+  vector<MPI_Request> mpiRecvReqs;
+  vector<MPI_Request> mpiSendReqs;
+  deque<MPI_Datatype> mpiTypes;
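+  // per-transaction receive buffers, allocated below only for
+  // transactions this rank receives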
+  #ifdef PBO_RECV_BUFFERS
+  deque<vtkPixelBufferObject*> recvPBOs(nTransactions, static_cast<vtkPixelBufferObject*>(NULL));
+  #else
+  deque<void*> recvBufs(nTransactions, static_cast<void*>(NULL));
+  #endif
+  for (int j=0; j<nTransactions; ++j)
+    {
+    vtkPPixelTransfer &transaction = this->GatherProgram[j];
+
+    // postpone local transactions; they will be overlapped
+    // with transactions requiring communication
+    if (transaction.Local(this->CommRank))
+      {
+      continue;
+      }
+
+    #ifdef PBO_RECV_BUFFERS
+    void *pRecvPBO = NULL;
+    #endif
+
+    // encode transaction.
+    int tag = encodeTag(j, this->Pass);
+
+    if ( transaction.Receiver(this->CommRank) )
+      {
+      // allocate receive buffers
+      const vtkPixelExtent &destExt = transaction.GetDestinationExtent();
+
+      unsigned int pboSize = static_cast<unsigned int>(destExt.Size()*nComps);
+      unsigned int bufSize = pboSize*dataTypeSize;
+
+      #ifdef PBO_RECV_BUFFERS
+      vtkPixelBufferObject *pbo;
+      pbo = vtkPixelBufferObject::New();
+      pbo->SetContext(this->Context);
+      pbo->SetType(dataType);
+      pbo->SetComponents(nComps);
+      pbo->SetSize(pboSize);
+      recvPBOs[j] = pbo;
+
+      pRecvPBO = pbo->MapUnpackedBuffer(bufSize);
+      #else
+      recvBufs[j] = malloc(bufSize);
+      #endif
+      }
+
+    vector<MPI_Request> &mpiReqs
+      = transaction.Receiver(this->CommRank) ? mpiRecvReqs : mpiSendReqs;
+
+    // start send/recv data
+    int iErr = 0;
+    iErr = transaction.Execute(
+        comm,
+        this->CommRank,
+        nComps,
+        dataType,
+        pSendPBO,
+        dataType,
+        #ifdef PBO_RECV_BUFFERS
+        pRecvPBO,
+        #else
+        recvBufs[j],
+        #endif
+        mpiReqs,
+        mpiTypes,
+        tag);
+    if (iErr)
+      {
+      cerr
+        << this->CommRank
+        << " transaction " << j << ":" << tag
+        << " failed " << iErr << endl
+        << transaction << endl;
+      }
+    }
+
+  // overlap framebuffer and shader config with communication
+  unsigned int winExtSize[2];
+  this->WindowExt.Size(winExtSize);
+
+  if (newImage == NULL)
+    {
+    newImage = vtkTextureObject::New();
+    newImage->SetContext(this->Context);
+    newImage->Create2D(
+          winExtSize[0],
+          winExtSize[1],
+          nComps,
+          dataType,
+          false);
+    }
+
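+  // composite into newImage by rendering each received block as a
+  // textured quad through the FBO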
+  this->FBO->SaveCurrentBindings();
+  this->FBO->Bind(vtkgl::FRAMEBUFFER_EXT);
+  this->FBO->AddColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U, newImage);
+  this->FBO->ActivateDrawBuffer(0U);
+
+  vtkRenderbuffer *depthBuf = vtkRenderbuffer::New();
+  depthBuf->SetContext(this->Context);
+  depthBuf->CreateDepthAttachment(winExtSize[0], winExtSize[1]);
+  this->FBO->AddDepthAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, depthBuf);
+
+  vtkCheckFrameBufferStatusMacro(vtkgl::FRAMEBUFFER_EXT);
+
+  // the LIC'er requires all fragments in the vector
+  // texture to be initialized to 0
+  this->FBO->InitializeViewport(winExtSize[0], winExtSize[1]);
+  glEnable(GL_DEPTH_TEST);
+  glDisable(GL_SCISSOR_TEST);
+  glClearColor(0.0, 0.0, 0.0, 0.0);
+  glClear(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
+
+  vtkUniformVariables *uniforms = this->CompositeShader->GetUniformVariables();
+  uniforms->SetUniformit("texData", 0);
+  this->CompositeShader->Use();
+
+  // overlap compositing of local data with communication
+  for (int j=0; j<nTransactions; ++j)
+    {
+    vtkPPixelTransfer &transaction = this->GatherProgram[j];
+
+    if (!transaction.Local(this->CommRank))
+      {
+      continue;
+      }
+
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    cerr
+      << this->CommRank << ":" << j << ":"
+      << encodeTag(j, this->Pass) << " Local " << transaction
+      << endl;
+    #endif
+
+    const vtkPixelExtent &destExt = transaction.GetDestinationExtent();
+    unsigned int pboSize = static_cast<unsigned int>(destExt.Size()*nComps);
+    unsigned int bufSize = pboSize*dataTypeSize;
+
+    vtkPixelBufferObject *pbo = vtkPixelBufferObject::New();
+    pbo->SetContext(this->Context);
+    pbo->SetType(dataType);
+    pbo->SetComponents(nComps);
+    pbo->SetSize(pboSize);
+
+    void *pRecvPBO = pbo->MapUnpackedBuffer(bufSize);
+
+    int iErr = transaction.Blit(
+          nComps,
+          dataType,
+          pSendPBO,
+          dataType,
+          pRecvPBO);
+
+    if (iErr)
+      {
+      cerr
+        << this->CommRank
+        << " local transaction " << j << ":" << this->Pass
+        << " failed " << iErr << endl
+        << transaction << endl;
+      }
+
+    pbo->UnmapUnpackedBuffer();
+
+    unsigned int destDims[2];
+    destExt.Size(destDims);
+
+    vtkTextureObject *tex = vtkTextureObject::New();
+    tex->SetContext(this->Context);
+    tex->Create2D(destDims[0], destDims[1], nComps, pbo, false);
+
+    pbo->Delete();
+
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    ostringstream oss;
+    oss << j << ":" << this->Pass << "_localRecvdData.vtk";
+    vtkTextureIO::Write(mpifn(this->CommRank, oss.str().c_str()), tex);
+    #endif
+
+    // Compositing because of overlap in guard pixels
+    this->ExecuteShader(destExt, tex);
+
+    tex->Delete();
+    }
+
+  // composite inflight data as it arrives.
+  int nRecvReqs = static_cast<int>(mpiRecvReqs.size());
+  for (int i=0; i<nRecvReqs; ++i)
+    {
+    // wait for the completion of one of the recvs
+    MPI_Status stat;
+    int reqId;
+    int iErr = MPI_Waitany(nRecvReqs, &mpiRecvReqs[0], &reqId, &stat);
+    if (iErr)
+      {
+      vtkErrorMacro("comm error in recv");
+      }
+
+    // decode transaction id
+    int j = decodeTag(stat.MPI_TAG, this->Pass);
+    vtkPPixelTransfer &transaction = this->GatherProgram[j];
+
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    cerr
+      << this->CommRank << ":" << j << ":"
+      << stat.MPI_TAG << " Recv " << transaction
+      << endl;
+    #endif
+
+    // move recv'd data from pbo to texture
+    const vtkPixelExtent &destExt = transaction.GetDestinationExtent();
+
+    unsigned int destDims[2];
+    destExt.Size(destDims);
+
+    #ifdef PBO_RECV_BUFFERS
+    vtkPixelBufferObject *&pbo = recvPBOs[j];
+    pbo->UnmapUnpackedBuffer();
+    #else
+    unsigned int pboSize = nComps*destExt.Size();
+    unsigned int bufSize = pboSize*dataTypeSize;
+
+    vtkPixelBufferObject *pbo = vtkPixelBufferObject::New();
+    pbo->SetContext(this->Context);
+    pbo->SetType(dataType);
+    pbo->SetComponents(nComps);
+    pbo->SetSize(pboSize);
+
+    void *pbuf = pbo->MapUnpackedBuffer(bufSize);
+
+    void *&rbuf = recvBufs[j];
+
+    memcpy(pbuf, rbuf, bufSize);
+
+    pbo->UnmapUnpackedBuffer();
+
+    free(rbuf);
+    rbuf = NULL;
+    #endif
+
+    vtkTextureObject *tex = vtkTextureObject::New();
+    tex->SetContext(this->Context);
+    tex->Create2D(destDims[0], destDims[1], nComps, pbo, false);
+
+    pbo->Delete();
+    pbo = NULL;
+
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    ostringstream oss;
+    oss << j << ":" << this->Pass << "_recvdData.vtk";
+    vtkTextureIO::Write(mpifn(this->CommRank, oss.str().c_str()), tex);
+    #endif
+
+    this->ExecuteShader(destExt, tex);
+
+    tex->Delete();
+    }
+  this->CompositeShader->Restore();
+
+  this->FBO->DeactivateDrawBuffers();
+  this->FBO->RemoveTexColorAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT, 0U);
+  this->FBO->RemoveRenDepthAttachment(vtkgl::DRAW_FRAMEBUFFER_EXT);
+  this->FBO->UnBind(vtkgl::FRAMEBUFFER_EXT);
+  depthBuf->Delete();
+
+  // wait for sends to complete
+  int nSendReqs = static_cast<int>(mpiSendReqs.size());
+  if (nSendReqs)
+    {
+    int iErr = MPI_Waitall(nSendReqs, &mpiSendReqs[0], MPI_STATUSES_IGNORE);
+    if (iErr)
+      {
+      vtkErrorMacro("comm error in send");
+      }
+    }
+
+  MPITypeFree(mpiTypes);
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::ExecuteShader(
+      const vtkPixelExtent &ext,
+      vtkTextureObject *tex)
+{
+  tex->Activate(vtkgl::TEXTURE0);
+
+  // cell to node
+  vtkPixelExtent next(ext);
+  next.CellToNode();
+
+  float fext[4];
+  next.GetData(fext);
+
+  float tcoords[4] = {0.0f,1.0f, 0.0f,1.0f};
+  int ids[8] = {0,2, 1,2, 1,3, 0,3};
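+  // each index pair selects an (x,y) corner of the extent and the matching texture coordinate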
+
+  glBegin(GL_QUADS);
+  for (int q=0; q<4; ++q)
+    {
+    int qq = 2*q;
+    glTexCoord2f(tcoords[ids[qq]], tcoords[ids[qq+1]]);
+    glVertex2f(fext[ids[qq]], fext[ids[qq+1]]);
+    }
+  glEnd();
+
+  //tex->Deactivate(vtkgl::TEXTURE0);
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+int vtkPSurfaceLICComposite::Scatter(
+        void *pSendPBO,
+        int dataType,
+        int nComps,
+        vtkTextureObject *&newImage)
+{
+  #if vtkPSurfaceLICCompositeDEBUG>=2
+  cerr << "=====vtkPSurfaceLICComposite::Scatter" << endl;
+  #endif
+
+  int iErr = 0;
+  // two pipelines depending on whether this process receives or sends
+  //
+  // send:
+  // tex -> pbo -> mpi_send
+  //
+  // recv:
+  // mpi_recv -> pbo -> tex -> composite shader -> fbo
+
+  // pass id is encoded into the mpi tag for non-blocking comm
+  this->Pass += 1;
+
+  // validate inputs
+  if (this->Pass >= maxNumPasses())
+    {
+    return -1;
+    }
+  if (pSendPBO == NULL)
+    {
+    return -2;
+    }
+  if (this->Context == NULL)
+    {
+    return -3;
+    }
+
+  // get the size of the array datatype
+  int dataTypeSize = 0;
+  switch (dataType)
+    {
+    vtkTemplateMacro(dataTypeSize = sizeof(VTK_TT););
+    default:
+      return -4;
+    }
+  unsigned int pboSize = (unsigned int)this->WindowExt.Size()*nComps;
+  unsigned int bufSize = pboSize*dataTypeSize;
+
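+  // one window-sized receive buffer; incoming blocks are written
+  // directly at their final location in the window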
+  #ifdef PBO_RECV_BUFFERS
+  vtkPixelBufferObject *recvPBO;
+  recvPBO = vtkPixelBufferObject::New();
+  recvPBO->SetContext(this->Context);
+  recvPBO->SetType(dataType);
+  recvPBO->SetComponents(nComps);
+  recvPBO->SetSize(pboSize);
+
+  void *pRecvPBO = recvPBO->MapUnpackedBuffer(bufSize);
+  memset(pRecvPBO, 0, bufSize);
+  #else
+  void *pRecvBuf = malloc(bufSize);
+  memset(pRecvBuf, 0, bufSize);
+  #endif
+
+  // initiate non-blocking comm
+  MPI_Comm comm = *(static_cast<MPI_Comm*>(this->PainterComm->GetCommunicator()));
+  int nTransactions = static_cast<int>(this->ScatterProgram.size());
+  vector<MPI_Request> mpiRecvReqs;
+  vector<MPI_Request> mpiSendReqs;
+  deque<MPI_Datatype> mpiTypes;
+  for (int j=0; j<nTransactions; ++j)
+    {
+    vtkPPixelTransfer &transaction = this->ScatterProgram[j];
+
+    // postpone local transactions; they will be overlapped
+    // with transactions requiring communication
+    if (transaction.Local(this->CommRank))
+      {
+      continue;
+      }
+
+    // encode transaction.
+    int tag = encodeTag(j, this->Pass);
+
+    vector<MPI_Request> &mpiReqs
+      = transaction.Receiver(this->CommRank) ? mpiRecvReqs : mpiSendReqs;
+
+    // start send/recv data
+    iErr = transaction.Execute(
+        comm,
+        this->CommRank,
+        nComps,
+        dataType,
+        pSendPBO,
+        dataType,
+        #ifdef PBO_RECV_BUFFERS
+        pRecvPBO,
+        #else
+        pRecvBuf,
+        #endif
+        mpiReqs,
+        mpiTypes,
+        tag);
+    if (iErr)
+      {
+      vtkErrorMacro(
+        << this->CommRank
+        << " transaction " << j << ":" << tag
+        << " failed " << iErr << endl
+        << transaction);
+      }
+    }
+
+  // overlap transfer of local data with communication. compositing is not
+  // needed since source blocks are disjoint.
+  for (int j=0; j<nTransactions; ++j)
+    {
+    vtkPPixelTransfer &transaction = this->ScatterProgram[j];
+
+    if (!transaction.Local(this->CommRank))
+      {
+      continue;
+      }
+
+    #if vtkPSurfaceLICCompositeDEBUG>=2
+    cerr
+      << this->CommRank << ":" << j << ":"
+      << encodeTag(j, this->Pass) << " Local " << transaction
+      << endl;
+    #endif
+
+    iErr = transaction.Blit(
+        nComps,
+        dataType,
+        pSendPBO,
+        dataType,
+        #ifdef PBO_RECV_BUFFERS
+        pRecvPBO
+        #else
+        pRecvBuf
+        #endif
+        );
+    if (iErr)
+      {
+      vtkErrorMacro(
+        << this->CommRank
+        << " local transaction " << j << ":" << this->Pass
+        << " failed " << iErr << endl
+        << transaction);
+      }
+    }
+
+  // recv remote data. compositing is not needed since source blocks are
+  // disjoint.
+  int nRecvReqs = static_cast<int>(mpiRecvReqs.size());
+  if (nRecvReqs)
+    {
+    iErr = MPI_Waitall(nRecvReqs, &mpiRecvReqs[0], MPI_STATUSES_IGNORE);
+    if (iErr)
+      {
+      vtkErrorMacro("comm error in recv");
+      }
+    }
+
+  unsigned int winExtSize[2];
+  this->WindowExt.Size(winExtSize);
+
+  if (newImage == NULL)
+    {
+    newImage = vtkTextureObject::New();
+    newImage->SetContext(this->Context);
+    newImage->Create2D(
+          winExtSize[0],
+          winExtSize[1],
+          nComps,
+          dataType,
+          false);
+    }
+
+  // transfer the received data into the output texture.
+  #ifdef PBO_RECV_BUFFERS
+  recvPBO->UnmapUnpackedBuffer();
+  newImage->Create2D(winExtSize[0], winExtSize[1], nComps, recvPBO, false);
+  recvPBO->Delete();
+  #else
+  vtkPixelBufferObject *recvPBO;
+  recvPBO = vtkPixelBufferObject::New();
+  recvPBO->SetContext(this->Context);
+  recvPBO->SetType(dataType);
+  recvPBO->SetComponents(nComps);
+  recvPBO->SetSize(pboSize);
+  void *pRecvPBO = recvPBO->MapUnpackedBuffer(bufSize);
+  memcpy(pRecvPBO, pRecvBuf, bufSize);
+  recvPBO->UnmapUnpackedBuffer();
+  newImage->Create2D(winExtSize[0], winExtSize[1], nComps, recvPBO, false);
+  recvPBO->Delete();
+  #endif
+
+  // wait for sends to complete
+  int nSendReqs = static_cast<int>(mpiSendReqs.size());
+  if (nSendReqs)
+    {
+    iErr = MPI_Waitall(nSendReqs, &mpiSendReqs[0], MPI_STATUSES_IGNORE);
+    if (iErr)
+      {
+      vtkErrorMacro("comm error in send");
+      }
+    }
+
+  MPITypeFree(mpiTypes);
+
+  return 0;
+}
+
+// ----------------------------------------------------------------------------
+void vtkPSurfaceLICComposite::PrintSelf(ostream &os, vtkIndent indent)
+{
+  vtkObject::PrintSelf(os, indent);
+  os << *this << endl;
+}
+
+// ****************************************************************************
+ostream &operator<<(ostream &os, vtkPSurfaceLICComposite &ss)
+{
+  // this puts output in rank order
+  MPI_Comm comm = *(static_cast<MPI_Comm*>(ss.PainterComm->GetCommunicator()));
+  int rankBelow = ss.CommRank-1;
+  if (rankBelow >= 0)
+    {
+    MPI_Recv(NULL, 0, MPI_BYTE, rankBelow, 13579, comm, MPI_STATUS_IGNORE);
+    }
+  os << "winExt=" << ss.WindowExt << endl;
+  os << "blockExts=" << endl;
+  size_t nExts = ss.BlockExts.size();
+  for (size_t i=0; i<nExts; ++i)
+    {
+    os << "  " << ss.BlockExts[i] << endl;
+    }
+  os << "compositeExts=" << endl;
+  nExts = ss.CompositeExt.size();
+  for (size_t i=0; i<nExts; ++i)
+    {
+    os << ss.CompositeExt[i] << endl;
+    }
+  os << "guardExts=" << endl;
+  for (size_t i=0; i<nExts; ++i)
+    {
+    os << ss.GuardExt[i] << endl;
+    }
+  os << "disjointGuardExts=" << endl;
+  for (size_t i=0; i<nExts; ++i)
+    {
+    os << ss.DisjointGuardExt[i] << endl;
+    }
+  os << "SuffleProgram:" << endl;
+  size_t nTransactions = ss.GatherProgram.size();
+  for (size_t j=0; j<nTransactions; ++j)
+    {
+    os << "  " << ss.GatherProgram[j] << endl;
+    }
+  os << "UnSuffleProgram:" << endl;
+  nTransactions = ss.ScatterProgram.size();
+  for (size_t j=0; j<nTransactions; ++j)
+    {
+    os << "  " << ss.ScatterProgram[j] << endl;
+    }
+  int rankAbove = ss.CommRank+1;
+  if (rankAbove < ss.CommSize)
+    {
+    MPI_Send(NULL, 0, MPI_BYTE, rankAbove, 13579, comm);
+    }
+  return os;
+}
diff --git a/Rendering/ParallelLIC/vtkPSurfaceLICComposite.h b/Rendering/ParallelLIC/vtkPSurfaceLICComposite.h
new file mode 100644
index 0000000..6a654de
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPSurfaceLICComposite.h
@@ -0,0 +1,198 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPSurfaceLICComposite.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPSurfaceLICComposite - To move data during parallel surface LIC
+// .SECTION Description
+// This class decomposes the image space and shuffles image space
+// data onto the new decomposition with the necessary guard cells
+// to prevent artifacts at the decomposition boundaries. After the
+// image LIC is computed on the new decomposition this class will
+// un-shuffle the computed LIC back onto the original decomposition.
+
+#ifndef __vtkPSurfaceLICComposite_h
+#define __vtkPSurfaceLICComposite_h
+
+#include "vtkSurfaceLICComposite.h"
+#include "vtkWeakPointer.h" // for ren context
+#include "vtkOpenGLRenderWindow.h" // for context
+#include "vtkRenderingParallelLICModule.h" // for export macro
+#include "vtkPixelExtent.h" // for pixel extent
+#include "vtkPPixelTransfer.h" // for pixel transfer
+#include <deque> // for deque
+#include <vector> // for vector
+#include <list> // for list
+
+class vtkFloatArray;
+class vtkRenderWindow;
+class vtkFrameBufferObject2;
+class vtkTextureObject;
+class vtkShaderProgram2;
+class vtkPainterCommunicator;
+class vtkPPainterCommunicator;
+class vtkPPixelExtentOps;
+
+class VTKRENDERINGPARALLELLIC_EXPORT vtkPSurfaceLICComposite : public vtkSurfaceLICComposite
+{
+public:
+  static vtkPSurfaceLICComposite *New();
+  vtkTypeMacro(vtkPSurfaceLICComposite, vtkSurfaceLICComposite);
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Set the rendering context. Must set prior to use. Reference is not
+  // held, so caller must ensure the renderer is not destroyed during
+  // use.
+  virtual void SetContext(vtkOpenGLRenderWindow *rwin);
+  virtual vtkOpenGLRenderWindow *GetContext(){ return this->Context; }
+
+  // Description:
+  // Set the communicator for parallel communication. The default is
+  // COMM_NULL.
+  virtual void SetCommunicator(vtkPainterCommunicator *comm);
+
+  // Description:
+  // Build programs to move data to the new decomp
+  // THIS IS A COLLECTIVE OPERATION
+  virtual int BuildProgram(float *vectors);
+
+  // Description:
+  // Move a single buffer from the geometry decomp to the LIC decomp.
+  // THIS IS A COLLECTIVE OPERATION
+  virtual int Gather(
+        void *pSendPBO,
+        int dataType,
+        int nComps,
+        vtkTextureObject *&newImage);
+
+  // Description:
+  // Move a single buffer from the LIC decomp to the geometry decomp
+  // THIS IS A COLLECTIVE OPERATION
+  virtual int Scatter(
+        void *pSendPBO,
+        int dataType,
+        int nComps,
+        vtkTextureObject *&newImage);
+
+protected:
+  vtkPSurfaceLICComposite();
+  ~vtkPSurfaceLICComposite();
+
+private:
+  // Description:
+  // Load, compile, and link the shader.
+  int InitializeCompositeShader(vtkOpenGLRenderWindow *context);
+
+  // Description:
+  // Composite incoming data.
+  int ExecuteShader(const vtkPixelExtent &ext, vtkTextureObject *tex);
+
+  // Description:
+  // The communication cost to move from one decomposition to another
+  // is given by the ratio of pixels to send off rank to the total
+  // number of source pixels.
+  double EstimateCommunicationCost(
+        const std::deque<std::deque<vtkPixelExtent> > &srcExts,
+        const std::deque<std::deque<vtkPixelExtent> > &destExts);
+
+  // Description:
+  // The efficiency of a decomposition is the ratio of useful pixels
+  // to guard pixels. If this factor shrinks below 1 there may be
+  // an issue.
+  double EstimateDecompEfficiency(
+        const std::deque< std::deque<vtkPixelExtent> > &exts,
+        const std::deque< std::deque<vtkPixelExtent> > &guardExts);
+
+  // Description:
+  // Given a window extent, decompose into the requested number of
+  // pieces.
+  int DecomposeScreenExtent(
+        std::deque< std::deque<vtkPixelExtent> >&newExts,
+        float *vectors);
+
+  // Description:
+  // Given an extent, decompose into the requested number of
+  // pieces.
+  int DecomposeExtent(
+      vtkPixelExtent &in,
+      int nPieces,
+      std::list<vtkPixelExtent> &out);
+
+  // Description:
+  // For parallel run. Make a decomposition disjoint. Sorts extents
+  // and processes largest to smallest, repeatedly subtracting smaller
+  // remaining blocks from the largest remaining.  Each extent in the
+  // new disjoint set is shrunk to tightly bound the vector data,
+  // extents with empty vectors are removed. This is a global operation
+  // as the vector field is distributed and has not been composited yet.
+  int MakeDecompDisjoint(
+        const std::deque< std::deque< vtkPixelExtent> > &in,
+        std::deque< std::deque< vtkPixelExtent> > &out,
+        float *vectors);
+
+
+  // decomp set of extents
+  int MakeDecompLocallyDisjoint(
+       const std::deque< std::deque< vtkPixelExtent> > &in,
+       std::deque< std::deque< vtkPixelExtent> > &out);
+
+  using vtkSurfaceLICComposite::MakeDecompDisjoint;
+
+  // Description:
+  // All gather geometry domain decomposition. The extent of local
+  // blocks are passed in, the collection of all blocks is returned
+  // along with the dataset extent.
+  int AllGatherExtents(
+        const std::deque<vtkPixelExtent> &localExts,
+        std::deque<std::deque<vtkPixelExtent> > &remoteExts,
+        vtkPixelExtent &dataSetExt);
+
+  // Description:
+  // All reduce max(|V|) on the new decomposition.
+  int AllReduceVectorMax(
+        const std::deque<vtkPixelExtent> &originalExts,
+        const std::deque<std::deque<vtkPixelExtent> > &newExts,
+        float *vectors,
+        std::vector<std::vector<float> > &vectorMax);
+
+  // Description:
+  // Add guard pixels (Parallel run)
+  int AddGuardPixels(
+      const std::deque<std::deque<vtkPixelExtent> > &exts,
+      std::deque<std::deque<vtkPixelExtent> > &guardExts,
+      std::deque<std::deque<vtkPixelExtent> > &disjointGuardExts,
+      float *vectors);
+
+private:
+  vtkPPainterCommunicator *PainterComm;          // mpi state
+  vtkPPixelExtentOps *PixelOps;
+  int CommRank;
+  int CommSize;
+
+  vtkWeakPointer<vtkOpenGLRenderWindow> Context; // rendering context
+  vtkFrameBufferObject2 *FBO;                    // buffer object
+  vtkShaderProgram2 *CompositeShader;            // shader program for compositing
+
+  std::deque<vtkPPixelTransfer> GatherProgram;   // ordered steps required to move data to new decomp
+  std::deque<vtkPPixelTransfer> ScatterProgram;  // ordered steps required to unmove data from new decomp
+
+  friend
+  ostream &operator<<(ostream &os, vtkPSurfaceLICComposite &ss);
+
+  vtkPSurfaceLICComposite(const vtkPSurfaceLICComposite&); // Not implemented
+  void operator=(const vtkPSurfaceLICComposite&); // Not implemented
+};
+
+ostream &operator<<(ostream &os, vtkPSurfaceLICComposite &ss);
+
+#endif
diff --git a/Rendering/ParallelLIC/vtkPSurfaceLICComposite_Comp.glsl b/Rendering/ParallelLIC/vtkPSurfaceLICComposite_Comp.glsl
new file mode 100644
index 0000000..c8c87f1
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPSurfaceLICComposite_Comp.glsl
@@ -0,0 +1,34 @@
+//=========================================================================
+//
+//  Program:   Visualization Toolkit
+//  Module:    vtkSurfaceLICComposite_fs1.glsl
+//
+//  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+//  All rights reserved.
+//  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+//
+//     This software is distributed WITHOUT ANY WARRANTY; without even
+//     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+//     PURPOSE.  See the above copyright notice for more information.
+//
+//=========================================================================
+
+// This shader composites for surface LIC.
+// It expects float depth values encoded
+// in the alpha channel.
+
+#version 110
+
+uniform sampler2D texData;
+
+void main()
+{
+  vec4 newData = texture2D(texData, gl_TexCoord[0].st);
+  float newDepth = newData.a;
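+  // a depth of zero marks fragments that carry no data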
+  if (newDepth == 0.0)
+    {
+    discard;
+    }
+  gl_FragDepth = newDepth;
+  gl_FragData[0] = newData;
+}
diff --git a/Rendering/ParallelLIC/vtkPSurfaceLICPainter.cxx b/Rendering/ParallelLIC/vtkPSurfaceLICPainter.cxx
new file mode 100644
index 0000000..d2ec12d
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPSurfaceLICPainter.cxx
@@ -0,0 +1,172 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPSurfaceLICPainter.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkPSurfaceLICPainter.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkPainterCommunicator.h"
+#include "vtkPPainterCommunicator.h"
+#include "vtkMPI.h"
+#include "vtkParallelTimer.h"
+
+//----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkPSurfaceLICPainter);
+
+//----------------------------------------------------------------------------
+vtkPSurfaceLICPainter::vtkPSurfaceLICPainter()
+{}
+
+//----------------------------------------------------------------------------
+vtkPSurfaceLICPainter::~vtkPSurfaceLICPainter()
+{
+  #ifdef vtkPSurfaceLICPainterDEBUG
+  cerr << "=====vtkPSurfaceLICPainter::~vtkPSurfaceLICPainter" << endl;
+  #endif
+}
+
+//----------------------------------------------------------------------------
+bool vtkPSurfaceLICPainter::NeedToUpdateCommunicator()
+{
+  // TODO -- with slice widget in PV the input dataset
+  // MTime is changing at different rates on different
+  // MPI ranks. Because of this some ranks want to update
+  // their communicator while others do not. To work around
+  // this force the communicator update on all ranks if any
+  // rank will update it.
+
+  int updateComm = this->Superclass::NeedToUpdateCommunicator() ? 1 : 0;
+
+  vtkMPICommunicatorOpaqueComm *globalComm
+    = vtkPPainterCommunicator::GetGlobalCommunicator();
+
+  if (globalComm)
+    {
+    MPI_Allreduce(
+         MPI_IN_PLACE,
+         &updateComm,
+         1,
+         MPI_INT,
+         MPI_MAX,
+         *globalComm->GetHandle());
+
+    if (updateComm != 0)
+      {
+      this->SetUpdateAll();
+      }
+    }
+
+  return updateComm != 0;
+}
+
+// ----------------------------------------------------------------------------
+void vtkPSurfaceLICPainter::GetGlobalMinMax(
+      vtkPainterCommunicator *painterComm,
+      float &min,
+      float &max)
+{
+  vtkPPainterCommunicator *pPainterComm
+    = dynamic_cast<vtkPPainterCommunicator*>(painterComm);
+
+  if (pPainterComm->GetMPIInitialized())
+    {
+    MPI_Comm comm
+      = *static_cast<MPI_Comm*>(pPainterComm->GetCommunicator());
+
+    MPI_Allreduce(
+          MPI_IN_PLACE,
+          &min,
+          1,
+          MPI_FLOAT,
+          MPI_MIN,
+          comm);
+
+    MPI_Allreduce(
+          MPI_IN_PLACE,
+          &max,
+          1,
+          MPI_FLOAT,
+          MPI_MAX,
+          comm);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkPSurfaceLICPainter::StartTimerEvent(const char *event)
+{
+  #if defined(vtkSurfaceLICPainterTIME)
+  vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+  log->StartEvent(event);
+  #else
+  (void)event;
+  #endif
+}
+
+//-----------------------------------------------------------------------------
+void vtkPSurfaceLICPainter::EndTimerEvent(const char *event)
+{
+  #if defined(vtkSurfaceLICPainterTIME)
+  vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+  log->EndEvent(event);
+  #else
+  (void)event;
+  #endif
+}
+
+//----------------------------------------------------------------------------
+void vtkPSurfaceLICPainter::WriteTimerLog(const char *fileName)
+{
+  #if defined(vtkSurfaceLICPainterTIME)
+  std::string fname = fileName?fileName:"";
+  if (fname == this->LogFileName)
+    {
+    return;
+    }
+  this->LogFileName = fname;
+  if (!fname.empty())
+    {
+    vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+    log->SetFileName(fname.c_str());
+    log->Update();
+    log->Write();
+    }
+  #else
+  (void)fileName;
+  #endif
+}
+
+//----------------------------------------------------------------------------
+vtkPainterCommunicator *vtkPSurfaceLICPainter::CreateCommunicator(int include)
+{
+  // if we're using MPI and it's been initialized then
+  // subset VTK's world communicator, otherwise run the
+  // painter serially.
+  vtkPPainterCommunicator *comm = new vtkPPainterCommunicator;
+
+  vtkMPICommunicatorOpaqueComm *globalComm
+    = vtkPPainterCommunicator::GetGlobalCommunicator();
+
+  if (globalComm)
+    {
+    comm->SubsetCommunicator(globalComm, include);
+    }
+
+  return comm;
+}
+
+//----------------------------------------------------------------------------
+void vtkPSurfaceLICPainter::PrintSelf(ostream & os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << indent << "LogFileName=" << this->LogFileName << endl;
+}
diff --git a/Rendering/ParallelLIC/vtkPSurfaceLICPainter.h b/Rendering/ParallelLIC/vtkPSurfaceLICPainter.h
new file mode 100644
index 0000000..5702eca
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkPSurfaceLICPainter.h
@@ -0,0 +1,87 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkPSurfaceLICPainter.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkPSurfaceLICPainter - parallel parts of the vtkSurfaceLICPainter
+//
+// .SECTION Description
+// Parallel parts of the vtkSurfaceLICPainter, see that class for
+// documentation.
+
+#ifndef __vtkPSurfaceLICPainter_h
+#define __vtkPSurfaceLICPainter_h
+
+#include "vtkSurfaceLICPainter.h"
+#include "vtkRenderingParallelLICModule.h" // For export macro
+#include <string> // for string
+
+class vtkPainterCommunicator;
+
+class VTKRENDERINGPARALLELLIC_EXPORT vtkPSurfaceLICPainter : public vtkSurfaceLICPainter
+{
+public:
+  static vtkPSurfaceLICPainter* New();
+  vtkTypeMacro(vtkPSurfaceLICPainter, vtkSurfaceLICPainter);
+  virtual void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Methods used for parallel benchmarks. Use CMake to define
+  // vtkSurfaceLICPainterTIME to enable benchmarks. During each
+  // update timing information is stored; it can be written to
+  // disk by calling WriteTimerLog.
+  virtual void WriteTimerLog(const char *fileName);
+
+protected:
+  vtkPSurfaceLICPainter();
+  ~vtkPSurfaceLICPainter();
+
+  //BTX
+  // Description:
+  // Get the min/max across all ranks. min/max are in/out.
+  // In serial operation this is a no-op; in parallel it
+  // is a global collective reduction.
+  virtual void GetGlobalMinMax(
+        vtkPainterCommunicator *comm,
+        float &min,
+        float &max);
+
+  // Description:
+  // Creates a new communicator with or without the calling process,
+  // as indicated by the passed-in flag; if non-zero, the calling process
+  // is included in the new communicator. In parallel this call is MPI
+  // collective on the world communicator. In serial this is a no-op.
+  virtual vtkPainterCommunicator *CreateCommunicator(int include);
+  //ETX
+
+  // Description:
+  // Ensure that if any rank updates the communicator, they all
+  // do. This is a global collective operation.
+  virtual bool NeedToUpdateCommunicator();
+
+  // Description:
+  // Methods used for parallel benchmarks. Use CMake to define
+  // vtkSurfaceLICPainterTIME to enable benchmarks. During each
+  // update timing information is stored; it can be written to
+  // disk by calling WriteTimerLog.
+  virtual void StartTimerEvent(const char *name);
+  virtual void EndTimerEvent(const char *name);
+
+private:
+  std::string LogFileName;
+
+private:
+  vtkPSurfaceLICPainter(const vtkPSurfaceLICPainter&); // Not implemented.
+  void operator=(const vtkPSurfaceLICPainter&); // Not implemented.
+};
+
+#endif
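
The GetGlobalMinMax method declared above (and implemented earlier in
vtkPSurfaceLICPainter.cxx) reduces per-rank bounds with in-place
MPI_Allreduce calls. Below is a minimal standalone sketch of that in-place
reduction pattern; it is not VTK code, and the local min/max values are
placeholders chosen only to show the result of the reduction:

    #include <mpi.h>
    #include <cstdio>

    int main(int argc, char **argv)
    {
      MPI_Init(&argc, &argv);

      int rank = 0;
      MPI_Comm_rank(MPI_COMM_WORLD, &rank);

      // Each rank starts with its own local range; MPI_IN_PLACE lets the
      // send and receive buffers be the same variable.
      float minVal = static_cast<float>(rank);      // placeholder local min
      float maxVal = static_cast<float>(rank + 1);  // placeholder local max

      MPI_Allreduce(MPI_IN_PLACE, &minVal, 1, MPI_FLOAT, MPI_MIN, MPI_COMM_WORLD);
      MPI_Allreduce(MPI_IN_PLACE, &maxVal, 1, MPI_FLOAT, MPI_MAX, MPI_COMM_WORLD);

      // After the reductions every rank holds the global [min, max].
      std::printf("rank %d: global range [%g, %g]\n", rank, minVal, maxVal);

      MPI_Finalize();
      return 0;
    }
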
diff --git a/Rendering/ParallelLIC/vtkParallelTimer.cxx b/Rendering/ParallelLIC/vtkParallelTimer.cxx
new file mode 100644
index 0000000..e598807
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkParallelTimer.cxx
@@ -0,0 +1,673 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkParallelTimer.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkParallelTimer.h"
+
+#if defined(_WIN32)
+// The POSIX name for this item is deprecated. Instead, use the ISO C++ conformant name
+#pragma warning(disable : 4996)
+#endif
+
+#include "vtkObjectFactory.h"
+#include "vtkMPI.h"
+
+using std::cerr;
+using std::endl;
+using std::vector;
+using std::string;
+using std::ostringstream;
+
+#include <ctime>
+#if !defined(_WIN32)
+#include <sys/time.h>
+#include <unistd.h>
+#else
+#include <process.h>
+#include <Winsock2.h>
+#include <time.h>
+static
+int gettimeofday(struct timeval *tv, void *)
+{
+  FILETIME ft;
+  GetSystemTimeAsFileTime(&ft);
+
+  __int64 tmpres = 0;
+  tmpres = ft.dwHighDateTime;
+  tmpres <<= 32;
+  tmpres |= ft.dwLowDateTime;
+
+  /*converting file time to unix epoch*/
+  const __int64 DELTA_EPOCH_IN_MICROSECS= 11644473600000000;
+  tmpres /= 10;  /*convert into microseconds*/
+  tmpres -= DELTA_EPOCH_IN_MICROSECS;
+  tv->tv_sec = (__int32)(tmpres*0.000001);
+  tv->tv_usec = (tmpres%1000000);
+
+  return 0;
+}
+#endif
+
+#include <fstream>
+using std::ofstream;
+using std::ios_base;
+
+
+
+/*
+For the singleton pattern.
+*/
+vtkParallelTimer *vtkParallelTimer::GlobalInstance = 0;
+vtkParallelTimer::vtkParallelTimerDestructor vtkParallelTimer::GlobalInstanceDestructor;
+
+//-----------------------------------------------------------------------------
+vtkParallelTimer::vtkParallelTimerDestructor::~vtkParallelTimerDestructor()
+{
+  if (this->Log)
+    {
+    this->Log->Delete();
+    this->Log = NULL;
+    }
+}
+
+
+
+// .NAME vtkParallelTimerBuffer -- A parallel buffer
+//
+// .SECTION Description
+//  A parallel buffer for logging events and other data during an MPI
+//  run. This is an implementation class; you should not use it directly.
+//  Use vtkParallelTimer instead.
+class vtkParallelTimerBuffer
+{
+public:
+  vtkParallelTimerBuffer();
+  ~vtkParallelTimerBuffer();
+
+  vtkParallelTimerBuffer(const vtkParallelTimerBuffer &other);
+  void operator=(const vtkParallelTimerBuffer &other);
+
+  // Description:
+  // Access state and internal data.
+  const char *GetData() const { return this->Data; }
+  char *GetData(){ return this->Data; }
+  size_t GetSize() const { return this->At; }
+  size_t GetCapacity() const { return this->Size; }
+
+  // Description:
+  // Clear the buffer but don't release memory.
+  void Clear(){ this->At = 0; }
+
+  // Description:
+  // Clear the buffer and release all resources.
+  void ClearForReal();
+
+  // Description:
+  // Stream insertion operators for adding data to the buffer.
+  vtkParallelTimerBuffer &operator<<(const int v);
+  vtkParallelTimerBuffer &operator<<(const long long v);
+  vtkParallelTimerBuffer &operator<<(const double v);
+  vtkParallelTimerBuffer &operator<<(const char *v);
+  template<size_t N> vtkParallelTimerBuffer &operator<<(const char v[N]);
+
+  // Description:
+  // Stream extraction operator for getting formatted data out.
+  vtkParallelTimerBuffer &operator>>(std::ostringstream &s);
+
+  // Description:
+  // Gather buffer to a root process. This is a collective
+  // operation.
+  void Gather(int rootRank);
+
+protected:
+  // Description:
+  // Push n bytes onto the buffer, resizing if necessary.
+  void PushBack(const void *data, size_t n);
+
+  // Description:
+  // Resize to at least newSize bytes.
+  void Resize(size_t newSize);
+
+private:
+  size_t Size;
+  size_t At;
+  size_t GrowBy;
+  char *Data;
+};
+
+//-----------------------------------------------------------------------------
+template<size_t N>
+vtkParallelTimerBuffer &vtkParallelTimerBuffer::operator<<(const char v[N])
+{
+  const char c = 's';
+  this->PushBack(&c, 1);
+  this->PushBack(&v[0], N);
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimerBuffer::vtkParallelTimerBuffer()
+      :
+    Size(0),
+    At(0),
+    GrowBy(4096),
+    Data(0)
+{}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimerBuffer::~vtkParallelTimerBuffer(){ free(this->Data); }
+
+//-----------------------------------------------------------------------------
+vtkParallelTimerBuffer::vtkParallelTimerBuffer(const vtkParallelTimerBuffer &other)
+      :
+    Size(0),
+    At(0),
+    GrowBy(4096),
+    Data(0)
+{
+  *this = other;
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimerBuffer::operator=(const vtkParallelTimerBuffer &other)
+{
+  if (this == &other)
+    {
+    return;
+    }
+  this->Clear();
+  this->Resize(other.GetSize());
+  memcpy(this->Data, other.Data, other.GetSize());
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimerBuffer::ClearForReal()
+{
+  this->At = 0;
+  this->Size = 0;
+  free(this->Data);
+  this->Data = 0;
+}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimerBuffer &vtkParallelTimerBuffer::operator<<(const int v)
+{
+  const char c = 'i';
+  this->PushBack(&c, 1);
+  this->PushBack(&v, sizeof(int));
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimerBuffer &vtkParallelTimerBuffer::operator<<(const long long v)
+{
+  const char c = 'l';
+  this->PushBack(&c, 1);
+  this->PushBack(&v, sizeof(long long));
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimerBuffer &vtkParallelTimerBuffer::operator<<(const double v)
+{
+  const char c = 'd';
+  this->PushBack(&c, 1);
+  this->PushBack(&v, sizeof(double));
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimerBuffer &vtkParallelTimerBuffer::operator<<(const char *v)
+{
+  const char c='s';
+  this->PushBack(&c, 1);
+  size_t n = strlen(v) + 1;
+  this->PushBack(v, n);
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimerBuffer &vtkParallelTimerBuffer::operator>>(ostringstream &s)
+{
+  size_t i = 0;
+  while (i < this->At)
+    {
+    char c = this->Data[i];
+    ++i;
+    switch (c)
+      {
+      case 'i':
+        s << *(reinterpret_cast<int*>(this->Data+i));
+        i += sizeof(int);
+        break;
+
+      case 'l':
+        s << *(reinterpret_cast<long long*>(this->Data+i));
+        i += sizeof(long long);
+        break;
+
+      case 'd':
+        s << *(reinterpret_cast<double*>(this->Data+i));
+        i += sizeof(double);
+        break;
+
+      case 's':
+        {
+        s << this->Data+i;
+        size_t n = strlen(this->Data+i)+1;
+        i += n;
+        }
+        break;
+
+      default:
+        cerr <<
+          "Bad case at " << i-1 << " " << c << ", " << (int)c;
+        return *this;
+      }
+    }
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimerBuffer::Gather(int rootRank)
+{
+  int mpiOk;
+  MPI_Initialized(&mpiOk);
+  if (!mpiOk)
+    {
+    return;
+    }
+  int worldRank;
+  MPI_Comm_rank(MPI_COMM_WORLD, &worldRank);
+  int worldSize;
+  MPI_Comm_size(MPI_COMM_WORLD, &worldSize);
+
+  // in serial this is a no-op
+  if (worldSize > 1)
+    {
+    int *bufferSizes = 0;
+    int *disp = 0;
+    if (worldRank == rootRank)
+      {
+      bufferSizes = static_cast<int*>(malloc(worldSize*sizeof(int)));
+      disp = static_cast<int*>(malloc(worldSize*sizeof(int)));
+      }
+    int bufferSize = static_cast<int>(this->GetSize());
+    MPI_Gather(
+        &bufferSize,
+        1,
+        MPI_INT,
+        bufferSizes,
+        1,
+        MPI_INT,
+        rootRank,
+        MPI_COMM_WORLD);
+    char *log = 0;
+    int cumSize = 0;
+    if (worldRank == rootRank)
+      {
+      for (int i=0; i<worldSize; ++i)
+        {
+        disp[i] = cumSize;
+        cumSize += bufferSizes[i];
+        }
+      log = static_cast<char*>(malloc(cumSize));
+      }
+    MPI_Gatherv(
+      this->Data,
+      bufferSize,
+      MPI_CHAR,
+      log,
+      bufferSizes,
+      disp,
+      MPI_CHAR,
+      rootRank,
+      MPI_COMM_WORLD);
+    if (worldRank == rootRank)
+      {
+      this->Clear();
+      this->PushBack(log,cumSize);
+      free(bufferSizes);
+      free(disp);
+      free(log);
+      }
+    else
+      {
+      this->Clear();
+      }
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimerBuffer::PushBack(const void *data, size_t n)
+{
+  size_t nextAt = this->At+n;
+  this->Resize(nextAt);
+  memcpy(this->Data+this->At, data, n);
+  this->At = nextAt;
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimerBuffer::Resize(size_t newSize)
+{
+  #if defined(vtkParallelTimerBufferDEBUG)
+  size_t oldSize = this->Size;
+  #endif
+  if (newSize <= this->Size)
+    {
+    return;
+    }
+  while(this->Size < newSize)
+    {
+    this->Size += this->GrowBy;
+    }
+  this->Data = static_cast<char*>(realloc(this->Data,this->Size));
+  #if defined(vtkParallelTimerBufferDEBUG)
+  memset(this->Data+oldSize, -1, this->Size-oldSize);
+  #endif
+}
+
+
+
+
+//-----------------------------------------------------------------------------
+vtkStandardNewMacro(vtkParallelTimer);
+
+//-----------------------------------------------------------------------------
+vtkParallelTimer::vtkParallelTimer()
+        :
+    GlobalLevel(0),
+    WorldRank(0),
+    WriterRank(0),
+    FileName(0),
+    WriteOnClose(0),
+    Log(0)
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::vtkParallelTimer" << endl;
+  #endif
+
+  MPI_Initialized(&this->Initialized);
+  if (this->Initialized)
+    {
+    MPI_Comm_rank(MPI_COMM_WORLD,&this->WorldRank);
+    }
+  this->StartTime.reserve(256);
+  this->Log=new vtkParallelTimerBuffer;
+}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimer::~vtkParallelTimer()
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::~vtkParallelTimer" << endl;
+  #endif
+
+  // Alert the user that events were left on the stack;
+  // this is usually a sign of trouble.
+  if (this->StartTime.size()>0)
+    {
+    vtkErrorMacro(
+      << "Start time stack has "
+      << this->StartTime.size()
+      << " remaining.");
+    }
+
+  #if vtkParallelTimerDEBUG < 0
+  if (this->EventId.size()>0)
+    {
+    size_t nIds=this->EventId.size();
+    vtkErrorMacro(
+      << "Event id stack has "
+      << nIds << " remaining.");
+    for (size_t i=0; i<nIds; ++i)
+      {
+      cerr << "EventId[" << i << "]=" << this->EventId[i] << endl;
+      }
+    }
+  #endif
+
+  this->SetFileName(0);
+
+  delete this->Log;
+}
+
+//-----------------------------------------------------------------------------
+vtkParallelTimer *vtkParallelTimer::GetGlobalInstance()
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::GetGlobalInstance" << endl;
+  #endif
+
+  if (vtkParallelTimer::GlobalInstance==0)
+    {
+    vtkParallelTimer *log=vtkParallelTimer::New();
+    ostringstream oss;
+    oss << getpid() << ".log";
+    log->SetFileName(oss.str().c_str());
+
+    vtkParallelTimer::GlobalInstance=log;
+    vtkParallelTimer::GlobalInstanceDestructor.SetLog(log);
+    }
+  return vtkParallelTimer::GlobalInstance;
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::DeleteGlobalInstance()
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::DeleteGlobalInstance" << endl;
+  #endif
+
+  if (vtkParallelTimer::GlobalInstance)
+    {
+    vtkParallelTimer::GlobalInstance->Delete();
+    vtkParallelTimer::GlobalInstance = NULL;
+
+    vtkParallelTimer::GlobalInstanceDestructor.SetLog(0);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::Clear()
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::Clear" << endl;
+  #endif
+
+  this->Log->Clear();
+  this->HeaderBuffer.str("");
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::StartEvent(int rank, const char *event)
+{
+  #if vtkParallelTimerDEBUG > 2
+  cerr << "=====vtkParallelTimer::StartEvent" << endl;
+  #endif
+
+  if (this->WorldRank != rank)
+    {
+    return;
+    }
+  this->StartEvent(event);
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::StartEvent(const char *event)
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::StartEvent" << endl;
+  #endif
+
+  double walls = 0.0;
+  timeval wallt;
+  gettimeofday(&wallt, 0x0);
+  walls = static_cast<double>(wallt.tv_sec)
+    + static_cast<double>(wallt.tv_usec)/1.0E6;
+
+  #if vtkParallelTimerDEBUG < 0
+  this->EventId.push_back(event);
+  #endif
+
+  this->StartTime.push_back(walls);
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::EndEvent(int rank, const char *event)
+{
+  #if vtkParallelTimerDEBUG > 2
+  cerr << "=====vtkParallelTimer::EndEvent" << endl;
+  #endif
+
+  if (this->WorldRank != rank)
+    {
+    return;
+    }
+  this->EndEvent(event);
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::EndEvent(const char *event)
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::EndEvent" << endl;
+  #endif
+
+  double walle = 0.0;
+  timeval wallt;
+  gettimeofday(&wallt, 0x0);
+  walle = static_cast<double>(wallt.tv_sec)
+    + static_cast<double>(wallt.tv_usec)/1.0E6;
+
+  #if vtkParallelTimerDEBUG > 0
+  if (this->StartTime.size() == 0)
+    {
+    vtkErrorMacro("No event to end! " << event);
+    return;
+    }
+  #endif
+
+  double walls = this->StartTime.back();
+  this->StartTime.pop_back();
+
+  *this->Log
+    << this->WorldRank << " "
+    << event << " "
+    << walls << " "
+    << walle << " "
+    << walle-walls
+    << "\n";
+
+  #if vtkParallelTimerDEBUG < 0
+  const string &sEventId = this->EventId.back();
+  const string eEventId = event;
+  if (sEventId != eEventId)
+    {
+    vtkErrorMacro(
+      << "Event mismatch " << sEventId.c_str() << " != " << eEventId.c_str());
+    }
+  this->EventId.pop_back();
+  #endif
+
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::EndEventSynch(int rank, const char *event)
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::EndEventSynch" << endl;
+  #endif
+
+  if (this->Initialized)
+    {
+    MPI_Barrier(MPI_COMM_WORLD);
+    }
+  if (this->WorldRank != rank)
+    {
+    return;
+    }
+  this->EndEvent(event);
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::EndEventSynch(const char *event)
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::EndEventSynch" << endl;
+  #endif
+
+  if (this->Initialized)
+    {
+    MPI_Barrier(MPI_COMM_WORLD);
+    }
+  this->EndEvent(event);
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::Update()
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::Update" << endl;
+  #endif
+
+  if (this->Initialized)
+    {
+    this->Log->Gather(this->WriterRank);
+    }
+}
+
+//-----------------------------------------------------------------------------
+int vtkParallelTimer::Write()
+{
+  #if vtkParallelTimerDEBUG > 1
+  cerr << "=====vtkParallelTimer::Write" << endl;
+  #endif
+
+  if ((this->WorldRank == this->WriterRank) && this->Log->GetSize())
+    {
+    cerr << "Writing " << this->FileName << endl;
+
+    ostringstream oss;
+    *this->Log >> oss;
+    ofstream f(this->FileName, ios_base::out|ios_base::app);
+    if (!f.good())
+      {
+      vtkErrorMacro(
+        << "Failed to open "
+        << this->FileName
+        << " for  writing.");
+      return -1;
+      }
+    time_t t;
+    time(&t);
+    f << "# " << ctime(&t) << this->HeaderBuffer.str() << oss.str();
+    f.close();
+    }
+  return 0;
+}
+
+//-----------------------------------------------------------------------------
+void vtkParallelTimer::PrintSelf(ostream& os, vtkIndent)
+{
+  time_t t;
+  time(&t);
+  os << "# " << ctime(&t);
+  if (this->WorldRank == this->WriterRank)
+    {
+    os << this->HeaderBuffer.str();
+    }
+  ostringstream oss;
+  *this->Log >> oss;
+  os << oss.str();
+}
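
The vtkParallelTimerBuffer class above serializes heterogeneous values into a
flat char array by writing a one-byte type tag ('i', 'l', 'd', or 's')
followed by the raw payload; MPI_Gatherv then concatenates the per-rank
buffers on the writer rank, and operator>> decodes them into text. A minimal
standalone sketch of that tag-plus-payload encoding (not VTK code; the sample
rank and event name are hypothetical):

    #include <cstring>
    #include <iostream>
    #include <sstream>
    #include <vector>

    int main()
    {
      std::vector<char> buf;

      // Encode an int: one type-tag byte followed by the raw bytes.
      int rank = 3;
      buf.push_back('i');
      buf.insert(buf.end(),
                 reinterpret_cast<char*>(&rank),
                 reinterpret_cast<char*>(&rank) + sizeof(int));

      // Encode a string, including its terminating NUL.
      const char *event = "RenderPass";
      buf.push_back('s');
      buf.insert(buf.end(), event, event + std::strlen(event) + 1);

      // Decode by switching on the tag, mirroring operator>> above.
      std::ostringstream oss;
      size_t i = 0;
      while (i < buf.size())
        {
        char tag = buf[i++];
        if (tag == 'i')
          {
          int v;
          std::memcpy(&v, &buf[i], sizeof(int));
          oss << v << " ";
          i += sizeof(int);
          }
        else if (tag == 's')
          {
          oss << &buf[i] << " ";
          i += std::strlen(&buf[i]) + 1;
          }
        }
      std::cout << oss.str() << std::endl;  // decoded: 3 RenderPass
      return 0;
    }
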
diff --git a/Rendering/ParallelLIC/vtkParallelTimer.h b/Rendering/ParallelLIC/vtkParallelTimer.h
new file mode 100644
index 0000000..3945fe9
--- /dev/null
+++ b/Rendering/ParallelLIC/vtkParallelTimer.h
@@ -0,0 +1,262 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkParallelTimer.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+//
+// .NAME vtkParallelTimer -- Distributed log for timing parallel algorithms
+// .SECTION Description
+//
+//  Provides distributed log functionality. When the file is
+//  written, each process's data is collected by rank 0, which
+//  writes the data to a single file in rank order.
+//
+//  The log works as an event stack. StartEvent pushes the
+//  event identifier and its start time onto the stack. EndEvent
+//  pops the most recent event time and identifier, computes the
+//  elapsed time, and adds an entry to the log recording the
+//  event, its start and end times, and its elapsed time.
+//  EndEventSynch includes a barrier before the measurement.
+//
+//  The log class implements the singleton pattern so that it
+//  may be shared across class boundaries. If the log instance
+//  doesn't exist then one is created. It will be automatically
+//  destroyed at exit by the singleton destructor. It can be
+//  destroyed explicitly by calling DeleteGlobalInstance.
+
+#ifndef __vtkParallelTimer_h
+#define __vtkParallelTimer_h
+
+#define vtkParallelTimerDEBUG -1
+
+#include "vtkObject.h"
+#include "vtkRenderingParallelLICModule.h" // for export
+
+//BTX
+#include <vector> // for vector
+#include <string> // for string
+#include <sstream> // for sstream
+#if vtkParallelTimerDEBUG > 0
+#include <iostream> // for cerr
+#endif
+//ETX
+
+class vtkParallelTimerBuffer;
+
+class VTKRENDERINGPARALLELLIC_EXPORT vtkParallelTimer : public vtkObject
+{
+public:
+  static vtkParallelTimer *New();
+  vtkTypeMacro(vtkParallelTimer,vtkObject);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Type used to direct an output stream into the log's header. The header
+  // is a buffer used only by the root rank.
+  class LogHeaderType
+    {
+    public:
+      template<typename T> LogHeaderType &operator<<(const T& s);
+    };
+
+  // Description:
+  // Type used to direct an output stream into the log's body. The body is a
+  // buffer that all ranks write to.
+  class LogBodyType
+    {
+    public:
+      template<typename T> LogBodyType &operator<<(const T& s);
+    };
+
+  // Description:
+  // Set the rank who writes.
+  vtkSetMacro(WriterRank,int);
+  vtkGetMacro(WriterRank,int);
+
+  // Description:
+  // Set the filename that is used during write when the object
+  // is used as a singleton. If nothing is set, the default is
+  // ROOT_RANKS_PID.log
+  vtkSetStringMacro(FileName);
+  vtkGetStringMacro(FileName);
+  //BTX
+  void SetFileName(const std::string &fileName)
+    { this->SetFileName(fileName.c_str()); }
+  //ETX
+
+  // Description:
+  // The log works as an event stack. StartEvent pushes the
+  // event identifier and its start time onto the stack. EndEvent
+  // pops the most recent event time and identifier, computes the
+  // elapsed time, and adds an entry to the log recording the
+  // event, its start and end times, and its elapsed time.
+  // EndEventSynch includes a barrier before the measurement.
+  void StartEvent(const char *event);
+  void StartEvent(int rank, const char *event);
+  void EndEvent(const char *event);
+  void EndEvent(int rank, const char *event);
+  void EndEventSynch(const char *event);
+  void EndEventSynch(int rank, const char *event);
+
+  //BTX
+  // Description:
+  // Insert text into the log header on the writer rank.
+  template<typename T>
+  vtkParallelTimer &operator<<(const T& s);
+
+  // Description:
+  // Stream output to the log's header (root rank only).
+  vtkParallelTimer::LogHeaderType GetHeader()
+    { return vtkParallelTimer::LogHeaderType(); }
+
+  // Description:
+  // Stream output to the log's body (all ranks).
+  vtkParallelTimer::LogBodyType GetBody()
+    { return vtkParallelTimer::LogBodyType(); }
+  //ETX
+
+  // Description:
+  // Clear the log.
+  void Clear();
+
+  // Description:
+  // When an object is finished writing data to the log
+  // object, it must call Update to send the data to the writer
+  // rank.
+  // This ensures that all data is transferred to the root before
+  // MPI_Finalize is called, while allowing the write to occur
+  // after MPI_Finalize. Note: this is a collective call.
+  void Update();
+
+  // Description:
+  // Write the log contents to a file.
+  int Write();
+
+  // Description:
+  // The log class implements the singleton pattern so that it
+  // may be shared across class boundaries. If the log instance
+  // doesn't exist then one is created. It will be automatically
+  // destroyed at exit by the singleton destructor. It can be
+  // destroyed explicitly by calling DeleteGlobalInstance.
+  static vtkParallelTimer *GetGlobalInstance();
+
+  // Description:
+  // Explicitly delete the singleton.
+  static void DeleteGlobalInstance();
+
+  // Description:
+  // If enabled and used as a singleton, the log will write
+  // its contents to disk during program termination.
+  vtkSetMacro(WriteOnClose, int);
+  vtkGetMacro(WriteOnClose, int);
+
+  // Description:
+  // Set/Get the global log level. Applications can set this to the
+  // desired level so that all pipeline objects will log data.
+  vtkSetMacro(GlobalLevel, int);
+  vtkGetMacro(GlobalLevel, int);
+
+protected:
+  vtkParallelTimer();
+  virtual ~vtkParallelTimer();
+
+private:
+  vtkParallelTimer(const vtkParallelTimer&); // Not implemented
+  void operator=(const vtkParallelTimer&); // Not implemented
+
+//BTX
+  // Description:
+  // A class responsible for deleting the global instance of the log.
+  class VTKRENDERINGPARALLELLIC_EXPORT vtkParallelTimerDestructor
+    {
+    public:
+      vtkParallelTimerDestructor() : Log(0) {}
+      ~vtkParallelTimerDestructor();
+
+      void SetLog(vtkParallelTimer *log){ this->Log = log; }
+
+    private:
+      vtkParallelTimer *Log;
+    };
+//ETX
+
+private:
+  int GlobalLevel;
+  int Initialized;
+  int WorldRank;
+  int WriterRank;
+  char *FileName;
+  int WriteOnClose;
+  std::vector<double> StartTime;
+  #if vtkParallelTimerDEBUG < 0
+  std::vector<std::string> EventId;
+  #endif
+
+  vtkParallelTimerBuffer *Log;
+
+  static vtkParallelTimer *GlobalInstance;
+  static vtkParallelTimerDestructor GlobalInstanceDestructor;
+
+  std::ostringstream HeaderBuffer;
+
+  friend class LogHeaderType;
+  friend class LogBodyType;
+};
+
+//BTX
+//-----------------------------------------------------------------------------
+template<typename T>
+vtkParallelTimer &vtkParallelTimer::operator<<(const T& s)
+{
+  if (this->WorldRank == this->WriterRank)
+    {
+    this->HeaderBuffer << s;
+    #if vtkParallelTimerDEBUG > 0
+    std::cerr << s;
+    #endif
+    }
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+template<typename T>
+vtkParallelTimer::LogHeaderType &vtkParallelTimer::LogHeaderType::operator<<(const T& s)
+{
+  vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+
+  if (log->WorldRank == log->WriterRank)
+    {
+    log->HeaderBuffer << s;
+    #if vtkParallelTimerDEBUG > 0
+    std::cerr << s;
+    #endif
+    }
+
+  return *this;
+}
+
+//-----------------------------------------------------------------------------
+template<typename T>
+vtkParallelTimer::LogBodyType &vtkParallelTimer::LogBodyType::operator<<(const T& s)
+{
+  vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
+
+  *(log->Log) << s;
+  #if vtkParallelTimerDEBUG > 0
+  std::cerr << s;
+  #endif
+
+  return *this;
+}
+//ETX
+
+#endif
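
Taken together, the header above exposes a singleton-style usage pattern:
fetch the global instance, bracket the work with StartEvent/EndEvent, stream
any annotations through GetHeader()/GetBody(), then call Update (collective)
and Write on the writer rank. A hedged sketch of that flow, assuming code
built against vtkRenderingParallelLIC; the event name and file name are
illustrative only:

    #include "vtkParallelTimer.h"

    void TimedWork()
    {
      vtkParallelTimer *log = vtkParallelTimer::GetGlobalInstance();
      log->SetFileName("lic-benchmark.log");          // illustrative file name

      log->GetHeader() << "# surface LIC benchmark\n"; // writer rank only

      log->StartEvent("render-pass");                 // push identifier + start time
      // ... do the work being timed ...
      log->EndEvent("render-pass");                   // pop, compute elapsed, log entry

      log->Update();                                  // collective: gather to writer rank
      log->Write();                                   // writer rank appends to the file
    }
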
diff --git a/Rendering/Qt/CMakeLists.txt b/Rendering/Qt/CMakeLists.txt
index ddb499e..ff6ab4f 100644
--- a/Rendering/Qt/CMakeLists.txt
+++ b/Rendering/Qt/CMakeLists.txt
@@ -1,3 +1,5 @@
+include(vtkQt)
+
 set(LibSrcs
   vtkQImageToImageSource.cxx
   vtkQtInitialization.cxx
@@ -6,9 +8,16 @@ set(LibSrcs
   vtkQtTreeRingLabelMapper.cxx
   )
 
-# import Qt4 build settings
-find_package(Qt4 REQUIRED QtCore QtGui QUIET)
-include(${QT_USE_FILE})
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5Widgets REQUIRED QUIET)
+  include_directories(${Qt5Widgets_INCLUDE_DIRS})
+  add_definitions(${Qt5Widgets_DEFINITIONS})
+  set(QT_LIBRARIES ${Qt5Widgets_LIBRARIES})
+else()
+  # import Qt4 build settings
+  find_package(Qt4 REQUIRED QtCore QtGui QUIET)
+  include(${QT_USE_FILE})
+endif()
 
 vtk_module_library(${vtk-module} ${LibSrcs})
-target_link_libraries(${vtk-module} ${QT_LIBRARIES})
+target_link_libraries(${vtk-module} LINK_PRIVATE ${QT_LIBRARIES})
diff --git a/Rendering/Qt/module.cmake b/Rendering/Qt/module.cmake
index 94dd366..0150d3f 100644
--- a/Rendering/Qt/module.cmake
+++ b/Rendering/Qt/module.cmake
@@ -3,8 +3,10 @@ vtk_module(vtkRenderingQt
     Qt
   DEPENDS
     vtkRenderingLabel
-    vtkGUISupportQt
+  PRIVATE_DEPENDS
     vtkFiltersTexture
+    vtkFiltersSources
+    vtkGUISupportQt
   TEST_DEPENDS
     vtkTestingCore
   )
diff --git a/Rendering/Qt/vtkQImageToImageSource.h b/Rendering/Qt/vtkQImageToImageSource.h
index 2868674..591c4d2 100644
--- a/Rendering/Qt/vtkQImageToImageSource.h
+++ b/Rendering/Qt/vtkQImageToImageSource.h
@@ -41,7 +41,7 @@ public:
 
 protected:
   vtkQImageToImageSource();
-  ~vtkQImageToImageSource() {};
+  ~vtkQImageToImageSource() {}
 
   const QImage* QtImage;
   int DataExtent[6];
diff --git a/Rendering/Tk/Testing/Data/Baseline/TestTkRenderWidget.png.md5 b/Rendering/Tk/Testing/Data/Baseline/TestTkRenderWidget.png.md5
new file mode 100644
index 0000000..a798bbd
--- /dev/null
+++ b/Rendering/Tk/Testing/Data/Baseline/TestTkRenderWidget.png.md5
@@ -0,0 +1 @@
+27831f3a108a73b8d992210905493d1f
diff --git a/Rendering/Tk/Testing/Data/Baseline/TestTkRenderWindowInteractor.png.md5 b/Rendering/Tk/Testing/Data/Baseline/TestTkRenderWindowInteractor.png.md5
new file mode 100644
index 0000000..a798bbd
--- /dev/null
+++ b/Rendering/Tk/Testing/Data/Baseline/TestTkRenderWindowInteractor.png.md5
@@ -0,0 +1 @@
+27831f3a108a73b8d992210905493d1f
diff --git a/Rendering/Tk/Testing/Data/Baseline/cursor3D.png.md5 b/Rendering/Tk/Testing/Data/Baseline/cursor3D.png.md5
new file mode 100644
index 0000000..025e9f9
--- /dev/null
+++ b/Rendering/Tk/Testing/Data/Baseline/cursor3D.png.md5
@@ -0,0 +1 @@
+434cb1fb401614e0af8db651664e161e
diff --git a/Rendering/Tk/Testing/Python/CMakeLists.txt b/Rendering/Tk/Testing/Python/CMakeLists.txt
index 39b715a..bcbeb69 100644
--- a/Rendering/Tk/Testing/Python/CMakeLists.txt
+++ b/Rendering/Tk/Testing/Python/CMakeLists.txt
@@ -1,16 +1,4 @@
 if(VTK_PYTHON_EXE)
-  foreach(tfile
-    TestTkRenderWidget
-    TestTkRenderWindowInteractor
-    )
-    if(VTK_DATA_ROOT)
-      add_test(NAME ${vtk-module}Python-${tfile}
-        COMMAND ${VTK_PYTHON_EXE}
-        ${CMAKE_CURRENT_SOURCE_DIR}/${tfile}.py
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -B ${VTK_DATA_ROOT}/Baseline/Rendering/
-        )
-    endif()
-  endforeach()
+  vtk_add_test_python(TestTkRenderWidget.py NO_RT)
+  vtk_add_test_python(TestTkRenderWindowInteractor.py NO_RT)
 endif()
diff --git a/Rendering/Tk/Testing/Python/TestTkRenderWidget.py b/Rendering/Tk/Testing/Python/TestTkRenderWidget.py
index 3324794..9a54342 100644
--- a/Rendering/Tk/Testing/Python/TestTkRenderWidget.py
+++ b/Rendering/Tk/Testing/Python/TestTkRenderWidget.py
@@ -1,6 +1,3 @@
-# A simple test for a vtkTkRenderWidget.  Run it like so:
-# python TestTkRenderWidget.py -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import os
 
 import vtk
diff --git a/Rendering/Tk/Testing/Python/TestTkRenderWindowInteractor.py b/Rendering/Tk/Testing/Python/TestTkRenderWindowInteractor.py
index 7d19562..07bb79b 100644
--- a/Rendering/Tk/Testing/Python/TestTkRenderWindowInteractor.py
+++ b/Rendering/Tk/Testing/Python/TestTkRenderWindowInteractor.py
@@ -1,6 +1,3 @@
-# A simple test for a vtkTkRenderWidget. Run it like so:
-# python TestTkRenderWindowInteractor.py -B $VTK_DATA_ROOT/Baseline/Rendering
-
 import os
 import vtk
 from vtk.test import Testing
diff --git a/Rendering/Tk/Testing/Tcl/CMakeLists.txt b/Rendering/Tk/Testing/Tcl/CMakeLists.txt
index 77ea2d8..a0f48fb 100644
--- a/Rendering/Tk/Testing/Tcl/CMakeLists.txt
+++ b/Rendering/Tk/Testing/Tcl/CMakeLists.txt
@@ -1,3 +1 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(cursor3D VolumeRendering)
-endif()
+vtk_add_test_tcl(cursor3D)
diff --git a/Rendering/Tk/vtkCocoaTkUtilities.h b/Rendering/Tk/vtkCocoaTkUtilities.h
index ef381cc..34aeb93 100644
--- a/Rendering/Tk/vtkCocoaTkUtilities.h
+++ b/Rendering/Tk/vtkCocoaTkUtilities.h
@@ -48,8 +48,8 @@ public:
   static void* GetDrawableView(Tk_Window_ *window);
 
 protected:
-  vtkCocoaTkUtilities() {};
-  ~vtkCocoaTkUtilities() {};
+  vtkCocoaTkUtilities() {}
+  ~vtkCocoaTkUtilities() {}
 
 private:
   vtkCocoaTkUtilities(const vtkCocoaTkUtilities&);  // Not implemented.
diff --git a/Rendering/Tk/vtkTkImageViewerWidget.cxx b/Rendering/Tk/vtkTkImageViewerWidget.cxx
index 0938a67..4484a39 100644
--- a/Rendering/Tk/vtkTkImageViewerWidget.cxx
+++ b/Rendering/Tk/vtkTkImageViewerWidget.cxx
@@ -801,14 +801,14 @@ vtkTkImageViewerWidget_MakeImageViewer(struct vtkTkImageViewerWidget *self)
   // Use the same display
   imgWindow->SetDisplayId(dpy);
 
-  // Set the parent correctly and get the actual OSX window on the screen
+  // Set the parent correctly and get the actual OS X window on the screen
   // Window must be up so that the aglContext can be attached to it
   if ((winPtr->parentPtr != NULL) && !(winPtr->flags & TK_TOP_LEVEL))
     {
       if (winPtr->parentPtr->window == None)
         {
         // Look at each parent TK window in order until we run out
-        // of windows or find the top level. Then the OSX window that will be
+        // of windows or find the top level. Then the OS X window that will be
         // the parent is created so that we have a window to pass to the
         // vtkRenderWindow so it can attach its openGL context.
         // Ideally the Tk_MakeWindowExist call would do the deed. (I think)
diff --git a/Rendering/Tk/vtkTkRenderWidget.cxx b/Rendering/Tk/vtkTkRenderWidget.cxx
index e664b21..eb7ca23 100644
--- a/Rendering/Tk/vtkTkRenderWidget.cxx
+++ b/Rendering/Tk/vtkTkRenderWidget.cxx
@@ -1198,14 +1198,14 @@ vtkTkRenderWidget_MakeRenderWindow(struct vtkTkRenderWidget *self)
   //                                winPtr->privatePtr->yOff);
   self->RenderWindow->SetSize(self->Width, self->Height);
 
-  // Set the parent correctly and get the actual OSX window on the screen
+  // Set the parent correctly and get the actual OS X window on the screen
   // Window must be up so that the aglContext can be attached to it
   if ((winPtr->parentPtr != NULL) && !(winPtr->flags & TK_TOP_LEVEL))
     {
     if (winPtr->parentPtr->window == None)
       {
       // Look at each parent TK window in order until we run out
-      // of windows or find the top level. Then the OSX window that will be
+      // of windows or find the top level. Then the OS X window that will be
       // the parent is created so that we have a window to pass to the
       // vtkRenderWindow so it can attach its openGL context.
       // Ideally the Tk_MakeWindowExist call would do the deed. (I think)
diff --git a/Rendering/Volume/Testing/Cxx/CMakeLists.txt b/Rendering/Volume/Testing/Cxx/CMakeLists.txt
index 6c98961..7902a32 100644
--- a/Rendering/Volume/Testing/Cxx/CMakeLists.txt
+++ b/Rendering/Volume/Testing/Cxx/CMakeLists.txt
@@ -1,74 +1,50 @@
-set(MyTests
+vtk_add_test_cxx(
   HomogeneousRayIntegration.cxx
   LinearRayIntegration.cxx
   PartialPreIntegration.cxx
   PreIntegrationIncremental.cxx
   PreIntegrationNonIncremental.cxx
+  TestFixedPointRayCastLightComponents.cxx
+  TestGPURayCastAdditive.cxx
   TestGPURayCastCompositeBinaryMask.cxx
+  TestGPURayCastCompositeMask.cxx
+  TestGPURayCastCompositeMaskBlend.cxx
+  TestGPURayCastCompositeShadeMask.cxx
+  TestGPURayCastCompositeToMIP.cxx
+  TestGPURayCastCropping.cxx
+  TestGPURayCastDataTypesMIP.cxx
+  TestGPURayCastFourComponentsComposite.cxx
+  TestGPURayCastFourComponentsCompositeStreaming.cxx
+  TestGPURayCastFourComponentsMIP.cxx
+  TestGPURayCastFourComponentsMinIP.cxx
   TestGPURayCastMIPBinaryMask.cxx
-)
+  TestGPURayCastMIPToComposite.cxx
+  TestGPURayCastPerspectiveParallel.cxx
+  TestProjectedHexahedra.cxx
+  TestSmartVolumeMapper.cxx
+  TestSmartVolumeMapperWindowLevel.cxx
+  TestTM3DLightComponents.cxx
+  ZsweepConcavities.cxx
+  )
 
 # Tests with data
-if(VTK_DATA_ROOT)
-  set(MyTests
-    ${MyTests}
-    ProjectedTetrahedraZoomIn.cxx
-    TestFinalColorWindowLevel.cxx
-    TestGPURayCastAdditive.cxx
-    TestGPURayCastCompositeMask.cxx
-    TestGPURayCastCompositeMaskBlend.cxx
-    TestGPURayCastCompositeShadeMask.cxx
-    TestGPURayCastCompositeToMIP.cxx
-    TestGPURayCastCropping.cxx
-    TestGPURayCastDataTypesMIP.cxx
-    TestGPURayCastDataTypesMinIP.cxx
-    TestGPURayCastFourComponentsComposite.cxx
-    TestGPURayCastFourComponentsCompositeStreaming.cxx
-    TestGPURayCastFourComponentsMIP.cxx
-    TestGPURayCastFourComponentsMinIP.cxx
-    TestGPURayCastMIPToComposite.cxx
-    TestGPURayCastNearestDataTypesMIP.cxx
-    TestGPURayCastPerspectiveParallel.cxx
-    TestHAVSVolumeMapper.cxx
-    TestMinIntensityRendering.cxx
-    TestProjectedTetrahedra.cxx
-    TestProp3DFollower.cxx
-    TestFixedPointRayCastLightComponents.cxx
-    TestSmartVolumeMapper.cxx
-    TestSmartVolumeMapperWindowLevel.cxx
-    TestTM3DLightComponents.cxx
-    ZsweepConcavities.cxx
-    volProt.cxx
-    )
-endif()
+vtk_add_test_cxx(
+  ProjectedTetrahedraZoomIn.cxx,NO_VALID
+  TestFinalColorWindowLevel.cxx
+  TestGPURayCastNearestDataTypesMIP.cxx
+  TestGPURayCastDataTypesMinIP.cxx
+  TestHAVSVolumeMapper.cxx
+  TestMinIntensityRendering.cxx
+  TestProjectedTetrahedra.cxx
+  TestProp3DFollower.cxx
+  volProt.cxx
+  )
 
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY
+  ExerciseUnstructuredGridRayCastMapper.cxx)
 
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests}
-  ExerciseUnstructuredGridRayCastMapper.cxx )
+set_tests_properties(${vtk-module}Cxx-TestProp3DFollower
+  PROPERTIES TIMEOUT 100)
 
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName}
-        -D ${VTK_DATA_ROOT}
-        -T ${VTK_TEST_OUTPUT_DIR}
-        -V Baseline/VolumeRendering/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName}
-      COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
-
-if (VTK_DATA_ROOT)
-  set_tests_properties(${vtk-module}Cxx-TestProp3DFollower
-    PROPERTIES TIMEOUT 100)
-  set_tests_properties(${vtk-module}Cxx-TestTM3DLightComponents
-    PROPERTIES TIMEOUT 120)
-endif()
+set_tests_properties(${vtk-module}Cxx-TestTM3DLightComponents
+  PROPERTIES TIMEOUT 120)
diff --git a/Rendering/Volume/Testing/Cxx/ProjectedTetrahedraZoomIn.cxx b/Rendering/Volume/Testing/Cxx/ProjectedTetrahedraZoomIn.cxx
index 2fa9427..3f51d40 100644
--- a/Rendering/Volume/Testing/Cxx/ProjectedTetrahedraZoomIn.cxx
+++ b/Rendering/Volume/Testing/Cxx/ProjectedTetrahedraZoomIn.cxx
@@ -78,6 +78,15 @@ int ProjectedTetrahedraZoomIn(int argc, char *argv[])
   iren->SetRenderWindow(renWin);
   iren->SetDesiredUpdateRate(3);
 
+  // check for driver support
+  renWin->Render();
+  VTK_CREATE(vtkProjectedTetrahedraMapper, volumeMapper);
+  if (!volumeMapper->IsSupported(renWin))
+    {
+    vtkGenericWarningMacro("Projected tetrahedra is not supported. Skipping tests.");
+    return 0;
+    }
+
   // Create the reader for the data.
   // This is the data that will be volume rendered.
   vtkStdString filename;
@@ -128,7 +137,6 @@ int ProjectedTetrahedraZoomIn(int argc, char *argv[])
   volumeProperty->SetInterpolationTypeToLinear();
 
   // The mapper that renders the volume data.
-  VTK_CREATE(vtkProjectedTetrahedraMapper, volumeMapper);
   volumeMapper->SetInputConnection(trifilter->GetOutputPort());
 
   // The volume holds the mapper and the property and can be used to
@@ -155,6 +163,7 @@ int ProjectedTetrahedraZoomIn(int argc, char *argv[])
   ren1->AddVolume(volume);
 
   renWin->SetSize(300, 300);
+  ren1->ResetCamera();
 
   vtkCamera *camera = ren1->GetActiveCamera();
   camera->ParallelProjectionOff();
@@ -167,15 +176,15 @@ int ProjectedTetrahedraZoomIn(int argc, char *argv[])
 
   renWin->Render();
 
-#if 0
-  // For now we are just checking to make sure that the mapper does not crash.
-  // Maybe in the future we will do an image comparison.
   int retVal = vtkTesting::Test(argc, argv, renWin, 75);
   if (retVal == vtkRegressionTester::DO_INTERACTOR)
     {
     iren->Start();
     }
 
+  // For now we are just checking to make sure that the mapper does not crash.
+  // Maybe in the future we will do an image comparison.
+#if 0
   if ((retVal == vtkTesting::PASSED) || (retVal == vtkTesting::DO_INTERACTOR))
     {
     return 0;
@@ -185,6 +194,7 @@ int ProjectedTetrahedraZoomIn(int argc, char *argv[])
     return 1;
     }
 #else
+  vtkGenericWarningMacro("This test will always pass.");
   return 0;
 #endif
 }
diff --git a/Rendering/Volume/Testing/Cxx/TestGPURayCastCompositeShadeMask.cxx b/Rendering/Volume/Testing/Cxx/TestGPURayCastCompositeShadeMask.cxx
index c032efa..3cbde99 100644
--- a/Rendering/Volume/Testing/Cxx/TestGPURayCastCompositeShadeMask.cxx
+++ b/Rendering/Volume/Testing/Cxx/TestGPURayCastCompositeShadeMask.cxx
@@ -26,6 +26,8 @@
 #include "vtkRenderWindowInteractor.h"
 #include "vtkRenderWindow.h"
 #include "vtkRenderer.h"
+#include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLExtensionManager.h"
 #include "vtkCamera.h"
 
 int TestGPURayCastCompositeShadeMask(int argc, char *argv[])
@@ -159,10 +161,11 @@ int TestGPURayCastCompositeShadeMask(int argc, char *argv[])
                                opacityLevel+0.5*opacityWindow,1.0,0.0,0.0);
 
 
+  vtkRenderWindowInteractor *iren=vtkRenderWindowInteractor::New();
 
+  vtkOpenGLRenderWindow *renWin
+    = vtkOpenGLRenderWindow::SafeDownCast(vtkRenderWindow::New());
 
-  vtkRenderWindowInteractor *iren=vtkRenderWindowInteractor::New();
-  vtkRenderWindow *renWin=vtkRenderWindow::New();
   renWin->SetSize(300,300);
   iren->SetRenderWindow(renWin);
 
@@ -171,10 +174,17 @@ int TestGPURayCastCompositeShadeMask(int argc, char *argv[])
 
   renWin->Render();
 
-  int valid=mapper->IsRenderSupported(renWin,property);
+  bool mapper_support = mapper->IsRenderSupported(renWin,property)!=0;
+
+  // Workaround for Mesa bug
+  vtkOpenGLExtensionManager *extensions = renWin->GetExtensionManager();
+  bool driver_support
+    = (!extensions->DriverGLRendererIsOSMesa()
+    || extensions->DriverGLRendererHasToken("llvmpipe"))
+    || extensions->GetIgnoreDriverBugs("OS Mesa GPURayCastCompositeShadeMask bug");
 
   int retVal;
-  if(valid)
+  if(mapper_support && driver_support)
     {
     ren1->AddViewProp(volume);
     iren->Initialize();
diff --git a/Rendering/Volume/Testing/Cxx/TestProjectedTetrahedra.cxx b/Rendering/Volume/Testing/Cxx/TestProjectedTetrahedra.cxx
index 3a87240..2995ebb 100644
--- a/Rendering/Volume/Testing/Cxx/TestProjectedTetrahedra.cxx
+++ b/Rendering/Volume/Testing/Cxx/TestProjectedTetrahedra.cxx
@@ -64,12 +64,27 @@ int TestProjectedTetrahedra(int argc, char *argv[])
     }
 
   // Create the standard renderer, render window, and interactor.
-  vtkRenderer *ren1 = vtkRenderer::New();
   vtkRenderWindow *renWin = vtkRenderWindow::New();
+  vtkRenderer *ren1 = vtkRenderer::New();
   renWin->AddRenderer(ren1);
+  ren1->Delete();
+
   vtkRenderWindowInteractor *iren = vtkRenderWindowInteractor::New();
   iren->SetRenderWindow(renWin);
   iren->SetDesiredUpdateRate(3);
+  renWin->Delete();
+
+  // check for driver support
+  renWin->Render();
+  vtkProjectedTetrahedraMapper *volumeMapper
+    = vtkProjectedTetrahedraMapper::New();
+  if (!volumeMapper->IsSupported(renWin))
+    {
+    volumeMapper->Delete();
+    iren->Delete();
+    vtkGenericWarningMacro("Projected tetrahedra is not supported. Skipping tests.");
+    return 0;
+    }
 
   // Create the reader for the data.
   // This is the data that will be volume rendered.
@@ -122,8 +137,6 @@ int TestProjectedTetrahedra(int argc, char *argv[])
   volumeProperty->SetInterpolationTypeToLinear();
 
   // The mapper that renders the volume data.
-  vtkProjectedTetrahedraMapper *volumeMapper
-    = vtkProjectedTetrahedraMapper::New();
   volumeMapper->SetInputConnection(trifilter->GetOutputPort());
 
   // The volume holds the mapper and the property and can be used to
@@ -165,8 +178,6 @@ int TestProjectedTetrahedra(int argc, char *argv[])
     }
 
   // Clean up.
-  ren1->Delete();
-  renWin->Delete();
   iren->Delete();
   reader->Delete();
   reader2->Delete();
diff --git a/Rendering/Volume/Testing/Data/Baseline/HomogeneousRayIntegration.png.md5 b/Rendering/Volume/Testing/Data/Baseline/HomogeneousRayIntegration.png.md5
new file mode 100644
index 0000000..cd4b200
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/HomogeneousRayIntegration.png.md5
@@ -0,0 +1 @@
+f3df2d8c059bdd49912b5d0d5621aeaf
diff --git a/Rendering/Volume/Testing/Data/Baseline/LinearRayIntegration.png.md5 b/Rendering/Volume/Testing/Data/Baseline/LinearRayIntegration.png.md5
new file mode 100644
index 0000000..44b3b01
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/LinearRayIntegration.png.md5
@@ -0,0 +1 @@
+294c3a848c01b6aad4cc90d3df15f313
diff --git a/Rendering/Volume/Testing/Data/Baseline/PartialPreIntegration.png.md5 b/Rendering/Volume/Testing/Data/Baseline/PartialPreIntegration.png.md5
new file mode 100644
index 0000000..b062ddb
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/PartialPreIntegration.png.md5
@@ -0,0 +1 @@
+6d4d9247e8ce95276a680f0eea5d22a3
diff --git a/Rendering/Volume/Testing/Data/Baseline/PreIntegrationIncremental.png.md5 b/Rendering/Volume/Testing/Data/Baseline/PreIntegrationIncremental.png.md5
new file mode 100644
index 0000000..83da30a
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/PreIntegrationIncremental.png.md5
@@ -0,0 +1 @@
+e48ca82ceb34e15e13d46a1bf19f6e60
diff --git a/Rendering/Volume/Testing/Data/Baseline/PreIntegrationNonIncremental.png.md5 b/Rendering/Volume/Testing/Data/Baseline/PreIntegrationNonIncremental.png.md5
new file mode 100644
index 0000000..e896a0d
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/PreIntegrationNonIncremental.png.md5
@@ -0,0 +1 @@
+1251475c7b803c596d726008e76b8d53
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestBunykRayCastFunction.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestBunykRayCastFunction.png.md5
new file mode 100644
index 0000000..8a89c1f
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestBunykRayCastFunction.png.md5
@@ -0,0 +1 @@
+f1b450f7fddb8277b803c0af4da0a2b2
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestFinalColorWindowLevel.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestFinalColorWindowLevel.png.md5
new file mode 100644
index 0000000..6b1d959
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestFinalColorWindowLevel.png.md5
@@ -0,0 +1 @@
+6828b280fd33fcc049b26f64e7385c70
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCastLightComponents.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCastLightComponents.png.md5
new file mode 100644
index 0000000..1a3ea24
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCastLightComponents.png.md5
@@ -0,0 +1 @@
+929db51fd19635a46e9fb038bf810586
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinear.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinear.png.md5
new file mode 100644
index 0000000..6e21889
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinear.png.md5
@@ -0,0 +1 @@
+686e3209e3031c05fcbb6d2ed6306774
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinearCropped.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinearCropped.png.md5
new file mode 100644
index 0000000..afac124
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinearCropped.png.md5
@@ -0,0 +1 @@
+3b96c8d99c156b1ade8859bc7b1112ab
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinear_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinear_1.png.md5
new file mode 100644
index 0000000..79deabe
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterLinear_1.png.md5
@@ -0,0 +1 @@
+a0859a4a8341ca80810aa1c9e95d675a
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearest.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearest.png.md5
new file mode 100644
index 0000000..ca82615
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearest.png.md5
@@ -0,0 +1 @@
+01fae5f234abdf7256ac19af96058bbb
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearestCropped.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearestCropped.png.md5
new file mode 100644
index 0000000..a61bb84
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearestCropped.png.md5
@@ -0,0 +1 @@
+7ea150860999bf563657e0a993494df9
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearest_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearest_1.png.md5
new file mode 100644
index 0000000..e096d9a
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestFixedPointRayCasterNearest_1.png.md5
@@ -0,0 +1 @@
+56c08d64b8ecc17724e5851f44193a06
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastAdditive.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastAdditive.png.md5
new file mode 100644
index 0000000..e74f4e1
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastAdditive.png.md5
@@ -0,0 +1 @@
+4d50e4696efbcbe7f8ea59c28ed62545
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeBinaryMask.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeBinaryMask.png.md5
new file mode 100644
index 0000000..e5eaab3
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeBinaryMask.png.md5
@@ -0,0 +1 @@
+89d3366de63898e4aaa970b5604d9605
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMask.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMask.png.md5
new file mode 100644
index 0000000..9718717
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMask.png.md5
@@ -0,0 +1 @@
+923b0bd6f8527dd0a848acc437a2bc11
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMaskBlend.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMaskBlend.png.md5
new file mode 100644
index 0000000..9d1cb92
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMaskBlend.png.md5
@@ -0,0 +1 @@
+4863fe232e10b32727aecb1d0559ebf2
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMask_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMask_1.png.md5
new file mode 100644
index 0000000..5b1406b
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeMask_1.png.md5
@@ -0,0 +1 @@
+1363c08a47439b18c50fccbf1264762a
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeShadeMask.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeShadeMask.png.md5
new file mode 100644
index 0000000..8ac719a
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeShadeMask.png.md5
@@ -0,0 +1 @@
+675d46087aebc696d677eac863f01f24
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeShadeMask_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeShadeMask_1.png.md5
new file mode 100644
index 0000000..022935f
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeShadeMask_1.png.md5
@@ -0,0 +1 @@
+97670565c30cc51f4fec041e22aaff0e
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeToMIP.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeToMIP.png.md5
new file mode 100644
index 0000000..d86179c
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCompositeToMIP.png.md5
@@ -0,0 +1 @@
+bd7e16b083c2e450bff0dff910b1272a
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCropping.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCropping.png.md5
new file mode 100644
index 0000000..9302626
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastCropping.png.md5
@@ -0,0 +1 @@
+fda72495b07135641075cf71f8ce8cb6
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastDataTypesMIP.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastDataTypesMIP.png.md5
new file mode 100644
index 0000000..2577f75
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastDataTypesMIP.png.md5
@@ -0,0 +1 @@
+5f7f947a6ea4fc18706fca55f7cb5fc9
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastDataTypesMinIP.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastDataTypesMinIP.png.md5
new file mode 100644
index 0000000..81ccf6f
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastDataTypesMinIP.png.md5
@@ -0,0 +1 @@
+3deec692d02d733e6c050bacb2096964
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsComposite.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsComposite.png.md5
new file mode 100644
index 0000000..f0a66f4
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsComposite.png.md5
@@ -0,0 +1 @@
+1d71884eb5232ce856888f1860ced264
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsCompositeStreaming.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsCompositeStreaming.png.md5
new file mode 100644
index 0000000..eb995da
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsCompositeStreaming.png.md5
@@ -0,0 +1 @@
+e686f15cd1cef377313fb55194f63efb
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsMIP.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsMIP.png.md5
new file mode 100644
index 0000000..32f8872
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsMIP.png.md5
@@ -0,0 +1 @@
+ff4021b3e2d86d48b57c825197840c98
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsMinIP.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsMinIP.png.md5
new file mode 100644
index 0000000..302e66f
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastFourComponentsMinIP.png.md5
@@ -0,0 +1 @@
+11b0dbfafab6df40543c14cc40c57fc2
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastMIPBinaryMask.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastMIPBinaryMask.png.md5
new file mode 100644
index 0000000..897d317
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastMIPBinaryMask.png.md5
@@ -0,0 +1 @@
+f45546d424676ed764f781d90ab7c6d7
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastMIPToComposite.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastMIPToComposite.png.md5
new file mode 100644
index 0000000..7a31e4c
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastMIPToComposite.png.md5
@@ -0,0 +1 @@
+82c690a9d0a66dbebd3bd959ed92510a
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastNearestDataTypesMIP.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastNearestDataTypesMIP.png.md5
new file mode 100644
index 0000000..0769ff0
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastNearestDataTypesMIP.png.md5
@@ -0,0 +1 @@
+2c2f310de7fa42f7a559ee83f9841efa
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastPerspectiveParallel.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastPerspectiveParallel.png.md5
new file mode 100644
index 0000000..716e03e
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestGPURayCastPerspectiveParallel.png.md5
@@ -0,0 +1 @@
+04fed2078c1e916f4796e55c20358f59
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestHAVSVolumeMapper.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestHAVSVolumeMapper.png.md5
new file mode 100644
index 0000000..82153f9
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestHAVSVolumeMapper.png.md5
@@ -0,0 +1 @@
+f8eb494f80b0a4bb77986ab6d7598af1
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestHAVSVolumeMapper_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestHAVSVolumeMapper_1.png.md5
new file mode 100644
index 0000000..5e8d269
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestHAVSVolumeMapper_1.png.md5
@@ -0,0 +1 @@
+09b1f2444b33fa4c54a0a366b846281e
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestLODProp3D.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestLODProp3D.png.md5
new file mode 100644
index 0000000..6524170
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestLODProp3D.png.md5
@@ -0,0 +1 @@
+3cbc336fe303dfa057f6388a9f14f179
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestMinIntensityRendering.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestMinIntensityRendering.png.md5
new file mode 100644
index 0000000..ca38648
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestMinIntensityRendering.png.md5
@@ -0,0 +1 @@
+ae0d4f031dba2d524945e10eedd66635
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestPTZSweep.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestPTZSweep.png.md5
new file mode 100644
index 0000000..b192da7
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestPTZSweep.png.md5
@@ -0,0 +1 @@
+2ee5458fd89cb0853f670ea1db3da0cb
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestProjectedHexahedra.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestProjectedHexahedra.png.md5
new file mode 100644
index 0000000..01346aa
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestProjectedHexahedra.png.md5
@@ -0,0 +1 @@
+e644d475286a361d5f916062cb943563
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra.png.md5
new file mode 100644
index 0000000..a810319
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra.png.md5
@@ -0,0 +1 @@
+98a2bc55bc5dc346cd01dfe6c4378d56
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_1.png.md5
new file mode 100644
index 0000000..7f2ac61
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_1.png.md5
@@ -0,0 +1 @@
+ca0bbbb30f642cb311d8c1b406f37eed
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_2.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_2.png.md5
new file mode 100644
index 0000000..c076e28
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_2.png.md5
@@ -0,0 +1 @@
+7c750185f5a6641a059fc8a8dfd818ef
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_3.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_3.png.md5
new file mode 100644
index 0000000..6fdab1d
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestProjectedTetrahedra_3.png.md5
@@ -0,0 +1 @@
+e2ed75c938f7fe5bdae2712b1a21f6d1
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestProp3DFollower.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestProp3DFollower.png.md5
new file mode 100644
index 0000000..6a31e0f
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestProp3DFollower.png.md5
@@ -0,0 +1 @@
+98bc9d2dfdd100ec486265a9e46a05be
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestSmartVolumeMapper.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestSmartVolumeMapper.png.md5
new file mode 100644
index 0000000..c7a3e2c
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestSmartVolumeMapper.png.md5
@@ -0,0 +1 @@
+ccdababa2adb691d97004de07486b1e1
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestSmartVolumeMapperWindowLevel.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestSmartVolumeMapperWindowLevel.png.md5
new file mode 100644
index 0000000..a00ff12
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestSmartVolumeMapperWindowLevel.png.md5
@@ -0,0 +1 @@
+0b7e3c2ace8cf6cae256200d60dede80
diff --git a/Rendering/Volume/Testing/Data/Baseline/TestTM3DLightComponents.png.md5 b/Rendering/Volume/Testing/Data/Baseline/TestTM3DLightComponents.png.md5
new file mode 100644
index 0000000..a685b1f
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/TestTM3DLightComponents.png.md5
@@ -0,0 +1 @@
+f1d3181430bdbaf798f7525fb98bcea9
diff --git a/Rendering/Volume/Testing/Data/Baseline/VolumeOutlineSource.png.md5 b/Rendering/Volume/Testing/Data/Baseline/VolumeOutlineSource.png.md5
new file mode 100644
index 0000000..32e82e2
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/VolumeOutlineSource.png.md5
@@ -0,0 +1 @@
+71a6aca4942ed880a9dc0b991eeaf9d9
diff --git a/Rendering/Volume/Testing/Data/Baseline/VolumeOutlineSourceClipped.png.md5 b/Rendering/Volume/Testing/Data/Baseline/VolumeOutlineSourceClipped.png.md5
new file mode 100644
index 0000000..29f8b10
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/VolumeOutlineSourceClipped.png.md5
@@ -0,0 +1 @@
+7cbf1fb747356aae2b20bf2b84d202e6
diff --git a/Rendering/Volume/Testing/Data/Baseline/VolumePicker.png.md5 b/Rendering/Volume/Testing/Data/Baseline/VolumePicker.png.md5
new file mode 100644
index 0000000..3d6d021
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/VolumePicker.png.md5
@@ -0,0 +1 @@
+a01477af1acd39b63585303ed8b56d89
diff --git a/Rendering/Volume/Testing/Data/Baseline/VolumePickerCrop.png.md5 b/Rendering/Volume/Testing/Data/Baseline/VolumePickerCrop.png.md5
new file mode 100644
index 0000000..c1650f3
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/VolumePickerCrop.png.md5
@@ -0,0 +1 @@
+90504e91e86dfd1283648db8cbf20d36
diff --git a/Rendering/Volume/Testing/Data/Baseline/ZsweepConcavities.png.md5 b/Rendering/Volume/Testing/Data/Baseline/ZsweepConcavities.png.md5
new file mode 100644
index 0000000..be6aaea
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/ZsweepConcavities.png.md5
@@ -0,0 +1 @@
+a3c8d448fac6b287bda144bc56c60a7c
diff --git a/Rendering/Volume/Testing/Data/Baseline/cursor3D.png.md5 b/Rendering/Volume/Testing/Data/Baseline/cursor3D.png.md5
new file mode 100644
index 0000000..025e9f9
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/cursor3D.png.md5
@@ -0,0 +1 @@
+434cb1fb401614e0af8db651664e161e
diff --git a/Rendering/Volume/Testing/Data/Baseline/gaussian.png.md5 b/Rendering/Volume/Testing/Data/Baseline/gaussian.png.md5
new file mode 100644
index 0000000..1802772
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/gaussian.png.md5
@@ -0,0 +1 @@
+8edb1c1f1aa8ad4358234a5073b186c3
diff --git a/Rendering/Volume/Testing/Data/Baseline/volProt.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volProt.png.md5
new file mode 100644
index 0000000..5cef810
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volProt.png.md5
@@ -0,0 +1 @@
+b21eb0e28637b3945f8ca075287d47c1
diff --git a/Rendering/Volume/Testing/Data/Baseline/volProt_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volProt_1.png.md5
new file mode 100644
index 0000000..5c482f2
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volProt_1.png.md5
@@ -0,0 +1 @@
+7309f7d4106e4e5235ca03474bed9ec2
diff --git a/Rendering/Volume/Testing/Data/Baseline/volRCClipPlanes.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volRCClipPlanes.png.md5
new file mode 100644
index 0000000..d066aa1
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volRCClipPlanes.png.md5
@@ -0,0 +1 @@
+4f9949aba82d354fa76457ab613f080f
diff --git a/Rendering/Volume/Testing/Data/Baseline/volRCCropRegions.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volRCCropRegions.png.md5
new file mode 100644
index 0000000..e8da732
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volRCCropRegions.png.md5
@@ -0,0 +1 @@
+905f85489f67a92d2a8b7aaef6ad6d89
diff --git a/Rendering/Volume/Testing/Data/Baseline/volRCRotateClip.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volRCRotateClip.png.md5
new file mode 100644
index 0000000..a3134d1
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volRCRotateClip.png.md5
@@ -0,0 +1 @@
+1ae2dd7c15f2477c98ef56562939859a
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM2DCropRegions.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM2DCropRegions.png.md5
new file mode 100644
index 0000000..98a04ac
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM2DCropRegions.png.md5
@@ -0,0 +1 @@
+89350cfac3f78a151274f254c85e9060
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM2DRotateClip.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM2DRotateClip.png.md5
new file mode 100644
index 0000000..2e0367a
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM2DRotateClip.png.md5
@@ -0,0 +1 @@
+01f24c3f2201722e1ee6948a93274a26
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions.png.md5
new file mode 100644
index 0000000..f0018c1
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions.png.md5
@@ -0,0 +1 @@
+b166ff0c49465231864cf9f8740ab686
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_1.png.md5
new file mode 100644
index 0000000..d545761
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_1.png.md5
@@ -0,0 +1 @@
+585718bd89e9a5d5d3a79fd01b85feab
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_2.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_2.png.md5
new file mode 100644
index 0000000..1291943
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_2.png.md5
@@ -0,0 +1 @@
+c9e4872b3be0a82e582652dabe8fb4c8
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_3.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_3.png.md5
new file mode 100644
index 0000000..3abfe61
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCompressedCropRegions_3.png.md5
@@ -0,0 +1 @@
+1d0a527f7caadb5aaff8f810bbf9bf30
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions.png.md5
new file mode 100644
index 0000000..e10ae85
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions.png.md5
@@ -0,0 +1 @@
+75601bf59e87cd8e61e0f4e3b0d2f684
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_1.png.md5
new file mode 100644
index 0000000..85b57f3
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_1.png.md5
@@ -0,0 +1 @@
+108d92e8058bef59157c8d3142e0250b
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_2.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_2.png.md5
new file mode 100644
index 0000000..d495f4e
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_2.png.md5
@@ -0,0 +1 @@
+595f6df3e1f9b542aecaf370a5b215c7
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_3.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_3.png.md5
new file mode 100644
index 0000000..7381798
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_3.png.md5
@@ -0,0 +1 @@
+f9525e6e9d56bccf83c7e1af42b92e26
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_4.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_4.png.md5
new file mode 100644
index 0000000..b4a58a7
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_4.png.md5
@@ -0,0 +1 @@
+76953981e1397a08a2bf7bc3701e4b0a
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_5.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_5.png.md5
new file mode 100644
index 0000000..f797ec7
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DCropRegions_5.png.md5
@@ -0,0 +1 @@
+f74581f63c5edb6742b568dc2c5a550f
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip.png.md5
new file mode 100644
index 0000000..dc3ac4e
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip.png.md5
@@ -0,0 +1 @@
+c197c7c42f57e90351e5bf64e4952bb4
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_1.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_1.png.md5
new file mode 100644
index 0000000..3bfc1f6
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_1.png.md5
@@ -0,0 +1 @@
+e57f78a5263e482696d7bc9a7ddc4ba0
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_2.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_2.png.md5
new file mode 100644
index 0000000..6abefde
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_2.png.md5
@@ -0,0 +1 @@
+91ed06c3ef8473464e6205638d262066
diff --git a/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_3.png.md5 b/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_3.png.md5
new file mode 100644
index 0000000..90bce45
--- /dev/null
+++ b/Rendering/Volume/Testing/Data/Baseline/volTM3DRotateClip_3.png.md5
@@ -0,0 +1 @@
+f5ec25f6c89e7db6663a78faf8c9ccc5
diff --git a/Rendering/Volume/Testing/Python/CMakeLists.txt b/Rendering/Volume/Testing/Python/CMakeLists.txt
index 2f228ea..5f8d3c3 100644
--- a/Rendering/Volume/Testing/Python/CMakeLists.txt
+++ b/Rendering/Volume/Testing/Python/CMakeLists.txt
@@ -1,11 +1,24 @@
-add_test_python(TestBunykRayCastFunction.py VolumeRendering)
-add_test_python(TestPTZSweep.py VolumeRendering)
-add_test_python(volRCRotateClip.py VolumeRendering)
-add_test_python(volTM2DRotateClip.py VolumeRendering)
-add_test_python(VolumePickerCrop.py VolumeRendering)
-add_test_python(volTM3DCompressedCropRegions.py VolumeRendering)
-add_test_python(volTM3DCropRegions.py VolumeRendering)
-add_test_python(volTM3DRotateClip.py VolumeRendering)
+vtk_add_test_python(TestBunykRayCastFunction.py)
+vtk_add_test_python(TestLODProp3D.py)
+vtk_add_test_python(TestPTZSweep.py)
+vtk_add_test_python(gaussian.py)
+vtk_add_test_python(volRCClipPlanes.py)
+vtk_add_test_python(volRCCropRegions.py)
+vtk_add_test_python(volRCRotateClip.py)
+vtk_add_test_python(volTM2DCropRegions.py)
+vtk_add_test_python(volTM2DRotateClip.py)
+vtk_add_test_python(VolumeOutlineSource.py)
+vtk_add_test_python(VolumeOutlineSourceClipped.py)
+vtk_add_test_python(VolumePickerCrop.py)
+vtk_add_test_python(cursor3D.py)
+vtk_add_test_python(volTM3DCompressedCropRegions.py)
+vtk_add_test_python(volTM3DCropRegions.py)
+vtk_add_test_python(volTM3DRotateClip.py)
+vtk_add_test_python(TestFixedPointRayCasterLinear.py NO_RT)
+vtk_add_test_python(TestFixedPointRayCasterLinearCropped.py NO_RT)
+vtk_add_test_python(TestFixedPointRayCasterNearest.py NO_RT)
+vtk_add_test_python(TestFixedPointRayCasterNearestCropped.py NO_RT)
+vtk_add_test_python(VolumePicker.py NO_RT)
 
 set_tests_properties(vtkRenderingVolumePython-volTM3DCompressedCropRegions
   PROPERTIES TIMEOUT 250)
@@ -13,19 +26,3 @@ set_tests_properties(vtkRenderingVolumePython-volTM3DCropRegions
   PROPERTIES TIMEOUT 250)
 set_tests_properties(vtkRenderingVolumePython-volTM3DRotateClip
   PROPERTIES TIMEOUT 80)
-
-if (VTK_DATA_ROOT)
-  add_test_python(TestLODProp3D.py VolumeRendering)
-  add_test_python(gaussian.py VolumeRendering)
-  add_test_python(volRCClipPlanes.py VolumeRendering)
-  add_test_python(volRCCropRegions.py VolumeRendering)
-  add_test_python(volTM2DCropRegions.py VolumeRendering)
-  add_test_python(VolumeOutlineSource.py VolumeRendering)
-  add_test_python(VolumeOutlineSourceClipped.py VolumeRendering)
-  add_test_python(cursor3D.py VolumeRendering)
-  add_test_python1(TestFixedPointRayCasterLinear.py Baseline/VolumeRendering)
-  add_test_python1(TestFixedPointRayCasterLinearCropped.py Baseline/VolumeRendering)
-  add_test_python1(TestFixedPointRayCasterNearest.py Baseline/VolumeRendering)
-  add_test_python1(TestFixedPointRayCasterNearestCropped.py Baseline/VolumeRendering)
-  add_test_python1(VolumePicker.py Baseline/VolumeRendering)
-endif()
diff --git a/Rendering/Volume/Testing/Python/TestFixedPointRayCasterLinear.py b/Rendering/Volume/Testing/Python/TestFixedPointRayCasterLinear.py
index 6b4969d..9586a42 100755
--- a/Rendering/Volume/Testing/Python/TestFixedPointRayCasterLinear.py
+++ b/Rendering/Volume/Testing/Python/TestFixedPointRayCasterLinear.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestFixedPointRayCasterLinear.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/VolumeRendering
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Rendering/Volume/Testing/Python/TestFixedPointRayCasterLinearCropped.py b/Rendering/Volume/Testing/Python/TestFixedPointRayCasterLinearCropped.py
index c380b03..050e277 100755
--- a/Rendering/Volume/Testing/Python/TestFixedPointRayCasterLinearCropped.py
+++ b/Rendering/Volume/Testing/Python/TestFixedPointRayCasterLinearCropped.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestFixedPointRayCasterLinearCropped.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/VolumeRendering
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearest.py b/Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearest.py
index 8321cd2..840166d 100755
--- a/Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearest.py
+++ b/Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearest.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestFixedPointRayCasterNearest.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/VolumeRendering
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearestCropped.py b/Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearestCropped.py
index ecc3c79..e3361d1 100755
--- a/Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearestCropped.py
+++ b/Rendering/Volume/Testing/Python/TestFixedPointRayCasterNearestCropped.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython TestFixedPointRayCasterNearestCropped.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/VolumeRendering
-
 import sys
 import vtk
 import vtk.test.Testing
diff --git a/Rendering/Volume/Testing/Python/VolumePicker.py b/Rendering/Volume/Testing/Python/VolumePicker.py
index 319dcf4..2e5c907 100755
--- a/Rendering/Volume/Testing/Python/VolumePicker.py
+++ b/Rendering/Volume/Testing/Python/VolumePicker.py
@@ -18,10 +18,6 @@
 =========================================================================
 '''
 
-# Run this test like so:
-# vtkpython VolumePicker.py  -D $VTK_DATA_ROOT \
-# -B $VTK_DATA_ROOT/Baseline/VolumeRendering
-
 import vtk
 import vtk.test.Testing
 from vtk.util.misc import vtkGetDataRoot
diff --git a/Rendering/Volume/Testing/Python/volTM3DRotateClip.py b/Rendering/Volume/Testing/Python/volTM3DRotateClip.py
index f4d1ee8..8568bb9 100755
--- a/Rendering/Volume/Testing/Python/volTM3DRotateClip.py
+++ b/Rendering/Volume/Testing/Python/volTM3DRotateClip.py
@@ -104,7 +104,6 @@ volumeMapper.AddClippingPlane(plane4)
 # Okay now the graphics stuff
 ren1 = vtk.vtkRenderer()
 renWin = vtk.vtkRenderWindow()
-renWin.ReportGraphicErrorsOn()
 renWin.AddRenderer(ren1)
 
 renWin.SetSize(256, 256)
diff --git a/Rendering/Volume/Testing/Tcl/CMakeLists.txt b/Rendering/Volume/Testing/Tcl/CMakeLists.txt
index 6a1e5ca..b832f92 100644
--- a/Rendering/Volume/Testing/Tcl/CMakeLists.txt
+++ b/Rendering/Volume/Testing/Tcl/CMakeLists.txt
@@ -1,23 +1,20 @@
-if(VTK_DATA_ROOT)
-  add_test_tcl(TestBunykRayCastFunction VolumeRendering)
-  add_test_tcl(TestLODProp3D VolumeRendering)
-  add_test_tcl(VolumeOutlineSourceClipped VolumeRendering)
-  add_test_tcl(VolumePickerCrop VolumeRendering)
-  add_test_tcl(VolumePicker VolumeRendering)
-  add_test_tcl(gaussian VolumeRendering)
-  add_test_tcl(volRCClipPlanes VolumeRendering)
-  add_test_tcl(volRCCropRegions VolumeRendering)
-  add_test_tcl(volRCRotateClip VolumeRendering)
-  add_test_tcl(volTM3DCompressedCropRegions VolumeRendering)
-  add_test_tcl(volTM3DCropRegions VolumeRendering)
-  add_test_tcl(volTM3DRotateClip VolumeRendering)
-  add_test_tcl(volTM2DCropRegions VolumeRendering)
-  add_test_tcl(VolumeOutlineSource VolumeRendering)
-endif()
-
-add_test_tcl(TestFixedPointRayCasterLinearCropped VolumeRendering)
-add_test_tcl(TestFixedPointRayCasterLinear VolumeRendering)
-add_test_tcl(TestFixedPointRayCasterNearestCropped VolumeRendering)
-add_test_tcl(TestFixedPointRayCasterNearest VolumeRendering)
-add_test_tcl(TestPTZSweep VolumeRendering)
-add_test_tcl(volTM2DRotateClip VolumeRendering)
+vtk_add_test_tcl(TestBunykRayCastFunction)
+vtk_add_test_tcl(TestLODProp3D)
+vtk_add_test_tcl(VolumeOutlineSourceClipped)
+vtk_add_test_tcl(VolumePickerCrop)
+vtk_add_test_tcl(VolumePicker)
+vtk_add_test_tcl(gaussian)
+vtk_add_test_tcl(volRCClipPlanes)
+vtk_add_test_tcl(volRCCropRegions)
+vtk_add_test_tcl(volRCRotateClip)
+vtk_add_test_tcl(volTM3DCompressedCropRegions)
+vtk_add_test_tcl(volTM3DCropRegions)
+vtk_add_test_tcl(volTM3DRotateClip)
+vtk_add_test_tcl(volTM2DCropRegions)
+vtk_add_test_tcl(VolumeOutlineSource)
+vtk_add_test_tcl(TestFixedPointRayCasterLinearCropped)
+vtk_add_test_tcl(TestFixedPointRayCasterLinear)
+vtk_add_test_tcl(TestFixedPointRayCasterNearestCropped)
+vtk_add_test_tcl(TestFixedPointRayCasterNearest)
+vtk_add_test_tcl(TestPTZSweep)
+vtk_add_test_tcl(volTM2DRotateClip)
diff --git a/Rendering/Volume/Testing/Tcl/volTM3DRotateClip.tcl b/Rendering/Volume/Testing/Tcl/volTM3DRotateClip.tcl
index 748e198..0392da1 100644
--- a/Rendering/Volume/Testing/Tcl/volTM3DRotateClip.tcl
+++ b/Rendering/Volume/Testing/Tcl/volTM3DRotateClip.tcl
@@ -105,7 +105,6 @@ volumeMapper AddClippingPlane plane4
 # Okay now the graphics stuff
 vtkRenderer ren1
 vtkRenderWindow renWin
-    renWin ReportGraphicErrorsOn
     renWin AddRenderer ren1
     renWin SetSize 256 256
 vtkRenderWindowInteractor iren
diff --git a/Rendering/Volume/vtkDirectionEncoder.h b/Rendering/Volume/vtkDirectionEncoder.h
index e889449..75bf339 100644
--- a/Rendering/Volume/vtkDirectionEncoder.h
+++ b/Rendering/Volume/vtkDirectionEncoder.h
@@ -61,8 +61,8 @@ public:
   virtual float *GetDecodedGradientTable( void )=0;
 
 protected:
-  vtkDirectionEncoder() {};
-  ~vtkDirectionEncoder() {};
+  vtkDirectionEncoder() {}
+  ~vtkDirectionEncoder() {}
 private:
   vtkDirectionEncoder(const vtkDirectionEncoder&);  // Not implemented.
   void operator=(const vtkDirectionEncoder&);  // Not implemented.
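The change from "{};" to "{}" above (repeated throughout this patch for empty inline bodies) drops the stray semicolon; inside a class definition that extra semicolon is an empty member declaration that pedantic compilers (-Wpedantic, -Wextra-semi) warn about. A minimal illustration, with a hypothetical class name:

    class vtkExampleEncoder
    {
    protected:
      vtkExampleEncoder() {}   // empty inline body, no trailing semicolon
      ~vtkExampleEncoder() {}  // "{};" would add an empty declaration here
    };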
diff --git a/Rendering/Volume/vtkGPUVolumeRayCastMapper.cxx b/Rendering/Volume/vtkGPUVolumeRayCastMapper.cxx
index b099685..a1166e5 100644
--- a/Rendering/Volume/vtkGPUVolumeRayCastMapper.cxx
+++ b/Rendering/Volume/vtkGPUVolumeRayCastMapper.cxx
@@ -25,7 +25,7 @@
 #include "vtkVolumeProperty.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
-#include <assert.h>
+#include <cassert>
 #include "vtkCommand.h" // for VolumeMapperRender{Start|End|Progress}Event
 #include "vtkCamera.h"
 #include "vtkRendererCollection.h"
diff --git a/Rendering/Volume/vtkGPUVolumeRayCastMapper.h b/Rendering/Volume/vtkGPUVolumeRayCastMapper.h
index 51e8bca..20b6f16 100644
--- a/Rendering/Volume/vtkGPUVolumeRayCastMapper.h
+++ b/Rendering/Volume/vtkGPUVolumeRayCastMapper.h
@@ -196,7 +196,7 @@ public:
   // Release any graphics resources that are being consumed by this mapper.
   // The parameter window could be used to determine which graphic
   // resources to release.
-  void ReleaseGraphicsResources(vtkWindow *) {};
+  void ReleaseGraphicsResources(vtkWindow *) {}
 
   // Description:
   // Return how much the dataset has to be reduced in each dimension to
diff --git a/Rendering/Volume/vtkHAVSVolumeMapper.cxx b/Rendering/Volume/vtkHAVSVolumeMapper.cxx
index 8ef6934..f73c9c4 100644
--- a/Rendering/Volume/vtkHAVSVolumeMapper.cxx
+++ b/Rendering/Volume/vtkHAVSVolumeMapper.cxx
@@ -19,12 +19,14 @@ PURPOSE.  See the above copyright notice for more information.
 
 #include "vtkCell.h"
 #include "vtkCellArray.h"
+#include "vtkCellIterator.h"
 #include "vtkColorTransferFunction.h"
 #include "vtkDataArray.h"
 #include "vtkInformation.h"
 #include "vtkObjectFactory.h"
 #include "vtkPiecewiseFunction.h"
 #include "vtkPointData.h"
+#include "vtkSmartPointer.h"
 #include "vtkUnstructuredGrid.h"
 #include "vtkVolume.h"
 #include "vtkVolumeProperty.h"
@@ -278,15 +280,15 @@ vtkHAVSVolumeMapper::vtkHAVSVolumeMapper()
 //----------------------------------------------------------------------------
 vtkHAVSVolumeMapper::~vtkHAVSVolumeMapper()
 {
-  if (this->Vertices) { delete [] this->Vertices; }
-  if (this->Scalars) { delete [] this->Scalars; }
-  if (this->Triangles) { delete [] this->Triangles; }
-  if (this->BoundaryTriangles) { delete [] this->BoundaryTriangles; }
-  if (this->InternalTriangles) { delete [] this->InternalTriangles; }
-  if (this->SortedFaces) { delete [] this->SortedFaces; }
-  if (this->RadixTemp) { delete [] this->RadixTemp; }
-  if (this->Centers) { delete [] this->Centers; }
-  if (this->TransferFunction) { delete [] this->TransferFunction; }
+  delete [] this->Vertices;
+  delete [] this->Scalars;
+  delete [] this->Triangles;
+  delete [] this->BoundaryTriangles;
+  delete [] this->InternalTriangles;
+  delete [] this->SortedFaces;
+  delete [] this->RadixTemp;
+  delete [] this->Centers;
+  delete [] this->TransferFunction;
 }
 
 //----------------------------------------------------------------------------
@@ -296,7 +298,7 @@ vtkHAVSVolumeMapper::~vtkHAVSVolumeMapper()
 void vtkHAVSVolumeMapper::InitializePrimitives(vtkVolume *vol)
 {
   // Check for valid input
-  vtkUnstructuredGrid *ugrid = this->GetInput();
+  vtkUnstructuredGridBase *ugrid = this->GetInput();
   vtkIdType numCells = ugrid->GetNumberOfCells();
   if (!numCells)
     {
@@ -304,12 +306,16 @@ void vtkHAVSVolumeMapper::InitializePrimitives(vtkVolume *vol)
     return;
     }
   bool tetrahedra = true;
-  for (vtkIdType i = 0; i < numCells; i++)
+  vtkSmartPointer<vtkCellIterator> cellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(ugrid->NewCellIterator());
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
     {
-    vtkCell *c = ugrid->GetCell(i);
-    if (c->GetNumberOfPoints() != 4 && c->GetNumberOfPoints() != 3)
+    if (cellIter->GetNumberOfPoints() != 4 &&
+        cellIter->GetNumberOfPoints() != 3)
       {
       tetrahedra = false;
+      break;
       }
     }
   if (!tetrahedra)
@@ -318,13 +324,13 @@ void vtkHAVSVolumeMapper::InitializePrimitives(vtkVolume *vol)
     return;
     }
 
-  if (this->Vertices) { delete [] this->Vertices; }
-  if (this->Triangles) { delete [] this->Triangles; }
-  if (this->BoundaryTriangles) { delete [] this->BoundaryTriangles; }
-  if (this->InternalTriangles) { delete [] this->InternalTriangles; }
-  if (this->SortedFaces) { delete [] this->SortedFaces; }
-  if (this->RadixTemp) { delete [] this->RadixTemp; }
-  if (this->Centers) { delete [] this->Centers; }
+  delete [] this->Vertices;
+  delete [] this->Triangles;
+  delete [] this->BoundaryTriangles;
+  delete [] this->InternalTriangles;
+  delete [] this->SortedFaces;
+  delete [] this->RadixTemp;
+  delete [] this->Centers;
 
 
   // Extract the triangles from the tetrahedra
@@ -338,12 +344,12 @@ void vtkHAVSVolumeMapper::InitializePrimitives(vtkVolume *vol)
   std::pair<std::set<vtkHAVSFace, vtkHAVSFaceSetPIMPL::vtkHAVSLTFace>::iterator, bool> result4;
 
   // Insert faces into an stl set
-  for (unsigned int cellId = 0; cellId < this->NumberOfCells; cellId++)
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
     {
-    vtkCell *cell = ugrid->GetCell(cellId);
-    if (cell->GetNumberOfPoints() == 4)
+    if (cellIter->GetNumberOfPoints() == 4)
       {
-      vtkIdList *ids = cell->GetPointIds();
+      vtkIdList *ids = cellIter->GetPointIds();
 
       vtkHAVSFace f1(ids->GetId(0), ids->GetId(1), ids->GetId(2));
       vtkHAVSFace f2(ids->GetId(0), ids->GetId(1), ids->GetId(3));
@@ -360,9 +366,9 @@ void vtkHAVSVolumeMapper::InitializePrimitives(vtkVolume *vol)
       if (!result3.second) { (*result3.first).Boundary = false; }
       if (!result4.second) { (*result4.first).Boundary = false; }
       }
-    else if (cell->GetNumberOfPoints() == 3)
+    else if (cellIter->GetNumberOfPoints() == 3)
       {
-      vtkIdList *ids = cell->GetPointIds();
+      vtkIdList *ids = cellIter->GetPointIds();
       vtkHAVSFace f1(ids->GetId(0), ids->GetId(1), ids->GetId(2));
       result1 = faceSetContainer->FaceSet.insert(f1);
       if (!result1.second) { (*result1.first).Boundary = false; }
@@ -470,7 +476,7 @@ void vtkHAVSVolumeMapper::InitializePrimitives(vtkVolume *vol)
 // Get current scalars, normalize them, and create GPU structure
 void vtkHAVSVolumeMapper::InitializeScalars()
 {
-  vtkUnstructuredGrid *ugrid = this->GetInput();
+  vtkUnstructuredGridBase *ugrid = this->GetInput();
 
   if (this->Scalars) { delete [] this->Scalars; }
   this->Scalars = NULL;
@@ -690,7 +696,7 @@ void vtkHAVSVolumeMapper::InitializeLookupTables(vtkVolume *vol)
 void
 vtkHAVSVolumeMapper::FRadix(int byte, int len, vtkHAVSSortedFace *source, vtkHAVSSortedFace *dest, int *count)
 {
-  register unsigned int i, j;
+  unsigned int i, j;
   vtkHAVSSortedFace *k;
 
   static int index[256];
@@ -712,9 +718,9 @@ vtkHAVSVolumeMapper::FRadix(int byte, int len, vtkHAVSSortedFace *source, vtkHAV
 void
 vtkHAVSVolumeMapper::FRadixSort(vtkHAVSSortedFace *array, vtkHAVSSortedFace *temp, int lo, int up)
 {
-  register int len = up-lo;
-  register unsigned int i;
-  register unsigned int u;
+  int len = up-lo;
+  unsigned int i;
+  unsigned int u;
 
   vtkHAVSSortedFace * uints = array + lo;
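The rewrite above replaces indexed GetCell() loops with vtkCellIterator traversal (and drops the deprecated register qualifiers), so the mapper can consume any vtkUnstructuredGridBase, including mapped grids that never materialize vtkCell objects. A minimal sketch of the traversal pattern, using a hypothetical helper over an existing grid pointer:

    #include "vtkCellIterator.h"
    #include "vtkCellType.h"
    #include "vtkSmartPointer.h"
    #include "vtkUnstructuredGridBase.h"

    // Count tetrahedra without fetching full vtkCell objects.
    vtkIdType CountTetrahedra(vtkUnstructuredGridBase *grid)
    {
      vtkSmartPointer<vtkCellIterator> it =
          vtkSmartPointer<vtkCellIterator>::Take(grid->NewCellIterator());
      vtkIdType count = 0;
      for (it->InitTraversal(); !it->IsDoneWithTraversal(); it->GoToNextCell())
        {
        if (it->GetCellType() == VTK_TETRA) // cheap type query, no GetCell() call
          {
          ++count;
          }
        }
      return count;
    }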
 
diff --git a/Rendering/Volume/vtkProjectedTetrahedraMapper.cxx b/Rendering/Volume/vtkProjectedTetrahedraMapper.cxx
index bd852d5..728f65d 100644
--- a/Rendering/Volume/vtkProjectedTetrahedraMapper.cxx
+++ b/Rendering/Volume/vtkProjectedTetrahedraMapper.cxx
@@ -28,6 +28,7 @@
 #include "vtkCellCenterDepthSort.h"
 #include "vtkCellData.h"
 #include "vtkColorTransferFunction.h"
+#include "vtkDataArrayIteratorMacro.h"
 #include "vtkDoubleArray.h"
 #include "vtkFloatArray.h"
 #include "vtkGarbageCollector.h"
@@ -83,8 +84,8 @@ void vtkProjectedTetrahedraMapper::ReportReferences(vtkGarbageCollector *collect
 
 //-----------------------------------------------------------------------------
 
-template<class point_type>
-void vtkProjectedTetrahedraMapperTransformPoints(const point_type *in_points,
+template<class PointIterator>
+void vtkProjectedTetrahedraMapperTransformPoints(PointIterator in_points,
                                                  vtkIdType num_points,
                                                  const float projection_mat[16],
                                                  const float modelview_mat[16],
@@ -93,7 +94,7 @@ void vtkProjectedTetrahedraMapperTransformPoints(const point_type *in_points,
   float mat[16];
   int row, col;
   vtkIdType i;
-  const point_type *in_p;
+  PointIterator in_p;
   float *out_p;
 
   // Combine two transforms into one transform.
@@ -155,11 +156,11 @@ void vtkProjectedTetrahedraMapper::TransformPoints(
   outPoints->SetNumberOfTuples(inPoints->GetNumberOfPoints());
   switch (inPoints->GetDataType())
     {
-    vtkTemplateMacro(vtkProjectedTetrahedraMapperTransformPoints(
-                                    (const VTK_TT *)inPoints->GetVoidPointer(0),
-                                     inPoints->GetNumberOfPoints(),
-                                     projection_mat, modelview_mat,
-                                     outPoints->GetPointer(0)));
+    vtkDataArrayIteratorMacro(inPoints->GetData(),
+      vtkProjectedTetrahedraMapperTransformPoints(
+        vtkDABegin, inPoints->GetNumberOfPoints(),
+        projection_mat, modelview_mat,
+        outPoints->GetPointer(0)));
     }
 }
 
@@ -170,24 +171,24 @@ namespace vtkProjectedTetrahedraMapperNamespace
   template<class ColorType>
   void MapScalarsToColors1(ColorType *colors, vtkVolumeProperty *property,
                            vtkDataArray *scalars);
-  template<class ColorType, class ScalarType>
+  template<class ColorType, class ScalarIterator>
   void MapScalarsToColors2(ColorType *colors, vtkVolumeProperty *property,
-                           ScalarType *scalars,
+                           ScalarIterator scalars,
                            int num_scalar_components,
                            vtkIdType num_scalars);
-  template<class ColorType, class ScalarType>
+  template<class ColorType, class ScalarIterator>
   void MapIndependentComponents(ColorType *colors,
                                 vtkVolumeProperty *property,
-                                ScalarType *scalars,
+                                ScalarIterator scalars,
                                 int num_scalar_components,
                                 vtkIdType num_scalars);
-  template<class ColorType, class ScalarType>
+  template<class ColorType, class ScalarIterator>
   void Map2DependentComponents(ColorType *colors,
                                vtkVolumeProperty *property,
-                               ScalarType *scalars,
+                               ScalarIterator scalars,
                                vtkIdType num_scalars);
-  template<class ColorType, class ScalarType>
-  void Map4DependentComponents(ColorType *colors, ScalarType *scalars,
+  template<class ColorType, class ScalarIterator>
+  void Map4DependentComponents(ColorType *colors, ScalarIterator scalars,
                                vtkIdType num_scalars);
 }
 
@@ -261,19 +262,19 @@ namespace vtkProjectedTetrahedraMapperNamespace
   void MapScalarsToColors1(ColorType *colors, vtkVolumeProperty *property,
                            vtkDataArray *scalars)
   {
-    void *scalarpointer = scalars->GetVoidPointer(0);
     switch(scalars->GetDataType())
       {
-      vtkTemplateMacro(MapScalarsToColors2(colors, property,
-                                           static_cast<VTK_TT *>(scalarpointer),
-                                           scalars->GetNumberOfComponents(),
-                                           scalars->GetNumberOfTuples()));
+      vtkDataArrayIteratorMacro(
+        scalars,
+        MapScalarsToColors2(colors, property, vtkDABegin,
+                            scalars->GetNumberOfComponents(),
+                            scalars->GetNumberOfTuples()));
       }
   }
 
-  template<class ColorType, class ScalarType>
+  template<class ColorType, class ScalarIterator>
   void MapScalarsToColors2(ColorType *colors, vtkVolumeProperty *property,
-                           ScalarType *scalars,
+                           ScalarIterator scalars,
                            int num_scalar_components, vtkIdType num_scalars)
   {
     if (property->GetIndependentComponents())
@@ -300,10 +301,10 @@ namespace vtkProjectedTetrahedraMapperNamespace
       }
   }
 
-  template<class ColorType, class ScalarType>
+  template<class ColorType, class ScalarIterator>
   void MapIndependentComponents(ColorType *colors,
                                 vtkVolumeProperty *property,
-                                ScalarType *scalars,
+                                ScalarIterator scalars,
                                 int num_scalar_components,
                                 vtkIdType num_scalars)
   {
@@ -312,7 +313,7 @@ namespace vtkProjectedTetrahedraMapperNamespace
     // what to do, and the whole thing seems kinda pointless anyway, I'm just
     // going to punt and copy over the first scalar.
     ColorType *c = colors;
-    ScalarType *s = scalars;
+    ScalarIterator s = scalars;
     vtkIdType i;
 
     if (property->GetColorChannels() == 1)
@@ -343,9 +344,9 @@ namespace vtkProjectedTetrahedraMapperNamespace
       }
   }
 
-  template<class ColorType, class ScalarType>
+  template<class ColorType, class ScalarIterator>
   void Map2DependentComponents(ColorType *colors, vtkVolumeProperty *property,
-                               ScalarType *scalars, vtkIdType num_scalars)
+                               ScalarIterator scalars, vtkIdType num_scalars)
   {
     vtkColorTransferFunction *rgb = property->GetRGBTransferFunction();
     vtkPiecewiseFunction *alpha = property->GetScalarOpacity();
@@ -364,8 +365,8 @@ namespace vtkProjectedTetrahedraMapperNamespace
       }
   }
 
-  template<class ColorType, class ScalarType>
-  void Map4DependentComponents(ColorType *colors, ScalarType *scalars,
+  template<class ColorType, class ScalarIterator>
+  void Map4DependentComponents(ColorType *colors, ScalarIterator scalars,
                                vtkIdType num_scalars)
   {
     for (vtkIdType i = 0; i < num_scalars; i++)
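The vtkTemplateMacro/GetVoidPointer dispatch above becomes vtkDataArrayIteratorMacro, which hands the worker templates an iterator (vtkDABegin/vtkDAEnd) and a value type (vtkDAValueType) instead of a raw pointer, so in-memory and mapped arrays are handled uniformly. A minimal sketch of the dispatch pattern; the helper names are hypothetical:

    #include "vtkDataArray.h"
    #include "vtkDataArrayIteratorMacro.h"

    // Sum every component of a data array through its iterator.
    template <class Iterator>
    double SumRange(Iterator begin, Iterator end)
    {
      double sum = 0.0;
      for (Iterator it = begin; it != end; ++it)
        {
        sum += static_cast<double>(*it);
        }
      return sum;
    }

    double SumDataArray(vtkDataArray *array)
    {
      double sum = 0.0;
      switch (array->GetDataType())
        {
        // Expands to one case per scalar type and defines vtkDABegin/vtkDAEnd
        // for the matching iterator type.
        vtkDataArrayIteratorMacro(array, sum = SumRange(vtkDABegin, vtkDAEnd));
        }
      return sum;
    }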
diff --git a/Rendering/Volume/vtkProjectedTetrahedraMapper.h b/Rendering/Volume/vtkProjectedTetrahedraMapper.h
index c5cd74d..aaf9a98 100644
--- a/Rendering/Volume/vtkProjectedTetrahedraMapper.h
+++ b/Rendering/Volume/vtkProjectedTetrahedraMapper.h
@@ -47,6 +47,7 @@ class vtkPoints;
 class vtkUnsignedCharArray;
 class vtkVisibilitySort;
 class vtkVolumeProperty;
+class vtkRenderWindow;
 
 class VTKRENDERINGVOLUME_EXPORT vtkProjectedTetrahedraMapper : public vtkUnstructuredGridVolumeMapper
 {
@@ -67,6 +68,12 @@ public:
                               const float modelview_mat[16],
                               vtkFloatArray *outPoints);
 
+  // Description:
+  // Return true if the rendering context provides
+  // the necessary functionality to use this class.
+  virtual bool IsSupported(vtkRenderWindow *)
+    { return false; }
+
 protected:
   vtkProjectedTetrahedraMapper();
   ~vtkProjectedTetrahedraMapper();
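The new IsSupported(vtkRenderWindow*) hook lets callers ask whether the current rendering context can run this mapper; the base class conservatively answers false, and rendering subclasses are expected to override it. A hedged usage sketch, with a hypothetical helper; the fallback choice itself stays application-specific:

    #include "vtkProjectedTetrahedraMapper.h"
    #include "vtkRenderWindow.h"

    // Decide whether to keep the projected-tetrahedra mapper or switch
    // to some other volume rendering path.
    bool CanUseProjectedTetrahedra(vtkProjectedTetrahedraMapper *mapper,
                                   vtkRenderWindow *context)
    {
      return mapper && mapper->IsSupported(context);
    }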
diff --git a/Rendering/Volume/vtkSmartVolumeMapper.cxx b/Rendering/Volume/vtkSmartVolumeMapper.cxx
index 733e6c0..1ace6b3 100644
--- a/Rendering/Volume/vtkSmartVolumeMapper.cxx
+++ b/Rendering/Volume/vtkSmartVolumeMapper.cxx
@@ -26,7 +26,6 @@
 #include "vtkPiecewiseFunction.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
-#include "vtkTrivialProducer.h"
 #include "vtkVolume.h"
 #include "vtkVolumeProperty.h"
 #include "vtkVolumeTextureMapper3D.h"
@@ -528,10 +527,7 @@ void vtkSmartVolumeMapper::ConnectMapperInput(vtkVolumeMapper *m)
     // make sure we not create a shallow copy each time to avoid
     // performance penalty.
     input2=vtkImageData::New();
-    vtkTrivialProducer* tp = vtkTrivialProducer::New();
-    tp->SetOutput(input2);
-    m->SetInputConnection(tp->GetOutputPort());
-    tp->Delete();
+    m->SetInputDataObject(input2);
     input2->Delete();
     needShallowCopy=true;
     }
@@ -557,7 +553,7 @@ void vtkSmartVolumeMapper::ConnectFilterInput(vtkImageResample *f)
     // make sure we not create a shallow copy each time to avoid
     // performance penalty.
     input2=vtkImageData::New();
-    f->SetInputConnection(f->GetInputConnection(0, 0));
+    f->SetInputDataObject(input2);
     input2->Delete();
     needShallowCopy=true;
     }
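Here the explicit vtkTrivialProducer shim is gone: vtkAlgorithm::SetInputDataObject wires a standalone data object into the pipeline and creates the producer internally. A minimal sketch, assuming an image built outside any pipeline (function name hypothetical):

    #include "vtkImageData.h"
    #include "vtkNew.h"
    #include "vtkSmartVolumeMapper.h"

    void ConnectStandaloneImage(vtkSmartVolumeMapper *mapper)
    {
      vtkNew<vtkImageData> image;              // standalone data, no producer
      mapper->SetInputDataObject(image.GetPointer());
    }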
diff --git a/Rendering/Volume/vtkUnstructuredGridBunykRayCastFunction.cxx b/Rendering/Volume/vtkUnstructuredGridBunykRayCastFunction.cxx
index 3472f29..5817acb 100644
--- a/Rendering/Volume/vtkUnstructuredGridBunykRayCastFunction.cxx
+++ b/Rendering/Volume/vtkUnstructuredGridBunykRayCastFunction.cxx
@@ -32,14 +32,17 @@
 #include "vtkColorTransferFunction.h"
 #include "vtkVolumeProperty.h"
 #include "vtkUnstructuredGridVolumeRayCastIterator.h"
+#include "vtkSmartPointer.h"
+#include "vtkCellIterator.h"
+#include "vtkDataArrayIteratorMacro.h"
 
 vtkStandardNewMacro(vtkUnstructuredGridBunykRayCastFunction);
 
 #define VTK_BUNYKRCF_NUMLISTS 100000
 
-template <class T>
+template <class T, class ScalarIterator>
 vtkIdType TemplateCastRay(
-  const T *scalars,
+  const ScalarIterator scalars,
   vtkUnstructuredGridBunykRayCastFunction *self,
   int numComponents,
   int x, int y,
@@ -177,16 +180,15 @@ vtkIdType vtkUnstructuredGridBunykRayCastIterator::GetNextIntersections(
 
     switch (scalars->GetDataType())
       {
-      vtkTemplateMacro
-        (numIntersections = TemplateCastRay
-         ((const VTK_TT *)scalars->GetVoidPointer(0),
-          this->RayCastFunction, scalars->GetNumberOfComponents(),
+      vtkDataArrayIteratorMacro(scalars,
+        numIntersections = TemplateCastRay(
+          vtkDABegin, this->RayCastFunction, scalars->GetNumberOfComponents(),
           this->RayPosition[0], this->RayPosition[1], this->Bounds[1],
           this->IntersectionPtr, this->CurrentTriangle, this->CurrentTetra,
           (intersectedCells ? intersectedCells->GetPointer(0) : NULL),
           (intersectionLengths ? intersectionLengths->GetPointer(0) : NULL),
-          (VTK_TT *)nearIntersections->GetVoidPointer(0),
-          (VTK_TT *)farIntersections->GetVoidPointer(0),
+          static_cast<vtkDAValueType*>(nearIntersections->GetVoidPointer(0)),
+          static_cast<vtkDAValueType*>(farIntersections->GetVoidPointer(0)),
           this->MaxNumberOfIntersections));
       }
 
@@ -345,7 +347,7 @@ void vtkUnstructuredGridBunykRayCastFunction::Initialize( vtkRenderer *ren,
   this->Volume     = vol;
 
 
-  vtkUnstructuredGrid *input = this->Mapper->GetInput();
+  vtkUnstructuredGridBase *input = this->Mapper->GetInput();
   int numPoints = input->GetNumberOfPoints();
 
   // If the number of points have changed, recreate the structure
@@ -424,7 +426,7 @@ int vtkUnstructuredGridBunykRayCastFunction::CheckValidity( vtkRenderer *ren,
     }
 
   // The mapper must have input
-  vtkUnstructuredGrid *input = mapper->GetInput();
+  vtkUnstructuredGridBase *input = mapper->GetInput();
   if ( !input )
     {
     vtkErrorMacro("No input to mapper");
@@ -478,7 +480,7 @@ void vtkUnstructuredGridBunykRayCastFunction::TransformPoints()
   double *transformedPtr = this->Points;
   double in[4], out[4];
   in[3] = 1.0;
-  vtkUnstructuredGrid *input = this->Mapper->GetInput();
+  vtkUnstructuredGridBase *input = this->Mapper->GetInput();
   int numPoints = input->GetNumberOfPoints();
 
   // Loop through all the points and transform them
@@ -517,7 +519,7 @@ void  vtkUnstructuredGridBunykRayCastFunction::UpdateTriangleList()
     }
 
   // If the data has changed in some way then we need to update
-  vtkUnstructuredGrid *input = this->Mapper->GetInput();
+  vtkUnstructuredGridBase *input = this->Mapper->GetInput();
   if ( this->SavedTriangleListInput != input ||
        input->GetMTime() > this->SavedTriangleListMTime.GetMTime() )
     {
@@ -573,24 +575,26 @@ void  vtkUnstructuredGridBunykRayCastFunction::UpdateTriangleList()
     }
 
   // Loop through all the cells
-  for ( i = 0; i < numCells; i++ )
+  vtkSmartPointer<vtkCellIterator> cellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
     {
     // We only handle tetra
-    if ( input->GetCellType(i) != VTK_TETRA )
+    if (cellIter->GetCellType() != VTK_TETRA)
       {
       nonTetraWarningNeeded = 1;
       continue;
       }
 
-    // Get the cell
-    vtkCell *cell = input->GetCell(i);
-
     // Get the four points
+    i = cellIter->GetCellId();
+    vtkIdList *ptIds = cellIter->GetPointIds();
     vtkIdType pts[4];
-    pts[0] = cell->GetPointId(0);
-    pts[1] = cell->GetPointId(1);
-    pts[2] = cell->GetPointId(2);
-    pts[3] = cell->GetPointId(3);
+    pts[0] = ptIds->GetId(0);
+    pts[1] = ptIds->GetId(1);
+    pts[2] = ptIds->GetId(2);
+    pts[3] = ptIds->GetId(3);
 
     // Build each of the four triangles
     int ii, jj;
@@ -903,9 +907,9 @@ int  vtkUnstructuredGridBunykRayCastFunction::IsTriangleFrontFacing( Triangle *t
   return (d>0);
 }
 
-template <class T>
+template <class T, class ScalarIterator>
 vtkIdType TemplateCastRay(
-  const T *scalars,
+  const ScalarIterator scalars,
   vtkUnstructuredGridBunykRayCastFunction *self,
   int numComponents,
   int x, int y,
diff --git a/Rendering/Volume/vtkUnstructuredGridBunykRayCastFunction.h b/Rendering/Volume/vtkUnstructuredGridBunykRayCastFunction.h
index 72c1b79..2965739 100644
--- a/Rendering/Volume/vtkUnstructuredGridBunykRayCastFunction.h
+++ b/Rendering/Volume/vtkUnstructuredGridBunykRayCastFunction.h
@@ -66,7 +66,7 @@ class vtkUnstructuredGridVolumeRayCastMapper;
 class vtkMatrix4x4;
 class vtkPiecewiseFunction;
 class vtkColorTransferFunction;
-class vtkUnstructuredGrid;
+class vtkUnstructuredGridBase;
 class vtkIdList;
 class vtkDoubleArray;
 class vtkDataArray;
@@ -193,7 +193,7 @@ protected:
 
   // These are values saved for the building of the TriangleList. Basically
   // we need to check if the data has changed in some way.
-  vtkUnstructuredGrid       *SavedTriangleListInput;
+  vtkUnstructuredGridBase   *SavedTriangleListInput;
   vtkTimeStamp               SavedTriangleListMTime;
 
 //BTX
diff --git a/Rendering/Volume/vtkUnstructuredGridLinearRayIntegrator.cxx b/Rendering/Volume/vtkUnstructuredGridLinearRayIntegrator.cxx
index cd6f5a3..7b640dc 100644
--- a/Rendering/Volume/vtkUnstructuredGridLinearRayIntegrator.cxx
+++ b/Rendering/Volume/vtkUnstructuredGridLinearRayIntegrator.cxx
@@ -92,8 +92,8 @@ vtkLinearRayIntegratorTransferFunction::vtkLinearRayIntegratorTransferFunction()
 
 vtkLinearRayIntegratorTransferFunction::~vtkLinearRayIntegratorTransferFunction()
 {
-  if (this->ControlPoints) delete[] this->ControlPoints;
-  if (this->Colors) delete[] this->Colors;
+  delete[] this->ControlPoints;
+  delete[] this->Colors;
 }
 
 static const double huebends[6] = {
@@ -219,8 +219,8 @@ void vtkLinearRayIntegratorTransferFunction::GetTransferFunction(
     }
 
   // Now record control points and colors.
-  if (this->ControlPoints) delete[] this->ControlPoints;
-  if (this->Colors) delete[] this->Colors;
+  delete[] this->ControlPoints;
+  delete[] this->Colors;
   this->NumControlPoints = static_cast<int>(cpset.size());
   this->ControlPoints = new double[this->NumControlPoints];
   this->Colors = new acolor[this->NumControlPoints];
@@ -272,8 +272,8 @@ void vtkLinearRayIntegratorTransferFunction::GetTransferFunction(
     }
 
   // Now record control points and colors.
-  if (this->ControlPoints) delete[] this->ControlPoints;
-  if (this->Colors) delete[] this->Colors;
+  delete[] this->ControlPoints;
+  delete[] this->Colors;
   this->NumControlPoints = static_cast<int>(cpset.size());
   this->ControlPoints = new double[this->NumControlPoints];
   this->Colors = new acolor[this->NumControlPoints];
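The guards around these delete[] calls are dropped because deleting a null pointer is already a well-defined no-op in C++, so the if checks were redundant. For example (hypothetical helper):

    void ReleaseExample()
    {
      double *controlPoints = 0;   // nothing allocated yet
      delete [] controlPoints;     // deleting a null pointer is a no-op
      controlPoints = new double[16];
      delete [] controlPoints;
      controlPoints = 0;           // keep the pointer from dangling
    }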
diff --git a/Rendering/Volume/vtkUnstructuredGridVolumeMapper.cxx b/Rendering/Volume/vtkUnstructuredGridVolumeMapper.cxx
index 8d760c6..295ffc1 100644
--- a/Rendering/Volume/vtkUnstructuredGridVolumeMapper.cxx
+++ b/Rendering/Volume/vtkUnstructuredGridVolumeMapper.cxx
@@ -33,8 +33,8 @@ vtkUnstructuredGridVolumeMapper::~vtkUnstructuredGridVolumeMapper()
 
 void vtkUnstructuredGridVolumeMapper::SetInputData( vtkDataSet *genericInput )
 {
-  vtkUnstructuredGrid *input =
-    vtkUnstructuredGrid::SafeDownCast( genericInput );
+  vtkUnstructuredGridBase *input =
+    vtkUnstructuredGridBase::SafeDownCast( genericInput );
 
   if ( input )
     {
@@ -42,22 +42,23 @@ void vtkUnstructuredGridVolumeMapper::SetInputData( vtkDataSet *genericInput )
     }
   else
     {
-    vtkErrorMacro("The SetInput method of this mapper requires vtkUnstructuredGrid as input");
+    vtkErrorMacro("The SetInput method of this mapper requires "
+                  "vtkUnstructuredGridBase as input");
     }
 }
 
-void vtkUnstructuredGridVolumeMapper::SetInputData( vtkUnstructuredGrid *input )
+void vtkUnstructuredGridVolumeMapper::SetInputData( vtkUnstructuredGridBase *input )
 {
   this->SetInputDataInternal(0, input);
 }
 
-vtkUnstructuredGrid *vtkUnstructuredGridVolumeMapper::GetInput()
+vtkUnstructuredGridBase *vtkUnstructuredGridVolumeMapper::GetInput()
 {
   if (this->GetNumberOfInputConnections(0) < 1)
     {
     return 0;
     }
-  return vtkUnstructuredGrid::SafeDownCast(
+  return vtkUnstructuredGridBase::SafeDownCast(
     this->GetExecutive()->GetInputData(0, 0));
 }
 
@@ -74,7 +75,8 @@ void vtkUnstructuredGridVolumeMapper::PrintSelf(ostream& os, vtkIndent indent)
 int vtkUnstructuredGridVolumeMapper::FillInputPortInformation(
   int vtkNotUsed( port ), vtkInformation* info)
 {
-  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(), "vtkUnstructuredGrid");
+  info->Set(vtkAlgorithm::INPUT_REQUIRED_DATA_TYPE(),
+            "vtkUnstructuredGridBase");
   return 1;
 }
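Widening the input type from vtkUnstructuredGrid to vtkUnstructuredGridBase keeps every existing vtkUnstructuredGrid pipeline working (it derives from the base class) while also admitting mapped-grid implementations. A small sketch of the acceptance check, mirroring the SafeDownCast above; the function name is hypothetical:

    #include "vtkDataSet.h"
    #include "vtkUnstructuredGridBase.h"

    // True for vtkUnstructuredGrid and for any mapped unstructured grid.
    bool AcceptsAsVolumeInput(vtkDataSet *candidate)
    {
      return vtkUnstructuredGridBase::SafeDownCast(candidate) != 0;
    }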
 
diff --git a/Rendering/Volume/vtkUnstructuredGridVolumeMapper.h b/Rendering/Volume/vtkUnstructuredGridVolumeMapper.h
index 5b5e9b0..667fbb5 100644
--- a/Rendering/Volume/vtkUnstructuredGridVolumeMapper.h
+++ b/Rendering/Volume/vtkUnstructuredGridVolumeMapper.h
@@ -30,7 +30,7 @@
 
 class vtkRenderer;
 class vtkVolume;
-class vtkUnstructuredGrid;
+class vtkUnstructuredGridBase;
 class vtkWindow;
 
 
@@ -42,9 +42,9 @@ public:
 
   // Description:
   // Set/Get the input data
-  virtual void SetInputData( vtkUnstructuredGrid * );
+  virtual void SetInputData( vtkUnstructuredGridBase * );
   virtual void SetInputData( vtkDataSet * );
-  vtkUnstructuredGrid *GetInput();
+  vtkUnstructuredGridBase *GetInput();
 
   vtkSetMacro( BlendMode, int );
   void SetBlendModeToComposite()
@@ -67,7 +67,7 @@ public:
   // Release any graphics resources that are being consumed by this mapper.
   // The parameter window could be used to determine which graphic
   // resources to release.
-  virtual void ReleaseGraphicsResources(vtkWindow *) {};
+  virtual void ReleaseGraphicsResources(vtkWindow *) {}
 
   enum
   {
diff --git a/Rendering/Volume/vtkUnstructuredGridVolumeRayCastFunction.h b/Rendering/Volume/vtkUnstructuredGridVolumeRayCastFunction.h
index 328ee16..2f2275d 100644
--- a/Rendering/Volume/vtkUnstructuredGridVolumeRayCastFunction.h
+++ b/Rendering/Volume/vtkUnstructuredGridVolumeRayCastFunction.h
@@ -52,8 +52,8 @@ public:
 
 
 protected:
-  vtkUnstructuredGridVolumeRayCastFunction() {};
-  ~vtkUnstructuredGridVolumeRayCastFunction() {};
+  vtkUnstructuredGridVolumeRayCastFunction() {}
+  ~vtkUnstructuredGridVolumeRayCastFunction() {}
 
 private:
   vtkUnstructuredGridVolumeRayCastFunction(const vtkUnstructuredGridVolumeRayCastFunction&);  // Not implemented.
diff --git a/Rendering/Volume/vtkUnstructuredGridVolumeZSweepMapper.cxx b/Rendering/Volume/vtkUnstructuredGridVolumeZSweepMapper.cxx
index 051876e..9b83488 100644
--- a/Rendering/Volume/vtkUnstructuredGridVolumeZSweepMapper.cxx
+++ b/Rendering/Volume/vtkUnstructuredGridVolumeZSweepMapper.cxx
@@ -23,12 +23,14 @@
 #include "vtkRayCastImageDisplayHelper.h"
 #include "vtkTransform.h"
 #include "vtkCamera.h"
+#include "vtkCellIterator.h"
 #include "vtkGenericCell.h"
 #include "vtkPriorityQueue.h"
 #include "vtkIdList.h"
 #include "vtkVolumeProperty.h"
 #include "vtkColorTransferFunction.h"
 #include "vtkPiecewiseFunction.h"
+#include "vtkSmartPointer.h"
 #include "vtkUnstructuredGridPreIntegration.h"
 #include "vtkUnstructuredGridPartialPreIntegration.h"
 #include "vtkUnstructuredGridHomogeneousRayIntegrator.h"
@@ -40,7 +42,7 @@
 //#include "vtkXMLPolyDataWriter.h"
 #include "vtkPointData.h"
 
-#include <assert.h>
+#include <cassert>
 #include <string.h> // memset()
 #include <vector>
 #include <list>
@@ -2997,7 +2999,7 @@ void vtkUnstructuredGridVolumeZSweepMapper::BuildUseSets()
     }
 
   // If the data has changed in some way then we need to update
-  vtkUnstructuredGrid *input = this->GetInput();
+  vtkUnstructuredGridBase *input = this->GetInput();
   if ( input->GetMTime() > this->SavedTriangleListMTime.GetMTime() )
     {
     needsUpdate = 1;
@@ -3015,7 +3017,6 @@ void vtkUnstructuredGridVolumeZSweepMapper::BuildUseSets()
     return;
     }
 
-  vtkIdType numberOfCells=input->GetNumberOfCells();
   vtkIdType numberOfPoints=input->GetNumberOfPoints();
 
   vtkIdList *cellNeighbors = vtkIdList::New();
@@ -3030,34 +3031,39 @@ void vtkUnstructuredGridVolumeZSweepMapper::BuildUseSets()
       this->Scalars->GetNumberOfComponents());
     }
   // for each cell
-  vtkIdType cellIdx=0;
-  while(cellIdx<numberOfCells)
-    {
-    input->GetCell(cellIdx,this->Cell);
-
-    vtkIdType faces=this->Cell->GetNumberOfFaces();
-    vtkIdType faceidx=0;
-    vtkCell *face;
-    vtkIdType faceIds[3];
-    vtkIdType orderedFaceIds[3];
-    // for each face
-    while(faceidx<faces)
+  vtkSmartPointer<vtkCellIterator> cellIter =
+      vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
+  for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+       cellIter->GoToNextCell())
+    {
+    cellIter->GetCell(this->Cell);
+    vtkIdType faces = this->Cell->GetNumberOfFaces();
+    if (faces > 0)
       {
-      face=this->Cell->GetFace(faceidx);
-      faceIds[0]=face->GetPointId(0);
-      faceIds[1]=face->GetPointId(1);
-      faceIds[2]=face->GetPointId(2);
-      int orientationChanged=this->ReorderTriangle(faceIds,orderedFaceIds);
-      input->GetCellNeighbors(cellIdx, face->GetPointIds(), cellNeighbors);
-      bool external = (cellNeighbors->GetNumberOfIds() == 0);
-
-      // Add face only if it is not already in the useset.
-      this->UseSet->AddFace(orderedFaceIds, this->Scalars,
-                            cellIdx, orientationChanged, external);
-
-      ++faceidx;
+      vtkIdType faceidx=0;
+      vtkCell *face;
+      vtkIdType faceIds[3];
+      vtkIdType orderedFaceIds[3];
+      // for each face
+      while(faceidx<faces)
+        {
+        face=this->Cell->GetFace(faceidx);
+        faceIds[0]=face->GetPointId(0);
+        faceIds[1]=face->GetPointId(1);
+        faceIds[2]=face->GetPointId(2);
+        int orientationChanged=this->ReorderTriangle(faceIds,orderedFaceIds);
+        input->GetCellNeighbors(cellIter->GetCellId(), face->GetPointIds(),
+                                cellNeighbors);
+        bool external = (cellNeighbors->GetNumberOfIds() == 0);
+
+        // Add face only if it is not already in the useset.
+        this->UseSet->AddFace(orderedFaceIds, this->Scalars,
+                              cellIter->GetCellId(), orientationChanged,
+                              external);
+
+        ++faceidx;
+        }
       }
-    ++cellIdx;
     }
   cellNeighbors->Delete();
   this->SavedTriangleListMTime.Modified();
@@ -3124,7 +3130,7 @@ void vtkUnstructuredGridVolumeZSweepMapper::ProjectAndSortVertices(
 {
   assert("pre: empty list" && this->EventList->GetNumberOfItems()==0);
 
-  vtkUnstructuredGrid *input = this->GetInput();
+  vtkUnstructuredGridBase *input = this->GetInput();
   vtkIdType numberOfPoints=input->GetNumberOfPoints();
 
   vtkIdType pointId=0;
diff --git a/Rendering/Volume/vtkVolumeMapper.h b/Rendering/Volume/vtkVolumeMapper.h
index 4542da7..a4fd30e 100644
--- a/Rendering/Volume/vtkVolumeMapper.h
+++ b/Rendering/Volume/vtkVolumeMapper.h
@@ -126,7 +126,7 @@ public:
   // Release any graphics resources that are being consumed by this mapper.
   // The parameter window could be used to determine which graphic
   // resources to release.
-  virtual void ReleaseGraphicsResources(vtkWindow *) {};
+  virtual void ReleaseGraphicsResources(vtkWindow *) {}
 
   enum
   {
diff --git a/Rendering/Volume/vtkVolumeRayCastFunction.h b/Rendering/Volume/vtkVolumeRayCastFunction.h
index 19f95d7..a64462e 100644
--- a/Rendering/Volume/vtkVolumeRayCastFunction.h
+++ b/Rendering/Volume/vtkVolumeRayCastFunction.h
@@ -155,8 +155,8 @@ public:
   virtual float GetZeroOpacityThreshold( vtkVolume *vol )=0;
 
 protected:
-  vtkVolumeRayCastFunction() {};
-  ~vtkVolumeRayCastFunction() {};
+  vtkVolumeRayCastFunction() {}
+  ~vtkVolumeRayCastFunction() {}
 
 //BTX
   // Description:
diff --git a/Rendering/Volume/vtkVolumeTextureMapper2D.h b/Rendering/Volume/vtkVolumeTextureMapper2D.h
index b752b26..c60cb34 100644
--- a/Rendering/Volume/vtkVolumeTextureMapper2D.h
+++ b/Rendering/Volume/vtkVolumeTextureMapper2D.h
@@ -67,13 +67,13 @@ public:
   // WARNING: INTERNAL METHOD - NOT INTENDED FOR GENERAL USE
   // DO NOT USE THIS METHOD OUTSIDE OF THE RENDERING PROCESS
   // Render the volume
-  virtual void Render(vtkRenderer *, vtkVolume *) {};
+  virtual void Render(vtkRenderer *, vtkVolume *) {}
 
   virtual void RenderQuads( int vtkNotUsed(count),
                             float *vtkNotUsed(v), float *vtkNotUsed(t),
                             unsigned char *vtkNotUsed(texture),
                             int vtkNotUsed(size)[2],
-                            int vtkNotUsed(reverseFlag)) {};
+                            int vtkNotUsed(reverseFlag)) {}
 
   // Description:
   // Made public only for access from the templated method. Not a vtkGetMacro
diff --git a/Rendering/Volume/vtkVolumeTextureMapper3D.h b/Rendering/Volume/vtkVolumeTextureMapper3D.h
index 97edf9b..0db286f 100644
--- a/Rendering/Volume/vtkVolumeTextureMapper3D.h
+++ b/Rendering/Volume/vtkVolumeTextureMapper3D.h
@@ -130,7 +130,7 @@ public:
   // WARNING: INTERNAL METHOD - NOT INTENDED FOR GENERAL USE
   // DO NOT USE THIS METHOD OUTSIDE OF THE RENDERING PROCESS
   // Render the volume
-  virtual void Render(vtkRenderer *, vtkVolume *) {};
+  virtual void Render(vtkRenderer *, vtkVolume *) {}
 
   // Description:
   // What rendering method is supported?
diff --git a/Rendering/VolumeOpenGL/module.cmake b/Rendering/VolumeOpenGL/module.cmake
index 6df66de..6b16f28 100644
--- a/Rendering/VolumeOpenGL/module.cmake
+++ b/Rendering/VolumeOpenGL/module.cmake
@@ -5,6 +5,10 @@ vtk_module(vtkRenderingVolumeOpenGL
     vtkRenderingVolume
   DEPENDS
     vtkRenderingOpenGL
+  PRIVATE_DEPENDS
+    vtksys
+    vtkFiltersGeneral
+    vtkFiltersSources
   TEST_DEPENDS
     vtkTestingCore
     vtkTestingRendering
diff --git a/Rendering/VolumeOpenGL/vtkOpenGLGPUVolumeRayCastMapper.cxx b/Rendering/VolumeOpenGL/vtkOpenGLGPUVolumeRayCastMapper.cxx
index ba4ff3d..a5aa3f7 100644
--- a/Rendering/VolumeOpenGL/vtkOpenGLGPUVolumeRayCastMapper.cxx
+++ b/Rendering/VolumeOpenGL/vtkOpenGLGPUVolumeRayCastMapper.cxx
@@ -38,7 +38,7 @@
 #include <string>
 #include <map>
 #include <vector>
-#include <assert.h>
+#include <cassert>
 
 #include "vtkClipDataSet.h"
 #include "vtkCellArray.h"
@@ -87,6 +87,7 @@
 #include "vtkUniformVariables.h"
 #include "vtkShader2Collection.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 
 // Uncomment the following line to debug Snow Leopard
 //#define APPLE_SNOW_LEOPARD_BUG
@@ -263,6 +264,7 @@ public:
       if(this->TextureId!=0)
         {
         glDeleteTextures(1,&this->TextureId);
+        vtkOpenGLStaticCheckErrorMacro("failed at glDeleteTextures");
         this->TextureId=0;
         }
       if(this->Table!=0)
@@ -281,6 +283,7 @@ public:
     {
       assert("pre: uptodate" && this->Loaded);
       glBindTexture(GL_TEXTURE_1D,this->TextureId);
+      vtkOpenGLStaticCheckErrorMacro("failed at glBindtexture");
     }
 
   // \pre the active texture is set to TEXTURE2
@@ -292,6 +295,8 @@ public:
               bool linearInterpolation)
     {
       assert("pre: scalarOpacity_exists" && scalarOpacity!=0);
+      vtkOpenGLClearErrorMacro();
+
       bool needUpdate=false;
       if(this->TextureId==0)
         {
@@ -370,7 +375,7 @@ public:
         glTexImage1D(GL_TEXTURE_1D,0,GL_ALPHA16,
                      vtkOpenGLGPUVolumeRayCastMapperOpacityTableSize,0,
                      GL_ALPHA,GL_FLOAT,this->Table);
-        vtkOpenGLGPUVolumeRayCastMapper::PrintError("1d opacity texture is too large");
+        vtkOpenGLStaticCheckErrorMacro("1d opacity texture is too large");
         this->Loaded=true;
         this->BuildTime.Modified();
         }
@@ -392,6 +397,7 @@ public:
         glTexParameteri(GL_TEXTURE_1D,GL_TEXTURE_MIN_FILTER,value);
         glTexParameteri(GL_TEXTURE_1D,GL_TEXTURE_MAG_FILTER,value);
         }
+    vtkOpenGLStaticCheckErrorMacro("failed after Update");
     }
 protected:
   GLuint TextureId;
@@ -402,6 +408,9 @@ protected:
   bool Loaded;
   bool LastLinearInterpolation;
   double LastRange[2];
+private:
+  vtkOpacityTable(const vtkOpacityTable&);
+  vtkOpacityTable& operator=(const vtkOpacityTable&);
 };
 
 //-----------------------------------------------------------------------------
@@ -410,12 +419,28 @@ protected:
 class vtkOpacityTables
 {
 public:
-  std::vector<vtkOpacityTable> Vector;
-  vtkOpacityTables(size_t numberOfLevels)
-    : Vector(numberOfLevels)
+  vtkOpacityTables(unsigned int numberOfTables)
+    {
+    this->Tables = new vtkOpacityTable[numberOfTables];
+    this->NumberOfTables = numberOfTables;
+    }
+  ~vtkOpacityTables()
+    {
+    delete [] this->Tables;
+    }
+  vtkOpacityTable* GetTable(unsigned int i)
+    {
+    return &this->Tables[i];
+    }
+  unsigned int GetNumberOfTables()
     {
+    return this->NumberOfTables;
     }
 private:
+  unsigned int NumberOfTables;
+  vtkOpacityTable *Tables;
+  // undefined default constructor.
+  vtkOpacityTables();
   // undefined copy constructor.
   vtkOpacityTables(const vtkOpacityTables &other);
   // undefined assignment operator.
@@ -440,6 +465,7 @@ public:
       if(this->TextureId!=0)
         {
         glDeleteTextures(1,&this->TextureId);
+        vtkOpenGLStaticCheckErrorMacro("failed at glDeleteTextures");
         this->TextureId=0;
         }
       if(this->Table!=0)
@@ -458,6 +484,7 @@ public:
     {
       assert("pre: uptodate" && this->Loaded);
       glBindTexture(GL_TEXTURE_1D,this->TextureId);
+      vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
     }
 
   // \pre the active texture is set properly. (default color,
@@ -467,6 +494,8 @@ public:
               bool linearInterpolation)
     {
       assert("pre: scalarRGB_exists" && scalarRGB!=0);
+      vtkOpenGLClearErrorMacro();
+
       bool needUpdate=false;
       if(this->TextureId==0)
         {
@@ -500,7 +529,7 @@ public:
         glTexImage1D(GL_TEXTURE_1D,0,GL_RGB16,
                      vtkOpenGLGPUVolumeRayCastMapperOpacityTableSize,0,
                      GL_RGB,GL_FLOAT,this->Table);
-        vtkOpenGLGPUVolumeRayCastMapper::PrintError("1d RGB texture is too large");
+        vtkOpenGLStaticCheckErrorMacro("1d RGB texture is too large");
         this->Loaded=true;
         this->BuildTime.Modified();
         this->LastRange[0] = range[0];
@@ -524,6 +553,7 @@ public:
         glTexParameteri(GL_TEXTURE_1D,GL_TEXTURE_MIN_FILTER,value);
         glTexParameteri(GL_TEXTURE_1D,GL_TEXTURE_MAG_FILTER,value);
         }
+    vtkOpenGLStaticCheckErrorMacro("failed after Update");
     }
 protected:
   GLuint TextureId;
@@ -559,6 +589,7 @@ public:
       if(this->TextureId!=0)
         {
         glDeleteTextures(1,&this->TextureId);
+        vtkOpenGLStaticCheckErrorMacro("failed at glDeleteTextures");
         this->TextureId=0;
         }
     }
@@ -572,6 +603,7 @@ public:
     {
       assert("pre: uptodate" && this->Loaded);
       glBindTexture(vtkgl::TEXTURE_3D,this->TextureId);
+      vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
     }
 
   void Update(vtkImageData *input,
@@ -585,6 +617,8 @@ public:
               double tableRange[2],
               vtkIdType maxMemoryInBytes)
     {
+      vtkOpenGLClearErrorMacro();
+
       bool needUpdate=false;
       bool modified=false;
       if(this->TextureId==0)
@@ -917,25 +951,25 @@ public:
                     {
                     GLuint pbo=0;
                     vtkgl::GenBuffers(1,&pbo);
-                    vtkOpenGLGPUVolumeRayCastMapper::PrintError("genbuffer");
+                    vtkOpenGLStaticCheckErrorMacro("genbuffer");
                     vtkgl::BindBuffer(vtkgl::PIXEL_UNPACK_BUFFER,pbo);
-                    vtkOpenGLGPUVolumeRayCastMapper::PrintError("binbuffer");
+                    vtkOpenGLStaticCheckErrorMacro("binbuffer");
                     vtkgl::GLsizeiptr texSize=
                       textureSize[0]*textureSize[1]*textureSize[2]*
                       vtkAbstractArray::GetDataTypeSize(scalarType)*
                       scalars->GetNumberOfComponents();
                     vtkgl::BufferData(vtkgl::PIXEL_UNPACK_BUFFER,texSize,dataPtr,
                                       vtkgl::STREAM_DRAW);
-                    vtkOpenGLGPUVolumeRayCastMapper::PrintError("bufferdata");
+                    vtkOpenGLStaticCheckErrorMacro("bufferdata");
                     vtkgl::TexImage3D(vtkgl::TEXTURE_3D, 0, internalFormat,
                                       textureSize[0],textureSize[1],textureSize[2],
                                       0,format,type,0);
-                    vtkOpenGLGPUVolumeRayCastMapper::PrintError("teximage3d");
+                    vtkOpenGLStaticCheckErrorMacro("teximage3d");
                     vtkgl::BindBuffer(vtkgl::PIXEL_UNPACK_BUFFER,0);
-                    vtkOpenGLGPUVolumeRayCastMapper::PrintError("bindbuffer to 0");
+                    vtkOpenGLStaticCheckErrorMacro("bindbuffer to 0");
                     vtkgl::DeleteBuffers(1,&pbo);
                     }
-                  vtkOpenGLGPUVolumeRayCastMapper::PrintError("3d texture is too large2");
+                  vtkOpenGLStaticCheckErrorMacro("3d texture is too large2");
                   // make sure TexImage3D is executed with our PixelTransfer mode
                   glFinish();
                   // Restore the default values.
@@ -1072,6 +1106,7 @@ public:
         {
         this->BuildTime.Modified();
         }
+    vtkOpenGLStaticCheckErrorMacro("failed after Update");
     }
 
   double *GetLoadedBounds()
@@ -1140,6 +1175,7 @@ public:
       if(this->TextureId!=0)
         {
         glDeleteTextures(1,&this->TextureId);
+        vtkOpenGLStaticCheckErrorMacro("failed at glDeleteTextures");
         this->TextureId=0;
         }
     }
@@ -1154,6 +1190,7 @@ public:
     {
       assert("pre: uptodate" && this->Loaded);
       glBindTexture(vtkgl::TEXTURE_3D,this->TextureId);
+      vtkOpenGLStaticCheckErrorMacro("failed at glBindTexture");
     }
 
   // \pre vtkgl::ActiveTexture(vtkgl::TEXTURE7) has to be called first.
@@ -1166,6 +1203,8 @@ public:
               const char *arrayName,
               vtkIdType maxMemoryInBytes)
     {
+      vtkOpenGLClearErrorMacro();
+
       bool needUpdate=false;
       bool modified=false;
       if(this->TextureId==0)
@@ -1425,6 +1464,7 @@ public:
         {
         this->BuildTime.Modified();
         }
+      vtkOpenGLStaticCheckErrorMacro("failed after Update");
     }
 
   double *GetLoadedBounds()
@@ -2162,6 +2202,8 @@ void vtkOpenGLGPUVolumeRayCastMapper::LoadExtensions(
   // Cg compiler about an infinite loop.
 #ifndef APPLE_SNOW_LEOPARD_BUG
  #ifdef __APPLE__
+  this->UnsupportedRequiredExtensions->Stream<<
+    " Disabled on Apple OS X Snow Leopard with nVidia.";
   this->LoadExtensionsSucceeded=0;
   return;
  #endif
@@ -2170,25 +2212,30 @@ void vtkOpenGLGPUVolumeRayCastMapper::LoadExtensions(
   // Assume success
   this->LoadExtensionsSucceeded=1;
 
-  const char *gl_version=reinterpret_cast<const char *>(glGetString(GL_VERSION));
-  if(strstr(gl_version,"Mesa")!=0)
-    {
-    // - GL_VENDOR cannot be used because it can be "Brian Paul" or
-    // "Mesa project"
-    // - GL_RENDERER cannot be used because it can be "Software Rasterizer" or
-    // "Mesa X11"
-    // - GL_VERSION is more robust. It has things like "2.0 Mesa 7.0.4" or
-    // "2.1 Mesa 7.2" or "2.1 Mesa 7.3-devel"
-    // Mesa does not work with multiple draw buffers:
-    // "framebuffer has bad draw buffer"
-    // "render clipped 1 ERROR (x506) invalid framebuffer operation ext"
+  // get the extension manager
+  vtkOpenGLRenderWindow *context = vtkOpenGLRenderWindow::SafeDownCast(window);
+  if (!context)
+    {
+    this->UnsupportedRequiredExtensions->Stream<<
+      " Disabled because context is not a vtkOpenGLRederWindow.";
     this->LoadExtensionsSucceeded=0;
     return;
     }
+  vtkOpenGLExtensionManager *extensions = context->GetExtensionManager();
 
-  // Create an extension manager
-  vtkOpenGLExtensionManager *extensions=vtkOpenGLExtensionManager::New();
-  extensions->SetRenderWindow(window);
+  // mesa notes:
+  // 8.0.0 -- missing some required extensions
+  // 8.0.5 -- tests pass but there are invalid enum opengl errors reported (mesa bug)
+  // 9.1.3 & 9.1.4 w/ OS Mesa -- GPURayCastCompositeShadeMask fails (mesa bug?) test disabled
+  // 9.2.0 w/llvmpipe -- tests pass cleanly
+  if ( (extensions->DriverIsMesa()
+    && !(extensions->DriverGLRendererIsOSMesa() && extensions->DriverVersionAtLeast(9)))
+    && !extensions->GetIgnoreDriverBugs("Mesa FBO bugs"))
+    {
+    this->UnsupportedRequiredExtensions->Stream<<
+      " Disabled because of Mesa FBO bugs.";
+    this->LoadExtensionsSucceeded=0;
+    }
 
   // GL_ARB_draw_buffers requires OpenGL 1.3, so we must have OpenGL 1.3
   // We don't need to check for some extensions that become part of OpenGL
@@ -2207,7 +2254,6 @@ void vtkOpenGLGPUVolumeRayCastMapper::LoadExtensions(
     this->LoadExtensionsSucceeded=0;
     this->UnsupportedRequiredExtensions->Stream<<
       " OpenGL 1.3 is required but not supported";
-    extensions->Delete();
     return;
     }
 
@@ -2329,7 +2375,6 @@ void vtkOpenGLGPUVolumeRayCastMapper::LoadExtensions(
   // Have we succeeded so far? If not, just return.
   if(!this->LoadExtensionsSucceeded)
     {
-    extensions->Delete();
     return;
     }
 
@@ -2443,8 +2488,6 @@ void vtkOpenGLGPUVolumeRayCastMapper::LoadExtensions(
   this->LastComponent=
     vtkOpenGLGPUVolumeRayCastMapperComponentNotInitialized;
   this->LastShade=vtkOpenGLGPUVolumeRayCastMapperShadeNotInitialized;
-
-  extensions->Delete();
 }
 
 //-----------------------------------------------------------------------------
@@ -2457,6 +2500,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::ReleaseGraphicsResources(
   if(this->OpenGLObjectsCreated)
     {
     window->MakeCurrent();
+    vtkOpenGLClearErrorMacro();
     this->LastSize[0]=0;
     this->LastSize[1]=0;
     GLuint frameBufferObject=static_cast<GLuint>(this->FrameBufferObject);
@@ -2497,14 +2541,17 @@ void vtkOpenGLGPUVolumeRayCastMapper::ReleaseGraphicsResources(
       vtkOpenGLGPUVolumeRayCastMapperComponentNotInitialized;
     this->LastShade=vtkOpenGLGPUVolumeRayCastMapperShadeNotInitialized;
     this->OpenGLObjectsCreated=0;
+    vtkOpenGLCheckErrorMacro("failed during ReleaseGraphicsResources");
     }
 
   if(this->NoiseTextureId!=0)
     {
     window->MakeCurrent();
+    vtkOpenGLClearErrorMacro();
     GLuint noiseTextureObjects=static_cast<GLuint>(this->NoiseTextureId);
     glDeleteTextures(1,&noiseTextureObjects);
     this->NoiseTextureId=0;
+    vtkOpenGLCheckErrorMacro("failed during ReleaseGraphicsResources");
     }
 
   if(this->ScalarsTextures!=0)
@@ -2609,6 +2656,8 @@ void vtkOpenGLGPUVolumeRayCastMapper::ReleaseGraphicsResources(
 //-----------------------------------------------------------------------------
 void vtkOpenGLGPUVolumeRayCastMapper::CreateOpenGLObjects(vtkRenderer *ren)
 {
+  vtkOpenGLClearErrorMacro();
+
   GLint value;
   glGetIntegerv(vtkgl::FRAMEBUFFER_BINDING_EXT,&value);
   GLuint savedFrameBuffer=static_cast<GLuint>(value);
@@ -2758,6 +2807,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::CreateOpenGLObjects(vtkRenderer *ren)
   // Restore default frame buffer.
   vtkgl::BindFramebufferEXT(vtkgl::FRAMEBUFFER_EXT,savedFrameBuffer);
 
+  vtkOpenGLCheckErrorMacro("failed after CreateOpenGLObjects");
 }
 
 
@@ -2774,6 +2824,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::AllocateFrameBuffers(vtkRenderer *ren)
   assert("pre: ren_exists" && ren!=0);
   assert("pre: opengl_objects_created" && this->OpenGLObjectsCreated);
 
+  vtkOpenGLClearErrorMacro();
+
   int result=1;
   int size[2];
   ren->GetTiledSize(&size[0],&size[1]);
@@ -2888,7 +2940,7 @@ int vtkOpenGLGPUVolumeRayCastMapper::AllocateFrameBuffers(vtkRenderer *ren)
       }
     }
 
-  this->PrintError("AllocateFrameBuffers");
+  vtkOpenGLCheckErrorMacro("failed after AllocateFrameBuffers");
   return result;
 }
 
@@ -3103,6 +3155,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::LoadScalarField(vtkImageData *input,
                                        textureExtent[2]<=textureExtent[3] &&
                                        textureExtent[4]<=textureExtent[5])));
 
+  vtkOpenGLClearErrorMacro();
+
   int result=1; // succeeded
 
   // make sure we rebind our texture object to texture0 even if we don't have
@@ -3173,6 +3227,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::LoadScalarField(vtkImageData *input,
     vtkgl::ActiveTexture(vtkgl::TEXTURE0);
     }
 
+  vtkOpenGLCheckErrorMacro("failed after LoadScalarField");
+
   return result;
 }
 
@@ -3190,6 +3246,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::UpdateColorTransferFunction(
   assert("pre: valid_numberOfScalarComponents" &&
          (numberOfScalarComponents==1 || numberOfScalarComponents==4));
 
+  vtkOpenGLClearErrorMacro();
+
   // Build the colormap in a 1D texture.
 
   // 1D RGB-texture=mapping from scalar values to color values
@@ -3224,6 +3282,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::UpdateColorTransferFunction(
      // Restore default
     vtkgl::ActiveTexture( vtkgl::TEXTURE0);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after UpdateColorTransferFunction");
   return 1;
 }
 
@@ -3242,13 +3302,15 @@ int vtkOpenGLGPUVolumeRayCastMapper::UpdateOpacityTransferFunction(
   assert("pre: valid_numberOfScalarComponents" &&
          (numberOfScalarComponents==1 || numberOfScalarComponents==4));
 
+  vtkOpenGLClearErrorMacro();
+
   (void)numberOfScalarComponents; // remove warning in release mode.
 
   vtkVolumeProperty *volumeProperty=vol->GetProperty();
   vtkPiecewiseFunction *scalarOpacity=volumeProperty->GetScalarOpacity();
 
   vtkgl::ActiveTexture( vtkgl::TEXTURE2); //stay here
-  this->OpacityTables->Vector[level].Update(
+  this->OpacityTables->GetTable(level)->Update(
     scalarOpacity,this->BlendMode,
     this->ActualSampleDistance,
     this->TableRange,
@@ -3257,6 +3319,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::UpdateOpacityTransferFunction(
   // Restore default active texture
   vtkgl::ActiveTexture( vtkgl::TEXTURE0);
 
+  vtkOpenGLCheckErrorMacro("failed after UpdateOpacityTransferFunction");
+
   return 1;
 }
 
@@ -3271,6 +3335,8 @@ void vtkOpenGLGPUVolumeRayCastMapper::SetupRender(vtkRenderer *ren,
   assert("pre: ren_exists" && ren!=0);
   assert("pre: vol_exists" && vol!=0);
 
+  vtkOpenGLClearErrorMacro();
+
   int  lowerLeft[2];
   int usize, vsize;
 
@@ -3327,7 +3393,8 @@ void vtkOpenGLGPUVolumeRayCastMapper::SetupRender(vtkRenderer *ren,
   glEnable (GL_CULL_FACE);
   glDisable(GL_DEPTH_TEST);
   glDisable(GL_BLEND); // very important, otherwise the first image looks dark.
-  this->PrintError("SetupRender");
+
+  vtkOpenGLCheckErrorMacro("failed after SetupRender");
 }
 
 //-----------------------------------------------------------------------------
@@ -3558,6 +3625,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::RenderClippedBoundingBox(
 {
   assert("pre: valid_currentBlock" && currentBlock<numberOfBlocks);
 
+  vtkOpenGLClearErrorMacro();
+
   vtkPoints *points = this->ClippedBoundingBox->GetPoints();
   vtkCellArray *polys = this->ClippedBoundingBox->GetPolys();
 
@@ -3719,12 +3788,15 @@ int vtkOpenGLGPUVolumeRayCastMapper::RenderClippedBoundingBox(
       }
     ++polyId;
     }
+
+  vtkOpenGLCheckErrorMacro("failed after RenderClippedBoundingBox");
   return abort;
 }
 
 // ----------------------------------------------------------------------------
 void vtkOpenGLGPUVolumeRayCastMapper::CopyFBOToTexture()
 {
+  vtkOpenGLClearErrorMacro();
   // in OpenGL copy texture to texture does not exist but
   // framebuffer to texture exists (and our FB is an FBO).
   // we have to copy and not just to switch color textures because the
@@ -3751,6 +3823,10 @@ void vtkOpenGLGPUVolumeRayCastMapper::CopyFBOToTexture()
                       this->ReducedSize[1]);
     }
   vtkgl::ActiveTexture(vtkgl::TEXTURE0);
+  // reset the readbuffer to keep os mesa happy
+  // during CheckFrameBufferStatus
+  glReadBuffer(vtkgl::COLOR_ATTACHMENT0_EXT);
+  vtkOpenGLCheckErrorMacro("failed after CopyFBOToTexture");
 }
 
 //-----------------------------------------------------------------------------
@@ -3760,6 +3836,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::CleanupRender()
 {
   glPopMatrix();
   glDisable(GL_CULL_FACE);
+  vtkOpenGLCheckErrorMacro("failed after CleanupRender");
 }
 
 //-----------------------------------------------------------------------------
@@ -3791,6 +3868,8 @@ void vtkOpenGLGPUVolumeRayCastMapper::RenderTextureToScreen(vtkRenderer *ren)
 {
   assert("pre: ren_exists" && ren!=0);
 
+  vtkOpenGLClearErrorMacro();
+
   if ( this->GeneratingCanonicalView )
     {
     // We just need to copy of the data, not render it
@@ -3898,6 +3977,8 @@ void vtkOpenGLGPUVolumeRayCastMapper::RenderTextureToScreen(vtkRenderer *ren)
   glPopMatrix();
   glMatrixMode(GL_MODELVIEW);
   glPopMatrix();
+
+  vtkOpenGLCheckErrorMacro("failed after RenderTextureToScreen");
 }
 
 //-----------------------------------------------------------------------------
@@ -4007,6 +4088,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::PreRender(vtkRenderer *ren,
 {
   // make sure our window is the current OpenGL context.
   ren->GetRenderWindow()->MakeCurrent();
+  vtkOpenGLClearErrorMacro();
 
   // If we haven't already succeeded in loading the extensions,
   // try to load them
@@ -4167,7 +4249,6 @@ void vtkOpenGLGPUVolumeRayCastMapper::PreRender(vtkRenderer *ren,
       if ( vol->GetProperty()->GetShade() )
         {
         shadeMethod=vtkOpenGLGPUVolumeRayCastMapperShadeYes;
-        assert("check: only_1_component_todo" && numberOfScalarComponents==1);
         }
       else
         {
@@ -4397,8 +4478,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::PreRender(vtkRenderer *ren,
   fvalue[0]=static_cast<float>(1.0/size[0]);
   fvalue[1]=static_cast<float>(1.0/size[1]);
   v->SetUniformf("invWindowSize",2,fvalue);
-
-  this->PrintError("after uniforms for textures");
+  vtkOpenGLCheckErrorMacro("after uniforms for textures");
 
 
   this->CheckFrameBufferStatus();
@@ -4560,7 +4640,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::PreRender(vtkRenderer *ren,
   this->CheckFrameBufferStatus();
 
   if(this->OpacityTables!=0 &&
-     this->OpacityTables->Vector.size()!=numberOfLevels)
+     this->OpacityTables->GetNumberOfTables()!=numberOfLevels)
     {
     delete this->OpacityTables;
     this->OpacityTables=0;
@@ -4590,7 +4670,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::PreRender(vtkRenderer *ren,
     this->LastProgressEventTime=vtkTimerLog::GetUniversalTime();
     }
 
-  this->PrintError("PreRender end");
+  vtkOpenGLCheckErrorMacro("failed after PreRender");
 }
 
 //-----------------------------------------------------------------------------
@@ -4836,16 +4916,15 @@ void vtkOpenGLGPUVolumeRayCastMapper::RenderBlock(vtkRenderer *ren,
 
   // opacitytable
   vtkgl::ActiveTexture(vtkgl::TEXTURE2);
-  this->OpacityTables->Vector[level].Bind();
+  this->OpacityTables->GetTable(level)->Bind();
   vtkgl::ActiveTexture(vtkgl::TEXTURE0);
 
-  this->PrintError("after uniforms for projection and shade");
+  vtkOpenGLCheckErrorMacro("after uniforms for projection and shade");
 
   // debug code
   // DO NOT REMOVE the following commented line
 //  this->ValidateProgram();
 
-  this->PrintError("before render");
   if(!this->Cropping)
     {
     this->RenderWholeVolume(ren,vol);
@@ -4855,7 +4934,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::RenderBlock(vtkRenderer *ren,
     this->ClipCroppingRegionPlanes();
     this->RenderRegions(ren,vol);
     }
-  this->PrintError("after render");
+  vtkOpenGLCheckErrorMacro("after render");
 }
 
 //-----------------------------------------------------------------------------
@@ -4865,7 +4944,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::PostRender(
   vtkRenderer *ren,
   int numberOfScalarComponents)
 {
-  this->PrintError("PostRender1");
+  vtkOpenGLClearErrorMacro();
   if(this->NumberOfCroppingRegions>1)
     {
     if(this->BlendMode==vtkVolumeMapper::MAXIMUM_INTENSITY_BLEND
@@ -4918,7 +4997,6 @@ void vtkOpenGLGPUVolumeRayCastMapper::PostRender(
   this->Program->Restore();
 
   this->CleanupRender();
-  this->PrintError("after CleanupRender");
 
   vtkgl::BindFramebufferEXT(vtkgl::FRAMEBUFFER_EXT,
                             static_cast<GLuint>(this->SavedFrameBuffer));
@@ -4935,7 +5013,6 @@ void vtkOpenGLGPUVolumeRayCastMapper::PostRender(
   // Render the texture to the screen - this copies the offscreen buffer
   // onto the screen as a texture mapped polygon
   this->RenderTextureToScreen(ren);
-  this->PrintError("after RenderTextureToScreen");
 
   glEnable(GL_DEPTH_TEST);
 
@@ -4943,7 +5020,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::PostRender(
 
   glFinish();
 
-  this->PrintError("PostRender2");
+  vtkOpenGLCheckErrorMacro("failed after PostRender");
 }
 
 //-----------------------------------------------------------------------------
@@ -4998,9 +5075,6 @@ void vtkOpenGLGPUVolumeRayCastMapper::GPURender(vtkRenderer *ren,
     this->PostRender(ren,numberOfScalarComponents);
     }
 
-  // Let's just make sure no OpenGL errors occurred during this render
-  this->PrintError("End GPU Render");
-
   // If this isn't a canonical view render, then update the progress to
   // 1 because we are done.
   if (!this->GeneratingCanonicalView )
@@ -5281,6 +5355,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::RenderSubVolume(vtkRenderer *ren,
                                                        double bounds[6],
                                                        vtkVolume *volume)
 {
+  vtkOpenGLClearErrorMacro();
+
   // Time to load scalar field
   size_t i;
   int wholeTextureExtent[6];
@@ -5846,7 +5922,6 @@ int vtkOpenGLGPUVolumeRayCastMapper::RenderSubVolume(vtkRenderer *ren,
           v->SetUniformf("lowBounds",3,lowBounds);
           v->SetUniformf("highBounds",3,highBounds);
 
-          this->PrintError("uniform low/high bounds block");
           // other sub-volume rendering code
           this->LoadProjectionParameters(ren,volume);
           this->ClipBoundingBox(ren,blocks[k].Bounds,volume);
@@ -5857,7 +5932,6 @@ int vtkOpenGLGPUVolumeRayCastMapper::RenderSubVolume(vtkRenderer *ren,
             {
             this->CopyFBOToTexture();
             }
-          this->PrintError("render clipped block 1");
 
           ++i;
           }
@@ -5922,7 +5996,6 @@ int vtkOpenGLGPUVolumeRayCastMapper::RenderSubVolume(vtkRenderer *ren,
   v->SetUniformf("lowBounds",3,lowBounds);
   v->SetUniformf("highBounds",3,highBounds);
 
-  this->PrintError("uniform low/high bounds");
   // other sub-volume rendering code
   this->LoadProjectionParameters(ren,volume);
   this->ClipBoundingBox(ren,bounds,volume);
@@ -5940,7 +6013,7 @@ int vtkOpenGLGPUVolumeRayCastMapper::RenderSubVolume(vtkRenderer *ren,
     {
     this->CopyFBOToTexture();
     }
-  this->PrintError("render clipped 1");
+  vtkOpenGLCheckErrorMacro("failed after RenderSubVolume");
   return abort;
 }
 
@@ -6116,7 +6189,6 @@ void vtkOpenGLGPUVolumeRayCastMapper::LoadProjectionParameters(
     fvalues[2]=static_cast<float>(2*coefMatrix->GetElement(0,2));
     v->SetUniformf("matrix2",3,fvalues);
     }
-  this->PrintError("after uniforms for projection");
 
   // Change-of-coordinate matrix from Eye space to texture space.
   vtkMatrix4x4 *eyeToTexture=this->TempMatrix[1];
@@ -6597,6 +6669,7 @@ void vtkOpenGLGPUVolumeRayCastMapper::GetOpenGLState()
 
   // restore current active texture
   vtkgl::ActiveTexture(activeTexture);
+  vtkOpenGLClearErrorMacro();
 }
 
 //-----------------------------------------------------------------------------
@@ -6754,6 +6827,8 @@ int vtkOpenGLGPUVolumeRayCastMapper::PowerOfTwoGreaterOrEqual(int x)
 //-----------------------------------------------------------------------------
 void vtkOpenGLGPUVolumeRayCastMapper::UpdateNoiseTexture()
 {
+  vtkOpenGLClearErrorMacro();
+
   if(this->NoiseTextureId==0)
     {
     GLuint noiseTextureObject;
@@ -6812,6 +6887,8 @@ void vtkOpenGLGPUVolumeRayCastMapper::UpdateNoiseTexture()
 
     vtkgl::ActiveTexture(vtkgl::TEXTURE0);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after UpdateNoiseTexture");
 }
 
 // ----------------------------------------------------------------------------
diff --git a/Rendering/VolumeOpenGL/vtkOpenGLHAVSVolumeMapper.cxx b/Rendering/VolumeOpenGL/vtkOpenGLHAVSVolumeMapper.cxx
index 3bac471..fc73709 100644
--- a/Rendering/VolumeOpenGL/vtkOpenGLHAVSVolumeMapper.cxx
+++ b/Rendering/VolumeOpenGL/vtkOpenGLHAVSVolumeMapper.cxx
@@ -33,6 +33,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkUnstructuredGridPartialPreIntegration.h"
 #include "vtkVolumeProperty.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkHAVSVolumeMapper_kbufferVP.h"
 #include "vtkHAVSVolumeMapper_k2BeginFP.h"
@@ -86,6 +87,8 @@ void vtkOpenGLHAVSVolumeMapper::ReleaseGraphicsResources(vtkWindow *renWin)
   if (this->Initialized)
     {
     static_cast<vtkRenderWindow *>(renWin)->MakeCurrent();
+    vtkOpenGLClearErrorMacro();
+
     this->DeleteShaders();
 
     glDisable( vtkgl::VERTEX_PROGRAM_ARB );
@@ -109,6 +112,7 @@ void vtkOpenGLHAVSVolumeMapper::ReleaseGraphicsResources(vtkWindow *renWin)
       vtkgl::BindBuffer(vtkgl::ARRAY_BUFFER, 0);
       vtkgl::BindBuffer(vtkgl::ELEMENT_ARRAY_BUFFER, 0);
       }
+    vtkOpenGLCheckErrorMacro("failed after ReleaseGraphicsResources");
     }
   this->Superclass::ReleaseGraphicsResources(renWin);
 }
@@ -256,6 +260,8 @@ void vtkOpenGLHAVSVolumeMapper::SetGPUDataStructures(bool gpu)
 // Store data structures on GPU if possible
 void vtkOpenGLHAVSVolumeMapper::InitializeGPUDataStructures()
 {
+  vtkOpenGLClearErrorMacro();
+
   if (this->GPUDataStructures)
     {
     if (VBOVertexName)
@@ -287,7 +293,6 @@ void vtkOpenGLHAVSVolumeMapper::InitializeGPUDataStructures()
 
     vtkgl::BindBuffer(vtkgl::ARRAY_BUFFER, 0);
     vtkgl::BindBuffer(vtkgl::ELEMENT_ARRAY_BUFFER, 0);
-    this->CheckOpenGLError("Initializing VBOs");
 
     // Build tex coord array
     vtkgl::GenBuffers(1, reinterpret_cast<GLuint *>(&this->VBOTexCoordName));
@@ -305,12 +310,16 @@ void vtkOpenGLHAVSVolumeMapper::InitializeGPUDataStructures()
       }
     this->OrderedTriangles = new unsigned int[this->NumberOfTriangles*3];
     }
+
+  vtkOpenGLCheckErrorMacro("failed after InitializeGPUDataStructures");
 }
 
 //----------------------------------------------------------------------------
 // Vertex and Fragment shaders
 void vtkOpenGLHAVSVolumeMapper::InitializeShaders()
 {
+  vtkOpenGLClearErrorMacro();
+
   // Create vertex shader
   glEnable( vtkgl::VERTEX_PROGRAM_ARB );
   vtkgl::GenProgramsARB(1, reinterpret_cast<GLuint *>(&this->VertexProgram));
@@ -376,6 +385,8 @@ void vtkOpenGLHAVSVolumeMapper::InitializeShaders()
   vtkgl::BindProgramARB(vtkgl::FRAGMENT_PROGRAM_ARB, 0);
   glDisable( vtkgl::VERTEX_PROGRAM_ARB );
   glDisable( vtkgl::FRAGMENT_PROGRAM_ARB );
+
+  vtkOpenGLCheckErrorMacro("failed after InitializeShaders");
 }
 
 //----------------------------------------------------------------------------
@@ -387,12 +398,15 @@ void vtkOpenGLHAVSVolumeMapper::DeleteShaders()
   vtkgl::DeleteProgramsARB(1, reinterpret_cast<GLuint *>(&this->FragmentProgram));
   vtkgl::DeleteProgramsARB(1,
                            reinterpret_cast<GLuint *>(&this->FragmentProgramEnd));
+
+  vtkOpenGLCheckErrorMacro("failed after DeleteShaders");
 }
 
 //----------------------------------------------------------------------------
 // Build the lookup tables used for partial pre-integration
 void vtkOpenGLHAVSVolumeMapper::InitializeLookupTables(vtkVolume *vol)
 {
+  vtkOpenGLClearErrorMacro();
   this->Superclass::InitializeLookupTables(vol);
 
   // Create a 1D texture for transfer function look up
@@ -423,12 +437,15 @@ void vtkOpenGLHAVSVolumeMapper::InitializeLookupTables(vtkVolume *vol)
                  0, GL_LUMINANCE, GL_FLOAT, psiTable);
     ppi->Delete();
     }
+  vtkOpenGLCheckErrorMacro("failed after InitializeLookupTables");
 }
 
 //----------------------------------------------------------------------------
 // Initialize FBO and attach color and depth textures.
 void vtkOpenGLHAVSVolumeMapper::InitializeFramebufferObject()
 {
+  vtkOpenGLClearErrorMacro();
+
   GLint maxRB;
   glGetIntegerv(vtkgl::MAX_RENDERBUFFER_SIZE_EXT, &maxRB);
   int texSize = (maxRB > 1024)? 1024 : maxRB;
@@ -439,7 +456,7 @@ void vtkOpenGLHAVSVolumeMapper::InitializeFramebufferObject()
     // Create FBO
     vtkgl::GenFramebuffersEXT(1,
                               reinterpret_cast<GLuint *>(&this->FramebufferObject));
-    this->CheckOpenGLError("creating FBO");
+    vtkOpenGLCheckErrorMacro("creating FBO");
     }
   else
     {
@@ -464,13 +481,13 @@ void vtkOpenGLHAVSVolumeMapper::InitializeFramebufferObject()
                  GL_RGBA, GL_FLOAT, 0);
     }
 
-  this->CheckOpenGLError("creating fbo textures");
+  vtkOpenGLCheckErrorMacro("creating fbo textures");
 
   // Bind framebuffer object
   GLint savedFrameBuffer;
   glGetIntegerv(vtkgl::FRAMEBUFFER_BINDING_EXT,&savedFrameBuffer);
   vtkgl::BindFramebufferEXT(vtkgl::FRAMEBUFFER_EXT, this->FramebufferObject);
-  this->CheckOpenGLError("binding FBO");
+  vtkOpenGLCheckErrorMacro("binding FBO");
 
   // Generate depth buffer texture for framebuffer
   vtkgl::GenRenderbuffersEXT(1, reinterpret_cast<GLuint *>(&this->DepthTexture));
@@ -504,7 +521,7 @@ void vtkOpenGLHAVSVolumeMapper::InitializeFramebufferObject()
                                     vtkgl::RENDERBUFFER_EXT,
                                     this->DepthTexture);
 
-  this->CheckOpenGLError("attach textures to FBO");
+  vtkOpenGLCheckErrorMacro("attach textures to FBO");
 
   // Validate FBO after attaching textures
   if (vtkgl::CheckFramebufferStatusEXT(vtkgl::FRAMEBUFFER_EXT) !=
@@ -518,6 +535,8 @@ void vtkOpenGLHAVSVolumeMapper::InitializeFramebufferObject()
 
   this->FramebufferObjectSize = texSize;
   this->KBufferState = this->KBufferSize;
+
+  vtkOpenGLCheckErrorMacro("failed after InitializeFramebufferObject");
 }
 
 //----------------------------------------------------------------------------
@@ -615,6 +634,8 @@ void vtkOpenGLHAVSVolumeMapper::Render(vtkRenderer *ren,
 // The OpenGL rendering
 void vtkOpenGLHAVSVolumeMapper::RenderHAVS(vtkRenderer *ren)
 {
+  vtkOpenGLClearErrorMacro();
+
   glPushAttrib(GL_ENABLE_BIT         |
                GL_CURRENT_BIT        |
                GL_COLOR_BUFFER_BIT   |
@@ -681,6 +702,7 @@ void vtkOpenGLHAVSVolumeMapper::RenderHAVS(vtkRenderer *ren)
 
   if (ren->GetRenderWindow()->CheckAbortStatus())
     {
+    vtkOpenGLCheckErrorMacro("failed during Render");
     return;
     }
 
@@ -707,6 +729,7 @@ void vtkOpenGLHAVSVolumeMapper::RenderHAVS(vtkRenderer *ren)
   this->UpdateProgress(0.4);
   if (ren->GetRenderWindow()->CheckAbortStatus())
     {
+    vtkOpenGLCheckErrorMacro("failed during Render");
     return;
     }
 
@@ -750,6 +773,7 @@ void vtkOpenGLHAVSVolumeMapper::RenderHAVS(vtkRenderer *ren)
   this->DrawBlend(vpWidth, vpHeight, depthRange[0], depthRange[1]);
 
   this->UpdateProgress(1.0);
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 //----------------------------------------------------------------------------
@@ -761,6 +785,8 @@ void vtkOpenGLHAVSVolumeMapper::SetupFBOZBuffer(int screenWidth,
                                                 float depthFar,
                                                 float *zbuffer)
 {
+  vtkOpenGLClearErrorMacro();
+
   // Setup view for z-buffer copy
   glMatrixMode(GL_PROJECTION);
   glPushMatrix();
@@ -778,7 +804,6 @@ void vtkOpenGLHAVSVolumeMapper::SetupFBOZBuffer(int screenWidth,
   glClear(GL_DEPTH_BUFFER_BIT);
   glDepthFunc(GL_LESS);
 
-  glDrawBuffer(vtkgl::DEPTH_ATTACHMENT_EXT);
   glRasterPos2i(0,0);
   glPixelStorei(GL_PACK_ALIGNMENT, 1);
   glDrawPixels(screenWidth, screenHeight, GL_DEPTH_COMPONENT, GL_FLOAT,
@@ -793,12 +818,16 @@ void vtkOpenGLHAVSVolumeMapper::SetupFBOZBuffer(int screenWidth,
   glPopMatrix();
   glMatrixMode(GL_MODELVIEW);
   glPopMatrix();
+
+  vtkOpenGLCheckErrorMacro("failed after SetupFBOZBuffer");
 }
 
 //----------------------------------------------------------------------------
 // Setup reading and writing into multiple textures from an FBO
 void vtkOpenGLHAVSVolumeMapper::SetupFBOMRT()
 {
+  vtkOpenGLClearErrorMacro();
+
   int numBuffers = (this->KBufferSize == VTK_KBUFFER_SIZE_2)? 2 : 4;
   GLenum buffers[4] = {vtkgl::COLOR_ATTACHMENT0_EXT,
                        vtkgl::COLOR_ATTACHMENT1_EXT,
@@ -806,9 +835,6 @@ void vtkOpenGLHAVSVolumeMapper::SetupFBOMRT()
                        vtkgl::COLOR_ATTACHMENT3_EXT};
   vtkgl::DrawBuffers(numBuffers, buffers);
 
-
-  this->CheckOpenGLError("setup MRTs");
-
   // Bind textures for reading
   glEnable(GL_TEXTURE_2D);
   vtkgl::ActiveTexture(vtkgl::TEXTURE0);
@@ -846,7 +872,7 @@ void vtkOpenGLHAVSVolumeMapper::SetupFBOMRT()
     glBindTexture(GL_TEXTURE_1D,this->TransferFunctionTexture);
     }
 
-  this->CheckOpenGLError("setup FBO reading");
+  vtkOpenGLCheckErrorMacro("SetupFBOMRT");
 }
 
 //----------------------------------------------------------------------------
@@ -855,6 +881,8 @@ void vtkOpenGLHAVSVolumeMapper::SetupFBOMRT()
 void vtkOpenGLHAVSVolumeMapper::DrawFBOInit(int screenWidth, int screenHeight,
                                             float depthNear, float depthFar)
 {
+  vtkOpenGLClearErrorMacro();
+
   // Bind initializing fragment shader
   glEnable(vtkgl::FRAGMENT_PROGRAM_ARB);
   vtkgl::BindProgramARB(vtkgl::FRAGMENT_PROGRAM_ARB,
@@ -887,6 +915,8 @@ void vtkOpenGLHAVSVolumeMapper::DrawFBOInit(int screenWidth, int screenHeight,
   glPopMatrix();
   glMatrixMode(GL_MODELVIEW);
   glPopMatrix();
+
+  vtkOpenGLCheckErrorMacro("failed after DrawFBOInit");
 }
 
 //----------------------------------------------------------------------------
@@ -894,6 +924,8 @@ void vtkOpenGLHAVSVolumeMapper::DrawFBOInit(int screenWidth, int screenHeight,
 // framebuffer.
 void vtkOpenGLHAVSVolumeMapper::DrawFBOGeometry()
 {
+  vtkOpenGLClearErrorMacro();
+
   // Bind shaders
   glEnable(vtkgl::VERTEX_PROGRAM_ARB);
   vtkgl::BindProgramARB(vtkgl::VERTEX_PROGRAM_ARB, this->VertexProgram);
@@ -925,6 +957,8 @@ void vtkOpenGLHAVSVolumeMapper::DrawFBOGeometry()
   vtkgl::BindProgramARB(vtkgl::FRAGMENT_PROGRAM_ARB, 0);
   vtkgl::BindProgramARB(vtkgl::VERTEX_PROGRAM_ARB, 0);
   glDisable( vtkgl::VERTEX_PROGRAM_ARB);
+
+  vtkOpenGLCheckErrorMacro("failed after DrawFBOGeometry");
 }
 
 //----------------------------------------------------------------------------
@@ -934,6 +968,8 @@ void vtkOpenGLHAVSVolumeMapper::DrawFBOFlush(int screenWidth,
                                              int screenHeight,
                                              float depthNear, float depthFar)
 {
+  vtkOpenGLClearErrorMacro();
+
   float scale = this->MaxEdgeLength;
   if (this->LevelOfDetail || !this->PartiallyRemoveNonConvexities)
     {
@@ -1005,7 +1041,6 @@ void vtkOpenGLHAVSVolumeMapper::DrawFBOFlush(int screenWidth,
   glDisable(GL_TEXTURE_2D);
   vtkgl::ActiveTexture(vtkgl::TEXTURE0);
   glDisable(GL_TEXTURE_2D);
-  vtkgl::ActiveTexture(0);
 
   glDisable(GL_DEPTH_TEST);
 
@@ -1024,7 +1059,8 @@ void vtkOpenGLHAVSVolumeMapper::DrawFBOFlush(int screenWidth,
     glDisableClientState(GL_VERTEX_ARRAY);
     glDisableClientState(GL_TEXTURE_COORD_ARRAY);
     }
-  this->CheckOpenGLError("Flushed FBO");
+
+  vtkOpenGLCheckErrorMacro("failed after DrawFBOFlush");
 }
 
 //----------------------------------------------------------------------------
@@ -1034,6 +1070,8 @@ void vtkOpenGLHAVSVolumeMapper::DrawFBOFlush(int screenWidth,
 void vtkOpenGLHAVSVolumeMapper::DrawBlend(int screenWidth, int screenHeight,
                                           float depthNear, float depthFar)
 {
+  vtkOpenGLClearErrorMacro();
+
   // Setup draw buffer
   glDrawBuffer(GL_BACK);
 
@@ -1086,7 +1124,7 @@ void vtkOpenGLHAVSVolumeMapper::DrawBlend(int screenWidth, int screenHeight,
   glBindTexture(GL_TEXTURE_2D, 0);
   glDisable(GL_TEXTURE_2D);
 
-  CheckOpenGLError("Final Blend");
+  vtkOpenGLCheckErrorMacro("failed after DrawBlend");
 }
 
 //----------------------------------------------------------------------------
@@ -1147,6 +1185,17 @@ bool vtkOpenGLHAVSVolumeMapper::SupportedByHardware(vtkRenderer *r)
     static_cast<vtkOpenGLRenderWindow *>(r->GetRenderWindow())
     ->GetExtensionManager();
 
+  // os mesa notes:
+  // 9.1.4 -- test fails with open gl errors (mesa bug)
+  // 9.2.0 w/llvmpipe -- test passes
+  if ( extensions->DriverIsMesa()
+    && !(extensions->DriverGLRendererIsOSMesa()
+    && extensions->DriverGLRendererHasToken("llvmpipe"))
+    && !extensions->GetIgnoreDriverBugs("Mesa FBO bugs") )
+    {
+    return false;
+    }
+
   // Temporarily filter out the Macs, as this mapper makes the ATI driver crash
   // (RogueResearch2 on VTK, ATI Radeon X1600 OpenGL Engine 2.0 ATI-1.4.56) and
   // makes the Nvidia driver render some corrupted image (kamino on ParaView3
diff --git a/Rendering/VolumeOpenGL/vtkOpenGLProjectedAAHexahedraMapper.cxx b/Rendering/VolumeOpenGL/vtkOpenGLProjectedAAHexahedraMapper.cxx
index 7e2d491..f9b79b8 100644
--- a/Rendering/VolumeOpenGL/vtkOpenGLProjectedAAHexahedraMapper.cxx
+++ b/Rendering/VolumeOpenGL/vtkOpenGLProjectedAAHexahedraMapper.cxx
@@ -29,19 +29,22 @@
 #include "vtkCellArray.h"
 #include "vtkCellCenterDepthSort.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkColorTransferFunction.h"
 #include "vtkDoubleArray.h"
 #include "vtkFloatArray.h"
+#include "vtkIdList.h"
 #include "vtkIdTypeArray.h"
 #include "vtkGarbageCollector.h"
 #include "vtkMath.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkOpenGLExtensionManager.h"
+#include "vtkOpenGLRenderWindow.h"
 #include "vtkPiecewiseFunction.h"
 #include "vtkPointData.h"
 #include "vtkRenderer.h"
 #include "vtkRenderWindow.h"
-#include "vtkOpenGLRenderWindow.h"
 #include "vtkTimerLog.h"
 #include "vtkUnsignedCharArray.h"
 #include "vtkUnstructuredGrid.h"
@@ -52,8 +55,10 @@
 #include "vtkgl.h"
 #include "vtkShaderProgram2.h"
 #include "vtkShader2.h"
+#include "vtkSmartPointer.h"
 #include "vtkUniformVariables.h"
 #include "vtkShader2Collection.h"
+#include "vtkOpenGLError.h"
 
 #include <math.h>
 #include <algorithm>
@@ -191,11 +196,13 @@ void vtkOpenGLProjectedAAHexahedraMapper::Initialize(
 void vtkOpenGLProjectedAAHexahedraMapper::Render(vtkRenderer *renderer,
                                                  vtkVolume *volume)
 {
+  vtkOpenGLClearErrorMacro();
+
   if ( !this->Initialized )
     {
     this->Initialize(renderer, volume);
     }
-  vtkUnstructuredGrid *input = this->GetInput();
+  vtkUnstructuredGridBase *input = this->GetInput();
   vtkVolumeProperty *property = volume->GetProperty();
 
   float last_max_cell_size = this->MaxCellSize;
@@ -206,17 +213,20 @@ void vtkOpenGLProjectedAAHexahedraMapper::Render(vtkRenderer *renderer,
     {
     this->GaveError = 0;
 
-    vtkCellArray *cells = input->GetCells();
-    if (!cells)
+    if (input->GetNumberOfCells() == 0)
       {
       // Apparently, the input has no cells.  Just do nothing.
       return;
       }
 
-    vtkIdType npts, *pts, i;
-    cells->InitTraversal();
-    for (i = 0; cells->GetNextCell(npts, pts); i++)
+    vtkIdType npts, *pts;
+    vtkSmartPointer<vtkCellIterator> cellIter =
+        vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
+    for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+         cellIter->GoToNextCell())
       {
+      npts = cellIter->GetNumberOfPoints();
+      pts = cellIter->GetPointIds()->GetPointer(0);
       int j;
       if (npts != 8)
         {
@@ -319,6 +329,8 @@ void vtkOpenGLProjectedAAHexahedraMapper::Render(vtkRenderer *renderer,
 
   this->Timer->StopTimer();
   this->TimeToDraw = this->Timer->GetElapsedTime();
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 // ----------------------------------------------------------------------------
@@ -326,6 +338,8 @@ void vtkOpenGLProjectedAAHexahedraMapper::UpdatePreintegrationTexture(
   vtkVolume *volume,
   vtkDataArray *scalars)
 {
+  vtkOpenGLClearErrorMacro();
+
   // rebuild the preintegration texture
   vtkUnstructuredGridPreIntegration *pi=
     vtkUnstructuredGridPreIntegration::New();
@@ -357,6 +371,7 @@ void vtkOpenGLProjectedAAHexahedraMapper::UpdatePreintegrationTexture(
 
   pi->Delete();
 
+  vtkOpenGLCheckErrorMacro("failed after UpdatePreintegrationTexture");
 }
 
 // ----------------------------------------------------------------------------
@@ -399,6 +414,8 @@ void vtkOpenGLProjectedAAHexahedraMapper::CreateProgram(vtkRenderWindow *w)
 // ----------------------------------------------------------------------------
 void vtkOpenGLProjectedAAHexahedraMapper::SetState(double *observer)
 {
+  vtkOpenGLClearErrorMacro();
+
   glDepthMask(GL_FALSE);
 
   // save the default blend function.
@@ -452,6 +469,8 @@ void vtkOpenGLProjectedAAHexahedraMapper::SetState(double *observer)
   glTexCoordPointer( 4, GL_FLOAT, 0, node_data2);
 
   this->num_points = 0;
+
+  vtkOpenGLCheckErrorMacro("failed after SetState");
 }
 
 // ----------------------------------------------------------------------------
@@ -490,6 +509,8 @@ void vtkOpenGLProjectedAAHexahedraMapper::RenderHexahedron(float vmin[3],
 // ----------------------------------------------------------------------------
 void vtkOpenGLProjectedAAHexahedraMapper::UnsetState()
 {
+  vtkOpenGLClearErrorMacro();
+
   // flush what remains of our points
   if (this->num_points>0)
     {
@@ -509,6 +530,8 @@ void vtkOpenGLProjectedAAHexahedraMapper::UnsetState()
   glBindTexture(vtkgl::TEXTURE_3D, 0);
 
   glDepthMask(GL_TRUE);
+
+  vtkOpenGLCheckErrorMacro("failed fater UnsetState");
 }
 
 // ----------------------------------------------------------------------------
@@ -577,7 +600,7 @@ void vtkOpenGLProjectedAAHexahedraMapper::ProjectHexahedra(
   vtkRenderer *renderer,
   vtkVolume *volume)
 {
-  vtkUnstructuredGrid *input = this->GetInput();
+  vtkUnstructuredGridBase *input = this->GetInput();
 
   this->VisibilitySort->SetInput(input);
   this->VisibilitySort->SetDirectionToBackToFront();
@@ -604,11 +627,11 @@ void vtkOpenGLProjectedAAHexahedraMapper::ProjectHexahedra(
 
   this->SetState(observer);
 
-  vtkIdType *cells = input->GetCells()->GetPointer();
   vtkIdType totalnumcells = input->GetNumberOfCells();
   vtkIdType numcellsrendered = 0;
 
   // Let's do it!
+  vtkNew<vtkIdList> cellPtIds;
   for (vtkIdTypeArray *sorted_cell_ids = this->VisibilitySort->GetNextCells();
        sorted_cell_ids != NULL;
        sorted_cell_ids = this->VisibilitySort->GetNextCells())
@@ -624,11 +647,12 @@ void vtkOpenGLProjectedAAHexahedraMapper::ProjectHexahedra(
     for (vtkIdType i = 0; i < num_cell_ids; i++)
       {
       vtkIdType cell = cell_ids[i];
+      input->GetCellPoints(cell, cellPtIds.GetPointer());
 
       float corner_scalars[8];
 
       // get the data for the current hexahedron
-      vtkIdType index = cells [ 9 * cell + 1 ];
+      vtkIdType index = cellPtIds->GetId(0);
       float* p = points + 3 * index;
 
       float vmin[3] = {p[0],p[1],p[2]},
@@ -637,7 +661,7 @@ void vtkOpenGLProjectedAAHexahedraMapper::ProjectHexahedra(
         int j;
         for(j = 1; j < 8; j++)
           {
-          index = cells [ 9 * cell + 1 + j ];
+          index = cellPtIds->GetId(j);
 
           p = points + 3 * index;
           if (p[0]<vmin[0])
@@ -677,7 +701,7 @@ void vtkOpenGLProjectedAAHexahedraMapper::ProjectHexahedra(
 
         for(j = 0; j < 8; j++)
           {
-          index = cells [ 9 * cell + 1 + j ];
+          index = cellPtIds->GetId(j);
 
           p = points + 3 * index;
           int corner = 0;
@@ -731,6 +755,7 @@ void vtkOpenGLProjectedAAHexahedraMapper::ReleaseGraphicsResources(
     {
     GLuint texid = this->PreintTexture;
     glDeleteTextures(1, &texid);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteTextures");
     this->PreintTexture = 0;
     }
   this->Superclass::ReleaseGraphicsResources(win);
diff --git a/Rendering/VolumeOpenGL/vtkOpenGLProjectedTetrahedraMapper.cxx b/Rendering/VolumeOpenGL/vtkOpenGLProjectedTetrahedraMapper.cxx
index e1c0eaa..47477b8 100644
--- a/Rendering/VolumeOpenGL/vtkOpenGLProjectedTetrahedraMapper.cxx
+++ b/Rendering/VolumeOpenGL/vtkOpenGLProjectedTetrahedraMapper.cxx
@@ -27,15 +27,19 @@
 #include "vtkCamera.h"
 #include "vtkCellArray.h"
 #include "vtkCellData.h"
+#include "vtkCellIterator.h"
 #include "vtkFloatArray.h"
+#include "vtkIdList.h"
 #include "vtkIdTypeArray.h"
 #include "vtkMath.h"
 #include "vtkMatrix4x4.h"
+#include "vtkNew.h"
 #include "vtkObjectFactory.h"
 #include "vtkPointData.h"
 #include "vtkRenderer.h"
 #include "vtkOpenGLRenderWindow.h"
 #include "vtkOpenGLExtensionManager.h"
+#include "vtkSmartPointer.h"
 #include "vtkTimerLog.h"
 #include "vtkUnsignedCharArray.h"
 #include "vtkUnstructuredGrid.h"
@@ -44,16 +48,12 @@
 #include "vtkVolumeProperty.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkgl.h"
 
-#include <math.h>
+#include <cmath>
 #include <algorithm>
-#include <vtksys/SystemTools.hxx>
-
-
-static GLint  db;
-//-----------------------------------------------------------------------------
 
 // static int tet_faces[4][3] = { {1,2,3}, {2,0,3}, {0,1,3}, {0,2,1} };
 static int tet_edges[6][2] = { {0,1}, {1,2}, {2,0},
@@ -65,57 +65,51 @@ const int SqrtTableSize = 2048;
 class vtkOpenGLProjectedTetrahedraMapper::vtkInternals
 {
 public:
+  vtkInternals()
+    {
+    this->FrameBufferObjectId = 0;
+    this->RenderBufferObjectIds[0] = 0;
+    this->RenderBufferObjectIds[1] = 0;
+    this->OpacityTexture = 0;
+    }
   GLuint FrameBufferObjectId;
   GLuint RenderBufferObjectIds[2];
-
   GLuint OpacityTexture;
 };
 
 //-----------------------------------------------------------------------------
-
 vtkStandardNewMacro(vtkOpenGLProjectedTetrahedraMapper);
 
+//-----------------------------------------------------------------------------
 vtkOpenGLProjectedTetrahedraMapper::vtkOpenGLProjectedTetrahedraMapper()
 {
   this->TransformedPoints = vtkFloatArray::New();
   this->Colors = vtkUnsignedCharArray::New();
-
   this->LastProperty = NULL;
-
   this->MaxCellSize = 0;
-
   this->GaveError = 0;
-
   this->SqrtTable = new float[SqrtTableSize];
   this->SqrtTableBias = 0.0;
-
   this->Initialized = false;
   this->CurrentFBOWidth = -1;
   this->CurrentFBOHeight = -1;
   this->FloatingPointFrameBufferResourcesAllocated = false;
-
   this->Internals = new vtkOpenGLProjectedTetrahedraMapper::vtkInternals;
-  this->Internals->FrameBufferObjectId = 0;
-  this->Internals->RenderBufferObjectIds[0]
-      = this->Internals->RenderBufferObjectIds[1] = 0;
-  this->Internals->OpacityTexture = 0;
-
   this->UseFloatingPointFrameBuffer = true;
+  this->CanDoFloatingPointFrameBuffer = false;
 }
 
+//-----------------------------------------------------------------------------
 vtkOpenGLProjectedTetrahedraMapper::~vtkOpenGLProjectedTetrahedraMapper()
 {
   this->ReleaseGraphicsResources(NULL);
   this->TransformedPoints->Delete();
   this->Colors->Delete();
-
-  if (this->Internals)
-    {
-    delete this->Internals;
-    }
+  delete this->Internals;
   delete[] this->SqrtTable;
 }
 
+//-----------------------------------------------------------------------------
 void vtkOpenGLProjectedTetrahedraMapper::PrintSelf(ostream &os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os, indent);
@@ -124,107 +118,215 @@ void vtkOpenGLProjectedTetrahedraMapper::PrintSelf(ostream &os, vtkIndent indent
      << (this->UseFloatingPointFrameBuffer ? "True" : "False") << endl;
 }
 
-void vtkOpenGLProjectedTetrahedraMapper::Initialize(vtkRenderer *renderer)
+//-----------------------------------------------------------------------------
+bool vtkOpenGLProjectedTetrahedraMapper::IsSupported(vtkRenderWindow *rwin)
 {
-  this->Initialized = true;
+  vtkOpenGLRenderWindow *context = vtkOpenGLRenderWindow::SafeDownCast(rwin);
+  if (!context)
+    {
+    vtkErrorMacro(
+      << "Support for " << rwin->GetClassName() << " not implemented");
+    return false;
+    }
 
-  vtkOpenGLRenderWindow *renwin
-      = vtkOpenGLRenderWindow::SafeDownCast(renderer->GetRenderWindow());
-  vtkOpenGLExtensionManager *extensions = renwin->GetExtensionManager();
+  vtkOpenGLExtensionManager *extensions = context->GetExtensionManager();
+  bool texSupport
+    = (extensions->ExtensionSupported("GL_ARB_texture_float") != 0)
+    && (extensions->ExtensionSupported("GL_VERSION_1_3") != 0);
+
+  // use render to FBO when it's supported
+  this->CanDoFloatingPointFrameBuffer = false;
+  if (this->UseFloatingPointFrameBuffer)
+    {
+    this->CanDoFloatingPointFrameBuffer
+      = (extensions->ExtensionSupported("GL_ARB_framebuffer_object") != 0)
+      && (extensions->ExtensionSupported("GL_ARB_draw_buffers") != 0);
+    if (!this->CanDoFloatingPointFrameBuffer)
+      {
+      vtkWarningMacro(
+        "Missing FBO support. The algorithm may produce visual artifacts.");
+      }
+    }
 
-  this->CanDoFloatingPointFrameBuffer
-      = extensions->ExtensionSupported("GL_ARB_framebuffer_object") &&
-               extensions->ExtensionSupported("GL_ARB_texture_float");
+  // exclude ATI Radeon HD, except on Apple, because there seems to
+  // be a bug in ATI's legacy fixed function texturing support in recent
+  // drivers. The Radeon HD cards are identified here by the OpenGL version
+  // because the renderer string is inconsistent across platforms.
+  // win7 RHD5800, win7 RHD6750M, Linux RHD7870 all exhibit the bug.
+  bool driverSupport
+  #if defined(__APPLE__)
+    = true;
+  #else
+    = !(extensions->DriverIsATI()
+    && (extensions->GetDriverGLVersionMajor() >= 3))
+    || extensions->GetIgnoreDriverBugs("ATI texturing bug");
+  #endif
+
+  return texSupport && driverSupport;
+}
 
-  if (this->CanDoFloatingPointFrameBuffer)
+//-----------------------------------------------------------------------------
+void vtkOpenGLProjectedTetrahedraMapper::Initialize(vtkRenderer *renderer)
+{
+  if (this->Initialized)
     {
-    extensions->LoadExtension("GL_ARB_framebuffer_object");
-    extensions->LoadExtension("GL_ARB_texture_float");
+    return;
+    }
 
-    vtkgl::GenFramebuffers(1, &this->Internals->FrameBufferObjectId);
-    vtkgl::GenRenderbuffers(2, this->Internals->RenderBufferObjectIds);
+  this->Initialized = true;
 
-    vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER_EXT, 0);
-    vtkgl::BindFramebuffer(vtkgl::DRAW_FRAMEBUFFER_EXT,
-                           this->Internals->FrameBufferObjectId);
+  vtkOpenGLRenderWindow *renwin
+    = vtkOpenGLRenderWindow::SafeDownCast(renderer->GetRenderWindow());
+  if ( !renwin || !this->IsSupported(renwin) )
+    {
+    // this is an error since there's no fallback.
+    vtkErrorMacro("The required extensions are not supported.");
+    }
 
-    this->CanDoFloatingPointFrameBuffer = this->CheckFBOResources(renderer);
+  // load required extensions
+  vtkOpenGLExtensionManager *extensions = renwin->GetExtensionManager();
+  extensions->LoadExtension("GL_VERSION_1_3"); // for multitexture
+  // used GL_ARB_texture_float but nothing to load for it
+  if ( this->UseFloatingPointFrameBuffer
+     && this->CanDoFloatingPointFrameBuffer)
+    {
+    extensions->LoadExtension("GL_ARB_framebuffer_object");
+    extensions->LoadExtension("GL_ARB_draw_buffers");
     }
 }
 
-bool vtkOpenGLProjectedTetrahedraMapper::CheckFBOResources(vtkRenderer *r)
+//-----------------------------------------------------------------------------
+bool vtkOpenGLProjectedTetrahedraMapper::AllocateFBOResources(vtkRenderer *r)
 {
+  vtkOpenGLClearErrorMacro();
+
   int *size = r->GetSize();
-  if (!this->FloatingPointFrameBufferResourcesAllocated ||
-      (size[0] != this->CurrentFBOWidth) ||
-      (size[0] != this->CurrentFBOHeight))
-  {
+  if ( this->UseFloatingPointFrameBuffer
+    && this->CanDoFloatingPointFrameBuffer
+    && (!this->FloatingPointFrameBufferResourcesAllocated
+    || (size[0] != this->CurrentFBOWidth)
+    || (size[0] != this->CurrentFBOHeight)) )
+    {
     this->CurrentFBOWidth = size[0];
     this->CurrentFBOHeight = size[1];
 
+    // reserve handles for the fbo and renderbuffers
     if (!this->FloatingPointFrameBufferResourcesAllocated)
-    {
+      {
       vtkgl::GenFramebuffers(1, &this->Internals->FrameBufferObjectId);
-      vtkgl::GenRenderbuffers(2, this->Internals->RenderBufferObjectIds);
+      vtkOpenGLCheckErrorMacro("failed at glGenFramebuffers");
 
-      vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER_EXT, 0);
-      vtkgl::BindFramebuffer(vtkgl::DRAW_FRAMEBUFFER_EXT,
-                             this->Internals->FrameBufferObjectId);
+      vtkgl::GenRenderbuffers(2, this->Internals->RenderBufferObjectIds);
+      vtkOpenGLCheckErrorMacro("failed at glGenRenderBuffers");
 
       this->FloatingPointFrameBufferResourcesAllocated = true;
-    }
-    else
-    {
-      vtkgl::FramebufferRenderbuffer(vtkgl::FRAMEBUFFER_EXT,
-                                     vtkgl::COLOR_ATTACHMENT0_EXT,
-                                     vtkgl::RENDERBUFFER_EXT, 0);
-      vtkgl::FramebufferRenderbuffer(vtkgl::FRAMEBUFFER_EXT,
-                                     vtkgl::DEPTH_ATTACHMENT_EXT,
-                                     vtkgl::RENDERBUFFER_EXT, 0);
-    }
-
-    vtkgl::BindRenderbuffer(vtkgl::RENDERBUFFER_EXT,
-                            this->Internals->RenderBufferObjectIds[0]);
-    vtkgl::RenderbufferStorage(vtkgl::RENDERBUFFER_EXT,
-                               vtkgl::RGBA32F_ARB,
-                               this->CurrentFBOWidth,
-                               this->CurrentFBOHeight);
-
-    vtkgl::BindRenderbuffer(vtkgl::RENDERBUFFER_EXT,
-                            this->Internals->RenderBufferObjectIds[1]);
-    vtkgl::RenderbufferStorage(vtkgl::RENDERBUFFER_EXT,
-                               vtkgl::DEPTH_COMPONENT24,
-                               this->CurrentFBOWidth,
-                               this->CurrentFBOHeight);
-
-    vtkgl::FramebufferRenderbuffer(vtkgl::FRAMEBUFFER_EXT,
-                                   vtkgl::COLOR_ATTACHMENT0_EXT,
-                                   vtkgl::RENDERBUFFER_EXT,
-                                   this->Internals->RenderBufferObjectIds[0]);
-
-    vtkgl::FramebufferRenderbuffer(vtkgl::FRAMEBUFFER_EXT,
-                                   vtkgl::DEPTH_ATTACHMENT_EXT,
-                                   vtkgl::RENDERBUFFER_EXT,
-                                   this->Internals->RenderBufferObjectIds[1]);
-
-    GLenum status = vtkgl::CheckFramebufferStatus(vtkgl::FRAMEBUFFER_EXT);
-    if(status != vtkgl::FRAMEBUFFER_COMPLETE_EXT)
+      }
+    // handle multisampling
+    // the ARB spec says that if SAMPLE_BUFFERS is non-zero
+    // on both the READ and DRAW FBO then SAMPLES has to match,
+    // but if either has SAMPLE_BUFFERS zero then conversions
+    // are made.
+    vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER, 0);
+
+    GLint winSampleBuffers = 0;
+    glGetIntegerv(vtkgl::SAMPLE_BUFFERS, &winSampleBuffers);
+
+    GLint winSamples = 0;
+    glGetIntegerv(vtkgl::SAMPLES, &winSamples);
+
+    vtkgl::BindFramebuffer(
+          vtkgl::FRAMEBUFFER,
+          this->Internals->FrameBufferObjectId);
+
+    GLint fboSampleBuffers = 0;
+    glGetIntegerv(vtkgl::SAMPLE_BUFFERS, &fboSampleBuffers);
+
+    vtkDebugMacro(
+      << "mutisample enabled "
+      << (glIsEnabled(vtkgl::MULTISAMPLE)?"yes":"no")
+      << " winSampleBuffers=" << winSampleBuffers
+      << " winSamples=" << winSamples
+      << " fboSampleBuffers=" << fboSampleBuffers);
+
+    int fboSamples
+      = ((fboSampleBuffers >= 1)
+      && (winSampleBuffers >= 1)
+      && (winSamples >= 1))?winSamples:0;
+
+    // allocate storage for renderbuffers
+    vtkgl::BindRenderbuffer(
+          vtkgl::RENDERBUFFER,
+          this->Internals->RenderBufferObjectIds[0]);
+    vtkOpenGLCheckErrorMacro("failed at glBindRenderBuffer color");
+
+    vtkgl::RenderbufferStorageMultisample(
+          vtkgl::RENDERBUFFER,
+          fboSamples,
+          vtkgl::RGBA32F_ARB,
+          this->CurrentFBOWidth,
+          this->CurrentFBOHeight);
+    vtkOpenGLCheckErrorMacro("failed at glRenderBufferStorage color");
+
+    vtkgl::BindRenderbuffer(
+          vtkgl::RENDERBUFFER,
+          this->Internals->RenderBufferObjectIds[1]);
+    vtkOpenGLCheckErrorMacro("failed at glBindRenderBuffer depth");
+
+    vtkgl::RenderbufferStorageMultisample(
+          vtkgl::RENDERBUFFER,
+          fboSamples,
+          GL_DEPTH_COMPONENT,
+          this->CurrentFBOWidth,
+          this->CurrentFBOHeight);
+    vtkOpenGLCheckErrorMacro("failed at glRenderBufferStorage depth");
+
+    // best way to make the fbo complete: bind it for both draw+read
+    // during setup
+    vtkgl::BindFramebuffer(
+          vtkgl::FRAMEBUFFER,
+          this->Internals->FrameBufferObjectId);
+    vtkOpenGLCheckErrorMacro("failed at glBindFramebuffer");
+
+    vtkgl::FramebufferRenderbuffer(
+          vtkgl::FRAMEBUFFER,
+          vtkgl::COLOR_ATTACHMENT0,
+          vtkgl::RENDERBUFFER,
+          this->Internals->RenderBufferObjectIds[0]);
+    vtkOpenGLCheckErrorMacro("failed at glFramebufferRenderBuffer for color");
+
+    vtkgl::FramebufferRenderbuffer(
+          vtkgl::FRAMEBUFFER,
+          vtkgl::DEPTH_ATTACHMENT,
+          vtkgl::RENDERBUFFER,
+          this->Internals->RenderBufferObjectIds[1]);
+    vtkOpenGLCheckErrorMacro("failed at glFramebufferRenderBuffer for depth");
+
+    // verify that it is usable
+    GLenum status = vtkgl::CheckFramebufferStatus(vtkgl::FRAMEBUFFER);
+    if(status != vtkgl::FRAMEBUFFER_COMPLETE)
       {
-      cerr << "Can't initialize FBO\n";
+      vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER, 0);
+      vtkWarningMacro(
+        "Missing FBO support. The algorithm may produce visual artifacts.");
+      this->CanDoFloatingPointFrameBuffer = false;
       return false;
       }
-  }
+    vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER, 0);
+    this->CanDoFloatingPointFrameBuffer = true;
+    }
 
   return true;
 }
 
 //-----------------------------------------------------------------------------
-
 void vtkOpenGLProjectedTetrahedraMapper::ReleaseGraphicsResources(vtkWindow *win)
 {
+  this->Initialized = false;
+
   if (this->Internals->OpacityTexture)
     {
     glDeleteTextures(1, &this->Internals->OpacityTexture);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteTextures");
     this->Internals->OpacityTexture = 0;
     }
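
The allocation path above reduces to the standard ARB_framebuffer_object sequence: generate one framebuffer and two renderbuffers, give the renderbuffers (optionally multisampled) storage, attach them as color and depth, and verify completeness before trusting the result. Below is a minimal raw-GL sketch of that sequence, assuming a current OpenGL context and a loader such as GLEW; the function name and parameters are illustrative and not part of VTK.

    // Sketch only: the raw-GL shape of the FBO setup above, not VTK's code.
    // Assumes a live OpenGL context and GL 3.0 / ARB_framebuffer_object
    // entry points resolved by a loader such as GLEW.
    #include <GL/glew.h>

    bool AllocateFloatFBO(GLsizei width, GLsizei height, GLsizei samples,
                          GLuint *fbo, GLuint rb[2])
    {
      glGenFramebuffers(1, fbo);
      glGenRenderbuffers(2, rb);

      // color renderbuffer, optionally multisampled (samples == 0 => single sample)
      glBindRenderbuffer(GL_RENDERBUFFER, rb[0]);
      glRenderbufferStorageMultisample(GL_RENDERBUFFER, samples,
                                       GL_RGBA32F, width, height);

      // depth renderbuffer with the same sample count
      glBindRenderbuffer(GL_RENDERBUFFER, rb[1]);
      glRenderbufferStorageMultisample(GL_RENDERBUFFER, samples,
                                       GL_DEPTH_COMPONENT24, width, height);

      // attach with the fbo bound for both draw and read, then check completeness
      glBindFramebuffer(GL_FRAMEBUFFER, *fbo);
      glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0,
                                GL_RENDERBUFFER, rb[0]);
      glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_DEPTH_ATTACHMENT,
                                GL_RENDERBUFFER, rb[1]);
      bool complete =
        (glCheckFramebufferStatus(GL_FRAMEBUFFER) == GL_FRAMEBUFFER_COMPLETE);
      glBindFramebuffer(GL_FRAMEBUFFER, 0);
      return complete;
    }

Choosing samples to match the window, or falling back to zero, mirrors the fboSamples computation above and keeps the later blits between the window and the FBO compatible.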
 
@@ -233,9 +335,11 @@ void vtkOpenGLProjectedTetrahedraMapper::ReleaseGraphicsResources(vtkWindow *win
       this->FloatingPointFrameBufferResourcesAllocated = false;
 
       vtkgl::DeleteFramebuffers(1, &this->Internals->FrameBufferObjectId);
+      vtkOpenGLCheckErrorMacro("failed at glDeleteFramebuffers");
       this->Internals->FrameBufferObjectId = 0;
 
-      vtkgl::DeleteFramebuffers(2, this->Internals->RenderBufferObjectIds);
+      vtkgl::DeleteRenderbuffers(2, this->Internals->RenderBufferObjectIds);
+      vtkOpenGLCheckErrorMacro("failed at glDeleteRenderbuffers");
       this->Internals->RenderBufferObjectIds[0] = 0;
       this->Internals->RenderBufferObjectIds[1] = 0;
     }
@@ -244,11 +348,15 @@ void vtkOpenGLProjectedTetrahedraMapper::ReleaseGraphicsResources(vtkWindow *win
 }
 
 //-----------------------------------------------------------------------------
-
 void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
                                                 vtkVolume *volume)
 {
-  vtkUnstructuredGrid *input = this->GetInput();
+  vtkOpenGLClearErrorMacro();
+
+  // load required extensions
+  this->Initialize(renderer);
+
+  vtkUnstructuredGridBase *input = this->GetInput();
   vtkVolumeProperty *property = volume->GetProperty();
 
   float last_max_cell_size = this->MaxCellSize;
@@ -260,18 +368,18 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
     this->GaveError = 0;
     float max_cell_size2 = 0;
 
-    vtkCellArray *cells = input->GetCells();
-    if (!cells)
+    if (input->GetNumberOfCells() == 0)
       {
       // Apparently, the input has no cells.  Just do nothing.
       return;
       }
 
-    vtkIdType npts, *pts, i;
-    cells->InitTraversal();
-    for (i = 0; cells->GetNextCell(npts, pts); i++)
+    vtkSmartPointer<vtkCellIterator> cellIter =
+        vtkSmartPointer<vtkCellIterator>::Take(input->NewCellIterator());
+    for (cellIter->InitTraversal(); !cellIter->IsDoneWithTraversal();
+         cellIter->GoToNextCell())
       {
-      int j;
+      vtkIdType npts = cellIter->GetNumberOfPoints();
       if (npts != 4)
         {
         if (!this->GaveError)
@@ -281,13 +389,17 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
           }
         continue;
         }
-      for (j = 0; j < 6; j++)
+      vtkIdType *pts = cellIter->GetPointIds()->GetPointer(0);
+      for (int j = 0; j < 6; j++)
         {
         double p1[3], p2[3];
         input->GetPoint(pts[tet_edges[j][0]], p1);
         input->GetPoint(pts[tet_edges[j][1]], p2);
         float size2 = (float)vtkMath::Distance2BetweenPoints(p1, p2);
-        if (size2 > max_cell_size2) max_cell_size2 = size2;
+        if (size2 > max_cell_size2)
+          {
+          max_cell_size2 = size2;
+          }
         }
       }
 
@@ -297,7 +409,7 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
     // modes we have to take a lot of square roots, and a table is much faster
     // than calling the sqrt function.
     this->SqrtTableBias = (SqrtTableSize-1)/max_cell_size2;
-    for (i = 0; i < SqrtTableSize; i++)
+    for (int i = 0; i < SqrtTableSize; i++)
       {
       this->SqrtTable[i] = (float)sqrt(i/this->SqrtTableBias);
       }
@@ -307,6 +419,7 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
 
   if (renderer->GetRenderWindow()->CheckAbortStatus() || this->GaveError)
     {
+    vtkOpenGLCheckErrorMacro("failed during Render");
     return;
     }
 
@@ -320,7 +433,12 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
       {
       glGenTextures(1, &this->Internals->OpacityTexture);
       }
+    vtkgl::ActiveTexture(vtkgl::TEXTURE0);
     glBindTexture(GL_TEXTURE_2D, this->Internals->OpacityTexture);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
+    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
 
     float unit_distance = property->GetScalarOpacityUnitDistance();
 
@@ -328,11 +446,6 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
     float *texture = new float[TEXRES*TEXRES];
     for (int depthi = 0; depthi < TEXRES; depthi++)
       {
-      if (renderer->GetRenderWindow()->CheckAbortStatus())
-        {
-        delete[] texture;
-        return;
-        }
       float depth = depthi*this->MaxCellSize/(TEXRES);
       for (int attenuationi = 0; attenuationi < TEXRES; attenuationi++)
         {
@@ -345,17 +458,13 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
                  GL_FLOAT, texture);
     delete[] texture;
 
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER, GL_LINEAR);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, GL_CLAMP);
-    glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, GL_CLAMP);
-
     glBindTexture(GL_TEXTURE_2D, 0);
 
     this->OpacityTextureTime.Modified();
     }
   if (renderer->GetRenderWindow()->CheckAbortStatus())
     {
+    vtkOpenGLCheckErrorMacro("failed during Render");
     return;
     }
 
@@ -372,6 +481,7 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
     if (!scalars)
       {
       vtkErrorMacro(<< "Can't use projected tetrahedra without scalars!");
+      vtkOpenGLCheckErrorMacro("failed during Render");
       return;
       }
 
@@ -383,6 +493,7 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
     }
   if (renderer->GetRenderWindow()->CheckAbortStatus())
     {
+    vtkOpenGLCheckErrorMacro("failed during Render");
     return;
     }
 
@@ -392,6 +503,7 @@ void vtkOpenGLProjectedTetrahedraMapper::Render(vtkRenderer *renderer,
 
   this->Timer->StopTimer();
   this->TimeToDraw = this->Timer->GetElapsedTime();
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 //-----------------------------------------------------------------------------
@@ -486,34 +598,56 @@ inline float vtkOpenGLProjectedTetrahedraMapper::GetCorrectedDepth(
 void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer,
                                                      vtkVolume *volume)
 {
-  if (! Initialized)
-    {
-    Initialize(renderer);
-    }
+  vtkOpenGLClearErrorMacro();
+
+  // after mucking about with FBO bindings be sure
+  // we're saving the default fbo attributes/blend function
+  glPushAttrib(GL_COLOR_BUFFER_BIT);
+  vtkOpenGLCheckErrorMacro("failed at glPushAttrib");
 
-  if (this->CanDoFloatingPointFrameBuffer && this->UseFloatingPointFrameBuffer)
+  this->AllocateFBOResources(renderer);
+
+  if (this->UseFloatingPointFrameBuffer
+    && this->CanDoFloatingPointFrameBuffer)
     {
-    this->CanDoFloatingPointFrameBuffer = CheckFBOResources(renderer);
-    if (this->CanDoFloatingPointFrameBuffer)
+    // bind draw+read to set it up
+    vtkgl::BindFramebuffer(
+          vtkgl::FRAMEBUFFER,
+          this->Internals->FrameBufferObjectId);
+
+    glReadBuffer(GL_NONE);
+    GLenum dbuf = vtkgl::COLOR_ATTACHMENT0;
+    vtkgl::DrawBuffersARB(1, &dbuf);
+
+    GLenum status = vtkgl::CheckFramebufferStatus(vtkgl::DRAW_FRAMEBUFFER);
+    if (status!=vtkgl::FRAMEBUFFER_COMPLETE)
       {
-      vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER_EXT, 0);
-      vtkgl::BindFramebuffer(vtkgl::DRAW_FRAMEBUFFER_EXT,
-                             this->Internals->FrameBufferObjectId);
-
-      glGetIntegerv(GL_DRAW_BUFFER, &db);
-      glReadBuffer(db);
-
-      vtkgl::BlitFramebuffer(0, 0,
-                             this->CurrentFBOWidth, this->CurrentFBOHeight,
-                             0, 0,
-                             this->CurrentFBOWidth, this->CurrentFBOHeight,
-                             GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT,
-                             GL_NEAREST);
+      vtkErrorMacro("FBO is incomplete " << status);
       }
+
+    // read from default
+    vtkgl::BindFramebuffer(vtkgl::READ_FRAMEBUFFER, 0);
+    // draw to fbo
+    vtkgl::BindFramebuffer(vtkgl::DRAW_FRAMEBUFFER,
+                           this->Internals->FrameBufferObjectId);
+
+    vtkgl::BlitFramebuffer(0, 0,
+                           this->CurrentFBOWidth, this->CurrentFBOHeight,
+                           0, 0,
+                           this->CurrentFBOWidth, this->CurrentFBOHeight,
+                           GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT,
+                           GL_NEAREST);
+
+    vtkOpenGLCheckErrorMacro("failed at glBlitFramebuffer");
     }
 
-  vtkUnstructuredGrid *input = this->GetInput();
+  // TODO:
+  // There are some caching optimizations that could be used
+  // here to skip various expensive operations (e.g. sorting
+  // cells could be skipped if the input data and MVP matrices
+  // haven't changed).
 
+  vtkUnstructuredGridBase *input = this->GetInput();
   this->VisibilitySort->SetInput(input);
   this->VisibilitySort->SetDirectionToBackToFront();
   this->VisibilitySort->SetModelTransform(volume->GetMatrix());
@@ -568,7 +702,6 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
 
     linear_depth_correction = sqrt(vtkMath::Distance2BetweenPoints(pos1, pos2));
     }
-
   // Transform all the points.
   vtkProjectedTetrahedraMapper::TransformPoints(input->GetPoints(),
                                                 projection_mat, modelview_mat,
@@ -591,21 +724,20 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
   glPolygonMode(GL_FRONT_AND_BACK, GL_FILL);
   glDisable(GL_CULL_FACE);
 
-  // save the default blend function.
-  glPushAttrib(GL_COLOR_BUFFER_BIT);
-
   glBlendFunc(GL_ONE, GL_ONE_MINUS_SRC_ALPHA);
 
   // Establish vertex arrays.
-  float tet_points[5*3];
+  // tets have 4 points; the 5th entry here is used
+  // to insert an extra point where projected edges intersect
+  float tet_points[5*3] = {0.0f};
   glVertexPointer(3, GL_FLOAT, 0, tet_points);
   glEnableClientState(GL_VERTEX_ARRAY);
 
-  unsigned char tet_colors[5*3];
+  unsigned char tet_colors[5*3] = {'\0'};
   glColorPointer(3, GL_UNSIGNED_BYTE, 0, tet_colors);
   glEnableClientState(GL_COLOR_ARRAY);
 
-  float tet_texcoords[5*2];
+  float tet_texcoords[5*2] = {0.0f};
   glTexCoordPointer(2, GL_FLOAT, 0, tet_texcoords);
   glEnableClientState(GL_TEXTURE_COORD_ARRAY);
 
@@ -617,10 +749,9 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
   glLoadIdentity();
 
   unsigned char *colors = this->Colors->GetPointer(0);
-  vtkIdType *cells = input->GetCells()->GetPointer();
   vtkIdType totalnumcells = input->GetNumberOfCells();
   vtkIdType numcellsrendered = 0;
-
+  vtkNew<vtkIdList> cellPointIds;
   // Let's do it!
   for (vtkIdTypeArray *sorted_cell_ids = this->VisibilitySort->GetNextCells();
        sorted_cell_ids != NULL;
@@ -636,6 +767,7 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
     for (vtkIdType i = 0; i < num_cell_ids; i++)
       {
       vtkIdType cell = cell_ids[i];
+      input->GetCellPoints(cell, cellPointIds.GetPointer());
       int j;
 
       // Get the data for the tetrahedra.
@@ -643,7 +775,7 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
         {
         // Assuming we only have tetrahedra, each entry in cells has 5
         // components.
-        const float *p = points + 3*cells[5*cell + j + 1];
+        const float *p = points + 3 * cellPointIds->GetId(j);
         tet_points[j*3 + 0] = p[0];
         tet_points[j*3 + 1] = p[1];
         tet_points[j*3 + 2] = p[2];
@@ -655,13 +787,14 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
           }
         else
           {
-          c = colors + 4*cells[5*cell + j + 1];
+          c = colors + 4 * cellPointIds->GetId(j);
           }
+
         tet_colors[j*3 + 0] = c[0];
         tet_colors[j*3 + 1] = c[1];
         tet_colors[j*3 + 2] = c[2];
 
-        tet_texcoords[j*2 + 0] = (float)c[3]/255;
+        tet_texcoords[j*2 + 0] = static_cast<float>(c[3])/255.0f;
         tet_texcoords[j*2 + 1] = 0;
         }
 
@@ -778,24 +911,27 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
         tet_points[3*4 + 2] = P1[2] + alpha*A[2];
 
         // Find depth at intersection.
-        float depth = GetCorrectedDepth(tet_points[3*4 + 0],
-                                        tet_points[3*4 + 1],
-                                        tet_points[3*4 + 2],
-                                        P3[2] + beta*B[2],
-                                        inverse_projection_mat,
-                                        use_linear_depth_correction,
-                                        linear_depth_correction);
+        float depth = this->GetCorrectedDepth(
+              tet_points[3*4 + 0],
+              tet_points[3*4 + 1],
+              tet_points[3*4 + 2],
+              P3[2] + beta*B[2],
+              inverse_projection_mat,
+              use_linear_depth_correction,
+              linear_depth_correction);
 
         // Find color at intersection.
-        tet_colors[3*4 + 0] =
-          (unsigned char)(0.5f*(  C1[0] + alpha*(C2[0]-C1[0])
-                                + C3[0] +  beta*(C4[0]-C3[0]) ));
-        tet_colors[3*4 + 1] =
-          (unsigned char)(0.5f*(  C1[1] + alpha*(C2[1]-C1[1])
-                                + C3[1] +  beta*(C4[1]-C3[1]) ));
-        tet_colors[3*4 + 2] =
-          (unsigned char)(0.5f*(  C1[2] + alpha*(C2[2]-C1[2])
-                                + C3[2] +  beta*(C4[2]-C3[2]) ));
+        tet_colors[3*4 + 0] = static_cast<unsigned char>
+              (0.5f*(C1[0] + alpha*(C2[0]-C1[0])
+              + C3[0] + beta*(C4[0]-C3[0])));
+
+        tet_colors[3*4 + 1] = static_cast<unsigned char>
+              (0.5f*(C1[1] + alpha*(C2[1]-C1[1])
+              + C3[1] + beta*(C4[1]-C3[1])));
+
+        tet_colors[3*4 + 2] = static_cast<unsigned char>
+              (0.5f*(C1[2] + alpha*(C2[2]-C1[2])
+              + C3[2] + beta*(C4[2]-C3[2])));
 
 //         tet_colors[3*0 + 0] = 255;
 //         tet_colors[3*0 + 1] = 0;
@@ -821,16 +957,15 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
         tet_texcoords[2*4 + 1] = depth/this->MaxCellSize;
 
         // Establish the order in which the points should be rendered.
-        unsigned char gl_indices[6];
-        gl_indices[0] = 4;
-        gl_indices[1] = segment1[0];
-        gl_indices[2] = segment2[0];
-        gl_indices[3] = segment1[1];
-        gl_indices[4] = segment2[1];
-        gl_indices[5] = segment1[0];
-
+        unsigned char indices[6];
+        indices[0] = 4;
+        indices[1] = segment1[0];
+        indices[2] = segment2[0];
+        indices[3] = segment1[1];
+        indices[4] = segment2[1];
+        indices[5] = segment1[0];
         // Render
-        glDrawElements(GL_TRIANGLE_FAN, 6, GL_UNSIGNED_BYTE, gl_indices);
+        glDrawElements(GL_TRIANGLE_FAN, 6, GL_UNSIGNED_BYTE, indices);
         }
       else
         {
@@ -891,34 +1026,51 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
         T2[1] = depth/this->MaxCellSize;
 
         // Establish the order in which the points should be rendered.
-        unsigned char gl_indices[5];
-        gl_indices[0] = segment1[1];
-        gl_indices[1] = segment1[0];
-        gl_indices[2] = segment2[0];
-        gl_indices[3] = segment2[1];
-        gl_indices[4] = segment1[0];
-
+        unsigned char indices[5];
+        indices[0] = segment1[1];
+        indices[1] = segment1[0];
+        indices[2] = segment2[0];
+        indices[3] = segment2[1];
+        indices[4] = segment1[0];
         // Render
-        glDrawElements(GL_TRIANGLE_FAN, 5, GL_UNSIGNED_BYTE, gl_indices);
+        glDrawElements(GL_TRIANGLE_FAN, 5, GL_UNSIGNED_BYTE, indices);
         }
       }
     numcellsrendered += num_cell_ids;
     }
 
-  if (this->CanDoFloatingPointFrameBuffer && this->UseFloatingPointFrameBuffer)
-  {
-    vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER_EXT, 0);
-    vtkgl::BindFramebuffer(vtkgl::READ_FRAMEBUFFER_EXT,
-                           this->Internals->FrameBufferObjectId);
+  if (this->UseFloatingPointFrameBuffer
+    && this->CanDoFloatingPointFrameBuffer)
+    {
+    // copy from our fbo to the default one
+    vtkgl::BindFramebuffer(
+          vtkgl::FRAMEBUFFER,
+          this->Internals->FrameBufferObjectId);
 
-    glDrawBuffer(db);
+    glReadBuffer(vtkgl::COLOR_ATTACHMENT0);
+    glDrawBuffer(GL_NONE);
+
+    GLenum status = vtkgl::CheckFramebufferStatus(vtkgl::READ_FRAMEBUFFER);
+    if (status!=vtkgl::FRAMEBUFFER_COMPLETE)
+      {
+      vtkErrorMacro("FBO is incomplete " << status);
+      }
+
+    // read from fbo
+    vtkgl::BindFramebuffer(vtkgl::READ_FRAMEBUFFER,
+                           this->Internals->FrameBufferObjectId);
+    // draw to default fbo
+    vtkgl::BindFramebuffer(vtkgl::DRAW_FRAMEBUFFER, 0);
 
     vtkgl::BlitFramebuffer(0, 0, this->CurrentFBOWidth, this->CurrentFBOHeight,
                          0, 0, this->CurrentFBOWidth, this->CurrentFBOHeight,
                          GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT, GL_NEAREST);
 
-    vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER_EXT, 0);
-  }
+    vtkOpenGLCheckErrorMacro("failed at glBlitFramebuffer");
+
+    // restore default fbo for both read+draw
+    vtkgl::BindFramebuffer(vtkgl::FRAMEBUFFER, 0);
+    }
 
   // Restore OpenGL state.
   glMatrixMode(GL_PROJECTION);
@@ -932,6 +1084,7 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
 
   // Restore the blend function.
   glPopAttrib();
+  vtkOpenGLCheckErrorMacro("failed at glPopAttrib");
 
   glBindTexture(GL_TEXTURE_2D, 0);
   glDisable(GL_TEXTURE_2D);
@@ -939,5 +1092,6 @@ void vtkOpenGLProjectedTetrahedraMapper::ProjectTetrahedra(vtkRenderer *renderer
   glDepthMask(GL_TRUE);
   glEnable(GL_LIGHTING);
 
+  vtkOpenGLCheckErrorMacro("failed after ProjectTetrahedra");
   this->UpdateProgress(1.0);
 }
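
For context, the render path above brackets the geometry pass with two blits: before drawing, the default framebuffer's color and depth are copied into the float FBO so compositing starts from what is already on screen; after drawing, the result is copied back. A compressed raw-GL sketch of that bracket, under the same assumptions as the allocation sketch earlier (a live context and a GLEW-style loader); it omits the completeness checks and read/draw buffer selection the mapper also performs.

    // Sketch only: the blit bracket around the geometry pass, not VTK's code.
    #include <GL/glew.h>

    void BlitBracket(GLuint fbo, GLsizei w, GLsizei h, bool intoFbo)
    {
      // read from one framebuffer, draw into the other (0 is the default framebuffer)
      glBindFramebuffer(GL_READ_FRAMEBUFFER, intoFbo ? 0 : fbo);
      glBindFramebuffer(GL_DRAW_FRAMEBUFFER, intoFbo ? fbo : 0);

      // copy color and depth 1:1; GL_NEAREST is mandatory when depth is in the mask
      glBlitFramebuffer(0, 0, w, h, 0, 0, w, h,
                        GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT, GL_NEAREST);

      // restore the default framebuffer for both read and draw
      glBindFramebuffer(GL_FRAMEBUFFER, 0);
    }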
diff --git a/Rendering/VolumeOpenGL/vtkOpenGLProjectedTetrahedraMapper.h b/Rendering/VolumeOpenGL/vtkOpenGLProjectedTetrahedraMapper.h
index fef6d38..b43e24b 100644
--- a/Rendering/VolumeOpenGL/vtkOpenGLProjectedTetrahedraMapper.h
+++ b/Rendering/VolumeOpenGL/vtkOpenGLProjectedTetrahedraMapper.h
@@ -39,6 +39,8 @@
 class vtkVisibilitySort;
 class vtkUnsignedCharArray;
 class vtkFloatArray;
+class vtkRenderWindow;
+class vtkOpenGLRenderWindow;
 
 class VTKRENDERINGVOLUMEOPENGL_EXPORT vtkOpenGLProjectedTetrahedraMapper
   : public vtkProjectedTetrahedraMapper
@@ -60,6 +62,11 @@ public:
   vtkGetMacro(UseFloatingPointFrameBuffer,bool);
   vtkBooleanMacro(UseFloatingPointFrameBuffer,bool);
 
+  // Description:
+  // Return true if the rendering context provides
+  // the necessary functionality to use this class.
+  virtual bool IsSupported(vtkRenderWindow *context);
+
 protected:
   vtkOpenGLProjectedTetrahedraMapper();
   ~vtkOpenGLProjectedTetrahedraMapper();
@@ -67,7 +74,7 @@ protected:
   void Initialize(vtkRenderer *ren);
   bool Initialized;
   int  CurrentFBOWidth, CurrentFBOHeight;
-  bool CheckFBOResources(vtkRenderer *ren);
+  bool AllocateFBOResources(vtkRenderer *ren);
   bool CanDoFloatingPointFrameBuffer;
   bool FloatingPointFrameBufferResourcesAllocated;
   bool UseFloatingPointFrameBuffer;
@@ -101,7 +108,6 @@ private:
   void operator=(const vtkOpenGLProjectedTetrahedraMapper &);  // Not Implemented.
 
   class vtkInternals;
-
   vtkInternals *Internals;
 };
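
With IsSupported() exposed, an application can probe for the floating point framebuffer path before opting in. A hedged usage sketch follows; the surrounding pipeline wiring is omitted and the helper function is illustrative, not part of VTK.

    // Sketch only: probing IsSupported() before enabling the float FBO path.
    #include "vtkOpenGLProjectedTetrahedraMapper.h"
    #include "vtkRenderWindow.h"

    void ConfigureMapper(vtkOpenGLProjectedTetrahedraMapper *mapper,
                         vtkRenderWindow *renWin)
    {
      // the OpenGL context generally needs to exist before capabilities
      // can be queried, so force window creation first
      renWin->Render();
      if (mapper->IsSupported(renWin))
        {
        // opt in to the high precision compositing path; the mapper warns and
        // falls back if the FBO later turns out to be incomplete
        mapper->UseFloatingPointFrameBufferOn();
        }
      else
        {
        mapper->UseFloatingPointFrameBufferOff();
        }
    }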
 
diff --git a/Rendering/VolumeOpenGL/vtkOpenGLRayCastImageDisplayHelper.cxx b/Rendering/VolumeOpenGL/vtkOpenGLRayCastImageDisplayHelper.cxx
index 2ad9d9c..e94d0db 100644
--- a/Rendering/VolumeOpenGL/vtkOpenGLRayCastImageDisplayHelper.cxx
+++ b/Rendering/VolumeOpenGL/vtkOpenGLRayCastImageDisplayHelper.cxx
@@ -23,6 +23,7 @@
 #include "vtkFixedPointRayCastImage.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 #include <math.h>
 
@@ -96,6 +97,8 @@ void vtkOpenGLRayCastImageDisplayHelper::RenderTextureInternal( vtkVolume *vol,
                                                                 int imageScalarType,
                                                                 void *image )
 {
+  vtkOpenGLClearErrorMacro();
+
   int i;
   float offsetX, offsetY;
   float xMinOffset, xMaxOffset, yMinOffset, yMaxOffset;
@@ -203,6 +206,8 @@ void vtkOpenGLRayCastImageDisplayHelper::RenderTextureInternal( vtkVolume *vol,
   glPixelTransferf( GL_BLUE_SCALE,   this->PixelScale );
   glPixelTransferf( GL_ALPHA_SCALE,  this->PixelScale );
 
+  glEnable( GL_BLEND );
+
   if ( this->PreMultipliedColors )
     {
     // Values in the texture map have already been pre-multiplied by alpha
@@ -489,6 +494,7 @@ void vtkOpenGLRayCastImageDisplayHelper::RenderTextureInternal( vtkVolume *vol,
     // Restore state
     glPopAttrib();
 
+    vtkOpenGLCheckErrorMacro("failed after RenderTextureInternal");
     return;
     }
 
@@ -544,6 +550,8 @@ void vtkOpenGLRayCastImageDisplayHelper::RenderTextureInternal( vtkVolume *vol,
 
   // Restore state
   glPopAttrib();
+
+  vtkOpenGLCheckErrorMacro("failed after RenderTextureInternal");
 }
 
 void vtkOpenGLRayCastImageDisplayHelper::PrintSelf(ostream& os, vtkIndent indent)
diff --git a/Rendering/VolumeOpenGL/vtkOpenGLVolumeTextureMapper2D.cxx b/Rendering/VolumeOpenGL/vtkOpenGLVolumeTextureMapper2D.cxx
index 095a030..8b4604a 100644
--- a/Rendering/VolumeOpenGL/vtkOpenGLVolumeTextureMapper2D.cxx
+++ b/Rendering/VolumeOpenGL/vtkOpenGLVolumeTextureMapper2D.cxx
@@ -20,6 +20,7 @@
 #include "vtkVolume.h"
 
 #include "vtkOpenGL.h"
+#include "vtkOpenGLError.h"
 
 vtkStandardNewMacro(vtkOpenGLVolumeTextureMapper2D);
 
@@ -33,6 +34,8 @@ vtkOpenGLVolumeTextureMapper2D::~vtkOpenGLVolumeTextureMapper2D()
 
 void vtkOpenGLVolumeTextureMapper2D::Render(vtkRenderer *ren, vtkVolume *vol)
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkMatrix4x4       *matrix;
   double             matrixForGL[16];
   int                i, numClipPlanes = 0;
@@ -118,6 +121,8 @@ void vtkOpenGLVolumeTextureMapper2D::Render(vtkRenderer *ren, vtkVolume *vol)
     {
     this->TimeToDraw = 0.0001;
     }
+
+  vtkOpenGLCheckErrorMacro("failed after Render");
 }
 
 void vtkOpenGLVolumeTextureMapper2D::RenderQuads( int numQuads,
@@ -126,6 +131,8 @@ void vtkOpenGLVolumeTextureMapper2D::RenderQuads( int numQuads,
                                                   unsigned char *texture,
                                                   int size[2], int reverseFlag )
 {
+  vtkOpenGLClearErrorMacro();
+
 #ifdef GL_VERSION_1_1
   glTexImage2D( GL_TEXTURE_2D, 0, GL_RGBA8, size[0], size[1],
                 0, GL_RGBA, GL_UNSIGNED_BYTE, texture );
@@ -168,6 +175,8 @@ void vtkOpenGLVolumeTextureMapper2D::RenderQuads( int numQuads,
     }
 
   glEnd();
+
+  vtkOpenGLCheckErrorMacro("failed after RenderQuads");
 }
 
 // Print the vtkOpenGLVolumeTextureMapper2D
diff --git a/Rendering/VolumeOpenGL/vtkOpenGLVolumeTextureMapper3D.cxx b/Rendering/VolumeOpenGL/vtkOpenGLVolumeTextureMapper3D.cxx
index 1b571c8..787a457 100644
--- a/Rendering/VolumeOpenGL/vtkOpenGLVolumeTextureMapper3D.cxx
+++ b/Rendering/VolumeOpenGL/vtkOpenGLVolumeTextureMapper3D.cxx
@@ -31,6 +31,7 @@
 #include "vtkOpenGLExtensionManager.h"
 #include "vtkgl.h"
 #include "vtkOpenGLRenderWindow.h"
+#include "vtkOpenGLError.h"
 
 #include "vtkVolumeTextureMapper3D_OneComponentShadeFP.h"
 #include "vtkVolumeTextureMapper3D_OneComponentNoShadeFP.h"
@@ -99,6 +100,7 @@ void vtkOpenGLVolumeTextureMapper3D::Render(vtkRenderer *ren, vtkVolume *vol)
     return;
     }
 
+  vtkOpenGLClearErrorMacro();
 
   vtkMatrix4x4       *matrix;
   double             matrixForGL[16];
@@ -162,7 +164,7 @@ void vtkOpenGLVolumeTextureMapper3D::Render(vtkRenderer *ren, vtkVolume *vol)
     this->InternalRGBA=GL_RGBA8;
     }
 
-  vtkGraphicErrorMacro(ren->GetRenderWindow(),"Before actual render method");
+  vtkOpenGLCheckErrorMacro("Before actual render method");
   switch ( this->RenderMethod )
     {
     case vtkVolumeTextureMapper3D::NVIDIA_METHOD:
@@ -183,6 +185,7 @@ void vtkOpenGLVolumeTextureMapper3D::Render(vtkRenderer *ren, vtkVolume *vol)
   glFlush();
   glFinish();
 
+  vtkOpenGLCheckErrorMacro("failed after Render");
 
   this->Timer->StopTimer();
 
@@ -199,6 +202,8 @@ void vtkOpenGLVolumeTextureMapper3D::Render(vtkRenderer *ren, vtkVolume *vol)
 void vtkOpenGLVolumeTextureMapper3D::RenderFP(vtkRenderer *ren,
                                               vtkVolume *vol)
 {
+  vtkOpenGLClearErrorMacro();
+
   glAlphaFunc (GL_GREATER, static_cast<GLclampf>(0));
   glEnable (GL_ALPHA_TEST);
 
@@ -253,10 +258,14 @@ void vtkOpenGLVolumeTextureMapper3D::RenderFP(vtkRenderer *ren,
   vtkgl::ActiveTexture( vtkgl::TEXTURE0);
   glDisable( GL_TEXTURE_2D );
   glDisable( vtkgl::TEXTURE_3D );
+
+  vtkOpenGLCheckErrorMacro("failed after RenderFP");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::RenderNV( vtkRenderer *ren, vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   glAlphaFunc (GL_GREATER, static_cast<GLclampf>(0));
   glEnable (GL_ALPHA_TEST);
 
@@ -315,6 +324,8 @@ void vtkOpenGLVolumeTextureMapper3D::RenderNV( vtkRenderer *ren, vtkVolume *vol
   glDisable( vtkgl::TEXTURE_SHADER_NV );
 
   glDisable(vtkgl::REGISTER_COMBINERS_NV);
+
+  vtkOpenGLCheckErrorMacro("failed after RenderNV");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::DeleteTextureIndex( GLuint *index )
@@ -324,6 +335,7 @@ void vtkOpenGLVolumeTextureMapper3D::DeleteTextureIndex( GLuint *index )
     GLuint tempIndex;
     tempIndex = *index;
     glDeleteTextures(1, &tempIndex);
+    vtkOpenGLCheckErrorMacro("failed at glDeleteTextures");
     *index = 0;
     }
 }
@@ -346,6 +358,8 @@ void vtkOpenGLVolumeTextureMapper3D::RenderPolygons( vtkRenderer *ren,
     return;
     }
 
+  vtkOpenGLClearErrorMacro();
+
   double bounds[27][6];
   float distance2[27];
 
@@ -523,10 +537,14 @@ void vtkOpenGLVolumeTextureMapper3D::RenderPolygons( vtkRenderer *ren,
       glEnd();
       }
     }
+
+  vtkOpenGLCheckErrorMacro("failed after RenderPolygons");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::Setup3DTextureParameters( vtkVolumeProperty *property )
 {
+  vtkOpenGLClearErrorMacro();
+
   if ( property->GetInterpolationType() == VTK_NEAREST_INTERPOLATION )
     {
     glTexParameterf( vtkgl::TEXTURE_3D, GL_TEXTURE_MIN_FILTER, GL_NEAREST );
@@ -539,11 +557,15 @@ void vtkOpenGLVolumeTextureMapper3D::Setup3DTextureParameters( vtkVolumeProperty
     }
   glTexParameterf( vtkgl::TEXTURE_3D, GL_TEXTURE_WRAP_S, GL_CLAMP );
   glTexParameterf( vtkgl::TEXTURE_3D, GL_TEXTURE_WRAP_T, GL_CLAMP );
+
+  vtkOpenGLCheckErrorMacro("failed after Setup3DTextureParameters");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::SetupOneIndependentTextures( vtkRenderer *vtkNotUsed(ren),
                     vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkgl::ActiveTexture( vtkgl::TEXTURE0 );
   glDisable( GL_TEXTURE_2D );
   glEnable( vtkgl::TEXTURE_3D );
@@ -626,12 +648,16 @@ void vtkOpenGLVolumeTextureMapper3D::SetupOneIndependentTextures( vtkRenderer *v
     }
 
   glBindTexture(GL_TEXTURE_2D, this->ColorLookupIndex);
+
+  vtkOpenGLCheckErrorMacro("failed after SetupOneIndependentTextures");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::SetupRegisterCombinersNoShadeNV( vtkRenderer *vtkNotUsed(ren),
                   vtkVolume *vtkNotUsed(vol),
                   int components )
 {
+  vtkOpenGLClearErrorMacro();
+
   if ( components < 3 )
     {
     vtkgl::ActiveTexture(vtkgl::TEXTURE2);
@@ -669,12 +695,16 @@ void vtkOpenGLVolumeTextureMapper3D::SetupRegisterCombinersNoShadeNV( vtkRendere
     {
     vtkgl::FinalCombinerInputNV(vtkgl::VARIABLE_G_NV, vtkgl::TEXTURE3, vtkgl::UNSIGNED_IDENTITY_NV, GL_ALPHA);
     }
+
+  vtkOpenGLCheckErrorMacro("failed after SetupRegisterCombinersNoShadeNV");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::SetupRegisterCombinersShadeNV( vtkRenderer *ren,
                       vtkVolume *vol,
                       int components )
 {
+  vtkOpenGLClearErrorMacro();
+
   if ( components == 1 )
     {
     vtkgl::ActiveTexture(vtkgl::TEXTURE3);
@@ -922,6 +952,7 @@ void vtkOpenGLVolumeTextureMapper3D::SetupRegisterCombinersShadeNV( vtkRenderer
                                 vtkgl::UNSIGNED_IDENTITY_NV, GL_ALPHA);
     }
 
+  vtkOpenGLCheckErrorMacro("failed after SetupRegisterCombinersShadeNV");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::RenderOneIndependentNoShadeNV(
@@ -960,6 +991,8 @@ void vtkOpenGLVolumeTextureMapper3D::SetupTwoDependentTextures(
   vtkRenderer *vtkNotUsed(ren),
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkgl::ActiveTexture( vtkgl::TEXTURE0 );
   glDisable( GL_TEXTURE_2D );
   glEnable( vtkgl::TEXTURE_3D );
@@ -1069,6 +1102,8 @@ void vtkOpenGLVolumeTextureMapper3D::SetupTwoDependentTextures(
 
   vtkgl::ActiveTexture( vtkgl::TEXTURE3 );
   glBindTexture(GL_TEXTURE_2D, this->AlphaLookupIndex);
+
+  vtkOpenGLCheckErrorMacro("failed after SetupTwoDependentTextures");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::RenderTwoDependentNoShadeNV(
@@ -1105,6 +1140,8 @@ void vtkOpenGLVolumeTextureMapper3D::SetupFourDependentTextures(
   vtkRenderer *vtkNotUsed(ren),
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   vtkgl::ActiveTexture( vtkgl::TEXTURE0 );
   glDisable( GL_TEXTURE_2D );
   glEnable( vtkgl::TEXTURE_3D );
@@ -1211,6 +1248,8 @@ void vtkOpenGLVolumeTextureMapper3D::SetupFourDependentTextures(
 
   vtkgl::ActiveTexture( vtkgl::TEXTURE3 );
   glBindTexture(GL_TEXTURE_2D, this->AlphaLookupIndex);
+
+  vtkOpenGLCheckErrorMacro("failed after SetupFourDependentTextures");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::RenderFourDependentNoShadeNV(
@@ -1247,6 +1286,8 @@ void vtkOpenGLVolumeTextureMapper3D::RenderOneIndependentNoShadeFP(
   vtkRenderer *ren,
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   glEnable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   GLuint fragmentProgram;
@@ -1271,12 +1312,16 @@ void vtkOpenGLVolumeTextureMapper3D::RenderOneIndependentNoShadeFP(
   glDisable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   vtkgl::DeleteProgramsARB( 1, &fragmentProgram );
+
+  vtkOpenGLCheckErrorMacro("failed after RenderOneIndependentNoShadeFP");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::RenderOneIndependentShadeFP(
   vtkRenderer *ren,
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   glEnable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   GLuint fragmentProgram;
@@ -1302,12 +1347,16 @@ void vtkOpenGLVolumeTextureMapper3D::RenderOneIndependentShadeFP(
   glDisable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   vtkgl::DeleteProgramsARB( 1, &fragmentProgram );
+
+  vtkOpenGLCheckErrorMacro("failed after RenderOneIndependentShadeFP");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::RenderTwoDependentNoShadeFP(
   vtkRenderer *ren,
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   glEnable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   GLuint fragmentProgram;
@@ -1331,6 +1380,8 @@ void vtkOpenGLVolumeTextureMapper3D::RenderTwoDependentNoShadeFP(
   glDisable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   vtkgl::DeleteProgramsARB( 1, &fragmentProgram );
+
+  vtkOpenGLCheckErrorMacro("failed after RenderTwoDependentNoShadeFP");
 }
 
 
@@ -1338,6 +1389,8 @@ void vtkOpenGLVolumeTextureMapper3D::RenderTwoDependentShadeFP(
   vtkRenderer *ren,
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   glEnable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   GLuint fragmentProgram;
@@ -1362,12 +1415,16 @@ void vtkOpenGLVolumeTextureMapper3D::RenderTwoDependentShadeFP(
   glDisable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   vtkgl::DeleteProgramsARB( 1, &fragmentProgram );
+
+  vtkOpenGLCheckErrorMacro("failed after RenderTwoDependentShadeFP");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::RenderFourDependentNoShadeFP(
   vtkRenderer *ren,
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   glEnable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   GLuint fragmentProgram;
@@ -1391,12 +1448,16 @@ void vtkOpenGLVolumeTextureMapper3D::RenderFourDependentNoShadeFP(
   glDisable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   vtkgl::DeleteProgramsARB( 1, &fragmentProgram );
+
+  vtkOpenGLCheckErrorMacro("failed after RenderFourDependentNoShadeFP");
 }
 
 void vtkOpenGLVolumeTextureMapper3D::RenderFourDependentShadeFP(
   vtkRenderer *ren,
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   glEnable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   GLuint fragmentProgram;
@@ -1421,6 +1482,8 @@ void vtkOpenGLVolumeTextureMapper3D::RenderFourDependentShadeFP(
   glDisable( vtkgl::FRAGMENT_PROGRAM_ARB );
 
   vtkgl::DeleteProgramsARB( 1, &fragmentProgram );
+
+  vtkOpenGLCheckErrorMacro("failed after RenderFourDependentShadeFP");
 }
 
 
@@ -1562,6 +1625,8 @@ void vtkOpenGLVolumeTextureMapper3D::SetupProgramLocalsForShadingFP(
   vtkRenderer *ren,
   vtkVolume *vol )
 {
+  vtkOpenGLClearErrorMacro();
+
   GLfloat lightDirection[2][4];
   GLfloat lightDiffuseColor[2][4];
   GLfloat lightSpecularColor[2][4];
@@ -1727,6 +1792,8 @@ void vtkOpenGLVolumeTextureMapper3D::SetupProgramLocalsForShadingFP(
 
   vtkgl::ProgramLocalParameter4fARB( vtkgl::FRAGMENT_PROGRAM_ARB, 6,
                                      2.0, -1.0, 0.0, 0.0 );
+
+  vtkOpenGLCheckErrorMacro("failed after SetupProgramLocalsForShadingFP");
 }
 
 int  vtkOpenGLVolumeTextureMapper3D::IsRenderSupported(
@@ -1738,6 +1805,8 @@ int  vtkOpenGLVolumeTextureMapper3D::IsRenderSupported(
     this->Initialize(r);
     }
 
+  // NO_METHOD occurs when required OpenGL extensions are
+  // not found during initialization
   if ( this->RenderMethod == vtkVolumeTextureMapper3D::NO_METHOD )
     {
     return 0;
@@ -1805,54 +1874,40 @@ void vtkOpenGLVolumeTextureMapper3D::Initialize(vtkRenderer *r)
       }
     }
 
-  const char *gl_version=
-    reinterpret_cast<const char *>(glGetString(GL_VERSION));
-  const char *mesa_version=strstr(gl_version,"Mesa");
-
-
-  // Workaround for broken Mesa
-  if(mesa_version!=0) // any Mesa
-    {
-    this->SupportsCompressedTexture=false;
-    }
-
-  this->SupportsNonPowerOfTwoTextures=
-        extensions->ExtensionSupported("GL_VERSION_2_0")
-        || extensions->ExtensionSupported("GL_ARB_texture_non_power_of_two");
-
   bool brokenMesa=false;
-
-  if(mesa_version!=0)
+  if(extensions->DriverIsMesa())
     {
-    // Workaround for broken Mesa (dash16-sql):
-    // GL_VENDOR="Mesa project: www.mesa3d.org"
-    // GL_VERSION="1.4 (2.1 Mesa 7.0.4)"
-    // GL_RENDERER="Mesa GLX Indirect"
-    // there is no problem with (dash6):
-    // GL_VENDOR="Brian Paul"
-    // GL_VERSION="2.0 Mesa 7.0.4"
-    // GL_RENDERER="Mesa X11"
+    // Workaround for broken Mesa
+    if (!extensions->GetIgnoreDriverBugs("Mesa compressed texture bugs"))
+      {
+      this->SupportsCompressedTexture=false;
+      }
+
+    // Workaround Mesa 7.0.4 bug
     // glGetIntegerv(vtkgl::MAX_3D_TEXTURE_SIZE,&maxSize) return some
     // uninitialized value and a loading a Luminance-alpha 16x16x16 just
     // crashes glx.
-    int mesa_major=0;
-    int mesa_minor=0;
-    int mesa_patch=0;
-    int opengl_major=0;
-    int opengl_minor=0;
-    if(sscanf(gl_version,"%d.%d",&opengl_major, &opengl_minor)>=2)
+    if ( extensions->DriverVersionIs(7,0,4)
+      && extensions->DriverGLVersionIs(1,4) )
       {
-      if(opengl_major==1 && opengl_minor==4)
-        {
-        if(sscanf(mesa_version,"Mesa %d.%d.%d",&mesa_major,
-                  &mesa_minor,&mesa_patch)>=3)
-          {
-          brokenMesa=mesa_major==7 && mesa_minor==0 && mesa_patch==4;
-          }
-        }
+      brokenMesa = true;
+      }
+
+    // Workaround for a bug in the Mesa 8 OS Mesa renderer:
+    // all tests pass, however ::Render triggers a glPopAttrib
+    // "Invalid enum" error.
+    if ( extensions->DriverGLRendererIsOSMesa()
+      && !extensions->DriverVersionAtLeast(9)
+      && !extensions->GetIgnoreDriverBugs("Mesa 8 OS Mesa invalid enum") )
+      {
+      brokenMesa = true;
       }
     }
 
+  this->SupportsNonPowerOfTwoTextures=
+        extensions->ExtensionSupported("GL_VERSION_2_0")
+        || extensions->ExtensionSupported("GL_ARB_texture_non_power_of_two");
+
   int supports_GL_NV_texture_shader2     = extensions->ExtensionSupported( "GL_NV_texture_shader2" );
   int supports_GL_NV_register_combiners2 = extensions->ExtensionSupported( "GL_NV_register_combiners2" );
   int supports_GL_ATI_fragment_shader    = extensions->ExtensionSupported( "GL_ATI_fragment_shader" );
@@ -1954,6 +2009,8 @@ void vtkOpenGLVolumeTextureMapper3D::Initialize(vtkRenderer *r)
 int vtkOpenGLVolumeTextureMapper3D::IsTextureSizeSupported(int size[3],
                                                            int components)
 {
+  vtkOpenGLClearErrorMacro();
+
   GLint maxSize;
   glGetIntegerv(vtkgl::MAX_3D_TEXTURE_SIZE,&maxSize);
 
@@ -2013,6 +2070,8 @@ int vtkOpenGLVolumeTextureMapper3D::IsTextureSizeSupported(int size[3],
     }
   glBindTexture(vtkgl::TEXTURE_3D,0); // bind to default texture object.
   glDeleteTextures(1,&id1);
+
+  vtkOpenGLCheckErrorMacro("failed after IsTextureSizeSupported");
   return result;
 }
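
Most of the churn in this file is the new vtkOpenGLClearErrorMacro / vtkOpenGLCheckErrorMacro bracket: clear any stale GL error on entry so later failures are attributable, then report anything the function itself raised. One plausible reduction of that pattern to raw GL is sketched below; it is not the actual definitions in vtkOpenGLError.h.

    // Sketch only: a possible expansion of the clear/check error bracket,
    // not the macros' real implementation.
    #include <GL/glew.h>
    #include <cstdio>

    // drop any error left over from earlier calls so it is not blamed on us
    static void ClearGLErrors()
    {
      while (glGetError() != GL_NO_ERROR) {}
    }

    // report every error raised since the last clear, tagged with a description
    static void CheckGLErrors(const char *description)
    {
      for (GLenum e = glGetError(); e != GL_NO_ERROR; e = glGetError())
        {
        std::fprintf(stderr, "OpenGL error 0x%04x %s\n", e, description);
        }
    }

Clearing on entry matters because GL error flags are sticky: an error left by an unrelated caller would otherwise surface at the first check inside the function and be misattributed.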
 
diff --git a/Testing/Core/ConfigSummary.txt.in b/Testing/Core/ConfigSummary.txt.in
index 9733249..91f9aaf 100644
--- a/Testing/Core/ConfigSummary.txt.in
+++ b/Testing/Core/ConfigSummary.txt.in
@@ -84,12 +84,6 @@ ConfigSummary = {
 "BUILD_EXAMPLES":
 "@BUILD_EXAMPLES@",
 
-"VTK_DATA_ROOT":
-"@VTK_DATA_ROOT@",
-
-"VTK_LARGE_DATA_ROOT":
-"@VTK_LARGE_DATA_ROOT@",
-
 "VTK_DEBUG_LEAKS":
 "@VTK_DEBUG_LEAKS@",
 
diff --git a/Testing/Core/HeaderTesting.py b/Testing/Core/HeaderTesting.py
index 9ac3a3c..59aaa8c 100755
--- a/Testing/Core/HeaderTesting.py
+++ b/Testing/Core/HeaderTesting.py
@@ -187,7 +187,7 @@ class TestVTKFiles:
         pass
 
     def CheckParent(self):
-        classre = "^class\s*(.*_EXPORT|\s*) (vtk[A-Z0-9_][^ :\n]*)\s*:\s*public\s*(vtk[^ \n\{]*)"
+        classre = "^class(\s+[^\s]*_EXPORT)?\s+(vtk[A-Z0-9_][^ :\n]*)\s*:\s*public\s+(vtk[^ \n\{]*)"
         cname = ""
         pname = ""
         classlines = []
@@ -236,8 +236,8 @@ class TestVTKFiles:
         count = 0
         lines = []
         oldlines = []
-        typere = "^\s*vtkType(Revision)*Macro\s*\(\s*(vtk[^ ,]+)\s*,\s*(vtk[^ \)]+)\s*\)\s*"
-        typesplitre = "^\s*vtkType(Revision)*Macro\s*\("
+        typere = "^\s*vtk(Abstract)?Type(Revision)*Macro\s*\(\s*(vtk[^ ,]+)\s*,\s*(vtk[^ \)]+)\s*\)\s*"
+        typesplitre = "^\s*vtk(Abstract)?Type(Revision)*Macro\s*\("
 
         regx = re.compile(typere)
         regxs = re.compile(typesplitre)
@@ -248,10 +248,10 @@ class TestVTKFiles:
             rm = regx.match(line)
             if rm:
                 found = 1
-                if rm.group(1) == "Revision":
+                if rm.group(2) == "Revision":
                     oldlines.append(" %4d: %s" % (cc, line))
-                cname = rm.group(2)
-                pname = rm.group(3)
+                cname = rm.group(3)
+                pname = rm.group(4)
                 if cname != self.ClassName or pname != self.ParentName:
                     lines.append(" %4d: %s" % (cc, line))
             else:
@@ -263,10 +263,10 @@ class TestVTKFiles:
                     rm = regx.match(line)
                     if rm:
                         found = 1
-                        if rm.group(1) == "Revision":
+                        if rm.group(2) == "Revision":
                             oldlines.append(" %4d: %s" % (cc, line))
-                        cname = rm.group(2)
-                        pname = rm.group(3)
+                        cname = rm.group(3)
+                        pname = rm.group(4)
                         if cname != self.ClassName or pname != self.ParentName:
                             lines.append(" %4d: %s" % (cc, line))
             cc = cc + 1
diff --git a/Testing/Core/vtkTestDriver.h b/Testing/Core/vtkTestDriver.h
index 0dd51de..2e28389 100644
--- a/Testing/Core/vtkTestDriver.h
+++ b/Testing/Core/vtkTestDriver.h
@@ -22,4 +22,5 @@
 #include <clocale> // C setlocale()
 #include <locale> // C++ locale
 
+#include <vtksys/SystemInformation.hxx> // for stacktrace
 #endif
diff --git a/Testing/Core/vtk_site_history.py b/Testing/Core/vtk_site_history.py
new file mode 100755
index 0000000..cff98e2
--- /dev/null
+++ b/Testing/Core/vtk_site_history.py
@@ -0,0 +1,151 @@
+#!/usr/bin/python
+"""
+This script scrapes submitter test failures for the last N days in order to
+diagnose general dashboard health and especially intermittently problematic
+machines.
+
+The script saves out a summary that shows, for each submitter and for each of
+the last N days, the number of failures, the names of each failing test and
+the url to the cdash page for each test.
+
+To use it, get to a command line and run:
+python vtk_site_history.py
+Doing so will results in one cvs file for each submitter containing detailed
+results. While it works it prints out a summary to show progress.
+If you give the script a numeric argument it will instead append just the last
+N days worth of results to the files.
+
+NOTE: this script assumes a unix shell, so it exits immediately on Windows.
+"""
+
+import subprocess as sub
+import time
+from datetime import date, timedelta
+import sys
+
+import platform
+print platform.system()
+if platform.system() == 'Windows':
+    print "Sorry this script assumes a posix shell."
+    sys.exit()
+
+oneday = timedelta(1)
+today = date.today()
+tomorrow = today + oneday
+numdays = 4*30 #cdash saves 4 months back
+start = today-(oneday*numdays)
+
+url1 = ''
+url2 = ''
+url3 = ''
+url4 = ''
+
+#build up a string to ask CDASH for just what we want.
+url1 = url1 +  'curl "http://open.cdash.org/queryTests.php?project=VTK'
+url1 = url1 + '&date='
+url2 = url2 + '&limit=200'
+url2 = url2 + '&showfilters=1'
+url2 = url2 + '&filtercombine=and'
+url2 = url2 + '&filtercount=3'
+url2 = url2 + '&field1=status/string&compare1=61&value1=Failed'
+url2 = url2 + '&field2=site/string&compare2=61&value2='
+url3 = url3 + '&field3=buildname/string&compare3=61&value3='
+url4 = url4 + '" 2>&1 | egrep "testDetails.*Failed" -B 2 | egrep "/td|/a"'
+
+#submissions in VTK's nightly expected section
+submissions = [
+['amber10.kitware','Win64-VS10'],
+['amber12.kitware','Win32-mingw-gcc-4.5'],
+['Blight.kitware','blight'],
+['bubbles.hooperlab','Fedora-17_OSMesa-9.1.3-x86_64'],
+['DASH11.kitware','Win32-vs71-static'],
+['DASH3.kitware','Win32-vs9-Static'],
+['DASH3.kitware','Win32-vs9-Shared'],
+['firefly.kitware','Ubuntu-GCC-4.7-release'],
+['hythloth.kitware','Linux-gcc'],
+['hythloth.kitware','TestExternal-Linux-gcc'],
+['kamino.kitware','Mac10.7.5-clang-release-x86_64-nightly_master'],
+['kamino.kitware','Mac10.7.5-gcc-release-x86_64-nightly_master'],
+['karego-at.kitware','Ubuntu-Valgrind'],
+['karego-at.kitware','Ubuntu-Coverage'],
+['londinium.kitware','Arch-Clang-3.2-x86_64-debug'],
+['londinium.kitware','Arch-GCC-4.8-x86_64-debug'],
+['londinium.kitware','Arch-GCC-4.8-x86_64-release'],
+['mirkwood.dlrsoftware','Win32-ninja-Debug'],
+['mirkwood.dlrsoftware','Win32-ninja-Release'],
+['p90n03.pbm.ihost.com','AIX00F614-xlC'],
+['RogueResearch11','Mac10.7-clang-dbg-x86_64'],
+['RogueResearch11','Mac10.7-clang-rel-x86_64'],
+['RogueResearch3','Mac10.5-gcc-dbg-ppc64-static'],
+['RogueResearch3','Mac10.5-gcc-dbg-ppc-shared'],
+['RogueResearch7','Mac10.8-clang-dbg-x86_64'],
+['RogueResearch7','Mac10.8-clang-rel-x86_64'],
+['RogueResearch9','Mac10.6-gcc-dbg-i386'],
+['RogueResearch9','Mac10.6-gcc-rel-x86_64'],
+]
+
+#open the pages for one particular submitter and compile output into a dictionary
+def getResults(site, submission):
+    results = []
+    aday = start
+    while aday < tomorrow:
+        datestr = "%4d-%02d-%02d"%(aday.year, aday.month, aday.day)
+        cmd = url1+datestr+url2+site+url3+submission+url4
+        aday = aday + oneday
+        #print cmd
+        p = sub.Popen(['/bin/bash', '-c', cmd], stdout=sub.PIPE, stderr=sub.PIPE)
+        output = p.stdout.read()[:-1]
+        error = p.stderr.read()[:-1]
+        #print site, submission, ":", datestr
+        #print output
+        results.append({'date':datestr,'fails':output})
+    return results
+
+#run through gathered results and format as a long string
+def formatResults(results):
+    formatted = ""
+    cnt = -1
+    for r in results:
+        cnt = cnt +1
+        d = r['date']
+        f = r['fails']
+        lines = f.split('\n')
+        if len(f) == 0: #no failures!
+            formatted = formatted  + d + ", " + "0" + "\n"
+            continue
+        if len(lines)>20: #specifics are capped at 10 failures
+            formatted = formatted +  d + ", " + "11" + "\n"
+            continue
+        #a small number of failures, keep details
+        tres = [d,",",len(lines)/2]
+        for c in range(0, len(lines), 2):
+            tname = lines[c+0].strip()[4:-5]
+            turl = "http://open.cdash.org/" + lines[c+1].strip()[9:-12].replace('amp;','')
+            tres.append(",")
+            tres.append(tname)
+            tres.append(",")
+            tres.append(turl)
+        for x in tres:
+            formatted = formatted + str(x) + " "
+        formatted = formatted + "\n"
+    return formatted
+
+if __name__ == '__main__':
+    printheader = True
+    if len(sys.argv) == 2: #just append last few days
+        numdays = int(sys.argv[1])
+        printheader = False
+
+    for x,y in submissions:
+        fname = x+'_'+y+'.csv'
+        print x,y
+        fd = open(fname, 'a')
+        if printheader:
+            fd.write("#"+fname+" date, numfails, failed test name1, failed test url1, ...\n")
+        start = today-(oneday*numdays)
+        res = getResults(x,y)
+        resStrings = formatResults(res)
+        for x in resStrings.split("\n")[0:-1]:
+           print x.split(",")[0], x.split(",")[1]
+        fd.write(resStrings)
+        fd.close()
diff --git a/Testing/Core/vtk_submitter_summary.py b/Testing/Core/vtk_submitter_summary.py
index 2ae9fc2..93d64df 100755
--- a/Testing/Core/vtk_submitter_summary.py
+++ b/Testing/Core/vtk_submitter_summary.py
@@ -1,15 +1,15 @@
 #!/usr/bin/python
 """
-This module script scrapes test results from a days dashboard that explain the
-configuration of the submitting machine. Source this script and then you can
-query 'configs' to see what each machine was testing.
+This module scrapes test results from a day's dashboard that explain the
+configuration of the submitting machine. It is useful for finding under-tested
+areas of the option and platform space.
 
 To use it get to a command line and run:
 python vtk_submitter_summary.py
-That will load the days results, save them locally and
+That will load the day's results, save them locally and
 print and save two reports, which can then be imported into a spreadsheet.
 
-You can of course import it in python and query the data manually if you like.
+You can also import the module in python and query the results manually.
 """
 
 import sys
@@ -24,14 +24,15 @@ summary = {}
 
 def scrape_cdash(date):
 
-  test_sysinfo_url = 'http://open.cdash.org/testSummary.php?project=11&name=vtkCommonCore-TestSystemInformation&date='+date
+  #test_sysinfo_url = 'http://open.cdash.org/testSummary.php?project=11&name=vtkCommonCore-TestSystemInformation&date='+date
+  test_sysinfo_url = 'http://open.cdash.org/testSummary.php?project=11&name=vtkCommonCoreCxx-TestSystemInformation&date='+date
   test_fbo_url = 'http://open.cdash.org/testSummary.php?project=11&name=vtkRenderingOpenGLCxx-TestFBO&date='+date
 
   testspage = urllib.urlopen(test_sysinfo_url)
   response = "".join(testspage.readlines())
   #print response
 
-  print "scrapeing config info"
+  print "scraping config info"
 
   #scan page for all machines that submitted that test
   testdetailspage_re = 'testDetails[^"]+'
@@ -93,7 +94,7 @@ def scrape_cdash(date):
     configs[key] = configuration
 
   print
-  print "scrapeing GPU info"
+  print "scraping GPU info"
 
   #TODO: pull out common parts into a scraper function
   #Now grab GL info from TestFBO
diff --git a/Testing/Data/2LYZ.pdb.md5 b/Testing/Data/2LYZ.pdb.md5
new file mode 100644
index 0000000..52291cb
--- /dev/null
+++ b/Testing/Data/2LYZ.pdb.md5
@@ -0,0 +1 @@
+72b1784f9998d7bfb2f7bebf1b9b7870
diff --git a/Testing/Data/2h2o.aux.md5 b/Testing/Data/2h2o.aux.md5
new file mode 100644
index 0000000..fc7bba5
--- /dev/null
+++ b/Testing/Data/2h2o.aux.md5
@@ -0,0 +1 @@
+7dc839fb2825efa8c55e4275fee02a75
diff --git a/Testing/Data/3GQP.pdb.md5 b/Testing/Data/3GQP.pdb.md5
new file mode 100644
index 0000000..2bd4522
--- /dev/null
+++ b/Testing/Data/3GQP.pdb.md5
@@ -0,0 +1 @@
+d0b26af904ed06778c0c8643268ce15e
diff --git a/Testing/Data/42400-IDGH.stl.md5 b/Testing/Data/42400-IDGH.stl.md5
new file mode 100644
index 0000000..01816f3
--- /dev/null
+++ b/Testing/Data/42400-IDGH.stl.md5
@@ -0,0 +1 @@
+31c7bb73f4b6c19c53f98c6d8fa4b686
diff --git a/Testing/Data/AMR/Enzo/DD0010/moving7_0010.boundary.hdf.md5 b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.boundary.hdf.md5
new file mode 100644
index 0000000..bb94ad7
--- /dev/null
+++ b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.boundary.hdf.md5
@@ -0,0 +1 @@
+9c113b1f2469c679601f208db4258447
diff --git a/Testing/Data/AMR/Enzo/DD0010/moving7_0010.boundary.md5 b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.boundary.md5
new file mode 100644
index 0000000..2e2d128
--- /dev/null
+++ b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.boundary.md5
@@ -0,0 +1 @@
+9b20bb19808dab2d2a3d96558d8665b0
diff --git a/Testing/Data/AMR/Enzo/DD0010/moving7_0010.cpu0000.md5 b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.cpu0000.md5
new file mode 100644
index 0000000..12f4a0c
--- /dev/null
+++ b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.cpu0000.md5
@@ -0,0 +1 @@
+2de2c26afefe92c375ecbc193bd24794
diff --git a/Testing/Data/AMR/Enzo/DD0010/moving7_0010.harrays.md5 b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.harrays.md5
new file mode 100644
index 0000000..6e1ef00
--- /dev/null
+++ b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.harrays.md5
@@ -0,0 +1 @@
+d15688d825f8bbe7c9ace84513496564
diff --git a/Testing/Data/AMR/Enzo/DD0010/moving7_0010.hierarchy.md5 b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.hierarchy.md5
new file mode 100644
index 0000000..2ec88be
--- /dev/null
+++ b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.hierarchy.md5
@@ -0,0 +1 @@
+8d74741aaad5aa6d4742e58686a7d871
diff --git a/Testing/Data/AMR/Enzo/DD0010/moving7_0010.md5 b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.md5
new file mode 100644
index 0000000..21b9150
--- /dev/null
+++ b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.md5
@@ -0,0 +1 @@
+498f86b6c8285bee28643b43090ab453
diff --git a/Testing/Data/AMR/Enzo/DD0010/moving7_0010.procmap.md5 b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.procmap.md5
new file mode 100644
index 0000000..f3c1856
--- /dev/null
+++ b/Testing/Data/AMR/Enzo/DD0010/moving7_0010.procmap.md5
@@ -0,0 +1 @@
+516f8d83a1ab9fcbc826d20b94c677bc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0.vthb.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0.vthb.md5
new file mode 100644
index 0000000..f4ed41b
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0.vthb.md5
@@ -0,0 +1 @@
+7297c527a774a55b253ba8cf8d31e6e6
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_0.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_0.vti.md5
new file mode 100644
index 0000000..69b8565
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_0.vti.md5
@@ -0,0 +1 @@
+b2e6e376a42451a65027685008865ec7
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_1.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_1.vti.md5
new file mode 100644
index 0000000..c7d998e
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_1.vti.md5
@@ -0,0 +1 @@
+1dda8664c504b0a1bb046d1e844bfb5e
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_10.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_10.vti.md5
new file mode 100644
index 0000000..2a823f5
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_10.vti.md5
@@ -0,0 +1 @@
+5a00e7bc74f6d18d6bcb6587ab58d4f3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_11.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_11.vti.md5
new file mode 100644
index 0000000..fbeb70c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_11.vti.md5
@@ -0,0 +1 @@
+2f2b317f5d713c83e3027745690ece03
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_12.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_12.vti.md5
new file mode 100644
index 0000000..3a2a441
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_12.vti.md5
@@ -0,0 +1 @@
+b7640d8326329e870d0a972a71ef5a9c
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_13.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_13.vti.md5
new file mode 100644
index 0000000..ad0fed8
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_13.vti.md5
@@ -0,0 +1 @@
+e604fe8936493825768781611a87f00d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_14.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_14.vti.md5
new file mode 100644
index 0000000..efd69c7
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_14.vti.md5
@@ -0,0 +1 @@
+2c850fa5332ba91682e4a9ae837a9ede
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_15.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_15.vti.md5
new file mode 100644
index 0000000..6d3f3cd
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_15.vti.md5
@@ -0,0 +1 @@
+14d6543c8dc4db6e50e6fc4c6dcb300c
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_16.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_16.vti.md5
new file mode 100644
index 0000000..28786d4
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_16.vti.md5
@@ -0,0 +1 @@
+5307d863aa4949b5e2c15a40dfd96a1b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_17.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_17.vti.md5
new file mode 100644
index 0000000..888281a
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_17.vti.md5
@@ -0,0 +1 @@
+94e24b7b61c2fdd7de16a7cb0e4e37f0
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_18.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_18.vti.md5
new file mode 100644
index 0000000..9325a27
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_18.vti.md5
@@ -0,0 +1 @@
+e959f4756d58a92c1ed06b430add4fd5
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_19.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_19.vti.md5
new file mode 100644
index 0000000..7e87af3
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_19.vti.md5
@@ -0,0 +1 @@
+88a32218d24a98828e3d152831a0b16b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_2.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_2.vti.md5
new file mode 100644
index 0000000..d7b7366
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_2.vti.md5
@@ -0,0 +1 @@
+ac6fd0cb75b86da8b5022603250ae259
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_20.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_20.vti.md5
new file mode 100644
index 0000000..504c5f1
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_20.vti.md5
@@ -0,0 +1 @@
+cdf2c3eaa0ebc45b228e576b7c899eee
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_21.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_21.vti.md5
new file mode 100644
index 0000000..3f9b1a0
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_21.vti.md5
@@ -0,0 +1 @@
+5d5c3904afef1c537ed7e7d4edb1a7be
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_22.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_22.vti.md5
new file mode 100644
index 0000000..4f4bf3e
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_22.vti.md5
@@ -0,0 +1 @@
+b9890e09e7a311632955ba4ce92470a5
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_23.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_23.vti.md5
new file mode 100644
index 0000000..5b3c8be
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_23.vti.md5
@@ -0,0 +1 @@
+e727fbb7d1307b12391490d197978b30
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_24.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_24.vti.md5
new file mode 100644
index 0000000..5e52bb0
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_24.vti.md5
@@ -0,0 +1 @@
+16feee98aab6255877738e7a62da0c8d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_25.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_25.vti.md5
new file mode 100644
index 0000000..7277aef
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_25.vti.md5
@@ -0,0 +1 @@
+aa0e8d3741e75fb8dfcf64c67d6de92b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_26.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_26.vti.md5
new file mode 100644
index 0000000..eb44d1c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_26.vti.md5
@@ -0,0 +1 @@
+d4365b75158780381c680c6c3cbeea09
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_27.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_27.vti.md5
new file mode 100644
index 0000000..3217475
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_27.vti.md5
@@ -0,0 +1 @@
+ab830ec29f01d93099c59a5f9725652f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_28.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_28.vti.md5
new file mode 100644
index 0000000..dea48d2
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_28.vti.md5
@@ -0,0 +1 @@
+8b20d858e8a0f0caef3c4f9140d966c7
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_29.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_29.vti.md5
new file mode 100644
index 0000000..3a8f147
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_29.vti.md5
@@ -0,0 +1 @@
+3217913f101c5e58cf04c977ac42d88f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_3.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_3.vti.md5
new file mode 100644
index 0000000..1701461
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_3.vti.md5
@@ -0,0 +1 @@
+5b9cd9b4c19d8fb0a17f92e73f9b2986
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_30.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_30.vti.md5
new file mode 100644
index 0000000..29836c6
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_30.vti.md5
@@ -0,0 +1 @@
+4372991290f1f8f137ad6f095ef13f5e
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_31.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_31.vti.md5
new file mode 100644
index 0000000..92dbe4c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_31.vti.md5
@@ -0,0 +1 @@
+477bc3e70b836de880ef6366051ae9f3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_32.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_32.vti.md5
new file mode 100644
index 0000000..1690056
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_32.vti.md5
@@ -0,0 +1 @@
+05dbcd6fdb107c9bcf2cffca9e0e5281
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_33.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_33.vti.md5
new file mode 100644
index 0000000..7180e51
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_33.vti.md5
@@ -0,0 +1 @@
+9b111903d401780465dcc0d3001ce471
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_34.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_34.vti.md5
new file mode 100644
index 0000000..ea376c8
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_34.vti.md5
@@ -0,0 +1 @@
+481a686ef9d9fb1f261597e2a04cb5c9
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_35.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_35.vti.md5
new file mode 100644
index 0000000..b24c0db
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_35.vti.md5
@@ -0,0 +1 @@
+105c948bc68f53c9ef67889d3f24c0dd
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_36.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_36.vti.md5
new file mode 100644
index 0000000..9735b55
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_36.vti.md5
@@ -0,0 +1 @@
+c77de9e53932828de909576dd7b08366
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_37.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_37.vti.md5
new file mode 100644
index 0000000..3d413f6
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_37.vti.md5
@@ -0,0 +1 @@
+a74423c782ffdbc3b96865e9d197cdcc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_38.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_38.vti.md5
new file mode 100644
index 0000000..b2ec19b
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_38.vti.md5
@@ -0,0 +1 @@
+e5a4928c3a86a0a1af83016d116a564f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_39.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_39.vti.md5
new file mode 100644
index 0000000..231996c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_39.vti.md5
@@ -0,0 +1 @@
+b2e66575f34b7c0a1d8b28e0783d09a3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_4.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_4.vti.md5
new file mode 100644
index 0000000..9b9ec42
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_4.vti.md5
@@ -0,0 +1 @@
+82888a1c400acb87b56a22c7bbd3dd91
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_40.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_40.vti.md5
new file mode 100644
index 0000000..0c2b65c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_40.vti.md5
@@ -0,0 +1 @@
+ab25f5955e2beba668d2c190a5365c7b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_41.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_41.vti.md5
new file mode 100644
index 0000000..06f5363
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_41.vti.md5
@@ -0,0 +1 @@
+002f919ef79b4157345c95d6b3fbd1cd
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_42.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_42.vti.md5
new file mode 100644
index 0000000..6a1cb9f
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_42.vti.md5
@@ -0,0 +1 @@
+b4ab5d27a6407f4b9858db2c509f4561
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_43.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_43.vti.md5
new file mode 100644
index 0000000..09a9a95
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_43.vti.md5
@@ -0,0 +1 @@
+85bbe4d071a147b3d5b947f2b6b6a6f3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_44.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_44.vti.md5
new file mode 100644
index 0000000..69f8a62
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_44.vti.md5
@@ -0,0 +1 @@
+8562d7fb83f5014961d5746d64627ea5
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_45.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_45.vti.md5
new file mode 100644
index 0000000..d8d310f
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_45.vti.md5
@@ -0,0 +1 @@
+69a8c900f3b6f8a3dfa2157f9c30a6cc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_46.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_46.vti.md5
new file mode 100644
index 0000000..3ca4bc5
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_46.vti.md5
@@ -0,0 +1 @@
+8829d107530cc2378f8f9ca7df66655d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_47.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_47.vti.md5
new file mode 100644
index 0000000..d57d474
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_47.vti.md5
@@ -0,0 +1 @@
+679bd61efda8d3311291a28247e4e05b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_48.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_48.vti.md5
new file mode 100644
index 0000000..330a1f9
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_48.vti.md5
@@ -0,0 +1 @@
+c409979873a8fbd6f14dc8c3885b162f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_49.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_49.vti.md5
new file mode 100644
index 0000000..1183c5f
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_49.vti.md5
@@ -0,0 +1 @@
+6780703c673e84456b8045bb78a3bcbb
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_5.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_5.vti.md5
new file mode 100644
index 0000000..8089e2d
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_5.vti.md5
@@ -0,0 +1 @@
+1cf3deadb152f302df4ea3c546b100dc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_50.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_50.vti.md5
new file mode 100644
index 0000000..0576eb9
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_50.vti.md5
@@ -0,0 +1 @@
+3c18887a41569a04b46356418e69eabc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_51.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_51.vti.md5
new file mode 100644
index 0000000..9eb902e
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_51.vti.md5
@@ -0,0 +1 @@
+111931e1272d837cfa650bf663fe11a7
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_52.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_52.vti.md5
new file mode 100644
index 0000000..4b19241
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_52.vti.md5
@@ -0,0 +1 @@
+a672343d8393ec2bff1b8dece11f28d3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_53.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_53.vti.md5
new file mode 100644
index 0000000..07ba2f2
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_53.vti.md5
@@ -0,0 +1 @@
+433b23763d3398a2fe37d211f6a3dce0
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_54.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_54.vti.md5
new file mode 100644
index 0000000..597b3dd
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_54.vti.md5
@@ -0,0 +1 @@
+7250d038d39719576f8ad3e8979a5371
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_55.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_55.vti.md5
new file mode 100644
index 0000000..698b594
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_55.vti.md5
@@ -0,0 +1 @@
+e36fa59c1b429f9c6a0fdd5b6742f9cb
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_56.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_56.vti.md5
new file mode 100644
index 0000000..d1970ba
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_56.vti.md5
@@ -0,0 +1 @@
+8c5cf1c6564cdebd79f5ed2628c2a60c
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_57.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_57.vti.md5
new file mode 100644
index 0000000..1b298ee
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_57.vti.md5
@@ -0,0 +1 @@
+f389b8abf0196ab9833fe2b657d79bb7
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_58.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_58.vti.md5
new file mode 100644
index 0000000..98c0153
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_58.vti.md5
@@ -0,0 +1 @@
+56e1d105b384ab9402bfc6c10e1f26f4
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_59.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_59.vti.md5
new file mode 100644
index 0000000..41ace80
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_59.vti.md5
@@ -0,0 +1 @@
+9a80998b3b809c63372fa81c9e10b22e
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_6.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_6.vti.md5
new file mode 100644
index 0000000..3cc6da4
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_6.vti.md5
@@ -0,0 +1 @@
+447140742b05c2b73e3ef7eae8c9568f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_60.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_60.vti.md5
new file mode 100644
index 0000000..4bd5b59
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_60.vti.md5
@@ -0,0 +1 @@
+2c56251b0fee6e1ae29e7e5c2959d498
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_61.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_61.vti.md5
new file mode 100644
index 0000000..65481aa
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_61.vti.md5
@@ -0,0 +1 @@
+ed4efb4e1587e14ab8ca16be47cd621a
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_62.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_62.vti.md5
new file mode 100644
index 0000000..c146f0a
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_62.vti.md5
@@ -0,0 +1 @@
+45b72d8fa41d7f327a5c74926b555545
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_63.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_63.vti.md5
new file mode 100644
index 0000000..65eeec4
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_63.vti.md5
@@ -0,0 +1 @@
+a85780f36f85080659fd84ac5f9996a6
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_64.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_64.vti.md5
new file mode 100644
index 0000000..7efa752
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_64.vti.md5
@@ -0,0 +1 @@
+dc317d2bf6ced412015d47aace5d0996
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_65.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_65.vti.md5
new file mode 100644
index 0000000..a42b258
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_65.vti.md5
@@ -0,0 +1 @@
+0518ae07c78075a0c97650d0cf57fc34
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_66.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_66.vti.md5
new file mode 100644
index 0000000..0dfaac2
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_66.vti.md5
@@ -0,0 +1 @@
+2b0312a90dd5f7b7529f79a88b14cbd3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_67.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_67.vti.md5
new file mode 100644
index 0000000..fe2b253
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_67.vti.md5
@@ -0,0 +1 @@
+125ff22c8339c9563237e09b58f5a722
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_68.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_68.vti.md5
new file mode 100644
index 0000000..a417788
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_68.vti.md5
@@ -0,0 +1 @@
+ae7187a1ef59734804492c83954a496f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_69.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_69.vti.md5
new file mode 100644
index 0000000..707d599
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_69.vti.md5
@@ -0,0 +1 @@
+471c3558077993ad9cb5afecec58c8ea
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_7.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_7.vti.md5
new file mode 100644
index 0000000..aab3e59
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_7.vti.md5
@@ -0,0 +1 @@
+fb68e0f8e8ae436ee0c031d3b1fb3fa4
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_70.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_70.vti.md5
new file mode 100644
index 0000000..185206c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_70.vti.md5
@@ -0,0 +1 @@
+88ff87d97b8f5be081328455b962e17a
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_71.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_71.vti.md5
new file mode 100644
index 0000000..f5a2f23
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_71.vti.md5
@@ -0,0 +1 @@
+386f35b42b32dfc5686829f3cffcc75b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_72.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_72.vti.md5
new file mode 100644
index 0000000..0c70cc0
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_72.vti.md5
@@ -0,0 +1 @@
+5c5502f69d2e70282398f0a59a3d28cc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_73.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_73.vti.md5
new file mode 100644
index 0000000..5bb6d0b
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_73.vti.md5
@@ -0,0 +1 @@
+a2bbc49290b2d86478d8d0ccc0e62e32
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_74.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_74.vti.md5
new file mode 100644
index 0000000..ceaa5be
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_74.vti.md5
@@ -0,0 +1 @@
+3ae2f863e934fd6868a29bfd36140070
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_75.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_75.vti.md5
new file mode 100644
index 0000000..cf96390
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_75.vti.md5
@@ -0,0 +1 @@
+bc84178c770de01ba631d69c7eee3f0d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_76.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_76.vti.md5
new file mode 100644
index 0000000..cffd1c6
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_76.vti.md5
@@ -0,0 +1 @@
+7bfe4697a5f9010accd331a23d64177d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_77.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_77.vti.md5
new file mode 100644
index 0000000..d7ad7b5
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_77.vti.md5
@@ -0,0 +1 @@
+1233cb70f6749b6fc6bb627d240469e9
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_78.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_78.vti.md5
new file mode 100644
index 0000000..d624519
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_78.vti.md5
@@ -0,0 +1 @@
+b03d83f84f6112a6344dced7fe1be029
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_79.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_79.vti.md5
new file mode 100644
index 0000000..d79a6c3
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_79.vti.md5
@@ -0,0 +1 @@
+2123c01b6f7125e92563b9d1b5a738a1
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_8.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_8.vti.md5
new file mode 100644
index 0000000..891ffbf
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_8.vti.md5
@@ -0,0 +1 @@
+82dca04271b4dd813155047c89c027df
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_80.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_80.vti.md5
new file mode 100644
index 0000000..986010c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_80.vti.md5
@@ -0,0 +1 @@
+c96b108493c57a9078fe4a2126641d39
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_9.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_9.vti.md5
new file mode 100644
index 0000000..200d30b
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.0/HierarchicalBoxDataset.v1.0_9.vti.md5
@@ -0,0 +1 @@
+86dd317fe4cf9ee04c05914f98d598b9
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1.vthb.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1.vthb.md5
new file mode 100644
index 0000000..84e2094
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1.vthb.md5
@@ -0,0 +1 @@
+7b674925723fe4d16414f569e8a4855e
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_0.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_0.vti.md5
new file mode 100644
index 0000000..69b8565
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_0.vti.md5
@@ -0,0 +1 @@
+b2e6e376a42451a65027685008865ec7
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_1.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_1.vti.md5
new file mode 100644
index 0000000..c7d998e
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_1.vti.md5
@@ -0,0 +1 @@
+1dda8664c504b0a1bb046d1e844bfb5e
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_10.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_10.vti.md5
new file mode 100644
index 0000000..2a823f5
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_10.vti.md5
@@ -0,0 +1 @@
+5a00e7bc74f6d18d6bcb6587ab58d4f3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_11.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_11.vti.md5
new file mode 100644
index 0000000..fbeb70c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_11.vti.md5
@@ -0,0 +1 @@
+2f2b317f5d713c83e3027745690ece03
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_12.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_12.vti.md5
new file mode 100644
index 0000000..3a2a441
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_12.vti.md5
@@ -0,0 +1 @@
+b7640d8326329e870d0a972a71ef5a9c
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_13.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_13.vti.md5
new file mode 100644
index 0000000..ad0fed8
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_13.vti.md5
@@ -0,0 +1 @@
+e604fe8936493825768781611a87f00d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_14.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_14.vti.md5
new file mode 100644
index 0000000..efd69c7
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_14.vti.md5
@@ -0,0 +1 @@
+2c850fa5332ba91682e4a9ae837a9ede
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_15.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_15.vti.md5
new file mode 100644
index 0000000..6d3f3cd
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_15.vti.md5
@@ -0,0 +1 @@
+14d6543c8dc4db6e50e6fc4c6dcb300c
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_16.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_16.vti.md5
new file mode 100644
index 0000000..28786d4
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_16.vti.md5
@@ -0,0 +1 @@
+5307d863aa4949b5e2c15a40dfd96a1b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_17.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_17.vti.md5
new file mode 100644
index 0000000..888281a
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_17.vti.md5
@@ -0,0 +1 @@
+94e24b7b61c2fdd7de16a7cb0e4e37f0
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_18.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_18.vti.md5
new file mode 100644
index 0000000..9325a27
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_18.vti.md5
@@ -0,0 +1 @@
+e959f4756d58a92c1ed06b430add4fd5
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_19.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_19.vti.md5
new file mode 100644
index 0000000..7e87af3
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_19.vti.md5
@@ -0,0 +1 @@
+88a32218d24a98828e3d152831a0b16b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_2.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_2.vti.md5
new file mode 100644
index 0000000..d7b7366
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_2.vti.md5
@@ -0,0 +1 @@
+ac6fd0cb75b86da8b5022603250ae259
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_20.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_20.vti.md5
new file mode 100644
index 0000000..504c5f1
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_20.vti.md5
@@ -0,0 +1 @@
+cdf2c3eaa0ebc45b228e576b7c899eee
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_21.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_21.vti.md5
new file mode 100644
index 0000000..3f9b1a0
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_21.vti.md5
@@ -0,0 +1 @@
+5d5c3904afef1c537ed7e7d4edb1a7be
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_22.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_22.vti.md5
new file mode 100644
index 0000000..4f4bf3e
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_22.vti.md5
@@ -0,0 +1 @@
+b9890e09e7a311632955ba4ce92470a5
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_23.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_23.vti.md5
new file mode 100644
index 0000000..5b3c8be
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_23.vti.md5
@@ -0,0 +1 @@
+e727fbb7d1307b12391490d197978b30
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_24.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_24.vti.md5
new file mode 100644
index 0000000..5e52bb0
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_24.vti.md5
@@ -0,0 +1 @@
+16feee98aab6255877738e7a62da0c8d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_25.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_25.vti.md5
new file mode 100644
index 0000000..7277aef
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_25.vti.md5
@@ -0,0 +1 @@
+aa0e8d3741e75fb8dfcf64c67d6de92b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_26.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_26.vti.md5
new file mode 100644
index 0000000..eb44d1c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_26.vti.md5
@@ -0,0 +1 @@
+d4365b75158780381c680c6c3cbeea09
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_27.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_27.vti.md5
new file mode 100644
index 0000000..3217475
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_27.vti.md5
@@ -0,0 +1 @@
+ab830ec29f01d93099c59a5f9725652f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_28.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_28.vti.md5
new file mode 100644
index 0000000..dea48d2
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_28.vti.md5
@@ -0,0 +1 @@
+8b20d858e8a0f0caef3c4f9140d966c7
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_29.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_29.vti.md5
new file mode 100644
index 0000000..3a8f147
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_29.vti.md5
@@ -0,0 +1 @@
+3217913f101c5e58cf04c977ac42d88f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_3.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_3.vti.md5
new file mode 100644
index 0000000..1701461
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_3.vti.md5
@@ -0,0 +1 @@
+5b9cd9b4c19d8fb0a17f92e73f9b2986
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_30.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_30.vti.md5
new file mode 100644
index 0000000..29836c6
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_30.vti.md5
@@ -0,0 +1 @@
+4372991290f1f8f137ad6f095ef13f5e
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_31.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_31.vti.md5
new file mode 100644
index 0000000..92dbe4c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_31.vti.md5
@@ -0,0 +1 @@
+477bc3e70b836de880ef6366051ae9f3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_32.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_32.vti.md5
new file mode 100644
index 0000000..1690056
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_32.vti.md5
@@ -0,0 +1 @@
+05dbcd6fdb107c9bcf2cffca9e0e5281
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_33.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_33.vti.md5
new file mode 100644
index 0000000..7180e51
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_33.vti.md5
@@ -0,0 +1 @@
+9b111903d401780465dcc0d3001ce471
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_34.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_34.vti.md5
new file mode 100644
index 0000000..ea376c8
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_34.vti.md5
@@ -0,0 +1 @@
+481a686ef9d9fb1f261597e2a04cb5c9
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_35.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_35.vti.md5
new file mode 100644
index 0000000..b24c0db
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_35.vti.md5
@@ -0,0 +1 @@
+105c948bc68f53c9ef67889d3f24c0dd
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_36.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_36.vti.md5
new file mode 100644
index 0000000..9735b55
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_36.vti.md5
@@ -0,0 +1 @@
+c77de9e53932828de909576dd7b08366
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_37.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_37.vti.md5
new file mode 100644
index 0000000..3d413f6
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_37.vti.md5
@@ -0,0 +1 @@
+a74423c782ffdbc3b96865e9d197cdcc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_38.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_38.vti.md5
new file mode 100644
index 0000000..b2ec19b
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_38.vti.md5
@@ -0,0 +1 @@
+e5a4928c3a86a0a1af83016d116a564f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_39.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_39.vti.md5
new file mode 100644
index 0000000..231996c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_39.vti.md5
@@ -0,0 +1 @@
+b2e66575f34b7c0a1d8b28e0783d09a3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_4.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_4.vti.md5
new file mode 100644
index 0000000..9b9ec42
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_4.vti.md5
@@ -0,0 +1 @@
+82888a1c400acb87b56a22c7bbd3dd91
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_40.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_40.vti.md5
new file mode 100644
index 0000000..0c2b65c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_40.vti.md5
@@ -0,0 +1 @@
+ab25f5955e2beba668d2c190a5365c7b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_41.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_41.vti.md5
new file mode 100644
index 0000000..06f5363
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_41.vti.md5
@@ -0,0 +1 @@
+002f919ef79b4157345c95d6b3fbd1cd
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_42.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_42.vti.md5
new file mode 100644
index 0000000..6a1cb9f
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_42.vti.md5
@@ -0,0 +1 @@
+b4ab5d27a6407f4b9858db2c509f4561
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_43.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_43.vti.md5
new file mode 100644
index 0000000..09a9a95
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_43.vti.md5
@@ -0,0 +1 @@
+85bbe4d071a147b3d5b947f2b6b6a6f3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_44.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_44.vti.md5
new file mode 100644
index 0000000..69f8a62
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_44.vti.md5
@@ -0,0 +1 @@
+8562d7fb83f5014961d5746d64627ea5
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_45.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_45.vti.md5
new file mode 100644
index 0000000..d8d310f
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_45.vti.md5
@@ -0,0 +1 @@
+69a8c900f3b6f8a3dfa2157f9c30a6cc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_46.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_46.vti.md5
new file mode 100644
index 0000000..3ca4bc5
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_46.vti.md5
@@ -0,0 +1 @@
+8829d107530cc2378f8f9ca7df66655d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_47.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_47.vti.md5
new file mode 100644
index 0000000..d57d474
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_47.vti.md5
@@ -0,0 +1 @@
+679bd61efda8d3311291a28247e4e05b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_48.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_48.vti.md5
new file mode 100644
index 0000000..330a1f9
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_48.vti.md5
@@ -0,0 +1 @@
+c409979873a8fbd6f14dc8c3885b162f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_49.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_49.vti.md5
new file mode 100644
index 0000000..1183c5f
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_49.vti.md5
@@ -0,0 +1 @@
+6780703c673e84456b8045bb78a3bcbb
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_5.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_5.vti.md5
new file mode 100644
index 0000000..8089e2d
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_5.vti.md5
@@ -0,0 +1 @@
+1cf3deadb152f302df4ea3c546b100dc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_50.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_50.vti.md5
new file mode 100644
index 0000000..0576eb9
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_50.vti.md5
@@ -0,0 +1 @@
+3c18887a41569a04b46356418e69eabc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_51.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_51.vti.md5
new file mode 100644
index 0000000..9eb902e
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_51.vti.md5
@@ -0,0 +1 @@
+111931e1272d837cfa650bf663fe11a7
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_52.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_52.vti.md5
new file mode 100644
index 0000000..4b19241
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_52.vti.md5
@@ -0,0 +1 @@
+a672343d8393ec2bff1b8dece11f28d3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_53.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_53.vti.md5
new file mode 100644
index 0000000..07ba2f2
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_53.vti.md5
@@ -0,0 +1 @@
+433b23763d3398a2fe37d211f6a3dce0
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_54.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_54.vti.md5
new file mode 100644
index 0000000..597b3dd
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_54.vti.md5
@@ -0,0 +1 @@
+7250d038d39719576f8ad3e8979a5371
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_55.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_55.vti.md5
new file mode 100644
index 0000000..698b594
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_55.vti.md5
@@ -0,0 +1 @@
+e36fa59c1b429f9c6a0fdd5b6742f9cb
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_56.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_56.vti.md5
new file mode 100644
index 0000000..d1970ba
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_56.vti.md5
@@ -0,0 +1 @@
+8c5cf1c6564cdebd79f5ed2628c2a60c
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_57.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_57.vti.md5
new file mode 100644
index 0000000..1b298ee
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_57.vti.md5
@@ -0,0 +1 @@
+f389b8abf0196ab9833fe2b657d79bb7
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_58.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_58.vti.md5
new file mode 100644
index 0000000..98c0153
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_58.vti.md5
@@ -0,0 +1 @@
+56e1d105b384ab9402bfc6c10e1f26f4
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_59.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_59.vti.md5
new file mode 100644
index 0000000..41ace80
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_59.vti.md5
@@ -0,0 +1 @@
+9a80998b3b809c63372fa81c9e10b22e
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_6.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_6.vti.md5
new file mode 100644
index 0000000..3cc6da4
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_6.vti.md5
@@ -0,0 +1 @@
+447140742b05c2b73e3ef7eae8c9568f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_60.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_60.vti.md5
new file mode 100644
index 0000000..4bd5b59
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_60.vti.md5
@@ -0,0 +1 @@
+2c56251b0fee6e1ae29e7e5c2959d498
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_61.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_61.vti.md5
new file mode 100644
index 0000000..65481aa
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_61.vti.md5
@@ -0,0 +1 @@
+ed4efb4e1587e14ab8ca16be47cd621a
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_62.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_62.vti.md5
new file mode 100644
index 0000000..c146f0a
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_62.vti.md5
@@ -0,0 +1 @@
+45b72d8fa41d7f327a5c74926b555545
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_63.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_63.vti.md5
new file mode 100644
index 0000000..65eeec4
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_63.vti.md5
@@ -0,0 +1 @@
+a85780f36f85080659fd84ac5f9996a6
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_64.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_64.vti.md5
new file mode 100644
index 0000000..7efa752
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_64.vti.md5
@@ -0,0 +1 @@
+dc317d2bf6ced412015d47aace5d0996
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_65.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_65.vti.md5
new file mode 100644
index 0000000..a42b258
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_65.vti.md5
@@ -0,0 +1 @@
+0518ae07c78075a0c97650d0cf57fc34
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_66.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_66.vti.md5
new file mode 100644
index 0000000..0dfaac2
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_66.vti.md5
@@ -0,0 +1 @@
+2b0312a90dd5f7b7529f79a88b14cbd3
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_67.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_67.vti.md5
new file mode 100644
index 0000000..fe2b253
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_67.vti.md5
@@ -0,0 +1 @@
+125ff22c8339c9563237e09b58f5a722
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_68.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_68.vti.md5
new file mode 100644
index 0000000..a417788
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_68.vti.md5
@@ -0,0 +1 @@
+ae7187a1ef59734804492c83954a496f
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_69.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_69.vti.md5
new file mode 100644
index 0000000..707d599
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_69.vti.md5
@@ -0,0 +1 @@
+471c3558077993ad9cb5afecec58c8ea
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_7.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_7.vti.md5
new file mode 100644
index 0000000..aab3e59
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_7.vti.md5
@@ -0,0 +1 @@
+fb68e0f8e8ae436ee0c031d3b1fb3fa4
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_70.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_70.vti.md5
new file mode 100644
index 0000000..185206c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_70.vti.md5
@@ -0,0 +1 @@
+88ff87d97b8f5be081328455b962e17a
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_71.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_71.vti.md5
new file mode 100644
index 0000000..f5a2f23
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_71.vti.md5
@@ -0,0 +1 @@
+386f35b42b32dfc5686829f3cffcc75b
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_72.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_72.vti.md5
new file mode 100644
index 0000000..0c70cc0
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_72.vti.md5
@@ -0,0 +1 @@
+5c5502f69d2e70282398f0a59a3d28cc
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_73.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_73.vti.md5
new file mode 100644
index 0000000..5bb6d0b
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_73.vti.md5
@@ -0,0 +1 @@
+a2bbc49290b2d86478d8d0ccc0e62e32
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_74.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_74.vti.md5
new file mode 100644
index 0000000..ceaa5be
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_74.vti.md5
@@ -0,0 +1 @@
+3ae2f863e934fd6868a29bfd36140070
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_75.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_75.vti.md5
new file mode 100644
index 0000000..cf96390
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_75.vti.md5
@@ -0,0 +1 @@
+bc84178c770de01ba631d69c7eee3f0d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_76.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_76.vti.md5
new file mode 100644
index 0000000..cffd1c6
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_76.vti.md5
@@ -0,0 +1 @@
+7bfe4697a5f9010accd331a23d64177d
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_77.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_77.vti.md5
new file mode 100644
index 0000000..d7ad7b5
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_77.vti.md5
@@ -0,0 +1 @@
+1233cb70f6749b6fc6bb627d240469e9
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_78.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_78.vti.md5
new file mode 100644
index 0000000..d624519
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_78.vti.md5
@@ -0,0 +1 @@
+b03d83f84f6112a6344dced7fe1be029
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_79.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_79.vti.md5
new file mode 100644
index 0000000..d79a6c3
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_79.vti.md5
@@ -0,0 +1 @@
+2123c01b6f7125e92563b9d1b5a738a1
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_8.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_8.vti.md5
new file mode 100644
index 0000000..891ffbf
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_8.vti.md5
@@ -0,0 +1 @@
+82dca04271b4dd813155047c89c027df
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_80.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_80.vti.md5
new file mode 100644
index 0000000..986010c
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_80.vti.md5
@@ -0,0 +1 @@
+c96b108493c57a9078fe4a2126641d39
diff --git a/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_9.vti.md5 b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_9.vti.md5
new file mode 100644
index 0000000..200d30b
--- /dev/null
+++ b/Testing/Data/AMR/HierarchicalBoxDataset.v1.1/HierarchicalBoxDataset.v1.1_9.vti.md5
@@ -0,0 +1 @@
+86dd317fe4cf9ee04c05914f98d598b9
diff --git a/Testing/Data/AngularSector.vtk.md5 b/Testing/Data/AngularSector.vtk.md5
new file mode 100644
index 0000000..12656f4
--- /dev/null
+++ b/Testing/Data/AngularSector.vtk.md5
@@ -0,0 +1 @@
+b6d14eef7c6488fd05685a51f1d16c0e
diff --git a/Testing/Data/B.pgm.md5 b/Testing/Data/B.pgm.md5
new file mode 100644
index 0000000..e94b9d9
--- /dev/null
+++ b/Testing/Data/B.pgm.md5
@@ -0,0 +1 @@
+596db5b40ceb06b514c870a822866a67
diff --git a/Testing/Data/BlueCircle.png.md5 b/Testing/Data/BlueCircle.png.md5
new file mode 100644
index 0000000..da4206f
--- /dev/null
+++ b/Testing/Data/BlueCircle.png.md5
@@ -0,0 +1 @@
+24c5c1be5d3bb484f1e4330e1f9fcb1b
diff --git a/Testing/Data/CityPopulationsUTF8.txt.md5 b/Testing/Data/CityPopulationsUTF8.txt.md5
new file mode 100644
index 0000000..601de79
--- /dev/null
+++ b/Testing/Data/CityPopulationsUTF8.txt.md5
@@ -0,0 +1 @@
+6424e2a51d932a5c93c361c14c3c4c49
diff --git a/Testing/Data/Dave_Karelitz_Small/sample.spcth-timeseries.md5 b/Testing/Data/Dave_Karelitz_Small/sample.spcth-timeseries.md5
new file mode 100644
index 0000000..21447de
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/sample.spcth-timeseries.md5
@@ -0,0 +1 @@
+10c282ca10656b74e440bf2ca20d0779
diff --git a/Testing/Data/Dave_Karelitz_Small/spcth.0.md5 b/Testing/Data/Dave_Karelitz_Small/spcth.0.md5
new file mode 100644
index 0000000..079602a
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/spcth.0.md5
@@ -0,0 +1 @@
+395cb6648da583510d21e098c084b93b
diff --git a/Testing/Data/Dave_Karelitz_Small/spcth.1.md5 b/Testing/Data/Dave_Karelitz_Small/spcth.1.md5
new file mode 100644
index 0000000..8eb9efd
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/spcth.1.md5
@@ -0,0 +1 @@
+200ddfe35c4a74e229ff5da9d5ac68ae
diff --git a/Testing/Data/Dave_Karelitz_Small/spcth.2.md5 b/Testing/Data/Dave_Karelitz_Small/spcth.2.md5
new file mode 100644
index 0000000..420b66a
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/spcth.2.md5
@@ -0,0 +1 @@
+2dd564f174b0445d34a938ca8573d204
diff --git a/Testing/Data/Dave_Karelitz_Small/spcth.3.md5 b/Testing/Data/Dave_Karelitz_Small/spcth.3.md5
new file mode 100644
index 0000000..16809d5
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/spcth.3.md5
@@ -0,0 +1 @@
+4c3faad99a7ac1d1a655f8af36b31484
diff --git a/Testing/Data/Dave_Karelitz_Small/spcth_a.0.md5 b/Testing/Data/Dave_Karelitz_Small/spcth_a.0.md5
new file mode 100644
index 0000000..f560946
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/spcth_a.0.md5
@@ -0,0 +1 @@
+7397af070345b6238fae0162a882c4f3
diff --git a/Testing/Data/Dave_Karelitz_Small/spcth_a.1.md5 b/Testing/Data/Dave_Karelitz_Small/spcth_a.1.md5
new file mode 100644
index 0000000..338aa65
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/spcth_a.1.md5
@@ -0,0 +1 @@
+8b6d8c33394bea26cca5d4eb57bb95cf
diff --git a/Testing/Data/Dave_Karelitz_Small/spcth_a.2.md5 b/Testing/Data/Dave_Karelitz_Small/spcth_a.2.md5
new file mode 100644
index 0000000..14762a7
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/spcth_a.2.md5
@@ -0,0 +1 @@
+5572c9d0cad55d9018ca9bf0833c608e
diff --git a/Testing/Data/Dave_Karelitz_Small/spcth_a.3.md5 b/Testing/Data/Dave_Karelitz_Small/spcth_a.3.md5
new file mode 100644
index 0000000..4698eff
--- /dev/null
+++ b/Testing/Data/Dave_Karelitz_Small/spcth_a.3.md5
@@ -0,0 +1 @@
+6b440c33aab99b3b5837987a33502b8f
diff --git a/Testing/Data/E07733S002I009.MR.md5 b/Testing/Data/E07733S002I009.MR.md5
new file mode 100644
index 0000000..40b5b28
--- /dev/null
+++ b/Testing/Data/E07733S002I009.MR.md5
@@ -0,0 +1 @@
+1ec716dc9d0949c344f930f3f17dcc2e
diff --git a/Testing/Data/EnSight/RectGrid_ascii.case.md5 b/Testing/Data/EnSight/RectGrid_ascii.case.md5
new file mode 100644
index 0000000..fe7740c
--- /dev/null
+++ b/Testing/Data/EnSight/RectGrid_ascii.case.md5
@@ -0,0 +1 @@
+f9a686e99d9be4a1f70861c7a026782b
diff --git a/Testing/Data/EnSight/RectGrid_ascii.geo.md5 b/Testing/Data/EnSight/RectGrid_ascii.geo.md5
new file mode 100644
index 0000000..ecc567c
--- /dev/null
+++ b/Testing/Data/EnSight/RectGrid_ascii.geo.md5
@@ -0,0 +1 @@
+4a28725acb99782a8072af0135a5d5a3
diff --git a/Testing/Data/EnSight/RectGrid_ascii_pd_scalars.md5 b/Testing/Data/EnSight/RectGrid_ascii_pd_scalars.md5
new file mode 100644
index 0000000..3488899
--- /dev/null
+++ b/Testing/Data/EnSight/RectGrid_ascii_pd_scalars.md5
@@ -0,0 +1 @@
+45dc9525cd0345a99eea8836eab221e0
diff --git a/Testing/Data/EnSight/RectGrid_ascii_pd_vectors.md5 b/Testing/Data/EnSight/RectGrid_ascii_pd_vectors.md5
new file mode 100644
index 0000000..7a5013c
--- /dev/null
+++ b/Testing/Data/EnSight/RectGrid_ascii_pd_vectors.md5
@@ -0,0 +1 @@
+a9a8c285f1c4a4d5f48b6e27b338e491
diff --git a/Testing/Data/EnSight/RectGrid_bin.case.md5 b/Testing/Data/EnSight/RectGrid_bin.case.md5
new file mode 100644
index 0000000..e8a1f19
--- /dev/null
+++ b/Testing/Data/EnSight/RectGrid_bin.case.md5
@@ -0,0 +1 @@
+f86cecc9cac3edccc3552222b2c217b1
diff --git a/Testing/Data/EnSight/RectGrid_bin.geo.md5 b/Testing/Data/EnSight/RectGrid_bin.geo.md5
new file mode 100644
index 0000000..c0cd02a
--- /dev/null
+++ b/Testing/Data/EnSight/RectGrid_bin.geo.md5
@@ -0,0 +1 @@
+4eb142c676c103133fdf00f4011fc2a3
diff --git a/Testing/Data/EnSight/RectGrid_bin_pd_scalars.md5 b/Testing/Data/EnSight/RectGrid_bin_pd_scalars.md5
new file mode 100644
index 0000000..27bb2c0
--- /dev/null
+++ b/Testing/Data/EnSight/RectGrid_bin_pd_scalars.md5
@@ -0,0 +1 @@
+341878d2903ca1c65c09183b2d63e238
diff --git a/Testing/Data/EnSight/RectGrid_bin_pd_vectors.md5 b/Testing/Data/EnSight/RectGrid_bin_pd_vectors.md5
new file mode 100644
index 0000000..f3d0620
--- /dev/null
+++ b/Testing/Data/EnSight/RectGrid_bin_pd_vectors.md5
@@ -0,0 +1 @@
+8ab7135eda5aa95c434558cbbca40438
diff --git a/Testing/Data/EnSight/TEST.case.md5 b/Testing/Data/EnSight/TEST.case.md5
new file mode 100644
index 0000000..fd3ca5f
--- /dev/null
+++ b/Testing/Data/EnSight/TEST.case.md5
@@ -0,0 +1 @@
+74149b9deb52ca43acca04343003f8d4
diff --git a/Testing/Data/EnSight/TEST_bin.case.md5 b/Testing/Data/EnSight/TEST_bin.case.md5
new file mode 100644
index 0000000..98fea71
--- /dev/null
+++ b/Testing/Data/EnSight/TEST_bin.case.md5
@@ -0,0 +1 @@
+2549858455b03977874d729fb46ccd18
diff --git a/Testing/Data/EnSight/blow1_ascii.case.md5 b/Testing/Data/EnSight/blow1_ascii.case.md5
new file mode 100644
index 0000000..d69757c
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_ascii.case.md5
@@ -0,0 +1 @@
+828b3c4e23f16d4369f304acdca32e8c
diff --git a/Testing/Data/EnSight/blow1_ascii.geo.md5 b/Testing/Data/EnSight/blow1_ascii.geo.md5
new file mode 100644
index 0000000..4f5f996
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_ascii.geo.md5
@@ -0,0 +1 @@
+305cbe0dd60e8b8997d5f2dfbb76dd70
diff --git a/Testing/Data/EnSight/blow1_ascii_cd_displacement.md5 b/Testing/Data/EnSight/blow1_ascii_cd_displacement.md5
new file mode 100644
index 0000000..4681a6c
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_ascii_cd_displacement.md5
@@ -0,0 +1 @@
+b9f61b805f85e551af8d7ef1beb8f5fe
diff --git a/Testing/Data/EnSight/blow1_ascii_cd_thickness.md5 b/Testing/Data/EnSight/blow1_ascii_cd_thickness.md5
new file mode 100644
index 0000000..2e47da8
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_ascii_cd_thickness.md5
@@ -0,0 +1 @@
+cda82848701b1deacfb235f052618b39
diff --git a/Testing/Data/EnSight/blow1_ascii_pd_displacement.md5 b/Testing/Data/EnSight/blow1_ascii_pd_displacement.md5
new file mode 100644
index 0000000..18c3922
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_ascii_pd_displacement.md5
@@ -0,0 +1 @@
+2ec684cd0366827b4dc400480fc9c1b7
diff --git a/Testing/Data/EnSight/blow1_ascii_pd_displacement1.md5 b/Testing/Data/EnSight/blow1_ascii_pd_displacement1.md5
new file mode 100644
index 0000000..bf8e1ea
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_ascii_pd_displacement1.md5
@@ -0,0 +1 @@
+ac2bb7ae932ccbc5e6dedc0a10694475
diff --git a/Testing/Data/EnSight/blow1_ascii_pd_thickness.md5 b/Testing/Data/EnSight/blow1_ascii_pd_thickness.md5
new file mode 100644
index 0000000..6b3d273
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_ascii_pd_thickness.md5
@@ -0,0 +1 @@
+a780425f1f98dec9ab832a74f5b019ba
diff --git a/Testing/Data/EnSight/blow1_ascii_pd_thickness1.md5 b/Testing/Data/EnSight/blow1_ascii_pd_thickness1.md5
new file mode 100644
index 0000000..84b8118
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_ascii_pd_thickness1.md5
@@ -0,0 +1 @@
+8d56e15c408678a8bf977e19db3b771c
diff --git a/Testing/Data/EnSight/blow1_bin.case.md5 b/Testing/Data/EnSight/blow1_bin.case.md5
new file mode 100644
index 0000000..67b32c5
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_bin.case.md5
@@ -0,0 +1 @@
+8034f6daccdb9a68a1fbb4493eaa4a47
diff --git a/Testing/Data/EnSight/blow1_bin.geo.md5 b/Testing/Data/EnSight/blow1_bin.geo.md5
new file mode 100644
index 0000000..8b949f5
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_bin.geo.md5
@@ -0,0 +1 @@
+58c536bccfe18a4dc4d157f35e821489
diff --git a/Testing/Data/EnSight/blow1_bin_cd_displacement.md5 b/Testing/Data/EnSight/blow1_bin_cd_displacement.md5
new file mode 100644
index 0000000..b08c926
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_bin_cd_displacement.md5
@@ -0,0 +1 @@
+2ae837fd63b90c241a110cadc49000ef
diff --git a/Testing/Data/EnSight/blow1_bin_cd_thickness.md5 b/Testing/Data/EnSight/blow1_bin_cd_thickness.md5
new file mode 100644
index 0000000..14a1c2f
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_bin_cd_thickness.md5
@@ -0,0 +1 @@
+5f92bf44ea4cdb3e4ddc2f25822a669f
diff --git a/Testing/Data/EnSight/blow1_bin_pd_displacement.md5 b/Testing/Data/EnSight/blow1_bin_pd_displacement.md5
new file mode 100644
index 0000000..8d25126
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_bin_pd_displacement.md5
@@ -0,0 +1 @@
+97770c79ccc2523eeeef51483a5f2ce7
diff --git a/Testing/Data/EnSight/blow1_bin_pd_displacement1.md5 b/Testing/Data/EnSight/blow1_bin_pd_displacement1.md5
new file mode 100644
index 0000000..7efed80
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_bin_pd_displacement1.md5
@@ -0,0 +1 @@
+b5657dcd9c75acab644c094a9fede5b4
diff --git a/Testing/Data/EnSight/blow1_bin_pd_thickness.md5 b/Testing/Data/EnSight/blow1_bin_pd_thickness.md5
new file mode 100644
index 0000000..01e2858
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_bin_pd_thickness.md5
@@ -0,0 +1 @@
+c94f391f1fda11962801c1df10781e7b
diff --git a/Testing/Data/EnSight/blow1_bin_pd_thickness1.md5 b/Testing/Data/EnSight/blow1_bin_pd_thickness1.md5
new file mode 100644
index 0000000..2dae5d1
--- /dev/null
+++ b/Testing/Data/EnSight/blow1_bin_pd_thickness1.md5
@@ -0,0 +1 @@
+13b58afd786e3a06d3af8d70b9a52fcc
diff --git a/Testing/Data/EnSight/blow2_ascii.case.md5 b/Testing/Data/EnSight/blow2_ascii.case.md5
new file mode 100644
index 0000000..426ef2e
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii.case.md5
@@ -0,0 +1 @@
+49b491381aa660079f2853a3fb975694
diff --git a/Testing/Data/EnSight/blow2_ascii.geo000.md5 b/Testing/Data/EnSight/blow2_ascii.geo000.md5
new file mode 100644
index 0000000..67b673a
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii.geo000.md5
@@ -0,0 +1 @@
+3815f97da6e1134e230b6a71728a41c9
diff --git a/Testing/Data/EnSight/blow2_ascii.geo001.md5 b/Testing/Data/EnSight/blow2_ascii.geo001.md5
new file mode 100644
index 0000000..a959bce
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii.geo001.md5
@@ -0,0 +1 @@
+01fb3eed0d86efbc9a99da1306eb3af0
diff --git a/Testing/Data/EnSight/blow2_ascii_cd_displacement000.md5 b/Testing/Data/EnSight/blow2_ascii_cd_displacement000.md5
new file mode 100644
index 0000000..074e870
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_cd_displacement000.md5
@@ -0,0 +1 @@
+bb736bcf31309d6d68312a326d9eb4a8
diff --git a/Testing/Data/EnSight/blow2_ascii_cd_displacement001.md5 b/Testing/Data/EnSight/blow2_ascii_cd_displacement001.md5
new file mode 100644
index 0000000..ddb13b9
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_cd_displacement001.md5
@@ -0,0 +1 @@
+4a6d03444f55a1f14ebbeae243712c33
diff --git a/Testing/Data/EnSight/blow2_ascii_cd_thickness000.md5 b/Testing/Data/EnSight/blow2_ascii_cd_thickness000.md5
new file mode 100644
index 0000000..274e6bc
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_cd_thickness000.md5
@@ -0,0 +1 @@
+02dad6275c941b25b5bde8b245e804c3
diff --git a/Testing/Data/EnSight/blow2_ascii_cd_thickness001.md5 b/Testing/Data/EnSight/blow2_ascii_cd_thickness001.md5
new file mode 100644
index 0000000..d4f249b
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_cd_thickness001.md5
@@ -0,0 +1 @@
+be408ab2e123fad632ba921cd5e56ec6
diff --git a/Testing/Data/EnSight/blow2_ascii_pd_displacement000.md5 b/Testing/Data/EnSight/blow2_ascii_pd_displacement000.md5
new file mode 100644
index 0000000..23020b6
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_pd_displacement000.md5
@@ -0,0 +1 @@
+8b6002e62eb9f8c3be73f7776bb4ea54
diff --git a/Testing/Data/EnSight/blow2_ascii_pd_displacement001.md5 b/Testing/Data/EnSight/blow2_ascii_pd_displacement001.md5
new file mode 100644
index 0000000..1080bb4
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_pd_displacement001.md5
@@ -0,0 +1 @@
+6bd1eb9478078a0ff0b76aa095d585ba
diff --git a/Testing/Data/EnSight/blow2_ascii_pd_displacement1000.md5 b/Testing/Data/EnSight/blow2_ascii_pd_displacement1000.md5
new file mode 100644
index 0000000..13fa522
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_pd_displacement1000.md5
@@ -0,0 +1 @@
+b520d9fb7670df2ceb69ba4bf167d388
diff --git a/Testing/Data/EnSight/blow2_ascii_pd_displacement1001.md5 b/Testing/Data/EnSight/blow2_ascii_pd_displacement1001.md5
new file mode 100644
index 0000000..13fa522
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_pd_displacement1001.md5
@@ -0,0 +1 @@
+b520d9fb7670df2ceb69ba4bf167d388
diff --git a/Testing/Data/EnSight/blow2_ascii_pd_thickness000.md5 b/Testing/Data/EnSight/blow2_ascii_pd_thickness000.md5
new file mode 100644
index 0000000..8681ae5
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_pd_thickness000.md5
@@ -0,0 +1 @@
+1b6db7a88a533cadbd96c6e944d3c396
diff --git a/Testing/Data/EnSight/blow2_ascii_pd_thickness001.md5 b/Testing/Data/EnSight/blow2_ascii_pd_thickness001.md5
new file mode 100644
index 0000000..5491d40
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_pd_thickness001.md5
@@ -0,0 +1 @@
+507094b9a6d9d602f11b85c5979a78ad
diff --git a/Testing/Data/EnSight/blow2_ascii_pd_thickness1000.md5 b/Testing/Data/EnSight/blow2_ascii_pd_thickness1000.md5
new file mode 100644
index 0000000..98f42b7
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_pd_thickness1000.md5
@@ -0,0 +1 @@
+6d51e6f050f148f6b2d23c0a549f6f1f
diff --git a/Testing/Data/EnSight/blow2_ascii_pd_thickness1001.md5 b/Testing/Data/EnSight/blow2_ascii_pd_thickness1001.md5
new file mode 100644
index 0000000..98f42b7
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_ascii_pd_thickness1001.md5
@@ -0,0 +1 @@
+6d51e6f050f148f6b2d23c0a549f6f1f
diff --git a/Testing/Data/EnSight/blow2_bin.case.md5 b/Testing/Data/EnSight/blow2_bin.case.md5
new file mode 100644
index 0000000..77166dc
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin.case.md5
@@ -0,0 +1 @@
+c5f718dcf96fb8c1795b7f32d49bc6ba
diff --git a/Testing/Data/EnSight/blow2_bin.geo000.md5 b/Testing/Data/EnSight/blow2_bin.geo000.md5
new file mode 100644
index 0000000..092ea43
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin.geo000.md5
@@ -0,0 +1 @@
+22e6a3df3139c627e33e4ece207920f5
diff --git a/Testing/Data/EnSight/blow2_bin.geo001.md5 b/Testing/Data/EnSight/blow2_bin.geo001.md5
new file mode 100644
index 0000000..e723354
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin.geo001.md5
@@ -0,0 +1 @@
+512e3d9e8fd2be116093ff7f5685669d
diff --git a/Testing/Data/EnSight/blow2_bin_cd_displacement000.md5 b/Testing/Data/EnSight/blow2_bin_cd_displacement000.md5
new file mode 100644
index 0000000..8a14d75
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin_cd_displacement000.md5
@@ -0,0 +1 @@
+005b47be2bdf328a7477a8fb4307825f
diff --git a/Testing/Data/EnSight/blow2_bin_cd_displacement001.md5 b/Testing/Data/EnSight/blow2_bin_cd_displacement001.md5
new file mode 100644
index 0000000..995a641
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin_cd_displacement001.md5
@@ -0,0 +1 @@
+f910726228030952f34dfec953f196c4
diff --git a/Testing/Data/EnSight/blow2_bin_cd_thickness000.md5 b/Testing/Data/EnSight/blow2_bin_cd_thickness000.md5
new file mode 100644
index 0000000..47075a5
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin_cd_thickness000.md5
@@ -0,0 +1 @@
+447d0c22b94763604b5b67221b0b0d85
diff --git a/Testing/Data/EnSight/blow2_bin_cd_thickness001.md5 b/Testing/Data/EnSight/blow2_bin_cd_thickness001.md5
new file mode 100644
index 0000000..4539f6d
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin_cd_thickness001.md5
@@ -0,0 +1 @@
+a7692f1bbe4c43065fcfc0ef8de94c3f
diff --git a/Testing/Data/EnSight/blow2_bin_pd_displacement000.md5 b/Testing/Data/EnSight/blow2_bin_pd_displacement000.md5
new file mode 100644
index 0000000..07b9907
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin_pd_displacement000.md5
@@ -0,0 +1 @@
+6d58fc3dd9229ef1e78b095fb1bef46f
diff --git a/Testing/Data/EnSight/blow2_bin_pd_displacement001.md5 b/Testing/Data/EnSight/blow2_bin_pd_displacement001.md5
new file mode 100644
index 0000000..3cf277a
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin_pd_displacement001.md5
@@ -0,0 +1 @@
+45e81ce85e0e508ecebc3840d517dfd5
diff --git a/Testing/Data/EnSight/blow2_bin_pd_thickness000.md5 b/Testing/Data/EnSight/blow2_bin_pd_thickness000.md5
new file mode 100644
index 0000000..aede7f9
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin_pd_thickness000.md5
@@ -0,0 +1 @@
+c58291941d8b6c3b31393975279b0089
diff --git a/Testing/Data/EnSight/blow2_bin_pd_thickness001.md5 b/Testing/Data/EnSight/blow2_bin_pd_thickness001.md5
new file mode 100644
index 0000000..66a62c5
--- /dev/null
+++ b/Testing/Data/EnSight/blow2_bin_pd_thickness001.md5
@@ -0,0 +1 @@
+b9a0da0d347eac060c257f4d5d2cbaf4
diff --git a/Testing/Data/EnSight/blow3_bin.case.md5 b/Testing/Data/EnSight/blow3_bin.case.md5
new file mode 100644
index 0000000..73e6e1b
--- /dev/null
+++ b/Testing/Data/EnSight/blow3_bin.case.md5
@@ -0,0 +1 @@
+068b5eb6b50538d31c02f60a337384ff
diff --git a/Testing/Data/EnSight/blow3_bin.geo.md5 b/Testing/Data/EnSight/blow3_bin.geo.md5
new file mode 100644
index 0000000..02e64bd
--- /dev/null
+++ b/Testing/Data/EnSight/blow3_bin.geo.md5
@@ -0,0 +1 @@
+aca37e02d458d924d48fd8304d81bf1d
diff --git a/Testing/Data/EnSight/blow3_bin_cd_displacement.md5 b/Testing/Data/EnSight/blow3_bin_cd_displacement.md5
new file mode 100644
index 0000000..4cab515
--- /dev/null
+++ b/Testing/Data/EnSight/blow3_bin_cd_displacement.md5
@@ -0,0 +1 @@
+00ba7d9399c914edb4a6faa0b10ae7e5
diff --git a/Testing/Data/EnSight/blow3_bin_cd_thickness.md5 b/Testing/Data/EnSight/blow3_bin_cd_thickness.md5
new file mode 100644
index 0000000..e25d138
--- /dev/null
+++ b/Testing/Data/EnSight/blow3_bin_cd_thickness.md5
@@ -0,0 +1 @@
+112c37707868c4f01d4740dc9008d227
diff --git a/Testing/Data/EnSight/blow3_bin_pd_displacement.md5 b/Testing/Data/EnSight/blow3_bin_pd_displacement.md5
new file mode 100644
index 0000000..70c017c
--- /dev/null
+++ b/Testing/Data/EnSight/blow3_bin_pd_displacement.md5
@@ -0,0 +1 @@
+1e7451b69290f6d2317f261c649558d9
diff --git a/Testing/Data/EnSight/blow3_bin_pd_thickness.md5 b/Testing/Data/EnSight/blow3_bin_pd_thickness.md5
new file mode 100644
index 0000000..8e3c8c6
--- /dev/null
+++ b/Testing/Data/EnSight/blow3_bin_pd_thickness.md5
@@ -0,0 +1 @@
+b92d57a13f181a90588cc85de822c3df
diff --git a/Testing/Data/EnSight/blow4_bin.case.md5 b/Testing/Data/EnSight/blow4_bin.case.md5
new file mode 100644
index 0000000..59a00ca
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin.case.md5
@@ -0,0 +1 @@
+22d621f3454c35ab37c185fbd17326eb
diff --git a/Testing/Data/EnSight/blow4_bin.geo000.md5 b/Testing/Data/EnSight/blow4_bin.geo000.md5
new file mode 100644
index 0000000..4b87abe
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin.geo000.md5
@@ -0,0 +1 @@
+2dbfd4f6bb01ac023ecadeb27d21574d
diff --git a/Testing/Data/EnSight/blow4_bin.geo001.md5 b/Testing/Data/EnSight/blow4_bin.geo001.md5
new file mode 100644
index 0000000..2129b82
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin.geo001.md5
@@ -0,0 +1 @@
+6f17cd26295a79d2618789616e6530a1
diff --git a/Testing/Data/EnSight/blow4_bin_cd_displacement000.md5 b/Testing/Data/EnSight/blow4_bin_cd_displacement000.md5
new file mode 100644
index 0000000..e752b81
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin_cd_displacement000.md5
@@ -0,0 +1 @@
+24c50de45723dd07c8cf435a019ac995
diff --git a/Testing/Data/EnSight/blow4_bin_cd_displacement001.md5 b/Testing/Data/EnSight/blow4_bin_cd_displacement001.md5
new file mode 100644
index 0000000..026e759
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin_cd_displacement001.md5
@@ -0,0 +1 @@
+daed01696e2c49c616a0445b6f196968
diff --git a/Testing/Data/EnSight/blow4_bin_cd_thickness000.md5 b/Testing/Data/EnSight/blow4_bin_cd_thickness000.md5
new file mode 100644
index 0000000..91f7f7f
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin_cd_thickness000.md5
@@ -0,0 +1 @@
+91d1bcf2466fb6aa056c554710b4c998
diff --git a/Testing/Data/EnSight/blow4_bin_cd_thickness001.md5 b/Testing/Data/EnSight/blow4_bin_cd_thickness001.md5
new file mode 100644
index 0000000..ad851ec
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin_cd_thickness001.md5
@@ -0,0 +1 @@
+af24f071a7661216ca7b03e769d6f1b6
diff --git a/Testing/Data/EnSight/blow4_bin_pd_displacement000.md5 b/Testing/Data/EnSight/blow4_bin_pd_displacement000.md5
new file mode 100644
index 0000000..07f8b52
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin_pd_displacement000.md5
@@ -0,0 +1 @@
+def1e176f3e062f44ef8739a71005c5c
diff --git a/Testing/Data/EnSight/blow4_bin_pd_displacement001.md5 b/Testing/Data/EnSight/blow4_bin_pd_displacement001.md5
new file mode 100644
index 0000000..082cb64
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin_pd_displacement001.md5
@@ -0,0 +1 @@
+924ccabc8a4d1b500ff421c1d4eebf43
diff --git a/Testing/Data/EnSight/blow4_bin_pd_thickness000.md5 b/Testing/Data/EnSight/blow4_bin_pd_thickness000.md5
new file mode 100644
index 0000000..c1f3fb2
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin_pd_thickness000.md5
@@ -0,0 +1 @@
+fec52b0686276e0927f67d5e529cd206
diff --git a/Testing/Data/EnSight/blow4_bin_pd_thickness001.md5 b/Testing/Data/EnSight/blow4_bin_pd_thickness001.md5
new file mode 100644
index 0000000..699f277
--- /dev/null
+++ b/Testing/Data/EnSight/blow4_bin_pd_thickness001.md5
@@ -0,0 +1 @@
+7c8b8dec0cae1cc8d4b45cb3eb2682e6
diff --git a/Testing/Data/EnSight/blow5_ascii.case.md5 b/Testing/Data/EnSight/blow5_ascii.case.md5
new file mode 100644
index 0000000..74a754e
--- /dev/null
+++ b/Testing/Data/EnSight/blow5_ascii.case.md5
@@ -0,0 +1 @@
+c1882348b21060b289cbcfac1f222035
diff --git a/Testing/Data/EnSight/blow5_ascii.geo.md5 b/Testing/Data/EnSight/blow5_ascii.geo.md5
new file mode 100644
index 0000000..50b8006
--- /dev/null
+++ b/Testing/Data/EnSight/blow5_ascii.geo.md5
@@ -0,0 +1 @@
+5e4acdf4f6f8dc68d76c3565e7e0bc48
diff --git a/Testing/Data/EnSight/blow5_ascii_cd_displacement.md5 b/Testing/Data/EnSight/blow5_ascii_cd_displacement.md5
new file mode 100644
index 0000000..c03d2f2
--- /dev/null
+++ b/Testing/Data/EnSight/blow5_ascii_cd_displacement.md5
@@ -0,0 +1 @@
+e16f03bf103c1ede598a68ee162d3e14
diff --git a/Testing/Data/EnSight/blow5_ascii_cd_thickness.md5 b/Testing/Data/EnSight/blow5_ascii_cd_thickness.md5
new file mode 100644
index 0000000..2e63f60
--- /dev/null
+++ b/Testing/Data/EnSight/blow5_ascii_cd_thickness.md5
@@ -0,0 +1 @@
+1056b910564ca79e5c0d03355c586c9a
diff --git a/Testing/Data/EnSight/blow5_ascii_pd_displacement.md5 b/Testing/Data/EnSight/blow5_ascii_pd_displacement.md5
new file mode 100644
index 0000000..f8f2df1
--- /dev/null
+++ b/Testing/Data/EnSight/blow5_ascii_pd_displacement.md5
@@ -0,0 +1 @@
+aba8e9c3ced6a43f1faf8ccc601d348d
diff --git a/Testing/Data/EnSight/blow5_ascii_pd_thickness.md5 b/Testing/Data/EnSight/blow5_ascii_pd_thickness.md5
new file mode 100644
index 0000000..4b3b0bb
--- /dev/null
+++ b/Testing/Data/EnSight/blow5_ascii_pd_thickness.md5
@@ -0,0 +1 @@
+4dd7a22f113a1f8c5b4bbd15cf4c3d88
diff --git a/Testing/Data/EnSight/elements.ECsca_i.md5 b/Testing/Data/EnSight/elements.ECsca_i.md5
new file mode 100644
index 0000000..f18dab5
--- /dev/null
+++ b/Testing/Data/EnSight/elements.ECsca_i.md5
@@ -0,0 +1 @@
+ce58be76b30a853e0701c3166465f2af
diff --git a/Testing/Data/EnSight/elements.ECsca_r.md5 b/Testing/Data/EnSight/elements.ECsca_r.md5
new file mode 100644
index 0000000..9eeb163
--- /dev/null
+++ b/Testing/Data/EnSight/elements.ECsca_r.md5
@@ -0,0 +1 @@
+bd440880dd6849780c97770be06e7cdd
diff --git a/Testing/Data/EnSight/elements.ECvec_i.md5 b/Testing/Data/EnSight/elements.ECvec_i.md5
new file mode 100644
index 0000000..55bc744
--- /dev/null
+++ b/Testing/Data/EnSight/elements.ECvec_i.md5
@@ -0,0 +1 @@
+5567d10b061b18e7adc7ad949d9182bf
diff --git a/Testing/Data/EnSight/elements.ECvec_r.md5 b/Testing/Data/EnSight/elements.ECvec_r.md5
new file mode 100644
index 0000000..7183d1a
--- /dev/null
+++ b/Testing/Data/EnSight/elements.ECvec_r.md5
@@ -0,0 +1 @@
+fa359accd98d788fa00c46a931d60d56
diff --git a/Testing/Data/EnSight/elements.Esca.md5 b/Testing/Data/EnSight/elements.Esca.md5
new file mode 100644
index 0000000..6602a63
--- /dev/null
+++ b/Testing/Data/EnSight/elements.Esca.md5
@@ -0,0 +1 @@
+6a47571576f4535df7333bed82c73346
diff --git a/Testing/Data/EnSight/elements.Eten.md5 b/Testing/Data/EnSight/elements.Eten.md5
new file mode 100644
index 0000000..2d33d0d
--- /dev/null
+++ b/Testing/Data/EnSight/elements.Eten.md5
@@ -0,0 +1 @@
+9a2f082a075d0d64d31d0365e6c2beba
diff --git a/Testing/Data/EnSight/elements.Evec.md5 b/Testing/Data/EnSight/elements.Evec.md5
new file mode 100644
index 0000000..44a01cb
--- /dev/null
+++ b/Testing/Data/EnSight/elements.Evec.md5
@@ -0,0 +1 @@
+73f384710f3ad876c40018dcbd04dd2a
diff --git a/Testing/Data/EnSight/elements.NCsca_i.md5 b/Testing/Data/EnSight/elements.NCsca_i.md5
new file mode 100644
index 0000000..dbdf143
--- /dev/null
+++ b/Testing/Data/EnSight/elements.NCsca_i.md5
@@ -0,0 +1 @@
+7059be9ff257755008a979f34868e4f2
diff --git a/Testing/Data/EnSight/elements.NCsca_r.md5 b/Testing/Data/EnSight/elements.NCsca_r.md5
new file mode 100644
index 0000000..09705b1
--- /dev/null
+++ b/Testing/Data/EnSight/elements.NCsca_r.md5
@@ -0,0 +1 @@
+88ed9f48baae5dd354844d730ab48fbd
diff --git a/Testing/Data/EnSight/elements.NCvec_i.md5 b/Testing/Data/EnSight/elements.NCvec_i.md5
new file mode 100644
index 0000000..2122428
--- /dev/null
+++ b/Testing/Data/EnSight/elements.NCvec_i.md5
@@ -0,0 +1 @@
+09b3048f15455e7aa9e394af5c2935a6
diff --git a/Testing/Data/EnSight/elements.NCvec_r.md5 b/Testing/Data/EnSight/elements.NCvec_r.md5
new file mode 100644
index 0000000..2ad1783
--- /dev/null
+++ b/Testing/Data/EnSight/elements.NCvec_r.md5
@@ -0,0 +1 @@
+f62f7e10330b3cfdfc91916f27fee578
diff --git a/Testing/Data/EnSight/elements.Nsca.md5 b/Testing/Data/EnSight/elements.Nsca.md5
new file mode 100644
index 0000000..8f15fb4
--- /dev/null
+++ b/Testing/Data/EnSight/elements.Nsca.md5
@@ -0,0 +1 @@
+323b1c644fefb04a528661d623dd91fc
diff --git a/Testing/Data/EnSight/elements.Nten.md5 b/Testing/Data/EnSight/elements.Nten.md5
new file mode 100644
index 0000000..b463ad0
--- /dev/null
+++ b/Testing/Data/EnSight/elements.Nten.md5
@@ -0,0 +1 @@
+9020edb05d2a98257a91652d56ea100f
diff --git a/Testing/Data/EnSight/elements.Nvec.md5 b/Testing/Data/EnSight/elements.Nvec.md5
new file mode 100644
index 0000000..1750afa
--- /dev/null
+++ b/Testing/Data/EnSight/elements.Nvec.md5
@@ -0,0 +1 @@
+9c6613093a2757d9dc1390f6714251e2
diff --git a/Testing/Data/EnSight/elements.case.md5 b/Testing/Data/EnSight/elements.case.md5
new file mode 100644
index 0000000..7b02c15
--- /dev/null
+++ b/Testing/Data/EnSight/elements.case.md5
@@ -0,0 +1 @@
+f9eae208579b36c20a59191f843e32b8
diff --git a/Testing/Data/EnSight/elements.geo.md5 b/Testing/Data/EnSight/elements.geo.md5
new file mode 100644
index 0000000..4bc51db
--- /dev/null
+++ b/Testing/Data/EnSight/elements.geo.md5
@@ -0,0 +1 @@
+55e3f8546e3270fea931a901dc3d7a4a
diff --git a/Testing/Data/EnSight/elements.sos.md5 b/Testing/Data/EnSight/elements.sos.md5
new file mode 100644
index 0000000..40d64eb
--- /dev/null
+++ b/Testing/Data/EnSight/elements.sos.md5
@@ -0,0 +1 @@
+2d6ce791b6c0356a2e7c002a07637396
diff --git a/Testing/Data/EnSight/elements6.ECsca_i.md5 b/Testing/Data/EnSight/elements6.ECsca_i.md5
new file mode 100644
index 0000000..5f2176d
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.ECsca_i.md5
@@ -0,0 +1 @@
+c1680445bd525200a3f6152f05041d4e
diff --git a/Testing/Data/EnSight/elements6.ECsca_r.md5 b/Testing/Data/EnSight/elements6.ECsca_r.md5
new file mode 100644
index 0000000..ff49c1f
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.ECsca_r.md5
@@ -0,0 +1 @@
+879a0a9b32e6bb4af543fb8e4acf6454
diff --git a/Testing/Data/EnSight/elements6.ECvec_i.md5 b/Testing/Data/EnSight/elements6.ECvec_i.md5
new file mode 100644
index 0000000..c829c63
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.ECvec_i.md5
@@ -0,0 +1 @@
+8755c94bde79c1453e3aecadd7bb5bc0
diff --git a/Testing/Data/EnSight/elements6.ECvec_r.md5 b/Testing/Data/EnSight/elements6.ECvec_r.md5
new file mode 100644
index 0000000..78b6ead
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.ECvec_r.md5
@@ -0,0 +1 @@
+52dea97e11183a9a0093bb60a14389da
diff --git a/Testing/Data/EnSight/elements6.Esca.md5 b/Testing/Data/EnSight/elements6.Esca.md5
new file mode 100644
index 0000000..896c754
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.Esca.md5
@@ -0,0 +1 @@
+bfe94b63a2f61a551342009f52867336
diff --git a/Testing/Data/EnSight/elements6.Eten.md5 b/Testing/Data/EnSight/elements6.Eten.md5
new file mode 100644
index 0000000..65808af
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.Eten.md5
@@ -0,0 +1 @@
+cfb47431b056a1b28ca2155c3cdfecf9
diff --git a/Testing/Data/EnSight/elements6.Evec.md5 b/Testing/Data/EnSight/elements6.Evec.md5
new file mode 100644
index 0000000..69c462a
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.Evec.md5
@@ -0,0 +1 @@
+b40a36055c5068b21db6c64a19606838
diff --git a/Testing/Data/EnSight/elements6.NCsca_i.md5 b/Testing/Data/EnSight/elements6.NCsca_i.md5
new file mode 100644
index 0000000..7612311
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.NCsca_i.md5
@@ -0,0 +1 @@
+f627b5f8a54aef9884036ef723e7ce58
diff --git a/Testing/Data/EnSight/elements6.NCsca_r.md5 b/Testing/Data/EnSight/elements6.NCsca_r.md5
new file mode 100644
index 0000000..6d6d411
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.NCsca_r.md5
@@ -0,0 +1 @@
+b5c008abe232fdad2f001243d01bd6b5
diff --git a/Testing/Data/EnSight/elements6.NCvec_i.md5 b/Testing/Data/EnSight/elements6.NCvec_i.md5
new file mode 100644
index 0000000..af0f938
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.NCvec_i.md5
@@ -0,0 +1 @@
+7bde228af2579d9bf0ce8ecdf891d7f6
diff --git a/Testing/Data/EnSight/elements6.NCvec_r.md5 b/Testing/Data/EnSight/elements6.NCvec_r.md5
new file mode 100644
index 0000000..dc8eec2
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.NCvec_r.md5
@@ -0,0 +1 @@
+1727d0a9cbfebef640c33f92aa7cf2a3
diff --git a/Testing/Data/EnSight/elements6.Nsca.md5 b/Testing/Data/EnSight/elements6.Nsca.md5
new file mode 100644
index 0000000..e8ef6c3
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.Nsca.md5
@@ -0,0 +1 @@
+b90e7fcf3b7c7855cc1b07e82be3e587
diff --git a/Testing/Data/EnSight/elements6.Nten.md5 b/Testing/Data/EnSight/elements6.Nten.md5
new file mode 100644
index 0000000..17dca1a
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.Nten.md5
@@ -0,0 +1 @@
+9dccca2c13365258020fe802db25b30f
diff --git a/Testing/Data/EnSight/elements6.Nvec.md5 b/Testing/Data/EnSight/elements6.Nvec.md5
new file mode 100644
index 0000000..cf205b6
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.Nvec.md5
@@ -0,0 +1 @@
+0e36ddf705ec9b1201c6580d57be6e9b
diff --git a/Testing/Data/EnSight/elements6.case.md5 b/Testing/Data/EnSight/elements6.case.md5
new file mode 100644
index 0000000..d7763e8
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.case.md5
@@ -0,0 +1 @@
+4b9baecea44daaf0cd06126cb8dd0272
diff --git a/Testing/Data/EnSight/elements6.geo.md5 b/Testing/Data/EnSight/elements6.geo.md5
new file mode 100644
index 0000000..716bb27
--- /dev/null
+++ b/Testing/Data/EnSight/elements6.geo.md5
@@ -0,0 +1 @@
+e4b0a5f43a107cfd6ae9bfd682178f47
diff --git a/Testing/Data/EnSight/ironProt_ascii.case.md5 b/Testing/Data/EnSight/ironProt_ascii.case.md5
new file mode 100644
index 0000000..79d66a4
--- /dev/null
+++ b/Testing/Data/EnSight/ironProt_ascii.case.md5
@@ -0,0 +1 @@
+adbb944d5355733150086fd73e6d9801
diff --git a/Testing/Data/EnSight/ironProt_ascii.geo.md5 b/Testing/Data/EnSight/ironProt_ascii.geo.md5
new file mode 100644
index 0000000..42268e5
--- /dev/null
+++ b/Testing/Data/EnSight/ironProt_ascii.geo.md5
@@ -0,0 +1 @@
+a682ca169acf9a3cda1c00002b759e8d
diff --git a/Testing/Data/EnSight/ironProt_ascii_pd_scalars.md5 b/Testing/Data/EnSight/ironProt_ascii_pd_scalars.md5
new file mode 100644
index 0000000..f85fd46
--- /dev/null
+++ b/Testing/Data/EnSight/ironProt_ascii_pd_scalars.md5
@@ -0,0 +1 @@
+9155aeb1ee4f2ea67bc7f5ae0065da02
diff --git a/Testing/Data/EnSight/ironProt_bin.case.md5 b/Testing/Data/EnSight/ironProt_bin.case.md5
new file mode 100644
index 0000000..611a21e
--- /dev/null
+++ b/Testing/Data/EnSight/ironProt_bin.case.md5
@@ -0,0 +1 @@
+248b0510ac3c402f79fe2e04fa5160ef
diff --git a/Testing/Data/EnSight/ironProt_bin.geo.md5 b/Testing/Data/EnSight/ironProt_bin.geo.md5
new file mode 100644
index 0000000..7c058a5
--- /dev/null
+++ b/Testing/Data/EnSight/ironProt_bin.geo.md5
@@ -0,0 +1 @@
+1eb2ad49f884febef6067bc466cb0745
diff --git a/Testing/Data/EnSight/ironProt_bin_pd_scalars.md5 b/Testing/Data/EnSight/ironProt_bin_pd_scalars.md5
new file mode 100644
index 0000000..81e6b61
--- /dev/null
+++ b/Testing/Data/EnSight/ironProt_bin_pd_scalars.md5
@@ -0,0 +1 @@
+4aa27fb269490800b3f25d5934046397
diff --git a/Testing/Data/EnSight/mandelbrot.sos.md5 b/Testing/Data/EnSight/mandelbrot.sos.md5
new file mode 100644
index 0000000..ad302ab
--- /dev/null
+++ b/Testing/Data/EnSight/mandelbrot.sos.md5
@@ -0,0 +1 @@
+674b641f06df056984eaf233892b75df
diff --git a/Testing/Data/EnSight/mandelbrot1.case.md5 b/Testing/Data/EnSight/mandelbrot1.case.md5
new file mode 100644
index 0000000..ea68498
--- /dev/null
+++ b/Testing/Data/EnSight/mandelbrot1.case.md5
@@ -0,0 +1 @@
+3b29561e070ceba8e9c1dc3c53220489
diff --git a/Testing/Data/EnSight/mandelbrot1.geo.md5 b/Testing/Data/EnSight/mandelbrot1.geo.md5
new file mode 100644
index 0000000..3587ff1
--- /dev/null
+++ b/Testing/Data/EnSight/mandelbrot1.geo.md5
@@ -0,0 +1 @@
+121647650719f95dbb20d3efaec15292
diff --git a/Testing/Data/EnSight/mandelbrot1_pd_Iterations.md5 b/Testing/Data/EnSight/mandelbrot1_pd_Iterations.md5
new file mode 100644
index 0000000..a627b2d
--- /dev/null
+++ b/Testing/Data/EnSight/mandelbrot1_pd_Iterations.md5
@@ -0,0 +1 @@
+ea558afed703cb84b6363559c4964875
diff --git a/Testing/Data/EnSight/mandelbrot2.case.md5 b/Testing/Data/EnSight/mandelbrot2.case.md5
new file mode 100644
index 0000000..dd59b71
--- /dev/null
+++ b/Testing/Data/EnSight/mandelbrot2.case.md5
@@ -0,0 +1 @@
+00d2efba0581fb7d436bcc45a9acedcb
diff --git a/Testing/Data/EnSight/mandelbrot2.geo.md5 b/Testing/Data/EnSight/mandelbrot2.geo.md5
new file mode 100644
index 0000000..b19303e
--- /dev/null
+++ b/Testing/Data/EnSight/mandelbrot2.geo.md5
@@ -0,0 +1 @@
+77e9890e5be0c7116feed84b39376fff
diff --git a/Testing/Data/EnSight/mandelbrot2_pd_Iterations.md5 b/Testing/Data/EnSight/mandelbrot2_pd_Iterations.md5
new file mode 100644
index 0000000..16a4365
--- /dev/null
+++ b/Testing/Data/EnSight/mandelbrot2_pd_Iterations.md5
@@ -0,0 +1 @@
+b3b1e2961c06099ebda6ea93d7cd1804
diff --git a/Testing/Data/EnSight/naca.bin.case.md5 b/Testing/Data/EnSight/naca.bin.case.md5
new file mode 100644
index 0000000..3938cef
--- /dev/null
+++ b/Testing/Data/EnSight/naca.bin.case.md5
@@ -0,0 +1 @@
+36134ceed9f18d15de214a8493cec75a
diff --git a/Testing/Data/EnSight/naca.gold.bin.DENS_1.md5 b/Testing/Data/EnSight/naca.gold.bin.DENS_1.md5
new file mode 100644
index 0000000..d770b9b
--- /dev/null
+++ b/Testing/Data/EnSight/naca.gold.bin.DENS_1.md5
@@ -0,0 +1 @@
+b5061634d21e830fe66516c6b89e5662
diff --git a/Testing/Data/EnSight/naca.gold.bin.DENS_3.md5 b/Testing/Data/EnSight/naca.gold.bin.DENS_3.md5
new file mode 100644
index 0000000..7164e6a
--- /dev/null
+++ b/Testing/Data/EnSight/naca.gold.bin.DENS_3.md5
@@ -0,0 +1 @@
+26135d03215b05b7e8f91cf53e6a3fbc
diff --git a/Testing/Data/EnSight/naca.gold.bin.geo.md5 b/Testing/Data/EnSight/naca.gold.bin.geo.md5
new file mode 100644
index 0000000..a0168a4
--- /dev/null
+++ b/Testing/Data/EnSight/naca.gold.bin.geo.md5
@@ -0,0 +1 @@
+61152f16995bf25298d54c83911c8d2e
diff --git a/Testing/Data/EnSight/office6_bin.case.md5 b/Testing/Data/EnSight/office6_bin.case.md5
new file mode 100644
index 0000000..dbcdfd8
--- /dev/null
+++ b/Testing/Data/EnSight/office6_bin.case.md5
@@ -0,0 +1 @@
+c8d49e2345cc2dfb58af4dd88768234f
diff --git a/Testing/Data/EnSight/office6_bin.geo.md5 b/Testing/Data/EnSight/office6_bin.geo.md5
new file mode 100644
index 0000000..e2a6afc
--- /dev/null
+++ b/Testing/Data/EnSight/office6_bin.geo.md5
@@ -0,0 +1 @@
+6ed30ce03def6cae54320fe263c4cc9f
diff --git a/Testing/Data/EnSight/office6_bin_pd_scalars.md5 b/Testing/Data/EnSight/office6_bin_pd_scalars.md5
new file mode 100644
index 0000000..c9256e8
--- /dev/null
+++ b/Testing/Data/EnSight/office6_bin_pd_scalars.md5
@@ -0,0 +1 @@
+fa3ea469f12f22f76052c1f8df11d1a4
diff --git a/Testing/Data/EnSight/office6_bin_pd_vectors.md5 b/Testing/Data/EnSight/office6_bin_pd_vectors.md5
new file mode 100644
index 0000000..fba19a5
--- /dev/null
+++ b/Testing/Data/EnSight/office6_bin_pd_vectors.md5
@@ -0,0 +1 @@
+21dd2a01c4155290b25afe3050a6986e
diff --git a/Testing/Data/EnSight/office_ascii.case.md5 b/Testing/Data/EnSight/office_ascii.case.md5
new file mode 100644
index 0000000..3ef9249
--- /dev/null
+++ b/Testing/Data/EnSight/office_ascii.case.md5
@@ -0,0 +1 @@
+7b22afe6039ea53701b8f5ccd24d16fa
diff --git a/Testing/Data/EnSight/office_ascii.geo.md5 b/Testing/Data/EnSight/office_ascii.geo.md5
new file mode 100644
index 0000000..9a50151
--- /dev/null
+++ b/Testing/Data/EnSight/office_ascii.geo.md5
@@ -0,0 +1 @@
+f7d536fcdce7a26f2a5ab3d031e16a90
diff --git a/Testing/Data/EnSight/office_ascii_pd_scalars.md5 b/Testing/Data/EnSight/office_ascii_pd_scalars.md5
new file mode 100644
index 0000000..bb85e36
--- /dev/null
+++ b/Testing/Data/EnSight/office_ascii_pd_scalars.md5
@@ -0,0 +1 @@
+cd66e613f9bafc3b843104769dd32fc2
diff --git a/Testing/Data/EnSight/office_ascii_pd_vectors.md5 b/Testing/Data/EnSight/office_ascii_pd_vectors.md5
new file mode 100644
index 0000000..1ea0bd1
--- /dev/null
+++ b/Testing/Data/EnSight/office_ascii_pd_vectors.md5
@@ -0,0 +1 @@
+07e9756d22106414d23302ed7e29901e
diff --git a/Testing/Data/EnSight/office_bin.case.md5 b/Testing/Data/EnSight/office_bin.case.md5
new file mode 100644
index 0000000..d33288f
--- /dev/null
+++ b/Testing/Data/EnSight/office_bin.case.md5
@@ -0,0 +1 @@
+028d919d776d0005433e05f7231fab55
diff --git a/Testing/Data/EnSight/office_bin.geo.md5 b/Testing/Data/EnSight/office_bin.geo.md5
new file mode 100644
index 0000000..ab92c6f
--- /dev/null
+++ b/Testing/Data/EnSight/office_bin.geo.md5
@@ -0,0 +1 @@
+2dbd32bbe0235571466934c656a44bc1
diff --git a/Testing/Data/EnSight/office_bin_pd_scalars.md5 b/Testing/Data/EnSight/office_bin_pd_scalars.md5
new file mode 100644
index 0000000..984d683
--- /dev/null
+++ b/Testing/Data/EnSight/office_bin_pd_scalars.md5
@@ -0,0 +1 @@
+07634797dd4928d98003f9aa8953b7de
diff --git a/Testing/Data/EnSight/office_bin_pd_vectors.md5 b/Testing/Data/EnSight/office_bin_pd_vectors.md5
new file mode 100644
index 0000000..a981735
--- /dev/null
+++ b/Testing/Data/EnSight/office_bin_pd_vectors.md5
@@ -0,0 +1 @@
+c65e107e8009021488c8165e8072effc
diff --git a/Testing/Data/EnSight/test.extr_pressure.0001.md5 b/Testing/Data/EnSight/test.extr_pressure.0001.md5
new file mode 100644
index 0000000..bd2c0ea
--- /dev/null
+++ b/Testing/Data/EnSight/test.extr_pressure.0001.md5
@@ -0,0 +1 @@
+906021c37c8bf75ba086a2474178e99a
diff --git a/Testing/Data/EnSight/test.extr_velocity.0001.md5 b/Testing/Data/EnSight/test.extr_velocity.0001.md5
new file mode 100644
index 0000000..2f48fec
--- /dev/null
+++ b/Testing/Data/EnSight/test.extr_velocity.0001.md5
@@ -0,0 +1 @@
+872895f4c54683ab09c4dfb855650d40
diff --git a/Testing/Data/EnSight/test.geo.md5 b/Testing/Data/EnSight/test.geo.md5
new file mode 100644
index 0000000..44faf8f
--- /dev/null
+++ b/Testing/Data/EnSight/test.geo.md5
@@ -0,0 +1 @@
+ce6e3812abfcd3e9c74c99cbd496c480
diff --git a/Testing/Data/EnSight/test.pressure.0001.md5 b/Testing/Data/EnSight/test.pressure.0001.md5
new file mode 100644
index 0000000..2a388a3
--- /dev/null
+++ b/Testing/Data/EnSight/test.pressure.0001.md5
@@ -0,0 +1 @@
+27eb7371552fa3c0524c9c2113d45a01
diff --git a/Testing/Data/EnSight/test.velocity.0001.md5 b/Testing/Data/EnSight/test.velocity.0001.md5
new file mode 100644
index 0000000..155d121
--- /dev/null
+++ b/Testing/Data/EnSight/test.velocity.0001.md5
@@ -0,0 +1 @@
+7e6f892c73533be07f0fe84ca042e405
diff --git a/Testing/Data/EnSight/test_bin.extr_pressure.0001.md5 b/Testing/Data/EnSight/test_bin.extr_pressure.0001.md5
new file mode 100644
index 0000000..0795882
--- /dev/null
+++ b/Testing/Data/EnSight/test_bin.extr_pressure.0001.md5
@@ -0,0 +1 @@
+6835106230023d2c4db527defc7f61ac
diff --git a/Testing/Data/EnSight/test_bin.extr_velocity.0001.md5 b/Testing/Data/EnSight/test_bin.extr_velocity.0001.md5
new file mode 100644
index 0000000..65fa2c5
--- /dev/null
+++ b/Testing/Data/EnSight/test_bin.extr_velocity.0001.md5
@@ -0,0 +1 @@
+de8c2b4e8197914fc18f1023c5967db2
diff --git a/Testing/Data/EnSight/test_bin.geo.md5 b/Testing/Data/EnSight/test_bin.geo.md5
new file mode 100644
index 0000000..bea2787
--- /dev/null
+++ b/Testing/Data/EnSight/test_bin.geo.md5
@@ -0,0 +1 @@
+10363bcce6ae894626b515901f1323bd
diff --git a/Testing/Data/EnSight/test_bin.pressure.0001.md5 b/Testing/Data/EnSight/test_bin.pressure.0001.md5
new file mode 100644
index 0000000..a5a6243
--- /dev/null
+++ b/Testing/Data/EnSight/test_bin.pressure.0001.md5
@@ -0,0 +1 @@
+510b03bf7c288e55eb2b42bc43c16076
diff --git a/Testing/Data/EnSight/test_bin.velocity.0001.md5 b/Testing/Data/EnSight/test_bin.velocity.0001.md5
new file mode 100644
index 0000000..fa89470
--- /dev/null
+++ b/Testing/Data/EnSight/test_bin.velocity.0001.md5
@@ -0,0 +1 @@
+ad307b7e623e0cad2a60821230ce3344
diff --git a/Testing/Data/EventRecording.log.md5 b/Testing/Data/EventRecording.log.md5
new file mode 100644
index 0000000..f6a614e
--- /dev/null
+++ b/Testing/Data/EventRecording.log.md5
@@ -0,0 +1 @@
+7888f4c1003a6f9f4371da336338e481
diff --git a/Testing/Data/GIS/countries.dbf.md5 b/Testing/Data/GIS/countries.dbf.md5
new file mode 100644
index 0000000..85ad44b
--- /dev/null
+++ b/Testing/Data/GIS/countries.dbf.md5
@@ -0,0 +1 @@
+f0dc2a6b6e856e34255fa3b214cc161d
diff --git a/Testing/Data/GIS/countries.prj.md5 b/Testing/Data/GIS/countries.prj.md5
new file mode 100644
index 0000000..44e10c6
--- /dev/null
+++ b/Testing/Data/GIS/countries.prj.md5
@@ -0,0 +1 @@
+c742bee3d4edfc2948a2ad08de1790a5
diff --git a/Testing/Data/GIS/countries.shp.md5 b/Testing/Data/GIS/countries.shp.md5
new file mode 100644
index 0000000..accef94
--- /dev/null
+++ b/Testing/Data/GIS/countries.shp.md5
@@ -0,0 +1 @@
+a099abb0233818ca44b72e334adbcac1
diff --git a/Testing/Data/GIS/countries.shx.md5 b/Testing/Data/GIS/countries.shx.md5
new file mode 100644
index 0000000..c34b17d
--- /dev/null
+++ b/Testing/Data/GIS/countries.shx.md5
@@ -0,0 +1 @@
+65fd24129d97b91a5571fce25e9f0aae
diff --git a/Testing/Data/GreenCircle.png.md5 b/Testing/Data/GreenCircle.png.md5
new file mode 100644
index 0000000..204264c
--- /dev/null
+++ b/Testing/Data/GreenCircle.png.md5
@@ -0,0 +1 @@
+f3370b52ee1d8c4ab00eafad51a1f0ab
diff --git a/Testing/Data/HeadMRVolume.mhd.md5 b/Testing/Data/HeadMRVolume.mhd.md5
new file mode 100644
index 0000000..310f9e1
--- /dev/null
+++ b/Testing/Data/HeadMRVolume.mhd.md5
@@ -0,0 +1 @@
+a5fa9f74933e47b4657df479daa902f1
diff --git a/Testing/Data/HeadMRVolume.raw.md5 b/Testing/Data/HeadMRVolume.raw.md5
new file mode 100644
index 0000000..4497481
--- /dev/null
+++ b/Testing/Data/HeadMRVolume.raw.md5
@@ -0,0 +1 @@
+dd13d3cbfd470c6e3cd2ddb2302fd466
diff --git a/Testing/Data/IncOctPntLocData.dat.md5 b/Testing/Data/IncOctPntLocData.dat.md5
new file mode 100644
index 0000000..4349cd9
--- /dev/null
+++ b/Testing/Data/IncOctPntLocData.dat.md5
@@ -0,0 +1 @@
+a89d5be7fd24a252a6525dc670cc431b
diff --git a/Testing/Data/IncOctPntLocResult.dat.md5 b/Testing/Data/IncOctPntLocResult.dat.md5
new file mode 100644
index 0000000..fb5decd
--- /dev/null
+++ b/Testing/Data/IncOctPntLocResult.dat.md5
@@ -0,0 +1 @@
+0f6c4bfe60bfd5d56c65329cb0a3148c
diff --git a/Testing/Data/Infovis/DaveDS_-_Sketchy.ttf.md5 b/Testing/Data/Infovis/DaveDS_-_Sketchy.ttf.md5
new file mode 100644
index 0000000..25bb37b
--- /dev/null
+++ b/Testing/Data/Infovis/DaveDS_-_Sketchy.ttf.md5
@@ -0,0 +1 @@
+25fe6c1baee4df8d06b617e450bb2e95
diff --git a/Testing/Data/Infovis/DimacsGraphs/iso_pattern.gr.md5 b/Testing/Data/Infovis/DimacsGraphs/iso_pattern.gr.md5
new file mode 100644
index 0000000..9440383
--- /dev/null
+++ b/Testing/Data/Infovis/DimacsGraphs/iso_pattern.gr.md5
@@ -0,0 +1 @@
+c53bd0308f4c834093d2452b56e39935
diff --git a/Testing/Data/Infovis/DimacsGraphs/iso_target.gr.md5 b/Testing/Data/Infovis/DimacsGraphs/iso_target.gr.md5
new file mode 100644
index 0000000..77cf610
--- /dev/null
+++ b/Testing/Data/Infovis/DimacsGraphs/iso_target.gr.md5
@@ -0,0 +1 @@
+f04030f7250f451b96f9d1160ba9a4d8
diff --git a/Testing/Data/Infovis/DimacsGraphs/maxflow.max.md5 b/Testing/Data/Infovis/DimacsGraphs/maxflow.max.md5
new file mode 100644
index 0000000..c2f340a
--- /dev/null
+++ b/Testing/Data/Infovis/DimacsGraphs/maxflow.max.md5
@@ -0,0 +1 @@
+9d0575f35ff04d066294cef5c526d7f1
diff --git a/Testing/Data/Infovis/Images/NE2_ps_bath.jpg.md5 b/Testing/Data/Infovis/Images/NE2_ps_bath.jpg.md5
new file mode 100644
index 0000000..160a421
--- /dev/null
+++ b/Testing/Data/Infovis/Images/NE2_ps_bath.jpg.md5
@@ -0,0 +1 @@
+5d631b1ac8d00726f02ebbad9d2b8c7f
diff --git a/Testing/Data/Infovis/Images/NE2_ps_bath.png.md5 b/Testing/Data/Infovis/Images/NE2_ps_bath.png.md5
new file mode 100644
index 0000000..cfacecd
--- /dev/null
+++ b/Testing/Data/Infovis/Images/NE2_ps_bath.png.md5
@@ -0,0 +1 @@
+24e8ad30ad2b4949cc39639df9647fdc
diff --git a/Testing/Data/Infovis/Images/NE2_ps_bath_transparent.png.md5 b/Testing/Data/Infovis/Images/NE2_ps_bath_transparent.png.md5
new file mode 100644
index 0000000..0a2a231
--- /dev/null
+++ b/Testing/Data/Infovis/Images/NE2_ps_bath_transparent.png.md5
@@ -0,0 +1 @@
+0d4ecaff1509952e636e049c1679bd95
diff --git a/Testing/Data/Infovis/PintassilgoPrints_-_Talvez_Assim.ttf.md5 b/Testing/Data/Infovis/PintassilgoPrints_-_Talvez_Assim.ttf.md5
new file mode 100644
index 0000000..4e14722
--- /dev/null
+++ b/Testing/Data/Infovis/PintassilgoPrints_-_Talvez_Assim.ttf.md5
@@ -0,0 +1 @@
+63932de36489636d1382f3514415d8f3
diff --git a/Testing/Data/Infovis/SQLite/SmallEmailTest.db.md5 b/Testing/Data/Infovis/SQLite/SmallEmailTest.db.md5
new file mode 100644
index 0000000..6148d11
--- /dev/null
+++ b/Testing/Data/Infovis/SQLite/SmallEmailTest.db.md5
@@ -0,0 +1 @@
+0331f3c8d436aab5b1c0150e25d6e528
diff --git a/Testing/Data/Infovis/SQLite/SmallTestGraphs.db.md5 b/Testing/Data/Infovis/SQLite/SmallTestGraphs.db.md5
new file mode 100644
index 0000000..e8bd637
--- /dev/null
+++ b/Testing/Data/Infovis/SQLite/SmallTestGraphs.db.md5
@@ -0,0 +1 @@
+4fa0daaccd330ed4e877c5bf7b4278c5
diff --git a/Testing/Data/Infovis/SQLite/ports_protocols.db.md5 b/Testing/Data/Infovis/SQLite/ports_protocols.db.md5
new file mode 100644
index 0000000..7e0b286
--- /dev/null
+++ b/Testing/Data/Infovis/SQLite/ports_protocols.db.md5
@@ -0,0 +1 @@
+3fe5110cf1c4e295c5969a14502c1e9b
diff --git a/Testing/Data/Infovis/SQLite/temperatures.db.md5 b/Testing/Data/Infovis/SQLite/temperatures.db.md5
new file mode 100644
index 0000000..297367e
--- /dev/null
+++ b/Testing/Data/Infovis/SQLite/temperatures.db.md5
@@ -0,0 +1 @@
+6aa97560868105eac1e4e055defd7a2a
diff --git a/Testing/Data/Infovis/XML/smalltest.xml.md5 b/Testing/Data/Infovis/XML/smalltest.xml.md5
new file mode 100644
index 0000000..b83c326
--- /dev/null
+++ b/Testing/Data/Infovis/XML/smalltest.xml.md5
@@ -0,0 +1 @@
+c640b439262927a44c2999bb7a505feb
diff --git a/Testing/Data/Infovis/XML/vtkclasses.xml.md5 b/Testing/Data/Infovis/XML/vtkclasses.xml.md5
new file mode 100644
index 0000000..b5a1b1d
--- /dev/null
+++ b/Testing/Data/Infovis/XML/vtkclasses.xml.md5
@@ -0,0 +1 @@
+61da22343525741f1e7a2bd6088cffd5
diff --git a/Testing/Data/Infovis/XML/vtkfiles.xml.md5 b/Testing/Data/Infovis/XML/vtkfiles.xml.md5
new file mode 100644
index 0000000..8d48add
--- /dev/null
+++ b/Testing/Data/Infovis/XML/vtkfiles.xml.md5
@@ -0,0 +1 @@
+d237dc38d0797d42469de1fa81a7ce9d
diff --git a/Testing/Data/Infovis/XML/vtklibrary.xml.md5 b/Testing/Data/Infovis/XML/vtklibrary.xml.md5
new file mode 100644
index 0000000..f0d8f14
--- /dev/null
+++ b/Testing/Data/Infovis/XML/vtklibrary.xml.md5
@@ -0,0 +1 @@
+972dfa959f072e974bfc9f6cd84a4015
diff --git a/Testing/Data/Infovis/authors-tabletographtest.csv.md5 b/Testing/Data/Infovis/authors-tabletographtest.csv.md5
new file mode 100644
index 0000000..c31f943
--- /dev/null
+++ b/Testing/Data/Infovis/authors-tabletographtest.csv.md5
@@ -0,0 +1 @@
+02ec182388e0816d53c24ff6b2e3359c
diff --git a/Testing/Data/Infovis/authors.csv.md5 b/Testing/Data/Infovis/authors.csv.md5
new file mode 100644
index 0000000..46f5f3e
--- /dev/null
+++ b/Testing/Data/Infovis/authors.csv.md5
@@ -0,0 +1 @@
+f6b451b74bdd07d7b5b410c84bc2421f
diff --git a/Testing/Data/Infovis/classes.csv.md5 b/Testing/Data/Infovis/classes.csv.md5
new file mode 100644
index 0000000..012a231
--- /dev/null
+++ b/Testing/Data/Infovis/classes.csv.md5
@@ -0,0 +1 @@
+5c1be1a962cde3a13de6d74a8cb54938
diff --git a/Testing/Data/Infovis/clustered-graph.tlp.md5 b/Testing/Data/Infovis/clustered-graph.tlp.md5
new file mode 100644
index 0000000..8b96894
--- /dev/null
+++ b/Testing/Data/Infovis/clustered-graph.tlp.md5
@@ -0,0 +1 @@
+95144c709632760f3a2d772cc7a2b9cf
diff --git a/Testing/Data/Infovis/document-term.csv.md5 b/Testing/Data/Infovis/document-term.csv.md5
new file mode 100644
index 0000000..4405e73
--- /dev/null
+++ b/Testing/Data/Infovis/document-term.csv.md5
@@ -0,0 +1 @@
+5a0bceb1af58ba67e2f12f163fcf4950
diff --git a/Testing/Data/Infovis/eg1.ris.md5 b/Testing/Data/Infovis/eg1.ris.md5
new file mode 100644
index 0000000..be2222a
--- /dev/null
+++ b/Testing/Data/Infovis/eg1.ris.md5
@@ -0,0 +1 @@
+9f52367d1f428cae172608b31112f889
diff --git a/Testing/Data/Infovis/eg2.isi.md5 b/Testing/Data/Infovis/eg2.isi.md5
new file mode 100644
index 0000000..9569d5c
--- /dev/null
+++ b/Testing/Data/Infovis/eg2.isi.md5
@@ -0,0 +1 @@
+a8f27be561abd5c28d6d6cd0f7d54118
diff --git a/Testing/Data/Infovis/fruit.csv.md5 b/Testing/Data/Infovis/fruit.csv.md5
new file mode 100644
index 0000000..90ecc4d
--- /dev/null
+++ b/Testing/Data/Infovis/fruit.csv.md5
@@ -0,0 +1 @@
+6344f6f1acb4a313f27056cbfbd63f8d
diff --git a/Testing/Data/Infovis/fsm.gml.md5 b/Testing/Data/Infovis/fsm.gml.md5
new file mode 100644
index 0000000..6e6b7e1
--- /dev/null
+++ b/Testing/Data/Infovis/fsm.gml.md5
@@ -0,0 +1 @@
+8e1ff7472165d3a791030287d0f32178
diff --git a/Testing/Data/Infovis/kcore_edges.csv.md5 b/Testing/Data/Infovis/kcore_edges.csv.md5
new file mode 100644
index 0000000..64768f2
--- /dev/null
+++ b/Testing/Data/Infovis/kcore_edges.csv.md5
@@ -0,0 +1 @@
+daecaa53e737f711d4599ea168669f4d
diff --git a/Testing/Data/Infovis/kcore_verts.csv.md5 b/Testing/Data/Infovis/kcore_verts.csv.md5
new file mode 100644
index 0000000..051b01e
--- /dev/null
+++ b/Testing/Data/Infovis/kcore_verts.csv.md5
@@ -0,0 +1 @@
+bbf1a0e328f540d829910dc9b121118d
diff --git a/Testing/Data/Infovis/martyb_-_Ridiculous.ttf.md5 b/Testing/Data/Infovis/martyb_-_Ridiculous.ttf.md5
new file mode 100644
index 0000000..7849c01
--- /dev/null
+++ b/Testing/Data/Infovis/martyb_-_Ridiculous.ttf.md5
@@ -0,0 +1 @@
+ae6370aa928431144eff4415148fa4c8
diff --git a/Testing/Data/Infovis/matrix.csv.md5 b/Testing/Data/Infovis/matrix.csv.md5
new file mode 100644
index 0000000..ac9f3e5
--- /dev/null
+++ b/Testing/Data/Infovis/matrix.csv.md5
@@ -0,0 +1 @@
+4f97959b04f46d7b52835abbe220bf10
diff --git a/Testing/Data/Infovis/merge1.csv.md5 b/Testing/Data/Infovis/merge1.csv.md5
new file mode 100644
index 0000000..d53e606
--- /dev/null
+++ b/Testing/Data/Infovis/merge1.csv.md5
@@ -0,0 +1 @@
+ebdfd8b5fbfa49f108abdd9306ebccff
diff --git a/Testing/Data/Infovis/merge2.csv.md5 b/Testing/Data/Infovis/merge2.csv.md5
new file mode 100644
index 0000000..48b6e73
--- /dev/null
+++ b/Testing/Data/Infovis/merge2.csv.md5
@@ -0,0 +1 @@
+2b2b802f7b60d2711cc639225c0239a2
diff --git a/Testing/Data/Infovis/multi_tree.tre.md5 b/Testing/Data/Infovis/multi_tree.tre.md5
new file mode 100644
index 0000000..53d9e52
--- /dev/null
+++ b/Testing/Data/Infovis/multi_tree.tre.md5
@@ -0,0 +1 @@
+0a582f426dbc9090ae22a51fea4766c7
diff --git a/Testing/Data/Infovis/otu_table.biom.md5 b/Testing/Data/Infovis/otu_table.biom.md5
new file mode 100644
index 0000000..93f7f38
--- /dev/null
+++ b/Testing/Data/Infovis/otu_table.biom.md5
@@ -0,0 +1 @@
+1f88c896b0b33f60cfe13fd194209a3e
diff --git a/Testing/Data/Infovis/person-document.csv.md5 b/Testing/Data/Infovis/person-document.csv.md5
new file mode 100644
index 0000000..0273afc
--- /dev/null
+++ b/Testing/Data/Infovis/person-document.csv.md5
@@ -0,0 +1 @@
+c343d5168de8a1cddcdc321e4f047b39
diff --git a/Testing/Data/Infovis/publications.csv.md5 b/Testing/Data/Infovis/publications.csv.md5
new file mode 100644
index 0000000..3e4d125
--- /dev/null
+++ b/Testing/Data/Infovis/publications.csv.md5
@@ -0,0 +1 @@
+a16cf8694e6f86a3b19122822d3f9c40
diff --git a/Testing/Data/Infovis/rep_set.tre.md5 b/Testing/Data/Infovis/rep_set.tre.md5
new file mode 100644
index 0000000..660cddc
--- /dev/null
+++ b/Testing/Data/Infovis/rep_set.tre.md5
@@ -0,0 +1 @@
+fdd11da9dcf4b2d55187c6021bd0d181
diff --git a/Testing/Data/Infovis/small.graph.md5 b/Testing/Data/Infovis/small.graph.md5
new file mode 100644
index 0000000..56349d8
--- /dev/null
+++ b/Testing/Data/Infovis/small.graph.md5
@@ -0,0 +1 @@
+45cdbd8f61735c89fb6b7922b13f2b0c
diff --git a/Testing/Data/Infovis/small.tlp.md5 b/Testing/Data/Infovis/small.tlp.md5
new file mode 100644
index 0000000..8930d1b
--- /dev/null
+++ b/Testing/Data/Infovis/small.tlp.md5
@@ -0,0 +1 @@
+e006ee13798c31e1a711785c1b7e4a9c
diff --git a/Testing/Data/Infovis/term-concept.csv.md5 b/Testing/Data/Infovis/term-concept.csv.md5
new file mode 100644
index 0000000..2cb7045
--- /dev/null
+++ b/Testing/Data/Infovis/term-concept.csv.md5
@@ -0,0 +1 @@
+133654776cd61d1a9b4cfae00d640072
diff --git a/Testing/Data/LSDyna/foam/foam.d3plot.md5 b/Testing/Data/LSDyna/foam/foam.d3plot.md5
new file mode 100644
index 0000000..80cc4e8
--- /dev/null
+++ b/Testing/Data/LSDyna/foam/foam.d3plot.md5
@@ -0,0 +1 @@
+40bb98e6e8f7f177e5cfc1e36f2b7d46
diff --git a/Testing/Data/LSDyna/foam/foam.d3plot01.md5 b/Testing/Data/LSDyna/foam/foam.d3plot01.md5
new file mode 100644
index 0000000..3f2c62e
--- /dev/null
+++ b/Testing/Data/LSDyna/foam/foam.d3plot01.md5
@@ -0,0 +1 @@
+e81d665114b9b2ae71b8301715151082
diff --git a/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3plot.md5 b/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3plot.md5
new file mode 100644
index 0000000..b56a54a
--- /dev/null
+++ b/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3plot.md5
@@ -0,0 +1 @@
+108a797cbfa3e461d8058dee1e41d88e
diff --git a/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3plot01.md5 b/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3plot01.md5
new file mode 100644
index 0000000..a296832
--- /dev/null
+++ b/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3plot01.md5
@@ -0,0 +1 @@
+d85fba645714229b6f71bafb7550d1cb
diff --git a/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3thdt.md5 b/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3thdt.md5
new file mode 100644
index 0000000..19946e4
--- /dev/null
+++ b/Testing/Data/LSDyna/hemi.draw/hemi_draw.d3thdt.md5
@@ -0,0 +1 @@
+4d82c82604c1a1c79fed271fd37e4c44
diff --git a/Testing/Data/LSDyna/hemi.draw/hemi_draw.glstat.md5 b/Testing/Data/LSDyna/hemi.draw/hemi_draw.glstat.md5
new file mode 100644
index 0000000..be94c2e
--- /dev/null
+++ b/Testing/Data/LSDyna/hemi.draw/hemi_draw.glstat.md5
@@ -0,0 +1 @@
+9051e66381ae8138456cc9d78a40b39e
diff --git a/Testing/Data/LSDyna/hemi.draw/hemi_draw.k.md5 b/Testing/Data/LSDyna/hemi.draw/hemi_draw.k.md5
new file mode 100644
index 0000000..e7c4b0e
--- /dev/null
+++ b/Testing/Data/LSDyna/hemi.draw/hemi_draw.k.md5
@@ -0,0 +1 @@
+f80fe30e72c80c6342a31623135098d1
diff --git a/Testing/Data/MFIXReader/BUB01.RES.md5 b/Testing/Data/MFIXReader/BUB01.RES.md5
new file mode 100644
index 0000000..62dbd06
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.RES.md5
@@ -0,0 +1 @@
+898d7d3d25c5eb075b247114b115c4ab
diff --git a/Testing/Data/MFIXReader/BUB01.SP1.md5 b/Testing/Data/MFIXReader/BUB01.SP1.md5
new file mode 100644
index 0000000..0299d69
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP1.md5
@@ -0,0 +1 @@
+51293ec33a5906800d45419d9138eced
diff --git a/Testing/Data/MFIXReader/BUB01.SP2.md5 b/Testing/Data/MFIXReader/BUB01.SP2.md5
new file mode 100644
index 0000000..f307f70
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP2.md5
@@ -0,0 +1 @@
+d359db6bdee28549a702181861b764e9
diff --git a/Testing/Data/MFIXReader/BUB01.SP3.md5 b/Testing/Data/MFIXReader/BUB01.SP3.md5
new file mode 100644
index 0000000..8d50017
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP3.md5
@@ -0,0 +1 @@
+573721a463eda19a8357dd3b46acc8e4
diff --git a/Testing/Data/MFIXReader/BUB01.SP4.md5 b/Testing/Data/MFIXReader/BUB01.SP4.md5
new file mode 100644
index 0000000..53fd104
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP4.md5
@@ -0,0 +1 @@
+07fd5ea3af04b746667a1a6dd28e2c48
diff --git a/Testing/Data/MFIXReader/BUB01.SP5.md5 b/Testing/Data/MFIXReader/BUB01.SP5.md5
new file mode 100644
index 0000000..c2f80e9
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP5.md5
@@ -0,0 +1 @@
+67f7c30095246d93154c8fa1a308649b
diff --git a/Testing/Data/MFIXReader/BUB01.SP6.md5 b/Testing/Data/MFIXReader/BUB01.SP6.md5
new file mode 100644
index 0000000..21c6a55
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP6.md5
@@ -0,0 +1 @@
+413072379d645b9684830b12d73cfdff
diff --git a/Testing/Data/MFIXReader/BUB01.SP7.md5 b/Testing/Data/MFIXReader/BUB01.SP7.md5
new file mode 100644
index 0000000..827dfb1
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP7.md5
@@ -0,0 +1 @@
+670ff03f8463a9e72ebc17f19ebde171
diff --git a/Testing/Data/MFIXReader/BUB01.SP8.md5 b/Testing/Data/MFIXReader/BUB01.SP8.md5
new file mode 100644
index 0000000..1c03f35
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP8.md5
@@ -0,0 +1 @@
+efcabfbaf1f022de2611b93edb8f7d6d
diff --git a/Testing/Data/MFIXReader/BUB01.SP9.md5 b/Testing/Data/MFIXReader/BUB01.SP9.md5
new file mode 100644
index 0000000..70f10bb
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SP9.md5
@@ -0,0 +1 @@
+093312d734f8d4d4b524e3ec787fa335
diff --git a/Testing/Data/MFIXReader/BUB01.SPA.md5 b/Testing/Data/MFIXReader/BUB01.SPA.md5
new file mode 100644
index 0000000..1a4ba90
--- /dev/null
+++ b/Testing/Data/MFIXReader/BUB01.SPA.md5
@@ -0,0 +1 @@
+0e92e4059d495ebad3cffe539f722f36
diff --git a/Testing/Data/MetaIO/ChestCT-SHORT.mha.md5 b/Testing/Data/MetaIO/ChestCT-SHORT.mha.md5
new file mode 100644
index 0000000..4002a47
--- /dev/null
+++ b/Testing/Data/MetaIO/ChestCT-SHORT.mha.md5
@@ -0,0 +1 @@
+ab82975ffaeb9606ec554bdc418bdb38
diff --git a/Testing/Data/NE2_ps_bath_small.jpg.md5 b/Testing/Data/NE2_ps_bath_small.jpg.md5
new file mode 100644
index 0000000..c4b4f28
--- /dev/null
+++ b/Testing/Data/NE2_ps_bath_small.jpg.md5
@@ -0,0 +1 @@
+26666a46d6ca90de86702c801d11dfe7
diff --git a/Testing/Data/NetCDF/CAMReaderConnectivity.nc.md5 b/Testing/Data/NetCDF/CAMReaderConnectivity.nc.md5
new file mode 100644
index 0000000..1c57773
--- /dev/null
+++ b/Testing/Data/NetCDF/CAMReaderConnectivity.nc.md5
@@ -0,0 +1 @@
+3aa316e3131e67ed564bbe66ba42639f
diff --git a/Testing/Data/NetCDF/CAMReaderPoints.nc.md5 b/Testing/Data/NetCDF/CAMReaderPoints.nc.md5
new file mode 100644
index 0000000..470fe22
--- /dev/null
+++ b/Testing/Data/NetCDF/CAMReaderPoints.nc.md5
@@ -0,0 +1 @@
+aac0a58a5dce0088ce32afc69fe90a30
diff --git a/Testing/Data/NetCDF/MPASReader.nc.md5 b/Testing/Data/NetCDF/MPASReader.nc.md5
new file mode 100644
index 0000000..a55e5f5
--- /dev/null
+++ b/Testing/Data/NetCDF/MPASReader.nc.md5
@@ -0,0 +1 @@
+fa1ab3c7818d777d1c0ce19388e29a9a
diff --git a/Testing/Data/NetCDF/test.pop.nc.md5 b/Testing/Data/NetCDF/test.pop.nc.md5
new file mode 100644
index 0000000..ddc3d8f
--- /dev/null
+++ b/Testing/Data/NetCDF/test.pop.nc.md5
@@ -0,0 +1 @@
+dad5348a8cf6ea42cc2e0edb1ee7993a
diff --git a/Testing/Data/OpenFOAM/cavity/0.5/U.md5 b/Testing/Data/OpenFOAM/cavity/0.5/U.md5
new file mode 100644
index 0000000..9f8a90d
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/0.5/U.md5
@@ -0,0 +1 @@
+2c30e5974e4e78b843346176692ac42f
diff --git a/Testing/Data/OpenFOAM/cavity/0.5/p.md5 b/Testing/Data/OpenFOAM/cavity/0.5/p.md5
new file mode 100644
index 0000000..9f36856
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/0.5/p.md5
@@ -0,0 +1 @@
+a7d3a6e9e477a5dd2ad77dd2860667a9
diff --git a/Testing/Data/OpenFOAM/cavity/0.5/phi.md5 b/Testing/Data/OpenFOAM/cavity/0.5/phi.md5
new file mode 100644
index 0000000..c6a1f9b
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/0.5/phi.md5
@@ -0,0 +1 @@
+7595fc186b1e3085cf2a6dd539513d3d
diff --git a/Testing/Data/OpenFOAM/cavity/0.5/uniform/time.md5 b/Testing/Data/OpenFOAM/cavity/0.5/uniform/time.md5
new file mode 100644
index 0000000..bad03a3
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/0.5/uniform/time.md5
@@ -0,0 +1 @@
+9726fa9516bca06b34a153e6100d038e
diff --git a/Testing/Data/OpenFOAM/cavity/0/U.md5 b/Testing/Data/OpenFOAM/cavity/0/U.md5
new file mode 100644
index 0000000..1df972a
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/0/U.md5
@@ -0,0 +1 @@
+20ba46c478adc2f23bbd8f3485acf255
diff --git a/Testing/Data/OpenFOAM/cavity/0/p.md5 b/Testing/Data/OpenFOAM/cavity/0/p.md5
new file mode 100644
index 0000000..b9a84d8
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/0/p.md5
@@ -0,0 +1 @@
+c2b0351edefd1a9fe7911e45abedb72f
diff --git a/Testing/Data/OpenFOAM/cavity/1.5/U.md5 b/Testing/Data/OpenFOAM/cavity/1.5/U.md5
new file mode 100644
index 0000000..7dcc484
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/1.5/U.md5
@@ -0,0 +1 @@
+c444d6251cfb08b2392be26a33d395f1
diff --git a/Testing/Data/OpenFOAM/cavity/1.5/p.md5 b/Testing/Data/OpenFOAM/cavity/1.5/p.md5
new file mode 100644
index 0000000..c9da739
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/1.5/p.md5
@@ -0,0 +1 @@
+1fafe79d35980b7055b56e81a345a51d
diff --git a/Testing/Data/OpenFOAM/cavity/1.5/phi.md5 b/Testing/Data/OpenFOAM/cavity/1.5/phi.md5
new file mode 100644
index 0000000..8a73286
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/1.5/phi.md5
@@ -0,0 +1 @@
+64a336b8261a2339513857204e105095
diff --git a/Testing/Data/OpenFOAM/cavity/1.5/uniform/time.md5 b/Testing/Data/OpenFOAM/cavity/1.5/uniform/time.md5
new file mode 100644
index 0000000..21d66c0
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/1.5/uniform/time.md5
@@ -0,0 +1 @@
+8a8a4c937ae5b3d12d4be237d58d4284
diff --git a/Testing/Data/OpenFOAM/cavity/1/U.md5 b/Testing/Data/OpenFOAM/cavity/1/U.md5
new file mode 100644
index 0000000..4c0e420
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/1/U.md5
@@ -0,0 +1 @@
+5f90b349da746f00f20f2efd2d5ecd6a
diff --git a/Testing/Data/OpenFOAM/cavity/1/p.md5 b/Testing/Data/OpenFOAM/cavity/1/p.md5
new file mode 100644
index 0000000..6d9d1d3
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/1/p.md5
@@ -0,0 +1 @@
+5eaaa8d2c73c1888754b31b1a8c4f63c
diff --git a/Testing/Data/OpenFOAM/cavity/1/phi.md5 b/Testing/Data/OpenFOAM/cavity/1/phi.md5
new file mode 100644
index 0000000..a82f26a
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/1/phi.md5
@@ -0,0 +1 @@
+3c0d1a193cf59d242171d823249418f9
diff --git a/Testing/Data/OpenFOAM/cavity/1/uniform/time.md5 b/Testing/Data/OpenFOAM/cavity/1/uniform/time.md5
new file mode 100644
index 0000000..342c5f7
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/1/uniform/time.md5
@@ -0,0 +1 @@
+dbe48668a3f179ea5caf3193d37608ce
diff --git a/Testing/Data/OpenFOAM/cavity/2.5/U.md5 b/Testing/Data/OpenFOAM/cavity/2.5/U.md5
new file mode 100644
index 0000000..7a0dd80
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/2.5/U.md5
@@ -0,0 +1 @@
+d4bce4042e9a9e6591706be61b16ef0a
diff --git a/Testing/Data/OpenFOAM/cavity/2.5/p.md5 b/Testing/Data/OpenFOAM/cavity/2.5/p.md5
new file mode 100644
index 0000000..5ad8f44
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/2.5/p.md5
@@ -0,0 +1 @@
+8d3f3fcdd6a9374508a94937f3b9b95f
diff --git a/Testing/Data/OpenFOAM/cavity/2.5/phi.md5 b/Testing/Data/OpenFOAM/cavity/2.5/phi.md5
new file mode 100644
index 0000000..366d242
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/2.5/phi.md5
@@ -0,0 +1 @@
+bbedde123cb5a6be39021510a899d646
diff --git a/Testing/Data/OpenFOAM/cavity/2.5/uniform/time.md5 b/Testing/Data/OpenFOAM/cavity/2.5/uniform/time.md5
new file mode 100644
index 0000000..f02f194
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/2.5/uniform/time.md5
@@ -0,0 +1 @@
+e8b3b205c6a61fa2cc8afa92c45bd745
diff --git a/Testing/Data/OpenFOAM/cavity/2/U.md5 b/Testing/Data/OpenFOAM/cavity/2/U.md5
new file mode 100644
index 0000000..749c3bc
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/2/U.md5
@@ -0,0 +1 @@
+31b04b63a35d264289aa66f95a3534d9
diff --git a/Testing/Data/OpenFOAM/cavity/2/p.md5 b/Testing/Data/OpenFOAM/cavity/2/p.md5
new file mode 100644
index 0000000..ba67463
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/2/p.md5
@@ -0,0 +1 @@
+af835e44b41806054775428d4e365b6a
diff --git a/Testing/Data/OpenFOAM/cavity/2/phi.md5 b/Testing/Data/OpenFOAM/cavity/2/phi.md5
new file mode 100644
index 0000000..9e3236e
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/2/phi.md5
@@ -0,0 +1 @@
+f5aed65f5a8bda516599b2f1949858dc
diff --git a/Testing/Data/OpenFOAM/cavity/2/uniform/time.md5 b/Testing/Data/OpenFOAM/cavity/2/uniform/time.md5
new file mode 100644
index 0000000..f785885
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/2/uniform/time.md5
@@ -0,0 +1 @@
+699fa2bc65ca4948b579bf3e464eddd6
diff --git a/Testing/Data/OpenFOAM/cavity/cavity.foam.md5 b/Testing/Data/OpenFOAM/cavity/cavity.foam.md5
new file mode 100644
index 0000000..df9edc4
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/cavity.foam.md5
@@ -0,0 +1 @@
+d41d8cd98f00b204e9800998ecf8427e
diff --git a/Testing/Data/OpenFOAM/cavity/constant/polyMesh/blockMeshDict.md5 b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/blockMeshDict.md5
new file mode 100644
index 0000000..6cc73d7
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/blockMeshDict.md5
@@ -0,0 +1 @@
+e5d915c937a3086252511a71004d50ef
diff --git a/Testing/Data/OpenFOAM/cavity/constant/polyMesh/boundary.md5 b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/boundary.md5
new file mode 100644
index 0000000..af660a3
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/boundary.md5
@@ -0,0 +1 @@
+23c7319b0f8797407a492feec1ae093f
diff --git a/Testing/Data/OpenFOAM/cavity/constant/polyMesh/faces.md5 b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/faces.md5
new file mode 100644
index 0000000..630bb61
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/faces.md5
@@ -0,0 +1 @@
+523a4f1a54c4896d2bd4ae45db121f66
diff --git a/Testing/Data/OpenFOAM/cavity/constant/polyMesh/neighbour.md5 b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/neighbour.md5
new file mode 100644
index 0000000..589103e
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/neighbour.md5
@@ -0,0 +1 @@
+cf5934389882acbfb58318d7a8abc9b8
diff --git a/Testing/Data/OpenFOAM/cavity/constant/polyMesh/owner.md5 b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/owner.md5
new file mode 100644
index 0000000..9183a7e
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/owner.md5
@@ -0,0 +1 @@
+d20a94b5bf338cc56ac6ff976260fb91
diff --git a/Testing/Data/OpenFOAM/cavity/constant/polyMesh/points.md5 b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/points.md5
new file mode 100644
index 0000000..47bd5e7
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/constant/polyMesh/points.md5
@@ -0,0 +1 @@
+c38d968ecea904353319aa0449041029
diff --git a/Testing/Data/OpenFOAM/cavity/constant/transportProperties.md5 b/Testing/Data/OpenFOAM/cavity/constant/transportProperties.md5
new file mode 100644
index 0000000..1cafa79
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/constant/transportProperties.md5
@@ -0,0 +1 @@
+c81eeacc586ecfeb8a15f9c554ed67e6
diff --git a/Testing/Data/OpenFOAM/cavity/system/controlDict.md5 b/Testing/Data/OpenFOAM/cavity/system/controlDict.md5
new file mode 100644
index 0000000..a6bdebc
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/system/controlDict.md5
@@ -0,0 +1 @@
+5284569a63b9ec76871c6095bc5379b3
diff --git a/Testing/Data/OpenFOAM/cavity/system/fvSchemes.md5 b/Testing/Data/OpenFOAM/cavity/system/fvSchemes.md5
new file mode 100644
index 0000000..9286e94
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/system/fvSchemes.md5
@@ -0,0 +1 @@
+26cbbe6639b288385cd325387a14aa5e
diff --git a/Testing/Data/OpenFOAM/cavity/system/fvSolution.md5 b/Testing/Data/OpenFOAM/cavity/system/fvSolution.md5
new file mode 100644
index 0000000..1f0196f
--- /dev/null
+++ b/Testing/Data/OpenFOAM/cavity/system/fvSolution.md5
@@ -0,0 +1 @@
+1a0e074307766ee99cec8335b839ea71
diff --git a/Testing/Data/Particles.raw.md5 b/Testing/Data/Particles.raw.md5
new file mode 100644
index 0000000..727d1ae
--- /dev/null
+++ b/Testing/Data/Particles.raw.md5
@@ -0,0 +1 @@
+ecf31eab1246c778380ec80001a70905
diff --git a/Testing/Data/PentaHexa.vtk.md5 b/Testing/Data/PentaHexa.vtk.md5
new file mode 100644
index 0000000..063530f
--- /dev/null
+++ b/Testing/Data/PentaHexa.vtk.md5
@@ -0,0 +1 @@
+83d94c94d1adc143f749d46a56be9157
diff --git a/Testing/Data/Quadratic/CylinderLinear.vtk.md5 b/Testing/Data/Quadratic/CylinderLinear.vtk.md5
new file mode 100644
index 0000000..6348185
--- /dev/null
+++ b/Testing/Data/Quadratic/CylinderLinear.vtk.md5
@@ -0,0 +1 @@
+26dbc2fc247de63cd8a2318a46272a08
diff --git a/Testing/Data/Quadratic/CylinderQuadratic.vtk.md5 b/Testing/Data/Quadratic/CylinderQuadratic.vtk.md5
new file mode 100644
index 0000000..0e57f53
--- /dev/null
+++ b/Testing/Data/Quadratic/CylinderQuadratic.vtk.md5
@@ -0,0 +1 @@
+3cf5204fb6e984c3b123e39c3c39057b
diff --git a/Testing/Data/RectGrid2.vtk.md5 b/Testing/Data/RectGrid2.vtk.md5
new file mode 100644
index 0000000..7c8f58b
--- /dev/null
+++ b/Testing/Data/RectGrid2.vtk.md5
@@ -0,0 +1 @@
+16e8ea7cf94f6605c7d19de88ea079e5
diff --git a/Testing/Data/RedCircle.png.md5 b/Testing/Data/RedCircle.png.md5
new file mode 100644
index 0000000..b22b96c
--- /dev/null
+++ b/Testing/Data/RedCircle.png.md5
@@ -0,0 +1 @@
+115bb7287bd76e98487699fa05ef2484
diff --git a/Testing/Data/SLAC/ll-9cell-f523/README.md5 b/Testing/Data/SLAC/ll-9cell-f523/README.md5
new file mode 100644
index 0000000..1a08a39
--- /dev/null
+++ b/Testing/Data/SLAC/ll-9cell-f523/README.md5
@@ -0,0 +1 @@
+5081358fbcb5f3c44c3865e8aac6b2c0
diff --git a/Testing/Data/SLAC/ll-9cell-f523/ll-9cell-f523.ncdf.md5 b/Testing/Data/SLAC/ll-9cell-f523/ll-9cell-f523.ncdf.md5
new file mode 100644
index 0000000..f9c3b21
--- /dev/null
+++ b/Testing/Data/SLAC/ll-9cell-f523/ll-9cell-f523.ncdf.md5
@@ -0,0 +1 @@
+883b0b8c1f1fd033e19e29b10a22c994
diff --git a/Testing/Data/SLAC/ll-9cell-f523/mode0.l0.R2.457036E+09I2.778314E+04.m3.md5 b/Testing/Data/SLAC/ll-9cell-f523/mode0.l0.R2.457036E+09I2.778314E+04.m3.md5
new file mode 100644
index 0000000..81bc606
--- /dev/null
+++ b/Testing/Data/SLAC/ll-9cell-f523/mode0.l0.R2.457036E+09I2.778314E+04.m3.md5
@@ -0,0 +1 @@
+ec5a560a4db6079b6cfaa3727c6fc215
diff --git a/Testing/Data/SLAC/pic-example/README.md5 b/Testing/Data/SLAC/pic-example/README.md5
new file mode 100644
index 0000000..418b2da
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/README.md5
@@ -0,0 +1 @@
+ffc39f28fd07ce714df7a4a1269cee27
diff --git a/Testing/Data/SLAC/pic-example/fields_0.mod.md5 b/Testing/Data/SLAC/pic-example/fields_0.mod.md5
new file mode 100644
index 0000000..aa0bdba
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_0.mod.md5
@@ -0,0 +1 @@
+a5389e96cf875f09a3af09b7cbc99e6f
diff --git a/Testing/Data/SLAC/pic-example/fields_1.mod.md5 b/Testing/Data/SLAC/pic-example/fields_1.mod.md5
new file mode 100644
index 0000000..81f7422
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_1.mod.md5
@@ -0,0 +1 @@
+57b818f6cfd0d4e4274412e030333482
diff --git a/Testing/Data/SLAC/pic-example/fields_2.mod.md5 b/Testing/Data/SLAC/pic-example/fields_2.mod.md5
new file mode 100644
index 0000000..3ae4277
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_2.mod.md5
@@ -0,0 +1 @@
+c9181025fe91118ba27707162b14ef80
diff --git a/Testing/Data/SLAC/pic-example/fields_3.mod.md5 b/Testing/Data/SLAC/pic-example/fields_3.mod.md5
new file mode 100644
index 0000000..235c07b
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_3.mod.md5
@@ -0,0 +1 @@
+06532842b1e944246599546b3bfd7178
diff --git a/Testing/Data/SLAC/pic-example/fields_4.mod.md5 b/Testing/Data/SLAC/pic-example/fields_4.mod.md5
new file mode 100644
index 0000000..2313183
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_4.mod.md5
@@ -0,0 +1 @@
+d7c19ce629fcb2371f0b0689f075a315
diff --git a/Testing/Data/SLAC/pic-example/fields_5.mod.md5 b/Testing/Data/SLAC/pic-example/fields_5.mod.md5
new file mode 100644
index 0000000..cc3a128
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_5.mod.md5
@@ -0,0 +1 @@
+f42f2f60701432de4b67f47f871ca2f3
diff --git a/Testing/Data/SLAC/pic-example/fields_6.mod.md5 b/Testing/Data/SLAC/pic-example/fields_6.mod.md5
new file mode 100644
index 0000000..289c573
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_6.mod.md5
@@ -0,0 +1 @@
+588f1c081842a24df0a5b97e4506cf23
diff --git a/Testing/Data/SLAC/pic-example/fields_7.mod.md5 b/Testing/Data/SLAC/pic-example/fields_7.mod.md5
new file mode 100644
index 0000000..1f63a27
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_7.mod.md5
@@ -0,0 +1 @@
+4de83d8a737469969a8ef3d46fcf4484
diff --git a/Testing/Data/SLAC/pic-example/fields_8.mod.md5 b/Testing/Data/SLAC/pic-example/fields_8.mod.md5
new file mode 100644
index 0000000..0c0ad0d
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/fields_8.mod.md5
@@ -0,0 +1 @@
+cfe70450dc6f312241980b35af503969
diff --git a/Testing/Data/SLAC/pic-example/mesh.ncdf.md5 b/Testing/Data/SLAC/pic-example/mesh.ncdf.md5
new file mode 100644
index 0000000..0fd5606
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/mesh.ncdf.md5
@@ -0,0 +1 @@
+17cdb2653a82cf49b798a6a199741ee5
diff --git a/Testing/Data/SLAC/pic-example/particles_0.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_0.ncdf.md5
new file mode 100644
index 0000000..8b39bc0
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_0.ncdf.md5
@@ -0,0 +1 @@
+d8fe2aa67b5d4783dfb9b8968a131669
diff --git a/Testing/Data/SLAC/pic-example/particles_1.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_1.ncdf.md5
new file mode 100644
index 0000000..1105bdf
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_1.ncdf.md5
@@ -0,0 +1 @@
+f21f11890a354a1056e1b0bf41ecd92a
diff --git a/Testing/Data/SLAC/pic-example/particles_2.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_2.ncdf.md5
new file mode 100644
index 0000000..2612f34
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_2.ncdf.md5
@@ -0,0 +1 @@
+7e391e49d6ed5a10ecf5abe70de5c0c9
diff --git a/Testing/Data/SLAC/pic-example/particles_3.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_3.ncdf.md5
new file mode 100644
index 0000000..978a958
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_3.ncdf.md5
@@ -0,0 +1 @@
+cdaca41a01aa743b5e12981331d90b96
diff --git a/Testing/Data/SLAC/pic-example/particles_4.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_4.ncdf.md5
new file mode 100644
index 0000000..1aae1a7
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_4.ncdf.md5
@@ -0,0 +1 @@
+ec2d4b9f39761f5c4a765e04b3b3754d
diff --git a/Testing/Data/SLAC/pic-example/particles_5.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_5.ncdf.md5
new file mode 100644
index 0000000..3bc4f99
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_5.ncdf.md5
@@ -0,0 +1 @@
+ca4ec1c2a068829043ff911ebd7044ec
diff --git a/Testing/Data/SLAC/pic-example/particles_6.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_6.ncdf.md5
new file mode 100644
index 0000000..91a42e9
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_6.ncdf.md5
@@ -0,0 +1 @@
+7cff16a1117ac03aa32ec5a1372ed767
diff --git a/Testing/Data/SLAC/pic-example/particles_7.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_7.ncdf.md5
new file mode 100644
index 0000000..dac3908
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_7.ncdf.md5
@@ -0,0 +1 @@
+6fcd694e3cf6f8692090fa0c8ad3bf37
diff --git a/Testing/Data/SLAC/pic-example/particles_8.ncdf.md5 b/Testing/Data/SLAC/pic-example/particles_8.ncdf.md5
new file mode 100644
index 0000000..9f1873d
--- /dev/null
+++ b/Testing/Data/SLAC/pic-example/particles_8.ncdf.md5
@@ -0,0 +1 @@
+562b5d4b3ac249013eebbfbbea54b817
diff --git a/Testing/Data/SLAC/pillbox/Pillbox3TenDSlice.ncdf.md5 b/Testing/Data/SLAC/pillbox/Pillbox3TenDSlice.ncdf.md5
new file mode 100644
index 0000000..a1fcaec
--- /dev/null
+++ b/Testing/Data/SLAC/pillbox/Pillbox3TenDSlice.ncdf.md5
@@ -0,0 +1 @@
+c384b156b061692aed069869fb810fd5
diff --git a/Testing/Data/SLAC/pillbox/omega3p.l0.m0000.1.3138186e+09.mod.md5 b/Testing/Data/SLAC/pillbox/omega3p.l0.m0000.1.3138186e+09.mod.md5
new file mode 100644
index 0000000..c86bfd4
--- /dev/null
+++ b/Testing/Data/SLAC/pillbox/omega3p.l0.m0000.1.3138186e+09.mod.md5
@@ -0,0 +1 @@
+e1137145bf248ff59fa1865f9ee61e14
diff --git a/Testing/Data/SLAC/pillbox/omega3p.l0.m0001.1.3138187e+09.mod.md5 b/Testing/Data/SLAC/pillbox/omega3p.l0.m0001.1.3138187e+09.mod.md5
new file mode 100644
index 0000000..693d601
--- /dev/null
+++ b/Testing/Data/SLAC/pillbox/omega3p.l0.m0001.1.3138187e+09.mod.md5
@@ -0,0 +1 @@
+ffcdb8e87d8149d0a2a5db330adcbbfa
diff --git a/Testing/Data/SLAC/pillbox/omega3p.l0.m0002.1.3138189e+09.mod.md5 b/Testing/Data/SLAC/pillbox/omega3p.l0.m0002.1.3138189e+09.mod.md5
new file mode 100644
index 0000000..18282dd
--- /dev/null
+++ b/Testing/Data/SLAC/pillbox/omega3p.l0.m0002.1.3138189e+09.mod.md5
@@ -0,0 +1 @@
+5303613df3474c0e144cec7a6fd706d0
diff --git a/Testing/Data/SainteHelens.dem.md5 b/Testing/Data/SainteHelens.dem.md5
new file mode 100644
index 0000000..bfd52e0
--- /dev/null
+++ b/Testing/Data/SainteHelens.dem.md5
@@ -0,0 +1 @@
+c1554b02af3e2eaceab9e96096160bb3
diff --git a/Testing/Data/SampleStructGrid.vtk.md5 b/Testing/Data/SampleStructGrid.vtk.md5
new file mode 100644
index 0000000..fde0beb
--- /dev/null
+++ b/Testing/Data/SampleStructGrid.vtk.md5
@@ -0,0 +1 @@
+0c862c697f5cfb7ffb4709f1bb8c2d72
diff --git a/Testing/Data/SemiDisk/SemiDisk-0.vtp.md5 b/Testing/Data/SemiDisk/SemiDisk-0.vtp.md5
new file mode 100644
index 0000000..925c4dc
--- /dev/null
+++ b/Testing/Data/SemiDisk/SemiDisk-0.vtp.md5
@@ -0,0 +1 @@
+29168e202e49c55f5af8525ddeef9409
diff --git a/Testing/Data/SemiDisk/SemiDisk-1.vtp.md5 b/Testing/Data/SemiDisk/SemiDisk-1.vtp.md5
new file mode 100644
index 0000000..86a6a63
--- /dev/null
+++ b/Testing/Data/SemiDisk/SemiDisk-1.vtp.md5
@@ -0,0 +1 @@
+c75293d5a900a3a9ad2c311aaaef71ab
diff --git a/Testing/Data/SemiDisk/SemiDisk.vtk.md5 b/Testing/Data/SemiDisk/SemiDisk.vtk.md5
new file mode 100644
index 0000000..a50558f
--- /dev/null
+++ b/Testing/Data/SemiDisk/SemiDisk.vtk.md5
@@ -0,0 +1 @@
+c8f9e572308b2105726bc6d2d815586b
diff --git a/Testing/Data/SemiDisk/SemiDisk.xml.md5 b/Testing/Data/SemiDisk/SemiDisk.xml.md5
new file mode 100644
index 0000000..28ff251
--- /dev/null
+++ b/Testing/Data/SemiDisk/SemiDisk.xml.md5
@@ -0,0 +1 @@
+59e8cae9839c97e6666bca0d64de027a
diff --git a/Testing/Data/SurfaceVectors.vtk.md5 b/Testing/Data/SurfaceVectors.vtk.md5
new file mode 100644
index 0000000..a8607af
--- /dev/null
+++ b/Testing/Data/SurfaceVectors.vtk.md5
@@ -0,0 +1 @@
+ccb9fd0808047fea6c21bf7769b7f66f
diff --git a/Testing/Data/SyntheticPolyline.vtp.md5 b/Testing/Data/SyntheticPolyline.vtp.md5
new file mode 100644
index 0000000..65b433d
--- /dev/null
+++ b/Testing/Data/SyntheticPolyline.vtp.md5
@@ -0,0 +1 @@
+bcde7849ec4e37a044604f5b4a69f436
diff --git a/Testing/Data/Tango/README.VTK.txt.md5 b/Testing/Data/Tango/README.VTK.txt.md5
new file mode 100644
index 0000000..9c52c55
--- /dev/null
+++ b/Testing/Data/Tango/README.VTK.txt.md5
@@ -0,0 +1 @@
+6de41dad4b2cf4b4ebaaebdf5555e910
diff --git a/Testing/Data/Tango/TangoIcons.png.md5 b/Testing/Data/Tango/TangoIcons.png.md5
new file mode 100644
index 0000000..ce2e2a9
--- /dev/null
+++ b/Testing/Data/Tango/TangoIcons.png.md5
@@ -0,0 +1 @@
+d6cb3cf0b80f3d17127882f96c35bb0c
diff --git a/Testing/Data/UCD2D/UCD_00000.inp.md5 b/Testing/Data/UCD2D/UCD_00000.inp.md5
new file mode 100644
index 0000000..7da41e3
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00000.inp.md5
@@ -0,0 +1 @@
+71777cacd1b816687b7e718aabc70661
diff --git a/Testing/Data/UCD2D/UCD_00001.inp.md5 b/Testing/Data/UCD2D/UCD_00001.inp.md5
new file mode 100644
index 0000000..b2a8a2e
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00001.inp.md5
@@ -0,0 +1 @@
+03edf03c1fa21b901fce05e29e62626a
diff --git a/Testing/Data/UCD2D/UCD_00002.inp.md5 b/Testing/Data/UCD2D/UCD_00002.inp.md5
new file mode 100644
index 0000000..c662b00
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00002.inp.md5
@@ -0,0 +1 @@
+e357ea8760c74a959fd9816c500c4697
diff --git a/Testing/Data/UCD2D/UCD_00003.inp.md5 b/Testing/Data/UCD2D/UCD_00003.inp.md5
new file mode 100644
index 0000000..e792b60
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00003.inp.md5
@@ -0,0 +1 @@
+25363330c51c65b6c28208c5f027b927
diff --git a/Testing/Data/UCD2D/UCD_00004.inp.md5 b/Testing/Data/UCD2D/UCD_00004.inp.md5
new file mode 100644
index 0000000..6a938e0
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00004.inp.md5
@@ -0,0 +1 @@
+a9f34c3a7ea136062d8d45de09ecc4b6
diff --git a/Testing/Data/UCD2D/UCD_00005.inp.md5 b/Testing/Data/UCD2D/UCD_00005.inp.md5
new file mode 100644
index 0000000..e9b0fd1
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00005.inp.md5
@@ -0,0 +1 @@
+3d142af74530223425e4ca73644dc2a0
diff --git a/Testing/Data/UCD2D/UCD_00006.inp.md5 b/Testing/Data/UCD2D/UCD_00006.inp.md5
new file mode 100644
index 0000000..e4dbb68
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00006.inp.md5
@@ -0,0 +1 @@
+a7e2d08b48bff64f4ab379495b7cf6c3
diff --git a/Testing/Data/UCD2D/UCD_00007.inp.md5 b/Testing/Data/UCD2D/UCD_00007.inp.md5
new file mode 100644
index 0000000..d8b0c8c
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00007.inp.md5
@@ -0,0 +1 @@
+484515e3fcbe7d0b5205a89c915e76f8
diff --git a/Testing/Data/UCD2D/UCD_00008.inp.md5 b/Testing/Data/UCD2D/UCD_00008.inp.md5
new file mode 100644
index 0000000..838bfae
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00008.inp.md5
@@ -0,0 +1 @@
+7aed6d18a0260a4d57a0d4776d62b59c
diff --git a/Testing/Data/UCD2D/UCD_00009.inp.md5 b/Testing/Data/UCD2D/UCD_00009.inp.md5
new file mode 100644
index 0000000..d5868d9
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00009.inp.md5
@@ -0,0 +1 @@
+4c74980989d8c5a26c98920ea2216314
diff --git a/Testing/Data/UCD2D/UCD_00010.inp.md5 b/Testing/Data/UCD2D/UCD_00010.inp.md5
new file mode 100644
index 0000000..61a1c27
--- /dev/null
+++ b/Testing/Data/UCD2D/UCD_00010.inp.md5
@@ -0,0 +1 @@
+703fa35b92341e46b7e813b6555a7535
diff --git a/Testing/Data/Viewpoint/README.md5 b/Testing/Data/Viewpoint/README.md5
new file mode 100644
index 0000000..50a369b
--- /dev/null
+++ b/Testing/Data/Viewpoint/README.md5
@@ -0,0 +1 @@
+fa5a37dc723bf002ed76aaa2fa018c28
diff --git a/Testing/Data/Viewpoint/cow.g.md5 b/Testing/Data/Viewpoint/cow.g.md5
new file mode 100644
index 0000000..b3a0465
--- /dev/null
+++ b/Testing/Data/Viewpoint/cow.g.md5
@@ -0,0 +1 @@
+c845ca8f23b1a03690c9e91935fb0028
diff --git a/Testing/Data/Viewpoint/cow.obj.md5 b/Testing/Data/Viewpoint/cow.obj.md5
new file mode 100644
index 0000000..77167c1
--- /dev/null
+++ b/Testing/Data/Viewpoint/cow.obj.md5
@@ -0,0 +1 @@
+442efbeffaad664d76a6b7cd6605738c
diff --git a/Testing/Data/Viewpoint/iflamigm.3ds.md5 b/Testing/Data/Viewpoint/iflamigm.3ds.md5
new file mode 100644
index 0000000..e2b9709
--- /dev/null
+++ b/Testing/Data/Viewpoint/iflamigm.3ds.md5
@@ -0,0 +1 @@
+e619e8b713ba78500d4a84c6987da911
diff --git a/Testing/Data/WindBladeReader/WT_topo1.dat.md5 b/Testing/Data/WindBladeReader/WT_topo1.dat.md5
new file mode 100644
index 0000000..f8080cc
--- /dev/null
+++ b/Testing/Data/WindBladeReader/WT_topo1.dat.md5
@@ -0,0 +1 @@
+f089ae7fbeac0a752ef0a69e4b434eaa
diff --git a/Testing/Data/WindBladeReader/field/comp.out.10.md5 b/Testing/Data/WindBladeReader/field/comp.out.10.md5
new file mode 100644
index 0000000..4e12c2d
--- /dev/null
+++ b/Testing/Data/WindBladeReader/field/comp.out.10.md5
@@ -0,0 +1 @@
+e1bc2bf8f0e85161cfab9440d20c93b9
diff --git a/Testing/Data/WindBladeReader/field/comp.out.5500.md5 b/Testing/Data/WindBladeReader/field/comp.out.5500.md5
new file mode 100644
index 0000000..8c43917
--- /dev/null
+++ b/Testing/Data/WindBladeReader/field/comp.out.5500.md5
@@ -0,0 +1 @@
+c1a9211df9a954272161f80626583733
diff --git a/Testing/Data/WindBladeReader/test1_topo.wind.md5 b/Testing/Data/WindBladeReader/test1_topo.wind.md5
new file mode 100644
index 0000000..c9026e3
--- /dev/null
+++ b/Testing/Data/WindBladeReader/test1_topo.wind.md5
@@ -0,0 +1 @@
+057e6f518ff039b41c2bb8858e198e48
diff --git a/Testing/Data/WindBladeReader/turbine/WT_list.md5 b/Testing/Data/WindBladeReader/turbine/WT_list.md5
new file mode 100644
index 0000000..09bb4c1
--- /dev/null
+++ b/Testing/Data/WindBladeReader/turbine/WT_list.md5
@@ -0,0 +1 @@
+3a9ba515441f31604f9529d05c7102f9
diff --git a/Testing/Data/WindBladeReader/turbine/WT_rist.md5 b/Testing/Data/WindBladeReader/turbine/WT_rist.md5
new file mode 100644
index 0000000..7c3e583
--- /dev/null
+++ b/Testing/Data/WindBladeReader/turbine/WT_rist.md5
@@ -0,0 +1 @@
+0b35b650622f114bac531b0e3851a8b3
diff --git a/Testing/Data/WindBladeReader/turbine/wtbl.10.md5 b/Testing/Data/WindBladeReader/turbine/wtbl.10.md5
new file mode 100644
index 0000000..40e204d
--- /dev/null
+++ b/Testing/Data/WindBladeReader/turbine/wtbl.10.md5
@@ -0,0 +1 @@
+c669c5682748f7a88e839d4ce8a0554d
diff --git a/Testing/Data/WindBladeReader/turbine/wtbl.5500.md5 b/Testing/Data/WindBladeReader/turbine/wtbl.5500.md5
new file mode 100644
index 0000000..f46caba
--- /dev/null
+++ b/Testing/Data/WindBladeReader/turbine/wtbl.5500.md5
@@ -0,0 +1 @@
+d58f3e4a7a38102b6a3f6fcc402b4c3a
diff --git a/Testing/Data/WineGlass.wrl.md5 b/Testing/Data/WineGlass.wrl.md5
new file mode 100644
index 0000000..ad0f0c7
--- /dev/null
+++ b/Testing/Data/WineGlass.wrl.md5
@@ -0,0 +1 @@
+0470ebb3419bde4348c55d24cd13f289
diff --git a/Testing/Data/alphachannel.png.md5 b/Testing/Data/alphachannel.png.md5
new file mode 100644
index 0000000..8399bde
--- /dev/null
+++ b/Testing/Data/alphachannel.png.md5
@@ -0,0 +1 @@
+595891dee01f082a4dfaf608dee0ff9d
diff --git a/Testing/Data/authors.csv.md5 b/Testing/Data/authors.csv.md5
new file mode 100644
index 0000000..f067c26
--- /dev/null
+++ b/Testing/Data/authors.csv.md5
@@ -0,0 +1 @@
+1f6da29b896bf37b580ac81d77547bbf
diff --git a/Testing/Data/avg152T1_RL_nifti.nii.gz.md5 b/Testing/Data/avg152T1_RL_nifti.nii.gz.md5
new file mode 100644
index 0000000..0eb6456
--- /dev/null
+++ b/Testing/Data/avg152T1_RL_nifti.nii.gz.md5
@@ -0,0 +1 @@
+76ef297b8b927471e1dd80b819540e9d
diff --git a/Testing/Data/beach.ascii.md5 b/Testing/Data/beach.ascii.md5
new file mode 100644
index 0000000..454d37e
--- /dev/null
+++ b/Testing/Data/beach.ascii.md5
@@ -0,0 +1 @@
+8e16da1df519fddef62be98b93d8bffd
diff --git a/Testing/Data/beach.ascii.nhdr.md5 b/Testing/Data/beach.ascii.nhdr.md5
new file mode 100644
index 0000000..9f0a0ef
--- /dev/null
+++ b/Testing/Data/beach.ascii.nhdr.md5
@@ -0,0 +1 @@
+ee18a3d54dcc0c68608136350bfe4915
diff --git a/Testing/Data/beach.jpg.md5 b/Testing/Data/beach.jpg.md5
new file mode 100644
index 0000000..4012f5f
--- /dev/null
+++ b/Testing/Data/beach.jpg.md5
@@ -0,0 +1 @@
+acbbd3a051ddefdbdd6c53038bfe3b16
diff --git a/Testing/Data/beach.nrrd.md5 b/Testing/Data/beach.nrrd.md5
new file mode 100644
index 0000000..322004f
--- /dev/null
+++ b/Testing/Data/beach.nrrd.md5
@@ -0,0 +1 @@
+9eb48da7c20801b6619aecb3291fe14a
diff --git a/Testing/Data/beach.tif.md5 b/Testing/Data/beach.tif.md5
new file mode 100644
index 0000000..16b6573
--- /dev/null
+++ b/Testing/Data/beach.tif.md5
@@ -0,0 +1 @@
+bfb986abaed886e53aa95eb732a805f8
diff --git a/Testing/Data/billBoard.pgm.md5 b/Testing/Data/billBoard.pgm.md5
new file mode 100644
index 0000000..0f97e8d
--- /dev/null
+++ b/Testing/Data/billBoard.pgm.md5
@@ -0,0 +1 @@
+cd8606d5c24109ba8a5eaeb368354c50
diff --git a/Testing/Data/blow.vtk.md5 b/Testing/Data/blow.vtk.md5
new file mode 100644
index 0000000..8475158
--- /dev/null
+++ b/Testing/Data/blow.vtk.md5
@@ -0,0 +1 @@
+4d37df71d04579a4eb04e51ec0767fab
diff --git a/Testing/Data/blowAttr.vtk.md5 b/Testing/Data/blowAttr.vtk.md5
new file mode 100644
index 0000000..11084d6
--- /dev/null
+++ b/Testing/Data/blowAttr.vtk.md5
@@ -0,0 +1 @@
+99c1c42fd2b1e7d2fcf557d9515dd1f3
diff --git a/Testing/Data/blowGeom.vtk.md5 b/Testing/Data/blowGeom.vtk.md5
new file mode 100644
index 0000000..4af3f8d
--- /dev/null
+++ b/Testing/Data/blowGeom.vtk.md5
@@ -0,0 +1 @@
+59a5f718662a3087994310f42ec182df
diff --git a/Testing/Data/bluntfinq.bin.md5 b/Testing/Data/bluntfinq.bin.md5
new file mode 100644
index 0000000..077117f
--- /dev/null
+++ b/Testing/Data/bluntfinq.bin.md5
@@ -0,0 +1 @@
+42913a6af9f484affd594ca40a4d72aa
diff --git a/Testing/Data/bluntfinxyz.bin.md5 b/Testing/Data/bluntfinxyz.bin.md5
new file mode 100644
index 0000000..9fc90ca
--- /dev/null
+++ b/Testing/Data/bluntfinxyz.bin.md5
@@ -0,0 +1 @@
+ce5b5bc6c721450a2c90c53649744987
diff --git a/Testing/Data/bolt.fac.md5 b/Testing/Data/bolt.fac.md5
new file mode 100644
index 0000000..e661df7
--- /dev/null
+++ b/Testing/Data/bolt.fac.md5
@@ -0,0 +1 @@
+e86316cc00c3a29ca44a09a3bb62c1d8
diff --git a/Testing/Data/bolt.slc.md5 b/Testing/Data/bolt.slc.md5
new file mode 100644
index 0000000..7a6cb67
--- /dev/null
+++ b/Testing/Data/bolt.slc.md5
@@ -0,0 +1 @@
+9efa4ed5e756af1d5bf8eab2d5edb660
diff --git a/Testing/Data/bore.vtk.md5 b/Testing/Data/bore.vtk.md5
new file mode 100644
index 0000000..2225f1c
--- /dev/null
+++ b/Testing/Data/bore.vtk.md5
@@ -0,0 +1 @@
+d9421e8c10c588ea7c3501b74daa5c95
diff --git a/Testing/Data/bot2.wrl.md5 b/Testing/Data/bot2.wrl.md5
new file mode 100644
index 0000000..50b5f90
--- /dev/null
+++ b/Testing/Data/bot2.wrl.md5
@@ -0,0 +1 @@
+03e3bed85619cebbaf41c6ba8b639efa
diff --git a/Testing/Data/box-noglom.ex2.md5 b/Testing/Data/box-noglom.ex2.md5
new file mode 100644
index 0000000..d2d56a2
--- /dev/null
+++ b/Testing/Data/box-noglom.ex2.md5
@@ -0,0 +1 @@
+ea00ece69fa2a618a4840b12d6bd038d
diff --git a/Testing/Data/bpa.mol.md5 b/Testing/Data/bpa.mol.md5
new file mode 100644
index 0000000..cb6c7e7
--- /dev/null
+++ b/Testing/Data/bpa.mol.md5
@@ -0,0 +1 @@
+bcd10e2cb0c4d8b57e9508e1c86e6ef9
diff --git a/Testing/Data/brainImageSmooth.vtk.md5 b/Testing/Data/brainImageSmooth.vtk.md5
new file mode 100644
index 0000000..06f9030
--- /dev/null
+++ b/Testing/Data/brainImageSmooth.vtk.md5
@@ -0,0 +1 @@
+d90f433cff6a23d1fac22493d6d7a94d
diff --git a/Testing/Data/bunny.ply.md5 b/Testing/Data/bunny.ply.md5
new file mode 100644
index 0000000..de8697a
--- /dev/null
+++ b/Testing/Data/bunny.ply.md5
@@ -0,0 +1 @@
+4326db2fd98be6d3538f504159aca44c
diff --git a/Testing/Data/cactus.3337.pts.md5 b/Testing/Data/cactus.3337.pts.md5
new file mode 100644
index 0000000..ddc26bb
--- /dev/null
+++ b/Testing/Data/cactus.3337.pts.md5
@@ -0,0 +1 @@
+6205709f4bc17de243fe65bfcea0ea63
diff --git a/Testing/Data/caffeine.pdb.md5 b/Testing/Data/caffeine.pdb.md5
new file mode 100644
index 0000000..a3b2aad
--- /dev/null
+++ b/Testing/Data/caffeine.pdb.md5
@@ -0,0 +1 @@
+7d34c17d65e32fcb568ba5b022cb3e9d
diff --git a/Testing/Data/camscene.png.md5 b/Testing/Data/camscene.png.md5
new file mode 100644
index 0000000..74063fe
--- /dev/null
+++ b/Testing/Data/camscene.png.md5
@@ -0,0 +1 @@
+b54534b27e6a59daf5e83e7eb8c9c4dc
diff --git a/Testing/Data/cellcentered.tec.md5 b/Testing/Data/cellcentered.tec.md5
new file mode 100644
index 0000000..9608a4b
--- /dev/null
+++ b/Testing/Data/cellcentered.tec.md5
@@ -0,0 +1 @@
+073798c3688dcbe7b6322d000e77490c
diff --git a/Testing/Data/cellsnd.ascii.inp.md5 b/Testing/Data/cellsnd.ascii.inp.md5
new file mode 100644
index 0000000..daced7f
--- /dev/null
+++ b/Testing/Data/cellsnd.ascii.inp.md5
@@ -0,0 +1 @@
+d90a0437bff891f453ed6ebd380cc34d
diff --git a/Testing/Data/cellsnd.bin.inp.md5 b/Testing/Data/cellsnd.bin.inp.md5
new file mode 100644
index 0000000..35ec3f7
--- /dev/null
+++ b/Testing/Data/cellsnd.bin.inp.md5
@@ -0,0 +1 @@
+cecc3201fd255602536466145f039cd9
diff --git a/Testing/Data/chi_field/chi_chunk_0_700_0_50x50x50.bov.md5 b/Testing/Data/chi_field/chi_chunk_0_700_0_50x50x50.bov.md5
new file mode 100644
index 0000000..ed16759
--- /dev/null
+++ b/Testing/Data/chi_field/chi_chunk_0_700_0_50x50x50.bov.md5
@@ -0,0 +1 @@
+9664678da747bfe2ce3f5d452e76cb06
diff --git a/Testing/Data/chombo3d/chombo3d.vtm.md5 b/Testing/Data/chombo3d/chombo3d.vtm.md5
new file mode 100644
index 0000000..abad5e1
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d.vtm.md5
@@ -0,0 +1 @@
+ca0f49246377ee8ffd0950dc4ae0d15a
diff --git a/Testing/Data/chombo3d/chombo3d_0.vti.md5 b/Testing/Data/chombo3d/chombo3d_0.vti.md5
new file mode 100644
index 0000000..47cacb1
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_0.vti.md5
@@ -0,0 +1 @@
+106e2a037ef63a020a754d651e6644eb
diff --git a/Testing/Data/chombo3d/chombo3d_1.vti.md5 b/Testing/Data/chombo3d/chombo3d_1.vti.md5
new file mode 100644
index 0000000..581fd1c
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_1.vti.md5
@@ -0,0 +1 @@
+af45eeccfdefa73097dfd6ae55138057
diff --git a/Testing/Data/chombo3d/chombo3d_10.vti.md5 b/Testing/Data/chombo3d/chombo3d_10.vti.md5
new file mode 100644
index 0000000..d884b21
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_10.vti.md5
@@ -0,0 +1 @@
+60fc8de925ac8d3adac2cac9d3a4cfd0
diff --git a/Testing/Data/chombo3d/chombo3d_11.vti.md5 b/Testing/Data/chombo3d/chombo3d_11.vti.md5
new file mode 100644
index 0000000..a811a30
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_11.vti.md5
@@ -0,0 +1 @@
+b05ce7dfaa6b7b97f19d766c188c4ed8
diff --git a/Testing/Data/chombo3d/chombo3d_12.vti.md5 b/Testing/Data/chombo3d/chombo3d_12.vti.md5
new file mode 100644
index 0000000..a34daba
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_12.vti.md5
@@ -0,0 +1 @@
+2d4ec13a429ed4fcf08b4b1e2f9ca14a
diff --git a/Testing/Data/chombo3d/chombo3d_13.vti.md5 b/Testing/Data/chombo3d/chombo3d_13.vti.md5
new file mode 100644
index 0000000..f9d149d
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_13.vti.md5
@@ -0,0 +1 @@
+6d8c6f263c478b88ba66f56731f0e6a4
diff --git a/Testing/Data/chombo3d/chombo3d_14.vti.md5 b/Testing/Data/chombo3d/chombo3d_14.vti.md5
new file mode 100644
index 0000000..d370c6b
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_14.vti.md5
@@ -0,0 +1 @@
+c225607bdae0962633dae042266672ff
diff --git a/Testing/Data/chombo3d/chombo3d_15.vti.md5 b/Testing/Data/chombo3d/chombo3d_15.vti.md5
new file mode 100644
index 0000000..af471b9
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_15.vti.md5
@@ -0,0 +1 @@
+c31762265220e5aa80829a919f8c96f0
diff --git a/Testing/Data/chombo3d/chombo3d_2.vti.md5 b/Testing/Data/chombo3d/chombo3d_2.vti.md5
new file mode 100644
index 0000000..cfd6caa
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_2.vti.md5
@@ -0,0 +1 @@
+60241bed002791fca49e239cf6483588
diff --git a/Testing/Data/chombo3d/chombo3d_3.vti.md5 b/Testing/Data/chombo3d/chombo3d_3.vti.md5
new file mode 100644
index 0000000..0272700
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_3.vti.md5
@@ -0,0 +1 @@
+8e6f4c9c7cbe0db5f3133f4bd42ef70d
diff --git a/Testing/Data/chombo3d/chombo3d_4.vti.md5 b/Testing/Data/chombo3d/chombo3d_4.vti.md5
new file mode 100644
index 0000000..b6b27c4
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_4.vti.md5
@@ -0,0 +1 @@
+0bcea675dcf0dfe4c6194689de66b0f4
diff --git a/Testing/Data/chombo3d/chombo3d_5.vti.md5 b/Testing/Data/chombo3d/chombo3d_5.vti.md5
new file mode 100644
index 0000000..978b5a4
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_5.vti.md5
@@ -0,0 +1 @@
+c26141724efc65eeeb2d29154ca73280
diff --git a/Testing/Data/chombo3d/chombo3d_6.vti.md5 b/Testing/Data/chombo3d/chombo3d_6.vti.md5
new file mode 100644
index 0000000..566243d
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_6.vti.md5
@@ -0,0 +1 @@
+48dab75f1af46e2f88fc12e2acd67956
diff --git a/Testing/Data/chombo3d/chombo3d_7.vti.md5 b/Testing/Data/chombo3d/chombo3d_7.vti.md5
new file mode 100644
index 0000000..82720d8
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_7.vti.md5
@@ -0,0 +1 @@
+03bc1c8a606db4d51f1765100eae6719
diff --git a/Testing/Data/chombo3d/chombo3d_8.vti.md5 b/Testing/Data/chombo3d/chombo3d_8.vti.md5
new file mode 100644
index 0000000..9355f09
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_8.vti.md5
@@ -0,0 +1 @@
+2c35201f0c0a557deb9df0a121ba0739
diff --git a/Testing/Data/chombo3d/chombo3d_9.vti.md5 b/Testing/Data/chombo3d/chombo3d_9.vti.md5
new file mode 100644
index 0000000..93db715
--- /dev/null
+++ b/Testing/Data/chombo3d/chombo3d_9.vti.md5
@@ -0,0 +1 @@
+254f948cde2fd055068601092643e90e
diff --git a/Testing/Data/clouds.jpeg.md5 b/Testing/Data/clouds.jpeg.md5
new file mode 100644
index 0000000..6dd0d22
--- /dev/null
+++ b/Testing/Data/clouds.jpeg.md5
@@ -0,0 +1 @@
+623911b12cb17e1e2904b9772a614f22
diff --git a/Testing/Data/clown.facet.md5 b/Testing/Data/clown.facet.md5
new file mode 100644
index 0000000..f24200a
--- /dev/null
+++ b/Testing/Data/clown.facet.md5
@@ -0,0 +1 @@
+3182660a5edb496b7803e6ab7b3a3a83
diff --git a/Testing/Data/combq.bin.md5 b/Testing/Data/combq.bin.md5
new file mode 100644
index 0000000..050d395
--- /dev/null
+++ b/Testing/Data/combq.bin.md5
@@ -0,0 +1 @@
+ff18fe9c1c68ce2842e0f4875464107d
diff --git a/Testing/Data/combxyz.bin.md5 b/Testing/Data/combxyz.bin.md5
new file mode 100644
index 0000000..8dd2dea
--- /dev/null
+++ b/Testing/Data/combxyz.bin.md5
@@ -0,0 +1 @@
+13338e5fa5a798f5d709b5ef6a9be1a0
diff --git a/Testing/Data/cow.vtp.md5 b/Testing/Data/cow.vtp.md5
new file mode 100644
index 0000000..da04a56
--- /dev/null
+++ b/Testing/Data/cow.vtp.md5
@@ -0,0 +1 @@
+7ad252bb584395acd48ae86eacc261b7
diff --git a/Testing/Data/cth.vtr.md5 b/Testing/Data/cth.vtr.md5
new file mode 100644
index 0000000..3cef3c6
--- /dev/null
+++ b/Testing/Data/cth.vtr.md5
@@ -0,0 +1 @@
+11e41914096cc5f90f5c2e3220c57308
diff --git a/Testing/Data/delimited.txt.md5 b/Testing/Data/delimited.txt.md5
new file mode 100644
index 0000000..9f9a280
--- /dev/null
+++ b/Testing/Data/delimited.txt.md5
@@ -0,0 +1 @@
+734985b710f9e0ffbdddc879e8696529
diff --git a/Testing/Data/delimited2.txt.md5 b/Testing/Data/delimited2.txt.md5
new file mode 100644
index 0000000..988b6db
--- /dev/null
+++ b/Testing/Data/delimited2.txt.md5
@@ -0,0 +1 @@
+2348d135d21a6ed54882fa22e358f60d
diff --git a/Testing/Data/delimited2UTF16.txt.md5 b/Testing/Data/delimited2UTF16.txt.md5
new file mode 100644
index 0000000..eab841b
--- /dev/null
+++ b/Testing/Data/delimited2UTF16.txt.md5
@@ -0,0 +1 @@
+a841f77f95e9a68bf841e11b1e0f54a7
diff --git a/Testing/Data/delimited2UTF16BE.txt.md5 b/Testing/Data/delimited2UTF16BE.txt.md5
new file mode 100644
index 0000000..8d24e27
--- /dev/null
+++ b/Testing/Data/delimited2UTF16BE.txt.md5
@@ -0,0 +1 @@
+548856b9b91d9ad787fd0b2583408dc5
diff --git a/Testing/Data/delimited2UTF16LE.txt.md5 b/Testing/Data/delimited2UTF16LE.txt.md5
new file mode 100644
index 0000000..1e856af
--- /dev/null
+++ b/Testing/Data/delimited2UTF16LE.txt.md5
@@ -0,0 +1 @@
+409dbe562cc127b4f7a4a23abd2ed8a1
diff --git a/Testing/Data/delimitedUTF16.txt.md5 b/Testing/Data/delimitedUTF16.txt.md5
new file mode 100644
index 0000000..a589422
--- /dev/null
+++ b/Testing/Data/delimitedUTF16.txt.md5
@@ -0,0 +1 @@
+9602dd06fec96ec4683fe0634d008581
diff --git a/Testing/Data/delimitedUTF16BE.txt.md5 b/Testing/Data/delimitedUTF16BE.txt.md5
new file mode 100644
index 0000000..2bea250
--- /dev/null
+++ b/Testing/Data/delimitedUTF16BE.txt.md5
@@ -0,0 +1 @@
+47b705b99a62e6ed03fa1a33905e3d9e
diff --git a/Testing/Data/delimitedUTF16LE.txt.md5 b/Testing/Data/delimitedUTF16LE.txt.md5
new file mode 100644
index 0000000..ecbe3d0
--- /dev/null
+++ b/Testing/Data/delimitedUTF16LE.txt.md5
@@ -0,0 +1 @@
+67e87cc743f3d77c6ed53459665ec07e
diff --git a/Testing/Data/disk_out_ref.ex2.md5 b/Testing/Data/disk_out_ref.ex2.md5
new file mode 100644
index 0000000..30483f6
--- /dev/null
+++ b/Testing/Data/disk_out_ref.ex2.md5
@@ -0,0 +1 @@
+93f5e365df93ddfbd2d05e4e3e0dabfa
diff --git a/Testing/Data/disk_out_ref_surface.vtp.md5 b/Testing/Data/disk_out_ref_surface.vtp.md5
new file mode 100644
index 0000000..d2fa928
--- /dev/null
+++ b/Testing/Data/disk_out_ref_surface.vtp.md5
@@ -0,0 +1 @@
+ed47135a3f63e07db3466ed0a109fbf4
diff --git a/Testing/Data/earth.ppm.md5 b/Testing/Data/earth.ppm.md5
new file mode 100644
index 0000000..7e7031a
--- /dev/null
+++ b/Testing/Data/earth.ppm.md5
@@ -0,0 +1 @@
+fb9d2fa4bb81f483a8c7a1e746194cfc
diff --git a/Testing/Data/edgeFaceElem.exii.md5 b/Testing/Data/edgeFaceElem.exii.md5
new file mode 100644
index 0000000..de306d2
--- /dev/null
+++ b/Testing/Data/edgeFaceElem.exii.md5
@@ -0,0 +1 @@
+e930c7c06914abb0c395f04629101d9f
diff --git a/Testing/Data/ex-blow_5.vtm.md5 b/Testing/Data/ex-blow_5.vtm.md5
new file mode 100644
index 0000000..68633f5
--- /dev/null
+++ b/Testing/Data/ex-blow_5.vtm.md5
@@ -0,0 +1 @@
+4a9fc82e8968cc789a8a7d8f5f71df97
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_0_0.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_0_0.vtu.md5
new file mode 100644
index 0000000..f6867e7
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_0_0.vtu.md5
@@ -0,0 +1 @@
+a8b12fd2689712b0c8e7a1649efaf2ab
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_0_1.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_0_1.vtu.md5
new file mode 100644
index 0000000..32af7bd
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_0_1.vtu.md5
@@ -0,0 +1 @@
+c0fee18e7572910c512ac057e65d2422
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_0_2.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_0_2.vtu.md5
new file mode 100644
index 0000000..1be9316
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_0_2.vtu.md5
@@ -0,0 +1 @@
+0e5348a936926f0f3dc29579eea21bc9
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_0_3.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_0_3.vtu.md5
new file mode 100644
index 0000000..b45980c
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_0_3.vtu.md5
@@ -0,0 +1 @@
+5e7d3d3bcb5dd08d5a1f1dc783689744
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_0_4.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_0_4.vtu.md5
new file mode 100644
index 0000000..0e338b5
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_0_4.vtu.md5
@@ -0,0 +1 @@
+a186b7fa6725b3b2c30edcec7773d6b1
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_0_5.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_0_5.vtu.md5
new file mode 100644
index 0000000..8c9ac80
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_0_5.vtu.md5
@@ -0,0 +1 @@
+467ccf41df1b4f9139c6153e206004b9
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_0_6.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_0_6.vtu.md5
new file mode 100644
index 0000000..af7b39c
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_0_6.vtu.md5
@@ -0,0 +1 @@
+3a5df717510b2b8f59e5c16dc91ff067
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_0_7.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_0_7.vtu.md5
new file mode 100644
index 0000000..2e721c3
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_0_7.vtu.md5
@@ -0,0 +1 @@
+9795674d9980cf298bf00c5f1377069b
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_1_0.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_1_0.vtu.md5
new file mode 100644
index 0000000..71059c3
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_1_0.vtu.md5
@@ -0,0 +1 @@
+37e0a7041e611163fa1e468013b713f3
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_1_1.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_1_1.vtu.md5
new file mode 100644
index 0000000..d5680d9
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_1_1.vtu.md5
@@ -0,0 +1 @@
+dee6d6962e2fa34d0b6283f18fcf72ae
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_1_2.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_1_2.vtu.md5
new file mode 100644
index 0000000..7e29d24
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_1_2.vtu.md5
@@ -0,0 +1 @@
+84001730339b29750bee19f1b4af52ef
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_1_3.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_1_3.vtu.md5
new file mode 100644
index 0000000..849b75e
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_1_3.vtu.md5
@@ -0,0 +1 @@
+cb99fab7d308c3188859fd44a4968fd1
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_1_4.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_1_4.vtu.md5
new file mode 100644
index 0000000..a5b2645
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_1_4.vtu.md5
@@ -0,0 +1 @@
+26891c1b4a3dcd50ca028e4b84497f38
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_1_5.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_1_5.vtu.md5
new file mode 100644
index 0000000..e7f3e66
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_1_5.vtu.md5
@@ -0,0 +1 @@
+7c5cd57e9eb528a9150eaeffa75fcd20
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_1_6.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_1_6.vtu.md5
new file mode 100644
index 0000000..ab32362
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_1_6.vtu.md5
@@ -0,0 +1 @@
+77e8a20fb0f439bc3f323679c63995e0
diff --git a/Testing/Data/ex-blow_5/ex-blow_5_1_7.vtu.md5 b/Testing/Data/ex-blow_5/ex-blow_5_1_7.vtu.md5
new file mode 100644
index 0000000..92408c3
--- /dev/null
+++ b/Testing/Data/ex-blow_5/ex-blow_5_1_7.vtu.md5
@@ -0,0 +1 @@
+3a9323c5a08878775e4ae05d915588f6
diff --git a/Testing/Data/faults.vtk.md5 b/Testing/Data/faults.vtk.md5
new file mode 100644
index 0000000..86fa0ce
--- /dev/null
+++ b/Testing/Data/faults.vtk.md5
@@ -0,0 +1 @@
+36be00c40cc38c07c42c39384e818c5f
diff --git a/Testing/Data/fieldfile.vtk.md5 b/Testing/Data/fieldfile.vtk.md5
new file mode 100644
index 0000000..114b090
--- /dev/null
+++ b/Testing/Data/fieldfile.vtk.md5
@@ -0,0 +1 @@
+b009cefc1455ab6a9b33508205597fe2
diff --git a/Testing/Data/filledContours.vtp.md5 b/Testing/Data/filledContours.vtp.md5
new file mode 100644
index 0000000..463410c
--- /dev/null
+++ b/Testing/Data/filledContours.vtp.md5
@@ -0,0 +1 @@
+7bd8c9d73cc9c0ca19faa3077ec1295e
diff --git a/Testing/Data/financial.txt.md5 b/Testing/Data/financial.txt.md5
new file mode 100644
index 0000000..b4539d7
--- /dev/null
+++ b/Testing/Data/financial.txt.md5
@@ -0,0 +1 @@
+ffe9e776e700bb517774bd397968ae07
diff --git a/Testing/Data/financial.vtk.md5 b/Testing/Data/financial.vtk.md5
new file mode 100644
index 0000000..5ac0b2c
--- /dev/null
+++ b/Testing/Data/financial.vtk.md5
@@ -0,0 +1 @@
+a5366362ecc4c81e27f03d284947a547
diff --git a/Testing/Data/fixedwidth.txt.md5 b/Testing/Data/fixedwidth.txt.md5
new file mode 100644
index 0000000..cda04d9
--- /dev/null
+++ b/Testing/Data/fixedwidth.txt.md5
@@ -0,0 +1 @@
+4335ec5a216ccbc9d4e80767a08f2fe2
diff --git a/Testing/Data/flow.tec.gz.md5 b/Testing/Data/flow.tec.gz.md5
new file mode 100644
index 0000000..e076f3b
--- /dev/null
+++ b/Testing/Data/flow.tec.gz.md5
@@ -0,0 +1 @@
+7fecc3ceca70a06aad8bac5c77975eed
diff --git a/Testing/Data/flow.tec.md5 b/Testing/Data/flow.tec.md5
new file mode 100644
index 0000000..a4e205a
--- /dev/null
+++ b/Testing/Data/flow.tec.md5
@@ -0,0 +1 @@
+e73f6d13c18263f37721909c97b16fd7
diff --git a/Testing/Data/foot/foot.mha.md5 b/Testing/Data/foot/foot.mha.md5
new file mode 100644
index 0000000..7459bc5
--- /dev/null
+++ b/Testing/Data/foot/foot.mha.md5
@@ -0,0 +1 @@
+8c7f5b17081be0a9ec712a0e5381ed64
diff --git a/Testing/Data/foot/foot.raw.md5 b/Testing/Data/foot/foot.raw.md5
new file mode 100644
index 0000000..89b7e49
--- /dev/null
+++ b/Testing/Data/foot/foot.raw.md5
@@ -0,0 +1 @@
+a0735aeb2cef86eb7a589b7d184d6e9d
diff --git a/Testing/Data/fran_cut.png.md5 b/Testing/Data/fran_cut.png.md5
new file mode 100644
index 0000000..1a1c3ee
--- /dev/null
+++ b/Testing/Data/fran_cut.png.md5
@@ -0,0 +1 @@
+15808a2ed8da896365f73079efc98be0
diff --git a/Testing/Data/fran_cut.vtk.md5 b/Testing/Data/fran_cut.vtk.md5
new file mode 100644
index 0000000..9d8e57b
--- /dev/null
+++ b/Testing/Data/fran_cut.vtk.md5
@@ -0,0 +1 @@
+87c0b9e936e15ec453b93373456602ab
diff --git a/Testing/Data/fullhead15.png.md5 b/Testing/Data/fullhead15.png.md5
new file mode 100644
index 0000000..11f2ea0
--- /dev/null
+++ b/Testing/Data/fullhead15.png.md5
@@ -0,0 +1 @@
+918cb98cc1e56b59cea3d7fbb2c3f249
diff --git a/Testing/Data/headsq/quarter.1.md5 b/Testing/Data/headsq/quarter.1.md5
new file mode 100644
index 0000000..e652c8a
--- /dev/null
+++ b/Testing/Data/headsq/quarter.1.md5
@@ -0,0 +1 @@
+6a7afa12fc9af4fbeb76e230d0ff519e
diff --git a/Testing/Data/headsq/quarter.10.md5 b/Testing/Data/headsq/quarter.10.md5
new file mode 100644
index 0000000..76c83eb
--- /dev/null
+++ b/Testing/Data/headsq/quarter.10.md5
@@ -0,0 +1 @@
+a46d358e8804c8def007b2ea24427f9e
diff --git a/Testing/Data/headsq/quarter.11.md5 b/Testing/Data/headsq/quarter.11.md5
new file mode 100644
index 0000000..d04f294
--- /dev/null
+++ b/Testing/Data/headsq/quarter.11.md5
@@ -0,0 +1 @@
+2c790800a3ef9ddf3e9542d711365c8a
diff --git a/Testing/Data/headsq/quarter.12.md5 b/Testing/Data/headsq/quarter.12.md5
new file mode 100644
index 0000000..98a5514
--- /dev/null
+++ b/Testing/Data/headsq/quarter.12.md5
@@ -0,0 +1 @@
+7720d0af7ab4608defdf9d59dc454c88
diff --git a/Testing/Data/headsq/quarter.13.md5 b/Testing/Data/headsq/quarter.13.md5
new file mode 100644
index 0000000..6c38c2f
--- /dev/null
+++ b/Testing/Data/headsq/quarter.13.md5
@@ -0,0 +1 @@
+1b80cdceaa96122b359becda329763d4
diff --git a/Testing/Data/headsq/quarter.14.md5 b/Testing/Data/headsq/quarter.14.md5
new file mode 100644
index 0000000..31171d1
--- /dev/null
+++ b/Testing/Data/headsq/quarter.14.md5
@@ -0,0 +1 @@
+c04e1ac7fa8d8c32d59605e96354e6b7
diff --git a/Testing/Data/headsq/quarter.15.md5 b/Testing/Data/headsq/quarter.15.md5
new file mode 100644
index 0000000..8edf494
--- /dev/null
+++ b/Testing/Data/headsq/quarter.15.md5
@@ -0,0 +1 @@
+3b41dfb25b578948954c45f975080487
diff --git a/Testing/Data/headsq/quarter.16.md5 b/Testing/Data/headsq/quarter.16.md5
new file mode 100644
index 0000000..db1b165
--- /dev/null
+++ b/Testing/Data/headsq/quarter.16.md5
@@ -0,0 +1 @@
+19e4f81fb0ac2422f72aa29bc931a89d
diff --git a/Testing/Data/headsq/quarter.17.md5 b/Testing/Data/headsq/quarter.17.md5
new file mode 100644
index 0000000..bb62494
--- /dev/null
+++ b/Testing/Data/headsq/quarter.17.md5
@@ -0,0 +1 @@
+8c23df5e865d53da53c7b166c0854793
diff --git a/Testing/Data/headsq/quarter.18.md5 b/Testing/Data/headsq/quarter.18.md5
new file mode 100644
index 0000000..b5952bd
--- /dev/null
+++ b/Testing/Data/headsq/quarter.18.md5
@@ -0,0 +1 @@
+4594b66a4e4fa32ead58b5febaae0700
diff --git a/Testing/Data/headsq/quarter.19.md5 b/Testing/Data/headsq/quarter.19.md5
new file mode 100644
index 0000000..ce0c9d4
--- /dev/null
+++ b/Testing/Data/headsq/quarter.19.md5
@@ -0,0 +1 @@
+86132415625d813627130aac058fa157
diff --git a/Testing/Data/headsq/quarter.2.md5 b/Testing/Data/headsq/quarter.2.md5
new file mode 100644
index 0000000..eefe4b4
--- /dev/null
+++ b/Testing/Data/headsq/quarter.2.md5
@@ -0,0 +1 @@
+b98a134f7df4fbb60faebe32d8de8872
diff --git a/Testing/Data/headsq/quarter.20.md5 b/Testing/Data/headsq/quarter.20.md5
new file mode 100644
index 0000000..71650f7
--- /dev/null
+++ b/Testing/Data/headsq/quarter.20.md5
@@ -0,0 +1 @@
+20734271163ce911fd8f4a8f40efd94f
diff --git a/Testing/Data/headsq/quarter.21.md5 b/Testing/Data/headsq/quarter.21.md5
new file mode 100644
index 0000000..e1f3682
--- /dev/null
+++ b/Testing/Data/headsq/quarter.21.md5
@@ -0,0 +1 @@
+5ac3635292108d553b840268688e4a62
diff --git a/Testing/Data/headsq/quarter.22.md5 b/Testing/Data/headsq/quarter.22.md5
new file mode 100644
index 0000000..82f80fd
--- /dev/null
+++ b/Testing/Data/headsq/quarter.22.md5
@@ -0,0 +1 @@
+3ca8f0214eacd99e977d8b48a24299ee
diff --git a/Testing/Data/headsq/quarter.23.md5 b/Testing/Data/headsq/quarter.23.md5
new file mode 100644
index 0000000..23adf34
--- /dev/null
+++ b/Testing/Data/headsq/quarter.23.md5
@@ -0,0 +1 @@
+256c61b9bf56dbdd9c2b0347b15427a2
diff --git a/Testing/Data/headsq/quarter.24.md5 b/Testing/Data/headsq/quarter.24.md5
new file mode 100644
index 0000000..f93fadf
--- /dev/null
+++ b/Testing/Data/headsq/quarter.24.md5
@@ -0,0 +1 @@
+cf1a782163c097f781e237a7f879e4c9
diff --git a/Testing/Data/headsq/quarter.25.md5 b/Testing/Data/headsq/quarter.25.md5
new file mode 100644
index 0000000..0e1def2
--- /dev/null
+++ b/Testing/Data/headsq/quarter.25.md5
@@ -0,0 +1 @@
+a503185b6bfac7c2bca58691b4b48446
diff --git a/Testing/Data/headsq/quarter.26.md5 b/Testing/Data/headsq/quarter.26.md5
new file mode 100644
index 0000000..5c448e0
--- /dev/null
+++ b/Testing/Data/headsq/quarter.26.md5
@@ -0,0 +1 @@
+1971684ee5916ec22ceca68d571e9d64
diff --git a/Testing/Data/headsq/quarter.27.md5 b/Testing/Data/headsq/quarter.27.md5
new file mode 100644
index 0000000..d97c3d2
--- /dev/null
+++ b/Testing/Data/headsq/quarter.27.md5
@@ -0,0 +1 @@
+262448030f11dcec6674089ff8b108cd
diff --git a/Testing/Data/headsq/quarter.28.md5 b/Testing/Data/headsq/quarter.28.md5
new file mode 100644
index 0000000..7f9236e
--- /dev/null
+++ b/Testing/Data/headsq/quarter.28.md5
@@ -0,0 +1 @@
+cd0c4b78fe318de0f7554c9081fbd1ca
diff --git a/Testing/Data/headsq/quarter.29.md5 b/Testing/Data/headsq/quarter.29.md5
new file mode 100644
index 0000000..c7793f7
--- /dev/null
+++ b/Testing/Data/headsq/quarter.29.md5
@@ -0,0 +1 @@
+bade06baea6d6fb4cb0cc3c3c3fa9ef0
diff --git a/Testing/Data/headsq/quarter.3.md5 b/Testing/Data/headsq/quarter.3.md5
new file mode 100644
index 0000000..32f22a3
--- /dev/null
+++ b/Testing/Data/headsq/quarter.3.md5
@@ -0,0 +1 @@
+fb1fb8633a35918c5e5a509a2d856887
diff --git a/Testing/Data/headsq/quarter.30.md5 b/Testing/Data/headsq/quarter.30.md5
new file mode 100644
index 0000000..acd0813
--- /dev/null
+++ b/Testing/Data/headsq/quarter.30.md5
@@ -0,0 +1 @@
+439bea04e7df8513077f9fae0b318925
diff --git a/Testing/Data/headsq/quarter.31.md5 b/Testing/Data/headsq/quarter.31.md5
new file mode 100644
index 0000000..014c07c
--- /dev/null
+++ b/Testing/Data/headsq/quarter.31.md5
@@ -0,0 +1 @@
+1905ed7693fe278cf6c37ad0c676cd25
diff --git a/Testing/Data/headsq/quarter.32.md5 b/Testing/Data/headsq/quarter.32.md5
new file mode 100644
index 0000000..04a3615
--- /dev/null
+++ b/Testing/Data/headsq/quarter.32.md5
@@ -0,0 +1 @@
+8846d8e7ff9f9a446af423891c4f07b8
diff --git a/Testing/Data/headsq/quarter.33.md5 b/Testing/Data/headsq/quarter.33.md5
new file mode 100644
index 0000000..e6616e9
--- /dev/null
+++ b/Testing/Data/headsq/quarter.33.md5
@@ -0,0 +1 @@
+59ac560a5e65964a942ac3d0f19c2169
diff --git a/Testing/Data/headsq/quarter.34.md5 b/Testing/Data/headsq/quarter.34.md5
new file mode 100644
index 0000000..a045314
--- /dev/null
+++ b/Testing/Data/headsq/quarter.34.md5
@@ -0,0 +1 @@
+92662a2d41807836c0d693620d1f05f7
diff --git a/Testing/Data/headsq/quarter.35.md5 b/Testing/Data/headsq/quarter.35.md5
new file mode 100644
index 0000000..d76ee37
--- /dev/null
+++ b/Testing/Data/headsq/quarter.35.md5
@@ -0,0 +1 @@
+0a01cabd72233dff642631de034ead25
diff --git a/Testing/Data/headsq/quarter.36.md5 b/Testing/Data/headsq/quarter.36.md5
new file mode 100644
index 0000000..7038b32
--- /dev/null
+++ b/Testing/Data/headsq/quarter.36.md5
@@ -0,0 +1 @@
+92dc28c2f271b3d1abeafc5944031894
diff --git a/Testing/Data/headsq/quarter.37.md5 b/Testing/Data/headsq/quarter.37.md5
new file mode 100644
index 0000000..7a9cb0e
--- /dev/null
+++ b/Testing/Data/headsq/quarter.37.md5
@@ -0,0 +1 @@
+73dceacd760f4b01cd633cb14748e02d
diff --git a/Testing/Data/headsq/quarter.38.md5 b/Testing/Data/headsq/quarter.38.md5
new file mode 100644
index 0000000..b35a176
--- /dev/null
+++ b/Testing/Data/headsq/quarter.38.md5
@@ -0,0 +1 @@
+5371e78e3b009d0216652d6b27623a98
diff --git a/Testing/Data/headsq/quarter.39.md5 b/Testing/Data/headsq/quarter.39.md5
new file mode 100644
index 0000000..ceb94c4
--- /dev/null
+++ b/Testing/Data/headsq/quarter.39.md5
@@ -0,0 +1 @@
+e8c7246b8ed4944d57586357103b1460
diff --git a/Testing/Data/headsq/quarter.4.md5 b/Testing/Data/headsq/quarter.4.md5
new file mode 100644
index 0000000..80668b9
--- /dev/null
+++ b/Testing/Data/headsq/quarter.4.md5
@@ -0,0 +1 @@
+7b052c5a4cb791e82077a723a901de86
diff --git a/Testing/Data/headsq/quarter.40.md5 b/Testing/Data/headsq/quarter.40.md5
new file mode 100644
index 0000000..9928af3
--- /dev/null
+++ b/Testing/Data/headsq/quarter.40.md5
@@ -0,0 +1 @@
+deb7607f6388a5a8322b911eb238ad5e
diff --git a/Testing/Data/headsq/quarter.41.md5 b/Testing/Data/headsq/quarter.41.md5
new file mode 100644
index 0000000..7a11d55
--- /dev/null
+++ b/Testing/Data/headsq/quarter.41.md5
@@ -0,0 +1 @@
+4beb2717de85a73463b857418d766d8b
diff --git a/Testing/Data/headsq/quarter.42.md5 b/Testing/Data/headsq/quarter.42.md5
new file mode 100644
index 0000000..9c26527
--- /dev/null
+++ b/Testing/Data/headsq/quarter.42.md5
@@ -0,0 +1 @@
+639ef07d74f3cb5467cb614e99869f02
diff --git a/Testing/Data/headsq/quarter.43.md5 b/Testing/Data/headsq/quarter.43.md5
new file mode 100644
index 0000000..a819062
--- /dev/null
+++ b/Testing/Data/headsq/quarter.43.md5
@@ -0,0 +1 @@
+2f3dd2a987d6e894c654baf4e54b43f2
diff --git a/Testing/Data/headsq/quarter.44.md5 b/Testing/Data/headsq/quarter.44.md5
new file mode 100644
index 0000000..b1eda2d
--- /dev/null
+++ b/Testing/Data/headsq/quarter.44.md5
@@ -0,0 +1 @@
+bdddb629a584b1b4d7966361b747a67c
diff --git a/Testing/Data/headsq/quarter.45.md5 b/Testing/Data/headsq/quarter.45.md5
new file mode 100644
index 0000000..45893fa
--- /dev/null
+++ b/Testing/Data/headsq/quarter.45.md5
@@ -0,0 +1 @@
+b85f72bd2fe6fec3a6314f9394289c44
diff --git a/Testing/Data/headsq/quarter.46.md5 b/Testing/Data/headsq/quarter.46.md5
new file mode 100644
index 0000000..3c369eb
--- /dev/null
+++ b/Testing/Data/headsq/quarter.46.md5
@@ -0,0 +1 @@
+11001b88e2adfa6ae39362723d406b21
diff --git a/Testing/Data/headsq/quarter.47.md5 b/Testing/Data/headsq/quarter.47.md5
new file mode 100644
index 0000000..bdd733e
--- /dev/null
+++ b/Testing/Data/headsq/quarter.47.md5
@@ -0,0 +1 @@
+826ccec115cb9f4efd3d449012d9a528
diff --git a/Testing/Data/headsq/quarter.48.md5 b/Testing/Data/headsq/quarter.48.md5
new file mode 100644
index 0000000..5a40c4c
--- /dev/null
+++ b/Testing/Data/headsq/quarter.48.md5
@@ -0,0 +1 @@
+86d1b4f2ee1d6b435c0fb896fe1242b2
diff --git a/Testing/Data/headsq/quarter.49.md5 b/Testing/Data/headsq/quarter.49.md5
new file mode 100644
index 0000000..ddf1481
--- /dev/null
+++ b/Testing/Data/headsq/quarter.49.md5
@@ -0,0 +1 @@
+3e0c36a7398ae871bdd2446195ffb2c0
diff --git a/Testing/Data/headsq/quarter.5.md5 b/Testing/Data/headsq/quarter.5.md5
new file mode 100644
index 0000000..1dbdefe
--- /dev/null
+++ b/Testing/Data/headsq/quarter.5.md5
@@ -0,0 +1 @@
+b93412ea6782ad3cfeb21cef93604301
diff --git a/Testing/Data/headsq/quarter.50.md5 b/Testing/Data/headsq/quarter.50.md5
new file mode 100644
index 0000000..66f3b91
--- /dev/null
+++ b/Testing/Data/headsq/quarter.50.md5
@@ -0,0 +1 @@
+0b57f4a9046d659fe400bf3b178e6980
diff --git a/Testing/Data/headsq/quarter.51.md5 b/Testing/Data/headsq/quarter.51.md5
new file mode 100644
index 0000000..0ebe628
--- /dev/null
+++ b/Testing/Data/headsq/quarter.51.md5
@@ -0,0 +1 @@
+18d0ebd39af6a794dbac45496ce4b902
diff --git a/Testing/Data/headsq/quarter.52.md5 b/Testing/Data/headsq/quarter.52.md5
new file mode 100644
index 0000000..d249267
--- /dev/null
+++ b/Testing/Data/headsq/quarter.52.md5
@@ -0,0 +1 @@
+e41653932af92cb85729daf600f3665d
diff --git a/Testing/Data/headsq/quarter.53.md5 b/Testing/Data/headsq/quarter.53.md5
new file mode 100644
index 0000000..8a0433f
--- /dev/null
+++ b/Testing/Data/headsq/quarter.53.md5
@@ -0,0 +1 @@
+4497b4b630a8329463d60ac891a48151
diff --git a/Testing/Data/headsq/quarter.54.md5 b/Testing/Data/headsq/quarter.54.md5
new file mode 100644
index 0000000..750bfdd
--- /dev/null
+++ b/Testing/Data/headsq/quarter.54.md5
@@ -0,0 +1 @@
+feaa24e24172ce8ece3d882a4b0783b6
diff --git a/Testing/Data/headsq/quarter.55.md5 b/Testing/Data/headsq/quarter.55.md5
new file mode 100644
index 0000000..3e7921d
--- /dev/null
+++ b/Testing/Data/headsq/quarter.55.md5
@@ -0,0 +1 @@
+9bee47e99eedefbd91350c55976c45c2
diff --git a/Testing/Data/headsq/quarter.56.md5 b/Testing/Data/headsq/quarter.56.md5
new file mode 100644
index 0000000..782bc4c
--- /dev/null
+++ b/Testing/Data/headsq/quarter.56.md5
@@ -0,0 +1 @@
+f3fd748f1cd25a60e69f94db669f9255
diff --git a/Testing/Data/headsq/quarter.57.md5 b/Testing/Data/headsq/quarter.57.md5
new file mode 100644
index 0000000..789edb1
--- /dev/null
+++ b/Testing/Data/headsq/quarter.57.md5
@@ -0,0 +1 @@
+9060b629c30e647f1213cebcb19eecd0
diff --git a/Testing/Data/headsq/quarter.58.md5 b/Testing/Data/headsq/quarter.58.md5
new file mode 100644
index 0000000..7d2f50f
--- /dev/null
+++ b/Testing/Data/headsq/quarter.58.md5
@@ -0,0 +1 @@
+e4eb32083ca28bb7fc3220fc1dbb8b09
diff --git a/Testing/Data/headsq/quarter.59.md5 b/Testing/Data/headsq/quarter.59.md5
new file mode 100644
index 0000000..c772646
--- /dev/null
+++ b/Testing/Data/headsq/quarter.59.md5
@@ -0,0 +1 @@
+1834f6616a31d153ddcec591a33d4045
diff --git a/Testing/Data/headsq/quarter.6.md5 b/Testing/Data/headsq/quarter.6.md5
new file mode 100644
index 0000000..72a824a
--- /dev/null
+++ b/Testing/Data/headsq/quarter.6.md5
@@ -0,0 +1 @@
+39220f0268776eb0cab864c32abb7eb9
diff --git a/Testing/Data/headsq/quarter.60.md5 b/Testing/Data/headsq/quarter.60.md5
new file mode 100644
index 0000000..b475fec
--- /dev/null
+++ b/Testing/Data/headsq/quarter.60.md5
@@ -0,0 +1 @@
+cbd6ccf072f9b4f45200e2b632bea424
diff --git a/Testing/Data/headsq/quarter.61.md5 b/Testing/Data/headsq/quarter.61.md5
new file mode 100644
index 0000000..fb1e4e3
--- /dev/null
+++ b/Testing/Data/headsq/quarter.61.md5
@@ -0,0 +1 @@
+d4737c7bf6bd887ce078351a8e0a0edd
diff --git a/Testing/Data/headsq/quarter.62.md5 b/Testing/Data/headsq/quarter.62.md5
new file mode 100644
index 0000000..afc78f9
--- /dev/null
+++ b/Testing/Data/headsq/quarter.62.md5
@@ -0,0 +1 @@
+f58529c094eca3ccdb33ebb6ed5ab66d
diff --git a/Testing/Data/headsq/quarter.63.md5 b/Testing/Data/headsq/quarter.63.md5
new file mode 100644
index 0000000..d0b8252
--- /dev/null
+++ b/Testing/Data/headsq/quarter.63.md5
@@ -0,0 +1 @@
+aba3969ec8873c67eed3e0d15c04de54
diff --git a/Testing/Data/headsq/quarter.64.md5 b/Testing/Data/headsq/quarter.64.md5
new file mode 100644
index 0000000..c027c14
--- /dev/null
+++ b/Testing/Data/headsq/quarter.64.md5
@@ -0,0 +1 @@
+dd4529a52f9e2c462b9d5947e6d90c0a
diff --git a/Testing/Data/headsq/quarter.65.md5 b/Testing/Data/headsq/quarter.65.md5
new file mode 100644
index 0000000..2aa18e2
--- /dev/null
+++ b/Testing/Data/headsq/quarter.65.md5
@@ -0,0 +1 @@
+2a727b2328795065cf3ad04124da2514
diff --git a/Testing/Data/headsq/quarter.66.md5 b/Testing/Data/headsq/quarter.66.md5
new file mode 100644
index 0000000..9d9685e
--- /dev/null
+++ b/Testing/Data/headsq/quarter.66.md5
@@ -0,0 +1 @@
+56a841f8a2bad641ba9611edb3d3483c
diff --git a/Testing/Data/headsq/quarter.67.md5 b/Testing/Data/headsq/quarter.67.md5
new file mode 100644
index 0000000..514f022
--- /dev/null
+++ b/Testing/Data/headsq/quarter.67.md5
@@ -0,0 +1 @@
+60ed564fcb1b5d7f8c0a3d1033f74110
diff --git a/Testing/Data/headsq/quarter.68.md5 b/Testing/Data/headsq/quarter.68.md5
new file mode 100644
index 0000000..29d3269
--- /dev/null
+++ b/Testing/Data/headsq/quarter.68.md5
@@ -0,0 +1 @@
+5f13a7d68b7527a9e0b526db13db12d4
diff --git a/Testing/Data/headsq/quarter.69.md5 b/Testing/Data/headsq/quarter.69.md5
new file mode 100644
index 0000000..8dc75e5
--- /dev/null
+++ b/Testing/Data/headsq/quarter.69.md5
@@ -0,0 +1 @@
+93a2c4bbe7b9461f96654a776e5d73ed
diff --git a/Testing/Data/headsq/quarter.7.md5 b/Testing/Data/headsq/quarter.7.md5
new file mode 100644
index 0000000..412cc7d
--- /dev/null
+++ b/Testing/Data/headsq/quarter.7.md5
@@ -0,0 +1 @@
+691ff8252b3082a07e1ed5ed127e947e
diff --git a/Testing/Data/headsq/quarter.70.md5 b/Testing/Data/headsq/quarter.70.md5
new file mode 100644
index 0000000..0e58de4
--- /dev/null
+++ b/Testing/Data/headsq/quarter.70.md5
@@ -0,0 +1 @@
+3f8f1274969bf59e1009661e8ee49fa7
diff --git a/Testing/Data/headsq/quarter.71.md5 b/Testing/Data/headsq/quarter.71.md5
new file mode 100644
index 0000000..eb1e711
--- /dev/null
+++ b/Testing/Data/headsq/quarter.71.md5
@@ -0,0 +1 @@
+e623800392790f5c84faa47d7bcfeccc
diff --git a/Testing/Data/headsq/quarter.72.md5 b/Testing/Data/headsq/quarter.72.md5
new file mode 100644
index 0000000..a790691
--- /dev/null
+++ b/Testing/Data/headsq/quarter.72.md5
@@ -0,0 +1 @@
+3688c490aa78217ee5f608dbda699219
diff --git a/Testing/Data/headsq/quarter.73.md5 b/Testing/Data/headsq/quarter.73.md5
new file mode 100644
index 0000000..4c00c14
--- /dev/null
+++ b/Testing/Data/headsq/quarter.73.md5
@@ -0,0 +1 @@
+c573bea44d837d8f78cdf122bc14966f
diff --git a/Testing/Data/headsq/quarter.74.md5 b/Testing/Data/headsq/quarter.74.md5
new file mode 100644
index 0000000..4533afe
--- /dev/null
+++ b/Testing/Data/headsq/quarter.74.md5
@@ -0,0 +1 @@
+1d0733e12aa21e5589f761953a82fca1
diff --git a/Testing/Data/headsq/quarter.75.md5 b/Testing/Data/headsq/quarter.75.md5
new file mode 100644
index 0000000..7c28b51
--- /dev/null
+++ b/Testing/Data/headsq/quarter.75.md5
@@ -0,0 +1 @@
+0634507bac3e1b0e5c947121aaa14aee
diff --git a/Testing/Data/headsq/quarter.76.md5 b/Testing/Data/headsq/quarter.76.md5
new file mode 100644
index 0000000..3fdbc20
--- /dev/null
+++ b/Testing/Data/headsq/quarter.76.md5
@@ -0,0 +1 @@
+8c9cb2e53ee51096d81bb350736c7903
diff --git a/Testing/Data/headsq/quarter.77.md5 b/Testing/Data/headsq/quarter.77.md5
new file mode 100644
index 0000000..dfa70f5
--- /dev/null
+++ b/Testing/Data/headsq/quarter.77.md5
@@ -0,0 +1 @@
+baa65c493ef9c7c7010af5c705dc9ea7
diff --git a/Testing/Data/headsq/quarter.78.md5 b/Testing/Data/headsq/quarter.78.md5
new file mode 100644
index 0000000..bb4e2cb
--- /dev/null
+++ b/Testing/Data/headsq/quarter.78.md5
@@ -0,0 +1 @@
+108ffe6e0ebe8fc0e5717a5a78c28811
diff --git a/Testing/Data/headsq/quarter.79.md5 b/Testing/Data/headsq/quarter.79.md5
new file mode 100644
index 0000000..5003a2d
--- /dev/null
+++ b/Testing/Data/headsq/quarter.79.md5
@@ -0,0 +1 @@
+2c40c6ac2dcffdbde8b116fa90e6dcfe
diff --git a/Testing/Data/headsq/quarter.8.md5 b/Testing/Data/headsq/quarter.8.md5
new file mode 100644
index 0000000..9a50527
--- /dev/null
+++ b/Testing/Data/headsq/quarter.8.md5
@@ -0,0 +1 @@
+e6bddce494cbfb52713f1a194d22c2e0
diff --git a/Testing/Data/headsq/quarter.80.md5 b/Testing/Data/headsq/quarter.80.md5
new file mode 100644
index 0000000..9981dc1
--- /dev/null
+++ b/Testing/Data/headsq/quarter.80.md5
@@ -0,0 +1 @@
+7f49b4a727f55824ae5d6738c35facff
diff --git a/Testing/Data/headsq/quarter.81.md5 b/Testing/Data/headsq/quarter.81.md5
new file mode 100644
index 0000000..6926033
--- /dev/null
+++ b/Testing/Data/headsq/quarter.81.md5
@@ -0,0 +1 @@
+38ff16ef050bc9f51f3160d2d9e0957d
diff --git a/Testing/Data/headsq/quarter.82.md5 b/Testing/Data/headsq/quarter.82.md5
new file mode 100644
index 0000000..f84d4b6
--- /dev/null
+++ b/Testing/Data/headsq/quarter.82.md5
@@ -0,0 +1 @@
+0358beaef372734e8b8dbd418ead9bc2
diff --git a/Testing/Data/headsq/quarter.83.md5 b/Testing/Data/headsq/quarter.83.md5
new file mode 100644
index 0000000..63e6a22
--- /dev/null
+++ b/Testing/Data/headsq/quarter.83.md5
@@ -0,0 +1 @@
+a6e06364035e437946799d789f84f693
diff --git a/Testing/Data/headsq/quarter.84.md5 b/Testing/Data/headsq/quarter.84.md5
new file mode 100644
index 0000000..003447f
--- /dev/null
+++ b/Testing/Data/headsq/quarter.84.md5
@@ -0,0 +1 @@
+dabca902f6c91539428b0c5016023290
diff --git a/Testing/Data/headsq/quarter.85.md5 b/Testing/Data/headsq/quarter.85.md5
new file mode 100644
index 0000000..65f3920
--- /dev/null
+++ b/Testing/Data/headsq/quarter.85.md5
@@ -0,0 +1 @@
+2ca15c19034cd24f8ea1b1a90517bd02
diff --git a/Testing/Data/headsq/quarter.86.md5 b/Testing/Data/headsq/quarter.86.md5
new file mode 100644
index 0000000..2a8f1ad
--- /dev/null
+++ b/Testing/Data/headsq/quarter.86.md5
@@ -0,0 +1 @@
+ff5230716e583364025f2de6b4d60f2a
diff --git a/Testing/Data/headsq/quarter.87.md5 b/Testing/Data/headsq/quarter.87.md5
new file mode 100644
index 0000000..4155b3e
--- /dev/null
+++ b/Testing/Data/headsq/quarter.87.md5
@@ -0,0 +1 @@
+2959cf7e33288d9eef408404a41de433
diff --git a/Testing/Data/headsq/quarter.88.md5 b/Testing/Data/headsq/quarter.88.md5
new file mode 100644
index 0000000..2a098e6
--- /dev/null
+++ b/Testing/Data/headsq/quarter.88.md5
@@ -0,0 +1 @@
+f46e7b8f529fb54c8c45c8947fded13d
diff --git a/Testing/Data/headsq/quarter.89.md5 b/Testing/Data/headsq/quarter.89.md5
new file mode 100644
index 0000000..ee09baa
--- /dev/null
+++ b/Testing/Data/headsq/quarter.89.md5
@@ -0,0 +1 @@
+4b0e4db2cd8cdca423f5b7baf71d17d9
diff --git a/Testing/Data/headsq/quarter.9.md5 b/Testing/Data/headsq/quarter.9.md5
new file mode 100644
index 0000000..7ef5646
--- /dev/null
+++ b/Testing/Data/headsq/quarter.9.md5
@@ -0,0 +1 @@
+9b22f16e3521f92312cd7e40c17ed40d
diff --git a/Testing/Data/headsq/quarter.90.md5 b/Testing/Data/headsq/quarter.90.md5
new file mode 100644
index 0000000..7c906c4
--- /dev/null
+++ b/Testing/Data/headsq/quarter.90.md5
@@ -0,0 +1 @@
+5e6af2ec09eea82e4ab663c3a18cb7ad
diff --git a/Testing/Data/headsq/quarter.91.md5 b/Testing/Data/headsq/quarter.91.md5
new file mode 100644
index 0000000..447e193
--- /dev/null
+++ b/Testing/Data/headsq/quarter.91.md5
@@ -0,0 +1 @@
+f357d679bfc79bde3c89a6d717f271d5
diff --git a/Testing/Data/headsq/quarter.92.md5 b/Testing/Data/headsq/quarter.92.md5
new file mode 100644
index 0000000..681ef57
--- /dev/null
+++ b/Testing/Data/headsq/quarter.92.md5
@@ -0,0 +1 @@
+b9c545203242da6fd8abcb47a0918981
diff --git a/Testing/Data/headsq/quarter.93.md5 b/Testing/Data/headsq/quarter.93.md5
new file mode 100644
index 0000000..da26288
--- /dev/null
+++ b/Testing/Data/headsq/quarter.93.md5
@@ -0,0 +1 @@
+88bf29f0882b13486358e2e15e4f8166
diff --git a/Testing/Data/headsq/quarter.nhdr.md5 b/Testing/Data/headsq/quarter.nhdr.md5
new file mode 100644
index 0000000..1a221a0
--- /dev/null
+++ b/Testing/Data/headsq/quarter.nhdr.md5
@@ -0,0 +1 @@
+72ba13efd3c24e779d4cd8df37bfe4d5
diff --git a/Testing/Data/hello.vtk.md5 b/Testing/Data/hello.vtk.md5
new file mode 100644
index 0000000..50803d0
--- /dev/null
+++ b/Testing/Data/hello.vtk.md5
@@ -0,0 +1 @@
+38aa06245a72d635e804dada001f522c
diff --git a/Testing/Data/hexa.vtk.md5 b/Testing/Data/hexa.vtk.md5
new file mode 100644
index 0000000..7cf48b8
--- /dev/null
+++ b/Testing/Data/hexa.vtk.md5
@@ -0,0 +1 @@
+4625a7dcecd087299fe51c686898dff2
diff --git a/Testing/Data/iflamigm.3ds.md5 b/Testing/Data/iflamigm.3ds.md5
new file mode 100644
index 0000000..e2b9709
--- /dev/null
+++ b/Testing/Data/iflamigm.3ds.md5
@@ -0,0 +1 @@
+e619e8b713ba78500d4a84c6987da911
diff --git a/Testing/Data/ironProt.vtk.md5 b/Testing/Data/ironProt.vtk.md5
new file mode 100644
index 0000000..915b879
--- /dev/null
+++ b/Testing/Data/ironProt.vtk.md5
@@ -0,0 +1 @@
+3e2b46c6e2ddd6880dbfbfe5219f5771
diff --git a/Testing/Data/libtiff/test.tif.md5 b/Testing/Data/libtiff/test.tif.md5
new file mode 100644
index 0000000..b3d1c25
--- /dev/null
+++ b/Testing/Data/libtiff/test.tif.md5
@@ -0,0 +1 @@
+fd08ca34d00ea4b7024b39cc6777b884
diff --git a/Testing/Data/m4_TotalDensity.cube.md5 b/Testing/Data/m4_TotalDensity.cube.md5
new file mode 100644
index 0000000..f8d5638
--- /dev/null
+++ b/Testing/Data/m4_TotalDensity.cube.md5
@@ -0,0 +1 @@
+cd78605ee30d5b0401c2177c46e98f1c
diff --git a/Testing/Data/many_blocks/many_blocks.vtm.md5 b/Testing/Data/many_blocks/many_blocks.vtm.md5
new file mode 100644
index 0000000..f5c0dd2
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks.vtm.md5
@@ -0,0 +1 @@
+4d72161e8e8c9e1b88f6e9836704dcbd
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_0_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_0_0.vtp.md5
new file mode 100644
index 0000000..bc5b8ef
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_0_0.vtp.md5
@@ -0,0 +1 @@
+373f1f58e7248ff0b031a98cc64cbbe0
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_10_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_10_0.vtp.md5
new file mode 100644
index 0000000..5ffbee0
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_10_0.vtp.md5
@@ -0,0 +1 @@
+95260277a4d8370234393d9960d229aa
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_11_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_11_0.vtp.md5
new file mode 100644
index 0000000..6bb9ea0
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_11_0.vtp.md5
@@ -0,0 +1 @@
+73fbc0aaf66b2778ed271ecc2d7b03a0
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_12_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_12_0.vtp.md5
new file mode 100644
index 0000000..f364f7c
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_12_0.vtp.md5
@@ -0,0 +1 @@
+586a7d2baa0180a5338b2dc40383dece
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_13_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_13_0.vtp.md5
new file mode 100644
index 0000000..efe0417
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_13_0.vtp.md5
@@ -0,0 +1 @@
+739bfb445286d8ca9a3a2cdd5e0ac01c
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_14_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_14_0.vtp.md5
new file mode 100644
index 0000000..f496fe8
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_14_0.vtp.md5
@@ -0,0 +1 @@
+a41556b0a432b74f0e2031080091bf4c
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_15_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_15_0.vtp.md5
new file mode 100644
index 0000000..15fb5d7
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_15_0.vtp.md5
@@ -0,0 +1 @@
+de8be187414cb02b76acd72aa5330b50
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_16_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_16_0.vtp.md5
new file mode 100644
index 0000000..f9d3598
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_16_0.vtp.md5
@@ -0,0 +1 @@
+1d53dbae4c766cd972cdd2a244ae99f8
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_17_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_17_0.vtp.md5
new file mode 100644
index 0000000..8503bc9
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_17_0.vtp.md5
@@ -0,0 +1 @@
+4fb756603617470bf9be64158cae6922
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_18_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_18_0.vtp.md5
new file mode 100644
index 0000000..8a948ed
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_18_0.vtp.md5
@@ -0,0 +1 @@
+4746551a51b23d4c14d742e41b755baf
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_19_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_19_0.vtp.md5
new file mode 100644
index 0000000..b3a55b6
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_19_0.vtp.md5
@@ -0,0 +1 @@
+f8e9b8185515b5ea749bab9de14e217f
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_1_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_1_0.vtp.md5
new file mode 100644
index 0000000..445eefd
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_1_0.vtp.md5
@@ -0,0 +1 @@
+7ac9ccea839c51c45d6cec64edc17a96
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_20_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_20_0.vtp.md5
new file mode 100644
index 0000000..462cc1d
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_20_0.vtp.md5
@@ -0,0 +1 @@
+a8be520fb374555d27790cf62a192ac2
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_21_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_21_0.vtp.md5
new file mode 100644
index 0000000..ef4a29e
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_21_0.vtp.md5
@@ -0,0 +1 @@
+c98eed1e72de1e0c6e92fa3d8cbb56f8
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_22_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_22_0.vtp.md5
new file mode 100644
index 0000000..eb85fa5
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_22_0.vtp.md5
@@ -0,0 +1 @@
+a2572086c4d3fb87873386ea9901e453
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_23_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_23_0.vtp.md5
new file mode 100644
index 0000000..485388f
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_23_0.vtp.md5
@@ -0,0 +1 @@
+0c2a36b7a58228a185e275a8ed992923
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_24_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_24_0.vtp.md5
new file mode 100644
index 0000000..d356e31
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_24_0.vtp.md5
@@ -0,0 +1 @@
+780aabdb8e694345fcd081b09b5d5cdc
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_25_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_25_0.vtp.md5
new file mode 100644
index 0000000..49367e0
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_25_0.vtp.md5
@@ -0,0 +1 @@
+c2479fc4519e6994f9d6fdb245c7d729
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_26_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_26_0.vtp.md5
new file mode 100644
index 0000000..56182e3
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_26_0.vtp.md5
@@ -0,0 +1 @@
+a3f6b80497ae5e2b35b58a8201c08c62
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_27_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_27_0.vtp.md5
new file mode 100644
index 0000000..3a7e269
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_27_0.vtp.md5
@@ -0,0 +1 @@
+9334fe4418343c63176e14cda69340af
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_28_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_28_0.vtp.md5
new file mode 100644
index 0000000..d8994f2
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_28_0.vtp.md5
@@ -0,0 +1 @@
+1eb5f51e462a5c609f5164dcce2497d1
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_29_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_29_0.vtp.md5
new file mode 100644
index 0000000..ec7e1c8
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_29_0.vtp.md5
@@ -0,0 +1 @@
+9bddf6c78fd6550d38ac0550f82c41cf
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_2_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_2_0.vtp.md5
new file mode 100644
index 0000000..c435543
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_2_0.vtp.md5
@@ -0,0 +1 @@
+4f363f24ee6a1fd30fd28037d0c55e50
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_30_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_30_0.vtp.md5
new file mode 100644
index 0000000..a5d5505
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_30_0.vtp.md5
@@ -0,0 +1 @@
+17dcef301d7d7ea17ee2deb083a7417f
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_31_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_31_0.vtp.md5
new file mode 100644
index 0000000..c6da89c
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_31_0.vtp.md5
@@ -0,0 +1 @@
+f8f5a034be04ccc441121399f6d88e82
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_32_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_32_0.vtp.md5
new file mode 100644
index 0000000..0e4c659
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_32_0.vtp.md5
@@ -0,0 +1 @@
+355ef17223b4a6e3b1973021ac7e8334
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_33_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_33_0.vtp.md5
new file mode 100644
index 0000000..6c32fac
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_33_0.vtp.md5
@@ -0,0 +1 @@
+d944c11f1ad5a546339e42467f0e3f75
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_34_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_34_0.vtp.md5
new file mode 100644
index 0000000..d6b805a
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_34_0.vtp.md5
@@ -0,0 +1 @@
+3db3c323641df1ad225e4346f3911c68
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_35_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_35_0.vtp.md5
new file mode 100644
index 0000000..fbb7dce
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_35_0.vtp.md5
@@ -0,0 +1 @@
+218dc25e215ebf1c38439ea2a32538ae
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_36_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_36_0.vtp.md5
new file mode 100644
index 0000000..eb1a194
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_36_0.vtp.md5
@@ -0,0 +1 @@
+94b8780719abf83942dcb66d3f04cf1b
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_37_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_37_0.vtp.md5
new file mode 100644
index 0000000..f147940
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_37_0.vtp.md5
@@ -0,0 +1 @@
+588e39566acc5a269e4f672696d84a88
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_38_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_38_0.vtp.md5
new file mode 100644
index 0000000..6cb02d6
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_38_0.vtp.md5
@@ -0,0 +1 @@
+97465645fd0a73a7a5980929ad5b712b
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_39_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_39_0.vtp.md5
new file mode 100644
index 0000000..ff7f284
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_39_0.vtp.md5
@@ -0,0 +1 @@
+de115ba831632e2f1a2609646cc56f77
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_3_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_3_0.vtp.md5
new file mode 100644
index 0000000..1c6d23d
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_3_0.vtp.md5
@@ -0,0 +1 @@
+c4a32d4cfb764c50a452dc07c7c07bb1
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_40_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_40_0.vtp.md5
new file mode 100644
index 0000000..ce12c45
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_40_0.vtp.md5
@@ -0,0 +1 @@
+fad61327097b9b356c7d6fcf9f4ba06a
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_41_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_41_0.vtp.md5
new file mode 100644
index 0000000..c55b6f9
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_41_0.vtp.md5
@@ -0,0 +1 @@
+e76b17193bf83b53d400d7d5d4ba96d4
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_42_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_42_0.vtp.md5
new file mode 100644
index 0000000..0b4263a
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_42_0.vtp.md5
@@ -0,0 +1 @@
+2dc243c78af7b0645a854808f169fad2
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_43_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_43_0.vtp.md5
new file mode 100644
index 0000000..8d1580a
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_43_0.vtp.md5
@@ -0,0 +1 @@
+ee8c4c3d16130cf3258fecd4f36c86b5
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_44_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_44_0.vtp.md5
new file mode 100644
index 0000000..40047b3
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_44_0.vtp.md5
@@ -0,0 +1 @@
+1faabaa54a53cca8cceba78c6d74aaf7
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_45_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_45_0.vtp.md5
new file mode 100644
index 0000000..9795026
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_45_0.vtp.md5
@@ -0,0 +1 @@
+8bdb48d37e104683af8655b60af6fdac
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_46_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_46_0.vtp.md5
new file mode 100644
index 0000000..3392930
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_46_0.vtp.md5
@@ -0,0 +1 @@
+896468a115912626e11847c66119f5c9
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_47_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_47_0.vtp.md5
new file mode 100644
index 0000000..7283084
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_47_0.vtp.md5
@@ -0,0 +1 @@
+e07a4b49d54b37c37567e459c5b4078f
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_48_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_48_0.vtp.md5
new file mode 100644
index 0000000..77ccb7f
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_48_0.vtp.md5
@@ -0,0 +1 @@
+ddb2a595dcd263f5a5e22ee67b1bdc0b
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_49_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_49_0.vtp.md5
new file mode 100644
index 0000000..8fb4edf
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_49_0.vtp.md5
@@ -0,0 +1 @@
+83ae4e78dacc1f1f37379224c277e1d2
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_4_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_4_0.vtp.md5
new file mode 100644
index 0000000..90f8998
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_4_0.vtp.md5
@@ -0,0 +1 @@
+a2034bf7c5d7c07663b97c52ca8918c7
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_50_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_50_0.vtp.md5
new file mode 100644
index 0000000..272c1f6
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_50_0.vtp.md5
@@ -0,0 +1 @@
+2e891e1f4fa774260541ffc71d50a917
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_51_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_51_0.vtp.md5
new file mode 100644
index 0000000..cbd5646
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_51_0.vtp.md5
@@ -0,0 +1 @@
+97e14fe18adabb1a8cc4ce2375a32025
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_52_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_52_0.vtp.md5
new file mode 100644
index 0000000..f60b804
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_52_0.vtp.md5
@@ -0,0 +1 @@
+2991eac2dc991f879703ce636c99b409
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_53_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_53_0.vtp.md5
new file mode 100644
index 0000000..ca066eb
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_53_0.vtp.md5
@@ -0,0 +1 @@
+f2c32077a8b0e7f8afa04426396e37f5
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_54_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_54_0.vtp.md5
new file mode 100644
index 0000000..13c25de
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_54_0.vtp.md5
@@ -0,0 +1 @@
+aaea321a8016d00c1b22b71c73df938e
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_55_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_55_0.vtp.md5
new file mode 100644
index 0000000..4d6e74e
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_55_0.vtp.md5
@@ -0,0 +1 @@
+9de714f8b06c1f6765259d0da24af5ad
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_56_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_56_0.vtp.md5
new file mode 100644
index 0000000..54d64c4
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_56_0.vtp.md5
@@ -0,0 +1 @@
+192b0a23469b7e5c5b67683d704ad8bb
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_57_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_57_0.vtp.md5
new file mode 100644
index 0000000..c1e992e
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_57_0.vtp.md5
@@ -0,0 +1 @@
+d7178ea2b8e6b605a9ba51da106cb143
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_58_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_58_0.vtp.md5
new file mode 100644
index 0000000..e5bb930
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_58_0.vtp.md5
@@ -0,0 +1 @@
+13a7f59453cebe26a5bb65ddf098a9c9
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_59_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_59_0.vtp.md5
new file mode 100644
index 0000000..5072f56
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_59_0.vtp.md5
@@ -0,0 +1 @@
+a756f0a7e8c2cdc85546eb6f3e5a1d16
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_5_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_5_0.vtp.md5
new file mode 100644
index 0000000..963cd31
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_5_0.vtp.md5
@@ -0,0 +1 @@
+a094d76241d92429bb40ce06c5724a7c
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_60_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_60_0.vtp.md5
new file mode 100644
index 0000000..9faf869
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_60_0.vtp.md5
@@ -0,0 +1 @@
+335fb5addfbba81096cc5964c322c973
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_61_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_61_0.vtp.md5
new file mode 100644
index 0000000..122645f
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_61_0.vtp.md5
@@ -0,0 +1 @@
+b3dda5fae35bac50fa0639954e1d22f9
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_62_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_62_0.vtp.md5
new file mode 100644
index 0000000..f7b2e98
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_62_0.vtp.md5
@@ -0,0 +1 @@
+1141885da96e4a8e654ff2a6722a1ebc
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_63_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_63_0.vtp.md5
new file mode 100644
index 0000000..09a5df0
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_63_0.vtp.md5
@@ -0,0 +1 @@
+46c8e40db76251431cf086673a413ef8
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_64_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_64_0.vtp.md5
new file mode 100644
index 0000000..977bb48
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_64_0.vtp.md5
@@ -0,0 +1 @@
+ce96f3ebfa1396aee783c6e5c37ca5bc
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_65_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_65_0.vtp.md5
new file mode 100644
index 0000000..5d6732f
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_65_0.vtp.md5
@@ -0,0 +1 @@
+3aa7e68588a5c32ca08462bbbd31a291
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_6_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_6_0.vtp.md5
new file mode 100644
index 0000000..bf77811
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_6_0.vtp.md5
@@ -0,0 +1 @@
+79202b46a177bb3bd74c35c18f2c8487
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_7_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_7_0.vtp.md5
new file mode 100644
index 0000000..541b9e8
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_7_0.vtp.md5
@@ -0,0 +1 @@
+f73d6b1cf811b8fec3119f625259ccc1
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_8_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_8_0.vtp.md5
new file mode 100644
index 0000000..d1b38fd
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_8_0.vtp.md5
@@ -0,0 +1 @@
+fdc3608548cb69e2185d9c67ada5051e
diff --git a/Testing/Data/many_blocks/many_blocks/many_blocks_9_0.vtp.md5 b/Testing/Data/many_blocks/many_blocks/many_blocks_9_0.vtp.md5
new file mode 100644
index 0000000..477d98d
--- /dev/null
+++ b/Testing/Data/many_blocks/many_blocks/many_blocks_9_0.vtp.md5
@@ -0,0 +1 @@
+693fca059431f1dba7cd6c89eadaf68a
diff --git a/Testing/Data/masonry-wide.jpg.md5 b/Testing/Data/masonry-wide.jpg.md5
new file mode 100644
index 0000000..01f71b4
--- /dev/null
+++ b/Testing/Data/masonry-wide.jpg.md5
@@ -0,0 +1 @@
+1081755f6849b87094b3727c2bfc41a3
diff --git a/Testing/Data/masonry.bmp.md5 b/Testing/Data/masonry.bmp.md5
new file mode 100644
index 0000000..33d9ba8
--- /dev/null
+++ b/Testing/Data/masonry.bmp.md5
@@ -0,0 +1 @@
+8aa8fa634bb80a41256b2fd0e01819af
diff --git a/Testing/Data/matrix.vtk.md5 b/Testing/Data/matrix.vtk.md5
new file mode 100644
index 0000000..f0c9ee5
--- /dev/null
+++ b/Testing/Data/matrix.vtk.md5
@@ -0,0 +1 @@
+896dc6012fb0e62e692fbf9fb856d4ef
diff --git a/Testing/Data/mbwavelet_ascii.q.md5 b/Testing/Data/mbwavelet_ascii.q.md5
new file mode 100644
index 0000000..a3d4ae3
--- /dev/null
+++ b/Testing/Data/mbwavelet_ascii.q.md5
@@ -0,0 +1 @@
+57961e175cbbe2b1f899abfd143fa1b5
diff --git a/Testing/Data/mbwavelet_ascii.xyz.md5 b/Testing/Data/mbwavelet_ascii.xyz.md5
new file mode 100644
index 0000000..8560ac6
--- /dev/null
+++ b/Testing/Data/mbwavelet_ascii.xyz.md5
@@ -0,0 +1 @@
+ef9068e8e63a8ea169aa35e0888f9851
diff --git a/Testing/Data/minimal.hdr.gz.md5 b/Testing/Data/minimal.hdr.gz.md5
new file mode 100644
index 0000000..90024e1
--- /dev/null
+++ b/Testing/Data/minimal.hdr.gz.md5
@@ -0,0 +1 @@
+83ca49e95e7347850210bd10d78f9dd0
diff --git a/Testing/Data/minimal.img.gz.md5 b/Testing/Data/minimal.img.gz.md5
new file mode 100644
index 0000000..2783d66
--- /dev/null
+++ b/Testing/Data/minimal.img.gz.md5
@@ -0,0 +1 @@
+2fd38978358bfe07f72c0c12f29822e3
diff --git a/Testing/Data/minimal.nii.gz.md5 b/Testing/Data/minimal.nii.gz.md5
new file mode 100644
index 0000000..a288618
--- /dev/null
+++ b/Testing/Data/minimal.nii.gz.md5
@@ -0,0 +1 @@
+fc6571acfdd3a7bb51a4c1a892bd5334
diff --git a/Testing/Data/mni-surface-mesh.obj.md5 b/Testing/Data/mni-surface-mesh.obj.md5
new file mode 100644
index 0000000..29f34e7
--- /dev/null
+++ b/Testing/Data/mni-surface-mesh.obj.md5
@@ -0,0 +1 @@
+db732e122d32d15d4610175e75162b39
diff --git a/Testing/Data/motor.g.md5 b/Testing/Data/motor.g.md5
new file mode 100644
index 0000000..3fbc1ea
--- /dev/null
+++ b/Testing/Data/motor.g.md5
@@ -0,0 +1 @@
+995276e58646d587e00cedd32a03350f
diff --git a/Testing/Data/mr.001.md5 b/Testing/Data/mr.001.md5
new file mode 100644
index 0000000..2a76a9e
--- /dev/null
+++ b/Testing/Data/mr.001.md5
@@ -0,0 +1 @@
+09e97bf8d70123d27a4526f75d325dc9
diff --git a/Testing/Data/multi-ascii.q.md5 b/Testing/Data/multi-ascii.q.md5
new file mode 100644
index 0000000..1d89f49
--- /dev/null
+++ b/Testing/Data/multi-ascii.q.md5
@@ -0,0 +1 @@
+ece4e707449d61abf3a20ce29e62354e
diff --git a/Testing/Data/multi-ascii.xyz.md5 b/Testing/Data/multi-ascii.xyz.md5
new file mode 100644
index 0000000..b77da98
--- /dev/null
+++ b/Testing/Data/multi-ascii.xyz.md5
@@ -0,0 +1 @@
+b8bf1bab16558de240dfc445f8368ab3
diff --git a/Testing/Data/multi-bin-2D.q.md5 b/Testing/Data/multi-bin-2D.q.md5
new file mode 100644
index 0000000..a2a00f5
--- /dev/null
+++ b/Testing/Data/multi-bin-2D.q.md5
@@ -0,0 +1 @@
+02e2e5b75b2a889050900adfdee47c2c
diff --git a/Testing/Data/multi-bin-2D.xyz.md5 b/Testing/Data/multi-bin-2D.xyz.md5
new file mode 100644
index 0000000..920118f
--- /dev/null
+++ b/Testing/Data/multi-bin-2D.xyz.md5
@@ -0,0 +1 @@
+a22f5a5dcd3f555ada8ca1e438ebe842
diff --git a/Testing/Data/multi-bin-C.q.md5 b/Testing/Data/multi-bin-C.q.md5
new file mode 100644
index 0000000..4813809
--- /dev/null
+++ b/Testing/Data/multi-bin-C.q.md5
@@ -0,0 +1 @@
+4ee7ff9df1b2fc65610bfe9e2b5f10d1
diff --git a/Testing/Data/multi-bin-C.xyz.md5 b/Testing/Data/multi-bin-C.xyz.md5
new file mode 100644
index 0000000..efa6b3e
--- /dev/null
+++ b/Testing/Data/multi-bin-C.xyz.md5
@@ -0,0 +1 @@
+0931394a6513a2f4f0cbfbff9195f35f
diff --git a/Testing/Data/multi-bin-oflow.q.md5 b/Testing/Data/multi-bin-oflow.q.md5
new file mode 100644
index 0000000..7bee043
--- /dev/null
+++ b/Testing/Data/multi-bin-oflow.q.md5
@@ -0,0 +1 @@
+b734d86798d486833b3c107d998f1b9f
diff --git a/Testing/Data/multi-bin.f.md5 b/Testing/Data/multi-bin.f.md5
new file mode 100644
index 0000000..ff7054c
--- /dev/null
+++ b/Testing/Data/multi-bin.f.md5
@@ -0,0 +1 @@
+11a0c9dd68b3d8dfc1c337fc3b62e945
diff --git a/Testing/Data/multi-bin.q.md5 b/Testing/Data/multi-bin.q.md5
new file mode 100644
index 0000000..1b33b36
--- /dev/null
+++ b/Testing/Data/multi-bin.q.md5
@@ -0,0 +1 @@
+db6519c198a01372f5ef363b1f3fd8e4
diff --git a/Testing/Data/multi-bin.xyz.md5 b/Testing/Data/multi-bin.xyz.md5
new file mode 100644
index 0000000..a09461d
--- /dev/null
+++ b/Testing/Data/multi-bin.xyz.md5
@@ -0,0 +1 @@
+342877fcc2d21cf457b8bb0373bae922
diff --git a/Testing/Data/multi.p3d.md5 b/Testing/Data/multi.p3d.md5
new file mode 100644
index 0000000..e84d76b
--- /dev/null
+++ b/Testing/Data/multi.p3d.md5
@@ -0,0 +1 @@
+9dcd1374fe079a6c58d287c05cfd16a0
diff --git a/Testing/Data/multicomb_0.vts.md5 b/Testing/Data/multicomb_0.vts.md5
new file mode 100644
index 0000000..bd8d75e
--- /dev/null
+++ b/Testing/Data/multicomb_0.vts.md5
@@ -0,0 +1 @@
+ed98c9e5228c61770b93c16f1f8999f7
diff --git a/Testing/Data/multicomb_1.vts.md5 b/Testing/Data/multicomb_1.vts.md5
new file mode 100644
index 0000000..bdd07e1
--- /dev/null
+++ b/Testing/Data/multicomb_1.vts.md5
@@ -0,0 +1 @@
+899482068b5a812566d2836489be12b5
diff --git a/Testing/Data/multicomb_2.vts.md5 b/Testing/Data/multicomb_2.vts.md5
new file mode 100644
index 0000000..c10c154
--- /dev/null
+++ b/Testing/Data/multicomb_2.vts.md5
@@ -0,0 +1 @@
+26628e7d9b765b51bd8413a2c391d97f
diff --git a/Testing/Data/neghip.slc.md5 b/Testing/Data/neghip.slc.md5
new file mode 100644
index 0000000..cdb5f38
--- /dev/null
+++ b/Testing/Data/neghip.slc.md5
@@ -0,0 +1 @@
+ee4b3fbbb9f51cf99c3e3ad358df470d
diff --git a/Testing/Data/noise.png.md5 b/Testing/Data/noise.png.md5
new file mode 100644
index 0000000..dcfe34d
--- /dev/null
+++ b/Testing/Data/noise.png.md5
@@ -0,0 +1 @@
+6477e301d5faa287854c968635a1862a
diff --git a/Testing/Data/nut.slc.md5 b/Testing/Data/nut.slc.md5
new file mode 100644
index 0000000..5fd34cf
--- /dev/null
+++ b/Testing/Data/nut.slc.md5
@@ -0,0 +1 @@
+de6777ab4fcbf544e0a47edfddf8cba8
diff --git a/Testing/Data/office.binary.vtk.md5 b/Testing/Data/office.binary.vtk.md5
new file mode 100644
index 0000000..0910c3f
--- /dev/null
+++ b/Testing/Data/office.binary.vtk.md5
@@ -0,0 +1 @@
+51be2666cf17f3ee082aaa8a995866e7
diff --git a/Testing/Data/plate.vtk.md5 b/Testing/Data/plate.vtk.md5
new file mode 100644
index 0000000..852ff83
--- /dev/null
+++ b/Testing/Data/plate.vtk.md5
@@ -0,0 +1 @@
+a3f938178d8af24577a28becd8d87aac
diff --git a/Testing/Data/points.txt.md5 b/Testing/Data/points.txt.md5
new file mode 100644
index 0000000..37fca54
--- /dev/null
+++ b/Testing/Data/points.txt.md5
@@ -0,0 +1 @@
+fc85651162cbf6577f067820a388d09d
diff --git a/Testing/Data/political.vtp.md5 b/Testing/Data/political.vtp.md5
new file mode 100644
index 0000000..80f9073
--- /dev/null
+++ b/Testing/Data/political.vtp.md5
@@ -0,0 +1 @@
+ac98e21ef49c00040eda384905741d01
diff --git a/Testing/Data/polyEx.vtk.md5 b/Testing/Data/polyEx.vtk.md5
new file mode 100644
index 0000000..333fceb
--- /dev/null
+++ b/Testing/Data/polyEx.vtk.md5
@@ -0,0 +1 @@
+d177fe23ece1dd5291a5b8087950b45c
diff --git a/Testing/Data/polyhedron2pieces.vtu.md5 b/Testing/Data/polyhedron2pieces.vtu.md5
new file mode 100644
index 0000000..0942e47
--- /dev/null
+++ b/Testing/Data/polyhedron2pieces.vtu.md5
@@ -0,0 +1 @@
+4ef32be62baf8370a87512f04e567c0b
diff --git a/Testing/Data/porphyrin.cml.md5 b/Testing/Data/porphyrin.cml.md5
new file mode 100644
index 0000000..2506547
--- /dev/null
+++ b/Testing/Data/porphyrin.cml.md5
@@ -0,0 +1 @@
+d885dcea91fbfe62698c310e9a759ad4
diff --git a/Testing/Data/post.vtk.md5 b/Testing/Data/post.vtk.md5
new file mode 100644
index 0000000..24af2d3
--- /dev/null
+++ b/Testing/Data/post.vtk.md5
@@ -0,0 +1 @@
+75ff20bc2b8883ffe274c243749c25d2
diff --git a/Testing/Data/prism.neu.md5 b/Testing/Data/prism.neu.md5
new file mode 100644
index 0000000..e4b1a3f
--- /dev/null
+++ b/Testing/Data/prism.neu.md5
@@ -0,0 +1 @@
+7b58ffd08ff7d14feefe94804988a200
diff --git a/Testing/Data/prostar.cel.md5 b/Testing/Data/prostar.cel.md5
new file mode 100644
index 0000000..8b4a38e
--- /dev/null
+++ b/Testing/Data/prostar.cel.md5
@@ -0,0 +1 @@
+53d6165d1d1658dd5b582af704d16d8e
diff --git a/Testing/Data/prostar.vrt.md5 b/Testing/Data/prostar.vrt.md5
new file mode 100644
index 0000000..0700714
--- /dev/null
+++ b/Testing/Data/prostar.vrt.md5
@@ -0,0 +1 @@
+762b87d9d734c080e76dc27340333a22
diff --git a/Testing/Data/quadraticTetra01.vtu.md5 b/Testing/Data/quadraticTetra01.vtu.md5
new file mode 100644
index 0000000..e873080
--- /dev/null
+++ b/Testing/Data/quadraticTetra01.vtu.md5
@@ -0,0 +1 @@
+b4dd6706de0ecd01fe07404e0e87f79d
diff --git a/Testing/Data/qualityEx.vtk.md5 b/Testing/Data/qualityEx.vtk.md5
new file mode 100644
index 0000000..c5f7a4a
--- /dev/null
+++ b/Testing/Data/qualityEx.vtk.md5
@@ -0,0 +1 @@
+cf86fef34adcbde85b3153af088e19af
diff --git a/Testing/Data/ruler.png.md5 b/Testing/Data/ruler.png.md5
new file mode 100644
index 0000000..17b8875
--- /dev/null
+++ b/Testing/Data/ruler.png.md5
@@ -0,0 +1 @@
+7a06d2366399206c300fdf397885b7ec
diff --git a/Testing/Data/sample.xml.md5 b/Testing/Data/sample.xml.md5
new file mode 100644
index 0000000..048bf20
--- /dev/null
+++ b/Testing/Data/sample.xml.md5
@@ -0,0 +1 @@
+91fff9122a38b486a82f06af939be4c7
diff --git a/Testing/Data/sampleCurveGrid4.nc.md5 b/Testing/Data/sampleCurveGrid4.nc.md5
new file mode 100644
index 0000000..86044c9
--- /dev/null
+++ b/Testing/Data/sampleCurveGrid4.nc.md5
@@ -0,0 +1 @@
+9f6d62ea4393522dccfa64e092d8af91
diff --git a/Testing/Data/sampleGenGrid3.nc.md5 b/Testing/Data/sampleGenGrid3.nc.md5
new file mode 100644
index 0000000..e36bec2
--- /dev/null
+++ b/Testing/Data/sampleGenGrid3.nc.md5
@@ -0,0 +1 @@
+0109b749c3887ee20da962b21d8f669c
diff --git a/Testing/Data/sphere.slc.md5 b/Testing/Data/sphere.slc.md5
new file mode 100644
index 0000000..d4969f3
--- /dev/null
+++ b/Testing/Data/sphere.slc.md5
@@ -0,0 +1 @@
+39cb6a4fb0c60606474341e0f5ba954e
diff --git a/Testing/Data/t3_grid_0.mnc.md5 b/Testing/Data/t3_grid_0.mnc.md5
new file mode 100644
index 0000000..f8d3da0
--- /dev/null
+++ b/Testing/Data/t3_grid_0.mnc.md5
@@ -0,0 +1 @@
+f956067bda0ad1708c264e33f140de4d
diff --git a/Testing/Data/teapot.g.md5 b/Testing/Data/teapot.g.md5
new file mode 100644
index 0000000..96af1b5
--- /dev/null
+++ b/Testing/Data/teapot.g.md5
@@ -0,0 +1 @@
+c0ac0ac1fc99328942a941e5cfce1f34
diff --git a/Testing/Data/tensors.vtk.md5 b/Testing/Data/tensors.vtk.md5
new file mode 100644
index 0000000..144f3ee
--- /dev/null
+++ b/Testing/Data/tensors.vtk.md5
@@ -0,0 +1 @@
+be704b99bfd92b741b579176ba879fd0
diff --git a/Testing/Data/test.p3d.md5 b/Testing/Data/test.p3d.md5
new file mode 100644
index 0000000..5b9f884
--- /dev/null
+++ b/Testing/Data/test.p3d.md5
@@ -0,0 +1 @@
+7aea824e978d52ce0f446ede7f930bfc
diff --git a/Testing/Data/tetraMesh.vtk.md5 b/Testing/Data/tetraMesh.vtk.md5
new file mode 100644
index 0000000..f702053
--- /dev/null
+++ b/Testing/Data/tetraMesh.vtk.md5
@@ -0,0 +1 @@
+a61954053206a1ddfe664a38337d46f5
diff --git a/Testing/Data/texThres2.vtk.md5 b/Testing/Data/texThres2.vtk.md5
new file mode 100644
index 0000000..970f21b
--- /dev/null
+++ b/Testing/Data/texThres2.vtk.md5
@@ -0,0 +1 @@
+8fc1aa50b67b0a2240242a955330511e
diff --git a/Testing/Data/textureRGBA.png.md5 b/Testing/Data/textureRGBA.png.md5
new file mode 100644
index 0000000..3f2a936
--- /dev/null
+++ b/Testing/Data/textureRGBA.png.md5
@@ -0,0 +1 @@
+e17228d2310e4f1315ed8c9f83e1990a
diff --git a/Testing/Data/thio3xx.xyz.md5 b/Testing/Data/thio3xx.xyz.md5
new file mode 100644
index 0000000..b8a9348
--- /dev/null
+++ b/Testing/Data/thio3xx.xyz.md5
@@ -0,0 +1 @@
+199ea5fbfc0d10f97f2244c52952cace
diff --git a/Testing/Data/timestep_0_15.vts.md5 b/Testing/Data/timestep_0_15.vts.md5
new file mode 100644
index 0000000..e54a9e5
--- /dev/null
+++ b/Testing/Data/timestep_0_15.vts.md5
@@ -0,0 +1 @@
+865ee36c795ff4249138fb0b2611a33b
diff --git a/Testing/Data/tos_O1_2001-2002.nc.md5 b/Testing/Data/tos_O1_2001-2002.nc.md5
new file mode 100644
index 0000000..b06691e
--- /dev/null
+++ b/Testing/Data/tos_O1_2001-2002.nc.md5
@@ -0,0 +1 @@
+8bfb78e61956e35ee346e5d9d702402a
diff --git a/Testing/Data/track1.binary.vtk.md5 b/Testing/Data/track1.binary.vtk.md5
new file mode 100644
index 0000000..ca2ac0f
--- /dev/null
+++ b/Testing/Data/track1.binary.vtk.md5
@@ -0,0 +1 @@
+3a1e7c3cdf47a9cf3b9cbe6f728127d5
diff --git a/Testing/Data/track2.binary.vtk.md5 b/Testing/Data/track2.binary.vtk.md5
new file mode 100644
index 0000000..e420286
--- /dev/null
+++ b/Testing/Data/track2.binary.vtk.md5
@@ -0,0 +1 @@
+a8b5ebcd7b965073a887c40e141a60fc
diff --git a/Testing/Data/track3.binary.vtk.md5 b/Testing/Data/track3.binary.vtk.md5
new file mode 100644
index 0000000..9a426c8
--- /dev/null
+++ b/Testing/Data/track3.binary.vtk.md5
@@ -0,0 +1 @@
+b4af0a0e3927b0ad845efe86c3c56889
diff --git a/Testing/Data/treetest.xml.md5 b/Testing/Data/treetest.xml.md5
new file mode 100644
index 0000000..7a902fa
--- /dev/null
+++ b/Testing/Data/treetest.xml.md5
@@ -0,0 +1 @@
+e187244cea5503e4ce80a61e823b69a8
diff --git a/Testing/Data/uGridEx.vtk.md5 b/Testing/Data/uGridEx.vtk.md5
new file mode 100644
index 0000000..f33c8cd
--- /dev/null
+++ b/Testing/Data/uGridEx.vtk.md5
@@ -0,0 +1 @@
+48bd364912b8134d205decf4a62e9441
diff --git a/Testing/Data/uniform-001371-5x5x5.vtp.md5 b/Testing/Data/uniform-001371-5x5x5.vtp.md5
new file mode 100644
index 0000000..2c6b4ca
--- /dev/null
+++ b/Testing/Data/uniform-001371-5x5x5.vtp.md5
@@ -0,0 +1 @@
+98770c84b3404c83ee4fbe246abfbba5
diff --git a/Testing/Data/usa.vtk.md5 b/Testing/Data/usa.vtk.md5
new file mode 100644
index 0000000..8183e74
--- /dev/null
+++ b/Testing/Data/usa.vtk.md5
@@ -0,0 +1 @@
+c62c09eb1914481b1ca48edbfc287980
diff --git a/Testing/Data/usa_image.jpg.md5 b/Testing/Data/usa_image.jpg.md5
new file mode 100644
index 0000000..0875e3a
--- /dev/null
+++ b/Testing/Data/usa_image.jpg.md5
@@ -0,0 +1 @@
+c3a85b21946b6eb8675ac73e166eafd7
diff --git a/Testing/Data/vase_1comp.vti.md5 b/Testing/Data/vase_1comp.vti.md5
new file mode 100644
index 0000000..dc9e9e9
--- /dev/null
+++ b/Testing/Data/vase_1comp.vti.md5
@@ -0,0 +1 @@
+291004f3061977130607dba70c486502
diff --git a/Testing/Data/vase_4comp.vti.md5 b/Testing/Data/vase_4comp.vti.md5
new file mode 100644
index 0000000..10dee6b
--- /dev/null
+++ b/Testing/Data/vase_4comp.vti.md5
@@ -0,0 +1 @@
+e61f4847408f8f390cb3613896aa50ce
diff --git a/Testing/Data/vehicle_data.csv.md5 b/Testing/Data/vehicle_data.csv.md5
new file mode 100644
index 0000000..02361ba
--- /dev/null
+++ b/Testing/Data/vehicle_data.csv.md5
@@ -0,0 +1 @@
+061b7e6328555158c1e2807639e6833a
diff --git a/Testing/Data/vtk.png.md5 b/Testing/Data/vtk.png.md5
new file mode 100644
index 0000000..54f0d0c
--- /dev/null
+++ b/Testing/Data/vtk.png.md5
@@ -0,0 +1 @@
+865030d5a2ab99018569be5eb0a6f8c0
diff --git a/Testing/Data/vtk.vtk.md5 b/Testing/Data/vtk.vtk.md5
new file mode 100644
index 0000000..d676d57
--- /dev/null
+++ b/Testing/Data/vtk.vtk.md5
@@ -0,0 +1 @@
+299b19441cee0413138bd667e796b0bd
diff --git a/Testing/Data/vwgt.coords.md5 b/Testing/Data/vwgt.coords.md5
new file mode 100644
index 0000000..181926f
--- /dev/null
+++ b/Testing/Data/vwgt.coords.md5
@@ -0,0 +1 @@
+e93b0c96de07053c617b679414c3f696
diff --git a/Testing/Data/vwgt.graph.md5 b/Testing/Data/vwgt.graph.md5
new file mode 100644
index 0000000..e6c7636
--- /dev/null
+++ b/Testing/Data/vwgt.graph.md5
@@ -0,0 +1 @@
+c6bf80c4660ec0ea8b6f761156a79db3
diff --git a/Testing/External/CMakeLists.txt b/Testing/External/CMakeLists.txt
index a02e8bb..b557b3c 100644
--- a/Testing/External/CMakeLists.txt
+++ b/Testing/External/CMakeLists.txt
@@ -1,6 +1,11 @@
 # This project builds the test directories from all VTK modules as a separate
 # project outside the main VTK build tree as if they were an application.
 cmake_minimum_required(VERSION 2.8.5 FATAL_ERROR)
+
+if(POLICY CMP0022)
+  cmake_policy(SET CMP0022 NEW)
+endif()
+
 project(VTKTestExternal)
 if(VTK_SOURCE_DIR OR VTK_BINARY_DIR)
   message(FATAL_ERROR "This directory may build only outside VTK!")
@@ -10,10 +15,12 @@ include(CTest)
 
 # Find the top of the main VTK source tree.
 get_filename_component(VTK_TOP_DIR ${VTKTestExternal_SOURCE_DIR}/../.. ABSOLUTE)
+set(ExternalData_SOURCE_ROOT ${VTK_TOP_DIR})
 
 # Load module infrastructure macros.
 list(APPEND CMAKE_MODULE_PATH ${VTK_TOP_DIR}/CMake)
 include(vtkModuleMacros)
+include(vtkExternalData)
 include(vtkTestingMacros)
 
 # Find the VTK build or install tree.  Assume the version matches exactly.
@@ -43,10 +50,13 @@ endif()
 vtk_module_glob("${VTK_TOP_DIR}" "${VTKTestExternal_BINARY_DIR}" Cxx)
 
 # Input information for test build files.
-find_path(VTK_DATA_ROOT VTKData.readme ${VTK_TOP_DIR}/../VTKData)
-find_path(VTK_LARGE_DATA_ROOT VTKLargeData.readme ${VTK_TOP_DIR}/../VTKLargeData)
+option(VTK_USE_LARGE_DATA "Enable tests requiring \"large\" data" OFF)
+set(VTK_TEST_DATA_DIR "${ExternalData_BINARY_ROOT}/Testing")
+set(VTK_TEST_INPUT_DIR "${VTK_TOP_DIR}/Testing/Data")
 set(VTK_TEST_OUTPUT_DIR "${VTKTestExternal_BINARY_DIR}/Testing/Temporary")
 
+include(vtkLegacyData)
+
 # If vtkParallelMPI is found then look for MPI and bring in
 # vtkTestingMPISupport.
 if(vtkParallelMPI_LOADED)
@@ -72,3 +82,7 @@ foreach(mod ${VTK_MODULES_ALL})
     endif()
   endif()
 endforeach()
+
+# Create target to download data from the VTKData group.  This must come after
+# all tests have been added that reference the group, so we put it last.
+ExternalData_Add_Target(VTKData)
diff --git a/Testing/GenericBridge/vtkBridgeAttribute.cxx b/Testing/GenericBridge/vtkBridgeAttribute.cxx
index 706f656..ce15a99 100644
--- a/Testing/GenericBridge/vtkBridgeAttribute.cxx
+++ b/Testing/GenericBridge/vtkBridgeAttribute.cxx
@@ -32,7 +32,7 @@
 #include "vtkPointData.h"
 #include "vtkSetGet.h"
 
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkBridgeAttribute);
 
diff --git a/Testing/GenericBridge/vtkBridgeCell.cxx b/Testing/GenericBridge/vtkBridgeCell.cxx
index d07ab1d..d200d40 100644
--- a/Testing/GenericBridge/vtkBridgeCell.cxx
+++ b/Testing/GenericBridge/vtkBridgeCell.cxx
@@ -22,7 +22,7 @@
 
 #include "vtkBridgeCell.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkBridgeCellIterator.h"
 #include "vtkObjectFactory.h"
diff --git a/Testing/GenericBridge/vtkBridgeCellIterator.cxx b/Testing/GenericBridge/vtkBridgeCellIterator.cxx
index 409b7e2..442f463 100644
--- a/Testing/GenericBridge/vtkBridgeCellIterator.cxx
+++ b/Testing/GenericBridge/vtkBridgeCellIterator.cxx
@@ -20,7 +20,7 @@
 
 #include "vtkBridgeCellIterator.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeCell.h"
diff --git a/Testing/GenericBridge/vtkBridgeCellIteratorOnCellBoundaries.cxx b/Testing/GenericBridge/vtkBridgeCellIteratorOnCellBoundaries.cxx
index 4f99c59..02167c8 100644
--- a/Testing/GenericBridge/vtkBridgeCellIteratorOnCellBoundaries.cxx
+++ b/Testing/GenericBridge/vtkBridgeCellIteratorOnCellBoundaries.cxx
@@ -18,7 +18,7 @@
 
 #include "vtkBridgeCellIteratorOnCellBoundaries.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeCell.h"
diff --git a/Testing/GenericBridge/vtkBridgeCellIteratorOnCellList.cxx b/Testing/GenericBridge/vtkBridgeCellIteratorOnCellList.cxx
index 0150a39..1e9e18d 100644
--- a/Testing/GenericBridge/vtkBridgeCellIteratorOnCellList.cxx
+++ b/Testing/GenericBridge/vtkBridgeCellIteratorOnCellList.cxx
@@ -18,7 +18,7 @@
 
 #include "vtkBridgeCellIteratorOnCellList.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeCell.h"
diff --git a/Testing/GenericBridge/vtkBridgeCellIteratorOnDataSet.cxx b/Testing/GenericBridge/vtkBridgeCellIteratorOnDataSet.cxx
index b5640f3..fa7cdf2 100644
--- a/Testing/GenericBridge/vtkBridgeCellIteratorOnDataSet.cxx
+++ b/Testing/GenericBridge/vtkBridgeCellIteratorOnDataSet.cxx
@@ -18,7 +18,7 @@
 
 #include "vtkBridgeCellIteratorOnDataSet.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeCell.h"
diff --git a/Testing/GenericBridge/vtkBridgeCellIteratorOne.cxx b/Testing/GenericBridge/vtkBridgeCellIteratorOne.cxx
index 2d58a2b..0c4e2b8 100644
--- a/Testing/GenericBridge/vtkBridgeCellIteratorOne.cxx
+++ b/Testing/GenericBridge/vtkBridgeCellIteratorOne.cxx
@@ -18,7 +18,7 @@
 
 #include "vtkBridgeCellIteratorOne.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeCell.h"
diff --git a/Testing/GenericBridge/vtkBridgeCellIteratorStrategy.cxx b/Testing/GenericBridge/vtkBridgeCellIteratorStrategy.cxx
index eeff53a..09b37dc 100644
--- a/Testing/GenericBridge/vtkBridgeCellIteratorStrategy.cxx
+++ b/Testing/GenericBridge/vtkBridgeCellIteratorStrategy.cxx
@@ -24,7 +24,7 @@
 
 #include "vtkBridgeCellIteratorStrategy.h"
 
-#include <assert.h>
+#include <cassert>
 
 
 //-----------------------------------------------------------------------------
diff --git a/Testing/GenericBridge/vtkBridgeDataSet.cxx b/Testing/GenericBridge/vtkBridgeDataSet.cxx
index d140b90..19f9944 100644
--- a/Testing/GenericBridge/vtkBridgeDataSet.cxx
+++ b/Testing/GenericBridge/vtkBridgeDataSet.cxx
@@ -19,7 +19,7 @@
 
 #include "vtkBridgeDataSet.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkDataSet.h"
diff --git a/Testing/GenericBridge/vtkBridgePointIterator.cxx b/Testing/GenericBridge/vtkBridgePointIterator.cxx
index c9db336..63a2f3b 100644
--- a/Testing/GenericBridge/vtkBridgePointIterator.cxx
+++ b/Testing/GenericBridge/vtkBridgePointIterator.cxx
@@ -21,7 +21,7 @@
 
 #include "vtkBridgePointIterator.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeDataSet.h"
@@ -156,7 +156,7 @@ void vtkBridgePointIterator::InitWithOnePoint(vtkBridgeDataSet *ds,
                                               vtkIdType id)
 {
   assert("pre: valid_id" &&
-         ((!ds!=0)|| ((id>=0)&&(id<=ds->GetNumberOfCells())))); // A=>B: !A||B
+         ((ds==0) || ((id>=0)&&(id<=ds->GetNumberOfCells())))); // A=>B: !A||B
 
   this->IteratorOne->InitWithOnePoint(ds,id);
   this->CurrentIterator=this->IteratorOne;
diff --git a/Testing/GenericBridge/vtkBridgePointIteratorOnCell.cxx b/Testing/GenericBridge/vtkBridgePointIteratorOnCell.cxx
index 684b33c..6346e09 100644
--- a/Testing/GenericBridge/vtkBridgePointIteratorOnCell.cxx
+++ b/Testing/GenericBridge/vtkBridgePointIteratorOnCell.cxx
@@ -21,7 +21,7 @@
 
 #include "vtkBridgePointIteratorOnCell.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeDataSet.h"
diff --git a/Testing/GenericBridge/vtkBridgePointIteratorOnDataSet.cxx b/Testing/GenericBridge/vtkBridgePointIteratorOnDataSet.cxx
index 6e7187b..f6f7a1f 100644
--- a/Testing/GenericBridge/vtkBridgePointIteratorOnDataSet.cxx
+++ b/Testing/GenericBridge/vtkBridgePointIteratorOnDataSet.cxx
@@ -21,7 +21,7 @@
 
 #include "vtkBridgePointIteratorOnDataSet.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeDataSet.h"
diff --git a/Testing/GenericBridge/vtkBridgePointIteratorOne.cxx b/Testing/GenericBridge/vtkBridgePointIteratorOne.cxx
index ae59ef8..b7488d1 100644
--- a/Testing/GenericBridge/vtkBridgePointIteratorOne.cxx
+++ b/Testing/GenericBridge/vtkBridgePointIteratorOne.cxx
@@ -21,7 +21,7 @@
 
 #include "vtkBridgePointIteratorOne.h"
 
-#include <assert.h>
+#include <cassert>
 
 #include "vtkObjectFactory.h"
 #include "vtkBridgeDataSet.h"
@@ -126,7 +126,7 @@ void vtkBridgePointIteratorOne::InitWithOnePoint(vtkBridgeDataSet *ds,
                                                  vtkIdType id)
 {
   assert("pre: valid_id" &&
-         ((!ds!=0)|| ((id>=0)&&(id<=ds->GetNumberOfCells())))); // A=>B: !A||B
+         ((ds==0) || ((id>=0)&&(id<=ds->GetNumberOfCells())))); // A=>B: !A||B
 
   vtkSetObjectBodyMacro(DataSet,vtkBridgeDataSet,ds);
   this->Id=id;
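
The two precondition fixes above (here and in vtkBridgePointIterator.cxx) correct an inverted test: the contract "if a data set is supplied, then id must be in range" is the implication A => B, written as !A || B, so the bound on id is only enforced when ds is non-null. A small standalone sketch of the same idiom, using plain pointers instead of VTK classes:

    #include <cassert>
    #include <cstddef>

    // Precondition "values != NULL => id < count", encoded as "!A || B".
    void UsePoint(const double *values, std::size_t count, std::size_t id)
    {
      assert("pre: valid_id" && ((values == NULL) || (id < count)));
      (void)values; (void)count; (void)id;             // body omitted in this sketch
    }

    int main()
    {
      UsePoint(NULL, 0, 42);                           // vacuously valid: no data set
      const double pts[3] = { 0.0, 1.0, 2.0 };
      UsePoint(pts, 3, 2);                             // bound holds
      return 0;
    }
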
diff --git a/Testing/Install/VIT/CMakeLists.txt b/Testing/Install/VIT/CMakeLists.txt
index 6c95536..0c227ca 100644
--- a/Testing/Install/VIT/CMakeLists.txt
+++ b/Testing/Install/VIT/CMakeLists.txt
@@ -23,7 +23,7 @@ IF(VTK_FOUND)
   SET(VIT_FORWARD_EXE_INSTALL "\"vit${VIT_EXE_SUFFIX}\"")
   CONFIGURE_FILE(${CMAKE_CURRENT_SOURCE_DIR}/vit-forward.c.in
     ${CMAKE_CURRENT_BINARY_DIR}/vit-forward.c
-    @ONLY IMMEDIATE)
+    @ONLY)
   ADD_EXECUTABLE(vit
     ${CMAKE_CURRENT_BINARY_DIR}/vit-forward.c)
   INSTALL(TARGETS vit DESTINATION bin)
diff --git a/Testing/Rendering/module.cmake b/Testing/Rendering/module.cmake
index 68494ba..a5b02e5 100644
--- a/Testing/Rendering/module.cmake
+++ b/Testing/Rendering/module.cmake
@@ -3,4 +3,7 @@ vtk_module(vtkTestingRendering
     vtkTestingCore
     vtkRenderingCore
     vtkImagingCore
+  PRIVATE_DEPENDS
+    vtksys
+    vtkIOImage
   )
diff --git a/Testing/Rendering/vtkRegressionTestImage.h b/Testing/Rendering/vtkRegressionTestImage.h
index 5ed45b2..3900d45 100644
--- a/Testing/Rendering/vtkRegressionTestImage.h
+++ b/Testing/Rendering/vtkRegressionTestImage.h
@@ -24,8 +24,8 @@
 class vtkRegressionTester : public vtkTesting
 {
 protected:
-  vtkRegressionTester() {};
-  ~vtkRegressionTester() {};
+  vtkRegressionTester() {}
+  ~vtkRegressionTester() {}
 private:
   vtkRegressionTester(const vtkRegressionTester&);  // Not implemented.
   void operator=(const vtkRegressionTester&);  // Not implemented.
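
The only change above is dropping the stray ';' after the empty constructor and destructor bodies: a member function definition ends at its closing brace, and the extra semicolon is something pedantic compiler modes warn about. A tiny sketch of the corrected form, with the same private, unimplemented copy operations used to make the class non-copyable:

    class Example
    {
    public:
      Example() {}                          // no semicolon after the inline body
      ~Example() {}
    private:
      Example(const Example&);              // Not implemented.
      void operator=(const Example&);       // Not implemented.
    };

    int main()
    {
      Example e;
      (void)e;
      return 0;
    }
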
diff --git a/Testing/Rendering/vtkTesting.cxx b/Testing/Rendering/vtkTesting.cxx
index 12ce02a..b639d95 100644
--- a/Testing/Rendering/vtkTesting.cxx
+++ b/Testing/Rendering/vtkTesting.cxx
@@ -16,14 +16,13 @@
 
 #include "vtkObjectFactory.h"
 #include "vtkWindowToImageFilter.h"
+#include "vtkPNGReader.h"
 #include "vtkPNGWriter.h"
 #include "vtkImageShiftScale.h"
 #include "vtkImageDifference.h"
-#include "vtkPNGReader.h"
 #include "vtkRenderWindow.h"
 #include "vtkImageData.h"
 #include "vtkTimerLog.h"
-#include "vtkSmartPointer.h"
 #include "vtkNew.h"
 #include "vtkRenderWindowInteractor.h"
 #include "vtkInteractorEventRecorder.h"
@@ -37,11 +36,6 @@
 #include "vtkFloatArray.h"
 #include "vtkStreamingDemandDrivenPipeline.h"
 #include "vtkInformation.h"
-#include "vtkAlgorithm.h"
-
-#include "vtkSmartPointer.h"
-#define VTK_CREATE(type, name) \
-  vtkSmartPointer<type> name = vtkSmartPointer<type>::New()
 
 #include <sys/stat.h>
 
@@ -57,7 +51,7 @@ using std::string;
 // Find in command tail, failing that find in environment,
 // failing that return a default.
 // Up to caller to delete the string returned.
-string vtkTestingGetArgOrEnvOrDefault(
+static string vtkTestingGetArgOrEnvOrDefault(
           string argName,       // argument identifier flag. eg "-D"
           vector<string> &argv, // command tail
           string env,           // environment variable name to find
@@ -66,12 +60,12 @@ string vtkTestingGetArgOrEnvOrDefault(
   string argValue;
 
   // Search command tail.
-  int argc=static_cast<int>(argv.size());
-  for (int i=0; i<argc; i++)
+  int argc = static_cast<int>(argv.size());
+  for (int i = 0; i < argc; i++)
     {
-    if (argName==argv[i] && i<(argc-1))
+    if (argName == argv[i] && i < (argc - 1))
       {
-      argValue=argv[i+1];
+      argValue = argv[i + 1];
       }
     }
   // If not found search environment.
@@ -81,12 +75,12 @@ string vtkTestingGetArgOrEnvOrDefault(
     char *foundenv=getenv(env.c_str());
     if (foundenv)
       {
-      argValue=foundenv;
+      argValue = foundenv;
       }
     else
       {
       // Not found, fall back to default.
-      argValue=def;
+      argValue = def;
       }
     }
 
@@ -108,24 +102,24 @@ vtkIdType AccumulateScaledL2Norm(
         double &SumModR) // result
 {
   //
-  SumModR=0.0;
-  for (vtkIdType i=0; i<nTups; ++i)
+  SumModR = 0.0;
+  for (vtkIdType i = 0; i < nTups; ++i)
     {
-    double modR=0.0;
-    double modA=0.0;
-    for (int q=0; q<nComps; ++q)
+    double modR = 0.0;
+    double modA = 0.0;
+    for (int q = 0; q < nComps; ++q)
       {
-      double a=pA[q];
-      double b=pB[q];
-      modA+=a*a;
-      double r=b-a;
-      modR+=r*r;
+      double a = pA[q];
+      double b = pB[q];
+      modA += a * a;
+      double r = b - a;
+      modR += r * r;
       }
-    modA=sqrt(modA);
-    modA= modA<1.0 ? 1.0 : modA;
-    SumModR+=sqrt(modR)/modA;
-    pA+=nComps;
-    pB+=nComps;
+    modA = sqrt(modA);
+    modA = modA<1.0 ? 1.0 : modA;
+    SumModR += sqrt(modR) / modA;
+    pA += nComps;
+    pB += nComps;
     }
   return nTups;
 }
@@ -146,6 +140,7 @@ vtkTesting::vtkTesting()
   this->StartCPUTime = vtkTimerLog::GetCPUTime();
   this->StartWallTime = vtkTimerLog::GetUniversalTime();
 }
+
 //-----------------------------------------------------------------------------
 vtkTesting::~vtkTesting()
 {
@@ -154,15 +149,17 @@ vtkTesting::~vtkTesting()
   this->SetDataRoot(0);
   this->SetTempDirectory(0);
 }
+
 //-----------------------------------------------------------------------------
 void vtkTesting::AddArgument(const char *arg)
 {
   this->Args.push_back(arg);
 }
+
 //-----------------------------------------------------------------------------
-void vtkTesting::AddArguments(int argc,const char **argv)
+void vtkTesting::AddArguments(int argc, const char **argv)
 {
-  for (int i=0; i<argc; ++i)
+  for (int i = 0; i < argc; ++i)
     {
     this->Args.push_back(argv[i]);
     }
@@ -171,10 +168,10 @@ void vtkTesting::AddArguments(int argc,const char **argv)
 char *vtkTesting::GetArgument(const char *argName)
 {
   string argValue
-    = vtkTestingGetArgOrEnvOrDefault(argName,this->Args,"","");
+    = vtkTestingGetArgOrEnvOrDefault(argName, this->Args, "", "");
 
-  char *cArgValue=new char [argValue.size()+1];
-  strcpy(cArgValue,argValue.c_str());
+  char *cArgValue = new char[argValue.size() + 1];
+  strcpy(cArgValue, argValue.c_str());
 
   return cArgValue;
 }
@@ -220,24 +217,24 @@ const char *vtkTesting::GetValidImageFileName()
   string baseline=vtkTestingGetArgOrEnvOrDefault(
                 "-B", this->Args,"VTK_BASELINE_ROOT", this->GetDataRoot());
 
-  for (size_t i=0; i<(this->Args.size()-1); ++i)
+  for (size_t i = 0; i < (this->Args.size() - 1); ++i)
     {
-    if ( this->Args[i] == "-V")
+    if (this->Args[i] == "-V")
       {
-      const char *ch = this->Args[i+1].c_str();
-      if ( ch[0] == '/'
+      const char *ch = this->Args[i + 1].c_str();
+      if (ch[0] == '/'
 #ifdef _WIN32
-        || (ch[0] >= 'a' && ch[0] <= 'z' && ch[1] == ':' )
-        || (ch[0] >= 'A' && ch[0] <= 'Z' && ch[1] == ':' )
+        || (ch[0] >= 'a' && ch[0] <= 'z' && ch[1] == ':')
+        || (ch[0] >= 'A' && ch[0] <= 'Z' && ch[1] == ':')
 #endif
         )
         {
-        baseline = this->Args[i+1];
+        baseline = this->Args[i + 1];
         }
       else
         {
         baseline += "/";
-        baseline += this->Args[i+1];
+        baseline += this->Args[i + 1];
         }
       break;
       }
@@ -250,10 +247,9 @@ const char *vtkTesting::GetValidImageFileName()
 //-----------------------------------------------------------------------------
 int vtkTesting::IsInteractiveModeSpecified()
 {
-  unsigned int i;
-  for (i = 0; i < this->Args.size(); ++i)
+  for (size_t i = 0; i < this->Args.size(); ++i)
     {
-    if ( this->Args[i] == "-I")
+    if (this->Args[i] == "-I")
       {
       return 1;
       }
@@ -263,10 +259,9 @@ int vtkTesting::IsInteractiveModeSpecified()
 //-----------------------------------------------------------------------------
 int vtkTesting::IsFlagSpecified(const char *flag)
 {
-  unsigned int i;
-  for (i = 0; i < this->Args.size(); ++i)
+  for (size_t i = 0; i < this->Args.size(); ++i)
     {
-    if ( this->Args[i] == flag)
+    if (this->Args[i] == flag)
       {
       return 1;
       }
@@ -276,10 +271,9 @@ int vtkTesting::IsFlagSpecified(const char *flag)
 //-----------------------------------------------------------------------------
 int vtkTesting::IsValidImageSpecified()
 {
-  unsigned int i;
-  for (i = 1; i < this->Args.size(); ++i)
+  for (size_t i = 1; i < this->Args.size(); ++i)
     {
-    if ( this->Args[i-1] == "-V")
+    if (this->Args[i-1] == "-V")
       {
       return 1;
       }
@@ -298,16 +292,17 @@ char* vtkTesting::IncrementFileName(const char* fname, int count)
     return 0;
     }
   int extLen = static_cast<int>(strlen(counts));
-  char* newFileName = new char[orgLen+extLen+2];
+  char* newFileName = new char[orgLen + extLen + 2];
   strcpy(newFileName, fname);
 
-  newFileName[orgLen-4] = '_';
+  newFileName[orgLen - 4] = '_';
   int i, marker;
-  for(marker=orgLen-3, i=0; marker < orgLen-3+extLen; marker++, i++)
+  for (marker = orgLen - 3, i = 0; marker < orgLen - 3 + extLen;
+       marker++, i++)
     {
     newFileName[marker] = counts[i];
     }
-  strcpy( newFileName + marker, ".png" );
+  strcpy(newFileName + marker, ".png");
 
   return newFileName;
 }
@@ -359,46 +354,45 @@ int vtkTesting::RegressionTest(double thresh)
 //-----------------------------------------------------------------------------
 int vtkTesting::RegressionTest(double thresh, ostream &os)
 {
-  VTK_CREATE(vtkWindowToImageFilter, rt_w2if);
-  rt_w2if->SetInput(this->RenderWindow);
+  vtkNew<vtkWindowToImageFilter> rtW2if;
+  rtW2if->SetInput(this->RenderWindow);
 
-  unsigned int i;
-  for (i=0; i<this->Args.size(); i++)
+  for (unsigned int i = 0; i < this->Args.size(); ++i)
     {
-    if ( strcmp("-FrontBuffer", this->Args[i].c_str()) == 0 )
+    if ("-FrontBuffer" == this->Args[i])
       {
       this->FrontBufferOn();
       }
-    else if ( strcmp("-NoRerender", this->Args[i].c_str()) == 0 )
+    else if ("-NoRerender" == this->Args[i])
       {
-      rt_w2if->ShouldRerenderOff();
+      rtW2if->ShouldRerenderOff();
       }
     }
 
   // perform an extra render to make sure it is displayed
-  if ( !this->FrontBuffer)
+  if (!this->FrontBuffer)
     {
     this->RenderWindow->Render();
     // tell it to read the back buffer
-    rt_w2if->ReadFrontBufferOff();
+    rtW2if->ReadFrontBufferOff();
     }
   else
     {
     // read the front buffer
-    rt_w2if->ReadFrontBufferOn();
+    rtW2if->ReadFrontBufferOn();
     }
 
-  rt_w2if->Update();
-  int res = this->RegressionTest(rt_w2if, thresh, os);
+  rtW2if->Update();
+  int res = this->RegressionTest(rtW2if.Get(), thresh, os);
   return res;
 }
 //-----------------------------------------------------------------------------
-int vtkTesting::RegressionTest(const std::string &pngFileName, double thresh)
+int vtkTesting::RegressionTest(const string &pngFileName, double thresh)
 {
   return this->RegressionTest(pngFileName, thresh, cout);
 }
 //-----------------------------------------------------------------------------
-int vtkTesting::RegressionTest(const std::string &pngFileName, double thresh,
+int vtkTesting::RegressionTest(const string &pngFileName, double thresh,
                                ostream &os)
 {
   vtkNew<vtkPNGReader> inputReader;
@@ -413,48 +407,48 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
 {
   // do a get to compute the real value
   this->GetValidImageFileName();
-  std::string tmpDir = this->GetTempDirectory();
+  string tmpDir = this->GetTempDirectory();
 
   // construct the names for the error images
-  std::string validName = this->ValidImageFileName;
-  std::string::size_type slash_pos = validName.rfind("/");
-  if(slash_pos != std::string::npos)
+  string validName = this->ValidImageFileName;
+  string::size_type slashPos = validName.rfind("/");
+  if (slashPos != string::npos)
     {
-    validName = validName.substr(slash_pos + 1);
+    validName = validName.substr(slashPos + 1);
     }
 
   // check the valid image
-  FILE *rt_fin = fopen(this->ValidImageFileName,"r");
-  if (rt_fin)
+  FILE *rtFin = fopen(this->ValidImageFileName, "r");
+  if (rtFin)
     {
-    fclose(rt_fin);
+    fclose(rtFin);
     }
   else // there was no valid image, so write one to the temp dir
     {
-    std::string vImage = tmpDir + "/" + validName;
-    VTK_CREATE(vtkPNGWriter, rt_pngw);
-    rt_pngw->SetFileName(vImage.c_str());
-    rt_pngw->SetInputConnection(imageSource->GetOutputPort());
-    rt_pngw->Write();
+    string vImage = tmpDir + "/" + validName;
+    vtkNew<vtkPNGWriter> rtPngw;
+    rtPngw->SetFileName(vImage.c_str());
+    rtPngw->SetInputConnection(imageSource->GetOutputPort());
+    rtPngw->Write();
     os << "<DartMeasurement name=\"ImageNotFound\" type=\"text/string\">"
-      << this->ValidImageFileName << "</DartMeasurement>" << endl;
+       << this->ValidImageFileName << "</DartMeasurement>" << endl;
     return FAILED;
     }
 
-  VTK_CREATE(vtkPNGReader, rt_png);
-  rt_png->SetFileName(this->ValidImageFileName);
-  rt_png->Update();
+  vtkNew<vtkPNGReader> rtPng;
+  rtPng->SetFileName(this->ValidImageFileName);
+  rtPng->Update();
   imageSource->Update();
 
-  VTK_CREATE(vtkImageDifference, rt_id);
+  vtkNew<vtkImageDifference> rtId;
 
-  VTK_CREATE(vtkImageClip, ic1);
+  vtkNew<vtkImageClip> ic1;
   ic1->SetClipData(1);
   ic1->SetInputConnection(imageSource->GetOutputPort());
 
-  VTK_CREATE(vtkImageClip, ic2);
+  vtkNew<vtkImageClip> ic2;
   ic2->SetClipData(1);
-  ic2->SetInputConnection(rt_png->GetOutputPort());
+  ic2->SetInputConnection(rtPng->GetOutputPort());
 
   int* wExt1 = ic1->GetInputInformation()->Get(
     vtkStreamingDemandDrivenPipeline::WHOLE_EXTENT());
@@ -475,10 +469,10 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
                             wExt2[5]);
 
   int ext1[6], ext2[6];
-  rt_id->SetInputConnection(ic1->GetOutputPort());
+  rtId->SetInputConnection(ic1->GetOutputPort());
   ic1->Update();
   ic1->GetOutput()->GetExtent(ext1);
-  rt_id->SetImageConnection(ic2->GetOutputPort());
+  rtId->SetImageConnection(ic2->GetOutputPort());
   ic2->Update();
   ic2->GetOutput()->GetExtent(ext2);
 
@@ -489,8 +483,8 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
       (ext2[5]-ext2[4]) == (ext1[5]-ext1[4]))
     {
     // Cannot compute difference unless image sizes are the same
-    rt_id->Update();
-    minError = rt_id->GetThresholdedError();
+    rtId->Update();
+    minError = rtId->GetThresholdedError();
     }
 
   this->ImageDifference = minError;
@@ -499,12 +493,12 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
     {
     // Make sure there was actually a difference image before
     // accepting the error measure.
-    vtkImageData* output = rt_id->GetOutput();
-    if(output)
+    vtkImageData* output = rtId->GetOutput();
+    if (output)
       {
       int dims[3];
       output->GetDimensions(dims);
-      if(dims[0]*dims[1]*dims[2] > 0)
+      if(dims[0] * dims[1] * dims[2] > 0)
         {
         passed = 1;
         }
@@ -523,7 +517,7 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
   // images of the form foo_N.png (where N=1,2,3...) and compare against
   // them.
   double error;
-  int count=1, errIndex=-1;
+  int count = 1, errIndex = -1;
   char* newFileName;
   while (!passed)
     {
@@ -534,7 +528,7 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
       break;
       }
 
-    rt_png->SetFileName(newFileName);
+    rtPng->SetFileName(newFileName);
 
     // Need to reset the output whole extent cause we may have baselines
     // of differing sizes. (Yes, we have such cases !)
@@ -547,14 +541,14 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
                               wExt2[5]);
     ic2->UpdateWholeExtent();
 
-    rt_id->GetImage()->GetExtent(ext2);
-    if ((ext2[1]-ext2[0]) == (ext1[1]-ext1[0]) &&
-        (ext2[3]-ext2[2]) == (ext1[3]-ext1[2]) &&
-        (ext2[5]-ext2[4]) == (ext1[5]-ext1[4]))
+    rtId->GetImage()->GetExtent(ext2);
+    if ((ext2[1] - ext2[0]) == (ext1[1] - ext1[0]) &&
+        (ext2[3] - ext2[2]) == (ext1[3] - ext1[2]) &&
+        (ext2[5] - ext2[4]) == (ext1[5] - ext1[4]))
       {
       // Cannot compute difference unless image sizes are the same
-      rt_id->Update();
-      error = rt_id->GetThresholdedError();
+      rtId->Update();
+      error = rtId->GetThresholdedError();
       }
     else
       {
@@ -565,12 +559,12 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
       {
       // Make sure there was actually a difference image before
       // accepting the error measure.
-      vtkImageData* output = rt_id->GetOutput();
-      if(output)
+      vtkImageData* output = rtId->GetOutput();
+      if (output)
         {
         int dims[3];
         output->GetDimensions(dims);
-        if(dims[0]*dims[1]*dims[2] > 0)
+        if (dims[0] * dims[1] * dims[2] > 0)
           {
           minError = error;
           passed = 1;
@@ -593,7 +587,7 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
   os << "<DartMeasurement name=\"ImageError\" type=\"numeric/double\">";
   os << minError;
   os << "</DartMeasurement>";
-  if ( errIndex <= 0)
+  if (errIndex <= 0)
     {
     os << "<DartMeasurement name=\"BaselineImage\" type=\"text/string\">Standard</DartMeasurement>";
     }
@@ -613,75 +607,75 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
   if (errIndex >= 0)
     {
     newFileName = IncrementFileName(this->ValidImageFileName, errIndex);
-    rt_png->SetFileName(newFileName);
+    rtPng->SetFileName(newFileName);
     delete[] newFileName;
     }
   else
     {
-    rt_png->SetFileName(this->ValidImageFileName);
+    rtPng->SetFileName(this->ValidImageFileName);
     }
 
-  rt_png->Update();
-  rt_id->GetImage()->GetExtent(ext2);
+  rtPng->Update();
+  rtId->GetImage()->GetExtent(ext2);
 
   // If no image differences produced an image, do not write a
   // difference image.
-  if(minError <= 0)
+  if (minError <= 0)
     {
     os << "Image differencing failed to produce an image." << endl;
     return FAILED;
     }
-  if(!(
-      (ext2[1]-ext2[0]) == (ext1[1]-ext1[0]) &&
-      (ext2[3]-ext2[2]) == (ext1[3]-ext1[2]) &&
-      (ext2[5]-ext2[4]) == (ext1[5]-ext1[4])))
+  if (!(
+       (ext2[1] - ext2[0]) == (ext1[1] - ext1[0]) &&
+       (ext2[3] - ext2[2]) == (ext1[3] - ext1[2]) &&
+       (ext2[5] - ext2[4]) == (ext1[5] - ext1[4])))
     {
     os << "Image differencing failed to produce an image because images are "
       "different size:" << endl;
-    os << "Valid image: " << (ext2[1]-ext2[0]) << ", " << (ext2[3]-ext2[2])
-      << ", " << (ext2[5]-ext2[4]) << endl;
-    os << "Test image: " << (ext1[1]-ext1[0]) << ", " << (ext1[3]-ext1[2])
-      << ", " << (ext1[5]-ext1[4]) << endl;
+    os << "Valid image: " << (ext2[1] - ext2[0]) << ", " << (ext2[3] - ext2[2])
+      << ", " << (ext2[5] - ext2[4]) << endl;
+    os << "Test image: " << (ext1[1] - ext1[0]) << ", " << (ext1[3] - ext1[2])
+      << ", " << (ext1[5] - ext1[4]) << endl;
     return FAILED;
     }
 
-  rt_id->Update();
+  rtId->Update();
 
   // test the directory for writing
-  std::string diff_filename = tmpDir + "/" + validName;
-  std::string::size_type dot_pos = diff_filename.rfind(".");
-  if(dot_pos != std::string::npos)
+  string diffFilename = tmpDir + "/" + validName;
+  string::size_type dotPos = diffFilename.rfind(".");
+  if (dotPos != string::npos)
     {
-    diff_filename = diff_filename.substr(0, dot_pos);
+    diffFilename = diffFilename.substr(0, dotPos);
     }
-  diff_filename += ".diff.png";
-  FILE *rt_dout = fopen(diff_filename.c_str(), "wb");
-  if (rt_dout)
+  diffFilename += ".diff.png";
+  FILE *rtDout = fopen(diffFilename.c_str(), "wb");
+  if (rtDout)
     {
-    fclose(rt_dout);
+    fclose(rtDout);
 
     // write out the difference image gamma adjusted for the dashboard
-    VTK_CREATE(vtkImageShiftScale, rt_gamma);
-    rt_gamma->SetInputConnection(rt_id->GetOutputPort());
-    rt_gamma->SetShift(0);
-    rt_gamma->SetScale(10);
+    vtkNew<vtkImageShiftScale> rtGamma;
+    rtGamma->SetInputConnection(rtId->GetOutputPort());
+    rtGamma->SetShift(0);
+    rtGamma->SetScale(10);
 
-    VTK_CREATE(vtkPNGWriter, rt_pngw);
-    rt_pngw->SetFileName(diff_filename.c_str());
-    rt_pngw->SetInputConnection(rt_gamma->GetOutputPort());
-    rt_pngw->Write();
+    vtkNew<vtkPNGWriter> rtPngw;
+    rtPngw->SetFileName(diffFilename.c_str());
+    rtPngw->SetInputConnection(rtGamma->GetOutputPort());
+    rtPngw->Write();
 
     // write out the image that was generated
-    std::string vImage = tmpDir + "/" + validName;
-    rt_pngw->SetFileName(vImage.c_str());
-    rt_pngw->SetInputConnection(imageSource->GetOutputPort());
-    rt_pngw->Write();
+    string vImage = tmpDir + "/" + validName;
+    rtPngw->SetFileName(vImage.c_str());
+    rtPngw->SetInputConnection(imageSource->GetOutputPort());
+    rtPngw->Write();
 
     os <<  "<DartMeasurementFile name=\"TestImage\" type=\"image/png\">";
     os << vImage;
     os << "</DartMeasurementFile>";
     os << "<DartMeasurementFile name=\"DifferenceImage\" type=\"image/png\">";
-    os << diff_filename;
+    os << diffFilename;
     os << "</DartMeasurementFile>";
     os << "<DartMeasurementFile name=\"ValidImage\" type=\"image/png\">";
     os << this->ValidImageFileName;
@@ -694,9 +688,8 @@ int vtkTesting::RegressionTest(vtkAlgorithm* imageSource,
 int vtkTesting::Test(int argc, char *argv[], vtkRenderWindow *rw,
                      double thresh )
 {
-  VTK_CREATE(vtkTesting, testing);
-  int i;
-  for (i = 0; i < argc; ++i)
+  vtkNew<vtkTesting> testing;
+  for (int i = 0; i < argc; ++i)
     {
     testing->AddArgument(argv[i]);
     }
@@ -707,9 +700,9 @@ int vtkTesting::Test(int argc, char *argv[], vtkRenderWindow *rw,
     }
 
   testing->FrontBufferOff();
-  for (i=0; i<argc; i++)
+  for (int i = 0; i < argc; ++i)
     {
-    if ( strcmp("-FrontBuffer", argv[i]) == 0 )
+    if (strcmp("-FrontBuffer", argv[i]) == 0)
       {
       testing->FrontBufferOn();
       }
@@ -725,14 +718,13 @@ int vtkTesting::Test(int argc, char *argv[], vtkRenderWindow *rw,
   return NOT_RUN;
 }
 //-----------------------------------------------------------------------------
-int vtkTesting::CompareAverageOfL2Norm(
-        vtkDataArray *daA,
-        vtkDataArray *daB,
-        double tol)
+int vtkTesting::CompareAverageOfL2Norm(vtkDataArray *daA,
+                                       vtkDataArray *daB,
+                                       double tol)
 {
-  int typeA=daA->GetDataType();
-  int typeB=daB->GetDataType();
-  if (typeA!=typeB)
+  int typeA = daA->GetDataType();
+  int typeB = daB->GetDataType();
+  if (typeA != typeB)
     {
     vtkWarningMacro("Incompatible data types: "
                     << typeA << ","
@@ -740,13 +732,13 @@ int vtkTesting::CompareAverageOfL2Norm(
     return 0;
     }
   //
-  vtkIdType nTupsA=daA->GetNumberOfTuples();
-  vtkIdType nTupsB=daB->GetNumberOfTuples();
-  int nCompsA=daA->GetNumberOfComponents();
-  int nCompsB=daB->GetNumberOfComponents();
+  vtkIdType nTupsA = daA->GetNumberOfTuples();
+  vtkIdType nTupsB = daB->GetNumberOfTuples();
+  int nCompsA = daA->GetNumberOfComponents();
+  int nCompsB = daB->GetNumberOfComponents();
   //
-  if ((nTupsA!=nTupsB)
-     || (nCompsA!=nCompsB))
+  if ((nTupsA != nTupsB)
+     || (nCompsA != nCompsB))
     {
     vtkWarningMacro(
               "Arrays: " << daA->GetName()
@@ -759,26 +751,26 @@ int vtkTesting::CompareAverageOfL2Norm(
     return 0;
     }
 
-  double L2=0.0;
-  vtkIdType N=0;
+  double L2 = 0.0;
+  vtkIdType N = 0;
   switch (typeA)
     {
     case VTK_DOUBLE:
       {
-      vtkDoubleArray *A=vtkDoubleArray::SafeDownCast(daA);
-      double *pA=A->GetPointer(0);
-      vtkDoubleArray *B=vtkDoubleArray::SafeDownCast(daB);
-      double *pB=B->GetPointer(0);
-      N=AccumulateScaledL2Norm(pA,pB,nTupsA,nCompsA,L2);
+      vtkDoubleArray *A = vtkDoubleArray::SafeDownCast(daA);
+      double *pA = A->GetPointer(0);
+      vtkDoubleArray *B = vtkDoubleArray::SafeDownCast(daB);
+      double *pB = B->GetPointer(0);
+      N = AccumulateScaledL2Norm(pA, pB, nTupsA, nCompsA, L2);
       }
       break;
     case VTK_FLOAT:
       {
-      vtkFloatArray *A=vtkFloatArray::SafeDownCast(daA);
-      float *pA=A->GetPointer(0);
-      vtkFloatArray *B=vtkFloatArray::SafeDownCast(daB);
-      float *pB=B->GetPointer(0);
-      N=AccumulateScaledL2Norm(pA,pB,nTupsA,nCompsA,L2);
+      vtkFloatArray *A = vtkFloatArray::SafeDownCast(daA);
+      float *pA = A->GetPointer(0);
+      vtkFloatArray *B = vtkFloatArray::SafeDownCast(daB);
+      float *pB = B->GetPointer(0);
+      N = AccumulateScaledL2Norm(pA, pB, nTupsA, nCompsA, L2);
       }
       break;
     default:
@@ -790,7 +782,7 @@ int vtkTesting::CompareAverageOfL2Norm(
       break;
     }
   //
-  if (N<=0)
+  if (N <= 0)
   {
     return 0;
   }
@@ -805,8 +797,8 @@ int vtkTesting::CompareAverageOfL2Norm(
          << "."  << endl;
     }
   //
-  double avgL2=L2/static_cast<double>(N);
-  if (avgL2>tol)
+  double avgL2 = L2 / static_cast<double>(N);
+  if (avgL2 > tol)
     {
     return 0;
     }
@@ -815,30 +807,28 @@ int vtkTesting::CompareAverageOfL2Norm(
   return 1;
 }
 //-----------------------------------------------------------------------------
-int vtkTesting::CompareAverageOfL2Norm(
-        vtkDataSet *dsA,
-        vtkDataSet *dsB,
-        double tol)
+int vtkTesting::CompareAverageOfL2Norm(vtkDataSet *dsA, vtkDataSet *dsB,
+                                       double tol)
 {
-  vtkDataArray *daA=0;
-  vtkDataArray *daB=0;
-  int status=0;
+  vtkDataArray *daA = 0;
+  vtkDataArray *daB = 0;
+  int status = 0;
 
   // Compare points if the dataset derives from
   // vtkPointSet.
-  vtkPointSet *ptSetA=vtkPointSet::SafeDownCast(dsA);
-  vtkPointSet *ptSetB=vtkPointSet::SafeDownCast(dsB);
-  if (ptSetA!=NULL && ptSetB!=NULL)
+  vtkPointSet *ptSetA = vtkPointSet::SafeDownCast(dsA);
+  vtkPointSet *ptSetB = vtkPointSet::SafeDownCast(dsB);
+  if (ptSetA != NULL && ptSetB != NULL)
     {
     if (this->Verbose)
       {
       cout << "Comparing points:" << endl;
       }
-    daA=ptSetA->GetPoints()->GetData();
-    daB=ptSetB->GetPoints()->GetData();
+    daA = ptSetA->GetPoints()->GetData();
+    daB = ptSetB->GetPoints()->GetData();
     //
-    status=CompareAverageOfL2Norm(daA,daB,tol);
-    if (status==0)
+    status = CompareAverageOfL2Norm(daA, daB, tol);
+    if (status == 0)
       {
       return 0;
       }
@@ -849,9 +839,9 @@ int vtkTesting::CompareAverageOfL2Norm(
     {
     cout << "Comparing data arrays:" << endl;
     }
-  int nDaA=dsA->GetPointData()->GetNumberOfArrays();
-  int nDaB=dsB->GetPointData()->GetNumberOfArrays();
-  if (nDaA!=nDaB)
+  int nDaA = dsA->GetPointData()->GetNumberOfArrays();
+  int nDaB = dsB->GetPointData()->GetNumberOfArrays();
+  if (nDaA != nDaB)
     {
     vtkWarningMacro("Point data, " << dsA
               <<  " and " << dsB << " differ in number of arrays"
@@ -859,13 +849,13 @@ int vtkTesting::CompareAverageOfL2Norm(
     return 0;
     }
   //
-  for (int arrayId=0; arrayId<nDaA; ++arrayId)
+  for (int arrayId = 0; arrayId < nDaA; ++arrayId)
     {
-    daA=dsA->GetPointData()->GetArray(arrayId);
-    daB=dsB->GetPointData()->GetArray(arrayId);
+    daA = dsA->GetPointData()->GetArray(arrayId);
+    daB = dsB->GetPointData()->GetArray(arrayId);
     //
-    status=CompareAverageOfL2Norm(daA,daB,tol);
-    if (status==0)
+    status = CompareAverageOfL2Norm(daA, daB, tol);
+    if (status == 0)
       {
       return 0;
       }
@@ -875,10 +865,10 @@ int vtkTesting::CompareAverageOfL2Norm(
 }
 
 //-----------------------------------------------------------------------------
-int vtkTesting::InteractorEventLoop( int argc,
-                                     char *argv[],
-                                     vtkRenderWindowInteractor *iren,
-                                     const char *playbackStream )
+int vtkTesting::InteractorEventLoop(int argc,
+                                    char *argv[],
+                                    vtkRenderWindowInteractor *iren,
+                                    const char *playbackStream )
 {
   bool disableReplay = false, record = false;
   for (int i = 0; i < argc; i++)
@@ -887,8 +877,7 @@ int vtkTesting::InteractorEventLoop( int argc,
     record        |= (strcmp("--Record", argv[i]) == 0);
     }
 
-  vtkSmartPointer<vtkInteractorEventRecorder> recorder =
-      vtkSmartPointer<vtkInteractorEventRecorder>::New();
+  vtkNew<vtkInteractorEventRecorder> recorder;
   recorder->SetInteractor(iren);
 
   if (!disableReplay)
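
Most of the churn above replaces the file-local VTK_CREATE macro (a vtkSmartPointer shorthand) with vtkNew and normalizes spacing; the comparison logic itself is unchanged. A condensed sketch of that core comparison, assuming a VTK 6.x build; the file names and threshold are placeholders, not values taken from the patch:

    #include "vtkNew.h"
    #include "vtkPNGReader.h"
    #include "vtkImageDifference.h"

    int main()
    {
      vtkNew<vtkPNGReader> testImage;                  // image produced by a test
      testImage->SetFileName("test.png");

      vtkNew<vtkPNGReader> baseline;                   // stored valid image
      baseline->SetFileName("baseline.png");

      vtkNew<vtkImageDifference> diff;
      diff->SetInputConnection(testImage->GetOutputPort());
      diff->SetImageConnection(baseline->GetOutputPort());
      diff->Update();

      double error = diff->GetThresholdedError();      // same metric vtkTesting reports
      return (error <= 10.0) ? 0 : 1;                  // pass/fail on the threshold
    }
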
diff --git a/Testing/Rendering/vtkTesting.h b/Testing/Rendering/vtkTesting.h
index 76a907f..6fc055b 100644
--- a/Testing/Rendering/vtkTesting.h
+++ b/Testing/Rendering/vtkTesting.h
@@ -46,7 +46,7 @@
 //
 //  ...
 //
-//  if ( res == vtkTesting::PASSED )
+//  if (res == vtkTesting::PASSED)
 //    {
 //    Test passed
 //    }
@@ -123,8 +123,9 @@ public:
   //     return vtkTesting::InteractorEventLoop( argc, argv, iren );
   //   }
   //
-  static int InteractorEventLoop( int argc, char *argv[],
-      vtkRenderWindowInteractor *iren, const char *stream = NULL );
+  static int InteractorEventLoop(int argc, char *argv[],
+                                 vtkRenderWindowInteractor *iren,
+                                 const char *stream = NULL);
 
 //ETX
 
@@ -150,7 +151,8 @@ public:
   // Description:
   // Compare the image with the valid image.
   virtual int RegressionTest(vtkAlgorithm* imageSource, double thresh);
-  virtual int RegressionTest(vtkAlgorithm* imageSource, double thresh, ostream& os);
+  virtual int RegressionTest(vtkAlgorithm* imageSource, double thresh,
+                             ostream& os);
 
   // Description:
   // Compute the average L2 norm between all point data data arrays
@@ -158,6 +160,7 @@ public:
   // (this includes instances of vtkPoints) Compare the result of
   // each L2 computation to "tol".
   int CompareAverageOfL2Norm(vtkDataSet *pdA, vtkDataSet *pdB, double tol);
+
   // Description:
   // Compute the average L2 norm between two data arrays "daA" and "daB"
   // and compare against "tol".
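
As the usage comments in this header show, a regression test forwards its command-line arguments (-D, -V, -T, -I, ...) to a vtkTesting instance, points it at the render window, and compares RegressionTest() against vtkTesting::PASSED. A condensed sketch, assuming a VTK 6.x build with the usual rendering modules linked; the pipeline and threshold are placeholders:

    #include "vtkNew.h"
    #include "vtkRenderWindow.h"
    #include "vtkTesting.h"

    int main(int argc, char *argv[])
    {
      vtkNew<vtkRenderWindow> renWin;
      // ... build a pipeline, add renderers, render into renWin ...

      vtkNew<vtkTesting> testing;
      for (int i = 0; i < argc; ++i)
        {
        testing->AddArgument(argv[i]);                 // forwards -D, -V, -T, -I, ...
        }
      testing->SetRenderWindow(renWin.Get());

      int result = testing->RegressionTest(0.15);      // illustrative threshold
      return (result == vtkTesting::PASSED) ? 0 : 1;
    }
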
diff --git a/ThirdParty/AutobahnPython/CMakeLists.txt b/ThirdParty/AutobahnPython/CMakeLists.txt
new file mode 100644
index 0000000..22fc36c
--- /dev/null
+++ b/ThirdParty/AutobahnPython/CMakeLists.txt
@@ -0,0 +1,21 @@
+vtk_module_impl()
+vtk_module_export("")
+
+option(VTK_USE_SYSTEM_AUTOBAHN "Use system Autobahn Python package" OFF)
+mark_as_advanced(VTK_USE_SYSTEM_AUTOBAHN)
+
+if(NOT VTK_USE_SYSTEM_AUTOBAHN)
+  find_package(PythonInterp)
+
+  include(vtkPythonPackages)
+
+  set(AutobahnPython_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/autobahn")
+  set(AutobahnPython_BINARY_DIR "${VTK_BUILD_PYTHON_MODULE_DIR}/autobahn")
+
+  build_python_package("AutobahnPython" ${AutobahnPython_SOURCE_DIR}
+    ${AutobahnPython_BINARY_DIR})
+
+  install(DIRECTORY ${AutobahnPython_BINARY_DIR}
+    DESTINATION "${VTK_INSTALL_PYTHON_MODULE_DIR}"
+    COMPONENT Runtime)
+endif()
diff --git a/ThirdParty/AutobahnPython/PKG-INFO b/ThirdParty/AutobahnPython/PKG-INFO
new file mode 100644
index 0000000..2d5b228
--- /dev/null
+++ b/ThirdParty/AutobahnPython/PKG-INFO
@@ -0,0 +1,38 @@
+Metadata-Version: 1.0
+Name: autobahn
+Version: 0.5.9
+Summary: AutobahnPython - WebSocket/WAMP implementation for Python/Twisted.
+Home-page: http://autobahn.ws/python
+Author: Tavendo GmbH
+Author-email: autobahnws at googlegroups.com
+License: Apache License 2.0
+Description: 
+        Twisted-based WebSocket/WAMP client and server framework.
+        
+        AutobahnPython provides a WebSocket (RFC6455, Hybi-10 to -17, Hixie-76)
+        framework for creating WebSocket-based clients and servers.
+        
+        AutobahnPython also includes an implementation of WAMP
+        (The WebSockets Application Messaging Protocol), a light-weight,
+        asynchronous RPC/PubSub over JSON/WebSocket protocol.
+        
+        More information:
+        
+           * http://autobahn.ws/python
+           * http://wamp.ws
+        
+        Source Code:
+        
+           * https://github.com/tavendo/AutobahnPython
+        
+Keywords: autobahn autobahn.ws websocket realtime rfc6455 wamp rpc pubsub
+Platform: Any
+Classifier: License :: OSI Approved :: Apache Software License
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Environment :: Console
+Classifier: Framework :: Twisted
+Classifier: Intended Audience :: Developers
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Internet
+Classifier: Topic :: Software Development :: Libraries
diff --git a/ThirdParty/AutobahnPython/autobahn/__init__.py b/ThirdParty/AutobahnPython/autobahn/__init__.py
new file mode 100644
index 0000000..e311954
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/__init__.py
@@ -0,0 +1,31 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+from _version import __version__
+version = __version__ # backward compat.
+
+import util
+import useragent
+import flashpolicy
+import httpstatus
+import utf8validator
+import xormasker
+import websocket
+import resource
+import prefixmap
+import wamp
diff --git a/ThirdParty/AutobahnPython/autobahn/_version.py b/ThirdParty/AutobahnPython/autobahn/_version.py
new file mode 100644
index 0000000..bb94928
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/_version.py
@@ -0,0 +1,19 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__version__ = "0.6.0"
diff --git a/ThirdParty/AutobahnPython/autobahn/flashpolicy.py b/ThirdParty/AutobahnPython/autobahn/flashpolicy.py
new file mode 100644
index 0000000..eb549d9
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/flashpolicy.py
@@ -0,0 +1,106 @@
+###############################################################################
+##
+##  Copyright 2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__all__ = ("FlashPolicyProtocol", "FlashPolicyFactory",)
+
+
+import re
+
+from twisted.python import log
+from twisted.internet import reactor
+from twisted.application.internet import TCPServer
+from twisted.internet.protocol import Protocol, Factory
+
+
+class FlashPolicyProtocol(Protocol):
+   """
+   Flash Player 9 (version 9.0.124.0 and above) implements a strict new access
+   policy for Flash applications that make Socket or XMLSocket connections to
+   a remote host. It now requires the presence of a socket policy file
+   on the server.
+
+   We want this to support the Flash WebSockets bridge which is needed for
+   older browser, in particular MSIE9/8.
+
+   See:
+      * `Autobahn WebSocket fallbacks example <https://github.com/tavendo/AutobahnPython/tree/master/examples/websocket/echo_wsfallbacks>`_
+      * `Flash policy files background <http://www.lightsphere.com/dev/articles/flash_socket_policy.html>`_
+   """
+
+   REQUESTPAT = re.compile("^\s*<policy-file-request\s*/>")
+   REQUESTMAXLEN = 200
+   REQUESTTIMEOUT = 5
+   POLICYFILE = """<?xml version="1.0"?><cross-domain-policy><allow-access-from domain="*" to-ports="%d" /></cross-domain-policy>"""
+
+   def __init__(self, allowedPort):
+      """
+      Ctor.
+
+      :param allowedPort: The port to which Flash player should be allowed to connect.
+      :type allowedPort: int
+      """
+      self.allowedPort = allowedPort
+      self.received = ""
+      self.dropConnection = None
+
+
+   def connectionMade(self):
+      ## DoS protection
+      ##
+      def dropConnection():
+         self.transport.abortConnection()
+         self.dropConnection = None
+      self.dropConnection = reactor.callLater(FlashPolicyProtocol.REQUESTTIMEOUT, dropConnection)
+
+
+   def connectionLost(self, reason):
+      if self.dropConnection:
+         self.dropConnection.cancel()
+         self.dropConnection = None
+
+
+   def dataReceived(self, data):
+      self.received += data
+      if FlashPolicyProtocol.REQUESTPAT.match(self.received):
+         ## got valid request: send policy file
+         ##
+         self.transport.write(FlashPolicyProtocol.POLICYFILE % self.allowedPort)
+         self.transport.loseConnection()
+      elif len(self.received) > FlashPolicyProtocol.REQUESTMAXLEN:
+         ## possible DoS attack
+         ##
+         self.transport.abortConnection()
+      else:
+         ## need more data
+         ##
+         pass
+
+
+class FlashPolicyFactory(Factory):
+
+   def __init__(self, allowedPort):
+      """
+      Ctor.
+
+      :param allowedPort: The port to which Flash player should be allowed to connect.
+      :type allowedPort: int
+      """
+      self.allowedPort = allowedPort
+
+   def buildProtocol(self, addr):
+      return FlashPolicyProtocol(self.allowedPort)
diff --git a/ThirdParty/AutobahnPython/autobahn/httpstatus.py b/ThirdParty/AutobahnPython/autobahn/httpstatus.py
new file mode 100644
index 0000000..ca44399
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/httpstatus.py
@@ -0,0 +1,271 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+##
+## HTTP Status Codes
+##
+## Source: http://en.wikipedia.org/wiki/List_of_HTTP_status_codes
+## Adapted on 2011/10/11
+##
+
+##
+## 1xx Informational
+##
+## Request received, continuing process.
+##
+## This class of status code indicates a provisional response, consisting only of
+## the Status-Line and optional headers, and is terminated by an empty line.
+## Since HTTP/1.0 did not define any 1xx status codes, servers must not send
+## a 1xx response to an HTTP/1.0 client except under experimental conditions.
+##
+
+HTTP_STATUS_CODE_CONTINUE                 = (100, "Continue",
+                                             "This means that the server has received the request headers, and that the client should proceed to send the request body (in the case of a request for which a body needs to be sent; for example, a POST request). If the request body is large, sending it to a server when a request has already been rejected based upon inappropriate headers is inefficient. To have a server check if the request could be accepted based on the request's headers alon [...]
+
+HTTP_STATUS_CODE_SWITCHING_PROTOCOLS      = (101, "Switching Protocols",
+                                             "This means the requester has asked the server to switch protocols and the server is acknowledging that it will do so.")
+
+HTTP_STATUS_CODE_PROCESSING               = (102, "Processing (WebDAV) (RFC 2518)",
+                                             "As a WebDAV request may contain many sub-requests involving file operations, it may take a long time to complete the request. This code indicates that the server has received and is processing the request, but no response is available yet.[3] This prevents the client from timing out and assuming the request was lost.")
+
+HTTP_STATUS_CODE_CHECKPOINT               = (103, "Checkpoint",
+                                             "This code is used in the Resumable HTTP Requests Proposal to resume aborted PUT or POST requests.")
+
+HTTP_STATUS_CODE_REQUEST_URI_TOO_LONG     = (122, "Request-URI too long",
+                                             "This is a non-standard IE7-only code which means the URI is longer than a maximum of 2083 characters.[5][6] (See code 414.)")
+
+##
+## 2xx Success
+##
+## This class of status codes indicates the action requested by the client was
+## received, understood, accepted and processed successfully.
+##
+
+HTTP_STATUS_CODE_OK                       = (200, "OK",
+                                             "Standard response for successful HTTP requests. The actual response will depend on the request method used. In a GET request, the response will contain an entity corresponding to the requested resource. In a POST request the response will contain an entity describing or containing the result of the action.")
+
+HTTP_STATUS_CODE_CREATED                  = (201, "Created",
+                                             "The request has been fulfilled and resulted in a new resource being created.")
+
+HTTP_STATUS_CODE_ACCEPTED                 = (202, "Accepted",
+                                             "The request has been accepted for processing, but the processing has not been completed. The request might or might not eventually be acted upon, as it might be disallowed when processing actually takes place.")
+
+HTTP_STATUS_CODE_NON_AUTHORATIVE          = (203, "Non-Authoritative Information (since HTTP/1.1)",
+                                             "The server successfully processed the request, but is returning information that may be from another source.")
+
+HTTP_STATUS_CODE_NO_CONTENT               = (204, "No Content",
+                                             "The server successfully processed the request, but is not returning any content.")
+
+HTTP_STATUS_CODE_RESET_CONTENT            = (205, "Reset Content",
+                                             "The server successfully processed the request, but is not returning any content. Unlike a 204 response, this response requires that the requester reset the document view.")
+
+HTTP_STATUS_CODE_PARTIAL_CONTENT          = (206, "Partial Content",
+                                             "The server is delivering only part of the resource due to a range header sent by the client. The range header is used by tools like wget to enable resuming of interrupted downloads, or split a download into multiple simultaneous streams.")
+
+HTTP_STATUS_CODE_MULTI_STATUS             = (207, "Multi-Status (WebDAV) (RFC 4918)",
+                                             "The message body that follows is an XML message and can contain a number of separate response codes, depending on how many sub-requests were made.")
+
+HTTP_STATUS_CODE_IM_USED                  = (226, "IM Used (RFC 3229)",
+                                             "The server has fulfilled a GET request for the resource, and the response is a representation of the result of one or more instance-manipulations applied to the current instance.")
+
+##
+## 3xx Redirection
+##
+## The client must take additional action to complete the request.
+##
+## This class of status code indicates that further action needs to be taken
+## by the user agent in order to fulfil the request. The action required may
+## be carried out by the user agent without interaction with the user if and
+## only if the method used in the second request is GET or HEAD. A user agent
+## should not automatically redirect a request more than five times, since such
+## redirections usually indicate an infinite loop.
+##
+
+HTTP_STATUS_CODE_MULTIPLE_CHOICES         = (300, "Multiple Choices",
+                                             "Indicates multiple options for the resource that the client may follow. It, for instance, could be used to present different format options for video, list files with different extensions, or word sense disambiguation.")
+
+HTTP_STATUS_CODE_MOVED_PERMANENTLY        = (301, "Moved Permanently",
+                                             "This and all future requests should be directed to the given URI.")
+
+HTTP_STATUS_CODE_FOUND                    = (302, "Found",
+                                             "This is an example of industrial practice contradicting the standard. HTTP/1.0 specification (RFC 1945) required the client to perform a temporary redirect (the original describing phrase was 'Moved Temporarily', but popular browsers implemented 302 with the functionality of a 303 See Other. Therefore, HTTP/1.1 added status codes 303 and 307 to distinguish between the two behaviours. However, some Web applications and frameworks use the 302 s [...]
+
+HTTP_STATUS_CODE_SEE_OTHER                = (303, "See Other (since HTTP/1.1)",
+                                             "The response to the request can be found under another URI using a GET method. When received in response to a POST (or PUT/DELETE), it should be assumed that the server has received the data and the redirect should be issued with a separate GET message.")
+
+HTTP_STATUS_CODE_NOT_MODIFIED             = (304, "Not Modified",
+                                             "Indicates the resource has not been modified since last requested.[2] Typically, the HTTP client provides a header like the If-Modified-Since header to provide a time against which to compare. Using this saves bandwidth and reprocessing on both the server and client, as only the header data must be sent and received in comparison to the entirety of the page being re-processed by the server, then sent again using more bandwidth of the server a [...]
+
+HTTP_STATUS_CODE_USE_PROXY                = (305, "Use Proxy (since HTTP/1.1)",
+                                             "Many HTTP clients (such as Mozilla[11] and Internet Explorer) do not correctly handle responses with this status code, primarily for security reasons.")
+
+HTTP_STATUS_CODE_SWITCH_PROXY             = (306, "Switch Proxy",
+                                             "No longer used. Originally meant 'Subsequent requests should use the specified proxy'.")
+
+HTTP_STATUS_CODE_TEMPORARY_REDIRECT       = (307, "Temporary Redirect (since HTTP/1.1)",
+                                             "In this occasion, the request should be repeated with another URI, but future requests can still use the original URI.[2] In contrast to 303, the request method should not be changed when reissuing the original request. For instance, a POST request must be repeated using another POST request.")
+
+HTTP_STATUS_CODE_RESUME_INCOMPLETE        = (308, "Resume Incomplete",
+                                             "This code is used in the Resumable HTTP Requests Proposal to resume aborted PUT or POST requests.")
+
+##
+## 4xx Client Error
+##
+## The 4xx class of status code is intended for cases in which the client
+## seems to have erred. Except when responding to a HEAD request, the server
+## should include an entity containing an explanation of the error situation,
+## and whether it is a temporary or permanent condition. These status codes are
+## applicable to any request method. User agents should display any included
+## entity to the user. These are typically the most common error codes
+## encountered while online.
+##
+
+HTTP_STATUS_CODE_BAD_REQUEST              = (400, "Bad Request",
+                                             "The request cannot be fulfilled due to bad syntax.")
+
+HTTP_STATUS_CODE_UNAUTHORIZED             = (401, "Unauthorized",
+                                             "Similar to 403 Forbidden, but specifically for use when authentication is possible but has failed or not yet been provided.[2] The response must include a WWW-Authenticate header field containing a challenge applicable to the requested resource. See Basic access authentication and Digest access authentication.")
+
+HTTP_STATUS_CODE_PAYMENT_REQUIRED         = (402, "Payment Required",
+                                             "Reserved for future use.[2] The original intention was that this code might be used as part of some form of digital cash or micropayment scheme, but that has not happened, and this code is not usually used. As an example of its use, however, Apple's MobileMe service generates a 402 error if the MobileMe account is delinquent.")
+
+HTTP_STATUS_CODE_FORBIDDEN                = (403, "Forbidden",
+                                             "The request was a legal request, but the server is refusing to respond to it.[2] Unlike a 401 Unauthorized response, authenticating will make no difference.[2]")
+
+HTTP_STATUS_CODE_NOT_FOUND                = (404, "Not Found",
+                                             "The requested resource could not be found but may be available again in the future.[2] Subsequent requests by the client are permissible.")
+
+HTTP_STATUS_CODE_METHOD_NOT_ALLOWED       = (405, "Method Not Allowed",
+                                             "A request was made of a resource using a request method not supported by that resource;[2] for example, using GET on a form which requires data to be presented via POST, or using PUT on a read-only resource.")
+
+HTTP_STATUS_CODE_NOT_ACCEPTABLE           = (406, "Not Acceptable",
+                                             "The requested resource is only capable of generating content not acceptable according to the Accept headers sent in the request.")
+
+HTTP_STATUS_CODE_PROXY_AUTH_REQUIRED      = (407, "Proxy Authentication Required",
+                                             "The client must first authenticate itself with the proxy.")
+
+HTTP_STATUS_CODE_REQUEST_TIMEOUT          = (408, "Request Timeout",
+                                             "The server timed out waiting for the request. According to W3 HTTP specifications: 'The client did not produce a request within the time that the server was prepared to wait. The client MAY repeat the request without modifications at any later time.'")
+
+HTTP_STATUS_CODE_CONFLICT                 = (409, "Conflict",
+                                             "Indicates that the request could not be processed because of conflict in the request, such as an edit conflict.")
+
+HTTP_STATUS_CODE_GONE                     = (410, "Gone",
+                                             "Indicates that the resource requested is no longer available and will not be available again.[2] This should be used when a resource has been intentionally removed and the resource should be purged. Upon receiving a 410 status code, the client should not request the resource again in the future. Clients such as search engines should remove the resource from their indices. Most use cases do not require clients and search engines to purge the r [...]
+
+HTTP_STATUS_CODE_LENGTH_REQUIRED          = (411, "Length Required",
+                                             "The request did not specify the length of its content, which is required by the requested resource.")
+
+HTTP_STATUS_CODE_PRECONDITION_FAILED      = (412, "Precondition Failed",
+                                             "The server does not meet one of the preconditions that the requester put on the request.")
+
+HTTP_STATUS_CODE_REQUEST_ENTITY_TOO_LARGE = (413, "Request Entity Too Large",
+                                             "The request is larger than the server is willing or able to process.")
+
+HTTP_STATUS_CODE_REQUEST_URI_TOO_LARGE    = (414, "Request-URI Too Long",
+                                             "The URI provided was too long for the server to process.")
+
+HTTP_STATUS_CODE_UNSUPPORTED_MEDIA_TYPE   = (415, "Unsupported Media Type",
+                                             "The request entity has a media type which the server or resource does not support. For example, the client uploads an image as image/svg+xml, but the server requires that images use a different format.")
+
+HTTP_STATUS_CODE_INVALID_REQUEST_RANGE    = (416, "Requested Range Not Satisfiable",
+                                             "The client has asked for a portion of the file, but the server cannot supply that portion.[2] For example, if the client asked for a part of the file that lies beyond the end of the file.")
+
+HTTP_STATUS_CODE_EXPECTATION_FAILED       = (417, "Expectation Failed",
+                                             "The server cannot meet the requirements of the Expect request-header field.")
+
+HTTP_STATUS_CODE_TEAPOT                   = (418, "I'm a teapot (RFC 2324)",
+                                             "This code was defined in 1998 as one of the traditional IETF April Fools' jokes, in RFC 2324, Hyper Text Coffee Pot Control Protocol, and is not expected to be implemented by actual HTTP servers.")
+
+HTTP_STATUS_CODE_UNPROCESSABLE_ENTITY     = (422, "Unprocessable Entity (WebDAV) (RFC 4918)",
+                                             "The request was well-formed but was unable to be followed due to semantic errors.")
+
+HTTP_STATUS_CODE_LOCKED                   = (423, "Locked (WebDAV) (RFC 4918)",
+                                             "The resource that is being accessed is locked.")
+
+HTTP_STATUS_CODE_FAILED_DEPENDENCY        = (424, "Failed Dependency (WebDAV) (RFC 4918)",
+                                             "The request failed due to failure of a previous request (e.g. a PROPPATCH).")
+
+HTTP_STATUS_CODE_UNORDERED_COLLECTION     = (425, "Unordered Collection (RFC 3648)",
+                                             "Defined in drafts of 'WebDAV Advanced Collections Protocol', but not present in 'Web Distributed Authoring and Versioning (WebDAV) Ordered Collections Protocol'.")
+
+HTTP_STATUS_CODE_UPGRADE_REQUIRED         = (426, "Upgrade Required (RFC 2817)",
+                                             "The client should switch to a different protocol such as TLS/1.0.")
+
+HTTP_STATUS_CODE_NO_RESPONSE              = (444, "No Response",
+                                             "An Nginx HTTP server extension. The server returns no information to the client and closes the connection (useful as a deterrent for malware).")
+
+HTTP_STATUS_CODE_RETRY_WITH               = (449, "Retry With",
+                                             "A Microsoft extension. The request should be retried after performing the appropriate action.")
+
+HTTP_STATUS_CODE_PARANTAL_BLOCKED         = (450, "Blocked by Windows Parental Controls",
+                                             "A Microsoft extension. This error is given when Windows Parental Controls are turned on and are blocking access to the given webpage.")
+
+HTTP_STATUS_CODE_CLIENT_CLOSED_REQUEST    = (499, "Client Closed Request",
+                                             "An Nginx HTTP server extension. This code is introduced to log the case when the connection is closed by the client while the HTTP server is processing its request, making the server unable to send the HTTP header back.")
+
+
+##
+## 5xx Server Error
+##
+## The server failed to fulfill an apparently valid request.
+##
+## Response status codes beginning with the digit "5" indicate cases in which
+## the server is aware that it has encountered an error or is otherwise incapable
+## of performing the request. Except when responding to a HEAD request, the server
+## should include an entity containing an explanation of the error situation, and
+## indicate whether it is a temporary or permanent condition. Likewise, user agents
+## should display any included entity to the user. These response codes are
+## applicable to any request method.
+##
+
+HTTP_STATUS_CODE_INTERNAL_SERVER_ERROR    = (500, "Internal Server Error",
+                                             "A generic error message, given when no more specific message is suitable.")
+
+HTTP_STATUS_CODE_NOT_IMPLEMENTED          = (501, "Not Implemented",
+                                             "The server either does not recognise the request method, or it lacks the ability to fulfill the request.")
+
+HTTP_STATUS_CODE_BAD_GATEWAY              = (502, "Bad Gateway",
+                                             "The server was acting as a gateway or proxy and received an invalid response from the upstream server.")
+
+HTTP_STATUS_CODE_SERVICE_UNAVAILABLE      = (503, "Service Unavailable",
+                                             "The server is currently unavailable (because it is overloaded or down for maintenance). Generally, this is a temporary state.")
+
+HTTP_STATUS_CODE_GATEWAY_TIMEOUT          = (504, "Gateway Timeout",
+                                             "The server was acting as a gateway or proxy and did not receive a timely response from the upstream server.")
+
+HTTP_STATUS_CODE_UNSUPPORTED_HTTP_VERSION = (505, "HTTP Version Not Supported",
+                                             "The server does not support the HTTP protocol version used in the request.")
+
+HTTP_STATUS_CODE_VARIANT_ALSO_NEGOTIATES  = (506, "Variant Also Negotiates (RFC 2295)",
+                                             "Transparent content negotiation for the request results in a circular reference.")
+
+HTTP_STATUS_CODE_INSUFFICIENT_STORAGE     = (507, "Insufficient Storage (WebDAV)(RFC 4918)",
+                                             "The server is unable to store the representation needed to complete the request.")
+
+HTTP_STATUS_CODE_BANDWIDTH_LIMIT_EXCEEDED = (509, "Bandwidth Limit Exceeded (Apache bw/limited extension)",
+                                             "This status code, while used by many servers, is not specified in any RFCs.")
+
+HTTP_STATUS_CODE_NOT_EXTENDED             = (510, "Not Extended (RFC 2774)",
+                                             "Further extensions to the request are required for the server to fulfill it.")
+
+HTTP_STATUS_CODE_NETWORK_READ_TIMEOUT     = (598, "Network read timeout error (Informal convention)",
+                                             "This status code is not specified in any RFCs, but is used by some HTTP proxies to signal a network read timeout behind the proxy to a client in front of the proxy.")
+
+HTTP_STATUS_CODE_NETWORK_CONNECT_TIMEOUT  = (599, "Network connect timeout error (Informal convention)",
+                                             "This status code is not specified in any RFCs, but is used by some HTTP proxies to signal a network connect timeout behind the proxy to a client in front of the proxy.")
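+
+## Each of the constants above is a (code, reason phrase, description) tuple.
+## A short usage sketch (not part of the upstream module; the unpacking below
+## is only an illustration):
+##
+##    code, phrase, description = HTTP_STATUS_CODE_NOT_FOUND
+##    ## code == 404, phrase == "Not Found"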
diff --git a/ThirdParty/AutobahnPython/autobahn/pbkdf2.py b/ThirdParty/AutobahnPython/autobahn/pbkdf2.py
new file mode 100644
index 0000000..1ca7be1
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/pbkdf2.py
@@ -0,0 +1,134 @@
+# -*- coding: utf-8 -*-
+"""
+    pbkdf2
+    ~~~~~~
+
+    This module implements pbkdf2 for Python.  It also has some basic
+    tests that ensure that it works.  The implementation is straightforward
+    and uses only stdlib stuff and can easily be copy/pasted into
+    your favourite application.
+
+    Use this as a replacement for bcrypt that does not need a C implementation
+    of a modified Blowfish crypto algorithm.
+
+    Example usage:
+
+    >>> pbkdf2_hex('what i want to hash', 'the random salt')
+    'fa7cc8a2b0a932f8e6ea42f9787e9d36e592e0c222ada6a9'
+
+    How to use this:
+
+    1.  Use a constant time string compare function to compare the stored hash
+        with the one you're generating::
+
+            def safe_str_cmp(a, b):
+                if len(a) != len(b):
+                    return False
+                rv = 0
+                for x, y in izip(a, b):
+                    rv |= ord(x) ^ ord(y)
+                return rv == 0
+
+    2.  Use `os.urandom` to generate a proper salt of at least 8 bytes.
+        Use a unique salt per hashed password.
+
+    3.  Store ``algorithm$salt:costfactor$hash`` in the database so that
+        you can upgrade later easily to a different algorithm if you need
+        one.  For instance ``PBKDF2-256$thesalt:10000$deadbeef...``.
+
+
+    :copyright: (c) Copyright 2011 by Armin Ronacher.
+    :license: BSD, see LICENSE for more details.
+"""
+
+__all__ = ("pbkdf2_hex",
+           "pbkdf2_bin",)
+
+import hmac
+import hashlib
+from struct import Struct
+from operator import xor
+from itertools import izip, starmap
+
+
+_pack_int = Struct('>I').pack
+
+
+def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
+    """Like :func:`pbkdf2_bin` but returns a hex encoded string."""
+    return pbkdf2_bin(data, salt, iterations, keylen, hashfunc).encode('hex')
+
+
+def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
+    """Returns a binary digest for the PBKDF2 hash algorithm of `data`
+    with the given `salt`.  It iterates `iterations` times and produces a
+    key of `keylen` bytes.  By default SHA-1 is used as the hash function;
+    a different hashlib `hashfunc` can be provided.
+    """
+    hashfunc = hashfunc or hashlib.sha1
+    mac = hmac.new(data, None, hashfunc)
+    def _pseudorandom(x, mac=mac):
+        h = mac.copy()
+        h.update(x)
+        return map(ord, h.digest())
+    buf = []
+    for block in xrange(1, -(-keylen // mac.digest_size) + 1):
+        rv = u = _pseudorandom(salt + _pack_int(block))
+        for i in xrange(iterations - 1):
+            u = _pseudorandom(''.join(map(chr, u)))
+            rv = starmap(xor, izip(rv, u))
+        buf.extend(rv)
+    return ''.join(map(chr, buf))[:keylen]
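+
+# A minimal usage sketch (not part of the upstream module), illustrating the
+# "algorithm$salt:costfactor$hash" storage scheme described in the module
+# docstring; the 'PBKDF2' label and the cost factor 10000 are illustrative.
+#
+#    import os
+#    salt = os.urandom(8).encode('hex')
+#    stored = 'PBKDF2$%s:%d$%s' % (salt, 10000,
+#                                  pbkdf2_hex('my password', salt, 10000))
+#    # verify later with a constant time compare such as safe_str_cmp from
+#    # the docstring: safe_str_cmp(stored.split('$')[2],
+#    #                             pbkdf2_hex(candidate, salt, 10000))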
+
+
+def test():
+    failed = []
+    def check(data, salt, iterations, keylen, expected):
+        rv = pbkdf2_hex(data, salt, iterations, keylen)
+        if rv != expected:
+            print 'Test failed:'
+            print '  Expected:   %s' % expected
+            print '  Got:        %s' % rv
+            print '  Parameters:'
+            print '    data=%s' % data
+            print '    salt=%s' % salt
+            print '    iterations=%d' % iterations
+            print
+            failed.append(1)
+
+    # From RFC 6070
+    check('password', 'salt', 1, 20,
+          '0c60c80f961f0e71f3a9b524af6012062fe037a6')
+    check('password', 'salt', 2, 20,
+          'ea6c014dc72d6f8ccd1ed92ace1d41f0d8de8957')
+    check('password', 'salt', 4096, 20,
+          '4b007901b765489abead49d926f721d065a429c1')
+    check('passwordPASSWORDpassword', 'saltSALTsaltSALTsaltSALTsaltSALTsalt',
+          4096, 25, '3d2eec4fe41c849b80c8d83662c0e44a8b291a964cf2f07038')
+    check('pass\x00word', 'sa\x00lt', 4096, 16,
+          '56fa6aa75548099dcc37d7f03425e0c3')
+    # This one is from the RFC but it just takes ages
+    ##check('password', 'salt', 16777216, 20,
+    ##      'eefe3d61cd4da4e4e9945b3d6ba2158c2634e984')
+
+    # From Crypt-PBKDF2
+    check('password', 'ATHENA.MIT.EDUraeburn', 1, 16,
+          'cdedb5281bb2f801565a1122b2563515')
+    check('password', 'ATHENA.MIT.EDUraeburn', 1, 32,
+          'cdedb5281bb2f801565a1122b25635150ad1f7a04bb9f3a333ecc0e2e1f70837')
+    check('password', 'ATHENA.MIT.EDUraeburn', 2, 16,
+          '01dbee7f4a9e243e988b62c73cda935d')
+    check('password', 'ATHENA.MIT.EDUraeburn', 2, 32,
+          '01dbee7f4a9e243e988b62c73cda935da05378b93244ec8f48a99e61ad799d86')
+    check('password', 'ATHENA.MIT.EDUraeburn', 1200, 32,
+          '5c08eb61fdf71e4e4ec3cf6ba1f5512ba7e52ddbc5e5142f708a31e2e62b1e13')
+    check('X' * 64, 'pass phrase equals block size', 1200, 32,
+          '139c30c0966bc32ba55fdbf212530ac9c5ec59f1a452f5cc9ad940fea0598ed1')
+    check('X' * 65, 'pass phrase exceeds block size', 1200, 32,
+          '9ccad6d468770cd51b10e6a68721be611a8b4d282601db3b36be9246915ec82a')
+
+    raise SystemExit(bool(failed))
+
+
+if __name__ == '__main__':
+    test()
diff --git a/ThirdParty/AutobahnPython/autobahn/prefixmap.py b/ThirdParty/AutobahnPython/autobahn/prefixmap.py
new file mode 100644
index 0000000..be4dc1d
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/prefixmap.py
@@ -0,0 +1,144 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__all__ = ("PrefixMap",)
+
+class PrefixMap:
+   """
+   Provides a two-way mapping between CURIEs (Compact URI Expressions) and
+   full URIs. See http://www.w3.org/TR/curie/.
+   """
+
+   def __init__(self):
+      self.index = {}
+      self.rindex = {}
+
+      ## add a couple of well-known prefixes
+      ##
+      #self.set("owl", "http://www.w3.org/2002/07/owl#")
+      #self.set("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
+      #self.set("rdfs", "http://www.w3.org/2000/01/rdf-schema#")
+      #self.set("rdfa", "http://www.w3.org/ns/rdfa#")
+      #self.set("xhv", "http://www.w3.org/1999/xhtml/vocab#")
+      #self.set("xml", "http://www.w3.org/XML/1998/namespace")
+      #self.set("xsd", "http://www.w3.org/2001/XMLSchema#")
+
+
+   def get(self, prefix):
+      """
+      Returns the URI for the prefix or None if prefix has no mapped URI.
+
+      :param prefix: Prefix to map.
+      :type prefix: str
+      :returns: str -- Mapped URI for prefix or None.
+      """
+      return self.index.get(prefix, None)
+
+
+   def set(self, prefix, uri):
+      """
+      Set mapping of prefix to URI.
+
+      :param prefix: Prefix to be mapped.
+      :type prefix: str
+      :param uri: URI the prefix is to be mapped to.
+      :type uri: str
+      """
+      self.index[prefix] = uri
+      self.rindex[uri] = prefix
+
+
+   def setDefault(self, uri):
+      """
+      Set default URI mapping of empty prefix (prefix of length 0).
+
+      :param uri: URI the empty prefix to be mapped to (i.e. :label should map to uri:label).
+      :type uri: str
+      """
+      self.set("", uri)
+
+
+   def remove(self, prefix):
+      """
+      Remove mapping of prefix to URI.
+
+      :param prefix: Prefix for which mapping should be removed.
+      :type prefix: str
+      """
+      uri = self.index.get(prefix, None)
+      if uri:
+         del self.index[prefix]
+         del self.rindex[uri]
+
+
+   def resolve(self, curie):
+      """
+      Resolve given CURIE to full URI.
+
+      :param curie: CURIE (i.e. "rdf:label").
+      :type curie: str
+      :returns: str -- Full URI for CURIE or None.
+      """
+      i = curie.find(":")
+      if i > 0:
+         prefix = curie[:i]
+         if self.index.has_key(prefix):
+            return self.index[prefix] + curie[i+1:]
+      return None
+
+
+   def resolveOrPass(self, curieOrUri):
+      """
+      Resolve given CURIE/URI and return the string verbatim if it cannot be resolved.
+
+      :param curieOrUri: CURIE or URI.
+      :type curieOrUri: str
+      :returns: str -- Full URI for CURIE or original string.
+      """
+      u = self.resolve(curieOrUri)
+      if u:
+         return u
+      else:
+         return curieOrUri
+
+
+   def shrink(self, uri):
+      """
+      Shrink given URI to CURIE. If no appropriate prefix mapping is available,
+      return original URI.
+
+      :param uri: URI to shrink.
+      :type uri: str
+      :returns: str -- CURIE or original URI.
+      """
+      for i in xrange(len(uri), 1, -1):
+         u = uri[:i]
+         p = self.rindex.get(u, None)
+         if p:
+            return p + ":" + uri[i:]
+      return uri
+
+
+if __name__ == '__main__':
+   m = PrefixMap()
+   m.set("rdf", "http://www.w3.org/1999/02/22-rdf-syntax-ns#")
+   print m.resolve("http://www.w3.org/1999/02/22-rdf-syntax-ns#label")
+   print m.resolve("rdf:label")
+   print m.resolve("foobar:label")
+   print m.shrink("http://www.w3.org/1999/02/22-rdf-syntax-ns#")
+   print m.shrink("http://www.w3.org/1999/02/22-rdf-syntax-ns#label")
+   print m.shrink("http://foobar.org#label")
diff --git a/ThirdParty/AutobahnPython/autobahn/resource.py b/ThirdParty/AutobahnPython/autobahn/resource.py
new file mode 100644
index 0000000..7f6913c
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/resource.py
@@ -0,0 +1,172 @@
+###############################################################################
+##
+##  Copyright 2012-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__all__ = ("WebSocketResource",
+           "HTTPChannelHixie76Aware",
+           "WSGIRootResource",)
+
+
+from zope.interface import implements
+
+from twisted.python import log
+from twisted.protocols.policies import ProtocolWrapper
+try:
+   from twisted.web.error import NoResource
+except:
+   ## starting from Twisted 12.2, NoResource has moved
+   from twisted.web.resource import NoResource
+from twisted.web.error import UnsupportedMethod
+from twisted.web.resource import IResource, Resource
+from twisted.web.server import NOT_DONE_YET
+from twisted.web.http import HTTPChannel
+
+from websocket import WebSocketServerFactory, WebSocketServerProtocol
+
+
+class HTTPChannelHixie76Aware(HTTPChannel):
+   """
+   Hixie-76 is badly broken: it includes 8 bytes of body, but then does not
+   set a Content-Length header. This hacked HTTPChannel injects the missing
+   HTTP header upon detecting Hixie-76. We need this since otherwise
+   Twisted Web will silently ignore the body.
+
+   To use this, set `protocol = HTTPChannelHixie76Aware` on your
+   `twisted.web.server.Site <http://twistedmatrix.com/documents/current/api/twisted.web.server.Site.html>`_ instance.
+
+   See:
+      * `Autobahn Twisted Web site example <https://github.com/tavendo/AutobahnPython/tree/master/examples/websocket/echo_site>`_
+   """
+
+   def headerReceived(self, line):
+      header = line.split(':')[0].lower()
+      if header == "sec-websocket-key1" and not self._transferDecoder:
+         HTTPChannel.headerReceived(self, "Content-Length: 8")
+      HTTPChannel.headerReceived(self, line)
+
+
+class WSGIRootResource(Resource):
+   """
+   Root resource for when you want a WSGI resource to be the default serving
+   resource for a Twisted Web site, but have subpaths served by
+   different resources.
+
+   This is a hack needed since
+   `twisted.web.wsgi.WSGIResource <http://twistedmatrix.com/documents/current/api/twisted.web.wsgi.WSGIResource.html>`_
+   does not provide a `putChild()` method.
+
+   See also:
+      * `Autobahn Twisted Web WSGI example <https://github.com/tavendo/AutobahnPython/tree/master/examples/websocket/echo_wsgi>`_
+      * `Original hack <http://blog.vrplumber.com/index.php?/archives/2426-Making-your-Twisted-resources-a-url-sub-tree-of-your-WSGI-resource....html>`_
+   """
+
+   def __init__(self, wsgiResource, children):
+      """
+      Creates a Twisted Web root resource.
+
+      :param wsgiResource:
+      :type wsgiResource: Instance of `twisted.web.wsgi.WSGIResource <http://twistedmatrix.com/documents/current/api/twisted.web.wsgi.WSGIResource.html>`_.
+      :param children: A dictionary with string keys constituting URL subpaths, and Twisted Web resources as values.
+      :type children: dict
+      """
+      Resource.__init__(self)
+      self._wsgiResource = wsgiResource
+      self.children = children
+
+   def getChild(self, path, request):
+      request.prepath.pop()
+      request.postpath.insert(0, path)
+      return self._wsgiResource
+
+
+class WebSocketResource(object):
+   """
+   A Twisted Web resource for WebSocket. This resource needs to be instantiated
+   with a factory derived from WebSocketServerFactory.
+   """
+
+   implements(IResource)
+
+   isLeaf = True
+
+   def __init__(self, factory):
+      """
+      Ctor.
+
+      :param factory: An instance of WebSocketServerFactory.
+      :type factory: obj
+      """
+      self._factory = factory
+
+
+   def getChildWithDefault(self, name, request):
+      """
+      This resource cannot have children, hence this will always fail.
+      """
+      return NoResource("No such child resource.")
+
+
+   def putChild(self, path, child):
+      """
+      This resource cannot have children, hence this is always ignored.
+      """
+      pass
+
+
+   def render(self, request):
+      """
+      Render the resource. This will take over the transport underlying
+      the request, create a WebSocketServerProtocol and let that do
+      any subsequent communication.
+      """
+
+      ## Create Autobahn WebSocket protocol.
+      ##
+      protocol = self._factory.buildProtocol(request.transport.getPeer())
+      if not protocol:
+         ## If protocol creation fails, we signal "internal server error"
+         request.setResponseCode(500)
+         return ""
+
+      ## Take over the transport from Twisted Web
+      ##
+      transport, request.transport = request.transport, None
+
+      ## Connect the transport to our protocol. Once #3204 is fixed, there
+      ## may be a cleaner way of doing this.
+      ## http://twistedmatrix.com/trac/ticket/3204
+      ##
+      if isinstance(transport, ProtocolWrapper):
+         ## i.e. TLS is a wrapping protocol
+         transport.wrappedProtocol = protocol
+      else:
+         transport.protocol = protocol
+      protocol.makeConnection(transport)
+
+      ## We recreate the request and forward the raw data. This is somewhat
+      ## silly (since Twisted Web already did the HTTP request parsing
+      ## which we will do a 2nd time), but it's totally non-invasive to our
+      ## code. Maybe improve this.
+      ##
+      data = "%s %s HTTP/1.1\x0d\x0a" % (request.method, request.path)
+      for h in request.requestHeaders.getAllRawHeaders():
+         data += "%s: %s\x0d\x0a" % (h[0], ",".join(h[1]))
+      data += "\x0d\x0a"
+      data += request.content.read() # we need this for Hixie-76
+      protocol.dataReceived(data)
+
+      return NOT_DONE_YET
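+
+
+## A minimal usage sketch (not part of the upstream module) for mounting a
+## WebSocket endpoint under a Twisted Web site; "EchoServerProtocol" and the
+## port number are illustrative assumptions.
+##
+##    from twisted.web.server import Site
+##    from twisted.internet import reactor
+##
+##    factory = WebSocketServerFactory("ws://localhost:8080")
+##    factory.protocol = EchoServerProtocol
+##    root = Resource()
+##    root.putChild("ws", WebSocketResource(factory))
+##    reactor.listenTCP(8080, Site(root))
+##    reactor.run()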
diff --git a/ThirdParty/AutobahnPython/autobahn/useragent.py b/ThirdParty/AutobahnPython/autobahn/useragent.py
new file mode 100644
index 0000000..1b5d5fe
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/useragent.py
@@ -0,0 +1,312 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__all__ = ("lookupWsSupport",)
+
+
+import re
+
+UA_FIREFOX = re.compile(".*Firefox/(\d*).*")
+UA_CHROME = re.compile(".*Chrome/(\d*).*")
+UA_CHROMEFRAME = re.compile(".*chromeframe/(\d*).*")
+UA_WEBKIT = re.compile(".*AppleWebKit/([0-9+\.]*)\w*.*")
+UA_WEBOS = re.compile(".*webos/([0-9+\.]*)\w*.*")
+UA_HPWEBOS = re.compile(".*hpwOS/([0-9+\.]*)\w*.*")
+
+
+
+# Chrome =============================================================
+
+# Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11
+
+
+# Chrome Frame =======================================================
+
+# IE6 on Windows with Chrome Frame
+# Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; SV1; chromeframe/11.0.660.0)
+
+
+# Firefox ============================================================
+
+# Windows 7 64 Bit
+# Mozilla/5.0 (Windows NT 6.1; WOW64; rv:12.0a2) Gecko/20120227 Firefox/12.0a2
+
+
+# Android ============================================================
+
+# Firefox Mobile
+# Mozilla/5.0 (Android; Linux armv7l; rv:10.0.2) Gecko/20120215 Firefox/10.0.2 Fennec/10.0.2
+
+# Chrome for Android (on ICS)
+# Mozilla/5.0 (Linux; U; Android-4.0.3; en-us; Galaxy Nexus Build/IML74K) AppleWebKit/535.7 (KHTML, like Gecko) CrMo/16.0.912.75 Mobile Safari/535.7
+
+# Android builtin browser
+
+# Samsung Galaxy Tab 1
+# Mozilla/5.0 (Linux; U; Android 2.2; de-de; GT-P1000 Build/FROYO) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1
+
+# Samsung Galaxy S
+# Mozilla/5.0 (Linux; U; Android 2.3.3; de-de; GT-I9000 Build/GINGERBREAD) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1
+
+# Samsung Galaxy Note
+# Mozilla/5.0 (Linux; U; Android 2.3.6; de-de; GT-N7000 Build/GINGERBREAD) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1
+
+# Samsung Galaxy ACE (no Flash since ARM)
+# Mozilla/5.0 (Linux; U; Android 2.2.1; de-de; GT-S5830 Build/FROYO) AppleWebKit/533.1 (KHTML, like Gecko) Version/4.0 Mobile Safari/533.1
+
+
+# WebOS ==============================================================
+
+# HP Touchpad
+# Mozilla/5.0 (hp-tablet; Linux; hpwOS/3.0.5; U; en-US) AppleWebKit/534.6 (KHTML, like Gecko) wOSBrowser/234.83 Safari/534.6 TouchPad/1.0
+# => Qt-WebKit, Hixie-76, Flash
+
+
+# Safari =============================================================
+
+# iPod Touch, iOS 4.2.1
+# Mozilla/5.0 (iPod; U; CPU iPhone OS 4_2_1 like Mac OS X; de-de) AppleWebKit/533.17.9 (KHTML, like Gecko) Version/5.0.2 Mobile/8C148 Safari/6533.18.5
+# => Hixie-76
+
+# MacBook Pro, OSX 10.5.8, Safari 5.0.6
+# Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/534.50.2 (KHTML, like Gecko) Version/5.0.6 Safari/533.22.3
+# => Hixie-76
+
+# RFC6455
+# Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534+ (KHTML, like Gecko) Version/5.1.2 Safari/534.52.7
+# Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/535.24+ (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10
+
+# Hixie-76
+# Mozilla/5.0 (Macintosh; Intel Mac OS X 10_7_3) AppleWebKit/534.53.11 (KHTML, like Gecko) Version/5.1.3 Safari/534.53.10
+
+# Hixie-76
+# Mozilla/5.0 (Macintosh; Intel Mac OS X 10_5_8) AppleWebKit/534.50.2 (KHTML, like Gecko) Version/5.0.6 Safari/533.22.3
+
+
+# Opera ==============================================================
+
+# Windows 7 32-Bit
+# Opera/9.80 (Windows NT 6.1; U; de) Presto/2.10.229 Version/11.61
+
+# Windows 7 64-Bit
+# Opera/9.80 (Windows NT 6.1; WOW64; U; de) Presto/2.10.229 Version/11.62
+
+# Samsung Galaxy S
+# Opera/9.80 (Android 2.3.3; Linux; Opera Mobi/ADR-1202231246; U; de) Presto/2.10.254 Version/12.00
+
+# Samsung Galaxy Tab 1
+# Opera/9.80 (Android 2.2; Linux; Opera Tablet/ADR-1203051631; U; de) Presto/2.10.254 Version/12.00
+
+# Samsung Galaxy ACE:
+# Opera/9.80 (Android 2.2.1; Linux; Opera Mobi/ADR-1203051631; U; de) Presto/2.10.254 Version/12.00
+
+# Nokia N8, Symbian S60 5th Ed., S60 Bell
+# Opera/9.80 (S60; SymbOS; Opera Mobi/SYB-1111151949; U; de) Presto/2.9.201 Version/11.50
+
+
+def _lookupWsSupport(ua):
+   ## Internet Explorer
+   ##
+   ## FIXME: handle Windows Phone
+   ##
+   if ua.find("MSIE") >= 0:
+      # IE10 has native support
+      if ua.find("MSIE 10") >= 0:
+         # native Hybi-10+
+         return (True, False, True)
+
+      # first, check for Google Chrome Frame
+      # http://www.chromium.org/developers/how-tos/chrome-frame-getting-started/understanding-chrome-frame-user-agent
+      if ua.find("chromeframe") >= 0:
+
+         r = UA_CHROMEFRAME.match(ua)
+         try:
+            v = int(r.groups()[0])
+            if v >= 14:
+               # native Hybi-10+
+               return (True, False, True)
+         except:
+            # detection problem
+            return (False, False, False)
+
+      # Flash fallback
+      if ua.find("MSIE 8") >= 0 or ua.find("MSIE 9") >= 0:
+         return (True, True, True)
+
+      # unsupported
+      return (False, False, True)
+
+
+   ## iOS
+   ##
+   if ua.find("iPhone") >= 0 or ua.find("iPad") >= 0 or ua.find("iPod") >= 0:
+      ## native Hixie76 (as of March 2012), no Flash, no alternative browsers
+      return (True, False, True)
+
+
+   ## Android
+   ##
+   if ua.find("Android") >= 0:
+
+      ## Firefox Mobile
+      ##
+      if ua.find("Firefox") >= 0:
+         # Hybi-10+ for FF Mobile 8+
+         return (True, False, True)
+
+      ## Opera Mobile
+      ##
+      if ua.find("Opera") >= 0:
+         # Hixie76 for Opera 11+
+         return (True, False, True)
+
+      ## Chrome for Android
+      ##
+      if ua.find("CrMo") >= 0:
+         # http://code.google.com/chrome/mobile/docs/faq.html
+         return (True, False, True)
+
+      ## Android builtin Browser (ooold WebKit)
+      ##
+      if ua.find("AppleWebKit") >= 0:
+
+         # Though we return WS = True and Flash = True here, a device with no actual Flash
+         # support will be detected later in JS. This applies e.g. to ARMv6 devices like the Samsung Galaxy ACE.
+
+         # builtin browser, only works via Flash
+         return (True, True, True)
+
+      # detection problem
+      return (False, False, False)
+
+
+   ## webOS
+   ##
+   if ua.find("hpwOS") >= 0 or ua.find("webos") >= 0:
+      try:
+         if ua.find("hpwOS") >= 0:
+            vv = [int(x) for x in UA_HPWEBOS.match(ua).groups()[0].split('.')]
+            if vv[0] >= 3:
+               return (True, False, True)
+         elif ua.find("webos") >= 0:
+            vv = [int(x) for x in UA_WEBOS.match(ua).groups()[0].split('.')]
+            if vv[0] >= 2:
+               return (True, False, True)
+      except:
+         # detection problem
+         return (False, False, False)
+      else:
+         # unsupported
+         return (False, False, True)
+
+
+   ## Opera
+   ##
+   if ua.find("Opera") >= 0:
+      # Opera 11+ has Hixie76 (needs to be manually activated though)
+      return (True, False, True)
+
+
+   ## Firefox
+   ##
+   if ua.find("Firefox") >= 0:
+      r = UA_FIREFOX.match(ua)
+      try:
+         v = int(r.groups()[0])
+         if v >= 7:
+            # native Hybi-10+
+            return (True, False, True)
+         elif v >= 3:
+            # works with Flash bridge
+            return (True, True, True)
+         else:
+            # unsupported
+            return (False, False, True)
+      except:
+         # detection problem
+         return (False, False, False)
+
+
+   ## Safari
+   ##
+   if ua.find("Safari") >= 0 and not ua.find("Chrome") >= 0:
+
+      # rely on at least Hixie76
+      return (True, False, True)
+
+
+   ## Chrome
+   ##
+   if ua.find("Chrome") >= 0:
+      r = UA_CHROME.match(ua)
+      try:
+         v = int(r.groups()[0])
+         if v >= 14:
+            # native Hybi-10+
+            return (True, False, True)
+         elif v >= 4:
+            # works with Flash bridge
+            return (True, True, True)
+         else:
+            # unsupported
+            return (False, False, True)
+      except:
+         # detection problem
+         return (False, False, False)
+
+
+   # detection problem
+   return (False, False, False)
+
+
+UA_DETECT_WS_SUPPORT_DB = {}
+
+def lookupWsSupport(ua, debug = True):
+   """
+   Look up whether the browser supports WebSocket (Hixie76, Hybi10+, RFC6455) natively,
+   and if not, whether the `web-socket-js <https://github.com/gimite/web-socket-js>`_
+   Flash bridge works to polyfill that.
+
+   Returns a tuple of booleans `(ws_supported, needs_flash, detected)` where
+
+      * `ws_supported`: WebSocket is supported
+      * `needs_flash`: Flash Bridge is needed for support
+      * `detected`: the code has explicitly mapped the support/no-support
+
+   :param ua: The browser user agent string as sent in the HTTP header, e.g. provided as `flask.request.user_agent.string` in Flask.
+   :type ua: str
+
+   :returns: tuple -- A tuple `(ws_supported, needs_flash, detected)`.
+   """
+   ws = _lookupWsSupport(ua)
+   if debug:
+      if not UA_DETECT_WS_SUPPORT_DB.has_key(ua):
+         UA_DETECT_WS_SUPPORT_DB[ua] = ws
+
+      if not ws[2]:
+         msg = "UNDETECTED"
+      elif ws[0]:
+         msg = "SUPPORTED"
+      elif not ws[0]:
+         msg = "UNSUPPORTED"
+      else:
+         msg = "ERROR"
+
+      print "DETECT_WS_SUPPORT", ua, ws[0], ws[1], ws[2], msg
+
+   return ws
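+
+
+## A short sketch (not part of the upstream module): classifying one of the
+## desktop Chrome user agent strings sampled above.
+##
+##    ua = "Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.11 (KHTML, like Gecko) Chrome/17.0.963.56 Safari/535.11"
+##    ws_supported, needs_flash, detected = lookupWsSupport(ua, debug = False)
+##    ## -> (True, False, True): Chrome 14+ speaks Hybi-10+ natively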
diff --git a/ThirdParty/AutobahnPython/autobahn/utf8validator.py b/ThirdParty/AutobahnPython/autobahn/utf8validator.py
new file mode 100644
index 0000000..f7d6bfb
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/utf8validator.py
@@ -0,0 +1,120 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Note:
+##
+##  This code is a Python implementation of the algorithm
+##
+##            "Flexible and Economical UTF-8 Decoder"
+##
+##  by Bjoern Hoehrmann
+##
+##       bjoern at hoehrmann.de
+##       http://bjoern.hoehrmann.de/utf-8/decoder/dfa/
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__all__ = ("Utf8Validator",)
+
+## use Cython implementation of UTF8 validator if available
+##
+try:
+   from wsaccel.utf8validator import Utf8Validator
+
+except:
+   ## fallback to pure Python implementation
+
+   class Utf8Validator:
+      """
+      Incremental UTF-8 validator with constant memory consumption (minimal state).
+
+      Implements the algorithm "Flexible and Economical UTF-8 Decoder" by
+      Bjoern Hoehrmann (http://bjoern.hoehrmann.de/utf-8/decoder/dfa/).
+      """
+
+      ## DFA transitions
+      UTF8VALIDATOR_DFA = [
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 00..1f
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 20..3f
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 40..5f
+        0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, # 60..7f
+        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9,9, # 80..9f
+        7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7, # a0..bf
+        8,8,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2, # c0..df
+        0xa,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x3,0x4,0x3,0x3, # e0..ef
+        0xb,0x6,0x6,0x6,0x5,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8,0x8, # f0..ff
+        0x0,0x1,0x2,0x3,0x5,0x8,0x7,0x1,0x1,0x1,0x4,0x6,0x1,0x1,0x1,0x1, # s0..s0
+        1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,1,1,1,1,0,1,0,1,1,1,1,1,1, # s1..s2
+        1,2,1,1,1,1,1,2,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1, # s3..s4
+        1,2,1,1,1,1,1,1,1,2,1,1,1,1,1,1,1,1,1,1,1,1,1,3,1,3,1,1,1,1,1,1, # s5..s6
+        1,3,1,1,1,1,1,3,1,3,1,1,1,1,1,1,1,3,1,1,1,1,1,1,1,1,1,1,1,1,1,1, # s7..s8
+      ]
+
+      UTF8_ACCEPT = 0
+      UTF8_REJECT = 1
+
+      def __init__(self):
+         self.reset()
+
+      def decode(self, b):
+         """
+         Eat one UTF-8 octet, and validate on the fly.
+
+         Returns UTF8_ACCEPT when enough octets have been consumed, in which case
+         self.codepoint contains the decoded Unicode code point.
+
+         Returns UTF8_REJECT when invalid UTF-8 was encountered.
+
+         Returns some other positive integer when more octets need to be eaten.
+         """
+         type = Utf8Validator.UTF8VALIDATOR_DFA[b]
+         if self.state != Utf8Validator.UTF8_ACCEPT:
+            self.codepoint = (b & 0x3f) | (self.codepoint << 6)
+         else:
+            self.codepoint = (0xff >> type) & b
+         self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + self.state * 16 + type]
+         return self.state
+
+      def reset(self):
+         """
+         Reset validator to start new incremental UTF-8 decode/validation.
+         """
+         self.state = Utf8Validator.UTF8_ACCEPT
+         self.codepoint = 0
+         self.i = 0
+
+      def validate(self, ba):
+         """
+         Incrementally validate a chunk of bytes provided as string.
+
+         Will return a quad (valid?, endsOnCodePoint?, currentIndex, totalIndex).
+
+         As soon as an octet is encountered which renders the octet sequence
+         invalid, a quad with valid? == False is returned. currentIndex returns
+         the index within the currently consumed chunk, and totalIndex the
+         index within the total consumed sequence that was the point of bail out.
+         When valid? == True, currentIndex will be len(ba) and totalIndex the
+         total amount of consumed bytes.
+         """
+         l = len(ba)
+         for i in xrange(l):
+            ## optimized version of decode(), since we are not interested in actual code points
+            self.state = Utf8Validator.UTF8VALIDATOR_DFA[256 + (self.state << 4) + Utf8Validator.UTF8VALIDATOR_DFA[ord(ba[i])]]
+            if self.state == Utf8Validator.UTF8_REJECT:
+               self.i += i
+               return False, False, i, self.i
+         self.i += l
+         return True, self.state == Utf8Validator.UTF8_ACCEPT, l, self.i
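+
+
+## A short sketch (not part of the upstream module) of incremental validation;
+## the byte strings are illustrative.
+##
+##    v = Utf8Validator()
+##    v.validate("Hello")   ## -> (True, True, 5, 5)
+##    v.reset()
+##    v.validate("\xff")    ## 0xff never occurs in UTF-8 -> (False, False, 0, 0)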
diff --git a/ThirdParty/AutobahnPython/autobahn/util.py b/ThirdParty/AutobahnPython/autobahn/util.py
new file mode 100644
index 0000000..3959be2
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/util.py
@@ -0,0 +1,154 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__all__ = ("utcnow",
+           "parseutc",
+           "utcstr",
+           "newid",
+           "rtime",
+           "Stopwatch",)
+
+import datetime
+import time
+import random
+import sys
+
+UTC_TIMESTAMP_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
+
+
+def utcnow():
+   """
+   Get current time in UTC as ISO 8601 string.
+   """
+   now = datetime.datetime.utcnow()
+   return now.strftime(UTC_TIMESTAMP_FORMAT)
+
+
+def parseutc(s):
+   """
+   Parse an ISO 8601 combined date and time string, e.g. 2011-11-23T12:23:00Z,
+   into a UTC datetime instance.
+   """
+   try:
+      return datetime.datetime.strptime(s, UTC_TIMESTAMP_FORMAT)
+   except:
+      return None
+
+
+def utcstr(dt):
+   """
+   Convert a UTC datetime instance into an ISO 8601 combined date and time
+   string, e.g. 2011-11-23T12:23:00Z.
+   """
+   try:
+      return dt.strftime(UTC_TIMESTAMP_FORMAT)
+   except:
+      return None
+
+
+def newid():
+   """
+   Generate a new random object ID.
+   """
+   return ''.join([random.choice("ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-_") for i in xrange(16)])
+
+
+
+## Select the most precise walltime measurement function available
+## on the platform
+##
+if sys.platform.startswith('win'):
+   ## On Windows, this function returns wall-clock seconds elapsed since the
+   ## first call to this function, as a floating point number, based on the
+   ## Win32 function QueryPerformanceCounter(). The resolution is typically
+   ## better than one microsecond
+   rtime = time.clock
+   _ = rtime()
+else:
+   ## On Unix-like platforms, this uses the first available from this list:
+   ## (1) gettimeofday() -- resolution in microseconds
+   ## (2) ftime() -- resolution in milliseconds
+   ## (3) time() -- resolution in seconds
+   rtime = time.time
+
+
+class Stopwatch:
+   """
+   Stopwatch based on walltime. Can be used to do code timing and uses the
+   most precise walltime measurement available on the platform. This is
+   a very light-weight object, so create/dispose is very cheap.
+   """
+
+   def __init__(self, start = True):
+      """
+      Creates a new stopwatch and by default immediately starts (= resumes) it.
+      """
+      self._elapsed = 0
+      if start:
+         self._started = rtime()
+         self._running = True
+      else:
+         self._started = None
+         self._running = False
+
+   def elapsed(self):
+      """
+      Return total time elapsed in seconds during which the stopwatch was running.
+      """
+      if self._running:
+         now = rtime()
+         return self._elapsed + (now - self._started)
+      else:
+         return self._elapsed
+
+   def pause(self):
+      """
+      Pauses the stopwatch and returns total time elapsed in seconds during which
+      the stopwatch was running.
+      """
+      if self._running:
+         now = rtime()
+         self._elapsed += now - self._started
+         self._running = False
+         return self._elapsed
+      else:
+         return self._elapsed
+
+   def resume(self):
+      """
+      Resumes a paused stopwatch and returns total elapsed time in seconds
+      during which the stopwatch was running.
+      """
+      if not self._running:
+         self._started = rtime()
+         self._running = True
+         return self._elapsed
+      else:
+         now = rtime()
+         return self._elapsed + (now - self._started)
+
+   def stop(self):
+      """
+      Stops the stopwatch and returns total time elapsed in seconds during which
+      the stopwatch was (previously) running.
+      """
+      elapsed = self.pause()
+      self._elapsed = 0
+      self._started = None
+      self._running = False
+      return elapsed
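+
+
+## A short usage sketch (not part of the upstream module); do_work() and
+## do_other_things() are illustrative placeholders.
+##
+##    sw = Stopwatch()          ## starts running immediately
+##    do_work()
+##    sw.pause()
+##    do_other_things()         ## this time is not counted
+##    sw.resume()
+##    do_work()
+##    total = sw.stop()         ## seconds spent in the two do_work() phases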
diff --git a/ThirdParty/AutobahnPython/autobahn/wamp.py b/ThirdParty/AutobahnPython/autobahn/wamp.py
new file mode 100644
index 0000000..a141463
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/wamp.py
@@ -0,0 +1,2344 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__all__ = ("WampProtocol",
+           "WampFactory",
+           "WampServerProtocol",
+           "WampServerFactory",
+           "WampClientProtocol",
+           "WampClientFactory",
+           "WampCraProtocol",
+           "WampCraClientProtocol",
+           "WampCraServerProtocol",)
+
+import json
+import random
+import inspect, types
+import traceback
+
+import hashlib, hmac, binascii
+
+from twisted.python import log
+from twisted.internet import reactor
+from twisted.internet.defer import Deferred, \
+                                   maybeDeferred, \
+                                   returnValue, \
+                                   inlineCallbacks
+
+from _version import __version__
+from websocket import WebSocketProtocol, HttpException, Timings
+from websocket import WebSocketClientProtocol, WebSocketClientFactory
+from websocket import WebSocketServerFactory, WebSocketServerProtocol
+
+from httpstatus import HTTP_STATUS_CODE_BAD_REQUEST
+from pbkdf2 import pbkdf2_bin
+from prefixmap import PrefixMap
+from util import utcstr, utcnow, parseutc, newid
+
+
+def exportRpc(arg = None):
+   """
+   Decorator for RPC'ed callables.
+   """
+   ## decorator without argument
+   if type(arg) is types.FunctionType:
+      arg._autobahn_rpc_id = arg.__name__
+      return arg
+   ## decorator with argument
+   else:
+      def inner(f):
+         f._autobahn_rpc_id = arg
+         return f
+      return inner
+
+def exportSub(arg, prefixMatch = False):
+   """
+   Decorator for subscription handlers.
+   """
+   def inner(f):
+      f._autobahn_sub_id = arg
+      f._autobahn_sub_prefix_match = prefixMatch
+      return f
+   return inner
+
+def exportPub(arg, prefixMatch = False):
+   """
+   Decorator for publication handlers.
+   """
+   def inner(f):
+      f._autobahn_pub_id = arg
+      f._autobahn_pub_prefix_match = prefixMatch
+      return f
+   return inner
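+
+
+## A short sketch (not part of the upstream module) of exposing object methods
+## for RPC with @exportRpc; "Calc" and the base URI are illustrative.
+##
+##    class Calc:
+##       @exportRpc
+##       def add(self, x, y):
+##          return x + y
+##
+##    ## then, e.g. from WampServerProtocol.onSessionOpen():
+##    ##    self.registerForRpc(Calc(), "http://example.com/calc#")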
+
+
+class WampProtocol:
+   """
+   WAMP protocol base class. Mixin for WampServerProtocol and WampClientProtocol.
+   """
+
+   URI_WAMP_BASE = "http://api.wamp.ws/"
+   """
+   WAMP base URI for WAMP predefined things.
+   """
+
+   URI_WAMP_ERROR = URI_WAMP_BASE + "error#"
+   """
+   Prefix for WAMP errors.
+   """
+
+   URI_WAMP_PROCEDURE = URI_WAMP_BASE + "procedure#"
+   """
+   Prefix for WAMP predefined RPC endpoints.
+   """
+
+   URI_WAMP_TOPIC = URI_WAMP_BASE + "topic#"
+   """
+   Prefix for WAMP predefined PubSub topics.
+   """
+
+   URI_WAMP_ERROR_GENERIC = URI_WAMP_ERROR + "generic"
+   """
+   WAMP error URI for generic errors.
+   """
+
+   DESC_WAMP_ERROR_GENERIC = "generic error"
+   """
+   Description for WAMP generic errors.
+   """
+
+   URI_WAMP_ERROR_INTERNAL = URI_WAMP_ERROR + "internal"
+   """
+   WAMP error URI for internal errors.
+   """
+
+   DESC_WAMP_ERROR_INTERNAL = "internal error"
+   """
+   Description for WAMP internal errors.
+   """
+
+   WAMP_PROTOCOL_VERSION         = 1
+   """
+   WAMP version this server speaks. Versions are numbered consecutively
+   (integers, no gaps).
+   """
+
+   MESSAGE_TYPEID_WELCOME        = 0
+   """
+   Server-to-client welcome message containing session ID.
+   """
+
+   MESSAGE_TYPEID_PREFIX         = 1
+   """
+   Client-to-server message establishing a URI prefix to be used in CURIEs.
+   """
+
+   MESSAGE_TYPEID_CALL           = 2
+   """
+   Client-to-server message initiating an RPC.
+   """
+
+   MESSAGE_TYPEID_CALL_RESULT    = 3
+   """
+   Server-to-client message returning the result of a successful RPC.
+   """
+
+   MESSAGE_TYPEID_CALL_ERROR     = 4
+   """
+   Server-to-client message returning the error of a failed RPC.
+   """
+
+   MESSAGE_TYPEID_SUBSCRIBE      = 5
+   """
+   Client-to-server message subscribing to a topic.
+   """
+
+   MESSAGE_TYPEID_UNSUBSCRIBE    = 6
+   """
+   Client-to-server message unsubscribing from a topic.
+   """
+
+   MESSAGE_TYPEID_PUBLISH        = 7
+   """
+   Client-to-server message publishing an event to a topic.
+   """
+
+   MESSAGE_TYPEID_EVENT          = 8
+   """
+   Server-to-client message providing the event of a (subscribed) topic.
+   """
+
+   def connectionMade(self):
+      self.debugWamp = self.factory.debugWamp
+      self.debugApp = self.factory.debugApp
+      self.prefixes = PrefixMap()
+      self.calls = {}
+      self.procs = {}
+
+
+   def connectionLost(self, reason):
+      pass
+
+
+   def _protocolError(self, reason):
+      if self.debugWamp:
+         log.msg("Closing Wamp session on protocol violation : %s" % reason)
+
+      ## FIXME: subprotocols are probably not supposed to close with CLOSE_STATUS_CODE_PROTOCOL_ERROR
+      ##
+      self.protocolViolation("Wamp RPC/PubSub protocol violation ('%s')" % reason)
+
+
+   def shrink(self, uri, passthrough = False):
+      """
+      Shrink given URI to CURIE according to current prefix mapping.
+      If no appropriate prefix mapping is available, return original URI.
+
+      :param uri: URI to shrink.
+      :type uri: str
+
+      :returns str -- CURIE or original URI.
+      """
+      return self.prefixes.shrink(uri)
+
+
+   def resolve(self, curieOrUri, passthrough = False):
+      """
+      Resolve given CURIE/URI according to current prefix mapping or return
+      None if it cannot be resolved.
+
+      :param curieOrUri: CURIE or URI.
+      :type curieOrUri: str
+
+      :returns: str -- Full URI for CURIE or None.
+      """
+      return self.prefixes.resolve(curieOrUri)
+
+
+   def resolveOrPass(self, curieOrUri):
+      """
+      Resolve given CURIE/URI according to current prefix mapping or return
+      the string verbatim if it cannot be resolved.
+
+      :param curieOrUri: CURIE or URI.
+      :type curieOrUri: str
+
+      :returns: str -- Full URI for CURIE or original string.
+      """
+      return self.prefixes.resolveOrPass(curieOrUri)
+
+
+   def serializeMessage(self, msg):
+      """
+      Delegate message serialization to the factory.
+      :param msg: The message to be serialized.
+      :type msg: str
+      :return: The serialized message.
+      """
+      return self.factory._serialize(msg)
+
+
+   def registerForRpc(self, obj, baseUri = "", methods = None):
+      """
+      Register a service object for RPC. A service object has methods
+      which are decorated using @exportRpc.
+
+      :param obj: The object to be registered (in this WebSockets session) for RPC.
+      :type obj: Object with methods decorated using @exportRpc.
+      :param baseUri: Optional base URI which is prepended to method names for export.
+      :type baseUri: String.
+      :param methods: If not None, a list of unbound class methods corresponding to obj
+                     which should be registered. This can be used to register only a subset
+                     of the methods decorated with @exportRpc.
+      :type methods: List of unbound class methods.
+      """
+      for k in inspect.getmembers(obj.__class__, inspect.ismethod):
+         if k[1].__dict__.has_key("_autobahn_rpc_id"):
+            if methods is None or k[1] in methods:
+               uri = baseUri + k[1].__dict__["_autobahn_rpc_id"]
+               proc = k[1]
+               self.registerMethodForRpc(uri, obj, proc)
+
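+   ## A minimal registration sketch (service class and base URI are hypothetical),
+   ## typically done from WampServerProtocol.onSessionOpen():
+   ##
+   ##    def onSessionOpen(self):
+   ##       self.registerForRpc(CalcService(), "http://example.com/calc#")
+   ##       # an @exportRpc-decorated method "add" is now callable as
+   ##       # http://example.com/calc#add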
+
+   def registerMethodForRpc(self, uri, obj, proc):
+      """
+      Register a method of an object for RPC.
+
+      :param uri: URI to register RPC method under.
+      :type uri: str
+      :param obj: The object on which to register a method for RPC.
+      :type obj: object
+      :param proc: Unbound object method to register RPC for.
+      :type proc: unbound method
+      """
+      self.procs[uri] = (obj, proc, False)
+      if self.debugWamp:
+         log.msg("registered remote method on %s" % uri)
+
+
+   def registerProcedureForRpc(self, uri, proc):
+      """
+      Register a (free standing) function/procedure for RPC.
+
+      :param uri: URI to register RPC function/procedure under.
+      :type uri: str
+      :param proc: Free-standing function/procedure.
+      :type proc: callable
+      """
+      self.procs[uri] = (None, proc, False)
+      if self.debugWamp:
+         log.msg("registered remote procedure on %s" % uri)
+
+
+   def registerHandlerMethodForRpc(self, uri, obj, handler, extra = None):
+      """
+      Register a handler on an object for RPC.
+
+      :param uri: URI to register RPC method under.
+      :type uri: str
+      :param obj: The object on which to register the RPC handler.
+      :type obj: object
+      :param handler: Unbound object method to register as the RPC handler.
+      :type handler: unbound method
+      :param extra: Optional extra data that will be given to the handler at call time.
+      :type extra: object
+      """
+      self.procs[uri] = (obj, handler, True, extra)
+      if self.debugWamp:
+         log.msg("registered remote handler method on %s" % uri)
+
+
+   def registerHandlerProcedureForRpc(self, uri, handler, extra = None):
+      """
+      Register a (free standing) handler for RPC.
+
+      :param uri: URI to register RPC handler under.
+      :type uri: str
+      :param handler: Free-standing handler.
+      :type handler: callable
+      :param extra: Optional extra data that will be given to the handler at call time.
+      :type extra: object
+      """
+      self.procs[uri] = (None, handler, True, extra)
+      if self.debugWamp:
+         log.msg("registered remote handler procedure on %s" % uri)
+
+
+   def procForUri(self, uri):
+      """
+      Returns the procedure specification for `uri` or None, if it does not exist.
+
+      :param uri: URI to be checked.
+      :type uri: str
+      :returns: The procedure specification for `uri`, if it exists,
+                `None` otherwise.
+      """
+      return self.procs[uri] if uri in self.procs else None
+
+
+   def onBeforeCall(self, callid, uri, args, isRegistered):
+      """
+      Callback fired before executing an incoming RPC. This can be used, for
+      example, for logging, statistics tracking, redirecting RPCs or argument mangling.
+
+      The default implementation just returns the incoming URI/args.
+
+      :param callid: WAMP call ID of the incoming RPC.
+      :type callid: str
+      :param uri: RPC endpoint URI (fully-qualified).
+      :type uri: str
+      :param args: RPC arguments array.
+      :type args: list
+      :param isRegistered: True, iff RPC endpoint URI is registered in this session.
+      :type isRegistered: bool
+      :returns pair -- Must return URI/Args pair.
+      """
+      return uri, args
+
+
+   def onAfterCallSuccess(self, result, call):
+      """
+      Callback fired after executing incoming RPC with success, but before
+      sending the RPC success message.
+
+      The default implementation will just return `result` to the client.
+
+      :param result: Result returned for executing the incoming RPC.
+      :type result: Anything returned by the user code for the endpoint.
+      :param call: WAMP call object for incoming RPC.
+      :type call: instance of Call
+      :returns obj -- Result sent back to client.
+      """
+      return result
+
+
+   def onAfterCallError(self, error, call):
+      """
+      Callback fired after executing incoming RPC with failure, but before
+      sending the RPC error message.
+
+      The default implementation will just return `error` to the client.
+
+      :param error: Error that occurred during incoming RPC call execution.
+      :type error: Instance of twisted.python.failure.Failure
+      :param call: WAMP call object for incoming RPC.
+      :type call: instance of Call
+      :returns twisted.python.failure.Failure -- Error sent back to client.
+      """
+      return error
+
+
+   def onAfterSendCallSuccess(self, msg, call):
+      """
+      Callback fired after sending RPC success message.
+
+      :param msg: Serialized WAMP message.
+      :type msg: str
+      :param call: WAMP call object for incoming RPC.
+      :type call: instance of Call
+      """
+      pass
+
+
+   def onAfterSendCallError(self, msg, call):
+      """
+      Callback fired after sending RPC error message.
+
+      :param msg: Serialized WAMP message.
+      :type msg: str
+      :param call: WAMP call object for incoming RPC.
+      :type call: instance of Call
+      """
+      pass
+
+
+   def call(self, *args):
+      """
+      Perform a remote-procedure call (RPC). The first argument is the procedure
+      URI (mandatory). Subsequent positional arguments can be provided (must be
+      JSON serializable). The return value is a Twisted Deferred.
+      """
+
+      if len(args) < 1:
+         raise Exception("missing procedure URI")
+
+      if type(args[0]) not in [unicode, str]:
+         raise Exception("invalid type for procedure URI")
+
+      procuri = args[0]
+      while True:
+         callid = newid()
+         if not self.calls.has_key(callid):
+            break
+      d = Deferred()
+      self.calls[callid] = d
+      msg = [WampProtocol.MESSAGE_TYPEID_CALL, callid, procuri]
+      msg.extend(args[1:])
+
+      try:
+         o = self.factory._serialize(msg)
+      except:
+         raise Exception("call argument(s) not JSON serializable")
+
+      self.sendMessage(o)
+      return d
+
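+   ## A minimal calling sketch (URI and arguments are hypothetical); the returned
+   ## Deferred fires with the call result, or errbacks on a call error:
+   ##
+   ##    d = proto.call("http://example.com/calc#add", 2, 3)
+   ##    d.addCallback(lambda res: log.msg("result: %s" % res))
+   ##    d.addErrback(lambda err: log.msg("call failed: %s" % err))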
+
+
+class WampFactory:
+   """
+   WAMP factory base class. Mixin for WampServerFactory and WampClientFactory.
+   """
+
+   def _serialize(self, obj):
+      """
+      Default object serializer.
+      """
+      return json.dumps(obj)
+
+
+   def _unserialize(self, bytes):
+      """
+      Default object deserializer.
+      """
+      return json.loads(bytes)
+
+
+
+class WampServerProtocol(WebSocketServerProtocol, WampProtocol):
+   """
+   Server protocol for WAMP RPC/PubSub.
+   """
+
+   SUBSCRIBE = 1
+   PUBLISH = 2
+
+   def onSessionOpen(self):
+      """
+      Callback fired when WAMP session was fully established.
+      """
+      pass
+
+
+   def onOpen(self):
+      """
+      Default implementation for an opened WAMP connection: sends the
+      Welcome message containing the session ID.
+      """
+      self.session_id = newid()
+
+      ## include traceback as error detail for RPC errors with
+      ## no error URI - that is errors returned with URI_WAMP_ERROR_GENERIC
+      self.includeTraceback = True
+
+      msg = [WampProtocol.MESSAGE_TYPEID_WELCOME,
+             self.session_id,
+             WampProtocol.WAMP_PROTOCOL_VERSION,
+             "Autobahn/%s" % __version__]
+      o = self.factory._serialize(msg)
+      self.sendMessage(o)
+
+      self.factory._addSession(self, self.session_id)
+      self.onSessionOpen()
+
+
+   def onConnect(self, connectionRequest):
+      """
+      Default implementation for WAMP connection acceptance:
+      check if client announced WAMP subprotocol, and only accept connection
+      if client did so.
+      """
+      for p in connectionRequest.protocols:
+         if p in self.factory.protocols:
+            return p
+      raise HttpException(HTTP_STATUS_CODE_BAD_REQUEST[0], "this server only speaks WAMP")
+
+
+   def connectionMade(self):
+      WebSocketServerProtocol.connectionMade(self)
+      WampProtocol.connectionMade(self)
+
+      ## RPCs registered in this session (a URI map of (object, procedure)
+      ## pairs for object methods or (None, procedure) for free standing procedures)
+      self.procs = {}
+
+      ## Publication handlers registered in this session (a URI map of (object, pubHandler)
+      ## pairs for object methods (handlers) or (None, None) for topic without handler)
+      self.pubHandlers = {}
+
+      ## Subscription handlers registered in this session (a URI map of (object, subHandler)
+      ## pairs for object methods (handlers) or (None, None) for topic without handler)
+      self.subHandlers = {}
+
+      self.handlerMapping = {
+         self.MESSAGE_TYPEID_CALL: CallHandler(self, self.prefixes),
+         self.MESSAGE_TYPEID_CALL_RESULT: CallResultHandler(self, self.prefixes),
+         self.MESSAGE_TYPEID_CALL_ERROR: CallErrorHandler(self, self.prefixes)}
+
+
+   def connectionLost(self, reason):
+      self.factory._unsubscribeClient(self)
+      self.factory._removeSession(self)
+
+      WampProtocol.connectionLost(self, reason)
+      WebSocketServerProtocol.connectionLost(self, reason)
+
+
+   def sendMessage(self, payload):
+      if self.debugWamp:
+         log.msg("TX WAMP: %s" % str(payload))
+      WebSocketServerProtocol.sendMessage(self, payload)
+
+
+   def _getPubHandler(self, topicUri):
+      ## Longest matching prefix based resolution of (full) topic URI to
+      ## publication handler.
+      ## Returns a 5-tuple (consumedUriPart, unconsumedUriPart, handlerObj, handlerProc, prefixMatch)
+      ##
+      for i in xrange(len(topicUri), -1, -1):
+         tt = topicUri[:i]
+         if self.pubHandlers.has_key(tt):
+            h = self.pubHandlers[tt]
+            return (tt, topicUri[i:], h[0], h[1], h[2])
+      return None
+
+
+   def _getSubHandler(self, topicUri):
+      ## Longest matching prefix based resolution of (full) topic URI to
+      ## subscription handler.
+      ## Returns a 5-tuple (consumedUriPart, unconsumedUriPart, handlerObj, handlerProc, prefixMatch)
+      ##
+      for i in xrange(len(topicUri), -1, -1):
+         tt = topicUri[:i]
+         if self.subHandlers.has_key(tt):
+            h = self.subHandlers[tt]
+            return (tt, topicUri[i:], h[0], h[1], h[2])
+      return None
+
+
+   def registerForPubSub(self, topicUri, prefixMatch = False, pubsub = PUBLISH | SUBSCRIBE):
+      """
+      Register a topic URI as publish/subscribe channel in this session.
+
+      :param topicUri: Topic URI to be established as publish/subscribe channel.
+      :type topicUri: str
+      :param prefixMatch: Allow this topic URI to be matched by prefix.
+      :type prefixMatch: bool
+      :param pubsub: Allow publication and/or subscription.
+      :type pubsub: WampServerProtocol.PUBLISH, WampServerProtocol.SUBSCRIBE or WampServerProtocol.PUBLISH | WampServerProtocol.SUBSCRIBE
+      """
+      if pubsub & WampServerProtocol.PUBLISH:
+         self.pubHandlers[topicUri] = (None, None, prefixMatch)
+         if self.debugWamp:
+            log.msg("registered topic %s for publication (match by prefix = %s)" % (topicUri, prefixMatch))
+      if pubsub & WampServerProtocol.SUBSCRIBE:
+         self.subHandlers[topicUri] = (None, None, prefixMatch)
+         if self.debugWamp:
+            log.msg("registered topic %s for subscription (match by prefix = %s)" % (topicUri, prefixMatch))
+
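+   ## A minimal sketch (topic URI is hypothetical): register a topic for both
+   ## publication and subscription, typically from onSessionOpen():
+   ##
+   ##    def onSessionOpen(self):
+   ##       self.registerForPubSub("http://example.com/event#", prefixMatch = True)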
+
+   def registerHandlerForPubSub(self, obj, baseUri = ""):
+      """
+      Register a handler object for PubSub. A handler object has methods
+      which are decorated using @exportPub and @exportSub.
+
+      :param obj: The object to be registered (in this WebSockets session) for PubSub.
+      :type obj: Object with methods decorated using @exportPub and @exportSub.
+      :param baseUri: Optional base URI which is prepended to topic names for export.
+      :type baseUri: String.
+      """
+      for k in inspect.getmembers(obj.__class__, inspect.ismethod):
+         if k[1].__dict__.has_key("_autobahn_pub_id"):
+            uri = baseUri + k[1].__dict__["_autobahn_pub_id"]
+            prefixMatch = k[1].__dict__["_autobahn_pub_prefix_match"]
+            proc = k[1]
+            self.registerHandlerForPub(uri, obj, proc, prefixMatch)
+         elif k[1].__dict__.has_key("_autobahn_sub_id"):
+            uri = baseUri + k[1].__dict__["_autobahn_sub_id"]
+            prefixMatch = k[1].__dict__["_autobahn_sub_prefix_match"]
+            proc = k[1]
+            self.registerHandlerForSub(uri, obj, proc, prefixMatch)
+
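+   ## A minimal handler-object sketch (names and URIs are hypothetical). A
+   ## subscription handler returns True to allow the subscription; a publication
+   ## handler returns the (possibly modified) event to allow its dispatch:
+   ##
+   ##    class ChatTopics:
+   ##       @exportSub("chat", prefixMatch = True)
+   ##       def onSub(self, topicUriPrefix, topicUriSuffix):
+   ##          return True
+   ##
+   ##       @exportPub("chat", prefixMatch = True)
+   ##       def onPub(self, topicUriPrefix, topicUriSuffix, event):
+   ##          return event
+   ##
+   ##    self.registerHandlerForPubSub(ChatTopics(), "http://example.com/")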
+
+   def registerHandlerForSub(self, uri, obj, proc, prefixMatch = False):
+      """
+      Register a method of an object as subscription handler.
+
+      :param uri: Topic URI to register subscription handler for.
+      :type uri: str
+      :param obj: The object on which to register a method as subscription handler.
+      :type obj: object
+      :param proc: Unbound object method to register as subscription handler.
+      :type proc: unbound method
+      :param prefixMatch: Allow this topic URI to be matched by prefix.
+      :type prefixMatch: bool
+      """
+      self.subHandlers[uri] = (obj, proc, prefixMatch)
+      if not self.pubHandlers.has_key(uri):
+         self.pubHandlers[uri] = (None, None, False)
+      if self.debugWamp:
+         log.msg("registered subscription handler for topic %s" % uri)
+
+
+   def registerHandlerForPub(self, uri, obj, proc, prefixMatch = False):
+      """
+      Register a method of an object as publication handler.
+
+      :param uri: Topic URI to register publication handler for.
+      :type uri: str
+      :param obj: The object on which to register a method as publication handler.
+      :type obj: object
+      :param proc: Unbound object method to register as publication handler.
+      :type proc: unbound method
+      :param prefixMatch: Allow this topic URI to be matched by prefix.
+      :type prefixMatch: bool
+      """
+      self.pubHandlers[uri] = (obj, proc, prefixMatch)
+      if not self.subHandlers.has_key(uri):
+         self.subHandlers[uri] = (None, None, False)
+      if self.debugWamp:
+         log.msg("registered publication handler for topic %s" % uri)
+
+
+   def dispatch(self, topicUri, event, exclude = [], eligible = None):
+      """
+      Dispatch an event for a topic to all clients subscribed to
+      and authorized for that topic.
+
+      Optionally, exclude a list of clients and/or restrict delivery to an
+      explicit list of eligible clients. In other words, the event is delivered
+      to the set
+
+         (subscribers - excluded) & eligible
+
+      :param topicUri: URI of topic to publish event to.
+      :type topicUri: str
+      :param event: Event to dispatch.
+      :type event: obj
+      :param exclude: Optional list of clients (WampServerProtocol instances) to exclude.
+      :type exclude: list of obj
+      :param eligible: Optional list of clients (WampServerProtocol instances) eligible at all (or None for all).
+      :type eligible: list of obj
+
+      :returns twisted.internet.defer.Deferred -- Will be fired when event was
+      dispatched to all subscribers. The return value provided to the deferred
+      is a pair (delivered, requested), where delivered = number of actual
+      receivers, and requested = number of (subscribers - excluded) & eligible.
+      """
+      return self.factory.dispatch(topicUri, event, exclude, eligible)
+
+
+   def onMessage(self, msg, binary):
+      """
+      Handle WAMP messages received from WAMP client.
+      """
+
+      if self.debugWamp:
+         log.msg("RX WAMP: %s" % str(msg))
+
+      if not binary:
+         try:
+            obj = self.factory._unserialize(msg)
+            if type(obj) == list:
+
+               msgtype = obj[0]
+
+               ### XXX Replace check by try...except when all handlers
+               ### XXX are in place. Exception handling should create
+               ### XXX a protocolError message about unsupported
+               ### XXX message type
+               if msgtype in [WampProtocol.MESSAGE_TYPEID_CALL,
+                              WampProtocol.MESSAGE_TYPEID_CALL_RESULT,
+                              WampProtocol.MESSAGE_TYPEID_CALL_ERROR]:
+                  self.handlerMapping[msgtype].handleMessage(obj)
+
+               ### XXX Move remaining code to appropriate handlers
+
+               ## Subscribe Message
+               ##
+               elif msgtype == WampProtocol.MESSAGE_TYPEID_SUBSCRIBE:
+                  topicUri = self.prefixes.resolveOrPass(obj[1]) ### PFX - remove
+                  h = self._getSubHandler(topicUri)
+                  if h:
+                     ## either exact match or prefix match allowed
+                     if h[1] == "" or h[4]:
+
+                        ## direct topic
+                        if h[2] is None and h[3] is None:
+                           self.factory._subscribeClient(self, topicUri)
+
+                        ## topic handled by subscription handler
+                        else:
+                           try:
+                              ## handler is object method
+                              if h[2]:
+                                 a = h[3](h[2], str(h[0]), str(h[1]))
+
+                              ## handler is free standing procedure
+                              else:
+                                 a = h[3](str(h[0]), str(h[1]))
+
+                              ## only subscribe client if handler did return True
+                              if a:
+                                 self.factory._subscribeClient(self, topicUri)
+                           except:
+                              if self.debugWamp:
+                                 log.msg("exception during topic subscription handler:")
+                              traceback.print_exc()
+                     else:
+                        if self.debugWamp:
+                           log.msg("topic %s matches only by prefix and prefix match disallowed" % topicUri)
+                  else:
+                     if self.debugWamp:
+                        log.msg("no topic / subscription handler registered for %s" % topicUri)
+
+               ## Unsubscribe Message
+               ##
+               elif msgtype == WampProtocol.MESSAGE_TYPEID_UNSUBSCRIBE:
+                  topicUri = self.prefixes.resolveOrPass(obj[1]) ### PFX - remove
+                  self.factory._unsubscribeClient(self, topicUri)
+
+               ## Publish Message
+               ##
+               elif msgtype == WampProtocol.MESSAGE_TYPEID_PUBLISH:
+                  topicUri = self.prefixes.resolveOrPass(obj[1]) ### PFX - remove
+                  h = self._getPubHandler(topicUri)
+                  if h:
+                     ## either exact match or prefix match allowed
+                     if h[1] == "" or h[4]:
+
+                        ## Event
+                        ##
+                        event = obj[2]
+
+                        ## Exclude Sessions List
+                        ##
+                        exclude = [self] # exclude publisher by default
+                        if len(obj) >= 4:
+                           if type(obj[3]) == bool:
+                              if not obj[3]:
+                                 exclude = []
+                           elif type(obj[3]) == list:
+                              ## map session IDs to protos
+                              exclude = self.factory.sessionIdsToProtos(obj[3])
+                           else:
+                              ## FIXME: invalid type
+                              pass
+
+                        ## Eligible Sessions List
+                        ##
+                        eligible = None # all sessions are eligible by default
+                        if len(obj) >= 5:
+                           if type(obj[4]) == list:
+                              ## map session IDs to protos
+                              eligible = self.factory.sessionIdsToProtos(obj[4])
+                           else:
+                              ## FIXME: invalid type
+                              pass
+
+                        ## direct topic
+                        if h[2] is None and h[3] is None:
+                           self.factory.dispatch(topicUri, event, exclude, eligible)
+
+                        ## topic handled by publication handler
+                        else:
+                           try:
+                              ## handler is object method
+                              if h[2]:
+                                 e = h[3](h[2], str(h[0]), str(h[1]), event)
+
+                              ## handler is free standing procedure
+                              else:
+                                 e = h[3](str(h[0]), str(h[1]), event)
+
+                              ## only dispatch event if handler did return event
+                              if e:
+                                 self.factory.dispatch(topicUri, e, exclude, eligible)
+                           except:
+                              if self.debugWamp:
+                                 log.msg("exception during topic publication handler:")
+                              traceback.print_exc()
+                     else:
+                        if self.debugWamp:
+                           log.msg("topic %s matches only by prefix and prefix match disallowed" % topicUri)
+                  else:
+                     if self.debugWamp:
+                        log.msg("no topic / publication handler registered for %s" % topicUri)
+
+               ## Define prefix to be used in CURIEs
+               ##
+               elif msgtype == WampProtocol.MESSAGE_TYPEID_PREFIX:
+                  prefix = obj[1]
+                  uri = obj[2]
+                  self.prefixes.set(prefix, uri) ### PFX - remove whole block (this msg type won't survive)
+
+               else:
+                  log.msg("unknown message type")
+            else:
+               log.msg("msg not a list")
+         except Exception, e:
+            traceback.print_exc()
+      else:
+         log.msg("binary message")
+
+
+
+class WampServerFactory(WebSocketServerFactory, WampFactory):
+   """
+   Server factory for WAMP RPC/PubSub.
+   """
+
+   protocol = WampServerProtocol
+   """
+   Twisted protocol used by default for WAMP servers.
+   """
+
+   def __init__(self, url, debug = False, debugCodePaths = False, debugWamp = False, debugApp = False, externalPort = None):
+      WebSocketServerFactory.__init__(self, url, protocols = ["wamp"], debug = debug, debugCodePaths = debugCodePaths, externalPort = externalPort)
+      self.debugWamp = debugWamp
+      self.debugApp = debugApp
+
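+   ## A minimal server setup sketch (protocol class, URL and port are hypothetical);
+   ## assuming WebSocketServerFactory is a plain Twisted server factory (as in this
+   ## Autobahn version), reactor.listenTCP can be used directly:
+   ##
+   ##    factory = WampServerFactory("ws://localhost:9000", debugWamp = True)
+   ##    factory.protocol = MyServerProtocol   # a WampServerProtocol subclass
+   ##    reactor.listenTCP(9000, factory)
+   ##    reactor.run()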
+
+   def onClientSubscribed(self, proto, topicUri):
+      """
+      Callback fired when peer was (successfully) subscribed on some topic.
+
+      :param proto: Peer protocol instance subscribed.
+      :type proto: Instance of WampServerProtocol.
+      :param topicUri: Fully qualified, resolved URI of topic subscribed.
+      :type topicUri: str
+      """
+      pass
+
+
+   def _subscribeClient(self, proto, topicUri):
+      """
+      Called from proto to subscribe client for topic.
+      """
+      if not self.subscriptions.has_key(topicUri):
+         self.subscriptions[topicUri] = set()
+         if self.debugWamp:
+            log.msg("subscriptions map created for topic %s" % topicUri)
+      if not proto in self.subscriptions[topicUri]:
+         self.subscriptions[topicUri].add(proto)
+         if self.debugWamp:
+            log.msg("subscribed peer %s on topic %s" % (proto.peerstr, topicUri))
+         self.onClientSubscribed(proto, topicUri)
+      else:
+         if self.debugWamp:
+            log.msg("peer %s already subscribed on topic %s" % (proto.peerstr, topicUri))
+
+
+   def onClientUnsubscribed(self, proto, topicUri):
+      """
+      Callback fired when peer was (successfully) unsubscribed from some topic.
+
+      :param proto: Peer protocol instance unsubscribed.
+      :type proto: Instance of WampServerProtocol.
+      :param topicUri: Fully qualified, resolved URI of topic unsubscribed.
+      :type topicUri: str
+      """
+      pass
+
+
+   def _unsubscribeClient(self, proto, topicUri = None):
+      """
+      Called from proto to unsubscribe client from topic.
+      """
+      if topicUri:
+         if self.subscriptions.has_key(topicUri) and proto in self.subscriptions[topicUri]:
+            self.subscriptions[topicUri].discard(proto)
+            if self.debugWamp:
+               log.msg("unsubscribed peer %s from topic %s" % (proto.peerstr, topicUri))
+            if len(self.subscriptions[topicUri]) == 0:
+               del self.subscriptions[topicUri]
+               if self.debugWamp:
+                  log.msg("topic %s removed from subscriptions map - no one subscribed anymore" % topicUri)
+            self.onClientUnsubscribed(proto, topicUri)
+         else:
+            if self.debugWamp:
+               log.msg("peer %s not subscribed on topic %s" % (proto.peerstr, topicUri))
+      else:
+         for topicUri, subscribers in self.subscriptions.items():
+            if proto in subscribers:
+               subscribers.discard(proto)
+               if self.debugWamp:
+                  log.msg("unsubscribed peer %s from topic %s" % (proto.peerstr, topicUri))
+               if len(subscribers) == 0:
+                  del self.subscriptions[topicUri]
+                  if self.debugWamp:
+                     log.msg("topic %s removed from subscriptions map - no one subscribed anymore" % topicUri)
+               self.onClientUnsubscribed(proto, topicUri)
+         if self.debugWamp:
+            log.msg("unsubscribed peer %s from all topics" % (proto.peerstr))
+
+
+   def dispatch(self, topicUri, event, exclude = [], eligible = None):
+      """
+      Dispatch an event to all peers subscribed to the event topic.
+
+      :param topicUri: Topic to publish event to.
+      :type topicUri: str
+      :param event: Event to publish (must be JSON serializable).
+      :type event: obj
+      :param exclude: List of WampServerProtocol instances to exclude from receivers.
+      :type exclude: List of obj
+      :param eligible: List of WampServerProtocol instances eligible as receivers (or None for all).
+      :type eligible: List of obj
+
+      :returns twisted.internet.defer.Deferred -- Will be fired when event was
+      dispatched to all subscribers. The return value provided to the deferred
+      is a pair (delivered, requested), where delivered = number of actual
+      receivers, and requested = number of (subscribers - excluded) & eligible.
+      """
+      if self.debugWamp:
+         log.msg("publish event %s for topicUri %s" % (str(event), topicUri))
+
+      d = Deferred()
+
+      if self.subscriptions.has_key(topicUri) and len(self.subscriptions[topicUri]) > 0:
+
+         ## FIXME: this might break ordering of event delivery from a
+         ## receiver perspective. We might need to have send queues
+         ## per receiver OR do recvs = deque(sorted(..))
+
+         ## However, see http://twistedmatrix.com/trac/ticket/1396
+
+         if eligible is not None:
+            subscrbs = set(eligible) & self.subscriptions[topicUri]
+         else:
+            subscrbs = self.subscriptions[topicUri]
+
+         if len(exclude) > 0:
+            recvs = subscrbs - set(exclude)
+         else:
+            recvs = subscrbs
+
+         l = len(recvs)
+         if l > 0:
+
+            ## ok, at least 1 subscriber not excluded and eligible
+            ## => prepare message for mass sending
+            ##
+            o = [WampProtocol.MESSAGE_TYPEID_EVENT, topicUri, event]
+            try:
+               msg = self._serialize(o)
+               if self.debugWamp:
+                  log.msg("serialized event msg: " + str(msg))
+            except Exception, e:
+               raise Exception("invalid type for event - serialization failed [%s]" % e)
+
+            preparedMsg = self.prepareMessage(msg)
+
+            ## chunked sending of prepared message
+            ##
+            self._sendEvents(preparedMsg, recvs.copy(), 0, l, d)
+
+         else:
+            ## receivers list empty after considering exclude and eligible sessions
+            ##
+            d.callback((0, 0))
+      else:
+         ## no one subscribed on topic
+         ##
+         d.callback((0, 0))
+
+      return d
+
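+   ## A minimal dispatch sketch (topic and payload are hypothetical): push an
+   ## event to all current subscribers of a topic from server-side code:
+   ##
+   ##    d = factory.dispatch("http://example.com/event#tick", {"count": 10})
+   ##    d.addCallback(lambda res: log.msg("delivered %d of %d" % res))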
+
+   def _sendEvents(self, preparedMsg, recvs, delivered, requested, d):
+      """
+      Delivers events to receivers in chunks and reenters the reactor
+      in-between, so that other stuff can run.
+      """
+      ## deliver a batch of events
+      done = False
+      for i in xrange(0, 256):
+         try:
+            proto = recvs.pop()
+            if proto.state == WebSocketProtocol.STATE_OPEN:
+               try:
+                  proto.sendPreparedMessage(preparedMsg)
+               except:
+                  pass
+               else:
+                  if self.debugWamp:
+                     log.msg("delivered event to peer %s" % proto.peerstr)
+                  delivered += 1
+         except KeyError:
+            # all receivers done
+            done = True
+            break
+
+      if not done:
+         ## if there are receivers left, redo
+         reactor.callLater(0, self._sendEvents, preparedMsg, recvs, delivered, requested, d)
+      else:
+         ## else fire final result
+         d.callback((delivered, requested))
+
+
+   def _addSession(self, proto, session_id):
+      """
+      Add proto for session ID.
+      """
+      if not self.protoToSessions.has_key(proto):
+         self.protoToSessions[proto] = session_id
+      else:
+         raise Exception("logic error - dublicate _addSession for protoToSessions")
+      if not self.sessionsToProto.has_key(session_id):
+         self.sessionsToProto[session_id] = proto
+      else:
+         raise Exception("logic error - dublicate _addSession for sessionsToProto")
+
+
+   def _removeSession(self, proto):
+      """
+      Remove session by proto.
+      """
+      if self.protoToSessions.has_key(proto):
+         session_id = self.protoToSessions[proto]
+         del self.protoToSessions[proto]
+         if self.sessionsToProto.has_key(session_id):
+            del self.sessionsToProto[session_id]
+
+
+   def sessionIdToProto(self, sessionId):
+      """
+      Map WAMP session ID to connected protocol instance (object of type WampServerProtocol).
+
+      :param sessionId: WAMP session ID to be mapped.
+      :type sessionId: str
+
+      :returns obj -- WampServerProtocol instance or None.
+      """
+      return self.sessionsToProto.get(sessionId, None)
+
+
+   def sessionIdsToProtos(self, sessionIds):
+      """
+      Map WAMP session IDs to connected protocol instances (objects of type WampServerProtocol).
+
+      :param sessionIds: List of session IDs to be mapped.
+      :type sessionIds: list of str
+
+      :returns list -- List of WampServerProtocol instances corresponding to the WAMP session IDs.
+      """
+      protos = []
+      for s in sessionIds:
+         if self.sessionsToProto.has_key(s):
+            protos.append(self.sessionsToProto[s])
+      return protos
+
+
+   def protoToSessionId(self, proto):
+      """
+      Map connected protocol instance (object of type WampServerProtocol) to WAMP session ID.
+
+      :param proto: Instance of WampServerProtocol to be mapped.
+      :type proto: obj of WampServerProtocol
+
+      :returns str -- WAMP session ID or None.
+      """
+      return self.protoToSessions.get(proto, None)
+
+
+   def protosToSessionIds(self, protos):
+      """
+      Map connected protocol instances (objects of type WampServerProtocol) to WAMP session IDs.
+
+      :param protos: List of instances of WampServerProtocol to be mapped.
+      :type protos: list of WampServerProtocol
+
+      :returns list -- List of WAMP session IDs corresponding to the protos.
+      """
+      sessionIds = []
+      for p in protos:
+         if self.protoToSessions.has_key(p):
+            sessionIds.append(self.protoToSessions[p])
+      return sessionIds
+
+
+   def startFactory(self):
+      """
+      Called by Twisted when the factory starts up. When overriding, make
+      sure to call the base method.
+      """
+      if self.debugWamp:
+         log.msg("WampServerFactory starting")
+      self.subscriptions = {}
+      self.protoToSessions = {}
+      self.sessionsToProto = {}
+
+
+   def stopFactory(self):
+      """
+      Called by Twisted when the factory shuts down. When overriding, make
+      sure to call the base method.
+      """
+      if self.debugWamp:
+         log.msg("WampServerFactory stopped")
+
+
+
+class WampClientProtocol(WebSocketClientProtocol, WampProtocol):
+   """
+   Twisted client protocol for WAMP.
+   """
+
+   def onSessionOpen(self):
+      """
+      Callback fired when WAMP session was fully established. Override
+      in derived class.
+      """
+      pass
+
+
+   def onOpen(self):
+      ## do nothing here .. onSessionOpen is only fired when welcome
+      ## message was received (and thus session ID set)
+      pass
+
+
+   def onConnect(self, connectionResponse):
+      if connectionResponse.protocol not in self.factory.protocols:
+         raise Exception("server does not speak WAMP")
+
+
+   def connectionMade(self):
+      WebSocketClientProtocol.connectionMade(self)
+      WampProtocol.connectionMade(self)
+
+      self.subscriptions = {}
+
+      self.handlerMapping = {
+         self.MESSAGE_TYPEID_CALL: CallHandler(self, self.prefixes),
+         self.MESSAGE_TYPEID_CALL_RESULT: CallResultHandler(self, self.prefixes),
+         self.MESSAGE_TYPEID_CALL_ERROR: CallErrorHandler(self, self.prefixes)}
+
+
+   def connectionLost(self, reason):
+      WampProtocol.connectionLost(self, reason)
+      WebSocketClientProtocol.connectionLost(self, reason)
+
+
+   def sendMessage(self, payload):
+      if self.debugWamp:
+         log.msg("TX WAMP: %s" % str(payload))
+      WebSocketClientProtocol.sendMessage(self, payload)
+
+
+   def onMessage(self, msg, binary):
+      """Internal method to handle WAMP messages received from WAMP server."""
+
+      ## WAMP is text message only
+      ##
+      if binary:
+         self._protocolError("binary WebSocket message received")
+         return
+
+      if self.debugWamp:
+         log.msg("RX WAMP: %s" % str(msg))
+
+      ## WAMP is proper JSON payload
+      ##
+      try:
+         obj = self.factory._unserialize(msg)
+      except Exception, e:
+         self._protocolError("WAMP message payload could not be unserialized [%s]" % e)
+         return
+
+      ## Every WAMP message is a list
+      ##
+      if type(obj) != list:
+         self._protocolError("WAMP message payload not a list")
+         return
+
+      ## Every WAMP message starts with an integer for message type
+      ##
+      if len(obj) < 1:
+         self._protocolError("WAMP message without message type")
+         return
+      if type(obj[0]) != int:
+         self._protocolError("WAMP message type not an integer")
+         return
+
+      ## WAMP message type
+      ##
+      msgtype = obj[0]
+
+      ## Valid WAMP message types received by WAMP clients
+      ##
+      if msgtype not in [WampProtocol.MESSAGE_TYPEID_WELCOME,
+                         WampProtocol.MESSAGE_TYPEID_CALL,
+                         WampProtocol.MESSAGE_TYPEID_CALL_RESULT,
+                         WampProtocol.MESSAGE_TYPEID_CALL_ERROR,
+                         WampProtocol.MESSAGE_TYPEID_EVENT]:
+         self._protocolError("invalid WAMP message type %d" % msgtype)
+         return
+
+      if msgtype in [WampProtocol.MESSAGE_TYPEID_CALL,
+                     WampProtocol.MESSAGE_TYPEID_CALL_RESULT,
+                     WampProtocol.MESSAGE_TYPEID_CALL_ERROR]:
+         self.handlerMapping[msgtype].handleMessage(obj)
+
+      ## WAMP EVENT
+      ##
+      elif msgtype == WampProtocol.MESSAGE_TYPEID_EVENT:
+         ## Topic
+         ##
+         if len(obj) != 3:
+            self._protocolError("WAMP EVENT message invalid length %d" % len(obj))
+            return
+         if type(obj[1]) not in [unicode, str]:
+            self._protocolError("invalid type for <topic> in WAMP EVENT message")
+            return
+         unresolvedTopicUri = str(obj[1])
+         topicUri = self.prefixes.resolveOrPass(unresolvedTopicUri) ### PFX - remove
+
+         ## Fire PubSub Handler
+         ##
+         if self.subscriptions.has_key(topicUri):
+            event = obj[2]
+            self.subscriptions[topicUri](topicUri, event)
+         else:
+            ## event received for non-subscribed topic (could be because we
+            ## just unsubscribed, and server already sent out event for
+            ## previous subscription)
+            pass
+
+      ## WAMP WELCOME
+      ##
+      elif msgtype == WampProtocol.MESSAGE_TYPEID_WELCOME:
+         ## Session ID
+         ##
+         if len(obj) < 2:
+            self._protocolError("WAMP WELCOME message invalid length %d" % len(obj))
+            return
+         if type(obj[1]) not in [unicode, str]:
+            self._protocolError("invalid type for <sessionid> in WAMP WELCOME message")
+            return
+         self.session_id = str(obj[1])
+
+         ## WAMP Protocol Version
+         ##
+         if len(obj) > 2:
+            if type(obj[2]) not in [int]:
+               self._protocolError("invalid type for <version> in WAMP WELCOME message")
+               return
+            else:
+               self.session_protocol_version = obj[2]
+         else:
+            self.session_protocol_version = None
+
+         ## Server Ident
+         ##
+         if len(obj) > 3:
+            if type(obj[3]) not in [unicode, str]:
+               self._protocolError("invalid type for <server> in WAMP WELCOME message")
+               return
+            else:
+               self.session_server = obj[3]
+         else:
+            self.session_server = None
+
+         self.onSessionOpen()
+
+      else:
+         raise Exception("logic error")
+
+
+   def prefix(self, prefix, uri):
+      """
+      Establish a prefix that can then be used in CURIEs, in both client-to-server
+      and server-to-client messages, in place of full URIs starting with that prefix.
+
+      :param prefix: Prefix to be used in CURIEs.
+      :type prefix: str
+      :param uri: URI that this prefix will resolve to.
+      :type uri: str
+      """
+
+      if type(prefix) != str:
+         raise Exception("invalid type for prefix")
+
+      if type(uri) not in [unicode, str]:
+         raise Exception("invalid type for URI")
+
+      if self.prefixes.get(prefix):  ### PFX - keep
+         raise Exception("prefix already defined")
+
+      self.prefixes.set(prefix, uri) ### PFX - keep
+
+      msg = [WampProtocol.MESSAGE_TYPEID_PREFIX, prefix, uri]
+
+      self.sendMessage(self.factory._serialize(msg))
+
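+   ## A minimal prefix sketch (prefix and URI are hypothetical): once established,
+   ## a CURIE can be sent in place of the full URI, e.g. in call():
+   ##
+   ##    self.prefix("calc", "http://example.com/calc#")
+   ##    self.call("calc:add", 2, 3)   # resolved to http://example.com/calc#add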
+
+   def publish(self, topicUri, event, excludeMe = None, exclude = None, eligible = None):
+      """
+      Publish an event under a topic URI. The latter may be abbreviated using a
+      CURIE which has been previously defined using prefix(). The event must
+      be JSON serializable.
+
+      :param topicUri: The topic URI or CURIE.
+      :type topicUri: str
+      :param event: Event to be published (must be JSON serializable) or None.
+      :type event: value
+      :param excludeMe: When True, don't deliver the published event to myself (when I'm subscribed).
+      :type excludeMe: bool
+      :param exclude: Optional list of session IDs to exclude from receivers.
+      :type exclude: list of str
+      :param eligible: Optional list of session IDs that are eligible as receivers.
+      :type eligible: list of str
+      """
+
+      if type(topicUri) not in [unicode, str]:
+         raise Exception("invalid type for parameter 'topicUri' - must be string (was %s)" % type(topicUri))
+
+      if excludeMe is not None:
+         if type(excludeMe) != bool:
+            raise Exception("invalid type for parameter 'excludeMe' - must be bool (was %s)" % type(excludeMe))
+
+      if exclude is not None:
+         if type(exclude) != list:
+            raise Exception("invalid type for parameter 'exclude' - must be list (was %s)" % type(exclude))
+
+      if eligible is not None:
+         if type(eligible) != list:
+            raise Exception("invalid type for parameter 'eligible' - must be list (was %s)" % type(eligible))
+
+      if exclude is not None or eligible is not None:
+         if exclude is None:
+            if excludeMe is not None:
+               if excludeMe:
+                  exclude = [self.session_id]
+               else:
+                  exclude = []
+            else:
+               exclude = [self.session_id]
+         if eligible is not None:
+            msg = [WampProtocol.MESSAGE_TYPEID_PUBLISH, topicUri, event, exclude, eligible]
+         else:
+            msg = [WampProtocol.MESSAGE_TYPEID_PUBLISH, topicUri, event, exclude]
+      else:
+         if excludeMe:
+            msg = [WampProtocol.MESSAGE_TYPEID_PUBLISH, topicUri, event]
+         else:
+            msg = [WampProtocol.MESSAGE_TYPEID_PUBLISH, topicUri, event, excludeMe]
+
+      try:
+         o = self.factory._serialize(msg)
+      except:
+         raise Exception("invalid type for parameter 'event' - not JSON serializable")
+
+      self.sendMessage(o)
+
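+   ## A minimal publish sketch (topic and event are hypothetical); excludeMe = True
+   ## keeps the event from being delivered back to this session:
+   ##
+   ##    self.publish("http://example.com/event#chat", {"msg": "hello"},
+   ##                 excludeMe = True)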
+
+   def subscribe(self, topicUri, handler):
+      """
+      Subscribe to topic. When already subscribed, will overwrite the handler.
+
+      :param topicUri: URI or CURIE of topic to subscribe to.
+      :type topicUri: str
+      :param handler: Event handler to be invoked upon receiving events for topic.
+      :type handler: Python callable, will be called as in <callable>(eventUri, event).
+      """
+      if type(topicUri) not in [unicode, str]:
+         raise Exception("invalid type for parameter 'topicUri' - must be string (was %s)" % type(topicUri))
+
+      if type(handler) not in [types.FunctionType, types.MethodType, types.BuiltinFunctionType, types.BuiltinMethodType]:
+         raise Exception("invalid type for parameter 'handler' - must be a callable (was %s)" % type(handler))
+
+      turi = self.prefixes.resolveOrPass(topicUri) ### PFX - keep
+      if not self.subscriptions.has_key(turi):
+         msg = [WampProtocol.MESSAGE_TYPEID_SUBSCRIBE, topicUri]
+         o = self.factory._serialize(msg)
+         self.sendMessage(o)
+      self.subscriptions[turi] = handler
+
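+   ## A minimal subscribe sketch (topic and handler are hypothetical); the handler
+   ## is invoked as handler(topicUri, event) for every received event:
+   ##
+   ##    def onEvent(topicUri, event):
+   ##       log.msg("event on %s: %s" % (topicUri, event))
+   ##
+   ##    self.subscribe("http://example.com/event#chat", onEvent)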
+
+   def unsubscribe(self, topicUri):
+      """
+      Unsubscribe from topic. Will do nothing when currently not subscribed to the topic.
+
+      :param topicUri: URI or CURIE of topic to unsubscribe from.
+      :type topicUri: str
+      """
+      if type(topicUri) not in [unicode, str]:
+         raise Exception("invalid type for parameter 'topicUri' - must be string (was %s)" % type(topicUri))
+
+      turi = self.prefixes.resolveOrPass(topicUri) ### PFX - keep
+      if self.subscriptions.has_key(turi):
+         msg = [WampProtocol.MESSAGE_TYPEID_UNSUBSCRIBE, topicUri]
+         o = self.factory._serialize(msg)
+         self.sendMessage(o)
+         del self.subscriptions[turi]
+
+
+
+class WampClientFactory(WebSocketClientFactory, WampFactory):
+   """
+   Twisted client factory for WAMP.
+   """
+
+   protocol = WampClientProtocol
+
+   def __init__(self, url, debug = False, debugCodePaths = False, debugWamp = False, debugApp = False):
+      WebSocketClientFactory.__init__(self, url, protocols = ["wamp"], debug = debug, debugCodePaths = debugCodePaths)
+      self.debugWamp = debugWamp
+      self.debugApp = debugApp
+
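+   ## A minimal client setup sketch (protocol class, URL, host and port are
+   ## hypothetical); assuming WebSocketClientFactory is a plain Twisted client
+   ## factory (as in this Autobahn version), reactor.connectTCP can be used:
+   ##
+   ##    factory = WampClientFactory("ws://localhost:9000", debugWamp = True)
+   ##    factory.protocol = MyClientProtocol   # a WampClientProtocol subclass
+   ##    reactor.connectTCP("localhost", 9000, factory)
+   ##    reactor.run()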
+
+   def startFactory(self):
+      """
+      Called by Twisted when the factory starts up. When overriding, make
+      sure to call the base method.
+      """
+      if self.debugWamp:
+         log.msg("WebSocketClientFactory starting")
+
+
+   def stopFactory(self):
+      """
+      Called by Twisted when the factory shuts down. When overriding, make
+      sure to call the base method.
+      """
+      if self.debugWamp:
+         log.msg("WebSocketClientFactory stopped")
+
+
+
+class WampCraProtocol(WampProtocol):
+   """
+   Base class for WAMP Challenge-Response Authentication protocols (client and server).
+
+   WAMP-CRA is a cryptographically strong challenge response authentication
+   protocol based on HMAC-SHA256.
+
+   The protocol performs in-band authentication of WAMP clients to WAMP servers.
+
+   WAMP-CRA does not introduce any new WAMP protocol level message types, but
+   implements the authentication handshake via standard WAMP RPCs with well-known
+   procedure URIs and signatures.
+   """
+
+   def deriveKey(secret, extra = None):
+      """
+      Computes a derived cryptographic key from a password according to PBKDF2
+      http://en.wikipedia.org/wiki/PBKDF2.
+
+      The function will only return a derived key if at least 'salt' is
+      present in the 'extra' dictionary. The complete set of attributes
+      that can be set in 'extra':
+
+         salt: The salt value to be used.
+         iterations: Number of iterations of derivation algorithm to run.
+         keylen: Key length to derive.
+
+      :returns str -- The derived key or the original secret.
+      """
+      if type(extra) == dict and extra.has_key('salt'):
+         salt = str(extra['salt'])
+         iterations = int(extra.get('iterations', 10000))
+         keylen = int(extra.get('keylen', 32))
+         b = pbkdf2_bin(secret, salt, iterations, keylen, hashlib.sha256)
+         return binascii.b2a_base64(b).strip()
+      else:
+         return secret
+
+   deriveKey = staticmethod(deriveKey)
+
+
+   def authSignature(self, authChallenge, authSecret = None, authExtra = None):
+      """
+      Compute the authentication signature from an authentication challenge and a secret.
+
+      :param authChallenge: The authentication challenge.
+      :type authChallenge: str
+      :param authSecret: The authentication secret.
+      :type authSecret: str
+      :param authExtra: Extra authentication information for salting the secret
+                        (salt, keylen, iterations).
+      :type authExtra: dict
+
+      :returns str -- The authentication signature.
+      """
+      if authSecret is None:
+         authSecret = ""
+      authSecret = WampCraProtocol.deriveKey(authSecret, authExtra)
+      h = hmac.new(authSecret, authChallenge, hashlib.sha256)
+      sig = binascii.b2a_base64(h.digest()).strip()
+      return sig
+
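+   ## A minimal signing sketch (challenge, secret and salting parameters are
+   ## hypothetical); with 'extra' given, the secret is first run through PBKDF2:
+   ##
+   ##    extra = {'salt': 'RANDOMSALT', 'iterations': 1000, 'keylen': 32}
+   ##    sig = self.authSignature(challenge, "s3cr3t", authExtra = extra)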
+
+
+class WampCraClientProtocol(WampClientProtocol, WampCraProtocol):
+   """
+   Simple, authenticated WAMP client protocol.
+
+   The client can perform WAMP-Challenge-Response-Authentication ("WAMP-CRA") to authenticate
+   itself to a WAMP server. The server must implement WAMP-CRA as well.
+   """
+
+   def authenticate(self, authKey = None, authExtra = None, authSecret = None):
+      """
+      Authenticate the WAMP session to server.
+
+      :param authKey: The key of the authentication credentials, something like a user or application name.
+      :type authKey: str
+      :param authExtra: Any extra authentication information.
+      :type authExtra: dict
+      :param authSecret: The secret of the authentication credentials, something like the user password or application secret key.
+      :type authSecret: str
+
+      :returns Deferred -- Deferred that fires upon authentication success (with permissions) or failure.
+      """
+
+      def _onAuthChallenge(challenge):
+         if authKey is not None:
+            challengeObj =  self.factory._unserialize(challenge)
+            if 'authextra' in challengeObj:
+                authExtra = challengeObj['authextra']
+                sig = self.authSignature(challenge, authSecret, authExtra)
+            else:
+                sig = self.authSignature(challenge, authSecret)
+         else:
+            sig = None
+         d = self.call(WampProtocol.URI_WAMP_PROCEDURE + "auth", sig)
+         return d
+
+      d = self.call(WampProtocol.URI_WAMP_PROCEDURE + "authreq", authKey, authExtra)
+      d.addCallback(_onAuthChallenge)
+      return d
+
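+   ## A minimal client authentication sketch (credentials are hypothetical),
+   ## typically run from onSessionOpen(); the Deferred fires with the granted
+   ## permissions on success:
+   ##
+   ##    def onSessionOpen(self):
+   ##       d = self.authenticate(authKey = "joe", authSecret = "s3cr3t")
+   ##       d.addCallback(self.onAuthSuccess)   # hypothetical callback
+   ##       d.addErrback(self.onAuthError)      # hypothetical errback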
+
+
+class WampCraServerProtocol(WampServerProtocol, WampCraProtocol):
+   """
+   Simple, authenticating WAMP server protocol.
+
+   The server lets clients perform WAMP-Challenge-Response-Authentication ("WAMP-CRA")
+   to authenticate. The clients must implement WAMP-CRA as well.
+
+   To implement an authenticating server, override:
+
+      * getAuthSecret
+      * getAuthPermissions
+      * onAuthenticated
+
+   in your class deriving from this class.
+   """
+
+   clientAuthTimeout = 0
+   """
+   Client authentication timeout in seconds or 0 for infinite. A client
+   must perform authentication after the initial WebSocket handshake within
+   this timeout or the connection is failed.
+   """
+
+   clientAuthAllowAnonymous = True
+   """
+   Allow anonymous client authentication. When this is set to True, a client
+   may "authenticate" as anonymous.
+   """
+
+
+   def getAuthPermissions(self, authKey, authExtra):
+      """
+      Get the permissions the session is granted when the authentication succeeds
+      for the given key / extra information.
+
+      Override in derived class to implement your authentication.
+
+      A permissions object is structured like this::
+
+         {'permissions': {'rpc': [
+                                    {'uri':  / RPC Endpoint URI - String /,
+                                     'call': / Allow to call? - Boolean /}
+                                 ],
+                          'pubsub': [
+                                       {'uri':    / PubSub Topic URI / URI prefix - String /,
+                                        'prefix': / URI matched by prefix? - Boolean /,
+                                        'pub':    / Allow to publish? - Boolean /,
+                                        'sub':    / Allow to subscribe? - Boolean /}
+                                    ]
+                          }
+         }
+
+      You can add custom information to this object. The object will be provided again
+      when the client authentication succeeded in :meth:`onAuthenticated`.
+
+      :param authKey: The authentication key.
+      :type authKey: str
+      :param authExtra: Authentication extra information.
+      :type authExtra: dict
+
+      :returns obj or Deferred -- Return a permissions object or None when no permissions granted.
+      """
+      return None
+
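+   ## A minimal sketch (URIs are hypothetical) of a permissions object that an
+   ## override of getAuthPermissions might return, following the structure above:
+   ##
+   ##    return {'permissions': {
+   ##       'rpc':    [{'uri': 'http://example.com/calc#add', 'call': True}],
+   ##       'pubsub': [{'uri': 'http://example.com/event#', 'prefix': True,
+   ##                   'pub': True, 'sub': True}]}}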
+
+   def getAuthSecret(self, authKey):
+      """
+      Get the authentication secret for an authentication key, i.e. the
+      user password for the user name. Return None when the authentication
+      key does not exist.
+
+      Override in derived class to implement your authentication.
+
+      :param authKey: The authentication key.
+      :type authKey: str
+
+      :returns str or Deferred -- The authentication secret for the key or None when the key does not exist.
+      """
+      return None
+
+
+   def onAuthTimeout(self):
+      """
+      Fired when the client does not authenticate itself in time. The default implementation
+      will simply fail the connection.
+
+      May be overridden in derived class.
+      """
+      if not self._clientAuthenticated:
+         log.msg("failing connection upon client authentication timeout [%s secs]" % self.clientAuthTimeout)
+         self.failConnection()
+
+
+   def onAuthenticated(self, permissions):
+      """
+      Fired when client authentication was successful.
+
+      Override in derived class and register PubSub topics and/or RPC endpoints.
+
+      :param permissions: The permissions object returned from :meth:`getAuthPermissions`.
+      :type permissions: obj
+      """
+      pass
+
+
+   def registerForPubSubFromPermissions(self, permissions):
+      """
+      Register topics for PubSub from auth permissions.
+
+      :param permissions: The permissions granted to the now authenticated client.
+      :type permissions: list
+      """
+      for p in permissions['pubsub']:
+         ## register topics for the clients
+         ##
+         pubsub = (WampServerProtocol.PUBLISH if p['pub'] else 0) | \
+                  (WampServerProtocol.SUBSCRIBE if p['sub'] else 0)
+         topic = p['uri']
+         if self.pubHandlers.has_key(topic) or self.subHandlers.has_key(topic):
+            ## FIXME: handle dups!
+            log.msg("DUPLICATE TOPIC PERMISSION !!! " + topic)
+         self.registerForPubSub(topic, p['prefix'], pubsub)
+
+
+   def onSessionOpen(self):
+      """
+      Called when WAMP session has been established, but not yet authenticated. The default
+      implementation will prepare the session allowing the client to authenticate itself.
+      """
+
+      ## register RPC endpoints for WAMP-CRA authentication
+      ##
+      self.registerForRpc(self, WampProtocol.URI_WAMP_PROCEDURE, [WampCraServerProtocol.authRequest,
+                                                                  WampCraServerProtocol.auth])
+
+      ## reset authentication state
+      ##
+      self._clientAuthenticated = False
+      self._clientPendingAuth = None
+      self._clientAuthTimeoutCall = None
+
+      ## client authentication timeout
+      ##
+      if self.clientAuthTimeout > 0:
+         self._clientAuthTimeoutCall = reactor.callLater(self.clientAuthTimeout, self.onAuthTimeout)
+
+
+   @exportRpc("authreq")
+   def authRequest(self, authKey = None, extra = None):
+      """
+      RPC endpoint for clients to initiate the authentication handshake.
+
+      :param authKey: Authentication key, such as user name or application name.
+      :type authKey: str
+      :param extra: Authentication extra information.
+      :type extra: dict
+
+      :returns str -- Authentication challenge. The client will need to create an authentication signature from this.
+      """
+
+      ## check authentication state
+      ##
+      if self._clientAuthenticated:
+         raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "already-authenticated"), "already authenticated")
+      if self._clientPendingAuth is not None:
+         raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "authentication-already-requested"), "authentication request already issues - authentication pending")
+
+      ## check extra
+      ##
+      if extra:
+         if type(extra) != dict:
+            raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "invalid-argument"), "extra not a dictionary (was %s)." % str(type(extra)))
+      else:
+         extra = {}
+      #for k in extra:
+      #   if type(extra[k]) not in [str, unicode, int, long, float, bool, types.NoneType]:
+      #      raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "invalid-argument"), "attribute '%s' in extra not a primitive type (was %s)" % (k, str(type(extra[k]))))
+
+      ## check authKey
+      ##
+      if authKey is None and not self.clientAuthAllowAnonymous:
+         raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "anonymous-auth-forbidden"), "authentication as anonymous forbidden")
+
+      if type(authKey) not in [str, unicode, types.NoneType]:
+         raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "invalid-argument"), "authentication key must be a string (was %s)" % str(type(authKey)))
+
+      d = maybeDeferred(self.getAuthSecret, authKey)
+
+      def onGetAuthSecretOk(authSecret, authKey, extra):
+         if authKey is not None and authSecret is None:
+            raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "no-such-authkey"), "authentication key '%s' does not exist." % authKey)
+
+         ## each authentication request gets a unique authid, which can only be used (later) once!
+         ##
+         authid = newid()
+
+         ## create authentication challenge
+         ##
+         info = {}
+         info['authid'] = authid
+         info['authkey'] = authKey
+         info['timestamp'] = utcnow()
+         info['sessionid'] = self.session_id
+         info['extra'] = extra
+
+         pp = maybeDeferred(self.getAuthPermissions, authKey, extra)
+
+         def onAuthPermissionsOk(res):
+            if res is None:
+               res = {'permissions': {'pubsub': [], 'rpc': []}}
+            info['permissions'] = res['permissions']
+            if 'authextra' in res:
+                info['authextra'] = res['authextra']
+
+            if authKey:
+               ## authenticated session
+               ##
+               infoser = self.factory._serialize(info)
+               sig = self.authSignature(infoser, authSecret)
+
+               self._clientPendingAuth = (info, sig, res)
+               return infoser
+            else:
+               ## anonymous session
+               ##
+               self._clientPendingAuth = (info, None, res)
+               return None
+
+         def onAuthPermissionsError(e):
+            raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "auth-permissions-error"), str(e))
+
+         pp.addCallbacks(onAuthPermissionsOk, onAuthPermissionsError)
+
+         return pp
+
+      d.addCallback(onGetAuthSecretOk, authKey, extra)
+      return d
+
+
+   @exportRpc("auth")
+   def auth(self, signature = None):
+      """
+      RPC endpoint for clients to actually authenticate after requesting authentication and computing
+      a signature from the authentication challenge.
+
+      :param signature: Authentication signature computed by the client.
+      :type signature: str
+
+      :returns list -- A list of permissions the client is granted when authentication was successful.
+      """
+
+      ## check authentication state
+      ##
+      if self._clientAuthenticated:
+         raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "already-authenticated"), "already authenticated")
+      if self._clientPendingAuth is None:
+         raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "no-authentication-requested"), "no authentication previously requested")
+
+      ## check signature
+      ##
+      if type(signature) not in [str, unicode, types.NoneType]:
+         raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "invalid-argument"), "signature must be a string or None (was %s)" % str(type(signature)))
+      if self._clientPendingAuth[1] != signature:
+         ## delete pending authentication, so that no retries are possible. authid is only valid for 1 try!!
+         ## FIXME: drop the connection?
+         self._clientPendingAuth = None
+         raise Exception(self.shrink(WampProtocol.URI_WAMP_ERROR + "invalid-signature"), "signature for authentication request is invalid")
+
+      ## at this point, the client has successfully authenticated!
+
+      ## get the permissions we determined earlier
+      ##
+      perms = self._clientPendingAuth[2]
+
+      ## delete auth request and mark client as authenticated
+      ##
+      authKey = self._clientPendingAuth[0]['authkey']
+      self._clientAuthenticated = True
+      self._clientPendingAuth = None
+      if self._clientAuthTimeoutCall is not None:
+         self._clientAuthTimeoutCall.cancel()
+         self._clientAuthTimeoutCall = None
+
+      ## fire authentication callback
+      ##
+      self.onAuthenticated(authKey, perms)
+
+      ## return permissions to client
+      ##
+      return perms['permissions']
+
+
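+
+## The sketch below is one possible way an application could derive from
+## WampCraServerProtocol to enable WAMP-CRA authentication: the user database,
+## topic URI and granted permissions are hypothetical and for illustration only.
+##
+class _ExampleCraServerProtocol(WampCraServerProtocol):
+
+   ## hypothetical credential store; a real application would query a database
+   USERS = {"joe": "secret"}
+
+   def getAuthSecret(self, authKey):
+      ## return the secret for a known authentication key, else None
+      return self.USERS.get(authKey)
+
+   def getAuthPermissions(self, authKey, authExtra):
+      ## grant publish/subscribe on a single topic URI prefix, no RPCs
+      return {'permissions': {'pubsub': [{'uri': "http://example.com/topics/",
+                                          'prefix': True,
+                                          'pub': True,
+                                          'sub': True}],
+                              'rpc': []}}
+
+   def onAuthenticated(self, authKey, permissions):
+      ## register the PubSub topics from the permissions granted above
+      self.registerForPubSubFromPermissions(permissions['permissions'])
+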
+
+class Call:
+   """
+   Thin-wrapper for incoming RPCs provided to call handlers registered via
+
+     - registerHandlerMethodForRpc
+     - registerHandlerProcedureForRpc
+   """
+
+
+   def __init__(self,
+             proto,
+             callid,
+             uri,
+             args,
+             extra = None):
+      self.proto = proto
+      self.callid = callid
+      self.uri = uri
+      self.args = args
+      self.extra = extra
+      self.timings = None
+
+
+
+class Handler(object):
+   """
+   A handler for a certain class of messages.
+   """
+
+
+   typeid = None
+
+
+   def __init__(self, proto, prefixes):
+      """
+      Remember protocol and prefix map in instance variables.
+      """
+      self.proto = proto
+      self.prefixes = prefixes
+
+
+   def handleMessage(self, msg_parts):
+      """
+      Template method for handling a message.
+
+      Check if the correct handler for the message type was
+      called. Afterwards, assign all relevant parts of the message to
+      instance variables and call the (overridden) method
+      _handleMessage to actually handle the message.
+      """
+      msgtype = msg_parts[0]
+      if self.typeid:
+         assert msgtype == self.typeid, \
+             "Message type %s does not match type id %s" % (msgtype,
+                                                            self.typeid)
+      else:
+         assert False, \
+             "No typeid defined for %s" % self.__class__.__name__
+
+      if self._messageIsValid(msg_parts):
+         self._parseMessageParts(msg_parts)
+         self._handleMessage()
+
+
+   def _parseMessageParts(self, msg_parts):
+      """
+      Assign the message parts to instance variables.
+      Has to be overridden in subclasses.
+      """
+      raise NotImplementedError
+
+   def _messageIsValid(self, msg_parts):
+      """
+      Check if the message parts have expected properties (type, etc.).
+      Has to be overridden in subclasses.
+      """
+      raise NotImplementedError
+
+
+   def _handleMessage(self):
+      """
+      Handle a specific kind of message.
+      Has to be overridden in subclasses.
+      """
+      raise NotImplementedError
+
+
+   def maybeTrackTimings(self, call, msg):
+      """
+      Track timings, if desired.
+      """
+      if self.proto.trackTimings:
+         self.proto.doTrack(msg)
+         call.timings = self.proto.trackedTimings
+         self.proto.trackedTimings = Timings()
+
+
+
+class CallHandler(Handler):
+   """
+   A handler for incoming RPC calls.
+   """
+
+
+   typeid = WampProtocol.MESSAGE_TYPEID_CALL
+
+
+   def _messageIsValid(self, msg_parts):
+      callid, uri = msg_parts[1:3]
+      if not isinstance(callid, (str, unicode)):
+         self.proto._protocolError(
+            ("WAMP CALL message with invalid type %s for "
+            "<callid>") % type(callid))
+         return False
+
+      if not isinstance(uri, (str, unicode)):
+         self.proto._protocolError(
+            ("WAMP CALL message with invalid type %s for "
+            "<uri>") % type(uri))
+         return False
+
+      return True
+
+
+   def _parseMessageParts(self, msg_parts):
+      """
+      Parse message and create call object.
+      """
+      self.callid = msg_parts[1]
+      self.uri = self.prefixes.resolveOrPass(msg_parts[2]) ### PFX - remove
+      self.args = msg_parts[3:]
+
+
+   def _handleMessage(self):
+      """
+      Perform the RPC call and attach callbacks to its deferred object.
+      """
+      call = self._onBeforeCall()
+      ## execute incoming RPC
+      d = maybeDeferred(self._callProcedure, call)
+      ## register callback and errback with extra argument call
+      d.addCallbacks(self._onAfterCallSuccess,
+                     self._onAfterCallError,
+                     callbackArgs = (call,),
+                     errbackArgs = (call,))
+
+
+   def _onBeforeCall(self):
+      """
+      Create call object to move around call data
+      """
+      uri, args = self.proto.onBeforeCall(self.callid, self.uri, self.args,
+                                          bool(self.proto.procForUri(self.uri)))
+
+      call = Call(self.proto, self.callid, uri, args)
+      self.maybeTrackTimings(call, "onBeforeCall")
+      return call
+
+
+   def _callProcedure(self, call):
+      """
+      Actually performs the call of a procedure invoked via RPC.
+      """
+      m = self.proto.procForUri(call.uri)
+      if m is None:
+         raise Exception("no procedure registered for %s" % call.uri)
+
+      obj, method_or_proc, is_handler = m[:3]
+      if not is_handler:
+         return self._performProcedureCall(call, obj, method_or_proc)
+      else:
+         call.extra = m[3]
+         return self._delegateToRpcHandler(call, obj, method_or_proc)
+
+
+   def _performProcedureCall(self, call, obj, method_or_proc):
+      """
+      Perform an RPC method / procedure call.
+      """
+      cargs = tuple(call.args) if call.args else ()
+      if obj:
+         ## call object method
+         return method_or_proc(obj, *cargs)
+      else:
+         ## call free-standing function/procedure
+         return method_or_proc(*cargs)
+
+
+   def _delegateToRpcHandler(self, call, obj, method_or_proc):
+      """
+      Delegate call to RPC handler.
+      """
+      if obj:
+         ## call RPC handler on object
+         return method_or_proc(obj, call)
+      else:
+         ## call free-standing RPC handler
+         return method_or_proc(call)
+
+
+   def _onAfterCallSuccess(self, result, call):
+      """
+      Execute custom success handler and send call result.
+      """
+      self.maybeTrackTimings(call, "onAfterCallSuccess")
+      call.result = self.proto.onAfterCallSuccess(result, call)
+
+      ## send out WAMP message
+      self._sendCallResult(call)
+
+
+   def _onAfterCallError(self, error, call):
+      """
+      Execute custom error handler and send call error.
+      """
+      self.maybeTrackTimings(call, "onAfterCallError")
+      ## fire user callback
+      call.error = self.proto.onAfterCallError(error, call)
+
+      ## send out WAMP message
+      self._sendCallError(call)
+
+
+   def _sendCallResult(self, call):
+      """
+      Marshal and send an RPC success result.
+      """
+      msg = [WampProtocol.MESSAGE_TYPEID_CALL_RESULT, call.callid, call.result]
+      try:
+         rmsg = self.proto.serializeMessage(msg)
+      except:
+         raise Exception("call result not JSON serializable")
+      else:
+         self.proto.sendMessage(rmsg)
+         ### XXX self.maybeTrackTimings(call, "onAfterSendCallSuccess")
+         if self.proto.trackTimings:
+            self.proto.trackedTimings.track("onAfterSendCallSuccess")
+         self.proto.onAfterSendCallSuccess(rmsg, call)
+
+
+   def _sendCallError(self, call):
+      """
+      Marshal and send an RPC error result.
+      """
+      killsession = False
+      try:
+         error_info, killsession = self._extractErrorInfo(call)
+         rmsg = self._assembleErrorMessage(call, *error_info)
+      except Exception, e:
+         rmsg = self._handleProcessingError(call, e)
+      finally:
+         self._sendMessageAndCleanUp(rmsg, call, killsession)
+
+
+   def _extractErrorInfo(self, call):
+      """
+      Extract error information from the call.
+      """
+      ## get error args and len
+      ##
+      eargs = call.error.value.args
+      num_args = len(eargs)
+
+      if num_args > 4:
+         raise Exception("invalid args length %d for exception" % num_args)
+
+      erroruri = (WampProtocol.URI_WAMP_ERROR_GENERIC
+                  if num_args < 1
+                  else eargs[0])
+      errordesc = (WampProtocol.DESC_WAMP_ERROR_GENERIC
+                   if num_args < 2
+                   else eargs[1])
+      # errordetails must be JSON serializable .. if not, we get exception
+      # later in sendMessage
+      errordetails = (eargs[2]
+                      if num_args >= 3
+                      else (call.error.getTraceback().splitlines()
+                            if self.proto.includeTraceback
+                            else None))
+      killsession = (eargs[3]
+                     if num_args >= 4
+                     else False)
+
+      if type(erroruri) not in [str, unicode]:
+         raise Exception("invalid type %s for errorUri" % type(erroruri))
+      if type(errordesc) not in [str, unicode]:
+         raise Exception("invalid type %s for errorDesc" % type(errordesc))
+      if type(killsession) not in [bool, types.NoneType]:
+         raise Exception("invalid type %s for killSession" %
+                         type(killsession))
+
+      return (erroruri, errordesc, errordetails), killsession
+
+
+   def _assembleErrorMessage(self, call, erroruri, errordesc, errordetails):
+      """
+      Assemble a WAMP RPC error message.
+      """
+      if errordetails is not None:
+         msg = [WampProtocol.MESSAGE_TYPEID_CALL_ERROR,
+                call.callid,
+                self.prefixes.shrink(erroruri), ### PFX - remove
+                errordesc,
+                errordetails]
+      else:
+         msg = [WampProtocol.MESSAGE_TYPEID_CALL_ERROR,
+                call.callid,
+                self.prefixes.shrink(erroruri), ### PFX - remove
+                errordesc]
+
+      ## serialize message. this can fail if errorDetails is not
+      ## serializable
+      try:
+         rmsg = self.proto.serializeMessage(msg)
+      except Exception, e:
+         raise Exception(
+            "invalid object for errorDetails - not serializable (%s)" %
+            str(e))
+
+      return rmsg
+
+
+   def _handleProcessingError(self, call, e):
+      """
+      Create a message describing what went wrong while processing an
+      exception.
+      """
+      msg = [WampProtocol.MESSAGE_TYPEID_CALL_ERROR,
+             call.callid,
+              ### PFX - remove
+             self.prefixes.shrink(WampProtocol.URI_WAMP_ERROR_INTERNAL),
+             str(e)]
+
+      if self.proto.includeTraceback:
+         msg.append(call.error.getTraceback().splitlines())
+      result = self.proto.serializeMessage(msg)
+      return result
+
+
+   def _sendMessageAndCleanUp(self, rmsg, call, killsession):
+      self.proto.sendMessage(rmsg)
+      ### XXX maybeTrackTimings("onAfterSendCallError")
+      if self.proto.trackTimings:
+         self.proto.doTrack("onAfterSendCallError")
+      self.proto.onAfterSendCallError(rmsg, call)
+
+      if killsession:
+         self.proto.sendClose(3000,
+            "killing WAMP session upon request by application exception")
+
+
+
+class CallResultHandler(Handler):
+   """
+   A handler for RPC call results.
+   """
+
+   typeid = WampProtocol.MESSAGE_TYPEID_CALL_RESULT
+
+
+   def _messageIsValid(self, msg_parts):
+      if len(msg_parts) < 2:
+         self.proto._protocolError(
+            "WAMP CALL_RESULT message without <callid>")
+         return False
+      if len(msg_parts) != 3:
+         self.proto._protocolError(
+            "WAMP CALL_RESULT message with invalid length %d" % len(msg_parts))
+         return False
+
+      if type(msg_parts[1]) not in [unicode, str]:
+         self.proto._protocolError(
+            ("WAMP CALL_RESULT message with invalid type %s for "
+            "<callid>") % type(msg_parts[1]))
+         return False
+
+      return True
+
+
+   def _parseMessageParts(self, msg_parts):
+      """
+      Extract call result from message parts.
+      """
+      self.callid = str(msg_parts[1])
+      self.result = msg_parts[2]
+
+
+   def _handleMessage(self):
+      ## Pop and process Call Deferred
+      ##
+      d = self.proto.calls.pop(self.callid, None)
+      if d:
+         ## WAMP CALL_RESULT
+         ##
+         d.callback(self.result)
+      else:
+         if self.proto.debugWamp:
+            log.msg("callid not found for received call result message")
+
+
+
+class CallErrorHandler(Handler):
+   """
+   A handler for RPC call errors.
+   """
+
+   typeid = WampProtocol.MESSAGE_TYPEID_CALL_ERROR
+
+
+   def _messageIsValid(self, msg_parts):
+      if len(msg_parts) not in [4, 5]:
+         self.proto._protocolError(
+            "call error message invalid length %d" % len(msg_parts))
+         return False
+
+      ## Error URI
+      ##
+      if type(msg_parts[2]) not in [unicode, str]:
+         self.proto._protocolError(
+            "invalid type %s for errorUri in call error message" %
+            str(type(msg_parts[2])))
+         return False
+
+      ## Error Description
+      ##
+      if type(msg_parts[3]) not in [unicode, str]:
+         self.proto._protocolError(
+            "invalid type %s for errorDesc in call error message" %
+            str(type(msg_parts[3])))
+         return False
+
+      return True
+
+
+   def _parseMessageParts(self, msg_parts):
+      """
+      Extract error information from message parts.
+      """
+      self.callid = str(msg_parts[1])
+      self.erroruri = str(msg_parts[2])
+      self.errordesc = str(msg_parts[3])
+
+      ## Error Details
+      ##
+      if len(msg_parts) > 4:
+         self.errordetails = msg_parts[4]
+      else:
+         self.errordetails = None
+
+
+   def _handleMessage(self):
+      """
+      Fire Call Error Deferred.
+      """
+      ##
+      ## Pop and process Call Deferred
+      d = self.proto.calls.pop(self.callid, None)
+      if d:
+         e = Exception()
+         e.args = (self.erroruri, self.errordesc, self.errordetails)
+         d.errback(e)
+      else:
+         if self.proto.debugWamp:
+            log.msg("callid not found for received call error message")
diff --git a/ThirdParty/AutobahnPython/autobahn/websocket.py b/ThirdParty/AutobahnPython/autobahn/websocket.py
new file mode 100644
index 0000000..c65c418
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/websocket.py
@@ -0,0 +1,3847 @@
+###############################################################################
+##
+##  Copyright 2011-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+__all__ = ["createWsUrl",
+           "parseWsUrl",
+           "connectWS",
+           "listenWS",
+
+           "HttpException",
+           "ConnectionRequest",
+           "ConnectionResponse",
+           "Timings",
+
+           "WebSocketProtocol",
+           "WebSocketFactory",
+           "WebSocketServerProtocol",
+           "WebSocketServerFactory",
+           "WebSocketClientProtocol",
+           "WebSocketClientFactory"]
+
+## The Python urlparse module currently does not contain the ws/wss
+## schemes, so we add those dynamically (which is a hack of course).
+##
+import urlparse
+wsschemes = ["ws", "wss"]
+urlparse.uses_relative.extend(wsschemes)
+urlparse.uses_netloc.extend(wsschemes)
+urlparse.uses_params.extend(wsschemes)
+urlparse.uses_query.extend(wsschemes)
+urlparse.uses_fragment.extend(wsschemes)
+
+import urllib
+import binascii
+import hashlib
+import base64
+import struct
+import random
+import os
+from pprint import pformat
+from array import array
+from collections import deque
+
+from twisted.internet import reactor, protocol
+from twisted.python import log
+
+from _version import __version__
+from utf8validator import Utf8Validator
+from xormasker import XorMaskerNull, createXorMasker
+from httpstatus import *
+from util import Stopwatch
+
+
+def createWsUrl(hostname, port = None, isSecure = False, path = None, params = None):
+   """
+   Create a WebSocket URL from components.
+
+   :param hostname: WebSocket server hostname.
+   :type hostname: str
+   :param port: WebSocket service port or None (to select default ports 80/443 depending on isSecure).
+   :type port: int
+   :param isSecure: Set True for secure WebSocket ("wss" scheme).
+   :type isSecure: bool
+   :param path: Path component of addressed resource (will be properly URL escaped).
+   :type path: str
+   :param params: A dictionary of key-values to construct the query component of the addressed resource (will be properly URL escaped).
+   :type params: dict
+
+   :returns: str -- Constructed WebSocket URL.
+   """
+   if port is not None:
+      netloc = "%s:%d" % (hostname, port)
+   else:
+      if isSecure:
+         netloc = "%s:443" % hostname
+      else:
+         netloc = "%s:80" % hostname
+   if isSecure:
+      scheme = "wss"
+   else:
+      scheme = "ws"
+   if path is not None:
+      ppath = urllib.quote(path)
+   else:
+      ppath = "/"
+   if params is not None:
+      query = urllib.urlencode(params)
+   else:
+      query = None
+   return urlparse.urlunparse((scheme, netloc, ppath, None, query, None))
+
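+
+## A minimal usage sketch for createWsUrl(); the hostname, port and query
+## parameter below are hypothetical.
+def _exampleCreateWsUrl():
+   ## builds "wss://example.com:9000/chat?token=abc"
+   return createWsUrl("example.com", port = 9000, isSecure = True, path = "chat", params = {"token": "abc"})
+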
+
+def parseWsUrl(url):
+   """
+   Parses a WebSocket URL into its components and returns a tuple (isSecure, host, port, resource, path, params).
+
+   isSecure is a flag which is True for wss URLs.
+   host is the hostname or IP from the URL.
+   port is the port from the URL or standard port derived from scheme (ws = 80, wss = 443).
+   resource is the /resource name/ from the URL, the /path/ together with the (optional) /query/ component.
+   path is the /path/ component properly unescaped.
+   params is the /query/ component properly unescaped and returned as a dictionary.
+
+   :param url: A valid WebSocket URL, e.g. `ws://localhost:9000/myresource?param1=23&param2=666`
+   :type url: str
+
+   :returns: tuple -- A tuple (isSecure, host, port, resource, path, params)
+   """
+   parsed = urlparse.urlparse(url)
+   if parsed.scheme not in ["ws", "wss"]:
+      raise Exception("invalid WebSocket scheme '%s'" % parsed.scheme)
+   if parsed.port is None or parsed.port == "":
+      if parsed.scheme == "ws":
+         port = 80
+      else:
+         port = 443
+   else:
+      port = int(parsed.port)
+   if parsed.fragment is not None and parsed.fragment != "":
+      raise Exception("invalid WebSocket URL: non-empty fragment '%s" % parsed.fragment)
+   if parsed.path is not None and parsed.path != "":
+      ppath = parsed.path
+      path = urllib.unquote(ppath)
+   else:
+      ppath = "/"
+      path = ppath
+   if parsed.query is not None and parsed.query != "":
+      resource = ppath + "?" + parsed.query
+      params = urlparse.parse_qs(parsed.query)
+   else:
+      resource = ppath
+      params = {}
+   return (parsed.scheme == "wss", parsed.hostname, port, resource, path, params)
+
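+
+## A minimal usage sketch for parseWsUrl(); the URL below is hypothetical.
+def _exampleParseWsUrl():
+   ## returns (True, "example.com", 9000, "/chat?token=abc", "/chat", {"token": ["abc"]})
+   return parseWsUrl("wss://example.com:9000/chat?token=abc")
+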
+
+def connectWS(factory, contextFactory = None, timeout = 30, bindAddress = None):
+   """
+   Establish a WebSocket connection to a server. The connection parameters like target
+   host, port, resource and others are provided via the factory.
+
+   :param factory: The WebSocket protocol factory to be used for creating client protocol instances.
+   :type factory: An :class:`autobahn.websocket.WebSocketClientFactory` instance.
+   :param contextFactory: SSL context factory, required for secure WebSocket connections ("wss").
+   :type contextFactory: A `twisted.internet.ssl.ClientContextFactory <http://twistedmatrix.com/documents/current/api/twisted.internet.ssl.ClientContextFactory.html>`_ instance.
+   :param timeout: Number of seconds to wait before assuming the connection has failed.
+   :type timeout: int
+   :param bindAddress: A (host, port) tuple of local address to bind to, or None.
+   :type bindAddress: tuple
+
+   :returns: obj -- An object which implements `twisted.internet.interfaces.IConnector <http://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IConnector.html>`_.
+   """
+   if factory.isSecure:
+      if contextFactory is None:
+         # create default client SSL context factory when none given
+         from twisted.internet import ssl
+         contextFactory = ssl.ClientContextFactory()
+      conn = reactor.connectSSL(factory.host, factory.port, factory, contextFactory, timeout, bindAddress)
+   else:
+      conn = reactor.connectTCP(factory.host, factory.port, factory, timeout, bindAddress)
+   return conn
+
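+
+## A minimal sketch of connecting a plain WebSocket client via connectWS(); the
+## endpoint URL is hypothetical. WebSocketClientFactory and WebSocketClientProtocol
+## are defined later in this module and are resolved when the function is called.
+def _exampleConnectClient():
+   factory = WebSocketClientFactory("ws://localhost:9000")
+   factory.protocol = WebSocketClientProtocol
+   connectWS(factory)
+   reactor.run()
+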
+
+def listenWS(factory, contextFactory = None, backlog = 50, interface = ''):
+   """
+   Listen for incoming WebSocket connections from clients. The connection parameters like
+   listening port and others are provided via the factory.
+
+   :param factory: The WebSocket protocol factory to be used for creating server protocol instances.
+   :type factory: An :class:`autobahn.websocket.WebSocketServerFactory` instance.
+   :param contextFactory: SSL context factory, required for secure WebSocket connections ("wss").
+   :type contextFactory: A twisted.internet.ssl.ContextFactory.
+   :param backlog: Size of the listen queue.
+   :type backlog: int
+   :param interface: The interface (derived from hostname given) to bind to, defaults to '' (all).
+   :type interface: str
+
+   :returns: obj -- An object that implements `twisted.internet.interfaces.IListeningPort <http://twistedmatrix.com/documents/current/api/twisted.internet.interfaces.IListeningPort.html>`_.
+   """
+   if factory.isSecure:
+      if contextFactory is None:
+         raise Exception("Secure WebSocket listen requested, but no SSL context factory given")
+      listener = reactor.listenSSL(factory.port, factory, contextFactory, backlog, interface)
+   else:
+      listener = reactor.listenTCP(factory.port, factory, backlog, interface)
+   return listener
+
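+
+## A minimal sketch of running a plain WebSocket server via listenWS(); the
+## endpoint URL is hypothetical. WebSocketServerFactory and WebSocketServerProtocol
+## are defined later in this module and are resolved when the function is called.
+def _exampleListenServer():
+   factory = WebSocketServerFactory("ws://localhost:9000")
+   factory.protocol = WebSocketServerProtocol
+   listenWS(factory)
+   reactor.run()
+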
+
+class FrameHeader:
+   """
+   Thin-wrapper for storing WebSocket frame metadata.
+
+   FOR INTERNAL USE ONLY!
+   """
+
+   def __init__(self, opcode, fin, rsv, length, mask):
+      """
+      Constructor.
+
+      :param opcode: Frame opcode (0-15).
+      :type opcode: int
+      :param fin: Frame FIN flag.
+      :type fin: bool
+      :param rsv: Frame reserved flags (0-7).
+      :type rsv: int
+      :param length: Frame payload length.
+      :type length: int
+      :param mask: Frame mask (binary string) or None.
+      :type mask: str
+      """
+      self.opcode = opcode
+      self.fin = fin
+      self.rsv = rsv
+      self.length = length
+      self.mask = mask
+
+
+class HttpException:
+   """
+   Raise an instance of this class to deny a WebSocket connection
+   during handshake in :meth:`autobahn.websocket.WebSocketServerProtocol.onConnect`.
+   You can find definitions of HTTP status codes in module :mod:`autobahn.httpstatus`.
+   """
+
+   def __init__(self, code, reason):
+      """
+      Constructor.
+
+      :param code: HTTP error code.
+      :type code: int
+      :param reason: HTTP error reason.
+      :type reason: str
+      """
+      self.code = code
+      self.reason = reason
+
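+
+## A minimal sketch of how a server-side onConnect() implementation might use
+## HttpException to deny a client during the opening handshake; the header name,
+## status code and reason below are hypothetical.
+def _exampleDenyConnection(connectionRequest):
+   if connectionRequest.headers.get("x-api-key") != "expected-key":
+      raise HttpException(401, "missing or invalid API key")
+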
+
+class ConnectionRequest:
+   """
+   Thin-wrapper for WebSocket connection request information
+   provided in :meth:`autobahn.websocket.WebSocketServerProtocol.onConnect` when a WebSocket
+   client establishes a connection to a WebSocket server.
+   """
+   def __init__(self, peer, peerstr, headers, host, path, params, version, origin, protocols, extensions):
+      """
+      Constructor.
+
+      :param peer: IP address/port of the connecting client.
+      :type peer: object
+      :param peerstr: IP address/port of the connecting client as string.
+      :type peerstr: str
+      :param headers: HTTP headers from opening handshake request.
+      :type headers: dict
+      :param host: Host from opening handshake HTTP header.
+      :type host: str
+      :param path: Path from requested HTTP resource URI. For example, a resource URI of `/myservice?foo=23&foo=66&bar=2` will be parsed to `/myservice`.
+      :type path: str
+      :param params: Query parameters (if any) from requested HTTP resource URI. For example, a resource URI of `/myservice?foo=23&foo=66&bar=2` will be parsed to `{'foo': ['23', '66'], 'bar': ['2']}`.
+      :type params: dict of arrays of strings
+      :param version: The WebSocket protocol version the client announced (and which will be spoken when the connection is accepted).
+      :type version: int
+      :param origin: The WebSocket origin header or None. Note that this is only a reliable source of information for browser clients!
+      :type origin: str
+      :param protocols: The WebSocket (sub)protocols the client announced. You must select and return one of those (or None) in :meth:`autobahn.websocket.WebSocketServerProtocol.onConnect`.
+      :type protocols: array of strings
+      :param extensions: The WebSocket extensions the client requested and the server accepted (and which will thus be spoken once the WebSocket connection is established).
+      :type extensions: array of strings
+      """
+      self.peer = peer
+      self.peerstr = peerstr
+      self.headers = headers
+      self.host = host
+      self.path = path
+      self.params = params
+      self.version = version
+      self.origin = origin
+      self.protocols = protocols
+      self.extensions = extensions
+
+
+class ConnectionResponse():
+   """
+   Thin-wrapper for WebSocket connection response information
+   provided in :meth:`autobahn.websocket.WebSocketClientProtocol.onConnect` when a WebSocket
+   client has established a connection to a WebSocket server.
+   """
+   def __init__(self, peer, peerstr, headers, version, protocol, extensions):
+      """
+      Constructor.
+
+      :param peer: IP address/port of the connected server.
+      :type peer: object
+      :param peerstr: IP address/port of the connected server as string.
+      :type peerstr: str
+      :param headers: HTTP headers from opening handshake response.
+      :type headers: dict
+      :param version: The WebSocket protocol version that is spoken.
+      :type version: int
+      :param protocol: The WebSocket (sub)protocol in use.
+      :type protocol: str
+      :param extensions: The WebSocket extensions in use.
+      :type extensions: array of strings
+      """
+      self.peer = peer
+      self.peerstr = peerstr
+      self.headers = headers
+      self.version = version
+      self.protocol = protocol
+      self.extensions = extensions
+
+
+def parseHttpHeader(data):
+   """
+   Parses the beginning of an HTTP request header (the data up to the \n\n line)
+   into the HTTP status line and a dictionary of HTTP headers.
+   Header keys are normalized to all-lower-case.
+
+   FOR INTERNAL USE ONLY!
+
+   :param data: The HTTP header data up to the \n\n line.
+   :type data: str
+
+   :returns: tuple -- Tuple of HTTP status line, headers and headers count.
+   """
+   raw = data.splitlines()
+   http_status_line = raw[0].strip()
+   http_headers = {}
+   http_headers_cnt = {}
+   for h in raw[1:]:
+      i = h.find(":")
+      if i > 0:
+         ## HTTP header keys are case-insensitive
+         key = h[:i].strip().lower()
+
+         ## not sure if UTF-8 is allowed for HTTP header values..
+         value = h[i+1:].strip().decode("utf-8")
+
+         ## handle HTTP headers split across multiple lines
+         if http_headers.has_key(key):
+            http_headers[key] += ", %s" % value
+            http_headers_cnt[key] += 1
+         else:
+            http_headers[key] = value
+            http_headers_cnt[key] = 1
+      else:
+         # skip bad HTTP header
+         pass
+   return (http_status_line, http_headers, http_headers_cnt)
+
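+
+## A minimal usage sketch for parseHttpHeader() on a (hypothetical) opening
+## handshake request head.
+def _exampleParseHttpHeader():
+   data = "GET /chat HTTP/1.1\r\nHost: example.com\r\nUpgrade: websocket\r\n\r\n"
+   status_line, headers, headers_cnt = parseHttpHeader(data)
+   ## status_line == "GET /chat HTTP/1.1", headers["host"] == u"example.com"
+   return status_line, headers, headers_cnt
+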
+
+class Timings:
+   """
+   Helper class to track timings by key. This class also supports item access,
+   iteration and conversion to string.
+   """
+
+   def __init__(self):
+      self._stopwatch = Stopwatch()
+      self._timings = {}
+
+   def track(self, key):
+      """
+      Track elapsed time for key.
+
+      :param key: Key under which to track the timing.
+      :type key: str
+      """
+      self._timings[key] = self._stopwatch.elapsed()
+
+   def diff(self, startKey, endKey, format = True):
+      """
+      Get elapsed difference between two previously tracked keys.
+
+      :param startKey: First key for interval (older timestamp).
+      :type startKey: str
+      :param endKey: Second key for interval (younger timestamp).
+      :type endKey: str
+      :param format: If `True`, format computed time period and return string.
+      :type format: bool
+
+      :returns: float or str -- Computed time period in seconds (or formatted string).
+      """
+      if self._timings.has_key(endKey) and self._timings.has_key(startKey):
+         d = self._timings[endKey] - self._timings[startKey]
+         if format:
+            if d < 0.00001: # 10us
+               s = "%d ns" % round(d * 1000000000.)
+            elif d < 0.01: # 10ms
+               s = "%d us" % round(d * 1000000.)
+            elif d < 10: # 10s
+               s = "%d ms" % round(d * 1000.)
+            else:
+               s = "%d s" % round(d)
+            return s.rjust(8)
+         else:
+            return d
+      else:
+         if format:
+            return "n.a.".rjust(8)
+         else:
+            return None
+
+   def __getitem__(self, key):
+      return self._timings.get(key, None)
+
+   def __iter__(self):
+      return iter(self._timings)
+
+   def __str__(self):
+      return pformat(self._timings)
+
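+
+## A minimal usage sketch for Timings: track two points in time and format the
+## difference.
+def _exampleTimings():
+   t = Timings()
+   t.track("before")
+   t.track("after")
+   ## returns a right-justified string such as "   12 us"
+   return t.diff("before", "after")
+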
+
+
+class WebSocketProtocol(protocol.Protocol):
+   """
+   A Twisted Protocol class for WebSocket. This class is used by both the WebSocket
+   client and server protocol versions. It is not usable standalone; for example,
+   the WebSocket initial handshake is implemented differently in the derived classes
+   for clients and servers.
+   """
+
+   SUPPORTED_SPEC_VERSIONS = [0, 10, 11, 12, 13, 14, 15, 16, 17, 18]
+   """
+   WebSocket protocol spec (draft) versions supported by this implementation.
+   Use of version 18 indicates RFC6455. Use of versions < 18 indicates actual
+   draft spec versions (Hybi-Drafts). Use of version 0 indicates Hixie-76.
+   """
+
+   SUPPORTED_PROTOCOL_VERSIONS = [0, 8, 13]
+   """
+   WebSocket protocol versions supported by this implementation. For Hixie-76,
+   there is no protocol version announced in HTTP header, and we just use the
+   draft version (0) in this case.
+   """
+
+   SPEC_TO_PROTOCOL_VERSION = {0: 0, 10: 8, 11: 8, 12: 8, 13: 13, 14: 13, 15: 13, 16: 13, 17: 13, 18: 13}
+   """
+   Mapping from protocol spec (draft) version to protocol version.  For Hixie-76,
+   there is no protocol version announced in HTTP header, and we just use the
+   pseudo protocol version 0 in this case.
+   """
+
+   PROTOCOL_TO_SPEC_VERSION = {0: 0, 8: 12, 13: 18}
+   """
+   Mapping from protocol version to the latest protocol spec (draft) version
+   using that protocol version.  For Hixie-76, there is no protocol version
+   announced in HTTP header, and we just use the draft version (0) in this case.
+   """
+
+   DEFAULT_SPEC_VERSION = 18
+   """
+   Default WebSocket protocol spec version this implementation speaks: final RFC6455.
+   """
+
+   DEFAULT_ALLOW_HIXIE76 = False
+   """
+   By default, this implementation will not allow speaking the obsolete
+   Hixie-76 protocol version. That protocol version has security issues, but
+   is still spoken by some clients. Enable at your own risk! Enabling can be
+   done by using setProtocolOptions() on the factories for clients and servers.
+   """
+
+   _WS_MAGIC = "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"
+   """
+   Protocol-defined magic string used during the WebSocket handshake (used in
+   Hybi-drafts and final RFC6455).
+   """
+
+   _QUEUED_WRITE_DELAY = 0.00001
+   """
+   For synched/chopped writes, this is the reactor reentry delay in seconds.
+   """
+
+   MESSAGE_TYPE_TEXT = 1
+   """
+   WebSocket text message type (UTF-8 payload).
+   """
+
+   MESSAGE_TYPE_BINARY = 2
+   """
+   WebSocket binary message type (arbitrary binary payload).
+   """
+
+   ## WebSocket protocol state:
+   ## STATE_CONNECTING => STATE_OPEN => STATE_CLOSING => STATE_CLOSED
+   ##
+   STATE_CLOSED = 0
+   STATE_CONNECTING = 1
+   STATE_CLOSING = 2
+   STATE_OPEN = 3
+
+   ## Streaming Send State
+   SEND_STATE_GROUND = 0
+   SEND_STATE_MESSAGE_BEGIN = 1
+   SEND_STATE_INSIDE_MESSAGE = 2
+   SEND_STATE_INSIDE_MESSAGE_FRAME = 3
+
+   ## WebSocket protocol close codes
+   ##
+   CLOSE_STATUS_CODE_NORMAL = 1000
+   """Normal close of connection."""
+
+   CLOSE_STATUS_CODE_GOING_AWAY = 1001
+   """Going away."""
+
+   CLOSE_STATUS_CODE_PROTOCOL_ERROR = 1002
+   """Protocol error."""
+
+   CLOSE_STATUS_CODE_UNSUPPORTED_DATA = 1003
+   """Unsupported data."""
+
+   CLOSE_STATUS_CODE_RESERVED1 = 1004
+   """RESERVED"""
+
+   CLOSE_STATUS_CODE_NULL = 1005 # MUST NOT be set in close frame!
+   """No status received. (MUST NOT be used as status code when sending a close)."""
+
+   CLOSE_STATUS_CODE_ABNORMAL_CLOSE = 1006 # MUST NOT be set in close frame!
+   """Abnormal close of connection. (MUST NOT be used as status code when sending a close)."""
+
+   CLOSE_STATUS_CODE_INVALID_PAYLOAD = 1007
+   """Invalid frame payload data."""
+
+   CLOSE_STATUS_CODE_POLICY_VIOLATION = 1008
+   """Policy violation."""
+
+   CLOSE_STATUS_CODE_MESSAGE_TOO_BIG = 1009
+   """Message too big."""
+
+   CLOSE_STATUS_CODE_MANDATORY_EXTENSION = 1010
+   """Mandatory extension."""
+
+   CLOSE_STATUS_CODE_INTERNAL_ERROR = 1011
+   """The peer encountered an unexpected condition or internal error."""
+
+   CLOSE_STATUS_CODE_TLS_HANDSHAKE_FAILED = 1015 # MUST NOT be set in close frame!
+   """TLS handshake failed, i.e. server certificate could not be verified. (MUST NOT be used as status code when sending a close)."""
+
+   CLOSE_STATUS_CODES_ALLOWED = [CLOSE_STATUS_CODE_NORMAL,
+                                 CLOSE_STATUS_CODE_GOING_AWAY,
+                                 CLOSE_STATUS_CODE_PROTOCOL_ERROR,
+                                 CLOSE_STATUS_CODE_UNSUPPORTED_DATA,
+                                 CLOSE_STATUS_CODE_INVALID_PAYLOAD,
+                                 CLOSE_STATUS_CODE_POLICY_VIOLATION,
+                                 CLOSE_STATUS_CODE_MESSAGE_TOO_BIG,
+                                 CLOSE_STATUS_CODE_MANDATORY_EXTENSION,
+                                 CLOSE_STATUS_CODE_INTERNAL_ERROR]
+   """Status codes allowed to send in close."""
+
+
+   def onOpen(self):
+      """
+      Callback when initial WebSocket handshake was completed. Now you may send messages.
+      Default implementation does nothing. Override in derived class.
+
+      Modes: Hybi, Hixie
+      """
+      if self.debugCodePaths:
+         log.msg("WebSocketProtocol.onOpen")
+
+
+   def onMessageBegin(self, opcode):
+      """
+      Callback when receiving a new message has begun. Default implementation will
+      prepare to buffer message frames. Override in derived class.
+
+      Modes: Hybi, Hixie
+
+      :param opcode: Opcode of message.
+      :type opcode: int
+      """
+      self.message_opcode = opcode
+      self.message_data = []
+      self.message_data_total_length = 0
+
+
+   def onMessageFrameBegin(self, length, reserved):
+      """
+      Callback when receiving a new message frame has begun. Default implementation will
+      prepare to buffer message frame data. Override in derived class.
+
+      Modes: Hybi
+
+      :param length: Payload length of message frame which is to be received.
+      :type length: int
+      :param reserved: Reserved bits set in frame (an integer from 0 to 7).
+      :type reserved: int
+      """
+      self.frame_length = length
+      self.frame_reserved = reserved
+      self.frame_data = []
+      self.message_data_total_length += length
+      if not self.failedByMe:
+         if self.maxMessagePayloadSize > 0 and self.message_data_total_length > self.maxMessagePayloadSize:
+            self.wasMaxMessagePayloadSizeExceeded = True
+            self.failConnection(WebSocketProtocol.CLOSE_STATUS_CODE_MESSAGE_TOO_BIG, "message exceeds payload limit of %d octets" % self.maxMessagePayloadSize)
+         elif self.maxFramePayloadSize > 0 and length > self.maxFramePayloadSize:
+            self.wasMaxFramePayloadSizeExceeded = True
+            self.failConnection(WebSocketProtocol.CLOSE_STATUS_CODE_POLICY_VIOLATION, "frame exceeds payload limit of %d octets" % self.maxFramePayloadSize)
+
+
+   def onMessageFrameData(self, payload):
+      """
+      Callback when receiving data within a message frame. Default implementation will
+      buffer data for frame. Override in derived class.
+
+      Modes: Hybi, Hixie
+
+      Notes:
+        - For Hixie mode, this method is slightly misnamed for historic reasons.
+
+      :param payload: Partial payload for message frame.
+      :type payload: str
+      """
+      if not self.failedByMe:
+         if self.websocket_version == 0:
+            self.message_data_total_length += len(payload)
+            if self.maxMessagePayloadSize > 0 and self.message_data_total_length > self.maxMessagePayloadSize:
+               self.wasMaxMessagePayloadSizeExceeded = True
+               self.failConnection(WebSocketProtocol.CLOSE_STATUS_CODE_MESSAGE_TOO_BIG, "message exceeds payload limit of %d octets" % self.maxMessagePayloadSize)
+            self.message_data.append(payload)
+         else:
+            self.frame_data.append(payload)
+
+
+   def onMessageFrameEnd(self):
+      """
+      Callback when a message frame has been completely received. Default implementation
+      will flatten the buffered frame data and callback onMessageFrame. Override
+      in derived class.
+
+      Modes: Hybi
+      """
+      if not self.failedByMe:
+         self.onMessageFrame(self.frame_data, self.frame_reserved)
+
+      self.frame_data = None
+
+
+   def onMessageFrame(self, payload, reserved):
+      """
+      Callback fired when a complete message frame has been received. Default implementation
+      will buffer frame for message. Override in derived class.
+
+      Modes: Hybi
+
+      :param payload: Message frame payload.
+      :type payload: list of str
+      :param reserved: Reserved bits set in frame (an integer from 0 to 7).
+      :type reserved: int
+      """
+      if not self.failedByMe:
+         self.message_data.extend(payload)
+
+
+   def onMessageEnd(self):
+      """
+      Callback when a message has been completely received. Default implementation
+      will flatten the buffered frames and callback onMessage. Override
+      in derived class.
+
+      Modes: Hybi, Hixie
+      """
+      if not self.failedByMe:
+         payload = ''.join(self.message_data)
+         if self.trackedTimings:
+            self.trackedTimings.track("onMessage")
+         self.onMessage(payload, self.message_opcode == WebSocketProtocol.MESSAGE_TYPE_BINARY)
+
+      self.message_data = None
+
+
+   def onMessage(self, payload, binary):
+      """
+      Callback when a complete message was received. Default implementation does nothing.
+      Override in derived class.
+
+      Modes: Hybi, Hixie
+
+      :param payload: Message payload (UTF-8 encoded text string or binary string). Can also be an empty string, when message contained no payload.
+      :type payload: str
+      :param binary: If True, payload is binary, otherwise text.
+      :type binary: bool
+      """
+      if self.debug:
+         log.msg("WebSocketProtocol.onMessage")
+
+
+   def onPing(self, payload):
+      """
+      Callback when Ping was received. Default implementation responds
+      with a Pong. Override in derived class.
+
+      Modes: Hybi
+
+      :param payload: Payload of Ping, when there was any. Can be arbitrary, up to 125 octets.
+      :type payload: str
+      """
+      if self.debug:
+         log.msg("WebSocketProtocol.onPing")
+      if self.state == WebSocketProtocol.STATE_OPEN:
+         self.sendPong(payload)
+
+
+   def onPong(self, payload):
+      """
+      Callback when Pong was received. Default implementation does nothing.
+      Override in derived class.
+
+      Modes: Hybi
+
+      :param payload: Payload of Pong, when there was any. Can be arbitrary, up to 125 octets.
+      """
+      if self.debug:
+         log.msg("WebSocketProtocol.onPong")
+
+
+   def onClose(self, wasClean, code, reason):
+      """
+      Callback when the connection has been closed. Override in derived class.
+
+      Modes: Hybi, Hixie
+
+      :param wasClean: True, iff the connection was closed cleanly.
+      :type wasClean: bool
+      :param code: None or close status code (sent by peer), if there was one (:class:`WebSocketProtocol`.CLOSE_STATUS_CODE_*).
+      :type code: int
+      :param reason: None or close reason (sent by peer) (when present, a status code MUST also have been present).
+      :type reason: str
+      """
+      if self.debugCodePaths:
+         s = "WebSocketProtocol.onClose:\n"
+         s += "wasClean=%s\n" % wasClean
+         s += "code=%s\n" % code
+         s += "reason=%s\n" % reason
+         s += "self.closedByMe=%s\n" % self.closedByMe
+         s += "self.failedByMe=%s\n" % self.failedByMe
+         s += "self.droppedByMe=%s\n" % self.droppedByMe
+         s += "self.wasClean=%s\n" % self.wasClean
+         s += "self.wasNotCleanReason=%s\n" % self.wasNotCleanReason
+         s += "self.localCloseCode=%s\n" % self.localCloseCode
+         s += "self.localCloseReason=%s\n" % self.localCloseReason
+         s += "self.remoteCloseCode=%s\n" % self.remoteCloseCode
+         s += "self.remoteCloseReason=%s\n" % self.remoteCloseReason
+         log.msg(s)
+
+
+   def onCloseFrame(self, code, reasonRaw):
+      """
+      Callback when a Close frame was received. The default implementation answers by
+      sending a Close when no Close was sent before. Otherwise it drops
+      the TCP connection either immediately (when we are a server) or after a timeout
+      (when we are a client and expect the server to drop the TCP).
+
+      Modes: Hybi, Hixie
+
+      Notes:
+        - For Hixie mode, this method is slightly misnamed for historic reasons.
+        - For Hixie mode, code and reasonRaw are silently ignored.
+
+      :param code: None or close status code, if there was one (:class:`WebSocketProtocol`.CLOSE_STATUS_CODE_*).
+      :type code: int
+      :param reasonRaw: None or close reason (when present, a status code MUST also have been present).
+      :type reasonRaw: str
+      """
+      if self.debugCodePaths:
+         log.msg("WebSocketProtocol.onCloseFrame")
+
+      self.remoteCloseCode = code
+      self.remoteCloseReason = reasonRaw
+
+      ## reserved close codes: 0-999, 1004, 1005, 1006, 1011-2999, >= 5000
+      ##
+      if code is not None and (code < 1000 or (code >= 1000 and code <= 2999 and code not in WebSocketProtocol.CLOSE_STATUS_CODES_ALLOWED) or code >= 5000):
+         if self.protocolViolation("invalid close code %d" % code):
+            return True
+
+      ## closing reason
+      ##
+      if reasonRaw is not None:
+         ## we use our own UTF-8 validator to get consistent and fully conformant
+         ## UTF-8 validation behavior
+         u = Utf8Validator()
+         val = u.validate(reasonRaw)
+         if not val[0]:
+            if self.invalidPayload("invalid close reason (non-UTF-8 payload)"):
+               return True
+
+      if self.state == WebSocketProtocol.STATE_CLOSING:
+         ## We already initiated the closing handshake, so this
+         ## is the peer's reply to our close frame.
+
+         ## cancel any closing HS timer if present
+         ##
+         if self.closeHandshakeTimeoutCall is not None:
+            if self.debugCodePaths:
+               log.msg("closeHandshakeTimeoutCall.cancel")
+            self.closeHandshakeTimeoutCall.cancel()
+            self.closeHandshakeTimeoutCall = None
+
+         self.wasClean = True
+
+         if self.isServer:
+            ## When we are a server, we immediately drop the TCP.
+            self.dropConnection(abort = True)
+         else:
+            ## When we are a client, the server should drop the TCP
+            ## If that doesn't happen, we do. And that will set wasClean = False.
+            if self.serverConnectionDropTimeout > 0:
+               self.serverConnectionDropTimeoutCall = reactor.callLater(self.serverConnectionDropTimeout, self.onServerConnectionDropTimeout)
+
+      elif self.state == WebSocketProtocol.STATE_OPEN:
+         ## The peer initiates a closing handshake, so we reply
+         ## by sending close frame.
+
+         self.wasClean = True
+
+         if self.websocket_version == 0:
+            self.sendCloseFrame(isReply = True)
+         else:
+            ## Either reply with same code/reason, or code == NORMAL/reason=None
+            if self.echoCloseCodeReason:
+               self.sendCloseFrame(code = code, reasonUtf8 = reasonRaw, isReply = True)
+            else:
+               self.sendCloseFrame(code = WebSocketProtocol.CLOSE_STATUS_CODE_NORMAL, isReply = True)
+
+         if self.isServer:
+            ## When we are a server, we immediately drop the TCP.
+            self.dropConnection(abort = False)
+         else:
+            ## When we are a client, we expect the server to drop the TCP,
+            ## and when the server fails to do so, a timeout in sendCloseFrame()
+            ## will set wasClean = False back again.
+            pass
+
+      else:
+         ## STATE_CONNECTING, STATE_CLOSED
+         raise Exception("logic error")
+
+
+   def onServerConnectionDropTimeout(self):
+      """
+      We (a client) expected the peer (a server) to drop the connection,
+      but it didn't (within self.serverConnectionDropTimeout).
+      So we drop the connection, but set self.wasClean = False.
+
+      Modes: Hybi, Hixie
+      """
+      self.serverConnectionDropTimeoutCall = None
+      if self.state != WebSocketProtocol.STATE_CLOSED:
+         if self.debugCodePaths:
+            log.msg("onServerConnectionDropTimeout")
+         self.wasClean = False
+         self.wasNotCleanReason = "server did not drop TCP connection (in time)"
+         self.wasServerConnectionDropTimeout = True
+         self.dropConnection(abort = True)
+      else:
+         if self.debugCodePaths:
+            log.msg("skipping onServerConnectionDropTimeout since connection is already closed")
+
+
+   def onOpenHandshakeTimeout(self):
+      """
+      We expected the peer to complete the opening handshake with us.
+      It didn't do so (within self.openHandshakeTimeout).
+      So we drop the connection, but set self.wasClean = False.
+
+      Modes: Hybi, Hixie
+      """
+      self.openHandshakeTimeoutCall = None
+      if self.state == WebSocketProtocol.STATE_CONNECTING:
+         if self.debugCodePaths:
+            log.msg("onOpenHandshakeTimeout fired")
+         self.wasClean = False
+         self.wasNotCleanReason = "peer did not finish (in time) the opening handshake"
+         self.wasOpenHandshakeTimeout = True
+         self.dropConnection(abort = True)
+      elif self.state == WebSocketProtocol.STATE_OPEN:
+         if self.debugCodePaths:
+            log.msg("skipping onOpenHandshakeTimeout since WebSocket connection is open (opening handshake already finished)")
+      elif self.state == WebSocketProtocol.STATE_CLOSING:
+         if self.debugCodePaths:
+            log.msg("skipping onOpenHandshakeTimeout since WebSocket connection is closing")
+      elif self.state == WebSocketProtocol.STATE_CLOSED:
+         if self.debugCodePaths:
+            log.msg("skipping onOpenHandshakeTimeout since WebSocket connection already closed")
+      else:
+         # should not arrive here
+         raise Exception("logic error")
+
+
+   def onCloseHandshakeTimeout(self):
+      """
+      We expected the peer to respond to us initiating a close handshake. It didn't
+      respond (within self.closeHandshakeTimeout) with a close response frame though.
+      So we drop the connection, but set self.wasClean = False.
+
+      Modes: Hybi, Hixie
+      """
+      self.closeHandshakeTimeoutCall = None
+      if self.state != WebSocketProtocol.STATE_CLOSED:
+         if self.debugCodePaths:
+            log.msg("onCloseHandshakeTimeout fired")
+         self.wasClean = False
+         self.wasNotCleanReason = "peer did not respond (in time) in closing handshake"
+         self.wasCloseHandshakeTimeout = True
+         self.dropConnection(abort = True)
+      else:
+         if self.debugCodePaths:
+            log.msg("skipping onCloseHandshakeTimeout since connection is already closed")
+
+
+   def dropConnection(self, abort = False):
+      """
+      Drop the underlying TCP connection. For abort parameter, see:
+
+        * http://twistedmatrix.com/documents/current/core/howto/servers.html#auto2
+        * https://github.com/tavendo/AutobahnPython/issues/96
+
+      Modes: Hybi, Hixie
+      """
+      if self.state != WebSocketProtocol.STATE_CLOSED:
+         if self.debugCodePaths:
+            log.msg("dropping connection")
+         self.droppedByMe = True
+         self.state = WebSocketProtocol.STATE_CLOSED
+
+         if abort:
+            self.transport.abortConnection()
+         else:
+            self.transport.loseConnection()
+      else:
+         if self.debugCodePaths:
+            log.msg("skipping dropConnection since connection is already closed")
+
+
+   def failConnection(self, code = CLOSE_STATUS_CODE_GOING_AWAY, reason = "Going Away"):
+      """
+      Fails the WebSocket connection.
+
+      Modes: Hybi, Hixie
+
+      Notes:
+        - For Hixie mode, the code and reason are silently ignored.
+      """
+      if self.state != WebSocketProtocol.STATE_CLOSED:
+         if self.debugCodePaths:
+            log.msg("Failing connection : %s - %s" % (code, reason))
+         self.failedByMe = True
+         if self.failByDrop:
+            ## brutally drop the TCP connection
+            self.wasClean = False
+            self.wasNotCleanReason = "I failed the WebSocket connection by dropping the TCP connection"
+            self.dropConnection(abort = True)
+         else:
+            ## perform WebSocket closing handshake
+            if self.state != WebSocketProtocol.STATE_CLOSING:
+               self.sendCloseFrame(code = code, reasonUtf8 = reason.encode("UTF-8"), isReply = False)
+            else:
+               if self.debugCodePaths:
+                  log.msg("skipping failConnection since connection is already closing")
+      else:
+         if self.debugCodePaths:
+            log.msg("skipping failConnection since connection is already closed")
+
+
+   def protocolViolation(self, reason):
+      """
+      Fired when a WebSocket protocol violation/error occurs.
+
+      Modes: Hybi, Hixie
+
+      Notes:
+        - For Hixie mode, reason is silently ignored.
+
+      :param reason: Protocol violation that was encountered (human readable).
+      :type reason: str
+
+      :returns: bool -- True, when any further processing should be discontinued.
+      """
+      if self.debugCodePaths:
+         log.msg("Protocol violation : %s" % reason)
+      self.failConnection(WebSocketProtocol.CLOSE_STATUS_CODE_PROTOCOL_ERROR, reason)
+      if self.failByDrop:
+         return True
+      else:
+         ## if we don't immediately drop the TCP, we need to skip the invalid frame
+         ## to continue to later receive the closing handshake reply
+         return False
+
+
+   def invalidPayload(self, reason):
+      """
+      Fired when invalid payload is encountered. Currently, this only happens
+      for text message when payload is invalid UTF-8 or close frames with
+      close reason that is invalid UTF-8.
+
+      Modes: Hybi, Hixie
+
+      Notes:
+        - For Hixie mode, reason is silently ignored.
+
+      :param reason: What was invalid for the payload (human readable).
+      :type reason: str
+
+      :returns: bool -- True, when any further processing should be discontinued.
+      """
+      if self.debugCodePaths:
+         log.msg("Invalid payload : %s" % reason)
+      self.failConnection(WebSocketProtocol.CLOSE_STATUS_CODE_INVALID_PAYLOAD, reason)
+      if self.failByDrop:
+         return True
+      else:
+         ## if we don't immediately drop the TCP, we need to skip the invalid frame
+         ## to continue to later receive the closing handshake reply
+         return False
+
+
+   def setTrackTimings(self, enable):
+      """
+      Enable/disable tracking of detailed timings.
+
+      :param enable: Turn time tracking on/off.
+      :type enable: bool
+      """
+      if not hasattr(self, 'trackTimings') or self.trackTimings != enable:
+         self.trackTimings = enable
+         if self.trackTimings:
+            self.trackedTimings = Timings()
+         else:
+            self.trackedTimings = None
+
+
+   def doTrack(self, msg):
+      if not hasattr(self, 'trackTimings') or not self.trackTimings:
+         return
+      self.trackedTimings.track(msg)
+
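+   ## Illustrative usage sketch (not part of the upstream code): enabling timing
+   ## tracking on a protocol instance and recording a custom timing point. The
+   ## label "custom event" is made up for illustration; afterwards
+   ## proto.trackedTimings holds the accumulated Timings instance.
+   ##
+   ##    proto.setTrackTimings(True)
+   ##    proto.doTrack("custom event")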
+
+   def connectionMade(self):
+      """
+      This is called by the Twisted framework when a new TCP connection has been
+      established and handed over to a Protocol instance (an instance of this class).
+
+      Modes: Hybi, Hixie
+      """
+
+      ## copy default options from factory (so we are not affected by changes to those)
+      ##
+
+      self.debug = self.factory.debug
+      self.debugCodePaths = self.factory.debugCodePaths
+
+      self.logOctets = self.factory.logOctets
+      self.logFrames = self.factory.logFrames
+
+      self.setTrackTimings(self.factory.trackTimings)
+
+      self.allowHixie76 = self.factory.allowHixie76
+      self.utf8validateIncoming = self.factory.utf8validateIncoming
+      self.applyMask = self.factory.applyMask
+      self.maxFramePayloadSize = self.factory.maxFramePayloadSize
+      self.maxMessagePayloadSize = self.factory.maxMessagePayloadSize
+      self.autoFragmentSize = self.factory.autoFragmentSize
+      self.failByDrop = self.factory.failByDrop
+      self.echoCloseCodeReason = self.factory.echoCloseCodeReason
+      self.openHandshakeTimeout = self.factory.openHandshakeTimeout
+      self.closeHandshakeTimeout = self.factory.closeHandshakeTimeout
+      self.tcpNoDelay = self.factory.tcpNoDelay
+
+      if self.isServer:
+         self.versions = self.factory.versions
+         self.webStatus = self.factory.webStatus
+         self.requireMaskedClientFrames = self.factory.requireMaskedClientFrames
+         self.maskServerFrames = self.factory.maskServerFrames
+      else:
+         self.version = self.factory.version
+         self.acceptMaskedServerFrames = self.factory.acceptMaskedServerFrames
+         self.maskClientFrames = self.factory.maskClientFrames
+         self.serverConnectionDropTimeout = self.factory.serverConnectionDropTimeout
+
+      ## Set "Nagle"
+      self.transport.setTcpNoDelay(self.tcpNoDelay)
+
+      ## the peer we are connected to
+      self.peer = self.transport.getPeer()
+      self.peerstr = "%s:%d" % (self.peer.host, self.peer.port)
+
+      ## initial state
+      self.state = WebSocketProtocol.STATE_CONNECTING
+      self.send_state = WebSocketProtocol.SEND_STATE_GROUND
+      self.data = ""
+
+      ## for chopped/synched sends, we need a queue to maintain
+      ## ordering when recalling the reactor to actually "force"
+      ## the octets onto the wire (see test/trickling in the repo)
+      self.send_queue = deque()
+      self.triggered = False
+
+      ## incremental UTF8 validator
+      self.utf8validator = Utf8Validator()
+
+      ## track when frame/message payload sizes (incoming) were exceeded
+      self.wasMaxFramePayloadSizeExceeded = False
+      self.wasMaxMessagePayloadSizeExceeded = False
+
+      ## the following vars are related to connection close handling/tracking
+
+      # True, iff I have initiated closing HS (that is, did send close first)
+      self.closedByMe = False
+
+      # True, iff I have failed the WS connection (i.e. due to protocol error)
+      # Failing can be either by initiating close HS or brutal drop (this is
+      # controlled by failByDrop option)
+      self.failedByMe = False
+
+      # True, iff I dropped the TCP connection (called transport.loseConnection())
+      self.droppedByMe = False
+
+      # True, iff full WebSocket closing handshake was performed (close frame sent
+      # and received) _and_ the server dropped the TCP (which is its responsibility)
+      self.wasClean = False
+
+      # When self.wasClean = False, the reason (what happened)
+      self.wasNotCleanReason = None
+
+      # When we are a client, and we expected the server to drop the TCP, but that
+      # didn't happen in time, this gets True
+      self.wasServerConnectionDropTimeout = False
+
+      # When the initial WebSocket opening handshake times out, this gets True
+      self.wasOpenHandshakeTimeout = False
+
+      # When we initiated a closing handshake, but the peer did not respond in
+      # time, this gets True
+      self.wasCloseHandshakeTimeout = False
+
+      # The close code I sent in close frame (if any)
+      self.localCloseCode = None
+
+      # The close reason I sent in close frame (if any)
+      self.localCloseReason = None
+
+      # The close code the peer sent me in close frame (if any)
+      self.remoteCloseCode = None
+
+      # The close reason the peer sent me in close frame (if any)
+      self.remoteCloseReason = None
+
+      # timers, which might get set up later, and remembered here to get canceled
+      # when appropriate
+      if not self.isServer:
+         self.serverConnectionDropTimeoutCall = None
+      self.openHandshakeTimeoutCall = None
+      self.closeHandshakeTimeoutCall = None
+
+      # set opening handshake timeout handler
+      if self.openHandshakeTimeout > 0:
+         self.openHandshakeTimeoutCall = reactor.callLater(self.openHandshakeTimeout, self.onOpenHandshakeTimeout)
+
+
+   def connectionLost(self, reason):
+      """
+      This is called by the Twisted framework when a TCP connection has been lost.
+
+      Modes: Hybi, Hixie
+      """
+      ## cancel any server connection drop timer if present
+      ##
+      if not self.isServer and self.serverConnectionDropTimeoutCall is not None:
+         if self.debugCodePaths:
+            log.msg("serverConnectionDropTimeoutCall.cancel")
+         self.serverConnectionDropTimeoutCall.cancel()
+         self.serverConnectionDropTimeoutCall = None
+
+      self.state = WebSocketProtocol.STATE_CLOSED
+      if not self.wasClean:
+         if not self.droppedByMe and self.wasNotCleanReason is None:
+            self.wasNotCleanReason = "peer dropped the TCP connection without previous WebSocket closing handshake"
+         self.onClose(self.wasClean, WebSocketProtocol.CLOSE_STATUS_CODE_ABNORMAL_CLOSE, "connection was closed uncleanly (%s)" % self.wasNotCleanReason)
+      else:
+         self.onClose(self.wasClean, self.remoteCloseCode, self.remoteCloseReason)
+
+
+   def logRxOctets(self, data):
+      """
+      Hook fired right after raw octets have been received, but only when self.logOctets == True.
+
+      Modes: Hybi, Hixie
+      """
+      log.msg("RX Octets from %s : octets = %s" % (self.peerstr, binascii.b2a_hex(data)))
+
+
+   def logTxOctets(self, data, sync):
+      """
+      Hook fired right after raw octets have been sent, but only when self.logOctets == True.
+
+      Modes: Hybi, Hixie
+      """
+      log.msg("TX Octets to %s : sync = %s, octets = %s" % (self.peerstr, sync, binascii.b2a_hex(data)))
+
+
+   def logRxFrame(self, frameHeader, payload):
+      """
+      Hook fired right after a WebSocket frame has been received and decoded, but only when self.logFrames == True.
+
+      Modes: Hybi
+      """
+      data = ''.join(payload)
+      info = (self.peerstr,
+              frameHeader.fin,
+              frameHeader.rsv,
+              frameHeader.opcode,
+              binascii.b2a_hex(frameHeader.mask) if frameHeader.mask else "-",
+              frameHeader.length,
+              data if frameHeader.opcode == 1 else binascii.b2a_hex(data))
+
+      log.msg("RX Frame from %s : fin = %s, rsv = %s, opcode = %s, mask = %s, length = %s, payload = %s" % info)
+
+
+   def logTxFrame(self, frameHeader, payload, repeatLength, chopsize, sync):
+      """
+      Hook fired right after a WebSocket frame has been encoded and sent, but only when self.logFrames == True.
+
+      Modes: Hybi
+      """
+      info = (self.peerstr,
+              frameHeader.fin,
+              frameHeader.rsv,
+              frameHeader.opcode,
+              binascii.b2a_hex(frameHeader.mask) if frameHeader.mask else "-",
+              frameHeader.length,
+              repeatLength,
+              chopsize,
+              sync,
+              payload if frameHeader.opcode == 1 else binascii.b2a_hex(payload))
+
+      log.msg("TX Frame to %s : fin = %s, rsv = %s, opcode = %s, mask = %s, length = %s, repeat_length = %s, chopsize = %s, sync = %s, payload = %s" % info)
+
+
+   def dataReceived(self, data):
+      """
+      This is called by the Twisted framework upon receiving data on the TCP connection.
+
+      Modes: Hybi, Hixie
+      """
+      if self.logOctets:
+         self.logRxOctets(data)
+      self.data += data
+      self.consumeData()
+
+
+   def consumeData(self):
+      """
+      Consume buffered (incoming) data.
+
+      Modes: Hybi, Hixie
+      """
+
+      ## WebSocket is open (handshake was completed) or close was sent
+      ##
+      if self.state == WebSocketProtocol.STATE_OPEN or self.state == WebSocketProtocol.STATE_CLOSING:
+
+         ## process until no more buffered data left or WS was closed
+         ##
+         while self.processData() and self.state != WebSocketProtocol.STATE_CLOSED:
+            pass
+
+      ## WebSocket needs handshake
+      ##
+      elif self.state == WebSocketProtocol.STATE_CONNECTING:
+
+         ## the implementation of processHandshake() in the derived
+         ## class needs to perform the client or server handshake
+         ## with the other party here ..
+         ##
+         self.processHandshake()
+
+      ## we failed the connection .. don't process any more data!
+      ##
+      elif self.state == WebSocketProtocol.STATE_CLOSED:
+
+         ## ignore any data received after WS was closed
+         ##
+         if self.debugCodePaths:
+            log.msg("received data in STATE_CLOSED")
+
+      ## should not arrive here (invalid state)
+      ##
+      else:
+         raise Exception("invalid state")
+
+
+   def processHandshake(self):
+      """
+      Process WebSocket handshake.
+
+      Modes: Hybi, Hixie
+      """
+      raise Exception("must implement handshake (client or server) in derived class")
+
+
+   def registerProducer(self, producer, streaming):
+      """
+      Register a Twisted producer with this protocol.
+
+      Modes: Hybi, Hixie
+
+      :param producer: A Twisted push or pull producer.
+      :type producer: object
+      :param streaming: Producer type.
+      :type streaming: bool
+      """
+      self.transport.registerProducer(producer, streaming)
+
+
+   def _trigger(self):
+      """
+      Trigger sending stuff from send queue (which is only used for chopped/synched writes).
+
+      Modes: Hybi, Hixie
+      """
+      if not self.triggered:
+         self.triggered = True
+         self._send()
+
+
+   def _send(self):
+      """
+      Send out stuff from the send queue. For details on how this works, see test/trickling
+      in the repo.
+
+      Modes: Hybi, Hixie
+      """
+      if len(self.send_queue) > 0:
+         e = self.send_queue.popleft()
+         if self.state != WebSocketProtocol.STATE_CLOSED:
+            self.transport.write(e[0])
+            if self.logOctets:
+               self.logTxOctets(e[0], e[1])
+         else:
+            if self.debugCodePaths:
+               log.msg("skipped delayed write, since connection is closed")
+         # we need to reenter the reactor to make the latter
+         # reenter the OS network stack, so that octets
+         # can get onto the wire. Note: this is a "heuristic",
+         # since there is no (easy) way to really force octets
+         # out of the OS network stack onto the wire.
+         reactor.callLater(WebSocketProtocol._QUEUED_WRITE_DELAY, self._send)
+      else:
+         self.triggered = False
+
+
+   def sendData(self, data, sync = False, chopsize = None):
+      """
+      Wrapper for self.transport.write which allows giving a chopsize.
+      When asked to chop up writing to the TCP stream, we write only chopsize octets
+      and then give up control to select() in the underlying reactor so that bytes
+      get onto the wire immediately. Note that this is different from and unrelated
+      to WebSocket data message fragmentation. Note that this is also different
+      from the TcpNoDelay option which can be set on the socket.
+
+      Modes: Hybi, Hixie
+      """
+      if chopsize and chopsize > 0:
+         i = 0
+         n = len(data)
+         done = False
+         while not done:
+            j = i + chopsize
+            if j >= n:
+               done = True
+               j = n
+            self.send_queue.append((data[i:j], True))
+            i += chopsize
+         self._trigger()
+      else:
+         if sync or len(self.send_queue) > 0:
+            self.send_queue.append((data, sync))
+            self._trigger()
+         else:
+            self.transport.write(data)
+            if self.logOctets:
+               self.logTxOctets(data, False)
+
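+   ## Illustrative sketch (not part of the upstream code): asking sendData() to
+   ## trickle a payload onto the wire in small chunks. With chopsize = 4, the 10
+   ## octets below are queued as 4 + 4 + 2 and written across reactor iterations
+   ## via _send(); this is unrelated to WebSocket message fragmentation.
+   ##
+   ##    proto.sendData("0123456789", chopsize = 4)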
+
+   def sendPreparedMessage(self, preparedMsg):
+      """
+      Send a message that was previously prepared with
+      WebSocketFactory.prepareMessage().
+
+      Modes: Hybi, Hixie
+      """
+      if self.websocket_version == 0:
+         self.sendData(preparedMsg.payloadHixie)
+      else:
+         self.sendData(preparedMsg.payloadHybi)
+
+
+   def processData(self):
+      """
+      After WebSocket handshake has been completed, this procedure will do all
+      subsequent processing of incoming bytes.
+
+      Modes: Hybi, Hixie
+      """
+      if self.websocket_version == 0:
+         return self.processDataHixie76()
+      else:
+         return self.processDataHybi()
+
+
+   def processDataHixie76(self):
+      """
+      Hixie-76 incoming data processing.
+
+      Modes: Hixie
+      """
+      buffered_len = len(self.data)
+
+      ## outside a message, that is, we are awaiting data which starts a new message
+      ##
+      if not self.inside_message:
+         if buffered_len >= 2:
+
+            ## new message
+            ##
+            if self.data[0] == '\x00':
+
+               self.inside_message = True
+
+               if self.utf8validateIncoming:
+                  self.utf8validator.reset()
+                  self.utf8validateIncomingCurrentMessage = True
+                  self.utf8validateLast = (True, True, 0, 0)
+               else:
+                  self.utf8validateIncomingCurrentMessage = False
+
+               self.data = self.data[1:]
+               if self.trackedTimings:
+                  self.trackedTimings.track("onMessageBegin")
+               self.onMessageBegin(1)
+
+            ## Hixie close from peer received
+            ##
+            elif self.data[0] == '\xff' and self.data[1] == '\x00':
+               self.onCloseFrame(None, None)
+               self.data = self.data[2:]
+               # stop receiving/processing after having received close!
+               return False
+
+            ## malformed data
+            ##
+            else:
+               if self.protocolViolation("malformed data received"):
+                  return False
+         else:
+            ## need more data
+            return False
+
+      end_index = self.data.find('\xff')
+      if end_index > 0:
+         payload = self.data[:end_index]
+         self.data = self.data[end_index + 1:]
+      else:
+         payload = self.data
+         self.data = ''
+
+      ## incrementally validate UTF-8 payload
+      ##
+      if self.utf8validateIncomingCurrentMessage:
+         self.utf8validateLast = self.utf8validator.validate(payload)
+         if not self.utf8validateLast[0]:
+            if self.invalidPayload("encountered invalid UTF-8 while processing text message at payload octet index %d" % self.utf8validateLast[3]):
+               return False
+
+      self.onMessageFrameData(payload)
+
+      if end_index > 0:
+         self.inside_message = False
+         self.onMessageEnd()
+
+      return len(self.data) > 0
+
+
+   def processDataHybi(self):
+      """
+      RFC6455/Hybi-Drafts incoming data processing.
+
+      Modes: Hybi
+      """
+      buffered_len = len(self.data)
+
+      ## outside a frame, that is, we are awaiting data which starts a new frame
+      ##
+      if self.current_frame is None:
+
+         ## need a minimum of 2 octets for a new frame
+         ##
+         if buffered_len >= 2:
+
+            ## FIN, RSV, OPCODE
+            ##
+            b = ord(self.data[0])
+            frame_fin = (b & 0x80) != 0
+            frame_rsv = (b & 0x70) >> 4
+            frame_opcode = b & 0x0f
+
+            ## MASK, PAYLOAD LEN 1
+            ##
+            b = ord(self.data[1])
+            frame_masked = (b & 0x80) != 0
+            frame_payload_len1 = b & 0x7f
+
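+            ## Worked example (illustrative, not part of the upstream code): for
+            ## an unmasked, unfragmented text frame with a 5 octet payload the
+            ## first two header octets are '\x81\x05':
+            ##    0x81 -> FIN = True, RSV = 0, opcode = 1 (text)
+            ##    0x05 -> MASK = False, payload length = 5
+            ## For payloads of 126..65535 octets the length byte is 126 and a
+            ## 16 bit extended length follows; above that it is 127 with a
+            ## 64 bit extended length (see frame_header_len computation below).
+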
+            ## MUST be 0 when no extension defining
+            ## the semantics of RSV has been negotiated
+            ##
+            if frame_rsv != 0:
+               if self.protocolViolation("RSV != 0 and no extension negotiated"):
+                  return False
+
+            ## all client-to-server frames MUST be masked
+            ##
+            if self.isServer and self.requireMaskedClientFrames and not frame_masked:
+               if self.protocolViolation("unmasked client-to-server frame"):
+                  return False
+
+            ## all server-to-client frames MUST NOT be masked
+            ##
+            if not self.isServer and not self.acceptMaskedServerFrames and frame_masked:
+               if self.protocolViolation("masked server-to-client frame"):
+                  return False
+
+            ## check frame
+            ##
+            if frame_opcode > 7: # control frame (have MSB in opcode set)
+
+               ## control frames MUST NOT be fragmented
+               ##
+               if not frame_fin:
+                  if self.protocolViolation("fragmented control frame"):
+                     return False
+
+               ## control frames MUST have payload 125 octets or less
+               ##
+               if frame_payload_len1 > 125:
+                  if self.protocolViolation("control frame with payload length > 125 octets"):
+                     return False
+
+               ## check for reserved control frame opcodes
+               ##
+               if frame_opcode not in [8, 9, 10]:
+                  if self.protocolViolation("control frame using reserved opcode %d" % frame_opcode):
+                     return False
+
+               ## close frame : if there is a body, the first two bytes of the body MUST be a 2-byte
+               ## unsigned integer (in network byte order) representing a status code
+               ##
+               if frame_opcode == 8 and frame_payload_len1 == 1:
+                  if self.protocolViolation("received close control frame with payload len 1"):
+                     return False
+
+            else: # data frame
+
+               ## check for reserved data frame opcodes
+               ##
+               if frame_opcode not in [0, 1, 2]:
+                  if self.protocolViolation("data frame using reserved opcode %d" % frame_opcode):
+                     return False
+
+               ## check opcode vs message fragmentation state 1/2
+               ##
+               if not self.inside_message and frame_opcode == 0:
+                  if self.protocolViolation("received continuation data frame outside fragmented message"):
+                     return False
+
+               ## check opcode vs message fragmentation state 2/2
+               ##
+               if self.inside_message and frame_opcode != 0:
+                  if self.protocolViolation("received non-continuation data frame while inside fragmented message"):
+                     return False
+
+            ## compute complete header length
+            ##
+            if frame_masked:
+               mask_len = 4
+            else:
+               mask_len = 0
+
+            if frame_payload_len1 <  126:
+               frame_header_len = 2 + mask_len
+            elif frame_payload_len1 == 126:
+               frame_header_len = 2 + 2 + mask_len
+            elif frame_payload_len1 == 127:
+               frame_header_len = 2 + 8 + mask_len
+            else:
+               raise Exception("logic error")
+
+            ## only proceed when we have enough data buffered for complete
+            ## frame header (which includes extended payload len + mask)
+            ##
+            if buffered_len >= frame_header_len:
+
+               ## minimum frame header length (already consumed)
+               ##
+               i = 2
+
+               ## extract extended payload length
+               ##
+               if frame_payload_len1 == 126:
+                  frame_payload_len = struct.unpack("!H", self.data[i:i+2])[0]
+                  if frame_payload_len < 126:
+                     if self.protocolViolation("invalid data frame length (not using minimal length encoding)"):
+                        return False
+                  i += 2
+               elif frame_payload_len1 == 127:
+                  frame_payload_len = struct.unpack("!Q", self.data[i:i+8])[0]
+                  if frame_payload_len > 0x7FFFFFFFFFFFFFFF: # 2**63
+                     if self.protocolViolation("invalid data frame length (>2^63)"):
+                        return False
+                  if frame_payload_len < 65536:
+                     if self.protocolViolation("invalid data frame length (not using minimal length encoding)"):
+                        return False
+                  i += 8
+               else:
+                  frame_payload_len = frame_payload_len1
+
+               ## when payload is masked, extract frame mask
+               ##
+               frame_mask = None
+               if frame_masked:
+                  frame_mask = self.data[i:i+4]
+                  i += 4
+
+               if frame_masked and frame_payload_len > 0 and self.applyMask:
+                  self.current_frame_masker = createXorMasker(frame_mask, frame_payload_len)
+               else:
+                  self.current_frame_masker = XorMaskerNull()
+
+
+               ## remember rest (payload of current frame after header and everything thereafter)
+               ##
+               self.data = self.data[i:]
+
+               ## ok, got complete frame header
+               ##
+               self.current_frame = FrameHeader(frame_opcode,
+                                                frame_fin,
+                                                frame_rsv,
+                                                frame_payload_len,
+                                                frame_mask)
+
+               ## process begin on new frame
+               ##
+               self.onFrameBegin()
+
+               ## reprocess when frame has no payload or there is buffered data left
+               ##
+               return frame_payload_len == 0 or len(self.data) > 0
+
+            else:
+               return False # need more data
+         else:
+            return False # need more data
+
+      ## inside a started frame
+      ##
+      else:
+
+         ## cut out rest of frame payload
+         ##
+         rest = self.current_frame.length - self.current_frame_masker.pointer()
+         if buffered_len >= rest:
+            data = self.data[:rest]
+            length = rest
+            self.data = self.data[rest:]
+         else:
+            data = self.data
+            length = buffered_len
+            self.data = ""
+
+         if length > 0:
+            ## unmask payload
+            ##
+            payload = self.current_frame_masker.process(data)
+
+            ## process frame data
+            ##
+            fr = self.onFrameData(payload)
+            if fr == False:
+               return False
+
+         ## fire frame end handler when frame payload is complete
+         ##
+         if self.current_frame_masker.pointer() == self.current_frame.length:
+            fr = self.onFrameEnd()
+            if fr == False:
+               return False
+
+         ## reprocess when no error occurred and buffered data left
+         ##
+         return len(self.data) > 0
+
+
+   def onFrameBegin(self):
+      """
+      Begin receiving a new frame.
+
+      Modes: Hybi
+      """
+      if self.current_frame.opcode > 7:
+         self.control_frame_data = []
+      else:
+         ## new message started
+         ##
+         if not self.inside_message:
+
+            self.inside_message = True
+
+            if self.current_frame.opcode == WebSocketProtocol.MESSAGE_TYPE_TEXT and self.utf8validateIncoming:
+               self.utf8validator.reset()
+               self.utf8validateIncomingCurrentMessage = True
+               self.utf8validateLast = (True, True, 0, 0)
+            else:
+               self.utf8validateIncomingCurrentMessage = False
+
+            if self.trackedTimings:
+               self.trackedTimings.track("onMessageBegin")
+            self.onMessageBegin(self.current_frame.opcode)
+
+         self.onMessageFrameBegin(self.current_frame.length, self.current_frame.rsv)
+
+
+   def onFrameData(self, payload):
+      """
+      New data received within frame.
+
+      Modes: Hybi
+      """
+      if self.current_frame.opcode > 7:
+         self.control_frame_data.append(payload)
+      else:
+         ## incrementally validate UTF-8 payload
+         ##
+         if self.utf8validateIncomingCurrentMessage:
+            self.utf8validateLast = self.utf8validator.validate(payload)
+            if not self.utf8validateLast[0]:
+               if self.invalidPayload("encountered invalid UTF-8 while processing text message at payload octet index %d" % self.utf8validateLast[3]):
+                  return False
+
+         self.onMessageFrameData(payload)
+
+
+   def onFrameEnd(self):
+      """
+      End of frame received.
+
+      Modes: Hybi
+      """
+      if self.current_frame.opcode > 7:
+         if self.logFrames:
+            self.logRxFrame(self.current_frame, self.control_frame_data)
+         self.processControlFrame()
+      else:
+         if self.logFrames:
+            self.logRxFrame(self.current_frame, self.frame_data)
+         self.onMessageFrameEnd()
+         if self.current_frame.fin:
+            if self.utf8validateIncomingCurrentMessage:
+               if not self.utf8validateLast[1]:
+                  if self.invalidPayload("UTF-8 text message payload ended within Unicode code point at payload octet index %d" % self.utf8validateLast[3]):
+                     return False
+            self.onMessageEnd()
+            self.inside_message = False
+      self.current_frame = None
+
+
+   def processControlFrame(self):
+      """
+      Process a completely received control frame.
+
+      Modes: Hybi
+      """
+
+      payload = ''.join(self.control_frame_data)
+      self.control_frame_data = None
+
+      ## CLOSE frame
+      ##
+      if self.current_frame.opcode == 8:
+
+         code = None
+         reasonRaw = None
+         ll = len(payload)
+         if ll > 1:
+            code = struct.unpack("!H", payload[0:2])[0]
+            if ll > 2:
+               reasonRaw = payload[2:]
+
+         if self.onCloseFrame(code, reasonRaw):
+            return False
+
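+      ## Illustrative example (not part of the upstream code): a close frame
+      ## payload of '\x03\xe8bye' unpacks above to code = 1000 (0x03e8, normal
+      ## closure) and reasonRaw = 'bye'.
+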
+      ## PING frame
+      ##
+      elif self.current_frame.opcode == 9:
+         self.onPing(payload)
+
+      ## PONG frame
+      ##
+      elif self.current_frame.opcode == 10:
+         self.onPong(payload)
+
+      else:
+         ## we might arrive here, when protocolViolation
+         ## wants us to continue anyway
+         pass
+
+      return True
+
+
+   def sendFrame(self, opcode, payload = "", fin = True, rsv = 0, mask = None, payload_len = None, chopsize = None, sync = False):
+      """
+      Send out frame. Normally only used internally via sendMessage(), sendPing(), sendPong() and sendClose().
+
+      This method deliberately allows sending invalid frames (that is, frames invalid
+      per se, or frames invalid because of protocol state). Other than in fuzzing servers,
+      calling methods will ensure that no invalid frames are sent.
+
+      In addition, this method supports explicit specification of payload length.
+      When payload_len is given, it will always write that many octets to the stream.
+      It will wrap around the payload, repeating parts of it when more octets are
+      requested than the payload contains. The use case is again fuzzing servers which
+      want to send increasing amounts of payload data to peers without having to
+      construct potentially large messages themselves.
+
+      Modes: Hybi
+      """
+      if self.websocket_version == 0:
+         raise Exception("function not supported in Hixie-76 mode")
+
+      if payload_len is not None:
+         if len(payload) < 1:
+            raise Exception("cannot construct repeated payload with length %d from payload of length %d" % (payload_len, len(payload)))
+         l = payload_len
+         pl = ''.join([payload for k in range(payload_len / len(payload))]) + payload[:payload_len % len(payload)]
+      else:
+         l = len(payload)
+         pl = payload
+
+      ## first byte
+      ##
+      b0 = 0
+      if fin:
+         b0 |= (1 << 7)
+      b0 |= (rsv % 8) << 4
+      b0 |= opcode % 128
+
+      ## second byte, payload len bytes and mask
+      ##
+      b1 = 0
+      if mask or (not self.isServer and self.maskClientFrames) or (self.isServer and self.maskServerFrames):
+         b1 |= 1 << 7
+         if not mask:
+            mask = struct.pack("!I", random.getrandbits(32))
+            mv = mask
+         else:
+            mv = ""
+
+         ## mask frame payload
+         ##
+         if l > 0 and self.applyMask:
+            masker = createXorMasker(mask, l)
+            plm = masker.process(pl)
+         else:
+            plm = pl
+
+      else:
+         mv = ""
+         plm = pl
+
+      el = ""
+      if l <= 125:
+         b1 |= l
+      elif l <= 0xFFFF:
+         b1 |= 126
+         el = struct.pack("!H", l)
+      elif l <= 0x7FFFFFFFFFFFFFFF:
+         b1 |= 127
+         el = struct.pack("!Q", l)
+      else:
+         raise Exception("invalid payload length")
+
+      raw = ''.join([chr(b0), chr(b1), el, mv, plm])
+
+      if self.logFrames:
+         frameHeader = FrameHeader(opcode, fin, rsv, l, mask)
+         self.logTxFrame(frameHeader, payload, payload_len, chopsize, sync)
+
+      ## send frame octets
+      ##
+      self.sendData(raw, sync, chopsize)
+
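+   ## Illustrative sketch (not part of the upstream code) of the payload_len
+   ## wrapping described in the sendFrame() docstring: with payload = "AB" and
+   ## payload_len = 5, the 5 payload octets carried are "ABABA" (before any
+   ## masking), i.e. the payload is repeated and truncated to the requested
+   ## length. This is only useful for fuzzing/testing peers.
+   ##
+   ##    proto.sendFrame(opcode = 1, payload = "AB", payload_len = 5)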
+
+   def sendPing(self, payload = None):
+      """
+      Send out Ping to peer. A peer is expected to Pong back the payload as soon
+      as "practical". When more than 1 Ping is outstanding at a peer, the peer may
+      elect to respond only to the last Ping.
+
+      Modes: Hybi
+
+      :param payload: An optional, arbitrary payload of length < 126 octets.
+      :type payload: str
+      """
+      if self.websocket_version == 0:
+         raise Exception("function not supported in Hixie-76 mode")
+      if self.state != WebSocketProtocol.STATE_OPEN:
+         return
+      if payload:
+         l = len(payload)
+         if l > 125:
+            raise Exception("invalid payload for PING (payload length must be <= 125, was %d)" % l)
+         self.sendFrame(opcode = 9, payload = payload)
+      else:
+         self.sendFrame(opcode = 9)
+
+
+   def sendPong(self, payload = None):
+      """
+      Send out Pong to peer. A Pong frame MAY be sent unsolicited.
+      This serves as a unidirectional heartbeat. A response to an unsolicited pong is "not expected".
+
+      Modes: Hybi
+
+      :param payload: An optional, arbitrary payload of length < 126 octets.
+      :type payload: str
+      """
+      if self.websocket_version == 0:
+         raise Exception("function not supported in Hixie-76 mode")
+      if self.state != WebSocketProtocol.STATE_OPEN:
+         return
+      if payload:
+         l = len(payload)
+         if l > 125:
+            raise Exception("invalid payload for PONG (payload length must be <= 125, was %d)" % l)
+         self.sendFrame(opcode = 10, payload = payload)
+      else:
+         self.sendFrame(opcode = 10)
+
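+   ## Illustrative usage sketch (not part of the upstream code): the peer is
+   ## expected to echo a Ping payload back, which then arrives via the onPong()
+   ## hook invoked from processControlFrame() above.
+   ##
+   ##    self.sendPing("keepalive")   # peer should answer with Pong("keepalive")
+   ##    self.sendPong()              # unsolicited heartbeat, no reply expected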
+
+   def sendCloseFrame(self, code = None, reasonUtf8 = None, isReply = False):
+      """
+      Send a close frame and update protocol state. Note that this is
+      an internal method which deliberately allows sending a close
+      frame with an invalid payload.
+
+      Modes: Hybi, Hixie
+
+      Notes:
+        - For Hixie mode, this method is slightly misnamed for historic reasons.
+        - For Hixie mode, code and reasonUtf8 will be silently ignored.
+      """
+      if self.state == WebSocketProtocol.STATE_CLOSING:
+         if self.debugCodePaths:
+            log.msg("ignoring sendCloseFrame since connection is closing")
+
+      elif self.state == WebSocketProtocol.STATE_CLOSED:
+         if self.debugCodePaths:
+            log.msg("ignoring sendCloseFrame since connection already closed")
+
+      elif self.state == WebSocketProtocol.STATE_CONNECTING:
+         raise Exception("cannot close a connection not yet connected")
+
+      elif self.state == WebSocketProtocol.STATE_OPEN:
+
+         if self.websocket_version == 0:
+            self.sendData("\xff\x00")
+         else:
+            ## construct Hybi close frame payload and send frame
+            payload = ""
+            if code is not None:
+               payload += struct.pack("!H", code)
+            if reasonUtf8 is not None:
+               payload += reasonUtf8
+            self.sendFrame(opcode = 8, payload = payload)
+
+         ## update state
+         self.state = WebSocketProtocol.STATE_CLOSING
+         self.closedByMe = not isReply
+
+         ## remember payload of close frame we sent
+         self.localCloseCode = code
+         self.localCloseReason = reasonUtf8
+
+         ## drop connection when timeout on receiving close handshake reply
+         if self.closedByMe and self.closeHandshakeTimeout > 0:
+            self.closeHandshakeTimeoutCall = reactor.callLater(self.closeHandshakeTimeout, self.onCloseHandshakeTimeout)
+
+      else:
+         raise Exception("logic error")
+
+
+   def sendClose(self, code = None, reason = None):
+      """
+      Starts a closing handshake.
+
+      Modes: Hybi, Hixie
+
+      Notes:
+        - For Hixie mode, code and reason will be silently ignored.
+
+      :param code: An optional close status code (:class:`WebSocketProtocol`.CLOSE_STATUS_CODE_NORMAL or 3000-4999).
+      :type code: int
+      :param reason: An optional close reason (a string; when present, a status code MUST also be present).
+      :type reason: str
+      """
+      if code is not None:
+         if type(code) != int:
+            raise Exception("invalid type %s for close code" % type(code))
+         if code != 1000 and not (code >= 3000 and code <= 4999):
+            raise Exception("invalid close code %d" % code)
+      if reason is not None:
+         if code is None:
+            raise Exception("close reason without close code")
+         if type(reason) not in [str, unicode]:
+            raise Exception("invalid type %s for close reason" % type(reason))
+         reasonUtf8 = reason.encode("UTF-8")
+         if len(reasonUtf8) + 2 > 125:
+            raise Exception("close reason too long (%d)" % len(reasonUtf8))
+      else:
+         reasonUtf8 = None
+      self.sendCloseFrame(code = code, reasonUtf8 = reasonUtf8, isReply = False)
+
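+   ## Illustrative usage sketch (not part of the upstream code): the validation
+   ## above only accepts code 1000 or 3000-4999, and a reason only together with
+   ## a code (the UTF-8 encoded reason plus 2 code octets must fit in 125).
+   ##
+   ##    proto.sendClose(1000, "goodbye")        # clean close
+   ##    proto.sendClose(4000, "app specific")   # application-defined code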
+
+   def beginMessage(self, opcode = MESSAGE_TYPE_TEXT):
+      """
+      Begin sending a new message.
+
+      Modes: Hybi, Hixie
+
+      :param opcode: Message type, normally either WebSocketProtocol.MESSAGE_TYPE_TEXT (default) or
+                     WebSocketProtocol.MESSAGE_TYPE_BINARY (only Hybi mode).
+      """
+      if self.state != WebSocketProtocol.STATE_OPEN:
+         return
+
+      ## check if sending state is valid for this method
+      ##
+      if self.send_state != WebSocketProtocol.SEND_STATE_GROUND:
+         raise Exception("WebSocketProtocol.beginMessage invalid in current sending state")
+
+      if self.websocket_version == 0:
+         if opcode != 1:
+            raise Exception("cannot send non-text message in Hixie mode")
+
+         self.sendData('\x00')
+         self.send_state = WebSocketProtocol.SEND_STATE_INSIDE_MESSAGE
+      else:
+         if opcode not in [1, 2]:
+            raise Exception("use of reserved opcode %d" % opcode)
+
+         ## remember opcode for later (when sending first frame)
+         ##
+         self.send_message_opcode = opcode
+         self.send_state = WebSocketProtocol.SEND_STATE_MESSAGE_BEGIN
+
+
+   def beginMessageFrame(self, length, reserved = 0, mask = None):
+      """
+      Begin sending a new message frame.
+
+      Modes: Hybi
+
+      :param length: Length of frame which is started. Must be >= 0 and <= 2^63.
+      :type length: int
+      :param reserved: Reserved bits for frame (an integer from 0 to 7). Note that reserved != 0 is only legal when an extension has been negotiated which defines its semantics.
+      :type reserved: int
+      :param mask: Optional frame mask. When given, this is used. When None and the peer is a client, a mask will be internally generated. For servers None is default.
+      :type mask: str
+      """
+      if self.websocket_version == 0:
+         raise Exception("function not supported in Hixie-76 mode")
+
+      if self.state != WebSocketProtocol.STATE_OPEN:
+         return
+      ## check if sending state is valid for this method
+      ##
+      if self.send_state not in [WebSocketProtocol.SEND_STATE_MESSAGE_BEGIN, WebSocketProtocol.SEND_STATE_INSIDE_MESSAGE]:
+         raise Exception("WebSocketProtocol.beginMessageFrame invalid in current sending state")
+
+      if (not type(length) in [int, long]) or length < 0 or length > 0x7FFFFFFFFFFFFFFF: # 2**63
+         raise Exception("invalid value for message frame length")
+
+      if type(reserved) is not int or reserved < 0 or reserved > 7:
+         raise Exception("invalid value for reserved bits")
+
+      self.send_message_frame_length = length
+
+      if mask:
+         ## explicit mask given
+         ##
+         assert type(mask) == str
+         assert len(mask) == 4
+         self.send_message_frame_mask = mask
+
+      elif (not self.isServer and self.maskClientFrames) or (self.isServer and self.maskServerFrames):
+         ## automatic mask:
+         ##  - client-to-server masking (if not deactivated)
+         ##  - server-to-client masking (if activated)
+         ##
+         self.send_message_frame_mask = struct.pack("!I", random.getrandbits(32))
+
+      else:
+         ## no mask
+         ##
+         self.send_message_frame_mask = None
+
+      ## payload masker
+      ##
+      if self.send_message_frame_mask and length > 0 and self.applyMask:
+         self.send_message_frame_masker = createXorMasker(self.send_message_frame_mask, length)
+      else:
+         self.send_message_frame_masker = XorMaskerNull()
+
+      ## first byte
+      ##
+      b0 = (reserved % 8) << 4 # FIN = false .. since with streaming, we don't know when message ends
+
+      if self.send_state == WebSocketProtocol.SEND_STATE_MESSAGE_BEGIN:
+         self.send_state = WebSocketProtocol.SEND_STATE_INSIDE_MESSAGE
+         b0 |= self.send_message_opcode % 128
+      else:
+         pass # message continuation frame
+
+      ## second byte, payload len bytes and mask
+      ##
+      b1 = 0
+      if self.send_message_frame_mask:
+         b1 |= 1 << 7
+         mv = self.send_message_frame_mask
+      else:
+         mv = ""
+
+      el = ""
+      if length <= 125:
+         b1 |= length
+      elif length <= 0xFFFF:
+         b1 |= 126
+         el = struct.pack("!H", length)
+      elif length <= 0x7FFFFFFFFFFFFFFF:
+         b1 |= 127
+         el = struct.pack("!Q", length)
+      else:
+         raise Exception("invalid payload length")
+
+      ## write message frame header
+      ##
+      header = ''.join([chr(b0), chr(b1), el, mv])
+      self.sendData(header)
+
+      ## now we are inside message frame ..
+      ##
+      self.send_state = WebSocketProtocol.SEND_STATE_INSIDE_MESSAGE_FRAME
+
+
+   def sendMessageFrameData(self, payload, sync = False):
+      """
+      Send out data when within a message frame (message was begun, frame was begun).
+      Note that the frame is automatically ended when enough data has been sent;
+      that is, there is no endMessageFrame, since you have begun the frame specifying
+      the frame length, which implicitly defines the frame end. This is different from
+      messages, which you begin and end, since a message can contain an unlimited number
+      of frames.
+
+      Modes: Hybi, Hixie
+
+      Notes:
+        - For Hixie mode, this method is slightly misnamed for historic reasons.
+
+      :param payload: Data to send.
+
+      :returns: int -- Hybi mode: when the frame is still incomplete, returns the number of outstanding octets; when the frame is complete, returns <= 0, where < 0 gives the amount of unconsumed data in the payload argument. Hixie mode: returns None.
+      """
+      if self.state != WebSocketProtocol.STATE_OPEN:
+         return
+
+      if self.websocket_version == 0:
+         ## Hixie Mode
+         ##
+         if self.send_state != WebSocketProtocol.SEND_STATE_INSIDE_MESSAGE:
+            raise Exception("WebSocketProtocol.sendMessageFrameData invalid in current sending state")
+         self.sendData(payload, sync = sync)
+         return None
+
+      else:
+         ## Hybi Mode
+         ##
+         if self.send_state != WebSocketProtocol.SEND_STATE_INSIDE_MESSAGE_FRAME:
+            raise Exception("WebSocketProtocol.sendMessageFrameData invalid in current sending state")
+
+         rl = len(payload)
+         if self.send_message_frame_masker.pointer() + rl > self.send_message_frame_length:
+            l = self.send_message_frame_length - self.send_message_frame_masker.pointer()
+            rest = -(rl - l)
+            pl = payload[:l]
+         else:
+            l = rl
+            rest = self.send_message_frame_length - self.send_message_frame_masker.pointer() - l
+            pl = payload
+
+         ## mask frame payload
+         ##
+         plm = self.send_message_frame_masker.process(pl)
+
+         ## send frame payload
+         ##
+         self.sendData(plm, sync = sync)
+
+         ## if we are done with frame, move back into "inside message" state
+         ##
+         if self.send_message_frame_masker.pointer() >= self.send_message_frame_length:
+            self.send_state = WebSocketProtocol.SEND_STATE_INSIDE_MESSAGE
+
+         ## when =0 : frame was completed exactly
+         ## when >0 : frame is still incomplete and that many octets are still needed to complete the frame
+         ## when <0 : frame was completed and there was that much unconsumed data in the payload argument
+         ##
+         return rest
+
+
+   def endMessage(self):
+      """
+      End a previously begun message. No more frames may be sent (for that message). You have to
+      begin a new message before sending again.
+
+      Modes: Hybi, Hixie
+      """
+      if self.state != WebSocketProtocol.STATE_OPEN:
+         return
+      ## check if sending state is valid for this method
+      ##
+      if self.send_state != WebSocketProtocol.SEND_STATE_INSIDE_MESSAGE:
+         raise Exception("WebSocketProtocol.endMessage invalid in current sending state [%d]" % self.send_state)
+
+      if self.websocket_version == 0:
+         self.sendData('\x00')
+      else:
+         self.sendFrame(opcode = 0, fin = True)
+
+      self.send_state = WebSocketProtocol.SEND_STATE_GROUND
+
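+   ## Illustrative streaming sketch (not part of the upstream code), Hybi mode:
+   ## one message sent as two frames, the second frame delivered in two chunks.
+   ## chunk1, chunk2 and chunk3 are made-up str payloads.
+   ##
+   ##    proto.beginMessage(opcode = WebSocketProtocol.MESSAGE_TYPE_BINARY)
+   ##    proto.sendMessageFrame(chunk1)                       # complete first frame
+   ##    proto.beginMessageFrame(len(chunk2) + len(chunk3))   # second frame, streamed
+   ##    proto.sendMessageFrameData(chunk2)
+   ##    proto.sendMessageFrameData(chunk3)                   # frame ends automatically
+   ##    proto.endMessage()                                   # sends final empty FIN frame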
+
+   def sendMessageFrame(self, payload, reserved = 0, mask = None, sync = False):
+      """
+      When a message has begun, send a complete message frame in one go.
+
+      Modes: Hybi
+      """
+      if self.websocket_version == 0:
+         raise Exception("function not supported in Hixie-76 mode")
+
+      if self.state != WebSocketProtocol.STATE_OPEN:
+         return
+      self.beginMessageFrame(len(payload), reserved, mask)
+      self.sendMessageFrameData(payload, sync)
+
+
+   def sendMessage(self, payload, binary = False, payload_frag_size = None, sync = False):
+      """
+      Send out a message in one go.
+
+      You can send a text or binary message, and optionally specify a payload fragment size.
+      When the latter is given, the payload will be split up into frames with
+      payload <= the payload_frag_size given.
+
+      Modes: Hybi, Hixie
+      """
+      if self.trackedTimings:
+         self.trackedTimings.track("sendMessage")
+      if self.state != WebSocketProtocol.STATE_OPEN:
+         return
+      if self.websocket_version == 0:
+         if binary:
+            raise Exception("cannot send binary message in Hixie76 mode")
+         if payload_frag_size:
+            raise Exception("cannot fragment messages in Hixie76 mode")
+         self.sendMessageHixie76(payload, sync)
+      else:
+         self.sendMessageHybi(payload, binary, payload_frag_size, sync)
+
+
+   def sendMessageHixie76(self, payload, sync = False):
+      """
+      Hixie76-Variant of sendMessage().
+
+      Modes: Hixie
+      """
+      self.sendData('\x00' + payload + '\xff', sync = sync)
+
+
+   def sendMessageHybi(self, payload, binary = False, payload_frag_size = None, sync = False):
+      """
+      Hybi-Variant of sendMessage().
+
+      Modes: Hybi
+      """
+      ## (initial) frame opcode
+      ##
+      if binary:
+         opcode = 2
+      else:
+         opcode = 1
+
+      ## an explicit payload_frag_size argument overrides the autoFragmentSize setting
+      ##
+      if payload_frag_size is not None:
+         pfs = payload_frag_size
+      else:
+         if self.autoFragmentSize > 0:
+            pfs = self.autoFragmentSize
+         else:
+            pfs = None
+
+      ## send unfragmented
+      ##
+      if pfs is None or len(payload) <= pfs:
+         self.sendFrame(opcode = opcode, payload = payload, sync = sync)
+
+      ## send data message in fragments
+      ##
+      else:
+         if pfs < 1:
+            raise Exception("payload fragment size must be at least 1 (was %d)" % pfs)
+         n = len(payload)
+         i = 0
+         done = False
+         first = True
+         while not done:
+            j = i + pfs
+            if j > n:
+               done = True
+               j = n
+            if first:
+               self.sendFrame(opcode = opcode, payload = payload[i:j], fin = done, sync = sync)
+               first = False
+            else:
+               self.sendFrame(opcode = 0, payload = payload[i:j], fin = done, sync = sync)
+            i += pfs
+
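+   ## Illustrative fragmentation example (not part of the upstream code): with
+   ## payload_frag_size = 4096, a 10000 octet payload is sent by sendMessageHybi()
+   ## as three frames of 4096, 4096 and 1808 octets; only the last has FIN set.
+   ##
+   ##    proto.sendMessage("x" * 10000, binary = True, payload_frag_size = 4096)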
+
+
+class PreparedMessage:
+   """
+   Encapsulates a prepared message to be sent later once or multiple
+   times. This is used for optimizing Broadcast/PubSub.
+
+   The message serialization formats currently created internally are:
+      * Hybi
+      * Hixie
+
+   The construction of different formats is needed, since we support
+   mixed clients (speaking different protocol versions).
+
+   It will also be the place to add a 3rd format, when we support
+   the deflate extension, since then, the clients will be mixed
+   between Hybi-Deflate-Unsupported, Hybi-Deflate-Supported and Hixie.
+   """
+
+   def __init__(self, payload, binary, masked):
+      """
+      Ctor for a prepared message.
+
+      :param payload: The message payload.
+      :type payload: str
+      :param binary: Provide `True` for binary payload.
+      :type binary: bool
+      :param masked: Provide `True` if WebSocket message is to be masked (required for client to server WebSocket messages).
+      :type masked: bool
+      """
+      self._initHixie(payload, binary)
+      self._initHybi(payload, binary, masked)
+
+
+   def _initHixie(self, payload, binary):
+      if binary:
+         # silently filter out .. probably do something else:
+         # base64?
+         # dunno
+         self.payloadHixie = ''
+      else:
+         self.payloadHixie = '\x00' + payload + '\xff'
+
+
+   def _initHybi(self, payload, binary, masked):
+      l = len(payload)
+
+      ## first byte
+      ##
+      b0 = ((1 << 7) | 2) if binary else ((1 << 7) | 1)
+
+      ## second byte, payload len bytes and mask
+      ##
+      if masked:
+         b1 = 1 << 7
+         mask = struct.pack("!I", random.getrandbits(32))
+         if l == 0:
+            plm = payload
+         else:
+            plm = createXorMasker(mask, l).process(payload)
+      else:
+         b1 = 0
+         mask = ""
+         plm = payload
+
+      ## payload extended length
+      ##
+      el = ""
+      if l <= 125:
+         b1 |= l
+      elif l <= 0xFFFF:
+         b1 |= 126
+         el = struct.pack("!H", l)
+      elif l <= 0x7FFFFFFFFFFFFFFF:
+         b1 |= 127
+         el = struct.pack("!Q", l)
+      else:
+         raise Exception("invalid payload length")
+
+      ## raw WS message (single frame)
+      ##
+      self.payloadHybi = ''.join([chr(b0), chr(b1), el, mask, plm])
+
+
+
+class WebSocketFactory:
+   """
+   Mixin for
+   :class:`autobahn.websocket.WebSocketClientFactory` and
+   :class:`autobahn.websocket.WebSocketServerFactory`.
+   """
+
+   def prepareMessage(self, payload, binary = False, masked = None):
+      """
+      Prepare a WebSocket message. This can be later used on multiple
+      instances of :class:`autobahn.websocket.WebSocketProtocol` using
+      :meth:`autobahn.websocket.WebSocketProtocol.sendPreparedMessage`.
+
+      By doing so, you can avoid the (small) overhead of framing the
+      *same* payload into WS messages when that payload is to be sent
+      out on multiple connections.
+
+      Caveats:
+
+         1. Only use when you know what you are doing. E.g. calling
+            :meth:`autobahn.websocket.WebSocketProtocol.sendPreparedMessage`
+            on the *same* protocol instance multiple times with the *same*
+            prepared message might break the spec, since e.g. the frame mask
+            will be the same!
+
+         2. Treat the object returned as opaque. It may change!
+
+      Modes: Hybi, Hixie
+
+      :param payload: The message payload.
+      :type payload: str
+      :param binary: Provide `True` for binary payload.
+      :type binary: bool
+      :param masked: Provide `True` if WebSocket message is to be
+                     masked (required for client-to-server WebSocket messages).
+      :type masked: bool
+
+      :returns: obj -- The prepared message.
+      """
+      if masked is None:
+         masked = not self.isServer
+
+      return PreparedMessage(payload, binary, masked)
+
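+   ## Illustrative broadcast sketch (not part of the upstream code): prepare the
+   ## serialization once and send it on many connections. "clients" is a made-up
+   ## list of connected protocol instances maintained by the application; factory
+   ## is a WebSocketServerFactory or WebSocketClientFactory.
+   ##
+   ##    msg = factory.prepareMessage("tick")
+   ##    for proto in clients:
+   ##       proto.sendPreparedMessage(msg)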
+
+
+class WebSocketServerProtocol(WebSocketProtocol):
+   """
+   A Twisted protocol for WebSocket servers.
+   """
+
+   def onConnect(self, connectionRequest):
+      """
+      Callback fired during WebSocket opening handshake when new WebSocket client
+      connection is about to be established.
+
+      Throw HttpException when you don't want to accept the WebSocket
+      connection request. For example, throw a
+      `HttpException(httpstatus.HTTP_STATUS_CODE_UNAUTHORIZED[0], "You are not authorized for this!")`.
+
+      When you want to accept the connection, return the accepted protocol
+      from the list of WebSocket (sub)protocols provided by the client, or None
+      to speak no specific one or when the client's list was empty.
+
+      :param connectionRequest: WebSocket connection request information.
+      :type connectionRequest: instance of :class:`autobahn.websocket.ConnectionRequest`
+      """
+      return None
+
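+   ## Illustrative override sketch (not part of the upstream code), assuming the
+   ## ConnectionRequest object exposes the client's offered subprotocols as
+   ## "protocols":
+   ##
+   ##    class EchoServerProtocol(WebSocketServerProtocol):
+   ##       def onConnect(self, connectionRequest):
+   ##          if "echo" in connectionRequest.protocols:
+   ##             return "echo"
+   ##          return None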
+
+   def connectionMade(self):
+      """
+      Called by Twisted when a new TCP connection from a client has been accepted. Default
+      implementation will prepare for initial WebSocket opening handshake.
+      When overriding in derived class, make sure to call this base class
+      implementation *before* your code.
+      """
+      self.isServer = True
+      WebSocketProtocol.connectionMade(self)
+      self.factory.countConnections += 1
+      if self.debug:
+         log.msg("connection accepted from peer %s" % self.peerstr)
+
+
+   def connectionLost(self, reason):
+      """
+      Called by Twisted when an established TCP connection from a client has been lost. Default
+      implementation will tear down all state properly.
+      When overriding in derived class, make sure to call this base class
+      implementation *after* your code.
+      """
+      WebSocketProtocol.connectionLost(self, reason)
+      self.factory.countConnections -= 1
+      if self.debug:
+         log.msg("connection from %s lost" % self.peerstr)
+
+
+   def parseHixie76Key(self, key):
+      """
+      Parse Hixie76 opening handshake key provided by client.
+      """
+      return int(filter(lambda x: x.isdigit(), key)) / key.count(" ")
+
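+   ## Worked example (illustrative, not part of the upstream code): for a made-up
+   ## key "3 4 12" the embedded digits form 3412 and the key contains 2 spaces,
+   ## so parseHixie76Key() returns 3412 / 2 = 1706.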
+
+   def processHandshake(self):
+      """
+      Process WebSocket opening handshake request from client.
+      """
+      ## only proceed when we have fully received the HTTP request line and all headers
+      ##
+      end_of_header = self.data.find("\x0d\x0a\x0d\x0a")
+      if end_of_header >= 0:
+
+         self.http_request_data = self.data[:end_of_header + 4]
+         if self.debug:
+            log.msg("received HTTP request:\n\n%s\n\n" % self.http_request_data)
+
+         ## extract HTTP status line and headers
+         ##
+         (self.http_status_line, self.http_headers, http_headers_cnt) = parseHttpHeader(self.http_request_data)
+
+         ## validate WebSocket opening handshake client request
+         ##
+         if self.debug:
+            log.msg("received HTTP status line in opening handshake : %s" % str(self.http_status_line))
+            log.msg("received HTTP headers in opening handshake : %s" % str(self.http_headers))
+
+         ## HTTP Request line : METHOD, VERSION
+         ##
+         rl = self.http_status_line.split()
+         if len(rl) != 3:
+            return self.failHandshake("Bad HTTP request status line '%s'" % self.http_status_line)
+         if rl[0].strip() != "GET":
+            return self.failHandshake("HTTP method '%s' not allowed" % rl[0], HTTP_STATUS_CODE_METHOD_NOT_ALLOWED[0])
+         vs = rl[2].strip().split("/")
+         if len(vs) != 2 or vs[0] != "HTTP" or vs[1] not in ["1.1"]:
+            return self.failHandshake("Unsupported HTTP version '%s'" % rl[2], HTTP_STATUS_CODE_UNSUPPORTED_HTTP_VERSION[0])
+
+         ## HTTP Request line : REQUEST-URI
+         ##
+         self.http_request_uri = rl[1].strip()
+         try:
+            (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(self.http_request_uri)
+
+            ## FIXME: check that if absolute resource URI is given,
+            ## the scheme/netloc matches the server
+            if scheme != "" or netloc != "":
+               pass
+
+            ## Fragment identifiers are meaningless in the context of WebSocket
+            ## URIs, and MUST NOT be used on these URIs.
+            if fragment != "":
+               return self.failHandshake("HTTP requested resource contains a fragment identifier '%s'" % fragment)
+
+            ## resource path and query parameters .. this will get forwarded
+            ## to onConnect()
+            self.http_request_path = path
+            self.http_request_params = urlparse.parse_qs(query)
+         except:
+            return self.failHandshake("Bad HTTP request resource - could not parse '%s'" % rl[1].strip())
+
+         ## Host
+         ##
+         if not self.http_headers.has_key("host"):
+            return self.failHandshake("HTTP Host header missing in opening handshake request")
+         if http_headers_cnt["host"] > 1:
+            return self.failHandshake("HTTP Host header appears more than once in opening handshake request")
+         self.http_request_host = self.http_headers["host"].strip()
+         if self.http_request_host.find(":") >= 0:
+            (h, p) = self.http_request_host.split(":")
+            try:
+               port = int(str(p.strip()))
+            except:
+               return self.failHandshake("invalid port '%s' in HTTP Host header '%s'" % (str(p.strip()), str(self.http_request_host)))
+            if port != self.factory.externalPort:
+               return self.failHandshake("port %d in HTTP Host header '%s' does not match server listening port %s" % (port, str(self.http_request_host), self.factory.externalPort))
+            self.http_request_host = h
+         else:
+            if not ((self.factory.isSecure and self.factory.externalPort == 443) or (not self.factory.isSecure and self.factory.externalPort == 80)):
+               return self.failHandshake("missing port in HTTP Host header '%s' and server runs on non-standard port %d (wss = %s)" % (str(self.http_request_host), self.factory.externalPort, self.factory.isSecure))
+
+         ## Upgrade
+         ##
+         if not self.http_headers.has_key("upgrade"):
+            ## When no WS upgrade, render HTML server status page
+            ##
+            if self.webStatus:
+               if self.http_request_params.has_key('redirect') and len(self.http_request_params['redirect']) > 0:
+                  ## To specify a URL for redirection, encode the URL, e.g. from JavaScript:
+                  ##
+                  ##    var url = encodeURIComponent("http://autobahn.ws/python");
+                  ##
+                  ## and append the encoded string as a query parameter 'redirect'
+                  ##
+                  ##    http://localhost:9000?redirect=http%3A%2F%2Fautobahn.ws%2Fpython
+                  ##    https://localhost:9000?redirect=https%3A%2F%2Ftwitter.com%2F
+                  ##
+                  ## This will perform an immediate HTTP-303 redirection. If you provide
+                  ## an additional parameter 'after' (int >= 0), the redirection happens
+                  ## via Meta-Refresh in the rendered HTML status page, i.e.
+                  ##
+                  ##    https://localhost:9000/?redirect=https%3A%2F%2Ftwitter.com%2F&after=3
+                  ##
+                  url = self.http_request_params['redirect'][0]
+                  if self.http_request_params.has_key('after') and len(self.http_request_params['after']) > 0:
+                     after = int(self.http_request_params['after'][0])
+                     if self.debugCodePaths:
+                        log.msg("HTTP Upgrade header missing : render server status page and meta-refresh-redirecting to %s after %d seconds" % (url, after))
+                     self.sendServerStatus(url, after)
+                  else:
+                     if self.debugCodePaths:
+                        log.msg("HTTP Upgrade header missing : 303-redirecting to %s" % url)
+                     self.sendRedirect(url)
+               else:
+                  if self.debugCodePaths:
+                     log.msg("HTTP Upgrade header missing : render server status page")
+                  self.sendServerStatus()
+               self.dropConnection(abort = False)
+               return
+            else:
+               return self.failHandshake("HTTP Upgrade header missing", HTTP_STATUS_CODE_UPGRADE_REQUIRED[0])
+         upgradeWebSocket = False
+         for u in self.http_headers["upgrade"].split(","):
+            if u.strip().lower() == "websocket":
+               upgradeWebSocket = True
+               break
+         if not upgradeWebSocket:
+            return self.failHandshake("HTTP Upgrade headers do not include 'websocket' value (case-insensitive) : %s" % self.http_headers["upgrade"])
+
+         ## Connection
+         ##
+         if not self.http_headers.has_key("connection"):
+            return self.failHandshake("HTTP Connection header missing")
+         connectionUpgrade = False
+         for c in self.http_headers["connection"].split(","):
+            if c.strip().lower() == "upgrade":
+               connectionUpgrade = True
+               break
+         if not connectionUpgrade:
+            return self.failHandshake("HTTP Connection headers do not include 'upgrade' value (case-insensitive) : %s" % self.http_headers["connection"])
+
+         ## Sec-WebSocket-Version PLUS determine mode: Hybi or Hixie
+         ##
+         if not self.http_headers.has_key("sec-websocket-version"):
+            if self.debugCodePaths:
+               log.msg("Hixie76 protocol detected")
+            if self.allowHixie76:
+               version = 0
+            else:
+               return self.failHandshake("WebSocket connection denied - Hixie76 protocol mode disabled.")
+         else:
+            if self.debugCodePaths:
+               log.msg("Hybi protocol detected")
+            if http_headers_cnt["sec-websocket-version"] > 1:
+               return self.failHandshake("HTTP Sec-WebSocket-Version header appears more than once in opening handshake request")
+            try:
+               version = int(self.http_headers["sec-websocket-version"])
+            except:
+               return self.failHandshake("could not parse HTTP Sec-WebSocket-Version header '%s' in opening handshake request" % self.http_headers["sec-websocket-version"])
+
+         if version not in self.versions:
+
+            ## respond with list of supported versions (descending order)
+            ##
+            sv = sorted(self.versions)
+            sv.reverse()
+            svs = ','.join([str(x) for x in sv])
+            return self.failHandshake("WebSocket version %d not supported (supported versions: %s)" % (version, svs),
+                                      HTTP_STATUS_CODE_BAD_REQUEST[0],
+                                      [("Sec-WebSocket-Version", svs)])
+         else:
+            ## store the protocol version we are supposed to talk
+            self.websocket_version = version
+
+         ## Sec-WebSocket-Protocol
+         ##
+         if self.http_headers.has_key("sec-websocket-protocol"):
+            protocols = [str(x.strip()) for x in self.http_headers["sec-websocket-protocol"].split(",")]
+            # check for duplicates in protocol header
+            pp = {}
+            for p in protocols:
+               if pp.has_key(p):
+                  return self.failHandshake("duplicate protocol '%s' specified in HTTP Sec-WebSocket-Protocol header" % p)
+               else:
+                  pp[p] = 1
+            # ok, no duplicates, save list in order the client sent it
+            self.websocket_protocols = protocols
+         else:
+            self.websocket_protocols = []
+
+         ## Origin / Sec-WebSocket-Origin
+         ## http://tools.ietf.org/html/draft-ietf-websec-origin-02
+         ##
+         if self.websocket_version < 13 and self.websocket_version != 0:
+            # Hybi, but only < Hybi-13
+            websocket_origin_header_key = 'sec-websocket-origin'
+         else:
+            # RFC6455, >= Hybi-13 and Hixie
+            websocket_origin_header_key = "origin"
+
+         self.websocket_origin = None
+         if self.http_headers.has_key(websocket_origin_header_key):
+            if http_headers_cnt[websocket_origin_header_key] > 1:
+               return self.failHandshake("HTTP Origin header appears more than once in opening handshake request")
+            self.websocket_origin = self.http_headers[websocket_origin_header_key].strip()
+         else:
+            # non-browser clients are allowed to omit this header
+            pass
+
+         ## Sec-WebSocket-Extensions
+         ##
+         ## extensions requested by client
+         self.websocket_extensions = []
+         ## extensions selected by server
+         self.websocket_extensions_in_use = []
+
+         if self.http_headers.has_key("sec-websocket-extensions"):
+            if self.websocket_version == 0:
+               return self.failHandshake("Sec-WebSocket-Extensions header specified for Hixie-76")
+            extensions = [x.strip() for x in self.http_headers["sec-websocket-extensions"].split(',')]
+            if len(extensions) > 0:
+               self.websocket_extensions = extensions
+               if self.debug:
+                  log.msg("client requested extensions we don't support (%s)" % str(extensions))
+
+         ## Sec-WebSocket-Key (Hybi) or Sec-WebSocket-Key1/Sec-WebSocket-Key2 (Hixie-76)
+         ##
+         if self.websocket_version == 0:
+            for kk in ['Sec-WebSocket-Key1', 'Sec-WebSocket-Key2']:
+               k = kk.lower()
+               if not self.http_headers.has_key(k):
+                  return self.failHandshake("HTTP %s header missing" % kk)
+               if http_headers_cnt[k] > 1:
+                  return self.failHandshake("HTTP %s header appears more than once in opening handshake request" % kk)
+               try:
+                  key1 = self.parseHixie76Key(self.http_headers["sec-websocket-key1"].strip())
+                  key2 = self.parseHixie76Key(self.http_headers["sec-websocket-key2"].strip())
+               except:
+                  return self.failHandshake("could not parse Sec-WebSocket-Key1/2")
+         else:
+            if not self.http_headers.has_key("sec-websocket-key"):
+               return self.failHandshake("HTTP Sec-WebSocket-Key header missing")
+            if http_headers_cnt["sec-websocket-key"] > 1:
+               return self.failHandshake("HTTP Sec-WebSocket-Key header appears more than once in opening handshake request")
+            key = self.http_headers["sec-websocket-key"].strip()
+            if len(key) != 24: # base64 of 16 random octets is exactly 24 characters
+               return self.failHandshake("bad Sec-WebSocket-Key (length must be 24 ASCII chars) '%s'" % key)
+            if key[-2:] != "==": # 16 octets = 128 bits encode to 22 base64 chars plus 2 padding chars
+               return self.failHandshake("bad Sec-WebSocket-Key (invalid base64 encoding) '%s'" % key)
+            for c in key[:-2]:
+               if c not in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789+/":
+                  return self.failHandshake("bad character '%s' in Sec-WebSocket-Key (invalid base64 encoding) '%s'" % (c, key))
+
+         ## For Hixie-76, we need 8 octets of HTTP request body to complete HS!
+         ##
+         if self.websocket_version == 0:
+            if len(self.data) < end_of_header + 4 + 8:
+               return
+            else:
+               key3 = self.data[end_of_header + 4:end_of_header + 4 + 8]
+               if self.debug:
+                  log.msg("received HTTP request body containing key3 for Hixie-76: %s" % key3)
+
+         ## Ok, got complete HS input, remember rest (if any)
+         ##
+         if self.websocket_version == 0:
+            self.data = self.data[end_of_header + 4 + 8:]
+         else:
+            self.data = self.data[end_of_header + 4:]
+
+         ## WebSocket handshake validated => produce opening handshake response
+
+         ## Now fire onConnect() on derived class, to give that class a chance to accept or deny
+         ## the connection. onConnect() may throw, in which case the connection is denied, or it
+         ## may return a protocol from the protocols provided by client or None.
+         ##
+         try:
+            connectionRequest = ConnectionRequest(self.peer,
+                                                  self.peerstr,
+                                                  self.http_headers,
+                                                  self.http_request_host,
+                                                  self.http_request_path,
+                                                  self.http_request_params,
+                                                  self.websocket_version,
+                                                  self.websocket_origin,
+                                                  self.websocket_protocols,
+                                                  self.websocket_extensions)
+
+            ## onConnect() will return the selected subprotocol or None
+            ## or raise an HttpException
+            ##
+            protocol = self.onConnect(connectionRequest)
+
+            if protocol is not None and not (protocol in self.websocket_protocols):
+               raise Exception("protocol accepted must be from the list client sent or None")
+
+            self.websocket_protocol_in_use = protocol
+
+         except HttpException, e:
+            return self.failHandshake(e.reason, e.code)
+            #return self.sendHttpRequestFailure(e.code, e.reason)
+
+         except Exception, e:
+            log.msg("Exception raised in onConnect() - %s" % str(e))
+            return self.failHandshake("Internal Server Error", HTTP_STATUS_CODE_INTERNAL_SERVER_ERROR[0])
+
+
+         ## build response to complete WebSocket handshake
+         ##
+         response  = "HTTP/1.1 %d Switching Protocols\x0d\x0a" % HTTP_STATUS_CODE_SWITCHING_PROTOCOLS[0]
+
+         if self.factory.server is not None and self.factory.server != "":
+            response += "Server: %s\x0d\x0a" % self.factory.server.encode("utf-8")
+
+         response += "Upgrade: WebSocket\x0d\x0a"
+         response += "Connection: Upgrade\x0d\x0a"
+
+         if self.websocket_protocol_in_use is not None:
+            response += "Sec-WebSocket-Protocol: %s\x0d\x0a" % str(self.websocket_protocol_in_use)
+
+         if self.websocket_version == 0:
+
+            if self.websocket_origin:
+               ## browser clients provide the header and expect it to be echoed back
+               response += "Sec-WebSocket-Origin: %s\x0d\x0a" % str(self.websocket_origin)
+
+            if self.debugCodePaths:
+               log.msg('factory isSecure = %s port = %s' % (self.factory.isSecure, self.factory.externalPort))
+
+            if (self.factory.isSecure and self.factory.externalPort != 443) or ((not self.factory.isSecure) and self.factory.externalPort != 80):
+               if self.debugCodePaths:
+                  log.msg('factory running on non-default port')
+               response_port = ':' + str(self.factory.externalPort)
+            else:
+               if self.debugCodePaths:
+                  log.msg('factory running on default port')
+               response_port = ''
+
+            ## FIXME: check this! But see below ..
+            if False:
+               response_host = str(self.factory.host)
+               response_path = str(self.factory.path)
+            else:
+               response_host = str(self.http_request_host)
+               response_path = str(self.http_request_uri)
+
+            location = "%s://%s%s%s" % ('wss' if self.factory.isSecure else 'ws', response_host, response_port, response_path)
+
+            # Safari is very picky about this one
+            response += "Sec-WebSocket-Location: %s\x0d\x0a" % location
+
+            ## end of HTTP response headers
+            response += "\x0d\x0a"
+
+            ## compute accept body
+            ##
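+            ## key1 and key2 are packed as two big-endian 32 bit integers and
+            ## concatenated with the 8 raw octets of key3; the 16 octet MD5
+            ## digest of those 16 octets is sent as the handshake response body.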
+            accept_val = struct.pack(">II", key1, key2) + key3
+            accept = hashlib.md5(accept_val).digest()
+            response_body = str(accept)
+         else:
+            ## compute Sec-WebSocket-Accept
+            ##
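+            ## e.g. (the well-known example from RFC 6455): for the client key
+            ## "dGhlIHNhbXBsZSBub25jZQ==" the accept value computed below is
+            ## "s3pPLMBiTxaQ9kYGzzhZRbK+xOo=".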
+            sha1 = hashlib.sha1()
+            sha1.update(key + WebSocketProtocol._WS_MAGIC)
+            sec_websocket_accept = base64.b64encode(sha1.digest())
+
+            response += "Sec-WebSocket-Accept: %s\x0d\x0a" % sec_websocket_accept
+
+            if len(self.websocket_extensions_in_use) > 0:
+               response += "Sec-WebSocket-Extensions: %s\x0d\x0a" % ','.join(self.websocket_extensions_in_use)
+
+            ## end of HTTP response headers
+            response += "\x0d\x0a"
+            response_body = ''
+
+         if self.debug:
+            log.msg("sending HTTP response:\n\n%s%s\n\n" % (response, binascii.b2a_hex(response_body)))
+
+         ## save and send out opening HS data
+         ##
+         self.http_response_data = response + response_body
+         self.sendData(self.http_response_data)
+
+         ## opening handshake completed, move WebSocket connection into OPEN state
+         ##
+         self.state = WebSocketProtocol.STATE_OPEN
+
+         ## cancel any opening HS timer if present
+         ##
+         if self.openHandshakeTimeoutCall is not None:
+            if self.debugCodePaths:
+               log.msg("openHandshakeTimeoutCall.cancel")
+            self.openHandshakeTimeoutCall.cancel()
+            self.openHandshakeTimeoutCall = None
+
+         ## init state
+         ##
+         self.inside_message = False
+         if self.websocket_version != 0:
+            self.current_frame = None
+
+         ## fire handler on derived class
+         ##
+         if self.trackedTimings:
+            self.trackedTimings.track("onOpen")
+         self.onOpen()
+
+         ## process rest, if any
+         ##
+         if len(self.data) > 0:
+            self.consumeData()
+
+
+   def failHandshake(self, reason, code = HTTP_STATUS_CODE_BAD_REQUEST[0], responseHeaders = []):
+      """
+      The client request during the opening handshake was invalid; we send an HTTP
+      error response and then drop the connection.
+      """
+      if self.debug:
+         log.msg("failing WebSocket opening handshake ('%s')" % reason)
+      self.sendHttpErrorResponse(code, reason, responseHeaders)
+      self.dropConnection(abort = False)
+
+
+   def sendHttpErrorResponse(self, code, reason, responseHeaders = []):
+      """
+      Send out HTTP error response.
+      """
+      response  = "HTTP/1.1 %d %s\x0d\x0a" % (code, reason.encode("utf-8"))
+      for h in responseHeaders:
+         response += "%s: %s\x0d\x0a" % (h[0], h[1].encode("utf-8"))
+      response += "\x0d\x0a"
+      self.sendData(response)
+
+
+   def sendHtml(self, html):
+      """
+      Send HTML page HTTP response.
+      """
+      raw = html.encode("utf-8")
+      response  = "HTTP/1.1 %d %s\x0d\x0a" % (HTTP_STATUS_CODE_OK[0], HTTP_STATUS_CODE_OK[1])
+      if self.factory.server is not None and self.factory.server != "":
+         response += "Server: %s\x0d\x0a" % self.factory.server.encode("utf-8")
+      response += "Content-Type: text/html; charset=UTF-8\x0d\x0a"
+      response += "Content-Length: %d\x0d\x0a" % len(raw)
+      response += "\x0d\x0a"
+      response += raw
+      self.sendData(response)
+
+
+   def sendRedirect(self, url):
+      """
+      Send HTTP Redirect (303) response.
+      """
+      response  = "HTTP/1.1 %d\x0d\x0a" % HTTP_STATUS_CODE_SEE_OTHER[0]
+      #if self.factory.server is not None and self.factory.server != "":
+      #   response += "Server: %s\x0d\x0a" % self.factory.server.encode("utf-8")
+      response += "Location: %s\x0d\x0a" % url.encode("utf-8")
+      response += "\x0d\x0a"
+      self.sendData(response)
+
+
+   def sendServerStatus(self, redirectUrl = None, redirectAfter = 0):
+      """
+      Used to send out server status/version upon receiving an HTTP GET without
+      a WebSocket upgrade header (and option webStatus is True).
+      """
+      if redirectUrl:
+         redirect = """<meta http-equiv="refresh" content="%d;URL='%s'">""" % (redirectAfter, redirectUrl)
+      else:
+         redirect = ""
+      html = """
+<!DOCTYPE html>
+<html>
+   <head>
+      %s
+      <style>
+         body {
+            color: #fff;
+            background-color: #027eae;
+            font-family: "Segoe UI", "Lucida Grande", "Helvetica Neue", Helvetica, Arial, sans-serif;
+            font-size: 16px;
+         }
+
+         a, a:visited, a:hover {
+            color: #fff;
+         }
+      </style>
+   </head>
+   <body>
+      <h1>AutobahnPython %s</h1>
+      <p>
+         I am not a Web server, but a WebSocket endpoint.
+         You can talk to me using the WebSocket <a href="http://tools.ietf.org/html/rfc6455">protocol</a>.
+      </p>
+      <p>
+         For more information, please visit <a href="http://autobahn.ws/python">my homepage</a>.
+      </p>
+   </body>
+</html>
+""" % (redirect, __version__)
+      self.sendHtml(html)
+
+
+class WebSocketServerFactory(protocol.ServerFactory, WebSocketFactory):
+   """
+   A Twisted factory for WebSocket server protocols.
+   """
+
+   protocol = WebSocketServerProtocol
+   """
+   The protocol to be spoken. Must be derived from :class:`autobahn.websocket.WebSocketServerProtocol`.
+   """
+
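+   ## Typical usage is a minimal sketch along these lines (MyServerProtocol is
+   ## a hypothetical WebSocketServerProtocol subclass; URL and port are arbitrary):
+   ##
+   ##    from twisted.internet import reactor
+   ##
+   ##    factory = WebSocketServerFactory("ws://localhost:9000")
+   ##    factory.protocol = MyServerProtocol
+   ##    reactor.listenTCP(9000, factory)
+   ##    reactor.run()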
+
+   def __init__(self, url = None, protocols = [], server = "AutobahnPython/%s" % __version__, debug = False, debugCodePaths = False, externalPort = None):
+      """
+      Create instance of WebSocket server factory.
+
+      Note that you MUST provide URL either here or using
+      :meth:`autobahn.websocket.WebSocketServerFactory.setSessionParameters`
+      *before* the factory is started.
+
+      :param url: WebSocket listening URL - ("ws:" | "wss:") "//" host [ ":" port ].
+      :type url: str
+      :param protocols: List of subprotocols the server supports. The subprotocol used is the first from the list of subprotocols announced by the client that is contained in this list.
+      :type protocols: list of strings
+      :param server: Server as announced in HTTP response header during opening handshake or None (default: "AutobahnPython/x.x.x").
+      :type server: str
+      :param debug: Debug mode (default: False).
+      :type debug: bool
+      :param debugCodePaths: Debug code paths mode (default: False).
+      :type debugCodePaths: bool
+      :param externalPort: Optionally, the externally visible port this server will be reachable under (i.e. when running behind a L2/L3 forwarding device).
+      :type externalPort: int
+      """
+      self.debug = debug
+      self.debugCodePaths = debugCodePaths
+
+      self.logOctets = debug
+      self.logFrames = debug
+
+      self.trackTimings = False
+
+      self.isServer = True
+
+      ## seed RNG which is used for WS frame masks generation
+      random.seed()
+
+      ## default WS session parameters
+      ##
+      self.setSessionParameters(url, protocols, server, externalPort)
+
+      ## default WebSocket protocol options
+      ##
+      self.resetProtocolOptions()
+
+      ## number of currently connected clients
+      ##
+      self.countConnections = 0
+
+
+   def setSessionParameters(self, url = None, protocols = [], server = None, externalPort = None):
+      """
+      Set WebSocket session parameters.
+
+      :param url: WebSocket listening URL - ("ws:" | "wss:") "//" host [ ":" port ].
+      :type url: str
+      :param protocols: List of subprotocols the server supports. The subprotocol used is the first from the list of subprotocols announced by the client that is contained in this list.
+      :type protocols: list of strings
+      :param server: Server as announced in HTTP response header during opening handshake.
+      :type server: str
+      :param externalPort: Optionally, the externally visible port this server will be reachable under (i.e. when running behind a L2/L3 forwarding device).
+      :type externalPort: int
+      """
+      if url is not None:
+         ## parse WebSocket URI into components
+         (isSecure, host, port, resource, path, params) = parseWsUrl(url)
+         if path != "/":
+            raise Exception("path specified for server WebSocket URL")
+         if len(params) > 0:
+            raise Exception("query parameters specified for server WebSocket URL")
+         self.url = url
+         self.isSecure = isSecure
+         self.host = host
+         self.port = port
+      else:
+         self.url = None
+         self.isSecure = None
+         self.host = None
+         self.port = None
+
+      self.externalPort = externalPort if externalPort is not None else self.port
+      self.protocols = protocols
+      self.server = server
+
+
+   def resetProtocolOptions(self):
+      """
+      Reset all WebSocket protocol options to defaults.
+      """
+      self.versions = WebSocketProtocol.SUPPORTED_PROTOCOL_VERSIONS
+      self.allowHixie76 = WebSocketProtocol.DEFAULT_ALLOW_HIXIE76
+      self.webStatus = True
+      self.utf8validateIncoming = True
+      self.requireMaskedClientFrames = True
+      self.maskServerFrames = False
+      self.applyMask = True
+      self.maxFramePayloadSize = 0
+      self.maxMessagePayloadSize = 0
+      self.autoFragmentSize = 0
+      self.failByDrop = True
+      self.echoCloseCodeReason = False
+      self.openHandshakeTimeout = 5
+      self.closeHandshakeTimeout = 1
+      self.tcpNoDelay = True
+
+
+   def setProtocolOptions(self,
+                          versions = None,
+                          allowHixie76 = None,
+                          webStatus = None,
+                          utf8validateIncoming = None,
+                          maskServerFrames = None,
+                          requireMaskedClientFrames = None,
+                          applyMask = None,
+                          maxFramePayloadSize = None,
+                          maxMessagePayloadSize = None,
+                          autoFragmentSize = None,
+                          failByDrop = None,
+                          echoCloseCodeReason = None,
+                          openHandshakeTimeout = None,
+                          closeHandshakeTimeout = None,
+                          tcpNoDelay = None):
+      """
+      Set WebSocket protocol options used as defaults for new protocol instances.
+
+      :param versions: The WebSocket protocol versions accepted by the server (default: WebSocketProtocol.SUPPORTED_PROTOCOL_VERSIONS).
+      :type versions: list of ints
+      :param allowHixie76: Allow to speak Hixie76 protocol version.
+      :type allowHixie76: bool
+      :param webStatus: Return server status/version on HTTP/GET without WebSocket upgrade header (default: True).
+      :type webStatus: bool
+      :param utf8validateIncoming: Validate incoming UTF-8 in text message payloads (default: True).
+      :type utf8validateIncoming: bool
+      :param maskServerFrames: Mask server-to-client frames (default: False).
+      :type maskServerFrames: bool
+      :param requireMaskedClientFrames: Require client-to-server frames to be masked (default: True).
+      :type requireMaskedClientFrames: bool
+      :param applyMask: Actually apply mask to payload when a mask is present. Applies to outgoing and incoming frames (default: True).
+      :type applyMask: bool
+      :param maxFramePayloadSize: Maximum frame payload size that will be accepted when receiving or 0 for unlimited (default: 0).
+      :type maxFramePayloadSize: int
+      :param maxMessagePayloadSize: Maximum message payload size (after reassembly of fragmented messages) that will be accepted when receiving or 0 for unlimited (default: 0).
+      :type maxMessagePayloadSize: int
+      :param autoFragmentSize: Automatic fragmentation of outgoing data messages (when using the message-based API) into frames with payload length <= this size or 0 for no auto-fragmentation (default: 0).
+      :type autoFragmentSize: int
+      :param failByDrop: Fail connections by dropping the TCP connection without performing the closing handshake (default: True).
+      :type failByDrop: bool
+      :param echoCloseCodeReason: Iff true, when receiving a close, echo back close code/reason. Otherwise reply with code == NORMAL, reason = "" (default: False).
+      :type echoCloseCodeReason: bool
+      :param openHandshakeTimeout: Opening WebSocket handshake timeout, timeout in seconds or 0 to deactivate (default: 5).
+      :type openHandshakeTimeout: float
+      :param closeHandshakeTimeout: When we expect to receive a closing handshake reply, timeout in seconds (default: 1).
+      :type closeHandshakeTimeout: float
+      :param tcpNoDelay: TCP NODELAY ("Nagle") socket option (default: True).
+      :type tcpNoDelay: bool
+      """
+      if allowHixie76 is not None and allowHixie76 != self.allowHixie76:
+         self.allowHixie76 = allowHixie76
+
+      if versions is not None:
+         for v in versions:
+            if v not in WebSocketProtocol.SUPPORTED_PROTOCOL_VERSIONS:
+               raise Exception("invalid WebSocket protocol version %s (allowed values: %s)" % (v, str(WebSocketProtocol.SUPPORTED_PROTOCOL_VERSIONS)))
+            if v == 0 and not self.allowHixie76:
+               raise Exception("use of Hixie-76 requires allowHixie76 == True")
+         if set(versions) != set(self.versions):
+            self.versions = versions
+
+      if webStatus is not None and webStatus != self.webStatus:
+         self.webStatus = webStatus
+
+      if utf8validateIncoming is not None and utf8validateIncoming != self.utf8validateIncoming:
+         self.utf8validateIncoming = utf8validateIncoming
+
+      if requireMaskedClientFrames is not None and requireMaskedClientFrames != self.requireMaskedClientFrames:
+         self.requireMaskedClientFrames = requireMaskedClientFrames
+
+      if maskServerFrames is not None and maskServerFrames != self.maskServerFrames:
+         self.maskServerFrames = maskServerFrames
+
+      if applyMask is not None and applyMask != self.applyMask:
+         self.applyMask = applyMask
+
+      if maxFramePayloadSize is not None and maxFramePayloadSize != self.maxFramePayloadSize:
+         self.maxFramePayloadSize = maxFramePayloadSize
+
+      if maxMessagePayloadSize is not None and maxMessagePayloadSize != self.maxMessagePayloadSize:
+         self.maxMessagePayloadSize = maxMessagePayloadSize
+
+      if autoFragmentSize is not None and autoFragmentSize != self.autoFragmentSize:
+         self.autoFragmentSize = autoFragmentSize
+
+      if failByDrop is not None and failByDrop != self.failByDrop:
+         self.failByDrop = failByDrop
+
+      if echoCloseCodeReason is not None and echoCloseCodeReason != self.echoCloseCodeReason:
+         self.echoCloseCodeReason = echoCloseCodeReason
+
+      if openHandshakeTimeout is not None and openHandshakeTimeout != self.openHandshakeTimeout:
+         self.openHandshakeTimeout = openHandshakeTimeout
+
+      if closeHandshakeTimeout is not None and closeHandshakeTimeout != self.closeHandshakeTimeout:
+         self.closeHandshakeTimeout = closeHandshakeTimeout
+
+      if tcpNoDelay is not None and tcpNoDelay != self.tcpNoDelay:
+         self.tcpNoDelay = tcpNoDelay
+
+
+   def getConnectionCount(self):
+      """
+      Get number of currently connected clients.
+
+      :returns: int -- Number of currently connected clients.
+      """
+      return self.countConnections
+
+
+   def startFactory(self):
+      """
+      Called by Twisted before starting to listen on port for incoming connections.
+      Default implementation does nothing. Override in derived class when appropriate.
+      """
+      pass
+
+
+   def stopFactory(self):
+      """
+      Called by Twisted before stopping to listen on port for incoming connections.
+      Default implementation does nothing. Override in derived class when appropriate.
+      """
+      pass
+
+
+class WebSocketClientProtocol(WebSocketProtocol):
+   """
+   Client protocol for WebSocket.
+   """
+
+   def onConnect(self, connectionResponse):
+      """
+      Callback fired directly after WebSocket opening handshake when new WebSocket server
+      connection was established.
+
+      :param connectionResponse: WebSocket connection response information.
+      :type connectionResponse: instance of :class:`autobahn.websocket.ConnectionResponse`
+      """
+      pass
+
+
+   def connectionMade(self):
+      """
+      Called by Twisted when a new TCP connection to the server was established. The
+      default implementation will start the initial WebSocket opening handshake.
+      When overriding in a derived class, make sure to call this base class
+      implementation *before* your code.
+      """
+      self.isServer = False
+      WebSocketProtocol.connectionMade(self)
+      if self.debug:
+         log.msg("connection to %s established" % self.peerstr)
+      self.startHandshake()
+
+
+   def connectionLost(self, reason):
+      """
+      Called by Twisted when an established TCP connection to the server was lost.
+      The default implementation will tear down all state properly.
+      When overriding in a derived class, make sure to call this base class
+      implementation *after* your code.
+      """
+      WebSocketProtocol.connectionLost(self, reason)
+      if self.debug:
+         log.msg("connection to %s lost" % self.peerstr)
+
+
+   def createHixieKey(self):
+      """
+      Generate a Hixie-76 handshake key using the (rather convoluted) algorithm
+      specified in the draft:
+
+      http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76#page-21
+      Items 16 - 22
+      """
+      spaces1 = random.randint(1, 12)
+      max1 = int(4294967295L / spaces1)
+      number1 = random.randint(0, max1)
+      product1 = number1 * spaces1
+      key1 = str(product1)
+      rchars = filter(lambda x: (x >= 0x21 and x <= 0x2f) or (x >= 0x3a and x <= 0x7e), range(0,127))
+      for i in xrange(random.randint(1, 12)):
+         p = random.randint(0, len(key1) - 1)
+         key1 = key1[:p] + chr(random.choice(rchars)) + key1[p:]
+      for i in xrange(spaces1):
+         p = random.randint(1, len(key1) - 2)
+         key1 = key1[:p] + ' ' + key1[p:]
+      return (key1, number1)
+
+
+   def startHandshake(self):
+      """
+      Start WebSocket opening handshake.
+      """
+
+      ## construct WS opening handshake HTTP header
+      ##
+      request  = "GET %s HTTP/1.1\x0d\x0a" % self.factory.resource.encode("utf-8")
+
+      if self.factory.useragent is not None and self.factory.useragent != "":
+         request += "User-Agent: %s\x0d\x0a" % self.factory.useragent.encode("utf-8")
+
+      request += "Host: %s:%d\x0d\x0a" % (self.factory.host.encode("utf-8"), self.factory.port)
+      request += "Upgrade: WebSocket\x0d\x0a"
+      request += "Connection: Upgrade\x0d\x0a"
+
+      ## this seems to prevent some non-compliant proxies from stripping the
+      ## Connection "Upgrade" header
+      ## See also:
+      ##   http://www.ietf.org/mail-archive/web/hybi/current/msg09841.html
+      ##   http://code.google.com/p/chromium/issues/detail?id=148908
+      ##
+      request += "Pragma: no-cache\x0d\x0a"
+      request += "Cache-Control: no-cache\x0d\x0a"
+
+      ## handshake random key
+      ##
+      if self.version == 0:
+         (self.websocket_key1, number1) = self.createHixieKey()
+         (self.websocket_key2, number2) = self.createHixieKey()
+         self.websocket_key3 = os.urandom(8)
+         accept_val = struct.pack(">II", number1, number2) + self.websocket_key3
+         self.websocket_expected_challenge_response = hashlib.md5(accept_val).digest()
+
+         ## Safari does NOT set Content-Length, even though the body is
+         ## non-empty and the request is unchunked. We do set it.
+         ## See also: http://www.ietf.org/mail-archive/web/hybi/current/msg02149.html
+         request += "Content-Length: %s\x0d\x0a" % len(self.websocket_key3)
+
+         ## First two keys.
+         request += "Sec-WebSocket-Key1: %s\x0d\x0a" % self.websocket_key1
+         request += "Sec-WebSocket-Key2: %s\x0d\x0a" % self.websocket_key2
+      else:
+         self.websocket_key = base64.b64encode(os.urandom(16))
+         request += "Sec-WebSocket-Key: %s\x0d\x0a" % self.websocket_key
+
+      ## optional origin announced
+      ##
+      if self.factory.origin:
+         if self.version > 10 or self.version == 0:
+            request += "Origin: %d\x0d\x0a" % self.factory.origin.encode("utf-8")
+         else:
+            request += "Sec-WebSocket-Origin: %d\x0d\x0a" % self.factory.origin.encode("utf-8")
+
+      ## optional list of WS subprotocols announced
+      ##
+      if len(self.factory.protocols) > 0:
+         request += "Sec-WebSocket-Protocol: %s\x0d\x0a" % ','.join(self.factory.protocols)
+
+      ## set WS protocol version depending on WS spec version
+      ##
+      if self.version != 0:
+         request += "Sec-WebSocket-Version: %d\x0d\x0a" % WebSocketProtocol.SPEC_TO_PROTOCOL_VERSION[self.version]
+
+      request += "\x0d\x0a"
+
+      if self.version == 0:
+         ## Write HTTP request body for Hixie-76
+         request += self.websocket_key3
+
+      self.http_request_data = request
+
+      if self.debug:
+         log.msg(self.http_request_data)
+
+      self.sendData(self.http_request_data)
+
+
+   def processHandshake(self):
+      """
+      Process WebSocket opening handshake response from server.
+      """
+      ## only proceed when we have fully received the HTTP request line and all headers
+      ##
+      end_of_header = self.data.find("\x0d\x0a\x0d\x0a")
+      if end_of_header >= 0:
+
+         self.http_response_data = self.data[:end_of_header + 4]
+         if self.debug:
+            log.msg("received HTTP response:\n\n%s\n\n" % self.http_response_data)
+
+         ## extract HTTP status line and headers
+         ##
+         (self.http_status_line, self.http_headers, http_headers_cnt) = parseHttpHeader(self.http_response_data)
+
+         ## validate WebSocket opening handshake server response
+         ##
+         if self.debug:
+            log.msg("received HTTP status line in opening handshake : %s" % str(self.http_status_line))
+            log.msg("received HTTP headers in opening handshake : %s" % str(self.http_headers))
+
+         ## Response Line
+         ##
+         sl = self.http_status_line.split()
+         if len(sl) < 2:
+            return self.failHandshake("Bad HTTP response status line '%s'" % self.http_status_line)
+
+         ## HTTP version
+         ##
+         http_version = sl[0].strip()
+         if http_version != "HTTP/1.1":
+            return self.failHandshake("Unsupported HTTP version ('%s')" % http_version)
+
+         ## HTTP status code
+         ##
+         try:
+            status_code = int(sl[1].strip())
+         except:
+            return self.failHandshake("Bad HTTP status code ('%s')" % sl[1].strip())
+         if status_code != HTTP_STATUS_CODE_SWITCHING_PROTOCOLS[0]:
+
+            ## FIXME: handle redirects
+            ## FIXME: handle authentication required
+
+            if len(sl) > 2:
+               reason = " - %s" % ''.join(sl[2:])
+            else:
+               reason = ""
+            return self.failHandshake("WebSocket connection upgrade failed (%d%s)" % (status_code, reason))
+
+         ## Upgrade
+         ##
+         if not self.http_headers.has_key("upgrade"):
+            return self.failHandshake("HTTP Upgrade header missing")
+         if self.http_headers["upgrade"].strip().lower() != "websocket":
+            return self.failHandshake("HTTP Upgrade header different from 'websocket' (case-insensitive) : %s" % self.http_headers["upgrade"])
+
+         ## Connection
+         ##
+         if not self.http_headers.has_key("connection"):
+            return self.failHandshake("HTTP Connection header missing")
+         connectionUpgrade = False
+         for c in self.http_headers["connection"].split(","):
+            if c.strip().lower() == "upgrade":
+               connectionUpgrade = True
+               break
+         if not connectionUpgrade:
+            return self.failHandshake("HTTP Connection header does not include 'upgrade' value (case-insensitive) : %s" % self.http_headers["connection"])
+
+         ## compute Sec-WebSocket-Accept
+         ##
+         if self.version != 0:
+            if not self.http_headers.has_key("sec-websocket-accept"):
+               return self.failHandshake("HTTP Sec-WebSocket-Accept header missing in opening handshake reply")
+            else:
+               if http_headers_cnt["sec-websocket-accept"] > 1:
+                  return self.failHandshake("HTTP Sec-WebSocket-Accept header appears more than once in opening handshake reply")
+               sec_websocket_accept_got = self.http_headers["sec-websocket-accept"].strip()
+
+               sha1 = hashlib.sha1()
+               sha1.update(self.websocket_key + WebSocketProtocol._WS_MAGIC)
+               sec_websocket_accept = base64.b64encode(sha1.digest())
+
+               if sec_websocket_accept_got != sec_websocket_accept:
+                  return self.failHandshake("HTTP Sec-WebSocket-Accept bogus value : expected %s / got %s" % (sec_websocket_accept, sec_websocket_accept_got))
+
+         ## handle "extensions in use" - if any
+         ##
+         self.websocket_extensions_in_use = []
+         if self.version != 0:
+            if self.http_headers.has_key("sec-websocket-extensions"):
+               if http_headers_cnt["sec-websocket-extensions"] > 1:
+                  return self.failHandshake("HTTP Sec-WebSocket-Extensions header appears more than once in opening handshake reply")
+               exts = self.http_headers["sec-websocket-extensions"].strip()
+               ##
+               ## we don't support any extensions, but if we did, we would need to
+               ## set self.websocket_extensions_in_use here instead of failing the handshake
+               ##
+               return self.failHandshake("server wants to use extensions (%s), but no extensions implemented" % exts)
+
+         ## handle "subprotocol in use" - if any
+         ##
+         self.websocket_protocol_in_use = None
+         if self.http_headers.has_key("sec-websocket-protocol"):
+            if http_headers_cnt["sec-websocket-protocol"] > 1:
+               return self.failHandshake("HTTP Sec-WebSocket-Protocol header appears more than once in opening handshake reply")
+            sp = str(self.http_headers["sec-websocket-protocol"].strip())
+            if sp != "":
+               if sp not in self.factory.protocols:
+                  return self.failHandshake("subprotocol selected by server (%s) not in subprotocol list requested by client (%s)" % (sp, str(self.factory.protocols)))
+               else:
+                  ## ok, subprotocol in use
+                  ##
+                  self.websocket_protocol_in_use = sp
+
+
+         ## For Hixie-76, we need 16 octets of HTTP response body to complete the HS!
+         ##
+         if self.version == 0:
+            if len(self.data) < end_of_header + 4 + 16:
+               return
+            else:
+               challenge_response = self.data[end_of_header + 4:end_of_header + 4 + 16]
+               if challenge_response != self.websocket_expected_challenge_response:
+                  return self.failHandshake("invalid challenge response received from server (Hixie-76)")
+
+         ## Ok, got complete HS input, remember rest (if any)
+         ##
+         if self.version == 0:
+            self.data = self.data[end_of_header + 4 + 16:]
+         else:
+            self.data = self.data[end_of_header + 4:]
+
+         ## opening handshake completed, move WebSocket connection into OPEN state
+         ##
+         self.state = WebSocketProtocol.STATE_OPEN
+         self.inside_message = False
+         if self.version != 0:
+            self.current_frame = None
+         self.websocket_version = self.version
+
+         ## we handle this symmetrical to server-side .. that is, give the
+         ## client a chance to bail out .. i.e. on no subprotocol selected
+         ## by server
+         try:
+            connectionResponse = ConnectionResponse(self.peer,
+                                                    self.peerstr,
+                                                    self.http_headers,
+                                                    None, # FIXME
+                                                    self.websocket_protocol_in_use,
+                                                    self.websocket_extensions_in_use)
+
+            self.onConnect(connectionResponse)
+
+         except Exception, e:
+            ## immediately close the WS connection
+            ##
+            self.failConnection(1000, str(e))
+         else:
+            ## fire handler on derived class
+            ##
+            if self.trackedTimings:
+               self.trackedTimings.track("onOpen")
+            self.onOpen()
+
+         ## process rest, if any
+         ##
+         if len(self.data) > 0:
+            self.consumeData()
+
+
+   def failHandshake(self, reason):
+      """
+      The server response during the opening handshake was invalid; we drop the
+      connection.
+      """
+      if self.debug:
+         log.msg("failing WebSocket opening handshake ('%s')" % reason)
+      self.dropConnection(abort = True)
+
+
+class WebSocketClientFactory(protocol.ClientFactory, WebSocketFactory):
+   """
+   A Twisted factory for WebSocket client protocols.
+   """
+
+   protocol = WebSocketClientProtocol
+   """
+   The protocol to be spoken. Must be derived from :class:`autobahn.websocket.WebSocketClientProtocol`.
+   """
+
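+   ## Typical usage is a minimal sketch along these lines (MyClientProtocol is
+   ## a hypothetical WebSocketClientProtocol subclass; host and port are arbitrary):
+   ##
+   ##    from twisted.internet import reactor
+   ##
+   ##    factory = WebSocketClientFactory("ws://localhost:9000")
+   ##    factory.protocol = MyClientProtocol
+   ##    reactor.connectTCP("localhost", 9000, factory)
+   ##    reactor.run()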
+
+   def __init__(self, url = None, origin = None, protocols = [], useragent = "AutobahnPython/%s" % __version__, debug = False, debugCodePaths = False):
+      """
+      Create instance of WebSocket client factory.
+
+      Note that you MUST provide URL either here or set using
+      :meth:`autobahn.websocket.WebSocketClientFactory.setSessionParameters`
+      *before* the factory is started.
+
+      :param url: WebSocket URL to connect to - ("ws:" | "wss:") "//" host [ ":" port ] path [ "?" query ].
+      :type url: str
+      :param origin: The origin to be sent in WebSocket opening handshake or None (default: None).
+      :type origin: str
+      :param protocols: List of subprotocols the client should announce in WebSocket opening handshake (default: []).
+      :type protocols: list of strings
+      :param useragent: User agent as announced in HTTP request header or None (default: "AutobahnPython/x.x.x").
+      :type useragent: str
+      :param debug: Debug mode (default: False).
+      :type debug: bool
+      :param debugCodePaths: Debug code paths mode (default: False).
+      :type debugCodePaths: bool
+      """
+      self.debug = debug
+      self.debugCodePaths = debugCodePaths
+
+      self.logOctets = debug
+      self.logFrames = debug
+
+      self.trackTimings = False
+
+      self.isServer = False
+
+      ## seed RNG which is used for WS opening handshake key and WS frame masks generation
+      random.seed()
+
+      ## default WS session parameters
+      ##
+      self.setSessionParameters(url, origin, protocols, useragent)
+
+      ## default WebSocket protocol options
+      ##
+      self.resetProtocolOptions()
+
+
+   def setSessionParameters(self, url = None, origin = None, protocols = [], useragent = None):
+      """
+      Set WebSocket session parameters.
+
+      :param url: WebSocket URL to connect to - ("ws:" | "wss:") "//" host [ ":" port ] path [ "?" query ].
+      :type url: str
+      :param origin: The origin to be sent in opening handshake.
+      :type origin: str
+      :param protocols: List of WebSocket subprotocols the client should announce in opening handshake.
+      :type protocols: list of strings
+      :param useragent: User agent as announced in HTTP request header during opening handshake.
+      :type useragent: str
+      """
+      if url is not None:
+         ## parse WebSocket URI into components
+         (isSecure, host, port, resource, path, params) = parseWsUrl(url)
+         self.url = url
+         self.isSecure = isSecure
+         self.host = host
+         self.port = port
+         self.resource = resource
+         self.path = path
+         self.params = params
+      else:
+         self.url = None
+         self.isSecure = None
+         self.host = None
+         self.port = None
+         self.resource = None
+         self.path = None
+         self.params = None
+
+      self.origin = origin
+      self.protocols = protocols
+      self.useragent = useragent
+
+
+   def resetProtocolOptions(self):
+      """
+      Reset all WebSocket protocol options to defaults.
+      """
+      self.version = WebSocketProtocol.DEFAULT_SPEC_VERSION
+      self.allowHixie76 = WebSocketProtocol.DEFAULT_ALLOW_HIXIE76
+      self.utf8validateIncoming = True
+      self.acceptMaskedServerFrames = False
+      self.maskClientFrames = True
+      self.applyMask = True
+      self.maxFramePayloadSize = 0
+      self.maxMessagePayloadSize = 0
+      self.autoFragmentSize = 0
+      self.failByDrop = True
+      self.echoCloseCodeReason = False
+      self.serverConnectionDropTimeout = 1
+      self.openHandshakeTimeout = 5
+      self.closeHandshakeTimeout = 1
+      self.tcpNoDelay = True
+
+
+   def setProtocolOptions(self,
+                          version = None,
+                          allowHixie76 = None,
+                          utf8validateIncoming = None,
+                          acceptMaskedServerFrames = None,
+                          maskClientFrames = None,
+                          applyMask = None,
+                          maxFramePayloadSize = None,
+                          maxMessagePayloadSize = None,
+                          autoFragmentSize = None,
+                          failByDrop = None,
+                          echoCloseCodeReason = None,
+                          serverConnectionDropTimeout = None,
+                          openHandshakeTimeout = None,
+                          closeHandshakeTimeout = None,
+                          tcpNoDelay = None):
+      """
+      Set WebSocket protocol options used as defaults for _new_ protocol instances.
+
+      :param version: The WebSocket protocol spec (draft) version to be used (default: WebSocketProtocol.DEFAULT_SPEC_VERSION).
+      :type version: int
+      :param allowHixie76: Allow to speak Hixie76 protocol version.
+      :type allowHixie76: bool
+      :param utf8validateIncoming: Validate incoming UTF-8 in text message payloads (default: True).
+      :type utf8validateIncoming: bool
+      :param acceptMaskedServerFrames: Accept masked server-to-client frames (default: False).
+      :type acceptMaskedServerFrames: bool
+      :param maskClientFrames: Mask client-to-server frames (default: True).
+      :type maskClientFrames: bool
+      :param applyMask: Actually apply mask to payload when a mask is present. Applies to outgoing and incoming frames (default: True).
+      :type applyMask: bool
+      :param maxFramePayloadSize: Maximum frame payload size that will be accepted when receiving or 0 for unlimited (default: 0).
+      :type maxFramePayloadSize: int
+      :param maxMessagePayloadSize: Maximum message payload size (after reassembly of fragmented messages) that will be accepted when receiving or 0 for unlimited (default: 0).
+      :type maxMessagePayloadSize: int
+      :param autoFragmentSize: Automatic fragmentation of outgoing data messages (when using the message-based API) into frames with payload length <= this size or 0 for no auto-fragmentation (default: 0).
+      :type autoFragmentSize: int
+      :param failByDrop: Fail connections by dropping the TCP connection without performing closing handshake (default: True).
+      :type failByDrop: bool
+      :param echoCloseCodeReason: Iff true, when receiving a close, echo back close code/reason. Otherwise reply with code == NORMAL, reason = "" (default: False).
+      :type echoCloseCodeReason: bool
+      :param serverConnectionDropTimeout: When the client expects the server to drop the TCP, timeout in seconds (default: 1).
+      :type serverConnectionDropTimeout: float
+      :param openHandshakeTimeout: Opening WebSocket handshake timeout, timeout in seconds or 0 to deactivate (default: 5).
+      :type openHandshakeTimeout: float
+      :param closeHandshakeTimeout: When we expect to receive a closing handshake reply, timeout in seconds (default: 1).
+      :type closeHandshakeTimeout: float
+      :param tcpNoDelay: TCP NODELAY ("Nagle") socket option (default: True).
+      :type tcpNoDelay: bool
+      """
+      if allowHixie76 is not None and allowHixie76 != self.allowHixie76:
+         self.allowHixie76 = allowHixie76
+
+      if version is not None:
+         if version not in WebSocketProtocol.SUPPORTED_SPEC_VERSIONS:
+            raise Exception("invalid WebSocket draft version %s (allowed values: %s)" % (version, str(WebSocketProtocol.SUPPORTED_SPEC_VERSIONS)))
+         if version == 0 and not self.allowHixie76:
+            raise Exception("use of Hixie-76 requires allowHixie76 == True")
+         if version != self.version:
+            self.version = version
+
+      if utf8validateIncoming is not None and utf8validateIncoming != self.utf8validateIncoming:
+         self.utf8validateIncoming = utf8validateIncoming
+
+      if acceptMaskedServerFrames is not None and acceptMaskedServerFrames != self.acceptMaskedServerFrames:
+         self.acceptMaskedServerFrames = acceptMaskedServerFrames
+
+      if maskClientFrames is not None and maskClientFrames != self.maskClientFrames:
+         self.maskClientFrames = maskClientFrames
+
+      if applyMask is not None and applyMask != self.applyMask:
+         self.applyMask = applyMask
+
+      if maxFramePayloadSize is not None and maxFramePayloadSize != self.maxFramePayloadSize:
+         self.maxFramePayloadSize = maxFramePayloadSize
+
+      if maxMessagePayloadSize is not None and maxMessagePayloadSize != self.maxMessagePayloadSize:
+         self.maxMessagePayloadSize = maxMessagePayloadSize
+
+      if autoFragmentSize is not None and autoFragmentSize != self.autoFragmentSize:
+         self.autoFragmentSize = autoFragmentSize
+
+      if failByDrop is not None and failByDrop != self.failByDrop:
+         self.failByDrop = failByDrop
+
+      if echoCloseCodeReason is not None and echoCloseCodeReason != self.echoCloseCodeReason:
+         self.echoCloseCodeReason = echoCloseCodeReason
+
+      if serverConnectionDropTimeout is not None and serverConnectionDropTimeout != self.serverConnectionDropTimeout:
+         self.serverConnectionDropTimeout = serverConnectionDropTimeout
+
+      if openHandshakeTimeout is not None and openHandshakeTimeout != self.openHandshakeTimeout:
+         self.openHandshakeTimeout = openHandshakeTimeout
+
+      if closeHandshakeTimeout is not None and closeHandshakeTimeout != self.closeHandshakeTimeout:
+         self.closeHandshakeTimeout = closeHandshakeTimeout
+
+      if tcpNoDelay is not None and tcpNoDelay != self.tcpNoDelay:
+         self.tcpNoDelay = tcpNoDelay
+
+
+   def clientConnectionFailed(self, connector, reason):
+      """
+      Called by Twisted when the connection to server has failed. Default implementation
+      does nothing. Override in derived class when appropriate.
+      """
+      pass
+
+
+   def clientConnectionLost(self, connector, reason):
+      """
+      Called by Twisted when the connection to server was lost. Default implementation
+      does nothing. Override in derived class when appropriate.
+      """
+      pass
diff --git a/ThirdParty/AutobahnPython/autobahn/xormasker.py b/ThirdParty/AutobahnPython/autobahn/xormasker.py
new file mode 100644
index 0000000..b16cbdf
--- /dev/null
+++ b/ThirdParty/AutobahnPython/autobahn/xormasker.py
@@ -0,0 +1,100 @@
+###############################################################################
+##
+##  Copyright 2012-2013 Tavendo GmbH
+##
+##  Licensed under the Apache License, Version 2.0 (the "License");
+##  you may not use this file except in compliance with the License.
+##  You may obtain a copy of the License at
+##
+##      http://www.apache.org/licenses/LICENSE-2.0
+##
+##  Unless required by applicable law or agreed to in writing, software
+##  distributed under the License is distributed on an "AS IS" BASIS,
+##  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+##  See the License for the specific language governing permissions and
+##  limitations under the License.
+##
+###############################################################################
+
+
+## use Cython implementation of XorMasker if available
+##
+try:
+   from wsaccel.xormask import XorMaskerNull, createXorMasker
+
+except ImportError:
+   ## fallback to pure Python implementation
+
+   from array import array
+
+   class XorMaskerNull:
+
+      def __init__(self, mask = None):
+         self.ptr = 0
+
+      def pointer(self):
+         return self.ptr
+
+      def reset(self):
+         self.ptr = 0
+
+      def process(self, data):
+         self.ptr += len(data)
+         return data
+
+
+   class XorMaskerSimple:
+
+      def __init__(self, mask):
+         assert len(mask) == 4
+         self.ptr = 0
+         self.msk = array('B', mask)
+
+      def pointer(self):
+         return self.ptr
+
+      def reset(self):
+         self.ptr = 0
+
+      def process(self, data):
+         dlen = len(data)
+         payload = array('B', data)
+         for k in xrange(dlen):
+            payload[k] ^= self.msk[self.ptr & 3]
+            self.ptr += 1
+         return payload.tostring()
+
+
+   class XorMaskerShifted1:
+
+      def __init__(self, mask):
+         assert len(mask) == 4
+         self.ptr = 0
+         self.mskarray = [array('B'), array('B'), array('B'), array('B')]
+         for j in xrange(4):
+            self.mskarray[0].append(ord(mask[ j & 3]))
+            self.mskarray[1].append(ord(mask[(j + 1) & 3]))
+            self.mskarray[2].append(ord(mask[(j + 2) & 3]))
+            self.mskarray[3].append(ord(mask[(j + 3) & 3]))
+
+      def pointer(self):
+         return self.ptr
+
+      def reset(self):
+         self.ptr = 0
+
+      def process(self, data):
+         dlen = len(data)
+         payload = array('B', data)
+         msk = self.mskarray[self.ptr & 3]
+         for k in xrange(dlen):
+            payload[k] ^= msk[k & 3]
+         self.ptr += dlen
+         return payload.tostring()
+
+
+   def createXorMasker(mask, len = None):
+      if len is None or len < 128:
+         return XorMaskerSimple(mask)
+      else:
+         return XorMaskerShifted1(mask)
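
The factory above mirrors the wsaccel API it replaces: createXorMasker() returns the simple masker for short frames and the shifted, table-based masker for payloads of 128 bytes or more. A small usage sketch of the fallback path (Python 2, like the module itself; the sample key and payload are arbitrary):

   from autobahn.xormasker import createXorMasker

   key = '\x01\x02\x03\x04'             # 4-byte WebSocket masking key
   masker = createXorMasker(key, 256)   # length >= 128 selects XorMaskerShifted1
   masked = masker.process('hello world')

   # XOR masking is its own inverse, so a fresh masker with the same key unmasks.
   assert createXorMasker(key, len(masked)).process(masked) == 'hello world'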
diff --git a/ThirdParty/AutobahnPython/module.cmake b/ThirdParty/AutobahnPython/module.cmake
new file mode 100644
index 0000000..515de48
--- /dev/null
+++ b/ThirdParty/AutobahnPython/module.cmake
@@ -0,0 +1,5 @@
+vtk_module(AutobahnPython
+  DEPENDS
+    Twisted
+    vtkPython
+  EXCLUDE_FROM_WRAPPING)
diff --git a/ThirdParty/Cosmo/BasicDefinition.h b/ThirdParty/Cosmo/BasicDefinition.h
deleted file mode 100644
index ff8f3de..0000000
--- a/ThirdParty/Cosmo/BasicDefinition.h
+++ /dev/null
@@ -1,244 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-#ifndef BasicDefinition_h
-#define BasicDefinition_h
-
-#ifdef USE_VTK_COSMO
-#include "vtkType.h"
-#else
-#include <stdint.h>
-#endif
-
-///////////////////////////////////////////////////////////////////////////
-//
-
-#ifdef USE_VTK_COSMO
-#ifdef ID_64
-   typedef      vtkTypeInt64 ID_T;           // Particle and halo ids
-#else
-   typedef      vtkTypeInt32 ID_T;           // Particle and halo ids
-#endif
-#else
-#ifdef ID_64
-   typedef      int64_t ID_T;           // Particle and halo ids
-#else
-   typedef      int32_t ID_T;           // Particle and halo ids
-#endif
-#endif
-
-#ifdef POSVEL_64
-   typedef      double  POSVEL_T;       // Position,velocity
-   typedef      double  POTENTIAL_T;    // Potential
-#else
-   typedef      float   POSVEL_T;       // Position,velocity
-   typedef      float   POTENTIAL_T;    // Potential
-#endif
-
-#ifdef GRID_64
-   typedef      double  GRID_T;         // Grid types
-#else
-   typedef      float   GRID_T;         // Grid types
-#endif
-
-#ifdef USE_VTK_COSMO
-typedef vtkTypeInt32    STATUS_T; // Dead (which neighbor) or alive particles
-typedef vtkTypeUInt16   MASK_T;   // Other particle information
-#else
-typedef int32_t         STATUS_T; // Dead (which neighbor) or alive particles
-typedef uint16_t        MASK_T;   // Other particle information
-#endif
-
-///////////////////////////////////////////////////////////////////////////
-
-const float MAX_FLOAT   = 1.0e15;
-const float MIN_FLOAT   = -1.0e15;
-
-const int   RECORD      = 0;    // Input data is by particle record
-const int   BLOCK       = 1;    // Input data is blocked by variable
-
-const int   DIMENSION   = 3;
-const int   BUF_SZ      = 512;  // Character buffer
-
-// Constants for Spherical Over Dense calculation
-const double CHAIN_SIZE         = 2.0;           // Size for bucket mesh
-const double RHO_C              = 2.77536627e11; // Critical density
-                                                 // in (M_sun/h) / (Mpc/h)^3
-const double RHO_RATIO          = 200.0;         // density/critical density
-const double SOD_MASS           = 1.0e14;        // for initial SOD radius
-                                                 // in (M_sun/h)
-const double MIN_RADIUS_FACTOR  = 0.5;           // Factor of initial SOD radius
-const double MAX_RADIUS_FACTOR  = 2.0;           // Factor of initial SOD radius
-const int    MIN_SOD_SIZE       = 1000;          // Min FOF halo for SOD
-const float  MIN_SOD_MASS       = 5.0e12;        // Min FOF mass for SOD
-const int    NUM_SOD_BINS       = 20;            // Log bins for SOD halo
-
-// Constants for subhalo finding
-const double GRAVITY_C          = 43.015e-10;    // Gravitational constant for
-                                                 // potential energy
-
-// Cosmology record data in .cosmo format
-const int   COSMO_FLOAT = 7;    // x,y,z location and velocity plus mass
-const int   COSMO_INT   = 1;    // Particle id
-const int   RECORD_SIZE = sizeof(POSVEL_T) * COSMO_FLOAT + 
-                          sizeof(ID_T) * COSMO_INT;
-
-const bool  ENFORCE_MAX_READ = false;
-const int   MAX_READ    = 8000000;
-                                // Maximum number of particles to read at a time
-                                // Multiplied by COSMO_FLOAT floats
-                                // makes the largest MPI allowed buffer
-
-const float DEAD_FACTOR = 1.20f; // Number of dead allocated is % more than max
-
-const int   ALIVE       = -1;   // Particle belongs to this processor
-const int   MIXED       = ALIVE - 1;
-                                // For a trick to quickly know what
-                                // particles should be output
-
-const int   UNMARKED    = -1;   // Mixed halo needs MASTER to arbitrate
-const int   INVALID     = 0;    // Mixed halo is not recorded on processor
-const int   VALID       = 1;    // Mixed halo is recorded on processor
-
-const int   MASTER      = 0;    // Processor to do merge step
-
-const int   MERGE_COUNT = 20;   // Number of tags to merge on in mixed
-
-// Parameters for center finding
-const int   MBP_THRESHOLD = 5000; // Threshold between n^2 and AStar methods
-const int   MCP_THRESHOLD = 8000; // Threshold between n^2 and Chain methods
-const int   MCP_CHAIN_FACTOR = 5; // Subdivide bb for building chaining mesh
-
-//
-// Neighbors are enumerated so that particles can be attached to the correct
-// neighbor, but these pairs must be preserved for the ParticleExchange.
-// Every processor should be able to send and receive on every iteration of
-// the exchange, so if everyone sends RIGHT and receives LEFT it works
-//
-// Do not change this pairing order.
-//
-enum NEIGHBOR
-{
-  X0,                   // Left face
-  X1,                   // Right face
-
-  Y0,                   // Bottom face
-  Y1,                   // Top face
-
-  Z0,                   // Front face
-  Z1,                   // Back face
-
-  X0_Y0,                // Left   bottom edge
-  X1_Y1,                // Right  top    edge
-
-  X0_Y1,                // Left   top    edge
-  X1_Y0,                // Right  bottom edge
-
-  Y0_Z0,                // Bottom front  edge
-  Y1_Z1,                // Top    back   edge
-
-  Y0_Z1,                // Bottom back   edge
-  Y1_Z0,                // Top    front  edge
-
-  Z0_X0,                // Front  left   edge
-  Z1_X1,                // Back   right  edge
-
-  Z0_X1,                // Front  right  edge
-  Z1_X0,                // Back   left   edge
-
-  X0_Y0_Z0,             // Left  bottom front corner
-  X1_Y1_Z1,             // Right top    back  corner
-
-  X0_Y0_Z1,             // Left  bottom back  corner
-  X1_Y1_Z0,             // Right top    front corner
-
-  X0_Y1_Z0,             // Left  top    front corner
-  X1_Y0_Z1,             // Right bottom back  corner
-
-  X0_Y1_Z1,             // Left  top    back  corner
-  X1_Y0_Z0              // Right bottom front corner
-};
-
-const int NUM_OF_NEIGHBORS      = 26;
-
-// Header for Gadget input files
-const int GADGET_GAS            = 0;
-const int GADGET_HALO           = 1;
-const int GADGET_DISK           = 2;
-const int GADGET_BULGE          = 3;
-const int GADGET_STARS          = 4;
-const int GADGET_BOUND          = 5;
-const int NUM_GADGET_TYPES      = 6;    // Types of gadget particles
-
-const int GADGET_HEADER_SIZE    = 256;  // Size when the endian matches
-const int GADGET_HEADER_SIZE_SWP= 65536;// Size when the endian doesn't match
-const int GADGET_FILL           = 60;   // Current fill to HEADER SIZE
-const int GADGET_SKIP           = 4;    // Bytes that indicate block size
-const int GADGET_2_SKIP         = 16;   // Extra bytes in gadget-2
-
-const int GADGET_1              = 1;
-const int GADGET_2              = 2;
-
-struct GadgetHeader {
-  int      npart[NUM_GADGET_TYPES];
-  double   mass[NUM_GADGET_TYPES];
-  double   time;
-  double   redshift;
-  int      flag_sfr;
-  int      flag_feedback;
-  int      npartTotal[NUM_GADGET_TYPES];
-  int      flag_cooling;
-  int      num_files;
-  double   BoxSize;
-  double   Omega0;
-  double   OmegaLambda;
-  double   HubbleParam;
-  int      flag_stellarage;
-  int      flag_metals;
-  int      HighWord[NUM_GADGET_TYPES];
-  int      flag_entropy;
-  char     fill[GADGET_FILL];
-};
-
-#endif
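
One property worth noting about the NEIGHBOR ordering above: send/receive partners are listed as adjacent pairs starting on even indices, so the exchange partner of neighbor n is simply n ^ 1. This is an observation about the enum layout rather than something stated explicitly in the header; a quick Python check of it:

   # Enum names copied in order from the NEIGHBOR declaration above.
   NEIGHBORS = [
      "X0", "X1", "Y0", "Y1", "Z0", "Z1",
      "X0_Y0", "X1_Y1", "X0_Y1", "X1_Y0",
      "Y0_Z0", "Y1_Z1", "Y0_Z1", "Y1_Z0",
      "Z0_X0", "Z1_X1", "Z0_X1", "Z1_X0",
      "X0_Y0_Z0", "X1_Y1_Z1", "X0_Y0_Z1", "X1_Y1_Z0",
      "X0_Y1_Z0", "X1_Y0_Z1", "X0_Y1_Z1", "X1_Y0_Z0",
   ]

   def exchange_partner(n):
      # Flipping the low bit maps each face/edge/corner to its opposite.
      return n ^ 1

   assert NEIGHBORS[exchange_partner(NEIGHBORS.index("X0"))] == "X1"
   assert NEIGHBORS[exchange_partner(NEIGHBORS.index("Y1_Z0"))] == "Y0_Z1"
   assert NEIGHBORS[exchange_partner(NEIGHBORS.index("X0_Y1_Z1"))] == "X1_Y0_Z0"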
diff --git a/ThirdParty/Cosmo/CMakeLists.txt b/ThirdParty/Cosmo/CMakeLists.txt
deleted file mode 100644
index 75255eb..0000000
--- a/ThirdParty/Cosmo/CMakeLists.txt
+++ /dev/null
@@ -1,66 +0,0 @@
- 
-project(Cosmo)
-
-set(vtkCosmo_THIRD_PARTY 1)
-set(vtkCosmo_LIBRARIES Cosmo)
-vtk_module_export_info()
-
-# Build with or without MPI; for now do this rather than compiling both a
-# parallel and a non-parallel version
-option(VTK_COSMO_USE_MPI "Build Cosmo with MPI" OFF)
-mark_as_advanced(VTK_COSMO_USE_MPI)
-
-#SET(Cosmo_BUILD_SHARED_LIBS ${BUILD_SHARED_LIBS})
-SET(Cosmo_LIBS)
-if(VTK_COSMO_USE_MPI)
-  find_package(MPI REQUIRED)
-  include_directories(${MPI_C_INCLUDE_PATH})
-  set(Cosmo_LIBS ${MPI_C_LIBRARIES})
-  if (MPI_CXX_LIBRARIES)
-    set (Cosmo_LIBS ${Cosmo_LIBS} ${MPI_CXX_LIBRARIES})
-  endif()
-  add_definitions(-DMPICH_IGNORE_CXX_SEEK -DUSE_VTK_COSMO)
-else()
-  add_definitions(-DUSE_SERIAL_COSMO -DUSE_VTK_COSMO)
-endif()
-# Needed for mpich 2
-
-CONFIGURE_FILE(${Cosmo_SOURCE_DIR}/CosmoDefinition.h.in
-               ${Cosmo_BINARY_DIR}/CosmoDefinition.h)
- 
-SET(Cosmo_SOURCES
-  Partition.cxx
-  ParticleDistribute.cxx
-  ParticleExchange.cxx
-  Message.cxx
-  CosmoHaloFinder.cxx
-  CosmoHaloFinderP.cxx
-  FOFHaloProperties.cxx
-  ChainingMesh.cxx
-  HaloCenterFinder.cxx
-  SODHalo.cxx
-)
-
-INCLUDE_DIRECTORIES(${Cosmo_SOURCE_DIR} ${Cosmo_BINARY_DIR})
-
-VTK_ADD_LIBRARY(Cosmo ${Cosmo_SOURCES})
-TARGET_LINK_LIBRARIES(Cosmo ${vtkCommonCore_LIBRARIES} vtksys ${Cosmo_LIBS})
-
-IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
-  INSTALL(FILES
-    ${Cosmo_SOURCE_DIR}/BasicDefinition.h
-    ${Cosmo_SOURCE_DIR}/ChainingMesh.h
-    ${Cosmo_SOURCE_DIR}/CosmoHalo.h
-    ${Cosmo_SOURCE_DIR}/CosmoHaloFinder.h
-    ${Cosmo_SOURCE_DIR}/CosmoHaloFinderP.h
-    ${Cosmo_SOURCE_DIR}/FOFHaloProperties.h
-    ${Cosmo_SOURCE_DIR}/HaloCenterFinder.h
-    ${Cosmo_SOURCE_DIR}/Message.h
-    ${Cosmo_SOURCE_DIR}/ParticleDistribute.h
-    ${Cosmo_SOURCE_DIR}/ParticleExchange.h
-    ${Cosmo_SOURCE_DIR}/Partition.h
-    ${Cosmo_SOURCE_DIR}/winDirent.h
-    ${Cosmo_BINARY_DIR}/CosmoDefinition.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/Cosmo
-    COMPONENT Development)
-ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/Cosmo/ChainingMesh.cxx b/ThirdParty/Cosmo/ChainingMesh.cxx
deleted file mode 100644
index 5eab91a..0000000
--- a/ThirdParty/Cosmo/ChainingMesh.cxx
+++ /dev/null
@@ -1,306 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-#include "Partition.h"
-#include "ChainingMesh.h"
-
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <iomanip>
-#include <set>
-#include <vector>
-#include <algorithm>
-#include <math.h>
-
-using namespace std;
-
-/////////////////////////////////////////////////////////////////////////
-//
-// ChainingMesh assigns all particles on this processor to a 3D mesh 
-// of buckets for more efficient iteration on particles in an area
-//
-/////////////////////////////////////////////////////////////////////////
-
-ChainingMesh::ChainingMesh(
-                        POSVEL_T rL,
-                        POSVEL_T deadSz,
-                        POSVEL_T chainSz,
-                        vector<POSVEL_T>* xLoc,
-                        vector<POSVEL_T>* yLoc,
-                        vector<POSVEL_T>* zLoc)
-{
-  // Sizes for the entire problem
-  this->boxSize = rL;
-  this->deadSize = deadSz;
-
-  // Imposed bucket size on this processor
-  this->chainSize = chainSz;
-
-  // Extract the contiguous data block from a vector pointer
-  this->particleCount = (long)xLoc->size();
-  this->xx = &(*xLoc)[0];
-  this->yy = &(*yLoc)[0];
-  this->zz = &(*zLoc)[0];
-
-  // Get the number of processors and rank of this processor
-  this->numProc = Partition::getNumProc();
-  this->myProc = Partition::getMyProc();
-
-  // Get the number of processors in each dimension
-  Partition::getDecompSize(this->layoutSize);
-
-  // Get my position within the Cartesian topology
-  Partition::getMyPosition(this->layoutPos);
-
-  // Calculate the physical boundary on this processor for alive particles
-  POSVEL_T boxStep[DIMENSION];
-  POSVEL_T minAlive[DIMENSION];
-  POSVEL_T maxAlive[DIMENSION];
-  this->meshSize = new int[DIMENSION];
-  this->minRange = new POSVEL_T[DIMENSION];
-  this->maxRange = new POSVEL_T[DIMENSION];
-
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    boxStep[dim] = this->boxSize / this->layoutSize[dim];
-   
-    // Region of particles that are alive on this processor
-    minAlive[dim] = this->layoutPos[dim] * boxStep[dim];
-    maxAlive[dim] = minAlive[dim] + boxStep[dim];
-    if (maxAlive[dim] > this->boxSize)
-      maxAlive[dim] = this->boxSize;
-      
-    // Allow for the boundary of dead particles, normalized to 0
-    // Overall boundary will be [0:(rL+2*deadSize)]
-    this->minRange[dim] = minAlive[dim] - this->deadSize;
-    this->maxRange[dim] = maxAlive[dim] + this->deadSize;
-
-    // How many chain mesh grids will fit
-    this->meshSize[dim] = (int)((this->maxRange[dim] - this->minRange[dim]) / 
-				this->chainSize) + 1;
-  }
-
-  // Create the chaining mesh
-  createChainingMesh();
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// ChainingMesh assigns all particles in a halo to a 3D mesh
-// of buckets for more efficient iteration on particles in an area
-//
-/////////////////////////////////////////////////////////////////////////
-
-ChainingMesh::ChainingMesh(
-                        POSVEL_T* minLoc,
-                        POSVEL_T* maxLoc,
-                        POSVEL_T chainSz,
-                        int haloCount,
-                        POSVEL_T* xLoc,
-                        POSVEL_T* yLoc,
-                        POSVEL_T* zLoc)
-{
-  this->meshSize = new int[DIMENSION];
-  this->minRange = new POSVEL_T[DIMENSION];
-  this->maxRange = new POSVEL_T[DIMENSION];
-
-  // Bucket size
-  this->chainSize = chainSz;
-
-  // Extract the contiguous data block from a vector pointer
-  this->particleCount = haloCount;
-  this->xx = xLoc;
-  this->yy = yLoc;
-  this->zz = zLoc;
-
-  // Find the grid size of this chaining mesh
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    this->minRange[dim] = minLoc[dim];
-    this->maxRange[dim] = maxLoc[dim];
-    this->meshSize[dim] = (int)((this->maxRange[dim] - this->minRange[dim]) / 
-				this->chainSize) + 1;
-  }
-
-  // Create the chaining mesh
-  createChainingMesh();
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Destructor of buckets
-//
-/////////////////////////////////////////////////////////////////////////
-
-ChainingMesh::~ChainingMesh()
-{
-  for (int i = 0; i < this->meshSize[0]; i++) {
-    for (int j = 0; j < this->meshSize[1]; j++) {
-      delete [] this->buckets[i][j];
-      delete [] this->bucketCount[i][j];
-    }
-    delete [] this->buckets[i];
-    delete [] this->bucketCount[i];
-  }
-  delete [] this->buckets;
-  delete [] this->bucketCount;
-  delete [] this->bucketList;
-  delete [] this->meshSize;
-  delete [] this->minRange;
-  delete [] this->maxRange;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Create the chaining mesh which organizes particles into location grids
-// by creating buckets of locations and chaining the indices of the
-// particles so that all particles in a bucket can be located
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ChainingMesh::createChainingMesh()
-{
-  // Create the bucket grid and initialize to -1
-  this->buckets = new int**[this->meshSize[0]];
-  this->bucketCount = new int**[this->meshSize[0]];
-
-  for (int i = 0; i < this->meshSize[0]; i++) {
-    this->buckets[i] = new int*[this->meshSize[1]];
-    this->bucketCount[i] = new int*[this->meshSize[1]];
-
-    for (int j = 0; j < this->meshSize[1]; j++) {
-      this->buckets[i][j] = new int[this->meshSize[2]];
-      this->bucketCount[i][j] = new int[this->meshSize[2]];
-
-      for (int k = 0; k < this->meshSize[2]; k++) {
-        this->buckets[i][j][k] = -1;
-        this->bucketCount[i][j][k] = 0;
-      }
-    }
-  }
-
-  // Create the chaining list of particles and initialize to -1
-  this->bucketList = new int[this->particleCount];
-  for (int p = 0; p < this->particleCount; p++)
-    this->bucketList[p] = -1;
-
-  // Iterate over all particles on this processor and assign each to a bucket.
-  // The first particle index in a bucket is stored directly in the bucket grid.
-  // When a later particle lands in the same bucket, the index already in the
-  // grid is moved to the new particle's slot in bucketList and the grid entry
-  // is updated to point at the new particle (head insertion).
-  // To iterate through all particles in a bucket, start with the index in the
-  // buckets grid and follow it through bucketList until -1 is reached.
-
-  for (int p = 0; p < this->particleCount; p++) {
-
-    POSVEL_T loc[DIMENSION];
-    loc[0] = this->xx[p];
-    loc[1] = this->yy[p];
-    loc[2] = this->zz[p];
-
-    int i = (int)((loc[0] - this->minRange[0]) / this->chainSize);
-    int j = (int)((loc[1] - this->minRange[1]) / this->chainSize);
-    int k = (int)((loc[2] - this->minRange[2]) / this->chainSize);
-
-    // First particle in bucket
-    if (this->buckets[i][j][k] == -1) {
-      this->buckets[i][j][k] = p;
-      this->bucketCount[i][j][k]++;
-    }
-
-    // Other particles in same bucket
-    else {
-      this->bucketList[p] = this->buckets[i][j][k];
-      this->buckets[i][j][k] = p;
-      this->bucketCount[i][j][k]++;
-    }
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Demonstration method to show how to iterate over the chaining mesh
-// Calculate the centroid of each bucket
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ChainingMesh::printChainingMeshCentroids()
-{
-  // Test by calculating centroid of each bucket grid
-  POSVEL_T centroid[DIMENSION];
-
-  // Iterate on all particles in a bucket
-  for (int i = 0; i < this->meshSize[0]; i++) {
-    for (int j = 0; j < this->meshSize[1]; j++) {
-      for (int k = 0; k < this->meshSize[2]; k++) {
-
-        centroid[0] = 0.0;
-        centroid[1] = 0.0;
-        centroid[2] = 0.0;
-    
-        // First particle in the bucket
-        int p = this->buckets[i][j][k];
-
-        while (p != -1) {
-          centroid[0] += this->xx[p];
-          centroid[1] += this->yy[p];
-          centroid[2] += this->zz[p];
-
-          // Next particle in the bucket
-          p = this->bucketList[p];
-        }
-        for (int dim = 0; dim < DIMENSION; dim++) {
-          if (centroid[dim] != 0.0)
-            centroid[dim] /= this->bucketCount[i][j][k];
-        }
-#ifndef USE_VTK_COSMO
-        cout << "Bucket " << i << "," << j << "," << k 
-             << " count = " << bucketCount[i][j][k]
-             << " centroid = " << centroid[0] << "," << centroid[1] << "," 
-             << centroid[2] << endl;
-#endif
-      }
-    }
-  }
-}
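
The buckets grid and bucketList together form one intrusive singly linked list per bucket: each grid cell stores the most recently inserted particle index, and bucketList[p] points to the particle inserted before p (or -1 at the end of the chain). A compact sketch of the same head-insertion and traversal pattern, reduced to a flat list of buckets in Python for illustration:

   def build_chaining_list(num_buckets, bucket_of):
      # bucket_of[p] is the bucket index computed for particle p
      buckets = [-1] * num_buckets         # head particle index per bucket
      bucket_list = [-1] * len(bucket_of)  # next particle in the same bucket
      for p, b in enumerate(bucket_of):
         bucket_list[p] = buckets[b]       # chain the previous head behind p
         buckets[b] = p                    # p becomes the new head
      return buckets, bucket_list

   def particles_in_bucket(buckets, bucket_list, b):
      p = buckets[b]
      while p != -1:                       # walk the chain to the -1 sentinel
         yield p
         p = bucket_list[p]

   heads, chain = build_chaining_list(3, [0, 2, 0, 1, 0])
   assert sorted(particles_in_bucket(heads, chain, 0)) == [0, 2, 4]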
diff --git a/ThirdParty/Cosmo/ChainingMesh.h b/ThirdParty/Cosmo/ChainingMesh.h
deleted file mode 100644
index ff9ad4a..0000000
--- a/ThirdParty/Cosmo/ChainingMesh.h
+++ /dev/null
@@ -1,143 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-// .NAME ChainingMesh - Assign all particles on this processor to a 3D bucket
-//
-// .SECTION Description
-// ChainingMesh takes particle locations and assigns particles to a mesh
-// location in a 3D grid so that when an area of interest must be searched,
-// only the particles in buckets for that area will be examined.  The 
-// chaining mesh is designed such that the 3D mesh holds the first particle
-// index in the bucket, and that array position points to the next particle
-// in the bucket.
-//
-
-#ifndef ChainingMesh_h
-#define ChainingMesh_h
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#else
-#include "Definition.h"
-#endif
-
-#include <string>
-#include <vector>
-
-using namespace std;
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT ChainingMesh {
-#else
-class ChainingMesh {
-#endif
-public:
-  // Chaining mesh for all particles on a processor
-  ChainingMesh(
-        POSVEL_T rL,            // Box size of entire physical problem
-        POSVEL_T deadSize,      // Dead size of overflow particles
-        POSVEL_T chainSize,     // Mesh size imposed on this physical space
-        vector<POSVEL_T>* xLoc, // Locations of every particle on processor
-        vector<POSVEL_T>* yLoc,
-        vector<POSVEL_T>* zLoc);
-
-  // Chaining mesh for a single halo
-  ChainingMesh(
-        POSVEL_T* minLoc,       // Bounding box of halo
-        POSVEL_T* maxLoc,       // Bounding box of halo
-        POSVEL_T chainSize,     // Mesh size imposed on this physical space
-        int haloCount,          // Number of particles in halo
-        POSVEL_T* xLoc,         // Locations of every particle
-        POSVEL_T* yLoc,
-        POSVEL_T* zLoc);
-
-  ~ChainingMesh();
-
-  // Construct the chaining mesh
-  void createChainingMesh();
-
-  // Demonstration method to show how to iterate over the chaining mesh
-  void printChainingMeshCentroids();
-
-  POSVEL_T getChainSize()       { return this->chainSize; }
-
-  POSVEL_T getMinMine(int dim)  { return this->minRange[dim]; }
-  POSVEL_T getMaxMine(int dim)  { return this->maxRange[dim]; }
-  int getMeshSize(int dim)      { return this->meshSize[dim]; }
-
-  POSVEL_T* getMinRange()       { return this->minRange; }
-  POSVEL_T* getMaxRange()       { return this->maxRange; }
-  int* getMeshSize()            { return this->meshSize; }
-
-  int*** getBucketCount()       { return this->bucketCount; }
-  int*** getBuckets()           { return this->buckets; }
-  int* getBucketList()          { return this->bucketList; }
-
-private:
-  int    myProc;                // My processor number
-  int    numProc;               // Total number of processors
-
-  int    layoutSize[DIMENSION]; // Decomposition of processors
-  int    layoutPos[DIMENSION];  // Position of this processor in decomposition
-
-  POSVEL_T boxSize;             // Physical box size of the data set
-  POSVEL_T deadSize;            // Physical size of dead particle region
-
-  long   particleCount;         // Total particles on this processor
-  POSVEL_T* xx;                 // X location for particles on this processor
-  POSVEL_T* yy;                 // Y location for particles on this processor
-  POSVEL_T* zz;                 // Z location for particles on this processor
-                                // processor index where it is ALIVE
-
-  POSVEL_T chainSize;           // Grid size in chaining mesh
-  POSVEL_T* minRange;           // Physical range on processor, including dead
-  POSVEL_T* maxRange;           // Physical range on processor, including dead
-  int* meshSize;                // Chaining mesh grid dimension
-
-  int*** buckets;               // First particle index into bucketList
-  int*** bucketCount;           // Size of each bucket 
-  int* bucketList;              // Indices of next particle in halo
-};
-
-#endif
diff --git a/ThirdParty/Cosmo/CosmoDefinition.h.in b/ThirdParty/Cosmo/CosmoDefinition.h.in
deleted file mode 100644
index ad9752a..0000000
--- a/ThirdParty/Cosmo/CosmoDefinition.h.in
+++ /dev/null
@@ -1,70 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-#ifndef CosmoDefinition_h
-#define CosmoDefinition_h
-
-#include <vtksys/Configure.h>
-#include "vtkABI.h"
-
-#cmakedefine BUILD_SHARED_LIBS
-
-// Now set up all of the export macros
-#if defined(BUILD_SHARED_LIBS)
- #if defined(Cosmo_EXPORTS)
-  #define COSMO_EXPORT VTK_ABI_EXPORT
- #else
-  #define COSMO_EXPORT VTK_ABI_IMPORT
- #endif
-#else
- #define COSMO_EXPORT
-#endif
-
-#include "BasicDefinition.h"
-
-#ifndef USE_SERIAL_COSMO
-#include <mpi.h>
-#endif
-
-#endif
diff --git a/ThirdParty/Cosmo/CosmoHalo.h b/ThirdParty/Cosmo/CosmoHalo.h
deleted file mode 100644
index beb59a1..0000000
--- a/ThirdParty/Cosmo/CosmoHalo.h
+++ /dev/null
@@ -1,157 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-#ifndef CosmoHalo_h
-#define CosmoHalo_h
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#else
-#include "Definition.h"
-#endif
-
-#include <string>
-#include <vector>
-#include <set>
-#include <algorithm>
-
-using namespace std;
-
-////////////////////////////////////////////////////////////////////////////
-//
-// CosmoHalo functions as a container for mixed halos received from the serial
-// halo finder where the particle vector contains the index of the particle
-// on a particular processor and the tag of that particle for the problem.
-//
-// It also functions as a merge container for MASTER processor where it
-// contains the mixed halos crossing more than one boundary.
-//
-////////////////////////////////////////////////////////////////////////////
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT CosmoHalo {
-#else
-class CosmoHalo {
-#endif
-public:
-  CosmoHalo(ID_T id, int alive, int dead)
-                {
-                  this->numberOfAlive = alive;
-                  this->numberOfDead = dead;
-                  this->haloID = id;
-                  this->valid = VALID;
-                  this->particles = new vector<ID_T>;
-                  this->tags = new vector<ID_T>;
-                  this->neighbors = new set<int>;
-                  this->partners = new set<int>;
-                }
-  ~CosmoHalo()
-                {
-                  delete this->particles;
-                  delete this->tags;
-                  delete this->neighbors;
-                  delete this->partners;
-                }
-
-  // Add a particle index for this halo on this processor
-  // Add to the neighbor zones to know how many processors share this halo
-  void addParticle(ID_T indx, ID_T tag, int neighbor)
-                {
-                  this->particles->push_back(indx);
-                  this->tags->push_back(tag);
-                  if (neighbor != ALIVE)
-                    this->neighbors->insert(neighbor);
-                }
-
-  // Add a mixed particle
-  void addParticle(ID_T tag)
-                {
-                  this->tags->push_back(tag);
-                }
-
-  // Add a matching mixed halo index indicating same halo
-  void addPartner(int index)
-                {
-                  this->partners->insert(index);
-                }
-
-  // Sort the members to help identify the same halo on multiple processors
-  void sortParticleTags()
-                {
-                  sort(this->tags->begin(), this->tags->end());
-                }
-
-  void         setAliveCount(int c)     { this->numberOfAlive = c; }
-  void         setDeadCount(int c)      { this->numberOfDead = c; }
-  void         setRankID(int rank)      { this->rankID = rank; }
-  void         setValid(int v)          { this->valid = v; }
-
-  ID_T         getHaloID()              { return this->haloID; }
-  int          getRankID()              { return this->rankID; }
-  int          getAliveCount()          { return this->numberOfAlive; }
-  int          getDeadCount()           { return this->numberOfDead; }
-  int          getValid()               { return this->valid; }
-
-  vector<ID_T>* getParticles()          { return this->particles; }
-  vector<ID_T>* getTags()               { return this->tags; }
-  set<int>*    getNeighbors()           { return this->neighbors; }
-  set<int>*    getPartners()            { return this->partners; }
-
-
-private:
-  ID_T haloID;                  // Halo id is smallest particle index/tag
-  int rankID;                   // Processor which owns this halo
-
-  vector<ID_T>* particles;      // Index of halo particle on this processor
-  vector<ID_T>* tags;           // Tag of halo particle
-  set<int>* neighbors;          // Zones with dead particles from this halo
-  set<int>* partners;           // Index of matching mixed halo
-
-  int numberOfAlive;            // Number of alive particles in halo
-  int numberOfDead;             // Number of dead particles in halo
-
-  int valid;                    // Mixed halo to be used or not
-};
-
-#endif
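
sortParticleTags() exists so that two processors' views of the same mixed halo can be compared cheaply: once each side's tag vector is sorted, a single linear sweep finds the shared particle tags. The merge itself happens elsewhere in the halo finder; the Python fragment below only illustrates the sorted-intersection idea and uses made-up tag values:

   def shared_tags(tags_a, tags_b):
      # Both inputs are sorted, as after CosmoHalo::sortParticleTags().
      i = j = 0
      shared = []
      while i < len(tags_a) and j < len(tags_b):
         if tags_a[i] == tags_b[j]:
            shared.append(tags_a[i])
            i += 1
            j += 1
         elif tags_a[i] < tags_b[j]:
            i += 1
         else:
            j += 1
      return shared

   # Two processors report overlapping tag lists for a halo crossing their boundary.
   assert shared_tags([3, 7, 12, 40], [7, 12, 55]) == [7, 12]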
diff --git a/ThirdParty/Cosmo/CosmoHaloFinder.cxx b/ThirdParty/Cosmo/CosmoHaloFinder.cxx
deleted file mode 100644
index 72b1dc7..0000000
--- a/ThirdParty/Cosmo/CosmoHaloFinder.cxx
+++ /dev/null
@@ -1,581 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-#include "CosmoHaloFinder.h"
-
-#include <iostream>
-#include <fstream>
-#include <cmath>
-#include <cstdlib>
-#include <algorithm>
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-#include <sys/time.h>
-#endif
-#endif
-
-using namespace std;
-
-/****************************************************************************/
-CosmoHaloFinder::CosmoHaloFinder()
-{
-}
-
-/****************************************************************************/
-CosmoHaloFinder::~CosmoHaloFinder()
-{
-}
-
-#ifndef USE_VTK_COSMO
-/****************************************************************************/
-void CosmoHaloFinder::Execute()
-{
-  cout << "np:       " << np << endl;
-  cout << "rL:       " << rL << endl;
-  cout << "bb:       " << bb << endl;
-  cout << "pmin:     " << pmin << endl;
-  cout << "periodic: " << (periodic ? "true" : "false") << endl;
-
-#ifdef DEBUG
-  timeval tim;
-  gettimeofday(&tim, NULL);
-  double t1=tim.tv_sec+(tim.tv_usec/1000000.0);
-  Reading();
-  gettimeofday(&tim, NULL);
-  double t2=tim.tv_sec+(tim.tv_usec/1000000.0);
-  printf("reading... %.2lfs\n", t2-t1);
-
-  gettimeofday(&tim, NULL);
-  t1=tim.tv_sec+(tim.tv_usec/1000000.0);
-  Finding();
-  gettimeofday(&tim, NULL);
-  t2=tim.tv_sec+(tim.tv_usec/1000000.0);
-  printf("finding... %.2lfs\n", t2-t1);
-
-  gettimeofday(&tim, NULL);
-  t1=tim.tv_sec+(tim.tv_usec/1000000.0);
-  Writing();
-  gettimeofday(&tim, NULL);
-  t2=tim.tv_sec+(tim.tv_usec/1000000.0);
-  printf("writing... %.2lfs\n", t2-t1);
-#else
-  Reading();
-  Finding();
-  Writing();
-#endif
-}
- 
-/****************************************************************************/
-void CosmoHaloFinder::Reading()
-{
-  // Verify that file exists and is readable
-  if ( !infile ) {
-    cout << "No input file specified" << endl;
-    exit (-1);
-  }
-
-  // Open the file and make sure everything is ok.
-  ifstream *FileStream = new ifstream(infile, ios::in);
-  if (FileStream->fail()) {
-    delete FileStream;
-    cout << "File: " << infile << " cannot be opened" << endl;
-    exit (-1);
-  }
-
-  // compute the number of particles
-  FileStream->seekg(0L, ios::end);
-  npart = FileStream->tellg() / 32;
-
-  cout << "npart:    " << npart << endl;
-
-  // these arrays are only used in the writing phase
-  xx = new POSVEL_T[npart];
-  yy = new POSVEL_T[npart];
-  zz = new POSVEL_T[npart];
-  vx = new POSVEL_T[npart];
-  vy = new POSVEL_T[npart];
-  vz = new POSVEL_T[npart];
-  ms = new POSVEL_T[npart];
-  pt = new int[npart];
-
-  // rewind file to beginning for particle reads
-  FileStream->seekg(0L, ios::beg);
-
-  // create dataspace
-  data = new POSVEL_T*[numDataDims];
-  for (int i=0; i<numDataDims; i++)
-    data[i] = new POSVEL_T[npart];
-
-  // declare temporary read buffers
-  int nfloat = 7, nint = 1;
-  POSVEL_T fBlock[nfloat];
-  ID_T iBlock[nint];
-
-  // Loop to read and scale all particles
-  xscal = rL / (1.0*np);
-
-  for (int i=0; i<npart; i++)
-  {
-    // Set file pointer to the requested particle
-    FileStream->read((char *)fBlock, nfloat * sizeof(float));
-
-    if (FileStream->gcount() != (int)(nfloat * sizeof(float))) {
-      cout << "Premature end-of-file" << endl;
-      exit (-1);
-    }
-
-    FileStream->read((char *)iBlock, nint * sizeof(int));
-    if (FileStream->gcount() != (int)(nint * sizeof(int))) {
-      cout << "Premature end-of-file" << endl;
-      exit (-1);
-    }
-
-    // These files are always little-endian
-    //vtkByteSwap::Swap4LERange(fBlock, nfloat);
-    //vtkByteSwap::Swap4LERange(iBlock, nint);
-
-    // sanity check
-    if (fBlock[0] > rL || fBlock[2] > rL || fBlock[4] > rL) {
-      cout << "rL is too small" << endl; 
-      exit (-1);
-    }
-
-    data[dataX][i] = fBlock[0] / xscal;
-    data[dataY][i] = fBlock[2] / xscal;
-    data[dataZ][i] = fBlock[4] / xscal;
-
-    // these assignments are only used in the writing phase.
-    xx[i] = fBlock[0];
-    vx[i] = fBlock[1];
-    yy[i] = fBlock[2];
-    vy[i] = fBlock[3];
-    zz[i] = fBlock[4];
-    vz[i] = fBlock[5];
-    ms[i] = fBlock[6];
-    pt[i] = iBlock[0]; 
-        
-  } // i-loop
-  
-  delete FileStream;
-
-  return;
-}
-
-/****************************************************************************/
-void CosmoHaloFinder::Writing()
-{
-  // compute halos statistics
-  hsize = new int[npart];
-  for (int h=0; h<npart; h++)
-    hsize[h] = 0;
-
-  for (int i=0; i<npart; i++)
-    hsize[ht[i]] += 1;
-
-  nhalo = 0;
-  for (int h=0; h<npart; h++) {
-    if (hsize[h] >= pmin)
-      nhalo++;
-  }
-
-  cout << "nhalo:    " << nhalo << endl;
-
-  nhalopart = 0;
-  for (int i=0; i<npart; i++)
-    if (hsize[ht[i]] >= pmin)
-      nhalopart++;
-
-  cout << "nhalopart:" << nhalopart << endl;
-
-  // Verify that file exists and is writable
-  if ( !outfile ) {
-    cout << "No output file specified" << endl;
-    exit (-1);
-  }
-
-  // Open the file and make sure everything is ok.
-  ofstream *FileStream = new ofstream(outfile, ios::out);
-  if (FileStream->fail()) {
-    delete FileStream;
-    cout << "File: " << outfile << " cannot be opened" << endl;
-    exit (-1);
-  }
-
-  for (int i=0; i<npart; i++)
-  {
-    // output in ASCII form
-    char str[1024];
-    sprintf(str, "%12.4E %12.4E", xx[i], vx[i]); *FileStream << str;
-    sprintf(str, " %12.4E %12.4E", yy[i], vy[i]); *FileStream << str;
-    sprintf(str, " %12.4E %12.4E", zz[i], vz[i]); *FileStream << str;
-    sprintf(str, " %12d", (hsize[ht[i]] < pmin) ? -1: pt[ht[i]]); *FileStream << str;
-    sprintf(str, " %12d", pt[i]); *FileStream << str;
-    *FileStream << "\n";
-  } // i-loop
-
-  delete FileStream;
-  delete [] hsize;
-
-  // done
-  return;
-}
-#endif  // #ifndef USE_VTK_COSMO
-
-/****************************************************************************/
-void CosmoHaloFinder::Finding()
-{
-  //
-  // REORDER particles based on spatial locality
-  //
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  timeval tim;
-  gettimeofday(&tim, NULL);
-  double t1=tim.tv_sec+(tim.tv_usec/1000000.0);
-#endif
-#endif
-
-  v = new ValueIdPair[npart];
-  for (int i = 0; i < npart; i++)
-    v[i].id = i;
-
-  Reorder(0, npart, dataX);
-
-  seq = new int[npart];
-  for (int i=0; i<npart; i++)
-    seq[i] = v[i].id;
-
-  delete [] v;
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  gettimeofday(&tim, NULL);
-  double t2=tim.tv_sec+(tim.tv_usec/1000000.0);
-  printf("reorder... %.2lfs\n", t2-t1);
-#endif
-#endif
-
-  //
-  // COMPUTE interval bounding box
-  //
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  gettimeofday(&tim, NULL);
-  t1=tim.tv_sec+(tim.tv_usec/1000000.0);
-#endif
-#endif
-
-  lb = new floatptr[numDataDims];
-  for (int i=0; i<numDataDims; i++)
-    lb[i] = new POSVEL_T[npart];
-
-  ub = new floatptr[numDataDims];
-  for (int i=0; i<numDataDims; i++)
-    ub[i] = new POSVEL_T[npart];
-
-  ComputeLU(0, npart);
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  gettimeofday(&tim, NULL);
-  t2=tim.tv_sec+(tim.tv_usec/1000000.0);
-  printf("computeLU... %.2lfs\n", t2-t1);
-#endif
-#endif
-
-  //
-  // FIND HALOS using friends-of-friends metric
-  //
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  gettimeofday(&tim, NULL);
-  t1=tim.tv_sec+(tim.tv_usec/1000000.0);
-#endif
-#endif
-
-  // create ht[] to store halo assignment.
-  ht = new int[npart];
-  for (int i=0; i<npart; i++)
-    ht[i] = i;
-
-  // create workspace for halo finder.
-  halo  = new int[npart];
-  nextp = new int[npart];
-
-  for (int i=0; i<npart; i++) {
-    halo[i] = i;
-    nextp[i] = -1;
-  }
-
-  myFOF(0, npart, dataX);
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  gettimeofday(&tim, NULL);
-  t2=tim.tv_sec+(tim.tv_usec/1000000.0);
-  printf("myFOF... %.2lfs\n", t2-t1);
-#endif
-#endif
-
-  //
-  // CLEANUP
-  //
-  for (int i=0; i<numDataDims; i++)
-    delete [] ub[i];
-
-  for (int i=0; i<numDataDims; i++)
-    delete [] lb[i];
-
-  delete [] seq;
-
-  // done!
-  return;
-}
-
-/****************************************************************************/
-void CosmoHaloFinder::Reorder(int first,
-                              int last, 
-                              int dataFlag)
-{
-  int len = last - first;
-
-  // base case
-  if (len == 1)
-    return;
-
-  // non-base cases
-  for(int i = first; i < last; i++)
-    v[i].value = data[dataFlag][v[i].id];
-
-  // divide
-  int half = len >> 1;
-  nth_element(&v[first], &v[first+half], &v[last], ValueIdPairLT());
-
-  Reorder(first, first+half, (dataFlag+1)%3);
-  Reorder(first+half,  last, (dataFlag+1)%3);
-
-  // done
-  return;
-}
-
-/****************************************************************************/
-void CosmoHaloFinder::ComputeLU(int first, int last)
-{
-  int len = last - first;
-    
-  int middle  = first + len/2;
-  int middle1 = first + len/4;
-  int middle2 = first + 3*len/4;
-  
-  // base cases
-  if (len == 2) {
-    int ii = seq[first];
-    int jj = seq[first+1];
-
-    lb[dataX][middle] = min(data[dataX][ii], data[dataX][jj]);
-    lb[dataY][middle] = min(data[dataY][ii], data[dataY][jj]);
-    lb[dataZ][middle] = min(data[dataZ][ii], data[dataZ][jj]);
-
-    ub[dataX][middle] = max(data[dataX][ii], data[dataX][jj]);
-    ub[dataY][middle] = max(data[dataY][ii], data[dataY][jj]);
-    ub[dataZ][middle] = max(data[dataZ][ii], data[dataZ][jj]);
-
-    return;
-  }
-
-  // this case is needed when npart is a non-power-of-two
-  if (len == 3) {
-    // fill lb[][middle2] and ub[][middle2]
-    ComputeLU(first+1, last);
-
-    int ii = seq[first];
-
-    lb[dataX][middle] = min(data[dataX][ii], lb[dataX][middle2]);
-    lb[dataY][middle] = min(data[dataY][ii], lb[dataY][middle2]);
-    lb[dataZ][middle] = min(data[dataZ][ii], lb[dataZ][middle2]);
-
-    ub[dataX][middle] = max(data[dataX][ii], ub[dataX][middle2]);
-    ub[dataY][middle] = max(data[dataY][ii], ub[dataY][middle2]);
-    ub[dataZ][middle] = max(data[dataZ][ii], ub[dataZ][middle2]);
-
-    return;
-  }
-
-  // non-base cases
-
-  ComputeLU(first, middle);
-  ComputeLU(middle,  last);
-
-  // compute LU at the bottom-up pass
-  lb[dataX][middle] = min(lb[dataX][middle1], lb[dataX][middle2]);
-  lb[dataY][middle] = min(lb[dataY][middle1], lb[dataY][middle2]);
-  lb[dataZ][middle] = min(lb[dataZ][middle1], lb[dataZ][middle2]);
-
-  ub[dataX][middle] = max(ub[dataX][middle1], ub[dataX][middle2]);
-  ub[dataY][middle] = max(ub[dataY][middle1], ub[dataY][middle2]);
-  ub[dataZ][middle] = max(ub[dataZ][middle1], ub[dataZ][middle2]);
-
-  // done
-  return;
-}
-
-/****************************************************************************/
-void CosmoHaloFinder::myFOF(int first, int last, int dataFlag)
-{
-  int len = last - first;
-
-  // base case
-  if (len == 1)
-    return;
-
-  // non-base cases
-
-  // divide
-  int middle = first + len/2;
-
-  myFOF(first, middle, (dataFlag+1)%3);
-  myFOF(middle,  last, (dataFlag+1)%3);
-
-  // recursive merge
-  Merge(first, middle, middle, last, dataFlag);
-
-  // done
-  return;
-}
-
-/****************************************************************************/
-void CosmoHaloFinder::Merge(int first1, int last1, int first2, int last2, int dataFlag)
-{
-  int len1 = last1 - first1;
-  int len2 = last2 - first2;
-
-  // base cases
-  // len1 == 1 || len2 == 1
-  // len1 == 1,2 && len2 == 1,2 (2 for non-power-of-two case)
-  if (len1 == 1 || len2 == 1) {
-    for (int i=0; i<len1; i++)
-    for (int j=0; j<len2; j++) {
-      int ii = seq[first1+i];
-      int jj = seq[first2+j];
-  
-      // fast exit
-      if (ht[ii] == ht[jj])
-        continue;
-  
-      // ht[ii] != ht[jj]
-      POSVEL_T xdist = fabs(data[dataX][jj] - data[dataX][ii]);
-      POSVEL_T ydist = fabs(data[dataY][jj] - data[dataY][ii]);
-      POSVEL_T zdist = fabs(data[dataZ][jj] - data[dataZ][ii]);
-  
-      if (periodic) {
-        xdist = min(xdist, np-xdist);
-        ydist = min(ydist, np-ydist);
-        zdist = min(zdist, np-zdist);
-      }
-  
-      if ((xdist<bb) && (ydist<bb) && (zdist<bb)) {
-  
-        POSVEL_T dist = xdist*xdist + ydist*ydist + zdist*zdist;
-        if (dist < bb*bb) {
-  
-          // union two halos to one
-          int newHaloId = min(ht[ii], ht[jj]);
-          int oldHaloId = max(ht[ii], ht[jj]);
-  
-          // update particles with oldHaloId
-          int last = -1;
-          int ith = halo[oldHaloId];
-          while (ith != -1) {
-            ht[ith] = newHaloId;
-            last = ith;
-            ith = nextp[ith];
-          }
-  
-          // update halo's linked list
-          nextp[last] = halo[newHaloId];
-          halo[newHaloId] = halo[oldHaloId];
-          halo[oldHaloId] = -1;
-        }
-      }
-    } // (i,j)-loop
-
-    return;
-  }
-
-  // non-base case
-
-  // pruning?
-  int middle1 = first1 + len1/2;
-  int middle2 = first2 + len2/2;
-
-  POSVEL_T lL = lb[dataFlag][middle1];
-  POSVEL_T uL = ub[dataFlag][middle1];
-  POSVEL_T lR = lb[dataFlag][middle2];
-  POSVEL_T uR = ub[dataFlag][middle2];
-
-  POSVEL_T dL = uL - lL;
-  POSVEL_T dR = uR - lR;
-  POSVEL_T dc = max(uL,uR) - min(lL,lR);
-
-  POSVEL_T dist = dc - dL - dR;
-  if (periodic)
-    dist = min(dist, np-dc);
-
-  if (dist >= bb)
-    return;
-
-  // continue merging
-
-  // move to the next axis
-  dataFlag = (dataFlag + 1) % 3;
-
-  Merge(first1, middle1,  first2, middle2, dataFlag);
-  Merge(first1, middle1, middle2,   last2, dataFlag);
-  Merge(middle1,  last1,  first2, middle2, dataFlag);
-  Merge(middle1,  last1, middle2,   last2, dataFlag);
-
-  // done
-  return;
-}
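The pruning test at the top of the non-base case above is a one-dimensional gap check between two bounding intervals. A minimal standalone sketch of the same test (using float in place of POSVEL_T), under the same meaning of bb (required linking distance) and np (periodic box size in grid units):

    #include <algorithm>

    // True when the gap between intervals [lL, uL] and [lR, uR] on one axis is
    // already >= bb, so no particle pair can be within bb and the whole branch
    // of the merge can be skipped.
    bool canPrune(float lL, float uL, float lR, float uR,
                  float bb, float np, bool periodic)
    {
      float dL = uL - lL;                              // extent of left interval
      float dR = uR - lR;                              // extent of right interval
      float dc = std::max(uL, uR) - std::min(lL, lR);  // combined extent
      float dist = dc - dL - dR;                       // gap between the intervals
      if (periodic)
        dist = std::min(dist, np - dc);                // gap the other way around the box
      return dist >= bb;
    }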
diff --git a/ThirdParty/Cosmo/CosmoHaloFinder.h b/ThirdParty/Cosmo/CosmoHaloFinder.h
deleted file mode 100644
index ed7e858..0000000
--- a/ThirdParty/Cosmo/CosmoHaloFinder.h
+++ /dev/null
@@ -1,236 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-// .NAME CosmoHaloFinder - find halos within a cosmology data file
-// .SECTION Description
-// CosmoHaloFinder is a filter object that operates on the unstructured 
-// grid created when a CosmoReader reads a .cosmo data file. 
-// It operates by finding clusters of neighbors.
-//
-// .SECTION Note
-// This halo finder implements a recursive algorithm using a k-d tree.
-// Linked lists are used to connect halos found during the recursive merge. 
-// Bounding boxes are calculated for each particle for pruning the merge tree.
-//
-// The halo finder doesn't actually build a tree that can be walked but
-// rather reorganizes the particles into the k-d tree using recursion, such that
-// when myFOF is walked in the same way, the data will match. This is stored
-// in the seq[] array.
-//
-// First step is Reorder().  When it is called the first time it divides all
-// the particles on the X axis such that the particle at the halfway mark in
-// the array is correctly positioned, and all particles in the array below it 
-// have an X value less than it and all particles in the array above it have
-// an X value higher.  Reorder() calls nth_element(), a partial sort that is
-// faster than a full sort.  So the division does not physically divide the
-// space in half but rather divides the number of particles in half along a
-// dimension.
-// 
-// Next step is the first level of recursion.  Each of the halves from above
-// are divided on the Y axis, again such that the number of particles is the
-// same in each half although the physical space is not divided.  Partial
-// ordering is done again by resequencing the seq array.  Each of these now
-// four pieces is divided on the Z axis next, and this continues until there
-// is one particle at the bottom of the tree.
-// 
-// The next step in the halo finder is to call ComputeLU(), which computes a
-// lower and upper bound on each axis for every node of the k-d tree.  These
-// bounds are used to prune the merge tree during myFOF(): if a branch of the
-// k-d tree contains halos but even its closest possible particle is farther
-// away than the required distance bb, that entire branch is ignored.
-// 
-// Finally myFOF() is called; its recursion mimics that done by Reorder()
-// so that it walks the resequenced k-d tree correctly.  myFOF()
-// recurses down to the bottom of the tree going to the left first.  When it
-// gets to the bottom it calls Merge() to see if those particles at the
-// bottom are close enough to each other.  Remembering that at each stage
-// of the k-d tree the two halves are divided on the next axis by count and
-// not by physical space, you can see that the Merge() must be done on those
-// four parts as follows.
-// 
-// Merge(A,C) Merge(A,D) Merge(B,C) Merge(B,D).  
-//
-// This is because it is unknown if A shares a boundary with C and D or 
-// B shares that boundary.  As particles are found to be close to each other, 
-// if they are already a part of a halo, the two halos must unite.  
-// While all this is going on, we also prune, which means we stop the recursion.
-// As Merge() and myFOF() walk through the recursion, chains of halos are
-// created and joined wherever they have a particle within the required distance.
-// When myFOF() ends, each halo is a chain: halo[] holds its first particle and
-// nextp[] points on down until -1 is reached.  Also, the halo tag field for each
-// particle is constantly updated so that each particle knows what halo it
-// is part of, and that halo tag is the id of the lowest particle in the halo.
-//
-
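The divide-by-count split that Reorder() performs can be illustrated in isolation. A minimal sketch (not the class method itself) of one such split with std::nth_element and a comparator equivalent to the ValueIdPairLT defined below:

    #include <algorithm>
    #include <vector>

    struct Pair { float value; int id; };   // stand-in for ValueIdPair

    // Rearrange [first, last) so the element at the halfway index is in its
    // sorted position on 'value'; everything before it compares smaller and
    // everything after compares larger.  The particle count is halved, the
    // physical space is not.
    void splitByCount(std::vector<Pair>& v, int first, int last)
    {
      int half = (last - first) >> 1;
      std::nth_element(v.begin() + first, v.begin() + first + half, v.begin() + last,
                       [](const Pair& p, const Pair& q) { return p.value < q.value; });
    }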
-#ifndef CosmoHaloFinder_h
-#define CosmoHaloFinder_h
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#else
-#include "Definition.h"
-#endif
-
-#include <string>
-
-#define numDataDims 3
-#define dataX 0
-#define dataY 1
-#define dataZ 2
-
-using namespace std;
-
-/****************************************************************************/
-typedef POSVEL_T* floatptr;
-
-//
-// Particle information for reordering the particles according to position
-// Value is either the X, Y or Z position depending on the recursion
-// Id in the standalone serial version is the particle tag
-// Id in the parallel version is the index of that particle on a
-// particular processor which is why it can be int and not ID_T
-//
-struct ValueIdPair {
-  POSVEL_T value;
-  int id;
-};
-
-class ValueIdPairLT {
-public:
-  bool operator() (const ValueIdPair& p, const ValueIdPair& q) const
-  {
-  return p.value < q.value;
-  }
-};
-
-/****************************************************************************/
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT CosmoHaloFinder
-#else
-class CosmoHaloFinder
-#endif
-{
-public:
-  // create a finder
-  CosmoHaloFinder();
-  ~CosmoHaloFinder();
-
-  void Finding();
-
-  // Read alive particles
-#ifndef USE_VTK_COSMO
-  void Reading();
-  void Writing();
-
-  // execute the finder
-  void Execute();
-#endif
-
-  // Pass the names by reference so the stored c_str() pointers stay valid
-  // for as long as the caller's strings do
-  void setInFile(const string& inFile)   { infile = inFile.c_str(); }
-  void setOutFile(const string& outFile) { outfile = outFile.c_str(); }
-
-  void setParticleLocations(POSVEL_T** d) { data = d; }
-  void setNumberOfParticles(int n)      { npart = n; }
-  void setMyProc(int r)                 { myProc = r; }
-
-  int* getHaloTag()                     { return ht; }
-
-  POSVEL_T* getXLoc()                   { return xx; }
-  POSVEL_T* getYLoc()                   { return yy; }
-  POSVEL_T* getZLoc()                   { return zz; }
-  POSVEL_T* getXVel()                   { return vx; }
-  POSVEL_T* getYVel()                   { return vy; }
-  POSVEL_T* getZVel()                   { return vz; }
-  POSVEL_T* getMass()                   { return ms; }
-  int*   getTag()                       { return pt; }
-  
-  // np.in
-  int np;
-  POSVEL_T rL;
-  POSVEL_T bb;
-  int pmin;
-  bool periodic;
-  const char *infile;
-  const char *outfile;
-  const char *textmode;
-
-private:
-
-  // input/output interface
-  POSVEL_T *xx, *yy, *zz, *vx, *vy, *vz, *ms;
-  int   *pt, *ht;
-
-  // internal state
-  int npart, nhalo, nhalopart;
-  int myProc;
-
-  // data[][] stores xx[], yy[], zz[].
-  POSVEL_T **data;
-
-  // scale factor
-  POSVEL_T xscal, vscal;
-
-  int *halo, *nextp, *hsize;
-
-  // Creates a sequence array containing ids of particles rearranged into
-  // a k-d tree.  Recursive method.
-  ValueIdPair *v;
-  int *seq;
-  void Reorder
-    (int first, 
-     int last, 
-     int flag);
-
-  // Calculates a lower and upper bound for each particle so that the
-  // merging step can prune parts of the k-d tree
-  POSVEL_T **lb, **ub;
-  void ComputeLU(int, int);
-
-  // Recurses through the k-d tree merging particles to create halos
-  void myFOF(int, int, int);
-  void Merge(int, int, int, int, int);
-};
-
-#endif
diff --git a/ThirdParty/Cosmo/CosmoHaloFinderP.cxx b/ThirdParty/Cosmo/CosmoHaloFinderP.cxx
deleted file mode 100644
index 8ccf658..0000000
--- a/ThirdParty/Cosmo/CosmoHaloFinderP.cxx
+++ /dev/null
@@ -1,1119 +0,0 @@
-/*=========================================================================
-
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-#include "Partition.h"
-#include "CosmoHaloFinderP.h"
-
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <iomanip>
-#include <set>
-#include <math.h>
-
-using namespace std;
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Parallel manager for serial CosmoHaloFinder
-// Particle data space is partitioned for the number of processors
-// which currently is a factor of two but is easily extended.  Particles
-// are read in from files where each processor reads one file into a buffer,
-// extracts the particles which really belong on the processor (ALIVE) and
-// those in a buffer region around the edge (DEAD).  The buffer is then
-// passed round robin to every other processor so that all particles are
-// examined by all processors.  All dead particles are tagged with the
-// neighbor zone (26 neighbors in 3D) so that later halos can be associated
-// with zones.
-//
-// The serial halo finder is called on each processor and returns enough
-// information so that it can be determined if a halo is completely ALIVE,
-// completely DEAD, or mixed.  A mixed halo that is shared between on two
-// processors is kept by the processor that contains it in one of its
-// high plane neighbors, and is given up if contained in a low plane neighbor.
-//
-// Mixed halos that cross more than two processors are bundled up and sent
-// to the MASTER processor which decides the processor that should own it.
-//
-/////////////////////////////////////////////////////////////////////////
-
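The round-robin pass of the read buffers described above follows a standard ring-exchange pattern. A hedged sketch of that pattern only (hypothetical helper; the buffer type, count, and the ALIVE/DEAD extraction are placeholders, not the actual reader code):

    #include <mpi.h>

    // Forward the read buffer around the ring numProc-1 times so that every
    // rank gets to examine every particle once.
    void roundRobinExchange(float* buffer, int count, int myProc, int numProc,
                            MPI_Comm comm)
    {
      int dest = (myProc + 1) % numProc;
      int src  = (myProc + numProc - 1) % numProc;
      for (int step = 0; step < numProc - 1; step++) {
        MPI_Status status;
        MPI_Sendrecv_replace(buffer, count, MPI_FLOAT, dest, 0, src, 0,
                             comm, &status);
        // ...extract the ALIVE particles and the DEAD boundary-zone particles
        //    belonging to this rank from 'buffer' here...
      }
    }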
-CosmoHaloFinderP::CosmoHaloFinderP()
-{
-  // Get the number of processors and rank of this processor
-  this->numProc = Partition::getNumProc();
-  this->myProc = Partition::getMyProc();
-
-  // Get the number of processors in each dimension
-  Partition::getDecompSize(this->layoutSize);
-
-  // Get my position within the Cartesian topology
-  Partition::getMyPosition(this->layoutPos);
-
-  // Get the neighbors of this processor
-  Partition::getNeighbors(this->neighbor);
-
-  // For each neighbor zone, how many dead particles does it contain to start
-  // and how many dead halos does it contain after the serial halo finder
-  // For analysis but not necessary to run the code
-  //
-  for (int n = 0; n < NUM_OF_NEIGHBORS; n++) {
-    this->deadParticle[n] = 0;
-    this->deadHalo[n] = 0;
-  }
-}
-
-CosmoHaloFinderP::~CosmoHaloFinderP()
-{
-  for (unsigned int i = 0; i < this->myMixedHalos.size(); i++)
-    delete this->myMixedHalos[i];
-
-  delete [] this->haloList;
-  delete [] this->haloStart;
-  delete [] this->haloSize;
-
-  for (int dim = 0; dim < DIMENSION; dim++)
-    delete [] haloData[dim];
-  delete [] haloData;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set parameters for the serial halo finder
-//
-/////////////////////////////////////////////////////////////////////////
-
-void CosmoHaloFinderP::setParameters(
-                        const string& outName,
-                        POSVEL_T _rL,
-                        POSVEL_T _deadSz,
-                        long _np,
-                        int _pmin,
-                        POSVEL_T _bb)
-{
-  // Particles for this processor output to file
-  ostringstream oname, hname;
-  if (this->numProc == 1) {
-    oname << outName;
-    hname << outName;
-  } else {
-    oname << outName << "." << myProc;
-    hname << outName << ".halo." << myProc;
-  }
-  this->outFile = oname.str();
-  this->outHaloFile = hname.str();
-
-  // Halo finder parameters
-  this->np = _np;
-  this->pmin = _pmin;
-  this->bb = _bb;
-  this->boxSize = _rL;
-  this->deadSize = _deadSz;
-
-  // First version of this code distributed the dead particles on a processor
-  // by taking the x,y,z position and adding or subtracting boxSize in all
-  // combinations.  This revised x,y,z was then normalized and added to the
-  // haloData array which was passed to each serial halo finder.
-  // This did not get the same answer as the standalone serial version which
-  // read the x,y,z and normalized without adding or subtracting boxSize first.
-  // Then when comparing distance the normalized "np" was used for subtraction.
-  // By doing things in this order some particles were placed slightly off,
-  // which was enough for particles to be included in halos where they should
-  // not have been.  In this first version, since I had placed particles by
-  // subtracting first, I made "periodic" false figuring all particles were
-  // placed where they should go.
-  //
-  // In the second version I normalize the dead particles, even from wraparound,
-  // using the actual x,y,z.  So when looking at a processor the alive particles
-  // will appear all together and the wraparound will properly be on the other
-  // side of the box.  Combined with doing this is setting "periodic" to true
-  // so that the serial halo finder works as it does in the standalone case
-  // and the normalization and subtraction from np happens in the same order.
-  //
-  // Third version went back to the first version because we need
-  // contiguous locations coming out of the halo finder for the center finder
-
-  this->haloFinder.np = _np;
-  this->haloFinder.pmin = _pmin;
-  this->haloFinder.bb = _bb;
-  this->haloFinder.rL = _rL;
-  this->haloFinder.periodic = false;
-  this->haloFinder.textmode = "ascii";
-
-  // Serial halo finder wants normalized locations on a grid superimposed
-  // on the physical rL grid.  Grid size is np and number of particles in
-  // the problem is np^3
-  this->normalizeFactor = (POSVEL_T)((1.0 * _np) / _rL);
-
-#ifndef USE_VTK_COSMO
-  if (this->myProc == MASTER) {
-    cout << endl << "------------------------------------" << endl;
-    cout << "np:       " << this->np << endl;
-    cout << "bb:       " << this->bb << endl;
-    cout << "pmin:     " << this->pmin << endl << endl;
-  }
-#endif
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set the particle vectors that have already been read and which
-// contain only the alive particles for this processor
-//
-/////////////////////////////////////////////////////////////////////////
-
-void CosmoHaloFinderP::setParticles(
-                        vector<POSVEL_T>* xLoc,
-                        vector<POSVEL_T>* yLoc,
-                        vector<POSVEL_T>* zLoc,
-                        vector<POSVEL_T>* xVel,
-                        vector<POSVEL_T>* yVel,
-                        vector<POSVEL_T>* zVel,
-                        vector<POTENTIAL_T>* potential,
-                        vector<ID_T>* id,
-                        vector<MASK_T>* maskData,
-                        vector<STATUS_T>* state)
-{
-  this->particleCount = (long)xLoc->size();
-
-  // Extract the contiguous data block from a vector pointer
-  this->xx = &(*xLoc)[0];
-  this->yy = &(*yLoc)[0];
-  this->zz = &(*zLoc)[0];
-  this->vx = &(*xVel)[0];
-  this->vy = &(*yVel)[0];
-  this->vz = &(*zVel)[0];
-  this->pot = &(*potential)[0];
-  this->tag = &(*id)[0];
-  this->mask = &(*maskData)[0];
-  this->status = &(*state)[0];
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Execute the serial halo finder on the particles for this processor
-// Both ALIVE and DEAD particles were collected and normalized into
-// haloData which is in the form that the serial halo finder wants
-//
-/////////////////////////////////////////////////////////////////////////
-
-void CosmoHaloFinderP::executeHaloFinder()
-{
-  // Allocate data pointer which is sent to the serial halo finder
-  this->haloData = new POSVEL_T*[DIMENSION];
-  for (int dim = 0; dim < DIMENSION; dim++)
-    this->haloData[dim] = new POSVEL_T[this->particleCount];
-
-  // Fill it with normalized x,y,z of all particles on this processor
-  for (int p = 0; p < this->particleCount; p++) {
-    this->haloData[0][p] = this->xx[p] * this->normalizeFactor;
-    this->haloData[1][p] = this->yy[p] * this->normalizeFactor;
-    this->haloData[2][p] = this->zz[p] * this->normalizeFactor;
-  }
-
-  this->haloFinder.setParticleLocations(haloData);
-  this->haloFinder.setNumberOfParticles(this->particleCount);
-  this->haloFinder.setMyProc(this->myProc);
-  this->haloFinder.setOutFile(this->outFile);
-
-#ifndef USE_VTK_COSMO
-  cout << "Rank " << setw(3) << this->myProc
-       << " RUNNING SERIAL HALO FINDER on "
-       << particleCount << " particles" << endl;
-#endif
-
-#ifndef USE_SERIAL_COSMO
-  MPI_Barrier(Partition::getComm());
-#endif
-
-  if (this->particleCount > 0)
-    this->haloFinder.Finding();
-
-#ifndef USE_SERIAL_COSMO
-  MPI_Barrier(Partition::getComm());
-#endif
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// At this point each serial halo finder ran and
-// the particles handed to it included alive and dead.  Get back the
-// halo tag array and figure out the indices of the particles in each halo
-// and translate that into absolute particle tags and note alive or dead
-//
-// After the serial halo finder has run the halo tag is the INDEX of the
-// lowest particle in the halo on this processor.  It is not the absolute
-// particle tag id over the entire problem.
-//
-//    Serial partindex i = 0 haloTag = 0 haloSize = 1
-//    Serial partindex i = 1 haloTag = 1 haloSize = 1
-//    Serial partindex i = 2 haloTag = 2 haloSize = 1
-//    Serial partindex i = 3 haloTag = 3 haloSize = 1
-//    Serial partindex i = 4 haloTag = 4 haloSize = 2
-//    Serial partindex i = 5 haloTag = 5 haloSize = 1
-//    Serial partindex i = 6 haloTag = 6 haloSize = 1616
-//    Serial partindex i = 7 haloTag = 7 haloSize = 1
-//    Serial partindex i = 8 haloTag = 8 haloSize = 2
-//    Serial partindex i = 9 haloTag = 9 haloSize = 1738
-//    Serial partindex i = 10 haloTag = 10 haloSize = 4
-//    Serial partindex i = 11 haloTag = 11 haloSize = 1
-//    Serial partindex i = 12 haloTag = 12 haloSize = 78
-//    Serial partindex i = 13 haloTag = 12 haloSize = 0
-//    Serial partindex i = 14 haloTag = 12 haloSize = 0
-//    Serial partindex i = 15 haloTag = 12 haloSize = 0
-//    Serial partindex i = 16 haloTag = 16 haloSize = 2
-//    Serial partindex i = 17 haloTag = 17 haloSize = 1
-//    Serial partindex i = 18 haloTag = 6 haloSize = 0
-//    Serial partindex i = 19 haloTag = 6 haloSize = 0
-//    Serial partindex i = 20 haloTag = 6 haloSize = 0
-//    Serial partindex i = 21 haloTag = 6 haloSize = 0
-//
-// Halo of size 1616 has the low particle tag of 6 and other members are
-// 18,19,20,21 indicated by a tag of 6 and haloSize of 0
-//
-/////////////////////////////////////////////////////////////////////////
-
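Given the haloStart/haloList chaining built below, the members of one halo can be recovered with a short walk. A minimal sketch, assuming the arrays are filled exactly as in buildHaloStructure():

    #include <vector>

    // Collect the particle indices of the halo whose lowest particle index is
    // 'haloId'.  haloStart[haloId] is the most recently chained particle and
    // each haloList entry points at the next one, ending at -1.
    std::vector<int> haloMembers(int haloId, const int* haloStart, const int* haloList)
    {
      std::vector<int> members;
      for (int p = haloStart[haloId]; p != -1; p = haloList[p])
        members.push_back(p);
      return members;
    }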
-void CosmoHaloFinderP::collectHalos()
-{
-  // Halo tag returned from the serial halo finder is actually the index
-  // of the particle on this processor.  Must map to get to actual tag
-  // which is common information between all processors.
-  this->haloTag = haloFinder.getHaloTag();
-
-  // Record the halo size of each particle on this processor
-  this->haloSize = new int[this->particleCount];
-  this->haloAliveSize = new int[this->particleCount];
-  this->haloDeadSize = new int[this->particleCount];
-
-  // Create a list of particles in any halo by recording the index of the
-  // first particle and having that index give the index of the next particle.
-  // The last particle index reports a -1.
-  // The list is built by iterating on the tags and storing in reverse order,
-  // so the chain for each halo starts at haloStart and is followed through
-  // haloList until -1 is reached.
-  this->haloList = new int[this->particleCount];
-  this->haloStart = new int[this->particleCount];
-
-  for (int p = 0; p < this->particleCount; p++) {
-    this->haloList[p] = -1;
-    this->haloStart[p] = p;
-    this->haloSize[p] = 0;
-    this->haloAliveSize[p] = 0;
-    this->haloDeadSize[p] = 0;
-  }
-
-  // Build the chaining mesh of particles in all the halos and count particles
-  buildHaloStructure();
-
-  // Mixed halos are saved separately so that they can be merged
-  processMixedHalos();
-
-  delete [] this->haloAliveSize;
-  delete [] this->haloDeadSize;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Examine every particle on this processor, both ALIVE and DEAD
-// For that particle increment the count for the corresponding halo
-// which is indicated by the lowest particle index in that halo
-// Also build the haloList so that we can find all particles in any halo
-//
-/////////////////////////////////////////////////////////////////////////
-
-void CosmoHaloFinderP::buildHaloStructure()
-{
-  // Build the chaining mesh so that all particles in a halo can be found
-  // This will include even small halos which will be excluded later
-  for (int p = 0; p < this->particleCount; p++) {
-
-    // Chain backwards the halo particles
-    // haloStart is the index of the last particle in a single halo in haloList
-    // The value found in haloList is the index of the next particle
-    if (this->haloTag[p] != p) {
-      this->haloList[p] = haloStart[this->haloTag[p]];
-      this->haloStart[this->haloTag[p]] = p;
-    }
-
-    // Count particles in the halos
-    if (this->status[p] == ALIVE)
-      this->haloAliveSize[this->haloTag[p]]++;
-    else
-      this->haloDeadSize[this->haloTag[p]]++;
-    this->haloSize[this->haloTag[p]]++;
-  }
-
-  // Iterate over particles and create a CosmoHalo for halos with size > pmin
-  // only for the mixed halos, not for those completely alive or dead
-  this->numberOfAliveHalos = 0;
-  this->numberOfDeadHalos = 0;
-  this->numberOfMixedHalos = 0;
-
-  // Only the first particle id for a halo records the size
-  // Succeeding particles which are members of a halo have a size of 0
-  // Record the start index of any legal halo which will allow the
-  // following of the chaining mesh to identify all particles in a halo
-  this->numberOfHaloParticles = 0;
-  for (ID_T p = 0; p < this->particleCount; p++) {
-
-    if (this->haloSize[p] >= this->pmin) {
-
-      if (this->haloAliveSize[p] > 0 && this->haloDeadSize[p] == 0) {
-        this->numberOfAliveHalos++;
-        this->numberOfHaloParticles += this->haloAliveSize[p];
-
-        // Save start of legal alive halo for halo properties
-        this->halos.push_back(this->haloStart[p]);
-        this->haloCount.push_back(this->haloAliveSize[p]);
-      }
-      else if (this->haloDeadSize[p] > 0 && this->haloAliveSize[p] == 0) {
-        this->numberOfDeadHalos++;
-      }
-      else {
-        this->numberOfMixedHalos++;
-        CosmoHalo* halo = new CosmoHalo(p,
-                                this->haloAliveSize[p], this->haloDeadSize[p]);
-        this->myMixedHalos.push_back(halo);
-      }
-    }
-  }
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  cout << "Rank " << this->myProc
-       << " #alive halos = " << this->numberOfAliveHalos
-       << " #dead halos = " << this->numberOfDeadHalos
-       << " #mixed halos = " << this->numberOfMixedHalos << endl;
-#endif
-#endif
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Mixed halos (which cross several processors) have been collected
-// By applying a high/low rule most mixed halos are assigned immediately
-// to one processor or another.  This requires extra processing so that
-// it is known which neighbor processors share the halo.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void CosmoHaloFinderP::processMixedHalos()
-{
-  // Iterate over all particles and add tags to large mixed halos
-  for (ID_T p = 0; p < this->particleCount; p++) {
-
-    // All particles in the same halo have the same haloTag
-    if (this->haloSize[this->haloTag[p]] >= pmin &&
-        this->haloAliveSize[this->haloTag[p]] > 0 &&
-        this->haloDeadSize[this->haloTag[p]] > 0) {
-
-          // Check each mixed halo to see which one this particle belongs to
-          for (unsigned int h = 0; h < this->myMixedHalos.size(); h++) {
-
-            // If the tag of the particle matches the halo ID it belongs
-            if (this->haloTag[p] == this->myMixedHalos[h]->getHaloID()) {
-
-              // Add the index to that mixed halo.  Also record which neighbor
-              // the dead particle is associated with for merging
-              this->myMixedHalos[h]->addParticle(
-                                        p, this->tag[p], this->status[p]);
-
-              // For debugging only
-              if (this->status[p] > 0)
-                this->deadHalo[this->status[p]]++;
-
-              // Do some bookkeeping for the final output
-              // This processor should output all ALIVE particles, unless they
-              // are in a mixed halo that ends up being INVALID
-              // This processor should output none of the DEAD particles,
-              // unless they are in a mixed halo that ends up being VALID
-
-              // So since this particle is in a mixed halo set it to MIXED
-              // which is one less than ALIVE.  Later, when we determine the
-              // mixed halo is VALID, we set the status back to ALIVE for
-              // every particle in it
-
-              // Now when we output we only do the ALIVE particles
-              this->status[p] = MIXED;
-            }
-          }
-    }
-  }
-
-  // Iterate over the mixed halos that were just created checking to see if
-  // the halo is on the "high" side of the 3D data space or not
-  // If it is on the high side and is shared with one other processor, keep it
-  // If it is on the low side and is shared with one other processor, delete it
-  // Any remaining halos are shared with more than two processors and must
-  // be merged by having the MASTER node decide
-  //
-  for (unsigned int h = 0; h < this->myMixedHalos.size(); h++) {
-    int lowCount = 0;
-    int highCount = 0;
-    set<int>* neighbors = this->myMixedHalos[h]->getNeighbors();
-    set<int>::iterator iter;
-    set<int> haloNeighbor;
-
-    for (iter = neighbors->begin(); iter != neighbors->end(); ++iter) {
-      if ((*iter) == X1 || (*iter) == Y1 || (*iter) == Z1 ||
-          (*iter) == X1_Y1 || (*iter) == Y1_Z1 || (*iter) == Z1_X1 ||
-          (*iter) == X1_Y1_Z1) {
-            highCount++;
-      } else {
-            lowCount++;
-      }
-      // Neighbor zones are on what actual processors
-      haloNeighbor.insert(this->neighbor[(*iter)]);
-    }
-
-    // Halo is kept by this processor and is marked as VALID
-    // It may be in multiple neighbor zones, but all on the same neighbor processor
-    if (highCount > 0 && lowCount == 0 && haloNeighbor.size() == 1) {
-      this->numberOfAliveHalos++;
-      this->numberOfMixedHalos--;
-      this->myMixedHalos[h]->setValid(VALID);
-      int id = this->myMixedHalos[h]->getHaloID();
-      int newAliveParticles = this->myMixedHalos[h]->getAliveCount() +
-                              this->myMixedHalos[h]->getDeadCount();
-      this->numberOfHaloParticles += newAliveParticles;
-
-      // Add this halo to valid halos on this processor for
-      // subsequent halo properties analysis
-      this->halos.push_back(this->haloStart[id]);
-      this->haloCount.push_back(newAliveParticles);
-
-      // Output trick - since the status of this particle was marked MIXED
-      // when it was added to the mixed CosmoHalo vector, and now it has
-      // been declared VALID, change it to ALIVE even if it was dead before
-      vector<ID_T>* particles = this->myMixedHalos[h]->getParticles();
-      vector<ID_T>::iterator iter2;
-      for (iter2 = particles->begin(); iter2 != particles->end(); ++iter2)
-        this->status[(*iter2)] = ALIVE;
-    }
-
-    // Halo will be kept by some other processor and is marked INVALID
-    // It may be in multiple neighbor zones, but all on the same neighbor processor
-    else if (highCount == 0 && lowCount > 0 && haloNeighbor.size() == 1) {
-      this->numberOfDeadHalos++;
-      this->numberOfMixedHalos--;
-      this->myMixedHalos[h]->setValid(INVALID);
-    }
-
-    // Remaining mixed halos must be examined by MASTER and stay UNMARKED
-    // Sort them on the tag field for easy comparison
-    else {
-      this->myMixedHalos[h]->setValid(UNMARKED);
-      this->myMixedHalos[h]->sortParticleTags();
-    }
-  }
-
-  // If only one processor is running there are no halos to merge
-  if (this->numProc == 1)
-    for (unsigned int h = 0; h < this->myMixedHalos.size(); h++)
-       this->myMixedHalos[h]->setValid(INVALID);
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Using the MASTER node merge all mixed halos so that only one processor
-// takes credit for them.
-//
-// Each processor containing mixed halos that are UNMARKED sends:
-//    Rank
-//    Number of mixed halos to merge
-//    for each halo
-//      id
-//      number of alive (for debugging)
-//      number of dead  (for debugging)
-//      first MERGE_COUNT particle ids (for merging)
-//
-/////////////////////////////////////////////////////////////////////////
-
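The message layout listed above implies a fixed packing order. A sketch of how one such message could be packed (hypothetical helper; the ID_T and MERGE_COUNT below are placeholders for the definitions in the Cosmo headers):

    typedef long ID_T;            // placeholder for the ID_T in Definition.h
    const int MERGE_COUNT = 20;   // placeholder value; the real constant is defined elsewhere

    // Pack rank, halo count, and per-halo (id, aliveCount, deadCount and
    // MERGE_COUNT particle tags) in the order the MASTER unpacks them.
    // Returns the number of entries written.
    int packMixedHalos(ID_T* buf, int rank, int numMixed,
                       const ID_T* ids, const int* alive, const int* dead,
                       const ID_T* tags /* numMixed x MERGE_COUNT, row-major */)
    {
      int index = 0;
      buf[index++] = rank;
      buf[index++] = numMixed;
      for (int m = 0; m < numMixed; m++) {
        buf[index++] = ids[m];
        buf[index++] = alive[m];
        buf[index++] = dead[m];
        for (int t = 0; t < MERGE_COUNT; t++)
          buf[index++] = tags[m * MERGE_COUNT + t];
      }
      return index;
    }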
-void CosmoHaloFinderP::mergeHalos()
-{
-  // What size integer buffer is needed to hold the largest halo data
-  int maxNumberOfMixed;
-  int numberOfMixed = (int)this->myMixedHalos.size();
-
-#ifdef USE_SERIAL_COSMO
-  maxNumberOfMixed = numberOfMixed;
-#else
-  MPI_Allreduce((void*) &numberOfMixed, (void*) &maxNumberOfMixed,
-                1, MPI_INT, MPI_MAX, Partition::getComm());
-#endif
-
-  // If there are no halos to merge, return
-  if (maxNumberOfMixed == 0)
-    return;
-
-  // Everyone creates the buffer for maximum halos
-  // MASTER will receive into it, others will send from it
-  int haloBufSize = maxNumberOfMixed * MERGE_COUNT * 2;
-  ID_T* haloBuffer = new ID_T[haloBufSize];
-
-  // MASTER moves its own mixed halos to mixed halo vector (change index to tag)
-  // then gets messages from others and creates those mixed halos
-  collectMixedHalos(haloBuffer, haloBufSize);
-#ifndef USE_SERIAL_COSMO
-  MPI_Barrier(Partition::getComm());
-#endif
-
-  // MASTER has all data and runs algorithm to make decisions
-  assignMixedHalos();
-#ifndef USE_SERIAL_COSMO
-  MPI_Barrier(Partition::getComm());
-#endif
-
-  // MASTER sends merge results to all processors
-  sendMixedHaloResults(haloBuffer, haloBufSize);
-#ifndef USE_SERIAL_COSMO
-  MPI_Barrier(Partition::getComm());
-#endif
-
-  // Collect totals for result checking
-  int totalAliveHalos;
-#ifdef USE_SERIAL_COSMO
-  totalAliveHalos = this->numberOfAliveHalos;
-#else
-  MPI_Allreduce((void*) &this->numberOfAliveHalos, (void*) &totalAliveHalos,
-                1, MPI_INT, MPI_SUM, Partition::getComm());
-#endif
-
-  int totalAliveHaloParticles;
-#ifdef USE_SERIAL_COSMO
-  totalAliveHaloParticles = this->numberOfHaloParticles;
-#else
-  MPI_Allreduce((void*) &this->numberOfHaloParticles,
-                (void*) &totalAliveHaloParticles,
-                1, MPI_INT, MPI_SUM, Partition::getComm());
-#endif
-
-#ifndef USE_VTK_COSMO
-  if (this->myProc == MASTER) {
-    cout << endl;
-    cout << "Total halos found:    " << totalAliveHalos << endl;
-    cout << "Total halo particles: " << totalAliveHaloParticles << endl;
-  }
-#endif
-
-  for (unsigned int i = 0; i < this->allMixedHalos.size(); i++)
-    delete this->allMixedHalos[i];
-  delete [] haloBuffer;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// MASTER collects all mixed halos which are UNMARKED from all processors
-// including its own mixed halos
-//
-/////////////////////////////////////////////////////////////////////////
-
-void CosmoHaloFinderP::collectMixedHalos
-#ifdef USE_SERIAL_COSMO
-  (ID_T* , int )
-#else
-  (ID_T* haloBuffer, int haloBufSize)
-#endif
-{
-  // How many processors have mixed halos
-  int haveMixedHalo = (this->numberOfMixedHalos > 0 ? 1 : 0);
-  int processorsWithMixedHalos;
-#ifdef USE_SERIAL_COSMO
-  processorsWithMixedHalos = haveMixedHalo;
-#else
-  MPI_Allreduce((void*) &haveMixedHalo, (void*) &processorsWithMixedHalos,
-                1, MPI_INT, MPI_SUM, Partition::getComm());
-#endif
-
-  // MASTER moves its own mixed halos to mixed halo vector (change index to tag)
-  // then gets messages from others and creates those mixed halos
-#ifndef USE_SERIAL_COSMO
-  if (this->myProc == MASTER) {
-#endif
-
-    // If MASTER has any mixed halos add them to the mixed halo vector
-    if (this->numberOfMixedHalos > 0) {
-      processorsWithMixedHalos--;
-
-      for (unsigned int h = 0; h < this->myMixedHalos.size(); h++) {
-        if (this->myMixedHalos[h]->getValid() == UNMARKED) {
-          CosmoHalo* halo = new CosmoHalo(
-                                     this->myMixedHalos[h]->getHaloID(),
-                                     this->myMixedHalos[h]->getAliveCount(),
-                                     this->myMixedHalos[h]->getDeadCount());
-          halo->setRankID(this->myProc);
-          this->allMixedHalos.push_back(halo);
-
-          // Translate index of particle to tag of particle
-          vector<ID_T>* tags = this->myMixedHalos[h]->getTags();
-          for (int i = 0; i < MERGE_COUNT; i++)
-            halo->addParticle((*tags)[i]);
-
-        }
-      }
-    }
-
-#ifndef USE_SERIAL_COSMO
-    // Wait on messages from other processors and process
-    int notReceived = processorsWithMixedHalos;
-    MPI_Status mpistatus;
-    while (notReceived > 0) {
-
-      // Get message containing mixed halo information
-#ifdef ID_64
-      MPI_Recv(haloBuffer, haloBufSize, MPI_LONG, MPI_ANY_SOURCE,
-               0, Partition::getComm(), &mpistatus);
-#else
-      MPI_Recv(haloBuffer, haloBufSize, MPI_INT, MPI_ANY_SOURCE,
-               0, Partition::getComm(), &mpistatus);
-#endif
-
-      // Gather halo information from the message
-      int index = 0;
-      int rank = haloBuffer[index++];
-      int numMixed = haloBuffer[index++];
-
-      for (int m = 0; m < numMixed; m++) {
-        ID_T id = haloBuffer[index++];
-        int aliveCount = haloBuffer[index++];
-        int deadCount = haloBuffer[index++];
-
-        // Create the CosmoHalo to hold the data and add to vector
-        CosmoHalo* halo = new CosmoHalo(id, aliveCount, deadCount);
-
-        halo->setRankID(rank);
-        this->allMixedHalos.push_back(halo);
-
-        for (int t = 0; t < MERGE_COUNT; t++)
-          halo->addParticle(haloBuffer[index++]);
-      }
-      notReceived--;
-    }
-
-#ifndef USE_VTK_COSMO
-    cout << "Number of halos to merge: " << this->allMixedHalos.size() << endl;
-#endif
-  }
-
-  // Other processors bundle up mixed and send to MASTER
-  else {
-    int index = 0;
-    if (this->numberOfMixedHalos > 0) {
-      haloBuffer[index++] = this->myProc;
-      haloBuffer[index++] = this->numberOfMixedHalos;
-
-      for (unsigned int h = 0; h < this->myMixedHalos.size(); h++) {
-        if (this->myMixedHalos[h]->getValid() == UNMARKED) {
-
-          haloBuffer[index++] = this->myMixedHalos[h]->getHaloID();
-          haloBuffer[index++] = this->myMixedHalos[h]->getAliveCount();
-          haloBuffer[index++] = this->myMixedHalos[h]->getDeadCount();
-
-          vector<ID_T>* tags = this->myMixedHalos[h]->getTags();
-          for (int i = 0; i < MERGE_COUNT; i++) {
-            haloBuffer[index++] = (*tags)[i];
-          }
-        }
-      }
-      MPI_Request request;
-#ifdef ID_64
-      MPI_Isend(haloBuffer, haloBufSize, MPI_LONG, MASTER,
-                0, Partition::getComm(), &request);
-#else
-      MPI_Isend(haloBuffer, haloBufSize, MPI_INT, MASTER,
-                0, Partition::getComm(), &request);
-#endif
-    }
-  }
-
-#endif // USE_SERIAL_COSMO
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// MASTER has collected all the mixed halos and decides which processors
-// will get which by matching them up
-//
-/////////////////////////////////////////////////////////////////////////
-
-void CosmoHaloFinderP::assignMixedHalos()
-{
-  // MASTER has all data and runs algorithm to make decisions
-  if (this->myProc == MASTER) {
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-    for (unsigned int m = 0; m < this->allMixedHalos.size(); m++) {
-      vector<ID_T>* tags = this->allMixedHalos[m]->getTags();
-      cout << "Mixed Halo " << m << ": "
-           << " rank=" << this->allMixedHalos[m]->getRankID()
-           << " index=" << this->allMixedHalos[m]->getHaloID()
-           << " tag=" << (*tags)[0]
-           << " alive=" << this->allMixedHalos[m]->getAliveCount()
-           << " dead=" << this->allMixedHalos[m]->getDeadCount() << endl;
-    }
-#endif
-#endif
-
-    // Iterate over mixed halo vector and match and mark
-    // Remember that I can have 3 or 4 that match
-    for (unsigned int m = 0; m < this->allMixedHalos.size(); m++) {
-
-      // If this halo has not already been paired with another
-      if (this->allMixedHalos[m]->getPartners()->empty() == true) {
-
-        // Current mixed halo has the fewest alive particles seen so far
-        int numberAlive = this->allMixedHalos[m]->getAliveCount();
-        int haloWithLeastAlive = m;
-
-        // Iterate on the rest of the mixed halos
-        unsigned int n = m + 1;
-        while (n < this->allMixedHalos.size()) {
-
-          // Compare to see if there are a number of tags in common
-          int match = compareHalos(this->allMixedHalos[m],
-                                   this->allMixedHalos[n]);
-
-          // Keep track of the mixed halo with the fewest alive particles
-          if (match > 0) {
-            if (numberAlive > this->allMixedHalos[n]->getAliveCount()) {
-              numberAlive = this->allMixedHalos[n]->getAliveCount();
-              haloWithLeastAlive = n;
-            }
-            this->allMixedHalos[m]->addPartner(n);
-            this->allMixedHalos[n]->addPartner(m);
-            this->allMixedHalos[m]->setValid(INVALID);
-            this->allMixedHalos[n]->setValid(INVALID);
-          }
-          n++;
-        }
-        // Mixed halo with the least alive particles gets it as VALID
-        this->allMixedHalos[haloWithLeastAlive]->setValid(VALID);
-      }
-    }
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-    for (unsigned int m = 0; m < this->allMixedHalos.size(); m++) {
-
-      cout << "Mixed Halo " << m;
-      if (this->allMixedHalos[m]->getValid() == VALID)
-        cout << " is VALID on "
-             << " Rank " << this->allMixedHalos[m]->getRankID();
-
-      cout << " partners with ";
-      set<int>::iterator iter;
-      set<int>* partner = this->allMixedHalos[m]->getPartners();
-      for (iter = partner->begin(); iter != partner->end(); ++iter)
-        cout << (*iter) << " ";
-      cout << endl;
-    }
-#endif
-#endif
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Compare the tags of two halos to see if they are somewhat the same
-// This needs to be made better
-//
-/////////////////////////////////////////////////////////////////////////
-
-int CosmoHaloFinderP::compareHalos(CosmoHalo* halo1, CosmoHalo* halo2)
-{
-  vector<ID_T>* member1 = halo1->getTags();
-  vector<ID_T>* member2 = halo2->getTags();
-
-  int numFound = 0;
-  for (unsigned int i = 0; i < member1->size(); i++) {
-    bool done = false;
-    unsigned int j = 0;
-    // Bound-check member2 before dereferencing it; both tag lists are sorted,
-    // so the scan can stop once member2's tag exceeds member1's
-    while (!done &&
-           j < member2->size() &&
-           (*member1)[i] >= (*member2)[j]) {
-      if ((*member1)[i] == (*member2)[j]) {
-        done = true;
-        numFound++;
-      }
-      j++;
-    }
-  }
-  return numFound;
-}
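The comment above notes that this comparison needs to be made better. One possible tightening (a sketch, not part of the original code): since both tag lists are sorted, the shared tags can be counted with a single linear merge instead of the nested scan:

    #include <cstddef>
    #include <vector>

    typedef long ID_T;   // placeholder for the ID_T in Definition.h

    // Count tags present in both sorted tag lists with one linear pass.
    int countSharedTags(const std::vector<ID_T>& a, const std::vector<ID_T>& b)
    {
      int shared = 0;
      std::size_t i = 0, j = 0;
      while (i < a.size() && j < b.size()) {
        if (a[i] < b[j])        i++;
        else if (b[j] < a[i])   j++;
        else { shared++; i++; j++; }
      }
      return shared;
    }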
-
-/////////////////////////////////////////////////////////////////////////
-//
-// MASTER sends the result of the merge back to the processors which
-// label their previously UNMARKED mixed halos as VALID or INVALID
-// VALID halos have all their particles made ALIVE for output
-// INVALID halos have all their particles made DEAD because other
-// processors will report them
-//
-/////////////////////////////////////////////////////////////////////////
-
-void CosmoHaloFinderP::sendMixedHaloResults
-#ifdef USE_SERIAL_COSMO
-(ID_T* haloBuffer, int)
-#else
-(ID_T* haloBuffer, int haloBufSize)
-#endif
-{
-#ifndef USE_SERIAL_COSMO
-  // MASTER sends merge results to all processors
-  if (this->myProc == MASTER) {
-#endif
-
-    // Share the information
-    // Send to each processor the rank, id, and valid status
-    // Use the same haloBuffer
-    int index = 0;
-    haloBuffer[index++] = (ID_T)this->allMixedHalos.size();
-    for (unsigned int m = 0; m < this->allMixedHalos.size(); m++) {
-      haloBuffer[index++] = this->allMixedHalos[m]->getRankID();
-      haloBuffer[index++] = this->allMixedHalos[m]->getHaloID();
-      haloBuffer[index++] = this->allMixedHalos[m]->getValid();
-    }
-
-#ifndef USE_SERIAL_COSMO
-    MPI_Request request;
-    for (int proc = 1; proc < this->numProc; proc++) {
-#ifdef ID_64
-      MPI_Isend(haloBuffer, haloBufSize, MPI_LONG, proc,
-                0, Partition::getComm(), &request);
-#else
-      MPI_Isend(haloBuffer, haloBufSize, MPI_INT, proc,
-                0, Partition::getComm(), &request);
-#endif
-    }
-#endif
-
-    // MASTER must claim the mixed halos assigned to him
-    for (unsigned int m = 0; m < this->allMixedHalos.size(); m++) {
-      if (this->allMixedHalos[m]->getRankID() == MASTER &&
-          this->allMixedHalos[m]->getValid() == VALID) {
-
-        // Locate the mixed halo in question
-        for (unsigned int h = 0; h < this->myMixedHalos.size(); h++) {
-          int id = this->myMixedHalos[h]->getHaloID();
-          if (id == this->allMixedHalos[m]->getHaloID()) {
-            this->myMixedHalos[h]->setValid(VALID);
-            int newAliveParticles = this->myMixedHalos[h]->getAliveCount() +
-                                    this->myMixedHalos[h]->getDeadCount();
-            this->numberOfHaloParticles += newAliveParticles;
-            this->numberOfAliveHalos++;
-
-            // Add this halo to valid halos on this processor for
-            // subsequent halo properties analysis
-            this->halos.push_back(this->haloStart[id]);
-            this->haloCount.push_back(newAliveParticles);
-
-            // Output trick - since the status of this particle was marked MIXED
-            // when it was added to the mixed CosmoHalo vector, and now it has
-            // been declared VALID, change it to ALIVE even if it was dead
-            vector<ID_T>* particles = this->myMixedHalos[h]->getParticles();
-            vector<ID_T>::iterator iter;
-            for (iter = particles->begin(); iter != particles->end(); ++iter)
-              this->status[(*iter)] = ALIVE;
-          }
-        }
-      }
-    }
-
-#ifndef USE_SERIAL_COSMO
-  }
-
-  // Other processors wait for result and adjust their halo vector
-  else {
-    MPI_Status mpistatus;
-#ifdef ID_64
-    MPI_Recv(haloBuffer, haloBufSize, MPI_LONG, MASTER,
-             0, Partition::getComm(), &mpistatus);
-#else
-    MPI_Recv(haloBuffer, haloBufSize, MPI_INT, MASTER,
-             0, Partition::getComm(), &mpistatus);
-#endif
-
-    // Unpack information to see which of mixed halos are still valid
-    int index = 0;
-    int numMixed = haloBuffer[index++];
-    for (int m = 0; m < numMixed; m++) {
-      int rank = haloBuffer[index++];
-      int id = haloBuffer[index++];
-      int valid = haloBuffer[index++];
-
-      // If this mixed halo is on my processor
-      if (rank == this->myProc && valid == VALID) {
-
-        // Locate the mixed halo in question
-        for (unsigned int h = 0; h < this->myMixedHalos.size(); h++) {
-          if (this->myMixedHalos[h]->getHaloID() == id) {
-            this->myMixedHalos[h]->setValid(VALID);
-            int newAliveParticles = this->myMixedHalos[h]->getAliveCount() +
-                                    this->myMixedHalos[h]->getDeadCount();
-            this->numberOfHaloParticles += newAliveParticles;
-            this->numberOfAliveHalos++;
-
-            // Add this halo to valid halos on this processor for
-            // subsequent halo properties analysis
-            this->halos.push_back(this->haloStart[id]);
-            this->haloCount.push_back(newAliveParticles);
-
-            // Output trick - since the status of this particle was marked MIXED
-            // when it was added to the mixed CosmoHalo vector, and now it has
-            // been declared VALID, change it to ALIVE even if it was dead
-            vector<ID_T>* particles = this->myMixedHalos[h]->getParticles();
-            vector<ID_T>::iterator iter;
-            for (iter = particles->begin(); iter != particles->end(); ++iter)
-              this->status[(*iter)] = ALIVE;
-          }
-        }
-      }
-    }
-  }
-#endif // USE_SERIAL_COSMO
-}
-
-#ifndef USE_VTK_COSMO
-/////////////////////////////////////////////////////////////////////////
-//
-// Write the output of the halo finder in the form of the input .cosmo file
-//
-// The mixed halo VALID or INVALID result is encoded into the status array so
-// that ALIVE particles belonging to an INVALID mixed halo are not written,
-// but DEAD particles belonging to a VALID mixed halo are written
-//
-// In order to make the output consistent with the serial output where the
-// lowest tagged particle in a halo owns the halo, work must be done to
-// identify the lowest tag.  This is because as particles are read onto
-// this processor using the round robin read of every particle, those
-// particles are no longer in tag order.  When the serial halo finder is
-// called it has to use the index of the particle on this processor which
-// is no longer the tag.
-//
-//      p    haloTag     tag    haloSize
-//      0          0     523           3
-//      1          0     522           0
-//      2          0     266           0
-//
-// In the above example the halo will be credited to 523 instead of 266
-// because the index of 523 is 0 and the index of 266 is 2.  So we must
-// make a pass to map the indexes.
-//
-/////////////////////////////////////////////////////////////////////////
-
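The three-particle example in the comment above can be checked directly. A small self-contained sketch of the same mapping pass (assumed values only), which ends up crediting the halo to tag 266:

    #include <cstdio>

    int main()
    {
      const int n = 3;
      int  haloTag[n] = { 0, 0, 0 };        // all three particles share halo index 0
      long tag[n]     = { 523, 522, 266 };

      int mapIndex[n];
      for (int p = 0; p < n; p++)
        mapIndex[p] = p;

      // Remap the halo's representative index to the particle with the lowest tag
      for (int p = 0; p < n; p++)
        if (tag[mapIndex[haloTag[p]]] > tag[p])
          mapIndex[haloTag[p]] = p;

      printf("halo owner tag: %ld\n", tag[mapIndex[0]]);   // prints 266
      return 0;
    }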
-void CosmoHaloFinderP::writeTaggedParticles()
-{
-  // Map the index of the particle on this process to the index of the
-  // particle with the lowest tag value so that the written output refers
-  // to the lowest tag as being the owner of the halo
-  int* mapIndex = new int[this->particleCount];
-  for (int p = 0; p < this->particleCount; p++)
-    mapIndex[p] = p;
-
-  // If the tag for the first particle of this halo is bigger than the tag
-  // for this particle, change the map to identify this particle as the lowest
-  for (int p = 0; p < this->particleCount; p++) {
-    if (this->tag[mapIndex[this->haloTag[p]]] > this->tag[p])
-      mapIndex[this->haloTag[p]] = p;
-  }
-
-  // Write the tagged particle file
-  ofstream* outStream = new ofstream(this->outFile.c_str(), ios::out);
-
-  string textMode = "ascii";
-  char str[1024];
-  if (textMode == "ascii") {
-
-    // Output all ALIVE particles that were not part of a mixed halo
-    // unless that halo is VALID.  Output only the DEAD particles that are
-    // part of a VALID halo. This was encoded when mixed halos were found
-    // so any ALIVE particle is VALID
-
-    for (int p = 0; p < this->particleCount; p++) {
-
-      if (this->status[p] == ALIVE) {
-        // Every alive particle appears in the particle output
-        sprintf(str, "%12.4E %12.4E ", this->xx[p], this->vx[p]);
-        *outStream << str;
-        sprintf(str, "%12.4E %12.4E ", this->yy[p], this->vy[p]);
-        *outStream << str;
-        sprintf(str, "%12.4E %12.4E ", this->zz[p], this->vz[p]);
-        *outStream << str;
-        ID_T result = (this->haloSize[this->haloTag[p]] < this->pmin)
-                      ? -1: this->tag[mapIndex[this->haloTag[p]]];
-        sprintf(str, "%12ld %12ld\n", (long int) result,
-                                      (long int) this->tag[p]);
-        *outStream << str;
-      }
-    }
-  }
-
-  else {
-
-    // output in COSMO form
-    for (int p = 0; p < this->particleCount; p++) {
-      POSVEL_T fBlock[COSMO_FLOAT];
-      fBlock[0] = this->xx[p];
-      fBlock[1] = this->vx[p];
-      fBlock[2] = this->yy[p];
-      fBlock[3] = this->vy[p];
-      fBlock[4] = this->zz[p];
-      fBlock[5] = this->vz[p];
-      fBlock[6] = (this->haloSize[this->haloTag[p]] < this->pmin)
-                   ? -1.0: 1.0 * this->tag[this->haloTag[p]];
-      outStream->write((char *)fBlock, COSMO_FLOAT * sizeof(POSVEL_T));
-
-      ID_T iBlock[COSMO_INT];
-      iBlock[0] = this->tag[p];
-      outStream->write((char *)iBlock, COSMO_INT * sizeof(ID_T));
-    }
-  }
-  outStream->close();
-
-  delete outStream;
-  delete [] mapIndex;
-}
-#endif // USE_VTK_COSMO
diff --git a/ThirdParty/Cosmo/CosmoHaloFinderP.h b/ThirdParty/Cosmo/CosmoHaloFinderP.h
deleted file mode 100644
index 60230ba..0000000
--- a/ThirdParty/Cosmo/CosmoHaloFinderP.h
+++ /dev/null
@@ -1,218 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-// .NAME CosmoHaloFinderP - find halos within a cosmology data file in parallel
-//
-// .SECTION Description
-// CosmoHaloFinderP takes a series of data files containing .cosmo data
-// along with parameters defining the box size for the data and for
-// determining halos within the particle data.  It distributes the data
-// across processors including a healthy dead zone of particles belonging
-// to neighbor processors.  By definition all halos can be determined
-// completely for any processor because of this dead zone.  The serial
-// halo finder is called on each processor.
-//
-// Halos returned from the serial halo finder either contain all particles
-// interior to this processor (ALIVE), all particles completely in the dead
-// zone (DEAD) or a combination (MIXED).
-//
-// If a mixed halo is shared with only one neighbor, the rule is: if the halo
-// lies in the upper planes of the processor (high values of x,y,z) this
-// processor keeps that halo as alive; if it lies in the low planes it is
-// given up as dead, with the understanding that the adjacent processor will
-// claim it as alive.  When more than two processors claim a halo, the
-// information is sent to the MASTER processor, which decides which processor
-// may claim that halo and the others give it up.
-//
-
-#ifndef CosmoHaloFinderP_h
-#define CosmoHaloFinderP_h
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#else
-#include "Definition.h"
-#endif
-
-#include "CosmoHaloFinder.h"
-#include "CosmoHalo.h"
-
-#include <string>
-#include <vector>
-
-using namespace std;
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT CosmoHaloFinderP {
-#else
-class CosmoHaloFinderP {
-#endif
-public:
-  CosmoHaloFinderP();
-  ~CosmoHaloFinderP();
-
-  // Set parameters for serial halo finder which does the work
-  void setParameters(
-        const string& outName,  // Base name of output halo files
-        POSVEL_T rL,            // Box size of the physical problem
-        POSVEL_T deadSize,      // Dead size used to normalize for non periodic
-        long np,                // Number of particles in the problem
-        int pmin,               // Minimum number of particles in a halo
-        POSVEL_T bb);           // Normalized distance between particles
-                                // which define a single halo
-
-  // Execute the serial halo finder for this processor
-  void executeHaloFinder();
-
-  // Collect the halo information from the serial halo finder
-  // Save the mixed halos so as to determine which processor owns them
-  void collectHalos();
-  void buildHaloStructure();
-  void processMixedHalos();
-
-  // MASTER node merges the mixed halos which cross more than two processors
-  void mergeHalos();
-  void collectMixedHalos(ID_T* buffer, int bufSize);
-  void assignMixedHalos();
-  void sendMixedHaloResults(ID_T* buffer, int bufSize);
-  int compareHalos(CosmoHalo* halo1, CosmoHalo* halo2);
-
-#ifndef USE_VTK_COSMO
-  // Write the particles with mass field containing halo tags
-  void writeTaggedParticles();
-#endif
-
-  // Set alive particle vectors which were created elsewhere
-  void setParticles(
-        vector<POSVEL_T>* xLoc,
-        vector<POSVEL_T>* yLoc,
-        vector<POSVEL_T>* zLoc,
-        vector<POSVEL_T>* xVel,
-        vector<POSVEL_T>* yVel,
-        vector<POSVEL_T>* zVel,
-        vector<POTENTIAL_T>* potential,
-        vector<ID_T>* id,
-        vector<MASK_T>* mask,
-        vector<STATUS_T>* state);
-
-  // Return information needed by halo center finder
-  int getNumberOfHalos()        { return (int)this->halos.size(); }
-  int* getHalos()               { return &this->halos[0]; }
-  int* getHaloCount()           { return &this->haloCount[0]; }
-  int* getHaloList()            { return this->haloList; }
-  int* getHaloTag()             { return this->haloTag; }
-  int* getHaloSize()            { return this->haloSize; }
-
-private:
-  int    myProc;                // My processor number
-  int    numProc;               // Total number of processors
-
-  int    layoutSize[DIMENSION]; // Decomposition of processors
-  int    layoutPos[DIMENSION];  // Position of this processor in decomposition
-
-  string outFile;               // File of particles written by this processor
-  string outHaloFile;           // File of halo tag and size of halo
-                                // used for looping on round robin share of data
-
-  CosmoHaloFinder haloFinder;   // Serial halo finder for this processor
-
-  POSVEL_T boxSize;             // Physical box size of the data set
-  POSVEL_T deadSize;            // Border size for dead particles
-  long    np;                   // Number of particles in the problem
-  int    pmin;                  // Minimum number of particles in a halo
-  POSVEL_T bb;                  // Minimum normalized distance between
-                                // particles in a halo
-  POSVEL_T normalizeFactor;     // Convert physical location to grid location
-
-  long   particleCount;         // Running index used to store data
-                                // Ends up as the number of alive plus dead
-
-  int    neighbor[NUM_OF_NEIGHBORS];    // Neighbor processor ids
-  int    deadParticle[NUM_OF_NEIGHBORS];// Number of neighbor dead particles
-  int    deadHalo[NUM_OF_NEIGHBORS];    // Number of neighbor mixed halos
-
-  POSVEL_T* xx;                 // X location for particles on this processor
-  POSVEL_T* yy;                 // Y location for particles on this processor
-  POSVEL_T* zz;                 // Z location for particles on this processor
-  POSVEL_T* vx;                 // X velocity for particles on this processor
-  POSVEL_T* vy;                 // Y velocity for particles on this processor
-  POSVEL_T* vz;                 // Z velocity for particles on this processor
-  POTENTIAL_T* pot;             // Particle potential
-  ID_T* tag;                    // Id tag for particles on this processor
-  MASK_T* mask;                 // Particle information
-
-  POSVEL_T** haloData;          // Normalized data for serial halo finder
-
-  STATUS_T* status;             // Particle is ALIVE or labeled with neighbor
-                                // processor index where it is ALIVE
-
-  int* haloTag;                 // From serial halo finder, the index of the
-                                // first particle in a halo
-
-  int* haloSize;                // From serial halo finder, the size of a halo
-                                // where the first particle has the actual size
-                                // and other member particles have size=0
-  int* haloAliveSize;
-  int* haloDeadSize;
-
-  int numberOfAliveHalos;       // Number of alive or valid halos
-  int numberOfDeadHalos;        // Number of dead halos
-  int numberOfMixedHalos;       // Number of halos with both alive and dead
-  int numberOfHaloParticles;    // Number of particles in all VALID halos
-
-  vector<CosmoHalo*> myMixedHalos;      // Mixed halos on this processor
-  vector<CosmoHalo*> allMixedHalos;     // Combined mixed halos on MASTER
-
-  vector<int> halos;            // First particle index into haloList
-  vector<int> haloCount;        // Size of each halo 
-
-  int* haloList;                // Indices of next particle in halo
-  int* haloStart;               // Index of first particle in halo
-                                // Chain is built backwards but using these two
-                                // arrays, all particle indices for a halo
-                                // can be found
-};
-
-#endif
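The ownership rule described in the header comment above is compact enough to restate as code. The sketch below is a hypothetical illustration only (claimMixedHalo, neighborCount and inUpperPlanes are invented names, not part of this class): a mixed halo shared with a single neighbor is kept ALIVE when it sits in this processor's upper planes and given up as DEAD otherwise, while a halo claimed by more than two processors is deferred to the MASTER.

    #include <iostream>

    enum class MixedHaloDecision { KeepAlive, GiveUpDead, SendToMaster };

    // neighborCount: how many other processors share this mixed halo.
    // inUpperPlanes: true if the halo sits at high x,y,z on this processor.
    MixedHaloDecision claimMixedHalo(int neighborCount, bool inUpperPlanes)
    {
      if (neighborCount > 1)                      // more than two processors in total
        return MixedHaloDecision::SendToMaster;   // MASTER arbitrates
      return inUpperPlanes ? MixedHaloDecision::KeepAlive
                           : MixedHaloDecision::GiveUpDead;
    }

    int main()
    {
      std::cout << static_cast<int>(claimMixedHalo(1, true))  << "\n";  // 0: keep alive
      std::cout << static_cast<int>(claimMixedHalo(1, false)) << "\n";  // 1: give up dead
      std::cout << static_cast<int>(claimMixedHalo(3, true))  << "\n";  // 2: send to master
      return 0;
    }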
diff --git a/ThirdParty/Cosmo/FOFHaloProperties.cxx b/ThirdParty/Cosmo/FOFHaloProperties.cxx
deleted file mode 100644
index df97832..0000000
--- a/ThirdParty/Cosmo/FOFHaloProperties.cxx
+++ /dev/null
@@ -1,702 +0,0 @@
-/*=========================================================================
-
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-#include "Partition.h"
-#include "FOFHaloProperties.h"
-#include "HaloCenterFinder.h"
-#ifndef USE_VTK_COSMO
-#include "Timings.h"
-#endif
-
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <iomanip>
-#include <set>
-#include <math.h>
-
-using namespace std;
-
-/////////////////////////////////////////////////////////////////////////
-//
-// FOFHaloProperties uses the results of the CosmoHaloFinder to locate the
-// particle within every halo in order to calculate properties on halos
-//
-/////////////////////////////////////////////////////////////////////////
-
-FOFHaloProperties::FOFHaloProperties()
-{
-  // Get the number of processors and rank of this processor
-  this->numProc = Partition::getNumProc();
-  this->myProc = Partition::getMyProc();
-}
-
-FOFHaloProperties::~FOFHaloProperties()
-{
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set linked list structure which will locate all particles in a halo
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::setHalos(
-                        int numberHalos,
-                        int* haloStartIndex,
-                        int* haloParticleCount,
-                        int* nextParticleIndex)
-{
-  this->numberOfHalos = numberHalos;
-  this->halos = haloStartIndex;
-  this->haloCount = haloParticleCount;
-  this->haloList = nextParticleIndex;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set parameters for the halo center finder
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::setParameters(
-                        const string& outName,
-                        POSVEL_T rL,
-                        POSVEL_T deadSz,
-                        POSVEL_T pDist)
-{
-  this->outFile = outName;
-
-  // Halo finder parameters
-  this->boxSize = rL;
-  this->deadSize = deadSz;
-  this->bb = pDist;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set the particle vectors that have already been read and which
-// contain only the alive particles for this processor
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::setParticles(
-                        vector<POSVEL_T>* xLoc,
-                        vector<POSVEL_T>* yLoc,
-                        vector<POSVEL_T>* zLoc,
-                        vector<POSVEL_T>* xVel,
-                        vector<POSVEL_T>* yVel,
-                        vector<POSVEL_T>* zVel,
-                        vector<POSVEL_T>* pmass,
-                        vector<POTENTIAL_T>* potential,
-                        vector<ID_T>* id,
-                        vector<MASK_T>* maskData,
-                        vector<STATUS_T>* state)
-{
-  this->particleCount = (long)xLoc->size();
-
-  // Extract the contiguous data block from a vector pointer
-  this->xx = &(*xLoc)[0];
-  this->yy = &(*yLoc)[0];
-  this->zz = &(*zLoc)[0];
-  this->vx = &(*xVel)[0];
-  this->vy = &(*yVel)[0];
-  this->vz = &(*zVel)[0];
-  this->mass = &(*pmass)[0];
-  this->pot = &(*potential)[0];
-  this->tag = &(*id)[0];
-  this->mask = &(*maskData)[0];
-  this->status = &(*state)[0];
-}
-
-void FOFHaloProperties::setParticles(
-			long count,
-                        POSVEL_T* xLoc,
-                        POSVEL_T* yLoc,
-                        POSVEL_T* zLoc,
-                        POSVEL_T* xVel,
-                        POSVEL_T* yVel,
-                        POSVEL_T* zVel,
-                        POSVEL_T* pmass,
-                        ID_T* id)
-{
-  this->particleCount = count;
-
-  // Extract the contiguous data block from a vector pointer
-  this->xx = xLoc;
-  this->yy = yLoc;
-  this->zz = zLoc;
-  this->vx = xVel;
-  this->vy = yVel;
-  this->vz = zVel;
-  this->mass = pmass;
-  this->tag = id;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Find the index of the particle at the center of every FOF halo, i.e. the
-// particle with the minimum value in the potential array.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::FOFHaloCenterMinimumPotential(vector<int>* haloCenter)
-{
-  for (int halo = 0; halo < this->numberOfHalos; halo++) {
-
-    // First particle in halo
-    int p = this->halos[halo];
-    POTENTIAL_T minPotential = this->pot[p];
-    int centerIndex = p;
-
-    // Next particle
-    p = this->haloList[p];
-
-    // Search for minimum
-    while (p != -1) {
-      if (minPotential > this->pot[p]) {
-        minPotential = this->pot[p];
-        centerIndex = p;
-      }
-      p = this->haloList[p];
-    }
-
-    // Save the minimum potential index for this halo
-    (*haloCenter).push_back(centerIndex);
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the mass of every FOF halo by accumulating individual masses
-//
-// m_FOF = (Sum i=1 to n_FOF) m_i
-//    m_FOF is the mass of an FOF halo
-//    n_FOF is the number of particles in the halo
-//    m_i is the mass of an individual particle
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::FOFHaloMass(
-                        vector<POSVEL_T>* haloMass)
-{
-  POSVEL_T lmass;
-  double mKahan;
-
-  for (int halo = 0; halo < this->numberOfHalos; halo++) {
-    mKahan = KahanSummation(halo, this->mass);
-    lmass = (POSVEL_T) mKahan;
-    (*haloMass).push_back(lmass);
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the center of mass of every FOF halo
-//
-// x_FOF = ((Sum i=1 to n_FOF) m_i * x_i) / M
-//    x_FOF is the center of mass vector
-//    M is the total mass of particles in the halo
-//    m_i is the mass of particle i
-//    x_i is the position vector of particle i
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::FOFCenterOfMass(
-                        vector<POSVEL_T>* xCenterOfMass,
-                        vector<POSVEL_T>* yCenterOfMass,
-                        vector<POSVEL_T>* zCenterOfMass)
-{
-  POSVEL_T xCofMass, yCofMass, zCofMass;
-  double xKahan, yKahan, zKahan;
-
-  for (int halo = 0; halo < this->numberOfHalos; halo++) {
-
-    double totalMass = KahanSummation(halo, this->mass);
-
-    xKahan = KahanSummation2(halo, this->xx, this->mass);
-    yKahan = KahanSummation2(halo, this->yy, this->mass);
-    zKahan = KahanSummation2(halo, this->zz, this->mass);
-
-    xCofMass = (POSVEL_T) (xKahan / totalMass);
-    yCofMass = (POSVEL_T) (yKahan / totalMass);
-    zCofMass = (POSVEL_T) (zKahan / totalMass);
-
-    (*xCenterOfMass).push_back(xCofMass);
-    (*yCenterOfMass).push_back(yCofMass);
-    (*zCenterOfMass).push_back(zCofMass);
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the average position of particles of every FOF halo
-//
-// x_FOF = ((Sum i=1 to n_FOF) x_i) / n_FOF
-//    x_FOF is the average position vector
-//    n_FOF is the number of particles in the halo
-//    x_i is the position vector of particle i
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::FOFPosition(
-                        vector<POSVEL_T>* xMeanPos,
-                        vector<POSVEL_T>* yMeanPos,
-                        vector<POSVEL_T>* zMeanPos)
-{
-  POSVEL_T xMean, yMean, zMean;
-  double xKahan, yKahan, zKahan;
-
-  for (int halo = 0; halo < this->numberOfHalos; halo++) {
-    xKahan = KahanSummation(halo, this->xx);
-    yKahan = KahanSummation(halo, this->yy);
-    zKahan = KahanSummation(halo, this->zz);
-
-    xMean = (POSVEL_T) (xKahan / this->haloCount[halo]);
-    yMean = (POSVEL_T) (yKahan / this->haloCount[halo]);
-    zMean = (POSVEL_T) (zKahan / this->haloCount[halo]);
-
-    (*xMeanPos).push_back(xMean);
-    (*yMeanPos).push_back(yMean);
-    (*zMeanPos).push_back(zMean);
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the average velocity of particles of every FOF halo
-//
-// v_FOF = ((Sum i=1 to n_FOF) v_i) / n_FOF
-//    v_FOF is the average velocity vector
-//    n_FOF is the number of particles in the halo
-//    v_i is the velocity vector of particle i
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::FOFVelocity(
-                        vector<POSVEL_T>* xMeanVel,
-                        vector<POSVEL_T>* yMeanVel,
-                        vector<POSVEL_T>* zMeanVel)
-{
-  POSVEL_T xMean, yMean, zMean;
-  double xKahan, yKahan, zKahan;
-
-  for (int halo = 0; halo < this->numberOfHalos; halo++) {
-    xKahan = KahanSummation(halo, this->vx);
-    yKahan = KahanSummation(halo, this->vy);
-    zKahan = KahanSummation(halo, this->vz);
-
-    xMean = (POSVEL_T) (xKahan / this->haloCount[halo]);
-    yMean = (POSVEL_T) (yKahan / this->haloCount[halo]);
-    zMean = (POSVEL_T) (zKahan / this->haloCount[halo]);
-
-    (*xMeanVel).push_back(xMean);
-    (*yMeanVel).push_back(yMean);
-    (*zMeanVel).push_back(zMean);
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the velocity dispersion of every FOF halo
-//
-// o_FOF = sqrt((avg_part_vel_dot_prod - dot_prod_halo_vel) / 3)
-//    avg_part_vel_dot_prod = ((Sum i=1 to n_FOF) v_i dot v_i) / n_FOF
-//       n_FOF is the number of particles in the halo
-//       v_i is the velocity vector of particle i
-//    dot_prod_halo_vel = v_FOF dot v_FOF
-//       v_FOF is the average velocity vector of all particles in the halo
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::FOFVelocityDispersion(
-                        vector<POSVEL_T>* xAvgVel,
-                        vector<POSVEL_T>* yAvgVel,
-                        vector<POSVEL_T>* zAvgVel,
-                        vector<POSVEL_T>* velDisp)
-{
-  for (int halo = 0; halo < this->numberOfHalos; halo++) {
-
-    // First particle in the halo
-    int p = this->halos[halo];
-    POSVEL_T particleDot = 0.0;
-
-    // Iterate over all particles in the halo collecting dot products
-    while (p != -1) {
-      particleDot += dotProduct(this->vx[p], this->vy[p], this->vz[p]);
-      p = this->haloList[p];
-    }
-
-    // Average of all the dot products
-    particleDot /= this->haloCount[halo];
-
-    // Dot product of the average velocity for the entire halo
-    POSVEL_T haloDot = dotProduct((*xAvgVel)[halo],
-                                  (*yAvgVel)[halo], (*zAvgVel)[halo]);
-
-    // Velocity dispersion
-    POSVEL_T vDispersion = (POSVEL_T)sqrt((particleDot - haloDot) / 3.0);
-
-    // Save onto supplied vector
-    velDisp->push_back(vDispersion);
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Dot product of a vector with itself (its squared magnitude)
-//
-/////////////////////////////////////////////////////////////////////////
-
-POSVEL_T FOFHaloProperties::dotProduct(POSVEL_T x, POSVEL_T y, POSVEL_T z)
-{
-  POSVEL_T dotProd = x * x + y * y + z * z;
-  return dotProd;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the Kahan summation
-// Reduces roundoff error in floating point arithmetic
-//
-/////////////////////////////////////////////////////////////////////////
-
-POSVEL_T FOFHaloProperties::KahanSummation(int halo, POSVEL_T* data)
-{
-  POSVEL_T dataSum, dataRem, v, w;
-
-  // First particle in halo and first step in Kahan summation
-  int p = this->halos[halo];
-  dataSum = data[p];
-  dataRem = 0.0;
-
-  // Next particle
-  p = this->haloList[p];
-
-  // Remaining steps in Kahan summation
-  while (p != -1) {
-    v = data[p] - dataRem;
-    w = dataSum + v;
-    dataRem = (w - dataSum) - v;
-    dataSum = w;
-
-    p = this->haloList[p];
-  }
-  return dataSum;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the Kahan summation on two variables multiplied
-// Reduces roundoff error in floating point arithmetic
-//
-/////////////////////////////////////////////////////////////////////////
-
-POSVEL_T FOFHaloProperties::KahanSummation2(int halo,
-                                            POSVEL_T* data1, POSVEL_T* data2)
-{
-  POSVEL_T dataSum, dataRem, v, w;
-
-  // First particle in halo and first step in Kahan summation
-  int p = this->halos[halo];
-  dataSum = data1[p] * data2[p];
-  dataRem = 0.0;
-
-  // Next particle
-  p = this->haloList[p];
-
-  // Remaining steps in Kahan summation
-  while (p != -1) {
-    v = (data1[p] * data2[p]) - dataRem;
-    w = dataSum + v;
-    dataRem = (w - dataSum) - v;
-    dataSum = w;
-
-    p = this->haloList[p];
-  }
-  return dataSum;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the incremental mean using Kahan summation
-//
-/////////////////////////////////////////////////////////////////////////
-
-POSVEL_T FOFHaloProperties::incrementalMean(int halo, POSVEL_T* data)
-{
-  double dataMean, dataRem, diff, value, v, w;
-
-  // First particle in halo and first step in incremental mean
-  int p = this->halos[halo];
-  dataMean = data[p];
-  dataRem = 0.0;
-  int count = 1;
-
-  // Next particle
-  p = this->haloList[p];
-  count++;
-
-  // Remaining steps in incremental mean
-  while (p != -1) {
-    diff = data[p] - dataMean;
-    value = diff / count;
-    v = value - dataRem;
-    w = dataMean + v;
-    dataRem = (w - dataMean) - v;
-    dataMean = w;
-
-    p = this->haloList[p];
-    count++;
-  }
-  return (POSVEL_T) dataMean;
-}
-
-#ifndef USE_VTK_COSMO
-/////////////////////////////////////////////////////////////////////////
-//
-// Write the halo catalog file
-//
-// Output one entry per halo
-// Location (xx,yy,zz) is the location of particle closest to centroid
-// Eventually this needs to be the particle with the minimum potential
-// Velocity (vx,vy,vz) is the average velocity of all halo particles
-// Mass is the accumulated mass of all particles in the halo
-// Tag is the unique id of the halo
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::FOFHaloCatalog(
-                        vector<int>* haloCenter,
-                        vector<POSVEL_T>* haloMass,
-                        vector<POSVEL_T>* xMeanVel,
-                        vector<POSVEL_T>* yMeanVel,
-                        vector<POSVEL_T>* zMeanVel)
-{
-  // Compose ascii and .cosmo binary file names
-  ostringstream aname, cname;
-  if (this->numProc == 1) {
-    aname << this->outFile << ".halocatalog.ascii";
-    cname << this->outFile << ".halocatalog.cosmo";
-  } else {
-    aname << this->outFile << ".halocatalog.ascii." << myProc;
-    cname << this->outFile << ".halocatalog.cosmo." << myProc;
-  }
-  ofstream aStream(aname.str().c_str(), ios::out);
-  ofstream cStream(cname.str().c_str(), ios::out|ios::binary);
-
-  char str[1024];
-  POSVEL_T fBlock[COSMO_FLOAT];
-  ID_T iBlock[COSMO_INT];
-
-  for (int halo = 0; halo < this->numberOfHalos; halo++) {
-
-    int centerIndex = (*haloCenter)[halo];
-    int haloTag = this->tag[this->halos[halo]];
-
-    // Write ascii
-    sprintf(str, "%12.4E %12.4E %12.4E %12.4E %12.4E %12.4E %12.4E %12d\n",
-      this->xx[centerIndex],
-      (*xMeanVel)[halo],
-      this->yy[centerIndex],
-      (*yMeanVel)[halo],
-      this->zz[centerIndex],
-      (*zMeanVel)[halo],
-      (*haloMass)[halo],
-      haloTag);
-      aStream << str;
-
-    fBlock[0] = this->xx[centerIndex];
-    fBlock[1] = (*xMeanVel)[halo];
-    fBlock[2] = this->yy[centerIndex];
-    fBlock[3] = (*yMeanVel)[halo];
-    fBlock[4] = this->zz[centerIndex];
-    fBlock[5] = (*zMeanVel)[halo];
-    fBlock[6] = (*haloMass)[halo];
-    cStream.write(reinterpret_cast<char*>(fBlock),
-                  COSMO_FLOAT * sizeof(POSVEL_T));
-
-    iBlock[0] = haloTag;
-    cStream.write(reinterpret_cast<char*>(iBlock),
-                  COSMO_INT * sizeof(ID_T));
-  }
-  aStream.close();
-  cStream.close();
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// For each processor print the halo index and size for debugging
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::printHaloSizes(int minSize)
-{
-  for (int i = 0; i < this->numberOfHalos; i++)
-    if (this->haloCount[i] > minSize)
-      cout << "Rank " << Partition::getMyProc()
-           << " Halo " << i
-           << " size = " << this->haloCount[i] << endl;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Copy locations and tags of halo particles to the allocated arrays
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::extractLocation(
-				int halo,
-				int* actualIndx,
-				POSVEL_T* xLocHalo,
-				POSVEL_T* yLocHalo,
-				POSVEL_T* zLocHalo,
-				ID_T* id)
-{
-  int p = this->halos[halo];
-  for (int i = 0; i < this->haloCount[halo]; i++) {
-    xLocHalo[i] = this->xx[p];
-    yLocHalo[i] = this->yy[p];
-    zLocHalo[i] = this->zz[p];
-    id[i] = this->tag[p];
-    actualIndx[i] = p;
-    p = this->haloList[p];
-  }
-}
-
-#endif
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Copy locations, velocities and tags of halo particles to the allocated arrays
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::extractInformation(
-				int halo,
-				int* actualIndx,
-				POSVEL_T* xLocHalo,
-				POSVEL_T* yLocHalo,
-				POSVEL_T* zLocHalo,
-				POSVEL_T* xVelHalo,
-				POSVEL_T* yVelHalo,
-				POSVEL_T* zVelHalo,
-				POSVEL_T* massHalo,
-				ID_T* id)
-{
-  int p = this->halos[halo];
-  for (int i = 0; i < this->haloCount[halo]; i++) {
-    xLocHalo[i] = this->xx[p];
-    yLocHalo[i] = this->yy[p];
-    zLocHalo[i] = this->zz[p];
-    xVelHalo[i] = this->vx[p];
-    yVelHalo[i] = this->vy[p];
-    zVelHalo[i] = this->vz[p];
-    massHalo[i] = this->mass[p];
-    id[i] = this->tag[p];
-    actualIndx[i] = p;
-    p = this->haloList[p];
-  }
-}
-
-#ifndef USE_VTK_COSMO
-
-/////////////////////////////////////////////////////////////////////////
-//
-// For the requested processor and halo index output locations for
-// a scatter plot for debugging
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::printLocations(int halo)
-{
-  int p = this->halos[halo];
-  for (int i = 0; i < this->haloCount[halo]; i++) {
-    cout << "FOF INFO " << this->myProc << " " << halo
-         << " INDEX " << p << " TAG " << this->tag[p] << " LOCATION "
-         << this->xx[p] << " " << this->yy[p] << " " << this->zz[p] << endl;
-    p = this->haloList[p];
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// For the requested processor and halo index output bounding box
-//
-/////////////////////////////////////////////////////////////////////////
-
-void FOFHaloProperties::printBoundingBox(int halo)
-{
-  POSVEL_T minBox[DIMENSION], maxBox[DIMENSION];
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    minBox[dim] = this->boxSize;
-    maxBox[dim] = 0.0;
-  }
-
-  int p = this->halos[halo];
-  for (int i = 0; i < this->haloCount[halo]; i++) {
-
-    if (minBox[0] > this->xx[p])
-      minBox[0] = this->xx[p];
-    if (maxBox[0] < this->xx[p])
-      maxBox[0] = this->xx[p];
-
-    if (minBox[1] > this->yy[p])
-      minBox[1] = this->yy[p];
-    if (maxBox[1] < this->yy[p])
-      maxBox[1] = this->yy[p];
-
-    if (minBox[2] > this->zz[p])
-      minBox[2] = this->zz[p];
-    if (maxBox[2] < this->zz[p])
-      maxBox[2] = this->zz[p];
-
-    p = this->haloList[p];
-  }
-  cout << "FOF BOUNDING BOX " << this->myProc << " " << halo << ": "
-         << minBox[0] << ":" << maxBox[0] << "  "
-         << minBox[1] << ":" << maxBox[1] << "  "
-         << minBox[2] << ":" << maxBox[2] << "  " << endl;
-}
-#endif
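The compensated (Kahan) summation used by KahanSummation() and KahanSummation2() above is easier to see on a flat array than on the halo linked list. The following self-contained sketch (names are illustrative, not from this library) performs the same update steps and shows why they matter for float accumulations.

    #include <cstddef>
    #include <cstdio>
    #include <vector>

    float kahanSum(const std::vector<float>& data)
    {
      if (data.empty()) return 0.0f;
      float sum = data[0];
      float rem = 0.0f;                    // compensation for lost low-order bits
      for (std::size_t i = 1; i < data.size(); i++) {
        float v = data[i] - rem;
        float w = sum + v;
        rem = (w - sum) - v;               // what the addition just lost
        sum = w;
      }
      return sum;
    }

    int main()
    {
      // One large mass followed by many small ones: a naive float sum barely
      // moves, while the compensated sum stays close to the true total.
      std::vector<float> mass(1000001, 0.0001f);
      mass[0] = 1.0e6f;

      float naive = 0.0f;
      for (std::size_t i = 0; i < mass.size(); i++) naive += mass[i];

      std::printf("naive = %.2f  kahan = %.2f  exact ~= %.2f\n",
                  naive, kahanSum(mass), 1.0e6f + 100.0f);
      return 0;
    }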
diff --git a/ThirdParty/Cosmo/FOFHaloProperties.h b/ThirdParty/Cosmo/FOFHaloProperties.h
deleted file mode 100644
index e57d543..0000000
--- a/ThirdParty/Cosmo/FOFHaloProperties.h
+++ /dev/null
@@ -1,223 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-// .NAME FOFHaloProperties - calculate properties of all FOF halos
-//
-// FOFHaloProperties takes data from CosmoHaloFinderP about individual halos
-// and data from all particles and calculates properties.
-//
-
-#ifndef FOFHaloProperties_h
-#define FOFHaloProperties_h
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#else
-#include "Definition.h"
-#endif
-
-#include "ChainingMesh.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT FOFHaloProperties {
-#else
-class FOFHaloProperties {
-#endif
-public:
-  FOFHaloProperties();
-  ~FOFHaloProperties();
-
-  // Set parameters for sizes of the dead/alive space
-  void setParameters(
-        const string& outName,  // Base name of output halo files
-        POSVEL_T rL,            // Box size of the physical problem
-        POSVEL_T deadSize,      // Dead size used to normalize for non periodic
-	POSVEL_T bb);		// Inter particle distance for halos
-
-  // Set alive particle vectors which were created elsewhere
-  void setParticles(
-        vector<POSVEL_T>* xLoc,
-        vector<POSVEL_T>* yLoc,
-        vector<POSVEL_T>* zLoc,
-        vector<POSVEL_T>* xVel,
-        vector<POSVEL_T>* yVel,
-        vector<POSVEL_T>* zVel,
-        vector<POSVEL_T>* pmass,
-        vector<POTENTIAL_T>* potential,
-        vector<ID_T>* id,
-        vector<MASK_T>* mask,
-        vector<STATUS_T>* state);
-
-  void setParticles(
-	long count,
-        POSVEL_T* xLoc,
-        POSVEL_T* yLoc,
-        POSVEL_T* zLoc,
-        POSVEL_T* xVel,
-        POSVEL_T* yVel,
-        POSVEL_T* zVel,
-        POSVEL_T* pmass,
-        ID_T* id);
-
-  // Set the halo information from the FOF halo finder
-  void setHalos(
-        int  numberOfHalos,     // Number of halos found
-        int* halos,             // Index into haloList of first particle
-        int* haloCount,         // Number of particles in the matching halo
-        int* haloList);         // Chain of indices of all particles in halo
-
-  // Find the halo centers (minimum potential) finding minimum of array
-  void FOFHaloCenterMinimumPotential(vector<int>* haloCenter);
-
-  // Find the mass of each halo by accumulating individual particle masses
-  void FOFHaloMass(
-        vector<POSVEL_T>* haloMass);
-
-  // Find the average position of FOF halo particles
-  void FOFPosition(
-        vector<POSVEL_T>* xPos,
-        vector<POSVEL_T>* yPos,
-        vector<POSVEL_T>* zPos);
-
-  // Find the center of mass of FOF halo particles
-  void FOFCenterOfMass(
-        vector<POSVEL_T>* xCofMass,
-        vector<POSVEL_T>* yCofMass,
-        vector<POSVEL_T>* zCofMass);
-
-  // Find the average velocity of FOF halo particles
-  void FOFVelocity(
-        vector<POSVEL_T>* xVel,
-        vector<POSVEL_T>* yVel,
-        vector<POSVEL_T>* zVel);
-
-  // Find the velocity dispersion of FOF halos
-  void FOFVelocityDispersion(
-        vector<POSVEL_T>* xVel,
-        vector<POSVEL_T>* yVel,
-        vector<POSVEL_T>* zVel,
-        vector<POSVEL_T>* velDisp);
-
-  // Kahan summation of floating point numbers to reduce roundoff error
-  POSVEL_T KahanSummation(int halo, POSVEL_T* data);
-  POSVEL_T KahanSummation2(int halo, POSVEL_T* data1, POSVEL_T* data2);
-
-  // Dot product
-  POSVEL_T dotProduct(POSVEL_T x, POSVEL_T y, POSVEL_T z);
-
-  // Incremental mean, possibly needed for very large halos
-  POSVEL_T incrementalMean(int halo, POSVEL_T* data);
-
-  // Extract locations and tags for all particles in a halo
-  void extractLocation(
-	int halo,
-	int* actualIndx,
-	POSVEL_T* xLocHalo,
-	POSVEL_T* yLocHalo,
-	POSVEL_T* zLocHalo,
-	ID_T* tag);
-
-  void extractInformation(
-        int halo,
-        int* actualIndx,
-        POSVEL_T* xLocHalo,
-        POSVEL_T* yLocHalo,
-        POSVEL_T* zLocHalo,
-        POSVEL_T* xVelHalo,
-        POSVEL_T* yVelHalo,
-        POSVEL_T* zVelHalo,
-        POSVEL_T* pmass,
-        ID_T* tag);
-
-#ifndef USE_VTK_COSMO
-  // Print information about halos for debugging and selection
-  void FOFHaloCatalog(
-	vector<int>* haloCenter,
-        vector<POSVEL_T>* haloMass,
-        vector<POSVEL_T>* xVel,
-        vector<POSVEL_T>* yVel,
-        vector<POSVEL_T>* zVel);
-
-  void printHaloSizes(int minSize);
-  void printLocations(int haloIndex);
-  void printBoundingBox(int haloIndex);
-#endif
-
-private:
-  int    myProc;                // My processor number
-  int    numProc;               // Total number of processors
-
-  string outFile;               // File of particles written by this processor
-
-  POSVEL_T boxSize;             // Physical box size of the data set
-  POSVEL_T deadSize;            // Border size for dead particles
-  POSVEL_T bb;			// Interparticle distance for halos
-
-  long   particleCount;         // Total particles on this processor
-
-  POSVEL_T* xx;                 // X location for particles on this processor
-  POSVEL_T* yy;                 // Y location for particles on this processor
-  POSVEL_T* zz;                 // Z location for particles on this processor
-  POSVEL_T* vx;                 // X velocity for particles on this processor
-  POSVEL_T* vy;                 // Y velocity for particles on this processor
-  POSVEL_T* vz;                 // Z velocity for particles on this processor
-  POSVEL_T* mass;		// mass of particles on this processor
-  POTENTIAL_T* pot;             // Particle potential
-  ID_T* tag;                    // Id tag for particles on this processor
-  MASK_T* mask;                 // Particle information
-  STATUS_T* status;             // Particle is ALIVE or labeled with neighbor
-                                // processor index where it is ALIVE
-
-  // Information about halos from FOF halo finder
-  int  numberOfHalos;           // Number of halos found
-  int* halos;                   // First particle index into haloList
-  int* haloCount;               // Size of each halo 
-  int* haloList;                // Indices of next particle in halo
-};
-
-#endif
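The velocity-dispersion formula documented for FOFVelocityDispersion() above, sigma = sqrt((<v.v> - vbar.vbar) / 3), can be checked on a toy halo. The sketch below uses three made-up velocities and is only a numerical illustration of the formula, not part of this library.

    #include <cmath>
    #include <cstdio>

    int main()
    {
      const int n = 3;
      double vx[n] = { 1.0,  2.0,  3.0 };
      double vy[n] = { 0.0,  1.0, -1.0 };
      double vz[n] = { 0.5,  0.5,  0.5 };

      double sumDot = 0.0, mx = 0.0, my = 0.0, mz = 0.0;
      for (int i = 0; i < n; i++) {
        sumDot += vx[i]*vx[i] + vy[i]*vy[i] + vz[i]*vz[i];
        mx += vx[i]; my += vy[i]; mz += vz[i];
      }
      double avgDot  = sumDot / n;               // <v.v>
      mx /= n; my /= n; mz /= n;                 // vbar
      double haloDot = mx*mx + my*my + mz*mz;    // vbar.vbar
      double sigma   = std::sqrt((avgDot - haloDot) / 3.0);
      std::printf("sigma = %f\n", sigma);        // ~0.667 for these values
      return 0;
    }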
diff --git a/ThirdParty/Cosmo/HaloCenterFinder.cxx b/ThirdParty/Cosmo/HaloCenterFinder.cxx
deleted file mode 100644
index 1e60c3a..0000000
--- a/ThirdParty/Cosmo/HaloCenterFinder.cxx
+++ /dev/null
@@ -1,1325 +0,0 @@
-/*=========================================================================
-
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-#include "Partition.h"
-#include "HaloCenterFinder.h"
-
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <iomanip>
-#include <set>
-#include <math.h>
-
-using namespace std;
-
-/////////////////////////////////////////////////////////////////////////
-//
-// HaloCenterFinder takes all particles in a halo and calculates the
-// most bound particle (MBP) or most connected particle (MCP) using
-// an N^2/2 algorithm on small halos and ChainingMesh algorithms on
-// large halos.
-//
-/////////////////////////////////////////////////////////////////////////
-
-HaloCenterFinder::HaloCenterFinder()
-{
-  // Get the number of processors and rank of this processor
-  this->numProc = Partition::getNumProc();
-  this->myProc = Partition::getMyProc();
-}
-
-HaloCenterFinder::~HaloCenterFinder()
-{
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set parameters for the halo center finder
-//
-/////////////////////////////////////////////////////////////////////////
-
-void HaloCenterFinder::setParameters(
-                        POSVEL_T pDist,
-                        POSVEL_T distConvertFactor)
-{
-  // Halo finder parameters
-  this->bb = pDist;
-  this->distFactor = distConvertFactor;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set the particle vectors that have already been read and which
-// contain only the alive particles for this processor
-//
-/////////////////////////////////////////////////////////////////////////
-
-void HaloCenterFinder::setParticles(
-                        long haloCount,
-                        POSVEL_T* xLoc,
-                        POSVEL_T* yLoc,
-                        POSVEL_T* zLoc,
-                        POSVEL_T* massHalo,
-                        ID_T* id)
-{
-  this->particleCount = haloCount;
-  this->xx = xLoc;
-  this->yy = yLoc;
-  this->zz = zLoc;
-  this->mass = massHalo;
-  this->tag = id;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the most connected particle using an (N*(N-1))/2 algorithm.
-// This is the particle with the most friends (most particles within bb)
-// Locations of the particles have taken wraparound into account so that
-// processors on the low edge of a dimension have particles with negative
-// positions and processors on the high edge of a dimension have particles
-// with locations greater than the box size
-//
-/////////////////////////////////////////////////////////////////////////
-
-int HaloCenterFinder::mostConnectedParticleN2()
-{
-  // Arrange in an upper triangular grid of friend counts
-  // friendCount will hold number of friends that a particle has
-  //
-  int* friendCount = new int[this->particleCount];
-  for (int i = 0; i < this->particleCount; i++)
-    friendCount[i] = 0;
-
-  // Iterate on all particles in halo adding to count if friends of each other
-  // Iterate in upper triangular fashion
-  for (int p = 0; p < this->particleCount; p++) {
-
-    // Get halo particle after the current one
-    for (int q = p+1; q < this->particleCount; q++) {
-
-      // Calculate the distance between the two
-      POSVEL_T xdist = fabs(this->xx[p] - this->xx[q]);
-      POSVEL_T ydist = fabs(this->yy[p] - this->yy[q]);
-      POSVEL_T zdist = fabs(this->zz[p] - this->zz[q]);
-
-      if ((xdist < this->bb) && (ydist < this->bb) && (zdist < this->bb)) {
-        POSVEL_T dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-        if (dist < this->bb) {
-          friendCount[p]++;
-          friendCount[q]++;
-        }
-      }
-    }
-  }
-
-  // Particle with the most friends
-  int maxFriends = 0;
-  int result = 0;
-
-  for (int i = 0; i < this->particleCount; i++) {
-    if (friendCount[i] > maxFriends) {
-      maxFriends = friendCount[i];
-      result = i;
-    }
-  }
-
-  delete [] friendCount;
-  return result;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Most connected particle using a chaining mesh of particles in one FOF halo
-// Build chaining mesh with a grid size such that all friends will be in
-// adjacent mesh grids.
-//
-/////////////////////////////////////////////////////////////////////////
-
-int HaloCenterFinder::mostConnectedParticleChainMesh()
-{
-  int bp, bi, bj, bk;
-  int wp, wi, wj, wk;
-  int first[DIMENSION], last[DIMENSION];
-  POSVEL_T xdist, ydist, zdist, dist;
-
-  // Build the chaining mesh
-  int chainFactor = MCP_CHAIN_FACTOR;
-  POSVEL_T chainSize = this->bb / chainFactor;
-  ChainingMesh* haloChain = buildChainingMesh(chainSize);
-
-  // Save the number of friends for each particle in the halo
-  int* friendCount = new int[this->particleCount];
-  for (int i = 0; i < this->particleCount; i++)
-    friendCount[i] = 0;
-
-  // Get chaining mesh information
-  int*** buckets = haloChain->getBuckets();
-  int* bucketList = haloChain->getBucketList();
-  int* meshSize = haloChain->getMeshSize();
-
-  // Calculate the friend count within each bucket using upper triangular loop
-  for (bi = 0; bi < meshSize[0]; bi++) {
-    for (bj = 0; bj < meshSize[1]; bj++) {
-      for (bk = 0; bk < meshSize[2]; bk++) {
-
-        bp = buckets[bi][bj][bk];
-        while (bp != -1) {
-
-          wp = bucketList[bp];
-          while (wp != -1) {
-            xdist = (POSVEL_T)fabs(this->xx[bp] - this->xx[wp]);
-            ydist = (POSVEL_T)fabs(this->yy[bp] - this->yy[wp]);
-            zdist = (POSVEL_T)fabs(this->zz[bp] - this->zz[wp]);
-            dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-            if (dist != 0.0 && dist < this->bb) {
-              friendCount[bp]++;
-              friendCount[wp]++;
-            }
-            wp = bucketList[wp];
-          }
-          bp = bucketList[bp];
-        }
-      }
-    }
-  }
-
-  // Walk every bucket in the chaining mesh, processing all particles in bucket
-  // against all neighbor bucket particles one time, storing the friend
-  // count in two places, using the sliding window trick
-  for (bi = 0; bi < meshSize[0]; bi++) {
-    for (bj = 0; bj < meshSize[1]; bj++) {
-      for (bk = 0; bk < meshSize[2]; bk++) {
-
-        // Set the walking window around this bucket
-        first[0] = bi - chainFactor; last[0] = bi + chainFactor;
-        first[1] = bj - chainFactor; last[1] = bj + chainFactor;
-        first[2] = bk - chainFactor; last[2] = bk + chainFactor;
-
-        for (int dim = 0; dim < DIMENSION; dim++) {
-          if (first[dim] < 0)
-            first[dim] = 0;
-          if (last[dim] >= meshSize[dim])
-            last[dim] = meshSize[dim] - 1;
-        }
-
-        // First particle in the bucket being processed
-        bp = buckets[bi][bj][bk];
-        while (bp != -1) {
-
-          // For the current particle in the current bucket, count friends
-          // in all neighbor buckets of the chaining mesh.
-          // With the sliding window we calculate the distance between two
-          // particles once and credit both of them, so when the second
-          // particle's bucket is reached we must not calculate and add it
-          // in again.  We therefore track which buckets have already been
-          // compared to this bucket and calculate only for the planes and
-          // rows that have not yet been processed.
-
-          // Do entire trailing plane of buckets that has not been processed
-          for (wi = bi + 1; wi <= last[0]; wi++) {
-            for (wj = first[1]; wj <= last[1]; wj++) {
-              for (wk = first[2]; wk <= last[2]; wk++) {
-                wp = buckets[wi][wj][wk];
-                while (wp != -1) {
-                  xdist = (POSVEL_T) fabs(this->xx[bp] - this->xx[wp]);
-                  ydist = (POSVEL_T) fabs(this->yy[bp] - this->yy[wp]);
-                  zdist = (POSVEL_T) fabs(this->zz[bp] - this->zz[wp]);
-                  dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-                  if (dist != 0.0 && dist < this->bb) {
-                    friendCount[bp]++;
-                    friendCount[wp]++;
-                  }
-                  wp = bucketList[wp];
-                }
-              }
-            }
-          }
-
-          // Do entire trailing row that has not been processed in this plane
-          wi = bi;
-          for (wj = bj + 1; wj <= last[1]; wj++) {
-            for (wk = first[2]; wk <= last[2]; wk++) {
-              wp = buckets[wi][wj][wk];
-              while (wp != -1) {
-                xdist = (POSVEL_T) fabs(this->xx[bp] - this->xx[wp]);
-                ydist = (POSVEL_T) fabs(this->yy[bp] - this->yy[wp]);
-                zdist = (POSVEL_T) fabs(this->zz[bp] - this->zz[wp]);
-                dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-                if (dist != 0.0 && dist < this->bb) {
-                  friendCount[bp]++;
-                  friendCount[wp]++;
-                }
-                wp = bucketList[wp];
-              }
-            }
-          }
-
-          // Do the trailing buckets in this row
-          wi = bi;
-          wj = bj;
-          for (wk = bk+1; wk <= last[2]; wk++) {
-            wp = buckets[wi][wj][wk];
-            while (wp != -1) {
-              xdist = (POSVEL_T) fabs(this->xx[bp] - this->xx[wp]);
-              ydist = (POSVEL_T) fabs(this->yy[bp] - this->yy[wp]);
-              zdist = (POSVEL_T) fabs(this->zz[bp] - this->zz[wp]);
-              dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-              if (dist != 0.0 && dist < this->bb) {
-                friendCount[bp]++;
-                friendCount[wp]++;
-              }
-              wp = bucketList[wp];
-            }
-          }
-          bp = bucketList[bp];
-        }
-      }
-    }
-  }
-  // Particle with the most friends
-  int maxFriends = 0;
-  int result = 0;
-
-  for (int i = 0; i < this->particleCount; i++) {
-    if (friendCount[i] > maxFriends) {
-      maxFriends = friendCount[i];
-      result = i;
-    }
-  }
-
-  delete [] friendCount;
-  delete haloChain;
-
-  return result;
-}
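// A short worked note on the window used above (MCP_CHAIN_FACTOR = 5 is
// assumed here purely for the example): with chainSize = bb / MCP_CHAIN_FACTOR,
// two particles closer than bb can differ by at most bb / chainSize =
// MCP_CHAIN_FACTOR bucket indices along any axis, so walking
// [b - chainFactor, b + chainFactor] (clamped to the mesh) cannot miss a
// friend pair.  For example, bb = 0.2 and MCP_CHAIN_FACTOR = 5 give
// chainSize = 0.04, and a pair separated by less than 0.2 along an axis can
// land at most 0.2 / 0.04 = 5 bucket indices apart in x, y and z.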
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the most bound particle using an (N*(N-1))/2 algorithm.
-// This is also the minimum potential particle for the halo.
-// Locations of the particles have taken wraparound into account so that
-// processors on the low edge of a dimension have particles with negative
-// positions and processors on the high edge of a dimension have particles
-// with locations greater than the box size
-//
-/////////////////////////////////////////////////////////////////////////
-
-int HaloCenterFinder::mostBoundParticleN2(POTENTIAL_T* minPotential)
-{
-  // Arrange in an upper triangular grid to save computation
-  POTENTIAL_T* lpot = new POTENTIAL_T[this->particleCount];
-  for (int i = 0; i < this->particleCount; i++)
-    lpot[i] = 0.0;
-
-  // First particle in halo to calculate minimum potential on
-  for (int p = 0; p < this->particleCount; p++) {
-
-    // Next particle in halo in minimum potential loop
-    for (int q = p+1; q < this->particleCount; q++) {
-
-      POSVEL_T xdist = (POSVEL_T)fabs(this->xx[p] - this->xx[q]);
-      POSVEL_T ydist = (POSVEL_T)fabs(this->yy[p] - this->yy[q]);
-      POSVEL_T zdist = (POSVEL_T)fabs(this->zz[p] - this->zz[q]);
-
-      POSVEL_T r = sqrt((xdist * xdist) + (ydist * ydist) + (zdist * zdist));
-
-      if (r != 0.0) {
-        lpot[p] = (POTENTIAL_T)(lpot[p] - (this->mass[q] / r));
-        lpot[q] = (POTENTIAL_T)(lpot[q] - (this->mass[p] / r));
-      }
-    }
-  }
-
-  *minPotential = MAX_FLOAT;
-  int result = 0;
-  for (int i = 0; i < this->particleCount; i++) {
-    if (lpot[i] < *minPotential) {
-      *minPotential = lpot[i];
-      result = i;
-    }
-  }
-  delete [] lpot;
-
-  return result;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Most bound particle using a chaining mesh of particles in one FOF halo
-// and a combination of actual particle-to-particle values and estimated
-// values based on the number of particles in a bucket and the distance to
-// the nearest corner.
-//
-// For the center area of a halo calculate the actual values for 26 neighbors.
-// For the perimeter area of a halo use a bounding box of those neighbors
-// to make up the actual portion and an estimate for the other particles in
-// the neighbors.  This keeps a particle from being too close to the
-// closest corner and giving a skewed answer.
-//
-// The refinement in the center buckets will be called level 1 because all
-// buckets to a distance of 1 are calculated fully.  The refinement of the
-// perimeter buckets will be called level 0 because only the center bucket
-// is calculated fully.
-//
-// Note that in refining, level 0 must be brought up to level 1, and then
-// refinement to more buckets becomes the same.
-//
-/////////////////////////////////////////////////////////////////////////
-
-int HaloCenterFinder::mostBoundParticleAStar(POTENTIAL_T* minimumPotential)
-{
-  // Chaining mesh size is a factor of the interparticle halo distance
-  POSVEL_T chainSize = this->bb * this->distFactor;
-
-  // Boundary around edges of a bucket for calculating estimate
-  POSVEL_T boundaryFactor = 10.0f * this->distFactor;
-  POSVEL_T boundarySize = chainSize / boundaryFactor;
-
-  // Actual values are calculated for the 26 neighbors in the center of a halo
-  // Factor deciding how far out from the center that region extends
-  int eachSideFactor = 7;
-
-  // Create the chaining mesh for this halo
-  ChainingMesh* haloChain = buildChainingMesh(chainSize);
-
-  // Get chaining mesh information
-  int* meshSize = haloChain->getMeshSize();
-
-  // Bucket ID allows finding the bucket every particle is in
-  int* bucketID = new int[this->particleCount];
-
-  // Refinement level for a particle indicates how many buckets out have
-  // actual values calculated rather than estimated
-  int* refineLevel = new int[this->particleCount];
-
-  // Minimum potential made up of actual part and estimated part
-  POSVEL_T* estimate = new POSVEL_T[this->particleCount];
-  for (int i = 0; i < this->particleCount; i++)
-    estimate[i] = 0.0;
-
-  // Calculate better guesses (refinement level 1) around the center of halo
-  // Use estimates with boundary around neighbors of perimeter
-  int* minActual = new int[DIMENSION];
-  int* maxActual = new int[DIMENSION];
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    int eachSide = meshSize[dim] / eachSideFactor;
-    int middle = meshSize[dim] / 2;
-    minActual[dim] = middle - eachSide;
-    maxActual[dim] = middle + eachSide;
-  }
-
-
-  //////////////////////////////////////////////////////////////////////////
-  //
-  // Calculate actual for particles within individual bucket
-  //
-  aStarThisBucketPart(haloChain, bucketID, estimate);
-
-  //////////////////////////////////////////////////////////////////////////
-  //
-  // Calculate actual values for immediate 26 neighbors for buckets in
-  // the center of the halo (refinement level = 1)
-  //
-  aStarActualNeighborPart(haloChain, minActual, maxActual,
-                          refineLevel, estimate);
-
-  //////////////////////////////////////////////////////////////////////////
-  //
-  // Calculate estimated values for immediate 26 neighbors for buckets on
-  // the edges of the halo (refinement level = 0)
-  //
-  aStarEstimatedNeighborPart(haloChain, minActual, maxActual,
-                             refineLevel, estimate, boundarySize);
-
-  //////////////////////////////////////////////////////////////////////////
-  //
-  // All buckets beyond the 27 nearest get an estimate based on the count in
-  // the bucket and the distance to the nearest point
-  //
-  aStarEstimatedPart(haloChain, estimate);
-
-  //////////////////////////////////////////////////////////////////////////
-  //
-  // Iterative phase to refine individual particles
-  //
-  POSVEL_T minPotential = estimate[0];
-  int minParticleCur = 0;
-  int winDelta = 1;
-
-  // Find the current minimum potential particle after actual and estimates
-  for (int i = 0; i < this->particleCount; i++) {
-    if (estimate[i] < minPotential) {
-      minPotential = estimate[i];
-      minParticleCur = i;
-    }
-  }
-  POSVEL_T minPotentialLast = minPotential;
-  int minParticleLast = -1;
-
-  // Decode the bucket from the ID
-  int id = bucketID[minParticleCur];
-  int bk = id % meshSize[2];
-  id = id - bk;
-  int bj = (id % (meshSize[2] * meshSize[1])) / meshSize[2];
-  id = id - (bj * meshSize[2]);
-  int bi = id / (meshSize[2] * meshSize[1]);
-
-  // Calculate the maximum winDelta for this bucket
-  int maxDelta = max(max(
-                     max(meshSize[0] - bi, bi), max(meshSize[1] - bj, bj)),
-                     max(meshSize[2] - bk, bk));
-
-  // Terminate when a particle is the minimum twice in a row AND
-  // it has been calculated precisely without estimates over the entire halo
-  int pass = 1;
-  while (winDelta <= maxDelta) {
-    while (minParticleLast != minParticleCur) {
-
-      // Refine the value for all particles in the same bucket as the minimum
-      // Alter the minimum in the reference
-      // Return the particle index that is the new minimum of that bucket
-      while (winDelta > refineLevel[minParticleCur] &&
-             estimate[minParticleCur] <= minPotentialLast) {
-        pass++;
-        refineLevel[minParticleCur]++;
-
-        // Going from level 0 to level 1 is special because the 27 neighbors
-        // are part actual and part estimated.  After that all refinements are
-        // replacing an estimate with an actual
-        if (refineLevel[minParticleCur] == 1) {
-          refineAStarLevel_1(haloChain, bi, bj, bk, minActual, maxActual,
-                             minParticleCur, estimate,
-                             boundarySize);
-        } else {
-          refineAStarLevel_N(haloChain, bi, bj, bk,
-                             minParticleCur, estimate,
-                             refineLevel[minParticleCur]);
-        }
-      }
-      if (winDelta <= refineLevel[minParticleCur]) {
-        minPotentialLast = estimate[minParticleCur];
-        minParticleLast = minParticleCur;
-      }
-
-      // Find the current minimum particle
-      minPotential = minPotentialLast;
-      for (int i = 0; i < this->particleCount; i++) {
-        if (estimate[i] <= minPotential) {
-          minPotential = estimate[i];
-          minParticleCur = i;
-        }
-      }
-
-      // Decode the bucket from the ID
-      id = bucketID[minParticleCur];
-      bk = id % meshSize[2];
-      id = id - bk;
-      bj = (id % (meshSize[2] * meshSize[1])) / meshSize[2];
-      id = id - (bj * meshSize[2]);
-      bi = id / (meshSize[2] * meshSize[1]);
-
-      // Calculate the maximum winDelta for this bucket
-      maxDelta = max(max(
-                     max(meshSize[0] - bi, bi), max(meshSize[1] - bj, bj)),
-                     max(meshSize[2] - bk, bk));
-    }
-    pass++;
-    winDelta++;
-    minParticleLast = 0;
-  }
-  int result = minParticleCur;
-  *minimumPotential = estimate[minParticleCur];
-
-  delete [] estimate;
-  delete [] bucketID;
-  delete [] refineLevel;
-  delete [] minActual;
-  delete [] maxActual;
-  delete haloChain;
-
-  return result;
-}
-
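The bucket ID used above is a row-major flattening of the (bi, bj, bk) indices. A minimal sketch of the encode/decode pair, assuming an illustrative meshSize array rather than the ChainingMesh accessors:

#include <cassert>

// Row-major bucket ID encoding and its inverse, mirroring the decode above.
int encodeBucket(const int meshSize[3], int bi, int bj, int bk)
{
  return (bi * meshSize[1] * meshSize[2]) + (bj * meshSize[2]) + bk;
}

void decodeBucket(const int meshSize[3], int id, int& bi, int& bj, int& bk)
{
  bk = id % meshSize[2];
  id -= bk;
  bj = (id % (meshSize[2] * meshSize[1])) / meshSize[2];
  id -= bj * meshSize[2];
  bi = id / (meshSize[2] * meshSize[1]);
}

// Example: a 4x5x6 mesh round-trips every bucket index
void checkRoundTrip()
{
  int meshSize[3] = {4, 5, 6};
  for (int i = 0; i < 4; i++)
    for (int j = 0; j < 5; j++)
      for (int k = 0; k < 6; k++) {
        int bi, bj, bk;
        decodeBucket(meshSize, encodeBucket(meshSize, i, j, k), bi, bj, bk);
        assert(bi == i && bj == j && bk == k);
      }
}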
-/////////////////////////////////////////////////////////////////////////
-//
-// Within a bucket calculate the actual values between all particles
-// Set the bucket ID so that the associated bucket can be located quickly
-//
-/////////////////////////////////////////////////////////////////////////
-
-void HaloCenterFinder::aStarThisBucketPart(
-                        ChainingMesh* haloChain,
-                        int* bucketID,
-                        POSVEL_T* estimate)
-{
-  POSVEL_T xdist, ydist, zdist, dist;
-  int bp, bp2, bi, bj, bk;
-
-  // Get chaining mesh information
-  int*** buckets = haloChain->getBuckets();
-  int* bucketList = haloChain->getBucketList();
-  int* meshSize = haloChain->getMeshSize();
-
-  // Calculate actual values for all particles in the same bucket
-  // All pairs are calculated one time and stored twice
-  for (bi = 0; bi < meshSize[0]; bi++) {
-    for (bj = 0; bj < meshSize[1]; bj++) {
-      for (bk = 0; bk < meshSize[2]; bk++) {
-
-        bp = buckets[bi][bj][bk];
-        while (bp != -1) {
-
-          // Remember the bucket that every particle is in
-          bucketID[bp] = (bi * meshSize[1] * meshSize[2]) +
-                         (bj * meshSize[2]) + bk;
-
-          bp2 = bucketList[bp];
-          while (bp2 != -1) {
-            xdist = (POSVEL_T)fabs(this->xx[bp] - this->xx[bp2]);
-            ydist = (POSVEL_T)fabs(this->yy[bp] - this->yy[bp2]);
-            zdist = (POSVEL_T)fabs(this->zz[bp] - this->zz[bp2]);
-            dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-            if (dist != 0.0) {
-              estimate[bp] -= (this->mass[bp2] / dist);
-              estimate[bp2] -= (this->mass[bp] / dist);
-            }
-            bp2 = bucketList[bp2];
-          }
-          bp = bucketList[bp];
-        }
-      }
-    }
-  }
-}
-
-
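The buckets/bucketList pair above forms a linked-cell structure: buckets[bi][bj][bk] holds the index of the first particle in a cell and bucketList[p] the next particle in the same cell, with -1 terminating the chain. A hedged sketch of enumerating each unordered pair within one cell, using illustrative names (head, next):

// Visit every unordered particle pair inside one chaining-mesh cell;
// 'head' stands in for buckets[bi][bj][bk] and 'next' for bucketList.
template <typename PairFn>
void forEachPairInBucket(int head, const int* next, PairFn&& fn)
{
  for (int p = head; p != -1; p = next[p]) {
    for (int q = next[p]; q != -1; q = next[q]) {
      fn(p, q);   // each unordered pair (p, q) is visited exactly once
    }
  }
}

// Example use: the callback could accumulate -mass/r into both particles of
// every pair, as the member function above does with xx, yy, zz and estimate.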
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the actual values for particles in the 26 immediate neighbors,
-// only for buckets in the center of the halo, indicated by min/maxActual.
-// Do this with a sliding window so that an N^2/2 algorithm is done where
-// calculations are stored in both particles at the same time.  Set
-// refineLevel to 1, indicating that buckets to a distance of one from the
-// particle were calculated completely.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void HaloCenterFinder::aStarActualNeighborPart(
-                        ChainingMesh* haloChain,
-                        int* minActual,
-                        int* maxActual,
-                        int* refineLevel,
-                        POSVEL_T* estimate)
-{
-  // Walking window extents and size
-  int bp, bi, bj, bk;
-  int wp, wi, wj, wk;
-  int first[DIMENSION], last[DIMENSION];
-  POSVEL_T xdist, ydist, zdist, dist;
-
-  // Get chaining mesh information
-  int*** bucketCount = haloChain->getBucketCount();
-  int*** buckets = haloChain->getBuckets();
-  int* bucketList = haloChain->getBucketList();
-
-  // Process the perimeter buckets, which contribute to the actual values
-  // but which will get estimated values for their own particles
-  for (bi = minActual[0] - 1; bi <= maxActual[0] + 1; bi++) {
-    for (bj = minActual[1] - 1; bj <= maxActual[1] + 1; bj++) {
-      for (bk = minActual[2] - 1; bk <= maxActual[2] + 1; bk++) {
-
-        // Only do the perimeter buckets
-        if ((bucketCount[bi][bj][bk] > 0) &&
-            ((bi < minActual[0] || bi > maxActual[0]) ||
-             (bj < minActual[1] || bj > maxActual[1]) ||
-             (bk < minActual[2] || bk > maxActual[2]))) {
-
-          // Set a window around this bucket for calculating actual potentials
-          first[0] = bi - 1;    last[0] = bi + 1;
-          first[1] = bj - 1;    last[1] = bj + 1;
-          first[2] = bk - 1;    last[2] = bk + 1;
-          for (int dim = 0; dim < DIMENSION; dim++) {
-            if (first[dim] < minActual[dim])
-              first[dim] = minActual[dim];
-            if (last[dim] > maxActual[dim])
-              last[dim] = maxActual[dim];
-          }
-
-          bp = buckets[bi][bj][bk];
-          while (bp != -1) {
-
-            // Check each bucket in the window
-            for (wi = first[0]; wi <= last[0]; wi++) {
-              for (wj = first[1]; wj <= last[1]; wj++) {
-                for (wk = first[2]; wk <= last[2]; wk++) {
-
-                  // Only do the window bucket if it is in the actual region
-                  if (bucketCount[wi][wj][wk] != 0 &&
-                      wi >= minActual[0] && wi <= maxActual[0] &&
-                      wj >= minActual[1] && wj <= maxActual[1] &&
-                      wk >= minActual[2] && wk <= maxActual[2]) {
-
-                    wp = buckets[wi][wj][wk];
-                    while (wp != -1) {
-                      xdist = (POSVEL_T)fabs(this->xx[bp] - this->xx[wp]);
-                      ydist = (POSVEL_T)fabs(this->yy[bp] - this->yy[wp]);
-                      zdist = (POSVEL_T)fabs(this->zz[bp] - this->zz[wp]);
-                      dist = sqrt((xdist*xdist)+(ydist*ydist)+(zdist*zdist));
-                      if (dist != 0.0) {
-                        estimate[bp] -= (this->mass[wp] / dist);
-                        estimate[wp] -= (this->mass[bp] / dist);
-                      }
-                      wp = bucketList[wp];
-                    }
-                  }
-                }
-              }
-            }
-            bp = bucketList[bp];
-          }
-        }
-      }
-    }
-  }
-
-  // Process the buckets in the center
-  for (bi = minActual[0]; bi <= maxActual[0]; bi++) {
-    for (bj = minActual[1]; bj <= maxActual[1]; bj++) {
-      for (bk = minActual[2]; bk <= maxActual[2]; bk++) {
-
-        // Set a window around this bucket for calculating actual potentials
-        first[0] = bi - 1;    last[0] = bi + 1;
-        first[1] = bj - 1;    last[1] = bj + 1;
-        first[2] = bk - 1;    last[2] = bk + 1;
-        for (int dim = 0; dim < DIMENSION; dim++) {
-          if (first[dim] < minActual[dim])
-            first[dim] = minActual[dim];
-          if (last[dim] > maxActual[dim])
-            last[dim] = maxActual[dim];
-        }
-
-        bp = buckets[bi][bj][bk];
-        while (bp != -1) {
-
-          // For the current particle in the current bucket calculate
-          // the actual part from the 27 surrounding buckets.
-          // With the sliding window we calculate the distance between
-          // two particles and can fill in both, but when the second
-          // particle's bucket is reached we must not calculate and add it
-          // in again.  So we must be aware of which buckets have not already
-          // been compared to this bucket and calculate only for the planes
-          // and rows that have not already been processed.
-          refineLevel[bp] = 1;
-
-          // Do entire trailing plane of buckets that has not been processed
-          for (wi = bi + 1; wi <= last[0]; wi++) {
-            for (wj = first[1]; wj <= last[1]; wj++) {
-              for (wk = first[2]; wk <= last[2]; wk++) {
-
-                wp = buckets[wi][wj][wk];
-                while (wp != -1) {
-                  xdist = fabs(this->xx[bp] - this->xx[wp]);
-                  ydist = fabs(this->yy[bp] - this->yy[wp]);
-                  zdist = fabs(this->zz[bp] - this->zz[wp]);
-                  dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-                  if (dist != 0.0) {
-                    estimate[bp] -= (this->mass[wp] / dist);
-                    estimate[wp] -= (this->mass[bp] / dist);
-                  }
-                  wp = bucketList[wp];
-                }
-              }
-            }
-          }
-
-          // Do entire trailing row that has not been processed in this plane
-          wi = bi;
-          for (wj = bj + 1; wj <= last[1]; wj++) {
-            for (wk = first[2]; wk <= last[2]; wk++) {
-              wp = buckets[wi][wj][wk];
-              while (wp != -1) {
-                xdist = (POSVEL_T)fabs(this->xx[bp] - this->xx[wp]);
-                ydist = (POSVEL_T)fabs(this->yy[bp] - this->yy[wp]);
-                zdist = (POSVEL_T)fabs(this->zz[bp] - this->zz[wp]);
-                dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-                if (dist != 0) {
-                  estimate[bp] -= (this->mass[wp] / dist);
-                  estimate[wp] -= (this->mass[bp] / dist);
-                }
-                wp = bucketList[wp];
-              }
-            }
-          }
-
-          // Do the remaining buckets in this row (wk greater than bk)
-          wi = bi;
-          wj = bj;
-          for (wk = bk + 1; wk <= last[2]; wk++) {
-            wp = buckets[wi][wj][wk];
-            while (wp != -1) {
-              xdist = (POSVEL_T)fabs(this->xx[bp] - this->xx[wp]);
-              ydist = (POSVEL_T)fabs(this->yy[bp] - this->yy[wp]);
-              zdist = (POSVEL_T)fabs(this->zz[bp] - this->zz[wp]);
-              dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-              if (dist != 0.0) {
-                estimate[bp] -= (this->mass[wp] / dist);
-                estimate[wp] -= (this->mass[bp] / dist);
-              }
-              wp = bucketList[wp];
-            }
-          }
-          bp = bucketList[bp];
-        }
-      }
-    }
-  }
-}
-
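The trailing-plane, trailing-row, and trailing-column loops above are one way of visiting only the "forward" half of the 26 neighbor offsets so that each bucket pair is processed exactly once. An equivalent sketch that generates those 13 offsets explicitly (illustrative, not the code the class uses):

#include <array>
#include <vector>

// Enumerate the 13 forward neighbor offsets of a bucket: the offsets
// (di, dj, dk) in {-1,0,1}^3 that are lexicographically greater than
// (0, 0, 0).  Processing only these (plus the bucket itself) visits each
// neighboring bucket pair exactly once.
std::vector<std::array<int, 3> > forwardNeighborOffsets()
{
  std::vector<std::array<int, 3> > offsets;
  for (int di = -1; di <= 1; di++)
    for (int dj = -1; dj <= 1; dj++)
      for (int dk = -1; dk <= 1; dk++) {
        bool forward = (di > 0) ||
                       (di == 0 && dj > 0) ||
                       (di == 0 && dj == 0 && dk > 0);
        if (forward)
          offsets.push_back({{di, dj, dk}});
      }
  return offsets;   // 13 entries
}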
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the estimated values for particles in the 26 immediate neighbors.
-// Actual values are calculated within the boundary for safety, and an
-// estimate is used for the remaining points based on the nearest point in
-// the neighbor outside of the boundary.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void HaloCenterFinder::aStarEstimatedNeighborPart(
-                        ChainingMesh* haloChain,
-                        int* minActual,
-                        int* maxActual,
-                        int* refineLevel,
-                        POSVEL_T* estimate,
-                        POSVEL_T boundarySize)
-{
-  // Walking window extents and size
-  int bp, bi, bj, bk;
-  int wp, wi, wj, wk;
-  int first[DIMENSION], last[DIMENSION];
-  POSVEL_T minBound[DIMENSION], maxBound[DIMENSION];
-  POSVEL_T xNear = 0.0;
-  POSVEL_T yNear = 0.0;
-  POSVEL_T zNear = 0.0;
-  POSVEL_T xdist, ydist, zdist, dist;
-
-  // Get chaining mesh information
-  int*** bucketCount = haloChain->getBucketCount();
-  int*** buckets = haloChain->getBuckets();
-  int* bucketList = haloChain->getBucketList();
-  int* meshSize = haloChain->getMeshSize();
-  POSVEL_T* minRange = haloChain->getMinRange();
-  POSVEL_T chainSize = haloChain->getChainSize();
-
-  // Calculate estimates for all buckets not in the center
-  for (bi = 0; bi < meshSize[0]; bi++) {
-    for (bj = 0; bj < meshSize[1]; bj++) {
-      for (bk = 0; bk < meshSize[2]; bk++) {
-
-        if ((bucketCount[bi][bj][bk] > 0) &&
-            ((bi < minActual[0] || bi > maxActual[0]) ||
-             (bj < minActual[1] || bj > maxActual[1]) ||
-             (bk < minActual[2] || bk > maxActual[2]))) {
-
-          // Set a window around this bucket for calculating estimates
-          first[0] = bi - 1;    last[0] = bi + 1;
-          first[1] = bj - 1;    last[1] = bj + 1;
-          first[2] = bk - 1;    last[2] = bk + 1;
-
-          // Calculate the bounding box around the current bucket
-          minBound[0] = minRange[0] + (bi * chainSize) - boundarySize;
-          maxBound[0] = minRange[0] + ((bi + 1) * chainSize) + boundarySize;
-          minBound[1] = minRange[1] + (bj * chainSize) - boundarySize;
-          maxBound[1] = minRange[1] + ((bj + 1) * chainSize) + boundarySize;
-          minBound[2] = minRange[2] + (bk * chainSize) - boundarySize;
-          maxBound[2] = minRange[2] + ((bk + 1) * chainSize) + boundarySize;
-
-          for (int dim = 0; dim < DIMENSION; dim++) {
-            if (first[dim] < 0) {
-              first[dim] = 0;
-              minBound[dim] = 0.0;
-            }
-            if (last[dim] >= meshSize[dim]) {
-              last[dim] = meshSize[dim] - 1;
-              maxBound[dim] = (meshSize[dim] - 1) * chainSize;
-            }
-          }
-
-          // Calculate actual and estimated for every particle in this bucket
-          bp = buckets[bi][bj][bk];
-          while (bp != -1) {
-
-            // Since it is not fully calculated refinement level is 0
-            refineLevel[bp] = 0;
-
-            // Process all neighbor buckets of this one
-            for (wi = first[0]; wi <= last[0]; wi++) {
-              for (wj = first[1]; wj <= last[1]; wj++) {
-                for (wk = first[2]; wk <= last[2]; wk++) {
-
-                  // If the bucket has particles and is not within the region
-                  // for which actual neighbor values were calculated
-                  if ((bucketCount[wi][wj][wk] > 0) &&
-                      ((wi > maxActual[0] || wi < minActual[0]) ||
-                       (wj > maxActual[1] || wj < minActual[1]) ||
-                       (wk > maxActual[2] || wk < minActual[2])) &&
-                      (wi != bi || wj != bj || wk != bk)) {
-
-                    // What is the nearest point between buckets
-                    if (wi < bi)  xNear = minBound[0];
-                    if (wi == bi) xNear = (minBound[0] + maxBound[0]) / 2.0f;
-                    if (wi > bi)  xNear = maxBound[0];
-                    if (wj < bj)  yNear = minBound[1];
-                    if (wj == bj) yNear = (minBound[1] + maxBound[1]) / 2.0f;
-                    if (wj > bj)  yNear = maxBound[1];
-                    if (wk < bk)  zNear = minBound[2];
-                    if (wk == bk) zNear = (minBound[2] + maxBound[2]) / 2.0f;
-                    if (wk > bk)  zNear = maxBound[2];
-
-                    wp = buckets[wi][wj][wk];
-                    int estimatedParticleCount = 0;
-                    while (wp != -1) {
-                      if (this->xx[wp] > minBound[0] &&
-                          this->xx[wp] < maxBound[0] &&
-                          this->yy[wp] > minBound[1] &&
-                          this->yy[wp] < maxBound[1] &&
-                          this->zz[wp] > minBound[2] &&
-                          this->zz[wp] < maxBound[2]) {
-
-                        // The window particle is within the boundary region,
-                        // so calculate the actual potential
-                        xdist = (POSVEL_T)fabs(this->xx[bp] - this->xx[wp]);
-                        ydist = (POSVEL_T)fabs(this->yy[bp] - this->yy[wp]);
-                        zdist = (POSVEL_T)fabs(this->zz[bp] - this->zz[wp]);
-                        dist = sqrt(xdist*xdist + ydist*ydist + zdist*zdist);
-                        if (dist != 0.0) {
-                          estimate[bp] -= (this->mass[wp] / dist);
-                        }
-                      } else {
-                        // Count to create estimated potential
-                        estimatedParticleCount++;
-                      }
-                      wp = bucketList[wp];
-                    }
-
-                    // Find nearest corner or location to this bucket
-                    // Calculate estimated value for the part of the bucket
-                    xdist = (POSVEL_T)fabs(this->xx[bp] - xNear);
-                    ydist = (POSVEL_T)fabs(this->yy[bp] - yNear);
-                    zdist = (POSVEL_T)fabs(this->zz[bp] - zNear);
-                    dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-                    if (dist != 0) {
-                      estimate[bp] -=
-                        ((this->mass[bp] / dist) * estimatedParticleCount);
-                    }
-                  }
-                }
-              }
-            }
-            bp = bucketList[bp];
-          }
-        }
-      }
-    }
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Add in an estimate for all buckets outside of the immediate 27 neighbors
-//
-/////////////////////////////////////////////////////////////////////////
-
-void HaloCenterFinder::aStarEstimatedPart(
-                        ChainingMesh* haloChain,
-                        POSVEL_T* estimate)
-{
-  // Walking window extents and size
-  int bp, bi, bj, bk;
-  int wi, wj, wk;
-  int first[DIMENSION], last[DIMENSION];
-  POSVEL_T xdist, ydist, zdist, dist;
-  POSVEL_T xNear, yNear, zNear;
-
-  // Get chaining mesh information
-  int*** bucketCount = haloChain->getBucketCount();
-  int*** buckets = haloChain->getBuckets();
-  int* bucketList = haloChain->getBucketList();
-  int* meshSize = haloChain->getMeshSize();
-  POSVEL_T chainSize = haloChain->getChainSize();
-  POSVEL_T* minRange = haloChain->getMinRange();
-
-  for (bi = 0; bi < meshSize[0]; bi++) {
-    for (bj = 0; bj < meshSize[1]; bj++) {
-      for (bk = 0; bk < meshSize[2]; bk++) {
-
-        // Set a window around this bucket marking the neighbors already handled
-        first[0] = bi - 1;    last[0] = bi + 1;
-        first[1] = bj - 1;    last[1] = bj + 1;
-        first[2] = bk - 1;    last[2] = bk + 1;
-        for (int dim = 0; dim < DIMENSION; dim++) {
-          if (first[dim] < 0)
-            first[dim] = 0;
-          if (last[dim] >= meshSize[dim])
-            last[dim] = meshSize[dim] - 1;
-        }
-
-        for (wi = 0; wi < meshSize[0]; wi++) {
-          for (wj = 0; wj < meshSize[1]; wj++) {
-            for (wk = 0; wk < meshSize[2]; wk++) {
-
-              // Exclude the buckets for which actual values were calculated
-              if ((wi < first[0] || wi > last[0] ||
-                   wj < first[1] || wj > last[1] ||
-                   wk < first[2] || wk > last[2]) &&
-                  (bucketCount[wi][wj][wk] > 0)) {
-
-                // Nearest corner of the compared bucket to this particle
-                bp = buckets[bi][bj][bk];
-                xNear = minRange[0] + (wi * chainSize);
-                yNear = minRange[1] + (wj * chainSize);
-                zNear = minRange[2] + (wk * chainSize);
-                if (this->xx[bp] > xNear)
-                  xNear += chainSize;
-                if (this->yy[bp] > yNear)
-                  yNear += chainSize;
-                if (this->zz[bp] > zNear)
-                  zNear += chainSize;
-
-                // Iterate over all particles in the bucket, computing the
-                // estimate to the near corner of the other bucket
-                while (bp != -1) {
-                  xdist = fabs(this->xx[bp] - xNear);
-                  ydist = fabs(this->yy[bp] - yNear);
-                  zdist = fabs(this->zz[bp] - zNear);
-                  dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-                  if (dist != 0) {
-                    estimate[bp] -=
-                      ((this->mass[bp] / dist) * bucketCount[wi][wj][wk]);
-                  }
-                  bp = bucketList[bp];
-                }
-              }
-            }
-          }
-        }
-      }
-    }
-  }
-}
-
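As in the loop above, the far-bucket estimate charges a particle one term of -(mass[bp] / distance to the nearest corner) per particle counted in that bucket, mirroring the use of the particle's own mass in the code. A hedged standalone sketch of that single-bucket term, with illustrative parameter names:

#include <cmath>

// Far-bucket estimate term: the nearest corner of the far bucket is found,
// then the particle's own mass over that distance is scaled by the number of
// particles in the bucket.  All names are illustrative.
float farBucketEstimate(float px, float py, float pz,   // particle position
                        float particleMass,             // mass[bp]
                        int wi, int wj, int wk,         // far bucket indices
                        const float minRange[3],
                        float chainSize,
                        int bucketParticleCount)
{
  // Corner of the far bucket facing the particle
  float xNear = minRange[0] + wi * chainSize;
  float yNear = minRange[1] + wj * chainSize;
  float zNear = minRange[2] + wk * chainSize;
  if (px > xNear) xNear += chainSize;
  if (py > yNear) yNear += chainSize;
  if (pz > zNear) zNear += chainSize;

  float dx = px - xNear;
  float dy = py - yNear;
  float dz = pz - zNear;
  float dist = std::sqrt(dx*dx + dy*dy + dz*dz);
  if (dist == 0.0f)
    return 0.0f;
  return -(particleMass / dist) * bucketParticleCount;  // added into estimate[bp]
}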
-/////////////////////////////////////////////////////////////////////////
-//
-// Refine the estimate for a particle in the halo out to the window delta,
-// given the buckets in the chaining mesh, the relative locations of the
-// particles in this halo, the index of the particle, and the bucket it is in.
-// The newly refined estimate is updated in place.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void HaloCenterFinder::refineAStarLevel_1(
-                        ChainingMesh* haloChain,
-                        int bi,
-                        int bj,
-                        int bk,
-                        int* minActual,
-                        int* maxActual,
-                        int bp,
-                        POSVEL_T* estimate,
-                        POSVEL_T boundarySize)
-{
-  int wp, wi, wj, wk;
-  int first[DIMENSION], last[DIMENSION];
-  POSVEL_T xdist, ydist, zdist, dist;
-  POSVEL_T xNear = 0.0;
-  POSVEL_T yNear = 0.0;
-  POSVEL_T zNear = 0.0;
-  POSVEL_T minBound[DIMENSION], maxBound[DIMENSION];
-
-  // Get chaining mesh information
-  POSVEL_T chainSize = haloChain->getChainSize();
-  int*** bucketCount = haloChain->getBucketCount();
-  int*** buckets = haloChain->getBuckets();
-  int* bucketList = haloChain->getBucketList();
-  int* meshSize = haloChain->getMeshSize();
-  POSVEL_T* minRange = haloChain->getMinRange();
-
-  // Going out window delta in all directions
-  // Subtract the estimate from the current value
-  // Add the new values
-  first[0] = bi - 1;   last[0] = bi + 1;
-  first[1] = bj - 1;   last[1] = bj + 1;
-  first[2] = bk - 1;   last[2] = bk + 1;
-
-  // Calculate the bounding box around the current bucket
-  minBound[0] = minRange[0] + (bi * chainSize) - boundarySize;
-  maxBound[0] = minRange[0] + ((bi + 1) * chainSize) + boundarySize;
-  minBound[1] = minRange[1] + (bj * chainSize) - boundarySize;
-  maxBound[1] = minRange[1] + ((bj + 1) * chainSize) + boundarySize;
-  minBound[2] = minRange[2] + (bk * chainSize) - boundarySize;
-  maxBound[2] = minRange[2] + ((bk + 1) * chainSize) + boundarySize;
-
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    if (first[dim] < 0) {
-      first[dim] = 0;
-      minBound[dim] = 0.0;
-    }
-    if (last[dim] >= meshSize[dim]) {
-      last[dim] = meshSize[dim] - 1;
-      maxBound[dim] = meshSize[dim] * chainSize;
-    }
-  }
-
-  for (wi = first[0]; wi <= last[0]; wi++) {
-    for (wj = first[1]; wj <= last[1]; wj++) {
-      for (wk = first[2]; wk <= last[2]; wk++) {
-
-        // If the bucket has particles and is not within the region for which
-        // actual neighbor values were calculated (because if it is, actuals
-        // for this bucket were already computed), and if it is not this
-        // bucket itself, which already had the n^2 algorithm run
-        if ((bucketCount[wi][wj][wk] > 0) &&
-            ((wi > maxActual[0] || wi < minActual[0]) ||
-             (wj > maxActual[1] || wj < minActual[1]) ||
-             (wk > maxActual[2] || wk < minActual[2])) &&
-            (wi != bi || wj != bj || wk != bk)) {
-
-
-          // What is the nearest point between buckets
-          if (wi < bi)  xNear = minBound[0];
-          if (wi == bi) xNear = (minBound[0] + maxBound[0]) / 2.0;
-          if (wi > bi)  xNear = maxBound[0];
-          if (wj < bj)  yNear = minBound[1];
-          if (wj == bj) yNear = (minBound[1] + maxBound[1]) / 2.0;
-          if (wj > bj)  yNear = maxBound[1];
-          if (wk < bk)  zNear = minBound[2];
-          if (wk == bk) zNear = (minBound[2] + maxBound[2]) / 2.0;
-          if (wk > bk)  zNear = maxBound[2];
-
-          wp = buckets[wi][wj][wk];
-          int estimatedParticleCount = 0;
-          while (wp != -1) {
-
-            // Particles inside the boundary around the bucket are ignored
-            // because their actual potential was already calculated in the
-            // initial phase; particles outside it are handled here
-            if (
-              (this->xx[wp] <= minBound[0] || this->xx[wp] >= maxBound[0]) ||
-              (this->yy[wp] <= minBound[1] || this->yy[wp] >= maxBound[1]) ||
-              (this->zz[wp] <= minBound[2] || this->zz[wp] >= maxBound[2])) {
-
-              // Count to create estimated potential which is added
-              estimatedParticleCount++;
-
-              // Calculate actual potential
-              xdist = (POSVEL_T)fabs(this->xx[bp] - this->xx[wp]);
-              ydist = (POSVEL_T)fabs(this->yy[bp] - this->yy[wp]);
-              zdist = (POSVEL_T)fabs(this->zz[bp] - this->zz[wp]);
-              dist = sqrt(xdist*xdist + ydist*ydist + zdist*zdist);
-              if (dist != 0.0) {
-                estimate[bp] -= (this->mass[wp] / dist);
-              }
-            }
-            wp = bucketList[wp];
-          }
-
-          // Find nearest corner or location to this bucket
-          // Calculate estimated value for the part of the bucket
-          xdist = (POSVEL_T)fabs(this->xx[bp] - xNear);
-          ydist = (POSVEL_T)fabs(this->yy[bp] - yNear);
-          zdist = (POSVEL_T)fabs(this->zz[bp] - zNear);
-          dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-          if (dist != 0) {
-            estimate[bp] += ((this->mass[bp] / dist) * estimatedParticleCount);
-          }
-        }
-      }
-    }
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Refine the estimate for a particle in the halo out to the window delta,
-// given the buckets in the chaining mesh, the relative locations of the
-// particles in this halo, the index of the particle, and the bucket it is in.
-// The newly refined estimate is updated in place.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void HaloCenterFinder::refineAStarLevel_N(
-                        ChainingMesh* haloChain,
-                        int bi,
-                        int bj,
-                        int bk,
-                        int bp,
-                        POSVEL_T* estimate,
-                        int winDelta)
-{
-  int wp, wi, wj, wk;
-  int first[DIMENSION], last[DIMENSION];
-  int oldDelta = winDelta - 1;
-  POSVEL_T xdist, ydist, zdist, dist;
-  POSVEL_T xNear, yNear, zNear;
-
-  // Get chaining mesh information
-  POSVEL_T chainSize = haloChain->getChainSize();
-  int*** bucketCount = haloChain->getBucketCount();
-  int*** buckets = haloChain->getBuckets();
-  int* bucketList = haloChain->getBucketList();
-  int* meshSize = haloChain->getMeshSize();
-  POSVEL_T* minRange = haloChain->getMinRange();
-
-  // Going out window delta in all directions
-  // Subtract the estimate from the current value
-  // Add the new values
-  first[0] = bi - winDelta;   last[0] = bi + winDelta;
-  first[1] = bj - winDelta;   last[1] = bj + winDelta;
-  first[2] = bk - winDelta;   last[2] = bk + winDelta;
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    if (first[dim] < 0)
-      first[dim] = 0;
-    if (last[dim] >= meshSize[dim])
-      last[dim] = meshSize[dim] - 1;
-  }
-
-  // Walk the new delta window
-  // Exclude buckets which already contributed actual values
-  // For other buckets add the estimate and subtract the actual
-  for (wi = first[0]; wi <= last[0]; wi++) {
-    for (wj = first[1]; wj <= last[1]; wj++) {
-      for (wk = first[2]; wk <= last[2]; wk++) {
-
-        if ((wi < (bi - oldDelta) || wi > (bi + oldDelta) ||
-             wj < (bj - oldDelta) || wj > (bj + oldDelta) ||
-             wk < (bk - oldDelta) || wk > (bk + oldDelta)) &&
-            (bucketCount[wi][wj][wk] > 0)) {
-
-            // Nearest corner of the bucket to contribute new actuals
-            xNear = minRange[0] + (wi * chainSize);
-            yNear = minRange[1] + (wj * chainSize);
-            zNear = minRange[2] + (wk * chainSize);
-            if (this->xx[bp] > xNear) xNear += chainSize;
-            if (this->yy[bp] > yNear) yNear += chainSize;
-            if (this->zz[bp] > zNear) zNear += chainSize;
-
-            // Distance of this particle to the corner gives the estimate
-            // which was subtracted in the initial phase and is now added back
-            xdist = (POSVEL_T)fabs(this->xx[bp] - xNear);
-            ydist = (POSVEL_T)fabs(this->yy[bp] - yNear);
-            zdist = (POSVEL_T)fabs(this->zz[bp] - zNear);
-            dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-            if (dist != 0) {
-              estimate[bp] +=
-                ((this->mass[bp] / dist) * bucketCount[wi][wj][wk]);
-            }
-
-            // Subtract actual values from the new bucket to this particle
-            wp = buckets[wi][wj][wk];
-            while (wp != -1) {
-              xdist = fabs(this->xx[bp] - this->xx[wp]);
-              ydist = fabs(this->yy[bp] - this->yy[wp]);
-              zdist = fabs(this->zz[bp] - this->zz[wp]);
-              dist = sqrt((xdist*xdist) + (ydist*ydist) + (zdist*zdist));
-              if (dist != 0) {
-                estimate[bp] -= (this->mass[wp] / dist);
-              }
-              wp = bucketList[wp];
-            }
-        }
-      }
-    }
-  }
-}
-
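The refinement step above replaces the corner-based estimate of a newly covered bucket with exact particle-to-particle terms: the old estimate is added back and the actual contributions are subtracted. A sketch of that update for a single bucket, with illustrative names (next stands in for bucketList):

#include <cmath>

// Replace the corner-based estimate of one bucket with its actual terms.
// All parameter names are illustrative; 'estimate' corresponds to estimate[bp].
void replaceEstimateWithActual(float& estimate,
                               float px, float py, float pz, float pMass,
                               float xNear, float yNear, float zNear,
                               int bucketHead, int bucketCount,
                               const int* next,
                               const float* xx, const float* yy,
                               const float* zz, const float* mass)
{
  // Add back the old corner-based estimate that was subtracted earlier
  float dx = px - xNear;
  float dy = py - yNear;
  float dz = pz - zNear;
  float cornerDist = std::sqrt(dx*dx + dy*dy + dz*dz);
  if (cornerDist != 0.0f)
    estimate += (pMass / cornerDist) * bucketCount;

  // Subtract the actual contribution of every particle in the bucket
  for (int wp = bucketHead; wp != -1; wp = next[wp]) {
    dx = px - xx[wp];
    dy = py - yy[wp];
    dz = pz - zz[wp];
    float dist = std::sqrt(dx*dx + dy*dy + dz*dz);
    if (dist != 0.0f)
      estimate -= mass[wp] / dist;
  }
}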
-/////////////////////////////////////////////////////////////////////////
-//
-// Build a chaining mesh from the particles of a single halo
-// Used to find most connected and most bound particles for halo center
-// Space is allocated for locations of the halo and for a mapping of
-// the index within a halo to the index of the particle within the processor
-//
-/////////////////////////////////////////////////////////////////////////
-
-ChainingMesh* HaloCenterFinder::buildChainingMesh(POSVEL_T chainSize)
-{
-  // Find the bounding box of this halo
-  POSVEL_T* minLoc = new POSVEL_T[DIMENSION];
-  POSVEL_T* maxLoc = new POSVEL_T[DIMENSION];
-  minLoc[0] = maxLoc[0] = this->xx[0];
-  minLoc[1] = maxLoc[1] = this->yy[0];
-  minLoc[2] = maxLoc[2] = this->zz[0];
-
-  // Scan the locations of this halo to find its bounding box
-  for (int p = 0; p < this->particleCount; p++) {
-
-    if (minLoc[0] > this->xx[p]) minLoc[0] = this->xx[p];
-    if (maxLoc[0] < this->xx[p]) maxLoc[0] = this->xx[p];
-    if (minLoc[1] > this->yy[p]) minLoc[1] = this->yy[p];
-    if (maxLoc[1] < this->yy[p]) maxLoc[1] = this->yy[p];
-    if (minLoc[2] > this->zz[p]) minLoc[2] = this->zz[p];
-    if (maxLoc[2] < this->zz[p]) maxLoc[2] = this->zz[p];
-  }
-
-  // Want the chaining mesh to be at least 3 buckets in every dimension
-  bool tooSmall = true;
-  while (tooSmall == true) {
-    tooSmall = false;
-    for (int dim = 0; dim < DIMENSION; dim++) {
-      if (((maxLoc[dim] - minLoc[dim]) / chainSize) < 3.0)
-        tooSmall = true;
-    }
-    if (tooSmall == true) {
-      chainSize /= 2.0;
-    }
-  }
-
-  // Build the chaining mesh
-  ChainingMesh* haloChain = new ChainingMesh(minLoc, maxLoc, chainSize,
-                        this->particleCount,
-                        this->xx, this->yy, this->zz);
-  delete [] minLoc;
-  delete [] maxLoc;
-
-  return haloChain;
-}
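Taken out of the class, the cell-sizing rule above reduces to a small helper: halve the candidate cell size until the halo's bounding box spans at least three cells in every dimension. A sketch with illustrative parameter names:

// Cell-sizing rule for the chaining mesh; like the loop above, this assumes
// a non-degenerate bounding box (maxLoc strictly greater than minLoc).
float fitChainSize(const float minLoc[3], const float maxLoc[3],
                   float chainSize)
{
  bool tooSmall = true;
  while (tooSmall) {
    tooSmall = false;
    for (int dim = 0; dim < 3; dim++) {
      if ((maxLoc[dim] - minLoc[dim]) / chainSize < 3.0f)
        tooSmall = true;
    }
    if (tooSmall)
      chainSize /= 2.0f;
  }
  return chainSize;
}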
diff --git a/ThirdParty/Cosmo/HaloCenterFinder.h b/ThirdParty/Cosmo/HaloCenterFinder.h
deleted file mode 100644
index ee03b23..0000000
--- a/ThirdParty/Cosmo/HaloCenterFinder.h
+++ /dev/null
@@ -1,185 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-// .NAME HaloCenterFinder - find the particle in the center of halo particles
-//
-// HaloCenterFinder takes location data and a means of recognizing individual
-// halos within that data, and finds the most bound particle or the most
-// connected particle.
-//
-// The class can be called with an array of locations, or can be called with
-// all locations on a processor and two arrays used to thread halos through
-// all particles.  The first array gives the index of the first particle
-// in the halo and the second array takes that index and gives the index
-// of the next particle in the halo.  Follow this chain until -1, which marks
-// the end of the halo.
-//
-// Can operate on FOF halos, subhalos or SOD halos depending on the form
-// of the input.
-
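Walking one halo through the two threading arrays described above is a linked-list traversal terminated by -1. A hedged sketch with illustrative array names (haloStart, nextParticle), not part of the Cosmo API:

#include <vector>

// Collect the particle indices of halo h: haloStart[h] gives the first
// particle and nextParticle[p] the next particle in the same halo, with -1
// marking the end of the chain.  Array names are illustrative.
std::vector<long> particlesInHalo(int h,
                                  const std::vector<long>& haloStart,
                                  const std::vector<long>& nextParticle)
{
  std::vector<long> members;
  for (long p = haloStart[h]; p != -1; p = nextParticle[p])
    members.push_back(p);
  return members;
}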
-#ifndef HaloCenterFinder_h
-#define HaloCenterFinder_h
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#else
-#include "Definition.h"
-#endif
-
-#include "ChainingMesh.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT HaloCenterFinder {
-#else
-class HaloCenterFinder {
-#endif
-public:
-  HaloCenterFinder();
-  ~HaloCenterFinder();
-
-  // Set parameters for the interparticle distance and position scaling
-  void setParameters(
-        POSVEL_T bb,                  // Interparticle distance for halos
-        POSVEL_T distConvertFactor);  // Factor to scale positions by
-
-  // Set alive particle vectors which were created elsewhere
-  void setParticles(
-        long particleCount,
-        POSVEL_T* xLoc,
-        POSVEL_T* yLoc,
-        POSVEL_T* zLoc,
-        POSVEL_T* massHalo,
-        ID_T* id);
-
-  // Find the halo centers using most bound particle (N^2/2)
-  int  mostBoundParticleN2(POTENTIAL_T* minPotential);
-
-  // Find the halo centers using an A*-style search; the initial guess
-  // contains an actual part and an estimated part
-  int  mostBoundParticleAStar(POTENTIAL_T* minPotential);
-
-  // Calculate actual values between particles within a bucket
-  void aStarThisBucketPart(
-        ChainingMesh* haloChain,        // Buckets of particles
-        int* bucketID,                  // Map from particle to bucket
-        POSVEL_T* estimate);            // Running minimum potential
-
-  // Calculate actual values for 26 neighbors in the center of halo
-  // Level 1 refinement done for initial guess
-  void aStarActualNeighborPart(
-        ChainingMesh* haloChain,        // Buckets of particles
-        int* minActual,                 // Range for doing actual vs estimated
-        int* maxActual,
-        int* refineLevel,               // Refinement level of each particle
-        POSVEL_T* estimate);            // Running minimum potential
-
-  // Calculate estimated values for 26 neighbors around the edges of halo
-  // Level 0 refinement done for initial guess
-  void aStarEstimatedNeighborPart(
-        ChainingMesh* haloChain,        // Buckets of particles
-        int* minActual,                 // Range for doing actual vs estimated
-        int* maxActual,
-        int* refineLevel,               // Refinement level of each particle
-        POSVEL_T* estimate,             // Running minimum potential
-        POSVEL_T boundarySize);         // Boundary around bucket for estimation
-
-  // Calculate estimates for all buckets beyond the 27 closest
-  void aStarEstimatedPart(
-        ChainingMesh* haloChain,        // Buckets of particles
-        POSVEL_T* estimate);            // Running minimum potential
-
-  // Refinement of 0 to 1
-  void refineAStarLevel_1(
-        ChainingMesh* haloChain,        // Buckets of particles
-        int bi,                         // Bucket containing particle to refine
-        int bj,
-        int bk,
-        int* minActual,                 // Range for doing actual vs estimated
-        int* maxActual,
-        int minParticle,                // Particle to refine
-        POSVEL_T* estimate,             // Running minimum potential
-        POSVEL_T boundarySize);         // Boundary around bucket for estimation
-
-  // Refinement of 1 to N
-  void refineAStarLevel_N(
-        ChainingMesh* haloChain,        // Buckets of particles
-        int bi,                         // Bucket containing particle to refine
-        int bj,
-        int bk,
-        int minParticle,                // Particle to refine
-        POSVEL_T* estimate,             // Running minimum potential
-        int winDelta);                  // Number of buckets to refine out to
-
-  // Find the halo centers using most connected particle (N^2/2)
-  int  mostConnectedParticleN2();
-  int  mostConnectedParticleChainMesh();
-
-  // Build a chaining mesh of halo particles
-  ChainingMesh* buildChainingMesh(
-        POSVEL_T chainSize);
-
-private:
-  int    myProc;                // My processor number
-  int    numProc;               // Total number of processors
-
-  string outFile;               // File of particles written by this processor
-
-  POSVEL_T boxSize;             // Physical box size of the data set
-  POSVEL_T deadSize;            // Border size for dead particles
-  POSVEL_T bb;                  // Interparticle distance for halos
-  POSVEL_T distFactor;          // Scale positions by, used in chain size
-
-  long   particleCount;         // Total particles on this processor
-
-  POSVEL_T* xx;                 // X location for particles on this processor
-  POSVEL_T* yy;                 // Y location for particles on this processor
-  POSVEL_T* zz;                 // Z location for particles on this processor
-  POSVEL_T* mass;               // mass for particles on this processor
-  ID_T* tag;                    // Id tag for particles on this processor
-};
-
-#endif
diff --git a/ThirdParty/Cosmo/Message.cxx b/ThirdParty/Cosmo/Message.cxx
deleted file mode 100644
index b97ab0b..0000000
--- a/ThirdParty/Cosmo/Message.cxx
+++ /dev/null
@@ -1,242 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-#include "Message.h"
-#include "Partition.h"
-
-#ifdef USE_SERIAL_COSMO
-#include <string.h>
-#endif
-
-#include <iostream>
-
-using namespace std;
-
-////////////////////////////////////////////////////////////////////////////
-//
-// Create a Message for sending or receiving from MPI
-//
-////////////////////////////////////////////////////////////////////////////
-
-Message::Message(int size)
-{
-  this->bufSize = size;
-  this->buffer = new char[size];
-  this->bufPos = 0;
-}
-
-void Message::manualPackAtPosition(char* data, int pos, int count, size_t size)
-{
-  for(int i = 0; i < count; i = i + 1) {
-    for(size_t j = 0; j < size; j = j + 1) {
-      this->buffer[pos++] = data[i * size + j];
-    }
-  }
-}
-
-void Message::manualPack(char* data, int count, size_t size)
-{
-  for(int i = 0; i < count; i = i + 1) {
-    for(size_t j = 0; j < size; j = j + 1) {
-      this->buffer[this->bufPos++] = data[i * size + j];
-    }
-  }
-}
-
-void Message::manualUnpack(char* data, int count, size_t size)
-{
-  for(int i = 0; i < count; i = i + 1) {
-    for(size_t j = 0; j < size; j = j + 1) {
-      data[i * size + j] = this->buffer[this->bufPos++];
-    }
-  }
-}
-
-
-////////////////////////////////////////////////////////////////////////////
-//
-// Destructor for a message
-//
-////////////////////////////////////////////////////////////////////////////
-Message::~Message()
-{
-  delete [] this->buffer;
-}
-
-////////////////////////////////////////////////////////////////////////////
-//
-// Reset for another message of the same size
-//
-////////////////////////////////////////////////////////////////////////////
-void Message::reset()
-{
-  this->bufPos = 0;
-}
-
-
-////////////////////////////////////////////////////////////////////////////
-//
-// Place an integer at a specific location in the buffer.
-// Used to set a count of particles in the first position when it is
-// only known after all the particles have been packed.
-//
-////////////////////////////////////////////////////////////////////////////
-void Message::putValueAtPosition(int* data, int pos, int count)
-{
-  manualPackAtPosition((char*)data, pos, count, sizeof(int));
-}
-////////////////////////////////////////////////////////////////////////////
-//
-// Packing of the buffer
-//
-////////////////////////////////////////////////////////////////////////////
-void Message::putValue(int* data, int count)
-{
-  manualPack((char*)data, count, sizeof(int));
-}
-void Message::putValue(unsigned short* data, int count)
-{
-  manualPack((char*)data, count, sizeof(unsigned short));
-}
-void Message::putValue(long int* data, int count)
-{
-  manualPack((char*)data, count, sizeof(long int));
-}
-void Message::putValue(long long* data, int count)
-{
-  manualPack((char*)data, count, sizeof(long long));
-}
-void Message::putValue(float* data, int count)
-{
-  manualPack((char*)data, count, sizeof(float));
-}
-void Message::putValue(double* data, int count)
-{
-  manualPack((char*)data, count, sizeof(double));
-}
-void Message::putValue(char* data, int count)
-{
-  manualPack((char*)data, count, sizeof(char));
-}
-
-////////////////////////////////////////////////////////////////////////////
-//
-// Unpacking of the buffer
-//
-////////////////////////////////////////////////////////////////////////////
-void Message::getValue(int* data, int count)
-{
-  manualUnpack((char*)data, count, sizeof(int));
-}
-void Message::getValue(unsigned short* data, int count)
-{
-  manualUnpack((char*)data, count, sizeof(unsigned short));
-}
-void Message::getValue(long int* data, int count)
-{
-  manualUnpack((char*)data, count, sizeof(long int));
-}
-void Message::getValue(long long* data, int count)
-{
-  manualUnpack((char*)data, count, sizeof(long long));
-}
-void Message::getValue(float* data, int count)
-{
-  manualUnpack((char*)data, count, sizeof(float));
-}
-void Message::getValue(double* data, int count)
-{
-  manualUnpack((char*)data, count, sizeof(double));
-}
-void Message::getValue(char* data, int count)
-{
-  manualUnpack((char*)data, count, sizeof(char));
-}
-
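A hedged usage sketch of the put/get interface above: pack a count followed by float positions, rewind with reset(), and unpack in the same order. The explicit buffer size and variable names are illustrative; the library's default size comes from BUF_SZ in the headers.

#include "Message.h"

// Pack/unpack round trip in a single process (no MPI traffic involved).
void roundTripExample()
{
  const int count = 3;
  float xLoc[3] = {0.5f, 1.5f, 2.5f};

  Message msg(1024);               // explicit size instead of the default BUF_SZ
  int n = count;
  msg.putValue(&n);                // header: number of particles
  msg.putValue(xLoc, count);       // payload: positions

  msg.reset();                     // rewind the buffer position for reading
  int unpackedCount = 0;
  float unpackedX[3];
  msg.getValue(&unpackedCount);
  msg.getValue(unpackedX, unpackedCount);
}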
-////////////////////////////////////////////////////////////////////////////
-//
-// Nonblocking send
-//
-////////////////////////////////////////////////////////////////////////////
-void Message::send
-#ifdef USE_SERIAL_COSMO
-  (int , int )
-#else
-  (int mach, int tag)
-#endif
-{
-#ifdef USE_SERIAL_COSMO
-  char* in = new char[this->bufPos];
-  memcpy(in, this->buffer, this->bufPos);
-  q.push(in);
-#else
-  MPI_Request request;
-  MPI_Isend(this->buffer, this->bufPos, MPI_PACKED, 
-            mach, tag, Partition::getComm(), &request);
-#endif
-}
-
-
-////////////////////////////////////////////////////////////////////////////
-//
-// Blocking receive
-//
-////////////////////////////////////////////////////////////////////////////
-void Message::receive
-#ifdef USE_SERIAL_COSMO
-(int, int)
-#else
-(int mach, int tag)
-#endif
-{
-#ifdef USE_SERIAL_COSMO
-  char* out = q.front(); q.pop();
-  memcpy(this->buffer, out, this->bufSize);
-  delete [] out;
-#else
-  MPI_Status status;
-  MPI_Recv(this->buffer, this->bufSize, MPI_PACKED, mach, tag,
-           Partition::getComm(), &status);
-#endif
-}
diff --git a/ThirdParty/Cosmo/Message.h b/ThirdParty/Cosmo/Message.h
deleted file mode 100644
index a7d3266..0000000
--- a/ThirdParty/Cosmo/Message.h
+++ /dev/null
@@ -1,126 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-// .NAME Message - create, send and receive MPI messages
-//
-// .SECTION Description
-// Message class packs and unpacks data into an MPI buffer
-
-#ifndef MESSAGE_H
-#define MESSAGE_H
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#include <queue>
-
-using namespace std;
-#else
-#include "Definition.h"
-#include <queue>
-
-using namespace std;
-#endif
-
-
-class Message {
-public:
-  Message(int size = BUF_SZ);
-
-   ~Message();
-
-  // Put values into the MPI buffer
-  void putValueAtPosition(int* data, int pos, int count = 1);
-  void putValue(int* data, int count = 1);
-  void putValue(unsigned short* data, int count = 1);
-  void putValue(long int* data, int count = 1);
-  void putValue(long long* data, int count = 1);
-  void putValue(float* data, int count = 1);
-  void putValue(double* data, int count = 1);
-  void putValue(char* data, int count = 1);
-
-  // Get values from the MPI buffer
-  void getValue(int* data, int count = 1);
-  void getValue(unsigned short* data, int count = 1);
-  void getValue(long int* data, int count = 1);
-  void getValue(long long* data, int count = 1);
-  void getValue(float* data, int count = 1);
-  void getValue(double* data, int count = 1);
-  void getValue(char* data, int count = 1);
-
-  int getBufPos() { return this->bufPos; }
-
-  void manualPackAtPosition(char* data, int pos, int count, size_t size);
-  void manualPack(char* data, int count, size_t size);
-  void manualUnpack(char* data, int count, size_t size);
-
-  // Send nonblocking
-  void send(
-        int mach,                       // Where to send message
-        int tag = 0                     // Identifying tag
-  );
-
-  // Receive blocking
-  void receive(
-#ifdef USE_SERIAL_COSMO
-        int mach = 0,
-#else
-        int mach = MPI_ANY_SOURCE,      // From where to receive
-#endif
-        int tag = 0                     // Identifying tag
-  );
-
-#ifdef USE_SERIAL_COSMO // message queue hack for serial
-  queue<char*> q;
-#endif
-
-  // Reset the buffer for another set of data
-  void reset();
-
-private:
-  char* buffer;         // Buffer to pack
-  int   bufSize;        // Size of buffer
-  int   bufPos;         // Position in buffer
-};
-
-#endif
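For orientation, a minimal sketch of how the interface declared above might be driven; exchangeExample is a hypothetical helper, the ranks and payload are illustrative, and MPI initialization (done elsewhere via Partition) is assumed:

    #include "Message.h"

    // Illustrative only: pack a count and two floats, send the buffer to the
    // next rank, and unpack on whichever rank receives it.
    void exchangeExample(int myProc, int numProc)
    {
      Message send;                       // default BUF_SZ buffer
      Message recv;

      int count = 2;
      float positions[2] = { 1.5f, 2.5f };

      send.putValue(&count);              // first word: number of items
      send.putValue(positions, 2);        // then the payload
      send.send((myProc + 1) % numProc);  // nonblocking send to the next rank

      recv.receive();                     // blocking receive (MPI_ANY_SOURCE)
      int n;
      float values[2];
      recv.getValue(&n);
      recv.getValue(values, n);
    }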
diff --git a/ThirdParty/Cosmo/ParticleDistribute.cxx b/ThirdParty/Cosmo/ParticleDistribute.cxx
deleted file mode 100644
index a237efa..0000000
--- a/ThirdParty/Cosmo/ParticleDistribute.cxx
+++ /dev/null
@@ -1,1460 +0,0 @@
-/*=========================================================================
-
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#include "vtkStdString.h"
-#include "vtkSetGet.h"
-#endif
-
-#include "Partition.h"
-#include "ParticleDistribute.h"
-
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <iomanip>
-
-#include <sys/types.h>
-
-#ifdef _WIN32
-#include "winDirent.h"
-#else
-#include <dirent.h>
-#endif
-
-using namespace std;
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Particle data space is partitioned for the number of processors
-// which currently is a factor of two but is easily extended.  Particles
-// are read in from files where each processor reads one file into a buffer,
-// extracts the particles which really belong on the processor (ALIVE) and
-// those in a buffer region around the edge (DEAD).  The buffer is then
-// passed round robin to every other processor so that all particles are
-// examined by all processors.  All dead particles are tagged with the
-// neighbor zone (26 neighbors in 3D) so that later halos can be associated
-// with zones.
-//
-/////////////////////////////////////////////////////////////////////////
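As a rough illustration of the neighbor-zone idea in the comment above (not taken from the upstream file), the 26 periodic neighbors of a processor can be enumerated with simple wraparound arithmetic; torusNeighbors is a hypothetical helper and the row-major rank formula is an assumption about the Cartesian layout:

    #include <vector>

    // Enumerate the 26 torus neighbors of the processor at position pos[3]
    // in a layout[3] Cartesian decomposition (wraparound in every dimension).
    static std::vector<int> torusNeighbors(const int layout[3], const int pos[3])
    {
      std::vector<int> ranks;
      for (int dz = -1; dz <= 1; dz++)
        for (int dy = -1; dy <= 1; dy++)
          for (int dx = -1; dx <= 1; dx++) {
            if (dx == 0 && dy == 0 && dz == 0)
              continue;                                   // skip self
            int x = (pos[0] + dx + layout[0]) % layout[0];
            int y = (pos[1] + dy + layout[1]) % layout[1];
            int z = (pos[2] + dz + layout[2]) % layout[2];
            ranks.push_back((x * layout[1] + y) * layout[2] + z);  // assumed row-major rank
          }
      return ranks;                                       // always 26 entries
    }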
-
-ParticleDistribute::ParticleDistribute()
-{
-  // Get the number of processors running this problem and rank
-  this->numProc = Partition::getNumProc();
-  this->myProc = Partition::getMyProc();
-
-  // Get the number of processors in each dimension
-  Partition::getDecompSize(this->layoutSize);
-
-  // Get my position within the Cartesian topology
-  Partition::getMyPosition(this->layoutPos);
-
-  // Get neighbors of this processor including the wraparound
-  Partition::getNeighbors(this->neighbor);
-
-  this->numberOfAliveParticles = 0;
-  this->massConvertFactor = 1.0;
-  this->distConvertFactor = 1.0;
-}
-
-ParticleDistribute::~ParticleDistribute()
-{
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set parameters for particle distribution
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::setParameters(
-                        const string& baseName,
-                        POSVEL_T rL,
-                        string dataType)
-{
-  // Base file name which will have processor id appended for actual files
-  this->baseFile = baseName;
-
-  // Physical total space and amount of physical space to use for dead particles
-  this->boxSize = rL;
-
-  // RECORD format is the binary .cosmo of one particle with all information
-  if (dataType == "RECORD")
-    this->inputType = RECORD;
-
-  // BLOCK format is Gadget format with a header and x,y,z locations for
-  // all particles, then x,y,z velocities for all particles, and all tags
-  else if (dataType == "BLOCK")
-    this->inputType = BLOCK;
-
-#ifndef USE_VTK_COSMO
-  if (this->myProc == MASTER) {
-    cout << endl << "------------------------------------" << endl;
-    cout << "boxSize:  " << this->boxSize << endl;
-  }
-#endif
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set parameters for particle unit conversion
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::setConvertParameters(
-                        POSVEL_T massFactor,
-                        POSVEL_T distFactor)
-{
-  this->massConvertFactor = massFactor;
-  this->distConvertFactor = distFactor;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set box sizes for determining if a particle is in the alive or dead
-// region of this processor.  Data space is a DIMENSION torus.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::initialize()
-{
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  if (this->myProc == MASTER)
-    cout << "Decomposition: [" << this->layoutSize[0] << ":"
-         << this->layoutSize[1] << ":" << this->layoutSize[2] << "]" << endl;
-#endif
-#endif
-
-  // Set subextents on particle locations for this processor
-  POSVEL_T boxStep[DIMENSION];
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    boxStep[dim] = this->boxSize / this->layoutSize[dim];
-
-    // Alive particles
-    this->minAlive[dim] = this->layoutPos[dim] * boxStep[dim];
-    this->maxAlive[dim] = this->minAlive[dim] + boxStep[dim];
-    if (this->maxAlive[dim] > this->boxSize)
-      this->maxAlive[dim] = this->boxSize;
-  }
-}
-
-
-void ParticleDistribute::setParticles(vector<POSVEL_T>* xLoc,
-                                      vector<POSVEL_T>* yLoc,
-                                      vector<POSVEL_T>* zLoc,
-                                      vector<POSVEL_T>* xVel,
-                                      vector<POSVEL_T>* yVel,
-                                      vector<POSVEL_T>* zVel,
-                                      vector<POSVEL_T>* mass,
-                                      vector<ID_T>* id)
-{
-  this->xx = xLoc;
-  this->yy = yLoc;
-  this->zz = zLoc;
-  this->vx = xVel;
-  this->vy = yVel;
-  this->vz = zVel;
-  this->ms = mass;
-  this->tag = id;
-}
-
-
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Each processor reads 0 or more files, a buffer at a time, and shares
-// the particles by passing the buffer round robin to every other processor
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::readParticlesRoundRobin(int reserveQ)
-{
-  // Find how many input files there are and deal them between the processors
-  // Calculates the max number of files per processor and max number of
-  // particles per file so that buffering can be done
-  // For round robin sharing determine where to send and receive buffers from
-  partitionInputFiles();
-
-  // Compute the total number of particles in the problem
-  // Compute the maximum number of particles in any one file to set buffer size
-  findFileParticleCount();
-
-  // If there is only one input file we don't have to do MPI messaging
-  // because each processor will read that same file and extract only
-  // the particles in range
-  if (this->numberOfFiles == 1) {
-    if (this->inputType == RECORD) {
-      readFromRecordFile();
-    } else {
-      readFromBlockFile();
-    }
-  } else {
-
-  // MPI buffer size might limit the number of particles read from a file
-  // and passed round robin
-  // Largest file will have a number of buffer chunks to send if it is too large
-  // Every processor must send that number of chunks even if its own file
-  // does not have that much information
-
-  if (ENFORCE_MAX_READ == true && this->maxParticles > MAX_READ) {
-    this->maxRead = MAX_READ;
-    this->maxReadsPerFile = (this->maxParticles / this->maxRead) + 1;
-  } else {
-    this->maxRead = this->maxParticles;
-    this->maxReadsPerFile = 1;
-  }
-
-  // Allocate space to hold buffer information for reading of files
-  // Mass is constant, so use that float to store the tag
-  // Number of particles is the first integer in the buffer
-  int bufferSize = sizeof(int) + (this->maxRead * RECORD_SIZE);
-  Message* message1 = new Message(bufferSize);
-  Message* message2 = new Message(bufferSize);
-
-  // Allocate space for the data read from the file
-  POSVEL_T *fBlock = 0;
-  POSVEL_T *lBlock = 0;
-  POSVEL_T *vBlock = 0;
-  ID_T* iBlock = 0;
-
-  // RECORD format reads one particle at a time
-  if (this->inputType == RECORD) {
-    fBlock = new POSVEL_T[COSMO_FLOAT];
-    iBlock = new ID_T[COSMO_INT];
-  }
-
-  // BLOCK format reads all particles at one time, in x,y,z triples
-  else if (this->inputType == BLOCK) {
-    lBlock = new POSVEL_T[this->maxRead * DIMENSION];
-    vBlock = new POSVEL_T[this->maxRead * DIMENSION];
-    iBlock = new ID_T[this->maxRead];
-  }
-
-  // Reserve particle storage to minimize reallocation
-  int reserveSize = (int) (this->maxFiles * this->maxParticles * DEAD_FACTOR);
-
-  // If multiple processors are reading the same file we can reduce size
-  reserveSize /= this->processorsPerFile;
-
-  if(reserveQ) {
-#ifndef USE_VTK_COSMO
-    cout << "readParticlesRoundRobin reserving vectors" << endl;
-#endif
-    this->xx->reserve(reserveSize);
-    this->yy->reserve(reserveSize);
-    this->zz->reserve(reserveSize);
-    this->vx->reserve(reserveSize);
-    this->vy->reserve(reserveSize);
-    this->vz->reserve(reserveSize);
-    this->ms->reserve(reserveSize);
-    this->tag->reserve(reserveSize);
-  }
-
-  // Running total and index into particle data on this processor
-  this->particleCount = 0;
-
-  // Using the input files assigned to this processor, read the input
-  // and push round robin to every other processor
-  // this->maxFiles is the maximum number to read on any processor
-  // Some processors may have no files to read but must still participate
-  // in the round robin distribution
-
-  for (int file = 0; file < this->maxFiles; file++) {
-
-    // Open file to read the data if any for this processor
-    ifstream* inStream = 0;
-    int firstParticle = 0;
-    int numberOfParticles = 0;
-    int remainingParticles = 0;
-
-    if ((int)this->inFiles.size() > file) {
-      inStream = new ifstream(this->inFiles[file].c_str(), ios::in|ios::binary);
-
-#ifndef USE_VTK_COSMO
-      cout << "Rank " << this->myProc << " open file " << inFiles[file]
-           << " with " << this->fileParticles[file] << " particles" << endl;
-#endif
-
-      // Number of particles read at one time depends on MPI buffer size
-      numberOfParticles = this->fileParticles[file];
-      if (numberOfParticles > this->maxRead)
-        numberOfParticles = this->maxRead;
-
-      // If a file is too large to be passed as an MPI message divide it up
-      remainingParticles = this->fileParticles[file];
-
-    } else {
-#ifndef USE_VTK_COSMO
-      cout << "Rank " << this->myProc << " no file to open " << endl;
-#endif
-    }
-
-    for (int piece = 0; piece < this->maxReadsPerFile; piece++) {
-
-      // Reset each MPI message for each file read
-      message1->reset();
-      message2->reset();
-
-      // Processor has a file to read and share via round robin with others
-      if (file < (int)this->inFiles.size()) {
-        if (this->inputType == RECORD) {
-          readFromRecordFile(inStream, firstParticle, numberOfParticles,
-                             fBlock, iBlock, message1);
-        } else {
-          readFromBlockFile(inStream, firstParticle, numberOfParticles,
-                           this->fileParticles[file],
-                           lBlock, vBlock, iBlock, message1);
-        }
-        firstParticle += numberOfParticles;
-        remainingParticles -= numberOfParticles;
-        if (remainingParticles <= 0)
-          numberOfParticles = 0;
-        else if (remainingParticles < numberOfParticles)
-          numberOfParticles = remainingParticles;
-      }
-
-      // Processor does not have a file to open but must participate in the
-      // round robin with an empty buffer
-      else {
-        // Store number of particles used in first position
-        int zero = 0;
-        message1->putValue(&zero);
-      }
-
-      // Particles belonging to this processor are put in vectors
-      distributeParticles(message1, message2);
-    }
-
-    // The read buffers can be deleted as soon as the last file is read because
-    // the information has been transferred into the double buffers
-    if (file == (this->maxFiles - 1)) {
-      if (this->inputType == RECORD) {
-        delete [] fBlock;
-        delete [] iBlock;
-      } else if (this->inputType == BLOCK) {
-        delete [] lBlock;
-        delete [] vBlock;
-        delete [] iBlock;
-      }
-    }
-
-    if ((int)this->inFiles.size() > file)
-      inStream->close();
-  }
-
-  // After all particles have been distributed to vectors the double
-  // buffers can be deleted
-  delete message1;
-  delete message2;
-
-  // Count the particles across processors
-  long totalAliveParticles = 0;
-#ifdef USE_SERIAL_COSMO
-  totalAliveParticles = this->numberOfAliveParticles;
-#else
-  MPI_Allreduce((void*) &this->numberOfAliveParticles,
-                (void*) &totalAliveParticles,
-                1, MPI_LONG, MPI_SUM, Partition::getComm());
-#endif
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  cout << "Rank " << setw(3) << this->myProc
-       << " #alive = " << this->numberOfAliveParticles << endl;
-#endif
-
-  if (this->myProc == MASTER) {
-    cout << "TotalAliveParticles " << totalAliveParticles << endl;
-  }
-#endif
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Using the base name of the data, go to the subdirectory and determine
-// how many input files there are.  Parcel those files between all the
-// processors which will be responsible for actually reading 0 or more.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::partitionInputFiles()
-{
-  // Find number of input files for this problem given the base input name
-  // Get the subdirectory name containing the input files
-  string::size_type dirPos = this->baseFile.rfind("/");
-  string subdirectory;
-  string baseName;
-
-  // If the directory is not given use the current directory
-  if (dirPos == string::npos) {
-    subdirectory = "./";
-    baseName = this->baseFile;
-  } else {
-    subdirectory = this->baseFile.substr(0, dirPos + 1);
-    baseName = this->baseFile.substr(dirPos + 1);
-  }
-
-  // strip everything back to the first non-number
-  string::size_type pos = baseName.size() - 1;
-  int numbersOK = 1;
-
-  while(numbersOK)
-    {
-    if(baseName[pos] >= '0' && baseName[pos] <= '9')
-      {
-      if(pos > 0)
-        {
-        pos = pos - 1;
-        }
-      else
-        {
-        break;
-        }
-      }
-    else
-      {
-      numbersOK = 0;
-      }
-    }
-
-  // base name is everything up to the numbers
-  baseName = baseName.substr(0, pos + 1);
-
-  // Open the subdirectory and make a list of input files
-  DIR* directory = opendir(subdirectory.c_str());
-  struct dirent* directoryEntry;
-  vector<string> files;
-
-  if (directory != NULL) {
-  while ((directoryEntry = readdir(directory)))
-    {
-    // get the name
-    string fileName = directoryEntry->d_name;
-    pos = fileName.find(baseName.c_str());
-
-    // if it starts with the base name
-    if(pos == 0)
-      {
-      // check to see if it is all numbers on the end
-      pos = baseName.size() + 1;
-      numbersOK = 1;
-
-      while(pos < fileName.size())
-        {
-        if(fileName[pos] < '0' || fileName[pos] > '9')
-          {
-          numbersOK = 0;
-          break;
-          }
-
-        pos = pos + 1;
-        }
-
-      if(numbersOK)
-        {
-        fileName = subdirectory + fileName;
-        files.push_back(fileName);
-        }
-      }
-    }
-
-  closedir(directory);
-  }
-
-  this->numberOfFiles = (int)files.size();
-
-  if (this->numberOfFiles == 0) {
-#ifdef USE_VTK_COSMO
-    vtkStdString temp = "Processor ";
-    temp += this->myProc;
-    temp += " found no input files.\n";
-    vtkOutputWindowDisplayErrorText(temp.c_str());
-
-    return;
-#else
-    cout << "Rank " << this->myProc << " found no input files" << endl;
-    exit(1);
-#endif
-  }
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  if (this->myProc == MASTER) {
-    for (int i = 0; i < this->numberOfFiles; i++)
-      cout << "   File " << i << ": " << files[i] << endl;
-  }
-#endif
-#endif
-
-  // Divide the files between all the processors
-  // If there are 1 or more files per processor set the
-  // buffering up with a full round robin between all processors
-  if (this->numberOfFiles >= this->numProc) {
-
-    // Number of round robin sends to share all the files
-    this->processorsPerFile = 1;
-    this->numberOfFileSends = this->numProc - 1;
-    this->maxFileSends = this->numberOfFileSends;
-
-    // Which files does this processor read
-    for (int i = 0; i < this->numberOfFiles; i++)
-      if ((i % this->numProc) == this->myProc)
-        this->inFiles.push_back(files[i]);
-
-    // Where is the file sent, and where is it received
-    if (this->myProc == this->numProc - 1)
-      this->nextProc = 0;
-    else
-      this->nextProc = this->myProc + 1;
-    if (this->myProc == 0)
-      this->prevProc = this->numProc - 1;
-    else
-      this->prevProc = this->myProc - 1;
-  }
-
-  // If there are more processors than files, set up as many round robin loops
-  // as possible so that multiple processors read the same file. If the number
-  // of files does not divide evenly into the number of processors the last
-  // round robin loop will be bigger and some processors will contribute
-  // buffers of 0 size to send
-
-  else {
-
-    // Assign the round robin circle (last circle is bigger than others)
-    this->processorsPerFile = this->numProc / this->numberOfFiles;
-    int numberOfRoundRobinCircles = this->processorsPerFile;
-    int myCircle = this->myProc / this->numberOfFiles;
-    int extraProcessors = this->numProc -
-            (numberOfRoundRobinCircles * this->numberOfFiles);
-    if (myCircle == numberOfRoundRobinCircles)
-      myCircle--;
-
-    int firstInCircle = myCircle * this->numberOfFiles;
-    int lastInCircle = firstInCircle + this->numberOfFiles - 1;
-    if (myCircle == (numberOfRoundRobinCircles - 1))
-      lastInCircle += extraProcessors;
-
-    // How big is the round robin circle this processor is in
-    // What is the biggest round robin circle (needed because of MPI_Barrier)
-    this->numberOfFileSends = lastInCircle - firstInCircle;
-    this->maxFileSends = this->numberOfFiles + extraProcessors;
-
-    // Which file does this processor read
-    int index = this->myProc % this->numberOfFiles;
-    if (myCircle == (this->myProc / this->numberOfFiles))
-      this->inFiles.push_back(files[index]);
-
-    // Where is the file sent, and where is it received
-    if (this->myProc == lastInCircle)
-      this->nextProc = firstInCircle;
-    else
-      this->nextProc = this->myProc + 1;
-    if (this->myProc == firstInCircle)
-      this->prevProc = lastInCircle;
-    else
-      this->prevProc = this->myProc - 1;
-  }
-}
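A minimal, standalone illustration of the file-name matching performed above; matchesSeries and the snapshot names in the trailing comments are hypothetical, and the real code additionally strips trailing digits from the supplied base name before matching:

    #include <cctype>
    #include <string>

    // True if 'candidate' is the base name followed only by digits, mirroring
    // the rule used when scanning the input directory for snapshot files.
    static bool matchesSeries(const std::string& baseName,
                              const std::string& candidate)
    {
      if (candidate.compare(0, baseName.size(), baseName) != 0)
        return false;
      for (std::string::size_type i = baseName.size(); i < candidate.size(); i++)
        if (!std::isdigit(static_cast<unsigned char>(candidate[i])))
          return false;
      return candidate.size() > baseName.size();
    }

    // matchesSeries("snapshot_", "snapshot_007")  -> true
    // matchesSeries("snapshot_", "snapshot_b007") -> false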
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Open each input file belonging to this processor and find the number
-// of particles for setting buffer sizes
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::findFileParticleCount()
-{
-  // Compute the total number of particles in the problem
-  // Compute the maximum number of particles in any one file to set buffer size
-  long numberOfParticles = 0;
-  long maxNumberOfParticles = 0;
-  int numberOfMyFiles = (int)this->inFiles.size();
-
-  // Each processor counts the particles in its own files
-  for (int i = 0; i < numberOfMyFiles; i++) {
-
-    // Open my file
-    ifstream *inStream = new ifstream(this->inFiles[i].c_str(), ios::in);
-    if (inStream->fail()) {
-      delete inStream;
-#ifdef USE_VTK_COSMO
-      vtkStdString message = "File ";
-      message += this->inFiles[i];
-      message += " cannot be opened.\n";
-      vtkOutputWindowDisplayErrorText(message.c_str());
-
-      this->totalParticles = 0;
-      this->maxParticles = 0;
-      return;
-#else
-      cout << "File: " << this->inFiles[i] << " cannot be opened" << endl;
-      exit (-1);
-#endif
-    }
-
-    if (this->inputType == RECORD) {
-
-      // Compute the number of particles from file size
-      inStream->seekg(0L, ios::end);
-      int numberOfRecords = inStream->tellg() / RECORD_SIZE;
-      this->fileParticles.push_back(numberOfRecords);
-
-      numberOfParticles += numberOfRecords;
-      if (maxNumberOfParticles < numberOfRecords)
-        maxNumberOfParticles = numberOfRecords;
-    }
-
-    else if (this->inputType == BLOCK) {
-
-      // Find the number of particles in the header
-      readGadgetHeader(inStream);
-
-      int numberOfRecords = this->gadgetParticleCount;
-      this->fileParticles.push_back(numberOfRecords);
-
-      numberOfParticles += numberOfRecords;
-      if (maxNumberOfParticles < numberOfRecords)
-        maxNumberOfParticles = numberOfRecords;
-    }
-
-    inStream->close();
-    delete inStream;
-  }
-
-  // If multiple processors read the same file, just do the reduce on one set
-  if (this->processorsPerFile > 1) {
-    if (this->myProc >= this->numberOfFiles) {
-      numberOfParticles = 0;
-      maxNumberOfParticles = 0;
-    }
-  }
-
-  // Share the information about total particles
-#ifdef USE_SERIAL_COSMO
-  this->totalParticles = numberOfParticles;
-#else
-  MPI_Allreduce((void*) &numberOfParticles,
-                (void*) &this->totalParticles,
-                1, MPI_LONG, MPI_SUM, Partition::getComm());
-#endif
-
-  // Share the information about max particles in a file for setting buffer size
-#ifdef USE_SERIAL_COSMO
-  this->maxParticles = maxNumberOfParticles;
-#else
-  MPI_Allreduce((void*) &maxNumberOfParticles,
-                (void*) &this->maxParticles,
-                1, MPI_LONG, MPI_MAX, Partition::getComm());
-#endif
-
-  // Share the maximum number of files on a processor for setting the loop
-#ifdef USE_SERIAL_COSMO
-  this->maxFiles = numberOfMyFiles;
-#else
-  MPI_Allreduce((void*) &numberOfMyFiles,
-                (void*) &this->maxFiles,
-                1, MPI_INT, MPI_MAX, Partition::getComm());
-#endif
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  if (this->myProc == MASTER) {
-    cout << "Total particle count: " << this->totalParticles << endl;
-    cout << "Max particle count:   " << this->maxParticles << endl;
-  }
-#endif
-#endif
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Each processor reads 0 or more files, a buffer at a time.
-// The particles are processed by seeing if they are in the subextent of
-// this processor and are tagged either ALIVE or if dead, by the index of
-// the neighbor zone which contains that particle.  That buffer is sent
-// round robin to (myProc + 1) % numProc where it is processed and sent on.
-// After each processor reads one buffer and sends and receives numProc - 1
-// times the next buffer from the file is read.  Must use a double buffering
-// scheme so that on each send/recv we switch buffers.
-//
-// Input files may be BLOCK or RECORD structured
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::distributeParticles(
-                Message* message1,      // Send/receive buffers
-                Message* message2)      // Send/receive buffers
-{
-  // Each processor has filled a buffer with particles read from a file
-  // or had no particles to read but set the count in the buffer to 0
-  // Process the buffer to keep only those within range
-  Message* recvMessage = message1;
-  Message* sendMessage = message2;
-
-  // Process the original send buffer of particles from the file
-  collectLocalParticles(recvMessage, sendMessage);
-
-  // Distribute buffer round robin so that all processors see it
-  for (int step = 0; step < this->maxFileSends; step++) {
-
-    if (step < this->numberOfFileSends)
-      {
-      // Send buffer to the next processor if round robin loop is still active
-      sendMessage->send(this->nextProc);
-
-      // Receive buffer from the previous processor
-      recvMessage->receive(this->prevProc);
-      }
-
-#ifndef USE_SERIAL_COSMO
-    MPI_Barrier(Partition::getComm());
-#endif
-
-    // Process the send buffer for alive and dead before sending on
-    // the particles that were not claimed by this processor
-    if (step < this->numberOfFileSends)
-      collectLocalParticles(recvMessage, sendMessage);
-
-#ifndef USE_SERIAL_COSMO
-    MPI_Barrier(Partition::getComm());
-#endif
-  }
-}
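Stripped of the file handling and the MPI_Barrier synchronization, the ring traffic used above reduces to a short loop; ringDistribute is a hypothetical condensation of distributeParticles, shown only to make the pattern explicit:

    #include "Message.h"

    // Each rank forwards its buffer of unclaimed particles 'sends' times, so
    // every buffer is examined by every rank in the round robin circle.
    void ringDistribute(Message* recvMessage, Message* sendMessage,
                        int nextProc, int prevProc, int sends)
    {
      for (int step = 0; step < sends; step++) {
        sendMessage->send(nextProc);     // nonblocking send of unclaimed particles
        recvMessage->receive(prevProc);  // blocking receive from the previous rank
        // ...claim in-range particles and repack the remainder into sendMessage,
        //    as collectLocalParticles() does in the code above...
      }
    }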
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Input file is RECORD structured so read each particle record and populate
-// the double buffer in particle order for the rest of the processing
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::readFromRecordFile(
-                        ifstream* inStream,     // Stream to read from
-                        int firstParticle,      // First particle index
-                        int numberOfParticles,  // Number to read this time
-                        POSVEL_T* fBlock,       // Buffer for read in data
-                        ID_T* iBlock,           // Buffer for read in data
-                        Message* message)       // Reordered data
-{
-  // Store number of particles used in first position
-  message->putValue(&numberOfParticles);
-  if (numberOfParticles == 0)
-    return;
-
-  // Seek to the first particle locations and read
-  int skip = RECORD_SIZE * firstParticle;
-  inStream->seekg(skip, ios::beg);
-
-  // Store each particle location, velocity, mass and tag (as float) in buffer
-  int changeCount = 0;
-  for (int p = 0; p < numberOfParticles; p++) {
-
-    // Set file pointer to the requested particle
-    inStream->read(reinterpret_cast<char*>(fBlock),
-                   COSMO_FLOAT * sizeof(POSVEL_T));
-
-    if (inStream->gcount() != COSMO_FLOAT * sizeof(POSVEL_T)) {
-#ifdef USE_VTK_COSMO
-      vtkOutputWindowDisplayErrorText("Premature end-of-file.\n");
-      return;
-#else
-      cout << "Premature end-of-file" << endl;
-      exit (-1);
-#endif
-    }
-
-    // Convert units if requested
-    fBlock[0] *= this->distConvertFactor;
-    fBlock[2] *= this->distConvertFactor;
-    fBlock[4] *= this->distConvertFactor;
-    fBlock[6] *= this->massConvertFactor;
-
-    inStream->read(reinterpret_cast<char*>(iBlock),
-                   COSMO_INT * sizeof(ID_T));
-
-    if (inStream->gcount() != COSMO_INT * sizeof(ID_T)) {
-#ifdef USE_VTK_COSMO
-      vtkOutputWindowDisplayErrorText("Premature end-of-file.\n");
-      return;
-#else
-      cout << "Premature end-of-file" << endl;
-      exit (-1);
-#endif
-    }
-
-    // If the location is not within the bounding box wrap around
-    for (int i = 0; i <= 4; i = i + 2) {
-      if (fBlock[i] >= this->boxSize) {
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-        cout << "Location at " << i << " changed from " << fBlock[i] << endl;
-#endif
-#endif
-        fBlock[i] -= this->boxSize;
-        changeCount++;
-      }
-    }
-
-    // Store location and velocity and mass in message buffer
-    // Reorder so that location vector is followed by velocity vector
-    message->putValue(&fBlock[0]);
-    message->putValue(&fBlock[2]);
-    message->putValue(&fBlock[4]);
-    message->putValue(&fBlock[1]);
-    message->putValue(&fBlock[3]);
-    message->putValue(&fBlock[5]);
-    message->putValue(&fBlock[6]);
-
-    // Store the integer tag
-    message->putValue(&iBlock[0]);
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Input file is BLOCK structured so read the header and each block of data.
-// Gadget format:
-//    SKIP_GADGET_2 has extra 16 bytes
-//    SKIP_H 4 bytes (size of header)
-//    Header (6 types of particles with counts and masses)
-//    SKIP_H 4 bytes (size of header)
-//
-//    SKIP_GADGET_2 has extra 16 bytes
-//    SKIP_L 4 bytes (size of location block in bytes)
-//    Block of location data where each particle's x,y,z is stored together
-//    SKIP_L 4 bytes (size of location block in bytes)
-//
-//    SKIP_GADGET_2 has extra 16 bytes
-//    SKIP_V 4 bytes (size of velocity block in bytes)
-//    Block of velocity data where each particle's xv,yv,zv is stored together
-//    SKIP_V 4 bytes (size of velocity block in bytes)
-//
-//    SKIP_GADGET_2 has extra 16 bytes
-//    SKIP_T 4 bytes (size of tag block in bytes)
-//    Block of tag data
-//    SKIP_T 4 bytes (size of tag block in bytes)
-//
-// Reorder the data after it is read into the same structure as the
-// RECORD data so that the rest of the code does not have to be changed
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::readFromBlockFile(
-                        ifstream* inStream,     // Stream to read from
-                        int firstParticle,      // First particle index
-                        int numberOfParticles,  // Number to read this time
-                        int totParticles,       // Total particles in file
-                        POSVEL_T* lBlock,       // Buffer for read of location
-                        POSVEL_T* vBlock,       // Buffer for read of velocity
-                        ID_T* iBlock,           // Buffer for read in data
-                        Message* message)       // Reordered data
-{
-  // Store number of particles used in first position
-  message->putValue(&numberOfParticles);
-  if (numberOfParticles == 0)
-    return;
-
-  // Calculate skips to first location, velocity and tag
-  int skipToLocation = 0;
-  if (this->gadgetFormat == GADGET_2)
-    skipToLocation += GADGET_2_SKIP;
-  skipToLocation += GADGET_SKIP;		// Size of header
-  skipToLocation += GADGET_HEADER_SIZE;		// Header
-  skipToLocation += GADGET_SKIP;		// Size of header
-  if (this->gadgetFormat == GADGET_2)
-    skipToLocation += GADGET_2_SKIP;
-  skipToLocation += GADGET_SKIP;		// Size of location block
-
-  int skipToVelocity = skipToLocation;
-  skipToVelocity += DIMENSION * sizeof(POSVEL_T) * totParticles;
-  skipToVelocity += GADGET_SKIP;		// Size of location block
-  if (this->gadgetFormat == GADGET_2)
-    skipToLocation += GADGET_2_SKIP;
-  skipToVelocity += GADGET_SKIP;		// Size of velocity block
-
-  int skipToTag = skipToVelocity;
-  skipToTag += DIMENSION * sizeof(POSVEL_T) * totParticles;
-  skipToTag += GADGET_SKIP;			// Size of velocity block
-  if (this->gadgetFormat == GADGET_2)
-    skipToLocation += GADGET_2_SKIP;
-  skipToTag += GADGET_SKIP;			// Size of tag block
-
-  // Seek to the first requested particle location and read triples
-  inStream->seekg(skipToLocation, ios::beg);
-  int skip = (DIMENSION * sizeof(POSVEL_T) * firstParticle);
-  inStream->seekg(skip, ios::cur);
-
-  readData(this->gadgetSwap, (void*) lBlock, sizeof(POSVEL_T),
-                 DIMENSION * numberOfParticles, inStream);
-
-  // Convert units of distance
-  for (int i = 0; i < DIMENSION*numberOfParticles; i++)
-    lBlock[i] *= this->distConvertFactor;
-
-  // If the location is not within the bounding box wrap around
-  for (int i = 0; i < DIMENSION*numberOfParticles; i++) {
-    if (lBlock[i] >= this->boxSize)
-      lBlock[i] -= this->boxSize;
-  }
-
-  // Seek to first requested particle velocity and read triples
-  inStream->seekg(skipToVelocity, ios::beg);
-  skip = (DIMENSION * sizeof(POSVEL_T) * firstParticle); // skip to velocity
-  inStream->seekg(skip, ios::cur);
-
-  readData(this->gadgetSwap, (void*) vBlock, sizeof(POSVEL_T),
-                 DIMENSION * numberOfParticles, inStream);
-
-  // Seek to first requested particle tag and read
-  inStream->seekg(skipToTag, ios::beg);
-  skip = sizeof(ID_T) * firstParticle;             // skip to tag
-  inStream->seekg(skip, ios::cur);
-
-  readData(this->gadgetSwap, (void*) iBlock, sizeof(ID_T),
-                 numberOfParticles, inStream);
-
-  // Store the locations in the message buffer in record order
-  // so that the same distribution method for RECORD will work
-  int indx = 0;
-  int tagindx = 0;
-  for (int type = 0; type < NUM_GADGET_TYPES; type++) {
-
-    POSVEL_T particleMass =
-      (POSVEL_T) this->gadgetHeader.mass[type] * this->massConvertFactor;
-
-    for (int p = 0; p < this->gadgetHeader.npart[type]; p++) {
-
-      // Locations
-      message->putValue(&lBlock[indx]);           // X location
-      message->putValue(&lBlock[indx+1]);         // Y location
-      message->putValue(&lBlock[indx+2]);         // Z location
-
-      // Velocities
-      message->putValue(&vBlock[indx]);           // X velocity
-      message->putValue(&vBlock[indx+1]);         // Y velocity
-      message->putValue(&vBlock[indx+2]);         // Z velocity
-
-      // Mass
-      message->putValue(&particleMass);
-
-      // Id tag
-      message->putValue(&iBlock[p]);
-      indx += DIMENSION;
-      tagindx++;
-    }
-  }
-}
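The offset arithmetic for the block layout described before readFromBlockFile can be summarized compactly; gadgetOffsets is a hypothetical helper, the 4-byte markers, 256-byte header and 16-byte Gadget-2 prefix follow the comments above, and 4-byte float positions and velocities are an assumption:

    // Byte offsets of the location, velocity and tag blocks in a Gadget file
    // holding nTotal particles (float positions and velocities assumed).
    struct GadgetOffsets { long location, velocity, tag; };

    static GadgetOffsets gadgetOffsets(long nTotal, bool gadget2)
    {
      const long skip   = 4;                 // block-size marker
      const long extra  = gadget2 ? 16 : 0;  // Gadget-2 prefix per block
      const long header = 256;               // header payload

      GadgetOffsets o;
      o.location = extra + skip + header + skip   // header block
                 + extra + skip;                  // marker in front of locations
      o.velocity = o.location + 3L * nTotal * (long)sizeof(float)
                 + skip + extra + skip;           // trailing + leading markers
      o.tag      = o.velocity + 3L * nTotal * (long)sizeof(float)
                 + skip + extra + skip;
      return o;
    }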
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Process the data buffer of particles to choose those which are ALIVE
-// or DEAD on this processor.  Wraparound tests are applied so the data
-// space behaves as a 3D torus.  A dead particle's status is the zone id of
-// the neighbor processor which contains it as an ALIVE particle.
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::collectLocalParticles(
-                Message* recvMessage,      // Read particles and extract
-                Message* sendMessage)      // Other particles copied here
-{
-  // In order to read a buffer, reset position to the beginning
-  recvMessage->reset();
-  sendMessage->reset();
-
-  int recvParticles;
-  int sendParticles = 0;
-  recvMessage->getValue(&recvParticles);
-  sendMessage->putValue(&sendParticles);
-
-  POSVEL_T loc[DIMENSION], vel[DIMENSION], mass;
-  ID_T id;
-
-  // Test each particle in the buffer to see if it is ALIVE or DEAD
-  // If it is DEAD assign it to the neighbor zone that it is in
-  // Check all combinations of wraparound
-
-  for (int i = 0; i < recvParticles; i++) {
-    for (int dim = 0; dim < DIMENSION; dim++)
-      recvMessage->getValue(&loc[dim]);
-    for (int dim = 0; dim < DIMENSION; dim++)
-      recvMessage->getValue(&vel[dim]);
-    recvMessage->getValue(&mass);
-    recvMessage->getValue(&id);
-
-    // Is the particle ALIVE on this processor
-    if ((loc[0] >= minAlive[0] && loc[0] < maxAlive[0]) &&
-        (loc[1] >= minAlive[1] && loc[1] < maxAlive[1]) &&
-        (loc[2] >= minAlive[2] && loc[2] < maxAlive[2])) {
-
-          this->xx->push_back(loc[0]);
-          this->yy->push_back(loc[1]);
-          this->zz->push_back(loc[2]);
-          this->vx->push_back(vel[0]);
-          this->vy->push_back(vel[1]);
-          this->vz->push_back(vel[2]);
-          this->ms->push_back(mass);
-          this->tag->push_back(id);
-
-          this->numberOfAliveParticles++;
-          this->particleCount++;
-    } else {
-
-      // Pass the particle along to the next processor in send buffer
-      sendParticles++;
-      for (int dim = 0; dim < DIMENSION; dim++)
-        sendMessage->putValue(&loc[dim]);
-      for (int dim = 0; dim < DIMENSION; dim++)
-        sendMessage->putValue(&vel[dim]);
-      sendMessage->putValue(&mass);
-      sendMessage->putValue(&id);
-    }
-  }
-  // Overwrite the send buffer first word with the known number of particles
-  sendMessage->putValueAtPosition(&sendParticles, 0);
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Each processor reads 1 file (or will eventually get a pointer to data).
-// As each particle is read it is stored as an alive particle on this
-// processor and is checked against the neighbor ranges to see if it must
-// be exchanged
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::readParticlesOneToOne(int reserveQ)
-{
-  // File name is the base file name with processor id appended
-  // Because an MPI Cartesian topology is used, the arrangement of files in
-  // physical space must follow the rule that the last dimension varies fastest
-  ostringstream fileName;
-  fileName << this->baseFile << this->myProc;
-  this->inFiles.push_back(fileName.str());
-
-  // Compute the total number of particles in the problem
-  // Compute the maximum number of particles in any one file to set buffer size
-  findFileParticleCount();
-
-  // Reserve particle storage to minimize reallocation
-  int reserveSize = (int) (this->maxParticles * DEAD_FACTOR);
-
-  if(reserveQ) {
-#ifndef USE_VTK_COSMO
-    cout << "readParticlesOneToOne reserving vectors" << endl;
-#endif
-    this->xx->reserve(reserveSize);
-    this->yy->reserve(reserveSize);
-    this->zz->reserve(reserveSize);
-    this->vx->reserve(reserveSize);
-    this->vy->reserve(reserveSize);
-    this->vz->reserve(reserveSize);
-    this->ms->reserve(reserveSize);
-    this->tag->reserve(reserveSize);
-  }
-
-  // Running total and index into particle data on this processor
-  this->particleCount = 0;
-
-  // Read the input file storing particles immediately because all are alive
-  if (this->inputType == RECORD) {
-    readFromRecordFile();
-  } else {
-    readFromBlockFile();
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Input file is RECORD structured so read each particle record and populate
-// the vectors of particles marking all as ALIVE
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::readFromRecordFile()
-{
-  // Only one file per processor named in index 0
-  ifstream inStream(this->inFiles[0].c_str(), ios::in);
-  int numberOfParticles = this->fileParticles[0];
-
-#ifndef USE_VTK_COSMO
-  cout << "Rank " << this->myProc << " open file " << this->inFiles[0]
-       << " with " << numberOfParticles << " particles" << endl;
-#endif
-
-  POSVEL_T* fBlock = new POSVEL_T[COSMO_FLOAT];
-  ID_T* iBlock = new ID_T[COSMO_INT];
-
-  // Store each particle location, velocity and tag
-  for (int i = 0; i < numberOfParticles; i++) {
-
-    // Set file pointer to the requested particle
-    inStream.read(reinterpret_cast<char*>(fBlock),
-                   COSMO_FLOAT * sizeof(POSVEL_T));
-
-    if (inStream.gcount() != COSMO_FLOAT * sizeof(POSVEL_T)) {
-#ifdef USE_VTK_COSMO
-      vtkOutputWindowDisplayErrorText("Premature end-of-file.\n");
-      inStream.close();
-      delete [] fBlock;
-      delete [] iBlock;
-
-      return;
-#else
-      cout << "Premature end-of-file" << endl;
-      exit (-1);
-#endif
-    }
-
-    // Convert units if requested
-    fBlock[0] *= this->distConvertFactor;
-    fBlock[2] *= this->distConvertFactor;
-    fBlock[4] *= this->distConvertFactor;
-    fBlock[6] *= this->massConvertFactor;
-
-    inStream.read(reinterpret_cast<char*>(iBlock),
-                   COSMO_INT * sizeof(ID_T));
-
-    if (inStream.gcount() != COSMO_INT * sizeof(ID_T)) {
-#ifdef USE_VTK_COSMO
-      vtkOutputWindowDisplayErrorText("Premature end-of-file.\n");
-      inStream.close();
-      delete [] fBlock;
-      delete [] iBlock;
-
-      return;
-#else
-      cout << "Premature end-of-file" << endl;
-      exit (-1);
-#endif
-    }
-
-    // Store information in buffer if within range on this processor
-    if ((fBlock[0] >= minAlive[0] && fBlock[0] <= maxAlive[0]) &&
-        (fBlock[2] >= minAlive[1] && fBlock[2] <= maxAlive[1]) &&
-        (fBlock[4] >= minAlive[2] && fBlock[4] <= maxAlive[2])) {
-
-      this->xx->push_back(fBlock[0]);
-      this->vx->push_back(fBlock[1]);
-      this->yy->push_back(fBlock[2]);
-      this->vy->push_back(fBlock[3]);
-      this->zz->push_back(fBlock[4]);
-      this->vz->push_back(fBlock[5]);
-      this->ms->push_back(fBlock[6]);
-      this->tag->push_back(iBlock[0]);
-
-      this->numberOfAliveParticles++;
-      this->particleCount++;
-    }
-  }
-
-  inStream.close();
-  delete [] fBlock;
-  delete [] iBlock;
-}
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Input file is BLOCK structured so read the header and each block of data.
-// Gadget format:
-//    SKIP_GADGET_2 has extra 16 bytes
-//    SKIP_H 4 bytes (size of header)
-//    Header (6 types of particles with counts and masses)
-//    SKIP_H 4 bytes (size of header)
-//
-//    SKIP_GADGET_2 has extra 16 bytes
-//    SKIP_L 4 bytes (size of location block in bytes)
-//    Block of location data where each particle's x,y,z is stored together
-//    SKIP_L 4 bytes (size of location block in bytes)
-//
-//    SKIP_GADGET_2 has extra 16 bytes
-//    SKIP_V 4 bytes (size of velocity block in bytes)
-//    Block of velocity data where each particle's xv,yv,zv is stored together
-//    SKIP_V 4 bytes (size of velocity block in bytes)
-//
-//    SKIP_GADGET_2 has extra 16 bytes
-//    SKIP_T 4 bytes (size of tag block in bytes)
-//    Block of tag data
-//    SKIP_T 4 bytes (size of tag block in bytes)
-//
-// Reorder the data after it is read into the same structure as the
-// RECORD data so that the rest of the code does not have to be changed
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::readFromBlockFile()
-{
-  // Only one file per processor named in index 0
-  ifstream inStream(this->inFiles[0].c_str(), ios::in);
-  int numberOfParticles = this->fileParticles[0];
-
-#ifndef USE_VTK_COSMO
-  cout << "Rank " << this->myProc << " open file " << this->inFiles[0]
-       << " with " << numberOfParticles << " particles" << endl;
-#endif
-
-  // Calculate skips to first location, velocity and tag
-  int skipToLocation = 0;
-  if (this->gadgetFormat == GADGET_2)
-    skipToLocation += GADGET_2_SKIP;
-  skipToLocation += GADGET_SKIP;                // Size of header
-  skipToLocation += GADGET_HEADER_SIZE;         // Header
-  skipToLocation += GADGET_SKIP;                // Size of header
-  if (this->gadgetFormat == GADGET_2)
-    skipToLocation += GADGET_2_SKIP;
-  skipToLocation += GADGET_SKIP;                // Size of location block
-
-  // Allocate blocks to read into
-  POSVEL_T* lBlock = new POSVEL_T[numberOfParticles * DIMENSION];
-  POSVEL_T* vBlock = new POSVEL_T[numberOfParticles * DIMENSION];
-  ID_T* iBlock = new ID_T[numberOfParticles];
-
-  // Seek to particle locations and read triples
-  inStream.seekg(skipToLocation, ios::beg);
-  readData(this->gadgetSwap, (void*) lBlock, sizeof(POSVEL_T),
-           DIMENSION * numberOfParticles, &inStream);
-
-  // Convert locations
-  for (int p = 0; p < DIMENSION * numberOfParticles; p++)
-    lBlock[p] *= this->distConvertFactor;
-
-  // Seek to particle velocities and read triples
-  inStream.seekg((2 * GADGET_SKIP), ios::cur);
-  readData(this->gadgetSwap, (void*) vBlock, sizeof(POSVEL_T),
-           DIMENSION * numberOfParticles, &inStream);
-
-
-  // Seek to particle tags and read
-  inStream.seekg((2 * GADGET_SKIP), ios::cur);
-  readData(this->gadgetSwap, (void*) iBlock, sizeof(ID_T),
-           numberOfParticles, &inStream);
-
-  // Store mass, locations, velocities and tags into arrays if in range
-  // A range test is needed because this code is used both for ONE_TO_ONE, where
-  // all particles must be added, and for a single input file shared by many
-  // processors, where messaging is not needed but some particles don't belong
-  int indx = 0;
-  int tagindx = 0;
-  for (int type = 0; type < NUM_GADGET_TYPES; type++) {
-
-    POSVEL_T particleMass =
-      (POSVEL_T) this->gadgetHeader.mass[type] * this->massConvertFactor;
-
-    for (int p = 0; p < this->gadgetHeader.npart[type]; p++) {
-
-      if ((lBlock[indx] >= minAlive[0] && lBlock[indx] < maxAlive[0]) &&
-          (lBlock[indx+1] >= minAlive[1] && lBlock[indx+1] < maxAlive[1]) &&
-          (lBlock[indx+2] >= minAlive[2] && lBlock[indx+2] < maxAlive[2])) {
-
-        this->xx->push_back(lBlock[indx]);
-        this->yy->push_back(lBlock[indx+1]);
-        this->zz->push_back(lBlock[indx+2]);
-        this->vx->push_back(vBlock[indx]);
-        this->vy->push_back(vBlock[indx+1]);
-        this->vz->push_back(vBlock[indx+2]);
-        this->ms->push_back(particleMass);
-        this->tag->push_back(iBlock[tagindx]);
-
-        this->numberOfAliveParticles++;
-        this->particleCount++;
-      }
-      indx += DIMENSION;
-      tagindx++;
-    }
-  }
-
-  delete [] lBlock;
-  delete [] vBlock;
-  delete [] iBlock;
-  inStream.close();
-}
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Read the Gadget header from the stream
-// A Gadget file may be Gadget-1 format with no block indicators, or
-// Gadget-2 format with 4-byte block-size integers surrounding each block
-// Data may be big or little endian which we can tell by checking that
-// the header size is 256 in the first 4 bytes
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::readGadgetHeader(ifstream* gStr)
-{
-  this->gadgetSwap = false;
-  this->gadgetFormat = 1;
-  int blockSize, blockSize2;
-  string gadget2;
-
-  // Set the gadget format type by reading the first 4 byte integer
-  // If it is not "256" or "65536" then gadget-2 format with 16 bytes in front
-  readData(this->gadgetSwap, (void*) &blockSize, GADGET_SKIP, 1, gStr);
-  if (blockSize != GADGET_HEADER_SIZE && blockSize != GADGET_HEADER_SIZE_SWP) {
-    this->gadgetFormat = GADGET_2;
-    gadget2 = readString(gStr, GADGET_2_SKIP - GADGET_SKIP);
-    readData(this->gadgetSwap, (void*) &blockSize, GADGET_SKIP, 1, gStr);
-  }
-
-  // Set the swap type
-  if (blockSize != GADGET_HEADER_SIZE) {
-    this->gadgetSwap = true;
-    blockSize = GADGET_HEADER_SIZE;
-  }
-
-  // Read the Gadget header
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.npart[0],
-                         sizeof(int), NUM_GADGET_TYPES, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.mass[0],
-                         sizeof(double), NUM_GADGET_TYPES, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.time,
-                         sizeof(double), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.redshift,
-                         sizeof(double), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.flag_sfr,
-                         sizeof(int), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.flag_feedback,
-                         sizeof(int), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.npartTotal[0],
-                         sizeof(int), NUM_GADGET_TYPES, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.flag_cooling,
-                         sizeof(int), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.num_files,
-                         sizeof(int), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.BoxSize,
-                         sizeof(double), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.Omega0,
-                         sizeof(double), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.OmegaLambda,
-                         sizeof(double), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.HubbleParam,
-                         sizeof(double), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.flag_stellarage,
-                         sizeof(int), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.flag_metals,
-                         sizeof(int), 1, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.HighWord[0],
-                         sizeof(int), NUM_GADGET_TYPES, gStr);
-  readData(this->gadgetSwap, (void*) &this->gadgetHeader.flag_entropy,
-                         sizeof(int), 1, gStr);
-  string fill = readString(gStr, GADGET_FILL);
-  strcpy(&this->gadgetHeader.fill[0], fill.c_str());
-
-  // Read the Gadget header size to verify block
-  readData(this->gadgetSwap, (void*) &blockSize2, GADGET_SKIP, 1, gStr);
-  if (blockSize != blockSize2)
-#ifdef USE_VTK_COSMO
-    vtkOutputWindowDisplayErrorText("Mismatch of header size and header structure.\n");
-#else
-    cout << "Mismatch of header size and header structure" << endl;
-#endif
-
-  // Every particle type will have location, velocity and tag, so sum them up
-  this->gadgetParticleCount = 0;
-  for (int i = 0; i < NUM_GADGET_TYPES; i++)
-    this->gadgetParticleCount += this->gadgetHeader.npart[i];
-}
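A compact restatement of the format and byte-order test performed above, using only the facts stated in the comments (a 256-byte header whose size marker reads as 65536 when byte-swapped); classifyMarker is a hypothetical helper:

    enum GadgetLayout { GADGET1_NATIVE, GADGET1_SWAPPED, GADGET2_PREFIX };

    // Classify a Gadget file from the first 4-byte marker it contains.
    static GadgetLayout classifyMarker(unsigned int firstWord)
    {
      if (firstWord == 256)   return GADGET1_NATIVE;   // 0x00000100
      if (firstWord == 65536) return GADGET1_SWAPPED;  // 0x00010000, swapped 256
      return GADGET2_PREFIX;                           // 16-byte prefix precedes header
    }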
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Read in the requested number of characters
-//
-/////////////////////////////////////////////////////////////////////////
-
-string ParticleDistribute::readString(ifstream* inStr, int size)
-{
-   char* buffer = new char[size + 1];
-   inStr->read(buffer, size);
-   buffer[size] = '\0';
-
-   // Make sure string has legal values
-   if (isalnum(buffer[0]) == 0)
-      buffer[0] = '\0';
-   for (int i = 1; i < size; i++)
-      if (isprint(buffer[i]) == 0)
-         buffer[i] = '\0';
-
-   string retString = buffer;
-   delete [] buffer;
-   return retString;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Read in the number of items from the file pointer and
-// byte swap if necessary
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleDistribute::readData(
-        bool swap,
-        void* data,
-        unsigned long dataSize,
-        unsigned long dataCount,
-        ifstream* inStr)
-{
-   // Read all the data from the file
-   inStr->read(reinterpret_cast<char*>(data), dataSize*dataCount);
-
-   if (swap == true) {
-
-      // Byte swap each integer
-      char* dataPtr = (char*) data;
-      char temp;
-      for (unsigned long item = 0; item < dataCount; item++) {
-
-         // Do a byte-by-byte swap, reversing the order.
-         for (unsigned int i = 0; i < dataSize / 2; i++) {
-            temp = dataPtr[i];
-            dataPtr[i] = dataPtr[dataSize - 1 - i];
-            dataPtr[dataSize - 1 - i] = temp;
-         }
-         dataPtr += dataSize;
-      }
-   }
-}
diff --git a/ThirdParty/Cosmo/ParticleDistribute.h b/ThirdParty/Cosmo/ParticleDistribute.h
deleted file mode 100644
index 3469045..0000000
--- a/ThirdParty/Cosmo/ParticleDistribute.h
+++ /dev/null
@@ -1,233 +0,0 @@
-/*=========================================================================
-
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-// .NAME ParticleDistribute - distribute particles to processors
-//
-// .SECTION Description
-// ParticleDistribute takes a series of data files containing RECORD style
-// .cosmo data or Gadget style BLOCK data
-// along with parameters defining the box size for the data and for
-// determining halos within the particle data.  It distributes the data
-// across processors including a healthy dead zone of particles belonging
-// to neighbor processors.  By definition all halos can be determined
-// completely for any processor because of this dead zone.  The serial
-// halo finder is called on each processor.
-//
-
-#ifndef ParticleDistribute_h
-#define ParticleDistribute_h
-
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-#else
-#include "Definition.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-#endif
-
-#include "Message.h"
-#include <cstdlib>
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT ParticleDistribute {
-#else
-class ParticleDistribute {
-#endif
-public:
-  ParticleDistribute();
-  ~ParticleDistribute();
-
-  // Set parameters for particle distribution
-  void setParameters(
-        const string& inName,   // Base file name to read from
-        POSVEL_T rL,            // Box size of the physical problem
-        string dataType);       // BLOCK or RECORD structured input data
-
-  // Set parameters for unit conversion of mass and distance
-  void setConvertParameters(
-        POSVEL_T massConvertFactor,     // Multiply every mass by this
-        POSVEL_T distConvertFactor);    // Multiply every position by this
-
-  // Set neighbor processor numbers and calculate dead regions
-  void initialize();
-
-  // Read particle files per processor and share them round robin with others,
-  // extracting only the alive particles
-  void readParticlesRoundRobin(int reserveQ=0);
-  void partitionInputFiles();
-
-  // Read one particle file per processor with alive particles
-  // and correct topology
-  void readParticlesOneToOne(int reserveQ=0);
-
-  // Get particle counts for allocating buffers
-  void findFileParticleCount();
-
-  // Round robin version must buffer for MPI sends to other processors
-  void readFromRecordFile(
-        ifstream* inStream,     // Stream to read from
-        int firstParticle,      // First particle index to read in this chunk
-        int numberOfParticles,  // Number of particles to read in this chunk
-        POSVEL_T* fblock,       // Buffer for read-in data
-        ID_T* iblock,           // Buffer for read-in data
-        Message* message);      // Message buffer for distribution
-
-  void readFromBlockFile(
-        ifstream* inStream,     // Stream to read from
-        int firstParticle,      // First particle index to read in this chunk
-        int numberOfParticles,  // Number of particles to read in this chunk
-        int totParticles,       // Total particles (used to get offset)
-        POSVEL_T* lblock,       // Buffer for read-in location data
-        POSVEL_T* vblock,       // Buffer for read-in velocity data
-        ID_T* iblock,           // Buffer for read-in data
-        Message* message);      // Message buffer for distribution
-
-  // One-to-one version of the read is simpler with no MPI buffering
-  void readFromRecordFile();
-  void readFromBlockFile();
-
-  // Reads for Gadget header and for possible byte swapping
-  void readGadgetHeader(ifstream* str);
-  string readString(ifstream* str, int size);
-  void readData(
-        bool swap,
-        void* data,
-        unsigned long size,
-        unsigned long count,
-        ifstream* inStr);
-
-  // Collect local alive particles from the input buffers
-  void distributeParticles(
-        Message* message1,      // Double buffering for reads
-        Message* message2);     // Double buffering for reads
-  void collectLocalParticles(
-        Message* message1,      // Read buffer to extract local particles
-        Message* message2);     // Remaining particles put here for next send
-
-  // Return data needed by other software
-  int     getParticleCount()    { return this->particleCount; }
-
-  void setParticles(vector<POSVEL_T>* xx,
-                    vector<POSVEL_T>* yy,
-                    vector<POSVEL_T>* zz,
-                    vector<POSVEL_T>* vx,
-                    vector<POSVEL_T>* vy,
-                    vector<POSVEL_T>* vz,
-                    vector<POSVEL_T>* mass,
-                    vector<ID_T>* tag);
-
-  vector<POSVEL_T>* getXLocation()      { return this->xx; }
-  vector<POSVEL_T>* getYLocation()      { return this->yy; }
-  vector<POSVEL_T>* getZLocation()      { return this->zz; }
-  vector<POSVEL_T>* getXVelocity()      { return this->vx; }
-  vector<POSVEL_T>* getYVelocity()      { return this->vy; }
-  vector<POSVEL_T>* getZVelocity()      { return this->vz; }
-  vector<POSVEL_T>* getMass()           { return this->ms; }
-  vector<ID_T>* getTag()                { return this->tag; }
-
-private:
-  int    myProc;                // My processor number
-  int    numProc;               // Total number of processors
-
-  string baseFile;              // Base name of input particle files
-  int    inputType;             // BLOCK or RECORD structure
-  int    maxFiles;              // Maximum number of files per processor
-  vector<string> inFiles;       // Files read by this processor
-  vector<long> fileParticles;   // Number of particles in files on processor
-
-  struct GadgetHeader gadgetHeader; // Gadget file header
-  int    gadgetFormat;          // GADGET-1 or GADGET-2
-  bool   gadgetSwap;            // Endian swap needed
-  long int gadgetParticleCount; // Total particles in the file
-
-  long   maxParticles;          // Largest number of particles in any file
-  long   maxRead;               // Largest number of particles read at one time
-  int    maxReadsPerFile;       // Max number of reads per file
-
-  long   totalParticles;        // Number of particles on all files
-  int    headerSize;            // For BLOCK files
-
-  int    nextProc;              // Where to send buffers to be shared
-  int    prevProc;              // Where to receive shared buffers from
-  int    numberOfFiles;         // Number of input files total
-  int    processorsPerFile;     // Multiple processors read same file
-  int    numberOfFileSends;     // Number of round robin sends to share buffers
-  int    maxFileSends;          // Max number of round robin sends to share buffers
-
-  int    layoutSize[DIMENSION]; // Decomposition of processors
-  int    layoutPos[DIMENSION];  // Position of this processor in decomposition
-
-  long   np;                    // Number of particles in the problem
-  POSVEL_T boxSize;             // Physical box size (rL)
-  POSVEL_T massConvertFactor;   // Multiply every mass read by this
-  POSVEL_T distConvertFactor;   // Multiply every position read by this
-
-  long   numberOfAliveParticles;
-
-  long   particleCount;         // Running index used to store data
-                                // Ends up as the number of alive plus dead
-
-  POSVEL_T minAlive[DIMENSION]; // Minimum alive particle location on processor
-  POSVEL_T maxAlive[DIMENSION]; // Maximum alive particle location on processor
-
-  int    neighbor[NUM_OF_NEIGHBORS];            // Neighbor processor ids
-
-  vector<POSVEL_T>* xx;         // X location for particles on this processor
-  vector<POSVEL_T>* yy;         // Y location for particles on this processor
-  vector<POSVEL_T>* zz;         // Z location for particles on this processor
-  vector<POSVEL_T>* vx;         // X velocity for particles on this processor
-  vector<POSVEL_T>* vy;         // Y velocity for particles on this processor
-  vector<POSVEL_T>* vz;         // Z velocity for particles on this processor
-  vector<POSVEL_T>* ms;         // Mass for particles on this processor
-  vector<ID_T>* tag;            // Id tag for particles on this processor
-};
-
-#endif
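
For reference, a minimal standalone sketch of the per-processor "alive" sub-box idea
described in the ParticleDistribute comments above (each rank owns a slab of the rL box,
mirroring the minAlive/maxAlive members, and later receives a dead-zone margin from its
neighbors). The names AliveRegion, aliveRegion and isAliveHere, and the 2x2x2 example
layout, are illustrative only and not part of the upstream Cosmo API.

#include <array>
#include <iostream>

// Sub-box of physical space whose particles are "alive" on one processor.
struct AliveRegion {
  std::array<float, 3> minAlive;
  std::array<float, 3> maxAlive;
};

// Derive the alive sub-box from the box size rL and this rank's position
// in the Cartesian decomposition (illustrative, uniform decomposition).
static AliveRegion aliveRegion(float rL,
                               const std::array<int, 3>& layoutSize,
                               const std::array<int, 3>& layoutPos)
{
  AliveRegion r;
  for (int dim = 0; dim < 3; dim++) {
    float step = rL / layoutSize[dim];          // width of this rank's slab
    r.minAlive[dim] = layoutPos[dim] * step;    // front edge of the slab
    r.maxAlive[dim] = r.minAlive[dim] + step;   // back edge of the slab
  }
  return r;
}

// True if a particle position lies inside this rank's alive sub-box.
static bool isAliveHere(const AliveRegion& r, const std::array<float, 3>& p)
{
  for (int dim = 0; dim < 3; dim++)
    if (p[dim] < r.minAlive[dim] || p[dim] >= r.maxAlive[dim])
      return false;
  return true;
}

int main()
{
  AliveRegion r = aliveRegion(100.0f, {2, 2, 2}, {1, 0, 1});
  std::cout << std::boolalpha
            << isAliveHere(r, {75.0f, 10.0f, 60.0f}) << std::endl   // true
            << isAliveHere(r, {25.0f, 10.0f, 60.0f}) << std::endl;  // false
  return 0;
}
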
diff --git a/ThirdParty/Cosmo/ParticleExchange.cxx b/ThirdParty/Cosmo/ParticleExchange.cxx
deleted file mode 100644
index 71a6394..0000000
--- a/ThirdParty/Cosmo/ParticleExchange.cxx
+++ /dev/null
@@ -1,756 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-#include "Partition.h"
-#include "ParticleExchange.h"
-
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <iomanip>
-
-#include <sys/types.h>
-
-using namespace std;
-
-/////////////////////////////////////////////////////////////////////////
-//
-// ParticleExchange is initialized with physical size of particle space and
-// the margin of dead zone desired for each processor.  It is given the
-// physical x,y,z locations for particles on this processor and can get
-// the number of each neighbor processor.  Since the desired goal is to
-// populate every processor with the alive particles (which it enters this
-// class with) and dead particles belonging on the edges of all neighbors,
-// each processor categorizes its own particles and arranges to send them
-// to the appropriate neighbor, and to receive particles from each neighbor
-// which it adds to the location vectors.
-//
-/////////////////////////////////////////////////////////////////////////
-
-ParticleExchange::ParticleExchange()
-{
-  // Get the number of processors running this problem and rank
-  this->numProc = Partition::getNumProc();
-  this->myProc = Partition::getMyProc();
-
-  // Get the number of processors in each dimension
-  Partition::getDecompSize(this->layoutSize);
-
-  // Get my position within the Cartesian topology
-  Partition::getMyPosition(this->layoutPos);
-
-  // Get neighbors of this processor including the wraparound
-  Partition::getNeighbors(this->neighbor);
-
-  // For this processor calculate alterations needed for wraparound locations
-  calculateOffsetFactor();
-
-  this->numberOfAliveParticles = 0;
-  this->numberOfDeadParticles = 0;
-}
-
-ParticleExchange::~ParticleExchange()
-{
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set parameters for particle distribution
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::setParameters(POSVEL_T rL, POSVEL_T deadSz)
-{
-  // Physical total space and amount of physical space to use for dead particles
-  this->boxSize = rL;
-  this->deadSize = deadSz;
-
-#ifndef USE_VTK_COSMO
-  if (this->myProc == MASTER) {
-    cout << endl << "------------------------------------" << endl;
-    cout << "boxSize:  " << this->boxSize << endl;
-    cout << "deltaBox: " << this->deadSize << endl;
-  }
-#endif
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// ParticleExchange will start with only ALIVE particles and will determine
-// which of those particles must be sent to neighbors for the overloading
-// of DEAD particles.  As a particle is examined it may fall into several
-// sharing regions.  For instance a particle in a corner will be sent across
-// three faces, three edges and one corner.  As it is sent, the x,y,z must
-// be altered in different ways.  Face overloading requires changing one
-// dimension's location, while corner overloading requires three changes.
-// And these changes are only needed for processors on an edge of the
-// decomposition where layoutPos = 0 or layoutPos = layoutSize - 1.
-//
-// This method calculates a simple matrix which can be applied at the
-// time that the exchange buffer is filled with locations.  The rule for
-// sending a location is location = location + (overLoadFactor * boxSize);
-//
-// The factors are
-//      0       location in that dimension is not changed
-//     +1       location in that dimension is incremented by box size
-//     -1       location in that dimension is decremented by box size
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::calculateOffsetFactor()
-{
-   // Default is that a location is not changed when shared with a neighbor
-   // This is the case for all interior processors
-   for (int n = 0; n < NUM_OF_NEIGHBORS; n++)
-      for (int dim = 0; dim < DIMENSION; dim++)
-         this->overLoadFactor[n][dim] = 0;
-
-   // If this processor is on the edge of the decomposition then when it
-   // sends overloaded locations they must be altered.  This will depend on
-   // the position of this processor in the layout and on the neighbor 
-   // which is receiving the data
-
-   // Processor is on front edge in X dimension so add rL to wraparound x
-   if (this->layoutPos[0] == 0) {
-      this->overLoadFactor[X0][0] = 1;
-      this->overLoadFactor[X0_Y0][0] = 1;
-      this->overLoadFactor[X0_Y1][0] = 1;
-      this->overLoadFactor[Z0_X0][0] = 1;
-      this->overLoadFactor[Z1_X0][0] = 1;
-      this->overLoadFactor[X0_Y0_Z0][0] = 1;
-      this->overLoadFactor[X0_Y0_Z1][0] = 1;
-      this->overLoadFactor[X0_Y1_Z0][0] = 1;
-      this->overLoadFactor[X0_Y1_Z1][0] = 1;
-   }
-
-   // Processor is on back edge in X dimension so subtract rL from wraparound x
-   if (this->layoutPos[0] == (this->layoutSize[0] - 1)) {
-      this->overLoadFactor[X1][0] = -1;
-      this->overLoadFactor[X1_Y1][0] = -1;
-      this->overLoadFactor[X1_Y0][0] = -1;
-      this->overLoadFactor[Z1_X1][0] = -1;
-      this->overLoadFactor[Z0_X1][0] = -1;
-      this->overLoadFactor[X1_Y1_Z1][0] = -1;
-      this->overLoadFactor[X1_Y1_Z0][0] = -1;
-      this->overLoadFactor[X1_Y0_Z1][0] = -1;
-      this->overLoadFactor[X1_Y0_Z0][0] = -1;
-   }
-
-   // Processor is on front edge in Y dimension so add rL to wraparound y
-   if (this->layoutPos[1] == 0) {
-      this->overLoadFactor[Y0][1] = 1;
-      this->overLoadFactor[X0_Y0][1] = 1;
-      this->overLoadFactor[X1_Y0][1] = 1;
-      this->overLoadFactor[Y0_Z0][1] = 1;
-      this->overLoadFactor[Y0_Z1][1] = 1;
-      this->overLoadFactor[X0_Y0_Z0][1] = 1;
-      this->overLoadFactor[X0_Y0_Z1][1] = 1;
-      this->overLoadFactor[X1_Y0_Z1][1] = 1;
-      this->overLoadFactor[X1_Y0_Z0][1] = 1;
-   }
-
-   // Processor is on back edge in Y dimension so subtract rL from wraparound y
-   if (this->layoutPos[1] == (this->layoutSize[1] - 1)) {
-      this->overLoadFactor[Y1][1] = -1;
-      this->overLoadFactor[X1_Y1][1] = -1;
-      this->overLoadFactor[X0_Y1][1] = -1;
-      this->overLoadFactor[Y1_Z1][1] = -1;
-      this->overLoadFactor[Y1_Z0][1] = -1;
-      this->overLoadFactor[X1_Y1_Z1][1] = -1;
-      this->overLoadFactor[X1_Y1_Z0][1] = -1;
-      this->overLoadFactor[X0_Y1_Z0][1] = -1;
-      this->overLoadFactor[X0_Y1_Z1][1] = -1;
-   }
-
-   // Processor is on front edge in Z dimension so add rL to wraparound z
-   if (this->layoutPos[2] == 0) {
-      this->overLoadFactor[Z0][2] = 1;
-      this->overLoadFactor[Y0_Z0][2] = 1;
-      this->overLoadFactor[Y1_Z0][2] = 1;
-      this->overLoadFactor[Z0_X0][2] = 1;
-      this->overLoadFactor[Z0_X1][2] = 1;
-      this->overLoadFactor[X0_Y0_Z0][2] = 1;
-      this->overLoadFactor[X1_Y1_Z0][2] = 1;
-      this->overLoadFactor[X0_Y1_Z0][2] = 1;
-      this->overLoadFactor[X1_Y0_Z0][2] = 1;
-   }
-
-   // Processor is on back edge in Z dimension so subtract rL from wraparound z
-   if (this->layoutPos[2] == (this->layoutSize[2] - 1)) {
-      this->overLoadFactor[Z1][2] = -1;
-      this->overLoadFactor[Y1_Z1][2] = -1;
-      this->overLoadFactor[Y0_Z1][2] = -1;
-      this->overLoadFactor[Z1_X1][2] = -1;
-      this->overLoadFactor[Z1_X0][2] = -1;
-      this->overLoadFactor[X1_Y1_Z1][2] = -1;
-      this->overLoadFactor[X0_Y0_Z1][2] = -1;
-      this->overLoadFactor[X1_Y0_Z1][2] = -1;
-      this->overLoadFactor[X0_Y1_Z1][2] = -1;
-   }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// All particles on this processor initially are alive, but some of those
-// alive must be exchanged with neighbors.  Determine the physical range
-// on this processor where an ALIVE particle will never be exchanged and
-// the ranges for each neighbor's future DEAD particles.  Then when
-// reading each particle it can quickly be assigned.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::initialize()
-{
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  if (this->myProc == MASTER)
-    cout << "Decomposition: [" << this->layoutSize[0] << ":"
-         << this->layoutSize[1] << ":" << this->layoutSize[2] << "]" << endl;
-#endif
-#endif
-
-  // Set subextents on particle locations for this processor
-  POSVEL_T boxStep[DIMENSION];
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    boxStep[dim] = this->boxSize / this->layoutSize[dim];
-
-    // All particles are alive and available for sharing
-    this->minShare[dim] = this->layoutPos[dim] * boxStep[dim];
-    this->maxShare[dim] = this->minShare[dim] + boxStep[dim];
-    if (this->maxShare[dim] > this->boxSize)
-      this->maxShare[dim] = this->boxSize;
-
-    // Particles in the middle of the shared region will not be shared
-    this->minMine[dim] = this->minShare[dim] + this->deadSize;
-    this->maxMine[dim] = this->maxShare[dim] - this->deadSize;
-  }
-
-  // Set the ranges on the dead particles for each neighbor direction
-  calculateExchangeRegions();
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Each of the 26 neighbors will be sent a rectangular region of my particles
-// Calculate the range in each dimension of the ghost area
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::calculateExchangeRegions()
-{
-  // Initialize all neighbors to the entire available exchange range
-  for (int i = 0; i < NUM_OF_NEIGHBORS; i++) {
-    for (int dim = 0; dim < DIMENSION; dim++) {
-      this->minRange[i][dim] = this->minShare[dim];
-      this->maxRange[i][dim] = this->maxShare[dim];
-    }
-  }
-
-  // Left face
-  this->minRange[X0][0] = this->minShare[0];
-  this->maxRange[X0][0] = this->minMine[0];
-
-  // Right face
-  this->minRange[X1][0] = this->maxMine[0];
-  this->maxRange[X1][0] = this->maxShare[0];
-
-  // Bottom face
-  this->minRange[Y0][1] = this->minShare[1];
-  this->maxRange[Y0][1] = this->minMine[1];
-
-  // Top face
-  this->minRange[Y1][1] = this->maxMine[1];
-  this->maxRange[Y1][1] = this->maxShare[1];
-
-  // Front face
-  this->minRange[Z0][2] = this->minShare[2];
-  this->maxRange[Z0][2] = this->minMine[2];
-
-  // Back face
-  this->minRange[Z1][2] = this->maxMine[2];
-  this->maxRange[Z1][2] = this->maxShare[2];
-
-  // Left bottom and top bars
-  this->minRange[X0_Y0][0] = this->minShare[0];
-  this->maxRange[X0_Y0][0] = this->minMine[0];
-  this->minRange[X0_Y0][1] = this->minShare[1];
-  this->maxRange[X0_Y0][1] = this->minMine[1];
-
-  this->minRange[X0_Y1][0] = this->minShare[0];
-  this->maxRange[X0_Y1][0] = this->minMine[0];
-  this->minRange[X0_Y1][1] = this->maxMine[1];
-  this->maxRange[X0_Y1][1] = this->maxShare[1];
-
-  // Right bottom and top bars
-  this->minRange[X1_Y0][0] = this->maxMine[0];
-  this->maxRange[X1_Y0][0] = this->maxShare[0];
-  this->minRange[X1_Y0][1] = this->minShare[1];
-  this->maxRange[X1_Y0][1] = this->minMine[1];
-
-  this->minRange[X1_Y1][0] = this->maxMine[0];
-  this->maxRange[X1_Y1][0] = this->maxShare[0];
-  this->minRange[X1_Y1][1] = this->maxMine[1];
-  this->maxRange[X1_Y1][1] = this->maxShare[1];
-
-  // Bottom front and back bars
-  this->minRange[Y0_Z0][1] = this->minShare[1];
-  this->maxRange[Y0_Z0][1] = this->minMine[1];
-  this->minRange[Y0_Z0][2] = this->minShare[2];
-  this->maxRange[Y0_Z0][2] = this->minMine[2];
-
-  this->minRange[Y0_Z1][1] = this->minShare[1];
-  this->maxRange[Y0_Z1][1] = this->minMine[1];
-  this->minRange[Y0_Z1][2] = this->maxMine[2];
-  this->maxRange[Y0_Z1][2] = this->maxShare[2];
-
-  // Top front and back bars 
-  this->minRange[Y1_Z0][1] = this->maxMine[1];
-  this->maxRange[Y1_Z0][1] = this->maxShare[1];
-  this->minRange[Y1_Z0][2] = this->minShare[2];
-  this->maxRange[Y1_Z0][2] = this->minMine[2];
-
-  this->minRange[Y1_Z1][1] = this->maxMine[1];
-  this->maxRange[Y1_Z1][1] = this->maxShare[1];
-  this->minRange[Y1_Z1][2] = this->maxMine[2];
-  this->maxRange[Y1_Z1][2] = this->maxShare[2];
-
-  // Left front and back bars (vertical)
-  this->minRange[Z0_X0][0] = this->minShare[0];
-  this->maxRange[Z0_X0][0] = this->minMine[0];
-  this->minRange[Z0_X0][2] = this->minShare[2];
-  this->maxRange[Z0_X0][2] = this->minMine[2];
-
-  this->minRange[Z1_X0][0] = this->minShare[0];
-  this->maxRange[Z1_X0][0] = this->minMine[0];
-  this->minRange[Z1_X0][2] = this->maxMine[2];
-  this->maxRange[Z1_X0][2] = this->maxShare[2];
-
-  // Right front and back bars (vertical)
-  this->minRange[Z0_X1][0] = this->maxMine[0];
-  this->maxRange[Z0_X1][0] = this->maxShare[0];
-  this->minRange[Z0_X1][2] = this->minShare[2];
-  this->maxRange[Z0_X1][2] = this->minMine[2];
-
-  this->minRange[Z1_X1][0] = this->maxMine[0];
-  this->maxRange[Z1_X1][0] = this->maxShare[0];
-  this->minRange[Z1_X1][2] = this->maxMine[2];
-  this->maxRange[Z1_X1][2] = this->maxShare[2];
-
-  // Left bottom front corner
-  this->minRange[X0_Y0_Z0][0] = this->minShare[0];
-  this->maxRange[X0_Y0_Z0][0] = this->minMine[0];
-  this->minRange[X0_Y0_Z0][1] = this->minShare[1];
-  this->maxRange[X0_Y0_Z0][1] = this->minMine[1];
-  this->minRange[X0_Y0_Z0][2] = this->minShare[2];
-  this->maxRange[X0_Y0_Z0][2] = this->minMine[2];
-
-  // Left bottom back corner
-  this->minRange[X0_Y0_Z1][0] = this->minShare[0];
-  this->maxRange[X0_Y0_Z1][0] = this->minMine[0];
-  this->minRange[X0_Y0_Z1][1] = this->minShare[1];
-  this->maxRange[X0_Y0_Z1][1] = this->minMine[1];
-  this->minRange[X0_Y0_Z1][2] = this->maxMine[2];
-  this->maxRange[X0_Y0_Z1][2] = this->maxShare[2];
-
-  // Left top front corner
-  this->minRange[X0_Y1_Z0][0] = this->minShare[0];
-  this->maxRange[X0_Y1_Z0][0] = this->minMine[0];
-  this->minRange[X0_Y1_Z0][1] = this->maxMine[1];
-  this->maxRange[X0_Y1_Z0][1] = this->maxShare[1];
-  this->minRange[X0_Y1_Z0][2] = this->minShare[2];
-  this->maxRange[X0_Y1_Z0][2] = this->minMine[2];
-
-  // Left top back corner
-  this->minRange[X0_Y1_Z1][0] = this->minShare[0];
-  this->maxRange[X0_Y1_Z1][0] = this->minMine[0];
-  this->minRange[X0_Y1_Z1][1] = this->maxMine[1];
-  this->maxRange[X0_Y1_Z1][1] = this->maxShare[1];
-  this->minRange[X0_Y1_Z1][2] = this->maxMine[2];
-  this->maxRange[X0_Y1_Z1][2] = this->maxShare[2];
-
-  // Right bottom front corner
-  this->minRange[X1_Y0_Z0][0] = this->maxMine[0];
-  this->maxRange[X1_Y0_Z0][0] = this->maxShare[0];
-  this->minRange[X1_Y0_Z0][1] = this->minShare[1];
-  this->maxRange[X1_Y0_Z0][1] = this->minMine[1];
-  this->minRange[X1_Y0_Z0][2] = this->minShare[2];
-  this->maxRange[X1_Y0_Z0][2] = this->minMine[2];
-
-  // Right bottom back corner
-  this->minRange[X1_Y0_Z1][0] = this->maxMine[0];
-  this->maxRange[X1_Y0_Z1][0] = this->maxShare[0];
-  this->minRange[X1_Y0_Z1][1] = this->minShare[1];
-  this->maxRange[X1_Y0_Z1][1] = this->minMine[1];
-  this->minRange[X1_Y0_Z1][2] = this->maxMine[2];
-  this->maxRange[X1_Y0_Z1][2] = this->maxShare[2];
-
-  // Right top front corner
-  this->minRange[X1_Y1_Z0][0] = this->maxMine[0];
-  this->maxRange[X1_Y1_Z0][0] = this->maxShare[0];
-  this->minRange[X1_Y1_Z0][1] = this->maxMine[1];
-  this->maxRange[X1_Y1_Z0][1] = this->maxShare[1];
-  this->minRange[X1_Y1_Z0][2] = this->minShare[2];
-  this->maxRange[X1_Y1_Z0][2] = this->minMine[2];
-
-  // Right top back corner
-  this->minRange[X1_Y1_Z1][0] = this->maxMine[0];
-  this->maxRange[X1_Y1_Z1][0] = this->maxShare[0];
-  this->minRange[X1_Y1_Z1][1] = this->maxMine[1];
-  this->maxRange[X1_Y1_Z1][1] = this->maxShare[1];
-  this->minRange[X1_Y1_Z1][2] = this->maxMine[2];
-  this->maxRange[X1_Y1_Z1][2] = this->maxShare[2];
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set the particle vectors that have already been read and which
-// contain only the alive particles for this processor
-//
-/////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::setParticles(
-                        vector<POSVEL_T>* xLoc,
-                        vector<POSVEL_T>* yLoc,
-                        vector<POSVEL_T>* zLoc,
-                        vector<POSVEL_T>* xVel,
-                        vector<POSVEL_T>* yVel,
-                        vector<POSVEL_T>* zVel,
-                        vector<POSVEL_T>* mass,
-                        vector<POTENTIAL_T>* potential,
-                        vector<ID_T>* id,
-                        vector<MASK_T>* maskData,
-                        vector<STATUS_T>* type)
-{
-  this->particleCount = (long)xLoc->size();
-  this->numberOfAliveParticles = this->particleCount;
-  this->xx = xLoc;
-  this->yy = yLoc;
-  this->zz = zLoc;
-  this->vx = xVel;
-  this->vy = yVel;
-  this->vz = zVel;
-  this->ms = mass;
-  this->pot = potential;
-  this->tag = id;
-  this->mask = maskData;
-  this->status = type;
-  this->status->clear();
-}
-        
-/////////////////////////////////////////////////////////////////////////////
-//
-// Alive particles are contained on each processor.  Identify the border
-// particles which will be dead on other processors and exchange them
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::exchangeParticles()
-{
-  // Identify alive particles on this processor which must be shared
-  // because they are dead particles on neighbor processors
-  // x,y,z are still in physical units (because deadSize is given that way)
-  identifyExchangeParticles();
-
-  // Exchange those particles with appropriate neighbors
-  // x,y,z are not in normalized units
-  exchangeNeighborParticles();
-
-  // Count the particles across processors
-  long totalAliveParticles = 0;
-  long totalDeadParticles = 0;
-
-#ifdef USE_SERIAL_COSMO
-  totalAliveParticles = this->numberOfAliveParticles;
-  totalDeadParticles = this->numberOfDeadParticles;
-#else
-  MPI_Allreduce((void*) &this->numberOfAliveParticles, 
-                (void*) &totalAliveParticles, 
-                1, MPI_LONG, MPI_SUM, Partition::getComm());
-  MPI_Allreduce((void*) &this->numberOfDeadParticles,
-                (void*) &totalDeadParticles, 
-                1, MPI_LONG, MPI_SUM, Partition::getComm());
-#endif
-
-#ifndef USE_VTK_COSMO
-#ifdef DEBUG
-  cout << "Exchange Particles Rank " << setw(3) << this->myProc 
-       << " #alive = " << this->numberOfAliveParticles
-       << " #dead = " << this->numberOfDeadParticles << endl;
-#endif
- 
-  if (this->myProc == MASTER) {
-    cout << "TotalAliveParticles " << totalAliveParticles << endl;
-    cout << "TotalDeadParticles  " << totalDeadParticles << endl << endl;
-  }
-#endif
-}
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Iterate over all the alive particles on this processor and determine
-// which must be shared and add them to the vector for that neighbor
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::identifyExchangeParticles()
-{
-  long notSharedCount = 0;
-  long sharedCount = 0;
-
-  // All initial particles before the exchange are ALIVE
-  for (long i = 0; i < this->particleCount; i++) {
-    this->status->push_back(ALIVE);
-    if (((*this->xx)[i] > this->minMine[0] && 
-         (*this->xx)[i] < this->maxMine[0]) &&
-        ((*this->yy)[i] > this->minMine[1] && 
-         (*this->yy)[i] < this->maxMine[1]) &&
-        ((*this->zz)[i] > this->minMine[2] && 
-         (*this->zz)[i] < this->maxMine[2])) {
-          notSharedCount++;
-    } else {
-      // Particle is alive here; determine which processors need it as dead
-      for (int n = 0; n < NUM_OF_NEIGHBORS; n++) {
-        if ((*this->xx)[i] >= minRange[n][0] && 
-            (*this->xx)[i] <= maxRange[n][0] &&
-            (*this->yy)[i] >= minRange[n][1] && 
-            (*this->yy)[i] <= maxRange[n][1] &&
-            (*this->zz)[i] >= minRange[n][2] && 
-            (*this->zz)[i] <= maxRange[n][2]) {
-                this->neighborParticles[n].push_back(i);
-                sharedCount++;
-        }
-      }
-    }
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Exchange the appropriate particles with neighbors
-// Only the index of the particle to be exchanged is stored, so fill out
-// the message with location, velocity, tag.  Status information doesn't
-// have to be sent because when the message is received, the neighbor
-// containing the new dead particle will be known
-//
-// Use the Cartesian communicator for neighbor exchange
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::exchangeNeighborParticles()
-{
-  // Calculate the maximum number of particles to share for calculating buffer
-  int myShareSize = 0;
-  for (int n = 0; n < NUM_OF_NEIGHBORS; n++)
-    if (myShareSize < (int)this->neighborParticles[n].size())
-      myShareSize = (int)this->neighborParticles[n].size();
-
-  int maxShareSize;
-#ifdef USE_SERIAL_COSMO
-  maxShareSize = myShareSize;
-#else
-  MPI_Allreduce((void*) &myShareSize,
-                (void*) &maxShareSize,
-                1, MPI_INT, MPI_MAX, Partition::getComm());
-#endif
-
-  // Allocate messages to send and receive MPI buffers
-  // Space for particle count + record (loc, vel, mass, tag) + potential + mask
-  int bufferSize = sizeof(int) +
-        (maxShareSize * 
-          (RECORD_SIZE + sizeof(POSVEL_T) + sizeof(MASK_T)));
-
-  Message* sendMessage = new Message(bufferSize);
-  Message* recvMessage = new Message(bufferSize);
-
-#ifndef USE_VTK_COSMO
-  // Debug statement added by Adrian to see how much buffer space we're using
-  if(this->myProc == MASTER) {
-    printf("PXCH buffer = 2*%d = %f MB\n",bufferSize,
-           2.0*bufferSize/1024.0/1024.0);
-  }
-#endif
-
-#ifndef USE_SERIAL_COSMO
-  MPI_Barrier(Partition::getComm());
-#endif
-
-  // Exchange with each neighbor, with everyone sending in one direction and
-  // receiving from the other.  Data corresponding to the particle index
-  // must be packed in the buffer.  When the data is received, it is unpacked
-  // into the location, velocity and tag vectors and the status is set
-  // to the neighbor who sent it
-
-  for (int n = 0; n < NUM_OF_NEIGHBORS; n=n+2) {
-    // Neighbor pairs in Definition.h must match so that every processor
-    // sends and every processor receives on each exchange
-    exchange(n, n+1, sendMessage, recvMessage);
-    exchange(n+1, n, sendMessage, recvMessage);
-  }
-
-  delete sendMessage;
-  delete recvMessage;
-}
-
-/////////////////////////////////////////////////////////////////////////////
-//
-// Pack particle data for the indicated neighbor into MPI message
-// Send that message and receive from opposite neighbor
-// Unpack the received particle data and add to particle buffers with
-// an indication of dead and the neighbor on which the particle is alive
-//
-/////////////////////////////////////////////////////////////////////////////
-
-void ParticleExchange::exchange(
-                        int sendTo, 
-                        int recvFrom, 
-                        Message* sendMessage, 
-                        Message* recvMessage)
-{
-  POSVEL_T posValue;
-  POTENTIAL_T potValue;
-  ID_T idValue;
-  MASK_T maskValue;
-
-  // Fill same message for each of the neighbors
-  sendMessage->reset();
-  recvMessage->reset();
-
-  // Number of particles to share with neighbor
-  int sendParticleCount = (int)this->neighborParticles[sendTo].size();
-
-  // Overload factor alters the x,y,z dimension for wraparound depending on
-  // the neighbor receiving the data and the position this processor
-  // has in the decomposition
-  POSVEL_T offset[DIMENSION];
-  for (int dim = 0; dim < DIMENSION; dim++)
-    offset[dim] = this->overLoadFactor[sendTo][dim] * this->boxSize;
-
-  // If this processor would be sending to itself skip the MPI
-  if (this->neighbor[sendTo] == this->myProc) {
-    for (int i = 0; i < sendParticleCount; i++) {
-
-      int deadIndex = this->neighborParticles[sendTo][i];
-      this->xx->push_back((*this->xx)[deadIndex] + offset[0]);
-      this->yy->push_back((*this->yy)[deadIndex] + offset[1]);
-      this->zz->push_back((*this->zz)[deadIndex] + offset[2]);
-      this->vx->push_back((*this->vx)[deadIndex]);
-      this->vy->push_back((*this->vy)[deadIndex]);
-      this->vz->push_back((*this->vz)[deadIndex]);
-      this->ms->push_back((*this->ms)[deadIndex]);
-      this->pot->push_back((*this->pot)[deadIndex]);
-      this->tag->push_back((*this->tag)[deadIndex]);
-      this->mask->push_back((*this->mask)[deadIndex]);
-      this->status->push_back(recvFrom);
-
-      this->numberOfDeadParticles++;
-      this->particleCount++;
-    }
-    return;
-  }
-
-  // Pack the number of particles being sent
-  sendMessage->putValue(&sendParticleCount);
-
-  for (int i = 0; i < sendParticleCount; i++) {
-    int deadIndex = this->neighborParticles[sendTo][i];
-
-    // Locations are altered by wraparound if needed
-    posValue = (*this->xx)[deadIndex] + offset[0];
-    sendMessage->putValue(&posValue);
-    posValue = (*this->yy)[deadIndex] + offset[1];
-    sendMessage->putValue(&posValue);
-    posValue = (*this->zz)[deadIndex] + offset[2];
-    sendMessage->putValue(&posValue);
-
-    // Other values are just sent
-    sendMessage->putValue(&(*this->vx)[deadIndex]);
-    sendMessage->putValue(&(*this->vy)[deadIndex]);
-    sendMessage->putValue(&(*this->vz)[deadIndex]);
-    sendMessage->putValue(&(*this->ms)[deadIndex]);
-    sendMessage->putValue(&(*this->pot)[deadIndex]);
-    sendMessage->putValue(&(*this->tag)[deadIndex]);
-    sendMessage->putValue(&(*this->mask)[deadIndex]);
-  }
-
-  // Send the message buffer
-  sendMessage->send(this->neighbor[sendTo]);
-
-  // Receive the buffer from neighbor on other side
-  recvMessage->receive(this->neighbor[recvFrom]);
-
-#ifndef USE_SERIAL_COSMO
-  MPI_Barrier(Partition::getComm());
-#endif
-
-  // Process the received buffer
-  int recvParticleCount;
-  recvMessage->getValue(&recvParticleCount);
-
-  for (int i = 0; i < recvParticleCount; i++) {
-    recvMessage->getValue(&posValue);
-    this->xx->push_back(posValue);
-    recvMessage->getValue(&posValue);
-    this->yy->push_back(posValue);
-    recvMessage->getValue(&posValue);
-    this->zz->push_back(posValue);
-    recvMessage->getValue(&posValue);
-    this->vx->push_back(posValue);
-    recvMessage->getValue(&posValue);
-    this->vy->push_back(posValue);
-    recvMessage->getValue(&posValue);
-    this->vz->push_back(posValue);
-    recvMessage->getValue(&posValue);
-    this->ms->push_back(posValue);
-    recvMessage->getValue(&potValue);
-    this->pot->push_back(potValue);
-    recvMessage->getValue(&idValue);
-    this->tag->push_back(idValue);
-    recvMessage->getValue(&maskValue);
-    this->mask->push_back(maskValue);
-    this->status->push_back(recvFrom);
-
-    this->numberOfDeadParticles++;
-    this->particleCount++;
-  }
-}
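
For reference, a minimal standalone sketch of the wraparound rule described in
calculateOffsetFactor() above, location = location + (overLoadFactor * boxSize) with
factors in {-1, 0, +1}. The helper name wrapLocation and the numeric example values
are illustrative only, not upstream code.

#include <iostream>

// Apply the overload factor described in calculateOffsetFactor():
//   0  -> location unchanged (interior processor)
//  +1  -> location shifted up by the box size (front-edge wraparound)
//  -1  -> location shifted down by the box size (back-edge wraparound)
static float wrapLocation(float location, int overLoadFactor, float boxSize)
{
  return location + overLoadFactor * boxSize;
}

int main()
{
  const float boxSize = 100.0f;   // rL, the physical box size (illustrative value)

  // A particle near the front X face, sent across the periodic boundary,
  // appears just beyond the far side of the receiving neighbor's box.
  std::cout << wrapLocation(1.5f, +1, boxSize) << std::endl;   // 101.5
  // A particle near the back X face wraps the other way.
  std::cout << wrapLocation(98.5f, -1, boxSize) << std::endl;  // -1.5
  // Interior processors never alter locations.
  std::cout << wrapLocation(42.0f, 0, boxSize) << std::endl;   // 42
  return 0;
}
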
diff --git a/ThirdParty/Cosmo/ParticleExchange.h b/ThirdParty/Cosmo/ParticleExchange.h
deleted file mode 100644
index 83e3698..0000000
--- a/ThirdParty/Cosmo/ParticleExchange.h
+++ /dev/null
@@ -1,183 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-// .NAME ParticleExchange - read or get pointer to alive particles on this
-//                          process and exchange dead particles with neighbors
-//
-// .SECTION Description
-// ParticleExchange is initialized with physical size of particle space and
-// the margin of dead zone desired for each processor.  It is given the
-// physical x,y,z locations for particles on this processor and can get
-// the number of each neighbor processor.  Since the desired goal is to
-// populate every processor with the alive particles (which it enters this
-// class with) and dead particles belonging on the edges of all neighbors,
-// each processor categorizes its own particles and arranges to send them
-// to the appropriate neighbor, and to receive particles from each neighbor
-// which it adds to the location vectors.
-//
-// Information exchanged includes x,y,z locations, velocity vectors and integer unique
-// tags per particle.  Also when the data is shared, the particle status is
-// filled in with the number of the neighbor that shared the particle.  This
-// is to make the halo finder faster because instead of listing a particle
-// as just alive or dead, we know where the dead particle is located.
-//
-
-#ifndef ParticleExchange_h
-#define ParticleExchange_h
-
-#include "Message.h"
-
-#ifdef USE_VTK_COSMO 
-#include "CosmoDefinition.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-#else
-#include "Definition.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-#endif 
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT ParticleExchange {
-#else
-class ParticleExchange {
-#endif
-public:
-  ParticleExchange();
-  ~ParticleExchange();
-
-  // Set parameters for particle exchange
-  void setParameters(
-        POSVEL_T rL,            // Box size of the physical problem
-        POSVEL_T deadSize);     // Dead delta border for each processor
-
-  // Calculate the factor to add to locations when doing wraparound shares
-  void calculateOffsetFactor();
-
-  // Set neighbor processor numbers and calculate dead regions
-  void initialize();
-
-  // Calculate physical range of alive particles which must be shared
-  void calculateExchangeRegions();
-
-  // Set alive particle vectors which were created elsewhere
-  void setParticles(
-        vector<POSVEL_T>* xx,
-        vector<POSVEL_T>* yy,
-        vector<POSVEL_T>* zz,
-        vector<POSVEL_T>* vx,
-        vector<POSVEL_T>* vy,
-        vector<POSVEL_T>* vz,
-        vector<POSVEL_T>* mass,
-        vector<POTENTIAL_T>* potential,
-        vector<ID_T>* tag,
-        vector<MASK_T>* mask,
-        vector<STATUS_T>* status);
-
-  // Identify and exchange alive particles which must be shared with neighbors
-  void exchangeParticles();
-  void identifyExchangeParticles();
-  void exchangeNeighborParticles();
-  void exchange(
-        int sendTo,             // Neighbor to send particles to
-        int recvFrom,           // Neighbor to receive particles from
-        Message* sendMessage,
-        Message* recvMessage);
-
-  // Return data needed by other software
-  int getParticleCount()                { return this->particleCount; }
-
-private:
-  int    myProc;                // My processor number
-  int    numProc;               // Total number of processors
-
-  long   totalParticles;        // Number of particles on all files
-  int    headerSize;            // For BLOCK files
-
-  int    layoutSize[DIMENSION]; // Decomposition of processors
-  int    layoutPos[DIMENSION];  // Position of this processor in decomposition
-
-  POSVEL_T boxSize;             // Physical box size (rL)
-  POSVEL_T deadSize;            // Border size for dead particles
-
-  long   numberOfAliveParticles;
-  long   numberOfDeadParticles;
-  long   particleCount;         // Running index used to store data
-                                // Ends up as the number of alive plus dead
-
-  POSVEL_T minMine[DIMENSION];  // Minimum alive particle not exchanged
-  POSVEL_T maxMine[DIMENSION];  // Maximum alive particle not exchanged
-  POSVEL_T minShare[DIMENSION]; // Minimum alive particle shared
-  POSVEL_T maxShare[DIMENSION]; // Maximum alive particle shared
-
-  int      neighbor[NUM_OF_NEIGHBORS];            // Neighbor processor indices
-  POSVEL_T minRange[NUM_OF_NEIGHBORS][DIMENSION]; // Range of dead particles
-  POSVEL_T maxRange[NUM_OF_NEIGHBORS][DIMENSION]; // Range of dead particles
-
-  int    overLoadFactor[NUM_OF_NEIGHBORS][DIMENSION];
-                                // Factor to multiply boxSize by when sending
-                                // locations, for the wraparound alteration
-
-  vector<ID_T> neighborParticles[NUM_OF_NEIGHBORS];
-                                // Particle ids sent to each neighbor as DEAD
-
-  vector<POSVEL_T>* xx;         // X location for particles on this processor
-  vector<POSVEL_T>* yy;         // Y location for particles on this processor
-  vector<POSVEL_T>* zz;         // Z location for particles on this processor
-  vector<POSVEL_T>* vx;         // X velocity for particles on this processor
-  vector<POSVEL_T>* vy;         // Y velocity for particles on this processor
-  vector<POSVEL_T>* vz;         // Z velocity for particles on this processor
-  vector<POSVEL_T>* ms;         // Mass for particles on this processor
-  vector<ID_T>* tag;            // Id tag for particles on this processor
-  vector<STATUS_T>* status;     // Particle is ALIVE or labeled with neighbor
-                                // processor index where it is ALIVE
-  vector<POTENTIAL_T>* pot;     // Potential for particles on this processor
-  vector<MASK_T>* mask;         // Mask data for particles on this processor
-};
-
-#endif
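
For reference, a minimal sketch of the paired send/receive pattern that exchange() and
exchangeNeighborParticles() rely on: neighbors are handled in opposite-direction pairs so
that every rank both sends and receives on each round and no rank deadlocks. A 1-D
periodic ring stands in for the 26-neighbor torus here, and MPI_Sendrecv replaces the
Message helpers; this is an illustration, not the upstream implementation.

#include <mpi.h>
#include <cstdio>

int main(int argc, char** argv)
{
  MPI_Init(&argc, &argv);
  int rank, size;
  MPI_Comm_rank(MPI_COMM_WORLD, &rank);
  MPI_Comm_size(MPI_COMM_WORLD, &size);

  int right = (rank + 1) % size;          // neighbor in the + direction
  int left  = (rank - 1 + size) % size;   // neighbor in the - direction

  int sendValue = rank, recvValue = -1;

  // Round 1: everyone sends toward + and receives from -, so the pair matches up.
  MPI_Sendrecv(&sendValue, 1, MPI_INT, right, 0,
               &recvValue, 1, MPI_INT, left, 0,
               MPI_COMM_WORLD, MPI_STATUS_IGNORE);
  std::printf("rank %d received %d from the left\n", rank, recvValue);

  // Round 2: the opposite direction, mirroring exchange(n+1, n) above.
  MPI_Sendrecv(&sendValue, 1, MPI_INT, left, 1,
               &recvValue, 1, MPI_INT, right, 1,
               MPI_COMM_WORLD, MPI_STATUS_IGNORE);
  std::printf("rank %d received %d from the right\n", rank, recvValue);

  MPI_Finalize();
  return 0;
}
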
diff --git a/ThirdParty/Cosmo/Partition.cxx b/ThirdParty/Cosmo/Partition.cxx
deleted file mode 100644
index b2cf3e9..0000000
--- a/ThirdParty/Cosmo/Partition.cxx
+++ /dev/null
@@ -1,276 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-#include "Partition.h"
-
-#include <iostream>
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Static class to control MPI and the partitioning of processors in
-// a Cartesian grid across the problem space.
-//
-/////////////////////////////////////////////////////////////////////////
-
-#ifndef USE_SERIAL_COSMO
-MPI_Comm Partition::cartComm;
-#endif
-
-int Partition::numProc = 0;
-int Partition::myProc = -1;
-int Partition::decompSize[DIMENSION];
-int Partition::myPosition[DIMENSION];
-int Partition::neighbor[NUM_OF_NEIGHBORS];
-int Partition::initialized = 0;
-
-Partition::Partition()
-{
-}
-
-Partition::~Partition()
-{
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Initialize MPI, allocate the processors across a Cartesian grid of
-// DIMENSION size and record this processor's position, id and neighbor ids
-//
-/////////////////////////////////////////////////////////////////////////
-
-//void Partition::initialize(int& argc, char** argv)
-void Partition::initialize()
-{
-  if(!initialized)
-    {
-#ifndef USE_SERIAL_COSMO
-
-#ifdef USE_VTK_COSMO
-    // This is for when it is compiled against MPI but runs single-processor
-    // in ParaView (client only; it won't MPI_Init itself)
-    int temp;
-    MPI_Initialized(&temp);
-    if(!temp) 
-      {
-      temp = 0;
-      MPI_Init(&temp, 0);
-      }
-#endif
-
-    // Start up MPI
-    //MPI_Init(&argc, &argv);
-    MPI_Comm_rank(MPI_COMM_WORLD, &myProc);
-    MPI_Comm_size(MPI_COMM_WORLD, &numProc);
-#endif    
-
-    for (int dim = 0; dim < DIMENSION; dim++)
-      decompSize[dim] = 0;
-    
-#ifdef USE_SERIAL_COSMO
-    myProc = 0;
-    numProc = 1;
-
-    for(int dim = 0; dim < DIMENSION; dim = dim + 1)
-      {
-      decompSize[dim] = 1;
-      myPosition[dim] = 0;
-      }
-#else
-    int periodic[] = {1, 1, 1};
-    int reorder = 1;
-
-    // Compute the number of processors in each dimension
-    MPI_Dims_create(numProc, DIMENSION, decompSize);
-    
-    // Create the Cartesian communicator
-    MPI_Cart_create(MPI_COMM_WORLD,
-                    DIMENSION, decompSize, periodic, reorder, &cartComm);
-    
-    // Reset my rank if it changed
-    MPI_Comm_rank(cartComm, &myProc);
-    
-    // Get this processor's position in the Cartesian topology
-    MPI_Cart_coords(cartComm, myProc, DIMENSION, myPosition);
-#endif    
-
-    // Set all my neighbor processor ids for communication
-    setNeighbors();
-    
-#ifndef USE_VTK_COSMO
-    if (myProc == 0)
-      cout << "Decomposition: [" << decompSize[0] << ":"
-           << decompSize[1] << ":" << decompSize[2] << "]" << endl; 
-#endif
-
-    initialized = 1;
-    }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Return the decomposition size of this problem
-//
-/////////////////////////////////////////////////////////////////////////
-
-void Partition::getDecompSize(int size[])
-{
-  for (int dim = 0; dim < DIMENSION; dim++)
-    size[dim] = decompSize[dim];
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Return the position of this processor within the Cartesian grid
-//
-/////////////////////////////////////////////////////////////////////////
-
-void Partition::getMyPosition(int pos[])
-{
-  for (int dim = 0; dim < DIMENSION; dim++)
-    pos[dim] = myPosition[dim];
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Return the ranks of the neighbors of this processor using the
-// description in Definition.h
-//
-/////////////////////////////////////////////////////////////////////////
-
-void Partition::getNeighbors(int neigh[])
-{
-  for (int n = 0; n < NUM_OF_NEIGHBORS; n++)
-    neigh[n] = neighbor[n];
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Get the id of a particular processor given its position in the topology
-//
-/////////////////////////////////////////////////////////////////////////
-
-int Partition::getNeighbor
-#ifdef USE_SERIAL_COSMO
-  (int , int , int )
-#else
-  (int xpos, int ypos, int zpos)
-#endif
-{
-#ifdef USE_SERIAL_COSMO
-  return 0;
-#else
-  static int pos[DIMENSION];
-  pos[0] = xpos;
-  pos[1] = ypos;
-  pos[2] = zpos;
-
-  int neighborProc;
-  MPI_Cart_rank(cartComm, pos, &neighborProc);
-  return neighborProc;
-#endif
-}
-
-/////////////////////////////////////////////////////////////////////////
-//    
-// Every processor will have 26 neighbors because the cosmology structure
-// is a 3D torus.  Each will have 6 face neighbors, 12 edge neighbors and
-// 8 corner neighbors.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void Partition::setNeighbors()
-{ 
-  // Where is this processor in the decomposition
-  int xpos = myPosition[0];
-  int ypos = myPosition[1];
-  int zpos = myPosition[2];
-
-  // Face neighbors
-  neighbor[X0] = Partition::getNeighbor(xpos-1, ypos, zpos);
-  neighbor[X1] = Partition::getNeighbor(xpos+1, ypos, zpos);
-  neighbor[Y0] = Partition::getNeighbor(xpos, ypos-1, zpos);
-  neighbor[Y1] = Partition::getNeighbor(xpos, ypos+1, zpos);
-  neighbor[Z0] = Partition::getNeighbor(xpos, ypos, zpos-1);
-  neighbor[Z1] = Partition::getNeighbor(xpos, ypos, zpos+1);
-
-  // Edge neighbors
-  neighbor[X0_Y0] = Partition::getNeighbor(xpos-1, ypos-1, zpos);
-  neighbor[X0_Y1] = Partition::getNeighbor(xpos-1, ypos+1, zpos);
-  neighbor[X1_Y0] = Partition::getNeighbor(xpos+1, ypos-1, zpos);
-  neighbor[X1_Y1] = Partition::getNeighbor(xpos+1, ypos+1, zpos);
-  
-  neighbor[Y0_Z0] = Partition::getNeighbor(xpos, ypos-1, zpos-1);
-  neighbor[Y0_Z1] = Partition::getNeighbor(xpos, ypos-1, zpos+1);
-  neighbor[Y1_Z0] = Partition::getNeighbor(xpos, ypos+1, zpos-1);
-  neighbor[Y1_Z1] = Partition::getNeighbor(xpos, ypos+1, zpos+1);
-  
-  neighbor[Z0_X0] = Partition::getNeighbor(xpos-1, ypos, zpos-1);
-  neighbor[Z0_X1] = Partition::getNeighbor(xpos+1, ypos, zpos-1);
-  neighbor[Z1_X0] = Partition::getNeighbor(xpos-1, ypos, zpos+1);
-  neighbor[Z1_X1] = Partition::getNeighbor(xpos+1, ypos, zpos+1);
-  
-  // Corner neighbors
-  neighbor[X0_Y0_Z0] = Partition::getNeighbor(xpos-1, ypos-1, zpos-1);
-  neighbor[X1_Y0_Z0] = Partition::getNeighbor(xpos+1, ypos-1, zpos-1);
-  neighbor[X0_Y1_Z0] = Partition::getNeighbor(xpos-1, ypos+1, zpos-1);
-  neighbor[X1_Y1_Z0] = Partition::getNeighbor(xpos+1, ypos+1, zpos-1);
-  neighbor[X0_Y0_Z1] = Partition::getNeighbor(xpos-1, ypos-1, zpos+1);
-  neighbor[X1_Y0_Z1] = Partition::getNeighbor(xpos+1, ypos-1, zpos+1);
-  neighbor[X0_Y1_Z1] = Partition::getNeighbor(xpos-1, ypos+1, zpos+1);
-  neighbor[X1_Y1_Z1] = Partition::getNeighbor(xpos+1, ypos+1, zpos+1);
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Shut down MPI
-//
-/////////////////////////////////////////////////////////////////////////
-
-void Partition::finalize()
-{
-  numProc = 0;
-  myProc = -1;
-
-  //MPI_Finalize();
-}
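
For reference, a minimal standalone sketch of the wraparound lookup that MPI_Cart_rank
performs for the periodic decomposition used above: coordinates are wrapped modulo the
decomposition size, which is why every processor has 26 distinct neighbors (6 faces,
12 edges, 8 corners) even on the boundary of the grid. The function torusRank and the
4x4x4 example are illustrative only; the rank formula assumes the default row-major
ordering of a Cartesian communicator.

#include <array>
#include <iostream>

// Wrap a 3-D position onto the periodic decomposition and return its rank.
static int torusRank(std::array<int, 3> pos, const std::array<int, 3>& decompSize)
{
  for (int dim = 0; dim < 3; dim++)
    pos[dim] = ((pos[dim] % decompSize[dim]) + decompSize[dim]) % decompSize[dim];
  // Row-major rank, matching a Cartesian communicator with default ordering.
  return (pos[0] * decompSize[1] + pos[1]) * decompSize[2] + pos[2];
}

int main()
{
  std::array<int, 3> decomp = {4, 4, 4};   // example decomposition
  // The processor at (0,0,0) wraps to the far corner for its (-1,-1,-1) neighbor.
  std::cout << torusRank({-1, -1, -1}, decomp) << std::endl;  // 63
  std::cout << torusRank({1, 0, 0}, decomp) << std::endl;     // 16
  return 0;
}
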
diff --git a/ThirdParty/Cosmo/Partition.h b/ThirdParty/Cosmo/Partition.h
deleted file mode 100644
index 26f2c2a..0000000
--- a/ThirdParty/Cosmo/Partition.h
+++ /dev/null
@@ -1,117 +0,0 @@
-/*=========================================================================
-                                                                                
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC. 
-This software was produced under U.S. Government contract DE-AC52-06NA25396 
-for Los Alamos National Laboratory (LANL), which is operated by 
-Los Alamos National Security, LLC for the U.S. Department of Energy. 
-The U.S. Government has rights to use, reproduce, and distribute this software. 
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.  
-If software is modified to produce derivative works, such modified software 
-should be clearly marked, so as not to confuse it with the version available 
-from LANL.
- 
-Additionally, redistribution and use in source and binary forms, with or 
-without modification, are permitted provided that the following conditions 
-are met:
--   Redistributions of source code must retain the above copyright notice, 
-    this list of conditions and the following disclaimer. 
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution. 
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software 
-    without specific prior written permission. 
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, 
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE 
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR 
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, 
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, 
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; 
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, 
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR 
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF 
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-                                                                                
-=========================================================================*/
-
-// .NAME Partition - Partition MPI processors into cartesian grid
-//
-// .SECTION Description
-// Partition allows MPI to divide the number of processors it is given and
-// to set the position of this processor within the Cartesian grid.  Using
-// that information with wraparound, all neighbors of a processor are
-// also computed.  This class is static and will be shared by all classes
-// within the infrastructure.
-
-#ifndef Partition_h
-#define Partition_h
-
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-#else
-#include "Definition.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-#endif
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT Partition {
-#else
-class Partition {
-#endif
-public:
-  Partition();
-  ~Partition();
-
-  // Control MPI and the Cartesian topology
-  //static void initialize(int& argc, char** argv);
-  static void initialize();
-  static void finalize();
-
-  // Set the processor numbers of neighbors in all directions
-  static void setNeighbors();
-
-#ifndef USE_SERIAL_COSMO
-  static MPI_Comm getComm()       { return cartComm; }
-#endif
-
-  static int  getMyProc()               { return myProc; }
-  static int  getNumProc()              { return numProc; }
-
-  static void getDecompSize(int size[]);
-  static void getMyPosition(int pos[]);
-  static void getNeighbors(int neigh[]);
-
-  static int  getNeighbor(int xpos, int ypos, int zpos);
-
-private:
-  static int myProc;                    // My processor number
-  static int numProc;                   // Total number of processors
-  static int initialized;
-
-#ifndef USE_SERIAL_COSMO
-  static MPI_Comm cartComm;             // Cartesian communicator
-#endif
-
-  static int decompSize[DIMENSION];     // Number of processors in each dim
-  static int myPosition[DIMENSION];     // My index in cartesian communicator
-
-  static int neighbor[NUM_OF_NEIGHBORS];// Neighbor processor ids
-};
-
-#endif
diff --git a/ThirdParty/Cosmo/SODHalo.cxx b/ThirdParty/Cosmo/SODHalo.cxx
deleted file mode 100644
index bd4f180..0000000
--- a/ThirdParty/Cosmo/SODHalo.cxx
+++ /dev/null
@@ -1,849 +0,0 @@
-/*=========================================================================
-
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-#include "Partition.h"
-#include "SODHalo.h"
-
-#include <iostream>
-#include <fstream>
-#include <sstream>
-#include <iomanip>
-#include <set>
-#include <vector>
-#include <algorithm>
-#include <math.h>
-
-#ifndef M_PI
-#define M_PI 3.14159265358979323846
-#endif
-
-using namespace std;
-
-/////////////////////////////////////////////////////////////////////////
-//
-// SODHalo uses the results of the CosmoHaloFinder to locate the
-// particles within every halo in order to calculate halo properties
-//
-/////////////////////////////////////////////////////////////////////////
-
-SODHalo::SODHalo()
-{
-  // Get the number of processors and rank of this processor
-  this->numProc = Partition::getNumProc();
-  this->myProc = Partition::getMyProc();
-  this->numberOfParticles = 0;
-
-  this->binCount = 0;
-  this->binRadius = 0;
-  this->binMass = 0;
-  this->binRho = 0;
-  this->binRhoRatio = 0;
-  this->binInfo = 0;
-
-  this->avgRadius = 0;
-  this->avgRadVelocity = 0;
-  this->particleIndex = 0;
-  this->particleRadius = 0;
-}
-
-SODHalo::~SODHalo()
-{
-  if (this->binCount) delete [] this->binCount;
-  if (this->binRadius) delete [] this->binRadius;
-  if (this->binMass) delete [] this->binMass;
-  if (this->binRho) delete [] this->binRho;
-  if (this->binRhoRatio) delete [] this->binRhoRatio;
-  if (this->binInfo) delete [] this->binInfo;
-
-  if (this->avgRadius) delete [] this->avgRadius;
-  if (this->avgRadVelocity) delete [] this->avgRadVelocity;
-  if (this->particleIndex) delete [] this->particleIndex;
-  if (this->particleRadius) delete [] this->particleRadius;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set parameters for the halo center finder
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::setParameters(
-                        ChainingMesh* chainMesh,
-                        int numBins,
-                        POSVEL_T rL,
-                        POSVEL_T np,
-                        POSVEL_T rhoc,
-                        POSVEL_T sodmass,
-                        POSVEL_T densRatio,
-                        POSVEL_T minFactor,
-                        POSVEL_T maxFactor)
-{
-  // Get information from the chaining mesh
-  this->chain = chainMesh;
-  this->buckets = chain->getBuckets();
-  this->bucketList = chain->getBucketList();
-
-  // Halo finder parameters
-  this->rSmooth = rL / np;
-  this->rhoRatio = densRatio;
-  this->cMinFactor = minFactor;
-  this->cMaxFactor = maxFactor;
-  this->RHOC = rhoc;
-  this->SODMASS = sodmass;
-
-  // Make the number of bins one larger so that all particles less than the
-  // minimum radius can be collected into bin 0
-  this->numberOfBins = numBins + 1;
-
-  // Allocate memory based on bins
-  this->binRadius = new POSVEL_T[this->numberOfBins];
-  this->binRho = new double[this->numberOfBins];
-  this->binRhoRatio = new double[this->numberOfBins];
-  this->binCount = new int[this->numberOfBins];
-  this->binMass = new double[this->numberOfBins];
-  this->binInfo = new vector<RadiusID>[this->numberOfBins];
-
-  this->avgRadius = new double[this->numberOfBins];
-  this->avgRadVelocity = new double[this->numberOfBins];
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Set the particle vectors that have already been read and which
-// contain only the alive particles for this processor
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::setParticles(
-                        vector<POSVEL_T>* xLoc,
-                        vector<POSVEL_T>* yLoc,
-                        vector<POSVEL_T>* zLoc,
-                        vector<POSVEL_T>* xVel,
-                        vector<POSVEL_T>* yVel,
-                        vector<POSVEL_T>* zVel,
-                        vector<POSVEL_T>* pmass,
-                        vector<ID_T>* id)
-{
-  this->particleCount = (long)xLoc->size();
-
-  // Extract the contiguous data block from a vector pointer
-  this->xx = &(*xLoc)[0];
-  this->yy = &(*yLoc)[0];
-  this->zz = &(*zLoc)[0];
-  this->vx = &(*xVel)[0];
-  this->vy = &(*yVel)[0];
-  this->vz = &(*zVel)[0];
-  this->mass = &(*pmass)[0];
-  this->tag = &(*id)[0];
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// SOD (Spherically Over Dense) halos centered at FOF center of minimum size
-//
-// Initial estimate of characteristic radius
-//    Choose a Delta which is the average density of a sphere around xcenter
-//                   divided by the critical density of the universe
-//                   Typically choose Delta = 200 for initial estimate
-//    Delta = Mass(r_delta) / Volume(r_delta)
-//          = m_delta / (4/3)(PI)(r_delta)^3
-//    r_initial = cube_root(mass_FOF / 10^14)
-//
-// Mass profile
-//    Choose r_max = c_max * r_initial
-//       c_max is approximately 2
-//    Collect all particles from the chaining mesh buckets such that they
-//       fall within the r_max sphere
-//    Choose r_min = c_min * r_smooth
-//       c_min is approximately 1
-//       r_smooth is boxSize / gridSize
-//    Arrange bins logarithmically between r_min and r_max storing (r, mass)
-//       radius of sphere and mass of particles within that sphere
-//
-// Improved estimate of characteristic radius
-//    Using the (r_j, m_j) spheres find the two that surround r_200
-//    Interpolate between the values of density to get a better r_200
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::createSODHalo(
-                        int FOFhaloCount,
-                        POSVEL_T centerXLocation,
-                        POSVEL_T centerYLocation,
-                        POSVEL_T centerZLocation,
-                        POSVEL_T avgXVelocity,
-                        POSVEL_T avgYVelocity,
-                        POSVEL_T avgZVelocity,
-                        POSVEL_T FOFhaloMass)
-{
-  this->fofCenterLocation[0] = centerXLocation;
-  this->fofCenterLocation[1] = centerYLocation;
-  this->fofCenterLocation[2] = centerZLocation;
-
-  this->fofHaloVelocity[0] = avgXVelocity;
-  this->fofHaloVelocity[1] = avgYVelocity;
-  this->fofHaloVelocity[2] = avgZVelocity;
-
-  this->fofHaloCount = FOFhaloCount;
-  this->initRadius = (POSVEL_T)pow(
-    (POSVEL_T)(FOFhaloMass / this->SODMASS), (POSVEL_T)(1.0 / 3.0));
-
-  // Binning for concentric spheres over radius range
-  this->minRadius = this->cMinFactor * this->rSmooth;
-  this->maxRadius = this->cMaxFactor * this->initRadius;
-
-  // Calculate logarithmic radial bins containing count, mass and RadiusID pairs
-  calculateMassProfile();
-
-#ifdef DEBUG
-  for (int bin = 0; bin < this->numberOfBins; bin++) {
-    double bmass = 0.0;
-    if (binCount[bin] > 0)
-      bmass = binMass[bin] / binCount[bin];
-
-    cout << "Bin radius " << binRadius[bin]
-         << " Avg Radius " << avgRadius[bin]
-         << " Radial Velocity " << avgRadVelocity[bin]
-         << " Avg Mass " << bmass
-         << " Count " << binCount[bin] << endl;
-  }
-  cout << endl;
-#endif
-
-  // Calculate the characteristic radius for requested density ratio
-  calculateCharacteristicRadius();
-
-  if (this->charRadius > 0.0) {
-
-    // Gather all particles less than the characteristic radius
-    // Collect average velocity at the same time
-    gatherSODParticles();
-
-    // Calculate velocity dispersion
-    calculateVelocityDispersion();
-  }
-
-#ifdef DEBUG
-  cout << "Initial radius = " << this->initRadius << endl;
-  cout << "Characteristic radius " << this->charRadius << endl;
-#endif
-}
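
The comment block above fixes the radius search range from the FOF mass: r_initial = (M_FOF / SODMASS)^(1/3), r_min = c_min * r_smooth and r_max = c_max * r_initial, which is exactly what createSODHalo() computes. A small numeric sketch of that bracketing, with illustrative (non-production) parameter values:

  // Radius bracketing used by createSODHalo(); all values are hypothetical.
  #include <cmath>
  #include <cstdio>

  int main()
  {
    double fofHaloMass = 5.0e13;    // hypothetical FOF halo mass
    double SODMASS     = 1.0e14;    // mass normalization for r_initial
    double rL = 256.0, np = 1024.0; // box size and grid size (hypothetical)
    double cMin = 1.0, cMax = 2.0;  // factors quoted in the comment block

    double rSmooth    = rL / np;    // boxSize / gridSize
    double initRadius = std::pow(fofHaloMass / SODMASS, 1.0 / 3.0);
    double minRadius  = cMin * rSmooth;
    double maxRadius  = cMax * initRadius;

    std::printf("r_initial = %g, r_min = %g, r_max = %g\n",
                initRadius, minRadius, maxRadius);
    return 0;
  }
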
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Divide the radius between the minimum and maximum radius into bins
-// Iterate over all particles in the buckets, incrementing the count for
-// a bin if the radius falls within the boundary.
-// Return the bin pairs (radius, mass of particles within radius)
-//
-// Collect both the mass profile pairs for the number of bins and also
-// store the distances for each particle and sort that array of distances
-// Then we should be able to calculate r_200 and r_approximate_200
-//
-// Return radius[numBins], count[numBins], vector<POSVEL_T> distance sorted
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::calculateMassProfile()
-{
-  // If the max radius runs into the corner of data for this processor
-  // adjust down so as to get a complete sphere
-  POSVEL_T limit;
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    limit = this->chain->getMaxMine(dim) - this->fofCenterLocation[dim];
-    if (this->maxRadius > limit)
-      this->maxRadius = limit;
-    limit = this->fofCenterLocation[dim] - chain->getMinMine(dim);
-    if (this->maxRadius > limit)
-      this->maxRadius = limit;
-  }
-
-#ifndef USE_VTK_COSMO
-  if (this->maxRadius < requiredMaxRadius) {
-    cout << "Reset max radius from " << requiredMaxRadius
-         << " to " << maxRadius << endl;
-    cout << "Might need to make the dead size (overload) larger" << endl;
-  }
-#endif
-
-  // Calculate the delta radius in log scale
-  // Number of bins was increased by one for particles less than the min
-  this->deltaRadius = (POSVEL_T) log10(this->maxRadius / this->minRadius) /
-                                 (this->numberOfBins - 1);
-
-  // Bin 0 is for all particles less than the minimum
-  this->binRadius[0] = this->minRadius;
-  for (int bin = 1; bin < this->numberOfBins; bin++) {
-    // Bin edges grow logarithmically: r_bin = r_min * 10^(deltaRadius * bin)
-    this->binRadius[bin] = this->minRadius *
-      (POSVEL_T)pow((POSVEL_T)10.0, (POSVEL_T)(this->deltaRadius * bin));
-  }
-
-  for (int bin = 0; bin < this->numberOfBins; bin++) {
-    this->binCount[bin] = 0;
-    this->binMass[bin] = 0.0;
-    this->avgRadius[bin] = 0.0;
-    this->avgRadVelocity[bin] = 0.0;
-  }
-
-  // Grid in the bucket grid containing the FOF center
-  int centerIndex[DIMENSION];
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    centerIndex[dim] =
-      (int) ((this->fofCenterLocation[dim] - chain->getMinMine(dim)) /
-             chain->getChainSize());
-  }
-
-  // Number of grids to look at in each direction
-  int gridOffset = (int) (this->maxRadius / chain->getChainSize()) + 1;
-
-  // Range of grid positions to examine for this particle center
-  int first[DIMENSION], last[DIMENSION];
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    first[dim] = centerIndex[dim] - gridOffset;
-    last[dim] = centerIndex[dim] + gridOffset;
-    if (first[dim] < 0)
-      first[dim] = 0;
-    if (last[dim] > chain->getMeshSize(dim))
-      last[dim] = chain->getMeshSize(dim);
-  }
-
-  // Iterate over every possible grid and examine particles in the bucket
-  // Count the number of particles in each of the logarithmic bins
-  POSVEL_T location[DIMENSION];
-  for (int i = first[0]; i <= last[0]; i++) {
-    for (int j = first[1]; j <= last[1]; j++) {
-      for (int k = first[2]; k <= last[2]; k++) {
-
-
-        // Iterate on all particles in this bucket
-        // Index of first particle in bucket
-        int p = this->buckets[i][j][k];
-        while (p != -1) {
-          location[0] = this->xx[p];
-          location[1] = this->yy[p];
-          location[2] = this->zz[p];
-
-          // Calculate distance between this particle and the center
-          POSVEL_T diff[DIMENSION];
-          for (int dim = 0; dim < DIMENSION; dim++)
-            diff[dim] = location[dim] - this->fofCenterLocation[dim];
-
-          POSVEL_T dist = sqrt((diff[0] * diff[0]) +
-                               (diff[1] * diff[1]) +
-                               (diff[2] * diff[2]));
-
-          // If this particle is within the max radius
-          if (dist < this->maxRadius) {
-
-            // Calculate the unit vector for this particle
-            POSVEL_T unit[DIMENSION];
-            for (int dim = 0; dim < DIMENSION; dim++)
-              {
-              if (dist > 0.0)
-                {
-                unit[dim] = diff[dim] / dist;
-                }
-              else
-                {
-                unit[dim] = 0.0;
-                }
-              }
-
-            // Calculate the relative velocity vector of particle wrt center
-            POSVEL_T relVel[DIMENSION];
-            relVel[0] = this->vx[p] - this->fofHaloVelocity[0];
-            relVel[1] = this->vy[p] - this->fofHaloVelocity[1];
-            relVel[2] = this->vz[p] - this->fofHaloVelocity[2];
-
-            // Calculate the radial velocity
-            POSVEL_T radVel = 0.0;
-            for (int dim = 0; dim < DIMENSION; dim++)
-              radVel += unit[dim] * relVel[dim];
-
-            // Calculate the bin this particle goes in
-            // Bin 0 contains all particles less than the min radius
-            int bin = 0;
-            if (dist > this->minRadius) {
-              bin = (int) (floor(log10(dist/this->minRadius) /
-                                    this->deltaRadius)) + 1;
-            }
-            this->binCount[bin]++;
-            this->binMass[bin] += this->mass[p];
-            this->avgRadius[bin] += dist;
-            this->avgRadVelocity[bin] += radVel;
-
-            // Store the actual radius and index of particle on this processor
-            RadiusID pair;
-            pair.radius = dist;
-            pair.index = p;
-            this->binInfo[bin].push_back(pair);
-          }
-
-          // Next particle in bucket
-          p = this->bucketList[p];
-        }
-      }
-    }
-  }
-
-  // Calculate the average radius per bin
-  for (int bin = 0; bin < this->numberOfBins; bin++) {
-    if (binCount[bin] > 0) {
-      avgRadius[bin] /= binCount[bin];
-      avgRadVelocity[bin] /= binCount[bin];
-    }
-  }
-}
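
The binning above is logarithmic: with deltaRadius = log10(r_max / r_min) / (numberOfBins - 1), the bin edges grow as r_min * 10^(deltaRadius * bin), and a particle at distance d > r_min lands in bin floor(log10(d / r_min) / deltaRadius) + 1, while bin 0 collects everything closer than r_min. A self-contained sketch with hypothetical values:

  // Logarithmic radial bins and the bin-index formula used above.
  #include <cmath>
  #include <cstdio>

  int main()
  {
    const int numberOfBins = 21;             // 20 log bins plus bin 0
    double minRadius = 0.5, maxRadius = 2.0; // illustrative radii
    double deltaRadius = std::log10(maxRadius / minRadius) / (numberOfBins - 1);

    for (int bin = 1; bin < numberOfBins; bin++) {
      double edge = minRadius * std::pow(10.0, deltaRadius * bin);
      std::printf("bin %2d upper edge %g\n", bin, edge);
    }

    double dist = 1.3;                       // hypothetical particle distance
    int bin = (int)std::floor(std::log10(dist / minRadius) / deltaRadius) + 1;
    std::printf("distance %g falls in bin %d\n", dist, bin);
    return 0;
  }
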
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Given the mass profile for an SOD halo calculate the
-// characteristic radius matching the requested density
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::calculateCharacteristicRadius()
-{
-  // Calculate the mass, volume and density of every sphere bin
-  // Bin 0 contains information on particles less than the minimum radius
-  int totBinCount = this->binCount[0];
-  double totBinMass = this->binMass[0];
-
-  for (int bin = 1; bin < this->numberOfBins; bin++) {
-    totBinCount += this->binCount[bin];
-    totBinMass += this->binMass[bin];
-
-    double r = (double) this->avgRadius[bin];
-    double volume = ((4.0 * M_PI) / 3.0) * r * r * r;
-
-    this->binRho[bin] = totBinMass / volume;
-    this->binRhoRatio[bin] = this->binRho[bin] / this->RHOC;
-#ifdef DEBUG
-    cout << "Radius " << this->binRadius[bin]
-         << " Avg Radius " << this->avgRadius[bin]
-         << " Mass " << this->binMass[bin]
-         << " Rho " << this->binRho[bin]
-         << " Rho Ratio " << this->binRhoRatio[bin] << endl;
-#endif
-  }
-
-  // Find the two bins that the density should be between
-  // Interpolate the radius matching the requested density
-  vector<int> possibleBins;
-  for (int bin = 1; bin < (this->numberOfBins - 1); bin++) {
-    if (this->binRhoRatio[bin] > RHO_RATIO &&
-        this->binRhoRatio[bin+1] < RHO_RATIO)
-      possibleBins.push_back(bin);
-  }
-
-  // Zero bins means a badly behaved SOD region
-  // More than one bin means use the first
-  if (possibleBins.size() < 1) {
-    this->criticalBin = 0;
-    this->charRadius = 0.0;
-    return;
-  }
-  this->criticalBin = possibleBins[0] + 1;
-
-  // Sort each bin's radius/id pair vector up to the critical bin
-  for (int bin = 0; bin <= criticalBin; bin++)
-    sort(this->binInfo[bin].begin(), this->binInfo[bin].end(), RadiusIDLT());
-
-  // Accumulate mass for all bins lower than the critical bin
-  double totParticleMass = 0.0;
-  for (int bin = 0; bin < criticalBin; bin++)
-    totParticleMass += this->binMass[bin];
-
-  // Iterate over particles in the critical bin until we exceed critical density
-  int i = 0;
-  bool found = false;
-  this->charRadius = 0.0;
-
-  while (i < (int) this->binInfo[this->criticalBin].size() && found == false) {
-    double r = (double) this->binInfo[this->criticalBin][i].radius;
-    int index = this->binInfo[this->criticalBin][i].index;
-    totParticleMass += (double) this->mass[index];
-    double volume = ((4.0 * M_PI) / 3.0) * r * r * r;
-    double ratio = (totParticleMass / volume) / this->RHOC;
-
-    if (ratio < this->rhoRatio) {
-      this->criticalIndex = i;
-      this->charRadius = r;
-      found = true;
-    }
-    i++;
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Gather the accepted SOD particles from the bin RadiusID vector
-// into array of particle index on this processor and matching radius
-// Also collect some statistics
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::gatherSODParticles()
-{
-  // Allocate memory to hold the indices, using the bin counts as an upper bound
-  int totalCount = 0;
-  for (int bin = 0; bin <= this->criticalBin; bin++) {
-    totalCount += this->binCount[bin];
-  }
-  this->particleIndex = new int[totalCount];
-  this->particleRadius = new POSVEL_T[totalCount];
-
-  // Collect average velocity and average location of SOD particles
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    this->avgVelocity[dim] = 0.0;
-    this->avgLocation[dim] = 0.0;
-    this->centerOfMass[dim] = 0.0;
-  }
-
-  // Iterate over all bins less than the critical bin collecting particles
-  this->numberOfParticles = 0;
-  this->totalMass = 0.0;
-
-  for (int bin = 0; bin < this->criticalBin; bin++) {
-    for (int i = 0; i < (int) this->binInfo[bin].size(); i++) {
-      int p = this->binInfo[bin][i].index;
-
-      this->particleIndex[this->numberOfParticles] = p;
-      this->particleRadius[this->numberOfParticles] =
-        this->binInfo[bin][i].radius;
-      numberOfParticles++;
-      this->totalMass += (double) this->mass[p];
-
-      // Collect average location of SOD particles
-      this->avgLocation[0] += (double) this->xx[p];
-      this->avgLocation[1] += (double) this->yy[p];
-      this->avgLocation[2] += (double) this->zz[p];
-
-      // Collect center of mass of SOD particles
-      this->centerOfMass[0] += (double) this->xx[p] * (double) this->mass[p];
-      this->centerOfMass[1] += (double) this->yy[p] * (double) this->mass[p];
-      this->centerOfMass[2] += (double) this->zz[p] * (double) this->mass[p];
-
-      // Collect average velocity of SOD particles
-      this->avgVelocity[0] += (double) this->vx[p];
-      this->avgVelocity[1] += (double) this->vy[p];
-      this->avgVelocity[2] += (double) this->vz[p];
-    }
-  }
-
-  // Iterate over the critical bin to the critical index
-  for (int i = 0; i < this->criticalIndex; i++) {
-    int p = this->binInfo[criticalBin][i].index;
-
-    this->particleIndex[this->numberOfParticles] = p;
-    this->particleRadius[this->numberOfParticles] =
-      this->binInfo[criticalBin][i].radius;
-    this->numberOfParticles++;
-    this->totalMass += (double) this->mass[p];
-
-    // Collect average location of SOD particles
-    this->avgLocation[0] += (double) this->xx[p];
-    this->avgLocation[1] += (double) this->yy[p];
-    this->avgLocation[2] += (double) this->zz[p];
-
-    // Collect center of mass of SOD particles
-    this->centerOfMass[0] += (double) this->xx[p] * (double) this->mass[p];
-    this->centerOfMass[1] += (double) this->yy[p] * (double) this->mass[p];
-    this->centerOfMass[2] += (double) this->zz[p] * (double) this->mass[p];
-
-    // Collect average velocity of SOD particles
-    this->avgVelocity[0] += (double) this->vx[p];
-    this->avgVelocity[1] += (double) this->vy[p];
-    this->avgVelocity[2] += (double) this->vz[p];
-  }
-
-  for (int dim = 0; dim < DIMENSION; dim++) {
-    this->avgLocation[dim] /= this->numberOfParticles;
-    this->centerOfMass[dim] /= this->totalMass;
-    this->avgVelocity[dim] /= this->numberOfParticles;
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Copy locations, velocities and tags of halo particles to the allocated arrays
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::extractInformation(
-                        int* actualIndx,
-                        POSVEL_T* xLocHalo,
-                        POSVEL_T* yLocHalo,
-                        POSVEL_T* zLocHalo,
-                        POSVEL_T* xVelHalo,
-                        POSVEL_T* yVelHalo,
-                        POSVEL_T* zVelHalo,
-                        POSVEL_T* massHalo,
-                        POSVEL_T* radius,
-                        ID_T* id)
-{
-  for (int i = 0; i < this->numberOfParticles; i++) {
-    int p = this->particleIndex[i];
-    radius[i] = this->particleRadius[i];
-
-    xLocHalo[i] = this->xx[p];
-    yLocHalo[i] = this->yy[p];
-    zLocHalo[i] = this->zz[p];
-    xVelHalo[i] = this->vx[p];
-    yVelHalo[i] = this->vy[p];
-    zVelHalo[i] = this->vz[p];
-    massHalo[i] = this->mass[p];
-    id[i] = this->tag[p];
-    actualIndx[i] = p;
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the velocity dispersion of the SOD halo
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::calculateVelocityDispersion()
-{
-  POSVEL_T particleDot = 0.0;
-  for (int i = 0; i < this->numberOfParticles; i++) {
-    int p = this->particleIndex[i];
-    particleDot += dotProduct(this->vx[p], this->vy[p], this->vz[p]);
-  }
-
-  // Average of all the dot products
-  particleDot /= this->numberOfParticles;
-
-  // Dot product of the average velocity for the entire halo
-  POSVEL_T haloDot = dotProduct(avgVelocity[0], avgVelocity[1], avgVelocity[2]);
-
-  // Velocity dispersion
-  this->velocityDispersion = sqrt((particleDot - haloDot) / 3.0);
-}
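
calculateVelocityDispersion() above evaluates sigma = sqrt((<v.v> - <v>.<v>) / 3), i.e. the one-dimensional dispersion obtained from the mean squared speed minus the squared mean velocity of the halo. A standalone numeric sketch with hypothetical particle velocities:

  // Velocity dispersion as computed above; the four velocities are made up.
  #include <cmath>
  #include <cstdio>

  int main()
  {
    const int n = 4;
    double vx[n] = {100.0, 120.0,  90.0, 110.0};
    double vy[n] = { 10.0, -20.0,  15.0,  -5.0};
    double vz[n] = { 50.0,  55.0,  45.0,  60.0};

    double particleDot = 0.0, ax = 0.0, ay = 0.0, az = 0.0;
    for (int i = 0; i < n; i++) {
      particleDot += vx[i] * vx[i] + vy[i] * vy[i] + vz[i] * vz[i];
      ax += vx[i];  ay += vy[i];  az += vz[i];
    }
    particleDot /= n;  ax /= n;  ay /= n;  az /= n;

    double haloDot = ax * ax + ay * ay + az * az;   // dot product of mean velocity
    double velocityDispersion = std::sqrt((particleDot - haloDot) / 3.0);
    std::printf("velocity dispersion = %g\n", velocityDispersion);
    return 0;
  }
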
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Calculate the mass of the SOD halo
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::calculateMass()
-{
-  this->totalMass = 0.0;
-  for (int i = 0; i < this->numberOfParticles; i++) {
-    int p = this->particleIndex[i];
-    this->totalMass += this->mass[p];
-  }
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Return information for mass profile and mass density profile
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::SODProfile(
-                        int* bCount,
-                        POSVEL_T* bMass,
-                        POSVEL_T* bRadius,
-                        POSVEL_T* bRho,
-                        POSVEL_T* bRhoRatio,
-                        POSVEL_T* bRadVelocity)
-{
-  for (int bin = 1; bin < this->numberOfBins; bin++) {
-    bCount[bin-1] = this->binCount[bin];
-    bMass[bin-1] = (POSVEL_T) this->binMass[bin];
-    bRadius[bin-1] = (POSVEL_T) this->binRadius[bin];
-    bRho[bin-1] = (POSVEL_T) this->binRho[bin];
-    bRhoRatio[bin-1] = (POSVEL_T) this->binRhoRatio[bin];
-    bRadVelocity[bin-1] = (POSVEL_T) this->avgRadVelocity[bin];
-  }
-}
-
-void SODHalo::SODAverageLocation(POSVEL_T* pos)
-{
-  for (int dim = 0; dim < DIMENSION; dim++)
-    pos[dim] = (POSVEL_T) this->avgLocation[dim];
-}
-
-void SODHalo::SODCenterOfMass(POSVEL_T* com)
-{
-  for (int dim = 0; dim < DIMENSION; dim++)
-    com[dim] = (POSVEL_T) this->centerOfMass[dim];
-}
-
-void SODHalo::SODAverageVelocity(POSVEL_T* vel)
-{
-  for (int dim = 0; dim < DIMENSION; dim++)
-    vel[dim] = (POSVEL_T) this->avgVelocity[dim];
-}
-
-void SODHalo::SODVelocityDispersion(POSVEL_T* velDisp)
-{
-  (*velDisp) = (POSVEL_T) this->velocityDispersion;
-}
-
-void SODHalo::SODMass(POSVEL_T* sodmass)
-{
-  (*sodmass) = (POSVEL_T) this->totalMass;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Dot product of a vector
-//
-/////////////////////////////////////////////////////////////////////////
-
-POSVEL_T SODHalo::dotProduct(POSVEL_T x, POSVEL_T y, POSVEL_T z)
-{
-  POSVEL_T dotProd = x * x + y * y + z * z;
-  return dotProd;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Cubic spline from Numerical Recipes (altered for zero based arrays)
-// Called only once to process entire tabulated function
-//
-// Given arrays x[0..n-1] and y[0..n-1] containing a tabulated function
-// with x0 < x1 < .. < xn-1, and given values yp1 and ypn for the
-// first derivative of the interpolating function at points 0 and n-1,
-// this routine returns an array y2[0..n-1] that contains the second
-// derivatives of the interpolating function.  If yp1 or ypn > 1.0e30
-// the routine is signaled to set the corresponding boundary condition
-// for a natural spline, with zero second derivative on that boundary.
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::spline(
-                POSVEL_T* x, POSVEL_T* y,       // arrays
-                int n,                          // size of arrays
-                POSVEL_T* y2)                   // return array
-{
-  // Set boundary conditions
-  POSVEL_T yp1 = 1.0e31;
-  POSVEL_T ypn = 1.0e31;
-  POSVEL_T qn, un;
-  POSVEL_T* u = new POSVEL_T[n];
-
-  // Lower boundary condition set to natural spline
-  if (yp1 > 0.99e30)
-    y2[0] = u[0] = 0.0;
-
-  // Lower boundary condition set to specified first derivative
-  else {
-    y2[0] = -0.5;
-    u[0]=(3.0/(x[1]-x[0]))*((y[1]-y[0])/(x[1]-x[0])-yp1);
-  }
-
-  // Decomposition loop of tridiagonal algorithm
-  for (int i = 1; i < n-1; i++) {
-    POSVEL_T sig = (x[i] - x[i-1]) / (x[i+1] - x[i-1]);
-    POSVEL_T p = sig * y2[i-1] + 2.0;
-    y2[i] = (sig - 1.0) / p;
-    u[i] = (y[i+1] - y[i]) / (x[i+1] - x[i]) -
-           (y[i] - y[i-1]) / (x[i] - x[i-1]);
-    u[i] = (6.0 * u[i] / (x[i+1] - x[i-1]) - sig * u[i-1]) / p;
-  }
-
-  // Upper boundary condition set to natural spline
-  if (ypn > 0.99e30)
-    qn = un = 0.0;
-
-  // Upper boundary condition set to specified first derivative
-  else {
-    qn = 0.5;
-    un = (3.0 / (x[n-1] - x[n-2])) *
-         (ypn - (y[n-1] - y[n-2]) / (x[n-1] - x[n-2]));
-  }
-
-  // Back substitution loop of tridiagonal algorithm
-  y2[n-1] = (un - qn * u[n-2]) / (qn * y2[n-2] + 1.0);
-  for (int k = n - 2; k >= 0; k--)
-    y2[k] = y2[k] * y2[k+1] + u[k];
-
-#ifndef USE_VTK_COSMO
-  for (int i = 0; i < n; i++)
-    cout << "x " << x[i] << "   y " << y[i] << "    result " << y2[i] << endl;
-#endif
-
-  delete [] u;
-}
-
-/////////////////////////////////////////////////////////////////////////
-//
-// Cubic spline interpolation from Numerical Recipes
-// Called repeatedly after spline() has been called once
-// Given the x, y and y2 arrays from spline(), returns the interpolated value at x
-//
-/////////////////////////////////////////////////////////////////////////
-
-void SODHalo::splint(
-                POSVEL_T* xa, POSVEL_T* ya,     // arrays sent to spline
-                POSVEL_T* y2a,                  // result from spline
-                int n,                          // size of arrays
-                POSVEL_T x,                     // value at which to interpolate
-                POSVEL_T* y)                    // interpolated value
-{
-  // Find the right place in the table by means of bisection
-  // Optimal if sequential calls are at random values of x
-  int klo = 0;
-  int khi = n - 1;
-  while (khi - klo > 1) {
-    int k = (khi + klo + 1) >> 1;
-    if (xa[k] > x)
-      khi = k;
-    else
-      klo = k;
-  }
-
-  POSVEL_T h = xa[khi] - xa[klo];
-  POSVEL_T a = (xa[khi] - x) / h;
-  POSVEL_T b = (x - xa[klo]) / h;
-  *y = a * ya[klo] + b * ya[khi] +
-       ((a * a * a - a) * y2a[klo] +
-       (b * b * b - b) * y2a[khi]) * (h * h) / 6.0;
-}
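
The interpolation step in splint() can be checked against a function whose second derivatives are known exactly, since a cubic spline built with the true second derivatives reproduces any cubic polynomial on each interval. The helper below is a free-function transcription of that step for a single interval, written for illustration only (it is not part of the VTK sources):

  #include <cstdio>

  // Interpolation formula from splint() restricted to one interval [xlo, xhi].
  double cubicInterp(double xlo, double xhi, double ylo, double yhi,
                     double y2lo, double y2hi, double x)
  {
    double h = xhi - xlo;
    double a = (xhi - x) / h;
    double b = (x - xlo) / h;
    return a * ylo + b * yhi +
           ((a * a * a - a) * y2lo + (b * b * b - b) * y2hi) * (h * h) / 6.0;
  }

  int main()
  {
    // Tabulate f(x) = x^3 at the knots 1 and 2; its second derivative is
    // f''(x) = 6x, so the spline formula reproduces f exactly at x = 1.5.
    double y = cubicInterp(1.0, 2.0, 1.0, 8.0, 6.0, 12.0, 1.5);
    std::printf("interpolated %g (exact value 3.375)\n", y);
    return 0;
  }
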
diff --git a/ThirdParty/Cosmo/SODHalo.h b/ThirdParty/Cosmo/SODHalo.h
deleted file mode 100644
index 976a8bb..0000000
--- a/ThirdParty/Cosmo/SODHalo.h
+++ /dev/null
@@ -1,265 +0,0 @@
-/*=========================================================================
-
-Copyright (c) 2007, Los Alamos National Security, LLC
-
-All rights reserved.
-
-Copyright 2007. Los Alamos National Security, LLC.
-This software was produced under U.S. Government contract DE-AC52-06NA25396
-for Los Alamos National Laboratory (LANL), which is operated by
-Los Alamos National Security, LLC for the U.S. Department of Energy.
-The U.S. Government has rights to use, reproduce, and distribute this software.
-NEITHER THE GOVERNMENT NOR LOS ALAMOS NATIONAL SECURITY, LLC MAKES ANY WARRANTY,
-EXPRESS OR IMPLIED, OR ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE.
-If software is modified to produce derivative works, such modified software
-should be clearly marked, so as not to confuse it with the version available
-from LANL.
-
-Additionally, redistribution and use in source and binary forms, with or
-without modification, are permitted provided that the following conditions
-are met:
--   Redistributions of source code must retain the above copyright notice,
-    this list of conditions and the following disclaimer.
--   Redistributions in binary form must reproduce the above copyright notice,
-    this list of conditions and the following disclaimer in the documentation
-    and/or other materials provided with the distribution.
--   Neither the name of Los Alamos National Security, LLC, Los Alamos National
-    Laboratory, LANL, the U.S. Government, nor the names of its contributors
-    may be used to endorse or promote products derived from this software
-    without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY LOS ALAMOS NATIONAL SECURITY, LLC AND CONTRIBUTORS
-"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
-THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
-ARE DISCLAIMED. IN NO EVENT SHALL LOS ALAMOS NATIONAL SECURITY, LLC OR
-CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
-EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
-PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
-OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
-WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
-OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
-ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-=========================================================================*/
-
-// .NAME SODHalo - calculate properties of all SOD halos
-//
-// .SECTION Description
-// SODHalo takes data from CosmoHaloFinderP about individual halos
-// and data from all particles and calculates properties.
-//
-
-#ifndef SODHalo_h
-#define SODHalo_h
-
-#ifdef USE_VTK_COSMO
-#include "CosmoDefinition.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-#else
-#include "Definition.h"
-#include <string>
-#include <vector>
-
-using namespace std;
-#endif
-
-#include "ChainingMesh.h"
-
-///////////////////////////////////////////////////////////////////////////
-//
-// To calculate the exact r_200 store the distance and mass of each particle
-// within the sphere.  When the particles are sorted by distance from the
-// center, the exact density at that particle can be calculated
-//
-///////////////////////////////////////////////////////////////////////////
-
-struct RadiusID {
-  POSVEL_T radius;
-  int index;
-};
-
-class RadiusIDLT {
-public:
-  bool operator() (const RadiusID& p, const RadiusID& q) const
-  {
-  return p.radius < q.radius;
-  }
-};
-
-///////////////////////////////////////////////////////////////////////////
-//
-// SOD Halo creation using either exact density or approximate with bins
-//
-///////////////////////////////////////////////////////////////////////////
-
-#ifdef USE_VTK_COSMO
-class COSMO_EXPORT SODHalo
-#else
-class SODHalo
-#endif
-{
-public:
-  SODHalo();
-  ~SODHalo();
-
-  // Set parameters for SOD calculation
-  void setParameters(
-        ChainingMesh* chain,    // Particles arranged in buckets
-        int numBins,            // Estimation density bins
-        POSVEL_T rL,            // Box size of the physical problem
-        POSVEL_T np,            // Grid size of problem
-        POSVEL_T rho_c,         // Critical density of universe
-        POSVEL_T sodMassFactor, // Factor used in initial radius
-        POSVEL_T rhoRatio,      // rho / rho_c for virial radius
-        POSVEL_T minFactor,     // Min factor for initial radius range
-        POSVEL_T maxFactor);    // Max factor for initial radius range
-
-  // Set alive particle vectors which were created elsewhere
-  void setParticles(
-        vector<POSVEL_T>* xLoc,
-        vector<POSVEL_T>* yLoc,
-        vector<POSVEL_T>* zLoc,
-        vector<POSVEL_T>* xVel,
-        vector<POSVEL_T>* yVel,
-        vector<POSVEL_T>* zVel,
-        vector<POSVEL_T>* pmass,
-        vector<ID_T>* id);
-
-  /////////////////////////////////////////////////////////////////////
-  //
-  // SOD (Spherical over density) halo analysis
-  //
-  /////////////////////////////////////////////////////////////////////
-
-  // Spherical over-density (SOD) mass profile, velocity dispersion
-  void createSODHalo(
-        int FOFhaloCount,       // FOF particle count
-        POSVEL_T centerXLoc,    // FOF center location for SOD
-        POSVEL_T centerYLoc,    // FOF center location for SOD
-        POSVEL_T centerZLoc,    // FOF center location for SOD
-        POSVEL_T fofHaloXVel,   // FOF halo velocity for SOD radial velocity
-        POSVEL_T fofHaloYVel,   // FOF halo velocity for SOD radial velocity
-        POSVEL_T fofHaloZVel,   // FOF halo velocity for SOD radial velocity
-        POSVEL_T fofHaloMass);  // FOF halo mass for SOD
-
-  // Create the SOD mass profile used to calculate characteristic radius
-  void calculateMassProfile();
-
-  // Calculate the characteristic radius of an SOD halo
-  void calculateCharacteristicRadius();
-
-  // Gather all particles belonging to the SOD halo
-  // Collect average velocity at the same time
-  void gatherSODParticles();
-
-  // Calculate velocity dispersion
-  void calculateVelocityDispersion();
-
-  // Calculate mass
-  void calculateMass();
-
-  // Utilities
-  POSVEL_T dotProduct(POSVEL_T x, POSVEL_T y, POSVEL_T z);
-  void spline(
-        POSVEL_T* x, POSVEL_T* y, int n,
-        POSVEL_T* y2);
-  void splint(
-        POSVEL_T* xa, POSVEL_T* ya, POSVEL_T* y2a, int n,
-        POSVEL_T x, POSVEL_T* y);
-
-  int SODHaloSize()             { return this->numberOfParticles; }
-  POSVEL_T SODRadius()          { return this->charRadius; }
-  int* SODParticles()           { return this->particleIndex; }
-
-  void SODAverageLocation(POSVEL_T* pos);
-  void SODCenterOfMass(POSVEL_T* com);
-  void SODAverageVelocity(POSVEL_T* vel);
-  void SODVelocityDispersion(POSVEL_T* velDisp);
-  void SODMass(POSVEL_T* mass);
-  void SODProfile(
-        int* bCount,
-        POSVEL_T* bMass,
-        POSVEL_T* bRadius,
-        POSVEL_T* bRho,
-        POSVEL_T* bRhoRatio,
-        POSVEL_T* bRadVelocity);
-
-  // Extract information for all particles in SOD halo
-  void extractInformation(
-        int* actualIndx,
-        POSVEL_T* xLocHalo,
-        POSVEL_T* yLocHalo,
-        POSVEL_T* zLocHalo,
-        POSVEL_T* xVelHalo,
-        POSVEL_T* yVelHalo,
-        POSVEL_T* zVelHalo,
-        POSVEL_T* pmass,
-        POSVEL_T* radius,
-        ID_T* tag);
-
-private:
-  int    myProc;                // My processor number
-  int    numProc;               // Total number of processors
-
-  ChainingMesh* chain;          // Buckets of particles on processor
-  int*** buckets;               // First particle index into bucketList
-  int* bucketList;              // Indices of next particle in halo
-
-  int minFOFHaloSize;           // Minimum FOF size for building SOD
-  int numberOfBins;             // Estimation density concentric spheres
-  POSVEL_T rhoRatio;            // rho / rho_c for virial radius
-  POSVEL_T cMinFactor;          // Min factor for initial radius range
-  POSVEL_T cMaxFactor;          // Max factor for initial radius range
-  POSVEL_T rSmooth;             // boxSize / gridSize for getting minimum radius
-  POSVEL_T RHOC;                // RHO_C * factor to get units right
-  POSVEL_T SODMASS;             // SOD_MASS * factor to get units right
-
-  long   particleCount;         // Total particles on this processor
-
-  POSVEL_T* xx;                 // X location for particles on this processor
-  POSVEL_T* yy;                 // Y location for particles on this processor
-  POSVEL_T* zz;                 // Z location for particles on this processor
-  POSVEL_T* vx;                 // X velocity for particles on this processor
-  POSVEL_T* vy;                 // Y velocity for particles on this processor
-  POSVEL_T* vz;                 // Z velocity for particles on this processor
-  POSVEL_T* mass;               // Mass of particles on this processor
-  ID_T* tag;                    // Tag of particles on this processor
-
-  // Information about this SOD halo
-  POSVEL_T initRadius;          // First guess at radius based on FOF size
-  POSVEL_T minRadius;           // Smallest radius to bin spheres on
-  POSVEL_T maxRadius;           // Largest radius to bin spheres on
-  POSVEL_T deltaRadius;         // Step on log bins from min to max radius
-  POSVEL_T charRadius;          // Characteristic radius (r_200)
-
-  int*    binCount;             // Number of particles assigned to bin
-  double* binMass;              // Mass of SOD at this bin
-  double* binRho;               // Density of SOD at this bin
-  double* binRhoRatio;          // Density ratio of SOD at this bin
-  double* avgRadius;            // Average radius of particles assigned to bin
-  double* avgRadVelocity;       // Average radial velocity of particles in bin
-  POSVEL_T* binRadius;          // Max radius of a log bin
-  vector<RadiusID>* binInfo;    // Particles in bin with radius
-
-  int criticalBin;              // Bin holding the critical density ratio
-  int criticalIndex;            // Index in critical bin of critical radius
-
-  int numberOfParticles;        // Number in this SOD halo
-  int* particleIndex;           // Indices of particles in this halo
-  POSVEL_T* particleRadius;     // Matching radius of particles in this halo
-
-  int      fofHaloCount;                // FOF particle count
-  POSVEL_T fofCenterLocation[DIMENSION];// FOF center particle location
-  POSVEL_T fofHaloVelocity[DIMENSION];  // FOF average velocity of all particles
-
-  double avgVelocity[DIMENSION];        // SOD average velocity of all particles
-  double avgLocation[DIMENSION];        // SOD average location of particles
-  double centerOfMass[DIMENSION];       // SOD center of mass
-  double velocityDispersion;            // SOD velocity dispersion
-  double totalMass;                     // SOD total mass
-};
-
-#endif
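
The RadiusID / RadiusIDLT pair declared above supports the exact search for the characteristic radius: particles are kept as (radius, index) pairs, sorted by distance from the center, and the enclosed density M(<r) / ((4/3) pi r^3) is evaluated particle by particle until it falls below the target overdensity, as calculateCharacteristicRadius() does within the critical bin. A self-contained sketch; the masses, radii, critical density and the factor of 200 are all hypothetical:

  #include <algorithm>
  #include <cmath>
  #include <cstdio>
  #include <vector>

  struct RadiusID { double radius; int index; };

  struct RadiusIDLT {
    bool operator()(const RadiusID& p, const RadiusID& q) const
    { return p.radius < q.radius; }
  };

  int main()
  {
    const double PI = 3.14159265358979323846;
    const double rhoc = 2.775e11;       // critical density (illustrative units)
    const double targetRatio = 200.0;   // overdensity defining r_200

    std::vector<RadiusID> particles = {
      {0.05, 0}, {0.02, 1}, {0.11, 2}, {0.08, 3}, {0.20, 4} };
    const double particleMass = 1.0e11; // equal masses for simplicity

    // Order the particles outward from the halo center
    std::sort(particles.begin(), particles.end(), RadiusIDLT());

    double enclosedMass = 0.0, charRadius = 0.0;
    for (size_t i = 0; i < particles.size(); i++) {
      enclosedMass += particleMass;
      double r = particles[i].radius;
      double volume = (4.0 * PI / 3.0) * r * r * r;
      double ratio = (enclosedMass / volume) / rhoc;
      if (ratio < targetRatio) {        // enclosed density dropped below target
        charRadius = r;
        break;
      }
    }
    std::printf("characteristic radius = %g\n", charRadius);
    return 0;
  }
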
diff --git a/ThirdParty/Cosmo/module.cmake b/ThirdParty/Cosmo/module.cmake
deleted file mode 100644
index 5ebfb08..0000000
--- a/ThirdParty/Cosmo/module.cmake
+++ /dev/null
@@ -1,6 +0,0 @@
-vtk_module(vtkCosmo
-  DEPENDS
-  vtkCommonCore
-  vtksys
-  EXCLUDE_FROM_WRAPPING
-  )
diff --git a/ThirdParty/Cosmo/winDirent.h b/ThirdParty/Cosmo/winDirent.h
deleted file mode 100644
index c36bc42..0000000
--- a/ThirdParty/Cosmo/winDirent.h
+++ /dev/null
@@ -1,232 +0,0 @@
-/*****************************************************************************
- * dirent.h - dirent API for Microsoft Visual Studio
- *
- * Copyright (C) 2006 Toni Ronkko
- *
- * Permission is hereby granted, free of charge, to any person obtaining
- * a copy of this software and associated documentation files (the
- * ``Software''), to deal in the Software without restriction, including
- * without limitation the rights to use, copy, modify, merge, publish,
- * distribute, sublicense, and/or sell copies of the Software, and to
- * permit persons to whom the Software is furnished to do so, subject to
- * the following conditions:
- *
- * The above copyright notice and this permission notice shall be included
- * in all copies or substantial portions of the Software.
- *
- * THE SOFTWARE IS PROVIDED ``AS IS'', WITHOUT WARRANTY OF ANY KIND, EXPRESS
- * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
- * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
- * IN NO EVENT SHALL TONI RONKKO BE LIABLE FOR ANY CLAIM, DAMAGES OR
- * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
- * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
- * OTHER DEALINGS IN THE SOFTWARE.
- *
- * Dec 15, 2009, John Cunningham
- * Added rewinddir member function
- *
- * Jan 18, 2008, Toni Ronkko
- * Using FindFirstFileA and WIN32_FIND_DATAA to avoid converting string
- * between multi-byte and unicode representations.  This makes the
- * code simpler and also allows the code to be compiled under MingW.  Thanks
- * to Azriel Fasten for the suggestion.
- *
- * Mar 4, 2007, Toni Ronkko
- * Bug fix: due to the strncpy_s() function this file only compiled in
- * Visual Studio 2005.  Using the new string functions only when the
- * compiler version allows.
- *
- * Nov  2, 2006, Toni Ronkko
- * Major update: removed support for Watcom C, MS-DOS and Turbo C to
- * simplify the file, updated the code to compile cleanly on Visual
- * Studio 2005 with both unicode and multi-byte character strings,
- * removed rewinddir() as it had a bug.
- *
- * Aug 20, 2006, Toni Ronkko
- * Removed all remarks about MSVC 1.0, which is antiqued now.  Simplified
- * comments by removing SGML tags.
- *
- * May 14 2002, Toni Ronkko
- * Embedded the function definitions directly to the header so that no
- * source modules need to be included in the Visual Studio project.  Removed
- * all the dependencies to other projects so that this very header can be
- * used independently.
- *
- * May 28 1998, Toni Ronkko
- * First version.
- *****************************************************************************/
-#ifndef __WINDOWS_DIRENT_H
-#define __WINDOWS_DIRENT_H
-
-#include <windows.h>
-#include <string.h>
-#include <assert.h>
-
-typedef struct dirent
-{
-   char d_name[MAX_PATH + 1]; /* current dir entry (multi-byte char string) */
-   WIN32_FIND_DATAA data;     /* file attributes */
-}  dirent;
-
-
-typedef struct DIR
-{
-   dirent current;            /* Current directory entry */
-   int    cached;             /* Indicates un-processed entry in memory */
-   HANDLE search_handle;      /* File search handle */
-   char   patt[MAX_PATH + 3]; /* search pattern (3 = pattern + "\\*\0") */
-} DIR;
-
-
-/* Forward declarations */
-static DIR *opendir (const char *dirname);
-static struct dirent *readdir (DIR *dirp);
-static int closedir (DIR *dirp);
-
-#ifndef USE_VTK_COSMO
-static void rewinddir(DIR* dirp);
-#endif
-
-/* Use the new safe string functions introduced in Visual Studio 2005 */
-#if defined(_MSC_VER) && _MSC_VER >= 1400
-# define STRNCPY(dest,src,size) strncpy_s((dest),(size),(src),_TRUNCATE)
-#else
-# define STRNCPY(dest,src,size) strncpy((dest),(src),(size))
-#endif
-
-
-/*****************************************************************************
- * Open directory stream DIRNAME for read and return a pointer to the
- * internal working area that is used to retrieve individual directory
- * entries.
- */
-static DIR *opendir(const char *dirname)
-{
-   DIR *dirp;
-   assert (dirname != NULL);
-   assert (strlen (dirname) < MAX_PATH);
-
-   /* construct new DIR structure */
-   dirp = (DIR*) malloc (sizeof (struct DIR));
-   if (dirp != NULL) {
-      char *p;
-
-      /* take directory name... */
-      STRNCPY (dirp->patt, dirname, sizeof(dirp->patt));
-      dirp->patt[MAX_PATH] = '\0';
-
-      /* ... and append search pattern to it */
-      p = strchr (dirp->patt, '\0');
-      if (dirp->patt < p  &&  *(p-1) != '\\'  &&  *(p-1) != ':') {
-         *p++ = '\\';
-      }
-      *p++ = '*';
-      *p = '\0';
-
-      /* open stream and retrieve first file */
-      dirp->search_handle = FindFirstFileA (dirp->patt, &dirp->current.data);
-      if (dirp->search_handle == INVALID_HANDLE_VALUE) {
-         /* invalid search pattern? */
-         free (dirp);
-         return NULL;
-      }
-
-      /* there is an un-processed directory entry in memory now */
-      dirp->cached = 1;
-   }
-
-   return dirp;
-}
-
-
-/*****************************************************************************
- * Read a directory entry, and return a pointer to a dirent structure
- * containing the name of the entry in d_name field.  Individual directory
- * entries returned by this function include regular files, sub-directories
- * and the pseudo-directories "." and ".."; volume labels, hidden files and
- * system files may also be returned.
- */
-static struct dirent *readdir(DIR *dirp)
-{
-   assert (dirp != NULL);
-
-   if (dirp->search_handle == INVALID_HANDLE_VALUE) {
-      /* directory stream was opened/rewound incorrectly or ended normally */
-      return NULL;
-   }
-
-   /* get next directory entry */
-   if (dirp->cached != 0) {
-      /* a valid directory entry already in memory */
-      dirp->cached = 0;
-   } else {
-      /* read next directory entry from disk */
-      if (FindNextFileA (dirp->search_handle, &dirp->current.data) == FALSE) {
-         /* the very last file has been processed or an error occurred */
-         FindClose (dirp->search_handle);
-         dirp->search_handle = INVALID_HANDLE_VALUE;
-         return NULL;
-      }
-   }
-
-   /* copy as a multibyte character string */
-   STRNCPY ( dirp->current.d_name,
-             dirp->current.data.cFileName,
-             sizeof(dirp->current.d_name) );
-   dirp->current.d_name[MAX_PATH] = '\0';
-
-   return &dirp->current;
-}
-
-/*****************************************************************************
- * Close directory stream opened by opendir() function.  Close of the
- * directory stream invalidates the DIR structure as well as any previously
- * read directory entry.
- */
-static int closedir(DIR *dirp)
-{
-   assert (dirp != NULL);
-
-   /* release search handle */
-   if (dirp->search_handle != INVALID_HANDLE_VALUE) {
-      FindClose (dirp->search_handle);
-      dirp->search_handle = INVALID_HANDLE_VALUE;
-   }
-
-   /* release directory handle */
-   free (dirp);
-   return 0;
-}
-
-#ifndef USE_VTK_COSMO
-
-/*****************************************************************************
- * Resets the position of the directory stream to which dirp refers to the
- * beginning of the directory. It also causes the directory stream to refer
- * to the current state of the corresponding directory, as a call to opendir()
- * would have done. If dirp does not refer to a directory stream, the effect
- * is undefined.
- */
-static void rewinddir(DIR* dirp)
-{
-   /* release search handle */
-   if (dirp->search_handle != INVALID_HANDLE_VALUE) {
-      FindClose (dirp->search_handle);
-      dirp->search_handle = INVALID_HANDLE_VALUE;
-   }
-
-   /* open new search handle and retrieve first file */
-   dirp->search_handle = FindFirstFileA (dirp->patt, &dirp->current.data);
-   if (dirp->search_handle == INVALID_HANDLE_VALUE) {
-      /* invalid search pattern? */
-      free (dirp);
-      return;
-   }
-
-   /* there is an un-processed directory entry in memory now */
-   dirp->cached = 1;
-}
-
-#endif
-
-#endif /*__WINDOWS_DIRENT_H*/
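
The header above emulates the POSIX opendir/readdir/closedir interface on Windows, so directory traversal code can follow the usual pattern. A usage sketch; on POSIX systems the same loop compiles against the system <dirent.h>, and the include path for the emulation header is an assumption:

  #include <cstdio>
  #ifdef _WIN32
  #  include "winDirent.h"   // the emulation header shown above
  #else
  #  include <dirent.h>
  #endif

  int main()
  {
    DIR* dirp = opendir(".");          // open the current directory
    if (dirp == NULL) {
      std::perror("opendir");
      return 1;
    }

    struct dirent* entry;
    while ((entry = readdir(dirp)) != NULL) {
      // Entries include regular files, sub-directories and the
      // pseudo-directories "." and ".."
      std::printf("%s\n", entry->d_name);
    }

    closedir(dirp);
    return 0;
  }
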
diff --git a/ThirdParty/TclTk/resources/tk8.3/win/rc/CMakeLists.txt b/ThirdParty/TclTk/resources/tk8.3/win/rc/CMakeLists.txt
index 489cefb..d16f317 100644
--- a/ThirdParty/TclTk/resources/tk8.3/win/rc/CMakeLists.txt
+++ b/ThirdParty/TclTk/resources/tk8.3/win/rc/CMakeLists.txt
@@ -6,7 +6,7 @@ IF(VTK_USE_TK AND VTK_TCL_TK_STATIC)
       FILE(GLOB tkResourceFiles "*.bmp" "*.cur" "*.ico" "*.manifest" "*.rc")
       INSTALL(FILES
         ${tkResourceFiles}
-        DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/TclTk/resources/tk8.3/win/rc
+        DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/TclTk/resources/tk8.3/win/rc
         COMPONENT Development
         )
     ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/TclTk/resources/tk8.4/win/rc/CMakeLists.txt b/ThirdParty/TclTk/resources/tk8.4/win/rc/CMakeLists.txt
index 1435cbb..6839f95 100644
--- a/ThirdParty/TclTk/resources/tk8.4/win/rc/CMakeLists.txt
+++ b/ThirdParty/TclTk/resources/tk8.4/win/rc/CMakeLists.txt
@@ -6,7 +6,7 @@ IF(VTK_USE_TK AND VTK_TCL_TK_STATIC)
       FILE(GLOB tkResourceFiles "*.bmp" "*.cur" "*.ico" "*.manifest" "*.rc")
       INSTALL(FILES
         ${tkResourceFiles}
-        DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/TclTk/resources/tk8.4/win/rc
+        DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/TclTk/resources/tk8.4/win/rc
         COMPONENT Development
         )
     ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/TclTk/resources/tk8.5/win/rc/CMakeLists.txt b/ThirdParty/TclTk/resources/tk8.5/win/rc/CMakeLists.txt
index 4fbec84..f7bf734 100644
--- a/ThirdParty/TclTk/resources/tk8.5/win/rc/CMakeLists.txt
+++ b/ThirdParty/TclTk/resources/tk8.5/win/rc/CMakeLists.txt
@@ -6,7 +6,7 @@ IF(VTK_USE_TK AND VTK_TCL_TK_STATIC)
       FILE(GLOB tkResourceFiles "*.bmp" "*.cur" "*.ico" "*.manifest" "*.rc")
       INSTALL(FILES
         ${tkResourceFiles}
-        DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/TclTk/resources/tk8.5/win/rc
+        DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/TclTk/resources/tk8.5/win/rc
         COMPONENT Development
         )
     ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/TclTk/resources/tk8.6/win/rc/CMakeLists.txt b/ThirdParty/TclTk/resources/tk8.6/win/rc/CMakeLists.txt
index 49ab809..156576d 100644
--- a/ThirdParty/TclTk/resources/tk8.6/win/rc/CMakeLists.txt
+++ b/ThirdParty/TclTk/resources/tk8.6/win/rc/CMakeLists.txt
@@ -6,7 +6,7 @@ IF(VTK_USE_TK AND VTK_TCL_TK_STATIC)
       FILE(GLOB tkResourceFiles "*.bmp" "*.cur" "*.ico" "*.manifest" "*.rc")
       INSTALL(FILES
         ${tkResourceFiles}
-        DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/TclTk/resources/tk8.6/win/rc
+        DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/TclTk/resources/tk8.6/win/rc
         COMPONENT Development
         )
     ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/Twisted/CMakeLists.txt b/ThirdParty/Twisted/CMakeLists.txt
new file mode 100644
index 0000000..1e3e09b
--- /dev/null
+++ b/ThirdParty/Twisted/CMakeLists.txt
@@ -0,0 +1,20 @@
+vtk_module_impl()
+vtk_module_export("")
+
+option(VTK_USE_SYSTEM_TWISTED "Use system Twisted Python package" OFF)
+mark_as_advanced(VTK_USE_SYSTEM_TWISTED)
+
+if(NOT VTK_USE_SYSTEM_TWISTED)
+  find_package(PythonInterp)
+
+  include(vtkPythonPackages)
+
+  set(Twisted_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/twisted")
+  set(Twisted_BINARY_DIR "${VTK_BUILD_PYTHON_MODULE_DIR}/twisted")
+
+  build_python_package("Twisted" ${Twisted_SOURCE_DIR} ${Twisted_BINARY_DIR})
+
+  install(DIRECTORY ${Twisted_BINARY_DIR}
+    DESTINATION "${VTK_INSTALL_PYTHON_MODULE_DIR}"
+    COMPONENT Runtime)
+endif()
diff --git a/ThirdParty/Twisted/LICENSE b/ThirdParty/Twisted/LICENSE
new file mode 100644
index 0000000..159debb
--- /dev/null
+++ b/ThirdParty/Twisted/LICENSE
@@ -0,0 +1,57 @@
+Copyright (c) 2001-2012
+Allen Short
+Andy Gayton
+Andrew Bennetts
+Antoine Pitrou
+Apple Computer, Inc.
+Benjamin Bruheim
+Bob Ippolito
+Canonical Limited
+Christopher Armstrong
+David Reid
+Donovan Preston
+Eric Mangold
+Eyal Lotem
+Itamar Turner-Trauring
+James Knight
+Jason A. Mobarak
+Jean-Paul Calderone
+Jessica McKellar
+Jonathan Jacobs
+Jonathan Lange
+Jonathan D. Simms
+Jürgen Hermann
+Kevin Horn
+Kevin Turner
+Mary Gardiner
+Matthew Lefkowitz
+Massachusetts Institute of Technology
+Moshe Zadka
+Paul Swartz
+Pavel Pergamenshchik
+Ralph Meijer
+Sean Riley
+Software Freedom Conservancy
+Travis B. Hartwell
+Thijs Triemstra
+Thomas Herve
+Timothy Allen
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/ThirdParty/Twisted/README b/ThirdParty/Twisted/README
new file mode 100644
index 0000000..bd06813
--- /dev/null
+++ b/ThirdParty/Twisted/README
@@ -0,0 +1,117 @@
+Twisted 12.3.0
+
+Quote of the Release:
+
+
+  <PenguinOfDoom> yeah twisted should get on with the times
+  <PenguinOfDoom> the rest of the world has been writing unicode to sockets for years!
+
+
+For information on what's new in Twisted 12.3.0, see the NEWS file that comes
+with the distribution.
+
+What is this?
+=============
+
+  Twisted is an event-based framework for internet applications.  It includes
+  modules for many different purposes, including the following:
+
+  - twisted.application
+    A "Service" system that allows you to organize your application in
+    hierarchies with well-defined startup and dependency semantics,
+  - twisted.cred
+    A general credentials and authentication system that facilitates
+    pluggable authentication backends,
+  - twisted.enterprise
+    Asynchronous database access, compatible with any Python DBAPI2.0
+    modules,
+  - twisted.internet
+    Low-level asynchronous networking APIs that allow you to define
+    your own protocols that run over certain transports,
+  - twisted.manhole
+    A tool for remote debugging of your services which gives you a
+    Python interactive interpreter,
+  - twisted.protocols
+    Basic protocol implementations and helpers for your own protocol
+    implementations,
+  - twisted.python
+    A large set of utilities for Python tricks, reflection, text
+    processing, and anything else,
+  - twisted.spread
+    A secure, fast remote object system,
+  - twisted.trial
+    A unit testing framework that integrates well with Twisted-based code.
+
+  Twisted supports integration of the Win32, Tk, GTK+ and GTK+ 2 event loops
+  with its main event loop.  There is experimental support for Mac OS X and
+  wxPython event loop integration, which you use at your peril.
+
+  For more information, visit http://www.twistedmatrix.com, or join the list
+  at http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-python
+
+  There are many official Twisted subprojects, including clients and
+  servers for web, mail, DNS, and more. You can find out more about
+  these projects at http://twistedmatrix.com/trac/wiki/TwistedProjects
+
+
+Installing
+==========
+
+  Instructions for installing this software are in INSTALL.
+
+Unit Tests
+==========
+
+
+  See our unit tests run proving that the software is BugFree(TM):
+
+   % trial twisted
+
+  Some of these tests may fail if you
+   * don't have the dependencies required for a particular subsystem installed,
+   * have a firewall blocking some ports (or things like Multicast, which Linux
+     NAT has shown itself to do), or
+   * run them as root.
+
+
+Documentation and Support
+=========================
+
+  Examples on how to use Twisted APIs are located in doc/core/examples; this
+  might ease the learning curve a little bit, since all these files are kept
+  as short as possible.  The file doc/core/howto/index.xhtml contains an index
+  of all the core HOWTOs: this should be your starting point when looking for
+  documentation.
+
+  Help is available on the Twisted mailing list:
+
+    http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-python
+
+  There is also a very lively IRC channel, #twisted, on
+  chat.freenode.net.
+
+
+Copyright
+=========
+
+  All of the code in this distribution is Copyright (c) 2001-2012
+  Twisted Matrix Laboratories.
+
+  Twisted is made available under the MIT license. The included
+  LICENSE file describes this in detail.
+
+
+Warranty
+========
+
+  THIS SOFTWARE IS PROVIDED "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER
+  EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+  OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+  TO THE USE OF THIS SOFTWARE IS WITH YOU.
+
+  IN NO EVENT WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+  AND/OR REDISTRIBUTE THE LIBRARY, BE LIABLE TO YOU FOR ANY DAMAGES, EVEN IF
+  SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+  DAMAGES.
+
+  Again, see the included LICENSE file for specific legal details.
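
The README above describes twisted.internet as the low-level, event-based
networking layer. As a point of reference, here is a minimal echo-server
sketch in the style that layer expects (assuming the standard Twisted 12.x
protocol API; the port number is arbitrary):

    from twisted.internet import protocol, reactor

    class Echo(protocol.Protocol):
        # Write every chunk we receive straight back to the peer.
        def dataReceived(self, data):
            self.transport.write(data)

    factory = protocol.ServerFactory()
    factory.protocol = Echo
    reactor.listenTCP(8000, factory)  # bind TCP port 8000
    reactor.run()                     # enter the event loop
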
diff --git a/ThirdParty/Twisted/module.cmake b/ThirdParty/Twisted/module.cmake
new file mode 100644
index 0000000..6131bd3
--- /dev/null
+++ b/ThirdParty/Twisted/module.cmake
@@ -0,0 +1,5 @@
+vtk_module(Twisted
+  DEPENDS
+    ZopeInterface
+    vtkPython
+  EXCLUDE_FROM_WRAPPING)
diff --git a/ThirdParty/Twisted/twisted/__init__.py b/ThirdParty/Twisted/twisted/__init__.py
new file mode 100644
index 0000000..34963f3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/__init__.py
@@ -0,0 +1,62 @@
+# -*- test-case-name: twisted -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Twisted: The Framework Of Your Internet.
+"""
+
+def _checkRequirements():
+    # Don't allow the user to run a version of Python we don't support.
+    import sys
+
+    version = getattr(sys, "version_info", (0,))
+    if version < (2, 6):
+        raise ImportError("Twisted requires Python 2.6 or later.")
+    if version < (3, 0):
+        required = "3.6.0"
+    else:
+        required = "4.0.0"
+
+    if ("setuptools" in sys.modules and
+        getattr(sys.modules["setuptools"],
+                "_TWISTED_NO_CHECK_REQUIREMENTS", None) is not None):
+        # Skip requirement checks, setuptools ought to take care of installing
+        # the dependencies.
+        return
+
+    # Don't allow the user to run with a version of zope.interface we don't
+    # support.
+    required = "Twisted requires zope.interface %s or later" % (required,)
+    try:
+        from zope import interface
+    except ImportError:
+        # It isn't installed.
+        raise ImportError(required + ": no module named zope.interface.")
+    except:
+        # It is installed but not compatible with this version of Python.
+        raise ImportError(required + ".")
+    try:
+        # Try using the API that we need, which only works right with
+        # zope.interface 3.6 (or 4.0 on Python 3)
+        class IDummy(interface.Interface):
+            pass
+        @interface.implementer(IDummy)
+        class Dummy(object):
+            pass
+    except TypeError:
+        # It is installed but not compatible with this version of Python.
+        raise ImportError(required + ".")
+
+_checkRequirements()
+
+# Ensure compat gets imported
+from twisted.python import compat
+
+# setup version
+from twisted._version import version
+__version__ = version.short()
+
+del compat
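
The requirement check in twisted/__init__.py above exercises the
zope.interface declaration API (an interface class plus the implementer
decorator). A small sketch of that pattern, assuming zope.interface 3.6 or
later as the check requires:

    from zope.interface import Interface, implementer

    class IGreeter(Interface):
        def greet(name):
            """Return a greeting for the given name."""

    @implementer(IGreeter)          # the call _checkRequirements() probes for
    class Greeter(object):
        def greet(self, name):
            return "hello, %s" % (name,)
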
diff --git a/ThirdParty/Twisted/twisted/_version.py b/ThirdParty/Twisted/twisted/_version.py
new file mode 100644
index 0000000..48bf12c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/application/__init__.py b/ThirdParty/Twisted/twisted/application/__init__.py
new file mode 100644
index 0000000..c155ca4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/application/__init__.py
@@ -0,0 +1,7 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+"""
+Configuration objects for Twisted Applications
+"""
diff --git a/ThirdParty/Twisted/twisted/application/app.py b/ThirdParty/Twisted/twisted/application/app.py
new file mode 100644
index 0000000..97f7a42
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/application/app.py
@@ -0,0 +1,674 @@
+# -*- test-case-name: twisted.test.test_application,twisted.test.test_twistd -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys, os, pdb, getpass, traceback, signal
+from operator import attrgetter
+
+from twisted.python import runtime, log, usage, failure, util, logfile
+from twisted.python.versions import Version
+from twisted.python.reflect import qual, namedAny
+from twisted.python.deprecate import deprecated
+from twisted.python.log import ILogObserver
+from twisted.persisted import sob
+from twisted.application import service, reactors
+from twisted.internet import defer
+from twisted import copyright, plugin
+
+# Expose the new implementation of installReactor at the old location.
+from twisted.application.reactors import installReactor
+from twisted.application.reactors import NoSuchReactor
+
+
+
+class _BasicProfiler(object):
+    """
+    @ivar saveStats: if C{True}, save the stats information instead of the
+        human readable format
+    @type saveStats: C{bool}
+
+    @ivar profileOutput: the name of the file used to print profile data.
+    @type profileOutput: C{str}
+    """
+
+    def __init__(self, profileOutput, saveStats):
+        self.profileOutput = profileOutput
+        self.saveStats = saveStats
+
+
+    def _reportImportError(self, module, e):
+        """
+        Helper method to report an import error with a profile module. This
+        has to be explicit because some of these modules are removed by
+        distributions due to them being non-free.
+        """
+        s = "Failed to import module %s: %s" % (module, e)
+        s += """
+This is most likely caused by your operating system not including
+the module due to it being non-free. Either do not use the option
+--profile, or install the module; your operating system vendor
+may provide it in a separate package.
+"""
+        raise SystemExit(s)
+
+
+
+class ProfileRunner(_BasicProfiler):
+    """
+    Runner for the standard profile module.
+    """
+
+    def run(self, reactor):
+        """
+        Run reactor under the standard profiler.
+        """
+        try:
+            import profile
+        except ImportError, e:
+            self._reportImportError("profile", e)
+
+        p = profile.Profile()
+        p.runcall(reactor.run)
+        if self.saveStats:
+            p.dump_stats(self.profileOutput)
+        else:
+            tmp, sys.stdout = sys.stdout, open(self.profileOutput, 'a')
+            try:
+                p.print_stats()
+            finally:
+                sys.stdout, tmp = tmp, sys.stdout
+                tmp.close()
+
+
+
+class HotshotRunner(_BasicProfiler):
+    """
+    Runner for the hotshot profile module.
+    """
+
+    def run(self, reactor):
+        """
+        Run reactor under the hotshot profiler.
+        """
+        try:
+            import hotshot.stats
+        except (ImportError, SystemExit), e:
+            # Certain versions of Debian (and Debian derivatives) raise
+            # SystemExit when importing hotshot if the "non-free" profiler
+            # module is not installed.  Someone eventually recognized this
+            # as a bug and changed the Debian packaged Python to raise
+            # ImportError instead.  Handle both exception types here in
+            # order to support the versions of Debian which have this
+            # behavior.  The bug report which prompted the introduction of
+            # this highly undesirable behavior should be available online at
+            # <http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=334067>.
+            # There seems to be no corresponding bug report which resulted
+            # in the behavior being removed. -exarkun
+            self._reportImportError("hotshot", e)
+
+        # this writes stats straight out
+        p = hotshot.Profile(self.profileOutput)
+        p.runcall(reactor.run)
+        if self.saveStats:
+            # stats are automatically written to file, nothing to do
+            return
+        else:
+            s = hotshot.stats.load(self.profileOutput)
+            s.strip_dirs()
+            s.sort_stats(-1)
+            if getattr(s, 'stream', None) is not None:
+                # Python 2.5 and above supports a stream attribute
+                s.stream = open(self.profileOutput, 'w')
+                s.print_stats()
+                s.stream.close()
+            else:
+                # But we have to use a trick for Python < 2.5
+                tmp, sys.stdout = sys.stdout, open(self.profileOutput, 'w')
+                try:
+                    s.print_stats()
+                finally:
+                    sys.stdout, tmp = tmp, sys.stdout
+                    tmp.close()
+
+
+
+class CProfileRunner(_BasicProfiler):
+    """
+    Runner for the cProfile module.
+    """
+
+    def run(self, reactor):
+        """
+        Run reactor under the cProfile profiler.
+        """
+        try:
+            import cProfile, pstats
+        except ImportError, e:
+            self._reportImportError("cProfile", e)
+
+        p = cProfile.Profile()
+        p.runcall(reactor.run)
+        if self.saveStats:
+            p.dump_stats(self.profileOutput)
+        else:
+            stream = open(self.profileOutput, 'w')
+            s = pstats.Stats(p, stream=stream)
+            s.strip_dirs()
+            s.sort_stats(-1)
+            s.print_stats()
+            stream.close()
+
+
+
+class AppProfiler(object):
+    """
+    Class which selects a specific profile runner based on configuration
+    options.
+
+    @ivar profiler: the name of the selected profiler.
+    @type profiler: C{str}
+    """
+    profilers = {"profile": ProfileRunner, "hotshot": HotshotRunner,
+                 "cprofile": CProfileRunner}
+
+    def __init__(self, options):
+        saveStats = options.get("savestats", False)
+        profileOutput = options.get("profile", None)
+        self.profiler = options.get("profiler", "hotshot").lower()
+        if self.profiler in self.profilers:
+            profiler = self.profilers[self.profiler](profileOutput, saveStats)
+            self.run = profiler.run
+        else:
+            raise SystemExit("Unsupported profiler name: %s" % (self.profiler,))
+
+
+
+class AppLogger(object):
+    """
+    Class managing the logging facility of the application.
+
+    @ivar _logfilename: The name of the file to which to log, if other than the
+        default.
+    @type _logfilename: C{str}
+
+    @ivar _observerFactory: Callable object that will create a log observer, or
+        None.
+
+    @ivar _observer: log observer added at C{start} and removed at C{stop}.
+    @type _observer: C{callable}
+    """
+    _observer = None
+
+    def __init__(self, options):
+        self._logfilename = options.get("logfile", "")
+        self._observerFactory = options.get("logger") or None
+
+
+    def start(self, application):
+        """
+        Initialize the logging system.
+
+        If a custom logger was specified on the command line it will be
+        used. If not, and an L{ILogObserver} component has been set on
+        C{application}, then it will be used as the log observer.  Otherwise a
+        log observer will be created based on the command-line options for
+        built-in loggers (e.g. C{--logfile}).
+
+        @param application: The application on which to check for an
+            L{ILogObserver}.
+        """
+        if self._observerFactory is not None:
+            observer = self._observerFactory()
+        else:
+            observer = application.getComponent(ILogObserver, None)
+
+        if observer is None:
+            observer = self._getLogObserver()
+        self._observer = observer
+        log.startLoggingWithObserver(self._observer)
+        self._initialLog()
+
+
+    def _initialLog(self):
+        """
+        Print twistd start log message.
+        """
+        from twisted.internet import reactor
+        log.msg("twistd %s (%s %s) starting up." % (copyright.version,
+                                                   sys.executable,
+                                                   runtime.shortPythonVersion()))
+        log.msg('reactor class: %s.' % (qual(reactor.__class__),))
+
+
+    def _getLogObserver(self):
+        """
+        Create a log observer to be added to the logging system before running
+        this application.
+        """
+        if self._logfilename == '-' or not self._logfilename:
+            logFile = sys.stdout
+        else:
+            logFile = logfile.LogFile.fromFullPath(self._logfilename)
+        return log.FileLogObserver(logFile).emit
+
+
+    def stop(self):
+        """
+        Print twistd stop log message.
+        """
+        log.msg("Server Shut Down.")
+        if self._observer is not None:
+            log.removeObserver(self._observer)
+            self._observer = None
+
+
+
+def fixPdb():
+    def do_stop(self, arg):
+        self.clear_all_breaks()
+        self.set_continue()
+        from twisted.internet import reactor
+        reactor.callLater(0, reactor.stop)
+        return 1
+
+
+    def help_stop(self):
+        print """stop - Continue execution, then cleanly shutdown the twisted reactor."""
+
+
+    def set_quit(self):
+        os._exit(0)
+
+    pdb.Pdb.set_quit = set_quit
+    pdb.Pdb.do_stop = do_stop
+    pdb.Pdb.help_stop = help_stop
+
+
+
+def runReactorWithLogging(config, oldstdout, oldstderr, profiler=None, reactor=None):
+    """
+    Start the reactor, using profiling if specified by the configuration, and
+    log any error happening in the process.
+
+    @param config: configuration of the twistd application.
+    @type config: L{ServerOptions}
+
+    @param oldstdout: initial value of C{sys.stdout}.
+    @type oldstdout: C{file}
+
+    @param oldstderr: initial value of C{sys.stderr}.
+    @type oldstderr: C{file}
+
+    @param profiler: object used to run the reactor with profiling.
+    @type profiler: L{AppProfiler}
+
+    @param reactor: The reactor to use.  If C{None}, the global reactor will
+        be used.
+    """
+    if reactor is None:
+        from twisted.internet import reactor
+    try:
+        if config['profile']:
+            if profiler is not None:
+                profiler.run(reactor)
+        elif config['debug']:
+            sys.stdout = oldstdout
+            sys.stderr = oldstderr
+            if runtime.platformType == 'posix':
+                signal.signal(signal.SIGUSR2, lambda *args: pdb.set_trace())
+                signal.signal(signal.SIGINT, lambda *args: pdb.set_trace())
+            fixPdb()
+            pdb.runcall(reactor.run)
+        else:
+            reactor.run()
+    except:
+        if config['nodaemon']:
+            file = oldstdout
+        else:
+            file = open("TWISTD-CRASH.log",'a')
+        traceback.print_exc(file=file)
+        file.flush()
+
+
+
+def getPassphrase(needed):
+    if needed:
+        return getpass.getpass('Passphrase: ')
+    else:
+        return None
+
+
+
+def getSavePassphrase(needed):
+    if needed:
+        passphrase = util.getPassword("Encryption passphrase: ")
+    else:
+        return None
+
+
+
+class ApplicationRunner(object):
+    """
+    An object which helps running an application based on a config object.
+
+    Subclass me and implement preApplication and postApplication
+    methods. postApplication generally will want to run the reactor
+    after starting the application.
+
+    @ivar config: The config object, which provides a dict-like interface.
+
+    @ivar application: Available in postApplication, but not
+       preApplication. This is the application object.
+
+    @ivar profilerFactory: Factory for creating a profiler object, able to
+        profile the application if options are set accordingly.
+
+    @ivar profiler: Instance provided by C{profilerFactory}.
+
+    @ivar loggerFactory: Factory for creating object responsible for logging.
+
+    @ivar logger: Instance provided by C{loggerFactory}.
+    """
+    profilerFactory = AppProfiler
+    loggerFactory = AppLogger
+
+    def __init__(self, config):
+        self.config = config
+        self.profiler = self.profilerFactory(config)
+        self.logger = self.loggerFactory(config)
+
+
+    def run(self):
+        """
+        Run the application.
+        """
+        self.preApplication()
+        self.application = self.createOrGetApplication()
+
+        self.logger.start(self.application)
+
+        self.postApplication()
+        self.logger.stop()
+
+
+    def startReactor(self, reactor, oldstdout, oldstderr):
+        """
+        Run the reactor with the given configuration.  Subclasses should
+        probably call this from C{postApplication}.
+
+        @see: L{runReactorWithLogging}
+        """
+        runReactorWithLogging(
+            self.config, oldstdout, oldstderr, self.profiler, reactor)
+
+
+    def preApplication(self):
+        """
+        Override in subclass.
+
+        This should set up any state necessary before loading and
+        running the Application.
+        """
+        raise NotImplementedError()
+
+
+    def postApplication(self):
+        """
+        Override in subclass.
+
+        This will be called after the application has been loaded (so
+        the C{application} attribute will be set). Generally this
+        should start the application and run the reactor.
+        """
+        raise NotImplementedError()
+
+
+    def createOrGetApplication(self):
+        """
+        Create or load an Application based on the parameters found in the
+        given L{ServerOptions} instance.
+
+        If a subcommand was used, the L{service.IServiceMaker} that it
+        represents will be used to construct a service to be added to
+        a newly-created Application.
+
+        Otherwise, an application will be loaded based on parameters in
+        the config.
+        """
+        if self.config.subCommand:
+            # If a subcommand was given, it's our responsibility to create
+            # the application, instead of loading it from a file.
+
+            # loadedPlugins is set up by the ServerOptions.subCommands
+            # property, which is iterated somewhere in the bowels of
+            # usage.Options.
+            plg = self.config.loadedPlugins[self.config.subCommand]
+            ser = plg.makeService(self.config.subOptions)
+            application = service.Application(plg.tapname)
+            ser.setServiceParent(application)
+        else:
+            passphrase = getPassphrase(self.config['encrypted'])
+            application = getApplication(self.config, passphrase)
+        return application
+
+
+
+def getApplication(config, passphrase):
+    s = [(config[t], t)
+           for t in ['python', 'source', 'file'] if config[t]][0]
+    filename, style = s[0], {'file':'pickle'}.get(s[1],s[1])
+    try:
+        log.msg("Loading %s..." % filename)
+        application = service.loadApplication(filename, style, passphrase)
+        log.msg("Loaded.")
+    except Exception, e:
+        s = "Failed to load application: %s" % e
+        if isinstance(e, KeyError) and e.args[0] == "application":
+            s += """
+Could not find 'application' in the file. To use 'twistd -y', your .tac
+file must create a suitable object (e.g., by calling service.Application())
+and store it in a variable named 'application'. twistd loads your .tac file
+and scans the global variables for one of this name.
+
+Please read the 'Using Application' HOWTO for details.
+"""
+        traceback.print_exc(file=log.logfile)
+        log.msg(s)
+        log.deferr()
+        sys.exit('\n' + s + '\n')
+    return application
+
+
+
+def _reactorAction():
+    return usage.CompleteList([r.shortName for r in reactors.getReactorTypes()])
+
+
+class ReactorSelectionMixin:
+    """
+    Provides options for selecting a reactor to install.
+
+    If a reactor is installed, the short name which was used to locate it is
+    saved as the value for the C{"reactor"} key.
+    """
+    compData = usage.Completions(
+        optActions={"reactor": _reactorAction})
+
+    messageOutput = sys.stdout
+    _getReactorTypes = staticmethod(reactors.getReactorTypes)
+
+
+    def opt_help_reactors(self):
+        """
+        Display a list of possibly available reactor names.
+        """
+        rcts = sorted(self._getReactorTypes(), key=attrgetter('shortName'))
+        for r in rcts:
+            self.messageOutput.write('    %-4s\t%s\n' %
+                                     (r.shortName, r.description))
+        raise SystemExit(0)
+
+
+    def opt_reactor(self, shortName):
+        """
+        Which reactor to use (see --help-reactors for a list of possibilities)
+        """
+        # Actually actually actually install the reactor right at this very
+        # moment, before any other code (for example, a sub-command plugin)
+        # runs and accidentally imports and installs the default reactor.
+        #
+        # This could probably be improved somehow.
+        try:
+            installReactor(shortName)
+        except NoSuchReactor:
+            msg = ("The specified reactor does not exist: '%s'.\n"
+                   "See the list of available reactors with "
+                   "--help-reactors" % (shortName,))
+            raise usage.UsageError(msg)
+        except Exception, e:
+            msg = ("The specified reactor cannot be used, failed with error: "
+                   "%s.\nSee the list of available reactors with "
+                   "--help-reactors" % (e,))
+            raise usage.UsageError(msg)
+        else:
+            self["reactor"] = shortName
+    opt_r = opt_reactor
+
+
+
+
+class ServerOptions(usage.Options, ReactorSelectionMixin):
+
+    longdesc = ("twistd reads a twisted.application.service.Application out "
+                "of a file and runs it.")
+
+    optFlags = [['savestats', None,
+                 "save the Stats object rather than the text output of "
+                 "the profiler."],
+                ['no_save','o',   "do not save state on shutdown"],
+                ['encrypted', 'e',
+                 "The specified tap/aos file is encrypted."]]
+
+    optParameters = [['logfile','l', None,
+                      "log to a specified file, - for stdout"],
+                     ['logger', None, None,
+                      "A fully-qualified name to a log observer factory to use "
+                      "for the initial log observer.  Takes precedence over "
+                      "--logfile and --syslog (when available)."],
+                     ['profile', 'p', None,
+                      "Run in profile mode, dumping results to specified file"],
+                     ['profiler', None, "hotshot",
+                      "Name of the profiler to use (%s)." %
+                      ", ".join(AppProfiler.profilers)],
+                     ['file','f','twistd.tap',
+                      "read the given .tap file"],
+                     ['python','y', None,
+                      "read an application from within a Python file "
+                      "(implies -o)"],
+                     ['source', 's', None,
+                      "Read an application from a .tas file (AOT format)."],
+                     ['rundir','d','.',
+                      'Change to a supplied directory before running']]
+
+    compData = usage.Completions(
+        mutuallyExclusive=[("file", "python", "source")],
+        optActions={"file": usage.CompleteFiles("*.tap"),
+                    "python": usage.CompleteFiles("*.(tac|py)"),
+                    "source": usage.CompleteFiles("*.tas"),
+                    "rundir": usage.CompleteDirs()}
+        )
+
+    _getPlugins = staticmethod(plugin.getPlugins)
+
+    def __init__(self, *a, **kw):
+        self['debug'] = False
+        usage.Options.__init__(self, *a, **kw)
+
+
+    def opt_debug(self):
+        """
+        Run the application in the Python Debugger (implies nodaemon),
+        sending SIGUSR2 will drop into debugger
+        """
+        defer.setDebugging(True)
+        failure.startDebugMode()
+        self['debug'] = True
+    opt_b = opt_debug
+
+
+    def opt_spew(self):
+        """
+        Print an insanely verbose log of everything that happens.
+        Useful when debugging freezes or locks in complex code."""
+        sys.settrace(util.spewer)
+        try:
+            import threading
+        except ImportError:
+            return
+        threading.settrace(util.spewer)
+
+
+    def parseOptions(self, options=None):
+        if options is None:
+            options = sys.argv[1:] or ["--help"]
+        usage.Options.parseOptions(self, options)
+
+
+    def postOptions(self):
+        if self.subCommand or self['python']:
+            self['no_save'] = True
+        if self['logger'] is not None:
+            try:
+                self['logger'] = namedAny(self['logger'])
+            except Exception, e:
+                raise usage.UsageError("Logger '%s' could not be imported: %s" 
+                                       % (self['logger'], e))
+
+
+    def subCommands(self):
+        plugins = self._getPlugins(service.IServiceMaker)
+        self.loadedPlugins = {}
+        for plug in sorted(plugins, key=attrgetter('tapname')):
+            self.loadedPlugins[plug.tapname] = plug
+            yield (plug.tapname,
+                   None,
+                   # Avoid resolving the options attribute right away, in case
+                   # it's a property with a non-trivial getter (eg, one which
+                   # imports modules).
+                   lambda plug=plug: plug.options(),
+                   plug.description)
+    subCommands = property(subCommands)
+
+
+
+def run(runApp, ServerOptions):
+    config = ServerOptions()
+    try:
+        config.parseOptions()
+    except usage.error, ue:
+        print config
+        print "%s: %s" % (sys.argv[0], ue)
+    else:
+        runApp(config)
+
+
+
+def convertStyle(filein, typein, passphrase, fileout, typeout, encrypt):
+    application = service.loadApplication(filein, typein, passphrase)
+    sob.IPersistable(application).setStyle(typeout)
+    passphrase = getSavePassphrase(encrypt)
+    if passphrase:
+        fileout = None
+    sob.IPersistable(application).save(filename=fileout, passphrase=passphrase)
+
+
+
+def startApplication(application, save):
+    from twisted.internet import reactor
+    service.IService(application).startService()
+    if save:
+         p = sob.IPersistable(application)
+         reactor.addSystemEventTrigger('after', 'shutdown', p.save, 'shutdown')
+    reactor.addSystemEventTrigger('before', 'shutdown',
+                                  service.IService(application).stopService)
+
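app.py above wires option parsing, logging and profiling together; AppProfiler
in particular maps the profiler names "profile", "hotshot" and "cprofile" onto
runner classes that call reactor.run() under the chosen profiler. A sketch of
driving it directly (twistd normally supplies the options mapping; the plain
dict and output file name here are illustrative only):

    from twisted.application.app import AppProfiler
    from twisted.internet import reactor

    options = {"profiler": "cprofile",    # selects CProfileRunner
               "profile": "twistd.prof",  # where to write the output
               "savestats": True}         # dump raw Stats instead of text
    profiler = AppProfiler(options)
    profiler.run(reactor)                 # blocks: runs reactor.run() under cProfile
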
diff --git a/ThirdParty/Twisted/twisted/application/internet.py b/ThirdParty/Twisted/twisted/application/internet.py
new file mode 100644
index 0000000..a30f418
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/application/internet.py
@@ -0,0 +1,365 @@
+# -*- test-case-name: twisted.application.test.test_internet,twisted.test.test_application,twisted.test.test_cooperator -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Reactor-based Services
+
+Here are services to run clients, servers and periodic services using
+the reactor.
+
+If you want to run a server service, L{StreamServerEndpointService} defines a
+service that can wrap an arbitrary L{IStreamServerEndpoint
+<twisted.internet.interfaces.IStreamServerEndpoint>}
+as an L{IService}. See also L{twisted.application.strports.service} for
+constructing one of these directly from a descriptive string.
+
+Additionally, this module (dynamically) defines various Service subclasses that
+let you represent clients and servers in a Service hierarchy.  Endpoints APIs
+should be preferred for stream server services, but since those APIs do not yet
+exist for clients or datagram services, many of these are still useful.
+
+They are as follows::
+
+  TCPServer, TCPClient,
+  UNIXServer, UNIXClient,
+  SSLServer, SSLClient,
+  UDPServer, UDPClient,
+  UNIXDatagramServer, UNIXDatagramClient,
+  MulticastServer
+
+These classes take arbitrary arguments in their constructors and pass
+them straight on to their respective reactor.listenXXX or
+reactor.connectXXX calls.
+
+For example, the following service starts a web server on port 8080:
+C{TCPServer(8080, server.Site(r))}.  See the documentation for the
+reactor.listen/connect* methods for more information.
+"""
+
+import warnings
+
+from twisted.python import log
+from twisted.application import service
+from twisted.internet import task
+
+from twisted.internet.defer import CancelledError
+
+
+def _maybeGlobalReactor(maybeReactor):
+    """
+    @return: the argument, or the global reactor if the argument is C{None}.
+    """
+    if maybeReactor is None:
+        from twisted.internet import reactor
+        return reactor
+    else:
+        return maybeReactor
+
+
+class _VolatileDataService(service.Service):
+
+    volatile = []
+
+    def __getstate__(self):
+        d = service.Service.__getstate__(self)
+        for attr in self.volatile:
+            if attr in d:
+                del d[attr]
+        return d
+
+
+
+class _AbstractServer(_VolatileDataService):
+    """
+    @cvar volatile: list of attributes to remove from pickling.
+    @type volatile: C{list}
+
+    @ivar method: the type of method to call on the reactor, one of B{TCP},
+        B{UDP}, B{SSL} or B{UNIX}.
+    @type method: C{str}
+
+    @ivar reactor: the current running reactor.
+    @type reactor: a provider of C{IReactorTCP}, C{IReactorUDP},
+        C{IReactorSSL} or C{IReactorUnix}.
+
+    @ivar _port: instance of port set when the service is started.
+    @type _port: a provider of L{twisted.internet.interfaces.IListeningPort}.
+    """
+
+    volatile = ['_port']
+    method = None
+    reactor = None
+
+    _port = None
+
+    def __init__(self, *args, **kwargs):
+        self.args = args
+        if 'reactor' in kwargs:
+            self.reactor = kwargs.pop("reactor")
+        self.kwargs = kwargs
+
+
+    def privilegedStartService(self):
+        service.Service.privilegedStartService(self)
+        self._port = self._getPort()
+
+
+    def startService(self):
+        service.Service.startService(self)
+        if self._port is None:
+            self._port = self._getPort()
+
+
+    def stopService(self):
+        service.Service.stopService(self)
+        # TODO: if startup failed, should shutdown skip stopListening?
+        # _port won't exist
+        if self._port is not None:
+            d = self._port.stopListening()
+            del self._port
+            return d
+
+
+    def _getPort(self):
+        """
+        Wrapper around the appropriate listen method of the reactor.
+
+        @return: the port object returned by the listen method.
+        @rtype: an object providing
+            L{twisted.internet.interfaces.IListeningPort}.
+        """
+        return getattr(_maybeGlobalReactor(self.reactor),
+                       'listen%s' % (self.method,))(*self.args, **self.kwargs)
+
+
+
+class _AbstractClient(_VolatileDataService):
+    """
+    @cvar volatile: list of attributes to remove from pickling.
+    @type volatile: C{list}
+
+    @ivar method: the type of method to call on the reactor, one of B{TCP},
+        B{UDP}, B{SSL} or B{UNIX}.
+    @type method: C{str}
+
+    @ivar reactor: the current running reactor.
+    @type reactor: a provider of C{IReactorTCP}, C{IReactorUDP},
+        C{IReactorSSL} or C{IReactorUnix}.
+
+    @ivar _connection: instance of connection set when the service is started.
+    @type _connection: a provider of L{twisted.internet.interfaces.IConnector}.
+    """
+    volatile = ['_connection']
+    method = None
+    reactor = None
+
+    _connection = None
+
+    def __init__(self, *args, **kwargs):
+        self.args = args
+        if 'reactor' in kwargs:
+            self.reactor = kwargs.pop("reactor")
+        self.kwargs = kwargs
+
+
+    def startService(self):
+        service.Service.startService(self)
+        self._connection = self._getConnection()
+
+
+    def stopService(self):
+        service.Service.stopService(self)
+        if self._connection is not None:
+            self._connection.disconnect()
+            del self._connection
+
+
+    def _getConnection(self):
+        """
+        Wrapper around the appropriate connect method of the reactor.
+
+        @return: the port object returned by the connect method.
+        @rtype: an object providing L{twisted.internet.interfaces.IConnector}.
+        """
+        return getattr(_maybeGlobalReactor(self.reactor),
+                       'connect%s' % (self.method,))(*self.args, **self.kwargs)
+
+
+
+_doc={
+'Client':
+"""Connect to %(tran)s
+
+Call reactor.connect%(tran)s when the service starts, with the
+arguments given to the constructor.
+""",
+'Server':
+"""Serve %(tran)s clients
+
+Call reactor.listen%(tran)s when the service starts, with the
+arguments given to the constructor. When the service stops,
+stop listening. See twisted.internet.interfaces for documentation
+on arguments to the reactor method.
+""",
+}
+
+import types
+for tran in 'TCP UNIX SSL UDP UNIXDatagram Multicast'.split():
+    for side in 'Server Client'.split():
+        if tran == "Multicast" and side == "Client":
+            continue
+        base = globals()['_Abstract'+side]
+        doc = _doc[side] % vars()
+        klass = types.ClassType(tran+side, (base,),
+                                {'method': tran, '__doc__': doc})
+        globals()[tran+side] = klass
+
+
+
+class TimerService(_VolatileDataService):
+
+    """Service to periodically call a function
+
+    Every C{step} seconds call the given function with the given arguments.
+    The service starts the calls when it starts, and cancels them
+    when it stops.
+    """
+
+    volatile = ['_loop']
+
+    def __init__(self, step, callable, *args, **kwargs):
+        self.step = step
+        self.call = (callable, args, kwargs)
+
+    def startService(self):
+        service.Service.startService(self)
+        callable, args, kwargs = self.call
+        # we have to make a new LoopingCall each time we're started, because
+        # an active LoopingCall remains active when serialized. If
+        # LoopingCall were a _VolatileDataService, we wouldn't need to do
+        # this.
+        self._loop = task.LoopingCall(callable, *args, **kwargs)
+        self._loop.start(self.step, now=True).addErrback(self._failed)
+
+    def _failed(self, why):
+        # make a note that the LoopingCall is no longer looping, so we don't
+        # try to shut it down a second time in stopService. I think this
+        # should be in LoopingCall. -warner
+        self._loop.running = False
+        log.err(why)
+
+    def stopService(self):
+        if self._loop.running:
+            self._loop.stop()
+        return service.Service.stopService(self)
+
+
+
+class CooperatorService(service.Service):
+    """
+    Simple L{service.IService} which starts and stops a L{twisted.internet.task.Cooperator}.
+    """
+    def __init__(self):
+        self.coop = task.Cooperator(started=False)
+
+
+    def coiterate(self, iterator):
+        return self.coop.coiterate(iterator)
+
+
+    def startService(self):
+        self.coop.start()
+
+
+    def stopService(self):
+        self.coop.stop()
+
+
+
+class StreamServerEndpointService(service.Service, object):
+    """
+    A L{StreamServerEndpointService} is an L{IService} which runs a server on a
+    listening port described by an L{IStreamServerEndpoint
+    <twisted.internet.interfaces.IStreamServerEndpoint>}.
+
+    @ivar factory: A server factory which will be used to listen on the
+        endpoint.
+
+    @ivar endpoint: An L{IStreamServerEndpoint
+        <twisted.internet.interfaces.IStreamServerEndpoint>} provider
+        which will be used to listen when the service starts.
+
+    @ivar _waitingForPort: a Deferred, if C{listen} has yet been invoked on the
+        endpoint, otherwise None.
+
+    @ivar _raiseSynchronously: Defines error-handling behavior for the case
+        where C{listen(...)} raises an exception before C{startService} or
+        C{privilegedStartService} have completed.
+
+    @type _raiseSynchronously: C{bool}
+
+    @since: 10.2
+    """
+
+    _raiseSynchronously = None
+
+    def __init__(self, endpoint, factory):
+        self.endpoint = endpoint
+        self.factory = factory
+        self._waitingForPort = None
+
+
+    def privilegedStartService(self):
+        """
+        Start listening on the endpoint.
+        """
+        service.Service.privilegedStartService(self)
+        self._waitingForPort = self.endpoint.listen(self.factory)
+        raisedNow = []
+        def handleIt(err):
+            if self._raiseSynchronously:
+                raisedNow.append(err)
+            elif not err.check(CancelledError):
+                log.err(err)
+        self._waitingForPort.addErrback(handleIt)
+        if raisedNow:
+            raisedNow[0].raiseException()
+
+
+    def startService(self):
+        """
+        Start listening on the endpoint, unless L{privilegedStartService} got
+        around to it already.
+        """
+        service.Service.startService(self)
+        if self._waitingForPort is None:
+            self.privilegedStartService()
+
+
+    def stopService(self):
+        """
+        Stop listening on the port if it is already listening, otherwise,
+        cancel the attempt to listen.
+
+        @return: a L{Deferred<twisted.internet.defer.Deferred>} which fires
+            with C{None} when the port has stopped listening.
+        """
+        self._waitingForPort.cancel()
+        def stopIt(port):
+            if port is not None:
+                return port.stopListening()
+        d = self._waitingForPort.addCallback(stopIt)
+        def stop(passthrough):
+            self.running = False
+            return passthrough
+        d.addBoth(stop)
+        return d
+
+
+
+__all__ = (['TimerService', 'CooperatorService', 'MulticastServer',
+            'StreamServerEndpointService'] +
+           [tran+side
+            for tran in 'TCP UNIX SSL UDP UNIXDatagram'.split()
+            for side in 'Server Client'.split()])
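
The module docstring above ties the dynamically generated
TCPServer/TCPClient/... services and TimerService into a service hierarchy. A
.tac-style sketch of that pattern, assuming the standard Twisted 12.x APIs
(the port and interval are arbitrary):

    from twisted.application import service, internet
    from twisted.internet import protocol
    from twisted.python import log

    class Echo(protocol.Protocol):
        def dataReceived(self, data):
            self.transport.write(data)

    factory = protocol.ServerFactory()
    factory.protocol = Echo

    # twistd -y looks for a module-level 'application' object.
    application = service.Application("echo-demo")
    internet.TCPServer(8080, factory).setServiceParent(application)
    internet.TimerService(60, log.msg, "heartbeat").setServiceParent(application)
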
diff --git a/ThirdParty/Twisted/twisted/application/reactors.py b/ThirdParty/Twisted/twisted/application/reactors.py
new file mode 100644
index 0000000..6bae985
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/application/reactors.py
@@ -0,0 +1,83 @@
+# -*- test-case-name: twisted.test.test_application -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Plugin-based system for enumerating available reactors and installing one of
+them.
+"""
+
+from zope.interface import Interface, Attribute, implements
+
+from twisted.plugin import IPlugin, getPlugins
+from twisted.python.reflect import namedAny
+
+
+class IReactorInstaller(Interface):
+    """
+    Definition of a reactor which can probably be installed.
+    """
+    shortName = Attribute("""
+    A brief string giving the user-facing name of this reactor.
+    """)
+
+    description = Attribute("""
+    A longer string giving a user-facing description of this reactor.
+    """)
+
+    def install():
+        """
+        Install this reactor.
+        """
+
+    # TODO - A method which provides a best-guess as to whether this reactor
+    # can actually be used in the execution environment.
+
+
+
+class NoSuchReactor(KeyError):
+    """
+    Raised when an attempt is made to install a reactor which cannot be found.
+    """
+
+
+class Reactor(object):
+    """
+    @ivar moduleName: The fully-qualified Python name of the module of which
+    the install callable is an attribute.
+    """
+    implements(IPlugin, IReactorInstaller)
+
+
+    def __init__(self, shortName, moduleName, description):
+        self.shortName = shortName
+        self.moduleName = moduleName
+        self.description = description
+
+
+    def install(self):
+        namedAny(self.moduleName).install()
+
+
+
+def getReactorTypes():
+    """
+    Return an iterator of L{IReactorInstaller} plugins.
+    """
+    return getPlugins(IReactorInstaller)
+
+
+
+def installReactor(shortName):
+    """
+    Install the reactor with the given C{shortName} attribute.
+
+    @raise NoSuchReactor: If no reactor is found with a matching C{shortName}.
+
+    @raise: anything that the specified reactor can raise when installed.
+    """
+    for installer in getReactorTypes():
+        if installer.shortName == shortName:
+            return installer.install()
+    raise NoSuchReactor(shortName)
+
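installReactor() above must run before anything imports
twisted.internet.reactor, since importing the default reactor installs it as a
side effect. A brief sketch (the "poll" short name is only an example and may
not be available on every platform):

    from twisted.application.reactors import installReactor, NoSuchReactor

    try:
        installReactor("poll")   # must happen before the reactor import below
    except NoSuchReactor:
        pass                     # fall back to whatever reactor is the default

    from twisted.internet import reactor
    reactor.run()
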
diff --git a/ThirdParty/Twisted/twisted/application/service.py b/ThirdParty/Twisted/twisted/application/service.py
new file mode 100644
index 0000000..66fef85
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/application/service.py
@@ -0,0 +1,413 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Service architecture for Twisted.
+
+Services are arranged in a hierarchy. At the leaves of the hierarchy,
+the services which actually interact with the outside world are started.
+Services can be named or anonymous -- usually, they will be named if
+there is need to access them through the hierarchy (from a parent or
+a sibling).
+
+Maintainer: Moshe Zadka
+"""
+
+from zope.interface import implements, Interface, Attribute
+
+from twisted.python.reflect import namedAny
+from twisted.python import components
+from twisted.internet import defer
+from twisted.persisted import sob
+from twisted.plugin import IPlugin
+
+
+class IServiceMaker(Interface):
+    """
+    An object which can be used to construct services in a flexible
+    way.
+
+    This interface should most often be implemented along with
+    L{twisted.plugin.IPlugin}, and will most often be used by the
+    'twistd' command.
+    """
+    tapname = Attribute(
+        "A short string naming this Twisted plugin, for example 'web' or "
+        "'pencil'. This name will be used as the subcommand of 'twistd'.")
+
+    description = Attribute(
+        "A brief summary of the features provided by this "
+        "Twisted application plugin.")
+
+    options = Attribute(
+        "A C{twisted.python.usage.Options} subclass defining the "
+        "configuration options for this application.")
+
+
+    def makeService(options):
+        """
+        Create and return an object providing
+        L{twisted.application.service.IService}.
+
+        @param options: A mapping (typically a C{dict} or
+        L{twisted.python.usage.Options} instance) of configuration
+        options to desired configuration values.
+        """
+
+
+
+class ServiceMaker(object):
+    """
+    Utility class to simplify the definition of L{IServiceMaker} plugins.
+    """
+    implements(IPlugin, IServiceMaker)
+
+    def __init__(self, name, module, description, tapname):
+        self.name = name
+        self.module = module
+        self.description = description
+        self.tapname = tapname
+
+
+    def options():
+        def get(self):
+            return namedAny(self.module).Options
+        return get,
+    options = property(*options())
+
+
+    def makeService():
+        def get(self):
+            return namedAny(self.module).makeService
+        return get,
+    makeService = property(*makeService())
+
+
+
+class IService(Interface):
+    """
+    A service.
+
+    Run start-up and shut-down code at the appropriate times.
+
+    @type name:            C{string}
+    @ivar name:            The name of the service (or None)
+    @type running:         C{boolean}
+    @ivar running:         Whether the service is running.
+    """
+
+    def setName(name):
+        """
+        Set the name of the service.
+
+        @type name: C{str}
+        @raise RuntimeError: Raised if the service already has a parent.
+        """
+
+    def setServiceParent(parent):
+        """
+        Set the parent of the service.  This method is responsible for setting
+        the C{parent} attribute on this service (the child service).
+
+        @type parent: L{IServiceCollection}
+        @raise RuntimeError: Raised if the service already has a parent
+            or if the service has a name and the parent already has a child
+            by that name.
+        """
+
+    def disownServiceParent():
+        """
+        Use this API to remove an L{IService} from an L{IServiceCollection}.
+
+        This method is used symmetrically with L{setServiceParent} in that it
+        sets the C{parent} attribute on the child.
+
+        @rtype: L{Deferred<defer.Deferred>}
+        @return: a L{Deferred<defer.Deferred>} which is triggered when the
+            service has finished shutting down. If shutting down is immediate,
+            a value can be returned (usually, C{None}).
+        """
+
+    def startService():
+        """
+        Start the service.
+        """
+
+    def stopService():
+        """
+        Stop the service.
+
+        @rtype: L{Deferred<defer.Deferred>}
+        @return: a L{Deferred<defer.Deferred>} which is triggered when the
+            service has finished shutting down. If shutting down is immediate,
+            a value can be returned (usually, C{None}).
+        """
+
+    def privilegedStartService():
+        """
+        Do preparation work for starting the service.
+
+        Here things which should be done before changing directory,
+        root or shedding privileges are done.
+        """
+
+
+class Service:
+    """
+    Base class for services.
+
+    Most services should inherit from this class. It handles the
+    book-keeping responsibilities of starting and stopping, as well
+    as not serializing this book-keeping information.
+    """
+
+    implements(IService)
+
+    running = 0
+    name = None
+    parent = None
+
+    def __getstate__(self):
+        dict = self.__dict__.copy()
+        if "running" in dict:
+            del dict['running']
+        return dict
+
+    def setName(self, name):
+        if self.parent is not None:
+            raise RuntimeError("cannot change name when parent exists")
+        self.name = name
+
+    def setServiceParent(self, parent):
+        if self.parent is not None:
+            self.disownServiceParent()
+        parent = IServiceCollection(parent, parent)
+        self.parent = parent
+        self.parent.addService(self)
+
+    def disownServiceParent(self):
+        d = self.parent.removeService(self)
+        self.parent = None
+        return d
+
+    def privilegedStartService(self):
+        pass
+
+    def startService(self):
+        self.running = 1
+
+    def stopService(self):
+        self.running = 0
+
+
+
+class IServiceCollection(Interface):
+    """
+    Collection of services.
+
+    Contain several services, and manage their start-up/shut-down.
+    Services can be accessed by name if they have a name, and it
+    is always possible to iterate over them.
+    """
+
+    def getServiceNamed(name):
+        """
+        Get the child service with a given name.
+
+        @type name: C{str}
+        @rtype: L{IService}
+        @raise KeyError: Raised if the service has no child with the
+            given name.
+        """
+
+    def __iter__():
+        """
+        Get an iterator over all child services.
+        """
+
+    def addService(service):
+        """
+        Add a child service.
+
+        Only implementations of L{IService.setServiceParent} should use this
+        method.
+
+        @type service: L{IService}
+        @raise RuntimeError: Raised if the service has a child with
+            the given name.
+        """
+
+    def removeService(service):
+        """
+        Remove a child service.
+
+        Only implementations of L{IService.disownServiceParent} should
+        use this method.
+
+        @type service: L{IService}
+        @raise ValueError: Raised if the given service is not a child.
+        @rtype: L{Deferred<defer.Deferred>}
+        @return: a L{Deferred<defer.Deferred>} which is triggered when the
+            service has finished shutting down. If shutting down is immediate,
+            a value can be returned (usually, C{None}).
+        """
+
+
+
+class MultiService(Service):
+    """
+    Straightforward Service Container.
+
+    Hold a collection of services, and manage them in a simplistic
+    way. No service will wait for another, but this object itself
+    will not finish shutting down until all of its child services
+    will finish.
+    """
+
+    implements(IServiceCollection)
+
+    def __init__(self):
+        self.services = []
+        self.namedServices = {}
+        self.parent = None
+
+    def privilegedStartService(self):
+        Service.privilegedStartService(self)
+        for service in self:
+            service.privilegedStartService()
+
+    def startService(self):
+        Service.startService(self)
+        for service in self:
+            service.startService()
+
+    def stopService(self):
+        Service.stopService(self)
+        l = []
+        services = list(self)
+        services.reverse()
+        for service in services:
+            l.append(defer.maybeDeferred(service.stopService))
+        return defer.DeferredList(l)
+
+    def getServiceNamed(self, name):
+        return self.namedServices[name]
+
+    def __iter__(self):
+        return iter(self.services)
+
+    def addService(self, service):
+        if service.name is not None:
+            if service.name in self.namedServices:
+                raise RuntimeError("cannot have two services with same name"
+                                   " '%s'" % service.name)
+            self.namedServices[service.name] = service
+        self.services.append(service)
+        if self.running:
+            # It may be too late for that, but we will do our best
+            service.privilegedStartService()
+            service.startService()
+
+    def removeService(self, service):
+        if service.name:
+            del self.namedServices[service.name]
+        self.services.remove(service)
+        if self.running:
+            # Returning this so as not to lose information from the
+            # MultiService.stopService deferred.
+            return service.stopService()
+        else:
+            return None
+
+
+
+class IProcess(Interface):
+    """
+    Process running parameters.
+
+    Represents parameters for how processes should be run.
+    """
+    processName = Attribute(
+        """
+        A C{str} giving the name the process should have in ps (or C{None}
+        to leave the name alone).
+        """)
+
+    uid = Attribute(
+        """
+        An C{int} giving the user id as which the process should run (or
+        C{None} to leave the UID alone).
+        """)
+
+    gid = Attribute(
+        """
+        An C{int} giving the group id as which the process should run (or
+        C{None} to leave the GID alone).
+        """)
+
+
+
+class Process:
+    """
+    Process running parameters.
+
+    Sets up uid/gid in the constructor, and has a default
+    of C{None} as C{processName}.
+    """
+    implements(IProcess)
+    processName = None
+
+    def __init__(self, uid=None, gid=None):
+        """
+        Set uid and gid.
+
+        @param uid: The user ID as whom to execute the process.  If
+            this is C{None}, no attempt will be made to change the UID.
+
+        @param gid: The group ID as whom to execute the process.  If
+            this is C{None}, no attempt will be made to change the GID.
+        """
+        self.uid = uid
+        self.gid = gid
+
+
+def Application(name, uid=None, gid=None):
+    """
+    Return a compound object.
+
+    Return an object supporting the L{IService}, L{IServiceCollection},
+    L{IProcess} and L{sob.IPersistable} interfaces, with the given
+    parameters. Always access the return value by explicit casting to
+    one of the interfaces.
+    """
+    ret = components.Componentized()
+    for comp in (MultiService(), sob.Persistent(ret, name), Process(uid, gid)):
+        ret.addComponent(comp, ignoreClass=1)
+    IService(ret).setName(name)
+    return ret
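+
+# A hedged sketch of typical use (names are illustrative): the return value is
+# a Componentized object, so it is accessed by adapting it to whichever
+# interface is needed rather than by calling methods on it directly.
+#
+#     application = Application("myapp")
+#     svc = MultiService()
+#     svc.setServiceParent(application)   # adapts via IServiceCollection
+#     IService(application).name          # -> "myapp"
+#     IProcess(application).uid           # -> None unless uid= was given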
+
+
+
+def loadApplication(filename, kind, passphrase=None):
+    """
+    Load Application from a given file.
+
+    The serialization format it was saved in should be given as
+    C{kind}, and is one of C{pickle}, C{source}, C{xml} or C{python}. If
+    C{passphrase} is given, the application was encrypted with the
+    given passphrase.
+
+    @type filename: C{str}
+    @type kind: C{str}
+    @type passphrase: C{str}
+    """
+    if kind == 'python':
+        application = sob.loadValueFromFile(filename, 'application', passphrase)
+    else:
+        application = sob.load(filename, kind, passphrase)
+    return application
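+
+# Illustrative only (the filename is hypothetical): loading an application
+# that was saved as Python source, i.e. a .tac file.
+#
+#     application = loadApplication("myapp.tac", "python")
+#     IService(application).startService()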
+
+
+__all__ = ['IServiceMaker', 'IService', 'Service',
+           'IServiceCollection', 'MultiService',
+           'IProcess', 'Process', 'Application', 'loadApplication']
diff --git a/ThirdParty/Twisted/twisted/application/strports.py b/ThirdParty/Twisted/twisted/application/strports.py
new file mode 100644
index 0000000..117d76f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/application/strports.py
@@ -0,0 +1,103 @@
+# -*- test-case-name: twisted.test.test_strports -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Construct listening port services from a simple string description.
+
+@see: L{twisted.internet.endpoints.serverFromString}
+@see: L{twisted.internet.endpoints.clientFromString}
+"""
+
+import warnings
+
+from twisted.internet import endpoints
+from twisted.python.deprecate import deprecatedModuleAttribute
+from twisted.python.versions import Version
+from twisted.application.internet import StreamServerEndpointService
+
+
+
+def parse(description, factory, default='tcp'):
+    """
+    This function is deprecated as of Twisted 10.2.
+
+    @see: L{twisted.internet.endpoints.server}
+    """
+    return endpoints._parseServer(description, factory, default)
+
+deprecatedModuleAttribute(
+    Version("Twisted", 10, 2, 0),
+    "in favor of twisted.internet.endpoints.serverFromString",
+    __name__, "parse")
+
+
+
+_DEFAULT = object()
+
+def service(description, factory, default=_DEFAULT, reactor=None):
+    """
+    Return the service corresponding to a description.
+
+    @param description: The description of the listening port, in the syntax
+        described by L{twisted.internet.endpoints.server}.
+
+    @type description: C{str}
+
+    @param factory: The protocol factory which will build protocols for
+        connections to this service.
+
+    @type factory: L{twisted.internet.interfaces.IProtocolFactory}
+
+    @type default: C{str} or C{None}
+
+    @param default: Do not use this parameter. It has been deprecated since
+        Twisted 10.2.0.
+
+    @rtype: C{twisted.application.service.IService}
+
+    @return: the service corresponding to a description of a reliable
+        stream server.
+
+    @see: L{twisted.internet.endpoints.serverFromString}
+    """
+    if reactor is None:
+        from twisted.internet import reactor
+    if default is _DEFAULT:
+        default = None
+    else:
+        message = "The 'default' parameter was deprecated in Twisted 10.2.0."
+        if default is not None:
+            message += (
+                "  Use qualified endpoint descriptions; for example, "
+                "'tcp:%s'." % (description,))
+        warnings.warn(
+            message=message, category=DeprecationWarning, stacklevel=2)
+    svc = StreamServerEndpointService(
+        endpoints._serverFromStringLegacy(reactor, description, default),
+        factory)
+    svc._raiseSynchronously = True
+    return svc
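+
+# Illustrative use (the endpoint description "tcp:8080" and the parent
+# application are assumptions): build an IService that starts listening only
+# once the service itself is started.
+#
+#     from twisted.internet.protocol import Factory
+#     svc = service("tcp:8080", Factory())
+#     svc.setServiceParent(application)   # or call svc.startService() directly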
+
+
+
+def listen(description, factory, default=None):
+    """Listen on a port corresponding to a description
+
+    @type description: C{str}
+    @type factory: L{twisted.internet.interfaces.IProtocolFactory}
+    @type default: C{str} or C{None}
+    @rtype: C{twisted.internet.interfaces.IListeningPort}
+    @return: the port corresponding to a description of a reliable
+    virtual circuit server.
+
+    See the documentation of the C{parse} function for a description of the
+    semantics of the arguments.
+    """
+    from twisted.internet import reactor
+    name, args, kw = parse(description, factory, default)
+    return getattr(reactor, 'listen'+name)(*args, **kw)
+
+
+
+__all__ = ['parse', 'service', 'listen']
diff --git a/ThirdParty/Twisted/twisted/application/test/__init__.py b/ThirdParty/Twisted/twisted/application/test/__init__.py
new file mode 100644
index 0000000..3cb9635
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/application/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.application}.
+"""
diff --git a/ThirdParty/Twisted/twisted/application/test/test_internet.py b/ThirdParty/Twisted/twisted/application/test/test_internet.py
new file mode 100644
index 0000000..9e058d7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/application/test/test_internet.py
@@ -0,0 +1,252 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for (new code in) L{twisted.application.internet}.
+"""
+
+
+from zope.interface import implements
+from zope.interface.verify import verifyClass
+
+from twisted.internet.protocol import Factory
+from twisted.trial.unittest import TestCase
+from twisted.application.internet import StreamServerEndpointService
+from twisted.internet.interfaces import IStreamServerEndpoint, IListeningPort
+from twisted.internet.defer import Deferred, CancelledError
+
+class FakeServer(object):
+    """
+    In-memory implementation of L{IStreamServerEndpoint}.
+
+    @ivar result: The L{Deferred} resulting from the call to C{listen}, after
+        C{listen} has been called.
+
+    @ivar factory: The factory passed to C{listen}.
+
+    @ivar cancelException: The exception to errback C{self.result} when it is
+        cancelled.
+
+    @ivar port: The L{IListeningPort} which C{listen}'s L{Deferred} will fire
+        with.
+
+    @ivar listenAttempts: The number of times C{listen} has been invoked.
+
+    @ivar failImmediately: If set, the exception to fail the L{Deferred}
+        returned from C{listen} before it is returned.
+    """
+
+    implements(IStreamServerEndpoint)
+
+    result = None
+    factory = None
+    failImmediately = None
+    cancelException = CancelledError()
+    listenAttempts = 0
+
+    def __init__(self):
+        self.port = FakePort()
+
+
+    def listen(self, factory):
+        """
+        Return a Deferred and store it for future use.  (Implementation of
+        L{IStreamServerEndpoint}).
+        """
+        self.listenAttempts += 1
+        self.factory = factory
+        self.result = Deferred(
+            canceller=lambda d: d.errback(self.cancelException))
+        if self.failImmediately is not None:
+            self.result.errback(self.failImmediately)
+        return self.result
+
+
+    def startedListening(self):
+        """
+        Test code should invoke this method after causing C{listen} to be
+        invoked in order to fire the L{Deferred} previously returned from
+        C{listen}.
+        """
+        self.result.callback(self.port)
+
+
+    def stoppedListening(self):
+        """
+        Test code should invoke this method after causing C{stopListening} to
+        be invoked on the port fired from the L{Deferred} returned from
+        C{listen} in order to cause the L{Deferred} returned from
+        C{stopListening} to fire.
+        """
+        self.port.deferred.callback(None)
+
+verifyClass(IStreamServerEndpoint, FakeServer)
+
+
+
+class FakePort(object):
+    """
+    Fake L{IListeningPort} implementation.
+
+    @ivar deferred: The L{Deferred} returned by C{stopListening}.
+    """
+
+    implements(IListeningPort)
+
+    deferred = None
+
+    def stopListening(self):
+        self.deferred = Deferred()
+        return self.deferred
+
+verifyClass(IListeningPort, FakePort)
+
+
+
+class TestEndpointService(TestCase):
+    """
+    Tests for L{twisted.application.internet}.
+    """
+
+    def setUp(self):
+        """
+        Construct a stub server, a stub factory, and a
+        L{StreamServerEndpointService} to test.
+        """
+        self.fakeServer = FakeServer()
+        self.factory = Factory()
+        self.svc = StreamServerEndpointService(self.fakeServer, self.factory)
+
+
+    def test_privilegedStartService(self):
+        """
+        L{StreamServerEndpointService.privilegedStartService} calls its
+        endpoint's C{listen} method with its factory.
+        """
+        self.svc.privilegedStartService()
+        self.assertIdentical(self.factory, self.fakeServer.factory)
+
+
+    def test_synchronousRaiseRaisesSynchronously(self, thunk=None):
+        """
+        L{StreamServerEndpointService.startService} should raise synchronously
+        if the L{Deferred} returned by its wrapped
+        L{IStreamServerEndpoint.listen} has already fired with an errback and
+        the L{StreamServerEndpointService}'s C{_raiseSynchronously} flag has
+        been set.  This feature is necessary to preserve compatibility with old
+        behavior of L{twisted.internet.strports.service}, which is to return a
+        service which synchronously raises an exception from C{startService}
+        (so that, among other things, twistd will not start running).  However,
+        since L{IStreamServerEndpoint.listen} may fail asynchronously, it is
+        a bad idea to rely on this behavior.
+        """
+        self.fakeServer.failImmediately = ZeroDivisionError()
+        self.svc._raiseSynchronously = True
+        self.assertRaises(ZeroDivisionError, thunk or self.svc.startService)
+
+
+    def test_synchronousRaisePrivileged(self):
+        """
+        L{StreamServerEndpointService.privilegedStartService} should behave the
+        same as C{startService} with respect to
+        L{TestEndpointService.test_synchronousRaiseRaisesSynchronously}.
+        """
+        self.test_synchronousRaiseRaisesSynchronously(
+            self.svc.privilegedStartService)
+
+
+    def test_failReportsError(self):
+        """
+        L{StreamServerEndpointService.startService} and
+        L{StreamServerEndpointService.privilegedStartService} should both log
+        an exception when the L{Deferred} returned from their wrapped
+        L{IStreamServerEndpoint.listen} fails.
+        """
+        self.svc.startService()
+        self.fakeServer.result.errback(ZeroDivisionError())
+        logged = self.flushLoggedErrors(ZeroDivisionError)
+        self.assertEqual(len(logged), 1)
+
+
+    def test_synchronousFailReportsError(self):
+        """
+        Without the C{_raiseSynchronously} compatibility flag, failing
+        immediately has the same behavior as failing later; it logs the error.
+        """
+        self.fakeServer.failImmediately = ZeroDivisionError()
+        self.svc.startService()
+        logged = self.flushLoggedErrors(ZeroDivisionError)
+        self.assertEqual(len(logged), 1)
+
+
+    def test_startServiceUnstarted(self):
+        """
+        L{StreamServerEndpointService.startService} sets the C{running} flag,
+        and calls its endpoint's C{listen} method with its factory, if it
+        has not yet been started.
+        """
+        self.svc.startService()
+        self.assertIdentical(self.factory, self.fakeServer.factory)
+        self.assertEqual(self.svc.running, True)
+
+
+    def test_startServiceStarted(self):
+        """
+        L{StreamServerEndpointService.startService} sets the C{running} flag,
+        but nothing else, if the service has already been started.
+        """
+        self.test_privilegedStartService()
+        self.svc.startService()
+        self.assertEqual(self.fakeServer.listenAttempts, 1)
+        self.assertEqual(self.svc.running, True)
+
+
+    def test_stopService(self):
+        """
+        L{StreamServerEndpointService.stopService} calls C{stopListening} on
+        the L{IListeningPort} returned from its endpoint, returns a
+        C{Deferred} which fires once the port has stopped listening, and sets
+        C{running} to C{False}.
+        """
+        self.svc.privilegedStartService()
+        self.fakeServer.startedListening()
+        # Ensure running gets set to true
+        self.svc.startService()
+        result = self.svc.stopService()
+        l = []
+        result.addCallback(l.append)
+        self.assertEqual(len(l), 0)
+        self.fakeServer.stoppedListening()
+        self.assertEqual(len(l), 1)
+        self.assertFalse(self.svc.running)
+
+
+    def test_stopServiceBeforeStartFinished(self):
+        """
+        L{StreamServerEndpointService.stopService} cancels the L{Deferred}
+        returned by C{listen} if it has not yet fired.  No error will be logged
+        about the cancellation of the listen attempt.
+        """
+        self.svc.privilegedStartService()
+        result = self.svc.stopService()
+        l = []
+        result.addBoth(l.append)
+        self.assertEqual(l, [None])
+        self.assertEqual(self.flushLoggedErrors(CancelledError), [])
+
+
+    def test_stopServiceCancelStartError(self):
+        """
+        L{StreamServerEndpointService.stopService} cancels the L{Deferred}
+        returned by C{listen} if it has not fired yet.  An error will be logged
+        if the resulting exception is not L{CancelledError}.
+        """
+        self.fakeServer.cancelException = ZeroDivisionError()
+        self.svc.privilegedStartService()
+        result = self.svc.stopService()
+        l = []
+        result.addCallback(l.append)
+        self.assertEqual(l, [None])
+        stoppingErrors = self.flushLoggedErrors(ZeroDivisionError)
+        self.assertEqual(len(stoppingErrors), 1)
+
+
diff --git a/ThirdParty/Twisted/twisted/conch/__init__.py b/ThirdParty/Twisted/twisted/conch/__init__.py
new file mode 100644
index 0000000..d7ce597
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/__init__.py
@@ -0,0 +1,18 @@
+# -*- test-case-name: twisted.conch.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+
+"""
+Twisted.Conch: The Twisted Shell. Terminal emulation, SSHv2 and telnet.
+
+Currently this contains the SSHv2 implementation, but it may work over other
+protocols in the future (e.g. Telnet).
+
+Maintainer: Paul Swartz
+"""
+
+from twisted.conch._version import version
+__version__ = version.short()
diff --git a/ThirdParty/Twisted/twisted/conch/_version.py b/ThirdParty/Twisted/twisted/conch/_version.py
new file mode 100644
index 0000000..0317806
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.conch', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/conch/avatar.py b/ThirdParty/Twisted/twisted/conch/avatar.py
new file mode 100644
index 0000000..a914da3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/avatar.py
@@ -0,0 +1,37 @@
+# -*- test-case-name: twisted.conch.test.test_conch -*-
+from interfaces import IConchUser
+from error import ConchError
+from ssh.connection import OPEN_UNKNOWN_CHANNEL_TYPE
+from twisted.python import log
+from zope import interface
+
+class ConchUser:
+    interface.implements(IConchUser)
+
+    def __init__(self):
+        self.channelLookup = {}
+        self.subsystemLookup = {}
+
+    def lookupChannel(self, channelType, windowSize, maxPacket, data):
+        klass = self.channelLookup.get(channelType, None)
+        if not klass:
+            raise ConchError(OPEN_UNKNOWN_CHANNEL_TYPE, "unknown channel")
+        else:
+            return klass(remoteWindow=windowSize,
+                         remoteMaxPacket=maxPacket,
+                         data=data, avatar=self)
+
+    def lookupSubsystem(self, subsystem, data):
+        log.msg(repr(self.subsystemLookup))
+        klass = self.subsystemLookup.get(subsystem, None)
+        if not klass:
+            return False
+        return klass(data, avatar=self)
+
+    def gotGlobalRequest(self, requestType, data):
+        # XXX should this use method dispatch?
+        requestType = requestType.replace('-','_')
+        f = getattr(self, "global_%s" % requestType, None)
+        if not f:
+            return 0
+        return f(data)
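+
+# Illustrative subclass (the channel registration below is an assumption about
+# how an application would use this class, not part of the file itself): an
+# avatar normally populates channelLookup so lookupChannel can build channels.
+#
+#     from twisted.conch.ssh import session
+#
+#     class ExampleUser(ConchUser):
+#         def __init__(self):
+#             ConchUser.__init__(self)
+#             self.channelLookup['session'] = session.SSHSession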
diff --git a/ThirdParty/Twisted/twisted/conch/checkers.py b/ThirdParty/Twisted/twisted/conch/checkers.py
new file mode 100644
index 0000000..3cd6a0e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/checkers.py
@@ -0,0 +1,308 @@
+# -*- test-case-name: twisted.conch.test.test_checkers -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Provide L{ICredentialsChecker} implementations to be used in Conch protocols.
+"""
+
+import os, base64, binascii, errno
+try:
+    import pwd
+except ImportError:
+    pwd = None
+else:
+    import crypt
+
+try:
+    # Python 2.5 got spwd to interface with shadow passwords
+    import spwd
+except ImportError:
+    spwd = None
+    try:
+        import shadow
+    except ImportError:
+        shadow = None
+else:
+    shadow = None
+
+try:
+    from twisted.cred import pamauth
+except ImportError:
+    pamauth = None
+
+from zope.interface import implements, providedBy
+
+from twisted.conch import error
+from twisted.conch.ssh import keys
+from twisted.cred.checkers import ICredentialsChecker
+from twisted.cred.credentials import IUsernamePassword, ISSHPrivateKey
+from twisted.cred.error import UnauthorizedLogin, UnhandledCredentials
+from twisted.internet import defer
+from twisted.python import failure, reflect, log
+from twisted.python.util import runAsEffectiveUser
+from twisted.python.filepath import FilePath
+
+
+
+def verifyCryptedPassword(crypted, pw):
+    return crypt.crypt(pw, crypted) == crypted
+
+
+
+def _pwdGetByName(username):
+    """
+    Look up a user in the /etc/passwd database using the pwd module.  If the
+    pwd module is not available, return None.
+
+    @param username: the username of the user to return the passwd database
+        information for.
+    """
+    if pwd is None:
+        return None
+    return pwd.getpwnam(username)
+
+
+
+def _shadowGetByName(username):
+    """
+    Look up a user in the /etc/shadow database using the spwd or shadow
+    modules.  If neither module is available, return None.
+
+    @param username: the username of the user to return the shadow database
+        information for.
+    """
+    if spwd is not None:
+        f = spwd.getspnam
+    elif shadow is not None:
+        f = shadow.getspnam
+    else:
+        return None
+    return runAsEffectiveUser(0, 0, f, username)
+
+
+
+class UNIXPasswordDatabase:
+    """
+    A checker which validates users out of the UNIX password databases, or
+    databases of a compatible format.
+
+    @ivar _getByNameFunctions: a C{list} of functions which are called in order
+        to validate a user.  The default value is such that the /etc/passwd
+        database will be tried first, followed by the /etc/shadow database.
+    """
+    credentialInterfaces = IUsernamePassword,
+    implements(ICredentialsChecker)
+
+
+    def __init__(self, getByNameFunctions=None):
+        if getByNameFunctions is None:
+            getByNameFunctions = [_pwdGetByName, _shadowGetByName]
+        self._getByNameFunctions = getByNameFunctions
+
+
+    def requestAvatarId(self, credentials):
+        for func in self._getByNameFunctions:
+            try:
+                pwnam = func(credentials.username)
+            except KeyError:
+                return defer.fail(UnauthorizedLogin("invalid username"))
+            else:
+                if pwnam is not None:
+                    crypted = pwnam[1]
+                    if crypted == '':
+                        continue
+                    if verifyCryptedPassword(crypted, credentials.password):
+                        return defer.succeed(credentials.username)
+        # fallback
+        return defer.fail(UnauthorizedLogin("unable to verify password"))
+
+
+
+class SSHPublicKeyDatabase:
+    """
+    Checker that authenticates SSH public keys, based on public keys listed in
+    authorized_keys and authorized_keys2 files in user .ssh/ directories.
+    """
+    implements(ICredentialsChecker)
+
+    credentialInterfaces = (ISSHPrivateKey,)
+
+    _userdb = pwd
+
+    def requestAvatarId(self, credentials):
+        d = defer.maybeDeferred(self.checkKey, credentials)
+        d.addCallback(self._cbRequestAvatarId, credentials)
+        d.addErrback(self._ebRequestAvatarId)
+        return d
+
+    def _cbRequestAvatarId(self, validKey, credentials):
+        """
+        Check whether the credentials themselves are valid, now that we know
+        if the key matches the user.
+
+        @param validKey: A boolean indicating whether or not the public key
+            matches a key in the user's authorized_keys file.
+
+        @param credentials: The credentials offered by the user.
+        @type credentials: L{ISSHPrivateKey} provider
+
+        @raise UnauthorizedLogin: (as a failure) if the key does not match the
+            user in C{credentials}. Also raised if the user provides an invalid
+            signature.
+
+        @raise ValidPublicKey: (as a failure) if the key matches the user but
+            the credentials do not include a signature. See
+            L{error.ValidPublicKey} for more information.
+
+        @return: The user's username, if authentication was successful.
+        """
+        if not validKey:
+            return failure.Failure(UnauthorizedLogin("invalid key"))
+        if not credentials.signature:
+            return failure.Failure(error.ValidPublicKey())
+        else:
+            try:
+                pubKey = keys.Key.fromString(credentials.blob)
+                if pubKey.verify(credentials.signature, credentials.sigData):
+                    return credentials.username
+            except: # any error should be treated as a failed login
+                log.err()
+                return failure.Failure(UnauthorizedLogin('error while verifying key'))
+        return failure.Failure(UnauthorizedLogin("unable to verify key"))
+
+
+    def getAuthorizedKeysFiles(self, credentials):
+        """
+        Return a list of L{FilePath} instances for I{authorized_keys} files
+        which might contain information about authorized keys for the given
+        credentials.
+
+        On OpenSSH servers, the default location of the file containing the
+        list of authorized public keys is
+        U{$HOME/.ssh/authorized_keys<http://www.openbsd.org/cgi-bin/man.cgi?query=sshd_config>}.
+
+        I{$HOME/.ssh/authorized_keys2} is also returned, though it has been
+        U{deprecated by OpenSSH since
+        2001<http://marc.info/?m=100508718416162>}.
+
+        @return: A list of L{FilePath} instances for files containing the
+            authorized keys.
+        """
+        pwent = self._userdb.getpwnam(credentials.username)
+        root = FilePath(pwent.pw_dir).child('.ssh')
+        files = ['authorized_keys', 'authorized_keys2']
+        return [root.child(f) for f in files]
+
+
+    def checkKey(self, credentials):
+        """
+        Retrieve files containing authorized keys and check against user
+        credentials.
+        """
+        uid, gid = os.geteuid(), os.getegid()
+        ouid, ogid = self._userdb.getpwnam(credentials.username)[2:4]
+        for filepath in self.getAuthorizedKeysFiles(credentials):
+            if not filepath.exists():
+                continue
+            try:
+                lines = filepath.open()
+            except IOError, e:
+                if e.errno == errno.EACCES:
+                    lines = runAsEffectiveUser(ouid, ogid, filepath.open)
+                else:
+                    raise
+            for l in lines:
+                l2 = l.split()
+                if len(l2) < 2:
+                    continue
+                try:
+                    if base64.decodestring(l2[1]) == credentials.blob:
+                        return True
+                except binascii.Error:
+                    continue
+        return False
+
+    def _ebRequestAvatarId(self, f):
+        if not f.check(UnauthorizedLogin):
+            log.msg(f)
+            return failure.Failure(UnauthorizedLogin("unable to get avatar id"))
+        return f
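+
+# Illustrative subclass (the path below is hypothetical): looking up authorized
+# keys in a single system-wide directory instead of per-user ~/.ssh files.
+#
+#     class SingleDirectoryKeyDB(SSHPublicKeyDatabase):
+#         def getAuthorizedKeysFiles(self, credentials):
+#             return [FilePath('/etc/ssh/authorized_keys/%s'
+#                              % (credentials.username,))]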
+
+
+class SSHProtocolChecker:
+    """
+    SSHProtocolChecker is a checker that requires multiple authentications
+    to succeed.  To add a checker, call my registerChecker method with
+    the checker and the interface.
+
+    After each successful authentication, I call my areDone method with the
+    avatar id.  To get a list of the successful credentials for an avatar id,
+    use C{SSHProtocolChecker.successfulCredentials[avatarId]}.  If L{areDone}
+    returns True, the authentication has succeeded.
+    """
+
+    implements(ICredentialsChecker)
+
+    def __init__(self):
+        self.checkers = {}
+        self.successfulCredentials = {}
+
+    def get_credentialInterfaces(self):
+        return self.checkers.keys()
+
+    credentialInterfaces = property(get_credentialInterfaces)
+
+    def registerChecker(self, checker, *credentialInterfaces):
+        if not credentialInterfaces:
+            credentialInterfaces = checker.credentialInterfaces
+        for credentialInterface in credentialInterfaces:
+            self.checkers[credentialInterface] = checker
+
+    def requestAvatarId(self, credentials):
+        """
+        Part of the L{ICredentialsChecker} interface.  Called by a portal with
+        some credentials to check if they'll authenticate a user.  We check the
+        interfaces that the credentials provide against our list of acceptable
+        checkers.  If one of them matches, we ask that checker to verify the
+        credentials.  If they're valid, we call our L{_cbGoodAuthentication}
+        method to continue.
+
+        @param credentials: the credentials the L{Portal} wants us to verify
+        """
+        ifac = providedBy(credentials)
+        for i in ifac:
+            c = self.checkers.get(i)
+            if c is not None:
+                d = defer.maybeDeferred(c.requestAvatarId, credentials)
+                return d.addCallback(self._cbGoodAuthentication,
+                        credentials)
+        return defer.fail(UnhandledCredentials("No checker for %s" % \
+            ', '.join(map(reflect.qual, ifac))))
+
+    def _cbGoodAuthentication(self, avatarId, credentials):
+        """
+        Called if a checker has verified the credentials.  We call our
+        L{areDone} method to see if the whole of the successful authentications
+        are enough.  If they are, we return the avatar ID returned by the first
+        checker.
+        """
+        if avatarId not in self.successfulCredentials:
+            self.successfulCredentials[avatarId] = []
+        self.successfulCredentials[avatarId].append(credentials)
+        if self.areDone(avatarId):
+            del self.successfulCredentials[avatarId]
+            return avatarId
+        else:
+            raise error.NotEnoughAuthentication()
+
+    def areDone(self, avatarId):
+        """
+        Override to determine if the authentication is finished for a given
+        avatarId.
+
+        @param avatarId: the avatar returned by the first checker.  For
+            this checker to function correctly, all the checkers must
+            return the same avatar ID.
+        """
+        return True
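+
+# Illustrative sketch: requiring both a password and a public key before the
+# avatar id is released.  passwordChecker and publicKeyChecker are assumed to
+# be configured elsewhere and to advertise their credentialInterfaces.
+#
+#     checker = SSHProtocolChecker()
+#     checker.registerChecker(passwordChecker)
+#     checker.registerChecker(publicKeyChecker)
+#     checker.areDone = lambda avatarId: (
+#         len(checker.successfulCredentials[avatarId]) >= 2)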
+
diff --git a/ThirdParty/Twisted/twisted/conch/client/__init__.py b/ThirdParty/Twisted/twisted/conch/client/__init__.py
new file mode 100644
index 0000000..f55d474
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/client/__init__.py
@@ -0,0 +1,9 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+"""
+Client support code for Conch.
+
+Maintainer: Paul Swartz
+"""
diff --git a/ThirdParty/Twisted/twisted/conch/client/agent.py b/ThirdParty/Twisted/twisted/conch/client/agent.py
new file mode 100644
index 0000000..50a8fea
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/client/agent.py
@@ -0,0 +1,73 @@
+# -*- test-case-name: twisted.conch.test.test_default -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Accesses the key agent for user authentication.
+
+Maintainer: Paul Swartz
+"""
+
+import os
+
+from twisted.conch.ssh import agent, channel, keys
+from twisted.internet import protocol, reactor
+from twisted.python import log
+
+
+
+class SSHAgentClient(agent.SSHAgentClient):
+
+    def __init__(self):
+        agent.SSHAgentClient.__init__(self)
+        self.blobs = []
+
+
+    def getPublicKeys(self):
+        return self.requestIdentities().addCallback(self._cbPublicKeys)
+
+
+    def _cbPublicKeys(self, blobcomm):
+        log.msg('got %i public keys' % len(blobcomm))
+        self.blobs = [x[0] for x in blobcomm]
+
+
+    def getPublicKey(self):
+        """
+        Return a L{Key} from the first blob in C{self.blobs}, if any, or
+        return C{None}.
+        """
+        if self.blobs:
+            return keys.Key.fromString(self.blobs.pop(0))
+        return None
+
+
+
+class SSHAgentForwardingChannel(channel.SSHChannel):
+
+    def channelOpen(self, specificData):
+        cc = protocol.ClientCreator(reactor, SSHAgentForwardingLocal)
+        d = cc.connectUNIX(os.environ['SSH_AUTH_SOCK'])
+        d.addCallback(self._cbGotLocal)
+        d.addErrback(lambda x:self.loseConnection())
+        self.buf = ''
+
+
+    def _cbGotLocal(self, local):
+        self.local = local
+        self.dataReceived = self.local.transport.write
+        self.local.dataReceived = self.write
+
+
+    def dataReceived(self, data):
+        self.buf += data
+
+
+    def closed(self):
+        if self.local:
+            self.local.loseConnection()
+            self.local = None
+
+
+class SSHAgentForwardingLocal(protocol.Protocol):
+    pass
diff --git a/ThirdParty/Twisted/twisted/conch/client/connect.py b/ThirdParty/Twisted/twisted/conch/client/connect.py
new file mode 100644
index 0000000..dc5fe22
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/client/connect.py
@@ -0,0 +1,21 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+import direct
+
+connectTypes = {"direct" : direct.connect}
+
+def connect(host, port, options, verifyHostKey, userAuthObject):
+    useConnects = ['direct']
+    return _ebConnect(None, useConnects, host, port, options, verifyHostKey,
+                      userAuthObject)
+
+def _ebConnect(f, useConnects, host, port, options, vhk, uao):
+    if not useConnects:
+        return f
+    connectType = useConnects.pop(0)
+    f = connectTypes[connectType]
+    d = f(host, port, options, vhk, uao)
+    d.addErrback(_ebConnect, useConnects, host, port, options, vhk, uao)
+    return d
diff --git a/ThirdParty/Twisted/twisted/conch/client/default.py b/ThirdParty/Twisted/twisted/conch/client/default.py
new file mode 100644
index 0000000..50fe97a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/client/default.py
@@ -0,0 +1,256 @@
+# -*- test-case-name: twisted.conch.test.test_knownhosts,twisted.conch.test.test_default -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Various classes and functions for implementing user-interaction in the
+command-line conch client.
+
+You probably shouldn't use anything in this module directly, since it assumes
+you are sitting at an interactive terminal.  For example, to programmatically
+interact with a known_hosts database, use L{twisted.conch.client.knownhosts}.
+"""
+
+from twisted.python import log
+from twisted.python.filepath import FilePath
+
+from twisted.conch.error import ConchError
+from twisted.conch.ssh import common, keys, userauth
+from twisted.internet import defer, protocol, reactor
+
+from twisted.conch.client.knownhosts import KnownHostsFile, ConsoleUI
+
+from twisted.conch.client import agent
+
+import os, sys, base64, getpass
+
+# This name is bound so that the unit tests can use 'patch' to override it.
+_open = open
+
+def verifyHostKey(transport, host, pubKey, fingerprint):
+    """
+    Verify a host's key.
+
+    This function is a gross vestige of some bad factoring in the client
+    internals.  The actual implementation, and a better signature of this logic
+    is in L{KnownHostsFile.verifyHostKey}.  This function is not deprecated yet
+    because the callers have not yet been rehabilitated, but they should
+    eventually be changed to call that method instead.
+
+    However, this function does perform two functions not implemented by
+    L{KnownHostsFile.verifyHostKey}.  It determines the path to the user's
+    known_hosts file based on the options (which should really be the options
+    object's job), and it provides an opener to L{ConsoleUI} which opens
+    '/dev/tty' so that the user will be prompted on the tty of the process even
+    if the input and output of the process has been redirected.  This latter
+    part is, somewhat obviously, not portable, but I don't know of a portable
+    equivalent that could be used.
+
+    @param host: Due to a bug in L{SSHClientTransport.verifyHostKey}, this is
+    always the dotted-quad IP address of the host being connected to.
+    @type host: L{str}
+
+    @param transport: the client transport which is attempting to connect to
+    the given host.
+    @type transport: L{SSHClientTransport}
+
+    @param fingerprint: the fingerprint of the given public key, in
+    xx:xx:xx:... format.  This is ignored in favor of getting the fingerprint
+    from the key itself.
+    @type fingerprint: L{str}
+
+    @param pubKey: The public key of the server being connected to.
+    @type pubKey: L{str}
+
+    @return: a L{Deferred} which fires with C{1} if the key was successfully
+    verified, or fails if the key could not be successfully verified.  Failure
+    types may include L{HostKeyChanged}, L{UserRejectedKey}, L{IOError} or
+    L{KeyboardInterrupt}.
+    """
+    actualHost = transport.factory.options['host']
+    actualKey = keys.Key.fromString(pubKey)
+    kh = KnownHostsFile.fromPath(FilePath(
+            transport.factory.options['known-hosts']
+            or os.path.expanduser("~/.ssh/known_hosts")
+            ))
+    ui = ConsoleUI(lambda : _open("/dev/tty", "r+b"))
+    return kh.verifyHostKey(ui, actualHost, host, actualKey)
+
+
+
+def isInKnownHosts(host, pubKey, options):
+    """checks to see if host is in the known_hosts file for the user.
+    returns 0 if it isn't, 1 if it is and is the same, 2 if it's changed.
+    """
+    keyType = common.getNS(pubKey)[0]
+    retVal = 0
+
+    if not options['known-hosts'] and not os.path.exists(os.path.expanduser('~/.ssh/')):
+        print 'Creating ~/.ssh directory...'
+        os.mkdir(os.path.expanduser('~/.ssh'))
+    kh_file = options['known-hosts'] or '~/.ssh/known_hosts'
+    try:
+        known_hosts = open(os.path.expanduser(kh_file))
+    except IOError:
+        return 0
+    for line in known_hosts.xreadlines():
+        split = line.split()
+        if len(split) < 3:
+            continue
+        hosts, hostKeyType, encodedKey = split[:3]
+        if host not in hosts.split(','): # incorrect host
+            continue
+        if hostKeyType != keyType: # incorrect type of key
+            continue
+        try:
+            decodedKey = base64.decodestring(encodedKey)
+        except:
+            continue
+        if decodedKey == pubKey:
+            return 1
+        else:
+            retVal = 2
+    return retVal
+
+
+
+class SSHUserAuthClient(userauth.SSHUserAuthClient):
+
+    def __init__(self, user, options, *args):
+        userauth.SSHUserAuthClient.__init__(self, user, *args)
+        self.keyAgent = None
+        self.options = options
+        self.usedFiles = []
+        if not options.identitys:
+            options.identitys = ['~/.ssh/id_rsa', '~/.ssh/id_dsa']
+
+    def serviceStarted(self):
+        if 'SSH_AUTH_SOCK' in os.environ and not self.options['noagent']:
+            log.msg('using agent')
+            cc = protocol.ClientCreator(reactor, agent.SSHAgentClient)
+            d = cc.connectUNIX(os.environ['SSH_AUTH_SOCK'])
+            d.addCallback(self._setAgent)
+            d.addErrback(self._ebSetAgent)
+        else:
+            userauth.SSHUserAuthClient.serviceStarted(self)
+
+    def serviceStopped(self):
+        if self.keyAgent:
+            self.keyAgent.transport.loseConnection()
+            self.keyAgent = None
+
+    def _setAgent(self, a):
+        self.keyAgent = a
+        d = self.keyAgent.getPublicKeys()
+        d.addBoth(self._ebSetAgent)
+        return d
+
+    def _ebSetAgent(self, f):
+        userauth.SSHUserAuthClient.serviceStarted(self)
+
+    def _getPassword(self, prompt):
+        try:
+            oldout, oldin = sys.stdout, sys.stdin
+            sys.stdin = sys.stdout = open('/dev/tty','r+')
+            p=getpass.getpass(prompt)
+            sys.stdout,sys.stdin=oldout,oldin
+            return p
+        except (KeyboardInterrupt, IOError):
+            print
+            raise ConchError('PEBKAC')
+
+    def getPassword(self, prompt = None):
+        if not prompt:
+            prompt = "%s@%s's password: " % (self.user, self.transport.transport.getPeer().host)
+        try:
+            p = self._getPassword(prompt)
+            return defer.succeed(p)
+        except ConchError:
+            return defer.fail()
+
+
+    def getPublicKey(self):
+        """
+        Get a public key from the key agent if possible, otherwise look in
+        the next configured identity file for one.
+        """
+        if self.keyAgent:
+            key = self.keyAgent.getPublicKey()
+            if key is not None:
+                return key
+        files = [x for x in self.options.identitys if x not in self.usedFiles]
+        log.msg(str(self.options.identitys))
+        log.msg(str(files))
+        if not files:
+            return None
+        file = files[0]
+        log.msg(file)
+        self.usedFiles.append(file)
+        file = os.path.expanduser(file)
+        file += '.pub'
+        if not os.path.exists(file):
+            return self.getPublicKey() # try again
+        try:
+            return keys.Key.fromFile(file)
+        except keys.BadKeyError:
+            return self.getPublicKey() # try again
+
+
+    def signData(self, publicKey, signData):
+        """
+        Extend the base signing behavior by using an SSH agent to sign the
+        data, if one is available.
+
+        @type publicKey: L{Key}
+        @type signData: C{str}
+        """
+        if not self.usedFiles: # agent key
+            return self.keyAgent.signData(publicKey.blob(), signData)
+        else:
+            return userauth.SSHUserAuthClient.signData(self, publicKey, signData)
+
+
+    def getPrivateKey(self):
+        """
+        Try to load the private key from the last used file identified by
+        C{getPublicKey}, potentially asking for the passphrase if the key is
+        encrypted.
+        """
+        file = os.path.expanduser(self.usedFiles[-1])
+        if not os.path.exists(file):
+            return None
+        try:
+            return defer.succeed(keys.Key.fromFile(file))
+        except keys.EncryptedKeyError:
+            for i in range(3):
+                prompt = "Enter passphrase for key '%s': " % \
+                    self.usedFiles[-1]
+                try:
+                    p = self._getPassword(prompt)
+                    return defer.succeed(keys.Key.fromFile(file, passphrase=p))
+                except (keys.BadKeyError, ConchError):
+                    pass
+                return defer.fail(ConchError('bad password'))
+            raise
+        except KeyboardInterrupt:
+            print
+            reactor.stop()
+
+
+    def getGenericAnswers(self, name, instruction, prompts):
+        responses = []
+        try:
+            oldout, oldin = sys.stdout, sys.stdin
+            sys.stdin = sys.stdout = open('/dev/tty','r+')
+            if name:
+                print name
+            if instruction:
+                print instruction
+            for prompt, echo in prompts:
+                if echo:
+                    responses.append(raw_input(prompt))
+                else:
+                    responses.append(getpass.getpass(prompt))
+        finally:
+            sys.stdout,sys.stdin=oldout,oldin
+        return defer.succeed(responses)
diff --git a/ThirdParty/Twisted/twisted/conch/client/direct.py b/ThirdParty/Twisted/twisted/conch/client/direct.py
new file mode 100644
index 0000000..f95a14a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/client/direct.py
@@ -0,0 +1,107 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.internet import defer, protocol, reactor
+from twisted.conch import error
+from twisted.conch.ssh import transport
+from twisted.python import log
+
+
+
+class SSHClientFactory(protocol.ClientFactory):
+
+    def __init__(self, d, options, verifyHostKey, userAuthObject):
+        self.d = d
+        self.options = options
+        self.verifyHostKey = verifyHostKey
+        self.userAuthObject = userAuthObject
+
+
+    def clientConnectionLost(self, connector, reason):
+        if self.options['reconnect']:
+            connector.connect()
+
+
+    def clientConnectionFailed(self, connector, reason):
+        if self.d is None:
+            return
+        d, self.d = self.d, None
+        d.errback(reason)
+
+
+    def buildProtocol(self, addr):
+        trans = SSHClientTransport(self)
+        if self.options['ciphers']:
+            trans.supportedCiphers = self.options['ciphers']
+        if self.options['macs']:
+            trans.supportedMACs = self.options['macs']
+        if self.options['compress']:
+            trans.supportedCompressions[0:1] = ['zlib']
+        if self.options['host-key-algorithms']:
+            trans.supportedPublicKeys = self.options['host-key-algorithms']
+        return trans
+
+
+
+class SSHClientTransport(transport.SSHClientTransport):
+
+    def __init__(self, factory):
+        self.factory = factory
+        self.unixServer = None
+
+
+    def connectionLost(self, reason):
+        if self.unixServer:
+            d = self.unixServer.stopListening()
+            self.unixServer = None
+        else:
+            d = defer.succeed(None)
+        d.addCallback(lambda x:
+            transport.SSHClientTransport.connectionLost(self, reason))
+
+
+    def receiveError(self, code, desc):
+        if self.factory.d is None:
+            return
+        d, self.factory.d = self.factory.d, None
+        d.errback(error.ConchError(desc, code))
+
+
+    def sendDisconnect(self, code, reason):
+        if self.factory.d is None:
+            return
+        d, self.factory.d = self.factory.d, None
+        transport.SSHClientTransport.sendDisconnect(self, code, reason)
+        d.errback(error.ConchError(reason, code))
+
+
+    def receiveDebug(self, alwaysDisplay, message, lang):
+        log.msg('Received Debug Message: %s' % message)
+        if alwaysDisplay: # XXX what should happen here?
+            print message
+
+
+    def verifyHostKey(self, pubKey, fingerprint):
+        return self.factory.verifyHostKey(self, self.transport.getPeer().host, pubKey,
+                                          fingerprint)
+
+
+    def setService(self, service):
+        log.msg('setting client server to %s' % service)
+        transport.SSHClientTransport.setService(self, service)
+        if service.name != 'ssh-userauth' and self.factory.d is not None:
+            d, self.factory.d = self.factory.d, None
+            d.callback(None)
+
+
+    def connectionSecure(self):
+        self.requestService(self.factory.userAuthObject)
+
+
+
+def connect(host, port, options, verifyHostKey, userAuthObject):
+    d = defer.Deferred()
+    factory = SSHClientFactory(d, options, verifyHostKey, userAuthObject)
+    reactor.connectTCP(host, port, factory)
+    return d
diff --git a/ThirdParty/Twisted/twisted/conch/client/knownhosts.py b/ThirdParty/Twisted/twisted/conch/client/knownhosts.py
new file mode 100644
index 0000000..48cd89b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/client/knownhosts.py
@@ -0,0 +1,478 @@
+# -*- test-case-name: twisted.conch.test.test_knownhosts -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An implementation of the OpenSSH known_hosts database.
+
+@since: 8.2
+"""
+
+from binascii import Error as DecodeError, b2a_base64
+import hmac
+import sys
+
+from zope.interface import implements
+
+from twisted.python.randbytes import secureRandom
+if sys.version_info >= (2, 5):
+    from twisted.python.hashlib import sha1
+else:
+    # We need to have an object with a method named 'new'.
+    import sha as sha1
+
+from twisted.internet import defer
+
+from twisted.python import log
+from twisted.conch.interfaces import IKnownHostEntry
+from twisted.conch.error import HostKeyChanged, UserRejectedKey, InvalidEntry
+from twisted.conch.ssh.keys import Key, BadKeyError
+
+
+def _b64encode(s):
+    """
+    Encode a binary string as base64 with no trailing newline.
+    """
+    return b2a_base64(s).strip()
+
+
+
+def _extractCommon(string):
+    """
+    Extract common elements of base64 keys from an entry in a hosts file.
+
+    @return: a 4-tuple of hostname data (L{str}), ssh key type (L{str}), key
+    (L{Key}), and comment (L{str} or L{None}).  The hostname data is simply the
+    beginning of the line up to the first occurrence of whitespace.
+    """
+    elements = string.split(None, 2)
+    if len(elements) != 3:
+        raise InvalidEntry()
+    hostnames, keyType, keyAndComment = elements
+    splitkey = keyAndComment.split(None, 1)
+    if len(splitkey) == 2:
+        keyString, comment = splitkey
+        comment = comment.rstrip("\n")
+    else:
+        keyString = splitkey[0]
+        comment = None
+    key = Key.fromString(keyString.decode('base64'))
+    return hostnames, keyType, key, comment
+
+
+
+class _BaseEntry(object):
+    """
+    Abstract base of both hashed and non-hashed entry objects, since they
+    represent keys and key types the same way.
+
+    @ivar keyType: The type of the key; either ssh-dss or ssh-rsa.
+    @type keyType: L{str}
+
+    @ivar publicKey: The server public key indicated by this line.
+    @type publicKey: L{twisted.conch.ssh.keys.Key}
+
+    @ivar comment: Trailing garbage after the key line.
+    @type comment: L{str}
+    """
+
+    def __init__(self, keyType, publicKey, comment):
+        self.keyType = keyType
+        self.publicKey = publicKey
+        self.comment = comment
+
+
+    def matchesKey(self, keyObject):
+        """
+        Check to see if this entry matches a given key object.
+
+        @type keyObject: L{Key}
+
+        @rtype: bool
+        """
+        return self.publicKey == keyObject
+
+
+
+class PlainEntry(_BaseEntry):
+    """
+    A L{PlainEntry} is a representation of a plain-text entry in a known_hosts
+    file.
+
+    @ivar _hostnames: the list of all host-names associated with this entry.
+    @type _hostnames: L{list} of L{str}
+    """
+
+    implements(IKnownHostEntry)
+
+    def __init__(self, hostnames, keyType, publicKey, comment):
+        self._hostnames = hostnames
+        super(PlainEntry, self).__init__(keyType, publicKey, comment)
+
+
+    def fromString(cls, string):
+        """
+        Parse a plain-text entry in a known_hosts file, and return a
+        corresponding L{PlainEntry}.
+
+        @param string: a space-separated string formatted like "hostname
+        key-type base64-key-data comment".
+
+        @type string: L{str}
+
+        @raise DecodeError: if the key is not validly encoded as base64.
+
+        @raise InvalidEntry: if the entry does not have the right number of
+        elements and is therefore invalid.
+
+        @raise BadKeyError: if the key, once decoded from base64, is not
+        actually an SSH key.
+
+        @return: an IKnownHostEntry representing the hostname and key in the
+        input line.
+
+        @rtype: L{PlainEntry}
+        """
+        hostnames, keyType, key, comment = _extractCommon(string)
+        self = cls(hostnames.split(","), keyType, key, comment)
+        return self
+
+    fromString = classmethod(fromString)
+
+
+    def matchesHost(self, hostname):
+        """
+        Check to see if this entry matches a given hostname.
+
+        @type hostname: L{str}
+
+        @rtype: bool
+        """
+        return hostname in self._hostnames
+
+
+    def toString(self):
+        """
+        Implement L{IKnownHostEntry.toString} by recording the comma-separated
+        hostnames, key type, and base-64 encoded key.
+        """
+        fields = [','.join(self._hostnames),
+                  self.keyType,
+                  _b64encode(self.publicKey.blob())]
+        if self.comment is not None:
+            fields.append(self.comment)
+        return ' '.join(fields)
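+
+# Illustrative round trip (the key data is a placeholder and must be real
+# base64-encoded SSH key data for fromString to succeed):
+#
+#     line = "example.com ssh-rsa <base64-key-data> optional comment"
+#     entry = PlainEntry.fromString(line)
+#     entry.matchesHost("example.com")   # -> True
+#     entry.toString()                   # same fields, without the newline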
+
+
+class UnparsedEntry(object):
+    """
+    L{UnparsedEntry} is an entry in a L{KnownHostsFile} which can't actually be
+    parsed; therefore it matches no keys and no hosts.
+    """
+
+    implements(IKnownHostEntry)
+
+    def __init__(self, string):
+        """
+        Create an unparsed entry from a line in a known_hosts file which cannot
+        otherwise be parsed.
+        """
+        self._string = string
+
+
+    def matchesHost(self, hostname):
+        """
+        Always returns False.
+        """
+        return False
+
+
+    def matchesKey(self, key):
+        """
+        Always returns False.
+        """
+        return False
+
+
+    def toString(self):
+        """
+        Returns the input line, without its newline if one was given.
+        """
+        return self._string.rstrip("\n")
+
+
+
+def _hmacedString(key, string):
+    """
+    Return the SHA-1 HMAC hash of the given key and string.
+    """
+    hash = hmac.HMAC(key, digestmod=sha1)
+    hash.update(string)
+    return hash.digest()
+
+
+
+class HashedEntry(_BaseEntry):
+    """
+    A L{HashedEntry} is a representation of an entry in a known_hosts file
+    where the hostname has been hashed and salted.
+
+    @ivar _hostSalt: the salt to combine with a hostname for hashing.
+
+    @ivar _hostHash: the hashed representation of the hostname.
+
+    @cvar MAGIC: the 'hash magic' string used to identify a hashed line in a
+    known_hosts file as opposed to a plaintext one.
+    """
+
+    implements(IKnownHostEntry)
+
+    MAGIC = '|1|'
+
+    def __init__(self, hostSalt, hostHash, keyType, publicKey, comment):
+        self._hostSalt = hostSalt
+        self._hostHash = hostHash
+        super(HashedEntry, self).__init__(keyType, publicKey, comment)
+
+
+    def fromString(cls, string):
+        """
+        Load a hashed entry from a string representing a line in a known_hosts
+        file.
+
+        @raise DecodeError: if the key, the salt, or the host hash is not
+        validly encoded as base64.
+
+        @raise InvalidEntry: if the entry does not have the right number of
+        elements and is therefore invalid, or the host/hash portion contains
+        more items than just the host and hash.
+
+        @raise BadKeyError: if the key, once decoded from base64, is not
+        actually an SSH key.
+        """
+        stuff, keyType, key, comment = _extractCommon(string)
+        saltAndHash = stuff[len(cls.MAGIC):].split("|")
+        if len(saltAndHash) != 2:
+            raise InvalidEntry()
+        hostSalt, hostHash = saltAndHash
+        self = cls(hostSalt.decode("base64"), hostHash.decode("base64"),
+                   keyType, key, comment)
+        return self
+
+    fromString = classmethod(fromString)
+
+
+    def matchesHost(self, hostname):
+        """
+        Implement L{IKnownHostEntry.matchesHost} to compare the hash of the
+        input to the stored hash.
+        """
+        return (_hmacedString(self._hostSalt, hostname) == self._hostHash)
+
+
+    def toString(self):
+        """
+        Implement L{IKnownHostEntry.toString} by base64-encoding the salt, host
+        hash, and key.
+        """
+        fields = [self.MAGIC + '|'.join([_b64encode(self._hostSalt),
+                                         _b64encode(self._hostHash)]),
+                  self.keyType,
+                  _b64encode(self.publicKey.blob())]
+        if self.comment is not None:
+            fields.append(self.comment)
+        return ' '.join(fields)
+
+
+
+class KnownHostsFile(object):
+    """
+    A structured representation of an OpenSSH-format ~/.ssh/known_hosts file.
+
+    @ivar _entries: a list of L{IKnownHostEntry} providers.
+
+    @ivar _savePath: the L{FilePath} to save new entries to.
+    """
+
+    def __init__(self, savePath):
+        """
+        Create a new, empty KnownHostsFile.
+
+        You want to use L{KnownHostsFile.fromPath} to parse one of these.
+        """
+        self._entries = []
+        self._savePath = savePath
+
+
+    def hasHostKey(self, hostname, key):
+        """
+        @return: True if the given hostname and key are present in this file,
+        False if they are not.
+
+        @rtype: L{bool}
+
+        @raise HostKeyChanged: if the host key found for the given hostname
+        does not match the given key.
+        """
+        for lineidx, entry in enumerate(self._entries):
+            if entry.matchesHost(hostname):
+                if entry.matchesKey(key):
+                    return True
+                else:
+                    raise HostKeyChanged(entry, self._savePath, lineidx + 1)
+        return False
+
+
+    def verifyHostKey(self, ui, hostname, ip, key):
+        """
+        Verify the given host key for the given IP and host, asking for
+        confirmation from, and notifying, the given UI about changes to this
+        file.
+
+        @param ui: The user interface to request confirmation from, and to
+        notify of changes to this file.
+
+        @param hostname: The hostname that the user requested to connect to.
+
+        @param ip: The string representation of the IP address that is actually
+        being connected to.
+
+        @param key: The public key of the server.
+
+        @return: a L{Deferred} that fires with True when the key has been
+        verified, or fires with an errback when the key either cannot be
+        verified or has changed.
+
+        @rtype: L{Deferred}
+        """
+        hhk = defer.maybeDeferred(self.hasHostKey, hostname, key)
+        def gotHasKey(result):
+            if result:
+                if not self.hasHostKey(ip, key):
+                    ui.warn("Warning: Permanently added the %s host key for "
+                            "IP address '%s' to the list of known hosts." %
+                            (key.type(), ip))
+                    self.addHostKey(ip, key)
+                    self.save()
+                return result
+            else:
+                def promptResponse(response):
+                    if response:
+                        self.addHostKey(hostname, key)
+                        self.addHostKey(ip, key)
+                        self.save()
+                        return response
+                    else:
+                        raise UserRejectedKey()
+                return ui.prompt(
+                    "The authenticity of host '%s (%s)' "
+                    "can't be established.\n"
+                    "RSA key fingerprint is %s.\n"
+                    "Are you sure you want to continue connecting (yes/no)? " %
+                    (hostname, ip, key.fingerprint())).addCallback(promptResponse)
+        return hhk.addCallback(gotHasKey)
+
+
+    def addHostKey(self, hostname, key):
+        """
+        Add a new L{HashedEntry} to the key database.
+
+        Note that you still need to call L{KnownHostsFile.save} if you wish
+        these changes to be persisted.
+
+        @return: the L{HashedEntry} that was added.
+        """
+        salt = secureRandom(20)
+        keyType = "ssh-" + key.type().lower()
+        entry = HashedEntry(salt, _hmacedString(salt, hostname),
+                            keyType, key, None)
+        self._entries.append(entry)
+        return entry
+
+
+    def save(self):
+        """
+        Save this L{KnownHostsFile} to the path it was loaded from.
+        """
+        p = self._savePath.parent()
+        if not p.isdir():
+            p.makedirs()
+        self._savePath.setContent('\n'.join(
+                [entry.toString() for entry in self._entries]) + "\n")
+
+
+    def fromPath(cls, path):
+        """
+        @param path: A path object to use for both reading contents from and
+        later saving to.
+
+        @type path: L{FilePath}
+        """
+        self = cls(path)
+        try:
+            fp = path.open()
+        except IOError:
+            return self
+        for line in fp:
+            try:
+                if line.startswith(HashedEntry.MAGIC):
+                    entry = HashedEntry.fromString(line)
+                else:
+                    entry = PlainEntry.fromString(line)
+            except (DecodeError, InvalidEntry, BadKeyError):
+                entry = UnparsedEntry(line)
+            self._entries.append(entry)
+        return self
+
+    fromPath = classmethod(fromPath)
+
+
+class ConsoleUI(object):
+    """
+    A UI object that can ask true/false questions and post notifications on the
+    console, to be used during key verification.
+
+    @ivar opener: a no-argument callable which should open a console file-like
+    object to be used for reading and writing.
+    """
+
+    def __init__(self, opener):
+        self.opener = opener
+
+
+    def prompt(self, text):
+        """
+        Write the given text as a prompt to the console output, then read a
+        result from the console input.
+
+        @return: a L{Deferred} which fires with L{True} when the user answers
+        'yes' and L{False} when the user answers 'no'.  It may errback if there
+        were any I/O errors.
+        """
+        d = defer.succeed(None)
+        def body(ignored):
+            f = self.opener()
+            f.write(text)
+            while True:
+                answer = f.readline().strip().lower()
+                if answer == 'yes':
+                    f.close()
+                    return True
+                elif answer == 'no':
+                    f.close()
+                    return False
+                else:
+                    f.write("Please type 'yes' or 'no': ")
+        return d.addCallback(body)
+
+
+    def warn(self, text):
+        """
+        Notify the user (non-interactively) of the provided text, by writing it
+        to the console.
+        """
+        try:
+            f = self.opener()
+            f.write(text)
+            f.close()
+        except:
+            log.err()
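
As a rough usage sketch (not part of the patch), the KnownHostsFile and
ConsoleUI classes above can be wired together as follows; the known_hosts
path, the /dev/tty opener and the key loading via twisted.conch.ssh.keys.Key
are assumptions made purely for illustration:

    from twisted.python.filepath import FilePath
    from twisted.conch.ssh.keys import Key          # assumed key loader
    from twisted.conch.client.knownhosts import KnownHostsFile, ConsoleUI

    knownHosts = KnownHostsFile.fromPath(FilePath("/home/alice/.ssh/known_hosts"))
    ui = ConsoleUI(lambda: open("/dev/tty", "r+"))

    # The server key would normally come from the SSH transport during key
    # exchange; loading it from a hypothetical file keeps the sketch short.
    serverKey = Key.fromString(open("server_key.pub").read())

    d = knownHosts.verifyHostKey(ui, "example.com", "192.0.2.1", serverKey)
    d.addCallback(lambda verified: ui.warn("Host key accepted.\n"))
    d.addErrback(lambda reason: ui.warn("Host key rejected: %s\n" % (reason.value,)))
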
diff --git a/ThirdParty/Twisted/twisted/conch/client/options.py b/ThirdParty/Twisted/twisted/conch/client/options.py
new file mode 100644
index 0000000..8550573
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/client/options.py
@@ -0,0 +1,96 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+from twisted.conch.ssh.transport import SSHClientTransport, SSHCiphers
+from twisted.python import usage
+
+import sys
+
+class ConchOptions(usage.Options):
+
+    optParameters = [['user', 'l', None, 'Log in using this user name.'],
+                     ['identity', 'i', None],
+                     ['ciphers', 'c', None],
+                     ['macs', 'm', None],
+                     ['port', 'p', None, 'Connect to this port.  Server must be on the same port.'],
+                     ['option', 'o', None, 'Ignored OpenSSH options'],
+                     ['host-key-algorithms', '', None],
+                     ['known-hosts', '', None, 'File to check for host keys'],
+                     ['user-authentications', '', None, 'Types of user authentications to use.'],
+                     ['logfile', '', None, 'File to log to, or - for stdout'],
+                   ]
+
+    optFlags = [['version', 'V', 'Display version number only.'],
+                ['compress', 'C', 'Enable compression.'],
+                ['log', 'v', 'Enable logging (defaults to stderr)'],
+                ['nox11', 'x', 'Disable X11 connection forwarding (default)'],
+                ['agent', 'A', 'Enable authentication agent forwarding'],
+                ['noagent', 'a', 'Disable authentication agent forwarding (default)'],
+                ['reconnect', 'r', 'Reconnect to the server if the connection is lost.'],
+               ]
+
+    compData = usage.Completions(
+        mutuallyExclusive=[("agent", "noagent")],
+        optActions={
+            "user": usage.CompleteUsernames(),
+            "ciphers": usage.CompleteMultiList(
+                SSHCiphers.cipherMap.keys(),
+                descr='ciphers to choose from'),
+            "macs": usage.CompleteMultiList(
+                SSHCiphers.macMap.keys(),
+                descr='macs to choose from'),
+            "host-key-algorithms": usage.CompleteMultiList(
+                SSHClientTransport.supportedPublicKeys,
+                descr='host key algorithms to choose from'),
+            #"user-authentications": usage.CompleteMultiList(?
+            # descr='user authentication types' ),
+            },
+        extraActions=[usage.CompleteUserAtHost(),
+                      usage.Completer(descr="command"),
+                      usage.Completer(descr='argument',
+                                      repeat=True)]
+        )
+
+    def __init__(self, *args, **kw):
+        usage.Options.__init__(self, *args, **kw)
+        self.identitys = []
+        self.conns = None
+
+    def opt_identity(self, i):
+        """Identity for public-key authentication"""
+        self.identitys.append(i)
+
+    def opt_ciphers(self, ciphers):
+        "Select encryption algorithms"
+        ciphers = ciphers.split(',')
+        for cipher in ciphers:
+            if not SSHCiphers.cipherMap.has_key(cipher):
+                sys.exit("Unknown cipher type '%s'" % cipher)
+        self['ciphers'] = ciphers
+
+
+    def opt_macs(self, macs):
+        "Specify MAC algorithms"
+        macs = macs.split(',')
+        for mac in macs:
+            if not SSHCiphers.macMap.has_key(mac):
+                sys.exit("Unknown mac type '%s'" % mac)
+        self['macs'] = macs
+
+    def opt_host_key_algorithms(self, hkas):
+        "Select host key algorithms"
+        hkas = hkas.split(',')
+        for hka in hkas:
+            if hka not in SSHClientTransport.supportedPublicKeys:
+                sys.exit("Unknown host key type '%s'" % hka)
+        self['host-key-algorithms'] = hkas
+
+    def opt_user_authentications(self, uas):
+        "Choose how to authenticate to the remote server"
+        self['user-authentications'] = uas.split(',')
+
+#    def opt_compress(self):
+#        "Enable compression"
+#        self.enableCompression = 1
+#        SSHClientTransport.supportedCompressions[0:1] = ['zlib']
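
A small sketch (not part of the patch) of ConchOptions parsing a conch-style
command line; the argument values are invented and the cipher names are
assumed to be present in SSHCiphers.cipherMap:

    from twisted.conch.client.options import ConchOptions

    options = ConchOptions()
    options.parseOptions(["-l", "alice", "-p", "2222",
                          "-i", "id_rsa", "-c", "aes256-ctr,aes128-ctr"])

    print options['user']      # 'alice'
    print options['port']      # '2222'
    print options.identitys    # ['id_rsa'], collected by opt_identity()
    print options['ciphers']   # ['aes256-ctr', 'aes128-ctr'], via opt_ciphers()
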
diff --git a/ThirdParty/Twisted/twisted/conch/error.py b/ThirdParty/Twisted/twisted/conch/error.py
new file mode 100644
index 0000000..a3bcc65
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/error.py
@@ -0,0 +1,102 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An error to represent bad things happening in Conch.
+
+Maintainer: Paul Swartz
+"""
+
+from twisted.cred.error import UnauthorizedLogin
+
+
+
+class ConchError(Exception):
+    def __init__(self, value, data = None):
+        Exception.__init__(self, value, data)
+        self.value = value
+        self.data = data
+
+
+
+class NotEnoughAuthentication(Exception):
+    """
+    This is thrown if the authentication is valid, but is not enough to
+    successfully verify the user; i.e., don't retry this type of
+    authentication, try another one.
+    """
+
+
+
+class ValidPublicKey(UnauthorizedLogin):
+    """
+    Raised by public key checkers when they receive public key credentials
+    that don't contain a signature at all, but are valid in every other way.
+    (e.g. the public key matches one in the user's authorized_keys file).
+
+    Protocol code (e.g.
+    L{SSHUserAuthServer<twisted.conch.ssh.userauth.SSHUserAuthServer>}) which
+    attempts to log in using
+    L{ISSHPrivateKey<twisted.cred.credentials.ISSHPrivateKey>} credentials
+    should be prepared to handle a failure of this type by telling the user to
+    re-authenticate using the same key and to include a signature with the new
+    attempt.
+
+    See U{http://www.ietf.org/rfc/rfc4252.txt} section 7 for more details.
+    """
+
+
+
+class IgnoreAuthentication(Exception):
+    """
+    This is thrown to let the UserAuthServer know it doesn't need to handle the
+    authentication anymore.
+    """
+
+
+
+class MissingKeyStoreError(Exception):
+    """
+    Raised if an SSHAgentServer starts receiving data without its factory
+    providing a keys dict on which to read/write key data.
+    """
+
+
+
+class UserRejectedKey(Exception):
+    """
+    The user interactively rejected a key.
+    """
+
+
+
+class InvalidEntry(Exception):
+    """
+    An entry in a known_hosts file could not be interpreted as a valid entry.
+    """
+
+
+
+class HostKeyChanged(Exception):
+    """
+    The host key of a remote host has changed.
+
+    @ivar offendingEntry: The entry which contains the persistent host key that
+    disagrees with the given host key.
+
+    @type offendingEntry: L{twisted.conch.interfaces.IKnownHostEntry}
+
+    @ivar path: a reference to the known_hosts file that the offending entry
+    was loaded from
+
+    @type path: L{twisted.python.filepath.FilePath}
+
+    @ivar lineno: The line number of the offending entry in the given path.
+
+    @type lineno: L{int}
+    """
+    def __init__(self, offendingEntry, path, lineno):
+        Exception.__init__(self)
+        self.offendingEntry = offendingEntry
+        self.path = path
+        self.lineno = lineno
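
A short sketch (not part of the patch) of how a caller of
KnownHostsFile.hasHostKey might handle the exceptions defined above; the
checkHost helper is invented for illustration:

    from twisted.conch.error import HostKeyChanged

    def checkHost(knownHosts, hostname, key):
        try:
            return knownHosts.hasHostKey(hostname, key)
        except HostKeyChanged, e:
            # The conflicting entry, its file and its line number are carried
            # on the exception for a useful diagnostic.
            print "Key for %s changed (%s, line %d)" % (
                hostname, e.path.path, e.lineno)
            raise
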
diff --git a/ThirdParty/Twisted/twisted/conch/insults/__init__.py b/ThirdParty/Twisted/twisted/conch/insults/__init__.py
new file mode 100644
index 0000000..c070d4f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/insults/__init__.py
@@ -0,0 +1,16 @@
+"""
+Insults: a replacement for Curses/S-Lang.
+
+Very basic at the moment."""
+
+from twisted.python import deprecate, versions
+
+deprecate.deprecatedModuleAttribute(
+    versions.Version("Twisted", 10, 1, 0),
+    "Please use twisted.conch.insults.helper instead.",
+    __name__, "colors")
+
+deprecate.deprecatedModuleAttribute(
+    versions.Version("Twisted", 10, 1, 0),
+    "Please use twisted.conch.insults.insults instead.",
+    __name__, "client")
diff --git a/ThirdParty/Twisted/twisted/conch/insults/client.py b/ThirdParty/Twisted/twisted/conch/insults/client.py
new file mode 100644
index 0000000..89c79cd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/insults/client.py
@@ -0,0 +1,138 @@
+"""
+You don't really want to use this module. Try insults.py instead.
+"""
+
+from twisted.internet import protocol
+
+class InsultsClient(protocol.Protocol):
+
+    escapeTimeout = 0.2
+
+    def __init__(self):
+        self.width = self.height = None
+        self.xpos = self.ypos = 0
+        self.commandQueue = []
+        self.inEscape = ''
+
+    def setSize(self, width, height):
+        call = 0
+        if self.width:
+            call = 1
+        self.width = width
+        self.height = height
+        if call:
+            self.windowSizeChanged()
+
+    def dataReceived(self, data):
+        from twisted.internet import reactor
+        for ch in data:
+            if ch == '\x1b':
+                if self.inEscape:
+                    self.keyReceived(ch)
+                    self.inEscape = ''
+                else:
+                    self.inEscape = ch
+                    self.escapeCall = reactor.callLater(self.escapeTimeout,
+                                                        self.endEscape)
+            elif ch in 'ABCD' and self.inEscape:
+                self.inEscape = ''
+                self.escapeCall.cancel()
+                if ch == 'A':
+                    self.keyReceived('<Up>')
+                elif ch == 'B':
+                    self.keyReceived('<Down>')
+                elif ch == 'C':
+                    self.keyReceived('<Right>')
+                elif ch == 'D':
+                    self.keyReceived('<Left>')
+            elif self.inEscape:
+                self.inEscape += ch
+            else:
+                self.keyReceived(ch)
+
+    def endEscape(self):
+        ch = self.inEscape
+        self.inEscape = ''
+        self.keyReceived(ch)
+
+    def initScreen(self):
+        self.transport.write('\x1b=\x1b[?1h')
+
+    def gotoXY(self, x, y):
+        """Go to a position on the screen.
+        """
+        self.xpos = x
+        self.ypos = y
+        self.commandQueue.append(('gotoxy', x, y))
+
+    def writeCh(self, ch):
+        """Write a character to the screen.  If we're at the end of the row,
+        ignore the write.
+        """
+        if self.xpos < self.width - 1:
+            self.commandQueue.append(('write', ch))
+            self.xpos += 1
+
+    def writeStr(self, s):
+        """Write a string to the screen.  This does not wrap a the edge of the
+        screen, and stops at \\r and \\n.
+        """
+        s = s[:self.width-self.xpos]
+        if '\n' in s:
+            s=s[:s.find('\n')]
+        if '\r' in s:
+            s=s[:s.find('\r')]
+        self.commandQueue.append(('write', s))
+        self.xpos += len(s)
+
+    def eraseToLine(self):
+        """Erase from the current position to the end of the line.
+        """
+        self.commandQueue.append(('eraseeol',))
+
+    def eraseToScreen(self):
+        """Erase from the current position to the end of the screen.
+        """
+        self.commandQueue.append(('eraseeos',))
+    
+    def clearScreen(self):
+        """Clear the screen, and return the cursor to 0, 0.
+        """
+        self.commandQueue = [('cls',)]
+        self.xpos = self.ypos = 0
+
+    def setAttributes(self, *attrs):
+        """Set the attributes for drawing on the screen.
+        """
+        self.commandQueue.append(('attributes', attrs))
+
+    def refresh(self):
+        """Redraw the screen.
+        """
+        redraw = ''
+        for command in self.commandQueue:
+            if command[0] == 'gotoxy':
+                redraw += '\x1b[%i;%iH' % (command[2]+1, command[1]+1)
+            elif command[0] == 'write':
+                redraw += command[1]
+            elif command[0] == 'eraseeol':
+                redraw += '\x1b[0K'
+            elif command[0] == 'eraseeos':
+                redraw += '\x1b[0J'
+            elif command[0] == 'cls':
+                redraw += '\x1b[H\x1b[J'
+            elif command[0] == 'attributes':
+                redraw += '\x1b[%sm' % ';'.join(map(str, command[1]))
+            else:
+                print command
+        self.commandQueue = []
+        self.transport.write(redraw)
+
+    def windowSizeChanged(self):
+        """Called when the size of the window changes.
+        Might want to redraw the screen here, or something.
+        """
+
+    def keyReceived(self, key):
+        """Called when the user hits a key.
+        """
diff --git a/ThirdParty/Twisted/twisted/conch/insults/colors.py b/ThirdParty/Twisted/twisted/conch/insults/colors.py
new file mode 100644
index 0000000..c12ab16
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/insults/colors.py
@@ -0,0 +1,29 @@
+"""
+You don't really want to use this module. Try helper.py instead.
+"""
+
+CLEAR = 0
+BOLD = 1
+DIM = 2
+ITALIC = 3
+UNDERSCORE = 4
+BLINK_SLOW = 5
+BLINK_FAST = 6
+REVERSE = 7
+CONCEALED = 8
+FG_BLACK = 30
+FG_RED = 31
+FG_GREEN = 32
+FG_YELLOW = 33
+FG_BLUE = 34
+FG_MAGENTA = 35
+FG_CYAN = 36
+FG_WHITE = 37
+BG_BLACK = 40
+BG_RED = 41
+BG_GREEN = 42
+BG_YELLOW = 43
+BG_BLUE = 44
+BG_MAGENTA = 45
+BG_CYAN = 46
+BG_WHITE = 47
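
The constants above are raw SGR codes (and __init__.py above marks this
module as deprecated); a small sketch, not part of the patch, of how they
compose into an escape sequence:

    from twisted.conch.insults.colors import BOLD, FG_RED, BG_BLACK

    # '\x1b[1;31;40m' -- bold red text on a black background
    sequence = '\x1b[%sm' % ';'.join(map(str, (BOLD, FG_RED, BG_BLACK)))
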
diff --git a/ThirdParty/Twisted/twisted/conch/insults/helper.py b/ThirdParty/Twisted/twisted/conch/insults/helper.py
new file mode 100644
index 0000000..ed645c4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/insults/helper.py
@@ -0,0 +1,450 @@
+# -*- test-case-name: twisted.conch.test.test_helper -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Partial in-memory terminal emulator
+
+@author: Jp Calderone
+"""
+
+import re, string
+
+from zope.interface import implements
+
+from twisted.internet import defer, protocol, reactor
+from twisted.python import log
+
+from twisted.conch.insults import insults
+
+FOREGROUND = 30
+BACKGROUND = 40
+BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE, N_COLORS = range(9)
+
+class CharacterAttribute:
+    """Represents the attributes of a single character.
+
+    Character set, intensity, underline, blink, video reversal, and the
+    foreground and background colors together make up a character's
+    attributes.
+    """
+    def __init__(self, charset=insults.G0,
+                 bold=False, underline=False,
+                 blink=False, reverseVideo=False,
+                 foreground=WHITE, background=BLACK,
+
+                 _subtracting=False):
+        self.charset = charset
+        self.bold = bold
+        self.underline = underline
+        self.blink = blink
+        self.reverseVideo = reverseVideo
+        self.foreground = foreground
+        self.background = background
+
+        self._subtracting = _subtracting
+
+    def __eq__(self, other):
+        return vars(self) == vars(other)
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+    def copy(self):
+        c = self.__class__()
+        c.__dict__.update(vars(self))
+        return c
+
+    def wantOne(self, **kw):
+        k, v = kw.popitem()
+        if getattr(self, k) != v:
+            attr = self.copy()
+            attr._subtracting = not v
+            setattr(attr, k, v)
+            return attr
+        else:
+            return self.copy()
+
+    def toVT102(self):
+        # Spit out a vt102 control sequence that will set up
+        # all the attributes set here.  Except charset.
+        attrs = []
+        if self._subtracting:
+            attrs.append(0)
+        if self.bold:
+            attrs.append(insults.BOLD)
+        if self.underline:
+            attrs.append(insults.UNDERLINE)
+        if self.blink:
+            attrs.append(insults.BLINK)
+        if self.reverseVideo:
+            attrs.append(insults.REVERSE_VIDEO)
+        if self.foreground != WHITE:
+            attrs.append(FOREGROUND + self.foreground)
+        if self.background != BLACK:
+            attrs.append(BACKGROUND + self.background)
+        if attrs:
+            return '\x1b[' + ';'.join(map(str, attrs)) + 'm'
+        return ''
+
+# XXX - need to support scroll regions and scroll history
+class TerminalBuffer(protocol.Protocol):
+    """
+    An in-memory terminal emulator.
+    """
+    implements(insults.ITerminalTransport)
+
+    for keyID in ('UP_ARROW', 'DOWN_ARROW', 'RIGHT_ARROW', 'LEFT_ARROW',
+                  'HOME', 'INSERT', 'DELETE', 'END', 'PGUP', 'PGDN',
+                  'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9',
+                  'F10', 'F11', 'F12'):
+        exec '%s = object()' % (keyID,)
+
+    TAB = '\t'
+    BACKSPACE = '\x7f'
+
+    width = 80
+    height = 24
+
+    fill = ' '
+    void = object()
+
+    def getCharacter(self, x, y):
+        return self.lines[y][x]
+
+    def connectionMade(self):
+        self.reset()
+
+    def write(self, bytes):
+        """
+        Add the given printable bytes to the terminal.
+
+        Line feeds in C{bytes} will be replaced with carriage return / line
+        feed pairs.
+        """
+        for b in bytes.replace('\n', '\r\n'):
+            self.insertAtCursor(b)
+
+    def _currentCharacterAttributes(self):
+        return CharacterAttribute(self.activeCharset, **self.graphicRendition)
+
+    def insertAtCursor(self, b):
+        """
+        Add one byte to the terminal at the cursor and make consequent state
+        updates.
+
+        If b is a carriage return, move the cursor to the beginning of the
+        current row.
+
+        If b is a line feed, move the cursor to the next row or scroll down if
+        the cursor is already in the last row.
+
+        Otherwise, if b is printable, put it at the cursor position (inserting
+        or overwriting as dictated by the current mode) and move the cursor.
+        """
+        if b == '\r':
+            self.x = 0
+        elif b == '\n':
+            self._scrollDown()
+        elif b in string.printable:
+            if self.x >= self.width:
+                self.nextLine()
+            ch = (b, self._currentCharacterAttributes())
+            if self.modes.get(insults.modes.IRM):
+                self.lines[self.y][self.x:self.x] = [ch]
+                self.lines[self.y].pop()
+            else:
+                self.lines[self.y][self.x] = ch
+            self.x += 1
+
+    def _emptyLine(self, width):
+        return [(self.void, self._currentCharacterAttributes()) for i in xrange(width)]
+
+    def _scrollDown(self):
+        self.y += 1
+        if self.y >= self.height:
+            self.y -= 1
+            del self.lines[0]
+            self.lines.append(self._emptyLine(self.width))
+
+    def _scrollUp(self):
+        self.y -= 1
+        if self.y < 0:
+            self.y = 0
+            del self.lines[-1]
+            self.lines.insert(0, self._emptyLine(self.width))
+
+    def cursorUp(self, n=1):
+        self.y = max(0, self.y - n)
+
+    def cursorDown(self, n=1):
+        self.y = min(self.height - 1, self.y + n)
+
+    def cursorBackward(self, n=1):
+        self.x = max(0, self.x - n)
+
+    def cursorForward(self, n=1):
+        self.x = min(self.width, self.x + n)
+
+    def cursorPosition(self, column, line):
+        self.x = column
+        self.y = line
+
+    def cursorHome(self):
+        self.x = self.home.x
+        self.y = self.home.y
+
+    def index(self):
+        self._scrollDown()
+
+    def reverseIndex(self):
+        self._scrollUp()
+
+    def nextLine(self):
+        """
+        Update the cursor position attributes and scroll down if appropriate.
+        """
+        self.x = 0
+        self._scrollDown()
+
+    def saveCursor(self):
+        self._savedCursor = (self.x, self.y)
+
+    def restoreCursor(self):
+        self.x, self.y = self._savedCursor
+        del self._savedCursor
+
+    def setModes(self, modes):
+        for m in modes:
+            self.modes[m] = True
+
+    def resetModes(self, modes):
+        for m in modes:
+            try:
+                del self.modes[m]
+            except KeyError:
+                pass
+
+
+    def setPrivateModes(self, modes):
+        """
+        Enable the given modes.
+
+        Track which modes have been enabled so that the implementations of
+        other L{insults.ITerminalTransport} methods can be properly implemented
+        to respect these settings.
+
+        @see: L{resetPrivateModes}
+        @see: L{insults.ITerminalTransport.setPrivateModes}
+        """
+        for m in modes:
+            self.privateModes[m] = True
+
+
+    def resetPrivateModes(self, modes):
+        """
+        Disable the given modes.
+
+        @see: L{setPrivateModes}
+        @see: L{insults.ITerminalTransport.resetPrivateModes}
+        """
+        for m in modes:
+            try:
+                del self.privateModes[m]
+            except KeyError:
+                pass
+
+
+    def applicationKeypadMode(self):
+        self.keypadMode = 'app'
+
+    def numericKeypadMode(self):
+        self.keypadMode = 'num'
+
+    def selectCharacterSet(self, charSet, which):
+        self.charsets[which] = charSet
+
+    def shiftIn(self):
+        self.activeCharset = insults.G0
+
+    def shiftOut(self):
+        self.activeCharset = insults.G1
+
+    def singleShift2(self):
+        oldActiveCharset = self.activeCharset
+        self.activeCharset = insults.G2
+        f = self.insertAtCursor
+        def insertAtCursor(b):
+            f(b)
+            del self.insertAtCursor
+            self.activeCharset = oldActiveCharset
+        self.insertAtCursor = insertAtCursor
+
+    def singleShift3(self):
+        oldActiveCharset = self.activeCharset
+        self.activeCharset = insults.G3
+        f = self.insertAtCursor
+        def insertAtCursor(b):
+            f(b)
+            del self.insertAtCursor
+            self.activeCharset = oldActiveCharset
+        self.insertAtCursor = insertAtCursor
+
+    def selectGraphicRendition(self, *attributes):
+        for a in attributes:
+            if a == insults.NORMAL:
+                self.graphicRendition = {
+                    'bold': False,
+                    'underline': False,
+                    'blink': False,
+                    'reverseVideo': False,
+                    'foreground': WHITE,
+                    'background': BLACK}
+            elif a == insults.BOLD:
+                self.graphicRendition['bold'] = True
+            elif a == insults.UNDERLINE:
+                self.graphicRendition['underline'] = True
+            elif a == insults.BLINK:
+                self.graphicRendition['blink'] = True
+            elif a == insults.REVERSE_VIDEO:
+                self.graphicRendition['reverseVideo'] = True
+            else:
+                try:
+                    v = int(a)
+                except ValueError:
+                    log.msg("Unknown graphic rendition attribute: " + repr(a))
+                else:
+                    if FOREGROUND <= v <= FOREGROUND + N_COLORS:
+                        self.graphicRendition['foreground'] = v - FOREGROUND
+                    elif BACKGROUND <= v <= BACKGROUND + N_COLORS:
+                        self.graphicRendition['background'] = v - BACKGROUND
+                    else:
+                        log.msg("Unknown graphic rendition attribute: " + repr(a))
+
+    def eraseLine(self):
+        self.lines[self.y] = self._emptyLine(self.width)
+
+    def eraseToLineEnd(self):
+        width = self.width - self.x
+        self.lines[self.y][self.x:] = self._emptyLine(width)
+
+    def eraseToLineBeginning(self):
+        self.lines[self.y][:self.x + 1] = self._emptyLine(self.x + 1)
+
+    def eraseDisplay(self):
+        self.lines = [self._emptyLine(self.width) for i in xrange(self.height)]
+
+    def eraseToDisplayEnd(self):
+        self.eraseToLineEnd()
+        height = self.height - self.y - 1
+        self.lines[self.y + 1:] = [self._emptyLine(self.width) for i in range(height)]
+
+    def eraseToDisplayBeginning(self):
+        self.eraseToLineBeginning()
+        self.lines[:self.y] = [self._emptyLine(self.width) for i in range(self.y)]
+
+    def deleteCharacter(self, n=1):
+        del self.lines[self.y][self.x:self.x+n]
+        self.lines[self.y].extend(self._emptyLine(min(self.width - self.x, n)))
+
+    def insertLine(self, n=1):
+        self.lines[self.y:self.y] = [self._emptyLine(self.width) for i in range(n)]
+        del self.lines[self.height:]
+
+    def deleteLine(self, n=1):
+        del self.lines[self.y:self.y+n]
+        self.lines.extend([self._emptyLine(self.width) for i in range(n)])
+
+    def reportCursorPosition(self):
+        return (self.x, self.y)
+
+    def reset(self):
+        self.home = insults.Vector(0, 0)
+        self.x = self.y = 0
+        self.modes = {}
+        self.privateModes = {}
+        self.setPrivateModes([insults.privateModes.AUTO_WRAP,
+                              insults.privateModes.CURSOR_MODE])
+        self.numericKeypad = 'app'
+        self.activeCharset = insults.G0
+        self.graphicRendition = {
+            'bold': False,
+            'underline': False,
+            'blink': False,
+            'reverseVideo': False,
+            'foreground': WHITE,
+            'background': BLACK}
+        self.charsets = {
+            insults.G0: insults.CS_US,
+            insults.G1: insults.CS_US,
+            insults.G2: insults.CS_ALTERNATE,
+            insults.G3: insults.CS_ALTERNATE_SPECIAL}
+        self.eraseDisplay()
+
+    def unhandledControlSequence(self, buf):
+        print 'Could not handle', repr(buf)
+
+    def __str__(self):
+        lines = []
+        for L in self.lines:
+            buf = []
+            length = 0
+            for (ch, attr) in L:
+                if ch is not self.void:
+                    buf.append(ch)
+                    length = len(buf)
+                else:
+                    buf.append(self.fill)
+            lines.append(''.join(buf[:length]))
+        return '\n'.join(lines)
+
+class ExpectationTimeout(Exception):
+    pass
+
+class ExpectableBuffer(TerminalBuffer):
+    _mark = 0
+
+    def connectionMade(self):
+        TerminalBuffer.connectionMade(self)
+        self._expecting = []
+
+    def write(self, bytes):
+        TerminalBuffer.write(self, bytes)
+        self._checkExpected()
+
+    def cursorHome(self):
+        TerminalBuffer.cursorHome(self)
+        self._mark = 0
+
+    def _timeoutExpected(self, d):
+        d.errback(ExpectationTimeout())
+        self._checkExpected()
+
+    def _checkExpected(self):
+        s = str(self)[self._mark:]
+        while self._expecting:
+            expr, timer, deferred = self._expecting[0]
+            if timer and not timer.active():
+                del self._expecting[0]
+                continue
+            for match in expr.finditer(s):
+                if timer:
+                    timer.cancel()
+                del self._expecting[0]
+                self._mark += match.end()
+                s = s[match.end():]
+                deferred.callback(match)
+                break
+            else:
+                return
+
+    def expect(self, expression, timeout=None, scheduler=reactor):
+        d = defer.Deferred()
+        timer = None
+        if timeout:
+            timer = scheduler.callLater(timeout, self._timeoutExpected, d)
+        self._expecting.append((re.compile(expression), timer, d))
+        self._checkExpected()
+        return d
+
+__all__ = ['CharacterAttribute', 'TerminalBuffer', 'ExpectableBuffer']
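
A sketch (not part of the patch) of driving the in-memory TerminalBuffer
directly; it is a protocol, so makeConnection() is called by hand, and the
None transport is harmless because the buffer never writes to it:

    from twisted.conch.insults.helper import TerminalBuffer

    term = TerminalBuffer()
    term.makeConnection(None)      # resets the 80x24 in-memory screen
    term.write("first line\nsecond line")
    term.cursorPosition(0, 0)
    term.write("FIRST")            # overwrites the start of the first row
    print str(term).rstrip()       # "FIRST line" followed by "second line"
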
diff --git a/ThirdParty/Twisted/twisted/conch/insults/insults.py b/ThirdParty/Twisted/twisted/conch/insults/insults.py
new file mode 100644
index 0000000..721551d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/insults/insults.py
@@ -0,0 +1,1087 @@
+# -*- test-case-name: twisted.conch.test.test_insults -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+VT102 and VT220 terminal manipulation.
+
+@author: Jp Calderone
+"""
+
+from zope.interface import implements, Interface
+
+from twisted.internet import protocol, defer, interfaces as iinternet
+
+class ITerminalProtocol(Interface):
+    def makeConnection(transport):
+        """Called with an L{ITerminalTransport} when a connection is established.
+        """
+
+    def keystrokeReceived(keyID, modifier):
+        """A keystroke was received.
+
+        Each keystroke corresponds to one invocation of this method.
+        keyID is a string identifier for that key.  Printable characters
+        are represented by themselves.  Control keys, such as arrows and
+        function keys, are represented with symbolic constants on
+        L{ServerProtocol}.
+        """
+
+    def terminalSize(width, height):
+        """Called to indicate the size of the terminal.
+
+        A terminal of 80x24 should be assumed if this method is not
+        called.  This method might not be called for real terminals.
+        """
+
+    def unhandledControlSequence(seq):
+        """Called when an unsupported control sequence is received.
+
+        @type seq: C{str}
+        @param seq: The whole control sequence which could not be interpreted.
+        """
+
+    def connectionLost(reason):
+        """Called when the connection has been lost.
+
+        reason is a Failure describing why.
+        """
+
+class TerminalProtocol(object):
+    implements(ITerminalProtocol)
+
+    def makeConnection(self, terminal):
+        # assert ITerminalTransport.providedBy(terminal), "TerminalProtocol.makeConnection must be passed an ITerminalTransport implementor"
+        self.terminal = terminal
+        self.connectionMade()
+
+    def connectionMade(self):
+        """Called after a connection has been established.
+        """
+
+    def keystrokeReceived(self, keyID, modifier):
+        pass
+
+    def terminalSize(self, width, height):
+        pass
+
+    def unhandledControlSequence(self, seq):
+        pass
+
+    def connectionLost(self, reason):
+        pass
+
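
    # Sketch (not part of the patch): a minimal TerminalProtocol subclass that
    # echoes input back through the ITerminalTransport it is handed; the
    # EchoTerminal name and behaviour are invented for illustration.
    class EchoTerminal(TerminalProtocol):
        def connectionMade(self):
            self.terminal.reset()
            self.terminal.write("Echoing keystrokes:\r\n")

        def keystrokeReceived(self, keyID, modifier):
            # Printable keys arrive as one-character strings; special keys
            # arrive as the symbolic constants attached to ServerProtocol.
            self.terminal.write('%s' % (keyID,))
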
+class ITerminalTransport(iinternet.ITransport):
+    def cursorUp(n=1):
+        """Move the cursor up n lines.
+        """
+
+    def cursorDown(n=1):
+        """Move the cursor down n lines.
+        """
+
+    def cursorForward(n=1):
+        """Move the cursor right n columns.
+        """
+
+    def cursorBackward(n=1):
+        """Move the cursor left n columns.
+        """
+
+    def cursorPosition(column, line):
+        """Move the cursor to the given line and column.
+        """
+
+    def cursorHome():
+        """Move the cursor home.
+        """
+
+    def index():
+        """Move the cursor down one line, performing scrolling if necessary.
+        """
+
+    def reverseIndex():
+        """Move the cursor up one line, performing scrolling if necessary.
+        """
+
+    def nextLine():
+        """Move the cursor to the first position on the next line, performing scrolling if necessary.
+        """
+
+    def saveCursor():
+        """Save the cursor position, character attribute, character set, and origin mode selection.
+        """
+
+    def restoreCursor():
+        """Restore the previously saved cursor position, character attribute, character set, and origin mode selection.
+
+        If no cursor state was previously saved, move the cursor to the home position.
+        """
+
+    def setModes(modes):
+        """Set the given modes on the terminal.
+        """
+
+    def resetModes(mode):
+        """Reset the given modes on the terminal.
+        """
+
+
+    def setPrivateModes(modes):
+        """
+        Set the given DEC private modes on the terminal.
+        """
+
+
+    def resetPrivateModes(modes):
+        """
+        Reset the given DEC private modes on the terminal.
+        """
+
+
+    def applicationKeypadMode():
+        """Cause keypad to generate control functions.
+
+        Cursor key mode selects the type of characters generated by cursor keys.
+        """
+
+    def numericKeypadMode():
+        """Cause keypad to generate normal characters.
+        """
+
+    def selectCharacterSet(charSet, which):
+        """Select a character set.
+
+        charSet should be one of CS_US, CS_UK, CS_DRAWING, CS_ALTERNATE, or
+        CS_ALTERNATE_SPECIAL.
+
+        which should be one of G0 or G1.
+        """
+
+    def shiftIn():
+        """Activate the G0 character set.
+        """
+
+    def shiftOut():
+        """Activate the G1 character set.
+        """
+
+    def singleShift2():
+        """Shift to the G2 character set for a single character.
+        """
+
+    def singleShift3():
+        """Shift to the G3 character set for a single character.
+        """
+
+    def selectGraphicRendition(*attributes):
+        """Enabled one or more character attributes.
+
+        Arguments should be one or more of UNDERLINE, REVERSE_VIDEO, BLINK, or BOLD.
+        NORMAL may also be specified to disable all character attributes.
+        """
+
+    def horizontalTabulationSet():
+        """Set a tab stop at the current cursor position.
+        """
+
+    def tabulationClear():
+        """Clear the tab stop at the current cursor position.
+        """
+
+    def tabulationClearAll():
+        """Clear all tab stops.
+        """
+
+    def doubleHeightLine(top=True):
+        """Make the current line the top or bottom half of a double-height, double-width line.
+
+        If top is True, the current line is the top half.  Otherwise, it is the bottom half.
+        """
+
+    def singleWidthLine():
+        """Make the current line a single-width, single-height line.
+        """
+
+    def doubleWidthLine():
+        """Make the current line a double-width line.
+        """
+
+    def eraseToLineEnd():
+        """Erase from the cursor to the end of line, including cursor position.
+        """
+
+    def eraseToLineBeginning():
+        """Erase from the cursor to the beginning of the line, including the cursor position.
+        """
+
+    def eraseLine():
+        """Erase the entire cursor line.
+        """
+
+    def eraseToDisplayEnd():
+        """Erase from the cursor to the end of the display, including the cursor position.
+        """
+
+    def eraseToDisplayBeginning():
+        """Erase from the cursor to the beginning of the display, including the cursor position.
+        """
+
+    def eraseDisplay():
+        """Erase the entire display.
+        """
+
+    def deleteCharacter(n=1):
+        """Delete n characters starting at the cursor position.
+
+        Characters to the right of deleted characters are shifted to the left.
+        """
+
+    def insertLine(n=1):
+        """Insert n lines at the cursor position.
+
+        Lines below the cursor are shifted down.  Lines moved past the bottom margin are lost.
+        This command is ignored when the cursor is outside the scroll region.
+        """
+
+    def deleteLine(n=1):
+        """Delete n lines starting at the cursor position.
+
+        Lines below the cursor are shifted up.  This command is ignored when the cursor is outside
+        the scroll region.
+        """
+
+    def reportCursorPosition():
+        """Return a Deferred that fires with a two-tuple of (x, y) indicating the cursor position.
+        """
+
+    def reset():
+        """Reset the terminal to its initial state.
+        """
+
+    def unhandledControlSequence(seq):
+        """Called when an unsupported control sequence is received.
+
+        @type seq: C{str}
+        @param seq: The whole control sequence which could not be interpreted.
+        """
+
+
+CSI = '\x1b'
+CST = {'~': 'tilde'}
+
+class modes:
+    """ECMA 48 standardized modes
+    """
+
+    # Keyboard Action Mode (KAM): when set, the keyboard is locked and ignores input.
+    KEYBOARD_ACTION = KAM = 2
+
+    # When set, enables character insertion. New display characters
+    # move old display characters to the right. Characters moved past
+    # the right margin are lost.
+
+    # When reset, enables replacement mode (disables character
+    # insertion). New display characters replace old display
+    # characters at cursor position. The old character is erased.
+    INSERTION_REPLACEMENT = IRM = 4
+
+    # Set causes a received linefeed, form feed, or vertical tab to
+    # move cursor to first column of next line. RETURN transmits both
+    # a carriage return and linefeed. This selection is also called
+    # new line option.
+
+    # Reset causes a received linefeed, form feed, or vertical tab to
+    # move cursor to next line in current column. RETURN transmits a
+    # carriage return.
+    LINEFEED_NEWLINE = LNM = 20
+
+
+class privateModes:
+    """ANSI-Compatible Private Modes
+    """
+    ERROR = 0
+    CURSOR_KEY = 1
+    ANSI_VT52 = 2
+    COLUMN = 3
+    SCROLL = 4
+    SCREEN = 5
+    ORIGIN = 6
+    AUTO_WRAP = 7
+    AUTO_REPEAT = 8
+    PRINTER_FORM_FEED = 18
+    PRINTER_EXTENT = 19
+
+    # Toggle cursor visibility (reset hides it)
+    CURSOR_MODE = 25
+
+
+# Character sets
+CS_US = 'CS_US'
+CS_UK = 'CS_UK'
+CS_DRAWING = 'CS_DRAWING'
+CS_ALTERNATE = 'CS_ALTERNATE'
+CS_ALTERNATE_SPECIAL = 'CS_ALTERNATE_SPECIAL'
+
+# Character set designators: G0 and G1 are slots that can each be bound to one of the character sets above.
+G0 = 'G0'
+G1 = 'G1'
+
+# G2 and G3 cannot be changed, but they can be shifted to.
+G2 = 'G2'
+G3 = 'G3'
+
+# Character attributes
+
+NORMAL = 0
+BOLD = 1
+UNDERLINE = 4
+BLINK = 5
+REVERSE_VIDEO = 7
+
+class Vector:
+    def __init__(self, x, y):
+        self.x = x
+        self.y = y
+
+def log(s):
+    file('log', 'a').write(str(s) + '\n')
+
+# XXX TODO - These attributes are really part of the
+# ITerminalTransport interface, I think.
+_KEY_NAMES = ('UP_ARROW', 'DOWN_ARROW', 'RIGHT_ARROW', 'LEFT_ARROW',
+              'HOME', 'INSERT', 'DELETE', 'END', 'PGUP', 'PGDN', 'NUMPAD_MIDDLE',
+              'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9',
+              'F10', 'F11', 'F12',
+
+              'ALT', 'SHIFT', 'CONTROL')
+
+class _const(object):
+    """
+    @ivar name: A string naming this constant
+    """
+    def __init__(self, name):
+        self.name = name
+
+    def __repr__(self):
+        return '[' + self.name + ']'
+
+
+FUNCTION_KEYS = [
+    _const(_name) for _name in _KEY_NAMES]
+
+class ServerProtocol(protocol.Protocol):
+    implements(ITerminalTransport)
+
+    protocolFactory = None
+    terminalProtocol = None
+
+    TAB = '\t'
+    BACKSPACE = '\x7f'
+    ##
+
+    lastWrite = ''
+
+    state = 'data'
+
+    termSize = Vector(80, 24)
+    cursorPos = Vector(0, 0)
+    scrollRegion = None
+
+    # Factory who instantiated me
+    factory = None
+
+    def __init__(self, protocolFactory=None, *a, **kw):
+        """
+        @param protocolFactory: A callable which will be invoked with
+        *a, **kw and should return an ITerminalProtocol implementor.
+        This will be invoked when a connection to this ServerProtocol
+        is established.
+
+        @param a: Any positional arguments to pass to protocolFactory.
+        @param kw: Any keyword arguments to pass to protocolFactory.
+        """
+        # assert protocolFactory is None or ITerminalProtocol.implementedBy(protocolFactory), "ServerProtocol.__init__ must be passed an ITerminalProtocol implementor"
+        if protocolFactory is not None:
+            self.protocolFactory = protocolFactory
+        self.protocolArgs = a
+        self.protocolKwArgs = kw
+
+        self._cursorReports = []
+
+    def connectionMade(self):
+        if self.protocolFactory is not None:
+            self.terminalProtocol = self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs)
+
+            try:
+                factory = self.factory
+            except AttributeError:
+                pass
+            else:
+                self.terminalProtocol.factory = factory
+
+            self.terminalProtocol.makeConnection(self)
+
+    def dataReceived(self, data):
+        for ch in data:
+            if self.state == 'data':
+                if ch == '\x1b':
+                    self.state = 'escaped'
+                else:
+                    self.terminalProtocol.keystrokeReceived(ch, None)
+            elif self.state == 'escaped':
+                if ch == '[':
+                    self.state = 'bracket-escaped'
+                    self.escBuf = []
+                elif ch == 'O':
+                    self.state = 'low-function-escaped'
+                else:
+                    self.state = 'data'
+                    self._handleShortControlSequence(ch)
+            elif self.state == 'bracket-escaped':
+                if ch == 'O':
+                    self.state = 'low-function-escaped'
+                elif ch.isalpha() or ch == '~':
+                    self._handleControlSequence(''.join(self.escBuf) + ch)
+                    del self.escBuf
+                    self.state = 'data'
+                else:
+                    self.escBuf.append(ch)
+            elif self.state == 'low-function-escaped':
+                self._handleLowFunctionControlSequence(ch)
+                self.state = 'data'
+            else:
+                raise ValueError("Illegal state")
+
+    def _handleShortControlSequence(self, ch):
+        self.terminalProtocol.keystrokeReceived(ch, self.ALT)
+
+    def _handleControlSequence(self, buf):
+        buf = '\x1b[' + buf
+        f = getattr(self.controlSequenceParser, CST.get(buf[-1], buf[-1]), None)
+        if f is None:
+            self.unhandledControlSequence(buf)
+        else:
+            f(self, self.terminalProtocol, buf[:-1])
+
+    def unhandledControlSequence(self, buf):
+        self.terminalProtocol.unhandledControlSequence(buf)
+
+    def _handleLowFunctionControlSequence(self, ch):
+        map = {'P': self.F1, 'Q': self.F2, 'R': self.F3, 'S': self.F4}
+        keyID = map.get(ch)
+        if keyID is not None:
+            self.terminalProtocol.keystrokeReceived(keyID, None)
+        else:
+            self.terminalProtocol.unhandledControlSequence('\x1b[O' + ch)
+
+    class ControlSequenceParser:
+        def A(self, proto, handler, buf):
+            if buf == '\x1b[':
+                handler.keystrokeReceived(proto.UP_ARROW, None)
+            else:
+                handler.unhandledControlSequence(buf + 'A')
+
+        def B(self, proto, handler, buf):
+            if buf == '\x1b[':
+                handler.keystrokeReceived(proto.DOWN_ARROW, None)
+            else:
+                handler.unhandledControlSequence(buf + 'B')
+
+        def C(self, proto, handler, buf):
+            if buf == '\x1b[':
+                handler.keystrokeReceived(proto.RIGHT_ARROW, None)
+            else:
+                handler.unhandledControlSequence(buf + 'C')
+
+        def D(self, proto, handler, buf):
+            if buf == '\x1b[':
+                handler.keystrokeReceived(proto.LEFT_ARROW, None)
+            else:
+                handler.unhandledControlSequence(buf + 'D')
+
+        def E(self, proto, handler, buf):
+            if buf == '\x1b[':
+                handler.keystrokeReceived(proto.NUMPAD_MIDDLE, None)
+            else:
+                handler.unhandledControlSequence(buf + 'E')
+
+        def F(self, proto, handler, buf):
+            if buf == '\x1b[':
+                handler.keystrokeReceived(proto.END, None)
+            else:
+                handler.unhandledControlSequence(buf + 'F')
+
+        def H(self, proto, handler, buf):
+            if buf == '\x1b[':
+                handler.keystrokeReceived(proto.HOME, None)
+            else:
+                handler.unhandledControlSequence(buf + 'H')
+
+        def R(self, proto, handler, buf):
+            if not proto._cursorReports:
+                handler.unhandledControlSequence(buf + 'R')
+            elif buf.startswith('\x1b['):
+                report = buf[2:]
+                parts = report.split(';')
+                if len(parts) != 2:
+                    handler.unhandledControlSequence(buf + 'R')
+                else:
+                    Pl, Pc = parts
+                    try:
+                        Pl, Pc = int(Pl), int(Pc)
+                    except ValueError:
+                        handler.unhandledControlSequence(buf + 'R')
+                    else:
+                        d = proto._cursorReports.pop(0)
+                        d.callback((Pc - 1, Pl - 1))
+            else:
+                handler.unhandledControlSequence(buf + 'R')
+
+        def Z(self, proto, handler, buf):
+            if buf == '\x1b[':
+                handler.keystrokeReceived(proto.TAB, proto.SHIFT)
+            else:
+                handler.unhandledControlSequence(buf + 'Z')
+
+        def tilde(self, proto, handler, buf):
+            map = {1: proto.HOME, 2: proto.INSERT, 3: proto.DELETE,
+                   4: proto.END,  5: proto.PGUP,   6: proto.PGDN,
+
+                   15: proto.F5,  17: proto.F6, 18: proto.F7,
+                   19: proto.F8,  20: proto.F9, 21: proto.F10,
+                   23: proto.F11, 24: proto.F12}
+
+            if buf.startswith('\x1b['):
+                ch = buf[2:]
+                try:
+                    v = int(ch)
+                except ValueError:
+                    handler.unhandledControlSequence(buf + '~')
+                else:
+                    symbolic = map.get(v)
+                    if symbolic is not None:
+                        handler.keystrokeReceived(map[v], None)
+                    else:
+                        handler.unhandledControlSequence(buf + '~')
+            else:
+                handler.unhandledControlSequence(buf + '~')
+
+    controlSequenceParser = ControlSequenceParser()
+
+    # ITerminalTransport
+    def cursorUp(self, n=1):
+        assert n >= 1
+        self.cursorPos.y = max(self.cursorPos.y - n, 0)
+        self.write('\x1b[%dA' % (n,))
+
+    def cursorDown(self, n=1):
+        assert n >= 1
+        self.cursorPos.y = min(self.cursorPos.y + n, self.termSize.y - 1)
+        self.write('\x1b[%dB' % (n,))
+
+    def cursorForward(self, n=1):
+        assert n >= 1
+        self.cursorPos.x = min(self.cursorPos.x + n, self.termSize.x - 1)
+        self.write('\x1b[%dC' % (n,))
+
+    def cursorBackward(self, n=1):
+        assert n >= 1
+        self.cursorPos.x = max(self.cursorPos.x - n, 0)
+        self.write('\x1b[%dD' % (n,))
+
+    def cursorPosition(self, column, line):
+        self.write('\x1b[%d;%dH' % (line + 1, column + 1))
+
+    def cursorHome(self):
+        self.cursorPos.x = self.cursorPos.y = 0
+        self.write('\x1b[H')
+
+    def index(self):
+        self.cursorPos.y = min(self.cursorPos.y + 1, self.termSize.y - 1)
+        self.write('\x1bD')
+
+    def reverseIndex(self):
+        self.cursorPos.y = max(self.cursorPos.y - 1, 0)
+        self.write('\x1bM')
+
+    def nextLine(self):
+        self.cursorPos.x = 0
+        self.cursorPos.y = min(self.cursorPos.y + 1, self.termSize.y - 1)
+        self.write('\n')
+
+    def saveCursor(self):
+        self._savedCursorPos = Vector(self.cursorPos.x, self.cursorPos.y)
+        self.write('\x1b7')
+
+    def restoreCursor(self):
+        self.cursorPos = self._savedCursorPos
+        del self._savedCursorPos
+        self.write('\x1b8')
+
+    def setModes(self, modes):
+        # XXX Support ANSI-Compatible private modes
+        self.write('\x1b[%sh' % (';'.join(map(str, modes)),))
+
+    def setPrivateModes(self, modes):
+        self.write('\x1b[?%sh' % (';'.join(map(str, modes)),))
+
+    def resetModes(self, modes):
+        # XXX Support ANSI-Compatible private modes
+        self.write('\x1b[%sl' % (';'.join(map(str, modes)),))
+
+    def resetPrivateModes(self, modes):
+        self.write('\x1b[?%sl' % (';'.join(map(str, modes)),))
+
+    def applicationKeypadMode(self):
+        self.write('\x1b=')
+
+    def numericKeypadMode(self):
+        self.write('\x1b>')
+
+    def selectCharacterSet(self, charSet, which):
+        # XXX Rewrite these as dict lookups
+        if which == G0:
+            which = '('
+        elif which == G1:
+            which = ')'
+        else:
+            raise ValueError("`which' argument to selectCharacterSet must be G0 or G1")
+        if charSet == CS_UK:
+            charSet = 'A'
+        elif charSet == CS_US:
+            charSet = 'B'
+        elif charSet == CS_DRAWING:
+            charSet = '0'
+        elif charSet == CS_ALTERNATE:
+            charSet = '1'
+        elif charSet == CS_ALTERNATE_SPECIAL:
+            charSet = '2'
+        else:
+            raise ValueError("Invalid `charSet' argument to selectCharacterSet")
+        self.write('\x1b' + which + charSet)
+
+    def shiftIn(self):
+        self.write('\x15')
+
+    def shiftOut(self):
+        self.write('\x14')
+
+    def singleShift2(self):
+        self.write('\x1bN')
+
+    def singleShift3(self):
+        self.write('\x1bO')
+
+    def selectGraphicRendition(self, *attributes):
+        attrs = []
+        for a in attributes:
+            attrs.append(str(a))
+        self.write('\x1b[%sm' % (';'.join(attrs),))
+
+    def horizontalTabulationSet(self):
+        self.write('\x1bH')
+
+    def tabulationClear(self):
+        self.write('\x1b[q')
+
+    def tabulationClearAll(self):
+        self.write('\x1b[3q')
+
+    def doubleHeightLine(self, top=True):
+        if top:
+            self.write('\x1b#3')
+        else:
+            self.write('\x1b#4')
+
+    def singleWidthLine(self):
+        self.write('\x1b#5')
+
+    def doubleWidthLine(self):
+        self.write('\x1b#6')
+
+    def eraseToLineEnd(self):
+        self.write('\x1b[K')
+
+    def eraseToLineBeginning(self):
+        self.write('\x1b[1K')
+
+    def eraseLine(self):
+        self.write('\x1b[2K')
+
+    def eraseToDisplayEnd(self):
+        self.write('\x1b[J')
+
+    def eraseToDisplayBeginning(self):
+        self.write('\x1b[1J')
+
+    def eraseDisplay(self):
+        self.write('\x1b[2J')
+
+    def deleteCharacter(self, n=1):
+        self.write('\x1b[%dP' % (n,))
+
+    def insertLine(self, n=1):
+        self.write('\x1b[%dL' % (n,))
+
+    def deleteLine(self, n=1):
+        self.write('\x1b[%dM' % (n,))
+
+    def setScrollRegion(self, first=None, last=None):
+        if first is not None:
+            first = '%d' % (first,)
+        else:
+            first = ''
+        if last is not None:
+            last = '%d' % (last,)
+        else:
+            last = ''
+        self.write('\x1b[%s;%sr' % (first, last))
+
+    def resetScrollRegion(self):
+        self.setScrollRegion()
+
+    def reportCursorPosition(self):
+        d = defer.Deferred()
+        self._cursorReports.append(d)
+        self.write('\x1b[6n')
+        return d
+
+    def reset(self):
+        self.cursorPos.x = self.cursorPos.y = 0
+        try:
+            del self._savedCursorPos
+        except AttributeError:
+            pass
+        self.write('\x1bc')
+
+    # ITransport
+    def write(self, bytes):
+        if bytes:
+            self.lastWrite = bytes
+            self.transport.write('\r\n'.join(bytes.split('\n')))
+
+    def writeSequence(self, bytes):
+        self.write(''.join(bytes))
+
+    def loseConnection(self):
+        self.reset()
+        self.transport.loseConnection()
+
+    def connectionLost(self, reason):
+        if self.terminalProtocol is not None:
+            try:
+                self.terminalProtocol.connectionLost(reason)
+            finally:
+                self.terminalProtocol = None
+# Add symbolic names for function keys
+for name, const in zip(_KEY_NAMES, FUNCTION_KEYS):
+    setattr(ServerProtocol, name, const)
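
    # Sketch (not part of the patch): exercising ServerProtocol's escape
    # parser with a small recording protocol; Recorder is invented for
    # illustration and no real transport is needed just to parse input.
    class Recorder(TerminalProtocol):
        def connectionMade(self):
            self.keys = []

        def keystrokeReceived(self, keyID, modifier):
            self.keys.append((keyID, modifier))

    server = ServerProtocol(Recorder)
    server.makeConnection(None)
    server.dataReceived('a\x1b[3~')
    # server.terminalProtocol.keys == [('a', None), (ServerProtocol.DELETE, None)]
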
+
+
+
+class ClientProtocol(protocol.Protocol):
+
+    terminalFactory = None
+    terminal = None
+
+    state = 'data'
+
+    _escBuf = None
+
+    _shorts = {
+        'D': 'index',
+        'M': 'reverseIndex',
+        'E': 'nextLine',
+        '7': 'saveCursor',
+        '8': 'restoreCursor',
+        '=': 'applicationKeypadMode',
+        '>': 'numericKeypadMode',
+        'N': 'singleShift2',
+        'O': 'singleShift3',
+        'H': 'horizontalTabulationSet',
+        'c': 'reset'}
+
+    _longs = {
+        '[': 'bracket-escape',
+        '(': 'select-g0',
+        ')': 'select-g1',
+        '#': 'select-height-width'}
+
+    _charsets = {
+        'A': CS_UK,
+        'B': CS_US,
+        '0': CS_DRAWING,
+        '1': CS_ALTERNATE,
+        '2': CS_ALTERNATE_SPECIAL}
+
+    # Factory who instantiated me
+    factory = None
+
+    def __init__(self, terminalFactory=None, *a, **kw):
+        """
+        @param terminalFactory: A callable which will be invoked with
+        *a, **kw and should return an ITerminalTransport provider.
+        This will be invoked when this ClientProtocol establishes a
+        connection.
+
+        @param a: Any positional arguments to pass to terminalFactory.
+        @param kw: Any keyword arguments to pass to terminalFactory.
+        """
+        # assert terminalFactory is None or ITerminalTransport.implementedBy(terminalFactory), "ClientProtocol.__init__ must be passed an ITerminalTransport implementor"
+        if terminalFactory is not None:
+            self.terminalFactory = terminalFactory
+        self.terminalArgs = a
+        self.terminalKwArgs = kw
+
+    def connectionMade(self):
+        if self.terminalFactory is not None:
+            self.terminal = self.terminalFactory(*self.terminalArgs, **self.terminalKwArgs)
+            self.terminal.factory = self.factory
+            self.terminal.makeConnection(self)
+
+    def connectionLost(self, reason):
+        if self.terminal is not None:
+            try:
+                self.terminal.connectionLost(reason)
+            finally:
+                del self.terminal
+
+    def dataReceived(self, bytes):
+        """
+        Parse the given data from a terminal server, dispatching to event
+        handlers defined by C{self.terminal}.
+        """
+        toWrite = []
+        for b in bytes:
+            if self.state == 'data':
+                if b == '\x1b':
+                    if toWrite:
+                        self.terminal.write(''.join(toWrite))
+                        del toWrite[:]
+                    self.state = 'escaped'
+                elif b == '\x14':
+                    if toWrite:
+                        self.terminal.write(''.join(toWrite))
+                        del toWrite[:]
+                    self.terminal.shiftOut()
+                elif b == '\x15':
+                    if toWrite:
+                        self.terminal.write(''.join(toWrite))
+                        del toWrite[:]
+                    self.terminal.shiftIn()
+                elif b == '\x08':
+                    if toWrite:
+                        self.terminal.write(''.join(toWrite))
+                        del toWrite[:]
+                    self.terminal.cursorBackward()
+                else:
+                    toWrite.append(b)
+            elif self.state == 'escaped':
+                fName = self._shorts.get(b)
+                if fName is not None:
+                    self.state = 'data'
+                    getattr(self.terminal, fName)()
+                else:
+                    state = self._longs.get(b)
+                    if state is not None:
+                        self.state = state
+                    else:
+                        self.terminal.unhandledControlSequence('\x1b' + b)
+                        self.state = 'data'
+            elif self.state == 'bracket-escape':
+                if self._escBuf is None:
+                    self._escBuf = []
+                if b.isalpha() or b == '~':
+                    self._handleControlSequence(''.join(self._escBuf), b)
+                    del self._escBuf
+                    self.state = 'data'
+                else:
+                    self._escBuf.append(b)
+            elif self.state == 'select-g0':
+                self.terminal.selectCharacterSet(self._charsets.get(b, b), G0)
+                self.state = 'data'
+            elif self.state == 'select-g1':
+                self.terminal.selectCharacterSet(self._charsets.get(b, b), G1)
+                self.state = 'data'
+            elif self.state == 'select-height-width':
+                self._handleHeightWidth(b)
+                self.state = 'data'
+            else:
+                raise ValueError("Illegal state")
+        if toWrite:
+            self.terminal.write(''.join(toWrite))
+
+
+    def _handleControlSequence(self, buf, terminal):
+        f = getattr(self.controlSequenceParser, CST.get(terminal, terminal), None)
+        if f is None:
+            self.terminal.unhandledControlSequence('\x1b[' + buf + terminal)
+        else:
+            f(self, self.terminal, buf)
+
+    class ControlSequenceParser:
+        def _makeSimple(ch, fName):
+            n = 'cursor' + fName
+            def simple(self, proto, handler, buf):
+                if not buf:
+                    getattr(handler, n)(1)
+                else:
+                    try:
+                        m = int(buf)
+                    except ValueError:
+                        handler.unhandledControlSequence('\x1b[' + buf + ch)
+                    else:
+                        getattr(handler, n)(m)
+            return simple
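+        # (Descriptive note, not in upstream Twisted.)  The loop below creates
+        # the 'A', 'B', 'C' and 'D' handlers (cursorUp, cursorDown,
+        # cursorForward and cursorBackward) at class-definition time.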
+        for (ch, fName) in (('A', 'Up'),
+                            ('B', 'Down'),
+                            ('C', 'Forward'),
+                            ('D', 'Backward')):
+            exec ch + " = _makeSimple(ch, fName)"
+        del _makeSimple
+
+        def h(self, proto, handler, buf):
+            # XXX - Handle '?' to introduce ANSI-Compatible private modes.
+            try:
+                modes = map(int, buf.split(';'))
+            except ValueError:
+                handler.unhandledControlSequence('\x1b[' + buf + 'h')
+            else:
+                handler.setModes(modes)
+
+        def l(self, proto, handler, buf):
+            # XXX - Handle '?' to introduce ANSI-Compatible private modes.
+            try:
+                modes = map(int, buf.split(';'))
+            except ValueError:
+                handler.unhandledControlSequence('\x1b[' + buf + 'l')
+            else:
+                handler.resetModes(modes)
+
+        def r(self, proto, handler, buf):
+            parts = buf.split(';')
+            if len(parts) == 1:
+                handler.setScrollRegion(None, None)
+            elif len(parts) == 2:
+                try:
+                    if parts[0]:
+                        pt = int(parts[0])
+                    else:
+                        pt = None
+                    if parts[1]:
+                        pb = int(parts[1])
+                    else:
+                        pb = None
+                except ValueError:
+                    handler.unhandledControlSequence('\x1b[' + buf + 'r')
+                else:
+                    handler.setScrollRegion(pt, pb)
+            else:
+                handler.unhandledControlSequence('\x1b[' + buf + 'r')
+
+        def K(self, proto, handler, buf):
+            if not buf:
+                handler.eraseToLineEnd()
+            elif buf == '1':
+                handler.eraseToLineBeginning()
+            elif buf == '2':
+                handler.eraseLine()
+            else:
+                handler.unhandledControlSequence('\x1b[' + buf + 'K')
+
+        def H(self, proto, handler, buf):
+            handler.cursorHome()
+
+        def J(self, proto, handler, buf):
+            if not buf:
+                handler.eraseToDisplayEnd()
+            elif buf == '1':
+                handler.eraseToDisplayBeginning()
+            elif buf == '2':
+                handler.eraseDisplay()
+            else:
+                handler.unhandledControlSequence('\x1b[' + buf + 'J')
+
+        def P(self, proto, handler, buf):
+            if not buf:
+                handler.deleteCharacter(1)
+            else:
+                try:
+                    n = int(buf)
+                except ValueError:
+                    handler.unhandledControlSequence('\x1b[' + buf + 'P')
+                else:
+                    handler.deleteCharacter(n)
+
+        def L(self, proto, handler, buf):
+            if not buf:
+                handler.insertLine(1)
+            else:
+                try:
+                    n = int(buf)
+                except ValueError:
+                    handler.unhandledControlSequence('\x1b[' + buf + 'L')
+                else:
+                    handler.insertLine(n)
+
+        def M(self, proto, handler, buf):
+            if not buf:
+                handler.deleteLine(1)
+            else:
+                try:
+                    n = int(buf)
+                except ValueError:
+                    handler.unhandledControlSequence('\x1b[' + buf + 'M')
+                else:
+                    handler.deleteLine(n)
+
+        def n(self, proto, handler, buf):
+            if buf == '6':
+                x, y = handler.reportCursorPosition()
+                proto.transport.write('\x1b[%d;%dR' % (x + 1, y + 1))
+            else:
+                handler.unhandledControlSequence('\x1b[' + buf + 'n')
+
+        def m(self, proto, handler, buf):
+            if not buf:
+                handler.selectGraphicRendition(NORMAL)
+            else:
+                attrs = []
+                for a in buf.split(';'):
+                    try:
+                        a = int(a)
+                    except ValueError:
+                        pass
+                    attrs.append(a)
+                handler.selectGraphicRendition(*attrs)
+
+    controlSequenceParser = ControlSequenceParser()
+
+    def _handleHeightWidth(self, b):
+        if b == '3':
+            self.terminal.doubleHeightLine(True)
+        elif b == '4':
+            self.terminal.doubleHeightLine(False)
+        elif b == '5':
+            self.terminal.singleWidthLine()
+        elif b == '6':
+            self.terminal.doubleWidthLine()
+        else:
+            self.terminal.unhandledControlSequence('\x1b#' + b)
+
+
+__all__ = [
+    # Interfaces
+    'ITerminalProtocol', 'ITerminalTransport',
+
+    # Symbolic constants
+    'modes', 'privateModes', 'FUNCTION_KEYS',
+
+    'CS_US', 'CS_UK', 'CS_DRAWING', 'CS_ALTERNATE', 'CS_ALTERNATE_SPECIAL',
+    'G0', 'G1', 'G2', 'G3',
+
+    'UNDERLINE', 'REVERSE_VIDEO', 'BLINK', 'BOLD', 'NORMAL',
+
+    # Protocol classes
+    'ServerProtocol', 'ClientProtocol']
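+
+
+# Illustrative sketch, not part of upstream Twisted: ClientProtocol expects a
+# factory returning an ITerminalTransport provider; helper.TerminalBuffer is
+# one such implementation (imported inside the function to avoid a circular
+# import, since the helper module itself imports this one).  The function name
+# below is hypothetical.
+def _exampleClientProtocol():
+    from twisted.conch.insults.helper import TerminalBuffer
+    return ClientProtocol(TerminalBuffer)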
diff --git a/ThirdParty/Twisted/twisted/conch/insults/text.py b/ThirdParty/Twisted/twisted/conch/insults/text.py
new file mode 100644
index 0000000..e5c8fd1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/insults/text.py
@@ -0,0 +1,186 @@
+# -*- test-case-name: twisted.conch.test.test_text -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Character attribute manipulation API
+
+This module provides a domain-specific language (using Python syntax)
+for the creation of text with additional display attributes associated
+with it.  It is intended as an alternative to manually building up
+strings containing ECMA 48 character attribute control codes.  It
+currently supports foreground and background colors (black, red,
+green, yellow, blue, magenta, cyan, and white), intensity selection,
+underlining, blinking and reverse video.  Character set selection
+support is planned.
+
+Character attributes are specified by using two Python operations:
+attribute lookup and indexing.  For example, to render the string
+\"Hello world\" with a red foreground and all other attributes set to
+their defaults, assuming the name twisted.conch.insults.text.attributes
+has been imported and bound to the name \"A\" (with the statement
+C{from twisted.conch.insults.text import attributes as A}, for
+example), one uses this expression::
+
+ | A.fg.red[\"Hello world\"]
+
+Other foreground colors are set by substituting their name for
+\"red\".  To set both a foreground and a background color, this
+expression is used::
+
+ | A.fg.red[A.bg.green[\"Hello world\"]]
+
+Note that A.bg.green can be nested within A.fg.red or vice
+versa.  Also note that multiple items can be nested within a single
+index operation by separating them with commas::
+
+ | A.bg.green[A.fg.red[\"Hello\"], " ", A.fg.blue[\"world\"]]
+
+Other character attributes are set in a similar fashion.  To specify a
+blinking version of the previous expression::
+
+ | A.blink[A.bg.green[A.fg.red[\"Hello\"], " ", A.fg.blue[\"world\"]]]
+
+C{A.reverseVideo}, C{A.underline}, and C{A.bold} are also valid.
+
+A third operation is actually supported: unary negation.  This turns
+off an attribute when an enclosing expression would otherwise have
+caused it to be on.  For example::
+
+ | A.underline[A.fg.red[\"Hello\", -A.underline[\" world\"]]]
+
+@author: Jp Calderone
+"""
+
+from twisted.conch.insults import helper, insults
+
+class _Attribute(object):
+    def __init__(self):
+        self.children = []
+
+    def __getitem__(self, item):
+        assert isinstance(item, (list, tuple, _Attribute, str))
+        if isinstance(item, (list, tuple)):
+            self.children.extend(item)
+        else:
+            self.children.append(item)
+        return self
+
+    def serialize(self, write, attrs=None):
+        if attrs is None:
+            attrs = helper.CharacterAttribute()
+        for ch in self.children:
+            if isinstance(ch, _Attribute):
+                ch.serialize(write, attrs.copy())
+            else:
+                write(attrs.toVT102())
+                write(ch)
+
+class _NormalAttr(_Attribute):
+    def serialize(self, write, attrs):
+        attrs.__init__()
+        super(_NormalAttr, self).serialize(write, attrs)
+
+class _OtherAttr(_Attribute):
+    def __init__(self, attrname, attrvalue):
+        self.attrname = attrname
+        self.attrvalue = attrvalue
+        self.children = []
+
+    def __neg__(self):
+        result = _OtherAttr(self.attrname, not self.attrvalue)
+        result.children.extend(self.children)
+        return result
+
+    def serialize(self, write, attrs):
+        attrs = attrs.wantOne(**{self.attrname: self.attrvalue})
+        super(_OtherAttr, self).serialize(write, attrs)
+
+class _ColorAttr(_Attribute):
+    def __init__(self, color, ground):
+        self.color = color
+        self.ground = ground
+        self.children = []
+
+    def serialize(self, write, attrs):
+        attrs = attrs.wantOne(**{self.ground: self.color})
+        super(_ColorAttr, self).serialize(write, attrs)
+
+class _ForegroundColorAttr(_ColorAttr):
+    def __init__(self, color):
+        super(_ForegroundColorAttr, self).__init__(color, 'foreground')
+
+class _BackgroundColorAttr(_ColorAttr):
+    def __init__(self, color):
+        super(_BackgroundColorAttr, self).__init__(color, 'background')
+
+class CharacterAttributes(object):
+    class _ColorAttribute(object):
+        def __init__(self, ground):
+            self.ground = ground
+
+        attrs = {
+            'black': helper.BLACK,
+            'red': helper.RED,
+            'green': helper.GREEN,
+            'yellow': helper.YELLOW,
+            'blue': helper.BLUE,
+            'magenta': helper.MAGENTA,
+            'cyan': helper.CYAN,
+            'white': helper.WHITE}
+
+        def __getattr__(self, name):
+            try:
+                return self.ground(self.attrs[name])
+            except KeyError:
+                raise AttributeError(name)
+
+    fg = _ColorAttribute(_ForegroundColorAttr)
+    bg = _ColorAttribute(_BackgroundColorAttr)
+
+    attrs = {
+        'bold': insults.BOLD,
+        'blink': insults.BLINK,
+        'underline': insults.UNDERLINE,
+        'reverseVideo': insults.REVERSE_VIDEO}
+
+    def __getattr__(self, name):
+        if name == 'normal':
+            return _NormalAttr()
+        if name in self.attrs:
+            return _OtherAttr(name, True)
+        raise AttributeError(name)
+
+def flatten(output, attrs):
+    """Serialize a sequence of characters with attribute information
+
+    The resulting string can be interpreted by VT102-compatible
+    terminals so that the contained characters are displayed and, for
+    those attributes which the terminal supports, have the attributes
+    specified in the input.
+
+    For example, if your terminal is VT102 compatible, you might run
+    this for a colorful variation on the \"hello world\" theme::
+
+     | from twisted.conch.insults.text import flatten, attributes as A
+     | from twisted.conch.insults.helper import CharacterAttribute
+     | print flatten(
+     |     A.normal[A.bold[A.fg.red['He'], A.fg.green['ll'], A.fg.magenta['o'], ' ',
+     |                     A.fg.yellow['Wo'], A.fg.blue['rl'], A.fg.cyan['d!']]],
+     |     CharacterAttribute())
+
+    @param output: Object returned by accessing attributes of the
+    module-level attributes object.
+
+    @param attrs: A L{twisted.conch.insults.helper.CharacterAttribute}
+    instance
+
+    @return: A VT102-friendly string
+    """
+    L = []
+    output.serialize(L.append, attrs)
+    return ''.join(L)
+
+attributes = CharacterAttributes()
+
+__all__ = ['attributes', 'flatten']
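+
+
+# Illustrative sketch, not part of upstream Twisted: combining the attribute
+# DSL above with flatten() produces a VT102 escape-sequence string.  The
+# function name below is hypothetical.
+def _exampleColouredBanner():
+    return flatten(
+        attributes.bold[attributes.fg.red["error: "], "something failed"],
+        helper.CharacterAttribute())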
diff --git a/ThirdParty/Twisted/twisted/conch/insults/window.py b/ThirdParty/Twisted/twisted/conch/insults/window.py
new file mode 100644
index 0000000..9901327
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/insults/window.py
@@ -0,0 +1,868 @@
+# -*- test-case-name: twisted.conch.test.test_window -*-
+
+"""
+Simple insults-based widget library
+
+@author: Jp Calderone
+"""
+
+import array
+
+from twisted.conch.insults import insults, helper
+from twisted.python import text as tptext
+
+class YieldFocus(Exception):
+    """Input focus manipulation exception
+    """
+
+class BoundedTerminalWrapper(object):
+    def __init__(self, terminal, width, height, xoff, yoff):
+        self.width = width
+        self.height = height
+        self.xoff = xoff
+        self.yoff = yoff
+        self.terminal = terminal
+        self.cursorForward = terminal.cursorForward
+        self.selectCharacterSet = terminal.selectCharacterSet
+        self.selectGraphicRendition = terminal.selectGraphicRendition
+        self.saveCursor = terminal.saveCursor
+        self.restoreCursor = terminal.restoreCursor
+
+    def cursorPosition(self, x, y):
+        return self.terminal.cursorPosition(
+            self.xoff + min(self.width, x),
+            self.yoff + min(self.height, y)
+            )
+
+    def cursorHome(self):
+        return self.terminal.cursorPosition(
+            self.xoff, self.yoff)
+
+    def write(self, bytes):
+        return self.terminal.write(bytes)
+
+class Widget(object):
+    focused = False
+    parent = None
+    dirty = False
+    width = height = None
+
+    def repaint(self):
+        if not self.dirty:
+            self.dirty = True
+        if self.parent is not None and not self.parent.dirty:
+            self.parent.repaint()
+
+    def filthy(self):
+        self.dirty = True
+
+    def redraw(self, width, height, terminal):
+        self.filthy()
+        self.draw(width, height, terminal)
+
+    def draw(self, width, height, terminal):
+        if width != self.width or height != self.height or self.dirty:
+            self.width = width
+            self.height = height
+            self.dirty = False
+            self.render(width, height, terminal)
+
+    def render(self, width, height, terminal):
+        pass
+
+    def sizeHint(self):
+        return None
+
+    def keystrokeReceived(self, keyID, modifier):
+        if keyID == '\t':
+            self.tabReceived(modifier)
+        elif keyID == '\x7f':
+            self.backspaceReceived()
+        elif keyID in insults.FUNCTION_KEYS:
+            self.functionKeyReceived(keyID, modifier)
+        else:
+            self.characterReceived(keyID, modifier)
+
+    def tabReceived(self, modifier):
+        # XXX TODO - Handle shift+tab
+        raise YieldFocus()
+
+    def focusReceived(self):
+        """Called when focus is being given to this widget.
+
+        May raise YieldFocus if this widget does not want focus.
+        """
+        self.focused = True
+        self.repaint()
+
+    def focusLost(self):
+        self.focused = False
+        self.repaint()
+
+    def backspaceReceived(self):
+        pass
+
+    def functionKeyReceived(self, keyID, modifier):
+        func = getattr(self, 'func_' + keyID.name, None)
+        if func is not None:
+            func(modifier)
+
+    def characterReceived(self, keyID, modifier):
+        pass
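+
+# Illustrative sketch, not part of upstream Twisted: a minimal custom widget
+# implements render() (and optionally sizeHint()) and calls repaint() whenever
+# its state changes.  The class below is hypothetical.
+class _ExampleLabel(Widget):
+    def __init__(self, text):
+        Widget.__init__(self)
+        self.text = text
+
+    def setText(self, text):
+        self.text = text
+        self.repaint()
+
+    def sizeHint(self):
+        return len(self.text), 1
+
+    def render(self, width, height, terminal):
+        terminal.cursorPosition(0, 0)
+        terminal.write(self.text[:width])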
+
+class ContainerWidget(Widget):
+    """
+    @ivar focusedChild: The contained widget which currently has
+    focus, or None.
+    """
+    focusedChild = None
+    focused = False
+
+    def __init__(self):
+        Widget.__init__(self)
+        self.children = []
+
+    def addChild(self, child):
+        assert child.parent is None
+        child.parent = self
+        self.children.append(child)
+        if self.focusedChild is None and self.focused:
+            try:
+                child.focusReceived()
+            except YieldFocus:
+                pass
+            else:
+                self.focusedChild = child
+        self.repaint()
+
+    def remChild(self, child):
+        assert child.parent is self
+        child.parent = None
+        self.children.remove(child)
+        self.repaint()
+
+    def filthy(self):
+        for ch in self.children:
+            ch.filthy()
+        Widget.filthy(self)
+
+    def render(self, width, height, terminal):
+        for ch in self.children:
+            ch.draw(width, height, terminal)
+
+    def changeFocus(self):
+        self.repaint()
+
+        if self.focusedChild is not None:
+            self.focusedChild.focusLost()
+            focusedChild = self.focusedChild
+            self.focusedChild = None
+            try:
+                curFocus = self.children.index(focusedChild) + 1
+            except ValueError:
+                raise YieldFocus()
+        else:
+            curFocus = 0
+        while curFocus < len(self.children):
+            try:
+                self.children[curFocus].focusReceived()
+            except YieldFocus:
+                curFocus += 1
+            else:
+                self.focusedChild = self.children[curFocus]
+                return
+        # None of our children wanted focus
+        raise YieldFocus()
+
+
+    def focusReceived(self):
+        self.changeFocus()
+        self.focused = True
+
+
+    def keystrokeReceived(self, keyID, modifier):
+        if self.focusedChild is not None:
+            try:
+                self.focusedChild.keystrokeReceived(keyID, modifier)
+            except YieldFocus:
+                self.changeFocus()
+                self.repaint()
+        else:
+            Widget.keystrokeReceived(self, keyID, modifier)
+
+
+class TopWindow(ContainerWidget):
+    """
+    A top-level container object which provides focus wrap-around and paint
+    scheduling.
+
+    @ivar painter: A no-argument callable which will be invoked when this
+    widget needs to be redrawn.
+
+    @ivar scheduler: A one-argument callable which will be invoked with a
+    no-argument callable and should arrange for it to be invoked at some point in
+    the near future.  The no-argument callable will cause this widget and all
+    its children to be redrawn.  It is typically beneficial for the no-argument
+    callable to be invoked at the end of handling for whatever event is
+    currently active; for example, it might make sense to call it at the end of
+    L{twisted.conch.insults.insults.ITerminalProtocol.keystrokeReceived}.
+    Note, however, that since calls to this may also be made in response to no
+    apparent event, arrangements should be made for the function to be called
+    even if an event handler such as C{keystrokeReceived} is not on the call
+    stack (eg, using C{reactor.callLater} with a short timeout).
+    """
+    focused = True
+
+    def __init__(self, painter, scheduler):
+        ContainerWidget.__init__(self)
+        self.painter = painter
+        self.scheduler = scheduler
+
+    _paintCall = None
+    def repaint(self):
+        if self._paintCall is None:
+            self._paintCall = object()
+            self.scheduler(self._paint)
+        ContainerWidget.repaint(self)
+
+    def _paint(self):
+        self._paintCall = None
+        self.painter()
+
+    def changeFocus(self):
+        try:
+            ContainerWidget.changeFocus(self)
+        except YieldFocus:
+            try:
+                ContainerWidget.changeFocus(self)
+            except YieldFocus:
+                pass
+
+    def keystrokeReceived(self, keyID, modifier):
+        try:
+            ContainerWidget.keystrokeReceived(self, keyID, modifier)
+        except YieldFocus:
+            self.changeFocus()
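+
+# Illustrative sketch, not part of upstream Twisted: per the docstring above, a
+# TopWindow is typically given a painter that redraws it and a scheduler built
+# on reactor.callLater.  The helper below is hypothetical.
+def _exampleTopWindow(terminal, width, height):
+    from twisted.internet import reactor
+    top = TopWindow(lambda: top.draw(width, height, terminal),
+                    lambda f: reactor.callLater(0, f))
+    return top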
+
+
+class AbsoluteBox(ContainerWidget):
+    def moveChild(self, child, x, y):
+        for n in range(len(self.children)):
+            if self.children[n][0] is child:
+                self.children[n] = (child, x, y)
+                break
+        else:
+            raise ValueError("No such child", child)
+
+    def render(self, width, height, terminal):
+        for (ch, x, y) in self.children:
+            wrap = BoundedTerminalWrapper(terminal, width - x, height - y, x, y)
+            ch.draw(width, height, wrap)
+
+
+class _Box(ContainerWidget):
+    TOP, CENTER, BOTTOM = range(3)
+
+    def __init__(self, gravity=CENTER):
+        ContainerWidget.__init__(self)
+        self.gravity = gravity
+
+    def sizeHint(self):
+        height = 0
+        width = 0
+        for ch in self.children:
+            hint = ch.sizeHint()
+            if hint is None:
+                hint = (None, None)
+
+            if self.variableDimension == 0:
+                if hint[0] is None:
+                    width = None
+                elif width is not None:
+                    width += hint[0]
+                if hint[1] is None:
+                    height = None
+                elif height is not None:
+                    height = max(height, hint[1])
+            else:
+                if hint[0] is None:
+                    width = None
+                elif width is not None:
+                    width = max(width, hint[0])
+                if hint[1] is None:
+                    height = None
+                elif height is not None:
+                    height += hint[1]
+
+        return width, height
+
+
+    def render(self, width, height, terminal):
+        if not self.children:
+            return
+
+        greedy = 0
+        wants = []
+        for ch in self.children:
+            hint = ch.sizeHint()
+            if hint is None:
+                hint = (None, None)
+            if hint[self.variableDimension] is None:
+                greedy += 1
+            wants.append(hint[self.variableDimension])
+
+        length = (width, height)[self.variableDimension]
+        totalWant = sum([w for w in wants if w is not None])
+        if greedy:
+            leftForGreedy = int((length - totalWant) / greedy)
+
+        widthOffset = heightOffset = 0
+
+        for want, ch in zip(wants, self.children):
+            if want is None:
+                want = leftForGreedy
+
+            subWidth, subHeight = width, height
+            if self.variableDimension == 0:
+                subWidth = want
+            else:
+                subHeight = want
+
+            wrap = BoundedTerminalWrapper(
+                terminal,
+                subWidth,
+                subHeight,
+                widthOffset,
+                heightOffset,
+                )
+            ch.draw(subWidth, subHeight, wrap)
+            if self.variableDimension == 0:
+                widthOffset += want
+            else:
+                heightOffset += want
+
+
+class HBox(_Box):
+    variableDimension = 0
+
+class VBox(_Box):
+    variableDimension = 1
+
+
+class Packer(ContainerWidget):
+    def render(self, width, height, terminal):
+        if not self.children:
+            return
+
+        root = int(len(self.children) ** 0.5 + 0.5)
+        boxes = [VBox() for n in range(root)]
+        for n, ch in enumerate(self.children):
+            boxes[n % len(boxes)].addChild(ch)
+        h = HBox()
+        map(h.addChild, boxes)
+        h.render(width, height, terminal)
+
+
+class Canvas(Widget):
+    focused = False
+
+    contents = None
+
+    def __init__(self):
+        Widget.__init__(self)
+        self.resize(1, 1)
+
+    def resize(self, width, height):
+        contents = array.array('c', ' ' * width * height)
+        if self.contents is not None:
+            for x in range(min(width, self._width)):
+                for y in range(min(height, self._height)):
+                    contents[width * y + x] = self[x, y]
+        self.contents = contents
+        self._width = width
+        self._height = height
+        if self.x >= width:
+            self.x = width - 1
+        if self.y >= height:
+            self.y = height - 1
+
+    def __getitem__(self, (x, y)):
+        return self.contents[(self._width * y) + x]
+
+    def __setitem__(self, (x, y), value):
+        self.contents[(self._width * y) + x] = value
+
+    def clear(self):
+        self.contents = array.array('c', ' ' * len(self.contents))
+
+    def render(self, width, height, terminal):
+        if not width or not height:
+            return
+
+        if width != self._width or height != self._height:
+            self.resize(width, height)
+        for i in range(height):
+            terminal.cursorPosition(0, i)
+            terminal.write(''.join(self.contents[self._width * i:self._width * i + self._width])[:width])
+
+
+def horizontalLine(terminal, y, left, right):
+    terminal.selectCharacterSet(insults.CS_DRAWING, insults.G0)
+    terminal.cursorPosition(left, y)
+    terminal.write(chr(0161) * (right - left))
+    terminal.selectCharacterSet(insults.CS_US, insults.G0)
+
+def verticalLine(terminal, x, top, bottom):
+    terminal.selectCharacterSet(insults.CS_DRAWING, insults.G0)
+    for n in xrange(top, bottom):
+        terminal.cursorPosition(x, n)
+        terminal.write(chr(0170))
+    terminal.selectCharacterSet(insults.CS_US, insults.G0)
+
+
+def rectangle(terminal, (top, left), (width, height)):
+    terminal.selectCharacterSet(insults.CS_DRAWING, insults.G0)
+
+    terminal.cursorPosition(top, left)
+    terminal.write(chr(0154))
+    terminal.write(chr(0161) * (width - 2))
+    terminal.write(chr(0153))
+    for n in range(height - 2):
+        terminal.cursorPosition(left, top + n + 1)
+        terminal.write(chr(0170))
+        terminal.cursorForward(width - 2)
+        terminal.write(chr(0170))
+    terminal.cursorPosition(0, top + height - 1)
+    terminal.write(chr(0155))
+    terminal.write(chr(0161) * (width - 2))
+    terminal.write(chr(0152))
+
+    terminal.selectCharacterSet(insults.CS_US, insults.G0)
+
+class Border(Widget):
+    def __init__(self, containee):
+        Widget.__init__(self)
+        self.containee = containee
+        self.containee.parent = self
+
+    def focusReceived(self):
+        return self.containee.focusReceived()
+
+    def focusLost(self):
+        return self.containee.focusLost()
+
+    def keystrokeReceived(self, keyID, modifier):
+        return self.containee.keystrokeReceived(keyID, modifier)
+
+    def sizeHint(self):
+        hint = self.containee.sizeHint()
+        if hint is None:
+            hint = (None, None)
+        if hint[0] is None:
+            x = None
+        else:
+            x = hint[0] + 2
+        if hint[1] is None:
+            y = None
+        else:
+            y = hint[1] + 2
+        return x, y
+
+    def filthy(self):
+        self.containee.filthy()
+        Widget.filthy(self)
+
+    def render(self, width, height, terminal):
+        if self.containee.focused:
+            terminal.write('\x1b[31m')
+        rectangle(terminal, (0, 0), (width, height))
+        terminal.write('\x1b[0m')
+        wrap = BoundedTerminalWrapper(terminal, width - 2, height - 2, 1, 1)
+        self.containee.draw(width - 2, height - 2, wrap)
+
+
+class Button(Widget):
+    def __init__(self, label, onPress):
+        Widget.__init__(self)
+        self.label = label
+        self.onPress = onPress
+
+    def sizeHint(self):
+        return len(self.label), 1
+
+    def characterReceived(self, keyID, modifier):
+        if keyID == '\r':
+            self.onPress()
+
+    def render(self, width, height, terminal):
+        terminal.cursorPosition(0, 0)
+        if self.focused:
+            terminal.write('\x1b[1m' + self.label + '\x1b[0m')
+        else:
+            terminal.write(self.label)
+
+class TextInput(Widget):
+    def __init__(self, maxwidth, onSubmit):
+        Widget.__init__(self)
+        self.onSubmit = onSubmit
+        self.maxwidth = maxwidth
+        self.buffer = ''
+        self.cursor = 0
+
+    def setText(self, text):
+        self.buffer = text[:self.maxwidth]
+        self.cursor = len(self.buffer)
+        self.repaint()
+
+    def func_LEFT_ARROW(self, modifier):
+        if self.cursor > 0:
+            self.cursor -= 1
+            self.repaint()
+
+    def func_RIGHT_ARROW(self, modifier):
+        if self.cursor < len(self.buffer):
+            self.cursor += 1
+            self.repaint()
+
+    def backspaceReceived(self):
+        if self.cursor > 0:
+            self.buffer = self.buffer[:self.cursor - 1] + self.buffer[self.cursor:]
+            self.cursor -= 1
+            self.repaint()
+
+    def characterReceived(self, keyID, modifier):
+        if keyID == '\r':
+            self.onSubmit(self.buffer)
+        else:
+            if len(self.buffer) < self.maxwidth:
+                self.buffer = self.buffer[:self.cursor] + keyID + self.buffer[self.cursor:]
+                self.cursor += 1
+                self.repaint()
+
+    def sizeHint(self):
+        return self.maxwidth + 1, 1
+
+    def render(self, width, height, terminal):
+        currentText = self._renderText()
+        terminal.cursorPosition(0, 0)
+        if self.focused:
+            terminal.write(currentText[:self.cursor])
+            cursor(terminal, currentText[self.cursor:self.cursor+1] or ' ')
+            terminal.write(currentText[self.cursor+1:])
+            terminal.write(' ' * (self.maxwidth - len(currentText) + 1))
+        else:
+            more = self.maxwidth - len(currentText)
+            terminal.write(currentText + '_' * more)
+
+    def _renderText(self):
+        return self.buffer
+
+class PasswordInput(TextInput):
+    def _renderText(self):
+        return '*' * len(self.buffer)
+
+class TextOutput(Widget):
+    text = ''
+
+    def __init__(self, size=None):
+        Widget.__init__(self)
+        self.size = size
+
+    def sizeHint(self):
+        return self.size
+
+    def render(self, width, height, terminal):
+        terminal.cursorPosition(0, 0)
+        text = self.text[:width]
+        terminal.write(text + ' ' * (width - len(text)))
+
+    def setText(self, text):
+        self.text = text
+        self.repaint()
+
+    def focusReceived(self):
+        raise YieldFocus()
+
+class TextOutputArea(TextOutput):
+    WRAP, TRUNCATE = range(2)
+
+    def __init__(self, size=None, longLines=WRAP):
+        TextOutput.__init__(self, size)
+        self.longLines = longLines
+
+    def render(self, width, height, terminal):
+        n = 0
+        inputLines = self.text.splitlines()
+        outputLines = []
+        while inputLines:
+            if self.longLines == self.WRAP:
+                wrappedLines = tptext.greedyWrap(inputLines.pop(0), width)
+                outputLines.extend(wrappedLines or [''])
+            else:
+                outputLines.append(inputLines.pop(0)[:width])
+            if len(outputLines) >= height:
+                break
+        for n, L in enumerate(outputLines[:height]):
+            terminal.cursorPosition(0, n)
+            terminal.write(L)
+
+class Viewport(Widget):
+    _xOffset = 0
+    _yOffset = 0
+
+    def xOffset():
+        def get(self):
+            return self._xOffset
+        def set(self, value):
+            if self._xOffset != value:
+                self._xOffset = value
+                self.repaint()
+        return get, set
+    xOffset = property(*xOffset())
+
+    def yOffset():
+        def get(self):
+            return self._yOffset
+        def set(self, value):
+            if self._yOffset != value:
+                self._yOffset = value
+                self.repaint()
+        return get, set
+    yOffset = property(*yOffset())
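+    # (Descriptive note, not in upstream Twisted.)  xOffset and yOffset above
+    # use the "name = property(*name())" idiom: the temporary function returns
+    # a (getter, setter) pair, and the setter triggers a repaint whenever the
+    # offset actually changes.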
+
+    _width = 160
+    _height = 24
+
+    def __init__(self, containee):
+        Widget.__init__(self)
+        self.containee = containee
+        self.containee.parent = self
+
+        self._buf = helper.TerminalBuffer()
+        self._buf.width = self._width
+        self._buf.height = self._height
+        self._buf.connectionMade()
+
+    def filthy(self):
+        self.containee.filthy()
+        Widget.filthy(self)
+
+    def render(self, width, height, terminal):
+        self.containee.draw(self._width, self._height, self._buf)
+
+        # XXX /Lame/
+        for y, line in enumerate(self._buf.lines[self._yOffset:self._yOffset + height]):
+            terminal.cursorPosition(0, y)
+            n = 0
+            for n, (ch, attr) in enumerate(line[self._xOffset:self._xOffset + width]):
+                if ch is self._buf.void:
+                    ch = ' '
+                terminal.write(ch)
+            if n < width:
+                terminal.write(' ' * (width - n - 1))
+
+
+class _Scrollbar(Widget):
+    def __init__(self, onScroll):
+        Widget.__init__(self)
+        self.onScroll = onScroll
+        self.percent = 0.0
+
+    def smaller(self):
+        self.percent = min(1.0, max(0.0, self.onScroll(-1)))
+        self.repaint()
+
+    def bigger(self):
+        self.percent = min(1.0, max(0.0, self.onScroll(+1)))
+        self.repaint()
+
+
+class HorizontalScrollbar(_Scrollbar):
+    def sizeHint(self):
+        return (None, 1)
+
+    def func_LEFT_ARROW(self, modifier):
+        self.smaller()
+
+    def func_RIGHT_ARROW(self, modifier):
+        self.bigger()
+
+    _left = u'\N{BLACK LEFT-POINTING TRIANGLE}'
+    _right = u'\N{BLACK RIGHT-POINTING TRIANGLE}'
+    _bar = u'\N{LIGHT SHADE}'
+    _slider = u'\N{DARK SHADE}'
+    def render(self, width, height, terminal):
+        terminal.cursorPosition(0, 0)
+        n = width - 3
+        before = int(n * self.percent)
+        after = n - before
+        me = self._left + (self._bar * before) + self._slider + (self._bar * after) + self._right
+        terminal.write(me.encode('utf-8'))
+
+
+class VerticalScrollbar(_Scrollbar):
+    def sizeHint(self):
+        return (1, None)
+
+    def func_UP_ARROW(self, modifier):
+        self.smaller()
+
+    def func_DOWN_ARROW(self, modifier):
+        self.bigger()
+
+    _up = u'\N{BLACK UP-POINTING TRIANGLE}'
+    _down = u'\N{BLACK DOWN-POINTING TRIANGLE}'
+    _bar = u'\N{LIGHT SHADE}'
+    _slider = u'\N{DARK SHADE}'
+    def render(self, width, height, terminal):
+        terminal.cursorPosition(0, 0)
+        knob = int(self.percent * (height - 2))
+        terminal.write(self._up.encode('utf-8'))
+        for i in xrange(1, height - 1):
+            terminal.cursorPosition(0, i)
+            if i != (knob + 1):
+                terminal.write(self._bar.encode('utf-8'))
+            else:
+                terminal.write(self._slider.encode('utf-8'))
+        terminal.cursorPosition(0, height - 1)
+        terminal.write(self._down.encode('utf-8'))
+
+
+class ScrolledArea(Widget):
+    """
+    A L{ScrolledArea} contains another widget wrapped in a viewport and
+    vertical and horizontal scrollbars for moving the viewport around.
+    """
+    def __init__(self, containee):
+        Widget.__init__(self)
+        self._viewport = Viewport(containee)
+        self._horiz = HorizontalScrollbar(self._horizScroll)
+        self._vert = VerticalScrollbar(self._vertScroll)
+
+        for w in self._viewport, self._horiz, self._vert:
+            w.parent = self
+
+    def _horizScroll(self, n):
+        self._viewport.xOffset += n
+        self._viewport.xOffset = max(0, self._viewport.xOffset)
+        return self._viewport.xOffset / 25.0
+
+    def _vertScroll(self, n):
+        self._viewport.yOffset += n
+        self._viewport.yOffset = max(0, self._viewport.yOffset)
+        return self._viewport.yOffset / 25.0
+
+    def func_UP_ARROW(self, modifier):
+        self._vert.smaller()
+
+    def func_DOWN_ARROW(self, modifier):
+        self._vert.bigger()
+
+    def func_LEFT_ARROW(self, modifier):
+        self._horiz.smaller()
+
+    def func_RIGHT_ARROW(self, modifier):
+        self._horiz.bigger()
+
+    def filthy(self):
+        self._viewport.filthy()
+        self._horiz.filthy()
+        self._vert.filthy()
+        Widget.filthy(self)
+
+    def render(self, width, height, terminal):
+        wrapper = BoundedTerminalWrapper(terminal, width - 2, height - 2, 1, 1)
+        self._viewport.draw(width - 2, height - 2, wrapper)
+        if self.focused:
+            terminal.write('\x1b[31m')
+        horizontalLine(terminal, 0, 1, width - 1)
+        verticalLine(terminal, 0, 1, height - 1)
+        self._vert.draw(1, height - 1, BoundedTerminalWrapper(terminal, 1, height - 1, width - 1, 0))
+        self._horiz.draw(width, 1, BoundedTerminalWrapper(terminal, width, 1, 0, height - 1))
+        terminal.write('\x1b[0m')
+
+def cursor(terminal, ch):
+    terminal.saveCursor()
+    terminal.selectGraphicRendition(str(insults.REVERSE_VIDEO))
+    terminal.write(ch)
+    terminal.restoreCursor()
+    terminal.cursorForward()
+
+class Selection(Widget):
+    # Index into the sequence
+    focusedIndex = 0
+
+    # Offset into the displayed subset of the sequence
+    renderOffset = 0
+
+    def __init__(self, sequence, onSelect, minVisible=None):
+        Widget.__init__(self)
+        self.sequence = sequence
+        self.onSelect = onSelect
+        self.minVisible = minVisible
+        if minVisible is not None:
+            self._width = max(map(len, self.sequence))
+
+    def sizeHint(self):
+        if self.minVisible is not None:
+            return self._width, self.minVisible
+
+    def func_UP_ARROW(self, modifier):
+        if self.focusedIndex > 0:
+            self.focusedIndex -= 1
+            if self.renderOffset > 0:
+                self.renderOffset -= 1
+            self.repaint()
+
+    def func_PGUP(self, modifier):
+        if self.renderOffset != 0:
+            self.focusedIndex -= self.renderOffset
+            self.renderOffset = 0
+        else:
+            self.focusedIndex = max(0, self.focusedIndex - self.height)
+        self.repaint()
+
+    def func_DOWN_ARROW(self, modifier):
+        if self.focusedIndex < len(self.sequence) - 1:
+            self.focusedIndex += 1
+            if self.renderOffset < self.height - 1:
+                self.renderOffset += 1
+            self.repaint()
+
+
+    def func_PGDN(self, modifier):
+        if self.renderOffset != self.height - 1:
+            change = self.height - self.renderOffset - 1
+            if change + self.focusedIndex >= len(self.sequence):
+                change = len(self.sequence) - self.focusedIndex - 1
+            self.focusedIndex += change
+            self.renderOffset = self.height - 1
+        else:
+            self.focusedIndex = min(len(self.sequence) - 1, self.focusedIndex + self.height)
+        self.repaint()
+
+    def characterReceived(self, keyID, modifier):
+        if keyID == '\r':
+            self.onSelect(self.sequence[self.focusedIndex])
+
+    def render(self, width, height, terminal):
+        self.height = height
+        start = self.focusedIndex - self.renderOffset
+        if start > len(self.sequence) - height:
+            start = max(0, len(self.sequence) - height)
+
+        elements = self.sequence[start:start+height]
+
+        for n, ele in enumerate(elements):
+            terminal.cursorPosition(0, n)
+            if n == self.renderOffset:
+                terminal.saveCursor()
+                if self.focused:
+                    modes = str(insults.REVERSE_VIDEO), str(insults.BOLD)
+                else:
+                    modes = str(insults.REVERSE_VIDEO),
+                terminal.selectGraphicRendition(*modes)
+            text = ele[:width]
+            terminal.write(text + (' ' * (width - len(text))))
+            if n == self.renderOffset:
+                terminal.restoreCursor()
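+
+
+# Illustrative sketch, not part of upstream Twisted: the widgets above compose
+# by containment; for example, a bordered, scrollable selection list.  The
+# function below is hypothetical.
+def _exampleMenu(onSelect):
+    menu = Selection(['red', 'green', 'blue'], onSelect)
+    return Border(ScrolledArea(menu))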
diff --git a/ThirdParty/Twisted/twisted/conch/interfaces.py b/ThirdParty/Twisted/twisted/conch/interfaces.py
new file mode 100644
index 0000000..d42811a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/interfaces.py
@@ -0,0 +1,402 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module contains interfaces defined for the L{twisted.conch} package.
+"""
+
+from zope.interface import Interface, Attribute
+
+class IConchUser(Interface):
+    """
+    A user who has been authenticated to Cred through Conch.  This is
+    the interface between the SSH connection and the user.
+    """
+
+    conn = Attribute('The SSHConnection object for this user.')
+
+    def lookupChannel(channelType, windowSize, maxPacket, data):
+        """
+        The other side requested a channel of some sort.
+        channelType is the type of channel being requested,
+        windowSize is the initial size of the remote window,
+        maxPacket is the largest packet we should send,
+        data is any other packet data (often nothing).
+
+        We return a subclass of L{SSHChannel<ssh.channel.SSHChannel>}.  If
+        an appropriate channel can not be found, an exception will be
+        raised.  If a L{ConchError<error.ConchError>} is raised, the .value
+        will be the message, and the .data will be the error code.
+
+        @type channelType:  C{str}
+        @type windowSize:   C{int}
+        @type maxPacket:    C{int}
+        @type data:         C{str}
+        @rtype:             subclass of L{SSHChannel}/C{tuple}
+        """
+
+    def lookupSubsystem(subsystem, data):
+        """
+        The other side requested a subsystem.
+        subsystem is the name of the subsystem being requested.
+        data is any other packet data (often nothing).
+
+        We return a L{Protocol}.
+        """
+
+    def gotGlobalRequest(requestType, data):
+        """
+        A global request was sent from the other side.
+
+        By default, this dispatches to a method C{global_requestType} with any
+        non-alphanumerics in the requestType replaced with _'s.  If no suitable
+        method is found, the request is refused.  The found method is called
+        with the request data.
+        """
+
+class ISession(Interface):
+
+    def getPty(term, windowSize, modes):
+        """
+        Get a pseudo-terminal for use by a shell or command.
+
+        If a pseudo-terminal is not available, or the request otherwise
+        fails, raise an exception.
+        """
+
+    def openShell(proto):
+        """
+        Open a shell and connect it to proto.
+
+        @param proto: a L{ProcessProtocol} instance.
+        """
+
+    def execCommand(proto, command):
+        """
+        Execute a command.
+
+        @param proto: a L{ProcessProtocol} instance.
+        """
+
+    def windowChanged(newWindowSize):
+        """
+        Called when the size of the remote screen has changed.
+        """
+
+    def eofReceived():
+        """
+        Called when the other side has indicated no more data will be sent.
+        """
+
+    def closed():
+        """
+        Called when the session is closed.
+        """
+
+
+class ISFTPServer(Interface):
+    """
+    The only attribute of this class is "avatar".  It is the avatar
+    returned by the Realm that we are authenticated with, and
+    represents the logged-in user.  Each method should check to verify
+    that the user has permission for their actions.
+    """
+
+    def gotVersion(otherVersion, extData):
+        """
+        Called when the client sends their version info.
+
+        otherVersion is an integer representing the version of the SFTP
+        protocol they are claiming.
+        extData is a dictionary of extended_name : extended_data items.
+        These items are sent by the client to indicate additional features.
+
+        This method should return a dictionary of extended_name : extended_data
+        items.  These items are the additional features (if any) supported
+        by the server.
+        """
+        return {}
+
+    def openFile(filename, flags, attrs):
+        """
+        Called when the client asks to open a file.
+
+        @param filename: a string representing the file to open.
+
+        @param flags: an integer of the flags to open the file with, ORed together.
+        The flags and their values are listed at the bottom of this file.
+
+        @param attrs: a dictionary of attributes to open the file with,
+        consisting of 0 or more keys.  The possible keys are::
+
+            size: the size of the file in bytes
+            uid: the user ID of the file as an integer
+            gid: the group ID of the file as an integer
+            permissions: the permissions of the file as an integer.
+            The bit representation of this field is defined by POSIX.
+            atime: the access time of the file as seconds since the epoch.
+            mtime: the modification time of the file as seconds since the epoch.
+            ext_*: extended attributes.  The server is not required to
+            understand this, but it may.
+
+        NOTE: there is no way to indicate text or binary files.  It is up
+        to the SFTP client to deal with this.
+
+        This method returns an object that meets the ISFTPFile interface.
+        Alternatively, it can return a L{Deferred} that will be called back
+        with the object.
+        """
+
+    def removeFile(filename):
+        """
+        Remove the given file.
+
+        This method returns when the remove succeeds, or a Deferred that is
+        called back when it succeeds.
+
+        @param filename: the name of the file as a string.
+        """
+
+    def renameFile(oldpath, newpath):
+        """
+        Rename the given file.
+
+        This method returns when the rename succeeds, or a L{Deferred} that is
+        called back when it succeeds. If the rename fails, C{renameFile} will
+        raise an implementation-dependent exception.
+
+        @param oldpath: the current location of the file.
+        @param newpath: the new file name.
+        """
+
+    def makeDirectory(path, attrs):
+        """
+        Make a directory.
+
+        This method returns when the directory is created, or a Deferred that
+        is called back when it is created.
+
+        @param path: the name of the directory to create as a string.
+        @param attrs: a dictionary of attributes to create the directory with.
+        Its meaning is the same as the attrs in the L{openFile} method.
+        """
+
+    def removeDirectory(path):
+        """
+        Remove a directory (non-recursively).
+
+        It is an error to remove a directory that has files or directories in
+        it.
+
+        This method returns when the directory is removed, or a Deferred that
+        is called back when it is removed.
+
+        @param path: the directory to remove.
+        """
+
+    def openDirectory(path):
+        """
+        Open a directory for scanning.
+
+        This method returns an iterable object that has a close() method,
+        or a Deferred that is called back with same.
+
+        The close() method is called when the client is finished reading
+        from the directory.  At this point, the iterable will no longer
+        be used.
+
+        The iterable should return triples of the form (filename,
+        longname, attrs) or Deferreds that return the same.  The
+        sequence must support __getitem__, but otherwise may be any
+        'sequence-like' object.
+
+        filename is the name of the file relative to the directory.
+        longname is an expanded format of the filename.  The recommended format
+        is:
+        -rwxr-xr-x   1 mjos     staff      348911 Mar 25 14:29 t-filexfer
+        1234567890 123 12345678 12345678 12345678 123456789012
+
+        The first line is sample output, the second is the length of the field.
+        The fields are: permissions, link count, user owner, group owner,
+        size in bytes, modification time.
+
+        attrs is a dictionary in the format of the attrs argument to openFile.
+
+        @param path: the directory to open.
+        """
+
+    def getAttrs(path, followLinks):
+        """
+        Return the attributes for the given path.
+
+        This method returns a dictionary in the same format as the attrs
+        argument to openFile or a Deferred that is called back with same.
+
+        @param path: the path to return attributes for as a string.
+        @param followLinks: a boolean.  If it is True, follow symbolic links
+        and return attributes for the real path at the base.  If it is False,
+        return attributes for the specified path.
+        """
+
+    def setAttrs(path, attrs):
+        """
+        Set the attributes for the path.
+
+        This method returns when the attributes are set or a Deferred that is
+        called back when they are.
+
+        @param path: the path to set attributes for as a string.
+        @param attrs: a dictionary in the same format as the attrs argument to
+        L{openFile}.
+        """
+
+    def readLink(path):
+        """
+        Find the root of a set of symbolic links.
+
+        This method returns the target of the link, or a Deferred that
+        returns the same.
+
+        @param path: the path of the symlink to read.
+        """
+
+    def makeLink(linkPath, targetPath):
+        """
+        Create a symbolic link.
+
+        This method returns when the link is made, or a Deferred that
+        returns the same.
+
+        @param linkPath: the pathname of the symlink as a string.
+        @param targetPath: the path of the target of the link as a string.
+        """
+
+    def realPath(path):
+        """
+        Convert any path to an absolute path.
+
+        This method returns the absolute path as a string, or a Deferred
+        that returns the same.
+
+        @param path: the path to convert as a string.
+        """
+
+    def extendedRequest(extendedName, extendedData):
+        """
+        This is the extension mechanism for SFTP.  The other side can send us
+        arbitrary requests.
+
+        If we don't implement the request given by extendedName, raise
+        NotImplementedError.
+
+        The return value is a string, or a Deferred that will be called
+        back with a string.
+
+        @param extendedName: the name of the request as a string.
+        @param extendedData: the data the other side sent with the request,
+        as a string.
+        """
+
+
+
+class IKnownHostEntry(Interface):
+    """
+    A L{IKnownHostEntry} is an entry in an OpenSSH-formatted C{known_hosts}
+    file.
+
+    @since: 8.2
+    """
+
+    def matchesKey(key):
+        """
+        Return True if this entry matches the given Key object, False
+        otherwise.
+
+        @param key: The key object to match against.
+        @type key: L{twisted.conch.ssh.Key}
+        """
+
+
+    def matchesHost(hostname):
+        """
+        Return True if this entry matches the given hostname, False otherwise.
+
+        Note that this does no name resolution; if you want to match an IP
+        address, you have to resolve it yourself, and pass it in as a dotted
+        quad string.
+
+        @param hostname: The hostname to match against.
+        @type hostname: L{str}
+        """
+
+
+    def toString():
+        """
+        @return: a serialized string representation of this entry, suitable for
+        inclusion in a known_hosts file.  (Newline not included.)
+
+        @rtype: L{str}
+        """
+
+
+
+class ISFTPFile(Interface):
+    """
+    This represents an open file on the server.  An object adhering to this
+    interface should be returned from L{openFile}().
+    """
+
+    def close():
+        """
+        Close the file.
+
+        This method returns nothing if the close succeeds immediately, or a
+        Deferred that is called back when the close succeeds.
+        """
+
+    def readChunk(offset, length):
+        """
+        Read from the file.
+
+        If EOF is reached before any data is read, raise EOFError.
+
+        This method returns the data as a string, or a Deferred that is
+        called back with same.
+
+        @param offset: an integer that is the index to start from in the file.
+        @param length: the maximum length of data to return.  The actual amount
+        returned may be less than this.  For normal disk files, however,
+        this should read the requested number (up to the end of the file).
+        """
+
+    def writeChunk(offset, data):
+        """
+        Write to the file.
+
+        This method returns when the write completes, or a Deferred that is
+        called when it completes.
+
+        @param offset: an integer that is the index to start from in the file.
+        @param data: a string that is the data to write.
+        """
+
+    def getAttrs():
+        """
+        Return the attributes for the file.
+
+        This method returns a dictionary in the same format as the attrs
+        argument to L{openFile} or a L{Deferred} that is called back with same.
+        """
+
+    def setAttrs(attrs):
+        """
+        Set the attributes for the file.
+
+        This method returns when the attributes are set or a Deferred that is
+        called back when they are.
+
+        @param attrs: a dictionary in the same format as the attrs argument to
+        L{openFile}.
+        """
+
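Because every method above may return either a plain value or a Deferred,
callers usually drive an ISFTPFile through a small callback chain.  The
following sketch (illustrative only, assuming remoteFile is an already-opened
ISFTPFile) reads a remote file sequentially, relying on the readChunk()
contract documented above: short reads are allowed and EOFError ends the loop.

    from twisted.internet import defer

    def readAll(remoteFile, chunkSize=32768):
        """Return a Deferred firing with the whole content of remoteFile."""
        chunks = []

        def _read(offset):
            d = defer.maybeDeferred(remoteFile.readChunk, offset, chunkSize)
            d.addCallbacks(_gotChunk, _atEOF, callbackArgs=(offset,))
            return d

        def _gotChunk(data, offset):
            chunks.append(data)
            # readChunk may return less than chunkSize; advance by what arrived.
            return _read(offset + len(data))

        def _atEOF(reason):
            reason.trap(EOFError)
            return ''.join(chunks)

        return _read(0)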
+
diff --git a/ThirdParty/Twisted/twisted/conch/ls.py b/ThirdParty/Twisted/twisted/conch/ls.py
new file mode 100644
index 0000000..ab44f85
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ls.py
@@ -0,0 +1,75 @@
+# -*- test-case-name: twisted.conch.test.test_cftp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import array
+import stat
+
+from time import time, strftime, localtime
+
+# locale-independent month names to use instead of strftime's
+_MONTH_NAMES = dict(zip(
+        range(1, 13),
+        "Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split()))
+
+
+def lsLine(name, s):
+    """
+    Build an 'ls' line for a file ('file' in its generic sense, it
+    can be of any type).
+    """
+    mode = s.st_mode
+    perms = array.array('c', '-'*10)
+    ft = stat.S_IFMT(mode)
+    if stat.S_ISDIR(ft): perms[0] = 'd'
+    elif stat.S_ISCHR(ft): perms[0] = 'c'
+    elif stat.S_ISBLK(ft): perms[0] = 'b'
+    elif stat.S_ISREG(ft): perms[0] = '-'
+    elif stat.S_ISFIFO(ft): perms[0] = 'f'
+    elif stat.S_ISLNK(ft): perms[0] = 'l'
+    elif stat.S_ISSOCK(ft): perms[0] = 's'
+    else: perms[0] = '!'
+    # user
+    if mode&stat.S_IRUSR:perms[1] = 'r'
+    if mode&stat.S_IWUSR:perms[2] = 'w'
+    if mode&stat.S_IXUSR:perms[3] = 'x'
+    # group
+    if mode&stat.S_IRGRP:perms[4] = 'r'
+    if mode&stat.S_IWGRP:perms[5] = 'w'
+    if mode&stat.S_IXGRP:perms[6] = 'x'
+    # other
+    if mode&stat.S_IROTH:perms[7] = 'r'
+    if mode&stat.S_IWOTH:perms[8] = 'w'
+    if mode&stat.S_IXOTH:perms[9] = 'x'
+    # suid/sgid
+    if mode&stat.S_ISUID:
+        if perms[3] == 'x': perms[3] = 's'
+        else: perms[3] = 'S'
+    if mode&stat.S_ISGID:
+        if perms[6] == 'x': perms[6] = 's'
+        else: perms[6] = 'S'
+
+    lsresult = [
+        perms.tostring(),
+        str(s.st_nlink).rjust(5),
+        ' ',
+        str(s.st_uid).ljust(9),
+        str(s.st_gid).ljust(9),
+        str(s.st_size).rjust(8),
+        ' ',
+    ]
+
+    # need to specify the month manually, as strftime depends on locale
+    ttup = localtime(s.st_mtime)
+    sixmonths = 60 * 60 * 24 * 7 * 26
+    if s.st_mtime + sixmonths < time(): # last edited more than 6mo ago
+        strtime = strftime("%%s %d  %Y ", ttup)
+    else:
+        strtime = strftime("%%s %d %H:%M ", ttup)
+    lsresult.append(strtime % (_MONTH_NAMES[ttup[1]],))
+
+    lsresult.append(name)
+    return ''.join(lsresult)
+
+
+__all__ = ['lsLine']
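A quick way to see the format lsLine() builds (an illustrative example, not
part of the file above) is to feed it an os.lstat() result; the exact output
naturally depends on the file being inspected:

    import os
    from twisted.conch.ls import lsLine

    st = os.lstat('/etc/hosts')
    print lsLine('hosts', st)
    # something like: -rw-r--r--    1 0        0             220 Jan 01 12:00 hosts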
diff --git a/ThirdParty/Twisted/twisted/conch/manhole.py b/ThirdParty/Twisted/twisted/conch/manhole.py
new file mode 100644
index 0000000..dee6a02
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/manhole.py
@@ -0,0 +1,340 @@
+# -*- test-case-name: twisted.conch.test.test_manhole -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Line-input oriented interactive interpreter loop.
+
+Provides classes for handling Python source input and arbitrary output
+interactively from a Twisted application.  Also included is syntax coloring
+code with support for VT102 terminals, control code handling (^C, ^D, ^Q),
+and reasonable handling of Deferreds.
+
+@author: Jp Calderone
+"""
+
+import code, sys, StringIO, tokenize
+
+from twisted.conch import recvline
+
+from twisted.internet import defer
+from twisted.python.htmlizer import TokenPrinter
+
+class FileWrapper:
+    """Minimal write-file-like object.
+
+    Writes are translated into addOutput calls on an object passed to
+    __init__.  Newlines are also converted from network to local style.
+    """
+
+    softspace = 0
+    state = 'normal'
+
+    def __init__(self, o):
+        self.o = o
+
+    def flush(self):
+        pass
+
+    def write(self, data):
+        self.o.addOutput(data.replace('\r\n', '\n'))
+
+    def writelines(self, lines):
+        self.write(''.join(lines))
+
+class ManholeInterpreter(code.InteractiveInterpreter):
+    """Interactive Interpreter with special output and Deferred support.
+
+    Aside from the features provided by L{code.InteractiveInterpreter}, this
+    class captures sys.stdout output and redirects it to the appropriate
+    location (the Manhole protocol instance).  It also treats Deferreds
+    which reach the top-level specially: each is formatted to the user with
+    a unique identifier and a new callback and errback added to it, each of
+    which will format the unique identifier and the result with which the
+    Deferred fires and then pass it on to the next participant in the
+    callback chain.
+    """
+
+    numDeferreds = 0
+    def __init__(self, handler, locals=None, filename="<console>"):
+        code.InteractiveInterpreter.__init__(self, locals)
+        self._pendingDeferreds = {}
+        self.handler = handler
+        self.filename = filename
+        self.resetBuffer()
+
+    def resetBuffer(self):
+        """Reset the input buffer."""
+        self.buffer = []
+
+    def push(self, line):
+        """Push a line to the interpreter.
+
+        The line should not have a trailing newline; it may have
+        internal newlines.  The line is appended to a buffer and the
+        interpreter's runsource() method is called with the
+        concatenated contents of the buffer as source.  If this
+        indicates that the command was executed or invalid, the buffer
+        is reset; otherwise, the command is incomplete, and the buffer
+        is left as it was after the line was appended.  The return
+        value is 1 if more input is required, 0 if the line was dealt
+        with in some way (this is the same as runsource()).
+
+        """
+        self.buffer.append(line)
+        source = "\n".join(self.buffer)
+        more = self.runsource(source, self.filename)
+        if not more:
+            self.resetBuffer()
+        return more
+
+    def runcode(self, *a, **kw):
+        orighook, sys.displayhook = sys.displayhook, self.displayhook
+        try:
+            origout, sys.stdout = sys.stdout, FileWrapper(self.handler)
+            try:
+                code.InteractiveInterpreter.runcode(self, *a, **kw)
+            finally:
+                sys.stdout = origout
+        finally:
+            sys.displayhook = orighook
+
+    def displayhook(self, obj):
+        self.locals['_'] = obj
+        if isinstance(obj, defer.Deferred):
+            # XXX Ick, where is my "hasFired()" interface?
+            if hasattr(obj, "result"):
+                self.write(repr(obj))
+            elif id(obj) in self._pendingDeferreds:
+                self.write("<Deferred #%d>" % (self._pendingDeferreds[id(obj)][0],))
+            else:
+                d = self._pendingDeferreds
+                k = self.numDeferreds
+                d[id(obj)] = (k, obj)
+                self.numDeferreds += 1
+                obj.addCallbacks(self._cbDisplayDeferred, self._ebDisplayDeferred,
+                                 callbackArgs=(k, obj), errbackArgs=(k, obj))
+                self.write("<Deferred #%d>" % (k,))
+        elif obj is not None:
+            self.write(repr(obj))
+
+    def _cbDisplayDeferred(self, result, k, obj):
+        self.write("Deferred #%d called back: %r" % (k, result), True)
+        del self._pendingDeferreds[id(obj)]
+        return result
+
+    def _ebDisplayDeferred(self, failure, k, obj):
+        self.write("Deferred #%d failed: %r" % (k, failure.getErrorMessage()), True)
+        del self._pendingDeferreds[id(obj)]
+        return failure
+
+    def write(self, data, async=False):
+        self.handler.addOutput(data, async)
+
+CTRL_C = '\x03'
+CTRL_D = '\x04'
+CTRL_BACKSLASH = '\x1c'
+CTRL_L = '\x0c'
+CTRL_A = '\x01'
+CTRL_E = '\x05'
+
+class Manhole(recvline.HistoricRecvLine):
+    """Mediator between a fancy line source and an interactive interpreter.
+
+    This accepts lines from its transport and passes them on to a
+    L{ManholeInterpreter}.  Control commands (^C, ^D, ^\) are also handled
+    with something approximating their normal terminal-mode behavior.  It
+    can optionally be constructed with a dict which will be used as the
+    local namespace for any code executed.
+    """
+
+    namespace = None
+
+    def __init__(self, namespace=None):
+        recvline.HistoricRecvLine.__init__(self)
+        if namespace is not None:
+            self.namespace = namespace.copy()
+
+    def connectionMade(self):
+        recvline.HistoricRecvLine.connectionMade(self)
+        self.interpreter = ManholeInterpreter(self, self.namespace)
+        self.keyHandlers[CTRL_C] = self.handle_INT
+        self.keyHandlers[CTRL_D] = self.handle_EOF
+        self.keyHandlers[CTRL_L] = self.handle_FF
+        self.keyHandlers[CTRL_A] = self.handle_HOME
+        self.keyHandlers[CTRL_E] = self.handle_END
+        self.keyHandlers[CTRL_BACKSLASH] = self.handle_QUIT
+
+
+    def handle_INT(self):
+        """
+        Handle ^C as an interrupt keystroke by resetting the current input
+        variables to their initial state.
+        """
+        self.pn = 0
+        self.lineBuffer = []
+        self.lineBufferIndex = 0
+        self.interpreter.resetBuffer()
+
+        self.terminal.nextLine()
+        self.terminal.write("KeyboardInterrupt")
+        self.terminal.nextLine()
+        self.terminal.write(self.ps[self.pn])
+
+
+    def handle_EOF(self):
+        if self.lineBuffer:
+            self.terminal.write('\a')
+        else:
+            self.handle_QUIT()
+
+
+    def handle_FF(self):
+        """
+        Handle a 'form feed' byte - generally used to request a screen
+        refresh/redraw.
+        """
+        self.terminal.eraseDisplay()
+        self.terminal.cursorHome()
+        self.drawInputLine()
+
+
+    def handle_QUIT(self):
+        self.terminal.loseConnection()
+
+
+    def _needsNewline(self):
+        w = self.terminal.lastWrite
+        return not w.endswith('\n') and not w.endswith('\x1bE')
+
+    def addOutput(self, bytes, async=False):
+        if async:
+            self.terminal.eraseLine()
+            self.terminal.cursorBackward(len(self.lineBuffer) + len(self.ps[self.pn]))
+
+        self.terminal.write(bytes)
+
+        if async:
+            if self._needsNewline():
+                self.terminal.nextLine()
+
+            self.terminal.write(self.ps[self.pn])
+
+            if self.lineBuffer:
+                oldBuffer = self.lineBuffer
+                self.lineBuffer = []
+                self.lineBufferIndex = 0
+
+                self._deliverBuffer(oldBuffer)
+
+    def lineReceived(self, line):
+        more = self.interpreter.push(line)
+        self.pn = bool(more)
+        if self._needsNewline():
+            self.terminal.nextLine()
+        self.terminal.write(self.ps[self.pn])
+
+class VT102Writer:
+    """Colorizer for Python tokens.
+
+    A series of tokens are written to instances of this object.  Each is
+    colored in a particular way.  The final line of the result of this is
+    generally added to the output.
+    """
+
+    typeToColor = {
+        'identifier': '\x1b[31m',
+        'keyword': '\x1b[32m',
+        'parameter': '\x1b[33m',
+        'variable': '\x1b[1;33m',
+        'string': '\x1b[35m',
+        'number': '\x1b[36m',
+        'op': '\x1b[37m'}
+
+    normalColor = '\x1b[0m'
+
+    def __init__(self):
+        self.written = []
+
+    def color(self, type):
+        r = self.typeToColor.get(type, '')
+        return r
+
+    def write(self, token, type=None):
+        if token and token != '\r':
+            c = self.color(type)
+            if c:
+                self.written.append(c)
+            self.written.append(token)
+            if c:
+                self.written.append(self.normalColor)
+
+    def __str__(self):
+        s = ''.join(self.written)
+        return s.strip('\n').splitlines()[-1]
+
+def lastColorizedLine(source):
+    """Tokenize and colorize the given Python source.
+
+    Returns a VT102-format colorized version of the last line of C{source}.
+    """
+    w = VT102Writer()
+    p = TokenPrinter(w.write).printtoken
+    s = StringIO.StringIO(source)
+
+    tokenize.tokenize(s.readline, p)
+
+    return str(w)
+
+class ColoredManhole(Manhole):
+    """A REPL which syntax colors input as users type it.
+    """
+
+    def getSource(self):
+        """Return a string containing the currently entered source.
+
+        This is only the code which will be considered for execution
+        next.
+        """
+        return ('\n'.join(self.interpreter.buffer) +
+                '\n' +
+                ''.join(self.lineBuffer))
+
+
+    def characterReceived(self, ch, moreCharactersComing):
+        if self.mode == 'insert':
+            self.lineBuffer.insert(self.lineBufferIndex, ch)
+        else:
+            self.lineBuffer[self.lineBufferIndex:self.lineBufferIndex+1] = [ch]
+        self.lineBufferIndex += 1
+
+        if moreCharactersComing:
+            # Skip it all, we'll get called with another character in
+            # like 2 femtoseconds.
+            return
+
+        if ch == ' ':
+            # Don't bother to try to color whitespace
+            self.terminal.write(ch)
+            return
+
+        source = self.getSource()
+
+        # Try to write some junk
+        try:
+            coloredLine = lastColorizedLine(source)
+        except tokenize.TokenError:
+            # We couldn't do it.  Strange.  Oh well, just add the character.
+            self.terminal.write(ch)
+        else:
+            # Success!  Clear the source on this line.
+            self.terminal.eraseLine()
+            self.terminal.cursorBackward(len(self.lineBuffer) + len(self.ps[self.pn]) - 1)
+
+            # And write a new, colorized one.
+            self.terminal.write(self.ps[self.pn] + coloredLine)
+
+            # And move the cursor to where it belongs
+            n = len(self.lineBuffer) - self.lineBufferIndex
+            if n:
+                self.terminal.cursorBackward(n)
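To see ManholeInterpreter's line buffering and output routing in isolation, it
is enough to hand it any object with an addOutput(data, async=False) method.
A small sketch follows; CollectingHandler is a hypothetical stand-in, not part
of the file above:

    class CollectingHandler:
        """Minimal stand-in for the Manhole protocol instance."""
        def __init__(self):
            self.output = []
        def addOutput(self, data, async=False):
            self.output.append(data)

    handler = CollectingHandler()
    interp = ManholeInterpreter(handler, locals={'x': 41})

    interp.push("x + 1")          # complete: runs, repr(42) reaches the handler
    interp.push("def f():")       # true return value: more input is required
    interp.push("    return x + 1")
    interp.push("")               # blank line completes the definition
    interp.push("f()")
    print ''.join(handler.output)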
diff --git a/ThirdParty/Twisted/twisted/conch/manhole_ssh.py b/ThirdParty/Twisted/twisted/conch/manhole_ssh.py
new file mode 100644
index 0000000..a2297ef
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/manhole_ssh.py
@@ -0,0 +1,146 @@
+# -*- test-case-name: twisted.conch.test.test_manhole -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+insults/SSH integration support.
+
+@author: Jp Calderone
+"""
+
+from zope.interface import implements
+
+from twisted.conch import avatar, interfaces as iconch, error as econch
+from twisted.conch.ssh import factory, keys, session
+from twisted.cred import credentials, checkers, portal
+from twisted.python import components
+
+from twisted.conch.insults import insults
+
+class _Glue:
+    """A feeble class for making one attribute look like another.
+
+    This should be replaced with a real class at some point, probably.
+    Try not to write new code that uses it.
+    """
+    def __init__(self, **kw):
+        self.__dict__.update(kw)
+
+    def __getattr__(self, name):
+        raise AttributeError(self.name, "has no attribute", name)
+
+class TerminalSessionTransport:
+    def __init__(self, proto, chainedProtocol, avatar, width, height):
+        self.proto = proto
+        self.avatar = avatar
+        self.chainedProtocol = chainedProtocol
+
+        session = self.proto.session
+
+        self.proto.makeConnection(
+            _Glue(write=self.chainedProtocol.dataReceived,
+                  loseConnection=lambda: avatar.conn.sendClose(session),
+                  name="SSH Proto Transport"))
+
+        def loseConnection():
+            self.proto.loseConnection()
+
+        self.chainedProtocol.makeConnection(
+            _Glue(write=self.proto.write,
+                  loseConnection=loseConnection,
+                  name="Chained Proto Transport"))
+
+        # XXX TODO
+        # chainedProtocol is supposed to be an ITerminalTransport,
+        # maybe.  That means perhaps its terminalProtocol attribute is
+        # an ITerminalProtocol, it could be.  So calling terminalSize
+        # on that should do the right thing.  But it'd be nice to clean
+        # this bit up.
+        self.chainedProtocol.terminalProtocol.terminalSize(width, height)
+
+class TerminalSession(components.Adapter):
+    implements(iconch.ISession)
+
+    transportFactory = TerminalSessionTransport
+    chainedProtocolFactory = insults.ServerProtocol
+
+    def getPty(self, term, windowSize, attrs):
+        self.height, self.width = windowSize[:2]
+
+    def openShell(self, proto):
+        self.transportFactory(
+            proto, self.chainedProtocolFactory(),
+            iconch.IConchUser(self.original),
+            self.width, self.height)
+
+    def execCommand(self, proto, cmd):
+        raise econch.ConchError("Cannot execute commands")
+
+    def closed(self):
+        pass
+
+class TerminalUser(avatar.ConchUser, components.Adapter):
+    def __init__(self, original, avatarId):
+        components.Adapter.__init__(self, original)
+        avatar.ConchUser.__init__(self)
+        self.channelLookup['session'] = session.SSHSession
+
+class TerminalRealm:
+    userFactory = TerminalUser
+    sessionFactory = TerminalSession
+
+    transportFactory = TerminalSessionTransport
+    chainedProtocolFactory = insults.ServerProtocol
+
+    def _getAvatar(self, avatarId):
+        comp = components.Componentized()
+        user = self.userFactory(comp, avatarId)
+        sess = self.sessionFactory(comp)
+
+        sess.transportFactory = self.transportFactory
+        sess.chainedProtocolFactory = self.chainedProtocolFactory
+
+        comp.setComponent(iconch.IConchUser, user)
+        comp.setComponent(iconch.ISession, sess)
+
+        return user
+
+    def __init__(self, transportFactory=None):
+        if transportFactory is not None:
+            self.transportFactory = transportFactory
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        for i in interfaces:
+            if i is iconch.IConchUser:
+                return (iconch.IConchUser,
+                        self._getAvatar(avatarId),
+                        lambda: None)
+        raise NotImplementedError()
+
+class ConchFactory(factory.SSHFactory):
+    publicKey = 'ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEArzJx8OYOnJmzf4tfBEvLi8DVPrJ3/c9k2I/Az64fxjHf9imyRJbixtQhlH9lfNjUIx+4LmrJH5QNRsFporcHDKOTwTTYLh5KmRpslkYHRivcJSkbh/C+BR3utDS555mV'
+
+    publicKeys = {
+        'ssh-rsa' : keys.Key.fromString(publicKey)
+    }
+    del publicKey
+
+    privateKey = """-----BEGIN RSA PRIVATE KEY-----
+MIIByAIBAAJhAK8ycfDmDpyZs3+LXwRLy4vA1T6yd/3PZNiPwM+uH8Yx3/YpskSW
+4sbUIZR/ZXzY1CMfuC5qyR+UDUbBaaK3Bwyjk8E02C4eSpkabJZGB0Yr3CUpG4fw
+vgUd7rQ0ueeZlQIBIwJgbh+1VZfr7WftK5lu7MHtqE1S1vPWZQYE3+VUn8yJADyb
+Z4fsZaCrzW9lkIqXkE3GIY+ojdhZhkO1gbG0118sIgphwSWKRxK0mvh6ERxKqIt1
+xJEJO74EykXZV4oNJ8sjAjEA3J9r2ZghVhGN6V8DnQrTk24Td0E8hU8AcP0FVP+8
+PQm/g/aXf2QQkQT+omdHVEJrAjEAy0pL0EBH6EVS98evDCBtQw22OZT52qXlAwZ2
+gyTriKFVoqjeEjt3SZKKqXHSApP/AjBLpF99zcJJZRq2abgYlf9lv1chkrWqDHUu
+DZttmYJeEfiFBBavVYIF1dOlZT0G8jMCMBc7sOSZodFnAiryP+Qg9otSBjJ3bQML
+pSTqy7c3a2AScC/YyOwkDaICHnnD3XyjMwIxALRzl0tQEKMXs6hH8ToUdlLROCrP
+EhQ0wahUTCk1gKA4uPD6TMTChavbh4K63OvbKg==
+-----END RSA PRIVATE KEY-----"""
+    privateKeys = {
+        'ssh-rsa' : keys.Key.fromString(privateKey)
+    }
+    del privateKey
+
+    def __init__(self, portal):
+        self.portal = portal
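These classes are normally wired together the way manhole_tap.makeService does
later in this commit: a TerminalRealm whose chainedProtocolFactory builds an
insults.ServerProtocol around a ColoredManhole, handed to a cred Portal and
served through ConchFactory (which already carries the hard-coded host key
pair shown above).  A condensed sketch, with buildManholeFactory being a
hypothetical helper name:

    from twisted.internet import reactor
    from twisted.cred import portal, checkers
    from twisted.conch import manhole, manhole_ssh
    from twisted.conch.insults import insults

    def buildManholeFactory(namespace, **passwords):
        realm = manhole_ssh.TerminalRealm()
        realm.chainedProtocolFactory = lambda: insults.ServerProtocol(
            manhole.ColoredManhole, namespace)
        p = portal.Portal(realm, [
            checkers.InMemoryUsernamePasswordDatabaseDontUse(**passwords)])
        return manhole_ssh.ConchFactory(p)

    # reactor.listenTCP(2222, buildManholeFactory({'reactor': reactor}, admin='secret'))
    # reactor.run()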
diff --git a/ThirdParty/Twisted/twisted/conch/manhole_tap.py b/ThirdParty/Twisted/twisted/conch/manhole_tap.py
new file mode 100644
index 0000000..4df7c83
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/manhole_tap.py
@@ -0,0 +1,124 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+TAP plugin for creating telnet- and ssh-accessible manhole servers.
+
+@author: Jp Calderone
+"""
+
+from zope.interface import implements
+
+from twisted.internet import protocol
+from twisted.application import service, strports
+from twisted.conch.ssh import session
+from twisted.conch import interfaces as iconch
+from twisted.cred import portal, checkers
+from twisted.python import usage
+
+from twisted.conch.insults import insults
+from twisted.conch import manhole, manhole_ssh, telnet
+
+class makeTelnetProtocol:
+    def __init__(self, portal):
+        self.portal = portal
+
+    def __call__(self):
+        auth = telnet.AuthenticatingTelnetProtocol
+        args = (self.portal,)
+        return telnet.TelnetTransport(auth, *args)
+
+class chainedProtocolFactory:
+    def __init__(self, namespace):
+        self.namespace = namespace
+    
+    def __call__(self):
+        return insults.ServerProtocol(manhole.ColoredManhole, self.namespace)
+
+class _StupidRealm:
+    implements(portal.IRealm)
+
+    def __init__(self, proto, *a, **kw):
+        self.protocolFactory = proto
+        self.protocolArgs = a
+        self.protocolKwArgs = kw
+
+    def requestAvatar(self, avatarId, *interfaces):
+        if telnet.ITelnetProtocol in interfaces:
+            return (telnet.ITelnetProtocol,
+                    self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs),
+                    lambda: None)
+        raise NotImplementedError()
+
+class Options(usage.Options):
+    optParameters = [
+        ["telnetPort", "t", None, "strports description of the address on which to listen for telnet connections"],
+        ["sshPort", "s", None, "strports description of the address on which to listen for ssh connections"],
+        ["passwd", "p", "/etc/passwd", "name of a passwd(5)-format username/password file"]]
+
+    def __init__(self):
+        usage.Options.__init__(self)
+        self['namespace'] = None
+    
+    def postOptions(self):
+        if self['telnetPort'] is None and self['sshPort'] is None:
+            raise usage.UsageError("At least one of --telnetPort and --sshPort must be specified")
+
+def makeService(options):
+    """Create a manhole server service.
+
+    @type options: C{dict}
+    @param options: A mapping describing the configuration of
+    the desired service.  Recognized key/value pairs are::
+
+        "telnetPort": strports description of the address on which
+                      to listen for telnet connections.  If None,
+                      no telnet service will be started.
+
+        "sshPort": strports description of the address on which to
+                   listen for ssh connections.  If None, no ssh
+                   service will be started.
+
+        "namespace": dictionary containing desired initial locals
+                     for manhole connections.  If None, an empty
+                     dictionary will be used.
+
+        "passwd": Name of a passwd(5)-format username/password file.
+
+    @rtype: L{twisted.application.service.IService}
+    @return: A manhole service.
+    """
+
+    svc = service.MultiService()
+
+    namespace = options['namespace']
+    if namespace is None:
+        namespace = {}
+
+    checker = checkers.FilePasswordDB(options['passwd'])
+
+    if options['telnetPort']:
+        telnetRealm = _StupidRealm(telnet.TelnetBootstrapProtocol,
+                                   insults.ServerProtocol,
+                                   manhole.ColoredManhole,
+                                   namespace)
+
+        telnetPortal = portal.Portal(telnetRealm, [checker])
+
+        telnetFactory = protocol.ServerFactory()
+        telnetFactory.protocol = makeTelnetProtocol(telnetPortal)
+        telnetService = strports.service(options['telnetPort'],
+                                         telnetFactory)
+        telnetService.setServiceParent(svc)
+
+    if options['sshPort']:
+        sshRealm = manhole_ssh.TerminalRealm()
+        sshRealm.chainedProtocolFactory = chainedProtocolFactory(namespace)
+
+        sshPortal = portal.Portal(sshRealm, [checker])
+        sshFactory = manhole_ssh.ConchFactory(sshPortal)
+        sshService = strports.service(options['sshPort'],
+                                      sshFactory)
+        sshService.setServiceParent(svc)
+
+    return svc
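A configuration sketch matching the mapping documented in the docstring above;
the port descriptions use strports syntax and the passwd path is a hypothetical
passwd(5)-style username:password file:

    from twisted.internet import reactor
    from twisted.conch import manhole_tap

    svc = manhole_tap.makeService({
        'telnetPort': 'tcp:4040',            # or None to disable telnet
        'sshPort': 'tcp:4022',               # or None to disable ssh
        'namespace': {'reactor': reactor},   # initial locals for each session
        'passwd': '/etc/manhole.passwd',     # hypothetical credentials file
        })
    # Attach svc to a twistd Application, or call svc.startService() once the
    # reactor is running.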
diff --git a/ThirdParty/Twisted/twisted/conch/mixin.py b/ThirdParty/Twisted/twisted/conch/mixin.py
new file mode 100644
index 0000000..581e2ff
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/mixin.py
@@ -0,0 +1,49 @@
+# -*- test-case-name: twisted.conch.test.test_mixin -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Experimental optimization
+
+This module provides a single mixin class which allows protocols to
+collapse numerous small writes into a single larger one.
+
+@author: Jp Calderone
+"""
+
+from twisted.internet import reactor
+
+class BufferingMixin:
+    """Mixin which adds write buffering.
+    """
+    _delayedWriteCall = None
+    bytes = None
+
+    DELAY = 0.0
+
+    def schedule(self):
+        return reactor.callLater(self.DELAY, self.flush)
+
+    def reschedule(self, token):
+        token.reset(self.DELAY)
+
+    def write(self, bytes):
+        """Buffer some bytes to be written soon.
+
+        Every call to this function delays the real write by C{self.DELAY}
+        seconds.  When the delay expires, all collected bytes are written
+        to the underlying transport using L{ITransport.writeSequence}.
+        """
+        if self._delayedWriteCall is None:
+            self.bytes = []
+            self._delayedWriteCall = self.schedule()
+        else:
+            self.reschedule(self._delayedWriteCall)
+        self.bytes.append(bytes)
+
+    def flush(self):
+        """Flush the buffer immediately.
+        """
+        self._delayedWriteCall = None
+        self.transport.writeSequence(self.bytes)
+        self.bytes = None
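A sketch of how the mixin is meant to be combined with a protocol (assumes a
running reactor; ChattyEcho is illustrative, not from the upstream sources):
every small write() is buffered and, once the zero-second delay fires, the
collected strings go out in a single writeSequence() call.

    from twisted.internet import protocol
    from twisted.conch.mixin import BufferingMixin

    class ChattyEcho(BufferingMixin, protocol.Protocol):
        """Echo input one byte at a time; the mixin coalesces the writes."""
        def dataReceived(self, data):
            for ch in data:
                self.write(ch)   # buffered; flushed later as one writeSequence()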
diff --git a/ThirdParty/Twisted/twisted/conch/openssh_compat/__init__.py b/ThirdParty/Twisted/twisted/conch/openssh_compat/__init__.py
new file mode 100644
index 0000000..69d5927
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/openssh_compat/__init__.py
@@ -0,0 +1,11 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+
+"""
+Support for OpenSSH configuration files.
+
+Maintainer: Paul Swartz
+"""
+
diff --git a/ThirdParty/Twisted/twisted/conch/openssh_compat/factory.py b/ThirdParty/Twisted/twisted/conch/openssh_compat/factory.py
new file mode 100644
index 0000000..f0ad8f7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/openssh_compat/factory.py
@@ -0,0 +1,73 @@
+# -*- test-case-name: twisted.conch.test.test_openssh_compat -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Factory for reading openssh configuration files: public keys, private keys, and
+moduli file.
+"""
+
+import os, errno
+
+from twisted.python import log
+from twisted.python.util import runAsEffectiveUser
+
+from twisted.conch.ssh import keys, factory, common
+from twisted.conch.openssh_compat import primes
+
+
+
+class OpenSSHFactory(factory.SSHFactory):
+    dataRoot = '/usr/local/etc'
+    moduliRoot = '/usr/local/etc' # for openbsd which puts moduli in a different
+                                  # directory from keys
+
+
+    def getPublicKeys(self):
+        """
+        Return the server public keys.
+        """
+        ks = {}
+        for filename in os.listdir(self.dataRoot):
+            if filename[:9] == 'ssh_host_' and filename[-8:]=='_key.pub':
+                try:
+                    k = keys.Key.fromFile(
+                        os.path.join(self.dataRoot, filename))
+                    t = common.getNS(k.blob())[0]
+                    ks[t] = k
+                except Exception, e:
+                    log.msg('bad public key file %s: %s' % (filename, e))
+        return ks
+
+
+    def getPrivateKeys(self):
+        """
+        Return the server private keys.
+        """
+        privateKeys = {}
+        for filename in os.listdir(self.dataRoot):
+            if filename[:9] == 'ssh_host_' and filename[-4:]=='_key':
+                fullPath = os.path.join(self.dataRoot, filename)
+                try:
+                    key = keys.Key.fromFile(fullPath)
+                except IOError, e:
+                    if e.errno == errno.EACCES:
+                        # Not allowed, let's switch to root
+                        key = runAsEffectiveUser(0, 0, keys.Key.fromFile, fullPath)
+                        keyType = keys.objectType(key.keyObject)
+                        privateKeys[keyType] = key
+                    else:
+                        raise
+                except Exception, e:
+                    log.msg('bad private key file %s: %s' % (filename, e))
+                else:
+                    keyType = keys.objectType(key.keyObject)
+                    privateKeys[keyType] = key
+        return privateKeys
+
+
+    def getPrimes(self):
+        try:
+            return primes.parseModuliFile(self.moduliRoot+'/moduli')
+        except IOError:
+            return None
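A usage sketch pointing the factory at an existing OpenSSH installation; the
/etc/ssh layout is an assumption (the defaults above reflect a BSD-style
/usr/local/etc), and reading the private keys may require root:

    from twisted.conch.openssh_compat.factory import OpenSSHFactory

    factory = OpenSSHFactory()
    factory.dataRoot = '/etc/ssh'     # ssh_host_*_key and ssh_host_*_key.pub live here
    factory.moduliRoot = '/etc/ssh'   # directory containing the 'moduli' file
    print sorted(factory.getPublicKeys())   # e.g. ['ssh-dss', 'ssh-rsa']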
diff --git a/ThirdParty/Twisted/twisted/conch/openssh_compat/primes.py b/ThirdParty/Twisted/twisted/conch/openssh_compat/primes.py
new file mode 100644
index 0000000..5d939e6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/openssh_compat/primes.py
@@ -0,0 +1,26 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+
+"""
+Parsing for the moduli file, which contains Diffie-Hellman prime groups.
+
+Maintainer: Paul Swartz
+"""
+
+def parseModuliFile(filename):
+    lines = open(filename).readlines()
+    primes = {}
+    for l in lines:
+        l = l.strip()
+        if  not l or l[0]=='#':
+            continue
+        tim, typ, tst, tri, size, gen, mod = l.split()
+        size = int(size) + 1
+        gen = long(gen)
+        mod = long(mod, 16)
+        if not primes.has_key(size):
+            primes[size] = []
+        primes[size].append((gen, mod))
+    return primes
diff --git a/ThirdParty/Twisted/twisted/conch/recvline.py b/ThirdParty/Twisted/twisted/conch/recvline.py
new file mode 100644
index 0000000..6c8416a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/recvline.py
@@ -0,0 +1,329 @@
+# -*- test-case-name: twisted.conch.test.test_recvline -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Basic line editing support.
+
+@author: Jp Calderone
+"""
+
+import string
+
+from zope.interface import implements
+
+from twisted.conch.insults import insults, helper
+
+from twisted.python import log, reflect
+
+_counters = {}
+class Logging(object):
+    """Wrapper which logs attribute lookups.
+
+    This was useful in debugging something, I guess.  I forget what.
+    It can probably be deleted or moved somewhere more appropriate.
+    Nothing special going on here, really.
+    """
+    def __init__(self, original):
+        self.original = original
+        key = reflect.qual(original.__class__)
+        count = _counters.get(key, 0)
+        _counters[key] = count + 1
+        self._logFile = file(key + '-' + str(count), 'w')
+
+    def __str__(self):
+        return str(super(Logging, self).__getattribute__('original'))
+
+    def __repr__(self):
+        return repr(super(Logging, self).__getattribute__('original'))
+
+    def __getattribute__(self, name):
+        original = super(Logging, self).__getattribute__('original')
+        logFile = super(Logging, self).__getattribute__('_logFile')
+        logFile.write(name + '\n')
+        return getattr(original, name)
+
+class TransportSequence(object):
+    """An L{ITerminalTransport} implementation which forwards calls to
+    one or more other L{ITerminalTransport}s.
+
+    This is a cheap way for servers to keep track of the state they
+    expect the client to see, since all terminal manipulations can be
+    sent to the real client and to a terminal emulator that lives in
+    the server process.
+    """
+    implements(insults.ITerminalTransport)
+
+    for keyID in ('UP_ARROW', 'DOWN_ARROW', 'RIGHT_ARROW', 'LEFT_ARROW',
+                  'HOME', 'INSERT', 'DELETE', 'END', 'PGUP', 'PGDN',
+                  'F1', 'F2', 'F3', 'F4', 'F5', 'F6', 'F7', 'F8', 'F9',
+                  'F10', 'F11', 'F12'):
+        exec '%s = object()' % (keyID,)
+
+    TAB = '\t'
+    BACKSPACE = '\x7f'
+
+    def __init__(self, *transports):
+        assert transports, "Cannot construct a TransportSequence with no transports"
+        self.transports = transports
+
+    for method in insults.ITerminalTransport:
+        exec """\
+def %s(self, *a, **kw):
+    for tpt in self.transports:
+        result = tpt.%s(*a, **kw)
+    return result
+""" % (method, method)
+
+class LocalTerminalBufferMixin(object):
+    """A mixin for RecvLine subclasses which records the state of the terminal.
+
+    This is accomplished by performing all L{ITerminalTransport} operations on both
+    the transport passed to makeConnection and an instance of helper.TerminalBuffer.
+
+    @ivar terminalCopy: A L{helper.TerminalBuffer} instance which is kept as
+    closely in sync as possible with the actual terminal associated with
+    this protocol instance.
+    """
+
+    def makeConnection(self, transport):
+        self.terminalCopy = helper.TerminalBuffer()
+        self.terminalCopy.connectionMade()
+        return super(LocalTerminalBufferMixin, self).makeConnection(
+            TransportSequence(transport, self.terminalCopy))
+
+    def __str__(self):
+        return str(self.terminalCopy)
+
+class RecvLine(insults.TerminalProtocol):
+    """L{TerminalProtocol} which adds line editing features.
+
+    Clients will be prompted for lines of input with all the usual
+    features: character echoing, left and right arrow support for
+    moving the cursor to different areas of the line buffer, backspace
+    and delete for removing characters, and insert for toggling
+    between typeover and insert mode.  Tabs will be expanded to enough
+    spaces to move the cursor to the next tabstop (every four
+    characters by default).  Enter causes the line buffer to be
+    cleared and the line to be passed to the lineReceived() method
+    which, by default, does nothing.  Subclasses are responsible for
+    redrawing the input prompt (this will probably change).
+    """
+    width = 80
+    height = 24
+
+    TABSTOP = 4
+
+    ps = ('>>> ', '... ')
+    pn = 0
+    _printableChars = set(string.printable)
+
+    def connectionMade(self):
+        # A list containing the characters making up the current line
+        self.lineBuffer = []
+
+        # A zero-based (wtf else?) index into self.lineBuffer.
+        # Indicates the current cursor position.
+        self.lineBufferIndex = 0
+
+        t = self.terminal
+        # A map of keyIDs to bound instance methods.
+        self.keyHandlers = {
+            t.LEFT_ARROW: self.handle_LEFT,
+            t.RIGHT_ARROW: self.handle_RIGHT,
+            t.TAB: self.handle_TAB,
+
+            # Both of these should not be necessary, but figuring out
+            # which is necessary is a huge hassle.
+            '\r': self.handle_RETURN,
+            '\n': self.handle_RETURN,
+
+            t.BACKSPACE: self.handle_BACKSPACE,
+            t.DELETE: self.handle_DELETE,
+            t.INSERT: self.handle_INSERT,
+            t.HOME: self.handle_HOME,
+            t.END: self.handle_END}
+
+        self.initializeScreen()
+
+    def initializeScreen(self):
+        # Hmm, state sucks.  Oh well.
+        # For now we will just take over the whole terminal.
+        self.terminal.reset()
+        self.terminal.write(self.ps[self.pn])
+        # XXX Note: I would prefer to default to starting in insert
+        # mode, however this does not seem to actually work!  I do not
+        # know why.  This is probably of interest to implementors
+        # subclassing RecvLine.
+
+        # XXX XXX Note: But the unit tests all expect the initial mode
+        # to be insert right now.  Fuck, there needs to be a way to
+        # query the current mode or something.
+        # self.setTypeoverMode()
+        self.setInsertMode()
+
+    def currentLineBuffer(self):
+        s = ''.join(self.lineBuffer)
+        return s[:self.lineBufferIndex], s[self.lineBufferIndex:]
+
+    def setInsertMode(self):
+        self.mode = 'insert'
+        self.terminal.setModes([insults.modes.IRM])
+
+    def setTypeoverMode(self):
+        self.mode = 'typeover'
+        self.terminal.resetModes([insults.modes.IRM])
+
+    def drawInputLine(self):
+        """
+        Write a line containing the current input prompt and the current line
+        buffer at the current cursor position.
+        """
+        self.terminal.write(self.ps[self.pn] + ''.join(self.lineBuffer))
+
+    def terminalSize(self, width, height):
+        # XXX - Clear the previous input line, redraw it at the new
+        # cursor position
+        self.terminal.eraseDisplay()
+        self.terminal.cursorHome()
+        self.width = width
+        self.height = height
+        self.drawInputLine()
+
+    def unhandledControlSequence(self, seq):
+        pass
+
+    def keystrokeReceived(self, keyID, modifier):
+        m = self.keyHandlers.get(keyID)
+        if m is not None:
+            m()
+        elif keyID in self._printableChars:
+            self.characterReceived(keyID, False)
+        else:
+            log.msg("Received unhandled keyID: %r" % (keyID,))
+
+    def characterReceived(self, ch, moreCharactersComing):
+        if self.mode == 'insert':
+            self.lineBuffer.insert(self.lineBufferIndex, ch)
+        else:
+            self.lineBuffer[self.lineBufferIndex:self.lineBufferIndex+1] = [ch]
+        self.lineBufferIndex += 1
+        self.terminal.write(ch)
+
+    def handle_TAB(self):
+        n = self.TABSTOP - (len(self.lineBuffer) % self.TABSTOP)
+        self.terminal.cursorForward(n)
+        self.lineBufferIndex += n
+        self.lineBuffer.extend(' ' * n)
+
+    def handle_LEFT(self):
+        if self.lineBufferIndex > 0:
+            self.lineBufferIndex -= 1
+            self.terminal.cursorBackward()
+
+    def handle_RIGHT(self):
+        if self.lineBufferIndex < len(self.lineBuffer):
+            self.lineBufferIndex += 1
+            self.terminal.cursorForward()
+
+    def handle_HOME(self):
+        if self.lineBufferIndex:
+            self.terminal.cursorBackward(self.lineBufferIndex)
+            self.lineBufferIndex = 0
+
+    def handle_END(self):
+        offset = len(self.lineBuffer) - self.lineBufferIndex
+        if offset:
+            self.terminal.cursorForward(offset)
+            self.lineBufferIndex = len(self.lineBuffer)
+
+    def handle_BACKSPACE(self):
+        if self.lineBufferIndex > 0:
+            self.lineBufferIndex -= 1
+            del self.lineBuffer[self.lineBufferIndex]
+            self.terminal.cursorBackward()
+            self.terminal.deleteCharacter()
+
+    def handle_DELETE(self):
+        if self.lineBufferIndex < len(self.lineBuffer):
+            del self.lineBuffer[self.lineBufferIndex]
+            self.terminal.deleteCharacter()
+
+    def handle_RETURN(self):
+        line = ''.join(self.lineBuffer)
+        self.lineBuffer = []
+        self.lineBufferIndex = 0
+        self.terminal.nextLine()
+        self.lineReceived(line)
+
+    def handle_INSERT(self):
+        assert self.mode in ('typeover', 'insert')
+        if self.mode == 'typeover':
+            self.setInsertMode()
+        else:
+            self.setTypeoverMode()
+
+    def lineReceived(self, line):
+        pass
+
+class HistoricRecvLine(RecvLine):
+    """L{TerminalProtocol} which adds both basic line-editing features and input history.
+
+    Everything supported by L{RecvLine} is also supported by this class.  In addition, the
+    up and down arrows traverse the input history.  Each received line is automatically
+    added to the end of the input history.
+    """
+    def connectionMade(self):
+        RecvLine.connectionMade(self)
+
+        self.historyLines = []
+        self.historyPosition = 0
+
+        t = self.terminal
+        self.keyHandlers.update({t.UP_ARROW: self.handle_UP,
+                                 t.DOWN_ARROW: self.handle_DOWN})
+
+    def currentHistoryBuffer(self):
+        b = tuple(self.historyLines)
+        return b[:self.historyPosition], b[self.historyPosition:]
+
+    def _deliverBuffer(self, buf):
+        if buf:
+            for ch in buf[:-1]:
+                self.characterReceived(ch, True)
+            self.characterReceived(buf[-1], False)
+
+    def handle_UP(self):
+        if self.lineBuffer and self.historyPosition == len(self.historyLines):
+            self.historyLines.append(self.lineBuffer)
+        if self.historyPosition > 0:
+            self.handle_HOME()
+            self.terminal.eraseToLineEnd()
+
+            self.historyPosition -= 1
+            self.lineBuffer = []
+
+            self._deliverBuffer(self.historyLines[self.historyPosition])
+
+    def handle_DOWN(self):
+        if self.historyPosition < len(self.historyLines) - 1:
+            self.handle_HOME()
+            self.terminal.eraseToLineEnd()
+
+            self.historyPosition += 1
+            self.lineBuffer = []
+
+            self._deliverBuffer(self.historyLines[self.historyPosition])
+        else:
+            self.handle_HOME()
+            self.terminal.eraseToLineEnd()
+
+            self.historyPosition = len(self.historyLines)
+            self.lineBuffer = []
+            self.lineBufferIndex = 0
+
+    def handle_RETURN(self):
+        if self.lineBuffer:
+            self.historyLines.append(''.join(self.lineBuffer))
+        self.historyPosition = len(self.historyLines)
+        return RecvLine.handle_RETURN(self)
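Since lineReceived() is intentionally a no-op and subclasses are expected to
redraw the prompt themselves, a minimal subclass looks like the following
sketch (EchoLine is illustrative only); Manhole above follows the same pattern:

    from twisted.conch.recvline import HistoricRecvLine

    class EchoLine(HistoricRecvLine):
        """Echo each completed line, then redraw the prompt."""
        def lineReceived(self, line):
            self.terminal.write('you said: ' + line)
            self.terminal.nextLine()
            self.terminal.write(self.ps[self.pn])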
diff --git a/ThirdParty/Twisted/twisted/conch/scripts/__init__.py b/ThirdParty/Twisted/twisted/conch/scripts/__init__.py
new file mode 100644
index 0000000..63fdb3d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/scripts/__init__.py
@@ -0,0 +1 @@
+'conch scripts'
diff --git a/ThirdParty/Twisted/twisted/conch/scripts/cftp.py b/ThirdParty/Twisted/twisted/conch/scripts/cftp.py
new file mode 100644
index 0000000..e6db67f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/scripts/cftp.py
@@ -0,0 +1,832 @@
+# -*- test-case-name: twisted.conch.test.test_cftp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation module for the I{cftp} command.
+"""
+
+import os, sys, getpass, struct, tty, fcntl, stat
+import fnmatch, pwd, glob
+
+from twisted.conch.client import connect, default, options
+from twisted.conch.ssh import connection, common
+from twisted.conch.ssh import channel, filetransfer
+from twisted.protocols import basic
+from twisted.internet import reactor, stdio, defer, utils
+from twisted.python import log, usage, failure
+
+class ClientOptions(options.ConchOptions):
+
+    synopsis = """Usage:   cftp [options] [user@]host
+         cftp [options] [user@]host[:dir[/]]
+         cftp [options] [user@]host[:file [localfile]]
+"""
+    longdesc = ("cftp is a client for logging into a remote machine and "
+                "executing commands to send and receive file information")
+
+    optParameters = [
+                    ['buffersize', 'B', 32768, 'Size of the buffer to use for sending/receiving.'],
+                    ['batchfile', 'b', None, 'File to read commands from, or \'-\' for stdin.'],
+                    ['requests', 'R', 5, 'Number of requests to make before waiting for a reply.'],
+                    ['subsystem', 's', 'sftp', 'Subsystem/server program to connect to.']]
+
+    compData = usage.Completions(
+        descriptions={
+            "buffersize": "Size of send/receive buffer (default: 32768)"},
+        extraActions=[usage.CompleteUserAtHost(),
+                      usage.CompleteFiles(descr="local file")])
+
+    def parseArgs(self, host, localPath=None):
+        self['remotePath'] = ''
+        if ':' in host:
+            host, self['remotePath'] = host.split(':', 1)
+            self['remotePath'] = self['remotePath'].rstrip('/')
+        self['host'] = host
+        self['localPath'] = localPath
+
+def run():
+#    import hotshot
+#    prof = hotshot.Profile('cftp.prof')
+#    prof.start()
+    args = sys.argv[1:]
+    if '-l' in args: # cvs is an idiot
+        i = args.index('-l')
+        args = args[i:i+2]+args
+        del args[i+2:i+4]
+    options = ClientOptions()
+    try:
+        options.parseOptions(args)
+    except usage.UsageError, u:
+        print 'ERROR: %s' % u
+        sys.exit(1)
+    if options['log']:
+        realout = sys.stdout
+        log.startLogging(sys.stderr)
+        sys.stdout = realout
+    else:
+        log.discardLogs()
+    doConnect(options)
+    reactor.run()
+#    prof.stop()
+#    prof.close()
+
+def handleError():
+    global exitStatus
+    exitStatus = 2
+    try:
+        reactor.stop()
+    except: pass
+    log.err(failure.Failure())
+    raise
+
+def doConnect(options):
+#    log.deferr = handleError # HACK
+    if '@' in options['host']:
+        options['user'], options['host'] = options['host'].split('@',1)
+    host = options['host']
+    if not options['user']:
+        options['user'] = getpass.getuser()
+    if not options['port']:
+        options['port'] = 22
+    else:
+        options['port'] = int(options['port'])
+    host = options['host']
+    port = options['port']
+    conn = SSHConnection()
+    conn.options = options
+    vhk = default.verifyHostKey
+    uao = default.SSHUserAuthClient(options['user'], options, conn)
+    connect.connect(host, port, options, vhk, uao).addErrback(_ebExit)
+
+def _ebExit(f):
+    #global exitStatus
+    if hasattr(f.value, 'value'):
+        s = f.value.value
+    else:
+        s = str(f)
+    print s
+    #exitStatus = "conch: exiting with error %s" % f
+    try:
+        reactor.stop()
+    except: pass
+
+def _ignore(*args): pass
+
+class FileWrapper:
+
+    def __init__(self, f):
+        self.f = f
+        self.total = 0.0
+        f.seek(0, 2) # seek to the end
+        self.size = f.tell()
+
+    def __getattr__(self, attr):
+        return getattr(self.f, attr)
+
+class StdioClient(basic.LineReceiver):
+
+    _pwd = pwd
+
+    ps = 'cftp> '
+    delimiter = '\n'
+
+    reactor = reactor
+
+    def __init__(self, client, f = None):
+        self.client = client
+        self.currentDirectory = ''
+        self.file = f
+        self.useProgressBar = (not f and 1) or 0
+
+    def connectionMade(self):
+        self.client.realPath('').addCallback(self._cbSetCurDir)
+
+    def _cbSetCurDir(self, path):
+        self.currentDirectory = path
+        self._newLine()
+
+    def lineReceived(self, line):
+        if self.client.transport.localClosed:
+            return
+        log.msg('got line %s' % repr(line))
+        line = line.lstrip()
+        if not line:
+            self._newLine()
+            return
+        if self.file and line.startswith('-'):
+            self.ignoreErrors = 1
+            line = line[1:]
+        else:
+            self.ignoreErrors = 0
+        d = self._dispatchCommand(line)
+        if d is not None:
+            d.addCallback(self._cbCommand)
+            d.addErrback(self._ebCommand)
+
+
+    def _dispatchCommand(self, line):
+        if ' ' in line:
+            command, rest = line.split(' ', 1)
+            rest = rest.lstrip()
+        else:
+            command, rest = line, ''
+        if command.startswith('!'): # command
+            f = self.cmd_EXEC
+            rest = (command[1:] + ' ' + rest).strip()
+        else:
+            command = command.upper()
+            log.msg('looking up cmd %s' % command)
+            f = getattr(self, 'cmd_%s' % command, None)
+        if f is not None:
+            return defer.maybeDeferred(f, rest)
+        else:
+            self._ebCommand(failure.Failure(NotImplementedError(
+                "No command called `%s'" % command)))
+            self._newLine()
+
+    def _printFailure(self, f):
+        log.msg(f)
+        e = f.trap(NotImplementedError, filetransfer.SFTPError, OSError, IOError)
+        if e == NotImplementedError:
+            self.transport.write(self.cmd_HELP(''))
+        elif e == filetransfer.SFTPError:
+            self.transport.write("remote error %i: %s\n" %
+                    (f.value.code, f.value.message))
+        elif e in (OSError, IOError):
+            self.transport.write("local error %i: %s\n" %
+                    (f.value.errno, f.value.strerror))
+
+    def _newLine(self):
+        if self.client.transport.localClosed:
+            return
+        self.transport.write(self.ps)
+        self.ignoreErrors = 0
+        if self.file:
+            l = self.file.readline()
+            if not l:
+                self.client.transport.loseConnection()
+            else:
+                self.transport.write(l)
+                self.lineReceived(l.strip())
+
+    def _cbCommand(self, result):
+        if result is not None:
+            self.transport.write(result)
+            if not result.endswith('\n'):
+                self.transport.write('\n')
+        self._newLine()
+
+    def _ebCommand(self, f):
+        self._printFailure(f)
+        if self.file and not self.ignoreErrors:
+            self.client.transport.loseConnection()
+        self._newLine()
+
+    def cmd_CD(self, path):
+        path, rest = self._getFilename(path)
+        if not path.endswith('/'):
+            path += '/'
+        newPath = path and os.path.join(self.currentDirectory, path) or ''
+        d = self.client.openDirectory(newPath)
+        d.addCallback(self._cbCd)
+        d.addErrback(self._ebCommand)
+        return d
+
+    def _cbCd(self, directory):
+        directory.close()
+        d = self.client.realPath(directory.name)
+        d.addCallback(self._cbCurDir)
+        return d
+
+    def _cbCurDir(self, path):
+        self.currentDirectory = path
+
+    def cmd_CHGRP(self, rest):
+        grp, rest = rest.split(None, 1)
+        path, rest = self._getFilename(rest)
+        grp = int(grp)
+        d = self.client.getAttrs(path)
+        d.addCallback(self._cbSetUsrGrp, path, grp=grp)
+        return d
+
+    def cmd_CHMOD(self, rest):
+        mod, rest = rest.split(None, 1)
+        path, rest = self._getFilename(rest)
+        mod = int(mod, 8)
+        d = self.client.setAttrs(path, {'permissions':mod})
+        d.addCallback(_ignore)
+        return d
+
+    def cmd_CHOWN(self, rest):
+        usr, rest = rest.split(None, 1)
+        path, rest = self._getFilename(rest)
+        usr = int(usr)
+        d = self.client.getAttrs(path)
+        d.addCallback(self._cbSetUsrGrp, path, usr=usr)
+        return d
+
+    def _cbSetUsrGrp(self, attrs, path, usr=None, grp=None):
+        new = {}
+        new['uid'] = (usr is not None) and usr or attrs['uid']
+        new['gid'] = (grp is not None) and grp or attrs['gid']
+        d = self.client.setAttrs(path, new)
+        d.addCallback(_ignore)
+        return d
+
+    def cmd_GET(self, rest):
+        remote, rest = self._getFilename(rest)
+        if '*' in remote or '?' in remote: # wildcard
+            if rest:
+                local, rest = self._getFilename(rest)
+                if not os.path.isdir(local):
+                    return "Wildcard get with non-directory target."
+            else:
+                local = ''
+            d = self._remoteGlob(remote)
+            d.addCallback(self._cbGetMultiple, local)
+            return d
+        if rest:
+            local, rest = self._getFilename(rest)
+        else:
+            local = os.path.split(remote)[1]
+        log.msg((remote, local))
+        lf = file(local, 'w', 0)
+        path = os.path.join(self.currentDirectory, remote)
+        d = self.client.openFile(path, filetransfer.FXF_READ, {})
+        d.addCallback(self._cbGetOpenFile, lf)
+        d.addErrback(self._ebCloseLf, lf)
+        return d
+
+    def _cbGetMultiple(self, files, local):
+        #if self._useProgressBar: # one at a time
+        # XXX this can be optimized for times w/o progress bar
+        return self._cbGetMultipleNext(None, files, local)
+
+    def _cbGetMultipleNext(self, res, files, local):
+        if isinstance(res, failure.Failure):
+            self._printFailure(res)
+        elif res:
+            self.transport.write(res)
+            if not res.endswith('\n'):
+                self.transport.write('\n')
+        if not files:
+            return
+        f = files.pop(0)[0]
+        lf = file(os.path.join(local, os.path.split(f)[1]), 'w', 0)
+        path = os.path.join(self.currentDirectory, f)
+        d = self.client.openFile(path, filetransfer.FXF_READ, {})
+        d.addCallback(self._cbGetOpenFile, lf)
+        d.addErrback(self._ebCloseLf, lf)
+        d.addBoth(self._cbGetMultipleNext, files, local)
+        return d
+
+    def _ebCloseLf(self, f, lf):
+        lf.close()
+        return f
+
+    def _cbGetOpenFile(self, rf, lf):
+        return rf.getAttrs().addCallback(self._cbGetFileSize, rf, lf)
+
+    def _cbGetFileSize(self, attrs, rf, lf):
+        if not stat.S_ISREG(attrs['permissions']):
+            rf.close()
+            lf.close()
+            return "Can't get non-regular file: %s" % rf.name
+        rf.size = attrs['size']
+        bufferSize = self.client.transport.conn.options['buffersize']
+        numRequests = self.client.transport.conn.options['requests']
+        rf.total = 0.0
+        dList = []
+        chunks = []
+        startTime = self.reactor.seconds()
+        for i in range(numRequests):
+            d = self._cbGetRead('', rf, lf, chunks, 0, bufferSize, startTime)
+            dList.append(d)
+        dl = defer.DeferredList(dList, fireOnOneErrback=1)
+        dl.addCallback(self._cbGetDone, rf, lf)
+        return dl
+
+    def _getNextChunk(self, chunks):
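+        # 'chunks' is an ordered list of (start, end) byte ranges already
+        # requested; an end of 'eof' marks the tail of the file.  This returns
+        # the next (offset, length) to request: it first fills any hole left
+        # by a short read, otherwise it extends one buffer past the last
+        # range.  With buffersize 32768 that yields (0, 32768), (32768, 32768),
+        # and so on; after a short read of 1000 bytes at offset 0 the next
+        # call returns (1000, 31768).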
+        end = 0
+        for chunk in chunks:
+            if end == 'eof':
+                return # nothing more to get
+            if end != chunk[0]:
+                i = chunks.index(chunk)
+                chunks.insert(i, (end, chunk[0]))
+                return (end, chunk[0] - end)
+            end = chunk[1]
+        bufSize = int(self.client.transport.conn.options['buffersize'])
+        chunks.append((end, end + bufSize))
+        return (end, bufSize)
+
+    def _cbGetRead(self, data, rf, lf, chunks, start, size, startTime):
+        if data and isinstance(data, failure.Failure):
+            log.msg('get read err: %s' % data)
+            reason = data
+            reason.trap(EOFError)
+            i = chunks.index((start, start + size))
+            del chunks[i]
+            chunks.insert(i, (start, 'eof'))
+        elif data:
+            log.msg('get read data: %i' % len(data))
+            lf.seek(start)
+            lf.write(data)
+            if len(data) != size:
+                log.msg('got less than we asked for: %i < %i' %
+                        (len(data), size))
+                i = chunks.index((start, start + size))
+                del chunks[i]
+                chunks.insert(i, (start, start + len(data)))
+            rf.total += len(data)
+        if self.useProgressBar:
+            self._printProgressBar(rf, startTime)
+        chunk = self._getNextChunk(chunks)
+        if not chunk:
+            return
+        else:
+            start, length = chunk
+        log.msg('asking for %i -> %i' % (start, start+length))
+        d = rf.readChunk(start, length)
+        d.addBoth(self._cbGetRead, rf, lf, chunks, start, length, startTime)
+        return d
+
+    def _cbGetDone(self, ignored, rf, lf):
+        log.msg('get done')
+        rf.close()
+        lf.close()
+        if self.useProgressBar:
+            self.transport.write('\n')
+        return "Transferred %s to %s" % (rf.name, lf.name)
+
+    def cmd_PUT(self, rest):
+        local, rest = self._getFilename(rest)
+        if '*' in local or '?' in local: # wildcard
+            if rest:
+                remote, rest = self._getFilename(rest)
+                path = os.path.join(self.currentDirectory, remote)
+                d = self.client.getAttrs(path)
+                d.addCallback(self._cbPutTargetAttrs, remote, local)
+                return d
+            else:
+                remote = ''
+                files = glob.glob(local)
+                return self._cbPutMultipleNext(None, files, remote)
+        if rest:
+            remote, rest = self._getFilename(rest)
+        else:
+            remote = os.path.split(local)[1]
+        lf = file(local, 'r')
+        path = os.path.join(self.currentDirectory, remote)
+        flags = filetransfer.FXF_WRITE|filetransfer.FXF_CREAT|filetransfer.FXF_TRUNC
+        d = self.client.openFile(path, flags, {})
+        d.addCallback(self._cbPutOpenFile, lf)
+        d.addErrback(self._ebCloseLf, lf)
+        return d
+
+    def _cbPutTargetAttrs(self, attrs, path, local):
+        if not stat.S_ISDIR(attrs['permissions']):
+            return "Wildcard put with non-directory target."
+        files = glob.glob(local)
+        return self._cbPutMultipleNext(None, files, path)
+
+    def _cbPutMultipleNext(self, res, files, path):
+        if isinstance(res, failure.Failure):
+            self._printFailure(res)
+        elif res:
+            self.transport.write(res)
+            if not res.endswith('\n'):
+                self.transport.write('\n')
+        f = None
+        while files and not f:
+            try:
+                f = files.pop(0)
+                lf = file(f, 'r')
+            except:
+                self._printFailure(failure.Failure())
+                f = None
+        if not f:
+            return
+        name = os.path.split(f)[1]
+        remote = os.path.join(self.currentDirectory, path, name)
+        log.msg((name, remote, path))
+        flags = filetransfer.FXF_WRITE|filetransfer.FXF_CREAT|filetransfer.FXF_TRUNC
+        d = self.client.openFile(remote, flags, {})
+        d.addCallback(self._cbPutOpenFile, lf)
+        d.addErrback(self._ebCloseLf, lf)
+        d.addBoth(self._cbPutMultipleNext, files, path)
+        return d
+
+    def _cbPutOpenFile(self, rf, lf):
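+        # Remote file is open for writing: start 'requests' concurrent
+        # writers, each feeding the next chunk of the local file in turn.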
+        numRequests = self.client.transport.conn.options['requests']
+        if self.useProgressBar:
+            lf = FileWrapper(lf)
+        dList = []
+        chunks = []
+        startTime = self.reactor.seconds()
+        for i in range(numRequests):
+            d = self._cbPutWrite(None, rf, lf, chunks, startTime)
+            if d:
+                dList.append(d)
+        dl = defer.DeferredList(dList, fireOnOneErrback=1)
+        dl.addCallback(self._cbPutDone, rf, lf)
+        return dl
+
+    def _cbPutWrite(self, ignored, rf, lf, chunks, startTime):
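+        # Read the next chunk of the local file and write it to the remote
+        # file; chain another write while data remains, stop at local EOF.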
+        chunk = self._getNextChunk(chunks)
+        start, size = chunk
+        lf.seek(start)
+        data = lf.read(size)
+        if self.useProgressBar:
+            lf.total += len(data)
+            self._printProgressBar(lf, startTime)
+        if data:
+            d = rf.writeChunk(start, data)
+            d.addCallback(self._cbPutWrite, rf, lf, chunks, startTime)
+            return d
+        else:
+            return
+
+    def _cbPutDone(self, ignored, rf, lf):
+        lf.close()
+        rf.close()
+        if self.useProgressBar:
+            self.transport.write('\n')
+        return 'Transferred %s to %s' % (lf.name, rf.name)
+
+    def cmd_LCD(self, path):
+        os.chdir(path)
+
+    def cmd_LN(self, rest):
+        linkpath, rest = self._getFilename(rest)
+        targetpath, rest = self._getFilename(rest)
+        linkpath, targetpath = map(
+                lambda x: os.path.join(self.currentDirectory, x),
+                (linkpath, targetpath))
+        return self.client.makeLink(linkpath, targetpath).addCallback(_ignore)
+
+    def cmd_LS(self, rest):
+        # possible lines:
+        # ls                    current directory
+        # ls name_of_file       that file
+        # ls name_of_directory  that directory
+        # ls some_glob_string   current directory, globbed for that string
+        options = []
+        rest = rest.split()
+        while rest and rest[0] and rest[0][0] == '-':
+            opts = rest.pop(0)[1:]
+            for o in opts:
+                if o == 'l':
+                    options.append('verbose')
+                elif o == 'a':
+                    options.append('all')
+        rest = ' '.join(rest)
+        path, rest = self._getFilename(rest)
+        if not path:
+            fullPath = self.currentDirectory + '/'
+        else:
+            fullPath = os.path.join(self.currentDirectory, path)
+        d = self._remoteGlob(fullPath)
+        d.addCallback(self._cbDisplayFiles, options)
+        return d
+
+    def _cbDisplayFiles(self, files, options):
+        files.sort()
+        if 'all' not in options:
+            files = [f for f in files if not f[0].startswith('.')]
+        if 'verbose' in options:
+            lines = [f[1] for f in files]
+        else:
+            lines = [f[0] for f in files]
+        if not lines:
+            return None
+        else:
+            return '\n'.join(lines)
+
+    def cmd_MKDIR(self, path):
+        path, rest = self._getFilename(path)
+        path = os.path.join(self.currentDirectory, path)
+        return self.client.makeDirectory(path, {}).addCallback(_ignore)
+
+    def cmd_RMDIR(self, path):
+        path, rest = self._getFilename(path)
+        path = os.path.join(self.currentDirectory, path)
+        return self.client.removeDirectory(path).addCallback(_ignore)
+
+    def cmd_LMKDIR(self, path):
+        os.system("mkdir %s" % path)
+
+    def cmd_RM(self, path):
+        path, rest = self._getFilename(path)
+        path = os.path.join(self.currentDirectory, path)
+        return self.client.removeFile(path).addCallback(_ignore)
+
+    def cmd_LLS(self, rest):
+        os.system("ls %s" % rest)
+
+    def cmd_RENAME(self, rest):
+        oldpath, rest = self._getFilename(rest)
+        newpath, rest = self._getFilename(rest)
+        oldpath, newpath = map (
+                lambda x: os.path.join(self.currentDirectory, x),
+                (oldpath, newpath))
+        return self.client.renameFile(oldpath, newpath).addCallback(_ignore)
+
+    def cmd_EXIT(self, ignored):
+        self.client.transport.loseConnection()
+
+    cmd_QUIT = cmd_EXIT
+
+    def cmd_VERSION(self, ignored):
+        return "SFTP version %i" % self.client.version
+
+    def cmd_HELP(self, ignored):
+        return """Available commands:
+cd path                         Change remote directory to 'path'.
+chgrp gid path                  Change gid of 'path' to 'gid'.
+chmod mode path                 Change mode of 'path' to 'mode'.
+chown uid path                  Change uid of 'path' to 'uid'.
+exit                            Disconnect from the server.
+get remote-path [local-path]    Get remote file.
+help                            Get a list of available commands.
+lcd path                        Change local directory to 'path'.
+lls [ls-options] [path]         Display local directory listing.
+lmkdir path                     Create local directory.
+ln linkpath targetpath          Symlink remote file.
+lpwd                            Print the local working directory.
+ls [-l] [path]                  Display remote directory listing.
+mkdir path                      Create remote directory.
+progress                        Toggle progress bar.
+put local-path [remote-path]    Put local file.
+pwd                             Print the remote working directory.
+quit                            Disconnect from the server.
+rename oldpath newpath          Rename remote file.
+rmdir path                      Remove remote directory.
+rm path                         Remove remote file.
+version                         Print the SFTP version.
+?                               Synonym for 'help'.
+"""
+
+    def cmd_PWD(self, ignored):
+        return self.currentDirectory
+
+    def cmd_LPWD(self, ignored):
+        return os.getcwd()
+
+    def cmd_PROGRESS(self, ignored):
+        self.useProgressBar = not self.useProgressBar
+        return "%ssing progess bar." % (self.useProgressBar and "U" or "Not u")
+
+    def cmd_EXEC(self, rest):
+        """
+        Run C{rest} using the user's shell (or /bin/sh if they do not have
+        one).
+        """
+        shell = self._pwd.getpwnam(getpass.getuser())[6]
+        if not shell:
+            shell = '/bin/sh'
+        if rest:
+            cmds = ['-c', rest]
+            return utils.getProcessOutput(shell, cmds, errortoo=1)
+        else:
+            os.system(shell)
+
+    # accessory functions
+
+    def _remoteGlob(self, fullPath):
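+        # List a remote path: a plain name is first tried as a directory;
+        # otherwise (or on failure) the parent directory is listed and its
+        # entries matched against the tail, which may contain '*' or '?'.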
+        log.msg('looking up %s' % fullPath)
+        head, tail = os.path.split(fullPath)
+        if '*' in tail or '?' in tail:
+            glob = 1
+        else:
+            glob = 0
+        if tail and not glob: # could be file or directory
+            # try directory first
+            d = self.client.openDirectory(fullPath)
+            d.addCallback(self._cbOpenList, '')
+            d.addErrback(self._ebNotADirectory, head, tail)
+        else:
+            d = self.client.openDirectory(head)
+            d.addCallback(self._cbOpenList, tail)
+        return d
+
+    def _cbOpenList(self, directory, glob):
+        files = []
+        d = directory.read()
+        d.addBoth(self._cbReadFile, files, directory, glob)
+        return d
+
+    def _ebNotADirectory(self, reason, path, glob):
+        d = self.client.openDirectory(path)
+        d.addCallback(self._cbOpenList, glob)
+        return d
+
+    def _cbReadFile(self, files, l, directory, glob):
+        if not isinstance(files, failure.Failure):
+            if glob:
+                l.extend([f for f in files if fnmatch.fnmatch(f[0], glob)])
+            else:
+                l.extend(files)
+            d = directory.read()
+            d.addBoth(self._cbReadFile, l, directory, glob)
+            return d
+        else:
+            reason = files
+            reason.trap(EOFError)
+            directory.close()
+            return l
+
+    def _abbrevSize(self, size):
+        # from http://mail.python.org/pipermail/python-list/1999-December/018395.html
+        _abbrevs = [
+            (1<<50L, 'PB'),
+            (1<<40L, 'TB'),
+            (1<<30L, 'GB'),
+            (1<<20L, 'MB'),
+            (1<<10L, 'kB'),
+            (1, 'B')
+            ]
+
+        for factor, suffix in _abbrevs:
+            if size > factor:
+                break
+        return '%.1f' % (size/factor) + suffix
+
+    def _abbrevTime(self, t):
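+        # Format a duration in seconds as H:MM:SS, or MM:SS under an hour.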
+        if t > 3600: # 1 hour
+            hours = int(t / 3600)
+            t -= (3600 * hours)
+            mins = int(t / 60)
+            t -= (60 * mins)
+            return "%i:%02i:%02i" % (hours, mins, t)
+        else:
+            mins = int(t/60)
+            t -= (60 * mins)
+            return "%02i:%02i" % (mins, t)
+
+
+    def _printProgressBar(self, f, startTime):
+        """
+        Update a console progress bar on this L{StdioClient}'s transport, based
+        on the difference between the start time of the operation and the
+        current time according to the reactor, and appropriate to the size of
+        the console window.
+
+        @param f: a wrapper around the file which is being written or read
+        @type f: L{FileWrapper}
+
+        @param startTime: The time at which the operation being tracked began.
+        @type startTime: C{float}
+        """
+        diff = self.reactor.seconds() - startTime
+        total = f.total
+        try:
+            winSize = struct.unpack('4H',
+                fcntl.ioctl(0, tty.TIOCGWINSZ, '12345679'))
+        except IOError:
+            winSize = [None, 80]
+        if diff == 0.0:
+            speed = 0.0
+        else:
+            speed = total / diff
+        if speed:
+            timeLeft = (f.size - total) / speed
+        else:
+            timeLeft = 0
+        front = f.name
+        back = '%3i%% %s %sps %s ' % ((total / f.size) * 100,
+                                      self._abbrevSize(total),
+                                      self._abbrevSize(speed),
+                                      self._abbrevTime(timeLeft))
+        spaces = (winSize[1] - (len(front) + len(back) + 1)) * ' '
+        self.transport.write('\r%s%s%s' % (front, spaces, back))
+
+
+    def _getFilename(self, line):
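+        # Split the first filename off 'line', honouring single or double
+        # quotes and backslash escapes; returns (filename, rest_of_line).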
+        line = line.lstrip()
+        if not line:
+            return None, ''
+        if line[0] in '\'"':
+            ret = []
+            line = list(line)
+            try:
+                for i in range(1,len(line)):
+                    c = line[i]
+                    if c == line[0]:
+                        return ''.join(ret), ''.join(line[i+1:]).lstrip()
+                    elif c == '\\': # quoted character
+                        del line[i]
+                        if line[i] not in '\'"\\':
+                            raise IndexError, "bad quote: \\%s" % line[i]
+                        ret.append(line[i])
+                    else:
+                        ret.append(line[i])
+            except IndexError:
+                raise IndexError, "unterminated quote"
+        ret = line.split(None, 1)
+        if len(ret) == 1:
+            return ret[0], ''
+        else:
+            return ret
+
+StdioClient.__dict__['cmd_?'] = StdioClient.cmd_HELP
+
+class SSHConnection(connection.SSHConnection):
+    def serviceStarted(self):
+        self.openChannel(SSHSession())
+
+class SSHSession(channel.SSHChannel):
+
+    name = 'session'
+
+    def channelOpen(self, foo):
+        log.msg('session %s open' % self.id)
+        if self.conn.options['subsystem'].startswith('/'):
+            request = 'exec'
+        else:
+            request = 'subsystem'
+        d = self.conn.sendRequest(self, request, \
+            common.NS(self.conn.options['subsystem']), wantReply=1)
+        d.addCallback(self._cbSubsystem)
+        d.addErrback(_ebExit)
+
+    def _cbSubsystem(self, result):
+        self.client = filetransfer.FileTransferClient()
+        self.client.makeConnection(self)
+        self.dataReceived = self.client.dataReceived
+        f = None
+        if self.conn.options['batchfile']:
+            fn = self.conn.options['batchfile']
+            if fn != '-':
+                f = file(fn)
+        self.stdio = stdio.StandardIO(StdioClient(self.client, f))
+
+    def extReceived(self, t, data):
+        if t==connection.EXTENDED_DATA_STDERR:
+            log.msg('got %s stderr data' % len(data))
+            sys.stderr.write(data)
+            sys.stderr.flush()
+
+    def eofReceived(self):
+        log.msg('got eof')
+        self.stdio.closeStdin()
+
+    def closeReceived(self):
+        log.msg('remote side closed %s' % self)
+        self.conn.sendClose(self)
+
+    def closed(self):
+        try:
+            reactor.stop()
+        except:
+            pass
+
+    def stopWriting(self):
+        self.stdio.pauseProducing()
+
+    def startWriting(self):
+        self.stdio.resumeProducing()
+
+if __name__ == '__main__':
+    run()
+
diff --git a/ThirdParty/Twisted/twisted/conch/scripts/ckeygen.py b/ThirdParty/Twisted/twisted/conch/scripts/ckeygen.py
new file mode 100644
index 0000000..4078df3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/scripts/ckeygen.py
@@ -0,0 +1,201 @@
+# -*- test-case-name: twisted.conch.test.test_ckeygen -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation module for the `ckeygen` command.
+"""
+
+import sys, os, getpass, socket
+if getpass.getpass == getpass.unix_getpass:
+    try:
+        import termios # hack around broken termios
+        termios.tcgetattr, termios.tcsetattr
+    except (ImportError, AttributeError):
+        sys.modules['termios'] = None
+        reload(getpass)
+
+from twisted.conch.ssh import keys
+from twisted.python import filepath, log, usage, randbytes
+
+
+
+class GeneralOptions(usage.Options):
+    synopsis = """Usage:    ckeygen [options]
+ """
+
+    longdesc = "ckeygen manipulates public/private keys in various ways."
+
+    optParameters = [['bits', 'b', 1024, 'Number of bits in the key to create.'],
+                     ['filename', 'f', None, 'Filename of the key file.'],
+                     ['type', 't', None, 'Specify type of key to create.'],
+                     ['comment', 'C', None, 'Provide new comment.'],
+                     ['newpass', 'N', None, 'Provide new passphrase.'],
+                     ['pass', 'P', None, 'Provide old passphrase']]
+
+    optFlags = [['fingerprint', 'l', 'Show fingerprint of key file.'],
+                ['changepass', 'p', 'Change passphrase of private key file.'],
+                ['quiet', 'q', 'Quiet.'],
+                ['showpub', 'y', 'Read private key file and print public key.']]
+
+    compData = usage.Completions(
+        optActions={"type": usage.CompleteList(["rsa", "dsa"])})
+
+
+
+def run():
+    options = GeneralOptions()
+    try:
+        options.parseOptions(sys.argv[1:])
+    except usage.UsageError, u:
+        print 'ERROR: %s' % u
+        options.opt_help()
+        sys.exit(1)
+    log.discardLogs()
+    log.deferr = handleError # HACK
+    if options['type']:
+        if options['type'] == 'rsa':
+            generateRSAkey(options)
+        elif options['type'] == 'dsa':
+            generateDSAkey(options)
+        else:
+            sys.exit('Key type was %s, must be one of: rsa, dsa' % options['type'])
+    elif options['fingerprint']:
+        printFingerprint(options)
+    elif options['changepass']:
+        changePassPhrase(options)
+    elif options['showpub']:
+        displayPublicKey(options)
+    else:
+        options.opt_help()
+        sys.exit(1)
+
+
+
+def handleError():
+    from twisted.python import failure
+    from twisted.internet import reactor
+    global exitStatus
+    exitStatus = 2
+    log.err(failure.Failure())
+    reactor.stop()
+    raise
+
+
+
+def generateRSAkey(options):
+    from Crypto.PublicKey import RSA
+    print 'Generating public/private rsa key pair.'
+    key = RSA.generate(int(options['bits']), randbytes.secureRandom)
+    _saveKey(key, options)
+
+
+
+def generateDSAkey(options):
+    from Crypto.PublicKey import DSA
+    print 'Generating public/private dsa key pair.'
+    key = DSA.generate(int(options['bits']), randbytes.secureRandom)
+    _saveKey(key, options)
+
+
+
+def printFingerprint(options):
+    if not options['filename']:
+        filename = os.path.expanduser('~/.ssh/id_rsa')
+        options['filename'] = raw_input('Enter file in which the key is (%s): ' % filename) or filename
+    if os.path.exists(options['filename']+'.pub'):
+        options['filename'] += '.pub'
+    try:
+        key = keys.Key.fromFile(options['filename'])
+        obj = key.keyObject
+        string = key.blob()
+        print '%s %s %s' % (
+            obj.size() + 1,
+            key.fingerprint(),
+            os.path.basename(options['filename']))
+    except:
+        sys.exit('bad key')
+
+
+
+def changePassPhrase(options):
+    if not options['filename']:
+        filename = os.path.expanduser('~/.ssh/id_rsa')
+        options['filename'] = raw_input('Enter file in which the key is (%s): ' % filename) or filename
+    try:
+        key = keys.Key.fromFile(options['filename']).keyObject
+    except keys.BadKeyError, e:
+        if e.args[0] != 'encrypted key with no passphrase':
+            raise
+        else:
+            if not options['pass']:
+                options['pass'] = getpass.getpass('Enter old passphrase: ')
+            key = keys.Key.fromFile(
+                options['filename'], passphrase = options['pass']).keyObject
+    if not options['newpass']:
+        while 1:
+            p1 = getpass.getpass('Enter new passphrase (empty for no passphrase): ')
+            p2 = getpass.getpass('Enter same passphrase again: ')
+            if p1 == p2:
+                break
+            print 'Passphrases do not match.  Try again.'
+        options['newpass'] = p1
+    open(options['filename'], 'w').write(
+        keys.Key(key).toString(passphrase=options['newpass']))
+    print 'Your identification has been saved with the new passphrase.'
+
+
+
+def displayPublicKey(options):
+    if not options['filename']:
+        filename = os.path.expanduser('~/.ssh/id_rsa')
+        options['filename'] = raw_input('Enter file in which the key is (%s): ' % filename) or filename
+    try:
+        key = keys.Key.fromFile(options['filename']).keyObject
+    except keys.EncryptedKeyError, e:
+        if not options.get('pass'):
+            options['pass'] = getpass.getpass('Enter passphrase: ')
+        key = keys.Key.fromFile(
+            options['filename'], passphrase = options['pass']).keyObject
+    print keys.Key(key).public().toString('openssh')
+
+
+
+def _saveKey(key, options):
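+    # Prompt for a filename and passphrase where not supplied, write the
+    # private key with mode 0600 (33152 = 0100600 octal) plus a matching
+    # .pub file, and print the resulting fingerprint.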
+    if not options['filename']:
+        kind = keys.objectType(key)
+        kind = {'ssh-rsa':'rsa','ssh-dss':'dsa'}[kind]
+        filename = os.path.expanduser('~/.ssh/id_%s'%kind)
+        options['filename'] = raw_input('Enter file in which to save the key (%s): '%filename).strip() or filename
+    if os.path.exists(options['filename']):
+        print '%s already exists.' % options['filename']
+        yn = raw_input('Overwrite (y/n)? ')
+        if yn[0].lower() != 'y':
+            sys.exit()
+    if not options['pass']:
+        while 1:
+            p1 = getpass.getpass('Enter passphrase (empty for no passphrase): ')
+            p2 = getpass.getpass('Enter same passphrase again: ')
+            if p1 == p2:
+                break
+            print 'Passphrases do not match.  Try again.'
+        options['pass'] = p1
+
+    keyObj = keys.Key(key)
+    comment = '%s@%s' % (getpass.getuser(), socket.gethostname())
+
+    filepath.FilePath(options['filename']).setContent(
+        keyObj.toString('openssh', options['pass']))
+    os.chmod(options['filename'], 33152)
+
+    filepath.FilePath(options['filename'] + '.pub').setContent(
+        keyObj.public().toString('openssh', comment))
+
+    print 'Your identification has been saved in %s' % options['filename']
+    print 'Your public key has been saved in %s.pub' % options['filename']
+    print 'The key fingerprint is:'
+    print keyObj.fingerprint()
+
+
+
+if __name__ == '__main__':
+    run()
diff --git a/ThirdParty/Twisted/twisted/conch/scripts/conch.py b/ThirdParty/Twisted/twisted/conch/scripts/conch.py
new file mode 100644
index 0000000..8c49544
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/scripts/conch.py
@@ -0,0 +1,512 @@
+# -*- test-case-name: twisted.conch.test.test_conch -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+# $Id: conch.py,v 1.65 2004/03/11 00:29:14 z3p Exp $
+
+#""" Implementation module for the `conch` command.
+#"""
+from twisted.conch.client import connect, default, options
+from twisted.conch.error import ConchError
+from twisted.conch.ssh import connection, common
+from twisted.conch.ssh import session, forwarding, channel
+from twisted.internet import reactor, stdio, task
+from twisted.python import log, usage
+
+import os, sys, getpass, struct, tty, fcntl, signal
+
+class ClientOptions(options.ConchOptions):
+
+    synopsis = """Usage:   conch [options] host [command]
+"""
+    longdesc = ("conch is a SSHv2 client that allows logging into a remote "
+                "machine and executing commands.")
+
+    optParameters = [['escape', 'e', '~'],
+                      ['localforward', 'L', None, 'listen-port:host:port   Forward local port to remote address'],
+                      ['remoteforward', 'R', None, 'listen-port:host:port   Forward remote port to local address'],
+                     ]
+
+    optFlags = [['null', 'n', 'Redirect input from /dev/null.'],
+                 ['fork', 'f', 'Fork to background after authentication.'],
+                 ['tty', 't', 'Tty; allocate a tty even if command is given.'],
+                 ['notty', 'T', 'Do not allocate a tty.'],
+                 ['noshell', 'N', 'Do not execute a shell or command.'],
+                 ['subsystem', 's', 'Invoke command (mandatory) as SSH2 subsystem.'],
+                ]
+
+    compData = usage.Completions(
+        mutuallyExclusive=[("tty", "notty")],
+        optActions={
+            "localforward": usage.Completer(descr="listen-port:host:port"),
+            "remoteforward": usage.Completer(descr="listen-port:host:port")},
+        extraActions=[usage.CompleteUserAtHost(),
+                      usage.Completer(descr="command"),
+                      usage.Completer(descr="argument", repeat=True)]
+        )
+
+    localForwards = []
+    remoteForwards = []
+
+    def opt_escape(self, esc):
+        "Set escape character; ``none'' = disable"
+        if esc == 'none':
+            self['escape'] = None
+        elif esc[0] == '^' and len(esc) == 2:
+            self['escape'] = chr(ord(esc[1])-64)
+        elif len(esc) == 1:
+            self['escape'] = esc
+        else:
+            sys.exit("Bad escape character '%s'." % esc)
+
+    def opt_localforward(self, f):
+        "Forward local port to remote address (lport:host:port)"
+        localPort, remoteHost, remotePort = f.split(':') # doesn't do v6 yet
+        localPort = int(localPort)
+        remotePort = int(remotePort)
+        self.localForwards.append((localPort, (remoteHost, remotePort)))
+
+    def opt_remoteforward(self, f):
+        """Forward remote port to local address (rport:host:port)"""
+        remotePort, connHost, connPort = f.split(':') # doesn't do v6 yet
+        remotePort = int(remotePort)
+        connPort = int(connPort)
+        self.remoteForwards.append((remotePort, (connHost, connPort)))
+
+    def parseArgs(self, host, *command):
+        self['host'] = host
+        self['command'] = ' '.join(command)
+
+# Rest of code in "run"
+options = None
+conn = None
+exitStatus = 0
+old = None
+_inRawMode = 0
+_savedRawMode = None
+
+def run():
+    global options, old
+    args = sys.argv[1:]
+    if '-l' in args: # cvs is an idiot
+        i = args.index('-l')
+        args = args[i:i+2]+args
+        del args[i+2:i+4]
+    for arg in args[:]:
+        try:
+            i = args.index(arg)
+            if arg[:2] == '-o' and args[i+1][0]!='-':
+                args[i:i+2] = [] # suck on it scp
+        except ValueError:
+            pass
+    options = ClientOptions()
+    try:
+        options.parseOptions(args)
+    except usage.UsageError, u:
+        print 'ERROR: %s' % u
+        options.opt_help()
+        sys.exit(1)
+    if options['log']:
+        if options['logfile']:
+            if options['logfile'] == '-':
+                f = sys.stdout
+            else:
+                f = file(options['logfile'], 'a+')
+        else:
+            f = sys.stderr
+        realout = sys.stdout
+        log.startLogging(f)
+        sys.stdout = realout
+    else:
+        log.discardLogs()
+    doConnect()
+    fd = sys.stdin.fileno()
+    try:
+        old = tty.tcgetattr(fd)
+    except:
+        old = None
+    try:
+        oldUSR1 = signal.signal(signal.SIGUSR1, lambda *a: reactor.callLater(0, reConnect))
+    except:
+        oldUSR1 = None
+    try:
+        reactor.run()
+    finally:
+        if old:
+            tty.tcsetattr(fd, tty.TCSANOW, old)
+        if oldUSR1:
+            signal.signal(signal.SIGUSR1, oldUSR1)
+        if (options['command'] and options['tty']) or not options['notty']:
+            signal.signal(signal.SIGWINCH, signal.SIG_DFL)
+    if sys.stdout.isatty() and not options['command']:
+        print 'Connection to %s closed.' % options['host']
+    sys.exit(exitStatus)
+
+def handleError():
+    from twisted.python import failure
+    global exitStatus
+    exitStatus = 2
+    reactor.callLater(0.01, _stopReactor)
+    log.err(failure.Failure())
+    raise
+
+def _stopReactor():
+    try:
+        reactor.stop()
+    except: pass
+
+def doConnect():
+#    log.deferr = handleError # HACK
+    if '@' in options['host']:
+        options['user'], options['host'] = options['host'].split('@',1)
+    if not options.identitys:
+        options.identitys = ['~/.ssh/id_rsa', '~/.ssh/id_dsa']
+    host = options['host']
+    if not options['user']:
+        options['user'] = getpass.getuser()
+    if not options['port']:
+        options['port'] = 22
+    else:
+        options['port'] = int(options['port'])
+    host = options['host']
+    port = options['port']
+    vhk = default.verifyHostKey
+    uao = default.SSHUserAuthClient(options['user'], options, SSHConnection())
+    connect.connect(host, port, options, vhk, uao).addErrback(_ebExit)
+
+def _ebExit(f):
+    global exitStatus
+    if hasattr(f.value, 'value'):
+        s = f.value.value
+    else:
+        s = str(f)
+    exitStatus = "conch: exiting with error %s" % f
+    reactor.callLater(0.1, _stopReactor)
+
+def onConnect():
+#    if keyAgent and options['agent']:
+#        cc = protocol.ClientCreator(reactor, SSHAgentForwardingLocal, conn)
+#        cc.connectUNIX(os.environ['SSH_AUTH_SOCK'])
+    if hasattr(conn.transport, 'sendIgnore'):
+        _KeepAlive(conn)
+    if options.localForwards:
+        for localPort, hostport in options.localForwards:
+            s = reactor.listenTCP(localPort,
+                        forwarding.SSHListenForwardingFactory(conn,
+                            hostport,
+                            SSHListenClientForwardingChannel))
+            conn.localForwards.append(s)
+    if options.remoteForwards:
+        for remotePort, hostport in options.remoteForwards:
+            log.msg('asking for remote forwarding for %s:%s' %
+                    (remotePort, hostport))
+            conn.requestRemoteForwarding(remotePort, hostport)
+        reactor.addSystemEventTrigger('before', 'shutdown', beforeShutdown)
+    if not options['noshell'] or options['agent']:
+        conn.openChannel(SSHSession())
+    if options['fork']:
+        if os.fork():
+            os._exit(0)
+        os.setsid()
+        for i in range(3):
+            try:
+                os.close(i)
+            except OSError, e:
+                import errno
+                if e.errno != errno.EBADF:
+                    raise
+
+def reConnect():
+    beforeShutdown()
+    conn.transport.transport.loseConnection()
+
+def beforeShutdown():
+    remoteForwards = options.remoteForwards
+    for remotePort, hostport in remoteForwards:
+        log.msg('cancelling %s:%s' % (remotePort, hostport))
+        conn.cancelRemoteForwarding(remotePort)
+
+def stopConnection():
+    if not options['reconnect']:
+        reactor.callLater(0.1, _stopReactor)
+
+class _KeepAlive:
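+    # Sends a keep-alive global request every 300 seconds and drops the
+    # connection if the server has not replied within 30 seconds.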
+
+    def __init__(self, conn):
+        self.conn = conn
+        self.globalTimeout = None
+        self.lc = task.LoopingCall(self.sendGlobal)
+        self.lc.start(300)
+
+    def sendGlobal(self):
+        d = self.conn.sendGlobalRequest("conch-keep-alive at twistedmatrix.com",
+                "", wantReply = 1)
+        d.addBoth(self._cbGlobal)
+        self.globalTimeout = reactor.callLater(30, self._ebGlobal)
+
+    def _cbGlobal(self, res):
+        if self.globalTimeout:
+            self.globalTimeout.cancel()
+            self.globalTimeout = None
+
+    def _ebGlobal(self):
+        if self.globalTimeout:
+            self.globalTimeout = None
+            self.conn.transport.loseConnection()
+
+class SSHConnection(connection.SSHConnection):
+    def serviceStarted(self):
+        global conn
+        conn = self
+        self.localForwards = []
+        self.remoteForwards = {}
+        if not isinstance(self, connection.SSHConnection):
+            # make these fall through
+            del self.__class__.requestRemoteForwarding
+            del self.__class__.cancelRemoteForwarding
+        onConnect()
+
+    def serviceStopped(self):
+        lf = self.localForwards
+        self.localForwards = []
+        for s in lf:
+            s.loseConnection()
+        stopConnection()
+
+    def requestRemoteForwarding(self, remotePort, hostport):
+        data = forwarding.packGlobal_tcpip_forward(('0.0.0.0', remotePort))
+        d = self.sendGlobalRequest('tcpip-forward', data,
+                                   wantReply=1)
+        log.msg('requesting remote forwarding %s:%s' %(remotePort, hostport))
+        d.addCallback(self._cbRemoteForwarding, remotePort, hostport)
+        d.addErrback(self._ebRemoteForwarding, remotePort, hostport)
+
+    def _cbRemoteForwarding(self, result, remotePort, hostport):
+        log.msg('accepted remote forwarding %s:%s' % (remotePort, hostport))
+        self.remoteForwards[remotePort] = hostport
+        log.msg(repr(self.remoteForwards))
+
+    def _ebRemoteForwarding(self, f, remotePort, hostport):
+        log.msg('remote forwarding %s:%s failed' % (remotePort, hostport))
+        log.msg(f)
+
+    def cancelRemoteForwarding(self, remotePort):
+        data = forwarding.packGlobal_tcpip_forward(('0.0.0.0', remotePort))
+        self.sendGlobalRequest('cancel-tcpip-forward', data)
+        log.msg('cancelling remote forwarding %s' % remotePort)
+        try:
+            del self.remoteForwards[remotePort]
+        except:
+            pass
+        log.msg(repr(self.remoteForwards))
+
+    def channel_forwarded_tcpip(self, windowSize, maxPacket, data):
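+        # The server is forwarding a remote connection back to us: if the
+        # port was requested via -R, open a channel that connects to the
+        # locally configured host:port, otherwise refuse it.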
+        log.msg('%s %s' % ('FTCP', repr(data)))
+        remoteHP, origHP = forwarding.unpackOpen_forwarded_tcpip(data)
+        log.msg(self.remoteForwards)
+        log.msg(remoteHP)
+        if self.remoteForwards.has_key(remoteHP[1]):
+            connectHP = self.remoteForwards[remoteHP[1]]
+            log.msg('connect forwarding %s' % (connectHP,))
+            return SSHConnectForwardingChannel(connectHP,
+                                            remoteWindow = windowSize,
+                                            remoteMaxPacket = maxPacket,
+                                            conn = self)
+        else:
+            raise ConchError(connection.OPEN_CONNECT_FAILED, "don't know about that port")
+
+#    def channel_auth_agent_openssh_com(self, windowSize, maxPacket, data):
+#        if options['agent'] and keyAgent:
+#            return agent.SSHAgentForwardingChannel(remoteWindow = windowSize,
+#                                             remoteMaxPacket = maxPacket,
+#                                             conn = self)
+#        else:
+#            return connection.OPEN_CONNECT_FAILED, "don't have an agent"
+
+    def channelClosed(self, channel):
+        log.msg('connection closing %s' % channel)
+        log.msg(self.channels)
+        if len(self.channels) == 1: # just us left
+            log.msg('stopping connection')
+            stopConnection()
+        else:
+            # because of the unix thing
+            self.__class__.__bases__[0].channelClosed(self, channel)
+
+class SSHSession(channel.SSHChannel):
+
+    name = 'session'
+
+    def channelOpen(self, foo):
+        log.msg('session %s open' % self.id)
+        if options['agent']:
+            d = self.conn.sendRequest(self, 'auth-agent-req at openssh.com', '', wantReply=1)
+            d.addBoth(lambda x:log.msg(x))
+        if options['noshell']: return
+        if (options['command'] and options['tty']) or not options['notty']:
+            _enterRawMode()
+        c = session.SSHSessionClient()
+        if options['escape'] and not options['notty']:
+            self.escapeMode = 1
+            c.dataReceived = self.handleInput
+        else:
+            c.dataReceived = self.write
+        c.connectionLost = lambda x=None,s=self:s.sendEOF()
+        self.stdio = stdio.StandardIO(c)
+        fd = 0
+        if options['subsystem']:
+            self.conn.sendRequest(self, 'subsystem', \
+                common.NS(options['command']))
+        elif options['command']:
+            if options['tty']:
+                term = os.environ['TERM']
+                winsz = fcntl.ioctl(fd, tty.TIOCGWINSZ, '12345678')
+                winSize = struct.unpack('4H', winsz)
+                ptyReqData = session.packRequest_pty_req(term, winSize, '')
+                self.conn.sendRequest(self, 'pty-req', ptyReqData)
+                signal.signal(signal.SIGWINCH, self._windowResized)
+            self.conn.sendRequest(self, 'exec', \
+                common.NS(options['command']))
+        else:
+            if not options['notty']:
+                term = os.environ['TERM']
+                winsz = fcntl.ioctl(fd, tty.TIOCGWINSZ, '12345678')
+                winSize = struct.unpack('4H', winsz)
+                ptyReqData = session.packRequest_pty_req(term, winSize, '')
+                self.conn.sendRequest(self, 'pty-req', ptyReqData)
+                signal.signal(signal.SIGWINCH, self._windowResized)
+            self.conn.sendRequest(self, 'shell', '')
+            #if hasattr(conn.transport, 'transport'):
+            #    conn.transport.transport.setTcpNoDelay(1)
+
+    def handleInput(self, char):
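+        # Handle the escape character (default '~') at the start of a line:
+        # '~.' disconnects, '~^Z' suspends, '~R' rekeys, '~#' lists open
+        # channels; anything else is passed through to the remote side.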
+        #log.msg('handling %s' % repr(char))
+        if char in ('\n', '\r'):
+            self.escapeMode = 1
+            self.write(char)
+        elif self.escapeMode == 1 and char == options['escape']:
+            self.escapeMode = 2
+        elif self.escapeMode == 2:
+            self.escapeMode = 1 # so we can chain escapes together
+            if char == '.': # disconnect
+                log.msg('disconnecting from escape')
+                stopConnection()
+                return
+            elif char == '\x1a': # ^Z, suspend
+                def _():
+                    _leaveRawMode()
+                    sys.stdout.flush()
+                    sys.stdin.flush()
+                    os.kill(os.getpid(), signal.SIGTSTP)
+                    _enterRawMode()
+                reactor.callLater(0, _)
+                return
+            elif char == 'R': # rekey connection
+                log.msg('rekeying connection')
+                self.conn.transport.sendKexInit()
+                return
+            elif char == '#': # display connections
+                self.stdio.write('\r\nThe following connections are open:\r\n')
+                channels = self.conn.channels.keys()
+                channels.sort()
+                for channelId in channels:
+                    self.stdio.write('  #%i %s\r\n' % (channelId, str(self.conn.channels[channelId])))
+                return
+            self.write('~' + char)
+        else:
+            self.escapeMode = 0
+            self.write(char)
+
+    def dataReceived(self, data):
+        self.stdio.write(data)
+
+    def extReceived(self, t, data):
+        if t==connection.EXTENDED_DATA_STDERR:
+            log.msg('got %s stderr data' % len(data))
+            sys.stderr.write(data)
+
+    def eofReceived(self):
+        log.msg('got eof')
+        self.stdio.loseWriteConnection()
+
+    def closeReceived(self):
+        log.msg('remote side closed %s' % self)
+        self.conn.sendClose(self)
+
+    def closed(self):
+        global old
+        log.msg('closed %s' % self)
+        log.msg(repr(self.conn.channels))
+
+    def request_exit_status(self, data):
+        global exitStatus
+        exitStatus = int(struct.unpack('>L', data)[0])
+        log.msg('exit status: %s' % exitStatus)
+
+    def sendEOF(self):
+        self.conn.sendEOF(self)
+
+    def stopWriting(self):
+        self.stdio.pauseProducing()
+
+    def startWriting(self):
+        self.stdio.resumeProducing()
+
+    def _windowResized(self, *args):
+        winsz = fcntl.ioctl(0, tty.TIOCGWINSZ, '12345678')
+        winSize = struct.unpack('4H', winsz)
+        newSize = winSize[1], winSize[0], winSize[2], winSize[3]
+        self.conn.sendRequest(self, 'window-change', struct.pack('!4L', *newSize))
+
+
+class SSHListenClientForwardingChannel(forwarding.SSHListenClientForwardingChannel): pass
+class SSHConnectForwardingChannel(forwarding.SSHConnectForwardingChannel): pass
+
+def _leaveRawMode():
+    global _inRawMode
+    if not _inRawMode:
+        return
+    fd = sys.stdin.fileno()
+    tty.tcsetattr(fd, tty.TCSANOW, _savedRawMode)
+    _inRawMode = 0
+
+def _enterRawMode():
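+    # Put the local terminal into raw mode (no echo, no canonical line
+    # processing) so keystrokes go straight to the remote session; the
+    # previous settings are saved for _leaveRawMode().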
+    global _inRawMode, _savedRawMode
+    if _inRawMode:
+        return
+    fd = sys.stdin.fileno()
+    try:
+        old = tty.tcgetattr(fd)
+        new = old[:]
+    except:
+        log.msg('not a typewriter!')
+    else:
+        # iflag
+        new[0] = new[0] | tty.IGNPAR
+        new[0] = new[0] & ~(tty.ISTRIP | tty.INLCR | tty.IGNCR | tty.ICRNL |
+                            tty.IXON | tty.IXANY | tty.IXOFF)
+        if hasattr(tty, 'IUCLC'):
+            new[0] = new[0] & ~tty.IUCLC
+
+        # lflag
+        new[3] = new[3] & ~(tty.ISIG | tty.ICANON | tty.ECHO |
+                            tty.ECHOE | tty.ECHOK | tty.ECHONL)
+        if hasattr(tty, 'IEXTEN'):
+            new[3] = new[3] & ~tty.IEXTEN
+
+        # oflag
+        new[1] = new[1] & ~tty.OPOST
+
+        new[6][tty.VMIN] = 1
+        new[6][tty.VTIME] = 0
+
+        _savedRawMode = old
+        tty.tcsetattr(fd, tty.TCSANOW, new)
+        #tty.setraw(fd)
+        _inRawMode = 1
+
+if __name__ == '__main__':
+    run()
+
diff --git a/ThirdParty/Twisted/twisted/conch/scripts/tkconch.py b/ThirdParty/Twisted/twisted/conch/scripts/tkconch.py
new file mode 100644
index 0000000..eb00186
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/scripts/tkconch.py
@@ -0,0 +1,572 @@
+# -*- test-case-name: twisted.conch.test.test_scripts -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation module for the `tkconch` command.
+"""
+
+import Tkinter, tkFileDialog, tkFont, tkMessageBox, string
+from twisted.conch.ui import tkvt100
+from twisted.conch.ssh import transport, userauth, connection, common, keys
+from twisted.conch.ssh import session, forwarding, channel
+from twisted.conch.client.default import isInKnownHosts
+from twisted.conch import error
+from twisted.internet import reactor, defer, protocol, tksupport
+from twisted.python import usage, log
+
+import os, sys, getpass, struct, base64, signal
+
+class TkConchMenu(Tkinter.Frame):
+    def __init__(self, *args, **params):
+        ## Standard heading: initialization
+        apply(Tkinter.Frame.__init__, (self,) + args, params)
+
+        self.master.title('TkConch')
+        self.localRemoteVar = Tkinter.StringVar()
+        self.localRemoteVar.set('local')
+
+        Tkinter.Label(self, anchor='w', justify='left', text='Hostname').grid(column=1, row=1, sticky='w')
+        self.host = Tkinter.Entry(self)
+        self.host.grid(column=2, columnspan=2, row=1, sticky='nesw')
+
+        Tkinter.Label(self, anchor='w', justify='left', text='Port').grid(column=1, row=2, sticky='w')
+        self.port = Tkinter.Entry(self)
+        self.port.grid(column=2, columnspan=2, row=2, sticky='nesw')
+
+        Tkinter.Label(self, anchor='w', justify='left', text='Username').grid(column=1, row=3, sticky='w')
+        self.user = Tkinter.Entry(self)
+        self.user.grid(column=2, columnspan=2, row=3, sticky='nesw')
+
+        Tkinter.Label(self, anchor='w', justify='left', text='Command').grid(column=1, row=4, sticky='w')
+        self.command = Tkinter.Entry(self)
+        self.command.grid(column=2, columnspan=2, row=4, sticky='nesw')
+
+        Tkinter.Label(self, anchor='w', justify='left', text='Identity').grid(column=1, row=5, sticky='w')
+        self.identity = Tkinter.Entry(self)
+        self.identity.grid(column=2, row=5, sticky='nesw')
+        Tkinter.Button(self, command=self.getIdentityFile, text='Browse').grid(column=3, row=5, sticky='nesw')
+
+        Tkinter.Label(self, text='Port Forwarding').grid(column=1, row=6, sticky='w')
+        self.forwards = Tkinter.Listbox(self, height=0, width=0)
+        self.forwards.grid(column=2, columnspan=2, row=6, sticky='nesw')
+        Tkinter.Button(self, text='Add', command=self.addForward).grid(column=1, row=7)
+        Tkinter.Button(self, text='Remove', command=self.removeForward).grid(column=1, row=8)
+        self.forwardPort = Tkinter.Entry(self)
+        self.forwardPort.grid(column=2, row=7, sticky='nesw')
+        Tkinter.Label(self, text='Port').grid(column=3, row=7, sticky='nesw')
+        self.forwardHost = Tkinter.Entry(self)
+        self.forwardHost.grid(column=2, row=8, sticky='nesw')
+        Tkinter.Label(self, text='Host').grid(column=3, row=8, sticky='nesw')
+        self.localForward = Tkinter.Radiobutton(self, text='Local', variable=self.localRemoteVar, value='local')
+        self.localForward.grid(column=2, row=9)
+        self.remoteForward = Tkinter.Radiobutton(self, text='Remote', variable=self.localRemoteVar, value='remote')
+        self.remoteForward.grid(column=3, row=9)
+
+        Tkinter.Label(self, text='Advanced Options').grid(column=1, columnspan=3, row=10, sticky='nesw')
+
+        Tkinter.Label(self, anchor='w', justify='left', text='Cipher').grid(column=1, row=11, sticky='w')
+        self.cipher = Tkinter.Entry(self, name='cipher')
+        self.cipher.grid(column=2, columnspan=2, row=11, sticky='nesw')
+
+        Tkinter.Label(self, anchor='w', justify='left', text='MAC').grid(column=1, row=12, sticky='w')
+        self.mac = Tkinter.Entry(self, name='mac')
+        self.mac.grid(column=2, columnspan=2, row=12, sticky='nesw')
+
+        Tkinter.Label(self, anchor='w', justify='left', text='Escape Char').grid(column=1, row=13, sticky='w')
+        self.escape = Tkinter.Entry(self, name='escape')
+        self.escape.grid(column=2, columnspan=2, row=13, sticky='nesw')
+        Tkinter.Button(self, text='Connect!', command=self.doConnect).grid(column=1, columnspan=3, row=14, sticky='nesw')
+
+        # Resize behavior(s)
+        self.grid_rowconfigure(6, weight=1, minsize=64)
+        self.grid_columnconfigure(2, weight=1, minsize=2)
+
+        self.master.protocol("WM_DELETE_WINDOW", sys.exit)
+
+
+    def getIdentityFile(self):
+        r = tkFileDialog.askopenfilename()
+        if r:
+            self.identity.delete(0, Tkinter.END)
+            self.identity.insert(Tkinter.END, r)
+
+    def addForward(self):
+        port = self.forwardPort.get()
+        self.forwardPort.delete(0, Tkinter.END)
+        host = self.forwardHost.get()
+        self.forwardHost.delete(0, Tkinter.END)
+        if self.localRemoteVar.get() == 'local':
+            self.forwards.insert(Tkinter.END, 'L:%s:%s' % (port, host))
+        else:
+            self.forwards.insert(Tkinter.END, 'R:%s:%s' % (port, host))
+
+    def removeForward(self):
+        cur = self.forwards.curselection()
+        if cur:
+            self.forwards.delete(cur[0])
+
+    def doConnect(self):
+        finished = 1
+        options['host'] = self.host.get()
+        options['port'] = self.port.get()
+        options['user'] = self.user.get()
+        options['command'] = self.command.get()
+        cipher = self.cipher.get()
+        mac = self.mac.get()
+        escape = self.escape.get()
+        if cipher:
+            if cipher in SSHClientTransport.supportedCiphers:
+                SSHClientTransport.supportedCiphers = [cipher]
+            else:
+                tkMessageBox.showerror('TkConch', 'Bad cipher.')
+                finished = 0
+
+        if mac:
+            if mac in SSHClientTransport.supportedMACs:
+                SSHClientTransport.supportedMACs = [mac]
+            elif finished:
+                tkMessageBox.showerror('TkConch', 'Bad MAC.')
+                finished = 0
+
+        if escape:
+            if escape == 'none':
+                options['escape'] = None
+            elif escape[0] == '^' and len(escape) == 2:
+                options['escape'] = chr(ord(escape[1])-64)
+            elif len(escape) == 1:
+                options['escape'] = escape
+            elif finished:
+                tkMessageBox.showerror('TkConch', "Bad escape character '%s'." % escape)
+                finished = 0
+
+        if self.identity.get():
+            options.identitys.append(self.identity.get())
+
+        for line in self.forwards.get(0,Tkinter.END):
+            if line[0]=='L':
+                options.opt_localforward(line[2:])
+            else:
+                options.opt_remoteforward(line[2:])
+
+        if '@' in options['host']:
+            options['user'], options['host'] = options['host'].split('@',1)
+
+        if (not options['host'] or not options['user']) and finished:
+            tkMessageBox.showerror('TkConch', 'Missing host or username.')
+            finished = 0
+        if finished:
+            self.master.quit()
+            self.master.destroy()
+            if options['log']:
+                realout = sys.stdout
+                log.startLogging(sys.stderr)
+                sys.stdout = realout
+            else:
+                log.discardLogs()
+            log.deferr = handleError # HACK
+            if not options.identitys:
+                options.identitys = ['~/.ssh/id_rsa', '~/.ssh/id_dsa']
+            host = options['host']
+            port = int(options['port'] or 22)
+            log.msg((host,port))
+            reactor.connectTCP(host, port, SSHClientFactory())
+            frame.master.deiconify()
+            frame.master.title('%s@%s - TkConch' % (options['user'], options['host']))
+        else:
+            self.focus()
+
+class GeneralOptions(usage.Options):
+    synopsis = """Usage:    tkconch [options] host [command]
+ """
+
+    optParameters = [['user', 'l', None, 'Log in using this user name.'],
+                    ['identity', 'i', '~/.ssh/identity', 'Identity for public key authentication'],
+                    ['escape', 'e', '~', "Set escape character; ``none'' = disable"],
+                    ['cipher', 'c', None, 'Select encryption algorithm.'],
+                    ['macs', 'm', None, 'Specify MAC algorithms for protocol version 2.'],
+                    ['port', 'p', None, 'Connect to this port.  Server must be on the same port.'],
+                    ['localforward', 'L', None, 'listen-port:host:port   Forward local port to remote address'],
+                    ['remoteforward', 'R', None, 'listen-port:host:port   Forward remote port to local address'],
+                    ]
+
+    optFlags = [['tty', 't', 'Tty; allocate a tty even if command is given.'],
+                ['notty', 'T', 'Do not allocate a tty.'],
+                ['version', 'V', 'Display version number only.'],
+                ['compress', 'C', 'Enable compression.'],
+                ['noshell', 'N', 'Do not execute a shell or command.'],
+                ['subsystem', 's', 'Invoke command (mandatory) as SSH2 subsystem.'],
+                ['log', 'v', 'Log to stderr'],
+                ['ansilog', 'a', 'Print the received data to stdout']]
+
+    _ciphers = transport.SSHClientTransport.supportedCiphers
+    _macs = transport.SSHClientTransport.supportedMACs
+
+    compData = usage.Completions(
+        mutuallyExclusive=[("tty", "notty")],
+        optActions={
+            "cipher": usage.CompleteList(_ciphers),
+            "macs": usage.CompleteList(_macs),
+            "localforward": usage.Completer(descr="listen-port:host:port"),
+            "remoteforward": usage.Completer(descr="listen-port:host:port")},
+        extraActions=[usage.CompleteUserAtHost(),
+                      usage.Completer(descr="command"),
+                      usage.Completer(descr="argument", repeat=True)]
+        )
+
+    identitys = []
+    localForwards = []
+    remoteForwards = []
+
+    def opt_identity(self, i):
+        self.identitys.append(i)
+
+    def opt_localforward(self, f):
+        localPort, remoteHost, remotePort = f.split(':') # doesn't do v6 yet
+        localPort = int(localPort)
+        remotePort = int(remotePort)
+        self.localForwards.append((localPort, (remoteHost, remotePort)))
+
+    def opt_remoteforward(self, f):
+        remotePort, connHost, connPort = f.split(':') # doesn't do v6 yet
+        remotePort = int(remotePort)
+        connPort = int(connPort)
+        self.remoteForwards.append((remotePort, (connHost, connPort)))
+
+    def opt_compress(self):
+        SSHClientTransport.supportedCompressions[0:1] = ['zlib']
+
+    def parseArgs(self, *args):
+        if args:
+            self['host'] = args[0]
+            self['command'] = ' '.join(args[1:])
+        else:
+            self['host'] = ''
+            self['command'] = ''
+
+# Rest of code in "run"
+options = None
+menu = None
+exitStatus = 0
+frame = None
+
+def deferredAskFrame(question, echo):
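+    # Ask the user a question inside the VT100 frame; the returned Deferred
+    # fires with the line typed.  'echo' controls whether keystrokes are
+    # displayed (disabled for password prompts).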
+    if frame.callback:
+        raise ValueError("can't ask 2 questions at once!")
+    d = defer.Deferred()
+    resp = []
+    def gotChar(ch, resp=resp):
+        if not ch: return
+        if ch=='\x03': # C-c
+            reactor.stop()
+        if ch=='\r':
+            frame.write('\r\n')
+            stresp = ''.join(resp)
+            del resp
+            frame.callback = None
+            d.callback(stresp)
+            return
+        elif 32 <= ord(ch) < 127:
+            resp.append(ch)
+            if echo:
+                frame.write(ch)
+        elif ord(ch) == 8 and resp: # BS
+            if echo: frame.write('\x08 \x08')
+            resp.pop()
+    frame.callback = gotChar
+    frame.write(question)
+    frame.canvas.focus_force()
+    return d
+
+def run():
+    global menu, options, frame
+    args = sys.argv[1:]
+    if '-l' in args: # cvs is an idiot
+        i = args.index('-l')
+        args = args[i:i+2]+args
+        del args[i+2:i+4]
+    for arg in args[:]:
+        try:
+            i = args.index(arg)
+            if arg[:2] == '-o' and args[i+1][0]!='-':
+                args[i:i+2] = [] # suck on it scp
+        except ValueError:
+            pass
+    root = Tkinter.Tk()
+    root.withdraw()
+    top = Tkinter.Toplevel()
+    menu = TkConchMenu(top)
+    menu.pack(side=Tkinter.TOP, fill=Tkinter.BOTH, expand=1)
+    options = GeneralOptions()
+    try:
+        options.parseOptions(args)
+    except usage.UsageError, u:
+        print 'ERROR: %s' % u
+        options.opt_help()
+        sys.exit(1)
+    for k,v in options.items():
+        if v and hasattr(menu, k):
+            getattr(menu,k).insert(Tkinter.END, v)
+    for (p, (rh, rp)) in options.localForwards:
+        menu.forwards.insert(Tkinter.END, 'L:%s:%s:%s' % (p, rh, rp))
+    options.localForwards = []
+    for (p, (rh, rp)) in options.remoteForwards:
+        menu.forwards.insert(Tkinter.END, 'R:%s:%s:%s' % (p, rh, rp))
+    options.remoteForwards = []
+    frame = tkvt100.VT100Frame(root, callback=None)
+    root.geometry('%dx%d'%(tkvt100.fontWidth*frame.width+3, tkvt100.fontHeight*frame.height+3))
+    frame.pack(side = Tkinter.TOP)
+    tksupport.install(root)
+    root.withdraw()
+    if (options['host'] and options['user']) or '@' in options['host']:
+        menu.doConnect()
+    else:
+        top.mainloop()
+    reactor.run()
+    sys.exit(exitStatus)
+
+def handleError():
+    from twisted.python import failure
+    global exitStatus
+    exitStatus = 2
+    log.err(failure.Failure())
+    reactor.stop()
+    raise
+
+class SSHClientFactory(protocol.ClientFactory):
+    noisy = 1
+
+    def stopFactory(self):
+        reactor.stop()
+
+    def buildProtocol(self, addr):
+        return SSHClientTransport()
+
+    def clientConnectionFailed(self, connector, reason):
+        tkMessageBox.showwarning('TkConch','Connection Failed, Reason:\n %s: %s' % (reason.type, reason.value))
+
+class SSHClientTransport(transport.SSHClientTransport):
+
+    def receiveError(self, code, desc):
+        global exitStatus
+        exitStatus = 'conch:\tRemote side disconnected with error code %i\nconch:\treason: %s' % (code, desc)
+
+    def sendDisconnect(self, code, reason):
+        global exitStatus
+        exitStatus = 'conch:\tSending disconnect with error code %i\nconch:\treason: %s' % (code, reason)
+        transport.SSHClientTransport.sendDisconnect(self, code, reason)
+
+    def receiveDebug(self, alwaysDisplay, message, lang):
+        global options
+        if alwaysDisplay or options['log']:
+            log.msg('Received Debug Message: %s' % message)
+
+    def verifyHostKey(self, pubKey, fingerprint):
+        #d = defer.Deferred()
+        #d.addCallback(lambda x:defer.succeed(1))
+        #d.callback(2)
+        #return d
+        goodKey = isInKnownHosts(options['host'], pubKey, {'known-hosts': None})
+        if goodKey == 1: # good key
+            return defer.succeed(1)
+        elif goodKey == 2: # AAHHHHH changed
+            return defer.fail(error.ConchError('bad host key'))
+        else:
+            if options['host'] == self.transport.getPeer()[1]:
+                host = options['host']
+                khHost = options['host']
+            else:
+                host = '%s (%s)' % (options['host'],
+                                    self.transport.getPeer()[1])
+                khHost = '%s,%s' % (options['host'],
+                                    self.transport.getPeer()[1])
+            keyType = common.getNS(pubKey)[0]
+            ques = """The authenticity of host '%s' can't be established.\r
+%s key fingerprint is %s.""" % (host,
+                                {'ssh-dss':'DSA', 'ssh-rsa':'RSA'}[keyType],
+                                fingerprint)
+            ques+='\r\nAre you sure you want to continue connecting (yes/no)? '
+            return deferredAskFrame(ques, 1).addCallback(self._cbVerifyHostKey, pubKey, khHost, keyType)
+
+    def _cbVerifyHostKey(self, ans, pubKey, khHost, keyType):
+        if ans.lower() not in ('yes', 'no'):
+            return deferredAskFrame("Please type  'yes' or 'no': ",1).addCallback(self._cbVerifyHostKey, pubKey, khHost, keyType)
+        if ans.lower() == 'no':
+            frame.write('Host key verification failed.\r\n')
+            raise error.ConchError('bad host key')
+        try:
+            frame.write("Warning: Permanently added '%s' (%s) to the list of known hosts.\r\n" % (khHost, {'ssh-dss':'DSA', 'ssh-rsa':'RSA'}[keyType]))
+            known_hosts = open(os.path.expanduser('~/.ssh/known_hosts'), 'a')
+            encodedKey = base64.encodestring(pubKey).replace('\n', '')
+            known_hosts.write('\n%s %s %s' % (khHost, keyType, encodedKey))
+            known_hosts.close()
+        except:
+            log.deferr()
+            raise error.ConchError
+
+    def connectionSecure(self):
+        if options['user']:
+            user = options['user']
+        else:
+            user = getpass.getuser()
+        self.requestService(SSHUserAuthClient(user, SSHConnection()))
+
+class SSHUserAuthClient(userauth.SSHUserAuthClient):
+    usedFiles = []
+
+    def getPassword(self, prompt = None):
+        if not prompt:
+            prompt = "%s@%s's password: " % (self.user, options['host'])
+        return deferredAskFrame(prompt,0)
+
+    def getPublicKey(self):
+        files = [x for x in options.identitys if x not in self.usedFiles]
+        if not files:
+            return None
+        file = files[0]
+        log.msg(file)
+        self.usedFiles.append(file)
+        file = os.path.expanduser(file)
+        file += '.pub'
+        if not os.path.exists(file):
+            return
+        try:
+            return keys.Key.fromFile(file).blob()
+        except:
+            return self.getPublicKey() # try again
+
+    def getPrivateKey(self):
+        file = os.path.expanduser(self.usedFiles[-1])
+        if not os.path.exists(file):
+            return None
+        try:
+            return defer.succeed(keys.Key.fromFile(file).keyObject)
+        except keys.BadKeyError, e:
+            if e.args[0] == 'encrypted key with no password':
+                prompt = "Enter passphrase for key '%s': " % \
+                       self.usedFiles[-1]
+                return deferredAskFrame(prompt, 0).addCallback(self._cbGetPrivateKey, 0)
+    def _cbGetPrivateKey(self, ans, count):
+        file = os.path.expanduser(self.usedFiles[-1])
+        try:
+            return keys.Key.fromFile(file, password = ans).keyObject
+        except keys.BadKeyError:
+            if count == 2:
+                raise
+            prompt = "Enter passphrase for key '%s': " % \
+                   self.usedFiles[-1]
+            return deferredAskFrame(prompt, 0).addCallback(self._cbGetPrivateKey, count+1)
+
+class SSHConnection(connection.SSHConnection):
+    def serviceStarted(self):
+        if not options['noshell']:
+            self.openChannel(SSHSession())
+        if options.localForwards:
+            for localPort, hostport in options.localForwards:
+                reactor.listenTCP(localPort,
+                            forwarding.SSHListenForwardingFactory(self,
+                                hostport,
+                                forwarding.SSHListenClientForwardingChannel))
+        if options.remoteForwards:
+            for remotePort, hostport in options.remoteForwards:
+                log.msg('asking for remote forwarding for %s:%s' %
+                        (remotePort, hostport))
+                data = forwarding.packGlobal_tcpip_forward(
+                    ('0.0.0.0', remotePort))
+                d = self.sendGlobalRequest('tcpip-forward', data)
+                self.remoteForwards[remotePort] = hostport
+
+class SSHSession(channel.SSHChannel):
+
+    name = 'session'
+
+    def channelOpen(self, foo):
+        #global globalSession
+        #globalSession = self
+        # turn off local echo
+        self.escapeMode = 1
+        c = session.SSHSessionClient()
+        if options['escape']:
+            c.dataReceived = self.handleInput
+        else:
+            c.dataReceived = self.write
+        c.connectionLost = self.sendEOF
+        frame.callback = c.dataReceived
+        frame.canvas.focus_force()
+        if options['subsystem']:
+            self.conn.sendRequest(self, 'subsystem', \
+                common.NS(options['command']))
+        elif options['command']:
+            if options['tty']:
+                term = os.environ.get('TERM', 'xterm')
+                #winsz = fcntl.ioctl(fd, tty.TIOCGWINSZ, '12345678')
+                winSize = (25,80,0,0) #struct.unpack('4H', winsz)
+                ptyReqData = session.packRequest_pty_req(term, winSize, '')
+                self.conn.sendRequest(self, 'pty-req', ptyReqData)
+            self.conn.sendRequest(self, 'exec', \
+                common.NS(options['command']))
+        else:
+            if not options['notty']:
+                term = os.environ.get('TERM', 'xterm')
+                #winsz = fcntl.ioctl(fd, tty.TIOCGWINSZ, '12345678')
+                winSize = (25,80,0,0) #struct.unpack('4H', winsz)
+                ptyReqData = session.packRequest_pty_req(term, winSize, '')
+                self.conn.sendRequest(self, 'pty-req', ptyReqData)
+            self.conn.sendRequest(self, 'shell', '')
+        self.conn.transport.transport.setTcpNoDelay(1)
+
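+    # Escape handling (see handleInput below): after a newline, typing the
+    # escape character given by options['escape'] arms escape mode; the next
+    # character then selects an action: '.' disconnects, ^Z suspends the
+    # local client, 'R' rekeys the connection, and any other character is
+    # written to the channel prefixed with '~'.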
+    def handleInput(self, char):
+        #log.msg('handling %s' % repr(char))
+        if char in ('\n', '\r'):
+            self.escapeMode = 1
+            self.write(char)
+        elif self.escapeMode == 1 and char == options['escape']:
+            self.escapeMode = 2
+        elif self.escapeMode == 2:
+            self.escapeMode = 1 # so we can chain escapes together
+            if char == '.': # disconnect
+                log.msg('disconnecting from escape')
+                reactor.stop()
+                return
+            elif char == '\x1a': # ^Z, suspend
+                # following line courtesy of Erwin@freenode
+                os.kill(os.getpid(), signal.SIGSTOP)
+                return
+            elif char == 'R': # rekey connection
+                log.msg('rekeying connection')
+                self.conn.transport.sendKexInit()
+                return
+            self.write('~' + char)
+        else:
+            self.escapeMode = 0
+            self.write(char)
+
+    def dataReceived(self, data):
+        if options['ansilog']:
+            print repr(data)
+        frame.write(data)
+
+    def extReceived(self, t, data):
+        if t==connection.EXTENDED_DATA_STDERR:
+            log.msg('got %s stderr data' % len(data))
+            sys.stderr.write(data)
+            sys.stderr.flush()
+
+    def eofReceived(self):
+        log.msg('got eof')
+        sys.stdin.close()
+
+    def closed(self):
+        log.msg('closed %s' % self)
+        if len(self.conn.channels) == 1: # just us left
+            reactor.stop()
+
+    def request_exit_status(self, data):
+        global exitStatus
+        exitStatus = int(struct.unpack('>L', data)[0])
+        log.msg('exit status: %s' % exitStatus)
+
+    def sendEOF(self):
+        self.conn.sendEOF(self)
+
+if __name__=="__main__":
+    run()
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/__init__.py b/ThirdParty/Twisted/twisted/conch/ssh/__init__.py
new file mode 100644
index 0000000..4b7f024
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/__init__.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+
+"""
+An SSHv2 implementation for Twisted.  Part of the Twisted.Conch package.
+
+Maintainer: Paul Swartz
+"""
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/address.py b/ThirdParty/Twisted/twisted/conch/ssh/address.py
new file mode 100644
index 0000000..c06f2bf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/address.py
@@ -0,0 +1,38 @@
+# -*- test-case-name: twisted.conch.test.test_address -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Address object for SSH network connections.
+
+Maintainer: Paul Swartz
+
+@since: 12.1
+"""
+from zope.interface import implements
+from twisted.internet.interfaces import IAddress
+from twisted.python import util
+
+
+
+class SSHTransportAddress(object, util.FancyEqMixin):
+    """
+    Object representing an SSH Transport endpoint.
+
+    @ivar address: An instance of an object which implements I{IAddress} to
+        which this transport address is connected.
+    """
+
+    implements(IAddress)
+
+    compareAttributes = ('address',)
+
+    def __init__(self, address):
+        self.address = address
+
+    def __repr__(self):
+        return 'SSHTransportAddress(%r)' % (self.address,)
+
+    def __hash__(self):
+        return hash(('SSH', self.address))
+
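+
+# A small usage sketch (not part of the upstream module): instances compare
+# equal when they wrap equal addresses, per compareAttributes above.
+def _exampleCompare():
+    from twisted.internet.address import IPv4Address
+    addr = IPv4Address('TCP', '127.0.0.1', 22)
+    assert SSHTransportAddress(addr) == SSHTransportAddress(addr)
+    assert repr(SSHTransportAddress(addr)).startswith('SSHTransportAddress(')
+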
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/agent.py b/ThirdParty/Twisted/twisted/conch/ssh/agent.py
new file mode 100644
index 0000000..c1bf1a0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/agent.py
@@ -0,0 +1,294 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implements the SSH v2 key agent protocol.  This protocol is documented in the
+SSH source code, in the file
+U{PROTOCOL.agent<http://www.openbsd.org/cgi-bin/cvsweb/src/usr.bin/ssh/PROTOCOL.agent>}.
+
+Maintainer: Paul Swartz
+"""
+
+import struct
+
+from twisted.conch.ssh.common import NS, getNS, getMP
+from twisted.conch.error import ConchError, MissingKeyStoreError
+from twisted.conch.ssh import keys
+from twisted.internet import defer, protocol
+
+
+
+class SSHAgentClient(protocol.Protocol):
+    """
+    The client side of the SSH agent protocol.  This is equivalent to
+    ssh-add(1) and can be used with either ssh-agent(1) or the SSHAgentServer
+    protocol, also in this package.
+    """
+
+    def __init__(self):
+        self.buf = ''
+        self.deferreds = []
+
+
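+    # Wire framing: each agent message is a 4-byte big-endian length (which
+    # covers the type byte and the payload), a 1-byte request/response type,
+    # and the payload.  dataReceived() below buffers until a full frame has
+    # arrived.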
+    def dataReceived(self, data):
+        self.buf += data
+        while 1:
+            if len(self.buf) <= 4:
+                return
+            packLen = struct.unpack('!L', self.buf[:4])[0]
+            if len(self.buf) < 4 + packLen:
+                return
+            packet, self.buf = self.buf[4:4 + packLen], self.buf[4 + packLen:]
+            reqType = ord(packet[0])
+            d = self.deferreds.pop(0)
+            if reqType == AGENT_FAILURE:
+                d.errback(ConchError('agent failure'))
+            elif reqType == AGENT_SUCCESS:
+                d.callback('')
+            else:
+                d.callback(packet)
+
+
+    def sendRequest(self, reqType, data):
+        pack = struct.pack('!LB',len(data) + 1, reqType) + data
+        self.transport.write(pack)
+        d = defer.Deferred()
+        self.deferreds.append(d)
+        return d
+
+
+    def requestIdentities(self):
+        """
+        @return: A L{Deferred} which will fire with a list of all keys found in
+            the SSH agent. The list of keys is comprised of (public key blob,
+            comment) tuples.
+        """
+        d = self.sendRequest(AGENTC_REQUEST_IDENTITIES, '')
+        d.addCallback(self._cbRequestIdentities)
+        return d
+
+
+    def _cbRequestIdentities(self, data):
+        """
+        Unpack a collection of identities into a list of tuples comprised of
+        public key blobs and comments.
+        """
+        if ord(data[0]) != AGENT_IDENTITIES_ANSWER:
+            raise ConchError('unexpected response: %i' % ord(data[0]))
+        numKeys = struct.unpack('!L', data[1:5])[0]
+        keys = []
+        data = data[5:]
+        for i in range(numKeys):
+            blob, data = getNS(data)
+            comment, data = getNS(data)
+            keys.append((blob, comment))
+        return keys
+
+
+    def addIdentity(self, blob, comment = ''):
+        """
+        Add a private key blob to the agent's collection of keys.
+        """
+        req = blob
+        req += NS(comment)
+        return self.sendRequest(AGENTC_ADD_IDENTITY, req)
+
+
+    def signData(self, blob, data):
+        """
+        Request that the agent sign the given C{data} with the private key
+        which corresponds to the public key given by C{blob}.  The private
+        key should have been added to the agent already.
+
+        @type blob: C{str}
+        @type data: C{str}
+        @return: A L{Deferred} which fires with a signature for given data
+            created with the given key.
+        """
+        req = NS(blob)
+        req += NS(data)
+        req += '\000\000\000\000' # flags
+        return self.sendRequest(AGENTC_SIGN_REQUEST, req).addCallback(self._cbSignData)
+
+
+    def _cbSignData(self, data):
+        if ord(data[0]) != AGENT_SIGN_RESPONSE:
+            raise ConchError('unexpected data: %i' % ord(data[0]))
+        signature = getNS(data[1:])[0]
+        return signature
+
+
+    def removeIdentity(self, blob):
+        """
+        Remove the private key corresponding to the public key in blob from the
+        running agent.
+        """
+        req = NS(blob)
+        return self.sendRequest(AGENTC_REMOVE_IDENTITY, req)
+
+
+    def removeAllIdentities(self):
+        """
+        Remove all keys from the running agent.
+        """
+        return self.sendRequest(AGENTC_REMOVE_ALL_IDENTITIES, '')
+
+
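+# Usage sketch (not part of the upstream module): connect to a running
+# ssh-agent over the UNIX socket named in $SSH_AUTH_SOCK and print the
+# comment of every key it holds.  Wrapped in a function so importing this
+# module stays side-effect free.
+def _exampleListAgentKeys():
+    import os
+    from twisted.internet import reactor
+
+    class _ListKeys(SSHAgentClient):
+        def connectionMade(self):
+            d = self.requestIdentities()
+            d.addCallback(self._printComments)
+            d.addBoth(lambda ignored: reactor.stop())
+
+        def _printComments(self, identities):
+            # identities is a list of (public key blob, comment) tuples
+            for blob, comment in identities:
+                print comment
+
+    factory = protocol.ClientFactory()
+    factory.protocol = _ListKeys
+    reactor.connectUNIX(os.environ['SSH_AUTH_SOCK'], factory)
+    reactor.run()
+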
+
+class SSHAgentServer(protocol.Protocol):
+    """
+    The server side of the SSH agent protocol.  This is equivalent to
+    ssh-agent(1) and can be used with either ssh-add(1) or the SSHAgentClient
+    protocol, also in this package.
+    """
+
+    def __init__(self):
+        self.buf = ''
+
+
+    def dataReceived(self, data):
+        self.buf += data
+        while 1:
+            if len(self.buf) <= 4:
+                return
+            packLen = struct.unpack('!L', self.buf[:4])[0]
+            if len(self.buf) < 4 + packLen:
+                return
+            packet, self.buf = self.buf[4:4 + packLen], self.buf[4 + packLen:]
+            reqType = ord(packet[0])
+            reqName = messages.get(reqType, None)
+            if not reqName:
+                self.sendResponse(AGENT_FAILURE, '')
+            else:
+                f = getattr(self, 'agentc_%s' % reqName)
+                if getattr(self.factory, 'keys', None) is None:
+                    self.sendResponse(AGENT_FAILURE, '')
+                    raise MissingKeyStoreError()
+                f(packet[1:])
+
+
+    def sendResponse(self, reqType, data):
+        pack = struct.pack('!LB', len(data) + 1, reqType) + data
+        self.transport.write(pack)
+
+
+    def agentc_REQUEST_IDENTITIES(self, data):
+        """
+        Return all of the identities that have been added to the server
+        """
+        assert data == ''
+        numKeys = len(self.factory.keys)
+        resp = []
+
+        resp.append(struct.pack('!L', numKeys))
+        for key, comment in self.factory.keys.itervalues():
+            resp.append(NS(key.blob())) # yes, wrapped in an NS
+            resp.append(NS(comment))
+        self.sendResponse(AGENT_IDENTITIES_ANSWER, ''.join(resp))
+
+
+    def agentc_SIGN_REQUEST(self, data):
+        """
+        Data is a structure with a reference to an already added key object and
+        some data that the client wants signed with that key.  If the key
+        object wasn't loaded, return AGENT_FAILURE, else return the signature.
+        """
+        blob, data = getNS(data)
+        if blob not in self.factory.keys:
+            return self.sendResponse(AGENT_FAILURE, '')
+        signData, data = getNS(data)
+        assert data == '\000\000\000\000'
+        self.sendResponse(AGENT_SIGN_RESPONSE, NS(self.factory.keys[blob][0].sign(signData)))
+
+
+    def agentc_ADD_IDENTITY(self, data):
+        """
+        Adds a private key to the agent's collection of identities.  On
+        subsequent interactions, the private key can be accessed using only the
+        corresponding public key.
+        """
+
+        # need to pre-read the key data so we can get past it to the comment string
+        keyType, rest = getNS(data)
+        if keyType == 'ssh-rsa':
+            nmp = 6
+        elif keyType == 'ssh-dss':
+            nmp = 5
+        else:
+            raise keys.BadKeyError('unknown blob type: %s' % keyType)
+
+        rest = getMP(rest, nmp)[-1] # ignore the key data for now, we just want the comment
+        comment, rest = getNS(rest) # the comment, tacked onto the end of the key blob
+
+        k = keys.Key.fromString(data, type='private_blob') # not wrapped in NS here
+        self.factory.keys[k.blob()] = (k, comment)
+        self.sendResponse(AGENT_SUCCESS, '')
+
+
+    def agentc_REMOVE_IDENTITY(self, data):
+        """
+        Remove a specific key from the agent's collection of identities.
+        """
+        blob, _ = getNS(data)
+        k = keys.Key.fromString(blob, type='blob')
+        del self.factory.keys[k.blob()]
+        self.sendResponse(AGENT_SUCCESS, '')
+
+
+    def agentc_REMOVE_ALL_IDENTITIES(self, data):
+        """
+        Remove all keys from the agent's collection of identities.
+        """
+        assert data == ''
+        self.factory.keys = {}
+        self.sendResponse(AGENT_SUCCESS, '')
+
+    # v1 messages that we ignore because we don't keep v1 keys
+    # open-ssh sends both v1 and v2 commands, so we have to
+    # do no-ops for v1 commands or we'll get "bad request" errors
+
+    def agentc_REQUEST_RSA_IDENTITIES(self, data):
+        """
+        v1 message for listing RSA1 keys; superseded by
+        agentc_REQUEST_IDENTITIES, which handles different key types.
+        """
+        self.sendResponse(AGENT_RSA_IDENTITIES_ANSWER, struct.pack('!L', 0))
+
+
+    def agentc_REMOVE_RSA_IDENTITY(self, data):
+        """
+        v1 message for removing RSA1 keys; superseded by
+        agentc_REMOVE_IDENTITY, which handles different key types.
+        """
+        self.sendResponse(AGENT_SUCCESS, '')
+
+
+    def agentc_REMOVE_ALL_RSA_IDENTITIES(self, data):
+        """
+        v1 message for removing all RSA1 keys; superseded by
+        agentc_REMOVE_ALL_IDENTITIES, which handles different key types.
+        """
+        self.sendResponse(AGENT_SUCCESS, '')
+
+
+AGENTC_REQUEST_RSA_IDENTITIES   = 1
+AGENT_RSA_IDENTITIES_ANSWER     = 2
+AGENT_FAILURE                   = 5
+AGENT_SUCCESS                   = 6
+
+AGENTC_REMOVE_RSA_IDENTITY         = 8
+AGENTC_REMOVE_ALL_RSA_IDENTITIES   = 9
+
+AGENTC_REQUEST_IDENTITIES       = 11
+AGENT_IDENTITIES_ANSWER         = 12
+AGENTC_SIGN_REQUEST             = 13
+AGENT_SIGN_RESPONSE             = 14
+AGENTC_ADD_IDENTITY             = 17
+AGENTC_REMOVE_IDENTITY          = 18
+AGENTC_REMOVE_ALL_IDENTITIES    = 19
+
+messages = {}
+for name, value in locals().copy().items():
+    if name[:7] == 'AGENTC_':
+        messages[value] = name[7:] # doesn't handle doubles
+
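+
+# Usage sketch (not part of the upstream module): serve a single key over a
+# UNIX socket.  SSHAgentServer only needs its factory to carry a 'keys' dict
+# mapping public key blobs to (Key, comment) pairs; the key path and socket
+# path below are placeholders.
+def _exampleServeOneKey(keyPath='id_rsa', socketPath='agent.sock'):
+    from twisted.internet import reactor
+
+    key = keys.Key.fromFile(keyPath)
+    factory = protocol.Factory()
+    factory.protocol = SSHAgentServer
+    factory.keys = {key.blob(): (key, 'example key')}
+    reactor.listenUNIX(socketPath, factory)
+    reactor.run()
+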
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/channel.py b/ThirdParty/Twisted/twisted/conch/ssh/channel.py
new file mode 100644
index 0000000..f498aec
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/channel.py
@@ -0,0 +1,281 @@
+# -*- test-case-name: twisted.conch.test.test_channel -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+"""
+The parent class for all the SSH Channels.  Currently implemented channels
+are session, direct-tcp, and forwarded-tcp.
+
+Maintainer: Paul Swartz
+"""
+
+from twisted.python import log
+from twisted.internet import interfaces
+from zope.interface import implements
+
+
+class SSHChannel(log.Logger):
+    """
+    A class that represents a multiplexed channel over an SSH connection.
+    The channel has a local window which is the maximum amount of data it will
+    receive, and a remote window which is the maximum amount of data the remote side
+    will accept.  There is also a maximum packet size for any individual data
+    packet going each way.
+
+    @ivar name: the name of the channel.
+    @type name: C{str}
+    @ivar localWindowSize: the maximum size of the local window in bytes.
+    @type localWindowSize: C{int}
+    @ivar localWindowLeft: how many bytes are left in the local window.
+    @type localWindowLeft: C{int}
+    @ivar localMaxPacket: the maximum size of packet we will accept in bytes.
+    @type localMaxPacket: C{int}
+    @ivar remoteWindowLeft: how many bytes are left in the remote window.
+    @type remoteWindowLeft: C{int}
+    @ivar remoteMaxPacket: the maximum size of a packet the remote side will
+        accept in bytes.
+    @type remoteMaxPacket: C{int}
+    @ivar conn: the connection this channel is multiplexed through.
+    @type conn: L{SSHConnection}
+    @ivar data: any data to send to the other side when the channel is
+        requested.
+    @type data: C{str}
+    @ivar avatar: an avatar for the logged-in user (if a server channel)
+    @ivar localClosed: True if we aren't accepting more data.
+    @type localClosed: C{bool}
+    @ivar remoteClosed: True if the other side isn't accepting more data.
+    @type remoteClosed: C{bool}
+    """
+
+    implements(interfaces.ITransport)
+
+    name = None # only needed for client channels
+
+    def __init__(self, localWindow = 0, localMaxPacket = 0,
+                       remoteWindow = 0, remoteMaxPacket = 0,
+                       conn = None, data=None, avatar = None):
+        self.localWindowSize = localWindow or 131072
+        self.localWindowLeft = self.localWindowSize
+        self.localMaxPacket = localMaxPacket or 32768
+        self.remoteWindowLeft = remoteWindow
+        self.remoteMaxPacket = remoteMaxPacket
+        self.areWriting = 1
+        self.conn = conn
+        self.data = data
+        self.avatar = avatar
+        self.specificData = ''
+        self.buf = ''
+        self.extBuf = []
+        self.closing = 0
+        self.localClosed = 0
+        self.remoteClosed = 0
+        self.id = None # gets set later by SSHConnection
+
+    def __str__(self):
+        return '<SSHChannel %s (lw %i rw %i)>' % (self.name,
+                self.localWindowLeft, self.remoteWindowLeft)
+
+    def logPrefix(self):
+        id = (self.id is not None and str(self.id)) or "unknown"
+        return "SSHChannel %s (%s) on %s" % (self.name, id,
+                self.conn.logPrefix())
+
+    def channelOpen(self, specificData):
+        """
+        Called when the channel is opened.  specificData is any data that the
+        other side sent us when opening the channel.
+
+        @type specificData: C{str}
+        """
+        log.msg('channel open')
+
+    def openFailed(self, reason):
+        """
+        Called when the open failed for some reason.
+        reason.desc is a string description, reason.code the SSH error code.
+
+        @type reason: L{error.ConchError}
+        """
+        log.msg('other side refused open\nreason: %s'% reason)
+
+    def addWindowBytes(self, bytes):
+        """
+        Called when bytes are added to the remote window.  By default it clears
+        the data buffers.
+
+        @type bytes:    C{int}
+        """
+        self.remoteWindowLeft = self.remoteWindowLeft+bytes
+        if not self.areWriting and not self.closing:
+            self.areWriting = True
+            self.startWriting()
+        if self.buf:
+            b = self.buf
+            self.buf = ''
+            self.write(b)
+        if self.extBuf:
+            b = self.extBuf
+            self.extBuf = []
+            for (type, data) in b:
+                self.writeExtended(type, data)
+
+    def requestReceived(self, requestType, data):
+        """
+        Called when a request is sent to this channel.  By default it delegates
+        to self.request_<requestType>.
+        If this function returns true, the request succeeded, otherwise it
+        failed.
+
+        @type requestType:  C{str}
+        @type data:         C{str}
+        @rtype:             C{bool}
+        """
+        foo = requestType.replace('-', '_')
+        f = getattr(self, 'request_%s'%foo, None)
+        if f:
+            return f(data)
+        log.msg('unhandled request for %s'%requestType)
+        return 0
+
+    def dataReceived(self, data):
+        """
+        Called when we receive data.
+
+        @type data: C{str}
+        """
+        log.msg('got data %s'%repr(data))
+
+    def extReceived(self, dataType, data):
+        """
+        Called when we receive extended data (usually standard error).
+
+        @type dataType: C{int}
+        @type data:     C{str}
+        """
+        log.msg('got extended data %s %s'%(dataType, repr(data)))
+
+    def eofReceived(self):
+        """
+        Called when the other side will send no more data.
+        """
+        log.msg('remote eof')
+
+    def closeReceived(self):
+        """
+        Called when the other side has closed the channel.
+        """
+        log.msg('remote close')
+        self.loseConnection()
+
+    def closed(self):
+        """
+        Called when the channel is closed.  This means that both our side and
+        the remote side have closed the channel.
+        """
+        log.msg('closed')
+
+    # transport stuff
+    def write(self, data):
+        """
+        Write some data to the channel.  If there is not enough remote window
+        available, buffer until it is.  Otherwise, split the data into
+        packets of length remoteMaxPacket and send them.
+
+        @type data: C{str}
+        """
+        if self.buf:
+            self.buf += data
+            return
+        top = len(data)
+        if top > self.remoteWindowLeft:
+            data, self.buf = (data[:self.remoteWindowLeft],
+                data[self.remoteWindowLeft:])
+            self.areWriting = 0
+            self.stopWriting()
+            top = self.remoteWindowLeft
+        rmp = self.remoteMaxPacket
+        write = self.conn.sendData
+        r = range(0, top, rmp)
+        for offset in r:
+            write(self, data[offset: offset+rmp])
+        self.remoteWindowLeft -= top
+        if self.closing and not self.buf:
+            self.loseConnection() # try again
+
+    def writeExtended(self, dataType, data):
+        """
+        Send extended data to this channel.  If there is not enough remote
+        window available, buffer until there is.  Otherwise, split the data
+        into packets of length remoteMaxPacket and send them.
+
+        @type dataType: C{int}
+        @type data:     C{str}
+        """
+        if self.extBuf:
+            if self.extBuf[-1][0] == dataType:
+                self.extBuf[-1][1] += data
+            else:
+                self.extBuf.append([dataType, data])
+            return
+        if len(data) > self.remoteWindowLeft:
+            data, self.extBuf = (data[:self.remoteWindowLeft],
+                                [[dataType, data[self.remoteWindowLeft:]]])
+            self.areWriting = 0
+            self.stopWriting()
+        while len(data) > self.remoteMaxPacket:
+            self.conn.sendExtendedData(self, dataType,
+                                             data[:self.remoteMaxPacket])
+            data = data[self.remoteMaxPacket:]
+            self.remoteWindowLeft -= self.remoteMaxPacket
+        if data:
+            self.conn.sendExtendedData(self, dataType, data)
+            self.remoteWindowLeft -= len(data)
+        if self.closing:
+            self.loseConnection() # try again
+
+    def writeSequence(self, data):
+        """
+        Part of the Transport interface.  Write a list of strings to the
+        channel.
+
+        @type data: C{list} of C{str}
+        """
+        self.write(''.join(data))
+
+    def loseConnection(self):
+        """
+        Close the channel if there is no buffered data.  Otherwise, note the
+        request and return.
+        """
+        self.closing = 1
+        if not self.buf and not self.extBuf:
+            self.conn.sendClose(self)
+
+    def getPeer(self):
+        """
+        Return a tuple describing the other side of the connection.
+
+        @rtype: C{tuple}
+        """
+        return('SSH', )+self.conn.transport.getPeer()
+
+    def getHost(self):
+        """
+        Return a tuple describing our side of the connection.
+
+        @rtype: C{tuple}
+        """
+        return('SSH', )+self.conn.transport.getHost()
+
+    def stopWriting(self):
+        """
+        Called when the remote buffer is full, as a hint to stop writing.
+        This can be ignored, but it can be helpful.
+        """
+
+    def startWriting(self):
+        """
+        Called when the remote buffer has more room, as a hint to continue
+        writing.
+        """
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/common.py b/ThirdParty/Twisted/twisted/conch/ssh/common.py
new file mode 100644
index 0000000..3afa341
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/common.py
@@ -0,0 +1,117 @@
+# -*- test-case-name: twisted.conch.test.test_ssh -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Common functions for the SSH classes.
+
+Maintainer: Paul Swartz
+"""
+
+import struct, warnings, __builtin__
+
+try:
+    from Crypto import Util
+except ImportError:
+    warnings.warn("PyCrypto not installed, but continuing anyways!",
+            RuntimeWarning)
+
+from twisted.python import randbytes
+
+
+def NS(t):
+    """
+    net string
+    """
+    return struct.pack('!L',len(t)) + t
+
+def getNS(s, count=1):
+    """
+    get net string
+    """
+    ns = []
+    c = 0
+    for i in range(count):
+        l, = struct.unpack('!L',s[c:c+4])
+        ns.append(s[c+4:4+l+c])
+        c += 4 + l
+    return tuple(ns) + (s[c:],)
+
+def MP(number):
+    if number==0: return '\000'*4
+    assert number>0
+    bn = Util.number.long_to_bytes(number)
+    if ord(bn[0])&128:
+        bn = '\000' + bn
+    return struct.pack('>L',len(bn)) + bn
+
+def getMP(data, count=1):
+    """
+    Get multiple precision integer out of the string.  A multiple precision
+    integer is stored as a 4-byte length followed by length bytes of the
+    integer.  If count is specified, get count integers out of the string.
+    The return value is a tuple of count integers followed by the rest of
+    the data.
+    """
+    mp = []
+    c = 0
+    for i in range(count):
+        length, = struct.unpack('>L',data[c:c+4])
+        mp.append(Util.number.bytes_to_long(data[c+4:c+4+length]))
+        c += 4 + length
+    return tuple(mp) + (data[c:],)
+
+def _MPpow(x, y, z):
+    """return the MP version of (x**y)%z
+    """
+    return MP(pow(x,y,z))
+
+def ffs(c, s):
+    """
+    first from second:
+    return the first item of c that also appears in s
+    """
+    for i in c:
+        if i in s: return i
+
+getMP_py = getMP
+MP_py = MP
+_MPpow_py = _MPpow
+pyPow = pow
+
+def _fastgetMP(data, count=1):
+    mp = []
+    c = 0
+    for i in range(count):
+        length = struct.unpack('!L', data[c:c+4])[0]
+        mp.append(long(gmpy.mpz(data[c + 4:c + 4 + length][::-1] + '\x00', 256)))
+        c += length + 4
+    return tuple(mp) + (data[c:],)
+
+def _fastMP(i):
+    i2 = gmpy.mpz(i).binary()[::-1]
+    return struct.pack('!L', len(i2)) + i2
+
+def _fastMPpow(x, y, z=None):
+    r = pyPow(gmpy.mpz(x),y,z).binary()[::-1]
+    return struct.pack('!L', len(r)) + r
+
+def install():
+    global getMP, MP, _MPpow
+    getMP = _fastgetMP
+    MP = _fastMP
+    _MPpow = _fastMPpow
+    # XXX: We override builtin pow so that PyCrypto can benefit from gmpy too.
+    def _fastpow(x, y, z=None, mpz=gmpy.mpz):
+        if type(x) in (long, int):
+            x = mpz(x)
+        return pyPow(x, y, z)
+    __builtin__.pow = _fastpow # evil evil
+
+try:
+    import gmpy
+    install()
+except ImportError:
+    pass
+
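+
+# A tiny self-check sketch of the wire helpers above (not part of the
+# upstream module; like the module itself it needs PyCrypto or gmpy for the
+# MP routines): strings travel as a 4-byte big-endian length plus the bytes,
+# and multiple precision integers as a length plus the big-endian magnitude.
+def _exampleRoundTrip():
+    assert NS('ssh-rsa') == '\x00\x00\x00\x07' + 'ssh-rsa'
+    assert getNS(NS('a') + NS('bc') + 'rest', 2) == ('a', 'bc', 'rest')
+    assert getMP(MP(0x1234)) == (0x1234, '')
+    assert ffs('abc', 'cde') == 'c'
+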
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/connection.py b/ThirdParty/Twisted/twisted/conch/ssh/connection.py
new file mode 100644
index 0000000..2527199
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/connection.py
@@ -0,0 +1,637 @@
+# -*- test-case-name: twisted.conch.test.test_connection -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module contains the implementation of the ssh-connection service, which
+allows access to the shell and port-forwarding.
+
+Maintainer: Paul Swartz
+"""
+
+import struct
+
+from twisted.conch.ssh import service, common
+from twisted.conch import error
+from twisted.internet import defer
+from twisted.python import log
+
+class SSHConnection(service.SSHService):
+    """
+    An implementation of the 'ssh-connection' service.  It is used to
+    multiplex multiple channels over the single SSH connection.
+
+    @ivar localChannelID: the next number to use as a local channel ID.
+    @type localChannelID: C{int}
+    @ivar channels: a C{dict} mapping a local channel ID to C{SSHChannel}
+        subclasses.
+    @type channels: C{dict}
+    @ivar localToRemoteChannel: a C{dict} mapping a local channel ID to a
+        remote channel ID.
+    @type localToRemoteChannel: C{dict}
+    @ivar channelsToRemoteChannel: a C{dict} mapping a C{SSHChannel} subclass
+        to remote channel ID.
+    @type channelsToRemoteChannel: C{dict}
+    @ivar deferreds: a C{dict} mapping a local channel ID to a C{list} of
+        C{Deferreds} for outstanding channel requests.  Also, the 'global'
+        key stores the C{list} of pending global request C{Deferred}s.
+    """
+    name = 'ssh-connection'
+
+    def __init__(self):
+        self.localChannelID = 0 # this is the current # to use for channel ID
+        self.localToRemoteChannel = {} # local channel ID -> remote channel ID
+        self.channels = {} # local channel ID -> subclass of SSHChannel
+        self.channelsToRemoteChannel = {} # subclass of SSHChannel ->
+                                          # remote channel ID
+        self.deferreds = {"global": []} # local channel -> list of deferreds 
+                            # for pending requests or 'global' -> list of 
+                            # deferreds for global requests
+        self.transport = None # gets set later
+
+
+    def serviceStarted(self):
+        if hasattr(self.transport, 'avatar'):
+            self.transport.avatar.conn = self
+
+
+    def serviceStopped(self):
+        """
+        Called when the connection is stopped.
+        """
+        map(self.channelClosed, self.channels.values())
+        self._cleanupGlobalDeferreds()
+
+
+    def _cleanupGlobalDeferreds(self):
+        """
+        All pending requests that have returned a deferred must be errbacked
+        when this service is stopped, otherwise they might be left uncalled and
+        uncallable.
+        """
+        for d in self.deferreds["global"]:
+            d.errback(error.ConchError("Connection stopped."))
+        del self.deferreds["global"][:]
+
+
+    # packet methods
+    def ssh_GLOBAL_REQUEST(self, packet):
+        """
+        The other side has made a global request.  Payload::
+            string  request type
+            bool    want reply
+            <request specific data>
+
+        This dispatches to self.gotGlobalRequest.
+        """
+        requestType, rest = common.getNS(packet)
+        wantReply, rest = ord(rest[0]), rest[1:]
+        ret = self.gotGlobalRequest(requestType, rest)
+        if wantReply:
+            reply = MSG_REQUEST_FAILURE
+            data = ''
+            if ret:
+                reply = MSG_REQUEST_SUCCESS
+                if isinstance(ret, (tuple, list)):
+                    data = ret[1]
+            self.transport.sendPacket(reply, data)
+
+    def ssh_REQUEST_SUCCESS(self, packet):
+        """
+        Our global request succeeded.  Get the appropriate Deferred and call
+        it back with the packet we received.
+        """
+        log.msg('RS')
+        self.deferreds['global'].pop(0).callback(packet)
+
+    def ssh_REQUEST_FAILURE(self, packet):
+        """
+        Our global request failed.  Get the appropriate Deferred and errback
+        it with the packet we received.
+        """
+        log.msg('RF')
+        self.deferreds['global'].pop(0).errback(
+            error.ConchError('global request failed', packet))
+
+    def ssh_CHANNEL_OPEN(self, packet):
+        """
+        The other side wants to get a channel.  Payload::
+            string  channel name
+            uint32  remote channel number
+            uint32  remote window size
+            uint32  remote maximum packet size
+            <channel specific data>
+
+        We get a channel from self.getChannel(), give it a local channel number
+        and notify the other side.  Then notify the channel by calling its
+        channelOpen method.
+        """
+        channelType, rest = common.getNS(packet)
+        senderChannel, windowSize, maxPacket = struct.unpack('>3L', rest[:12])
+        packet = rest[12:]
+        try:
+            channel = self.getChannel(channelType, windowSize, maxPacket,
+                            packet)
+            localChannel = self.localChannelID
+            self.localChannelID += 1
+            channel.id = localChannel
+            self.channels[localChannel] = channel
+            self.channelsToRemoteChannel[channel] = senderChannel
+            self.localToRemoteChannel[localChannel] = senderChannel
+            self.transport.sendPacket(MSG_CHANNEL_OPEN_CONFIRMATION,
+                struct.pack('>4L', senderChannel, localChannel,
+                    channel.localWindowSize,
+                    channel.localMaxPacket)+channel.specificData)
+            log.callWithLogger(channel, channel.channelOpen, packet)
+        except Exception, e:
+            log.msg('channel open failed')
+            log.err(e)
+            if isinstance(e, error.ConchError):
+                textualInfo, reason = e.args
+                if isinstance(textualInfo, (int, long)):
+                    # See #3657 and #3071
+                    textualInfo, reason = reason, textualInfo
+            else:
+                reason = OPEN_CONNECT_FAILED
+                textualInfo = "unknown failure"
+            self.transport.sendPacket(
+                MSG_CHANNEL_OPEN_FAILURE,
+                struct.pack('>2L', senderChannel, reason) +
+                common.NS(textualInfo) + common.NS(''))
+
+    def ssh_CHANNEL_OPEN_CONFIRMATION(self, packet):
+        """
+        The other side accepted our MSG_CHANNEL_OPEN request.  Payload::
+            uint32  local channel number
+            uint32  remote channel number
+            uint32  remote window size
+            uint32  remote maximum packet size
+            <channel specific data>
+
+        Find the channel using the local channel number and notify its
+        channelOpen method.
+        """
+        (localChannel, remoteChannel, windowSize,
+                maxPacket) = struct.unpack('>4L', packet[: 16])
+        specificData = packet[16:]
+        channel = self.channels[localChannel]
+        channel.conn = self
+        self.localToRemoteChannel[localChannel] = remoteChannel
+        self.channelsToRemoteChannel[channel] = remoteChannel
+        channel.remoteWindowLeft = windowSize
+        channel.remoteMaxPacket = maxPacket
+        log.callWithLogger(channel, channel.channelOpen, specificData)
+
+    def ssh_CHANNEL_OPEN_FAILURE(self, packet):
+        """
+        The other side did not accept our MSG_CHANNEL_OPEN request.  Payload::
+            uint32  local channel number
+            uint32  reason code
+            string  reason description
+
+        Find the channel using the local channel number and notify it by
+        calling its openFailed() method.
+        """
+        localChannel, reasonCode = struct.unpack('>2L', packet[:8])
+        reasonDesc = common.getNS(packet[8:])[0]
+        channel = self.channels[localChannel]
+        del self.channels[localChannel]
+        channel.conn = self
+        reason = error.ConchError(reasonDesc, reasonCode)
+        log.callWithLogger(channel, channel.openFailed, reason)
+
+    def ssh_CHANNEL_WINDOW_ADJUST(self, packet):
+        """
+        The other side is adding bytes to its window.  Payload::
+            uint32  local channel number
+            uint32  bytes to add
+
+        Call the channel's addWindowBytes() method to add new bytes to the
+        remote window.
+        """
+        localChannel, bytesToAdd = struct.unpack('>2L', packet[:8])
+        channel = self.channels[localChannel]
+        log.callWithLogger(channel, channel.addWindowBytes, bytesToAdd)
+
+    def ssh_CHANNEL_DATA(self, packet):
+        """
+        The other side is sending us data.  Payload::
+            uint32 local channel number
+            string data
+
+        Check to make sure the other side hasn't sent too much data (more
+        than what's in the window, or more than the maximum packet size).  If
+        they have, close the channel.  Otherwise, decrease the available
+        window and pass the data to the channel's dataReceived().
+        """
+        localChannel, dataLength = struct.unpack('>2L', packet[:8])
+        channel = self.channels[localChannel]
+        # XXX should this move to dataReceived to put client in charge?
+        if (dataLength > channel.localWindowLeft or
+           dataLength > channel.localMaxPacket): # more data than we want
+            log.callWithLogger(channel, log.msg, 'too much data')
+            self.sendClose(channel)
+            return
+            #packet = packet[:channel.localWindowLeft+4]
+        data = common.getNS(packet[4:])[0]
+        channel.localWindowLeft -= dataLength
+        if channel.localWindowLeft < channel.localWindowSize // 2:
+            self.adjustWindow(channel, channel.localWindowSize - \
+                                       channel.localWindowLeft)
+            #log.msg('local window left: %s/%s' % (channel.localWindowLeft,
+            #                                    channel.localWindowSize))
+        log.callWithLogger(channel, channel.dataReceived, data)
+
+    def ssh_CHANNEL_EXTENDED_DATA(self, packet):
+        """
+        The other side is sending us extended data.  Payload::
+            uint32  local channel number
+            uint32  type code
+            string  data
+
+        Check to make sure the other side hasn't sent too much data (more
+        than what's in the window, or more than the maximum packet size).  If
+        they have, close the channel.  Otherwise, decrease the available
+        window and pass the data and type code to the channel's
+        extReceived().
+        """
+        localChannel, typeCode, dataLength = struct.unpack('>3L', packet[:12])
+        channel = self.channels[localChannel]
+        if (dataLength > channel.localWindowLeft or
+                dataLength > channel.localMaxPacket):
+            log.callWithLogger(channel, log.msg, 'too much extdata')
+            self.sendClose(channel)
+            return
+        data = common.getNS(packet[8:])[0]
+        channel.localWindowLeft -= dataLength
+        if channel.localWindowLeft < channel.localWindowSize // 2:
+            self.adjustWindow(channel, channel.localWindowSize -
+                                       channel.localWindowLeft)
+        log.callWithLogger(channel, channel.extReceived, typeCode, data)
+
+    def ssh_CHANNEL_EOF(self, packet):
+        """
+        The other side is not sending any more data.  Payload::
+            uint32  local channel number
+
+        Notify the channel by calling its eofReceived() method.
+        """
+        localChannel = struct.unpack('>L', packet[:4])[0]
+        channel = self.channels[localChannel]
+        log.callWithLogger(channel, channel.eofReceived)
+
+    def ssh_CHANNEL_CLOSE(self, packet):
+        """
+        The other side is closing its end; it does not want to receive any
+        more data.  Payload::
+            uint32  local channel number
+
+        Notify the channel by calling its closeReceived() method.  If
+        the channel has also sent a close message, call self.channelClosed().
+        """
+        localChannel = struct.unpack('>L', packet[:4])[0]
+        channel = self.channels[localChannel]
+        log.callWithLogger(channel, channel.closeReceived)
+        channel.remoteClosed = True
+        if channel.localClosed and channel.remoteClosed:
+            self.channelClosed(channel)
+
+    def ssh_CHANNEL_REQUEST(self, packet):
+        """
+        The other side is sending a request to a channel.  Payload::
+            uint32  local channel number
+            string  request name
+            bool    want reply
+            <request specific data>
+
+        Pass the message to the channel's requestReceived method.  If the
+        other side wants a reply, add callbacks which will send the
+        reply.
+        """
+        localChannel = struct.unpack('>L', packet[: 4])[0]
+        requestType, rest = common.getNS(packet[4:])
+        wantReply = ord(rest[0])
+        channel = self.channels[localChannel]
+        d = defer.maybeDeferred(log.callWithLogger, channel,
+                channel.requestReceived, requestType, rest[1:])
+        if wantReply:
+            d.addCallback(self._cbChannelRequest, localChannel)
+            d.addErrback(self._ebChannelRequest, localChannel)
+            return d
+
+    def _cbChannelRequest(self, result, localChannel):
+        """
+        Called back if the other side wanted a reply to a channel request.  If
+        the result is true, send a MSG_CHANNEL_SUCCESS.  Otherwise, raise
+        a C{error.ConchError}
+
+        @param result: the value returned from the channel's requestReceived()
+            method.  If it's False, the request failed.
+        @type result: C{bool}
+        @param localChannel: the local channel ID of the channel to which the
+            request was made.
+        @type localChannel: C{int}
+        @raises ConchError: if the result is False.
+        """
+        if not result:
+            raise error.ConchError('failed request')
+        self.transport.sendPacket(MSG_CHANNEL_SUCCESS, struct.pack('>L',
+                                self.localToRemoteChannel[localChannel]))
+
+    def _ebChannelRequest(self, result, localChannel):
+        """
+        Called if the other side wanted a reply to the channel request and
+        the channel request failed.
+
+        @param result: a Failure, but it's not used.
+        @param localChannel: the local channel ID of the channel to which the
+            request was made.
+        @type localChannel: C{int}
+        """
+        self.transport.sendPacket(MSG_CHANNEL_FAILURE, struct.pack('>L',
+                                self.localToRemoteChannel[localChannel]))
+
+    def ssh_CHANNEL_SUCCESS(self, packet):
+        """
+        Our channel request to the other side succeeded.  Payload::
+            uint32  local channel number
+
+        Get the C{Deferred} out of self.deferreds and call it back.
+        """
+        localChannel = struct.unpack('>L', packet[:4])[0]
+        if self.deferreds.get(localChannel):
+            d = self.deferreds[localChannel].pop(0)
+            log.callWithLogger(self.channels[localChannel],
+                               d.callback, '')
+
+    def ssh_CHANNEL_FAILURE(self, packet):
+        """
+        Our channel request to the other side failed.  Payload::
+            uint32  local channel number
+
+        Get the C{Deferred} out of self.deferreds and errback it with a
+        C{error.ConchError}.
+        """
+        localChannel = struct.unpack('>L', packet[:4])[0]
+        if self.deferreds.get(localChannel):
+            d = self.deferreds[localChannel].pop(0)
+            log.callWithLogger(self.channels[localChannel],
+                               d.errback,
+                               error.ConchError('channel request failed'))
+
+    # methods for users of the connection to call
+
+    def sendGlobalRequest(self, request, data, wantReply=0):
+        """
+        Send a global request for this connection.  Currently this is only used
+        for remote->local TCP forwarding.
+
+        @type request:      C{str}
+        @type data:         C{str}
+        @type wantReply:    C{bool}
+        @rtype:             C{Deferred}/C{None}
+        """
+        self.transport.sendPacket(MSG_GLOBAL_REQUEST,
+                                  common.NS(request)
+                                  + (wantReply and '\xff' or '\x00')
+                                  + data)
+        if wantReply:
+            d = defer.Deferred()
+            self.deferreds['global'].append(d)
+            return d
+
+    def openChannel(self, channel, extra=''):
+        """
+        Open a new channel on this connection.
+
+        @type channel:  subclass of C{SSHChannel}
+        @type extra:    C{str}
+        """
+        log.msg('opening channel %s with %s %s'%(self.localChannelID,
+                channel.localWindowSize, channel.localMaxPacket))
+        self.transport.sendPacket(MSG_CHANNEL_OPEN, common.NS(channel.name)
+                    + struct.pack('>3L', self.localChannelID,
+                    channel.localWindowSize, channel.localMaxPacket)
+                    + extra)
+        channel.id = self.localChannelID
+        self.channels[self.localChannelID] = channel
+        self.localChannelID += 1
+
+    def sendRequest(self, channel, requestType, data, wantReply=0):
+        """
+        Send a request to a channel.
+
+        @type channel:      subclass of C{SSHChannel}
+        @type requestType:  C{str}
+        @type data:         C{str}
+        @type wantReply:    C{bool}
+        @rtype:             C{Deferred}/C{None}
+        """
+        if channel.localClosed:
+            return
+        log.msg('sending request %s' % requestType)
+        self.transport.sendPacket(MSG_CHANNEL_REQUEST, struct.pack('>L',
+                                    self.channelsToRemoteChannel[channel])
+                                  + common.NS(requestType)+chr(wantReply)
+                                  + data)
+        if wantReply:
+            d = defer.Deferred()
+            self.deferreds.setdefault(channel.id, []).append(d)
+            return d
+
+    def adjustWindow(self, channel, bytesToAdd):
+        """
+        Tell the other side that we will receive more data.  This should not
+        normally need to be called as it is managed automatically.
+
+        @type channel:      subclass of L{SSHChannel}
+        @type bytesToAdd:   C{int}
+        """
+        if channel.localClosed:
+            return # we're already closed
+        self.transport.sendPacket(MSG_CHANNEL_WINDOW_ADJUST, struct.pack('>2L',
+                                    self.channelsToRemoteChannel[channel],
+                                    bytesToAdd))
+        log.msg('adding %i to %i in channel %i' % (bytesToAdd,
+            channel.localWindowLeft, channel.id))
+        channel.localWindowLeft += bytesToAdd
+
+    def sendData(self, channel, data):
+        """
+        Send data to a channel.  This should not normally be used: instead use
+        channel.write(data) as it manages the window automatically.
+
+        @type channel:  subclass of L{SSHChannel}
+        @type data:     C{str}
+        """
+        if channel.localClosed:
+            return # we're already closed
+        self.transport.sendPacket(MSG_CHANNEL_DATA, struct.pack('>L',
+                                    self.channelsToRemoteChannel[channel]) +
+                                   common.NS(data))
+
+    def sendExtendedData(self, channel, dataType, data):
+        """
+        Send extended data to a channel.  This should not normally be used:
+        instead use channel.writeExtended(dataType, data) as it manages
+        the window automatically.
+
+        @type channel:  subclass of L{SSHChannel}
+        @type dataType: C{int}
+        @type data:     C{str}
+        """
+        if channel.localClosed:
+            return # we're already closed
+        self.transport.sendPacket(MSG_CHANNEL_EXTENDED_DATA, struct.pack('>2L',
+                            self.channelsToRemoteChannel[channel],dataType) \
+                            + common.NS(data))
+
+    def sendEOF(self, channel):
+        """
+        Send an EOF (End of File) for a channel.
+
+        @type channel:  subclass of L{SSHChannel}
+        """
+        if channel.localClosed:
+            return # we're already closed
+        log.msg('sending eof')
+        self.transport.sendPacket(MSG_CHANNEL_EOF, struct.pack('>L',
+                                    self.channelsToRemoteChannel[channel]))
+
+    def sendClose(self, channel):
+        """
+        Close a channel.
+
+        @type channel:  subclass of L{SSHChannel}
+        """
+        if channel.localClosed:
+            return # we're already closed
+        log.msg('sending close %i' % channel.id)
+        self.transport.sendPacket(MSG_CHANNEL_CLOSE, struct.pack('>L',
+                self.channelsToRemoteChannel[channel]))
+        channel.localClosed = True
+        if channel.localClosed and channel.remoteClosed:
+            self.channelClosed(channel)
+
+    # methods to override
+    def getChannel(self, channelType, windowSize, maxPacket, data):
+        """
+        The other side requested a channel of some sort.
+        channelType is the type of channel being requested,
+        windowSize is the initial size of the remote window,
+        maxPacket is the largest packet we should send,
+        data is any other packet data (often nothing).
+
+        We return a subclass of L{SSHChannel}.
+
+        By default, this dispatches to a method 'channel_channelType' with any
+        non-alphanumerics in the channelType replace with _'s.  If it cannot
+        find a suitable method, it returns an OPEN_UNKNOWN_CHANNEL_TYPE error.
+        The method is called with arguments of windowSize, maxPacket, data.
+
+        @type channelType:  C{str}
+        @type windowSize:   C{int}
+        @type maxPacket:    C{int}
+        @type data:         C{str}
+        @rtype:             subclass of L{SSHChannel}/C{tuple}
+        """
+        log.msg('got channel %s request' % channelType)
+        if hasattr(self.transport, "avatar"): # this is a server!
+            chan = self.transport.avatar.lookupChannel(channelType,
+                                                       windowSize,
+                                                       maxPacket,
+                                                       data)
+        else:
+            channelType = channelType.translate(TRANSLATE_TABLE)
+            f = getattr(self, 'channel_%s' % channelType, None)
+            if f is not None:
+                chan = f(windowSize, maxPacket, data)
+            else:
+                chan = None
+        if chan is None:
+            raise error.ConchError('unknown channel',
+                    OPEN_UNKNOWN_CHANNEL_TYPE)
+        else:
+            chan.conn = self
+            return chan
+
+    def gotGlobalRequest(self, requestType, data):
+        """
+        We got a global request.  In practice this is mostly used by the client
+        to request that we forward a port from the server to the client.
+        Returns either:
+            - 1: request accepted
+            - 1, <data>: request accepted with request specific data
+            - 0: request denied
+
+        By default, this dispatches to a method 'global_requestType' with
+        -'s in requestType replaced with _'s.  The found method is passed data.
+        If this method cannot be found, this method returns 0.  Otherwise, it
+        returns the return value of that method.
+
+        @type requestType:  C{str}
+        @type data:         C{str}
+        @rtype:             C{int}/C{tuple}
+        """
+        log.msg('got global %s request' % requestType)
+        if hasattr(self.transport, 'avatar'): # this is a server!
+            return self.transport.avatar.gotGlobalRequest(requestType, data)
+
+        requestType = requestType.replace('-','_')
+        f = getattr(self, 'global_%s' % requestType, None)
+        if not f:
+            return 0
+        return f(data)
+
+    def channelClosed(self, channel):
+        """
+        Called when a channel is closed.
+        It clears the local state related to the channel, and calls
+        channel.closed().
+        MAKE SURE YOU CALL THIS METHOD, even if you subclass L{SSHConnection}.
+        If you don't, things will break mysteriously.
+
+        @type channel: L{SSHChannel}
+        """
+        if channel in self.channelsToRemoteChannel: # actually open
+            channel.localClosed = channel.remoteClosed = True
+            del self.localToRemoteChannel[channel.id]
+            del self.channels[channel.id]
+            del self.channelsToRemoteChannel[channel]
+            for d in self.deferreds.setdefault(channel.id, []):
+                d.errback(error.ConchError("Channel closed."))
+            del self.deferreds[channel.id][:]
+            log.callWithLogger(channel, channel.closed)
+
+MSG_GLOBAL_REQUEST = 80
+MSG_REQUEST_SUCCESS = 81
+MSG_REQUEST_FAILURE = 82
+MSG_CHANNEL_OPEN = 90
+MSG_CHANNEL_OPEN_CONFIRMATION = 91
+MSG_CHANNEL_OPEN_FAILURE = 92
+MSG_CHANNEL_WINDOW_ADJUST = 93
+MSG_CHANNEL_DATA = 94
+MSG_CHANNEL_EXTENDED_DATA = 95
+MSG_CHANNEL_EOF = 96
+MSG_CHANNEL_CLOSE = 97
+MSG_CHANNEL_REQUEST = 98
+MSG_CHANNEL_SUCCESS = 99
+MSG_CHANNEL_FAILURE = 100
+
+OPEN_ADMINISTRATIVELY_PROHIBITED = 1
+OPEN_CONNECT_FAILED = 2
+OPEN_UNKNOWN_CHANNEL_TYPE = 3
+OPEN_RESOURCE_SHORTAGE = 4
+
+EXTENDED_DATA_STDERR = 1
+
+messages = {}
+for name, value in locals().copy().items():
+    if name[:4] == 'MSG_':
+        messages[value] = name # doesn't handle doubles
+
+import string
+alphanums = string.letters + string.digits
+TRANSLATE_TABLE = ''.join([chr(i) in alphanums and chr(i) or '_'
+    for i in range(256)])
+SSHConnection.protocolMessages = messages
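+
+
+# A minimal subclass sketch (hypothetical example, not part of the upstream
+# module): on a client, getChannel() above resolves an incoming
+# 'forwarded-tcpip' open request to channel_forwarded_tcpip() (TRANSLATE_TABLE
+# maps the '-' to '_'), and gotGlobalRequest() resolves a 'tcpip-forward'
+# request to global_tcpip_forward().
+class _ExampleConnection(SSHConnection):
+
+    def channel_forwarded_tcpip(self, windowSize, maxPacket, data):
+        # a real client would return an SSHChannel subclass wired to a local
+        # socket here; returning None makes getChannel() fail the open with
+        # OPEN_UNKNOWN_CHANNEL_TYPE, i.e. the channel is refused
+        return None
+
+    def global_tcpip_forward(self, data):
+        # data holds the host/port to listen on, packed as in forwarding.py;
+        # returning 0 denies the request
+        return 0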
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/factory.py b/ThirdParty/Twisted/twisted/conch/ssh/factory.py
new file mode 100644
index 0000000..3c50932
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/factory.py
@@ -0,0 +1,141 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A Factory for SSH servers, along with an OpenSSHFactory to use the same
+data sources as OpenSSH.
+
+Maintainer: Paul Swartz
+"""
+
+from twisted.internet import protocol
+from twisted.python import log
+from twisted.python.reflect import qual
+
+from twisted.conch import error
+from twisted.conch.ssh import keys
+import transport, userauth, connection
+
+import random
+import warnings
+
+class SSHFactory(protocol.Factory):
+    """
+    A Factory for SSH servers.
+    """
+    protocol = transport.SSHServerTransport
+
+    services = {
+        'ssh-userauth':userauth.SSHUserAuthServer,
+        'ssh-connection':connection.SSHConnection
+    }
+    def startFactory(self):
+        """
+        Check for public and private keys.
+        """
+        if not hasattr(self,'publicKeys'):
+            self.publicKeys = self.getPublicKeys()
+        for keyType, value in self.publicKeys.items():
+            if isinstance(value, str):
+                warnings.warn("Returning a mapping from strings to "
+                        "strings from getPublicKeys()/publicKeys (in %s) "
+                        "is deprecated.  Return a mapping from "
+                        "strings to Key objects instead." %
+                        (qual(self.__class__)),
+                        DeprecationWarning, stacklevel=1)
+                self.publicKeys[keyType] = keys.Key.fromString(value)
+        if not hasattr(self,'privateKeys'):
+            self.privateKeys = self.getPrivateKeys()
+        for keyType, value in self.privateKeys.items():
+            if not isinstance(value, keys.Key):
+                warnings.warn("Returning a mapping from strings to "
+                        "PyCrypto key objects from "
+                        "getPrivateKeys()/privateKeys (in %s) "
+                        "is deprecated.  Return a mapping from "
+                        "strings to Key objects instead." %
+                        (qual(self.__class__),),
+                        DeprecationWarning, stacklevel=1)
+                self.privateKeys[keyType] = keys.Key(value)
+        if not self.publicKeys or not self.privateKeys:
+            raise error.ConchError('no host keys, failing')
+        if not hasattr(self,'primes'):
+            self.primes = self.getPrimes()
+
+
+    def buildProtocol(self, addr):
+        """
+        Create an instance of the server side of the SSH protocol.
+
+        @type addr: L{twisted.internet.interfaces.IAddress} provider
+        @param addr: The address at which the server will listen.
+
+        @rtype: L{twisted.conch.ssh.SSHServerTransport}
+        @return: The built transport.
+        """
+        t = protocol.Factory.buildProtocol(self, addr)
+        t.supportedPublicKeys = self.privateKeys.keys()
+        if not self.primes:
+            log.msg('disabling diffie-hellman-group-exchange because we '
+                    'cannot find moduli file')
+            ske = t.supportedKeyExchanges[:]
+            ske.remove('diffie-hellman-group-exchange-sha1')
+            t.supportedKeyExchanges = ske
+        return t
+
+
+    def getPublicKeys(self):
+        """
+        Called when the factory is started to get the public portions of the
+        server's host keys.  Returns a dictionary mapping SSH key types to
+        public key strings.
+
+        @rtype: C{dict}
+        """
+        raise NotImplementedError('getPublicKeys unimplemented')
+
+
+    def getPrivateKeys(self):
+        """
+        Called when the factory is started to get the private portions of the
+        server's host keys.  Returns a dictionary mapping SSH key types to
+        C{Crypto.PublicKey.pubkey.pubkey} objects.
+
+        @rtype: C{dict}
+        """
+        raise NotImplementedError('getPrivateKeys unimplemented')
+
+
+    def getPrimes(self):
+        """
+        Called when the factory is started to get Diffie-Hellman generators and
+        primes to use.  Returns a dictionary mapping number of bits to lists
+        of (generator, prime) tuples.
+
+        @rtype: C{dict}
+        """
+
+
+    def getDHPrime(self, bits):
+        """
+        Return a tuple of (g, p) for a Diffie-Hellman process, with p being as
+        close to C{bits} bits as possible.
+
+        @type bits: C{int}
+        @rtype:     C{tuple}
+        """
+        primesKeys = self.primes.keys()
+        primesKeys.sort(lambda x, y: cmp(abs(x - bits), abs(y - bits)))
+        realBits = primesKeys[0]
+        return random.choice(self.primes[realBits])
+
+
+    def getService(self, transport, service):
+        """
+        Return a class to use as a service for the given transport.
+
+        @type transport:    L{transport.SSHServerTransport}
+        @type service:      C{str}
+        @rtype:             subclass of L{service.SSHService}
+        """
+        if service == 'ssh-userauth' or hasattr(transport, 'avatar'):
+            return self.services[service]
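+
+
+# A minimal concrete factory sketch (hypothetical example, not part of the
+# upstream module): startFactory() above only needs getPublicKeys() and
+# getPrivateKeys() to return mappings of key type to keys.Key objects.  The
+# host key paths are assumptions for illustration.
+class _ExampleSSHFactory(SSHFactory):
+
+    def getPublicKeys(self):
+        return {'ssh-rsa':
+                keys.Key.fromFile('/etc/ssh/ssh_host_rsa_key.pub')}
+
+    def getPrivateKeys(self):
+        return {'ssh-rsa':
+                keys.Key.fromFile('/etc/ssh/ssh_host_rsa_key')}
+
+    # getPrimes() is left as the default (no moduli), so buildProtocol()
+    # above disables diffie-hellman-group-exchange-sha1.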
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/filetransfer.py b/ThirdParty/Twisted/twisted/conch/ssh/filetransfer.py
new file mode 100644
index 0000000..9b11db0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/filetransfer.py
@@ -0,0 +1,934 @@
+# -*- test-case-name: twisted.conch.test.test_filetransfer -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+import struct, errno
+
+from twisted.internet import defer, protocol
+from twisted.python import failure, log
+
+from common import NS, getNS
+from twisted.conch.interfaces import ISFTPServer, ISFTPFile
+
+from zope import interface
+
+
+
+class FileTransferBase(protocol.Protocol):
+
+    versions = (3, )
+
+    packetTypes = {}
+
+    def __init__(self):
+        self.buf = ''
+        self.otherVersion = None # this gets set
+
+    def sendPacket(self, kind, data):
+        self.transport.write(struct.pack('!LB', len(data)+1, kind) + data)
+
+    def dataReceived(self, data):
+        self.buf += data
+        while len(self.buf) > 5:
+            length, kind = struct.unpack('!LB', self.buf[:5])
+            if len(self.buf) < 4 + length:
+                return
+            data, self.buf = self.buf[5:4+length], self.buf[4+length:]
+            packetType = self.packetTypes.get(kind, None)
+            if not packetType:
+                log.msg('no packet type for', kind)
+                continue
+            f = getattr(self, 'packet_%s' % packetType, None)
+            if not f:
+                log.msg('not implemented: %s' % packetType)
+                log.msg(repr(data[4:]))
+                self._sendStatus(data[:4], FX_OP_UNSUPPORTED,
+                                 "don't understand %s" % packetType)
+                                 "don't understand %s" % packetType)
+                #XXX not implemented
+                continue
+            try:
+                f(data)
+            except:
+                log.err()
+                continue
+
+    def _parseAttributes(self, data):
+        flags ,= struct.unpack('!L', data[:4])
+        attrs = {}
+        data = data[4:]
+        if flags & FILEXFER_ATTR_SIZE == FILEXFER_ATTR_SIZE:
+            size ,= struct.unpack('!Q', data[:8])
+            attrs['size'] = size
+            data = data[8:]
+        if flags & FILEXFER_ATTR_OWNERGROUP == FILEXFER_ATTR_OWNERGROUP:
+            uid, gid = struct.unpack('!2L', data[:8])
+            attrs['uid'] = uid
+            attrs['gid'] = gid
+            data = data[8:]
+        if flags & FILEXFER_ATTR_PERMISSIONS == FILEXFER_ATTR_PERMISSIONS:
+            perms ,= struct.unpack('!L', data[:4])
+            attrs['permissions'] = perms
+            data = data[4:]
+        if flags & FILEXFER_ATTR_ACMODTIME == FILEXFER_ATTR_ACMODTIME:
+            atime, mtime = struct.unpack('!2L', data[:8])
+            attrs['atime'] = atime
+            attrs['mtime'] = mtime
+            data = data[8:]
+        if flags & FILEXFER_ATTR_EXTENDED == FILEXFER_ATTR_EXTENDED:
+            extended_count ,= struct.unpack('!L', data[:4])
+            data = data[4:]
+            for i in xrange(extended_count):
+                extended_type, data = getNS(data)
+                extended_data, data = getNS(data)
+                attrs['ext_%s' % extended_type] = extended_data
+        return attrs, data
+
+    def _packAttributes(self, attrs):
+        flags = 0
+        data = ''
+        if 'size' in attrs:
+            data += struct.pack('!Q', attrs['size'])
+            flags |= FILEXFER_ATTR_SIZE
+        if 'uid' in attrs and 'gid' in attrs:
+            data += struct.pack('!2L', attrs['uid'], attrs['gid'])
+            flags |= FILEXFER_ATTR_OWNERGROUP
+        if 'permissions' in attrs:
+            data += struct.pack('!L', attrs['permissions'])
+            flags |= FILEXFER_ATTR_PERMISSIONS
+        if 'atime' in attrs and 'mtime' in attrs:
+            data += struct.pack('!2L', attrs['atime'], attrs['mtime'])
+            flags |= FILEXFER_ATTR_ACMODTIME
+        extended = []
+        for k in attrs:
+            if k.startswith('ext_'):
+                ext_type = NS(k[4:])
+                ext_data = NS(attrs[k])
+                extended.append(ext_type+ext_data)
+        if extended:
+            data += struct.pack('!L', len(extended))
+            data += ''.join(extended)
+            flags |= FILEXFER_ATTR_EXTENDED
+        return struct.pack('!L', flags) + data
+
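+
+# An illustration (hypothetical helper, not part of the upstream module) of the
+# attribute dictionary that _parseAttributes()/_packAttributes() round-trip;
+# all of the values below are made up.
+def _exampleAttributes():
+    return {
+        'size': 348911,                   # FILEXFER_ATTR_SIZE
+        'uid': 1000, 'gid': 1000,         # FILEXFER_ATTR_OWNERGROUP
+        'permissions': 0644,              # FILEXFER_ATTR_PERMISSIONS
+        'atime': 1400000000,              # FILEXFER_ATTR_ACMODTIME
+        'mtime': 1400000000,
+        'ext_example@example.com': 'extra data',  # FILEXFER_ATTR_EXTENDED
+    }
+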
+class FileTransferServer(FileTransferBase):
+
+    def __init__(self, data=None, avatar=None):
+        FileTransferBase.__init__(self)
+        self.client = ISFTPServer(avatar) # yay interfaces
+        self.openFiles = {}
+        self.openDirs = {}
+
+    def packet_INIT(self, data):
+        version ,= struct.unpack('!L', data[:4])
+        self.version = min(list(self.versions) + [version])
+        data = data[4:]
+        ext = {}
+        while data:
+            ext_name, data = getNS(data)
+            ext_data, data = getNS(data)
+            ext[ext_name] = ext_data
+        our_ext = self.client.gotVersion(version, ext)
+        our_ext_data = ""
+        for (k,v) in our_ext.items():
+            our_ext_data += NS(k) + NS(v)
+        self.sendPacket(FXP_VERSION, struct.pack('!L', self.version) + \
+                                     our_ext_data)
+
+    def packet_OPEN(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        filename, data = getNS(data)
+        flags ,= struct.unpack('!L', data[:4])
+        data = data[4:]
+        attrs, data = self._parseAttributes(data)
+        assert data == '', 'still have data in OPEN: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.openFile, filename, flags, attrs)
+        d.addCallback(self._cbOpenFile, requestId)
+        d.addErrback(self._ebStatus, requestId, "open failed")
+
+    def _cbOpenFile(self, fileObj, requestId):
+        fileId = str(hash(fileObj))
+        if fileId in self.openFiles:
+            raise KeyError, 'id already open'
+        self.openFiles[fileId] = fileObj
+        self.sendPacket(FXP_HANDLE, requestId + NS(fileId))
+
+    def packet_CLOSE(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        handle, data = getNS(data)
+        assert data == '', 'still have data in CLOSE: %s' % repr(data)
+        if handle in self.openFiles:
+            fileObj = self.openFiles[handle]
+            d = defer.maybeDeferred(fileObj.close)
+            d.addCallback(self._cbClose, handle, requestId)
+            d.addErrback(self._ebStatus, requestId, "close failed")
+        elif handle in self.openDirs:
+            dirObj = self.openDirs[handle][0]
+            d = defer.maybeDeferred(dirObj.close)
+            d.addCallback(self._cbClose, handle, requestId, 1)
+            d.addErrback(self._ebStatus, requestId, "close failed")
+        else:
+            self._ebClose(failure.Failure(KeyError()), requestId)
+
+    def _cbClose(self, result, handle, requestId, isDir = 0):
+        if isDir:
+            del self.openDirs[handle]
+        else:
+            del self.openFiles[handle]
+        self._sendStatus(requestId, FX_OK, 'file closed')
+
+    def packet_READ(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        handle, data = getNS(data)
+        (offset, length), data = struct.unpack('!QL', data[:12]), data[12:]
+        assert data == '', 'still have data in READ: %s' % repr(data)
+        if handle not in self.openFiles:
+            self._ebRead(failure.Failure(KeyError()), requestId)
+        else:
+            fileObj = self.openFiles[handle]
+            d = defer.maybeDeferred(fileObj.readChunk, offset, length)
+            d.addCallback(self._cbRead, requestId)
+            d.addErrback(self._ebStatus, requestId, "read failed")
+
+    def _cbRead(self, result, requestId):
+        if result == '': # python's read will return this for EOF
+            raise EOFError()
+        self.sendPacket(FXP_DATA, requestId + NS(result))
+
+    def packet_WRITE(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        handle, data = getNS(data)
+        offset, = struct.unpack('!Q', data[:8])
+        data = data[8:]
+        writeData, data = getNS(data)
+        assert data == '', 'still have data in WRITE: %s' % repr(data)
+        if handle not in self.openFiles:
+            self._ebWrite(failure.Failure(KeyError()), requestId)
+        else:
+            fileObj = self.openFiles[handle]
+            d = defer.maybeDeferred(fileObj.writeChunk, offset, writeData)
+            d.addCallback(self._cbStatus, requestId, "write succeeded")
+            d.addErrback(self._ebStatus, requestId, "write failed")
+
+    def packet_REMOVE(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        filename, data = getNS(data)
+        assert data == '', 'still have data in REMOVE: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.removeFile, filename)
+        d.addCallback(self._cbStatus, requestId, "remove succeeded")
+        d.addErrback(self._ebStatus, requestId, "remove failed")
+
+    def packet_RENAME(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        oldPath, data = getNS(data)
+        newPath, data = getNS(data)
+        assert data == '', 'still have data in RENAME: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.renameFile, oldPath, newPath)
+        d.addCallback(self._cbStatus, requestId, "rename succeeded")
+        d.addErrback(self._ebStatus, requestId, "rename failed")
+
+    def packet_MKDIR(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        path, data = getNS(data)
+        attrs, data = self._parseAttributes(data)
+        assert data == '', 'still have data in MKDIR: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.makeDirectory, path, attrs)
+        d.addCallback(self._cbStatus, requestId, "mkdir succeeded")
+        d.addErrback(self._ebStatus, requestId, "mkdir failed")
+
+    def packet_RMDIR(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        path, data = getNS(data)
+        assert data == '', 'still have data in RMDIR: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.removeDirectory, path)
+        d.addCallback(self._cbStatus, requestId, "rmdir succeeded")
+        d.addErrback(self._ebStatus, requestId, "rmdir failed")
+
+    def packet_OPENDIR(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        path, data = getNS(data)
+        assert data == '', 'still have data in OPENDIR: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.openDirectory, path)
+        d.addCallback(self._cbOpenDirectory, requestId)
+        d.addErrback(self._ebStatus, requestId, "opendir failed")
+
+    def _cbOpenDirectory(self, dirObj, requestId):
+        handle = str(hash(dirObj))
+        if handle in self.openDirs:
+            raise KeyError, "already opened this directory"
+        self.openDirs[handle] = [dirObj, iter(dirObj)]
+        self.sendPacket(FXP_HANDLE, requestId + NS(handle))
+
+    def packet_READDIR(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        handle, data = getNS(data)
+        assert data == '', 'still have data in READDIR: %s' % repr(data)
+        if handle not in self.openDirs:
+            self._ebStatus(failure.Failure(KeyError()), requestId)
+        else:
+            dirObj, dirIter = self.openDirs[handle]
+            d = defer.maybeDeferred(self._scanDirectory, dirIter, [])
+            d.addCallback(self._cbSendDirectory, requestId)
+            d.addErrback(self._ebStatus, requestId, "scan directory failed")
+
+    def _scanDirectory(self, dirIter, f):
+        while len(f) < 250:
+            try:
+                info = dirIter.next()
+            except StopIteration:
+                if not f:
+                    raise EOFError
+                return f
+            if isinstance(info, defer.Deferred):
+                info.addCallback(self._cbScanDirectory, dirIter, f)
+                return
+            else:
+                f.append(info)
+        return f
+
+    def _cbScanDirectory(self, result, dirIter, f):
+        f.append(result)
+        return self._scanDirectory(dirIter, f)
+
+    def _cbSendDirectory(self, result, requestId):
+        data = ''
+        for (filename, longname, attrs) in result:
+            data += NS(filename)
+            data += NS(longname)
+            data += self._packAttributes(attrs)
+        self.sendPacket(FXP_NAME, requestId +
+                        struct.pack('!L', len(result))+data)
+
+    def packet_STAT(self, data, followLinks = 1):
+        requestId = data[:4]
+        data = data[4:]
+        path, data = getNS(data)
+        assert data == '', 'still have data in STAT/LSTAT: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.getAttrs, path, followLinks)
+        d.addCallback(self._cbStat, requestId)
+        d.addErrback(self._ebStatus, requestId, 'stat/lstat failed')
+
+    def packet_LSTAT(self, data):
+        self.packet_STAT(data, 0)
+
+    def packet_FSTAT(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        handle, data = getNS(data)
+        assert data == '', 'still have data in FSTAT: %s' % repr(data)
+        if handle not in self.openFiles:
+            self._ebStatus(failure.Failure(KeyError('%s not in self.openFiles'
+                                        % handle)), requestId)
+        else:
+            fileObj = self.openFiles[handle]
+            d = defer.maybeDeferred(fileObj.getAttrs)
+            d.addCallback(self._cbStat, requestId)
+            d.addErrback(self._ebStatus, requestId, 'fstat failed')
+
+    def _cbStat(self, result, requestId):
+        data = requestId + self._packAttributes(result)
+        self.sendPacket(FXP_ATTRS, data)
+
+    def packet_SETSTAT(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        path, data = getNS(data)
+        attrs, data = self._parseAttributes(data)
+        if data != '':
+            log.msg('WARN: still have data in SETSTAT: %s' % repr(data))
+        d = defer.maybeDeferred(self.client.setAttrs, path, attrs)
+        d.addCallback(self._cbStatus, requestId, 'setstat succeeded')
+        d.addErrback(self._ebStatus, requestId, 'setstat failed')
+
+    def packet_FSETSTAT(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        handle, data = getNS(data)
+        attrs, data = self._parseAttributes(data)
+        assert data == '', 'still have data in FSETSTAT: %s' % repr(data)
+        if handle not in self.openFiles:
+            self._ebStatus(failure.Failure(KeyError()), requestId)
+        else:
+            fileObj = self.openFiles[handle]
+            d = defer.maybeDeferred(fileObj.setAttrs, attrs)
+            d.addCallback(self._cbStatus, requestId, 'fsetstat succeeded')
+            d.addErrback(self._ebStatus, requestId, 'fsetstat failed')
+
+    def packet_READLINK(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        path, data = getNS(data)
+        assert data == '', 'still have data in READLINK: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.readLink, path)
+        d.addCallback(self._cbReadLink, requestId)
+        d.addErrback(self._ebStatus, requestId, 'readlink failed')
+
+    def _cbReadLink(self, result, requestId):
+        self._cbSendDirectory([(result, '', {})], requestId)
+
+    def packet_SYMLINK(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        linkPath, data = getNS(data)
+        targetPath, data = getNS(data)
+        d = defer.maybeDeferred(self.client.makeLink, linkPath, targetPath)
+        d.addCallback(self._cbStatus, requestId, 'symlink succeeded')
+        d.addErrback(self._ebStatus, requestId, 'symlink failed')
+
+    def packet_REALPATH(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        path, data = getNS(data)
+        assert data == '', 'still have data in REALPATH: %s' % repr(data)
+        d = defer.maybeDeferred(self.client.realPath, path)
+        d.addCallback(self._cbReadLink, requestId) # same return format
+        d.addErrback(self._ebStatus, requestId, 'realpath failed')
+
+    def packet_EXTENDED(self, data):
+        requestId = data[:4]
+        data = data[4:]
+        extName, extData = getNS(data)
+        d = defer.maybeDeferred(self.client.extendedRequest, extName, extData)
+        d.addCallback(self._cbExtended, requestId)
+        d.addErrback(self._ebStatus, requestId, 'extended %s failed' % extName)
+
+    def _cbExtended(self, data, requestId):
+        self.sendPacket(FXP_EXTENDED_REPLY, requestId + data)
+
+    def _cbStatus(self, result, requestId, msg = "request succeeded"):
+        self._sendStatus(requestId, FX_OK, msg)
+
+    def _ebStatus(self, reason, requestId, msg = "request failed"):
+        code = FX_FAILURE
+        message = msg
+        if reason.type in (IOError, OSError):
+            if reason.value.errno == errno.ENOENT: # no such file
+                code = FX_NO_SUCH_FILE
+                message = reason.value.strerror
+            elif reason.value.errno == errno.EACCES: # permission denied
+                code = FX_PERMISSION_DENIED
+                message = reason.value.strerror
+            elif reason.value.errno == errno.EEXIST:
+                code = FX_FILE_ALREADY_EXISTS
+            else:
+                log.err(reason)
+        elif reason.type == EOFError: # EOF
+            code = FX_EOF
+            if reason.value.args:
+                message = reason.value.args[0]
+        elif reason.type == NotImplementedError:
+            code = FX_OP_UNSUPPORTED
+            if reason.value.args:
+                message = reason.value.args[0]
+        elif reason.type == SFTPError:
+            code = reason.value.code
+            message = reason.value.message
+        else:
+            log.err(reason)
+        self._sendStatus(requestId, code, message)
+
+    def _sendStatus(self, requestId, code, message, lang = ''):
+        """
+        Helper method to send a FXP_STATUS message.
+        """
+        data = requestId + struct.pack('!L', code)
+        data += NS(message)
+        data += NS(lang)
+        self.sendPacket(FXP_STATUS, data)
+
+
+    def connectionLost(self, reason):
+        """
+        Clean all opened files and directories.
+        """
+        for fileObj in self.openFiles.values():
+            fileObj.close()
+        self.openFiles = {}
+        for (dirObj, dirIter) in self.openDirs.values():
+            dirObj.close()
+        self.openDirs = {}
+
+
+
+class FileTransferClient(FileTransferBase):
+
+    def __init__(self, extData = {}):
+        """
+        @param extData: a dict of extended_name : extended_data items
+        to be sent to the server.
+        """
+        FileTransferBase.__init__(self)
+        self.extData = extData
+        self.counter = 0
+        self.openRequests = {} # id -> Deferred
+        self.wasAFile = {} # Deferred -> 1 TERRIBLE HACK
+
+    def connectionMade(self):
+        data = struct.pack('!L', max(self.versions))
+        for k,v in self.extData.iteritems():
+            data += NS(k) + NS(v)
+        self.sendPacket(FXP_INIT, data)
+
+    def _sendRequest(self, msg, data):
+        data = struct.pack('!L', self.counter) + data
+        d = defer.Deferred()
+        self.openRequests[self.counter] = d
+        self.counter += 1
+        self.sendPacket(msg, data)
+        return d
+
+    def _parseRequest(self, data):
+        (id,) = struct.unpack('!L', data[:4])
+        d = self.openRequests[id]
+        del self.openRequests[id]
+        return d, data[4:]
+
+    def openFile(self, filename, flags, attrs):
+        """
+        Open a file.
+
+        This method returns a L{Deferred} that is called back with an object
+        that provides the L{ISFTPFile} interface.
+
+        @param filename: a string representing the file to open.
+
+        @param flags: an integer of the flags to open the file with, ORed
+        together.  The flags and their values are listed at the bottom of this
+        file.
+
+        @param attrs: a dictionary of attributes to open the file with,
+        consisting of 0 or more keys.  The possible keys are::
+
+            size: the size of the file in bytes
+            uid: the user ID of the file as an integer
+            gid: the group ID of the file as an integer
+            permissions: the permissions of the file as an integer; the bit
+            representation of this field is defined by POSIX.
+            atime: the access time of the file as seconds since the epoch.
+            mtime: the modification time of the file as seconds since the epoch.
+            ext_*: extended attributes.  The server is not required to
+            understand this, but it may.
+
+        NOTE: there is no way to indicate text or binary files.  It is up
+        to the SFTP client to deal with this.
+        """
+        data = NS(filename) + struct.pack('!L', flags) + self._packAttributes(attrs)
+        d = self._sendRequest(FXP_OPEN, data)
+        self.wasAFile[d] = (1, filename) # HACK
+        return d
+
+    def removeFile(self, filename):
+        """
+        Remove the given file.
+
+        This method returns a Deferred that is called back when it succeeds.
+
+        @param filename: the name of the file as a string.
+        """
+        return self._sendRequest(FXP_REMOVE, NS(filename))
+
+    def renameFile(self, oldpath, newpath):
+        """
+        Rename the given file.
+
+        This method returns a Deferred that is called back when it succeeds.
+
+        @param oldpath: the current location of the file.
+        @param newpath: the new file name.
+        """
+        return self._sendRequest(FXP_RENAME, NS(oldpath)+NS(newpath))
+
+    def makeDirectory(self, path, attrs):
+        """
+        Make a directory.
+
+        This method returns a Deferred that is called back when it is
+        created.
+
+        @param path: the name of the directory to create as a string.
+
+        @param attrs: a dictionary of attributes to create the directory
+        with.  Its meaning is the same as the attrs in the openFile method.
+        """
+        return self._sendRequest(FXP_MKDIR, NS(path)+self._packAttributes(attrs))
+
+    def removeDirectory(self, path):
+        """
+        Remove a directory (non-recursively)
+
+        It is an error to remove a directory that has files or directories in
+        it.
+
+        This method returns a Deferred that is called back when it is removed.
+
+        @param path: the directory to remove.
+        """
+        return self._sendRequest(FXP_RMDIR, NS(path))
+
+    def openDirectory(self, path):
+        """
+        Open a directory for scanning.
+
+        This method returns a Deferred that is called back with an iterable
+        object that has a close() method.
+
+        The close() method is called when the client is finished reading
+        from the directory.  At this point, the iterable will no longer
+        be used.
+
+        The iterable returns triples of the form (filename, longname, attrs)
+        or a Deferred that returns the same.  The sequence must support
+        __getitem__, but otherwise may be any 'sequence-like' object.
+
+        filename is the name of the file relative to the directory.
+        longname is an expanded format of the filename.  The recommended format
+        is:
+        -rwxr-xr-x   1 mjos     staff      348911 Mar 25 14:29 t-filexfer
+        1234567890 123 12345678 12345678 12345678 123456789012
+
+        The first line is sample output, the second is the length of the field.
+        The fields are: permissions, link count, user owner, group owner,
+        size in bytes, modification time.
+
+        attrs is a dictionary in the format of the attrs argument to openFile.
+
+        @param path: the directory to open.
+        """
+        d = self._sendRequest(FXP_OPENDIR, NS(path))
+        self.wasAFile[d] = (0, path)
+        return d
+
+    def getAttrs(self, path, followLinks=0):
+        """
+        Return the attributes for the given path.
+
+        This method returns a dictionary in the same format as the attrs
+        argument to openFile or a Deferred that is called back with same.
+
+        @param path: the path to return attributes for as a string.
+        @param followLinks: a boolean.  If it is True, follow symbolic links
+        and return attributes for the real path at the base.  If it is False,
+        return attributes for the specified path.
+        """
+        if followLinks: m = FXP_STAT
+        else: m = FXP_LSTAT
+        return self._sendRequest(m, NS(path))
+
+    def setAttrs(self, path, attrs):
+        """
+        Set the attributes for the path.
+
+        This method returns when the attributes are set or a Deferred that is
+        called back when they are.
+
+        @param path: the path to set attributes for as a string.
+        @param attrs: a dictionary in the same format as the attrs argument to
+        openFile.
+        """
+        data = NS(path) + self._packAttributes(attrs)
+        return self._sendRequest(FXP_SETSTAT, data)
+
+    def readLink(self, path):
+        """
+        Find the root of a set of symbolic links.
+
+        This method returns the target of the link, or a Deferred that
+        returns the same.
+
+        @param path: the path of the symlink to read.
+        """
+        d = self._sendRequest(FXP_READLINK, NS(path))
+        return d.addCallback(self._cbRealPath)
+
+    def makeLink(self, linkPath, targetPath):
+        """
+        Create a symbolic link.
+
+        This method returns when the link is made, or a Deferred that
+        returns the same.
+
+        @param linkPath: the pathname of the symlink as a string
+        @param targetPath: the path of the target of the link as a string.
+        """
+        return self._sendRequest(FXP_SYMLINK, NS(linkPath)+NS(targetPath))
+
+    def realPath(self, path):
+        """
+        Convert any path to an absolute path.
+
+        This method returns the absolute path as a string, or a Deferred
+        that returns the same.
+
+        @param path: the path to convert as a string.
+        """
+        d = self._sendRequest(FXP_REALPATH, NS(path))
+        return d.addCallback(self._cbRealPath)
+
+    def _cbRealPath(self, result):
+        name, longname, attrs = result[0]
+        return name
+
+    def extendedRequest(self, request, data):
+        """
+        Make an extended request of the server.
+
+        The method returns a Deferred that is called back with
+        the result of the extended request.
+
+        @param request: the name of the extended request to make.
+        @param data: any other data that goes along with the request.
+        """
+        return self._sendRequest(FXP_EXTENDED, NS(request) + data)
+
+    def packet_VERSION(self, data):
+        version, = struct.unpack('!L', data[:4])
+        data = data[4:]
+        d = {}
+        while data:
+            k, data = getNS(data)
+            v, data = getNS(data)
+            d[k]=v
+        self.version = version
+        self.gotServerVersion(version, d)
+
+    def packet_STATUS(self, data):
+        d, data = self._parseRequest(data)
+        code, = struct.unpack('!L', data[:4])
+        data = data[4:]
+        if len(data) >= 4:
+            msg, data = getNS(data)
+            if len(data) >= 4:
+                lang, data = getNS(data)
+            else:
+                lang = ''
+        else:
+            msg = ''
+            lang = ''
+        if code == FX_OK:
+            d.callback((msg, lang))
+        elif code == FX_EOF:
+            d.errback(EOFError(msg))
+        elif code == FX_OP_UNSUPPORTED:
+            d.errback(NotImplementedError(msg))
+        else:
+            d.errback(SFTPError(code, msg, lang))
+
+    def packet_HANDLE(self, data):
+        d, data = self._parseRequest(data)
+        isFile, name = self.wasAFile.pop(d)
+        if isFile:
+            cb = ClientFile(self, getNS(data)[0])
+        else:
+            cb = ClientDirectory(self, getNS(data)[0])
+        cb.name = name
+        d.callback(cb)
+
+    def packet_DATA(self, data):
+        d, data = self._parseRequest(data)
+        d.callback(getNS(data)[0])
+
+    def packet_NAME(self, data):
+        d, data = self._parseRequest(data)
+        count, = struct.unpack('!L', data[:4])
+        data = data[4:]
+        files = []
+        for i in range(count):
+            filename, data = getNS(data)
+            longname, data = getNS(data)
+            attrs, data = self._parseAttributes(data)
+            files.append((filename, longname, attrs))
+        d.callback(files)
+
+    def packet_ATTRS(self, data):
+        d, data = self._parseRequest(data)
+        d.callback(self._parseAttributes(data)[0])
+
+    def packet_EXTENDED_REPLY(self, data):
+        d, data = self._parseRequest(data)
+        d.callback(data)
+
+    def gotServerVersion(self, serverVersion, extData):
+        """
+        Called when the server sends its version info.
+
+        @param serverVersion: an integer representing the version of the SFTP
+        protocol the server is claiming.
+        @param extData: a dictionary of extended_name : extended_data items.
+        These items are sent by the server to indicate additional features.
+        """
+
+class ClientFile:
+
+    interface.implements(ISFTPFile)
+
+    def __init__(self, parent, handle):
+        self.parent = parent
+        self.handle = NS(handle)
+
+    def close(self):
+        return self.parent._sendRequest(FXP_CLOSE, self.handle)
+
+    def readChunk(self, offset, length):
+        data = self.handle + struct.pack("!QL", offset, length)
+        return self.parent._sendRequest(FXP_READ, data)
+
+    def writeChunk(self, offset, chunk):
+        data = self.handle + struct.pack("!Q", offset) + NS(chunk)
+        return self.parent._sendRequest(FXP_WRITE, data)
+
+    def getAttrs(self):
+        return self.parent._sendRequest(FXP_FSTAT, self.handle)
+
+    def setAttrs(self, attrs):
+        data = self.handle + self.parent._packAttributes(attrs)
+        return self.parent._sendRequest(FXP_FSETSTAT, data)
+
+class ClientDirectory:
+
+    def __init__(self, parent, handle):
+        self.parent = parent
+        self.handle = NS(handle)
+        self.filesCache = []
+
+    def read(self):
+        d = self.parent._sendRequest(FXP_READDIR, self.handle)
+        return d
+
+    def close(self):
+        return self.parent._sendRequest(FXP_CLOSE, self.handle)
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        if self.filesCache:
+            return self.filesCache.pop(0)
+        d = self.read()
+        d.addCallback(self._cbReadDir)
+        d.addErrback(self._ebReadDir)
+        return d
+
+    def _cbReadDir(self, names):
+        self.filesCache = names[1:]
+        return names[0]
+
+    def _ebReadDir(self, reason):
+        reason.trap(EOFError)
+        def _():
+            raise StopIteration
+        self.next = _
+        return reason
+
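+
+# A usage sketch (hypothetical, not part of the upstream module): drive the
+# ClientDirectory iterator above, whose next() yields either a cached
+# (filename, longname, attrs) triple or a Deferred for the next READDIR batch,
+# and which signals the end of the listing with an EOFError failure.
+def _exampleListDirectory(client, path):
+    names = []
+
+    def _step(entry, directory):
+        if isinstance(entry, defer.Deferred):
+            entry.addCallback(_step, directory)
+            entry.addErrback(_finish, directory)
+            return entry
+        names.append(entry[0])  # keep just the filename
+        return _step(directory.next(), directory)
+
+    def _finish(reason, directory):
+        reason.trap(EOFError)
+        directory.close()
+        return names
+
+    d = client.openDirectory(path)
+    d.addCallback(lambda directory: _step(directory.next(), directory))
+    return d
+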
+
+class SFTPError(Exception):
+
+    def __init__(self, errorCode, errorMessage, lang = ''):
+        Exception.__init__(self)
+        self.code = errorCode
+        self._message = errorMessage
+        self.lang = lang
+
+
+    def message(self):
+        """
+        A string received over the network that explains the error to a human.
+        """
+        # Python 2.6 deprecates assigning to the 'message' attribute of an
+        # exception. We define this read-only property here in order to
+        # prevent the warning about deprecation while maintaining backwards
+        # compatibility with object clients that rely on the 'message'
+        # attribute being set correctly. See bug #3897.
+        return self._message
+    message = property(message)
+
+
+    def __str__(self):
+        return 'SFTPError %s: %s' % (self.code, self.message)
+
+FXP_INIT            =   1
+FXP_VERSION         =   2
+FXP_OPEN            =   3
+FXP_CLOSE           =   4
+FXP_READ            =   5
+FXP_WRITE           =   6
+FXP_LSTAT           =   7
+FXP_FSTAT           =   8
+FXP_SETSTAT         =   9
+FXP_FSETSTAT        =  10
+FXP_OPENDIR         =  11
+FXP_READDIR         =  12
+FXP_REMOVE          =  13
+FXP_MKDIR           =  14
+FXP_RMDIR           =  15
+FXP_REALPATH        =  16
+FXP_STAT            =  17
+FXP_RENAME          =  18
+FXP_READLINK        =  19
+FXP_SYMLINK         =  20
+FXP_STATUS          = 101
+FXP_HANDLE          = 102
+FXP_DATA            = 103
+FXP_NAME            = 104
+FXP_ATTRS           = 105
+FXP_EXTENDED        = 200
+FXP_EXTENDED_REPLY  = 201
+
+FILEXFER_ATTR_SIZE        = 0x00000001
+FILEXFER_ATTR_UIDGID      = 0x00000002
+FILEXFER_ATTR_OWNERGROUP  = FILEXFER_ATTR_UIDGID
+FILEXFER_ATTR_PERMISSIONS = 0x00000004
+FILEXFER_ATTR_ACMODTIME   = 0x00000008
+FILEXFER_ATTR_EXTENDED    = 0x80000000L
+
+FILEXFER_TYPE_REGULAR        = 1
+FILEXFER_TYPE_DIRECTORY      = 2
+FILEXFER_TYPE_SYMLINK        = 3
+FILEXFER_TYPE_SPECIAL        = 4
+FILEXFER_TYPE_UNKNOWN        = 5
+
+FXF_READ          = 0x00000001
+FXF_WRITE         = 0x00000002
+FXF_APPEND        = 0x00000004
+FXF_CREAT         = 0x00000008
+FXF_TRUNC         = 0x00000010
+FXF_EXCL          = 0x00000020
+FXF_TEXT          = 0x00000040
+
+FX_OK                          = 0
+FX_EOF                         = 1
+FX_NO_SUCH_FILE                = 2
+FX_PERMISSION_DENIED           = 3
+FX_FAILURE                     = 4
+FX_BAD_MESSAGE                 = 5
+FX_NO_CONNECTION               = 6
+FX_CONNECTION_LOST             = 7
+FX_OP_UNSUPPORTED              = 8
+FX_FILE_ALREADY_EXISTS         = 11
+# http://tools.ietf.org/wg/secsh/draft-ietf-secsh-filexfer/ defines more
+# useful error codes, but so far OpenSSH doesn't implement them.  We use them
+# internally for clarity, but for now define them all as FX_FAILURE to be
+# compatible with existing software.
+FX_NOT_A_DIRECTORY             = FX_FAILURE
+FX_FILE_IS_A_DIRECTORY         = FX_FAILURE
+
+
+# initialize FileTransferBase.packetTypes:
+g = globals()
+for name in g.keys():
+    if name.startswith('FXP_'):
+        value = g[name]
+        FileTransferBase.packetTypes[value] = name[4:]
+del g, name, value
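+
+
+# A client-side usage sketch (hypothetical, not part of the upstream module):
+# open a remote file read-only through a connected FileTransferClient (whose
+# version exchange has completed) and read its first kilobyte, closing the
+# handle whether the read succeeds or fails.
+def _exampleReadFirstChunk(client, path):
+    def _close(passthrough, remoteFile):
+        remoteFile.close()
+        return passthrough
+
+    def _read(remoteFile):
+        d = remoteFile.readChunk(0, 1024)
+        d.addBoth(_close, remoteFile)
+        return d
+
+    return client.openFile(path, FXF_READ, {}).addCallback(_read)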
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/forwarding.py b/ThirdParty/Twisted/twisted/conch/ssh/forwarding.py
new file mode 100644
index 0000000..753f994
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/forwarding.py
@@ -0,0 +1,181 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+
+"""
+This module contains the implementation of the TCP forwarding, which allows
+clients and servers to forward arbitrary TCP data across the connection.
+
+Maintainer: Paul Swartz
+"""
+
+import struct
+
+from twisted.internet import protocol, reactor
+from twisted.python import log
+
+import common, channel
+
+class SSHListenForwardingFactory(protocol.Factory):
+    def __init__(self, connection, hostport, klass):
+        self.conn = connection
+        self.hostport = hostport # tuple
+        self.klass = klass
+
+    def buildProtocol(self, addr):
+        channel = self.klass(conn = self.conn)
+        client = SSHForwardingClient(channel)
+        channel.client = client
+        addrTuple = (addr.host, addr.port)
+        channelOpenData = packOpen_direct_tcpip(self.hostport, addrTuple)
+        self.conn.openChannel(channel, channelOpenData)
+        return client
+
+class SSHListenForwardingChannel(channel.SSHChannel):
+
+    def channelOpen(self, specificData):
+        log.msg('opened forwarding channel %s' % self.id)
+        if len(self.client.buf)>1:
+            b = self.client.buf[1:]
+            self.write(b)
+        self.client.buf = ''
+
+    def openFailed(self, reason):
+        self.closed()
+
+    def dataReceived(self, data):
+        self.client.transport.write(data)
+
+    def eofReceived(self):
+        self.client.transport.loseConnection()
+
+    def closed(self):
+        if hasattr(self, 'client'):
+            log.msg('closing local forwarding channel %s' % self.id)
+            self.client.transport.loseConnection()
+            del self.client
+
+class SSHListenClientForwardingChannel(SSHListenForwardingChannel):
+
+    name = 'direct-tcpip'
+
+class SSHListenServerForwardingChannel(SSHListenForwardingChannel):
+
+    name = 'forwarded-tcpip'
+
+class SSHConnectForwardingChannel(channel.SSHChannel):
+
+    def __init__(self, hostport, *args, **kw):
+        channel.SSHChannel.__init__(self, *args, **kw)
+        self.hostport = hostport 
+        self.client = None
+        self.clientBuf = ''
+
+    def channelOpen(self, specificData):
+        cc = protocol.ClientCreator(reactor, SSHForwardingClient, self)
+        log.msg("connecting to %s:%i" % self.hostport)
+        cc.connectTCP(*self.hostport).addCallbacks(self._setClient, self._close)
+
+    def _setClient(self, client):
+        self.client = client
+        log.msg("connected to %s:%i" % self.hostport)
+        if self.clientBuf:
+            self.client.transport.write(self.clientBuf)
+            self.clientBuf = None
+        if self.client.buf[1:]:
+            self.write(self.client.buf[1:])
+        self.client.buf = ''
+
+    def _close(self, reason):
+        log.msg("failed to connect: %s" % reason)
+        self.loseConnection()
+
+    def dataReceived(self, data):
+        if self.client:
+            self.client.transport.write(data)
+        else:
+            self.clientBuf += data
+
+    def closed(self):
+        if self.client:
+            log.msg('closed remote forwarding channel %s' % self.id)
+            if self.client.channel:
+                self.loseConnection()
+            self.client.transport.loseConnection()
+            del self.client
+
+def openConnectForwardingClient(remoteWindow, remoteMaxPacket, data, avatar):
+    remoteHP, origHP = unpackOpen_direct_tcpip(data)
+    return SSHConnectForwardingChannel(remoteHP, 
+                                       remoteWindow=remoteWindow,
+                                       remoteMaxPacket=remoteMaxPacket,
+                                       avatar=avatar)
+
+class SSHForwardingClient(protocol.Protocol):
+
+    def __init__(self, channel):
+        self.channel = channel
+        self.buf = '\000'
+
+    def dataReceived(self, data):
+        if self.buf:
+            self.buf += data
+        else:
+            self.channel.write(data)
+
+    def connectionLost(self, reason):
+        if self.channel:
+            self.channel.loseConnection()
+            self.channel = None
+
+
+def packOpen_direct_tcpip((connHost, connPort), (origHost, origPort)):
+    """Pack the data suitable for sending in a CHANNEL_OPEN packet.
+    """
+    conn = common.NS(connHost) + struct.pack('>L', connPort)
+    orig = common.NS(origHost) + struct.pack('>L', origPort)
+    return conn + orig
+
+packOpen_forwarded_tcpip = packOpen_direct_tcpip
+
+def unpackOpen_direct_tcpip(data):
+    """Unpack the data to a usable format.
+    """
+    connHost, rest = common.getNS(data)
+    connPort = int(struct.unpack('>L', rest[:4])[0])
+    origHost, rest = common.getNS(rest[4:])
+    origPort = int(struct.unpack('>L', rest[:4])[0])
+    return (connHost, connPort), (origHost, origPort)
+
+unpackOpen_forwarded_tcpip = unpackOpen_direct_tcpip
+    
+def packGlobal_tcpip_forward((host, port)):
+    return common.NS(host) + struct.pack('>L', port)
+
+def unpackGlobal_tcpip_forward(data):
+    host, rest = common.getNS(data)
+    port = int(struct.unpack('>L', rest[:4])[0])
+    return host, port
+
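+# An illustration (hypothetical helper, not part of the upstream module) of the
+# payload carried by a 'direct-tcpip' CHANNEL_OPEN request: the destination the
+# peer should connect to, followed by the originator of the local connection.
+# The addresses are made up.
+def _examplePackDirectTcpip():
+    payload = packOpen_direct_tcpip(('www.example.com', 80),
+                                    ('127.0.0.1', 52000))
+    # the receiving side recovers both address pairs from the same bytes
+    return unpackOpen_direct_tcpip(payload)
+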
+"""This is how the data -> eof -> close stuff /should/ work.
+
+debug3: channel 1: waiting for connection
+debug1: channel 1: connected
+debug1: channel 1: read<=0 rfd 7 len 0
+debug1: channel 1: read failed
+debug1: channel 1: close_read
+debug1: channel 1: input open -> drain
+debug1: channel 1: ibuf empty
+debug1: channel 1: send eof
+debug1: channel 1: input drain -> closed
+debug1: channel 1: rcvd eof
+debug1: channel 1: output open -> drain
+debug1: channel 1: obuf empty
+debug1: channel 1: close_write
+debug1: channel 1: output drain -> closed
+debug1: channel 1: rcvd close
+debug3: channel 1: will not send data after close
+debug1: channel 1: send close
+debug1: channel 1: is dead
+"""
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/keys.py b/ThirdParty/Twisted/twisted/conch/ssh/keys.py
new file mode 100644
index 0000000..1ddb6cf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/keys.py
@@ -0,0 +1,844 @@
+# -*- test-case-name: twisted.conch.test.test_keys -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Handling of RSA and DSA keys.
+
+Maintainer: U{Paul Swartz}
+"""
+
+# base library imports
+import base64
+import itertools
+
+# external library imports
+from Crypto.Cipher import DES3, AES
+from Crypto.PublicKey import RSA, DSA
+from Crypto import Util
+from pyasn1.type import univ
+from pyasn1.codec.ber import decoder as berDecoder
+from pyasn1.codec.ber import encoder as berEncoder
+
+# twisted
+from twisted.python import randbytes
+from twisted.python.hashlib import md5, sha1
+
+# sibling imports
+from twisted.conch.ssh import common, sexpy
+
+
+
+class BadKeyError(Exception):
+    """
+    Raised when a key isn't what we expected from it.
+
+    XXX: we really need to check for bad keys
+    """
+
+
+
+class EncryptedKeyError(Exception):
+    """
+    Raised when an encrypted key is presented to fromString/fromFile without
+    a password.
+    """
+
+
+
+class Key(object):
+    """
+    An object representing a key.  A key can be either a public or
+    private key.  A public key can verify a signature; a private key can
+    create or verify a signature.  To generate a string that can be stored
+    on disk, use the toString method.  If you have a private key, but want
+    the string representation of the public key, use Key.public().toString().
+
+    @ivar keyObject: The C{Crypto.PublicKey.pubkey.pubkey} object that
+                  operations are performed with.
+    """
+
+    def fromFile(Class, filename, type=None, passphrase=None):
+        """
+        Return a Key object corresponding to the data in filename.  type
+        and passphrase function as they do in fromString.
+        """
+        return Class.fromString(file(filename, 'rb').read(), type, passphrase)
+    fromFile = classmethod(fromFile)
+
+
+    def fromString(Class, data, type=None, passphrase=None):
+        """
+        Return a Key object corresponding to the string data.
+        type is optionally the type of string, matching a _fromString_*
+        method.  Otherwise, the _guessStringType() classmethod will be used
+        to guess a type.  If the key is encrypted, passphrase is used as
+        the decryption key.
+
+        @type data: C{str}
+        @type type: C{None}/C{str}
+        @type passphrase: C{None}/C{str}
+        @rtype: C{Key}
+        """
+        if type is None:
+            type = Class._guessStringType(data)
+        if type is None:
+            raise BadKeyError('cannot guess the type of %r' % data)
+        method = getattr(Class, '_fromString_%s' % type.upper(), None)
+        if method is None:
+            raise BadKeyError('no _fromString method for %s' % type)
+        if method.func_code.co_argcount == 2:  # no passphrase
+            if passphrase:
+                raise BadKeyError('key not encrypted')
+            return method(data)
+        else:
+            return method(data, passphrase)
+    fromString = classmethod(fromString)
+
+
+    def _fromString_BLOB(Class, blob):
+        """
+        Return a public key object corresponding to this public key blob.
+        The format of a RSA public key blob is::
+            string 'ssh-rsa'
+            integer e
+            integer n
+
+        The format of a DSA public key blob is::
+            string 'ssh-dss'
+            integer p
+            integer q
+            integer g
+            integer y
+
+        @type blob: C{str}
+        @return: a C{Crypto.PublicKey.pubkey.pubkey} object
+        @raises BadKeyError: if the key type (the first string) is unknown.
+        """
+        keyType, rest = common.getNS(blob)
+        if keyType == 'ssh-rsa':
+            e, n, rest = common.getMP(rest, 2)
+            return Class(RSA.construct((n, e)))
+        elif keyType == 'ssh-dss':
+            p, q, g, y, rest = common.getMP(rest, 4)
+            return Class(DSA.construct((y, g, p, q)))
+        else:
+            raise BadKeyError('unknown blob type: %s' % keyType)
+    _fromString_BLOB = classmethod(_fromString_BLOB)
+
+
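+    def _exampleRSABlob(Class):
+        """
+        A hedged illustration (hypothetical helper, not part of the upstream
+        class): build an 'ssh-rsa' public key blob by hand in the layout
+        documented by _fromString_BLOB, using toy numbers that are not a real
+        key.
+        """
+        e, n = 65537L, 3233L  # toy values for illustration only
+        return common.NS('ssh-rsa') + common.MP(e) + common.MP(n)
+    _exampleRSABlob = classmethod(_exampleRSABlob)
+
+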
+    def _fromString_PRIVATE_BLOB(Class, blob):
+        """
+        Return a private key object corresponding to this private key blob.
+        The blob formats are as follows:
+
+        RSA keys::
+            string 'ssh-rsa'
+            integer n
+            integer e
+            integer d
+            integer u
+            integer p
+            integer q
+
+        DSA keys::
+            string 'ssh-dss'
+            integer p
+            integer q
+            integer g
+            integer y
+            integer x
+
+        @type blob: C{str}
+        @return: a C{Crypto.PublicKey.pubkey.pubkey} object
+        @raises BadKeyError: if the key type (the first string) is unknown.
+        """
+        keyType, rest = common.getNS(blob)
+
+        if keyType == 'ssh-rsa':
+            n, e, d, u, p, q, rest = common.getMP(rest, 6)
+            rsakey = Class(RSA.construct((n, e, d, p, q, u)))
+            return rsakey
+        elif keyType == 'ssh-dss':
+            p, q, g, y, x, rest = common.getMP(rest, 5)
+            dsakey = Class(DSA.construct((y, g, p, q, x)))
+            return dsakey
+        else:
+            raise BadKeyError('unknown blob type: %s' % keyType)
+    _fromString_PRIVATE_BLOB = classmethod(_fromString_PRIVATE_BLOB)
+
+
+    def _fromString_PUBLIC_OPENSSH(Class, data):
+        """
+        Return a public key object corresponding to this OpenSSH public key
+        string.  The format of an OpenSSH public key string is::
+            <key type> <base64-encoded public key blob>
+
+        @type data: C{str}
+        @return: a C{Crypto.PublicKey.pubkey.pubkey} object
+        @raises BadKeyError: if the blob type is unknown.
+        """
+        blob = base64.decodestring(data.split()[1])
+        return Class._fromString_BLOB(blob)
+    _fromString_PUBLIC_OPENSSH = classmethod(_fromString_PUBLIC_OPENSSH)
+
+
+    def _fromString_PRIVATE_OPENSSH(Class, data, passphrase):
+        """
+        Return a private key object corresponding to this OpenSSH private key
+        string.  If the key is encrypted, passphrase MUST be provided.
+        Providing a passphrase for an unencrypted key is an error.
+
+        The format of an OpenSSH private key string is::
+            -----BEGIN <key type> PRIVATE KEY-----
+            [Proc-Type: 4,ENCRYPTED
+            DEK-Info: DES-EDE3-CBC,<initialization value>]
+            <base64-encoded ASN.1 structure>
+            ------END <key type> PRIVATE KEY------
+
+        The ASN.1 structure of a RSA key is::
+            (0, n, e, d, p, q)
+
+        The ASN.1 structure of a DSA key is::
+            (0, p, q, g, y, x)
+
+        @type data: C{str}
+        @type passphrase: C{str}
+        @return: a C{Crypto.PublicKey.pubkey.pubkey} object
+        @raises BadKeyError: if
+            * a passphrase is provided for an unencrypted key
+            * the ASN.1 encoding is incorrect
+        @raises EncryptedKeyError: if
+            * a passphrase is not provided for an encrypted key
+        """
+        lines = data.strip().split('\n')
+        kind = lines[0][11:14]
+        if lines[1].startswith('Proc-Type: 4,ENCRYPTED'):  # encrypted key
+            try:
+                _, cipher_iv_info = lines[2].split(' ', 1)
+                cipher, ivdata = cipher_iv_info.rstrip().split(',', 1)
+            except ValueError:
+                raise BadKeyError('invalid DEK-info %r' % lines[2])
+            if cipher == 'AES-128-CBC':
+                CipherClass = AES
+                keySize = 16
+                if len(ivdata) != 32:
+                    raise BadKeyError('AES encrypted key with a bad IV')
+            elif cipher == 'DES-EDE3-CBC':
+                CipherClass = DES3
+                keySize = 24
+                if len(ivdata) != 16:
+                    raise BadKeyError('DES encrypted key with a bad IV')
+            else:
+                raise BadKeyError('unknown encryption type %r' % cipher)
+            iv = ''.join([chr(int(ivdata[i:i + 2], 16))
+                          for i in range(0, len(ivdata), 2)])
+            if not passphrase:
+                raise EncryptedKeyError('encrypted key with no passphrase')
+            ba = md5(passphrase + iv[:8]).digest()
+            bb = md5(ba + passphrase + iv[:8]).digest()
+            decKey = (ba + bb)[:keySize]
+            b64Data = base64.decodestring(''.join(lines[3:-1]))
+            keyData = CipherClass.new(decKey,
+                                      CipherClass.MODE_CBC,
+                                      iv).decrypt(b64Data)
+            removeLen = ord(keyData[-1])
+            keyData = keyData[:-removeLen]
+        else:
+            b64Data = ''.join(lines[1:-1])
+            keyData = base64.decodestring(b64Data)
+        try:
+            decodedKey = berDecoder.decode(keyData)[0]
+        except Exception:
+            raise BadKeyError('Failed to decode key')
+        if kind == 'RSA':
+            if len(decodedKey) == 2:  # alternate RSA key
+                decodedKey = decodedKey[0]
+            if len(decodedKey) < 6:
+                raise BadKeyError('RSA key failed to decode properly')
+            n, e, d, p, q = [long(value) for value in decodedKey[1:6]]
+            if p > q:  # make p smaller than q
+                p, q = q, p
+            return Class(RSA.construct((n, e, d, p, q)))
+        elif kind == 'DSA':
+            if len(decodedKey) < 6:
+                raise BadKeyError('DSA key failed to decode properly')
+            p, q, g, y, x = [long(value) for value in decodedKey[1: 6]]
+            return Class(DSA.construct((y, g, p, q, x)))
+    _fromString_PRIVATE_OPENSSH = classmethod(_fromString_PRIVATE_OPENSSH)
+
+
+    def _fromString_PUBLIC_LSH(Class, data):
+        """
+        Return a public key corresponding to this LSH public key string.
+        The LSH public key string format is::
+            <s-expression: ('public-key', (<key type>, (<name>, <value>)+))>
+
+        The names for an RSA (key type 'rsa-pkcs1-sha1') key are: n, e.
+        The names for a DSA (key type 'dsa') key are: y, g, p, q.
+
+        @type data: C{str}
+        @return: a C{Crypto.PublicKey.pubkey.pubkey} object
+        @raises BadKeyError: if the key type is unknown
+        """
+        sexp = sexpy.parse(base64.decodestring(data[1:-1]))
+        assert sexp[0] == 'public-key'
+        kd = {}
+        for name, data in sexp[1][1:]:
+            kd[name] = common.getMP(common.NS(data))[0]
+        if sexp[1][0] == 'dsa':
+            return Class(DSA.construct((kd['y'], kd['g'], kd['p'], kd['q'])))
+        elif sexp[1][0] == 'rsa-pkcs1-sha1':
+            return Class(RSA.construct((kd['n'], kd['e'])))
+        else:
+            raise BadKeyError('unknown lsh key type %s' % sexp[1][0])
+    _fromString_PUBLIC_LSH = classmethod(_fromString_PUBLIC_LSH)
+
+
+    def _fromString_PRIVATE_LSH(Class, data):
+        """
+        Return a private key corresponding to this LSH private key string.
+        The LSH private key string format is::
+            <s-expression: ('private-key', (<key type>, (<name>, <value>)+))>
+
+        The names for an RSA (key type 'rsa-pkcs1-sha1') key are: n, e, d, p, q.
+        The names for a DSA (key type 'dsa') key are: y, g, p, q, x.
+
+        @type data: C{str}
+        @return: a C{Crypto.PublicKey.pubkey.pubkey} object
+        @raises BadKeyError: if the key type is unknown
+        """
+        sexp = sexpy.parse(data)
+        assert sexp[0] == 'private-key'
+        kd = {}
+        for name, data in sexp[1][1:]:
+            kd[name] = common.getMP(common.NS(data))[0]
+        if sexp[1][0] == 'dsa':
+            assert len(kd) == 5, len(kd)
+            return Class(DSA.construct((kd['y'], kd['g'], kd['p'],
+                                        kd['q'], kd['x'])))
+        elif sexp[1][0] == 'rsa-pkcs1':
+            assert len(kd) == 8, len(kd)
+            if kd['p'] > kd['q']:  # make p smaller than q
+                kd['p'], kd['q'] = kd['q'], kd['p']
+            return Class(RSA.construct((kd['n'], kd['e'], kd['d'],
+                                        kd['p'], kd['q'])))
+        else:
+            raise BadKeyError('unknown lsh key type %s' % sexp[1][0])
+    _fromString_PRIVATE_LSH = classmethod(_fromString_PRIVATE_LSH)
+
+
+    def _fromString_AGENTV3(Class, data):
+        """
+        Return a private key object corresponding to the Secure Shell Key
+        Agent v3 format.
+
+        The SSH Key Agent v3 format for an RSA key is::
+            string 'ssh-rsa'
+            integer e
+            integer d
+            integer n
+            integer u
+            integer p
+            integer q
+
+        The SSH Key Agent v3 format for a DSA key is::
+            string 'ssh-dss'
+            integer p
+            integer q
+            integer g
+            integer y
+            integer x
+
+        @type data: C{str}
+        @return: a C{Crypto.PublicKey.pubkey.pubkey} object
+        @raises BadKeyError: if the key type (the first string) is unknown
+        """
+        keyType, data = common.getNS(data)
+        if keyType == 'ssh-dss':
+            p, data = common.getMP(data)
+            q, data = common.getMP(data)
+            g, data = common.getMP(data)
+            y, data = common.getMP(data)
+            x, data = common.getMP(data)
+            return Class(DSA.construct((y, g, p, q, x)))
+        elif keyType == 'ssh-rsa':
+            e, data = common.getMP(data)
+            d, data = common.getMP(data)
+            n, data = common.getMP(data)
+            u, data = common.getMP(data)
+            p, data = common.getMP(data)
+            q, data = common.getMP(data)
+            return Class(RSA.construct((n, e, d, p, q, u)))
+        else:
+            raise BadKeyError("unknown key type %s" % keyType)
+    _fromString_AGENTV3 = classmethod(_fromString_AGENTV3)
+
+
+    def _guessStringType(Class, data):
+        """
+        Guess the type of key in data.  The types map to _fromString_*
+        methods.
+        """
+        if data.startswith('ssh-'):
+            return 'public_openssh'
+        elif data.startswith('-----BEGIN'):
+            return 'private_openssh'
+        elif data.startswith('{'):
+            return 'public_lsh'
+        elif data.startswith('('):
+            return 'private_lsh'
+        elif data.startswith('\x00\x00\x00\x07ssh-'):
+            ignored, rest = common.getNS(data)
+            count = 0
+            while rest:
+                count += 1
+                ignored, rest = common.getMP(rest)
+            if count > 4:
+                return 'agentv3'
+            else:
+                return 'blob'
+    _guessStringType = classmethod(_guessStringType)
+
+
+    def __init__(self, keyObject):
+        """
+        Initialize this key with a C{Crypto.PublicKey.pubkey.pubkey}
+        object.
+
+        @type keyObject: C{Crypto.PublicKey.pubkey.pubkey}
+        """
+        self.keyObject = keyObject
+
+
+    def __eq__(self, other):
+        """
+        Return True if other represents an object with the same key.
+        """
+        if type(self) == type(other):
+            return self.type() == other.type() and self.data() == other.data()
+        else:
+            return NotImplemented
+
+
+    def __ne__(self, other):
+        """
+        Return True if other represents anything other than this key.
+        """
+        result = self.__eq__(other)
+        if result == NotImplemented:
+            return result
+        return not result
+
+
+    def __repr__(self):
+        """
+        Return a pretty representation of this object.
+        """
+        lines = [
+            '<%s %s (%s bits)' % (
+                self.type(),
+                self.isPublic() and 'Public Key' or 'Private Key',
+                self.keyObject.size())]
+        for k, v in sorted(self.data().items()):
+            lines.append('attr %s:' % k)
+            by = common.MP(v)[4:]
+            while by:
+                m = by[:15]
+                by = by[15:]
+                o = ''
+                for c in m:
+                    o = o + '%02x:' % ord(c)
+                if len(m) < 15:
+                    o = o[:-1]
+                lines.append('\t' + o)
+        lines[-1] = lines[-1] + '>'
+        return '\n'.join(lines)
+
+
+    def isPublic(self):
+        """
+        Returns True if this Key is a public key.
+        """
+        return not self.keyObject.has_private()
+
+
+    def public(self):
+        """
+        Returns a version of this key containing only the public key data.
+        If this is a public key, this may or may not be the same object
+        as self.
+        """
+        return Key(self.keyObject.publickey())
+
+
+    def fingerprint(self):
+        """
+        Get the user presentation of the fingerprint of this L{Key}.  As
+        described by U{RFC 4716 section
+        4<http://tools.ietf.org/html/rfc4716#section-4>}::
+
+            The fingerprint of a public key consists of the output of the MD5
+            message-digest algorithm [RFC1321].  The input to the algorithm is
+            the public key data as specified by [RFC4253].  (...)  The output
+            of the (MD5) algorithm is presented to the user as a sequence of 16
+            octets printed as hexadecimal with lowercase letters and separated
+            by colons.
+
+        @since: 8.2
+
+        @return: the user presentation of this L{Key}'s fingerprint, as a
+        string.
+
+        @rtype: L{str}
+        """
+        return ':'.join([x.encode('hex') for x in md5(self.blob()).digest()])
+
+
+    def type(self):
+        """
+        Return the type of the object we wrap.  Currently this can only be
+        'RSA' or 'DSA'.
+        """
+        # the class is Crypto.PublicKey.<type>.<stuff we don't care about>
+        mod = self.keyObject.__class__.__module__
+        if mod.startswith('Crypto.PublicKey'):
+            type = mod.split('.')[2]
+        else:
+            raise RuntimeError('unknown type of object: %r' % self.keyObject)
+        if type in ('RSA', 'DSA'):
+            return type
+        else:
+            raise RuntimeError('unknown type of key: %s' % type)
+
+
+    def sshType(self):
+        """
+        Return the type of the object we wrap as defined in the ssh protocol.
+        Currently this can only be 'ssh-rsa' or 'ssh-dss'.
+        """
+        return {'RSA': 'ssh-rsa', 'DSA': 'ssh-dss'}[self.type()]
+
+
+    def data(self):
+        """
+        Return the values of the public key as a dictionary.
+
+        @rtype: C{dict}
+        """
+        keyData = {}
+        for name in self.keyObject.keydata:
+            value = getattr(self.keyObject, name, None)
+            if value is not None:
+                keyData[name] = value
+        return keyData
+
+
+    def blob(self):
+        """
+        Return the public key blob for this key.  The blob is the
+        over-the-wire format for public keys:
+
+        RSA keys::
+            string  'ssh-rsa'
+            integer e
+            integer n
+
+        DSA keys::
+            string  'ssh-dss'
+            integer p
+            integer q
+            integer g
+            integer y
+
+        @rtype: C{str}
+        """
+        type = self.type()
+        data = self.data()
+        if type == 'RSA':
+            return (common.NS('ssh-rsa') + common.MP(data['e']) +
+                    common.MP(data['n']))
+        elif type == 'DSA':
+            return (common.NS('ssh-dss') + common.MP(data['p']) +
+                    common.MP(data['q']) + common.MP(data['g']) +
+                    common.MP(data['y']))
+
+
+    def privateBlob(self):
+        """
+        Return the private key blob for this key.  The blob is the
+        over-the-wire format for private keys:
+
+        RSA keys::
+            string 'ssh-rsa'
+            integer n
+            integer e
+            integer d
+            integer u
+            integer p
+            integer q
+
+        DSA keys::
+            string 'ssh-dss'
+            integer p
+            integer q
+            integer g
+            integer y
+            integer x
+        """
+        type = self.type()
+        data = self.data()
+        if type == 'RSA':
+            return (common.NS('ssh-rsa') + common.MP(data['n']) +
+                    common.MP(data['e']) + common.MP(data['d']) +
+                    common.MP(data['u']) + common.MP(data['p']) +
+                    common.MP(data['q']))
+        elif type == 'DSA':
+            return (common.NS('ssh-dss') + common.MP(data['p']) +
+                    common.MP(data['q']) + common.MP(data['g']) +
+                    common.MP(data['y']) + common.MP(data['x']))
+
+
+    def toString(self, type, extra=None):
+        """
+        Create a string representation of this key.  If the key is a private
+        key and you want the representation of its public key, use
+        C{key.public().toString()}.  type maps to a _toString_* method.
+
+        @param type: The type of string to emit.  Currently supported values
+            are C{'OPENSSH'}, C{'LSH'}, and C{'AGENTV3'}.
+        @type type: L{str}
+
+        @param extra: Any extra data supported by the selected format which
+            is not part of the key itself.  For public OpenSSH keys, this is
+            a comment.  For private OpenSSH keys, this is a passphrase to
+            encrypt with.
+        @type extra: L{str} or L{NoneType}
+
+        @rtype: L{str}
+        """
+        method = getattr(self, '_toString_%s' % type.upper(), None)
+        if method is None:
+            raise BadKeyError('unknown type: %s' % type)
+        if method.func_code.co_argcount == 2:
+            return method(extra)
+        else:
+            return method()
+
+
+    def _toString_OPENSSH(self, extra):
+        """
+        Return a public or private OpenSSH string.  See
+        _fromString_PUBLIC_OPENSSH and _fromString_PRIVATE_OPENSSH for the
+        string formats.  If extra is present, it represents a comment for a
+        public key, or a passphrase for a private key.
+
+        @type extra: C{str}
+        @rtype: C{str}
+        """
+        data = self.data()
+        if self.isPublic():
+            b64Data = base64.encodestring(self.blob()).replace('\n', '')
+            if not extra:
+                extra = ''
+            return ('%s %s %s' % (self.sshType(), b64Data, extra)).strip()
+        else:
+            lines = ['-----BEGIN %s PRIVATE KEY-----' % self.type()]
+            if self.type() == 'RSA':
+                p, q = data['p'], data['q']
+                objData = (0, data['n'], data['e'], data['d'], q, p,
+                           data['d'] % (q - 1), data['d'] % (p - 1),
+                           data['u'])
+            else:
+                objData = (0, data['p'], data['q'], data['g'], data['y'],
+                           data['x'])
+            asn1Sequence = univ.Sequence()
+            for index, value in itertools.izip(itertools.count(), objData):
+                asn1Sequence.setComponentByPosition(index, univ.Integer(value))
+            asn1Data = berEncoder.encode(asn1Sequence)
+            if extra:
+                iv = randbytes.secureRandom(8)
+                hexiv = ''.join(['%02X' % ord(x) for x in iv])
+                lines.append('Proc-Type: 4,ENCRYPTED')
+                lines.append('DEK-Info: DES-EDE3-CBC,%s\n' % hexiv)
+                ba = md5(extra + iv).digest()
+                bb = md5(ba + extra + iv).digest()
+                encKey = (ba + bb)[:24]
+                padLen = 8 - (len(asn1Data) % 8)
+                asn1Data += (chr(padLen) * padLen)
+                asn1Data = DES3.new(encKey, DES3.MODE_CBC,
+                                    iv).encrypt(asn1Data)
+            b64Data = base64.encodestring(asn1Data).replace('\n', '')
+            lines += [b64Data[i:i + 64] for i in range(0, len(b64Data), 64)]
+            lines.append('-----END %s PRIVATE KEY-----' % self.type())
+            return '\n'.join(lines)
+
+
+    def _toString_LSH(self):
+        """
+        Return a public or private LSH key.  See _fromString_PUBLIC_LSH and
+        _fromString_PRIVATE_LSH for the key formats.
+
+        @rtype: C{str}
+        """
+        data = self.data()
+        if self.isPublic():
+            if self.type() == 'RSA':
+                keyData = sexpy.pack([['public-key',
+                                       ['rsa-pkcs1-sha1',
+                                        ['n', common.MP(data['n'])[4:]],
+                                        ['e', common.MP(data['e'])[4:]]]]])
+            elif self.type() == 'DSA':
+                keyData = sexpy.pack([['public-key',
+                                       ['dsa',
+                                        ['p', common.MP(data['p'])[4:]],
+                                        ['q', common.MP(data['q'])[4:]],
+                                        ['g', common.MP(data['g'])[4:]],
+                                        ['y', common.MP(data['y'])[4:]]]]])
+            return '{' + base64.encodestring(keyData).replace('\n', '') + '}'
+        else:
+            if self.type() == 'RSA':
+                p, q = data['p'], data['q']
+                return sexpy.pack([['private-key',
+                                    ['rsa-pkcs1',
+                                     ['n', common.MP(data['n'])[4:]],
+                                     ['e', common.MP(data['e'])[4:]],
+                                     ['d', common.MP(data['d'])[4:]],
+                                     ['p', common.MP(q)[4:]],
+                                     ['q', common.MP(p)[4:]],
+                                     ['a', common.MP(data['d'] % (q - 1))[4:]],
+                                     ['b', common.MP(data['d'] % (p - 1))[4:]],
+                                     ['c', common.MP(data['u'])[4:]]]]])
+            elif self.type() == 'DSA':
+                return sexpy.pack([['private-key',
+                                    ['dsa',
+                                     ['p', common.MP(data['p'])[4:]],
+                                     ['q', common.MP(data['q'])[4:]],
+                                     ['g', common.MP(data['g'])[4:]],
+                                     ['y', common.MP(data['y'])[4:]],
+                                     ['x', common.MP(data['x'])[4:]]]]])
+
+
+    def _toString_AGENTV3(self):
+        """
+        Return a private Secure Shell Agent v3 key.  See
+        _fromString_AGENTV3 for the key format.
+
+        @rtype: C{str}
+        """
+        data = self.data()
+        if not self.isPublic():
+            if self.type() == 'RSA':
+                values = (data['e'], data['d'], data['n'], data['u'],
+                          data['p'], data['q'])
+            elif self.type() == 'DSA':
+                values = (data['p'], data['q'], data['g'], data['y'],
+                          data['x'])
+            return common.NS(self.sshType()) + ''.join(map(common.MP, values))
+
+
+    def sign(self, data):
+        """
+        Returns a signature with this Key.
+
+        @type data: C{str}
+        @rtype: C{str}
+        """
+        if self.type() == 'RSA':
+            digest = pkcs1Digest(data, self.keyObject.size() / 8)
+            signature = self.keyObject.sign(digest, '')[0]
+            ret = common.NS(Util.number.long_to_bytes(signature))
+        elif self.type() == 'DSA':
+            digest = sha1(data).digest()
+            randomBytes = randbytes.secureRandom(19)
+            sig = self.keyObject.sign(digest, randomBytes)
+            # SSH insists that the DSS signature blob be two 160-bit integers
+            # concatenated together. The sig[0], [1] numbers from obj.sign
+            # are just numbers, and could be any length from 0 to 160 bits.
+            # Make sure they are padded out to 160 bits (20 bytes each)
+            ret = common.NS(Util.number.long_to_bytes(sig[0], 20) +
+                            Util.number.long_to_bytes(sig[1], 20))
+        return common.NS(self.sshType()) + ret
+
+
+    def verify(self, signature, data):
+        """
+        Returns true if the signature for data is valid for this Key.
+
+        @type signature: C{str}
+        @type data: C{str}
+        @rtype: C{bool}
+        """
+        if len(signature) == 40:
+            # DSA key with no padding
+            signatureType, signature = 'ssh-dss', common.NS(signature)
+        else:
+            signatureType, signature = common.getNS(signature)
+        if signatureType != self.sshType():
+            return False
+        if self.type() == 'RSA':
+            numbers = common.getMP(signature)
+            digest = pkcs1Digest(data, self.keyObject.size() / 8)
+        elif self.type() == 'DSA':
+            signature = common.getNS(signature)[0]
+            numbers = [Util.number.bytes_to_long(n) for n in signature[:20],
+                       signature[20:]]
+            digest = sha1(data).digest()
+        return self.keyObject.verify(digest, numbers)
+
+
+
+def objectType(obj):
+    """
+    Return the SSH key type corresponding to a
+    C{Crypto.PublicKey.pubkey.pubkey} object.
+
+    @type obj:  C{Crypto.PublicKey.pubkey.pubkey}
+    @rtype:     C{str}
+    """
+    keyDataMapping = {
+        ('n', 'e', 'd', 'p', 'q'): 'ssh-rsa',
+        ('n', 'e', 'd', 'p', 'q', 'u'): 'ssh-rsa',
+        ('y', 'g', 'p', 'q', 'x'): 'ssh-dss'
+    }
+    try:
+        return keyDataMapping[tuple(obj.keydata)]
+    except (KeyError, AttributeError):
+        raise BadKeyError("invalid key object", obj)
+
+
+
+def pkcs1Pad(data, messageLength):
+    """
+    Pad out data to messageLength according to the PKCS#1 standard.
+    @type data: C{str}
+    @type messageLength: C{int}
+    """
+    lenPad = messageLength - 2 - len(data)
+    return '\x01' + ('\xff' * lenPad) + '\x00' + data
+
+
+
+def pkcs1Digest(data, messageLength):
+    """
+    Create a message digest using the SHA1 hash algorithm according to the
+    PKCS#1 standard.
+    @type data: C{str}
+    @type messageLength: C{int}
+    """
+    digest = sha1(data).digest()
+    return pkcs1Pad(ID_SHA1 + digest, messageLength)
+
+
+
+def lenSig(obj):
+    """
+    Return the length of the signature in bytes for a key object.
+
+    @type obj: C{Crypto.PublicKey.pubkey.pubkey}
+    @rtype: C{long}
+    """
+    return obj.size() / 8
+
+
+ID_SHA1 = '\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'
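A minimal usage sketch for the Key wrapper defined above, assuming PyCrypto
and this keys module are importable; the 1024-bit size and printed values are
for illustration only:

    import os
    from Crypto.PublicKey import RSA
    from twisted.conch.ssh.keys import Key

    key = Key(RSA.generate(1024, os.urandom))   # wrap a PyCrypto key object
    print key.type()                            # 'RSA'
    print key.fingerprint()                     # colon-separated MD5 of key.blob()
    print key.public().toString('OPENSSH')      # one-line 'ssh-rsa AAAA...' string
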
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/service.py b/ThirdParty/Twisted/twisted/conch/ssh/service.py
new file mode 100644
index 0000000..b5477c4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/service.py
@@ -0,0 +1,48 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+The parent class for all the SSH services.  Currently implemented services
+are ssh-userauth and ssh-connection.
+
+Maintainer: Paul Swartz
+"""
+
+
+from twisted.python import log
+
+class SSHService(log.Logger):
+    name = None # this is the ssh name for the service
+    protocolMessages = {} # these map #'s -> protocol names
+    transport = None # gets set later
+
+    def serviceStarted(self):
+        """
+        called when the service is active on the transport.
+        """
+
+    def serviceStopped(self):
+        """
+        called when the service is stopped, either by the connection ending
+        or by another service being started
+        """
+
+    def logPrefix(self):
+        return "SSHService %s on %s" % (self.name,
+                self.transport.transport.logPrefix())
+
+    def packetReceived(self, messageNum, packet):
+        """
+        called when we receive a packet on the transport
+        """
+        #print self.protocolMessages
+        if messageNum in self.protocolMessages:
+            messageType = self.protocolMessages[messageNum]
+            f = getattr(self, 'ssh_%s' % messageType[4:], None)
+            if f is not None:
+                return f(packet)
+        log.msg("couldn't handle %r" % messageNum)
+        log.msg(repr(packet))
+        self.transport.sendUnimplemented()
+
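A hedged sketch of how a concrete service plugs into the packetReceived()
dispatch above (message names map to ssh_* methods with the 'MSG_' prefix
stripped); the service name and message number here are invented:

    from twisted.conch.ssh import service

    class ExampleService(service.SSHService):
        name = 'example@example.org'                  # hypothetical service name
        protocolMessages = {200: 'MSG_EXAMPLE_PING'}  # hypothetical message number

        def ssh_EXAMPLE_PING(self, packet):
            # packetReceived() strips 'MSG_' and dispatches here.
            self.transport.sendPacket(200, packet)    # echo the payload back
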
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/session.py b/ThirdParty/Twisted/twisted/conch/ssh/session.py
new file mode 100644
index 0000000..e9eca3e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/session.py
@@ -0,0 +1,348 @@
+# -*- test-case-name: twisted.conch.test.test_session -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module contains the implementation of SSHSession, which (by default)
+allows access to a shell and a python interpreter over SSH.
+
+Maintainer: Paul Swartz
+"""
+
+import struct
+import signal
+import sys
+import os
+from zope.interface import implements
+
+from twisted.internet import interfaces, protocol
+from twisted.python import log
+from twisted.conch.interfaces import ISession
+from twisted.conch.ssh import common, channel
+
+class SSHSession(channel.SSHChannel):
+
+    name = 'session'
+    def __init__(self, *args, **kw):
+        channel.SSHChannel.__init__(self, *args, **kw)
+        self.buf = ''
+        self.client = None
+        self.session = None
+
+    def request_subsystem(self, data):
+        subsystem, ignored = common.getNS(data)
+        log.msg('asking for subsystem "%s"' % subsystem)
+        client = self.avatar.lookupSubsystem(subsystem, data)
+        if client:
+            pp = SSHSessionProcessProtocol(self)
+            proto = wrapProcessProtocol(pp)
+            client.makeConnection(proto)
+            pp.makeConnection(wrapProtocol(client))
+            self.client = pp
+            return 1
+        else:
+            log.msg('failed to get subsystem')
+            return 0
+
+    def request_shell(self, data):
+        log.msg('getting shell')
+        if not self.session:
+            self.session = ISession(self.avatar)
+        try:
+            pp = SSHSessionProcessProtocol(self)
+            self.session.openShell(pp)
+        except:
+            log.deferr()
+            return 0
+        else:
+            self.client = pp
+            return 1
+
+    def request_exec(self, data):
+        if not self.session:
+            self.session = ISession(self.avatar)
+        f, data = common.getNS(data)
+        log.msg('executing command "%s"' % f)
+        try:
+            pp = SSHSessionProcessProtocol(self)
+            self.session.execCommand(pp, f)
+        except:
+            log.deferr()
+            return 0
+        else:
+            self.client = pp
+            return 1
+
+    def request_pty_req(self, data):
+        if not self.session:
+            self.session = ISession(self.avatar)
+        term, windowSize, modes = parseRequest_pty_req(data)
+        log.msg('pty request: %s %s' % (term, windowSize))
+        try:
+            self.session.getPty(term, windowSize, modes)
+        except:
+            log.err()
+            return 0
+        else:
+            return 1
+
+    def request_window_change(self, data):
+        if not self.session:
+            self.session = ISession(self.avatar)
+        winSize = parseRequest_window_change(data)
+        try:
+            self.session.windowChanged(winSize)
+        except:
+            log.msg('error changing window size')
+            log.err()
+            return 0
+        else:
+            return 1
+
+    def dataReceived(self, data):
+        if not self.client:
+            #self.conn.sendClose(self)
+            self.buf += data
+            return
+        self.client.transport.write(data)
+
+    def extReceived(self, dataType, data):
+        if dataType == connection.EXTENDED_DATA_STDERR:
+            if self.client and hasattr(self.client.transport, 'writeErr'):
+                self.client.transport.writeErr(data)
+        else:
+            log.msg('weird extended data: %s' % dataType)
+
+    def eofReceived(self):
+        if self.session:
+            self.session.eofReceived()
+        elif self.client:
+            self.conn.sendClose(self)
+
+    def closed(self):
+        if self.session:
+            self.session.closed()
+        elif self.client:
+            self.client.transport.loseConnection()
+
+    #def closeReceived(self):
+    #    self.loseConnection() # don't know what to do with this
+
+    def loseConnection(self):
+        if self.client:
+            self.client.transport.loseConnection()
+        channel.SSHChannel.loseConnection(self)
+
+class _ProtocolWrapper(protocol.ProcessProtocol):
+    """
+    This class wraps a L{Protocol} instance in a L{ProcessProtocol} instance.
+    """
+    def __init__(self, proto):
+        self.proto = proto
+
+    def connectionMade(self): self.proto.connectionMade()
+
+    def outReceived(self, data): self.proto.dataReceived(data)
+
+    def processEnded(self, reason): self.proto.connectionLost(reason)
+
+class _DummyTransport:
+
+    def __init__(self, proto):
+        self.proto = proto
+
+    def dataReceived(self, data):
+        self.proto.transport.write(data)
+
+    def write(self, data):
+        self.proto.dataReceived(data)
+
+    def writeSequence(self, seq):
+        self.write(''.join(seq))
+
+    def loseConnection(self):
+        self.proto.connectionLost(protocol.connectionDone)
+
+def wrapProcessProtocol(inst):
+    if isinstance(inst, protocol.Protocol):
+        return _ProtocolWrapper(inst)
+    else:
+        return inst
+
+def wrapProtocol(proto):
+    return _DummyTransport(proto)
+
+
+
+# SUPPORTED_SIGNALS is a list of signals that every session channel is supposed
+# to accept.  See RFC 4254
+SUPPORTED_SIGNALS = ["ABRT", "ALRM", "FPE", "HUP", "ILL", "INT", "KILL",
+                     "PIPE", "QUIT", "SEGV", "TERM", "USR1", "USR2"]
+
+
+
+class SSHSessionProcessProtocol(protocol.ProcessProtocol):
+    """I am both an L{IProcessProtocol} and an L{ITransport}.
+
+    I am a transport to the remote endpoint and a process protocol to the
+    local subsystem.
+    """
+
+    implements(interfaces.ITransport)
+
+    # once initialized, a dictionary mapping signal values to strings
+    # that follow RFC 4254.
+    _signalValuesToNames = None
+
+    def __init__(self, session):
+        self.session = session
+        self.lostOutOrErrFlag = False
+
+    def connectionMade(self):
+        if self.session.buf:
+            self.transport.write(self.session.buf)
+            self.session.buf = None
+
+    def outReceived(self, data):
+        self.session.write(data)
+
+    def errReceived(self, err):
+        self.session.writeExtended(connection.EXTENDED_DATA_STDERR, err)
+
+    def outConnectionLost(self):
+        """
+        EOF should only be sent when both STDOUT and STDERR have been closed.
+        """
+        if self.lostOutOrErrFlag:
+            self.session.conn.sendEOF(self.session)
+        else:
+            self.lostOutOrErrFlag = True
+
+    def errConnectionLost(self):
+        """
+        See outConnectionLost().
+        """
+        self.outConnectionLost()
+
+    def connectionLost(self, reason=None):
+        self.session.loseConnection()
+
+
+    def _getSignalName(self, signum):
+        """
+        Get a signal name given a signal number.
+        """
+        if self._signalValuesToNames is None:
+            self._signalValuesToNames = {}
+            # make sure that the POSIX ones are the defaults
+            for signame in SUPPORTED_SIGNALS:
+                signame = 'SIG' + signame
+                sigvalue = getattr(signal, signame, None)
+                if sigvalue is not None:
+                    self._signalValuesToNames[sigvalue] = signame
+            for k, v in signal.__dict__.items():
+                # Check for platform specific signals, ignoring Python specific
+                # SIG_DFL and SIG_IGN
+                if k.startswith('SIG') and not k.startswith('SIG_'):
+                    if v not in self._signalValuesToNames:
+                        self._signalValuesToNames[v] = k + '@' + sys.platform
+        return self._signalValuesToNames[signum]
+
+
+    def processEnded(self, reason=None):
+        """
+        When we are told the process ended, try to notify the other side about
+        how the process ended using the exit-signal or exit-status requests.
+        Also, close the channel.
+        """
+        if reason is not None:
+            err = reason.value
+            if err.signal is not None:
+                signame = self._getSignalName(err.signal)
+                if (getattr(os, 'WCOREDUMP', None) is not None and
+                    os.WCOREDUMP(err.status)):
+                    log.msg('exitSignal: %s (core dumped)' % (signame,))
+                    coreDumped = 1
+                else:
+                    log.msg('exitSignal: %s' % (signame,))
+                    coreDumped = 0
+                self.session.conn.sendRequest(self.session, 'exit-signal',
+                        common.NS(signame[3:]) + chr(coreDumped) +
+                        common.NS('') + common.NS(''))
+            elif err.exitCode is not None:
+                log.msg('exitCode: %r' % (err.exitCode,))
+                self.session.conn.sendRequest(self.session, 'exit-status',
+                        struct.pack('>L', err.exitCode))
+        self.session.loseConnection()
+
+
+    def getHost(self):
+        """
+        Return the host from my session's transport.
+        """
+        return self.session.conn.transport.getHost()
+
+
+    def getPeer(self):
+        """
+        Return the peer from my session's transport.
+        """
+        return self.session.conn.transport.getPeer()
+
+
+    def write(self, data):
+        self.session.write(data)
+
+
+    def writeSequence(self, seq):
+        self.session.write(''.join(seq))
+
+
+    def loseConnection(self):
+        self.session.loseConnection()
+
+
+
+class SSHSessionClient(protocol.Protocol):
+
+    def dataReceived(self, data):
+        if self.transport:
+            self.transport.write(data)
+
+# methods factored out to make life easier on server writers
+def parseRequest_pty_req(data):
+    """Parse the data from a pty-req request into usable data.
+
+    @returns: a tuple of (terminal type, (rows, cols, xpixel, ypixel), modes)
+    """
+    term, rest = common.getNS(data)
+    cols, rows, xpixel, ypixel = struct.unpack('>4L', rest[: 16])
+    modes, ignored = common.getNS(rest[16:])
+    winSize = (rows, cols, xpixel, ypixel)
+    modes = [(ord(modes[i]), struct.unpack('>L', modes[i + 1: i + 5])[0])
+             for i in range(0, len(modes) - 1, 5)]
+    return term, winSize, modes
+
+def packRequest_pty_req(term, (rows, cols, xpixel, ypixel), modes):
+    """Pack a pty-req request so that it is suitable for sending.
+
+    NOTE: modes must be packed before being sent here.
+    """
+    termPacked = common.NS(term)
+    winSizePacked = struct.pack('>4L', cols, rows, xpixel, ypixel)
+    modesPacked = common.NS(modes) # depend on the client packing modes
+    return termPacked + winSizePacked + modesPacked
+
+def parseRequest_window_change(data):
+    """Parse the data from a window-change request into usuable data.
+
+    @returns: a tuple of (rows, cols, xpixel, ypixel)
+    """
+    cols, rows, xpixel, ypixel = struct.unpack('>4L', data)
+    return rows, cols, xpixel, ypixel
+
+def packRequest_window_change((rows, cols, xpixel, ypixel)):
+    """Pack a window-change request so that it is suitable for sending.
+    """
+    return struct.pack('>4L', cols, rows, xpixel, ypixel)
+
+import connection
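The request helpers above can be exercised on their own; a small sketch with
arbitrary values showing that the window-change payload round-trips:

    import struct
    from twisted.conch.ssh.session import (parseRequest_window_change,
                                           packRequest_window_change)

    payload = struct.pack('>4L', 80, 24, 0, 0)       # cols, rows, xpixel, ypixel
    print parseRequest_window_change(payload)        # (24, 80, 0, 0)
    print packRequest_window_change((24, 80, 0, 0)) == payload  # True
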
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/sexpy.py b/ThirdParty/Twisted/twisted/conch/ssh/sexpy.py
new file mode 100644
index 0000000..60c4328
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/sexpy.py
@@ -0,0 +1,42 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+def parse(s):
+    s = s.strip()
+    expr = []
+    while s:
+        if s[0] == '(':
+            newSexp = []
+            if expr:
+                expr[-1].append(newSexp)
+            expr.append(newSexp)
+            s = s[1:]
+            continue
+        if s[0] == ')':
+            aList = expr.pop()
+            s=s[1:]
+            if not expr:
+                assert not s
+                return aList
+            continue
+        i = 0
+        while s[i].isdigit(): i+=1
+        assert i
+        length = int(s[:i])
+        data = s[i+1:i+1+length]
+        expr[-1].append(data)
+        s=s[i+1+length:]
+    assert 0, "this should not happen"
+
+def pack(sexp):
+    s = ""
+    for o in sexp:
+        if type(o) in (type(()), type([])):
+            s+='('
+            s+=pack(o)
+            s+=')'
+        else:
+            s+='%i:%s' % (len(o), o)
+    return s
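For reference, a quick round-trip through the two helpers above, using the
same nested-list shape the LSH code in keys.py passes to sexpy.pack (the
names and values are placeholders):

    from twisted.conch.ssh import sexpy

    packed = sexpy.pack([['public-key', ['dsa', ['p', '23'], ['q', '19']]]])
    print packed               # (10:public-key(3:dsa(1:p2:23)(1:q2:19)))
    print sexpy.parse(packed)  # ['public-key', ['dsa', ['p', '23'], ['q', '19']]]
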
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/transport.py b/ThirdParty/Twisted/twisted/conch/ssh/transport.py
new file mode 100644
index 0000000..9e0c753
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/transport.py
@@ -0,0 +1,1617 @@
+# -*- test-case-name: twisted.conch.test.test_transport -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+The lowest level SSH protocol.  This handles the key negotiation, the
+encryption and the compression.  The transport layer is described in
+RFC 4253.
+
+Maintainer: Paul Swartz
+"""
+
+# base library imports
+import struct
+import zlib
+import array
+
+# external library imports
+from Crypto import Util
+from Crypto.Cipher import XOR
+
+# twisted imports
+from twisted.internet import protocol, defer
+
+from twisted.conch import error
+from twisted.python import log, randbytes
+from twisted.python.hashlib import md5, sha1
+
+
+# sibling imports
+from twisted.conch.ssh import address, keys
+from twisted.conch.ssh.common import NS, getNS, MP, getMP, _MPpow, ffs
+
+
+def _getRandomNumber(random, bits):
+    """
+    Generate a random number in the range [0, 2 ** bits).
+
+    @param bits: The number of bits in the result.
+    @type bits: C{int}
+
+    @rtype: C{int} or C{long}
+    @return: The newly generated random number.
+
+    @raise ValueError: if C{bits} is not a multiple of 8.
+    """
+    if bits % 8:
+        raise ValueError("bits (%d) must be a multiple of 8" % (bits,))
+    bytes = random(bits / 8)
+    result = Util.number.bytes_to_long(bytes)
+    return result
+
+
+
+def _generateX(random, bits):
+    """
+    Generate a new value for the private key x.
+
+    From RFC 2631, section 2.2::
+
+        X9.42 requires that the private key x be in the interval
+        [2, (q - 2)].  x should be randomly generated in this interval.
+    """
+    while True:
+        x = _getRandomNumber(random, bits)
+        if 2 <= x <= (2 ** bits) - 2:
+            return x
+
+class SSHTransportBase(protocol.Protocol):
+    """
+    Protocol supporting basic SSH functionality: sending/receiving packets
+    and message dispatch.  To connect to or run a server, you must use
+    SSHClientTransport or SSHServerTransport.
+
+    @ivar protocolVersion: A string representing the version of the SSH
+        protocol we support.  Currently defaults to '2.0'.
+
+    @ivar version: A string representing the version of the server or client.
+        Currently defaults to 'Twisted'.
+
+    @ivar comment: An optional string giving more information about the
+        server or client.
+
+    @ivar supportedCiphers: A list of strings representing the encryption
+        algorithms supported, in order from most-preferred to least.
+
+    @ivar supportedMACs: A list of strings representing the message
+        authentication codes (hashes) supported, in order from most-preferred
+        to least.  Both this and supportedCiphers can include 'none' to use
+        no encryption or authentication, but that must be done manually.
+
+    @ivar supportedKeyExchanges: A list of strings representing the
+        key exchanges supported, in order from most-preferred to least.
+
+    @ivar supportedPublicKeys:  A list of strings representing the
+        public key types supported, in order from most-preferred to least.
+
+    @ivar supportedCompressions: A list of strings representing compression
+        types supported, from most-preferred to least.
+
+    @ivar supportedLanguages: A list of strings representing languages
+        supported, from most-preferred to least.
+
+    @ivar supportedVersions: A container of strings representing supported ssh
+        protocol version numbers.
+
+    @ivar isClient: A boolean indicating whether this is a client or server.
+
+    @ivar gotVersion: A boolean indicating whether we have received the
+        version string from the other side.
+
+    @ivar buf: Data we've received but have not yet parsed into a packet.
+
+    @ivar outgoingPacketSequence: the sequence number of the next packet we
+        will send.
+
+    @ivar incomingPacketSequence: the sequence number of the next packet we
+        are expecting from the other side.
+
+    @ivar outgoingCompression: an object supporting the .compress(str) and
+        .flush() methods, or None if there is no outgoing compression.  Used to
+        compress outgoing data.
+
+    @ivar outgoingCompressionType: A string representing the outgoing
+        compression type.
+
+    @ivar incomingCompression: an object supporting the .decompress(str)
+        method, or None if there is no incoming compression.  Used to
+        decompress incoming data.
+
+    @ivar incomingCompressionType: A string representing the incoming
+        compression type.
+
+    @ivar ourVersionString: the version string that we sent to the other side.
+        Used in the key exchange.
+
+    @ivar otherVersionString: the version string sent by the other side.  Used
+        in the key exchange.
+
+    @ivar ourKexInitPayload: the MSG_KEXINIT payload we sent.  Used in the key
+        exchange.
+
+    @ivar otherKexInitPayload: the MSG_KEXINIT payload we received.  Used in
+        the key exchange.
+
+    @ivar sessionID: a string that is unique to this SSH session.  Created as
+        part of the key exchange, sessionID is used to generate the various
+        encryption and authentication keys.
+
+    @ivar service: an SSHService instance, or None.  If it's set to an object,
+        it's the currently running service.
+
+    @ivar kexAlg: the agreed-upon key exchange algorithm.
+
+    @ivar keyAlg: the agreed-upon public key type for the key exchange.
+
+    @ivar currentEncryptions: an SSHCiphers instance.  It represents the
+        current encryption and authentication options for the transport.
+
+    @ivar nextEncryptions: an SSHCiphers instance.  Held here until the
+        MSG_NEWKEYS messages are exchanged, when nextEncryptions is
+        transitioned to currentEncryptions.
+
+    @ivar first: the first bytes of the next packet.  In order to avoid
+        decrypting data twice, the first bytes are decrypted and stored until
+        the whole packet is available.
+
+    @ivar _keyExchangeState: The current protocol state with respect to key
+        exchange.  This is either C{_KEY_EXCHANGE_NONE} if no key exchange is
+        in progress (and returns to this value after any key exchange
+        completes), C{_KEY_EXCHANGE_REQUESTED} if this side of the connection
+        initiated a key exchange, and C{_KEY_EXCHANGE_PROGRESSING} if the other
+        side of the connection initiated a key exchange.  C{_KEY_EXCHANGE_NONE}
+        is the initial value (however SSH connections begin with key exchange,
+        so it will quickly change to another state).
+
+    @ivar _blockedByKeyExchange: Whenever C{_keyExchangeState} is not
+        C{_KEY_EXCHANGE_NONE}, this is a C{list} of pending messages which were
+        passed to L{sendPacket} but could not be sent because it is not legal
+        to send them while a key exchange is in progress.  When the key
+        exchange completes, another attempt is made to send these messages.
+    """
+
+
+    protocolVersion = '2.0'
+    version = 'Twisted'
+    comment = ''
+    ourVersionString = ('SSH-' + protocolVersion + '-' + version + ' '
+            + comment).strip()
+    supportedCiphers = ['aes256-ctr', 'aes256-cbc', 'aes192-ctr', 'aes192-cbc',
+                        'aes128-ctr', 'aes128-cbc', 'cast128-ctr',
+                        'cast128-cbc', 'blowfish-ctr', 'blowfish-cbc',
+                        '3des-ctr', '3des-cbc'] # ,'none']
+    supportedMACs = ['hmac-sha1', 'hmac-md5'] # , 'none']
+    # both of the above support 'none', but for security are disabled by
+    # default.  to enable them, subclass this class and add it, or do:
+    #   SSHTransportBase.supportedCiphers.append('none')
+    supportedKeyExchanges = ['diffie-hellman-group-exchange-sha1',
+                             'diffie-hellman-group1-sha1']
+    supportedPublicKeys = ['ssh-rsa', 'ssh-dss']
+    supportedCompressions = ['none', 'zlib']
+    supportedLanguages = ()
+    supportedVersions = ('1.99', '2.0')
+    isClient = False
+    gotVersion = False
+    buf = ''
+    outgoingPacketSequence = 0
+    incomingPacketSequence = 0
+    outgoingCompression = None
+    incomingCompression = None
+    sessionID = None
+    service = None
+
+    # There is no key exchange activity in progress.
+    _KEY_EXCHANGE_NONE = '_KEY_EXCHANGE_NONE'
+
+    # Key exchange is in progress and we started it.
+    _KEY_EXCHANGE_REQUESTED = '_KEY_EXCHANGE_REQUESTED'
+
+    # Key exchange is in progress and both sides have sent KEXINIT messages.
+    _KEY_EXCHANGE_PROGRESSING = '_KEY_EXCHANGE_PROGRESSING'
+
+    # There is a fourth conceptual state not represented here: KEXINIT received
+    # but not sent.  Since we always send a KEXINIT as soon as we get it, we
+    # can't ever be in that state.
+
+    # The current key exchange state.
+    _keyExchangeState = _KEY_EXCHANGE_NONE
+    _blockedByKeyExchange = None
+
+    def connectionLost(self, reason):
+        if self.service:
+            self.service.serviceStopped()
+        if hasattr(self, 'avatar'):
+            self.logoutFunction()
+        log.msg('connection lost')
+
+
+    def connectionMade(self):
+        """
+        Called when the connection is made to the other side.  We send our
+        version and the MSG_KEXINIT packet.
+        """
+        self.transport.write('%s\r\n' % (self.ourVersionString,))
+        self.currentEncryptions = SSHCiphers('none', 'none', 'none', 'none')
+        self.currentEncryptions.setKeys('', '', '', '', '', '')
+        self.sendKexInit()
+
+
+    def sendKexInit(self):
+        """
+        Send a I{KEXINIT} message to initiate key exchange or to respond to a
+        key exchange initiated by the peer.
+
+        @raise RuntimeError: If a key exchange has already been started and it
+            is not appropriate to send a I{KEXINIT} message at this time.
+
+        @return: C{None}
+        """
+        if self._keyExchangeState != self._KEY_EXCHANGE_NONE:
+            raise RuntimeError(
+                "Cannot send KEXINIT while key exchange state is %r" % (
+                    self._keyExchangeState,))
+
+        self.ourKexInitPayload = (chr(MSG_KEXINIT) +
+               randbytes.secureRandom(16) +
+               NS(','.join(self.supportedKeyExchanges)) +
+               NS(','.join(self.supportedPublicKeys)) +
+               NS(','.join(self.supportedCiphers)) +
+               NS(','.join(self.supportedCiphers)) +
+               NS(','.join(self.supportedMACs)) +
+               NS(','.join(self.supportedMACs)) +
+               NS(','.join(self.supportedCompressions)) +
+               NS(','.join(self.supportedCompressions)) +
+               NS(','.join(self.supportedLanguages)) +
+               NS(','.join(self.supportedLanguages)) +
+               '\000' + '\000\000\000\000')
+        self.sendPacket(MSG_KEXINIT, self.ourKexInitPayload[1:])
+        self._keyExchangeState = self._KEY_EXCHANGE_REQUESTED
+        self._blockedByKeyExchange = []
+
+
+    def _allowedKeyExchangeMessageType(self, messageType):
+        """
+        Determine if the given message type may be sent while key exchange is
+        in progress.
+
+        @param messageType: The type of message
+        @type messageType: C{int}
+
+        @return: C{True} if the given type of message may be sent while key
+            exchange is in progress, C{False} if it may not.
+        @rtype: C{bool}
+
+        @see: U{http://tools.ietf.org/html/rfc4253#section-7.1}
+        """
+        # Written somewhat peculiarly to reflect the way the specification
+        # defines the allowed message types.
+        if 1 <= messageType <= 19:
+            return messageType not in (MSG_SERVICE_REQUEST, MSG_SERVICE_ACCEPT)
+        if 20 <= messageType <= 29:
+            return messageType not in (MSG_KEXINIT,)
+        return 30 <= messageType <= 49
+
+
+    def sendPacket(self, messageType, payload):
+        """
+        Sends a packet.  If it's been set up, compress the data, encrypt it,
+        and authenticate it before sending.  If key exchange is in progress and
+        the message is not part of key exchange, queue it to be sent later.
+
+        @param messageType: The type of the packet; generally one of the
+                            MSG_* values.
+        @type messageType: C{int}
+        @param payload: The payload for the message.
+        @type payload: C{str}
+        """
+        if self._keyExchangeState != self._KEY_EXCHANGE_NONE:
+            if not self._allowedKeyExchangeMessageType(messageType):
+                self._blockedByKeyExchange.append((messageType, payload))
+                return
+
+        payload = chr(messageType) + payload
+        if self.outgoingCompression:
+            payload = (self.outgoingCompression.compress(payload)
+                       + self.outgoingCompression.flush(2))
+        bs = self.currentEncryptions.encBlockSize
+        # 4 for the packet length and 1 for the padding length
+        totalSize = 5 + len(payload)
+        lenPad = bs - (totalSize % bs)
+        if lenPad < 4:
+            lenPad = lenPad + bs
+        packet = (struct.pack('!LB',
+                              totalSize + lenPad - 4, lenPad) +
+                  payload + randbytes.secureRandom(lenPad))
+        encPacket = (
+            self.currentEncryptions.encrypt(packet) +
+            self.currentEncryptions.makeMAC(
+                self.outgoingPacketSequence, packet))
+        self.transport.write(encPacket)
+        self.outgoingPacketSequence += 1
+
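The framing arithmetic in sendPacket() can be checked in isolation; a
standalone sketch of the pre-encryption layout (block size 8, no MAC, no
compression), mirroring the code above:

    import os
    import struct

    payload = chr(2)                        # message type 2 (SSH_MSG_IGNORE), empty body
    bs = 8                                  # cipher block size
    totalSize = 5 + len(payload)            # 4-byte length field + 1-byte pad length
    lenPad = bs - (totalSize % bs)
    if lenPad < 4:
        lenPad = lenPad + bs                # padding must be at least 4 bytes
    packet = (struct.pack('!LB', totalSize + lenPad - 4, lenPad) +
              payload + os.urandom(lenPad))
    assert len(packet) % bs == 0            # whole packet is block-aligned
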
+
+    def getPacket(self):
+        """
+        Try to return a decrypted, authenticated, and decompressed packet
+        out of the buffer.  If there is not enough data, return None.
+
+        @rtype: C{str}/C{None}
+        """
+        bs = self.currentEncryptions.decBlockSize
+        ms = self.currentEncryptions.verifyDigestSize
+        if len(self.buf) < bs: return # not enough data
+        if not hasattr(self, 'first'):
+            first = self.currentEncryptions.decrypt(self.buf[:bs])
+        else:
+            first = self.first
+            del self.first
+        packetLen, paddingLen = struct.unpack('!LB', first[:5])
+        if packetLen > 1048576: # 1024 ** 2
+            self.sendDisconnect(DISCONNECT_PROTOCOL_ERROR,
+                                'bad packet length %s' % packetLen)
+            return
+        if len(self.buf) < packetLen + 4 + ms:
+            self.first = first
+            return # not enough packet
+        if (packetLen + 4) % bs != 0:
+            self.sendDisconnect(
+                DISCONNECT_PROTOCOL_ERROR,
+                'bad packet mod (%i%%%i == %i)' % (packetLen + 4, bs,
+                                                   (packetLen + 4) % bs))
+            return
+        encData, self.buf = self.buf[:4 + packetLen], self.buf[4 + packetLen:]
+        packet = first + self.currentEncryptions.decrypt(encData[bs:])
+        if len(packet) != 4 + packetLen:
+            self.sendDisconnect(DISCONNECT_PROTOCOL_ERROR,
+                                'bad decryption')
+            return
+        if ms:
+            macData, self.buf = self.buf[:ms], self.buf[ms:]
+            if not self.currentEncryptions.verify(self.incomingPacketSequence,
+                                                  packet, macData):
+                self.sendDisconnect(DISCONNECT_MAC_ERROR, 'bad MAC')
+                return
+        payload = packet[5:-paddingLen]
+        if self.incomingCompression:
+            try:
+                payload = self.incomingCompression.decompress(payload)
+            except: # bare except, because who knows what kind of errors
+                    # decompression can raise
+                log.err()
+                self.sendDisconnect(DISCONNECT_COMPRESSION_ERROR,
+                                    'compression error')
+                return
+        self.incomingPacketSequence += 1
+        return payload
+
+
+    def _unsupportedVersionReceived(self, remoteVersion):
+        """
+        Called when an unsupported version of the ssh protocol is received from
+        the remote endpoint.
+
+        @param remoteVersion: remote ssh protocol version which is unsupported
+            by us.
+        @type remoteVersion: C{str}
+        """
+        self.sendDisconnect(DISCONNECT_PROTOCOL_VERSION_NOT_SUPPORTED,
+            'bad version ' + remoteVersion)
+
+
+    def dataReceived(self, data):
+        """
+        First, check for the version string (SSH-2.0-*).  After that has been
+        received, this method adds data to the buffer, and pulls out any
+        packets.
+
+        @type data: C{str}
+        """
+        self.buf = self.buf + data
+        if not self.gotVersion:
+            if self.buf.find('\n', self.buf.find('SSH-')) == -1:
+                return
+            lines = self.buf.split('\n')
+            for p in lines:
+                if p.startswith('SSH-'):
+                    self.gotVersion = True
+                    self.otherVersionString = p.strip()
+                    remoteVersion = p.split('-')[1]
+                    if remoteVersion not in self.supportedVersions:
+                        self._unsupportedVersionReceived(remoteVersion)
+                        return
+                    i = lines.index(p)
+                    self.buf = '\n'.join(lines[i + 1:])
+        packet = self.getPacket()
+        while packet:
+            messageNum = ord(packet[0])
+            self.dispatchMessage(messageNum, packet[1:])
+            packet = self.getPacket()
+
+
+    def dispatchMessage(self, messageNum, payload):
+        """
+        Send a received message to the appropriate method.
+
+        @type messageNum: C{int}
+        @type payload: C{str}
+        """
+        if messageNum < 50 and messageNum in messages:
+            messageType = messages[messageNum][4:]
+            f = getattr(self, 'ssh_%s' % messageType, None)
+            if f is not None:
+                f(payload)
+            else:
+                log.msg("couldn't handle %s" % messageType)
+                log.msg(repr(payload))
+                self.sendUnimplemented()
+        elif self.service:
+            log.callWithLogger(self.service, self.service.packetReceived,
+                               messageNum, payload)
+        else:
+            log.msg("couldn't handle %s" % messageNum)
+            log.msg(repr(payload))
+            self.sendUnimplemented()
+
+    def getPeer(self):
+        """
+        Returns an L{SSHTransportAddress} corresponding to the other (peer)
+        side of this transport.
+
+        @return: L{SSHTransportAddress} for the peer
+        @rtype: L{SSHTransportAddress}
+        @since: 12.1
+        """
+        return address.SSHTransportAddress(self.transport.getPeer())
+
+    def getHost(self):
+        """
+        Returns an L{SSHTransportAddress} corresponding to this side of the
+        transport.
+
+        @return: L{SSHTransportAddress} for this side of the transport
+        @rtype: L{SSHTransportAddress}
+        @since: 12.1
+        """
+        return address.SSHTransportAddress(self.transport.getHost())
+
+
+    # Client-initiated rekeying looks like this:
+    #
+    #  C> MSG_KEXINIT
+    #  S> MSG_KEXINIT
+    #  C> MSG_KEX_DH_GEX_REQUEST  or   MSG_KEXDH_INIT
+    #  S> MSG_KEX_DH_GEX_GROUP    or   MSG_KEXDH_REPLY
+    #  C> MSG_KEX_DH_GEX_INIT     or   --
+    #  S> MSG_KEX_DH_GEX_REPLY    or   --
+    #  C> MSG_NEWKEYS
+    #  S> MSG_NEWKEYS
+    #
+    # Server-initiated rekeying is the same, only the first two messages are
+    # switched.
+
+    def ssh_KEXINIT(self, packet):
+        """
+        Called when we receive a MSG_KEXINIT message.  Payload::
+            bytes[16] cookie
+            string keyExchangeAlgorithms
+            string keyAlgorithms
+            string incomingEncryptions
+            string outgoingEncryptions
+            string incomingAuthentications
+            string outgoingAuthentications
+            string incomingCompressions
+            string outgoingCompressions
+            string incomingLanguages
+            string outgoingLanguages
+            bool firstPacketFollows
+            uint32 0 (reserved)
+
+        Starts setting up the key exchange, keys, encryptions, and
+        authentications.  Extended by ssh_KEXINIT in SSHServerTransport and
+        SSHClientTransport.
+        """
+        self.otherKexInitPayload = chr(MSG_KEXINIT) + packet
+        #cookie = packet[: 16] # taking this is useless
+        k = getNS(packet[16:], 10)
+        strings, rest = k[:-1], k[-1]
+        (kexAlgs, keyAlgs, encCS, encSC, macCS, macSC, compCS, compSC, langCS,
+         langSC) = [s.split(',') for s in strings]
+        # these are the server directions
+        outs = [encSC, macSC, compSC]
+        ins = [encCS, macCS, compCS]
+        if self.isClient:
+            outs, ins = ins, outs # switch directions
+        server = (self.supportedKeyExchanges, self.supportedPublicKeys,
+                self.supportedCiphers, self.supportedCiphers,
+                self.supportedMACs, self.supportedMACs,
+                self.supportedCompressions, self.supportedCompressions)
+        client = (kexAlgs, keyAlgs, outs[0], ins[0], outs[1], ins[1],
+                outs[2], ins[2])
+        if self.isClient:
+            server, client = client, server
+        self.kexAlg = ffs(client[0], server[0])
+        self.keyAlg = ffs(client[1], server[1])
+        self.nextEncryptions = SSHCiphers(
+            ffs(client[2], server[2]),
+            ffs(client[3], server[3]),
+            ffs(client[4], server[4]),
+            ffs(client[5], server[5]))
+        self.outgoingCompressionType = ffs(client[6], server[6])
+        self.incomingCompressionType = ffs(client[7], server[7])
+        if None in (self.kexAlg, self.keyAlg, self.outgoingCompressionType,
+                    self.incomingCompressionType):
+            self.sendDisconnect(DISCONNECT_KEY_EXCHANGE_FAILED,
+                                "couldn't match all kex parts")
+            return
+        if None in self.nextEncryptions.__dict__.values():
+            self.sendDisconnect(DISCONNECT_KEY_EXCHANGE_FAILED,
+                                "couldn't match all kex parts")
+            return
+        log.msg('kex alg, key alg: %s %s' % (self.kexAlg, self.keyAlg))
+        log.msg('outgoing: %s %s %s' % (self.nextEncryptions.outCipType,
+                                        self.nextEncryptions.outMACType,
+                                        self.outgoingCompressionType))
+        log.msg('incoming: %s %s %s' % (self.nextEncryptions.inCipType,
+                                        self.nextEncryptions.inMACType,
+                                        self.incomingCompressionType))
+
+        if self._keyExchangeState == self._KEY_EXCHANGE_REQUESTED:
+            self._keyExchangeState = self._KEY_EXCHANGE_PROGRESSING
+        else:
+            self.sendKexInit()
+
+        return kexAlgs, keyAlgs, rest # for SSHServerTransport to use
+
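+    # Illustrative sketch (not from the upstream module): ffs(), used above,
+    # is assumed to return the first algorithm in the client's preference
+    # list that the server also offers, per RFC 4253 section 7.1.  A minimal
+    # standalone equivalent:
+    #
+    #     def first_match(client_prefs, server_prefs):
+    #         for algorithm in client_prefs:
+    #             if algorithm in server_prefs:
+    #                 return algorithm
+    #         return None  # no overlap; ssh_KEXINIT then disconnects
+    #
+    #     first_match(['aes256-ctr', 'aes128-cbc'],
+    #                 ['aes128-cbc', '3des-cbc'])   # -> 'aes128-cbc'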
+
+    def ssh_DISCONNECT(self, packet):
+        """
+        Called when we receive a MSG_DISCONNECT message.  Payload::
+            long code
+            string description
+
+        This means that the other side has disconnected.  Pass the message up
+        and disconnect ourselves.
+        """
+        reasonCode = struct.unpack('>L', packet[: 4])[0]
+        description, foo = getNS(packet[4:])
+        self.receiveError(reasonCode, description)
+        self.transport.loseConnection()
+
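+    # Illustrative sketch (not from the upstream module): NS and getNS,
+    # imported from twisted.conch.ssh.common elsewhere in this patch, are
+    # assumed to implement the RFC 4251 'string' framing: a 4-byte big-endian
+    # length followed by that many bytes.  A simplified single-string
+    # equivalent:
+    #
+    #     import struct
+    #
+    #     def ns(s):
+    #         return struct.pack('>L', len(s)) + s
+    #
+    #     def get_ns(data):
+    #         length = struct.unpack('>L', data[:4])[0]
+    #         return data[4:4 + length], data[4 + length:]
+    #
+    #     ns('abc')                          # -> '\x00\x00\x00\x03abc'
+    #     get_ns('\x00\x00\x00\x03abcrest')  # -> ('abc', 'rest')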
+
+    def ssh_IGNORE(self, packet):
+        """
+        Called when we receive a MSG_IGNORE message.  No payload.
+        This means nothing; we simply return.
+        """
+
+
+    def ssh_UNIMPLEMENTED(self, packet):
+        """
+        Called when we receive a MSG_UNIMPLEMENTED message.  Payload::
+            long packet
+
+        This means that the other side did not implement one of our packets.
+        """
+        seqnum, = struct.unpack('>L', packet)
+        self.receiveUnimplemented(seqnum)
+
+
+    def ssh_DEBUG(self, packet):
+        """
+        Called when we receive a MSG_DEBUG message.  Payload::
+            bool alwaysDisplay
+            string message
+            string language
+
+        This means the other side has passed along some debugging info.
+        """
+        alwaysDisplay = bool(packet[0])
+        message, lang, foo = getNS(packet[1:], 2)
+        self.receiveDebug(alwaysDisplay, message, lang)
+
+
+    def setService(self, service):
+        """
+        Set our service to service and start it running.  If we were
+        running a service previously, stop it first.
+
+        @type service: C{SSHService}
+        """
+        log.msg('starting service %s' % service.name)
+        if self.service:
+            self.service.serviceStopped()
+        self.service = service
+        service.transport = self
+        self.service.serviceStarted()
+
+
+    def sendDebug(self, message, alwaysDisplay=False, language=''):
+        """
+        Send a debug message to the other side.
+
+        @param message: the message to send.
+        @type message: C{str}
+        @param alwaysDisplay: if True, tell the other side to always
+                              display this message.
+        @type alwaysDisplay: C{bool}
+        @param language: optionally, the language the message is in.
+        @type language: C{str}
+        """
+        self.sendPacket(MSG_DEBUG, chr(alwaysDisplay) + NS(message) +
+                        NS(language))
+
+
+    def sendIgnore(self, message):
+        """
+        Send a message that will be ignored by the other side.  This is
+        useful to fool attacks based on guessing packet sizes in the
+        encrypted stream.
+
+        @param message: data to send with the message
+        @type message: C{str}
+        """
+        self.sendPacket(MSG_IGNORE, NS(message))
+
+
+    def sendUnimplemented(self):
+        """
+        Send a message to the other side that the last packet was not
+        understood.
+        """
+        seqnum = self.incomingPacketSequence
+        self.sendPacket(MSG_UNIMPLEMENTED, struct.pack('!L', seqnum))
+
+
+    def sendDisconnect(self, reason, desc):
+        """
+        Send a disconnect message to the other side and then disconnect.
+
+        @param reason: the reason for the disconnect.  Should be one of the
+                       DISCONNECT_* values.
+        @type reason: C{int}
+        @param desc: a description of the reason for the disconnection.
+        @type desc: C{str}
+        """
+        self.sendPacket(
+            MSG_DISCONNECT, struct.pack('>L', reason) + NS(desc) + NS(''))
+        log.msg('Disconnecting with error, code %s\nreason: %s' % (reason,
+                                                                   desc))
+        self.transport.loseConnection()
+
+
+    def _getKey(self, c, sharedSecret, exchangeHash):
+        """
+        Get one of the keys for authentication/encryption.
+
+        @type c: C{str}
+        @type sharedSecret: C{str}
+        @type exchangeHash: C{str}
+        """
+        k1 = sha1(sharedSecret + exchangeHash + c + self.sessionID)
+        k1 = k1.digest()
+        k2 = sha1(sharedSecret + exchangeHash + k1).digest()
+        return k1 + k2
+
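+    # Illustrative sketch (not from the upstream module): this mirrors the
+    # key derivation of RFC 4253 section 7.2, where each key is
+    # HASH(K || H || letter || session_id) and is extended with
+    # HASH(K || H || K1) when more bytes are needed.  The letters 'A'-'F'
+    # select the client/server IVs, encryption keys and integrity keys, as
+    # used by _keySetup() below.
+    #
+    #     from hashlib import sha1
+    #
+    #     def derive_key(shared_secret, exchange_hash, letter, session_id):
+    #         k1 = sha1(shared_secret + exchange_hash + letter +
+    #                   session_id).digest()
+    #         k2 = sha1(shared_secret + exchange_hash + k1).digest()
+    #         return k1 + k2   # 40 bytes; callers slice off what they need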
+
+    def _keySetup(self, sharedSecret, exchangeHash):
+        """
+        Set up the keys for the connection and send MSG_NEWKEYS when
+        finished.
+
+        @param sharedSecret: a secret string agreed upon using a Diffie-
+                             Hellman exchange, so it is only shared between
+                             the server and the client.
+        @type sharedSecret: C{str}
+        @param exchangeHash: A hash of various data known by both sides.
+        @type exchangeHash: C{str}
+        """
+        if not self.sessionID:
+            self.sessionID = exchangeHash
+        initIVCS = self._getKey('A', sharedSecret, exchangeHash)
+        initIVSC = self._getKey('B', sharedSecret, exchangeHash)
+        encKeyCS = self._getKey('C', sharedSecret, exchangeHash)
+        encKeySC = self._getKey('D', sharedSecret, exchangeHash)
+        integKeyCS = self._getKey('E', sharedSecret, exchangeHash)
+        integKeySC = self._getKey('F', sharedSecret, exchangeHash)
+        outs = [initIVSC, encKeySC, integKeySC]
+        ins = [initIVCS, encKeyCS, integKeyCS]
+        if self.isClient: # reverse for the client
+            log.msg('REVERSE')
+            outs, ins = ins, outs
+        self.nextEncryptions.setKeys(outs[0], outs[1], ins[0], ins[1],
+                                     outs[2], ins[2])
+        self.sendPacket(MSG_NEWKEYS, '')
+
+
+    def _newKeys(self):
+        """
+        Called back by a subclass once a I{MSG_NEWKEYS} message has been
+        received.  This indicates key exchange has completed and new encryption
+        and compression parameters should be adopted.  Any messages which were
+        queued during key exchange will also be flushed.
+        """
+        log.msg('NEW KEYS')
+        self.currentEncryptions = self.nextEncryptions
+        if self.outgoingCompressionType == 'zlib':
+            self.outgoingCompression = zlib.compressobj(6)
+        if self.incomingCompressionType == 'zlib':
+            self.incomingCompression = zlib.decompressobj()
+
+        self._keyExchangeState = self._KEY_EXCHANGE_NONE
+        messages = self._blockedByKeyExchange
+        self._blockedByKeyExchange = None
+        for (messageType, payload) in messages:
+            self.sendPacket(messageType, payload)
+
+
+    def isEncrypted(self, direction="out"):
+        """
+        Return True if the connection is encrypted in the given direction.
+        Direction must be one of ["out", "in", "both"].
+        """
+        if direction == "out":
+            return self.currentEncryptions.outCipType != 'none'
+        elif direction == "in":
+            return self.currentEncryptions.inCipType != 'none'
+        elif direction == "both":
+            return self.isEncrypted("in") and self.isEncrypted("out")
+        else:
+            raise TypeError('direction must be "out", "in", or "both"')
+
+
+    def isVerified(self, direction="out"):
+        """
+        Return True if the connection is verified/authenticated in the
+        given direction.  Direction must be one of ["out", "in", "both"].
+        """
+        if direction == "out":
+            return self.currentEncryptions.outMACType != 'none'
+        elif direction == "in":
+            return self.currentEncryptions.inMACType != 'none'
+        elif direction == "both":
+            return self.isVerified("in")and self.isVerified("out")
+        else:
+            raise TypeError('direction must be "out", "in", or "both"')
+
+
+    def loseConnection(self):
+        """
+        Lose the connection to the other side, sending a
+        DISCONNECT_CONNECTION_LOST message.
+        """
+        self.sendDisconnect(DISCONNECT_CONNECTION_LOST,
+                            "user closed connection")
+
+
+    # client methods
+    def receiveError(self, reasonCode, description):
+        """
+        Called when we receive a disconnect error message from the other
+        side.
+
+        @param reasonCode: the reason for the disconnect, one of the
+                           DISCONNECT_ values.
+        @type reasonCode: C{int}
+        @param description: a human-readable description of the
+                            disconnection.
+        @type description: C{str}
+        """
+        log.msg('Got remote error, code %s\nreason: %s' % (reasonCode,
+                                                           description))
+
+
+    def receiveUnimplemented(self, seqnum):
+        """
+        Called when we receive an unimplemented packet message from the other
+        side.
+
+        @param seqnum: the sequence number that was not understood.
+        @type seqnum: C{int}
+        """
+        log.msg('other side unimplemented packet #%s' % seqnum)
+
+
+    def receiveDebug(self, alwaysDisplay, message, lang):
+        """
+        Called when we receive a debug message from the other side.
+
+        @param alwaysDisplay: if True, this message should always be
+                              displayed.
+        @type alwaysDisplay: C{bool}
+        @param message: the debug message
+        @type message: C{str}
+        @param lang: optionally the language the message is in.
+        @type lang: C{str}
+        """
+        if alwaysDisplay:
+            log.msg('Remote Debug Message: %s' % message)
+
+
+
+class SSHServerTransport(SSHTransportBase):
+    """
+    SSHServerTransport implements the server side of the SSH protocol.
+
+    @ivar isClient: since we are never the client, this is always False.
+
+    @ivar ignoreNextPacket: if True, ignore the next key exchange packet.  This
+        is set when the client sends a guessed key exchange packet but with
+        an incorrect guess.
+
+    @ivar dhGexRequest: the KEX_DH_GEX_REQUEST(_OLD) that the client sent.
+        The key generation needs this to be stored.
+
+    @ivar g: the Diffie-Hellman group generator.
+
+    @ivar p: the Diffie-Hellman group prime.
+    """
+    isClient = False
+    ignoreNextPacket = 0
+
+
+    def ssh_KEXINIT(self, packet):
+        """
+        Called when we receive a MSG_KEXINIT message.  For a description
+        of the packet, see SSHTransportBase.ssh_KEXINIT().  Additionally,
+        this method checks if a guessed key exchange packet was sent.  If
+        it was sent, and it guessed incorrectly, the next key exchange
+        packet MUST be ignored.
+        """
+        retval = SSHTransportBase.ssh_KEXINIT(self, packet)
+        if not retval: # disconnected
+            return
+        else:
+            kexAlgs, keyAlgs, rest = retval
+        if ord(rest[0]): # first_kex_packet_follows
+            if (kexAlgs[0] != self.supportedKeyExchanges[0] or
+                keyAlgs[0] != self.supportedPublicKeys[0]):
+                self.ignoreNextPacket = True # guess was wrong
+
+
+    def _ssh_KEXDH_INIT(self, packet):
+        """
+        Called to handle the beginning of a diffie-hellman-group1-sha1 key
+        exchange.
+
+        Unlike other message types, this is not dispatched automatically.  It
+        is called from C{ssh_KEX_DH_GEX_REQUEST_OLD} because an extra check is
+        required to determine if this is really a KEXDH_INIT message or if it
+        is a KEX_DH_GEX_REQUEST_OLD message.
+
+        The KEXDH_INIT (for diffie-hellman-group1-sha1 exchanges) payload::
+
+                integer e (the client's Diffie-Hellman public key)
+
+            We send the KEXDH_REPLY with our host key and signature.
+        """
+        clientDHpublicKey, foo = getMP(packet)
+        y = _getRandomNumber(randbytes.secureRandom, 512)
+        serverDHpublicKey = _MPpow(DH_GENERATOR, y, DH_PRIME)
+        sharedSecret = _MPpow(clientDHpublicKey, y, DH_PRIME)
+        h = sha1()
+        h.update(NS(self.otherVersionString))
+        h.update(NS(self.ourVersionString))
+        h.update(NS(self.otherKexInitPayload))
+        h.update(NS(self.ourKexInitPayload))
+        h.update(NS(self.factory.publicKeys[self.keyAlg].blob()))
+        h.update(MP(clientDHpublicKey))
+        h.update(serverDHpublicKey)
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+        self.sendPacket(
+            MSG_KEXDH_REPLY,
+            NS(self.factory.publicKeys[self.keyAlg].blob()) +
+            serverDHpublicKey +
+            NS(self.factory.privateKeys[self.keyAlg].sign(exchangeHash)))
+        self._keySetup(sharedSecret, exchangeHash)
+
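+    # Illustrative sketch (not from the upstream module): the exchange above
+    # is plain finite-field Diffie-Hellman over the group 2 parameters defined
+    # at the bottom of this module; _MPpow() is assumed to return the mpint
+    # encoding of pow(base, exp, modulus).  With the client's value e
+    # (clientDHpublicKey above), in integer terms:
+    #
+    #     y = some_large_random_integer       # server's ephemeral secret
+    #     f = pow(DH_GENERATOR, y, DH_PRIME)  # sent back in MSG_KEXDH_REPLY
+    #     K = pow(e, y, DH_PRIME)             # shared secret; the client gets
+    #                                         # the same K from pow(f, x, p)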
+
+    def ssh_KEX_DH_GEX_REQUEST_OLD(self, packet):
+        """
+        This represents two different key exchange methods that share the same
+        integer value.  If the message is determined to be a KEXDH_INIT,
+        C{_ssh_KEXDH_INIT} is called to handle it.  Otherwise, for
+        KEX_DH_GEX_REQUEST_OLD (for diffie-hellman-group-exchange-sha1)
+        payload::
+
+                integer ideal (ideal size for the Diffie-Hellman prime)
+
+            We send the KEX_DH_GEX_GROUP message with the group that is
+            closest in size to ideal.
+
+        If we were told to ignore the next key exchange packet by ssh_KEXINIT,
+        drop it on the floor and return.
+        """
+        if self.ignoreNextPacket:
+            self.ignoreNextPacket = 0
+            return
+
+        # KEXDH_INIT and KEX_DH_GEX_REQUEST_OLD have the same value, so use
+        # another cue to decide what kind of message the peer sent us.
+        if self.kexAlg == 'diffie-hellman-group1-sha1':
+            return self._ssh_KEXDH_INIT(packet)
+        elif self.kexAlg == 'diffie-hellman-group-exchange-sha1':
+            self.dhGexRequest = packet
+            ideal = struct.unpack('>L', packet)[0]
+            self.g, self.p = self.factory.getDHPrime(ideal)
+            self.sendPacket(MSG_KEX_DH_GEX_GROUP, MP(self.p) + MP(self.g))
+        else:
+            raise error.ConchError('bad kexalg: %s' % self.kexAlg)
+
+
+    def ssh_KEX_DH_GEX_REQUEST(self, packet):
+        """
+        Called when we receive a MSG_KEX_DH_GEX_REQUEST message.  Payload::
+            integer minimum
+            integer ideal
+            integer maximum
+
+        The client is asking for a Diffie-Hellman group between minimum and
+        maximum size, and close to ideal if possible.  We reply with a
+        MSG_KEX_DH_GEX_GROUP message.
+
+        If we were told to ignore the next key exchange packet by ssh_KEXINIT,
+        drop it on the floor and return.
+        """
+        if self.ignoreNextPacket:
+            self.ignoreNextPacket = 0
+            return
+        self.dhGexRequest = packet
+        min, ideal, max = struct.unpack('>3L', packet)
+        self.g, self.p = self.factory.getDHPrime(ideal)
+        self.sendPacket(MSG_KEX_DH_GEX_GROUP, MP(self.p) + MP(self.g))
+
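+    # Illustrative sketch (not from the upstream module): the request body is
+    # three unsigned 32-bit big-endian integers, unpacked in one call:
+    #
+    #     import struct
+    #     body = struct.pack('>3L', 1024, 2048, 8192)   # min, ideal, max
+    #     struct.unpack('>3L', body)                    # -> (1024, 2048, 8192)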
+
+    def ssh_KEX_DH_GEX_INIT(self, packet):
+        """
+        Called when we get a MSG_KEX_DH_GEX_INIT message.  Payload::
+            integer e (client DH public key)
+
+        We send the MSG_KEX_DH_GEX_REPLY message with our host key and
+        signature.
+        """
+        clientDHpublicKey, foo = getMP(packet)
+        # TODO: we should also look at the value they send to us and reject
+        # insecure values of f (if g==2 and f has a single '1' bit while the
+        # rest are '0's, then they must have used a small y also).
+
+        # TODO: This could be computed when self.p is set up
+        #  or do as openssh does and scan f for a single '1' bit instead
+
+        pSize = Util.number.size(self.p)
+        y = _getRandomNumber(randbytes.secureRandom, pSize)
+
+        serverDHpublicKey = _MPpow(self.g, y, self.p)
+        sharedSecret = _MPpow(clientDHpublicKey, y, self.p)
+        h = sha1()
+        h.update(NS(self.otherVersionString))
+        h.update(NS(self.ourVersionString))
+        h.update(NS(self.otherKexInitPayload))
+        h.update(NS(self.ourKexInitPayload))
+        h.update(NS(self.factory.publicKeys[self.keyAlg].blob()))
+        h.update(self.dhGexRequest)
+        h.update(MP(self.p))
+        h.update(MP(self.g))
+        h.update(MP(clientDHpublicKey))
+        h.update(serverDHpublicKey)
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+        self.sendPacket(
+            MSG_KEX_DH_GEX_REPLY,
+            NS(self.factory.publicKeys[self.keyAlg].blob()) +
+            serverDHpublicKey +
+            NS(self.factory.privateKeys[self.keyAlg].sign(exchangeHash)))
+        self._keySetup(sharedSecret, exchangeHash)
+
+
+    def ssh_NEWKEYS(self, packet):
+        """
+        Called when we get a MSG_NEWKEYS message.  No payload.
+        When we get this, the keys have been set on both sides, and we
+        start using them to encrypt and authenticate the connection.
+        """
+        if packet != '':
+            self.sendDisconnect(DISCONNECT_PROTOCOL_ERROR,
+                                "NEWKEYS takes no data")
+            return
+        self._newKeys()
+
+
+    def ssh_SERVICE_REQUEST(self, packet):
+        """
+        Called when we get a MSG_SERVICE_REQUEST message.  Payload::
+            string serviceName
+
+        The client has requested a service.  If we can start the service,
+        start it; otherwise, disconnect with
+        DISCONNECT_SERVICE_NOT_AVAILABLE.
+        """
+        service, rest = getNS(packet)
+        cls = self.factory.getService(self, service)
+        if not cls:
+            self.sendDisconnect(DISCONNECT_SERVICE_NOT_AVAILABLE,
+                                "don't have service %s" % service)
+            return
+        else:
+            self.sendPacket(MSG_SERVICE_ACCEPT, NS(service))
+            self.setService(cls())
+
+
+
+class SSHClientTransport(SSHTransportBase):
+    """
+    SSHClientTransport implements the client side of the SSH protocol.
+
+    @ivar isClient: since we are always the client, this is always True.
+
+    @ivar _gotNewKeys: if we receive a MSG_NEWKEYS message before we are
+        ready to transition to the new keys, this is set to True so we
+        can transition when the keys are ready locally.
+
+    @ivar x: our Diffie-Hellman private key.
+
+    @ivar e: our Diffie-Hellman public key.
+
+    @ivar g: the Diffie-Hellman group generator.
+
+    @ivar p: the Diffie-Hellman group prime
+
+    @ivar instance: the SSHService object we are requesting.
+    """
+    isClient = True
+
+    def connectionMade(self):
+        """
+        Called when the connection is started with the server.  Just sets
+        up a private instance variable.
+        """
+        SSHTransportBase.connectionMade(self)
+        self._gotNewKeys = 0
+
+
+    def ssh_KEXINIT(self, packet):
+        """
+        Called when we receive a MSG_KEXINIT message.  For a description
+        of the packet, see SSHTransportBase.ssh_KEXINIT().  Additionally,
+        this method sends the first key exchange packet.  If the agreed-upon
+        exchange is diffie-hellman-group1-sha1, generate a public key
+        and send it in a MSG_KEXDH_INIT message.  If the exchange is
+        diffie-hellman-group-exchange-sha1, ask for a 2048 bit group with a
+        MSG_KEX_DH_GEX_REQUEST_OLD message.
+        """
+        if SSHTransportBase.ssh_KEXINIT(self, packet) is None:
+            return # we disconnected
+        if self.kexAlg == 'diffie-hellman-group1-sha1':
+            self.x = _generateX(randbytes.secureRandom, 512)
+            self.e = _MPpow(DH_GENERATOR, self.x, DH_PRIME)
+            self.sendPacket(MSG_KEXDH_INIT, self.e)
+        elif self.kexAlg == 'diffie-hellman-group-exchange-sha1':
+            self.sendPacket(MSG_KEX_DH_GEX_REQUEST_OLD, '\x00\x00\x08\x00')
+        else:
+            raise error.ConchError("somehow, the kexAlg has been set "
+                                   "to something we don't support")
+
+
+    def _ssh_KEXDH_REPLY(self, packet):
+        """
+        Called to handle a reply to a diffie-hellman-group1-sha1 key exchange
+        message (KEXDH_INIT).
+
+        Like the handler for I{KEXDH_INIT}, this message type has an
+        overlapping value.  This method is called from C{ssh_KEX_DH_GEX_GROUP}
+        if that method detects a diffie-hellman-group1-sha1 key exchange is in
+        progress.
+
+        Payload::
+
+            string serverHostKey
+            integer f (server Diffie-Hellman public key)
+            string signature
+
+        We verify the host key by calling verifyHostKey, then continue in
+        _continueKEXDH_REPLY.
+        """
+        pubKey, packet = getNS(packet)
+        f, packet = getMP(packet)
+        signature, packet = getNS(packet)
+        fingerprint = ':'.join([ch.encode('hex') for ch in
+                                md5(pubKey).digest()])
+        d = self.verifyHostKey(pubKey, fingerprint)
+        d.addCallback(self._continueKEXDH_REPLY, pubKey, f, signature)
+        d.addErrback(
+            lambda unused: self.sendDisconnect(
+                DISCONNECT_HOST_KEY_NOT_VERIFIABLE, 'bad host key'))
+        return d
+
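+    # Illustrative sketch (not from the upstream module): the fingerprint
+    # handed to verifyHostKey() is the usual colon-separated hex rendering of
+    # the MD5 of the raw public key blob:
+    #
+    #     from hashlib import md5
+    #
+    #     def blob_fingerprint(blob):
+    #         return ':'.join(c.encode('hex') for c in md5(blob).digest())
+    #
+    #     # blob_fingerprint(pubKey) -> 16 colon-separated hex byte pairs,
+    #     # e.g. 'ab:cd:...:12'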
+
+    def ssh_KEX_DH_GEX_GROUP(self, packet):
+        """
+        This handles two different messages which share an integer value.
+
+        If the key exchange is diffie-hellman-group-exchange-sha1, this is
+        MSG_KEX_DH_GEX_GROUP.  Payload::
+            string g (group generator)
+            string p (group prime)
+
+        We generate a Diffie-Hellman public key and send it in a
+        MSG_KEX_DH_GEX_INIT message.
+        """
+        if self.kexAlg == 'diffie-hellman-group1-sha1':
+            return self._ssh_KEXDH_REPLY(packet)
+        else:
+            self.p, rest = getMP(packet)
+            self.g, rest = getMP(rest)
+            self.x = _generateX(randbytes.secureRandom, 320)
+            self.e = _MPpow(self.g, self.x, self.p)
+            self.sendPacket(MSG_KEX_DH_GEX_INIT, self.e)
+
+
+    def _continueKEXDH_REPLY(self, ignored, pubKey, f, signature):
+        """
+        The host key has been verified, so we generate the keys.
+
+        @param pubKey: the public key blob for the server's public key.
+        @type pubKey: C{str}
+        @param f: the server's Diffie-Hellman public key.
+        @type f: C{long}
+        @param signature: the server's signature, verifying that it has the
+            correct private key.
+        @type signature: C{str}
+        """
+        serverKey = keys.Key.fromString(pubKey)
+        sharedSecret = _MPpow(f, self.x, DH_PRIME)
+        h = sha1()
+        h.update(NS(self.ourVersionString))
+        h.update(NS(self.otherVersionString))
+        h.update(NS(self.ourKexInitPayload))
+        h.update(NS(self.otherKexInitPayload))
+        h.update(NS(pubKey))
+        h.update(self.e)
+        h.update(MP(f))
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+        if not serverKey.verify(signature, exchangeHash):
+            self.sendDisconnect(DISCONNECT_KEY_EXCHANGE_FAILED,
+                                'bad signature')
+            return
+        self._keySetup(sharedSecret, exchangeHash)
+
+
+    def ssh_KEX_DH_GEX_REPLY(self, packet):
+        """
+        Called when we receive a MSG_KEX_DH_GEX_REPLY message.  Payload::
+            string server host key
+            integer f (server DH public key)
+
+        We verify the host key by calling verifyHostKey, then continue in
+        _continueGEX_REPLY.
+        """
+        pubKey, packet = getNS(packet)
+        f, packet = getMP(packet)
+        signature, packet = getNS(packet)
+        fingerprint = ':'.join(map(lambda c: '%02x' % ord(c),
+            md5(pubKey).digest()))
+        d = self.verifyHostKey(pubKey, fingerprint)
+        d.addCallback(self._continueGEX_REPLY, pubKey, f, signature)
+        d.addErrback(
+            lambda unused: self.sendDisconnect(
+                DISCONNECT_HOST_KEY_NOT_VERIFIABLE, 'bad host key'))
+        return d
+
+
+    def _continueGEX_REPLY(self, ignored, pubKey, f, signature):
+        """
+        The host key has been verified, so we generate the keys.
+
+        @param pubKey: the public key blob for the server's public key.
+        @type pubKey: C{str}
+        @param f: the server's Diffie-Hellman public key.
+        @type f: C{long}
+        @param signature: the server's signature, verifying that it has the
+            correct private key.
+        @type signature: C{str}
+        """
+        serverKey = keys.Key.fromString(pubKey)
+        sharedSecret = _MPpow(f, self.x, self.p)
+        h = sha1()
+        h.update(NS(self.ourVersionString))
+        h.update(NS(self.otherVersionString))
+        h.update(NS(self.ourKexInitPayload))
+        h.update(NS(self.otherKexInitPayload))
+        h.update(NS(pubKey))
+        h.update('\x00\x00\x08\x00')
+        h.update(MP(self.p))
+        h.update(MP(self.g))
+        h.update(self.e)
+        h.update(MP(f))
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+        if not serverKey.verify(signature, exchangeHash):
+            self.sendDisconnect(DISCONNECT_KEY_EXCHANGE_FAILED,
+                                'bad signature')
+            return
+        self._keySetup(sharedSecret, exchangeHash)
+
+
+    def _keySetup(self, sharedSecret, exchangeHash):
+        """
+        See SSHTransportBase._keySetup().
+        """
+        SSHTransportBase._keySetup(self, sharedSecret, exchangeHash)
+        if self._gotNewKeys:
+            self.ssh_NEWKEYS('')
+
+
+    def ssh_NEWKEYS(self, packet):
+        """
+        Called when we receive a MSG_NEWKEYS message.  No payload.
+        If we've finished setting up our own keys, start using them.
+        Otherwise, remember that we've received this message.
+        """
+        if packet != '':
+            self.sendDisconnect(DISCONNECT_PROTOCOL_ERROR,
+                                "NEWKEYS takes no data")
+            return
+        if not self.nextEncryptions.encBlockSize:
+            self._gotNewKeys = 1
+            return
+        self._newKeys()
+        self.connectionSecure()
+
+
+    def ssh_SERVICE_ACCEPT(self, packet):
+        """
+        Called when we receive a MSG_SERVICE_ACCEPT message.  Payload::
+            string service name
+
+        Start the service we requested.
+        """
+        if packet == '':
+            log.msg('got SERVICE_ACCEPT without payload')
+        else:
+            name = getNS(packet)[0]
+            if name != self.instance.name:
+                self.sendDisconnect(
+                    DISCONNECT_PROTOCOL_ERROR,
+                    "received accept for service we did not request")
+        self.setService(self.instance)
+
+
+    def requestService(self, instance):
+        """
+        Request that a service be run over this transport.
+
+        @type instance: subclass of L{twisted.conch.ssh.service.SSHService}
+        """
+        self.sendPacket(MSG_SERVICE_REQUEST, NS(instance.name))
+        self.instance = instance
+
+
+    # client methods
+    def verifyHostKey(self, hostKey, fingerprint):
+        """
+        Returns a Deferred that gets a callback if it is a valid key, or
+        an errback if not.
+
+        @type hostKey:      C{str}
+        @type fingerprint:  C{str}
+        @rtype:             L{twisted.internet.defer.Deferred}
+        """
+        # return if it's good
+        return defer.fail(NotImplementedError())
+
+
+    def connectionSecure(self):
+        """
+        Called when the encryption has been set up.  Generally,
+        requestService() is called to run another service over the transport.
+        """
+        raise NotImplementedError()
+
+
+
+class _DummyCipher:
+    """
+    A cipher for the none encryption method.
+
+    @ivar block_size: the block size of the encryption.  In the case of the
+    none cipher, this is 8 bytes.
+    """
+    block_size = 8
+
+
+    def encrypt(self, x):
+        return x
+
+
+    decrypt = encrypt
+
+
+class SSHCiphers:
+    """
+    SSHCiphers represents all the encryption operations that need to occur
+    to encrypt and authenticate the SSH connection.
+
+    @cvar cipherMap: A dictionary mapping SSH encryption names to 3-tuples of
+                     (<Crypto.Cipher.* name>, <key size>, <counter mode>)
+    @cvar macMap: A dictionary mapping SSH MAC names to hash modules.
+
+    @ivar outCipType: the string type of the outgoing cipher.
+    @ivar inCipType: the string type of the incoming cipher.
+    @ivar outMACType: the string type of the outgoing MAC.
+    @ivar inMACType: the string type of the incoming MAC.
+    @ivar encBlockSize: the block size of the outgoing cipher.
+    @ivar decBlockSize: the block size of the incoming cipher.
+    @ivar verifyDigestSize: the size of the incoming MAC.
+    @ivar outMAC: a tuple of (<hash module>, <inner key>, <outer key>,
+        <digest size>) representing the outgoing MAC.
+    @ivar inMAC: see outMAC, but for the incoming MAC.
+    """
+
+
+    cipherMap = {
+        '3des-cbc':('DES3', 24, 0),
+        'blowfish-cbc':('Blowfish', 16, 0),
+        'aes256-cbc':('AES', 32, 0),
+        'aes192-cbc':('AES', 24, 0),
+        'aes128-cbc':('AES', 16, 0),
+        'cast128-cbc':('CAST', 16, 0),
+        'aes128-ctr':('AES', 16, 1),
+        'aes192-ctr':('AES', 24, 1),
+        'aes256-ctr':('AES', 32, 1),
+        '3des-ctr':('DES3', 24, 1),
+        'blowfish-ctr':('Blowfish', 16, 1),
+        'cast128-ctr':('CAST', 16, 1),
+        'none':(None, 0, 0),
+    }
+    macMap = {
+        'hmac-sha1': sha1,
+        'hmac-md5': md5,
+        'none': None
+     }
+
+
+    def __init__(self, outCip, inCip, outMac, inMac):
+        self.outCipType = outCip
+        self.inCipType = inCip
+        self.outMACType = outMac
+        self.inMACType = inMac
+        self.encBlockSize = 0
+        self.decBlockSize = 0
+        self.verifyDigestSize = 0
+        self.outMAC = (None, '', '', 0)
+        self.inMAC = (None, '', '', 0)
+
+
+    def setKeys(self, outIV, outKey, inIV, inKey, outInteg, inInteg):
+        """
+        Set up the ciphers and hashes using the given keys.
+
+        @param outIV: the outgoing initialization vector
+        @param outKey: the outgoing encryption key
+        @param inIV: the incoming initialization vector
+        @param inKey: the incoming encryption key
+        @param outInteg: the outgoing integrity key
+        @param inInteg: the incoming integrity key.
+        """
+        o = self._getCipher(self.outCipType, outIV, outKey)
+        self.encrypt = o.encrypt
+        self.encBlockSize = o.block_size
+        o = self._getCipher(self.inCipType, inIV, inKey)
+        self.decrypt = o.decrypt
+        self.decBlockSize = o.block_size
+        self.outMAC = self._getMAC(self.outMACType, outInteg)
+        self.inMAC = self._getMAC(self.inMACType, inInteg)
+        if self.inMAC:
+            self.verifyDigestSize = self.inMAC[3]
+
+
+    def _getCipher(self, cip, iv, key):
+        """
+        Creates an initialized cipher object.
+
+        @param cip: the name of the cipher: maps into Crypto.Cipher.*
+        @param iv: the initialization vector
+        @param key: the encryption key
+        """
+        modName, keySize, counterMode = self.cipherMap[cip]
+        if not modName: # no cipher
+            return _DummyCipher()
+        mod = __import__('Crypto.Cipher.%s'%modName, {}, {}, 'x')
+        if counterMode:
+            return mod.new(key[:keySize], mod.MODE_CTR, iv[:mod.block_size],
+                           counter=_Counter(iv, mod.block_size))
+        else:
+            return mod.new(key[:keySize], mod.MODE_CBC, iv[:mod.block_size])
+
+
+    def _getMAC(self, mac, key):
+        """
+        Gets a 4-tuple representing the message authentication code.
+        (<hash module>, <inner hash value>, <outer hash value>,
+        <digest size>)
+
+        @param mac: a key mapping into macMap
+        @type mac: C{str}
+        @param key: the MAC key.
+        @type key: C{str}
+        """
+        mod = self.macMap[mac]
+        if not mod:
+            return (None, '', '', 0)
+        ds = mod().digest_size
+        key = key[:ds] + '\x00' * (64 - ds)
+        i = XOR.new('\x36').encrypt(key)
+        o = XOR.new('\x5c').encrypt(key)
+        return mod, i, o, ds
+
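+    # Illustrative sketch (not from the upstream module): the 0x36/0x5c inner
+    # and outer pads are the classic HMAC construction.  For MAC keys of
+    # exactly the digest size, as derived here, the pair built above yields
+    # the same result as the standard library hmac module:
+    #
+    #     import hmac
+    #     from hashlib import sha1
+    #
+    #     key = 'K' * 20                              # digest-sized key
+    #     data = '\x00\x00\x00\x01' + 'packet data'   # seqid + packet
+    #     i = ''.join(chr(ord(c) ^ 0x36) for c in key + '\x00' * 44)
+    #     o = ''.join(chr(ord(c) ^ 0x5c) for c in key + '\x00' * 44)
+    #     manual = sha1(o + sha1(i + data).digest()).digest()
+    #     manual == hmac.new(key, data, sha1).digest()   # -> True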
+
+    def encrypt(self, blocks):
+        """
+        Encrypt blocks.  Overridden by the encrypt method of a
+        Crypto.Cipher.* object in setKeys().
+
+        @type blocks: C{str}
+        """
+        raise NotImplementedError()
+
+
+    def decrypt(self, blocks):
+        """
+        Decrypt blocks.  See encrypt().
+
+        @type blocks: C{str}
+        """
+        raise NotImplementedError()
+
+
+    def makeMAC(self, seqid, data):
+        """
+        Create a message authentication code (MAC) for the given packet using
+        the outgoing MAC values.
+
+        @param seqid: the sequence ID of the outgoing packet
+        @type seqid: C{int}
+        @param data: the data to create a MAC for
+        @type data: C{str}
+        @rtype: C{str}
+        """
+        if not self.outMAC[0]:
+            return ''
+        data = struct.pack('>L', seqid) + data
+        mod, i, o, ds = self.outMAC
+        inner = mod(i + data)
+        outer = mod(o + inner.digest())
+        return outer.digest()
+
+
+    def verify(self, seqid, data, mac):
+        """
+        Verify an incoming MAC using the incoming MAC values.  Return True
+        if the MAC is valid.
+
+        @param seqid: the sequence ID of the incoming packet
+        @type seqid: C{int}
+        @param data: the packet data to verify
+        @type data: C{str}
+        @param mac: the MAC sent with the packet
+        @type mac: C{str}
+        @rtype: C{bool}
+        """
+        if not self.inMAC[0]:
+            return mac == ''
+        data = struct.pack('>L', seqid) + data
+        mod, i, o, ds = self.inMAC
+        inner = mod(i + data)
+        outer = mod(o + inner.digest())
+        return mac == outer.digest()
+
+
+
+class _Counter:
+    """
+    Stateful counter which returns results packed in a byte string
+    """
+
+
+    def __init__(self, initialVector, blockSize):
+        """
+        @type initialVector: C{str}
+        @param initialVector: A byte string representing the initial counter
+                              value.
+        @type blockSize: C{int}
+        @param blockSize: The length of the output buffer, as well as the
+        number of bytes at the beginning of C{initialVector} to consider.
+        """
+        initialVector = initialVector[:blockSize]
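+        # The bogus 0xffffffff length prefix below makes getMP consume the
+        # whole (clamped) slice, i.e. it parses the IV as one big-endian
+        # integer.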
+        self.count = getMP('\xff\xff\xff\xff' + initialVector)[0]
+        self.blockSize = blockSize
+        self.count = Util.number.long_to_bytes(self.count - 1)
+        self.count = '\x00' * (self.blockSize - len(self.count)) + self.count
+        self.count = array.array('c', self.count)
+        self.len = len(self.count) - 1
+
+
+    def __call__(self):
+        """
+        Increment the counter and return the new value.
+        """
+        i = self.len
+        while i > -1:
+            self.count[i] = n = chr((ord(self.count[i]) + 1) % 256)
+            if n == '\x00':
+                i -= 1
+            else:
+                return self.count.tostring()
+
+        self.count = array.array('c', '\x00' * self.blockSize)
+        return self.count.tostring()
+
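+# Illustrative sketch (not from the upstream module): successive calls to a
+# _Counter return the IV, IV+1, IV+2, ... as fixed-width big-endian byte
+# strings, wrapping to zero after the all-0xff block.  An integer-based
+# equivalent (Python 2, matching this module):
+#
+#     def counter(iv):
+#         value = int(iv.encode('hex'), 16)
+#         size = len(iv)
+#         mask = (1 << (8 * size)) - 1
+#         while True:
+#             yield ('%0*x' % (2 * size, value)).decode('hex')
+#             value = (value + 1) & mask
+#
+#     gen = counter('\xff\xff\xff\xfe')
+#     gen.next()   # -> '\xff\xff\xff\xfe'
+#     gen.next()   # -> '\xff\xff\xff\xff'
+#     gen.next()   # -> '\x00\x00\x00\x00'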
+
+
+# Diffie-Hellman primes from Oakley Group 2 [RFC 2409]
+DH_PRIME = long('17976931348623159077083915679378745319786029604875601170644'
+'442368419718021615851936894783379586492554150218056548598050364644054819923'
+'910005079287700335581663922955313623907650873575991482257486257500742530207'
+'744771258955095793777842444242661733472762929938766870920560605027081084290'
+'7692932019128194467627007L')
+DH_GENERATOR = 2L
+
+
+
+MSG_DISCONNECT = 1
+MSG_IGNORE = 2
+MSG_UNIMPLEMENTED = 3
+MSG_DEBUG = 4
+MSG_SERVICE_REQUEST = 5
+MSG_SERVICE_ACCEPT = 6
+MSG_KEXINIT = 20
+MSG_NEWKEYS = 21
+MSG_KEXDH_INIT = 30
+MSG_KEXDH_REPLY = 31
+MSG_KEX_DH_GEX_REQUEST_OLD = 30
+MSG_KEX_DH_GEX_REQUEST = 34
+MSG_KEX_DH_GEX_GROUP = 31
+MSG_KEX_DH_GEX_INIT = 32
+MSG_KEX_DH_GEX_REPLY = 33
+
+
+
+DISCONNECT_HOST_NOT_ALLOWED_TO_CONNECT = 1
+DISCONNECT_PROTOCOL_ERROR = 2
+DISCONNECT_KEY_EXCHANGE_FAILED = 3
+DISCONNECT_RESERVED = 4
+DISCONNECT_MAC_ERROR = 5
+DISCONNECT_COMPRESSION_ERROR = 6
+DISCONNECT_SERVICE_NOT_AVAILABLE = 7
+DISCONNECT_PROTOCOL_VERSION_NOT_SUPPORTED = 8
+DISCONNECT_HOST_KEY_NOT_VERIFIABLE = 9
+DISCONNECT_CONNECTION_LOST = 10
+DISCONNECT_BY_APPLICATION = 11
+DISCONNECT_TOO_MANY_CONNECTIONS = 12
+DISCONNECT_AUTH_CANCELLED_BY_USER = 13
+DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE = 14
+DISCONNECT_ILLEGAL_USER_NAME = 15
+
+
+
+messages = {}
+for name, value in globals().items():
+    # Avoid legacy messages which overlap with newer ones
+    if name.startswith('MSG_') and not name.startswith('MSG_KEXDH_'):
+        messages[value] = name
+# Check for regressions (#5352)
+if 'MSG_KEXDH_INIT' in messages or 'MSG_KEXDH_REPLY' in messages:
+    raise RuntimeError(
+        "legacy SSH mnemonics should not end up in messages dict")
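+# Illustrative sketch (not from the upstream module): the resulting table maps
+# message numbers to mnemonics, and dispatchMessage() strips the 'MSG_' prefix
+# to locate the handler method:
+#
+#     messages[20]        # -> 'MSG_KEXINIT'
+#     messages[20][4:]    # -> 'KEXINIT', so the handler is ssh_KEXINIT
+#     messages[30]        # -> 'MSG_KEX_DH_GEX_REQUEST_OLD' (the overlapping
+#                         #    legacy MSG_KEXDH_* names are filtered out)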
diff --git a/ThirdParty/Twisted/twisted/conch/ssh/userauth.py b/ThirdParty/Twisted/twisted/conch/ssh/userauth.py
new file mode 100644
index 0000000..65c0ef0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ssh/userauth.py
@@ -0,0 +1,848 @@
+# -*- test-case-name: twisted.conch.test.test_userauth -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation of the ssh-userauth service.
+Currently implemented authentication types are public-key and password.
+
+Maintainer: Paul Swartz
+"""
+
+import struct, warnings
+from twisted.conch import error, interfaces
+from twisted.conch.ssh import keys, transport, service
+from twisted.conch.ssh.common import NS, getNS
+from twisted.cred import credentials
+from twisted.cred.error import UnauthorizedLogin
+from twisted.internet import defer, reactor
+from twisted.python import failure, log
+
+
+
+class SSHUserAuthServer(service.SSHService):
+    """
+    A service implementing the server side of the 'ssh-userauth' service.  It
+    is used to authenticate the user on the other side as being able to access
+    this server.
+
+    @ivar name: the name of this service: 'ssh-userauth'
+    @type name: C{str}
+    @ivar authenticatedWith: a list of authentication methods that have
+        already been used.
+    @type authenticatedWith: C{list}
+    @ivar loginTimeout: the number of seconds we wait before disconnecting
+        the user for taking too long to authenticate
+    @type loginTimeout: C{int}
+    @ivar attemptsBeforeDisconnect: the number of failed login attempts we
+        allow before disconnecting.
+    @type attemptsBeforeDisconnect: C{int}
+    @ivar loginAttempts: the number of login attempts that have been made
+    @type loginAttempts: C{int}
+    @ivar passwordDelay: the number of seconds to delay when the user gives
+        an incorrect password
+    @type passwordDelay: C{int}
+    @ivar interfaceToMethod: a C{dict} mapping credential interfaces to
+        authentication methods.  The server checks to see which of the
+        cred interfaces have checkers and tells the client that those methods
+        are valid for authentication.
+    @type interfaceToMethod: C{dict}
+    @ivar supportedAuthentications: A list of the supported authentication
+        methods.
+    @type supportedAuthentications: C{list} of C{str}
+    @ivar user: the last username the client tried to authenticate with
+    @type user: C{str}
+    @ivar method: the current authentication method
+    @type method: C{str}
+    @ivar nextService: the service the user wants started after authentication
+        has been completed.
+    @type nextService: C{str}
+    @ivar portal: the L{twisted.cred.portal.Portal} we are using for
+        authentication
+    @type portal: L{twisted.cred.portal.Portal}
+    @ivar clock: an object with a callLater method.  Stubbed out for testing.
+    """
+
+
+    name = 'ssh-userauth'
+    loginTimeout = 10 * 60 * 60
+    # 10 * 60 * 60 seconds == 10 hours before we disconnect them
+    attemptsBeforeDisconnect = 20
+    # 20 login attempts before a disconnect
+    passwordDelay = 1 # number of seconds to delay on a failed password
+    clock = reactor
+    interfaceToMethod = {
+        credentials.ISSHPrivateKey : 'publickey',
+        credentials.IUsernamePassword : 'password',
+        credentials.IPluggableAuthenticationModules : 'keyboard-interactive',
+    }
+
+
+    def serviceStarted(self):
+        """
+        Called when the userauth service is started.  Set up instance
+        variables, check if we should allow password/keyboard-interactive
+        authentication (only allow if the outgoing connection is encrypted) and
+        set up a login timeout.
+        """
+        self.authenticatedWith = []
+        self.loginAttempts = 0
+        self.user = None
+        self.nextService = None
+        self._pamDeferred = None
+        self.portal = self.transport.factory.portal
+
+        self.supportedAuthentications = []
+        for i in self.portal.listCredentialsInterfaces():
+            if i in self.interfaceToMethod:
+                self.supportedAuthentications.append(self.interfaceToMethod[i])
+
+        if not self.transport.isEncrypted('in'):
+            # don't let us transport password in plaintext
+            if 'password' in self.supportedAuthentications:
+                self.supportedAuthentications.remove('password')
+            if 'keyboard-interactive' in self.supportedAuthentications:
+                self.supportedAuthentications.remove('keyboard-interactive')
+        self._cancelLoginTimeout = self.clock.callLater(
+            self.loginTimeout,
+            self.timeoutAuthentication)
+
+
+    def serviceStopped(self):
+        """
+        Called when the userauth service is stopped.  Cancel the login timeout
+        if it's still going.
+        """
+        if self._cancelLoginTimeout:
+            self._cancelLoginTimeout.cancel()
+            self._cancelLoginTimeout = None
+
+
+    def timeoutAuthentication(self):
+        """
+        Called when the user has timed out on authentication.  Disconnect
+        with a DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE message.
+        """
+        self._cancelLoginTimeout = None
+        self.transport.sendDisconnect(
+            transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
+            'you took too long')
+
+
+    def tryAuth(self, kind, user, data):
+        """
+        Try to authenticate the user with the given method.  Dispatches to a
+        auth_* method.
+
+        @param kind: the authentication method to try.
+        @type kind: C{str}
+        @param user: the username the client is authenticating with.
+        @type user: C{str}
+        @param data: authentication specific data sent by the client.
+        @type data: C{str}
+        @return: A Deferred called back if the method succeeded, or erred back
+            if it failed.
+        @rtype: C{defer.Deferred}
+        """
+        log.msg('%s trying auth %s' % (user, kind))
+        if kind not in self.supportedAuthentications:
+            return defer.fail(
+                    error.ConchError('unsupported authentication, failing'))
+        kind = kind.replace('-', '_')
+        f = getattr(self, 'auth_%s' % kind, None)
+        if f:
+            ret = f(data)
+            if not ret:
+                return defer.fail(
+                        error.ConchError('%s returned None instead of a Deferred'
+                            % kind))
+            else:
+                return ret
+        return defer.fail(error.ConchError('bad auth type: %s' % kind))
+
+
+    def ssh_USERAUTH_REQUEST(self, packet):
+        """
+        The client has requested authentication.  Payload::
+            string user
+            string next service
+            string method
+            <authentication specific data>
+
+        @type packet: C{str}
+        """
+        user, nextService, method, rest = getNS(packet, 3)
+        if user != self.user or nextService != self.nextService:
+            self.authenticatedWith = [] # clear auth state
+        self.user = user
+        self.nextService = nextService
+        self.method = method
+        d = self.tryAuth(method, user, rest)
+        if not d:
+            self._ebBadAuth(
+                failure.Failure(error.ConchError('auth returned none')))
+            return
+        d.addCallback(self._cbFinishedAuth)
+        d.addErrback(self._ebMaybeBadAuth)
+        d.addErrback(self._ebBadAuth)
+        return d
+
+
+    def _cbFinishedAuth(self, (interface, avatar, logout)):
+        """
+        The callback when user has successfully been authenticated.  For a
+        description of the arguments, see L{twisted.cred.portal.Portal.login}.
+        We start the service requested by the user.
+        """
+        self.transport.avatar = avatar
+        self.transport.logoutFunction = logout
+        service = self.transport.factory.getService(self.transport,
+                self.nextService)
+        if not service:
+            raise error.ConchError('could not get next service: %s'
+                                  % self.nextService)
+        log.msg('%s authenticated with %s' % (self.user, self.method))
+        self.transport.sendPacket(MSG_USERAUTH_SUCCESS, '')
+        self.transport.setService(service())
+
+
+    def _ebMaybeBadAuth(self, reason):
+        """
+        An intermediate errback.  If the reason is
+        error.NotEnoughAuthentication, we send a MSG_USERAUTH_FAILURE, but
+        with the partial success indicator set.
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+        reason.trap(error.NotEnoughAuthentication)
+        self.transport.sendPacket(MSG_USERAUTH_FAILURE,
+                NS(','.join(self.supportedAuthentications)) + '\xff')
+
+
+    def _ebBadAuth(self, reason):
+        """
+        The final errback in the authentication chain.  If the reason is
+        error.IgnoreAuthentication, we simply return; the authentication
+        method has sent its own response.  Otherwise, send a failure message
+        and (if the method is not 'none') increment the number of login
+        attempts.
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+        if reason.check(error.IgnoreAuthentication):
+            return
+        if self.method != 'none':
+            log.msg('%s failed auth %s' % (self.user, self.method))
+            if reason.check(UnauthorizedLogin):
+                log.msg('unauthorized login: %s' % reason.getErrorMessage())
+            elif reason.check(error.ConchError):
+                log.msg('reason: %s' % reason.getErrorMessage())
+            else:
+                log.msg(reason.getTraceback())
+            self.loginAttempts += 1
+            if self.loginAttempts > self.attemptsBeforeDisconnect:
+                self.transport.sendDisconnect(
+                        transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
+                        'too many bad auths')
+                return
+        self.transport.sendPacket(
+                MSG_USERAUTH_FAILURE,
+                NS(','.join(self.supportedAuthentications)) + '\x00')
+
+
+    def auth_publickey(self, packet):
+        """
+        Public key authentication.  Payload::
+            byte has signature
+            string algorithm name
+            string key blob
+            [string signature] (if has signature is True)
+
+        Create a SSHPublicKey credential and verify it using our portal.
+        """
+        hasSig = ord(packet[0])
+        algName, blob, rest = getNS(packet[1:], 2)
+        pubKey = keys.Key.fromString(blob)
+        signature = hasSig and getNS(rest)[0] or None
+        if hasSig:
+            b = (NS(self.transport.sessionID) + chr(MSG_USERAUTH_REQUEST) +
+                NS(self.user) + NS(self.nextService) + NS('publickey') +
+                chr(hasSig) +  NS(pubKey.sshType()) + NS(blob))
+            c = credentials.SSHPrivateKey(self.user, algName, blob, b,
+                    signature)
+            return self.portal.login(c, None, interfaces.IConchUser)
+        else:
+            c = credentials.SSHPrivateKey(self.user, algName, blob, None, None)
+            return self.portal.login(c, None,
+                    interfaces.IConchUser).addErrback(self._ebCheckKey,
+                            packet[1:])
+
+
+    def _ebCheckKey(self, reason, packet):
+        """
+        Called back if the user did not send a signature.  If reason is
+        error.ValidPublicKey then this key is valid for the user to
+        authenticate with.  Send MSG_USERAUTH_PK_OK.
+        """
+        reason.trap(error.ValidPublicKey)
+        # if we make it here, it means that the publickey is valid
+        self.transport.sendPacket(MSG_USERAUTH_PK_OK, packet)
+        return failure.Failure(error.IgnoreAuthentication())
+
+
+    def auth_password(self, packet):
+        """
+        Password authentication.  Payload::
+            string password
+
+        Make a UsernamePassword credential and verify it with our portal.
+        """
+        password = getNS(packet[1:])[0]
+        c = credentials.UsernamePassword(self.user, password)
+        return self.portal.login(c, None, interfaces.IConchUser).addErrback(
+                                                        self._ebPassword)
+
+
+    def _ebPassword(self, f):
+        """
+        If the password is invalid, wait before sending the failure in order
+        to delay brute-force password guessing.
+        """
+        d = defer.Deferred()
+        self.clock.callLater(self.passwordDelay, d.callback, f)
+        return d
+
+
+    def auth_keyboard_interactive(self, packet):
+        """
+        Keyboard interactive authentication.  No payload.  We create a
+        PluggableAuthenticationModules credential and authenticate with our
+        portal.
+        """
+        if self._pamDeferred is not None:
+            self.transport.sendDisconnect(
+                    transport.DISCONNECT_PROTOCOL_ERROR,
+                    "only one keyboard interactive attempt at a time")
+            return defer.fail(error.IgnoreAuthentication())
+        c = credentials.PluggableAuthenticationModules(self.user,
+                self._pamConv)
+        return self.portal.login(c, None, interfaces.IConchUser)
+
+
+    def _pamConv(self, items):
+        """
+        Convert a list of PAM authentication questions into a
+        MSG_USERAUTH_INFO_REQUEST.  Returns a Deferred that will be called
+        back when the user has responses to the questions.
+
+        @param items: a list of 2-tuples (message, kind).  We only care about
+            kinds 1 (password) and 2 (text).
+        @type items: C{list}
+        @rtype: L{defer.Deferred}
+        """
+        resp = []
+        for message, kind in items:
+            if kind == 1: # password
+                resp.append((message, 0))
+            elif kind == 2: # text
+                resp.append((message, 1))
+            elif kind in (3, 4):
+                return defer.fail(error.ConchError(
+                    'cannot handle PAM 3 or 4 messages'))
+            else:
+                return defer.fail(error.ConchError(
+                    'bad PAM auth kind %i' % kind))
+        packet = NS('') + NS('') + NS('')
+        packet += struct.pack('>L', len(resp))
+        for prompt, echo in resp:
+            packet += NS(prompt)
+            packet += chr(echo)
+        self.transport.sendPacket(MSG_USERAUTH_INFO_REQUEST, packet)
+        self._pamDeferred = defer.Deferred()
+        return self._pamDeferred
+
+
+    def ssh_USERAUTH_INFO_RESPONSE(self, packet):
+        """
+        The user has responded with answers to PAM's authentication questions.
+        Parse the packet into a PAM response and callback self._pamDeferred.
+        Payload::
+            uint32 number of responses
+            string response 1
+            ...
+            string response n
+        """
+        d, self._pamDeferred = self._pamDeferred, None
+
+        try:
+            resp = []
+            numResps = struct.unpack('>L', packet[:4])[0]
+            packet = packet[4:]
+            while len(resp) < numResps:
+                response, packet = getNS(packet)
+                resp.append((response, 0))
+            if packet:
+                raise error.ConchError("%i bytes of extra data" % len(packet))
+        except:
+            d.errback(failure.Failure())
+        else:
+            d.callback(resp)
+
+
+
+class SSHUserAuthClient(service.SSHService):
+    """
+    A service implementing the client side of 'ssh-userauth'.
+
+    @ivar name: the name of this service: 'ssh-userauth'
+    @type name: C{str}
+    @ivar preferredOrder: a list of authentication methods we support, in
+        order of preference.  The client will try authentication methods in
+        this order, making callbacks for information when necessary.
+    @type preferredOrder: C{list}
+    @ivar user: the name of the user to authenticate as
+    @type user: C{str}
+    @ivar instance: the service to start after authentication has finished
+    @type instance: L{service.SSHService}
+    @ivar authenticatedWith: a list of strings of authentication methods we've tried
+    @type authenticatedWith: C{list} of C{str}
+    @ivar triedPublicKeys: a list of public key objects that we've tried to
+        authenticate with
+    @type triedPublicKeys: C{list} of L{Key}
+    @ivar lastPublicKey: the last public key object we've tried to authenticate
+        with
+    @type lastPublicKey: L{Key}
+    """
+
+
+    name = 'ssh-userauth'
+    preferredOrder = ['publickey', 'password', 'keyboard-interactive']
+
+
+    def __init__(self, user, instance):
+        self.user = user
+        self.instance = instance
+
+
+    def serviceStarted(self):
+        self.authenticatedWith = []
+        self.triedPublicKeys = []
+        self.lastPublicKey = None
+        self.askForAuth('none', '')
+
+
+    def askForAuth(self, kind, extraData):
+        """
+        Send a MSG_USERAUTH_REQUEST.
+
+        @param kind: the authentication method to try.
+        @type kind: C{str}
+        @param extraData: method-specific data to go in the packet
+        @type extraData: C{str}
+        """
+        self.lastAuth = kind
+        self.transport.sendPacket(MSG_USERAUTH_REQUEST, NS(self.user) +
+                NS(self.instance.name) + NS(kind) + extraData)
+
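+    # Illustrative sketch (not part of the upstream module): the
+    # MSG_USERAUTH_REQUEST payload built above is three SSH strings followed
+    # by method-specific data; for password authentication with a
+    # hypothetical user name and password it would look like
+    #
+    #   NS('alice') + NS('ssh-connection') + NS('password') + '\x00' + NS('secret')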
+
+    def tryAuth(self, kind):
+        """
+        Dispatch to an authentication method.
+
+        @param kind: the authentication method
+        @type kind: C{str}
+        """
+        kind = kind.replace('-', '_')
+        log.msg('trying to auth with %s' % (kind,))
+        f = getattr(self, 'auth_%s' % (kind,), None)
+        if f:
+            return f()
+
+
+    def _ebAuth(self, ignored, *args):
+        """
+        Generic callback for a failed authentication attempt.  Respond by
+        asking for the list of accepted methods (the 'none' method).
+        """
+        self.askForAuth('none', '')
+
+
+    def ssh_USERAUTH_SUCCESS(self, packet):
+        """
+        We received a MSG_USERAUTH_SUCCESS.  The server has accepted our
+        authentication, so start the next service.
+        """
+        self.transport.setService(self.instance)
+
+
+    def ssh_USERAUTH_FAILURE(self, packet):
+        """
+        We received a MSG_USERAUTH_FAILURE.  Payload::
+            string methods
+            byte partial success
+
+        If partial success is C{True}, then the previous method succeeded but is
+        not sufficient for authentication. C{methods} is a comma-separated list
+        of accepted authentication methods.
+
+        We sort the list of methods by their position in C{self.preferredOrder},
+        removing methods that have already succeeded. We then call
+        C{self.tryAuth} with the most preferred method.
+
+        @param packet: the L{MSG_USERAUTH_FAILURE} payload.
+        @type packet: C{str}
+
+        @return: a L{defer.Deferred} that will be called back with C{None} as
+            soon as all authentication methods have been tried, or C{None} if no
+            more authentication methods are available.
+        @rtype: L{defer.Deferred} or C{None}
+        """
+        canContinue, partial = getNS(packet)
+        partial = ord(partial)
+        if partial:
+            self.authenticatedWith.append(self.lastAuth)
+
+        def orderByPreference(meth):
+            """
+            Invoked once per authentication method in order to extract a
+            comparison key which is then used for sorting.
+
+            @param meth: the authentication method.
+            @type meth: C{str}
+
+            @return: the comparison key for C{meth}.
+            @rtype: C{int}
+            """
+            if meth in self.preferredOrder:
+                return self.preferredOrder.index(meth)
+            else:
+                # put the element at the end of the list.
+                return len(self.preferredOrder)
+
+        canContinue = sorted([meth for meth in canContinue.split(',')
+                              if meth not in self.authenticatedWith],
+                             key=orderByPreference)
+
+        log.msg('can continue with: %s' % canContinue)
+        return self._cbUserauthFailure(None, iter(canContinue))
+
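+    # Illustrative sketch (not part of the upstream module): with the default
+    # preferredOrder, the key function above sorts a server-supplied method
+    # list like this (unknown methods sort last):
+    #
+    #   sorted(['password', 'hostbased', 'publickey'], key=orderByPreference)
+    #   # -> ['publickey', 'password', 'hostbased']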
+
+    def _cbUserauthFailure(self, result, iterator):
+        if result:
+            return
+        try:
+            method = iterator.next()
+        except StopIteration:
+            self.transport.sendDisconnect(
+                transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE,
+                'no more authentication methods available')
+        else:
+            d = defer.maybeDeferred(self.tryAuth, method)
+            d.addCallback(self._cbUserauthFailure, iterator)
+            return d
+
+
+    def ssh_USERAUTH_PK_OK(self, packet):
+        """
+        This message (number 60) can mean several different things depending
+        on the current authentication type.  We dispatch to individual methods
+        in order to handle this request.
+        """
+        func = getattr(self, 'ssh_USERAUTH_PK_OK_%s' %
+                       self.lastAuth.replace('-', '_'), None)
+        if func is not None:
+            return func(packet)
+        else:
+            self.askForAuth('none', '')
+
+
+    def ssh_USERAUTH_PK_OK_publickey(self, packet):
+        """
+        This is MSG_USERAUTH_PK_OK.  Our public key is valid, so we create a
+        signature and try to authenticate with it.
+        """
+        publicKey = self.lastPublicKey
+        b = (NS(self.transport.sessionID) + chr(MSG_USERAUTH_REQUEST) +
+             NS(self.user) + NS(self.instance.name) + NS('publickey') +
+             '\x01' + NS(publicKey.sshType()) + NS(publicKey.blob()))
+        d  = self.signData(publicKey, b)
+        if not d:
+            self.askForAuth('none', '')
+            # this will fail, we'll move on
+            return
+        d.addCallback(self._cbSignedData)
+        d.addErrback(self._ebAuth)
+
+
+    def ssh_USERAUTH_PK_OK_password(self, packet):
+        """
+        This is MSG_USERAUTH_PASSWD_CHANGEREQ.  The password given has expired.
+        We ask for an old password and a new password, then send both back to
+        the server.
+        """
+        prompt, language, rest = getNS(packet, 2)
+        self._oldPass = self._newPass = None
+        d = self.getPassword('Old Password: ')
+        d = d.addCallbacks(self._setOldPass, self._ebAuth)
+        d.addCallback(lambda ignored: self.getPassword(prompt))
+        d.addCallbacks(self._setNewPass, self._ebAuth)
+
+
+    def ssh_USERAUTH_PK_OK_keyboard_interactive(self, packet):
+        """
+        This is MSG_USERAUTH_INFO_REQUEST.  The server has sent us the
+        questions it wants us to answer, so we ask the user and send the
+        responses.
+        """
+        name, instruction, lang, data = getNS(packet, 3)
+        numPrompts = struct.unpack('!L', data[:4])[0]
+        data = data[4:]
+        prompts = []
+        for i in range(numPrompts):
+            prompt, data = getNS(data)
+            echo = bool(ord(data[0]))
+            data = data[1:]
+            prompts.append((prompt, echo))
+        d = self.getGenericAnswers(name, instruction, prompts)
+        d.addCallback(self._cbGenericAnswers)
+        d.addErrback(self._ebAuth)
+
+
+    def _cbSignedData(self, signedData):
+        """
+        Called back out of self.signData with the signed data.  Send the
+        authentication request with the signature.
+
+        @param signedData: the data signed by the user's private key.
+        @type signedData: C{str}
+        """
+        publicKey = self.lastPublicKey
+        self.askForAuth('publickey', '\x01' + NS(publicKey.sshType()) +
+                NS(publicKey.blob()) + NS(signedData))
+
+
+    def _setOldPass(self, op):
+        """
+        Called back when we are choosing a new password.  Simply store the old
+        password for now.
+
+        @param op: the old password as entered by the user
+        @type op: C{str}
+        """
+        self._oldPass = op
+
+
+    def _setNewPass(self, np):
+        """
+        Called back when we are choosing a new password.  Get the old password
+        and send the authentication message with both.
+
+        @param np: the new password as entered by the user
+        @type np: C{str}
+        """
+        op = self._oldPass
+        self._oldPass = None
+        self.askForAuth('password', '\xff' + NS(op) + NS(np))
+
+
+    def _cbGenericAnswers(self, responses):
+        """
+        Called back when we are finished answering keyboard-interactive
+        questions.  Send the info back to the server in a
+        MSG_USERAUTH_INFO_RESPONSE.
+
+        @param responses: a list of C{str} responses
+        @type responses: C{list}
+        """
+        data = struct.pack('!L', len(responses))
+        for r in responses:
+            data += NS(r.encode('UTF8'))
+        self.transport.sendPacket(MSG_USERAUTH_INFO_RESPONSE, data)
+
+
+    def auth_publickey(self):
+        """
+        Try to authenticate with a public key.  Ask the user for a public key;
+        if the user has one, send the request to the server.  The returned
+        Deferred fires with True if a key was sent and False otherwise.
+
+        @rtype: L{defer.Deferred} firing with C{bool}
+        """
+        d = defer.maybeDeferred(self.getPublicKey)
+        d.addBoth(self._cbGetPublicKey)
+        return d
+
+
+    def _cbGetPublicKey(self, publicKey):
+        if isinstance(publicKey, str):
+            warnings.warn("Returning a string from "
+                          "SSHUserAuthClient.getPublicKey() is deprecated "
+                          "since Twisted 9.0.  Return a keys.Key() instead.",
+                          DeprecationWarning)
+            publicKey = keys.Key.fromString(publicKey)
+        if not isinstance(publicKey, keys.Key): # failure or None
+            publicKey = None
+        if publicKey is not None:
+            self.lastPublicKey = publicKey
+            self.triedPublicKeys.append(publicKey)
+            log.msg('using key of type %s' % publicKey.type())
+            self.askForAuth('publickey', '\x00' + NS(publicKey.sshType()) +
+                            NS(publicKey.blob()))
+            return True
+        else:
+            return False
+
+
+    def auth_password(self):
+        """
+        Try to authenticate with a password.  Ask the user for a password.
+        If the user provides one (L{getPassword} returns a Deferred), return
+        True.  Otherwise, return False.
+
+        @rtype: C{bool}
+        """
+        d = self.getPassword()
+        if d:
+            d.addCallbacks(self._cbPassword, self._ebAuth)
+            return True
+        else: # returned None, don't do password auth
+            return False
+
+
+    def auth_keyboard_interactive(self):
+        """
+        Try to authenticate with keyboard-interactive authentication.  Send
+        the request to the server and return True.
+
+        @rtype: C{bool}
+        """
+        log.msg('authing with keyboard-interactive')
+        self.askForAuth('keyboard-interactive', NS('') + NS(''))
+        return True
+
+
+    def _cbPassword(self, password):
+        """
+        Called back when the user gives a password.  Send the request to the
+        server.
+
+        @param password: the password the user entered
+        @type password: C{str}
+        """
+        self.askForAuth('password', '\x00' + NS(password))
+
+
+    def signData(self, publicKey, signData):
+        """
+        Sign the given data with the given public key.
+
+        By default, this will call getPrivateKey to get the private key,
+        then sign the data using Key.sign().
+
+        This method is factored out so that it can be overridden to use
+        alternate methods, such as a key agent.
+
+        @param publicKey: The public key object returned from L{getPublicKey}
+        @type publicKey: L{keys.Key}
+
+        @param signData: the data to be signed by the private key.
+        @type signData: C{str}
+        @return: a Deferred that's called back with the signature
+        @rtype: L{defer.Deferred}
+        """
+        key = self.getPrivateKey()
+        if not key:
+            return
+        return key.addCallback(self._cbSignData, signData)
+
+
+    def _cbSignData(self, privateKey, signData):
+        """
+        Called back when the private key is returned.  Sign the data and
+        return the signature.
+
+        @param privateKey: the private key object
+        @type privateKey: L{keys.Key}
+        @param signData: the data to be signed by the private key.
+        @type signData: C{str}
+        @return: the signature
+        @rtype: C{str}
+        """
+        if not isinstance(privateKey, keys.Key):
+            warnings.warn("Returning a PyCrypto key object from "
+                          "SSHUserAuthClient.getPrivateKey() is deprecated "
+                          "since Twisted 9.0.  Return a keys.Key() instead.",
+                          DeprecationWarning)
+            privateKey = keys.Key(privateKey)
+        return privateKey.sign(signData)
+
+
+    def getPublicKey(self):
+        """
+        Return a public key for the user.  If no more public keys are
+        available, return C{None}.
+
+        This implementation always returns C{None}.  Override it in a
+        subclass to actually find and return a public key object.
+
+        @rtype: L{Key} or L{NoneType}
+        """
+        return None
+
+
+    def getPrivateKey(self):
+        """
+        Return a L{Deferred} that will be called back with the private key
+        object corresponding to the last public key from getPublicKey().
+        If the private key is not available, errback on the Deferred.
+
+        @rtype: L{Deferred} called back with L{Key}
+        """
+        return defer.fail(NotImplementedError())
+
+
+    def getPassword(self, prompt = None):
+        """
+        Return a L{Deferred} that will be called back with a password.
+        prompt is a string to display for the password, or None for a generic
+        'user@hostname's password: '.
+
+        @type prompt: C{str}/C{None}
+        @rtype: L{defer.Deferred}
+        """
+        return defer.fail(NotImplementedError())
+
+
+    def getGenericAnswers(self, name, instruction, prompts):
+        """
+        Returns a L{Deferred} with the responses to the prompts.
+
+        @param name: The name of the authentication currently in progress.
+        @param instruction: Describes what the authentication wants.
+        @param prompts: A list of (prompt, echo) pairs, where prompt is a
+            string to display and echo is a boolean indicating whether the
+            user's response should be echoed as they type it.
+        """
+        return defer.fail(NotImplementedError())
+
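+# Illustrative sketch (not part of the upstream module): a minimal client
+# subclass supplying credentials; the class name and the password value are
+# hypothetical.
+#
+#   class ExampleAuthClient(SSHUserAuthClient):
+#       def getPassword(self, prompt=None):
+#           return defer.succeed('secret')   # never prompt interactively
+#
+#       def getPublicKey(self):
+#           return None                      # skip publickey authentication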
+
+MSG_USERAUTH_REQUEST          = 50
+MSG_USERAUTH_FAILURE          = 51
+MSG_USERAUTH_SUCCESS          = 52
+MSG_USERAUTH_BANNER           = 53
+MSG_USERAUTH_INFO_RESPONSE    = 61
+MSG_USERAUTH_PK_OK            = 60
+
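+# Build a reverse mapping from message numbers to their MSG_* names; it is
+# attached to both services below as C{protocolMessages} so that packet
+# numbers can be shown by name in log output.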
+messages = {}
+for k, v in locals().items():
+    if k[:4]=='MSG_':
+        messages[v] = k
+
+SSHUserAuthServer.protocolMessages = messages
+SSHUserAuthClient.protocolMessages = messages
+del messages
+del v
+
+# Doubles, not included in the protocols' mappings
+MSG_USERAUTH_PASSWD_CHANGEREQ = 60
+MSG_USERAUTH_INFO_REQUEST     = 60
diff --git a/ThirdParty/Twisted/twisted/conch/stdio.py b/ThirdParty/Twisted/twisted/conch/stdio.py
new file mode 100644
index 0000000..c45fc3b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/stdio.py
@@ -0,0 +1,95 @@
+# -*- test-case-name: twisted.conch.test.test_manhole -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Asynchronous local terminal input handling
+
+@author: Jp Calderone
+"""
+
+import os, tty, sys, termios
+
+from twisted.internet import reactor, stdio, protocol, defer
+from twisted.python import failure, reflect, log
+
+from twisted.conch.insults.insults import ServerProtocol
+from twisted.conch.manhole import ColoredManhole
+
+class UnexpectedOutputError(Exception):
+    pass
+
+class TerminalProcessProtocol(protocol.ProcessProtocol):
+    def __init__(self, proto):
+        self.proto = proto
+        self.onConnection = defer.Deferred()
+
+    def connectionMade(self):
+        self.proto.makeConnection(self)
+        self.onConnection.callback(None)
+        self.onConnection = None
+
+    def write(self, bytes):
+        self.transport.write(bytes)
+
+    def outReceived(self, bytes):
+        self.proto.dataReceived(bytes)
+
+    def errReceived(self, bytes):
+        self.transport.loseConnection()
+        if self.proto is not None:
+            self.proto.connectionLost(failure.Failure(UnexpectedOutputError(bytes)))
+            self.proto = None
+
+    def childConnectionLost(self, childFD):
+        if self.proto is not None:
+            self.proto.childConnectionLost(childFD)
+
+    def processEnded(self, reason):
+        if self.proto is not None:
+            self.proto.connectionLost(reason)
+            self.proto = None
+
+
+
+class ConsoleManhole(ColoredManhole):
+    """
+    A manhole protocol specifically for use with L{stdio.StandardIO}.
+    """
+    def connectionLost(self, reason):
+        """
+        When the connection is lost, there is nothing more to do.  Stop the
+        reactor so that the process can exit.
+        """
+        reactor.stop()
+
+
+
+def runWithProtocol(klass):
+    fd = sys.__stdin__.fileno()
+    oldSettings = termios.tcgetattr(fd)
+    tty.setraw(fd)
+    try:
+        p = ServerProtocol(klass)
+        stdio.StandardIO(p)
+        reactor.run()
+    finally:
+        termios.tcsetattr(fd, termios.TCSANOW, oldSettings)
+        os.write(fd, "\r\x1bc\r")
+
+
+
+def main(argv=None):
+    log.startLogging(file('child.log', 'w'))
+
+    if argv is None:
+        argv = sys.argv[1:]
+    if argv:
+        klass = reflect.namedClass(argv[0])
+    else:
+        klass = ConsoleManhole
+    runWithProtocol(klass)
+
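+# Illustrative usage (not part of the upstream module): running this file
+# directly starts a ConsoleManhole on the local terminal; a fully qualified
+# protocol class name may be passed instead, e.g.
+#
+#   python stdio.py twisted.conch.manhole.ColoredManhole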
+
+if __name__ == '__main__':
+    main()
diff --git a/ThirdParty/Twisted/twisted/conch/tap.py b/ThirdParty/Twisted/twisted/conch/tap.py
new file mode 100644
index 0000000..5e58699
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/tap.py
@@ -0,0 +1,92 @@
+# -*- test-case-name: twisted.conch.test.test_tap -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Support module for making SSH servers with twistd.
+"""
+
+from twisted.conch import unix
+from twisted.conch import checkers as conch_checkers
+from twisted.conch.openssh_compat import factory
+from twisted.cred import portal, checkers, strcred
+from twisted.python import usage
+from twisted.application import strports
+try:
+    from twisted.cred import pamauth
+except ImportError:
+    pamauth = None
+
+
+
+class Options(usage.Options, strcred.AuthOptionMixin):
+    synopsis = "[-i <interface>] [-p <port>] [-d <dir>] "
+    longdesc = ("Makes a Conch SSH server.  If no authentication methods are "
+        "specified, the default authentication methods are UNIX passwords, "
+        "SSH public keys, and PAM if it is available.  If --auth options are "
+        "passed, only the measures specified will be used.")
+    optParameters = [
+        ["interface", "i", "", "local interface to which we listen"],
+        ["port", "p", "tcp:22", "Port on which to listen"],
+        ["data", "d", "/etc", "directory to look for host keys in"],
+        ["moduli", "", None, "directory to look for moduli in "
+            "(if different from --data)"]
+    ]
+    compData = usage.Completions(
+        optActions={"data": usage.CompleteDirs(descr="data directory"),
+                    "moduli": usage.CompleteDirs(descr="moduli directory"),
+                    "interface": usage.CompleteNetInterfaces()}
+        )
+
+
+    def __init__(self, *a, **kw):
+        usage.Options.__init__(self, *a, **kw)
+
+        # call the default addCheckers (for backwards compatibility) that will
+        # be used if no --auth option is provided - note that conch's
+        # UNIXPasswordDatabase is used, instead of twisted.plugins.cred_unix's
+        # checker
+        super(Options, self).addChecker(conch_checkers.UNIXPasswordDatabase())
+        super(Options, self).addChecker(conch_checkers.SSHPublicKeyDatabase())
+        if pamauth is not None:
+            super(Options, self).addChecker(
+                checkers.PluggableAuthenticationModulesChecker())
+        self._usingDefaultAuth = True
+
+
+    def addChecker(self, checker):
+        """
+        Add the checker specified.  If any checkers are added, the default
+        checkers are automatically cleared and the only checkers will be the
+        specified one(s).
+        """
+        if self._usingDefaultAuth:
+            self['credCheckers'] = []
+            self['credInterfaces'] = {}
+            self._usingDefaultAuth = False
+        super(Options, self).addChecker(checker)
+
+
+
+def makeService(config):
+    """
+    Construct a service for operating an SSH server.
+
+    @param config: An L{Options} instance specifying server options, including
+    where server keys are stored and what authentication methods to use.
+
+    @return: An L{IService} provider which contains the requested SSH server.
+    """
+
+    t = factory.OpenSSHFactory()
+
+    r = unix.UnixSSHRealm()
+    t.portal = portal.Portal(r, config.get('credCheckers', []))
+    t.dataRoot = config['data']
+    t.moduliRoot = config['moduli'] or config['data']
+
+    port = config['port']
+    if config['interface']:
+        # Add warning here
+        port += ':interface=' + config['interface']
+    return strports.service(port, t)
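+# Illustrative usage (not part of the upstream module): with this module
+# registered as the 'conch' twistd plugin, a server can be started with, e.g.
+#
+#   twistd -n conch -p tcp:2222 -d /etc/ssh
+#
+# where -d points at a directory containing the host keys (the path shown is
+# only an example).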
diff --git a/ThirdParty/Twisted/twisted/conch/telnet.py b/ThirdParty/Twisted/twisted/conch/telnet.py
new file mode 100644
index 0000000..c90fe1a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/telnet.py
@@ -0,0 +1,1086 @@
+# -*- test-case-name: twisted.conch.test.test_telnet -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Telnet protocol implementation.
+
+@author: Jean-Paul Calderone
+"""
+
+import struct
+
+from zope.interface import implements
+
+from twisted.internet import protocol, interfaces as iinternet, defer
+from twisted.python import log
+
+MODE = chr(1)
+EDIT = 1
+TRAPSIG = 2
+MODE_ACK = 4
+SOFT_TAB = 8
+LIT_ECHO = 16
+
+# Characters gleaned from the various (and conflicting) RFCs.  Not all of these are correct.
+
+NULL =           chr(0)   # No operation.
+BEL =            chr(7)   # Produces an audible or
+                          # visible signal (which does
+                          # NOT move the print head).
+BS =             chr(8)   # Moves the print head one
+                          # character position towards
+                          # the left margin.
+HT =             chr(9)   # Moves the printer to the
+                          # next horizontal tab stop.
+                          # It remains unspecified how
+                          # either party determines or
+                          # establishes where such tab
+                          # stops are located.
+LF =             chr(10)  # Moves the printer to the
+                          # next print line, keeping the
+                          # same horizontal position.
+VT =             chr(11)  # Moves the printer to the
+                          # next vertical tab stop.  It
+                          # remains unspecified how
+                          # either party determines or
+                          # establishes where such tab
+                          # stops are located.
+FF =             chr(12)  # Moves the printer to the top
+                          # of the next page, keeping
+                          # the same horizontal position.
+CR =             chr(13)  # Moves the printer to the left
+                          # margin of the current line.
+
+ECHO  =          chr(1)   # User-to-Server:  Asks the server to send
+                          # echoes of the transmitted data.
+SGA =            chr(3)   # Suppress Go Ahead.  Go Ahead is silly
+                          # and most modern servers should suppress
+                          # it.
+NAWS =           chr(31)  # Negotiate About Window Size.  Indicate that
+                          # information about the size of the terminal
+                          # can be communicated.
+LINEMODE =       chr(34)  # Allow line buffering to be
+                          # negotiated about.
+
+SE =             chr(240) # End of subnegotiation parameters.
+NOP =            chr(241) # No operation.
+DM =             chr(242) # "Data Mark": The data stream portion
+                          # of a Synch.  This should always be
+                          # accompanied by a TCP Urgent
+                          # notification.
+BRK =            chr(243) # NVT character Break.
+IP =             chr(244) # The function Interrupt Process.
+AO =             chr(245) # The function Abort Output
+AYT =            chr(246) # The function Are You There.
+EC =             chr(247) # The function Erase Character.
+EL =             chr(248) # The function Erase Line
+GA =             chr(249) # The Go Ahead signal.
+SB =             chr(250) # Indicates that what follows is
+                          # subnegotiation of the indicated
+                          # option.
+WILL =           chr(251) # Indicates the desire to begin
+                          # performing, or confirmation that
+                          # you are now performing, the
+                          # indicated option.
+WONT =           chr(252) # Indicates the refusal to perform,
+                          # or continue performing, the
+                          # indicated option.
+DO =             chr(253) # Indicates the request that the
+                          # other party perform, or
+                          # confirmation that you are expecting
+                          # the other party to perform, the
+                          # indicated option.
+DONT =           chr(254) # Indicates the demand that the
+                          # other party stop performing,
+                          # or confirmation that you are no
+                          # longer expecting the other party
+                          # to perform, the indicated option.
+IAC =            chr(255) # Data Byte 255.  Introduces a
+                          # telnet command.
+
+LINEMODE_MODE = chr(1)
+LINEMODE_EDIT = chr(1)
+LINEMODE_TRAPSIG = chr(2)
+LINEMODE_MODE_ACK = chr(4)
+LINEMODE_SOFT_TAB = chr(8)
+LINEMODE_LIT_ECHO = chr(16)
+LINEMODE_FORWARDMASK = chr(2)
+LINEMODE_SLC = chr(3)
+LINEMODE_SLC_SYNCH = chr(1)
+LINEMODE_SLC_BRK = chr(2)
+LINEMODE_SLC_IP = chr(3)
+LINEMODE_SLC_AO = chr(4)
+LINEMODE_SLC_AYT = chr(5)
+LINEMODE_SLC_EOR = chr(6)
+LINEMODE_SLC_ABORT = chr(7)
+LINEMODE_SLC_EOF = chr(8)
+LINEMODE_SLC_SUSP = chr(9)
+LINEMODE_SLC_EC = chr(10)
+LINEMODE_SLC_EL = chr(11)
+
+LINEMODE_SLC_EW = chr(12)
+LINEMODE_SLC_RP = chr(13)
+LINEMODE_SLC_LNEXT = chr(14)
+LINEMODE_SLC_XON = chr(15)
+LINEMODE_SLC_XOFF = chr(16)
+LINEMODE_SLC_FORW1 = chr(17)
+LINEMODE_SLC_FORW2 = chr(18)
+LINEMODE_SLC_MCL = chr(19)
+LINEMODE_SLC_MCR = chr(20)
+LINEMODE_SLC_MCWL = chr(21)
+LINEMODE_SLC_MCWR = chr(22)
+LINEMODE_SLC_MCBOL = chr(23)
+LINEMODE_SLC_MCEOL = chr(24)
+LINEMODE_SLC_INSRT = chr(25)
+LINEMODE_SLC_OVER = chr(26)
+LINEMODE_SLC_ECR = chr(27)
+LINEMODE_SLC_EWR = chr(28)
+LINEMODE_SLC_EBOL = chr(29)
+LINEMODE_SLC_EEOL = chr(30)
+
+LINEMODE_SLC_DEFAULT = chr(3)
+LINEMODE_SLC_VALUE = chr(2)
+LINEMODE_SLC_CANTCHANGE = chr(1)
+LINEMODE_SLC_NOSUPPORT = chr(0)
+LINEMODE_SLC_LEVELBITS = chr(3)
+
+LINEMODE_SLC_ACK = chr(128)
+LINEMODE_SLC_FLUSHIN = chr(64)
+LINEMODE_SLC_FLUSHOUT = chr(32)
+LINEMODE_EOF = chr(236)
+LINEMODE_SUSP = chr(237)
+LINEMODE_ABORT = chr(238)
+
+class ITelnetProtocol(iinternet.IProtocol):
+    def unhandledCommand(command, argument):
+        """A command was received but not understood.
+
+        @param command: the command received.
+        @type command: C{str}, a single character.
+        @param argument: the argument to the received command.
+        @type argument: C{str}, a single character, or None if the command that
+            was unhandled does not provide an argument.
+        """
+
+    def unhandledSubnegotiation(command, bytes):
+        """A subnegotiation command was received but not understood.
+
+        @param command: the command being subnegotiated. That is, the first
+            byte after the SB command.
+        @type command: C{str}, a single character.
+        @param bytes: all other bytes of the subnegotiation. That is, all but
+            the first byte between SB and SE, with IAC un-escaping applied.
+        @type bytes: C{list} of C{str}, each a single character
+        """
+
+    def enableLocal(option):
+        """Enable the given option locally.
+
+        This should enable the given option on this side of the
+        telnet connection and return True.  If False is returned,
+        the option will be treated as still disabled and the peer
+        will be notified.
+
+        @param option: the option to be enabled.
+        @type option: C{str}, a single character.
+        """
+
+    def enableRemote(option):
+        """Indicate whether the peer should be allowed to enable this option.
+
+        Returns True if the peer should be allowed to enable this option,
+        False otherwise.
+
+        @param option: the option to be enabled.
+        @type option: C{str}, a single character.
+        """
+
+    def disableLocal(option):
+        """Disable the given option locally.
+
+        Unlike enableLocal, this method cannot fail.  The option must be
+        disabled.
+
+        @param option: the option to be disabled.
+        @type option: C{str}, a single character.
+        """
+
+    def disableRemote(option):
+        """Indicate that the peer has disabled this option.
+
+        @param option: the option to be disabled.
+        @type option: C{str}, a single character.
+        """
+
+
+
+class ITelnetTransport(iinternet.ITransport):
+    def do(option):
+        """
+        Indicate a desire for the peer to begin performing the given option.
+
+        Returns a Deferred that fires with True when the peer begins performing
+        the option, or fails with L{OptionRefused} when the peer refuses to
+        perform it.  If the peer is already performing the given option, the
+        Deferred will fail with L{AlreadyEnabled}.  If a negotiation regarding
+        this option is already in progress, the Deferred will fail with
+        L{AlreadyNegotiating}.
+
+        Note: It is currently possible that this Deferred will never fire,
+        if the peer never responds, or if the peer believes the option to
+        already be enabled.
+        """
+
+
+    def dont(option):
+        """
+        Indicate a desire for the peer to cease performing the given option.
+
+        Returns a Deferred that fires with True when the peer ceases performing
+        the option.  If the peer is not performing the given option, the
+        Deferred will fail with L{AlreadyDisabled}.  If negotiation regarding
+        this option is already in progress, the Deferred will fail with
+        L{AlreadyNegotiating}.
+
+        Note: It is currently possible that this Deferred will never fire,
+        if the peer never responds, or if the peer believes the option to
+        already be disabled.
+        """
+
+
+    def will(option):
+        """
+        Indicate our willingness to begin performing this option locally.
+
+        Returns a Deferred that fires with True when the peer agrees to allow us
+        to begin performing this option, or fails with L{OptionRefused} if the
+        peer refuses to allow us to begin performing it.  If the option is
+        already enabled locally, the Deferred will fail with L{AlreadyEnabled}.
+        If negotiation regarding this option is already in progress, the
+        Deferred will fail with L{AlreadyNegotiating}.
+
+        Note: It is currently possible that this Deferred will never fire,
+        if the peer never responds, or if the peer believes the option to
+        already be enabled.
+        """
+
+
+    def wont(option):
+        """
+        Indicate that we will stop performing the given option.
+
+        Returns a Deferred that fires with True when the peer acknowledges
+        we have stopped performing this option.  If the option is already
+        disabled locally, the Deferred will fail with L{AlreadyDisabled}.
+        If negotiation regarding this option is already in progress,
+        the Deferred will fail with L{AlreadyNegotiating}.
+
+        Note: It is currently possible that this Deferred will never fire,
+        if the peer never responds, or if the peer believes the option to
+        already be disabled.
+        """
+
+
+    def requestNegotiation(about, bytes):
+        """
+        Send a subnegotiation request.
+
+        @param about: A byte indicating the feature being negotiated.
+        @param bytes: Any number of bytes containing specific information
+        about the negotiation being requested.  No values in this string
+        need to be escaped, as this function will escape any value which
+        requires it.
+        """
+
+
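+# Illustrative usage sketch (not part of the upstream module): a protocol
+# whose transport provides ITelnetTransport can ask the peer to enable an
+# option and react to the resulting Deferred, e.g.
+#
+#   d = self.transport.do(ECHO)
+#   d.addCallbacks(lambda enabled: log.msg('peer will echo'),
+#                  lambda reason: reason.trap(OptionRefused, AlreadyEnabled))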
+
+class TelnetError(Exception):
+    pass
+
+class NegotiationError(TelnetError):
+    def __str__(self):
+        return self.__class__.__module__ + '.' + self.__class__.__name__ + ':' + repr(self.args[0])
+
+class OptionRefused(NegotiationError):
+    pass
+
+class AlreadyEnabled(NegotiationError):
+    pass
+
+class AlreadyDisabled(NegotiationError):
+    pass
+
+class AlreadyNegotiating(NegotiationError):
+    pass
+
+class TelnetProtocol(protocol.Protocol):
+    implements(ITelnetProtocol)
+
+    def unhandledCommand(self, command, argument):
+        pass
+
+    def unhandledSubnegotiation(self, command, bytes):
+        pass
+
+    def enableLocal(self, option):
+        pass
+
+    def enableRemote(self, option):
+        pass
+
+    def disableLocal(self, option):
+        pass
+
+    def disableRemote(self, option):
+        pass
+
+
+class Telnet(protocol.Protocol):
+    """
+    @ivar commandMap: A mapping of bytes to callables.  When a
+    telnet command is received, the command byte (the first byte
+    after IAC) is looked up in this dictionary.  If a callable is
+    found, it is invoked with the argument of the command, or None
+    if the command takes no argument.  Values should be added to
+    this dictionary if additional commands are to be handled.  By default,
+    only WILL, WONT, DO, and DONT are handled.  These should not
+    be overridden, as this class handles them correctly and
+    provides an API for interacting with them.
+
+    @ivar negotiationMap: A mapping of bytes to callables.  When
+    a subnegotiation command is received, the command byte (the
+    first byte after SB) is looked up in this dictionary.  If
+    a callable is found, it is invoked with the argument of the
+    subnegotiation.  Values should be added to this dictionary if
+    subnegotiations are to be handled.  By default, no values are
+    handled.
+
+    @ivar options: A mapping of option bytes to their current
+    state.  This state is likely of little use to user code.
+    Changes should not be made to it.
+
+    @ivar state: A string indicating the current parse state.  It
+    can take on the values "data", "escaped", "command", "newline",
+    "subnegotiation", and "subnegotiation-escaped".  Changes
+    should not be made to it.
+
+    @ivar transport: This protocol's transport object.
+    """
+
+    # The initial parse state; one of the values documented for C{state} above.
+    state = 'data'
+
+    def __init__(self):
+        self.options = {}
+        self.negotiationMap = {}
+        self.commandMap = {
+            WILL: self.telnet_WILL,
+            WONT: self.telnet_WONT,
+            DO: self.telnet_DO,
+            DONT: self.telnet_DONT}
+
+    def _write(self, bytes):
+        self.transport.write(bytes)
+
+    class _OptionState:
+        """
+        Represents the state of an option on both sides of a telnet
+        connection.
+
+        @ivar us: The state of the option on this side of the connection.
+
+        @ivar him: The state of the option on the other side of the
+            connection.
+        """
+        class _Perspective:
+            """
+            Represents the state of an option on one side of the telnet
+            connection.  Some options can be enabled on a particular side of
+            the connection (RFC 1073 for example: only the client can have
+            NAWS enabled).  Other options can be enabled on either or both
+            sides (such as RFC 1372: each side can have its own flow control
+            state).
+
+            @ivar state: C{'yes'} or C{'no'} indicating whether or not this
+                option is enabled on one side of the connection.
+
+            @ivar negotiating: A boolean tracking whether negotiation about
+                this option is in progress.
+
+            @ivar onResult: When negotiation about this option has been
+                initiated by this side of the connection, a L{Deferred}
+                which will fire with the result of the negotiation.  C{None}
+                at other times.
+            """
+            state = 'no'
+            negotiating = False
+            onResult = None
+
+            def __str__(self):
+                return self.state + ('*' * self.negotiating)
+
+        def __init__(self):
+            self.us = self._Perspective()
+            self.him = self._Perspective()
+
+        def __repr__(self):
+            return '<_OptionState us=%s him=%s>' % (self.us, self.him)
+
+    def getOptionState(self, opt):
+        return self.options.setdefault(opt, self._OptionState())
+
+    def _do(self, option):
+        self._write(IAC + DO + option)
+
+    def _dont(self, option):
+        self._write(IAC + DONT + option)
+
+    def _will(self, option):
+        self._write(IAC + WILL + option)
+
+    def _wont(self, option):
+        self._write(IAC + WONT + option)
+
+    def will(self, option):
+        """Indicate our willingness to enable an option.
+        """
+        s = self.getOptionState(option)
+        if s.us.negotiating or s.him.negotiating:
+            return defer.fail(AlreadyNegotiating(option))
+        elif s.us.state == 'yes':
+            return defer.fail(AlreadyEnabled(option))
+        else:
+            s.us.negotiating = True
+            s.us.onResult = d = defer.Deferred()
+            self._will(option)
+            return d
+
+    def wont(self, option):
+        """Indicate we are not willing to enable an option.
+        """
+        s = self.getOptionState(option)
+        if s.us.negotiating or s.him.negotiating:
+            return defer.fail(AlreadyNegotiating(option))
+        elif s.us.state == 'no':
+            return defer.fail(AlreadyDisabled(option))
+        else:
+            s.us.negotiating = True
+            s.us.onResult = d = defer.Deferred()
+            self._wont(option)
+            return d
+
+    def do(self, option):
+        s = self.getOptionState(option)
+        if s.us.negotiating or s.him.negotiating:
+            return defer.fail(AlreadyNegotiating(option))
+        elif s.him.state == 'yes':
+            return defer.fail(AlreadyEnabled(option))
+        else:
+            s.him.negotiating = True
+            s.him.onResult = d = defer.Deferred()
+            self._do(option)
+            return d
+
+    def dont(self, option):
+        s = self.getOptionState(option)
+        if s.us.negotiating or s.him.negotiating:
+            return defer.fail(AlreadyNegotiating(option))
+        elif s.him.state == 'no':
+            return defer.fail(AlreadyDisabled(option))
+        else:
+            s.him.negotiating = True
+            s.him.onResult = d = defer.Deferred()
+            self._dont(option)
+            return d
+
+
+    def requestNegotiation(self, about, bytes):
+        """
+        Send a negotiation message for the option C{about} with C{bytes} as the
+        payload.
+
+        @see: L{ITelnetTransport.requestNegotiation}
+        """
+        bytes = bytes.replace(IAC, IAC * 2)
+        self._write(IAC + SB + about + bytes + IAC + SE)
+
+
+    def dataReceived(self, data):
+        appDataBuffer = []
+
+        for b in data:
+            if self.state == 'data':
+                if b == IAC:
+                    self.state = 'escaped'
+                elif b == '\r':
+                    self.state = 'newline'
+                else:
+                    appDataBuffer.append(b)
+            elif self.state == 'escaped':
+                if b == IAC:
+                    appDataBuffer.append(b)
+                    self.state = 'data'
+                elif b == SB:
+                    self.state = 'subnegotiation'
+                    self.commands = []
+                elif b in (NOP, DM, BRK, IP, AO, AYT, EC, EL, GA):
+                    self.state = 'data'
+                    if appDataBuffer:
+                        self.applicationDataReceived(''.join(appDataBuffer))
+                        del appDataBuffer[:]
+                    self.commandReceived(b, None)
+                elif b in (WILL, WONT, DO, DONT):
+                    self.state = 'command'
+                    self.command = b
+                else:
+                    raise ValueError("Stumped", b)
+            elif self.state == 'command':
+                self.state = 'data'
+                command = self.command
+                del self.command
+                if appDataBuffer:
+                    self.applicationDataReceived(''.join(appDataBuffer))
+                    del appDataBuffer[:]
+                self.commandReceived(command, b)
+            elif self.state == 'newline':
+                self.state = 'data'
+                if b == '\n':
+                    appDataBuffer.append('\n')
+                elif b == '\0':
+                    appDataBuffer.append('\r')
+                elif b == IAC:
+                    # IAC isn't really allowed after \r, according to the
+                    # RFC, but handling it this way is less surprising than
+                    # delivering the IAC to the app as application data. 
+                    # The purpose of the restriction is to allow terminals
+                    # to unambiguously interpret the behavior of the CR
+                    # after reading only one more byte.  CR LF is supposed
+                    # to mean one thing (cursor to next line, first column),
+                    # CR NUL another (cursor to first column).  Absent the
+                    # NUL, it still makes sense to interpret this as CR and
+                    # then apply all the usual interpretation to the IAC.
+                    appDataBuffer.append('\r')
+                    self.state = 'escaped'
+                else:
+                    appDataBuffer.append('\r' + b)
+            elif self.state == 'subnegotiation':
+                if b == IAC:
+                    self.state = 'subnegotiation-escaped'
+                else:
+                    self.commands.append(b)
+            elif self.state == 'subnegotiation-escaped':
+                if b == SE:
+                    self.state = 'data'
+                    commands = self.commands
+                    del self.commands
+                    if appDataBuffer:
+                        self.applicationDataReceived(''.join(appDataBuffer))
+                        del appDataBuffer[:]
+                    self.negotiate(commands)
+                else:
+                    self.state = 'subnegotiation'
+                    self.commands.append(b)
+            else:
+                raise ValueError("How'd you do this?")
+
+        if appDataBuffer:
+            self.applicationDataReceived(''.join(appDataBuffer))
+
+
+    def connectionLost(self, reason):
+        for state in self.options.values():
+            if state.us.onResult is not None:
+                d = state.us.onResult
+                state.us.onResult = None
+                d.errback(reason)
+            if state.him.onResult is not None:
+                d = state.him.onResult
+                state.him.onResult = None
+                d.errback(reason)
+
+    def applicationDataReceived(self, bytes):
+        """Called with application-level data.
+        """
+
+    def unhandledCommand(self, command, argument):
+        """Called for commands for which no handler is installed.
+        """
+
+    def commandReceived(self, command, argument):
+        cmdFunc = self.commandMap.get(command)
+        if cmdFunc is None:
+            self.unhandledCommand(command, argument)
+        else:
+            cmdFunc(argument)
+
+    def unhandledSubnegotiation(self, command, bytes):
+        """Called for subnegotiations for which no handler is installed.
+        """
+
+    def negotiate(self, bytes):
+        command, bytes = bytes[0], bytes[1:]
+        cmdFunc = self.negotiationMap.get(command)
+        if cmdFunc is None:
+            self.unhandledSubnegotiation(command, bytes)
+        else:
+            cmdFunc(bytes)
+
+    def telnet_WILL(self, option):
+        s = self.getOptionState(option)
+        self.willMap[s.him.state, s.him.negotiating](self, s, option)
+
+    def will_no_false(self, state, option):
+        # He is unilaterally offering to enable an option.
+        if self.enableRemote(option):
+            state.him.state = 'yes'
+            self._do(option)
+        else:
+            self._dont(option)
+
+    def will_no_true(self, state, option):
+        # Peer agreed to enable an option in response to our request.
+        state.him.state = 'yes'
+        state.him.negotiating = False
+        d = state.him.onResult
+        state.him.onResult = None
+        d.callback(True)
+        assert self.enableRemote(option), "enableRemote must return True in this context (for option %r)" % (option,)
+
+    def will_yes_false(self, state, option):
+        # He is unilaterally offering to enable an already-enabled option.
+        # Ignore this.
+        pass
+
+    def will_yes_true(self, state, option):
+        # This is a bogus state.  It is here for completeness.  It will
+        # never be entered.
+        assert False, "will_yes_true can never be entered, but was called with %r, %r" % (state, option)
+
+    willMap = {('no', False): will_no_false,   ('no', True): will_no_true,
+               ('yes', False): will_yes_false, ('yes', True): will_yes_true}
+
+    def telnet_WONT(self, option):
+        s = self.getOptionState(option)
+        self.wontMap[s.him.state, s.him.negotiating](self, s, option)
+
+    def wont_no_false(self, state, option):
+        # He is unilaterally demanding that an already-disabled option be/remain disabled.
+        # Ignore this (although we could record it and refuse subsequent enable attempts
+        # from our side - he can always refuse them again though, so we won't)
+        pass
+
+    def wont_no_true(self, state, option):
+        # Peer refused to enable an option in response to our request.
+        state.him.negotiating = False
+        d = state.him.onResult
+        state.him.onResult = None
+        d.errback(OptionRefused(option))
+
+    def wont_yes_false(self, state, option):
+        # Peer is unilaterally demanding that an option be disabled.
+        state.him.state = 'no'
+        self.disableRemote(option)
+        self._dont(option)
+
+    def wont_yes_true(self, state, option):
+        # Peer agreed to disable an option at our request.
+        state.him.state = 'no'
+        state.him.negotiating = False
+        d = state.him.onResult
+        state.him.onResult = None
+        d.callback(True)
+        self.disableRemote(option)
+
+    wontMap = {('no', False): wont_no_false,   ('no', True): wont_no_true,
+               ('yes', False): wont_yes_false, ('yes', True): wont_yes_true}
+
+    def telnet_DO(self, option):
+        s = self.getOptionState(option)
+        self.doMap[s.us.state, s.us.negotiating](self, s, option)
+
+    def do_no_false(self, state, option):
+        # Peer is unilaterally requesting that we enable an option.
+        if self.enableLocal(option):
+            state.us.state = 'yes'
+            self._will(option)
+        else:
+            self._wont(option)
+
+    def do_no_true(self, state, option):
+        # Peer agreed to allow us to enable an option at our request.
+        state.us.state = 'yes'
+        state.us.negotiating = False
+        d = state.us.onResult
+        state.us.onResult = None
+        d.callback(True)
+        self.enableLocal(option)
+
+    def do_yes_false(self, state, option):
+        # Peer is unilaterally requesting us to enable an already-enabled option.
+        # Ignore this.
+        pass
+
+    def do_yes_true(self, state, option):
+        # This is a bogus state.  It is here for completeness.  It will never be
+        # entered.
+        assert False, "do_yes_true can never be entered, but was called with %r, %r" % (state, option)
+
+    doMap = {('no', False): do_no_false,   ('no', True): do_no_true,
+             ('yes', False): do_yes_false, ('yes', True): do_yes_true}
+
+    def telnet_DONT(self, option):
+        s = self.getOptionState(option)
+        self.dontMap[s.us.state, s.us.negotiating](self, s, option)
+
+    def dont_no_false(self, state, option):
+        # Peer is unilaterally demanding us to disable an already-disabled option.
+        # Ignore this.
+        pass
+
+    def dont_no_true(self, state, option):
+        # Offered option was refused.  Fail the Deferred returned by the
+        # previous will() call.
+        state.us.negotiating = False
+        d = state.us.onResult
+        state.us.onResult = None
+        d.errback(OptionRefused(option))
+
+    def dont_yes_false(self, state, option):
+        # Peer is unilaterally demanding we disable an option.
+        state.us.state = 'no'
+        self.disableLocal(option)
+        self._wont(option)
+
+    def dont_yes_true(self, state, option):
+        # Peer acknowledged our notice that we will disable an option.
+        state.us.state = 'no'
+        state.us.negotiating = False
+        d = state.us.onResult
+        state.us.onResult = None
+        d.callback(True)
+        self.disableLocal(option)
+
+    dontMap = {('no', False): dont_no_false,   ('no', True): dont_no_true,
+               ('yes', False): dont_yes_false, ('yes', True): dont_yes_true}
+
+    def enableLocal(self, option):
+        """
+        Reject all attempts to enable options.
+        """
+        return False
+
+
+    def enableRemote(self, option):
+        """
+        Reject all attempts to enable options.
+        """
+        return False
+
+
+    def disableLocal(self, option):
+        """
+        Signal a programming error by raising an exception.
+
+        L{enableLocal} must return true for the given value of C{option} in
+        order for this method to be called.  If a subclass of L{Telnet}
+        overrides enableLocal to allow certain options to be enabled, it must
+        also override disableLocal to disable those options.
+
+        @raise NotImplementedError: Always raised.
+        """
+        raise NotImplementedError(
+            "Don't know how to disable local telnet option %r" % (option,))
+
+
+    def disableRemote(self, option):
+        """
+        Signal a programming error by raising an exception.
+
+        L{enableRemote} must return true for the given value of C{option} in
+        order for this method to be called.  If a subclass of L{Telnet}
+        overrides enableRemote to allow certain options to be enabled, it must
+        also override disableRemote to disable those options.
+
+        @raise NotImplementedError: Always raised.
+        """
+        raise NotImplementedError(
+            "Don't know how to disable remote telnet option %r" % (option,))
+
+
+
+class ProtocolTransportMixin:
+    def write(self, bytes):
+        self.transport.write(bytes.replace('\n', '\r\n'))
+
+    def writeSequence(self, seq):
+        self.transport.writeSequence(seq)
+
+    def loseConnection(self):
+        self.transport.loseConnection()
+
+    def getHost(self):
+        return self.transport.getHost()
+
+    def getPeer(self):
+        return self.transport.getPeer()
+
+class TelnetTransport(Telnet, ProtocolTransportMixin):
+    """
+    @ivar protocol: An instance of the protocol to which this
+    transport is connected, or None before the connection is
+    established and after it is lost.
+
+    @ivar protocolFactory: A callable which returns protocol instances
+    which provide L{ITelnetProtocol}.  This will be invoked when a
+    connection is established.  It is passed *protocolArgs and
+    **protocolKwArgs.
+
+    @ivar protocolArgs: A tuple of additional arguments to
+    pass to protocolFactory.
+
+    @ivar protocolKwArgs: A dictionary of additional arguments
+    to pass to protocolFactory.
+    """
+
+    disconnecting = False
+
+    protocolFactory = None
+    protocol = None
+
+    def __init__(self, protocolFactory=None, *a, **kw):
+        Telnet.__init__(self)
+        if protocolFactory is not None:
+            self.protocolFactory = protocolFactory
+            self.protocolArgs = a
+            self.protocolKwArgs = kw
+
+    def connectionMade(self):
+        if self.protocolFactory is not None:
+            self.protocol = self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs)
+            assert ITelnetProtocol.providedBy(self.protocol)
+            try:
+                factory = self.factory
+            except AttributeError:
+                pass
+            else:
+                self.protocol.factory = factory
+            self.protocol.makeConnection(self)
+
+    def connectionLost(self, reason):
+        Telnet.connectionLost(self, reason)
+        if self.protocol is not None:
+            try:
+                self.protocol.connectionLost(reason)
+            finally:
+                del self.protocol
+
+    def enableLocal(self, option):
+        return self.protocol.enableLocal(option)
+
+    def enableRemote(self, option):
+        return self.protocol.enableRemote(option)
+
+    def disableLocal(self, option):
+        return self.protocol.disableLocal(option)
+
+    def disableRemote(self, option):
+        return self.protocol.disableRemote(option)
+
+    def unhandledSubnegotiation(self, command, bytes):
+        self.protocol.unhandledSubnegotiation(command, bytes)
+
+    def unhandledCommand(self, command, argument):
+        self.protocol.unhandledCommand(command, argument)
+
+    def applicationDataReceived(self, bytes):
+        self.protocol.dataReceived(bytes)
+
+    def write(self, data):
+        ProtocolTransportMixin.write(self, data.replace('\xff','\xff\xff'))
+
+
+class TelnetBootstrapProtocol(TelnetProtocol, ProtocolTransportMixin):
+    implements()
+
+    protocol = None
+
+    def __init__(self, protocolFactory, *args, **kw):
+        self.protocolFactory = protocolFactory
+        self.protocolArgs = args
+        self.protocolKwArgs = kw
+
+    def connectionMade(self):
+        self.transport.negotiationMap[NAWS] = self.telnet_NAWS
+        self.transport.negotiationMap[LINEMODE] = self.telnet_LINEMODE
+
+        for opt in (LINEMODE, NAWS, SGA):
+            self.transport.do(opt).addErrback(log.err)
+        for opt in (ECHO,):
+            self.transport.will(opt).addErrback(log.err)
+
+        self.protocol = self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs)
+
+        try:
+            factory = self.factory
+        except AttributeError:
+            pass
+        else:
+            self.protocol.factory = factory
+
+        self.protocol.makeConnection(self)
+
+    def connectionLost(self, reason):
+        if self.protocol is not None:
+            try:
+                self.protocol.connectionLost(reason)
+            finally:
+                del self.protocol
+
+    def dataReceived(self, data):
+        self.protocol.dataReceived(data)
+
+    def enableLocal(self, opt):
+        if opt == ECHO:
+            return True
+        elif opt == SGA:
+            return True
+        else:
+            return False
+
+    def enableRemote(self, opt):
+        if opt == LINEMODE:
+            self.transport.requestNegotiation(LINEMODE, MODE + chr(TRAPSIG))
+            return True
+        elif opt == NAWS:
+            return True
+        elif opt == SGA:
+            return True
+        else:
+            return False
+
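+    # The NAWS subnegotiation payload (RFC 1073) handled below is four bytes:
+    # the terminal width and height as unsigned 16-bit big-endian integers,
+    # hence struct.unpack('!HH', ...).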
+    def telnet_NAWS(self, bytes):
+        # NAWS is client -> server *only*.  self.protocol will
+        # therefore be an ITerminalTransport, the `.protocol'
+        # attribute of which will be an ITerminalProtocol.  Maybe.
+        # You know what, XXX TODO clean this up.
+        if len(bytes) == 4:
+            width, height = struct.unpack('!HH', ''.join(bytes))
+            self.protocol.terminalProtocol.terminalSize(width, height)
+        else:
+            log.msg("Wrong number of NAWS bytes")
+
+
+    linemodeSubcommands = {
+        LINEMODE_SLC: 'SLC'}
+    def telnet_LINEMODE(self, bytes):
+        revmap = {}
+        linemodeSubcommand = bytes[0]
+        if 0:
+            # XXX TODO: This should be enabled to parse linemode subnegotiation.
+            getattr(self, 'linemode_' + self.linemodeSubcommands[linemodeSubcommand])(bytes[1:])
+
+    def linemode_SLC(self, bytes):
+        chunks = zip(*[iter(bytes)]*3)
+        for slcFunction, slcValue, slcWhat in chunks:
+            # Later, we should parse stuff.
+            'SLC', ord(slcFunction), ord(slcValue), ord(slcWhat)
+
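[Editor's note: telnet_NAWS above accepts only a four-byte payload and unpacks it as two network-order unsigned shorts, width then height. A quick illustration with a hypothetical 80x24 window, not part of the upstream module:]

import struct

# A NAWS payload carries width and height as big-endian 16-bit integers.
payload = struct.pack('!HH', 80, 24)
width, height = struct.unpack('!HH', payload)
assert (width, height) == (80, 24)
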
+from twisted.protocols import basic
+
+class StatefulTelnetProtocol(basic.LineReceiver, TelnetProtocol):
+    delimiter = '\n'
+
+    state = 'Discard'
+
+    def connectionLost(self, reason):
+        basic.LineReceiver.connectionLost(self, reason)
+        TelnetProtocol.connectionLost(self, reason)
+
+    def lineReceived(self, line):
+        oldState = self.state
+        newState = getattr(self, "telnet_" + oldState)(line)
+        if newState is not None:
+            if self.state == oldState:
+                self.state = newState
+            else:
+                log.msg("Warning: state changed and new state returned")
+
+    def telnet_Discard(self, line):
+        pass
+
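[Editor's note: StatefulTelnetProtocol dispatches each received line to a telnet_<State> method and, if that method returns a string, uses it as the next state. A minimal hypothetical subclass sketching that contract; it assumes the StatefulTelnetProtocol defined just above:]

class GreeterProtocol(StatefulTelnetProtocol):
    # Start in the 'Name' state; each received line goes to telnet_Name.
    state = 'Name'

    def telnet_Name(self, line):
        self.transport.write('Hello, %s\r\n' % (line,))
        # Returning a state name switches the dispatcher to telnet_Discard.
        return 'Discard'
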
+from twisted.cred import credentials
+
+class AuthenticatingTelnetProtocol(StatefulTelnetProtocol):
+    """A protocol which prompts for credentials and attempts to authenticate them.
+
+    Username and password prompts are given (the password is obscured).  When the
+    information is collected, it is passed to a portal and an avatar implementing
+    L{ITelnetProtocol} is requested.  If an avatar is returned, it is connected to
+    this protocol's transport, and this protocol's transport is connected to it.
+    Otherwise, the user is re-prompted for credentials.
+    """
+
+    state = "User"
+    protocol = None
+
+    def __init__(self, portal):
+        self.portal = portal
+
+    def connectionMade(self):
+        self.transport.write("Username: ")
+
+    def connectionLost(self, reason):
+        StatefulTelnetProtocol.connectionLost(self, reason)
+        if self.protocol is not None:
+            try:
+                self.protocol.connectionLost(reason)
+                self.logout()
+            finally:
+                del self.protocol, self.logout
+
+    def telnet_User(self, line):
+        self.username = line
+        self.transport.will(ECHO)
+        self.transport.write("Password: ")
+        return 'Password'
+
+    def telnet_Password(self, line):
+        username, password = self.username, line
+        del self.username
+        def login(ignored):
+            creds = credentials.UsernamePassword(username, password)
+            d = self.portal.login(creds, None, ITelnetProtocol)
+            d.addCallback(self._cbLogin)
+            d.addErrback(self._ebLogin)
+        self.transport.wont(ECHO).addCallback(login)
+        return 'Discard'
+
+    def _cbLogin(self, ial):
+        interface, protocol, logout = ial
+        assert interface is ITelnetProtocol
+        self.protocol = protocol
+        self.logout = logout
+        self.state = 'Command'
+
+        protocol.makeConnection(self.transport)
+        self.transport.protocol = protocol
+
+    def _ebLogin(self, failure):
+        self.transport.write("\nAuthentication failed\n")
+        self.transport.write("Username: ")
+        self.state = "User"
+
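[Editor's note: AuthenticatingTelnetProtocol is normally wrapped in a TelnetTransport and handed a cred portal whose realm yields ITelnetProtocol avatars. A hedged sketch of one way a server factory might be assembled; the realm argument and the test credentials are hypothetical:]

from twisted.internet import protocol
from twisted.cred import checkers
from twisted.cred import portal as credPortal

def makeTelnetFactory(realm):
    # The realm is assumed to return avatars implementing ITelnetProtocol.
    p = credPortal.Portal(realm)
    p.registerChecker(
        checkers.InMemoryUsernamePasswordDatabaseDontUse(guest='guest'))
    factory = protocol.ServerFactory()
    # Each connection gets a TelnetTransport wrapping the authenticating
    # protocol, which prompts for username and password.
    factory.protocol = lambda: TelnetTransport(AuthenticatingTelnetProtocol, p)
    return factory
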
+__all__ = [
+    # Exceptions
+    'TelnetError', 'NegotiationError', 'OptionRefused',
+    'AlreadyNegotiating', 'AlreadyEnabled', 'AlreadyDisabled',
+
+    # Interfaces
+    'ITelnetProtocol', 'ITelnetTransport',
+
+    # Other stuff, protocols, etc.
+    'Telnet', 'TelnetProtocol', 'TelnetTransport',
+    'TelnetBootstrapProtocol',
+
+    ]
diff --git a/ThirdParty/Twisted/twisted/conch/test/__init__.py b/ThirdParty/Twisted/twisted/conch/test/__init__.py
new file mode 100644
index 0000000..d09b412
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/__init__.py
@@ -0,0 +1 @@
+'conch tests'
diff --git a/ThirdParty/Twisted/twisted/conch/test/keydata.py b/ThirdParty/Twisted/twisted/conch/test/keydata.py
new file mode 100644
index 0000000..9417ec5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/keydata.py
@@ -0,0 +1,208 @@
+# -*- test-case-name: twisted.conch.test.test_keys -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Data used by test_keys as well as others.
+"""
+RSAData = {
+        'n':long('1062486685755247411169438309495398947372127791189432809481'
+            '382072971106157632182084539383569281493520117634129557550415277'
+            '516685881326038852354459895734875625093273594925884531272867425'
+            '864910490065695876046999646807138717162833156501L'),
+        'e':35L,
+        'd':long('6678487739032983727350755088256793383481946116047863373882'
+            '973030104095847973715959961839578340816412167985957218887914482'
+            '713602371850869127033494910375212470664166001439410214474266799'
+            '85974425203903884190893469297150446322896587555L'),
+        'q':long('3395694744258061291019136154000709371890447462086362702627'
+            '9704149412726577280741108645721676968699696898960891593323L'),
+        'p':long('3128922844292337321766351031842562691837301298995834258844'
+            '4720539204069737532863831050930719431498338835415515173887L')}
+
+DSAData = {
+        'y':long('2300663509295750360093768159135720439490120577534296730713'
+            '348508834878775464483169644934425336771277908527130096489120714'
+            '610188630979820723924744291603865L'),
+        'g':long('4451569990409370769930903934104221766858515498655655091803'
+            '866645719060300558655677517139568505649468378587802312867198352'
+            '1161998270001677664063945776405L'),
+        'p':long('7067311773048598659694590252855127633397024017439939353776'
+            '608320410518694001356789646664502838652272205440894335303988504'
+            '978724817717069039110940675621677L'),
+        'q':1184501645189849666738820838619601267690550087703L,
+        'x':863951293559205482820041244219051653999559962819L}
+
+publicRSA_openssh = ("ssh-rsa AAAAB3NzaC1yc2EAAAABIwAAAGEArzJx8OYOnJmzf4tfBE"
+"vLi8DVPrJ3/c9k2I/Az64fxjHf9imyRJbixtQhlH9lfNjUIx+4LmrJH5QNRsFporcHDKOTwTTYL"
+"h5KmRpslkYHRivcJSkbh/C+BR3utDS555mV comment")
+
+privateRSA_openssh = """-----BEGIN RSA PRIVATE KEY-----
+MIIByAIBAAJhAK8ycfDmDpyZs3+LXwRLy4vA1T6yd/3PZNiPwM+uH8Yx3/YpskSW
+4sbUIZR/ZXzY1CMfuC5qyR+UDUbBaaK3Bwyjk8E02C4eSpkabJZGB0Yr3CUpG4fw
+vgUd7rQ0ueeZlQIBIwJgbh+1VZfr7WftK5lu7MHtqE1S1vPWZQYE3+VUn8yJADyb
+Z4fsZaCrzW9lkIqXkE3GIY+ojdhZhkO1gbG0118sIgphwSWKRxK0mvh6ERxKqIt1
+xJEJO74EykXZV4oNJ8sjAjEA3J9r2ZghVhGN6V8DnQrTk24Td0E8hU8AcP0FVP+8
+PQm/g/aXf2QQkQT+omdHVEJrAjEAy0pL0EBH6EVS98evDCBtQw22OZT52qXlAwZ2
+gyTriKFVoqjeEjt3SZKKqXHSApP/AjBLpF99zcJJZRq2abgYlf9lv1chkrWqDHUu
+DZttmYJeEfiFBBavVYIF1dOlZT0G8jMCMBc7sOSZodFnAiryP+Qg9otSBjJ3bQML
+pSTqy7c3a2AScC/YyOwkDaICHnnD3XyjMwIxALRzl0tQEKMXs6hH8ToUdlLROCrP
+EhQ0wahUTCk1gKA4uPD6TMTChavbh4K63OvbKg==
+-----END RSA PRIVATE KEY-----"""
+
+# some versions of OpenSSH generate these (slightly different keys)
+privateRSA_openssh_alternate = """-----BEGIN RSA PRIVATE KEY-----
+MIIBzjCCAcgCAQACYQCvMnHw5g6cmbN/i18ES8uLwNU+snf9z2TYj8DPrh/GMd/2
+KbJEluLG1CGUf2V82NQjH7guaskflA1GwWmitwcMo5PBNNguHkqZGmyWRgdGK9wl
+KRuH8L4FHe60NLnnmZUCASMCYG4ftVWX6+1n7SuZbuzB7ahNUtbz1mUGBN/lVJ/M
+iQA8m2eH7GWgq81vZZCKl5BNxiGPqI3YWYZDtYGxtNdfLCIKYcElikcStJr4ehEc
+SqiLdcSRCTu+BMpF2VeKDSfLIwIxANyfa9mYIVYRjelfA50K05NuE3dBPIVPAHD9
+BVT/vD0Jv4P2l39kEJEE/qJnR1RCawIxAMtKS9BAR+hFUvfHrwwgbUMNtjmU+dql
+5QMGdoMk64ihVaKo3hI7d0mSiqlx0gKT/wIwS6Rffc3CSWUatmm4GJX/Zb9XIZK1
+qgx1Lg2bbZmCXhH4hQQWr1WCBdXTpWU9BvIzAjAXO7DkmaHRZwIq8j/kIPaLUgYy
+d20DC6Uk6su3N2tgEnAv2MjsJA2iAh55w918ozMCMQC0c5dLUBCjF7OoR/E6FHZS
+0TgqzxIUNMGoVEwpNYCgOLjw+kzEwoWr24eCutzr2yowAA==
+------END RSA PRIVATE KEY------"""
+
+# encrypted with the passphrase 'encrypted'
+privateRSA_openssh_encrypted = """-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: DES-EDE3-CBC,FFFFFFFFFFFFFFFF
+
+30qUR7DYY/rpVJu159paRM1mUqt/IMibfEMTKWSjNhCVD21hskftZCJROw/WgIFt
+ncusHpJMkjgwEpho0KyKilcC7zxjpunTex24Meb5pCdXCrYft8AyUkRdq3dugMqT
+4nuWuWxziluBhKQ2M9tPGcEOeulU4vVjceZt2pZhZQVBf08o3XUv5/7RYd24M9md
+WIo+5zdj2YQkI6xMFTP954O/X32ME1KQt98wgNEy6mxhItbvf00mH3woALwEKP3v
+PSMxxtx3VKeDKd9YTOm1giKkXZUf91vZWs0378tUBrU4U5qJxgryTjvvVKOtofj6
+4qQy6+r6M6wtwVlXBgeRm2gBPvL3nv6MsROp3E6ztBd/e7A8fSec+UTq3ko/EbGP
+0QG+IG5tg8FsdITxQ9WAIITZL3Rc6hA5Ymx1VNhySp3iSiso8Jof27lku4pyuvRV
+ko/B3N2H7LnQrGV0GyrjeYocW/qZh/PCsY48JBFhlNQexn2mn44AJW3y5xgbhvKA
+3mrmMD1hD17ZvZxi4fPHjbuAyM1vFqhQx63eT9ijbwJ91svKJl5O5MIv41mCRonm
+hxvOXw8S0mjSasyofptzzQCtXxFLQigXbpQBltII+Ys=
+-----END RSA PRIVATE KEY-----"""
+
+# encrypted with the passphrase 'testxp'. NB: this key was generated by
+# OpenSSH, so it doesn't use the same key data as the other keys here.
+privateRSA_openssh_encrypted_aes = """-----BEGIN RSA PRIVATE KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: AES-128-CBC,0673309A6ACCAB4B77DEE1C1E536AC26
+
+4Ed/a9OgJWHJsne7yOGWeWMzHYKsxuP9w1v0aYcp+puS75wvhHLiUnNwxz0KDi6n
+T3YkKLBsoCWS68ApR2J9yeQ6R+EyS+UQDrO9nwqo3DB5BT3Ggt8S1wE7vjNLQD0H
+g/SJnlqwsECNhh8aAx+Ag0m3ZKOZiRD5mCkcDQsZET7URSmFytDKOjhFn3u6ZFVB
+sXrfpYc6TJtOQlHd/52JB6aAbjt6afSv955Z7enIi+5yEJ5y7oYQTaE5zrFMP7N5
+9LbfJFlKXxEddy/DErRLxEjmC+t4svHesoJKc2jjjyNPiOoGGF3kJXea62vsjdNV
+gMK5Eged3TBVIk2dv8rtJUvyFeCUtjQ1UJZIebScRR47KrbsIpCmU8I4/uHWm5hW
+0mOwvdx1L/mqx/BHqVU9Dw2COhOdLbFxlFI92chkovkmNk4P48ziyVnpm7ME22sE
+vfCMsyirdqB1mrL4CSM7FXONv+CgfBfeYVkYW8RfJac9U1L/O+JNn7yee414O/rS
+hRYw4UdWnH6Gg6niklVKWNY0ZwUZC8zgm2iqy8YCYuneS37jC+OEKP+/s6HSKuqk
+2bzcl3/TcZXNSM815hnFRpz0anuyAsvwPNRyvxG2/DacJHL1f6luV4B0o6W410yf
+qXQx01DLo7nuyhJqoH3UGCyyXB+/QUs0mbG2PAEn3f5dVs31JMdbt+PrxURXXjKk
+4cexpUcIpqqlfpIRe3RD0sDVbH4OXsGhi2kiTfPZu7mgyFxKopRbn1KwU1qKinfY
+EU9O4PoTak/tPT+5jFNhaP+HrURoi/pU8EAUNSktl7xAkHYwkN/9Cm7DeBghgf3n
+8+tyCGYDsB5utPD0/Xe9yx0Qhc/kMm4xIyQDyA937dk3mUvLC9vulnAP8I+Izim0
+fZ182+D1bWwykoD0997mUHG/AUChWR01V1OLwRyPv2wUtiS8VNG76Y2aqKlgqP1P
+V+IvIEqR4ERvSBVFzXNF8Y6j/sVxo8+aZw+d0L1Ns/R55deErGg3B8i/2EqGd3r+
+0jps9BqFHHWW87n3VyEB3jWCMj8Vi2EJIfa/7pSaViFIQn8LiBLf+zxG5LTOToK5
+xkN42fReDcqi3UNfKNGnv4dsplyTR2hyx65lsj4bRKDGLKOuB1y7iB0AGb0LtcAI
+dcsVlcCeUquDXtqKvRnwfIMg+ZunyjqHBhj3qgRgbXbT6zjaSdNnih569aTg0Vup
+VykzZ7+n/KVcGLmvX0NesdoI7TKbq4TnEIOynuG5Sf+2GpARO5bjcWKSZeN/Ybgk
+gccf8Cqf6XWqiwlWd0B7BR3SymeHIaSymC45wmbgdstrbk7Ppa2Tp9AZku8M2Y7c
+8mY9b+onK075/ypiwBm4L4GRNTFLnoNQJXx0OSl4FNRWsn6ztbD+jZhu8Seu10Jw
+SEJVJ+gmTKdRLYORJKyqhDet6g7kAxs4EoJ25WsOnX5nNr00rit+NkMPA7xbJT+7
+CfI51GQLw7pUPeO2WNt6yZO/YkzZrqvTj5FEwybkUyBv7L0gkqu9wjfDdUw0fVHE
+xEm4DxjEoaIp8dW/JOzXQ2EF+WaSOgdYsw3Ac+rnnjnNptCdOEDGP6QBkt+oXj4P
+-----END RSA PRIVATE KEY-----"""
+
+publicRSA_lsh = ("{KDEwOnB1YmxpYy1rZXkoMTQ6cnNhLXBrY3MxLXNoYTEoMTpuOTc6AK8yc"
+"fDmDpyZs3+LXwRLy4vA1T6yd/3PZNiPwM+uH8Yx3/YpskSW4sbUIZR/ZXzY1CMfuC5qyR+UDUbB"
+"aaK3Bwyjk8E02C4eSpkabJZGB0Yr3CUpG4fwvgUd7rQ0ueeZlSkoMTplMTojKSkp}")
+
+privateRSA_lsh = ("(11:private-key(9:rsa-pkcs1(1:n97:\x00\xaf2q\xf0\xe6\x0e"
+"\x9c\x99\xb3\x7f\x8b_\x04K\xcb\x8b\xc0\xd5>\xb2w\xfd\xcfd\xd8\x8f\xc0\xcf"
+"\xae\x1f\xc61\xdf\xf6)\xb2D\x96\xe2\xc6\xd4!\x94\x7fe|\xd8\xd4#\x1f\xb8.j"
+"\xc9\x1f\x94\rF\xc1i\xa2\xb7\x07\x0c\xa3\x93\xc14\xd8.\x1eJ\x99\x1al\x96F"
+"\x07F+\xdc%)\x1b\x87\xf0\xbe\x05\x1d\xee\xb44\xb9\xe7\x99\x95)(1:e1:#)(1:d9"
+"6:n\x1f\xb5U\x97\xeb\xedg\xed+\x99n\xec\xc1\xed\xa8MR\xd6\xf3\xd6e\x06\x04"
+"\xdf\xe5T\x9f\xcc\x89\x00<\x9bg\x87\xece\xa0\xab\xcdoe\x90\x8a\x97\x90M\xc6"
+'!\x8f\xa8\x8d\xd8Y\x86C\xb5\x81\xb1\xb4\xd7_,"\na\xc1%\x8aG\x12\xb4\x9a\xf8'
+"z\x11\x1cJ\xa8\x8bu\xc4\x91\t;\xbe\x04\xcaE\xd9W\x8a\r\'\xcb#)(1:p49:\x00"
+"\xdc\x9fk\xd9\x98!V\x11\x8d\xe9_\x03\x9d\n\xd3\x93n\x13wA<\x85O\x00p\xfd"
+"\x05T\xff\xbc=\t\xbf\x83\xf6\x97\x7fd\x10\x91\x04\xfe\xa2gGTBk)(1:q49:\x00"
+"\xcbJK\xd0 at G\xe8ER\xf7\xc7\xaf\x0c mC\r\xb69\x94\xf9\xda\xa5\xe5\x03\x06v"
+"\x83$\xeb\x88\xa1U\xa2\xa8\xde\x12;wI\x92\x8a\xa9q\xd2\x02\x93\xff)(1:a48:K"
+"\xa4_}\xcd\xc2Ie\x1a\xb6i\xb8\x18\x95\xffe\xbfW!\x92\xb5\xaa\x0cu.\r\x9bm"
+"\x99\x82^\x11\xf8\x85\x04\x16\xafU\x82\x05\xd5\xd3\xa5e=\x06\xf23)(1:b48:"
+"\x17;\xb0\xe4\x99\xa1\xd1g\x02*\xf2?\xe4 \xf6\x8bR\x062wm\x03\x0b\xa5$\xea"
+"\xcb\xb77k`\x12p/\xd8\xc8\xec$\r\xa2\x02\x1ey\xc3\xdd|\xa33)(1:c49:\x00\xb4"
+"s\x97KP\x10\xa3\x17\xb3\xa8G\xf1:\x14vR\xd18*\xcf\x12\x144\xc1\xa8TL)5\x80"
+"\xa08\xb8\xf0\xfaL\xc4\xc2\x85\xab\xdb\x87\x82\xba\xdc\xeb\xdb*)))")
+
+privateRSA_agentv3 = ("\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x01#\x00\x00\x00`"
+"n\x1f\xb5U\x97\xeb\xedg\xed+\x99n\xec\xc1\xed\xa8MR\xd6\xf3\xd6e\x06\x04"
+"\xdf\xe5T\x9f\xcc\x89\x00<\x9bg\x87\xece\xa0\xab\xcdoe\x90\x8a\x97\x90M\xc6"
+'!\x8f\xa8\x8d\xd8Y\x86C\xb5\x81\xb1\xb4\xd7_,"\na\xc1%\x8aG\x12\xb4\x9a\xf8'
+"z\x11\x1cJ\xa8\x8bu\xc4\x91\t;\xbe\x04\xcaE\xd9W\x8a\r\'\xcb#\x00\x00\x00a"
+"\x00\xaf2q\xf0\xe6\x0e\x9c\x99\xb3\x7f\x8b_\x04K\xcb\x8b\xc0\xd5>\xb2w\xfd"
+"\xcfd\xd8\x8f\xc0\xcf\xae\x1f\xc61\xdf\xf6)\xb2D\x96\xe2\xc6\xd4!\x94\x7fe|"
+"\xd8\xd4#\x1f\xb8.j\xc9\x1f\x94\rF\xc1i\xa2\xb7\x07\x0c\xa3\x93\xc14\xd8."
+"\x1eJ\x99\x1al\x96F\x07F+\xdc%)\x1b\x87\xf0\xbe\x05\x1d\xee\xb44\xb9\xe7"
+"\x99\x95\x00\x00\x001\x00\xb4s\x97KP\x10\xa3\x17\xb3\xa8G\xf1:\x14vR\xd18*"
+"\xcf\x12\x144\xc1\xa8TL)5\x80\xa08\xb8\xf0\xfaL\xc4\xc2\x85\xab\xdb\x87\x82"
+"\xba\xdc\xeb\xdb*\x00\x00\x001\x00\xcbJK\xd0 at G\xe8ER\xf7\xc7\xaf\x0c mC\r"
+"\xb69\x94\xf9\xda\xa5\xe5\x03\x06v\x83$\xeb\x88\xa1U\xa2\xa8\xde\x12;wI\x92"
+"\x8a\xa9q\xd2\x02\x93\xff\x00\x00\x001\x00\xdc\x9fk\xd9\x98!V\x11\x8d\xe9_"
+"\x03\x9d\n\xd3\x93n\x13wA<\x85O\x00p\xfd\x05T\xff\xbc=\t\xbf\x83\xf6\x97"
+"\x7fd\x10\x91\x04\xfe\xa2gGTBk")
+
+publicDSA_openssh = ("ssh-dss AAAAB3NzaC1kc3MAAABBAIbwTOSsZ7Bl7U1KyMNqV13Tu7"
+"yRAtTr70PVI3QnfrPumf2UzCgpL1ljbKxSfAi05XvrE/1vfCFAsFYXRZLhQy0AAAAVAM965Akmo"
+"6eAi7K+k9qDR4TotFAXAAAAQADZlpTW964haQWS4vC063NGdldT6xpUGDcDRqbm90CoPEa2RmNO"
+"uOqi8lnbhYraEzypYH3K4Gzv/bxCBnKtHRUAAABAK+1osyWBS0+P90u/rAuko6chZ98thUSY2kL"
+"SHp6hLKyy2bjnT29h7haELE+XHfq2bM9fckDx2FLOSIJzy83VmQ== comment")
+
+privateDSA_openssh = """-----BEGIN DSA PRIVATE KEY-----
+MIH4AgEAAkEAhvBM5KxnsGXtTUrIw2pXXdO7vJEC1OvvQ9UjdCd+s+6Z/ZTMKCkv
+WWNsrFJ8CLTle+sT/W98IUCwVhdFkuFDLQIVAM965Akmo6eAi7K+k9qDR4TotFAX
+AkAA2ZaU1veuIWkFkuLwtOtzRnZXU+saVBg3A0am5vdAqDxGtkZjTrjqovJZ24WK
+2hM8qWB9yuBs7/28QgZyrR0VAkAr7WizJYFLT4/3S7+sC6SjpyFn3y2FRJjaQtIe
+nqEsrLLZuOdPb2HuFoQsT5cd+rZsz19yQPHYUs5IgnPLzdWZAhUAl1TqdmlAG/b4
+nnVchGiO9sML8MM=
+-----END DSA PRIVATE KEY-----"""
+
+publicDSA_lsh = ("{KDEwOnB1YmxpYy1rZXkoMzpkc2EoMTpwNjU6AIbwTOSsZ7Bl7U1KyMNqV"
+"13Tu7yRAtTr70PVI3QnfrPumf2UzCgpL1ljbKxSfAi05XvrE/1vfCFAsFYXRZLhQy0pKDE6cTIx"
+"OgDPeuQJJqOngIuyvpPag0eE6LRQFykoMTpnNjQ6ANmWlNb3riFpBZLi8LTrc0Z2V1PrGlQYNwN"
+"Gpub3QKg8RrZGY0646qLyWduFitoTPKlgfcrgbO/9vEIGcq0dFSkoMTp5NjQ6K+1osyWBS0+P90"
+"u/rAuko6chZ98thUSY2kLSHp6hLKyy2bjnT29h7haELE+XHfq2bM9fckDx2FLOSIJzy83VmSkpK"
+"Q==}")
+
+privateDSA_lsh = ("(11:private-key(3:dsa(1:p65:\x00\x86\xf0L\xe4\xacg\xb0e"
+"\xedMJ\xc8\xc3jW]\xd3\xbb\xbc\x91\x02\xd4\xeb\xefC\xd5#t'~\xb3\xee\x99\xfd"
+"\x94\xcc()/Ycl\xacR|\x08\xb4\xe5{\xeb\x13\xfdo|!@\xb0V\x17E\x92\xe1C-)(1:q2"
+"1:\x00\xcfz\xe4\t&\xa3\xa7\x80\x8b\xb2\xbe\x93\xda\x83G\x84\xe8\xb4P\x17)(1"
+":g64:\x00\xd9\x96\x94\xd6\xf7\xae!i\x05\x92\xe2\xf0\xb4\xebsFvWS\xeb\x1aT"
+"\x187\x03F\xa6\xe6\xf7@\xa8<F\xb6FcN\xb8\xea\xa2\xf2Y\xdb\x85\x8a\xda\x13<"
+"\xa9`}\xca\xe0l\xef\xfd\xbcB\x06r\xad\x1d\x15)(1:y64:+\xedh\xb3%\x81KO\x8f"
+"\xf7K\xbf\xac\x0b\xa4\xa3\xa7!g\xdf-\x85D\x98\xdaB\xd2\x1e\x9e\xa1,\xac\xb2"
+"\xd9\xb8\xe7Ooa\xee\x16\x84,O\x97\x1d\xfa\xb6l\xcf_r@\xf1\xd8R\xceH\x82s"
+"\xcb\xcd\xd5\x99)(1:x21:\x00\x97T\xeavi@\x1b\xf6\xf8\x9eu\\\x84h\x8e\xf6"
+"\xc3\x0b\xf0\xc3)))")
+
+privateDSA_agentv3 = ("\x00\x00\x00\x07ssh-dss\x00\x00\x00A\x00\x86\xf0L\xe4"
+"\xacg\xb0e\xedMJ\xc8\xc3jW]\xd3\xbb\xbc\x91\x02\xd4\xeb\xefC\xd5#t'~\xb3"
+"\xee\x99\xfd\x94\xcc()/Ycl\xacR|\x08\xb4\xe5{\xeb\x13\xfdo|!@\xb0V\x17E\x92"
+"\xe1C-\x00\x00\x00\x15\x00\xcfz\xe4\t&\xa3\xa7\x80\x8b\xb2\xbe\x93\xda\x83G"
+"\x84\xe8\xb4P\x17\x00\x00\x00@\x00\xd9\x96\x94\xd6\xf7\xae!i\x05\x92\xe2"
+"\xf0\xb4\xebsFvWS\xeb\x1aT\x187\x03F\xa6\xe6\xf7@\xa8<F\xb6FcN\xb8\xea\xa2"
+"\xf2Y\xdb\x85\x8a\xda\x13<\xa9`}\xca\xe0l\xef\xfd\xbcB\x06r\xad\x1d\x15\x00"
+"\x00\x00 at +\xedh\xb3%\x81KO\x8f\xf7K\xbf\xac\x0b\xa4\xa3\xa7!g\xdf-\x85D\x98"
+"\xdaB\xd2\x1e\x9e\xa1,\xac\xb2\xd9\xb8\xe7Ooa\xee\x16\x84,O\x97\x1d\xfa\xb6"
+"l\xcf_r@\xf1\xd8R\xceH\x82s\xcb\xcd\xd5\x99\x00\x00\x00\x15\x00\x97T\xeavi@"
+"\x1b\xf6\xf8\x9eu\\\x84h\x8e\xf6\xc3\x0b\xf0\xc3")
+
+__all__ = ['DSAData', 'RSAData', 'privateDSA_agentv3', 'privateDSA_lsh',
+        'privateDSA_openssh', 'privateRSA_agentv3', 'privateRSA_lsh',
+        'privateRSA_openssh', 'publicDSA_lsh', 'publicDSA_openssh',
+        'publicRSA_lsh', 'publicRSA_openssh', 'privateRSA_openssh_alternate']
+
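[Editor's note: the constants above are consumed elsewhere in the conch test suite through twisted.conch.ssh.keys. A brief sketch of that use, assuming the same optional dependencies (PyCrypto and pyasn1) that the tests themselves require:]

from twisted.conch.ssh import keys
from twisted.conch.test import keydata

# Parse the OpenSSH-format strings into Key objects.
private = keys.Key.fromString(keydata.privateRSA_openssh)
public = keys.Key.fromString(keydata.publicRSA_openssh)

assert not private.isPublic()
assert public.isPublic()
# blob() yields the public wire-format blob for both, so the pair matches
# (the agent tests below rely on exactly this equivalence).
assert private.blob() == public.blob()
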
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_address.py b/ThirdParty/Twisted/twisted/conch/test/test_address.py
new file mode 100644
index 0000000..cf02275
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_address.py
@@ -0,0 +1,49 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{SSHTransportAddress} in ssh/address.py
+"""
+
+from twisted.trial import unittest
+from twisted.internet.address import IPv4Address
+from twisted.internet.test.test_address import AddressTestCaseMixin
+
+from twisted.conch.ssh.address import SSHTransportAddress
+
+
+
+class SSHTransportAddressTestCase(unittest.TestCase, AddressTestCaseMixin):
+    """
+    L{twisted.conch.ssh.address.SSHTransportAddress} is what Conch transports
+    use to represent the other side of the SSH connection.  This tests the
+    basic functionality of that class (string representation, comparison, &c).
+    """
+
+
+    def _stringRepresentation(self, stringFunction):
+        """
+        The string representation of C{SSHTransportAddress} should be
+        "SSHTransportAddress(<stringFunction on address>)".
+        """
+        addr = self.buildAddress()
+        stringValue = stringFunction(addr)
+        addressValue = stringFunction(addr.address)
+        self.assertEqual(stringValue,
+                         "SSHTransportAddress(%s)" % addressValue)
+
+
+    def buildAddress(self):
+        """
+        Create an arbitrary new C{SSHTransportAddress}.  A new instance is
+        created for each call, but always for the same address.
+        """
+        return SSHTransportAddress(IPv4Address("TCP", "127.0.0.1", 22))
+
+
+    def buildDifferentAddress(self):
+        """
+        Like C{buildAddress}, but with a different fixed address.
+        """
+        return SSHTransportAddress(IPv4Address("TCP", "127.0.0.2", 22))
+
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_agent.py b/ThirdParty/Twisted/twisted/conch/test/test_agent.py
new file mode 100644
index 0000000..532a0e5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_agent.py
@@ -0,0 +1,399 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.ssh.agent}.
+"""
+
+import struct
+
+from twisted.trial import unittest
+
+try:
+    import OpenSSL
+except ImportError:
+    iosim = None
+else:
+    from twisted.test import iosim
+
+try:
+    import Crypto.Cipher.DES3
+except ImportError:
+    Crypto = None
+
+try:
+    import pyasn1
+except ImportError:
+    pyasn1 = None
+
+if Crypto and pyasn1:
+    from twisted.conch.ssh import keys, agent
+else:
+    keys = agent = None
+
+from twisted.conch.test import keydata
+from twisted.conch.error import ConchError, MissingKeyStoreError
+
+
+class StubFactory(object):
+    """
+    Mock factory that provides the keys attribute required by the
+    SSHAgentServerProtocol
+    """
+    def __init__(self):
+        self.keys = {}
+
+
+
+class AgentTestBase(unittest.TestCase):
+    """
+    Tests for SSHAgentServer/Client.
+    """
+    if iosim is None:
+        skip = "iosim requires SSL, but SSL is not available"
+    elif agent is None or keys is None:
+        skip = "Cannot run without PyCrypto or PyASN1"
+
+    def setUp(self):
+        # wire up our client <-> server
+        self.client, self.server, self.pump = iosim.connectedServerAndClient(
+            agent.SSHAgentServer, agent.SSHAgentClient)
+
+        # the server's end of the protocol is stateful and we store it on the
+        # factory, for which we only need a mock
+        self.server.factory = StubFactory()
+
+        # pub/priv keys of each kind
+        self.rsaPrivate = keys.Key.fromString(keydata.privateRSA_openssh)
+        self.dsaPrivate = keys.Key.fromString(keydata.privateDSA_openssh)
+
+        self.rsaPublic = keys.Key.fromString(keydata.publicRSA_openssh)
+        self.dsaPublic = keys.Key.fromString(keydata.publicDSA_openssh)
+
+
+
+class TestServerProtocolContractWithFactory(AgentTestBase):
+    """
+    The server protocol is stateful and so uses its factory to track state
+    across requests.  This test asserts that the protocol raises if its factory
+    doesn't provide the necessary storage for that state.
+    """
+    def test_factorySuppliesKeyStorageForServerProtocol(self):
+        # need a message to send into the server
+        msg = struct.pack('!LB',1, agent.AGENTC_REQUEST_IDENTITIES)
+        del self.server.factory.__dict__['keys']
+        self.assertRaises(MissingKeyStoreError,
+                          self.server.dataReceived, msg)
+
+
+
+class TestUnimplementedVersionOneServer(AgentTestBase):
+    """
+    Tests for methods with no-op implementations on the server. We need these
+    for clients, such as openssh, that try v1 methods before going to v2.
+
+    Because the client doesn't expose these operations with nice method names,
+    we invoke sendRequest directly with an op code.
+    """
+
+    def test_agentc_REQUEST_RSA_IDENTITIES(self):
+        """
+        assert that we get the correct op code for an RSA identities request
+        """
+        d = self.client.sendRequest(agent.AGENTC_REQUEST_RSA_IDENTITIES, '')
+        self.pump.flush()
+        def _cb(packet):
+            self.assertEqual(
+                agent.AGENT_RSA_IDENTITIES_ANSWER, ord(packet[0]))
+        return d.addCallback(_cb)
+
+
+    def test_agentc_REMOVE_RSA_IDENTITY(self):
+        """
+        assert that we get the correct op code for an RSA remove identity request
+        """
+        d = self.client.sendRequest(agent.AGENTC_REMOVE_RSA_IDENTITY, '')
+        self.pump.flush()
+        return d.addCallback(self.assertEqual, '')
+
+
+    def test_agentc_REMOVE_ALL_RSA_IDENTITIES(self):
+        """
+        assert that we get the correct op code for an RSA remove all identities
+        request.
+        """
+        d = self.client.sendRequest(agent.AGENTC_REMOVE_ALL_RSA_IDENTITIES, '')
+        self.pump.flush()
+        return d.addCallback(self.assertEqual, '')
+
+
+
+if agent is not None:
+    class CorruptServer(agent.SSHAgentServer):
+        """
+        A misbehaving server that returns bogus response op codes so that we can
+        verify that our callbacks that deal with these op codes handle such
+        miscreants.
+        """
+        def agentc_REQUEST_IDENTITIES(self, data):
+            self.sendResponse(254, '')
+
+
+        def agentc_SIGN_REQUEST(self, data):
+            self.sendResponse(254, '')
+
+
+
+class TestClientWithBrokenServer(AgentTestBase):
+    """
+    verify error handling code in the client using a misbehaving server
+    """
+
+    def setUp(self):
+        AgentTestBase.setUp(self)
+        self.client, self.server, self.pump = iosim.connectedServerAndClient(
+            CorruptServer, agent.SSHAgentClient)
+        # the server's end of the protocol is stateful and we store it on the
+        # factory, for which we only need a mock
+        self.server.factory = StubFactory()
+
+
+    def test_signDataCallbackErrorHandling(self):
+        """
+        Assert that L{SSHAgentClient.signData} raises a ConchError
+        if we get a response from the server whose opcode doesn't match
+        the protocol for data signing requests.
+        """
+        d = self.client.signData(self.rsaPublic.blob(), "John Hancock")
+        self.pump.flush()
+        return self.assertFailure(d, ConchError)
+
+
+    def test_requestIdentitiesCallbackErrorHandling(self):
+        """
+        Assert that L{SSHAgentClient.requestIdentities} raises a ConchError
+        if we get a response from the server whose opcode doesn't match
+        the protocol for identity requests.
+        """
+        d = self.client.requestIdentities()
+        self.pump.flush()
+        return self.assertFailure(d, ConchError)
+
+
+
+class TestAgentKeyAddition(AgentTestBase):
+    """
+    Test adding different flavors of keys to an agent.
+    """
+
+    def test_addRSAIdentityNoComment(self):
+        """
+        L{SSHAgentClient.addIdentity} adds the private key it is called
+        with to the SSH agent server to which it is connected, associating
+        it with the comment it is called with.
+
+        This test asserts that omitting the comment produces an
+        empty string for the comment on the server.
+        """
+        d = self.client.addIdentity(self.rsaPrivate.privateBlob())
+        self.pump.flush()
+        def _check(ignored):
+            serverKey = self.server.factory.keys[self.rsaPrivate.blob()]
+            self.assertEqual(self.rsaPrivate, serverKey[0])
+            self.assertEqual('', serverKey[1])
+        return d.addCallback(_check)
+
+
+    def test_addDSAIdentityNoComment(self):
+        """
+        L{SSHAgentClient.addIdentity} adds the private key it is called
+        with to the SSH agent server to which it is connected, associating
+        it with the comment it is called with.
+
+        This test asserts that omitting the comment produces an
+        empty string for the comment on the server.
+        """
+        d = self.client.addIdentity(self.dsaPrivate.privateBlob())
+        self.pump.flush()
+        def _check(ignored):
+            serverKey = self.server.factory.keys[self.dsaPrivate.blob()]
+            self.assertEqual(self.dsaPrivate, serverKey[0])
+            self.assertEqual('', serverKey[1])
+        return d.addCallback(_check)
+
+
+    def test_addRSAIdentityWithComment(self):
+        """
+        L{SSHAgentClient.addIdentity} adds the private key it is called
+        with to the SSH agent server to which it is connected, associating
+        it with the comment it is called with.
+
+        This test asserts that the server receives/stores the comment
+        as sent by the client.
+        """
+        d = self.client.addIdentity(
+            self.rsaPrivate.privateBlob(), comment='My special key')
+        self.pump.flush()
+        def _check(ignored):
+            serverKey = self.server.factory.keys[self.rsaPrivate.blob()]
+            self.assertEqual(self.rsaPrivate, serverKey[0])
+            self.assertEqual('My special key', serverKey[1])
+        return d.addCallback(_check)
+
+
+    def test_addDSAIdentityWithComment(self):
+        """
+        L{SSHAgentClient.addIdentity} adds the private key it is called
+        with to the SSH agent server to which it is connected, associating
+        it with the comment it is called with.
+
+        This test asserts that the server receives/stores the comment
+        as sent by the client.
+        """
+        d = self.client.addIdentity(
+            self.dsaPrivate.privateBlob(), comment='My special key')
+        self.pump.flush()
+        def _check(ignored):
+            serverKey = self.server.factory.keys[self.dsaPrivate.blob()]
+            self.assertEqual(self.dsaPrivate, serverKey[0])
+            self.assertEqual('My special key', serverKey[1])
+        return d.addCallback(_check)
+
+
+
+class TestAgentClientFailure(AgentTestBase):
+    def test_agentFailure(self):
+        """
+        verify that the client raises ConchError on AGENT_FAILURE
+        """
+        d = self.client.sendRequest(254, '')
+        self.pump.flush()
+        return self.assertFailure(d, ConchError)
+
+
+
+class TestAgentIdentityRequests(AgentTestBase):
+    """
+    Test operations against a server with identities already loaded.
+    """
+
+    def setUp(self):
+        AgentTestBase.setUp(self)
+        self.server.factory.keys[self.dsaPrivate.blob()] = (
+            self.dsaPrivate, 'a comment')
+        self.server.factory.keys[self.rsaPrivate.blob()] = (
+            self.rsaPrivate, 'another comment')
+
+
+    def test_signDataRSA(self):
+        """
+        Sign data with an RSA private key and then verify it with the public
+        key.
+        """
+        d = self.client.signData(self.rsaPublic.blob(), "John Hancock")
+        self.pump.flush()
+        def _check(sig):
+            expected = self.rsaPrivate.sign("John Hancock")
+            self.assertEqual(expected, sig)
+            self.assertTrue(self.rsaPublic.verify(sig, "John Hancock"))
+        return d.addCallback(_check)
+
+
+    def test_signDataDSA(self):
+        """
+        Sign data with a DSA private key and then verify it with the public
+        key.
+        """
+        d = self.client.signData(self.dsaPublic.blob(), "John Hancock")
+        self.pump.flush()
+        def _check(sig):
+            # Cannot do this because DSA signing uses a random per-signature value:
+            #   expected = self.dsaPrivate.sign("John Hancock")
+            #   self.assertEqual(expected, sig)
+            self.assertTrue(self.dsaPublic.verify(sig, "John Hancock"))
+        return d.addCallback(_check)
+
+
+    def test_signDataRSAErrbackOnUnknownBlob(self):
+        """
+        Assert that we get an errback if we try to sign data using a key that
+        wasn't added.
+        """
+        del self.server.factory.keys[self.rsaPublic.blob()]
+        d = self.client.signData(self.rsaPublic.blob(), "John Hancock")
+        self.pump.flush()
+        return self.assertFailure(d, ConchError)
+
+
+    def test_requestIdentities(self):
+        """
+        Assert that we get all of the keys/comments that we add when we issue a
+        request for all identities.
+        """
+        d = self.client.requestIdentities()
+        self.pump.flush()
+        def _check(keyt):
+            expected = {}
+            expected[self.dsaPublic.blob()] = 'a comment'
+            expected[self.rsaPublic.blob()] = 'another comment'
+
+            received = {}
+            for k in keyt:
+                received[keys.Key.fromString(k[0], type='blob').blob()] = k[1]
+            self.assertEqual(expected, received)
+        return d.addCallback(_check)
+
+
+
+class TestAgentKeyRemoval(AgentTestBase):
+    """
+    Test support for removing keys in a remote server.
+    """
+
+    def setUp(self):
+        AgentTestBase.setUp(self)
+        self.server.factory.keys[self.dsaPrivate.blob()] = (
+            self.dsaPrivate, 'a comment')
+        self.server.factory.keys[self.rsaPrivate.blob()] = (
+            self.rsaPrivate, 'another comment')
+
+
+    def test_removeRSAIdentity(self):
+        """
+        Assert that we can remove an RSA identity.
+        """
+        # only need public key for this
+        d = self.client.removeIdentity(self.rsaPrivate.blob())
+        self.pump.flush()
+
+        def _check(ignored):
+            self.assertEqual(1, len(self.server.factory.keys))
+            self.assertIn(self.dsaPrivate.blob(), self.server.factory.keys)
+            self.assertNotIn(self.rsaPrivate.blob(), self.server.factory.keys)
+        return d.addCallback(_check)
+
+
+    def test_removeDSAIdentity(self):
+        """
+        Assert that we can remove a DSA identity.
+        """
+        # only need public key for this
+        d = self.client.removeIdentity(self.dsaPrivate.blob())
+        self.pump.flush()
+
+        def _check(ignored):
+            self.assertEqual(1, len(self.server.factory.keys))
+            self.assertIn(self.rsaPrivate.blob(), self.server.factory.keys)
+        return d.addCallback(_check)
+
+
+    def test_removeAllIdentities(self):
+        """
+        Assert that we can remove all identities.
+        """
+        d = self.client.removeAllIdentities()
+        self.pump.flush()
+
+        def _check(ignored):
+            self.assertEqual(0, len(self.server.factory.keys))
+        return d.addCallback(_check)
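[Editor's note: the agent tests above use twisted.test.iosim to connect the client and server protocols over in-memory transports; pump.flush() moves any pending bytes in both directions. A condensed sketch of that pattern outside a TestCase, with the same optional dependencies as the tests; StubFactory is the mock defined above:]

from twisted.test import iosim
from twisted.conch.ssh import agent

# A connected client/server pair plus an IOPump; no real sockets involved.
client, server, pump = iosim.connectedServerAndClient(
    agent.SSHAgentServer, agent.SSHAgentClient)
server.factory = StubFactory()

d = client.requestIdentities()   # queue a request on the client side
pump.flush()                     # deliver the request and the response
# d has now fired with a list of (publicKeyBlob, comment) pairs --
# empty here, since nothing was added to the stub key store.
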
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_cftp.py b/ThirdParty/Twisted/twisted/conch/test/test_cftp.py
new file mode 100644
index 0000000..03e327a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_cftp.py
@@ -0,0 +1,975 @@
+# -*- test-case-name: twisted.conch.test.test_cftp -*-
+# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
+# See LICENSE file for details.
+
+"""
+Tests for L{twisted.conch.scripts.cftp}.
+"""
+
+import locale
+import time, sys, os, operator, getpass, struct
+from StringIO import StringIO
+
+from twisted.conch.test.test_ssh import Crypto, pyasn1
+
+_reason = None
+if Crypto and pyasn1:
+    try:
+        from twisted.conch import unix
+        from twisted.conch.scripts import cftp
+        from twisted.conch.test.test_filetransfer import FileTransferForTestAvatar
+    except ImportError, e:
+        # Python 2.3 compatibility fix
+        sys.modules.pop("twisted.conch.unix", None)
+        unix = None
+        _reason = str(e)
+        del e
+else:
+    unix = None
+
+
+from twisted.python.fakepwd import UserDatabase
+from twisted.trial.unittest import TestCase
+from twisted.cred import portal
+from twisted.internet import reactor, protocol, interfaces, defer, error
+from twisted.internet.utils import getProcessOutputAndValue
+from twisted.python import log
+from twisted.conch import ls
+from twisted.test.proto_helpers import StringTransport
+from twisted.internet.task import Clock
+
+from twisted.conch.test import test_ssh, test_conch
+from twisted.conch.test.test_filetransfer import SFTPTestBase
+from twisted.conch.test.test_filetransfer import FileTransferTestAvatar
+
+
+
+class ListingTests(TestCase):
+    """
+    Tests for L{lsLine}, the function which generates an entry for a file or
+    directory in an SFTP I{ls} command's output.
+    """
+    if getattr(time, 'tzset', None) is None:
+        skip = "Cannot test timestamp formatting code without time.tzset"
+
+    def setUp(self):
+        """
+        Patch the L{ls} module's time function so the results of L{lsLine} are
+        deterministic.
+        """
+        self.now = 123456789
+        def fakeTime():
+            return self.now
+        self.patch(ls, 'time', fakeTime)
+
+        # Make sure that the timezone ends up the same after these tests as
+        # it was before.
+        if 'TZ' in os.environ:
+            self.addCleanup(operator.setitem, os.environ, 'TZ', os.environ['TZ'])
+            self.addCleanup(time.tzset)
+        else:
+            def cleanup():
+                # os.environ.pop is unreliable here; delete the key directly instead.
+                try:
+                    del os.environ['TZ']
+                except KeyError:
+                    pass
+                time.tzset()
+            self.addCleanup(cleanup)
+
+
+    def _lsInTimezone(self, timezone, stat):
+        """
+        Call L{ls.lsLine} after setting the timezone to C{timezone} and return
+        the result.
+        """
+        # Set the timezone to a well-known value so the timestamps are
+        # predictable.
+        os.environ['TZ'] = timezone
+        time.tzset()
+        return ls.lsLine('foo', stat)
+
+
+    def test_oldFile(self):
+        """
+        A file with an mtime six months (approximately) or more in the past has
+        a listing including a low-resolution timestamp.
+        """
+        # Go with 7 months.  That's more than 6 months.
+        then = self.now - (60 * 60 * 24 * 31 * 7)
+        stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
+
+        self.assertEqual(
+            self._lsInTimezone('America/New_York', stat),
+            '!---------    0 0        0               0 Apr 26  1973 foo')
+        self.assertEqual(
+            self._lsInTimezone('Pacific/Auckland', stat),
+            '!---------    0 0        0               0 Apr 27  1973 foo')
+
+
+    def test_oldSingleDigitDayOfMonth(self):
+        """
+        A file with a high-resolution timestamp which falls on a day of the
+        month which can be represented by one decimal digit is formatted with
+        one padding 0 to preserve the columns which come after it.
+        """
+        # A point about 7 months in the past, tweaked to fall on the first of a
+        # month so we test the case we want to test.
+        then = self.now - (60 * 60 * 24 * 31 * 7) + (60 * 60 * 24 * 5)
+        stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
+
+        self.assertEqual(
+            self._lsInTimezone('America/New_York', stat),
+            '!---------    0 0        0               0 May 01  1973 foo')
+        self.assertEqual(
+            self._lsInTimezone('Pacific/Auckland', stat),
+            '!---------    0 0        0               0 May 02  1973 foo')
+
+
+    def test_newFile(self):
+        """
+        A file with an mtime fewer than six months (approximately) in the past
+        has a listing including a high-resolution timestamp excluding the year.
+        """
+        # A point about three months in the past.
+        then = self.now - (60 * 60 * 24 * 31 * 3)
+        stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
+
+        self.assertEqual(
+            self._lsInTimezone('America/New_York', stat),
+            '!---------    0 0        0               0 Aug 28 17:33 foo')
+        self.assertEqual(
+            self._lsInTimezone('Pacific/Auckland', stat),
+            '!---------    0 0        0               0 Aug 29 09:33 foo')
+
+
+    def test_localeIndependent(self):
+        """
+        The month name in the date is locale independent.
+        """
+        # A point about three months in the past.
+        then = self.now - (60 * 60 * 24 * 31 * 3)
+        stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
+
+        # Fake that we're in a language where August is not Aug (e.g.: Spanish)
+        currentLocale = locale.getlocale()
+        locale.setlocale(locale.LC_ALL, "es_AR.UTF8")
+        self.addCleanup(locale.setlocale, locale.LC_ALL, currentLocale)
+
+        self.assertEqual(
+            self._lsInTimezone('America/New_York', stat),
+            '!---------    0 0        0               0 Aug 28 17:33 foo')
+        self.assertEqual(
+            self._lsInTimezone('Pacific/Auckland', stat),
+            '!---------    0 0        0               0 Aug 29 09:33 foo')
+
+    # If the alternate locale is not available, the previous test will be
+    # skipped; install the es_AR.UTF8 locale to run it.
+    currentLocale = locale.getlocale()
+    try:
+        try:
+            locale.setlocale(locale.LC_ALL, "es_AR.UTF8")
+        except locale.Error:
+            test_localeIndependent.skip = "The es_AR.UTF8 locale is not installed."
+    finally:
+        locale.setlocale(locale.LC_ALL, currentLocale)
+
+
+    def test_newSingleDigitDayOfMonth(self):
+        """
+        A file with a high-resolution timestamp which falls on a day of the
+        month which can be represented by one decimal digit is formatted with
+        one padding 0 to preserve the columns which come after it.
+        """
+        # A point about three months in the past, tweaked to fall on the first
+        # of a month so we test the case we want to test.
+        then = self.now - (60 * 60 * 24 * 31 * 3) + (60 * 60 * 24 * 4)
+        stat = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
+
+        self.assertEqual(
+            self._lsInTimezone('America/New_York', stat),
+            '!---------    0 0        0               0 Sep 01 17:33 foo')
+        self.assertEqual(
+            self._lsInTimezone('Pacific/Auckland', stat),
+            '!---------    0 0        0               0 Sep 02 09:33 foo')
+
+
+
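[Editor's note: the listing tests above hand lsLine a synthetic os.stat_result and compare against fixed strings; only the size and mtime slots matter for the timestamp column being tested. A tiny illustration of building such a stat tuple, with values mirroring the tests:]

import os

now = 123456789                        # the patched 'current' time used above
then = now - (60 * 60 * 24 * 31 * 7)   # roughly seven months earlier

# Field order: mode, ino, dev, nlink, uid, gid, size, atime, mtime, ctime.
st = os.stat_result((0, 0, 0, 0, 0, 0, 0, 0, then, 0))
assert st.st_size == 0
assert st.st_mtime == then
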
+class StdioClientTests(TestCase):
+    """
+    Tests for L{cftp.StdioClient}.
+    """
+    def setUp(self):
+        """
+        Create a L{cftp.StdioClient} hooked up to dummy transport and a fake
+        user database.
+        """
+        class Connection:
+            pass
+
+        conn = Connection()
+        conn.transport = StringTransport()
+        conn.transport.localClosed = False
+
+        self.client = cftp.StdioClient(conn)
+        self.database = self.client._pwd = UserDatabase()
+
+        # Intentionally bypassing makeConnection - that triggers some code
+        # which uses features not provided by our dumb Connection fake.
+        self.client.transport = StringTransport()
+
+
+    def test_exec(self):
+        """
+        The I{exec} command runs its arguments locally in a child process
+        using the user's shell.
+        """
+        self.database.addUser(
+            getpass.getuser(), 'secret', os.getuid(), 1234, 'foo', 'bar',
+            sys.executable)
+
+        d = self.client._dispatchCommand("exec print 1 + 2")
+        d.addCallback(self.assertEqual, "3\n")
+        return d
+
+
+    def test_execWithoutShell(self):
+        """
+        If the local user has no shell, the I{exec} command runs its arguments
+        using I{/bin/sh}.
+        """
+        self.database.addUser(
+            getpass.getuser(), 'secret', os.getuid(), 1234, 'foo', 'bar', '')
+
+        d = self.client._dispatchCommand("exec echo hello")
+        d.addCallback(self.assertEqual, "hello\n")
+        return d
+
+
+    def test_bang(self):
+        """
+        The I{exec} command is run for lines which start with C{"!"}.
+        """
+        self.database.addUser(
+            getpass.getuser(), 'secret', os.getuid(), 1234, 'foo', 'bar',
+            '/bin/sh')
+
+        d = self.client._dispatchCommand("!echo hello")
+        d.addCallback(self.assertEqual, "hello\n")
+        return d
+
+
+    def setKnownConsoleSize(self, width, height):
+        """
+        For the duration of this test, patch C{cftp}'s C{fcntl} module to return
+        a fixed width and height.
+
+        @param width: the width in characters
+        @type width: C{int}
+        @param height: the height in characters
+        @type height: C{int}
+        """
+        import tty # local import to avoid win32 issues
+        class FakeFcntl(object):
+            def ioctl(self, fd, opt, mutate):
+                if opt != tty.TIOCGWINSZ:
+                    self.fail("Only window-size queries supported.")
+                return struct.pack("4H", height, width, 0, 0)
+        self.patch(cftp, "fcntl", FakeFcntl())
+
+
+    def test_progressReporting(self):
+        """
+        L{StdioClient._printProgressBar} prints a progress description,
+        including percent done, amount transferred, transfer rate, and time
+        remaining, all based the given start time, the given L{FileWrapper}'s
+        progress information and the reactor's current time.
+        """
+        # Use a short, known console width because this simple test doesn't need
+        # to test the console padding.
+        self.setKnownConsoleSize(10, 34)
+        clock = self.client.reactor = Clock()
+        wrapped = StringIO("x")
+        wrapped.name = "sample"
+        wrapper = cftp.FileWrapper(wrapped)
+        wrapper.size = 1024 * 10
+        startTime = clock.seconds()
+        clock.advance(2.0)
+        wrapper.total += 4096
+        self.client._printProgressBar(wrapper, startTime)
+        self.assertEqual(self.client.transport.value(),
+                          "\rsample 40% 4.0kB 2.0kBps 00:03 ")
+
+
+    def test_reportNoProgress(self):
+        """
+        L{StdioClient._printProgressBar} prints a progress description that
+        indicates 0 bytes transferred if no bytes have been transferred and no
+        time has passed.
+        """
+        self.setKnownConsoleSize(10, 34)
+        clock = self.client.reactor = Clock()
+        wrapped = StringIO("x")
+        wrapped.name = "sample"
+        wrapper = cftp.FileWrapper(wrapped)
+        startTime = clock.seconds()
+        self.client._printProgressBar(wrapper, startTime)
+        self.assertEqual(self.client.transport.value(),
+                          "\rsample  0% 0.0B 0.0Bps 00:00 ")
+
+
+
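[Editor's note: the expected string in test_progressReporting follows from the numbers fed in: 4096 of 10240 bytes transferred after 2 seconds. A quick check of that arithmetic in plain Python, mirroring the test values:]

total, transferred, elapsed = 1024 * 10, 4096, 2.0

percent = transferred * 100 // total        # 40       -> "40%"
rate = transferred / elapsed                # 2048.0 B/s -> "2.0kBps"
remaining = (total - transferred) / rate    # 3.0 s    -> "00:03"

assert (percent, rate, remaining) == (40, 2048.0, 3.0)
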
+class FileTransferTestRealm:
+    def __init__(self, testDir):
+        self.testDir = testDir
+
+    def requestAvatar(self, avatarID, mind, *interfaces):
+        a = FileTransferTestAvatar(self.testDir)
+        return interfaces[0], a, lambda: None
+
+
+class SFTPTestProcess(protocol.ProcessProtocol):
+    """
+    Protocol for testing cftp. Provides an interface between Python (where all
+    the tests are) and the cftp client process (which does the work that is
+    being tested).
+    """
+
+    def __init__(self, onOutReceived):
+        """
+        @param onOutReceived: A L{Deferred} to be fired as soon as data is
+        received from stdout.
+        """
+        self.clearBuffer()
+        self.onOutReceived = onOutReceived
+        self.onProcessEnd = None
+        self._expectingCommand = None
+        self._processEnded = False
+
+    def clearBuffer(self):
+        """
+        Clear any buffered data received from stdout. Should be private.
+        """
+        self.buffer = ''
+        self._linesReceived = []
+        self._lineBuffer = ''
+
+    def outReceived(self, data):
+        """
+        Called by Twisted when the cftp client prints data to stdout.
+        """
+        log.msg('got %s' % data)
+        lines = (self._lineBuffer + data).split('\n')
+        self._lineBuffer = lines.pop(-1)
+        self._linesReceived.extend(lines)
+        # XXX - not strictly correct.
+        # We really want onOutReceived to fire after the first 'cftp>' prompt
+        # has been received. (See use in TestOurServerCmdLineClient.setUp)
+        if self.onOutReceived is not None:
+            d, self.onOutReceived = self.onOutReceived, None
+            d.callback(data)
+        self.buffer += data
+        self._checkForCommand()
+
+    def _checkForCommand(self):
+        prompt = 'cftp> '
+        if self._expectingCommand and self._lineBuffer == prompt:
+            buf = '\n'.join(self._linesReceived)
+            if buf.startswith(prompt):
+                buf = buf[len(prompt):]
+            self.clearBuffer()
+            d, self._expectingCommand = self._expectingCommand, None
+            d.callback(buf)
+
+    def errReceived(self, data):
+        """
+        Called by Twisted when the cftp client prints data to stderr.
+        """
+        log.msg('err: %s' % data)
+
+    def getBuffer(self):
+        """
+        Return the contents of the buffer of data received from stdout.
+        """
+        return self.buffer
+
+    def runCommand(self, command):
+        """
+        Issue the given command via the cftp client. Return a C{Deferred} that
+        fires when the server returns a result. Note that the C{Deferred} will
+        callback even if the server returns some kind of error.
+
+        @param command: A string containing an sftp command.
+
+        @return: A C{Deferred} that fires when the sftp server returns a
+        result. The payload is the server's response string.
+        """
+        self._expectingCommand = defer.Deferred()
+        self.clearBuffer()
+        self.transport.write(command + '\n')
+        return self._expectingCommand
+
+    def runScript(self, commands):
+        """
+        Run each command in sequence and return a Deferred that fires when all
+        commands are completed.
+
+        @param commands: A list of strings containing sftp commands.
+
+        @return: A C{Deferred} that fires when all commands are completed. The
+        payload is a list of response strings from the server, in the same
+        order as the commands.
+        """
+        sem = defer.DeferredSemaphore(1)
+        dl = [sem.run(self.runCommand, command) for command in commands]
+        return defer.gatherResults(dl)
+
+    def killProcess(self):
+        """
+        Kill the process if it is still running.
+
+        If the process is still running, sends a KILL signal to the transport
+        and returns a C{Deferred} which fires when L{processEnded} is called.
+
+        @return: a C{Deferred}.
+        """
+        if self._processEnded:
+            return defer.succeed(None)
+        self.onProcessEnd = defer.Deferred()
+        self.transport.signalProcess('KILL')
+        return self.onProcessEnd
+
+    def processEnded(self, reason):
+        """
+        Called by Twisted when the cftp client process ends.
+        """
+        self._processEnded = True
+        if self.onProcessEnd:
+            d, self.onProcessEnd = self.onProcessEnd, None
+            d.callback(None)
+
+
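[Editor's note: SFTPTestProcess.runScript above serializes its commands with a one-token DeferredSemaphore while gatherResults collects the outputs in order. A minimal sketch of that pattern with a hypothetical task function:]

from twisted.internet import defer

def runSerially(task, items):
    # One token means only a single task Deferred is outstanding at a time,
    # so the calls happen strictly in sequence.
    sem = defer.DeferredSemaphore(1)
    ds = [sem.run(task, item) for item in items]
    # Fires with the results in the same order as 'items'.
    return defer.gatherResults(ds)

# e.g. runSerially(proc.runCommand, ['pwd', 'ls']) for an SFTPTestProcess.
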
+class CFTPClientTestBase(SFTPTestBase):
+    def setUp(self):
+        f = open('dsa_test.pub','w')
+        f.write(test_ssh.publicDSA_openssh)
+        f.close()
+        f = open('dsa_test','w')
+        f.write(test_ssh.privateDSA_openssh)
+        f.close()
+        os.chmod('dsa_test', 33152)
+        f = open('kh_test','w')
+        f.write('127.0.0.1 ' + test_ssh.publicRSA_openssh)
+        f.close()
+        return SFTPTestBase.setUp(self)
+
+    def startServer(self):
+        realm = FileTransferTestRealm(self.testDir)
+        p = portal.Portal(realm)
+        p.registerChecker(test_ssh.ConchTestPublicKeyChecker())
+        fac = test_ssh.ConchTestServerFactory()
+        fac.portal = p
+        self.server = reactor.listenTCP(0, fac, interface="127.0.0.1")
+
+    def stopServer(self):
+        if not hasattr(self.server.factory, 'proto'):
+            return self._cbStopServer(None)
+        self.server.factory.proto.expectedLoseConnection = 1
+        d = defer.maybeDeferred(
+            self.server.factory.proto.transport.loseConnection)
+        d.addCallback(self._cbStopServer)
+        return d
+
+    def _cbStopServer(self, ignored):
+        return defer.maybeDeferred(self.server.stopListening)
+
+    def tearDown(self):
+        for f in ['dsa_test.pub', 'dsa_test', 'kh_test']:
+            try:
+                os.remove(f)
+            except:
+                pass
+        return SFTPTestBase.tearDown(self)
+
+
+
+class TestOurServerCmdLineClient(CFTPClientTestBase):
+
+    def setUp(self):
+        CFTPClientTestBase.setUp(self)
+
+        self.startServer()
+        cmds = ('-p %i -l testuser '
+               '--known-hosts kh_test '
+               '--user-authentications publickey '
+               '--host-key-algorithms ssh-rsa '
+               '-i dsa_test '
+               '-a '
+               '-v '
+               '127.0.0.1')
+        port = self.server.getHost().port
+        cmds = test_conch._makeArgs((cmds % port).split(), mod='cftp')
+        log.msg('running %s %s' % (sys.executable, cmds))
+        d = defer.Deferred()
+        self.processProtocol = SFTPTestProcess(d)
+        d.addCallback(lambda _: self.processProtocol.clearBuffer())
+        env = os.environ.copy()
+        env['PYTHONPATH'] = os.pathsep.join(sys.path)
+        reactor.spawnProcess(self.processProtocol, sys.executable, cmds,
+                             env=env)
+        return d
+
+    def tearDown(self):
+        d = self.stopServer()
+        d.addCallback(lambda _: self.processProtocol.killProcess())
+        return d
+
+    def _killProcess(self, ignored):
+        try:
+            self.processProtocol.transport.signalProcess('KILL')
+        except error.ProcessExitedAlready:
+            pass
+
+    def runCommand(self, command):
+        """
+        Run the given command with the cftp client. Return a C{Deferred} that
+        fires when the command is complete. Payload is the server's output for
+        that command.
+        """
+        return self.processProtocol.runCommand(command)
+
+    def runScript(self, *commands):
+        """
+        Run the given commands with the cftp client. Returns a C{Deferred}
+        that fires when the commands are all complete. The C{Deferred}'s
+        payload is a list of output for each command.
+        """
+        return self.processProtocol.runScript(commands)
+
+    def testCdPwd(self):
+        """
+        Test that 'pwd' reports the current remote directory, that 'lpwd'
+        reports the current local directory, and that changing to a
+        subdirectory then changing to its parent leaves you in the original
+        remote directory.
+        """
+        # XXX - not actually a unit test, see docstring.
+        homeDir = os.path.join(os.getcwd(), self.testDir)
+        d = self.runScript('pwd', 'lpwd', 'cd testDirectory', 'cd ..', 'pwd')
+        d.addCallback(lambda xs: xs[:3] + xs[4:])
+        d.addCallback(self.assertEqual,
+                      [homeDir, os.getcwd(), '', homeDir])
+        return d
+
+    def testChAttrs(self):
+        """
+        Check that 'ls -l' output includes the access permissions and that
+        this output changes appropriately with 'chmod'.
+        """
+        def _check(results):
+            self.flushLoggedErrors()
+            self.assertTrue(results[0].startswith('-rw-r--r--'))
+            self.assertEqual(results[1], '')
+            self.assertTrue(results[2].startswith('----------'), results[2])
+            self.assertEqual(results[3], '')
+
+        d = self.runScript('ls -l testfile1', 'chmod 0 testfile1',
+                           'ls -l testfile1', 'chmod 644 testfile1')
+        return d.addCallback(_check)
+        # XXX test chgrp/own
+
+
+    def testList(self):
+        """
+        Check 'ls' works as expected. Checks for wildcards, hidden files,
+        listing directories and listing empty directories.
+        """
+        def _check(results):
+            self.assertEqual(results[0], ['testDirectory', 'testRemoveFile',
+                                          'testRenameFile', 'testfile1'])
+            self.assertEqual(results[1], ['testDirectory', 'testRemoveFile',
+                                          'testRenameFile', 'testfile1'])
+            self.assertEqual(results[2], ['testRemoveFile', 'testRenameFile'])
+            self.assertEqual(results[3], ['.testHiddenFile', 'testRemoveFile',
+                                          'testRenameFile'])
+            self.assertEqual(results[4], [''])
+        d = self.runScript('ls', 'ls ../' + os.path.basename(self.testDir),
+                           'ls *File', 'ls -a *File', 'ls -l testDirectory')
+        d.addCallback(lambda xs: [x.split('\n') for x in xs])
+        return d.addCallback(_check)
+
+
+    def testHelp(self):
+        """
+        Check that running the '?' command returns help.
+        """
+        d = self.runCommand('?')
+        d.addCallback(self.assertEqual,
+                      cftp.StdioClient(None).cmd_HELP('').strip())
+        return d
+
+    def assertFilesEqual(self, name1, name2, msg=None):
+        """
+        Assert that the files at C{name1} and C{name2} contain exactly the
+        same data.
+        """
+        f1 = file(name1).read()
+        f2 = file(name2).read()
+        self.assertEqual(f1, f2, msg)
+
+
+    def testGet(self):
+        """
+        Test that 'get' saves the remote file to the correct local location,
+        that the output of 'get' is correct and that 'rm' actually removes
+        the file.
+        """
+        # XXX - not actually a unit test
+        expectedOutput = ("Transferred %s/%s/testfile1 to %s/test file2"
+                          % (os.getcwd(), self.testDir, self.testDir))
+        def _checkGet(result):
+            self.assertTrue(result.endswith(expectedOutput))
+            self.assertFilesEqual(self.testDir + '/testfile1',
+                                  self.testDir + '/test file2',
+                                  "get failed")
+            return self.runCommand('rm "test file2"')
+
+        d = self.runCommand('get testfile1 "%s/test file2"' % (self.testDir,))
+        d.addCallback(_checkGet)
+        d.addCallback(lambda _: self.failIf(
+            os.path.exists(self.testDir + '/test file2')))
+        return d
+
+
+    def testWildcardGet(self):
+        """
+        Test that 'get' works correctly when given wildcard parameters.
+        """
+        def _check(ignored):
+            self.assertFilesEqual(self.testDir + '/testRemoveFile',
+                                  'testRemoveFile',
+                                  'testRemoveFile get failed')
+            self.assertFilesEqual(self.testDir + '/testRenameFile',
+                                  'testRenameFile',
+                                  'testRenameFile get failed')
+
+        d = self.runCommand('get testR*')
+        return d.addCallback(_check)
+
+
+    def testPut(self):
+        """
+        Check that 'put' uploads files correctly and that they can be
+        successfully removed. Also check the output of the put command.
+        """
+        # XXX - not actually a unit test
+        expectedOutput = ('Transferred %s/testfile1 to %s/%s/test"file2'
+                          % (self.testDir, os.getcwd(), self.testDir))
+        def _checkPut(result):
+            self.assertFilesEqual(self.testDir + '/testfile1',
+                                  self.testDir + '/test"file2')
+            self.failUnless(result.endswith(expectedOutput))
+            return self.runCommand('rm "test\\"file2"')
+
+        d = self.runCommand('put %s/testfile1 "test\\"file2"'
+                            % (self.testDir,))
+        d.addCallback(_checkPut)
+        d.addCallback(lambda _: self.failIf(
+            os.path.exists(self.testDir + '/test"file2')))
+        return d
+
+
+    def test_putOverLongerFile(self):
+        """
+        Check that 'put' uploads files correctly when overwriting a longer
+        file.
+        """
+        # XXX - not actually a unit test
+        f = file(os.path.join(self.testDir, 'shorterFile'), 'w')
+        f.write("a")
+        f.close()
+        f = file(os.path.join(self.testDir, 'longerFile'), 'w')
+        f.write("bb")
+        f.close()
+        def _checkPut(result):
+            self.assertFilesEqual(self.testDir + '/shorterFile',
+                                  self.testDir + '/longerFile')
+
+        d = self.runCommand('put %s/shorterFile longerFile'
+                            % (self.testDir,))
+        d.addCallback(_checkPut)
+        return d
+
+
+    def test_putMultipleOverLongerFile(self):
+        """
+        Check that 'put' uploads files correctly when overwriting a longer
+        file and you use a wildcard to specify the files to upload.
+        """
+        # XXX - not actually a unit test
+        os.mkdir(os.path.join(self.testDir, 'dir'))
+        f = file(os.path.join(self.testDir, 'dir', 'file'), 'w')
+        f.write("a")
+        f.close()
+        f = file(os.path.join(self.testDir, 'file'), 'w')
+        f.write("bb")
+        f.close()
+        def _checkPut(result):
+            self.assertFilesEqual(self.testDir + '/dir/file',
+                                  self.testDir + '/file')
+
+        d = self.runCommand('put %s/dir/*'
+                            % (self.testDir,))
+        d.addCallback(_checkPut)
+        return d
+
+
+    def testWildcardPut(self):
+        """
+        What happens if you issue a 'put' command and include a wildcard (i.e.
+        '*') in the parameter? Check that all files matching the wildcard are
+        uploaded to the correct directory.
+        """
+        def check(results):
+            self.assertEqual(results[0], '')
+            self.assertEqual(results[2], '')
+            self.assertFilesEqual(self.testDir + '/testRemoveFile',
+                                  self.testDir + '/../testRemoveFile',
+                                  'testRemoveFile get failed')
+            self.assertFilesEqual(self.testDir + '/testRenameFile',
+                                  self.testDir + '/../testRenameFile',
+                                  'testRenameFile get failed')
+
+        d = self.runScript('cd ..',
+                           'put %s/testR*' % (self.testDir,),
+                           'cd %s' % os.path.basename(self.testDir))
+        d.addCallback(check)
+        return d
+
+
+    def testLink(self):
+        """
+        Test that 'ln' creates a file which appears as a link in the output of
+        'ls'. Check that removing the new file succeeds without output.
+        """
+        def _check(results):
+            self.flushLoggedErrors()
+            self.assertEqual(results[0], '')
+            self.assertTrue(results[1].startswith('l'), 'link failed')
+            return self.runCommand('rm testLink')
+
+        d = self.runScript('ln testLink testfile1', 'ls -l testLink')
+        d.addCallback(_check)
+        d.addCallback(self.assertEqual, '')
+        return d
+
+
+    def testRemoteDirectory(self):
+        """
+        Test that we can create and remove directories with the cftp client.
+        """
+        def _check(results):
+            self.assertEqual(results[0], '')
+            self.assertTrue(results[1].startswith('d'))
+            return self.runCommand('rmdir testMakeDirectory')
+
+        d = self.runScript('mkdir testMakeDirectory',
+                           'ls -l testMakeDirector?')
+        d.addCallback(_check)
+        d.addCallback(self.assertEqual, '')
+        return d
+
+
+    def test_existingRemoteDirectory(self):
+        """
+        Test that a C{mkdir} on an existing directory fails with the
+        appropriate error, and doesn't log a useless error server-side.
+        """
+        def _check(results):
+            self.assertEqual(results[0], '')
+            self.assertEqual(results[1],
+                              'remote error 11: mkdir failed')
+
+        d = self.runScript('mkdir testMakeDirectory',
+                           'mkdir testMakeDirectory')
+        d.addCallback(_check)
+        return d
+
+
+    def testLocalDirectory(self):
+        """
+        Test that we can create a directory locally and remove it with the
+        cftp client. This test works because the 'remote' server is running
+        out of a local directory.
+        """
+        d = self.runCommand('lmkdir %s/testLocalDirectory' % (self.testDir,))
+        d.addCallback(self.assertEqual, '')
+        d.addCallback(lambda _: self.runCommand('rmdir testLocalDirectory'))
+        d.addCallback(self.assertEqual, '')
+        return d
+
+
+    def testRename(self):
+        """
+        Test that we can rename a file.
+        """
+        def _check(results):
+            self.assertEqual(results[0], '')
+            self.assertEqual(results[1], 'testfile2')
+            return self.runCommand('rename testfile2 testfile1')
+
+        d = self.runScript('rename testfile1 testfile2', 'ls testfile?')
+        d.addCallback(_check)
+        d.addCallback(self.assertEqual, '')
+        return d
+
+
+
+class TestOurServerBatchFile(CFTPClientTestBase):
+    def setUp(self):
+        CFTPClientTestBase.setUp(self)
+        self.startServer()
+
+    def tearDown(self):
+        CFTPClientTestBase.tearDown(self)
+        return self.stopServer()
+
+    def _getBatchOutput(self, f):
+        fn = self.mktemp()
+        open(fn, 'w').write(f)
+        port = self.server.getHost().port
+        cmds = ('-p %i -l testuser '
+                    '--known-hosts kh_test '
+                    '--user-authentications publickey '
+                    '--host-key-algorithms ssh-rsa '
+                    '-i dsa_test '
+                    '-a '
+                    '-v -b %s 127.0.0.1') % (port, fn)
+        cmds = test_conch._makeArgs(cmds.split(), mod='cftp')[1:]
+        log.msg('running %s %s' % (sys.executable, cmds))
+        env = os.environ.copy()
+        env['PYTHONPATH'] = os.pathsep.join(sys.path)
+
+        self.server.factory.expectedLoseConnection = 1
+
+        d = getProcessOutputAndValue(sys.executable, cmds, env=env)
+
+        def _cleanup(res):
+            os.remove(fn)
+            return res
+
+        d.addCallback(lambda res: res[0])
+        d.addBoth(_cleanup)
+
+        return d
+
+    def testBatchFile(self):
+        """Test whether batch file function of cftp ('cftp -b batchfile').
+        This works by treating the file as a list of commands to be run.
+        """
+        cmds = """pwd
+ls
+exit
+"""
+        def _cbCheckResult(res):
+            res = res.split('\n')
+            log.msg('RES %s' % str(res))
+            self.failUnless(res[1].find(self.testDir) != -1, repr(res))
+            self.assertEqual(res[3:-2], ['testDirectory', 'testRemoveFile',
+                                             'testRenameFile', 'testfile1'])
+
+        d = self._getBatchOutput(cmds)
+        d.addCallback(_cbCheckResult)
+        return d
+
+    def testError(self):
+        """Test that an error in the batch file stops running the batch.
+        """
+        cmds = """chown 0 missingFile
+pwd
+exit
+"""
+        def _cbCheckResult(res):
+            self.failIf(res.find(self.testDir) != -1)
+
+        d = self._getBatchOutput(cmds)
+        d.addCallback(_cbCheckResult)
+        return d
+
+    def testIgnoredError(self):
+        """Test that a minus sign '-' at the front of a line ignores
+        any errors.
+        """
+        cmds = """-chown 0 missingFile
+pwd
+exit
+"""
+        def _cbCheckResult(res):
+            self.failIf(res.find(self.testDir) == -1)
+
+        d = self._getBatchOutput(cmds)
+        d.addCallback(_cbCheckResult)
+        return d
+
+
+
+class TestOurServerSftpClient(CFTPClientTestBase):
+    """
+    Test the sftp server against the sftp command-line client.
+    """
+
+    def setUp(self):
+        CFTPClientTestBase.setUp(self)
+        return self.startServer()
+
+
+    def tearDown(self):
+        return self.stopServer()
+
+
+    def test_extendedAttributes(self):
+        """
+        Test the return of extended attributes by the server: the sftp client
+        should ignore them, but still be able to parse the response correctly.
+
+        This test is mainly here to check that
+        L{filetransfer.FILEXFER_ATTR_EXTENDED} has the correct value.
+        """
+        fn = self.mktemp()
+        open(fn, 'w').write("ls .\nexit")
+        port = self.server.getHost().port
+
+        oldGetAttr = FileTransferForTestAvatar._getAttrs
+        def _getAttrs(self, s):
+            attrs = oldGetAttr(self, s)
+            attrs["ext_foo"] = "bar"
+            return attrs
+
+        self.patch(FileTransferForTestAvatar, "_getAttrs", _getAttrs)
+
+        self.server.factory.expectedLoseConnection = True
+        cmds = ('-o', 'IdentityFile=dsa_test',
+                '-o', 'UserKnownHostsFile=kh_test',
+                '-o', 'HostKeyAlgorithms=ssh-rsa',
+                '-o', 'Port=%i' % (port,), '-b', fn, 'testuser at 127.0.0.1')
+        d = getProcessOutputAndValue("sftp", cmds)
+        def check(result):
+            self.assertEqual(result[2], 0)
+            for i in ['testDirectory', 'testRemoveFile',
+                      'testRenameFile', 'testfile1']:
+                self.assertIn(i, result[0])
+        return d.addCallback(check)
+
+
+
+if unix is None or Crypto is None or pyasn1 is None or interfaces.IReactorProcess(reactor, None) is None:
+    if _reason is None:
+        _reason = "don't run w/o spawnProcess or PyCrypto or pyasn1"
+    TestOurServerCmdLineClient.skip = _reason
+    TestOurServerBatchFile.skip = _reason
+    TestOurServerSftpClient.skip = _reason
+    StdioClientTests.skip = _reason
+else:
+    from twisted.python.procutils import which
+    if not which('sftp'):
+        TestOurServerSftpClient.skip = "no sftp command-line client available"
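
The batch-file tests above exercise cftp's '-b' mode: the file is a list of
commands, execution stops at the first failing line, and a leading '-'
suppresses that line's error. Roughly, and with an illustrative file name,
port and option subset (not taken from the tests themselves):

    # Sketch only: mirrors by hand what _getBatchOutput() above builds.
    batch = '\n'.join([
        '-chown 0 maybeMissingFile',   # leading '-': an error here does not abort the batch
        'pwd',
        'ls',
        'exit',
    ])
    open('example.batch', 'w').write(batch)
    # then, with options along the lines of _getBatchOutput() above:
    #   cftp -p <port> -l testuser -i dsa_test -a -v -b example.batch 127.0.0.1
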
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_channel.py b/ThirdParty/Twisted/twisted/conch/test/test_channel.py
new file mode 100644
index 0000000..a46596d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_channel.py
@@ -0,0 +1,279 @@
+# Copyright (C) 2007-2008 Twisted Matrix Laboratories
+# See LICENSE for details
+
+"""
+Test ssh/channel.py.
+"""
+from twisted.conch.ssh import channel
+from twisted.trial import unittest
+
+
+class MockTransport(object):
+    """
+    A mock Transport.  All we use is the getPeer() and getHost() methods.
+    Channels implement the ITransport interface, and their getPeer() and
+    getHost() methods return ('SSH', <transport's getPeer/Host value>) so
+    we need to implement these methods so they have something to draw
+    from.
+    """
+    def getPeer(self):
+        return ('MockPeer',)
+
+    def getHost(self):
+        return ('MockHost',)
+
+
+class MockConnection(object):
+    """
+    A mock for twisted.conch.ssh.connection.SSHConnection.  Record the data
+    that channels send, and when they try to close the connection.
+
+    @ivar data: a C{dict} mapping channel id #s to lists of data sent by that
+        channel.
+    @ivar extData: a C{dict} mapping channel id #s to lists of 2-tuples
+        (extended data type, data) sent by that channel.
+    @ivar closes: a C{dict} mapping channel id #s to True if that channel sent
+        a close message.
+    """
+    transport = MockTransport()
+
+    def __init__(self):
+        self.data = {}
+        self.extData = {}
+        self.closes = {}
+
+    def logPrefix(self):
+        """
+        Return our logging prefix.
+        """
+        return "MockConnection"
+
+    def sendData(self, channel, data):
+        """
+        Record the sent data.
+        """
+        self.data.setdefault(channel, []).append(data)
+
+    def sendExtendedData(self, channel, type, data):
+        """
+        Record the sent extended data.
+        """
+        self.extData.setdefault(channel, []).append((type, data))
+
+    def sendClose(self, channel):
+        """
+        Record that the channel sent a close message.
+        """
+        self.closes[channel] = True
+
+
+class ChannelTestCase(unittest.TestCase):
+
+    def setUp(self):
+        """
+        Initialize the channel.  remoteMaxPacket is 10 so that data is able
+        to be sent (the default of 0 means no data is sent because no packets
+        are made).
+        """
+        self.conn = MockConnection()
+        self.channel = channel.SSHChannel(conn=self.conn,
+                remoteMaxPacket=10)
+        self.channel.name = 'channel'
+
+    def test_init(self):
+        """
+        Test that SSHChannel initializes correctly.  localWindowSize defaults
+        to 131072 (2**17) and localMaxPacket to 32768 (2**15) as reasonable
+        defaults (what OpenSSH uses for those variables).
+
+        The values in the second set of assertions are meaningless; they serve
+        only to verify that the instance variables are assigned in the correct
+        order.
+        """
+        c = channel.SSHChannel(conn=self.conn)
+        self.assertEqual(c.localWindowSize, 131072)
+        self.assertEqual(c.localWindowLeft, 131072)
+        self.assertEqual(c.localMaxPacket, 32768)
+        self.assertEqual(c.remoteWindowLeft, 0)
+        self.assertEqual(c.remoteMaxPacket, 0)
+        self.assertEqual(c.conn, self.conn)
+        self.assertEqual(c.data, None)
+        self.assertEqual(c.avatar, None)
+
+        c2 = channel.SSHChannel(1, 2, 3, 4, 5, 6, 7)
+        self.assertEqual(c2.localWindowSize, 1)
+        self.assertEqual(c2.localWindowLeft, 1)
+        self.assertEqual(c2.localMaxPacket, 2)
+        self.assertEqual(c2.remoteWindowLeft, 3)
+        self.assertEqual(c2.remoteMaxPacket, 4)
+        self.assertEqual(c2.conn, 5)
+        self.assertEqual(c2.data, 6)
+        self.assertEqual(c2.avatar, 7)
+
+    def test_str(self):
+        """
+        Test that str(SSHChannel) gives the channel name and the local and
+        remote window sizes at a glance.
+        """
+        self.assertEqual(str(self.channel), '<SSHChannel channel (lw 131072 '
+                'rw 0)>')
+
+    def test_logPrefix(self):
+        """
+        Test that SSHChannel.logPrefix gives the name of the channel, the
+        local channel ID and the underlying connection.
+        """
+        self.assertEqual(self.channel.logPrefix(), 'SSHChannel channel '
+                '(unknown) on MockConnection')
+
+    def test_addWindowBytes(self):
+        """
+        Test that addWindowBytes adds bytes to the window and resumes writing
+        if it was paused.
+        """
+        cb = [False]
+        def stubStartWriting():
+            cb[0] = True
+        self.channel.startWriting = stubStartWriting
+        self.channel.write('test')
+        self.channel.writeExtended(1, 'test')
+        self.channel.addWindowBytes(50)
+        self.assertEqual(self.channel.remoteWindowLeft, 50 - 4 - 4)
+        self.assertTrue(self.channel.areWriting)
+        self.assertTrue(cb[0])
+        self.assertEqual(self.channel.buf, '')
+        self.assertEqual(self.conn.data[self.channel], ['test'])
+        self.assertEqual(self.channel.extBuf, [])
+        self.assertEqual(self.conn.extData[self.channel], [(1, 'test')])
+
+        cb[0] = False
+        self.channel.addWindowBytes(20)
+        self.assertFalse(cb[0])
+
+        self.channel.write('a'*80)
+        self.channel.loseConnection()
+        self.channel.addWindowBytes(20)
+        self.assertFalse(cb[0])
+
+    def test_requestReceived(self):
+        """
+        Test that requestReceived handles requests by dispatching them to
+        request_* methods.
+        """
+        self.channel.request_test_method = lambda data: data == ''
+        self.assertTrue(self.channel.requestReceived('test-method', ''))
+        self.assertFalse(self.channel.requestReceived('test-method', 'a'))
+        self.assertFalse(self.channel.requestReceived('bad-method', ''))
+
+    def test_closeReceived(self):
+        """
+        Test that the default closeReceived closes the connection.
+        """
+        self.assertFalse(self.channel.closing)
+        self.channel.closeReceived()
+        self.assertTrue(self.channel.closing)
+
+    def test_write(self):
+        """
+        Test that write handles data correctly.  Send data up to the size
+        of the remote window, splitting the data into packets of length
+        remoteMaxPacket.
+        """
+        cb = [False]
+        def stubStopWriting():
+            cb[0] = True
+        # no window to start with
+        self.channel.stopWriting = stubStopWriting
+        self.channel.write('d')
+        self.channel.write('a')
+        self.assertFalse(self.channel.areWriting)
+        self.assertTrue(cb[0])
+        # regular write
+        self.channel.addWindowBytes(20)
+        self.channel.write('ta')
+        data = self.conn.data[self.channel]
+        self.assertEqual(data, ['da', 'ta'])
+        self.assertEqual(self.channel.remoteWindowLeft, 16)
+        # larger than max packet
+        self.channel.write('12345678901')
+        self.assertEqual(data, ['da', 'ta', '1234567890', '1'])
+        self.assertEqual(self.channel.remoteWindowLeft, 5)
+        # running out of window
+        cb[0] = False
+        self.channel.write('123456')
+        self.assertFalse(self.channel.areWriting)
+        self.assertTrue(cb[0])
+        self.assertEqual(data, ['da', 'ta', '1234567890', '1', '12345'])
+        self.assertEqual(self.channel.buf, '6')
+        self.assertEqual(self.channel.remoteWindowLeft, 0)
+
+    def test_writeExtended(self):
+        """
+        Test that writeExtended handles data correctly.  Send extended data
+        up to the size of the window, splitting the extended data into packets
+        of length remoteMaxPacket.
+        """
+        cb = [False]
+        def stubStopWriting():
+            cb[0] = True
+        # no window to start with
+        self.channel.stopWriting = stubStopWriting
+        self.channel.writeExtended(1, 'd')
+        self.channel.writeExtended(1, 'a')
+        self.channel.writeExtended(2, 't')
+        self.assertFalse(self.channel.areWriting)
+        self.assertTrue(cb[0])
+        # regular write
+        self.channel.addWindowBytes(20)
+        self.channel.writeExtended(2, 'a')
+        data = self.conn.extData[self.channel]
+        self.assertEqual(data, [(1, 'da'), (2, 't'), (2, 'a')])
+        self.assertEqual(self.channel.remoteWindowLeft, 16)
+        # larger than max packet
+        self.channel.writeExtended(3, '12345678901')
+        self.assertEqual(data, [(1, 'da'), (2, 't'), (2, 'a'),
+            (3, '1234567890'), (3, '1')])
+        self.assertEqual(self.channel.remoteWindowLeft, 5)
+        # running out of window
+        cb[0] = False
+        self.channel.writeExtended(4, '123456')
+        self.assertFalse(self.channel.areWriting)
+        self.assertTrue(cb[0])
+        self.assertEqual(data, [(1, 'da'), (2, 't'), (2, 'a'),
+            (3, '1234567890'), (3, '1'), (4, '12345')])
+        self.assertEqual(self.channel.extBuf, [[4, '6']])
+        self.assertEqual(self.channel.remoteWindowLeft, 0)
+
+    def test_writeSequence(self):
+        """
+        Test that writeSequence is equivalent to write(''.join(sequence)).
+        """
+        self.channel.addWindowBytes(20)
+        self.channel.writeSequence(map(str, range(10)))
+        self.assertEqual(self.conn.data[self.channel], ['0123456789'])
+
+    def test_loseConnection(self):
+        """
+        Test that loseConnection() doesn't close the channel until all
+        the data is sent.
+        """
+        self.channel.write('data')
+        self.channel.writeExtended(1, 'datadata')
+        self.channel.loseConnection()
+        self.assertEqual(self.conn.closes.get(self.channel), None)
+        self.channel.addWindowBytes(4) # send regular data
+        self.assertEqual(self.conn.closes.get(self.channel), None)
+        self.channel.addWindowBytes(8) # send extended data
+        self.assertTrue(self.conn.closes.get(self.channel))
+
+    def test_getPeer(self):
+        """
+        Test that getPeer() returns ('SSH', <connection transport peer>).
+        """
+        self.assertEqual(self.channel.getPeer(), ('SSH', 'MockPeer'))
+
+    def test_getHost(self):
+        """
+        Test that getHost() returns ('SSH', <connection transport host>).
+        """
+        self.assertEqual(self.channel.getHost(), ('SSH', 'MockHost'))
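
The write tests above describe SSHChannel's flow control: data written before
the remote window opens is buffered, and addWindowBytes() flushes it in
remoteMaxPacket-sized pieces until the window runs out. A minimal sketch of
that behaviour, assuming the twisted.conch.ssh.channel module imported above;
RecordingConnection is a hypothetical stand-in like the MockConnection above:

    from twisted.conch.ssh import channel

    class RecordingConnection(object):
        """Hypothetical stand-in for an SSHConnection; records regular data."""
        def __init__(self):
            self.sent = []
        def sendData(self, chan, data):
            self.sent.append(data)
        def logPrefix(self):
            return "RecordingConnection"

    conn = RecordingConnection()
    chan = channel.SSHChannel(conn=conn, remoteMaxPacket=4)
    chan.write('hello world')   # no remote window yet: everything stays in chan.buf
    chan.addWindowBytes(6)      # 6 bytes of window: flushed as 'hell' then 'o '
    # conn.sent == ['hell', 'o '], chan.buf == 'world', chan.remoteWindowLeft == 0
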
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_checkers.py b/ThirdParty/Twisted/twisted/conch/test/test_checkers.py
new file mode 100644
index 0000000..9c85050
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_checkers.py
@@ -0,0 +1,609 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.checkers}.
+"""
+
+try:
+    import crypt
+except ImportError:
+    cryptSkip = 'cannot run without crypt module'
+else:
+    cryptSkip = None
+
+import os, base64
+
+from twisted.python import util
+from twisted.python.failure import Failure
+from twisted.trial.unittest import TestCase
+from twisted.python.filepath import FilePath
+from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
+from twisted.cred.credentials import UsernamePassword, IUsernamePassword, \
+    SSHPrivateKey, ISSHPrivateKey
+from twisted.cred.error import UnhandledCredentials, UnauthorizedLogin
+from twisted.python.fakepwd import UserDatabase, ShadowDatabase
+from twisted.test.test_process import MockOS
+
+try:
+    import Crypto.Cipher.DES3
+    import pyasn1
+except ImportError:
+    dependencySkip = "can't run without Crypto and PyASN1"
+else:
+    dependencySkip = None
+    from twisted.conch.ssh import keys
+    from twisted.conch import checkers
+    from twisted.conch.error import NotEnoughAuthentication, ValidPublicKey
+    from twisted.conch.test import keydata
+
+if getattr(os, 'geteuid', None) is None:
+    euidSkip = "Cannot run without effective UIDs (questionable)"
+else:
+    euidSkip = None
+
+
+class HelperTests(TestCase):
+    """
+    Tests for helper functions L{verifyCryptedPassword}, L{_pwdGetByName} and
+    L{_shadowGetByName}.
+    """
+    skip = cryptSkip or dependencySkip
+
+    def setUp(self):
+        self.mockos = MockOS()
+
+
+    def test_verifyCryptedPassword(self):
+        """
+        L{verifyCryptedPassword} returns C{True} if the plaintext password
+        passed to it matches the encrypted password passed to it.
+        """
+        password = 'secret string'
+        salt = 'salty'
+        crypted = crypt.crypt(password, salt)
+        self.assertTrue(
+            checkers.verifyCryptedPassword(crypted, password),
+            '%r supposed to be valid encrypted password for %r' % (
+                crypted, password))
+
+
+    def test_verifyCryptedPasswordMD5(self):
+        """
+        L{verifyCryptedPassword} returns True if the provided cleartext password
+        matches the provided MD5 password hash.
+        """
+        password = 'password'
+        salt = '$1$salt'
+        crypted = crypt.crypt(password, salt)
+        self.assertTrue(
+            checkers.verifyCryptedPassword(crypted, password),
+            '%r supposed to be valid encrypted password for %s' % (
+                crypted, password))
+
+
+    def test_refuteCryptedPassword(self):
+        """
+        L{verifyCryptedPassword} returns C{False} if the plaintext password
+        passed to it does not match the encrypted password passed to it.
+        """
+        password = 'string secret'
+        wrong = 'secret string'
+        crypted = crypt.crypt(password, password)
+        self.assertFalse(
+            checkers.verifyCryptedPassword(crypted, wrong),
+            '%r not supposed to be valid encrypted password for %s' % (
+                crypted, wrong))
+
+
+    def test_pwdGetByName(self):
+        """
+        L{_pwdGetByName} returns a tuple of items from the UNIX /etc/passwd
+        database if the L{pwd} module is present.
+        """
+        userdb = UserDatabase()
+        userdb.addUser(
+            'alice', 'secrit', 1, 2, 'first last', '/foo', '/bin/sh')
+        self.patch(checkers, 'pwd', userdb)
+        self.assertEquals(
+            checkers._pwdGetByName('alice'), userdb.getpwnam('alice'))
+
+
+    def test_pwdGetByNameWithoutPwd(self):
+        """
+        If the C{pwd} module isn't present, L{_pwdGetByName} returns C{None}.
+        """
+        self.patch(checkers, 'pwd', None)
+        self.assertIdentical(checkers._pwdGetByName('alice'), None)
+
+
+    def test_shadowGetByName(self):
+        """
+        L{_shadowGetByName} returns a tuple of items from the UNIX /etc/shadow
+        database if the L{spwd} module is present.
+        """
+        userdb = ShadowDatabase()
+        userdb.addUser('bob', 'passphrase', 1, 2, 3, 4, 5, 6, 7)
+        self.patch(checkers, 'spwd', userdb)
+
+        self.mockos.euid = 2345
+        self.mockos.egid = 1234
+        self.patch(checkers, 'os', self.mockos)
+        self.patch(util, 'os', self.mockos)
+
+        self.assertEquals(
+            checkers._shadowGetByName('bob'), userdb.getspnam('bob'))
+        self.assertEquals(self.mockos.seteuidCalls, [0, 2345])
+        self.assertEquals(self.mockos.setegidCalls, [0, 1234])
+
+
+    def test_shadowGetByNameWithoutSpwd(self):
+        """
+        L{_shadowGetByName} uses the C{shadow} module to return a tuple of items
+        from the UNIX /etc/shadow database if the C{spwd} module is not present
+        and the C{shadow} module is.
+        """
+        userdb = ShadowDatabase()
+        userdb.addUser('bob', 'passphrase', 1, 2, 3, 4, 5, 6, 7)
+        self.patch(checkers, 'spwd', None)
+        self.patch(checkers, 'shadow', userdb)
+        self.patch(checkers, 'os', self.mockos)
+        self.patch(util, 'os', self.mockos)
+
+        self.mockos.euid = 2345
+        self.mockos.egid = 1234
+
+        self.assertEquals(
+            checkers._shadowGetByName('bob'), userdb.getspnam('bob'))
+        self.assertEquals(self.mockos.seteuidCalls, [0, 2345])
+        self.assertEquals(self.mockos.setegidCalls, [0, 1234])
+
+
+    def test_shadowGetByNameWithoutEither(self):
+        """
+        L{_shadowGetByName} returns C{None} if neither C{spwd} nor C{shadow} is
+        present.
+        """
+        self.patch(checkers, 'spwd', None)
+        self.patch(checkers, 'shadow', None)
+        self.patch(checkers, 'os', self.mockos)
+
+        self.assertIdentical(checkers._shadowGetByName('bob'), None)
+        self.assertEquals(self.mockos.seteuidCalls, [])
+        self.assertEquals(self.mockos.setegidCalls, [])
+
+
+
+class SSHPublicKeyDatabaseTestCase(TestCase):
+    """
+    Tests for L{SSHPublicKeyDatabase}.
+    """
+    skip = euidSkip or dependencySkip
+
+    def setUp(self):
+        self.checker = checkers.SSHPublicKeyDatabase()
+        self.key1 = base64.encodestring("foobar")
+        self.key2 = base64.encodestring("eggspam")
+        self.content = "t1 %s foo\nt2 %s egg\n" % (self.key1, self.key2)
+
+        self.mockos = MockOS()
+        self.mockos.path = FilePath(self.mktemp())
+        self.mockos.path.makedirs()
+        self.patch(checkers, 'os', self.mockos)
+        self.patch(util, 'os', self.mockos)
+        self.sshDir = self.mockos.path.child('.ssh')
+        self.sshDir.makedirs()
+
+        userdb = UserDatabase()
+        userdb.addUser(
+            'user', 'password', 1, 2, 'first last',
+            self.mockos.path.path, '/bin/shell')
+        self.checker._userdb = userdb
+
+
+    def _testCheckKey(self, filename):
+        self.sshDir.child(filename).setContent(self.content)
+        user = UsernamePassword("user", "password")
+        user.blob = "foobar"
+        self.assertTrue(self.checker.checkKey(user))
+        user.blob = "eggspam"
+        self.assertTrue(self.checker.checkKey(user))
+        user.blob = "notallowed"
+        self.assertFalse(self.checker.checkKey(user))
+
+
+    def test_checkKey(self):
+        """
+        L{SSHPublicKeyDatabase.checkKey} should retrieve the content of the
+        authorized_keys file and check the keys against that file.
+        """
+        self._testCheckKey("authorized_keys")
+        self.assertEqual(self.mockos.seteuidCalls, [])
+        self.assertEqual(self.mockos.setegidCalls, [])
+
+
+    def test_checkKey2(self):
+        """
+        L{SSHPublicKeyDatabase.checkKey} should retrieve the content of the
+        authorized_keys2 file and check the keys against that file.
+        """
+        self._testCheckKey("authorized_keys2")
+        self.assertEqual(self.mockos.seteuidCalls, [])
+        self.assertEqual(self.mockos.setegidCalls, [])
+
+
+    def test_checkKeyAsRoot(self):
+        """
+        If the key file is not readable by the current effective user,
+        L{SSHPublicKeyDatabase.checkKey} should switch its uid/gid to the ones
+        of the authenticated user and retry.
+        """
+        keyFile = self.sshDir.child("authorized_keys")
+        keyFile.setContent(self.content)
+        # Fake permission error by changing the mode
+        keyFile.chmod(0000)
+        self.addCleanup(keyFile.chmod, 0777)
+        # And restore the right mode when seteuid is called
+        savedSeteuid = self.mockos.seteuid
+        def seteuid(euid):
+            keyFile.chmod(0777)
+            return savedSeteuid(euid)
+        self.mockos.euid = 2345
+        self.mockos.egid = 1234
+        self.patch(self.mockos, "seteuid", seteuid)
+        self.patch(checkers, 'os', self.mockos)
+        self.patch(util, 'os', self.mockos)
+        user = UsernamePassword("user", "password")
+        user.blob = "foobar"
+        self.assertTrue(self.checker.checkKey(user))
+        self.assertEqual(self.mockos.seteuidCalls, [0, 1, 0, 2345])
+        self.assertEqual(self.mockos.setegidCalls, [2, 1234])
+
+
+    def test_requestAvatarId(self):
+        """
+        L{SSHPublicKeyDatabase.requestAvatarId} should return the avatar id
+        passed in if its C{_checkKey} method returns True.
+        """
+        def _checkKey(ignored):
+            return True
+        self.patch(self.checker, 'checkKey', _checkKey)
+        credentials = SSHPrivateKey(
+            'test', 'ssh-rsa', keydata.publicRSA_openssh, 'foo',
+            keys.Key.fromString(keydata.privateRSA_openssh).sign('foo'))
+        d = self.checker.requestAvatarId(credentials)
+        def _verify(avatarId):
+            self.assertEqual(avatarId, 'test')
+        return d.addCallback(_verify)
+
+
+    def test_requestAvatarIdWithoutSignature(self):
+        """
+        L{SSHPublicKeyDatabase.requestAvatarId} should raise L{ValidPublicKey}
+        if the credentials represent a valid key without a signature.  This
+        tells the user that the key is valid for login, but does not actually
+        allow that user to do so without a signature.
+        """
+        def _checkKey(ignored):
+            return True
+        self.patch(self.checker, 'checkKey', _checkKey)
+        credentials = SSHPrivateKey(
+            'test', 'ssh-rsa', keydata.publicRSA_openssh, None, None)
+        d = self.checker.requestAvatarId(credentials)
+        return self.assertFailure(d, ValidPublicKey)
+
+
+    def test_requestAvatarIdInvalidKey(self):
+        """
+        If L{SSHPublicKeyDatabase.checkKey} returns False,
+        C{_cbRequestAvatarId} should raise L{UnauthorizedLogin}.
+        """
+        def _checkKey(ignored):
+            return False
+        self.patch(self.checker, 'checkKey', _checkKey)
+        d = self.checker.requestAvatarId(None)
+        return self.assertFailure(d, UnauthorizedLogin)
+
+
+    def test_requestAvatarIdInvalidSignature(self):
+        """
+        Valid keys with invalid signatures should cause
+        L{SSHPublicKeyDatabase.requestAvatarId} to return a {UnauthorizedLogin}
+        failure
+        """
+        def _checkKey(ignored):
+            return True
+        self.patch(self.checker, 'checkKey', _checkKey)
+        credentials = SSHPrivateKey(
+            'test', 'ssh-rsa', keydata.publicRSA_openssh, 'foo',
+            keys.Key.fromString(keydata.privateDSA_openssh).sign('foo'))
+        d = self.checker.requestAvatarId(credentials)
+        return self.assertFailure(d, UnauthorizedLogin)
+
+
+    def test_requestAvatarIdNormalizeException(self):
+        """
+        Exceptions raised while verifying the key should be normalized into an
+        C{UnauthorizedLogin} failure.
+        """
+        def _checkKey(ignored):
+            return True
+        self.patch(self.checker, 'checkKey', _checkKey)
+        credentials = SSHPrivateKey('test', None, 'blob', 'sigData', 'sig')
+        d = self.checker.requestAvatarId(credentials)
+        def _verifyLoggedException(failure):
+            errors = self.flushLoggedErrors(keys.BadKeyError)
+            self.assertEqual(len(errors), 1)
+            return failure
+        d.addErrback(_verifyLoggedException)
+        return self.assertFailure(d, UnauthorizedLogin)
+
+
+
+class SSHProtocolCheckerTestCase(TestCase):
+    """
+    Tests for L{SSHProtocolChecker}.
+    """
+
+    skip = dependencySkip
+
+    def test_registerChecker(self):
+        """
+        L{SSHProtocolChecker.registerChecker} should add the given checker to
+        the list of registered checkers.
+        """
+        checker = checkers.SSHProtocolChecker()
+        self.assertEqual(checker.credentialInterfaces, [])
+        checker.registerChecker(checkers.SSHPublicKeyDatabase(), )
+        self.assertEqual(checker.credentialInterfaces, [ISSHPrivateKey])
+        self.assertIsInstance(checker.checkers[ISSHPrivateKey],
+                              checkers.SSHPublicKeyDatabase)
+
+
+    def test_registerCheckerWithInterface(self):
+        """
+        If a specific interface is passed into
+        L{SSHProtocolChecker.registerChecker}, that interface should be
+        registered instead of what the checker specifies in
+        credentialInterfaces.
+        """
+        checker = checkers.SSHProtocolChecker()
+        self.assertEqual(checker.credentialInterfaces, [])
+        checker.registerChecker(checkers.SSHPublicKeyDatabase(),
+                                IUsernamePassword)
+        self.assertEqual(checker.credentialInterfaces, [IUsernamePassword])
+        self.assertIsInstance(checker.checkers[IUsernamePassword],
+                              checkers.SSHPublicKeyDatabase)
+
+
+    def test_requestAvatarId(self):
+        """
+        L{SSHProtocolChecker.requestAvatarId} should defer to one of its
+        registered checkers to authenticate a user.
+        """
+        checker = checkers.SSHProtocolChecker()
+        passwordDatabase = InMemoryUsernamePasswordDatabaseDontUse()
+        passwordDatabase.addUser('test', 'test')
+        checker.registerChecker(passwordDatabase)
+        d = checker.requestAvatarId(UsernamePassword('test', 'test'))
+        def _callback(avatarId):
+            self.assertEqual(avatarId, 'test')
+        return d.addCallback(_callback)
+
+
+    def test_requestAvatarIdWithNotEnoughAuthentication(self):
+        """
+        If the client indicates that it is never satisfied, by always returning
+        False from _areDone, then L{SSHProtocolChecker} should raise
+        L{NotEnoughAuthentication}.
+        """
+        checker = checkers.SSHProtocolChecker()
+        def _areDone(avatarId):
+            return False
+        self.patch(checker, 'areDone', _areDone)
+
+        passwordDatabase = InMemoryUsernamePasswordDatabaseDontUse()
+        passwordDatabase.addUser('test', 'test')
+        checker.registerChecker(passwordDatabase)
+        d = checker.requestAvatarId(UsernamePassword('test', 'test'))
+        return self.assertFailure(d, NotEnoughAuthentication)
+
+
+    def test_requestAvatarIdInvalidCredential(self):
+        """
+        If the passed credentials aren't handled by any registered checker,
+        L{SSHProtocolChecker} should raise L{UnhandledCredentials}.
+        """
+        checker = checkers.SSHProtocolChecker()
+        d = checker.requestAvatarId(UsernamePassword('test', 'test'))
+        return self.assertFailure(d, UnhandledCredentials)
+
+
+    def test_areDone(self):
+        """
+        The default L{SSHProtocolChecker.areDone} should simply return True.
+        """
+        self.assertEquals(checkers.SSHProtocolChecker().areDone(None), True)
+
+
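
A short sketch of the composition pattern covered above, assuming the same
modules imported at the top of this file (the variable names are illustrative):

    from twisted.conch import checkers
    from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
    from twisted.cred.credentials import UsernamePassword

    composite = checkers.SSHProtocolChecker()
    passwords = InMemoryUsernamePasswordDatabaseDontUse()
    passwords.addUser('test', 'test')
    composite.registerChecker(passwords)   # registered under the checker's own credential interfaces
    d = composite.requestAvatarId(UsernamePassword('test', 'test'))
    # d fires with 'test'; overriding areDone() lets a subclass demand further
    # credentials, in which case NotEnoughAuthentication is raised instead.
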
+
+class UNIXPasswordDatabaseTests(TestCase):
+    """
+    Tests for L{UNIXPasswordDatabase}.
+    """
+    skip = cryptSkip or dependencySkip
+
+    def assertLoggedIn(self, d, username):
+        """
+        Assert that the L{Deferred} passed in is called back with the value
+        'username'.  This represents a valid login for this TestCase.
+
+        NOTE: To work, this method's return value must be returned from the
+        test method, or otherwise hooked up to the test machinery.
+
+        @param d: a L{Deferred} from an L{IChecker.requestAvatarId} method.
+        @type d: L{Deferred}
+        @rtype: L{Deferred}
+        """
+        result = []
+        d.addBoth(result.append)
+        self.assertEquals(len(result), 1, "login incomplete")
+        if isinstance(result[0], Failure):
+            result[0].raiseException()
+        self.assertEquals(result[0], username)
+
+
+    def test_defaultCheckers(self):
+        """
+        L{UNIXPasswordDatabase} with no arguments checks the C{pwd} database
+        and then the C{spwd} database.
+        """
+        checker = checkers.UNIXPasswordDatabase()
+
+        def crypted(username, password):
+            salt = crypt.crypt(password, username)
+            crypted = crypt.crypt(password, '$1$' + salt)
+            return crypted
+
+        pwd = UserDatabase()
+        pwd.addUser('alice', crypted('alice', 'password'),
+                    1, 2, 'foo', '/foo', '/bin/sh')
+        # x and * are convention for "look elsewhere for the password"
+        pwd.addUser('bob', 'x', 1, 2, 'bar', '/bar', '/bin/sh')
+        spwd = ShadowDatabase()
+        spwd.addUser('alice', 'wrong', 1, 2, 3, 4, 5, 6, 7)
+        spwd.addUser('bob', crypted('bob', 'password'),
+                     8, 9, 10, 11, 12, 13, 14)
+
+        self.patch(checkers, 'pwd', pwd)
+        self.patch(checkers, 'spwd', spwd)
+
+        mockos = MockOS()
+        self.patch(checkers, 'os', mockos)
+        self.patch(util, 'os', mockos)
+
+        mockos.euid = 2345
+        mockos.egid = 1234
+
+        cred = UsernamePassword("alice", "password")
+        self.assertLoggedIn(checker.requestAvatarId(cred), 'alice')
+        self.assertEquals(mockos.seteuidCalls, [])
+        self.assertEquals(mockos.setegidCalls, [])
+        cred.username = "bob"
+        self.assertLoggedIn(checker.requestAvatarId(cred), 'bob')
+        self.assertEquals(mockos.seteuidCalls, [0, 2345])
+        self.assertEquals(mockos.setegidCalls, [0, 1234])
+
+
+    def assertUnauthorizedLogin(self, d):
+        """
+        Assert that the L{Deferred} passed in is errbacked with an
+        L{UnauthorizedLogin} L{Failure}.  This represents an invalid login for
+        this TestCase.
+
+        NOTE: To work, this method's return value must be returned from the
+        test method, or otherwise hooked up to the test machinery.
+
+        @param d: a L{Deferred} from an L{IChecker.requestAvatarId} method.
+        @type d: L{Deferred}
+        @rtype: L{None}
+        """
+        self.assertRaises(
+            checkers.UnauthorizedLogin, self.assertLoggedIn, d, 'bogus value')
+
+
+    def test_passInCheckers(self):
+        """
+        L{UNIXPasswordDatabase} takes a list of functions to check for UNIX
+        user information.
+        """
+        password = crypt.crypt('secret', 'secret')
+        userdb = UserDatabase()
+        userdb.addUser('anybody', password, 1, 2, 'foo', '/bar', '/bin/sh')
+        checker = checkers.UNIXPasswordDatabase([userdb.getpwnam])
+        self.assertLoggedIn(
+            checker.requestAvatarId(UsernamePassword('anybody', 'secret')),
+            'anybody')
+
+
+    def test_verifyPassword(self):
+        """
+        If the encrypted password provided by the getpwnam function is valid
+        (verified by the L{verifyCryptedPassword} function), we callback the
+        C{requestAvatarId} L{Deferred} with the username.
+        """
+        def verifyCryptedPassword(crypted, pw):
+            return crypted == pw
+        def getpwnam(username):
+            return [username, username]
+        self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
+        checker = checkers.UNIXPasswordDatabase([getpwnam])
+        credential = UsernamePassword('username', 'username')
+        self.assertLoggedIn(checker.requestAvatarId(credential), 'username')
+
+
+    def test_failOnKeyError(self):
+        """
+        If the getpwnam function raises a KeyError, the login fails with an
+        L{UnauthorizedLogin} exception.
+        """
+        def getpwnam(username):
+            raise KeyError(username)
+        checker = checkers.UNIXPasswordDatabase([getpwnam])
+        credential = UsernamePassword('username', 'username')
+        self.assertUnauthorizedLogin(checker.requestAvatarId(credential))
+
+
+    def test_failOnBadPassword(self):
+        """
+        If the verifyCryptedPassword function doesn't verify the password, the
+        login fails with an L{UnauthorizedLogin} exception.
+        """
+        def verifyCryptedPassword(crypted, pw):
+            return False
+        def getpwnam(username):
+            return [username, username]
+        self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
+        checker = checkers.UNIXPasswordDatabase([getpwnam])
+        credential = UsernamePassword('username', 'username')
+        self.assertUnauthorizedLogin(checker.requestAvatarId(credential))
+
+
+    def test_loopThroughFunctions(self):
+        """
+        UNIXPasswordDatabase.requestAvatarId loops through each getpwnam
+        function associated with it and returns a L{Deferred} which fires with
+        the username as soon as one of them verifies the password, even if
+        earlier ones do not verify the password.
+        """
+        def verifyCryptedPassword(crypted, pw):
+            return crypted == pw
+        def getpwnam1(username):
+            return [username, 'not the password']
+        def getpwnam2(username):
+            return [username, username]
+        self.patch(checkers, 'verifyCryptedPassword', verifyCryptedPassword)
+        checker = checkers.UNIXPasswordDatabase([getpwnam1, getpwnam2])
+        credential = UsernamePassword('username', 'username')
+        self.assertLoggedIn(checker.requestAvatarId(credential), 'username')
+
+
+    def test_failOnSpecial(self):
+        """
+        If the password returned by any function is C{""}, C{"x"}, or C{"*"} it
+        is not compared against the supplied password.  Instead it is skipped.
+        """
+        pwd = UserDatabase()
+        pwd.addUser('alice', '', 1, 2, '', 'foo', 'bar')
+        pwd.addUser('bob', 'x', 1, 2, '', 'foo', 'bar')
+        pwd.addUser('carol', '*', 1, 2, '', 'foo', 'bar')
+        self.patch(checkers, 'pwd', pwd)
+
+        checker = checkers.UNIXPasswordDatabase([checkers._pwdGetByName])
+        cred = UsernamePassword('alice', '')
+        self.assertUnauthorizedLogin(checker.requestAvatarId(cred))
+
+        cred = UsernamePassword('bob', 'x')
+        self.assertUnauthorizedLogin(checker.requestAvatarId(cred))
+
+        cred = UsernamePassword('carol', '*')
+        self.assertUnauthorizedLogin(checker.requestAvatarId(cred))
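
The UNIXPasswordDatabase tests above rely on the checker accepting a list of
getpwnam-style functions in place of the system pwd/spwd modules. A minimal
sketch (crypt is Unix-only; the user record below is made up):

    import crypt
    from twisted.conch import checkers
    from twisted.cred.credentials import UsernamePassword
    from twisted.python.fakepwd import UserDatabase

    userdb = UserDatabase()
    userdb.addUser('anybody', crypt.crypt('secret', 'xy'),
                   1, 2, 'gecos', '/home/anybody', '/bin/sh')
    checker = checkers.UNIXPasswordDatabase([userdb.getpwnam])
    d = checker.requestAvatarId(UsernamePassword('anybody', 'secret'))
    # d fires with 'anybody'; a KeyError from the lookup function or a failed
    # password check errbacks with UnauthorizedLogin instead.
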
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_ckeygen.py b/ThirdParty/Twisted/twisted/conch/test/test_ckeygen.py
new file mode 100644
index 0000000..f3f09a1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_ckeygen.py
@@ -0,0 +1,137 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.scripts.ckeygen}.
+"""
+
+import getpass
+import sys
+from StringIO import StringIO
+
+try:
+    import Crypto
+    import pyasn1
+except ImportError:
+    skip = "PyCrypto and pyasn1 required for twisted.conch.scripts.ckeygen."
+else:
+    from twisted.conch.ssh.keys import Key, BadKeyError
+    from twisted.conch.scripts.ckeygen import (
+        displayPublicKey, printFingerprint, _saveKey)
+
+from twisted.python.filepath import FilePath
+from twisted.trial.unittest import TestCase
+from twisted.conch.test.keydata import (
+    publicRSA_openssh, privateRSA_openssh, privateRSA_openssh_encrypted)
+
+
+
+class KeyGenTests(TestCase):
+    """
+    Tests for various functions used to implement the I{ckeygen} script.
+    """
+    def setUp(self):
+        """
+        Patch C{sys.stdout} with a L{StringIO} instance so that tests can make
+        assertions about what's printed.
+        """
+        self.stdout = StringIO()
+        self.patch(sys, 'stdout', self.stdout)
+
+
+    def test_printFingerprint(self):
+        """
+        L{printFingerprint} writes a line to standard out giving the number of
+        bits of the key, its fingerprint, and the basename of the file from it
+        was read.
+        """
+        filename = self.mktemp()
+        FilePath(filename).setContent(publicRSA_openssh)
+        printFingerprint({'filename': filename})
+        self.assertEqual(
+            self.stdout.getvalue(),
+            '768 3d:13:5f:cb:c9:79:8a:93:06:27:65:bc:3d:0b:8f:af temp\n')
+
+
+    def test_saveKey(self):
+        """
+        L{_saveKey} writes the private and public parts of a key to two
+        different files and writes a report of this to standard out.
+        """
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        filename = base.child('id_rsa').path
+        key = Key.fromString(privateRSA_openssh)
+        _saveKey(
+            key.keyObject,
+            {'filename': filename, 'pass': 'passphrase'})
+        self.assertEqual(
+            self.stdout.getvalue(),
+            "Your identification has been saved in %s\n"
+            "Your public key has been saved in %s.pub\n"
+            "The key fingerprint is:\n"
+            "3d:13:5f:cb:c9:79:8a:93:06:27:65:bc:3d:0b:8f:af\n" % (
+                filename,
+                filename))
+        self.assertEqual(
+            key.fromString(
+                base.child('id_rsa').getContent(), None, 'passphrase'),
+            key)
+        self.assertEqual(
+            Key.fromString(base.child('id_rsa.pub').getContent()),
+            key.public())
+
+
+    def test_displayPublicKey(self):
+        """
+        L{displayPublicKey} prints out the public key associated with a given
+        private key.
+        """
+        filename = self.mktemp()
+        pubKey = Key.fromString(publicRSA_openssh)
+        FilePath(filename).setContent(privateRSA_openssh)
+        displayPublicKey({'filename': filename})
+        self.assertEqual(
+            self.stdout.getvalue().strip('\n'),
+            pubKey.toString('openssh'))
+
+
+    def test_displayPublicKeyEncrypted(self):
+        """
+        L{displayPublicKey} prints out the public key associated with a given
+        private key using the given passphrase when it's encrypted.
+        """
+        filename = self.mktemp()
+        pubKey = Key.fromString(publicRSA_openssh)
+        FilePath(filename).setContent(privateRSA_openssh_encrypted)
+        displayPublicKey({'filename': filename, 'pass': 'encrypted'})
+        self.assertEqual(
+            self.stdout.getvalue().strip('\n'),
+            pubKey.toString('openssh'))
+
+
+    def test_displayPublicKeyEncryptedPassphrasePrompt(self):
+        """
+        L{displayPublicKey} prints out the public key associated with a given
+        private key, asking for the passphrase when it's encrypted.
+        """
+        filename = self.mktemp()
+        pubKey = Key.fromString(publicRSA_openssh)
+        FilePath(filename).setContent(privateRSA_openssh_encrypted)
+        self.patch(getpass, 'getpass', lambda x: 'encrypted')
+        displayPublicKey({'filename': filename})
+        self.assertEqual(
+            self.stdout.getvalue().strip('\n'),
+            pubKey.toString('openssh'))
+
+
+    def test_displayPublicKeyWrongPassphrase(self):
+        """
+        L{displayPublicKey} fails with a L{BadKeyError} when trying to decrypt
+        an encrypted key with the wrong password.
+        """
+        filename = self.mktemp()
+        FilePath(filename).setContent(privateRSA_openssh_encrypted)
+        self.assertRaises(
+            BadKeyError, displayPublicKey,
+            {'filename': filename, 'pass': 'wrong'})
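
As the ckeygen tests above show, these helpers take a plain options dictionary
rather than parsed command-line arguments. A sketch (the paths are
illustrative):

    from twisted.conch.scripts.ckeygen import displayPublicKey, printFingerprint

    # Prints the OpenSSH-format public key; add 'pass': '<passphrase>' for an
    # encrypted private key (a wrong passphrase raises BadKeyError).
    displayPublicKey({'filename': '/path/to/id_rsa'})
    # Prints "<bits> <fingerprint> <basename>" for the key in the named file.
    printFingerprint({'filename': '/path/to/id_rsa.pub'})
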
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_conch.py b/ThirdParty/Twisted/twisted/conch/test/test_conch.py
new file mode 100644
index 0000000..95219d4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_conch.py
@@ -0,0 +1,552 @@
+# -*- test-case-name: twisted.conch.test.test_conch -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import os, sys, socket
+from itertools import count
+
+from zope.interface import implements
+
+from twisted.cred import portal
+from twisted.internet import reactor, defer, protocol
+from twisted.internet.error import ProcessExitedAlready
+from twisted.internet.task import LoopingCall
+from twisted.python import log, runtime
+from twisted.trial import unittest
+from twisted.conch.error import ConchError
+from twisted.conch.avatar import ConchUser
+from twisted.conch.ssh.session import ISession, SSHSession, wrapProtocol
+
+try:
+    from twisted.conch.scripts.conch import SSHSession as StdioInteractingSession
+except ImportError, e:
+    StdioInteractingSession = None
+    _reason = str(e)
+    del e
+
+from twisted.conch.test.test_ssh import ConchTestRealm
+from twisted.python.procutils import which
+
+from twisted.conch.test.keydata import publicRSA_openssh, privateRSA_openssh
+from twisted.conch.test.keydata import publicDSA_openssh, privateDSA_openssh
+
+from twisted.conch.test.test_ssh import Crypto, pyasn1
+try:
+    from twisted.conch.test.test_ssh import ConchTestServerFactory, \
+        ConchTestPublicKeyChecker
+except ImportError:
+    pass
+
+
+
+class StdioInteractingSessionTests(unittest.TestCase):
+    """
+    Tests for L{twisted.conch.scripts.conch.SSHSession}.
+    """
+    if StdioInteractingSession is None:
+        skip = _reason
+
+    def test_eofReceived(self):
+        """
+        L{twisted.conch.scripts.conch.SSHSession.eofReceived} loses the
+        write half of its stdio connection.
+        """
+        class FakeStdio:
+            writeConnLost = False
+
+            def loseWriteConnection(self):
+                self.writeConnLost = True
+
+        stdio = FakeStdio()
+        channel = StdioInteractingSession()
+        channel.stdio = stdio
+        channel.eofReceived()
+        self.assertTrue(stdio.writeConnLost)
+
+
+
+class Echo(protocol.Protocol):
+    def connectionMade(self):
+        log.msg('ECHO CONNECTION MADE')
+
+
+    def connectionLost(self, reason):
+        log.msg('ECHO CONNECTION DONE')
+
+
+    def dataReceived(self, data):
+        self.transport.write(data)
+        if '\n' in data:
+            self.transport.loseConnection()
+
+
+
+class EchoFactory(protocol.Factory):
+    protocol = Echo
+
+
+
+class ConchTestOpenSSHProcess(protocol.ProcessProtocol):
+    """
+    Test protocol for launching an OpenSSH client process.
+
+    @ivar deferred: Set by whatever uses this object. Accessed using
+    L{_getDeferred}, which destroys the value so the Deferred is not
+    fired twice. Fires when the process is terminated.
+    """
+
+    deferred = None
+    buf = ''
+
+    def _getDeferred(self):
+        d, self.deferred = self.deferred, None
+        return d
+
+
+    def outReceived(self, data):
+        self.buf += data
+
+
+    def processEnded(self, reason):
+        """
+        Called when the process has ended.
+
+        @param reason: a Failure giving the reason for the process' end.
+        """
+        if reason.value.exitCode != 0:
+            self._getDeferred().errback(
+                ConchError("exit code was not 0: %s" %
+                                 reason.value.exitCode))
+        else:
+            buf = self.buf.replace('\r\n', '\n')
+            self._getDeferred().callback(buf)
+
+
+
+class ConchTestForwardingProcess(protocol.ProcessProtocol):
+    """
+    Manages a third-party process which launches a server.
+
+    Uses L{ConchTestForwardingPort} to connect to the third-party server.
+    Once L{ConchTestForwardingPort} has disconnected, kill the process and fire
+    a Deferred with the data received by the L{ConchTestForwardingPort}.
+
+    @ivar deferred: Set by whatever uses this object. Accessed using
+    L{_getDeferred}, which destroys the value so the Deferred is not
+    fired twice. Fires when the process is terminated.
+    """
+
+    deferred = None
+
+    def __init__(self, port, data):
+        """
+        @type port: C{int}
+        @param port: The port on which the third-party server is listening.
+        (it is assumed that the server is running on localhost).
+
+        @type data: C{str}
+        @param data: This is sent to the third-party server. Must end with '\n'
+        in order to trigger a disconnect.
+        """
+        self.port = port
+        self.buffer = None
+        self.data = data
+
+
+    def _getDeferred(self):
+        d, self.deferred = self.deferred, None
+        return d
+
+
+    def connectionMade(self):
+        self._connect()
+
+
+    def _connect(self):
+        """
+        Connect to the server, which is often a third-party process.
+        Tries to reconnect if it fails because we have no way of determining
+        exactly when the port becomes available for listening -- we can only
+        know when the process starts.
+        """
+        cc = protocol.ClientCreator(reactor, ConchTestForwardingPort, self,
+                                    self.data)
+        d = cc.connectTCP('127.0.0.1', self.port)
+        d.addErrback(self._ebConnect)
+        return d
+
+
+    def _ebConnect(self, f):
+        reactor.callLater(.1, self._connect)
+
+
+    def forwardingPortDisconnected(self, buffer):
+        """
+        The network connection has died; save the buffer of output
+        from the network and attempt to quit the process gracefully,
+        and then (after the reactor has spun) send it a KILL signal.
+        """
+        self.buffer = buffer
+        self.transport.write('\x03')
+        self.transport.loseConnection()
+        reactor.callLater(0, self._reallyDie)
+
+
+    def _reallyDie(self):
+        try:
+            self.transport.signalProcess('KILL')
+        except ProcessExitedAlready:
+            pass
+
+
+    def processEnded(self, reason):
+        """
+        Fire the Deferred at self.deferred with the data collected
+        from the L{ConchTestForwardingPort} connection, if any.
+        """
+        self._getDeferred().callback(self.buffer)
+
+
+
+class ConchTestForwardingPort(protocol.Protocol):
+    """
+    Connects to a server launched by a third-party process (managed by
+    L{ConchTestForwardingProcess}), sends data, then reports whatever it
+    received back to the L{ConchTestForwardingProcess} once the connection
+    has ended.
+    """
+
+
+    def __init__(self, protocol, data):
+        """
+        @type protocol: L{ConchTestForwardingProcess}
+        @param protocol: The L{ProcessProtocol} which made this connection.
+
+        @type data: str
+        @param data: The data to be sent to the third-party server.
+        """
+        self.protocol = protocol
+        self.data = data
+
+
+    def connectionMade(self):
+        self.buffer = ''
+        self.transport.write(self.data)
+
+
+    def dataReceived(self, data):
+        self.buffer += data
+
+
+    def connectionLost(self, reason):
+        self.protocol.forwardingPortDisconnected(self.buffer)
+
+
+
+def _makeArgs(args, mod="conch"):
+    start = [sys.executable, '-c'
+"""
+### Twisted Preamble
+import sys, os
+path = os.path.abspath(sys.argv[0])
+while os.path.dirname(path) != path:
+    if os.path.basename(path).startswith('Twisted'):
+        sys.path.insert(0, path)
+        break
+    path = os.path.dirname(path)
+
+from twisted.conch.scripts.%s import run
+run()""" % mod]
+    return start + list(args)
+
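+# Rough illustration (an assumption, not an assertion about the upstream
+# script): _makeArgs prepends a small bootstrap that puts the local Twisted
+# checkout on sys.path and then calls twisted.conch.scripts.<mod>.run(), so
+#     _makeArgs(['-p', '2222', 'localhost'])
+# yields something like
+#     [sys.executable, '-c' + preamble, '-p', '2222', 'localhost']
+# which spawnProcess() later executes as a child Python interpreter.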
+
+
+class ConchServerSetupMixin:
+    if not Crypto:
+        skip = "can't run w/o PyCrypto"
+
+    if not pyasn1:
+        skip = "Cannot run without PyASN1"
+
+    realmFactory = staticmethod(lambda: ConchTestRealm('testuser'))
+
+    def _createFiles(self):
+        for f in ['rsa_test','rsa_test.pub','dsa_test','dsa_test.pub',
+                  'kh_test']:
+            if os.path.exists(f):
+                os.remove(f)
+        open('rsa_test','w').write(privateRSA_openssh)
+        open('rsa_test.pub','w').write(publicRSA_openssh)
+        open('dsa_test.pub','w').write(publicDSA_openssh)
+        open('dsa_test','w').write(privateDSA_openssh)
+        os.chmod('dsa_test', 33152)
+        os.chmod('rsa_test', 33152)
+        open('kh_test','w').write('127.0.0.1 '+publicRSA_openssh)
+
+
+    def _getFreePort(self):
+        s = socket.socket()
+        s.bind(('', 0))
+        port = s.getsockname()[1]
+        s.close()
+        return port
+
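+    # Illustration (assumed usage): the probed port is later handed to the
+    # SSH client's forwarding options, e.g.
+    #     localPort = self._getFreePort()
+    #     sshArgs = '-N -L%i:127.0.0.1:%i' % (localPort, self.echoPort)
+    # The socket is closed before the port is reused, so another process
+    # could in principle grab it first; the tests accept that small race.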
+
+    def _makeConchFactory(self):
+        """
+        Make a L{ConchTestServerFactory}, which allows us to start a
+        L{ConchTestServer} -- i.e. an actually listening conch.
+        """
+        realm = self.realmFactory()
+        p = portal.Portal(realm)
+        p.registerChecker(ConchTestPublicKeyChecker())
+        factory = ConchTestServerFactory()
+        factory.portal = p
+        return factory
+
+
+    def setUp(self):
+        self._createFiles()
+        self.conchFactory = self._makeConchFactory()
+        self.conchFactory.expectedLoseConnection = 1
+        self.conchServer = reactor.listenTCP(0, self.conchFactory,
+                                             interface="127.0.0.1")
+        self.echoServer = reactor.listenTCP(0, EchoFactory())
+        self.echoPort = self.echoServer.getHost().port
+
+
+    def tearDown(self):
+        try:
+            self.conchFactory.proto.done = 1
+        except AttributeError:
+            pass
+        else:
+            self.conchFactory.proto.transport.loseConnection()
+        return defer.gatherResults([
+                defer.maybeDeferred(self.conchServer.stopListening),
+                defer.maybeDeferred(self.echoServer.stopListening)])
+
+
+
+class ForwardingMixin(ConchServerSetupMixin):
+    """
+    Template class for tests of the Conch server's ability to forward arbitrary
+    protocols over SSH.
+
+    These tests are integration tests, not unit tests. They launch a Conch
+    server, a custom TCP server (just an L{EchoProtocol}) and then call
+    L{execute}.
+
+    L{execute} is implemented by subclasses of L{ForwardingMixin}. It should
+    cause an SSH client to connect to the Conch server, asking it to forward
+    data to the custom TCP server.
+    """
+
+    def test_exec(self):
+        """
+        Test that we can use whatever client to send the command "echo goodbye"
+        to the Conch server. Make sure we receive "goodbye" back from the
+        server.
+        """
+        d = self.execute('echo goodbye', ConchTestOpenSSHProcess())
+        return d.addCallback(self.assertEqual, 'goodbye\n')
+
+
+    def test_localToRemoteForwarding(self):
+        """
+        Test that we can use whatever client to forward a local port to a
+        specified port on the server.
+        """
+        localPort = self._getFreePort()
+        process = ConchTestForwardingProcess(localPort, 'test\n')
+        d = self.execute('', process,
+                         sshArgs='-N -L%i:127.0.0.1:%i'
+                         % (localPort, self.echoPort))
+        d.addCallback(self.assertEqual, 'test\n')
+        return d
+
+
+    def test_remoteToLocalForwarding(self):
+        """
+        Test that we can use whatever client to forward a port from the server
+        to a port locally.
+        """
+        localPort = self._getFreePort()
+        process = ConchTestForwardingProcess(localPort, 'test\n')
+        d = self.execute('', process,
+                         sshArgs='-N -R %i:127.0.0.1:%i'
+                         % (localPort, self.echoPort))
+        d.addCallback(self.assertEqual, 'test\n')
+        return d
+
+
+
+class RekeyAvatar(ConchUser):
+    """
+    This avatar implements a shell which sends 60 numbered lines to whatever
+    connects to it, then closes the session with a 0 exit status.
+
+    Sixty lines are selected as being enough to send more than 2kB of traffic,
+    the amount after which the client is configured to initiate a rekey.
+    """
+    # Conventionally there is a separate adapter object which provides ISession
+    # for the user, but making the user provide ISession directly works too.
+    # This isn't a full implementation of ISession though, just enough to make
+    # these tests pass.
+    implements(ISession)
+
+    def __init__(self):
+        ConchUser.__init__(self)
+        self.channelLookup['session'] = SSHSession
+
+
+    def openShell(self, transport):
+        """
+        Write 60 lines of data to the transport, then exit.
+        """
+        proto = protocol.Protocol()
+        proto.makeConnection(transport)
+        transport.makeConnection(wrapProtocol(proto))
+
+        # Send enough bytes to the connection so that a rekey is triggered in
+        # the client.
+        def write(counter):
+            i = counter()
+            if i == 60:
+                call.stop()
+                transport.session.conn.sendRequest(
+                    transport.session, 'exit-status', '\x00\x00\x00\x00')
+                transport.loseConnection()
+            else:
+                transport.write("line #%02d\n" % (i,))
+
+        # The timing for this loop is an educated guess (and/or the result of
+        # experimentation) to exercise the case where a packet is generated
+        # mid-rekey.  Since the other side of the connection is (so far) the
+        # OpenSSH command line client, there's no easy way to determine when the
+        # rekey has been initiated.  If there were, then generating a packet
+        # immediately at that time would be a better way to test the
+        # functionality being tested here.
+        call = LoopingCall(write, count().next)
+        call.start(0.01)
+
+
+    def closed(self):
+        """
+        Ignore the close of the session.
+        """
+
+
+
+class RekeyRealm:
+    """
+    This realm gives out new L{RekeyAvatar} instances for any avatar request.
+    """
+    def requestAvatar(self, avatarID, mind, *interfaces):
+        return interfaces[0], RekeyAvatar(), lambda: None
+
+
+
+class RekeyTestsMixin(ConchServerSetupMixin):
+    """
+    TestCase mixin which defines tests exercising L{SSHTransportBase}'s handling
+    of rekeying messages.
+    """
+    realmFactory = RekeyRealm
+
+    def test_clientRekey(self):
+        """
+        After a client-initiated rekey is completed, application data continues
+        to be passed over the SSH connection.
+        """
+        process = ConchTestOpenSSHProcess()
+        d = self.execute("", process, '-o RekeyLimit=2K')
+        def finished(result):
+            self.assertEqual(
+                result,
+                '\n'.join(['line #%02d' % (i,) for i in range(60)]) + '\n')
+        d.addCallback(finished)
+        return d
+
+
+
+class OpenSSHClientMixin:
+    if not which('ssh'):
+        skip = "no ssh command-line client available"
+
+    def execute(self, remoteCommand, process, sshArgs=''):
+        """
+        Connects to the SSH server started in L{ConchServerSetupMixin.setUp} by
+        running the 'ssh' command line tool.
+
+        @type remoteCommand: str
+        @param remoteCommand: The command (with arguments) to run on the
+        remote end.
+
+        @type process: L{ConchTestOpenSSHProcess}
+
+        @type sshArgs: str
+        @param sshArgs: Arguments to pass to the 'ssh' process.
+
+        @return: L{defer.Deferred}
+        """
+        process.deferred = defer.Deferred()
+        cmdline = ('ssh -2 -l testuser -p %i '
+                   '-oUserKnownHostsFile=kh_test '
+                   '-oPasswordAuthentication=no '
+                   # Always use the RSA key, since that's the one in kh_test.
+                   '-oHostKeyAlgorithms=ssh-rsa '
+                   '-a '
+                   '-i dsa_test ') + sshArgs + \
+                   ' 127.0.0.1 ' + remoteCommand
+        port = self.conchServer.getHost().port
+        cmds = (cmdline % port).split()
+        reactor.spawnProcess(process, "ssh", cmds)
+        return process.deferred
+
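+    # For a conch server listening on port 1234, the command line built above
+    # expands to roughly (illustration, assuming the remote command
+    # 'echo goodbye' and empty sshArgs):
+    #     ssh -2 -l testuser -p 1234 -oUserKnownHostsFile=kh_test
+    #         -oPasswordAuthentication=no -oHostKeyAlgorithms=ssh-rsa -a
+    #         -i dsa_test 127.0.0.1 echo goodbye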
+
+
+class OpenSSHClientForwardingTestCase(ForwardingMixin, OpenSSHClientMixin,
+                                      unittest.TestCase):
+    """
+    Connection forwarding tests run against the OpenSSH command line client.
+    """
+
+
+
+class OpenSSHClientRekeyTestCase(RekeyTestsMixin, OpenSSHClientMixin,
+                                 unittest.TestCase):
+    """
+    Rekeying tests run against the OpenSSH command line client.
+    """
+
+
+
+class CmdLineClientTestCase(ForwardingMixin, unittest.TestCase):
+    """
+    Connection forwarding tests run against the Conch command line client.
+    """
+    if runtime.platformType == 'win32':
+        skip = "can't run cmdline client on win32"
+
+    def execute(self, remoteCommand, process, sshArgs=''):
+        """
+        As for L{OpenSSHClientMixin.execute}, except it runs the 'conch'
+        command line tool, not 'ssh'.
+        """
+        process.deferred = defer.Deferred()
+        port = self.conchServer.getHost().port
+        cmd = ('-p %i -l testuser '
+               '--known-hosts kh_test '
+               '--user-authentications publickey '
+               '--host-key-algorithms ssh-rsa '
+               '-a '
+               '-i dsa_test '
+               '-v ') % port + sshArgs + \
+               ' 127.0.0.1 ' + remoteCommand
+        cmds = _makeArgs(cmd.split())
+        log.msg(str(cmds))
+        env = os.environ.copy()
+        env['PYTHONPATH'] = os.pathsep.join(sys.path)
+        reactor.spawnProcess(process, sys.executable, cmds, env=env)
+        return process.deferred
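+
+    # Illustration (assumed expansion): for a conch server on port 1234 the
+    # argument string above becomes roughly
+    #     -p 1234 -l testuser --known-hosts kh_test
+    #     --user-authentications publickey --host-key-algorithms ssh-rsa
+    #     -a -i dsa_test -v 127.0.0.1 <remoteCommand>
+    # which _makeArgs() then wraps in a child Python invocation of the conch
+    # script.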
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_connection.py b/ThirdParty/Twisted/twisted/conch/test/test_connection.py
new file mode 100644
index 0000000..85a8e6a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_connection.py
@@ -0,0 +1,730 @@
+# Copyright (c) 2007-2010 Twisted Matrix Laboratories.
+# See LICENSE for details
+
+"""
+This module tests twisted.conch.ssh.connection.
+"""
+
+import struct
+
+from twisted.conch import error
+from twisted.conch.ssh import channel, common, connection
+from twisted.trial import unittest
+from twisted.conch.test import test_userauth
+
+
+class TestChannel(channel.SSHChannel):
+    """
+    A mocked-up version of twisted.conch.ssh.channel.SSHChannel.
+
+    @ivar gotOpen: True if channelOpen has been called.
+    @type gotOpen: C{bool}
+    @ivar specificData: the specific channel open data passed to channelOpen.
+    @type specificData: C{str}
+    @ivar openFailureReason: the reason passed to openFailed.
+    @type openFailureReason: C{error.ConchError}
+    @ivar inBuffer: a C{list} of strings received by the channel.
+    @type inBuffer: C{list}
+    @ivar extBuffer: a C{list} of 2-tuples (type, extended data) received by
+        the channel.
+    @type extBuffer: C{list}
+    @ivar numberRequests: the number of requests that have been made to this
+        channel.
+    @type numberRequests: C{int}
+    @ivar gotEOF: True if the other side sent EOF.
+    @type gotEOF: C{bool}
+    @ivar gotOneClose: True if the other side closed the connection.
+    @type gotOneClose: C{bool}
+    @ivar gotClosed: True if the channel is closed.
+    @type gotClosed: C{bool}
+    """
+    name = "TestChannel"
+    gotOpen = False
+
+    def logPrefix(self):
+        return "TestChannel %i" % self.id
+
+    def channelOpen(self, specificData):
+        """
+        The channel is open.  Set up the instance variables.
+        """
+        self.gotOpen = True
+        self.specificData = specificData
+        self.inBuffer = []
+        self.extBuffer = []
+        self.numberRequests = 0
+        self.gotEOF = False
+        self.gotOneClose = False
+        self.gotClosed = False
+
+    def openFailed(self, reason):
+        """
+        Opening the channel failed.  Store the reason why.
+        """
+        self.openFailureReason = reason
+
+    def request_test(self, data):
+        """
+        A test request.  Return True if data is 'data'.
+
+        @type data: C{str}
+        """
+        self.numberRequests += 1
+        return data == 'data'
+
+    def dataReceived(self, data):
+        """
+        Data was received.  Store it in the buffer.
+        """
+        self.inBuffer.append(data)
+
+    def extReceived(self, code, data):
+        """
+        Extended data was received.  Store it in the buffer.
+        """
+        self.extBuffer.append((code, data))
+
+    def eofReceived(self):
+        """
+        EOF was received.  Remember it.
+        """
+        self.gotEOF = True
+
+    def closeReceived(self):
+        """
+        Close was received.  Remember it.
+        """
+        self.gotOneClose = True
+
+    def closed(self):
+        """
+        The channel is closed.  Remember it.
+        """
+        self.gotClosed = True
+
+class TestAvatar:
+    """
+    A mocked-up version of twisted.conch.avatar.ConchUser
+    """
+    _ARGS_ERROR_CODE = 123
+
+    def lookupChannel(self, channelType, windowSize, maxPacket, data):
+        """
+        The server wants us to return a channel.  If the requested channel is
+        our TestChannel, return it, otherwise return None.
+        """
+        if channelType == TestChannel.name:
+            return TestChannel(remoteWindow=windowSize,
+                    remoteMaxPacket=maxPacket,
+                    data=data, avatar=self)
+        elif channelType == "conch-error-args":
+            # Raise a ConchError with backwards arguments to make sure the
+            # connection fixes it for us.  This case should be deprecated and
+            # deleted eventually, but only after all of Conch gets the argument
+            # order right.
+            raise error.ConchError(
+                self._ARGS_ERROR_CODE, "error args in wrong order")
+
+
+    def gotGlobalRequest(self, requestType, data):
+        """
+        The client has made a global request.  If the global request is
+        'TestGlobal', return True.  If the global request is 'TestData',
+        return True and the request-specific data we received.  Otherwise,
+        return False.
+        """
+        if requestType == 'TestGlobal':
+            return True
+        elif requestType == 'TestData':
+            return True, data
+        else:
+            return False
+
+
+
+class TestConnection(connection.SSHConnection):
+    """
+    A subclass of SSHConnection for testing.
+
+    @ivar channel: the current channel.
+    @type channel: C{TestChannel}
+    """
+
+    def logPrefix(self):
+        return "TestConnection"
+
+    def global_TestGlobal(self, data):
+        """
+        The other side made the 'TestGlobal' global request.  Return True.
+        """
+        return True
+
+    def global_Test_Data(self, data):
+        """
+        The other side made the 'Test-Data' global request.  Return True and
+        the data we received.
+        """
+        return True, data
+
+    def channel_TestChannel(self, windowSize, maxPacket, data):
+        """
+        The other side is requesting the TestChannel.  Create a C{TestChannel}
+        instance, store it, and return it.
+        """
+        self.channel = TestChannel(remoteWindow=windowSize,
+                remoteMaxPacket=maxPacket, data=data)
+        return self.channel
+
+    def channel_ErrorChannel(self, windowSize, maxPacket, data):
+        """
+        The other side is requesting the ErrorChannel.  Raise an exception.
+        """
+        raise AssertionError('no such thing')
+
+
+
+class ConnectionTestCase(unittest.TestCase):
+
+    if test_userauth.transport is None:
+        skip = "Cannot run without both PyCrypto and pyasn1"
+
+    def setUp(self):
+        self.transport = test_userauth.FakeTransport(None)
+        self.transport.avatar = TestAvatar()
+        self.conn = TestConnection()
+        self.conn.transport = self.transport
+        self.conn.serviceStarted()
+
+    def _openChannel(self, channel):
+        """
+        Open the channel with the default connection.
+        """
+        self.conn.openChannel(channel)
+        self.transport.packets = self.transport.packets[:-1]
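+        # Explanatory note on the fake SSH_MSG_CHANNEL_OPEN_CONFIRMATION built
+        # below (field names per the SSH connection protocol): two uint32s
+        # carrying our local channel id and the peer's channel id (255),
+        # followed by '\x00\x02\x00\x00' (a remote window of 0x20000 bytes)
+        # and '\x00\x00\x80\x00' (a remote max packet size of 0x8000 bytes).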
+        self.conn.ssh_CHANNEL_OPEN_CONFIRMATION(struct.pack('>2L',
+            channel.id, 255) + '\x00\x02\x00\x00\x00\x00\x80\x00')
+
+    def tearDown(self):
+        self.conn.serviceStopped()
+
+    def test_linkAvatar(self):
+        """
+        Test that the connection links itself to the avatar in the
+        transport.
+        """
+        self.assertIdentical(self.transport.avatar.conn, self.conn)
+
+    def test_serviceStopped(self):
+        """
+        Test that serviceStopped() closes any open channels.
+        """
+        channel1 = TestChannel()
+        channel2 = TestChannel()
+        self.conn.openChannel(channel1)
+        self.conn.openChannel(channel2)
+        self.conn.ssh_CHANNEL_OPEN_CONFIRMATION('\x00\x00\x00\x00' * 4)
+        self.assertTrue(channel1.gotOpen)
+        self.assertFalse(channel2.gotOpen)
+        self.conn.serviceStopped()
+        self.assertTrue(channel1.gotClosed)
+
+    def test_GLOBAL_REQUEST(self):
+        """
+        Test that global request packets are dispatched to the global_*
+        methods and the return values are translated into success or failure
+        messages.
+        """
+        self.conn.ssh_GLOBAL_REQUEST(common.NS('TestGlobal') + '\xff')
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_REQUEST_SUCCESS, '')])
+        self.transport.packets = []
+        self.conn.ssh_GLOBAL_REQUEST(common.NS('TestData') + '\xff' +
+                'test data')
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_REQUEST_SUCCESS, 'test data')])
+        self.transport.packets = []
+        self.conn.ssh_GLOBAL_REQUEST(common.NS('TestBad') + '\xff')
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_REQUEST_FAILURE, '')])
+        self.transport.packets = []
+        self.conn.ssh_GLOBAL_REQUEST(common.NS('TestGlobal') + '\x00')
+        self.assertEqual(self.transport.packets, [])
+
+    def test_REQUEST_SUCCESS(self):
+        """
+        Test that global request success packets cause the Deferred to be
+        called back.
+        """
+        d = self.conn.sendGlobalRequest('request', 'data', True)
+        self.conn.ssh_REQUEST_SUCCESS('data')
+        def check(data):
+            self.assertEqual(data, 'data')
+        d.addCallback(check)
+        d.addErrback(self.fail)
+        return d
+
+    def test_REQUEST_FAILURE(self):
+        """
+        Test that global request failure packets cause the Deferred to be
+        erred back.
+        """
+        d = self.conn.sendGlobalRequest('request', 'data', True)
+        self.conn.ssh_REQUEST_FAILURE('data')
+        def check(f):
+            self.assertEqual(f.value.data, 'data')
+        d.addCallback(self.fail)
+        d.addErrback(check)
+        return d
+
+    def test_CHANNEL_OPEN(self):
+        """
+        Test that open channel packets cause a channel to be created and
+        opened or a failure message to be returned.
+        """
+        del self.transport.avatar
+        self.conn.ssh_CHANNEL_OPEN(common.NS('TestChannel') +
+                '\x00\x00\x00\x01' * 4)
+        self.assertTrue(self.conn.channel.gotOpen)
+        self.assertEqual(self.conn.channel.conn, self.conn)
+        self.assertEqual(self.conn.channel.data, '\x00\x00\x00\x01')
+        self.assertEqual(self.conn.channel.specificData, '\x00\x00\x00\x01')
+        self.assertEqual(self.conn.channel.remoteWindowLeft, 1)
+        self.assertEqual(self.conn.channel.remoteMaxPacket, 1)
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_OPEN_CONFIRMATION,
+                    '\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00'
+                    '\x00\x00\x80\x00')])
+        self.transport.packets = []
+        self.conn.ssh_CHANNEL_OPEN(common.NS('BadChannel') +
+                '\x00\x00\x00\x02' * 4)
+        self.flushLoggedErrors()
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_OPEN_FAILURE,
+                    '\x00\x00\x00\x02\x00\x00\x00\x03' + common.NS(
+                    'unknown channel') + common.NS(''))])
+        self.transport.packets = []
+        self.conn.ssh_CHANNEL_OPEN(common.NS('ErrorChannel') +
+                '\x00\x00\x00\x02' * 4)
+        self.flushLoggedErrors()
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_OPEN_FAILURE,
+                    '\x00\x00\x00\x02\x00\x00\x00\x02' + common.NS(
+                    'unknown failure') + common.NS(''))])
+
+
+    def _lookupChannelErrorTest(self, code):
+        """
+        Deliver a request for a channel open which will result in an exception
+        being raised during channel lookup.  Assert that an error response is
+        delivered as a result.
+        """
+        self.transport.avatar._ARGS_ERROR_CODE = code
+        self.conn.ssh_CHANNEL_OPEN(
+            common.NS('conch-error-args') + '\x00\x00\x00\x01' * 4)
+        errors = self.flushLoggedErrors(error.ConchError)
+        self.assertEqual(
+            len(errors), 1, "Expected one error, got: %r" % (errors,))
+        self.assertEqual(errors[0].value.args, (123, "error args in wrong order"))
+        self.assertEqual(
+            self.transport.packets,
+            [(connection.MSG_CHANNEL_OPEN_FAILURE,
+              # The response includes some bytes which identify the
+              # associated request, as well as the error code (7b in hex) and
+              # the error message.
+              '\x00\x00\x00\x01\x00\x00\x00\x7b' + common.NS(
+                        'error args in wrong order') + common.NS(''))])
+
+
+    def test_lookupChannelError(self):
+        """
+        If a C{lookupChannel} implementation raises L{error.ConchError} with the
+        arguments in the wrong order, a C{MSG_CHANNEL_OPEN} failure is still
+        sent in response to the message.
+
+        This is a temporary work-around until L{error.ConchError} is given
+        better attributes and all of the Conch code starts constructing
+        instances of it properly.  Eventually this functionality should be
+        deprecated and then removed.
+        """
+        self._lookupChannelErrorTest(123)
+
+
+    def test_lookupChannelErrorLongCode(self):
+        """
+        Like L{test_lookupChannelError}, but for the case where the failure code
+        is represented as a C{long} instead of a C{int}.
+        """
+        self._lookupChannelErrorTest(123L)
+
+
+    def test_CHANNEL_OPEN_CONFIRMATION(self):
+        """
+        Test that channel open confirmation packets cause the channel to be
+        notified that it's open.
+        """
+        channel = TestChannel()
+        self.conn.openChannel(channel)
+        self.conn.ssh_CHANNEL_OPEN_CONFIRMATION('\x00\x00\x00\x00'*5)
+        self.assertEqual(channel.remoteWindowLeft, 0)
+        self.assertEqual(channel.remoteMaxPacket, 0)
+        self.assertEqual(channel.specificData, '\x00\x00\x00\x00')
+        self.assertEqual(self.conn.channelsToRemoteChannel[channel],
+                0)
+        self.assertEqual(self.conn.localToRemoteChannel[0], 0)
+
+    def test_CHANNEL_OPEN_FAILURE(self):
+        """
+        Test that channel open failure packets cause the channel to be
+        notified that its opening failed.
+        """
+        channel = TestChannel()
+        self.conn.openChannel(channel)
+        self.conn.ssh_CHANNEL_OPEN_FAILURE('\x00\x00\x00\x00\x00\x00\x00'
+                '\x01' + common.NS('failure!'))
+        self.assertEqual(channel.openFailureReason.args, ('failure!', 1))
+        self.assertEqual(self.conn.channels.get(channel), None)
+
+
+    def test_CHANNEL_WINDOW_ADJUST(self):
+        """
+        Test that channel window adjust messages add bytes to the channel
+        window.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        oldWindowSize = channel.remoteWindowLeft
+        self.conn.ssh_CHANNEL_WINDOW_ADJUST('\x00\x00\x00\x00\x00\x00\x00'
+                '\x01')
+        self.assertEqual(channel.remoteWindowLeft, oldWindowSize + 1)
+
+    def test_CHANNEL_DATA(self):
+        """
+        Test that channel data messages are passed up to the channel, or
+        cause the channel to be closed if the data is too large.
+        """
+        channel = TestChannel(localWindow=6, localMaxPacket=5)
+        self._openChannel(channel)
+        self.conn.ssh_CHANNEL_DATA('\x00\x00\x00\x00' + common.NS('data'))
+        self.assertEqual(channel.inBuffer, ['data'])
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_WINDOW_ADJUST, '\x00\x00\x00\xff'
+                    '\x00\x00\x00\x04')])
+        self.transport.packets = []
+        longData = 'a' * (channel.localWindowLeft + 1)
+        self.conn.ssh_CHANNEL_DATA('\x00\x00\x00\x00' + common.NS(longData))
+        self.assertEqual(channel.inBuffer, ['data'])
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_CLOSE, '\x00\x00\x00\xff')])
+        channel = TestChannel()
+        self._openChannel(channel)
+        bigData = 'a' * (channel.localMaxPacket + 1)
+        self.transport.packets = []
+        self.conn.ssh_CHANNEL_DATA('\x00\x00\x00\x01' + common.NS(bigData))
+        self.assertEqual(channel.inBuffer, [])
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_CLOSE, '\x00\x00\x00\xff')])
+
+    def test_CHANNEL_EXTENDED_DATA(self):
+        """
+        Test that channel extended data messages are passed up to the channel,
+        or cause the channel to be closed if they're too big.
+        """
+        channel = TestChannel(localWindow=6, localMaxPacket=5)
+        self._openChannel(channel)
+        self.conn.ssh_CHANNEL_EXTENDED_DATA('\x00\x00\x00\x00\x00\x00\x00'
+                                            '\x00' + common.NS('data'))
+        self.assertEqual(channel.extBuffer, [(0, 'data')])
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_WINDOW_ADJUST, '\x00\x00\x00\xff'
+                    '\x00\x00\x00\x04')])
+        self.transport.packets = []
+        longData = 'a' * (channel.localWindowLeft + 1)
+        self.conn.ssh_CHANNEL_EXTENDED_DATA('\x00\x00\x00\x00\x00\x00\x00'
+                                            '\x00' + common.NS(longData))
+        self.assertEqual(channel.extBuffer, [(0, 'data')])
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_CLOSE, '\x00\x00\x00\xff')])
+        channel = TestChannel()
+        self._openChannel(channel)
+        bigData = 'a' * (channel.localMaxPacket + 1)
+        self.transport.packets = []
+        self.conn.ssh_CHANNEL_EXTENDED_DATA('\x00\x00\x00\x01\x00\x00\x00'
+                                            '\x00' + common.NS(bigData))
+        self.assertEqual(channel.extBuffer, [])
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_CLOSE, '\x00\x00\x00\xff')])
+
+    def test_CHANNEL_EOF(self):
+        """
+        Test that channel eof messages are passed up to the channel.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        self.conn.ssh_CHANNEL_EOF('\x00\x00\x00\x00')
+        self.assertTrue(channel.gotEOF)
+
+    def test_CHANNEL_CLOSE(self):
+        """
+        Test that channel close messages are passed up to the channel.  Also,
+        test that channel.close() is called if both sides are closed when this
+        message is received.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        self.conn.sendClose(channel)
+        self.conn.ssh_CHANNEL_CLOSE('\x00\x00\x00\x00')
+        self.assertTrue(channel.gotOneClose)
+        self.assertTrue(channel.gotClosed)
+
+    def test_CHANNEL_REQUEST_success(self):
+        """
+        Test that channel requests that succeed send MSG_CHANNEL_SUCCESS.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        self.conn.ssh_CHANNEL_REQUEST('\x00\x00\x00\x00' + common.NS('test')
+                + '\x00')
+        self.assertEqual(channel.numberRequests, 1)
+        d = self.conn.ssh_CHANNEL_REQUEST('\x00\x00\x00\x00' + common.NS(
+            'test') + '\xff' + 'data')
+        def check(result):
+            self.assertEqual(self.transport.packets,
+                    [(connection.MSG_CHANNEL_SUCCESS, '\x00\x00\x00\xff')])
+        d.addCallback(check)
+        return d
+
+    def test_CHANNEL_REQUEST_failure(self):
+        """
+        Test that channel requests that fail send MSG_CHANNEL_FAILURE.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        d = self.conn.ssh_CHANNEL_REQUEST('\x00\x00\x00\x00' + common.NS(
+            'test') + '\xff')
+        def check(result):
+            self.assertEqual(self.transport.packets,
+                    [(connection.MSG_CHANNEL_FAILURE, '\x00\x00\x00\xff'
+                        )])
+        d.addCallback(self.fail)
+        d.addErrback(check)
+        return d
+
+    def test_CHANNEL_REQUEST_SUCCESS(self):
+        """
+        Test that channel request success messages cause the Deferred to be
+        called back.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        d = self.conn.sendRequest(channel, 'test', 'data', True)
+        self.conn.ssh_CHANNEL_SUCCESS('\x00\x00\x00\x00')
+        def check(result):
+            self.assertTrue(result)
+        return d
+
+    def test_CHANNEL_REQUEST_FAILURE(self):
+        """
+        Test that channel request failure messages cause the Deferred to be
+        erred back.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        d = self.conn.sendRequest(channel, 'test', '', True)
+        self.conn.ssh_CHANNEL_FAILURE('\x00\x00\x00\x00')
+        def check(result):
+            self.assertEqual(result.value.value, 'channel request failed')
+        d.addCallback(self.fail)
+        d.addErrback(check)
+        return d
+
+    def test_sendGlobalRequest(self):
+        """
+        Test that global request messages are sent in the right format.
+        """
+        d = self.conn.sendGlobalRequest('wantReply', 'data', True)
+        # must be added to prevent errbacking during teardown
+        d.addErrback(lambda failure: None)
+        self.conn.sendGlobalRequest('noReply', '', False)
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_GLOBAL_REQUEST, common.NS('wantReply') +
+                    '\xffdata'),
+                 (connection.MSG_GLOBAL_REQUEST, common.NS('noReply') +
+                     '\x00')])
+        self.assertEqual(self.conn.deferreds, {'global':[d]})
+
+    def test_openChannel(self):
+        """
+        Test that open channel messages are sent in the right format.
+        """
+        channel = TestChannel()
+        self.conn.openChannel(channel, 'aaaa')
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_OPEN, common.NS('TestChannel') +
+                    '\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x80\x00aaaa')])
+        self.assertEqual(channel.id, 0)
+        self.assertEqual(self.conn.localChannelID, 1)
+
+    def test_sendRequest(self):
+        """
+        Test that channel request messages are sent in the right format.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        d = self.conn.sendRequest(channel, 'test', 'test', True)
+        # needed to prevent errbacks during teardown.
+        d.addErrback(lambda failure: None)
+        self.conn.sendRequest(channel, 'test2', '', False)
+        channel.localClosed = True # emulate sending a close message
+        self.conn.sendRequest(channel, 'test3', '', True)
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_REQUEST, '\x00\x00\x00\xff' +
+                    common.NS('test') + '\x01test'),
+                 (connection.MSG_CHANNEL_REQUEST, '\x00\x00\x00\xff' +
+                     common.NS('test2') + '\x00')])
+        self.assertEqual(self.conn.deferreds[0], [d])
+
+    def test_adjustWindow(self):
+        """
+        Test that channel window adjust messages cause bytes to be added
+        to the window.
+        """
+        channel = TestChannel(localWindow=5)
+        self._openChannel(channel)
+        channel.localWindowLeft = 0
+        self.conn.adjustWindow(channel, 1)
+        self.assertEqual(channel.localWindowLeft, 1)
+        channel.localClosed = True
+        self.conn.adjustWindow(channel, 2)
+        self.assertEqual(channel.localWindowLeft, 1)
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_WINDOW_ADJUST, '\x00\x00\x00\xff'
+                    '\x00\x00\x00\x01')])
+
+    def test_sendData(self):
+        """
+        Test that channel data messages are sent in the right format.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        self.conn.sendData(channel, 'a')
+        channel.localClosed = True
+        self.conn.sendData(channel, 'b')
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_DATA, '\x00\x00\x00\xff' +
+                    common.NS('a'))])
+
+    def test_sendExtendedData(self):
+        """
+        Test that channel extended data messages are sent in the right format.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        self.conn.sendExtendedData(channel, 1, 'test')
+        channel.localClosed = True
+        self.conn.sendExtendedData(channel, 2, 'test2')
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_EXTENDED_DATA, '\x00\x00\x00\xff' +
+                    '\x00\x00\x00\x01' + common.NS('test'))])
+
+    def test_sendEOF(self):
+        """
+        Test that channel EOF messages are sent in the right format.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        self.conn.sendEOF(channel)
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_EOF, '\x00\x00\x00\xff')])
+        channel.localClosed = True
+        self.conn.sendEOF(channel)
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_EOF, '\x00\x00\x00\xff')])
+
+    def test_sendClose(self):
+        """
+        Test that channel close messages are sent in the right format.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+        self.conn.sendClose(channel)
+        self.assertTrue(channel.localClosed)
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_CLOSE, '\x00\x00\x00\xff')])
+        self.conn.sendClose(channel)
+        self.assertEqual(self.transport.packets,
+                [(connection.MSG_CHANNEL_CLOSE, '\x00\x00\x00\xff')])
+
+        channel2 = TestChannel()
+        self._openChannel(channel2)
+        channel2.remoteClosed = True
+        self.conn.sendClose(channel2)
+        self.assertTrue(channel2.gotClosed)
+
+    def test_getChannelWithAvatar(self):
+        """
+        Test that getChannel dispatches to the avatar when an avatar is
+        present. Correct functioning without the avatar is verified in
+        test_CHANNEL_OPEN.
+        """
+        channel = self.conn.getChannel('TestChannel', 50, 30, 'data')
+        self.assertEqual(channel.data, 'data')
+        self.assertEqual(channel.remoteWindowLeft, 50)
+        self.assertEqual(channel.remoteMaxPacket, 30)
+        self.assertRaises(error.ConchError, self.conn.getChannel,
+                'BadChannel', 50, 30, 'data')
+
+    def test_gotGlobalRequestWithoutAvatar(self):
+        """
+        Test that gotGlobalRequests dispatches to global_* without an avatar.
+        """
+        del self.transport.avatar
+        self.assertTrue(self.conn.gotGlobalRequest('TestGlobal', 'data'))
+        self.assertEqual(self.conn.gotGlobalRequest('Test-Data', 'data'),
+                (True, 'data'))
+        self.assertFalse(self.conn.gotGlobalRequest('BadGlobal', 'data'))
+
+
+    def test_channelClosedCausesLeftoverChannelDeferredsToErrback(self):
+        """
+        Whenever an SSH channel gets closed any Deferred that was returned by a
+        sendRequest() on its parent connection must be errbacked.
+        """
+        channel = TestChannel()
+        self._openChannel(channel)
+
+        d = self.conn.sendRequest(
+            channel, "dummyrequest", "dummydata", wantReply=1)
+        d = self.assertFailure(d, error.ConchError)
+        self.conn.channelClosed(channel)
+        return d
+
+
+
+class TestCleanConnectionShutdown(unittest.TestCase):
+    """
+    Check whether correct cleanup is performed on connection shutdown.
+    """
+    if test_userauth.transport is None:
+        skip = "Cannot run without both PyCrypto and pyasn1"
+
+    def setUp(self):
+        self.transport = test_userauth.FakeTransport(None)
+        self.transport.avatar = TestAvatar()
+        self.conn = TestConnection()
+        self.conn.transport = self.transport
+
+
+    def test_serviceStoppedCausesLeftoverGlobalDeferredsToErrback(self):
+        """
+        Once the service is stopped any leftover global deferred returned by
+        a sendGlobalRequest() call must be errbacked.
+        """
+        self.conn.serviceStarted()
+
+        d = self.conn.sendGlobalRequest(
+            "dummyrequest", "dummydata", wantReply=1)
+        d = self.assertFailure(d, error.ConchError)
+        self.conn.serviceStopped()
+        return d
+
+
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_default.py b/ThirdParty/Twisted/twisted/conch/test/test_default.py
new file mode 100644
index 0000000..109f23d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_default.py
@@ -0,0 +1,171 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.client.default}.
+"""
+try:
+    import Crypto.Cipher.DES3
+    import pyasn1
+except ImportError:
+    skip = "PyCrypto and PyASN1 required for twisted.conch.client.default."
+else:
+    from twisted.conch.client.agent import SSHAgentClient
+    from twisted.conch.client.default import SSHUserAuthClient
+    from twisted.conch.client.options import ConchOptions
+    from twisted.conch.ssh.keys import Key
+
+
+from twisted.trial.unittest import TestCase
+from twisted.python.filepath import FilePath
+from twisted.conch.test import keydata
+from twisted.test.proto_helpers import StringTransport
+
+
+
+class SSHUserAuthClientTest(TestCase):
+    """
+    Tests for L{SSHUserAuthClient}.
+
+    @type rsaPublic: L{Key}
+    @ivar rsaPublic: A public RSA key.
+    """
+
+    def setUp(self):
+        self.rsaPublic = Key.fromString(keydata.publicRSA_openssh)
+        self.tmpdir = FilePath(self.mktemp())
+        self.tmpdir.makedirs()
+        self.rsaFile = self.tmpdir.child('id_rsa')
+        self.rsaFile.setContent(keydata.privateRSA_openssh)
+        self.tmpdir.child('id_rsa.pub').setContent(keydata.publicRSA_openssh)
+
+
+    def test_signDataWithAgent(self):
+        """
+        When connected to an agent, L{SSHUserAuthClient} can use it to
+        request signatures of particular data with a particular L{Key}.
+        """
+        client = SSHUserAuthClient("user", ConchOptions(), None)
+        agent = SSHAgentClient()
+        transport = StringTransport()
+        agent.makeConnection(transport)
+        client.keyAgent = agent
+        cleartext = "Sign here"
+        client.signData(self.rsaPublic, cleartext)
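+        # Explanatory note (assuming the standard ssh-agent request framing):
+        # the expected bytes are a uint32 total length, the sign-request
+        # message type (13, i.e. '\r'), the length-prefixed key blob, the
+        # length-prefixed data to sign, and a trailing uint32 of flags.
+        #   '\x00\x00\x00\x8b' -> 139 bytes follow
+        #   '\r'               -> request type 13
+        #   '\x00\x00\x00u'    -> 117-byte key blob
+        #   '\x00\x00\x00\t'   -> 9-byte payload ("Sign here")
+        #   '\x00\x00\x00\x00' -> flags (none)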
+        self.assertEqual(
+            transport.value(),
+            "\x00\x00\x00\x8b\r\x00\x00\x00u" + self.rsaPublic.blob() +
+            "\x00\x00\x00\t" + cleartext +
+            "\x00\x00\x00\x00")
+
+
+    def test_agentGetPublicKey(self):
+        """
+        L{SSHUserAuthClient} looks up public keys from the agent using the
+        L{SSHAgentClient} class.  L{SSHAgentClient.getPublicKey} returns a
+        L{Key} object for one of the public keys held by the agent.  If no more
+        keys are present, it returns C{None}.
+        """
+        agent = SSHAgentClient()
+        agent.blobs = [self.rsaPublic.blob()]
+        key = agent.getPublicKey()
+        self.assertEqual(key.isPublic(), True)
+        self.assertEqual(key, self.rsaPublic)
+        self.assertEqual(agent.getPublicKey(), None)
+
+
+    def test_getPublicKeyFromFile(self):
+        """
+        L{SSHUserAuthClient.getPublicKey()} is able to get a public key from
+        the first file described by its options' C{identitys} list, and return
+        the corresponding public L{Key} object.
+        """
+        options = ConchOptions()
+        options.identitys = [self.rsaFile.path]
+        client = SSHUserAuthClient("user",  options, None)
+        key = client.getPublicKey()
+        self.assertEqual(key.isPublic(), True)
+        self.assertEqual(key, self.rsaPublic)
+
+
+    def test_getPublicKeyAgentFallback(self):
+        """
+        If an agent is present, but doesn't return a key,
+        L{SSHUserAuthClient.getPublicKey} continues with the normal key lookup.
+        """
+        options = ConchOptions()
+        options.identitys = [self.rsaFile.path]
+        agent = SSHAgentClient()
+        client = SSHUserAuthClient("user",  options, None)
+        client.keyAgent = agent
+        key = client.getPublicKey()
+        self.assertEqual(key.isPublic(), True)
+        self.assertEqual(key, self.rsaPublic)
+
+
+    def test_getPublicKeyBadKeyError(self):
+        """
+        If L{keys.Key.fromFile} raises a L{keys.BadKeyError},
+        L{SSHUserAuthClient.getPublicKey} tries again to get a public key by
+        calling itself recursively.
+        """
+        options = ConchOptions()
+        self.tmpdir.child('id_dsa.pub').setContent(keydata.publicDSA_openssh)
+        dsaFile = self.tmpdir.child('id_dsa')
+        dsaFile.setContent(keydata.privateDSA_openssh)
+        options.identitys = [self.rsaFile.path, dsaFile.path]
+        self.tmpdir.child('id_rsa.pub').setContent('not a key!')
+        client = SSHUserAuthClient("user",  options, None)
+        key = client.getPublicKey()
+        self.assertEqual(key.isPublic(), True)
+        self.assertEqual(key, Key.fromString(keydata.publicDSA_openssh))
+        self.assertEqual(client.usedFiles, [self.rsaFile.path, dsaFile.path])
+
+
+    def test_getPrivateKey(self):
+        """
+        L{SSHUserAuthClient.getPrivateKey} will load a private key from the
+        last used file populated by L{SSHUserAuthClient.getPublicKey}, and
+        return a L{Deferred} which fires with the corresponding private L{Key}.
+        """
+        rsaPrivate = Key.fromString(keydata.privateRSA_openssh)
+        options = ConchOptions()
+        options.identitys = [self.rsaFile.path]
+        client = SSHUserAuthClient("user",  options, None)
+        # Populate the list of used files
+        client.getPublicKey()
+
+        def _cbGetPrivateKey(key):
+            self.assertEqual(key.isPublic(), False)
+            self.assertEqual(key, rsaPrivate)
+
+        return client.getPrivateKey().addCallback(_cbGetPrivateKey)
+
+
+    def test_getPrivateKeyPassphrase(self):
+        """
+        L{SSHUserAuthClient} can get a private key from a file, and return a
+        Deferred called back with a private L{Key} object, even if the key is
+        encrypted.
+        """
+        rsaPrivate = Key.fromString(keydata.privateRSA_openssh)
+        passphrase = 'this is the passphrase'
+        self.rsaFile.setContent(rsaPrivate.toString('openssh', passphrase))
+        options = ConchOptions()
+        options.identitys = [self.rsaFile.path]
+        client = SSHUserAuthClient("user",  options, None)
+        # Populate the list of used files
+        client.getPublicKey()
+
+        def _getPassword(prompt):
+            self.assertEqual(prompt,
+                              "Enter passphrase for key '%s': " % (
+                              self.rsaFile.path,))
+            return passphrase
+
+        def _cbGetPrivateKey(key):
+            self.assertEqual(key.isPublic(), False)
+            self.assertEqual(key, rsaPrivate)
+
+        self.patch(client, '_getPassword', _getPassword)
+        return client.getPrivateKey().addCallback(_cbGetPrivateKey)
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_filetransfer.py b/ThirdParty/Twisted/twisted/conch/test/test_filetransfer.py
new file mode 100644
index 0000000..3849331
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_filetransfer.py
@@ -0,0 +1,765 @@
+# -*- test-case-name: twisted.conch.test.test_filetransfer -*-
+# Copyright (c) 2001-2008 Twisted Matrix Laboratories.
+# See LICENSE file for details.
+
+
+import os
+import re
+import struct
+import sys
+
+from twisted.trial import unittest
+try:
+    from twisted.conch import unix
+    unix # shut up pyflakes
+except ImportError:
+    unix = None
+    try:
+        del sys.modules['twisted.conch.unix'] # remove the bad import
+    except KeyError:
+        # In Python 2.4, the bad import has already been cleaned up for us.
+        # Hooray.
+        pass
+
+from twisted.conch import avatar
+from twisted.conch.ssh import common, connection, filetransfer, session
+from twisted.internet import defer
+from twisted.protocols import loopback
+from twisted.python import components
+
+
+class TestAvatar(avatar.ConchUser):
+    def __init__(self):
+        avatar.ConchUser.__init__(self)
+        self.channelLookup['session'] = session.SSHSession
+        self.subsystemLookup['sftp'] = filetransfer.FileTransferServer
+
+    def _runAsUser(self, f, *args, **kw):
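+        # Accepts either a single callable plus args/kwargs, or an iterable of
+        # (callable[, args[, kwargs]]) tuples; only the last call's result is
+        # returned.  Illustration (hypothetical calls):
+        #     self._runAsUser(os.listdir, self.homeDir)
+        #     self._runAsUser([(os.chmod, (path, 0644)), (os.listdir, (path,))])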
+        try:
+            f = iter(f)
+        except TypeError:
+            f = [(f, args, kw)]
+        for i in f:
+            func = i[0]
+            args = len(i)>1 and i[1] or ()
+            kw = len(i)>2 and i[2] or {}
+            r = func(*args, **kw)
+        return r
+
+
+class FileTransferTestAvatar(TestAvatar):
+
+    def __init__(self, homeDir):
+        TestAvatar.__init__(self)
+        self.homeDir = homeDir
+
+    def getHomeDir(self):
+        return os.path.join(os.getcwd(), self.homeDir)
+
+
+class ConchSessionForTestAvatar:
+
+    def __init__(self, avatar):
+        self.avatar = avatar
+
+if unix:
+    if not hasattr(unix, 'SFTPServerForUnixConchUser'):
+        # unix should either be a fully working module, or None.  I'm not sure
+        # how this happens, but on win32 it does.  Try to cope.  --spiv.
+        import warnings
+        warnings.warn(("twisted.conch.unix imported %r, "
+                       "but doesn't define SFTPServerForUnixConchUser'")
+                      % (unix,))
+        unix = None
+    else:
+        class FileTransferForTestAvatar(unix.SFTPServerForUnixConchUser):
+
+            def gotVersion(self, version, otherExt):
+                return {'conchTest' : 'ext data'}
+
+            def extendedRequest(self, extName, extData):
+                if extName == 'testExtendedRequest':
+                    return 'bar'
+                raise NotImplementedError
+
+        components.registerAdapter(FileTransferForTestAvatar,
+                                   TestAvatar,
+                                   filetransfer.ISFTPServer)
+
+class SFTPTestBase(unittest.TestCase):
+
+    def setUp(self):
+        self.testDir = self.mktemp()
+        # Give the testDir another level so we can safely "cd .." from it in
+        # tests.
+        self.testDir = os.path.join(self.testDir, 'extra')
+        os.makedirs(os.path.join(self.testDir, 'testDirectory'))
+
+        f = file(os.path.join(self.testDir, 'testfile1'),'w')
+        f.write('a'*10+'b'*10)
+        f.write(file('/dev/urandom').read(1024*64)) # random data
+        os.chmod(os.path.join(self.testDir, 'testfile1'), 0644)
+        file(os.path.join(self.testDir, 'testRemoveFile'), 'w').write('a')
+        file(os.path.join(self.testDir, 'testRenameFile'), 'w').write('a')
+        file(os.path.join(self.testDir, '.testHiddenFile'), 'w').write('a')
+
+
+class TestOurServerOurClient(SFTPTestBase):
+
+    if not unix:
+        skip = "can't run on non-posix computers"
+
+    def setUp(self):
+        SFTPTestBase.setUp(self)
+
+        self.avatar = FileTransferTestAvatar(self.testDir)
+        self.server = filetransfer.FileTransferServer(avatar=self.avatar)
+        clientTransport = loopback.LoopbackRelay(self.server)
+
+        self.client = filetransfer.FileTransferClient()
+        self._serverVersion = None
+        self._extData = None
+        def _(serverVersion, extData):
+            self._serverVersion = serverVersion
+            self._extData = extData
+        self.client.gotServerVersion = _
+        serverTransport = loopback.LoopbackRelay(self.client)
+        self.client.makeConnection(clientTransport)
+        self.server.makeConnection(serverTransport)
+
+        self.clientTransport = clientTransport
+        self.serverTransport = serverTransport
+
+        self._emptyBuffers()
+
+
+    def _emptyBuffers(self):
+        while self.serverTransport.buffer or self.clientTransport.buffer:
+            self.serverTransport.clearBuffer()
+            self.clientTransport.clearBuffer()
+
+
+    def tearDown(self):
+        self.serverTransport.loseConnection()
+        self.clientTransport.loseConnection()
+        self.serverTransport.clearBuffer()
+        self.clientTransport.clearBuffer()
+
+
+    def testServerVersion(self):
+        self.assertEqual(self._serverVersion, 3)
+        self.assertEqual(self._extData, {'conchTest' : 'ext data'})
+
+
+    def test_openedFileClosedWithConnection(self):
+        """
+        A file opened with C{openFile} is closed when the connection is lost.
+        """
+        d = self.client.openFile("testfile1", filetransfer.FXF_READ |
+                                 filetransfer.FXF_WRITE, {})
+        self._emptyBuffers()
+
+        oldClose = os.close
+        closed = []
+        def close(fd):
+            closed.append(fd)
+            oldClose(fd)
+
+        self.patch(os, "close", close)
+
+        def _fileOpened(openFile):
+            fd = self.server.openFiles[openFile.handle[4:]].fd
+            self.serverTransport.loseConnection()
+            self.clientTransport.loseConnection()
+            self.serverTransport.clearBuffer()
+            self.clientTransport.clearBuffer()
+            self.assertEqual(self.server.openFiles, {})
+            self.assertIn(fd, closed)
+
+        d.addCallback(_fileOpened)
+        return d
+
+
+    def test_openedDirectoryClosedWithConnection(self):
+        """
+        A directory opened with C{openDirectory} is closed when the connection
+        is lost.
+        """
+        d = self.client.openDirectory('')
+        self._emptyBuffers()
+
+        def _getFiles(openDir):
+            self.serverTransport.loseConnection()
+            self.clientTransport.loseConnection()
+            self.serverTransport.clearBuffer()
+            self.clientTransport.clearBuffer()
+            self.assertEqual(self.server.openDirs, {})
+
+        d.addCallback(_getFiles)
+        return d
+
+
+    def testOpenFileIO(self):
+        d = self.client.openFile("testfile1", filetransfer.FXF_READ |
+                                 filetransfer.FXF_WRITE, {})
+        self._emptyBuffers()
+
+        def _fileOpened(openFile):
+            self.assertEqual(openFile, filetransfer.ISFTPFile(openFile))
+            d = _readChunk(openFile)
+            d.addCallback(_writeChunk, openFile)
+            return d
+
+        def _readChunk(openFile):
+            d = openFile.readChunk(0, 20)
+            self._emptyBuffers()
+            d.addCallback(self.assertEqual, 'a'*10 + 'b'*10)
+            return d
+
+        def _writeChunk(_, openFile):
+            d = openFile.writeChunk(20, 'c'*10)
+            self._emptyBuffers()
+            d.addCallback(_readChunk2, openFile)
+            return d
+
+        def _readChunk2(_, openFile):
+            d = openFile.readChunk(0, 30)
+            self._emptyBuffers()
+            d.addCallback(self.assertEqual, 'a'*10 + 'b'*10 + 'c'*10)
+            return d
+
+        d.addCallback(_fileOpened)
+        return d
+
+    def testClosedFileGetAttrs(self):
+        d = self.client.openFile("testfile1", filetransfer.FXF_READ |
+                                 filetransfer.FXF_WRITE, {})
+        self._emptyBuffers()
+
+        def _getAttrs(_, openFile):
+            d = openFile.getAttrs()
+            self._emptyBuffers()
+            return d
+
+        def _err(f):
+            self.flushLoggedErrors()
+            return f
+
+        def _close(openFile):
+            d = openFile.close()
+            self._emptyBuffers()
+            d.addCallback(_getAttrs, openFile)
+            d.addErrback(_err)
+            return self.assertFailure(d, filetransfer.SFTPError)
+
+        d.addCallback(_close)
+        return d
+
+    def testOpenFileAttributes(self):
+        d = self.client.openFile("testfile1", filetransfer.FXF_READ |
+                                 filetransfer.FXF_WRITE, {})
+        self._emptyBuffers()
+
+        def _getAttrs(openFile):
+            d = openFile.getAttrs()
+            self._emptyBuffers()
+            d.addCallback(_getAttrs2)
+            return d
+
+        def _getAttrs2(attrs1):
+            d = self.client.getAttrs('testfile1')
+            self._emptyBuffers()
+            d.addCallback(self.assertEqual, attrs1)
+            return d
+
+        return d.addCallback(_getAttrs)
+
+
+    def testOpenFileSetAttrs(self):
+        # XXX test setAttrs
+        # Ok, how about this for a start?  It caught a bug :)  -- spiv.
+        d = self.client.openFile("testfile1", filetransfer.FXF_READ |
+                                 filetransfer.FXF_WRITE, {})
+        self._emptyBuffers()
+
+        def _getAttrs(openFile):
+            d = openFile.getAttrs()
+            self._emptyBuffers()
+            d.addCallback(_setAttrs)
+            return d
+
+        def _setAttrs(attrs):
+            attrs['atime'] = 0
+            d = self.client.setAttrs('testfile1', attrs)
+            self._emptyBuffers()
+            d.addCallback(_getAttrs2)
+            d.addCallback(self.assertEqual, attrs)
+            return d
+
+        def _getAttrs2(_):
+            d = self.client.getAttrs('testfile1')
+            self._emptyBuffers()
+            return d
+
+        d.addCallback(_getAttrs)
+        return d
+
+
+    def test_openFileExtendedAttributes(self):
+        """
+        Check that L{filetransfer.FileTransferClient.openFile} can send
+        extended attributes, which should be extracted server side. By default,
+        they are ignored, so we just verify they are correctly parsed.
+        """
+        savedAttributes = {}
+        oldOpenFile = self.server.client.openFile
+        def openFile(filename, flags, attrs):
+            savedAttributes.update(attrs)
+            return oldOpenFile(filename, flags, attrs)
+        self.server.client.openFile = openFile
+
+        d = self.client.openFile("testfile1", filetransfer.FXF_READ |
+                filetransfer.FXF_WRITE, {"ext_foo": "bar"})
+        self._emptyBuffers()
+
+        def check(ign):
+            self.assertEqual(savedAttributes, {"ext_foo": "bar"})
+
+        return d.addCallback(check)
+
+
+    def testRemoveFile(self):
+        d = self.client.getAttrs("testRemoveFile")
+        self._emptyBuffers()
+        def _removeFile(ignored):
+            d = self.client.removeFile("testRemoveFile")
+            self._emptyBuffers()
+            return d
+        d.addCallback(_removeFile)
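+        # The second removal targets a file that no longer exists, so the
+        # chain as a whole is expected to fail with SFTPError.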
+        d.addCallback(_removeFile)
+        return self.assertFailure(d, filetransfer.SFTPError)
+
+    def testRenameFile(self):
+        d = self.client.getAttrs("testRenameFile")
+        self._emptyBuffers()
+        def _rename(attrs):
+            d = self.client.renameFile("testRenameFile", "testRenamedFile")
+            self._emptyBuffers()
+            d.addCallback(_testRenamed, attrs)
+            return d
+        def _testRenamed(_, attrs):
+            d = self.client.getAttrs("testRenamedFile")
+            self._emptyBuffers()
+            d.addCallback(self.assertEqual, attrs)
+            return d
+        return d.addCallback(_rename)
+
+    def testDirectoryBad(self):
+        d = self.client.getAttrs("testMakeDirectory")
+        self._emptyBuffers()
+        return self.assertFailure(d, filetransfer.SFTPError)
+
+    def testDirectoryCreation(self):
+        d = self.client.makeDirectory("testMakeDirectory", {})
+        self._emptyBuffers()
+
+        def _getAttrs(_):
+            d = self.client.getAttrs("testMakeDirectory")
+            self._emptyBuffers()
+            return d
+
+        # XXX not until version 4/5
+        # self.assertEqual(filetransfer.FILEXFER_TYPE_DIRECTORY&attrs['type'],
+        #                     filetransfer.FILEXFER_TYPE_DIRECTORY)
+
+        def _removeDirectory(_):
+            d = self.client.removeDirectory("testMakeDirectory")
+            self._emptyBuffers()
+            return d
+
+        d.addCallback(_getAttrs)
+        d.addCallback(_removeDirectory)
+        d.addCallback(_getAttrs)
+        return self.assertFailure(d, filetransfer.SFTPError)
+
+    def testOpenDirectory(self):
+        d = self.client.openDirectory('')
+        self._emptyBuffers()
+        files = []
+
+        def _getFiles(openDir):
+            def append(f):
+                files.append(f)
+                return openDir
+            d = defer.maybeDeferred(openDir.next)
+            self._emptyBuffers()
+            d.addCallback(append)
+            d.addCallback(_getFiles)
+            d.addErrback(_close, openDir)
+            return d
+
+        def _checkFiles(ignored):
+            fs = list(zip(*files)[0])
+            fs.sort()
+            self.assertEqual(fs,
+                                 ['.testHiddenFile', 'testDirectory',
+                                  'testRemoveFile', 'testRenameFile',
+                                  'testfile1'])
+
+        def _close(_, openDir):
+            d = openDir.close()
+            self._emptyBuffers()
+            return d
+
+        d.addCallback(_getFiles)
+        d.addCallback(_checkFiles)
+        return d
+
+    def testLinkDoesntExist(self):
+        d = self.client.getAttrs('testLink')
+        self._emptyBuffers()
+        return self.assertFailure(d, filetransfer.SFTPError)
+
+    def testLinkSharesAttrs(self):
+        d = self.client.makeLink('testLink', 'testfile1')
+        self._emptyBuffers()
+        def _getFirstAttrs(_):
+            d = self.client.getAttrs('testLink', 1)
+            self._emptyBuffers()
+            return d
+        def _getSecondAttrs(firstAttrs):
+            d = self.client.getAttrs('testfile1')
+            self._emptyBuffers()
+            d.addCallback(self.assertEqual, firstAttrs)
+            return d
+        d.addCallback(_getFirstAttrs)
+        return d.addCallback(_getSecondAttrs)
+
+    def testLinkPath(self):
+        d = self.client.makeLink('testLink', 'testfile1')
+        self._emptyBuffers()
+        def _readLink(_):
+            d = self.client.readLink('testLink')
+            self._emptyBuffers()
+            d.addCallback(self.assertEqual,
+                          os.path.join(os.getcwd(), self.testDir, 'testfile1'))
+            return d
+        def _realPath(_):
+            d = self.client.realPath('testLink')
+            self._emptyBuffers()
+            d.addCallback(self.assertEqual,
+                          os.path.join(os.getcwd(), self.testDir, 'testfile1'))
+            return d
+        d.addCallback(_readLink)
+        d.addCallback(_realPath)
+        return d
+
+    def testExtendedRequest(self):
+        d = self.client.extendedRequest('testExtendedRequest', 'foo')
+        self._emptyBuffers()
+        d.addCallback(self.assertEqual, 'bar')
+        d.addCallback(self._cbTestExtendedRequest)
+        return d
+
+    def _cbTestExtendedRequest(self, ignored):
+        d = self.client.extendedRequest('testBadRequest', '')
+        self._emptyBuffers()
+        return self.assertFailure(d, NotImplementedError)
+
+
+class FakeConn:
+    def sendClose(self, channel):
+        pass
+
+
+class TestFileTransferClose(unittest.TestCase):
+
+    if not unix:
+        skip = "can't run on non-posix computers"
+
+    def setUp(self):
+        self.avatar = TestAvatar()
+
+    def buildServerConnection(self):
+        # make a server connection
+        conn = connection.SSHConnection()
+        # server connections have a 'self.transport.avatar'.
+        class DummyTransport:
+            def __init__(self):
+                self.transport = self
+            def sendPacket(self, kind, data):
+                pass
+            def logPrefix(self):
+                return 'dummy transport'
+        conn.transport = DummyTransport()
+        conn.transport.avatar = self.avatar
+        return conn
+
+    def interceptConnectionLost(self, sftpServer):
+        self.connectionLostFired = False
+        origConnectionLost = sftpServer.connectionLost
+        def connectionLost(reason):
+            self.connectionLostFired = True
+            origConnectionLost(reason)
+        sftpServer.connectionLost = connectionLost
+
+    def assertSFTPConnectionLost(self):
+        self.assertTrue(self.connectionLostFired,
+            "sftpServer's connectionLost was not called")
+
+    def test_sessionClose(self):
+        """
+        Closing a session should notify an SFTP subsystem launched by that
+        session.
+        """
+        # make a session
+        testSession = session.SSHSession(conn=FakeConn(), avatar=self.avatar)
+
+        # start an SFTP subsystem on the session
+        testSession.request_subsystem(common.NS('sftp'))
+        sftpServer = testSession.client.transport.proto
+
+        # intercept connectionLost so we can check that it's called
+        self.interceptConnectionLost(sftpServer)
+
+        # close session
+        testSession.closeReceived()
+
+        self.assertSFTPConnectionLost()
+
+    def test_clientClosesChannelOnConnection(self):
+        """
+        A client sending CHANNEL_CLOSE should trigger closeReceived on the
+        associated channel instance.
+        """
+        conn = self.buildServerConnection()
+
+        # somehow get a session
+        packet = common.NS('session') + struct.pack('>L', 0) * 3
+        conn.ssh_CHANNEL_OPEN(packet)
+        sessionChannel = conn.channels[0]
+
+        sessionChannel.request_subsystem(common.NS('sftp'))
+        sftpServer = sessionChannel.client.transport.proto
+        # intercept connectionLost so we can check that it's called
+        self.interceptConnectionLost(sftpServer)
+
+        # close the connection
+        conn.ssh_CHANNEL_CLOSE(struct.pack('>L', 0))
+
+        self.assertSFTPConnectionLost()
+
+
+    def test_stopConnectionServiceClosesChannel(self):
+        """
+        Closing an SSH connection should close all sessions within it.
+        """
+        conn = self.buildServerConnection()
+
+        # somehow get a session
+        packet = common.NS('session') + struct.pack('>L', 0) * 3
+        conn.ssh_CHANNEL_OPEN(packet)
+        sessionChannel = conn.channels[0]
+
+        sessionChannel.request_subsystem(common.NS('sftp'))
+        sftpServer = sessionChannel.client.transport.proto
+        self.interceptConnectionLost(sftpServer)
+
+        # close the connection
+        conn.serviceStopped()
+
+        self.assertSFTPConnectionLost()
+
+
+
+class TestConstants(unittest.TestCase):
+    """
+    Tests for the constants used by the SFTP protocol implementation.
+
+    @ivar filexferSpecExcerpts: Excerpts from the
+        draft-ietf-secsh-filexfer-02.txt (draft) specification of the SFTP
+        protocol.  There are more recent drafts of the specification, but this
+        one describes version 3, which is what conch (and OpenSSH) implements.
+    """
+
+
+    filexferSpecExcerpts = [
+        """
+           The following values are defined for packet types.
+
+                #define SSH_FXP_INIT                1
+                #define SSH_FXP_VERSION             2
+                #define SSH_FXP_OPEN                3
+                #define SSH_FXP_CLOSE               4
+                #define SSH_FXP_READ                5
+                #define SSH_FXP_WRITE               6
+                #define SSH_FXP_LSTAT               7
+                #define SSH_FXP_FSTAT               8
+                #define SSH_FXP_SETSTAT             9
+                #define SSH_FXP_FSETSTAT           10
+                #define SSH_FXP_OPENDIR            11
+                #define SSH_FXP_READDIR            12
+                #define SSH_FXP_REMOVE             13
+                #define SSH_FXP_MKDIR              14
+                #define SSH_FXP_RMDIR              15
+                #define SSH_FXP_REALPATH           16
+                #define SSH_FXP_STAT               17
+                #define SSH_FXP_RENAME             18
+                #define SSH_FXP_READLINK           19
+                #define SSH_FXP_SYMLINK            20
+                #define SSH_FXP_STATUS            101
+                #define SSH_FXP_HANDLE            102
+                #define SSH_FXP_DATA              103
+                #define SSH_FXP_NAME              104
+                #define SSH_FXP_ATTRS             105
+                #define SSH_FXP_EXTENDED          200
+                #define SSH_FXP_EXTENDED_REPLY    201
+
+           Additional packet types should only be defined if the protocol
+           version number (see Section ``Protocol Initialization'') is
+           incremented, and their use MUST be negotiated using the version
+           number.  However, the SSH_FXP_EXTENDED and SSH_FXP_EXTENDED_REPLY
+           packets can be used to implement vendor-specific extensions.  See
+           Section ``Vendor-Specific-Extensions'' for more details.
+        """,
+        """
+            The flags bits are defined to have the following values:
+
+                #define SSH_FILEXFER_ATTR_SIZE          0x00000001
+                #define SSH_FILEXFER_ATTR_UIDGID        0x00000002
+                #define SSH_FILEXFER_ATTR_PERMISSIONS   0x00000004
+                #define SSH_FILEXFER_ATTR_ACMODTIME     0x00000008
+                #define SSH_FILEXFER_ATTR_EXTENDED      0x80000000
+
+        """,
+        """
+            The `pflags' field is a bitmask.  The following bits have been
+           defined.
+
+                #define SSH_FXF_READ            0x00000001
+                #define SSH_FXF_WRITE           0x00000002
+                #define SSH_FXF_APPEND          0x00000004
+                #define SSH_FXF_CREAT           0x00000008
+                #define SSH_FXF_TRUNC           0x00000010
+                #define SSH_FXF_EXCL            0x00000020
+        """,
+        """
+            Currently, the following values are defined (other values may be
+           defined by future versions of this protocol):
+
+                #define SSH_FX_OK                            0
+                #define SSH_FX_EOF                           1
+                #define SSH_FX_NO_SUCH_FILE                  2
+                #define SSH_FX_PERMISSION_DENIED             3
+                #define SSH_FX_FAILURE                       4
+                #define SSH_FX_BAD_MESSAGE                   5
+                #define SSH_FX_NO_CONNECTION                 6
+                #define SSH_FX_CONNECTION_LOST               7
+                #define SSH_FX_OP_UNSUPPORTED                8
+        """]
+
+
+    def test_constantsAgainstSpec(self):
+        """
+        The constants used by the SFTP protocol implementation match those
+        found by searching through the spec.
+        """
+        constants = {}
+        for excerpt in self.filexferSpecExcerpts:
+            for line in excerpt.splitlines():
+                m = re.match('^\s*#define SSH_([A-Z_]+)\s+([0-9x]*)\s*$', line)
+                if m:
+                    constants[m.group(1)] = long(m.group(2), 0)
+        self.assertTrue(
+            len(constants) > 0, "No constants found (the test must be buggy).")
+        for k, v in constants.items():
+            self.assertEqual(v, getattr(filetransfer, k))
+
+
+
+class TestRawPacketData(unittest.TestCase):
+    """
+    Tests for L{filetransfer.FileTransferClient} which explicitly craft certain
+    less common protocol messages to exercise their handling.
+    """
+    def setUp(self):
+        self.ftc = filetransfer.FileTransferClient()
+
+
+    def test_packetSTATUS(self):
+        """
+        A STATUS packet containing a result code, a message, and a language is
+        parsed to produce the result of an outstanding request L{Deferred}.
+
+        @see: U{section 9.1<http://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.1>}
+            of the SFTP Internet-Draft.
+        """
+        d = defer.Deferred()
+        d.addCallback(self._cbTestPacketSTATUS)
+        self.ftc.openRequests[1] = d
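+        # A STATUS reply body is: uint32 request id, uint32 status code, then
+        # string error message and string language tag; this packet carries
+        # both strings.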
+        data = struct.pack('!LL', 1, filetransfer.FX_OK) + common.NS('msg') + common.NS('lang')
+        self.ftc.packet_STATUS(data)
+        return d
+
+
+    def _cbTestPacketSTATUS(self, result):
+        """
+        Assert that the result is a two-tuple containing the message and
+        language from the STATUS packet.
+        """
+        self.assertEqual(result[0], 'msg')
+        self.assertEqual(result[1], 'lang')
+
+
+    def test_packetSTATUSShort(self):
+        """
+        A STATUS packet containing only a result code can also be parsed to
+        produce the result of an outstanding request L{Deferred}.  Such packets
+        are sent by some SFTP implementations, though not strictly legal.
+
+        @see: U{section 9.1<http://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.1>}
+            of the SFTP Internet-Draft.
+        """
+        d = defer.Deferred()
+        d.addCallback(self._cbTestPacketSTATUSShort)
+        self.ftc.openRequests[1] = d
+        data = struct.pack('!LL', 1, filetransfer.FX_OK)
+        self.ftc.packet_STATUS(data)
+        return d
+
+
+    def _cbTestPacketSTATUSShort(self, result):
+        """
+        Assert that the result is a two-tuple containing empty strings, since
+        the STATUS packet had neither a message nor a language.
+        """
+        self.assertEqual(result[0], '')
+        self.assertEqual(result[1], '')
+
+
+    def test_packetSTATUSWithoutLang(self):
+        """
+        A STATUS packet containing a result code and a message but no language
+        can also be parsed to produce the result of an outstanding request
+        L{Deferred}.  Such packets are sent by some SFTP implementations, though
+        not strictly legal.
+
+        @see: U{section 9.1<http://tools.ietf.org/html/draft-ietf-secsh-filexfer-13#section-9.1>}
+            of the SFTP Internet-Draft.
+        """
+        d = defer.Deferred()
+        d.addCallback(self._cbTestPacketSTATUSWithoutLang)
+        self.ftc.openRequests[1] = d
+        data = struct.pack('!LL', 1, filetransfer.FX_OK) + common.NS('msg')
+        self.ftc.packet_STATUS(data)
+        return d
+
+
+    def _cbTestPacketSTATUSWithoutLang(self, result):
+        """
+        Assert that the result is a two-tuple containing the message from the
+        STATUS packet and an empty string, since the language was missing.
+        """
+        self.assertEqual(result[0], 'msg')
+        self.assertEqual(result[1], '')
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_helper.py b/ThirdParty/Twisted/twisted/conch/test/test_helper.py
new file mode 100644
index 0000000..7064d03
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_helper.py
@@ -0,0 +1,560 @@
+# -*- test-case-name: twisted.conch.test.test_helper -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.conch.insults import helper
+from twisted.conch.insults.insults import G0, G1, G2, G3
+from twisted.conch.insults.insults import modes, privateModes
+from twisted.conch.insults.insults import NORMAL, BOLD, UNDERLINE, BLINK, REVERSE_VIDEO
+
+from twisted.trial import unittest
+
+WIDTH = 80
+HEIGHT = 24
+
+class BufferTestCase(unittest.TestCase):
+    def setUp(self):
+        self.term = helper.TerminalBuffer()
+        self.term.connectionMade()
+
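+    # Throughout these tests reportCursorPosition() returns (column, row),
+    # zero-indexed from the top-left corner of the WIDTH x HEIGHT buffer.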
+    def testInitialState(self):
+        self.assertEqual(self.term.width, WIDTH)
+        self.assertEqual(self.term.height, HEIGHT)
+        self.assertEqual(str(self.term),
+                          '\n' * (HEIGHT - 1))
+        self.assertEqual(self.term.reportCursorPosition(), (0, 0))
+
+
+    def test_initialPrivateModes(self):
+        """
+        Verify that only DEC Auto Wrap Mode (DECAWM) and DEC Text Cursor Enable
+        Mode (DECTCEM) are initially in the Set Mode (SM) state.
+        """
+        self.assertEqual(
+            {privateModes.AUTO_WRAP: True,
+             privateModes.CURSOR_MODE: True},
+            self.term.privateModes)
+
+
+    def test_carriageReturn(self):
+        """
+        C{"\r"} moves the cursor to the first column in the current row.
+        """
+        self.term.cursorForward(5)
+        self.term.cursorDown(3)
+        self.assertEqual(self.term.reportCursorPosition(), (5, 3))
+        self.term.insertAtCursor("\r")
+        self.assertEqual(self.term.reportCursorPosition(), (0, 3))
+
+
+    def test_linefeed(self):
+        """
+        C{"\n"} moves the cursor to the next row without changing the column.
+        """
+        self.term.cursorForward(5)
+        self.assertEqual(self.term.reportCursorPosition(), (5, 0))
+        self.term.insertAtCursor("\n")
+        self.assertEqual(self.term.reportCursorPosition(), (5, 1))
+
+
+    def test_newline(self):
+        """
+        C{write} transforms C{"\n"} into C{"\r\n"}.
+        """
+        self.term.cursorForward(5)
+        self.term.cursorDown(3)
+        self.assertEqual(self.term.reportCursorPosition(), (5, 3))
+        self.term.write("\n")
+        self.assertEqual(self.term.reportCursorPosition(), (0, 4))
+
+
+    def test_setPrivateModes(self):
+        """
+        Verify that L{helper.TerminalBuffer.setPrivateModes} changes the Set
+        Mode (SM) state to "set" for the private modes it is passed.
+        """
+        expected = self.term.privateModes.copy()
+        self.term.setPrivateModes([privateModes.SCROLL, privateModes.SCREEN])
+        expected[privateModes.SCROLL] = True
+        expected[privateModes.SCREEN] = True
+        self.assertEqual(expected, self.term.privateModes)
+
+
+    def test_resetPrivateModes(self):
+        """
+        Verify that L{helper.TerminalBuffer.resetPrivateModes} changes the Set
+        Mode (SM) state to "reset" for the private modes it is passed.
+        """
+        expected = self.term.privateModes.copy()
+        self.term.resetPrivateModes([privateModes.AUTO_WRAP, privateModes.CURSOR_MODE])
+        del expected[privateModes.AUTO_WRAP]
+        del expected[privateModes.CURSOR_MODE]
+        self.assertEqual(expected, self.term.privateModes)
+
+
+    def testCursorDown(self):
+        self.term.cursorDown(3)
+        self.assertEqual(self.term.reportCursorPosition(), (0, 3))
+        self.term.cursorDown()
+        self.assertEqual(self.term.reportCursorPosition(), (0, 4))
+        self.term.cursorDown(HEIGHT)
+        self.assertEqual(self.term.reportCursorPosition(), (0, HEIGHT - 1))
+
+    def testCursorUp(self):
+        self.term.cursorUp(5)
+        self.assertEqual(self.term.reportCursorPosition(), (0, 0))
+
+        self.term.cursorDown(20)
+        self.term.cursorUp(1)
+        self.assertEqual(self.term.reportCursorPosition(), (0, 19))
+
+        self.term.cursorUp(19)
+        self.assertEqual(self.term.reportCursorPosition(), (0, 0))
+
+    def testCursorForward(self):
+        self.term.cursorForward(2)
+        self.assertEqual(self.term.reportCursorPosition(), (2, 0))
+        self.term.cursorForward(2)
+        self.assertEqual(self.term.reportCursorPosition(), (4, 0))
+        self.term.cursorForward(WIDTH)
+        self.assertEqual(self.term.reportCursorPosition(), (WIDTH, 0))
+
+    def testCursorBackward(self):
+        self.term.cursorForward(10)
+        self.term.cursorBackward(2)
+        self.assertEqual(self.term.reportCursorPosition(), (8, 0))
+        self.term.cursorBackward(7)
+        self.assertEqual(self.term.reportCursorPosition(), (1, 0))
+        self.term.cursorBackward(1)
+        self.assertEqual(self.term.reportCursorPosition(), (0, 0))
+        self.term.cursorBackward(1)
+        self.assertEqual(self.term.reportCursorPosition(), (0, 0))
+
+    def testCursorPositioning(self):
+        self.term.cursorPosition(3, 9)
+        self.assertEqual(self.term.reportCursorPosition(), (3, 9))
+
+    def testSimpleWriting(self):
+        s = "Hello, world."
+        self.term.write(s)
+        self.assertEqual(
+            str(self.term),
+            s + '\n' +
+            '\n' * (HEIGHT - 2))
+
+    def testOvertype(self):
+        s = "hello, world."
+        self.term.write(s)
+        self.term.cursorBackward(len(s))
+        self.term.resetModes([modes.IRM])
+        self.term.write("H")
+        self.assertEqual(
+            str(self.term),
+            ("H" + s[1:]) + '\n' +
+            '\n' * (HEIGHT - 2))
+
+    def testInsert(self):
+        s = "ello, world."
+        self.term.write(s)
+        self.term.cursorBackward(len(s))
+        self.term.setModes([modes.IRM])
+        self.term.write("H")
+        self.assertEqual(
+            str(self.term),
+            ("H" + s) + '\n' +
+            '\n' * (HEIGHT - 2))
+
+    def testWritingInTheMiddle(self):
+        s = "Hello, world."
+        self.term.cursorDown(5)
+        self.term.cursorForward(5)
+        self.term.write(s)
+        self.assertEqual(
+            str(self.term),
+            '\n' * 5 +
+            (self.term.fill * 5) + s + '\n' +
+            '\n' * (HEIGHT - 7))
+
+    def testWritingWrappedAtEndOfLine(self):
+        s = "Hello, world."
+        self.term.cursorForward(WIDTH - 5)
+        self.term.write(s)
+        self.assertEqual(
+            str(self.term),
+            s[:5].rjust(WIDTH) + '\n' +
+            s[5:] + '\n' +
+            '\n' * (HEIGHT - 3))
+
+    def testIndex(self):
+        self.term.index()
+        self.assertEqual(self.term.reportCursorPosition(), (0, 1))
+        self.term.cursorDown(HEIGHT)
+        self.assertEqual(self.term.reportCursorPosition(), (0, HEIGHT - 1))
+        self.term.index()
+        self.assertEqual(self.term.reportCursorPosition(), (0, HEIGHT - 1))
+
+    def testReverseIndex(self):
+        self.term.reverseIndex()
+        self.assertEqual(self.term.reportCursorPosition(), (0, 0))
+        self.term.cursorDown(2)
+        self.assertEqual(self.term.reportCursorPosition(), (0, 2))
+        self.term.reverseIndex()
+        self.assertEqual(self.term.reportCursorPosition(), (0, 1))
+
+    def test_nextLine(self):
+        """
+        C{nextLine} positions the cursor at the beginning of the row below the
+        current row.
+        """
+        self.term.nextLine()
+        self.assertEqual(self.term.reportCursorPosition(), (0, 1))
+        self.term.cursorForward(5)
+        self.assertEqual(self.term.reportCursorPosition(), (5, 1))
+        self.term.nextLine()
+        self.assertEqual(self.term.reportCursorPosition(), (0, 2))
+
+    def testSaveCursor(self):
+        self.term.cursorDown(5)
+        self.term.cursorForward(7)
+        self.assertEqual(self.term.reportCursorPosition(), (7, 5))
+        self.term.saveCursor()
+        self.term.cursorDown(7)
+        self.term.cursorBackward(3)
+        self.assertEqual(self.term.reportCursorPosition(), (4, 12))
+        self.term.restoreCursor()
+        self.assertEqual(self.term.reportCursorPosition(), (7, 5))
+
+    def testSingleShifts(self):
+        self.term.singleShift2()
+        self.term.write('Hi')
+
+        ch = self.term.getCharacter(0, 0)
+        self.assertEqual(ch[0], 'H')
+        self.assertEqual(ch[1].charset, G2)
+
+        ch = self.term.getCharacter(1, 0)
+        self.assertEqual(ch[0], 'i')
+        self.assertEqual(ch[1].charset, G0)
+
+        self.term.singleShift3()
+        self.term.write('!!')
+
+        ch = self.term.getCharacter(2, 0)
+        self.assertEqual(ch[0], '!')
+        self.assertEqual(ch[1].charset, G3)
+
+        ch = self.term.getCharacter(3, 0)
+        self.assertEqual(ch[0], '!')
+        self.assertEqual(ch[1].charset, G0)
+
+    def testShifting(self):
+        s1 = "Hello"
+        s2 = "World"
+        s3 = "Bye!"
+        self.term.write("Hello\n")
+        self.term.shiftOut()
+        self.term.write("World\n")
+        self.term.shiftIn()
+        self.term.write("Bye!\n")
+
+        g = G0
+        h = 0
+        for s in (s1, s2, s3):
+            for i in range(len(s)):
+                ch = self.term.getCharacter(i, h)
+                self.assertEqual(ch[0], s[i])
+                self.assertEqual(ch[1].charset, g)
+            g = g == G0 and G1 or G0
+            h += 1
+
+    def testGraphicRendition(self):
+        self.term.selectGraphicRendition(BOLD, UNDERLINE, BLINK, REVERSE_VIDEO)
+        self.term.write('W')
+        self.term.selectGraphicRendition(NORMAL)
+        self.term.write('X')
+        self.term.selectGraphicRendition(BLINK)
+        self.term.write('Y')
+        self.term.selectGraphicRendition(BOLD)
+        self.term.write('Z')
+
+        ch = self.term.getCharacter(0, 0)
+        self.assertEqual(ch[0], 'W')
+        self.failUnless(ch[1].bold)
+        self.failUnless(ch[1].underline)
+        self.failUnless(ch[1].blink)
+        self.failUnless(ch[1].reverseVideo)
+
+        ch = self.term.getCharacter(1, 0)
+        self.assertEqual(ch[0], 'X')
+        self.failIf(ch[1].bold)
+        self.failIf(ch[1].underline)
+        self.failIf(ch[1].blink)
+        self.failIf(ch[1].reverseVideo)
+
+        ch = self.term.getCharacter(2, 0)
+        self.assertEqual(ch[0], 'Y')
+        self.failUnless(ch[1].blink)
+        self.failIf(ch[1].bold)
+        self.failIf(ch[1].underline)
+        self.failIf(ch[1].reverseVideo)
+
+        ch = self.term.getCharacter(3, 0)
+        self.assertEqual(ch[0], 'Z')
+        self.failUnless(ch[1].blink)
+        self.failUnless(ch[1].bold)
+        self.failIf(ch[1].underline)
+        self.failIf(ch[1].reverseVideo)
+
+    def testColorAttributes(self):
+        s1 = "Merry xmas"
+        s2 = "Just kidding"
+        self.term.selectGraphicRendition(helper.FOREGROUND + helper.RED,
+                                         helper.BACKGROUND + helper.GREEN)
+        self.term.write(s1 + "\n")
+        self.term.selectGraphicRendition(NORMAL)
+        self.term.write(s2 + "\n")
+
+        for i in range(len(s1)):
+            ch = self.term.getCharacter(i, 0)
+            self.assertEqual(ch[0], s1[i])
+            self.assertEqual(ch[1].charset, G0)
+            self.assertEqual(ch[1].bold, False)
+            self.assertEqual(ch[1].underline, False)
+            self.assertEqual(ch[1].blink, False)
+            self.assertEqual(ch[1].reverseVideo, False)
+            self.assertEqual(ch[1].foreground, helper.RED)
+            self.assertEqual(ch[1].background, helper.GREEN)
+
+        for i in range(len(s2)):
+            ch = self.term.getCharacter(i, 1)
+            self.assertEqual(ch[0], s2[i])
+            self.assertEqual(ch[1].charset, G0)
+            self.assertEqual(ch[1].bold, False)
+            self.assertEqual(ch[1].underline, False)
+            self.assertEqual(ch[1].blink, False)
+            self.assertEqual(ch[1].reverseVideo, False)
+            self.assertEqual(ch[1].foreground, helper.WHITE)
+            self.assertEqual(ch[1].background, helper.BLACK)
+
+    def testEraseLine(self):
+        s1 = 'line 1'
+        s2 = 'line 2'
+        s3 = 'line 3'
+        self.term.write('\n'.join((s1, s2, s3)) + '\n')
+        self.term.cursorPosition(1, 1)
+        self.term.eraseLine()
+
+        self.assertEqual(
+            str(self.term),
+            s1 + '\n' +
+            '\n' +
+            s3 + '\n' +
+            '\n' * (HEIGHT - 4))
+
+    def testEraseToLineEnd(self):
+        s = 'Hello, world.'
+        self.term.write(s)
+        self.term.cursorBackward(5)
+        self.term.eraseToLineEnd()
+        self.assertEqual(
+            str(self.term),
+            s[:-5] + '\n' +
+            '\n' * (HEIGHT - 2))
+
+    def testEraseToLineBeginning(self):
+        s = 'Hello, world.'
+        self.term.write(s)
+        self.term.cursorBackward(5)
+        self.term.eraseToLineBeginning()
+        self.assertEqual(
+            str(self.term),
+            s[-4:].rjust(len(s)) + '\n' +
+            '\n' * (HEIGHT - 2))
+
+    def testEraseDisplay(self):
+        self.term.write('Hello world\n')
+        self.term.write('Goodbye world\n')
+        self.term.eraseDisplay()
+
+        self.assertEqual(
+            str(self.term),
+            '\n' * (HEIGHT - 1))
+
+    def testEraseToDisplayEnd(self):
+        s1 = "Hello world"
+        s2 = "Goodbye world"
+        self.term.write('\n'.join((s1, s2, '')))
+        self.term.cursorPosition(5, 1)
+        self.term.eraseToDisplayEnd()
+
+        self.assertEqual(
+            str(self.term),
+            s1 + '\n' +
+            s2[:5] + '\n' +
+            '\n' * (HEIGHT - 3))
+
+    def testEraseToDisplayBeginning(self):
+        s1 = "Hello world"
+        s2 = "Goodbye world"
+        self.term.write('\n'.join((s1, s2)))
+        self.term.cursorPosition(5, 1)
+        self.term.eraseToDisplayBeginning()
+
+        self.assertEqual(
+            str(self.term),
+            '\n' +
+            s2[6:].rjust(len(s2)) + '\n' +
+            '\n' * (HEIGHT - 3))
+
+    def testLineInsertion(self):
+        s1 = "Hello world"
+        s2 = "Goodbye world"
+        self.term.write('\n'.join((s1, s2)))
+        self.term.cursorPosition(7, 1)
+        self.term.insertLine()
+
+        self.assertEqual(
+            str(self.term),
+            s1 + '\n' +
+            '\n' +
+            s2 + '\n' +
+            '\n' * (HEIGHT - 4))
+
+    def testLineDeletion(self):
+        s1 = "Hello world"
+        s2 = "Middle words"
+        s3 = "Goodbye world"
+        self.term.write('\n'.join((s1, s2, s3)))
+        self.term.cursorPosition(9, 1)
+        self.term.deleteLine()
+
+        self.assertEqual(
+            str(self.term),
+            s1 + '\n' +
+            s3 + '\n' +
+            '\n' * (HEIGHT - 3))
+
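+# FakeDelayedCall and FakeScheduler stand in for the reactor's callLater so
+# the expect() timeout tests below can fire or cancel pending timeouts
+# explicitly instead of waiting for real time to pass.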
+class FakeDelayedCall:
+    called = False
+    cancelled = False
+    def __init__(self, fs, timeout, f, a, kw):
+        self.fs = fs
+        self.timeout = timeout
+        self.f = f
+        self.a = a
+        self.kw = kw
+
+    def active(self):
+        return not (self.cancelled or self.called)
+
+    def cancel(self):
+        self.cancelled = True
+#        self.fs.calls.remove(self)
+
+    def call(self):
+        self.called = True
+        self.f(*self.a, **self.kw)
+
+class FakeScheduler:
+    def __init__(self):
+        self.calls = []
+
+    def callLater(self, timeout, f, *a, **kw):
+        self.calls.append(FakeDelayedCall(self, timeout, f, a, kw))
+        return self.calls[-1]
+
+class ExpectTestCase(unittest.TestCase):
+    def setUp(self):
+        self.term = helper.ExpectableBuffer()
+        self.term.connectionMade()
+        self.fs = FakeScheduler()
+
+    def testSimpleString(self):
+        result = []
+        d = self.term.expect("hello world", timeout=1, scheduler=self.fs)
+        d.addCallback(result.append)
+
+        self.term.write("greeting puny earthlings\n")
+        self.failIf(result)
+        self.term.write("hello world\n")
+        self.failUnless(result)
+        self.assertEqual(result[0].group(), "hello world")
+        self.assertEqual(len(self.fs.calls), 1)
+        self.failIf(self.fs.calls[0].active())
+
+    def testBrokenUpString(self):
+        result = []
+        d = self.term.expect("hello world")
+        d.addCallback(result.append)
+
+        self.failIf(result)
+        self.term.write("hello ")
+        self.failIf(result)
+        self.term.write("worl")
+        self.failIf(result)
+        self.term.write("d")
+        self.failUnless(result)
+        self.assertEqual(result[0].group(), "hello world")
+
+
+    def testMultiple(self):
+        result = []
+        d1 = self.term.expect("hello ")
+        d1.addCallback(result.append)
+        d2 = self.term.expect("world")
+        d2.addCallback(result.append)
+
+        self.failIf(result)
+        self.term.write("hello")
+        self.failIf(result)
+        self.term.write(" ")
+        self.assertEqual(len(result), 1)
+        self.term.write("world")
+        self.assertEqual(len(result), 2)
+        self.assertEqual(result[0].group(), "hello ")
+        self.assertEqual(result[1].group(), "world")
+
+    def testSynchronous(self):
+        self.term.write("hello world")
+
+        result = []
+        d = self.term.expect("hello world")
+        d.addCallback(result.append)
+        self.failUnless(result)
+        self.assertEqual(result[0].group(), "hello world")
+
+    def testMultipleSynchronous(self):
+        self.term.write("goodbye world")
+
+        result = []
+        d1 = self.term.expect("bye")
+        d1.addCallback(result.append)
+        d2 = self.term.expect("world")
+        d2.addCallback(result.append)
+
+        self.assertEqual(len(result), 2)
+        self.assertEqual(result[0].group(), "bye")
+        self.assertEqual(result[1].group(), "world")
+
+    def _cbTestTimeoutFailure(self, res):
+        self.assert_(hasattr(res, 'type'))
+        self.assertEqual(res.type, helper.ExpectationTimeout)
+
+    def testTimeoutFailure(self):
+        d = self.term.expect("hello world", timeout=1, scheduler=self.fs)
+        d.addBoth(self._cbTestTimeoutFailure)
+        self.fs.calls[0].call()
+
+    def testOverlappingTimeout(self):
+        self.term.write("not zoomtastic")
+
+        result = []
+        d1 = self.term.expect("hello world", timeout=1, scheduler=self.fs)
+        d1.addBoth(self._cbTestTimeoutFailure)
+        d2 = self.term.expect("zoom")
+        d2.addCallback(result.append)
+
+        self.fs.calls[0].call()
+
+        self.assertEqual(len(result), 1)
+        self.assertEqual(result[0].group(), "zoom")
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_insults.py b/ThirdParty/Twisted/twisted/conch/test/test_insults.py
new file mode 100644
index 0000000..f313b5e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_insults.py
@@ -0,0 +1,496 @@
+# -*- test-case-name: twisted.conch.test.test_insults -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.trial import unittest
+from twisted.test.proto_helpers import StringTransport
+
+from twisted.conch.insults.insults import ServerProtocol, ClientProtocol
+from twisted.conch.insults.insults import CS_UK, CS_US, CS_DRAWING, CS_ALTERNATE, CS_ALTERNATE_SPECIAL
+from twisted.conch.insults.insults import G0, G1
+from twisted.conch.insults.insults import modes
+
+def _getattr(mock, name):
+    return super(Mock, mock).__getattribute__(name)
+
+def occurrences(mock):
+    return _getattr(mock, 'occurrences')
+
+def methods(mock):
+    return _getattr(mock, 'methods')
+
+def _append(mock, obj):
+    occurrences(mock).append(obj)
+
+default = object()
+
+class Mock(object):
+    callReturnValue = default
+
+    def __init__(self, methods=None, callReturnValue=default):
+        """
+        @param methods: Mapping of names to return values
+        @param callReturnValue: object __call__ should return
+        """
+        self.occurrences = []
+        if methods is None:
+            methods = {}
+        self.methods = methods
+        if callReturnValue is not default:
+            self.callReturnValue = callReturnValue
+
+    def __call__(self, *a, **kw):
+        returnValue = _getattr(self, 'callReturnValue')
+        if returnValue is default:
+            returnValue = Mock()
+        # _getattr(self, 'occurrences').append(('__call__', returnValue, a, kw))
+        _append(self, ('__call__', returnValue, a, kw))
+        return returnValue
+
+    def __getattribute__(self, name):
+        methods = _getattr(self, 'methods')
+        if name in methods:
+            attrValue = Mock(callReturnValue=methods[name])
+        else:
+            attrValue = Mock()
+        # _getattr(self, 'occurrences').append((name, attrValue))
+        _append(self, (name, attrValue))
+        return attrValue
+
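+# MockMixin.assertCall takes one recorded (attribute, mock) occurrence, checks
+# that the attribute was called exactly once with the expected arguments, and
+# returns the recorded call result so chained calls can be inspected in turn.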
+class MockMixin:
+    def assertCall(self, occurrence, methodName, expectedPositionalArgs=(),
+                   expectedKeywordArgs={}):
+        attr, mock = occurrence
+        self.assertEqual(attr, methodName)
+        self.assertEqual(len(occurrences(mock)), 1)
+        [(call, result, args, kw)] = occurrences(mock)
+        self.assertEqual(call, "__call__")
+        self.assertEqual(args, expectedPositionalArgs)
+        self.assertEqual(kw, expectedKeywordArgs)
+        return result
+
+
+_byteGroupingTestTemplate = """\
+def testByte%(groupName)s(self):
+    transport = StringTransport()
+    proto = Mock()
+    parser = self.protocolFactory(lambda: proto)
+    parser.factory = self
+    parser.makeConnection(transport)
+
+    bytes = self.TEST_BYTES
+    while bytes:
+        chunk = bytes[:%(bytesPer)d]
+        bytes = bytes[%(bytesPer)d:]
+        parser.dataReceived(chunk)
+
+    self.verifyResults(transport, proto, parser)
+"""
+class ByteGroupingsMixin(MockMixin):
+    protocolFactory = None
+
+    for word, n in [('Pairs', 2), ('Triples', 3), ('Quads', 4), ('Quints', 5), ('Sexes', 6)]:
+        exec _byteGroupingTestTemplate % {'groupName': word, 'bytesPer': n}
+    del word, n
+
+    def verifyResults(self, transport, proto, parser):
+        result = self.assertCall(occurrences(proto).pop(0), "makeConnection", (parser,))
+        self.assertEqual(occurrences(result), [])
+
+del _byteGroupingTestTemplate
+
+class ServerArrowKeys(ByteGroupingsMixin, unittest.TestCase):
+    protocolFactory = ServerProtocol
+
+    # All the arrow keys once
+    TEST_BYTES = '\x1b[A\x1b[B\x1b[C\x1b[D'
+
+    def verifyResults(self, transport, proto, parser):
+        ByteGroupingsMixin.verifyResults(self, transport, proto, parser)
+
+        for arrow in (parser.UP_ARROW, parser.DOWN_ARROW,
+                      parser.RIGHT_ARROW, parser.LEFT_ARROW):
+            result = self.assertCall(occurrences(proto).pop(0), "keystrokeReceived", (arrow, None))
+            self.assertEqual(occurrences(result), [])
+        self.failIf(occurrences(proto))
+
+
+class PrintableCharacters(ByteGroupingsMixin, unittest.TestCase):
+    protocolFactory = ServerProtocol
+
+    # Some letters and digits, first on their own, then capitalized,
+    # then modified with alt
+
+    TEST_BYTES = 'abc123ABC!@#\x1ba\x1bb\x1bc\x1b1\x1b2\x1b3'
+
+    def verifyResults(self, transport, proto, parser):
+        ByteGroupingsMixin.verifyResults(self, transport, proto, parser)
+
+        for char in 'abc123ABC!@#':
+            result = self.assertCall(occurrences(proto).pop(0), "keystrokeReceived", (char, None))
+            self.assertEqual(occurrences(result), [])
+
+        for char in 'abc123':
+            result = self.assertCall(occurrences(proto).pop(0), "keystrokeReceived", (char, parser.ALT))
+            self.assertEqual(occurrences(result), [])
+
+        occs = occurrences(proto)
+        self.failIf(occs, "%r should have been []" % (occs,))
+
+class ServerFunctionKeys(ByteGroupingsMixin, unittest.TestCase):
+    """Test for parsing and dispatching function keys (F1 - F12)
+    """
+    protocolFactory = ServerProtocol
+
+    byteList = []
+    for bytes in ('OP', 'OQ', 'OR', 'OS', # F1 - F4
+                  '15~', '17~', '18~', '19~', # F5 - F8
+                  '20~', '21~', '23~', '24~'): # F9 - F12
+        byteList.append('\x1b[' + bytes)
+    TEST_BYTES = ''.join(byteList)
+    del byteList, bytes
+
+    def verifyResults(self, transport, proto, parser):
+        ByteGroupingsMixin.verifyResults(self, transport, proto, parser)
+        for funcNum in range(1, 13):
+            funcArg = getattr(parser, 'F%d' % (funcNum,))
+            result = self.assertCall(occurrences(proto).pop(0), "keystrokeReceived", (funcArg, None))
+            self.assertEqual(occurrences(result), [])
+        self.failIf(occurrences(proto))
+
+class ClientCursorMovement(ByteGroupingsMixin, unittest.TestCase):
+    protocolFactory = ClientProtocol
+
+    d2 = "\x1b[2B"
+    r4 = "\x1b[4C"
+    u1 = "\x1b[A"
+    l2 = "\x1b[2D"
+    # Move the cursor down two, right four, up one, left two, up one, left two
+    TEST_BYTES = d2 + r4 + u1 + l2 + u1 + l2
+    del d2, r4, u1, l2
+
+    def verifyResults(self, transport, proto, parser):
+        ByteGroupingsMixin.verifyResults(self, transport, proto, parser)
+
+        for (method, count) in [('Down', 2), ('Forward', 4), ('Up', 1),
+                                ('Backward', 2), ('Up', 1), ('Backward', 2)]:
+            result = self.assertCall(occurrences(proto).pop(0), "cursor" + method, (count,))
+            self.assertEqual(occurrences(result), [])
+        self.failIf(occurrences(proto))
+
+class ClientControlSequences(unittest.TestCase, MockMixin):
+    def setUp(self):
+        self.transport = StringTransport()
+        self.proto = Mock()
+        self.parser = ClientProtocol(lambda: self.proto)
+        self.parser.factory = self
+        self.parser.makeConnection(self.transport)
+        result = self.assertCall(occurrences(self.proto).pop(0), "makeConnection", (self.parser,))
+        self.failIf(occurrences(result))
+
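+    # Each test below feeds raw escape sequences to the parser and checks
+    # that the matching terminal methods were recorded on the mock protocol;
+    # an omitted CSI parameter (the '' case in testSimpleCardinals) defaults
+    # to 1.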
+    def testSimpleCardinals(self):
+        self.parser.dataReceived(
+            ''.join([''.join(['\x1b[' + str(n) + ch for n in ('', 2, 20, 200)]) for ch in 'BACD']))
+        occs = occurrences(self.proto)
+
+        for meth in ("Down", "Up", "Forward", "Backward"):
+            for count in (1, 2, 20, 200):
+                result = self.assertCall(occs.pop(0), "cursor" + meth, (count,))
+                self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testScrollRegion(self):
+        self.parser.dataReceived('\x1b[5;22r\x1b[r')
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "setScrollRegion", (5, 22))
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "setScrollRegion", (None, None))
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testHeightAndWidth(self):
+        self.parser.dataReceived("\x1b#3\x1b#4\x1b#5\x1b#6")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "doubleHeightLine", (True,))
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "doubleHeightLine", (False,))
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "singleWidthLine")
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "doubleWidthLine")
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testCharacterSet(self):
+        self.parser.dataReceived(
+            ''.join([''.join(['\x1b' + g + n for n in 'AB012']) for g in '()']))
+        occs = occurrences(self.proto)
+
+        for which in (G0, G1):
+            for charset in (CS_UK, CS_US, CS_DRAWING, CS_ALTERNATE, CS_ALTERNATE_SPECIAL):
+                result = self.assertCall(occs.pop(0), "selectCharacterSet", (charset, which))
+                self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testShifting(self):
+        self.parser.dataReceived("\x15\x14")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "shiftIn")
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "shiftOut")
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testSingleShifts(self):
+        self.parser.dataReceived("\x1bN\x1bO")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "singleShift2")
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "singleShift3")
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testKeypadMode(self):
+        self.parser.dataReceived("\x1b=\x1b>")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "applicationKeypadMode")
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "numericKeypadMode")
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testCursor(self):
+        self.parser.dataReceived("\x1b7\x1b8")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "saveCursor")
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "restoreCursor")
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testReset(self):
+        self.parser.dataReceived("\x1bc")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "reset")
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testIndex(self):
+        self.parser.dataReceived("\x1bD\x1bM\x1bE")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "index")
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "reverseIndex")
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "nextLine")
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testModes(self):
+        self.parser.dataReceived(
+            "\x1b[" + ';'.join(map(str, [modes.KAM, modes.IRM, modes.LNM])) + "h")
+        self.parser.dataReceived(
+            "\x1b[" + ';'.join(map(str, [modes.KAM, modes.IRM, modes.LNM])) + "l")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "setModes", ([modes.KAM, modes.IRM, modes.LNM],))
+        self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "resetModes", ([modes.KAM, modes.IRM, modes.LNM],))
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testErasure(self):
+        self.parser.dataReceived(
+            "\x1b[K\x1b[1K\x1b[2K\x1b[J\x1b[1J\x1b[2J\x1b[3P")
+        occs = occurrences(self.proto)
+
+        for meth in ("eraseToLineEnd", "eraseToLineBeginning", "eraseLine",
+                     "eraseToDisplayEnd", "eraseToDisplayBeginning",
+                     "eraseDisplay"):
+            result = self.assertCall(occs.pop(0), meth)
+            self.failIf(occurrences(result))
+
+        result = self.assertCall(occs.pop(0), "deleteCharacter", (3,))
+        self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testLineDeletion(self):
+        self.parser.dataReceived("\x1b[M\x1b[3M")
+        occs = occurrences(self.proto)
+
+        for arg in (1, 3):
+            result = self.assertCall(occs.pop(0), "deleteLine", (arg,))
+            self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testLineInsertion(self):
+        self.parser.dataReceived("\x1b[L\x1b[3L")
+        occs = occurrences(self.proto)
+
+        for arg in (1, 3):
+            result = self.assertCall(occs.pop(0), "insertLine", (arg,))
+            self.failIf(occurrences(result))
+        self.failIf(occs)
+
+    def testCursorPosition(self):
+        methods(self.proto)['reportCursorPosition'] = (6, 7)
+        self.parser.dataReceived("\x1b[6n")
+        self.assertEqual(self.transport.value(), "\x1b[7;8R")
+        occs = occurrences(self.proto)
+
+        result = self.assertCall(occs.pop(0), "reportCursorPosition")
+        # This isn't really an interesting assert, since it only tests that
+        # our mock setup is working right, but I'll include it anyway.
+        self.assertEqual(result, (6, 7))
+
+
+    def test_applicationDataBytes(self):
+        """
+        Contiguous non-control bytes are passed to a single call to the
+        C{write} method of the terminal to which the L{ClientProtocol} is
+        connected.
+        """
+        occs = occurrences(self.proto)
+        self.parser.dataReceived('a')
+        self.assertCall(occs.pop(0), "write", ("a",))
+        self.parser.dataReceived('bc')
+        self.assertCall(occs.pop(0), "write", ("bc",))
+
+
+    def _applicationDataTest(self, data, calls):
+        occs = occurrences(self.proto)
+        self.parser.dataReceived(data)
+        while calls:
+            self.assertCall(occs.pop(0), *calls.pop(0))
+        self.assertFalse(occs, "No other calls should happen: %r" % (occs,))
+
+
+    def test_shiftInAfterApplicationData(self):
+        """
+        Application data bytes followed by a shift-in command are passed to a
+        call to C{write} before the terminal's C{shiftIn} method is called.
+        """
+        self._applicationDataTest(
+            'ab\x15', [
+                ("write", ("ab",)),
+                ("shiftIn",)])
+
+
+    def test_shiftOutAfterApplicationData(self):
+        """
+        Application data bytes followed by a shift-out command are passed to a
+        call to C{write} before the terminal's C{shiftOut} method is called.
+        """
+        self._applicationDataTest(
+            'ab\x14', [
+                ("write", ("ab",)),
+                ("shiftOut",)])
+
+
+    def test_cursorBackwardAfterApplicationData(self):
+        """
+        Application data bytes followed by a cursor-backward command are passed
+        to a call to C{write} before the terminal's C{cursorBackward} method is
+        called.
+        """
+        self._applicationDataTest(
+            'ab\x08', [
+                ("write", ("ab",)),
+                ("cursorBackward",)])
+
+
+    def test_escapeAfterApplicationData(self):
+        """
+        Application data bytes followed by an escape character are passed to a
+        call to C{write} before the terminal's handler method for the escape is
+        called.
+        """
+        # Test a short escape
+        self._applicationDataTest(
+            'ab\x1bD', [
+                ("write", ("ab",)),
+                ("index",)])
+
+        # And a long escape
+        self._applicationDataTest(
+            'ab\x1b[4h', [
+                ("write", ("ab",)),
+                ("setModes", ([4],))])
+
+        # There's some other cases too, but they're all handled by the same
+        # codepaths as above.
+
+
+
+class ServerProtocolOutputTests(unittest.TestCase):
+    """
+    Tests for the bytes L{ServerProtocol} writes to its transport when its
+    methods are called.
+    """
+    def test_nextLine(self):
+        """
+        L{ServerProtocol.nextLine} writes C{"\r\n"} to its transport.
+        """
+        # Why doesn't it write ESC E?  Because ESC E is poorly supported.  For
+        # example, gnome-terminal (many different versions) fails to scroll if
+        # it receives ESC E and the cursor is already on the last row.
+        protocol = ServerProtocol()
+        transport = StringTransport()
+        protocol.makeConnection(transport)
+        protocol.nextLine()
+        self.assertEqual(transport.value(), "\r\n")
+
+
+
+class Deprecations(unittest.TestCase):
+    """
+    Tests to ensure deprecation of L{insults.colors} and L{insults.client}
+    """
+
+    def ensureDeprecated(self, message):
+        """
+        Ensures that the correct deprecation warning was issued.
+        """
+        warnings = self.flushWarnings()
+        self.assertIdentical(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(warnings[0]['message'], message)
+        self.assertEqual(len(warnings), 1)
+
+
+    def test_colors(self):
+        """
+        The L{insults.colors} module is deprecated
+        """
+        from twisted.conch.insults import colors
+        self.ensureDeprecated("twisted.conch.insults.colors was deprecated "
+                              "in Twisted 10.1.0: Please use "
+                              "twisted.conch.insults.helper instead.")
+
+
+    def test_client(self):
+        """
+        The L{insults.client} module is deprecated
+        """
+        from twisted.conch.insults import client
+        self.ensureDeprecated("twisted.conch.insults.client was deprecated "
+                              "in Twisted 10.1.0: Please use "
+                              "twisted.conch.insults.insults instead.")
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_keys.py b/ThirdParty/Twisted/twisted/conch/test/test_keys.py
new file mode 100644
index 0000000..8403e1e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_keys.py
@@ -0,0 +1,644 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.ssh.keys}.
+"""
+
+try:
+    import Crypto.Cipher.DES3
+except ImportError:
+    # we'll have to skip these tests without PyCrypto and pyasn1
+    Crypto = None
+
+try:
+    import pyasn1
+except ImportError:
+    pyasn1 = None
+
+if Crypto and pyasn1:
+    from twisted.conch.ssh import keys, common, sexpy
+
+import os, base64
+from twisted.conch.test import keydata
+from twisted.python import randbytes
+from twisted.python.hashlib import sha1
+from twisted.trial import unittest
+
+
+class HelpersTestCase(unittest.TestCase):
+
+    if Crypto is None:
+        skip = "cannot run w/o PyCrypto"
+    if pyasn1 is None:
+        skip = "Cannot run without PyASN1"
+
+    def setUp(self):
+        self._secureRandom = randbytes.secureRandom
+        randbytes.secureRandom = lambda x: '\x55' * x
+
+    def tearDown(self):
+        randbytes.secureRandom = self._secureRandom
+        self._secureRandom = None
+
+    def test_pkcs1(self):
+        """
+        Test Public Key Cryptographic Standard #1 functions.
+        """
+        data = 'ABC'
+        messageSize = 6
+        self.assertEqual(keys.pkcs1Pad(data, messageSize),
+                '\x01\xff\x00ABC')
+        hash = sha1().digest()
+        messageSize = 40
+        self.assertEqual(keys.pkcs1Digest('', messageSize),
+                '\x01\xff\xff\xff\x00' + keys.ID_SHA1 + hash)
+
+    def _signRSA(self, data):
+        key = keys.Key.fromString(keydata.privateRSA_openssh)
+        sig = key.sign(data)
+        return key.keyObject, sig
+
+    def _signDSA(self, data):
+        key = keys.Key.fromString(keydata.privateDSA_openssh)
+        sig = key.sign(data)
+        return key.keyObject, sig
+
+    def test_signRSA(self):
+        """
+        Test that RSA keys return appropriate signatures.
+        """
+        data = 'data'
+        key, sig = self._signRSA(data)
+        sigData = keys.pkcs1Digest(data, keys.lenSig(key))
+        v = key.sign(sigData, '')[0]
+        self.assertEqual(sig, common.NS('ssh-rsa') + common.MP(v))
+        return key, sig
+
+    def test_signDSA(self):
+        """
+        Test that DSA keys return appropriate signatures.
+        """
+        data = 'data'
+        key, sig = self._signDSA(data)
+        sigData = sha1(data).digest()
+        v = key.sign(sigData, '\x55' * 19)
+        self.assertEqual(sig, common.NS('ssh-dss') + common.NS(
+            Crypto.Util.number.long_to_bytes(v[0], 20) +
+            Crypto.Util.number.long_to_bytes(v[1], 20)))
+        return key, sig
+
+
+    def test_objectType(self):
+        """
+        Test that objectType returns the correct type for objects.
+        """
+        self.assertEqual(keys.objectType(keys.Key.fromString(
+            keydata.privateRSA_openssh).keyObject), 'ssh-rsa')
+        self.assertEqual(keys.objectType(keys.Key.fromString(
+            keydata.privateDSA_openssh).keyObject), 'ssh-dss')
+        self.assertRaises(keys.BadKeyError, keys.objectType, None)
+
+
+class KeyTestCase(unittest.TestCase):
+
+    if Crypto is None:
+        skip = "cannot run w/o PyCrypto"
+    if pyasn1 is None:
+        skip = "Cannot run without PyASN1"
+
+    def setUp(self):
+        self.rsaObj = Crypto.PublicKey.RSA.construct((1L, 2L, 3L, 4L, 5L))
+        self.dsaObj = Crypto.PublicKey.DSA.construct((1L, 2L, 3L, 4L, 5L))
+        self.rsaSignature = ('\x00\x00\x00\x07ssh-rsa\x00'
+            '\x00\x00`N\xac\xb4 at qK\xa0(\xc3\xf2h \xd3\xdd\xee6Np\x9d_'
+            '\xb0>\xe3\x0c(L\x9d{\txUd|!\xf6m\x9c\xd3\x93\x842\x7fU'
+            '\x05\xf4\xf7\xfaD\xda\xce\x81\x8ea\x7f=Y\xed*\xb7\xba\x81'
+            '\xf2\xad\xda\xeb(\x97\x03S\x08\x81\xc7\xb1\xb7\xe6\xe3'
+            '\xcd*\xd4\xbd\xc0wt\xf7y\xcd\xf0\xb7\x7f\xfb\x1e>\xf9r'
+            '\x8c\xba')
+        self.dsaSignature = ('\x00\x00\x00\x07ssh-dss\x00\x00'
+            '\x00(\x18z)H\x8a\x1b\xc6\r\xbbq\xa2\xd7f\x7f$\xa7\xbf'
+            '\xe8\x87\x8c\x88\xef\xd9k\x1a\x98\xdd{=\xdec\x18\t\xe3'
+            '\x87\xa9\xc72h\x95')
+        self.oldSecureRandom = randbytes.secureRandom
+        randbytes.secureRandom = lambda x: '\xff' * x
+        self.keyFile = self.mktemp()
+        file(self.keyFile, 'wb').write(keydata.privateRSA_lsh)
+
+    def tearDown(self):
+        randbytes.secureRandom = self.oldSecureRandom
+        del self.oldSecureRandom
+        os.unlink(self.keyFile)
+
+    def test__guessStringType(self):
+        """
+        Test that the _guessStringType method guesses string types
+        correctly.
+        """
+        self.assertEqual(keys.Key._guessStringType(keydata.publicRSA_openssh),
+                'public_openssh')
+        self.assertEqual(keys.Key._guessStringType(keydata.publicDSA_openssh),
+                'public_openssh')
+        self.assertEqual(keys.Key._guessStringType(
+            keydata.privateRSA_openssh), 'private_openssh')
+        self.assertEqual(keys.Key._guessStringType(
+            keydata.privateDSA_openssh), 'private_openssh')
+        self.assertEqual(keys.Key._guessStringType(keydata.publicRSA_lsh),
+                'public_lsh')
+        self.assertEqual(keys.Key._guessStringType(keydata.publicDSA_lsh),
+                'public_lsh')
+        self.assertEqual(keys.Key._guessStringType(keydata.privateRSA_lsh),
+                'private_lsh')
+        self.assertEqual(keys.Key._guessStringType(keydata.privateDSA_lsh),
+                'private_lsh')
+        self.assertEqual(keys.Key._guessStringType(
+            keydata.privateRSA_agentv3), 'agentv3')
+        self.assertEqual(keys.Key._guessStringType(
+            keydata.privateDSA_agentv3), 'agentv3')
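+        # A raw key blob starts with a 4-byte length prefix followed by the
+        # key type string ('ssh-rsa' or 'ssh-dss' here).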
+        self.assertEqual(keys.Key._guessStringType(
+            '\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x01\x01'),
+            'blob')
+        self.assertEqual(keys.Key._guessStringType(
+            '\x00\x00\x00\x07ssh-dss\x00\x00\x00\x01\x01'),
+            'blob')
+        self.assertEqual(keys.Key._guessStringType('not a key'),
+                None)
+
+    def _testPublicPrivateFromString(self, public, private, type, data):
+        self._testPublicFromString(public, type, data)
+        self._testPrivateFromString(private, type, data)
+
+    def _testPublicFromString(self, public, type, data):
+        publicKey = keys.Key.fromString(public)
+        self.assertTrue(publicKey.isPublic())
+        self.assertEqual(publicKey.type(), type)
+        for k, v in publicKey.data().items():
+            self.assertEqual(data[k], v)
+
+    def _testPrivateFromString(self, private, type, data):
+        privateKey = keys.Key.fromString(private)
+        self.assertFalse(privateKey.isPublic())
+        self.assertEqual(privateKey.type(), type)
+        for k, v in data.items():
+            self.assertEqual(privateKey.data()[k], v)
+
+    def test_fromOpenSSH(self):
+        """
+        Test that keys are correctly generated from OpenSSH strings.
+        """
+        self._testPublicPrivateFromString(keydata.publicRSA_openssh,
+                keydata.privateRSA_openssh, 'RSA', keydata.RSAData)
+        self.assertEqual(keys.Key.fromString(
+            keydata.privateRSA_openssh_encrypted,
+            passphrase='encrypted'),
+            keys.Key.fromString(keydata.privateRSA_openssh))
+        self.assertEqual(keys.Key.fromString(
+            keydata.privateRSA_openssh_alternate),
+            keys.Key.fromString(keydata.privateRSA_openssh))
+        self._testPublicPrivateFromString(keydata.publicDSA_openssh,
+                keydata.privateDSA_openssh, 'DSA', keydata.DSAData)
+
+    def test_fromOpenSSH_with_whitespace(self):
+        """
+        If a key string has trailing whitespace, it should be ignored.
+        """
+        # Key data taken from bug #3391, since our own test key data doesn't
+        # exhibit the appended-newline issue.
+        privateDSAData = """-----BEGIN DSA PRIVATE KEY-----
+MIIBuwIBAAKBgQDylESNuc61jq2yatCzZbenlr9llG+p9LhIpOLUbXhhHcwC6hrh
+EZIdCKqTO0USLrGoP5uS9UHAUoeN62Z0KXXWTwOWGEQn/syyPzNJtnBorHpNUT9D
+Qzwl1yUa53NNgEctpo4NoEFOx8PuU6iFLyvgHCjNn2MsuGuzkZm7sI9ZpQIVAJiR
+9dPc08KLdpJyRxz8T74b4FQRAoGAGBc4Z5Y6R/HZi7AYM/iNOM8su6hrk8ypkBwR
+a3Dbhzk97fuV3SF1SDrcQu4zF7c4CtH609N5nfZs2SUjLLGPWln83Ysb8qhh55Em
+AcHXuROrHS/sDsnqu8FQp86MaudrqMExCOYyVPE7jaBWW+/JWFbKCxmgOCSdViUJ
+esJpBFsCgYEA7+jtVvSt9yrwsS/YU1QGP5wRAiDYB+T5cK4HytzAqJKRdC5qS4zf
+C7R0eKcDHHLMYO39aPnCwXjscisnInEhYGNblTDyPyiyNxAOXuC8x7luTmwzMbNJ
+/ow0IqSj0VF72VJN9uSoPpFd4lLT0zN8v42RWja0M8ohWNf+YNJluPgCFE0PT4Vm
+SUrCyZXsNh6VXwjs3gKQ
+-----END DSA PRIVATE KEY-----"""
+        self.assertEqual(keys.Key.fromString(privateDSAData),
+                         keys.Key.fromString(privateDSAData + '\n'))
+
+    def test_fromNewerOpenSSH(self):
+        """
+        Newer versions of OpenSSH generate encrypted keys which have a longer
+        IV than the older versions.  These newer keys are also loaded.
+        """
+        key = keys.Key.fromString(keydata.privateRSA_openssh_encrypted_aes,
+                                  passphrase='testxp')
+        self.assertEqual(key.type(), 'RSA')
+        key2 = keys.Key.fromString(
+            keydata.privateRSA_openssh_encrypted_aes + '\n',
+            passphrase='testxp')
+        self.assertEqual(key, key2)
+
+
+    def test_fromLSH(self):
+        """
+        Test that keys are correctly generated from LSH strings.
+        """
+        self._testPublicPrivateFromString(keydata.publicRSA_lsh,
+                keydata.privateRSA_lsh, 'RSA', keydata.RSAData)
+        self._testPublicPrivateFromString(keydata.publicDSA_lsh,
+                keydata.privateDSA_lsh, 'DSA', keydata.DSAData)
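+        # A public LSH key is a base64-encoded s-expression wrapped in curly
+        # braces; a private LSH key is the raw s-expression.  In both cases an
+        # unrecognised key type must be rejected.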
+        sexp = sexpy.pack([['public-key', ['bad-key', ['p', '2']]]])
+        self.assertRaises(keys.BadKeyError, keys.Key.fromString,
+                data='{'+base64.encodestring(sexp)+'}')
+        sexp = sexpy.pack([['private-key', ['bad-key', ['p', '2']]]])
+        self.assertRaises(keys.BadKeyError, keys.Key.fromString,
+                sexp)
+
+    def test_fromAgentv3(self):
+        """
+        Test that keys are correctly generated from Agent v3 strings.
+        """
+        self._testPrivateFromString(keydata.privateRSA_agentv3, 'RSA',
+                keydata.RSAData)
+        self._testPrivateFromString(keydata.privateDSA_agentv3, 'DSA',
+                keydata.DSAData)
+        self.assertRaises(keys.BadKeyError, keys.Key.fromString,
+                '\x00\x00\x00\x07ssh-foo'+'\x00\x00\x00\x01\x01'*5)
+
+    def test_fromStringErrors(self):
+        """
+        keys.Key.fromString should raise BadKeyError when the key is invalid.
+        """
+        self.assertRaises(keys.BadKeyError, keys.Key.fromString, '')
+        # no key data with a bad key type
+        self.assertRaises(keys.BadKeyError, keys.Key.fromString, '',
+                'bad_type')
+        # trying to decrypt a key which doesn't support encryption
+        self.assertRaises(keys.BadKeyError, keys.Key.fromString,
+                keydata.publicRSA_lsh, passphrase='unencrypted')
+        # trying to decrypt an unencrypted key
+        self.assertRaises(keys.EncryptedKeyError, keys.Key.fromString,
+                keys.Key(self.rsaObj).toString('openssh', 'encrypted'))
+        # key with no key data
+        self.assertRaises(keys.BadKeyError, keys.Key.fromString,
+                '-----BEGIN RSA KEY-----\nwA==\n')
+        # key with invalid DEK Info
+        self.assertRaises(
+            keys.BadKeyError, keys.Key.fromString,
+            """-----BEGIN ENCRYPTED RSA KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: weird type
+
+4Ed/a9OgJWHJsne7yOGWeWMzHYKsxuP9w1v0aYcp+puS75wvhHLiUnNwxz0KDi6n
+T3YkKLBsoCWS68ApR2J9yeQ6R+EyS+UQDrO9nwqo3DB5BT3Ggt8S1wE7vjNLQD0H
+g/SJnlqwsECNhh8aAx+Ag0m3ZKOZiRD5mCkcDQsZET7URSmFytDKOjhFn3u6ZFVB
+sXrfpYc6TJtOQlHd/52JB6aAbjt6afSv955Z7enIi+5yEJ5y7oYQTaE5zrFMP7N5
+9LbfJFlKXxEddy/DErRLxEjmC+t4svHesoJKc2jjjyNPiOoGGF3kJXea62vsjdNV
+gMK5Eged3TBVIk2dv8rtJUvyFeCUtjQ1UJZIebScRR47KrbsIpCmU8I4/uHWm5hW
+0mOwvdx1L/mqx/BHqVU9Dw2COhOdLbFxlFI92chkovkmNk4P48ziyVnpm7ME22sE
+vfCMsyirdqB1mrL4CSM7FXONv+CgfBfeYVkYW8RfJac9U1L/O+JNn7yee414O/rS
+hRYw4UdWnH6Gg6niklVKWNY0ZwUZC8zgm2iqy8YCYuneS37jC+OEKP+/s6HSKuqk
+2bzcl3/TcZXNSM815hnFRpz0anuyAsvwPNRyvxG2/DacJHL1f6luV4B0o6W410yf
+qXQx01DLo7nuyhJqoH3UGCyyXB+/QUs0mbG2PAEn3f5dVs31JMdbt+PrxURXXjKk
+4cexpUcIpqqlfpIRe3RD0sDVbH4OXsGhi2kiTfPZu7mgyFxKopRbn1KwU1qKinfY
+EU9O4PoTak/tPT+5jFNhaP+HrURoi/pU8EAUNSktl7xAkHYwkN/9Cm7DeBghgf3n
+8+tyCGYDsB5utPD0/Xe9yx0Qhc/kMm4xIyQDyA937dk3mUvLC9vulnAP8I+Izim0
+fZ182+D1bWwykoD0997mUHG/AUChWR01V1OLwRyPv2wUtiS8VNG76Y2aqKlgqP1P
+V+IvIEqR4ERvSBVFzXNF8Y6j/sVxo8+aZw+d0L1Ns/R55deErGg3B8i/2EqGd3r+
+0jps9BqFHHWW87n3VyEB3jWCMj8Vi2EJIfa/7pSaViFIQn8LiBLf+zxG5LTOToK5
+xkN42fReDcqi3UNfKNGnv4dsplyTR2hyx65lsj4bRKDGLKOuB1y7iB0AGb0LtcAI
+dcsVlcCeUquDXtqKvRnwfIMg+ZunyjqHBhj3qgRgbXbT6zjaSdNnih569aTg0Vup
+VykzZ7+n/KVcGLmvX0NesdoI7TKbq4TnEIOynuG5Sf+2GpARO5bjcWKSZeN/Ybgk
+gccf8Cqf6XWqiwlWd0B7BR3SymeHIaSymC45wmbgdstrbk7Ppa2Tp9AZku8M2Y7c
+8mY9b+onK075/ypiwBm4L4GRNTFLnoNQJXx0OSl4FNRWsn6ztbD+jZhu8Seu10Jw
+SEJVJ+gmTKdRLYORJKyqhDet6g7kAxs4EoJ25WsOnX5nNr00rit+NkMPA7xbJT+7
+CfI51GQLw7pUPeO2WNt6yZO/YkzZrqvTj5FEwybkUyBv7L0gkqu9wjfDdUw0fVHE
+xEm4DxjEoaIp8dW/JOzXQ2EF+WaSOgdYsw3Ac+rnnjnNptCdOEDGP6QBkt+oXj4P
+-----END RSA PRIVATE KEY-----""")
+        # key with invalid encryption type
+        self.assertRaises(
+            keys.BadKeyError, keys.Key.fromString,
+            """-----BEGIN ENCRYPTED RSA KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: FOO-123-BAR,01234567
+
+4Ed/a9OgJWHJsne7yOGWeWMzHYKsxuP9w1v0aYcp+puS75wvhHLiUnNwxz0KDi6n
+T3YkKLBsoCWS68ApR2J9yeQ6R+EyS+UQDrO9nwqo3DB5BT3Ggt8S1wE7vjNLQD0H
+g/SJnlqwsECNhh8aAx+Ag0m3ZKOZiRD5mCkcDQsZET7URSmFytDKOjhFn3u6ZFVB
+sXrfpYc6TJtOQlHd/52JB6aAbjt6afSv955Z7enIi+5yEJ5y7oYQTaE5zrFMP7N5
+9LbfJFlKXxEddy/DErRLxEjmC+t4svHesoJKc2jjjyNPiOoGGF3kJXea62vsjdNV
+gMK5Eged3TBVIk2dv8rtJUvyFeCUtjQ1UJZIebScRR47KrbsIpCmU8I4/uHWm5hW
+0mOwvdx1L/mqx/BHqVU9Dw2COhOdLbFxlFI92chkovkmNk4P48ziyVnpm7ME22sE
+vfCMsyirdqB1mrL4CSM7FXONv+CgfBfeYVkYW8RfJac9U1L/O+JNn7yee414O/rS
+hRYw4UdWnH6Gg6niklVKWNY0ZwUZC8zgm2iqy8YCYuneS37jC+OEKP+/s6HSKuqk
+2bzcl3/TcZXNSM815hnFRpz0anuyAsvwPNRyvxG2/DacJHL1f6luV4B0o6W410yf
+qXQx01DLo7nuyhJqoH3UGCyyXB+/QUs0mbG2PAEn3f5dVs31JMdbt+PrxURXXjKk
+4cexpUcIpqqlfpIRe3RD0sDVbH4OXsGhi2kiTfPZu7mgyFxKopRbn1KwU1qKinfY
+EU9O4PoTak/tPT+5jFNhaP+HrURoi/pU8EAUNSktl7xAkHYwkN/9Cm7DeBghgf3n
+8+tyCGYDsB5utPD0/Xe9yx0Qhc/kMm4xIyQDyA937dk3mUvLC9vulnAP8I+Izim0
+fZ182+D1bWwykoD0997mUHG/AUChWR01V1OLwRyPv2wUtiS8VNG76Y2aqKlgqP1P
+V+IvIEqR4ERvSBVFzXNF8Y6j/sVxo8+aZw+d0L1Ns/R55deErGg3B8i/2EqGd3r+
+0jps9BqFHHWW87n3VyEB3jWCMj8Vi2EJIfa/7pSaViFIQn8LiBLf+zxG5LTOToK5
+xkN42fReDcqi3UNfKNGnv4dsplyTR2hyx65lsj4bRKDGLKOuB1y7iB0AGb0LtcAI
+dcsVlcCeUquDXtqKvRnwfIMg+ZunyjqHBhj3qgRgbXbT6zjaSdNnih569aTg0Vup
+VykzZ7+n/KVcGLmvX0NesdoI7TKbq4TnEIOynuG5Sf+2GpARO5bjcWKSZeN/Ybgk
+gccf8Cqf6XWqiwlWd0B7BR3SymeHIaSymC45wmbgdstrbk7Ppa2Tp9AZku8M2Y7c
+8mY9b+onK075/ypiwBm4L4GRNTFLnoNQJXx0OSl4FNRWsn6ztbD+jZhu8Seu10Jw
+SEJVJ+gmTKdRLYORJKyqhDet6g7kAxs4EoJ25WsOnX5nNr00rit+NkMPA7xbJT+7
+CfI51GQLw7pUPeO2WNt6yZO/YkzZrqvTj5FEwybkUyBv7L0gkqu9wjfDdUw0fVHE
+xEm4DxjEoaIp8dW/JOzXQ2EF+WaSOgdYsw3Ac+rnnjnNptCdOEDGP6QBkt+oXj4P
+-----END RSA PRIVATE KEY-----""")
+        # key with bad IV (AES)
+        self.assertRaises(
+            keys.BadKeyError, keys.Key.fromString,
+            """-----BEGIN ENCRYPTED RSA KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: AES-128-CBC,01234
+
+4Ed/a9OgJWHJsne7yOGWeWMzHYKsxuP9w1v0aYcp+puS75wvhHLiUnNwxz0KDi6n
+T3YkKLBsoCWS68ApR2J9yeQ6R+EyS+UQDrO9nwqo3DB5BT3Ggt8S1wE7vjNLQD0H
+g/SJnlqwsECNhh8aAx+Ag0m3ZKOZiRD5mCkcDQsZET7URSmFytDKOjhFn3u6ZFVB
+sXrfpYc6TJtOQlHd/52JB6aAbjt6afSv955Z7enIi+5yEJ5y7oYQTaE5zrFMP7N5
+9LbfJFlKXxEddy/DErRLxEjmC+t4svHesoJKc2jjjyNPiOoGGF3kJXea62vsjdNV
+gMK5Eged3TBVIk2dv8rtJUvyFeCUtjQ1UJZIebScRR47KrbsIpCmU8I4/uHWm5hW
+0mOwvdx1L/mqx/BHqVU9Dw2COhOdLbFxlFI92chkovkmNk4P48ziyVnpm7ME22sE
+vfCMsyirdqB1mrL4CSM7FXONv+CgfBfeYVkYW8RfJac9U1L/O+JNn7yee414O/rS
+hRYw4UdWnH6Gg6niklVKWNY0ZwUZC8zgm2iqy8YCYuneS37jC+OEKP+/s6HSKuqk
+2bzcl3/TcZXNSM815hnFRpz0anuyAsvwPNRyvxG2/DacJHL1f6luV4B0o6W410yf
+qXQx01DLo7nuyhJqoH3UGCyyXB+/QUs0mbG2PAEn3f5dVs31JMdbt+PrxURXXjKk
+4cexpUcIpqqlfpIRe3RD0sDVbH4OXsGhi2kiTfPZu7mgyFxKopRbn1KwU1qKinfY
+EU9O4PoTak/tPT+5jFNhaP+HrURoi/pU8EAUNSktl7xAkHYwkN/9Cm7DeBghgf3n
+8+tyCGYDsB5utPD0/Xe9yx0Qhc/kMm4xIyQDyA937dk3mUvLC9vulnAP8I+Izim0
+fZ182+D1bWwykoD0997mUHG/AUChWR01V1OLwRyPv2wUtiS8VNG76Y2aqKlgqP1P
+V+IvIEqR4ERvSBVFzXNF8Y6j/sVxo8+aZw+d0L1Ns/R55deErGg3B8i/2EqGd3r+
+0jps9BqFHHWW87n3VyEB3jWCMj8Vi2EJIfa/7pSaViFIQn8LiBLf+zxG5LTOToK5
+xkN42fReDcqi3UNfKNGnv4dsplyTR2hyx65lsj4bRKDGLKOuB1y7iB0AGb0LtcAI
+dcsVlcCeUquDXtqKvRnwfIMg+ZunyjqHBhj3qgRgbXbT6zjaSdNnih569aTg0Vup
+VykzZ7+n/KVcGLmvX0NesdoI7TKbq4TnEIOynuG5Sf+2GpARO5bjcWKSZeN/Ybgk
+gccf8Cqf6XWqiwlWd0B7BR3SymeHIaSymC45wmbgdstrbk7Ppa2Tp9AZku8M2Y7c
+8mY9b+onK075/ypiwBm4L4GRNTFLnoNQJXx0OSl4FNRWsn6ztbD+jZhu8Seu10Jw
+SEJVJ+gmTKdRLYORJKyqhDet6g7kAxs4EoJ25WsOnX5nNr00rit+NkMPA7xbJT+7
+CfI51GQLw7pUPeO2WNt6yZO/YkzZrqvTj5FEwybkUyBv7L0gkqu9wjfDdUw0fVHE
+xEm4DxjEoaIp8dW/JOzXQ2EF+WaSOgdYsw3Ac+rnnjnNptCdOEDGP6QBkt+oXj4P
+-----END RSA PRIVATE KEY-----""")
+        # key with bad IV (DES3)
+        self.assertRaises(
+            keys.BadKeyError, keys.Key.fromString,
+            """-----BEGIN ENCRYPTED RSA KEY-----
+Proc-Type: 4,ENCRYPTED
+DEK-Info: DES-EDE3-CBC,01234
+
+4Ed/a9OgJWHJsne7yOGWeWMzHYKsxuP9w1v0aYcp+puS75wvhHLiUnNwxz0KDi6n
+T3YkKLBsoCWS68ApR2J9yeQ6R+EyS+UQDrO9nwqo3DB5BT3Ggt8S1wE7vjNLQD0H
+g/SJnlqwsECNhh8aAx+Ag0m3ZKOZiRD5mCkcDQsZET7URSmFytDKOjhFn3u6ZFVB
+sXrfpYc6TJtOQlHd/52JB6aAbjt6afSv955Z7enIi+5yEJ5y7oYQTaE5zrFMP7N5
+9LbfJFlKXxEddy/DErRLxEjmC+t4svHesoJKc2jjjyNPiOoGGF3kJXea62vsjdNV
+gMK5Eged3TBVIk2dv8rtJUvyFeCUtjQ1UJZIebScRR47KrbsIpCmU8I4/uHWm5hW
+0mOwvdx1L/mqx/BHqVU9Dw2COhOdLbFxlFI92chkovkmNk4P48ziyVnpm7ME22sE
+vfCMsyirdqB1mrL4CSM7FXONv+CgfBfeYVkYW8RfJac9U1L/O+JNn7yee414O/rS
+hRYw4UdWnH6Gg6niklVKWNY0ZwUZC8zgm2iqy8YCYuneS37jC+OEKP+/s6HSKuqk
+2bzcl3/TcZXNSM815hnFRpz0anuyAsvwPNRyvxG2/DacJHL1f6luV4B0o6W410yf
+qXQx01DLo7nuyhJqoH3UGCyyXB+/QUs0mbG2PAEn3f5dVs31JMdbt+PrxURXXjKk
+4cexpUcIpqqlfpIRe3RD0sDVbH4OXsGhi2kiTfPZu7mgyFxKopRbn1KwU1qKinfY
+EU9O4PoTak/tPT+5jFNhaP+HrURoi/pU8EAUNSktl7xAkHYwkN/9Cm7DeBghgf3n
+8+tyCGYDsB5utPD0/Xe9yx0Qhc/kMm4xIyQDyA937dk3mUvLC9vulnAP8I+Izim0
+fZ182+D1bWwykoD0997mUHG/AUChWR01V1OLwRyPv2wUtiS8VNG76Y2aqKlgqP1P
+V+IvIEqR4ERvSBVFzXNF8Y6j/sVxo8+aZw+d0L1Ns/R55deErGg3B8i/2EqGd3r+
+0jps9BqFHHWW87n3VyEB3jWCMj8Vi2EJIfa/7pSaViFIQn8LiBLf+zxG5LTOToK5
+xkN42fReDcqi3UNfKNGnv4dsplyTR2hyx65lsj4bRKDGLKOuB1y7iB0AGb0LtcAI
+dcsVlcCeUquDXtqKvRnwfIMg+ZunyjqHBhj3qgRgbXbT6zjaSdNnih569aTg0Vup
+VykzZ7+n/KVcGLmvX0NesdoI7TKbq4TnEIOynuG5Sf+2GpARO5bjcWKSZeN/Ybgk
+gccf8Cqf6XWqiwlWd0B7BR3SymeHIaSymC45wmbgdstrbk7Ppa2Tp9AZku8M2Y7c
+8mY9b+onK075/ypiwBm4L4GRNTFLnoNQJXx0OSl4FNRWsn6ztbD+jZhu8Seu10Jw
+SEJVJ+gmTKdRLYORJKyqhDet6g7kAxs4EoJ25WsOnX5nNr00rit+NkMPA7xbJT+7
+CfI51GQLw7pUPeO2WNt6yZO/YkzZrqvTj5FEwybkUyBv7L0gkqu9wjfDdUw0fVHE
+xEm4DxjEoaIp8dW/JOzXQ2EF+WaSOgdYsw3Ac+rnnjnNptCdOEDGP6QBkt+oXj4P
+-----END RSA PRIVATE KEY-----""")
+
+    def test_fromFile(self):
+        """
+        Test that fromFile works correctly.
+        """
+        self.assertEqual(keys.Key.fromFile(self.keyFile),
+                keys.Key.fromString(keydata.privateRSA_lsh))
+        self.assertRaises(keys.BadKeyError, keys.Key.fromFile,
+                self.keyFile, 'bad_type')
+        self.assertRaises(keys.BadKeyError, keys.Key.fromFile,
+                self.keyFile, passphrase='unencrypted')
+
+    def test_init(self):
+        """
+        Test that a Key object is initialized correctly from a PyCrypto key
+        object.
+        """
+        obj = Crypto.PublicKey.RSA.construct((1L, 2L))
+        key = keys.Key(obj)
+        self.assertEqual(key.keyObject, obj)
+
+    def test_equal(self):
+        """
+        Test that Key objects are compared correctly.
+        """
+        rsa1 = keys.Key(self.rsaObj)
+        rsa2 = keys.Key(self.rsaObj)
+        rsa3 = keys.Key(Crypto.PublicKey.RSA.construct((1L, 2L)))
+        dsa = keys.Key(self.dsaObj)
+        self.assertTrue(rsa1 == rsa2)
+        self.assertFalse(rsa1 == rsa3)
+        self.assertFalse(rsa1 == dsa)
+        self.assertFalse(rsa1 == object)
+        self.assertFalse(rsa1 == None)
+
+    def test_notEqual(self):
+        """
+        Test that Key object inequality comparisons work correctly.
+        """
+        rsa1 = keys.Key(self.rsaObj)
+        rsa2 = keys.Key(self.rsaObj)
+        rsa3 = keys.Key(Crypto.PublicKey.RSA.construct((1L, 2L)))
+        dsa = keys.Key(self.dsaObj)
+        self.assertFalse(rsa1 != rsa2)
+        self.assertTrue(rsa1 != rsa3)
+        self.assertTrue(rsa1 != dsa)
+        self.assertTrue(rsa1 != object)
+        self.assertTrue(rsa1 != None)
+
+    def test_type(self):
+        """
+        Test that the type method returns the correct type for an object.
+        """
+        self.assertEqual(keys.Key(self.rsaObj).type(), 'RSA')
+        self.assertEqual(keys.Key(self.rsaObj).sshType(), 'ssh-rsa')
+        self.assertEqual(keys.Key(self.dsaObj).type(), 'DSA')
+        self.assertEqual(keys.Key(self.dsaObj).sshType(), 'ssh-dss')
+        self.assertRaises(RuntimeError, keys.Key(None).type)
+        self.assertRaises(RuntimeError, keys.Key(None).sshType)
+        self.assertRaises(RuntimeError, keys.Key(self).type)
+        self.assertRaises(RuntimeError, keys.Key(self).sshType)
+
+    def test_fromBlob(self):
+        """
+        Test that a public key is correctly generated from a public key blob.
+        """
+        rsaBlob = common.NS('ssh-rsa') + common.MP(2) + common.MP(3)
+        rsaKey = keys.Key.fromString(rsaBlob)
+        dsaBlob = (common.NS('ssh-dss') + common.MP(2) + common.MP(3) +
+                common.MP(4) + common.MP(5))
+        dsaKey = keys.Key.fromString(dsaBlob)
+        badBlob = common.NS('ssh-bad')
+        self.assertTrue(rsaKey.isPublic())
+        self.assertEqual(rsaKey.data(), {'e':2L, 'n':3L})
+        self.assertTrue(dsaKey.isPublic())
+        self.assertEqual(dsaKey.data(), {'p':2L, 'q':3L, 'g':4L, 'y':5L})
+        self.assertRaises(keys.BadKeyError,
+                keys.Key.fromString, badBlob)
+
+
+    def test_fromPrivateBlob(self):
+        """
+        Test that a private key is correctly generated from a private key blob.
+        """
+        rsaBlob = (common.NS('ssh-rsa') + common.MP(2) + common.MP(3) +
+                   common.MP(4) + common.MP(5) + common.MP(6) + common.MP(7))
+        rsaKey = keys.Key._fromString_PRIVATE_BLOB(rsaBlob)
+        dsaBlob = (common.NS('ssh-dss') + common.MP(2) + common.MP(3) +
+                   common.MP(4) + common.MP(5) + common.MP(6))
+        dsaKey = keys.Key._fromString_PRIVATE_BLOB(dsaBlob)
+        badBlob = common.NS('ssh-bad')
+        self.assertFalse(rsaKey.isPublic())
+        self.assertEqual(
+            rsaKey.data(), {'n':2L, 'e':3L, 'd':4L, 'u':5L, 'p':6L, 'q':7L})
+        self.assertFalse(dsaKey.isPublic())
+        self.assertEqual(dsaKey.data(), {'p':2L, 'q':3L, 'g':4L, 'y':5L, 'x':6L})
+        self.assertRaises(
+            keys.BadKeyError, keys.Key._fromString_PRIVATE_BLOB, badBlob)
+
+
+    def test_blob(self):
+        """
+        Test that the Key object generates blobs correctly.
+        """
+        self.assertEqual(keys.Key(self.rsaObj).blob(),
+                '\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x01\x02'
+                '\x00\x00\x00\x01\x01')
+        self.assertEqual(keys.Key(self.dsaObj).blob(),
+                '\x00\x00\x00\x07ssh-dss\x00\x00\x00\x01\x03'
+                '\x00\x00\x00\x01\x04\x00\x00\x00\x01\x02'
+                '\x00\x00\x00\x01\x01')
+
+        badKey = keys.Key(None)
+        self.assertRaises(RuntimeError, badKey.blob)
+
+
+    def test_privateBlob(self):
+        """
+        L{Key.privateBlob} returns the SSH protocol-level format of the private
+        key and raises L{RuntimeError} if the underlying key object is invalid.
+        """
+        self.assertEqual(keys.Key(self.rsaObj).privateBlob(),
+                '\x00\x00\x00\x07ssh-rsa\x00\x00\x00\x01\x01'
+                '\x00\x00\x00\x01\x02\x00\x00\x00\x01\x03\x00'
+                '\x00\x00\x01\x04\x00\x00\x00\x01\x04\x00\x00'
+                '\x00\x01\x05')
+        self.assertEqual(keys.Key(self.dsaObj).privateBlob(),
+                '\x00\x00\x00\x07ssh-dss\x00\x00\x00\x01\x03'
+                '\x00\x00\x00\x01\x04\x00\x00\x00\x01\x02\x00'
+                '\x00\x00\x01\x01\x00\x00\x00\x01\x05')
+
+        badKey = keys.Key(None)
+        self.assertRaises(RuntimeError, badKey.privateBlob)
+
+
+    def test_toOpenSSH(self):
+        """
+        Test that the Key object generates OpenSSH keys correctly.
+        """
+        key = keys.Key.fromString(keydata.privateRSA_lsh)
+        self.assertEqual(key.toString('openssh'), keydata.privateRSA_openssh)
+        self.assertEqual(key.toString('openssh', 'encrypted'),
+                keydata.privateRSA_openssh_encrypted)
+        self.assertEqual(key.public().toString('openssh'),
+                keydata.publicRSA_openssh[:-8]) # no comment
+        self.assertEqual(key.public().toString('openssh', 'comment'),
+                keydata.publicRSA_openssh)
+        key = keys.Key.fromString(keydata.privateDSA_lsh)
+        self.assertEqual(key.toString('openssh'), keydata.privateDSA_openssh)
+        self.assertEqual(key.public().toString('openssh', 'comment'),
+                keydata.publicDSA_openssh)
+        self.assertEqual(key.public().toString('openssh'),
+                keydata.publicDSA_openssh[:-8]) # no comment
+
+    def test_toLSH(self):
+        """
+        Test that the Key object generates LSH keys correctly.
+        """
+        key = keys.Key.fromString(keydata.privateRSA_openssh)
+        self.assertEqual(key.toString('lsh'), keydata.privateRSA_lsh)
+        self.assertEqual(key.public().toString('lsh'),
+                keydata.publicRSA_lsh)
+        key = keys.Key.fromString(keydata.privateDSA_openssh)
+        self.assertEqual(key.toString('lsh'), keydata.privateDSA_lsh)
+        self.assertEqual(key.public().toString('lsh'),
+                keydata.publicDSA_lsh)
+
+    def test_toAgentv3(self):
+        """
+        Test that the Key object generates Agent v3 keys correctly.
+        """
+        key = keys.Key.fromString(keydata.privateRSA_openssh)
+        self.assertEqual(key.toString('agentv3'), keydata.privateRSA_agentv3)
+        key = keys.Key.fromString(keydata.privateDSA_openssh)
+        self.assertEqual(key.toString('agentv3'), keydata.privateDSA_agentv3)
+
+    def test_toStringErrors(self):
+        """
+        Test that toString raises errors appropriately.
+        """
+        self.assertRaises(keys.BadKeyError, keys.Key(self.rsaObj).toString,
+                'bad_type')
+
+    def test_sign(self):
+        """
+        Test that the Key object generates correct signatures.
+        """
+        key = keys.Key.fromString(keydata.privateRSA_openssh)
+        self.assertEqual(key.sign(''), self.rsaSignature)
+        key = keys.Key.fromString(keydata.privateDSA_openssh)
+        self.assertEqual(key.sign(''), self.dsaSignature)
+
+
+    def test_verify(self):
+        """
+        Test that the Key object correctly verifies signatures.
+        """
+        key = keys.Key.fromString(keydata.publicRSA_openssh)
+        self.assertTrue(key.verify(self.rsaSignature, ''))
+        self.assertFalse(key.verify(self.rsaSignature, 'a'))
+        self.assertFalse(key.verify(self.dsaSignature, ''))
+        key = keys.Key.fromString(keydata.publicDSA_openssh)
+        self.assertTrue(key.verify(self.dsaSignature, ''))
+        self.assertFalse(key.verify(self.dsaSignature, 'a'))
+        self.assertFalse(key.verify(self.rsaSignature, ''))
+
+
+    def test_verifyDSANoPrefix(self):
+        """
+        Some commercial SSH servers send DSA signatures as two bare 20-byte
+        numbers, without the usual 'ssh-dss' prefix; such signatures are still
+        verified as valid.
+        """
+        key = keys.Key.fromString(keydata.publicDSA_openssh)
+        self.assertTrue(key.verify(self.dsaSignature[-40:], ''))
+
+
+    def test_repr(self):
+        """
+        Test the pretty representation of Key.
+        """
+        self.assertEqual(repr(keys.Key(self.rsaObj)),
+"""<RSA Private Key (0 bits)
+attr d:
+\t03
+attr e:
+\t02
+attr n:
+\t01
+attr p:
+\t04
+attr q:
+\t05
+attr u:
+\t04>""")
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_knownhosts.py b/ThirdParty/Twisted/twisted/conch/test/test_knownhosts.py
new file mode 100644
index 0000000..d7fdacf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_knownhosts.py
@@ -0,0 +1,1037 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.client.knownhosts}.
+"""
+
+import os
+from binascii import Error as BinasciiError, b2a_base64, a2b_base64
+
+try:
+    import Crypto
+    import pyasn1
+except ImportError:
+    skip = "PyCrypto and PyASN1 required for twisted.conch.knownhosts."
+else:
+    from twisted.conch.ssh.keys import Key, BadKeyError
+    from twisted.conch.client.knownhosts import \
+        PlainEntry, HashedEntry, KnownHostsFile, UnparsedEntry, ConsoleUI
+    from twisted.conch.client import default
+
+from zope.interface.verify import verifyObject
+
+from twisted.python.filepath import FilePath
+from twisted.trial.unittest import TestCase
+from twisted.internet.defer import Deferred
+from twisted.conch.interfaces import IKnownHostEntry
+from twisted.conch.error import HostKeyChanged, UserRejectedKey, InvalidEntry
+
+
+sampleEncodedKey = (
+    'AAAAB3NzaC1yc2EAAAABIwAAAQEAsV0VMRbGmzhqxxayLRHmvnFvtyNqgbNKV46dU1bVFB+3y'
+    'tNvue4Riqv/SVkPRNwMb7eWH29SviXaBxUhYyzKkDoNUq3rTNnH1Vnif6d6X4JCrUb5d3W+Dm'
+    'YClyJrZ5HgD/hUpdSkTRqdbQ2TrvSAxRacj+vHHT4F4dm1bJSewm3B2D8HVOoi/CbVh3dsIiC'
+    'dp8VltdZx4qYVfYe2LwVINCbAa3d3tj9ma7RVfw3OH2Mfb+toLd1N5tBQFb7oqTt2nC6I/6Bd'
+    '4JwPUld+IEitw/suElq/AIJVQXXujeyiZlea90HE65U2mF1ytr17HTAIT2ySokJWyuBANGACk'
+    '6iIaw==')
+
+otherSampleEncodedKey = (
+    'AAAAB3NzaC1yc2EAAAABIwAAAIEAwaeCZd3UCuPXhX39+/p9qO028jTF76DMVd9mPvYVDVXuf'
+    'WckKZauF7+0b7qm+ChT7kan6BzRVo4++gCVNfAlMzLysSt3ylmOR48tFpAfygg9UCX3DjHz0E'
+    'lOOUKh3iifc9aUShD0OPaK3pR5JJ8jfiBfzSYWt/hDi/iZ4igsSs8=')
+
+thirdSampleEncodedKey = (
+    'AAAAB3NzaC1yc2EAAAABIwAAAQEAl/TQakPkePlnwCBRPitIVUTg6Z8VzN1en+DGkyo/evkmLw'
+    '7o4NWR5qbysk9A9jXW332nxnEuAnbcCam9SHe1su1liVfyIK0+3bdn0YRB0sXIbNEtMs2LtCho'
+    '/aV3cXPS+Cf1yut3wvIpaRnAzXxuKPCTXQ7/y0IXa8TwkRBH58OJa3RqfQ/NsSp5SAfdsrHyH2'
+    'aitiVKm2jfbTKzSEqOQG/zq4J9GXTkq61gZugory/Tvl5/yPgSnOR6C9jVOMHf27ZPoRtyj9SY'
+    '343Hd2QHiIE0KPZJEgCynKeWoKz8v6eTSK8n4rBnaqWdp8MnGZK1WGy05MguXbyCDuTC8AmJXQ'
+    '==')
+
+sampleKey = a2b_base64(sampleEncodedKey)
+otherSampleKey = a2b_base64(otherSampleEncodedKey)
+thirdSampleKey = a2b_base64(thirdSampleEncodedKey)
+
+samplePlaintextLine = (
+    "www.twistedmatrix.com ssh-rsa " + sampleEncodedKey + "\n")
+
+otherSamplePlaintextLine = (
+    "divmod.com ssh-rsa " + otherSampleEncodedKey + "\n")
+
+sampleHostIPLine = (
+    "www.twistedmatrix.com,198.49.126.131 ssh-rsa " + sampleEncodedKey + "\n")
+
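+# A hashed known_hosts entry is '|1|' + base64(salt) + '|' +
+# base64(HMAC-SHA1(key=salt, msg=hostname)), followed by the key type and the
+# base64-encoded key itself.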
+sampleHashedLine = (
+    "|1|gJbSEPBG9ZSBoZpHNtZBD1bHKBA=|bQv+0Xa0dByrwkA1EB0E7Xop/Fo= ssh-rsa " +
+    sampleEncodedKey + "\n")
+
+
+
+class EntryTestsMixin:
+    """
+    Tests for implementations of L{IKnownHostEntry}.  Subclasses must set the
+    'entry' attribute to a provider of that interface, the implementation of
+    that interface under test.
+
+    @ivar entry: a provider of L{IKnownHostEntry} with a hostname of
+    www.twistedmatrix.com and an RSA key of sampleKey.
+    """
+
+    def test_providesInterface(self):
+        """
+        The given entry should provide IKnownHostEntry.
+        """
+        verifyObject(IKnownHostEntry, self.entry)
+
+
+    def test_fromString(self):
+        """
+        Constructing a plain text entry from an unhashed known_hosts entry will
+        result in an L{IKnownHostEntry} provider with 'publicKey' and 'keyType'
+        attributes.  While outside the interface in question, these attributes
+        are held in common by the L{PlainEntry} and L{HashedEntry}
+        implementations; other implementations should override this method in
+        subclasses.
+        """
+        entry = self.entry
+        self.assertEqual(entry.publicKey, Key.fromString(sampleKey))
+        self.assertEqual(entry.keyType, "ssh-rsa")
+
+
+    def test_matchesKey(self):
+        """
+        L{IKnownHostEntry.matchesKey} checks to see if an entry matches a given
+        SSH key.
+        """
+        twistedmatrixDotCom = Key.fromString(sampleKey)
+        divmodDotCom = Key.fromString(otherSampleKey)
+        self.assertEqual(
+            True,
+            self.entry.matchesKey(twistedmatrixDotCom))
+        self.assertEqual(
+            False,
+            self.entry.matchesKey(divmodDotCom))
+
+
+    def test_matchesHost(self):
+        """
+        L{IKnownHostEntry.matchesHost} checks to see if an entry matches a
+        given hostname.
+        """
+        self.assertEqual(True, self.entry.matchesHost(
+                "www.twistedmatrix.com"))
+        self.assertEqual(False, self.entry.matchesHost(
+                "www.divmod.com"))
+
+
+
+class PlainEntryTests(EntryTestsMixin, TestCase):
+    """
+    Test cases for L{PlainEntry}.
+    """
+    plaintextLine = samplePlaintextLine
+    hostIPLine = sampleHostIPLine
+
+    def setUp(self):
+        """
+        Set 'entry' to a sample plain-text entry with sampleKey as its key.
+        """
+        self.entry = PlainEntry.fromString(self.plaintextLine)
+
+
+    def test_matchesHostIP(self):
+        """
+        A "hostname,ip" formatted line will match both the host and the IP.
+        """
+        self.entry = PlainEntry.fromString(self.hostIPLine)
+        self.assertEqual(True, self.entry.matchesHost("198.49.126.131"))
+        self.test_matchesHost()
+
+
+    def test_toString(self):
+        """
+        L{PlainEntry.toString} generates the serialized OpenSSH format string
+        for the entry, sans newline.
+        """
+        self.assertEqual(self.entry.toString(), self.plaintextLine.rstrip("\n"))
+        multiHostEntry = PlainEntry.fromString(self.hostIPLine)
+        self.assertEqual(multiHostEntry.toString(),
+                         self.hostIPLine.rstrip("\n"))
+
+
+
+class PlainTextWithCommentTests(PlainEntryTests):
+    """
+    Test cases for L{PlainEntry} when parsed from a line with a comment.
+    """
+
+    plaintextLine = samplePlaintextLine[:-1] + " plain text comment.\n"
+    hostIPLine = sampleHostIPLine[:-1] + " text following host/IP line\n"
+
+
+
+class HashedEntryTests(EntryTestsMixin, TestCase):
+    """
+    Tests for L{HashedEntry}.
+
+    This suite doesn't include any tests for host/IP pairs because hashed
+    entries store IP addresses the same way as hostnames and do not support
+    comma-separated lists.  (If you hash the IP and host together you can't
+    tell if you've got the key already for one or the other.)
+    """
+    hashedLine = sampleHashedLine
+
+    def setUp(self):
+        """
+        Set 'entry' to a sample hashed entry for twistedmatrix.com with
+        sampleKey as its key.
+        """
+        self.entry = HashedEntry.fromString(self.hashedLine)
+
+
+    def test_toString(self):
+        """
+        L{HashedEntry.toString} generates the serialized OpenSSH format string
+        for the entry, sans the newline.
+        """
+        self.assertEqual(self.entry.toString(), self.hashedLine.rstrip("\n"))
+
+
+
+class HashedEntryWithCommentTests(HashedEntryTests):
+    """
+    Test cases for L{HashedEntry} when parsed from a line with a comment.
+    """
+
+    hashedLine = sampleHashedLine[:-1] + " plain text comment.\n"
+
+
+
+class UnparsedEntryTests(TestCase, EntryTestsMixin):
+    """
+    Tests for L{UnparsedEntry}.
+    """
+    def setUp(self):
+        """
+        Set up the 'entry' to be an unparsed entry for some random text.
+        """
+        self.entry = UnparsedEntry("    This is a bogus entry.  \n")
+
+
+    def test_fromString(self):
+        """
+        Creating an L{UnparsedEntry} should simply record the string it was
+        passed.
+        """
+        self.assertEqual("    This is a bogus entry.  \n",
+                         self.entry._string)
+
+
+    def test_matchesHost(self):
+        """
+        An unparsed entry can't match any hosts.
+        """
+        self.assertEqual(False, self.entry.matchesHost("www.twistedmatrix.com"))
+
+
+    def test_matchesKey(self):
+        """
+        An unparsed entry can't match any keys.
+        """
+        self.assertEqual(False, self.entry.matchesKey(Key.fromString(sampleKey)))
+
+
+    def test_toString(self):
+        """
+        L{UnparsedEntry.toString} returns its input string, sans trailing
+        newline.
+        """
+        self.assertEqual("    This is a bogus entry.  ", self.entry.toString())
+
+
+
+class ParseErrorTests(TestCase):
+    """
+    L{HashedEntry.fromString} and L{PlainEntry.fromString} can raise a variety
+    of errors depending on how the input string is malformed.  These tests
+    make sure those errors are raised as expected.  Since many of the ways
+    that parsing can go wrong are in the lower-level APIs being invoked by the
+    parsing logic, several of these are integration tests with the C{base64}
+    and L{twisted.conch.ssh.keys} modules.
+    """
+
+    def invalidEntryTest(self, cls):
+        """
+        If there are fewer than three elements, C{fromString} should raise
+        L{InvalidEntry}.
+        """
+        self.assertRaises(InvalidEntry, cls.fromString, "invalid")
+
+
+    def notBase64Test(self, cls):
+        """
+        If the key is not base64, C{fromString} should raise L{BinasciiError}.
+        """
+        self.assertRaises(BinasciiError, cls.fromString, "x x x")
+
+
+    def badKeyTest(self, cls, prefix):
+        """
+        If the key portion of the entry is valid base64, but is not actually an
+        SSH key, C{fromString} should raise L{BadKeyError}.
+        """
+        self.assertRaises(BadKeyError, cls.fromString, ' '.join(
+                [prefix, "ssh-rsa", b2a_base64(
+                        "Hey, this isn't an SSH key!").strip()]))
+
+
+    def test_invalidPlainEntry(self):
+        """
+        If there are fewer than three whitespace-separated elements in an
+        entry, L{PlainEntry.fromString} should raise L{InvalidEntry}.
+        """
+        self.invalidEntryTest(PlainEntry)
+
+
+    def test_invalidHashedEntry(self):
+        """
+        If there are fewer than three whitespace-separated elements in an
+        entry, or the hostname salt/hash portion has more than two elements,
+        L{HashedEntry.fromString} should raise L{InvalidEntry}.
+        """
+        self.invalidEntryTest(HashedEntry)
+        a, b, c = sampleHashedLine.split()
+        self.assertRaises(InvalidEntry, HashedEntry.fromString, ' '.join(
+                [a + "||", b, c]))
+
+
+    def test_plainNotBase64(self):
+        """
+        If the key portion of a plain entry is not decodable as base64,
+        C{fromString} should raise L{BinasciiError}.
+        """
+        self.notBase64Test(PlainEntry)
+
+
+    def test_hashedNotBase64(self):
+        """
+        If the key, host salt, or host hash portion of a hashed entry is not
+        valid base64, C{fromString} will raise L{BinasciiError}.
+        """
+        self.notBase64Test(HashedEntry)
+        a, b, c = sampleHashedLine.split()
+        # Salt not valid base64.
+        self.assertRaises(
+            BinasciiError, HashedEntry.fromString,
+            ' '.join(["|1|x|" + b2a_base64("stuff").strip(), b, c]))
+        # Host hash not valid base64.
+        self.assertRaises(
+            BinasciiError, HashedEntry.fromString,
+            ' '.join([HashedEntry.MAGIC + b2a_base64("stuff").strip() + "|x",
+            b, c]))
+        # Neither salt nor hash valid base64.
+        self.assertRaises(
+            BinasciiError, HashedEntry.fromString,
+            ' '.join(["|1|x|x", b, c]))
+
+
+    def test_hashedBadKey(self):
+        """
+        If the key portion of the entry is valid base64, but is not actually an
+        SSH key, C{HashedEntry.fromString} should raise L{BadKeyError}.
+        """
+        a, b, c = sampleHashedLine.split()
+        self.badKeyTest(HashedEntry, a)
+
+
+    def test_plainBadKey(self):
+        """
+        If the key portion of the entry is valid base64, but is not actually an
+        SSH key, C{PlainEntry.fromString} should raise L{BadKeyError}.
+        """
+        self.badKeyTest(PlainEntry, "hostname")
+
+
+
+class KnownHostsDatabaseTests(TestCase):
+    """
+    Tests for L{KnownHostsFile}.
+    """
+
+    def pathWithContent(self, content):
+        """
+        Return a FilePath with the given initial content.
+        """
+        fp = FilePath(self.mktemp())
+        fp.setContent(content)
+        return fp
+
+
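+    # The default sample content below produces six entries: a hashed entry,
+    # a plain entry, a blank line, a comment, and two unparseable lines.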
+    def loadSampleHostsFile(self, content=(
+            sampleHashedLine + otherSamplePlaintextLine +
+            "\n# That was a blank line.\n"
+            "This is just unparseable.\n"
+            "|1|This also unparseable.\n")):
+        """
+        Return a sample hosts file, with keys for www.twistedmatrix.com and
+        divmod.com present.
+        """
+        return KnownHostsFile.fromPath(self.pathWithContent(content))
+
+
+    def test_loadFromPath(self):
+        """
+        Loading a L{KnownHostsFile} from a path with six entries in it will
+        result in a L{KnownHostsFile} object with six L{IKnownHostEntry}
+        providers in it.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        self.assertEqual(len(hostsFile._entries), 6)
+
+
+    def test_verifyHashedEntry(self):
+        """
+        Loading a L{KnownHostsFile} from a path containing a single valid
+        L{HashedEntry} entry will result in a L{KnownHostsFile} object
+        with one L{IKnownHostEntry} provider.
+        """
+        hostsFile = self.loadSampleHostsFile((sampleHashedLine))
+        self.assertIsInstance(hostsFile._entries[0], HashedEntry)
+        self.assertEqual(True, hostsFile._entries[0].matchesHost(
+                "www.twistedmatrix.com"))
+
+
+    def test_verifyPlainEntry(self):
+        """
+        Loading a L{KnownHostsFile} from a path containing a single valid
+        L{PlainEntry} entry will result in a L{KnownHostsFile} object
+        with one L{IKnownHostEntry} provider.
+        """
+        hostsFile = self.loadSampleHostsFile((otherSamplePlaintextLine))
+        self.assertIsInstance(hostsFile._entries[0], PlainEntry)
+        self.assertEqual(True, hostsFile._entries[0].matchesHost(
+                "divmod.com"))
+
+
+    def test_verifyUnparsedEntry(self):
+        """
+        Loading a L{KnownHostsFile} from a path that only contains '\n' will
+        result in a L{KnownHostsFile} object containing an L{UnparsedEntry}
+        object.
+        """
+        hostsFile = self.loadSampleHostsFile(("\n"))
+        self.assertIsInstance(hostsFile._entries[0], UnparsedEntry)
+        self.assertEqual(hostsFile._entries[0].toString(), "")
+
+
+    def test_verifyUnparsedComment(self):
+        """
+        Loading a L{KnownHostsFile} from a path that contains a comment will
+        result in a L{KnownHostsFile} object containing an L{UnparsedEntry}
+        object.
+        """
+        hostsFile = self.loadSampleHostsFile(("# That was a blank line.\n"))
+        self.assertIsInstance(hostsFile._entries[0], UnparsedEntry)
+        self.assertEqual(hostsFile._entries[0].toString(),
+                         "# That was a blank line.")
+
+
+    def test_verifyUnparsableLine(self):
+        """
+        Loading a L{KnownHostsFile} from a path that contains an unparseable
+        line will result in that line being represented as an L{UnparsedEntry}
+        instance.
+        """
+        hostsFile = self.loadSampleHostsFile(("This is just unparseable.\n"))
+        self.assertIsInstance(hostsFile._entries[0], UnparsedEntry)
+        self.assertEqual(hostsFile._entries[0].toString(),
+                         "This is just unparseable.")
+
+
+    def test_verifyUnparsableEncryptionMarker(self):
+        """
+        Loading a L{KnownHostsFile} from a path containing an unparseable line
+        that starts with an encryption marker will result in that line being
+        represented as an L{UnparsedEntry} instance.
+        """
+        hostsFile = self.loadSampleHostsFile(("|1|This is unparseable.\n"))
+        self.assertIsInstance(hostsFile._entries[0], UnparsedEntry)
+        self.assertEqual(hostsFile._entries[0].toString(),
+                         "|1|This is unparseable.")
+
+
+    def test_loadNonExistent(self):
+        """
+        Loading a L{KnownHostsFile} from a path that does not exist should
+        result in an empty L{KnownHostsFile} that will save back to that path.
+        """
+        pn = self.mktemp()
+        knownHostsFile = KnownHostsFile.fromPath(FilePath(pn))
+        self.assertEqual([], list(knownHostsFile._entries))
+        self.assertEqual(False, FilePath(pn).exists())
+        knownHostsFile.save()
+        self.assertEqual(True, FilePath(pn).exists())
+
+
+    def test_loadNonExistentParent(self):
+        """
+        Loading a L{KnownHostsFile} from a path whose parent directory does not
+        exist should result in an empty L{KnownHostsFile} that will save back
+        to that path, creating its parent directory(ies) in the process.
+        """
+        thePath = FilePath(self.mktemp())
+        knownHostsPath = thePath.child("foo").child("known_hosts")
+        knownHostsFile = KnownHostsFile.fromPath(knownHostsPath)
+        knownHostsFile.save()
+        knownHostsPath.restat(False)
+        self.assertEqual(True, knownHostsPath.exists())
+
+
+    def test_savingAddsEntry(self):
+        """
+        L{KnownHostsFile.save()} will write out a new file with any entries
+        that have been added.
+        """
+        path = self.pathWithContent(sampleHashedLine +
+                                    otherSamplePlaintextLine)
+        knownHostsFile = KnownHostsFile.fromPath(path)
+        newEntry = knownHostsFile.addHostKey("some.example.com",
+            Key.fromString(thirdSampleKey))
+        expectedContent = (
+            sampleHashedLine +
+            otherSamplePlaintextLine + HashedEntry.MAGIC +
+            b2a_base64(newEntry._hostSalt).strip() + "|" +
+            b2a_base64(newEntry._hostHash).strip() + " ssh-rsa " +
+            thirdSampleEncodedKey + "\n")
+
+        # Sanity check, let's make sure the base64 API being used for the test
+        # isn't inserting spurious newlines.
+        self.assertEqual(3, expectedContent.count("\n"))
+        knownHostsFile.save()
+        self.assertEqual(expectedContent, path.getContent())
+
+
+    def test_hasPresentKey(self):
+        """
+        L{KnownHostsFile.hasHostKey} returns C{True} when a key for the given
+        hostname is present and matches the expected key.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        self.assertEqual(True, hostsFile.hasHostKey(
+                "www.twistedmatrix.com", Key.fromString(sampleKey)))
+
+
+    def test_hasNonPresentKey(self):
+        """
+        L{KnownHostsFile.hasHostKey} returns C{False} when a key for the given
+        hostname is not present.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        self.assertEqual(False, hostsFile.hasHostKey(
+                "non-existent.example.com", Key.fromString(sampleKey)))
+
+
+    def test_hasKeyMismatch(self):
+        """
+        L{KnownHostsFile.hasHostKey} raises L{HostKeyChanged} if the host key
+        is present, but different from the expected one.  The resulting
+        exception should have an C{offendingEntry} indicating the given entry.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        exception = self.assertRaises(
+            HostKeyChanged, hostsFile.hasHostKey,
+            "www.twistedmatrix.com", Key.fromString(otherSampleKey))
+        self.assertEqual(exception.offendingEntry, hostsFile._entries[0])
+        self.assertEqual(exception.lineno, 1)
+        self.assertEqual(exception.path, hostsFile._savePath)
+
+
+    def test_addHostKey(self):
+        """
+        L{KnownHostsFile.addHostKey} adds a new L{HashedEntry} to the host
+        file, and returns it.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        aKey = Key.fromString(thirdSampleKey)
+        self.assertEqual(False,
+                         hostsFile.hasHostKey("somewhere.example.com", aKey))
+        newEntry = hostsFile.addHostKey("somewhere.example.com", aKey)
+
+        # OpenSSH requires host salts to be 20 bytes long, which is also the
+        # output length of the SHA-1 HMAC used to hash hostnames, so this is
+        # just a sanity check.
+        self.assertEqual(20, len(newEntry._hostSalt))
+        self.assertEqual(True,
+                         newEntry.matchesHost("somewhere.example.com"))
+        self.assertEqual(newEntry.keyType, "ssh-rsa")
+        self.assertEqual(aKey, newEntry.publicKey)
+        self.assertEqual(True,
+                         hostsFile.hasHostKey("somewhere.example.com", aKey))
+
+
+    def test_randomSalts(self):
+        """
+        L{KnownHostsFile.addHostKey} generates a random salt for each new key,
+        so subsequent salts will be different.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        aKey = Key.fromString(thirdSampleKey)
+        self.assertNotEqual(
+            hostsFile.addHostKey("somewhere.example.com", aKey)._hostSalt,
+            hostsFile.addHostKey("somewhere-else.example.com", aKey)._hostSalt)
+
+
+    def test_verifyValidKey(self):
+        """
+        Verifying a valid key should return a L{Deferred} which fires with
+        True.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
+        ui = FakeUI()
+        d = hostsFile.verifyHostKey(ui, "www.twistedmatrix.com", "1.2.3.4",
+                                    Key.fromString(sampleKey))
+        l = []
+        d.addCallback(l.append)
+        self.assertEqual(l, [True])
+
+
+    def test_verifyInvalidKey(self):
+        """
+        Verifying an invalid key should return a L{Deferred} which fires with a
+        L{HostKeyChanged} failure.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        wrongKey = Key.fromString(thirdSampleKey)
+        ui = FakeUI()
+        hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
+        d = hostsFile.verifyHostKey(
+            ui, "www.twistedmatrix.com", "1.2.3.4", wrongKey)
+        return self.assertFailure(d, HostKeyChanged)
+
+
+    def verifyNonPresentKey(self):
+        """
+        Set up a test to verify a key that isn't present.  Return a 3-tuple of
+        the UI, a list set up to collect the result of the verifyHostKey call,
+        and the sample L{KnownHostsFile} being used.
+
+        This utility method avoids returning a L{Deferred}, and records results
+        in the returned list instead, because the events which get generated
+        here are pre-recorded in the 'ui' object.  If the L{Deferred} in
+        question does not fire, the test will fail quickly with an empty list.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        absentKey = Key.fromString(thirdSampleKey)
+        ui = FakeUI()
+        l = []
+        d = hostsFile.verifyHostKey(
+            ui, "sample-host.example.com", "4.3.2.1", absentKey)
+        d.addBoth(l.append)
+        self.assertEqual([], l)
+        self.assertEqual(
+            ui.promptText,
+            "The authenticity of host 'sample-host.example.com (4.3.2.1)' "
+            "can't be established.\n"
+            "RSA key fingerprint is "
+            "89:4e:cc:8c:57:83:96:48:ef:63:ad:ee:99:00:4c:8f.\n"
+            "Are you sure you want to continue connecting (yes/no)? ")
+        return ui, l, hostsFile
+
+
+    def test_verifyNonPresentKey_Yes(self):
+        """
+        Verifying a key where neither the hostname nor the IP are present
+        should result in the UI being prompted with a message explaining as
+        much.  If the UI says yes, the Deferred should fire with True.
+        """
+        ui, l, knownHostsFile = self.verifyNonPresentKey()
+        ui.promptDeferred.callback(True)
+        self.assertEqual([True], l)
+        reloaded = KnownHostsFile.fromPath(knownHostsFile._savePath)
+        self.assertEqual(
+            True,
+            reloaded.hasHostKey("4.3.2.1", Key.fromString(thirdSampleKey)))
+        self.assertEqual(
+            True,
+            reloaded.hasHostKey("sample-host.example.com",
+                                Key.fromString(thirdSampleKey)))
+
+
+    def test_verifyNonPresentKey_No(self):
+        """
+        Verifying a key where neither the hostname nor the IP are present
+        should result in the UI being prompted with a message explaining as
+        much.  If the UI says no, the Deferred should fail with
+        UserRejectedKey.
+        """
+        ui, l, knownHostsFile = self.verifyNonPresentKey()
+        ui.promptDeferred.callback(False)
+        l[0].trap(UserRejectedKey)
+
+
+    def test_verifyHostIPMismatch(self):
+        """
+        Verifying a key where the host is present (and correct), but the IP is
+        present and different, should result in the deferred firing with a
+        HostKeyChanged failure.
+        """
+        hostsFile = self.loadSampleHostsFile()
+        wrongKey = Key.fromString(thirdSampleKey)
+        ui = FakeUI()
+        d = hostsFile.verifyHostKey(
+            ui, "www.twistedmatrix.com", "4.3.2.1", wrongKey)
+        return self.assertFailure(d, HostKeyChanged)
+
+
+    def test_verifyKeyForHostAndIP(self):
+        """
+        Verifying a key where the hostname is present but the IP is not should
+        result in the key being added for the IP and the user being warned
+        about the change.
+        """
+        ui = FakeUI()
+        hostsFile = self.loadSampleHostsFile()
+        expectedKey = Key.fromString(sampleKey)
+        hostsFile.verifyHostKey(
+            ui, "www.twistedmatrix.com", "5.4.3.2", expectedKey)
+        self.assertEqual(
+            True, KnownHostsFile.fromPath(hostsFile._savePath).hasHostKey(
+                "5.4.3.2", expectedKey))
+        self.assertEqual(
+            ["Warning: Permanently added the RSA host key for IP address "
+             "'5.4.3.2' to the list of known hosts."],
+            ui.userWarnings)
+
+
+class FakeFile(object):
+    """
+    A fake file-like object that acts enough like a file for
+    L{ConsoleUI.prompt}.
+    """
+
+    def __init__(self):
+        self.inlines = []
+        self.outchunks = []
+        self.closed = False
+
+
+    def readline(self):
+        """
+        Return a line from the 'inlines' list.
+        """
+        return self.inlines.pop(0)
+
+
+    def write(self, chunk):
+        """
+        Append the given item to the 'outchunks' list.
+        """
+        if self.closed:
+            raise IOError("the file was closed")
+        self.outchunks.append(chunk)
+
+
+    def close(self):
+        """
+        Set the 'closed' flag to True, explicitly marking that it has been
+        closed.
+        """
+        self.closed = True
+
+
+
+class ConsoleUITests(TestCase):
+    """
+    Test cases for L{ConsoleUI}.
+    """
+
+    def setUp(self):
+        """
+        Create a L{ConsoleUI} pointed at a L{FakeFile}.
+        """
+        self.fakeFile = FakeFile()
+        self.ui = ConsoleUI(self.openFile)
+
+
+    def openFile(self):
+        """
+        Return the current fake file.
+        """
+        return self.fakeFile
+
+
+    def newFile(self, lines):
+        """
+        Create a new fake file (the next file that self.ui will open) with the
+        given list of lines to be returned from readline().
+        """
+        self.fakeFile = FakeFile()
+        self.fakeFile.inlines = lines
+
+
+    def test_promptYes(self):
+        """
+        L{ConsoleUI.prompt} writes a message to the console, then reads a line.
+        If that line is 'yes', then it returns a L{Deferred} that fires with
+        True.
+        """
+        for okYes in ['yes', 'Yes', 'yes\n']:
+            self.newFile([okYes])
+            l = []
+            self.ui.prompt("Hello, world!").addCallback(l.append)
+            self.assertEqual(["Hello, world!"], self.fakeFile.outchunks)
+            self.assertEqual([True], l)
+            self.assertEqual(True, self.fakeFile.closed)
+
+
+    def test_promptNo(self):
+        """
+        L{ConsoleUI.prompt} writes a message to the console, then reads a line.
+        If that line is 'no', then it returns a L{Deferred} that fires with
+        False.
+        """
+        for okNo in ['no', 'No', 'no\n']:
+            self.newFile([okNo])
+            l = []
+            self.ui.prompt("Goodbye, world!").addCallback(l.append)
+            self.assertEqual(["Goodbye, world!"], self.fakeFile.outchunks)
+            self.assertEqual([False], l)
+            self.assertEqual(True, self.fakeFile.closed)
+
+
+    def test_promptRepeatedly(self):
+        """
+        L{ConsoleUI.prompt} writes a message to the console, then reads a line.
+        If that line is neither 'yes' nor 'no', then it says "Please enter
+        'yes' or 'no'" until it gets a 'yes' or a 'no', at which point it
+        returns a Deferred that answers either True or False.
+        """
+        self.newFile(['what', 'uh', 'okay', 'yes'])
+        l = []
+        self.ui.prompt("Please say something useful.").addCallback(l.append)
+        self.assertEqual([True], l)
+        self.assertEqual(self.fakeFile.outchunks,
+                         ["Please say something useful."] +
+                         ["Please type 'yes' or 'no': "] * 3)
+        self.assertEqual(True, self.fakeFile.closed)
+        self.newFile(['blah', 'stuff', 'feh', 'no'])
+        l = []
+        self.ui.prompt("Please say something negative.").addCallback(l.append)
+        self.assertEqual([False], l)
+        self.assertEqual(self.fakeFile.outchunks,
+                         ["Please say something negative."] +
+                         ["Please type 'yes' or 'no': "] * 3)
+        self.assertEqual(True, self.fakeFile.closed)
+
+
+    def test_promptOpenFailed(self):
+        """
+        If the C{opener} passed to L{ConsoleUI} raises an exception, that
+        exception will fail the L{Deferred} returned from L{ConsoleUI.prompt}.
+        """
+        def raiseIt():
+            raise IOError()
+        ui = ConsoleUI(raiseIt)
+        d = ui.prompt("This is a test.")
+        return self.assertFailure(d, IOError)
+
+
+    def test_warn(self):
+        """
+        L{ConsoleUI.warn} should output a message to the console object.
+        """
+        self.ui.warn("Test message.")
+        self.assertEqual(["Test message."], self.fakeFile.outchunks)
+        self.assertEqual(True, self.fakeFile.closed)
+
+
+    def test_warnOpenFailed(self):
+        """
+        L{ConsoleUI.warn} should log a traceback if the output can't be opened.
+        """
+        def raiseIt():
+            1 / 0
+        ui = ConsoleUI(raiseIt)
+        ui.warn("This message never makes it.")
+        self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1)
+
+
+
+class FakeUI(object):
+    """
+    A fake UI object, adhering to the interface expected by
+    L{KnownHostsFile.verifyHostKey}.
+
+    @ivar userWarnings: inputs provided to 'warn'.
+
+    @ivar promptDeferred: last result returned from 'prompt'.
+
+    @ivar promptText: the last input provided to 'prompt'.
+    """
+
+    def __init__(self):
+        self.userWarnings = []
+        self.promptDeferred = None
+        self.promptText = None
+
+
+    def prompt(self, text):
+        """
+        Issue the user an interactive prompt, which they can accept or deny.
+        """
+        self.promptText = text
+        self.promptDeferred = Deferred()
+        return self.promptDeferred
+
+
+    def warn(self, text):
+        """
+        Issue a non-interactive warning to the user.
+        """
+        self.userWarnings.append(text)
+
+
+
+class FakeObject(object):
+    """
+    A fake object that can have some attributes.  Used to fake
+    L{SSHClientTransport} and L{SSHClientFactory}.
+    """
+
+
+class DefaultAPITests(TestCase):
+    """
+    The API in L{twisted.conch.client.default.verifyHostKey} is the integration
+    point between the code in the rest of conch and L{KnownHostsFile}.
+    """
+
+    def patchedOpen(self, fname, mode):
+        """
+        The patched version of 'open'; this returns a L{FakeFile} that the
+        instantiated L{ConsoleUI} can use.
+        """
+        self.assertEqual(fname, "/dev/tty")
+        self.assertEqual(mode, "r+b")
+        return self.fakeFile
+
+
+    def setUp(self):
+        """
+        Patch 'open' in verifyHostKey.
+        """
+        self.fakeFile = FakeFile()
+        self.patch(default, "_open", self.patchedOpen)
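+        # Descriptive note: default._open is the hook that verifyHostKey
+        # uses to open /dev/tty for its ConsoleUI (see patchedOpen above),
+        # so patching it captures prompt output in self.fakeFile instead of
+        # a real terminal.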
+        self.hostsOption = self.mktemp()
+        knownHostsFile = KnownHostsFile(FilePath(self.hostsOption))
+        knownHostsFile.addHostKey("exists.example.com",
+            Key.fromString(sampleKey))
+        knownHostsFile.addHostKey("4.3.2.1", Key.fromString(sampleKey))
+        knownHostsFile.save()
+        self.fakeTransport = FakeObject()
+        self.fakeTransport.factory = FakeObject()
+        self.options = self.fakeTransport.factory.options = {
+            'host': "exists.example.com",
+            'known-hosts': self.hostsOption
+            }
+
+
+    def test_verifyOKKey(self):
+        """
+        L{default.verifyHostKey} should return a L{Deferred} which fires with
+        C{1} when passed a host, IP, and key which already match the
+        known_hosts file it is supposed to check.
+        """
+        l = []
+        default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
+                              "I don't care.").addCallback(l.append)
+        self.assertEqual([1], l)
+
+
+    def replaceHome(self, tempHome):
+        """
+        Replace the HOME environment variable until the end of the current
+        test, with the given new home-directory, so that L{os.path.expanduser}
+        will yield controllable, predictable results.
+
+        @param tempHome: the pathname to replace the HOME variable with.
+
+        @type tempHome: L{str}
+        """
+        oldHome = os.environ.get('HOME')
+        def cleanupHome():
+            if oldHome is None:
+                del os.environ['HOME']
+            else:
+                os.environ['HOME'] = oldHome
+        self.addCleanup(cleanupHome)
+        os.environ['HOME'] = tempHome
+
+
+    def test_noKnownHostsOption(self):
+        """
+        L{default.verifyHostKey} should find your known_hosts file in
+        ~/.ssh/known_hosts if you don't specify one explicitly on the command
+        line.
+        """
+        l = []
+        tmpdir = self.mktemp()
+        oldHostsOption = self.hostsOption
+        hostsNonOption = FilePath(tmpdir).child(".ssh").child("known_hosts")
+        hostsNonOption.parent().makedirs()
+        FilePath(oldHostsOption).moveTo(hostsNonOption)
+        self.replaceHome(tmpdir)
+        self.options['known-hosts'] = None
+        default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
+                              "I don't care.").addCallback(l.append)
+        self.assertEqual([1], l)
+
+
+    def test_verifyHostButNotIP(self):
+        """
+        L{default.verifyHostKey} should return a L{Deferred} which fires with
+        C{1} when passed a known host whose IP address is not yet present in
+        its known_hosts file, and should also warn the user that it has added
+        the IP address.
+        """
+        l = []
+        default.verifyHostKey(self.fakeTransport, "8.7.6.5", sampleKey,
+                              "Fingerprint not required.").addCallback(l.append)
+        self.assertEqual(
+            ["Warning: Permanently added the RSA host key for IP address "
+            "'8.7.6.5' to the list of known hosts."],
+            self.fakeFile.outchunks)
+        self.assertEqual([1], l)
+        knownHostsFile = KnownHostsFile.fromPath(FilePath(self.hostsOption))
+        self.assertEqual(True, knownHostsFile.hasHostKey("8.7.6.5",
+                                             Key.fromString(sampleKey)))
+
+
+    def test_verifyQuestion(self):
+        """
+        L{default.verifyHostKey} should return a L{Deferred} which fails with
+        L{UserRejectedKey} when passed an unknown host that the user refuses
+        to acknowledge.
+        """
+        self.fakeTransport.factory.options['host'] = 'fake.example.com'
+        self.fakeFile.inlines.append("no")
+        d = default.verifyHostKey(
+            self.fakeTransport, "9.8.7.6", otherSampleKey, "No fingerprint!")
+        self.assertEqual(
+            ["The authenticity of host 'fake.example.com (9.8.7.6)' "
+             "can't be established.\n"
+             "RSA key fingerprint is "
+             "57:a1:c2:a1:07:a0:2b:f4:ce:b5:e5:b7:ae:cc:e1:99.\n"
+              "Are you sure you want to continue connecting (yes/no)? "],
+             self.fakeFile.outchunks)
+        return self.assertFailure(d, UserRejectedKey)
+
+
+    def test_verifyBadKey(self):
+        """
+        L{default.verifyHostKey} should return a L{Deferred} which fails with
+        L{HostKeyChanged} if the host key is incorrect.
+        """
+        d = default.verifyHostKey(
+            self.fakeTransport, "4.3.2.1", otherSampleKey,
+            "Again, not required.")
+        return self.assertFailure(d, HostKeyChanged)
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_manhole.py b/ThirdParty/Twisted/twisted/conch/test/test_manhole.py
new file mode 100644
index 0000000..09dd52c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_manhole.py
@@ -0,0 +1,372 @@
+# -*- test-case-name: twisted.conch.test.test_manhole -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.manhole}.
+"""
+
+import traceback
+
+from twisted.trial import unittest
+from twisted.internet import error, defer
+from twisted.test.proto_helpers import StringTransport
+from twisted.conch.test.test_recvline import _TelnetMixin, _SSHMixin, _StdioMixin, stdio, ssh
+from twisted.conch import manhole
+from twisted.conch.insults import insults
+
+
+def determineDefaultFunctionName():
+    """
+    Return the string used by Python as the name for code objects which are
+    compiled from interactive input or at the top-level of modules.
+    """
+    try:
+        1 // 0
+    except:
+        # The last frame is this function.  The second to last frame is this
+        # function's caller, which is module-scope, which is what we want,
+        # so -2.
+        return traceback.extract_stack()[-2][2]
+defaultFunctionName = determineDefaultFunctionName()
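+# For illustration only: the name this helper extracts depends on where it is
+# called from.  At module scope (as above) a typical Python 2.x interpreter
+# reports '<module>', while calling it from inside a function would report
+# that function's name, roughly:
+#
+#   def caller():
+#       return determineDefaultFunctionName()   # -> 'caller'
+#
+# The tests below compare against whatever string this interpreter actually
+# uses, so the exact value here is only an assumption.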
+
+
+
+class ManholeInterpreterTests(unittest.TestCase):
+    """
+    Tests for L{manhole.ManholeInterpreter}.
+    """
+    def test_resetBuffer(self):
+        """
+        L{ManholeInterpreter.resetBuffer} should empty the input buffer.
+        """
+        interpreter = manhole.ManholeInterpreter(None)
+        interpreter.buffer.extend(["1", "2"])
+        interpreter.resetBuffer()
+        self.assertFalse(interpreter.buffer)
+
+
+
+class ManholeProtocolTests(unittest.TestCase):
+    """
+    Tests for L{manhole.Manhole}.
+    """
+    def test_interruptResetsInterpreterBuffer(self):
+        """
+        L{manhole.Manhole.handle_INT} should cause the interpreter input buffer
+        to be reset.
+        """
+        transport = StringTransport()
+        terminal = insults.ServerProtocol(manhole.Manhole)
+        terminal.makeConnection(transport)
+        protocol = terminal.terminalProtocol
+        interpreter = protocol.interpreter
+        interpreter.buffer.extend(["1", "2"])
+        protocol.handle_INT()
+        self.assertFalse(interpreter.buffer)
+
+
+
+class WriterTestCase(unittest.TestCase):
+    def testInteger(self):
+        manhole.lastColorizedLine("1")
+
+
+    def testDoubleQuoteString(self):
+        manhole.lastColorizedLine('"1"')
+
+
+    def testSingleQuoteString(self):
+        manhole.lastColorizedLine("'1'")
+
+
+    def testTripleSingleQuotedString(self):
+        manhole.lastColorizedLine("'''1'''")
+
+
+    def testTripleDoubleQuotedString(self):
+        manhole.lastColorizedLine('"""1"""')
+
+
+    def testFunctionDefinition(self):
+        manhole.lastColorizedLine("def foo():")
+
+
+    def testClassDefinition(self):
+        manhole.lastColorizedLine("class foo:")
+
+
+class ManholeLoopbackMixin:
+    serverProtocol = manhole.ColoredManhole
+
+    def wfd(self, d):
+        return defer.waitForDeferred(d)
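+    # Descriptive note: wfd() wraps a Deferred with defer.waitForDeferred so
+    # that the generator-style tests below (testControlD, testControlL,
+    # testDeferred) can pause on it.  The pattern, which predates
+    # inlineCallbacks, looks roughly like:
+    #
+    #   w = self.wfd(someDeferred)
+    #   yield w
+    #   result = w.getResult()
+    #
+    # with the generator itself wrapped by defer.deferredGenerator.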
+
+    def testSimpleExpression(self):
+        done = self.recvlineClient.expect("done")
+
+        self._testwrite(
+            "1 + 1\n"
+            "done")
+
+        def finished(ign):
+            self._assertBuffer(
+                [">>> 1 + 1",
+                 "2",
+                 ">>> done"])
+
+        return done.addCallback(finished)
+
+    def testTripleQuoteLineContinuation(self):
+        done = self.recvlineClient.expect("done")
+
+        self._testwrite(
+            "'''\n'''\n"
+            "done")
+
+        def finished(ign):
+            self._assertBuffer(
+                [">>> '''",
+                 "... '''",
+                 "'\\n'",
+                 ">>> done"])
+
+        return done.addCallback(finished)
+
+    def testFunctionDefinition(self):
+        done = self.recvlineClient.expect("done")
+
+        self._testwrite(
+            "def foo(bar):\n"
+            "\tprint bar\n\n"
+            "foo(42)\n"
+            "done")
+
+        def finished(ign):
+            self._assertBuffer(
+                [">>> def foo(bar):",
+                 "...     print bar",
+                 "... ",
+                 ">>> foo(42)",
+                 "42",
+                 ">>> done"])
+
+        return done.addCallback(finished)
+
+    def testClassDefinition(self):
+        done = self.recvlineClient.expect("done")
+
+        self._testwrite(
+            "class Foo:\n"
+            "\tdef bar(self):\n"
+            "\t\tprint 'Hello, world!'\n\n"
+            "Foo().bar()\n"
+            "done")
+
+        def finished(ign):
+            self._assertBuffer(
+                [">>> class Foo:",
+                 "...     def bar(self):",
+                 "...         print 'Hello, world!'",
+                 "... ",
+                 ">>> Foo().bar()",
+                 "Hello, world!",
+                 ">>> done"])
+
+        return done.addCallback(finished)
+
+    def testException(self):
+        done = self.recvlineClient.expect("done")
+
+        self._testwrite(
+            "raise Exception('foo bar baz')\n"
+            "done")
+
+        def finished(ign):
+            self._assertBuffer(
+                [">>> raise Exception('foo bar baz')",
+                 "Traceback (most recent call last):",
+                 '  File "<console>", line 1, in ' + defaultFunctionName,
+                 "Exception: foo bar baz",
+                 ">>> done"])
+
+        return done.addCallback(finished)
+
+    def testControlC(self):
+        done = self.recvlineClient.expect("done")
+
+        self._testwrite(
+            "cancelled line" + manhole.CTRL_C +
+            "done")
+
+        def finished(ign):
+            self._assertBuffer(
+                [">>> cancelled line",
+                 "KeyboardInterrupt",
+                 ">>> done"])
+
+        return done.addCallback(finished)
+
+
+    def test_interruptDuringContinuation(self):
+        """
+        Sending ^C to Manhole while in a state where more input is required to
+        complete a statement should discard the entire ongoing statement and
+        reset the input prompt to the non-continuation prompt.
+        """
+        continuing = self.recvlineClient.expect("things")
+
+        self._testwrite("(\nthings")
+
+        def gotContinuation(ignored):
+            self._assertBuffer(
+                [">>> (",
+                 "... things"])
+            interrupted = self.recvlineClient.expect(">>> ")
+            self._testwrite(manhole.CTRL_C)
+            return interrupted
+        continuing.addCallback(gotContinuation)
+
+        def gotInterruption(ignored):
+            self._assertBuffer(
+                [">>> (",
+                 "... things",
+                 "KeyboardInterrupt",
+                 ">>> "])
+        continuing.addCallback(gotInterruption)
+        return continuing
+
+
+    def testControlBackslash(self):
+        self._testwrite("cancelled line")
+        partialLine = self.recvlineClient.expect("cancelled line")
+
+        def gotPartialLine(ign):
+            self._assertBuffer(
+                [">>> cancelled line"])
+            self._testwrite(manhole.CTRL_BACKSLASH)
+
+            d = self.recvlineClient.onDisconnection
+            return self.assertFailure(d, error.ConnectionDone)
+
+        def gotClearedLine(ign):
+            self._assertBuffer(
+                [""])
+
+        return partialLine.addCallback(gotPartialLine).addCallback(gotClearedLine)
+
+    def testControlD(self):
+        self._testwrite("1 + 1")
+        helloWorld = self.wfd(self.recvlineClient.expect(r"\+ 1"))
+        yield helloWorld
+        helloWorld.getResult()
+        self._assertBuffer([">>> 1 + 1"])
+
+        self._testwrite(manhole.CTRL_D + " + 1")
+        cleared = self.wfd(self.recvlineClient.expect(r"\+ 1"))
+        yield cleared
+        cleared.getResult()
+        self._assertBuffer([">>> 1 + 1 + 1"])
+
+        self._testwrite("\n")
+        printed = self.wfd(self.recvlineClient.expect("3\n>>> "))
+        yield printed
+        printed.getResult()
+
+        self._testwrite(manhole.CTRL_D)
+        d = self.recvlineClient.onDisconnection
+        disconnected = self.wfd(self.assertFailure(d, error.ConnectionDone))
+        yield disconnected
+        disconnected.getResult()
+    testControlD = defer.deferredGenerator(testControlD)
+
+
+    def testControlL(self):
+        """
+        CTRL+L is generally used as a redraw-screen command in terminal
+        applications.  Manhole doesn't currently respect this usage of it,
+        but it should at least do something reasonable in response to this
+        event (rather than, say, eating your face).
+        """
+        # Start off with a newline so that when we clear the display we can
+        # tell by looking for the missing first empty prompt line.
+        self._testwrite("\n1 + 1")
+        helloWorld = self.wfd(self.recvlineClient.expect(r"\+ 1"))
+        yield helloWorld
+        helloWorld.getResult()
+        self._assertBuffer([">>> ", ">>> 1 + 1"])
+
+        self._testwrite(manhole.CTRL_L + " + 1")
+        redrew = self.wfd(self.recvlineClient.expect(r"1 \+ 1 \+ 1"))
+        yield redrew
+        redrew.getResult()
+        self._assertBuffer([">>> 1 + 1 + 1"])
+    testControlL = defer.deferredGenerator(testControlL)
+
+
+    def test_controlA(self):
+        """
+        CTRL-A can be used as HOME - returning cursor to beginning of
+        current line buffer.
+        """
+        self._testwrite('rint "hello"' + '\x01' + 'p')
+        d = self.recvlineClient.expect('print "hello"')
+        def cb(ignore):
+            self._assertBuffer(['>>> print "hello"'])
+        return d.addCallback(cb)
+
+
+    def test_controlE(self):
+        """
+        CTRL-E can be used as END - setting cursor to end of current
+        line buffer.
+        """
+        self._testwrite('rint "hello' + '\x01' + 'p' + '\x05' + '"')
+        d = self.recvlineClient.expect('print "hello"')
+        def cb(ignore):
+            self._assertBuffer(['>>> print "hello"'])
+        return d.addCallback(cb)
+
+
+    def testDeferred(self):
+        self._testwrite(
+            "from twisted.internet import defer, reactor\n"
+            "d = defer.Deferred()\n"
+            "d\n")
+
+        deferred = self.wfd(self.recvlineClient.expect("<Deferred #0>"))
+        yield deferred
+        deferred.getResult()
+
+        self._testwrite(
+            "c = reactor.callLater(0.1, d.callback, 'Hi!')\n")
+        delayed = self.wfd(self.recvlineClient.expect(">>> "))
+        yield delayed
+        delayed.getResult()
+
+        called = self.wfd(self.recvlineClient.expect("Deferred #0 called back: 'Hi!'\n>>> "))
+        yield called
+        called.getResult()
+        self._assertBuffer(
+            [">>> from twisted.internet import defer, reactor",
+             ">>> d = defer.Deferred()",
+             ">>> d",
+             "<Deferred #0>",
+             ">>> c = reactor.callLater(0.1, d.callback, 'Hi!')",
+             "Deferred #0 called back: 'Hi!'",
+             ">>> "])
+
+    testDeferred = defer.deferredGenerator(testDeferred)
+
+class ManholeLoopbackTelnet(_TelnetMixin, unittest.TestCase, ManholeLoopbackMixin):
+    pass
+
+class ManholeLoopbackSSH(_SSHMixin, unittest.TestCase, ManholeLoopbackMixin):
+    if ssh is None:
+        skip = "Crypto requirements missing, can't run manhole tests over ssh"
+
+class ManholeLoopbackStdio(_StdioMixin, unittest.TestCase, ManholeLoopbackMixin):
+    if stdio is None:
+        skip = "Terminal requirements missing, can't run manhole tests over stdio"
+    else:
+        serverProtocol = stdio.ConsoleManhole
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_mixin.py b/ThirdParty/Twisted/twisted/conch/test/test_mixin.py
new file mode 100644
index 0000000..74d60ea
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_mixin.py
@@ -0,0 +1,47 @@
+# -*- test-case-name: twisted.conch.test.test_mixin -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import time
+
+from twisted.internet import reactor, protocol
+
+from twisted.trial import unittest
+from twisted.test.proto_helpers import StringTransport
+
+from twisted.conch import mixin
+
+
+class TestBufferingProto(mixin.BufferingMixin):
+    scheduled = False
+    rescheduled = 0
+    def schedule(self):
+        self.scheduled = True
+        return object()
+
+    def reschedule(self, token):
+        self.rescheduled += 1
+
+
+
+class BufferingTest(unittest.TestCase):
+    def testBuffering(self):
+        p = TestBufferingProto()
+        t = p.transport = StringTransport()
+
+        self.failIf(p.scheduled)
+
+        L = ['foo', 'bar', 'baz', 'quux']
+
+        p.write('foo')
+        self.failUnless(p.scheduled)
+        self.failIf(p.rescheduled)
+
+        for s in L:
+            n = p.rescheduled
+            p.write(s)
+            self.assertEqual(p.rescheduled, n + 1)
+            self.assertEqual(t.value(), '')
+
+        p.flush()
+        self.assertEqual(t.value(), 'foo' + ''.join(L))
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_openssh_compat.py b/ThirdParty/Twisted/twisted/conch/test/test_openssh_compat.py
new file mode 100644
index 0000000..8b4e1a6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_openssh_compat.py
@@ -0,0 +1,102 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.openssh_compat}.
+"""
+
+import os
+
+from twisted.trial.unittest import TestCase
+from twisted.python.filepath import FilePath
+from twisted.python.compat import set
+
+try:
+    import Crypto.Cipher.DES3
+    import pyasn1
+except ImportError:
+    OpenSSHFactory = None
+else:
+    from twisted.conch.openssh_compat.factory import OpenSSHFactory
+
+from twisted.conch.test import keydata
+from twisted.test.test_process import MockOS
+
+
+class OpenSSHFactoryTests(TestCase):
+    """
+    Tests for L{OpenSSHFactory}.
+    """
+    if getattr(os, "geteuid", None) is None:
+        skip = "geteuid/seteuid not available"
+    elif OpenSSHFactory is None:
+        skip = "Cannot run without PyCrypto or PyASN1"
+
+    def setUp(self):
+        self.factory = OpenSSHFactory()
+        self.keysDir = FilePath(self.mktemp())
+        self.keysDir.makedirs()
+        self.factory.dataRoot = self.keysDir.path
+
+        self.keysDir.child("ssh_host_foo").setContent("foo")
+        self.keysDir.child("bar_key").setContent("foo")
+        self.keysDir.child("ssh_host_one_key").setContent(
+            keydata.privateRSA_openssh)
+        self.keysDir.child("ssh_host_two_key").setContent(
+            keydata.privateDSA_openssh)
+        self.keysDir.child("ssh_host_three_key").setContent(
+            "not a key content")
+
+        self.keysDir.child("ssh_host_one_key.pub").setContent(
+            keydata.publicRSA_openssh)
+
+        self.mockos = MockOS()
+        self.patch(os, "seteuid", self.mockos.seteuid)
+        self.patch(os, "setegid", self.mockos.setegid)
+
+
+    def test_getPublicKeys(self):
+        """
+        L{OpenSSHFactory.getPublicKeys} should return the available public keys
+        in the data directory.
+        """
+        keys = self.factory.getPublicKeys()
+        self.assertEqual(len(keys), 1)
+        keyTypes = keys.keys()
+        self.assertEqual(keyTypes, ['ssh-rsa'])
+
+
+    def test_getPrivateKeys(self):
+        """
+        L{OpenSSHFactory.getPrivateKeys} should return the available private
+        keys in the data directory.
+        """
+        keys = self.factory.getPrivateKeys()
+        self.assertEqual(len(keys), 2)
+        keyTypes = keys.keys()
+        self.assertEqual(set(keyTypes), set(['ssh-rsa', 'ssh-dss']))
+        self.assertEqual(self.mockos.seteuidCalls, [])
+        self.assertEqual(self.mockos.setegidCalls, [])
+
+
+    def test_getPrivateKeysAsRoot(self):
+        """
+        L{OpenSSHFactory.getPrivateKeys} should switch to root if the keys
+        aren't readable by the current user.
+        """
+        keyFile = self.keysDir.child("ssh_host_two_key")
+        # Fake permission error by changing the mode
+        keyFile.chmod(0000)
+        self.addCleanup(keyFile.chmod, 0777)
+        # And restore the right mode when seteuid is called
+        savedSeteuid = os.seteuid
+        def seteuid(euid):
+            keyFile.chmod(0777)
+            return savedSeteuid(euid)
+        self.patch(os, "seteuid", seteuid)
+        keys = self.factory.getPrivateKeys()
+        self.assertEqual(len(keys), 2)
+        keyTypes = keys.keys()
+        self.assertEqual(set(keyTypes), set(['ssh-rsa', 'ssh-dss']))
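+        # Descriptive note: the recorded seteuid/setegid calls show a switch
+        # to uid/gid 0 followed by a switch back to the current ids, i.e.
+        # privileges are only elevated while reading the unreadable key and
+        # are dropped again afterwards.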
+        self.assertEqual(self.mockos.seteuidCalls, [0, os.geteuid()])
+        self.assertEqual(self.mockos.setegidCalls, [0, os.getegid()])
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_recvline.py b/ThirdParty/Twisted/twisted/conch/test/test_recvline.py
new file mode 100644
index 0000000..3d53564
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_recvline.py
@@ -0,0 +1,706 @@
+# -*- test-case-name: twisted.conch.test.test_recvline -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.recvline} and fixtures for testing related
+functionality.
+"""
+
+import sys, os
+
+from twisted.conch.insults import insults
+from twisted.conch import recvline
+
+from twisted.python import reflect, components
+from twisted.internet import defer, error
+from twisted.trial import unittest
+from twisted.cred import portal
+from twisted.test.proto_helpers import StringTransport
+
+class Arrows(unittest.TestCase):
+    def setUp(self):
+        self.underlyingTransport = StringTransport()
+        self.pt = insults.ServerProtocol()
+        self.p = recvline.HistoricRecvLine()
+        self.pt.protocolFactory = lambda: self.p
+        self.pt.factory = self
+        self.pt.makeConnection(self.underlyingTransport)
+        # self.p.makeConnection(self.pt)
+
+    def test_printableCharacters(self):
+        """
+        When L{HistoricRecvLine} receives a printable character,
+        it adds it to the current line buffer.
+        """
+        self.p.keystrokeReceived('x', None)
+        self.p.keystrokeReceived('y', None)
+        self.p.keystrokeReceived('z', None)
+
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+    def test_horizontalArrows(self):
+        """
+        When L{HistoricRecvLine} receives a LEFT_ARROW or
+        RIGHT_ARROW keystroke it moves the cursor left or right
+        in the current line buffer, respectively.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+        for ch in 'xyz':
+            kR(ch)
+
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+        kR(self.pt.RIGHT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+        kR(self.pt.LEFT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('xy', 'z'))
+
+        kR(self.pt.LEFT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('x', 'yz'))
+
+        kR(self.pt.LEFT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('', 'xyz'))
+
+        kR(self.pt.LEFT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('', 'xyz'))
+
+        kR(self.pt.RIGHT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('x', 'yz'))
+
+        kR(self.pt.RIGHT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('xy', 'z'))
+
+        kR(self.pt.RIGHT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+        kR(self.pt.RIGHT_ARROW)
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+    def test_newline(self):
+        """
+        When L{HistoricRecvLine} receives a newline, it adds the current
+        line buffer to the end of its history buffer.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+
+        for ch in 'xyz\nabc\n123\n':
+            kR(ch)
+
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          (('xyz', 'abc', '123'), ()))
+
+        kR('c')
+        kR('b')
+        kR('a')
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          (('xyz', 'abc', '123'), ()))
+
+        kR('\n')
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          (('xyz', 'abc', '123', 'cba'), ()))
+
+    def test_verticalArrows(self):
+        """
+        When L{HistoricRecvLine} receives UP_ARROW or DOWN_ARROW
+        keystrokes, it moves the current index in the history buffer up or
+        down and resets the current line buffer to the previous or next line
+        in history, respectively.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+
+        for ch in 'xyz\nabc\n123\n':
+            kR(ch)
+
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          (('xyz', 'abc', '123'), ()))
+        self.assertEqual(self.p.currentLineBuffer(), ('', ''))
+
+        kR(self.pt.UP_ARROW)
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          (('xyz', 'abc'), ('123',)))
+        self.assertEqual(self.p.currentLineBuffer(), ('123', ''))
+
+        kR(self.pt.UP_ARROW)
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          (('xyz',), ('abc', '123')))
+        self.assertEqual(self.p.currentLineBuffer(), ('abc', ''))
+
+        kR(self.pt.UP_ARROW)
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          ((), ('xyz', 'abc', '123')))
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+        kR(self.pt.UP_ARROW)
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          ((), ('xyz', 'abc', '123')))
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+        for i in range(4):
+            kR(self.pt.DOWN_ARROW)
+        self.assertEqual(self.p.currentHistoryBuffer(),
+                          (('xyz', 'abc', '123'), ()))
+
+    def test_home(self):
+        """
+        When L{HistoricRecvLine} receives a HOME keystroke it moves the
+        cursor to the beginning of the current line buffer.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+
+        for ch in 'hello, world':
+            kR(ch)
+        self.assertEqual(self.p.currentLineBuffer(), ('hello, world', ''))
+
+        kR(self.pt.HOME)
+        self.assertEqual(self.p.currentLineBuffer(), ('', 'hello, world'))
+
+    def test_end(self):
+        """
+        When L{HistoricRecvLine} receives an END keystroke it moves the cursor
+        to the end of the current line buffer.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+
+        for ch in 'hello, world':
+            kR(ch)
+        self.assertEqual(self.p.currentLineBuffer(), ('hello, world', ''))
+
+        kR(self.pt.HOME)
+        kR(self.pt.END)
+        self.assertEqual(self.p.currentLineBuffer(), ('hello, world', ''))
+
+    def test_backspace(self):
+        """
+        When L{HistoricRecvLine} receives a BACKSPACE keystroke it deletes
+        the character immediately before the cursor.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+
+        for ch in 'xyz':
+            kR(ch)
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+        kR(self.pt.BACKSPACE)
+        self.assertEqual(self.p.currentLineBuffer(), ('xy', ''))
+
+        kR(self.pt.LEFT_ARROW)
+        kR(self.pt.BACKSPACE)
+        self.assertEqual(self.p.currentLineBuffer(), ('', 'y'))
+
+        kR(self.pt.BACKSPACE)
+        self.assertEqual(self.p.currentLineBuffer(), ('', 'y'))
+
+    def test_delete(self):
+        """
+        When L{HistoricRecvLine} receives a DELETE keystroke, it
+        deletes the character immediately after the cursor.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+
+        for ch in 'xyz':
+            kR(ch)
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+        kR(self.pt.DELETE)
+        self.assertEqual(self.p.currentLineBuffer(), ('xyz', ''))
+
+        kR(self.pt.LEFT_ARROW)
+        kR(self.pt.DELETE)
+        self.assertEqual(self.p.currentLineBuffer(), ('xy', ''))
+
+        kR(self.pt.LEFT_ARROW)
+        kR(self.pt.DELETE)
+        self.assertEqual(self.p.currentLineBuffer(), ('x', ''))
+
+        kR(self.pt.LEFT_ARROW)
+        kR(self.pt.DELETE)
+        self.assertEqual(self.p.currentLineBuffer(), ('', ''))
+
+        kR(self.pt.DELETE)
+        self.assertEqual(self.p.currentLineBuffer(), ('', ''))
+
+    def test_insert(self):
+        """
+        When not in INSERT mode, L{HistoricRecvLine} inserts the typed
+        character at the cursor before the next character.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+
+        for ch in 'xyz':
+            kR(ch)
+
+        kR(self.pt.LEFT_ARROW)
+        kR('A')
+        self.assertEqual(self.p.currentLineBuffer(), ('xyA', 'z'))
+
+        kR(self.pt.LEFT_ARROW)
+        kR('B')
+        self.assertEqual(self.p.currentLineBuffer(), ('xyB', 'Az'))
+
+    def test_typeover(self):
+        """
+        When in INSERT mode and upon receiving a keystroke with a printable
+        character, L{HistoricRecvLine} replaces the character at
+        the cursor with the typed character rather than inserting before.
+        Ah, the ironies of INSERT mode.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+
+        for ch in 'xyz':
+            kR(ch)
+
+        kR(self.pt.INSERT)
+
+        kR(self.pt.LEFT_ARROW)
+        kR('A')
+        self.assertEqual(self.p.currentLineBuffer(), ('xyA', ''))
+
+        kR(self.pt.LEFT_ARROW)
+        kR('B')
+        self.assertEqual(self.p.currentLineBuffer(), ('xyB', ''))
+
+
+    def test_unprintableCharacters(self):
+        """
+        When L{HistoricRecvLine} receives a keystroke for an unprintable
+        function key with no assigned behavior, the line buffer is unmodified.
+        """
+        kR = lambda ch: self.p.keystrokeReceived(ch, None)
+        pt = self.pt
+
+        for ch in (pt.F1, pt.F2, pt.F3, pt.F4, pt.F5, pt.F6, pt.F7, pt.F8,
+                   pt.F9, pt.F10, pt.F11, pt.F12, pt.PGUP, pt.PGDN):
+            kR(ch)
+            self.assertEqual(self.p.currentLineBuffer(), ('', ''))
+
+
+from twisted.conch import telnet
+from twisted.conch.insults import helper
+from twisted.protocols import loopback
+
+class EchoServer(recvline.HistoricRecvLine):
+    def lineReceived(self, line):
+        self.terminal.write(line + '\n' + self.ps[self.pn])
+
+# An insults API for this would be nice.
+left = "\x1b[D"
+right = "\x1b[C"
+up = "\x1b[A"
+down = "\x1b[B"
+insert = "\x1b[2~"
+home = "\x1b[1~"
+delete = "\x1b[3~"
+end = "\x1b[4~"
+backspace = "\x7f"
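+# Descriptive note: these are the raw VT102/ANSI escape sequences that the
+# loopback tests feed through _testwrite() to simulate keystrokes; for
+# example "\x1b[D" is the CSI sequence a terminal sends for the left-arrow
+# key.  Test input is built by plain concatenation, roughly:
+#
+#   insert + 'first line' + left * 4 + "xxxx\ndone"
+#
+# as in RecvlineLoopbackMixin.testLeftArrow below.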
+
+from twisted.cred import checkers
+
+try:
+    from twisted.conch.ssh import userauth, transport, channel, connection, session
+    from twisted.conch.manhole_ssh import TerminalUser, TerminalSession, TerminalRealm, TerminalSessionTransport, ConchFactory
+except ImportError:
+    ssh = False
+else:
+    ssh = True
+    class SessionChannel(channel.SSHChannel):
+        name = 'session'
+
+        def __init__(self, protocolFactory, protocolArgs, protocolKwArgs, width, height, *a, **kw):
+            channel.SSHChannel.__init__(self, *a, **kw)
+
+            self.protocolFactory = protocolFactory
+            self.protocolArgs = protocolArgs
+            self.protocolKwArgs = protocolKwArgs
+
+            self.width = width
+            self.height = height
+
+        def channelOpen(self, data):
+            term = session.packRequest_pty_req("vt102", (self.height, self.width, 0, 0), '')
+            self.conn.sendRequest(self, 'pty-req', term)
+            self.conn.sendRequest(self, 'shell', '')
+
+            self._protocolInstance = self.protocolFactory(*self.protocolArgs, **self.protocolKwArgs)
+            self._protocolInstance.factory = self
+            self._protocolInstance.makeConnection(self)
+
+        def closed(self):
+            self._protocolInstance.connectionLost(error.ConnectionDone())
+
+        def dataReceived(self, data):
+            self._protocolInstance.dataReceived(data)
+
+    class TestConnection(connection.SSHConnection):
+        def __init__(self, protocolFactory, protocolArgs, protocolKwArgs, width, height, *a, **kw):
+            connection.SSHConnection.__init__(self, *a, **kw)
+
+            self.protocolFactory = protocolFactory
+            self.protocolArgs = protocolArgs
+            self.protocolKwArgs = protocolKwArgs
+
+            self.width = width
+            self.height = height
+
+        def serviceStarted(self):
+            self.__channel = SessionChannel(self.protocolFactory, self.protocolArgs, self.protocolKwArgs, self.width, self.height)
+            self.openChannel(self.__channel)
+
+        def write(self, bytes):
+            return self.__channel.write(bytes)
+
+    class TestAuth(userauth.SSHUserAuthClient):
+        def __init__(self, username, password, *a, **kw):
+            userauth.SSHUserAuthClient.__init__(self, username, *a, **kw)
+            self.password = password
+
+        def getPassword(self):
+            return defer.succeed(self.password)
+
+    class TestTransport(transport.SSHClientTransport):
+        def __init__(self, protocolFactory, protocolArgs, protocolKwArgs, username, password, width, height, *a, **kw):
+            # transport.SSHClientTransport.__init__(self, *a, **kw)
+            self.protocolFactory = protocolFactory
+            self.protocolArgs = protocolArgs
+            self.protocolKwArgs = protocolKwArgs
+            self.username = username
+            self.password = password
+            self.width = width
+            self.height = height
+
+        def verifyHostKey(self, hostKey, fingerprint):
+            return defer.succeed(True)
+
+        def connectionSecure(self):
+            self.__connection = TestConnection(self.protocolFactory, self.protocolArgs, self.protocolKwArgs, self.width, self.height)
+            self.requestService(
+                TestAuth(self.username, self.password, self.__connection))
+
+        def write(self, bytes):
+            return self.__connection.write(bytes)
+
+    class TestSessionTransport(TerminalSessionTransport):
+        def protocolFactory(self):
+            return self.avatar.conn.transport.factory.serverProtocol()
+
+    class TestSession(TerminalSession):
+        transportFactory = TestSessionTransport
+
+    class TestUser(TerminalUser):
+        pass
+
+    components.registerAdapter(TestSession, TestUser, session.ISession)
+
+
+class LoopbackRelay(loopback.LoopbackRelay):
+    clearCall = None
+
+    def logPrefix(self):
+        return "LoopbackRelay(%r)" % (self.target.__class__.__name__,)
+
+    def write(self, bytes):
+        loopback.LoopbackRelay.write(self, bytes)
+        if self.clearCall is not None:
+            self.clearCall.cancel()
+
+        from twisted.internet import reactor
+        self.clearCall = reactor.callLater(0, self._clearBuffer)
+
+    def _clearBuffer(self):
+        self.clearCall = None
+        loopback.LoopbackRelay.clearBuffer(self)
+
+
+class NotifyingExpectableBuffer(helper.ExpectableBuffer):
+    def __init__(self):
+        self.onConnection = defer.Deferred()
+        self.onDisconnection = defer.Deferred()
+
+    def connectionMade(self):
+        helper.ExpectableBuffer.connectionMade(self)
+        self.onConnection.callback(self)
+
+    def connectionLost(self, reason):
+        self.onDisconnection.errback(reason)
+
+
+class _BaseMixin:
+    WIDTH = 80
+    HEIGHT = 24
+
+    def _assertBuffer(self, lines):
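+        # Descriptive note: str(self.recvlineClient) renders the emulated
+        # terminal screen as text, so this compares what a user would
+        # actually see against the expected lines (padded with blanks);
+        # a little surrounding context is reported on any mismatch.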
+        receivedLines = str(self.recvlineClient).splitlines()
+        expectedLines = lines + ([''] * (self.HEIGHT - len(lines) - 1))
+        self.assertEqual(len(receivedLines), len(expectedLines))
+        for i in range(len(receivedLines)):
+            self.assertEqual(
+                receivedLines[i], expectedLines[i],
+                str(receivedLines[max(0, i-1):i+1]) +
+                " != " +
+                str(expectedLines[max(0, i-1):i+1]))
+
+    def _trivialTest(self, input, output):
+        done = self.recvlineClient.expect("done")
+
+        self._testwrite(input)
+
+        def finished(ign):
+            self._assertBuffer(output)
+
+        return done.addCallback(finished)
+
+
+class _SSHMixin(_BaseMixin):
+    def setUp(self):
+        if not ssh:
+            raise unittest.SkipTest("Crypto requirements missing, can't run historic recvline tests over ssh")
+
+        u, p = 'testuser', 'testpass'
+        rlm = TerminalRealm()
+        rlm.userFactory = TestUser
+        rlm.chainedProtocolFactory = lambda: insultsServer
+
+        ptl = portal.Portal(
+            rlm,
+            [checkers.InMemoryUsernamePasswordDatabaseDontUse(**{u: p})])
+        sshFactory = ConchFactory(ptl)
+        sshFactory.serverProtocol = self.serverProtocol
+        sshFactory.startFactory()
+
+        recvlineServer = self.serverProtocol()
+        insultsServer = insults.ServerProtocol(lambda: recvlineServer)
+        sshServer = sshFactory.buildProtocol(None)
+        clientTransport = LoopbackRelay(sshServer)
+
+        recvlineClient = NotifyingExpectableBuffer()
+        insultsClient = insults.ClientProtocol(lambda: recvlineClient)
+        sshClient = TestTransport(lambda: insultsClient, (), {}, u, p, self.WIDTH, self.HEIGHT)
+        serverTransport = LoopbackRelay(sshClient)
+
+        sshClient.makeConnection(clientTransport)
+        sshServer.makeConnection(serverTransport)
+
+        self.recvlineClient = recvlineClient
+        self.sshClient = sshClient
+        self.sshServer = sshServer
+        self.clientTransport = clientTransport
+        self.serverTransport = serverTransport
+
+        return recvlineClient.onConnection
+
+    def _testwrite(self, bytes):
+        self.sshClient.write(bytes)
+
+from twisted.conch.test import test_telnet
+
+class TestInsultsClientProtocol(insults.ClientProtocol,
+                                test_telnet.TestProtocol):
+    pass
+
+
+class TestInsultsServerProtocol(insults.ServerProtocol,
+                                test_telnet.TestProtocol):
+    pass
+
+class _TelnetMixin(_BaseMixin):
+    def setUp(self):
+        recvlineServer = self.serverProtocol()
+        insultsServer = TestInsultsServerProtocol(lambda: recvlineServer)
+        telnetServer = telnet.TelnetTransport(lambda: insultsServer)
+        clientTransport = LoopbackRelay(telnetServer)
+
+        recvlineClient = NotifyingExpectableBuffer()
+        insultsClient = TestInsultsClientProtocol(lambda: recvlineClient)
+        telnetClient = telnet.TelnetTransport(lambda: insultsClient)
+        serverTransport = LoopbackRelay(telnetClient)
+
+        telnetClient.makeConnection(clientTransport)
+        telnetServer.makeConnection(serverTransport)
+
+        serverTransport.clearBuffer()
+        clientTransport.clearBuffer()
+
+        self.recvlineClient = recvlineClient
+        self.telnetClient = telnetClient
+        self.clientTransport = clientTransport
+        self.serverTransport = serverTransport
+
+        return recvlineClient.onConnection
+
+    def _testwrite(self, bytes):
+        self.telnetClient.write(bytes)
+
+try:
+    from twisted.conch import stdio
+except ImportError:
+    stdio = None
+
+class _StdioMixin(_BaseMixin):
+    def setUp(self):
+        # A memory-only terminal emulator, into which the server will
+        # write things and make other state changes.  What ends up
+        # here is basically what a user would have seen on their
+        # screen.
+        testTerminal = NotifyingExpectableBuffer()
+
+        # An insults client protocol which will translate bytes
+        # received from the child process into keystroke commands for
+        # an ITerminalProtocol.
+        insultsClient = insults.ClientProtocol(lambda: testTerminal)
+
+        # A process protocol which will translate stdout and stderr
+        # received from the child process to dataReceived calls and
+        # error reporting on an insults client protocol.
+        processClient = stdio.TerminalProcessProtocol(insultsClient)
+
+        # Run twisted/conch/stdio.py with the name of a class
+        # implementing ITerminalProtocol.  This class will be used to
+        # handle bytes we send to the child process.
+        exe = sys.executable
+        module = stdio.__file__
+        if module.endswith('.pyc') or module.endswith('.pyo'):
+            module = module[:-1]
+        args = [exe, module, reflect.qual(self.serverProtocol)]
+        env = os.environ.copy()
+        env["PYTHONPATH"] = os.pathsep.join(sys.path)
+
+        from twisted.internet import reactor
+        clientTransport = reactor.spawnProcess(processClient, exe, args,
+                                               env=env, usePTY=True)
+
+        self.recvlineClient = self.testTerminal = testTerminal
+        self.processClient = processClient
+        self.clientTransport = clientTransport
+
+        # Wait for the process protocol and test terminal to become
+        # connected before proceeding.  The former should always
+        # happen first, but it doesn't hurt to be safe.
+        return defer.gatherResults(filter(None, [
+            processClient.onConnection,
+            testTerminal.expect(">>> ")]))
+
+    def tearDown(self):
+        # Kill the child process.  We're done with it.
+        try:
+            self.clientTransport.signalProcess("KILL")
+        except (error.ProcessExitedAlready, OSError):
+            pass
+        def trap(failure):
+            failure.trap(error.ProcessTerminated)
+            self.assertEqual(failure.value.exitCode, None)
+            self.assertEqual(failure.value.status, 9)
+        return self.testTerminal.onDisconnection.addErrback(trap)
+
+    def _testwrite(self, bytes):
+        self.clientTransport.write(bytes)
+
+class RecvlineLoopbackMixin:
+    serverProtocol = EchoServer
+
+    def testSimple(self):
+        return self._trivialTest(
+            "first line\ndone",
+            [">>> first line",
+             "first line",
+             ">>> done"])
+
+    def testLeftArrow(self):
+        return self._trivialTest(
+            insert + 'first line' + left * 4 + "xxxx\ndone",
+            [">>> first xxxx",
+             "first xxxx",
+             ">>> done"])
+
+    def testRightArrow(self):
+        return self._trivialTest(
+            insert + 'right line' + left * 4 + right * 2 + "xx\ndone",
+            [">>> right lixx",
+             "right lixx",
+            ">>> done"])
+
+    def testBackspace(self):
+        return self._trivialTest(
+            "second line" + backspace * 4 + "xxxx\ndone",
+            [">>> second xxxx",
+             "second xxxx",
+             ">>> done"])
+
+    def testDelete(self):
+        return self._trivialTest(
+            "delete xxxx" + left * 4 + delete * 4 + "line\ndone",
+            [">>> delete line",
+             "delete line",
+             ">>> done"])
+
+    def testInsert(self):
+        return self._trivialTest(
+            "third ine" + left * 3 + "l\ndone",
+            [">>> third line",
+             "third line",
+             ">>> done"])
+
+    def testTypeover(self):
+        return self._trivialTest(
+            "fourth xine" + left * 4 + insert + "l\ndone",
+            [">>> fourth line",
+             "fourth line",
+             ">>> done"])
+
+    def testHome(self):
+        return self._trivialTest(
+            insert + "blah line" + home + "home\ndone",
+            [">>> home line",
+             "home line",
+             ">>> done"])
+
+    def testEnd(self):
+        return self._trivialTest(
+            "end " + left * 4 + end + "line\ndone",
+            [">>> end line",
+             "end line",
+             ">>> done"])
+
+class RecvlineLoopbackTelnet(_TelnetMixin, unittest.TestCase, RecvlineLoopbackMixin):
+    pass
+
+class RecvlineLoopbackSSH(_SSHMixin, unittest.TestCase, RecvlineLoopbackMixin):
+    pass
+
+class RecvlineLoopbackStdio(_StdioMixin, unittest.TestCase, RecvlineLoopbackMixin):
+    if stdio is None:
+        skip = "Terminal requirements missing, can't run recvline tests over stdio"
+
+
+class HistoricRecvlineLoopbackMixin:
+    serverProtocol = EchoServer
+
+    def testUpArrow(self):
+        return self._trivialTest(
+            "first line\n" + up + "\ndone",
+            [">>> first line",
+             "first line",
+             ">>> first line",
+             "first line",
+             ">>> done"])
+
+    def testDownArrow(self):
+        return self._trivialTest(
+            "first line\nsecond line\n" + up * 2 + down + "\ndone",
+            [">>> first line",
+             "first line",
+             ">>> second line",
+             "second line",
+             ">>> second line",
+             "second line",
+             ">>> done"])
+
+class HistoricRecvlineLoopbackTelnet(_TelnetMixin, unittest.TestCase, HistoricRecvlineLoopbackMixin):
+    pass
+
+class HistoricRecvlineLoopbackSSH(_SSHMixin, unittest.TestCase, HistoricRecvlineLoopbackMixin):
+    pass
+
+class HistoricRecvlineLoopbackStdio(_StdioMixin, unittest.TestCase, HistoricRecvlineLoopbackMixin):
+    if stdio is None:
+        skip = "Terminal requirements missing, can't run historic recvline tests over stdio"
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_scripts.py b/ThirdParty/Twisted/twisted/conch/test/test_scripts.py
new file mode 100644
index 0000000..ae90e82
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_scripts.py
@@ -0,0 +1,82 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the command-line interfaces to conch.
+"""
+
+try:
+    import pyasn1
+except ImportError:
+    pyasn1Skip = "Cannot run without PyASN1"
+else:
+    pyasn1Skip = None
+
+try:
+    import Crypto
+except ImportError:
+    cryptoSkip = "can't run w/o PyCrypto"
+else:
+    cryptoSkip = None
+
+try:
+    import tty
+except ImportError:
+    ttySkip = "can't run w/o tty"
+else:
+    ttySkip = None
+
+try:
+    import Tkinter
+except ImportError:
+    tkskip = "can't run w/o Tkinter"
+else:
+    try:
+        Tkinter.Tk().destroy()
+    except Tkinter.TclError, e:
+        tkskip = "Can't test Tkinter: " + str(e)
+    else:
+        tkskip = None
+
+from twisted.trial.unittest import TestCase
+from twisted.scripts.test.test_scripts import ScriptTestsMixin
+from twisted.python.test.test_shellcomp import ZshScriptTestMixin
+
+
+
+class ScriptTests(TestCase, ScriptTestsMixin):
+    """
+    Tests for the Conch scripts.
+    """
+    skip = pyasn1Skip or cryptoSkip
+
+
+    def test_conch(self):
+        self.scriptTest("conch/conch")
+    test_conch.skip = ttySkip or skip
+
+
+    def test_cftp(self):
+        self.scriptTest("conch/cftp")
+    test_cftp.skip = ttySkip or skip
+
+
+    def test_ckeygen(self):
+        self.scriptTest("conch/ckeygen")
+
+
+    def test_tkconch(self):
+        self.scriptTest("conch/tkconch")
+    test_tkconch.skip = tkskip or skip
+
+
+
+class ZshIntegrationTestCase(TestCase, ZshScriptTestMixin):
+    """
+    Test that zsh completion functions are generated without error
+    """
+    generateFor = [('conch', 'twisted.conch.scripts.conch.ClientOptions'),
+                   ('cftp', 'twisted.conch.scripts.cftp.ClientOptions'),
+                   ('ckeygen', 'twisted.conch.scripts.ckeygen.GeneralOptions'),
+                   ('tkconch', 'twisted.conch.scripts.tkconch.GeneralOptions'),
+                   ]
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_session.py b/ThirdParty/Twisted/twisted/conch/test/test_session.py
new file mode 100644
index 0000000..4db1629
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_session.py
@@ -0,0 +1,1256 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the 'session' channel implementation in twisted.conch.ssh.session.
+
+See also RFC 4254.
+"""
+
+import os, signal, sys, struct
+
+from zope.interface import implements
+
+from twisted.internet.address import IPv4Address
+from twisted.internet.error import ProcessTerminated, ProcessDone
+from twisted.python.failure import Failure
+from twisted.conch.ssh import common, session, connection
+from twisted.internet import defer, protocol, error
+from twisted.python import components, failure
+from twisted.trial import unittest
+
+
+
+class SubsystemOnlyAvatar(object):
+    """
+    A stub class representing an avatar that is only useful for
+    getting a subsystem.
+    """
+
+
+    def lookupSubsystem(self, name, data):
+        """
+        If the other side requests the 'subsystem' subsystem, allow it by
+        returning a MockProtocol to implement it.  Otherwise, return
+        None which is interpreted by SSHSession as a failure.
+        """
+        if name == 'subsystem':
+            return MockProtocol()
+
+
+
+class StubAvatar:
+    """
+    A stub class representing the avatar of the authenticated user.  It is
+    adapted to the I{ISession} interface by L{StubSessionForStubAvatar}.
+    """
+
+
+    def lookupSubsystem(self, name, data):
+        """
+        If the user requests the TestSubsystem subsystem, connect them to a
+        MockProtocol.  If they request anything else, None is returned, which
+        is interpreted by SSHSession as a failure.
+        """
+        if name == 'TestSubsystem':
+            self.subsystem = MockProtocol()
+            self.subsystem.packetData = data
+            return self.subsystem
+
+
+
+class StubSessionForStubAvatar(object):
+    """
+    A stub ISession implementation for our StubAvatar.  The instance
+    variables generally keep track of method invocations so that we can test
+    that the methods were called.
+
+    @ivar avatar: the L{StubAvatar} we are adapting.
+    @ivar ptyRequest: if present, the terminal, window size, and modes passed
+        to the getPty method.
+    @ivar windowChange: if present, the window size passed to the
+        windowChanged method.
+    @ivar shellProtocol: if present, the L{SSHSessionProcessProtocol} passed
+        to the openShell method.
+    @ivar shellTransport: if present, the L{EchoTransport} connected to
+        shellProtocol.
+    @ivar execProtocol: if present, the L{SSHSessionProcessProtocol} passed
+        to the execCommand method.
+    @ivar execTransport: if present, the L{EchoTransport} connected to
+        execProtocol.
+    @ivar execCommandLine: if present, the command line passed to the
+        execCommand method.
+    @ivar gotEOF: if present, an EOF message was received.
+    @ivar gotClosed: if present, a closed message was received.
+    """
+
+
+    implements(session.ISession)
+
+
+    def __init__(self, avatar):
+        """
+        Store the avatar we're adapting.
+        """
+        self.avatar = avatar
+        self.shellProtocol = None
+
+
+    def getPty(self, terminal, window, modes):
+        """
+        If the terminal is 'bad', fail.  Otherwise, store the information in
+        the ptyRequest variable.
+        """
+        if terminal != 'bad':
+            self.ptyRequest = (terminal, window, modes)
+        else:
+            raise RuntimeError('not getting a pty')
+
+
+    def windowChanged(self, window):
+        """
+        If all the window sizes are 0, fail.  Otherwise, store the size in the
+        windowChange variable.
+        """
+        if window == (0, 0, 0, 0):
+            raise RuntimeError('not changing the window size')
+        else:
+            self.windowChange = window
+
+
+    def openShell(self, pp):
+        """
+        If we have gotten a shell request before, fail.  Otherwise, store the
+        process protocol in the shellProtocol variable, connect it to the
+        EchoTransport and store that as shellTransport.
+        """
+        if self.shellProtocol is not None:
+            raise RuntimeError('not getting a shell this time')
+        else:
+            self.shellProtocol = pp
+            self.shellTransport = EchoTransport(pp)
+
+
+    def execCommand(self, pp, command):
+        """
+        Store the command line.  If the command is 'success', store the
+        process protocol; if it starts with 'repeat', also connect it to an
+        EchoTransport and echo the remainder of the command back.  Otherwise,
+        raise an error.
+        """
+        self.execCommandLine = command
+        if command == 'success':
+            self.execProtocol = pp
+        elif command[:6] == 'repeat':
+            self.execProtocol = pp
+            self.execTransport = EchoTransport(pp)
+            pp.outReceived(command[7:])
+        else:
+            raise RuntimeError('not getting a command')
+
+
+    def eofReceived(self):
+        """
+        Note that EOF has been received.
+        """
+        self.gotEOF = True
+
+
+    def closed(self):
+        """
+        Note that close has been received.
+        """
+        self.gotClosed = True
+
+
+
+components.registerAdapter(StubSessionForStubAvatar, StubAvatar,
+        session.ISession)
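+# Descriptive note: with this adapter registered, session.ISession(avatar)
+# on a StubAvatar instance returns a StubSessionForStubAvatar wrapping it;
+# this is how the SSHSession code under test obtains its ISession, e.g.:
+#
+#   avatar = StubAvatar()
+#   stubSession = session.ISession(avatar)   # a StubSessionForStubAvatar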
+
+
+
+
+class MockProcessProtocol(protocol.ProcessProtocol):
+    """
+    A mock ProcessProtocol which echoes back data sent to it and
+    appends a tilde.  The tilde is appended so the tests can verify that
+    we received and processed the data.
+
+    @ivar packetData: C{str} of data to be sent when the connection is made.
+    @ivar data: a C{str} of data received.
+    @ivar err: a C{str} of error data received.
+    @ivar inConnectionOpen: True if the input side is open.
+    @ivar outConnectionOpen: True if the output side is open.
+    @ivar errConnectionOpen: True if the error side is open.
+    @ivar ended: False if the protocol has not ended, a C{Failure} if the
+        process has ended.
+    """
+    packetData = ''
+
+
+    def connectionMade(self):
+        """
+        Set up variables.
+        """
+        self.data = ''
+        self.err = ''
+        self.inConnectionOpen = True
+        self.outConnectionOpen = True
+        self.errConnectionOpen = True
+        self.ended = False
+        if self.packetData:
+            self.outReceived(self.packetData)
+
+
+    def outReceived(self, data):
+        """
+        Data was received.  Store it and echo it back with a tilde.
+        """
+        self.data += data
+        if self.transport is not None:
+            self.transport.write(data + '~')
+
+
+    def errReceived(self, data):
+        """
+        Error data was received.  Store it and echo it back backwards.
+        """
+        self.err += data
+        self.transport.write(data[::-1])
+
+
+    def inConnectionLost(self):
+        """
+        Close the input side.
+        """
+        self.inConnectionOpen = False
+
+
+    def outConnectionLost(self):
+        """
+        Close the output side.
+        """
+        self.outConnectionOpen = False
+
+
+    def errConnectionLost(self):
+        """
+        Close the error side.
+        """
+        self.errConnectionOpen = False
+
+
+    def processEnded(self, reason):
+        """
+        End the process and store the reason.
+        """
+        self.ended = reason
+
+
+
+class EchoTransport:
+    """
+    A transport for a ProcessProtocol which echoes data that is sent to it
+    with a Windows newline (CR LF) appended.  If a null byte is in the data,
+    disconnect.  When we are asked to disconnect, disconnect the
+    C{ProcessProtocol} with a 0 exit code.
+
+    @ivar proto: the C{ProcessProtocol} connected to us.
+    @ivar data: a C{str} of data written to us.
+    """
+
+
+    def __init__(self, processProtocol):
+        """
+        Initialize our instance variables.
+
+        @param processProtocol: a C{ProcessProtocol} to connect to ourself.
+        """
+        self.proto = processProtocol
+        self.closed = False
+        self.data = ''
+        processProtocol.makeConnection(self)
+
+
+    def write(self, data):
+        """
+        We got some data.  Give it back to our C{ProcessProtocol} with
+        a newline attached.  Disconnect if there's a null byte.
+        """
+        self.data += data
+        self.proto.outReceived(data)
+        self.proto.outReceived('\r\n')
+        if '\x00' in data: # mimic 'exit' for the shell test
+            self.loseConnection()
+
+
+    def loseConnection(self):
+        """
+        If we're asked to disconnect (and we haven't already) shut down
+        the C{ProcessProtocol} with a 0 exit code.
+        """
+        if self.closed:
+            return
+        self.closed = 1
+        self.proto.inConnectionLost()
+        self.proto.outConnectionLost()
+        self.proto.errConnectionLost()
+        self.proto.processEnded(failure.Failure(
+                error.ProcessTerminated(0, None, None)))
+
+
+
+class MockProtocol(protocol.Protocol):
+    """
+    A sample Protocol which stores the data passed to it.
+
+    @ivar packetData: a C{str} of data to be sent when the connection is made.
+    @ivar data: a C{str} of the data passed to us.
+    @ivar open: True if the channel is open.
+    @ivar reason: if not None, the reason the protocol was closed.
+    """
+    packetData = ''
+
+
+    def connectionMade(self):
+        """
+        Set up the instance variables.  If we have any packetData, send it
+        along.
+        """
+
+        self.data = ''
+        self.open = True
+        self.reason = None
+        if self.packetData:
+            self.dataReceived(self.packetData)
+
+
+    def dataReceived(self, data):
+        """
+        Store the received data and write it back with a tilde appended.
+        The tilde is appended so that the tests can verify that we processed
+        the data.
+        """
+        self.data += data
+        if self.transport is not None:
+            self.transport.write(data + '~')
+
+
+    def connectionLost(self, reason):
+        """
+        Close the protocol and store the reason.
+        """
+        self.open = False
+        self.reason = reason
+
+
+
+class StubConnection(object):
+    """
+    A stub for twisted.conch.ssh.connection.SSHConnection.  Record the data
+    that channels send, and when they try to close the connection.
+
+    @ivar data: a C{dict} mapping C{SSHChannel}s to a C{list} of C{str} of data
+        they sent.
+    @ivar extData: a C{dict} mapping L{SSHChannel}s to a C{list} of C{tuple} of
+        (C{int}, C{str}) of extended data they sent.
+    @ivar requests: a C{dict} mapping L{SSHChannel}s to a C{list} of C{tuple}
+        of (C{str}, C{str}) of channel requests they made.
+    @ivar eofs: a C{dict} mapping L{SSHChannel}s to C{true} if they have sent
+        an EOF.
+    @ivar closes: a C{dict} mapping L{SSHChannel}s to C{true} if they have sent
+        a close.
+    """
+
+
+    def __init__(self, transport=None):
+        """
+        Initialize our instance variables.
+        """
+        self.data = {}
+        self.extData = {}
+        self.requests = {}
+        self.eofs = {}
+        self.closes = {}
+        self.transport = transport
+
+
+    def logPrefix(self):
+        """
+        Return our logging prefix.
+        """
+        return "MockConnection"
+
+
+    def sendData(self, channel, data):
+        """
+        Record the sent data.
+        """
+        self.data.setdefault(channel, []).append(data)
+
+
+    def sendExtendedData(self, channel, type, data):
+        """
+        Record the sent extended data.
+        """
+        self.extData.setdefault(channel, []).append((type, data))
+
+
+    def sendRequest(self, channel, request, data, wantReply=False):
+        """
+        Record the sent channel request.
+        """
+        self.requests.setdefault(channel, []).append((request, data,
+            wantReply))
+        if wantReply:
+            return defer.succeed(None)
+
+
+    def sendEOF(self, channel):
+        """
+        Record the sent EOF.
+        """
+        self.eofs[channel] = True
+
+
+    def sendClose(self, channel):
+        """
+        Record the sent close.
+        """
+        self.closes[channel] = True
+
+
+
+class StubTransport:
+    """
+    A stub transport which records the data written.
+
+    @ivar buf: the data sent to the transport.
+    @type buf: C{str}
+
+    @ivar close: a flag indicating whether the transport has been closed.
+    @type close: C{bool}
+    """
+
+    buf = ''
+    close = False
+
+
+    def getPeer(self):
+        """
+        Return an arbitrary L{IAddress}.
+        """
+        return IPv4Address('TCP', 'remotehost', 8888)
+
+
+    def getHost(self):
+        """
+        Return an arbitrary L{IAddress}.
+        """
+        return IPv4Address('TCP', 'localhost', 9999)
+
+
+    def write(self, data):
+        """
+        Record data in the buffer.
+        """
+        self.buf += data
+
+
+    def loseConnection(self):
+        """
+        Note that the connection was closed.
+        """
+        self.close = True
+
+
+class StubTransportWithWriteErr(StubTransport):
+    """
+    A version of StubTransport which records the error data sent to it.
+
+    @ivar err: the extended data sent to the transport.
+    @type err: C{str}
+    """
+
+    err = ''
+
+
+    def writeErr(self, data):
+        """
+        Record the extended data in the buffer.  This was an old interface
+        that allowed the Transports from ISession.openShell() or
+        ISession.execCommand() to receive extended data from the client.
+        """
+        self.err += data
+
+
+
+class StubClient(object):
+    """
+    A stub class representing the client to an SSHSession.
+
+    @ivar transport: A L{StubTransport} object which keeps track of the data
+        passed to it.
+    """
+
+
+    def __init__(self):
+        self.transport = StubTransportWithWriteErr()
+
+
+
+class SessionInterfaceTestCase(unittest.TestCase):
+    """
+    Tests for the SSHSession class interface.  This interface is not ideal, but
+    it is tested in order to maintain backwards compatibility.
+    """
+
+
+    def setUp(self):
+        """
+        Make an SSHSession object to test.  Give the channel some window
+        so that it's allowed to send packets.  500 and 100 are arbitrary
+        values.
+        """
+        self.session = session.SSHSession(remoteWindow=500,
+                remoteMaxPacket=100, conn=StubConnection(),
+                avatar=StubAvatar())
+
+
+    def assertSessionIsStubSession(self):
+        """
+        Asserts that self.session.session is an instance of
+        StubSessionForStubAvatar.
+        """
+        self.assertIsInstance(self.session.session,
+                              StubSessionForStubAvatar)
+
+
+    def test_init(self):
+        """
+        SSHSession initializes its buffer (buf), client, and ISession adapter.
+        The avatar should not need to be adaptable to an ISession immediately.
+        """
+        s = session.SSHSession(avatar=object) # use object because it doesn't
+                                              # have an adapter
+        self.assertEqual(s.buf, '')
+        self.assertIdentical(s.client, None)
+        self.assertIdentical(s.session, None)
+
+
+    def test_client_dataReceived(self):
+        """
+        SSHSession.dataReceived() passes data along to a client.  If the data
+        comes before there is a client, the data should be discarded.
+        """
+        self.session.dataReceived('1')
+        self.session.client = StubClient()
+        self.session.dataReceived('2')
+        self.assertEqual(self.session.client.transport.buf, '2')
+
+    def test_client_extReceived(self):
+        """
+        SSHSession.extReceived() passes data of type EXTENDED_DATA_STDERR along
+        to the client.  If the data comes before there is a client, or if the
+        data is not of type EXTENDED_DATA_STDERR, it is discarded.
+        """
+        self.session.extReceived(connection.EXTENDED_DATA_STDERR, '1')
+        self.session.extReceived(255, '2') # 255 is arbitrary
+        self.session.client = StubClient()
+        self.session.extReceived(connection.EXTENDED_DATA_STDERR, '3')
+        self.assertEqual(self.session.client.transport.err, '3')
+
+
+    def test_client_extReceivedWithoutWriteErr(self):
+        """
+        SSHSession.extReceived() should handle the case where the transport
+        on the client doesn't have a writeErr method.
+        """
+        client = self.session.client = StubClient()
+        client.transport = StubTransport() # doesn't have writeErr
+
+        # should not raise an error
+        self.session.extReceived(connection.EXTENDED_DATA_STDERR, 'ignored')
+
+
+
+    def test_client_closed(self):
+        """
+        SSHSession.closed() should tell the transport connected to the client
+        that the connection was lost.
+        """
+        self.session.client = StubClient()
+        self.session.closed()
+        self.assertTrue(self.session.client.transport.close)
+        self.session.client.transport.close = False
+
+
+    def test_badSubsystemDoesNotCreateClient(self):
+        """
+        When a subsystem request fails, SSHSession.client should not be set.
+        """
+        ret = self.session.requestReceived(
+            'subsystem', common.NS('BadSubsystem'))
+        self.assertFalse(ret)
+        self.assertIdentical(self.session.client, None)
+
+
+    def test_lookupSubsystem(self):
+        """
+        When a client requests a subsystem, the SSHSession object should get
+        the subsystem by calling avatar.lookupSubsystem, and attach it as
+        the client.
+        """
+        ret = self.session.requestReceived(
+            'subsystem', common.NS('TestSubsystem') + 'data')
+        self.assertTrue(ret)
+        self.assertIsInstance(self.session.client, protocol.ProcessProtocol)
+        self.assertIdentical(self.session.client.transport.proto,
+                             self.session.avatar.subsystem)
+
+
+
+    def test_lookupSubsystemDoesNotNeedISession(self):
+        """
+        Previously, if one only wanted to implement a subsystem, an ISession
+        adapter wasn't needed because subsystems were looked up using the
+        lookupSubsystem method on the avatar.
+        """
+        s = session.SSHSession(avatar=SubsystemOnlyAvatar(),
+                               conn=StubConnection())
+        ret = s.request_subsystem(
+            common.NS('subsystem') + 'data')
+        self.assertTrue(ret)
+        self.assertNotIdentical(s.client, None)
+        self.assertIdentical(s.conn.closes.get(s), None)
+        s.eofReceived()
+        self.assertTrue(s.conn.closes.get(s))
+        # these should not raise errors
+        s.loseConnection()
+        s.closed()
+
+
+    def test_lookupSubsystem_data(self):
+        """
+        After having looked up a subsystem, data should be passed along to the
+        client.  Additionally, subsystems were passed the entire request packet
+        as data, instead of just the additional data.
+
+        We check for the appended tilde to verify that the data passed
+        through the client.
+        """
+        #self.session.dataReceived('1')
+        # subsystems didn't get extended data
+        #self.session.extReceived(connection.EXTENDED_DATA_STDERR, '2')
+
+        self.session.requestReceived('subsystem',
+                                     common.NS('TestSubsystem') + 'data')
+
+        self.assertEqual(self.session.conn.data[self.session],
+                ['\x00\x00\x00\x0dTestSubsystemdata~'])
+        self.session.dataReceived('more data')
+        self.assertEqual(self.session.conn.data[self.session][-1],
+                'more data~')
+
+
+    def test_lookupSubsystem_closeReceived(self):
+        """
+        SSHSession.closeReceived() should send a close message to the remote
+        side.
+        """
+        self.session.requestReceived('subsystem',
+                                     common.NS('TestSubsystem') + 'data')
+
+        self.session.closeReceived()
+        self.assertTrue(self.session.conn.closes[self.session])
+
+
+    def assertRequestRaisedRuntimeError(self):
+        """
+        Assert that the request we just made raised a RuntimeError (and only a
+        RuntimeError).
+        """
+        errors = self.flushLoggedErrors(RuntimeError)
+        self.assertEqual(len(errors), 1, "Multiple RuntimeErrors raised: %s" %
+                          '\n'.join([repr(error) for error in errors]))
+        errors[0].trap(RuntimeError)
+
+
+    def test_requestShell(self):
+        """
+        When a client requests a shell, the SSHSession object should get
+        the shell by getting an ISession adapter for the avatar, then
+        calling openShell() with a ProcessProtocol to attach.
+        """
+        # gets a shell the first time
+        ret = self.session.requestReceived('shell', '')
+        self.assertTrue(ret)
+        self.assertSessionIsStubSession()
+        self.assertIsInstance(self.session.client,
+                              session.SSHSessionProcessProtocol)
+        self.assertIdentical(self.session.session.shellProtocol,
+                self.session.client)
+        # doesn't get a shell the second time
+        self.assertFalse(self.session.requestReceived('shell', ''))
+        self.assertRequestRaisedRuntimeError()
+
+
+    def test_requestShellWithData(self):
+        """
+        When a client executes a shell, it should be able to pass data
+        back and forth between the local and the remote side.
+        """
+        ret = self.session.requestReceived('shell', '')
+        self.assertTrue(ret)
+        self.assertSessionIsStubSession()
+        self.session.dataReceived('some data\x00')
+        self.assertEqual(self.session.session.shellTransport.data,
+                          'some data\x00')
+        self.assertEqual(self.session.conn.data[self.session],
+                          ['some data\x00', '\r\n'])
+        self.assertTrue(self.session.session.shellTransport.closed)
+        self.assertEqual(self.session.conn.requests[self.session],
+                          [('exit-status', '\x00\x00\x00\x00', False)])
+
+
+    def test_requestExec(self):
+        """
+        When a client requests a command, the SSHSession object should get
+        the command by getting an ISession adapter for the avatar, then
+        calling execCommand with a ProcessProtocol to attach and the
+        command line.
+        """
+        ret = self.session.requestReceived('exec',
+                                           common.NS('failure'))
+        self.assertFalse(ret)
+        self.assertRequestRaisedRuntimeError()
+        self.assertIdentical(self.session.client, None)
+
+        self.assertTrue(self.session.requestReceived('exec',
+                                                     common.NS('success')))
+        self.assertSessionIsStubSession()
+        self.assertIsInstance(self.session.client,
+                              session.SSHSessionProcessProtocol)
+        self.assertIdentical(self.session.session.execProtocol,
+                self.session.client)
+        self.assertEqual(self.session.session.execCommandLine,
+                'success')
+
+
+    def test_requestExecWithData(self):
+        """
+        When a client executes a command, it should be able to pass data
+        back and forth.
+        """
+        ret = self.session.requestReceived('exec',
+                                           common.NS('repeat hello'))
+        self.assertTrue(ret)
+        self.assertSessionIsStubSession()
+        self.session.dataReceived('some data')
+        self.assertEqual(self.session.session.execTransport.data, 'some data')
+        self.assertEqual(self.session.conn.data[self.session],
+                          ['hello', 'some data', '\r\n'])
+        self.session.eofReceived()
+        self.session.closeReceived()
+        self.session.closed()
+        self.assertTrue(self.session.session.execTransport.closed)
+        self.assertEqual(self.session.conn.requests[self.session],
+                          [('exit-status', '\x00\x00\x00\x00', False)])
+
+
+    def test_requestPty(self):
+        """
+        When a client requests a PTY, the SSHSession object should make
+        the request by getting an ISession adapter for the avatar, then
+        calling getPty with the terminal type, the window size, and any modes
+        the client gave us.
+        """
+        # 'bad' terminal type fails
+        ret = self.session.requestReceived(
+            'pty_req',  session.packRequest_pty_req(
+                'bad', (1, 2, 3, 4), ''))
+        self.assertFalse(ret)
+        self.assertSessionIsStubSession()
+        self.assertRequestRaisedRuntimeError()
+        # 'good' terminal type succeeds
+        self.assertTrue(self.session.requestReceived('pty_req',
+            session.packRequest_pty_req('good', (1, 2, 3, 4), '')))
+        self.assertEqual(self.session.session.ptyRequest,
+                ('good', (1, 2, 3, 4), []))
+
+
+    def test_requestWindowChange(self):
+        """
+        When the client requests to change the window size, the SSHSession
+        object should make the request by getting an ISession adapter for the
+        avatar, then calling windowChanged with the new window size.
+        """
+        ret = self.session.requestReceived(
+            'window_change',
+            session.packRequest_window_change((0, 0, 0, 0)))
+        self.assertFalse(ret)
+        self.assertRequestRaisedRuntimeError()
+        self.assertSessionIsStubSession()
+        self.assertTrue(self.session.requestReceived('window_change',
+            session.packRequest_window_change((1, 2, 3, 4))))
+        self.assertEqual(self.session.session.windowChange,
+                (1, 2, 3, 4))
+
+
+    def test_eofReceived(self):
+        """
+        When an EOF is received and an ISession adapter is present, it should
+        be notified of the EOF message.
+        """
+        self.session.session = session.ISession(self.session.avatar)
+        self.session.eofReceived()
+        self.assertTrue(self.session.session.gotEOF)
+
+
+    def test_closeReceived(self):
+        """
+        When a close is received, the session should send a close message.
+        """
+        ret = self.session.closeReceived()
+        self.assertIdentical(ret, None)
+        self.assertTrue(self.session.conn.closes[self.session])
+
+
+    def test_closed(self):
+        """
+        When a close is received and an ISession adapter is present, it should
+        be notified of the close message.
+        """
+        self.session.session = session.ISession(self.session.avatar)
+        self.session.closed()
+        self.assertTrue(self.session.session.gotClosed)
+
+
+
+class SessionWithNoAvatarTestCase(unittest.TestCase):
+    """
+    Test for the SSHSession interface.  Several of the methods (request_shell,
+    request_exec, request_pty_req, request_window_change) would create a
+    'session' instance variable from the avatar if one didn't exist when they
+    were called.
+    """
+
+
+    def setUp(self):
+        self.session = session.SSHSession()
+        self.session.avatar = StubAvatar()
+        self.assertIdentical(self.session.session, None)
+
+
+    def assertSessionProvidesISession(self):
+        """
+        self.session.session should provide I{ISession}.
+        """
+        self.assertTrue(session.ISession.providedBy(self.session.session),
+                        "ISession not provided by %r" % self.session.session)
+
+
+    def test_requestShellGetsSession(self):
+        """
+        If an ISession adapter isn't already present, request_shell should get
+        one.
+        """
+        self.session.requestReceived('shell', '')
+        self.assertSessionProvidesISession()
+
+
+    def test_requestExecGetsSession(self):
+        """
+        If an ISession adapter isn't already present, request_exec should get
+        one.
+        """
+        self.session.requestReceived('exec',
+                                     common.NS('success'))
+        self.assertSessionProvidesISession()
+
+
+    def test_requestPtyReqGetsSession(self):
+        """
+        If an ISession adapter isn't already present, request_pty_req should
+        get one.
+        """
+        self.session.requestReceived('pty_req',
+                                     session.packRequest_pty_req(
+                'term', (0, 0, 0, 0), ''))
+        self.assertSessionProvidesISession()
+
+
+    def test_requestWindowChangeGetsSession(self):
+        """
+        If an ISession adapter isn't already present, request_window_change
+        should get one.
+        """
+        self.session.requestReceived(
+            'window_change',
+            session.packRequest_window_change(
+                (1, 1, 1, 1)))
+        self.assertSessionProvidesISession()
+
+
+
+class WrappersTestCase(unittest.TestCase):
+    """
+    A test for the wrapProtocol and wrapProcessProtocol functions.
+    """
+
+    def test_wrapProtocol(self):
+        """
+        L{wrapProtocol}, when passed a L{Protocol}, should return something
+        whose write() and writeSequence() methods call the Protocol's
+        dataReceived() method and whose loseConnection() method calls its
+        connectionLost() method.
+        """
+        protocol = MockProtocol()
+        protocol.transport = StubTransport()
+        protocol.connectionMade()
+        wrapped = session.wrapProtocol(protocol)
+        wrapped.dataReceived('dataReceived')
+        self.assertEqual(protocol.transport.buf, 'dataReceived')
+        wrapped.write('data')
+        wrapped.writeSequence(['1', '2'])
+        wrapped.loseConnection()
+        self.assertEqual(protocol.data, 'data12')
+        protocol.reason.trap(error.ConnectionDone)
+
+    def test_wrapProcessProtocol_Protocol(self):
+        """
+        L{wrapProcessProtocol}, when passed a L{Protocol}, should return
+        something that follows the L{IProcessProtocol} interface, with
+        connectionMade() mapping to connectionMade(), outReceived() mapping to
+        dataReceived() and processEnded() mapping to connectionLost().
+        """
+        protocol = MockProtocol()
+        protocol.transport = StubTransport()
+        process_protocol = session.wrapProcessProtocol(protocol)
+        process_protocol.connectionMade()
+        process_protocol.outReceived('data')
+        self.assertEqual(protocol.transport.buf, 'data~')
+        process_protocol.processEnded(failure.Failure(
+            error.ProcessTerminated(0, None, None)))
+        protocol.reason.trap(error.ProcessTerminated)
+
+
+
+class TestHelpers(unittest.TestCase):
+    """
+    Tests for the 4 helper functions: parseRequest_* and packRequest_*.
+    """
+
+
+    def test_parseRequest_pty_req(self):
+        """
+        The payload of a pty-req message is::
+            string  terminal
+            uint32  columns
+            uint32  rows
+            uint32  x pixels
+            uint32  y pixels
+            string  modes
+
+        Modes are::
+            byte    mode number
+            uint32  mode value
+        """
+        self.assertEqual(session.parseRequest_pty_req(common.NS('xterm') +
+                                                       struct.pack('>4L',
+                                                                   1, 2, 3, 4)
+                                                       + common.NS(
+                    struct.pack('>BL', 5, 6))),
+                          ('xterm', (2, 1, 3, 4), [(5, 6)]))
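+
+
+    # An illustrative sketch of the payload layout documented above; the
+    # method name, the 80x24 'xterm' geometry, and the single (5, 6) mode
+    # pair are arbitrary illustrative choices.
+    def _examplePtyReqRoundTrip(self):
+        """
+        Pack a pty-req payload and parse it back.  The wire format carries
+        columns before rows, but both helpers work with (rows, columns,
+        x pixels, y pixels), so the geometry tuple round-trips unchanged and
+        the raw modes string comes back as a list of (mode, value) pairs.
+        """
+        payload = session.packRequest_pty_req(
+            'xterm', (24, 80, 0, 0), struct.pack('>BL', 5, 6))
+        return session.parseRequest_pty_req(payload)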
+
+
+    def test_packRequest_pty_req_old(self):
+        """
+        See test_parseRequest_pty_req for the payload format.
+        """
+        packed = session.packRequest_pty_req('xterm', (2, 1, 3, 4),
+                                             '\x05\x00\x00\x00\x06')
+
+        self.assertEqual(packed,
+                          common.NS('xterm') + struct.pack('>4L', 1, 2, 3, 4) +
+                          common.NS(struct.pack('>BL', 5, 6)))
+
+
+    def test_packRequest_pty_req(self):
+        """
+        See test_parseRequest_pty_req for the payload format.
+        """
+        packed = session.packRequest_pty_req('xterm', (2, 1, 3, 4),
+                                             '\x05\x00\x00\x00\x06')
+        self.assertEqual(packed,
+                          common.NS('xterm') + struct.pack('>4L', 1, 2, 3, 4) +
+                          common.NS(struct.pack('>BL', 5, 6)))
+
+
+    def test_parseRequest_window_change(self):
+        """
+        The payload of a window_change request is::
+            uint32  columns
+            uint32  rows
+            uint32  x pixels
+            uint32  y pixels
+
+        parseRequest_window_change() returns (rows, columns, x pixels,
+        y pixels).
+        """
+        self.assertEqual(session.parseRequest_window_change(
+                struct.pack('>4L', 1, 2, 3, 4)), (2, 1, 3, 4))
+
+
+    def test_packRequest_window_change(self):
+        """
+        See test_parseRequest_window_change for the payload format.
+        """
+        self.assertEqual(session.packRequest_window_change((2, 1, 3, 4)),
+                          struct.pack('>4L', 1, 2, 3, 4))
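+
+
+    # An illustrative sketch of the window-change payload documented above;
+    # the method name and the 24x80 geometry are arbitrary illustrative
+    # choices.
+    def _exampleWindowChangeRoundTrip(self):
+        """
+        Pack a window-change payload and parse it back.  The wire format
+        carries columns first, but both helpers work with (rows, columns,
+        x pixels, y pixels), so the tuple round-trips unchanged.
+        """
+        payload = session.packRequest_window_change((24, 80, 0, 0))
+        return session.parseRequest_window_change(payload)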
+
+
+
+class SSHSessionProcessProtocolTestCase(unittest.TestCase):
+    """
+    Tests for L{SSHSessionProcessProtocol}.
+    """
+
+    def setUp(self):
+        self.transport = StubTransport()
+        self.session = session.SSHSession(
+            conn=StubConnection(self.transport), remoteWindow=500,
+            remoteMaxPacket=100)
+        self.pp = session.SSHSessionProcessProtocol(self.session)
+        self.pp.makeConnection(self.transport)
+
+
+    def assertSessionClosed(self):
+        """
+        Assert that C{self.session} is closed.
+        """
+        self.assertTrue(self.session.conn.closes[self.session])
+
+
+    def assertRequestsEqual(self, expectedRequests):
+        """
+        Assert that C{self.session} has sent the C{expectedRequests}.
+        """
+        self.assertEqual(
+            self.session.conn.requests[self.session],
+            expectedRequests)
+
+
+    def test_init(self):
+        """
+        SSHSessionProcessProtocol should set self.session to the session passed
+        to the __init__ method.
+        """
+        self.assertEqual(self.pp.session, self.session)
+
+
+    def test_getHost(self):
+        """
+        SSHSessionProcessProtocol.getHost() just delegates to its
+        session.conn.transport.getHost().
+        """
+        self.assertEqual(
+            self.session.conn.transport.getHost(), self.pp.getHost())
+
+
+    def test_getPeer(self):
+        """
+        SSHSessionProcessProtocol.getPeer() just delegates to its
+        session.conn.transport.getPeer().
+        """
+        self.assertEqual(
+            self.session.conn.transport.getPeer(), self.pp.getPeer())
+
+
+    def test_connectionMade(self):
+        """
+        SSHSessionProcessProtocol.connectionMade() should check if there's a
+        'buf' attribute on its session and write it to the transport if so.
+        """
+        self.session.buf = 'buffer'
+        self.pp.connectionMade()
+        self.assertEqual(self.transport.buf, 'buffer')
+
+
+    def test_getSignalName(self):
+        """
+        _getSignalName should return the name of a signal when given the
+        signal number.
+        """
+        for signalName in session.SUPPORTED_SIGNALS:
+            signalName = 'SIG' + signalName
+            signalValue = getattr(signal, signalName)
+            sshName = self.pp._getSignalName(signalValue)
+            self.assertEqual(sshName, signalName,
+                              "%i: %s != %s" % (signalValue, sshName,
+                                                signalName))
+
+
+    def test_getSignalNameWithLocalSignal(self):
+        """
+        If there are signals in the signal module which aren't in the SSH RFC,
+        we map their name to [signal name]@[platform].
+        """
+        signal.SIGTwistedTest = signal.NSIG + 1 # value can't exist normally
+        # Force reinitialization of signals
+        self.pp._signalValuesToNames = None
+        self.assertEqual(self.pp._getSignalName(signal.SIGTwistedTest),
+                          'SIGTwistedTest@' + sys.platform)
+
+
+    if getattr(signal, 'SIGALRM', None) is None:
+        test_getSignalName.skip = test_getSignalNameWithLocalSignal.skip = \
+            "Not all signals available"
+
+
+    def test_outReceived(self):
+        """
+        When data is passed to the outReceived method, it should be sent to
+        the session's write method.
+        """
+        self.pp.outReceived('test data')
+        self.assertEqual(self.session.conn.data[self.session],
+                ['test data'])
+
+
+    def test_write(self):
+        """
+        When data is passed to the write method, it should be sent to the
+        session channel's write method.
+        """
+        self.pp.write('test data')
+        self.assertEqual(self.session.conn.data[self.session],
+                ['test data'])
+
+    def test_writeSequence(self):
+        """
+        When a sequence is passed to the writeSequence method, it should be
+        joined together and sent to the session channel's write method.
+        """
+        self.pp.writeSequence(['test ', 'data'])
+        self.assertEqual(self.session.conn.data[self.session],
+                ['test data'])
+
+
+    def test_errReceived(self):
+        """
+        When data is passed to the errReceived method, it should be sent to
+        the session's writeExtended method.
+        """
+        self.pp.errReceived('test data')
+        self.assertEqual(self.session.conn.extData[self.session],
+                [(1, 'test data')])
+
+
+    def test_outConnectionLost(self):
+        """
+        When outConnectionLost and errConnectionLost are both called, we should
+        send an EOF message.
+        """
+        self.pp.outConnectionLost()
+        self.assertFalse(self.session in self.session.conn.eofs)
+        self.pp.errConnectionLost()
+        self.assertTrue(self.session.conn.eofs[self.session])
+
+
+    def test_errConnectionLost(self):
+        """
+        Make sure reverse ordering of events in test_outConnectionLost also
+        sends EOF.
+        """
+        self.pp.errConnectionLost()
+        self.assertFalse(self.session in self.session.conn.eofs)
+        self.pp.outConnectionLost()
+        self.assertTrue(self.session.conn.eofs[self.session])
+
+
+    def test_loseConnection(self):
+        """
+        When loseConnection() is called, it should call loseConnection
+        on the session channel.
+        """
+        self.pp.loseConnection()
+        self.assertTrue(self.session.conn.closes[self.session])
+
+
+    def test_connectionLost(self):
+        """
+        When connectionLost() is called, it should call loseConnection()
+        on the session channel.
+        """
+        self.pp.connectionLost(failure.Failure(
+                ProcessDone(0)))
+
+
+    def test_processEndedWithExitCode(self):
+        """
+        When processEnded is called, if there is an exit code in the reason
+        it should be sent in an exit-status message.  The connection should be
+        closed.
+        """
+        self.pp.processEnded(Failure(ProcessDone(None)))
+        self.assertRequestsEqual(
+            [('exit-status', struct.pack('>I', 0) , False)])
+        self.assertSessionClosed()
+
+
+    def test_processEndedWithExitSignalCoreDump(self):
+        """
+        When processEnded is called, if there is an exit signal in the reason
+        it should be sent in an exit-signal message.  The connection should be
+        closed.
+        """
+        self.pp.processEnded(
+            Failure(ProcessTerminated(1,
+                signal.SIGTERM, 1 << 7))) # 7th bit means core dumped
+        self.assertRequestsEqual(
+            [('exit-signal',
+              common.NS('TERM') # signal name
+              + '\x01' # core dumped is true
+              + common.NS('') # error message
+              + common.NS(''), # language tag
+              False)])
+        self.assertSessionClosed()
+
+
+    def test_processEndedWithExitSignalNoCoreDump(self):
+        """
+        When processEnded is called, if there is an exit signal in the
+        reason it should be sent in an exit-signal message.  If no
+        core was dumped, don't set the core-dump bit.
+        """
+        self.pp.processEnded(
+            Failure(ProcessTerminated(1, signal.SIGTERM, 0)))
+        # see comments in test_processEndedWithExitSignalCoreDump for the
+        # meaning of the parts in the request
+        self.assertRequestsEqual(
+             [('exit-signal', common.NS('TERM') + '\x00' + common.NS('') +
+               common.NS(''), False)])
+        self.assertSessionClosed()
+
+
+    if getattr(os, 'WCOREDUMP', None) is None:
+        skipMsg = "can't run this w/o os.WCOREDUMP"
+        test_processEndedWithExitSignalCoreDump.skip = skipMsg
+        test_processEndedWithExitSignalNoCoreDump.skip = skipMsg
+
+
+
+class SSHSessionClientTestCase(unittest.TestCase):
+    """
+    SSHSessionClient is an obsolete class used to connect standard IO to
+    an SSHSession.
+    """
+
+
+    def test_dataReceived(self):
+        """
+        When data is received, it should be sent to the transport.
+        """
+        client = session.SSHSessionClient()
+        client.transport = StubTransport()
+        client.dataReceived('test data')
+        self.assertEqual(client.transport.buf, 'test data')
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_ssh.py b/ThirdParty/Twisted/twisted/conch/test/test_ssh.py
new file mode 100644
index 0000000..6cf1a1a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_ssh.py
@@ -0,0 +1,995 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.ssh}.
+"""
+
+import struct
+
+try:
+    import Crypto.Cipher.DES3
+except ImportError:
+    Crypto = None
+
+try:
+    import pyasn1
+except ImportError:
+    pyasn1 = None
+
+from twisted.conch.ssh import common, session, forwarding
+from twisted.conch import avatar, error
+from twisted.conch.test.keydata import publicRSA_openssh, privateRSA_openssh
+from twisted.conch.test.keydata import publicDSA_openssh, privateDSA_openssh
+from twisted.cred import portal
+from twisted.cred.error import UnauthorizedLogin
+from twisted.internet import defer, protocol, reactor
+from twisted.internet.error import ProcessTerminated
+from twisted.python import failure, log
+from twisted.trial import unittest
+
+from twisted.conch.test.test_recvline import LoopbackRelay
+
+
+
+class ConchTestRealm(object):
+    """
+    A realm which expects a particular avatarId to log in once and creates a
+    L{ConchTestAvatar} for that request.
+
+    @ivar expectedAvatarID: The only avatarID that this realm will produce an
+        avatar for.
+
+    @ivar avatar: A reference to the avatar after it is requested.
+    """
+    avatar = None
+
+    def __init__(self, expectedAvatarID):
+        self.expectedAvatarID = expectedAvatarID
+
+
+    def requestAvatar(self, avatarID, mind, *interfaces):
+        """
+        Return a new L{ConchTestAvatar} if the avatarID matches the expected one
+        and this is the first avatar request.
+        """
+        if avatarID == self.expectedAvatarID:
+            if self.avatar is not None:
+                raise UnauthorizedLogin("Only one login allowed")
+            self.avatar = ConchTestAvatar()
+            return interfaces[0], self.avatar, self.avatar.logout
+        raise UnauthorizedLogin(
+            "Only %r may log in, not %r" % (self.expectedAvatarID, avatarID))
+
+
+
+class ConchTestAvatar(avatar.ConchUser):
+    """
+    An avatar against which various SSH features can be tested.
+
+    @ivar loggedOut: A flag indicating whether the avatar logout method has been
+        called.
+    """
+    loggedOut = False
+
+    def __init__(self):
+        avatar.ConchUser.__init__(self)
+        self.listeners = {}
+        self.globalRequests = {}
+        self.channelLookup.update({'session': session.SSHSession,
+                        'direct-tcpip':forwarding.openConnectForwardingClient})
+        self.subsystemLookup.update({'crazy': CrazySubsystem})
+
+
+    def global_foo(self, data):
+        self.globalRequests['foo'] = data
+        return 1
+
+
+    def global_foo_2(self, data):
+        self.globalRequests['foo_2'] = data
+        return 1, 'data'
+
+
+    def global_tcpip_forward(self, data):
+        host, port = forwarding.unpackGlobal_tcpip_forward(data)
+        try:
+            listener = reactor.listenTCP(
+                port, forwarding.SSHListenForwardingFactory(
+                    self.conn, (host, port),
+                    forwarding.SSHListenServerForwardingChannel),
+                interface=host)
+        except:
+            log.err(None, "something went wrong with remote->local forwarding")
+            return 0
+        else:
+            self.listeners[(host, port)] = listener
+            return 1
+
+
+    def global_cancel_tcpip_forward(self, data):
+        host, port = forwarding.unpackGlobal_tcpip_forward(data)
+        listener = self.listeners.get((host, port), None)
+        if not listener:
+            return 0
+        del self.listeners[(host, port)]
+        listener.stopListening()
+        return 1
+
+
+    def logout(self):
+        self.loggedOut = True
+        for listener in self.listeners.values():
+            log.msg('stopListening %s' % listener)
+            listener.stopListening()
+
+
+
+class ConchSessionForTestAvatar(object):
+    """
+    An ISession adapter for ConchTestAvatar.
+    """
+    def __init__(self, avatar):
+        """
+        Initialize the session and create a reference to it on the avatar for
+        later inspection.
+        """
+        self.avatar = avatar
+        self.avatar._testSession = self
+        self.cmd = None
+        self.proto = None
+        self.ptyReq = False
+        self.eof = 0
+        self.onClose = defer.Deferred()
+
+
+    def getPty(self, term, windowSize, attrs):
+        log.msg('pty req')
+        self._terminalType = term
+        self._windowSize = windowSize
+        self.ptyReq = True
+
+
+    def openShell(self, proto):
+        log.msg('opening shell')
+        self.proto = proto
+        EchoTransport(proto)
+        self.cmd = 'shell'
+
+
+    def execCommand(self, proto, cmd):
+        self.cmd = cmd
+        self.proto = proto
+        f = cmd.split()[0]
+        if f == 'false':
+            t = FalseTransport(proto)
+            # Avoid disconnecting this immediately.  If the channel is closed
+            # before execCommand even returns, the caller gets confused.
+            reactor.callLater(0, t.loseConnection)
+        elif f == 'echo':
+            t = EchoTransport(proto)
+            t.write(cmd[5:])
+            t.loseConnection()
+        elif f == 'secho':
+            t = SuperEchoTransport(proto)
+            t.write(cmd[6:])
+            t.loseConnection()
+        elif f == 'eecho':
+            t = ErrEchoTransport(proto)
+            t.write(cmd[6:])
+            t.loseConnection()
+        else:
+            raise error.ConchError('bad exec')
+        self.avatar.conn.transport.expectedLoseConnection = 1
+
+
+    def eofReceived(self):
+        self.eof = 1
+
+
+    def closed(self):
+        log.msg('closed cmd "%s"' % self.cmd)
+        self.remoteWindowLeftAtClose = self.proto.session.remoteWindowLeft
+        self.onClose.callback(None)
+
+from twisted.python import components
+components.registerAdapter(ConchSessionForTestAvatar, ConchTestAvatar, session.ISession)
+
+class CrazySubsystem(protocol.Protocol):
+
+    def __init__(self, *args, **kw):
+        pass
+
+    def connectionMade(self):
+        """
+        good ... good
+        """
+
+
+
+class FalseTransport:
+    """
+    False transport should act like a /bin/false execution, i.e. just exit with
+    nonzero status, writing nothing to the terminal.
+
+    @ivar proto: The protocol associated with this transport.
+    @ivar closed: A flag tracking whether C{loseConnection} has been called yet.
+    """
+
+    def __init__(self, p):
+        """
+        @type p: L{twisted.conch.ssh.session.SSHSessionProcessProtocol} instance
+        """
+        self.proto = p
+        p.makeConnection(self)
+        self.closed = 0
+
+
+    def loseConnection(self):
+        """
+        Disconnect the protocol associated with this transport.
+        """
+        if self.closed:
+            return
+        self.closed = 1
+        self.proto.inConnectionLost()
+        self.proto.outConnectionLost()
+        self.proto.errConnectionLost()
+        self.proto.processEnded(failure.Failure(ProcessTerminated(255, None, None)))
+
+
+
+class EchoTransport:
+
+    def __init__(self, p):
+        self.proto = p
+        p.makeConnection(self)
+        self.closed = 0
+
+    def write(self, data):
+        log.msg(repr(data))
+        self.proto.outReceived(data)
+        self.proto.outReceived('\r\n')
+        if '\x00' in data: # mimic 'exit' for the shell test
+            self.loseConnection()
+
+    def loseConnection(self):
+        if self.closed: return
+        self.closed = 1
+        self.proto.inConnectionLost()
+        self.proto.outConnectionLost()
+        self.proto.errConnectionLost()
+        self.proto.processEnded(failure.Failure(ProcessTerminated(0, None, None)))
+
+class ErrEchoTransport:
+
+    def __init__(self, p):
+        self.proto = p
+        p.makeConnection(self)
+        self.closed = 0
+
+    def write(self, data):
+        self.proto.errReceived(data)
+        self.proto.errReceived('\r\n')
+
+    def loseConnection(self):
+        if self.closed: return
+        self.closed = 1
+        self.proto.inConnectionLost()
+        self.proto.outConnectionLost()
+        self.proto.errConnectionLost()
+        self.proto.processEnded(failure.Failure(ProcessTerminated(0, None, None)))
+
+class SuperEchoTransport:
+
+    def __init__(self, p):
+        self.proto = p
+        p.makeConnection(self)
+        self.closed = 0
+
+    def write(self, data):
+        self.proto.outReceived(data)
+        self.proto.outReceived('\r\n')
+        self.proto.errReceived(data)
+        self.proto.errReceived('\r\n')
+
+    def loseConnection(self):
+        if self.closed: return
+        self.closed = 1
+        self.proto.inConnectionLost()
+        self.proto.outConnectionLost()
+        self.proto.errConnectionLost()
+        self.proto.processEnded(failure.Failure(ProcessTerminated(0, None, None)))
+
+
+if Crypto is not None and pyasn1 is not None:
+    from twisted.conch import checkers
+    from twisted.conch.ssh import channel, connection, factory, keys
+    from twisted.conch.ssh import transport, userauth
+
+    class UtilityTestCase(unittest.TestCase):
+        def testCounter(self):
+            c = transport._Counter('\x00\x00', 2)
+            for i in xrange(256 * 256):
+                self.assertEqual(c(), struct.pack('!H', (i + 1) % (2 ** 16)))
+            # It should wrap around, too.
+            for i in xrange(256 * 256):
+                self.assertEqual(c(), struct.pack('!H', (i + 1) % (2 ** 16)))
+
+
+    class ConchTestPublicKeyChecker(checkers.SSHPublicKeyDatabase):
+        def checkKey(self, credentials):
+            blob = keys.Key.fromString(publicDSA_openssh).blob()
+            if credentials.username == 'testuser' and credentials.blob == blob:
+                return True
+            return False
+
+
+    class ConchTestPasswordChecker:
+        credentialInterfaces = checkers.IUsernamePassword,
+
+        def requestAvatarId(self, credentials):
+            if credentials.username == 'testuser' and credentials.password == 'testpass':
+                return defer.succeed(credentials.username)
+            return defer.fail(Exception("Bad credentials"))
+
+
+    class ConchTestSSHChecker(checkers.SSHProtocolChecker):
+
+        def areDone(self, avatarId):
+            if avatarId != 'testuser' or len(self.successfulCredentials[avatarId]) < 2:
+                return False
+            return True
+
+    class ConchTestServerFactory(factory.SSHFactory):
+        noisy = 0
+
+        services = {
+            'ssh-userauth':userauth.SSHUserAuthServer,
+            'ssh-connection':connection.SSHConnection
+        }
+
+        def buildProtocol(self, addr):
+            proto = ConchTestServer()
+            proto.supportedPublicKeys = self.privateKeys.keys()
+            proto.factory = self
+
+            if hasattr(self, 'expectedLoseConnection'):
+                proto.expectedLoseConnection = self.expectedLoseConnection
+
+            self.proto = proto
+            return proto
+
+        def getPublicKeys(self):
+            return {
+                'ssh-rsa': keys.Key.fromString(publicRSA_openssh),
+                'ssh-dss': keys.Key.fromString(publicDSA_openssh)
+            }
+
+        def getPrivateKeys(self):
+            return {
+                'ssh-rsa': keys.Key.fromString(privateRSA_openssh),
+                'ssh-dss': keys.Key.fromString(privateDSA_openssh)
+            }
+
+        def getPrimes(self):
+            return {
+                2048:[(transport.DH_GENERATOR, transport.DH_PRIME)]
+            }
+
+        def getService(self, trans, name):
+            return factory.SSHFactory.getService(self, trans, name)
+
+    class ConchTestBase:
+
+        done = 0
+
+        def connectionLost(self, reason):
+            if self.done:
+                return
+            if not hasattr(self,'expectedLoseConnection'):
+                unittest.fail('unexpectedly lost connection %s\n%s' % (self, reason))
+            self.done = 1
+
+        def receiveError(self, reasonCode, desc):
+            self.expectedLoseConnection = 1
+            # Some versions of OpenSSH (for example, OpenSSH_5.3p1) will
+            # send a DISCONNECT_BY_APPLICATION error before closing the
+            # connection.  Other, older versions (for example,
+            # OpenSSH_5.1p1), won't.  So accept this particular error here,
+            # but no others.
+            if reasonCode != transport.DISCONNECT_BY_APPLICATION:
+                log.err(
+                    Exception(
+                        'got disconnect for %s: reason %s, desc: %s' % (
+                            self, reasonCode, desc)))
+            self.loseConnection()
+
+        def receiveUnimplemented(self, seqID):
+            unittest.fail('got unimplemented: seqid %s'  % seqID)
+            self.expectedLoseConnection = 1
+            self.loseConnection()
+
+    class ConchTestServer(ConchTestBase, transport.SSHServerTransport):
+
+        def connectionLost(self, reason):
+            ConchTestBase.connectionLost(self, reason)
+            transport.SSHServerTransport.connectionLost(self, reason)
+
+
+    class ConchTestClient(ConchTestBase, transport.SSHClientTransport):
+        """
+        @ivar _channelFactory: A callable which accepts an SSH connection and
+            returns a channel which will be attached to a new channel on that
+            connection.
+        """
+        def __init__(self, channelFactory):
+            self._channelFactory = channelFactory
+
+        def connectionLost(self, reason):
+            ConchTestBase.connectionLost(self, reason)
+            transport.SSHClientTransport.connectionLost(self, reason)
+
+        def verifyHostKey(self, key, fp):
+            keyMatch = key == keys.Key.fromString(publicRSA_openssh).blob()
+            fingerprintMatch = (
+                fp == '3d:13:5f:cb:c9:79:8a:93:06:27:65:bc:3d:0b:8f:af')
+            if keyMatch and fingerprintMatch:
+                return defer.succeed(1)
+            return defer.fail(Exception("Key or fingerprint mismatch"))
+
+        def connectionSecure(self):
+            self.requestService(ConchTestClientAuth('testuser',
+                ConchTestClientConnection(self._channelFactory)))
+
+
+    class ConchTestClientAuth(userauth.SSHUserAuthClient):
+
+        hasTriedNone = 0 # have we tried the 'none' auth yet?
+        canSucceedPublicKey = 0 # can we succeed with this yet?
+        canSucceedPassword = 0
+
+        def ssh_USERAUTH_SUCCESS(self, packet):
+            if not self.canSucceedPassword and self.canSucceedPublicKey:
+                unittest.fail('got USERAUTH_SUCCESS before password and publickey')
+            userauth.SSHUserAuthClient.ssh_USERAUTH_SUCCESS(self, packet)
+
+        def getPassword(self):
+            self.canSucceedPassword = 1
+            return defer.succeed('testpass')
+
+        def getPrivateKey(self):
+            self.canSucceedPublicKey = 1
+            return defer.succeed(keys.Key.fromString(privateDSA_openssh))
+
+        def getPublicKey(self):
+            return keys.Key.fromString(publicDSA_openssh)
+
+
+    class ConchTestClientConnection(connection.SSHConnection):
+        """
+        @ivar _completed: A L{Deferred} which will be fired when the number of
+            results collected reaches C{totalResults}.
+        """
+        name = 'ssh-connection'
+        results = 0
+        totalResults = 8
+
+        def __init__(self, channelFactory):
+            connection.SSHConnection.__init__(self)
+            self._channelFactory = channelFactory
+
+        def serviceStarted(self):
+            self.openChannel(self._channelFactory(conn=self))
+
+
+    class SSHTestChannel(channel.SSHChannel):
+
+        def __init__(self, name, opened, *args, **kwargs):
+            self.name = name
+            self._opened = opened
+            self.received = []
+            self.receivedExt = []
+            self.onClose = defer.Deferred()
+            channel.SSHChannel.__init__(self, *args, **kwargs)
+
+
+        def openFailed(self, reason):
+            self._opened.errback(reason)
+
+
+        def channelOpen(self, ignore):
+            self._opened.callback(self)
+
+
+        def dataReceived(self, data):
+            self.received.append(data)
+
+
+        def extReceived(self, dataType, data):
+            if dataType == connection.EXTENDED_DATA_STDERR:
+                self.receivedExt.append(data)
+            else:
+                log.msg("Unrecognized extended data: %r" % (dataType,))
+
+
+        def request_exit_status(self, status):
+            [self.status] = struct.unpack('>L', status)
+
+
+        def eofReceived(self):
+            self.eofCalled = True
+
+
+        def closed(self):
+            self.onClose.callback(None)
+
+
+
+class SSHProtocolTestCase(unittest.TestCase):
+    """
+    Tests for communication between L{SSHServerTransport} and
+    L{SSHClientTransport}.
+    """
+
+    if not Crypto:
+        skip = "can't run w/o PyCrypto"
+
+    if not pyasn1:
+        skip = "Cannot run without PyASN1"
+
+    def _ourServerOurClientTest(self, name='session', **kwargs):
+        """
+        Create a connected SSH client and server protocol pair and return a
+        L{Deferred} which fires with an L{SSHTestChannel} instance connected to
+        a channel on that SSH connection.
+        """
+        result = defer.Deferred()
+        self.realm = ConchTestRealm('testuser')
+        p = portal.Portal(self.realm)
+        sshpc = ConchTestSSHChecker()
+        sshpc.registerChecker(ConchTestPasswordChecker())
+        sshpc.registerChecker(ConchTestPublicKeyChecker())
+        p.registerChecker(sshpc)
+        fac = ConchTestServerFactory()
+        fac.portal = p
+        fac.startFactory()
+        self.server = fac.buildProtocol(None)
+        self.clientTransport = LoopbackRelay(self.server)
+        self.client = ConchTestClient(
+            lambda conn: SSHTestChannel(name, result, conn=conn, **kwargs))
+
+        self.serverTransport = LoopbackRelay(self.client)
+
+        self.server.makeConnection(self.serverTransport)
+        self.client.makeConnection(self.clientTransport)
+        return result
+
+
+    def test_subsystemsAndGlobalRequests(self):
+        """
+        Run the Conch server against the Conch client.  Set up several different
+        channels which exercise different behaviors and wait for them to
+        complete.  Verify that the channels with errors log them.
+        """
+        channel = self._ourServerOurClientTest()
+
+        def cbSubsystem(channel):
+            self.channel = channel
+            return self.assertFailure(
+                channel.conn.sendRequest(
+                    channel, 'subsystem', common.NS('not-crazy'), 1),
+                Exception)
+        channel.addCallback(cbSubsystem)
+
+        def cbNotCrazyFailed(ignored):
+            channel = self.channel
+            return channel.conn.sendRequest(
+                channel, 'subsystem', common.NS('crazy'), 1)
+        channel.addCallback(cbNotCrazyFailed)
+
+        def cbGlobalRequests(ignored):
+            channel = self.channel
+            d1 = channel.conn.sendGlobalRequest('foo', 'bar', 1)
+
+            d2 = channel.conn.sendGlobalRequest('foo-2', 'bar2', 1)
+            d2.addCallback(self.assertEqual, 'data')
+
+            d3 = self.assertFailure(
+                channel.conn.sendGlobalRequest('bar', 'foo', 1),
+                Exception)
+
+            return defer.gatherResults([d1, d2, d3])
+        channel.addCallback(cbGlobalRequests)
+
+        def disconnect(ignored):
+            self.assertEqual(
+                self.realm.avatar.globalRequests,
+                {"foo": "bar", "foo_2": "bar2"})
+            channel = self.channel
+            channel.conn.transport.expectedLoseConnection = True
+            channel.conn.serviceStopped()
+            channel.loseConnection()
+        channel.addCallback(disconnect)
+
+        return channel
+
+
+    def test_shell(self):
+        """
+        L{SSHChannel.sendRequest} can open a shell with a I{pty-req} request,
+        specifying a terminal type and window size.
+        """
+        channel = self._ourServerOurClientTest()
+
+        data = session.packRequest_pty_req('conch-test-term', (24, 80, 0, 0), '')
+        def cbChannel(channel):
+            self.channel = channel
+            return channel.conn.sendRequest(channel, 'pty-req', data, 1)
+        channel.addCallback(cbChannel)
+
+        def cbPty(ignored):
+            # The server-side object corresponding to our client side channel.
+            session = self.realm.avatar.conn.channels[0].session
+            self.assertIdentical(session.avatar, self.realm.avatar)
+            self.assertEqual(session._terminalType, 'conch-test-term')
+            self.assertEqual(session._windowSize, (24, 80, 0, 0))
+            self.assertTrue(session.ptyReq)
+            channel = self.channel
+            return channel.conn.sendRequest(channel, 'shell', '', 1)
+        channel.addCallback(cbPty)
+
+        def cbShell(ignored):
+            self.channel.write('testing the shell!\x00')
+            self.channel.conn.sendEOF(self.channel)
+            return defer.gatherResults([
+                    self.channel.onClose,
+                    self.realm.avatar._testSession.onClose])
+        channel.addCallback(cbShell)
+
+        def cbExited(ignored):
+            if self.channel.status != 0:
+                log.msg(
+                    'shell exit status was not 0: %i' % (self.channel.status,))
+            self.assertEqual(
+                "".join(self.channel.received),
+                'testing the shell!\x00\r\n')
+            self.assertTrue(self.channel.eofCalled)
+            self.assertTrue(
+                self.realm.avatar._testSession.eof)
+        channel.addCallback(cbExited)
+        return channel
+
+
+    def test_failedExec(self):
+        """
+        If L{SSHChannel.sendRequest} issues an exec which the server responds to
+        with an error, the L{Deferred} it returns fires its errback.
+        """
+        channel = self._ourServerOurClientTest()
+
+        def cbChannel(channel):
+            self.channel = channel
+            return self.assertFailure(
+                channel.conn.sendRequest(
+                    channel, 'exec', common.NS('jumboliah'), 1),
+                Exception)
+        channel.addCallback(cbChannel)
+
+        def cbFailed(ignored):
+            # The server logs this exception when it cannot perform the
+            # requested exec.
+            errors = self.flushLoggedErrors(error.ConchError)
+            self.assertEqual(errors[0].value.args, ('bad exec', None))
+        channel.addCallback(cbFailed)
+        return channel
+
+
+    def test_falseChannel(self):
+        """
+        When the process started by a L{SSHChannel.sendRequest} exec request
+        exits, the exit status is reported to the channel.
+        """
+        channel = self._ourServerOurClientTest()
+
+        def cbChannel(channel):
+            self.channel = channel
+            return channel.conn.sendRequest(
+                channel, 'exec', common.NS('false'), 1)
+        channel.addCallback(cbChannel)
+
+        def cbExec(ignored):
+            return self.channel.onClose
+        channel.addCallback(cbExec)
+
+        def cbClosed(ignored):
+            # No data is expected
+            self.assertEqual(self.channel.received, [])
+            self.assertNotEqual(self.channel.status, 0)
+        channel.addCallback(cbClosed)
+        return channel
+
+
+    def test_errorChannel(self):
+        """
+        Bytes sent over the extended channel for stderr data are delivered to
+        the channel's C{extReceived} method.
+        """
+        channel = self._ourServerOurClientTest(localWindow=4, localMaxPacket=5)
+
+        def cbChannel(channel):
+            self.channel = channel
+            return channel.conn.sendRequest(
+                channel, 'exec', common.NS('eecho hello'), 1)
+        channel.addCallback(cbChannel)
+
+        def cbExec(ignored):
+            return defer.gatherResults([
+                    self.channel.onClose,
+                    self.realm.avatar._testSession.onClose])
+        channel.addCallback(cbExec)
+
+        def cbClosed(ignored):
+            self.assertEqual(self.channel.received, [])
+            self.assertEqual("".join(self.channel.receivedExt), "hello\r\n")
+            self.assertEqual(self.channel.status, 0)
+            self.assertTrue(self.channel.eofCalled)
+            self.assertEqual(self.channel.localWindowLeft, 4)
+            self.assertEqual(
+                self.channel.localWindowLeft,
+                self.realm.avatar._testSession.remoteWindowLeftAtClose)
+        channel.addCallback(cbClosed)
+        return channel
+
+
+    def test_unknownChannel(self):
+        """
+        When an attempt is made to open an unknown channel type, the
+        L{Deferred} which reports the result of the open attempt fires its
+        errback.
+        """
+        d = self.assertFailure(
+            self._ourServerOurClientTest('crazy-unknown-channel'), Exception)
+        def cbFailed(ignored):
+            errors = self.flushLoggedErrors(error.ConchError)
+            self.assertEqual(errors[0].value.args, (3, 'unknown channel'))
+            self.assertEqual(len(errors), 1)
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_maxPacket(self):
+        """
+        An L{SSHChannel} can be configured with a maximum packet size to
+        receive.
+        """
+        # localWindow needs to be at least 11 otherwise the assertion about it
+        # in cbClosed is invalid.
+        channel = self._ourServerOurClientTest(
+            localWindow=11, localMaxPacket=1)
+
+        def cbChannel(channel):
+            self.channel = channel
+            return channel.conn.sendRequest(
+                channel, 'exec', common.NS('secho hello'), 1)
+        channel.addCallback(cbChannel)
+
+        def cbExec(ignored):
+            return self.channel.onClose
+        channel.addCallback(cbExec)
+
+        def cbClosed(ignored):
+            self.assertEqual(self.channel.status, 0)
+            self.assertEqual("".join(self.channel.received), "hello\r\n")
+            self.assertEqual("".join(self.channel.receivedExt), "hello\r\n")
+            self.assertEqual(self.channel.localWindowLeft, 11)
+            self.assertTrue(self.channel.eofCalled)
+        channel.addCallback(cbClosed)
+        return channel
+
+
+    def test_echo(self):
+        """
+        Normal standard out bytes are sent to the channel's C{dataReceived}
+        method.
+        """
+        channel = self._ourServerOurClientTest(localWindow=4, localMaxPacket=5)
+
+        def cbChannel(channel):
+            self.channel = channel
+            return channel.conn.sendRequest(
+                channel, 'exec', common.NS('echo hello'), 1)
+        channel.addCallback(cbChannel)
+
+        def cbEcho(ignored):
+            return defer.gatherResults([
+                    self.channel.onClose,
+                    self.realm.avatar._testSession.onClose])
+        channel.addCallback(cbEcho)
+
+        def cbClosed(ignored):
+            self.assertEqual(self.channel.status, 0)
+            self.assertEqual("".join(self.channel.received), "hello\r\n")
+            self.assertEqual(self.channel.localWindowLeft, 4)
+            self.assertTrue(self.channel.eofCalled)
+            self.assertEqual(
+                self.channel.localWindowLeft,
+                self.realm.avatar._testSession.remoteWindowLeftAtClose)
+        channel.addCallback(cbClosed)
+        return channel
+
+
+
+class TestSSHFactory(unittest.TestCase):
+
+    if not Crypto:
+        skip = "can't run w/o PyCrypto"
+
+    if not pyasn1:
+        skip = "Cannot run without PyASN1"
+
+    def makeSSHFactory(self, primes=None):
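+        """
+        Create and start an L{factory.SSHFactory} whose key and prime lookups
+        are stubbed out, with C{primes} used as the available primes.
+        """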
+        sshFactory = factory.SSHFactory()
+        gpk = lambda: {'ssh-rsa' : keys.Key(None)}
+        sshFactory.getPrimes = lambda: primes
+        sshFactory.getPublicKeys = sshFactory.getPrivateKeys = gpk
+        sshFactory.startFactory()
+        return sshFactory
+
+
+    def test_buildProtocol(self):
+        """
+        By default, buildProtocol() constructs an instance of
+        SSHServerTransport.
+        """
+        factory = self.makeSSHFactory()
+        protocol = factory.buildProtocol(None)
+        self.assertIsInstance(protocol, transport.SSHServerTransport)
+
+
+    def test_buildProtocolRespectsProtocol(self):
+        """
+        buildProtocol() calls 'self.protocol()' to construct a protocol
+        instance.
+        """
+        calls = []
+        def makeProtocol(*args):
+            calls.append(args)
+            return transport.SSHServerTransport()
+        factory = self.makeSSHFactory()
+        factory.protocol = makeProtocol
+        factory.buildProtocol(None)
+        self.assertEqual([()], calls)
+
+
+    def test_multipleFactories(self):
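+        """
+        Only a factory configured with Diffie-Hellman primes advertises the
+        diffie-hellman-group-exchange-sha1 key exchange.
+        """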
+        f1 = self.makeSSHFactory(primes=None)
+        f2 = self.makeSSHFactory(primes={1:(2,3)})
+        p1 = f1.buildProtocol(None)
+        p2 = f2.buildProtocol(None)
+        self.assertNotIn('diffie-hellman-group-exchange-sha1',
+                         p1.supportedKeyExchanges, p1.supportedKeyExchanges)
+        self.assertIn('diffie-hellman-group-exchange-sha1',
+                      p2.supportedKeyExchanges, p2.supportedKeyExchanges)
+
+
+
+class MPTestCase(unittest.TestCase):
+    """
+    Tests for L{common.getMP}.
+
+    @cvar getMP: a method providing a MP parser.
+    @type getMP: C{callable}
+    """
+    getMP = staticmethod(common.getMP)
+
+    if not Crypto:
+        skip = "can't run w/o PyCrypto"
+
+    if not pyasn1:
+        skip = "Cannot run without PyASN1"
+
+
+    def test_getMP(self):
+        """
+        L{common.getMP} should parse a multiple precision integer from a
+        string: a 4-byte length followed by that many bytes of the integer.
+        """
+        self.assertEqual(
+            self.getMP('\x00\x00\x00\x04\x00\x00\x00\x01'),
+            (1, ''))
+
+
+    def test_getMPBigInteger(self):
+        """
+        L{common.getMP} should be able to parse an integer that is too big to
+        fit in a single byte.
+        """
+        self.assertEqual(
+            self.getMP('\x00\x00\x00\x04\x01\x02\x03\x04'),
+            (16909060, ''))
+
+
+    def test_multipleGetMP(self):
+        """
+        L{common.getMP} can parse multiple integers from the same string.
+        """
+        self.assertEqual(
+            self.getMP('\x00\x00\x00\x04\x00\x00\x00\x01'
+                       '\x00\x00\x00\x04\x00\x00\x00\x02', 2),
+            (1, 2, ''))
+
+
+    def test_getMPRemainingData(self):
+        """
+        When more data than needed is sent to L{common.getMP}, it should return
+        the remaining data.
+        """
+        self.assertEqual(
+            self.getMP('\x00\x00\x00\x04\x00\x00\x00\x01foo'),
+            (1, 'foo'))
+
+
+    def test_notEnoughData(self):
+        """
+        When the string passed to L{common.getMP} is shorter than 5 bytes, it
+        should raise a L{struct.error}.
+        """
+        self.assertRaises(struct.error, self.getMP, '\x02\x00')
+
+
+
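+# Editor's illustrative sketch, not part of upstream Twisted: the wire format
+# parsed by getMP in the tests above is a 4-byte big-endian length followed by
+# that many bytes of big-endian integer data, so for example
+# '\x00\x00\x00\x04\x00\x00\x00\x01' decodes to (1, '').  A minimal encoder
+# for non-negative values (_packMP is a hypothetical helper, not an API of
+# twisted.conch.ssh.common; struct is already imported by this module):
+def _packMP(value):
+    """
+    Pack a non-negative integer in the length-prefixed format read by getMP.
+    """
+    data = ''
+    while value:
+        data = chr(value & 0xff) + data
+        value >>= 8
+    return struct.pack('>L', len(data)) + data
+
+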
+class PyMPTestCase(MPTestCase):
+    """
+    Tests for the python implementation of L{common.getMP}.
+    """
+    getMP = staticmethod(common.getMP_py)
+
+
+
+class GMPYMPTestCase(MPTestCase):
+    """
+    Tests for the gmpy implementation of L{common.getMP}.
+    """
+    getMP = staticmethod(common._fastgetMP)
+
+
+class BuiltinPowHackTestCase(unittest.TestCase):
+    """
+    Tests that the builtin pow method is still correct after
+    L{twisted.conch.ssh.common} monkeypatches it to use gmpy.
+    """
+
+    def test_floatBase(self):
+        """
+        pow gives the correct result when passed a base of type float with a
+        non-integer value.
+        """
+        self.assertEqual(6.25, pow(2.5, 2))
+
+    def test_intBase(self):
+        """
+        pow gives the correct result when passed a base of type int.
+        """
+        self.assertEqual(81, pow(3, 4))
+
+    def test_longBase(self):
+        """
+        pow gives the correct result when passed a base of type long.
+        """
+        self.assertEqual(81, pow(3, 4))
+
+    def test_mpzBase(self):
+        """
+        pow gives the correct result when passed a base of type gmpy.mpz.
+        """
+        if gmpy is None:
+            raise unittest.SkipTest('gmpy not available')
+        self.assertEqual(81, pow(gmpy.mpz(3), 4))
+
+
+try:
+    import gmpy
+except ImportError:
+    GMPYMPTestCase.skip = "gmpy not available"
+    gmpy = None
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_tap.py b/ThirdParty/Twisted/twisted/conch/test/test_tap.py
new file mode 100644
index 0000000..b957ffd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_tap.py
@@ -0,0 +1,184 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.tap}.
+"""
+
+try:
+    import Crypto.Cipher.DES3
+except ImportError:
+    Crypto = None
+
+try:
+    import pyasn1
+except ImportError:
+    pyasn1 = None
+
+try:
+    from twisted.conch import unix
+except ImportError:
+    unix = None
+
+if Crypto and pyasn1 and unix:
+    from twisted.conch import tap
+    from twisted.conch.openssh_compat.factory import OpenSSHFactory
+
+from twisted.python.compat import set
+from twisted.application.internet import StreamServerEndpointService
+from twisted.cred import error
+from twisted.cred.credentials import IPluggableAuthenticationModules
+from twisted.cred.credentials import ISSHPrivateKey
+from twisted.cred.credentials import IUsernamePassword, UsernamePassword
+
+from twisted.trial.unittest import TestCase
+
+
+
+class MakeServiceTest(TestCase):
+    """
+    Tests for L{tap.makeService}.
+    """
+
+    if not Crypto:
+        skip = "can't run w/o PyCrypto"
+
+    if not pyasn1:
+        skip = "Cannot run without PyASN1"
+
+    if not unix:
+        skip = "can't run on non-posix computers"
+
+    usernamePassword = ('iamuser', 'thisispassword')
+
+    def setUp(self):
+        """
+        Create a credentials file containing a single username/password pair.
+        """
+        self.filename = self.mktemp()
+        f = open(self.filename, 'wb+')
+        f.write(':'.join(self.usernamePassword))
+        f.close()
+        self.options = tap.Options()
+
+
+    def test_basic(self):
+        """
+        L{tap.makeService} returns a L{StreamServerEndpointService} instance
+        running on TCP port 22, and the linked protocol factory is an instance
+        of L{OpenSSHFactory}.
+        """
+        config = tap.Options()
+        service = tap.makeService(config)
+        self.assertIsInstance(service, StreamServerEndpointService)
+        self.assertEqual(service.endpoint._port, 22)
+        self.assertIsInstance(service.factory, OpenSSHFactory)
+
+
+    def test_defaultAuths(self):
+        """
+        If the C{--auth} command-line option is not passed, the default
+        checkers are (for backwards compatibility): SSH, UNIX, and PAM if it
+        is available.
+        """
+        numCheckers = 2
+        try:
+            from twisted.cred import pamauth
+            self.assertIn(IPluggableAuthenticationModules,
+                self.options['credInterfaces'],
+                "PAM should be one of the modules")
+            numCheckers += 1
+        except ImportError:
+            pass
+
+        self.assertIn(ISSHPrivateKey, self.options['credInterfaces'],
+            "SSH should be one of the default checkers")
+        self.assertIn(IUsernamePassword, self.options['credInterfaces'],
+            "UNIX should be one of the default checkers")
+        self.assertEqual(numCheckers, len(self.options['credCheckers']),
+            "There should be %d checkers by default" % (numCheckers,))
+
+
+    def test_authAdded(self):
+        """
+        The C{--auth} command-line option will add a checker to the list of
+        checkers, and it should be the only auth checker.
+        """
+        self.options.parseOptions(['--auth', 'file:' + self.filename])
+        self.assertEqual(len(self.options['credCheckers']), 1)
+
+
+    def test_multipleAuthAdded(self):
+        """
+        Multiple C{--auth} command-line options add all of the specified
+        checkers to the list of checkers, and only the specified auth checkers
+        are present (no default checkers).
+        """
+        self.options.parseOptions(['--auth', 'file:' + self.filename,
+                                   '--auth', 'memory:testuser:testpassword'])
+        self.assertEqual(len(self.options['credCheckers']), 2)
+
+
+    def test_authFailure(self):
+        """
+        The checker created by the C{--auth} command-line option returns a
+        L{Deferred} that fails with L{UnauthorizedLogin} when
+        presented with credentials that are unknown to that checker.
+        """
+        self.options.parseOptions(['--auth', 'file:' + self.filename])
+        checker = self.options['credCheckers'][-1]
+        invalid = UsernamePassword(self.usernamePassword[0], 'fake')
+        # Wrong password should raise error
+        return self.assertFailure(
+            checker.requestAvatarId(invalid), error.UnauthorizedLogin)
+
+
+    def test_authSuccess(self):
+        """
+        The checker created by the C{--auth} command-line option returns a
+        L{Deferred} that returns the avatar id when presented with credentials
+        that are known to that checker.
+        """
+        self.options.parseOptions(['--auth', 'file:' + self.filename])
+        checker = self.options['credCheckers'][-1]
+        correct = UsernamePassword(*self.usernamePassword)
+        d = checker.requestAvatarId(correct)
+
+        def checkSuccess(username):
+            self.assertEqual(username, correct.username)
+
+        return d.addCallback(checkSuccess)
+
+
+    def test_checkersPamAuth(self):
+        """
+        The L{OpenSSHFactory} built by L{tap.makeService} has a portal with
+        L{IPluggableAuthenticationModules}, L{ISSHPrivateKey} and
+        L{IUsernamePassword} interfaces registered as checkers if C{pamauth} is
+        available.
+        """
+        # Fake the presence of pamauth, even if PyPAM is not installed
+        self.patch(tap, "pamauth", object())
+        config = tap.Options()
+        service = tap.makeService(config)
+        portal = service.factory.portal
+        self.assertEqual(
+            set(portal.checkers.keys()),
+            set([IPluggableAuthenticationModules, ISSHPrivateKey,
+                 IUsernamePassword]))
+
+
+    def test_checkersWithoutPamAuth(self):
+        """
+        The L{OpenSSHFactory} built by L{tap.makeService} has a portal with
+        L{ISSHPrivateKey} and L{IUsernamePassword} interfaces registered as
+        checkers if C{pamauth} is not available.
+        """
+        # Fake the absence of pamauth, even if PyPAM is installed
+        self.patch(tap, "pamauth", None)
+        config = tap.Options()
+        service = tap.makeService(config)
+        portal = service.factory.portal
+        self.assertEqual(
+            set(portal.checkers.keys()),
+            set([ISSHPrivateKey, IUsernamePassword]))
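+
+
+# Editor's illustrative sketch, not part of upstream Twisted: the credentials
+# file handed to the C{--auth file:<path>} option in these tests holds one
+# colon-separated C{username:password} pair per line, as written in
+# MakeServiceTest.setUp above.  _writePasswordFile is a hypothetical helper
+# showing that layout for several users.
+def _writePasswordFile(path, pairs):
+    """
+    Write (username, password) pairs, one colon-separated pair per line.
+    """
+    f = open(path, 'w')
+    try:
+        f.write('\n'.join([':'.join(pair) for pair in pairs]))
+    finally:
+        f.close()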
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_telnet.py b/ThirdParty/Twisted/twisted/conch/test/test_telnet.py
new file mode 100644
index 0000000..9b5bf76
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_telnet.py
@@ -0,0 +1,767 @@
+# -*- test-case-name: twisted.conch.test.test_telnet -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.conch.telnet}.
+"""
+
+from zope.interface import implements
+from zope.interface.verify import verifyObject
+
+from twisted.internet import defer
+
+from twisted.conch import telnet
+
+from twisted.trial import unittest
+from twisted.test import proto_helpers
+
+
+class TestProtocol:
+    implements(telnet.ITelnetProtocol)
+
+    localEnableable = ()
+    remoteEnableable = ()
+
+    def __init__(self):
+        self.bytes = ''
+        self.subcmd = ''
+        self.calls = []
+
+        self.enabledLocal = []
+        self.enabledRemote = []
+        self.disabledLocal = []
+        self.disabledRemote = []
+
+    def makeConnection(self, transport):
+        d = transport.negotiationMap = {}
+        d['\x12'] = self.neg_TEST_COMMAND
+
+        d = transport.commandMap = transport.commandMap.copy()
+        for cmd in ('NOP', 'DM', 'BRK', 'IP', 'AO', 'AYT', 'EC', 'EL', 'GA'):
+            d[getattr(telnet, cmd)] = lambda arg, cmd=cmd: self.calls.append(cmd)
+
+    def dataReceived(self, bytes):
+        self.bytes += bytes
+
+    def connectionLost(self, reason):
+        pass
+
+    def neg_TEST_COMMAND(self, payload):
+        self.subcmd = payload
+
+    def enableLocal(self, option):
+        if option in self.localEnableable:
+            self.enabledLocal.append(option)
+            return True
+        return False
+
+    def disableLocal(self, option):
+        self.disabledLocal.append(option)
+
+    def enableRemote(self, option):
+        if option in self.remoteEnableable:
+            self.enabledRemote.append(option)
+            return True
+        return False
+
+    def disableRemote(self, option):
+        self.disabledRemote.append(option)
+
+
+
+class TestInterfaces(unittest.TestCase):
+    def test_interface(self):
+        """
+        L{telnet.TelnetProtocol} implements L{telnet.ITelnetProtocol}
+        """
+        p = telnet.TelnetProtocol()
+        verifyObject(telnet.ITelnetProtocol, p)
+
+
+
+class TelnetTransportTestCase(unittest.TestCase):
+    """
+    Tests for L{telnet.TelnetTransport}.
+    """
+    def setUp(self):
+        self.p = telnet.TelnetTransport(TestProtocol)
+        self.t = proto_helpers.StringTransport()
+        self.p.makeConnection(self.t)
+
+    def testRegularBytes(self):
+        # Just send a bunch of bytes.  None of these do anything
+        # with telnet.  They should pass right through to the
+        # application layer.
+        h = self.p.protocol
+
+        L = ["here are some bytes la la la",
+             "some more arrive here",
+             "lots of bytes to play with",
+             "la la la",
+             "ta de da",
+             "dum"]
+        for b in L:
+            self.p.dataReceived(b)
+
+        self.assertEqual(h.bytes, ''.join(L))
+
+    def testNewlineHandling(self):
+        # Send various kinds of newlines and make sure they get translated
+        # into \n.
+        h = self.p.protocol
+
+        L = ["here is the first line\r\n",
+             "here is the second line\r\0",
+             "here is the third line\r\n",
+             "here is the last line\r\0"]
+
+        for b in L:
+            self.p.dataReceived(b)
+
+        self.assertEqual(h.bytes, L[0][:-2] + '\n' +
+                          L[1][:-2] + '\r' +
+                          L[2][:-2] + '\n' +
+                          L[3][:-2] + '\r')
+
+    def testIACEscape(self):
+        # Send a bunch of bytes and a couple quoted \xFFs.  Unquoted,
+        # \xFF is a telnet command.  Quoted, one of them from each pair
+        # should be passed through to the application layer.
+        h = self.p.protocol
+
+        L = ["here are some bytes\xff\xff with an embedded IAC",
+             "and here is a test of a border escape\xff",
+             "\xff did you get that IAC?"]
+
+        for b in L:
+            self.p.dataReceived(b)
+
+        self.assertEqual(h.bytes, ''.join(L).replace('\xff\xff', '\xff'))
+
+    def _simpleCommandTest(self, cmdName):
+        # Send a single simple telnet command and make sure
+        # it gets noticed and the appropriate method gets
+        # called.
+        h = self.p.protocol
+
+        cmd = telnet.IAC + getattr(telnet, cmdName)
+        L = ["Here's some bytes, tra la la",
+             "But ono!" + cmd + " an interrupt"]
+
+        for b in L:
+            self.p.dataReceived(b)
+
+        self.assertEqual(h.calls, [cmdName])
+        self.assertEqual(h.bytes, ''.join(L).replace(cmd, ''))
+
+    def testInterrupt(self):
+        self._simpleCommandTest("IP")
+
+    def testNoOperation(self):
+        self._simpleCommandTest("NOP")
+
+    def testDataMark(self):
+        self._simpleCommandTest("DM")
+
+    def testBreak(self):
+        self._simpleCommandTest("BRK")
+
+    def testAbortOutput(self):
+        self._simpleCommandTest("AO")
+
+    def testAreYouThere(self):
+        self._simpleCommandTest("AYT")
+
+    def testEraseCharacter(self):
+        self._simpleCommandTest("EC")
+
+    def testEraseLine(self):
+        self._simpleCommandTest("EL")
+
+    def testGoAhead(self):
+        self._simpleCommandTest("GA")
+
+    def testSubnegotiation(self):
+        # Send a subnegotiation command and make sure it gets
+        # parsed and that the correct method is called.
+        h = self.p.protocol
+
+        cmd = telnet.IAC + telnet.SB + '\x12hello world' + telnet.IAC + telnet.SE
+        L = ["These are some bytes but soon" + cmd,
+             "there will be some more"]
+
+        for b in L:
+            self.p.dataReceived(b)
+
+        self.assertEqual(h.bytes, ''.join(L).replace(cmd, ''))
+        self.assertEqual(h.subcmd, list("hello world"))
+
+    def testSubnegotiationWithEmbeddedSE(self):
+        # Send a subnegotiation command with an embedded SE.  Make sure
+        # that SE gets passed to the correct method.
+        h = self.p.protocol
+
+        cmd = (telnet.IAC + telnet.SB +
+               '\x12' + telnet.SE +
+               telnet.IAC + telnet.SE)
+
+        L = ["Some bytes are here" + cmd + "and here",
+             "and here"]
+
+        for b in L:
+            self.p.dataReceived(b)
+
+        self.assertEqual(h.bytes, ''.join(L).replace(cmd, ''))
+        self.assertEqual(h.subcmd, [telnet.SE])
+
+    def testBoundarySubnegotiation(self):
+        # Send a subnegotiation command.  Split it at every possible byte boundary
+        # and make sure it always gets parsed and that it is passed to the correct
+        # method.
+        cmd = (telnet.IAC + telnet.SB +
+               '\x12' + telnet.SE + 'hello' +
+               telnet.IAC + telnet.SE)
+
+        for i in range(len(cmd)):
+            h = self.p.protocol = TestProtocol()
+            h.makeConnection(self.p)
+
+            a, b = cmd[:i], cmd[i:]
+            L = ["first part" + a,
+                 b + "last part"]
+
+            for bytes in L:
+                self.p.dataReceived(bytes)
+
+            self.assertEqual(h.bytes, ''.join(L).replace(cmd, ''))
+            self.assertEqual(h.subcmd, [telnet.SE] + list('hello'))
+
+    def _enabledHelper(self, o, eL=[], eR=[], dL=[], dR=[]):
+        self.assertEqual(o.enabledLocal, eL)
+        self.assertEqual(o.enabledRemote, eR)
+        self.assertEqual(o.disabledLocal, dL)
+        self.assertEqual(o.disabledRemote, dR)
+
+    def testRefuseWill(self):
+        # Try to enable an option.  The server should refuse to enable it.
+        cmd = telnet.IAC + telnet.WILL + '\x12'
+
+        bytes = "surrounding bytes" + cmd + "to spice things up"
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.p.protocol.bytes, bytes.replace(cmd, ''))
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.DONT + '\x12')
+        self._enabledHelper(self.p.protocol)
+
+    def testRefuseDo(self):
+        # Try to enable an option.  The server should refuse to enable it.
+        cmd = telnet.IAC + telnet.DO + '\x12'
+
+        bytes = "surrounding bytes" + cmd + "to spice things up"
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.p.protocol.bytes, bytes.replace(cmd, ''))
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.WONT + '\x12')
+        self._enabledHelper(self.p.protocol)
+
+    def testAcceptDo(self):
+        # Try to enable an option.  The option is in our allowEnable
+        # list, so we will allow it to be enabled.
+        cmd = telnet.IAC + telnet.DO + '\x19'
+        bytes = 'padding' + cmd + 'trailer'
+
+        h = self.p.protocol
+        h.localEnableable = ('\x19',)
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.WILL + '\x19')
+        self._enabledHelper(h, eL=['\x19'])
+
+    def testAcceptWill(self):
+        # Same as testAcceptDo, but reversed.
+        cmd = telnet.IAC + telnet.WILL + '\x91'
+        bytes = 'header' + cmd + 'padding'
+
+        h = self.p.protocol
+        h.remoteEnableable = ('\x91',)
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.DO + '\x91')
+        self._enabledHelper(h, eR=['\x91'])
+
+    def testAcceptWont(self):
+        # Try to disable an option.  The server must allow any option to
+        # be disabled at any time.  Make sure it disables it and sends
+        # back an acknowledgement of this.
+        cmd = telnet.IAC + telnet.WONT + '\x29'
+
+        # Jimmy it - after these two lines, the server will be in a state
+        # such that it believes the option to have been previously enabled
+        # via normal negotiation.
+        s = self.p.getOptionState('\x29')
+        s.him.state = 'yes'
+
+        bytes = "fiddle dee" + cmd
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.p.protocol.bytes, bytes.replace(cmd, ''))
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.DONT + '\x29')
+        self.assertEqual(s.him.state, 'no')
+        self._enabledHelper(self.p.protocol, dR=['\x29'])
+
+    def testAcceptDont(self):
+        # Try to disable an option.  The server must allow any option to
+        # be disabled at any time.  Make sure it disables it and sends
+        # back an acknowledgement of this.
+        cmd = telnet.IAC + telnet.DONT + '\x29'
+
+        # Jimmy it - after these two lines, the server will be in a state
+        # such that it believes the option to have been previously enabled
+        # via normal negotiation.
+        s = self.p.getOptionState('\x29')
+        s.us.state = 'yes'
+
+        bytes = "fiddle dum " + cmd
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.p.protocol.bytes, bytes.replace(cmd, ''))
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.WONT + '\x29')
+        self.assertEqual(s.us.state, 'no')
+        self._enabledHelper(self.p.protocol, dL=['\x29'])
+
+    def testIgnoreWont(self):
+        # Try to disable an option.  The option is already disabled.  The
+        # server should send nothing in response to this.
+        cmd = telnet.IAC + telnet.WONT + '\x47'
+
+        bytes = "dum de dum" + cmd + "tra la la"
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.p.protocol.bytes, bytes.replace(cmd, ''))
+        self.assertEqual(self.t.value(), '')
+        self._enabledHelper(self.p.protocol)
+
+    def testIgnoreDont(self):
+        # Try to disable an option.  The option is already disabled.  The
+        # server should send nothing in response to this.  Doing so could
+        # lead to a negotiation loop.
+        cmd = telnet.IAC + telnet.DONT + '\x47'
+
+        bytes = "dum de dum" + cmd + "tra la la"
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.p.protocol.bytes, bytes.replace(cmd, ''))
+        self.assertEqual(self.t.value(), '')
+        self._enabledHelper(self.p.protocol)
+
+    def testIgnoreWill(self):
+        # Try to enable an option.  The option is already enabled.  The
+        # server should send nothing in response to this.  Doing so could
+        # lead to a negotiation loop.
+        cmd = telnet.IAC + telnet.WILL + '\x56'
+
+        # Jimmy it - after these two lines, the server will be in a state
+        # such that it believes the option to have been previously enabled
+        # via normal negotiation.
+        s = self.p.getOptionState('\x56')
+        s.him.state = 'yes'
+
+        bytes = "tra la la" + cmd + "dum de dum"
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.p.protocol.bytes, bytes.replace(cmd, ''))
+        self.assertEqual(self.t.value(), '')
+        self._enabledHelper(self.p.protocol)
+
+    def testIgnoreDo(self):
+        # Try to enable an option.  The option is already enabled.  The
+        # server should send nothing in response to this.  Doing so could
+        # lead to a negotiation loop.
+        cmd = telnet.IAC + telnet.DO + '\x56'
+
+        # Jimmy it - after these two lines, the server will be in a state
+        # such that it believes the option to have been previously enabled
+        # via normal negotiation.
+        s = self.p.getOptionState('\x56')
+        s.us.state = 'yes'
+
+        bytes = "tra la la" + cmd + "dum de dum"
+        self.p.dataReceived(bytes)
+
+        self.assertEqual(self.p.protocol.bytes, bytes.replace(cmd, ''))
+        self.assertEqual(self.t.value(), '')
+        self._enabledHelper(self.p.protocol)
+
+    def testAcceptedEnableRequest(self):
+        # Try to enable an option through the user-level API.  This
+        # returns a Deferred that fires when negotiation about the option
+        # finishes.  Make sure it fires, make sure state gets updated
+        # properly, make sure the result indicates the option was enabled.
+        d = self.p.do('\x42')
+
+        h = self.p.protocol
+        h.remoteEnableable = ('\x42',)
+
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.DO + '\x42')
+
+        self.p.dataReceived(telnet.IAC + telnet.WILL + '\x42')
+
+        d.addCallback(self.assertEqual, True)
+        d.addCallback(lambda _:  self._enabledHelper(h, eR=['\x42']))
+        return d
+
+
+    def test_refusedEnableRequest(self):
+        """
+        If the peer refuses to enable an option we request it to enable, the
+        L{Deferred} returned by L{TelnetProtocol.do} fires with an
+        L{OptionRefused} L{Failure}.
+        """
+        # Try to enable an option through the user-level API.  This returns a
+        # Deferred that fires when negotiation about the option finishes.  Make
+        # sure state gets updated properly and the Deferred fails with
+        # OptionRefused.
+        self.p.protocol.remoteEnableable = ('\x42',)
+        d = self.p.do('\x42')
+
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.DO + '\x42')
+
+        s = self.p.getOptionState('\x42')
+        self.assertEqual(s.him.state, 'no')
+        self.assertEqual(s.us.state, 'no')
+        self.assertEqual(s.him.negotiating, True)
+        self.assertEqual(s.us.negotiating, False)
+
+        self.p.dataReceived(telnet.IAC + telnet.WONT + '\x42')
+
+        d = self.assertFailure(d, telnet.OptionRefused)
+        d.addCallback(lambda ignored: self._enabledHelper(self.p.protocol))
+        d.addCallback(
+            lambda ignored: self.assertEqual(s.him.negotiating, False))
+        return d
+
+
+    def test_refusedEnableOffer(self):
+        """
+        If the peer refuses to allow us to enable an option, the L{Deferred}
+        returned by L{TelnetProtocol.will} fires with an L{OptionRefused}
+        L{Failure}.
+        """
+        # Try to offer an option through the user-level API.  This returns a
+        # Deferred that fires when negotiation about the option finishes.  Make
+        # sure state gets updated properly and the Deferred fails with
+        # OptionRefused.
+        self.p.protocol.localEnableable = ('\x42',)
+        d = self.p.will('\x42')
+
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.WILL + '\x42')
+
+        s = self.p.getOptionState('\x42')
+        self.assertEqual(s.him.state, 'no')
+        self.assertEqual(s.us.state, 'no')
+        self.assertEqual(s.him.negotiating, False)
+        self.assertEqual(s.us.negotiating, True)
+
+        self.p.dataReceived(telnet.IAC + telnet.DONT + '\x42')
+
+        d = self.assertFailure(d, telnet.OptionRefused)
+        d.addCallback(lambda ignored: self._enabledHelper(self.p.protocol))
+        d.addCallback(
+            lambda ignored: self.assertEqual(s.us.negotiating, False))
+        return d
+
+
+    def testAcceptedDisableRequest(self):
+        # Try to disable an option through the user-level API.  This
+        # returns a Deferred that fires when negotiation about the option
+        # finishes.  Make sure it fires, make sure state gets updated
+        # properly, make sure the result indicates the option was disabled.
+        s = self.p.getOptionState('\x42')
+        s.him.state = 'yes'
+
+        d = self.p.dont('\x42')
+
+        self.assertEqual(self.t.value(), telnet.IAC + telnet.DONT + '\x42')
+
+        self.p.dataReceived(telnet.IAC + telnet.WONT + '\x42')
+
+        d.addCallback(self.assertEqual, True)
+        d.addCallback(lambda _: self._enabledHelper(self.p.protocol,
+                                                    dR=['\x42']))
+        return d
+
+    def testNegotiationBlocksFurtherNegotiation(self):
+        # Try to disable an option, then immediately try to enable it, then
+        # immediately try to disable it.  Ensure that the 2nd and 3rd calls
+        # fail quickly with the right exception.
+        s = self.p.getOptionState('\x24')
+        s.him.state = 'yes'
+        d2 = self.p.dont('\x24') # fires after the first line of _final
+
+        def _do(x):
+            d = self.p.do('\x24')
+            return self.assertFailure(d, telnet.AlreadyNegotiating)
+
+        def _dont(x):
+            d = self.p.dont('\x24')
+            return self.assertFailure(d, telnet.AlreadyNegotiating)
+
+        def _final(x):
+            self.p.dataReceived(telnet.IAC + telnet.WONT + '\x24')
+            # an assertion that only passes if d2 has fired
+            self._enabledHelper(self.p.protocol, dR=['\x24'])
+            # Make sure we allow this
+            self.p.protocol.remoteEnableable = ('\x24',)
+            d = self.p.do('\x24')
+            self.p.dataReceived(telnet.IAC + telnet.WILL + '\x24')
+            d.addCallback(self.assertEqual, True)
+            d.addCallback(lambda _: self._enabledHelper(self.p.protocol,
+                                                        eR=['\x24'],
+                                                        dR=['\x24']))
+            return d
+
+        d = _do(None)
+        d.addCallback(_dont)
+        d.addCallback(_final)
+        return d
+
+    def testSuperfluousDisableRequestRaises(self):
+        # Try to disable a disabled option.  Make sure it fails properly.
+        d = self.p.dont('\xab')
+        return self.assertFailure(d, telnet.AlreadyDisabled)
+
+    def testSuperfluousEnableRequestRaises(self):
+        # Try to enable an already enabled option.  Make sure it fails properly.
+        s = self.p.getOptionState('\xab')
+        s.him.state = 'yes'
+        d = self.p.do('\xab')
+        return self.assertFailure(d, telnet.AlreadyEnabled)
+
+    def testLostConnectionFailsDeferreds(self):
+        d1 = self.p.do('\x12')
+        d2 = self.p.do('\x23')
+        d3 = self.p.do('\x34')
+
+        class TestException(Exception):
+            pass
+
+        self.p.connectionLost(TestException("Total failure!"))
+
+        d1 = self.assertFailure(d1, TestException)
+        d2 = self.assertFailure(d2, TestException)
+        d3 = self.assertFailure(d3, TestException)
+        return defer.gatherResults([d1, d2, d3])
+
+
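+# Editor's illustrative sketch, not part of upstream Twisted: the escaping
+# rules exercised by testIACEscape and the subnegotiation tests above boil
+# down to doubling any IAC byte that is meant as data and framing
+# subnegotiations as IAC SB <option> <payload> IAC SE (RFC 854/855).
+# _frameSubnegotiation is a hypothetical helper, assuming a single non-IAC
+# option byte.
+def _frameSubnegotiation(option, payload):
+    """
+    Frame a telnet subnegotiation, doubling IAC bytes inside the payload so
+    they are treated as data rather than commands.
+    """
+    return (telnet.IAC + telnet.SB + option +
+            payload.replace(telnet.IAC, telnet.IAC * 2) +
+            telnet.IAC + telnet.SE)
+
+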
+class TestTelnet(telnet.Telnet):
+    """
+    A trivial extension of the telnet protocol class useful to unit tests.
+    """
+    def __init__(self):
+        telnet.Telnet.__init__(self)
+        self.events = []
+
+
+    def applicationDataReceived(self, bytes):
+        """
+        Record the given data in C{self.events}.
+        """
+        self.events.append(('bytes', bytes))
+
+
+    def unhandledCommand(self, command, bytes):
+        """
+        Record the given command in C{self.events}.
+        """
+        self.events.append(('command', command, bytes))
+
+
+    def unhandledSubnegotiation(self, command, bytes):
+        """
+        Record the given subnegotiation command in C{self.events}.
+        """
+        self.events.append(('negotiate', command, bytes))
+
+
+
+class TelnetTests(unittest.TestCase):
+    """
+    Tests for L{telnet.Telnet}.
+
+    L{telnet.Telnet} implements the TELNET protocol (RFC 854), including option
+    and suboption negotiation, and option state tracking.
+    """
+    def setUp(self):
+        """
+        Create an unconnected L{telnet.Telnet} to be used by tests.
+        """
+        self.protocol = TestTelnet()
+
+
+    def test_enableLocal(self):
+        """
+        L{telnet.Telnet.enableLocal} should reject all options, since
+        L{telnet.Telnet} does not know how to implement any options.
+        """
+        self.assertFalse(self.protocol.enableLocal('\0'))
+
+
+    def test_enableRemote(self):
+        """
+        L{telnet.Telnet.enableRemote} should reject all options, since
+        L{telnet.Telnet} does not know how to implement any options.
+        """
+        self.assertFalse(self.protocol.enableRemote('\0'))
+
+
+    def test_disableLocal(self):
+        """
+        It is an error for L{telnet.Telnet.disableLocal} to be called, since
+        L{telnet.Telnet.enableLocal} will never allow any options to be enabled
+        locally.  If a subclass overrides enableLocal, it must also override
+        disableLocal.
+        """
+        self.assertRaises(NotImplementedError, self.protocol.disableLocal, '\0')
+
+
+    def test_disableRemote(self):
+        """
+        It is an error for L{telnet.Telnet.disableRemote} to be called, since
+        L{telnet.Telnet.enableRemote} will never allow any options to be
+        enabled remotely.  If a subclass overrides enableRemote, it must also
+        override disableRemote.
+        """
+        self.assertRaises(NotImplementedError, self.protocol.disableRemote, '\0')
+
+
+    def test_requestNegotiation(self):
+        """
+        L{telnet.Telnet.requestNegotiation} formats the feature byte and the
+        payload bytes into the subnegotiation format and sends them.
+
+        See RFC 855.
+        """
+        transport = proto_helpers.StringTransport()
+        self.protocol.makeConnection(transport)
+        self.protocol.requestNegotiation('\x01', '\x02\x03')
+        self.assertEqual(
+            transport.value(),
+            # IAC SB feature bytes IAC SE
+            '\xff\xfa\x01\x02\x03\xff\xf0')
+
+
+    def test_requestNegotiationEscapesIAC(self):
+        """
+        If the payload for a subnegotiation includes I{IAC}, it is escaped by
+        L{telnet.Telnet.requestNegotiation} with another I{IAC}.
+
+        See RFC 855.
+        """
+        transport = proto_helpers.StringTransport()
+        self.protocol.makeConnection(transport)
+        self.protocol.requestNegotiation('\x01', '\xff')
+        self.assertEqual(
+            transport.value(),
+            '\xff\xfa\x01\xff\xff\xff\xf0')
+
+
+    def _deliver(self, bytes, *expected):
+        """
+        Pass the given bytes to the protocol's C{dataReceived} method and
+        assert that the given events occur.
+        """
+        received = self.protocol.events = []
+        self.protocol.dataReceived(bytes)
+        self.assertEqual(received, list(expected))
+
+
+    def test_oneApplicationDataByte(self):
+        """
+        One application-data byte in the default state gets delivered right
+        away.
+        """
+        self._deliver('a', ('bytes', 'a'))
+
+
+    def test_twoApplicationDataBytes(self):
+        """
+        Two application-data bytes in the default state get delivered
+        together.
+        """
+        self._deliver('bc', ('bytes', 'bc'))
+
+
+    def test_threeApplicationDataBytes(self):
+        """
+        Three application-data bytes followed by a control byte get
+        delivered, but the control byte doesn't.
+        """
+        self._deliver('def' + telnet.IAC, ('bytes', 'def'))
+
+
+    def test_escapedControl(self):
+        """
+        IAC in the escaped state gets delivered and so does another
+        application-data byte following it.
+        """
+        self._deliver(telnet.IAC)
+        self._deliver(telnet.IAC + 'g', ('bytes', telnet.IAC + 'g'))
+
+
+    def test_carriageReturn(self):
+        """
+        A carriage return only puts the protocol into the newline state.  A
+        linefeed in the newline state causes just the newline to be
+        delivered.  A nul in the newline state causes a carriage return to
+        be delivered.  An IAC in the newline state causes a carriage return
+        to be delivered and puts the protocol into the escaped state. 
+        Anything else causes a carriage return and that thing to be
+        delivered.
+        """
+        self._deliver('\r')
+        self._deliver('\n', ('bytes', '\n'))
+        self._deliver('\r\n', ('bytes', '\n'))
+
+        self._deliver('\r')
+        self._deliver('\0', ('bytes', '\r'))
+        self._deliver('\r\0', ('bytes', '\r'))
+
+        self._deliver('\r')
+        self._deliver('a', ('bytes', '\ra'))
+        self._deliver('\ra', ('bytes', '\ra'))
+
+        self._deliver('\r')
+        self._deliver(
+            telnet.IAC + telnet.IAC + 'x', ('bytes', '\r' + telnet.IAC + 'x'))
+
+
+    def test_applicationDataBeforeSimpleCommand(self):
+        """
+        Application bytes received before a command are delivered before the
+        command is processed.
+        """
+        self._deliver(
+            'x' + telnet.IAC + telnet.NOP,
+            ('bytes', 'x'), ('command', telnet.NOP, None))
+
+
+    def test_applicationDataBeforeCommand(self):
+        """
+        Application bytes received before a WILL/WONT/DO/DONT are delivered
+        before the command is processed.
+        """
+        self.protocol.commandMap = {}
+        self._deliver(
+            'y' + telnet.IAC + telnet.WILL + '\x00',
+            ('bytes', 'y'), ('command', telnet.WILL, '\x00'))
+
+
+    def test_applicationDataBeforeSubnegotiation(self):
+        """
+        Application bytes received before a subnegotiation command are
+        delivered before the negotiation is processed.
+        """
+        self._deliver(
+            'z' + telnet.IAC + telnet.SB + 'Qx' + telnet.IAC + telnet.SE,
+            ('bytes', 'z'), ('negotiate', 'Q', ['x']))
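+
+
+# Editor's illustrative sketch, not part of upstream Twisted: the newline
+# handling checked by testNewlineHandling and test_carriageReturn above
+# delivers CR LF as '\n' and CR NUL as '\r', while a CR followed by any other
+# byte is delivered along with that byte.  Ignoring the cross-chunk state the
+# real protocol keeps, the mapping for a self-contained chunk is roughly:
+def _translateNewlines(data):
+    """
+    Collapse telnet CR LF / CR NUL sequences in a self-contained chunk.
+    """
+    return data.replace('\r\n', '\n').replace('\r\0', '\r')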
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_text.py b/ThirdParty/Twisted/twisted/conch/test/test_text.py
new file mode 100644
index 0000000..1d68870
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_text.py
@@ -0,0 +1,101 @@
+# -*- test-case-name: twisted.conch.test.test_text -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.trial import unittest
+
+from twisted.conch.insults import helper, text
+
+A = text.attributes
+
+class Serialization(unittest.TestCase):
+    def setUp(self):
+        self.attrs = helper.CharacterAttribute()
+
+    def testTrivial(self):
+        self.assertEqual(
+            text.flatten(A.normal['Hello, world.'], self.attrs),
+            'Hello, world.')
+
+    def testBold(self):
+        self.assertEqual(
+            text.flatten(A.bold['Hello, world.'], self.attrs),
+            '\x1b[1mHello, world.')
+
+    def testUnderline(self):
+        self.assertEqual(
+            text.flatten(A.underline['Hello, world.'], self.attrs),
+            '\x1b[4mHello, world.')
+
+    def testBlink(self):
+        self.assertEqual(
+            text.flatten(A.blink['Hello, world.'], self.attrs),
+            '\x1b[5mHello, world.')
+
+    def testReverseVideo(self):
+        self.assertEqual(
+            text.flatten(A.reverseVideo['Hello, world.'], self.attrs),
+            '\x1b[7mHello, world.')
+
+    def testMinus(self):
+        self.assertEqual(
+            text.flatten(
+                A.bold[A.blink['Hello', -A.bold[' world'], '.']],
+                self.attrs),
+            '\x1b[1;5mHello\x1b[0;5m world\x1b[1;5m.')
+
+    def testForeground(self):
+        self.assertEqual(
+            text.flatten(
+                A.normal[A.fg.red['Hello, '], A.fg.green['world!']],
+                self.attrs),
+            '\x1b[31mHello, \x1b[32mworld!')
+
+    def testBackground(self):
+        self.assertEqual(
+            text.flatten(
+                A.normal[A.bg.red['Hello, '], A.bg.green['world!']],
+                self.attrs),
+            '\x1b[41mHello, \x1b[42mworld!')
+
+
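+# Editor's illustrative note, not part of upstream Twisted: the escape
+# sequences asserted in this file are ANSI SGR codes: '\x1b[' followed by
+# ';'-separated attribute numbers and a final 'm'.  The codes these tests
+# rely on are listed here for reference (_SGR_CODES is a hypothetical name,
+# not an attribute of twisted.conch.insults.text):
+_SGR_CODES = {
+    'normal': 0, 'bold': 1, 'underline': 4, 'blink': 5, 'reverseVideo': 7,
+    'fg.red': 31, 'fg.green': 32,    # foreground colours use 30-37
+    'bg.red': 41, 'bg.green': 42, 'bg.cyan': 46,    # backgrounds use 40-47
+}
+
+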
+class EfficiencyTestCase(unittest.TestCase):
+    todo = ("flatten() isn't quite stateful enough to avoid emitting a few extra bytes in "
+            "certain circumstances, so these tests fail.  The failures take the form of "
+            "additional elements in the ;-delimited character attribute lists.  For example, "
+            "\\x1b[0;31;46m might be emitted instead of \\x[46m, even if 31 has already been "
+            "activated and no conflicting attributes are set which need to be cleared.")
+
+    def setUp(self):
+        self.attrs = helper.CharacterAttribute()
+
+    def testComplexStructure(self):
+        output = A.normal[
+            A.bold[
+                A.bg.cyan[
+                    A.fg.red[
+                        "Foreground Red, Background Cyan, Bold",
+                        A.blink[
+                            "Blinking"],
+                        -A.bold[
+                            "Foreground Red, Background Cyan, normal"]],
+                    A.fg.green[
+                        "Foreground Green, Background Cyan, Bold"]]]]
+
+        self.assertEqual(
+            text.flatten(output, self.attrs),
+            "\x1b[1;31;46mForeground Red, Background Cyan, Bold"
+            "\x1b[5mBlinking"
+            "\x1b[0;31;46mForeground Red, Background Cyan, normal"
+            "\x1b[1;32;46mForeground Green, Background Cyan, Bold")
+
+    def testNesting(self):
+        self.assertEqual(
+            text.flatten(A.bold['Hello, ', A.underline['world.']], self.attrs),
+            '\x1b[1mHello, \x1b[4mworld.')
+
+        self.assertEqual(
+            text.flatten(
+                A.bold[A.reverseVideo['Hello, ', A.normal['world'], '.']],
+                self.attrs),
+            '\x1b[1;7mHello, \x1b[0mworld\x1b[1;7m.')
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_transport.py b/ThirdParty/Twisted/twisted/conch/test/test_transport.py
new file mode 100644
index 0000000..8b801b0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_transport.py
@@ -0,0 +1,2225 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for ssh/transport.py and the classes therein.
+"""
+
+try:
+    import pyasn1
+except ImportError:
+    pyasn1 = None
+
+try:
+    import Crypto.Cipher.DES3
+except ImportError:
+    Crypto = None
+
+if pyasn1 is not None and Crypto is not None:
+    dependencySkip = None
+    from twisted.conch.ssh import transport, keys, factory
+    from twisted.conch.test import keydata
+else:
+    if pyasn1 is None:
+        dependencySkip = "Cannot run without PyASN1"
+    elif Crypto is None:
+        dependencySkip = "can't run w/o PyCrypto"
+
+    class transport: # fictional modules to make classes work
+        class SSHTransportBase: pass
+        class SSHServerTransport: pass
+        class SSHClientTransport: pass
+    class factory:
+        class SSHFactory:
+            pass
+
+from twisted.trial import unittest
+from twisted.internet import defer
+from twisted.protocols import loopback
+from twisted.python import randbytes
+from twisted.python.reflect import qual, getClass
+from twisted.python.hashlib import md5, sha1
+from twisted.conch.ssh import address, service, common
+from twisted.test import proto_helpers
+
+from twisted.conch.error import ConchError
+
+class MockTransportBase(transport.SSHTransportBase):
+    """
+    A base class for the client and server protocols.  Stores the messages
+    it receives instead of ignoring them.
+
+    @ivar errors: a list of tuples: (reasonCode, description)
+    @ivar unimplementeds: a list of integers: sequence number
+    @ivar debugs: a list of tuples: (alwaysDisplay, message, lang)
+    @ivar ignoreds: a list of strings: ignored data
+    """
+
+    def connectionMade(self):
+        """
+        Set up instance variables.
+        """
+        transport.SSHTransportBase.connectionMade(self)
+        self.errors = []
+        self.unimplementeds = []
+        self.debugs = []
+        self.ignoreds = []
+        self.gotUnsupportedVersion = None
+
+
+    def _unsupportedVersionReceived(self, remoteVersion):
+        """
+        Intercept unsupported version call.
+
+        @type remoteVersion: C{str}
+        """
+        self.gotUnsupportedVersion = remoteVersion
+        return transport.SSHTransportBase._unsupportedVersionReceived(
+            self, remoteVersion)
+
+
+    def receiveError(self, reasonCode, description):
+        """
+        Store any errors received.
+
+        @type reasonCode: C{int}
+        @type description: C{str}
+        """
+        self.errors.append((reasonCode, description))
+
+
+    def receiveUnimplemented(self, seqnum):
+        """
+        Store any unimplemented packet messages.
+
+        @type seqnum: C{int}
+        """
+        self.unimplementeds.append(seqnum)
+
+
+    def receiveDebug(self, alwaysDisplay, message, lang):
+        """
+        Store any debug messages.
+
+        @type alwaysDisplay: C{bool}
+        @type message: C{str}
+        @type lang: C{str}
+        """
+        self.debugs.append((alwaysDisplay, message, lang))
+
+
+    def ssh_IGNORE(self, packet):
+        """
+        Store any ignored data.
+
+        @type packet: C{str}
+        """
+        self.ignoreds.append(packet)
+
+
+class MockCipher(object):
+    """
+    A mocked-up version of twisted.conch.ssh.transport.SSHCiphers.
+    """
+    outCipType = 'test'
+    encBlockSize = 6
+    inCipType = 'test'
+    decBlockSize = 6
+    inMACType = 'test'
+    outMACType = 'test'
+    verifyDigestSize = 1
+    usedEncrypt = False
+    usedDecrypt = False
+    outMAC = (None, '', '', 1)
+    inMAC = (None, '', '', 1)
+    keys = ()
+
+
+    def encrypt(self, x):
+        """
+        Called to encrypt the packet.  Simply record that encryption was used
+        and return the data unchanged.
+        """
+        self.usedEncrypt = True
+        if (len(x) % self.encBlockSize) != 0:
+            raise RuntimeError("length %i modulo blocksize %i is not 0: %i" %
+                    (len(x), self.encBlockSize, len(x) % self.encBlockSize))
+        return x
+
+
+    def decrypt(self, x):
+        """
+        Called to decrypt the packet.  Simply record that decryption was used
+        and return the data unchanged.
+        """
+        self.usedDecrypt = True
+        if (len(x) % self.decBlockSize) != 0:
+            raise RuntimeError("length %i modulo blocksize %i is not 0: %i" %
+                    (len(x), self.decBlockSize, len(x) % self.decBlockSize))
+        return x
+
+
+    def makeMAC(self, outgoingPacketSequence, payload):
+        """
+        Make a Message Authentication Code consisting of the single byte whose
+        value is the outgoing packet sequence number.
+        """
+        return chr(outgoingPacketSequence)
+
+
+    def verify(self, incomingPacketSequence, packet, macData):
+        """
+        Verify the Message Authentication Code by checking that the packet
+        sequence number is the same.
+        """
+        return chr(incomingPacketSequence) == macData
+
+
+    def setKeys(self, ivOut, keyOut, ivIn, keyIn, macIn, macOut):
+        """
+        Record the keys.
+        """
+        self.keys = (ivOut, keyOut, ivIn, keyIn, macIn, macOut)
+
+
+
+class MockCompression:
+    """
+    A mocked-up compression, based on the zlib interface.  Instead of
+    compressing, it reverses the data and adds a 0x66 byte to the end.
+    """
+
+
+    def compress(self, payload):
+        return payload[::-1] # reversed
+
+
+    def decompress(self, payload):
+        return payload[:-1][::-1]
+
+
+    def flush(self, kind):
+        return '\x66'
+
+
+
+class MockService(service.SSHService):
+    """
+    A mocked-up service, based on twisted.conch.ssh.service.SSHService.
+
+    @ivar started: True if this service has been started.
+    @ivar stopped: True if this service has been stopped.
+    """
+    name = "MockService"
+    started = False
+    stopped = False
+    protocolMessages = {0xff: "MSG_TEST", 71: "MSG_fiction"}
+
+
+    def logPrefix(self):
+        return "MockService"
+
+
+    def serviceStarted(self):
+        """
+        Record that the service was started.
+        """
+        self.started = True
+
+
+    def serviceStopped(self):
+        """
+        Record that the service was stopped.
+        """
+        self.stopped = True
+
+
+    def ssh_TEST(self, packet):
+        """
+        A message that this service responds to.
+        """
+        self.transport.sendPacket(0xff, packet)
+
+
+class MockFactory(factory.SSHFactory):
+    """
+    A mocked-up factory based on twisted.conch.ssh.factory.SSHFactory.
+    """
+    services = {
+        'ssh-userauth': MockService}
+
+
+    def getPublicKeys(self):
+        """
+        Return the public keys that authenticate this server.
+        """
+        return {
+            'ssh-rsa': keys.Key.fromString(keydata.publicRSA_openssh),
+            'ssh-dsa': keys.Key.fromString(keydata.publicDSA_openssh)}
+
+
+    def getPrivateKeys(self):
+        """
+        Return the private keys that authenticate this server.
+        """
+        return {
+            'ssh-rsa': keys.Key.fromString(keydata.privateRSA_openssh),
+            'ssh-dsa': keys.Key.fromString(keydata.privateDSA_openssh)}
+
+
+    def getPrimes(self):
+        """
+        Return the Diffie-Hellman primes that can be used for the
+        diffie-hellman-group-exchange-sha1 key exchange.
+        """
+        return {
+            1024: ((2, transport.DH_PRIME),),
+            2048: ((3, transport.DH_PRIME),),
+            4096: ((5, 7),)}
+
+
+
+class MockOldFactoryPublicKeys(MockFactory):
+    """
+    The old SSHFactory returned mappings from key names to strings from
+    getPublicKeys().  We return those here for testing.
+    """
+
+
+    def getPublicKeys(self):
+        """
+        We used to map key types to public key blobs as strings.
+        """
+        keys = MockFactory.getPublicKeys(self)
+        for name, key in keys.items()[:]:
+            keys[name] = key.blob()
+        return keys
+
+
+
+class MockOldFactoryPrivateKeys(MockFactory):
+    """
+    The old SSHFactory returned mappings from key names to PyCrypto key
+    objects from getPrivateKeys().  We return those here for testing.
+    """
+
+
+    def getPrivateKeys(self):
+        """
+        We used to map key types to PyCrypto key objects.
+        """
+        keys = MockFactory.getPrivateKeys(self)
+        for name, key in keys.items()[:]:
+            keys[name] = key.keyObject
+        return keys
+
+
+class TransportTestCase(unittest.TestCase):
+    """
+    Base class for transport test cases.
+    """
+    klass = None
+
+    if Crypto is None:
+        skip = "cannot run w/o PyCrypto"
+
+    if pyasn1 is None:
+        skip = "Cannot run without PyASN1"
+
+
+    def setUp(self):
+        self.transport = proto_helpers.StringTransport()
+        self.proto = self.klass()
+        self.packets = []
+        def secureRandom(nbytes):
+            """
+            Return a predictable value in place of real entropy so that the
+            tests are deterministic.
+            """
+            return '\x99' * nbytes
+        self.oldSecureRandom = randbytes.secureRandom
+        randbytes.secureRandom = secureRandom
+        def stubSendPacket(messageType, payload):
+            self.packets.append((messageType, payload))
+        self.proto.makeConnection(self.transport)
+        # we just let the kex packet go into the transport
+        self.proto.sendPacket = stubSendPacket
+
+
+    def finishKeyExchange(self, proto):
+        """
+        Deliver enough additional messages to C{proto} so that the key exchange
+        which is started in L{SSHTransportBase.connectionMade} completes and
+        non-key exchange messages can be sent and received.
+        """
+        proto.dataReceived("SSH-2.0-BogoClient-1.2i\r\n")
+        proto.dispatchMessage(
+            transport.MSG_KEXINIT, self._A_KEXINIT_MESSAGE)
+        proto._keySetup("foo", "bar")
+        # SSHTransportBase can't handle MSG_NEWKEYS, or it would be the right
+        # thing to deliver next.  _newKeys won't work either, because
+        # sendKexInit (probably) hasn't been called.  sendKexInit is
+        # responsible for setting up certain state _newKeys relies on.  So,
+        # just change the key exchange state to what it would be when key
+        # exchange is finished.
+        proto._keyExchangeState = proto._KEY_EXCHANGE_NONE
+
+
+    def tearDown(self):
+        randbytes.secureRandom = self.oldSecureRandom
+        self.oldSecureRandom = None
+
+
+    def simulateKeyExchange(self, sharedSecret, exchangeHash):
+        """
+        Finish a key exchange by calling C{_keySetup} with the given arguments.
+        Also perform the whitebox state changes needed to satisfy that
+        method's assumption that a key exchange has actually taken place.
+        """
+        self.proto._keyExchangeState = self.proto._KEY_EXCHANGE_REQUESTED
+        self.proto._blockedByKeyExchange = []
+        self.proto._keySetup(sharedSecret, exchangeHash)
+
+
+
+class BaseSSHTransportTestCase(TransportTestCase):
+    """
+    Test SSHTransportBase.  It implements the parts of the SSH transport
+    protocol that are common to the server and the client.
+    """
+
+    klass = MockTransportBase
+
+    _A_KEXINIT_MESSAGE = (
+        "\xAA" * 16 +
+        common.NS('diffie-hellman-group1-sha1') +
+        common.NS('ssh-rsa') +
+        common.NS('aes256-ctr') +
+        common.NS('aes256-ctr') +
+        common.NS('hmac-sha1') +
+        common.NS('hmac-sha1') +
+        common.NS('none') +
+        common.NS('none') +
+        common.NS('') +
+        common.NS('') +
+        '\x00' + '\x00\x00\x00\x00')
+
+    def test_sendVersion(self):
+        """
+        Test that the first thing sent over the connection is the version
+        string.
+        """
+        # the rest of the setup was done in the setUp method
+        self.assertEqual(self.transport.value().split('\r\n', 1)[0],
+                          "SSH-2.0-Twisted")
+
+
+    def test_sendPacketPlain(self):
+        """
+        Test that plain (unencrypted, uncompressed) packets are sent
+        correctly.  The format is::
+            uint32 length (including type and padding length)
+            byte padding length
+            byte type
+            bytes[length-padding length-2] data
+            bytes[padding length] padding
+        """
+        proto = MockTransportBase()
+        proto.makeConnection(self.transport)
+        self.finishKeyExchange(proto)
+        self.transport.clear()
+        message = ord('A')
+        payload = 'BCDEFG'
+        proto.sendPacket(message, payload)
+        value = self.transport.value()
+        self.assertEqual(value, '\x00\x00\x00\x0c\x04ABCDEFG\x99\x99\x99\x99')
+
+
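+    # A minimal sketch (a hypothetical helper, not used by the tests,
+    # assuming no encryption or compression and the fixed '\x99' padding
+    # byte monkeypatched in setUp) of the plain packet layout documented in
+    # test_sendPacketPlain above.
+    def _buildPlainPacketExample(self, messageType, payload, blockSize=8):
+        import struct
+        body = chr(messageType) + payload
+        # The length field, padding-length byte, body and padding together
+        # must fill a multiple of the block size, with at least 4 padding
+        # bytes.
+        paddingLength = blockSize - ((5 + len(body)) % blockSize)
+        if paddingLength < 4:
+            paddingLength += blockSize
+        packetLength = 1 + len(body) + paddingLength
+        return (struct.pack('>LB', packetLength, paddingLength)
+                + body + '\x99' * paddingLength)
+
+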
+    def test_sendPacketEncrypted(self):
+        """
+        Test that packets sent while encryption is enabled are sent
+        correctly.  The whole packet should be encrypted.
+        """
+        proto = MockTransportBase()
+        proto.makeConnection(self.transport)
+        self.finishKeyExchange(proto)
+        proto.currentEncryptions = testCipher = MockCipher()
+        message = ord('A')
+        payload = 'BC'
+        self.transport.clear()
+        proto.sendPacket(message, payload)
+        self.assertTrue(testCipher.usedEncrypt)
+        value = self.transport.value()
+        self.assertEqual(
+            value,
+            # Four byte length prefix
+            '\x00\x00\x00\x08'
+            # One byte padding length
+            '\x04'
+            # The actual application data
+            'ABC'
+            # "Random" padding - see the secureRandom monkeypatch in setUp
+            '\x99\x99\x99\x99'
+            # The MAC
+            '\x02')
+
+
+    def test_sendPacketCompressed(self):
+        """
+        Test that packets sent while compression is enabled are sent
+        correctly.  The packet type and data should be encrypted.
+        """
+        proto = MockTransportBase()
+        proto.makeConnection(self.transport)
+        self.finishKeyExchange(proto)
+        proto.outgoingCompression = MockCompression()
+        self.transport.clear()
+        proto.sendPacket(ord('A'), 'B')
+        value = self.transport.value()
+        self.assertEqual(
+            value,
+            '\x00\x00\x00\x0c\x08BA\x66\x99\x99\x99\x99\x99\x99\x99\x99')
+
+
+    def test_sendPacketBoth(self):
+        """
+        Test that packets sent while compression and encryption are
+        enabled are sent correctly.  The packet type and data should be
+        compressed and then the whole packet should be encrypted.
+        """
+        proto = MockTransportBase()
+        proto.makeConnection(self.transport)
+        self.finishKeyExchange(proto)
+        proto.currentEncryptions = testCipher = MockCipher()
+        proto.outgoingCompression = MockCompression()
+        message = ord('A')
+        payload = 'BC'
+        self.transport.clear()
+        proto.sendPacket(message, payload)
+        self.assertTrue(testCipher.usedEncrypt)
+        value = self.transport.value()
+        self.assertEqual(
+            value,
+            # Four byte length prefix
+            '\x00\x00\x00\x0e'
+            # One byte padding length
+            '\x09'
+            # Compressed application data
+            'CBA\x66'
+            # "Random" padding - see the secureRandom monkeypatch in setUp
+            '\x99\x99\x99\x99\x99\x99\x99\x99\x99'
+            # The MAC
+            '\x02')
+
+
+    def test_getPacketPlain(self):
+        """
+        Test that packets are retrieved correctly out of the buffer when
+        no encryption is enabled.
+        """
+        proto = MockTransportBase()
+        proto.makeConnection(self.transport)
+        self.finishKeyExchange(proto)
+        self.transport.clear()
+        proto.sendPacket(ord('A'), 'BC')
+        proto.buf = self.transport.value() + 'extra'
+        self.assertEqual(proto.getPacket(), 'ABC')
+        self.assertEqual(proto.buf, 'extra')
+
+
+    def test_getPacketEncrypted(self):
+        """
+        Test that encrypted packets are retrieved correctly.
+        See test_sendPacketEncrypted.
+        """
+        proto = MockTransportBase()
+        proto.sendKexInit = lambda: None # don't send packets
+        proto.makeConnection(self.transport)
+        self.transport.clear()
+        proto.currentEncryptions = testCipher = MockCipher()
+        proto.sendPacket(ord('A'), 'BCD')
+        value = self.transport.value()
+        proto.buf = value[:MockCipher.decBlockSize]
+        self.assertEqual(proto.getPacket(), None)
+        self.assertTrue(testCipher.usedDecrypt)
+        self.assertEqual(proto.first, '\x00\x00\x00\x0e\x09A')
+        proto.buf += value[MockCipher.decBlockSize:]
+        self.assertEqual(proto.getPacket(), 'ABCD')
+        self.assertEqual(proto.buf, '')
+
+
+    def test_getPacketCompressed(self):
+        """
+        Test that compressed packets are retrieved correctly.  See
+        test_sendPacketCompressed.
+        """
+        proto = MockTransportBase()
+        proto.makeConnection(self.transport)
+        self.finishKeyExchange(proto)
+        self.transport.clear()
+        proto.outgoingCompression = MockCompression()
+        proto.incomingCompression = proto.outgoingCompression
+        proto.sendPacket(ord('A'), 'BCD')
+        proto.buf = self.transport.value()
+        self.assertEqual(proto.getPacket(), 'ABCD')
+
+
+    def test_getPacketBoth(self):
+        """
+        Test that compressed and encrypted packets are retrieved correctly.
+        See test_sendPacketBoth.
+        """
+        proto = MockTransportBase()
+        proto.sendKexInit = lambda: None
+        proto.makeConnection(self.transport)
+        self.transport.clear()
+        proto.currentEncryptions = MockCipher()
+        proto.outgoingCompression = MockCompression()
+        proto.incomingCompression = proto.outgoingCompression
+        proto.sendPacket(ord('A'), 'BCDEFG')
+        proto.buf = self.transport.value()
+        self.assertEqual(proto.getPacket(), 'ABCDEFG')
+
+
+    def test_ciphersAreValid(self):
+        """
+        Test that all the supportedCiphers are valid.
+        """
+        ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
+        iv = key = '\x00' * 16
+        for cipName in self.proto.supportedCiphers:
+            self.assertTrue(ciphers._getCipher(cipName, iv, key))
+
+
+    def test_sendKexInit(self):
+        """
+        Test that the KEXINIT (key exchange initiation) message is sent
+        correctly.  Payload::
+            bytes[16] cookie
+            string key exchange algorithms
+            string public key algorithms
+            string outgoing ciphers
+            string incoming ciphers
+            string outgoing MACs
+            string incoming MACs
+            string outgoing compressions
+            string incoming compressions
+            bool first packet follows
+            uint32 0
+        """
+        value = self.transport.value().split('\r\n', 1)[1]
+        self.proto.buf = value
+        packet = self.proto.getPacket()
+        self.assertEqual(packet[0], chr(transport.MSG_KEXINIT))
+        self.assertEqual(packet[1:17], '\x99' * 16)
+        (kex, pubkeys, ciphers1, ciphers2, macs1, macs2, compressions1,
+         compressions2, languages1, languages2,
+         buf) = common.getNS(packet[17:], 10)
+
+        self.assertEqual(kex, ','.join(self.proto.supportedKeyExchanges))
+        self.assertEqual(pubkeys, ','.join(self.proto.supportedPublicKeys))
+        self.assertEqual(ciphers1, ','.join(self.proto.supportedCiphers))
+        self.assertEqual(ciphers2, ','.join(self.proto.supportedCiphers))
+        self.assertEqual(macs1, ','.join(self.proto.supportedMACs))
+        self.assertEqual(macs2, ','.join(self.proto.supportedMACs))
+        self.assertEqual(compressions1,
+                          ','.join(self.proto.supportedCompressions))
+        self.assertEqual(compressions2,
+                          ','.join(self.proto.supportedCompressions))
+        self.assertEqual(languages1, ','.join(self.proto.supportedLanguages))
+        self.assertEqual(languages2, ','.join(self.proto.supportedLanguages))
+        self.assertEqual(buf, '\x00' * 5)
+
+
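+    # A minimal sketch (a hypothetical helper, not used by the tests) showing
+    # how a KEXINIT payload with the layout documented in test_sendKexInit
+    # above can be taken apart: a 16-byte cookie, ten SSH name-lists, a
+    # "first kex packet follows" flag and a reserved uint32.
+    def _parseKexInitPayloadExample(self, payload):
+        cookie, rest = payload[:16], payload[16:]
+        fields = common.getNS(rest, 10)  # ten name-lists plus the leftover
+        nameLists, rest = fields[:10], fields[10]
+        firstKexPacketFollows = bool(ord(rest[0]))
+        reserved = rest[1:5]  # always '\x00\x00\x00\x00'
+        return cookie, nameLists, firstKexPacketFollows, reserved
+
+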
+    def test_receiveKEXINITReply(self):
+        """
+        Immediately after connecting, the transport expects a KEXINIT message
+        and does not reply to it.
+        """
+        self.transport.clear()
+        self.proto.dispatchMessage(
+            transport.MSG_KEXINIT, self._A_KEXINIT_MESSAGE)
+        self.assertEqual(self.packets, [])
+
+
+    def test_sendKEXINITReply(self):
+        """
+        When a KEXINIT message is received that is not a reply to an earlier
+        KEXINIT message sent by this side, a KEXINIT reply is sent.
+        """
+        self.finishKeyExchange(self.proto)
+        del self.packets[:]
+
+        self.proto.dispatchMessage(
+            transport.MSG_KEXINIT, self._A_KEXINIT_MESSAGE)
+        self.assertEqual(len(self.packets), 1)
+        self.assertEqual(self.packets[0][0], transport.MSG_KEXINIT)
+
+
+    def test_sendKexInitTwiceFails(self):
+        """
+        A new key exchange cannot be started while a key exchange is already in
+        progress.  Attempting to send a I{KEXINIT} message using
+        L{SSHTransportBase.sendKexInit} while a key exchange is in progress
+        causes that method to raise a L{RuntimeError}.
+        """
+        self.assertRaises(RuntimeError, self.proto.sendKexInit)
+
+
+    def test_sendKexInitBlocksOthers(self):
+        """
+        After L{SSHTransportBase.sendKexInit} has been called, message types
+        other than those permitted during key exchange are queued and not sent
+        until after I{NEWKEYS} is sent by L{SSHTransportBase._keySetup}.
+
+        RFC 4253, section 7.1.
+        """
+        # sendKexInit is called by connectionMade, which is called in setUp.
+        # So we're in the state already.
+        disallowedMessageTypes = [
+            transport.MSG_SERVICE_REQUEST,
+            transport.MSG_KEXINIT,
+            ]
+
+        # Drop all the bytes sent by setUp, they're not relevant to this test.
+        self.transport.clear()
+
+        # Get rid of the sendPacket monkey patch, we are testing the behavior
+        # of sendPacket.
+        del self.proto.sendPacket
+
+        for messageType in disallowedMessageTypes:
+            self.proto.sendPacket(messageType, 'foo')
+            self.assertEqual(self.transport.value(), "")
+
+        self.finishKeyExchange(self.proto)
+        # Make the bytes written to the transport cleartext so it's easier to
+        # make an assertion about them.
+        self.proto.nextEncryptions = MockCipher()
+
+        # Pseudo-deliver the peer's NEWKEYS message, which should flush the
+        # messages which were queued above.
+        self.proto._newKeys()
+        self.assertEqual(self.transport.value().count("foo"), 2)
+
+
+    def test_sendDebug(self):
+        """
+        Test that debug messages are sent correctly.  Payload::
+            bool always display
+            string debug message
+            string language
+        """
+        self.proto.sendDebug("test", True, 'en')
+        self.assertEqual(
+            self.packets,
+            [(transport.MSG_DEBUG,
+              "\x01\x00\x00\x00\x04test\x00\x00\x00\x02en")])
+
+
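+    # A minimal sketch (a hypothetical helper, not used by the tests) of the
+    # MSG_DEBUG payload layout documented in test_sendDebug above: a boolean
+    # "always display" flag followed by two SSH strings, the debug message
+    # and the language tag.
+    def _buildDebugPayloadExample(self, message, alwaysDisplay=True,
+                                  language='en'):
+        return (chr(bool(alwaysDisplay)) + common.NS(message)
+                + common.NS(language))
+
+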
+    def test_receiveDebug(self):
+        """
+        Test that debug messages are received correctly.  See test_sendDebug.
+        """
+        self.proto.dispatchMessage(
+            transport.MSG_DEBUG,
+            '\x01\x00\x00\x00\x04test\x00\x00\x00\x02en')
+        self.assertEqual(self.proto.debugs, [(True, 'test', 'en')])
+
+
+    def test_sendIgnore(self):
+        """
+        Test that ignored messages are sent correctly.  Payload::
+            string ignored data
+        """
+        self.proto.sendIgnore("test")
+        self.assertEqual(
+            self.packets, [(transport.MSG_IGNORE,
+                            '\x00\x00\x00\x04test')])
+
+
+    def test_receiveIgnore(self):
+        """
+        Test that ignored messages are received correctly.  See
+        test_sendIgnore.
+        """
+        self.proto.dispatchMessage(transport.MSG_IGNORE, 'test')
+        self.assertEqual(self.proto.ignoreds, ['test'])
+
+
+    def test_sendUnimplemented(self):
+        """
+        Test that unimplemented messages are sent correctly.  Payload::
+            uint32 sequence number
+        """
+        self.proto.sendUnimplemented()
+        self.assertEqual(
+            self.packets, [(transport.MSG_UNIMPLEMENTED,
+                            '\x00\x00\x00\x00')])
+
+
+    def test_receiveUnimplemented(self):
+        """
+        Test that unimplemented messages are received correctly.  See
+        test_sendUnimplemented.
+        """
+        self.proto.dispatchMessage(transport.MSG_UNIMPLEMENTED,
+                                   '\x00\x00\x00\xff')
+        self.assertEqual(self.proto.unimplementeds, [255])
+
+
+    def test_sendDisconnect(self):
+        """
+        Test that disconnection messages are sent correctly.  Payload::
+            uint32 reason code
+            string reason description
+            string language
+        """
+        disconnected = [False]
+        def stubLoseConnection():
+            disconnected[0] = True
+        self.transport.loseConnection = stubLoseConnection
+        self.proto.sendDisconnect(0xff, "test")
+        self.assertEqual(
+            self.packets,
+            [(transport.MSG_DISCONNECT,
+              "\x00\x00\x00\xff\x00\x00\x00\x04test\x00\x00\x00\x00")])
+        self.assertTrue(disconnected[0])
+
+
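+    # A minimal sketch (a hypothetical helper, not used by the tests) of the
+    # MSG_DISCONNECT payload layout documented in test_sendDisconnect above:
+    # a uint32 reason code followed by two SSH strings, the human-readable
+    # description and the language tag.
+    def _buildDisconnectPayloadExample(self, reasonCode, description,
+                                       language=''):
+        import struct
+        return (struct.pack('>L', reasonCode) + common.NS(description)
+                + common.NS(language))
+
+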
+    def test_receiveDisconnect(self):
+        """
+        Test that disconnection messages are received correctly.  See
+        test_sendDisconnect.
+        """
+        disconnected = [False]
+        def stubLoseConnection():
+            disconnected[0] = True
+        self.transport.loseConnection = stubLoseConnection
+        self.proto.dispatchMessage(transport.MSG_DISCONNECT,
+                                   '\x00\x00\x00\xff\x00\x00\x00\x04test')
+        self.assertEqual(self.proto.errors, [(255, 'test')])
+        self.assertTrue(disconnected[0])
+
+
+    def test_dataReceived(self):
+        """
+        Test that dataReceived parses packets and dispatches them to
+        ssh_* methods.
+        """
+        kexInit = [False]
+        def stubKEXINIT(packet):
+            kexInit[0] = True
+        self.proto.ssh_KEXINIT = stubKEXINIT
+        self.proto.dataReceived(self.transport.value())
+        self.assertTrue(self.proto.gotVersion)
+        self.assertEqual(self.proto.ourVersionString,
+                          self.proto.otherVersionString)
+        self.assertTrue(kexInit[0])
+
+
+    def test_service(self):
+        """
+        Test that the transport can set the running service and dispatches
+        packets to the service's packetReceived method.
+        """
+        service = MockService()
+        self.proto.setService(service)
+        self.assertEqual(self.proto.service, service)
+        self.assertTrue(service.started)
+        self.proto.dispatchMessage(0xff, "test")
+        self.assertEqual(self.packets, [(0xff, "test")])
+
+        service2 = MockService()
+        self.proto.setService(service2)
+        self.assertTrue(service2.started)
+        self.assertTrue(service.stopped)
+
+        self.proto.connectionLost(None)
+        self.assertTrue(service2.stopped)
+
+
+    def test_avatar(self):
+        """
+        Test that the transport notifies the avatar of disconnections.
+        """
+        disconnected = [False]
+        def logout():
+            disconnected[0] = True
+        self.proto.logoutFunction = logout
+        self.proto.avatar = True
+
+        self.proto.connectionLost(None)
+        self.assertTrue(disconnected[0])
+
+
+    def test_isEncrypted(self):
+        """
+        Test that the transport accurately reflects its encrypted status.
+        """
+        self.assertFalse(self.proto.isEncrypted('in'))
+        self.assertFalse(self.proto.isEncrypted('out'))
+        self.assertFalse(self.proto.isEncrypted('both'))
+        self.proto.currentEncryptions = MockCipher()
+        self.assertTrue(self.proto.isEncrypted('in'))
+        self.assertTrue(self.proto.isEncrypted('out'))
+        self.assertTrue(self.proto.isEncrypted('both'))
+        self.proto.currentEncryptions = transport.SSHCiphers('none', 'none',
+                                                             'none', 'none')
+        self.assertFalse(self.proto.isEncrypted('in'))
+        self.assertFalse(self.proto.isEncrypted('out'))
+        self.assertFalse(self.proto.isEncrypted('both'))
+
+        self.assertRaises(TypeError, self.proto.isEncrypted, 'bad')
+
+
+    def test_isVerified(self):
+        """
+        Test that the transport accurately reflects its verified status.
+        """
+        self.assertFalse(self.proto.isVerified('in'))
+        self.assertFalse(self.proto.isVerified('out'))
+        self.assertFalse(self.proto.isVerified('both'))
+        self.proto.currentEncryptions = MockCipher()
+        self.assertTrue(self.proto.isVerified('in'))
+        self.assertTrue(self.proto.isVerified('out'))
+        self.assertTrue(self.proto.isVerified('both'))
+        self.proto.currentEncryptions = transport.SSHCiphers('none', 'none',
+                                                             'none', 'none')
+        self.assertFalse(self.proto.isVerified('in'))
+        self.assertFalse(self.proto.isVerified('out'))
+        self.assertFalse(self.proto.isVerified('both'))
+
+        self.assertRaises(TypeError, self.proto.isVerified, 'bad')
+
+
+    def test_loseConnection(self):
+        """
+        Test that loseConnection sends a disconnect message and closes the
+        connection.
+        """
+        disconnected = [False]
+        def stubLoseConnection():
+            disconnected[0] = True
+        self.transport.loseConnection = stubLoseConnection
+        self.proto.loseConnection()
+        self.assertEqual(self.packets[0][0], transport.MSG_DISCONNECT)
+        self.assertEqual(self.packets[0][1][3],
+                          chr(transport.DISCONNECT_CONNECTION_LOST))
+
+
+    def test_badVersion(self):
+        """
+        Test that the transport disconnects when it receives a bad version.
+        """
+        def testBad(version):
+            self.packets = []
+            self.proto.gotVersion = False
+            disconnected = [False]
+            def stubLoseConnection():
+                disconnected[0] = True
+            self.transport.loseConnection = stubLoseConnection
+            for c in version + '\r\n':
+                self.proto.dataReceived(c)
+            self.assertTrue(disconnected[0])
+            self.assertEqual(self.packets[0][0], transport.MSG_DISCONNECT)
+            self.assertEqual(
+                self.packets[0][1][3],
+                chr(transport.DISCONNECT_PROTOCOL_VERSION_NOT_SUPPORTED))
+        testBad('SSH-1.5-OpenSSH')
+        testBad('SSH-3.0-Twisted')
+        testBad('GET / HTTP/1.1')
+
+
+    def test_dataBeforeVersion(self):
+        """
+        Test that the transport ignores data sent before the version string.
+        """
+        proto = MockTransportBase()
+        proto.makeConnection(proto_helpers.StringTransport())
+        data = ("""here's some stuff beforehand
+here's some other stuff
+""" + proto.ourVersionString + "\r\n")
+        [proto.dataReceived(c) for c in data]
+        self.assertTrue(proto.gotVersion)
+        self.assertEqual(proto.otherVersionString, proto.ourVersionString)
+
+
+    def test_compatabilityVersion(self):
+        """
+        Test that the transport treats the compatibility version (1.99)
+        as equivalent to version 2.0.
+        """
+        proto = MockTransportBase()
+        proto.makeConnection(proto_helpers.StringTransport())
+        proto.dataReceived("SSH-1.99-OpenSSH\n")
+        self.assertTrue(proto.gotVersion)
+        self.assertEqual(proto.otherVersionString, "SSH-1.99-OpenSSH")
+
+
+    def test_supportedVersionsAreAllowed(self):
+        """
+        If an unusual SSH version is received and is included in
+        C{supportedVersions}, an unsupported version error is not emitted.
+        """
+        proto = MockTransportBase()
+        proto.supportedVersions = ("9.99", )
+        proto.makeConnection(proto_helpers.StringTransport())
+        proto.dataReceived("SSH-9.99-OpenSSH\n")
+        self.assertFalse(proto.gotUnsupportedVersion)
+
+
+    def test_unsupportedVersionsCallUnsupportedVersionReceived(self):
+        """
+        If an unusual SSH version is received and is not included in
+        C{supportedVersions}, an unsupported version error is emitted.
+        """
+        proto = MockTransportBase()
+        proto.supportedVersions = ("2.0", )
+        proto.makeConnection(proto_helpers.StringTransport())
+        proto.dataReceived("SSH-9.99-OpenSSH\n")
+        self.assertEqual("9.99", proto.gotUnsupportedVersion)
+
+
+    def test_badPackets(self):
+        """
+        Test that the transport disconnects with an error when it receives
+        bad packets.
+        """
+        def testBad(packet, error=transport.DISCONNECT_PROTOCOL_ERROR):
+            self.packets = []
+            self.proto.buf = packet
+            self.assertEqual(self.proto.getPacket(), None)
+            self.assertEqual(len(self.packets), 1)
+            self.assertEqual(self.packets[0][0], transport.MSG_DISCONNECT)
+            self.assertEqual(self.packets[0][1][3], chr(error))
+
+        testBad('\xff' * 8) # big packet
+        testBad('\x00\x00\x00\x05\x00BCDE') # length not modulo blocksize
+        oldEncryptions = self.proto.currentEncryptions
+        self.proto.currentEncryptions = MockCipher()
+        testBad('\x00\x00\x00\x08\x06AB123456', # bad MAC
+                transport.DISCONNECT_MAC_ERROR)
+        self.proto.currentEncryptions.decrypt = lambda x: x[:-1]
+        testBad('\x00\x00\x00\x08\x06BCDEFGHIJK') # bad decryption
+        self.proto.currentEncryptions = oldEncryptions
+        self.proto.incomingCompression = MockCompression()
+        def stubDecompress(payload):
+            raise Exception('bad compression')
+        self.proto.incomingCompression.decompress = stubDecompress
+        testBad('\x00\x00\x00\x04\x00BCDE', # bad decompression
+                transport.DISCONNECT_COMPRESSION_ERROR)
+        self.flushLoggedErrors()
+
+
+    def test_unimplementedPackets(self):
+        """
+        Test that unimplemented packet types cause MSG_UNIMPLEMENTED packets
+        to be sent.
+        """
+        seqnum = self.proto.incomingPacketSequence
+        def checkUnimplemented(seqnum=seqnum):
+            self.assertEqual(self.packets[0][0],
+                              transport.MSG_UNIMPLEMENTED)
+            self.assertEqual(self.packets[0][1][3], chr(seqnum))
+            self.proto.packets = []
+            seqnum += 1
+
+        self.proto.dispatchMessage(40, '')
+        checkUnimplemented()
+        transport.messages[41] = 'MSG_fiction'
+        self.proto.dispatchMessage(41, '')
+        checkUnimplemented()
+        self.proto.dispatchMessage(60, '')
+        checkUnimplemented()
+        self.proto.setService(MockService())
+        self.proto.dispatchMessage(70, '')
+        checkUnimplemented()
+        self.proto.dispatchMessage(71, '')
+        checkUnimplemented()
+
+
+    def test_getKey(self):
+        """
+        Test that _getKey generates the correct keys.
+        """
+        self.proto.sessionID = 'EF'
+
+        k1 = sha1('AB' + 'CD' + 'K' + self.proto.sessionID).digest()
+        k2 = sha1('ABCD' + k1).digest()
+        self.assertEqual(self.proto._getKey('K', 'AB', 'CD'), k1 + k2)
+
+
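+    # A minimal sketch (a hypothetical helper, not used by the tests) of the
+    # RFC 4253 section 7.2 derivation exercised in test_getKey above: the
+    # first block hashes the shared secret, the exchange hash, a key-specific
+    # letter and the session id; the next block hashes the shared secret, the
+    # exchange hash and the previous block.
+    def _deriveKeyExample(self, letter, sharedSecret, exchangeHash,
+                          sessionID):
+        firstBlock = sha1(
+            sharedSecret + exchangeHash + letter + sessionID).digest()
+        secondBlock = sha1(
+            sharedSecret + exchangeHash + firstBlock).digest()
+        return firstBlock + secondBlock
+
+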
+    def test_multipleClasses(self):
+        """
+        Test that multiple instances have distinct states.
+        """
+        proto = self.proto
+        proto.dataReceived(self.transport.value())
+        proto.currentEncryptions = MockCipher()
+        proto.outgoingCompression = MockCompression()
+        proto.incomingCompression = MockCompression()
+        proto.setService(MockService())
+        proto2 = MockTransportBase()
+        proto2.makeConnection(proto_helpers.StringTransport())
+        proto2.sendIgnore('')
+        self.failIfEquals(proto.gotVersion, proto2.gotVersion)
+        self.failIfEquals(proto.transport, proto2.transport)
+        self.failIfEquals(proto.outgoingPacketSequence,
+                          proto2.outgoingPacketSequence)
+        self.failIfEquals(proto.incomingPacketSequence,
+                          proto2.incomingPacketSequence)
+        self.failIfEquals(proto.currentEncryptions,
+                          proto2.currentEncryptions)
+        self.failIfEquals(proto.service, proto2.service)
+
+
+
+class ServerAndClientSSHTransportBaseCase:
+    """
+    Tests that need to be run on both the server and the client.
+    """
+
+
+    def checkDisconnected(self, kind=None):
+        """
+        Helper function to check if the transport disconnected.
+        """
+        if kind is None:
+            kind = transport.DISCONNECT_PROTOCOL_ERROR
+        self.assertEqual(self.packets[-1][0], transport.MSG_DISCONNECT)
+        self.assertEqual(self.packets[-1][1][3], chr(kind))
+
+
+    def connectModifiedProtocol(self, protoModification,
+            kind=None):
+        """
+        Helper function to connect a modified protocol to the test protocol
+        and test for disconnection.
+        """
+        if kind is None:
+            kind = transport.DISCONNECT_KEY_EXCHANGE_FAILED
+        proto2 = self.klass()
+        protoModification(proto2)
+        proto2.makeConnection(proto_helpers.StringTransport())
+        self.proto.dataReceived(proto2.transport.value())
+        if kind:
+            self.checkDisconnected(kind)
+        return proto2
+
+
+    def test_disconnectIfCantMatchKex(self):
+        """
+        Test that the transport disconnects if it can't match the key
+        exchange
+        """
+        def blankKeyExchanges(proto2):
+            proto2.supportedKeyExchanges = []
+        self.connectModifiedProtocol(blankKeyExchanges)
+
+
+    def test_disconnectIfCantMatchKeyAlg(self):
+        """
+        Like test_disconnectIfCantMatchKex, but for the key algorithm.
+        """
+        def blankPublicKeys(proto2):
+            proto2.supportedPublicKeys = []
+        self.connectModifiedProtocol(blankPublicKeys)
+
+
+    def test_disconnectIfCantMatchCompression(self):
+        """
+        Like test_disconnectIfCantMatchKex, but for the compression.
+        """
+        def blankCompressions(proto2):
+            proto2.supportedCompressions = []
+        self.connectModifiedProtocol(blankCompressions)
+
+
+    def test_disconnectIfCantMatchCipher(self):
+        """
+        Like test_disconnectIfCantMatchKex, but for the encryption.
+        """
+        def blankCiphers(proto2):
+            proto2.supportedCiphers = []
+        self.connectModifiedProtocol(blankCiphers)
+
+
+    def test_disconnectIfCantMatchMAC(self):
+        """
+        Like test_disconnectIfCantMatchKex, but for the MAC.
+        """
+        def blankMACs(proto2):
+            proto2.supportedMACs = []
+        self.connectModifiedProtocol(blankMACs)
+
+    def test_getPeer(self):
+        """
+        Test that the transport's L{getPeer} method returns an
+        L{SSHTransportAddress} with the L{IAddress} of the peer.
+        """
+        self.assertEqual(self.proto.getPeer(),
+                         address.SSHTransportAddress(
+                self.proto.transport.getPeer()))
+
+    def test_getHost(self):
+        """
+        Test that the transport's L{getHost} method returns an
+        L{SSHTransportAddress} with the L{IAddress} of the host.
+        """
+        self.assertEqual(self.proto.getHost(),
+                         address.SSHTransportAddress(
+                self.proto.transport.getHost()))
+
+
+
+class ServerSSHTransportTestCase(ServerAndClientSSHTransportBaseCase,
+        TransportTestCase):
+    """
+    Tests for the SSHServerTransport.
+    """
+
+    klass = transport.SSHServerTransport
+
+
+    def setUp(self):
+        TransportTestCase.setUp(self)
+        self.proto.factory = MockFactory()
+        self.proto.factory.startFactory()
+
+
+    def tearDown(self):
+        TransportTestCase.tearDown(self)
+        self.proto.factory.stopFactory()
+        del self.proto.factory
+
+
+    def test_KEXINIT(self):
+        """
+        Test that receiving a KEXINIT packet sets up the correct values on the
+        server.
+        """
+        self.proto.dataReceived( 'SSH-2.0-Twisted\r\n\x00\x00\x01\xd4\t\x14'
+                '\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99'
+                '\x99\x00\x00\x00=diffie-hellman-group1-sha1,diffie-hellman-g'
+                'roup-exchange-sha1\x00\x00\x00\x0fssh-dss,ssh-rsa\x00\x00\x00'
+                '\x85aes128-ctr,aes128-cbc,aes192-ctr,aes192-cbc,aes256-ctr,ae'
+                's256-cbc,cast128-ctr,cast128-cbc,blowfish-ctr,blowfish-cbc,3d'
+                'es-ctr,3des-cbc\x00\x00\x00\x85aes128-ctr,aes128-cbc,aes192-c'
+                'tr,aes192-cbc,aes256-ctr,aes256-cbc,cast128-ctr,cast128-cbc,b'
+                'lowfish-ctr,blowfish-cbc,3des-ctr,3des-cbc\x00\x00\x00\x12hma'
+                'c-md5,hmac-sha1\x00\x00\x00\x12hmac-md5,hmac-sha1\x00\x00\x00'
+                '\tnone,zlib\x00\x00\x00\tnone,zlib\x00\x00\x00\x00\x00\x00'
+                '\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x99\x99\x99\x99'
+                '\x99')
+        self.assertEqual(self.proto.kexAlg,
+                          'diffie-hellman-group1-sha1')
+        self.assertEqual(self.proto.keyAlg,
+                          'ssh-dss')
+        self.assertEqual(self.proto.outgoingCompressionType,
+                          'none')
+        self.assertEqual(self.proto.incomingCompressionType,
+                          'none')
+        ne = self.proto.nextEncryptions
+        self.assertEqual(ne.outCipType, 'aes128-ctr')
+        self.assertEqual(ne.inCipType, 'aes128-ctr')
+        self.assertEqual(ne.outMACType, 'hmac-md5')
+        self.assertEqual(ne.inMACType, 'hmac-md5')
+
+
+    def test_ignoreGuessPacketKex(self):
+        """
+        The client is allowed to send a guessed key exchange packet
+        after it sends the KEXINIT packet.  However, if the key exchanges
+        do not match, that guess packet must be ignored.  This tests that
+        the packet is ignored in the case of the key exchange method not
+        matching.
+        """
+        kexInitPacket = '\x00' * 16 + (
+            ''.join([common.NS(x) for x in
+                     [','.join(y) for y in
+                      [self.proto.supportedKeyExchanges[::-1],
+                       self.proto.supportedPublicKeys,
+                       self.proto.supportedCiphers,
+                       self.proto.supportedCiphers,
+                       self.proto.supportedMACs,
+                       self.proto.supportedMACs,
+                       self.proto.supportedCompressions,
+                       self.proto.supportedCompressions,
+                       self.proto.supportedLanguages,
+                       self.proto.supportedLanguages]]])) + (
+            '\xff\x00\x00\x00\x00')
+        self.proto.ssh_KEXINIT(kexInitPacket)
+        self.assertTrue(self.proto.ignoreNextPacket)
+        self.proto.ssh_DEBUG("\x01\x00\x00\x00\x04test\x00\x00\x00\x00")
+        self.assertTrue(self.proto.ignoreNextPacket)
+
+
+        self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x08\x00')
+        self.assertFalse(self.proto.ignoreNextPacket)
+        self.assertEqual(self.packets, [])
+        self.proto.ignoreNextPacket = True
+
+        self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x08\x00' * 3)
+        self.assertFalse(self.proto.ignoreNextPacket)
+        self.assertEqual(self.packets, [])
+
+
+    def test_ignoreGuessPacketKey(self):
+        """
+        Like test_ignoreGuessPacketKex, but for an incorrectly guessed
+        public key format.
+        """
+        kexInitPacket = '\x00' * 16 + (
+            ''.join([common.NS(x) for x in
+                     [','.join(y) for y in
+                      [self.proto.supportedKeyExchanges,
+                       self.proto.supportedPublicKeys[::-1],
+                       self.proto.supportedCiphers,
+                       self.proto.supportedCiphers,
+                       self.proto.supportedMACs,
+                       self.proto.supportedMACs,
+                       self.proto.supportedCompressions,
+                       self.proto.supportedCompressions,
+                       self.proto.supportedLanguages,
+                       self.proto.supportedLanguages]]])) + (
+            '\xff\x00\x00\x00\x00')
+        self.proto.ssh_KEXINIT(kexInitPacket)
+        self.assertTrue(self.proto.ignoreNextPacket)
+        self.proto.ssh_DEBUG("\x01\x00\x00\x00\x04test\x00\x00\x00\x00")
+        self.assertTrue(self.proto.ignoreNextPacket)
+
+        self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x08\x00')
+        self.assertFalse(self.proto.ignoreNextPacket)
+        self.assertEqual(self.packets, [])
+        self.proto.ignoreNextPacket = True
+
+        self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x08\x00' * 3)
+        self.assertFalse(self.proto.ignoreNextPacket)
+        self.assertEqual(self.packets, [])
+
+
+    def test_KEXDH_INIT(self):
+        """
+        Test that the KEXDH_INIT packet causes the server to send a
+        KEXDH_REPLY with the server's public key and a signature.
+        """
+        self.proto.supportedKeyExchanges = ['diffie-hellman-group1-sha1']
+        self.proto.supportedPublicKeys = ['ssh-rsa']
+        self.proto.dataReceived(self.transport.value())
+        e = pow(transport.DH_GENERATOR, 5000,
+                transport.DH_PRIME)
+
+        self.proto.ssh_KEX_DH_GEX_REQUEST_OLD(common.MP(e))
+        y = common.getMP('\x00\x00\x00\x40' + '\x99' * 64)[0]
+        f = common._MPpow(transport.DH_GENERATOR, y, transport.DH_PRIME)
+        sharedSecret = common._MPpow(e, y, transport.DH_PRIME)
+
+        h = sha1()
+        h.update(common.NS(self.proto.ourVersionString) * 2)
+        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
+        h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
+        h.update(common.MP(e))
+        h.update(f)
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+
+        signature = self.proto.factory.privateKeys['ssh-rsa'].sign(
+                exchangeHash)
+
+        self.assertEqual(
+            self.packets,
+            [(transport.MSG_KEXDH_REPLY,
+              common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob())
+              + f + common.NS(signature)),
+             (transport.MSG_NEWKEYS, '')])
+
+
+    def test_KEX_DH_GEX_REQUEST_OLD(self):
+        """
+        Test that the KEX_DH_GEX_REQUEST_OLD message causes the server
+        to reply with a KEX_DH_GEX_GROUP message with the correct
+        Diffie-Hellman group.
+        """
+        self.proto.supportedKeyExchanges = [
+                'diffie-hellman-group-exchange-sha1']
+        self.proto.supportedPublicKeys = ['ssh-rsa']
+        self.proto.dataReceived(self.transport.value())
+        self.proto.ssh_KEX_DH_GEX_REQUEST_OLD('\x00\x00\x04\x00')
+        self.assertEqual(
+            self.packets,
+            [(transport.MSG_KEX_DH_GEX_GROUP,
+              common.MP(transport.DH_PRIME) + '\x00\x00\x00\x01\x02')])
+        self.assertEqual(self.proto.g, 2)
+        self.assertEqual(self.proto.p, transport.DH_PRIME)
+
+
+    def test_KEX_DH_GEX_REQUEST_OLD_badKexAlg(self):
+        """
+        Test that if the server receives a KEX_DH_GEX_REQUEST_OLD message
+        and the key exchange algorithm is not 'diffie-hellman-group1-sha1' or
+        'diffie-hellman-group-exchange-sha1', we raise a ConchError.
+        """
+        self.proto.kexAlg = None
+        self.assertRaises(ConchError, self.proto.ssh_KEX_DH_GEX_REQUEST_OLD,
+                None)
+
+
+    def test_KEX_DH_GEX_REQUEST(self):
+        """
+        Test that the KEX_DH_GEX_REQUEST message causes the server to reply
+        with a KEX_DH_GEX_GROUP message with the correct Diffie-Hellman
+        group.
+        """
+        self.proto.supportedKeyExchanges = [
+            'diffie-hellman-group-exchange-sha1']
+        self.proto.supportedPublicKeys = ['ssh-rsa']
+        self.proto.dataReceived(self.transport.value())
+        self.proto.ssh_KEX_DH_GEX_REQUEST('\x00\x00\x04\x00\x00\x00\x08\x00' +
+                                          '\x00\x00\x0c\x00')
+        self.assertEqual(
+            self.packets,
+            [(transport.MSG_KEX_DH_GEX_GROUP,
+              common.MP(transport.DH_PRIME) + '\x00\x00\x00\x01\x03')])
+        self.assertEqual(self.proto.g, 3)
+        self.assertEqual(self.proto.p, transport.DH_PRIME)
+
+
+    def test_KEX_DH_GEX_INIT_after_REQUEST(self):
+        """
+        Test that the KEX_DH_GEX_INIT message after the client sends
+        KEX_DH_GEX_REQUEST causes the server to send a KEX_DH_GEX_REPLY message
+        with a public key and signature.
+        """
+        self.test_KEX_DH_GEX_REQUEST()
+        e = pow(self.proto.g, 3, self.proto.p)
+        y = common.getMP('\x00\x00\x00\x80' + '\x99' * 128)[0]
+        f = common._MPpow(self.proto.g, y, self.proto.p)
+        sharedSecret = common._MPpow(e, y, self.proto.p)
+        h = sha1()
+        h.update(common.NS(self.proto.ourVersionString) * 2)
+        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
+        h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
+        h.update('\x00\x00\x04\x00\x00\x00\x08\x00\x00\x00\x0c\x00')
+        h.update(common.MP(self.proto.p))
+        h.update(common.MP(self.proto.g))
+        h.update(common.MP(e))
+        h.update(f)
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+        self.proto.ssh_KEX_DH_GEX_INIT(common.MP(e))
+        self.assertEqual(
+            self.packets[1],
+            (transport.MSG_KEX_DH_GEX_REPLY,
+             common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) +
+             f + common.NS(self.proto.factory.privateKeys['ssh-rsa'].sign(
+                        exchangeHash))))
+
+
+    def test_KEX_DH_GEX_INIT_after_REQUEST_OLD(self):
+        """
+        Test that the KEX_DH_GEX_INIT message after the client sends
+        KEX_DH_GEX_REQUEST_OLD causes the server to send a KEX_DH_GEX_REPLY
+        message with a public key and signature.
+        """
+        self.test_KEX_DH_GEX_REQUEST_OLD()
+        e = pow(self.proto.g, 3, self.proto.p)
+        y = common.getMP('\x00\x00\x00\x80' + '\x99' * 128)[0]
+        f = common._MPpow(self.proto.g, y, self.proto.p)
+        sharedSecret = common._MPpow(e, y, self.proto.p)
+        h = sha1()
+        h.update(common.NS(self.proto.ourVersionString) * 2)
+        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
+        h.update(common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()))
+        h.update('\x00\x00\x04\x00')
+        h.update(common.MP(self.proto.p))
+        h.update(common.MP(self.proto.g))
+        h.update(common.MP(e))
+        h.update(f)
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+        self.proto.ssh_KEX_DH_GEX_INIT(common.MP(e))
+        self.assertEqual(
+            self.packets[1:],
+            [(transport.MSG_KEX_DH_GEX_REPLY,
+              common.NS(self.proto.factory.publicKeys['ssh-rsa'].blob()) +
+              f + common.NS(self.proto.factory.privateKeys['ssh-rsa'].sign(
+                            exchangeHash))),
+             (transport.MSG_NEWKEYS, '')])
+
+
+    def test_keySetup(self):
+        """
+        Test that _keySetup sets up the next encryption keys.
+        """
+        self.proto.nextEncryptions = MockCipher()
+        self.simulateKeyExchange('AB', 'CD')
+        self.assertEqual(self.proto.sessionID, 'CD')
+        self.simulateKeyExchange('AB', 'EF')
+        self.assertEqual(self.proto.sessionID, 'CD')
+        self.assertEqual(self.packets[-1], (transport.MSG_NEWKEYS, ''))
+        newKeys = [self.proto._getKey(c, 'AB', 'EF') for c in 'ABCDEF']
+        self.assertEqual(
+            self.proto.nextEncryptions.keys,
+            (newKeys[1], newKeys[3], newKeys[0], newKeys[2], newKeys[5],
+             newKeys[4]))
+
+
+    def test_NEWKEYS(self):
+        """
+        Test that NEWKEYS transitions the keys in nextEncryptions to
+        currentEncryptions.
+        """
+        self.test_KEXINIT()
+
+        self.proto.nextEncryptions = transport.SSHCiphers('none', 'none',
+                                                          'none', 'none')
+        self.proto.ssh_NEWKEYS('')
+        self.assertIdentical(self.proto.currentEncryptions,
+                             self.proto.nextEncryptions)
+        self.assertIdentical(self.proto.outgoingCompression, None)
+        self.assertIdentical(self.proto.incomingCompression, None)
+        self.proto.outgoingCompressionType = 'zlib'
+        self.simulateKeyExchange('AB', 'CD')
+        self.proto.ssh_NEWKEYS('')
+        self.failIfIdentical(self.proto.outgoingCompression, None)
+        self.proto.incomingCompressionType = 'zlib'
+        self.simulateKeyExchange('AB', 'EF')
+        self.proto.ssh_NEWKEYS('')
+        self.failIfIdentical(self.proto.incomingCompression, None)
+
+
+    def test_SERVICE_REQUEST(self):
+        """
+        Test that the SERVICE_REQUEST message requests and starts a
+        service.
+        """
+        self.proto.ssh_SERVICE_REQUEST(common.NS('ssh-userauth'))
+        self.assertEqual(self.packets, [(transport.MSG_SERVICE_ACCEPT,
+                                          common.NS('ssh-userauth'))])
+        self.assertEqual(self.proto.service.name, 'MockService')
+
+
+    def test_disconnectNEWKEYSData(self):
+        """
+        Test that NEWKEYS disconnects if it receives data.
+        """
+        self.proto.ssh_NEWKEYS("bad packet")
+        self.checkDisconnected()
+
+
+    def test_disconnectSERVICE_REQUESTBadService(self):
+        """
+        Test that SERVICE_REQUESTS disconnects if an unknown service is
+        requested.
+        """
+        self.proto.ssh_SERVICE_REQUEST(common.NS('no service'))
+        self.checkDisconnected(transport.DISCONNECT_SERVICE_NOT_AVAILABLE)
+
+
+
+class ClientSSHTransportTestCase(ServerAndClientSSHTransportBaseCase,
+        TransportTestCase):
+    """
+    Tests for SSHClientTransport.
+    """
+
+    klass = transport.SSHClientTransport
+
+
+    def test_KEXINIT(self):
+        """
+        Test that receiving a KEXINIT packet sets up the correct values on the
+        client.  The way algorithms are picked is that the first item in the
+        client's list that is also in the server's list is chosen; a sketch of
+        this rule follows this test.
+        """
+        self.proto.dataReceived( 'SSH-2.0-Twisted\r\n\x00\x00\x01\xd4\t\x14'
+                '\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99\x99'
+                '\x99\x00\x00\x00=diffie-hellman-group1-sha1,diffie-hellman-g'
+                'roup-exchange-sha1\x00\x00\x00\x0fssh-dss,ssh-rsa\x00\x00\x00'
+                '\x85aes128-ctr,aes128-cbc,aes192-ctr,aes192-cbc,aes256-ctr,ae'
+                's256-cbc,cast128-ctr,cast128-cbc,blowfish-ctr,blowfish-cbc,3d'
+                'es-ctr,3des-cbc\x00\x00\x00\x85aes128-ctr,aes128-cbc,aes192-c'
+                'tr,aes192-cbc,aes256-ctr,aes256-cbc,cast128-ctr,cast128-cbc,b'
+                'lowfish-ctr,blowfish-cbc,3des-ctr,3des-cbc\x00\x00\x00\x12hma'
+                'c-md5,hmac-sha1\x00\x00\x00\x12hmac-md5,hmac-sha1\x00\x00\x00'
+                '\tzlib,none\x00\x00\x00\tzlib,none\x00\x00\x00\x00\x00\x00'
+                '\x00\x00\x00\x00\x00\x00\x00\x99\x99\x99\x99\x99\x99\x99\x99'
+                '\x99')
+        self.assertEqual(self.proto.kexAlg,
+                          'diffie-hellman-group-exchange-sha1')
+        self.assertEqual(self.proto.keyAlg,
+                          'ssh-rsa')
+        self.assertEqual(self.proto.outgoingCompressionType,
+                          'none')
+        self.assertEqual(self.proto.incomingCompressionType,
+                          'none')
+        ne = self.proto.nextEncryptions
+        self.assertEqual(ne.outCipType, 'aes256-ctr')
+        self.assertEqual(ne.inCipType, 'aes256-ctr')
+        self.assertEqual(ne.outMACType, 'hmac-sha1')
+        self.assertEqual(ne.inMACType, 'hmac-sha1')
+
+
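+    # A minimal sketch (a hypothetical helper, not Conch's implementation) of
+    # the negotiation rule stated in test_KEXINIT above: the chosen algorithm
+    # is the first entry of the client's preference list that also appears in
+    # the server's comma-separated name-list.
+    def _pickFirstCommonAlgorithmExample(self, clientPreferences,
+                                         serverNameList):
+        serverAlgorithms = serverNameList.split(',')
+        for algorithm in clientPreferences:
+            if algorithm in serverAlgorithms:
+                return algorithm
+        return None  # no match; key exchange has to fail
+
+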
+    def verifyHostKey(self, pubKey, fingerprint):
+        """
+        Mock version of SSHClientTransport.verifyHostKey.
+        """
+        self.calledVerifyHostKey = True
+        self.assertEqual(pubKey, self.blob)
+        self.assertEqual(fingerprint.replace(':', ''),
+                          md5(pubKey).hexdigest())
+        return defer.succeed(True)
+
+
+    def setUp(self):
+        TransportTestCase.setUp(self)
+        self.blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
+        self.privObj = keys.Key.fromString(keydata.privateRSA_openssh)
+        self.calledVerifyHostKey = False
+        self.proto.verifyHostKey = self.verifyHostKey
+
+
+    def test_notImplementedClientMethods(self):
+        """
+        verifyHostKey() should return a Deferred which fails with a
+        NotImplementedError exception.  connectionSecure() should raise
+        NotImplementedError().
+        """
+        self.assertRaises(NotImplementedError, self.klass().connectionSecure)
+        def _checkRaises(f):
+            f.trap(NotImplementedError)
+        d = self.klass().verifyHostKey(None, None)
+        return d.addCallback(self.fail).addErrback(_checkRaises)
+
+
+    def test_KEXINIT_groupexchange(self):
+        """
+        Test that a KEXINIT packet with a group-exchange key exchange results
+        in a KEX_DH_GEX_REQUEST_OLD message.
+        """
+        self.proto.supportedKeyExchanges = [
+            'diffie-hellman-group-exchange-sha1']
+        self.proto.dataReceived(self.transport.value())
+        self.assertEqual(self.packets, [(transport.MSG_KEX_DH_GEX_REQUEST_OLD,
+                                          '\x00\x00\x08\x00')])
+
+
+    def test_KEXINIT_group1(self):
+        """
+        Like test_KEXINIT_groupexchange, but for the group-1 key exchange.
+        """
+        self.proto.supportedKeyExchanges = ['diffie-hellman-group1-sha1']
+        self.proto.dataReceived(self.transport.value())
+        self.assertEqual(common.MP(self.proto.x)[5:], '\x99' * 64)
+        self.assertEqual(self.packets,
+                          [(transport.MSG_KEXDH_INIT, self.proto.e)])
+
+
+    def test_KEXINIT_badKexAlg(self):
+        """
+        Test that the client raises a ConchError if it receives a
+        KEXINIT message that doesn't have a key exchange algorithm that we
+        understand.
+        """
+        self.proto.supportedKeyExchanges = ['diffie-hellman-group2-sha1']
+        data = self.transport.value().replace('group1', 'group2')
+        self.assertRaises(ConchError, self.proto.dataReceived, data)
+
+
+    def test_KEXDH_REPLY(self):
+        """
+        Test that the KEXDH_REPLY message verifies the server.
+        """
+        self.test_KEXINIT_group1()
+
+        sharedSecret = common._MPpow(transport.DH_GENERATOR,
+                                     self.proto.x, transport.DH_PRIME)
+        h = sha1()
+        h.update(common.NS(self.proto.ourVersionString) * 2)
+        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
+        h.update(common.NS(self.blob))
+        h.update(self.proto.e)
+        h.update('\x00\x00\x00\x01\x02') # f
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+
+        def _cbTestKEXDH_REPLY(value):
+            self.assertIdentical(value, None)
+            self.assertEqual(self.calledVerifyHostKey, True)
+            self.assertEqual(self.proto.sessionID, exchangeHash)
+
+        signature = self.privObj.sign(exchangeHash)
+
+        d = self.proto.ssh_KEX_DH_GEX_GROUP(
+            (common.NS(self.blob) + '\x00\x00\x00\x01\x02' +
+             common.NS(signature)))
+        d.addCallback(_cbTestKEXDH_REPLY)
+
+        return d
+
+
+    def test_KEX_DH_GEX_GROUP(self):
+        """
+        Test that the KEX_DH_GEX_GROUP message results in a
+        KEX_DH_GEX_INIT message with the client's Diffie-Hellman public key.
+        """
+        self.test_KEXINIT_groupexchange()
+        self.proto.ssh_KEX_DH_GEX_GROUP(
+            '\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02')
+        self.assertEqual(self.proto.p, 15)
+        self.assertEqual(self.proto.g, 2)
+        self.assertEqual(common.MP(self.proto.x)[5:], '\x99' * 40)
+        self.assertEqual(self.proto.e,
+                          common.MP(pow(2, self.proto.x, 15)))
+        self.assertEqual(self.packets[1:], [(transport.MSG_KEX_DH_GEX_INIT,
+                                              self.proto.e)])
+
+
+    def test_KEX_DH_GEX_REPLY(self):
+        """
+        Test that the KEX_DH_GEX_REPLY message results in a verified
+        server.
+        """
+
+        self.test_KEX_DH_GEX_GROUP()
+        sharedSecret = common._MPpow(3, self.proto.x, self.proto.p)
+        h = sha1()
+        h.update(common.NS(self.proto.ourVersionString) * 2)
+        h.update(common.NS(self.proto.ourKexInitPayload) * 2)
+        h.update(common.NS(self.blob))
+        h.update('\x00\x00\x08\x00\x00\x00\x00\x01\x0f\x00\x00\x00\x01\x02')
+        h.update(self.proto.e)
+        h.update('\x00\x00\x00\x01\x03') # f
+        h.update(sharedSecret)
+        exchangeHash = h.digest()
+
+        def _cbTestKEX_DH_GEX_REPLY(value):
+            self.assertIdentical(value, None)
+            self.assertEqual(self.calledVerifyHostKey, True)
+            self.assertEqual(self.proto.sessionID, exchangeHash)
+
+        signature = self.privObj.sign(exchangeHash)
+
+        d = self.proto.ssh_KEX_DH_GEX_REPLY(
+            common.NS(self.blob) +
+            '\x00\x00\x00\x01\x03' +
+            common.NS(signature))
+        d.addCallback(_cbTestKEX_DH_GEX_REPLY)
+        return d
+
+
+    def test_keySetup(self):
+        """
+        Test that _keySetup sets up the next encryption keys.
+        """
+        self.proto.nextEncryptions = MockCipher()
+        self.simulateKeyExchange('AB', 'CD')
+        self.assertEqual(self.proto.sessionID, 'CD')
+        self.simulateKeyExchange('AB', 'EF')
+        self.assertEqual(self.proto.sessionID, 'CD')
+        self.assertEqual(self.packets[-1], (transport.MSG_NEWKEYS, ''))
+        newKeys = [self.proto._getKey(c, 'AB', 'EF') for c in 'ABCDEF']
+        self.assertEqual(self.proto.nextEncryptions.keys,
+                          (newKeys[0], newKeys[2], newKeys[1], newKeys[3],
+                           newKeys[4], newKeys[5]))
+
+
+    def test_NEWKEYS(self):
+        """
+        Test that NEWKEYS transitions the keys from nextEncryptions to
+        currentEncryptions.
+        """
+        self.test_KEXINIT()
+        secure = [False]
+        def stubConnectionSecure():
+            secure[0] = True
+        self.proto.connectionSecure = stubConnectionSecure
+
+        self.proto.nextEncryptions = transport.SSHCiphers(
+            'none', 'none', 'none', 'none')
+        self.simulateKeyExchange('AB', 'CD')
+        self.assertNotIdentical(
+            self.proto.currentEncryptions, self.proto.nextEncryptions)
+
+        self.proto.nextEncryptions = MockCipher()
+        self.proto.ssh_NEWKEYS('')
+        self.assertIdentical(self.proto.outgoingCompression, None)
+        self.assertIdentical(self.proto.incomingCompression, None)
+        self.assertIdentical(self.proto.currentEncryptions,
+                             self.proto.nextEncryptions)
+        self.assertTrue(secure[0])
+        self.proto.outgoingCompressionType = 'zlib'
+        self.simulateKeyExchange('AB', 'GH')
+        self.proto.ssh_NEWKEYS('')
+        self.failIfIdentical(self.proto.outgoingCompression, None)
+        self.proto.incomingCompressionType = 'zlib'
+        self.simulateKeyExchange('AB', 'IJ')
+        self.proto.ssh_NEWKEYS('')
+        self.failIfIdentical(self.proto.incomingCompression, None)
+
+
+    def test_SERVICE_ACCEPT(self):
+        """
+        Test that the SERVICE_ACCEPT packet starts the requested service.
+        """
+        self.proto.instance = MockService()
+        self.proto.ssh_SERVICE_ACCEPT('\x00\x00\x00\x0bMockService')
+        self.assertTrue(self.proto.instance.started)
+
+
+    def test_requestService(self):
+        """
+        Test that requesting a service sends a SERVICE_REQUEST packet.
+        """
+        self.proto.requestService(MockService())
+        self.assertEqual(self.packets, [(transport.MSG_SERVICE_REQUEST,
+                                          '\x00\x00\x00\x0bMockService')])
+
+
+    def test_disconnectKEXDH_REPLYBadSignature(self):
+        """
+        Test that KEXDH_REPLY disconnects if the signature is bad.
+        """
+        self.test_KEXDH_REPLY()
+        self.proto._continueKEXDH_REPLY(None, self.blob, 3, "bad signature")
+        self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED)
+
+
+    def test_disconnectGEX_REPLYBadSignature(self):
+        """
+        Like test_disconnectKEXDH_REPLYBadSignature, but for DH_GEX_REPLY.
+        """
+        self.test_KEX_DH_GEX_REPLY()
+        self.proto._continueGEX_REPLY(None, self.blob, 3, "bad signature")
+        self.checkDisconnected(transport.DISCONNECT_KEY_EXCHANGE_FAILED)
+
+
+    def test_disconnectNEWKEYSData(self):
+        """
+        Test that NEWKEYS disconnects if it receives data.
+        """
+        self.proto.ssh_NEWKEYS("bad packet")
+        self.checkDisconnected()
+
+
+    def test_disconnectSERVICE_ACCEPT(self):
+        """
+        Test that SERVICE_ACCEPT disconnects if the accepted protocol is
+        different from the asked-for protocol.
+        """
+        self.proto.instance = MockService()
+        self.proto.ssh_SERVICE_ACCEPT('\x00\x00\x00\x03bad')
+        self.checkDisconnected()
+
+
+    def test_noPayloadSERVICE_ACCEPT(self):
+        """
+        Some commercial SSH servers don't send a payload with the
+        SERVICE_ACCEPT message.  Conch pretends that it got the correct
+        name of the service.
+        """
+        self.proto.instance = MockService()
+        self.proto.ssh_SERVICE_ACCEPT('') # no payload
+        self.assertTrue(self.proto.instance.started)
+        self.assertEqual(len(self.packets), 0) # not disconnected
+
+
+
+class SSHCiphersTestCase(unittest.TestCase):
+    """
+    Tests for the SSHCiphers helper class.
+    """
+    if Crypto is None:
+        skip = "cannot run w/o PyCrypto"
+
+    if pyasn1 is None:
+        skip = "Cannot run without PyASN1"
+
+
+    def test_init(self):
+        """
+        Test that the initializer sets up the SSHCiphers object.
+        """
+        ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
+        self.assertEqual(ciphers.outCipType, 'A')
+        self.assertEqual(ciphers.inCipType, 'B')
+        self.assertEqual(ciphers.outMACType, 'C')
+        self.assertEqual(ciphers.inMACType, 'D')
+
+
+    def test_getCipher(self):
+        """
+        Test that the _getCipher method returns the correct cipher.
+        """
+        ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
+        iv = key = '\x00' * 16
+        for cipName, (modName, keySize, counter) in ciphers.cipherMap.items():
+            cip = ciphers._getCipher(cipName, iv, key)
+            if cipName == 'none':
+                self.assertIsInstance(cip, transport._DummyCipher)
+            else:
+                self.assertTrue(getClass(cip).__name__.startswith(modName))
+
+
+    def test_getMAC(self):
+        """
+        Test that the _getMAC method returns the correct MAC.
+        """
+        ciphers = transport.SSHCiphers('A', 'B', 'C', 'D')
+        key = '\x00' * 64
+        for macName, mac in ciphers.macMap.items():
+            mod = ciphers._getMAC(macName, key)
+            if macName == 'none':
+                self.assertIdentical(mac, None)
+            else:
+                self.assertEqual(mod[0], mac)
+                self.assertEqual(mod[1],
+                                  Crypto.Cipher.XOR.new('\x36').encrypt(key))
+                self.assertEqual(mod[2],
+                                  Crypto.Cipher.XOR.new('\x5c').encrypt(key))
+                self.assertEqual(mod[3], len(mod[0]().digest()))
+
+
+    def test_setKeysCiphers(self):
+        """
+        Test that setKeys sets up the ciphers.
+        """
+        key = '\x00' * 64
+        cipherItems = transport.SSHCiphers.cipherMap.items()
+        for cipName, (modName, keySize, counter) in cipherItems:
+            encCipher = transport.SSHCiphers(cipName, 'none', 'none', 'none')
+            decCipher = transport.SSHCiphers('none', cipName, 'none', 'none')
+            cip = encCipher._getCipher(cipName, key, key)
+            bs = cip.block_size
+            encCipher.setKeys(key, key, '', '', '', '')
+            decCipher.setKeys('', '', key, key, '', '')
+            self.assertEqual(encCipher.encBlockSize, bs)
+            self.assertEqual(decCipher.decBlockSize, bs)
+            enc = cip.encrypt(key[:bs])
+            enc2 = cip.encrypt(key[:bs])
+            if counter:
+                self.failIfEquals(enc, enc2)
+            self.assertEqual(encCipher.encrypt(key[:bs]), enc)
+            self.assertEqual(encCipher.encrypt(key[:bs]), enc2)
+            self.assertEqual(decCipher.decrypt(enc), key[:bs])
+            self.assertEqual(decCipher.decrypt(enc2), key[:bs])
+
+
+    def test_setKeysMACs(self):
+        """
+        Test that setKeys sets up the MACs.
+        """
+        key = '\x00' * 64
+        for macName, mod in transport.SSHCiphers.macMap.items():
+            outMac = transport.SSHCiphers('none', 'none', macName, 'none')
+            inMac = transport.SSHCiphers('none', 'none', 'none', macName)
+            outMac.setKeys('', '', '', '', key, '')
+            inMac.setKeys('', '', '', '', '', key)
+            if mod:
+                ds = mod().digest_size
+            else:
+                ds = 0
+            self.assertEqual(inMac.verifyDigestSize, ds)
+            if mod:
+                mod, i, o, ds = outMac._getMAC(macName, key)
+            seqid = 0
+            data = key
+            packet = '\x00' * 4 + key
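+            # The expected MAC below reproduces the HMAC construction by
+            # hand: an inner digest over ((key XOR ipad) || packet) and an
+            # outer digest over ((key XOR opad) || inner digest), where i and
+            # o are the padded keys returned by _getMAC above and the packet
+            # is the 32-bit sequence number followed by the payload.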
+            if mod:
+                mac = mod(o + mod(i + packet).digest()).digest()
+            else:
+                mac = ''
+            self.assertEqual(outMac.makeMAC(seqid, data), mac)
+            self.assertTrue(inMac.verify(seqid, data, mac))
+
+
+
+class CounterTestCase(unittest.TestCase):
+    """
+    Tests for the _Counter helper class.
+    """
+    if Crypto is None:
+        skip = "cannot run w/o PyCrypto"
+
+    if pyasn1 is None:
+        skip = "Cannot run without PyASN1"
+
+
+    def test_init(self):
+        """
+        Test that the counter is initialized correctly.
+        """
+        counter = transport._Counter('\x00' * 8 + '\xff' * 8, 8)
+        self.assertEqual(counter.blockSize, 8)
+        self.assertEqual(counter.count.tostring(), '\x00' * 8)
+
+
+    def test_count(self):
+        """
+        Test that the counter counts incrementally and wraps at the top.
+        """
+        counter = transport._Counter('\x00', 1)
+        self.assertEqual(counter(), '\x01')
+        self.assertEqual(counter(), '\x02')
+        [counter() for i in range(252)]
+        self.assertEqual(counter(), '\xff')
+        self.assertEqual(counter(), '\x00')
+
+
+
+class TransportLoopbackTestCase(unittest.TestCase):
+    """
+    Test the server transport and client transport against each other,
+    """
+    if Crypto is None:
+        skip = "cannot run w/o PyCrypto"
+
+    if pyasn1 is None:
+        skip = "Cannot run without PyASN1"
+
+
+    def _runClientServer(self, mod):
+        """
+        Run an async client and server, modifying each using the mod function
+        provided.  Returns a Deferred called back when both Protocols have
+        disconnected.
+
+        @type mod: C{func}
+        @rtype: C{defer.Deferred}
+        """
+        factory = MockFactory()
+        server = transport.SSHServerTransport()
+        server.factory = factory
+        factory.startFactory()
+        server.errors = []
+        server.receiveError = lambda code, desc: server.errors.append((
+                code, desc))
+        client = transport.SSHClientTransport()
+        client.verifyHostKey = lambda x, y: defer.succeed(None)
+        client.errors = []
+        client.receiveError = lambda code, desc: client.errors.append((
+                code, desc))
+        client.connectionSecure = lambda: client.loseConnection()
+        server = mod(server)
+        client = mod(client)
+        def check(ignored, server, client):
+            name = repr([server.supportedCiphers[0],
+                         server.supportedMACs[0],
+                         server.supportedKeyExchanges[0],
+                         server.supportedCompressions[0]])
+            self.assertEqual(client.errors, [])
+            self.assertEqual(server.errors, [(
+                        transport.DISCONNECT_CONNECTION_LOST,
+                        "user closed connection")])
+            if server.supportedCiphers[0] == 'none':
+                self.assertFalse(server.isEncrypted(), name)
+                self.assertFalse(client.isEncrypted(), name)
+            else:
+                self.assertTrue(server.isEncrypted(), name)
+                self.assertTrue(client.isEncrypted(), name)
+            if server.supportedMACs[0] == 'none':
+                self.assertFalse(server.isVerified(), name)
+                self.assertFalse(client.isVerified(), name)
+            else:
+                self.assertTrue(server.isVerified(), name)
+                self.assertTrue(client.isVerified(), name)
+
+        d = loopback.loopbackAsync(server, client)
+        d.addCallback(check, server, client)
+        return d
+
+
+    def test_ciphers(self):
+        """
+        Test that the client and server play nicely together, in all
+        the various combinations of ciphers.
+        """
+        deferreds = []
+        for cipher in transport.SSHTransportBase.supportedCiphers + ['none']:
+            def setCipher(proto):
+                proto.supportedCiphers = [cipher]
+                return proto
+            deferreds.append(self._runClientServer(setCipher))
+        return defer.DeferredList(deferreds, fireOnOneErrback=True)
+
+
+    def test_macs(self):
+        """
+        Like test_ciphers, but for the various MACs.
+        """
+        deferreds = []
+        for mac in transport.SSHTransportBase.supportedMACs + ['none']:
+            def setMAC(proto):
+                proto.supportedMACs = [mac]
+                return proto
+            deferreds.append(self._runClientServer(setMAC))
+        return defer.DeferredList(deferreds, fireOnOneErrback=True)
+
+
+    def test_keyexchanges(self):
+        """
+        Like test_ciphers, but for the various key exchanges.
+        """
+        deferreds = []
+        for kex in transport.SSHTransportBase.supportedKeyExchanges:
+            def setKeyExchange(proto):
+                proto.supportedKeyExchanges = [kex]
+                return proto
+            deferreds.append(self._runClientServer(setKeyExchange))
+        return defer.DeferredList(deferreds, fireOnOneErrback=True)
+
+
+    def test_compressions(self):
+        """
+        Like test_ciphers, but for the various compressions.
+        """
+        deferreds = []
+        for compression in transport.SSHTransportBase.supportedCompressions:
+            def setCompression(proto):
+                proto.supportedCompressions = [compression]
+                return proto
+            deferreds.append(self._runClientServer(setCompression))
+        return defer.DeferredList(deferreds, fireOnOneErrback=True)
+
+
+class RandomNumberTestCase(unittest.TestCase):
+    """
+    Tests for the random number generator L{_getRandomNumber} and private
+    key generator L{_generateX}.
+    """
+    skip = dependencySkip
+
+    def test_usesSuppliedRandomFunction(self):
+        """
+        L{_getRandomNumber} returns an integer constructed directly from the
+        bytes returned by the random byte generator passed to it.
+        """
+        def random(bytes):
+            # The number of bytes requested will be the value of each byte
+            # we return.
+            return chr(bytes) * bytes
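+        # 32 bits means 4 bytes are requested, so random(4) returns
+        # '\x04' * 4, which decodes big-endian to 0x04040404, i.e. the value
+        # asserted below.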
+        self.assertEqual(
+            transport._getRandomNumber(random, 32),
+            4 << 24 | 4 << 16 | 4 << 8 | 4)
+
+
+    def test_rejectsNonByteMultiples(self):
+        """
+        L{_getRandomNumber} raises L{ValueError} if the number of bits
+        passed to L{_getRandomNumber} is not a multiple of 8.
+        """
+        self.assertRaises(
+            ValueError,
+            transport._getRandomNumber, None, 9)
+
+
+    def test_excludesSmall(self):
+        """
+        If the random byte generator passed to L{_generateX} produces bytes
+        which would result in 0 or 1 being returned, these bytes are
+        discarded and another attempt is made to produce a larger value.
+        """
+        results = [chr(0), chr(1), chr(127)]
+        def random(bytes):
+            return results.pop(0) * bytes
+        self.assertEqual(
+            transport._generateX(random, 8),
+            127)
+
+
+    def test_excludesLarge(self):
+        """
+        If the random byte generator passed to L{_generateX} produces bytes
+        which would result in C{(2 ** bits) - 1} being returned, these bytes
+        are discarded and another attempt is made to produce a smaller
+        value.
+        """
+        results = [chr(255), chr(64)]
+        def random(bytes):
+            return results.pop(0) * bytes
+        self.assertEqual(
+            transport._generateX(random, 8),
+            64)
+
+
+
+class OldFactoryTestCase(unittest.TestCase):
+    """
+    The old C{SSHFactory.getPublicKeys}() returned mappings of key names to
+    strings of key blobs, and C{SSHFactory.getPrivateKeys}() returned mappings
+    of key names to PyCrypto key objects (they could also be specified with
+    the C{publicKeys} and C{privateKeys} attributes).  This is no longer
+    supported by the C{SSHServerTransport}, so we warn the user if they create
+    an old factory.
+    """
+
+    if Crypto is None:
+        skip = "cannot run w/o PyCrypto"
+
+    if pyasn1 is None:
+        skip = "Cannot run without PyASN1"
+
+
+    def test_getPublicKeysWarning(self):
+        """
+        If the return value of C{getPublicKeys}() isn't a mapping from key
+        names to C{Key} objects, then warn the user and convert the mapping.
+        """
+        sshFactory = MockOldFactoryPublicKeys()
+        self.assertWarns(DeprecationWarning,
+                "Returning a mapping from strings to strings from"
+                " getPublicKeys()/publicKeys (in %s) is deprecated.  Return "
+                "a mapping from strings to Key objects instead." %
+                (qual(MockOldFactoryPublicKeys),),
+                factory.__file__, sshFactory.startFactory)
+        self.assertEqual(sshFactory.publicKeys, MockFactory().getPublicKeys())
+
+
+    def test_getPrivateKeysWarning(self):
+        """
+        If the return value of C{getPrivateKeys}() isn't a mapping from key
+        names to C{Key} objects, then warn the user and convert the mapping.
+        """
+        sshFactory = MockOldFactoryPrivateKeys()
+        self.assertWarns(DeprecationWarning,
+                "Returning a mapping from strings to PyCrypto key objects from"
+                " getPrivateKeys()/privateKeys (in %s) is deprecated.  Return"
+                " a mapping from strings to Key objects instead." %
+                (qual(MockOldFactoryPrivateKeys),),
+                factory.__file__, sshFactory.startFactory)
+        self.assertEqual(sshFactory.privateKeys,
+                          MockFactory().getPrivateKeys())
+
+
+    def test_publicKeysWarning(self):
+        """
+        If the value of the C{publicKeys} attribute isn't a mapping from key
+        names to C{Key} objects, then warn the user and convert the mapping.
+        """
+        sshFactory = MockOldFactoryPublicKeys()
+        sshFactory.publicKeys = sshFactory.getPublicKeys()
+        self.assertWarns(DeprecationWarning,
+                "Returning a mapping from strings to strings from"
+                " getPublicKeys()/publicKeys (in %s) is deprecated.  Return "
+                "a mapping from strings to Key objects instead." %
+                (qual(MockOldFactoryPublicKeys),),
+                factory.__file__, sshFactory.startFactory)
+        self.assertEqual(sshFactory.publicKeys, MockFactory().getPublicKeys())
+
+
+    def test_privateKeysWarning(self):
+        """
+        If the return value of C{privateKeys} attribute isn't a mapping from
+        key names to C{Key} objects, then warn the user and convert the
+        mapping.
+        """
+        sshFactory = MockOldFactoryPrivateKeys()
+        sshFactory.privateKeys = sshFactory.getPrivateKeys()
+        self.assertWarns(DeprecationWarning,
+                "Returning a mapping from strings to PyCrypto key objects from"
+                " getPrivateKeys()/privateKeys (in %s) is deprecated.  Return"
+                " a mapping from strings to Key objects instead." %
+                (qual(MockOldFactoryPrivateKeys),),
+                factory.__file__, sshFactory.startFactory)
+        self.assertEqual(sshFactory.privateKeys,
+                          MockFactory().getPrivateKeys())
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_userauth.py b/ThirdParty/Twisted/twisted/conch/test/test_userauth.py
new file mode 100644
index 0000000..d027faa
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_userauth.py
@@ -0,0 +1,1077 @@
+# -*- test-case-name: twisted.conch.test.test_userauth -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the implementation of the ssh-userauth service.
+
+Maintainer: Paul Swartz
+"""
+
+from zope.interface import implements
+
+from twisted.cred.checkers import ICredentialsChecker
+from twisted.cred.credentials import IUsernamePassword, ISSHPrivateKey
+from twisted.cred.credentials import IPluggableAuthenticationModules
+from twisted.cred.credentials import IAnonymous
+from twisted.cred.error import UnauthorizedLogin
+from twisted.cred.portal import IRealm, Portal
+from twisted.conch.error import ConchError, ValidPublicKey
+from twisted.internet import defer, task
+from twisted.protocols import loopback
+from twisted.trial import unittest
+
+try:
+    import Crypto.Cipher.DES3, Crypto.Cipher.XOR
+    import pyasn1
+except ImportError:
+    keys = None
+
+
+    class transport:
+        class SSHTransportBase:
+            """
+            A stub class so that later class definitions won't die.
+            """
+
+    class userauth:
+        class SSHUserAuthClient:
+            """
+            A stub class so that later class definitions won't die.
+            """
+else:
+    from twisted.conch.ssh.common import NS
+    from twisted.conch.checkers import SSHProtocolChecker
+    from twisted.conch.ssh import keys, userauth, transport
+    from twisted.conch.test import keydata
+
+
+
+class ClientUserAuth(userauth.SSHUserAuthClient):
+    """
+    A mock user auth client.
+    """
+
+
+    def getPublicKey(self):
+        """
+        If this is the first time we've been called, return a blob for
+        the DSA key.  Otherwise, return a blob
+        for the RSA key.
+        """
+        if self.lastPublicKey:
+            return keys.Key.fromString(keydata.publicRSA_openssh)
+        else:
+            return defer.succeed(keys.Key.fromString(keydata.publicDSA_openssh))
+
+
+    def getPrivateKey(self):
+        """
+        Return the private key object for the RSA key.
+        """
+        return defer.succeed(keys.Key.fromString(keydata.privateRSA_openssh))
+
+
+    def getPassword(self, prompt=None):
+        """
+        Return 'foo' as the password.
+        """
+        return defer.succeed('foo')
+
+
+    def getGenericAnswers(self, name, information, answers):
+        """
+        Return 'foo' as the answer to two questions.
+        """
+        return defer.succeed(('foo', 'foo'))
+
+
+
+class OldClientAuth(userauth.SSHUserAuthClient):
+    """
+    The old SSHUserAuthClient returned a PyCrypto key object from
+    getPrivateKey() and a string from getPublicKey().
+    """
+
+
+    def getPrivateKey(self):
+        return defer.succeed(keys.Key.fromString(
+            keydata.privateRSA_openssh).keyObject)
+
+
+    def getPublicKey(self):
+        return keys.Key.fromString(keydata.publicRSA_openssh).blob()
+
+
+
+class ClientAuthWithoutPrivateKey(userauth.SSHUserAuthClient):
+    """
+    This client doesn't have a private key, but it does have a public key.
+    """
+
+
+    def getPrivateKey(self):
+        return
+
+
+    def getPublicKey(self):
+        return keys.Key.fromString(keydata.publicRSA_openssh)
+
+
+
+class FakeTransport(transport.SSHTransportBase):
+    """
+    L{userauth.SSHUserAuthServer} expects an SSH transport whose factory
+    attribute has a portal attribute. Because the portal is important for
+    testing authentication, we need to be able to provide an interesting portal
+    object to the L{SSHUserAuthServer}.
+
+    In addition, we want to be able to capture any packets sent over the
+    transport.
+
+    @ivar packets: a list of 2-tuples: (messageType, data).  Each 2-tuple is
+        a sent packet.
+    @type packets: C{list}
+    @ivar lostConnection: True if loseConnection has been called on us.
+    @type lostConnection: C{bool}
+    """
+
+
+    class Service(object):
+        """
+        A mock service, representing the other service offered by the server.
+        """
+        name = 'nancy'
+
+
+        def serviceStarted(self):
+            pass
+
+
+
+    class Factory(object):
+        """
+        A mock factory, representing the factory that spawned this user auth
+        service.
+        """
+
+
+        def getService(self, transport, service):
+            """
+            Return our fake service.
+            """
+            if service == 'none':
+                return FakeTransport.Service
+
+
+
+    def __init__(self, portal):
+        self.factory = self.Factory()
+        self.factory.portal = portal
+        self.lostConnection = False
+        self.transport = self
+        self.packets = []
+
+
+
+    def sendPacket(self, messageType, message):
+        """
+        Record the packet sent by the service.
+        """
+        self.packets.append((messageType, message))
+
+
+    def isEncrypted(self, direction):
+        """
+        Pretend that this transport encrypts traffic in both directions. The
+        SSHUserAuthServer disables password authentication if the transport
+        isn't encrypted.
+        """
+        return True
+
+
+    def loseConnection(self):
+        self.lostConnection = True
+
+
+
+class Realm(object):
+    """
+    A mock realm for testing L{userauth.SSHUserAuthServer}.
+
+    This realm is not actually used in the course of testing, so it returns the
+    simplest thing that could possibly work.
+    """
+    implements(IRealm)
+
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        return defer.succeed((interfaces[0], None, lambda: None))
+
+
+
+class PasswordChecker(object):
+    """
+    A very simple username/password checker which authenticates anyone whose
+    password matches their username and rejects all others.
+    """
+    credentialInterfaces = (IUsernamePassword,)
+    implements(ICredentialsChecker)
+
+
+    def requestAvatarId(self, creds):
+        if creds.username == creds.password:
+            return defer.succeed(creds.username)
+        return defer.fail(UnauthorizedLogin("Invalid username/password pair"))
+
+
+
+class PrivateKeyChecker(object):
+    """
+    A very simple public key checker which authenticates anyone whose
+    public/private keypair matches keydata.publicRSA_openssh and
+    keydata.privateRSA_openssh.
+    """
+    credentialInterfaces = (ISSHPrivateKey,)
+    implements(ICredentialsChecker)
+
+
+
+    def requestAvatarId(self, creds):
+        if creds.blob == keys.Key.fromString(keydata.publicRSA_openssh).blob():
+            if creds.signature is not None:
+                obj = keys.Key.fromString(creds.blob)
+                if obj.verify(creds.signature, creds.sigData):
+                    return creds.username
+            else:
+                raise ValidPublicKey()
+        raise UnauthorizedLogin()
+
+
+
+class PAMChecker(object):
+    """
+    A simple PAM checker which asks the user for a password and verifies the
+    user if the password is the same as their username.
+    """
+    credentialInterfaces = (IPluggableAuthenticationModules,)
+    implements(ICredentialsChecker)
+
+
+    def requestAvatarId(self, creds):
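+        # Judging by the server tests below, prompt code 2 is relayed as an
+        # echoed prompt and code 1 as a hidden one; the expected answers come
+        # back as (text, 0) pairs.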
+        d = creds.pamConversion([('Name: ', 2), ("Password: ", 1)])
+        def check(values):
+            if values == [(creds.username, 0), (creds.username, 0)]:
+                return creds.username
+            raise UnauthorizedLogin()
+        return d.addCallback(check)
+
+
+
+class AnonymousChecker(object):
+    """
+    A simple checker which isn't supported by L{SSHUserAuthServer}.
+    """
+    credentialInterfaces = (IAnonymous,)
+    implements(ICredentialsChecker)
+
+
+
+class SSHUserAuthServerTestCase(unittest.TestCase):
+    """
+    Tests for SSHUserAuthServer.
+    """
+
+
+    if keys is None:
+        skip = "cannot run w/o PyCrypto"
+
+
+    def setUp(self):
+        self.realm = Realm()
+        self.portal = Portal(self.realm)
+        self.portal.registerChecker(PasswordChecker())
+        self.portal.registerChecker(PrivateKeyChecker())
+        self.portal.registerChecker(PAMChecker())
+        self.authServer = userauth.SSHUserAuthServer()
+        self.authServer.transport = FakeTransport(self.portal)
+        self.authServer.serviceStarted()
+        self.authServer.supportedAuthentications.sort() # give a consistent
+                                                        # order
+
+
+    def tearDown(self):
+        self.authServer.serviceStopped()
+        self.authServer = None
+
+
+    def _checkFailed(self, ignored):
+        """
+        Check that the authentication has failed.
+        """
+        self.assertEqual(self.authServer.transport.packets[-1],
+                (userauth.MSG_USERAUTH_FAILURE,
+                NS('keyboard-interactive,password,publickey') + '\x00'))
+
+
+    def test_noneAuthentication(self):
+        """
+        A client may request a list of authentication 'method name' values
+        that may continue by using the "none" authentication 'method name'.
+
+        See RFC 4252 Section 5.2.
+        """
+        d = self.authServer.ssh_USERAUTH_REQUEST(NS('foo') + NS('service') +
+                                                 NS('none'))
+        return d.addCallback(self._checkFailed)
+
+
+    def test_successfulPasswordAuthentication(self):
+        """
+        When provided with correct password authentication information, the
+        server should respond by sending a MSG_USERAUTH_SUCCESS message with
+        no other data.
+
+        See RFC 4252, Section 5.1.
+        """
+        packet = NS('foo') + NS('none') + NS('password') + chr(0) + NS('foo')
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        def check(ignored):
+            self.assertEqual(
+                self.authServer.transport.packets,
+                [(userauth.MSG_USERAUTH_SUCCESS, '')])
+        return d.addCallback(check)
+
+
+    def test_failedPasswordAuthentication(self):
+        """
+        When provided with invalid authentication details, the server should
+        respond by sending a MSG_USERAUTH_FAILURE message which states whether
+        the authentication was partially successful, and provides other, open
+        options for authentication.
+
+        See RFC 4252, Section 5.1.
+        """
+        # packet = username, next_service, authentication type, FALSE, password
+        packet = NS('foo') + NS('none') + NS('password') + chr(0) + NS('bar')
+        self.authServer.clock = task.Clock()
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        self.assertEqual(self.authServer.transport.packets, [])
+        self.authServer.clock.advance(2)
+        return d.addCallback(self._checkFailed)
+
+
+    def test_successfulPrivateKeyAuthentication(self):
+        """
+        Test that private key authentication completes successfully.
+        """
+        blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
+        obj = keys.Key.fromString(keydata.privateRSA_openssh)
+        packet = (NS('foo') + NS('none') + NS('publickey') + '\xff'
+                + NS(obj.sshType()) + NS(blob))
+        self.authServer.transport.sessionID = 'test'
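+        # Per RFC 4252, section 7, the signature covers the session
+        # identifier followed by the fields of the userauth request packet.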
+        signature = obj.sign(NS('test') + chr(userauth.MSG_USERAUTH_REQUEST)
+                + packet)
+        packet += NS(signature)
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        def check(ignored):
+            self.assertEqual(self.authServer.transport.packets,
+                    [(userauth.MSG_USERAUTH_SUCCESS, '')])
+        return d.addCallback(check)
+
+
+    def test_requestRaisesConchError(self):
+        """
+        ssh_USERAUTH_REQUEST should raise a ConchError if tryAuth returns
+        None. Added to catch a bug noticed by pyflakes.
+        """
+        d = defer.Deferred()
+
+        def mockCbFinishedAuth(ignored):
+            self.fail('request should have raised ConchError')
+
+        def mockTryAuth(kind, user, data):
+            return None
+
+        def mockEbBadAuth(reason):
+            d.errback(reason.value)
+
+        self.patch(self.authServer, 'tryAuth', mockTryAuth)
+        self.patch(self.authServer, '_cbFinishedAuth', mockCbFinishedAuth)
+        self.patch(self.authServer, '_ebBadAuth', mockEbBadAuth)
+
+        packet = NS('user') + NS('none') + NS('public-key') + NS('data')
+        # If an error other than ConchError is raised, this will trigger an
+        # exception.
+        self.authServer.ssh_USERAUTH_REQUEST(packet)
+        return self.assertFailure(d, ConchError)
+
+
+    def test_verifyValidPrivateKey(self):
+        """
+        Test that verifying a valid private key works.
+        """
+        blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
+        packet = (NS('foo') + NS('none') + NS('publickey') + '\x00'
+                + NS('ssh-rsa') + NS(blob))
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        def check(ignored):
+            self.assertEqual(self.authServer.transport.packets,
+                    [(userauth.MSG_USERAUTH_PK_OK, NS('ssh-rsa') + NS(blob))])
+        return d.addCallback(check)
+
+
+    def test_failedPrivateKeyAuthenticationWithoutSignature(self):
+        """
+        Test that private key authentication fails when the public key
+        is invalid.
+        """
+        blob = keys.Key.fromString(keydata.publicDSA_openssh).blob()
+        packet = (NS('foo') + NS('none') + NS('publickey') + '\x00'
+                + NS('ssh-dsa') + NS(blob))
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        return d.addCallback(self._checkFailed)
+
+
+    def test_failedPrivateKeyAuthenticationWithSignature(self):
+        """
+        Test that private key authentication fails when the signature is
+        invalid.
+        """
+        blob = keys.Key.fromString(keydata.publicRSA_openssh).blob()
+        obj = keys.Key.fromString(keydata.privateRSA_openssh)
+        packet = (NS('foo') + NS('none') + NS('publickey') + '\xff'
+                + NS('ssh-rsa') + NS(blob) + NS(obj.sign(blob)))
+        self.authServer.transport.sessionID = 'test'
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        return d.addCallback(self._checkFailed)
+
+
+    def test_successfulPAMAuthentication(self):
+        """
+        Test that keyboard-interactive authentication succeeds.
+        """
+        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
+                + NS('') + NS(''))
+        response = '\x00\x00\x00\x02' + NS('foo') + NS('foo')
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        self.authServer.ssh_USERAUTH_INFO_RESPONSE(response)
+        def check(ignored):
+            self.assertEqual(self.authServer.transport.packets,
+                    [(userauth.MSG_USERAUTH_INFO_REQUEST, (NS('') + NS('')
+                        + NS('') + '\x00\x00\x00\x02' + NS('Name: ') + '\x01'
+                        + NS('Password: ') + '\x00')),
+                     (userauth.MSG_USERAUTH_SUCCESS, '')])
+
+        return d.addCallback(check)
+
+
+    def test_failedPAMAuthentication(self):
+        """
+        Test that keyboard-interactive authentication fails.
+        """
+        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
+                + NS('') + NS(''))
+        response = '\x00\x00\x00\x02' + NS('bar') + NS('bar')
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        self.authServer.ssh_USERAUTH_INFO_RESPONSE(response)
+        def check(ignored):
+            self.assertEqual(self.authServer.transport.packets[0],
+                    (userauth.MSG_USERAUTH_INFO_REQUEST, (NS('') + NS('')
+                        + NS('') + '\x00\x00\x00\x02' + NS('Name: ') + '\x01'
+                        + NS('Password: ') + '\x00')))
+        return d.addCallback(check).addCallback(self._checkFailed)
+
+
+    def test_invalid_USERAUTH_INFO_RESPONSE_not_enough_data(self):
+        """
+        If ssh_USERAUTH_INFO_RESPONSE gets an invalid packet,
+        the user authentication should fail.
+        """
+        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
+                + NS('') + NS(''))
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        self.authServer.ssh_USERAUTH_INFO_RESPONSE(NS('\x00\x00\x00\x00' +
+            NS('hi')))
+        return d.addCallback(self._checkFailed)
+
+
+    def test_invalid_USERAUTH_INFO_RESPONSE_too_much_data(self):
+        """
+        If ssh_USERAUTH_INFO_RESPONSE gets too much data, the user
+        authentication should fail.
+        """
+        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
+                + NS('') + NS(''))
+        response = '\x00\x00\x00\x02' + NS('foo') + NS('foo') + NS('foo')
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        self.authServer.ssh_USERAUTH_INFO_RESPONSE(response)
+        return d.addCallback(self._checkFailed)
+
+
+    def test_onlyOnePAMAuthentication(self):
+        """
+        Because it requires an intermediate message, one can't send a second
+        keyboard-interactive request while the first is still pending.
+        """
+        packet = (NS('foo') + NS('none') + NS('keyboard-interactive')
+                + NS('') + NS(''))
+        self.authServer.ssh_USERAUTH_REQUEST(packet)
+        self.authServer.ssh_USERAUTH_REQUEST(packet)
+        self.assertEqual(self.authServer.transport.packets[-1][0],
+                transport.MSG_DISCONNECT)
+        self.assertEqual(self.authServer.transport.packets[-1][1][3],
+                chr(transport.DISCONNECT_PROTOCOL_ERROR))
+
+
+    def test_ignoreUnknownCredInterfaces(self):
+        """
+        L{SSHUserAuthServer} sets up
+        C{SSHUserAuthServer.supportedAuthentications} by checking the portal's
+        credentials interfaces and mapping them to SSH authentication method
+        strings.  If the Portal advertises an interface that
+        L{SSHUserAuthServer} can't map, it should be ignored.  This is a white
+        box test.
+        """
+        server = userauth.SSHUserAuthServer()
+        server.transport = FakeTransport(self.portal)
+        self.portal.registerChecker(AnonymousChecker())
+        server.serviceStarted()
+        server.serviceStopped()
+        server.supportedAuthentications.sort() # give a consistent order
+        self.assertEqual(server.supportedAuthentications,
+                          ['keyboard-interactive', 'password', 'publickey'])
+
+
+    def test_removePasswordIfUnencrypted(self):
+        """
+        Test that the userauth service does not advertise password
+        authentication if the password would be sent in cleartext.
+        """
+        self.assertIn('password', self.authServer.supportedAuthentications)
+        # no encryption
+        clearAuthServer = userauth.SSHUserAuthServer()
+        clearAuthServer.transport = FakeTransport(self.portal)
+        clearAuthServer.transport.isEncrypted = lambda x: False
+        clearAuthServer.serviceStarted()
+        clearAuthServer.serviceStopped()
+        self.failIfIn('password', clearAuthServer.supportedAuthentications)
+        # only encrypt incoming (the direction the password is sent)
+        halfAuthServer = userauth.SSHUserAuthServer()
+        halfAuthServer.transport = FakeTransport(self.portal)
+        halfAuthServer.transport.isEncrypted = lambda x: x == 'in'
+        halfAuthServer.serviceStarted()
+        halfAuthServer.serviceStopped()
+        self.assertIn('password', halfAuthServer.supportedAuthentications)
+
+
+    def test_removeKeyboardInteractiveIfUnencrypted(self):
+        """
+        Test that the userauth service does not advertise keyboard-interactive
+        authentication if the password would be sent in cleartext.
+        """
+        self.assertIn('keyboard-interactive',
+                self.authServer.supportedAuthentications)
+        # no encryption
+        clearAuthServer = userauth.SSHUserAuthServer()
+        clearAuthServer.transport = FakeTransport(self.portal)
+        clearAuthServer.transport.isEncrypted = lambda x: False
+        clearAuthServer.serviceStarted()
+        clearAuthServer.serviceStopped()
+        self.failIfIn('keyboard-interactive',
+                clearAuthServer.supportedAuthentications)
+        # only encrypt incoming (the direction the password is sent)
+        halfAuthServer = userauth.SSHUserAuthServer()
+        halfAuthServer.transport = FakeTransport(self.portal)
+        halfAuthServer.transport.isEncrypted = lambda x: x == 'in'
+        halfAuthServer.serviceStarted()
+        halfAuthServer.serviceStopped()
+        self.assertIn('keyboard-interactive',
+                halfAuthServer.supportedAuthentications)
+
+
+    def test_unencryptedConnectionWithoutPasswords(self):
+        """
+        If the L{SSHUserAuthServer} is not advertising passwords, then an
+        unencrypted connection should not cause any warnings or exceptions.
+        This is a white box test.
+        """
+        # create a Portal without password authentication
+        portal = Portal(self.realm)
+        portal.registerChecker(PrivateKeyChecker())
+
+        # no encryption
+        clearAuthServer = userauth.SSHUserAuthServer()
+        clearAuthServer.transport = FakeTransport(portal)
+        clearAuthServer.transport.isEncrypted = lambda x: False
+        clearAuthServer.serviceStarted()
+        clearAuthServer.serviceStopped()
+        self.assertEqual(clearAuthServer.supportedAuthentications,
+                          ['publickey'])
+
+        # only encrypt incoming (the direction the password is sent)
+        halfAuthServer = userauth.SSHUserAuthServer()
+        halfAuthServer.transport = FakeTransport(portal)
+        halfAuthServer.transport.isEncrypted = lambda x: x == 'in'
+        halfAuthServer.serviceStarted()
+        halfAuthServer.serviceStopped()
+        self.assertEqual(clearAuthServer.supportedAuthentications,
+                          ['publickey'])
+
+
+    def test_loginTimeout(self):
+        """
+        Test that the login times out.
+        """
+        timeoutAuthServer = userauth.SSHUserAuthServer()
+        timeoutAuthServer.clock = task.Clock()
+        timeoutAuthServer.transport = FakeTransport(self.portal)
+        timeoutAuthServer.serviceStarted()
+        timeoutAuthServer.clock.advance(11 * 60 * 60)
+        timeoutAuthServer.serviceStopped()
+        self.assertEqual(timeoutAuthServer.transport.packets,
+                [(transport.MSG_DISCONNECT,
+                '\x00' * 3 +
+                chr(transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE) +
+                NS("you took too long") + NS(''))])
+        self.assertTrue(timeoutAuthServer.transport.lostConnection)
+
+
+    def test_cancelLoginTimeout(self):
+        """
+        Test that stopping the service also stops the login timeout.
+        """
+        timeoutAuthServer = userauth.SSHUserAuthServer()
+        timeoutAuthServer.clock = task.Clock()
+        timeoutAuthServer.transport = FakeTransport(self.portal)
+        timeoutAuthServer.serviceStarted()
+        timeoutAuthServer.serviceStopped()
+        timeoutAuthServer.clock.advance(11 * 60 * 60)
+        self.assertEqual(timeoutAuthServer.transport.packets, [])
+        self.assertFalse(timeoutAuthServer.transport.lostConnection)
+
+
+    def test_tooManyAttempts(self):
+        """
+        Test that the server disconnects if the client fails authentication
+        too many times.
+        """
+        packet = NS('foo') + NS('none') + NS('password') + chr(0) + NS('bar')
+        self.authServer.clock = task.Clock()
+        for i in range(21):
+            d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+            self.authServer.clock.advance(2)
+        def check(ignored):
+            self.assertEqual(self.authServer.transport.packets[-1],
+                (transport.MSG_DISCONNECT,
+                '\x00' * 3 +
+                chr(transport.DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE) +
+                NS("too many bad auths") + NS('')))
+        return d.addCallback(check)
+
+
+    def test_failIfUnknownService(self):
+        """
+        If the user requests a service that we don't support, the
+        authentication should fail.
+        """
+        packet = NS('foo') + NS('') + NS('password') + chr(0) + NS('foo')
+        self.authServer.clock = task.Clock()
+        d = self.authServer.ssh_USERAUTH_REQUEST(packet)
+        return d.addCallback(self._checkFailed)
+
+
+    def test__pamConvErrors(self):
+        """
+        _pamConv should fail if it gets a message that's not 1 or 2.
+        """
+        def secondTest(ignored):
+            d2 = self.authServer._pamConv([('', 90)])
+            return self.assertFailure(d2, ConchError)
+
+        d = self.authServer._pamConv([('', 3)])
+        return self.assertFailure(d, ConchError).addCallback(secondTest)
+
+
+    def test_tryAuthEdgeCases(self):
+        """
+        tryAuth() has two edge cases that are difficult to reach.
+
+        1) an authentication method auth_* returns None instead of a Deferred.
+        2) an authentication type that is defined does not have a matching
+           auth_* method.
+
+        Both these cases should return a Deferred which fails with a
+        ConchError.
+        """
+        def mockAuth(packet):
+            return None
+
+        self.patch(self.authServer, 'auth_publickey', mockAuth) # first case
+        self.patch(self.authServer, 'auth_password', None) # second case
+
+        def secondTest(ignored):
+            d2 = self.authServer.tryAuth('password', None, None)
+            return self.assertFailure(d2, ConchError)
+
+        d1 = self.authServer.tryAuth('publickey', None, None)
+        return self.assertFailure(d1, ConchError).addCallback(secondTest)
+
+
+
+class SSHUserAuthClientTestCase(unittest.TestCase):
+    """
+    Tests for SSHUserAuthClient.
+    """
+
+
+    if keys is None:
+        skip = "cannot run w/o PyCrypto"
+
+
+    def setUp(self):
+        self.authClient = ClientUserAuth('foo', FakeTransport.Service())
+        self.authClient.transport = FakeTransport(None)
+        self.authClient.transport.sessionID = 'test'
+        self.authClient.serviceStarted()
+
+
+    def tearDown(self):
+        self.authClient.serviceStopped()
+        self.authClient = None
+
+
+    def test_init(self):
+        """
+        Test that the client is initialized properly.
+        """
+        self.assertEqual(self.authClient.user, 'foo')
+        self.assertEqual(self.authClient.instance.name, 'nancy')
+        self.assertEqual(self.authClient.transport.packets,
+                [(userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
+                    + NS('none'))])
+
+
+    def test_USERAUTH_SUCCESS(self):
+        """
+        Test that the client succeeds properly.
+        """
+        instance = [None]
+        def stubSetService(service):
+            instance[0] = service
+        self.authClient.transport.setService = stubSetService
+        self.authClient.ssh_USERAUTH_SUCCESS('')
+        self.assertEqual(instance[0], self.authClient.instance)
+
+
+    def test_publickey(self):
+        """
+        Test that the client can authenticate with a public key.
+        """
+        self.authClient.ssh_USERAUTH_FAILURE(NS('publickey') + '\x00')
+        self.assertEqual(self.authClient.transport.packets[-1],
+                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
+                    + NS('publickey') + '\x00' + NS('ssh-dss')
+                    + NS(keys.Key.fromString(
+                        keydata.publicDSA_openssh).blob())))
+        # that key isn't good
+        self.authClient.ssh_USERAUTH_FAILURE(NS('publickey') + '\x00')
+        blob = NS(keys.Key.fromString(keydata.publicRSA_openssh).blob())
+        self.assertEqual(self.authClient.transport.packets[-1],
+                (userauth.MSG_USERAUTH_REQUEST, (NS('foo') + NS('nancy')
+                    + NS('publickey') + '\x00'+ NS('ssh-rsa') + blob)))
+        self.authClient.ssh_USERAUTH_PK_OK(NS('ssh-rsa')
+            + NS(keys.Key.fromString(keydata.publicRSA_openssh).blob()))
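+        # The data to sign is the session identifier followed by the fields
+        # of the userauth request, per RFC 4252, section 7.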
+        sigData = (NS(self.authClient.transport.sessionID)
+                + chr(userauth.MSG_USERAUTH_REQUEST) + NS('foo')
+                + NS('nancy') + NS('publickey') + '\x01' + NS('ssh-rsa')
+                + blob)
+        obj = keys.Key.fromString(keydata.privateRSA_openssh)
+        self.assertEqual(self.authClient.transport.packets[-1],
+                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
+                    + NS('publickey') + '\x01' + NS('ssh-rsa') + blob
+                    + NS(obj.sign(sigData))))
+
+
+    def test_publickey_without_privatekey(self):
+        """
+        If the SSHUserAuthClient doesn't return anything from signData,
+        the client should start the authentication over again by requesting
+        'none' authentication.
+        """
+        authClient = ClientAuthWithoutPrivateKey('foo',
+                                                 FakeTransport.Service())
+
+        authClient.transport = FakeTransport(None)
+        authClient.transport.sessionID = 'test'
+        authClient.serviceStarted()
+        authClient.tryAuth('publickey')
+        authClient.transport.packets = []
+        self.assertIdentical(authClient.ssh_USERAUTH_PK_OK(''), None)
+        self.assertEqual(authClient.transport.packets, [
+                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy') +
+                 NS('none'))])
+
+
+    def test_old_publickey_getPublicKey(self):
+        """
+        Old SSHUserAuthClients returned strings of public key blobs from
+        getPublicKey().  Test that a DeprecationWarning is raised but the key is
+        verified correctly.
+        """
+        oldAuth = OldClientAuth('foo', FakeTransport.Service())
+        oldAuth.transport = FakeTransport(None)
+        oldAuth.transport.sessionID = 'test'
+        oldAuth.serviceStarted()
+        oldAuth.transport.packets = []
+        self.assertWarns(DeprecationWarning, "Returning a string from "
+                         "SSHUserAuthClient.getPublicKey() is deprecated since "
+                         "Twisted 9.0.  Return a keys.Key() instead.",
+                         userauth.__file__, oldAuth.tryAuth, 'publickey')
+        self.assertEqual(oldAuth.transport.packets, [
+                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy') +
+                 NS('publickey') + '\x00' + NS('ssh-rsa') +
+                 NS(keys.Key.fromString(keydata.publicRSA_openssh).blob()))])
+
+
+    def test_old_publickey_getPrivateKey(self):
+        """
+        Old SSHUserAuthClients returned a PyCrypto key object from
+        getPrivateKey().  Test that _cbSignData warns the user about the
+        deprecation, but still signs the data correctly.
+        """
+        oldAuth = OldClientAuth('foo', FakeTransport.Service())
+        d = self.assertWarns(DeprecationWarning, "Returning a PyCrypto key "
+                             "object from SSHUserAuthClient.getPrivateKey() is "
+                             "deprecated since Twisted 9.0.  "
+                             "Return a keys.Key() instead.", userauth.__file__,
+                             oldAuth.signData, None, 'data')
+        def _checkSignedData(sig):
+            self.assertEqual(sig,
+                keys.Key.fromString(keydata.privateRSA_openssh).sign(
+                    'data'))
+        d.addCallback(_checkSignedData)
+        return d
+
+
+    def test_no_publickey(self):
+        """
+        If there's no public key, auth_publickey should return a Deferred
+        called back with a False value.
+        """
+        self.authClient.getPublicKey = lambda x: None
+        d = self.authClient.tryAuth('publickey')
+        def check(result):
+            self.assertFalse(result)
+        return d.addCallback(check)
+
+    def test_password(self):
+        """
+        Test that the client can authenticate with a password.  This
+        includes changing the password.
+        """
+        self.authClient.ssh_USERAUTH_FAILURE(NS('password') + '\x00')
+        self.assertEqual(self.authClient.transport.packets[-1],
+                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
+                    + NS('password') + '\x00' + NS('foo')))
+        self.authClient.ssh_USERAUTH_PK_OK(NS('') + NS(''))
+        self.assertEqual(self.authClient.transport.packets[-1],
+                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
+                    + NS('password') + '\xff' + NS('foo') * 2))
+
+
+    def test_no_password(self):
+        """
+        If getPassword returns None, tryAuth should return False.
+        """
+        self.authClient.getPassword = lambda: None
+        self.assertFalse(self.authClient.tryAuth('password'))
+
+
+    def test_keyboardInteractive(self):
+        """
+        Test that the client can authenticate using keyboard-interactive
+        authentication.
+        """
+        self.authClient.ssh_USERAUTH_FAILURE(NS('keyboard-interactive')
+               + '\x00')
+        self.assertEqual(self.authClient.transport.packets[-1],
+                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
+                    + NS('keyboard-interactive') + NS('')*2))
+        self.authClient.ssh_USERAUTH_PK_OK(NS('')*3 + '\x00\x00\x00\x02'
+                + NS('Name: ') + '\xff' + NS('Password: ') + '\x00')
+        self.assertEqual(self.authClient.transport.packets[-1],
+                (userauth.MSG_USERAUTH_INFO_RESPONSE, '\x00\x00\x00\x02'
+                    + NS('foo')*2))
+
+
+    def test_USERAUTH_PK_OK_unknown_method(self):
+        """
+        If C{SSHUserAuthClient} gets a MSG_USERAUTH_PK_OK packet when it's not
+        expecting it, it should fail the current authentication and move on to
+        the next type.
+        """
+        self.authClient.lastAuth = 'unknown'
+        self.authClient.transport.packets = []
+        self.authClient.ssh_USERAUTH_PK_OK('')
+        self.assertEqual(self.authClient.transport.packets,
+                          [(userauth.MSG_USERAUTH_REQUEST, NS('foo') +
+                            NS('nancy') + NS('none'))])
+
+
+    def test_USERAUTH_FAILURE_sorting(self):
+        """
+        ssh_USERAUTH_FAILURE should sort the methods by their position
+        in SSHUserAuthClient.preferredOrder.  Methods that are not in
+        preferredOrder should be sorted at the end of that list.
+        """
+        def auth_firstmethod():
+            self.authClient.transport.sendPacket(255, 'here is data')
+        def auth_anothermethod():
+            self.authClient.transport.sendPacket(254, 'other data')
+            return True
+        self.authClient.auth_firstmethod = auth_firstmethod
+        self.authClient.auth_anothermethod = auth_anothermethod
+
+        # although they shouldn't get called, method callbacks auth_* MUST
+        # exist in order for the test to work properly.
+        self.authClient.ssh_USERAUTH_FAILURE(NS('anothermethod,password') +
+                                             '\x00')
+        # should send password packet
+        self.assertEqual(self.authClient.transport.packets[-1],
+                (userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
+                    + NS('password') + '\x00' + NS('foo')))
+        self.authClient.ssh_USERAUTH_FAILURE(
+            NS('firstmethod,anothermethod,password') + '\xff')
+        self.assertEqual(self.authClient.transport.packets[-2:],
+                          [(255, 'here is data'), (254, 'other data')])
+
+
+    def test_disconnectIfNoMoreAuthentication(self):
+        """
+        If there are no more available user authentication messages,
+        the SSHUserAuthClient should disconnect with code
+        DISCONNECT_NO_MORE_AUTH_METHODS_AVAILABLE.
+        """
+        self.authClient.ssh_USERAUTH_FAILURE(NS('password') + '\x00')
+        self.authClient.ssh_USERAUTH_FAILURE(NS('password') + '\xff')
+        self.assertEqual(self.authClient.transport.packets[-1],
+                          (transport.MSG_DISCONNECT, '\x00\x00\x00\x0e' +
+                           NS('no more authentication methods available') +
+                           '\x00\x00\x00\x00'))
+
+
+    def test_ebAuth(self):
+        """
+        _ebAuth (the generic authentication error handler) should send
+        a request for the 'none' authentication method.
+        """
+        self.authClient.transport.packets = []
+        self.authClient._ebAuth(None)
+        self.assertEqual(self.authClient.transport.packets,
+                [(userauth.MSG_USERAUTH_REQUEST, NS('foo') + NS('nancy')
+                    + NS('none'))])
+
+
+    def test_defaults(self):
+        """
+        getPublicKey() should return None.  getPrivateKey() should return a
+        failed Deferred.  getPassword() should return a failed Deferred.
+        getGenericAnswers() should return a failed Deferred.
+        """
+        authClient = userauth.SSHUserAuthClient('foo', FakeTransport.Service())
+        self.assertIdentical(authClient.getPublicKey(), None)
+        def check(result):
+            result.trap(NotImplementedError)
+            d = authClient.getPassword()
+            return d.addCallback(self.fail).addErrback(check2)
+        def check2(result):
+            result.trap(NotImplementedError)
+            d = authClient.getGenericAnswers(None, None, None)
+            return d.addCallback(self.fail).addErrback(check3)
+        def check3(result):
+            result.trap(NotImplementedError)
+        d = authClient.getPrivateKey()
+        return d.addCallback(self.fail).addErrback(check)
+
+
+
+class LoopbackTestCase(unittest.TestCase):
+
+
+    if keys is None:
+        skip = "cannot run w/o PyCrypto or PyASN1"
+
+
+    class Factory:
+        class Service:
+            name = 'TestService'
+
+
+            def serviceStarted(self):
+                self.transport.loseConnection()
+
+
+            def serviceStopped(self):
+                pass
+
+
+        def getService(self, avatar, name):
+            return self.Service
+
+
+    def test_loopback(self):
+        """
+        Test that the userauth server and client play nicely with each other.
+        """
+        server = userauth.SSHUserAuthServer()
+        client = ClientUserAuth('foo', self.Factory.Service())
+
+        # set up transports
+        server.transport = transport.SSHTransportBase()
+        server.transport.service = server
+        server.transport.isEncrypted = lambda x: True
+        client.transport = transport.SSHTransportBase()
+        client.transport.service = client
+        server.transport.sessionID = client.transport.sessionID = ''
+        # don't send key exchange packet
+        server.transport.sendKexInit = client.transport.sendKexInit = \
+                lambda: None
+
+        # set up server authentication
+        server.transport.factory = self.Factory()
+        server.passwordDelay = 0 # remove bad password delay
+        realm = Realm()
+        portal = Portal(realm)
+        checker = SSHProtocolChecker()
+        checker.registerChecker(PasswordChecker())
+        checker.registerChecker(PrivateKeyChecker())
+        checker.registerChecker(PAMChecker())
+        checker.areDone = lambda aId: (
+            len(checker.successfulCredentials[aId]) == 3)
+        portal.registerChecker(checker)
+        server.transport.factory.portal = portal
+
+        d = loopback.loopbackAsync(server.transport, client.transport)
+        server.transport.transport.logPrefix = lambda: '_ServerLoopback'
+        client.transport.transport.logPrefix = lambda: '_ClientLoopback'
+
+        server.serviceStarted()
+        client.serviceStarted()
+
+        def check(ignored):
+            self.assertEqual(server.transport.service.name, 'TestService')
+        return d.addCallback(check)
+
+
+
+class ModuleInitializationTestCase(unittest.TestCase):
+    if keys is None:
+        skip = "cannot run w/o PyCrypto or PyASN1"
+
+
+    def test_messages(self):
+        # Several message types have the value 60; check that
+        # MSG_USERAUTH_PK_OK is always the one which is mapped.
+        self.assertEqual(userauth.SSHUserAuthServer.protocolMessages[60],
+                         'MSG_USERAUTH_PK_OK')
+        self.assertEqual(userauth.SSHUserAuthClient.protocolMessages[60],
+                         'MSG_USERAUTH_PK_OK')
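The sorting behaviour exercised by test_USERAUTH_FAILURE_sorting above amounts to ordering the methods the server offers by their index in the client's preferredOrder list, with unknown methods pushed to the end. A minimal stand-alone sketch of that rule (the preferredOrder contents here are illustrative, not the real SSHUserAuthClient default):

    preferredOrder = ['publickey', 'password', 'keyboard-interactive']

    def orderByPreference(canContinue):
        # Known methods keep their relative preference; anything the client
        # does not know about sorts after all of them.
        def rank(method):
            if method in preferredOrder:
                return preferredOrder.index(method)
            return len(preferredOrder)
        return sorted(canContinue, key=rank)

    # orderByPreference(['hostbased', 'password', 'publickey'])
    # -> ['publickey', 'password', 'hostbased']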
diff --git a/ThirdParty/Twisted/twisted/conch/test/test_window.py b/ThirdParty/Twisted/twisted/conch/test/test_window.py
new file mode 100644
index 0000000..6d7d9d2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/test/test_window.py
@@ -0,0 +1,67 @@
+
+"""
+Tests for the insults windowing module, L{twisted.conch.insults.window}.
+"""
+
+from twisted.trial.unittest import TestCase
+
+from twisted.conch.insults.window import TopWindow, ScrolledArea, TextOutput
+
+
+class TopWindowTests(TestCase):
+    """
+    Tests for L{TopWindow}, the root window container class.
+    """
+
+    def test_paintScheduling(self):
+        """
+        Verify that L{TopWindow.repaint} schedules an actual paint to occur
+        using the scheduling object passed to its initializer.
+        """
+        paints = []
+        scheduled = []
+        root = TopWindow(lambda: paints.append(None), scheduled.append)
+
+        # Nothing should have happened yet.
+        self.assertEqual(paints, [])
+        self.assertEqual(scheduled, [])
+
+        # Cause a paint to be scheduled.
+        root.repaint()
+        self.assertEqual(paints, [])
+        self.assertEqual(len(scheduled), 1)
+
+        # Do another one to verify nothing else happens as long as the previous
+        # one is still pending.
+        root.repaint()
+        self.assertEqual(paints, [])
+        self.assertEqual(len(scheduled), 1)
+
+        # Run the actual paint call.
+        scheduled.pop()()
+        self.assertEqual(len(paints), 1)
+        self.assertEqual(scheduled, [])
+
+        # Do one more to verify that now that the previous one is finished
+        # future paints will succeed.
+        root.repaint()
+        self.assertEqual(len(paints), 1)
+        self.assertEqual(len(scheduled), 1)
+
+
+
+class ScrolledAreaTests(TestCase):
+    """
+    Tests for L{ScrolledArea}, a widget which creates a viewport containing
+    another widget and can reposition that viewport using scrollbars.
+    """
+    def test_parent(self):
+        """
+        The parent of the widget passed to L{ScrolledArea} is set to a new
+        L{Viewport} created by the L{ScrolledArea} which itself has the
+        L{ScrolledArea} instance as its parent.
+        """
+        widget = TextOutput()
+        scrolled = ScrolledArea(widget)
+        self.assertIdentical(widget.parent, scrolled._viewport)
+        self.assertIdentical(scrolled._viewport.parent, scrolled)
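As TopWindowTests above shows, TopWindow takes a painter callable and a one-argument scheduler, and repaint() keeps at most one paint pending at a time. A hedged sketch of wiring that to the Twisted reactor (the callLater-based scheduler is an assumption for illustration; any callable that eventually invokes its argument would do):

    from twisted.internet import reactor
    from twisted.conch.insults.window import TopWindow

    def paint():
        # Redraw the terminal contents here.
        pass

    root = TopWindow(paint, lambda f: reactor.callLater(0, f))
    root.repaint()   # schedules exactly one paint
    root.repaint()   # no-op while the previous paint is still pending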
diff --git a/ThirdParty/Twisted/twisted/conch/topfiles/NEWS b/ThirdParty/Twisted/twisted/conch/topfiles/NEWS
new file mode 100644
index 0000000..b92cad3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/topfiles/NEWS
@@ -0,0 +1,432 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted Conch 12.3.0 (2012-12-20)
+=================================
+
+Bugfixes
+--------
+ - Passing multiple --auth arguments to conch now correctly adds all
+   the specified checkers to the conch server (#5881)
+ - ckeygen --showpub now uses OPENSSH as default display, instead of
+   breaking because no display type was passed. (#5889)
+ - ckeygen --showpub catches EncryptedKeyError instead of BadKeyError
+   to detect that a key needs to be decrypted with a passphrase.
+   (#5890)
+
+Other
+-----
+ - #5923
+
+
+Twisted Conch 12.2.0 (2012-08-26)
+=================================
+
+Features
+--------
+ - twisted.conch.ssh.transport.SSHTransport now returns an
+   SSHTransportAddress from the getPeer() and getHost() methods.
+   (#2997)
+
+Bugfixes
+--------
+ - twisted.conch now supports commercial SSH implementations which
+   don't comply with the IETF standard (#1902)
+ - twisted.conch.ssh.userauth now works correctly with hash
+   randomization enabled. (#5776)
+ - twisted.conch no longer relies on __builtins__ being a dict, which
+   is a purely CPython implementation detail (#5779)
+
+Other
+-----
+ - #5496, #5617, #5700, #5748, #5777
+
+
+Twisted Conch 12.1.0 (2012-06-02)
+=================================
+
+Features
+--------
+ - twisted.conch.tap now supports cred plugins (#4753)
+
+Bugfixes
+--------
+ - twisted.conch.client.knownhosts now handles errors encountered
+   parsing hashed entries in a known hosts file. (#5616)
+
+Improved Documentation
+----------------------
+ - Conch examples window.tac and telnet_echo.tac now have better
+   explanations. (#5590)
+
+Other
+-----
+ - #5580
+
+
+Twisted Conch 12.0.0 (2012-02-10)
+=================================
+
+Features
+--------
+ - use Python shadow module for authentication if it's available
+   (#3242)
+
+Bugfixes
+--------
+ - twisted.conch.ssh.transport.messages no longer ends up with old
+   message IDs on platforms with differing dict() orderings (#5352)
+
+Other
+-----
+ - #5225
+
+
+Twisted Conch 11.1.0 (2011-11-15)
+=================================
+
+Features
+--------
+ - twisted.conch.ssh.filetransfer.FileTransferClient now handles short
+   status messages, not strictly allowed by the RFC, but sent by some
+   SSH implementations. (#3009)
+ - twisted.conch.manhole now supports CTRL-A and CTRL-E to trigger
+   HOME and END functions respectively. (#5252)
+
+Bugfixes
+--------
+ - When run from an unpacked source tarball or a VCS checkout, the
+   bin/conch/ scripts will now use the version of Twisted they are
+   part of. (#3526)
+ - twisted.conch.insults.window.ScrolledArea now passes no extra
+   arguments to object.__init__ (which works on more versions of
+   Python). (#4197)
+ - twisted.conch.telnet.ITelnetProtocol now has the correct signature
+   for its unhandledSubnegotiation() method. (#4751)
+ - twisted.conch.ssh.userauth.SSHUserAuthClient now more closely
+   follows the RFC 4251 definition of boolean values when negotiating
+   for key-based authentication, allowing better interoperability with
+   other SSH implementations. (#5241)
+ - twisted.conch.recvline.RecvLine now ignores certain function keys
+   in its keystrokeReceived method instead of raising an exception.
+   (#5246)
+
+Deprecations and Removals
+-------------------------
+ - The --user option to `twistd manhole' has been removed as it was
+   dead code with no functionality associated with it. (#5283)
+
+Other
+-----
+ - #5107, #5256, #5349
+
+
+Twisted Conch 11.0.0 (2011-04-01)
+=================================
+
+Bugfixes
+--------
+ - The transport for subsystem protocols now declares that it
+   implements ITransport and implements the getHost and getPeer
+   methods. (#2453)
+ - twisted.conch.ssh.transport.SSHTransportBase now responds to key
+   exchange messages at any time during a connection (instead of only
+   at connection setup).  It also queues non-key exchange messages
+   sent during key exchange to avoid corrupting the connection state.
+   (#4395)
+ - Importing twisted.conch.ssh.common no longer breaks pow(base, exp[,
+   modulus]) when the gmpy package is installed and base is not an
+   integer. (#4803)
+ - twisted.conch.ls.lsLine now returns a time string which does not
+   consider the locale. (#4937)
+
+Improved Documentation
+----------------------
+ - Changed the man page for ckeygen to accurately reflect what it
+   does, and corrected its synopsis so that a second "ckeygen" is not
+   a required part of the ckeygen command line.  (#4738)
+
+Other
+-----
+ - #2112
+
+
+Twisted Conch 10.2.0 (2010-11-29)
+=================================
+
+Bugfixes
+--------
+ - twisted.conch.ssh.factory.SSHFactory no longer disables coredumps.
+   (#2715)
+ - The Deferred returned by twisted.conch.telnet.TelnetTransport.will
+   now fires with an OptionRefused failure if the peer responds with a
+   refusal for the option negotiation. (#4231)
+ - SSHServerTransport and SSHClientTransport in
+   twisted.conch.ssh.transport no longer use PyCrypto to generate
+   random numbers for DH KEX.  They also now generate values from the
+   full valid range, rather than only half of it. (#4469)
+ - twisted.conch.ssh.connection.SSHConnection now errbacks leftover
+   request deferreds on connection shutdown. (#4483)
+
+Other
+-----
+ - #4677
+
+
+Twisted Conch 10.1.0 (2010-06-27)
+=================================
+
+Features
+--------
+ - twisted.conch.ssh.transport.SSHTransportBase now allows supported
+   SSH protocol versions to be overridden. (#4428)
+
+Bugfixes
+--------
+ - SSHSessionProcessProtocol now doesn't close the session when stdin
+   is closed, but instead when both stdout and stderr are. (#4350)
+ - The 'cftp' command-line tool will no longer encounter an
+   intermittent error, crashing at startup with a ZeroDivisionError
+   while trying to report progress. (#4463)
+ - twisted.conch.ssh.connection.SSHConnection now replies to requests
+   to open an unknown channel with a OPEN_UNKNOWN_CHANNEL_TYPE message
+   instead of closing the connection. (#4490)
+
+Deprecations and Removals
+-------------------------
+ - twisted.conch.insults.client was deprecated. (#4095)
+ - twisted.conch.insults.colors has been deprecated.  Please use
+   twisted.conch.insults.helper instead. (#4096)
+ - Removed twisted.conch.ssh.asn1, which has been deprecated since
+   Twisted 9.0. (#4097)
+ - Removed twisted.conch.ssh.common.Entropy, as Entropy.get_bytes has
+   been  deprecated since 2007 and Entropy.get_bytes was the only
+   attribute of Entropy. (#4098)
+ - Removed twisted.conch.ssh.keys.getPublicKeyString, which has been
+   deprecated since 2007.  Also updated the conch examples
+   sshsimpleserver.py and sshsimpleclient.py to reflect this removal.
+   (#4099)
+ - Removed twisted.conch.ssh.keys.makePublicKeyString, which has been
+   deprecated since 2007. (#4100)
+ - Removed twisted.conch.ssh.keys.getPublicKeyObject, which has been
+   deprecated since 2007. (#4101)
+ - Removed twisted.conch.ssh.keys.getPrivateKeyObject, which has been
+   deprecated since 2007.  Also updated the conch examples to reflect
+   this removal. (#4102)
+ - Removed twisted.conch.ssh.keys.makePrivateKeyString, which has been
+   deprecated since 2007. (#4103)
+ - Removed twisted.conch.ssh.keys.makePublicKeyBlob, which has been
+   deprecated since 2007. (#4104)
+ - Removed twisted.conch.ssh.keys.signData,
+   twisted.conch.ssh.keys.verifySignature, and
+   twisted.conch.ssh.keys.printKey, which have been deprecated since
+   2007.   (#4105)
+
+Other
+-----
+ - #3849, #4408, #4454
+
+
+Twisted Conch 10.0.0 (2010-03-01)
+=================================
+
+Bugfixes
+--------
+ - twisted.conch.checkers.SSHPublicKeyDatabase now looks in the
+   correct user directory for authorized_keys files. (#3984)
+
+ - twisted.conch.ssh.SSHUserAuthClient now honors preferredOrder when
+   authenticating. (#4266)
+
+Other
+-----
+ - #2391, #4203, #4265
+
+
+Twisted Conch 9.0.0 (2009-11-24)
+================================
+
+Fixes
+-----
+ - The SSH key parser has been removed and conch now uses pyASN1 to parse keys.
+   This should fix a number of cases where parsing a key would fail, but it now
+   requires users to have pyASN1 installed (#3391)
+ - The time field on SFTP file listings should now be correct (#3503)
+ - The day field on SFTP file listings should now be correct on Windows (#3503)
+ - The "cftp" sftp client now truncates files it is uploading over (#2519)
+ - The telnet server protocol can now properly respond to subnegotiation
+   requests (#3655)
+ - Tests and factoring of the SSHv2 server implementation are now much better
+   (#2682)
+ - The SSHv2 server now sends "exit-signal" messages to the client, instead of
+   raising an exception, when a process dies due to a signal (#2687)
+ - cftp's client-side "exec" command now uses /bin/sh if the current user has
+   no shell (#3914)
+
+Deprecations and Removals
+-------------------------
+ - The buggy SSH connection sharing feature of the SSHv2 client was removed
+   (#3498)
+ - Use of strings and PyCrypto objects to represent keys is deprecated in favor
+   of using Conch Key objects (#2682)
+
+Other
+-----
+ - #3548, #3537, #3551, #3220, #3568, #3689, #3709, #3809, #2763, #3540, #3750,
+   #3897, #3813, #3871, #3916, #4047, #3940, #4050
+
+
+Conch 8.2.0 (2008-12-16)
+========================
+
+Features
+--------
+ - The type of the protocols instantiated by SSHFactory is now parameterized
+   (#3443)
+
+Fixes
+-----
+ - A file descriptor leak has been fixed (#3213, #1789)
+ - "File Already Exists" errors are now handled more correctly (#3033)
+ - Handling of CR IAC in TelnetClient is now improved (#3305)
+ - SSHAgent is no longer completely unusable (#3332)
+ - The performance of insults.ClientProtocol is now greatly increased by
+   delivering more than one byte at a time to application code (#3386)
+ - Manhole and the conch server no longer need to be run as root when not
+   necessary (#2607)
+ - The value of FILEXFER_ATTR_ACMODTIME has been corrected (#2902)
+ - The management of known_hosts and host key verification has been overhauled
+   (#1376, #1301, #3494, #3496, #1292, #3499)
+
+Other
+-----
+ - #3193, #1633
+
+
+8.1.0 (2008-05-18)
+==================
+
+Fixes
+-----
+ - A regression was fixed whereby the publicKeys and privateKeys attributes of
+   SSHFactory would not be interpreted as strings (#3141)
+ - The sshsimpleserver.py example had a minor bug fix (#3135)
+ - The deprecated mktap API is no longer used (#3127)
+ - An infelicity was fixed whereby a NameError would be raised in certain
+   circumstances during authentication when a ConchError should have been
+   (#3154)
+ - A workaround was added to conch.insults for a bug in gnome-terminal whereby
+   it would not scroll correctly (#3189)
+
+
+8.0.0 (2008-03-17)
+==================
+
+Features
+--------
+ - Add DEC private mode manipulation methods to ITerminalTransport. (#2403)
+
+Fixes
+-----
+ - Parameterize the scheduler function used by the insults TopWindow widget.
+   This change breaks backwards compatibility in the TopWindow initializer.
+   (#2413)
+ - Notify subsystems, like SFTP, of connection close. (#2421)
+ - Change the process file descriptor "connection lost" code to reverse the
+   setNonBlocking operation done during initialization. (#2371)
+ - Change ConsoleManhole to wait for connectionLost notification before
+   stopping the reactor. (#2123, #2371)
+ - Make SSHUserAuthServer.ssh_USERAUTH_REQUEST return a Deferred. (#2528)
+ - Manhole's initializer calls its parent class's initializer with its
+   namespace argument. (#2587)
+ - Handle ^C during input line continuation in manhole by updating the prompt
+   and line buffer correctly. (#2663)
+ - Make twisted.conch.telnet.Telnet by default reject all attempts to enable
+   options. (#1967)
+ - Reduce the number of calls into application code to deliver application-level
+   data in twisted.conch.telnet.Telnet.dataReceived (#2107)
+ - Fix definition and management of extended attributes in conch file transfer.
+   (#3010)
+ - Fix parsing of OpenSSH-generated RSA keys with differing ASN.1 packing style.
+   (#3008)
+ - Fix handling of missing $HOME in twisted.conch.client.unix. (#3061)
+
+Misc
+----
+ - #2267, #2378, #2604, #2707, #2341, #2685, #2679, #2912, #2977, #2678, #2709
+   #2063, #2847
+
+
+0.8.0 (2007-01-06)
+==================
+
+Features
+--------
+ - Manhole now supports Ctrl-l to emulate the same behavior in the
+   Python interactive interpreter (#1565)
+ - Python 2.5 is now supported (#1867)
+
+Misc
+----
+ - #1673, #1636, #1892, #1943, #2057, #1180, #1185, #2148, #2159, #2291, 
+
+Deprecations and Removals
+-------------------------
+
+ - The old twisted.cred API (Identities, Authorizers, etc) is no
+   longer supported (#1440)
+
+
+0.7.0 (2006-05-21)
+==================
+
+Features
+--------
+  - Timeout support for ExpectableBuffer.expect()
+
+Fixes
+-----
+  - ~5x speedup for bulk data transfer (#1325)
+  - Misc: #1428
+
+0.6.0:
+
+ Bugfixes and improvements in SSH support and Insults:
+  - PAM authenticator support factored out into twisted.cred
+  - Poorly supported next-line terminal operation replaced with simple \r\n
+
+ New functionality:
+  - An ITerminalTransport implementation with expect-like features
+  - Support for the "none" SSH cipher
+  - Insults support for handling more keystrokes and more methods for
+    terminal manipulation
+  - New, simple insults-based widget library added
+
+ Better test coverage:
+  - Dependence on `localhost' name removed
+  - Some timing-sensitive tests changed to be more reliable
+  - Process spawning tests initialize environment more robustly
+
+0.5.0:
+
+ Many improvements to SSH support. Here's some in particular:
+  - Add --reconnect option to conch binary
+  - utmp/wtmp logging
+  - Unix login improvements, PAM support
+  - Add "cftp" -- Conch SFTP.
+  - Deferred retrieval of public keys is supported
+  - PAM support for client and server
+  - Bugfixes: 
+	- fix conch failing to exit, and hangs.
+	- Remote->Local forwarding
+	- Channel closing
+	- Invalid known_host writing
+	- Many others
+
+ New functionality:
+  - twisted.conch.telnet: new, much improved telnet implementation.
+  - twisted.conch.insults: Basic curses-like terminal support (server-side).
+  - twisted.conch.manhole: new interactive Python interpreter,
+    can be used with conch's telnet, ssh, or on the console.
+	- Main features: Syntax coloring, line editing, and useful interactive
+       handling of Deferreds.
diff --git a/ThirdParty/Twisted/twisted/conch/topfiles/README b/ThirdParty/Twisted/twisted/conch/topfiles/README
new file mode 100644
index 0000000..0b64668
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/topfiles/README
@@ -0,0 +1,11 @@
+Twisted Conch 12.3.0
+
+Twisted Conch depends on Twisted Core and on Python Crypto extensions
+(<http://www.pycrypto.org>).
+
+The pyasn1 module (<http://pyasn1.sourceforge.net/>) is also required.
+
+gmpy (<http://code.google.com/p/gmpy/>) is strongly recommended to improve
+performance.
+
+Twisted Conch includes a couple of simple GUI applications that depend on Tkinter.
diff --git a/ThirdParty/Twisted/twisted/conch/topfiles/setup.py b/ThirdParty/Twisted/twisted/conch/topfiles/setup.py
new file mode 100644
index 0000000..19b9496
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/topfiles/setup.py
@@ -0,0 +1,48 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+try:
+    from twisted.python import dist
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+if __name__ == '__main__':
+    if sys.version_info[:2] >= (2, 4):
+        extraMeta = dict(
+            classifiers=[
+                "Development Status :: 4 - Beta",
+                "Environment :: Console",
+                "Environment :: No Input/Output (Daemon)",
+                "Intended Audience :: Developers",
+                "Intended Audience :: End Users/Desktop",
+                "Intended Audience :: System Administrators",
+                "License :: OSI Approved :: MIT License",
+                "Programming Language :: Python",
+                "Topic :: Internet",
+                "Topic :: Security",
+                "Topic :: Software Development :: Libraries :: Python Modules",
+                "Topic :: Terminals",
+            ])
+    else:
+        extraMeta = {}
+
+    dist.setup(
+        twisted_subproject="conch",
+        scripts=dist.getScripts("conch"),
+        # metadata
+        name="Twisted Conch",
+        description="Twisted SSHv2 implementation.",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python@twistedmatrix.com",
+        maintainer="Paul Swartz",
+        url="http://twistedmatrix.com/trac/wiki/TwistedConch",
+        license="MIT",
+        long_description="""\
+Conch is an SSHv2 implementation using the Twisted framework.  It
+includes a server, client, a SFTP client, and a key generator.
+""",
+        **extraMeta)
diff --git a/ThirdParty/Twisted/twisted/conch/ttymodes.py b/ThirdParty/Twisted/twisted/conch/ttymodes.py
new file mode 100644
index 0000000..00b4495
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ttymodes.py
@@ -0,0 +1,121 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+
+import tty
+# this module was autogenerated.
+
+VINTR = 1
+VQUIT = 2
+VERASE = 3
+VKILL = 4
+VEOF = 5
+VEOL = 6
+VEOL2 = 7
+VSTART = 8
+VSTOP = 9
+VSUSP = 10
+VDSUSP = 11
+VREPRINT = 12
+VWERASE = 13
+VLNEXT = 14
+VFLUSH = 15
+VSWTCH = 16
+VSTATUS = 17
+VDISCARD = 18
+IGNPAR = 30
+PARMRK = 31
+INPCK = 32
+ISTRIP = 33
+INLCR = 34
+IGNCR = 35
+ICRNL = 36
+IUCLC = 37
+IXON = 38
+IXANY = 39
+IXOFF = 40
+IMAXBEL = 41
+ISIG = 50
+ICANON = 51
+XCASE = 52
+ECHO = 53
+ECHOE = 54
+ECHOK = 55
+ECHONL = 56
+NOFLSH = 57
+TOSTOP = 58
+IEXTEN = 59
+ECHOCTL = 60
+ECHOKE = 61
+PENDIN = 62
+OPOST = 70
+OLCUC = 71
+ONLCR = 72
+OCRNL = 73
+ONOCR = 74
+ONLRET = 75
+CS7 = 90
+CS8 = 91
+PARENB = 92
+PARODD = 93
+TTY_OP_ISPEED = 128
+TTY_OP_OSPEED = 129
+
+TTYMODES = {
+    1 : 'VINTR',
+    2 : 'VQUIT',
+    3 : 'VERASE',
+    4 : 'VKILL',
+    5 : 'VEOF',
+    6 : 'VEOL',
+    7 : 'VEOL2',
+    8 : 'VSTART',
+    9 : 'VSTOP',
+    10 : 'VSUSP',
+    11 : 'VDSUSP',
+    12 : 'VREPRINT',
+    13 : 'VWERASE',
+    14 : 'VLNEXT',
+    15 : 'VFLUSH',
+    16 : 'VSWTCH',
+    17 : 'VSTATUS',
+    18 : 'VDISCARD',
+    30 : (tty.IFLAG, 'IGNPAR'),
+    31 : (tty.IFLAG, 'PARMRK'),
+    32 : (tty.IFLAG, 'INPCK'),
+    33 : (tty.IFLAG, 'ISTRIP'),
+    34 : (tty.IFLAG, 'INLCR'),
+    35 : (tty.IFLAG, 'IGNCR'),
+    36 : (tty.IFLAG, 'ICRNL'),
+    37 : (tty.IFLAG, 'IUCLC'),
+    38 : (tty.IFLAG, 'IXON'),
+    39 : (tty.IFLAG, 'IXANY'),
+    40 : (tty.IFLAG, 'IXOFF'),
+    41 : (tty.IFLAG, 'IMAXBEL'),
+    50 : (tty.LFLAG, 'ISIG'),
+    51 : (tty.LFLAG, 'ICANON'),
+    52 : (tty.LFLAG, 'XCASE'),
+    53 : (tty.LFLAG, 'ECHO'),
+    54 : (tty.LFLAG, 'ECHOE'),
+    55 : (tty.LFLAG, 'ECHOK'),
+    56 : (tty.LFLAG, 'ECHONL'),
+    57 : (tty.LFLAG, 'NOFLSH'),
+    58 : (tty.LFLAG, 'TOSTOP'),
+    59 : (tty.LFLAG, 'IEXTEN'),
+    60 : (tty.LFLAG, 'ECHOCTL'),
+    61 : (tty.LFLAG, 'ECHOKE'),
+    62 : (tty.LFLAG, 'PENDIN'),
+    70 : (tty.OFLAG, 'OPOST'),
+    71 : (tty.OFLAG, 'OLCUC'),
+    72 : (tty.OFLAG, 'ONLCR'),
+    73 : (tty.OFLAG, 'OCRNL'),
+    74 : (tty.OFLAG, 'ONOCR'),
+    75 : (tty.OFLAG, 'ONLRET'),
+#   90 : (tty.CFLAG, 'CS7'),
+#   91 : (tty.CFLAG, 'CS8'),
+    92 : (tty.CFLAG, 'PARENB'),
+    93 : (tty.CFLAG, 'PARODD'),
+    128 : 'ISPEED',
+    129 : 'OSPEED'
+}
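The TTYMODES table above maps SSH tty-mode opcodes either to a bare name (the special characters plus the ISPEED/OSPEED pseudo-modes) or to a (tty-flag-index, attribute-name) pair naming a termios flag word and bit; setModes() in conch/unix.py (later in this commit) consumes both forms when applying modes to a pty. A small lookup sketch, assuming twisted.conch is importable:

    import tty
    from twisted.conch import ttymodes

    opcode = 53                                       # ECHO, per the table above
    flagIndex, attrName = ttymodes.TTYMODES[opcode]   # -> (tty.LFLAG, 'ECHO')
    echoBit = getattr(tty, attrName)                  # termios ECHO bit, via the tty module
    # Flag entries are applied by or-ing echoBit into (or masking it out of)
    # the termios attribute list at index flagIndex.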
diff --git a/ThirdParty/Twisted/twisted/conch/ui/__init__.py b/ThirdParty/Twisted/twisted/conch/ui/__init__.py
new file mode 100644
index 0000000..ea0eea8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ui/__init__.py
@@ -0,0 +1,11 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+
+"""
+twisted.conch.ui is home to the UI elements for tkconch.
+
+Maintainer: Paul Swartz
+"""
diff --git a/ThirdParty/Twisted/twisted/conch/ui/ansi.py b/ThirdParty/Twisted/twisted/conch/ui/ansi.py
new file mode 100644
index 0000000..9d5e616
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ui/ansi.py
@@ -0,0 +1,240 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+"""Module to parse ANSI escape sequences
+
+Maintainer: Jean-Paul Calderone
+"""
+
+import string
+
+# Twisted imports
+from twisted.python import log
+
+class ColorText:
+    """
+    Represents an element of text along with the text's colors and
+    additional attributes.
+    """
+
+    # The colors to use
+    COLORS = ('b', 'r', 'g', 'y', 'l', 'm', 'c', 'w')
+    BOLD_COLORS = tuple([x.upper() for x in COLORS])
+    BLACK, RED, GREEN, YELLOW, BLUE, MAGENTA, CYAN, WHITE = range(len(COLORS))
+
+    # Color names
+    COLOR_NAMES = (
+        'Black', 'Red', 'Green', 'Yellow', 'Blue', 'Magenta', 'Cyan', 'White'
+    )
+
+    def __init__(self, text, fg, bg, display, bold, underline, flash, reverse):
+        self.text, self.fg, self.bg = text, fg, bg
+        self.display = display
+        self.bold = bold
+        self.underline = underline
+        self.flash = flash
+        self.reverse = reverse
+        if self.reverse:
+            self.fg, self.bg = self.bg, self.fg
+
+
+class AnsiParser:
+    """
+    Parser class for ANSI codes.
+    """
+
+    # Terminators for cursor movement ansi controls - unsupported
+    CURSOR_SET = ('H', 'f', 'A', 'B', 'C', 'D', 'R', 's', 'u', 'd','G')
+
+    # Terminators for erasure ansi controls - unsupported
+    ERASE_SET = ('J', 'K', 'P')
+    
+    # Terminators for mode change ansi controls - unsupported
+    MODE_SET = ('h', 'l')
+    
+    # Terminators for keyboard assignment ansi controls - unsupported
+    ASSIGN_SET = ('p',)
+    
+    # Terminators for color change ansi controls - supported
+    COLOR_SET = ('m',)
+
+    SETS = (CURSOR_SET, ERASE_SET, MODE_SET, ASSIGN_SET, COLOR_SET)
+
+    def __init__(self, defaultFG, defaultBG):
+        self.defaultFG, self.defaultBG = defaultFG, defaultBG
+        self.currentFG, self.currentBG = self.defaultFG, self.defaultBG
+        self.bold, self.flash, self.underline, self.reverse = 0, 0, 0, 0
+        self.display = 1
+        self.prepend = ''
+
+    
+    def stripEscapes(self, string):
+        """
+        Remove all ANSI color escapes from the given string.
+        """
+        result = ''
+        show = 1
+        i = 0
+        L = len(string)
+        while i < L:
+            if show == 0 and string[i] in _sets:
+                show = 1
+            elif show:
+                n = string.find('\x1B', i)
+                if n == -1:
+                    return result + string[i:]
+                else:
+                    result = result + string[i:n]
+                    i = n
+                    show = 0
+            i = i + 1
+        return result
+
+    def writeString(self, colorstr):
+        pass
+
+    def parseString(self, str):
+        """
+        Turn a string input into a list of L{ColorText} elements.
+        """
+
+        if self.prepend:
+            str = self.prepend + str
+            self.prepend = ''
+        parts = str.split('\x1B')
+        
+        if len(parts) == 1:
+            self.writeString(self.formatText(parts[0]))
+        else:
+            self.writeString(self.formatText(parts[0]))
+            for s in parts[1:]:
+                L = len(s)
+                i = 0 
+                type = None
+                while i < L:
+                    if s[i] not in string.digits+'[;?':
+                        break
+                    i+=1
+                if not s:
+                    self.prepend = '\x1b'
+                    return
+                if s[0]!='[':
+                    self.writeString(self.formatText(s[i+1:]))
+                    continue
+                else:
+                    s=s[1:]
+                    i-=1
+                if i==L-1:
+                    self.prepend = '\x1b['
+                    return
+                type = _setmap.get(s[i], None)
+                if type is None:
+                    continue 
+
+                if type == AnsiParser.COLOR_SET:
+                    self.parseColor(s[:i + 1])
+                    s = s[i + 1:]
+                    self.writeString(self.formatText(s))
+                elif type == AnsiParser.CURSOR_SET:
+                    cursor, s = s[:i+1], s[i+1:]
+                    self.parseCursor(cursor)
+                    self.writeString(self.formatText(s))
+                elif type == AnsiParser.ERASE_SET:
+                    erase, s = s[:i+1], s[i+1:]
+                    self.parseErase(erase)
+                    self.writeString(self.formatText(s))
+                elif type == AnsiParser.MODE_SET:
+                    mode, s = s[:i+1], s[i+1:]
+                    #self.parseErase('2J')
+                    self.writeString(self.formatText(s))
+                elif i == L:
+                    self.prepend = '\x1B[' + s
+                else:
+                    log.msg('Unhandled ANSI control type: %c' % (s[i],))
+                    s = s[i + 1:]
+                    self.writeString(self.formatText(s))
+
+    def parseColor(self, str):
+        """
+        Handle a single ANSI color sequence
+        """
+        # Drop the trailing 'm'
+        str = str[:-1]
+
+        if not str:
+            str = '0'
+
+        try:
+            parts = map(int, str.split(';'))
+        except ValueError:
+            log.msg('Invalid ANSI color sequence (%d): %s' % (len(str), str))
+            self.currentFG, self.currentBG = self.defaultFG, self.defaultBG
+            return
+
+        for x in parts:
+            if x == 0:
+                self.currentFG, self.currentBG = self.defaultFG, self.defaultBG
+                self.bold, self.flash, self.underline, self.reverse = 0, 0, 0, 0
+                self.display = 1
+            elif x == 1:
+                self.bold = 1
+            elif 30 <= x <= 37:
+                self.currentFG = x - 30
+            elif 40 <= x <= 47:
+                self.currentBG = x - 40
+            elif x == 39:
+                self.currentFG = self.defaultFG
+            elif x == 49:
+                self.currentBG = self.defaultBG
+            elif x == 4:
+                self.underline = 1
+            elif x == 5:
+                self.flash = 1
+            elif x == 7:
+                self.reverse = 1
+            elif x == 8:
+                self.display = 0
+            elif x == 22:
+                self.bold = 0
+            elif x == 24:
+                self.underline = 0
+            elif x == 25:
+                self.flash = 0
+            elif x == 27:
+                self.reverse = 0
+            elif x == 28:
+                self.display = 1
+            else:
+                log.msg('Unrecognised ANSI color command: %d' % (x,))
+
+    def parseCursor(self, cursor):
+        pass
+
+    def parseErase(self, erase):
+        pass
+
+
+    def pickColor(self, value, mode, BOLD = ColorText.BOLD_COLORS):
+        if mode:
+            return ColorText.COLORS[value]
+        else:
+            return self.bold and BOLD[value] or ColorText.COLORS[value]
+
+
+    def formatText(self, text):
+        return ColorText(
+            text,
+            self.pickColor(self.currentFG, 0),
+            self.pickColor(self.currentBG, 1),
+            self.display, self.bold, self.underline, self.flash, self.reverse
+        )
+
+
+_sets = ''.join(map(''.join, AnsiParser.SETS))
+
+_setmap = {}
+for s in AnsiParser.SETS:
+    for r in s:
+        _setmap[r] = s
+del s
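For orientation, a minimal usage sketch of the AnsiParser API defined above (the EchoParser subclass and the sample escape sequences are illustrative; tkvt100.py below shows the real consumer, which assigns writeString, parseCursor and parseErase directly):

    from twisted.conch.ui import ansi

    class EchoParser(ansi.AnsiParser):
        def writeString(self, colorText):
            # colorText is a ColorText carrying the text plus its color state.
            if colorText.text:
                print '%r fg=%s bg=%s' % (colorText.text, colorText.fg, colorText.bg)

    parser = EchoParser(ansi.ColorText.WHITE, ansi.ColorText.BLACK)
    parser.parseString('\x1b[31mred\x1b[0m plain')
    print parser.stripEscapes('\x1b[1;32mgreen\x1b[0m')   # prints: green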
diff --git a/ThirdParty/Twisted/twisted/conch/ui/tkvt100.py b/ThirdParty/Twisted/twisted/conch/ui/tkvt100.py
new file mode 100644
index 0000000..cd7581d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/ui/tkvt100.py
@@ -0,0 +1,197 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+"""Module to emulate a VT100 terminal in Tkinter.
+
+Maintainer: Paul Swartz
+"""
+
+import Tkinter, tkFont
+import ansi
+import string
+
+# ttyFont, fontWidth and fontHeight are initialized for real by
+# VT100Frame.__init__, once a Tk root exists:
+ttyFont = None       # tkFont.Font(family='Courier', size=10)
+fontWidth = None     # max(map(ttyFont.measure, string.letters + string.digits))
+fontHeight = None    # int(ttyFont.metrics()['linespace'])
+
+colorKeys = (
+    'b', 'r', 'g', 'y', 'l', 'm', 'c', 'w',
+    'B', 'R', 'G', 'Y', 'L', 'M', 'C', 'W'
+)
+
+colorMap = {
+    'b': '#000000', 'r': '#c40000', 'g': '#00c400', 'y': '#c4c400',
+    'l': '#000080', 'm': '#c400c4', 'c': '#00c4c4', 'w': '#c4c4c4',
+    'B': '#626262', 'R': '#ff0000', 'G': '#00ff00', 'Y': '#ffff00',
+    'L': '#0000ff', 'M': '#ff00ff', 'C': '#00ffff', 'W': '#ffffff',
+}
+
+class VT100Frame(Tkinter.Frame):
+    def __init__(self, *args, **kw):
+        global ttyFont, fontHeight, fontWidth
+        ttyFont = tkFont.Font(family = 'Courier', size = 10)
+        fontWidth, fontHeight = max(map(ttyFont.measure, string.letters+string.digits)), int(ttyFont.metrics()['linespace'])
+        self.width = kw.get('width', 80)
+        self.height = kw.get('height', 25)
+        self.callback = kw['callback']
+        del kw['callback']
+        kw['width'] = w = fontWidth * self.width
+        kw['height'] = h = fontHeight * self.height
+        Tkinter.Frame.__init__(self, *args, **kw)
+        self.canvas = Tkinter.Canvas(bg='#000000', width=w, height=h)
+        self.canvas.pack(side=Tkinter.TOP, fill=Tkinter.BOTH, expand=1)
+        self.canvas.bind('<Key>', self.keyPressed)
+        self.canvas.bind('<1>', lambda x: 'break')
+        self.canvas.bind('<Up>', self.upPressed)
+        self.canvas.bind('<Down>', self.downPressed)
+        self.canvas.bind('<Left>', self.leftPressed)
+        self.canvas.bind('<Right>', self.rightPressed)
+        self.canvas.focus()
+
+        self.ansiParser = ansi.AnsiParser(ansi.ColorText.WHITE, ansi.ColorText.BLACK)
+        self.ansiParser.writeString = self.writeString
+        self.ansiParser.parseCursor = self.parseCursor
+        self.ansiParser.parseErase = self.parseErase
+        #for (a, b) in colorMap.items():
+        #    self.canvas.tag_config(a, foreground=b)
+        #    self.canvas.tag_config('b'+a, background=b)
+        #self.canvas.tag_config('underline', underline=1)
+
+        self.x = 0 
+        self.y = 0
+        self.cursor = self.canvas.create_rectangle(0,0,fontWidth-1,fontHeight-1,fill='green',outline='green')
+
+    def _delete(self, sx, sy, ex, ey):
+        csx = sx*fontWidth + 1
+        csy = sy*fontHeight + 1
+        cex = ex*fontWidth + 3
+        cey = ey*fontHeight + 3
+        items = self.canvas.find_overlapping(csx,csy, cex,cey)
+        for item in items:
+            self.canvas.delete(item)
+
+    def _write(self, ch, fg, bg):
+        if self.x == self.width:
+            self.x = 0
+            self.y+=1
+            if self.y == self.height:
+                [self.canvas.move(x,0,-fontHeight) for x in self.canvas.find_all()]
+                self.y-=1
+        canvasX = self.x*fontWidth + 1
+        canvasY = self.y*fontHeight + 1
+        items = self.canvas.find_overlapping(canvasX, canvasY, canvasX+2, canvasY+2)
+        if items:
+            [self.canvas.delete(item) for item in items]
+        if bg:
+            self.canvas.create_rectangle(canvasX, canvasY, canvasX+fontWidth-1, canvasY+fontHeight-1, fill=bg, outline=bg)
+        self.canvas.create_text(canvasX, canvasY, anchor=Tkinter.NW, font=ttyFont, text=ch, fill=fg)
+        self.x+=1
+            
+    def write(self, data):
+        #print self.x,self.y,repr(data)
+        #if len(data)>5: raw_input()
+        self.ansiParser.parseString(data)
+        self.canvas.delete(self.cursor)
+        canvasX = self.x*fontWidth + 1
+        canvasY = self.y*fontHeight + 1
+        self.cursor = self.canvas.create_rectangle(canvasX,canvasY,canvasX+fontWidth-1,canvasY+fontHeight-1, fill='green', outline='green')
+        self.canvas.lower(self.cursor)
+
+    def writeString(self, i):
+        if not i.display:
+            return
+        fg = colorMap[i.fg]
+        bg = i.bg != 'b' and colorMap[i.bg]
+        for ch in i.text:
+            b = ord(ch)
+            if b == 7: # bell
+                self.bell() 
+            elif b == 8: # BS
+                if self.x:
+                    self.x-=1
+            elif b == 9: # TAB
+                [self._write(' ', fg, bg) for _ in range(8)]
+            elif b == 10:
+                if self.y == self.height-1:
+                    self._delete(0,0,self.width,0)
+                    [self.canvas.move(x,0,-fontHeight) for x in self.canvas.find_all()]
+                else:   
+                    self.y+=1
+            elif b == 13:
+                self.x = 0
+            elif 32 <= b < 127:
+                self._write(ch, fg, bg)
+
+    def parseErase(self, erase):
+        if ';' in erase:
+            end = erase[-1]
+            parts = erase[:-1].split(';')
+            [self.parseErase(x+end) for x in parts]
+            return
+        start = 0
+        x,y = self.x, self.y
+        if len(erase) > 1:
+            start = int(erase[:-1])
+        if erase[-1] == 'J':
+            if start == 0: 
+                self._delete(x,y,self.width,self.height)
+            else:
+                self._delete(0,0,self.width,self.height)
+                self.x = 0
+                self.y = 0 
+        elif erase[-1] == 'K':
+            if start == 0:
+                self._delete(x,y,self.width,y)
+            elif start == 1:
+                self._delete(0,y,x,y)
+                self.x = 0
+            else:
+                self._delete(0,y,self.width,y)
+                self.x = 0
+        elif erase[-1] == 'P':
+            self._delete(x,y,x+start,y)
+
+    def parseCursor(self, cursor):
+        #if ';' in cursor and cursor[-1]!='H':
+        #    end = cursor[-1]
+        #    parts = cursor[:-1].split(';')
+        #    [self.parseCursor(x+end) for x in parts]
+        #    return
+        start = 1
+        if len(cursor) > 1 and cursor[-1]!='H':
+            start = int(cursor[:-1])
+        if cursor[-1] == 'C':
+            self.x+=start
+        elif cursor[-1] == 'D':
+            self.x-=start
+        elif cursor[-1]=='d':
+            self.y=start-1
+        elif cursor[-1]=='G':
+            self.x=start-1
+        elif cursor[-1]=='H':
+            if len(cursor)>1:
+                y,x = map(int, cursor[:-1].split(';'))
+                y-=1
+                x-=1
+            else:
+                x,y=0,0
+            self.x = x
+            self.y = y
+
+    def keyPressed(self, event):
+        if self.callback and event.char:
+            self.callback(event.char)
+        return 'break'
+
+    def upPressed(self, event):
+        self.callback('\x1bOA')
+
+    def downPressed(self, event):
+        self.callback('\x1bOB')
+
+    def rightPressed(self, event):
+        self.callback('\x1bOC')
+
+    def leftPressed(self, event):
+        self.callback('\x1bOD')
diff --git a/ThirdParty/Twisted/twisted/conch/unix.py b/ThirdParty/Twisted/twisted/conch/unix.py
new file mode 100644
index 0000000..3a44be0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/conch/unix.py
@@ -0,0 +1,457 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.cred import portal
+from twisted.python import components, log
+from twisted.internet.error import ProcessExitedAlready
+from zope import interface
+from ssh import session, forwarding, filetransfer
+from ssh.filetransfer import FXF_READ, FXF_WRITE, FXF_APPEND, FXF_CREAT, FXF_TRUNC, FXF_EXCL
+from twisted.conch.ls import lsLine
+
+from avatar import ConchUser
+from error import ConchError
+from interfaces import ISession, ISFTPServer, ISFTPFile
+
+import struct, os, time, socket
+import fcntl, tty
+import pwd, grp
+import pty
+import ttymodes
+
+try:
+    import utmp
+except ImportError:
+    utmp = None
+
+class UnixSSHRealm:
+    interface.implements(portal.IRealm)
+
+    def requestAvatar(self, username, mind, *interfaces):
+        user = UnixConchUser(username)
+        return interfaces[0], user, user.logout
+
+
+class UnixConchUser(ConchUser):
+
+    def __init__(self, username):
+        ConchUser.__init__(self)
+        self.username = username
+        self.pwdData = pwd.getpwnam(self.username)
+        l = [self.pwdData[3]]
+        for groupname, password, gid, userlist in grp.getgrall():
+            if username in userlist:
+                l.append(gid)
+        self.otherGroups = l
+        self.listeners = {}  # dict mapping (interface, port) -> listener
+        self.channelLookup.update(
+                {"session": session.SSHSession,
+                 "direct-tcpip": forwarding.openConnectForwardingClient})
+
+        self.subsystemLookup.update(
+                {"sftp": filetransfer.FileTransferServer})
+
+    def getUserGroupId(self):
+        return self.pwdData[2:4]
+
+    def getOtherGroups(self):
+        return self.otherGroups
+
+    def getHomeDir(self):
+        return self.pwdData[5]
+
+    def getShell(self):
+        return self.pwdData[6]
+
+    def global_tcpip_forward(self, data):
+        hostToBind, portToBind = forwarding.unpackGlobal_tcpip_forward(data)
+        from twisted.internet import reactor
+        try:
+            listener = self._runAsUser(
+                reactor.listenTCP, portToBind,
+                forwarding.SSHListenForwardingFactory(
+                    self.conn, (hostToBind, portToBind),
+                    forwarding.SSHListenServerForwardingChannel),
+                interface=hostToBind)
+        except:
+            return 0
+        else:
+            self.listeners[(hostToBind, portToBind)] = listener
+            if portToBind == 0:
+                portToBind = listener.getHost()[2] # the port
+                return 1, struct.pack('>L', portToBind)
+            else:
+                return 1
+
+    def global_cancel_tcpip_forward(self, data):
+        hostToBind, portToBind = forwarding.unpackGlobal_tcpip_forward(data)
+        listener = self.listeners.get((hostToBind, portToBind), None)
+        if not listener:
+            return 0
+        del self.listeners[(hostToBind, portToBind)]
+        self._runAsUser(listener.stopListening)
+        return 1
+
+    def logout(self):
+        # remove all listeners
+        for listener in self.listeners.itervalues():
+            self._runAsUser(listener.stopListening)
+        log.msg('avatar %s logging out (%i)' % (self.username, len(self.listeners)))
+
+    def _runAsUser(self, f, *args, **kw):
+        euid = os.geteuid()
+        egid = os.getegid()
+        groups = os.getgroups()
+        uid, gid = self.getUserGroupId()
+        os.setegid(0)
+        os.seteuid(0)
+        os.setgroups(self.getOtherGroups())
+        os.setegid(gid)
+        os.seteuid(uid)
+        try:
+            f = iter(f)
+        except TypeError:
+            f = [(f, args, kw)]
+        try:
+            for i in f:
+                func = i[0]
+                args = len(i)>1 and i[1] or ()
+                kw = len(i)>2 and i[2] or {}
+                r = func(*args, **kw)
+        finally:
+            os.setegid(0)
+            os.seteuid(0)
+            os.setgroups(groups)
+            os.setegid(egid)
+            os.seteuid(euid)
+        return r
+
+class SSHSessionForUnixConchUser:
+
+    interface.implements(ISession)
+
+    def __init__(self, avatar):
+        self.avatar = avatar
+        self.environ = {'PATH': '/bin:/usr/bin:/usr/local/bin'}
+        self.pty = None
+        self.ptyTuple = 0
+
+    def addUTMPEntry(self, loggedIn=1):
+        if not utmp:
+            return
+        ipAddress = self.avatar.conn.transport.transport.getPeer().host
+        packedIp, = struct.unpack('L', socket.inet_aton(ipAddress))
+        ttyName = self.ptyTuple[2][5:]
+        t = time.time()
+        t1 = int(t)
+        t2 = int((t-t1) * 1e6)
+        entry = utmp.UtmpEntry()
+        entry.ut_type = loggedIn and utmp.USER_PROCESS or utmp.DEAD_PROCESS
+        entry.ut_pid = self.pty.pid
+        entry.ut_line = ttyName
+        entry.ut_id = ttyName[-4:]
+        entry.ut_tv = (t1,t2)
+        if loggedIn:
+            entry.ut_user = self.avatar.username
+            entry.ut_host = socket.gethostbyaddr(ipAddress)[0]
+            entry.ut_addr_v6 = (packedIp, 0, 0, 0)
+        a = utmp.UtmpRecord(utmp.UTMP_FILE)
+        a.pututline(entry)
+        a.endutent()
+        b = utmp.UtmpRecord(utmp.WTMP_FILE)
+        b.pututline(entry)
+        b.endutent()
+                            
+
+    def getPty(self, term, windowSize, modes):
+        self.environ['TERM'] = term
+        self.winSize = windowSize
+        self.modes = modes
+        master, slave = pty.openpty()
+        ttyname = os.ttyname(slave)
+        self.environ['SSH_TTY'] = ttyname 
+        self.ptyTuple = (master, slave, ttyname)
+
+    def openShell(self, proto):
+        from twisted.internet import reactor
+        if not self.ptyTuple: # we didn't get a pty-req
+            log.msg('tried to get shell without pty, failing')
+            raise ConchError("no pty")
+        uid, gid = self.avatar.getUserGroupId()
+        homeDir = self.avatar.getHomeDir()
+        shell = self.avatar.getShell()
+        self.environ['USER'] = self.avatar.username
+        self.environ['HOME'] = homeDir
+        self.environ['SHELL'] = shell
+        shellExec = os.path.basename(shell)
+        peer = self.avatar.conn.transport.transport.getPeer()
+        host = self.avatar.conn.transport.transport.getHost()
+        self.environ['SSH_CLIENT'] = '%s %s %s' % (peer.host, peer.port, host.port)
+        self.getPtyOwnership()
+        self.pty = reactor.spawnProcess(
+            proto, shell, ['-%s' % shellExec], self.environ, homeDir, uid, gid,
+            usePTY=self.ptyTuple)
+        self.addUTMPEntry()
+        fcntl.ioctl(self.pty.fileno(), tty.TIOCSWINSZ, 
+                    struct.pack('4H', *self.winSize))
+        if self.modes:
+            self.setModes()
+        self.oldWrite = proto.transport.write
+        proto.transport.write = self._writeHack
+        self.avatar.conn.transport.transport.setTcpNoDelay(1)
+
+    def execCommand(self, proto, cmd):
+        from twisted.internet import reactor
+        uid, gid = self.avatar.getUserGroupId()
+        homeDir = self.avatar.getHomeDir()
+        shell = self.avatar.getShell() or '/bin/sh'
+        command = (shell, '-c', cmd)
+        peer = self.avatar.conn.transport.transport.getPeer()
+        host = self.avatar.conn.transport.transport.getHost()
+        self.environ['SSH_CLIENT'] = '%s %s %s' % (peer.host, peer.port, host.port)
+        if self.ptyTuple:
+            self.getPtyOwnership()
+        self.pty = reactor.spawnProcess(
+            proto, shell, command, self.environ, homeDir,
+            uid, gid, usePTY=self.ptyTuple or 0)
+        if self.ptyTuple:
+            self.addUTMPEntry()
+            if self.modes:
+                self.setModes()
+#        else:
+#            tty.setraw(self.pty.pipes[0].fileno(), tty.TCSANOW)
+        self.avatar.conn.transport.transport.setTcpNoDelay(1)
+
+    def getPtyOwnership(self):
+        ttyGid = os.stat(self.ptyTuple[2])[5]
+        uid, gid = self.avatar.getUserGroupId()
+        euid, egid = os.geteuid(), os.getegid()
+        os.setegid(0)
+        os.seteuid(0)
+        try:
+            os.chown(self.ptyTuple[2], uid, ttyGid)
+        finally:
+            os.setegid(egid)
+            os.seteuid(euid)
+        
+    def setModes(self):
+        pty = self.pty
+        attr = tty.tcgetattr(pty.fileno())
+        for mode, modeValue in self.modes:
+            if not ttymodes.TTYMODES.has_key(mode): continue
+            ttyMode = ttymodes.TTYMODES[mode]
+            if len(ttyMode) == 2: # flag
+                flag, ttyAttr = ttyMode
+                if not hasattr(tty, ttyAttr): continue
+                ttyval = getattr(tty, ttyAttr)
+                if modeValue:
+                    attr[flag] = attr[flag]|ttyval
+                else:
+                    attr[flag] = attr[flag]&~ttyval
+            elif ttyMode == 'OSPEED':
+                attr[tty.OSPEED] = getattr(tty, 'B%s'%modeValue)
+            elif ttyMode == 'ISPEED':
+                attr[tty.ISPEED] = getattr(tty, 'B%s'%modeValue)
+            else:
+                if not hasattr(tty, ttyMode): continue
+                ttyval = getattr(tty, ttyMode)
+                attr[tty.CC][ttyval] = chr(modeValue)
+        tty.tcsetattr(pty.fileno(), tty.TCSANOW, attr)
+
+    def eofReceived(self):
+        if self.pty:
+            self.pty.closeStdin()
+
+    def closed(self):
+        if self.ptyTuple and os.path.exists(self.ptyTuple[2]):
+            ttyGID = os.stat(self.ptyTuple[2])[5]
+            os.chown(self.ptyTuple[2], 0, ttyGID)
+        if self.pty:
+            try:
+                self.pty.signalProcess('HUP')
+            except (OSError,ProcessExitedAlready):
+                pass
+            self.pty.loseConnection()
+            self.addUTMPEntry(0)
+        log.msg('shell closed')
+
+    def windowChanged(self, winSize):
+        self.winSize = winSize
+        fcntl.ioctl(self.pty.fileno(), tty.TIOCSWINSZ, 
+                        struct.pack('4H', *self.winSize))
+
+    def _writeHack(self, data):
+        """
+        Hack to send ignore messages when we aren't echoing.
+        """
+        if self.pty is not None:
+            attr = tty.tcgetattr(self.pty.fileno())[3]
+            if not attr & tty.ECHO and attr & tty.ICANON: # no echo
+                self.avatar.conn.transport.sendIgnore('\x00'*(8+len(data)))
+        self.oldWrite(data)
+
+
+class SFTPServerForUnixConchUser:
+
+    interface.implements(ISFTPServer)
+
+    def __init__(self, avatar):
+        self.avatar = avatar
+
+
+    def _setAttrs(self, path, attrs):
+        """
+        NOTE: this function assumes it runs as the logged-in user:
+        i.e. under _runAsUser()
+        """
+        if "uid" in attrs and "gid" in attrs:
+            os.chown(path, attrs["uid"], attrs["gid"])
+        if "permissions" in attrs:
+            os.chmod(path, attrs["permissions"])
+        if "atime" in attrs and "mtime" in attrs:
+            os.utime(path, (attrs["atime"], attrs["mtime"]))
+
+    def _getAttrs(self, s):
+        return {
+            "size" : s.st_size,
+            "uid" : s.st_uid,
+            "gid" : s.st_gid,
+            "permissions" : s.st_mode,
+            "atime" : int(s.st_atime),
+            "mtime" : int(s.st_mtime)
+        }
+
+    def _absPath(self, path):
+        home = self.avatar.getHomeDir()
+        return os.path.abspath(os.path.join(home, path))
+
+    def gotVersion(self, otherVersion, extData):
+        return {}
+
+    def openFile(self, filename, flags, attrs):
+        return UnixSFTPFile(self, self._absPath(filename), flags, attrs)
+
+    def removeFile(self, filename):
+        filename = self._absPath(filename)
+        return self.avatar._runAsUser(os.remove, filename)
+
+    def renameFile(self, oldpath, newpath):
+        oldpath = self._absPath(oldpath)
+        newpath = self._absPath(newpath)
+        return self.avatar._runAsUser(os.rename, oldpath, newpath)
+
+    def makeDirectory(self, path, attrs):
+        path = self._absPath(path)
+        return self.avatar._runAsUser([(os.mkdir, (path,)),
+                                (self._setAttrs, (path, attrs))])
+
+    def removeDirectory(self, path):
+        path = self._absPath(path)
+        self.avatar._runAsUser(os.rmdir, path)
+
+    def openDirectory(self, path):
+        return UnixSFTPDirectory(self, self._absPath(path))
+
+    def getAttrs(self, path, followLinks):
+        path = self._absPath(path)
+        if followLinks:
+            s = self.avatar._runAsUser(os.stat, path)
+        else:
+            s = self.avatar._runAsUser(os.lstat, path)
+        return self._getAttrs(s)
+
+    def setAttrs(self, path, attrs):
+        path = self._absPath(path)
+        self.avatar._runAsUser(self._setAttrs, path, attrs)
+
+    def readLink(self, path):
+        path = self._absPath(path)
+        return self.avatar._runAsUser(os.readlink, path)
+
+    def makeLink(self, linkPath, targetPath):
+        linkPath = self._absPath(linkPath)
+        targetPath = self._absPath(targetPath)
+        return self.avatar._runAsUser(os.symlink, targetPath, linkPath)
+
+    def realPath(self, path):
+        return os.path.realpath(self._absPath(path))
+
+    def extendedRequest(self, extName, extData):
+        raise NotImplementedError
+
+class UnixSFTPFile:
+
+    interface.implements(ISFTPFile)
+
+    def __init__(self, server, filename, flags, attrs):
+        self.server = server
+        openFlags = 0
+        if flags & FXF_READ == FXF_READ and flags & FXF_WRITE == 0:
+            openFlags = os.O_RDONLY
+        if flags & FXF_WRITE == FXF_WRITE and flags & FXF_READ == 0:
+            openFlags = os.O_WRONLY
+        if flags & FXF_WRITE == FXF_WRITE and flags & FXF_READ == FXF_READ:
+            openFlags = os.O_RDWR
+        if flags & FXF_APPEND == FXF_APPEND:
+            openFlags |= os.O_APPEND
+        if flags & FXF_CREAT == FXF_CREAT:
+            openFlags |= os.O_CREAT
+        if flags & FXF_TRUNC == FXF_TRUNC:
+            openFlags |= os.O_TRUNC
+        if flags & FXF_EXCL == FXF_EXCL:
+            openFlags |= os.O_EXCL
+        if "permissions" in attrs:
+            mode = attrs["permissions"]
+            del attrs["permissions"]
+        else:
+            mode = 0777
+        fd = server.avatar._runAsUser(os.open, filename, openFlags, mode)
+        if attrs:
+            server.avatar._runAsUser(server._setAttrs, filename, attrs)
+        self.fd = fd
+
+    def close(self):
+        return self.server.avatar._runAsUser(os.close, self.fd)
+
+    def readChunk(self, offset, length):
+        return self.server.avatar._runAsUser([ (os.lseek, (self.fd, offset, 0)),
+                                               (os.read, (self.fd, length)) ])
+
+    def writeChunk(self, offset, data):
+        return self.server.avatar._runAsUser([(os.lseek, (self.fd, offset, 0)),
+                                       (os.write, (self.fd, data))])
+
+    def getAttrs(self):
+        s = self.server.avatar._runAsUser(os.fstat, self.fd)
+        return self.server._getAttrs(s)
+
+    def setAttrs(self, attrs):
+        raise NotImplementedError
+
+
+class UnixSFTPDirectory:
+
+    def __init__(self, server, directory):
+        self.server = server
+        self.files = server.avatar._runAsUser(os.listdir, directory)
+        self.dir = directory
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        try:
+            f = self.files.pop(0)
+        except IndexError:
+            raise StopIteration
+        else:
+            s = self.server.avatar._runAsUser(os.lstat, os.path.join(self.dir, f))
+            longname = lsLine(f, s)
+            attrs = self.server._getAttrs(s)
+            return (f, longname, attrs)
+
+    def close(self):
+        self.files = []
+
+
+components.registerAdapter(SFTPServerForUnixConchUser, UnixConchUser, filetransfer.ISFTPServer)
+components.registerAdapter(SSHSessionForUnixConchUser, UnixConchUser, session.ISession)
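
The FXF_* to os.O_* translation in UnixSFTPFile.__init__ above is the heart of how SFTP openFile requests turn into POSIX open() calls.  A minimal standalone sketch of the same mapping, assuming the FXF_* constants from twisted.conch.ssh.filetransfer (they are used unqualified above, so unix.py imports them earlier in the file):

    import os
    from twisted.conch.ssh.filetransfer import (
        FXF_READ, FXF_WRITE, FXF_APPEND, FXF_CREAT, FXF_TRUNC, FXF_EXCL)

    def sftpFlagsToOpenFlags(flags):
        # Mirrors the branch structure of UnixSFTPFile.__init__ above.
        openFlags = 0
        if flags & FXF_READ and not flags & FXF_WRITE:
            openFlags = os.O_RDONLY          # O_RDONLY is 0 on POSIX
        if flags & FXF_WRITE and not flags & FXF_READ:
            openFlags = os.O_WRONLY
        if flags & FXF_WRITE and flags & FXF_READ:
            openFlags = os.O_RDWR
        if flags & FXF_APPEND:
            openFlags |= os.O_APPEND
        if flags & FXF_CREAT:
            openFlags |= os.O_CREAT
        if flags & FXF_TRUNC:
            openFlags |= os.O_TRUNC
        if flags & FXF_EXCL:
            openFlags |= os.O_EXCL
        return openFlags

    # A "write, create, truncate" open maps to O_WRONLY|O_CREAT|O_TRUNC:
    assert (sftpFlagsToOpenFlags(FXF_WRITE | FXF_CREAT | FXF_TRUNC)
            == os.O_WRONLY | os.O_CREAT | os.O_TRUNC)
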
diff --git a/ThirdParty/Twisted/twisted/copyright.py b/ThirdParty/Twisted/twisted/copyright.py
new file mode 100644
index 0000000..e449b57
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/copyright.py
@@ -0,0 +1,41 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Copyright information for Twisted.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted import __version__ as version, version as longversion
+
+longversion = str(longversion)
+
+copyright="""\
+Copyright (c) 2001-2012 Twisted Matrix Laboratories.
+See LICENSE for details."""
+
+disclaimer='''
+Twisted, the Framework of Your Internet
+%s
+
+Permission is hereby granted, free of charge, to any person obtaining
+a copy of this software and associated documentation files (the
+"Software"), to deal in the Software without restriction, including
+without limitation the rights to use, copy, modify, merge, publish,
+distribute, sublicense, and/or sell copies of the Software, and to
+permit persons to whom the Software is furnished to do so, subject to
+the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+''' % (copyright,)
diff --git a/ThirdParty/Twisted/twisted/cred/__init__.py b/ThirdParty/Twisted/twisted/cred/__init__.py
new file mode 100644
index 0000000..06e287f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/cred/__init__.py
@@ -0,0 +1,13 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Twisted Cred
+
+Support for verifying credentials, and providing services to users based on
+those credentials.
+
+(This package was previously known as the module twisted.internet.passport.)
+"""
diff --git a/ThirdParty/Twisted/twisted/cred/_digest.py b/ThirdParty/Twisted/twisted/cred/_digest.py
new file mode 100644
index 0000000..4640a1d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/cred/_digest.py
@@ -0,0 +1,129 @@
+# -*- test-case-name: twisted.test.test_digestauth -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Calculations for HTTP Digest authentication.
+
+@see: U{http://www.faqs.org/rfcs/rfc2617.html}
+"""
+
+from twisted.python.hashlib import md5, sha1
+
+
+
+# The digest math
+
+algorithms = {
+    'md5': md5,
+
+    # md5-sess is more complicated than just another algorithm.  It requires
+    # H(A1) state to be remembered from the first WWW-Authenticate challenge
+    # issued and re-used to process any Authorization header in response to
+    # that WWW-Authenticate challenge.  It is *not* correct to simply
+    # recalculate H(A1) each time an Authorization header is received.  Read
+    # RFC 2617, section 3.2.2.2 and do not try to make DigestCredentialFactory
+    # support this unless you completely understand it. -exarkun
+    'md5-sess': md5,
+
+    'sha': sha1,
+}
+
+# DigestCalcHA1
+def calcHA1(pszAlg, pszUserName, pszRealm, pszPassword, pszNonce, pszCNonce,
+            preHA1=None):
+    """
+    Compute H(A1) from RFC 2617.
+
+    @param pszAlg: The name of the algorithm to use to calculate the digest.
+        Currently supported are md5, md5-sess, and sha.
+    @param pszUserName: The username
+    @param pszRealm: The realm
+    @param pszPassword: The password
+    @param pszNonce: The nonce
+    @param pszCNonce: The cnonce
+
+    @param preHA1: If available this is a str containing a previously
+       calculated H(A1) as a hex string.  If this is given then the values for
+       pszUserName, pszRealm, and pszPassword must be C{None} and are ignored.
+    """
+
+    if (preHA1 and (pszUserName or pszRealm or pszPassword)):
+        raise TypeError(("preHA1 is incompatible with the pszUserName, "
+                         "pszRealm, and pszPassword arguments"))
+
+    if preHA1 is None:
+        # We need to calculate the HA1 from the username:realm:password
+        m = algorithms[pszAlg]()
+        m.update(pszUserName)
+        m.update(":")
+        m.update(pszRealm)
+        m.update(":")
+        m.update(pszPassword)
+        HA1 = m.digest()
+    else:
+        # We were given a previously-computed H(A1) as a hex string.
+        HA1 = preHA1.decode('hex')
+
+    if pszAlg == "md5-sess":
+        m = algorithms[pszAlg]()
+        m.update(HA1)
+        m.update(":")
+        m.update(pszNonce)
+        m.update(":")
+        m.update(pszCNonce)
+        HA1 = m.digest()
+
+    return HA1.encode('hex')
+
+
+def calcHA2(algo, pszMethod, pszDigestUri, pszQop, pszHEntity):
+    """
+    Compute H(A2) from RFC 2617.
+
+    @param algo: The name of the algorithm to use to calculate the digest.
+        Currently supported are md5, md5-sess, and sha.
+    @param pszMethod: The request method.
+    @param pszDigestUri: The request URI.
+    @param pszQop: The Quality-of-Protection value.
+    @param pszHEntity: The hash of the entity body or C{None} if C{pszQop} is
+        not C{'auth-int'}.
+    @return: The hash of the A2 value for the calculation of the response
+        digest.
+    """
+    m = algorithms[algo]()
+    m.update(pszMethod)
+    m.update(":")
+    m.update(pszDigestUri)
+    if pszQop == "auth-int":
+        m.update(":")
+        m.update(pszHEntity)
+    return m.digest().encode('hex')
+
+
+def calcResponse(HA1, HA2, algo, pszNonce, pszNonceCount, pszCNonce, pszQop):
+    """
+    Compute the digest for the given parameters.
+
+    @param HA1: The H(A1) value, as computed by L{calcHA1}.
+    @param HA2: The H(A2) value, as computed by L{calcHA2}.
+    @param pszNonce: The challenge nonce.
+    @param pszNonceCount: The (client) nonce count value for this response.
+    @param pszCNonce: The client nonce.
+    @param pszQop: The Quality-of-Protection value.
+    """
+    m = algorithms[algo]()
+    m.update(HA1)
+    m.update(":")
+    m.update(pszNonce)
+    m.update(":")
+    if pszNonceCount and pszCNonce:
+        m.update(pszNonceCount)
+        m.update(":")
+        m.update(pszCNonce)
+        m.update(":")
+        m.update(pszQop)
+        m.update(":")
+    m.update(HA2)
+    respHash = m.digest().encode('hex')
+    return respHash
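
Taken together, calcHA1, calcHA2 and calcResponse compute the response value a client sends for a qop="auth" Digest exchange.  A minimal sketch of that composition (Python 2, like the module itself; every concrete value below is illustrative only):

    from twisted.cred._digest import calcHA1, calcHA2, calcResponse

    algo = 'md5'
    nonce, cnonce, nc = 'dcd98b7102dd2f0e', '0a4f113b', '00000001'

    # H(A1) from username:realm:password; H(A2) from method and URI.
    ha1 = calcHA1(algo, 'alice', 'example.org', 's3cret', nonce, cnonce)
    ha2 = calcHA2(algo, 'GET', '/protected', 'auth', None)

    # The hex digest the client would place in the Authorization header's
    # response= field.
    response = calcResponse(ha1, ha2, algo, nonce, nc, cnonce, 'auth')
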
diff --git a/ThirdParty/Twisted/twisted/cred/checkers.py b/ThirdParty/Twisted/twisted/cred/checkers.py
new file mode 100644
index 0000000..523a94d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/cred/checkers.py
@@ -0,0 +1,268 @@
+# -*- test-case-name: twisted.test.test_newcred -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import os
+
+from zope.interface import implements, Interface, Attribute
+
+from twisted.internet import defer
+from twisted.python import failure, log
+from twisted.cred import error, credentials
+
+
+
+class ICredentialsChecker(Interface):
+    """
+    An object that can check sub-interfaces of ICredentials.
+    """
+
+    credentialInterfaces = Attribute(
+        'A list of sub-interfaces of ICredentials which specifies which I may check.')
+
+
+    def requestAvatarId(credentials):
+        """
+        @param credentials: something which implements one of the interfaces in
+        self.credentialInterfaces.
+
+        @return: a Deferred which will fire with a string which identifies an
+        avatar, with an empty tuple to specify an authenticated anonymous user
+        (provided as checkers.ANONYMOUS), or with a Failure(UnauthorizedLogin).
+        Alternatively, return the result itself.
+
+        @see: L{twisted.cred.credentials}
+        """
+
+
+
+# A note on anonymity - We do not want None as the value for anonymous
+# because it is too easy to accidentally return it.  We do not want the
+# empty string, because it is too easy to mistype a password file.  For
+# example, an .htpasswd file may contain the lines: ['hello:asdf',
+# 'world:asdf', 'goodbye', ':world'].  This misconfiguration will have an
+# ill effect in any case, but accidentally granting anonymous access is a
+# worse failure mode than simply granting access to an untypeable
+# username.  We do not want an instance of 'object', because that would
+# create potential problems with persistence.
+
+ANONYMOUS = ()
+
+
+class AllowAnonymousAccess:
+    implements(ICredentialsChecker)
+    credentialInterfaces = credentials.IAnonymous,
+
+    def requestAvatarId(self, credentials):
+        return defer.succeed(ANONYMOUS)
+
+
+class InMemoryUsernamePasswordDatabaseDontUse:
+    """
+    An extremely simple credentials checker.
+
+    This is only of use in one-off test programs or examples which don't
+    want to focus too much on how credentials are verified.
+
+    You really don't want to use this for anything else.  It is, at best, a
+    toy.  If you need a simple credentials checker for a real application,
+    see L{FilePasswordDB}.
+    """
+
+    implements(ICredentialsChecker)
+
+    credentialInterfaces = (credentials.IUsernamePassword,
+                            credentials.IUsernameHashedPassword)
+
+    def __init__(self, **users):
+        self.users = users
+
+    def addUser(self, username, password):
+        self.users[username] = password
+
+    def _cbPasswordMatch(self, matched, username):
+        if matched:
+            return username
+        else:
+            return failure.Failure(error.UnauthorizedLogin())
+
+    def requestAvatarId(self, credentials):
+        if credentials.username in self.users:
+            return defer.maybeDeferred(
+                credentials.checkPassword,
+                self.users[credentials.username]).addCallback(
+                self._cbPasswordMatch, str(credentials.username))
+        else:
+            return defer.fail(error.UnauthorizedLogin())
+
+
+class FilePasswordDB:
+    """A file-based, text-based username/password database.
+
+    Records in the datafile for this class are delimited by a particular
+    string.  The username appears in a fixed field of the columns delimited
+    by this string, as does the password.  Both fields are specifiable.  If
+    the passwords are not stored plaintext, a hash function must be supplied
+    to convert plaintext passwords to the form stored on disk and this
+    CredentialsChecker will only be able to check IUsernamePassword
+    credentials.  If the passwords are stored plaintext,
+    IUsernameHashedPassword credentials will be checkable as well.
+    """
+
+    implements(ICredentialsChecker)
+
+    cache = False
+    _credCache = None
+    _cacheTimestamp = 0
+
+    def __init__(self, filename, delim=':', usernameField=0, passwordField=1,
+                 caseSensitive=True, hash=None, cache=False):
+        """
+        @type filename: C{str}
+        @param filename: The name of the file from which to read username and
+        password information.
+
+        @type delim: C{str}
+        @param delim: The field delimiter used in the file.
+
+        @type usernameField: C{int}
+        @param usernameField: The index of the username after splitting a
+        line on the delimiter.
+
+        @type passwordField: C{int}
+        @param passwordField: The index of the password after splitting a
+        line on the delimiter.
+
+        @type caseSensitive: C{bool}
+        @param caseSensitive: If true, consider the case of the username when
+        performing a lookup.  Ignore it otherwise.
+
+        @type hash: Three-argument callable or C{None}
+        @param hash: A function used to transform the plaintext password
+        received over the network to a format suitable for comparison
+        against the version stored on disk.  The arguments to the callable
+        are the username, the network-supplied password, and the in-file
+        version of the password.  If the return value compares equal to the
+        version stored on disk, the credentials are accepted.
+
+        @type cache: C{bool}
+        @param cache: If true, maintain an in-memory cache of the
+        contents of the password file.  On lookups, the mtime of the
+        file will be checked, and the file will only be re-parsed if
+        the mtime is newer than when the cache was generated.
+        """
+        self.filename = filename
+        self.delim = delim
+        self.ufield = usernameField
+        self.pfield = passwordField
+        self.caseSensitive = caseSensitive
+        self.hash = hash
+        self.cache = cache
+
+        if self.hash is None:
+            # The passwords are stored plaintext.  We can support both
+            # plaintext and hashed passwords received over the network.
+            self.credentialInterfaces = (
+                credentials.IUsernamePassword,
+                credentials.IUsernameHashedPassword
+            )
+        else:
+            # The passwords are hashed on disk.  We can support only
+            # plaintext passwords received over the network.
+            self.credentialInterfaces = (
+                credentials.IUsernamePassword,
+            )
+
+
+    def __getstate__(self):
+        d = dict(vars(self))
+        for k in '_credCache', '_cacheTimestamp':
+            try:
+                del d[k]
+            except KeyError:
+                pass
+        return d
+
+
+    def _cbPasswordMatch(self, matched, username):
+        if matched:
+            return username
+        else:
+            return failure.Failure(error.UnauthorizedLogin())
+
+
+    def _loadCredentials(self):
+        try:
+            f = file(self.filename)
+        except:
+            log.err()
+            raise error.UnauthorizedLogin()
+        else:
+            for line in f:
+                line = line.rstrip()
+                parts = line.split(self.delim)
+
+                if self.ufield >= len(parts) or self.pfield >= len(parts):
+                    continue
+                if self.caseSensitive:
+                    yield parts[self.ufield], parts[self.pfield]
+                else:
+                    yield parts[self.ufield].lower(), parts[self.pfield]
+
+
+    def getUser(self, username):
+        if not self.caseSensitive:
+            username = username.lower()
+
+        if self.cache:
+            if self._credCache is None or os.path.getmtime(self.filename) > self._cacheTimestamp:
+                self._cacheTimestamp = os.path.getmtime(self.filename)
+                self._credCache = dict(self._loadCredentials())
+            return username, self._credCache[username]
+        else:
+            for u, p in self._loadCredentials():
+                if u == username:
+                    return u, p
+            raise KeyError(username)
+
+
+    def requestAvatarId(self, c):
+        try:
+            u, p = self.getUser(c.username)
+        except KeyError:
+            return defer.fail(error.UnauthorizedLogin())
+        else:
+            up = credentials.IUsernamePassword(c, None)
+            if self.hash:
+                if up is not None:
+                    h = self.hash(up.username, up.password, p)
+                    if h == p:
+                        return defer.succeed(u)
+                return defer.fail(error.UnauthorizedLogin())
+            else:
+                return defer.maybeDeferred(c.checkPassword, p
+                    ).addCallback(self._cbPasswordMatch, u)
+
+
+
+class PluggableAuthenticationModulesChecker:
+    implements(ICredentialsChecker)
+    credentialInterfaces = credentials.IPluggableAuthenticationModules,
+    service = 'Twisted'
+
+    def requestAvatarId(self, credentials):
+        try:
+            from twisted.cred import pamauth
+        except ImportError: # PyPAM is missing
+            return defer.fail(error.UnauthorizedLogin())
+        else:
+            d = pamauth.pamAuthenticate(self.service, credentials.username,
+                                        credentials.pamConversion)
+            d.addCallback(lambda x: credentials.username)
+            return d
+
+
+
+# For backwards compatibility
+# Allow access as the old name.
+OnDiskUsernamePasswordDatabase = FilePasswordDB
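
A short sketch of exercising FilePasswordDB directly against a plaintext user:password file; the temporary file and the account below are invented for the sketch:

    import os, tempfile

    from twisted.cred.checkers import FilePasswordDB
    from twisted.cred.credentials import UsernamePassword

    # Write a throwaway user:password file to check against.
    path = os.path.join(tempfile.mkdtemp(), 'passwd')
    with open(path, 'w') as f:
        f.write('alice:s3cret\nbob:hunter2\n')

    checker = FilePasswordDB(path, delim=':', usernameField=0, passwordField=1)
    d = checker.requestAvatarId(UsernamePassword('alice', 's3cret'))
    # The Deferred fires with the avatar id 'alice' on a match and errbacks
    # with UnauthorizedLogin for a wrong password or an unknown user.
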
diff --git a/ThirdParty/Twisted/twisted/cred/credentials.py b/ThirdParty/Twisted/twisted/cred/credentials.py
new file mode 100644
index 0000000..63fb44e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/cred/credentials.py
@@ -0,0 +1,483 @@
+# -*- test-case-name: twisted.test.test_newcred-*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from zope.interface import implements, Interface
+
+import hmac, time, random
+from twisted.python.hashlib import md5
+from twisted.python.randbytes import secureRandom
+from twisted.cred._digest import calcResponse, calcHA1, calcHA2
+from twisted.cred import error
+
+class ICredentials(Interface):
+    """
+    I check credentials.
+
+    Implementors _must_ specify the sub-interfaces of ICredentials
+    to which they conform, using zope.interface.implements().
+    """
+
+
+
+class IUsernameDigestHash(ICredentials):
+    """
+    This credential is used when a CredentialChecker has access to the hash
+    of the username:realm:password as in an Apache .htdigest file.
+    """
+    def checkHash(digestHash):
+        """
+        @param digestHash: The hashed username:realm:password to check against.
+
+        @return: C{True} if the credentials represented by this object match
+            the given hash, C{False} if they do not, or a L{Deferred} which
+            will be called back with one of these values.
+        """
+
+
+
+class IUsernameHashedPassword(ICredentials):
+    """
+    I encapsulate a username and a hashed password.
+
+    This credential is used when a hashed password is received from the
+    party requesting authentication.  CredentialCheckers which check this
+    kind of credential must store the passwords in plaintext form (or as
+    password-equivalent hashes) so that they can be hashed in a manner
+    appropriate for the particular credentials class.
+
+    @type username: C{str}
+    @ivar username: The username associated with these credentials.
+    """
+
+    def checkPassword(password):
+        """
+        Validate these credentials against the correct password.
+
+        @type password: C{str}
+        @param password: The correct, plaintext password against which to
+        check.
+
+        @rtype: C{bool} or L{Deferred}
+        @return: C{True} if the credentials represented by this object match the
+            given password, C{False} if they do not, or a L{Deferred} which will
+            be called back with one of these values.
+        """
+
+
+
+class IUsernamePassword(ICredentials):
+    """
+    I encapsulate a username and a plaintext password.
+
+    This encapsulates the case where the password received over the network
+    has been hashed with the identity function (that is, not at all).  The
+    CredentialsChecker may store the password in whatever format it desires;
+    it need only transform the stored password in a similar way before
+    performing the comparison.
+
+    @type username: C{str}
+    @ivar username: The username associated with these credentials.
+
+    @type password: C{str}
+    @ivar password: The password associated with these credentials.
+    """
+
+    def checkPassword(password):
+        """
+        Validate these credentials against the correct password.
+
+        @type password: C{str}
+        @param password: The correct, plaintext password against which to
+        check.
+
+        @rtype: C{bool} or L{Deferred}
+        @return: C{True} if the credentials represented by this object match the
+            given password, C{False} if they do not, or a L{Deferred} which will
+            be called back with one of these values.
+        """
+
+
+
+class IAnonymous(ICredentials):
+    """
+    I am an explicitly anonymous request for access.
+    """
+
+
+
+class DigestedCredentials(object):
+    """
+    Yet Another Simple HTTP Digest authentication scheme.
+    """
+    implements(IUsernameHashedPassword, IUsernameDigestHash)
+
+    def __init__(self, username, method, realm, fields):
+        self.username = username
+        self.method = method
+        self.realm = realm
+        self.fields = fields
+
+
+    def checkPassword(self, password):
+        """
+        Verify that the credentials represented by this object agree with the
+        given plaintext C{password} by hashing C{password} in the same way the
+        response hash represented by this object was generated and comparing
+        the results.
+        """
+        response = self.fields.get('response')
+        uri = self.fields.get('uri')
+        nonce = self.fields.get('nonce')
+        cnonce = self.fields.get('cnonce')
+        nc = self.fields.get('nc')
+        algo = self.fields.get('algorithm', 'md5').lower()
+        qop = self.fields.get('qop', 'auth')
+
+        expected = calcResponse(
+            calcHA1(algo, self.username, self.realm, password, nonce, cnonce),
+            calcHA2(algo, self.method, uri, qop, None),
+            algo, nonce, nc, cnonce, qop)
+
+        return expected == response
+
+
+    def checkHash(self, digestHash):
+        """
+        Verify that the credentials represented by this object agree with the
+        credentials represented by the I{H(A1)} given in C{digestHash}.
+
+        @param digestHash: A precomputed H(A1) value based on the username,
+            realm, and password associated with this credentials object.
+        """
+        response = self.fields.get('response')
+        uri = self.fields.get('uri')
+        nonce = self.fields.get('nonce')
+        cnonce = self.fields.get('cnonce')
+        nc = self.fields.get('nc')
+        algo = self.fields.get('algorithm', 'md5').lower()
+        qop = self.fields.get('qop', 'auth')
+
+        expected = calcResponse(
+            calcHA1(algo, None, None, None, nonce, cnonce, preHA1=digestHash),
+            calcHA2(algo, self.method, uri, qop, None),
+            algo, nonce, nc, cnonce, qop)
+
+        return expected == response
+
+
+
+class DigestCredentialFactory(object):
+    """
+    Support for RFC2617 HTTP Digest Authentication
+
+    @cvar CHALLENGE_LIFETIME_SECS: The number of seconds for which an
+        opaque should be valid.
+
+    @type privateKey: C{str}
+    @ivar privateKey: A random string used for generating the secure opaque.
+
+    @type algorithm: C{str}
+    @param algorithm: Case insensitive string specifying the hash algorithm to
+        use.  Must be either C{'md5'} or C{'sha'}.  C{'md5-sess'} is B{not}
+        supported.
+
+    @type authenticationRealm: C{str}
+    @param authenticationRealm: Case-sensitive string that specifies the realm
+        portion of the challenge.
+    """
+
+    CHALLENGE_LIFETIME_SECS = 15 * 60    # 15 minutes
+
+    scheme = "digest"
+
+    def __init__(self, algorithm, authenticationRealm):
+        self.algorithm = algorithm
+        self.authenticationRealm = authenticationRealm
+        self.privateKey = secureRandom(12)
+
+
+    def getChallenge(self, address):
+        """
+        Generate the challenge for use in the WWW-Authenticate header.
+
+        @param address: The client address to which this challenge is being
+        sent.
+
+        @return: The C{dict} that can be used to generate a WWW-Authenticate
+            header.
+        """
+        c = self._generateNonce()
+        o = self._generateOpaque(c, address)
+
+        return {'nonce': c,
+                'opaque': o,
+                'qop': 'auth',
+                'algorithm': self.algorithm,
+                'realm': self.authenticationRealm}
+
+
+    def _generateNonce(self):
+        """
+        Create a random value suitable for use as the nonce parameter of a
+        WWW-Authenticate challenge.
+
+        @rtype: C{str}
+        """
+        return secureRandom(12).encode('hex')
+
+
+    def _getTime(self):
+        """
+        Parameterize the time-based seed used in C{_generateOpaque}
+        so we can deterministically unit-test its behavior.
+        """
+        return time.time()
+
+
+    def _generateOpaque(self, nonce, clientip):
+        """
+        Generate an opaque to be returned to the client.  This is a unique
+        string that can be returned to us and verified.
+        """
+        # Now, what we do is encode the nonce, client ip and a timestamp in the
+        # opaque value with a suitable digest.
+        now = str(int(self._getTime()))
+        if clientip is None:
+            clientip = ''
+        key = "%s,%s,%s" % (nonce, clientip, now)
+        digest = md5(key + self.privateKey).hexdigest()
+        ekey = key.encode('base64')
+        return "%s-%s" % (digest, ekey.replace('\n', ''))
+
+
+    def _verifyOpaque(self, opaque, nonce, clientip):
+        """
+        Given the opaque and nonce from the request, as well as the IP address
+        of the client that made the request, verify that the opaque was
+        generated by us and that it is not too old.
+
+        @param opaque: The opaque value from the Digest response
+        @param nonce: The nonce value from the Digest response
+        @param clientip: The remote IP address of the client making the request
+            or C{None} if the request was submitted over a channel where this
+            does not make sense.
+
+        @return: C{True} if the opaque was successfully verified.
+
+        @raise error.LoginFailed: if C{opaque} could not be parsed or
+            contained the wrong values.
+        """
+        # First split the digest from the key
+        opaqueParts = opaque.split('-')
+        if len(opaqueParts) != 2:
+            raise error.LoginFailed('Invalid response, invalid opaque value')
+
+        if clientip is None:
+            clientip = ''
+
+        # Verify the key
+        key = opaqueParts[1].decode('base64')
+        keyParts = key.split(',')
+
+        if len(keyParts) != 3:
+            raise error.LoginFailed('Invalid response, invalid opaque value')
+
+        if keyParts[0] != nonce:
+            raise error.LoginFailed(
+                'Invalid response, incompatible opaque/nonce values')
+
+        if keyParts[1] != clientip:
+            raise error.LoginFailed(
+                'Invalid response, incompatible opaque/client values')
+
+        try:
+            when = int(keyParts[2])
+        except ValueError:
+            raise error.LoginFailed(
+                'Invalid response, invalid opaque/time values')
+
+        if (int(self._getTime()) - when >
+            DigestCredentialFactory.CHALLENGE_LIFETIME_SECS):
+
+            raise error.LoginFailed(
+                'Invalid response, incompatible opaque/nonce too old')
+
+        # Verify the digest
+        digest = md5(key + self.privateKey).hexdigest()
+        if digest != opaqueParts[0]:
+            raise error.LoginFailed('Invalid response, invalid opaque value')
+
+        return True
+
+
+    def decode(self, response, method, host):
+        """
+        Decode the given response and attempt to generate a
+        L{DigestedCredentials} from it.
+
+        @type response: C{str}
+    @param response: A string of comma-separated key=value pairs
+
+        @type method: C{str}
+        @param method: The action requested to which this response is addressed
+        (GET, POST, INVITE, OPTIONS, etc).
+
+        @type host: C{str}
+        @param host: The address the request was sent from.
+
+        @raise error.LoginFailed: If the response does not contain a username,
+            a nonce, an opaque, or if the opaque is invalid.
+
+        @return: L{DigestedCredentials}
+        """
+        def unq(s):
+            if s[0] == s[-1] == '"':
+                return s[1:-1]
+            return s
+        response = ' '.join(response.splitlines())
+        parts = response.split(',')
+
+        auth = {}
+
+        for (k, v) in [p.split('=', 1) for p in parts]:
+            auth[k.strip()] = unq(v.strip())
+
+        username = auth.get('username')
+        if not username:
+            raise error.LoginFailed('Invalid response, no username given.')
+
+        if 'opaque' not in auth:
+            raise error.LoginFailed('Invalid response, no opaque given.')
+
+        if 'nonce' not in auth:
+            raise error.LoginFailed('Invalid response, no nonce given.')
+
+        # Now verify the nonce/opaque values for this client
+        if self._verifyOpaque(auth.get('opaque'), auth.get('nonce'), host):
+            return DigestedCredentials(username,
+                                       method,
+                                       self.authenticationRealm,
+                                       auth)
+
+
+
+class CramMD5Credentials:
+    implements(IUsernameHashedPassword)
+
+    challenge = ''
+    response = ''
+
+    def __init__(self, host=None):
+        self.host = host
+
+    def getChallenge(self):
+        if self.challenge:
+            return self.challenge
+        # The data encoded in the first ready response contains an
+        # presumptively arbitrary string of random digits, a timestamp, and
+        # the fully-qualified primary host name of the server.  The syntax of
+        # the unencoded form must correspond to that of an RFC 822 'msg-id'
+        # [RFC822] as described in [POP3].
+        #   -- RFC 2195
+        r = random.randrange(0x7fffffff)
+        t = time.time()
+        self.challenge = '<%d.%d@%s>' % (r, t, self.host)
+        return self.challenge
+
+    def setResponse(self, response):
+        self.username, self.response = response.split(None, 1)
+
+    def moreChallenges(self):
+        return False
+
+    def checkPassword(self, password):
+        verify = hmac.HMAC(password, self.challenge).hexdigest()
+        return verify == self.response
+
+
+class UsernameHashedPassword:
+    implements(IUsernameHashedPassword)
+
+    def __init__(self, username, hashed):
+        self.username = username
+        self.hashed = hashed
+
+    def checkPassword(self, password):
+        return self.hashed == password
+
+
+class UsernamePassword:
+    implements(IUsernamePassword)
+
+    def __init__(self, username, password):
+        self.username = username
+        self.password = password
+
+    def checkPassword(self, password):
+        return self.password == password
+
+
+class Anonymous:
+    implements(IAnonymous)
+
+
+
+class ISSHPrivateKey(ICredentials):
+    """
+    L{ISSHPrivateKey} credentials encapsulate an SSH public key to be checked
+    against a user's private key.
+
+    @ivar username: The username associated with these credentials.
+    @type username: C{str}
+
+    @ivar algName: The algorithm name for the blob.
+    @type algName: C{str}
+
+    @ivar blob: The public key blob as sent by the client.
+    @type blob: C{str}
+
+    @ivar sigData: The data the signature was made from.
+    @type sigData: C{str}
+
+    @ivar signature: The signature of C{sigData}.  This is checked to verify that the user
+        owns the private key.
+    @type signature: C{str} or C{NoneType}
+    """
+
+
+
+class SSHPrivateKey:
+    implements(ISSHPrivateKey)
+    def __init__(self, username, algName, blob, sigData, signature):
+        self.username = username
+        self.algName = algName
+        self.blob = blob
+        self.sigData = sigData
+        self.signature = signature
+
+
+class IPluggableAuthenticationModules(ICredentials):
+    """I encapsulate the authentication of a user via PAM (Pluggable
+    Authentication Modules).  I use PyPAM (available from
+    http://www.tummy.com/Software/PyPam/index.html).
+
+    @ivar username: The username for the user being logged in.
+
+    @ivar pamConversion: A function that is called with a list of tuples
+    (message, messageType).  See the PAM documentation
+    for the meaning of messageType.  The function
+    returns a Deferred which will fire with a list
+    of (response, 0), one for each message.  The 0 is
+    currently unused, but is required by the PAM library.
+    """
+
+class PluggableAuthenticationModules:
+    implements(IPluggableAuthenticationModules)
+
+    def __init__(self, username, pamConversion):
+        self.username = username
+        self.pamConversion = pamConversion
+
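
A sketch of the full server-side round trip these classes support: issue a challenge with DigestCredentialFactory, build the kind of Authorization header a client would send back (computed here with the calcHA1/calcHA2/calcResponse helpers from twisted/cred/_digest.py above), decode it, and verify the password.  All concrete values are illustrative:

    from twisted.cred.credentials import DigestCredentialFactory
    from twisted.cred._digest import calcHA1, calcHA2, calcResponse

    factory = DigestCredentialFactory('md5', 'example.org')
    challenge = factory.getChallenge('10.0.0.1')   # dict for WWW-Authenticate

    # What a client would compute for GET /protected with password 's3cret'.
    nonce, opaque = challenge['nonce'], challenge['opaque']
    cnonce, nc = '0a4f113b', '00000001'
    ha1 = calcHA1('md5', 'alice', 'example.org', 's3cret', nonce, cnonce)
    ha2 = calcHA2('md5', 'GET', '/protected', 'auth', None)
    response = calcResponse(ha1, ha2, 'md5', nonce, nc, cnonce, 'auth')

    header = ('username="alice", realm="example.org", nonce="%s", '
              'uri="/protected", response="%s", opaque="%s", qop="auth", '
              'nc="%s", cnonce="%s", algorithm="md5"'
              % (nonce, response, opaque, nc, cnonce))

    creds = factory.decode(header, 'GET', '10.0.0.1')
    assert creds.checkPassword('s3cret')
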
diff --git a/ThirdParty/Twisted/twisted/cred/error.py b/ThirdParty/Twisted/twisted/cred/error.py
new file mode 100644
index 0000000..cce682b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/cred/error.py
@@ -0,0 +1,41 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Cred errors."""
+
+class Unauthorized(Exception):
+    """Standard unauthorized error."""
+
+
+
+class LoginFailed(Exception):
+    """
+    The user's request to log in failed for some reason.
+    """
+
+
+
+class UnauthorizedLogin(LoginFailed, Unauthorized):
+    """The user was not authorized to log in.
+    """
+
+
+
+class UnhandledCredentials(LoginFailed):
+    """A type of credentials was passed in with no knowledge of how to check
+    it.  This is a server configuration error - it means that a protocol was
+    connected to a Portal without a CredentialsChecker that can check all of its
+    potential authentication strategies.
+    """
+
+
+
+class LoginDenied(LoginFailed):
+    """
+    The realm rejected this login for some reason.
+    
+    Examples of reasons this might be raised include an avatar logging in
+    too frequently, a quota having been fully used, or the overall server
+    load being too high.
+    """
diff --git a/ThirdParty/Twisted/twisted/cred/pamauth.py b/ThirdParty/Twisted/twisted/cred/pamauth.py
new file mode 100644
index 0000000..12357df
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/cred/pamauth.py
@@ -0,0 +1,79 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Support for asynchronously authenticating using PAM.
+"""
+
+
+import PAM
+
+import getpass, threading, os
+
+from twisted.internet import threads, defer
+
+def pamAuthenticateThread(service, user, conv):
+    def _conv(items):
+        from twisted.internet import reactor
+        try:
+            d = conv(items)
+        except:
+            import traceback
+            traceback.print_exc()
+            return
+        ev = threading.Event()
+        def cb(r):
+            ev.r = (1, r)
+            ev.set()
+        def eb(e):
+            ev.r = (0, e)
+            ev.set()
+        reactor.callFromThread(d.addCallbacks, cb, eb)
+        ev.wait()
+        done = ev.r
+        if done[0]:
+            return done[1]
+        else:
+            raise done[1].type, done[1].value
+
+    return callIntoPAM(service, user, _conv)
+
+def callIntoPAM(service, user, conv):
+    """A testing hook; tests may replace this to avoid touching the real PAM stack.
+    """
+    pam = PAM.pam()
+    pam.start(service)
+    pam.set_item(PAM.PAM_USER, user)
+    pam.set_item(PAM.PAM_CONV, conv)
+    gid = os.getegid()
+    uid = os.geteuid()
+    os.setegid(0)
+    os.seteuid(0)
+    try:
+        pam.authenticate() # these will raise
+        pam.acct_mgmt()
+        return 1
+    finally:
+        os.setegid(gid)
+        os.seteuid(uid)
+
+def defConv(items):
+    resp = []
+    for i in range(len(items)):
+        message, kind = items[i]
+        if kind == 1: # password
+            p = getpass.getpass(message)
+            resp.append((p, 0))
+        elif kind == 2: # text
+            p = raw_input(message)
+            resp.append((p, 0))
+        elif kind in (3,4):
+            print message
+            resp.append(("", 0))
+        else:
+            return defer.fail('foo')
+    d = defer.succeed(resp)
+    return d
+
+def pamAuthenticate(service, user, conv):
+    return threads.deferToThread(pamAuthenticateThread, service, user, conv)
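
The conv callable is the asynchronous half of the PAM conversation: it receives a list of (message, messageType) pairs and must return a Deferred that fires a list of (response, 0) pairs, exactly as IPluggableAuthenticationModules describes in credentials.py above.  A sketch of a non-interactive conv and the call that drives it (this needs PyPAM, effective root for the seteuid/setegid dance in callIntoPAM, and a running reactor; the service name and account are made up):

    from twisted.internet import defer
    from twisted.cred import pamauth

    def fixedPasswordConv(items):
        # Answer echo-off prompts (messageType 1, typically "Password:") with
        # a fixed secret; everything else gets an empty response.
        responses = []
        for message, kind in items:
            if kind == 1:
                responses.append(('s3cret', 0))
            else:
                responses.append(('', 0))
        return defer.succeed(responses)

    d = pamauth.pamAuthenticate('login', 'alice', fixedPasswordConv)
    # Fires with 1 on success; errbacks if PAM rejects the credentials.
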
diff --git a/ThirdParty/Twisted/twisted/cred/portal.py b/ThirdParty/Twisted/twisted/cred/portal.py
new file mode 100644
index 0000000..bbb0af8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/cred/portal.py
@@ -0,0 +1,121 @@
+# -*- test-case-name: twisted.test.test_newcred -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+The point of integration of application and authentication.
+"""
+
+
+from twisted.internet import defer
+from twisted.internet.defer import maybeDeferred
+from twisted.python import failure, reflect
+from twisted.cred import error
+from zope.interface import providedBy, Interface
+
+
+class IRealm(Interface):
+    """
+    The realm connects application-specific objects to the
+    authentication system.
+    """
+    def requestAvatar(avatarId, mind, *interfaces):
+        """
+        Return an avatar which provides one of the given interfaces.
+
+        @param avatarId: a string that identifies an avatar, as returned by
+            L{ICredentialsChecker.requestAvatarId<twisted.cred.checkers.ICredentialsChecker.requestAvatarId>}
+            (via a Deferred).  Alternatively, it may be
+            C{twisted.cred.checkers.ANONYMOUS}.
+        @param mind: usually None.  See the description of mind in
+            L{Portal.login}.
+        @param interfaces: the interface(s) the returned avatar should
+            implement, e.g.  C{IMailAccount}.  See the description of
+            L{Portal.login}.
+
+        @returns: a deferred which will fire a tuple of (interface,
+            avatarAspect, logout), or the tuple itself.  The interface will be
+            one of the interfaces passed in the 'interfaces' argument.  The
+            'avatarAspect' will implement that interface.  The 'logout' object
+            is a callable which will detach the mind from the avatar.
+        """
+
+
+class Portal:
+    """
+    A mediator between clients and a realm.
+
+    A portal is associated with one Realm and zero or more credentials checkers.
+    When a login is attempted, the portal finds the appropriate credentials
+    checker for the credentials given, invokes it, and if the credentials are
+    valid, retrieves the appropriate avatar from the Realm.
+
+    This class is not intended to be subclassed.  Customization should be done
+    in the realm object and in the credentials checker objects.
+    """
+    def __init__(self, realm, checkers=()):
+        """
+        Create a Portal to a L{IRealm}.
+        """
+        self.realm = realm
+        self.checkers = {}
+        for checker in checkers:
+            self.registerChecker(checker)
+
+    def listCredentialsInterfaces(self):
+        """
+        Return a list of the credentials interfaces that can be used to log in.
+        """
+        return self.checkers.keys()
+
+    def registerChecker(self, checker, *credentialInterfaces):
+        if not credentialInterfaces:
+            credentialInterfaces = checker.credentialInterfaces
+        for credentialInterface in credentialInterfaces:
+            self.checkers[credentialInterface] = checker
+
+    def login(self, credentials, mind, *interfaces):
+        """
+        @param credentials: an implementor of
+            L{twisted.cred.credentials.ICredentials}
+
+        @param mind: an object which implements a client-side interface for
+            your particular realm.  In many cases, this may be None, so if the
+            word 'mind' confuses you, just ignore it.
+
+        @param interfaces: list of interfaces for the perspective that the mind
+            wishes to attach to. Usually, this will be only one interface, for
+            example IMailAccount. For highly dynamic protocols, however, this
+            may be a list like (IMailAccount, IUserChooser, IServiceInfo).  To
+            expand: if we are speaking to the system over IMAP, any information
+            that will be relayed to the user MUST be returned as an
+            IMailAccount implementor; IMAP clients would not be able to
+            understand anything else. Any information about unusual status
+            would have to be relayed as a single mail message in an
+            otherwise-empty mailbox. However, in a web-based mail system, or a
+            PB-based client, the ``mind'' object inside the web server
+            (implemented with a dynamic page-viewing mechanism such as a
+            Twisted Web Resource) or on the user's client program may be
+            intelligent enough to respond to several ``server''-side
+            interfaces.
+
+        @return: A deferred which will fire a tuple of (interface,
+            avatarAspect, logout).  The interface will be one of the interfaces
+            passed in the 'interfaces' argument.  The 'avatarAspect' will
+            implement that interface. The 'logout' object is a callable which
+            will detach the mind from the avatar. It must be called when the
+            user has conceptually disconnected from the service. Although in
+            some cases this will not be in connectionLost (such as in a
+            web-based session), it will always be at the end of a user's
+            interactive session.
+        """
+        for i in self.checkers:
+            if i.providedBy(credentials):
+                return maybeDeferred(self.checkers[i].requestAvatarId, credentials
+                    ).addCallback(self.realm.requestAvatar, mind, *interfaces
+                    )
+        ifac = providedBy(credentials)
+        return defer.fail(failure.Failure(error.UnhandledCredentials(
+            "No checker for %s" % ', '.join(map(reflect.qual, ifac)))))
+
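
A sketch tying the pieces together: a minimal realm, one checker, and Portal.login.  The IFoo interface and Foo avatar class are invented purely for the sketch:

    from zope.interface import Interface, implements

    from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
    from twisted.cred.credentials import UsernamePassword
    from twisted.cred.portal import IRealm, Portal

    class IFoo(Interface):
        """Hypothetical avatar interface, just for this sketch."""

    class Foo(object):
        implements(IFoo)

        def __init__(self, avatarId):
            self.avatarId = avatarId

    class FooRealm(object):
        implements(IRealm)

        def requestAvatar(self, avatarId, mind, *interfaces):
            if IFoo in interfaces:
                # (interface, avatarAspect, logout), as requestAvatar documents.
                return IFoo, Foo(avatarId), lambda: None
            raise NotImplementedError("only IFoo is supported")

    portal = Portal(FooRealm(),
                    [InMemoryUsernamePasswordDatabaseDontUse(alice='s3cret')])
    d = portal.login(UsernamePassword('alice', 's3cret'), None, IFoo)
    d.addCallback(lambda result: result[1].avatarId)   # fires with 'alice'
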
diff --git a/ThirdParty/Twisted/twisted/cred/strcred.py b/ThirdParty/Twisted/twisted/cred/strcred.py
new file mode 100644
index 0000000..5f99a16
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/cred/strcred.py
@@ -0,0 +1,270 @@
+# -*- test-case-name: twisted.test.test_strcred -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+
+"""
+Support for resolving command-line strings that represent different
+checkers available to cred.
+
+Examples:
+ - passwd:/etc/passwd
+ - memory:admin:asdf:user:lkj
+ - unix
+"""
+
+import sys
+
+from zope.interface import Interface, Attribute
+
+from twisted.plugin import getPlugins
+from twisted.python import usage
+
+
+
+class ICheckerFactory(Interface):
+    """
+    A factory for objects which provide
+    L{twisted.cred.checkers.ICredentialsChecker}.
+
+    It's implemented by twistd plugins creating checkers.
+    """
+
+    authType = Attribute(
+        'A tag that identifies the authentication method.')
+
+
+    authHelp = Attribute(
+        'A detailed (potentially multi-line) description of precisely '
+        'what functionality this CheckerFactory provides.')
+
+
+    argStringFormat = Attribute(
+        'A short (one-line) description of the argument string format.')
+
+
+    credentialInterfaces = Attribute(
+        'A list of credentials interfaces that this factory will support.')
+
+
+    def generateChecker(argstring):
+        """
+        Return an L{ICredentialsChecker} provider using the supplied
+        argument string.
+        """
+
+
+
+class StrcredException(Exception):
+    """
+    Base exception class for strcred.
+    """
+
+
+
+class InvalidAuthType(StrcredException):
+    """
+    Raised when a user provides an invalid identifier for the
+    authentication plugin (known as the authType).
+    """
+
+
+
+class InvalidAuthArgumentString(StrcredException):
+    """
+    Raised by an authentication plugin when the argument string
+    provided is formatted incorrectly.
+    """
+
+
+
+class UnsupportedInterfaces(StrcredException):
+    """
+    Raised when an application is given a checker to use that does not
+    provide any of the application's supported credentials interfaces.
+    """
+
+
+
+# This will be used to warn the users whenever they view help for an
+# authType that is not supported by the application.
+notSupportedWarning = ("WARNING: This authType is not supported by "
+                       "this application.")
+
+
+
+def findCheckerFactories():
+    """
+    Find all objects that implement L{ICheckerFactory}.
+    """
+    return getPlugins(ICheckerFactory)
+
+
+
+def findCheckerFactory(authType):
+    """
+    Find the first checker factory that supports the given authType.
+    """
+    for factory in findCheckerFactories():
+        if factory.authType == authType:
+            return factory
+    raise InvalidAuthType(authType)
+
+
+
+def makeChecker(description):
+    """
+    Returns an L{twisted.cred.checkers.ICredentialsChecker} based on the
+    contents of a descriptive string. Similar to
+    L{twisted.application.strports}.
+    """
+    if ':' in description:
+        authType, argstring = description.split(':', 1)
+    else:
+        authType = description
+        argstring = ''
+    return findCheckerFactory(authType).generateChecker(argstring)
+
+
+
+class AuthOptionMixin:
+    """
+    Defines helper methods that can be added on to any
+    L{usage.Options} subclass that needs authentication.
+
+    This mixin implements three new options methods:
+
+    The opt_auth method (--auth) will write two new values to the
+    'self' dictionary: C{credInterfaces} (a dict of lists) and
+    C{credCheckers} (a list).
+
+    The opt_help_auth method (--help-auth) will search for all
+    available checker plugins and list them for the user; it will exit
+    when finished.
+
+    The opt_help_auth_type method (--help-auth-type) will display
+    detailed help for a particular checker plugin.
+
+    @cvar supportedInterfaces: An iterable object that returns
+       credential interfaces which this application is able to support.
+
+    @cvar authOutput: A writeable object to which this options class
+        will send all help-related output. Default: L{sys.stdout}
+    """
+
+    supportedInterfaces = None
+    authOutput = sys.stdout
+
+
+    def supportsInterface(self, interface):
+        """
+        Returns whether a particular credentials interface is supported.
+        """
+        return (self.supportedInterfaces is None
+                or interface in self.supportedInterfaces)
+
+
+    def supportsCheckerFactory(self, factory):
+        """
+        Returns whether a checker factory will provide at least one of
+        the credentials interfaces that we care about.
+        """
+        for interface in factory.credentialInterfaces:
+            if self.supportsInterface(interface):
+                return True
+        return False
+
+
+    def addChecker(self, checker):
+        """
+        Add the supplied credentials checker to the Options class.
+        """
+        # First figure out which interfaces we're willing to support.
+        supported = []
+        if self.supportedInterfaces is None:
+            supported = checker.credentialInterfaces
+        else:
+            for interface in checker.credentialInterfaces:
+                if self.supportsInterface(interface):
+                    supported.append(interface)
+        if not supported:
+            raise UnsupportedInterfaces(checker.credentialInterfaces)
+        # If we get this far, then we know we can use this checker.
+        if 'credInterfaces' not in self:
+            self['credInterfaces'] = {}
+        if 'credCheckers' not in self:
+            self['credCheckers'] = []
+        self['credCheckers'].append(checker)
+        for interface in supported:
+            self['credInterfaces'].setdefault(interface, []).append(checker)
+
+
+    def opt_auth(self, description):
+        """
+        Specify an authentication method for the server.
+        """
+        try:
+            self.addChecker(makeChecker(description))
+        except UnsupportedInterfaces, e:
+            raise usage.UsageError(
+                'Auth plugin not supported: %s' % e.args[0])
+        except InvalidAuthType, e:
+            raise usage.UsageError(
+                'Auth plugin not recognized: %s' % e.args[0])
+        except Exception, e:
+            raise usage.UsageError('Unexpected error: %s' % e)
+
+
+    def _checkerFactoriesForOptHelpAuth(self):
+        """
+        Return the checker factories whose authTypes will be displayed by --help-auth.
+        This makes it a lot easier to test this module.
+        """
+        for factory in findCheckerFactories():
+            for interface in factory.credentialInterfaces:
+                if self.supportsInterface(interface):
+                    yield factory
+                    break
+
+
+    def opt_help_auth(self):
+        """
+        Show all authentication methods available.
+        """
+        self.authOutput.write("Usage: --auth AuthType[:ArgString]\n")
+        self.authOutput.write("For detailed help: --help-auth-type AuthType\n")
+        self.authOutput.write('\n')
+        # Figure out the right width for our columns
+        firstLength = 0
+        for factory in self._checkerFactoriesForOptHelpAuth():
+            if len(factory.authType) > firstLength:
+                firstLength = len(factory.authType)
+        formatString = '  %%-%is\t%%s\n' % firstLength
+        self.authOutput.write(formatString % ('AuthType', 'ArgString format'))
+        self.authOutput.write(formatString % ('========', '================'))
+        for factory in self._checkerFactoriesForOptHelpAuth():
+            self.authOutput.write(
+                formatString % (factory.authType, factory.argStringFormat))
+        self.authOutput.write('\n')
+        raise SystemExit(0)
+
+
+    def opt_help_auth_type(self, authType):
+        """
+        Show help for a particular authentication type.
+        """
+        try:
+            cf = findCheckerFactory(authType)
+        except InvalidAuthType:
+            raise usage.UsageError("Invalid auth type: %s" % authType)
+        self.authOutput.write("Usage: --auth %s[:ArgString]\n" % authType)
+        self.authOutput.write("ArgString format: %s\n" % cf.argStringFormat)
+        self.authOutput.write('\n')
+        for line in cf.authHelp.strip().splitlines():
+            self.authOutput.write('  %s\n' % line.rstrip())
+        self.authOutput.write('\n')
+        if not self.supportsCheckerFactory(cf):
+            self.authOutput.write('  %s\n' % notSupportedWarning)
+            self.authOutput.write('\n')
+        raise SystemExit(0)
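
A sketch of both entry points: makeChecker on a description string, and AuthOptionMixin mixed into a usage.Options subclass so --auth works on the command line.  The 'memory' authType is assumed to come from Twisted's bundled checker plugins, matching the example in the module docstring above:

    from twisted.cred import strcred
    from twisted.cred.credentials import IUsernamePassword
    from twisted.python import usage

    # Build a checker straight from a description string.
    checker = strcred.makeChecker('memory:admin:asdf')

    # Or accept --auth on the command line, restricted to checkers that can
    # verify username/password credentials.
    class Options(usage.Options, strcred.AuthOptionMixin):
        supportedInterfaces = (IUsernamePassword,)

    config = Options()
    config.parseOptions(['--auth', 'memory:admin:asdf'])
    checkers = config['credCheckers']   # [the checker built by the 'memory' plugin]
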
diff --git a/ThirdParty/Twisted/twisted/enterprise/__init__.py b/ThirdParty/Twisted/twisted/enterprise/__init__.py
new file mode 100644
index 0000000..06c6a61
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/enterprise/__init__.py
@@ -0,0 +1,9 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Twisted Enterprise: database support for Twisted services.
+"""
+
+__all__ = ['adbapi']
diff --git a/ThirdParty/Twisted/twisted/enterprise/adbapi.py b/ThirdParty/Twisted/twisted/enterprise/adbapi.py
new file mode 100644
index 0000000..0531d2d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/enterprise/adbapi.py
@@ -0,0 +1,483 @@
+# -*- test-case-name: twisted.test.test_adbapi -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An asynchronous mapping to U{DB-API 2.0<http://www.python.org/topics/database/DatabaseAPI-2.0.html>}.
+"""
+
+import sys
+
+from twisted.internet import threads
+from twisted.python import reflect, log
+from twisted.python.deprecate import deprecated
+from twisted.python.versions import Version
+
+
+
+class ConnectionLost(Exception):
+    """
+    This exception means that a db connection has been lost.  Client code may
+    try again.
+    """
+
+
+
+class Connection(object):
+    """
+    A wrapper for a DB-API connection instance.
+
+    The wrapper passes almost everything to the wrapped connection and so has
+    the same API. However, the Connection knows about its pool and also
+    handles reconnecting when the real connection dies.
+    """
+
+    def __init__(self, pool):
+        self._pool = pool
+        self._connection = None
+        self.reconnect()
+
+    def close(self):
+        # The way adbapi works right now means that closing a connection is
+        # a really bad thing as it leaves a dead connection associated with
+        # a thread in the thread pool.
+        # Really, I think closing a pooled connection should return it to the
+        # pool but that's handled by the runWithConnection method already so,
+        # rather than upsetting anyone by raising an exception, let's ignore
+        # the request
+        pass
+
+    def rollback(self):
+        if not self._pool.reconnect:
+            self._connection.rollback()
+            return
+
+        try:
+            self._connection.rollback()
+            curs = self._connection.cursor()
+            curs.execute(self._pool.good_sql)
+            curs.close()
+            self._connection.commit()
+            return
+        except:
+            log.err(None, "Rollback failed")
+
+        self._pool.disconnect(self._connection)
+
+        if self._pool.noisy:
+            log.msg("Connection lost.")
+
+        raise ConnectionLost()
+
+    def reconnect(self):
+        if self._connection is not None:
+            self._pool.disconnect(self._connection)
+        self._connection = self._pool.connect()
+
+    def __getattr__(self, name):
+        return getattr(self._connection, name)
+
+
+class Transaction:
+    """A lightweight wrapper for a DB-API 'cursor' object.
+
+    Relays attribute access to the DB cursor. That is, you can call
+    execute(), fetchall(), etc., and they will be called on the
+    underlying DB-API cursor object. Attributes will also be
+    retrieved from there.
+    """
+    _cursor = None
+
+    def __init__(self, pool, connection):
+        self._pool = pool
+        self._connection = connection
+        self.reopen()
+
+    def close(self):
+        _cursor = self._cursor
+        self._cursor = None
+        _cursor.close()
+
+    def reopen(self):
+        if self._cursor is not None:
+            self.close()
+
+        try:
+            self._cursor = self._connection.cursor()
+            return
+        except:
+            if not self._pool.reconnect:
+                raise
+            else:
+                log.err(None, "Cursor creation failed")
+
+        if self._pool.noisy:
+            log.msg('Connection lost, reconnecting')
+
+        self.reconnect()
+        self._cursor = self._connection.cursor()
+
+    def reconnect(self):
+        self._connection.reconnect()
+        self._cursor = None
+
+    def __getattr__(self, name):
+        return getattr(self._cursor, name)
+
+
+class ConnectionPool:
+    """
+    Represent a pool of connections to a DB-API 2.0 compliant database.
+
+    @ivar connectionFactory: factory for connections, default to L{Connection}.
+    @type connectionFactory: any callable.
+
+    @ivar transactionFactory: factory for transactions, default to
+        L{Transaction}.
+    @type transactionFactory: any callable
+
+    @ivar shutdownID: C{None} or a handle on the shutdown event trigger
+        which will be used to stop the connection pool workers when the
+        reactor stops.
+
+    @ivar _reactor: The reactor which will be used to schedule startup and
+        shutdown events.
+    @type _reactor: L{IReactorCore} provider
+    """
+
+    CP_ARGS = "min max name noisy openfun reconnect good_sql".split()
+
+    noisy = False # if true, generate informational log messages
+    min = 3 # minimum number of connections in pool
+    max = 5 # maximum number of connections in pool
+    name = None # Name to assign to thread pool for debugging
+    openfun = None # A function to call on new connections
+    reconnect = False # reconnect when connections fail
+    good_sql = 'select 1' # a query which should always succeed
+
+    running = False # true when the pool is operating
+    connectionFactory = Connection
+    transactionFactory = Transaction
+
+    # Initialize this to None so it's available in close() even if start()
+    # never runs.
+    shutdownID = None
+
+    def __init__(self, dbapiName, *connargs, **connkw):
+        """Create a new ConnectionPool.
+
+        Any positional or keyword arguments other than those documented here
+        are passed to the DB-API object when connecting. Use these arguments to
+        pass database names, usernames, passwords, etc.
+
+        @param dbapiName: an import string to use to obtain a DB-API compatible
+                          module (e.g. 'pyPgSQL.PgSQL')
+
+        @param cp_min: the minimum number of connections in pool (default 3)
+
+        @param cp_max: the maximum number of connections in pool (default 5)
+
+        @param cp_noisy: generate informational log messages during operation
+                         (default False)
+
+        @param cp_openfun: a callback invoked after every connect() on the
+                           underlying DB-API object. The callback is passed a
+                           new DB-API connection object.  This callback can
+                           setup per-connection state such as charset,
+                           timezone, etc.
+
+        @param cp_reconnect: detect connections which have failed and reconnect
+                             (default False). Failed connections may result in
+                             ConnectionLost exceptions, which indicate the
+                             query may need to be re-sent.
+
+        @param cp_good_sql: an SQL query which should always succeed and change
+                            no state (default 'select 1')
+
+        @param cp_reactor: use this reactor instead of the global reactor
+            (added in Twisted 10.2).
+        @type cp_reactor: L{IReactorCore} provider
+        """
+
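+        # Minimal construction sketch (illustrative only; it assumes the
+        # standard-library sqlite3 module and an example database path):
+        #
+        #     pool = ConnectionPool('sqlite3', 'example.db',
+        #                           cp_min=1, cp_max=2, cp_reconnect=True,
+        #                           check_same_thread=False)
+        #
+        # Arguments not named cp_* (here check_same_thread) are stored in
+        # connargs/connkw and forwarded to the DB-API connect() call.
+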
+        self.dbapiName = dbapiName
+        self.dbapi = reflect.namedModule(dbapiName)
+
+        if getattr(self.dbapi, 'apilevel', None) != '2.0':
+            log.msg('DB API module not DB API 2.0 compliant.')
+
+        if getattr(self.dbapi, 'threadsafety', 0) < 1:
+            log.msg('DB API module not sufficiently thread-safe.')
+
+        reactor = connkw.pop('cp_reactor', None)
+        if reactor is None:
+            from twisted.internet import reactor
+        self._reactor = reactor
+
+        self.connargs = connargs
+        self.connkw = connkw
+
+        for arg in self.CP_ARGS:
+            cp_arg = 'cp_%s' % arg
+            if cp_arg in connkw:
+                setattr(self, arg, connkw[cp_arg])
+                del connkw[cp_arg]
+
+        self.min = min(self.min, self.max)
+        self.max = max(self.min, self.max)
+
+        self.connections = {}  # all connections, hashed on thread id
+
+        # these are optional so import them here
+        from twisted.python import threadpool
+        import thread
+
+        self.threadID = thread.get_ident
+        self.threadpool = threadpool.ThreadPool(self.min, self.max)
+        self.startID = self._reactor.callWhenRunning(self._start)
+
+
+    def _start(self):
+        self.startID = None
+        return self.start()
+
+
+    def start(self):
+        """
+        Start the connection pool.
+
+        If you are using the reactor normally, this function does *not*
+        need to be called.
+        """
+        if not self.running:
+            self.threadpool.start()
+            self.shutdownID = self._reactor.addSystemEventTrigger(
+                'during', 'shutdown', self.finalClose)
+            self.running = True
+
+
+    def runWithConnection(self, func, *args, **kw):
+        """
+        Execute a function with a database connection and return the result.
+
+        @param func: A callable object of one argument which will be executed
+            in a thread with a connection from the pool.  It will be passed as
+            its first argument a L{Connection} instance (whose interface is
+            mostly identical to that of a connection object for your DB-API
+            module of choice), and its results will be returned as a Deferred.
+            If the method raises an exception the transaction will be rolled
+            back.  Otherwise, the transaction will be committed.  B{Note} that
+            this function is B{not} run in the main thread: it must be
+            threadsafe.
+
+        @param *args: positional arguments to be passed to func
+
+        @param **kw: keyword arguments to be passed to func
+
+        @return: a Deferred which will fire the return value of
+            C{func(Transaction(...), *args, **kw)}, or a Failure.
+        """
+        from twisted.internet import reactor
+        return threads.deferToThreadPool(reactor, self.threadpool,
+                                         self._runWithConnection,
+                                         func, *args, **kw)
+
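+    # Usage sketch for runWithConnection (illustrative only; `pool` is an
+    # already-constructed ConnectionPool and the `persons` table is assumed):
+    #
+    #     def addPerson(conn, name):
+    #         curs = conn.cursor()
+    #         try:
+    #             curs.execute("INSERT INTO persons (name) VALUES (?)", (name,))
+    #         finally:
+    #             curs.close()
+    #
+    #     d = pool.runWithConnection(addPerson, "alice")
+    #
+    # Commit on success and rollback on failure are handled by
+    # _runWithConnection below.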
+
+    def _runWithConnection(self, func, *args, **kw):
+        conn = self.connectionFactory(self)
+        try:
+            result = func(conn, *args, **kw)
+            conn.commit()
+            return result
+        except:
+            excType, excValue, excTraceback = sys.exc_info()
+            try:
+                conn.rollback()
+            except:
+                log.err(None, "Rollback failed")
+            raise excType, excValue, excTraceback
+
+
+    def runInteraction(self, interaction, *args, **kw):
+        """
+        Interact with the database and return the result.
+
+        The 'interaction' is a callable object which will be executed
+        in a thread using a pooled connection. It will be passed an
+        L{Transaction} object as an argument (whose interface is
+        identical to that of the database cursor for your DB-API
+        module of choice), and its results will be returned as a
+        Deferred. If running the method raises an exception, the
+        transaction will be rolled back. If the method returns a
+        value, the transaction will be committed.
+
+        NOTE that the function you pass is *not* run in the main
+        thread: you may have to worry about thread-safety in the
+        function you pass to this if it tries to use non-local
+        objects.
+
+        @param interaction: a callable object whose first argument
+            is an L{adbapi.Transaction}.
+
+        @param *args: additional positional arguments to be passed
+            to interaction
+
+        @param **kw: keyword arguments to be passed to interaction
+
+        @return: a Deferred which will fire the return value of
+            'interaction(Transaction(...), *args, **kw)', or a Failure.
+        """
+        from twisted.internet import reactor
+        return threads.deferToThreadPool(reactor, self.threadpool,
+                                         self._runInteraction,
+                                         interaction, *args, **kw)
+
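+    # Usage sketch for runInteraction (illustrative only; `pool` and the
+    # `persons` table are assumed):
+    #
+    #     def getAge(txn, name):
+    #         txn.execute("SELECT age FROM persons WHERE name = ?", (name,))
+    #         return txn.fetchall()
+    #
+    #     d = pool.runInteraction(getAge, "alice")
+    #     d.addCallback(lambda rows: rows[0][0] if rows else None)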
+
+    def runQuery(self, *args, **kw):
+        """Execute an SQL query and return the result.
+
+        A DB-API cursor will be invoked with cursor.execute(*args, **kw).
+        The exact nature of the arguments will depend on the specific flavor
+        of DB-API being used, but the first argument in *args will be an SQL
+        statement. The result of a subsequent cursor.fetchall() will be
+        fired to the Deferred which is returned. If either the 'execute' or
+        'fetchall' methods raise an exception, the transaction will be rolled
+        back and a Failure returned.
+
+        The *args and **kw arguments will be passed to the DB-API cursor's
+        'execute' method.
+
+        @return: a Deferred which will fire the return value of a DB-API
+        cursor's 'fetchall' method, or a Failure.
+        """
+        return self.runInteraction(self._runQuery, *args, **kw)
+
+
+    def runOperation(self, *args, **kw):
+        """Execute an SQL query and return None.
+
+        A DB-API cursor will be invoked with cursor.execute(*args, **kw).
+        The exact nature of the arguments will depend on the specific flavor
+        of DB-API being used, but the first argument in *args will be an SQL
+        statement. This method will not attempt to fetch any results from the
+        query and is thus suitable for INSERT, DELETE, and other SQL statements
+        which do not return values. If the 'execute' method raises an
+        exception, the transaction will be rolled back and a Failure returned.
+
+        The args and kw arguments will be passed to the DB-API cursor's
+        'execute' method.
+
+        @return: a Deferred which will fire None or a Failure.
+        """
+        return self.runInteraction(self._runOperation, *args, **kw)
+
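+    # Usage sketch for runQuery/runOperation (illustrative only; `pool` and
+    # the `persons` table are assumed):
+    #
+    #     d = pool.runQuery("SELECT name FROM persons")
+    #     d.addCallback(lambda rows: [name for (name,) in rows])
+    #
+    #     d = pool.runOperation("INSERT INTO persons (name) VALUES (?)",
+    #                           ("bob",))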
+
+    def close(self):
+        """
+        Close all pool connections and shutdown the pool.
+        """
+        if self.shutdownID:
+            self._reactor.removeSystemEventTrigger(self.shutdownID)
+            self.shutdownID = None
+        if self.startID:
+            self._reactor.removeSystemEventTrigger(self.startID)
+            self.startID = None
+        self.finalClose()
+
+    def finalClose(self):
+        """This should only be called by the shutdown trigger."""
+
+        self.shutdownID = None
+        self.threadpool.stop()
+        self.running = False
+        for conn in self.connections.values():
+            self._close(conn)
+        self.connections.clear()
+
+    def connect(self):
+        """Return a database connection when one becomes available.
+
+        This method blocks and should be run in a thread from the internal
+        threadpool. Don't call this method directly from non-threaded code.
+        Using this method outside the internal threadpool may exceed the
+        maximum number of connections in the pool.
+
+        @return: a database connection from the pool.
+        """
+
+        tid = self.threadID()
+        conn = self.connections.get(tid)
+        if conn is None:
+            if self.noisy:
+                log.msg('adbapi connecting: %s %s%s' % (self.dbapiName,
+                                                        self.connargs or '',
+                                                        self.connkw or ''))
+            conn = self.dbapi.connect(*self.connargs, **self.connkw)
+            if self.openfun != None:
+                self.openfun(conn)
+            self.connections[tid] = conn
+        return conn
+
+    def disconnect(self, conn):
+        """Disconnect a database connection associated with this pool.
+
+        Note: This function should only be used by the same thread which
+        called connect(). As with connect(), this function is not used
+        in normal non-threaded twisted code.
+        """
+        tid = self.threadID()
+        if conn is not self.connections.get(tid):
+            raise Exception("wrong connection for thread")
+        if conn is not None:
+            self._close(conn)
+            del self.connections[tid]
+
+
+    def _close(self, conn):
+        if self.noisy:
+            log.msg('adbapi closing: %s' % (self.dbapiName,))
+        try:
+            conn.close()
+        except:
+            log.err(None, "Connection close failed")
+
+
+    def _runInteraction(self, interaction, *args, **kw):
+        conn = self.connectionFactory(self)
+        trans = self.transactionFactory(self, conn)
+        try:
+            result = interaction(trans, *args, **kw)
+            trans.close()
+            conn.commit()
+            return result
+        except:
+            excType, excValue, excTraceback = sys.exc_info()
+            try:
+                conn.rollback()
+            except:
+                log.err(None, "Rollback failed")
+            raise excType, excValue, excTraceback
+
+
+    def _runQuery(self, trans, *args, **kw):
+        trans.execute(*args, **kw)
+        return trans.fetchall()
+
+    def _runOperation(self, trans, *args, **kw):
+        trans.execute(*args, **kw)
+
+    def __getstate__(self):
+        return {'dbapiName': self.dbapiName,
+                'min': self.min,
+                'max': self.max,
+                'noisy': self.noisy,
+                'reconnect': self.reconnect,
+                'good_sql': self.good_sql,
+                'connargs': self.connargs,
+                'connkw': self.connkw}
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+        self.__init__(self.dbapiName, *self.connargs, **self.connkw)
+
+
+__all__ = ['Transaction', 'ConnectionPool']
diff --git a/ThirdParty/Twisted/twisted/internet/__init__.py b/ThirdParty/Twisted/twisted/internet/__init__.py
new file mode 100644
index 0000000..a3d851d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/__init__.py
@@ -0,0 +1,12 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Twisted Internet: Asynchronous I/O and Events.
+
+Twisted Internet is a collection of compatible event-loops for Python. It contains
+the code to dispatch events to interested observers and a portable API so that
+observers need not care about which event loop is running. Thus, it is possible
+to use the same code for different loops, from Twisted's basic, yet portable,
+select-based loop to the loops of various GUI toolkits like GTK+ or Tk.
+"""
diff --git a/ThirdParty/Twisted/twisted/internet/_baseprocess.py b/ThirdParty/Twisted/twisted/internet/_baseprocess.py
new file mode 100644
index 0000000..0a06259
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_baseprocess.py
@@ -0,0 +1,62 @@
+# -*- test-case-name: twisted.test.test_process -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Cross-platform process-related functionality used by different
+L{IReactorProcess} implementations.
+"""
+
+from twisted.python.reflect import qual
+from twisted.python.deprecate import getWarningMethod
+from twisted.python.failure import Failure
+from twisted.python.log import err
+from twisted.persisted.styles import Ephemeral
+
+_missingProcessExited = ("Since Twisted 8.2, IProcessProtocol.processExited "
+                         "is required.  %s must implement it.")
+
+class BaseProcess(Ephemeral):
+    pid = None
+    status = None
+    lostProcess = 0
+    proto = None
+
+    def __init__(self, protocol):
+        self.proto = protocol
+
+
+    def _callProcessExited(self, reason):
+        default = object()
+        processExited = getattr(self.proto, 'processExited', default)
+        if processExited is default:
+            getWarningMethod()(
+                _missingProcessExited % (qual(self.proto.__class__),),
+                DeprecationWarning, stacklevel=0)
+        else:
+            processExited(Failure(reason))
+
+
+    def processEnded(self, status):
+        """
+        This is called when the child terminates.
+        """
+        self.status = status
+        self.lostProcess += 1
+        self.pid = None
+        self._callProcessExited(self._getReason(status))
+        self.maybeCallProcessEnded()
+
+
+    def maybeCallProcessEnded(self):
+        """
+        Call processEnded on protocol after final cleanup.
+        """
+        if self.proto is not None:
+            reason = self._getReason(self.status)
+            proto = self.proto
+            self.proto = None
+            try:
+                proto.processEnded(Failure(reason))
+            except:
+                err(None, "unexpected error in processEnded")
diff --git a/ThirdParty/Twisted/twisted/internet/_dumbwin32proc.py b/ThirdParty/Twisted/twisted/internet/_dumbwin32proc.py
new file mode 100644
index 0000000..0df82ae
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_dumbwin32proc.py
@@ -0,0 +1,388 @@
+# -*- test-case-name: twisted.test.test_process -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+http://isometric.sixsided.org/_/gates_in_the_head/
+"""
+
+import os
+
+# Win32 imports
+import win32api
+import win32con
+import win32event
+import win32file
+import win32pipe
+import win32process
+import win32security
+
+import pywintypes
+
+# security attributes for pipes
+PIPE_ATTRS_INHERITABLE = win32security.SECURITY_ATTRIBUTES()
+PIPE_ATTRS_INHERITABLE.bInheritHandle = 1
+
+from zope.interface import implements
+from twisted.internet.interfaces import IProcessTransport, IConsumer, IProducer
+
+from twisted.python.win32 import quoteArguments
+
+from twisted.internet import error
+
+from twisted.internet import _pollingfile
+from twisted.internet._baseprocess import BaseProcess
+
+def debug(msg):
+    import sys
+    print msg
+    sys.stdout.flush()
+
+class _Reaper(_pollingfile._PollableResource):
+
+    def __init__(self, proc):
+        self.proc = proc
+
+    def checkWork(self):
+        if win32event.WaitForSingleObject(self.proc.hProcess, 0) != win32event.WAIT_OBJECT_0:
+            return 0
+        exitCode = win32process.GetExitCodeProcess(self.proc.hProcess)
+        self.deactivate()
+        self.proc.processEnded(exitCode)
+        return 0
+
+
+def _findShebang(filename):
+    """
+    Look for a #! line, and return the value following the #! if one exists, or
+    None if this file is not a script.
+
+    I don't know if there are any conventions for quoting in Windows shebang
+    lines, so this doesn't support any; therefore, you may not pass any
+    arguments to scripts invoked as filters.  That's probably wrong, so if
+    somebody knows more about the cultural expectations on Windows, please feel
+    free to fix.
+
+    This shebang line support was added in support of the CGI tests;
+    appropriately enough, I determined that shebang lines are culturally
+    accepted in the Windows world through this page::
+
+        http://www.cgi101.com/learn/connect/winxp.html
+
+    @param filename: str representing a filename
+
+    @return: a str representing another filename.
+    """
+    f = file(filename, 'rU')
+    if f.read(2) == '#!':
+        exe = f.readline(1024).strip('\n')
+        return exe
+
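+# For example, for a script whose first line is "#!C:\Python27\python.exe -u"
+# (an illustrative path), _findShebang returns "C:\Python27\python.exe -u".
+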
+def _invalidWin32App(pywinerr):
+    """
+    Determine if a pywintypes.error is telling us that the given process is
+    'not a valid win32 application', i.e. not a PE format executable.
+
+    @param pywinerr: a pywintypes.error instance raised by CreateProcess
+
+    @return: a boolean
+    """
+
+    # Let's do this better in the future, but I have no idea what this error
+    # is; MSDN doesn't mention it, and there is no symbolic constant in
+    # win32process module that represents 193.
+
+    return pywinerr.args[0] == 193
+
+class Process(_pollingfile._PollingTimer, BaseProcess):
+    """A process that integrates with the Twisted event loop.
+
+    If your subprocess is a python program, you need to:
+
+     - Run python.exe with the '-u' command line option - this turns on
+       unbuffered I/O. Buffering stdout/err/in can cause problems, see e.g.
+       http://support.microsoft.com/default.aspx?scid=kb;EN-US;q1903
+
+     - If you don't want Windows messing with data passed over
+       stdin/out/err, set the pipes to be in binary mode::
+
+        import os, sys, msvcrt
+        msvcrt.setmode(sys.stdin.fileno(), os.O_BINARY)
+        msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
+        msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
+
+    """
+    implements(IProcessTransport, IConsumer, IProducer)
+
+    closedNotifies = 0
+
+    def __init__(self, reactor, protocol, command, args, environment, path):
+        """
+        Create a new child process.
+        """
+        _pollingfile._PollingTimer.__init__(self, reactor)
+        BaseProcess.__init__(self, protocol)
+
+        # security attributes for pipes
+        sAttrs = win32security.SECURITY_ATTRIBUTES()
+        sAttrs.bInheritHandle = 1
+
+        # create the pipes which will connect to the secondary process
+        self.hStdoutR, hStdoutW = win32pipe.CreatePipe(sAttrs, 0)
+        self.hStderrR, hStderrW = win32pipe.CreatePipe(sAttrs, 0)
+        hStdinR,  self.hStdinW  = win32pipe.CreatePipe(sAttrs, 0)
+
+        win32pipe.SetNamedPipeHandleState(self.hStdinW,
+                                          win32pipe.PIPE_NOWAIT,
+                                          None,
+                                          None)
+
+        # set the info structure for the new process.
+        StartupInfo = win32process.STARTUPINFO()
+        StartupInfo.hStdOutput = hStdoutW
+        StartupInfo.hStdError  = hStderrW
+        StartupInfo.hStdInput  = hStdinR
+        StartupInfo.dwFlags = win32process.STARTF_USESTDHANDLES
+
+        # Create new handles whose inheritance property is false
+        currentPid = win32api.GetCurrentProcess()
+
+        tmp = win32api.DuplicateHandle(currentPid, self.hStdoutR, currentPid, 0, 0,
+                                       win32con.DUPLICATE_SAME_ACCESS)
+        win32file.CloseHandle(self.hStdoutR)
+        self.hStdoutR = tmp
+
+        tmp = win32api.DuplicateHandle(currentPid, self.hStderrR, currentPid, 0, 0,
+                                       win32con.DUPLICATE_SAME_ACCESS)
+        win32file.CloseHandle(self.hStderrR)
+        self.hStderrR = tmp
+
+        tmp = win32api.DuplicateHandle(currentPid, self.hStdinW, currentPid, 0, 0,
+                                       win32con.DUPLICATE_SAME_ACCESS)
+        win32file.CloseHandle(self.hStdinW)
+        self.hStdinW = tmp
+
+        # Add the specified environment to the current environment - this is
+        # necessary because certain operations are only supported on Windows
+        # if certain environment variables are present.
+
+        env = os.environ.copy()
+        env.update(environment or {})
+
+        cmdline = quoteArguments(args)
+        # TODO: error detection here.  See #2787 and #4184.
+        def doCreate():
+            self.hProcess, self.hThread, self.pid, dwTid = win32process.CreateProcess(
+                command, cmdline, None, None, 1, 0, env, path, StartupInfo)
+        try:
+            try:
+                doCreate()
+            except TypeError, e:
+                # win32process.CreateProcess cannot deal with mixed
+                # str/unicode environment, so we make it all Unicode
+                if e.args != ('All dictionary items must be strings, or '
+                              'all must be unicode',):
+                    raise
+                newenv = {}
+                for key, value in env.items():
+                    newenv[unicode(key)] = unicode(value)
+                env = newenv
+                doCreate()
+        except pywintypes.error, pwte:
+            if not _invalidWin32App(pwte):
+                # This behavior isn't _really_ documented, but let's make it
+                # consistent with the behavior that is documented.
+                raise OSError(pwte)
+            else:
+                # look for a shebang line.  Insert the original 'command'
+                # (actually a script) into the new arguments list.
+                sheb = _findShebang(command)
+                if sheb is None:
+                    raise OSError(
+                        "%r is neither a Windows executable, "
+                        "nor a script with a shebang line" % command)
+                else:
+                    args = list(args)
+                    args.insert(0, command)
+                    cmdline = quoteArguments(args)
+                    origcmd = command
+                    command = sheb
+                    try:
+                        # Let's try again.
+                        doCreate()
+                    except pywintypes.error, pwte2:
+                        # d'oh, failed again!
+                        if _invalidWin32App(pwte2):
+                            raise OSError(
+                                "%r has an invalid shebang line: "
+                                "%r is not a valid executable" % (
+                                    origcmd, sheb))
+                        raise OSError(pwte2)
+
+        # close handles which only the child will use
+        win32file.CloseHandle(hStderrW)
+        win32file.CloseHandle(hStdoutW)
+        win32file.CloseHandle(hStdinR)
+
+        # set up everything
+        self.stdout = _pollingfile._PollableReadPipe(
+            self.hStdoutR,
+            lambda data: self.proto.childDataReceived(1, data),
+            self.outConnectionLost)
+
+        self.stderr = _pollingfile._PollableReadPipe(
+                self.hStderrR,
+                lambda data: self.proto.childDataReceived(2, data),
+                self.errConnectionLost)
+
+        self.stdin = _pollingfile._PollableWritePipe(
+            self.hStdinW, self.inConnectionLost)
+
+        for pipewatcher in self.stdout, self.stderr, self.stdin:
+            self._addPollableResource(pipewatcher)
+
+
+        # notify protocol
+        self.proto.makeConnection(self)
+
+        self._addPollableResource(_Reaper(self))
+
+
+    def signalProcess(self, signalID):
+        if self.pid is None:
+            raise error.ProcessExitedAlready()
+        if signalID in ("INT", "TERM", "KILL"):
+            win32process.TerminateProcess(self.hProcess, 1)
+
+
+    def _getReason(self, status):
+        if status == 0:
+            return error.ProcessDone(status)
+        return error.ProcessTerminated(status)
+
+
+    def write(self, data):
+        """
+        Write data to the process' stdin.
+
+        @type data: C{str}
+        """
+        self.stdin.write(data)
+
+
+    def writeSequence(self, seq):
+        """
+        Write data to the process' stdin.
+
+        @type seq: C{list} of C{str}
+        """
+        self.stdin.writeSequence(seq)
+
+
+    def writeToChild(self, fd, data):
+        """
+        Similar to L{ITransport.write} but also allows the file descriptor in
+        the child process which will receive the bytes to be specified.
+
+        This implementation is limited to writing to the child's standard input.
+
+        @param fd: The file descriptor to which to write.  Only stdin (C{0}) is
+            supported.
+        @type fd: C{int}
+
+        @param data: The bytes to write.
+        @type data: C{str}
+
+        @return: C{None}
+
+        @raise KeyError: If C{fd} is anything other than the stdin file
+            descriptor (C{0}).
+        """
+        if fd == 0:
+            self.stdin.write(data)
+        else:
+            raise KeyError(fd)
+
+
+    def closeChildFD(self, fd):
+        if fd == 0:
+            self.closeStdin()
+        elif fd == 1:
+            self.closeStdout()
+        elif fd == 2:
+            self.closeStderr()
+        else:
+            raise NotImplementedError("Only standard-IO file descriptors available on win32")
+
+    def closeStdin(self):
+        """Close the process' stdin.
+        """
+        self.stdin.close()
+
+    def closeStderr(self):
+        self.stderr.close()
+
+    def closeStdout(self):
+        self.stdout.close()
+
+    def loseConnection(self):
+        """Close the process' stdout, in and err."""
+        self.closeStdin()
+        self.closeStdout()
+        self.closeStderr()
+
+
+    def outConnectionLost(self):
+        self.proto.childConnectionLost(1)
+        self.connectionLostNotify()
+
+
+    def errConnectionLost(self):
+        self.proto.childConnectionLost(2)
+        self.connectionLostNotify()
+
+
+    def inConnectionLost(self):
+        self.proto.childConnectionLost(0)
+        self.connectionLostNotify()
+
+
+    def connectionLostNotify(self):
+        """
+        Will be called 3 times, by stdout/err threads and process handle.
+        """
+        self.closedNotifies += 1
+        self.maybeCallProcessEnded()
+
+
+    def maybeCallProcessEnded(self):
+        if self.closedNotifies == 3 and self.lostProcess:
+            win32file.CloseHandle(self.hProcess)
+            win32file.CloseHandle(self.hThread)
+            self.hProcess = None
+            self.hThread = None
+            BaseProcess.maybeCallProcessEnded(self)
+
+
+    # IConsumer
+    def registerProducer(self, producer, streaming):
+        self.stdin.registerProducer(producer, streaming)
+
+    def unregisterProducer(self):
+        self.stdin.unregisterProducer()
+
+    # IProducer
+    def pauseProducing(self):
+        self._pause()
+
+    def resumeProducing(self):
+        self._unpause()
+
+    def stopProducing(self):
+        self.loseConnection()
+
+    def __repr__(self):
+        """
+        Return a string representation of the process.
+        """
+        return "<%s pid=%s>" % (self.__class__.__name__, self.pid)
diff --git a/ThirdParty/Twisted/twisted/internet/_endpointspy3.py b/ThirdParty/Twisted/twisted/internet/_endpointspy3.py
new file mode 100644
index 0000000..7061e4b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_endpointspy3.py
@@ -0,0 +1,483 @@
+# -*- test-case-name: twisted.internet.test.test_endpointspy3 -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+"""
+Implementations of endpoints that have been ported to Python 3.
+"""
+
+from __future__ import division, absolute_import
+
+import socket
+
+from zope.interface import implementer, directlyProvides
+
+from twisted.internet import interfaces, defer, error, threads
+from twisted.internet.protocol import ClientFactory, Protocol
+from twisted.internet.abstract import isIPv6Address
+
+class _WrappingProtocol(Protocol):
+    """
+    Wrap another protocol in order to notify my user when a connection has
+    been made.
+    """
+
+    def __init__(self, connectedDeferred, wrappedProtocol):
+        """
+        @param connectedDeferred: The L{Deferred} that will callback
+            with the C{wrappedProtocol} when it is connected.
+
+        @param wrappedProtocol: An L{IProtocol} provider that will be
+            connected.
+        """
+        self._connectedDeferred = connectedDeferred
+        self._wrappedProtocol = wrappedProtocol
+
+        for iface in [interfaces.IHalfCloseableProtocol,
+                      interfaces.IFileDescriptorReceiver]:
+            if iface.providedBy(self._wrappedProtocol):
+                directlyProvides(self, iface)
+
+
+    def logPrefix(self):
+        """
+        Transparently pass through the wrapped protocol's log prefix.
+        """
+        if interfaces.ILoggingContext.providedBy(self._wrappedProtocol):
+            return self._wrappedProtocol.logPrefix()
+        return self._wrappedProtocol.__class__.__name__
+
+
+    def connectionMade(self):
+        """
+        Connect the C{self._wrappedProtocol} to our C{self.transport} and
+        callback C{self._connectedDeferred} with the C{self._wrappedProtocol}
+        """
+        self._wrappedProtocol.makeConnection(self.transport)
+        self._connectedDeferred.callback(self._wrappedProtocol)
+
+
+    def dataReceived(self, data):
+        """
+        Proxy C{dataReceived} calls to our C{self._wrappedProtocol}
+        """
+        return self._wrappedProtocol.dataReceived(data)
+
+
+    def fileDescriptorReceived(self, descriptor):
+        """
+        Proxy C{fileDescriptorReceived} calls to our C{self._wrappedProtocol}
+        """
+        return self._wrappedProtocol.fileDescriptorReceived(descriptor)
+
+
+    def connectionLost(self, reason):
+        """
+        Proxy C{connectionLost} calls to our C{self._wrappedProtocol}
+        """
+        return self._wrappedProtocol.connectionLost(reason)
+
+
+    def readConnectionLost(self):
+        """
+        Proxy L{IHalfCloseableProtocol.readConnectionLost} to our
+        C{self._wrappedProtocol}
+        """
+        self._wrappedProtocol.readConnectionLost()
+
+
+    def writeConnectionLost(self):
+        """
+        Proxy L{IHalfCloseableProtocol.writeConnectionLost} to our
+        C{self._wrappedProtocol}
+        """
+        self._wrappedProtocol.writeConnectionLost()
+
+
+
+class _WrappingFactory(ClientFactory):
+    """
+    Wrap a factory in order to wrap the protocols it builds.
+
+    @ivar _wrappedFactory: A provider of I{IProtocolFactory} whose buildProtocol
+        method will be called and whose resulting protocol will be wrapped.
+
+    @ivar _onConnection: A L{Deferred} that fires when the protocol is
+        connected
+
+    @ivar _connector: A L{connector <twisted.internet.interfaces.IConnector>}
+        that is managing the current or previous connection attempt.
+    """
+    protocol = _WrappingProtocol
+
+    def __init__(self, wrappedFactory):
+        """
+        @param wrappedFactory: A provider of I{IProtocolFactory} whose
+            buildProtocol method will be called and whose resulting protocol
+            will be wrapped.
+        """
+        self._wrappedFactory = wrappedFactory
+        self._onConnection = defer.Deferred(canceller=self._canceller)
+
+
+    def startedConnecting(self, connector):
+        """
+        A connection attempt was started.  Remember the connector which started
+        said attempt, for use later.
+        """
+        self._connector = connector
+
+
+    def _canceller(self, deferred):
+        """
+        The outgoing connection attempt was cancelled.  Fail that L{Deferred}
+        with an L{error.ConnectingCancelledError}.
+
+        @param deferred: The L{Deferred <defer.Deferred>} that was cancelled;
+            should be the same as C{self._onConnection}.
+        @type deferred: L{Deferred <defer.Deferred>}
+
+        @note: This relies on startedConnecting having been called, so it may
+            seem as though there's a race condition where C{_connector} may not
+            have been set.  However, using public APIs, this condition is
+            impossible to catch, because a connection API
+            (C{connectTCP}/C{SSL}/C{UNIX}) is always invoked before a
+            L{_WrappingFactory}'s L{Deferred <defer.Deferred>} is returned to
+            C{connect()}'s caller.
+
+        @return: C{None}
+        """
+        deferred.errback(
+            error.ConnectingCancelledError(
+                self._connector.getDestination()))
+        self._connector.stopConnecting()
+
+
+    def doStart(self):
+        """
+        Start notifications are passed straight through to the wrapped factory.
+        """
+        self._wrappedFactory.doStart()
+
+
+    def doStop(self):
+        """
+        Stop notifications are passed straight through to the wrapped factory.
+        """
+        self._wrappedFactory.doStop()
+
+
+    def buildProtocol(self, addr):
+        """
+        Proxy C{buildProtocol} to our C{self._wrappedFactory} or errback
+        the C{self._onConnection} L{Deferred}.
+
+        @return: An instance of L{_WrappingProtocol} or C{None}
+        """
+        try:
+            proto = self._wrappedFactory.buildProtocol(addr)
+        except:
+            self._onConnection.errback()
+        else:
+            return self.protocol(self._onConnection, proto)
+
+
+    def clientConnectionFailed(self, connector, reason):
+        """
+        Errback the C{self._onConnection} L{Deferred} when the
+        client connection fails.
+        """
+        if not self._onConnection.called:
+            self._onConnection.errback(reason)
+
+
+
+@implementer(interfaces.IStreamServerEndpoint)
+class _TCPServerEndpoint(object):
+    """
+    A TCP server endpoint interface
+    """
+
+    def __init__(self, reactor, port, backlog, interface):
+        """
+        @param reactor: An L{IReactorTCP} provider.
+
+        @param port: The port number used for listening
+        @type port: int
+
+        @param backlog: Size of the listen queue
+        @type backlog: int
+
+        @param interface: The hostname to bind to
+        @type interface: str
+        """
+        self._reactor = reactor
+        self._port = port
+        self._backlog = backlog
+        self._interface = interface
+
+
+    def listen(self, protocolFactory):
+        """
+        Implement L{IStreamServerEndpoint.listen} to listen on a TCP socket
+        """
+        return defer.execute(self._reactor.listenTCP,
+                             self._port,
+                             protocolFactory,
+                             backlog=self._backlog,
+                             interface=self._interface)
+
+
+
+class TCP4ServerEndpoint(_TCPServerEndpoint):
+    """
+    Implements TCP server endpoint with an IPv4 configuration
+    """
+    def __init__(self, reactor, port, backlog=50, interface=''):
+        """
+        @param reactor: An L{IReactorTCP} provider.
+
+        @param port: The port number used for listening
+        @type port: int
+
+        @param backlog: Size of the listen queue
+        @type backlog: int
+
+        @param interface: The hostname to bind to, defaults to '' (all)
+        @type interface: str
+        """
+        _TCPServerEndpoint.__init__(self, reactor, port, backlog, interface)
+
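+    # Usage sketch (illustrative only; `reactor` and EchoFactory are assumed,
+    # not defined here):
+    #
+    #     endpoint = TCP4ServerEndpoint(reactor, 8007)
+    #     d = endpoint.listen(EchoFactory())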
+
+
+class TCP6ServerEndpoint(_TCPServerEndpoint):
+    """
+    Implements TCP server endpoint with an IPv6 configuration
+    """
+    def __init__(self, reactor, port, backlog=50, interface='::'):
+        """
+        @param reactor: An L{IReactorTCP} provider.
+
+        @param port: The port number used for listening
+        @type port: int
+
+        @param backlog: Size of the listen queue
+        @type backlog: int
+
+        @param interface: The hostname to bind to, defaults to '::' (all)
+        @type interface: str
+        """
+        _TCPServerEndpoint.__init__(self, reactor, port, backlog, interface)
+
+
+
+@implementer(interfaces.IStreamClientEndpoint)
+class TCP4ClientEndpoint(object):
+    """
+    TCP client endpoint with an IPv4 configuration.
+    """
+
+    def __init__(self, reactor, host, port, timeout=30, bindAddress=None):
+        """
+        @param reactor: An L{IReactorTCP} provider
+
+        @param host: A hostname, used when connecting
+        @type host: str
+
+        @param port: The port number, used when connecting
+        @type port: int
+
+        @param timeout: The number of seconds to wait before assuming the
+            connection has failed.
+        @type timeout: int
+
+        @param bindAddress: A (host, port) tuple of local address to bind to,
+            or None.
+        @type bindAddress: tuple
+        """
+        self._reactor = reactor
+        self._host = host
+        self._port = port
+        self._timeout = timeout
+        self._bindAddress = bindAddress
+
+
+    def connect(self, protocolFactory):
+        """
+        Implement L{IStreamClientEndpoint.connect} to connect via TCP.
+        """
+        try:
+            wf = _WrappingFactory(protocolFactory)
+            self._reactor.connectTCP(
+                self._host, self._port, wf,
+                timeout=self._timeout, bindAddress=self._bindAddress)
+            return wf._onConnection
+        except:
+            return defer.fail()
+
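+    # Usage sketch (illustrative only; `reactor` and EchoClientFactory are
+    # assumed, not defined here):
+    #
+    #     endpoint = TCP4ClientEndpoint(reactor, "localhost", 8007)
+    #     d = endpoint.connect(EchoClientFactory())
+    #     d.addCallback(lambda proto: proto.transport.write("hello\r\n"))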
+
+
+@implementer(interfaces.IStreamClientEndpoint)
+class TCP6ClientEndpoint(object):
+    """
+    TCP client endpoint with an IPv6 configuration.
+
+    @ivar _getaddrinfo: A hook used for testing name resolution.
+
+    @ivar _deferToThread: A hook used for testing deferToThread.
+
+    @ivar _GAI_ADDRESS: Index of the address portion in the result of
+        getaddrinfo to be used.
+
+    @ivar _GAI_ADDRESS_HOST: Index of the actual host-address in the
+        5-tuple L{_GAI_ADDRESS}.
+    """
+
+    _getaddrinfo = socket.getaddrinfo
+    _deferToThread = threads.deferToThread
+    _GAI_ADDRESS = 4
+    _GAI_ADDRESS_HOST = 0
+
+    def __init__(self, reactor, host, port, timeout=30, bindAddress=None):
+        """
+        @param host: An IPv6 address literal or a hostname with an
+            IPv6 address
+
+        @see: L{twisted.internet.interfaces.IReactorTCP.connectTCP}
+        """
+        self._reactor = reactor
+        self._host = host
+        self._port = port
+        self._timeout = timeout
+        self._bindAddress = bindAddress
+
+
+    def connect(self, protocolFactory):
+        """
+        Implement L{IStreamClientEndpoint.connect} to connect via TCP,
+        once the hostname resolution is done.
+        """
+        if isIPv6Address(self._host):
+            d = self._resolvedHostConnect(self._host, protocolFactory)
+        else:
+            d = self._nameResolution(self._host)
+            d.addCallback(lambda result: result[0][self._GAI_ADDRESS]
+                          [self._GAI_ADDRESS_HOST])
+            d.addCallback(self._resolvedHostConnect, protocolFactory)
+        return d
+
+
+    def _nameResolution(self, host):
+        """
+        Resolve the hostname string into a tuple containing the host
+        IPv6 address.
+        """
+        return self._deferToThread(
+            self._getaddrinfo, host, 0, socket.AF_INET6)
+
+
+    def _resolvedHostConnect(self, resolvedHost, protocolFactory):
+        """
+        Connect to the server using the resolved hostname.
+        """
+        try:
+            wf = _WrappingFactory(protocolFactory)
+            self._reactor.connectTCP(resolvedHost, self._port, wf,
+                timeout=self._timeout, bindAddress=self._bindAddress)
+            return wf._onConnection
+        except:
+            return defer.fail()
+
+
+
+@implementer(interfaces.IStreamServerEndpoint)
+class SSL4ServerEndpoint(object):
+    """
+    SSL secured TCP server endpoint with an IPv4 configuration.
+    """
+
+    def __init__(self, reactor, port, sslContextFactory,
+                 backlog=50, interface=''):
+        """
+        @param reactor: An L{IReactorSSL} provider.
+
+        @param port: The port number used for listening
+        @type port: int
+
+        @param sslContextFactory: An instance of
+            L{twisted.internet.ssl.ContextFactory}.
+
+        @param backlog: Size of the listen queue
+        @type backlog: int
+
+        @param interface: The hostname to bind to, defaults to '' (all)
+        @type interface: str
+        """
+        self._reactor = reactor
+        self._port = port
+        self._sslContextFactory = sslContextFactory
+        self._backlog = backlog
+        self._interface = interface
+
+
+    def listen(self, protocolFactory):
+        """
+        Implement L{IStreamServerEndpoint.listen} to listen for SSL on a
+        TCP socket.
+        """
+        return defer.execute(self._reactor.listenSSL, self._port,
+                             protocolFactory,
+                             contextFactory=self._sslContextFactory,
+                             backlog=self._backlog,
+                             interface=self._interface)
+
+
+
+@implementer(interfaces.IStreamClientEndpoint)
+class SSL4ClientEndpoint(object):
+    """
+    SSL secured TCP client endpoint with an IPv4 configuration
+    """
+
+    def __init__(self, reactor, host, port, sslContextFactory,
+                 timeout=30, bindAddress=None):
+        """
+        @param reactor: An L{IReactorSSL} provider.
+
+        @param host: A hostname, used when connecting
+        @type host: str
+
+        @param port: The port number, used when connecting
+        @type port: int
+
+        @param sslContextFactory: SSL Configuration information as an instance
+            of L{twisted.internet.ssl.ContextFactory}.
+
+        @param timeout: Number of seconds to wait before assuming the
+            connection has failed.
+        @type timeout: int
+
+        @param bindAddress: A (host, port) tuple of local address to bind to,
+            or None.
+        @type bindAddress: tuple
+        """
+        self._reactor = reactor
+        self._host = host
+        self._port = port
+        self._sslContextFactory = sslContextFactory
+        self._timeout = timeout
+        self._bindAddress = bindAddress
+
+
+    def connect(self, protocolFactory):
+        """
+        Implement L{IStreamClientEndpoint.connect} to connect with SSL over
+        TCP.
+        """
+        try:
+            wf = _WrappingFactory(protocolFactory)
+            self._reactor.connectSSL(
+                self._host, self._port, wf, self._sslContextFactory,
+                timeout=self._timeout, bindAddress=self._bindAddress)
+            return wf._onConnection
+        except:
+            return defer.fail()
diff --git a/ThirdParty/Twisted/twisted/internet/_glibbase.py b/ThirdParty/Twisted/twisted/internet/_glibbase.py
new file mode 100644
index 0000000..7bdbd1e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_glibbase.py
@@ -0,0 +1,391 @@
+# -*- test-case-name: twisted.internet.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module provides base support for Twisted to interact with the glib/gtk
+mainloops.
+
+The classes in this module should not be used directly, but rather you should
+import gireactor or gtk3reactor for GObject Introspection based applications,
+or glib2reactor or gtk2reactor for applications using legacy static bindings.
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+
+from zope.interface import implementer
+
+from twisted.internet import base, posixbase, selectreactor
+from twisted.internet.interfaces import IReactorFDSet
+from twisted.python import log
+from twisted.python.compat import set
+
+
+
+def ensureNotImported(moduleNames, errorMessage, preventImports=[]):
+    """
+    Check whether the given modules were imported, and if requested, ensure
+    they will not be importable in the future.
+
+    @param moduleNames: A list of module names we make sure aren't imported.
+    @type moduleNames: C{list} of C{str}
+
+    @param preventImports: A list of module names whose future imports should
+        be prevented.
+    @type preventImports: C{list} of C{str}
+
+    @param errorMessage: Message to use when raising an C{ImportError}.
+    @type errorMessage: C{str}
+
+    @raises: C{ImportError} with given error message if a given module name
+        has already been imported.
+    """
+    for name in moduleNames:
+        if sys.modules.get(name) is not None:
+            raise ImportError(errorMessage)
+
+    # Disable module imports to avoid potential problems.
+    for name in preventImports:
+        sys.modules[name] = None
+
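+# Usage sketch for ensureNotImported (illustrative only; the module names and
+# error message are examples):
+#
+#     ensureNotImported(
+#         ["gobject", "gtk"],
+#         "Introspected and static glib/gtk bindings must not be mixed.",
+#         preventImports=["gobject", "gtk", "glib"])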
+
+
+class GlibWaker(posixbase._UnixWaker):
+    """
+    Run scheduled events after waking up.
+    """
+
+    def doRead(self):
+        posixbase._UnixWaker.doRead(self)
+        self.reactor._simulate()
+
+
+
+@implementer(IReactorFDSet)
+class GlibReactorBase(posixbase.PosixReactorBase, posixbase._PollLikeMixin):
+    """
+    Base class for GObject event loop reactors.
+
+    Notification for I/O events (reads and writes on file descriptors) is done
+    by the gobject-based event loop. File descriptors are registered with
+    gobject with the appropriate flags for read/write/disconnect notification.
+
+    Time-based events, the results of C{callLater} and C{callFromThread}, are
+    handled differently. Rather than registering each event with gobject, a
+    single gobject timeout is registered for the earliest scheduled event, the
+    output of C{reactor.timeout()}. For example, if there are timeouts in 1, 2
+    and 3.4 seconds, a single timeout is registered for 1 second in the
+    future. When this timeout is hit, C{_simulate} is called, which calls the
+    appropriate Twisted-level handlers, and a new timeout is added to gobject
+    by the C{_reschedule} method.
+
+    To handle C{callFromThread} events, we use a custom waker that calls
+    C{_simulate} whenever it wakes up.
+
+    @ivar _sources: A dictionary mapping L{FileDescriptor} instances to
+        GSource handles.
+
+    @ivar _reads: A set of L{FileDescriptor} instances currently monitored for
+        reading.
+
+    @ivar _writes: A set of L{FileDescriptor} instances currently monitored for
+        writing.
+
+    @ivar _simtag: A GSource handle for the next L{simulate} call.
+    """
+
+    # Install a waker that knows it needs to call C{_simulate} in order to run
+    # callbacks queued from a thread:
+    _wakerFactory = GlibWaker
+
+    def __init__(self, glib_module, gtk_module, useGtk=False):
+        self._simtag = None
+        self._reads = set()
+        self._writes = set()
+        self._sources = {}
+        self._glib = glib_module
+        self._gtk = gtk_module
+        posixbase.PosixReactorBase.__init__(self)
+
+        self._source_remove = self._glib.source_remove
+        self._timeout_add = self._glib.timeout_add
+
+        def _mainquit():
+            if self._gtk.main_level():
+                self._gtk.main_quit()
+
+        if useGtk:
+            self._pending = self._gtk.events_pending
+            self._iteration = self._gtk.main_iteration_do
+            self._crash = _mainquit
+            self._run = self._gtk.main
+        else:
+            self.context = self._glib.main_context_default()
+            self._pending = self.context.pending
+            self._iteration = self.context.iteration
+            self.loop = self._glib.MainLoop()
+            self._crash = lambda: self._glib.idle_add(self.loop.quit)
+            self._run = self.loop.run
+
+
+    def _handleSignals(self):
+        # First, install SIGINT and friends:
+        base._SignalReactorMixin._handleSignals(self)
+        # Next, since certain versions of gtk will clobber our signal handler,
+        # set all signal handlers again after the event loop has started to
+        # ensure they're *really* set. We don't call this twice so we don't
+        # leak file descriptors created in the SIGCHLD initialization:
+        self.callLater(0, posixbase.PosixReactorBase._handleSignals, self)
+
+
+    # The input_add function in pygtk1 checks for objects with a
+    # 'fileno' method and, if present, uses the result of that method
+    # as the input source. The pygtk2 input_add does not do this. The
+    # function below replicates the pygtk1 functionality.
+
+    # In addition, pygtk maps gtk.input_add to _gobject.io_add_watch, and
+    # g_io_add_watch() takes different condition bitfields than
+    # gtk_input_add(). We use g_io_add_watch() here in case pygtk fixes this
+    # bug.
+    def input_add(self, source, condition, callback):
+        if hasattr(source, 'fileno'):
+            # handle python objects
+            def wrapper(ignored, condition):
+                return callback(source, condition)
+            fileno = source.fileno()
+        else:
+            fileno = source
+            wrapper = callback
+        return self._glib.io_add_watch(
+            fileno, condition, wrapper,
+            priority=self._glib.PRIORITY_DEFAULT_IDLE)
+
+
+    def _ioEventCallback(self, source, condition):
+        """
+        Called by event loop when an I/O event occurs.
+        """
+        log.callWithLogger(
+            source, self._doReadOrWrite, source, source, condition)
+        return True  # True = don't auto-remove the source
+
+
+    def _add(self, source, primary, other, primaryFlag, otherFlag):
+        """
+        Add the given L{FileDescriptor} for monitoring either for reading or
+        writing. If the file is already monitored for the other operation, we
+        delete the previous registration and re-register it for both reading
+        and writing.
+        """
+        if source in primary:
+            return
+        flags = primaryFlag
+        if source in other:
+            self._source_remove(self._sources[source])
+            flags |= otherFlag
+        self._sources[source] = self.input_add(
+            source, flags, self._ioEventCallback)
+        primary.add(source)
+
+
+    def addReader(self, reader):
+        """
+        Add a L{FileDescriptor} for monitoring of data available to read.
+        """
+        self._add(reader, self._reads, self._writes,
+                  self.INFLAGS, self.OUTFLAGS)
+
+
+    def addWriter(self, writer):
+        """
+        Add a L{FileDescriptor} for monitoring ability to write data.
+        """
+        self._add(writer, self._writes, self._reads,
+                  self.OUTFLAGS, self.INFLAGS)
+
+
+    def getReaders(self):
+        """
+        Retrieve the list of L{FileDescriptor}s currently monitored for reading.
+        """
+        return list(self._reads)
+
+
+    def getWriters(self):
+        """
+        Retrieve the list of L{FileDescriptor}s currently monitored for writing.
+        """
+        return list(self._writes)
+
+
+    def removeAll(self):
+        """
+        Remove monitoring for all registered L{FileDescriptor}s.
+        """
+        return self._removeAll(self._reads, self._writes)
+
+
+    def _remove(self, source, primary, other, flags):
+        """
+        Remove monitoring the given L{FileDescriptor} for either reading or
+        writing. If it's still monitored for the other operation, we
+        re-register the L{FileDescriptor} for only that operation.
+        """
+        if source not in primary:
+            return
+        self._source_remove(self._sources[source])
+        primary.remove(source)
+        if source in other:
+            self._sources[source] = self.input_add(
+                source, flags, self._ioEventCallback)
+        else:
+            self._sources.pop(source)
+
+
+    def removeReader(self, reader):
+        """
+        Stop monitoring the given L{FileDescriptor} for reading.
+        """
+        self._remove(reader, self._reads, self._writes, self.OUTFLAGS)
+
+
+    def removeWriter(self, writer):
+        """
+        Stop monitoring the given L{FileDescriptor} for writing.
+        """
+        self._remove(writer, self._writes, self._reads, self.INFLAGS)
+
+
+    def iterate(self, delay=0):
+        """
+        One iteration of the event loop, for trial's use.
+
+        This is not used for actual reactor runs.
+        """
+        self.runUntilCurrent()
+        while self._pending():
+            self._iteration(0)
+
+
+    def crash(self):
+        """
+        Crash the reactor.
+        """
+        posixbase.PosixReactorBase.crash(self)
+        self._crash()
+
+
+    def stop(self):
+        """
+        Stop the reactor.
+        """
+        posixbase.PosixReactorBase.stop(self)
+        # The base implementation only sets a flag, to ensure shutting down is
+        # not reentrant. Unfortunately, this flag is not meaningful to the
+        # gobject event loop. We therefore call wakeUp() to ensure the event
+        # loop will call back into Twisted once this iteration is done. This
+        # will result in self.runUntilCurrent() being called, where the stop
+        # flag will trigger the actual shutdown process, eventually calling
+        # crash() which will do the actual gobject event loop shutdown.
+        self.wakeUp()
+
+
+    def run(self, installSignalHandlers=True):
+        """
+        Run the reactor.
+        """
+        self.callWhenRunning(self._reschedule)
+        self.startRunning(installSignalHandlers=installSignalHandlers)
+        if self._started:
+            self._run()
+
+
+    def callLater(self, *args, **kwargs):
+        """
+        Schedule a C{DelayedCall}.
+        """
+        result = posixbase.PosixReactorBase.callLater(self, *args, **kwargs)
+        # Make sure we'll get woken up at correct time to handle this new
+        # scheduled call:
+        self._reschedule()
+        return result
+
+
+    def _reschedule(self):
+        """
+        Schedule a glib timeout for C{_simulate}.
+        """
+        if self._simtag is not None:
+            self._source_remove(self._simtag)
+            self._simtag = None
+        timeout = self.timeout()
+        if timeout is not None:
+            self._simtag = self._timeout_add(
+                int(timeout * 1000), self._simulate,
+                priority=self._glib.PRIORITY_DEFAULT_IDLE)
+
+
+    def _simulate(self):
+        """
+        Run timers, and then reschedule glib timeout for next scheduled event.
+        """
+        self.runUntilCurrent()
+        self._reschedule()
+
+
+
+class PortableGlibReactorBase(selectreactor.SelectReactor):
+    """
+    Base class for GObject event loop reactors that works on Windows.
+
+    Sockets aren't supported by GObject's input_add on Win32.
+    """
+    def __init__(self, glib_module, gtk_module, useGtk=False):
+        self._simtag = None
+        self._glib = glib_module
+        self._gtk = gtk_module
+        selectreactor.SelectReactor.__init__(self)
+
+        self._source_remove = self._glib.source_remove
+        self._timeout_add = self._glib.timeout_add
+
+        def _mainquit():
+            if self._gtk.main_level():
+                self._gtk.main_quit()
+
+        if useGtk:
+            self._crash = _mainquit
+            self._run = self._gtk.main
+        else:
+            self.loop = self._glib.MainLoop()
+            self._crash = lambda: self._glib.idle_add(self.loop.quit)
+            self._run = self.loop.run
+
+
+    def crash(self):
+        selectreactor.SelectReactor.crash(self)
+        self._crash()
+
+
+    def run(self, installSignalHandlers=True):
+        self.startRunning(installSignalHandlers=installSignalHandlers)
+        self._timeout_add(0, self.simulate)
+        if self._started:
+            self._run()
+
+
+    def simulate(self):
+        """
+        Run simulation loops and reschedule callbacks.
+        """
+        if self._simtag is not None:
+            self._source_remove(self._simtag)
+        self.iterate()
+        timeout = min(self.timeout(), 0.01)
+        if timeout is None:
+            timeout = 0.01
+        self._simtag = self._timeout_add(
+            int(timeout * 1000), self.simulate,
+            priority=self._glib.PRIORITY_DEFAULT_IDLE)
diff --git a/ThirdParty/Twisted/twisted/internet/_newtls.py b/ThirdParty/Twisted/twisted/internet/_newtls.py
new file mode 100644
index 0000000..a990cbd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_newtls.py
@@ -0,0 +1,271 @@
+# -*- test-case-name: twisted.test.test_ssl -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module implements memory BIO based TLS support.  It is the preferred
+implementation and will be used whenever pyOpenSSL 0.10 or newer is installed
+(whenever L{twisted.protocols.tls} is importable).
+
+@since: 11.1
+"""
+
+from __future__ import division, absolute_import
+
+from zope.interface import implementer
+from zope.interface import directlyProvides
+
+from twisted.internet.interfaces import ITLSTransport, ISSLTransport
+from twisted.internet.abstract import FileDescriptor
+
+from twisted.protocols.tls import TLSMemoryBIOFactory, TLSMemoryBIOProtocol
+
+
+class _BypassTLS(object):
+    """
+    L{_BypassTLS} is used as the transport object for the TLS protocol object
+    used to implement C{startTLS}.  Its methods skip any TLS logic which
+    C{startTLS} enables.
+
+    @ivar _base: A transport base class to which method calls will be forwarded,
+        bypassing any TLS logic.  This class is only responsible for sending
+        bytes over the connection, not doing TLS.
+
+    @ivar _connection: A L{Connection} on which TLS has been started and to
+        which this object proxies.  Any method which has its behavior
+        altered after C{startTLS} will be skipped in favor of the base class's
+        implementation.  This allows the TLS protocol object to have direct
+        access to the transport, necessary to actually implement TLS.
+    """
+    def __init__(self, base, connection):
+        self._base = base
+        self._connection = connection
+
+
+    def __getattr__(self, name):
+        """
+        Forward any extra attribute access to the original transport object.
+        For example, this exposes C{getHost}, the behavior of which does not
+        change after TLS is enabled.
+        """
+        return getattr(self._connection, name)
+
+
+    def write(self, data):
+        """
+        Write some bytes directly to the connection.
+        """
+        return self._base.write(self._connection, data)
+
+
+    def writeSequence(self, iovec):
+        """
+        Write some bytes directly to the connection.
+        """
+        return self._base.writeSequence(self._connection, iovec)
+
+
+    def loseConnection(self, *args, **kwargs):
+        """
+        Close the underlying connection.
+        """
+        return self._base.loseConnection(self._connection, *args, **kwargs)
+
+
+    def registerProducer(self, producer, streaming):
+        """
+        Register a producer with the underlying connection.
+        """
+        return self._base.registerProducer(self._connection, producer, streaming)
+
+
+    def unregisterProducer(self):
+        """
+        Unregister a producer with the underlying connection.
+        """
+        return self._base.unregisterProducer(self._connection)
+
+
+
+def startTLS(transport, contextFactory, normal, bypass):
+    """
+    Add a layer of SSL to a transport.
+
+    @param transport: The transport which will be modified.  This can either be
+        a L{FileDescriptor<twisted.internet.abstract.FileDescriptor>} or a
+        L{FileHandle<twisted.internet.iocpreactor.abstract.FileHandle>}.  The
+        actual requirements of this instance are that it have:
+
+          - a C{_tlsClientDefault} attribute indicating whether the transport is
+            a client (C{True}) or a server (C{False})
+          - a settable C{TLS} attribute which can be used to mark the fact
+            that SSL has been started
+          - settable C{getHandle} and C{getPeerCertificate} attributes so
+            these L{ISSLTransport} methods can be added to it
+          - a C{protocol} attribute referring to the L{IProtocol} currently
+            connected to the transport, which can also be set to a new
+            L{IProtocol} for the transport to deliver data to
+
+    @param contextFactory: An SSL context factory defining SSL parameters for
+        the new SSL layer.
+    @type contextFactory: L{twisted.internet.ssl.ContextFactory}
+
+    @param normal: A flag indicating whether SSL will go in the same direction
+        as the underlying transport goes.  That is, if the SSL client will be
+        the underlying client and the SSL server will be the underlying server.
+        C{True} means it is the same, C{False} means they are switched.
+    @type normal: L{bool}
+
+    @param bypass: A transport base class to call methods on to bypass the new
+        SSL layer (so that the SSL layer itself can send its bytes).
+    @type bypass: L{type}
+    """
+    # Figure out which direction the SSL goes in.  If normal is True,
+    # we'll go in the direction indicated by the subclass.  Otherwise,
+    # we'll go the other way (client = not normal ^ _tlsClientDefault,
+    # in other words).
+    if normal:
+        client = transport._tlsClientDefault
+    else:
+        client = not transport._tlsClientDefault
+
+    # If we have a producer, unregister it, and then re-register it below once
+    # we've switched to TLS mode, so it gets hooked up correctly:
+    producer, streaming = None, None
+    if transport.producer is not None:
+        producer, streaming = transport.producer, transport.streamingProducer
+        transport.unregisterProducer()
+
+    tlsFactory = TLSMemoryBIOFactory(contextFactory, client, None)
+    tlsProtocol = TLSMemoryBIOProtocol(tlsFactory, transport.protocol, False)
+    transport.protocol = tlsProtocol
+
+    transport.getHandle = tlsProtocol.getHandle
+    transport.getPeerCertificate = tlsProtocol.getPeerCertificate
+
+    # Mark the transport as secure.
+    directlyProvides(transport, ISSLTransport)
+
+    # Remember we did this so that write and writeSequence can send the
+    # data to the right place.
+    transport.TLS = True
+
+    # Hook it up
+    transport.protocol.makeConnection(_BypassTLS(bypass, transport))
+
+    # Restore producer if necessary:
+    if producer:
+        transport.registerProducer(producer, streaming)
+
+
+
+@implementer(ITLSTransport)
+class ConnectionMixin(object):
+    """
+    A mixin for L{twisted.internet.abstract.FileDescriptor} which adds an
+    L{ITLSTransport} implementation.
+
+    @ivar TLS: A flag indicating whether TLS is currently in use on this
+        transport.  This is not a good way for applications to check for TLS,
+        instead use L{ISSLTransport.providedBy}.
+    """
+
+    TLS = False
+
+    def startTLS(self, ctx, normal=True):
+        """
+        @see: L{ITLSTransport.startTLS}
+        """
+        startTLS(self, ctx, normal, FileDescriptor)
+
+
+    def write(self, bytes):
+        """
+        Write some bytes to this connection, passing them through a TLS layer if
+        necessary, or discarding them if the connection has already been lost.
+        """
+        if self.TLS:
+            if self.connected:
+                self.protocol.write(bytes)
+        else:
+            FileDescriptor.write(self, bytes)
+
+
+    def writeSequence(self, iovec):
+        """
+        Write some bytes to this connection, scatter/gather-style, passing them
+        through a TLS layer if necessary, or discarding them if the connection
+        has already been lost.
+        """
+        if self.TLS:
+            if self.connected:
+                self.protocol.writeSequence(iovec)
+        else:
+            FileDescriptor.writeSequence(self, iovec)
+
+
+    def loseConnection(self):
+        """
+        Close this connection after writing all pending data.
+
+        If TLS has been negotiated, perform a TLS shutdown.
+        """
+        if self.TLS:
+            if self.connected and not self.disconnecting:
+                self.protocol.loseConnection()
+        else:
+            FileDescriptor.loseConnection(self)
+
+
+    def registerProducer(self, producer, streaming):
+        """
+        Register a producer.
+
+        If TLS is enabled, the TLS connection handles this.
+        """
+        if self.TLS:
+            # Registering a producer before we're connected shouldn't be a
+            # problem. If we end up with a write(), that's already handled in
+            # the write() code above, and there are no other potential
+            # side-effects.
+            self.protocol.registerProducer(producer, streaming)
+        else:
+            FileDescriptor.registerProducer(self, producer, streaming)
+
+
+    def unregisterProducer(self):
+        """
+        Unregister a producer.
+
+        If TLS is enabled, the TLS connection handles this.
+        """
+        if self.TLS:
+            self.protocol.unregisterProducer()
+        else:
+            FileDescriptor.unregisterProducer(self)
+
+
+
+class ClientMixin(object):
+    """
+    A mixin for L{twisted.internet.tcp.Client} which just marks it as a client
+    for the purposes of the default TLS handshake.
+
+    @ivar _tlsClientDefault: Always C{True}, indicating that this is a client
+        connection, and by default when TLS is negotiated this class will act as
+        a TLS client.
+    """
+    _tlsClientDefault = True
+
+
+
+class ServerMixin(object):
+    """
+    A mixin for L{twisted.internet.tcp.Server} which just marks it as a server
+    for the purposes of the default TLS handshake.
+
+    @ivar _tlsClientDefault: Always C{False}, indicating that this is a server
+        connection, and by default when TLS is negotiated this class will act as
+        a TLS server.
+    """
+    _tlsClientDefault = False
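+
+
+
+# Editor's sketch (added for clarity; not part of upstream Twisted): how
+# application code typically reaches the startTLS() helper defined above.  A
+# connected transport that includes ConnectionMixin provides ITLSTransport, so
+# a protocol can upgrade its own connection in place.  The use of
+# ssl.ClientContextFactory is illustrative; any context factory works.
+def _exampleUpgradeToTLS(protocol):
+    """
+    Illustration only: switch C{protocol}'s transport to TLS as a client.
+    """
+    from twisted.internet import ssl
+    protocol.transport.startTLS(ssl.ClientContextFactory())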
diff --git a/ThirdParty/Twisted/twisted/internet/_oldtls.py b/ThirdParty/Twisted/twisted/internet/_oldtls.py
new file mode 100644
index 0000000..e0d2cad
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_oldtls.py
@@ -0,0 +1,381 @@
+# -*- test-case-name: twisted.test.test_ssl -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module implements OpenSSL socket BIO based TLS support.  It is only used if
+memory BIO APIs are not available, which is when the version of pyOpenSSL
+installed is older than 0.10 (when L{twisted.protocols.tls} is not importable).
+This implementation is undesirable because of the complexity of working with
+OpenSSL's non-blocking socket-based APIs (which this module probably does about
+99% correctly, but see #4455 for an example of a problem with it).
+
+Support for older versions of pyOpenSSL is now deprecated and will be removed
+(see #5014).
+
+@see: L{twisted.internet._newtls}
+@since: 11.1
+"""
+
+import os, warnings
+
+from twisted.python.runtime import platformType
+if platformType == 'win32':
+    from errno import WSAEINTR as EINTR
+    from errno import WSAEWOULDBLOCK as EWOULDBLOCK
+    from errno import WSAENOBUFS as ENOBUFS
+else:
+    from errno import EINTR
+    from errno import EWOULDBLOCK
+    from errno import ENOBUFS
+
+from OpenSSL import SSL, __version__ as _sslversion
+
+from zope.interface import implements
+
+from twisted.python import log
+from twisted.internet.interfaces import ITLSTransport, ISSLTransport
+from twisted.internet.abstract import FileDescriptor
+from twisted.internet.main import CONNECTION_DONE, CONNECTION_LOST
+from twisted.internet._ssl import _TLSDelayed
+
+warnings.warn(
+    "Support for pyOpenSSL %s is deprecated.  "
+    "Upgrade to pyOpenSSL 0.10 or newer." % (_sslversion,),
+    category=DeprecationWarning,
+    stacklevel=100)
+
+class _TLSMixin:
+    _socketShutdownMethod = 'sock_shutdown'
+
+    writeBlockedOnRead = 0
+    readBlockedOnWrite = 0
+    _userWantRead = _userWantWrite = True
+
+    def getPeerCertificate(self):
+        return self.socket.get_peer_certificate()
+
+    def doRead(self):
+        if self.disconnected:
+            # See the comment in the similar check in doWrite below.
+            # Additionally, in order for anything other than returning
+            # CONNECTION_DONE here to make sense, it will probably be necessary
+            # to implement a way to switch back to TCP from TLS (actually, if
+            # we did something other than return CONNECTION_DONE, that would be
+            # a big part of implementing that feature).  In other words, the
+            # expectation is that doRead will be called when self.disconnected
+            # is True only when the connection has been lost.  It's possible
+            # that the other end could stop speaking TLS and then send us some
+            # non-TLS data.  We'll end up ignoring that data and dropping the
+            # connection.  There's no unit tests for this check in the cases
+            # where it makes a difference.  The test suite only hits this
+            # codepath when it would have otherwise hit the SSL.ZeroReturnError
+            # exception handler below, which has exactly the same behavior as
+            # this conditional.  Maybe that's the only case that can ever be
+            # triggered, I'm not sure.  -exarkun
+            return CONNECTION_DONE
+        if self.writeBlockedOnRead:
+            self.writeBlockedOnRead = 0
+            self._resetReadWrite()
+        try:
+            return self._base.doRead(self)
+        except SSL.ZeroReturnError:
+            return CONNECTION_DONE
+        except SSL.WantReadError:
+            return
+        except SSL.WantWriteError:
+            self.readBlockedOnWrite = 1
+            self._base.startWriting(self)
+            self._base.stopReading(self)
+            return
+        except SSL.SysCallError, (retval, desc):
+            if ((retval == -1 and desc == 'Unexpected EOF')
+                or retval > 0):
+                return CONNECTION_LOST
+            log.err()
+            return CONNECTION_LOST
+        except SSL.Error, e:
+            return e
+
+    def doWrite(self):
+        # Retry disconnecting
+        if self.disconnected:
+            # This case is triggered when "disconnected" is set to True by a
+            # call to _postLoseConnection from FileDescriptor.doWrite (to which
+            # we upcall at the end of this overridden version of that API).  It
+            # means that while, as far as any protocol connected to this
+            # transport is concerned, the connection no longer exists, the
+            # connection *does* actually still exist.  Instead of closing the
+            # connection in the overridden _postLoseConnection, we probably
+            # tried (and failed) to send a TLS close alert.  The TCP connection
+            # is still up and we're waiting for the socket to become writeable
+            # enough for the TLS close alert to actually be sendable.  Only
+            # then will the connection actually be torn down. -exarkun
+            return self._postLoseConnection()
+        if self._writeDisconnected:
+            return self._closeWriteConnection()
+
+        if self.readBlockedOnWrite:
+            self.readBlockedOnWrite = 0
+            self._resetReadWrite()
+        return self._base.doWrite(self)
+
+    def writeSomeData(self, data):
+        try:
+            return self._base.writeSomeData(self, data)
+        except SSL.WantWriteError:
+            return 0
+        except SSL.WantReadError:
+            self.writeBlockedOnRead = 1
+            self._base.stopWriting(self)
+            self._base.startReading(self)
+            return 0
+        except SSL.ZeroReturnError:
+            return CONNECTION_LOST
+        except SSL.SysCallError, e:
+            if e[0] == -1 and data == "":
+                # errors when writing empty strings are expected
+                # and can be ignored
+                return 0
+            else:
+                return CONNECTION_LOST
+        except SSL.Error, e:
+            return e
+
+
+    def _postLoseConnection(self):
+        """
+        Gets called after loseConnection(), after buffered data is sent.
+
+        We try to send an SSL shutdown alert, but if it doesn't work, retry
+        when the socket is writable.
+        """
+        # Here, set "disconnected" to True to trick higher levels into thinking
+        # the connection is really gone.  It's not, and we're not going to
+        # close it yet.  Instead, we'll try to send a TLS close alert to shut
+        # down the TLS connection cleanly.  Only after we actually get the
+        # close alert into the socket will we disconnect the underlying TCP
+        # connection.
+        self.disconnected = True
+        if hasattr(self.socket, 'set_shutdown'):
+            # If possible, mark the state of the TLS connection as having
+            # already received a TLS close alert from the peer.  Why do
+            # this???
+            self.socket.set_shutdown(SSL.RECEIVED_SHUTDOWN)
+        return self._sendCloseAlert()
+
+
+    def _sendCloseAlert(self):
+        # Okay, *THIS* is a bit complicated.
+
+        # Basically, the issue is, OpenSSL seems to not actually return
+        # errors from SSL_shutdown. Therefore, the only way to
+        # determine if the close notification has been sent is by
+        # SSL_shutdown returning "done". However, it will not claim it's
+        # done until it's both sent *and* received a shutdown notification.
+
+        # I don't actually want to wait for a received shutdown
+        # notification, though, so, I have to set RECEIVED_SHUTDOWN
+        # before calling shutdown. Then, it'll return True once it's
+        # *SENT* the shutdown.
+
+        # However, RECEIVED_SHUTDOWN can't be left set, because then
+        # reads will fail, breaking half close.
+
+        # Also, since shutdown doesn't report errors, an empty write call is
+        # done first, to try to detect if the connection has gone away.
+        # (*NOT* an SSL_write call, because that fails once you've called
+        # shutdown)
+        try:
+            os.write(self.socket.fileno(), '')
+        except OSError, se:
+            if se.args[0] in (EINTR, EWOULDBLOCK, ENOBUFS):
+                return 0
+            # Write error, socket gone
+            return CONNECTION_LOST
+
+        try:
+            if hasattr(self.socket, 'set_shutdown'):
+                laststate = self.socket.get_shutdown()
+                self.socket.set_shutdown(laststate | SSL.RECEIVED_SHUTDOWN)
+                done = self.socket.shutdown()
+                if not (laststate & SSL.RECEIVED_SHUTDOWN):
+                    self.socket.set_shutdown(SSL.SENT_SHUTDOWN)
+            else:
+                #warnings.warn("SSL connection shutdown possibly unreliable, "
+                #              "please upgrade to ver 0.XX", category=UserWarning)
+                self.socket.shutdown()
+                done = True
+        except SSL.Error, e:
+            return e
+
+        if done:
+            self.stopWriting()
+            # Note that this is tested for by identity below.
+            return CONNECTION_DONE
+        else:
+            # For some reason, the close alert wasn't sent.  Start writing
+            # again so that we'll get another chance to send it.
+            self.startWriting()
+            # On Linux, select will sometimes not report a closed file
+            # descriptor in the write set (in particular, it seems that if a
+            # send() fails with EPIPE, the socket will not appear in the write
+            # set).  The shutdown call above (which calls down to SSL_shutdown)
+            # may have swallowed a write error.  Therefore, also start reading
+            # so that if the socket is closed we will notice.  This doesn't
+            # seem to be a problem for poll (because poll reports errors
+            # separately) or with select on BSD (presumably because, unlike
+            # Linux, it doesn't implement select in terms of poll and then map
+            # POLLHUP to select's in fd_set).
+            self.startReading()
+            return None
+
+    def _closeWriteConnection(self):
+        result = self._sendCloseAlert()
+
+        if result is CONNECTION_DONE:
+            return self._base._closeWriteConnection(self)
+
+        return result
+
+    def startReading(self):
+        self._userWantRead = True
+        if not self.readBlockedOnWrite:
+            return self._base.startReading(self)
+
+
+    def stopReading(self):
+        self._userWantRead = False
+        # If we've disconnected, preventing stopReading() from happening
+        # because we are blocked on a read is silly; the read will never
+        # happen.
+        if self.disconnected or not self.writeBlockedOnRead:
+            return self._base.stopReading(self)
+
+
+    def startWriting(self):
+        self._userWantWrite = True
+        if not self.writeBlockedOnRead:
+            return self._base.startWriting(self)
+
+
+    def stopWriting(self):
+        self._userWantWrite = False
+        # If we've disconnected, preventing stopWriting() from happening
+        # because we are blocked on a write is silly; the write will never
+        # happen.
+        if self.disconnected or not self.readBlockedOnWrite:
+            return self._base.stopWriting(self)
+
+
+    def _resetReadWrite(self):
+        # After changing readBlockedOnWrite or writeBlockedOnRead,
+        # call this to reset the state to what the user requested.
+        if self._userWantWrite:
+            self.startWriting()
+        else:
+            self.stopWriting()
+
+        if self._userWantRead:
+            self.startReading()
+        else:
+            self.stopReading()
+
+
+
+def _getTLSClass(klass, _existing={}):
+    if klass not in _existing:
+        class TLSConnection(_TLSMixin, klass):
+            implements(ISSLTransport)
+            _base = klass
+        _existing[klass] = TLSConnection
+    return _existing[klass]
+
+
+class ConnectionMixin(object):
+    """
+    Mixin for L{twisted.internet.tcp.Connection} to help implement
+    L{ITLSTransport} using pyOpenSSL to do crypto and I/O.
+    """
+    TLS = 0
+
+    _tlsWaiting = None
+    def startTLS(self, ctx, extra=True):
+        assert not self.TLS
+        if self.dataBuffer or self._tempDataBuffer:
+            # pre-TLS bytes are still being written.  Starting TLS now
+            # will do the wrong thing.  Instead, mark that we're trying
+            # to go into the TLS state.
+            self._tlsWaiting = _TLSDelayed([], ctx, extra)
+            return False
+
+        self.stopReading()
+        self.stopWriting()
+        self._startTLS()
+        self.socket = SSL.Connection(ctx.getContext(), self.socket)
+        self.fileno = self.socket.fileno
+        self.startReading()
+        return True
+
+
+    def _startTLS(self):
+        self.TLS = 1
+        self.__class__ = _getTLSClass(self.__class__)
+
+
+    def write(self, bytes):
+        if self._tlsWaiting is not None:
+            self._tlsWaiting.bufferedData.append(bytes)
+        else:
+            FileDescriptor.write(self, bytes)
+
+
+    def writeSequence(self, iovec):
+        if self._tlsWaiting is not None:
+            self._tlsWaiting.bufferedData.extend(iovec)
+        else:
+            FileDescriptor.writeSequence(self, iovec)
+
+
+    def doWrite(self):
+        result = FileDescriptor.doWrite(self)
+        if self._tlsWaiting is not None:
+            if not self.dataBuffer and not self._tempDataBuffer:
+                waiting = self._tlsWaiting
+                self._tlsWaiting = None
+                self.startTLS(waiting.context, waiting.extra)
+                self.writeSequence(waiting.bufferedData)
+        return result
+
+
+
+class ClientMixin(object):
+    """
+    Mixin for L{twisted.internet.tcp.Client} to implement the client part of
+    L{ITLSTransport}.
+    """
+    implements(ITLSTransport)
+
+    def startTLS(self, ctx, client=1):
+        if self._base.startTLS(self, ctx, client):
+            if client:
+                self.socket.set_connect_state()
+            else:
+                self.socket.set_accept_state()
+
+
+
+class ServerMixin(object):
+    """
+    Mixin for L{twisted.internet.tcp.Server} to implement the server part of
+    L{ITLSTransport}.
+    """
+    implements(ITLSTransport)
+
+    def startTLS(self, ctx, server=1):
+        if self._base.startTLS(self, ctx, server):
+            if server:
+                self.socket.set_accept_state()
+            else:
+                self.socket.set_connect_state()
+
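+
+
+# Editor's sketch (added for clarity; not part of upstream Twisted): the
+# shutdown-flag trick used by _TLSMixin._sendCloseAlert above, shown in
+# isolation.  Pretending the peer's close alert has already been received
+# makes SSL_shutdown() report success as soon as our own close alert has been
+# written, after which the pretence is undone so half-close reads still work.
+def _exampleSendCloseAlert(sslConnection):
+    """
+    Illustration only: return a true value once our close alert was sent.
+    """
+    previous = sslConnection.get_shutdown()
+    sslConnection.set_shutdown(previous | SSL.RECEIVED_SHUTDOWN)
+    done = sslConnection.shutdown()
+    if not (previous & SSL.RECEIVED_SHUTDOWN):
+        # Leave only SENT_SHUTDOWN set, mirroring the cleanup above.
+        sslConnection.set_shutdown(SSL.SENT_SHUTDOWN)
+    return done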
diff --git a/ThirdParty/Twisted/twisted/internet/_pollingfile.py b/ThirdParty/Twisted/twisted/internet/_pollingfile.py
new file mode 100644
index 0000000..5d00ace
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_pollingfile.py
@@ -0,0 +1,300 @@
+# -*- test-case-name: twisted.internet.test.test_pollingfile -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implements a simple polling interface for file descriptors that don't work with
+select() - this is pretty much only useful on Windows.
+"""
+
+from zope.interface import implements
+
+from twisted.internet.interfaces import IConsumer, IPushProducer
+
+
+MIN_TIMEOUT = 0.000000001
+MAX_TIMEOUT = 0.1
+
+
+
+class _PollableResource:
+    active = True
+
+    def activate(self):
+        self.active = True
+
+
+    def deactivate(self):
+        self.active = False
+
+
+
+class _PollingTimer:
+    # Everything is private here because it is really an implementation detail.
+
+    def __init__(self, reactor):
+        self.reactor = reactor
+        self._resources = []
+        self._pollTimer = None
+        self._currentTimeout = MAX_TIMEOUT
+        self._paused = False
+
+    def _addPollableResource(self, res):
+        self._resources.append(res)
+        self._checkPollingState()
+
+    def _checkPollingState(self):
+        for resource in self._resources:
+            if resource.active:
+                self._startPolling()
+                break
+        else:
+            self._stopPolling()
+
+    def _startPolling(self):
+        if self._pollTimer is None:
+            self._pollTimer = self._reschedule()
+
+    def _stopPolling(self):
+        if self._pollTimer is not None:
+            self._pollTimer.cancel()
+            self._pollTimer = None
+
+    def _pause(self):
+        self._paused = True
+
+    def _unpause(self):
+        self._paused = False
+        self._checkPollingState()
+
+    def _reschedule(self):
+        if not self._paused:
+            return self.reactor.callLater(self._currentTimeout, self._pollEvent)
+
+    def _pollEvent(self):
+        workUnits = 0.
+        anyActive = []
+        for resource in self._resources:
+            if resource.active:
+                workUnits += resource.checkWork()
+                # Check AFTER work has been done
+                if resource.active:
+                    anyActive.append(resource)
+
+        newTimeout = self._currentTimeout
+        if workUnits:
+            newTimeout = self._currentTimeout / (workUnits + 1.)
+            if newTimeout < MIN_TIMEOUT:
+                newTimeout = MIN_TIMEOUT
+        else:
+            newTimeout = self._currentTimeout * 2.
+            if newTimeout > MAX_TIMEOUT:
+                newTimeout = MAX_TIMEOUT
+        self._currentTimeout = newTimeout
+        if anyActive:
+            self._pollTimer = self._reschedule()
+
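+
+# Editor's note (added for clarity; not part of upstream Twisted): the polling
+# interval above adapts to load -- it shrinks when a poll finds work and
+# doubles when it does not, clamped to [MIN_TIMEOUT, MAX_TIMEOUT].  Starting
+# from 0.1s, one poll that does one unit of work drops it to 0.05s, another
+# busy poll to 0.025s, and two idle polls bring it back up to 0.1s.  The
+# helper below just replays that arithmetic.
+def _exampleNextTimeout(current, workUnits):
+    """
+    Illustration only: compute the next polling interval as _pollEvent does.
+    """
+    if workUnits:
+        return max(current / (workUnits + 1.), MIN_TIMEOUT)
+    return min(current * 2., MAX_TIMEOUT)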
+
+# If we ever (let's hope not) need the above functionality on UNIX, this could
+# be factored into a different module.
+
+import win32pipe
+import win32file
+import win32api
+import pywintypes
+
+class _PollableReadPipe(_PollableResource):
+
+    implements(IPushProducer)
+
+    def __init__(self, pipe, receivedCallback, lostCallback):
+        # security attributes for pipes
+        self.pipe = pipe
+        self.receivedCallback = receivedCallback
+        self.lostCallback = lostCallback
+
+    def checkWork(self):
+        finished = 0
+        fullDataRead = []
+
+        while 1:
+            try:
+                buffer, bytesToRead, result = win32pipe.PeekNamedPipe(self.pipe, 1)
+                # finished = (result == -1)
+                if not bytesToRead:
+                    break
+                hr, data = win32file.ReadFile(self.pipe, bytesToRead, None)
+                fullDataRead.append(data)
+            except win32api.error:
+                finished = 1
+                break
+
+        dataBuf = ''.join(fullDataRead)
+        if dataBuf:
+            self.receivedCallback(dataBuf)
+        if finished:
+            self.cleanup()
+        return len(dataBuf)
+
+    def cleanup(self):
+        self.deactivate()
+        self.lostCallback()
+
+    def close(self):
+        try:
+            win32api.CloseHandle(self.pipe)
+        except pywintypes.error:
+            # You can't close std handles...?
+            pass
+
+    def stopProducing(self):
+        self.close()
+
+    def pauseProducing(self):
+        self.deactivate()
+
+    def resumeProducing(self):
+        self.activate()
+
+
+FULL_BUFFER_SIZE = 64 * 1024
+
+class _PollableWritePipe(_PollableResource):
+
+    implements(IConsumer)
+
+    def __init__(self, writePipe, lostCallback):
+        self.disconnecting = False
+        self.producer = None
+        self.producerPaused = 0
+        self.streamingProducer = 0
+        self.outQueue = []
+        self.writePipe = writePipe
+        self.lostCallback = lostCallback
+        try:
+            win32pipe.SetNamedPipeHandleState(writePipe,
+                                              win32pipe.PIPE_NOWAIT,
+                                              None,
+                                              None)
+        except pywintypes.error:
+            # Maybe it's an invalid handle.  Who knows.
+            pass
+
+    def close(self):
+        self.disconnecting = True
+
+    def bufferFull(self):
+        if self.producer is not None:
+            self.producerPaused = 1
+            self.producer.pauseProducing()
+
+    def bufferEmpty(self):
+        if self.producer is not None and ((not self.streamingProducer) or
+                                          self.producerPaused):
+            self.producer.producerPaused = 0
+            self.producer.resumeProducing()
+            return True
+        return False
+
+    # almost-but-not-quite-exact copy-paste from abstract.FileDescriptor... ugh
+
+    def registerProducer(self, producer, streaming):
+        """Register to receive data from a producer.
+
+        This sets this selectable to be a consumer for a producer.  When this
+        selectable runs out of data on a write() call, it will ask the producer
+        to resumeProducing(). A producer should implement the IProducer
+        interface.
+
+        FileDescriptor provides some infrastructure for producer methods.
+        """
+        if self.producer is not None:
+            raise RuntimeError(
+                "Cannot register producer %s, because producer %s was never "
+                "unregistered." % (producer, self.producer))
+        if not self.active:
+            producer.stopProducing()
+        else:
+            self.producer = producer
+            self.streamingProducer = streaming
+            if not streaming:
+                producer.resumeProducing()
+
+    def unregisterProducer(self):
+        """Stop consuming data from a producer, without disconnecting.
+        """
+        self.producer = None
+
+    def writeConnectionLost(self):
+        self.deactivate()
+        try:
+            win32api.CloseHandle(self.writePipe)
+        except pywintypes.error:
+            # OMG what
+            pass
+        self.lostCallback()
+
+
+    def writeSequence(self, seq):
+        """
+        Append a C{list} or C{tuple} of bytes to the output buffer.
+
+        @param seq: C{list} or C{tuple} of C{str} instances to be appended to
+            the output buffer.
+
+        @raise TypeError: If C{seq} contains C{unicode}.
+        """
+        if unicode in map(type, seq):
+            raise TypeError("Unicode not allowed in output buffer.")
+        self.outQueue.extend(seq)
+
+
+    def write(self, data):
+        """
+        Append some bytes to the output buffer.
+
+        @param data: C{str} to be appended to the output buffer.
+        @type data: C{str}.
+
+        @raise TypeError: If C{data} is C{unicode} instead of C{str}.
+        """
+        if isinstance(data, unicode):
+            raise TypeError("Unicode not allowed in output buffer.")
+        if self.disconnecting:
+            return
+        self.outQueue.append(data)
+        if sum(map(len, self.outQueue)) > FULL_BUFFER_SIZE:
+            self.bufferFull()
+
+
+    def checkWork(self):
+        numBytesWritten = 0
+        if not self.outQueue:
+            if self.disconnecting:
+                self.writeConnectionLost()
+                return 0
+            try:
+                win32file.WriteFile(self.writePipe, '', None)
+            except pywintypes.error:
+                self.writeConnectionLost()
+                return numBytesWritten
+        while self.outQueue:
+            data = self.outQueue.pop(0)
+            errCode = 0
+            try:
+                errCode, nBytesWritten = win32file.WriteFile(self.writePipe,
+                                                             data, None)
+            except win32api.error:
+                self.writeConnectionLost()
+                break
+            else:
+                # assert not errCode, "wtf an error code???"
+                numBytesWritten += nBytesWritten
+                if len(data) > nBytesWritten:
+                    self.outQueue.insert(0, data[nBytesWritten:])
+                    break
+        else:
+            resumed = self.bufferEmpty()
+            if not resumed and self.disconnecting:
+                self.writeConnectionLost()
+        return numBytesWritten
diff --git a/ThirdParty/Twisted/twisted/internet/_posixserialport.py b/ThirdParty/Twisted/twisted/internet/_posixserialport.py
new file mode 100644
index 0000000..cc165a3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_posixserialport.py
@@ -0,0 +1,74 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Serial Port Protocol
+"""
+
+# system imports
+import os, errno
+
+# dependent on pyserial ( http://pyserial.sf.net/ )
+# only tested w/ 1.18 (5 Dec 2002)
+import serial
+from serial import PARITY_NONE, PARITY_EVEN, PARITY_ODD
+from serial import STOPBITS_ONE, STOPBITS_TWO
+from serial import FIVEBITS, SIXBITS, SEVENBITS, EIGHTBITS
+
+from serialport import BaseSerialPort
+
+# twisted imports
+from twisted.internet import abstract, fdesc, main
+
+class SerialPort(BaseSerialPort, abstract.FileDescriptor):
+    """
+    A select()able serial device, acting as a transport.
+    """
+
+    connected = 1
+
+    def __init__(self, protocol, deviceNameOrPortNumber, reactor,
+        baudrate = 9600, bytesize = EIGHTBITS, parity = PARITY_NONE,
+        stopbits = STOPBITS_ONE, timeout = 0, xonxoff = 0, rtscts = 0):
+        abstract.FileDescriptor.__init__(self, reactor)
+        self._serial = self._serialFactory(
+            deviceNameOrPortNumber, baudrate=baudrate, bytesize=bytesize,
+            parity=parity, stopbits=stopbits, timeout=timeout,
+            xonxoff=xonxoff, rtscts=rtscts)
+        self.reactor = reactor
+        self.flushInput()
+        self.flushOutput()
+        self.protocol = protocol
+        self.protocol.makeConnection(self)
+        self.startReading()
+
+
+    def fileno(self):
+        return self._serial.fd
+
+
+    def writeSomeData(self, data):
+        """
+        Write some data to the serial device.
+        """
+        return fdesc.writeToFD(self.fileno(), data)
+
+
+    def doRead(self):
+        """
+        Some data is readable from the serial device.
+        """
+        return fdesc.readFromFD(self.fileno(), self.protocol.dataReceived)
+
+
+    def connectionLost(self, reason):
+        """
+        Called when the serial port disconnects.
+
+        Will call C{connectionLost} on the protocol that is handling the
+        serial data.
+        """
+        abstract.FileDescriptor.connectionLost(self, reason)
+        self._serial.close()
+        self.protocol.connectionLost(reason)
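+
+
+# Editor's sketch (added for clarity; not part of upstream Twisted): opening a
+# serial device with the class above.  The device path and settings are
+# placeholders.
+def _exampleOpenSerialPort(protocol, reactor, device='/dev/ttyUSB0'):
+    """
+    Illustration only: connect C{protocol} to a serial device via C{reactor}.
+    """
+    return SerialPort(protocol, device, reactor,
+                      baudrate=115200, parity=PARITY_NONE,
+                      stopbits=STOPBITS_ONE, bytesize=EIGHTBITS)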
diff --git a/ThirdParty/Twisted/twisted/internet/_posixstdio.py b/ThirdParty/Twisted/twisted/internet/_posixstdio.py
new file mode 100644
index 0000000..11b3205
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_posixstdio.py
@@ -0,0 +1,175 @@
+# -*- test-case-name: twisted.test.test_stdio -*-
+
+"""Standard input/out/err support.
+
+Future Plans::
+
+    support for stderr, perhaps
+    Rewrite to use the reactor instead of an ad-hoc mechanism for connecting
+        protocols to transport.
+
+Maintainer: James Y Knight
+"""
+
+import warnings
+from zope.interface import implements
+
+from twisted.internet import process, error, interfaces
+from twisted.python import log, failure
+
+
+class PipeAddress(object):
+    implements(interfaces.IAddress)
+
+
+class StandardIO(object):
+    implements(interfaces.ITransport, interfaces.IProducer,
+               interfaces.IConsumer, interfaces.IHalfCloseableDescriptor)
+
+    _reader = None
+    _writer = None
+    disconnected = False
+    disconnecting = False
+
+    def __init__(self, proto, stdin=0, stdout=1, reactor=None):
+        if reactor is None:
+            from twisted.internet import reactor
+        self.protocol = proto
+
+        self._writer = process.ProcessWriter(reactor, self, 'write', stdout)
+        self._reader = process.ProcessReader(reactor, self, 'read', stdin)
+        self._reader.startReading()
+        self.protocol.makeConnection(self)
+
+    # ITransport
+
+    # XXX Actually, see #3597.
+    def loseWriteConnection(self):
+        if self._writer is not None:
+            self._writer.loseConnection()
+
+    def write(self, data):
+        if self._writer is not None:
+            self._writer.write(data)
+
+    def writeSequence(self, data):
+        if self._writer is not None:
+            self._writer.writeSequence(data)
+
+    def loseConnection(self):
+        self.disconnecting = True
+
+        if self._writer is not None:
+            self._writer.loseConnection()
+        if self._reader is not None:
+            # Don't loseConnection, because we don't want to SIGPIPE it.
+            self._reader.stopReading()
+
+    def getPeer(self):
+        return PipeAddress()
+
+    def getHost(self):
+        return PipeAddress()
+
+
+    # Callbacks from process.ProcessReader/ProcessWriter
+    def childDataReceived(self, fd, data):
+        self.protocol.dataReceived(data)
+
+    def childConnectionLost(self, fd, reason):
+        if self.disconnected:
+            return
+
+        if reason.value.__class__ == error.ConnectionDone:
+            # Normal close
+            if fd == 'read':
+                self._readConnectionLost(reason)
+            else:
+                self._writeConnectionLost(reason)
+        else:
+            self.connectionLost(reason)
+
+    def connectionLost(self, reason):
+        self.disconnected = True
+
+        # Make sure to cleanup the other half
+        _reader = self._reader
+        _writer = self._writer
+        protocol = self.protocol
+        self._reader = self._writer = None
+        self.protocol = None
+
+        if _writer is not None and not _writer.disconnected:
+            _writer.connectionLost(reason)
+
+        if _reader is not None and not _reader.disconnected:
+            _reader.connectionLost(reason)
+
+        try:
+            protocol.connectionLost(reason)
+        except:
+            log.err()
+
+    def _writeConnectionLost(self, reason):
+        self._writer=None
+        if self.disconnecting:
+            self.connectionLost(reason)
+            return
+
+        p = interfaces.IHalfCloseableProtocol(self.protocol, None)
+        if p:
+            try:
+                p.writeConnectionLost()
+            except:
+                log.err()
+                self.connectionLost(failure.Failure())
+
+    def _readConnectionLost(self, reason):
+        self._reader=None
+        p = interfaces.IHalfCloseableProtocol(self.protocol, None)
+        if p:
+            try:
+                p.readConnectionLost()
+            except:
+                log.err()
+                self.connectionLost(failure.Failure())
+        else:
+            self.connectionLost(reason)
+
+    # IConsumer
+    def registerProducer(self, producer, streaming):
+        if self._writer is None:
+            producer.stopProducing()
+        else:
+            self._writer.registerProducer(producer, streaming)
+
+    def unregisterProducer(self):
+        if self._writer is not None:
+            self._writer.unregisterProducer()
+
+    # IProducer
+    def stopProducing(self):
+        self.loseConnection()
+
+    def pauseProducing(self):
+        if self._reader is not None:
+            self._reader.pauseProducing()
+
+    def resumeProducing(self):
+        if self._reader is not None:
+            self._reader.resumeProducing()
+
+    # Stupid compatibility:
+    def closeStdin(self):
+        """Compatibility only, don't use. Same as loseWriteConnection."""
+        warnings.warn("This function is deprecated, use loseWriteConnection instead.",
+                      category=DeprecationWarning, stacklevel=2)
+        self.loseWriteConnection()
+
+    def stopReading(self):
+        """Compatibility only, don't use. Call pauseProducing."""
+        self.pauseProducing()
+
+    def startReading(self):
+        """Compatibility only, don't use. Call resumeProducing."""
+        self.resumeProducing()
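+
+
+# Editor's sketch (added for clarity; not part of upstream Twisted): a minimal
+# use of StandardIO -- echo anything read from stdin back to stdout through a
+# protocol.  The Echo class below is illustrative.
+def _exampleEchoOverStdio():
+    """
+    Illustration only: run an echo protocol over stdin/stdout.
+    """
+    from twisted.internet import reactor, protocol
+
+    class Echo(protocol.Protocol):
+        def dataReceived(self, data):
+            self.transport.write(data)
+
+    StandardIO(Echo())
+    reactor.run()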
diff --git a/ThirdParty/Twisted/twisted/internet/_signals.py b/ThirdParty/Twisted/twisted/internet/_signals.py
new file mode 100644
index 0000000..4335727
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_signals.py
@@ -0,0 +1,68 @@
+# -*- test-case-name: twisted.internet.test.test_sigchld -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module is used to integrate child process termination into a
+reactor event loop.  This is a challenging feature to provide because
+most platforms indicate process termination via SIGCHLD and do not
+provide a way to wait for that signal and arbitrary I/O events at the
+same time.  The naive implementation involves installing a Python
+SIGCHLD handler; unfortunately this leads to other syscalls being
+interrupted (whenever SIGCHLD is received) and failing with EINTR
+(which almost no one is prepared to handle).  This interruption can be
+disabled via siginterrupt(2) (or one of the equivalent mechanisms);
+however, if the SIGCHLD is delivered by the platform to a non-main
+thread (not a common occurrence, but difficult to prove impossible),
+the main thread (waiting on select() or another event notification
+API) may not wake up leading to an arbitrary delay before the child
+termination is noticed.
+
+The basic solution to all these issues involves enabling SA_RESTART
+(ie, disabling system call interruption) and registering a C signal
+handler which writes a byte to a pipe.  The other end of the pipe is
+registered with the event loop, allowing it to wake up shortly after
+SIGCHLD is received.  See L{twisted.internet.posixbase._SIGCHLDWaker}
+for the implementation of the event loop side of this solution.  The
+use of a pipe this way is known as the U{self-pipe
+trick<http://cr.yp.to/docs/selfpipe.html>}.
+
+From Python version 2.6, C{signal.siginterrupt} and C{signal.set_wakeup_fd}
+provide the necessary C signal handler which writes to the pipe to be
+registered with C{SA_RESTART}.
+"""
+
+from __future__ import division, absolute_import
+
+import signal
+
+
+def installHandler(fd):
+    """
+    Install a signal handler which will write a byte to C{fd} when
+    I{SIGCHLD} is received.
+
+    This is implemented by installing a SIGCHLD handler that does nothing,
+    setting the I{SIGCHLD} handler as not allowed to interrupt system calls,
+    and using L{signal.set_wakeup_fd} to do the actual writing.
+
+    @param fd: The file descriptor to which to write when I{SIGCHLD} is
+        received.
+    @type fd: C{int}
+    """
+    if fd == -1:
+        signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+    else:
+        def noopSignalHandler(*args):
+            pass
+        signal.signal(signal.SIGCHLD, noopSignalHandler)
+        signal.siginterrupt(signal.SIGCHLD, False)
+    return signal.set_wakeup_fd(fd)
+
+
+
+def isDefaultHandler():
+    """
+    Determine whether the I{SIGCHLD} handler is the default or not.
+    """
+    return signal.getsignal(signal.SIGCHLD) == signal.SIG_DFL
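+
+
+# Editor's sketch (added for clarity; not part of upstream Twisted): wiring
+# installHandler() to a pipe by hand so a blocking select() wakes up when a
+# child exits.  POSIX only; the descriptor must be non-blocking for
+# signal.set_wakeup_fd.
+def _exampleSelfPipe():
+    """
+    Illustration only: demonstrate the self-pipe trick described above.
+    """
+    import fcntl
+    import os
+    import select
+
+    readFD, writeFD = os.pipe()
+    flags = fcntl.fcntl(writeFD, fcntl.F_GETFL)
+    fcntl.fcntl(writeFD, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+    installHandler(writeFD)
+
+    pid = os.fork()
+    if pid == 0:
+        os._exit(0)  # child: exit immediately so the parent gets SIGCHLD
+    select.select([readFD], [], [])  # wakes up once the wakeup byte arrives
+    os.read(readFD, 1)  # drain the byte written by the C-level handler
+    os.waitpid(pid, 0)  # reap the child
+    installHandler(-1)  # restore the default SIGCHLD disposition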
diff --git a/ThirdParty/Twisted/twisted/internet/_ssl.py b/ThirdParty/Twisted/twisted/internet/_ssl.py
new file mode 100644
index 0000000..318ee35
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_ssl.py
@@ -0,0 +1,32 @@
+# -*- test-case-name: twisted.test.test_ssl -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module implements helpers for switching to TLS on an existing transport.
+
+@since: 11.1
+"""
+
+class _TLSDelayed(object):
+    """
+    State tracking record for TLS startup parameters.  Used to remember how
+    TLS should be started when starting it is delayed to wait for the output
+    buffer to be flushed.
+
+    @ivar bufferedData: A C{list} which contains all the data which was
+        written to the transport after an attempt to start TLS was made but
+        before the buffers outstanding at that time could be flushed and TLS
+        could really be started.  This is appended to by the transport's
+        write and writeSequence methods until it is possible to actually
+        start TLS, then it is written to the TLS-enabled transport.
+
+    @ivar context: An SSL context factory object to use to start TLS.
+
+    @ivar extra: An extra argument to pass to the transport's C{startTLS}
+        method.
+    """
+    def __init__(self, bufferedData, context, extra):
+        self.bufferedData = bufferedData
+        self.context = context
+        self.extra = extra
diff --git a/ThirdParty/Twisted/twisted/internet/_sslverify.py b/ThirdParty/Twisted/twisted/internet/_sslverify.py
new file mode 100644
index 0000000..e06c33b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_sslverify.py
@@ -0,0 +1,786 @@
+# -*- test-case-name: twisted.test.test_sslverify -*-
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from __future__ import division, absolute_import
+
+import itertools
+from hashlib import md5
+
+from OpenSSL import SSL, crypto
+
+from twisted.python.compat import nativeString, networkString
+from twisted.python import _reflectpy3 as reflect, _utilpy3 as util
+from twisted.internet.defer import Deferred
+from twisted.internet.error import VerifyError, CertificateError
+
+def _sessionCounter(counter=itertools.count()):
+    """
+    Private - shared between all OpenSSLCertificateOptions, counts up to
+    provide a unique session id for each context.
+    """
+    return next(counter)
+
+
+
+_x509names = {
+    'CN': 'commonName',
+    'commonName': 'commonName',
+
+    'O': 'organizationName',
+    'organizationName': 'organizationName',
+
+    'OU': 'organizationalUnitName',
+    'organizationalUnitName': 'organizationalUnitName',
+
+    'L': 'localityName',
+    'localityName': 'localityName',
+
+    'ST': 'stateOrProvinceName',
+    'stateOrProvinceName': 'stateOrProvinceName',
+
+    'C': 'countryName',
+    'countryName': 'countryName',
+
+    'emailAddress': 'emailAddress'}
+
+
+
+class DistinguishedName(dict):
+    """
+    Identify and describe an entity.
+
+    Distinguished names are used to provide a minimal amount of identifying
+    information about a certificate issuer or subject.  They are commonly
+    created with one or more of the following fields::
+
+        commonName (CN)
+        organizationName (O)
+        organizationalUnitName (OU)
+        localityName (L)
+        stateOrProvinceName (ST)
+        countryName (C)
+        emailAddress
+
+    A L{DistinguishedName} should be constructed using keyword arguments whose
+    keys can be any of the field names above (as a native string), and the
+    values are either Unicode text which is encodable to ASCII, or C{bytes}
+    limited to the ASCII subset. Any fields passed to the constructor will be
+    set as attributes, accessible using both their extended name and their
+    shortened acronym. The attribute values will be the ASCII-encoded
+    bytes. For example::
+
+        >>> dn = DistinguishedName(commonName=b'www.example.com',
+                                   C='US')
+        >>> dn.C
+        b'US'
+        >>> dn.countryName
+        b'US'
+        >>> hasattr(dn, "organizationName")
+        False
+
+    L{DistinguishedName} instances can also be used as dictionaries; the keys
+    are extended name of the fields::
+
+        >>> dn.keys()
+        ['countryName', 'commonName']
+        >>> dn['countryName']
+        b'US'
+
+    """
+    __slots__ = ()
+
+    def __init__(self, **kw):
+        for k, v in kw.items():
+            setattr(self, k, v)
+
+
+    def _copyFrom(self, x509name):
+        for name in _x509names:
+            value = getattr(x509name, name, None)
+            if value is not None:
+                setattr(self, name, value)
+
+
+    def _copyInto(self, x509name):
+        for k, v in self.items():
+            setattr(x509name, k, nativeString(v))
+
+
+    def __repr__(self):
+        return '<DN %s>' % (dict.__repr__(self)[1:-1])
+
+
+    def __getattr__(self, attr):
+        try:
+            return self[_x509names[attr]]
+        except KeyError:
+            raise AttributeError(attr)
+
+
+    def __setattr__(self, attr, value):
+        if attr not in _x509names:
+            raise AttributeError("%s is not a valid OpenSSL X509 name field" % (attr,))
+        realAttr = _x509names[attr]
+        if not isinstance(value, bytes):
+            value = value.encode("ascii")
+        self[realAttr] = value
+
+
+    def inspect(self):
+        """
+        Return a multi-line, human-readable representation of this DN.
+
+        @rtype: C{str}
+        """
+        l = []
+        lablen = 0
+        def uniqueValues(mapping):
+            return set(mapping.values())
+        for k in sorted(uniqueValues(_x509names)):
+            label = util.nameToLabel(k)
+            lablen = max(len(label), lablen)
+            v = getattr(self, k, None)
+            if v is not None:
+                l.append((label, nativeString(v)))
+        lablen += 2
+        for n, (label, attr) in enumerate(l):
+            l[n] = (label.rjust(lablen)+': '+ attr)
+        return '\n'.join(l)
+
+DN = DistinguishedName
+
+
+class CertBase:
+    def __init__(self, original):
+        self.original = original
+
+    def _copyName(self, suffix):
+        dn = DistinguishedName()
+        dn._copyFrom(getattr(self.original, 'get_'+suffix)())
+        return dn
+
+    def getSubject(self):
+        """
+        Retrieve the subject of this certificate.
+
+        @rtype: L{DistinguishedName}
+        @return: A copy of the subject of this certificate.
+        """
+        return self._copyName('subject')
+
+
+
+def _handleattrhelper(Class, transport, methodName):
+    """
+    (private) Helper for L{Certificate.peerFromTransport} and
+    L{Certificate.hostFromTransport} which checks for incompatible handle types
+    and null certificates and raises the appropriate exception or returns the
+    appropriate certificate object.
+    """
+    method = getattr(transport.getHandle(),
+                     "get_%s_certificate" % (methodName,), None)
+    if method is None:
+        raise CertificateError(
+            "non-TLS transport %r did not have %s certificate" % (transport, methodName))
+    cert = method()
+    if cert is None:
+        raise CertificateError(
+            "TLS transport %r did not have %s certificate" % (transport, methodName))
+    return Class(cert)
+
+
+class Certificate(CertBase):
+    """
+    An x509 certificate.
+    """
+    def __repr__(self):
+        return '<%s Subject=%s Issuer=%s>' % (self.__class__.__name__,
+                                              self.getSubject().commonName,
+                                              self.getIssuer().commonName)
+
+    def __eq__(self, other):
+        if isinstance(other, Certificate):
+            return self.dump() == other.dump()
+        return False
+
+
+    def __ne__(self, other):
+        return not self.__eq__(other)
+
+
+    def load(Class, requestData, format=crypto.FILETYPE_ASN1, args=()):
+        """
+        Load a certificate from an ASN.1- or PEM-format string.
+
+        @rtype: C{Class}
+        """
+        return Class(crypto.load_certificate(format, requestData), *args)
+    load = classmethod(load)
+    _load = load
+
+
+    def dumpPEM(self):
+        """
+        Dump this certificate to a PEM-format data string.
+
+        @rtype: C{str}
+        """
+        return self.dump(crypto.FILETYPE_PEM)
+
+
+    def loadPEM(Class, data):
+        """
+        Load a certificate from a PEM-format data string.
+
+        @rtype: C{Class}
+        """
+        return Class.load(data, crypto.FILETYPE_PEM)
+    loadPEM = classmethod(loadPEM)
+
+
+    def peerFromTransport(Class, transport):
+        """
+        Get the certificate for the remote end of the given transport.
+
+        @type: L{ISystemHandle}
+        @rtype: C{Class}
+
+        @raise: L{CertificateError}, if the given transport does not have a peer
+        certificate.
+        """
+        return _handleattrhelper(Class, transport, 'peer')
+    peerFromTransport = classmethod(peerFromTransport)
+
+
+    def hostFromTransport(Class, transport):
+        """
+        Get the certificate for the local end of the given transport.
+
+        @param transport: an L{ISystemHandle} provider; the transport we will
+        retrieve the host certificate from.
+
+        @rtype: C{Class}
+
+        @raise: L{CertificateError}, if the given transport does not have a host
+        certificate.
+        """
+        return _handleattrhelper(Class, transport, 'host')
+    hostFromTransport = classmethod(hostFromTransport)
+
+
+    def getPublicKey(self):
+        """
+        Get the public key for this certificate.
+
+        @rtype: L{PublicKey}
+        """
+        return PublicKey(self.original.get_pubkey())
+
+
+    def dump(self, format=crypto.FILETYPE_ASN1):
+        return crypto.dump_certificate(format, self.original)
+
+
+    def serialNumber(self):
+        """
+        Retrieve the serial number of this certificate.
+
+        @rtype: C{int}
+        """
+        return self.original.get_serial_number()
+
+
+    def digest(self, method='md5'):
+        """
+        Return a digest hash of this certificate using the specified hash
+        algorithm.
+
+        @param method: One of C{'md5'} or C{'sha'}.
+        @rtype: C{str}
+        """
+        return self.original.digest(method)
+
+
+    def _inspect(self):
+        return '\n'.join(['Certificate For Subject:',
+                          self.getSubject().inspect(),
+                          '\nIssuer:',
+                          self.getIssuer().inspect(),
+                          '\nSerial Number: %d' % self.serialNumber(),
+                          'Digest: %s' % nativeString(self.digest())])
+
+
+    def inspect(self):
+        """
+        Return a multi-line, human-readable representation of this
+        Certificate, including information about the subject, issuer, and
+        public key.
+        """
+        return '\n'.join((self._inspect(), self.getPublicKey().inspect()))
+
+
+    def getIssuer(self):
+        """
+        Retrieve the issuer of this certificate.
+
+        @rtype: L{DistinguishedName}
+        @return: A copy of the issuer of this certificate.
+        """
+        return self._copyName('issuer')
+
+
+    def options(self, *authorities):
+        raise NotImplementedError('Possible, but doubtful we need this yet')
+
+
+
+class CertificateRequest(CertBase):
+    """
+    An x509 certificate request.
+
+    Certificate requests are given to certificate authorities to be signed and
+    returned, resulting in an actual certificate.
+    """
+    def load(Class, requestData, requestFormat=crypto.FILETYPE_ASN1):
+        req = crypto.load_certificate_request(requestFormat, requestData)
+        dn = DistinguishedName()
+        dn._copyFrom(req.get_subject())
+        if not req.verify(req.get_pubkey()):
+            raise VerifyError("Can't verify that request for %r is self-signed." % (dn,))
+        return Class(req)
+    load = classmethod(load)
+
+
+    def dump(self, format=crypto.FILETYPE_ASN1):
+        return crypto.dump_certificate_request(format, self.original)
+
+
+
+class PrivateCertificate(Certificate):
+    """
+    An x509 certificate and private key.
+    """
+    def __repr__(self):
+        return Certificate.__repr__(self) + ' with ' + repr(self.privateKey)
+
+
+    def _setPrivateKey(self, privateKey):
+        if not privateKey.matches(self.getPublicKey()):
+            raise VerifyError(
+                "Certificate public and private keys do not match.")
+        self.privateKey = privateKey
+        return self
+
+
+    def newCertificate(self, newCertData, format=crypto.FILETYPE_ASN1):
+        """
+        Create a new L{PrivateCertificate} from the given certificate data and
+        this instance's private key.
+        """
+        return self.load(newCertData, self.privateKey, format)
+
+
+    def load(Class, data, privateKey, format=crypto.FILETYPE_ASN1):
+        return Class._load(data, format)._setPrivateKey(privateKey)
+    load = classmethod(load)
+
+
+    def inspect(self):
+        return '\n'.join([Certificate._inspect(self),
+                          self.privateKey.inspect()])
+
+
+    def dumpPEM(self):
+        """
+        Dump both public and private parts of a private certificate to
+        PEM-format data.
+        """
+        return self.dump(crypto.FILETYPE_PEM) + self.privateKey.dump(crypto.FILETYPE_PEM)
+
+
+    def loadPEM(Class, data):
+        """
+        Load both private and public parts of a private certificate from a
+        chunk of PEM-format data.
+        """
+        return Class.load(data, KeyPair.load(data, crypto.FILETYPE_PEM),
+                          crypto.FILETYPE_PEM)
+    loadPEM = classmethod(loadPEM)
+
+
+    def fromCertificateAndKeyPair(Class, certificateInstance, privateKey):
+        privcert = Class(certificateInstance.original)
+        return privcert._setPrivateKey(privateKey)
+    fromCertificateAndKeyPair = classmethod(fromCertificateAndKeyPair)
+
+
+    def options(self, *authorities):
+        options = dict(privateKey=self.privateKey.original,
+                       certificate=self.original)
+        if authorities:
+            options.update(dict(verify=True,
+                                requireCertificate=True,
+                                caCerts=[auth.original for auth in authorities]))
+        return OpenSSLCertificateOptions(**options)
+
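+    # A minimal server-side sketch, assuming `pemBytes` is a PEM blob holding
+    # both the certificate and its private key (the usual combined .pem file):
+    #
+    #     cert = PrivateCertificate.loadPEM(pemBytes)
+    #     contextFactory = cert.options()   # no client-certificate checking
+    #     # reactor.listenSSL(4433, someServerFactory, contextFactory)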
+
+    def certificateRequest(self, format=crypto.FILETYPE_ASN1,
+                           digestAlgorithm='md5'):
+        return self.privateKey.certificateRequest(
+            self.getSubject(),
+            format,
+            digestAlgorithm)
+
+
+    def signCertificateRequest(self,
+                               requestData,
+                               verifyDNCallback,
+                               serialNumber,
+                               requestFormat=crypto.FILETYPE_ASN1,
+                               certificateFormat=crypto.FILETYPE_ASN1):
+        issuer = self.getSubject()
+        return self.privateKey.signCertificateRequest(
+            issuer,
+            requestData,
+            verifyDNCallback,
+            serialNumber,
+            requestFormat,
+            certificateFormat)
+
+
+    def signRequestObject(self, certificateRequest, serialNumber,
+                          secondsToExpiry=60 * 60 * 24 * 365, # One year
+                          digestAlgorithm='md5'):
+        return self.privateKey.signRequestObject(self.getSubject(),
+                                                 certificateRequest,
+                                                 serialNumber,
+                                                 secondsToExpiry,
+                                                 digestAlgorithm)
+
+
+class PublicKey:
+    def __init__(self, osslpkey):
+        self.original = osslpkey
+        req1 = crypto.X509Req()
+        req1.set_pubkey(osslpkey)
+        self._emptyReq = crypto.dump_certificate_request(crypto.FILETYPE_ASN1, req1)
+
+
+    def matches(self, otherKey):
+        return self._emptyReq == otherKey._emptyReq
+
+
+    # XXX This could be a useful method, but sometimes it triggers a segfault,
+    # so we'll steer clear for now.
+#     def verifyCertificate(self, certificate):
+#         """
+#         returns None, or raises a VerifyError exception if the certificate
+#         could not be verified.
+#         """
+#         if not certificate.original.verify(self.original):
+#             raise VerifyError("We didn't sign that certificate.")
+
+    def __repr__(self):
+        return '<%s %s>' % (self.__class__.__name__, self.keyHash())
+
+
+    def keyHash(self):
+        """
+        MD5 hex digest of signature on an empty certificate request with this
+        key.
+        """
+        return md5(self._emptyReq).hexdigest()
+
+
+    def inspect(self):
+        return 'Public Key with Hash: %s' % (self.keyHash(),)
+
+
+
+class KeyPair(PublicKey):
+
+    def load(Class, data, format=crypto.FILETYPE_ASN1):
+        return Class(crypto.load_privatekey(format, data))
+    load = classmethod(load)
+
+
+    def dump(self, format=crypto.FILETYPE_ASN1):
+        return crypto.dump_privatekey(format, self.original)
+
+
+    def __getstate__(self):
+        return self.dump()
+
+
+    def __setstate__(self, state):
+        self.__init__(crypto.load_privatekey(crypto.FILETYPE_ASN1, state))
+
+
+    def inspect(self):
+        t = self.original.type()
+        if t == crypto.TYPE_RSA:
+            ts = 'RSA'
+        elif t == crypto.TYPE_DSA:
+            ts = 'DSA'
+        else:
+            ts = '(Unknown Type!)'
+        L = (self.original.bits(), ts, self.keyHash())
+        return '%s-bit %s Key Pair with Hash: %s' % L
+
+
+    def generate(Class, kind=crypto.TYPE_RSA, size=1024):
+        pkey = crypto.PKey()
+        pkey.generate_key(kind, size)
+        return Class(pkey)
+
+
+    def newCertificate(self, newCertData, format=crypto.FILETYPE_ASN1):
+        return PrivateCertificate.load(newCertData, self, format)
+    generate = classmethod(generate)
+
+
+    def requestObject(self, distinguishedName, digestAlgorithm='md5'):
+        req = crypto.X509Req()
+        req.set_pubkey(self.original)
+        distinguishedName._copyInto(req.get_subject())
+        req.sign(self.original, digestAlgorithm)
+        return CertificateRequest(req)
+
+
+    def certificateRequest(self, distinguishedName,
+                           format=crypto.FILETYPE_ASN1,
+                           digestAlgorithm='md5'):
+        """Create a certificate request signed with this key.
+
+        @return: a string, formatted according to the 'format' argument.
+        """
+        return self.requestObject(distinguishedName, digestAlgorithm).dump(format)
+
+
+    def signCertificateRequest(self,
+                               issuerDistinguishedName,
+                               requestData,
+                               verifyDNCallback,
+                               serialNumber,
+                               requestFormat=crypto.FILETYPE_ASN1,
+                               certificateFormat=crypto.FILETYPE_ASN1,
+                               secondsToExpiry=60 * 60 * 24 * 365, # One year
+                               digestAlgorithm='md5'):
+        """
+        Given a blob of certificate request data and a certificate authority's
+        DistinguishedName, return a blob of signed certificate data.
+
+        If verifyDNCallback returns a Deferred, I will return a Deferred which
+        fires with the signed certificate data once that Deferred has completed.
+        """
+        hlreq = CertificateRequest.load(requestData, requestFormat)
+
+        dn = hlreq.getSubject()
+        vval = verifyDNCallback(dn)
+
+        def verified(value):
+            if not value:
+                raise VerifyError("DN callback %r rejected request DN %r" % (verifyDNCallback, dn))
+            return self.signRequestObject(issuerDistinguishedName, hlreq,
+                                          serialNumber, secondsToExpiry, digestAlgorithm).dump(certificateFormat)
+
+        if isinstance(vval, Deferred):
+            return vval.addCallback(verified)
+        else:
+            return verified(vval)
+
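+    # A CA-style signing sketch, assuming `caKey` is this KeyPair, `caDN` its
+    # DistinguishedName and `reqBytes` an ASN.1 certificate-request blob; the
+    # accept-everything callback is purely illustrative:
+    #
+    #     def checkDN(dn):
+    #         return True
+    #     certBytes = caKey.signCertificateRequest(caDN, reqBytes, checkDN, 1)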
+
+    def signRequestObject(self,
+                          issuerDistinguishedName,
+                          requestObject,
+                          serialNumber,
+                          secondsToExpiry=60 * 60 * 24 * 365, # One year
+                          digestAlgorithm='md5'):
+        """
+        Sign a CertificateRequest instance, returning a Certificate instance.
+        """
+        req = requestObject.original
+        dn = requestObject.getSubject()
+        cert = crypto.X509()
+        issuerDistinguishedName._copyInto(cert.get_issuer())
+        cert.set_subject(req.get_subject())
+        cert.set_pubkey(req.get_pubkey())
+        cert.gmtime_adj_notBefore(0)
+        cert.gmtime_adj_notAfter(secondsToExpiry)
+        cert.set_serial_number(serialNumber)
+        cert.sign(self.original, digestAlgorithm)
+        return Certificate(cert)
+
+
+    def selfSignedCert(self, serialNumber, **kw):
+        dn = DN(**kw)
+        return PrivateCertificate.fromCertificateAndKeyPair(
+            self.signRequestObject(dn, self.requestObject(dn), serialNumber),
+            self)
+
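+    # A quick self-signed certificate, with illustrative distinguished-name
+    # keyword argument and serial number:
+    #
+    #     keyPair = KeyPair.generate(size=2048)
+    #     selfSigned = keyPair.selfSignedCert(1, CN='localhost')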
+
+
+class OpenSSLCertificateOptions(object):
+    """
+    A factory for SSL context objects for both SSL servers and clients.
+    """
+
+    _context = None
+    # Older versions of PyOpenSSL didn't provide OP_ALL.  Fudge it here, just in case.
+    _OP_ALL = getattr(SSL, 'OP_ALL', 0x0000FFFF)
+    # OP_NO_TICKET is not (yet) exposed by PyOpenSSL
+    _OP_NO_TICKET = 0x00004000
+
+    method = SSL.TLSv1_METHOD
+
+    def __init__(self,
+                 privateKey=None,
+                 certificate=None,
+                 method=None,
+                 verify=False,
+                 caCerts=None,
+                 verifyDepth=9,
+                 requireCertificate=True,
+                 verifyOnce=True,
+                 enableSingleUseKeys=True,
+                 enableSessions=True,
+                 fixBrokenPeers=False,
+                 enableSessionTickets=False):
+        """
+        Create an OpenSSL context SSL connection context factory.
+
+        @param privateKey: A PKey object holding the private key.
+
+        @param certificate: An X509 object holding the certificate.
+
+        @param method: The SSL protocol to use, one of SSLv23_METHOD,
+        SSLv2_METHOD, SSLv3_METHOD, TLSv1_METHOD.  Defaults to TLSv1_METHOD.
+
+        @param verify: If True, verify certificates received from the peer and
+        fail the handshake if verification fails.  Otherwise, allow anonymous
+        sessions and sessions with certificates which fail validation.  By
+        default this is False.
+
+        @param caCerts: List of certificate authority certificate objects to
+            use to verify the peer's certificate.  Only used if verify is
+            C{True}, and if verify is C{True}, this must be specified.  Since
+            verify is C{False} by default, this is C{None} by default.
+
+        @type caCerts: C{list} of L{OpenSSL.crypto.X509}
+
+        @param verifyDepth: Depth in certificate chain down to which to verify.
+        If unspecified, use the underlying default (9).
+
+        @param requireCertificate: If True, do not allow anonymous sessions.
+
+        @param verifyOnce: If True, do not re-verify the certificate
+        on session resumption.
+
+        @param enableSingleUseKeys: If True, generate a new key whenever
+        ephemeral DH parameters are used to prevent small subgroup attacks.
+
+        @param enableSessions: If True, set a session ID on each context.  This
+        allows a shortened handshake to be used when a known client reconnects.
+
+        @param fixBrokenPeers: If True, enable various non-spec protocol fixes
+        for broken SSL implementations.  This should be entirely safe,
+        according to the OpenSSL documentation, but YMMV.  This option is now
+        off by default, because it causes problems with connections between
+        peers using OpenSSL 0.9.8a.
+
+        @param enableSessionTickets: If True, enable session ticket extension
+        for session resumption per RFC 5077. Note there is no support for
+        controlling session tickets. This option is off by default, as some
+        server implementations don't correctly process incoming empty session
+        ticket extensions in the hello.
+        """
+
+        assert (privateKey is None) == (certificate is None), "Specify neither or both of privateKey and certificate"
+        self.privateKey = privateKey
+        self.certificate = certificate
+        if method is not None:
+            self.method = method
+
+        self.verify = verify
+        assert ((verify and caCerts) or
+                (not verify)), "Specify client CA certificate information if and only if enabling certificate verification"
+
+        self.caCerts = caCerts
+        self.verifyDepth = verifyDepth
+        self.requireCertificate = requireCertificate
+        self.verifyOnce = verifyOnce
+        self.enableSingleUseKeys = enableSingleUseKeys
+        self.enableSessions = enableSessions
+        self.fixBrokenPeers = fixBrokenPeers
+        self.enableSessionTickets = enableSessionTickets
+
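+    # A construction sketch mirroring PrivateCertificate.options() above: a
+    # server context that also verifies client certificates against one CA
+    # (the `.original` attributes are the underlying pyOpenSSL objects;
+    # variable names are illustrative):
+    #
+    #     opts = OpenSSLCertificateOptions(
+    #         privateKey=serverKey.original,
+    #         certificate=serverCert.original,
+    #         verify=True, requireCertificate=True,
+    #         caCerts=[caCert.original])
+    #     ctx = opts.getContext()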
+
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        try:
+            del d['_context']
+        except KeyError:
+            pass
+        return d
+
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+
+
+    def getContext(self):
+        """Return a SSL.Context object.
+        """
+        if self._context is None:
+            self._context = self._makeContext()
+        return self._context
+
+
+    def _makeContext(self):
+        ctx = SSL.Context(self.method)
+
+        if self.certificate is not None and self.privateKey is not None:
+            ctx.use_certificate(self.certificate)
+            ctx.use_privatekey(self.privateKey)
+            # Sanity check
+            ctx.check_privatekey()
+
+        verifyFlags = SSL.VERIFY_NONE
+        if self.verify:
+            verifyFlags = SSL.VERIFY_PEER
+            if self.requireCertificate:
+                verifyFlags |= SSL.VERIFY_FAIL_IF_NO_PEER_CERT
+            if self.verifyOnce:
+                verifyFlags |= SSL.VERIFY_CLIENT_ONCE
+            if self.caCerts:
+                store = ctx.get_cert_store()
+                for cert in self.caCerts:
+                    store.add_cert(cert)
+
+        # It'd be nice if pyOpenSSL let us pass None here for this behavior (as
+        # the underlying OpenSSL API call allows NULL to be passed).  It
+        # doesn't, so we'll supply a function which does the same thing.
+        def _verifyCallback(conn, cert, errno, depth, preverify_ok):
+            return preverify_ok
+        ctx.set_verify(verifyFlags, _verifyCallback)
+
+        if self.verifyDepth is not None:
+            ctx.set_verify_depth(self.verifyDepth)
+
+        if self.enableSingleUseKeys:
+            ctx.set_options(SSL.OP_SINGLE_DH_USE)
+
+        if self.fixBrokenPeers:
+            ctx.set_options(self._OP_ALL)
+
+        if self.enableSessions:
+            name = "%s-%d" % (reflect.qual(self.__class__), _sessionCounter())
+            sessionName = md5(networkString(name)).hexdigest()
+
+            ctx.set_session_id(sessionName)
+
+        if not self.enableSessionTickets:
+            ctx.set_options(self._OP_NO_TICKET)
+
+        return ctx
diff --git a/ThirdParty/Twisted/twisted/internet/_threadedselect.py b/ThirdParty/Twisted/twisted/internet/_threadedselect.py
new file mode 100644
index 0000000..8a1b722
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_threadedselect.py
@@ -0,0 +1,361 @@
+# -*- test-case-name: twisted.test.test_internet -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Threaded select reactor
+
+The threadedselectreactor is a specialized reactor for integrating with an
+arbitrary foreign event loop, such as those found in GUI toolkits.
+
+There are three things you'll need to do to use this reactor.
+
+Install the reactor at the beginning of your program, before importing
+the rest of Twisted::
+
+    | from twisted.internet import _threadedselect
+    | _threadedselect.install()
+
+Interleave this reactor with your foreign event loop, at some point after
+your event loop is initialized::
+
+    | from twisted.internet import reactor
+    | reactor.interleave(foreignEventLoopWakerFunction)
+    | reactor.addSystemEventTrigger('after', 'shutdown', foreignEventLoopStop)
+
+Instead of shutting down the foreign event loop directly, shut down the
+reactor::
+
+    | from twisted.internet import reactor
+    | reactor.stop()
+
+In order for Twisted to do its work in the main thread (the thread that
+interleave is called from), a waker function is necessary.  The waker function
+will be called from a "background" thread with one argument: func.
+The waker function's purpose is to call func() from the main thread.
+Many GUI toolkits ship with appropriate waker functions.
+Some examples of this are wxPython's wx.callAfter (may be wxCallAfter in
+older versions of wxPython) or PyObjC's PyObjCTools.AppHelper.callAfter.
+These would be used in place of "foreignEventLoopWakerFunction" in the above
+example.
+
+The other integration point at which the foreign event loop and this reactor
+must integrate is shutdown.  In order to ensure clean shutdown of Twisted,
+you must allow for Twisted to come to a complete stop before quitting the
+application.  Typically, you will do this by setting up an after shutdown
+trigger to stop your foreign event loop, and call reactor.stop() where you
+would normally have initiated the shutdown procedure for the foreign event
+loop.  Shutdown functions that could be used in place of
+"foreignEventloopStop" would be the ExitMainLoop method of the wxApp instance
+with wxPython, or the PyObjCTools.AppHelper.stopEventLoop function.
+"""
+
+from threading import Thread
+from Queue import Queue, Empty
+from time import sleep
+import sys
+
+from zope.interface import implements
+
+from twisted.internet.interfaces import IReactorFDSet
+from twisted.internet import error
+from twisted.internet import posixbase
+from twisted.internet.posixbase import _NO_FILENO, _NO_FILEDESC
+from twisted.python import log, failure, threadable
+from twisted.persisted import styles
+from twisted.python.runtime import platformType
+
+import select
+from errno import EINTR, EBADF
+
+from twisted.internet.selectreactor import _select
+
+def dictRemove(dct, value):
+    try:
+        del dct[value]
+    except KeyError:
+        pass
+
+def raiseException(e):
+    raise e
+
+class ThreadedSelectReactor(posixbase.PosixReactorBase):
+    """A threaded select() based reactor - runs on all POSIX platforms and on
+    Win32.
+    """
+    implements(IReactorFDSet)
+
+    def __init__(self):
+        threadable.init(1)
+        self.reads = {}
+        self.writes = {}
+        self.toThreadQueue = Queue()
+        self.toMainThread = Queue()
+        self.workerThread = None
+        self.mainWaker = None
+        posixbase.PosixReactorBase.__init__(self)
+        self.addSystemEventTrigger('after', 'shutdown', self._mainLoopShutdown)
+
+    def wakeUp(self):
+        # we want to wake up from any thread
+        self.waker.wakeUp()
+
+    def callLater(self, *args, **kw):
+        tple = posixbase.PosixReactorBase.callLater(self, *args, **kw)
+        self.wakeUp()
+        return tple
+
+    def _sendToMain(self, msg, *args):
+        #print >>sys.stderr, 'sendToMain', msg, args
+        self.toMainThread.put((msg, args))
+        if self.mainWaker is not None:
+            self.mainWaker()
+
+    def _sendToThread(self, fn, *args):
+        #print >>sys.stderr, 'sendToThread', fn, args
+        self.toThreadQueue.put((fn, args))
+
+    def _preenDescriptorsInThread(self):
+        log.msg("Malformed file descriptor found.  Preening lists.")
+        readers = self.reads.keys()
+        writers = self.writes.keys()
+        self.reads.clear()
+        self.writes.clear()
+        for selDict, selList in ((self.reads, readers), (self.writes, writers)):
+            for selectable in selList:
+                try:
+                    select.select([selectable], [selectable], [selectable], 0)
+                except:
+                    log.msg("bad descriptor %s" % selectable)
+                else:
+                    selDict[selectable] = 1
+
+    def _workerInThread(self):
+        try:
+            while 1:
+                fn, args = self.toThreadQueue.get()
+                #print >>sys.stderr, "worker got", fn, args
+                fn(*args)
+        except SystemExit:
+            pass # exception indicates this thread should exit
+        except:
+            f = failure.Failure()
+            self._sendToMain('Failure', f)
+        #print >>sys.stderr, "worker finished"
+
+    def _doSelectInThread(self, timeout):
+        """Run one iteration of the I/O monitor loop.
+
+        This will run all selectables that had input or output readiness
+        waiting for them.
+        """
+        reads = self.reads
+        writes = self.writes
+        while 1:
+            try:
+                r, w, ignored = _select(reads.keys(),
+                                        writes.keys(),
+                                        [], timeout)
+                break
+            except ValueError, ve:
+                # Possibly a file descriptor has gone negative?
+                log.err()
+                self._preenDescriptorsInThread()
+            except TypeError, te:
+                # Something *totally* invalid (object w/o fileno, non-integral
+                # result) was passed
+                log.err()
+                self._preenDescriptorsInThread()
+            except (select.error, IOError), se:
+                # select(2) encountered an error
+                if se.args[0] in (0, 2):
+                    # windows does this if it got an empty list
+                    if (not reads) and (not writes):
+                        return
+                    else:
+                        raise
+                elif se.args[0] == EINTR:
+                    return
+                elif se.args[0] == EBADF:
+                    self._preenDescriptorsInThread()
+                else:
+                    # OK, I really don't know what's going on.  Blow up.
+                    raise
+        self._sendToMain('Notify', r, w)
+
+    def _process_Notify(self, r, w):
+        #print >>sys.stderr, "_process_Notify"
+        reads = self.reads
+        writes = self.writes
+
+        _drdw = self._doReadOrWrite
+        _logrun = log.callWithLogger
+        for selectables, method, dct in ((r, "doRead", reads), (w, "doWrite", writes)):
+            for selectable in selectables:
+                # if this was disconnected in another thread, kill it.
+                if selectable not in dct:
+                    continue
+                # This is for pausing input when we're not ready for more.
+                _logrun(selectable, _drdw, selectable, method, dct)
+        #print >>sys.stderr, "done _process_Notify"
+
+    def _process_Failure(self, f):
+        f.raiseException()
+
+    _doIterationInThread = _doSelectInThread
+
+    def ensureWorkerThread(self):
+        if self.workerThread is None or not self.workerThread.isAlive():
+            self.workerThread = Thread(target=self._workerInThread)
+            self.workerThread.start()
+
+    def doThreadIteration(self, timeout):
+        self._sendToThread(self._doIterationInThread, timeout)
+        self.ensureWorkerThread()
+        #print >>sys.stderr, 'getting...'
+        msg, args = self.toMainThread.get()
+        #print >>sys.stderr, 'got', msg, args
+        getattr(self, '_process_' + msg)(*args)
+
+    doIteration = doThreadIteration
+
+    def _interleave(self):
+        while self.running:
+            #print >>sys.stderr, "runUntilCurrent"
+            self.runUntilCurrent()
+            t2 = self.timeout()
+            t = self.running and t2
+            self._sendToThread(self._doIterationInThread, t)
+            #print >>sys.stderr, "yielding"
+            yield None
+            #print >>sys.stderr, "fetching"
+            msg, args = self.toMainThread.get_nowait()
+            getattr(self, '_process_' + msg)(*args)
+
+    def interleave(self, waker, *args, **kw):
+        """
+        interleave(waker) interleaves this reactor with the
+        current application by moving the blocking parts of
+        the reactor (select() in this case) to a separate
+        thread.  This is typically useful for integration with
+        GUI applications which have their own event loop
+        already running.
+
+        See the module docstring for more information.
+        """
+        self.startRunning(*args, **kw)
+        loop = self._interleave()
+        def mainWaker(waker=waker, loop=loop):
+            #print >>sys.stderr, "mainWaker()"
+            waker(loop.next)
+        self.mainWaker = mainWaker
+        loop.next()
+        self.ensureWorkerThread()
+
+    def _mainLoopShutdown(self):
+        self.mainWaker = None
+        if self.workerThread is not None:
+            #print >>sys.stderr, 'getting...'
+            self._sendToThread(raiseException, SystemExit)
+            self.wakeUp()
+            try:
+                while 1:
+                    msg, args = self.toMainThread.get_nowait()
+                    #print >>sys.stderr, "ignored:", (msg, args)
+            except Empty:
+                pass
+            self.workerThread.join()
+            self.workerThread = None
+        try:
+            while 1:
+                fn, args = self.toThreadQueue.get_nowait()
+                if fn is self._doIterationInThread:
+                    log.msg('Iteration is still in the thread queue!')
+                elif fn is raiseException and args[0] is SystemExit:
+                    pass
+                else:
+                    fn(*args)
+        except Empty:
+            pass
+
+    def _doReadOrWrite(self, selectable, method, dict):
+        try:
+            why = getattr(selectable, method)()
+            handfn = getattr(selectable, 'fileno', None)
+            if not handfn:
+                why = _NO_FILENO
+            elif handfn() == -1:
+                why = _NO_FILEDESC
+        except:
+            why = sys.exc_info()[1]
+            log.err()
+        if why:
+            self._disconnectSelectable(selectable, why, method == "doRead")
+
+    def addReader(self, reader):
+        """Add a FileDescriptor for notification of data available to read.
+        """
+        self._sendToThread(self.reads.__setitem__, reader, 1)
+        self.wakeUp()
+
+    def addWriter(self, writer):
+        """Add a FileDescriptor for notification of data available to write.
+        """
+        self._sendToThread(self.writes.__setitem__, writer, 1)
+        self.wakeUp()
+
+    def removeReader(self, reader):
+        """Remove a Selectable for notification of data available to read.
+        """
+        self._sendToThread(dictRemove, self.reads, reader)
+
+    def removeWriter(self, writer):
+        """Remove a Selectable for notification of data available to write.
+        """
+        self._sendToThread(dictRemove, self.writes, writer)
+
+    def removeAll(self):
+        return self._removeAll(self.reads, self.writes)
+
+
+    def getReaders(self):
+        return self.reads.keys()
+
+
+    def getWriters(self):
+        return self.writes.keys()
+
+
+    def stop(self):
+        """
+        Extend the base stop implementation to also wake up the select thread so
+        that C{runUntilCurrent} notices the reactor should stop.
+        """
+        posixbase.PosixReactorBase.stop(self)
+        self.wakeUp()
+
+
+    def run(self, installSignalHandlers=1):
+        self.startRunning(installSignalHandlers=installSignalHandlers)
+        self.mainLoop()
+
+    def mainLoop(self):
+        q = Queue()
+        self.interleave(q.put)
+        while self.running:
+            try:
+                q.get()()
+            except StopIteration:
+                break
+
+
+
+def install():
+    """Configure the twisted mainloop to be run using the select() reactor.
+    """
+    reactor = ThreadedSelectReactor()
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
+
+__all__ = ['install']
diff --git a/ThirdParty/Twisted/twisted/internet/_utilspy3.py b/ThirdParty/Twisted/twisted/internet/_utilspy3.py
new file mode 100644
index 0000000..8559353
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_utilspy3.py
@@ -0,0 +1,59 @@
+# -*- test-case-name: twisted.internet.test.test_utilspy3 -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Utility methods, ported to Python 3.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, warnings
+from functools import wraps
+
+from twisted.python.compat import reraise
+from twisted.internet import defer
+
+def _resetWarningFilters(passthrough, addedFilters):
+    for f in addedFilters:
+        try:
+            warnings.filters.remove(f)
+        except ValueError:
+            pass
+    return passthrough
+
+
+def runWithWarningsSuppressed(suppressedWarnings, f, *a, **kw):
+    """Run the function C{f}, but with some warnings suppressed.
+
+    @param suppressedWarnings: A list of arguments to pass to filterwarnings.
+                               Must be a sequence of 2-tuples (args, kwargs).
+    @param f: A callable, followed by its arguments and keyword arguments
+    """
+    for args, kwargs in suppressedWarnings:
+        warnings.filterwarnings(*args, **kwargs)
+    addedFilters = warnings.filters[:len(suppressedWarnings)]
+    try:
+        result = f(*a, **kw)
+    except:
+        exc_info = sys.exc_info()
+        _resetWarningFilters(None, addedFilters)
+        reraise(exc_info[1], exc_info[2])
+    else:
+        if isinstance(result, defer.Deferred):
+            result.addBoth(_resetWarningFilters, addedFilters)
+        else:
+            _resetWarningFilters(None, addedFilters)
+        return result
+
+
+def suppressWarnings(f, *suppressedWarnings):
+    """
+    Wrap C{f} in a callable which suppresses the indicated warnings before
+    invoking C{f} and unsuppresses them afterwards.  If f returns a Deferred,
+    warnings will remain suppressed until the Deferred fires.
+    """
+    @wraps(f)
+    def warningSuppressingWrapper(*a, **kw):
+        return runWithWarningsSuppressed(suppressedWarnings, f, *a, **kw)
+    return warningSuppressingWrapper
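+
+
+# A small usage sketch: each suppression entry is an (args, kwargs) pair that
+# is passed straight to warnings.filterwarnings, and `noisyFunction` stands in
+# for any callable that emits DeprecationWarning:
+#
+#     quiet = suppressWarnings(
+#         noisyFunction, (('ignore',), {'category': DeprecationWarning}))
+#     quiet()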
diff --git a/ThirdParty/Twisted/twisted/internet/_win32serialport.py b/ThirdParty/Twisted/twisted/internet/_win32serialport.py
new file mode 100644
index 0000000..1a77236
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_win32serialport.py
@@ -0,0 +1,126 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Serial port support for Windows.
+
+Requires PySerial and pywin32.
+"""
+
+# system imports
+import serial
+from serial import PARITY_NONE, PARITY_EVEN, PARITY_ODD
+from serial import STOPBITS_ONE, STOPBITS_TWO
+from serial import FIVEBITS, SIXBITS, SEVENBITS, EIGHTBITS
+import win32file, win32event
+
+# twisted imports
+from twisted.internet import abstract
+
+# sibling imports
+from serialport import BaseSerialPort
+
+
+class SerialPort(BaseSerialPort, abstract.FileDescriptor):
+    """A serial device, acting as a transport, that uses a win32 event."""
+
+    connected = 1
+
+    def __init__(self, protocol, deviceNameOrPortNumber, reactor,
+                 baudrate=9600, bytesize=EIGHTBITS, parity=PARITY_NONE,
+                 stopbits=STOPBITS_ONE, xonxoff=0, rtscts=0):
+        self._serial = self._serialFactory(
+            deviceNameOrPortNumber, baudrate=baudrate, bytesize=bytesize,
+            parity=parity, stopbits=stopbits, timeout=None,
+            xonxoff=xonxoff, rtscts=rtscts)
+        self.flushInput()
+        self.flushOutput()
+        self.reactor = reactor
+        self.protocol = protocol
+        self.outQueue = []
+        self.closed = 0
+        self.closedNotifies = 0
+        self.writeInProgress = 0
+
+        self.protocol = protocol
+        self._overlappedRead = win32file.OVERLAPPED()
+        self._overlappedRead.hEvent = win32event.CreateEvent(None, 1, 0, None)
+        self._overlappedWrite = win32file.OVERLAPPED()
+        self._overlappedWrite.hEvent = win32event.CreateEvent(None, 0, 0, None)
+
+        self.reactor.addEvent(self._overlappedRead.hEvent, self, 'serialReadEvent')
+        self.reactor.addEvent(self._overlappedWrite.hEvent, self, 'serialWriteEvent')
+
+        self.protocol.makeConnection(self)
+        self._finishPortSetup()
+
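+    # A construction sketch, assuming a Windows reactor that supports
+    # addEvent() (e.g. the win32 event reactor) and an already-built protocol
+    # instance; the device name is illustrative:
+    #
+    #     port = SerialPort(proto, 'COM3', reactor, baudrate=115200)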
+
+    def _finishPortSetup(self):
+        """
+        Finish setting up the serial port.
+
+        This is a separate method to facilitate testing.
+        """
+        flags, comstat = win32file.ClearCommError(self._serial.hComPort)
+        rc, self.read_buf = win32file.ReadFile(self._serial.hComPort,
+                                               win32file.AllocateReadBuffer(1),
+                                               self._overlappedRead)
+
+
+    def serialReadEvent(self):
+        #get that character we set up
+        n = win32file.GetOverlappedResult(self._serial.hComPort, self._overlappedRead, 0)
+        if n:
+            first = str(self.read_buf[:n])
+            #now we should get everything that is already in the buffer
+            flags, comstat = win32file.ClearCommError(self._serial.hComPort)
+            if comstat.cbInQue:
+                win32event.ResetEvent(self._overlappedRead.hEvent)
+                rc, buf = win32file.ReadFile(self._serial.hComPort,
+                                             win32file.AllocateReadBuffer(comstat.cbInQue),
+                                             self._overlappedRead)
+                n = win32file.GetOverlappedResult(self._serial.hComPort, self._overlappedRead, 1)
+                #handle all the received data:
+                self.protocol.dataReceived(first + str(buf[:n]))
+            else:
+                #handle all the received data:
+                self.protocol.dataReceived(first)
+
+        #set up next one
+        win32event.ResetEvent(self._overlappedRead.hEvent)
+        rc, self.read_buf = win32file.ReadFile(self._serial.hComPort,
+                                               win32file.AllocateReadBuffer(1),
+                                               self._overlappedRead)
+
+
+    def write(self, data):
+        if data:
+            if self.writeInProgress:
+                self.outQueue.append(data)
+            else:
+                self.writeInProgress = 1
+                win32file.WriteFile(self._serial.hComPort, data, self._overlappedWrite)
+
+
+    def serialWriteEvent(self):
+        try:
+            dataToWrite = self.outQueue.pop(0)
+        except IndexError:
+            self.writeInProgress = 0
+            return
+        else:
+            win32file.WriteFile(self._serial.hComPort, dataToWrite, self._overlappedWrite)
+
+
+    def connectionLost(self, reason):
+        """
+        Called when the serial port disconnects.
+
+        Will call C{connectionLost} on the protocol that is handling the
+        serial data.
+        """
+        self.reactor.removeEvent(self._overlappedRead.hEvent)
+        self.reactor.removeEvent(self._overlappedWrite.hEvent)
+        abstract.FileDescriptor.connectionLost(self, reason)
+        self._serial.close()
+        self.protocol.connectionLost(reason)
diff --git a/ThirdParty/Twisted/twisted/internet/_win32stdio.py b/ThirdParty/Twisted/twisted/internet/_win32stdio.py
new file mode 100644
index 0000000..c4c5644
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/_win32stdio.py
@@ -0,0 +1,124 @@
+# -*- test-case-name: twisted.test.test_stdio -*-
+
+"""
+Windows-specific implementation of the L{twisted.internet.stdio} interface.
+"""
+
+import win32api
+import os, msvcrt
+
+from zope.interface import implements
+
+from twisted.internet.interfaces import IHalfCloseableProtocol, ITransport, IAddress
+from twisted.internet.interfaces import IConsumer, IPushProducer
+
+from twisted.internet import _pollingfile, main
+from twisted.python.failure import Failure
+
+
+class Win32PipeAddress(object):
+    implements(IAddress)
+
+
+
+class StandardIO(_pollingfile._PollingTimer):
+
+    implements(ITransport,
+               IConsumer,
+               IPushProducer)
+
+    disconnecting = False
+    disconnected = False
+
+    def __init__(self, proto):
+        """
+        Start talking to standard IO with the given protocol.
+
+        Also, put stdin/stdout/stderr into binary mode.
+        """
+        from twisted.internet import reactor
+
+        for stdfd in (0, 1, 2):
+            msvcrt.setmode(stdfd, os.O_BINARY)
+
+        _pollingfile._PollingTimer.__init__(self, reactor)
+        self.proto = proto
+
+        hstdin = win32api.GetStdHandle(win32api.STD_INPUT_HANDLE)
+        hstdout = win32api.GetStdHandle(win32api.STD_OUTPUT_HANDLE)
+
+        self.stdin = _pollingfile._PollableReadPipe(
+            hstdin, self.dataReceived, self.readConnectionLost)
+
+        self.stdout = _pollingfile._PollableWritePipe(
+            hstdout, self.writeConnectionLost)
+
+        self._addPollableResource(self.stdin)
+        self._addPollableResource(self.stdout)
+
+        self.proto.makeConnection(self)
+
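+    # A usage sketch: hand standard IO to a protocol instance (the protocol
+    # class name is illustrative):
+    #
+    #     stdio = StandardIO(EchoProtocol())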
+    def dataReceived(self, data):
+        self.proto.dataReceived(data)
+
+    def readConnectionLost(self):
+        if IHalfCloseableProtocol.providedBy(self.proto):
+            self.proto.readConnectionLost()
+        self.checkConnLost()
+
+    def writeConnectionLost(self):
+        if IHalfCloseableProtocol.providedBy(self.proto):
+            self.proto.writeConnectionLost()
+        self.checkConnLost()
+
+    connsLost = 0
+
+    def checkConnLost(self):
+        self.connsLost += 1
+        if self.connsLost >= 2:
+            self.disconnecting = True
+            self.disconnected = True
+            self.proto.connectionLost(Failure(main.CONNECTION_DONE))
+
+    # ITransport
+
+    def write(self, data):
+        self.stdout.write(data)
+
+    def writeSequence(self, seq):
+        self.stdout.write(''.join(seq))
+
+    def loseConnection(self):
+        self.disconnecting = True
+        self.stdin.close()
+        self.stdout.close()
+
+    def getPeer(self):
+        return Win32PipeAddress()
+
+    def getHost(self):
+        return Win32PipeAddress()
+
+    # IConsumer
+
+    def registerProducer(self, producer, streaming):
+        return self.stdout.registerProducer(producer, streaming)
+
+    def unregisterProducer(self):
+        return self.stdout.unregisterProducer()
+
+    # def write() above
+
+    # IProducer
+
+    def stopProducing(self):
+        self.stdin.stopProducing()
+
+    # IPushProducer
+
+    def pauseProducing(self):
+        self.stdin.pauseProducing()
+
+    def resumeProducing(self):
+        self.stdin.resumeProducing()
+
diff --git a/ThirdParty/Twisted/twisted/internet/abstract.py b/ThirdParty/Twisted/twisted/internet/abstract.py
new file mode 100644
index 0000000..c968937
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/abstract.py
@@ -0,0 +1,530 @@
+# -*- test-case-name: twisted.test.test_abstract -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Support for generic select()able objects.
+"""
+
+from __future__ import division, absolute_import
+
+from socket import AF_INET6, inet_pton, error
+
+from zope.interface import implementer
+
+# Twisted Imports
+from twisted.python.compat import _PY3, unicode, lazyByteSlice
+from twisted.python import _reflectpy3 as reflect, failure
+from twisted.internet import interfaces, main
+
+if _PY3:
+    def _concatenate(bObj, offset, bArray):
+        # Python 3 lacks the buffer() builtin and the other primitives don't
+        # help in this case.  Just do the copy.  Perhaps later these buffers can
+        # be joined and FileDescriptor can use writev().  Or perhaps bytearrays
+        # would help.
+        return bObj[offset:] + b"".join(bArray)
+else:
+    def _concatenate(bObj, offset, bArray):
+        # Avoid one extra string copy by using a buffer to limit what we include
+        # in the result.
+        return buffer(bObj, offset) + b"".join(bArray)
+
+
+class _ConsumerMixin(object):
+    """
+    L{IConsumer} implementations can mix this in to get C{registerProducer} and
+    C{unregisterProducer} methods which take care of keeping track of a
+    producer's state.
+
+    Subclasses must provide three attributes which L{_ConsumerMixin} will read
+    but not write:
+
+      - connected: A C{bool} which is C{True} as long as the consumer has
+        someplace to send bytes (for example, a TCP connection), and then
+        C{False} when it no longer does.
+
+      - disconnecting: A C{bool} which is C{False} until something like
+        L{ITransport.loseConnection} is called, indicating that the send buffer
+        should be flushed and the connection lost afterwards.  Afterwards,
+        C{True}.
+
+      - disconnected: A C{bool} which is C{False} until the consumer no longer
+        has a place to send bytes, then C{True}.
+
+    Subclasses must also override the C{startWriting} method.
+
+    @ivar producer: C{None} if no producer is registered, otherwise the
+        registered producer.
+
+    @ivar producerPaused: A flag indicating whether the producer is currently
+        paused.
+    @type producerPaused: C{bool} or C{int}
+
+    @ivar streamingProducer: A flag indicating whether the producer was
+        registered as a streaming (ie push) producer or not (ie a pull
+        producer).  This will determine whether the consumer may ever need to
+        pause and resume it, or if it can merely call C{resumeProducing} on it
+        when buffer space is available.
+    @type streamingProducer: C{bool} or C{int}
+
+    """
+    producer = None
+    producerPaused = False
+    streamingProducer = False
+
+    def startWriting(self):
+        """
+        Override in a subclass to cause the reactor to monitor this selectable
+        for write events.  This will be called once in C{unregisterProducer} if
+        C{loseConnection} has previously been called, so that the connection can
+        actually close.
+        """
+        raise NotImplementedError("%r did not implement startWriting" % (self,))
+
+
+    def registerProducer(self, producer, streaming):
+        """
+        Register to receive data from a producer.
+
+        This sets this selectable to be a consumer for a producer.  When this
+        selectable runs out of data on a write() call, it will ask the producer
+        to resumeProducing(). When the FileDescriptor's internal data buffer is
+        filled, it will ask the producer to pauseProducing(). If the connection
+        is lost, FileDescriptor calls producer's stopProducing() method.
+
+        If streaming is true, the producer should provide the IPushProducer
+        interface. Otherwise, it is assumed that producer provides the
+        IPullProducer interface. In this case, the producer won't be asked to
+        pauseProducing(), but it has to be careful to write() data only when its
+        resumeProducing() method is called.
+        """
+        if self.producer is not None:
+            raise RuntimeError(
+                "Cannot register producer %s, because producer %s was never "
+                "unregistered." % (producer, self.producer))
+        if self.disconnected:
+            producer.stopProducing()
+        else:
+            self.producer = producer
+            self.streamingProducer = streaming
+            if not streaming:
+                producer.resumeProducing()
+
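+    # A minimal pull-producer sketch (streaming=False) against the contract
+    # described above; `transport` is any consumer that mixes this class in:
+    #
+    #     class OneShotProducer:
+    #         def __init__(self, transport):
+    #             self.transport = transport
+    #         def resumeProducing(self):
+    #             self.transport.write(b"hello")
+    #             self.transport.unregisterProducer()
+    #         def stopProducing(self):
+    #             pass
+    #
+    #     transport.registerProducer(OneShotProducer(transport), False)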
+
+    def unregisterProducer(self):
+        """
+        Stop consuming data from a producer, without disconnecting.
+        """
+        self.producer = None
+        if self.connected and self.disconnecting:
+            self.startWriting()
+
+
+
+ at implementer(interfaces.ILoggingContext)
+class _LogOwner(object):
+    """
+    Mixin to help implement L{interfaces.ILoggingContext} for transports which
+    have a protocol, the log prefix of which should also appear in the
+    transport's log prefix.
+    """
+
+    def _getLogPrefix(self, applicationObject):
+        """
+        Determine the log prefix to use for messages related to
+        C{applicationObject}, which may or may not be an
+        L{interfaces.ILoggingContext} provider.
+
+        @return: A C{str} giving the log prefix to use.
+        """
+        if interfaces.ILoggingContext.providedBy(applicationObject):
+            return applicationObject.logPrefix()
+        return applicationObject.__class__.__name__
+
+
+    def logPrefix(self):
+        """
+        Override this method to insert custom logging behavior.  Its
+        return value will be inserted in front of every line.  It may
+        be called more times than the number of output lines.
+        """
+        return "-"
+
+
+
+ at implementer(
+    interfaces.IPushProducer, interfaces.IReadWriteDescriptor,
+    interfaces.IConsumer, interfaces.ITransport,
+    interfaces.IHalfCloseableDescriptor)
+class FileDescriptor(_ConsumerMixin, _LogOwner):
+    """
+    An object which can be operated on by select().
+
+    This is an abstract superclass of all objects which may be notified when
+    they are readable or writable; e.g. they have a file-descriptor that is
+    valid to be passed to select(2).
+    """
+    connected = 0
+    disconnected = 0
+    disconnecting = 0
+    _writeDisconnecting = False
+    _writeDisconnected = False
+    dataBuffer = b""
+    offset = 0
+
+    SEND_LIMIT = 128*1024
+
+    def __init__(self, reactor=None):
+        if not reactor:
+            from twisted.internet import reactor
+        self.reactor = reactor
+        self._tempDataBuffer = [] # will be added to dataBuffer in doWrite
+        self._tempDataLen = 0
+
+
+    def connectionLost(self, reason):
+        """The connection was lost.
+
+        This is called when the connection on a selectable object has been
+        lost.  It will be called whether the connection was closed explicitly,
+        an exception occurred in an event handler, or the other end of the
+        connection closed it first.
+
+        Clean up state here, but make sure to call back up to FileDescriptor.
+        """
+        self.disconnected = 1
+        self.connected = 0
+        if self.producer is not None:
+            self.producer.stopProducing()
+            self.producer = None
+        self.stopReading()
+        self.stopWriting()
+
+
+    def writeSomeData(self, data):
+        """
+        Write as much as possible of the given data, immediately.
+
+        This is called to invoke the lower-level writing functionality, such
+        as a socket's send() method, or a file's write(); this method
+        returns an integer or an exception.  If an integer, it is the number
+        of bytes written (possibly zero); if an exception, it indicates the
+        connection was lost.
+        """
+        raise NotImplementedError("%s does not implement writeSomeData" %
+                                  reflect.qual(self.__class__))
+
+
+    def doRead(self):
+        """
+        Called when data is available for reading.
+
+        Subclasses must override this method. The result will be interpreted
+        in the same way as a result of doWrite().
+        """
+        raise NotImplementedError("%s does not implement doRead" %
+                                  reflect.qual(self.__class__))
+
+    def doWrite(self):
+        """
+        Called when data can be written.
+
+        @return: C{None} on success, an exception or a negative integer on
+            failure.
+
+        @see: L{twisted.internet.interfaces.IWriteDescriptor.doWrite}.
+        """
+        if len(self.dataBuffer) - self.offset < self.SEND_LIMIT:
+            # If there is currently less than SEND_LIMIT bytes left to send
+            # in the string, extend it with the array data.
+            self.dataBuffer = _concatenate(
+                self.dataBuffer, self.offset, self._tempDataBuffer)
+            self.offset = 0
+            self._tempDataBuffer = []
+            self._tempDataLen = 0
+
+        # Send as much data as you can.
+        if self.offset:
+            l = self.writeSomeData(lazyByteSlice(self.dataBuffer, self.offset))
+        else:
+            l = self.writeSomeData(self.dataBuffer)
+
+        # There is no writeSomeData implementation in Twisted which returns
+        # < 0, but the documentation for writeSomeData used to claim negative
+        # integers meant connection lost.  Keep supporting this here,
+        # although it may be worth deprecating and removing at some point.
+        if isinstance(l, Exception) or l < 0:
+            return l
+        self.offset += l
+        # If there is nothing left to send,
+        if self.offset == len(self.dataBuffer) and not self._tempDataLen:
+            self.dataBuffer = b""
+            self.offset = 0
+            # stop writing.
+            self.stopWriting()
+            # If I've got a producer who is supposed to supply me with data,
+            if self.producer is not None and ((not self.streamingProducer)
+                                              or self.producerPaused):
+                # tell them to supply some more.
+                self.producerPaused = 0
+                self.producer.resumeProducing()
+            elif self.disconnecting:
+                # But if I was previously asked to let the connection die, do
+                # so.
+                return self._postLoseConnection()
+            elif self._writeDisconnecting:
+                # I was previously asked to half-close the connection.  We
+                # set _writeDisconnected before calling handler, in case the
+                # handler calls loseConnection(), which will want to check for
+                # this attribute.
+                self._writeDisconnected = True
+                result = self._closeWriteConnection()
+                return result
+        return None
+
+    def _postLoseConnection(self):
+        """Called after a loseConnection(), when all data has been written.
+
+        Whatever this returns is then returned by doWrite.
+        """
+        # default implementation, telling reactor we're finished
+        return main.CONNECTION_DONE
+
+    def _closeWriteConnection(self):
+        # override in subclasses
+        pass
+
+    def writeConnectionLost(self, reason):
+        # in current code should never be called
+        self.connectionLost(reason)
+
+    def readConnectionLost(self, reason):
+        # override in subclasses
+        self.connectionLost(reason)
+
+
+    def _isSendBufferFull(self):
+        """
+        Determine whether the user-space send buffer for this transport is full
+        or not.
+
+        When the buffer contains more than C{self.bufferSize} bytes, it is
+        considered full.  This might be improved by considering the size of the
+        kernel send buffer and how much of it is free.
+
+        @return: C{True} if it is full, C{False} otherwise.
+        """
+        return len(self.dataBuffer) + self._tempDataLen > self.bufferSize
+
+
+    def _maybePauseProducer(self):
+        """
+        Possibly pause a producer, if there is one and the send buffer is full.
+        """
+        # If we are responsible for pausing our producer,
+        if self.producer is not None and self.streamingProducer:
+            # and our buffer is full,
+            if self._isSendBufferFull():
+                # pause it.
+                self.producerPaused = 1
+                self.producer.pauseProducing()
+
+
+    def write(self, data):
+        """Reliably write some data.
+
+        The data is buffered until the underlying file descriptor is ready
+        for writing. If more than C{self.bufferSize} bytes are buffered and
+        this descriptor has a registered streaming producer, its
+        C{pauseProducing()} method will be called.
+        """
+        if isinstance(data, unicode): # no, really, I mean it
+            raise TypeError("Data must not be unicode")
+        if not self.connected or self._writeDisconnected:
+            return
+        if data:
+            self._tempDataBuffer.append(data)
+            self._tempDataLen += len(data)
+            self._maybePauseProducer()
+            self.startWriting()
+
+
+    def writeSequence(self, iovec):
+        """
+        Reliably write a sequence of data.
+
+        Currently, this is a convenience method roughly equivalent to::
+
+            for chunk in iovec:
+                fd.write(chunk)
+
+        It may have a more efficient implementation at a later time or in a
+        different reactor.
+
+        As with the C{write()} method, if a buffer size limit is reached and a
+        streaming producer is registered, it will be paused until the buffered
+        data is written to the underlying file descriptor.
+        """
+        for i in iovec:
+            if isinstance(i, unicode): # no, really, I mean it
+                raise TypeError("Data must not be unicode")
+        if not self.connected or not iovec or self._writeDisconnected:
+            return
+        self._tempDataBuffer.extend(iovec)
+        for i in iovec:
+            self._tempDataLen += len(i)
+        self._maybePauseProducer()
+        self.startWriting()
+
+
+    def loseConnection(self, _connDone=failure.Failure(main.CONNECTION_DONE)):
+        """Close the connection at the next available opportunity.
+
+        Call this to cause this FileDescriptor to lose its connection.  It will
+        first write any data that it has buffered.
+
+        If there is data buffered yet to be written, this method will cause the
+        transport to lose its connection as soon as it's done flushing its
+        write buffer.  If you have a producer registered, the connection won't
+        be closed until the producer is finished. Therefore, make sure you
+        unregister your producer when it's finished, or the connection will
+        never close.
+        """
+
+        if self.connected and not self.disconnecting:
+            if self._writeDisconnected:
+                # doWrite won't trigger the connection close anymore
+                self.stopReading()
+                self.stopWriting()
+                self.connectionLost(_connDone)
+            else:
+                self.stopReading()
+                self.startWriting()
+                self.disconnecting = 1
+
+    def loseWriteConnection(self):
+        self._writeDisconnecting = True
+        self.startWriting()
+
+    def stopReading(self):
+        """Stop waiting for read availability.
+
+        Call this to remove this selectable from being notified when it is
+        ready for reading.
+        """
+        self.reactor.removeReader(self)
+
+    def stopWriting(self):
+        """Stop waiting for write availability.
+
+        Call this to remove this selectable from being notified when it is ready
+        for writing.
+        """
+        self.reactor.removeWriter(self)
+
+    def startReading(self):
+        """Start waiting for read availability.
+        """
+        self.reactor.addReader(self)
+
+    def startWriting(self):
+        """Start waiting for write availability.
+
+        Call this to have this FileDescriptor be notified whenever it is ready for
+        writing.
+        """
+        self.reactor.addWriter(self)
+
+    # Producer/consumer implementation
+
+    # first, the consumer stuff.  This requires no additional work, as
+    # any object you can write to can be a consumer, really.
+
+    producer = None
+    bufferSize = 2**2**2**2
+
+    def stopConsuming(self):
+        """Stop consuming data.
+
+        This is called when a producer has lost its connection, to tell the
+        consumer to go lose its connection (and break potential circular
+        references).
+        """
+        self.unregisterProducer()
+        self.loseConnection()
+
+    # producer interface implementation
+
+    def resumeProducing(self):
+        assert self.connected and not self.disconnecting
+        self.startReading()
+
+    def pauseProducing(self):
+        self.stopReading()
+
+    def stopProducing(self):
+        self.loseConnection()
+
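The consumer methods above pair with a producer object; a rough sketch of a
streaming (push) producer, assuming the usual registerProducer() call on the
transport (the producer class here is illustrative):

    from zope.interface import implementer
    from twisted.internet import interfaces

    @implementer(interfaces.IPushProducer)
    class SlowFeeder(object):
        """Toy producer that the transport pauses when its buffer fills."""
        def __init__(self, transport):
            self.transport = transport
            self.paused = False

        def pauseProducing(self):
            self.paused = True     # write buffer exceeded bufferSize

        def resumeProducing(self):
            self.paused = False    # buffer drained, feeding may continue

        def stopProducing(self):
            self.paused = True

    # Typically wired up from a Protocol.connectionMade:
    #     feeder = SlowFeeder(self.transport)
    #     self.transport.registerProducer(feeder, streaming=True)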
+
+    def fileno(self):
+        """File Descriptor number for select().
+
+        This method must be overridden or assigned in subclasses to
+        indicate a valid file descriptor for the operating system.
+        """
+        return -1
+
+
+def isIPAddress(addr):
+    """
+    Determine whether the given string represents an IPv4 address.
+
+    @type addr: C{str}
+    @param addr: A string which may or may not be the decimal dotted
+    representation of an IPv4 address.
+
+    @rtype: C{bool}
+    @return: C{True} if C{addr} represents an IPv4 address, C{False}
+    otherwise.
+    """
+    dottedParts = addr.split('.')
+    if len(dottedParts) == 4:
+        for octet in dottedParts:
+            try:
+                value = int(octet)
+            except ValueError:
+                return False
+            else:
+                if value < 0 or value > 255:
+                    return False
+        return True
+    return False
+
+
+def isIPv6Address(addr):
+    """
+    Determine whether the given string represents an IPv6 address.
+
+    @param addr: A string which may or may not be the hex
+        representation of an IPv6 address.
+    @type addr: C{str}
+
+    @return: C{True} if C{addr} represents an IPv6 address, C{False}
+        otherwise.
+    @rtype: C{bool}
+    """
+    if '%' in addr:
+        addr = addr.split('%', 1)[0]
+    if not addr:
+        return False
+    try:
+        # This might be a native implementation or the one from
+        # twisted.python.compat.
+        inet_pton(AF_INET6, addr)
+    except (ValueError, error):
+        return False
+    return True
+
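As a quick illustration of both helpers (a sketch, not upstream test code):

    from twisted.internet.abstract import isIPAddress, isIPv6Address

    assert isIPAddress("192.168.0.1")
    assert not isIPAddress("192.168.0.256")   # octet out of range
    assert not isIPAddress("::1")             # IPv6 is handled separately
    assert isIPv6Address("fe80::1%eth0")      # zone id is stripped first
    assert not isIPv6Address("")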
+
+__all__ = ["FileDescriptor", "isIPAddress", "isIPv6Address"]
diff --git a/ThirdParty/Twisted/twisted/internet/address.py b/ThirdParty/Twisted/twisted/internet/address.py
new file mode 100644
index 0000000..6b34b99
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/address.py
@@ -0,0 +1,146 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Address objects for network connections.
+"""
+
+from __future__ import division, absolute_import
+
+import warnings, os
+
+from zope.interface import implementer
+
+from twisted.internet.interfaces import IAddress
+from twisted.python._utilpy3 import FancyEqMixin
+
+
+@implementer(IAddress)
+class _IPAddress(FancyEqMixin, object):
+    """
+    An L{_IPAddress} represents the address of an IP socket endpoint, providing
+    common behavior for IPv4 and IPv6.
+
+    @ivar type: A string describing the type of transport, either 'TCP' or
+        'UDP'.
+
+    @ivar host: A string containing the presentation format of the IP address;
+        for example, "127.0.0.1" or "::1".
+    @type host: C{str}
+
+    @ivar port: An integer representing the port number.
+    @type port: C{int}
+    """
+
+    compareAttributes = ('type', 'host', 'port')
+
+    def __init__(self, type, host, port):
+        assert type in ('TCP', 'UDP')
+        self.type = type
+        self.host = host
+        self.port = port
+
+
+    def __repr__(self):
+        return '%s(%s, %r, %d)' % (
+            self.__class__.__name__, self.type, self.host, self.port)
+
+
+    def __hash__(self):
+        return hash((self.type, self.host, self.port))
+
+
+
+class IPv4Address(_IPAddress):
+    """
+    An L{IPv4Address} represents the address of an IPv4 socket endpoint.
+
+    @ivar host: A string containing a dotted-quad IPv4 address; for example,
+        "127.0.0.1".
+    @type host: C{str}
+    """
+
+    def __init__(self, type, host, port, _bwHack=None):
+        _IPAddress.__init__(self, type, host, port)
+        if _bwHack is not None:
+            warnings.warn("twisted.internet.address.IPv4Address._bwHack "
+                          "is deprecated since Twisted 11.0",
+                          DeprecationWarning, stacklevel=2)
+
+
+
+class IPv6Address(_IPAddress):
+    """
+    An L{IPv6Address} represents the address of an IPv6 socket endpoint.
+
+    @ivar host: A string containing a colon-separated, hexadecimal formatted
+        IPv6 address; for example, "::1".
+    @type host: C{str}
+    """
+
+
+
+@implementer(IAddress)
+class UNIXAddress(FancyEqMixin, object):
+    """
+    Object representing a UNIX socket endpoint.
+
+    @ivar name: The filename associated with this socket.
+    @type name: C{str}
+    """
+
+    compareAttributes = ('name', )
+
+    def __init__(self, name, _bwHack = None):
+        self.name = name
+        if _bwHack is not None:
+            warnings.warn("twisted.internet.address.UNIXAddress._bwHack is deprecated since Twisted 11.0",
+                    DeprecationWarning, stacklevel=2)
+
+
+    if getattr(os.path, 'samefile', None) is not None:
+        def __eq__(self, other):
+            """
+            Overriding C{FancyEqMixin} to ensure the os level samefile
+            check is done if the name attributes do not match.
+            """
+            res = super(UNIXAddress, self).__eq__(other)
+            if not res and self.name and other.name:
+                try:
+                    return os.path.samefile(self.name, other.name)
+                except OSError:
+                    pass
+            return res
+
+
+    def __repr__(self):
+        return 'UNIXAddress(%r)' % (self.name,)
+
+
+    def __hash__(self):
+        if self.name is None:
+            return hash((self.__class__, None))
+        try:
+            s1 = os.stat(self.name)
+            return hash((s1.st_ino, s1.st_dev))
+        except OSError:
+            return hash(self.name)
+
+
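A short sketch of how these value objects compare, hash, and print (the
host, port, and socket path are arbitrary):

    from twisted.internet.address import IPv4Address, UNIXAddress

    a = IPv4Address('TCP', '127.0.0.1', 8080)
    b = IPv4Address('TCP', '127.0.0.1', 8080)
    assert a == b and hash(a) == hash(b)   # FancyEqMixin plus __hash__ above
    print(a)                               # IPv4Address(TCP, '127.0.0.1', 8080)

    u = UNIXAddress('/tmp/example.sock')
    print(u)                               # UNIXAddress('/tmp/example.sock')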
+
+# These are for buildFactory backwards compatibility due to
+# stupidity-induced inconsistency.
+
+class _ServerFactoryIPv4Address(IPv4Address):
+    """Backwards compatability hack. Just like IPv4Address in practice."""
+
+    def __eq__(self, other):
+        if isinstance(other, tuple):
+            warnings.warn("IPv4Address.__getitem__ is deprecated.  Use attributes instead.",
+                          category=DeprecationWarning, stacklevel=2)
+            return (self.host, self.port) == other
+        elif isinstance(other, IPv4Address):
+            a = (self.type, self.host, self.port)
+            b = (other.type, other.host, other.port)
+            return a == b
+        return False
diff --git a/ThirdParty/Twisted/twisted/internet/base.py b/ThirdParty/Twisted/twisted/internet/base.py
new file mode 100644
index 0000000..46e9217
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/base.py
@@ -0,0 +1,1194 @@
+# -*- test-case-name: twisted.test.test_internet,twisted.internet.test.test_core -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Very basic functionality for a Reactor implementation.
+"""
+
+from __future__ import division, absolute_import
+
+import socket # needed only for sync-dns
+from zope.interface import implementer, classImplements
+
+import sys
+import warnings
+from heapq import heappush, heappop, heapify
+
+import traceback
+
+from twisted.python.compat import set
+from twisted.python._utilpy3 import unsignedID
+from twisted.internet.interfaces import IReactorCore, IReactorTime, IReactorThreads
+from twisted.internet.interfaces import IResolverSimple, IReactorPluggableResolver
+from twisted.internet.interfaces import IConnector, IDelayedCall
+from twisted.internet import fdesc, main, error, abstract, defer, threads
+from twisted.python import log, failure, _reflectpy3 as reflect
+from twisted.python.runtime import seconds as runtimeSeconds, platform
+from twisted.internet.defer import Deferred, DeferredList
+
+# This import is for side-effects!  Even if you don't see any code using it
+# in this module, don't delete it.
+from twisted.python import threadable
+
+
+@implementer(IDelayedCall)
+class DelayedCall:
+
+    # enable .debug to record creator call stack, and it will be logged if
+    # an exception occurs while the function is being run
+    debug = False
+    _str = None
+
+    def __init__(self, time, func, args, kw, cancel, reset,
+                 seconds=runtimeSeconds):
+        """
+        @param time: Seconds from the epoch at which to call C{func}.
+        @param func: The callable to call.
+        @param args: The positional arguments to pass to the callable.
+        @param kw: The keyword arguments to pass to the callable.
+        @param cancel: A callable which will be called with this
+            DelayedCall before cancellation.
+        @param reset: A callable which will be called with this
+            DelayedCall after changing this DelayedCall's scheduled
+            execution time. The callable should adjust any necessary
+            scheduling details to ensure this DelayedCall is invoked
+            at the new appropriate time.
+        @param seconds: If provided, a no-argument callable which will be
+            used to determine the current time any time that information is
+            needed.
+        """
+        self.time, self.func, self.args, self.kw = time, func, args, kw
+        self.resetter = reset
+        self.canceller = cancel
+        self.seconds = seconds
+        self.cancelled = self.called = 0
+        self.delayed_time = 0
+        if self.debug:
+            self.creator = traceback.format_stack()[:-2]
+
+    def getTime(self):
+        """Return the time at which this call will fire
+
+        @rtype: C{float}
+        @return: The number of seconds after the epoch at which this call is
+        scheduled to be made.
+        """
+        return self.time + self.delayed_time
+
+    def cancel(self):
+        """Unschedule this call
+
+        @raise AlreadyCancelled: Raised if this call has already been
+        unscheduled.
+
+        @raise AlreadyCalled: Raised if this call has already been made.
+        """
+        if self.cancelled:
+            raise error.AlreadyCancelled
+        elif self.called:
+            raise error.AlreadyCalled
+        else:
+            self.canceller(self)
+            self.cancelled = 1
+            if self.debug:
+                self._str = str(self)
+            del self.func, self.args, self.kw
+
+    def reset(self, secondsFromNow):
+        """Reschedule this call for a different time
+
+        @type secondsFromNow: C{float}
+        @param secondsFromNow: The number of seconds from the time of the
+        C{reset} call at which this call will be scheduled.
+
+        @raise AlreadyCancelled: Raised if this call has been cancelled.
+        @raise AlreadyCalled: Raised if this call has already been made.
+        """
+        if self.cancelled:
+            raise error.AlreadyCancelled
+        elif self.called:
+            raise error.AlreadyCalled
+        else:
+            newTime = self.seconds() + secondsFromNow
+            if newTime < self.time:
+                self.delayed_time = 0
+                self.time = newTime
+                self.resetter(self)
+            else:
+                self.delayed_time = newTime - self.time
+
+    def delay(self, secondsLater):
+        """Reschedule this call for a later time
+
+        @type secondsLater: C{float}
+        @param secondsLater: The number of seconds after the originally
+        scheduled time for which to reschedule this call.
+
+        @raise AlreadyCancelled: Raised if this call has been cancelled.
+        @raise AlreadyCalled: Raised if this call has already been made.
+        """
+        if self.cancelled:
+            raise error.AlreadyCancelled
+        elif self.called:
+            raise error.AlreadyCalled
+        else:
+            self.delayed_time += secondsLater
+            if self.delayed_time < 0:
+                self.activate_delay()
+                self.resetter(self)
+
+    def activate_delay(self):
+        self.time += self.delayed_time
+        self.delayed_time = 0
+
+    def active(self):
+        """Determine whether this call is still pending
+
+        @rtype: C{bool}
+        @return: True if this call has not yet been made or cancelled,
+        False otherwise.
+        """
+        return not (self.cancelled or self.called)
+
+
+    def __le__(self, other):
+        """
+        Implement C{<=} operator between two L{DelayedCall} instances.
+
+        Comparison is based on the C{time} attribute (unadjusted by the
+        delayed time).
+        """
+        return self.time <= other.time
+
+
+    def __lt__(self, other):
+        """
+        Implement C{<} operator between two L{DelayedCall} instances.
+
+        Comparison is based on the C{time} attribute (unadjusted by the
+        delayed time).
+        """
+        return self.time < other.time
+
+
+    def __str__(self):
+        if self._str is not None:
+            return self._str
+        if hasattr(self, 'func'):
+            # This code should be replaced by a utility function in reflect;
+            # see ticket #6066:
+            if hasattr(self.func, '__qualname__'):
+                func = self.func.__qualname__
+            elif hasattr(self.func, '__name__'):
+                func = self.func.func_name
+                if hasattr(self.func, 'im_class'):
+                    func = self.func.im_class.__name__ + '.' + func
+            else:
+                func = reflect.safe_repr(self.func)
+        else:
+            func = None
+
+        now = self.seconds()
+        L = ["<DelayedCall 0x%x [%ss] called=%s cancelled=%s" % (
+                unsignedID(self), self.time - now, self.called,
+                self.cancelled)]
+        if func is not None:
+            L.extend((" ", func, "("))
+            if self.args:
+                L.append(", ".join([reflect.safe_repr(e) for e in self.args]))
+                if self.kw:
+                    L.append(", ")
+            if self.kw:
+                L.append(", ".join(['%s=%s' % (k, reflect.safe_repr(v)) for (k, v) in self.kw.items()]))
+            L.append(")")
+
+        if self.debug:
+            L.append("\n\ntraceback at creation: \n\n%s" % ('    '.join(self.creator)))
+        L.append('>')
+
+        return "".join(L)
+
+
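This class is what reactor.callLater() hands back to callers; a minimal
sketch of the user-facing lifecycle (the delays and message are arbitrary):

    from twisted.internet import reactor

    def fire(msg):
        print(msg)
        reactor.stop()

    call = reactor.callLater(2.0, fire, "fired")   # returns a DelayedCall
    call.delay(1.0)        # push the scheduled time one second later
    call.reset(0.5)        # or reschedule relative to "now" instead
    assert call.active()   # not yet called, not cancelled
    reactor.run()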
+
+@implementer(IResolverSimple)
+class ThreadedResolver(object):
+    """
+    L{ThreadedResolver} uses a reactor, a threadpool, and
+    L{socket.gethostbyname} to perform name lookups without blocking the
+    reactor thread.  It also supports timeouts independently from whatever
+    timeout logic L{socket.gethostbyname} might have.
+
+    @ivar reactor: The reactor the threadpool of which will be used to call
+        L{socket.gethostbyname} and the I/O thread of which the result will be
+        delivered.
+    """
+
+    def __init__(self, reactor):
+        self.reactor = reactor
+        self._runningQueries = {}
+
+
+    def _fail(self, name, err):
+        err = error.DNSLookupError("address %r not found: %s" % (name, err))
+        return failure.Failure(err)
+
+
+    def _cleanup(self, name, lookupDeferred):
+        userDeferred, cancelCall = self._runningQueries[lookupDeferred]
+        del self._runningQueries[lookupDeferred]
+        userDeferred.errback(self._fail(name, "timeout error"))
+
+
+    def _checkTimeout(self, result, name, lookupDeferred):
+        try:
+            userDeferred, cancelCall = self._runningQueries[lookupDeferred]
+        except KeyError:
+            pass
+        else:
+            del self._runningQueries[lookupDeferred]
+            cancelCall.cancel()
+
+            if isinstance(result, failure.Failure):
+                userDeferred.errback(self._fail(name, result.getErrorMessage()))
+            else:
+                userDeferred.callback(result)
+
+
+    def getHostByName(self, name, timeout = (1, 3, 11, 45)):
+        """
+        See L{twisted.internet.interfaces.IResolverSimple.getHostByName}.
+
+        Note that the elements of C{timeout} are summed and the result is used
+        as a timeout for the lookup.  Any intermediate timeout or retry logic
+        is left up to the platform via L{socket.gethostbyname}.
+        """
+        if timeout:
+            timeoutDelay = sum(timeout)
+        else:
+            timeoutDelay = 60
+        userDeferred = defer.Deferred()
+        lookupDeferred = threads.deferToThreadPool(
+            self.reactor, self.reactor.getThreadPool(),
+            socket.gethostbyname, name)
+        cancelCall = self.reactor.callLater(
+            timeoutDelay, self._cleanup, name, lookupDeferred)
+        self._runningQueries[lookupDeferred] = (userDeferred, cancelCall)
+        lookupDeferred.addBoth(self._checkTimeout, name, lookupDeferred)
+        return userDeferred
+
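Callers normally reach this resolver through reactor.resolve() (defined
further down in ReactorBase), which returns a Deferred; a rough sketch with
an example hostname:

    from twisted.internet import reactor

    def gotIP(ip):
        print("resolved to %s" % (ip,))
        reactor.stop()

    def failed(reason):
        print("lookup failed: %s" % (reason.getErrorMessage(),))
        reactor.stop()

    reactor.resolve("example.com").addCallbacks(gotIP, failed)
    reactor.run()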
+
+
+@implementer(IResolverSimple)
+class BlockingResolver:
+
+    def getHostByName(self, name, timeout = (1, 3, 11, 45)):
+        try:
+            address = socket.gethostbyname(name)
+        except socket.error:
+            msg = "address %r not found" % (name,)
+            err = error.DNSLookupError(msg)
+            return defer.fail(err)
+        else:
+            return defer.succeed(address)
+
+
+class _ThreePhaseEvent(object):
+    """
+    Collection of callables (with arguments) which can be invoked as a group in
+    a particular order.
+
+    This provides the underlying implementation for the reactor's system event
+    triggers.  An instance of this class tracks triggers for all phases of a
+    single type of event.
+
+    @ivar before: A list of the before-phase triggers containing three-tuples
+        of a callable, a tuple of positional arguments, and a dict of keyword
+        arguments
+
+    @ivar finishedBefore: A list of the before-phase triggers which have
+        already been executed.  This is only populated in the C{'BEFORE'} state.
+
+    @ivar during: A list of the during-phase triggers containing three-tuples
+        of a callable, a tuple of positional arguments, and a dict of keyword
+        arguments
+
+    @ivar after: A list of the after-phase triggers containing three-tuples
+        of a callable, a tuple of positional arguments, and a dict of keyword
+        arguments
+
+    @ivar state: A string indicating what is currently going on with this
+        object.  One of C{'BASE'} (for when nothing in particular is happening;
+        this is the initial value), C{'BEFORE'} (when the before-phase triggers
+        are in the process of being executed).
+    """
+    def __init__(self):
+        self.before = []
+        self.during = []
+        self.after = []
+        self.state = 'BASE'
+
+
+    def addTrigger(self, phase, callable, *args, **kwargs):
+        """
+        Add a trigger to the indicated phase.
+
+        @param phase: One of C{'before'}, C{'during'}, or C{'after'}.
+
+        @param callable: An object to be called when this event is triggered.
+        @param *args: Positional arguments to pass to C{callable}.
+        @param **kwargs: Keyword arguments to pass to C{callable}.
+
+        @return: An opaque handle which may be passed to L{removeTrigger} to
+            reverse the effects of calling this method.
+        """
+        if phase not in ('before', 'during', 'after'):
+            raise KeyError("invalid phase")
+        getattr(self, phase).append((callable, args, kwargs))
+        return phase, callable, args, kwargs
+
+
+    def removeTrigger(self, handle):
+        """
+        Remove a previously added trigger callable.
+
+        @param handle: An object previously returned by L{addTrigger}.  The
+            trigger added by that call will be removed.
+
+        @raise ValueError: If the trigger associated with C{handle} has already
+            been removed or if C{handle} is not a valid handle.
+        """
+        return getattr(self, 'removeTrigger_' + self.state)(handle)
+
+
+    def removeTrigger_BASE(self, handle):
+        """
+        Just try to remove the trigger.
+
+        @see: removeTrigger
+        """
+        try:
+            phase, callable, args, kwargs = handle
+        except (TypeError, ValueError):
+            raise ValueError("invalid trigger handle")
+        else:
+            if phase not in ('before', 'during', 'after'):
+                raise KeyError("invalid phase")
+            getattr(self, phase).remove((callable, args, kwargs))
+
+
+    def removeTrigger_BEFORE(self, handle):
+        """
+        Remove the trigger if it has yet to be executed, otherwise emit a
+        warning that in the future an exception will be raised when removing an
+        already-executed trigger.
+
+        @see: removeTrigger
+        """
+        phase, callable, args, kwargs = handle
+        if phase != 'before':
+            return self.removeTrigger_BASE(handle)
+        if (callable, args, kwargs) in self.finishedBefore:
+            warnings.warn(
+                "Removing already-fired system event triggers will raise an "
+                "exception in a future version of Twisted.",
+                category=DeprecationWarning,
+                stacklevel=3)
+        else:
+            self.removeTrigger_BASE(handle)
+
+
+    def fireEvent(self):
+        """
+        Call the triggers added to this event.
+        """
+        self.state = 'BEFORE'
+        self.finishedBefore = []
+        beforeResults = []
+        while self.before:
+            callable, args, kwargs = self.before.pop(0)
+            self.finishedBefore.append((callable, args, kwargs))
+            try:
+                result = callable(*args, **kwargs)
+            except:
+                log.err()
+            else:
+                if isinstance(result, Deferred):
+                    beforeResults.append(result)
+        DeferredList(beforeResults).addCallback(self._continueFiring)
+
+
+    def _continueFiring(self, ignored):
+        """
+        Call the during and after phase triggers for this event.
+        """
+        self.state = 'BASE'
+        self.finishedBefore = []
+        for phase in self.during, self.after:
+            while phase:
+                callable, args, kwargs = phase.pop(0)
+                try:
+                    callable(*args, **kwargs)
+                except:
+                    log.err()
+
+
+
+@implementer(IReactorCore, IReactorTime, IReactorPluggableResolver)
+class ReactorBase(object):
+    """
+    Default base class for Reactors.
+
+    @type _stopped: C{bool}
+    @ivar _stopped: A flag which is true between paired calls to C{reactor.run}
+        and C{reactor.stop}.  This should be replaced with an explicit state
+        machine.
+
+    @type _justStopped: C{bool}
+    @ivar _justStopped: A flag which is true between the time C{reactor.stop}
+        is called and the time the shutdown system event is fired.  This is
+        used to determine whether that event should be fired after each
+        iteration through the mainloop.  This should be replaced with an
+        explicit state machine.
+
+    @type _started: C{bool}
+    @ivar _started: A flag which is true from the time C{reactor.run} is called
+        until the time C{reactor.run} returns.  This is used to prevent calls
+        to C{reactor.run} on a running reactor.  This should be replaced with
+        an explicit state machine.
+
+    @ivar running: See L{IReactorCore.running}
+
+    @ivar _registerAsIOThread: A flag controlling whether the reactor will
+        register the thread it is running in as the I/O thread when it starts.
+        If C{True}, registration will be done, otherwise it will not be.
+    """
+
+    _registerAsIOThread = True
+
+    _stopped = True
+    installed = False
+    usingThreads = False
+    resolver = BlockingResolver()
+
+    __name__ = "twisted.internet.reactor"
+
+    def __init__(self):
+        self.threadCallQueue = []
+        self._eventTriggers = {}
+        self._pendingTimedCalls = []
+        self._newTimedCalls = []
+        self._cancellations = 0
+        self.running = False
+        self._started = False
+        self._justStopped = False
+        self._startedBefore = False
+        # reactor internal readers, e.g. the waker.
+        self._internalReaders = set()
+        self.waker = None
+
+        # Arrange for the running attribute to change to True at the right time
+        # and let a subclass possibly do other things at that time (eg install
+        # signal handlers).
+        self.addSystemEventTrigger(
+            'during', 'startup', self._reallyStartRunning)
+        self.addSystemEventTrigger('during', 'shutdown', self.crash)
+        self.addSystemEventTrigger('during', 'shutdown', self.disconnectAll)
+
+        if platform.supportsThreads():
+            self._initThreads()
+        self.installWaker()
+
+    # override in subclasses
+
+    _lock = None
+
+    def installWaker(self):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement installWaker")
+
+    def installResolver(self, resolver):
+        assert IResolverSimple.providedBy(resolver)
+        oldResolver = self.resolver
+        self.resolver = resolver
+        return oldResolver
+
+    def wakeUp(self):
+        """
+        Wake up the event loop.
+        """
+        if self.waker:
+            self.waker.wakeUp()
+        # if the waker isn't installed, the reactor isn't running, and
+        # therefore doesn't need to be woken up
+
+    def doIteration(self, delay):
+        """
+        Do one iteration over the readers and writers which have been added.
+        """
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement doIteration")
+
+    def addReader(self, reader):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement addReader")
+
+    def addWriter(self, writer):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement addWriter")
+
+    def removeReader(self, reader):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement removeReader")
+
+    def removeWriter(self, writer):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement removeWriter")
+
+    def removeAll(self):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement removeAll")
+
+
+    def getReaders(self):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement getReaders")
+
+
+    def getWriters(self):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement getWriters")
+
+
+    def resolve(self, name, timeout = (1, 3, 11, 45)):
+        """Return a Deferred that will resolve a hostname.
+        """
+        if not name:
+            # XXX - This is *less than* '::', and will screw up IPv6 servers
+            return defer.succeed('0.0.0.0')
+        if abstract.isIPAddress(name):
+            return defer.succeed(name)
+        return self.resolver.getHostByName(name, timeout)
+
+    # Installation.
+
+    # IReactorCore
+    def stop(self):
+        """
+        See twisted.internet.interfaces.IReactorCore.stop.
+        """
+        if self._stopped:
+            raise error.ReactorNotRunning(
+                "Can't stop reactor that isn't running.")
+        self._stopped = True
+        self._justStopped = True
+        self._startedBefore = True
+
+
+    def crash(self):
+        """
+        See twisted.internet.interfaces.IReactorCore.crash.
+
+        Reset reactor state tracking attributes and re-initialize certain
+        state-transition helpers which were set up in C{__init__} but later
+        destroyed (through use).
+        """
+        self._started = False
+        self.running = False
+        self.addSystemEventTrigger(
+            'during', 'startup', self._reallyStartRunning)
+
+    def sigInt(self, *args):
+        """Handle a SIGINT interrupt.
+        """
+        log.msg("Received SIGINT, shutting down.")
+        self.callFromThread(self.stop)
+
+    def sigBreak(self, *args):
+        """Handle a SIGBREAK interrupt.
+        """
+        log.msg("Received SIGBREAK, shutting down.")
+        self.callFromThread(self.stop)
+
+    def sigTerm(self, *args):
+        """Handle a SIGTERM interrupt.
+        """
+        log.msg("Received SIGTERM, shutting down.")
+        self.callFromThread(self.stop)
+
+    def disconnectAll(self):
+        """Disconnect every reader, and writer in the system.
+        """
+        selectables = self.removeAll()
+        for reader in selectables:
+            log.callWithLogger(reader,
+                               reader.connectionLost,
+                               failure.Failure(main.CONNECTION_LOST))
+
+
+    def iterate(self, delay=0):
+        """See twisted.internet.interfaces.IReactorCore.iterate.
+        """
+        self.runUntilCurrent()
+        self.doIteration(delay)
+
+
+    def fireSystemEvent(self, eventType):
+        """See twisted.internet.interfaces.IReactorCore.fireSystemEvent.
+        """
+        event = self._eventTriggers.get(eventType)
+        if event is not None:
+            event.fireEvent()
+
+
+    def addSystemEventTrigger(self, _phase, _eventType, _f, *args, **kw):
+        """See twisted.internet.interfaces.IReactorCore.addSystemEventTrigger.
+        """
+        assert callable(_f), "%s is not callable" % _f
+        if _eventType not in self._eventTriggers:
+            self._eventTriggers[_eventType] = _ThreePhaseEvent()
+        return (_eventType, self._eventTriggers[_eventType].addTrigger(
+            _phase, _f, *args, **kw))
+
+
+    def removeSystemEventTrigger(self, triggerID):
+        """See twisted.internet.interfaces.IReactorCore.removeSystemEventTrigger.
+        """
+        eventType, handle = triggerID
+        self._eventTriggers[eventType].removeTrigger(handle)
+
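A short sketch of these triggers from application code (the callables are
placeholders):

    from twisted.internet import reactor

    def beforeShutdown():
        print("flushing state")

    def afterShutdown():
        print("all shutdown triggers ran")

    reactor.addSystemEventTrigger('before', 'shutdown', beforeShutdown)
    reactor.addSystemEventTrigger('after', 'shutdown', afterShutdown)
    reactor.callWhenRunning(reactor.stop)   # stopping fires 'shutdown'
    reactor.run()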
+
+    def callWhenRunning(self, _callable, *args, **kw):
+        """See twisted.internet.interfaces.IReactorCore.callWhenRunning.
+        """
+        if self.running:
+            _callable(*args, **kw)
+        else:
+            return self.addSystemEventTrigger('after', 'startup',
+                                              _callable, *args, **kw)
+
+    def startRunning(self):
+        """
+        Method called when reactor starts: do some initialization and fire
+        startup events.
+
+        Don't call this directly, call reactor.run() instead: it should take
+        care of calling this.
+
+        This method is somewhat misnamed.  The reactor will not necessarily be
+        in the running state by the time this method returns.  The only
+        guarantee is that it will be on its way to the running state.
+        """
+        if self._started:
+            raise error.ReactorAlreadyRunning()
+        if self._startedBefore:
+            raise error.ReactorNotRestartable()
+        self._started = True
+        self._stopped = False
+        if self._registerAsIOThread:
+            threadable.registerAsIOThread()
+        self.fireSystemEvent('startup')
+
+
+    def _reallyStartRunning(self):
+        """
+        Method called to transition to the running state.  This should happen
+        in the I{during startup} event trigger phase.
+        """
+        self.running = True
+
+    # IReactorTime
+
+    seconds = staticmethod(runtimeSeconds)
+
+    def callLater(self, _seconds, _f, *args, **kw):
+        """See twisted.internet.interfaces.IReactorTime.callLater.
+        """
+        assert callable(_f), "%s is not callable" % _f
+        assert sys.maxsize >= _seconds >= 0, \
+               "%s is not greater than or equal to 0 seconds" % (_seconds,)
+        tple = DelayedCall(self.seconds() + _seconds, _f, args, kw,
+                           self._cancelCallLater,
+                           self._moveCallLaterSooner,
+                           seconds=self.seconds)
+        self._newTimedCalls.append(tple)
+        return tple
+
+    def _moveCallLaterSooner(self, tple):
+        # Linear time find: slow.
+        heap = self._pendingTimedCalls
+        try:
+            pos = heap.index(tple)
+
+            # Move elt up the heap until it rests at the right place.
+            elt = heap[pos]
+            while pos != 0:
+                parent = (pos-1) // 2
+                if heap[parent] <= elt:
+                    break
+                # move parent down
+                heap[pos] = heap[parent]
+                pos = parent
+            heap[pos] = elt
+        except ValueError:
+            # element was not found in heap - oh well...
+            pass
+
+    def _cancelCallLater(self, tple):
+        self._cancellations+=1
+
+
+    def getDelayedCalls(self):
+        """Return all the outstanding delayed calls in the system.
+        They are returned in no particular order.
+        This method is not efficient -- it is really only meant for
+        test cases."""
+        return [x for x in (self._pendingTimedCalls + self._newTimedCalls) if not x.cancelled]
+
+    def _insertNewDelayedCalls(self):
+        for call in self._newTimedCalls:
+            if call.cancelled:
+                self._cancellations-=1
+            else:
+                call.activate_delay()
+                heappush(self._pendingTimedCalls, call)
+        self._newTimedCalls = []
+
+    def timeout(self):
+        # insert new delayed calls to make sure to include them in timeout value
+        self._insertNewDelayedCalls()
+
+        if not self._pendingTimedCalls:
+            return None
+
+        return max(0, self._pendingTimedCalls[0].time - self.seconds())
+
+
+    def runUntilCurrent(self):
+        """Run all pending timed calls.
+        """
+        if self.threadCallQueue:
+            # Keep track of how many calls we actually make, as we're
+            # making them, in case another call is added to the queue
+            # while we're in this loop.
+            count = 0
+            total = len(self.threadCallQueue)
+            for (f, a, kw) in self.threadCallQueue:
+                try:
+                    f(*a, **kw)
+                except:
+                    log.err()
+                count += 1
+                if count == total:
+                    break
+            del self.threadCallQueue[:count]
+            if self.threadCallQueue:
+                self.wakeUp()
+
+        # insert new delayed calls now
+        self._insertNewDelayedCalls()
+
+        now = self.seconds()
+        while self._pendingTimedCalls and (self._pendingTimedCalls[0].time <= now):
+            call = heappop(self._pendingTimedCalls)
+            if call.cancelled:
+                self._cancellations-=1
+                continue
+
+            if call.delayed_time > 0:
+                call.activate_delay()
+                heappush(self._pendingTimedCalls, call)
+                continue
+
+            try:
+                call.called = 1
+                call.func(*call.args, **call.kw)
+            except:
+                log.deferr()
+                if hasattr(call, "creator"):
+                    e = "\n"
+                    e += " C: previous exception occurred in " + \
+                         "a DelayedCall created here:\n"
+                    e += " C:"
+                    e += "".join(call.creator).rstrip().replace("\n","\n C:")
+                    e += "\n"
+                    log.msg(e)
+
+
+        if (self._cancellations > 50 and
+             self._cancellations > len(self._pendingTimedCalls) >> 1):
+            self._cancellations = 0
+            self._pendingTimedCalls = [x for x in self._pendingTimedCalls
+                                       if not x.cancelled]
+            heapify(self._pendingTimedCalls)
+
+        if self._justStopped:
+            self._justStopped = False
+            self.fireSystemEvent("shutdown")
+
+    # IReactorProcess
+
+    def _checkProcessArgs(self, args, env):
+        """
+        Check for valid arguments and environment to spawnProcess.
+
+        @return: A two element tuple giving values to use when creating the
+        process.  The first element of the tuple is a C{list} of C{str}
+        giving the values for argv of the child process.  The second element
+        of the tuple is either C{None} if C{env} was C{None} or a C{dict}
+        mapping C{str} environment keys to C{str} environment values.
+        """
+        # Any unicode string which Python would successfully implicitly
+        # encode to a byte string would have worked before these explicit
+        # checks were added.  Anything which would have failed with a
+        # UnicodeEncodeError during that implicit encoding step would have
+        # raised an exception in the child process and that would have been
+        # a pain in the butt to debug.
+        #
+        # So, we will explicitly attempt the same encoding which Python
+        # would implicitly do later.  If it fails, we will report an error
+        # without ever spawning a child process.  If it succeeds, we'll save
+        # the result so that Python doesn't need to do it implicitly later.
+        #
+        # For any unicode which we can actually encode, we'll also issue a
+        # deprecation warning, because no one should be passing unicode here
+        # anyway.
+        #
+        # -exarkun
+        defaultEncoding = sys.getdefaultencoding()
+
+        # Common check function
+        def argChecker(arg):
+            """
+            Return either a str or None.  If the given value is not
+            allowable for some reason, None is returned.  Otherwise, a
+            possibly different object which should be used in place of arg
+            is returned.  This forces unicode encoding to happen now, rather
+            than implicitly later.
+            """
+            if isinstance(arg, unicode):
+                try:
+                    arg = arg.encode(defaultEncoding)
+                except UnicodeEncodeError:
+                    return None
+                warnings.warn(
+                    "Argument strings and environment keys/values passed to "
+                    "reactor.spawnProcess should be str, not unicode.",
+                    category=DeprecationWarning,
+                    stacklevel=4)
+            if isinstance(arg, str) and '\0' not in arg:
+                return arg
+            return None
+
+        # Make a few tests to check input validity
+        if not isinstance(args, (tuple, list)):
+            raise TypeError("Arguments must be a tuple or list")
+
+        outputArgs = []
+        for arg in args:
+            arg = argChecker(arg)
+            if arg is None:
+                raise TypeError("Arguments contain a non-string value")
+            else:
+                outputArgs.append(arg)
+
+        outputEnv = None
+        if env is not None:
+            outputEnv = {}
+            for key, val in env.iteritems():
+                key = argChecker(key)
+                if key is None:
+                    raise TypeError("Environment contains a non-string key")
+                val = argChecker(val)
+                if val is None:
+                    raise TypeError("Environment contains a non-string value")
+                outputEnv[key] = val
+        return outputArgs, outputEnv
+
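This check backs the argv/env handling for reactor.spawnProcess(); a
POSIX-only sketch (the executable and ProcessProtocol are illustrative):

    from twisted.internet import protocol, reactor

    class Collect(protocol.ProcessProtocol):
        def outReceived(self, data):
            print(data)

        def processEnded(self, reason):
            reactor.stop()

    # argv and env values must be plain byte strings; unencodable unicode is
    # rejected, and encodable unicode triggers the DeprecationWarning above.
    reactor.spawnProcess(Collect(), "/bin/echo", ["echo", "hello"],
                         env={"PATH": "/bin:/usr/bin"})
    reactor.run()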
+    # IReactorThreads
+    if platform.supportsThreads():
+        threadpool = None
+        # ID of the trigger starting the threadpool
+        _threadpoolStartupID = None
+        # ID of the trigger stopping the threadpool
+        threadpoolShutdownID = None
+
+        def _initThreads(self):
+            self.usingThreads = True
+            self.resolver = ThreadedResolver(self)
+
+        def callFromThread(self, f, *args, **kw):
+            """
+            See L{twisted.internet.interfaces.IReactorThreads.callFromThread}.
+            """
+            assert callable(f), "%s is not callable" % (f,)
+            # lists are thread-safe in CPython, but not in Jython
+            # this is probably a bug in Jython, but until fixed this code
+            # won't work in Jython.
+            self.threadCallQueue.append((f, args, kw))
+            self.wakeUp()
+
+        def _initThreadPool(self):
+            """
+            Create the threadpool accessible with callInThread.
+            """
+            from twisted.python import threadpool
+            self.threadpool = threadpool.ThreadPool(
+                0, 10, 'twisted.internet.reactor')
+            self._threadpoolStartupID = self.callWhenRunning(
+                self.threadpool.start)
+            self.threadpoolShutdownID = self.addSystemEventTrigger(
+                'during', 'shutdown', self._stopThreadPool)
+
+        def _uninstallHandler(self):
+            pass
+
+        def _stopThreadPool(self):
+            """
+            Stop the reactor threadpool.  This method is only valid if there
+            is currently a threadpool (created by L{_initThreadPool}).  It
+            is not intended to be called directly; instead, it will be
+            called by a shutdown trigger created in L{_initThreadPool}.
+            """
+            triggers = [self._threadpoolStartupID, self.threadpoolShutdownID]
+            for trigger in filter(None, triggers):
+                try:
+                    self.removeSystemEventTrigger(trigger)
+                except ValueError:
+                    pass
+            self._threadpoolStartupID = None
+            self.threadpoolShutdownID = None
+            self.threadpool.stop()
+            self.threadpool = None
+
+
+        def getThreadPool(self):
+            """
+            See L{twisted.internet.interfaces.IReactorThreads.getThreadPool}.
+            """
+            if self.threadpool is None:
+                self._initThreadPool()
+            return self.threadpool
+
+
+        def callInThread(self, _callable, *args, **kwargs):
+            """
+            See L{twisted.internet.interfaces.IReactorThreads.callInThread}.
+            """
+            self.getThreadPool().callInThread(_callable, *args, **kwargs)
+
+        def suggestThreadPoolSize(self, size):
+            """
+            See L{twisted.internet.interfaces.IReactorThreads.suggestThreadPoolSize}.
+            """
+            self.getThreadPool().adjustPoolsize(maxthreads=size)
+    else:
+        # This is for signal handlers.
+        def callFromThread(self, f, *args, **kw):
+            assert callable(f), "%s is not callable" % (f,)
+            # See comment in the other callFromThread implementation.
+            self.threadCallQueue.append((f, args, kw))
+
+if platform.supportsThreads():
+    classImplements(ReactorBase, IReactorThreads)
+
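A compact sketch of the thread APIs above: blocking work runs in the pool
and reports back on the reactor thread (the workload is a placeholder):

    from twisted.internet import reactor

    def report(result):
        print("result: %d" % (result,))
        reactor.stop()

    def blockingWork():
        # Runs in a pool thread; only callFromThread may touch the reactor.
        reactor.callFromThread(report, sum(range(1000000)))

    reactor.suggestThreadPoolSize(5)
    reactor.callInThread(blockingWork)
    reactor.run()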
+
+@implementer(IConnector)
+class BaseConnector:
+    """Basic implementation of connector.
+
+    State can be: "connecting", "connected", "disconnected"
+    """
+    timeoutID = None
+    factoryStarted = 0
+
+    def __init__(self, factory, timeout, reactor):
+        self.state = "disconnected"
+        self.reactor = reactor
+        self.factory = factory
+        self.timeout = timeout
+
+    def disconnect(self):
+        """Disconnect whatever our state is."""
+        if self.state == 'connecting':
+            self.stopConnecting()
+        elif self.state == 'connected':
+            self.transport.loseConnection()
+
+    def connect(self):
+        """Start connection to remote server."""
+        if self.state != "disconnected":
+            raise RuntimeError("can't connect in this state")
+
+        self.state = "connecting"
+        if not self.factoryStarted:
+            self.factory.doStart()
+            self.factoryStarted = 1
+        self.transport = transport = self._makeTransport()
+        if self.timeout is not None:
+            self.timeoutID = self.reactor.callLater(self.timeout, transport.failIfNotConnected, error.TimeoutError())
+        self.factory.startedConnecting(self)
+
+    def stopConnecting(self):
+        """Stop attempting to connect."""
+        if self.state != "connecting":
+            raise error.NotConnectingError("we're not trying to connect")
+
+        self.state = "disconnected"
+        self.transport.failIfNotConnected(error.UserError())
+        del self.transport
+
+    def cancelTimeout(self):
+        if self.timeoutID is not None:
+            try:
+                self.timeoutID.cancel()
+            except ValueError:
+                pass
+            del self.timeoutID
+
+    def buildProtocol(self, addr):
+        self.state = "connected"
+        self.cancelTimeout()
+        return self.factory.buildProtocol(addr)
+
+    def connectionFailed(self, reason):
+        self.cancelTimeout()
+        self.transport = None
+        self.state = "disconnected"
+        self.factory.clientConnectionFailed(self, reason)
+        if self.state == "disconnected":
+            # factory hasn't called our connect() method
+            self.factory.doStop()
+            self.factoryStarted = 0
+
+    def connectionLost(self, reason):
+        self.state = "disconnected"
+        self.factory.clientConnectionLost(self, reason)
+        if self.state == "disconnected":
+            # factory hasn't called our connect() method
+            self.factory.doStop()
+            self.factoryStarted = 0
+
+    def getDestination(self):
+        raise NotImplementedError(
+            reflect.qual(self.__class__) + " did not implement "
+            "getDestination")
+
+
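This connector is the kind of object reactor.connectTCP() returns; a sketch
of the client-side wiring (host, port, and protocol are examples):

    from twisted.internet import protocol, reactor

    class Hello(protocol.Protocol):
        def connectionMade(self):
            self.transport.write(b"hello\r\n")
            self.transport.loseConnection()

    class HelloFactory(protocol.ClientFactory):
        protocol = Hello

        def clientConnectionLost(self, connector, reason):
            reactor.stop()

        def clientConnectionFailed(self, connector, reason):
            reactor.stop()

    connector = reactor.connectTCP("localhost", 8007, HelloFactory(),
                                   timeout=30)
    # connector.disconnect() / connector.stopConnecting() drive the state
    # machine implemented above.
    reactor.run()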
+
+class BasePort(abstract.FileDescriptor):
+    """Basic implementation of a ListeningPort.
+
+    Note: This does not actually implement IListeningPort.
+    """
+
+    addressFamily = None
+    socketType = None
+
+    def createInternetSocket(self):
+        s = socket.socket(self.addressFamily, self.socketType)
+        s.setblocking(0)
+        fdesc._setCloseOnExec(s.fileno())
+        return s
+
+
+    def doWrite(self):
+        """Raises a RuntimeError"""
+        raise RuntimeError(
+            "doWrite called on a %s" % reflect.qual(self.__class__))
+
+
+
+class _SignalReactorMixin(object):
+    """
+    Private mixin to manage signals: it installs signal handlers at start time,
+    and defines the run method.
+
+    It can only be used mixed in with L{ReactorBase}, and has to be defined
+    first in the inheritance (so that method resolution order finds
+    startRunning first).
+
+    @type _installSignalHandlers: C{bool}
+    @ivar _installSignalHandlers: A flag which indicates whether any signal
+        handlers will be installed during startup.  This includes handlers for
+        SIGCHLD to monitor child processes, and SIGINT, SIGTERM, and SIGBREAK
+        to stop the reactor.
+    """
+
+    _installSignalHandlers = False
+
+    def _handleSignals(self):
+        """
+        Install the signal handlers for the Twisted event loop.
+        """
+        try:
+            import signal
+        except ImportError:
+            log.msg("Warning: signal module unavailable -- "
+                    "not installing signal handlers.")
+            return
+
+        if signal.getsignal(signal.SIGINT) == signal.default_int_handler:
+            # only handle if there isn't already a handler, e.g. for Pdb.
+            signal.signal(signal.SIGINT, self.sigInt)
+        signal.signal(signal.SIGTERM, self.sigTerm)
+
+        # Catch Ctrl-Break in windows
+        if hasattr(signal, "SIGBREAK"):
+            signal.signal(signal.SIGBREAK, self.sigBreak)
+
+
+    def startRunning(self, installSignalHandlers=True):
+        """
+        Extend the base implementation in order to remember whether signal
+        handlers should be installed later.
+
+        @type installSignalHandlers: C{bool}
+        @param installSignalHandlers: A flag which, if set, indicates that
+            handlers for a number of (implementation-defined) signals should be
+            installed during startup.
+        """
+        self._installSignalHandlers = installSignalHandlers
+        ReactorBase.startRunning(self)
+
+
+    def _reallyStartRunning(self):
+        """
+        Extend the base implementation by also installing signal handlers, if
+        C{self._installSignalHandlers} is true.
+        """
+        ReactorBase._reallyStartRunning(self)
+        if self._installSignalHandlers:
+            # Make sure this happens before after-startup events, since the
+            # expectation of after-startup is that the reactor is fully
+            # initialized.  Don't do it right away for historical reasons
+            # (perhaps some before-startup triggers don't want there to be a
+            # custom SIGCHLD handler so that they can run child processes with
+            # some blocking api).
+            self._handleSignals()
+
+
+    def run(self, installSignalHandlers=True):
+        self.startRunning(installSignalHandlers=installSignalHandlers)
+        self.mainLoop()
+
+
+    def mainLoop(self):
+        while self._started:
+            try:
+                while self._started:
+                    # Advance simulation time in delayed event
+                    # processors.
+                    self.runUntilCurrent()
+                    t2 = self.timeout()
+                    t = self.running and t2
+                    self.doIteration(t)
+            except:
+                log.msg("Unexpected error in main loop.")
+                log.err()
+            else:
+                log.msg('Main loop terminated.')
+
+
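Given this mixin, an embedding application that owns its own signal handling
can opt out of the handlers above (a minimal sketch with a placeholder
workload):

    from twisted.internet import reactor

    reactor.callWhenRunning(reactor.stop)
    reactor.run(installSignalHandlers=False)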
+
+__all__ = []
diff --git a/ThirdParty/Twisted/twisted/internet/cfreactor.py b/ThirdParty/Twisted/twisted/internet/cfreactor.py
new file mode 100644
index 0000000..ef6bf7d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/cfreactor.py
@@ -0,0 +1,501 @@
+# -*- test-case-name: twisted.internet.test.test_core -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A reactor for integrating with U{CFRunLoop<http://bit.ly/cfrunloop>}, the
+CoreFoundation main loop used by MacOS X.
+
+This is useful for integrating Twisted with U{PyObjC<http://pyobjc.sf.net/>}
+applications.
+"""
+
+__all__ = [
+    'install',
+    'CFReactor'
+]
+
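As with other alternate reactors, install() is expected to be called before
twisted.internet.reactor is first imported; a minimal sketch (no PyObjC
specifics shown):

    from twisted.internet import cfreactor
    cfreactor.install()

    from twisted.internet import reactor   # the CFReactor installed above
    reactor.callWhenRunning(reactor.stop)
    reactor.run()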
+import sys
+
+from zope.interface import implements
+
+from twisted.internet.interfaces import IReactorFDSet
+from twisted.internet.posixbase import PosixReactorBase, _Waker
+from twisted.internet.posixbase import _NO_FILEDESC
+
+from twisted.python import log
+
+from CoreFoundation import (
+    CFRunLoopAddSource, CFRunLoopRemoveSource, CFRunLoopGetMain, CFRunLoopRun,
+    CFRunLoopStop, CFRunLoopTimerCreate, CFRunLoopAddTimer,
+    CFRunLoopTimerInvalidate, kCFAllocatorDefault, kCFRunLoopCommonModes,
+    CFAbsoluteTimeGetCurrent)
+
+from CFNetwork import (
+    CFSocketCreateWithNative, CFSocketSetSocketFlags, CFSocketEnableCallBacks,
+    CFSocketCreateRunLoopSource, CFSocketDisableCallBacks, CFSocketInvalidate,
+    kCFSocketWriteCallBack, kCFSocketReadCallBack, kCFSocketConnectCallBack,
+    kCFSocketAutomaticallyReenableReadCallBack,
+    kCFSocketAutomaticallyReenableWriteCallBack)
+
+
+_READ = 0
+_WRITE = 1
+_preserveSOError = 1 << 6
+
+
+class _WakerPlus(_Waker):
+    """
+    The normal Twisted waker will simply wake up the main loop, which causes an
+    iteration to run, which in turn causes L{PosixReactorBase.runUntilCurrent}
+    to get invoked.
+
+    L{CFReactor} has a slightly different model of iteration, though: rather
+    than have each iteration process the thread queue, then timed calls, then
+    file descriptors, each callback is run as it is dispatched by the CFRunLoop
+    observer which triggered it.
+
+    So this waker needs to not only unblock the loop, but also make sure the
+    work gets done; so, it reschedules the invocation of C{runUntilCurrent} to
+    be immediate (0 seconds from now) even if there is no timed call work to
+    do.
+    """
+
+    def doRead(self):
+        """
+        Wake up the loop and force C{runUntilCurrent} to run immediately in the
+        next timed iteration.
+        """
+        result = _Waker.doRead(self)
+        self.reactor._scheduleSimulate(True)
+        return result
+
+
+
+class CFReactor(PosixReactorBase):
+    """
+    The CoreFoundation reactor.
+
+    You probably want to use this via the L{install} API.
+
+    @ivar _fdmap: a dictionary, mapping an integer (a file descriptor) to a
+        4-tuple of:
+
+            - source: a C{CFRunLoopSource}; the source associated with this
+              socket.
+            - socket: a C{CFSocket} wrapping the file descriptor.
+            - descriptor: an L{IReadDescriptor} and/or L{IWriteDescriptor}
+              provider.
+            - read-write: a 2-C{list} of booleans: respectively, whether this
+              descriptor is currently registered for reading or registered for
+              writing.
+
+    @ivar _idmap: a dictionary, mapping the id() of an L{IReadDescriptor} or
+        L{IWriteDescriptor} to a C{fd} in L{_fdmap}.  Implemented in this
+        manner so that we don't have to rely (even more) on the hashability of
+        L{IReadDescriptor} providers, and we know that they won't be collected
+        since these are kept in sync with C{_fdmap}.  Necessary because the
+        .fileno() of a file descriptor may change at will, so we need to be
+        able to look up what its file descriptor I{used} to be, so that we can
+        look it up in C{_fdmap}
+
+    @ivar _cfrunloop: the L{CFRunLoop} pyobjc object wrapped by this reactor.
+
+    @ivar _inCFLoop: Is L{CFRunLoopRun} currently running?
+
+    @type _inCFLoop: C{bool}
+
+    @ivar _currentSimulator: if a CFTimer is currently scheduled with the CF
+        run loop to run Twisted callLater calls, this is a reference to it.
+        Otherwise, it is C{None}
+    """
+
+    implements(IReactorFDSet)
+
+    def __init__(self, runLoop=None, runner=None):
+        self._fdmap = {}
+        self._idmap = {}
+        if runner is None:
+            runner = CFRunLoopRun
+        self._runner = runner
+
+        if runLoop is None:
+            runLoop = CFRunLoopGetMain()
+        self._cfrunloop = runLoop
+        PosixReactorBase.__init__(self)
+
+
+    def installWaker(self):
+        """
+        Override C{installWaker} in order to use L{_WakerPlus}; otherwise this
+        should be exactly the same as the parent implementation.
+        """
+        if not self.waker:
+            self.waker = _WakerPlus(self)
+            self._internalReaders.add(self.waker)
+            self.addReader(self.waker)
+
+
+    def _socketCallback(self, cfSocket, callbackType,
+                        ignoredAddress, ignoredData, context):
+        """
+        The socket callback issued by CFRunLoop.  This will issue C{doRead} or
+        C{doWrite} calls to the L{IReadDescriptor} and L{IWriteDescriptor}
+        registered with the file descriptor that we are being notified of.
+
+        @param cfSocket: The L{CFSocket} which has got some activity.
+
+        @param callbackType: The type of activity that we are being notified
+            of.  Either L{kCFSocketReadCallBack} or L{kCFSocketWriteCallBack}.
+
+        @param ignoredAddress: Unused, because this is not used for either of
+            the callback types we register for.
+
+        @param ignoredData: Unused, because this is not used for either of the
+            callback types we register for.
+
+        @param context: The data associated with this callback by
+            L{CFSocketCreateWithNative} (in L{CFReactor._watchFD}).  A 2-tuple
+            of C{(int, CFRunLoopSource)}.
+        """
+        (fd, smugglesrc) = context
+        if fd not in self._fdmap:
+            # Spurious notifications seem to be generated sometimes if you
+            # CFSocketDisableCallBacks in the middle of an event.  I don't know
+            # about this FD, any more, so let's get rid of it.
+            CFRunLoopRemoveSource(
+                self._cfrunloop, smugglesrc, kCFRunLoopCommonModes
+            )
+            return
+
+        why = None
+        isRead = False
+        src, skt, readWriteDescriptor, rw = self._fdmap[fd]
+        try:
+            if readWriteDescriptor.fileno() == -1:
+                why = _NO_FILEDESC
+            else:
+                isRead = callbackType == kCFSocketReadCallBack
+                # CFSocket seems to deliver duplicate read/write notifications
+                # sometimes, especially a duplicate writability notification
+                # when first registering the socket.  This bears further
+                # investigation, since I may have been mis-interpreting the
+                # behavior I was seeing. (Running the full Twisted test suite,
+                # while thorough, is not always entirely clear.) Until this has
+                # been more thoroughly investigated, we consult our own
+                # reading/writing state flags to determine whether we should
+                # actually attempt a doRead/doWrite first.  -glyph
+                if isRead:
+                    if rw[_READ]:
+                        why = log.callWithLogger(
+                            readWriteDescriptor, readWriteDescriptor.doRead)
+                else:
+                    if rw[_WRITE]:
+                        why = log.callWithLogger(
+                            readWriteDescriptor, readWriteDescriptor.doWrite)
+        except:
+            why = sys.exc_info()[1]
+            log.err()
+        if why:
+            self._disconnectSelectable(readWriteDescriptor, why, isRead)
+
+
+    def _watchFD(self, fd, descr, flag):
+        """
+        Register a file descriptor with the L{CFRunLoop}, or modify its state
+        so that it's listening for both notifications (read and write) rather
+        than just one; used to implement C{addReader} and C{addWriter}.
+
+        @param fd: The file descriptor.
+
+        @type fd: C{int}
+
+        @param descr: the L{IReadDescriptor} or L{IWriteDescriptor}
+
+        @param flag: the flag to register for callbacks on, either
+            L{kCFSocketReadCallBack} or L{kCFSocketWriteCallBack}
+        """
+        if fd == -1:
+            raise RuntimeError("Invalid file descriptor.")
+        if fd in self._fdmap:
+            src, cfs, gotdescr, rw = self._fdmap[fd]
+            # do I need to verify that it's the same descr?
+        else:
+            ctx = []
+            ctx.append(fd)
+            cfs = CFSocketCreateWithNative(
+                kCFAllocatorDefault, fd,
+                kCFSocketReadCallBack | kCFSocketWriteCallBack |
+                kCFSocketConnectCallBack,
+                self._socketCallback, ctx
+            )
+            CFSocketSetSocketFlags(
+                cfs,
+                kCFSocketAutomaticallyReenableReadCallBack |
+                kCFSocketAutomaticallyReenableWriteCallBack |
+
+                # This extra flag is to ensure that CF doesn't (destructively,
+                # because destructively is the only way to do it) retrieve
+                # SO_ERROR and thereby break twisted.internet.tcp.BaseClient,
+                # which needs SO_ERROR to tell it whether or not it needs to
+                # call connect_ex a second time.
+                _preserveSOError
+            )
+            src = CFSocketCreateRunLoopSource(kCFAllocatorDefault, cfs, 0)
+            ctx.append(src)
+            CFRunLoopAddSource(self._cfrunloop, src, kCFRunLoopCommonModes)
+            CFSocketDisableCallBacks(
+                cfs,
+                kCFSocketReadCallBack | kCFSocketWriteCallBack |
+                kCFSocketConnectCallBack
+            )
+            rw = [False, False]
+            self._idmap[id(descr)] = fd
+            self._fdmap[fd] = src, cfs, descr, rw
+        rw[self._flag2idx(flag)] = True
+        CFSocketEnableCallBacks(cfs, flag)
+
+
+    def _flag2idx(self, flag):
+        """
+        Convert a C{kCFSocket...} constant to an index into the read/write
+        state list (C{_READ} or C{_WRITE}) (the 4th element of the value of
+        C{self._fdmap}).
+
+        @param flag: C{kCFSocketReadCallBack} or C{kCFSocketWriteCallBack}
+
+        @return: C{_READ} or C{_WRITE}
+        """
+        return {kCFSocketReadCallBack: _READ,
+                kCFSocketWriteCallBack: _WRITE}[flag]
+
+
+    def _unwatchFD(self, fd, descr, flag):
+        """
+        Unregister a file descriptor with the L{CFRunLoop}, or modify its state
+        so that it's listening for only one notification (read or write) as
+        opposed to both; used to implement C{removeReader} and C{removeWriter}.
+
+        @param fd: a file descriptor
+
+        @type fd: C{int}
+
+        @param descr: an L{IReadDescriptor} or L{IWriteDescriptor}
+
+        @param flag: L{kCFSocketReadCallBack} or L{kCFSocketWriteCallBack}
+        """
+        if id(descr) not in self._idmap:
+            return
+        if fd == -1:
+            # need to deal with it in this case, I think.
+            realfd = self._idmap[id(descr)]
+        else:
+            realfd = fd
+        src, cfs, descr, rw = self._fdmap[realfd]
+        CFSocketDisableCallBacks(cfs, flag)
+        rw[self._flag2idx(flag)] = False
+        if not rw[_READ] and not rw[_WRITE]:
+            del self._idmap[id(descr)]
+            del self._fdmap[realfd]
+            CFRunLoopRemoveSource(self._cfrunloop, src, kCFRunLoopCommonModes)
+            CFSocketInvalidate(cfs)
+
+
+    def addReader(self, reader):
+        """
+        Implement L{IReactorFDSet.addReader}.
+        """
+        self._watchFD(reader.fileno(), reader, kCFSocketReadCallBack)
+
+
+    def addWriter(self, writer):
+        """
+        Implement L{IReactorFDSet.addWriter}.
+        """
+        self._watchFD(writer.fileno(), writer, kCFSocketWriteCallBack)
+
+
+    def removeReader(self, reader):
+        """
+        Implement L{IReactorFDSet.removeReader}.
+        """
+        self._unwatchFD(reader.fileno(), reader, kCFSocketReadCallBack)
+
+
+    def removeWriter(self, writer):
+        """
+        Implement L{IReactorFDSet.removeWriter}.
+        """
+        self._unwatchFD(writer.fileno(), writer, kCFSocketWriteCallBack)
+
+
+    def removeAll(self):
+        """
+        Implement L{IReactorFDSet.removeAll}.
+        """
+        allDesc = set([descr for src, cfs, descr, rw in self._fdmap.values()])
+        allDesc -= set(self._internalReaders)
+        for desc in allDesc:
+            self.removeReader(desc)
+            self.removeWriter(desc)
+        return list(allDesc)
+
+
+    def getReaders(self):
+        """
+        Implement L{IReactorFDSet.getReaders}.
+        """
+        return [descr for src, cfs, descr, rw in self._fdmap.values()
+                if rw[_READ]]
+
+
+    def getWriters(self):
+        """
+        Implement L{IReactorFDSet.getWriters}.
+        """
+        return [descr for src, cfs, descr, rw in self._fdmap.values()
+                if rw[_WRITE]]
+
+
+    def _moveCallLaterSooner(self, tple):
+        """
+        Override L{PosixReactorBase}'s implementation of L{IDelayedCall.reset}
+        so that it will immediately reschedule.  Normally
+        C{_moveCallLaterSooner} depends on the fact that C{runUntilCurrent} is
+        always run before the mainloop goes back to sleep, so this forces it to
+        immediately recompute how long the loop needs to stay asleep.
+        """
+        result = PosixReactorBase._moveCallLaterSooner(self, tple)
+        self._scheduleSimulate()
+        return result
+
+
+    _inCFLoop = False
+
+    def mainLoop(self):
+        """
+        Run the runner (L{CFRunLoopRun} or something that calls it), which runs
+        the run loop until C{crash()} is called.
+        """
+        self._inCFLoop = True
+        try:
+            self._runner()
+        finally:
+            self._inCFLoop = False
+
+
+    _currentSimulator = None
+
+    def _scheduleSimulate(self, force=False):
+        """
+        Schedule a call to C{self.runUntilCurrent}.  This will cancel the
+        currently scheduled call if it is already scheduled.
+
+        @param force: Even if there are no timed calls, make sure that
+            C{runUntilCurrent} runs immediately (in a 0-seconds-from-now
+            L{CFRunLoopTimer}).  This is necessary for calls which need to
+            trigger behavior of C{runUntilCurrent} other than running timed
+            calls, such as draining the thread call queue or calling C{crash()}
+            when the appropriate flags are set.
+
+        @type force: C{bool}
+        """
+        if self._currentSimulator is not None:
+            CFRunLoopTimerInvalidate(self._currentSimulator)
+            self._currentSimulator = None
+        timeout = self.timeout()
+        if force:
+            timeout = 0.0
+        if timeout is not None:
+            fireDate = (CFAbsoluteTimeGetCurrent() + timeout)
+            def simulate(cftimer, extra):
+                self._currentSimulator = None
+                self.runUntilCurrent()
+                self._scheduleSimulate()
+            c = self._currentSimulator = CFRunLoopTimerCreate(
+                kCFAllocatorDefault, fireDate,
+                0, 0, 0, simulate, None
+            )
+            CFRunLoopAddTimer(self._cfrunloop, c, kCFRunLoopCommonModes)
+
+
+    def callLater(self, _seconds, _f, *args, **kw):
+        """
+        Implement L{IReactorTime.callLater}.
+        """
+        delayedCall = PosixReactorBase.callLater(
+            self, _seconds, _f, *args, **kw
+        )
+        self._scheduleSimulate()
+        return delayedCall
+
+
+    def stop(self):
+        """
+        Implement L{IReactorCore.stop}.
+        """
+        PosixReactorBase.stop(self)
+        self._scheduleSimulate(True)
+
+
+    def crash(self):
+        """
+        Implement L{IReactorCore.crash}.
+        """
+        wasStarted = self._started
+        PosixReactorBase.crash(self)
+        if self._inCFLoop:
+            self._stopNow()
+        else:
+            if wasStarted:
+                self.callLater(0, self._stopNow)
+
+
+    def _stopNow(self):
+        """
+        Immediately stop the CFRunLoop (which must be running!).
+        """
+        CFRunLoopStop(self._cfrunloop)
+
+
+    def iterate(self, delay=0):
+        """
+        Emulate the behavior of C{iterate()} for things that want to call it,
+        by letting the loop run for a little while and then scheduling a timed
+        call to exit it.
+        """
+        self.callLater(delay, self._stopNow)
+        self.mainLoop()
+
+
+
+def install(runLoop=None, runner=None):
+    """
+    Configure the twisted mainloop to be run inside CFRunLoop.
+
+    @param runLoop: the run loop to use.
+
+    @param runner: the function to call in order to actually invoke the main
+        loop.  This will default to L{CFRunLoopRun} if not specified.  However,
+        this is not an appropriate choice for GUI applications, as you need to
+        run NSApplicationMain (or something like it).  For example, to run the
+        Twisted mainloop in a PyObjC application, your C{main.py} should look
+        something like this::
+
+            from PyObjCTools import AppHelper
+            from twisted.internet.cfreactor import install
+            install(runner=AppHelper.runEventLoop)
+            # initialize your application
+            reactor.run()
+
+    @return: The installed reactor.
+
+    @rtype: L{CFReactor}
+    """
+
+    reactor = CFReactor(runLoop=runLoop, runner=runner)
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
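+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted): how a
+# plain, non-GUI script might use this module.  install() must be called before
+# twisted.internet.reactor is imported anywhere else; reactor.callLater and
+# reactor.run are the standard IReactorTime/IReactorCore methods.  The helper
+# name below is hypothetical and is never called by this module.
+def _exampleInstallForScript():
+    install()                               # wrap the main run loop, default CFRunLoopRun runner
+    from twisted.internet import reactor    # now returns the CFReactor installed above
+    reactor.callLater(2.0, reactor.stop)    # stop the run loop after two seconds
+    reactor.run()                           # drives CFRunLoopRun via CFReactor.mainLoop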
+
+
diff --git a/ThirdParty/Twisted/twisted/internet/default.py b/ThirdParty/Twisted/twisted/internet/default.py
new file mode 100644
index 0000000..958010b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/default.py
@@ -0,0 +1,56 @@
+# -*- test-case-name: twisted.internet.test.test_default -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+The most suitable default reactor for the current platform.
+
+Depending on a specific application's needs, some other reactor may in
+fact be better.
+"""
+
+from __future__ import division, absolute_import
+
+__all__ = ["install"]
+
+from twisted.python.runtime import platform
+
+
+def _getInstallFunction(platform):
+    """
+    Return a function to install the reactor most suited for the given platform.
+
+    @param platform: The platform for which to select a reactor.
+    @type platform: L{twisted.python.runtime.Platform}
+
+    @return: A zero-argument callable which will install the selected
+        reactor.
+    """
+    # Linux: epoll(7) is the default, since it scales well.
+    #
+    # OS X: poll(2) is not exposed by Python because it doesn't support all
+    # file descriptors (in particular, lack of PTY support is a problem) --
+    # see <http://bugs.python.org/issue5154>. kqueue has the same restrictions
+    # as poll(2) as far as PTY support goes.
+    #
+    # Windows: IOCP should eventually be default, but still has some serious
+    # bugs, e.g. <http://twistedmatrix.com/trac/ticket/4667>.
+    #
+    # We therefore choose epoll(7) on Linux, poll(2) on other non-OS X POSIX
+    # platforms, and select(2) everywhere else.
+    try:
+        if platform.isLinux():
+            try:
+                from twisted.internet.epollreactor import install
+            except ImportError:
+                from twisted.internet.pollreactor import install
+        elif platform.getType() == 'posix' and not platform.isMacOSX():
+            from twisted.internet.pollreactor import install
+        else:
+            from twisted.internet.selectreactor import install
+    except ImportError:
+        from twisted.internet.selectreactor import install
+    return install
+
+
+install = _getInstallFunction(platform)
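+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted): most
+# applications never import this module directly; importing
+# twisted.internet.reactor runs the install function selected above.  The
+# helper name below is hypothetical and is never called by this module.
+def _exampleDefaultReactor():
+    from twisted.internet import reactor       # installs the platform default on first import
+    reactor.callWhenRunning(reactor.stop)       # schedule a call for when the loop starts
+    reactor.run()                               # epoll on Linux, poll/select elsewhere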
diff --git a/ThirdParty/Twisted/twisted/internet/defer.py b/ThirdParty/Twisted/twisted/internet/defer.py
new file mode 100644
index 0000000..b8d99a0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/defer.py
@@ -0,0 +1,1592 @@
+# -*- test-case-name: twisted.test.test_defer,twisted.test.test_defgen,twisted.internet.test.test_inlinecb -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Support for results that aren't immediately available.
+
+Maintainer: Glyph Lefkowitz
+
+@var _NO_RESULT: The result used to represent the fact that there is no
+    result. B{Never ever ever use this as an actual result for a Deferred}.  You
+    have been warned.
+
+@var _CONTINUE: A marker left in L{Deferred.callbacks} to indicate a Deferred
+    chain.  Always accompanied by a Deferred instance in the args tuple pointing
+    at the Deferred which is chained to the Deferred which has this marker.
+"""
+
+from __future__ import division, absolute_import
+
+import traceback
+import types
+import warnings
+from sys import exc_info
+from functools import wraps
+
+# Twisted imports
+from twisted.python.compat import _PY3, comparable, cmp
+from twisted.python import log, failure
+from twisted.python._utilpy3 import unsignedID
+
+
+
+class AlreadyCalledError(Exception):
+    pass
+
+
+class CancelledError(Exception):
+    """
+    This error is raised by default when a L{Deferred} is cancelled.
+    """
+
+
+class TimeoutError(Exception):
+    """
+    This exception is deprecated.  It is used only by the deprecated
+    L{Deferred.setTimeout} method.
+    """
+
+
+
+def logError(err):
+    log.err(err)
+    return err
+
+
+
+def succeed(result):
+    """
+    Return a L{Deferred} that has already had C{.callback(result)} called.
+
+    This is useful when you're writing synchronous code to an
+    asynchronous interface: i.e., some code is calling you expecting a
+    L{Deferred} result, but you don't actually need to do anything
+    asynchronous. Just return C{defer.succeed(theResult)}.
+
+    See L{fail} for a version of this function that uses a failing
+    L{Deferred} rather than a successful one.
+
+    @param result: The result to give to the Deferred's 'callback'
+           method.
+
+    @rtype: L{Deferred}
+    """
+    d = Deferred()
+    d.callback(result)
+    return d
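+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted): using
+# succeed() and fail() to satisfy a Deferred-returning interface from purely
+# synchronous code.  The helper name is hypothetical and never called here.
+def _exampleSucceedAndFail(cache, key):
+    """
+    Return a fired Deferred: the cached value on a hit, a KeyError Failure on
+    a miss.
+    """
+    if key in cache:
+        return succeed(cache[key])          # already-called-back Deferred
+    return fail(KeyError(key))              # already-errbacked Deferred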
+
+
+
+def fail(result=None):
+    """
+    Return a L{Deferred} that has already had C{.errback(result)} called.
+
+    See L{succeed}'s docstring for rationale.
+
+    @param result: The same argument that L{Deferred.errback} takes.
+
+    @raise NoCurrentExceptionError: If C{result} is C{None} but there is no
+        current exception state.
+
+    @rtype: L{Deferred}
+    """
+    d = Deferred()
+    d.errback(result)
+    return d
+
+
+
+def execute(callable, *args, **kw):
+    """
+    Create a L{Deferred} from a callable and arguments.
+
+    Call the given function with the given arguments.  Return a L{Deferred}
+    which has been fired with its callback as the result of that invocation
+    or its C{errback} with a L{Failure} for the exception thrown.
+    """
+    try:
+        result = callable(*args, **kw)
+    except:
+        return fail()
+    else:
+        return succeed(result)
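+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted):
+# execute() turns a direct call into a fired Deferred, so exceptions surface
+# on the errback chain instead of propagating.  Hypothetical helper, never
+# called by this module.
+def _exampleExecute():
+    d = execute(int, "not a number")             # ValueError is captured as a Failure
+    d.addErrback(lambda f: f.trap(ValueError))   # absorb the expected failure
+    return d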
+
+
+
+def maybeDeferred(f, *args, **kw):
+    """
+    Invoke a function that may or may not return a L{Deferred}.
+
+    Call the given function with the given arguments.  If the returned
+    object is a L{Deferred}, return it.  If the returned object is a L{Failure},
+    wrap it with L{fail} and return it.  Otherwise, wrap it in L{succeed} and
+    return it.  If an exception is raised, convert it to a L{Failure}, wrap it
+    in L{fail}, and then return it.
+
+    @type f: Any callable
+    @param f: The callable to invoke
+
+    @param args: The arguments to pass to C{f}
+    @param kw: The keyword arguments to pass to C{f}
+
+    @rtype: L{Deferred}
+    @return: The result of the function call, wrapped in a L{Deferred} if
+    necessary.
+    """
+    try:
+        result = f(*args, **kw)
+    except:
+        return fail(failure.Failure(captureVars=Deferred.debug))
+
+    if isinstance(result, Deferred):
+        return result
+    elif isinstance(result, failure.Failure):
+        return fail(result)
+    else:
+        return succeed(result)
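+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted):
+# maybeDeferred() normalizes plain return values, raised exceptions, Failures
+# and Deferreds into a single Deferred interface.  The helper name and the
+# getPage argument are hypothetical; nothing here is called by this module.
+def _exampleMaybeDeferred(getPage):
+    """
+    C{getPage} may return a string synchronously or a Deferred; either way the
+    caller gets a Deferred back.
+    """
+    d = maybeDeferred(getPage, "http://example.com/")
+    d.addCallback(lambda page: len(page))   # runs whenever the page is available
+    d.addErrback(log.err)                   # any raised exception arrives here as a Failure
+    return d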
+
+
+
+def timeout(deferred):
+    deferred.errback(failure.Failure(TimeoutError("Callback timed out")))
+
+
+
+def passthru(arg):
+    return arg
+
+
+
+def setDebugging(on):
+    """
+    Enable or disable L{Deferred} debugging.
+
+    When debugging is on, the call stacks from creation and invocation are
+    recorded, and added to any L{AlreadyCalledError}s we raise.
+    """
+    Deferred.debug=bool(on)
+
+
+
+def getDebugging():
+    """
+    Determine whether L{Deferred} debugging is enabled.
+    """
+    return Deferred.debug
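+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted): with
+# debugging enabled, an AlreadyCalledError carries the creation and first
+# invocation tracebacks, which makes double-fire bugs much easier to locate.
+# Hypothetical helper, never called by this module.
+def _exampleDebugging():
+    setDebugging(True)
+    d = Deferred()
+    d.callback(1)
+    try:
+        d.callback(2)                       # second fire: AlreadyCalledError with tracebacks
+    except AlreadyCalledError:
+        pass
+    finally:
+        setDebugging(False)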
+
+
+# See module docstring.
+_NO_RESULT = object()
+_CONTINUE = object()
+
+
+
+class Deferred:
+    """
+    This is a callback which will be put off until later.
+
+    Why do we want this? Well, in cases where a function in a threaded
+    program would block until it gets a result, for Twisted it should
+    not block. Instead, it should return a L{Deferred}.
+
+    This can be implemented for protocols that run over the network by
+    writing an asynchronous protocol for L{twisted.internet}. For methods
+    that come from outside packages that are not under our control, we use
+    threads (see for example L{twisted.enterprise.adbapi}).
+
+    For more information about Deferreds, see doc/core/howto/defer.html or
+    U{http://twistedmatrix.com/documents/current/core/howto/defer.html}
+
+    When creating a Deferred, you may provide a canceller function, which
+    will be called by d.cancel() to let you do any clean-up necessary if the
+    user decides not to wait for the deferred to complete.
+
+    @ivar called: A flag which is C{False} until either C{callback} or
+        C{errback} is called and afterwards always C{True}.
+    @type called: C{bool}
+
+    @ivar paused: A counter of how many unmatched C{pause} calls have been made
+        on this instance.
+    @type paused: C{int}
+
+    @ivar _suppressAlreadyCalled: A flag used by the cancellation mechanism
+        which is C{True} if the Deferred has no canceller and has been
+        cancelled, C{False} otherwise.  If C{True}, it can be expected that
+        C{callback} or C{errback} will eventually be called and the result
+        should be silently discarded.
+    @type _suppressAlreadyCalled: C{bool}
+
+    @ivar _runningCallbacks: A flag which is C{True} while this instance is
+        executing its callback chain, used to stop recursive execution of
+        L{_runCallbacks}
+    @type _runningCallbacks: C{bool}
+
+    @ivar _chainedTo: If this Deferred is waiting for the result of another
+        Deferred, this is a reference to the other Deferred.  Otherwise, C{None}.
+    """
+
+    called = False
+    paused = 0
+    _debugInfo = None
+    _suppressAlreadyCalled = False
+
+    # Are we currently running a user-installed callback?  Meant to prevent
+    # recursive running of callbacks when a reentrant call to add a callback is
+    # used.
+    _runningCallbacks = False
+
+    # Keep this class attribute for now, for compatibility with code that
+    # sets it directly.
+    debug = False
+
+    _chainedTo = None
+
+    def __init__(self, canceller=None):
+        """
+        Initialize a L{Deferred}.
+
+        @param canceller: a callable used to stop the pending operation
+            scheduled by this L{Deferred} when L{Deferred.cancel} is
+            invoked. The canceller will be passed the deferred whose
+            cancelation is requested (i.e., self).
+
+            If a canceller is not given, or does not invoke its argument's
+            C{callback} or C{errback} method, L{Deferred.cancel} will
+            invoke L{Deferred.errback} with a L{CancelledError}.
+
+            Note that if a canceller is not given, C{callback} or
+            C{errback} may still be invoked exactly once, even though
+            defer.py will have already invoked C{errback}, as described
+            above.  This allows clients of code which returns a L{Deferred}
+            to cancel it without requiring the L{Deferred} instantiator to
+            provide any specific implementation support for cancellation.
+            New in 10.1.
+
+        @type canceller: a 1-argument callable which takes a L{Deferred}. The
+            return result is ignored.
+        """
+        self.callbacks = []
+        self._canceller = canceller
+        if self.debug:
+            self._debugInfo = DebugInfo()
+            self._debugInfo.creator = traceback.format_stack()[:-1]
+
+
+    def addCallbacks(self, callback, errback=None,
+                     callbackArgs=None, callbackKeywords=None,
+                     errbackArgs=None, errbackKeywords=None):
+        """
+        Add a pair of callbacks (success and error) to this L{Deferred}.
+
+        These will be executed when the 'master' callback is run.
+
+        @return: C{self}.
+        @rtype: a L{Deferred}
+        """
+        assert callable(callback)
+        assert errback == None or callable(errback)
+        cbs = ((callback, callbackArgs, callbackKeywords),
+               (errback or (passthru), errbackArgs, errbackKeywords))
+        self.callbacks.append(cbs)
+
+        if self.called:
+            self._runCallbacks()
+        return self
+
+
+    def addCallback(self, callback, *args, **kw):
+        """
+        Convenience method for adding just a callback.
+
+        See L{addCallbacks}.
+        """
+        return self.addCallbacks(callback, callbackArgs=args,
+                                 callbackKeywords=kw)
+
+
+    def addErrback(self, errback, *args, **kw):
+        """
+        Convenience method for adding just an errback.
+
+        See L{addCallbacks}.
+        """
+        return self.addCallbacks(passthru, errback,
+                                 errbackArgs=args,
+                                 errbackKeywords=kw)
+
+
+    def addBoth(self, callback, *args, **kw):
+        """
+        Convenience method for adding a single callable as both a callback
+        and an errback.
+
+        See L{addCallbacks}.
+        """
+        return self.addCallbacks(callback, callback,
+                                 callbackArgs=args, errbackArgs=args,
+                                 callbackKeywords=kw, errbackKeywords=kw)
+
+
+    def chainDeferred(self, d):
+        """
+        Chain another L{Deferred} to this L{Deferred}.
+
+        This method adds callbacks to this L{Deferred} to call C{d}'s callback
+        or errback, as appropriate. It is merely a shorthand way of performing
+        the following::
+
+            self.addCallbacks(d.callback, d.errback)
+
+        When you chain a deferred d2 to another deferred d1 with
+        d1.chainDeferred(d2), you are making d2 participate in the callback
+        chain of d1. Thus any event that fires d1 will also fire d2.
+        However, the converse is B{not} true; if d2 is fired d1 will not be
+        affected.
+
+        Note that unlike the case where chaining is caused by a L{Deferred}
+        being returned from a callback, it is possible to cause the call
+        stack size limit to be exceeded by chaining many L{Deferred}s
+        together with C{chainDeferred}.
+
+        @return: C{self}.
+        @rtype: a L{Deferred}
+        """
+        d._chainedTo = self
+        return self.addCallbacks(d.callback, d.errback)
+
+
+    def callback(self, result):
+        """
+        Run all success callbacks that have been added to this L{Deferred}.
+
+        Each callback will have its result passed as the first argument to
+        the next; this way, the callbacks act as a 'processing chain'.  If
+        the success-callback returns a L{Failure} or raises an L{Exception},
+        processing will continue on the *error* callback chain.  If a
+        callback (or errback) returns another L{Deferred}, this L{Deferred}
+        will be chained to it (and further callbacks will not run until that
+        L{Deferred} has a result).
+
+        An instance of L{Deferred} may only have either L{callback} or
+        L{errback} called on it, and only once.
+
+        @param result: The object which will be passed to the first callback
+            added to this L{Deferred} (via L{addCallback}).
+
+        @raise AlreadyCalledError: If L{callback} or L{errback} has already been
+            called on this L{Deferred}.
+        """
+        assert not isinstance(result, Deferred)
+        self._startRunCallbacks(result)
+
+
+    def errback(self, fail=None):
+        """
+        Run all error callbacks that have been added to this L{Deferred}.
+
+        Each callback will have its result passed as the first
+        argument to the next; this way, the callbacks act as a
+        'processing chain'. Also, if the error-callback returns a non-Failure
+        or doesn't raise an L{Exception}, processing will continue on the
+        *success*-callback chain.
+
+        If the argument that's passed to me is not a L{failure.Failure} instance,
+        it will be embedded in one. If no argument is passed, a
+        L{failure.Failure} instance will be created based on the current
+        traceback stack.
+
+        Passing a string as `fail' is deprecated, and will be punished with
+        a warning message.
+
+        An instance of L{Deferred} may only have either L{callback} or
+        L{errback} called on it, and only once.
+
+        @param fail: The L{Failure} object which will be passed to the first
+            errback added to this L{Deferred} (via L{addErrback}).
+            Alternatively, a L{Exception} instance from which a L{Failure} will
+            be constructed (with no traceback) or C{None} to create a L{Failure}
+            instance from the current exception state (with a traceback).
+
+        @raise AlreadyCalledError: If L{callback} or L{errback} has already been
+            called on this L{Deferred}.
+
+        @raise NoCurrentExceptionError: If C{fail} is C{None} but there is
+            no current exception state.
+        """
+        if fail is None:
+            fail = failure.Failure(captureVars=self.debug)
+        elif not isinstance(fail, failure.Failure):
+            fail = failure.Failure(fail)
+
+        self._startRunCallbacks(fail)
+
+
+    def pause(self):
+        """
+        Stop processing on a L{Deferred} until L{unpause}() is called.
+        """
+        self.paused = self.paused + 1
+
+
+    def unpause(self):
+        """
+        Process all callbacks made since L{pause}() was called.
+        """
+        self.paused = self.paused - 1
+        if self.paused:
+            return
+        if self.called:
+            self._runCallbacks()
+
+
+    def cancel(self):
+        """
+        Cancel this L{Deferred}.
+
+        If the L{Deferred} has not yet had its C{errback} or C{callback} method
+        invoked, call the canceller function provided to the constructor. If
+        that function does not invoke C{callback} or C{errback}, or if no
+        canceller function was provided, errback with L{CancelledError}.
+
+        If this L{Deferred} is waiting on another L{Deferred}, forward the
+        cancellation to the other L{Deferred}.
+        """
+        if not self.called:
+            canceller = self._canceller
+            if canceller:
+                canceller(self)
+            else:
+                # Arrange to eat the callback that will eventually be fired
+                # since there was no real canceller.
+                self._suppressAlreadyCalled = True
+            if not self.called:
+                # There was no canceller, or the canceller didn't call
+                # callback or errback.
+                self.errback(failure.Failure(CancelledError()))
+        elif isinstance(self.result, Deferred):
+            # Waiting for another deferred -- cancel it instead.
+            self.result.cancel()
+
+
+    def _startRunCallbacks(self, result):
+        if self.called:
+            if self._suppressAlreadyCalled:
+                self._suppressAlreadyCalled = False
+                return
+            if self.debug:
+                if self._debugInfo is None:
+                    self._debugInfo = DebugInfo()
+                extra = "\n" + self._debugInfo._getDebugTracebacks()
+                raise AlreadyCalledError(extra)
+            raise AlreadyCalledError
+        if self.debug:
+            if self._debugInfo is None:
+                self._debugInfo = DebugInfo()
+            self._debugInfo.invoker = traceback.format_stack()[:-2]
+        self.called = True
+        self.result = result
+        self._runCallbacks()
+
+
+    def _continuation(self):
+        """
+        Build a tuple of callback and errback with L{_continue} to be used by
+        L{_addContinue} and L{_removeContinue} on another Deferred.
+        """
+        return ((_CONTINUE, (self,), None),
+                (_CONTINUE, (self,), None))
+
+
+    def _runCallbacks(self):
+        """
+        Run the chain of callbacks once a result is available.
+
+        This consists of a simple loop over all of the callbacks, calling each
+        with the current result and making the current result equal to the
+        return value (or raised exception) of that call.
+
+        If C{self._runningCallbacks} is true, this loop won't run at all, since
+        it is already running above us on the call stack.  If C{self.paused} is
+        true, the loop also won't run, because that's what it means to be
+        paused.
+
+        The loop will terminate before processing all of the callbacks if a
+        C{Deferred} without a result is encountered.
+
+        If a C{Deferred} I{with} a result is encountered, that result is taken
+        and the loop proceeds.
+
+        @note: The implementation is complicated slightly by the fact that
+            chaining (associating two Deferreds with each other such that one
+            will wait for the result of the other, as happens when a Deferred is
+            returned from a callback on another Deferred) is supported
+            iteratively rather than recursively, to avoid running out of stack
+            frames when processing long chains.
+        """
+        if self._runningCallbacks:
+            # Don't recursively run callbacks
+            return
+
+        # Keep track of all the Deferreds encountered while propagating results
+        # up a chain.  The way a Deferred gets onto this stack is by having
+        # added its _continuation() to the callbacks list of a second Deferred
+        # and then that second Deferred being fired.  I.e., if a Deferred ever
+        # had _chainedTo set to something other than None, it might end up on
+        # this stack.
+        chain = [self]
+
+        while chain:
+            current = chain[-1]
+
+            if current.paused:
+                # This Deferred isn't going to produce a result at all.  All the
+                # Deferreds up the chain waiting on it will just have to...
+                # wait.
+                return
+
+            finished = True
+            current._chainedTo = None
+            while current.callbacks:
+                item = current.callbacks.pop(0)
+                callback, args, kw = item[
+                    isinstance(current.result, failure.Failure)]
+                args = args or ()
+                kw = kw or {}
+
+                # Avoid recursion if we can.
+                if callback is _CONTINUE:
+                    # Give the waiting Deferred our current result and then
+                    # forget about that result ourselves.
+                    chainee = args[0]
+                    chainee.result = current.result
+                    current.result = None
+                    # Making sure to update _debugInfo
+                    if current._debugInfo is not None:
+                        current._debugInfo.failResult = None
+                    chainee.paused -= 1
+                    chain.append(chainee)
+                    # Delay cleaning this Deferred and popping it from the chain
+                    # until after we've dealt with chainee.
+                    finished = False
+                    break
+
+                try:
+                    current._runningCallbacks = True
+                    try:
+                        current.result = callback(current.result, *args, **kw)
+                    finally:
+                        current._runningCallbacks = False
+                except:
+                    # Including full frame information in the Failure is quite
+                    # expensive, so we avoid it unless self.debug is set.
+                    current.result = failure.Failure(captureVars=self.debug)
+                else:
+                    if isinstance(current.result, Deferred):
+                        # The result is another Deferred.  If it has a result,
+                        # we can take it and keep going.
+                        resultResult = getattr(current.result, 'result', _NO_RESULT)
+                        if resultResult is _NO_RESULT or isinstance(resultResult, Deferred) or current.result.paused:
+                            # Nope, it didn't.  Pause and chain.
+                            current.pause()
+                            current._chainedTo = current.result
+                            # Note: current.result has no result, so it's not
+                            # running its callbacks right now.  Therefore we can
+                            # append to the callbacks list directly instead of
+                            # using addCallbacks.
+                            current.result.callbacks.append(current._continuation())
+                            break
+                        else:
+                            # Yep, it did.  Steal it.
+                            current.result.result = None
+                            # Make sure _debugInfo's failure state is updated.
+                            if current.result._debugInfo is not None:
+                                current.result._debugInfo.failResult = None
+                            current.result = resultResult
+
+            if finished:
+                # As much of the callback chain - perhaps all of it - as can be
+                # processed right now has been.  The current Deferred is waiting on
+                # another Deferred or for more callbacks.  Before finishing with it,
+                # make sure its _debugInfo is in the proper state.
+                if isinstance(current.result, failure.Failure):
+                    # Stash the Failure in the _debugInfo for unhandled error
+                    # reporting.
+                    current.result.cleanFailure()
+                    if current._debugInfo is None:
+                        current._debugInfo = DebugInfo()
+                    current._debugInfo.failResult = current.result
+                else:
+                    # Clear out any Failure in the _debugInfo, since the result
+                    # is no longer a Failure.
+                    if current._debugInfo is not None:
+                        current._debugInfo.failResult = None
+
+                # This Deferred is done, pop it from the chain and move back up
+                # to the Deferred which supplied us with our result.
+                chain.pop()
+
+
+    def __str__(self):
+        """
+        Return a string representation of this C{Deferred}.
+        """
+        cname = self.__class__.__name__
+        result = getattr(self, 'result', _NO_RESULT)
+        myID = hex(unsignedID(self))
+        if self._chainedTo is not None:
+            result = ' waiting on Deferred at %s' % (hex(unsignedID(self._chainedTo)),)
+        elif result is _NO_RESULT:
+            result = ''
+        else:
+            result = ' current result: %r' % (result,)
+        return "<%s at %s%s>" % (cname, myID, result)
+    __repr__ = __str__
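+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted): a
+# typical callback/errback chain and cancellation of a pending Deferred.  The
+# helper names are hypothetical and never called by this module.
+def _exampleDeferredChain():
+    def cancelQuery(d):
+        pass                                # a real canceller would abort the pending work
+    d = Deferred(canceller=cancelQuery)
+    d.addCallback(lambda rows: len(rows))   # success chain: count the rows
+    d.addErrback(lambda f: f.trap(CancelledError))  # absorb the cancellation failure
+    d.addBoth(lambda result: result)        # runs for either outcome
+    d.cancel()                              # canceller did not fire, so errback(CancelledError)
+    return d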
+
+
+
+class DebugInfo:
+    """
+    Deferred debug helper.
+    """
+
+    failResult = None
+
+    def _getDebugTracebacks(self):
+        info = ''
+        if hasattr(self, "creator"):
+            info += " C: Deferred was created:\n C:"
+            info += "".join(self.creator).rstrip().replace("\n","\n C:")
+            info += "\n"
+        if hasattr(self, "invoker"):
+            info += " I: First Invoker was:\n I:"
+            info += "".join(self.invoker).rstrip().replace("\n","\n I:")
+            info += "\n"
+        return info
+
+
+    def __del__(self):
+        """
+        Print tracebacks and die.
+
+        If the *last* (and I do mean *last*) callback leaves me in an error
+        state, print a traceback (if said errback is a L{Failure}).
+        """
+        if self.failResult is not None:
+            log.msg("Unhandled error in Deferred:", isError=True)
+            debugInfo = self._getDebugTracebacks()
+            if debugInfo != '':
+                log.msg("(debug: " + debugInfo + ")", isError=True)
+            log.err(self.failResult)
+
+
+
+@comparable
+class FirstError(Exception):
+    """
+    First error to occur in a L{DeferredList} if C{fireOnOneErrback} is set.
+
+    @ivar subFailure: The L{Failure} that occurred.
+    @type subFailure: L{Failure}
+
+    @ivar index: The index of the L{Deferred} in the L{DeferredList} where
+        it happened.
+    @type index: C{int}
+    """
+    def __init__(self, failure, index):
+        Exception.__init__(self, failure, index)
+        self.subFailure = failure
+        self.index = index
+
+
+    def __repr__(self):
+        """
+        The I{repr} of L{FirstError} instances includes the repr of the
+        wrapped failure's exception and the index of the L{FirstError}.
+        """
+        return 'FirstError[#%d, %r]' % (self.index, self.subFailure.value)
+
+
+    def __str__(self):
+        """
+        The I{str} of L{FirstError} instances includes the I{str} of the
+        entire wrapped failure (including its traceback and exception) and
+        the index of the L{FirstError}.
+        """
+        return 'FirstError[#%d, %s]' % (self.index, self.subFailure)
+
+
+    def __cmp__(self, other):
+        """
+        Comparison between L{FirstError} and other L{FirstError} instances
+        is defined as the comparison of the index and sub-failure of each
+        instance.  L{FirstError} instances don't compare equal to anything
+        that isn't a L{FirstError} instance.
+
+        @since: 8.2
+        """
+        if isinstance(other, FirstError):
+            return cmp(
+                (self.index, self.subFailure),
+                (other.index, other.subFailure))
+        return -1
+
+
+
+class DeferredList(Deferred):
+    """
+    L{DeferredList} is a tool for collecting the results of several Deferreds.
+
+    This tracks a list of L{Deferred}s for their results, and makes a single
+    callback when they have all completed.  By default, the ultimate result is a
+    list of (success, result) tuples, 'success' being a boolean.
+    L{DeferredList} exposes the same API that L{Deferred} does, so callbacks and
+    errbacks can be added to it in the same way.
+
+    L{DeferredList} is implemented by adding callbacks and errbacks to each
+    L{Deferred} in the list passed to it.  This means callbacks and errbacks
+    added to the Deferreds before they are passed to L{DeferredList} will change
+    the result that L{DeferredList} sees (i.e., L{DeferredList} is not special).
+    Callbacks and errbacks can also be added to the Deferreds after they are
+    passed to L{DeferredList} and L{DeferredList} may change the result that
+    they see.
+
+    See the documentation for the C{__init__} arguments for more information.
+    """
+
+    fireOnOneCallback = False
+    fireOnOneErrback = False
+
+    def __init__(self, deferredList, fireOnOneCallback=False,
+                 fireOnOneErrback=False, consumeErrors=False):
+        """
+        Initialize a DeferredList.
+
+        @param deferredList: The list of deferreds to track.
+        @type deferredList:  C{list} of L{Deferred}s
+
+        @param fireOnOneCallback: (keyword param) a flag indicating that this
+            L{DeferredList} will fire when the first L{Deferred} in
+            C{deferredList} fires with a non-failure result without waiting for
+            any of the other Deferreds.  When this flag is set, the DeferredList
+            will fire with a two-tuple: the first element is the result of the
+            Deferred which fired; the second element is the index in
+            C{deferredList} of that Deferred.
+        @type fireOnOneCallback: C{bool}
+
+        @param fireOnOneErrback: (keyword param) a flag indicating that this
+            L{DeferredList} will fire when the first L{Deferred} in
+            C{deferredList} fires with a failure result without waiting for any
+            of the other Deferreds.  When this flag is set, if a Deferred in the
+            list errbacks, the DeferredList will errback with a L{FirstError}
+            failure wrapping the failure of that Deferred.
+        @type fireOnOneErrback: C{bool}
+
+        @param consumeErrors: (keyword param) a flag indicating that failures in
+            any of the included L{Deferreds} should not be propagated to
+            errbacks added to the individual L{Deferreds} after this
+            L{DeferredList} is constructed.  After constructing the
+            L{DeferredList}, any errors in the individual L{Deferred}s will be
+            converted to a callback result of C{None}.  This is useful to
+            prevent spurious 'Unhandled error in Deferred' messages from being
+            logged.  This does not prevent C{fireOnOneErrback} from working.
+        @type consumeErrors: C{bool}
+        """
+        self.resultList = [None] * len(deferredList)
+        Deferred.__init__(self)
+        if len(deferredList) == 0 and not fireOnOneCallback:
+            self.callback(self.resultList)
+
+        # These flags need to be set *before* attaching callbacks to the
+        # deferreds, because the callbacks use these flags, and will run
+        # synchronously if any of the deferreds are already fired.
+        self.fireOnOneCallback = fireOnOneCallback
+        self.fireOnOneErrback = fireOnOneErrback
+        self.consumeErrors = consumeErrors
+        self.finishedCount = 0
+
+        index = 0
+        for deferred in deferredList:
+            deferred.addCallbacks(self._cbDeferred, self._cbDeferred,
+                                  callbackArgs=(index,SUCCESS),
+                                  errbackArgs=(index,FAILURE))
+            index = index + 1
+
+
+    def _cbDeferred(self, result, index, succeeded):
+        """
+        (internal) Callback for when one of my deferreds fires.
+        """
+        self.resultList[index] = (succeeded, result)
+
+        self.finishedCount += 1
+        if not self.called:
+            if succeeded == SUCCESS and self.fireOnOneCallback:
+                self.callback((result, index))
+            elif succeeded == FAILURE and self.fireOnOneErrback:
+                self.errback(failure.Failure(FirstError(result, index)))
+            elif self.finishedCount == len(self.resultList):
+                self.callback(self.resultList)
+
+        if succeeded == FAILURE and self.consumeErrors:
+            result = None
+
+        return result
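+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted):
+# collecting several Deferreds with DeferredList.  consumeErrors=True keeps
+# the individual failures from being logged as unhandled.  Hypothetical
+# helper, never called by this module.
+def _exampleDeferredList():
+    good = succeed("ham")
+    bad = fail(RuntimeError("spam"))
+    dl = DeferredList([good, bad], consumeErrors=True)
+    def report(results):
+        # results is [(True, "ham"), (False, <Failure of the RuntimeError>)]
+        for ok, value in results:
+            log.msg("success=%s value=%r" % (ok, value))
+        return results
+    dl.addCallback(report)
+    return dl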
+
+
+
+def _parseDListResult(l, fireOnOneErrback=False):
+    if __debug__:
+        for success, value in l:
+            assert success
+    return [x[1] for x in l]
+
+
+
+def gatherResults(deferredList, consumeErrors=False):
+    """
+    Returns, via a L{Deferred}, a list with the results of the given
+    L{Deferred}s - in effect, a "join" of multiple deferred operations.
+
+    The returned L{Deferred} will fire when I{all} of the provided L{Deferred}s
+    have fired, or when any one of them has failed.
+
+    This differs from L{DeferredList} in that you don't need to parse
+    the result for success/failure.
+
+    @type deferredList:  C{list} of L{Deferred}s
+
+    @param consumeErrors: (keyword param) a flag, defaulting to False,
+        indicating that failures in any of the given L{Deferreds} should not be
+        propagated to errbacks added to the individual L{Deferreds} after this
+        L{gatherResults} invocation.  Any such errors in the individual
+        L{Deferred}s will be converted to a callback result of C{None}.  This
+        is useful to prevent spurious 'Unhandled error in Deferred' messages
+        from being logged.  This parameter is available since 11.1.0.
+    @type consumeErrors: C{bool}
+    """
+    d = DeferredList(deferredList, fireOnOneErrback=True,
+                                   consumeErrors=consumeErrors)
+    d.addCallback(_parseDListResult)
+    return d
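+
+
+# Illustrative sketch (editorial addition, not part of upstream Twisted):
+# gatherResults() is the common "wait for all of these" pattern when the
+# individual (success, result) tuples of DeferredList are not needed.  The
+# helper name and fetchPage argument are hypothetical; nothing here is called
+# by this module.
+def _exampleGatherResults(fetchPage, urls):
+    """
+    C{fetchPage} is any callable returning a Deferred; the result is the total
+    size of all pages, or the first Failure.
+    """
+    d = gatherResults([fetchPage(url) for url in urls])
+    d.addCallback(lambda pages: sum(len(p) for p in pages))  # total size of all pages
+    return d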
+
+
+
+# Constants for use with DeferredList
+
+SUCCESS = True
+FAILURE = False
+
+
+
+## deferredGenerator
+
+class waitForDeferred:
+    """
+    See L{deferredGenerator}.
+    """
+
+    def __init__(self, d):
+        if not isinstance(d, Deferred):
+            raise TypeError("You must give waitForDeferred a Deferred. You gave it %r." % (d,))
+        self.d = d
+
+
+    def getResult(self):
+        if isinstance(self.result, failure.Failure):
+            self.result.raiseException()
+        return self.result
+
+
+
+def _deferGenerator(g, deferred):
+    """
+    See L{deferredGenerator}.
+    """
+    result = None
+
+    # This function is complicated by the need to prevent unbounded recursion
+    # arising from repeatedly yielding immediately ready deferreds.  This while
+    # loop and the waiting variable solve that by manually unfolding the
+    # recursion.
+
+    waiting = [True, # defgen is waiting for result?
+               None] # result
+
+    while 1:
+        try:
+            result = next(g)
+        except StopIteration:
+            deferred.callback(result)
+            return deferred
+        except:
+            deferred.errback()
+            return deferred
+
+        # Deferred.callback(Deferred) raises an error; we catch this case
+        # early here and give a nicer error message to the user in case
+        # they yield a Deferred.
+        if isinstance(result, Deferred):
+            return fail(TypeError("Yield waitForDeferred(d), not d!"))
+
+        if isinstance(result, waitForDeferred):
+            # a waitForDeferred was yielded, get the result.
+            # Pass result in so it doesn't get changed going around the loop.
+            # This isn't a problem for waiting, as it's only reused if
+            # gotResult has already been executed.
+            def gotResult(r, result=result):
+                result.result = r
+                if waiting[0]:
+                    waiting[0] = False
+                    waiting[1] = r
+                else:
+                    _deferGenerator(g, deferred)
+            result.d.addBoth(gotResult)
+            if waiting[0]:
+                # Haven't called back yet, set flag so that we get reinvoked
+                # and return from the loop
+                waiting[0] = False
+                return deferred
+            # Reset waiting to initial values for next loop
+            waiting[0] = True
+            waiting[1] = None
+
+            result = None
+
+
+
+def deferredGenerator(f):
+    """
+    deferredGenerator and waitForDeferred help you write L{Deferred}-using code
+    that looks like a regular sequential function. If your code has a minimum
+    requirement of Python 2.5, consider the use of L{inlineCallbacks} instead,
+    which can accomplish the same thing in a more concise manner.
+
+    There are two important functions involved: L{waitForDeferred}, and
+    L{deferredGenerator}.  They are used together, like this::
+
+        @deferredGenerator
+        def thingummy():
+            thing = waitForDeferred(makeSomeRequestResultingInDeferred())
+            yield thing
+            thing = thing.getResult()
+            print thing #the result! hoorj!
+
+    L{waitForDeferred} returns something that you should immediately yield; when
+    your generator is resumed, calling C{thing.getResult()} will either give you
+    the result of the L{Deferred} if it was a success, or raise an exception if it
+    was a failure.  Calling C{getResult} is B{absolutely mandatory}.  If you do
+    not call it, I{your program will not work}.
+
+    L{deferredGenerator} takes one of these waitForDeferred-using generator
+    functions and converts it into a function that returns a L{Deferred}. The
+    result of the L{Deferred} will be the last value that your generator yielded
+    unless the last value is a L{waitForDeferred} instance, in which case the
+    result will be C{None}.  If the function raises an unhandled exception, the
+    L{Deferred} will errback instead.  Remember that C{return result} won't work;
+    use C{yield result; return} in place of that.
+
+    Note that not yielding anything from your generator will make the L{Deferred}
+    result in C{None}. Yielding a L{Deferred} from your generator is also an error
+    condition; always yield C{waitForDeferred(d)} instead.
+
+    The L{Deferred} returned from your deferred generator may also errback if your
+    generator raised an exception.  For example::
+
+        @deferredGenerator
+        def thingummy():
+            thing = waitForDeferred(makeSomeRequestResultingInDeferred())
+            yield thing
+            thing = thing.getResult()
+            if thing == 'I love Twisted':
+                # will become the result of the Deferred
+                yield 'TWISTED IS GREAT!'
+                return
+            else:
+                # will trigger an errback
+                raise Exception('DESTROY ALL LIFE')
+
+    Put succinctly, these functions connect deferred-using code with this 'fake
+    blocking' style in both directions: L{waitForDeferred} converts from a
+    L{Deferred} to the 'blocking' style, and L{deferredGenerator} converts from the
+    'blocking' style to a L{Deferred}.
+    """
+    @wraps(f)
+    def unwindGenerator(*args, **kwargs):
+        return _deferGenerator(f(*args, **kwargs), Deferred())
+    return unwindGenerator
+
+
+## inlineCallbacks
+
+# BaseException is only in Py 2.5.
+try:
+    BaseException
+except NameError:
+    BaseException=Exception
+
+
+
+class _DefGen_Return(BaseException):
+    def __init__(self, value):
+        self.value = value
+
+
+
+def returnValue(val):
+    """
+    Return val from a L{inlineCallbacks} generator.
+
+    Note: this is currently implemented by raising an exception
+    derived from L{BaseException}.  You might want to change any
+    'except:' clauses to an 'except Exception:' clause so as not to
+    catch this exception.
+
+    Also: while this function currently will work when called from
+    within arbitrary functions called from within the generator, do
+    not rely upon this behavior.
+    """
+    raise _DefGen_Return(val)
+
+
+
+def _inlineCallbacks(result, g, deferred):
+    """
+    See L{inlineCallbacks}.
+    """
+    # This function is complicated by the need to prevent unbounded recursion
+    # arising from repeatedly yielding immediately ready deferreds.  This while
+    # loop and the waiting variable solve that by manually unfolding the
+    # recursion.
+
+    waiting = [True, # waiting for result?
+               None] # result
+
+    while 1:
+        try:
+            # Send the last result back as the result of the yield expression.
+            isFailure = isinstance(result, failure.Failure)
+            if isFailure:
+                result = result.throwExceptionIntoGenerator(g)
+            else:
+                result = g.send(result)
+        except StopIteration:
+            # fell off the end, or "return" statement
+            deferred.callback(None)
+            return deferred
+        except _DefGen_Return as e:
+            # returnValue() was called; time to give a result to the original
+            # Deferred.  First though, let's try to identify the potentially
+            # confusing situation which results when returnValue() is
+            # accidentally invoked from a different function, one that wasn't
+            # decorated with @inlineCallbacks.
+
+            # The traceback starts in this frame (the one for
+            # _inlineCallbacks); the next one down should be the application
+            # code.
+            appCodeTrace = exc_info()[2].tb_next
+            if isFailure:
+                # If we invoked this generator frame by throwing an exception
+                # into it, then throwExceptionIntoGenerator will consume an
+                # additional stack frame itself, so we need to skip that too.
+                appCodeTrace = appCodeTrace.tb_next
+            # Now that we've identified the frame being exited by the
+            # exception, let's figure out if returnValue was called from it
+            # directly.  returnValue itself consumes a stack frame, so the
+            # application code will have a tb_next, but it will *not* have a
+            # second tb_next.
+            if appCodeTrace.tb_next.tb_next:
+                # If returnValue was invoked non-local to the frame which it is
+                # exiting, identify the frame that ultimately invoked
+                # returnValue so that we can warn the user, as this behavior is
+                # confusing.
+                ultimateTrace = appCodeTrace
+                while ultimateTrace.tb_next.tb_next:
+                    ultimateTrace = ultimateTrace.tb_next
+                filename = ultimateTrace.tb_frame.f_code.co_filename
+                lineno = ultimateTrace.tb_lineno
+                warnings.warn_explicit(
+                    "returnValue() in %r causing %r to exit: "
+                    "returnValue should only be invoked by functions decorated "
+                    "with inlineCallbacks" % (
+                        ultimateTrace.tb_frame.f_code.co_name,
+                        appCodeTrace.tb_frame.f_code.co_name),
+                    DeprecationWarning, filename, lineno)
+            deferred.callback(e.value)
+            return deferred
+        except:
+            deferred.errback()
+            return deferred
+
+        if isinstance(result, Deferred):
+            # a deferred was yielded, get the result.
+            def gotResult(r):
+                if waiting[0]:
+                    waiting[0] = False
+                    waiting[1] = r
+                else:
+                    _inlineCallbacks(r, g, deferred)
+
+            result.addBoth(gotResult)
+            if waiting[0]:
+                # Haven't called back yet, set flag so that we get reinvoked
+                # and return from the loop
+                waiting[0] = False
+                return deferred
+
+            result = waiting[1]
+            # Reset waiting to initial values for next loop.  gotResult uses
+            # waiting, but this isn't a problem because gotResult is only
+            # executed once, and if it hasn't been executed yet, the return
+            # branch above would have been taken.
+
+
+            waiting[0] = True
+            waiting[1] = None
+
+
+    return deferred
+
+
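A minimal sketch of the behaviour the trampoline above exists to protect: a decorated generator can yield a long run of already-fired Deferreds without deep recursion, because each immediate result is consumed by the while-loop rather than by a nested call. sumImmediate is an illustrative name, not part of this module:

    from twisted.internet.defer import inlineCallbacks, returnValue, succeed

    @inlineCallbacks
    def sumImmediate(n):
        total = 0
        for i in range(n):
            # succeed() returns an already-fired Deferred; the while-loop in
            # _inlineCallbacks consumes each one iteratively instead of
            # recursing once per yield.
            total += yield succeed(i)
        returnValue(total)

    d = sumImmediate(10000)  # fires synchronously with sum(range(10000))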
+
+def inlineCallbacks(f):
+    """
+    WARNING: this function will not work in Python 2.4 and earlier!
+
+    inlineCallbacks helps you write Deferred-using code that looks like a
+    regular sequential function. This function uses features of Python 2.5
+    generators.  If you need to be compatible with Python 2.4 or before, use
+    the L{deferredGenerator} function instead, which accomplishes the same
+    thing, but with somewhat more boilerplate.  For example::
+
+        @inlineCallbacks
+        def thingummy():
+            thing = yield makeSomeRequestResultingInDeferred()
+            print thing #the result! hoorj!
+
+    When you call anything that results in a L{Deferred}, you can simply yield it;
+    your generator will automatically be resumed when the Deferred's result is
+    available. The generator will be sent the result of the L{Deferred} with the
+    'send' method on generators, or if the result was a failure, 'throw'.
+
+    Things that are not L{Deferred}s may also be yielded, and your generator
+    will be resumed with the same object sent back. This means C{yield}
+    performs an operation roughly equivalent to L{maybeDeferred}.
+
+    Your inlineCallbacks-enabled generator will return a L{Deferred} object, which
+    will result in the return value of the generator (or will fail with a
+    failure object if your generator raises an unhandled exception). Note that
+    you can't use C{return result} to return a value; use C{returnValue(result)}
+    instead. Falling off the end of the generator, or simply using C{return}
+    will cause the L{Deferred} to have a result of C{None}.
+
+    Be aware that L{returnValue} will not accept a L{Deferred} as a parameter.
+    If you believe the thing you'd like to return could be a L{Deferred}, do
+    this::
+
+        result = yield result
+        returnValue(result)
+
+    The L{Deferred} returned from your deferred generator may errback if your
+    generator raised an exception::
+
+        @inlineCallbacks
+        def thingummy():
+            thing = yield makeSomeRequestResultingInDeferred()
+            if thing == 'I love Twisted':
+                # will become the result of the Deferred
+                returnValue('TWISTED IS GREAT!')
+            else:
+                # will trigger an errback
+                raise Exception('DESTROY ALL LIFE')
+    """
+    @wraps(f)
+    def unwindGenerator(*args, **kwargs):
+        try:
+            gen = f(*args, **kwargs)
+        except _DefGen_Return:
+            raise TypeError(
+                "inlineCallbacks requires %r to produce a generator; instead "
+                "caught returnValue being used in a non-generator" % (f,))
+        if not isinstance(gen, types.GeneratorType):
+            raise TypeError(
+                "inlineCallbacks requires %r to produce a generator; "
+                "instead got %r" % (f, gen))
+        return _inlineCallbacks(None, gen, Deferred())
+    return unwindGenerator
+
+
+## DeferredLock/DeferredQueue
+
+class _ConcurrencyPrimitive(object):
+    def __init__(self):
+        self.waiting = []
+
+
+    def _releaseAndReturn(self, r):
+        self.release()
+        return r
+
+
+    def run(*args, **kwargs):
+        """
+        Acquire, run, release.
+
+        This function takes a callable as its first argument and any
+        number of other positional and keyword arguments.  When the
+        lock or semaphore is acquired, the callable will be invoked
+        with those arguments.
+
+        The callable may return a L{Deferred}; if it does, the lock or
+        semaphore won't be released until that L{Deferred} fires.
+
+        @return: L{Deferred} of function result.
+        """
+        if len(args) < 2:
+            if not args:
+                raise TypeError("run() takes at least 2 arguments, none given.")
+            raise TypeError("%s.run() takes at least 2 arguments, 1 given" % (
+                args[0].__class__.__name__,))
+        self, f = args[:2]
+        args = args[2:]
+
+        def execute(ignoredResult):
+            d = maybeDeferred(f, *args, **kwargs)
+            d.addBoth(self._releaseAndReturn)
+            return d
+
+        d = self.acquire()
+        d.addCallback(execute)
+        return d
+
+
+
+class DeferredLock(_ConcurrencyPrimitive):
+    """
+    A lock for event driven systems.
+
+    @ivar locked: C{True} when this Lock has been acquired, false at all other
+        times.  Do not change this value, but it is useful to examine for the
+        equivalent of a "non-blocking" acquisition.
+    """
+
+    locked = False
+
+
+    def _cancelAcquire(self, d):
+        """
+        Remove a deferred d from our waiting list, as the deferred has been
+        canceled.
+
+        Note: We do not need to wrap this in a try/except to catch d not
+        being in self.waiting because this canceller will not be called if
+        d has fired. release() pops a deferred out of self.waiting and
+        calls it, so the canceller will no longer be called.
+
+        @param d: The deferred that has been canceled.
+        """
+        self.waiting.remove(d)
+
+
+    def acquire(self):
+        """
+        Attempt to acquire the lock.  Returns a L{Deferred} that fires on
+        lock acquisition with the L{DeferredLock} as the value.  If the lock
+        is locked, then the Deferred is placed at the end of a waiting list.
+
+        @return: a L{Deferred} which fires on lock acquisition.
+        @rtype: a L{Deferred}
+        """
+        d = Deferred(canceller=self._cancelAcquire)
+        if self.locked:
+            self.waiting.append(d)
+        else:
+            self.locked = True
+            d.callback(self)
+        return d
+
+
+    def release(self):
+        """
+        Release the lock.  If there is a waiting list, then the first
+        L{Deferred} in that waiting list will be called back.
+
+        Should be called by whoever did the L{acquire}() when the shared
+        resource is free.
+        """
+        assert self.locked, "Tried to release an unlocked lock"
+        self.locked = False
+        if self.waiting:
+            # someone is waiting to acquire lock
+            self.locked = True
+            d = self.waiting.pop(0)
+            d.callback(self)
+
+
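A brief usage sketch for DeferredLock; updateSharedResource is a placeholder for any callable, possibly Deferred-returning:

    from twisted.internet.defer import DeferredLock

    lock = DeferredLock()
    results = []

    def updateSharedResource(value):
        # Stand-in critical section; it may also return a Deferred, in which
        # case the lock stays held until that Deferred fires.
        results.append(value)

    # run() acquires the lock, calls the function, and releases on completion;
    # the second call is queued until the first has released.
    d1 = lock.run(updateSharedResource, 1)
    d2 = lock.run(updateSharedResource, 2)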
+
+class DeferredSemaphore(_ConcurrencyPrimitive):
+    """
+    A semaphore for event driven systems.
+
+    @ivar tokens: At most this many users may acquire this semaphore at
+        once.
+    @type tokens: C{int}
+
+    @ivar limit: The difference between C{tokens} and the number of users
+        which have currently acquired this semaphore.
+    @type limit: C{int}
+    """
+
+    def __init__(self, tokens):
+        _ConcurrencyPrimitive.__init__(self)
+        if tokens < 1:
+            raise ValueError("DeferredSemaphore requires tokens >= 1")
+        self.tokens = tokens
+        self.limit = tokens
+
+
+    def _cancelAcquire(self, d):
+        """
+        Remove a deferred d from our waiting list, as the deferred has been
+        canceled.
+
+        Note: We do not need to wrap this in a try/except to catch d not
+        being in self.waiting because this canceller will not be called if
+        d has fired. release() pops a deferred out of self.waiting and
+        calls it, so the canceller will no longer be called.
+
+        @param d: The deferred that has been canceled.
+        """
+        self.waiting.remove(d)
+
+
+    def acquire(self):
+        """
+        Attempt to acquire the token.
+
+        @return: a L{Deferred} which fires on token acquisition.
+        """
+        assert self.tokens >= 0, "Internal inconsistency??  tokens should never be negative"
+        d = Deferred(canceller=self._cancelAcquire)
+        if not self.tokens:
+            self.waiting.append(d)
+        else:
+            self.tokens = self.tokens - 1
+            d.callback(self)
+        return d
+
+
+    def release(self):
+        """
+        Release the token.
+
+        Should be called by whoever did the L{acquire}() when the shared
+        resource is free.
+        """
+        assert self.tokens < self.limit, "Someone released me too many times: too many tokens!"
+        self.tokens = self.tokens + 1
+        if self.waiting:
+            # someone is waiting to acquire token
+            self.tokens = self.tokens - 1
+            d = self.waiting.pop(0)
+            d.callback(self)
+
+
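A sketch of capping concurrency with DeferredSemaphore; fetchPage and the URL list are placeholders:

    from twisted.internet.defer import DeferredSemaphore, gatherResults

    sem = DeferredSemaphore(2)   # at most two holders at a time

    def fetchPage(url):
        # Stand-in for a Deferred-returning operation such as an HTTP request.
        pass

    urls = ["http://example.com/%d" % (i,) for i in range(10)]
    # Each run() waits for a token, calls fetchPage, and returns the token when
    # the result is available, so no more than two fetches are in flight.
    d = gatherResults([sem.run(fetchPage, url) for url in urls])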
+
+class QueueOverflow(Exception):
+    pass
+
+
+
+class QueueUnderflow(Exception):
+    pass
+
+
+
+class DeferredQueue(object):
+    """
+    An event driven queue.
+
+    Objects may be added as usual to this queue.  When an attempt is
+    made to retrieve an object when the queue is empty, a L{Deferred} is
+    returned which will fire when an object becomes available.
+
+    @ivar size: The maximum number of objects to allow into the queue
+    at a time.  When an attempt to add a new object would exceed this
+    limit, L{QueueOverflow} is raised synchronously.  C{None} for no limit.
+
+    @ivar backlog: The maximum number of L{Deferred} gets to allow at
+    one time.  When an attempt is made to get an object which would
+    exceed this limit, L{QueueUnderflow} is raised synchronously.  C{None}
+    for no limit.
+    """
+
+    def __init__(self, size=None, backlog=None):
+        self.waiting = []
+        self.pending = []
+        self.size = size
+        self.backlog = backlog
+
+
+    def _cancelGet(self, d):
+        """
+        Remove a deferred d from our waiting list, as the deferred has been
+        canceled.
+
+        Note: We do not need to wrap this in a try/except to catch d not
+        being in self.waiting because this canceller will not be called if
+        d has fired. put() pops a deferred out of self.waiting and calls
+        it, so the canceller will no longer be called.
+
+        @param d: The deferred that has been canceled.
+        """
+        self.waiting.remove(d)
+
+
+    def put(self, obj):
+        """
+        Add an object to this queue.
+
+        @raise QueueOverflow: Too many objects are in this queue.
+        """
+        if self.waiting:
+            self.waiting.pop(0).callback(obj)
+        elif self.size is None or len(self.pending) < self.size:
+            self.pending.append(obj)
+        else:
+            raise QueueOverflow()
+
+
+    def get(self):
+        """
+        Attempt to retrieve and remove an object from the queue.
+
+        @return: a L{Deferred} which fires with the next object available in
+        the queue.
+
+        @raise QueueUnderflow: Too many (more than C{backlog})
+        L{Deferred}s are already waiting for an object from this queue.
+        """
+        if self.pending:
+            return succeed(self.pending.pop(0))
+        elif self.backlog is None or len(self.waiting) < self.backlog:
+            d = Deferred(canceller=self._cancelGet)
+            self.waiting.append(d)
+            return d
+        else:
+            raise QueueUnderflow()
+
+
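A short producer/consumer sketch for DeferredQueue; get() may run before or after put(), and either way the consumer's Deferred fires with the object:

    from twisted.internet.defer import DeferredQueue

    queue = DeferredQueue()

    # Consumer asks first; it receives a Deferred that fires on the next put().
    d = queue.get()
    d.addCallback(lambda obj: 'got %r' % (obj,))

    # Producer delivers; the waiting Deferred above fires with 'work item'.
    queue.put('work item')

    # If the queue already holds objects, get() fires immediately instead.
    queue.put('second item')
    d2 = queue.get()   # already-fired Deferred carrying 'second item'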
+
+# Re-add to Python 3 in #5960:
+if not _PY3:
+    from twisted.python import lockfile
+
+    class AlreadyTryingToLockError(Exception):
+        """
+        Raised when L{DeferredFilesystemLock.deferUntilLocked} is called twice on a
+        single L{DeferredFilesystemLock}.
+        """
+
+
+
+    class DeferredFilesystemLock(lockfile.FilesystemLock):
+        """
+        A L{FilesystemLock} that allows for a L{Deferred} to be fired when the lock is
+        acquired.
+
+        @ivar _scheduler: The object in charge of scheduling retries. In this
+            implementation this is parameterized for testing.
+
+        @ivar _interval: The retry interval for an L{IReactorTime} based scheduler.
+
+        @ivar _tryLockCall: A L{DelayedCall} based on C{_interval} that will manage
+            the next retry for acquiring the lock.
+
+        @ivar _timeoutCall: A L{DelayedCall} based on C{deferUntilLocked}'s timeout
+            argument.  This is in charge of timing out our attempt to acquire the
+            lock.
+        """
+        _interval = 1
+        _tryLockCall = None
+        _timeoutCall = None
+
+
+        def __init__(self, name, scheduler=None):
+            """
+            @param name: The name of the lock to acquire
+            @param scheduler: An object which provides L{IReactorTime}
+            """
+            lockfile.FilesystemLock.__init__(self, name)
+
+            if scheduler is None:
+                from twisted.internet import reactor
+                scheduler = reactor
+
+            self._scheduler = scheduler
+
+
+        def deferUntilLocked(self, timeout=None):
+            """
+            Wait until we acquire this lock.  This method is not safe for
+            concurrent use.
+
+            @type timeout: C{float} or C{int}
+            @param timeout: the number of seconds after which to time out if the
+                lock has not been acquired.
+
+            @return: a L{Deferred} which will callback when the lock is acquired, or
+                errback with a L{TimeoutError} after timing out or an
+                L{AlreadyTryingToLockError} if L{deferUntilLocked} has already
+                been called and has not yet successfully locked the file.
+            """
+            if self._tryLockCall is not None:
+                return fail(
+                    AlreadyTryingToLockError(
+                        "deferUntilLocked isn't safe for concurrent use."))
+
+            d = Deferred()
+
+            def _cancelLock():
+                self._tryLockCall.cancel()
+                self._tryLockCall = None
+                self._timeoutCall = None
+
+                if self.lock():
+                    d.callback(None)
+                else:
+                    d.errback(failure.Failure(
+                            TimeoutError("Timed out acquiring lock: %s after %fs" % (
+                                    self.name,
+                                    timeout))))
+
+            def _tryLock():
+                if self.lock():
+                    if self._timeoutCall is not None:
+                        self._timeoutCall.cancel()
+                        self._timeoutCall = None
+
+                    self._tryLockCall = None
+
+                    d.callback(None)
+                else:
+                    if timeout is not None and self._timeoutCall is None:
+                        self._timeoutCall = self._scheduler.callLater(
+                            timeout, _cancelLock)
+
+                    self._tryLockCall = self._scheduler.callLater(
+                        self._interval, _tryLock)
+
+            _tryLock()
+
+            return d
+
+
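A usage sketch for DeferredFilesystemLock (Python 2 only in this version, per the _PY3 guard above); the lock path and callbacks are placeholders:

    from twisted.internet.defer import DeferredFilesystemLock

    lock = DeferredFilesystemLock('/tmp/example-app.lock')

    def locked(ignored):
        # We now hold the filesystem lock; do the protected work here.
        pass

    def failed(reason):
        # Either TimeoutError (10s elapsed) or AlreadyTryingToLockError.
        pass

    d = lock.deferUntilLocked(timeout=10)
    d.addCallbacks(locked, failed)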
+
+__all__ = ["Deferred", "DeferredList", "succeed", "fail", "FAILURE", "SUCCESS",
+           "AlreadyCalledError", "TimeoutError", "gatherResults",
+           "maybeDeferred",
+           "waitForDeferred", "deferredGenerator", "inlineCallbacks",
+           "returnValue",
+           "DeferredLock", "DeferredSemaphore", "DeferredQueue",
+           "DeferredFilesystemLock", "AlreadyTryingToLockError",
+          ]
diff --git a/ThirdParty/Twisted/twisted/internet/endpoints.py b/ThirdParty/Twisted/twisted/internet/endpoints.py
new file mode 100644
index 0000000..bb21280
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/endpoints.py
@@ -0,0 +1,884 @@
+# -*- test-case-name: twisted.internet.test.test_endpoints -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+"""
+Implementations of L{IStreamServerEndpoint} and L{IStreamClientEndpoint} that
+wrap the L{IReactorTCP}, L{IReactorSSL}, and L{IReactorUNIX} interfaces.
+
+This also implements an extensible mini-language for describing endpoints,
+parsed by the L{clientFromString} and L{serverFromString} functions.
+
+@since: 10.1
+"""
+
+import os, socket
+
+from zope.interface import implements
+import warnings
+
+from twisted.internet import interfaces, defer, error, fdesc
+from twisted.plugin import IPlugin, getPlugins
+from twisted.internet.interfaces import IStreamServerEndpointStringParser
+from twisted.internet.interfaces import IStreamClientEndpointStringParser
+from twisted.python.filepath import FilePath
+from twisted.python.systemd import ListenFDs
+from twisted.internet import stdio
+from twisted.internet.stdio import PipeAddress
+
+from twisted.internet._endpointspy3 import (
+    _WrappingFactory, TCP4ServerEndpoint, TCP6ServerEndpoint,
+    TCP4ClientEndpoint, TCP6ClientEndpoint, SSL4ClientEndpoint,
+    SSL4ServerEndpoint)
+
+__all__ = ["clientFromString", "serverFromString",
+           "TCP4ServerEndpoint", "TCP6ServerEndpoint",
+           "TCP4ClientEndpoint", "TCP6ClientEndpoint",
+           "UNIXServerEndpoint", "UNIXClientEndpoint",
+           "SSL4ServerEndpoint", "SSL4ClientEndpoint",
+           "AdoptedStreamServerEndpoint", "StandardIOEndpoint"]
+
+
+
+class StandardIOEndpoint(object):
+    """
+    A Standard Input/Output endpoint
+    """
+    implements(interfaces.IStreamServerEndpoint)
+
+    def __init__(self, reactor):
+        """
+        @param reactor: The reactor for the endpoint
+        """
+        self._reactor = reactor
+
+
+    def listen(self, stdioProtocolFactory):
+        """
+        Implement L{IStreamServerEndpoint.listen} to listen on stdin/stdout
+        """
+        return defer.execute(stdio.StandardIO,
+                             stdioProtocolFactory.buildProtocol(PipeAddress()))
+
+
+
+class UNIXServerEndpoint(object):
+    """
+    UnixSocket server endpoint.
+    """
+    implements(interfaces.IStreamServerEndpoint)
+
+    def __init__(self, reactor, address, backlog=50, mode=0666, wantPID=0):
+        """
+        @param reactor: An L{IReactorUNIX} provider.
+        @param address: The path to the Unix socket file, used when listening
+        @param backlog: number of connections to allow in backlog.
+        @param mode: mode to set on the unix socket.  This parameter is
+            deprecated.  Permissions should be set on the directory which
+            contains the UNIX socket.
+        @param wantPID: If True, create a pidfile for the socket.
+        """
+        self._reactor = reactor
+        self._address = address
+        self._backlog = backlog
+        self._mode = mode
+        self._wantPID = wantPID
+
+
+    def listen(self, protocolFactory):
+        """
+        Implement L{IStreamServerEndpoint.listen} to listen on a UNIX socket.
+        """
+        return defer.execute(self._reactor.listenUNIX, self._address,
+                             protocolFactory,
+                             backlog=self._backlog,
+                             mode=self._mode,
+                             wantPID=self._wantPID)
+
+
+
+class UNIXClientEndpoint(object):
+    """
+    UnixSocket client endpoint.
+    """
+    implements(interfaces.IStreamClientEndpoint)
+
+    def __init__(self, reactor, path, timeout=30, checkPID=0):
+        """
+        @param reactor: An L{IReactorUNIX} provider.
+
+        @param path: The path to the Unix socket file, used when connecting
+        @type path: str
+
+        @param timeout: Number of seconds to wait before assuming the
+            connection has failed.
+        @type timeout: int
+
+        @param checkPID: If True, check for a pid file to verify that a server
+            is listening.
+        @type checkPID: bool
+        """
+        self._reactor = reactor
+        self._path = path
+        self._timeout = timeout
+        self._checkPID = checkPID
+
+
+    def connect(self, protocolFactory):
+        """
+        Implement L{IStreamClientEndpoint.connect} to connect via a
+        UNIX Socket
+        """
+        try:
+            wf = _WrappingFactory(protocolFactory)
+            self._reactor.connectUNIX(
+                self._path, wf,
+                timeout=self._timeout,
+                checkPID=self._checkPID)
+            return wf._onConnection
+        except:
+            return defer.fail()
+
+
+
+class AdoptedStreamServerEndpoint(object):
+    """
+    An endpoint for listening on a file descriptor initialized outside of
+    Twisted.
+
+    @ivar _used: A C{bool} indicating whether this endpoint has been used to
+        listen with a factory yet.  C{True} if so.
+    """
+    implements(interfaces.IStreamServerEndpoint)
+
+    _close = os.close
+    _setNonBlocking = staticmethod(fdesc.setNonBlocking)
+
+    def __init__(self, reactor, fileno, addressFamily):
+        """
+        @param reactor: An L{IReactorSocket} provider.
+
+        @param fileno: An integer file descriptor corresponding to a listening
+            I{SOCK_STREAM} socket.
+
+        @param addressFamily: The address family of the socket given by
+            C{fileno}.
+        """
+        self.reactor = reactor
+        self.fileno = fileno
+        self.addressFamily = addressFamily
+        self._used = False
+
+
+    def listen(self, factory):
+        """
+        Implement L{IStreamServerEndpoint.listen} to start listening on, and
+        then close, C{self.fileno}.
+        """
+        if self._used:
+            return defer.fail(error.AlreadyListened())
+        self._used = True
+
+        try:
+            self._setNonBlocking(self.fileno)
+            port = self.reactor.adoptStreamPort(
+                self.fileno, self.addressFamily, factory)
+            self._close(self.fileno)
+        except:
+            return defer.fail()
+        return defer.succeed(port)
+
+
+
+def _parseTCP(factory, port, interface="", backlog=50):
+    """
+    Internal parser function for L{_parseServer} to convert the string
+    arguments for a TCP(IPv4) stream endpoint into the structured arguments.
+
+    @param factory: the protocol factory being parsed, or C{None}.  (This was a
+        leftover argument from when this code was in C{strports}, and is now
+        mostly None and unused.)
+
+    @type factory: L{IProtocolFactory} or C{NoneType}
+
+    @param port: the integer port number to bind
+    @type port: C{str}
+
+    @param interface: the interface IP to listen on
+    @param backlog: the length of the listen queue
+    @type backlog: C{str}
+
+    @return: a 2-tuple of (args, kwargs), describing the parameters to
+        L{IReactorTCP.listenTCP} (or, modulo argument 2, the factory, the
+        arguments to L{TCP4ServerEndpoint}).
+    """
+    return (int(port), factory), {'interface': interface,
+                                  'backlog': int(backlog)}
+
+
+
+def _parseUNIX(factory, address, mode='666', backlog=50, lockfile=True):
+    """
+    Internal parser function for L{_parseServer} to convert the string
+    arguments for a UNIX (AF_UNIX/SOCK_STREAM) stream endpoint into the
+    structured arguments.
+
+    @param factory: the protocol factory being parsed, or C{None}.  (This was a
+        leftover argument from when this code was in C{strports}, and is now
+        mostly None and unused.)
+
+    @type factory: L{IProtocolFactory} or C{NoneType}
+
+    @param address: the pathname of the unix socket
+    @type address: C{str}
+
+    @param backlog: the length of the listen queue
+    @type backlog: C{str}
+
+    @param lockfile: A string '0' or '1', mapping to True and False
+        respectively.  See the C{wantPID} argument to C{listenUNIX}
+
+    @return: a 2-tuple of (args, kwargs), describing the parameters to
+        L{IReactorUNIX.listenUNIX} (or, modulo argument 2, the factory, the
+        arguments to L{UNIXServerEndpoint}).
+    """
+    return (
+        (address, factory),
+        {'mode': int(mode, 8), 'backlog': int(backlog),
+         'wantPID': bool(int(lockfile))})
+
+
+
+def _parseSSL(factory, port, privateKey="server.pem", certKey=None,
+              sslmethod=None, interface='', backlog=50):
+    """
+    Internal parser function for L{_parseServer} to convert the string
+    arguments for an SSL (over TCP/IPv4) stream endpoint into the structured
+    arguments.
+
+    @param factory: the protocol factory being parsed, or C{None}.  (This was a
+        leftover argument from when this code was in C{strports}, and is now
+        mostly None and unused.)
+    @type factory: L{IProtocolFactory} or C{NoneType}
+
+    @param port: the integer port number to bind
+    @type port: C{str}
+
+    @param interface: the interface IP to listen on
+    @param backlog: the length of the listen queue
+    @type backlog: C{str}
+
+    @param privateKey: The file name of a PEM format private key file.
+    @type privateKey: C{str}
+
+    @param certKey: The file name of a PEM format certificate file.
+    @type certKey: C{str}
+
+    @param sslmethod: The string name of an SSL method, based on the name of a
+        constant in C{OpenSSL.SSL}.  Must be one of: "SSLv23_METHOD",
+        "SSLv2_METHOD", "SSLv3_METHOD", "TLSv1_METHOD".
+    @type sslmethod: C{str}
+
+    @return: a 2-tuple of (args, kwargs), describing the parameters to
+        L{IReactorSSL.listenSSL} (or, modulo argument 2, the factory, the
+        arguments to L{SSL4ServerEndpoint}).
+    """
+    from twisted.internet import ssl
+    if certKey is None:
+        certKey = privateKey
+    kw = {}
+    if sslmethod is not None:
+        kw['sslmethod'] = getattr(ssl.SSL, sslmethod)
+    cf = ssl.DefaultOpenSSLContextFactory(privateKey, certKey, **kw)
+    return ((int(port), factory, cf),
+            {'interface': interface, 'backlog': int(backlog)})
+
+
+
+class _StandardIOParser(object):
+    """
+    Stream server endpoint string parser for the Standard I/O type.
+
+    @ivar prefix: See L{IStreamClientEndpointStringParser.prefix}.
+    """
+    implements(IPlugin, IStreamServerEndpointStringParser)
+
+    prefix = "stdio"
+
+    def _parseServer(self, reactor):
+        """
+        Internal parser function for L{_parseServer} to convert the string
+        arguments into structured arguments for the L{StandardIOEndpoint}
+
+        @param reactor: Reactor for the endpoint
+        """
+        return StandardIOEndpoint(reactor)
+
+
+    def parseStreamServer(self, reactor, *args, **kwargs):
+        # Redirects to another function (self._parseServer), tricks zope.interface
+        # into believing the interface is correctly implemented.
+        return self._parseServer(reactor)
+
+
+
+class _SystemdParser(object):
+    """
+    Stream server endpoint string parser for the I{systemd} endpoint type.
+
+    @ivar prefix: See L{IStreamClientEndpointStringParser.prefix}.
+
+    @ivar _sddaemon: A L{ListenFDs} instance used to translate an index into an
+        actual file descriptor.
+    """
+    implements(IPlugin, IStreamServerEndpointStringParser)
+
+    _sddaemon = ListenFDs.fromEnvironment()
+
+    prefix = "systemd"
+
+    def _parseServer(self, reactor, domain, index):
+        """
+        Internal parser function for L{_parseServer} to convert the string
+        arguments for a systemd server endpoint into structured arguments for
+        L{AdoptedStreamServerEndpoint}.
+
+        @param reactor: An L{IReactorSocket} provider.
+
+        @param domain: The domain (or address family) of the socket inherited
+            from systemd.  This is a string like C{"INET"} or C{"UNIX"}, i.e. the
+            name of an address family from the L{socket} module, without the
+            C{"AF_"} prefix.
+        @type domain: C{str}
+
+        @param index: An offset into the list of file descriptors inherited from
+            systemd.
+        @type index: C{str}
+
+        @return: A two-tuple of parsed positional arguments and parsed keyword
+            arguments (a tuple and a dictionary).  These can be used to
+            construct an L{AdoptedStreamServerEndpoint}.
+        """
+        index = int(index)
+        fileno = self._sddaemon.inheritedDescriptors()[index]
+        addressFamily = getattr(socket, 'AF_' + domain)
+        return AdoptedStreamServerEndpoint(reactor, fileno, addressFamily)
+
+
+    def parseStreamServer(self, reactor, *args, **kwargs):
+        # Delegate to another function with a sane signature.  This function has
+        # an insane signature to trick zope.interface into believing the
+        # interface is correctly implemented.
+        return self._parseServer(reactor, *args, **kwargs)
+
+
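Given the parser above, a description of the following form adopts a listening socket inherited from systemd; index 0 simply names the first inherited descriptor:

    from twisted.internet import reactor
    from twisted.internet.endpoints import serverFromString

    # Adopt the first AF_INET listening socket inherited from systemd.
    endpoint = serverFromString(reactor, "systemd:domain=INET:index=0")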
+
+class _TCP6ServerParser(object):
+    """
+    Stream server endpoint string parser for the TCP6ServerEndpoint type.
+
+    @ivar prefix: See L{IStreamClientEndpointStringParser.prefix}.
+    """
+    implements(IPlugin, IStreamServerEndpointStringParser)
+
+    prefix = "tcp6"     # Used in _parseServer to identify the plugin with the endpoint type
+
+    def _parseServer(self, reactor, port, backlog=50, interface='::'):
+        """
+        Internal parser function for L{_parseServer} to convert the string
+        arguments into structured arguments for the L{TCP6ServerEndpoint}
+
+        @param reactor: An L{IReactorTCP} provider.
+
+        @param port: The port number used for listening
+        @type port: int
+
+        @param backlog: Size of the listen queue
+        @type backlog: int
+
+        @param interface: The hostname to bind to
+        @type interface: str
+        """
+        port = int(port)
+        backlog = int(backlog)
+        return TCP6ServerEndpoint(reactor, port, backlog, interface)
+
+
+    def parseStreamServer(self, reactor, *args, **kwargs):
+        # Redirects to another function (self._parseServer), tricks zope.interface
+        # into believing the interface is correctly implemented.
+        return self._parseServer(reactor, *args, **kwargs)
+
+
+
+_serverParsers = {"tcp": _parseTCP,
+                  "unix": _parseUNIX,
+                  "ssl": _parseSSL,
+                  }
+
+_OP, _STRING = range(2)
+
+def _tokenize(description):
+    """
+    Tokenize a strports string and yield each token.
+
+    @param description: a string as described by L{serverFromString} or
+        L{clientFromString}.
+
+    @return: an iterable of 2-tuples of (L{_OP} or L{_STRING}, string).  Tuples
+        starting with L{_OP} will contain a second element of either ':' (i.e.
+        'next parameter') or '=' (i.e. 'assign parameter value').  For example,
+        the string 'hello:greet\=ing=world' would result in a generator
+        yielding these values::
+
+            _STRING, 'hello'
+            _OP, ':'
+            _STRING, 'greet=ing'
+            _OP, '='
+            _STRING, 'world'
+    """
+    current = ''
+    ops = ':='
+    nextOps = {':': ':=', '=': ':'}
+    description = iter(description)
+    for n in description:
+        if n in ops:
+            yield _STRING, current
+            yield _OP, n
+            current = ''
+            ops = nextOps[n]
+        elif n == '\\':
+            current += description.next()
+        else:
+            current += n
+    yield _STRING, current
+
+
+
+def _parse(description):
+    """
+    Convert a description string into a list of positional and keyword
+    parameters, using logic vaguely like what Python does.
+
+    @param description: a string as described by L{serverFromString} or
+        L{clientFromString}.
+
+    @return: a 2-tuple of C{(args, kwargs)}, where 'args' is a list of all
+        ':'-separated C{str}s not containing an '=' and 'kwargs' is a map of
+        all C{str}s which do contain an '='.  For example, the result of
+        C{_parse('a:b:d=1:c')} would be C{(['a', 'b', 'c'], {'d': '1'})}.
+    """
+    args, kw = [], {}
+    def add(sofar):
+        if len(sofar) == 1:
+            args.append(sofar[0])
+        else:
+            kw[sofar[0]] = sofar[1]
+    sofar = ()
+    for (type, value) in _tokenize(description):
+        if type is _STRING:
+            sofar += (value,)
+        elif value == ':':
+            add(sofar)
+            sofar = ()
+    add(sofar)
+    return args, kw
+
+
+# Mappings from description "names" to endpoint constructors.
+_endpointServerFactories = {
+    'TCP': TCP4ServerEndpoint,
+    'SSL': SSL4ServerEndpoint,
+    'UNIX': UNIXServerEndpoint,
+    }
+
+_endpointClientFactories = {
+    'TCP': TCP4ClientEndpoint,
+    'SSL': SSL4ClientEndpoint,
+    'UNIX': UNIXClientEndpoint,
+    }
+
+
+_NO_DEFAULT = object()
+
+def _parseServer(description, factory, default=None):
+    """
+    Parse a strports description into a 3-tuple of (name or plugin, arguments,
+    keyword values).
+
+    @param description: A description in the format explained by
+        L{serverFromString}.
+    @type description: C{str}
+
+    @param factory: A 'factory' argument; this is left-over from
+        twisted.application.strports, it's not really used.
+    @type factory: L{IProtocolFactory} or L{None}
+
+    @param default: Deprecated argument, specifying the default parser mode to
+        use for unqualified description strings (those which do not have a ':'
+        and prefix).
+    @type default: C{str} or C{NoneType}
+
+    @return: a 3-tuple of (plugin or name, arguments, keyword arguments)
+    """
+    args, kw = _parse(description)
+    if not args or (len(args) == 1 and not kw):
+        deprecationMessage = (
+            "Unqualified strport description passed to 'service'. "
+            "Use qualified endpoint descriptions; for example, 'tcp:%s'."
+            % (description,))
+        if default is None:
+            default = 'tcp'
+            warnings.warn(
+                deprecationMessage, category=DeprecationWarning, stacklevel=4)
+        elif default is _NO_DEFAULT:
+            raise ValueError(deprecationMessage)
+        # If the default has been otherwise specified, the user has already
+        # been warned.
+        args[0:0] = [default]
+    endpointType = args[0]
+    parser = _serverParsers.get(endpointType)
+    if parser is None:
+        # If the required parser is not found in _server, check if
+        # a plugin exists for the endpointType
+        for plugin in getPlugins(IStreamServerEndpointStringParser):
+            if plugin.prefix == endpointType:
+                return (plugin, args[1:], kw)
+        raise ValueError("Unknown endpoint type: '%s'" % (endpointType,))
+    return (endpointType.upper(),) + parser(factory, *args[1:], **kw)
+
+
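For a concrete sense of the returned shape, a description handled by the built-in TCP parser comes back as a name plus listenTCP-style arguments; the values below follow from _parseTCP above:

    _parseServer("tcp:80:interface=127.0.0.1", None)
    # -> ('TCP', (80, None), {'interface': '127.0.0.1', 'backlog': 50})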
+
+def _serverFromStringLegacy(reactor, description, default):
+    """
+    Underlying implementation of L{serverFromString} which avoids exposing the
+    deprecated 'default' argument to anything but L{strports.service}.
+    """
+    nameOrPlugin, args, kw = _parseServer(description, None, default)
+    if type(nameOrPlugin) is not str:
+        plugin = nameOrPlugin
+        return plugin.parseStreamServer(reactor, *args, **kw)
+    else:
+        name = nameOrPlugin
+    # Chop out the factory.
+    args = args[:1] + args[2:]
+    return _endpointServerFactories[name](reactor, *args, **kw)
+
+
+
+def serverFromString(reactor, description):
+    """
+    Construct a stream server endpoint from an endpoint description string.
+
+    The format for server endpoint descriptions is a simple string.  It is a
+    prefix naming the type of endpoint, then a colon, then the arguments for
+    that endpoint.
+
+    For example, you can call it like this to create an endpoint that will
+    listen on TCP port 80::
+
+        serverFromString(reactor, "tcp:80")
+
+    Additional arguments may be specified as keywords, separated with colons.
+    For example, you can specify the interface for a TCP server endpoint to
+    bind to like this::
+
+        serverFromString(reactor, "tcp:80:interface=127.0.0.1")
+
+    SSL server endpoints may be specified with the 'ssl' prefix, and the
+    private key and certificate files may be specified by the C{privateKey} and
+    C{certKey} arguments::
+
+        serverFromString(reactor, "ssl:443:privateKey=key.pem:certKey=crt.pem")
+
+    If a private key file name (C{privateKey}) isn't provided, a "server.pem"
+    file is assumed to exist which contains the private key. If the certificate
+    file name (C{certKey}) isn't provided, the private key file is assumed to
+    contain the certificate as well.
+
+    You may escape colons in arguments with a backslash, which you will need to
+    use if you want to specify a full pathname argument on Windows::
+
+        serverFromString(reactor,
+            "ssl:443:privateKey=C\\:/key.pem:certKey=C\\:/cert.pem")
+
+    Finally, the 'unix' prefix may be used to specify a filesystem UNIX socket,
+    optionally with a 'mode' argument to specify the mode of the socket file
+    created by C{listen}::
+
+        serverFromString(reactor, "unix:/var/run/finger")
+        serverFromString(reactor, "unix:/var/run/finger:mode=660")
+
+    This function is also extensible; new endpoint types may be registered as
+    L{IStreamServerEndpointStringParser} plugins.  See that interface for more
+    information.
+
+    @param reactor: The server endpoint will be constructed with this reactor.
+
+    @param description: The strports description to parse.
+
+    @return: A new endpoint which can be used to listen with the parameters
+        given by C{description}.
+
+    @rtype: L{IStreamServerEndpoint<twisted.internet.interfaces.IStreamServerEndpoint>}
+
+    @raise ValueError: when the 'description' string cannot be parsed.
+
+    @since: 10.2
+    """
+    return _serverFromStringLegacy(reactor, description, _NO_DEFAULT)
+
+
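A short sketch of putting a parsed server endpoint to use; the echo protocol and port number are illustrative only:

    from twisted.internet import protocol, reactor
    from twisted.internet.endpoints import serverFromString

    class Echo(protocol.Protocol):
        # Minimal example protocol: write back whatever arrives.
        def dataReceived(self, data):
            self.transport.write(data)

    factory = protocol.Factory()
    factory.protocol = Echo

    endpoint = serverFromString(reactor, "tcp:8080:interface=127.0.0.1")
    # listen() returns a Deferred that fires with an IListeningPort once the
    # socket is bound, or errbacks (for example CannotListenError) on failure.
    d = endpoint.listen(factory)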
+
+def quoteStringArgument(argument):
+    """
+    Quote an argument to L{serverFromString} and L{clientFromString}.  Since
+    arguments are separated with colons and colons are escaped with
+    backslashes, some care is necessary if, for example, you have a pathname:
+    you may be tempted to interpolate it into a string like this::
+
+        serverFromString("ssl:443:privateKey=%s" % (myPathName,))
+
+    This may appear to work, but will have portability issues (Windows
+    pathnames, for example).  Usually you should just construct the appropriate
+    endpoint type rather than interpolating strings, which in this case would
+    be L{SSL4ServerEndpoint}.  There are some use-cases where you may need to
+    generate such a string, though; for example, a tool to manipulate a
+    configuration file which has strports descriptions in it.  To be correct in
+    those cases, do this instead::
+
+        serverFromString("ssl:443:privateKey=%s" %
+                         (quoteStringArgument(myPathName),))
+
+    @param argument: The part of the endpoint description string you want to
+        pass through.
+
+    @type argument: C{str}
+
+    @return: The quoted argument.
+
+    @rtype: C{str}
+    """
+    return argument.replace('\\', '\\\\').replace(':', '\\:')
+
+
+
+def _parseClientTCP(*args, **kwargs):
+    """
+    Perform any argument value coercion necessary for TCP client parameters.
+
+    Valid positional arguments to this function are host and port.
+
+    Valid keyword arguments to this function are all L{IReactorTCP.connectTCP}
+    arguments.
+
+    @return: The coerced values as a C{dict}.
+    """
+
+    if len(args) == 2:
+        kwargs['port'] = int(args[1])
+        kwargs['host'] = args[0]
+    elif len(args) == 1:
+        if 'host' in kwargs:
+            kwargs['port'] = int(args[0])
+        else:
+            kwargs['host'] = args[0]
+
+    try:
+        kwargs['port'] = int(kwargs['port'])
+    except KeyError:
+        pass
+
+    try:
+        kwargs['timeout'] = int(kwargs['timeout'])
+    except KeyError:
+        pass
+    return kwargs
+
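Two illustrative inputs and their coerced output, traced from the branches above:

    _parseClientTCP('example.com', '80')
    # -> {'host': 'example.com', 'port': 80}

    _parseClientTCP('80', host='example.com', timeout='10')
    # -> {'host': 'example.com', 'port': 80, 'timeout': 10}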
+
+
+def _loadCAsFromDir(directoryPath):
+    """
+    Load certificate-authority certificate objects in a given directory.
+
+    @param directoryPath: a L{FilePath} pointing at a directory to load .pem
+        files from.
+
+    @return: a C{list} of L{OpenSSL.crypto.X509} objects.
+    """
+    from twisted.internet import ssl
+
+    caCerts = {}
+    for child in directoryPath.children():
+        if not child.basename().split('.')[-1].lower() == 'pem':
+            continue
+        try:
+            data = child.getContent()
+        except IOError:
+            # Permission denied, corrupt disk, we don't care.
+            continue
+        try:
+            theCert = ssl.Certificate.loadPEM(data)
+        except ssl.SSL.Error:
+            # Duplicate certificate, invalid certificate, etc.  We don't care.
+            pass
+        else:
+            caCerts[theCert.digest()] = theCert.original
+    return caCerts.values()
+
+
+
+def _parseClientSSL(*args, **kwargs):
+    """
+    Perform any argument value coercion necessary for SSL client parameters.
+
+    Valid keyword arguments to this function are all L{IReactorSSL.connectSSL}
+    arguments except for C{contextFactory}.  Instead, C{certKey} (the path name
+    of the certificate file) and C{privateKey} (the path name of the private key
+    associated with the certificate) are accepted and used to construct a
+    context factory.
+
+    Valid positional arguments to this function are host and port.
+
+    @param caCertsDir: The one parameter which is not part of
+        L{IReactorSSL.connectSSL}'s signature, this is a path name used to
+        construct a list of certificate authority certificates.  The directory
+        will be scanned for files ending in C{.pem}, all of which will be
+        considered valid certificate authorities for this connection.
+
+    @type caCertsDir: C{str}
+
+    @return: The coerced values as a C{dict}.
+    """
+    from twisted.internet import ssl
+    kwargs = _parseClientTCP(*args, **kwargs)
+    certKey = kwargs.pop('certKey', None)
+    privateKey = kwargs.pop('privateKey', None)
+    caCertsDir = kwargs.pop('caCertsDir', None)
+    if certKey is not None:
+        certx509 = ssl.Certificate.loadPEM(
+            FilePath(certKey).getContent()).original
+    else:
+        certx509 = None
+    if privateKey is not None:
+        privateKey = ssl.PrivateCertificate.loadPEM(
+            FilePath(privateKey).getContent()).privateKey.original
+    else:
+        privateKey = None
+    if caCertsDir is not None:
+        verify = True
+        caCerts = _loadCAsFromDir(FilePath(caCertsDir))
+    else:
+        verify = False
+        caCerts = None
+    kwargs['sslContextFactory'] = ssl.CertificateOptions(
+        method=ssl.SSL.SSLv23_METHOD,
+        certificate=certx509,
+        privateKey=privateKey,
+        verify=verify,
+        caCerts=caCerts
+    )
+    return kwargs
+
+
+
+def _parseClientUNIX(*args, **kwargs):
+    """
+    Perform any argument value coercion necessary for UNIX client parameters.
+
+    Valid keyword arguments to this function are all L{IReactorUNIX.connectUNIX}
+    keyword arguments except for C{checkPID}.  Instead, C{lockfile} is accepted
+    and has the same meaning.  Also C{path} is used instead of C{address}.
+
+    Valid positional arguments to this function are C{path}.
+
+    @return: The coerced values as a C{dict}.
+    """
+    if len(args) == 1:
+        kwargs['path'] = args[0]
+
+    try:
+        kwargs['checkPID'] = bool(int(kwargs.pop('lockfile')))
+    except KeyError:
+        pass
+    try:
+        kwargs['timeout'] = int(kwargs['timeout'])
+    except KeyError:
+        pass
+    return kwargs
+
+_clientParsers = {
+    'TCP': _parseClientTCP,
+    'SSL': _parseClientSSL,
+    'UNIX': _parseClientUNIX,
+    }
+
+
+
+def clientFromString(reactor, description):
+    """
+    Construct a client endpoint from a description string.
+
+    Client description strings are much like server description strings,
+    although they take all of their arguments as keywords, aside from host and
+    port.
+
+    You can create a TCP client endpoint with the 'host' and 'port' arguments,
+    like so::
+
+        clientFromString(reactor, "tcp:host=www.example.com:port=80")
+
+    or, without specifying host and port keywords::
+
+        clientFromString(reactor, "tcp:www.example.com:80")
+
+    Or you can specify only one or the other, as in the following 2 examples::
+
+        clientFromString(reactor, "tcp:host=www.example.com:80")
+        clientFromString(reactor, "tcp:www.example.com:port=80")
+
+    or an SSL client endpoint with those arguments, plus the arguments used by
+    the server SSL, for a client certificate::
+
+        clientFromString(reactor, "ssl:web.example.com:443:"
+                                  "privateKey=foo.pem:certKey=foo.pem")
+
+    to specify your certificate trust roots, you can identify a directory with
+    PEM files in it with the C{caCertsDir} argument::
+
+        clientFromString(reactor, "ssl:host=web.example.com:port=443:"
+                                  "caCertsDir=/etc/ssl/certs")
+
+    You can create a UNIX client endpoint with the 'path' argument and optional
+    'lockfile' and 'timeout' arguments::
+
+        clientFromString(reactor, "unix:path=/var/foo/bar:lockfile=1:timeout=9")
+
+    or, with the path as a positional argument with or without optional
+    arguments as in the following 2 examples::
+
+        clientFromString(reactor, "unix:/var/foo/bar")
+        clientFromString(reactor, "unix:/var/foo/bar:lockfile=1:timeout=9")
+
+    This function is also extensible; new endpoint types may be registered as
+    L{IStreamClientEndpointStringParser} plugins.  See that interface for more
+    information.
+
+    @param reactor: The client endpoint will be constructed with this reactor.
+
+    @param description: The strports description to parse.
+
+    @return: A new endpoint which can be used to connect with the parameters
+        given by C{description}.
+    @rtype: L{IStreamClientEndpoint<twisted.internet.interfaces.IStreamClientEndpoint>}
+
+    @since: 10.2
+    """
+    args, kwargs = _parse(description)
+    aname = args.pop(0)
+    name = aname.upper()
+    for plugin in getPlugins(IStreamClientEndpointStringParser):
+        if plugin.prefix.upper() == name:
+            return plugin.parseStreamClient(*args, **kwargs)
+    if name not in _clientParsers:
+        raise ValueError("Unknown endpoint type: %r" % (aname,))
+    kwargs = _clientParsers[name](*args, **kwargs)
+    return _endpointClientFactories[name](reactor, **kwargs)
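
A client-side sketch to round out the example; the protocol, host, and port are placeholders:

    from twisted.internet import protocol, reactor
    from twisted.internet.endpoints import clientFromString

    class Greeter(protocol.Protocol):
        # Minimal example protocol: send a line, then hang up.
        def connectionMade(self):
            self.transport.write("hello\r\n")
            self.transport.loseConnection()

    factory = protocol.ClientFactory()
    factory.protocol = Greeter

    endpoint = clientFromString(reactor, "tcp:host=example.com:port=80")
    # connect() returns a Deferred firing with the connected protocol instance,
    # or errbacking if the connection attempt fails.
    d = endpoint.connect(factory)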
diff --git a/ThirdParty/Twisted/twisted/internet/epollreactor.py b/ThirdParty/Twisted/twisted/internet/epollreactor.py
new file mode 100644
index 0000000..ff76dea
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/epollreactor.py
@@ -0,0 +1,396 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An epoll() based implementation of the twisted main loop.
+
+To install the event loop (and you should do this before any connections,
+listeners or connectors are added)::
+
+    from twisted.internet import epollreactor
+    epollreactor.install()
+"""
+
+from __future__ import division, absolute_import
+
+import errno
+
+from zope.interface import implementer
+
+from twisted.internet.interfaces import IReactorFDSet
+
+from twisted.python import log
+from twisted.internet import posixbase
+
+try:
+    # In Python 2.6+, select.epoll provides epoll functionality. Try to import
+    # it, and fall back to Twisted's own epoll wrapper if it isn't available
+    # for any reason.
+    from select import epoll
+except ImportError:
+    from twisted.python import _epoll
+else:
+    del epoll
+    import select as _epoll
+
+
+
+@implementer(IReactorFDSet)
+class _ContinuousPolling(posixbase._PollLikeMixin,
+                         posixbase._DisconnectSelectableMixin):
+    """
+    Schedule reads and writes based on the passage of time, rather than
+    notification.
+
+    This is useful for supporting polling filesystem files, which C{epoll(7)}
+    does not support.
+
+    The implementation uses L{posixbase._PollLikeMixin}, which is a bit hacky,
+    but re-implementing and testing the relevant code yet again is
+    unappealing.
+
+    @ivar _reactor: The L{EPollReactor} that is using this instance.
+
+    @ivar _loop: A C{LoopingCall} that drives the polling, or C{None}.
+
+    @ivar _readers: A C{set} of C{FileDescriptor} objects that should be read
+        from.
+
+    @ivar _writers: A C{set} of C{FileDescriptor} objects that should be
+        written to.
+    """
+
+    # Attributes for _PollLikeMixin
+    _POLL_DISCONNECTED = 1
+    _POLL_IN = 2
+    _POLL_OUT = 4
+
+
+    def __init__(self, reactor):
+        self._reactor = reactor
+        self._loop = None
+        self._readers = set()
+        self._writers = set()
+        self.isReading = self._readers.__contains__
+        self.isWriting = self._writers.__contains__
+
+
+    def _checkLoop(self):
+        """
+        Start or stop a C{LoopingCall} based on whether there are readers and
+        writers.
+        """
+        if self._readers or self._writers:
+            if self._loop is None:
+                from twisted.internet.task import LoopingCall, _EPSILON
+                self._loop = LoopingCall(self.iterate)
+                self._loop.clock = self._reactor
+                # LoopingCall seems unhappy with timeout of 0, so use very
+                # small number:
+                self._loop.start(_EPSILON, now=False)
+        elif self._loop:
+            self._loop.stop()
+            self._loop = None
+
+
+    def iterate(self):
+        """
+        Call C{doRead} and C{doWrite} on all readers and writers respectively.
+        """
+        for reader in list(self._readers):
+            self._doReadOrWrite(reader, reader, self._POLL_IN)
+        for reader in list(self._writers):
+            self._doReadOrWrite(reader, reader, self._POLL_OUT)
+
+
+    def addReader(self, reader):
+        """
+        Add a C{FileDescriptor} for notification of data available to read.
+        """
+        self._readers.add(reader)
+        self._checkLoop()
+
+
+    def addWriter(self, writer):
+        """
+        Add a C{FileDescriptor} for notification of data available to write.
+        """
+        self._writers.add(writer)
+        self._checkLoop()
+
+
+    def removeReader(self, reader):
+        """
+        Remove a C{FileDescriptor} from notification of data available to read.
+        """
+        try:
+            self._readers.remove(reader)
+        except KeyError:
+            return
+        self._checkLoop()
+
+
+    def removeWriter(self, writer):
+        """
+        Remove a C{FileDescriptor} from notification of data available to write.
+        """
+        try:
+            self._writers.remove(writer)
+        except KeyError:
+            return
+        self._checkLoop()
+
+
+    def removeAll(self):
+        """
+        Remove all readers and writers.
+        """
+        result = list(self._readers | self._writers)
+        # Don't reset to new value, since self.isWriting and .isReading refer
+        # to the existing instance:
+        self._readers.clear()
+        self._writers.clear()
+        return result
+
+
+    def getReaders(self):
+        """
+        Return a list of the readers.
+        """
+        return list(self._readers)
+
+
+    def getWriters(self):
+        """
+        Return a list of the writers.
+        """
+        return list(self._writers)
+
+
+
+@implementer(IReactorFDSet)
+class EPollReactor(posixbase.PosixReactorBase, posixbase._PollLikeMixin):
+    """
+    A reactor that uses epoll(7).
+
+    @ivar _poller: A C{epoll} which will be used to check for I/O
+        readiness.
+
+    @ivar _selectables: A dictionary mapping integer file descriptors to
+        instances of C{FileDescriptor} which have been registered with the
+        reactor.  All C{FileDescriptors} which are currently receiving read or
+        write readiness notifications will be present as values in this
+        dictionary.
+
+    @ivar _reads: A dictionary mapping integer file descriptors to arbitrary
+        values (this is essentially a set).  Keys in this dictionary will be
+        registered with C{_poller} for read readiness notifications which will
+        be dispatched to the corresponding C{FileDescriptor} instances in
+        C{_selectables}.
+
+    @ivar _writes: A dictionary mapping integer file descriptors to arbitrary
+        values (this is essentially a set).  Keys in this dictionary will be
+        registered with C{_poller} for write readiness notifications which will
+        be dispatched to the corresponding C{FileDescriptor} instances in
+        C{_selectables}.
+
+    @ivar _continuousPolling: A L{_ContinuousPolling} instance, used to handle
+        file descriptors (e.g. filesystem files) that are not supported by
+        C{epoll(7)}.
+    """
+
+    # Attributes for _PollLikeMixin
+    _POLL_DISCONNECTED = (_epoll.EPOLLHUP | _epoll.EPOLLERR)
+    _POLL_IN = _epoll.EPOLLIN
+    _POLL_OUT = _epoll.EPOLLOUT
+
+    def __init__(self):
+        """
+        Initialize epoll object, file descriptor tracking dictionaries, and the
+        base class.
+        """
+        # Create the poller we're going to use.  The 1024 here is just a hint to
+        # the kernel; it is not a hard maximum.  After Linux 2.6.8, the size
+        # argument is completely ignored.
+        self._poller = _epoll.epoll(1024)
+        self._reads = {}
+        self._writes = {}
+        self._selectables = {}
+        self._continuousPolling = _ContinuousPolling(self)
+        posixbase.PosixReactorBase.__init__(self)
+
+
+    def _add(self, xer, primary, other, selectables, event, antievent):
+        """
+        Private method for adding a descriptor from the event loop.
+
+        It takes care of adding the descriptor if it is new, or modifying it
+        if it is already registered for another state (read -> read/write,
+        for example).
+        """
+        fd = xer.fileno()
+        if fd not in primary:
+            flags = event
+            # epoll_ctl can raise all kinds of IOErrors, and every one
+            # indicates a bug either in the reactor or application-code.
+            # Let them all through so someone sees a traceback and fixes
+            # something.  We'll do the same thing for every other call to
+            # this method in this file.
+            if fd in other:
+                flags |= antievent
+                self._poller.modify(fd, flags)
+            else:
+                self._poller.register(fd, flags)
+
+            # Update our own tracking state *only* after the epoll call has
+            # succeeded.  Otherwise we may get out of sync.
+            primary[fd] = 1
+            selectables[fd] = xer
+
+
+    def addReader(self, reader):
+        """
+        Add a FileDescriptor for notification of data available to read.
+        """
+        try:
+            self._add(reader, self._reads, self._writes, self._selectables,
+                      _epoll.EPOLLIN, _epoll.EPOLLOUT)
+        except IOError as e:
+            if e.errno == errno.EPERM:
+                # epoll(7) doesn't support certain file descriptors,
+                # e.g. filesystem files, so for those we just poll
+                # continuously:
+                self._continuousPolling.addReader(reader)
+            else:
+                raise
+
+
+    def addWriter(self, writer):
+        """
+        Add a FileDescriptor for notification of data available to write.
+        """
+        try:
+            self._add(writer, self._writes, self._reads, self._selectables,
+                      _epoll.EPOLLOUT, _epoll.EPOLLIN)
+        except IOError as e:
+            if e.errno == errno.EPERM:
+                # epoll(7) doesn't support certain file descriptors,
+                # e.g. filesystem files, so for those we just poll
+                # continuously:
+                self._continuousPolling.addWriter(writer)
+            else:
+                raise
+
+
+    def _remove(self, xer, primary, other, selectables, event, antievent):
+        """
+        Private method for removing a descriptor from the event loop.
+
+        It does the inverse job of _add, and also adds a check in case the fd
+        has gone away.
+        """
+        fd = xer.fileno()
+        if fd == -1:
+            for fd, fdes in selectables.items():
+                if xer is fdes:
+                    break
+            else:
+                return
+        if fd in primary:
+            if fd in other:
+                flags = antievent
+                # See comment above modify call in _add.
+                self._poller.modify(fd, flags)
+            else:
+                del selectables[fd]
+                # See comment above _control call in _add.
+                self._poller.unregister(fd)
+            del primary[fd]
+
+
+    def removeReader(self, reader):
+        """
+        Remove a Selectable for notification of data available to read.
+        """
+        if self._continuousPolling.isReading(reader):
+            self._continuousPolling.removeReader(reader)
+            return
+        self._remove(reader, self._reads, self._writes, self._selectables,
+                     _epoll.EPOLLIN, _epoll.EPOLLOUT)
+
+
+    def removeWriter(self, writer):
+        """
+        Remove a Selectable for notification of data available to write.
+        """
+        if self._continuousPolling.isWriting(writer):
+            self._continuousPolling.removeWriter(writer)
+            return
+        self._remove(writer, self._writes, self._reads, self._selectables,
+                     _epoll.EPOLLOUT, _epoll.EPOLLIN)
+
+
+    def removeAll(self):
+        """
+        Remove all selectables, and return a list of them.
+        """
+        return (self._removeAll(
+                [self._selectables[fd] for fd in self._reads],
+                [self._selectables[fd] for fd in self._writes]) +
+                self._continuousPolling.removeAll())
+
+
+    def getReaders(self):
+        return ([self._selectables[fd] for fd in self._reads] +
+                self._continuousPolling.getReaders())
+
+
+    def getWriters(self):
+        return ([self._selectables[fd] for fd in self._writes] +
+                self._continuousPolling.getWriters())
+
+
+    def doPoll(self, timeout):
+        """
+        Poll the poller for new events.
+        """
+        if timeout is None:
+            timeout = -1  # Wait indefinitely.
+
+        try:
+            # Limit the number of events to the number of io objects we're
+            # currently tracking (because that's maybe a good heuristic) and
+            # the amount of time we block to the value specified by our
+            # caller.
+            l = self._poller.poll(timeout, len(self._selectables))
+        except IOError as err:
+            if err.errno == errno.EINTR:
+                return
+            # See epoll_wait(2) for documentation on the other conditions
+            # under which this can fail.  They can only be due to a serious
+            # programming error on our part, so let's just announce them
+            # loudly.
+            raise
+
+        _drdw = self._doReadOrWrite
+        for fd, event in l:
+            try:
+                selectable = self._selectables[fd]
+            except KeyError:
+                pass
+            else:
+                log.callWithLogger(selectable, _drdw, selectable, fd, event)
+
+    doIteration = doPoll
+
+
+def install():
+    """
+    Install the epoll() reactor.
+    """
+    p = EPollReactor()
+    from twisted.internet.main import installReactor
+    installReactor(p)
+
+
+__all__ = ["EPollReactor", "install"]
+
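+# Editor's illustration (not part of upstream Twisted): a minimal sketch of
+# how this module is normally used, assuming a Linux host with epoll support.
+# install() must be called before ``twisted.internet.reactor`` is imported
+# anywhere else in the process.
+if __name__ == '__main__':
+    install()
+    from twisted.internet import reactor
+    # Stop shortly after starting so the sketch terminates on its own.
+    reactor.callLater(0.1, reactor.stop)
+    reactor.run()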
diff --git a/ThirdParty/Twisted/twisted/internet/error.py b/ThirdParty/Twisted/twisted/internet/error.py
new file mode 100644
index 0000000..6879886
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/error.py
@@ -0,0 +1,455 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Exceptions and errors for use in twisted.internet modules.
+"""
+
+from __future__ import division, absolute_import
+
+import socket
+
+from twisted.python import deprecate
+from twisted.python.versions import Version
+
+
+
+class BindError(Exception):
+    """An error occurred binding to an interface"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class CannotListenError(BindError):
+    """
+    This gets raised by a call to startListening, when the object cannot
+    start listening.
+
+    @ivar interface: the interface I tried to listen on
+    @ivar port: the port I tried to listen on
+    @ivar socketError: the exception I got when I tried to listen
+    @type socketError: L{socket.error}
+    """
+    def __init__(self, interface, port, socketError):
+        BindError.__init__(self, interface, port, socketError)
+        self.interface = interface
+        self.port = port
+        self.socketError = socketError
+
+    def __str__(self):
+        iface = self.interface or 'any'
+        return "Couldn't listen on %s:%s: %s." % (iface, self.port,
+                                                 self.socketError)
+
+
+
+class MulticastJoinError(Exception):
+    """
+    An attempt to join a multicast group failed.
+    """
+
+
+
+class MessageLengthError(Exception):
+    """Message is too long to send"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class DNSLookupError(IOError):
+    """DNS lookup failed"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class ConnectInProgressError(Exception):
+    """A connect operation was started and isn't done yet."""
+
+
+# connection errors
+
+class ConnectError(Exception):
+    """An error occurred while connecting"""
+
+    def __init__(self, osError=None, string=""):
+        self.osError = osError
+        Exception.__init__(self, string)
+
+    def __str__(self):
+        s = self.__doc__ or self.__class__.__name__
+        if self.osError:
+            s = '%s: %s' % (s, self.osError)
+        if self.args[0]:
+            s = '%s: %s' % (s, self.args[0])
+        s = '%s.' % s
+        return s
+
+
+
+class ConnectBindError(ConnectError):
+    """Couldn't bind"""
+
+
+
+class UnknownHostError(ConnectError):
+    """Hostname couldn't be looked up"""
+
+
+
+class NoRouteError(ConnectError):
+    """No route to host"""
+
+
+
+class ConnectionRefusedError(ConnectError):
+    """Connection was refused by other side"""
+
+
+
+class TCPTimedOutError(ConnectError):
+    """TCP connection timed out"""
+
+
+
+class BadFileError(ConnectError):
+    """File used for UNIX socket is no good"""
+
+
+
+class ServiceNameUnknownError(ConnectError):
+    """Service name given as port is unknown"""
+
+
+
+class UserError(ConnectError):
+    """User aborted connection"""
+
+
+
+class TimeoutError(UserError):
+    """User timeout caused connection failure"""
+
+
+
+class SSLError(ConnectError):
+    """An SSL error occurred"""
+
+
+
+class VerifyError(Exception):
+    """Could not verify something that was supposed to be signed.
+    """
+
+
+
+class PeerVerifyError(VerifyError):
+    """The peer rejected our verify error.
+    """
+
+
+
+class CertificateError(Exception):
+    """
+    We did not find a certificate where we expected to find one.
+    """
+
+
+
+try:
+    import errno
+    errnoMapping = {
+        errno.ENETUNREACH: NoRouteError,
+        errno.ECONNREFUSED: ConnectionRefusedError,
+        errno.ETIMEDOUT: TCPTimedOutError,
+    }
+    if hasattr(errno, "WSAECONNREFUSED"):
+        errnoMapping[errno.WSAECONNREFUSED] = ConnectionRefusedError
+        errnoMapping[errno.WSAENETUNREACH] = NoRouteError
+except ImportError:
+    errnoMapping = {}
+
+
+
+def getConnectError(e):
+    """Given a socket exception, return connection error."""
+    if isinstance(e, Exception):
+        args = e.args
+    else:
+        args = e
+    try:
+        number, string = args
+    except ValueError:
+        return ConnectError(string=e)
+
+    if hasattr(socket, 'gaierror') and isinstance(e, socket.gaierror):
+        # Only works in 2.2 and newer.  Really that means always; #5978 covers
+        # this and other weirdnesses in this function.
+        klass = UnknownHostError
+    else:
+        klass = errnoMapping.get(number, ConnectError)
+    return klass(number, string)
+
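+# Editor's illustration (not part of upstream Twisted): getConnectError maps
+# the errno of a two-tuple style socket error onto one of the ConnectError
+# subclasses defined above, falling back to plain ConnectError:
+#
+#     import errno, socket
+#     err = socket.error(errno.ECONNREFUSED, 'Connection refused')
+#     getConnectError(err)   # -> ConnectionRefusedError(ECONNREFUSED, ...)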
+
+
+class ConnectionClosed(Exception):
+    """
+    Connection was closed, whether cleanly or non-cleanly.
+    """
+
+
+
+class ConnectionLost(ConnectionClosed):
+    """Connection to the other side was lost in a non-clean fashion"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class ConnectionAborted(ConnectionLost):
+    """
+    Connection was aborted locally, using
+    L{twisted.internet.interfaces.ITCPTransport.abortConnection}.
+
+    @since: 11.1
+    """
+
+
+
+class ConnectionDone(ConnectionClosed):
+    """Connection was closed cleanly"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class FileDescriptorOverrun(ConnectionLost):
+    """
+    A mis-use of L{IUNIXTransport.sendFileDescriptor} caused the connection to
+    be closed.
+
+    Each file descriptor sent using C{sendFileDescriptor} must be associated
+    with at least one byte sent using L{ITransport.write}.  If at any point
+    fewer bytes have been written than file descriptors have been sent, the
+    connection is closed with this exception.
+    """
+
+
+
+class ConnectionFdescWentAway(ConnectionLost):
+    """Uh""" #TODO
+
+
+
+class AlreadyCalled(ValueError):
+    """Tried to cancel an already-called event"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class AlreadyCancelled(ValueError):
+    """Tried to cancel an already-cancelled event"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class PotentialZombieWarning(Warning):
+    """
+    Emitted when L{IReactorProcess.spawnProcess} is called in a way which may
+    result in termination of the created child process not being reported.
+
+    Deprecated in Twisted 10.0.
+    """
+    MESSAGE = (
+        "spawnProcess called, but the SIGCHLD handler is not "
+        "installed. This probably means you have not yet "
+        "called reactor.run, or called "
+        "reactor.run(installSignalHandler=0). You will probably "
+        "never see this process finish, and it may become a "
+        "zombie process.")
+
+deprecate.deprecatedModuleAttribute(
+    Version("Twisted", 10, 0, 0),
+    "There is no longer any potential for zombie process.",
+    __name__,
+    "PotentialZombieWarning")
+
+
+
+class ProcessDone(ConnectionDone):
+    """A process has ended without apparent errors"""
+
+    def __init__(self, status):
+        Exception.__init__(self, "process finished with exit code 0")
+        self.exitCode = 0
+        self.signal = None
+        self.status = status
+
+
+
+class ProcessTerminated(ConnectionLost):
+    """A process has ended with a probable error condition"""
+
+    def __init__(self, exitCode=None, signal=None, status=None):
+        self.exitCode = exitCode
+        self.signal = signal
+        self.status = status
+        s = "process ended"
+        if exitCode is not None: s = s + " with exit code %s" % exitCode
+        if signal is not None: s = s + " by signal %s" % signal
+        Exception.__init__(self, s)
+
+
+
+class ProcessExitedAlready(Exception):
+    """
+    The process has already exited and the operation requested can no longer
+    be performed.
+    """
+
+
+
+class NotConnectingError(RuntimeError):
+    """The Connector was not connecting when it was asked to stop connecting"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class NotListeningError(RuntimeError):
+    """The Port was not listening when it was asked to stop listening"""
+
+    def __str__(self):
+        s = self.__doc__
+        if self.args:
+            s = '%s: %s' % (s, ' '.join(self.args))
+        s = '%s.' % s
+        return s
+
+
+
+class ReactorNotRunning(RuntimeError):
+    """
+    Error raised when trying to stop a reactor which is not running.
+    """
+
+
+class ReactorNotRestartable(RuntimeError):
+    """
+    Error raised when trying to run a reactor which was stopped.
+    """
+
+
+
+class ReactorAlreadyRunning(RuntimeError):
+    """
+    Error raised when trying to start the reactor multiple times.
+    """
+
+
+class ReactorAlreadyInstalledError(AssertionError):
+    """
+    Could not install reactor because one is already installed.
+    """
+
+
+
+class ConnectingCancelledError(Exception):
+    """
+    An C{Exception} that will be raised when an L{IStreamClientEndpoint} is
+    cancelled before it connects.
+
+    @ivar address: The L{IAddress} that is the destination of the
+        cancelled L{IStreamClientEndpoint}.
+    """
+
+    def __init__(self, address):
+        """
+        @param address: The L{IAddress} that is the destination of the
+            L{IStreamClientEndpoint} that was cancelled.
+        """
+        Exception.__init__(self, address)
+        self.address = address
+
+
+
+class UnsupportedAddressFamily(Exception):
+    """
+    An attempt was made to use a socket with an address family (eg I{AF_INET},
+    I{AF_INET6}, etc) which is not supported by the reactor.
+    """
+
+
+
+class UnsupportedSocketType(Exception):
+    """
+    An attempt was made to use a socket of a type (eg I{SOCK_STREAM},
+    I{SOCK_DGRAM}, etc) which is not supported by the reactor.
+    """
+
+
+class AlreadyListened(Exception):
+    """
+    An attempt was made to listen on a file descriptor which can only be
+    listened on once.
+    """
+
+
+__all__ = [
+    'BindError', 'CannotListenError', 'MulticastJoinError',
+    'MessageLengthError', 'DNSLookupError', 'ConnectInProgressError',
+    'ConnectError', 'ConnectBindError', 'UnknownHostError', 'NoRouteError',
+    'ConnectionRefusedError', 'TCPTimedOutError', 'BadFileError',
+    'ServiceNameUnknownError', 'UserError', 'TimeoutError', 'SSLError',
+    'VerifyError', 'PeerVerifyError', 'CertificateError',
+    'getConnectError', 'ConnectionClosed', 'ConnectionLost',
+    'ConnectionDone', 'ConnectionFdescWentAway', 'AlreadyCalled',
+    'AlreadyCancelled', 'PotentialZombieWarning', 'ProcessDone',
+    'ProcessTerminated', 'ProcessExitedAlready', 'NotConnectingError',
+    'NotListeningError', 'ReactorNotRunning', 'ReactorAlreadyRunning',
+    'ReactorAlreadyInstalledError', 'ConnectingCancelledError',
+    'UnsupportedAddressFamily', 'UnsupportedSocketType']
diff --git a/ThirdParty/Twisted/twisted/internet/fdesc.py b/ThirdParty/Twisted/twisted/internet/fdesc.py
new file mode 100644
index 0000000..e5a760d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/fdesc.py
@@ -0,0 +1,118 @@
+# -*- test-case-name: twisted.test.test_fdesc -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Utility functions for dealing with POSIX file descriptors.
+"""
+
+import os
+import errno
+try:
+    import fcntl
+except ImportError:
+    fcntl = None
+
+# twisted imports
+from twisted.internet.main import CONNECTION_LOST, CONNECTION_DONE
+
+
+def setNonBlocking(fd):
+    """
+    Make the given file descriptor non-blocking.
+    """
+    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
+    flags = flags | os.O_NONBLOCK
+    fcntl.fcntl(fd, fcntl.F_SETFL, flags)
+
+
+def setBlocking(fd):
+    """
+    Make the given file descriptor blocking.
+    """
+    flags = fcntl.fcntl(fd, fcntl.F_GETFL)
+    flags = flags & ~os.O_NONBLOCK
+    fcntl.fcntl(fd, fcntl.F_SETFL, flags)
+
+
+if fcntl is None:
+    # fcntl isn't available on Windows.  By default, handles aren't
+    # inherited on Windows, so we can do nothing here.
+    _setCloseOnExec = _unsetCloseOnExec = lambda fd: None
+else:
+    def _setCloseOnExec(fd):
+        """
+        Make a file descriptor close-on-exec.
+        """
+        flags = fcntl.fcntl(fd, fcntl.F_GETFD)
+        flags = flags | fcntl.FD_CLOEXEC
+        fcntl.fcntl(fd, fcntl.F_SETFD, flags)
+
+
+    def _unsetCloseOnExec(fd):
+        """
+        Make a file descriptor no longer close-on-exec.
+        """
+        flags = fcntl.fcntl(fd, fcntl.F_GETFD)
+        flags = flags & ~fcntl.FD_CLOEXEC
+        fcntl.fcntl(fd, fcntl.F_SETFD, flags)
+
+
+def readFromFD(fd, callback):
+    """
+    Read from file descriptor, calling callback with resulting data.
+
+    If successful, call 'callback' with a single argument: the
+    resulting data.
+
+    Returns same thing FileDescriptor.doRead would: CONNECTION_LOST,
+    CONNECTION_DONE, or None.
+
+    @type fd: C{int}
+    @param fd: non-blocking file descriptor to be read from.
+    @param callback: a callable which accepts a single argument. If
+    data is read from the file descriptor it will be called with this
+    data. Handling exceptions from calling the callback is up to the
+    caller.
+
+    Note that if the descriptor is still connected but no data is read,
+    None will be returned but callback will not be called.
+
+    @return: CONNECTION_LOST on error, CONNECTION_DONE when fd is
+    closed, otherwise None.
+    """
+    try:
+        output = os.read(fd, 8192)
+    except (OSError, IOError) as ioe:
+        if ioe.args[0] in (errno.EAGAIN, errno.EINTR):
+            return
+        else:
+            return CONNECTION_LOST
+    if not output:
+        return CONNECTION_DONE
+    callback(output)
+
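+# Editor's illustration (not part of upstream Twisted): on a POSIX system,
+# readFromFD hands whatever is currently readable to the callback and returns
+# None while the descriptor stays connected:
+#
+#     import os
+#     r, w = os.pipe()
+#     setNonBlocking(r)
+#     os.write(w, b'hello')
+#     chunks = []
+#     readFromFD(r, chunks.append)   # returns None; chunks now holds b'hello'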
+
+def writeToFD(fd, data):
+    """
+    Write data to file descriptor.
+
+    Returns same thing FileDescriptor.writeSomeData would.
+
+    @type fd: C{int}
+    @param fd: non-blocking file descriptor to be written to.
+    @type data: C{str} or C{buffer}
+    @param data: bytes to write to fd.
+
+    @return: number of bytes written, or CONNECTION_LOST.
+    """
+    try:
+        return os.write(fd, data)
+    except (OSError, IOError) as io:
+        if io.errno in (errno.EAGAIN, errno.EINTR):
+            return 0
+        return CONNECTION_LOST
+
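+# Editor's illustration (not part of upstream Twisted): writeToFD returns the
+# number of bytes written on success, 0 when the write would block, and
+# CONNECTION_LOST once the other side has gone away:
+#
+#     import os
+#     r, w = os.pipe()
+#     writeToFD(w, b'hello')   # -> 5
+#     os.close(r)
+#     writeToFD(w, b'x')       # -> CONNECTION_LOST (EPIPE, read end closed)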
+
+__all__ = ["setNonBlocking", "setBlocking", "readFromFD", "writeToFD"]
diff --git a/ThirdParty/Twisted/twisted/internet/gireactor.py b/ThirdParty/Twisted/twisted/internet/gireactor.py
new file mode 100644
index 0000000..a7ada11
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/gireactor.py
@@ -0,0 +1,188 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module provides support for Twisted to interact with the glib
+mainloop via GObject Introspection.
+
+In order to use this support, simply do the following::
+
+    from twisted.internet import gireactor
+    gireactor.install()
+
+If you wish to use a GApplication, register it with the reactor::
+
+    from twisted.internet import reactor
+    reactor.registerGApplication(app)
+
+Then use twisted.internet APIs as usual.
+
+On Python 3, pygobject v3.4 or later is required.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.python.compat import _PY3
+from twisted.internet.error import ReactorAlreadyRunning
+from twisted.internet import _glibbase
+from twisted.python import runtime
+
+if _PY3:
+    # We require a sufficiently new version of pygobject, so always exists:
+    _pygtkcompatPresent = True
+else:
+    # We can't just try to import gi.pygtkcompat, because that would import
+    # gi, and the goal here is to not import gi in cases where that would
+    # cause segfault.
+    from twisted.python.modules import theSystemPath
+    _pygtkcompatPresent = True
+    try:
+        theSystemPath["gi.pygtkcompat"]
+    except KeyError:
+        _pygtkcompatPresent = False
+
+
+# Modules that we want to ensure aren't imported if we're on older versions of
+# GI:
+_PYGTK_MODULES = ['gobject', 'glib', 'gio', 'gtk']
+
+def _oldGiInit():
+    """
+    Make sure pygtk and gi aren't loaded at the same time, and import Glib if
+    possible.
+    """
+    # We can't immediately prevent imports, because that confuses some buggy
+    # code in gi:
+    _glibbase.ensureNotImported(
+        _PYGTK_MODULES,
+        "Introspected and static glib/gtk bindings must not be mixed; can't "
+        "import gireactor since pygtk2 module is already imported.")
+
+    global GLib
+    from gi.repository import GLib
+    if getattr(GLib, "threads_init", None) is not None:
+        GLib.threads_init()
+
+    _glibbase.ensureNotImported([], "",
+                                preventImports=_PYGTK_MODULES)
+
+
+if not _pygtkcompatPresent:
+    # Older versions of gi don't have the compatibility layer, so just enforce no
+    # imports of pygtk and gi at same time:
+    _oldGiInit()
+else:
+    # Newer version of gi, so we can try to initialize compatibility layer; if
+    # real pygtk was already imported we'll get ImportError at this point
+    # rather than segfault, so unconditional import is fine.
+    import gi.pygtkcompat
+    gi.pygtkcompat.enable()
+    # At this point importing gobject will get you gi version, and importing
+    # e.g. gtk will either fail in non-segfaulty way or use gi version if user
+    # does gi.pygtkcompat.enable_gtk(). So, no need to prevent imports of
+    # old school pygtk modules.
+    from gi.repository import GLib
+    if getattr(GLib, "threads_init", None) is not None:
+        GLib.threads_init()
+
+
+
+class GIReactor(_glibbase.GlibReactorBase):
+    """
+    GObject-introspection event loop reactor.
+
+    @ivar _gapplication: A C{Gio.Application} instance that was registered
+        with C{registerGApplication}.
+    """
+    _POLL_DISCONNECTED = (GLib.IOCondition.HUP | GLib.IOCondition.ERR |
+                          GLib.IOCondition.NVAL)
+    _POLL_IN = GLib.IOCondition.IN
+    _POLL_OUT = GLib.IOCondition.OUT
+
+    # glib's iochannel sources won't tell us about any events that we haven't
+    # asked for, even if those events aren't sensible inputs to the poll()
+    # call.
+    INFLAGS = _POLL_IN | _POLL_DISCONNECTED
+    OUTFLAGS = _POLL_OUT | _POLL_DISCONNECTED
+
+    # By default no Application is registered:
+    _gapplication = None
+
+
+    def __init__(self, useGtk=False):
+        _gtk = None
+        if useGtk is True:
+            from gi.repository import Gtk as _gtk
+
+        _glibbase.GlibReactorBase.__init__(self, GLib, _gtk, useGtk=useGtk)
+
+
+    def registerGApplication(self, app):
+        """
+        Register a C{Gio.Application} or C{Gtk.Application}, whose main loop
+        will be used instead of the default one.
+
+        We will C{hold} the application so it doesn't exit on its own. In
+        versions of C{python-gi} 3.2 and later, we exit the event loop using
+        the C{app.quit} method which overrides any holds. Older versions are
+        not supported.
+        """
+        if self._gapplication is not None:
+            raise RuntimeError(
+                "Can't register more than one application instance.")
+        if self._started:
+            raise ReactorAlreadyRunning(
+                "Can't register application after reactor was started.")
+        if not hasattr(app, "quit"):
+            raise RuntimeError("Application registration is not supported in"
+                               " versions of PyGObject prior to 3.2.")
+        self._gapplication = app
+        def run():
+            app.hold()
+            app.run(None)
+        self._run = run
+
+        self._crash = app.quit
+
+
+
+class PortableGIReactor(_glibbase.PortableGlibReactorBase):
+    """
+    Portable GObject Introspection event loop reactor.
+    """
+    def __init__(self, useGtk=False):
+        _gtk = None
+        if useGtk is True:
+            from gi.repository import Gtk as _gtk
+
+        _glibbase.PortableGlibReactorBase.__init__(self, GLib, _gtk,
+                                                   useGtk=useGtk)
+
+
+    def registerGApplication(self, app):
+        """
+        Register a C{Gio.Application} or C{Gtk.Application}, whose main loop
+        will be used instead of the default one.
+        """
+        raise NotImplementedError("GApplication is not currently supported on Windows.")
+
+
+
+def install(useGtk=False):
+    """
+    Configure the twisted mainloop to be run inside the glib mainloop.
+
+    @param useGtk: should GTK+ rather than glib event loop be
+        used (this will be slightly slower but does support GUI).
+    """
+    if runtime.platform.getType() == 'posix':
+        reactor = GIReactor(useGtk=useGtk)
+    else:
+        reactor = PortableGIReactor(useGtk=useGtk)
+
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
+
+
+__all__ = ['install']
diff --git a/ThirdParty/Twisted/twisted/internet/glib2reactor.py b/ThirdParty/Twisted/twisted/internet/glib2reactor.py
new file mode 100644
index 0000000..5275efd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/glib2reactor.py
@@ -0,0 +1,44 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module provides support for Twisted to interact with the glib mainloop.
+This is like gtk2, but slightly faster and does not require a working
+$DISPLAY. However, you cannot run GUIs under this reactor: for that you must
+use the gtk2reactor instead.
+
+In order to use this support, simply do the following::
+
+    from twisted.internet import glib2reactor
+    glib2reactor.install()
+
+Then use twisted.internet APIs as usual.  The other methods here are not
+intended to be called directly.
+"""
+
+from twisted.internet import gtk2reactor
+
+
+class Glib2Reactor(gtk2reactor.Gtk2Reactor):
+    """
+    The reactor using the glib mainloop.
+    """
+
+    def __init__(self):
+        """
+        Override init to set the C{useGtk} flag.
+        """
+        gtk2reactor.Gtk2Reactor.__init__(self, useGtk=False)
+
+
+
+def install():
+    """
+    Configure the twisted mainloop to be run inside the glib mainloop.
+    """
+    reactor = Glib2Reactor()
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+
+
+__all__ = ['install']
diff --git a/ThirdParty/Twisted/twisted/internet/gtk2reactor.py b/ThirdParty/Twisted/twisted/internet/gtk2reactor.py
new file mode 100644
index 0000000..2509b6d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/gtk2reactor.py
@@ -0,0 +1,119 @@
+# -*- test-case-name: twisted.internet.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+This module provides support for Twisted to interact with the glib/gtk2
+mainloop.
+
+In order to use this support, simply do the following::
+
+    from twisted.internet import gtk2reactor
+    gtk2reactor.install()
+
+Then use twisted.internet APIs as usual.  The other methods here are not
+intended to be called directly.
+"""
+
+# System Imports
+import sys
+
+# Twisted Imports
+from twisted.internet import _glibbase
+from twisted.python import runtime
+
+# Certain old versions of pygtk and gi crash if imported at the same
+# time. This is a problem when running Twisted's unit tests, since they will
+# attempt to run both gtk2 and gtk3/gi tests. However, gireactor makes sure
+# that if we are in such an old version, and gireactor was imported,
+# gtk2reactor will not be importable. So we don't *need* to enforce that here
+# as well; whichever is imported first will still win. Moreover, additional
+# enforcement in this module is unnecessary in modern versions, and downright
+# problematic in certain versions where for some reason importing gtk also
+# imports some subset of gi. So we do nothing here, relying on gireactor to
+# prevent the crash.
+
+try:
+    if not hasattr(sys, 'frozen'):
+        # Don't want to check this for py2exe
+        import pygtk
+        pygtk.require('2.0')
+except (ImportError, AttributeError):
+    pass # maybe we're using pygtk before this hack existed.
+
+import gobject
+if hasattr(gobject, "threads_init"):
+    # recent versions of python-gtk expose this. python-gtk=2.4.1
+    # (wrapping glib-2.4.7) does. python-gtk=2.0.0 (wrapping
+    # glib-2.2.3) does not.
+    gobject.threads_init()
+
+
+
+class Gtk2Reactor(_glibbase.GlibReactorBase):
+    """
+    PyGTK+ 2 event loop reactor.
+    """
+    _POLL_DISCONNECTED = gobject.IO_HUP | gobject.IO_ERR | gobject.IO_NVAL
+    _POLL_IN = gobject.IO_IN
+    _POLL_OUT = gobject.IO_OUT
+
+    # glib's iochannel sources won't tell us about any events that we haven't
+    # asked for, even if those events aren't sensible inputs to the poll()
+    # call.
+    INFLAGS = _POLL_IN | _POLL_DISCONNECTED
+    OUTFLAGS = _POLL_OUT | _POLL_DISCONNECTED
+
+    def __init__(self, useGtk=True):
+        _gtk = None
+        if useGtk is True:
+            import gtk as _gtk
+
+        _glibbase.GlibReactorBase.__init__(self, gobject, _gtk, useGtk=useGtk)
+
+
+
+class PortableGtkReactor(_glibbase.PortableGlibReactorBase):
+    """
+    Reactor that works on Windows.
+
+    Sockets aren't supported by GTK+'s input_add on Win32.
+    """
+    def __init__(self, useGtk=True):
+        _gtk = None
+        if useGtk is True:
+            import gtk as _gtk
+
+        _glibbase.PortableGlibReactorBase.__init__(self, gobject, _gtk,
+                                                   useGtk=useGtk)
+
+
+def install(useGtk=True):
+    """
+    Configure the twisted mainloop to be run inside the gtk mainloop.
+
+    @param useGtk: should glib rather than GTK+ event loop be
+        used (this will be slightly faster but does not support GUI).
+    """
+    reactor = Gtk2Reactor(useGtk)
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
+
+
+def portableInstall(useGtk=True):
+    """
+    Configure the twisted mainloop to be run inside the gtk mainloop.
+    """
+    reactor = PortableGtkReactor()
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
+
+
+if runtime.platform.getType() != 'posix':
+    install = portableInstall
+
+
+__all__ = ['install']
diff --git a/ThirdParty/Twisted/twisted/internet/gtk3reactor.py b/ThirdParty/Twisted/twisted/internet/gtk3reactor.py
new file mode 100644
index 0000000..256b698
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/gtk3reactor.py
@@ -0,0 +1,80 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module provides support for Twisted to interact with the gtk3 mainloop
+via GObject Introspection. This is like gi, but slightly slower and requires a
+working $DISPLAY.
+
+In order to use this support, simply do the following::
+
+    from twisted.internet import gtk3reactor
+    gtk3reactor.install()
+
+If you wish to use a GApplication, register it with the reactor::
+
+    from twisted.internet import reactor
+    reactor.registerGApplication(app)
+
+Then use twisted.internet APIs as usual.
+"""
+
+from __future__ import division, absolute_import
+
+import os
+
+from twisted.internet import gireactor
+from twisted.python import runtime
+
+# Newer versions of gtk3/pygobject raise a RuntimeError, or just break in a
+# confusing manner, if the program is not running under X11.  We therefore try
+# to fail in a more reasonable manner, and check for $DISPLAY as a reasonable
+# approximation of availability of X11. This is somewhat over-aggressive,
+# since some older versions of gtk3/pygobject do work with missing $DISPLAY,
+# but it's too hard to figure out which, so we always require it.
+if (runtime.platform.getType() == 'posix' and
+    not runtime.platform.isMacOSX() and not os.environ.get("DISPLAY")):
+    raise ImportError(
+        "Gtk3 requires X11, and no DISPLAY environment variable is set")
+
+
+class Gtk3Reactor(gireactor.GIReactor):
+    """
+    A reactor using the gtk3+ event loop.
+    """
+
+    def __init__(self):
+        """
+        Override init to set the C{useGtk} flag.
+        """
+        gireactor.GIReactor.__init__(self, useGtk=True)
+
+
+
+class PortableGtk3Reactor(gireactor.PortableGIReactor):
+    """
+    Portable GTK+ 3.x reactor.
+    """
+    def __init__(self):
+        """
+        Override init to set the C{useGtk} flag.
+        """
+        gireactor.PortableGIReactor.__init__(self, useGtk=True)
+
+
+
+def install():
+    """
+    Configure the Twisted mainloop to be run inside the gtk3+ mainloop.
+    """
+    if runtime.platform.getType() == 'posix':
+        reactor = Gtk3Reactor()
+    else:
+        reactor = PortableGtk3Reactor()
+
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
+
+
+__all__ = ['install']
diff --git a/ThirdParty/Twisted/twisted/internet/gtkreactor.py b/ThirdParty/Twisted/twisted/internet/gtkreactor.py
new file mode 100644
index 0000000..6b1855e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/gtkreactor.py
@@ -0,0 +1,250 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module provides support for Twisted to interact with the PyGTK mainloop.
+
+In order to use this support, simply do the following::
+
+    |  from twisted.internet import gtkreactor
+    |  gtkreactor.install()
+
+Then use twisted.internet APIs as usual.  The other methods here are not
+intended to be called directly.
+"""
+
+import sys
+
+# System Imports
+try:
+    import pygtk
+    pygtk.require('1.2')
+except (ImportError, AttributeError):
+    pass # maybe we're using pygtk before this hack existed.
+import gtk
+
+from zope.interface import implements
+
+# Twisted Imports
+from twisted.python import log, runtime, deprecate, versions
+from twisted.internet.interfaces import IReactorFDSet
+
+# Sibling Imports
+from twisted.internet import posixbase, selectreactor
+
+
+deprecatedSince = versions.Version("Twisted", 10, 1, 0)
+deprecationMessage = ("All new applications should be written with gtk 2.x, "
+                      "which is supported by twisted.internet.gtk2reactor.")
+
+
+class GtkReactor(posixbase.PosixReactorBase):
+    """
+    GTK+ event loop reactor.
+
+    @ivar _reads: A dictionary mapping L{FileDescriptor} instances to gtk INPUT_READ
+        watch handles.
+
+    @ivar _writes: A dictionary mapping L{FileDescriptor} instances to gtk
+        INPUT_WRITE watch handles.
+
+    @ivar _simtag: A gtk timeout handle for the next L{simulate} call.
+    """
+    implements(IReactorFDSet)
+
+    deprecate.deprecatedModuleAttribute(deprecatedSince, deprecationMessage,
+                                        __name__, "GtkReactor")
+
+    def __init__(self):
+        """
+        Initialize the file descriptor tracking dictionaries and the base
+        class.
+        """
+        self._simtag = None
+        self._reads = {}
+        self._writes = {}
+        posixbase.PosixReactorBase.__init__(self)
+
+
+    def addReader(self, reader):
+        if reader not in self._reads:
+            self._reads[reader] = gtk.input_add(reader, gtk.GDK.INPUT_READ, self.callback)
+
+    def addWriter(self, writer):
+        if writer not in self._writes:
+            self._writes[writer] = gtk.input_add(writer, gtk.GDK.INPUT_WRITE, self.callback)
+
+
+    def getReaders(self):
+        return self._reads.keys()
+
+
+    def getWriters(self):
+        return self._writes.keys()
+
+
+    def removeAll(self):
+        return self._removeAll(self._reads, self._writes)
+
+
+    def removeReader(self, reader):
+        if reader in self._reads:
+            gtk.input_remove(self._reads[reader])
+            del self._reads[reader]
+
+    def removeWriter(self, writer):
+        if writer in self._writes:
+            gtk.input_remove(self._writes[writer])
+            del self._writes[writer]
+
+    doIterationTimer = None
+
+    def doIterationTimeout(self, *args):
+        self.doIterationTimer = None
+        return 0 # auto-remove
+    def doIteration(self, delay):
+        # flush some pending events, return if there was something to do
+        # don't use the usual "while gtk.events_pending(): mainiteration()"
+        # idiom because lots of IO (in particular test_tcp's
+        # ProperlyCloseFilesTestCase) can keep us from ever exiting.
+        log.msg(channel='system', event='iteration', reactor=self)
+        if gtk.events_pending():
+            gtk.mainiteration(0)
+            return
+        # nothing to do, must delay
+        if delay == 0:
+            return # shouldn't delay, so just return
+        self.doIterationTimer = gtk.timeout_add(int(delay * 1000),
+                                                self.doIterationTimeout)
+        # This will either wake up from IO or from a timeout.
+        gtk.mainiteration(1) # block
+        # note: with the .simulate timer below, delays > 0.1 will always be
+        # woken up by the .simulate timer
+        if self.doIterationTimer:
+            # if woken by IO, need to cancel the timer
+            gtk.timeout_remove(self.doIterationTimer)
+            self.doIterationTimer = None
+
+    def crash(self):
+        posixbase.PosixReactorBase.crash(self)
+        gtk.mainquit()
+
+    def run(self, installSignalHandlers=1):
+        self.startRunning(installSignalHandlers=installSignalHandlers)
+        gtk.timeout_add(0, self.simulate)
+        gtk.mainloop()
+
+    def _readAndWrite(self, source, condition):
+        # note: gtk-1.2's gtk_input_add presents an API in terms of gdk
+        # constants like INPUT_READ and INPUT_WRITE. Internally, it will add
+        # POLL_HUP and POLL_ERR to the poll() events, but if they happen it
+        # will turn them back into INPUT_READ and INPUT_WRITE. gdkevents.c
+        # maps IN/HUP/ERR to INPUT_READ, and OUT/ERR to INPUT_WRITE. This
+        # means there is no immediate way to detect a disconnected socket.
+
+        # The g_io_add_watch() API is more suited to this task. I don't think
+        # pygtk exposes it, though.
+        why = None
+        didRead = None
+        try:
+            if condition & gtk.GDK.INPUT_READ:
+                why = source.doRead()
+                didRead = source.doRead
+            if not why and condition & gtk.GDK.INPUT_WRITE:
+                # if doRead caused connectionLost, don't call doWrite
+                # if doRead is doWrite, don't call it again.
+                if not source.disconnected and source.doWrite != didRead:
+                    why = source.doWrite()
+                    didRead = source.doWrite # if failed it was in write
+        except:
+            why = sys.exc_info()[1]
+            log.msg('Error In %s' % source)
+            log.deferr()
+
+        if why:
+            self._disconnectSelectable(source, why, didRead == source.doRead)
+
+    def callback(self, source, condition):
+        log.callWithLogger(source, self._readAndWrite, source, condition)
+        self.simulate() # fire Twisted timers
+        return 1 # 1=don't auto-remove the source
+
+    def simulate(self):
+        """Run simulation loops and reschedule callbacks.
+        """
+        if self._simtag is not None:
+            gtk.timeout_remove(self._simtag)
+        self.runUntilCurrent()
+        timeout = min(self.timeout(), 0.1)
+        if timeout is None:
+            timeout = 0.1
+        # Quoth someone other than me, "grumble", yet I know not why. Try to be
+        # more specific in your complaints, guys. -exarkun
+        self._simtag = gtk.timeout_add(int(timeout * 1010), self.simulate)
+
+
+
+class PortableGtkReactor(selectreactor.SelectReactor):
+    """Reactor that works on Windows.
+
+    input_add is not supported on GTK+ for Win32, apparently.
+
+    @ivar _simtag: A gtk timeout handle for the next L{simulate} call.
+    """
+    _simtag = None
+
+    deprecate.deprecatedModuleAttribute(deprecatedSince, deprecationMessage,
+                                        __name__, "PortableGtkReactor")
+
+    def crash(self):
+        selectreactor.SelectReactor.crash(self)
+        gtk.mainquit()
+
+    def run(self, installSignalHandlers=1):
+        self.startRunning(installSignalHandlers=installSignalHandlers)
+        self.simulate()
+        gtk.mainloop()
+
+    def simulate(self):
+        """Run simulation loops and reschedule callbacks.
+        """
+        if self._simtag is not None:
+            gtk.timeout_remove(self._simtag)
+        self.iterate()
+        timeout = min(self.timeout(), 0.1)
+        if timeout is None:
+            timeout = 0.1
+
+        # See comment for identical line in GtkReactor.simulate.
+        self._simtag = gtk.timeout_add((timeout * 1010), self.simulate)
+
+
+
+def install():
+    """Configure the twisted mainloop to be run inside the gtk mainloop.
+    """
+    reactor = GtkReactor()
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
+
+deprecate.deprecatedModuleAttribute(deprecatedSince, deprecationMessage,
+                                    __name__, "install")
+
+
+def portableInstall():
+    """Configure the twisted mainloop to be run inside the gtk mainloop.
+    """
+    reactor = PortableGtkReactor()
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
+
+deprecate.deprecatedModuleAttribute(deprecatedSince, deprecationMessage,
+                                    __name__, "portableInstall")
+
+
+if runtime.platform.getType() != 'posix':
+    install = portableInstall
+
+__all__ = ['install']
diff --git a/ThirdParty/Twisted/twisted/internet/inotify.py b/ThirdParty/Twisted/twisted/internet/inotify.py
new file mode 100644
index 0000000..85305dc
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/inotify.py
@@ -0,0 +1,405 @@
+# -*- test-case-name: twisted.internet.test.test_inotify -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module provides support for Twisted to interact with the Linux inotify API.
+
+In order to use this support, simply do the following (and start a reactor
+at some point)::
+
+    from twisted.internet import inotify
+    from twisted.python import filepath
+
+    def notify(ignored, filepath, mask):
+        \"""
+        For historical reasons, an opaque handle is passed as first
+        parameter. This object should never be used.
+
+        @param filepath: FilePath on which the event happened.
+        @param mask: inotify event as a hexadecimal mask
+        \"""
+        print "event %s on %s" % (
+            ', '.join(inotify.humanReadableMask(mask)), filepath)
+
+    notifier = inotify.INotify()
+    notifier.startReading()
+    notifier.watch(filepath.FilePath("/some/directory"), callbacks=[notify])
+
+@since: 10.1
+"""
+
+import os
+import struct
+
+from twisted.internet import fdesc
+from twisted.internet.abstract import FileDescriptor
+from twisted.python import log, _inotify
+
+
+# from /usr/src/linux/include/linux/inotify.h
+
+IN_ACCESS = 0x00000001L         # File was accessed
+IN_MODIFY = 0x00000002L         # File was modified
+IN_ATTRIB = 0x00000004L         # Metadata changed
+IN_CLOSE_WRITE = 0x00000008L    # Writeable file was closed
+IN_CLOSE_NOWRITE = 0x00000010L  # Unwriteable file closed
+IN_OPEN = 0x00000020L           # File was opened
+IN_MOVED_FROM = 0x00000040L     # File was moved from X
+IN_MOVED_TO = 0x00000080L       # File was moved to Y
+IN_CREATE = 0x00000100L         # Subfile was created
+IN_DELETE = 0x00000200L         # Subfile was deleted
+IN_DELETE_SELF = 0x00000400L    # Self was deleted
+IN_MOVE_SELF = 0x00000800L      # Self was moved
+IN_UNMOUNT = 0x00002000L        # Backing fs was unmounted
+IN_Q_OVERFLOW = 0x00004000L     # Event queued overflowed
+IN_IGNORED = 0x00008000L        # File was ignored
+
+IN_ONLYDIR = 0x01000000         # only watch the path if it is a directory
+IN_DONT_FOLLOW = 0x02000000     # don't follow a sym link
+IN_MASK_ADD = 0x20000000        # add to the mask of an already existing watch
+IN_ISDIR = 0x40000000           # event occurred against dir
+IN_ONESHOT = 0x80000000         # only send event once
+
+IN_CLOSE = IN_CLOSE_WRITE | IN_CLOSE_NOWRITE     # closes
+IN_MOVED = IN_MOVED_FROM | IN_MOVED_TO           # moves
+IN_CHANGED = IN_MODIFY | IN_ATTRIB               # changes
+
+IN_WATCH_MASK = (IN_MODIFY | IN_ATTRIB |
+                 IN_CREATE | IN_DELETE |
+                 IN_DELETE_SELF | IN_MOVE_SELF |
+                 IN_UNMOUNT | IN_MOVED_FROM | IN_MOVED_TO)
+
+
+_FLAG_TO_HUMAN = [
+    (IN_ACCESS, 'access'),
+    (IN_MODIFY, 'modify'),
+    (IN_ATTRIB, 'attrib'),
+    (IN_CLOSE_WRITE, 'close_write'),
+    (IN_CLOSE_NOWRITE, 'close_nowrite'),
+    (IN_OPEN, 'open'),
+    (IN_MOVED_FROM, 'moved_from'),
+    (IN_MOVED_TO, 'moved_to'),
+    (IN_CREATE, 'create'),
+    (IN_DELETE, 'delete'),
+    (IN_DELETE_SELF, 'delete_self'),
+    (IN_MOVE_SELF, 'move_self'),
+    (IN_UNMOUNT, 'unmount'),
+    (IN_Q_OVERFLOW, 'queue_overflow'),
+    (IN_IGNORED, 'ignored'),
+    (IN_ONLYDIR, 'only_dir'),
+    (IN_DONT_FOLLOW, 'dont_follow'),
+    (IN_MASK_ADD, 'mask_add'),
+    (IN_ISDIR, 'is_dir'),
+    (IN_ONESHOT, 'one_shot')
+]
+
+
+
+def humanReadableMask(mask):
+    """
+    Auxiliary function that converts a hexadecimal mask into a series
+    of human-readable flags.
+    """
+    s = []
+    for k, v in _FLAG_TO_HUMAN:
+        if k & mask:
+            s.append(v)
+    return s
+
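+# Editor's illustration (not part of upstream Twisted): combining two of the
+# constants above and translating the mask back into names:
+#
+#     humanReadableMask(IN_CREATE | IN_ISDIR)   # -> ['create', 'is_dir']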
+
+
+class _Watch(object):
+    """
+    Watch object that represents a watch point in the filesystem. The
+    user should let INotify create these objects.
+
+    @ivar path: The path that this watch point monitors
+    @ivar mask: The events monitored by this watchpoint
+    @ivar autoAdd: Flag that determines whether this watch point
+        should automatically add created subdirectories
+    @ivar callbacks: C{list} of callback functions that will be called
+        when an event occurs on this watch.
+    """
+    def __init__(self, path, mask=IN_WATCH_MASK, autoAdd=False,
+                 callbacks=None):
+        self.path = path
+        self.mask = mask
+        self.autoAdd = autoAdd
+        if callbacks is None:
+            callbacks = []
+        self.callbacks = callbacks
+
+
+    def _notify(self, filepath, events):
+        """
+        Callback function used by L{INotify} to dispatch an event.
+        """
+        for callback in self.callbacks:
+            callback(self, filepath, events)
+
+
+
+class INotify(FileDescriptor, object):
+    """
+    The INotify file descriptor; it basically does everything related
+    to INotify, from reading to notifying watch points.
+
+    @ivar _buffer: a C{str} containing the data read from the inotify fd.
+
+    @ivar _watchpoints: a C{dict} that maps from inotify watch ids to
+        watchpoints objects
+
+    @ivar _watchpaths: a C{dict} that maps from watched paths to the
+        inotify watch ids
+    """
+    _inotify = _inotify
+
+    def __init__(self, reactor=None):
+        FileDescriptor.__init__(self, reactor=reactor)
+
+        # Smart way to allow parametrization of libc so I can override
+        # it and test for the system errors.
+        self._fd = self._inotify.init()
+
+        fdesc.setNonBlocking(self._fd)
+        fdesc._setCloseOnExec(self._fd)
+
+        # The next 2 lines are needed for self.loseConnection() to call
+        # connectionLost() on us. Since we already created the fd that talks
+        # to inotify we want to be notified even if we haven't yet started
+        # reading.
+        self.connected = 1
+        self._writeDisconnected = True
+
+        self._buffer = ''
+        self._watchpoints = {}
+        self._watchpaths = {}
+
+
+    def _addWatch(self, path, mask, autoAdd, callbacks):
+        """
+        Private helper that abstracts the use of ctypes.
+
+        Calls the internal inotify API and checks for any errors after the
+        call. If there's an error L{INotify._addWatch} can raise an
+        INotifyError. If there's no error it proceeds creating a watchpoint and
+        adding a watchpath for inverse lookup of the file descriptor from the
+        path.
+        """
+        wd = self._inotify.add(self._fd, path.path, mask)
+
+        iwp = _Watch(path, mask, autoAdd, callbacks)
+
+        self._watchpoints[wd] = iwp
+        self._watchpaths[path] = wd
+
+        return wd
+
+
+    def _rmWatch(self, wd):
+        """
+        Private helper that abstracts the use of ctypes.
+
+        Calls the internal inotify API to remove an fd from inotify then
+        removes the corresponding watchpoint from the internal mapping together
+        with the file descriptor from the watchpath.
+        """
+        self._inotify.remove(self._fd, wd)
+        iwp = self._watchpoints.pop(wd)
+        self._watchpaths.pop(iwp.path)
+
+
+    def connectionLost(self, reason):
+        """
+        Release the inotify file descriptor and do the necessary cleanup
+        """
+        FileDescriptor.connectionLost(self, reason)
+        if self._fd >= 0:
+            try:
+                os.close(self._fd)
+            except OSError, e:
+                log.err(e, "Couldn't close INotify file descriptor.")
+
+
+    def fileno(self):
+        """
+        Get the underlying file descriptor from this inotify observer.
+        Required by L{abstract.FileDescriptor} subclasses.
+        """
+        return self._fd
+
+
+    def doRead(self):
+        """
+        Read some data from the observed file descriptors
+        """
+        fdesc.readFromFD(self._fd, self._doRead)
+
+
+    def _doRead(self, in_):
+        """
+        Work on the data just read from the file descriptor.
+        """
+        self._buffer += in_
+        while len(self._buffer) >= 16:
+
+            wd, mask, cookie, size = struct.unpack("=LLLL", self._buffer[0:16])
+
+            if size:
+                name = self._buffer[16:16 + size].rstrip('\0')
+            else:
+                name = None
+
+            self._buffer = self._buffer[16 + size:]
+
+            try:
+                iwp = self._watchpoints[wd]
+            except KeyError:
+                continue
+
+            path = iwp.path
+            if name:
+                path = path.child(name)
+            iwp._notify(path, mask)
+
+            if (iwp.autoAdd and mask & IN_ISDIR and mask & IN_CREATE):
+                # mask & IN_ISDIR already guarantees that the path is a
+                # directory. There's no way you can get here without a
+                # directory anyway, so no point in checking for that again.
+                new_wd = self.watch(
+                    path, mask=iwp.mask, autoAdd=True,
+                    callbacks=iwp.callbacks
+                )
+                # This is very very very hacky and I'd rather not do this but
+                # we have no other alternative that is less hacky other than
+                # surrender.  We use callLater because we don't want to have
+                # too many events waiting while we process these subdirs, we
+                # must always answer events as fast as possible or the overflow
+                # might come.
+                self.reactor.callLater(0,
+                    self._addChildren, self._watchpoints[new_wd])
+            if mask & IN_DELETE_SELF:
+                self._rmWatch(wd)
+
+
+    def _addChildren(self, iwp):
+        """
+        This is a very private method, please don't even think about using it.
+
+        Note that this is a fricking hack... it's because we cannot be fast
+        enough in adding a watch to a directory and so we basically end up
+        getting here too late if some operations have already been going on in
+        the subdir, so we basically need to catch up.  This eventually ends up
+        meaning that we generate double events, so your app must be resistant.
+        """
+        try:
+            listdir = iwp.path.children()
+        except OSError:
+            # Somebody or something (like a test) removed this directory while
+            # we were in the callLater(0...) waiting. It doesn't make sense to
+            # process it anymore
+            return
+
+        # note that it's true that listdir will only see the subdirs inside
+        # path at the moment of the call but path is monitored already so if
+        # something is created we will receive an event.
+        for f in listdir:
+            # It's a directory, watch it and then add its children
+            if f.isdir():
+                wd = self.watch(
+                    f, mask=iwp.mask, autoAdd=True,
+                    callbacks=iwp.callbacks
+                )
+                iwp._notify(f, IN_ISDIR|IN_CREATE)
+                # now f is watched, we can add its children the callLater is to
+                # avoid recursion
+                self.reactor.callLater(0,
+                    self._addChildren, self._watchpoints[wd])
+
+            # It's a file and we notify it.
+            if f.isfile():
+                iwp._notify(f, IN_CREATE|IN_CLOSE_WRITE)
+
+
+    def watch(self, path, mask=IN_WATCH_MASK, autoAdd=False,
+              callbacks=None, recursive=False):
+        """
+        Watch the 'mask' events in the given path. Can raise C{INotifyError} when
+        there's a problem while adding a directory.
+
+        @param path: The path needing monitoring
+        @type path: L{FilePath}
+
+        @param mask: The events that should be watched
+        @type mask: C{int}
+
+        @param autoAdd: if True automatically add newly created
+                        subdirectories
+        @type autoAdd: C{boolean}
+
+        @param callbacks: A list of callbacks that should be called
+                          when an event happens in the given path.
+                          The callback should accept 3 arguments:
+                          (ignored, filepath, mask)
+        @type callbacks: C{list} of callables
+
+        @param recursive: Also add all the subdirectories in this path
+        @type recursive: C{boolean}
+        """
+        if recursive:
+            # This behavior is needed to be compatible with the windows
+            # interface for filesystem changes:
+            # http://msdn.microsoft.com/en-us/library/aa365465(VS.85).aspx
+            # ReadDirectoryChangesW can do bWatchSubtree so it doesn't
+            # make sense to implement this at a higher abstraction
+            # level when other platforms support it already
+            for child in path.walk():
+                if child.isdir():
+                    self.watch(child, mask, autoAdd, callbacks,
+                               recursive=False)
+        else:
+            wd = self._isWatched(path)
+            if wd:
+                return wd
+
+            mask = mask | IN_DELETE_SELF # need this to remove the watch
+
+            return self._addWatch(path, mask, autoAdd, callbacks)
+
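+    # Editor's illustration (not part of upstream Twisted): watching a whole
+    # directory tree and auto-adding subdirectories created later, using the
+    # filepath/notify names assumed from the module docstring:
+    #
+    #     notifier.watch(filepath.FilePath("/some/tree"),
+    #                    autoAdd=True, recursive=True,
+    #                    callbacks=[notify])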
+
+    def ignore(self, path):
+        """
+        Remove the watch point monitoring the given path
+
+        @param path: The path that should be ignored
+        @type path: L{FilePath}
+        """
+        wd = self._isWatched(path)
+        if wd is None:
+            raise KeyError("%r is not watched" % (path,))
+        else:
+            self._rmWatch(wd)
+
+
+    def _isWatched(self, path):
+        """
+        Helper function that checks if the path is already monitored
+        and returns its watch descriptor if so, or None otherwise.
+
+        @param path: The path that should be checked
+        @type path: L{FilePath}
+        """
+        return self._watchpaths.get(path, None)
+
+
+INotifyError = _inotify.INotifyError
+
+
+__all__ = ["INotify", "humanReadableMask", "IN_WATCH_MASK", "IN_ACCESS",
+           "IN_MODIFY", "IN_ATTRIB", "IN_CLOSE_NOWRITE", "IN_CLOSE_WRITE",
+           "IN_OPEN", "IN_MOVED_FROM", "IN_MOVED_TO", "IN_CREATE",
+           "IN_DELETE", "IN_DELETE_SELF", "IN_MOVE_SELF", "IN_UNMOUNT",
+           "IN_Q_OVERFLOW", "IN_IGNORED", "IN_ONLYDIR", "IN_DONT_FOLLOW",
+           "IN_MASK_ADD", "IN_ISDIR", "IN_ONESHOT", "IN_CLOSE",
+           "IN_MOVED", "IN_CHANGED"]
diff --git a/ThirdParty/Twisted/twisted/internet/interfaces.py b/ThirdParty/Twisted/twisted/internet/interfaces.py
new file mode 100644
index 0000000..7d98802
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/interfaces.py
@@ -0,0 +1,2015 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Interface documentation.
+
+Maintainer: Itamar Shtull-Trauring
+"""
+
+from __future__ import division, absolute_import
+
+from zope.interface import Interface, Attribute
+
+
+class IAddress(Interface):
+    """
+    An address, e.g. a TCP C{(host, port)}.
+
+    Default implementations are in L{twisted.internet.address}.
+    """
+
+### Reactor Interfaces
+
+class IConnector(Interface):
+    """
+    Object used to interface between connections and protocols.
+
+    Each L{IConnector} manages one connection.
+    """
+
+    def stopConnecting():
+        """
+        Stop attempting to connect.
+        """
+
+    def disconnect():
+        """
+        Disconnect regardless of the connection state.
+
+        If we are connected, disconnect; if we are trying to connect,
+        stop trying.
+        """
+
+    def connect():
+        """
+        Try to connect to remote address.
+        """
+
+    def getDestination():
+        """
+        Return destination this will try to connect to.
+
+        @return: An object which provides L{IAddress}.
+        """
+
+
+class IResolverSimple(Interface):
+
+    def getHostByName(name, timeout = (1, 3, 11, 45)):
+        """
+        Resolve the domain name C{name} into an IP address.
+
+        @type name: C{str}
+        @type timeout: C{tuple}
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: The callback of the Deferred that is returned will be
+        passed a string that represents the IP address of the specified
+        name, or the errback will be called if the lookup times out.  If
+        multiple types of address records are associated with the name,
+        A6 records will be returned in preference to AAAA records, which
+        will be returned in preference to A records.  If there are multiple
+        records of the type to be returned, one will be selected at random.
+
+        @raise twisted.internet.defer.TimeoutError: Raised (asynchronously)
+        if the name cannot be resolved within the specified timeout period.
+        """
+
+class IResolver(IResolverSimple):
+    def query(query, timeout = 10):
+        """
+        Interpret and dispatch a query object to the appropriate
+        lookup* method.
+        """
+
+    def lookupAddress(name, timeout = 10):
+        """
+        Lookup the A records associated with C{name}.
+        """
+
+    def lookupAddress6(name, timeout = 10):
+        """
+        Lookup all the A6 records associated with C{name}.
+        """
+
+    def lookupIPV6Address(name, timeout = 10):
+        """
+        Lookup all the AAAA records associated with C{name}.
+        """
+
+    def lookupMailExchange(name, timeout = 10):
+        """
+        Lookup the MX records associated with C{name}.
+        """
+
+    def lookupNameservers(name, timeout = 10):
+        """
+        Lookup the NS records associated with C{name}.
+        """
+
+    def lookupCanonicalName(name, timeout = 10):
+        """
+        Lookup the CNAME records associated with C{name}.
+        """
+
+    def lookupMailBox(name, timeout = 10):
+        """
+        Lookup the MB records associated with C{name}.
+        """
+
+    def lookupMailGroup(name, timeout = 10):
+        """
+        Lookup the MG records associated with C{name}.
+        """
+
+    def lookupMailRename(name, timeout = 10):
+        """
+        Lookup the MR records associated with C{name}.
+        """
+
+    def lookupPointer(name, timeout = 10):
+        """
+        Lookup the PTR records associated with C{name}.
+        """
+
+    def lookupAuthority(name, timeout = 10):
+        """
+        Lookup the SOA records associated with C{name}.
+        """
+
+    def lookupNull(name, timeout = 10):
+        """
+        Lookup the NULL records associated with C{name}.
+        """
+
+    def lookupWellKnownServices(name, timeout = 10):
+        """
+        Lookup the WKS records associated with C{name}.
+        """
+
+    def lookupHostInfo(name, timeout = 10):
+        """
+        Lookup the HINFO records associated with C{name}.
+        """
+
+    def lookupMailboxInfo(name, timeout = 10):
+        """
+        Lookup the MINFO records associated with C{name}.
+        """
+
+    def lookupText(name, timeout = 10):
+        """
+        Lookup the TXT records associated with C{name}.
+        """
+
+    def lookupResponsibility(name, timeout = 10):
+        """
+        Lookup the RP records associated with C{name}.
+        """
+
+    def lookupAFSDatabase(name, timeout = 10):
+        """
+        Lookup the AFSDB records associated with C{name}.
+        """
+
+    def lookupService(name, timeout = 10):
+        """
+        Lookup the SRV records associated with C{name}.
+        """
+
+    def lookupAllRecords(name, timeout = 10):
+        """
+        Lookup all records associated with C{name}.
+        """
+
+    def lookupZone(name, timeout = 10):
+        """
+        Perform a zone transfer for the given C{name}.
+        """
+
+
+
+class IReactorTCP(Interface):
+
+    def listenTCP(port, factory, backlog=50, interface=''):
+        """
+        Connects a given protocol factory to the given numeric TCP/IP port.
+
+        @param port: a port number on which to listen
+
+        @param factory: a L{twisted.internet.protocol.ServerFactory} instance
+
+        @param backlog: size of the listen queue
+
+        @param interface: The local IPv4 or IPv6 address to which to bind;
+            defaults to '', i.e. all IPv4 addresses.  To bind to all IPv4 and IPv6
+            addresses, you must call this method twice.
+
+        @return: an object that provides L{IListeningPort}.
+
+        @raise CannotListenError: as defined here
+                                  L{twisted.internet.error.CannotListenError},
+                                  if it cannot listen on this port (e.g., it
+                                  cannot bind to the required port number)
+        """
+
+    def connectTCP(host, port, factory, timeout=30, bindAddress=None):
+        """
+        Connect a TCP client.
+
+        @param host: a host name
+
+        @param port: a port number
+
+        @param factory: a L{twisted.internet.protocol.ClientFactory} instance
+
+        @param timeout: number of seconds to wait before assuming the
+                        connection has failed.
+
+        @param bindAddress: a (host, port) tuple of local address to bind
+                            to, or None.
+
+        @return: An object which provides L{IConnector}. This connector will
+                 call various callbacks on the factory when a connection is
+                 made, failed, or lost - see
+                 L{ClientFactory<twisted.internet.protocol.ClientFactory>}
+                 docs for details.
+        """
+
+class IReactorSSL(Interface):
+
+    def connectSSL(host, port, factory, contextFactory, timeout=30, bindAddress=None):
+        """
+        Connect a client Protocol to a remote SSL socket.
+
+        @param host: a host name
+
+        @param port: a port number
+
+        @param factory: a L{twisted.internet.protocol.ClientFactory} instance
+
+        @param contextFactory: a L{twisted.internet.ssl.ClientContextFactory} object.
+
+        @param timeout: number of seconds to wait before assuming the
+                        connection has failed.
+
+        @param bindAddress: a (host, port) tuple of local address to bind to,
+                            or C{None}.
+
+        @return: An object which provides L{IConnector}.
+        """
+
+    def listenSSL(port, factory, contextFactory, backlog=50, interface=''):
+        """
+        Connects a given protocol factory to the given numeric TCP/IP port.
+        The connection is an SSL one, using contexts created by the context
+        factory.
+
+        @param port: a port number on which to listen
+
+        @param factory: a L{twisted.internet.protocol.ServerFactory} instance
+
+        @param contextFactory: a L{twisted.internet.ssl.ContextFactory} instance
+
+        @param backlog: size of the listen queue
+
+        @param interface: the hostname to bind to, defaults to '' (all)
+        """
+
+
+
+class IReactorUNIX(Interface):
+    """
+    UNIX socket methods.
+    """
+
+    def connectUNIX(address, factory, timeout=30, checkPID=0):
+        """
+        Connect a client protocol to a UNIX socket.
+
+        @param address: a path to a unix socket on the filesystem.
+
+        @param factory: a L{twisted.internet.protocol.ClientFactory} instance
+
+        @param timeout: number of seconds to wait before assuming the connection
+            has failed.
+
+        @param checkPID: if True, check for a pid file to verify that a server
+            is listening.  If C{address} is a Linux abstract namespace path,
+            this must be C{False}.
+
+        @return: An object which provides L{IConnector}.
+        """
+
+
+    def listenUNIX(address, factory, backlog=50, mode=0o666, wantPID=0):
+        """
+        Listen on a UNIX socket.
+
+        @param address: a path to a unix socket on the filesystem.
+
+        @param factory: a L{twisted.internet.protocol.Factory} instance.
+
+        @param backlog: number of connections to allow in backlog.
+
+        @param mode: The mode (B{not} umask) to set on the unix socket.  See
+            platform specific documentation for information about how this
+            might affect connection attempts.
+        @type mode: C{int}
+
+        @param wantPID: if True, create a pidfile for the socket.  If C{address}
+            is a Linux abstract namespace path, this must be C{False}.
+
+        @return: An object which provides L{IListeningPort}.
+        """
+
+
+
+class IReactorUNIXDatagram(Interface):
+    """
+    Datagram UNIX socket methods.
+    """
+
+    def connectUNIXDatagram(address, protocol, maxPacketSize=8192, mode=0o666, bindAddress=None):
+        """
+        Connect a client protocol to a datagram UNIX socket.
+
+        @param address: a path to a unix socket on the filesystem.
+
+        @param protocol: a L{twisted.internet.protocol.ConnectedDatagramProtocol} instance
+
+        @param maxPacketSize: maximum packet size to accept
+
+        @param mode: The mode (B{not} umask) to set on the unix socket.  See
+            platform specific documentation for information about how this
+            might affect connection attempts.
+        @type mode: C{int}
+
+        @param bindAddress: address to bind to
+
+        @return: An object which provides L{IConnector}.
+        """
+
+
+    def listenUNIXDatagram(address, protocol, maxPacketSize=8192, mode=0o666):
+        """
+        Listen on a datagram UNIX socket.
+
+        @param address: a path to a unix socket on the filesystem.
+
+        @param protocol: a L{twisted.internet.protocol.DatagramProtocol} instance.
+
+        @param maxPacketSize: maximum packet size to accept
+
+        @param mode: The mode (B{not} umask) to set on the unix socket.  See
+            platform specific documentation for information about how this
+            might affect connection attempts.
+        @type mode: C{int}
+
+        @return: An object which provides L{IListeningPort}.
+        """
+
+
+
+class IReactorWin32Events(Interface):
+    """
+    Win32 Event API methods
+
+    @since: 10.2
+    """
+
+    def addEvent(event, fd, action):
+        """
+        Add a new win32 event to the event loop.
+
+        @param event: a Win32 event object created using win32event.CreateEvent()
+
+        @param fd: an instance of L{twisted.internet.abstract.FileDescriptor}
+
+        @param action: a string that is a method name of the fd instance.
+                       This method is called in response to the event.
+
+        @return: None
+        """
+
+
+    def removeEvent(event):
+        """
+        Remove an event.
+
+        @param event: a Win32 event object added using L{IReactorWin32Events.addEvent}
+
+        @return: None
+        """
+
+
+
+class IReactorUDP(Interface):
+    """
+    UDP socket methods.
+    """
+
+    def listenUDP(port, protocol, interface='', maxPacketSize=8192):
+        """
+        Connects a given DatagramProtocol to the given numeric UDP port.
+
+        @return: object which provides L{IListeningPort}.
+        """
+
+
+
+class IReactorMulticast(Interface):
+    """
+    UDP socket methods that support multicast.
+
+    IMPORTANT: This is an experimental new interface. It may change
+    without backwards compatibility. Suggestions are welcome.
+    """
+
+    def listenMulticast(port, protocol, interface='', maxPacketSize=8192,
+                        listenMultiple=False):
+        """
+        Connects a given
+        L{DatagramProtocol<twisted.internet.protocol.DatagramProtocol>} to the
+        given numeric UDP port.
+
+        @param listenMultiple: If set to True, allows multiple sockets to
+            bind to the same address and port number at the same time.
+        @type listenMultiple: C{bool}
+
+        @returns: An object which provides L{IListeningPort}.
+
+        @see: L{twisted.internet.interfaces.IMulticastTransport}
+        @see: U{http://twistedmatrix.com/documents/current/core/howto/udp.html}
+        """
+
+
+
+class IReactorSocket(Interface):
+    """
+    Methods which allow a reactor to use externally created sockets.
+
+    For example, to use C{adoptStreamPort} to implement behavior equivalent
+    to that of L{IReactorTCP.listenTCP}, you might write code like this::
+
+        from socket import SOMAXCONN, AF_INET, SOCK_STREAM, socket
+        portSocket = socket(AF_INET, SOCK_STREAM)
+        # Set FD_CLOEXEC on port, left as an exercise.  Then make it into a
+        # non-blocking listening port:
+        portSocket.setblocking(False)
+        portSocket.bind(('192.168.1.2', 12345))
+        portSocket.listen(SOMAXCONN)
+
+        # Now have the reactor use it as a TCP port
+        port = reactor.adoptStreamPort(
+            portSocket.fileno(), AF_INET, YourFactory())
+
+        # portSocket itself is no longer necessary, and needs to be cleaned
+        # up by us.
+        portSocket.close()
+
+        # Whenever the server is no longer needed, stop it as usual.
+        stoppedDeferred = port.stopListening()
+
+    Another potential use is to inherit a listening descriptor from a parent
+    process (for example, systemd or launchd), or to receive one over a UNIX
+    domain socket.
+
+    Some plans for extending this interface exist.  See:
+
+        - U{http://twistedmatrix.com/trac/ticket/5570}: established connections
+        - U{http://twistedmatrix.com/trac/ticket/5573}: AF_UNIX ports
+        - U{http://twistedmatrix.com/trac/ticket/5574}: SOCK_DGRAM sockets
+    """
+
+    def adoptStreamPort(fileDescriptor, addressFamily, factory):
+        """
+        Add an existing listening I{SOCK_STREAM} socket to the reactor to
+        monitor for new connections to accept and handle.
+
+        @param fileDescriptor: A file descriptor associated with a socket which
+            is already bound to an address and marked as listening.  The socket
+            must be set non-blocking.  Any additional flags (for example,
+            close-on-exec) must also be set by application code.  Application
+            code is responsible for closing the file descriptor, which may be
+            done as soon as C{adoptStreamPort} returns.
+        @type fileDescriptor: C{int}
+
+        @param addressFamily: The address family (or I{domain}) of the socket.
+            For example, L{socket.AF_INET6}.
+
+        @param factory: A L{ServerFactory} instance to use to create new
+            protocols to handle connections accepted via this socket.
+
+        @return: An object providing L{IListeningPort}.
+
+        @raise UnsupportedAddressFamily: If the given address family is not
+            supported by this reactor, or not supported with the given socket
+            type.
+
+        @raise UnsupportedSocketType: If the given socket type is not supported
+            by this reactor, or not supported with the given address family.
+        """
+
+    def adoptStreamConnection(fileDescriptor, addressFamily, factory):
+        """
+        Add an existing connected I{SOCK_STREAM} socket to the reactor to
+        monitor for data.
+
+        Note that the given factory won't have its C{startFactory} and
+        C{stopFactory} methods called, as there is no sensible time to call
+        them in this situation.
+
+        @param fileDescriptor: A file descriptor associated with a socket which
+            is already connected.  The socket must be set non-blocking.  Any
+            additional flags (for example, close-on-exec) must also be set by
+            application code.  Application code is responsible for closing the
+            file descriptor, which may be done as soon as
+            C{adoptStreamConnection} returns.
+        @type fileDescriptor: C{int}
+
+        @param addressFamily: The address family (or I{domain}) of the socket.
+            For example, L{socket.AF_INET6}.
+
+        @param factory: A L{ServerFactory} instance to use to create a new
+            protocol to handle the connection via this socket.
+
+        @raise UnsupportedAddressFamily: If the given address family is not
+            supported by this reactor, or not supported with the given socket
+            type.
+
+        @raise UnsupportedSocketType: If the given socket type is not supported
+            by this reactor, or not supported with the given address family.
+        """
+
+
+
+class IReactorProcess(Interface):
+
+    def spawnProcess(processProtocol, executable, args=(), env={}, path=None,
+                     uid=None, gid=None, usePTY=0, childFDs=None):
+        """
+        Spawn a process, with a process protocol.
+
+        @type processProtocol: L{IProcessProtocol} provider
+        @param processProtocol: An object which will be notified of all
+            events related to the created process.
+
+        @param executable: the file name to spawn - the full path should be
+                           used.
+
+        @param args: the command line arguments to pass to the process; a
+                     sequence of strings. The first string should be the
+                     executable's name.
+
+        @type env: a C{dict} mapping C{str} to C{str}, or C{None}.
+        @param env: the environment variables to pass to the child process. The
+                    resulting behavior varies between platforms. If
+                      - C{env} is not set:
+                        - On POSIX: pass an empty environment.
+                        - On Windows: pass C{os.environ}.
+                      - C{env} is C{None}:
+                        - On POSIX: pass C{os.environ}.
+                        - On Windows: pass C{os.environ}.
+                      - C{env} is a C{dict}:
+                        - On POSIX: pass the key/value pairs in C{env} as the
+                          complete environment.
+                        - On Windows: update C{os.environ} with the key/value
+                          pairs in the C{dict} before passing it. As a
+                          consequence of U{bug #1640
+                          <http://twistedmatrix.com/trac/ticket/1640>}, passing
+                          keys with empty values in an effort to unset
+                          environment variables I{won't} unset them.
+
+        @param path: the path to run the subprocess in - defaults to the
+                     current directory.
+
+        @param uid: user ID to run the subprocess as. (Only available on
+                    POSIX systems.)
+
+        @param gid: group ID to run the subprocess as. (Only available on
+                    POSIX systems.)
+
+        @param usePTY: if true, run this process in a pseudo-terminal;
+                       optionally a tuple of C{(masterfd, slavefd, ttyname)},
+                       in which case those file descriptors are used.
+                       (Not available on all systems.)
+
+        @param childFDs: A dictionary mapping file descriptors in the new child
+                         process to an integer or to the string 'r' or 'w'.
+
+                         If the value is an integer, it specifies a file
+                         descriptor in the parent process which will be mapped
+                         to a file descriptor (specified by the key) in the
+                         child process.  This is useful for things like inetd
+                         and shell-like file redirection.
+
+                         If it is the string 'r', a pipe will be created and
+                         attached to the child at that file descriptor: the
+                         child will be able to write to that file descriptor
+                         and the parent will receive read notification via the
+                         L{IProcessProtocol.childDataReceived} callback.  This
+                         is useful for the child's stdout and stderr.
+
+                         If it is the string 'w', similar setup to the previous
+                         case will occur, with the pipe being readable by the
+                         child instead of writeable.  The parent process can
+                         write to that file descriptor using
+                         L{IProcessTransport.writeToChild}.  This is useful for
+                         the child's stdin.
+
+                         If childFDs is not passed, the default behaviour is to
+                         use a mapping that opens the usual stdin/stdout/stderr
+                         pipes.
+
+        @see: L{twisted.internet.protocol.ProcessProtocol}
+
+        @return: An object which provides L{IProcessTransport}.
+
+        @raise OSError: Raised with errno C{EAGAIN} or C{ENOMEM} if there are
+                        insufficient system resources to create a new process.
+        """
+
+class IReactorTime(Interface):
+    """
+    Time methods that a Reactor should implement.
+    """
+
+    def seconds():
+        """
+        Get the current time in seconds.
+
+        @return: A number-like object of some sort.
+        """
+
+
+    def callLater(delay, callable, *args, **kw):
+        """
+        Call a function later.
+
+        @type delay:  C{float}
+        @param delay: the number of seconds to wait.
+
+        @param callable: the callable object to call later.
+
+        @param args: the arguments to call it with.
+
+        @param kw: the keyword arguments to call it with.
+
+        @return: An object which provides L{IDelayedCall} and can be used to
+                 cancel the scheduled call, by calling its C{cancel()} method.
+                 It also may be rescheduled by calling its C{delay()} or
+                 C{reset()} methods.
+        """
+
+
+    def getDelayedCalls():
+        """
+        Retrieve all currently scheduled delayed calls.
+
+        @return: A tuple of all L{IDelayedCall} providers representing all
+                 currently scheduled calls. This is everything that has been
+                 returned by C{callLater} but not yet called or canceled.
+        """
+
+
+class IDelayedCall(Interface):
+    """
+    A scheduled call.
+
+    There are probably other useful methods we can add to this interface;
+    suggestions are welcome.
+    """
+
+    def getTime():
+        """
+        Get time when delayed call will happen.
+
+        @return: time in seconds since epoch (a float).
+        """
+
+    def cancel():
+        """
+        Cancel the scheduled call.
+
+        @raises twisted.internet.error.AlreadyCalled: if the call has already
+            happened.
+        @raises twisted.internet.error.AlreadyCancelled: if the call has already
+            been cancelled.
+        """
+
+    def delay(secondsLater):
+        """
+        Delay the scheduled call.
+
+        @param secondsLater: how many seconds from its current firing time to delay
+
+        @raises twisted.internet.error.AlreadyCalled: if the call has already
+            happened.
+        @raises twisted.internet.error.AlreadyCancelled: if the call has already
+            been cancelled.
+        """
+
+    def reset(secondsFromNow):
+        """
+        Reset the scheduled call's timer.
+
+        @param secondsFromNow: how many seconds from now it should fire,
+            equivalent to C{.cancel()} and then doing another
+            C{reactor.callLater(secondsFromNow, ...)}
+
+        @raises twisted.internet.error.AlreadyCalled: if the call has already
+            happened.
+        @raises twisted.internet.error.AlreadyCancelled: if the call has already
+            been cancelled.
+        """
+
+    def active():
+        """
+        @return: True if this call is still active, False if it has been
+                 called or cancelled.
+        """
+
+class IReactorThreads(Interface):
+    """
+    Dispatch methods to be run in threads.
+
+    Internally, this should use a thread pool and dispatch methods to them.
+    """
+
+    def getThreadPool():
+        """
+        Return the threadpool used by L{callInThread}.  Create it first if
+        necessary.
+
+        @rtype: L{twisted.python.threadpool.ThreadPool}
+        """
+
+
+    def callInThread(callable, *args, **kwargs):
+        """
+        Run the callable object in a separate thread.
+        """
+
+
+    def callFromThread(callable, *args, **kw):
+        """
+        Cause a function to be executed by the reactor thread.
+
+        Use this method when you want to run a function in the reactor's thread
+        from another thread.  Calling L{callFromThread} should wake up the main
+        thread (where L{reactor.run()<reactor.run>} is executing) and run the
+        given callable in that thread.
+
+        If you're writing a multi-threaded application, the C{callable} may need
+        to be thread-safe, but this method does not require that. If you
+        want to call a function in the next mainloop iteration, but you're in
+        the same thread, use L{callLater} with a delay of 0.
+        """
+
+
+    def suggestThreadPoolSize(size):
+        """
+        Suggest the size of the internal threadpool used to dispatch functions
+        passed to L{callInThread}.
+        """
+
+
+class IReactorCore(Interface):
+    """
+    Core methods that a Reactor must implement.
+    """
+
+    running = Attribute(
+        "A C{bool} which is C{True} from I{during startup} to "
+        "I{during shutdown} and C{False} the rest of the time.")
+
+
+    def resolve(name, timeout=10):
+        """
+        Return a L{twisted.internet.defer.Deferred} that will resolve a hostname.
+        """
+
+    def run():
+        """
+        Fire 'startup' System Events, move the reactor to the 'running'
+        state, then run the main loop until it is stopped with C{stop()} or
+        C{crash()}.
+        """
+
+    def stop():
+        """
+        Fire 'shutdown' System Events, which will move the reactor to the
+        'stopped' state and cause C{reactor.run()} to exit.
+        """
+
+    def crash():
+        """
+        Stop the main loop *immediately*, without firing any system events.
+
+        This is named as it is because this is an extremely "rude" thing to do;
+        it is possible to lose data and put your system in an inconsistent
+        state by calling this.  However, it is necessary, as sometimes a system
+        can become wedged in a pre-shutdown call.
+        """
+
+    def iterate(delay=0):
+        """
+        Run the main loop's I/O polling function for a period of time.
+
+        This is most useful in applications where the UI is being drawn "as
+        fast as possible", such as games. All pending L{IDelayedCall}s will
+        be called.
+
+        The reactor must have been started (via the C{run()} method) prior to
+        any invocations of this method.  It must also be stopped manually
+        after the last call to this method (via the C{stop()} method).  This
+        method is not re-entrant: you must not call it recursively; in
+        particular, you must not call it while the reactor is running.
+        """
+
+    def fireSystemEvent(eventType):
+        """
+        Fire a system-wide event.
+
+        System-wide events are things like 'startup', 'shutdown', and
+        'persist'.
+        """
+
+    def addSystemEventTrigger(phase, eventType, callable, *args, **kw):
+        """
+        Add a function to be called when a system event occurs.
+
+        Each "system event" in Twisted, such as 'startup', 'shutdown', and
+        'persist', has 3 phases: 'before', 'during', and 'after' (in that
+        order, of course).  These events will be fired internally by the
+        Reactor.
+
+        An implementor of this interface must only implement those events
+        described here.
+
+        Callbacks registered for the "before" phase may return either None or a
+        Deferred.  The "during" phase will not execute until all of the
+        Deferreds from the "before" phase have fired.
+
+        Once the "during" phase is running, all of the remaining triggers must
+        execute; their return values must be ignored.
+
+        @param phase: a time to call the event -- either the string 'before',
+                      'after', or 'during', describing when to call it
+                      relative to the event's execution.
+
+        @param eventType: this is a string describing the type of event.
+
+        @param callable: the object to call before shutdown.
+
+        @param args: the arguments to call it with.
+
+        @param kw: the keyword arguments to call it with.
+
+        @return: an ID that can be used to remove this call with
+                 removeSystemEventTrigger.
+        """
+
+    def removeSystemEventTrigger(triggerID):
+        """
+        Removes a trigger added with addSystemEventTrigger.
+
+        @param triggerID: a value returned from addSystemEventTrigger.
+
+        @raise KeyError: If there is no system event trigger for the given
+            C{triggerID}.
+
+        @raise ValueError: If there is no system event trigger for the given
+            C{triggerID}.
+
+        @raise TypeError: If there is no system event trigger for the given
+            C{triggerID}.
+        """
+
+    def callWhenRunning(callable, *args, **kw):
+        """
+        Call a function when the reactor is running.
+
+        If the reactor has not started, the callable will be scheduled
+        to run when it does start. Otherwise, the callable will be invoked
+        immediately.
+
+        @param callable: the callable object to call later.
+
+        @param args: the arguments to call it with.
+
+        @param kw: the keyword arguments to call it with.
+
+        @return: None if the callable was invoked, otherwise a system
+                 event id for the scheduled call.
+        """
+
+
+class IReactorPluggableResolver(Interface):
+    """
+    A reactor with a pluggable name resolver interface.
+    """
+
+    def installResolver(resolver):
+        """
+        Set the internal resolver to use to for name lookups.
+
+        @type resolver: An object implementing the L{IResolverSimple} interface
+        @param resolver: The new resolver to use.
+
+        @return: The previously installed resolver.
+        """
+
+
+class IReactorDaemonize(Interface):
+    """
+    A reactor which provides hooks that need to be called before and after
+    daemonization.
+
+    Notes:
+       - This interface SHOULD NOT be called by applications.
+       - This interface should only be implemented by reactors as a workaround
+         (in particular, it's implemented currently only by kqueue()).
+         For details please see the comments on ticket #1918.
+    """
+
+    def beforeDaemonize():
+        """
+        Hook to be called immediately before daemonization. No reactor methods
+        may be called until L{afterDaemonize} is called.
+
+        @return: C{None}.
+        """
+
+
+    def afterDaemonize():
+        """
+        Hook to be called immediately after daemonization. This may only be
+        called after L{beforeDaemonize} had been called previously.
+
+        @return: C{None}.
+        """
+
+
+
+class IReactorFDSet(Interface):
+    """
+    Implement me to be able to use L{IFileDescriptor} type resources.
+
+    This assumes that your main-loop uses UNIX-style numeric file descriptors
+    (or at least similarly opaque IDs returned from a .fileno() method)
+    """
+
+    def addReader(reader):
+        """
+        I add reader to the set of file descriptors to get read events for.
+
+        @param reader: An L{IReadDescriptor} provider that will be checked for
+                       read events until it is removed from the reactor with
+                       L{removeReader}.
+
+        @return: C{None}.
+        """
+
+    def addWriter(writer):
+        """
+        I add writer to the set of file descriptors to get write events for.
+
+        @param writer: An L{IWriteDescriptor} provider that will be checked for
+                       write events until it is removed from the reactor with
+                       L{removeWriter}.
+
+        @return: C{None}.
+        """
+
+    def removeReader(reader):
+        """
+        Removes an object previously added with L{addReader}.
+
+        @return: C{None}.
+        """
+
+    def removeWriter(writer):
+        """
+        Removes an object previously added with L{addWriter}.
+
+        @return: C{None}.
+        """
+
+    def removeAll():
+        """
+        Remove all readers and writers.
+
+        Should not remove reactor-internal connections (like a waker).
+
+        @return: A list of L{IReadDescriptor} and L{IWriteDescriptor} providers
+                 which were removed.
+        """
+
+    def getReaders():
+        """
+        Return the list of file descriptors currently monitored for input
+        events by the reactor.
+
+        @return: the list of file descriptors monitored for input events.
+        @rtype: C{list} of C{IReadDescriptor}
+        """
+
+    def getWriters():
+        """
+        Return the list of file descriptors currently monitored for output events
+        by the reactor.
+
+        @return: the list of file descriptors monitored for output events.
+        @rtype: C{list} of C{IWriteDescriptor}
+        """
+
+
+class IListeningPort(Interface):
+    """
+    A listening port.
+    """
+
+    def startListening():
+        """
+        Start listening on this port.
+
+        @raise CannotListenError: If it cannot listen on this port (e.g., it is
+                                  a TCP port and it cannot bind to the required
+                                  port number).
+        """
+
+    def stopListening():
+        """
+        Stop listening on this port.
+
+        If it does not complete immediately, will return a Deferred that fires
+        upon completion.
+        """
+
+    def getHost():
+        """
+        Get the host that this port is listening for.
+
+        @return: An L{IAddress} provider.
+        """
+
+
+class ILoggingContext(Interface):
+    """
+    Give context information that will be used to log events generated by
+    this item.
+    """
+
+    def logPrefix():
+        """
+        @return: Prefix used during log formatting to indicate context.
+        @rtype: C{str}
+        """
+
+
+
+class IFileDescriptor(ILoggingContext):
+    """
+    An interface representing a UNIX-style numeric file descriptor.
+    """
+
+    def fileno():
+        """
+        @raise: If the descriptor no longer has a valid file descriptor
+            number associated with it.
+
+        @return: The platform-specified representation of a file descriptor
+            number.  Or C{-1} if the descriptor no longer has a valid file
+            descriptor number associated with it.  As long as the descriptor
+            is valid, calls to this method on a particular instance must
+            return the same value.
+        """
+
+
+    def connectionLost(reason):
+        """
+        Called when the connection was lost.
+
+        This is called when the connection on a selectable object has been
+        lost.  It will be called whether the connection was closed explicitly,
+        an exception occurred in an event handler, or the other end of the
+        connection closed it first.
+
+        See also L{IHalfCloseableDescriptor} if your descriptor wants to be
+        notified separately of the two halves of the connection being closed.
+
+        @param reason: A failure instance indicating the reason why the
+                       connection was lost.  L{error.ConnectionLost} and
+                       L{error.ConnectionDone} are of special note, but the
+                       failure may be of other classes as well.
+        """
+
+
+
+class IReadDescriptor(IFileDescriptor):
+    """
+    An L{IFileDescriptor} that can read.
+
+    This interface is generally used in conjunction with L{IReactorFDSet}.
+    """
+
+    def doRead():
+        """
+        Some data is available for reading on your descriptor.
+
+        @return: If an error is encountered which causes the descriptor to
+            no longer be valid, a L{Failure} should be returned.  Otherwise,
+            C{None}.
+        """
+
+
+class IWriteDescriptor(IFileDescriptor):
+    """
+    An L{IFileDescriptor} that can write.
+
+    This interface is generally used in conjunction with L{IReactorFDSet}.
+    """
+
+    def doWrite():
+        """
+        Some data can be written to your descriptor.
+
+        @return: If an error is encountered which causes the descriptor to
+            no longer be valid, a L{Failure} should be returned.  Otherwise,
+            C{None}.
+        """
+
+
+class IReadWriteDescriptor(IReadDescriptor, IWriteDescriptor):
+    """
+    An L{IFileDescriptor} that can both read and write.
+    """
+
+
+class IHalfCloseableDescriptor(Interface):
+    """
+    A descriptor that can be half-closed.
+    """
+
+    def writeConnectionLost(reason):
+        """
+        Indicates write connection was lost.
+        """
+
+    def readConnectionLost(reason):
+        """
+        Indicates read connection was lost.
+        """
+
+
+class ISystemHandle(Interface):
+    """
+    An object that wraps a networking OS-specific handle.
+    """
+
+    def getHandle():
+        """
+        Return a system- and reactor-specific handle.
+
+        This might be a socket.socket() object, or some other type of
+        object, depending on which reactor is being used. Use and
+        manipulate at your own risk.
+
+        This might be used in cases where you want to set specific
+        options not exposed by the Twisted APIs.
+        """
+
+
+class IConsumer(Interface):
+    """
+    A consumer consumes data from a producer.
+    """
+
+    def registerProducer(producer, streaming):
+        """
+        Register to receive data from a producer.
+
+        This sets self to be a consumer for a producer.  When this object runs
+        out of data (as when a send(2) call on a socket succeeds in moving the
+        last data from a userspace buffer into a kernelspace buffer), it will
+        ask the producer to resumeProducing().
+
+        For L{IPullProducer} providers, C{resumeProducing} will be called once
+        each time data is required.
+
+        For L{IPushProducer} providers, C{pauseProducing} will be called
+        whenever the write buffer fills up and C{resumeProducing} will only be
+        called when it empties.
+
+        @type producer: L{IProducer} provider
+
+        @type streaming: C{bool}
+        @param streaming: C{True} if C{producer} provides L{IPushProducer},
+        C{False} if C{producer} provides L{IPullProducer}.
+
+        @raise RuntimeError: If a producer is already registered.
+
+        @return: C{None}
+        """
+
+
+    def unregisterProducer():
+        """
+        Stop consuming data from a producer, without disconnecting.
+        """
+
+
+    def write(data):
+        """
+        The producer will write data by calling this method.
+
+        The implementation must be non-blocking and perform whatever
+        buffering is necessary.  If the producer has provided enough data
+        for now and it is a L{IPushProducer}, the consumer may call its
+        C{pauseProducing} method.
+        """
+
+
+
+class IProducer(Interface):
+    """
+    A producer produces data for a consumer.
+
+    Typically producing is done by calling the write method of a class
+    implementing L{IConsumer}.
+    """
+
+    def stopProducing():
+        """
+        Stop producing data.
+
+        This tells a producer that its consumer has died, so it must stop
+        producing data for good.
+        """
+
+
+class IPushProducer(IProducer):
+    """
+    A push producer, also known as a streaming producer, is expected to
+    produce (write to this consumer) data on a continuous basis, unless
+    it has been paused. A paused push producer will resume producing
+    after its resumeProducing() method is called. For a push producer
+    which is not pauseable, these functions may be no-ops.
+    """
+
+    def pauseProducing():
+        """
+        Pause producing data.
+
+        Tells a producer that it has produced too much data to process for
+        the time being, and to stop until resumeProducing() is called.
+        """
+    def resumeProducing():
+        """
+        Resume producing data.
+
+        This tells a producer to re-add itself to the main loop and produce
+        more data for its consumer.
+        """
+
+class IPullProducer(IProducer):
+    """
+    A pull producer, also known as a non-streaming producer, is
+    expected to produce data each time resumeProducing() is called.
+    """
+
+    def resumeProducing():
+        """
+        Produce data for the consumer a single time.
+
+        This tells a producer to produce data for the consumer once
+        (not repeatedly, once only). Typically this will be done
+        by calling the consumer's write() method a single time with
+        produced data.
+        """
+
+class IProtocol(Interface):
+
+    def dataReceived(data):
+        """
+        Called whenever data is received.
+
+        Use this method to translate to a higher-level message.  Usually, some
+        callback will be made upon the receipt of each complete protocol
+        message.
+
+        @param data: a string of indeterminate length.  Please keep in mind
+            that you will probably need to buffer some data, as partial
+            (or multiple) protocol messages may be received!  I recommend
+            that unit tests for protocols call through to this method with
+            differing chunk sizes, down to one byte at a time.
+        """
+
+    def connectionLost(reason):
+        """
+        Called when the connection is shut down.
+
+        Clear any circular references here, and any external references
+        to this Protocol.  The connection has been closed. The C{reason}
+        Failure wraps a L{twisted.internet.error.ConnectionDone} or
+        L{twisted.internet.error.ConnectionLost} instance (or a subclass
+        of one of those).
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+
+    def makeConnection(transport):
+        """
+        Make a connection to a transport and a server.
+        """
+
+    def connectionMade():
+        """
+        Called when a connection is made.
+
+        This may be considered the initializer of the protocol, because
+        it is called when the connection is completed.  For clients,
+        this is called once the connection to the server has been
+        established; for servers, this is called after an accept() call
+        stops blocking and a socket has been received.  If you need to
+        send any greeting or initial message, do it here.
+        """
+
+
+class IProcessProtocol(Interface):
+    """
+    Interface for process-related event handlers.
+    """
+
+    def makeConnection(process):
+        """
+        Called when the process has been created.
+
+        @type process: L{IProcessTransport} provider
+        @param process: An object representing the process which has been
+            created and associated with this protocol.
+        """
+
+
+    def childDataReceived(childFD, data):
+        """
+        Called when data arrives from the child process.
+
+        @type childFD: C{int}
+        @param childFD: The file descriptor from which the data was
+            received.
+
+        @type data: C{str}
+        @param data: The data read from the child's file descriptor.
+        """
+
+
+    def childConnectionLost(childFD):
+        """
+        Called when a file descriptor associated with the child process is
+        closed.
+
+        @type childFD: C{int}
+        @param childFD: The file descriptor which was closed.
+        """
+
+
+    def processExited(reason):
+        """
+        Called when the child process exits.
+
+        @type reason: L{twisted.python.failure.Failure}
+        @param reason: A failure giving the reason the child process
+            terminated.  The type of exception for this failure is either
+            L{twisted.internet.error.ProcessDone} or
+            L{twisted.internet.error.ProcessTerminated}.
+
+        @since: 8.2
+        """
+
+
+    def processEnded(reason):
+        """
+        Called when the child process exits and all file descriptors associated
+        with it have been closed.
+
+        @type reason: L{twisted.python.failure.Failure}
+        @param reason: A failure giving the reason the child process
+            terminated.  The type of exception for this failure is either
+            L{twisted.internet.error.ProcessDone} or
+            L{twisted.internet.error.ProcessTerminated}.
+        """
+
+
+
+class IHalfCloseableProtocol(Interface):
+    """
+    Implemented by protocols to indicate that they want notification of
+    half-closes.
+
+    TCP supports the notion of half-closing the connection, e.g.
+    closing the write side but still not stopping reading. A protocol
+    that implements this interface will be notified of such events,
+    instead of having connectionLost called.
+    """
+
+    def readConnectionLost():
+        """
+        Notification of the read connection being closed.
+
+        This indicates that the peer has done a half-close of the write
+        side. It is now the responsibility of this protocol to call
+        loseConnection().  In addition, the protocol MUST make sure a
+        reference to it still exists (i.e. by doing a callLater with
+        one of its methods, etc.)  as the reactor will only have a
+        reference to it if it is writing.
+
+        If the protocol does not do so, it might get garbage collected
+        without the connectionLost method ever being called.
+        """
+
+    def writeConnectionLost():
+        """
+        Notification of the write connection being closed.
+
+        This will never be called for TCP connections as TCP does not
+        support notification of this type of half-close.
+        """
+
+
+
+class IFileDescriptorReceiver(Interface):
+    """
+    Protocols may implement L{IFileDescriptorReceiver} to receive file
+    descriptors sent to them.  This is useful in conjunction with
+    L{IUNIXTransport}, which allows file descriptors to be sent between
+    processes on a single host.
+    """
+    def fileDescriptorReceived(descriptor):
+        """
+        Called when a file descriptor is received over the connection.
+
+        @param descriptor: The descriptor which was received.
+        @type descriptor: C{int}
+
+        @return: C{None}
+        """
+
+
+
+class IProtocolFactory(Interface):
+    """
+    Interface for protocol factories.
+    """
+
+    def buildProtocol(addr):
+        """
+        Called when a connection has been established to addr.
+
+        If None is returned, the connection is assumed to have been refused,
+        and the Port will close the connection.
+
+        @type addr: (host, port)
+        @param addr: The address of the newly-established connection
+
+        @return: None if the connection was refused, otherwise an object
+                 providing L{IProtocol}.
+        """
+
+    def doStart():
+        """
+        Called every time this is connected to a Port or Connector.
+        """
+
+    def doStop():
+        """
+        Called every time this is unconnected from a Port or Connector.
+        """
+
+
+class ITransport(Interface):
+    """
+    I am a transport for bytes.
+
+    I represent (and wrap) the physical connection and synchronicity
+    of the framework which is talking to the network.  I make no
+    representations about whether calls to me will happen immediately
+    or require returning to a control loop, or whether they will happen
+    in the same or another thread.  Consider methods of this class
+    (aside from getPeer) to be 'thrown over the wall', to happen at some
+    indeterminate time.
+    """
+
+    def write(data):
+        """
+        Write some data to the physical connection, in sequence, in a
+        non-blocking fashion.
+
+        If possible, make sure that it is all written.  No data will
+        ever be lost, although (obviously) the connection may be closed
+        before it all gets through.
+        """
+
+    def writeSequence(data):
+        """
+        Write a list of strings to the physical connection.
+
+        If possible, make sure that all of the data is written to
+        the socket at once, without first copying it all into a
+        single string.
+        """
+
+    def loseConnection():
+        """
+        Close my connection, after writing all pending data.
+
+        Note that if there is a registered producer on a transport it
+        will not be closed until the producer has been unregistered.
+        """
+
+    def getPeer():
+        """
+        Get the remote address of this connection.
+
+        Treat this method with caution.  It is the unfortunate result of the
+        CGI and Jabber standards, but should not be considered reliable for
+        the usual host of reasons; port forwarding, proxying, firewalls, IP
+        masquerading, etc.
+
+        @return: An L{IAddress} provider.
+        """
+
+    def getHost():
+        """
+        Similar to getPeer, but returns an address describing this side of the
+        connection.
+
+        @return: An L{IAddress} provider.
+        """
+
+
+class ITCPTransport(ITransport):
+    """
+    A TCP based transport.
+    """
+
+    def loseWriteConnection():
+        """
+        Half-close the write side of a TCP connection.
+
+        If the protocol instance this is attached to provides
+        IHalfCloseableProtocol, it will get notified when the operation is
+        done. As with loseConnection, the write side will only be closed
+        once the buffer has emptied and there is no registered
+        producer.
+        """
+
+
+    def abortConnection():
+        """
+        Close the connection abruptly.
+
+        Discards any buffered data, stops any registered producer,
+        and, if possible, notifies the other end of the unclean
+        closure.
+
+        @since: 11.1
+        """
+
+
+    def getTcpNoDelay():
+        """
+        Return if C{TCP_NODELAY} is enabled.
+        """
+
+    def setTcpNoDelay(enabled):
+        """
+        Enable/disable C{TCP_NODELAY}.
+
+        Enabling C{TCP_NODELAY} turns off Nagle's algorithm. Small packets are
+        sent sooner, possibly at the expense of overall throughput.
+        """
+
+    def getTcpKeepAlive():
+        """
+        Return if C{SO_KEEPALIVE} is enabled.
+        """
+
+    def setTcpKeepAlive(enabled):
+        """
+        Enable/disable C{SO_KEEPALIVE}.
+
+        Enabling C{SO_KEEPALIVE} sends packets periodically when the connection
+        is otherwise idle, usually once every two hours. They are intended
+        to allow detection of lost peers in a non-infinite amount of time.
+        """
+
+    def getHost():
+        """
+        Returns L{IPv4Address} or L{IPv6Address}.
+        """
+
+    def getPeer():
+        """
+        Returns L{IPv4Address} or L{IPv6Address}.
+        """
+
+
+
+class IUNIXTransport(ITransport):
+    """
+    Transport for stream-oriented unix domain connections.
+    """
+    def sendFileDescriptor(descriptor):
+        """
+        Send a duplicate of this (file, socket, pipe, etc) descriptor to the
+        other end of this connection.
+
+        The send is non-blocking and will be queued if it cannot be performed
+        immediately.  The send will be processed in order with respect to other
+        C{sendFileDescriptor} calls on this transport, but not necessarily with
+        respect to C{write} calls on this transport.  The send can only be
+        processed if there are also bytes in the normal connection-oriented send
+        buffer (ie, you must call C{write} at least as many times as you call
+        C{sendFileDescriptor}).
+
+        @param descriptor: An C{int} giving a valid file descriptor in this
+            process.  Note that a I{file descriptor} may actually refer to a
+            socket, a pipe, or anything else POSIX tries to treat in the same
+            way as a file.
+
+        @return: C{None}
+        """
+
+
+
+class ITLSTransport(ITCPTransport):
+    """
+    A TCP transport that supports switching to TLS midstream.
+
+    Once TLS mode is started the transport will implement L{ISSLTransport}.
+    """
+
+    def startTLS(contextFactory):
+        """
+        Initiate TLS negotiation.
+
+        @param contextFactory: A context factory (see L{ssl.py<twisted.internet.ssl>})
+        """
+
+class ISSLTransport(ITCPTransport):
+    """
+    A SSL/TLS based transport.
+    """
+
+    def getPeerCertificate():
+        """
+        Return an object with the peer's certificate info.
+        """
+
+
+class IProcessTransport(ITransport):
+    """
+    A process transport.
+    """
+
+    pid = Attribute(
+        "From before L{IProcessProtocol.makeConnection} is called to before "
+        "L{IProcessProtocol.processEnded} is called, C{pid} is an L{int} "
+        "giving the platform process ID of this process.  C{pid} is L{None} "
+        "at all other times.")
+
+    def closeStdin():
+        """
+        Close stdin after all data has been written out.
+        """
+
+    def closeStdout():
+        """
+        Close stdout.
+        """
+
+    def closeStderr():
+        """
+        Close stderr.
+        """
+
+    def closeChildFD(descriptor):
+        """
+        Close a file descriptor which is connected to the child process, identified
+        by its FD in the child process.
+        """
+
+    def writeToChild(childFD, data):
+        """
+        Similar to L{ITransport.write} but also allows the file descriptor in
+        the child process which will receive the bytes to be specified.
+
+        @type childFD: C{int}
+        @param childFD: The file descriptor to which to write.
+
+        @type data: C{str}
+        @param data: The bytes to write.
+
+        @return: C{None}
+
+        @raise KeyError: If C{childFD} is not a file descriptor that was mapped
+            in the child when L{IReactorProcess.spawnProcess} was used to create
+            it.
+        """
+
+    def loseConnection():
+        """
+        Close stdin, stderr and stdout.
+        """
+
+    def signalProcess(signalID):
+        """
+        Send a signal to the process.
+
+        @param signalID: can be
+          - one of C{"KILL"}, C{"TERM"}, or C{"INT"}.
+              These will be implemented in a
+              cross-platform manner, and so should be used
+              if possible.
+          - an integer, where it represents a POSIX
+              signal ID.
+
+        @raise twisted.internet.error.ProcessExitedAlready: If the process has
+            already exited.
+        @raise OSError: If the C{os.kill} call fails with an errno different
+            from C{ESRCH}.
+        """
+
+
+class IServiceCollection(Interface):
+    """
+    An object which provides access to a collection of services.
+    """
+
+    def getServiceNamed(serviceName):
+        """
+        Retrieve the named service from this application.
+
+        Raise a C{KeyError} if there is no such service name.
+        """
+
+    def addService(service):
+        """
+        Add a service to this collection.
+        """
+
+    def removeService(service):
+        """
+        Remove a service from this collection.
+        """
+
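+# Editorial note (illustrative sketch, not part of upstream Twisted):
+# twisted.application.service.MultiService is the stock IServiceCollection
+# implementation; named children are looked up with getServiceNamed():
+#
+#     from twisted.application import service
+#
+#     top = service.MultiService()
+#     child = service.Service()
+#     child.setName("web")
+#     child.setServiceParent(top)        # equivalent to top.addService(child)
+#     assert top.getServiceNamed("web") is child
+#     child.disownServiceParent()        # equivalent to top.removeService(child)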
+
+class IUDPTransport(Interface):
+    """
+    Transport for UDP DatagramProtocols.
+    """
+
+    def write(packet, addr=None):
+        """
+        Write packet to given address.
+
+        @param addr: a tuple of (ip, port).  For connected transports this
+                     must be the address the transport is connected to, or
+                     None; for non-connected transports it is mandatory.
+
+        @raise twisted.internet.error.MessageLengthError: C{packet} was too
+            long.
+        """
+
+    def connect(host, port):
+        """
+        Connect the transport to an address.
+
+        This changes it to connected mode.  Datagrams can only be sent to
+        this address, and will only be received from this address.  In
+        addition, the protocol's C{connectionRefused} method may be called if
+        the destination is not receiving datagrams.
+
+        @param host: an IP address, not a domain name ('127.0.0.1', not 'localhost')
+        @param port: port to connect to.
+        """
+
+    def getHost():
+        """
+        Returns L{IPv4Address}.
+        """
+
+    def stopListening():
+        """
+        Stop listening on this port.
+
+        If it does not complete immediately, will return L{Deferred} that fires
+        upon completion.
+        """
+
+
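+# Editorial note (illustrative sketch, not part of upstream Twisted): the
+# transport handed to a C{DatagramProtocol} provides L{IUDPTransport}.  A
+# small echo responder (the port number is arbitrary):
+#
+#     from twisted.internet import reactor, protocol
+#
+#     class Echo(protocol.DatagramProtocol):
+#         def datagramReceived(self, data, addr):
+#             # addr is the (ip, port) tuple the datagram came from
+#             self.transport.write(data, addr)
+#
+#     reactor.listenUDP(9999, Echo())
+#     reactor.run()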
+
+class IUNIXDatagramTransport(Interface):
+    """
+    Transport for UNIX datagram protocols.
+    """
+
+    def write(packet, address):
+        """
+        Write packet to given address.
+        """
+
+    def getHost():
+        """
+        Returns L{UNIXAddress}.
+        """
+
+
+class IUNIXDatagramConnectedTransport(Interface):
+    """
+    Transport for connected UNIX datagram protocols.
+    """
+
+    def write(packet):
+        """
+        Write packet to address we are connected to.
+        """
+
+    def getHost():
+        """
+        Returns L{UNIXAddress}.
+        """
+
+    def getPeer():
+        """
+        Returns L{UNIXAddress}.
+        """
+
+
+class IMulticastTransport(Interface):
+    """
+    Additional functionality for multicast UDP.
+    """
+
+    def getOutgoingInterface():
+        """
+        Return interface of outgoing multicast packets.
+        """
+
+    def setOutgoingInterface(addr):
+        """
+        Set interface for outgoing multicast packets.
+
+        Returns Deferred of success.
+        """
+
+    def getLoopbackMode():
+        """
+        Return if loopback mode is enabled.
+        """
+
+    def setLoopbackMode(mode):
+        """
+        Set if loopback mode is enabled.
+        """
+
+    def getTTL():
+        """
+        Get time to live for multicast packets.
+        """
+
+    def setTTL(ttl):
+        """
+        Set time to live on multicast packets.
+        """
+
+    def joinGroup(addr, interface=""):
+        """
+        Join a multicast group. Returns L{Deferred} of success or failure.
+
+        If an error occurs, the returned L{Deferred} will fail with
+        L{error.MulticastJoinError}.
+        """
+
+    def leaveGroup(addr, interface=""):
+        """
+        Leave multicast group, return L{Deferred} of success.
+        """
+
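+# Editorial note (illustrative sketch, not part of upstream Twisted):
+# C{reactor.listenMulticast()} returns a port whose transport adds these
+# methods; the group address and port below are arbitrary example values:
+#
+#     from twisted.internet import reactor, protocol
+#
+#     class Member(protocol.DatagramProtocol):
+#         def startProtocol(self):
+#             self.transport.setTTL(5)
+#             # joinGroup returns a Deferred; it fails with
+#             # error.MulticastJoinError if the join does not succeed
+#             self.transport.joinGroup("228.0.0.5")
+#
+#     reactor.listenMulticast(8005, Member(), listenMultiple=True)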
+
+class IStreamClientEndpoint(Interface):
+    """
+    A stream client endpoint is a place that L{ClientFactory} can connect to.
+    For example, a remote TCP host/port pair would be a TCP client endpoint.
+
+    @since: 10.1
+    """
+
+    def connect(protocolFactory):
+        """
+        Connect the C{protocolFactory} to the location specified by this
+        L{IStreamClientEndpoint} provider.
+
+        @param protocolFactory: A provider of L{IProtocolFactory}
+        @return: A L{Deferred} that results in an L{IProtocol} upon successful
+            connection otherwise a L{ConnectError}
+        """
+
+
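+# Editorial note (illustrative sketch, not part of upstream Twisted):
+# TCP4ClientEndpoint is one stock L{IStreamClientEndpoint} provider; the host
+# and port below are placeholders:
+#
+#     from twisted.internet import reactor, protocol
+#     from twisted.internet.endpoints import TCP4ClientEndpoint
+#
+#     factory = protocol.ClientFactory()
+#     factory.protocol = protocol.Protocol
+#     endpoint = TCP4ClientEndpoint(reactor, "example.com", 80)
+#     d = endpoint.connect(factory)
+#     d.addCallback(lambda proto: proto.transport.loseConnection())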
+
+class IStreamServerEndpoint(Interface):
+    """
+    A stream server endpoint is a place that a L{Factory} can listen for
+    incoming connections.
+
+    @since: 10.1
+    """
+
+    def listen(protocolFactory):
+        """
+        Listen with C{protocolFactory} at the location specified by this
+        L{IStreamServerEndpoint} provider.
+
+        @param protocolFactory: A provider of L{IProtocolFactory}
+        @return: A L{Deferred} that results in an L{IListeningPort} or an
+            L{CannotListenError}
+        """
+
+
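+# Editorial note (illustrative sketch, not part of upstream Twisted): the
+# server-side counterpart, using the stock TCP4ServerEndpoint (the port
+# number is a placeholder):
+#
+#     from twisted.internet import reactor, protocol
+#     from twisted.internet.endpoints import TCP4ServerEndpoint
+#
+#     factory = protocol.Factory()
+#     factory.protocol = protocol.Protocol
+#     d = TCP4ServerEndpoint(reactor, 8080).listen(factory)
+#     # d fires with an IListeningPort, or fails with CannotListenError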
+
+class IStreamServerEndpointStringParser(Interface):
+    """
+    An L{IStreamServerEndpointStringParser} is like an
+    L{IStreamClientEndpointStringParser}, except for L{IStreamServerEndpoint}s
+    instead of clients.  It integrates with L{endpoints.serverFromString} in
+    much the same way.
+    """
+
+    prefix = Attribute(
+        """
+        @see: L{IStreamClientEndpointStringParser.prefix}
+        """
+    )
+
+
+    def parseStreamServer(reactor, *args, **kwargs):
+        """
+        Parse a stream server endpoint from a reactor and string-only arguments
+        and keyword arguments.
+
+        @see: L{IStreamClientEndpointStringParser.parseStreamClient}
+
+        @return: a stream server endpoint
+        @rtype: L{IStreamServerEndpoint}
+        """
+
+
+
+class IStreamClientEndpointStringParser(Interface):
+    """
+    An L{IStreamClientEndpointStringParser} is a parser which can convert
+    a set of string C{*args} and C{**kwargs} into an L{IStreamClientEndpoint}
+    provider.
+
+    This interface is really only useful in the context of the plugin system
+    for L{endpoints.clientFromString}.  See the document entitled "I{The
+    Twisted Plugin System}" for more details on how to write a plugin.
+
+    If you place an L{IStreamClientEndpointStringParser} plugin in the
+    C{twisted.plugins} package, that plugin's C{parseStreamClient} method will
+    be used to produce endpoints for any description string that begins with
+    the value of that L{IStreamClientEndpointStringParser}'s C{prefix}
+    attribute.
+    """
+
+    prefix = Attribute(
+        """
+        A C{str}, the description prefix to respond to.  For example, an
+        L{IStreamClientEndpointStringParser} plugin which had C{"foo"} for its
+        C{prefix} attribute would be called for endpoint descriptions like
+        C{"foo:bar:baz"} or C{"foo:"}.
+        """
+    )
+
+
+    def parseStreamClient(*args, **kwargs):
+        """
+        This method is invoked by L{endpoints.clientFromString}, if the type of
+        endpoint matches the value of this
+        L{IStreamClientEndpointStringParser}'s C{prefix} attribute.
+
+        @param args: The string arguments, minus the endpoint type, in the
+            endpoint description string, parsed according to the rules
+            described in L{endpoints.quoteStringArgument}.  For example, if the
+            description were C{"my-type:foo:bar:baz=qux"}, C{args} would be
+            C{('foo','bar')}
+
+        @param kwargs: The string arguments from the endpoint description
+            passed as keyword arguments.  For example, if the description were
+            C{"my-type:foo:bar:baz=qux"}, C{kwargs} would be
+            C{dict(baz='qux')}.
+
+        @return: a client endpoint
+        @rtype: L{IStreamClientEndpoint}
+        """
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/__init__.py b/ThirdParty/Twisted/twisted/internet/iocpreactor/__init__.py
new file mode 100644
index 0000000..c403e51
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/__init__.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+I/O Completion Ports reactor
+"""
+
+from twisted.internet.iocpreactor.reactor import install
+
+__all__ = ['install']
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/abstract.py b/ThirdParty/Twisted/twisted/internet/iocpreactor/abstract.py
new file mode 100644
index 0000000..ee3c51f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/abstract.py
@@ -0,0 +1,400 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Abstract file handle class
+"""
+
+from twisted.internet import main, error, interfaces
+from twisted.internet.abstract import _ConsumerMixin, _LogOwner
+from twisted.python import failure
+
+from zope.interface import implements
+import errno
+
+from twisted.internet.iocpreactor.const import ERROR_HANDLE_EOF
+from twisted.internet.iocpreactor.const import ERROR_IO_PENDING
+from twisted.internet.iocpreactor import iocpsupport as _iocp
+
+
+
+class FileHandle(_ConsumerMixin, _LogOwner):
+    """
+    File handle that can read and write asynchronously
+    """
+    implements(interfaces.IPushProducer, interfaces.IConsumer,
+               interfaces.ITransport, interfaces.IHalfCloseableDescriptor)
+    # read stuff
+    maxReadBuffers = 16
+    readBufferSize = 4096
+    reading = False
+    dynamicReadBuffers = True # set this to false if subclass doesn't do iovecs
+    _readNextBuffer = 0
+    _readSize = 0 # how much data we have in the read buffer
+    _readScheduled = None
+    _readScheduledInOS = False
+
+
+    def startReading(self):
+        self.reactor.addActiveHandle(self)
+        if not self._readScheduled and not self.reading:
+            self.reading = True
+            self._readScheduled = self.reactor.callLater(0,
+                                                         self._resumeReading)
+
+
+    def stopReading(self):
+        if self._readScheduled:
+            self._readScheduled.cancel()
+            self._readScheduled = None
+        self.reading = False
+
+
+    def _resumeReading(self):
+        self._readScheduled = None
+        if self._dispatchData() and not self._readScheduledInOS:
+            self.doRead()
+
+
+    def _dispatchData(self):
+        """
+        Dispatch previously read data. Return True if self.reading and we don't
+        have any more data
+        """
+        if not self._readSize:
+            return self.reading
+        size = self._readSize
+        full_buffers = size // self.readBufferSize
+        while self._readNextBuffer < full_buffers:
+            self.dataReceived(self._readBuffers[self._readNextBuffer])
+            self._readNextBuffer += 1
+            if not self.reading:
+                return False
+        remainder = size % self.readBufferSize
+        if remainder:
+            self.dataReceived(buffer(self._readBuffers[full_buffers],
+                                     0, remainder))
+        if self.dynamicReadBuffers:
+            total_buffer_size = self.readBufferSize * len(self._readBuffers)
+            # we have one buffer too many
+            if size < total_buffer_size - self.readBufferSize:
+                del self._readBuffers[-1]
+            # we filled all buffers, so allocate one more
+            elif (size == total_buffer_size and
+                  len(self._readBuffers) < self.maxReadBuffers):
+                self._readBuffers.append(_iocp.AllocateReadBuffer(
+                                            self.readBufferSize))
+        self._readNextBuffer = 0
+        self._readSize = 0
+        return self.reading
+
+
+    def _cbRead(self, rc, bytes, evt):
+        self._readScheduledInOS = False
+        if self._handleRead(rc, bytes, evt):
+            self.doRead()
+
+
+    def _handleRead(self, rc, bytes, evt):
+        """
+        Returns False if we should stop reading for now
+        """
+        if self.disconnected:
+            return False
+        # graceful disconnection
+        if (not (rc or bytes)) or rc in (errno.WSAEDISCON, ERROR_HANDLE_EOF):
+            self.reactor.removeActiveHandle(self)
+            self.readConnectionLost(failure.Failure(main.CONNECTION_DONE))
+            return False
+        # XXX: not handling WSAEWOULDBLOCK
+        # ("too many outstanding overlapped I/O requests")
+        elif rc:
+            self.connectionLost(failure.Failure(
+                                error.ConnectionLost("read error -- %s (%s)" %
+                                    (errno.errorcode.get(rc, 'unknown'), rc))))
+            return False
+        else:
+            assert self._readSize == 0
+            assert self._readNextBuffer == 0
+            self._readSize = bytes
+            return self._dispatchData()
+
+
+    def doRead(self):
+        evt = _iocp.Event(self._cbRead, self)
+
+        evt.buff = buff = self._readBuffers
+        rc, bytes = self.readFromHandle(buff, evt)
+
+        if not rc or rc == ERROR_IO_PENDING:
+            self._readScheduledInOS = True
+        else:
+            self._handleRead(rc, bytes, evt)
+
+
+    def readFromHandle(self, bufflist, evt):
+        raise NotImplementedError() # TODO: this should default to ReadFile
+
+
+    def dataReceived(self, data):
+        raise NotImplementedError
+
+
+    def readConnectionLost(self, reason):
+        self.connectionLost(reason)
+
+
+    # write stuff
+    dataBuffer = ''
+    offset = 0
+    writing = False
+    _writeScheduled = None
+    _writeDisconnecting = False
+    _writeDisconnected = False
+    writeBufferSize = 2**2**2**2 # 2 ** 16 == 65536 bytes
+
+
+    def loseWriteConnection(self):
+        self._writeDisconnecting = True
+        self.startWriting()
+
+
+    def _closeWriteConnection(self):
+        # override in subclasses
+        pass
+
+
+    def writeConnectionLost(self, reason):
+        # in current code should never be called
+        self.connectionLost(reason)
+
+
+    def startWriting(self):
+        self.reactor.addActiveHandle(self)
+        self.writing = True
+        if not self._writeScheduled:
+            self._writeScheduled = self.reactor.callLater(0,
+                                                          self._resumeWriting)
+
+
+    def stopWriting(self):
+        if self._writeScheduled:
+            self._writeScheduled.cancel()
+            self._writeScheduled = None
+        self.writing = False
+
+
+    def _resumeWriting(self):
+        self._writeScheduled = None
+        self.doWrite()
+
+
+    def _cbWrite(self, rc, bytes, evt):
+        if self._handleWrite(rc, bytes, evt):
+            self.doWrite()
+
+
+    def _handleWrite(self, rc, bytes, evt):
+        """
+        Returns false if we should stop writing for now
+        """
+        if self.disconnected or self._writeDisconnected:
+            return False
+        # XXX: not handling WSAEWOULDBLOCK
+        # ("too many outstanding overlapped I/O requests")
+        if rc:
+            self.connectionLost(failure.Failure(
+                                error.ConnectionLost("write error -- %s (%s)" %
+                                    (errno.errorcode.get(rc, 'unknown'), rc))))
+            return False
+        else:
+            self.offset += bytes
+            # If there is nothing left to send,
+            if self.offset == len(self.dataBuffer) and not self._tempDataLen:
+                self.dataBuffer = ""
+                self.offset = 0
+                # stop writing
+                self.stopWriting()
+                # If I've got a producer who is supposed to supply me with data
+                if self.producer is not None and ((not self.streamingProducer)
+                                                  or self.producerPaused):
+                    # tell them to supply some more.
+                    self.producerPaused = True
+                    self.producer.resumeProducing()
+                elif self.disconnecting:
+                    # But if I was previously asked to let the connection die,
+                    # do so.
+                    self.connectionLost(failure.Failure(main.CONNECTION_DONE))
+                elif self._writeDisconnecting:
+                    # I was previously asked to half-close the connection.
+                    self._writeDisconnected = True
+                    self._closeWriteConnection()
+                return False
+            else:
+                return True
+
+
+    def doWrite(self):
+        if len(self.dataBuffer) - self.offset < self.SEND_LIMIT:
+            # If there is currently less than SEND_LIMIT bytes left to send
+            # in the string, extend it with the array data.
+            self.dataBuffer = (buffer(self.dataBuffer, self.offset) +
+                               "".join(self._tempDataBuffer))
+            self.offset = 0
+            self._tempDataBuffer = []
+            self._tempDataLen = 0
+
+        evt = _iocp.Event(self._cbWrite, self)
+
+        # Send as much data as you can.
+        if self.offset:
+            evt.buff = buff = buffer(self.dataBuffer, self.offset)
+        else:
+            evt.buff = buff = self.dataBuffer
+        rc, bytes = self.writeToHandle(buff, evt)
+        if rc and rc != ERROR_IO_PENDING:
+            self._handleWrite(rc, bytes, evt)
+
+
+    def writeToHandle(self, buff, evt):
+        raise NotImplementedError() # TODO: this should default to WriteFile
+
+
+    def write(self, data):
+        """Reliably write some data.
+
+        The data is buffered until this file descriptor is ready for writing.
+        """
+        if isinstance(data, unicode): # no, really, I mean it
+            raise TypeError("Data must not be unicode")
+        if not self.connected or self._writeDisconnected:
+            return
+        if data:
+            self._tempDataBuffer.append(data)
+            self._tempDataLen += len(data)
+            if self.producer is not None and self.streamingProducer:
+                if (len(self.dataBuffer) + self._tempDataLen
+                    > self.writeBufferSize):
+                    self.producerPaused = True
+                    self.producer.pauseProducing()
+            self.startWriting()
+
+
+    def writeSequence(self, iovec):
+        for i in iovec:
+            if isinstance(i, unicode): # no, really, I mean it
+                raise TypeError("Data must not be unicode")
+        if not self.connected or not iovec or self._writeDisconnected:
+            return
+        self._tempDataBuffer.extend(iovec)
+        for i in iovec:
+            self._tempDataLen += len(i)
+        if self.producer is not None and self.streamingProducer:
+            if len(self.dataBuffer) + self._tempDataLen > self.writeBufferSize:
+                self.producerPaused = True
+                self.producer.pauseProducing()
+        self.startWriting()
+
+
+    # general stuff
+    connected = False
+    disconnected = False
+    disconnecting = False
+    logstr = "Uninitialized"
+
+    SEND_LIMIT = 128*1024
+
+
+    def __init__(self, reactor = None):
+        if not reactor:
+            from twisted.internet import reactor
+        self.reactor = reactor
+        self._tempDataBuffer = [] # will be added to dataBuffer in doWrite
+        self._tempDataLen = 0
+        self._readBuffers = [_iocp.AllocateReadBuffer(self.readBufferSize)]
+
+
+    def connectionLost(self, reason):
+        """
+        The connection was lost.
+
+        This is called when the connection on a selectable object has been
+        lost.  It will be called whether the connection was closed explicitly,
+        an exception occurred in an event handler, or the other end of the
+        connection closed it first.
+
+        Clean up state here, but make sure to call back up to FileDescriptor.
+        """
+
+        self.disconnected = True
+        self.connected = False
+        if self.producer is not None:
+            self.producer.stopProducing()
+            self.producer = None
+        self.stopReading()
+        self.stopWriting()
+        self.reactor.removeActiveHandle(self)
+
+
+    def getFileHandle(self):
+        return -1
+
+
+    def loseConnection(self, _connDone=failure.Failure(main.CONNECTION_DONE)):
+        """
+        Close the connection at the next available opportunity.
+
+        Call this to cause this FileDescriptor to lose its connection.  It will
+        first write any data that it has buffered.
+
+        If there is data buffered yet to be written, this method will cause the
+        transport to lose its connection as soon as it's done flushing its
+        write buffer.  If you have a producer registered, the connection won't
+        be closed until the producer is finished. Therefore, make sure you
+        unregister your producer when it's finished, or the connection will
+        never close.
+        """
+
+        if self.connected and not self.disconnecting:
+            if self._writeDisconnected:
+                # doWrite won't trigger the connection close anymore
+                self.stopReading()
+                self.stopWriting()
+                self.connectionLost(_connDone)
+            else:
+                self.stopReading()
+                self.startWriting()
+                self.disconnecting = 1
+
+
+    # Producer/consumer implementation
+
+    def stopConsuming(self):
+        """
+        Stop consuming data.
+
+        This is called when a producer has lost its connection, to tell the
+        consumer to go lose its connection (and break potential circular
+        references).
+        """
+        self.unregisterProducer()
+        self.loseConnection()
+
+
+    # producer interface implementation
+
+    def resumeProducing(self):
+        assert self.connected and not self.disconnecting
+        self.startReading()
+
+
+    def pauseProducing(self):
+        self.stopReading()
+
+
+    def stopProducing(self):
+        self.loseConnection()
+
+
+__all__ = ['FileHandle']
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/build.bat b/ThirdParty/Twisted/twisted/internet/iocpreactor/build.bat
new file mode 100755
index 0000000..25f361b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/build.bat
@@ -0,0 +1,4 @@
+del iocpsupport\iocpsupport.c iocpsupport.pyd
+del /f /s /q build
+python setup.py build_ext -i -c mingw32
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/const.py b/ThirdParty/Twisted/twisted/internet/iocpreactor/const.py
new file mode 100644
index 0000000..dbeb094
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/const.py
@@ -0,0 +1,26 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Windows constants for IOCP
+"""
+
+
+# this stuff should really be gotten from Windows headers via pyrex, but it
+# probably is not going to change
+
+ERROR_PORT_UNREACHABLE = 1234
+ERROR_NETWORK_UNREACHABLE = 1231
+ERROR_CONNECTION_REFUSED = 1225
+ERROR_IO_PENDING = 997
+ERROR_OPERATION_ABORTED = 995
+WAIT_TIMEOUT = 258
+ERROR_NETNAME_DELETED = 64
+ERROR_HANDLE_EOF = 38
+
+INFINITE = -1
+
+SO_UPDATE_CONNECT_CONTEXT = 0x7010
+SO_UPDATE_ACCEPT_CONTEXT = 0x700B
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/interfaces.py b/ThirdParty/Twisted/twisted/internet/iocpreactor/interfaces.py
new file mode 100644
index 0000000..9e4d3ca
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/interfaces.py
@@ -0,0 +1,47 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Interfaces for iocpreactor
+"""
+
+
+from zope.interface import Interface
+
+
+
+class IReadHandle(Interface):
+    def readFromHandle(bufflist, evt):
+        """
+        Read into the given buffers from this handle.
+
+        @param bufflist: the buffers to read into
+        @type bufflist: list of objects implementing the read/write buffer protocol
+
+        @param evt: an IOCP Event object
+
+        @return: tuple (return code, number of bytes read)
+        """
+
+
+
+class IWriteHandle(Interface):
+    def writeToHandle(buff, evt):
+        """
+        Write the given buffer to this handle.
+
+        @param buff: the buffer to write
+        @type buff: any object implementing the buffer protocol
+
+        @param evt: an IOCP Event object
+
+        @return: tuple (return code, number of bytes written)
+        """
+
+
+
+class IReadWriteHandle(IReadHandle, IWriteHandle):
+    pass
+
+
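+# Editorial note (illustrative sketch, not part of upstream Twisted): concrete
+# handles in this reactor satisfy IReadWriteHandle by delegating to the
+# iocpsupport wrappers, roughly like the socket-based connections do:
+#
+#     from zope.interface import implements
+#     from twisted.internet.iocpreactor import iocpsupport as _iocp
+#
+#     class SocketReadWriteHandle(object):
+#         implements(IReadWriteHandle)
+#
+#         def __init__(self, sock):
+#             self.socket = sock
+#
+#         def readFromHandle(self, bufflist, evt):
+#             return _iocp.recv(self.socket.fileno(), bufflist, evt)
+#
+#         def writeToHandle(self, buff, evt):
+#             return _iocp.send(self.socket.fileno(), buff, evt)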
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/acceptex.pxi b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/acceptex.pxi
new file mode 100644
index 0000000..867736d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/acceptex.pxi
@@ -0,0 +1,46 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+def accept(long listening, long accepting, object buff, object obj):
+    """
+    CAUTION: unlike system AcceptEx(), this function returns 0 on success
+    """
+    cdef unsigned long bytes
+    cdef int rc
+    cdef Py_ssize_t size
+    cdef void *mem_buffer
+    cdef myOVERLAPPED *ov
+
+    PyObject_AsWriteBuffer(buff, &mem_buffer, &size)
+
+    ov = makeOV()
+    if obj is not None:
+        ov.obj = <PyObject *>obj
+
+    rc = lpAcceptEx(listening, accepting, mem_buffer, 0,
+                    <DWORD>size / 2, <DWORD>size / 2,
+                    &bytes, <OVERLAPPED *>ov)
+    if not rc:
+        rc = WSAGetLastError()
+        if rc != ERROR_IO_PENDING:
+            PyMem_Free(ov)
+            return rc
+
+    # operation is in progress
+    Py_XINCREF(obj)
+    return 0
+
+def get_accept_addrs(long s, object buff):
+    cdef WSAPROTOCOL_INFO wsa_pi
+    cdef int locallen, remotelen
+    cdef Py_ssize_t size
+    cdef void *mem_buffer
+    cdef sockaddr *localaddr, *remoteaddr
+
+    PyObject_AsReadBuffer(buff, &mem_buffer, &size)
+
+    lpGetAcceptExSockaddrs(mem_buffer, 0, <DWORD>size / 2, <DWORD>size / 2,
+                           &localaddr, &locallen, &remoteaddr, &remotelen)
+    return remoteaddr.sa_family, _makesockaddr(localaddr, locallen), _makesockaddr(remoteaddr, remotelen)
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/connectex.pxi b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/connectex.pxi
new file mode 100644
index 0000000..276638a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/connectex.pxi
@@ -0,0 +1,47 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+def connect(long s, object addr, object obj):
+    """
+    CAUTION: unlike system ConnectEx(), this function returns 0 on success
+    """
+    cdef int family, rc
+    cdef myOVERLAPPED *ov
+    cdef sockaddr_in ipv4_name
+    cdef sockaddr_in6 ipv6_name
+    cdef sockaddr *name
+    cdef int namelen
+
+    if not have_connectex:
+        raise ValueError, 'ConnectEx is not available on this system'
+
+    family = getAddrFamily(s)
+    if family == AF_INET:
+        name = <sockaddr *>&ipv4_name
+        namelen = sizeof(ipv4_name)
+        fillinetaddr(&ipv4_name, addr)
+    elif family == AF_INET6:
+        name = <sockaddr *>&ipv6_name
+        namelen = sizeof(ipv6_name)
+        fillinet6addr(&ipv6_name, addr)
+    else:
+        raise ValueError, 'unsupported address family'
+    name.sa_family = family
+
+    ov = makeOV()
+    if obj is not None:
+        ov.obj = <PyObject *>obj
+
+    rc = lpConnectEx(s, name, namelen, NULL, 0, NULL, <OVERLAPPED *>ov)
+
+    if not rc:
+        rc = WSAGetLastError()
+        if rc != ERROR_IO_PENDING:
+            PyMem_Free(ov)
+            return rc
+
+    # operation is in progress
+    Py_XINCREF(obj)
+    return 0
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/iocpsupport.c b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/iocpsupport.c
new file mode 100644
index 0000000..deb4b22
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/iocpsupport.c
@@ -0,0 +1,6376 @@
+/* Generated by Cython 0.15.1 on Tue Mar 27 07:16:06 2012 */
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+#ifndef Py_PYTHON_H
+    #error Python headers needed to compile C extensions, please install development version of Python.
+#else
+
+#include <stddef.h> /* For offsetof */
+#ifndef offsetof
+#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+  #ifndef __stdcall
+    #define __stdcall
+  #endif
+  #ifndef __cdecl
+    #define __cdecl
+  #endif
+  #ifndef __fastcall
+    #define __fastcall
+  #endif
+#endif
+
+#ifndef DL_IMPORT
+  #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+  #define DL_EXPORT(t) t
+#endif
+
+#ifndef PY_LONG_LONG
+  #define PY_LONG_LONG LONG_LONG
+#endif
+
+#if PY_VERSION_HEX < 0x02040000
+  #define METH_COEXIST 0
+  #define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type)
+  #define PyDict_Contains(d,o)   PySequence_Contains(d,o)
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  typedef int Py_ssize_t;
+  #define PY_SSIZE_T_MAX INT_MAX
+  #define PY_SSIZE_T_MIN INT_MIN
+  #define PY_FORMAT_SIZE_T ""
+  #define PyInt_FromSsize_t(z) PyInt_FromLong(z)
+  #define PyInt_AsSsize_t(o)   __Pyx_PyInt_AsInt(o)
+  #define PyNumber_Index(o)    PyNumber_Int(o)
+  #define PyIndex_Check(o)     PyNumber_Check(o)
+  #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message)
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt)
+  #define Py_TYPE(ob)   (((PyObject*)(ob))->ob_type)
+  #define Py_SIZE(ob)   (((PyVarObject*)(ob))->ob_size)
+  #define PyVarObject_HEAD_INIT(type, size) \
+          PyObject_HEAD_INIT(type) size,
+  #define PyType_Modified(t)
+
+  typedef struct {
+     void *buf;
+     PyObject *obj;
+     Py_ssize_t len;
+     Py_ssize_t itemsize;
+     int readonly;
+     int ndim;
+     char *format;
+     Py_ssize_t *shape;
+     Py_ssize_t *strides;
+     Py_ssize_t *suboffsets;
+     void *internal;
+  } Py_buffer;
+
+  #define PyBUF_SIMPLE 0
+  #define PyBUF_WRITABLE 0x0001
+  #define PyBUF_FORMAT 0x0004
+  #define PyBUF_ND 0x0008
+  #define PyBUF_STRIDES (0x0010 | PyBUF_ND)
+  #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES)
+  #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES)
+  #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES)
+  #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES)
+
+#endif
+
+#if PY_MAJOR_VERSION < 3
+  #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+#else
+  #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define Py_TPFLAGS_CHECKTYPES 0
+  #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+
+#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3)
+  #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyBaseString_Type            PyUnicode_Type
+  #define PyStringObject               PyUnicodeObject
+  #define PyString_Type                PyUnicode_Type
+  #define PyString_Check               PyUnicode_Check
+  #define PyString_CheckExact          PyUnicode_CheckExact
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define PyBytesObject                PyStringObject
+  #define PyBytes_Type                 PyString_Type
+  #define PyBytes_Check                PyString_Check
+  #define PyBytes_CheckExact           PyString_CheckExact
+  #define PyBytes_FromString           PyString_FromString
+  #define PyBytes_FromStringAndSize    PyString_FromStringAndSize
+  #define PyBytes_FromFormat           PyString_FromFormat
+  #define PyBytes_DecodeEscape         PyString_DecodeEscape
+  #define PyBytes_AsString             PyString_AsString
+  #define PyBytes_AsStringAndSize      PyString_AsStringAndSize
+  #define PyBytes_Size                 PyString_Size
+  #define PyBytes_AS_STRING            PyString_AS_STRING
+  #define PyBytes_GET_SIZE             PyString_GET_SIZE
+  #define PyBytes_Repr                 PyString_Repr
+  #define PyBytes_Concat               PyString_Concat
+  #define PyBytes_ConcatAndDel         PyString_ConcatAndDel
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define PySet_Check(obj)             PyObject_TypeCheck(obj, &PySet_Type)
+  #define PyFrozenSet_Check(obj)       PyObject_TypeCheck(obj, &PyFrozenSet_Type)
+#endif
+#ifndef PySet_CheckExact
+  #define PySet_CheckExact(obj)        (Py_TYPE(obj) == &PySet_Type)
+#endif
+
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyIntObject                  PyLongObject
+  #define PyInt_Type                   PyLong_Type
+  #define PyInt_Check(op)              PyLong_Check(op)
+  #define PyInt_CheckExact(op)         PyLong_CheckExact(op)
+  #define PyInt_FromString             PyLong_FromString
+  #define PyInt_FromUnicode            PyLong_FromUnicode
+  #define PyInt_FromLong               PyLong_FromLong
+  #define PyInt_FromSize_t             PyLong_FromSize_t
+  #define PyInt_FromSsize_t            PyLong_FromSsize_t
+  #define PyInt_AsLong                 PyLong_AsLong
+  #define PyInt_AS_LONG                PyLong_AS_LONG
+  #define PyInt_AsSsize_t              PyLong_AsSsize_t
+  #define PyInt_AsUnsignedLongMask     PyLong_AsUnsignedLongMask
+  #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyBoolObject                 PyLongObject
+#endif
+
+#if PY_VERSION_HEX < 0x03020000
+  typedef long Py_hash_t;
+  #define __Pyx_PyInt_FromHash_t PyInt_FromLong
+  #define __Pyx_PyInt_AsHash_t   PyInt_AsLong
+#else
+  #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
+  #define __Pyx_PyInt_AsHash_t   PyInt_AsSsize_t
+#endif
+
+
+#if PY_MAJOR_VERSION >= 3
+  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_TrueDivide(x,y)
+  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceTrueDivide(x,y)
+#else
+  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_Divide(x,y)
+  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceDivide(x,y)
+#endif
+
+#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300)
+  #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b)
+  #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value)
+  #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b)
+#else
+  #define __Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0)))
+  #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1)))
+  #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1)))
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func))
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  #define __Pyx_GetAttrString(o,n)   PyObject_GetAttrString((o),((char *)(n)))
+  #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a))
+  #define __Pyx_DelAttrString(o,n)   PyObject_DelAttrString((o),((char *)(n)))
+#else
+  #define __Pyx_GetAttrString(o,n)   PyObject_GetAttrString((o),(n))
+  #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a))
+  #define __Pyx_DelAttrString(o,n)   PyObject_DelAttrString((o),(n))
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  #define __Pyx_NAMESTR(n) ((char *)(n))
+  #define __Pyx_DOCSTR(n)  ((char *)(n))
+#else
+  #define __Pyx_NAMESTR(n) (n)
+  #define __Pyx_DOCSTR(n)  (n)
+#endif
+
+#ifndef __PYX_EXTERN_C
+  #ifdef __cplusplus
+    #define __PYX_EXTERN_C extern "C"
+  #else
+    #define __PYX_EXTERN_C extern
+  #endif
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+#define _USE_MATH_DEFINES
+#endif
+#include <math.h>
+#define __PYX_HAVE__iocpsupport
+#define __PYX_HAVE_API__iocpsupport
+#include "io.h"
+#include "errno.h"
+#include "winsock2.h"
+#include "ws2tcpip.h"
+#include "windows.h"
+#include "python.h"
+#include "string.h"
+#include "winsock_pointers.h"
+#ifdef _OPENMP
+#include <omp.h>
+#endif /* _OPENMP */
+
+#ifdef PYREX_WITHOUT_ASSERTIONS
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+
+/* inline attribute */
+#ifndef CYTHON_INLINE
+  #if defined(__GNUC__)
+    #define CYTHON_INLINE __inline__
+  #elif defined(_MSC_VER)
+    #define CYTHON_INLINE __inline
+  #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+    #define CYTHON_INLINE inline
+  #else
+    #define CYTHON_INLINE
+  #endif
+#endif
+
+/* unused attribute */
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+#   if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+#     define CYTHON_UNUSED __attribute__ ((__unused__))
+#   else
+#     define CYTHON_UNUSED
+#   endif
+# elif defined(__ICC) || defined(__INTEL_COMPILER)
+#   define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+#   define CYTHON_UNUSED
+# endif
+#endif
+
+typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/
+
+
+/* Type Conversion Predeclarations */
+
+#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s)
+#define __Pyx_PyBytes_AsUString(s)   ((unsigned char*) PyBytes_AsString(s))
+
+#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None)
+#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False))
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x);
+
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*);
+
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+
+
+#ifdef __GNUC__
+  /* Test for GCC > 2.95 */
+  #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))
+    #define likely(x)   __builtin_expect(!!(x), 1)
+    #define unlikely(x) __builtin_expect(!!(x), 0)
+  #else /* __GNUC__ > 2 ... */
+    #define likely(x)   (x)
+    #define unlikely(x) (x)
+  #endif /* __GNUC__ > 2 ... */
+#else /* __GNUC__ */
+  #define likely(x)   (x)
+  #define unlikely(x) (x)
+#endif /* __GNUC__ */
+    
+static PyObject *__pyx_m;
+static PyObject *__pyx_b;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+
+static const char *__pyx_f[] = {
+  "iocpsupport.pyx",
+  "acceptex.pxi",
+  "connectex.pxi",
+  "wsarecv.pxi",
+  "wsasend.pxi",
+};
+
+/* "iocpsupport.pyx":6
+ * 
+ * # HANDLE and SOCKET are pointer-sized (they are 64 bit wide in 64-bit builds)
+ * ctypedef size_t HANDLE             # <<<<<<<<<<<<<<
+ * ctypedef size_t SOCKET
+ * ctypedef unsigned long DWORD
+ */
+typedef size_t __pyx_t_11iocpsupport_HANDLE;
+
+/* "iocpsupport.pyx":7
+ * # HANDLE and SOCKET are pointer-sized (they are 64 bit wide in 64-bit builds)
+ * ctypedef size_t HANDLE
+ * ctypedef size_t SOCKET             # <<<<<<<<<<<<<<
+ * ctypedef unsigned long DWORD
+ * # it's really a pointer, but we use it as an integer
+ */
+typedef size_t __pyx_t_11iocpsupport_SOCKET;
+
+/* "iocpsupport.pyx":8
+ * ctypedef size_t HANDLE
+ * ctypedef size_t SOCKET
+ * ctypedef unsigned long DWORD             # <<<<<<<<<<<<<<
+ * # it's really a pointer, but we use it as an integer
+ * ctypedef size_t ULONG_PTR
+ */
+typedef unsigned long __pyx_t_11iocpsupport_DWORD;
+
+/* "iocpsupport.pyx":10
+ * ctypedef unsigned long DWORD
+ * # it's really a pointer, but we use it as an integer
+ * ctypedef size_t ULONG_PTR             # <<<<<<<<<<<<<<
+ * ctypedef int BOOL
+ * 
+ */
+typedef size_t __pyx_t_11iocpsupport_ULONG_PTR;
+
+/* "iocpsupport.pyx":11
+ * # it's really a pointer, but we use it as an integer
+ * ctypedef size_t ULONG_PTR
+ * ctypedef int BOOL             # <<<<<<<<<<<<<<
+ * 
+ * cdef extern from 'io.h':
+ */
+typedef int __pyx_t_11iocpsupport_BOOL;
+
+/*--- Type declarations ---*/
+struct __pyx_obj_11iocpsupport_CompletionPort;
+struct __pyx_t_11iocpsupport_myOVERLAPPED;
+
+/* "iocpsupport.pyx":124
+ * #    BOOL (*lpTransmitFile)(SOCKET s, HANDLE hFile, DWORD size, DWORD buffer_size, OVERLAPPED *ov, TRANSMIT_FILE_BUFFERS *buff, DWORD flags)
+ * 
+ * cdef struct myOVERLAPPED:             # <<<<<<<<<<<<<<
+ *     OVERLAPPED ov
+ *     PyObject *obj
+ */
+struct __pyx_t_11iocpsupport_myOVERLAPPED {
+  OVERLAPPED ov;
+  struct PyObject *obj;
+};
+
+/* "iocpsupport.pyx":148
+ *             setattr(self, k, v)
+ * 
+ * cdef class CompletionPort:             # <<<<<<<<<<<<<<
+ *     cdef HANDLE port
+ *     def __init__(self):
+ */
+struct __pyx_obj_11iocpsupport_CompletionPort {
+  PyObject_HEAD
+  __pyx_t_11iocpsupport_HANDLE port;
+};
+
+
+#ifndef CYTHON_REFNANNY
+  #define CYTHON_REFNANNY 0
+#endif
+
+#if CYTHON_REFNANNY
+  typedef struct {
+    void (*INCREF)(void*, PyObject*, int);
+    void (*DECREF)(void*, PyObject*, int);
+    void (*GOTREF)(void*, PyObject*, int);
+    void (*GIVEREF)(void*, PyObject*, int);
+    void* (*SetupContext)(const char*, int, const char*);
+    void (*FinishContext)(void**);
+  } __Pyx_RefNannyAPIStruct;
+  static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+  static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/
+  #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+  #define __Pyx_RefNannySetupContext(name)           __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+  #define __Pyx_RefNannyFinishContext()           __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+  #define __Pyx_INCREF(r)  __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_DECREF(r)  __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_GOTREF(r)  __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_XINCREF(r)  do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+  #define __Pyx_XDECREF(r)  do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+  #define __Pyx_XGOTREF(r)  do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+  #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+  #define __Pyx_RefNannyDeclarations
+  #define __Pyx_RefNannySetupContext(name)
+  #define __Pyx_RefNannyFinishContext()
+  #define __Pyx_INCREF(r) Py_INCREF(r)
+  #define __Pyx_DECREF(r) Py_DECREF(r)
+  #define __Pyx_GOTREF(r)
+  #define __Pyx_GIVEREF(r)
+  #define __Pyx_XINCREF(r) Py_XINCREF(r)
+  #define __Pyx_XDECREF(r) Py_XDECREF(r)
+  #define __Pyx_XGOTREF(r)
+  #define __Pyx_XGIVEREF(r)
+#endif /* CYTHON_REFNANNY */
+
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/
+
+static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/
+
+static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
+    Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/
+
+static void __Pyx_RaiseDoubleKeywordsError(
+    const char* func_name, PyObject* kw_name); /*proto*/
+
+static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],     PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,     const char* function_name); /*proto*/
+
+static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index);
+
+static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected);
+
+static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected); /*proto*/
+
+static CYTHON_INLINE int __Pyx_CheckKeywordStrings(PyObject *kwdict,
+    const char* function_name, int kw_allowed); /*proto*/
+
+
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Generic(PyObject *o, PyObject* j) {
+    PyObject *r;
+    if (!j) return NULL;
+    r = PyObject_GetItem(o, j);
+    Py_DECREF(j);
+    return r;
+}
+
+
+#define __Pyx_GetItemInt_List(o, i, size, to_py_func) (((size) <= sizeof(Py_ssize_t)) ? \
+                                                    __Pyx_GetItemInt_List_Fast(o, i) : \
+                                                    __Pyx_GetItemInt_Generic(o, to_py_func(i)))
+
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_List_Fast(PyObject *o, Py_ssize_t i) {
+    if (likely(o != Py_None)) {
+        if (likely((0 <= i) & (i < PyList_GET_SIZE(o)))) {
+            PyObject *r = PyList_GET_ITEM(o, i);
+            Py_INCREF(r);
+            return r;
+        }
+        else if ((-PyList_GET_SIZE(o) <= i) & (i < 0)) {
+            PyObject *r = PyList_GET_ITEM(o, PyList_GET_SIZE(o) + i);
+            Py_INCREF(r);
+            return r;
+        }
+    }
+    return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+}
+
+#define __Pyx_GetItemInt_Tuple(o, i, size, to_py_func) (((size) <= sizeof(Py_ssize_t)) ? \
+                                                    __Pyx_GetItemInt_Tuple_Fast(o, i) : \
+                                                    __Pyx_GetItemInt_Generic(o, to_py_func(i)))
+
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Tuple_Fast(PyObject *o, Py_ssize_t i) {
+    if (likely(o != Py_None)) {
+        if (likely((0 <= i) & (i < PyTuple_GET_SIZE(o)))) {
+            PyObject *r = PyTuple_GET_ITEM(o, i);
+            Py_INCREF(r);
+            return r;
+        }
+        else if ((-PyTuple_GET_SIZE(o) <= i) & (i < 0)) {
+            PyObject *r = PyTuple_GET_ITEM(o, PyTuple_GET_SIZE(o) + i);
+            Py_INCREF(r);
+            return r;
+        }
+    }
+    return __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+}
+
+
+#define __Pyx_GetItemInt(o, i, size, to_py_func) (((size) <= sizeof(Py_ssize_t)) ? \
+                                                    __Pyx_GetItemInt_Fast(o, i) : \
+                                                    __Pyx_GetItemInt_Generic(o, to_py_func(i)))
+
+static CYTHON_INLINE PyObject *__Pyx_GetItemInt_Fast(PyObject *o, Py_ssize_t i) {
+    PyObject *r;
+    if (PyList_CheckExact(o) && ((0 <= i) & (i < PyList_GET_SIZE(o)))) {
+        r = PyList_GET_ITEM(o, i);
+        Py_INCREF(r);
+    }
+    else if (PyTuple_CheckExact(o) && ((0 <= i) & (i < PyTuple_GET_SIZE(o)))) {
+        r = PyTuple_GET_ITEM(o, i);
+        Py_INCREF(r);
+    }
+    else if (Py_TYPE(o)->tp_as_sequence && Py_TYPE(o)->tp_as_sequence->sq_item && (likely(i >= 0))) {
+        r = PySequence_GetItem(o, i);
+    }
+    else {
+        r = __Pyx_GetItemInt_Generic(o, PyInt_FromSsize_t(i));
+    }
+    return r;
+}
+
+static PyObject *__Pyx_FindPy2Metaclass(PyObject *bases); /*proto*/
+
+static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name,
+                                   PyObject *modname); /*proto*/
+
+#define __pyx_binding_PyCFunctionType_USED 1
+
+typedef struct {
+    PyCFunctionObject func;
+} __pyx_binding_PyCFunctionType_object;
+
+static PyTypeObject __pyx_binding_PyCFunctionType_type;
+static PyTypeObject *__pyx_binding_PyCFunctionType = NULL;
+
+static PyObject *__pyx_binding_PyCFunctionType_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module); /* proto */
+#define __pyx_binding_PyCFunctionType_New(ml, self) __pyx_binding_PyCFunctionType_NewEx(ml, self, NULL)
+
+static int __pyx_binding_PyCFunctionType_init(void); /* proto */
+
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, long level); /*proto*/
+
+#include <string.h>
+
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/
+
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals); /*proto*/
+
+#if PY_MAJOR_VERSION >= 3
+#define __Pyx_PyString_Equals __Pyx_PyUnicode_Equals
+#else
+#define __Pyx_PyString_Equals __Pyx_PyBytes_Equals
+#endif
+
+static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject *);
+
+static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *);
+
+static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *);
+
+static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *);
+
+static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *);
+
+static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *);
+
+static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *);
+
+static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *);
+
+static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *);
+
+static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *);
+
+static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *);
+
+static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *);
+
+static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *);
+
+static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *);
+
+static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *);
+
+static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *);
+
+static int __Pyx_check_binary_version(void);
+
+static void __Pyx_AddTraceback(const char *funcname, int __pyx_clineno,
+                               int __pyx_lineno, const char *__pyx_filename); /*proto*/
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
+
+/* Module declarations from 'iocpsupport' */
+static PyTypeObject *__pyx_ptype_11iocpsupport_CompletionPort = 0;
+static struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_f_11iocpsupport_makeOV(void); /*proto*/
+static void __pyx_f_11iocpsupport_raise_error(int, PyObject *); /*proto*/
+static PyObject *__pyx_f_11iocpsupport__makesockaddr(struct sockaddr *, Py_ssize_t); /*proto*/
+static PyObject *__pyx_f_11iocpsupport_fillinetaddr(struct sockaddr_in *, PyObject *); /*proto*/
+static PyObject *__pyx_f_11iocpsupport_fillinet6addr(struct sockaddr_in6 *, PyObject *); /*proto*/
+static int __pyx_f_11iocpsupport_getAddrFamily(__pyx_t_11iocpsupport_SOCKET); /*proto*/
+#define __Pyx_MODULE_NAME "iocpsupport"
+int __pyx_module_is_main_iocpsupport = 0;
+
+/* Implementation of 'iocpsupport' */
+static PyObject *__pyx_builtin_ValueError;
+static PyObject *__pyx_builtin_MemoryError;
+static PyObject *__pyx_builtin_RuntimeError;
+static char __pyx_k_1[] = "CreateIoCompletionPort";
+static char __pyx_k_2[] = "PostQueuedCompletionStatus";
+static char __pyx_k_3[] = ":";
+static char __pyx_k_5[] = "[";
+static char __pyx_k_6[] = "]";
+static char __pyx_k_7[] = "invalid IP address";
+static char __pyx_k_8[] = "%";
+static char __pyx_k_10[] = "invalid IPv6 address %r";
+static char __pyx_k_11[] = "undefined error occurred during address parsing";
+static char __pyx_k_12[] = "ConnectEx is not available on this system";
+static char __pyx_k_13[] = "unsupported address family";
+static char __pyx_k_14[] = "second argument needs to be a list";
+static char __pyx_k_15[] = "length of address length buffer needs to be sizeof(int)";
+static char __pyx_k_16[] = "Failed to initialize Winsock function vectors";
+static char __pyx_k__s[] = "s";
+static char __pyx_k__key[] = "key";
+static char __pyx_k__obj[] = "obj";
+static char __pyx_k__addr[] = "addr";
+static char __pyx_k__buff[] = "buff";
+static char __pyx_k__recv[] = "recv";
+static char __pyx_k__self[] = "self";
+static char __pyx_k__send[] = "send";
+static char __pyx_k__Event[] = "Event";
+static char __pyx_k__bytes[] = "bytes";
+static char __pyx_k__flags[] = "flags";
+static char __pyx_k__owner[] = "owner";
+static char __pyx_k__split[] = "split";
+static char __pyx_k__accept[] = "accept";
+static char __pyx_k__handle[] = "handle";
+static char __pyx_k__rsplit[] = "rsplit";
+static char __pyx_k__socket[] = "socket";
+static char __pyx_k__connect[] = "connect";
+static char __pyx_k____init__[] = "__init__";
+static char __pyx_k____main__[] = "__main__";
+static char __pyx_k____test__[] = "__test__";
+static char __pyx_k__bufflist[] = "bufflist";
+static char __pyx_k__callback[] = "callback";
+static char __pyx_k__recvfrom[] = "recvfrom";
+static char __pyx_k__accepting[] = "accepting";
+static char __pyx_k__addr_buff[] = "addr_buff";
+static char __pyx_k__listening[] = "listening";
+static char __pyx_k__ValueError[] = "ValueError";
+static char __pyx_k__getsockopt[] = "getsockopt";
+static char __pyx_k__maxAddrLen[] = "maxAddrLen";
+static char __pyx_k__MemoryError[] = "MemoryError";
+static char __pyx_k__iocpsupport[] = "iocpsupport";
+static char __pyx_k__RuntimeError[] = "RuntimeError";
+static char __pyx_k__WindowsError[] = "WindowsError";
+static char __pyx_k__makesockaddr[] = "makesockaddr";
+static char __pyx_k__addr_len_buff[] = "addr_len_buff";
+static char __pyx_k__have_connectex[] = "have_connectex";
+static char __pyx_k__get_accept_addrs[] = "get_accept_addrs";
+static char __pyx_k__AllocateReadBuffer[] = "AllocateReadBuffer";
+static char __pyx_k__WSAAddressToString[] = "WSAAddressToString";
+static PyObject *__pyx_n_s_1;
+static PyObject *__pyx_kp_s_10;
+static PyObject *__pyx_kp_s_11;
+static PyObject *__pyx_kp_s_12;
+static PyObject *__pyx_kp_s_13;
+static PyObject *__pyx_kp_s_15;
+static PyObject *__pyx_kp_s_16;
+static PyObject *__pyx_n_s_2;
+static PyObject *__pyx_kp_s_3;
+static PyObject *__pyx_kp_s_5;
+static PyObject *__pyx_kp_s_6;
+static PyObject *__pyx_kp_s_7;
+static PyObject *__pyx_kp_s_8;
+static PyObject *__pyx_n_s__AllocateReadBuffer;
+static PyObject *__pyx_n_s__Event;
+static PyObject *__pyx_n_s__MemoryError;
+static PyObject *__pyx_n_s__RuntimeError;
+static PyObject *__pyx_n_s__ValueError;
+static PyObject *__pyx_n_s__WSAAddressToString;
+static PyObject *__pyx_n_s__WindowsError;
+static PyObject *__pyx_n_s____init__;
+static PyObject *__pyx_n_s____main__;
+static PyObject *__pyx_n_s____test__;
+static PyObject *__pyx_n_s__accept;
+static PyObject *__pyx_n_s__accepting;
+static PyObject *__pyx_n_s__addr;
+static PyObject *__pyx_n_s__addr_buff;
+static PyObject *__pyx_n_s__addr_len_buff;
+static PyObject *__pyx_n_s__buff;
+static PyObject *__pyx_n_s__bufflist;
+static PyObject *__pyx_n_s__bytes;
+static PyObject *__pyx_n_s__callback;
+static PyObject *__pyx_n_s__connect;
+static PyObject *__pyx_n_s__flags;
+static PyObject *__pyx_n_s__get_accept_addrs;
+static PyObject *__pyx_n_s__getsockopt;
+static PyObject *__pyx_n_s__handle;
+static PyObject *__pyx_n_s__have_connectex;
+static PyObject *__pyx_n_s__iocpsupport;
+static PyObject *__pyx_n_s__key;
+static PyObject *__pyx_n_s__listening;
+static PyObject *__pyx_n_s__makesockaddr;
+static PyObject *__pyx_n_s__maxAddrLen;
+static PyObject *__pyx_n_s__obj;
+static PyObject *__pyx_n_s__owner;
+static PyObject *__pyx_n_s__recv;
+static PyObject *__pyx_n_s__recvfrom;
+static PyObject *__pyx_n_s__rsplit;
+static PyObject *__pyx_n_s__s;
+static PyObject *__pyx_n_s__self;
+static PyObject *__pyx_n_s__send;
+static PyObject *__pyx_n_s__socket;
+static PyObject *__pyx_n_s__split;
+static PyObject *__pyx_int_0;
+static PyObject *__pyx_int_1;
+static PyObject *__pyx_k_tuple_4;
+static PyObject *__pyx_k_tuple_9;
+
+/* "iocpsupport.pyx":128
+ *     PyObject *obj
+ * 
+ * cdef myOVERLAPPED *makeOV() except NULL:             # <<<<<<<<<<<<<<
+ *     cdef myOVERLAPPED *res
+ *     res = <myOVERLAPPED *>PyMem_Malloc(sizeof(myOVERLAPPED))
+ */
+
+static struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_f_11iocpsupport_makeOV(void) {
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_v_res;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_r;
+  __Pyx_RefNannyDeclarations
+  void *__pyx_t_1;
+  int __pyx_t_2;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("makeOV");
+
+  /* "iocpsupport.pyx":130
+ * cdef myOVERLAPPED *makeOV() except NULL:
+ *     cdef myOVERLAPPED *res
+ *     res = <myOVERLAPPED *>PyMem_Malloc(sizeof(myOVERLAPPED))             # <<<<<<<<<<<<<<
+ *     if not res:
+ *         raise MemoryError
+ */
+  __pyx_t_1 = PyMem_Malloc((sizeof(struct __pyx_t_11iocpsupport_myOVERLAPPED))); if (unlikely(__pyx_t_1 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 130; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_res = ((struct __pyx_t_11iocpsupport_myOVERLAPPED *)__pyx_t_1);
+
+  /* "iocpsupport.pyx":131
+ *     cdef myOVERLAPPED *res
+ *     res = <myOVERLAPPED *>PyMem_Malloc(sizeof(myOVERLAPPED))
+ *     if not res:             # <<<<<<<<<<<<<<
+ *         raise MemoryError
+ *     memset(res, 0, sizeof(myOVERLAPPED))
+ */
+  __pyx_t_2 = (!(__pyx_v_res != 0));
+  if (__pyx_t_2) {
+
+    /* "iocpsupport.pyx":132
+ *     res = <myOVERLAPPED *>PyMem_Malloc(sizeof(myOVERLAPPED))
+ *     if not res:
+ *         raise MemoryError             # <<<<<<<<<<<<<<
+ *     memset(res, 0, sizeof(myOVERLAPPED))
+ *     return res
+ */
+    PyErr_NoMemory(); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 132; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L3;
+  }
+  __pyx_L3:;
+
+  /* "iocpsupport.pyx":133
+ *     if not res:
+ *         raise MemoryError
+ *     memset(res, 0, sizeof(myOVERLAPPED))             # <<<<<<<<<<<<<<
+ *     return res
+ * 
+ */
+  memset(__pyx_v_res, 0, (sizeof(struct __pyx_t_11iocpsupport_myOVERLAPPED)));
+
+  /* "iocpsupport.pyx":134
+ *         raise MemoryError
+ *     memset(res, 0, sizeof(myOVERLAPPED))
+ *     return res             # <<<<<<<<<<<<<<
+ * 
+ * cdef void raise_error(int err, object message) except *:
+ */
+  __pyx_r = __pyx_v_res;
+  goto __pyx_L0;
+
+  __pyx_r = 0;
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_AddTraceback("iocpsupport.makeOV", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "iocpsupport.pyx":136
+ *     return res
+ * 
+ * cdef void raise_error(int err, object message) except *:             # <<<<<<<<<<<<<<
+ *     if not err:
+ *         err = GetLastError()
+ */
+
+static void __pyx_f_11iocpsupport_raise_error(int __pyx_v_err, PyObject *__pyx_v_message) {
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("raise_error");
+
+  /* "iocpsupport.pyx":137
+ * 
+ * cdef void raise_error(int err, object message) except *:
+ *     if not err:             # <<<<<<<<<<<<<<
+ *         err = GetLastError()
+ *     raise WindowsError(message, err)
+ */
+  __pyx_t_1 = (!__pyx_v_err);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":138
+ * cdef void raise_error(int err, object message) except *:
+ *     if not err:
+ *         err = GetLastError()             # <<<<<<<<<<<<<<
+ *     raise WindowsError(message, err)
+ * 
+ */
+    __pyx_v_err = GetLastError();
+    goto __pyx_L3;
+  }
+  __pyx_L3:;
+
+  /* "iocpsupport.pyx":139
+ *     if not err:
+ *         err = GetLastError()
+ *     raise WindowsError(message, err)             # <<<<<<<<<<<<<<
+ * 
+ * class Event:
+ */
+  __pyx_t_2 = __Pyx_GetName(__pyx_b, __pyx_n_s__WindowsError); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 139; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_2);
+  __pyx_t_3 = PyInt_FromLong(__pyx_v_err); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 139; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_3);
+  __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 139; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+  __Pyx_INCREF(__pyx_v_message);
+  PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_message);
+  __Pyx_GIVEREF(__pyx_v_message);
+  PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3);
+  __Pyx_GIVEREF(__pyx_t_3);
+  __pyx_t_3 = 0;
+  __pyx_t_3 = PyObject_Call(__pyx_t_2, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 139; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_3);
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+  __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0;
+  __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+  {__pyx_filename = __pyx_f[0]; __pyx_lineno = 139; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_AddTraceback("iocpsupport.raise_error", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+}
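
[Annotation] The generated C above is dense, but the raise_error helper it implements is tiny. A minimal pure-Python sketch of the same logic, assuming a Windows interpreter where ctypes.GetLastError and the WindowsError builtin are available (GetLastError and WindowsError come from the Cython source shown in the comments; the function body here is only illustrative):

    import ctypes

    def raise_error(err, message):
        # Mirror of the cdef helper: fall back to the calling thread's
        # last Win32 error code when the caller passes 0, then raise.
        if not err:
            err = ctypes.GetLastError()
        raise WindowsError(message, err)
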
+
+/* "iocpsupport.pyx":142
+ * 
+ * class Event:
+ *     def __init__(self, callback, owner, **kw):             # <<<<<<<<<<<<<<
+ *         self.callback = callback
+ *         self.owner = owner
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_5Event___init__(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyMethodDef __pyx_mdef_11iocpsupport_5Event___init__ = {__Pyx_NAMESTR("__init__"), (PyCFunction)__pyx_pf_11iocpsupport_5Event___init__, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pf_11iocpsupport_5Event___init__(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  PyObject *__pyx_v_self = 0;
+  PyObject *__pyx_v_callback = 0;
+  PyObject *__pyx_v_owner = 0;
+  PyObject *__pyx_v_kw = 0;
+  PyObject *__pyx_v_k = NULL;
+  PyObject *__pyx_v_v = NULL;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  PyObject *__pyx_t_2 = NULL;
+  Py_ssize_t __pyx_t_3;
+  PyObject *(*__pyx_t_4)(PyObject *);
+  PyObject *__pyx_t_5 = NULL;
+  PyObject *__pyx_t_6 = NULL;
+  PyObject *__pyx_t_7 = NULL;
+  PyObject *(*__pyx_t_8)(PyObject *);
+  int __pyx_t_9;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__self,&__pyx_n_s__callback,&__pyx_n_s__owner,0};
+  __Pyx_RefNannySetupContext("__init__");
+  __pyx_self = __pyx_self;
+  __pyx_v_kw = PyDict_New(); if (unlikely(!__pyx_v_kw)) return NULL;
+  __Pyx_GOTREF(__pyx_v_kw);
+  {
+    PyObject* values[3] = {0,0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__self);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__callback);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 142; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  2:
+        values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__owner);
+        if (likely(values[2])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, 2); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 142; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, __pyx_v_kw, values, PyTuple_GET_SIZE(__pyx_args), "__init__") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 142; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+    }
+    __pyx_v_self = values[0];
+    __pyx_v_callback = values[1];
+    __pyx_v_owner = values[2];
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("__init__", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 142; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_DECREF(__pyx_v_kw); __pyx_v_kw = 0;
+  __Pyx_AddTraceback("iocpsupport.Event.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "iocpsupport.pyx":143
+ * class Event:
+ *     def __init__(self, callback, owner, **kw):
+ *         self.callback = callback             # <<<<<<<<<<<<<<
+ *         self.owner = owner
+ *         for k, v in kw.items():
+ */
+  if (PyObject_SetAttr(__pyx_v_self, __pyx_n_s__callback, __pyx_v_callback) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "iocpsupport.pyx":144
+ *     def __init__(self, callback, owner, **kw):
+ *         self.callback = callback
+ *         self.owner = owner             # <<<<<<<<<<<<<<
+ *         for k, v in kw.items():
+ *             setattr(self, k, v)
+ */
+  if (PyObject_SetAttr(__pyx_v_self, __pyx_n_s__owner, __pyx_v_owner) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 144; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "iocpsupport.pyx":145
+ *         self.callback = callback
+ *         self.owner = owner
+ *         for k, v in kw.items():             # <<<<<<<<<<<<<<
+ *             setattr(self, k, v)
+ * 
+ */
+  if (unlikely(((PyObject *)__pyx_v_kw) == Py_None)) {
+    PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "items"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;} 
+  }
+  __pyx_t_1 = PyDict_Items(__pyx_v_kw); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyList_CheckExact(__pyx_t_1) || PyTuple_CheckExact(__pyx_t_1)) {
+    __pyx_t_2 = __pyx_t_1; __Pyx_INCREF(__pyx_t_2); __pyx_t_3 = 0;
+    __pyx_t_4 = NULL;
+  } else {
+    __pyx_t_3 = -1; __pyx_t_2 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __pyx_t_4 = Py_TYPE(__pyx_t_2)->tp_iternext;
+  }
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  for (;;) {
+    if (PyList_CheckExact(__pyx_t_2)) {
+      if (__pyx_t_3 >= PyList_GET_SIZE(__pyx_t_2)) break;
+      __pyx_t_1 = PyList_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_1); __pyx_t_3++;
+    } else if (PyTuple_CheckExact(__pyx_t_2)) {
+      if (__pyx_t_3 >= PyTuple_GET_SIZE(__pyx_t_2)) break;
+      __pyx_t_1 = PyTuple_GET_ITEM(__pyx_t_2, __pyx_t_3); __Pyx_INCREF(__pyx_t_1); __pyx_t_3++;
+    } else {
+      __pyx_t_1 = __pyx_t_4(__pyx_t_2);
+      if (unlikely(!__pyx_t_1)) {
+        if (PyErr_Occurred()) {
+          if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) PyErr_Clear();
+          else {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+        }
+        break;
+      }
+      __Pyx_GOTREF(__pyx_t_1);
+    }
+    if ((likely(PyTuple_CheckExact(__pyx_t_1))) || (PyList_CheckExact(__pyx_t_1))) {
+      PyObject* sequence = __pyx_t_1;
+      if (likely(PyTuple_CheckExact(sequence))) {
+        if (unlikely(PyTuple_GET_SIZE(sequence) != 2)) {
+          if (PyTuple_GET_SIZE(sequence) > 2) __Pyx_RaiseTooManyValuesError(2);
+          else __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(sequence));
+          {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+        }
+        __pyx_t_5 = PyTuple_GET_ITEM(sequence, 0); 
+        __pyx_t_6 = PyTuple_GET_ITEM(sequence, 1); 
+      } else {
+        if (unlikely(PyList_GET_SIZE(sequence) != 2)) {
+          if (PyList_GET_SIZE(sequence) > 2) __Pyx_RaiseTooManyValuesError(2);
+          else __Pyx_RaiseNeedMoreValuesError(PyList_GET_SIZE(sequence));
+          {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+        }
+        __pyx_t_5 = PyList_GET_ITEM(sequence, 0); 
+        __pyx_t_6 = PyList_GET_ITEM(sequence, 1); 
+      }
+      __Pyx_INCREF(__pyx_t_5);
+      __Pyx_INCREF(__pyx_t_6);
+      __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+    } else {
+      Py_ssize_t index = -1;
+      __pyx_t_7 = PyObject_GetIter(__pyx_t_1); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_7);
+      __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+      __pyx_t_8 = Py_TYPE(__pyx_t_7)->tp_iternext;
+      index = 0; __pyx_t_5 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_5)) goto __pyx_L8_unpacking_failed;
+      __Pyx_GOTREF(__pyx_t_5);
+      index = 1; __pyx_t_6 = __pyx_t_8(__pyx_t_7); if (unlikely(!__pyx_t_6)) goto __pyx_L8_unpacking_failed;
+      __Pyx_GOTREF(__pyx_t_6);
+      if (__Pyx_IternextUnpackEndCheck(__pyx_t_8(__pyx_t_7), 2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+      goto __pyx_L9_unpacking_done;
+      __pyx_L8_unpacking_failed:;
+      __Pyx_DECREF(__pyx_t_7); __pyx_t_7 = 0;
+      if (PyErr_Occurred() && PyErr_ExceptionMatches(PyExc_StopIteration)) PyErr_Clear();
+      if (!PyErr_Occurred()) __Pyx_RaiseNeedMoreValuesError(index);
+      {__pyx_filename = __pyx_f[0]; __pyx_lineno = 145; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __pyx_L9_unpacking_done:;
+    }
+    __Pyx_XDECREF(__pyx_v_k);
+    __pyx_v_k = __pyx_t_5;
+    __pyx_t_5 = 0;
+    __Pyx_XDECREF(__pyx_v_v);
+    __pyx_v_v = __pyx_t_6;
+    __pyx_t_6 = 0;
+
+    /* "iocpsupport.pyx":146
+ *         self.owner = owner
+ *         for k, v in kw.items():
+ *             setattr(self, k, v)             # <<<<<<<<<<<<<<
+ * 
+ * cdef class CompletionPort:
+ */
+    __pyx_t_9 = PyObject_SetAttr(__pyx_v_self, __pyx_v_k, __pyx_v_v); if (unlikely(__pyx_t_9 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 146; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  }
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_XDECREF(__pyx_t_6);
+  __Pyx_XDECREF(__pyx_t_7);
+  __Pyx_AddTraceback("iocpsupport.Event.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XDECREF(__pyx_v_kw);
+  __Pyx_XDECREF(__pyx_v_k);
+  __Pyx_XDECREF(__pyx_v_v);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
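
[Annotation] The Event class compiled above is just an attribute bag: it stores callback and owner and turns any extra keyword arguments into attributes via setattr(). A hedged usage sketch, assuming the compiled extension is importable as iocpsupport (in Twisted it normally lives under twisted.internet.iocpreactor); the callback and owner objects are stand-ins:

    from iocpsupport import Event        # Windows-only compiled extension

    def on_complete(rc, nbytes, evt):
        pass                              # placeholder completion callback

    owner = object()                      # stand-in for the owning transport
    evt = Event(on_complete, owner, listening=True)
    assert evt.callback is on_complete
    assert evt.owner is owner
    assert evt.listening is True          # extra keywords become attributes
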
+
+/* "iocpsupport.pyx":150
+ * cdef class CompletionPort:
+ *     cdef HANDLE port
+ *     def __init__(self):             # <<<<<<<<<<<<<<
+ *         cdef HANDLE res
+ *         res = CreateIoCompletionPort(INVALID_HANDLE_VALUE, 0, 0, 0)
+ */
+
+static int __pyx_pf_11iocpsupport_14CompletionPort___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static int __pyx_pf_11iocpsupport_14CompletionPort___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  __pyx_t_11iocpsupport_HANDLE __pyx_v_res;
+  int __pyx_r;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("__init__");
+  if (unlikely(PyTuple_GET_SIZE(__pyx_args) > 0)) {
+    __Pyx_RaiseArgtupleInvalid("__init__", 1, 0, 0, PyTuple_GET_SIZE(__pyx_args)); return -1;}
+  if (unlikely(__pyx_kwds) && unlikely(PyDict_Size(__pyx_kwds) > 0) && unlikely(!__Pyx_CheckKeywordStrings(__pyx_kwds, "__init__", 0))) return -1;
+
+  /* "iocpsupport.pyx":152
+ *     def __init__(self):
+ *         cdef HANDLE res
+ *         res = CreateIoCompletionPort(INVALID_HANDLE_VALUE, 0, 0, 0)             # <<<<<<<<<<<<<<
+ *         if not res:
+ *             raise_error(0, 'CreateIoCompletionPort')
+ */
+  __pyx_v_res = CreateIoCompletionPort(INVALID_HANDLE_VALUE, 0, 0, 0);
+
+  /* "iocpsupport.pyx":153
+ *         cdef HANDLE res
+ *         res = CreateIoCompletionPort(INVALID_HANDLE_VALUE, 0, 0, 0)
+ *         if not res:             # <<<<<<<<<<<<<<
+ *             raise_error(0, 'CreateIoCompletionPort')
+ *         self.port = res
+ */
+  __pyx_t_1 = (!__pyx_v_res);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":154
+ *         res = CreateIoCompletionPort(INVALID_HANDLE_VALUE, 0, 0, 0)
+ *         if not res:
+ *             raise_error(0, 'CreateIoCompletionPort')             # <<<<<<<<<<<<<<
+ *         self.port = res
+ * 
+ */
+    __pyx_t_2 = ((PyObject *)__pyx_n_s_1);
+    __Pyx_INCREF(__pyx_t_2);
+    __pyx_f_11iocpsupport_raise_error(0, __pyx_t_2); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 154; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+    goto __pyx_L5;
+  }
+  __pyx_L5:;
+
+  /* "iocpsupport.pyx":155
+ *         if not res:
+ *             raise_error(0, 'CreateIoCompletionPort')
+ *         self.port = res             # <<<<<<<<<<<<<<
+ * 
+ *     def addHandle(self, HANDLE handle, size_t key=0):
+ */
+  ((struct __pyx_obj_11iocpsupport_CompletionPort *)__pyx_v_self)->port = __pyx_v_res;
+
+  __pyx_r = 0;
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_AddTraceback("iocpsupport.CompletionPort.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = -1;
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "iocpsupport.pyx":157
+ *         self.port = res
+ * 
+ *     def addHandle(self, HANDLE handle, size_t key=0):             # <<<<<<<<<<<<<<
+ *         cdef HANDLE res
+ *         res = CreateIoCompletionPort(handle, self.port, key, 0)
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_14CompletionPort_1addHandle(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyObject *__pyx_pf_11iocpsupport_14CompletionPort_1addHandle(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  __pyx_t_11iocpsupport_HANDLE __pyx_v_handle;
+  size_t __pyx_v_key;
+  __pyx_t_11iocpsupport_HANDLE __pyx_v_res;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__handle,&__pyx_n_s__key,0};
+  __Pyx_RefNannySetupContext("addHandle");
+  {
+    PyObject* values[2] = {0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__handle);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        if (kw_args > 0) {
+          PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__key);
+          if (value) { values[1] = value; kw_args--; }
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "addHandle") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 157; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else {
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+    }
+    __pyx_v_handle = __Pyx_PyInt_AsSize_t(values[0]); if (unlikely((__pyx_v_handle == (size_t)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 157; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    if (values[1]) {
+      __pyx_v_key = __Pyx_PyInt_AsSize_t(values[1]); if (unlikely((__pyx_v_key == (size_t)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 157; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    } else {
+      __pyx_v_key = ((size_t)0);
+    }
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("addHandle", 0, 1, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 157; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.CompletionPort.addHandle", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "iocpsupport.pyx":159
+ *     def addHandle(self, HANDLE handle, size_t key=0):
+ *         cdef HANDLE res
+ *         res = CreateIoCompletionPort(handle, self.port, key, 0)             # <<<<<<<<<<<<<<
+ *         if not res:
+ *             raise_error(0, 'CreateIoCompletionPort')
+ */
+  __pyx_v_res = CreateIoCompletionPort(__pyx_v_handle, ((struct __pyx_obj_11iocpsupport_CompletionPort *)__pyx_v_self)->port, __pyx_v_key, 0);
+
+  /* "iocpsupport.pyx":160
+ *         cdef HANDLE res
+ *         res = CreateIoCompletionPort(handle, self.port, key, 0)
+ *         if not res:             # <<<<<<<<<<<<<<
+ *             raise_error(0, 'CreateIoCompletionPort')
+ * 
+ */
+  __pyx_t_1 = (!__pyx_v_res);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":161
+ *         res = CreateIoCompletionPort(handle, self.port, key, 0)
+ *         if not res:
+ *             raise_error(0, 'CreateIoCompletionPort')             # <<<<<<<<<<<<<<
+ * 
+ *     def getEvent(self, long timeout):
+ */
+    __pyx_t_2 = ((PyObject *)__pyx_n_s_1);
+    __Pyx_INCREF(__pyx_t_2);
+    __pyx_f_11iocpsupport_raise_error(0, __pyx_t_2); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 161; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_AddTraceback("iocpsupport.CompletionPort.addHandle", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
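
[Annotation] A hedged sketch of how the two methods above (CompletionPort.__init__ and addHandle) fit together from Python. It assumes a Windows build where the compiled iocpsupport extension is importable; only CompletionPort, addHandle and the key parameter are taken from the source, the socket setup is illustrative:

    import socket
    from iocpsupport import CompletionPort    # Windows-only compiled extension

    cp = CompletionPort()          # wraps CreateIoCompletionPort(INVALID_HANDLE_VALUE, 0, 0, 0)

    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    cp.addHandle(s.fileno(), key=id(s))        # associate the socket handle with the port

On Windows, socket.fileno() returns the underlying SOCKET handle, which is what CreateIoCompletionPort expects here.
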
+
+/* "iocpsupport.pyx":163
+ *             raise_error(0, 'CreateIoCompletionPort')
+ * 
+ *     def getEvent(self, long timeout):             # <<<<<<<<<<<<<<
+ *         cdef PyThreadState *_save
+ *         cdef unsigned long bytes, rc
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_14CompletionPort_2getEvent(PyObject *__pyx_v_self, PyObject *__pyx_arg_timeout); /*proto*/
+static PyObject *__pyx_pf_11iocpsupport_14CompletionPort_2getEvent(PyObject *__pyx_v_self, PyObject *__pyx_arg_timeout) {
+  long __pyx_v_timeout;
+  struct PyThreadState *__pyx_v__save;
+  unsigned long __pyx_v_bytes;
+  unsigned long __pyx_v_rc;
+  size_t __pyx_v_key;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_v_ov;
+  PyObject *__pyx_v_obj = NULL;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  PyObject *__pyx_t_5 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("getEvent");
+  assert(__pyx_arg_timeout); {
+    __pyx_v_timeout = __Pyx_PyInt_AsLong(__pyx_arg_timeout); if (unlikely((__pyx_v_timeout == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 163; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.CompletionPort.getEvent", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "iocpsupport.pyx":169
+ *         cdef myOVERLAPPED *ov
+ * 
+ *         _save = PyEval_SaveThread()             # <<<<<<<<<<<<<<
+ *         rc = GetQueuedCompletionStatus(self.port, &bytes, &key, <OVERLAPPED **>&ov, timeout)
+ *         PyEval_RestoreThread(_save)
+ */
+  __pyx_v__save = PyEval_SaveThread();
+
+  /* "iocpsupport.pyx":170
+ * 
+ *         _save = PyEval_SaveThread()
+ *         rc = GetQueuedCompletionStatus(self.port, &bytes, &key, <OVERLAPPED **>&ov, timeout)             # <<<<<<<<<<<<<<
+ *         PyEval_RestoreThread(_save)
+ * 
+ */
+  __pyx_v_rc = GetQueuedCompletionStatus(((struct __pyx_obj_11iocpsupport_CompletionPort *)__pyx_v_self)->port, (&__pyx_v_bytes), (&__pyx_v_key), ((OVERLAPPED **)(&__pyx_v_ov)), __pyx_v_timeout);
+
+  /* "iocpsupport.pyx":171
+ *         _save = PyEval_SaveThread()
+ *         rc = GetQueuedCompletionStatus(self.port, &bytes, &key, <OVERLAPPED **>&ov, timeout)
+ *         PyEval_RestoreThread(_save)             # <<<<<<<<<<<<<<
+ * 
+ *         if not rc:
+ */
+  PyEval_RestoreThread(__pyx_v__save);
+
+  /* "iocpsupport.pyx":173
+ *         PyEval_RestoreThread(_save)
+ * 
+ *         if not rc:             # <<<<<<<<<<<<<<
+ *             rc = GetLastError()
+ *         else:
+ */
+  __pyx_t_1 = (!__pyx_v_rc);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":174
+ * 
+ *         if not rc:
+ *             rc = GetLastError()             # <<<<<<<<<<<<<<
+ *         else:
+ *             rc = 0
+ */
+    __pyx_v_rc = GetLastError();
+    goto __pyx_L5;
+  }
+  /*else*/ {
+
+    /* "iocpsupport.pyx":176
+ *             rc = GetLastError()
+ *         else:
+ *             rc = 0             # <<<<<<<<<<<<<<
+ * 
+ *         obj = None
+ */
+    __pyx_v_rc = 0;
+  }
+  __pyx_L5:;
+
+  /* "iocpsupport.pyx":178
+ *             rc = 0
+ * 
+ *         obj = None             # <<<<<<<<<<<<<<
+ *         if ov:
+ *             if ov.obj:
+ */
+  __Pyx_INCREF(Py_None);
+  __pyx_v_obj = Py_None;
+
+  /* "iocpsupport.pyx":179
+ * 
+ *         obj = None
+ *         if ov:             # <<<<<<<<<<<<<<
+ *             if ov.obj:
+ *                 obj = <object>ov.obj
+ */
+  __pyx_t_1 = (__pyx_v_ov != 0);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":180
+ *         obj = None
+ *         if ov:
+ *             if ov.obj:             # <<<<<<<<<<<<<<
+ *                 obj = <object>ov.obj
+ *                 Py_DECREF(obj) # we are stealing a reference here
+ */
+    __pyx_t_1 = (__pyx_v_ov->obj != 0);
+    if (__pyx_t_1) {
+
+      /* "iocpsupport.pyx":181
+ *         if ov:
+ *             if ov.obj:
+ *                 obj = <object>ov.obj             # <<<<<<<<<<<<<<
+ *                 Py_DECREF(obj) # we are stealing a reference here
+ *             PyMem_Free(ov)
+ */
+      __Pyx_INCREF(((PyObject *)__pyx_v_ov->obj));
+      __Pyx_DECREF(__pyx_v_obj);
+      __pyx_v_obj = ((PyObject *)__pyx_v_ov->obj);
+
+      /* "iocpsupport.pyx":182
+ *             if ov.obj:
+ *                 obj = <object>ov.obj
+ *                 Py_DECREF(obj) # we are stealing a reference here             # <<<<<<<<<<<<<<
+ *             PyMem_Free(ov)
+ * 
+ */
+      Py_DECREF(__pyx_v_obj);
+      goto __pyx_L7;
+    }
+    __pyx_L7:;
+
+    /* "iocpsupport.pyx":183
+ *                 obj = <object>ov.obj
+ *                 Py_DECREF(obj) # we are stealing a reference here
+ *             PyMem_Free(ov)             # <<<<<<<<<<<<<<
+ * 
+ *         return (rc, bytes, key, obj)
+ */
+    PyMem_Free(__pyx_v_ov);
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  /* "iocpsupport.pyx":185
+ *             PyMem_Free(ov)
+ * 
+ *         return (rc, bytes, key, obj)             # <<<<<<<<<<<<<<
+ * 
+ *     def postEvent(self, unsigned long bytes, size_t key, obj):
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_2 = PyLong_FromUnsignedLong(__pyx_v_rc); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_2);
+  __pyx_t_3 = PyLong_FromUnsignedLong(__pyx_v_bytes); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_3);
+  __pyx_t_4 = __Pyx_PyInt_FromSize_t(__pyx_v_key); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_4);
+  __pyx_t_5 = PyTuple_New(4); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 185; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_5));
+  PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_2);
+  __Pyx_GIVEREF(__pyx_t_2);
+  PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_3);
+  __Pyx_GIVEREF(__pyx_t_3);
+  PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_4);
+  __Pyx_GIVEREF(__pyx_t_4);
+  __Pyx_INCREF(__pyx_v_obj);
+  PyTuple_SET_ITEM(__pyx_t_5, 3, __pyx_v_obj);
+  __Pyx_GIVEREF(__pyx_v_obj);
+  __pyx_t_2 = 0;
+  __pyx_t_3 = 0;
+  __pyx_t_4 = 0;
+  __pyx_r = ((PyObject *)__pyx_t_5);
+  __pyx_t_5 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_AddTraceback("iocpsupport.CompletionPort.getEvent", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XDECREF(__pyx_v_obj);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
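
[Annotation] getEvent releases the GIL around GetQueuedCompletionStatus (the PyEval_SaveThread/PyEval_RestoreThread pair above) and returns a 4-tuple; rc is 0 on success, otherwise the value of GetLastError(), and obj is whatever Python object was attached to the OVERLAPPED (the extra reference the OVERLAPPED held is dropped here, per the "stealing a reference" comment). Continuing the sketch above; the dispatch to obj.callback is illustrative:

    # Block for up to 1000 ms (GetQueuedCompletionStatus takes milliseconds).
    rc, nbytes, key, obj = cp.getEvent(1000)
    if rc == 0 and obj is not None:
        obj.callback(rc, nbytes, obj)     # e.g. an Event attached by the issuer
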
+
+/* "iocpsupport.pyx":187
+ *         return (rc, bytes, key, obj)
+ * 
+ *     def postEvent(self, unsigned long bytes, size_t key, obj):             # <<<<<<<<<<<<<<
+ *         cdef myOVERLAPPED *ov
+ *         cdef unsigned long rc
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_14CompletionPort_3postEvent(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyObject *__pyx_pf_11iocpsupport_14CompletionPort_3postEvent(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  unsigned long __pyx_v_bytes;
+  size_t __pyx_v_key;
+  PyObject *__pyx_v_obj = 0;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_v_ov;
+  unsigned long __pyx_v_rc;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_t_2;
+  PyObject *__pyx_t_3 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__bytes,&__pyx_n_s__key,&__pyx_n_s__obj,0};
+  __Pyx_RefNannySetupContext("postEvent");
+  {
+    PyObject* values[3] = {0,0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__bytes);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__key);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("postEvent", 1, 3, 3, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  2:
+        values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__obj);
+        if (likely(values[2])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("postEvent", 1, 3, 3, 2); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "postEvent") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+    }
+    __pyx_v_bytes = __Pyx_PyInt_AsUnsignedLong(values[0]); if (unlikely((__pyx_v_bytes == (unsigned long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_key = __Pyx_PyInt_AsSize_t(values[1]); if (unlikely((__pyx_v_key == (size_t)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_obj = values[2];
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("postEvent", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 187; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.CompletionPort.postEvent", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "iocpsupport.pyx":191
+ *         cdef unsigned long rc
+ * 
+ *         if obj is not None:             # <<<<<<<<<<<<<<
+ *             ov = makeOV()
+ *             Py_INCREF(obj) # give ov its own reference to obj
+ */
+  __pyx_t_1 = (__pyx_v_obj != Py_None);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":192
+ * 
+ *         if obj is not None:
+ *             ov = makeOV()             # <<<<<<<<<<<<<<
+ *             Py_INCREF(obj) # give ov its own reference to obj
+ *             ov.obj = <PyObject *>obj
+ */
+    __pyx_t_2 = __pyx_f_11iocpsupport_makeOV(); if (unlikely(__pyx_t_2 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 192; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __pyx_v_ov = __pyx_t_2;
+
+    /* "iocpsupport.pyx":193
+ *         if obj is not None:
+ *             ov = makeOV()
+ *             Py_INCREF(obj) # give ov its own reference to obj             # <<<<<<<<<<<<<<
+ *             ov.obj = <PyObject *>obj
+ *         else:
+ */
+    Py_INCREF(__pyx_v_obj);
+
+    /* "iocpsupport.pyx":194
+ *             ov = makeOV()
+ *             Py_INCREF(obj) # give ov its own reference to obj
+ *             ov.obj = <PyObject *>obj             # <<<<<<<<<<<<<<
+ *         else:
+ *             ov = NULL
+ */
+    __pyx_v_ov->obj = ((struct PyObject *)__pyx_v_obj);
+    goto __pyx_L6;
+  }
+  /*else*/ {
+
+    /* "iocpsupport.pyx":196
+ *             ov.obj = <PyObject *>obj
+ *         else:
+ *             ov = NULL             # <<<<<<<<<<<<<<
+ * 
+ *         rc = PostQueuedCompletionStatus(self.port, bytes, key, <OVERLAPPED *>ov)
+ */
+    __pyx_v_ov = NULL;
+  }
+  __pyx_L6:;
+
+  /* "iocpsupport.pyx":198
+ *             ov = NULL
+ * 
+ *         rc = PostQueuedCompletionStatus(self.port, bytes, key, <OVERLAPPED *>ov)             # <<<<<<<<<<<<<<
+ *         if not rc:
+ *             if ov:
+ */
+  __pyx_v_rc = PostQueuedCompletionStatus(((struct __pyx_obj_11iocpsupport_CompletionPort *)__pyx_v_self)->port, __pyx_v_bytes, __pyx_v_key, ((OVERLAPPED *)__pyx_v_ov));
+
+  /* "iocpsupport.pyx":199
+ * 
+ *         rc = PostQueuedCompletionStatus(self.port, bytes, key, <OVERLAPPED *>ov)
+ *         if not rc:             # <<<<<<<<<<<<<<
+ *             if ov:
+ *                 Py_DECREF(obj)
+ */
+  __pyx_t_1 = (!__pyx_v_rc);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":200
+ *         rc = PostQueuedCompletionStatus(self.port, bytes, key, <OVERLAPPED *>ov)
+ *         if not rc:
+ *             if ov:             # <<<<<<<<<<<<<<
+ *                 Py_DECREF(obj)
+ *                 PyMem_Free(ov)
+ */
+    __pyx_t_1 = (__pyx_v_ov != 0);
+    if (__pyx_t_1) {
+
+      /* "iocpsupport.pyx":201
+ *         if not rc:
+ *             if ov:
+ *                 Py_DECREF(obj)             # <<<<<<<<<<<<<<
+ *                 PyMem_Free(ov)
+ *             raise_error(0, 'PostQueuedCompletionStatus')
+ */
+      Py_DECREF(__pyx_v_obj);
+
+      /* "iocpsupport.pyx":202
+ *             if ov:
+ *                 Py_DECREF(obj)
+ *                 PyMem_Free(ov)             # <<<<<<<<<<<<<<
+ *             raise_error(0, 'PostQueuedCompletionStatus')
+ * 
+ */
+      PyMem_Free(__pyx_v_ov);
+      goto __pyx_L8;
+    }
+    __pyx_L8:;
+
+    /* "iocpsupport.pyx":203
+ *                 Py_DECREF(obj)
+ *                 PyMem_Free(ov)
+ *             raise_error(0, 'PostQueuedCompletionStatus')             # <<<<<<<<<<<<<<
+ * 
+ *     def __del__(self):
+ */
+    __pyx_t_3 = ((PyObject *)__pyx_n_s_2);
+    __Pyx_INCREF(__pyx_t_3);
+    __pyx_f_11iocpsupport_raise_error(0, __pyx_t_3); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 203; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+    goto __pyx_L7;
+  }
+  __pyx_L7:;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_AddTraceback("iocpsupport.CompletionPort.postEvent", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
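
[Annotation] postEvent pairs naturally with getEvent: it allocates a myOVERLAPPED, gives it its own reference to obj, and posts a completion packet by hand. A hedged round-trip sketch continuing the examples above; apart from Event, postEvent and getEvent, the names are illustrative:

    from iocpsupport import Event

    def wakeup(rc, nbytes, evt):
        pass                                    # placeholder callback

    evt = Event(wakeup, owner=None)
    cp.postEvent(0, 42, evt)                    # queue a packet: 0 bytes, key 42, payload evt
    rc, nbytes, key, obj = cp.getEvent(1000)    # expect rc == 0, key == 42, obj is evt
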
+
+/* "iocpsupport.pyx":205
+ *             raise_error(0, 'PostQueuedCompletionStatus')
+ * 
+ *     def __del__(self):             # <<<<<<<<<<<<<<
+ *         CloseHandle(self.port)
+ * 
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_14CompletionPort_4__del__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static PyObject *__pyx_pf_11iocpsupport_14CompletionPort_4__del__(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  __Pyx_RefNannySetupContext("__del__");
+
+  /* "iocpsupport.pyx":206
+ * 
+ *     def __del__(self):
+ *         CloseHandle(self.port)             # <<<<<<<<<<<<<<
+ * 
+ * def makesockaddr(object buff):
+ */
+  CloseHandle(((struct __pyx_obj_11iocpsupport_CompletionPort *)__pyx_v_self)->port);
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "iocpsupport.pyx":208
+ *         CloseHandle(self.port)
+ * 
+ * def makesockaddr(object buff):             # <<<<<<<<<<<<<<
+ *     cdef void *mem_buffer
+ *     cdef Py_ssize_t size
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_makesockaddr(PyObject *__pyx_self, PyObject *__pyx_v_buff); /*proto*/
+static PyMethodDef __pyx_mdef_11iocpsupport_makesockaddr = {__Pyx_NAMESTR("makesockaddr"), (PyCFunction)__pyx_pf_11iocpsupport_makesockaddr, METH_O, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pf_11iocpsupport_makesockaddr(PyObject *__pyx_self, PyObject *__pyx_v_buff) {
+  void *__pyx_v_mem_buffer;
+  Py_ssize_t __pyx_v_size;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("makesockaddr");
+  __pyx_self = __pyx_self;
+
+  /* "iocpsupport.pyx":212
+ *     cdef Py_ssize_t size
+ * 
+ *     PyObject_AsReadBuffer(buff, &mem_buffer, &size)             # <<<<<<<<<<<<<<
+ *     # XXX: this should really return the address family as well
+ *     return _makesockaddr(<sockaddr *>mem_buffer, size)
+ */
+  __pyx_t_1 = PyObject_AsReadBuffer(__pyx_v_buff, (&__pyx_v_mem_buffer), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "iocpsupport.pyx":214
+ *     PyObject_AsReadBuffer(buff, &mem_buffer, &size)
+ *     # XXX: this should really return the address family as well
+ *     return _makesockaddr(<sockaddr *>mem_buffer, size)             # <<<<<<<<<<<<<<
+ * 
+ * cdef object _makesockaddr(sockaddr *addr, Py_ssize_t len):
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_2 = __pyx_f_11iocpsupport__makesockaddr(((struct sockaddr *)__pyx_v_mem_buffer), __pyx_v_size); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 214; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_2);
+  __pyx_r = __pyx_t_2;
+  __pyx_t_2 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_AddTraceback("iocpsupport.makesockaddr", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
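
[Annotation] makesockaddr decodes a raw sockaddr buffer into a (host, port) tuple via _makesockaddr below. A hedged sketch that builds an AF_INET sockaddr_in by hand, assuming the usual little-endian Windows layout (2-byte family in host order, 2-byte port in network order, 4-byte address, 8 bytes of padding); only makesockaddr itself is taken from the module:

    import socket, struct
    from iocpsupport import makesockaddr       # Windows-only compiled extension

    raw = (struct.pack('<H', socket.AF_INET)   # sin_family (host byte order)
           + struct.pack('!H', 8080)           # sin_port   (network byte order)
           + socket.inet_aton('127.0.0.1')     # sin_addr
           + b'\x00' * 8)                      # sin_zero padding

    host, port = makesockaddr(raw)             # -> ('127.0.0.1', 8080)
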
+
+/* "iocpsupport.pyx":216
+ *     return _makesockaddr(<sockaddr *>mem_buffer, size)
+ * 
+ * cdef object _makesockaddr(sockaddr *addr, Py_ssize_t len):             # <<<<<<<<<<<<<<
+ *     cdef sockaddr_in *sin
+ *     cdef sockaddr_in6 *sin6
+ */
+
+static PyObject *__pyx_f_11iocpsupport__makesockaddr(struct sockaddr *__pyx_v_addr, Py_ssize_t __pyx_v_len) {
+  struct sockaddr_in *__pyx_v_sin;
+  struct sockaddr_in6 *__pyx_v_sin6;
+  char __pyx_v_buff[256];
+  int __pyx_v_rc;
+  __pyx_t_11iocpsupport_DWORD __pyx_v_buff_size;
+  PyObject *__pyx_v_host = NULL;
+  unsigned short __pyx_v_sa_port;
+  PyObject *__pyx_v_port = NULL;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  unsigned short __pyx_t_5;
+  PyObject *__pyx_t_6 = NULL;
+  PyObject *(*__pyx_t_7)(PyObject *);
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("_makesockaddr");
+
+  /* "iocpsupport.pyx":221
+ *     cdef char buff[256]
+ *     cdef int rc
+ *     cdef DWORD buff_size = sizeof(buff)             # <<<<<<<<<<<<<<
+ *     if not len:
+ *         return None
+ */
+  __pyx_v_buff_size = (sizeof(__pyx_v_buff));
+
+  /* "iocpsupport.pyx":222
+ *     cdef int rc
+ *     cdef DWORD buff_size = sizeof(buff)
+ *     if not len:             # <<<<<<<<<<<<<<
+ *         return None
+ *     if addr.sa_family == AF_INET:
+ */
+  __pyx_t_1 = (!__pyx_v_len);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":223
+ *     cdef DWORD buff_size = sizeof(buff)
+ *     if not len:
+ *         return None             # <<<<<<<<<<<<<<
+ *     if addr.sa_family == AF_INET:
+ *         sin = <sockaddr_in *>addr
+ */
+    __Pyx_XDECREF(__pyx_r);
+    __Pyx_INCREF(Py_None);
+    __pyx_r = Py_None;
+    goto __pyx_L0;
+    goto __pyx_L3;
+  }
+  __pyx_L3:;
+
+  /* "iocpsupport.pyx":227
+ *         sin = <sockaddr_in *>addr
+ *         return PyString_FromString(inet_ntoa(sin.sin_addr)), ntohs(sin.sin_port)
+ *     elif addr.sa_family == AF_INET6:             # <<<<<<<<<<<<<<
+ *         sin6 = <sockaddr_in6 *>addr
+ *         rc = WSAAddressToStringA(addr, sizeof(sockaddr_in6), NULL, buff, &buff_size)
+ */
+  switch (__pyx_v_addr->sa_family) {
+
+    /* "iocpsupport.pyx":224
+ *     if not len:
+ *         return None
+ *     if addr.sa_family == AF_INET:             # <<<<<<<<<<<<<<
+ *         sin = <sockaddr_in *>addr
+ *         return PyString_FromString(inet_ntoa(sin.sin_addr)), ntohs(sin.sin_port)
+ */
+    case AF_INET:
+
+    /* "iocpsupport.pyx":225
+ *         return None
+ *     if addr.sa_family == AF_INET:
+ *         sin = <sockaddr_in *>addr             # <<<<<<<<<<<<<<
+ *         return PyString_FromString(inet_ntoa(sin.sin_addr)), ntohs(sin.sin_port)
+ *     elif addr.sa_family == AF_INET6:
+ */
+    __pyx_v_sin = ((struct sockaddr_in *)__pyx_v_addr);
+
+    /* "iocpsupport.pyx":226
+ *     if addr.sa_family == AF_INET:
+ *         sin = <sockaddr_in *>addr
+ *         return PyString_FromString(inet_ntoa(sin.sin_addr)), ntohs(sin.sin_port)             # <<<<<<<<<<<<<<
+ *     elif addr.sa_family == AF_INET6:
+ *         sin6 = <sockaddr_in6 *>addr
+ */
+    __Pyx_XDECREF(__pyx_r);
+    __pyx_t_2 = PyString_FromString(inet_ntoa(__pyx_v_sin->sin_addr)); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 226; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __pyx_t_3 = PyInt_FromLong(ntohs(__pyx_v_sin->sin_port)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 226; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_3);
+    __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 226; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+    PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2);
+    __Pyx_GIVEREF(__pyx_t_2);
+    PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_3);
+    __Pyx_GIVEREF(__pyx_t_3);
+    __pyx_t_2 = 0;
+    __pyx_t_3 = 0;
+    __pyx_r = ((PyObject *)__pyx_t_4);
+    __pyx_t_4 = 0;
+    goto __pyx_L0;
+    break;
+
+    /* "iocpsupport.pyx":227
+ *         sin = <sockaddr_in *>addr
+ *         return PyString_FromString(inet_ntoa(sin.sin_addr)), ntohs(sin.sin_port)
+ *     elif addr.sa_family == AF_INET6:             # <<<<<<<<<<<<<<
+ *         sin6 = <sockaddr_in6 *>addr
+ *         rc = WSAAddressToStringA(addr, sizeof(sockaddr_in6), NULL, buff, &buff_size)
+ */
+    case AF_INET6:
+
+    /* "iocpsupport.pyx":228
+ *         return PyString_FromString(inet_ntoa(sin.sin_addr)), ntohs(sin.sin_port)
+ *     elif addr.sa_family == AF_INET6:
+ *         sin6 = <sockaddr_in6 *>addr             # <<<<<<<<<<<<<<
+ *         rc = WSAAddressToStringA(addr, sizeof(sockaddr_in6), NULL, buff, &buff_size)
+ *         if rc == SOCKET_ERROR:
+ */
+    __pyx_v_sin6 = ((struct sockaddr_in6 *)__pyx_v_addr);
+
+    /* "iocpsupport.pyx":229
+ *     elif addr.sa_family == AF_INET6:
+ *         sin6 = <sockaddr_in6 *>addr
+ *         rc = WSAAddressToStringA(addr, sizeof(sockaddr_in6), NULL, buff, &buff_size)             # <<<<<<<<<<<<<<
+ *         if rc == SOCKET_ERROR:
+ *             raise_error(0, 'WSAAddressToString')
+ */
+    __pyx_v_rc = WSAAddressToStringA(__pyx_v_addr, (sizeof(struct sockaddr_in6)), NULL, __pyx_v_buff, (&__pyx_v_buff_size));
+
+    /* "iocpsupport.pyx":230
+ *         sin6 = <sockaddr_in6 *>addr
+ *         rc = WSAAddressToStringA(addr, sizeof(sockaddr_in6), NULL, buff, &buff_size)
+ *         if rc == SOCKET_ERROR:             # <<<<<<<<<<<<<<
+ *             raise_error(0, 'WSAAddressToString')
+ *         host, sa_port = PyString_FromString(buff), ntohs(sin6.sin6_port)
+ */
+    __pyx_t_1 = (__pyx_v_rc == SOCKET_ERROR);
+    if (__pyx_t_1) {
+
+      /* "iocpsupport.pyx":231
+ *         rc = WSAAddressToStringA(addr, sizeof(sockaddr_in6), NULL, buff, &buff_size)
+ *         if rc == SOCKET_ERROR:
+ *             raise_error(0, 'WSAAddressToString')             # <<<<<<<<<<<<<<
+ *         host, sa_port = PyString_FromString(buff), ntohs(sin6.sin6_port)
+ *         host, port = host.rsplit(':', 1)
+ */
+      __pyx_t_4 = ((PyObject *)__pyx_n_s__WSAAddressToString);
+      __Pyx_INCREF(__pyx_t_4);
+      __pyx_f_11iocpsupport_raise_error(0, __pyx_t_4); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 231; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+      goto __pyx_L4;
+    }
+    __pyx_L4:;
+
+    /* "iocpsupport.pyx":232
+ *         if rc == SOCKET_ERROR:
+ *             raise_error(0, 'WSAAddressToString')
+ *         host, sa_port = PyString_FromString(buff), ntohs(sin6.sin6_port)             # <<<<<<<<<<<<<<
+ *         host, port = host.rsplit(':', 1)
+ *         port = int(port)
+ */
+    __pyx_t_4 = PyString_FromString(__pyx_v_buff); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 232; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_4);
+    __pyx_t_5 = ntohs(__pyx_v_sin6->sin6_port);
+    __pyx_v_host = __pyx_t_4;
+    __pyx_t_4 = 0;
+    __pyx_v_sa_port = __pyx_t_5;
+
+    /* "iocpsupport.pyx":233
+ *             raise_error(0, 'WSAAddressToString')
+ *         host, sa_port = PyString_FromString(buff), ntohs(sin6.sin6_port)
+ *         host, port = host.rsplit(':', 1)             # <<<<<<<<<<<<<<
+ *         port = int(port)
+ *         assert host[0] == '['
+ */
+    __pyx_t_4 = PyObject_GetAttr(__pyx_v_host, __pyx_n_s__rsplit); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 233; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_4);
+    __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_k_tuple_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 233; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_3);
+    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+    if ((likely(PyTuple_CheckExact(__pyx_t_3))) || (PyList_CheckExact(__pyx_t_3))) {
+      PyObject* sequence = __pyx_t_3;
+      if (likely(PyTuple_CheckExact(sequence))) {
+        if (unlikely(PyTuple_GET_SIZE(sequence) != 2)) {
+          if (PyTuple_GET_SIZE(sequence) > 2) __Pyx_RaiseTooManyValuesError(2);
+          else __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(sequence));
+          {__pyx_filename = __pyx_f[0]; __pyx_lineno = 233; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+        }
+        __pyx_t_4 = PyTuple_GET_ITEM(sequence, 0); 
+        __pyx_t_2 = PyTuple_GET_ITEM(sequence, 1); 
+      } else {
+        if (unlikely(PyList_GET_SIZE(sequence) != 2)) {
+          if (PyList_GET_SIZE(sequence) > 2) __Pyx_RaiseTooManyValuesError(2);
+          else __Pyx_RaiseNeedMoreValuesError(PyList_GET_SIZE(sequence));
+          {__pyx_filename = __pyx_f[0]; __pyx_lineno = 233; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+        }
+        __pyx_t_4 = PyList_GET_ITEM(sequence, 0); 
+        __pyx_t_2 = PyList_GET_ITEM(sequence, 1); 
+      }
+      __Pyx_INCREF(__pyx_t_4);
+      __Pyx_INCREF(__pyx_t_2);
+      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+    } else {
+      Py_ssize_t index = -1;
+      __pyx_t_6 = PyObject_GetIter(__pyx_t_3); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 233; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_6);
+      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+      __pyx_t_7 = Py_TYPE(__pyx_t_6)->tp_iternext;
+      index = 0; __pyx_t_4 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_4)) goto __pyx_L5_unpacking_failed;
+      __Pyx_GOTREF(__pyx_t_4);
+      index = 1; __pyx_t_2 = __pyx_t_7(__pyx_t_6); if (unlikely(!__pyx_t_2)) goto __pyx_L5_unpacking_failed;
+      __Pyx_GOTREF(__pyx_t_2);
+      if (__Pyx_IternextUnpackEndCheck(__pyx_t_7(__pyx_t_6), 2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 233; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+      goto __pyx_L6_unpacking_done;
+      __pyx_L5_unpacking_failed:;
+      __Pyx_DECREF(__pyx_t_6); __pyx_t_6 = 0;
+      if (PyErr_Occurred() && PyErr_ExceptionMatches(PyExc_StopIteration)) PyErr_Clear();
+      if (!PyErr_Occurred()) __Pyx_RaiseNeedMoreValuesError(index);
+      {__pyx_filename = __pyx_f[0]; __pyx_lineno = 233; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __pyx_L6_unpacking_done:;
+    }
+    __Pyx_DECREF(__pyx_v_host);
+    __pyx_v_host = __pyx_t_4;
+    __pyx_t_4 = 0;
+    __pyx_v_port = __pyx_t_2;
+    __pyx_t_2 = 0;
+
+    /* "iocpsupport.pyx":234
+ *         host, sa_port = PyString_FromString(buff), ntohs(sin6.sin6_port)
+ *         host, port = host.rsplit(':', 1)
+ *         port = int(port)             # <<<<<<<<<<<<<<
+ *         assert host[0] == '['
+ *         assert host[-1] == ']'
+ */
+    __pyx_t_3 = PyTuple_New(1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 234; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_3));
+    __Pyx_INCREF(__pyx_v_port);
+    PyTuple_SET_ITEM(__pyx_t_3, 0, __pyx_v_port);
+    __Pyx_GIVEREF(__pyx_v_port);
+    __pyx_t_2 = PyObject_Call(((PyObject *)((PyObject*)(&PyInt_Type))), ((PyObject *)__pyx_t_3), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 234; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0;
+    __Pyx_DECREF(__pyx_v_port);
+    __pyx_v_port = __pyx_t_2;
+    __pyx_t_2 = 0;
+
+    /* "iocpsupport.pyx":235
+ *         host, port = host.rsplit(':', 1)
+ *         port = int(port)
+ *         assert host[0] == '['             # <<<<<<<<<<<<<<
+ *         assert host[-1] == ']'
+ *         assert port == sa_port
+ */
+    #ifndef CYTHON_WITHOUT_ASSERTIONS
+    __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_host, 0, sizeof(long), PyInt_FromLong); if (!__pyx_t_2) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 235; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __pyx_t_1 = __Pyx_PyString_Equals(__pyx_t_2, ((PyObject *)__pyx_kp_s_5), Py_EQ); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 235; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+    if (unlikely(!__pyx_t_1)) {
+      PyErr_SetNone(PyExc_AssertionError);
+      {__pyx_filename = __pyx_f[0]; __pyx_lineno = 235; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    }
+    #endif
+
+    /* "iocpsupport.pyx":236
+ *         port = int(port)
+ *         assert host[0] == '['
+ *         assert host[-1] == ']'             # <<<<<<<<<<<<<<
+ *         assert port == sa_port
+ *         return host[1:-1], port
+ */
+    #ifndef CYTHON_WITHOUT_ASSERTIONS
+    __pyx_t_2 = __Pyx_GetItemInt(__pyx_v_host, -1, sizeof(long), PyInt_FromLong); if (!__pyx_t_2) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 236; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __pyx_t_1 = __Pyx_PyString_Equals(__pyx_t_2, ((PyObject *)__pyx_kp_s_6), Py_EQ); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 236; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+    if (unlikely(!__pyx_t_1)) {
+      PyErr_SetNone(PyExc_AssertionError);
+      {__pyx_filename = __pyx_f[0]; __pyx_lineno = 236; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    }
+    #endif
+
+    /* "iocpsupport.pyx":237
+ *         assert host[0] == '['
+ *         assert host[-1] == ']'
+ *         assert port == sa_port             # <<<<<<<<<<<<<<
+ *         return host[1:-1], port
+ *     else:
+ */
+    #ifndef CYTHON_WITHOUT_ASSERTIONS
+    __pyx_t_2 = PyInt_FromLong(__pyx_v_sa_port); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __pyx_t_3 = PyObject_RichCompare(__pyx_v_port, __pyx_t_2, Py_EQ); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_3);
+    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+    __pyx_t_1 = __Pyx_PyObject_IsTrue(__pyx_t_3); if (unlikely(__pyx_t_1 < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+    if (unlikely(!__pyx_t_1)) {
+      PyErr_SetNone(PyExc_AssertionError);
+      {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    }
+    #endif
+
+    /* "iocpsupport.pyx":238
+ *         assert host[-1] == ']'
+ *         assert port == sa_port
+ *         return host[1:-1], port             # <<<<<<<<<<<<<<
+ *     else:
+ *         return PyString_FromStringAndSize(addr.sa_data, sizeof(addr.sa_data))
+ */
+    __Pyx_XDECREF(__pyx_r);
+    __pyx_t_3 = __Pyx_PySequence_GetSlice(__pyx_v_host, 1, -1); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 238; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_3);
+    __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 238; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_2));
+    PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_3);
+    __Pyx_GIVEREF(__pyx_t_3);
+    __Pyx_INCREF(__pyx_v_port);
+    PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_v_port);
+    __Pyx_GIVEREF(__pyx_v_port);
+    __pyx_t_3 = 0;
+    __pyx_r = ((PyObject *)__pyx_t_2);
+    __pyx_t_2 = 0;
+    goto __pyx_L0;
+    break;
+    default:
+
+    /* "iocpsupport.pyx":240
+ *         return host[1:-1], port
+ *     else:
+ *         return PyString_FromStringAndSize(addr.sa_data, sizeof(addr.sa_data))             # <<<<<<<<<<<<<<
+ * 
+ * 
+ */
+    __Pyx_XDECREF(__pyx_r);
+    __pyx_t_2 = PyString_FromStringAndSize(__pyx_v_addr->sa_data, (sizeof(__pyx_v_addr->sa_data))); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 240; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __pyx_r = __pyx_t_2;
+    __pyx_t_2 = 0;
+    goto __pyx_L0;
+    break;
+  }
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_6);
+  __Pyx_AddTraceback("iocpsupport._makesockaddr", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = 0;
+  __pyx_L0:;
+  __Pyx_XDECREF(__pyx_v_host);
+  __Pyx_XDECREF(__pyx_v_port);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
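The tail of _makesockaddr shown above handles the IPv6 case: the string produced earlier has the form "[addr]" plus a port, so the generated code asserts the surrounding brackets and the port, then returns host[1:-1] together with the port; the default branch instead returns the raw sa_data bytes. A minimal pure-Python sketch of that bracket-stripping step (variable names are illustrative, not taken from the upstream file):

    # Sketch of the IPv6 tail of _makesockaddr quoted above: validate the
    # "[addr]" form and strip the brackets. `host`, `port` and `sa_port`
    # stand in for the values the generated code has at this point.
    def strip_ipv6_brackets(host, port, sa_port):
        assert host[0] == '['
        assert host[-1] == ']'
        assert port == sa_port
        return host[1:-1], port

    # e.g. strip_ipv6_brackets('[fe80::1]', 8080, 8080) -> ('fe80::1', 8080)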
+
+/* "iocpsupport.pyx":243
+ * 
+ * 
+ * cdef object fillinetaddr(sockaddr_in *dest, object addr):             # <<<<<<<<<<<<<<
+ *     cdef unsigned short port
+ *     cdef unsigned long res
+ */
+
+static PyObject *__pyx_f_11iocpsupport_fillinetaddr(struct sockaddr_in *__pyx_v_dest, PyObject *__pyx_v_addr) {
+  unsigned short __pyx_v_port;
+  unsigned long __pyx_v_res;
+  char *__pyx_v_hoststr;
+  PyObject *__pyx_v_host = NULL;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *(*__pyx_t_4)(PyObject *);
+  unsigned short __pyx_t_5;
+  char *__pyx_t_6;
+  int __pyx_t_7;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("fillinetaddr");
+
+  /* "iocpsupport.pyx":247
+ *     cdef unsigned long res
+ *     cdef char *hoststr
+ *     host, port = addr             # <<<<<<<<<<<<<<
+ * 
+ *     hoststr = PyString_AsString(host)
+ */
+  if ((likely(PyTuple_CheckExact(__pyx_v_addr))) || (PyList_CheckExact(__pyx_v_addr))) {
+    PyObject* sequence = __pyx_v_addr;
+    if (likely(PyTuple_CheckExact(sequence))) {
+      if (unlikely(PyTuple_GET_SIZE(sequence) != 2)) {
+        if (PyTuple_GET_SIZE(sequence) > 2) __Pyx_RaiseTooManyValuesError(2);
+        else __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(sequence));
+        {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      }
+      __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); 
+      __pyx_t_2 = PyTuple_GET_ITEM(sequence, 1); 
+    } else {
+      if (unlikely(PyList_GET_SIZE(sequence) != 2)) {
+        if (PyList_GET_SIZE(sequence) > 2) __Pyx_RaiseTooManyValuesError(2);
+        else __Pyx_RaiseNeedMoreValuesError(PyList_GET_SIZE(sequence));
+        {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      }
+      __pyx_t_1 = PyList_GET_ITEM(sequence, 0); 
+      __pyx_t_2 = PyList_GET_ITEM(sequence, 1); 
+    }
+    __Pyx_INCREF(__pyx_t_1);
+    __Pyx_INCREF(__pyx_t_2);
+  } else {
+    Py_ssize_t index = -1;
+    __pyx_t_3 = PyObject_GetIter(__pyx_v_addr); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_3);
+    __pyx_t_4 = Py_TYPE(__pyx_t_3)->tp_iternext;
+    index = 0; __pyx_t_1 = __pyx_t_4(__pyx_t_3); if (unlikely(!__pyx_t_1)) goto __pyx_L3_unpacking_failed;
+    __Pyx_GOTREF(__pyx_t_1);
+    index = 1; __pyx_t_2 = __pyx_t_4(__pyx_t_3); if (unlikely(!__pyx_t_2)) goto __pyx_L3_unpacking_failed;
+    __Pyx_GOTREF(__pyx_t_2);
+    if (__Pyx_IternextUnpackEndCheck(__pyx_t_4(__pyx_t_3), 2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+    goto __pyx_L4_unpacking_done;
+    __pyx_L3_unpacking_failed:;
+    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+    if (PyErr_Occurred() && PyErr_ExceptionMatches(PyExc_StopIteration)) PyErr_Clear();
+    if (!PyErr_Occurred()) __Pyx_RaiseNeedMoreValuesError(index);
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __pyx_L4_unpacking_done:;
+  }
+  __pyx_t_5 = __Pyx_PyInt_AsUnsignedShort(__pyx_t_2); if (unlikely((__pyx_t_5 == (unsigned short)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 247; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+  __pyx_v_host = __pyx_t_1;
+  __pyx_t_1 = 0;
+  __pyx_v_port = __pyx_t_5;
+
+  /* "iocpsupport.pyx":249
+ *     host, port = addr
+ * 
+ *     hoststr = PyString_AsString(host)             # <<<<<<<<<<<<<<
+ *     res = inet_addr(hoststr)
+ *     if res == INADDR_ANY:
+ */
+  __pyx_t_6 = PyString_AsString(__pyx_v_host); if (unlikely(__pyx_t_6 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 249; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_hoststr = __pyx_t_6;
+
+  /* "iocpsupport.pyx":250
+ * 
+ *     hoststr = PyString_AsString(host)
+ *     res = inet_addr(hoststr)             # <<<<<<<<<<<<<<
+ *     if res == INADDR_ANY:
+ *         raise ValueError, 'invalid IP address'
+ */
+  __pyx_v_res = inet_addr(__pyx_v_hoststr);
+
+  /* "iocpsupport.pyx":251
+ *     hoststr = PyString_AsString(host)
+ *     res = inet_addr(hoststr)
+ *     if res == INADDR_ANY:             # <<<<<<<<<<<<<<
+ *         raise ValueError, 'invalid IP address'
+ *     dest.sin_addr.s_addr = res
+ */
+  __pyx_t_7 = (__pyx_v_res == INADDR_ANY);
+  if (__pyx_t_7) {
+
+    /* "iocpsupport.pyx":252
+ *     res = inet_addr(hoststr)
+ *     if res == INADDR_ANY:
+ *         raise ValueError, 'invalid IP address'             # <<<<<<<<<<<<<<
+ *     dest.sin_addr.s_addr = res
+ * 
+ */
+    __Pyx_Raise(__pyx_builtin_ValueError, ((PyObject *)__pyx_kp_s_7), 0, 0);
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 252; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L5;
+  }
+  __pyx_L5:;
+
+  /* "iocpsupport.pyx":253
+ *     if res == INADDR_ANY:
+ *         raise ValueError, 'invalid IP address'
+ *     dest.sin_addr.s_addr = res             # <<<<<<<<<<<<<<
+ * 
+ *     dest.sin_port = htons(port)
+ */
+  __pyx_v_dest->sin_addr.s_addr = __pyx_v_res;
+
+  /* "iocpsupport.pyx":255
+ *     dest.sin_addr.s_addr = res
+ * 
+ *     dest.sin_port = htons(port)             # <<<<<<<<<<<<<<
+ * 
+ * 
+ */
+  __pyx_v_dest->sin_port = htons(__pyx_v_port);
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_AddTraceback("iocpsupport.fillinetaddr", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = 0;
+  __pyx_L0:;
+  __Pyx_XDECREF(__pyx_v_host);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
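For orientation, the iocpsupport.pyx lines quoted above amount to: unpack (host, port), convert the dotted-quad host with inet_addr(), reject the value the Cython source treats as invalid, then store the address and the network-byte-order port into the sockaddr_in. A rough pure-Python analogue of the same steps, using the standard socket module instead of the raw Winsock call (so the error path differs slightly from the generated code):

    import socket

    def fillinetaddr_sketch(addr):
        """Mirror of the quoted fillinetaddr logic using the stdlib socket module."""
        host, port = addr
        packed = socket.inet_aton(host)    # analogue of inet_addr(); raises OSError if malformed
        if packed == b"\x00\x00\x00\x00":  # the quoted source rejects INADDR_ANY with ValueError
            raise ValueError("invalid IP address")
        sin_port = socket.htons(port)      # dest.sin_port = htons(port)
        return packed, sin_port            # packed bytes stand in for dest.sin_addr.s_addr

    # e.g. fillinetaddr_sketch(("127.0.0.1", 80)) -> (b'\x7f\x00\x00\x01', socket.htons(80))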
+
+/* "iocpsupport.pyx":258
+ * 
+ * 
+ * cdef object fillinet6addr(sockaddr_in6 *dest, object addr):             # <<<<<<<<<<<<<<
+ *     cdef unsigned short port
+ *     cdef unsigned long res
+ */
+
+static PyObject *__pyx_f_11iocpsupport_fillinet6addr(struct sockaddr_in6 *__pyx_v_dest, PyObject *__pyx_v_addr) {
+  unsigned short __pyx_v_port;
+  char *__pyx_v_hoststr;
+  int __pyx_v_addrlen;
+  PyObject *__pyx_v_host = NULL;
+  PyObject *__pyx_v_flow = NULL;
+  PyObject *__pyx_v_scope = NULL;
+  int __pyx_v_parseresult;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  PyObject *__pyx_t_5 = NULL;
+  PyObject *(*__pyx_t_6)(PyObject *);
+  unsigned short __pyx_t_7;
+  char *__pyx_t_8;
+  int __pyx_t_9;
+  unsigned long __pyx_t_10;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("fillinet6addr");
+
+  /* "iocpsupport.pyx":262
+ *     cdef unsigned long res
+ *     cdef char *hoststr
+ *     cdef int addrlen = sizeof(sockaddr_in6)             # <<<<<<<<<<<<<<
+ *     host, port, flow, scope = addr
+ *     host = host.split("%")[0] # remove scope ID, if any
+ */
+  __pyx_v_addrlen = (sizeof(struct sockaddr_in6));
+
+  /* "iocpsupport.pyx":263
+ *     cdef char *hoststr
+ *     cdef int addrlen = sizeof(sockaddr_in6)
+ *     host, port, flow, scope = addr             # <<<<<<<<<<<<<<
+ *     host = host.split("%")[0] # remove scope ID, if any
+ * 
+ */
+  if ((likely(PyTuple_CheckExact(__pyx_v_addr))) || (PyList_CheckExact(__pyx_v_addr))) {
+    PyObject* sequence = __pyx_v_addr;
+    if (likely(PyTuple_CheckExact(sequence))) {
+      if (unlikely(PyTuple_GET_SIZE(sequence) != 4)) {
+        if (PyTuple_GET_SIZE(sequence) > 4) __Pyx_RaiseTooManyValuesError(4);
+        else __Pyx_RaiseNeedMoreValuesError(PyTuple_GET_SIZE(sequence));
+        {__pyx_filename = __pyx_f[0]; __pyx_lineno = 263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      }
+      __pyx_t_1 = PyTuple_GET_ITEM(sequence, 0); 
+      __pyx_t_2 = PyTuple_GET_ITEM(sequence, 1); 
+      __pyx_t_3 = PyTuple_GET_ITEM(sequence, 2); 
+      __pyx_t_4 = PyTuple_GET_ITEM(sequence, 3); 
+    } else {
+      if (unlikely(PyList_GET_SIZE(sequence) != 4)) {
+        if (PyList_GET_SIZE(sequence) > 4) __Pyx_RaiseTooManyValuesError(4);
+        else __Pyx_RaiseNeedMoreValuesError(PyList_GET_SIZE(sequence));
+        {__pyx_filename = __pyx_f[0]; __pyx_lineno = 263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      }
+      __pyx_t_1 = PyList_GET_ITEM(sequence, 0); 
+      __pyx_t_2 = PyList_GET_ITEM(sequence, 1); 
+      __pyx_t_3 = PyList_GET_ITEM(sequence, 2); 
+      __pyx_t_4 = PyList_GET_ITEM(sequence, 3); 
+    }
+    __Pyx_INCREF(__pyx_t_1);
+    __Pyx_INCREF(__pyx_t_2);
+    __Pyx_INCREF(__pyx_t_3);
+    __Pyx_INCREF(__pyx_t_4);
+  } else {
+    Py_ssize_t index = -1;
+    __pyx_t_5 = PyObject_GetIter(__pyx_v_addr); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_5);
+    __pyx_t_6 = Py_TYPE(__pyx_t_5)->tp_iternext;
+    index = 0; __pyx_t_1 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_1)) goto __pyx_L3_unpacking_failed;
+    __Pyx_GOTREF(__pyx_t_1);
+    index = 1; __pyx_t_2 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_2)) goto __pyx_L3_unpacking_failed;
+    __Pyx_GOTREF(__pyx_t_2);
+    index = 2; __pyx_t_3 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_3)) goto __pyx_L3_unpacking_failed;
+    __Pyx_GOTREF(__pyx_t_3);
+    index = 3; __pyx_t_4 = __pyx_t_6(__pyx_t_5); if (unlikely(!__pyx_t_4)) goto __pyx_L3_unpacking_failed;
+    __Pyx_GOTREF(__pyx_t_4);
+    if (__Pyx_IternextUnpackEndCheck(__pyx_t_6(__pyx_t_5), 4) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+    goto __pyx_L4_unpacking_done;
+    __pyx_L3_unpacking_failed:;
+    __Pyx_DECREF(__pyx_t_5); __pyx_t_5 = 0;
+    if (PyErr_Occurred() && PyErr_ExceptionMatches(PyExc_StopIteration)) PyErr_Clear();
+    if (!PyErr_Occurred()) __Pyx_RaiseNeedMoreValuesError(index);
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __pyx_L4_unpacking_done:;
+  }
+  __pyx_t_7 = __Pyx_PyInt_AsUnsignedShort(__pyx_t_2); if (unlikely((__pyx_t_7 == (unsigned short)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 263; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+  __pyx_v_host = __pyx_t_1;
+  __pyx_t_1 = 0;
+  __pyx_v_port = __pyx_t_7;
+  __pyx_v_flow = __pyx_t_3;
+  __pyx_t_3 = 0;
+  __pyx_v_scope = __pyx_t_4;
+  __pyx_t_4 = 0;
+
+  /* "iocpsupport.pyx":264
+ *     cdef int addrlen = sizeof(sockaddr_in6)
+ *     host, port, flow, scope = addr
+ *     host = host.split("%")[0] # remove scope ID, if any             # <<<<<<<<<<<<<<
+ * 
+ *     hoststr = PyString_AsString(host)
+ */
+  __pyx_t_4 = PyObject_GetAttr(__pyx_v_host, __pyx_n_s__split); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 264; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_4);
+  __pyx_t_3 = PyObject_Call(__pyx_t_4, ((PyObject *)__pyx_k_tuple_9), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 264; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_3);
+  __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+  __pyx_t_4 = __Pyx_GetItemInt(__pyx_t_3, 0, sizeof(long), PyInt_FromLong); if (!__pyx_t_4) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 264; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_4);
+  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+  __Pyx_DECREF(__pyx_v_host);
+  __pyx_v_host = __pyx_t_4;
+  __pyx_t_4 = 0;
+
+  /* "iocpsupport.pyx":266
+ *     host = host.split("%")[0] # remove scope ID, if any
+ * 
+ *     hoststr = PyString_AsString(host)             # <<<<<<<<<<<<<<
+ *     cdef int parseresult = WSAStringToAddressA(hoststr, AF_INET6, NULL,
+ *                                                <sockaddr *>dest, &addrlen)
+ */
+  __pyx_t_8 = PyString_AsString(__pyx_v_host); if (unlikely(__pyx_t_8 == NULL)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 266; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_hoststr = __pyx_t_8;
+
+  /* "iocpsupport.pyx":268
+ *     hoststr = PyString_AsString(host)
+ *     cdef int parseresult = WSAStringToAddressA(hoststr, AF_INET6, NULL,
+ *                                                <sockaddr *>dest, &addrlen)             # <<<<<<<<<<<<<<
+ *     if parseresult == SOCKET_ERROR:
+ *         raise ValueError, 'invalid IPv6 address %r' % (host,)
+ */
+  __pyx_v_parseresult = WSAStringToAddressA(__pyx_v_hoststr, AF_INET6, NULL, ((struct sockaddr *)__pyx_v_dest), (&__pyx_v_addrlen));
+
+  /* "iocpsupport.pyx":269
+ *     cdef int parseresult = WSAStringToAddressA(hoststr, AF_INET6, NULL,
+ *                                                <sockaddr *>dest, &addrlen)
+ *     if parseresult == SOCKET_ERROR:             # <<<<<<<<<<<<<<
+ *         raise ValueError, 'invalid IPv6 address %r' % (host,)
+ *     if parseresult != 0:
+ */
+  __pyx_t_9 = (__pyx_v_parseresult == SOCKET_ERROR);
+  if (__pyx_t_9) {
+
+    /* "iocpsupport.pyx":270
+ *                                                <sockaddr *>dest, &addrlen)
+ *     if parseresult == SOCKET_ERROR:
+ *         raise ValueError, 'invalid IPv6 address %r' % (host,)             # <<<<<<<<<<<<<<
+ *     if parseresult != 0:
+ *         raise RuntimeError, 'undefined error occurred during address parsing'
+ */
+    __pyx_t_4 = PyTuple_New(1); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+    __Pyx_INCREF(__pyx_v_host);
+    PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_v_host);
+    __Pyx_GIVEREF(__pyx_v_host);
+    __pyx_t_3 = PyNumber_Remainder(((PyObject *)__pyx_kp_s_10), ((PyObject *)__pyx_t_4)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_3));
+    __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0;
+    __Pyx_Raise(__pyx_builtin_ValueError, ((PyObject *)__pyx_t_3), 0, 0);
+    __Pyx_DECREF(((PyObject *)__pyx_t_3)); __pyx_t_3 = 0;
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L5;
+  }
+  __pyx_L5:;
+
+  /* "iocpsupport.pyx":271
+ *     if parseresult == SOCKET_ERROR:
+ *         raise ValueError, 'invalid IPv6 address %r' % (host,)
+ *     if parseresult != 0:             # <<<<<<<<<<<<<<
+ *         raise RuntimeError, 'undefined error occurred during address parsing'
+ *     # sin6_host field was handled by WSAStringToAddress
+ */
+  __pyx_t_9 = (__pyx_v_parseresult != 0);
+  if (__pyx_t_9) {
+
+    /* "iocpsupport.pyx":272
+ *         raise ValueError, 'invalid IPv6 address %r' % (host,)
+ *     if parseresult != 0:
+ *         raise RuntimeError, 'undefined error occurred during address parsing'             # <<<<<<<<<<<<<<
+ *     # sin6_host field was handled by WSAStringToAddress
+ *     dest.sin6_port = htons(port)
+ */
+    __Pyx_Raise(__pyx_builtin_RuntimeError, ((PyObject *)__pyx_kp_s_11), 0, 0);
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 272; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  /* "iocpsupport.pyx":274
+ *         raise RuntimeError, 'undefined error occurred during address parsing'
+ *     # sin6_host field was handled by WSAStringToAddress
+ *     dest.sin6_port = htons(port)             # <<<<<<<<<<<<<<
+ *     dest.sin6_flowinfo = flow
+ *     dest.sin6_scope_id = scope
+ */
+  __pyx_v_dest->sin6_port = htons(__pyx_v_port);
+
+  /* "iocpsupport.pyx":275
+ *     # sin6_host field was handled by WSAStringToAddress
+ *     dest.sin6_port = htons(port)
+ *     dest.sin6_flowinfo = flow             # <<<<<<<<<<<<<<
+ *     dest.sin6_scope_id = scope
+ * 
+ */
+  __pyx_t_10 = __Pyx_PyInt_AsUnsignedLong(__pyx_v_flow); if (unlikely((__pyx_t_10 == (unsigned long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 275; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_dest->sin6_flowinfo = __pyx_t_10;
+
+  /* "iocpsupport.pyx":276
+ *     dest.sin6_port = htons(port)
+ *     dest.sin6_flowinfo = flow
+ *     dest.sin6_scope_id = scope             # <<<<<<<<<<<<<<
+ * 
+ * 
+ */
+  __pyx_t_10 = __Pyx_PyInt_AsUnsignedLong(__pyx_v_scope); if (unlikely((__pyx_t_10 == (unsigned long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_dest->sin6_scope_id = __pyx_t_10;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_AddTraceback("iocpsupport.fillinet6addr", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = 0;
+  __pyx_L0:;
+  __Pyx_XDECREF(__pyx_v_host);
+  __Pyx_XDECREF(__pyx_v_flow);
+  __Pyx_XDECREF(__pyx_v_scope);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
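The quoted source for fillinet6addr unpacks the four-tuple used for IPv6 addresses, drops any "%scope" suffix, lets WSAStringToAddressA parse the textual address straight into the sockaddr_in6, and then fills in port, flowinfo and scope_id by hand. A pure-Python analogue of that parsing (socket.inet_pton replaces the Winsock call here, so it raises OSError instead of the ValueError/RuntimeError pair shown above):

    import socket

    def fillinet6addr_sketch(addr):
        """Analogue of the quoted fillinet6addr: returns the pieces stored in sockaddr_in6."""
        host, port, flowinfo, scope_id = addr
        host = host.split("%")[0]                         # remove scope ID, if any
        packed = socket.inet_pton(socket.AF_INET6, host)  # stands in for WSAStringToAddressA
        return packed, socket.htons(port), flowinfo, scope_id

    # e.g. fillinet6addr_sketch(("fe80::1%eth0", 80, 0, 3))
    #   -> (16-byte packed address, socket.htons(80), 0, 3)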
+
+/* "iocpsupport.pyx":279
+ * 
+ * 
+ * def AllocateReadBuffer(int size):             # <<<<<<<<<<<<<<
+ *     return PyBuffer_New(size)
+ * 
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_1AllocateReadBuffer(PyObject *__pyx_self, PyObject *__pyx_arg_size); /*proto*/
+static PyMethodDef __pyx_mdef_11iocpsupport_1AllocateReadBuffer = {__Pyx_NAMESTR("AllocateReadBuffer"), (PyCFunction)__pyx_pf_11iocpsupport_1AllocateReadBuffer, METH_O, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pf_11iocpsupport_1AllocateReadBuffer(PyObject *__pyx_self, PyObject *__pyx_arg_size) {
+  int __pyx_v_size;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("AllocateReadBuffer");
+  __pyx_self = __pyx_self;
+  assert(__pyx_arg_size); {
+    __pyx_v_size = __Pyx_PyInt_AsInt(__pyx_arg_size); if (unlikely((__pyx_v_size == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 279; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.AllocateReadBuffer", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "iocpsupport.pyx":280
+ * 
+ * def AllocateReadBuffer(int size):
+ *     return PyBuffer_New(size)             # <<<<<<<<<<<<<<
+ * 
+ * def maxAddrLen(long s):
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_1 = PyBuffer_New(__pyx_v_size); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 280; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_r = __pyx_t_1;
+  __pyx_t_1 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_AddTraceback("iocpsupport.AllocateReadBuffer", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
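AllocateReadBuffer is a thin wrapper: it hands the requested size to PyBuffer_New and returns the resulting writable buffer object, which the reactor later passes to the overlapped I/O calls. A rough modern analogue for illustration only, since PyBuffer_New belongs to the Python 2 buffer API:

    def allocate_read_buffer(size):
        # bytearray gives a writable, fixed-size buffer comparable to what
        # PyBuffer_New(size) returns in the generated code above.
        return bytearray(size)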
+
+/* "iocpsupport.pyx":282
+ *     return PyBuffer_New(size)
+ * 
+ * def maxAddrLen(long s):             # <<<<<<<<<<<<<<
+ *     cdef WSAPROTOCOL_INFO wsa_pi
+ *     cdef int size, rc
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_2maxAddrLen(PyObject *__pyx_self, PyObject *__pyx_arg_s); /*proto*/
+static PyMethodDef __pyx_mdef_11iocpsupport_2maxAddrLen = {__Pyx_NAMESTR("maxAddrLen"), (PyCFunction)__pyx_pf_11iocpsupport_2maxAddrLen, METH_O, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pf_11iocpsupport_2maxAddrLen(PyObject *__pyx_self, PyObject *__pyx_arg_s) {
+  long __pyx_v_s;
+  WSAPROTOCOL_INFO __pyx_v_wsa_pi;
+  int __pyx_v_size;
+  int __pyx_v_rc;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("maxAddrLen");
+  __pyx_self = __pyx_self;
+  assert(__pyx_arg_s); {
+    __pyx_v_s = __Pyx_PyInt_AsLong(__pyx_arg_s); if (unlikely((__pyx_v_s == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 282; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.maxAddrLen", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "iocpsupport.pyx":286
+ *     cdef int size, rc
+ * 
+ *     size = sizeof(wsa_pi)             # <<<<<<<<<<<<<<
+ *     rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)
+ *     if rc == SOCKET_ERROR:
+ */
+  __pyx_v_size = (sizeof(__pyx_v_wsa_pi));
+
+  /* "iocpsupport.pyx":287
+ * 
+ *     size = sizeof(wsa_pi)
+ *     rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)             # <<<<<<<<<<<<<<
+ *     if rc == SOCKET_ERROR:
+ *         raise_error(WSAGetLastError(), 'getsockopt')
+ */
+  __pyx_v_rc = getsockopt(__pyx_v_s, SOL_SOCKET, SO_PROTOCOL_INFO, ((char *)(&__pyx_v_wsa_pi)), (&__pyx_v_size));
+
+  /* "iocpsupport.pyx":288
+ *     size = sizeof(wsa_pi)
+ *     rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)
+ *     if rc == SOCKET_ERROR:             # <<<<<<<<<<<<<<
+ *         raise_error(WSAGetLastError(), 'getsockopt')
+ *     return wsa_pi.iMaxSockAddr
+ */
+  __pyx_t_1 = (__pyx_v_rc == SOCKET_ERROR);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":289
+ *     rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)
+ *     if rc == SOCKET_ERROR:
+ *         raise_error(WSAGetLastError(), 'getsockopt')             # <<<<<<<<<<<<<<
+ *     return wsa_pi.iMaxSockAddr
+ * 
+ */
+    __pyx_t_2 = ((PyObject *)__pyx_n_s__getsockopt);
+    __Pyx_INCREF(__pyx_t_2);
+    __pyx_f_11iocpsupport_raise_error(WSAGetLastError(), __pyx_t_2); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 289; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+    goto __pyx_L5;
+  }
+  __pyx_L5:;
+
+  /* "iocpsupport.pyx":290
+ *     if rc == SOCKET_ERROR:
+ *         raise_error(WSAGetLastError(), 'getsockopt')
+ *     return wsa_pi.iMaxSockAddr             # <<<<<<<<<<<<<<
+ * 
+ * cdef int getAddrFamily(SOCKET s) except *:
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_2 = PyInt_FromLong(__pyx_v_wsa_pi.iMaxSockAddr); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 290; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_2);
+  __pyx_r = __pyx_t_2;
+  __pyx_t_2 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_AddTraceback("iocpsupport.maxAddrLen", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "iocpsupport.pyx":292
+ *     return wsa_pi.iMaxSockAddr
+ * 
+ * cdef int getAddrFamily(SOCKET s) except *:             # <<<<<<<<<<<<<<
+ *     cdef WSAPROTOCOL_INFO wsa_pi
+ *     cdef int size, rc
+ */
+
+static int __pyx_f_11iocpsupport_getAddrFamily(__pyx_t_11iocpsupport_SOCKET __pyx_v_s) {
+  WSAPROTOCOL_INFO __pyx_v_wsa_pi;
+  int __pyx_v_size;
+  int __pyx_v_rc;
+  int __pyx_r;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("getAddrFamily");
+
+  /* "iocpsupport.pyx":296
+ *     cdef int size, rc
+ * 
+ *     size = sizeof(wsa_pi)             # <<<<<<<<<<<<<<
+ *     rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)
+ *     if rc == SOCKET_ERROR:
+ */
+  __pyx_v_size = (sizeof(__pyx_v_wsa_pi));
+
+  /* "iocpsupport.pyx":297
+ * 
+ *     size = sizeof(wsa_pi)
+ *     rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)             # <<<<<<<<<<<<<<
+ *     if rc == SOCKET_ERROR:
+ *         raise_error(WSAGetLastError(), 'getsockopt')
+ */
+  __pyx_v_rc = getsockopt(__pyx_v_s, SOL_SOCKET, SO_PROTOCOL_INFO, ((char *)(&__pyx_v_wsa_pi)), (&__pyx_v_size));
+
+  /* "iocpsupport.pyx":298
+ *     size = sizeof(wsa_pi)
+ *     rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)
+ *     if rc == SOCKET_ERROR:             # <<<<<<<<<<<<<<
+ *         raise_error(WSAGetLastError(), 'getsockopt')
+ *     return wsa_pi.iAddressFamily
+ */
+  __pyx_t_1 = (__pyx_v_rc == SOCKET_ERROR);
+  if (__pyx_t_1) {
+
+    /* "iocpsupport.pyx":299
+ *     rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)
+ *     if rc == SOCKET_ERROR:
+ *         raise_error(WSAGetLastError(), 'getsockopt')             # <<<<<<<<<<<<<<
+ *     return wsa_pi.iAddressFamily
+ * 
+ */
+    __pyx_t_2 = ((PyObject *)__pyx_n_s__getsockopt);
+    __Pyx_INCREF(__pyx_t_2);
+    __pyx_f_11iocpsupport_raise_error(WSAGetLastError(), __pyx_t_2); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 299; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+    goto __pyx_L3;
+  }
+  __pyx_L3:;
+
+  /* "iocpsupport.pyx":300
+ *     if rc == SOCKET_ERROR:
+ *         raise_error(WSAGetLastError(), 'getsockopt')
+ *     return wsa_pi.iAddressFamily             # <<<<<<<<<<<<<<
+ * 
+ * import socket # for WSAStartup
+ */
+  __pyx_r = __pyx_v_wsa_pi.iAddressFamily;
+  goto __pyx_L0;
+
+  __pyx_r = 0;
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_AddTraceback("iocpsupport.getAddrFamily", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = 0;
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":5
+ * 
+ * 
+ * def accept(long listening, long accepting, object buff, object obj):             # <<<<<<<<<<<<<<
+ *     """
+ *     CAUTION: unlike system AcceptEx(), this function returns 0 on success
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_3accept(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_11iocpsupport_3accept[] = "\n    CAUTION: unlike system AcceptEx(), this function returns 0 on success\n    ";
+static PyMethodDef __pyx_mdef_11iocpsupport_3accept = {__Pyx_NAMESTR("accept"), (PyCFunction)__pyx_pf_11iocpsupport_3accept, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(__pyx_doc_11iocpsupport_3accept)};
+static PyObject *__pyx_pf_11iocpsupport_3accept(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  long __pyx_v_listening;
+  long __pyx_v_accepting;
+  PyObject *__pyx_v_buff = 0;
+  PyObject *__pyx_v_obj = 0;
+  unsigned long __pyx_v_bytes;
+  int __pyx_v_rc;
+  Py_ssize_t __pyx_v_size;
+  void *__pyx_v_mem_buffer;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_v_ov;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_t_2;
+  int __pyx_t_3;
+  PyObject *__pyx_t_4 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__listening,&__pyx_n_s__accepting,&__pyx_n_s__buff,&__pyx_n_s__obj,0};
+  __Pyx_RefNannySetupContext("accept");
+  __pyx_self = __pyx_self;
+  {
+    PyObject* values[4] = {0,0,0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__listening);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__accepting);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("accept", 1, 4, 4, 1); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  2:
+        values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__buff);
+        if (likely(values[2])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("accept", 1, 4, 4, 2); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  3:
+        values[3] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__obj);
+        if (likely(values[3])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("accept", 1, 4, 4, 3); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "accept") < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 4) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+      values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+    }
+    __pyx_v_listening = __Pyx_PyInt_AsLong(values[0]); if (unlikely((__pyx_v_listening == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_accepting = __Pyx_PyInt_AsLong(values[1]); if (unlikely((__pyx_v_accepting == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_buff = values[2];
+    __pyx_v_obj = values[3];
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("accept", 1, 4, 4, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.accept", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":15
+ *     cdef myOVERLAPPED *ov
+ * 
+ *     PyObject_AsWriteBuffer(buff, &mem_buffer, &size)             # <<<<<<<<<<<<<<
+ * 
+ *     ov = makeOV()
+ */
+  __pyx_t_1 = PyObject_AsWriteBuffer(__pyx_v_buff, (&__pyx_v_mem_buffer), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":17
+ *     PyObject_AsWriteBuffer(buff, &mem_buffer, &size)
+ * 
+ *     ov = makeOV()             # <<<<<<<<<<<<<<
+ *     if obj is not None:
+ *         ov.obj = <PyObject *>obj
+ */
+  __pyx_t_2 = __pyx_f_11iocpsupport_makeOV(); if (unlikely(__pyx_t_2 == NULL)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 17; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_ov = __pyx_t_2;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":18
+ * 
+ *     ov = makeOV()
+ *     if obj is not None:             # <<<<<<<<<<<<<<
+ *         ov.obj = <PyObject *>obj
+ * 
+ */
+  __pyx_t_3 = (__pyx_v_obj != Py_None);
+  if (__pyx_t_3) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":19
+ *     ov = makeOV()
+ *     if obj is not None:
+ *         ov.obj = <PyObject *>obj             # <<<<<<<<<<<<<<
+ * 
+ *     rc = lpAcceptEx(listening, accepting, mem_buffer, 0,
+ */
+    __pyx_v_ov->obj = ((struct PyObject *)__pyx_v_obj);
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":23
+ *     rc = lpAcceptEx(listening, accepting, mem_buffer, 0,
+ *                     <DWORD>size / 2, <DWORD>size / 2,
+ *                     &bytes, <OVERLAPPED *>ov)             # <<<<<<<<<<<<<<
+ *     if not rc:
+ *         rc = WSAGetLastError()
+ */
+  __pyx_v_rc = lpAcceptEx(__pyx_v_listening, __pyx_v_accepting, __pyx_v_mem_buffer, 0, (((__pyx_t_11iocpsupport_DWORD)__pyx_v_size) / 2), (((__pyx_t_11iocpsupport_DWORD)__pyx_v_size) / 2), (&__pyx_v_bytes), ((OVERLAPPED *)__pyx_v_ov));
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":24
+ *                     <DWORD>size / 2, <DWORD>size / 2,
+ *                     &bytes, <OVERLAPPED *>ov)
+ *     if not rc:             # <<<<<<<<<<<<<<
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:
+ */
+  __pyx_t_3 = (!__pyx_v_rc);
+  if (__pyx_t_3) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":25
+ *                     &bytes, <OVERLAPPED *>ov)
+ *     if not rc:
+ *         rc = WSAGetLastError()             # <<<<<<<<<<<<<<
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)
+ */
+    __pyx_v_rc = WSAGetLastError();
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":26
+ *     if not rc:
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:             # <<<<<<<<<<<<<<
+ *             PyMem_Free(ov)
+ *             return rc
+ */
+    __pyx_t_3 = (__pyx_v_rc != ERROR_IO_PENDING);
+    if (__pyx_t_3) {
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":27
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)             # <<<<<<<<<<<<<<
+ *             return rc
+ * 
+ */
+      PyMem_Free(__pyx_v_ov);
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":28
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)
+ *             return rc             # <<<<<<<<<<<<<<
+ * 
+ *     # operation is in progress
+ */
+      __Pyx_XDECREF(__pyx_r);
+      __pyx_t_4 = PyInt_FromLong(__pyx_v_rc); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_4);
+      __pyx_r = __pyx_t_4;
+      __pyx_t_4 = 0;
+      goto __pyx_L0;
+      goto __pyx_L8;
+    }
+    __pyx_L8:;
+    goto __pyx_L7;
+  }
+  __pyx_L7:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":31
+ * 
+ *     # operation is in progress
+ *     Py_XINCREF(obj)             # <<<<<<<<<<<<<<
+ *     return 0
+ * 
+ */
+  Py_XINCREF(__pyx_v_obj);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":32
+ *     # operation is in progress
+ *     Py_XINCREF(obj)
+ *     return 0             # <<<<<<<<<<<<<<
+ * 
+ * def get_accept_addrs(long s, object buff):
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __Pyx_INCREF(__pyx_int_0);
+  __pyx_r = __pyx_int_0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_AddTraceback("iocpsupport.accept", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
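As the docstring quoted above warns, accept() inverts the native AcceptEx() convention: it returns 0 both when the overlapped accept completes immediately and when it is merely pending (ERROR_IO_PENDING), and returns the WSAGetLastError() code only for an immediate failure, freeing the OVERLAPPED it allocated in that case. A hypothetical caller-side sketch of that contract (Windows-only; `listening`, `accepting`, `buff` and `key` are placeholder variables, not names from the upstream file):

    # Usage sketch following the return convention documented in the quoted
    # docstring; only meaningful on Windows with the built extension module.
    rc = iocpsupport.accept(listening.fileno(), accepting.fileno(), buff, key)
    if rc:                                  # immediate Winsock failure
        raise OSError(rc, "AcceptEx failed")
    # rc == 0: the accept completed or is pending; the result arrives later via
    # the completion port, and `key` (the obj argument) is kept alive until
    # then, as the Py_XINCREF above shows.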
+
+/* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":34
+ *     return 0
+ * 
+ * def get_accept_addrs(long s, object buff):             # <<<<<<<<<<<<<<
+ *     cdef WSAPROTOCOL_INFO wsa_pi
+ *     cdef int locallen, remotelen
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_4get_accept_addrs(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyMethodDef __pyx_mdef_11iocpsupport_4get_accept_addrs = {__Pyx_NAMESTR("get_accept_addrs"), (PyCFunction)__pyx_pf_11iocpsupport_4get_accept_addrs, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pf_11iocpsupport_4get_accept_addrs(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  long __pyx_v_s;
+  PyObject *__pyx_v_buff = 0;
+  int __pyx_v_locallen;
+  int __pyx_v_remotelen;
+  Py_ssize_t __pyx_v_size;
+  void *__pyx_v_mem_buffer;
+  struct sockaddr *__pyx_v_localaddr;
+  struct sockaddr *__pyx_v_remoteaddr;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  PyObject *__pyx_t_5 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__s,&__pyx_n_s__buff,0};
+  __Pyx_RefNannySetupContext("get_accept_addrs");
+  __pyx_self = __pyx_self;
+  {
+    PyObject* values[2] = {0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__s);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__buff);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("get_accept_addrs", 1, 2, 2, 1); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "get_accept_addrs") < 0)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 2) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+    }
+    __pyx_v_s = __Pyx_PyInt_AsLong(values[0]); if (unlikely((__pyx_v_s == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_buff = values[1];
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("get_accept_addrs", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[1]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.get_accept_addrs", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":41
+ *     cdef sockaddr *localaddr, *remoteaddr
+ * 
+ *     PyObject_AsReadBuffer(buff, &mem_buffer, &size)             # <<<<<<<<<<<<<<
+ * 
+ *     lpGetAcceptExSockaddrs(mem_buffer, 0, <DWORD>size / 2, <DWORD>size / 2,
+ */
+  __pyx_t_1 = PyObject_AsReadBuffer(__pyx_v_buff, (&__pyx_v_mem_buffer), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":44
+ * 
+ *     lpGetAcceptExSockaddrs(mem_buffer, 0, <DWORD>size / 2, <DWORD>size / 2,
+ *                            &localaddr, &locallen, &remoteaddr, &remotelen)             # <<<<<<<<<<<<<<
+ *     return remoteaddr.sa_family, _makesockaddr(localaddr, locallen), _makesockaddr(remoteaddr, remotelen)
+ * 
+ */
+  lpGetAcceptExSockaddrs(__pyx_v_mem_buffer, 0, (((__pyx_t_11iocpsupport_DWORD)__pyx_v_size) / 2), (((__pyx_t_11iocpsupport_DWORD)__pyx_v_size) / 2), (&__pyx_v_localaddr), (&__pyx_v_locallen), (&__pyx_v_remoteaddr), (&__pyx_v_remotelen));
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":45
+ *     lpGetAcceptExSockaddrs(mem_buffer, 0, <DWORD>size / 2, <DWORD>size / 2,
+ *                            &localaddr, &locallen, &remoteaddr, &remotelen)
+ *     return remoteaddr.sa_family, _makesockaddr(localaddr, locallen), _makesockaddr(remoteaddr, remotelen)             # <<<<<<<<<<<<<<
+ * 
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_2 = PyInt_FromLong(__pyx_v_remoteaddr->sa_family); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_2);
+  __pyx_t_3 = __pyx_f_11iocpsupport__makesockaddr(__pyx_v_localaddr, __pyx_v_locallen); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_3);
+  __pyx_t_4 = __pyx_f_11iocpsupport__makesockaddr(__pyx_v_remoteaddr, __pyx_v_remotelen); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_4);
+  __pyx_t_5 = PyTuple_New(3); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 45; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_5));
+  PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_2);
+  __Pyx_GIVEREF(__pyx_t_2);
+  PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_t_3);
+  __Pyx_GIVEREF(__pyx_t_3);
+  PyTuple_SET_ITEM(__pyx_t_5, 2, __pyx_t_4);
+  __Pyx_GIVEREF(__pyx_t_4);
+  __pyx_t_2 = 0;
+  __pyx_t_3 = 0;
+  __pyx_t_4 = 0;
+  __pyx_r = ((PyObject *)__pyx_t_5);
+  __pyx_t_5 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_AddTraceback("iocpsupport.get_accept_addrs", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
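get_accept_addrs reads the same buffer that was handed to accept(), lets GetAcceptExSockaddrs split it into the local and remote sockaddr blocks, and returns a three-tuple: the remote address family plus the two addresses converted by _makesockaddr. Caller-side, that looks roughly like this (placeholder variable names):

    # Hypothetical caller-side view of the return value built in the quoted code.
    family, local_addr, remote_addr = iocpsupport.get_accept_addrs(
        accepting.fileno(), buff)
    # `family` is remoteaddr.sa_family; `local_addr`/`remote_addr` are whatever
    # _makesockaddr produced, e.g. a (host, port) pair for IP sockets.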
+
+/* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":5
+ * 
+ * 
+ * def connect(long s, object addr, object obj):             # <<<<<<<<<<<<<<
+ *     """
+ *     CAUTION: unlike system ConnectEx(), this function returns 0 on success
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_5connect(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_11iocpsupport_5connect[] = "\n    CAUTION: unlike system ConnectEx(), this function returns 0 on success\n    ";
+static PyMethodDef __pyx_mdef_11iocpsupport_5connect = {__Pyx_NAMESTR("connect"), (PyCFunction)__pyx_pf_11iocpsupport_5connect, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(__pyx_doc_11iocpsupport_5connect)};
+static PyObject *__pyx_pf_11iocpsupport_5connect(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  long __pyx_v_s;
+  PyObject *__pyx_v_addr = 0;
+  PyObject *__pyx_v_obj = 0;
+  int __pyx_v_family;
+  int __pyx_v_rc;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_v_ov;
+  struct sockaddr_in __pyx_v_ipv4_name;
+  struct sockaddr_in6 __pyx_v_ipv6_name;
+  struct sockaddr *__pyx_v_name;
+  int __pyx_v_namelen;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_t_2;
+  int __pyx_t_3;
+  int __pyx_t_4;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_t_5;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__s,&__pyx_n_s__addr,&__pyx_n_s__obj,0};
+  __Pyx_RefNannySetupContext("connect");
+  __pyx_self = __pyx_self;
+  {
+    PyObject* values[3] = {0,0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__s);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__addr);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("connect", 1, 3, 3, 1); {__pyx_filename = __pyx_f[2]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  2:
+        values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__obj);
+        if (likely(values[2])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("connect", 1, 3, 3, 2); {__pyx_filename = __pyx_f[2]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "connect") < 0)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+    }
+    __pyx_v_s = __Pyx_PyInt_AsLong(values[0]); if (unlikely((__pyx_v_s == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_addr = values[1];
+    __pyx_v_obj = values[2];
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("connect", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[2]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.connect", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":16
+ *     cdef int namelen
+ * 
+ *     if not have_connectex:             # <<<<<<<<<<<<<<
+ *         raise ValueError, 'ConnectEx is not available on this system'
+ * 
+ */
+  __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__have_connectex); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_t_2 = __Pyx_PyObject_IsTrue(__pyx_t_1); if (unlikely(__pyx_t_2 < 0)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 16; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_3 = (!__pyx_t_2);
+  if (__pyx_t_3) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":17
+ * 
+ *     if not have_connectex:
+ *         raise ValueError, 'ConnectEx is not available on this system'             # <<<<<<<<<<<<<<
+ * 
+ *     family = getAddrFamily(s)
+ */
+    __Pyx_Raise(__pyx_builtin_ValueError, ((PyObject *)__pyx_kp_s_12), 0, 0);
+    {__pyx_filename = __pyx_f[2]; __pyx_lineno = 17; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":19
+ *         raise ValueError, 'ConnectEx is not available on this system'
+ * 
+ *     family = getAddrFamily(s)             # <<<<<<<<<<<<<<
+ *     if family == AF_INET:
+ *         name = <sockaddr *>&ipv4_name
+ */
+  __pyx_t_4 = __pyx_f_11iocpsupport_getAddrFamily(__pyx_v_s); if (unlikely(PyErr_Occurred())) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 19; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_family = __pyx_t_4;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":24
+ *         namelen = sizeof(ipv4_name)
+ *         fillinetaddr(&ipv4_name, addr)
+ *     elif family == AF_INET6:             # <<<<<<<<<<<<<<
+ *         name = <sockaddr *>&ipv6_name
+ *         namelen = sizeof(ipv6_name)
+ */
+  switch (__pyx_v_family) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":20
+ * 
+ *     family = getAddrFamily(s)
+ *     if family == AF_INET:             # <<<<<<<<<<<<<<
+ *         name = <sockaddr *>&ipv4_name
+ *         namelen = sizeof(ipv4_name)
+ */
+    case AF_INET:
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":21
+ *     family = getAddrFamily(s)
+ *     if family == AF_INET:
+ *         name = <sockaddr *>&ipv4_name             # <<<<<<<<<<<<<<
+ *         namelen = sizeof(ipv4_name)
+ *         fillinetaddr(&ipv4_name, addr)
+ */
+    __pyx_v_name = ((struct sockaddr *)(&__pyx_v_ipv4_name));
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":22
+ *     if family == AF_INET:
+ *         name = <sockaddr *>&ipv4_name
+ *         namelen = sizeof(ipv4_name)             # <<<<<<<<<<<<<<
+ *         fillinetaddr(&ipv4_name, addr)
+ *     elif family == AF_INET6:
+ */
+    __pyx_v_namelen = (sizeof(__pyx_v_ipv4_name));
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":23
+ *         name = <sockaddr *>&ipv4_name
+ *         namelen = sizeof(ipv4_name)
+ *         fillinetaddr(&ipv4_name, addr)             # <<<<<<<<<<<<<<
+ *     elif family == AF_INET6:
+ *         name = <sockaddr *>&ipv6_name
+ */
+    __pyx_t_1 = __pyx_f_11iocpsupport_fillinetaddr((&__pyx_v_ipv4_name), __pyx_v_addr); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 23; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_1);
+    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+    break;
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":24
+ *         namelen = sizeof(ipv4_name)
+ *         fillinetaddr(&ipv4_name, addr)
+ *     elif family == AF_INET6:             # <<<<<<<<<<<<<<
+ *         name = <sockaddr *>&ipv6_name
+ *         namelen = sizeof(ipv6_name)
+ */
+    case AF_INET6:
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":25
+ *         fillinetaddr(&ipv4_name, addr)
+ *     elif family == AF_INET6:
+ *         name = <sockaddr *>&ipv6_name             # <<<<<<<<<<<<<<
+ *         namelen = sizeof(ipv6_name)
+ *         fillinet6addr(&ipv6_name, addr)
+ */
+    __pyx_v_name = ((struct sockaddr *)(&__pyx_v_ipv6_name));
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":26
+ *     elif family == AF_INET6:
+ *         name = <sockaddr *>&ipv6_name
+ *         namelen = sizeof(ipv6_name)             # <<<<<<<<<<<<<<
+ *         fillinet6addr(&ipv6_name, addr)
+ *     else:
+ */
+    __pyx_v_namelen = (sizeof(__pyx_v_ipv6_name));
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":27
+ *         name = <sockaddr *>&ipv6_name
+ *         namelen = sizeof(ipv6_name)
+ *         fillinet6addr(&ipv6_name, addr)             # <<<<<<<<<<<<<<
+ *     else:
+ *         raise ValueError, 'unsupported address family'
+ */
+    __pyx_t_1 = __pyx_f_11iocpsupport_fillinet6addr((&__pyx_v_ipv6_name), __pyx_v_addr); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 27; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_1);
+    __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+    break;
+    default:
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":29
+ *         fillinet6addr(&ipv6_name, addr)
+ *     else:
+ *         raise ValueError, 'unsupported address family'             # <<<<<<<<<<<<<<
+ *     name.sa_family = family
+ * 
+ */
+    __Pyx_Raise(__pyx_builtin_ValueError, ((PyObject *)__pyx_kp_s_13), 0, 0);
+    {__pyx_filename = __pyx_f[2]; __pyx_lineno = 29; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    break;
+  }
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":30
+ *     else:
+ *         raise ValueError, 'unsupported address family'
+ *     name.sa_family = family             # <<<<<<<<<<<<<<
+ * 
+ *     ov = makeOV()
+ */
+  __pyx_v_name->sa_family = __pyx_v_family;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":32
+ *     name.sa_family = family
+ * 
+ *     ov = makeOV()             # <<<<<<<<<<<<<<
+ *     if obj is not None:
+ *         ov.obj = <PyObject *>obj
+ */
+  __pyx_t_5 = __pyx_f_11iocpsupport_makeOV(); if (unlikely(__pyx_t_5 == NULL)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 32; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_ov = __pyx_t_5;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":33
+ * 
+ *     ov = makeOV()
+ *     if obj is not None:             # <<<<<<<<<<<<<<
+ *         ov.obj = <PyObject *>obj
+ * 
+ */
+  __pyx_t_3 = (__pyx_v_obj != Py_None);
+  if (__pyx_t_3) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":34
+ *     ov = makeOV()
+ *     if obj is not None:
+ *         ov.obj = <PyObject *>obj             # <<<<<<<<<<<<<<
+ * 
+ *     rc = lpConnectEx(s, name, namelen, NULL, 0, NULL, <OVERLAPPED *>ov)
+ */
+    __pyx_v_ov->obj = ((struct PyObject *)__pyx_v_obj);
+    goto __pyx_L7;
+  }
+  __pyx_L7:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":36
+ *         ov.obj = <PyObject *>obj
+ * 
+ *     rc = lpConnectEx(s, name, namelen, NULL, 0, NULL, <OVERLAPPED *>ov)             # <<<<<<<<<<<<<<
+ * 
+ *     if not rc:
+ */
+  __pyx_v_rc = lpConnectEx(__pyx_v_s, __pyx_v_name, __pyx_v_namelen, NULL, 0, NULL, ((OVERLAPPED *)__pyx_v_ov));
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":38
+ *     rc = lpConnectEx(s, name, namelen, NULL, 0, NULL, <OVERLAPPED *>ov)
+ * 
+ *     if not rc:             # <<<<<<<<<<<<<<
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:
+ */
+  __pyx_t_3 = (!__pyx_v_rc);
+  if (__pyx_t_3) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":39
+ * 
+ *     if not rc:
+ *         rc = WSAGetLastError()             # <<<<<<<<<<<<<<
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)
+ */
+    __pyx_v_rc = WSAGetLastError();
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":40
+ *     if not rc:
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:             # <<<<<<<<<<<<<<
+ *             PyMem_Free(ov)
+ *             return rc
+ */
+    __pyx_t_3 = (__pyx_v_rc != ERROR_IO_PENDING);
+    if (__pyx_t_3) {
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":41
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)             # <<<<<<<<<<<<<<
+ *             return rc
+ * 
+ */
+      PyMem_Free(__pyx_v_ov);
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":42
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)
+ *             return rc             # <<<<<<<<<<<<<<
+ * 
+ *     # operation is in progress
+ */
+      __Pyx_XDECREF(__pyx_r);
+      __pyx_t_1 = PyInt_FromLong(__pyx_v_rc); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 42; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_1);
+      __pyx_r = __pyx_t_1;
+      __pyx_t_1 = 0;
+      goto __pyx_L0;
+      goto __pyx_L9;
+    }
+    __pyx_L9:;
+    goto __pyx_L8;
+  }
+  __pyx_L8:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":45
+ * 
+ *     # operation is in progress
+ *     Py_XINCREF(obj)             # <<<<<<<<<<<<<<
+ *     return 0
+ * 
+ */
+  Py_XINCREF(__pyx_v_obj);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":46
+ *     # operation is in progress
+ *     Py_XINCREF(obj)
+ *     return 0             # <<<<<<<<<<<<<<
+ * 
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __Pyx_INCREF(__pyx_int_0);
+  __pyx_r = __pyx_int_0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_AddTraceback("iocpsupport.connect", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":5
+ * 
+ * 
+ * def recv(long s, object bufflist, object obj, unsigned long flags = 0):             # <<<<<<<<<<<<<<
+ *     cdef int rc, res
+ *     cdef myOVERLAPPED *ov
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_6recv(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyMethodDef __pyx_mdef_11iocpsupport_6recv = {__Pyx_NAMESTR("recv"), (PyCFunction)__pyx_pf_11iocpsupport_6recv, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pf_11iocpsupport_6recv(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  long __pyx_v_s;
+  PyObject *__pyx_v_bufflist = 0;
+  PyObject *__pyx_v_obj = 0;
+  unsigned long __pyx_v_flags;
+  int __pyx_v_rc;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_v_ov;
+  WSABUF *__pyx_v_ws_buf;
+  unsigned long __pyx_v_bytes;
+  struct PyObject **__pyx_v_buffers;
+  Py_ssize_t __pyx_v_i;
+  Py_ssize_t __pyx_v_size;
+  Py_ssize_t __pyx_v_buffcount;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  void *__pyx_t_2;
+  Py_ssize_t __pyx_t_3;
+  int __pyx_t_4;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_t_5;
+  int __pyx_t_6;
+  PyObject *__pyx_t_7 = NULL;
+  PyObject *__pyx_t_8 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__s,&__pyx_n_s__bufflist,&__pyx_n_s__obj,&__pyx_n_s__flags,0};
+  __Pyx_RefNannySetupContext("recv");
+  __pyx_self = __pyx_self;
+  {
+    PyObject* values[4] = {0,0,0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__s);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__bufflist);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("recv", 0, 3, 4, 1); {__pyx_filename = __pyx_f[3]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  2:
+        values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__obj);
+        if (likely(values[2])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("recv", 0, 3, 4, 2); {__pyx_filename = __pyx_f[3]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  3:
+        if (kw_args > 0) {
+          PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__flags);
+          if (value) { values[3] = value; kw_args--; }
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "recv") < 0)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else {
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+    }
+    __pyx_v_s = __Pyx_PyInt_AsLong(values[0]); if (unlikely((__pyx_v_s == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_bufflist = values[1];
+    __pyx_v_obj = values[2];
+    if (values[3]) {
+      __pyx_v_flags = __Pyx_PyInt_AsUnsignedLong(values[3]); if (unlikely((__pyx_v_flags == (unsigned long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    } else {
+      __pyx_v_flags = ((unsigned long)0);
+    }
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("recv", 0, 3, 4, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[3]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.recv", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+  __Pyx_INCREF(__pyx_v_bufflist);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":13
+ *     cdef Py_ssize_t i, size, buffcount
+ * 
+ *     bufflist = PySequence_Fast(bufflist, 'second argument needs to be a list')             # <<<<<<<<<<<<<<
+ *     buffcount = PySequence_Fast_GET_SIZE(bufflist)
+ *     buffers = PySequence_Fast_ITEMS(bufflist)
+ */
+  __pyx_t_1 = PySequence_Fast(__pyx_v_bufflist, __pyx_k_14); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 13; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __Pyx_DECREF(__pyx_v_bufflist);
+  __pyx_v_bufflist = __pyx_t_1;
+  __pyx_t_1 = 0;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":14
+ * 
+ *     bufflist = PySequence_Fast(bufflist, 'second argument needs to be a list')
+ *     buffcount = PySequence_Fast_GET_SIZE(bufflist)             # <<<<<<<<<<<<<<
+ *     buffers = PySequence_Fast_ITEMS(bufflist)
+ * 
+ */
+  __pyx_v_buffcount = PySequence_Fast_GET_SIZE(__pyx_v_bufflist);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":15
+ *     bufflist = PySequence_Fast(bufflist, 'second argument needs to be a list')
+ *     buffcount = PySequence_Fast_GET_SIZE(bufflist)
+ *     buffers = PySequence_Fast_ITEMS(bufflist)             # <<<<<<<<<<<<<<
+ * 
+ *     ws_buf = <WSABUF *>PyMem_Malloc(buffcount*sizeof(WSABUF))
+ */
+  __pyx_v_buffers = PySequence_Fast_ITEMS(__pyx_v_bufflist);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":17
+ *     buffers = PySequence_Fast_ITEMS(bufflist)
+ * 
+ *     ws_buf = <WSABUF *>PyMem_Malloc(buffcount*sizeof(WSABUF))             # <<<<<<<<<<<<<<
+ * 
+ *     try:
+ */
+  __pyx_t_2 = PyMem_Malloc((__pyx_v_buffcount * (sizeof(WSABUF)))); if (unlikely(__pyx_t_2 == NULL)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 17; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_ws_buf = ((WSABUF *)__pyx_t_2);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":19
+ *     ws_buf = <WSABUF *>PyMem_Malloc(buffcount*sizeof(WSABUF))
+ * 
+ *     try:             # <<<<<<<<<<<<<<
+ *         for i from 0 <= i < buffcount:
+ *             PyObject_AsWriteBuffer(<object>buffers[i], <void **>&ws_buf[i].buf, &size)
+ */
+  /*try:*/ {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":20
+ * 
+ *     try:
+ *         for i from 0 <= i < buffcount:             # <<<<<<<<<<<<<<
+ *             PyObject_AsWriteBuffer(<object>buffers[i], <void **>&ws_buf[i].buf, &size)
+ *             ws_buf[i].len = <DWORD>size
+ */
+    __pyx_t_3 = __pyx_v_buffcount;
+    for (__pyx_v_i = 0; __pyx_v_i < __pyx_t_3; __pyx_v_i++) {
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":21
+ *     try:
+ *         for i from 0 <= i < buffcount:
+ *             PyObject_AsWriteBuffer(<object>buffers[i], <void **>&ws_buf[i].buf, &size)             # <<<<<<<<<<<<<<
+ *             ws_buf[i].len = <DWORD>size
+ * 
+ */
+      __pyx_t_1 = ((PyObject *)(__pyx_v_buffers[__pyx_v_i]));
+      __Pyx_INCREF(__pyx_t_1);
+      __pyx_t_4 = PyObject_AsWriteBuffer(__pyx_t_1, ((void **)(&(__pyx_v_ws_buf[__pyx_v_i]).buf)), (&__pyx_v_size)); if (unlikely(__pyx_t_4 == -1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L7;}
+      __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":22
+ *         for i from 0 <= i < buffcount:
+ *             PyObject_AsWriteBuffer(<object>buffers[i], <void **>&ws_buf[i].buf, &size)
+ *             ws_buf[i].len = <DWORD>size             # <<<<<<<<<<<<<<
+ * 
+ *         ov = makeOV()
+ */
+      (__pyx_v_ws_buf[__pyx_v_i]).len = ((__pyx_t_11iocpsupport_DWORD)__pyx_v_size);
+    }
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":24
+ *             ws_buf[i].len = <DWORD>size
+ * 
+ *         ov = makeOV()             # <<<<<<<<<<<<<<
+ *         if obj is not None:
+ *             ov.obj = <PyObject *>obj
+ */
+    __pyx_t_5 = __pyx_f_11iocpsupport_makeOV(); if (unlikely(__pyx_t_5 == NULL)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 24; __pyx_clineno = __LINE__; goto __pyx_L7;}
+    __pyx_v_ov = __pyx_t_5;
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":25
+ * 
+ *         ov = makeOV()
+ *         if obj is not None:             # <<<<<<<<<<<<<<
+ *             ov.obj = <PyObject *>obj
+ * 
+ */
+    __pyx_t_6 = (__pyx_v_obj != Py_None);
+    if (__pyx_t_6) {
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":26
+ *         ov = makeOV()
+ *         if obj is not None:
+ *             ov.obj = <PyObject *>obj             # <<<<<<<<<<<<<<
+ * 
+ *         rc = WSARecv(s, ws_buf, <DWORD>buffcount, &bytes, &flags, <OVERLAPPED *>ov, NULL)
+ */
+      __pyx_v_ov->obj = ((struct PyObject *)__pyx_v_obj);
+      goto __pyx_L11;
+    }
+    __pyx_L11:;
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":28
+ *             ov.obj = <PyObject *>obj
+ * 
+ *         rc = WSARecv(s, ws_buf, <DWORD>buffcount, &bytes, &flags, <OVERLAPPED *>ov, NULL)             # <<<<<<<<<<<<<<
+ * 
+ *         if rc == SOCKET_ERROR:
+ */
+    __pyx_v_rc = WSARecv(__pyx_v_s, __pyx_v_ws_buf, ((__pyx_t_11iocpsupport_DWORD)__pyx_v_buffcount), (&__pyx_v_bytes), (&__pyx_v_flags), ((OVERLAPPED *)__pyx_v_ov), NULL);
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":30
+ *         rc = WSARecv(s, ws_buf, <DWORD>buffcount, &bytes, &flags, <OVERLAPPED *>ov, NULL)
+ * 
+ *         if rc == SOCKET_ERROR:             # <<<<<<<<<<<<<<
+ *             rc = WSAGetLastError()
+ *             if rc != ERROR_IO_PENDING:
+ */
+    __pyx_t_6 = (__pyx_v_rc == SOCKET_ERROR);
+    if (__pyx_t_6) {
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":31
+ * 
+ *         if rc == SOCKET_ERROR:
+ *             rc = WSAGetLastError()             # <<<<<<<<<<<<<<
+ *             if rc != ERROR_IO_PENDING:
+ *                 PyMem_Free(ov)
+ */
+      __pyx_v_rc = WSAGetLastError();
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":32
+ *         if rc == SOCKET_ERROR:
+ *             rc = WSAGetLastError()
+ *             if rc != ERROR_IO_PENDING:             # <<<<<<<<<<<<<<
+ *                 PyMem_Free(ov)
+ *                 return rc, 0
+ */
+      __pyx_t_6 = (__pyx_v_rc != ERROR_IO_PENDING);
+      if (__pyx_t_6) {
+
+        /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":33
+ *             rc = WSAGetLastError()
+ *             if rc != ERROR_IO_PENDING:
+ *                 PyMem_Free(ov)             # <<<<<<<<<<<<<<
+ *                 return rc, 0
+ * 
+ */
+        PyMem_Free(__pyx_v_ov);
+
+        /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":34
+ *             if rc != ERROR_IO_PENDING:
+ *                 PyMem_Free(ov)
+ *                 return rc, 0             # <<<<<<<<<<<<<<
+ * 
+ *         Py_XINCREF(obj)
+ */
+        __Pyx_XDECREF(__pyx_r);
+        __pyx_t_1 = PyInt_FromLong(__pyx_v_rc); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L7;}
+        __Pyx_GOTREF(__pyx_t_1);
+        __pyx_t_7 = PyTuple_New(2); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L7;}
+        __Pyx_GOTREF(((PyObject *)__pyx_t_7));
+        PyTuple_SET_ITEM(__pyx_t_7, 0, __pyx_t_1);
+        __Pyx_GIVEREF(__pyx_t_1);
+        __Pyx_INCREF(__pyx_int_0);
+        PyTuple_SET_ITEM(__pyx_t_7, 1, __pyx_int_0);
+        __Pyx_GIVEREF(__pyx_int_0);
+        __pyx_t_1 = 0;
+        __pyx_r = ((PyObject *)__pyx_t_7);
+        __pyx_t_7 = 0;
+        goto __pyx_L6;
+        goto __pyx_L13;
+      }
+      __pyx_L13:;
+      goto __pyx_L12;
+    }
+    __pyx_L12:;
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":36
+ *                 return rc, 0
+ * 
+ *         Py_XINCREF(obj)             # <<<<<<<<<<<<<<
+ *         return rc, bytes
+ *     finally:
+ */
+    Py_XINCREF(__pyx_v_obj);
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":37
+ * 
+ *         Py_XINCREF(obj)
+ *         return rc, bytes             # <<<<<<<<<<<<<<
+ *     finally:
+ *         PyMem_Free(ws_buf)
+ */
+    __Pyx_XDECREF(__pyx_r);
+    __pyx_t_7 = PyInt_FromLong(__pyx_v_rc); if (unlikely(!__pyx_t_7)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L7;}
+    __Pyx_GOTREF(__pyx_t_7);
+    __pyx_t_1 = PyLong_FromUnsignedLong(__pyx_v_bytes); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L7;}
+    __Pyx_GOTREF(__pyx_t_1);
+    __pyx_t_8 = PyTuple_New(2); if (unlikely(!__pyx_t_8)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 37; __pyx_clineno = __LINE__; goto __pyx_L7;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_8));
+    PyTuple_SET_ITEM(__pyx_t_8, 0, __pyx_t_7);
+    __Pyx_GIVEREF(__pyx_t_7);
+    PyTuple_SET_ITEM(__pyx_t_8, 1, __pyx_t_1);
+    __Pyx_GIVEREF(__pyx_t_1);
+    __pyx_t_7 = 0;
+    __pyx_t_1 = 0;
+    __pyx_r = ((PyObject *)__pyx_t_8);
+    __pyx_t_8 = 0;
+    goto __pyx_L6;
+  }
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":39
+ *         return rc, bytes
+ *     finally:
+ *         PyMem_Free(ws_buf)             # <<<<<<<<<<<<<<
+ * 
+ * def recvfrom(long s, object buff, object addr_buff, object addr_len_buff, object obj, unsigned long flags = 0):
+ */
+  /*finally:*/ {
+    int __pyx_why;
+    PyObject *__pyx_exc_type, *__pyx_exc_value, *__pyx_exc_tb;
+    int __pyx_exc_lineno;
+    __pyx_exc_type = 0; __pyx_exc_value = 0; __pyx_exc_tb = 0; __pyx_exc_lineno = 0;
+    __pyx_why = 0; goto __pyx_L8;
+    __pyx_L6: __pyx_exc_type = 0; __pyx_exc_value = 0; __pyx_exc_tb = 0; __pyx_exc_lineno = 0;
+    __pyx_why = 3; goto __pyx_L8;
+    __pyx_L7: {
+      __pyx_why = 4;
+      __Pyx_XDECREF(__pyx_t_7); __pyx_t_7 = 0;
+      __Pyx_XDECREF(__pyx_t_1); __pyx_t_1 = 0;
+      __Pyx_XDECREF(__pyx_t_8); __pyx_t_8 = 0;
+      __Pyx_ErrFetch(&__pyx_exc_type, &__pyx_exc_value, &__pyx_exc_tb);
+      __pyx_exc_lineno = __pyx_lineno;
+      goto __pyx_L8;
+    }
+    __pyx_L8:;
+    PyMem_Free(__pyx_v_ws_buf);
+    switch (__pyx_why) {
+      case 3: goto __pyx_L0;
+      case 4: {
+        __Pyx_ErrRestore(__pyx_exc_type, __pyx_exc_value, __pyx_exc_tb);
+        __pyx_lineno = __pyx_exc_lineno;
+        __pyx_exc_type = 0;
+        __pyx_exc_value = 0;
+        __pyx_exc_tb = 0;
+        goto __pyx_L1_error;
+      }
+    }
+  }
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_7);
+  __Pyx_XDECREF(__pyx_t_8);
+  __Pyx_AddTraceback("iocpsupport.recv", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XDECREF(__pyx_v_bufflist);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":41
+ *         PyMem_Free(ws_buf)
+ * 
+ * def recvfrom(long s, object buff, object addr_buff, object addr_len_buff, object obj, unsigned long flags = 0):             # <<<<<<<<<<<<<<
+ *     cdef int rc, c_addr_buff_len, c_addr_len_buff_len
+ *     cdef myOVERLAPPED *ov
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_7recvfrom(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyMethodDef __pyx_mdef_11iocpsupport_7recvfrom = {__Pyx_NAMESTR("recvfrom"), (PyCFunction)__pyx_pf_11iocpsupport_7recvfrom, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pf_11iocpsupport_7recvfrom(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  long __pyx_v_s;
+  PyObject *__pyx_v_buff = 0;
+  PyObject *__pyx_v_addr_buff = 0;
+  PyObject *__pyx_v_addr_len_buff = 0;
+  PyObject *__pyx_v_obj = 0;
+  unsigned long __pyx_v_flags;
+  int __pyx_v_rc;
+  int __pyx_v_c_addr_buff_len;
+  int __pyx_v_c_addr_len_buff_len;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_v_ov;
+  WSABUF __pyx_v_ws_buf;
+  unsigned long __pyx_v_bytes;
+  struct sockaddr *__pyx_v_c_addr_buff;
+  int *__pyx_v_c_addr_len_buff;
+  Py_ssize_t __pyx_v_size;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  int __pyx_t_2;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_t_3;
+  PyObject *__pyx_t_4 = NULL;
+  PyObject *__pyx_t_5 = NULL;
+  PyObject *__pyx_t_6 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__s,&__pyx_n_s__buff,&__pyx_n_s__addr_buff,&__pyx_n_s__addr_len_buff,&__pyx_n_s__obj,&__pyx_n_s__flags,0};
+  __Pyx_RefNannySetupContext("recvfrom");
+  __pyx_self = __pyx_self;
+  {
+    PyObject* values[6] = {0,0,0,0,0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+        case  5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__s);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__buff);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("recvfrom", 0, 5, 6, 1); {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  2:
+        values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__addr_buff);
+        if (likely(values[2])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("recvfrom", 0, 5, 6, 2); {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  3:
+        values[3] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__addr_len_buff);
+        if (likely(values[3])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("recvfrom", 0, 5, 6, 3); {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  4:
+        values[4] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__obj);
+        if (likely(values[4])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("recvfrom", 0, 5, 6, 4); {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  5:
+        if (kw_args > 0) {
+          PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__flags);
+          if (value) { values[5] = value; kw_args--; }
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "recvfrom") < 0)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else {
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  6: values[5] = PyTuple_GET_ITEM(__pyx_args, 5);
+        case  5: values[4] = PyTuple_GET_ITEM(__pyx_args, 4);
+        values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+    }
+    __pyx_v_s = __Pyx_PyInt_AsLong(values[0]); if (unlikely((__pyx_v_s == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_buff = values[1];
+    __pyx_v_addr_buff = values[2];
+    __pyx_v_addr_len_buff = values[3];
+    __pyx_v_obj = values[4];
+    if (values[5]) {
+      __pyx_v_flags = __Pyx_PyInt_AsUnsignedLong(values[5]); if (unlikely((__pyx_v_flags == (unsigned long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    } else {
+      __pyx_v_flags = ((unsigned long)0);
+    }
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("recvfrom", 0, 5, 6, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.recvfrom", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":50
+ *     cdef Py_ssize_t size
+ * 
+ *     PyObject_AsWriteBuffer(buff, <void **>&ws_buf.buf, &size)             # <<<<<<<<<<<<<<
+ *     ws_buf.len = <DWORD>size
+ *     PyObject_AsWriteBuffer(addr_buff, <void **>&c_addr_buff, &size)
+ */
+  __pyx_t_1 = PyObject_AsWriteBuffer(__pyx_v_buff, ((void **)(&__pyx_v_ws_buf.buf)), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 50; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":51
+ * 
+ *     PyObject_AsWriteBuffer(buff, <void **>&ws_buf.buf, &size)
+ *     ws_buf.len = <DWORD>size             # <<<<<<<<<<<<<<
+ *     PyObject_AsWriteBuffer(addr_buff, <void **>&c_addr_buff, &size)
+ *     c_addr_buff_len = <int>size
+ */
+  __pyx_v_ws_buf.len = ((__pyx_t_11iocpsupport_DWORD)__pyx_v_size);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":52
+ *     PyObject_AsWriteBuffer(buff, <void **>&ws_buf.buf, &size)
+ *     ws_buf.len = <DWORD>size
+ *     PyObject_AsWriteBuffer(addr_buff, <void **>&c_addr_buff, &size)             # <<<<<<<<<<<<<<
+ *     c_addr_buff_len = <int>size
+ *     PyObject_AsWriteBuffer(addr_len_buff, <void **>&c_addr_len_buff, &size)
+ */
+  __pyx_t_1 = PyObject_AsWriteBuffer(__pyx_v_addr_buff, ((void **)(&__pyx_v_c_addr_buff)), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 52; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":53
+ *     ws_buf.len = <DWORD>size
+ *     PyObject_AsWriteBuffer(addr_buff, <void **>&c_addr_buff, &size)
+ *     c_addr_buff_len = <int>size             # <<<<<<<<<<<<<<
+ *     PyObject_AsWriteBuffer(addr_len_buff, <void **>&c_addr_len_buff, &size)
+ *     c_addr_len_buff_len = <int>size
+ */
+  __pyx_v_c_addr_buff_len = ((int)__pyx_v_size);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":54
+ *     PyObject_AsWriteBuffer(addr_buff, <void **>&c_addr_buff, &size)
+ *     c_addr_buff_len = <int>size
+ *     PyObject_AsWriteBuffer(addr_len_buff, <void **>&c_addr_len_buff, &size)             # <<<<<<<<<<<<<<
+ *     c_addr_len_buff_len = <int>size
+ * 
+ */
+  __pyx_t_1 = PyObject_AsWriteBuffer(__pyx_v_addr_len_buff, ((void **)(&__pyx_v_c_addr_len_buff)), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 54; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":55
+ *     c_addr_buff_len = <int>size
+ *     PyObject_AsWriteBuffer(addr_len_buff, <void **>&c_addr_len_buff, &size)
+ *     c_addr_len_buff_len = <int>size             # <<<<<<<<<<<<<<
+ * 
+ *     if c_addr_len_buff_len != sizeof(int):
+ */
+  __pyx_v_c_addr_len_buff_len = ((int)__pyx_v_size);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":57
+ *     c_addr_len_buff_len = <int>size
+ * 
+ *     if c_addr_len_buff_len != sizeof(int):             # <<<<<<<<<<<<<<
+ *         raise ValueError, 'length of address length buffer needs to be sizeof(int)'
+ * 
+ */
+  __pyx_t_2 = (__pyx_v_c_addr_len_buff_len != (sizeof(int)));
+  if (__pyx_t_2) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":58
+ * 
+ *     if c_addr_len_buff_len != sizeof(int):
+ *         raise ValueError, 'length of address length buffer needs to be sizeof(int)'             # <<<<<<<<<<<<<<
+ * 
+ *     c_addr_len_buff[0] = c_addr_buff_len
+ */
+    __Pyx_Raise(__pyx_builtin_ValueError, ((PyObject *)__pyx_kp_s_15), 0, 0);
+    {__pyx_filename = __pyx_f[3]; __pyx_lineno = 58; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":60
+ *         raise ValueError, 'length of address length buffer needs to be sizeof(int)'
+ * 
+ *     c_addr_len_buff[0] = c_addr_buff_len             # <<<<<<<<<<<<<<
+ * 
+ *     ov = makeOV()
+ */
+  (__pyx_v_c_addr_len_buff[0]) = __pyx_v_c_addr_buff_len;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":62
+ *     c_addr_len_buff[0] = c_addr_buff_len
+ * 
+ *     ov = makeOV()             # <<<<<<<<<<<<<<
+ *     if obj is not None:
+ *         ov.obj = <PyObject *>obj
+ */
+  __pyx_t_3 = __pyx_f_11iocpsupport_makeOV(); if (unlikely(__pyx_t_3 == NULL)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 62; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_ov = __pyx_t_3;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":63
+ * 
+ *     ov = makeOV()
+ *     if obj is not None:             # <<<<<<<<<<<<<<
+ *         ov.obj = <PyObject *>obj
+ * 
+ */
+  __pyx_t_2 = (__pyx_v_obj != Py_None);
+  if (__pyx_t_2) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":64
+ *     ov = makeOV()
+ *     if obj is not None:
+ *         ov.obj = <PyObject *>obj             # <<<<<<<<<<<<<<
+ * 
+ *     rc = WSARecvFrom(s, &ws_buf, 1, &bytes, &flags, c_addr_buff, c_addr_len_buff, <OVERLAPPED *>ov, NULL)
+ */
+    __pyx_v_ov->obj = ((struct PyObject *)__pyx_v_obj);
+    goto __pyx_L7;
+  }
+  __pyx_L7:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":66
+ *         ov.obj = <PyObject *>obj
+ * 
+ *     rc = WSARecvFrom(s, &ws_buf, 1, &bytes, &flags, c_addr_buff, c_addr_len_buff, <OVERLAPPED *>ov, NULL)             # <<<<<<<<<<<<<<
+ * 
+ *     if rc == SOCKET_ERROR:
+ */
+  __pyx_v_rc = WSARecvFrom(__pyx_v_s, (&__pyx_v_ws_buf), 1, (&__pyx_v_bytes), (&__pyx_v_flags), __pyx_v_c_addr_buff, __pyx_v_c_addr_len_buff, ((OVERLAPPED *)__pyx_v_ov), NULL);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":68
+ *     rc = WSARecvFrom(s, &ws_buf, 1, &bytes, &flags, c_addr_buff, c_addr_len_buff, <OVERLAPPED *>ov, NULL)
+ * 
+ *     if rc == SOCKET_ERROR:             # <<<<<<<<<<<<<<
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:
+ */
+  __pyx_t_2 = (__pyx_v_rc == SOCKET_ERROR);
+  if (__pyx_t_2) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":69
+ * 
+ *     if rc == SOCKET_ERROR:
+ *         rc = WSAGetLastError()             # <<<<<<<<<<<<<<
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)
+ */
+    __pyx_v_rc = WSAGetLastError();
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":70
+ *     if rc == SOCKET_ERROR:
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:             # <<<<<<<<<<<<<<
+ *             PyMem_Free(ov)
+ *             return rc, 0
+ */
+    __pyx_t_2 = (__pyx_v_rc != ERROR_IO_PENDING);
+    if (__pyx_t_2) {
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":71
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)             # <<<<<<<<<<<<<<
+ *             return rc, 0
+ * 
+ */
+      PyMem_Free(__pyx_v_ov);
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":72
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)
+ *             return rc, 0             # <<<<<<<<<<<<<<
+ * 
+ *     Py_XINCREF(obj)
+ */
+      __Pyx_XDECREF(__pyx_r);
+      __pyx_t_4 = PyInt_FromLong(__pyx_v_rc); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_4);
+      __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 72; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(((PyObject *)__pyx_t_5));
+      PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_4);
+      __Pyx_GIVEREF(__pyx_t_4);
+      __Pyx_INCREF(__pyx_int_0);
+      PyTuple_SET_ITEM(__pyx_t_5, 1, __pyx_int_0);
+      __Pyx_GIVEREF(__pyx_int_0);
+      __pyx_t_4 = 0;
+      __pyx_r = ((PyObject *)__pyx_t_5);
+      __pyx_t_5 = 0;
+      goto __pyx_L0;
+      goto __pyx_L9;
+    }
+    __pyx_L9:;
+    goto __pyx_L8;
+  }
+  __pyx_L8:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":74
+ *             return rc, 0
+ * 
+ *     Py_XINCREF(obj)             # <<<<<<<<<<<<<<
+ *     return rc, bytes
+ * 
+ */
+  Py_XINCREF(__pyx_v_obj);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":75
+ * 
+ *     Py_XINCREF(obj)
+ *     return rc, bytes             # <<<<<<<<<<<<<<
+ * 
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_5 = PyInt_FromLong(__pyx_v_rc); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 75; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_5);
+  __pyx_t_4 = PyLong_FromUnsignedLong(__pyx_v_bytes); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 75; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_4);
+  __pyx_t_6 = PyTuple_New(2); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 75; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_6));
+  PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_5);
+  __Pyx_GIVEREF(__pyx_t_5);
+  PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_4);
+  __Pyx_GIVEREF(__pyx_t_4);
+  __pyx_t_5 = 0;
+  __pyx_t_4 = 0;
+  __pyx_r = ((PyObject *)__pyx_t_6);
+  __pyx_t_6 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_XDECREF(__pyx_t_6);
+  __Pyx_AddTraceback("iocpsupport.recvfrom", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":5
+ * 
+ * 
+ * def send(long s, object buff, object obj, unsigned long flags = 0):             # <<<<<<<<<<<<<<
+ *     cdef int rc
+ *     cdef myOVERLAPPED *ov
+ */
+
+static PyObject *__pyx_pf_11iocpsupport_8send(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static PyMethodDef __pyx_mdef_11iocpsupport_8send = {__Pyx_NAMESTR("send"), (PyCFunction)__pyx_pf_11iocpsupport_8send, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)};
+static PyObject *__pyx_pf_11iocpsupport_8send(PyObject *__pyx_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  long __pyx_v_s;
+  PyObject *__pyx_v_buff = 0;
+  PyObject *__pyx_v_obj = 0;
+  unsigned long __pyx_v_flags;
+  int __pyx_v_rc;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_v_ov;
+  WSABUF __pyx_v_ws_buf;
+  unsigned long __pyx_v_bytes;
+  Py_ssize_t __pyx_v_size;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  struct __pyx_t_11iocpsupport_myOVERLAPPED *__pyx_t_2;
+  int __pyx_t_3;
+  PyObject *__pyx_t_4 = NULL;
+  PyObject *__pyx_t_5 = NULL;
+  PyObject *__pyx_t_6 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__s,&__pyx_n_s__buff,&__pyx_n_s__obj,&__pyx_n_s__flags,0};
+  __Pyx_RefNannySetupContext("send");
+  __pyx_self = __pyx_self;
+  {
+    PyObject* values[4] = {0,0,0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__s);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__buff);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("send", 0, 3, 4, 1); {__pyx_filename = __pyx_f[4]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  2:
+        values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__obj);
+        if (likely(values[2])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("send", 0, 3, 4, 2); {__pyx_filename = __pyx_f[4]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  3:
+        if (kw_args > 0) {
+          PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__flags);
+          if (value) { values[3] = value; kw_args--; }
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "send") < 0)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else {
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  4: values[3] = PyTuple_GET_ITEM(__pyx_args, 3);
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+    }
+    __pyx_v_s = __Pyx_PyInt_AsLong(values[0]); if (unlikely((__pyx_v_s == (long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_buff = values[1];
+    __pyx_v_obj = values[2];
+    if (values[3]) {
+      __pyx_v_flags = __Pyx_PyInt_AsUnsignedLong(values[3]); if (unlikely((__pyx_v_flags == (unsigned long)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    } else {
+      __pyx_v_flags = ((unsigned long)0);
+    }
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("send", 0, 3, 4, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[4]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("iocpsupport.send", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":12
+ *     cdef Py_ssize_t size
+ * 
+ *     PyObject_AsReadBuffer(buff, <void **>&ws_buf.buf, &size)             # <<<<<<<<<<<<<<
+ *     ws_buf.len = <DWORD>size
+ * 
+ */
+  __pyx_t_1 = PyObject_AsReadBuffer(__pyx_v_buff, ((void **)(&__pyx_v_ws_buf.buf)), (&__pyx_v_size)); if (unlikely(__pyx_t_1 == -1)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 12; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":13
+ * 
+ *     PyObject_AsReadBuffer(buff, <void **>&ws_buf.buf, &size)
+ *     ws_buf.len = <DWORD>size             # <<<<<<<<<<<<<<
+ * 
+ *     ov = makeOV()
+ */
+  __pyx_v_ws_buf.len = ((__pyx_t_11iocpsupport_DWORD)__pyx_v_size);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":15
+ *     ws_buf.len = <DWORD>size
+ * 
+ *     ov = makeOV()             # <<<<<<<<<<<<<<
+ *     if obj is not None:
+ *         ov.obj = <PyObject *>obj
+ */
+  __pyx_t_2 = __pyx_f_11iocpsupport_makeOV(); if (unlikely(__pyx_t_2 == NULL)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 15; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_v_ov = __pyx_t_2;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":16
+ * 
+ *     ov = makeOV()
+ *     if obj is not None:             # <<<<<<<<<<<<<<
+ *         ov.obj = <PyObject *>obj
+ * 
+ */
+  __pyx_t_3 = (__pyx_v_obj != Py_None);
+  if (__pyx_t_3) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":17
+ *     ov = makeOV()
+ *     if obj is not None:
+ *         ov.obj = <PyObject *>obj             # <<<<<<<<<<<<<<
+ * 
+ *     rc = WSASend(s, &ws_buf, 1, &bytes, flags, <OVERLAPPED *>ov, NULL)
+ */
+    __pyx_v_ov->obj = ((struct PyObject *)__pyx_v_obj);
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":19
+ *         ov.obj = <PyObject *>obj
+ * 
+ *     rc = WSASend(s, &ws_buf, 1, &bytes, flags, <OVERLAPPED *>ov, NULL)             # <<<<<<<<<<<<<<
+ * 
+ *     if rc == SOCKET_ERROR:
+ */
+  __pyx_v_rc = WSASend(__pyx_v_s, (&__pyx_v_ws_buf), 1, (&__pyx_v_bytes), __pyx_v_flags, ((OVERLAPPED *)__pyx_v_ov), NULL);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":21
+ *     rc = WSASend(s, &ws_buf, 1, &bytes, flags, <OVERLAPPED *>ov, NULL)
+ * 
+ *     if rc == SOCKET_ERROR:             # <<<<<<<<<<<<<<
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:
+ */
+  __pyx_t_3 = (__pyx_v_rc == SOCKET_ERROR);
+  if (__pyx_t_3) {
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":22
+ * 
+ *     if rc == SOCKET_ERROR:
+ *         rc = WSAGetLastError()             # <<<<<<<<<<<<<<
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)
+ */
+    __pyx_v_rc = WSAGetLastError();
+
+    /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":23
+ *     if rc == SOCKET_ERROR:
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:             # <<<<<<<<<<<<<<
+ *             PyMem_Free(ov)
+ *             return rc, bytes
+ */
+    __pyx_t_3 = (__pyx_v_rc != ERROR_IO_PENDING);
+    if (__pyx_t_3) {
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":24
+ *         rc = WSAGetLastError()
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)             # <<<<<<<<<<<<<<
+ *             return rc, bytes
+ * 
+ */
+      PyMem_Free(__pyx_v_ov);
+
+      /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":25
+ *         if rc != ERROR_IO_PENDING:
+ *             PyMem_Free(ov)
+ *             return rc, bytes             # <<<<<<<<<<<<<<
+ * 
+ *     Py_XINCREF(obj)
+ */
+      __Pyx_XDECREF(__pyx_r);
+      __pyx_t_4 = PyInt_FromLong(__pyx_v_rc); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_4);
+      __pyx_t_5 = PyLong_FromUnsignedLong(__pyx_v_bytes); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_5);
+      __pyx_t_6 = PyTuple_New(2); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 25; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(((PyObject *)__pyx_t_6));
+      PyTuple_SET_ITEM(__pyx_t_6, 0, __pyx_t_4);
+      __Pyx_GIVEREF(__pyx_t_4);
+      PyTuple_SET_ITEM(__pyx_t_6, 1, __pyx_t_5);
+      __Pyx_GIVEREF(__pyx_t_5);
+      __pyx_t_4 = 0;
+      __pyx_t_5 = 0;
+      __pyx_r = ((PyObject *)__pyx_t_6);
+      __pyx_t_6 = 0;
+      goto __pyx_L0;
+      goto __pyx_L8;
+    }
+    __pyx_L8:;
+    goto __pyx_L7;
+  }
+  __pyx_L7:;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":27
+ *             return rc, bytes
+ * 
+ *     Py_XINCREF(obj)             # <<<<<<<<<<<<<<
+ *     return rc, bytes
+ * 
+ */
+  Py_XINCREF(__pyx_v_obj);
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":28
+ * 
+ *     Py_XINCREF(obj)
+ *     return rc, bytes             # <<<<<<<<<<<<<<
+ * 
+ * 
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_6 = PyInt_FromLong(__pyx_v_rc); if (unlikely(!__pyx_t_6)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_6);
+  __pyx_t_5 = PyLong_FromUnsignedLong(__pyx_v_bytes); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_5);
+  __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 28; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+  PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_6);
+  __Pyx_GIVEREF(__pyx_t_6);
+  PyTuple_SET_ITEM(__pyx_t_4, 1, __pyx_t_5);
+  __Pyx_GIVEREF(__pyx_t_5);
+  __pyx_t_6 = 0;
+  __pyx_t_5 = 0;
+  __pyx_r = ((PyObject *)__pyx_t_4);
+  __pyx_t_4 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_XDECREF(__pyx_t_6);
+  __Pyx_AddTraceback("iocpsupport.send", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+static PyObject *__pyx_tp_new_11iocpsupport_CompletionPort(PyTypeObject *t, PyObject *a, PyObject *k) {
+  PyObject *o = (*t->tp_alloc)(t, 0);
+  if (!o) return 0;
+  return o;
+}
+
+static void __pyx_tp_dealloc_11iocpsupport_CompletionPort(PyObject *o) {
+  (*Py_TYPE(o)->tp_free)(o);
+}
+
+static PyMethodDef __pyx_methods_11iocpsupport_CompletionPort[] = {
+  {__Pyx_NAMESTR("addHandle"), (PyCFunction)__pyx_pf_11iocpsupport_14CompletionPort_1addHandle, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)},
+  {__Pyx_NAMESTR("getEvent"), (PyCFunction)__pyx_pf_11iocpsupport_14CompletionPort_2getEvent, METH_O, __Pyx_DOCSTR(0)},
+  {__Pyx_NAMESTR("postEvent"), (PyCFunction)__pyx_pf_11iocpsupport_14CompletionPort_3postEvent, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(0)},
+  {__Pyx_NAMESTR("__del__"), (PyCFunction)__pyx_pf_11iocpsupport_14CompletionPort_4__del__, METH_NOARGS, __Pyx_DOCSTR(0)},
+  {0, 0, 0, 0}
+};
+
+static PyNumberMethods __pyx_tp_as_number_CompletionPort = {
+  0, /*nb_add*/
+  0, /*nb_subtract*/
+  0, /*nb_multiply*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_divide*/
+  #endif
+  0, /*nb_remainder*/
+  0, /*nb_divmod*/
+  0, /*nb_power*/
+  0, /*nb_negative*/
+  0, /*nb_positive*/
+  0, /*nb_absolute*/
+  0, /*nb_nonzero*/
+  0, /*nb_invert*/
+  0, /*nb_lshift*/
+  0, /*nb_rshift*/
+  0, /*nb_and*/
+  0, /*nb_xor*/
+  0, /*nb_or*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_coerce*/
+  #endif
+  0, /*nb_int*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_long*/
+  #else
+  0, /*reserved*/
+  #endif
+  0, /*nb_float*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_oct*/
+  #endif
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_hex*/
+  #endif
+  0, /*nb_inplace_add*/
+  0, /*nb_inplace_subtract*/
+  0, /*nb_inplace_multiply*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_inplace_divide*/
+  #endif
+  0, /*nb_inplace_remainder*/
+  0, /*nb_inplace_power*/
+  0, /*nb_inplace_lshift*/
+  0, /*nb_inplace_rshift*/
+  0, /*nb_inplace_and*/
+  0, /*nb_inplace_xor*/
+  0, /*nb_inplace_or*/
+  0, /*nb_floor_divide*/
+  0, /*nb_true_divide*/
+  0, /*nb_inplace_floor_divide*/
+  0, /*nb_inplace_true_divide*/
+  #if PY_VERSION_HEX >= 0x02050000
+  0, /*nb_index*/
+  #endif
+};
+
+static PySequenceMethods __pyx_tp_as_sequence_CompletionPort = {
+  0, /*sq_length*/
+  0, /*sq_concat*/
+  0, /*sq_repeat*/
+  0, /*sq_item*/
+  0, /*sq_slice*/
+  0, /*sq_ass_item*/
+  0, /*sq_ass_slice*/
+  0, /*sq_contains*/
+  0, /*sq_inplace_concat*/
+  0, /*sq_inplace_repeat*/
+};
+
+static PyMappingMethods __pyx_tp_as_mapping_CompletionPort = {
+  0, /*mp_length*/
+  0, /*mp_subscript*/
+  0, /*mp_ass_subscript*/
+};
+
+static PyBufferProcs __pyx_tp_as_buffer_CompletionPort = {
+  #if PY_MAJOR_VERSION < 3
+  0, /*bf_getreadbuffer*/
+  #endif
+  #if PY_MAJOR_VERSION < 3
+  0, /*bf_getwritebuffer*/
+  #endif
+  #if PY_MAJOR_VERSION < 3
+  0, /*bf_getsegcount*/
+  #endif
+  #if PY_MAJOR_VERSION < 3
+  0, /*bf_getcharbuffer*/
+  #endif
+  #if PY_VERSION_HEX >= 0x02060000
+  0, /*bf_getbuffer*/
+  #endif
+  #if PY_VERSION_HEX >= 0x02060000
+  0, /*bf_releasebuffer*/
+  #endif
+};
+
+static PyTypeObject __pyx_type_11iocpsupport_CompletionPort = {
+  PyVarObject_HEAD_INIT(0, 0)
+  __Pyx_NAMESTR("iocpsupport.CompletionPort"), /*tp_name*/
+  sizeof(struct __pyx_obj_11iocpsupport_CompletionPort), /*tp_basicsize*/
+  0, /*tp_itemsize*/
+  __pyx_tp_dealloc_11iocpsupport_CompletionPort, /*tp_dealloc*/
+  0, /*tp_print*/
+  0, /*tp_getattr*/
+  0, /*tp_setattr*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*tp_compare*/
+  #else
+  0, /*reserved*/
+  #endif
+  0, /*tp_repr*/
+  &__pyx_tp_as_number_CompletionPort, /*tp_as_number*/
+  &__pyx_tp_as_sequence_CompletionPort, /*tp_as_sequence*/
+  &__pyx_tp_as_mapping_CompletionPort, /*tp_as_mapping*/
+  0, /*tp_hash*/
+  0, /*tp_call*/
+  0, /*tp_str*/
+  0, /*tp_getattro*/
+  0, /*tp_setattro*/
+  &__pyx_tp_as_buffer_CompletionPort, /*tp_as_buffer*/
+  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE, /*tp_flags*/
+  0, /*tp_doc*/
+  0, /*tp_traverse*/
+  0, /*tp_clear*/
+  0, /*tp_richcompare*/
+  0, /*tp_weaklistoffset*/
+  0, /*tp_iter*/
+  0, /*tp_iternext*/
+  __pyx_methods_11iocpsupport_CompletionPort, /*tp_methods*/
+  0, /*tp_members*/
+  0, /*tp_getset*/
+  0, /*tp_base*/
+  0, /*tp_dict*/
+  0, /*tp_descr_get*/
+  0, /*tp_descr_set*/
+  0, /*tp_dictoffset*/
+  __pyx_pf_11iocpsupport_14CompletionPort___init__, /*tp_init*/
+  0, /*tp_alloc*/
+  __pyx_tp_new_11iocpsupport_CompletionPort, /*tp_new*/
+  0, /*tp_free*/
+  0, /*tp_is_gc*/
+  0, /*tp_bases*/
+  0, /*tp_mro*/
+  0, /*tp_cache*/
+  0, /*tp_subclasses*/
+  0, /*tp_weaklist*/
+  0, /*tp_del*/
+  #if PY_VERSION_HEX >= 0x02060000
+  0, /*tp_version_tag*/
+  #endif
+};
+
+static PyMethodDef __pyx_methods[] = {
+  {0, 0, 0, 0}
+};
+
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef __pyx_moduledef = {
+    PyModuleDef_HEAD_INIT,
+    __Pyx_NAMESTR("iocpsupport"),
+    0, /* m_doc */
+    -1, /* m_size */
+    __pyx_methods /* m_methods */,
+    NULL, /* m_reload */
+    NULL, /* m_traverse */
+    NULL, /* m_clear */
+    NULL /* m_free */
+};
+#endif
+
+static __Pyx_StringTabEntry __pyx_string_tab[] = {
+  {&__pyx_n_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 1},
+  {&__pyx_kp_s_10, __pyx_k_10, sizeof(__pyx_k_10), 0, 0, 1, 0},
+  {&__pyx_kp_s_11, __pyx_k_11, sizeof(__pyx_k_11), 0, 0, 1, 0},
+  {&__pyx_kp_s_12, __pyx_k_12, sizeof(__pyx_k_12), 0, 0, 1, 0},
+  {&__pyx_kp_s_13, __pyx_k_13, sizeof(__pyx_k_13), 0, 0, 1, 0},
+  {&__pyx_kp_s_15, __pyx_k_15, sizeof(__pyx_k_15), 0, 0, 1, 0},
+  {&__pyx_kp_s_16, __pyx_k_16, sizeof(__pyx_k_16), 0, 0, 1, 0},
+  {&__pyx_n_s_2, __pyx_k_2, sizeof(__pyx_k_2), 0, 0, 1, 1},
+  {&__pyx_kp_s_3, __pyx_k_3, sizeof(__pyx_k_3), 0, 0, 1, 0},
+  {&__pyx_kp_s_5, __pyx_k_5, sizeof(__pyx_k_5), 0, 0, 1, 0},
+  {&__pyx_kp_s_6, __pyx_k_6, sizeof(__pyx_k_6), 0, 0, 1, 0},
+  {&__pyx_kp_s_7, __pyx_k_7, sizeof(__pyx_k_7), 0, 0, 1, 0},
+  {&__pyx_kp_s_8, __pyx_k_8, sizeof(__pyx_k_8), 0, 0, 1, 0},
+  {&__pyx_n_s__AllocateReadBuffer, __pyx_k__AllocateReadBuffer, sizeof(__pyx_k__AllocateReadBuffer), 0, 0, 1, 1},
+  {&__pyx_n_s__Event, __pyx_k__Event, sizeof(__pyx_k__Event), 0, 0, 1, 1},
+  {&__pyx_n_s__MemoryError, __pyx_k__MemoryError, sizeof(__pyx_k__MemoryError), 0, 0, 1, 1},
+  {&__pyx_n_s__RuntimeError, __pyx_k__RuntimeError, sizeof(__pyx_k__RuntimeError), 0, 0, 1, 1},
+  {&__pyx_n_s__ValueError, __pyx_k__ValueError, sizeof(__pyx_k__ValueError), 0, 0, 1, 1},
+  {&__pyx_n_s__WSAAddressToString, __pyx_k__WSAAddressToString, sizeof(__pyx_k__WSAAddressToString), 0, 0, 1, 1},
+  {&__pyx_n_s__WindowsError, __pyx_k__WindowsError, sizeof(__pyx_k__WindowsError), 0, 0, 1, 1},
+  {&__pyx_n_s____init__, __pyx_k____init__, sizeof(__pyx_k____init__), 0, 0, 1, 1},
+  {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1},
+  {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1},
+  {&__pyx_n_s__accept, __pyx_k__accept, sizeof(__pyx_k__accept), 0, 0, 1, 1},
+  {&__pyx_n_s__accepting, __pyx_k__accepting, sizeof(__pyx_k__accepting), 0, 0, 1, 1},
+  {&__pyx_n_s__addr, __pyx_k__addr, sizeof(__pyx_k__addr), 0, 0, 1, 1},
+  {&__pyx_n_s__addr_buff, __pyx_k__addr_buff, sizeof(__pyx_k__addr_buff), 0, 0, 1, 1},
+  {&__pyx_n_s__addr_len_buff, __pyx_k__addr_len_buff, sizeof(__pyx_k__addr_len_buff), 0, 0, 1, 1},
+  {&__pyx_n_s__buff, __pyx_k__buff, sizeof(__pyx_k__buff), 0, 0, 1, 1},
+  {&__pyx_n_s__bufflist, __pyx_k__bufflist, sizeof(__pyx_k__bufflist), 0, 0, 1, 1},
+  {&__pyx_n_s__bytes, __pyx_k__bytes, sizeof(__pyx_k__bytes), 0, 0, 1, 1},
+  {&__pyx_n_s__callback, __pyx_k__callback, sizeof(__pyx_k__callback), 0, 0, 1, 1},
+  {&__pyx_n_s__connect, __pyx_k__connect, sizeof(__pyx_k__connect), 0, 0, 1, 1},
+  {&__pyx_n_s__flags, __pyx_k__flags, sizeof(__pyx_k__flags), 0, 0, 1, 1},
+  {&__pyx_n_s__get_accept_addrs, __pyx_k__get_accept_addrs, sizeof(__pyx_k__get_accept_addrs), 0, 0, 1, 1},
+  {&__pyx_n_s__getsockopt, __pyx_k__getsockopt, sizeof(__pyx_k__getsockopt), 0, 0, 1, 1},
+  {&__pyx_n_s__handle, __pyx_k__handle, sizeof(__pyx_k__handle), 0, 0, 1, 1},
+  {&__pyx_n_s__have_connectex, __pyx_k__have_connectex, sizeof(__pyx_k__have_connectex), 0, 0, 1, 1},
+  {&__pyx_n_s__iocpsupport, __pyx_k__iocpsupport, sizeof(__pyx_k__iocpsupport), 0, 0, 1, 1},
+  {&__pyx_n_s__key, __pyx_k__key, sizeof(__pyx_k__key), 0, 0, 1, 1},
+  {&__pyx_n_s__listening, __pyx_k__listening, sizeof(__pyx_k__listening), 0, 0, 1, 1},
+  {&__pyx_n_s__makesockaddr, __pyx_k__makesockaddr, sizeof(__pyx_k__makesockaddr), 0, 0, 1, 1},
+  {&__pyx_n_s__maxAddrLen, __pyx_k__maxAddrLen, sizeof(__pyx_k__maxAddrLen), 0, 0, 1, 1},
+  {&__pyx_n_s__obj, __pyx_k__obj, sizeof(__pyx_k__obj), 0, 0, 1, 1},
+  {&__pyx_n_s__owner, __pyx_k__owner, sizeof(__pyx_k__owner), 0, 0, 1, 1},
+  {&__pyx_n_s__recv, __pyx_k__recv, sizeof(__pyx_k__recv), 0, 0, 1, 1},
+  {&__pyx_n_s__recvfrom, __pyx_k__recvfrom, sizeof(__pyx_k__recvfrom), 0, 0, 1, 1},
+  {&__pyx_n_s__rsplit, __pyx_k__rsplit, sizeof(__pyx_k__rsplit), 0, 0, 1, 1},
+  {&__pyx_n_s__s, __pyx_k__s, sizeof(__pyx_k__s), 0, 0, 1, 1},
+  {&__pyx_n_s__self, __pyx_k__self, sizeof(__pyx_k__self), 0, 0, 1, 1},
+  {&__pyx_n_s__send, __pyx_k__send, sizeof(__pyx_k__send), 0, 0, 1, 1},
+  {&__pyx_n_s__socket, __pyx_k__socket, sizeof(__pyx_k__socket), 0, 0, 1, 1},
+  {&__pyx_n_s__split, __pyx_k__split, sizeof(__pyx_k__split), 0, 0, 1, 1},
+  {0, 0, 0, 0, 0, 0, 0}
+};
+static int __Pyx_InitCachedBuiltins(void) {
+  __pyx_builtin_ValueError = __Pyx_GetName(__pyx_b, __pyx_n_s__ValueError); if (!__pyx_builtin_ValueError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 304; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_builtin_MemoryError = __Pyx_GetName(__pyx_b, __pyx_n_s__MemoryError); if (!__pyx_builtin_MemoryError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 132; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_builtin_RuntimeError = __Pyx_GetName(__pyx_b, __pyx_n_s__RuntimeError); if (!__pyx_builtin_RuntimeError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 272; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  return 0;
+  __pyx_L1_error:;
+  return -1;
+}
+
+static int __Pyx_InitCachedConstants(void) {
+  __Pyx_RefNannyDeclarations
+  __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants");
+
+  /* "iocpsupport.pyx":233
+ *             raise_error(0, 'WSAAddressToString')
+ *         host, sa_port = PyString_FromString(buff), ntohs(sin6.sin6_port)
+ *         host, port = host.rsplit(':', 1)             # <<<<<<<<<<<<<<
+ *         port = int(port)
+ *         assert host[0] == '['
+ */
+  __pyx_k_tuple_4 = PyTuple_New(2); if (unlikely(!__pyx_k_tuple_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 233; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_4));
+  __Pyx_INCREF(((PyObject *)__pyx_kp_s_3));
+  PyTuple_SET_ITEM(__pyx_k_tuple_4, 0, ((PyObject *)__pyx_kp_s_3));
+  __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_3));
+  __Pyx_INCREF(__pyx_int_1);
+  PyTuple_SET_ITEM(__pyx_k_tuple_4, 1, __pyx_int_1);
+  __Pyx_GIVEREF(__pyx_int_1);
+  __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_4));
+
+  /* "iocpsupport.pyx":264
+ *     cdef int addrlen = sizeof(sockaddr_in6)
+ *     host, port, flow, scope = addr
+ *     host = host.split("%")[0] # remove scope ID, if any             # <<<<<<<<<<<<<<
+ * 
+ *     hoststr = PyString_AsString(host)
+ */
+  __pyx_k_tuple_9 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_9)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 264; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_9));
+  __Pyx_INCREF(((PyObject *)__pyx_kp_s_8));
+  PyTuple_SET_ITEM(__pyx_k_tuple_9, 0, ((PyObject *)__pyx_kp_s_8));
+  __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_8));
+  __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_9));
+  __Pyx_RefNannyFinishContext();
+  return 0;
+  __pyx_L1_error:;
+  __Pyx_RefNannyFinishContext();
+  return -1;
+}
+
+static int __Pyx_InitGlobals(void) {
+  if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  __pyx_int_0 = PyInt_FromLong(0); if (unlikely(!__pyx_int_0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  __pyx_int_1 = PyInt_FromLong(1); if (unlikely(!__pyx_int_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  return 0;
+  __pyx_L1_error:;
+  return -1;
+}
+
+#if PY_MAJOR_VERSION < 3
+PyMODINIT_FUNC initiocpsupport(void); /*proto*/
+PyMODINIT_FUNC initiocpsupport(void)
+#else
+PyMODINIT_FUNC PyInit_iocpsupport(void); /*proto*/
+PyMODINIT_FUNC PyInit_iocpsupport(void)
+#endif
+{
+  PyObject *__pyx_t_1 = NULL;
+  PyObject *__pyx_t_2 = NULL;
+  int __pyx_t_3;
+  __Pyx_RefNannyDeclarations
+  #if CYTHON_REFNANNY
+  __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+  if (!__Pyx_RefNanny) {
+      PyErr_Clear();
+      __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+      if (!__Pyx_RefNanny)
+          Py_FatalError("failed to import 'refnanny' module");
+  }
+  #endif
+  __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit_iocpsupport(void)");
+  if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  #ifdef __pyx_binding_PyCFunctionType_USED
+  if (__pyx_binding_PyCFunctionType_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  #endif
+  /*--- Library function declarations ---*/
+  /*--- Threads initialization code ---*/
+  #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
+  #ifdef WITH_THREAD /* Python build with threading support? */
+  PyEval_InitThreads();
+  #endif
+  #endif
+  /*--- Module creation code ---*/
+  #if PY_MAJOR_VERSION < 3
+  __pyx_m = Py_InitModule4(__Pyx_NAMESTR("iocpsupport"), __pyx_methods, 0, 0, PYTHON_API_VERSION);
+  #else
+  __pyx_m = PyModule_Create(&__pyx_moduledef);
+  #endif
+  if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  #if PY_MAJOR_VERSION < 3
+  Py_INCREF(__pyx_m);
+  #endif
+  __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME));
+  if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  /*--- Initialize various global constants etc. ---*/
+  if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  if (__pyx_module_is_main_iocpsupport) {
+    if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  }
+  /*--- Builtin init code ---*/
+  if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  /*--- Constants init code ---*/
+  if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  /*--- Global init code ---*/
+  /*--- Variable export code ---*/
+  /*--- Function export code ---*/
+  /*--- Type init code ---*/
+  if (PyType_Ready(&__pyx_type_11iocpsupport_CompletionPort) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 148; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  if (__Pyx_SetAttrString(__pyx_m, "CompletionPort", (PyObject *)&__pyx_type_11iocpsupport_CompletionPort) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 148; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_ptype_11iocpsupport_CompletionPort = &__pyx_type_11iocpsupport_CompletionPort;
+  /*--- Type import code ---*/
+  /*--- Variable import code ---*/
+  /*--- Function import code ---*/
+  /*--- Execution code ---*/
+
+  /* "iocpsupport.pyx":141
+ *     raise WindowsError(message, err)
+ * 
+ * class Event:             # <<<<<<<<<<<<<<
+ *     def __init__(self, callback, owner, **kw):
+ *         self.callback = callback
+ */
+  __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 141; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_1));
+
+  /* "iocpsupport.pyx":142
+ * 
+ * class Event:
+ *     def __init__(self, callback, owner, **kw):             # <<<<<<<<<<<<<<
+ *         self.callback = callback
+ *         self.owner = owner
+ */
+  __pyx_t_2 = __pyx_binding_PyCFunctionType_NewEx(&__pyx_mdef_11iocpsupport_5Event___init__, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 142; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_2);
+  if (PyObject_SetItem(__pyx_t_1, __pyx_n_s____init__, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 142; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+
+  /* "iocpsupport.pyx":141
+ *     raise WindowsError(message, err)
+ * 
+ * class Event:             # <<<<<<<<<<<<<<
+ *     def __init__(self, callback, owner, **kw):
+ *         self.callback = callback
+ */
+  __pyx_t_2 = __Pyx_CreateClass(((PyObject *)__pyx_empty_tuple), ((PyObject *)__pyx_t_1), __pyx_n_s__Event, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 141; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_2);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__Event, __pyx_t_2) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 141; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+  __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0;
+
+  /* "iocpsupport.pyx":208
+ *         CloseHandle(self.port)
+ * 
+ * def makesockaddr(object buff):             # <<<<<<<<<<<<<<
+ *     cdef void *mem_buffer
+ *     cdef Py_ssize_t size
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_makesockaddr, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 208; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__makesockaddr, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 208; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "iocpsupport.pyx":279
+ * 
+ * 
+ * def AllocateReadBuffer(int size):             # <<<<<<<<<<<<<<
+ *     return PyBuffer_New(size)
+ * 
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_1AllocateReadBuffer, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 279; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__AllocateReadBuffer, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 279; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "iocpsupport.pyx":282
+ *     return PyBuffer_New(size)
+ * 
+ * def maxAddrLen(long s):             # <<<<<<<<<<<<<<
+ *     cdef WSAPROTOCOL_INFO wsa_pi
+ *     cdef int size, rc
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_2maxAddrLen, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 282; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__maxAddrLen, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 282; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "iocpsupport.pyx":302
+ *     return wsa_pi.iAddressFamily
+ * 
+ * import socket # for WSAStartup             # <<<<<<<<<<<<<<
+ * if not initWinsockPointers():
+ *     raise ValueError, 'Failed to initialize Winsock function vectors'
+ */
+  __pyx_t_1 = __Pyx_Import(((PyObject *)__pyx_n_s__socket), 0, -1); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 302; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__socket, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 302; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "iocpsupport.pyx":303
+ * 
+ * import socket # for WSAStartup
+ * if not initWinsockPointers():             # <<<<<<<<<<<<<<
+ *     raise ValueError, 'Failed to initialize Winsock function vectors'
+ * 
+ */
+  __pyx_t_3 = (!initWinsockPointers());
+  if (__pyx_t_3) {
+
+    /* "iocpsupport.pyx":304
+ * import socket # for WSAStartup
+ * if not initWinsockPointers():
+ *     raise ValueError, 'Failed to initialize Winsock function vectors'             # <<<<<<<<<<<<<<
+ * 
+ * have_connectex = (lpConnectEx != NULL)
+ */
+    __Pyx_Raise(__pyx_builtin_ValueError, ((PyObject *)__pyx_kp_s_16), 0, 0);
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 304; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L2;
+  }
+  __pyx_L2:;
+
+  /* "iocpsupport.pyx":306
+ *     raise ValueError, 'Failed to initialize Winsock function vectors'
+ * 
+ * have_connectex = (lpConnectEx != NULL)             # <<<<<<<<<<<<<<
+ * 
+ * include 'acceptex.pxi'
+ */
+  __pyx_t_1 = __Pyx_PyBool_FromLong((lpConnectEx != NULL)); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 306; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__have_connectex, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 306; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":5
+ * 
+ * 
+ * def accept(long listening, long accepting, object buff, object obj):             # <<<<<<<<<<<<<<
+ *     """
+ *     CAUTION: unlike system AcceptEx(), this function returns 0 on success
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_3accept, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__accept, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\acceptex.pxi":34
+ *     return 0
+ * 
+ * def get_accept_addrs(long s, object buff):             # <<<<<<<<<<<<<<
+ *     cdef WSAPROTOCOL_INFO wsa_pi
+ *     cdef int locallen, remotelen
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_4get_accept_addrs, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__get_accept_addrs, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[1]; __pyx_lineno = 34; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\connectex.pxi":5
+ * 
+ * 
+ * def connect(long s, object addr, object obj):             # <<<<<<<<<<<<<<
+ *     """
+ *     CAUTION: unlike system ConnectEx(), this function returns 0 on success
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_5connect, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__connect, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[2]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_6recv, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__recv, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsarecv.pxi":41
+ *         PyMem_Free(ws_buf)
+ * 
+ * def recvfrom(long s, object buff, object addr_buff, object addr_len_buff, object obj, unsigned long flags = 0):             # <<<<<<<<<<<<<<
+ *     cdef int rc, c_addr_buff_len, c_addr_len_buff_len
+ *     cdef myOVERLAPPED *ov
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_7recvfrom, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__recvfrom, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[3]; __pyx_lineno = 41; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "C:\t\twisted\twisted\internet\iocpreactor\iocpsupport\wsasend.pxi":5
+ * 
+ * 
+ * def send(long s, object buff, object obj, unsigned long flags = 0):             # <<<<<<<<<<<<<<
+ *     cdef int rc
+ *     cdef myOVERLAPPED *ov
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_11iocpsupport_8send, NULL, __pyx_n_s__iocpsupport); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__send, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[4]; __pyx_lineno = 5; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "iocpsupport.pyx":1
+ * # Copyright (c) Twisted Matrix Laboratories.             # <<<<<<<<<<<<<<
+ * # See LICENSE for details.
+ * 
+ */
+  __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_1));
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_1)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0;
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_2);
+  if (__pyx_m) {
+    __Pyx_AddTraceback("init iocpsupport", __pyx_clineno, __pyx_lineno, __pyx_filename);
+    Py_DECREF(__pyx_m); __pyx_m = 0;
+  } else if (!PyErr_Occurred()) {
+    PyErr_SetString(PyExc_ImportError, "init iocpsupport");
+  }
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+  #if PY_MAJOR_VERSION < 3
+  return;
+  #else
+  return __pyx_m;
+  #endif
+}
+
+/* Runtime support code */
+
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+    PyObject *m = NULL, *p = NULL;
+    void *r = NULL;
+    m = PyImport_ImportModule((char *)modname);
+    if (!m) goto end;
+    p = PyObject_GetAttrString(m, (char *)"RefNannyAPI");
+    if (!p) goto end;
+    r = PyLong_AsVoidPtr(p);
+end:
+    Py_XDECREF(p);
+    Py_XDECREF(m);
+    return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif /* CYTHON_REFNANNY */
+
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) {
+    PyObject *result;
+    result = PyObject_GetAttr(dict, name);
+    if (!result) {
+        if (dict != __pyx_b) {
+            PyErr_Clear();
+            result = PyObject_GetAttr(__pyx_b, name);
+        }
+        if (!result) {
+            PyErr_SetObject(PyExc_NameError, name);
+        }
+    }
+    return result;
+}
+
+static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) {
+    PyObject *tmp_type, *tmp_value, *tmp_tb;
+    PyThreadState *tstate = PyThreadState_GET();
+
+    tmp_type = tstate->curexc_type;
+    tmp_value = tstate->curexc_value;
+    tmp_tb = tstate->curexc_traceback;
+    tstate->curexc_type = type;
+    tstate->curexc_value = value;
+    tstate->curexc_traceback = tb;
+    Py_XDECREF(tmp_type);
+    Py_XDECREF(tmp_value);
+    Py_XDECREF(tmp_tb);
+}
+
+static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) {
+    PyThreadState *tstate = PyThreadState_GET();
+    *type = tstate->curexc_type;
+    *value = tstate->curexc_value;
+    *tb = tstate->curexc_traceback;
+
+    tstate->curexc_type = 0;
+    tstate->curexc_value = 0;
+    tstate->curexc_traceback = 0;
+}
+
+
+#if PY_MAJOR_VERSION < 3
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
+    /* cause is unused */
+    Py_XINCREF(type);
+    Py_XINCREF(value);
+    Py_XINCREF(tb);
+    /* First, check the traceback argument, replacing None with NULL. */
+    if (tb == Py_None) {
+        Py_DECREF(tb);
+        tb = 0;
+    }
+    else if (tb != NULL && !PyTraceBack_Check(tb)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: arg 3 must be a traceback or None");
+        goto raise_error;
+    }
+    /* Next, replace a missing value with None */
+    if (value == NULL) {
+        value = Py_None;
+        Py_INCREF(value);
+    }
+    #if PY_VERSION_HEX < 0x02050000
+    if (!PyClass_Check(type))
+    #else
+    if (!PyType_Check(type))
+    #endif
+    {
+        /* Raising an instance.  The value should be a dummy. */
+        if (value != Py_None) {
+            PyErr_SetString(PyExc_TypeError,
+                "instance exception may not have a separate value");
+            goto raise_error;
+        }
+        /* Normalize to raise <class>, <instance> */
+        Py_DECREF(value);
+        value = type;
+        #if PY_VERSION_HEX < 0x02050000
+            if (PyInstance_Check(type)) {
+                type = (PyObject*) ((PyInstanceObject*)type)->in_class;
+                Py_INCREF(type);
+            }
+            else {
+                type = 0;
+                PyErr_SetString(PyExc_TypeError,
+                    "raise: exception must be an old-style class or instance");
+                goto raise_error;
+            }
+        #else
+            type = (PyObject*) Py_TYPE(type);
+            Py_INCREF(type);
+            if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
+                PyErr_SetString(PyExc_TypeError,
+                    "raise: exception class must be a subclass of BaseException");
+                goto raise_error;
+            }
+        #endif
+    }
+
+    __Pyx_ErrRestore(type, value, tb);
+    return;
+raise_error:
+    Py_XDECREF(value);
+    Py_XDECREF(type);
+    Py_XDECREF(tb);
+    return;
+}
+
+#else /* Python 3+ */
+
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
+    if (tb == Py_None) {
+        tb = 0;
+    } else if (tb && !PyTraceBack_Check(tb)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: arg 3 must be a traceback or None");
+        goto bad;
+    }
+    if (value == Py_None)
+        value = 0;
+
+    if (PyExceptionInstance_Check(type)) {
+        if (value) {
+            PyErr_SetString(PyExc_TypeError,
+                "instance exception may not have a separate value");
+            goto bad;
+        }
+        value = type;
+        type = (PyObject*) Py_TYPE(value);
+    } else if (!PyExceptionClass_Check(type)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: exception class must be a subclass of BaseException");
+        goto bad;
+    }
+
+    if (cause) {
+        PyObject *fixed_cause;
+        if (PyExceptionClass_Check(cause)) {
+            fixed_cause = PyObject_CallObject(cause, NULL);
+            if (fixed_cause == NULL)
+                goto bad;
+        }
+        else if (PyExceptionInstance_Check(cause)) {
+            fixed_cause = cause;
+            Py_INCREF(fixed_cause);
+        }
+        else {
+            PyErr_SetString(PyExc_TypeError,
+                            "exception causes must derive from "
+                            "BaseException");
+            goto bad;
+        }
+        if (!value) {
+            value = PyObject_CallObject(type, NULL);
+        }
+        PyException_SetCause(value, fixed_cause);
+    }
+
+    PyErr_SetObject(type, value);
+
+    if (tb) {
+        PyThreadState *tstate = PyThreadState_GET();
+        PyObject* tmp_tb = tstate->curexc_traceback;
+        if (tb != tmp_tb) {
+            Py_INCREF(tb);
+            tstate->curexc_traceback = tb;
+            Py_XDECREF(tmp_tb);
+        }
+    }
+
+bad:
+    return;
+}
+#endif
+
+static void __Pyx_RaiseArgtupleInvalid(
+    const char* func_name,
+    int exact,
+    Py_ssize_t num_min,
+    Py_ssize_t num_max,
+    Py_ssize_t num_found)
+{
+    Py_ssize_t num_expected;
+    const char *more_or_less;
+
+    if (num_found < num_min) {
+        num_expected = num_min;
+        more_or_less = "at least";
+    } else {
+        num_expected = num_max;
+        more_or_less = "at most";
+    }
+    if (exact) {
+        more_or_less = "exactly";
+    }
+    PyErr_Format(PyExc_TypeError,
+                 "%s() takes %s %"PY_FORMAT_SIZE_T"d positional argument%s (%"PY_FORMAT_SIZE_T"d given)",
+                 func_name, more_or_less, num_expected,
+                 (num_expected == 1) ? "" : "s", num_found);
+}
+
+static void __Pyx_RaiseDoubleKeywordsError(
+    const char* func_name,
+    PyObject* kw_name)
+{
+    PyErr_Format(PyExc_TypeError,
+        #if PY_MAJOR_VERSION >= 3
+        "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
+        #else
+        "%s() got multiple values for keyword argument '%s'", func_name,
+        PyString_AS_STRING(kw_name));
+        #endif
+}
+
+static int __Pyx_ParseOptionalKeywords(
+    PyObject *kwds,
+    PyObject **argnames[],
+    PyObject *kwds2,
+    PyObject *values[],
+    Py_ssize_t num_pos_args,
+    const char* function_name)
+{
+    PyObject *key = 0, *value = 0;
+    Py_ssize_t pos = 0;
+    PyObject*** name;
+    PyObject*** first_kw_arg = argnames + num_pos_args;
+
+    while (PyDict_Next(kwds, &pos, &key, &value)) {
+        name = first_kw_arg;
+        while (*name && (**name != key)) name++;
+        if (*name) {
+            values[name-argnames] = value;
+        } else {
+            #if PY_MAJOR_VERSION < 3
+            if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key))) {
+            #else
+            if (unlikely(!PyUnicode_CheckExact(key)) && unlikely(!PyUnicode_Check(key))) {
+            #endif
+                goto invalid_keyword_type;
+            } else {
+                for (name = first_kw_arg; *name; name++) {
+                    #if PY_MAJOR_VERSION >= 3
+                    if (PyUnicode_GET_SIZE(**name) == PyUnicode_GET_SIZE(key) &&
+                        PyUnicode_Compare(**name, key) == 0) break;
+                    #else
+                    if (PyString_GET_SIZE(**name) == PyString_GET_SIZE(key) &&
+                        _PyString_Eq(**name, key)) break;
+                    #endif
+                }
+                if (*name) {
+                    values[name-argnames] = value;
+                } else {
+                    /* unexpected keyword found */
+                    for (name=argnames; name != first_kw_arg; name++) {
+                        if (**name == key) goto arg_passed_twice;
+                        #if PY_MAJOR_VERSION >= 3
+                        if (PyUnicode_GET_SIZE(**name) == PyUnicode_GET_SIZE(key) &&
+                            PyUnicode_Compare(**name, key) == 0) goto arg_passed_twice;
+                        #else
+                        if (PyString_GET_SIZE(**name) == PyString_GET_SIZE(key) &&
+                            _PyString_Eq(**name, key)) goto arg_passed_twice;
+                        #endif
+                    }
+                    if (kwds2) {
+                        if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
+                    } else {
+                        goto invalid_keyword;
+                    }
+                }
+            }
+        }
+    }
+    return 0;
+arg_passed_twice:
+    __Pyx_RaiseDoubleKeywordsError(function_name, **name);
+    goto bad;
+invalid_keyword_type:
+    PyErr_Format(PyExc_TypeError,
+        "%s() keywords must be strings", function_name);
+    goto bad;
+invalid_keyword:
+    PyErr_Format(PyExc_TypeError,
+    #if PY_MAJOR_VERSION < 3
+        "%s() got an unexpected keyword argument '%s'",
+        function_name, PyString_AsString(key));
+    #else
+        "%s() got an unexpected keyword argument '%U'",
+        function_name, key);
+    #endif
+bad:
+    return -1;
+}
+
+static CYTHON_INLINE void __Pyx_RaiseNeedMoreValuesError(Py_ssize_t index) {
+    PyErr_Format(PyExc_ValueError,
+                 "need more than %"PY_FORMAT_SIZE_T"d value%s to unpack",
+                 index, (index == 1) ? "" : "s");
+}
+
+static CYTHON_INLINE void __Pyx_RaiseTooManyValuesError(Py_ssize_t expected) {
+    PyErr_Format(PyExc_ValueError,
+                 "too many values to unpack (expected %"PY_FORMAT_SIZE_T"d)", expected);
+}
+
+static int __Pyx_IternextUnpackEndCheck(PyObject *retval, Py_ssize_t expected) {
+    if (unlikely(retval)) {
+        Py_DECREF(retval);
+        __Pyx_RaiseTooManyValuesError(expected);
+        return -1;
+    } else if (PyErr_Occurred()) {
+        if (likely(PyErr_ExceptionMatches(PyExc_StopIteration))) {
+            PyErr_Clear();
+            return 0;
+        } else {
+            return -1;
+        }
+    }
+    return 0;
+}
+
+static CYTHON_INLINE int __Pyx_CheckKeywordStrings(
+    PyObject *kwdict,
+    const char* function_name,
+    int kw_allowed)
+{
+    PyObject* key = 0;
+    Py_ssize_t pos = 0;
+    while (PyDict_Next(kwdict, &pos, &key, 0)) {
+        #if PY_MAJOR_VERSION < 3
+        if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key)))
+        #else
+        if (unlikely(!PyUnicode_CheckExact(key)) && unlikely(!PyUnicode_Check(key)))
+        #endif
+            goto invalid_keyword_type;
+    }
+    if ((!kw_allowed) && unlikely(key))
+        goto invalid_keyword;
+    return 1;
+invalid_keyword_type:
+    PyErr_Format(PyExc_TypeError,
+        "%s() keywords must be strings", function_name);
+    return 0;
+invalid_keyword:
+    PyErr_Format(PyExc_TypeError,
+    #if PY_MAJOR_VERSION < 3
+        "%s() got an unexpected keyword argument '%s'",
+        function_name, PyString_AsString(key));
+    #else
+        "%s() got an unexpected keyword argument '%U'",
+        function_name, key);
+    #endif
+    return 0;
+}
+
+
+static PyObject *__Pyx_FindPy2Metaclass(PyObject *bases) {
+    PyObject *metaclass;
+    /* Default metaclass */
+#if PY_MAJOR_VERSION < 3
+    if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
+        PyObject *base = PyTuple_GET_ITEM(bases, 0);
+        metaclass = PyObject_GetAttrString(base, (char *)"__class__");
+        if (!metaclass) {
+            PyErr_Clear();
+            metaclass = (PyObject*) Py_TYPE(base);
+        }
+    } else {
+        metaclass = (PyObject *) &PyClass_Type;
+    }
+#else
+    if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
+        PyObject *base = PyTuple_GET_ITEM(bases, 0);
+        metaclass = (PyObject*) Py_TYPE(base);
+    } else {
+        metaclass = (PyObject *) &PyType_Type;
+    }
+#endif
+    Py_INCREF(metaclass);
+    return metaclass;
+}
+
+static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name,
+                                   PyObject *modname) {
+    PyObject *result;
+    PyObject *metaclass;
+
+    if (PyDict_SetItemString(dict, "__module__", modname) < 0)
+        return NULL;
+
+    /* Python2 __metaclass__ */
+    metaclass = PyDict_GetItemString(dict, "__metaclass__");
+    if (metaclass) {
+        Py_INCREF(metaclass);
+    } else {
+        metaclass = __Pyx_FindPy2Metaclass(bases);
+    }
+    result = PyObject_CallFunctionObjArgs(metaclass, name, bases, dict, NULL);
+    Py_DECREF(metaclass);
+    return result;
+}
+
+
+static PyObject *__pyx_binding_PyCFunctionType_NewEx(PyMethodDef *ml, PyObject *self, PyObject *module) {
+    __pyx_binding_PyCFunctionType_object *op = PyObject_GC_New(__pyx_binding_PyCFunctionType_object, __pyx_binding_PyCFunctionType);
+    if (op == NULL)
+        return NULL;
+    op->func.m_ml = ml;
+    Py_XINCREF(self);
+    op->func.m_self = self;
+    Py_XINCREF(module);
+    op->func.m_module = module;
+    PyObject_GC_Track(op);
+    return (PyObject *)op;
+}
+
+static void __pyx_binding_PyCFunctionType_dealloc(__pyx_binding_PyCFunctionType_object *m) {
+    PyObject_GC_UnTrack(m);
+    Py_XDECREF(m->func.m_self);
+    Py_XDECREF(m->func.m_module);
+    PyObject_GC_Del(m);
+}
+
+static PyObject *__pyx_binding_PyCFunctionType_descr_get(PyObject *func, PyObject *obj, PyObject *type) {
+    if (obj == Py_None)
+            obj = NULL;
+    return PyMethod_New(func, obj, type);
+}
+
+static int __pyx_binding_PyCFunctionType_init(void) {
+    __pyx_binding_PyCFunctionType_type = PyCFunction_Type;
+    __pyx_binding_PyCFunctionType_type.tp_name = __Pyx_NAMESTR("cython_binding_builtin_function_or_method");
+    __pyx_binding_PyCFunctionType_type.tp_dealloc = (destructor)__pyx_binding_PyCFunctionType_dealloc;
+    __pyx_binding_PyCFunctionType_type.tp_descr_get = __pyx_binding_PyCFunctionType_descr_get;
+    if (PyType_Ready(&__pyx_binding_PyCFunctionType_type) < 0) {
+        return -1;
+    }
+    __pyx_binding_PyCFunctionType = &__pyx_binding_PyCFunctionType_type;
+    return 0;
+
+}
+
+static PyObject *__Pyx_Import(PyObject *name, PyObject *from_list, long level) {
+    PyObject *py_import = 0;
+    PyObject *empty_list = 0;
+    PyObject *module = 0;
+    PyObject *global_dict = 0;
+    PyObject *empty_dict = 0;
+    PyObject *list;
+    py_import = __Pyx_GetAttrString(__pyx_b, "__import__");
+    if (!py_import)
+        goto bad;
+    if (from_list)
+        list = from_list;
+    else {
+        empty_list = PyList_New(0);
+        if (!empty_list)
+            goto bad;
+        list = empty_list;
+    }
+    global_dict = PyModule_GetDict(__pyx_m);
+    if (!global_dict)
+        goto bad;
+    empty_dict = PyDict_New();
+    if (!empty_dict)
+        goto bad;
+    #if PY_VERSION_HEX >= 0x02050000
+    {
+        PyObject *py_level = PyInt_FromLong(level);
+        if (!py_level)
+            goto bad;
+        module = PyObject_CallFunctionObjArgs(py_import,
+            name, global_dict, empty_dict, list, py_level, NULL);
+        Py_DECREF(py_level);
+    }
+    #else
+    if (level>0) {
+        PyErr_SetString(PyExc_RuntimeError, "Relative import is not supported for Python <=2.4.");
+        goto bad;
+    }
+    module = PyObject_CallFunctionObjArgs(py_import,
+        name, global_dict, empty_dict, list, NULL);
+    #endif
+bad:
+    Py_XDECREF(empty_list);
+    Py_XDECREF(py_import);
+    Py_XDECREF(empty_dict);
+    return module;
+}
+
+static CYTHON_INLINE int __Pyx_PyBytes_Equals(PyObject* s1, PyObject* s2, int equals) {
+    if (s1 == s2) {   /* as done by PyObject_RichCompareBool(); also catches the (interned) empty string */
+        return (equals == Py_EQ);
+    } else if (PyBytes_CheckExact(s1) & PyBytes_CheckExact(s2)) {
+        if (PyBytes_GET_SIZE(s1) != PyBytes_GET_SIZE(s2)) {
+            return (equals == Py_NE);
+        } else if (PyBytes_GET_SIZE(s1) == 1) {
+            if (equals == Py_EQ)
+                return (PyBytes_AS_STRING(s1)[0] == PyBytes_AS_STRING(s2)[0]);
+            else
+                return (PyBytes_AS_STRING(s1)[0] != PyBytes_AS_STRING(s2)[0]);
+        } else {
+            int result = memcmp(PyBytes_AS_STRING(s1), PyBytes_AS_STRING(s2), (size_t)PyBytes_GET_SIZE(s1));
+            return (equals == Py_EQ) ? (result == 0) : (result != 0);
+        }
+    } else if ((s1 == Py_None) & PyBytes_CheckExact(s2)) {
+        return (equals == Py_NE);
+    } else if ((s2 == Py_None) & PyBytes_CheckExact(s1)) {
+        return (equals == Py_NE);
+    } else {
+        int result;
+        PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
+        if (!py_result)
+            return -1;
+        result = __Pyx_PyObject_IsTrue(py_result);
+        Py_DECREF(py_result);
+        return result;
+    }
+}
+
+static CYTHON_INLINE int __Pyx_PyUnicode_Equals(PyObject* s1, PyObject* s2, int equals) {
+    if (s1 == s2) {   /* as done by PyObject_RichCompareBool(); also catches the (interned) empty string */
+        return (equals == Py_EQ);
+    } else if (PyUnicode_CheckExact(s1) & PyUnicode_CheckExact(s2)) {
+        if (PyUnicode_GET_SIZE(s1) != PyUnicode_GET_SIZE(s2)) {
+            return (equals == Py_NE);
+        } else if (PyUnicode_GET_SIZE(s1) == 1) {
+            if (equals == Py_EQ)
+                return (PyUnicode_AS_UNICODE(s1)[0] == PyUnicode_AS_UNICODE(s2)[0]);
+            else
+                return (PyUnicode_AS_UNICODE(s1)[0] != PyUnicode_AS_UNICODE(s2)[0]);
+        } else {
+            int result = PyUnicode_Compare(s1, s2);
+            if ((result == -1) && unlikely(PyErr_Occurred()))
+                return -1;
+            return (equals == Py_EQ) ? (result == 0) : (result != 0);
+        }
+    } else if ((s1 == Py_None) & PyUnicode_CheckExact(s2)) {
+        return (equals == Py_NE);
+    } else if ((s2 == Py_None) & PyUnicode_CheckExact(s1)) {
+        return (equals == Py_NE);
+    } else {
+        int result;
+        PyObject* py_result = PyObject_RichCompare(s1, s2, equals);
+        if (!py_result)
+            return -1;
+        result = __Pyx_PyObject_IsTrue(py_result);
+        Py_DECREF(py_result);
+        return result;
+    }
+}
+
+static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) {
+    const unsigned char neg_one = (unsigned char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned char" :
+                    "value too large to convert to unsigned char");
+            }
+            return (unsigned char)-1;
+        }
+        return (unsigned char)val;
+    }
+    return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
+static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) {
+    const unsigned short neg_one = (unsigned short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned short" :
+                    "value too large to convert to unsigned short");
+            }
+            return (unsigned short)-1;
+        }
+        return (unsigned short)val;
+    }
+    return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
+static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) {
+    const unsigned int neg_one = (unsigned int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned int" :
+                    "value too large to convert to unsigned int");
+            }
+            return (unsigned int)-1;
+        }
+        return (unsigned int)val;
+    }
+    return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
+static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) {
+    const char neg_one = (char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to char" :
+                    "value too large to convert to char");
+            }
+            return (char)-1;
+        }
+        return (char)val;
+    }
+    return (char)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) {
+    const short neg_one = (short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to short" :
+                    "value too large to convert to short");
+            }
+            return (short)-1;
+        }
+        return (short)val;
+    }
+    return (short)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) {
+    const int neg_one = (int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to int" :
+                    "value too large to convert to int");
+            }
+            return (int)-1;
+        }
+        return (int)val;
+    }
+    return (int)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) {
+    const signed char neg_one = (signed char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed char" :
+                    "value too large to convert to signed char");
+            }
+            return (signed char)-1;
+        }
+        return (signed char)val;
+    }
+    return (signed char)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) {
+    const signed short neg_one = (signed short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed short" :
+                    "value too large to convert to signed short");
+            }
+            return (signed short)-1;
+        }
+        return (signed short)val;
+    }
+    return (signed short)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) {
+    const signed int neg_one = (signed int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed int" :
+                    "value too large to convert to signed int");
+            }
+            return (signed int)-1;
+        }
+        return (signed int)val;
+    }
+    return (signed int)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) {
+    const int neg_one = (int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to int" :
+                    "value too large to convert to int");
+            }
+            return (int)-1;
+        }
+        return (int)val;
+    }
+    return (int)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) {
+    const unsigned long neg_one = (unsigned long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to unsigned long");
+            return (unsigned long)-1;
+        }
+        return (unsigned long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to unsigned long");
+                return (unsigned long)-1;
+            }
+            return (unsigned long)PyLong_AsUnsignedLong(x);
+        } else {
+            return (unsigned long)PyLong_AsLong(x);
+        }
+    } else {
+        unsigned long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (unsigned long)-1;
+        val = __Pyx_PyInt_AsUnsignedLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) {
+    const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to unsigned PY_LONG_LONG");
+            return (unsigned PY_LONG_LONG)-1;
+        }
+        return (unsigned PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to unsigned PY_LONG_LONG");
+                return (unsigned PY_LONG_LONG)-1;
+            }
+            return (unsigned PY_LONG_LONG)PyLong_AsUnsignedLongLong(x);
+        } else {
+            return (unsigned PY_LONG_LONG)PyLong_AsLongLong(x);
+        }
+    } else {
+        unsigned PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (unsigned PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsUnsignedLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) {
+    const long neg_one = (long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to long");
+            return (long)-1;
+        }
+        return (long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to long");
+                return (long)-1;
+            }
+            return (long)PyLong_AsUnsignedLong(x);
+        } else {
+            return (long)PyLong_AsLong(x);
+        }
+    } else {
+        long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (long)-1;
+        val = __Pyx_PyInt_AsLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) {
+    const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to PY_LONG_LONG");
+            return (PY_LONG_LONG)-1;
+        }
+        return (PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to PY_LONG_LONG");
+                return (PY_LONG_LONG)-1;
+            }
+            return (PY_LONG_LONG)PyLong_AsUnsignedLongLong(x);
+        } else {
+            return (PY_LONG_LONG)PyLong_AsLongLong(x);
+        }
+    } else {
+        PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) {
+    const signed long neg_one = (signed long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to signed long");
+            return (signed long)-1;
+        }
+        return (signed long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to signed long");
+                return (signed long)-1;
+            }
+            return (signed long)PyLong_AsUnsignedLong(x);
+        } else {
+            return (signed long)PyLong_AsLong(x);
+        }
+    } else {
+        signed long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (signed long)-1;
+        val = __Pyx_PyInt_AsSignedLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) {
+    const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to signed PY_LONG_LONG");
+            return (signed PY_LONG_LONG)-1;
+        }
+        return (signed PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to signed PY_LONG_LONG");
+                return (signed PY_LONG_LONG)-1;
+            }
+            return (signed PY_LONG_LONG)PyLong_AsUnsignedLongLong(x);
+        } else {
+            return (signed PY_LONG_LONG)PyLong_AsLongLong(x);
+        }
+    } else {
+        signed PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (signed PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsSignedLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static int __Pyx_check_binary_version(void) {
+    char ctversion[4], rtversion[4];
+    PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
+    PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
+    if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
+        char message[200];
+        PyOS_snprintf(message, sizeof(message),
+                      "compiletime version %s of module '%.100s' "
+                      "does not match runtime version %s",
+                      ctversion, __Pyx_MODULE_NAME, rtversion);
+        #if PY_VERSION_HEX < 0x02050000
+        return PyErr_Warn(NULL, message);
+        #else
+        return PyErr_WarnEx(NULL, message, 1);
+        #endif
+    }
+    return 0;
+}
+
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+
+static void __Pyx_AddTraceback(const char *funcname, int __pyx_clineno,
+                               int __pyx_lineno, const char *__pyx_filename) {
+    PyObject *py_srcfile = 0;
+    PyObject *py_funcname = 0;
+    PyObject *py_globals = 0;
+    PyCodeObject *py_code = 0;
+    PyFrameObject *py_frame = 0;
+
+    #if PY_MAJOR_VERSION < 3
+    py_srcfile = PyString_FromString(__pyx_filename);
+    #else
+    py_srcfile = PyUnicode_FromString(__pyx_filename);
+    #endif
+    if (!py_srcfile) goto bad;
+    if (__pyx_clineno) {
+        #if PY_MAJOR_VERSION < 3
+        py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno);
+        #else
+        py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno);
+        #endif
+    }
+    else {
+        #if PY_MAJOR_VERSION < 3
+        py_funcname = PyString_FromString(funcname);
+        #else
+        py_funcname = PyUnicode_FromString(funcname);
+        #endif
+    }
+    if (!py_funcname) goto bad;
+    py_globals = PyModule_GetDict(__pyx_m);
+    if (!py_globals) goto bad;
+    py_code = PyCode_New(
+        0,            /*int argcount,*/
+        #if PY_MAJOR_VERSION >= 3
+        0,            /*int kwonlyargcount,*/
+        #endif
+        0,            /*int nlocals,*/
+        0,            /*int stacksize,*/
+        0,            /*int flags,*/
+        __pyx_empty_bytes, /*PyObject *code,*/
+        __pyx_empty_tuple,  /*PyObject *consts,*/
+        __pyx_empty_tuple,  /*PyObject *names,*/
+        __pyx_empty_tuple,  /*PyObject *varnames,*/
+        __pyx_empty_tuple,  /*PyObject *freevars,*/
+        __pyx_empty_tuple,  /*PyObject *cellvars,*/
+        py_srcfile,   /*PyObject *filename,*/
+        py_funcname,  /*PyObject *name,*/
+        __pyx_lineno,   /*int firstlineno,*/
+        __pyx_empty_bytes  /*PyObject *lnotab*/
+    );
+    if (!py_code) goto bad;
+    py_frame = PyFrame_New(
+        PyThreadState_GET(), /*PyThreadState *tstate,*/
+        py_code,             /*PyCodeObject *code,*/
+        py_globals,          /*PyObject *globals,*/
+        0                    /*PyObject *locals*/
+    );
+    if (!py_frame) goto bad;
+    py_frame->f_lineno = __pyx_lineno;
+    PyTraceBack_Here(py_frame);
+bad:
+    Py_XDECREF(py_srcfile);
+    Py_XDECREF(py_funcname);
+    Py_XDECREF(py_code);
+    Py_XDECREF(py_frame);
+}
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+    while (t->p) {
+        #if PY_MAJOR_VERSION < 3
+        if (t->is_unicode) {
+            *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+        } else if (t->intern) {
+            *t->p = PyString_InternFromString(t->s);
+        } else {
+            *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+        }
+        #else  /* Python 3+ has unicode identifiers */
+        if (t->is_unicode | t->is_str) {
+            if (t->intern) {
+                *t->p = PyUnicode_InternFromString(t->s);
+            } else if (t->encoding) {
+                *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+            } else {
+                *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+            }
+        } else {
+            *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+        }
+        #endif
+        if (!*t->p)
+            return -1;
+        ++t;
+    }
+    return 0;
+}
+
+/* Type Conversion Functions */
+
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+   int is_true = x == Py_True;
+   if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+   else return PyObject_IsTrue(x);
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) {
+  PyNumberMethods *m;
+  const char *name = NULL;
+  PyObject *res = NULL;
+#if PY_VERSION_HEX < 0x03000000
+  if (PyInt_Check(x) || PyLong_Check(x))
+#else
+  if (PyLong_Check(x))
+#endif
+    return Py_INCREF(x), x;
+  m = Py_TYPE(x)->tp_as_number;
+#if PY_VERSION_HEX < 0x03000000
+  if (m && m->nb_int) {
+    name = "int";
+    res = PyNumber_Int(x);
+  }
+  else if (m && m->nb_long) {
+    name = "long";
+    res = PyNumber_Long(x);
+  }
+#else
+  if (m && m->nb_int) {
+    name = "int";
+    res = PyNumber_Long(x);
+  }
+#endif
+  if (res) {
+#if PY_VERSION_HEX < 0x03000000
+    if (!PyInt_Check(res) && !PyLong_Check(res)) {
+#else
+    if (!PyLong_Check(res)) {
+#endif
+      PyErr_Format(PyExc_TypeError,
+                   "__%s__ returned non-%s (type %.200s)",
+                   name, name, Py_TYPE(res)->tp_name);
+      Py_DECREF(res);
+      return NULL;
+    }
+  }
+  else if (!PyErr_Occurred()) {
+    PyErr_SetString(PyExc_TypeError,
+                    "an integer is required");
+  }
+  return res;
+}
+
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
+  Py_ssize_t ival;
+  PyObject* x = PyNumber_Index(b);
+  if (!x) return -1;
+  ival = PyInt_AsSsize_t(x);
+  Py_DECREF(x);
+  return ival;
+}
+
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+#if PY_VERSION_HEX < 0x02050000
+   if (ival <= LONG_MAX)
+       return PyInt_FromLong((long)ival);
+   else {
+       unsigned char *bytes = (unsigned char *) &ival;
+       int one = 1; int little = (int)*(unsigned char*)&one;
+       return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0);
+   }
+#else
+   return PyInt_FromSize_t(ival);
+#endif
+}
+
+static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) {
+   unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x);
+   if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) {
+       return (size_t)-1;
+   } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) {
+       PyErr_SetString(PyExc_OverflowError,
+                       "value too large to convert to size_t");
+       return (size_t)-1;
+   }
+   return (size_t)val;
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/iocpsupport.pyx b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/iocpsupport.pyx
new file mode 100644
index 0000000..97cf634
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/iocpsupport.pyx
@@ -0,0 +1,312 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+# HANDLE and SOCKET are pointer-sized (they are 64 bit wide in 64-bit builds)
+ctypedef size_t HANDLE
+ctypedef size_t SOCKET
+ctypedef unsigned long DWORD
+# it's really a pointer, but we use it as an integer
+ctypedef size_t ULONG_PTR
+ctypedef int BOOL
+
+cdef extern from 'io.h':
+    long _get_osfhandle(int filehandle)
+
+cdef extern from 'errno.h':
+    int errno
+    enum:
+        EBADF
+
+cdef extern from 'winsock2.h':
+    pass
+
+cdef extern from 'ws2tcpip.h':
+    pass
+
+cdef extern from 'windows.h':
+    ctypedef struct OVERLAPPED:
+        pass
+    HANDLE CreateIoCompletionPort(HANDLE fileHandle, HANDLE existing, ULONG_PTR key, DWORD numThreads)
+    BOOL GetQueuedCompletionStatus(HANDLE port, DWORD *bytes, ULONG_PTR *key, OVERLAPPED **ov, DWORD timeout)
+    BOOL PostQueuedCompletionStatus(HANDLE port, DWORD bytes, ULONG_PTR key, OVERLAPPED *ov)
+    DWORD GetLastError()
+    BOOL CloseHandle(HANDLE h)
+    enum:
+        INVALID_HANDLE_VALUE
+    void DebugBreak()
+
+cdef extern from 'python.h':
+    struct PyObject:
+        pass
+    void *PyMem_Malloc(size_t n) except NULL
+    void PyMem_Free(void *p)
+    struct PyThreadState:
+        pass
+    PyThreadState *PyEval_SaveThread()
+    void PyEval_RestoreThread(PyThreadState *tstate)
+    void Py_INCREF(object o)
+    void Py_XINCREF(object o)
+    void Py_DECREF(object o)
+    void Py_XDECREF(object o)
+    int PyObject_AsWriteBuffer(object obj, void **buffer, Py_ssize_t *buffer_len) except -1
+    int PyObject_AsReadBuffer(object obj, void **buffer, Py_ssize_t *buffer_len) except -1
+    object PyString_FromString(char *v)
+    object PyString_FromStringAndSize(char *v, Py_ssize_t len)
+    object PyBuffer_New(Py_ssize_t size)
+    char *PyString_AsString(object obj) except NULL
+    object PySequence_Fast(object o, char *m)
+#    object PySequence_Fast_GET_ITEM(object o, Py_ssize_t i)
+    PyObject** PySequence_Fast_ITEMS(object o)
+    PyObject* PySequence_ITEM(PyObject *o, Py_ssize_t i)
+    Py_ssize_t PySequence_Fast_GET_SIZE(object o)
+
+cdef extern from '':
+    struct sockaddr:
+        unsigned short int sa_family
+        char sa_data[0]
+    cdef struct in_addr:
+        unsigned long s_addr
+    struct sockaddr_in:
+        int sin_port
+        in_addr sin_addr
+    cdef struct in6_addr:
+        char s6_addr[16]
+    struct sockaddr_in6:
+        short int sin6_family
+        unsigned short int sin6_port
+        unsigned long int sin6_flowinfo
+        in6_addr sin6_addr
+        unsigned long int sin6_scope_id
+    int getsockopt(SOCKET s, int level, int optname, char *optval, int *optlen)
+    enum:
+        SOL_SOCKET
+        SO_PROTOCOL_INFO
+        SOCKET_ERROR
+        ERROR_IO_PENDING
+        AF_INET
+        AF_INET6
+        INADDR_ANY
+    ctypedef struct WSAPROTOCOL_INFO:
+        int iMaxSockAddr
+        int iAddressFamily
+    int WSAGetLastError()
+    char *inet_ntoa(in_addr ina)
+    unsigned long inet_addr(char *cp)
+    unsigned short ntohs(unsigned short netshort)
+    unsigned short htons(unsigned short hostshort)
+    ctypedef struct WSABUF:
+        long len
+        char *buf
+#    cdef struct TRANSMIT_FILE_BUFFERS:
+#        pass
+    int WSARecv(SOCKET s, WSABUF *buffs, DWORD buffcount, DWORD *bytes, DWORD *flags, OVERLAPPED *ov, void *crud)
+    int WSARecvFrom(SOCKET s, WSABUF *buffs, DWORD buffcount, DWORD *bytes, DWORD *flags, sockaddr *fromaddr, int *fromlen, OVERLAPPED *ov, void *crud)
+    int WSASend(SOCKET s, WSABUF *buffs, DWORD buffcount, DWORD *bytes, DWORD flags, OVERLAPPED *ov, void *crud)
+    int WSAAddressToStringA(sockaddr *lpsaAddress, DWORD dwAddressLength,
+                            WSAPROTOCOL_INFO *lpProtocolInfo,
+                            char *lpszAddressString,
+                            DWORD *lpdwAddressStringLength)
+    int WSAStringToAddressA(char *AddressString, int AddressFamily,
+                            WSAPROTOCOL_INFO *lpProtocolInfo,
+                            sockaddr *lpAddress, int *lpAddressLength)
+
+cdef extern from 'string.h':
+    void *memset(void *s, int c, size_t n)
+
+cdef extern from 'winsock_pointers.h':
+    int initWinsockPointers()
+    BOOL (*lpAcceptEx)(SOCKET listening, SOCKET accepting, void *buffer, DWORD recvlen, DWORD locallen, DWORD remotelen, DWORD *bytes, OVERLAPPED *ov)
+    void (*lpGetAcceptExSockaddrs)(void *buffer, DWORD recvlen, DWORD locallen, DWORD remotelen, sockaddr **localaddr, int *locallen, sockaddr **remoteaddr, int *remotelen)
+    BOOL (*lpConnectEx)(SOCKET s, sockaddr *name, int namelen, void *buff, DWORD sendlen, DWORD *sentlen, OVERLAPPED *ov)
+#    BOOL (*lpTransmitFile)(SOCKET s, HANDLE hFile, DWORD size, DWORD buffer_size, OVERLAPPED *ov, TRANSMIT_FILE_BUFFERS *buff, DWORD flags)
+
+cdef struct myOVERLAPPED:
+    OVERLAPPED ov
+    PyObject *obj
+
+cdef myOVERLAPPED *makeOV() except NULL:
+    cdef myOVERLAPPED *res
+    res = <myOVERLAPPED *>PyMem_Malloc(sizeof(myOVERLAPPED))
+    if not res:
+        raise MemoryError
+    memset(res, 0, sizeof(myOVERLAPPED))
+    return res
+
+cdef void raise_error(int err, object message) except *:
+    if not err:
+        err = GetLastError()
+    raise WindowsError(message, err)
+
+class Event:
+    def __init__(self, callback, owner, **kw):
+        self.callback = callback
+        self.owner = owner
+        for k, v in kw.items():
+            setattr(self, k, v)
+
+cdef class CompletionPort:
+    cdef HANDLE port
+    def __init__(self):
+        cdef HANDLE res
+        res = CreateIoCompletionPort(INVALID_HANDLE_VALUE, 0, 0, 0)
+        if not res:
+            raise_error(0, 'CreateIoCompletionPort')
+        self.port = res
+
+    def addHandle(self, HANDLE handle, size_t key=0):
+        cdef HANDLE res
+        res = CreateIoCompletionPort(handle, self.port, key, 0)
+        if not res:
+            raise_error(0, 'CreateIoCompletionPort')
+
+    def getEvent(self, long timeout):
+        cdef PyThreadState *_save
+        cdef unsigned long bytes, rc
+        cdef size_t key
+        cdef myOVERLAPPED *ov
+
+        _save = PyEval_SaveThread()
+        rc = GetQueuedCompletionStatus(self.port, &bytes, &key, <OVERLAPPED **>&ov, timeout)
+        PyEval_RestoreThread(_save)
+
+        if not rc:
+            rc = GetLastError()
+        else:
+            rc = 0
+
+        obj = None
+        if ov:
+            if ov.obj:
+                obj = <object>ov.obj
+                Py_DECREF(obj) # we are stealing a reference here
+            PyMem_Free(ov)
+
+        return (rc, bytes, key, obj)
+
+    def postEvent(self, unsigned long bytes, size_t key, obj):
+        cdef myOVERLAPPED *ov
+        cdef unsigned long rc
+
+        if obj is not None:
+            ov = makeOV()
+            Py_INCREF(obj) # give ov its own reference to obj
+            ov.obj = <PyObject *>obj
+        else:
+            ov = NULL
+
+        rc = PostQueuedCompletionStatus(self.port, bytes, key, <OVERLAPPED *>ov)
+        if not rc:
+            if ov:
+                Py_DECREF(obj)
+                PyMem_Free(ov)
+            raise_error(0, 'PostQueuedCompletionStatus')
+
+    def __del__(self):
+        CloseHandle(self.port)
+
+def makesockaddr(object buff):
+    cdef void *mem_buffer
+    cdef Py_ssize_t size
+
+    PyObject_AsReadBuffer(buff, &mem_buffer, &size)
+    # XXX: this should really return the address family as well
+    return _makesockaddr(<sockaddr *>mem_buffer, size)
+
+cdef object _makesockaddr(sockaddr *addr, Py_ssize_t len):
+    cdef sockaddr_in *sin
+    cdef sockaddr_in6 *sin6
+    cdef char buff[256]
+    cdef int rc
+    cdef DWORD buff_size = sizeof(buff)
+    if not len:
+        return None
+    if addr.sa_family == AF_INET:
+        sin = <sockaddr_in *>addr
+        return PyString_FromString(inet_ntoa(sin.sin_addr)), ntohs(sin.sin_port)
+    elif addr.sa_family == AF_INET6:
+        sin6 = <sockaddr_in6 *>addr
+        rc = WSAAddressToStringA(addr, sizeof(sockaddr_in6), NULL, buff, &buff_size)
+        if rc == SOCKET_ERROR:
+            raise_error(0, 'WSAAddressToString')
+        host, sa_port = PyString_FromString(buff), ntohs(sin6.sin6_port)
+        host, port = host.rsplit(':', 1)
+        port = int(port)
+        assert host[0] == '['
+        assert host[-1] == ']'
+        assert port == sa_port
+        return host[1:-1], port
+    else:
+        return PyString_FromStringAndSize(addr.sa_data, sizeof(addr.sa_data))
+
+
+cdef object fillinetaddr(sockaddr_in *dest, object addr):
+    cdef unsigned short port
+    cdef unsigned long res
+    cdef char *hoststr
+    host, port = addr
+
+    hoststr = PyString_AsString(host)
+    res = inet_addr(hoststr)
+    if res == INADDR_ANY:
+        raise ValueError, 'invalid IP address'
+    dest.sin_addr.s_addr = res
+
+    dest.sin_port = htons(port)
+
+
+cdef object fillinet6addr(sockaddr_in6 *dest, object addr):
+    cdef unsigned short port
+    cdef unsigned long res
+    cdef char *hoststr
+    cdef int addrlen = sizeof(sockaddr_in6)
+    host, port, flow, scope = addr
+    host = host.split("%")[0] # remove scope ID, if any
+
+    hoststr = PyString_AsString(host)
+    cdef int parseresult = WSAStringToAddressA(hoststr, AF_INET6, NULL,
+                                               <sockaddr *>dest, &addrlen)
+    if parseresult == SOCKET_ERROR:
+        raise ValueError, 'invalid IPv6 address %r' % (host,)
+    if parseresult != 0:
+        raise RuntimeError, 'undefined error occurred during address parsing'
+    # sin6_addr field was handled by WSAStringToAddressA
+    dest.sin6_port = htons(port)
+    dest.sin6_flowinfo = flow
+    dest.sin6_scope_id = scope
+
+
+def AllocateReadBuffer(int size):
+    return PyBuffer_New(size)
+
+def maxAddrLen(long s):
+    cdef WSAPROTOCOL_INFO wsa_pi
+    cdef int size, rc
+
+    size = sizeof(wsa_pi)
+    rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)
+    if rc == SOCKET_ERROR:
+        raise_error(WSAGetLastError(), 'getsockopt')
+    return wsa_pi.iMaxSockAddr
+
+cdef int getAddrFamily(SOCKET s) except *:
+    cdef WSAPROTOCOL_INFO wsa_pi
+    cdef int size, rc
+
+    size = sizeof(wsa_pi)
+    rc = getsockopt(s, SOL_SOCKET, SO_PROTOCOL_INFO, <char *>&wsa_pi, &size)
+    if rc == SOCKET_ERROR:
+        raise_error(WSAGetLastError(), 'getsockopt')
+    return wsa_pi.iAddressFamily
+
+import socket # for WSAStartup
+if not initWinsockPointers():
+    raise ValueError, 'Failed to initialize Winsock function vectors'
+
+have_connectex = (lpConnectEx != NULL)
+
+include 'acceptex.pxi'
+include 'connectex.pxi'
+include 'wsarecv.pxi'
+include 'wsasend.pxi'
+
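As a minimal illustration of the CompletionPort/Event machinery defined above (not part of the imported sources, and assuming the compiled iocpsupport extension is available on a Windows host), a completion packet can be posted and retrieved like this:

    # Illustrative sketch only: posts one completion packet and pulls it back,
    # mirroring what IOCPReactor.wakeUp()/doIteration() do later in this diff.
    from twisted.internet.iocpreactor import iocpsupport as _iocp

    port = _iocp.CompletionPort()
    marker = object()
    port.postEvent(0, 1, marker)                # bytes=0, key=1, obj=marker
    rc, nbytes, key, obj = port.getEvent(1000)  # wait at most 1000 ms
    assert (rc, nbytes, key) == (0, 0, 1) and obj is marker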
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c
new file mode 100644
index 0000000..9bd115a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c
@@ -0,0 +1,62 @@
+/* Copyright (c) 2008 Twisted Matrix Laboratories.
+ * See LICENSE for details.
+ */
+
+
+#include<winsock2.h>
+#include<assert.h>
+#include<stdio.h>
+#include<stdlib.h>
+
+#ifndef WSAID_CONNECTEX
+#define WSAID_CONNECTEX {0x25a207b9,0xddf3,0x4660,{0x8e,0xe9,0x76,0xe5,0x8c,0x74,0x06,0x3e}}
+#endif
+#ifndef WSAID_GETACCEPTEXSOCKADDRS
+#define WSAID_GETACCEPTEXSOCKADDRS {0xb5367df2,0xcbac,0x11cf,{0x95,0xca,0x00,0x80,0x5f,0x48,0xa1,0x92}}
+#endif
+#ifndef WSAID_ACCEPTEX
+#define WSAID_ACCEPTEX {0xb5367df1,0xcbac,0x11cf,{0x95,0xca,0x00,0x80,0x5f,0x48,0xa1,0x92}}
+#endif
+/*#ifndef WSAID_TRANSMITFILE
+#define WSAID_TRANSMITFILE {0xb5367df0,0xcbac,0x11cf,{0x95,0xca,0x00,0x80,0x5f,0x48,0xa1,0x92}}
+#endif*/
+
+
+void *lpAcceptEx, *lpGetAcceptExSockaddrs, *lpConnectEx, *lpTransmitFile;
+
+int initPointer(SOCKET s, void **fun, GUID guid) {
+    int res;
+    DWORD bytes;
+
+    *fun = NULL;
+    res = WSAIoctl(s, SIO_GET_EXTENSION_FUNCTION_POINTER,
+                   &guid, sizeof(guid),
+                   fun, sizeof(fun),
+                   &bytes, NULL, NULL);
+    return !res;
+}
+
+int initWinsockPointers() {
+    SOCKET s = socket(AF_INET, SOCK_STREAM, 0);
+    /* I hate C */
+    GUID guid1 = WSAID_ACCEPTEX;
+    GUID guid2 = WSAID_GETACCEPTEXSOCKADDRS;
+    GUID guid3 = WSAID_CONNECTEX;
+    /*GUID guid4 = WSAID_TRANSMITFILE;*/
+    if (s == INVALID_SOCKET) {
+        return 0;
+    }
+    if (!initPointer(s, &lpAcceptEx, guid1))
+    {
+        return 0;
+    }
+    if (!initPointer(s, &lpGetAcceptExSockaddrs, guid2)) {
+        return 0;
+    }
+    if (!initPointer(s, &lpConnectEx, guid3)) {
+        return 0;
+    }
+    /*initPointer(s, &lpTransmitFile, guid4);*/
+    return 1;
+}
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/winsock_pointers.h b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/winsock_pointers.h
new file mode 100644
index 0000000..83e9ba8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/winsock_pointers.h
@@ -0,0 +1,51 @@
+/* Copyright (c) 2008 Twisted Matrix Laboratories.
+ * See LICENSE for details.
+ */
+
+
+#include<windows.h>
+
+int initWinsockPointers();
+BOOL
+(PASCAL FAR * lpAcceptEx)(
+    IN SOCKET sListenSocket,
+    IN SOCKET sAcceptSocket,
+    IN PVOID lpOutputBuffer,
+    IN DWORD dwReceiveDataLength,
+    IN DWORD dwLocalAddressLength,
+    IN DWORD dwRemoteAddressLength,
+    OUT LPDWORD lpdwBytesReceived,
+    IN LPOVERLAPPED lpOverlapped
+    );
+VOID
+(PASCAL FAR * lpGetAcceptExSockaddrs)(
+    IN PVOID lpOutputBuffer,
+    IN DWORD dwReceiveDataLength,
+    IN DWORD dwLocalAddressLength,
+    IN DWORD dwRemoteAddressLength,
+    OUT struct sockaddr **LocalSockaddr,
+    OUT LPINT LocalSockaddrLength,
+    OUT struct sockaddr **RemoteSockaddr,
+    OUT LPINT RemoteSockaddrLength
+    );
+BOOL
+(PASCAL FAR * lpConnectEx) (
+    IN SOCKET s,
+    IN const struct sockaddr FAR *name,
+    IN int namelen,
+    IN PVOID lpSendBuffer OPTIONAL,
+    IN DWORD dwSendDataLength,
+    OUT LPDWORD lpdwBytesSent,
+    IN LPOVERLAPPED lpOverlapped
+    );
+/*BOOL
+(PASCAL FAR * lpTransmitFile)(
+    IN SOCKET hSocket,
+    IN HANDLE hFile,
+    IN DWORD nNumberOfBytesToWrite,
+    IN DWORD nNumberOfBytesPerSend,
+    IN LPOVERLAPPED lpOverlapped,
+    IN LPTRANSMIT_FILE_BUFFERS lpTransmitBuffers,
+    IN DWORD dwReserved
+    );*/
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/wsarecv.pxi b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/wsarecv.pxi
new file mode 100644
index 0000000..58c391e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/wsarecv.pxi
@@ -0,0 +1,76 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+def recv(long s, object bufflist, object obj, unsigned long flags = 0):
+    cdef int rc, res
+    cdef myOVERLAPPED *ov
+    cdef WSABUF *ws_buf
+    cdef unsigned long bytes
+    cdef PyObject **buffers
+    cdef Py_ssize_t i, size, buffcount
+
+    bufflist = PySequence_Fast(bufflist, 'second argument needs to be a list')
+    buffcount = PySequence_Fast_GET_SIZE(bufflist)
+    buffers = PySequence_Fast_ITEMS(bufflist)
+
+    ws_buf = <WSABUF *>PyMem_Malloc(buffcount*sizeof(WSABUF))
+
+    try:
+        for i from 0 <= i < buffcount:
+            PyObject_AsWriteBuffer(<object>buffers[i], <void **>&ws_buf[i].buf, &size)
+            ws_buf[i].len = <DWORD>size
+
+        ov = makeOV()
+        if obj is not None:
+            ov.obj = <PyObject *>obj
+
+        rc = WSARecv(s, ws_buf, <DWORD>buffcount, &bytes, &flags, <OVERLAPPED *>ov, NULL)
+
+        if rc == SOCKET_ERROR:
+            rc = WSAGetLastError()
+            if rc != ERROR_IO_PENDING:
+                PyMem_Free(ov)
+                return rc, 0
+
+        Py_XINCREF(obj)
+        return rc, bytes
+    finally:
+        PyMem_Free(ws_buf)
+
+def recvfrom(long s, object buff, object addr_buff, object addr_len_buff, object obj, unsigned long flags = 0):
+    cdef int rc, c_addr_buff_len, c_addr_len_buff_len
+    cdef myOVERLAPPED *ov
+    cdef WSABUF ws_buf
+    cdef unsigned long bytes
+    cdef sockaddr *c_addr_buff
+    cdef int *c_addr_len_buff
+    cdef Py_ssize_t size
+
+    PyObject_AsWriteBuffer(buff, <void **>&ws_buf.buf, &size)
+    ws_buf.len = <DWORD>size
+    PyObject_AsWriteBuffer(addr_buff, <void **>&c_addr_buff, &size)
+    c_addr_buff_len = <int>size
+    PyObject_AsWriteBuffer(addr_len_buff, <void **>&c_addr_len_buff, &size)
+    c_addr_len_buff_len = <int>size
+
+    if c_addr_len_buff_len != sizeof(int):
+        raise ValueError, 'length of address length buffer needs to be sizeof(int)'
+
+    c_addr_len_buff[0] = c_addr_buff_len
+
+    ov = makeOV()
+    if obj is not None:
+        ov.obj = <PyObject *>obj
+
+    rc = WSARecvFrom(s, &ws_buf, 1, &bytes, &flags, c_addr_buff, c_addr_len_buff, <OVERLAPPED *>ov, NULL)
+
+    if rc == SOCKET_ERROR:
+        rc = WSAGetLastError()
+        if rc != ERROR_IO_PENDING:
+            PyMem_Free(ov)
+            return rc, 0
+
+    Py_XINCREF(obj)
+    return rc, bytes
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/wsasend.pxi b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/wsasend.pxi
new file mode 100644
index 0000000..4ad59ca
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/iocpsupport/wsasend.pxi
@@ -0,0 +1,30 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+def send(long s, object buff, object obj, unsigned long flags = 0):
+    cdef int rc
+    cdef myOVERLAPPED *ov
+    cdef WSABUF ws_buf
+    cdef unsigned long bytes
+    cdef Py_ssize_t size
+
+    PyObject_AsReadBuffer(buff, <void **>&ws_buf.buf, &size)
+    ws_buf.len = <DWORD>size
+
+    ov = makeOV()
+    if obj is not None:
+        ov.obj = <PyObject *>obj
+
+    rc = WSASend(s, &ws_buf, 1, &bytes, flags, <OVERLAPPED *>ov, NULL)
+
+    if rc == SOCKET_ERROR:
+        rc = WSAGetLastError()
+        if rc != ERROR_IO_PENDING:
+            PyMem_Free(ov)
+            return rc, bytes
+
+    Py_XINCREF(obj)
+    return rc, bytes
+
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/notes.txt b/ThirdParty/Twisted/twisted/internet/iocpreactor/notes.txt
new file mode 100644
index 0000000..4caffb8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/notes.txt
@@ -0,0 +1,24 @@
+test specifically:
+failed accept error message -- similar to test_tcp_internals
+immediate success on accept/connect/recv, including Event.ignore
+parametrize iocpsupport somehow -- via reactor?
+
+do:
+break handling -- WaitForSingleObject on the IOCP handle?
+iovecs for write buffer
+do not wait for a mainloop iteration if resumeProducing (in _handleWrite) does startWriting
+don't addActiveHandle in every call to startWriting/startReading
+iocpified process support
+  win32er-in-a-thread (or run GQCS in a thread -- it can't receive SIGBREAK)
+blocking in sendto() -- I think Windows can do that, especially with local UDP
+
+buildbot:
+run in vmware
+start from a persistent snapshot
+
+use a stub inside the vm to svnup/run tests/collect stdio
+lift logs through SMB? or ship them via tcp beams to the VM host
+
+have a timeout on the test run
+if we time out, take a screenshot, save it, kill the VM
+
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/reactor.py b/ThirdParty/Twisted/twisted/internet/iocpreactor/reactor.py
new file mode 100644
index 0000000..0c565ab
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/reactor.py
@@ -0,0 +1,275 @@
+# -*- test-case-name: twisted.internet.test.test_iocp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Reactor that uses IO completion ports
+"""
+
+import warnings, socket, sys
+
+from zope.interface import implements
+
+from twisted.internet import base, interfaces, main, error
+from twisted.python import log, failure
+from twisted.internet._dumbwin32proc import Process
+from twisted.internet.win32eventreactor import _ThreadedWin32EventsMixin
+
+from twisted.internet.iocpreactor import iocpsupport as _iocp
+from twisted.internet.iocpreactor.const import WAIT_TIMEOUT
+from twisted.internet.iocpreactor import tcp, udp
+
+try:
+    from twisted.protocols.tls import TLSMemoryBIOFactory
+except ImportError:
+    # Either pyOpenSSL isn't installed, or it is too old for this code to work.
+    # The reactor won't provide IReactorSSL.
+    TLSMemoryBIOFactory = None
+    _extraInterfaces = ()
+    warnings.warn(
+        "pyOpenSSL 0.10 or newer is required for SSL support in iocpreactor. "
+        "It is missing, so the reactor will not support SSL APIs.")
+else:
+    _extraInterfaces = (interfaces.IReactorSSL,)
+
+from twisted.python.compat import set
+
+MAX_TIMEOUT = 2000 # 2 seconds, see doIteration for explanation
+
+EVENTS_PER_LOOP = 1000 # XXX: what's a good value here?
+
+# keys to associate with normal and waker events
+KEY_NORMAL, KEY_WAKEUP = range(2)
+
+_NO_GETHANDLE = error.ConnectionFdescWentAway(
+                    'Handler has no getFileHandle method')
+_NO_FILEDESC = error.ConnectionFdescWentAway('Filedescriptor went away')
+
+
+
+class IOCPReactor(base._SignalReactorMixin, base.ReactorBase,
+                  _ThreadedWin32EventsMixin):
+    implements(interfaces.IReactorTCP, interfaces.IReactorUDP,
+               interfaces.IReactorMulticast, interfaces.IReactorProcess,
+               *_extraInterfaces)
+
+    port = None
+
+    def __init__(self):
+        base.ReactorBase.__init__(self)
+        self.port = _iocp.CompletionPort()
+        self.handles = set()
+
+
+    def addActiveHandle(self, handle):
+        self.handles.add(handle)
+
+
+    def removeActiveHandle(self, handle):
+        self.handles.discard(handle)
+
+
+    def doIteration(self, timeout):
+        """
+        Poll the IO completion port for new events.
+        """
+        # This function sits and waits for an IO completion event.
+        #
+        # There are two requirements: process IO events as soon as they arrive
+        # and process ctrl-break from the user in a reasonable amount of time.
+        #
+        # There are three kinds of waiting.
+        # 1) GetQueuedCompletionStatus (self.port.getEvent) to wait for IO
+        # events only.
+        # 2) Msg* family of wait functions that can stop waiting when
+        # ctrl-break is detected (then, I think, Python converts it into a
+        # KeyboardInterrupt)
+        # 3) *Ex family of wait functions that put the thread into an
+        # "alertable" wait state which is supposedly triggered by IO completion
+        #
+        # 2) and 3) can be combined. Trouble is, my IO completion is not
+        # causing 3) to trigger, possibly because I do not use an IO completion
+        # callback. Windows is weird.
+        # There are two ways to handle this. I could use MsgWaitForSingleObject
+        # here and GetQueuedCompletionStatus in a thread. Or I could poll with
+        # a reasonable interval. Guess what! Threads are hard.
+
+        processed_events = 0
+        if timeout is None:
+            timeout = MAX_TIMEOUT
+        else:
+            timeout = min(MAX_TIMEOUT, int(1000*timeout))
+        rc, bytes, key, evt = self.port.getEvent(timeout)
+        while 1:
+            if rc == WAIT_TIMEOUT:
+                break
+            if key != KEY_WAKEUP:
+                assert key == KEY_NORMAL
+                log.callWithLogger(evt.owner, self._callEventCallback,
+                                   rc, bytes, evt)
+                processed_events += 1
+            if processed_events >= EVENTS_PER_LOOP:
+                break
+            rc, bytes, key, evt = self.port.getEvent(0)
+
+
+    def _callEventCallback(self, rc, bytes, evt):
+        owner = evt.owner
+        why = None
+        try:
+            evt.callback(rc, bytes, evt)
+            handfn = getattr(owner, 'getFileHandle', None)
+            if not handfn:
+                why = _NO_GETHANDLE
+            elif handfn() == -1:
+                why = _NO_FILEDESC
+            if why:
+                return # ignore handles that were closed
+        except:
+            why = sys.exc_info()[1]
+            log.err()
+        if why:
+            owner.loseConnection(failure.Failure(why))
+
+
+    def installWaker(self):
+        pass
+
+
+    def wakeUp(self):
+        self.port.postEvent(0, KEY_WAKEUP, None)
+
+
+    def registerHandle(self, handle):
+        self.port.addHandle(handle, KEY_NORMAL)
+
+
+    def createSocket(self, af, stype):
+        skt = socket.socket(af, stype)
+        self.registerHandle(skt.fileno())
+        return skt
+
+
+    def listenTCP(self, port, factory, backlog=50, interface=''):
+        """
+        @see: twisted.internet.interfaces.IReactorTCP.listenTCP
+        """
+        p = tcp.Port(port, factory, backlog, interface, self)
+        p.startListening()
+        return p
+
+
+    def connectTCP(self, host, port, factory, timeout=30, bindAddress=None):
+        """
+        @see: twisted.internet.interfaces.IReactorTCP.connectTCP
+        """
+        c = tcp.Connector(host, port, factory, timeout, bindAddress, self)
+        c.connect()
+        return c
+
+
+    if TLSMemoryBIOFactory is not None:
+        def listenSSL(self, port, factory, contextFactory, backlog=50, interface=''):
+            """
+            @see: twisted.internet.interfaces.IReactorSSL.listenSSL
+            """
+            port = self.listenTCP(
+                port,
+                TLSMemoryBIOFactory(contextFactory, False, factory),
+                backlog, interface)
+            port._type = 'TLS'
+            return port
+
+
+        def connectSSL(self, host, port, factory, contextFactory, timeout=30, bindAddress=None):
+            """
+            @see: twisted.internet.interfaces.IReactorSSL.connectSSL
+            """
+            return self.connectTCP(
+                host, port,
+                TLSMemoryBIOFactory(contextFactory, True, factory),
+                timeout, bindAddress)
+    else:
+        def listenSSL(self, port, factory, contextFactory, backlog=50, interface=''):
+            """
+            Non-implementation of L{IReactorSSL.listenSSL}.  Some dependency
+            is not satisfied.  This implementation always raises
+            L{NotImplementedError}.
+            """
+            raise NotImplementedError(
+                "pyOpenSSL 0.10 or newer is required for SSL support in "
+                "iocpreactor. It is missing, so the reactor does not support "
+                "SSL APIs.")
+
+
+        def connectSSL(self, host, port, factory, contextFactory, timeout=30, bindAddress=None):
+            """
+            Non-implementation of L{IReactorSSL.connectSSL}.  Some dependency
+            is not satisfied.  This implementation always raises
+            L{NotImplementedError}.
+            """
+            raise NotImplementedError(
+                "pyOpenSSL 0.10 or newer is required for SSL support in "
+                "iocpreactor. It is missing, so the reactor does not support "
+                "SSL APIs.")
+
+
+    def listenUDP(self, port, protocol, interface='', maxPacketSize=8192):
+        """
+        Connects a given L{DatagramProtocol} to the given numeric UDP port.
+
+        @returns: object conforming to L{IListeningPort}.
+        """
+        p = udp.Port(port, protocol, interface, maxPacketSize, self)
+        p.startListening()
+        return p
+
+
+    def listenMulticast(self, port, protocol, interface='', maxPacketSize=8192,
+                        listenMultiple=False):
+        """
+        Connects a given DatagramProtocol to the given numeric UDP port.
+
+        EXPERIMENTAL.
+
+        @returns: object conforming to IListeningPort.
+        """
+        p = udp.MulticastPort(port, protocol, interface, maxPacketSize, self,
+                              listenMultiple)
+        p.startListening()
+        return p
+
+
+    def spawnProcess(self, processProtocol, executable, args=(), env={},
+                     path=None, uid=None, gid=None, usePTY=0, childFDs=None):
+        """
+        Spawn a process.
+        """
+        if uid is not None:
+            raise ValueError("Setting UID is unsupported on this platform.")
+        if gid is not None:
+            raise ValueError("Setting GID is unsupported on this platform.")
+        if usePTY:
+            raise ValueError("PTYs are unsupported on this platform.")
+        if childFDs is not None:
+            raise ValueError(
+                "Custom child file descriptor mappings are unsupported on "
+                "this platform.")
+        args, env = self._checkProcessArgs(args, env)
+        return Process(self, processProtocol, executable, args, env, path)
+
+
+    def removeAll(self):
+        res = list(self.handles)
+        self.handles.clear()
+        return res
+
+
+
+def install():
+    r = IOCPReactor()
+    main.installReactor(r)
+
+
+__all__ = ['IOCPReactor', 'install']
+
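A short usage sketch (illustrative, not part of the imported sources): selecting this reactor follows the usual Twisted pattern of calling install() before twisted.internet.reactor is imported anywhere else:

    # Illustrative sketch only, using the install() helper defined above.
    from twisted.internet.iocpreactor import reactor as iocpreactor
    iocpreactor.install()                 # registers an IOCPReactor

    from twisted.internet import reactor  # now bound to the IOCPReactor
    reactor.run()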
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/setup.py b/ThirdParty/Twisted/twisted/internet/iocpreactor/setup.py
new file mode 100644
index 0000000..b110fc5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/setup.py
@@ -0,0 +1,23 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Distutils file for building low-level IOCP bindings from their Pyrex source
+"""
+
+
+from distutils.core import setup
+from distutils.extension import Extension
+from Cython.Distutils import build_ext
+
+setup(name='iocpsupport',
+      ext_modules=[Extension('iocpsupport',
+                   ['iocpsupport/iocpsupport.pyx',
+                    'iocpsupport/winsock_pointers.c'],
+                   libraries = ['ws2_32'],
+                   )
+                  ],
+      cmdclass = {'build_ext': build_ext},
+      )
+
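For context, such a Pyrex/Cython extension would typically be built in place with a command along the lines of `python setup.py build_ext --inplace` run from this directory, after which the compiled iocpsupport module becomes importable by the reactor code above; the exact invocation depends on the local compiler toolchain.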
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/tcp.py b/ThirdParty/Twisted/twisted/internet/iocpreactor/tcp.py
new file mode 100644
index 0000000..d34f698
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/tcp.py
@@ -0,0 +1,578 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+TCP support for IOCP reactor
+"""
+
+import socket, operator, errno, struct
+
+from zope.interface import implements, classImplements
+
+from twisted.internet import interfaces, error, address, main, defer
+from twisted.internet.abstract import _LogOwner, isIPAddress, isIPv6Address
+from twisted.internet.tcp import _SocketCloser, Connector as TCPConnector
+from twisted.internet.tcp import _AbortingMixin, _BaseBaseClient, _BaseTCPClient
+from twisted.python import log, failure, reflect, util
+
+from twisted.internet.iocpreactor import iocpsupport as _iocp, abstract
+from twisted.internet.iocpreactor.interfaces import IReadWriteHandle
+from twisted.internet.iocpreactor.const import ERROR_IO_PENDING
+from twisted.internet.iocpreactor.const import SO_UPDATE_CONNECT_CONTEXT
+from twisted.internet.iocpreactor.const import SO_UPDATE_ACCEPT_CONTEXT
+from twisted.internet.iocpreactor.const import ERROR_CONNECTION_REFUSED
+from twisted.internet.iocpreactor.const import ERROR_NETWORK_UNREACHABLE
+
+try:
+    from twisted.internet._newtls import startTLS as _startTLS
+except ImportError:
+    _startTLS = None
+
+# ConnectEx returns these. XXX: find out what it does for timeout
+connectExErrors = {
+        ERROR_CONNECTION_REFUSED: errno.WSAECONNREFUSED,
+        ERROR_NETWORK_UNREACHABLE: errno.WSAENETUNREACH,
+        }
+
+class Connection(abstract.FileHandle, _SocketCloser, _AbortingMixin):
+    """
+    @ivar TLS: C{False} to indicate the connection is in normal TCP mode,
+        C{True} to indicate that TLS has been started and that operations must
+        be routed through the L{TLSMemoryBIOProtocol} instance.
+    """
+    implements(IReadWriteHandle, interfaces.ITCPTransport,
+               interfaces.ISystemHandle)
+
+    TLS = False
+
+
+    def __init__(self, sock, proto, reactor=None):
+        abstract.FileHandle.__init__(self, reactor)
+        self.socket = sock
+        self.getFileHandle = sock.fileno
+        self.protocol = proto
+
+
+    def getHandle(self):
+        return self.socket
+
+
+    def dataReceived(self, rbuffer):
+        # XXX: some day, we'll have protocols that can handle raw buffers
+        self.protocol.dataReceived(str(rbuffer))
+
+
+    def readFromHandle(self, bufflist, evt):
+        return _iocp.recv(self.getFileHandle(), bufflist, evt)
+
+
+    def writeToHandle(self, buff, evt):
+        """
+        Send C{buff} to current file handle using C{_iocp.send}. The buffer
+        sent is limited to a size of C{self.SEND_LIMIT}.
+        """
+        return _iocp.send(self.getFileHandle(),
+            buffer(buff, 0, self.SEND_LIMIT), evt)
+
+
+    def _closeWriteConnection(self):
+        try:
+            getattr(self.socket, self._socketShutdownMethod)(1)
+        except socket.error:
+            pass
+        p = interfaces.IHalfCloseableProtocol(self.protocol, None)
+        if p:
+            try:
+                p.writeConnectionLost()
+            except:
+                f = failure.Failure()
+                log.err()
+                self.connectionLost(f)
+
+
+    def readConnectionLost(self, reason):
+        p = interfaces.IHalfCloseableProtocol(self.protocol, None)
+        if p:
+            try:
+                p.readConnectionLost()
+            except:
+                log.err()
+                self.connectionLost(failure.Failure())
+        else:
+            self.connectionLost(reason)
+
+
+    def connectionLost(self, reason):
+        if self.disconnected:
+            return
+        abstract.FileHandle.connectionLost(self, reason)
+        isClean = (reason is None or
+                   not reason.check(error.ConnectionAborted))
+        self._closeSocket(isClean)
+        protocol = self.protocol
+        del self.protocol
+        del self.socket
+        del self.getFileHandle
+        protocol.connectionLost(reason)
+
+
+    def logPrefix(self):
+        """
+        Return the prefix to log with when I own the logging thread.
+        """
+        return self.logstr
+
+
+    def getTcpNoDelay(self):
+        return operator.truth(self.socket.getsockopt(socket.IPPROTO_TCP,
+                                                     socket.TCP_NODELAY))
+
+
+    def setTcpNoDelay(self, enabled):
+        self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, enabled)
+
+
+    def getTcpKeepAlive(self):
+        return operator.truth(self.socket.getsockopt(socket.SOL_SOCKET,
+                                                     socket.SO_KEEPALIVE))
+
+
+    def setTcpKeepAlive(self, enabled):
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, enabled)
+
+
+    if _startTLS is not None:
+        def startTLS(self, contextFactory, normal=True):
+            """
+            @see: L{ITLSTransport.startTLS}
+            """
+            _startTLS(self, contextFactory, normal, abstract.FileHandle)
+
+
+    def write(self, data):
+        """
+        Write some data, either directly to the underlying handle or, if TLS
+        has been started, to the L{TLSMemoryBIOProtocol} for it to encrypt and
+        send.
+
+        @see: L{ITCPTransport.write}
+        """
+        if self.disconnected:
+            return
+        if self.TLS:
+            self.protocol.write(data)
+        else:
+            abstract.FileHandle.write(self, data)
+
+
+    def writeSequence(self, iovec):
+        """
+        Write some data, either directly to the underlying handle or, if TLS
+        has been started, to the L{TLSMemoryBIOProtocol} for it to encrypt and
+        send.
+
+        @see: L{ITCPTransport.writeSequence}
+        """
+        if self.disconnected:
+            return
+        if self.TLS:
+            self.protocol.writeSequence(iovec)
+        else:
+            abstract.FileHandle.writeSequence(self, iovec)
+
+
+    def loseConnection(self, reason=None):
+        """
+        Close the underlying handle or, if TLS has been started, first shut it
+        down.
+
+        @see: L{ITCPTransport.loseConnection}
+        """
+        if self.TLS:
+            if self.connected and not self.disconnecting:
+                self.protocol.loseConnection()
+        else:
+            abstract.FileHandle.loseConnection(self, reason)
+
+
+    def registerProducer(self, producer, streaming):
+        """
+        Register a producer.
+
+        If TLS is enabled, the TLS connection handles this.
+        """
+        if self.TLS:
+            # Registering a producer before we're connected shouldn't be a
+            # problem. If we end up with a write(), that's already handled in
+            # the write() code above, and there are no other potential
+            # side-effects.
+            self.protocol.registerProducer(producer, streaming)
+        else:
+            abstract.FileHandle.registerProducer(self, producer, streaming)
+
+
+    def unregisterProducer(self):
+        """
+        Unregister a producer.
+
+        If TLS is enabled, the TLS connection handles this.
+        """
+        if self.TLS:
+            self.protocol.unregisterProducer()
+        else:
+            abstract.FileHandle.unregisterProducer(self)
+
+if _startTLS is not None:
+    classImplements(Connection, interfaces.ITLSTransport)
+
+
+
+class Client(_BaseBaseClient, _BaseTCPClient, Connection):
+    """
+    @ivar _tlsClientDefault: Always C{True}, indicating that this is a client
+        connection, and by default when TLS is negotiated this class will act as
+        a TLS client.
+    """
+    addressFamily = socket.AF_INET
+    socketType = socket.SOCK_STREAM
+
+    _tlsClientDefault = True
+    _commonConnection = Connection
+
+    def __init__(self, host, port, bindAddress, connector, reactor):
+        # ConnectEx documentation says socket _has_ to be bound
+        if bindAddress is None:
+            bindAddress = ('', 0)
+        self.reactor = reactor # createInternetSocket needs this
+        _BaseTCPClient.__init__(self, host, port, bindAddress, connector,
+                                reactor)
+
+
+    def createInternetSocket(self):
+        """
+        Create a socket registered with the IOCP reactor.
+
+        @see: L{_BaseTCPClient}
+        """
+        return self.reactor.createSocket(self.addressFamily, self.socketType)
+
+
+    def _collectSocketDetails(self):
+        """
+        Clean up potentially circular references to the socket and to its
+        C{getFileHandle} method.
+
+        @see: L{_BaseBaseClient}
+        """
+        del self.socket, self.getFileHandle
+
+
+    def _stopReadingAndWriting(self):
+        """
+        Remove the active handle from the reactor.
+
+        @see: L{_BaseBaseClient}
+        """
+        self.reactor.removeActiveHandle(self)
+
+
+    def cbConnect(self, rc, bytes, evt):
+        if rc:
+            rc = connectExErrors.get(rc, rc)
+            self.failIfNotConnected(error.getConnectError((rc,
+                                    errno.errorcode.get(rc, 'Unknown error'))))
+        else:
+            self.socket.setsockopt(
+                socket.SOL_SOCKET, SO_UPDATE_CONNECT_CONTEXT,
+                struct.pack('P', self.socket.fileno()))
+            self.protocol = self.connector.buildProtocol(self.getPeer())
+            self.connected = True
+            logPrefix = self._getLogPrefix(self.protocol)
+            self.logstr = logPrefix + ",client"
+            self.protocol.makeConnection(self)
+            self.startReading()
+
+
+    def doConnect(self):
+        if not hasattr(self, "connector"):
+            # this happens if we connector.stopConnecting in
+            # factory.startedConnecting
+            return
+        assert _iocp.have_connectex
+        self.reactor.addActiveHandle(self)
+        evt = _iocp.Event(self.cbConnect, self)
+
+        rc = _iocp.connect(self.socket.fileno(), self.realAddress, evt)
+        if rc and rc != ERROR_IO_PENDING:
+            self.cbConnect(rc, 0, evt)
+
+
+
+class Server(Connection):
+    """
+    Serverside socket-stream connection class.
+
+    I am a serverside network connection transport; a socket which came from an
+    accept() on a server.
+
+    @ivar _tlsClientDefault: Always C{False}, indicating that this is a server
+        connection, and by default when TLS is negotiated this class will act as
+        a TLS server.
+    """
+
+    _tlsClientDefault = False
+
+
+    def __init__(self, sock, protocol, clientAddr, serverAddr, sessionno, reactor):
+        """
+        Server(sock, protocol, client, server, sessionno)
+
+        Initialize me with a socket, a protocol, a descriptor for my peer (a
+        tuple of host, port describing the other end of the connection), an
+        instance of Port, and a session number.
+        """
+        Connection.__init__(self, sock, protocol, reactor)
+        self.serverAddr = serverAddr
+        self.clientAddr = clientAddr
+        self.sessionno = sessionno
+        logPrefix = self._getLogPrefix(self.protocol)
+        self.logstr = "%s,%s,%s" % (logPrefix, sessionno, self.clientAddr.host)
+        self.repstr = "<%s #%s on %s>" % (self.protocol.__class__.__name__,
+                                          self.sessionno, self.serverAddr.port)
+        self.connected = True
+        self.startReading()
+
+
+    def __repr__(self):
+        """
+        A string representation of this connection.
+        """
+        return self.repstr
+
+
+    def getHost(self):
+        """
+        Returns an IPv4Address.
+
+        This indicates the server's address.
+        """
+        return self.serverAddr
+
+
+    def getPeer(self):
+        """
+        Returns an IPv4Address.
+
+        This indicates the client's address.
+        """
+        return self.clientAddr
+
+
+
+class Connector(TCPConnector):
+    def _makeTransport(self):
+        return Client(self.host, self.port, self.bindAddress, self,
+                      self.reactor)
+
+
+
+class Port(_SocketCloser, _LogOwner):
+    implements(interfaces.IListeningPort)
+
+    connected = False
+    disconnected = False
+    disconnecting = False
+    addressFamily = socket.AF_INET
+    socketType = socket.SOCK_STREAM
+    _addressType = address.IPv4Address
+    sessionno = 0
+
+    # Actual port number being listened on, only set to a non-None
+    # value when we are actually listening.
+    _realPortNumber = None
+
+    # A string describing the connections which will be created by this port.
+    # Normally this is C{"TCP"}, since this is a TCP port, but when the TLS
+    # implementation re-uses this class it overrides the value with C{"TLS"}.
+    # Only used for logging.
+    _type = 'TCP'
+
+    def __init__(self, port, factory, backlog=50, interface='', reactor=None):
+        self.port = port
+        self.factory = factory
+        self.backlog = backlog
+        self.interface = interface
+        self.reactor = reactor
+        if isIPv6Address(interface):
+            self.addressFamily = socket.AF_INET6
+            self._addressType = address.IPv6Address
+
+
+    def __repr__(self):
+        if self._realPortNumber is not None:
+            return "<%s of %s on %s>" % (self.__class__,
+                                         self.factory.__class__,
+                                         self._realPortNumber)
+        else:
+            return "<%s of %s (not listening)>" % (self.__class__,
+                                                   self.factory.__class__)
+
+
+    def startListening(self):
+        try:
+            skt = self.reactor.createSocket(self.addressFamily,
+                                            self.socketType)
+            # TODO: resolve self.interface if necessary
+            if self.addressFamily == socket.AF_INET6:
+                addr = socket.getaddrinfo(self.interface, self.port)[0][4]
+            else:
+                addr = (self.interface, self.port)
+            skt.bind(addr)
+        except socket.error, le:
+            raise error.CannotListenError, (self.interface, self.port, le)
+
+        self.addrLen = _iocp.maxAddrLen(skt.fileno())
+
+        # Make sure that if we listened on port 0, we update that to
+        # reflect what the OS actually assigned us.
+        self._realPortNumber = skt.getsockname()[1]
+
+        log.msg("%s starting on %s" % (self._getLogPrefix(self.factory),
+                                       self._realPortNumber))
+
+        self.factory.doStart()
+        skt.listen(self.backlog)
+        self.connected = True
+        self.disconnected = False
+        self.reactor.addActiveHandle(self)
+        self.socket = skt
+        self.getFileHandle = self.socket.fileno
+        self.doAccept()
+
+
+    def loseConnection(self, connDone=failure.Failure(main.CONNECTION_DONE)):
+        """
+        Stop accepting connections on this port.
+
+        This will shut down my socket and call self.connectionLost().
+        It returns a deferred which will fire successfully when the
+        port is actually closed.
+        """
+        self.disconnecting = True
+        if self.connected:
+            self.deferred = defer.Deferred()
+            self.reactor.callLater(0, self.connectionLost, connDone)
+            return self.deferred
+
+    stopListening = loseConnection
+
+
+    def _logConnectionLostMsg(self):
+        """
+        Log message for closing port
+        """
+        log.msg('(%s Port %s Closed)' % (self._type, self._realPortNumber))
+
+
+    def connectionLost(self, reason):
+        """
+        Cleans up the socket.
+        """
+        self._logConnectionLostMsg()
+        self._realPortNumber = None
+        d = None
+        if hasattr(self, "deferred"):
+            d = self.deferred
+            del self.deferred
+
+        self.disconnected = True
+        self.reactor.removeActiveHandle(self)
+        self.connected = False
+        self._closeSocket(True)
+        del self.socket
+        del self.getFileHandle
+
+        try:
+            self.factory.doStop()
+        except:
+            self.disconnecting = False
+            if d is not None:
+                d.errback(failure.Failure())
+            else:
+                raise
+        else:
+            self.disconnecting = False
+            if d is not None:
+                d.callback(None)
+
+
+    def logPrefix(self):
+        """
+        Returns the name of my class, to prefix log entries with.
+        """
+        return reflect.qual(self.factory.__class__)
+
+
+    def getHost(self):
+        """
+        Returns an IPv4Address.
+
+        This indicates the server's address.
+        """
+        host, port = self.socket.getsockname()[:2]
+        return self._addressType('TCP', host, port)
+
+
+    def cbAccept(self, rc, bytes, evt):
+        self.handleAccept(rc, evt)
+        if not (self.disconnecting or self.disconnected):
+            self.doAccept()
+
+
+    def handleAccept(self, rc, evt):
+        if self.disconnecting or self.disconnected:
+            return False
+
+        # possible errors:
+        # (WSAEMFILE, WSAENOBUFS, WSAENFILE, WSAENOMEM, WSAECONNABORTED)
+        if rc:
+            log.msg("Could not accept new connection -- %s (%s)" %
+                    (errno.errorcode.get(rc, 'unknown error'), rc))
+            return False
+        else:
+            evt.newskt.setsockopt(
+                socket.SOL_SOCKET, SO_UPDATE_ACCEPT_CONTEXT,
+                struct.pack('P', self.socket.fileno()))
+            family, lAddr, rAddr = _iocp.get_accept_addrs(evt.newskt.fileno(),
+                                                          evt.buff)
+            assert family == self.addressFamily
+
+            protocol = self.factory.buildProtocol(
+                self._addressType('TCP', rAddr[0], rAddr[1]))
+            if protocol is None:
+                evt.newskt.close()
+            else:
+                s = self.sessionno
+                self.sessionno = s+1
+                transport = Server(evt.newskt, protocol,
+                        self._addressType('TCP', rAddr[0], rAddr[1]),
+                        self._addressType('TCP', lAddr[0], lAddr[1]),
+                        s, self.reactor)
+                protocol.makeConnection(transport)
+            return True
+
+
+    def doAccept(self):
+        evt = _iocp.Event(self.cbAccept, self)
+
+        # see AcceptEx documentation
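+        # (AcceptEx needs space for two addresses, each padded by 16 bytes)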
+        evt.buff = buff = _iocp.AllocateReadBuffer(2 * (self.addrLen + 16))
+
+        evt.newskt = newskt = self.reactor.createSocket(self.addressFamily,
+                                                        self.socketType)
+        rc = _iocp.accept(self.socket.fileno(), newskt.fileno(), buff, evt)
+
+        if rc and rc != ERROR_IO_PENDING:
+            self.handleAccept(rc, evt)
+
+
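Tying the TCP pieces together, a minimal echo server sketch (illustrative only; it assumes the IOCP reactor was installed as in the earlier sketch, and the port number is arbitrary):

    # Illustrative sketch only: data flows through the Server/Connection
    # classes defined above.
    from twisted.internet import protocol, reactor

    class Echo(protocol.Protocol):
        def dataReceived(self, data):
            self.transport.write(data)    # routed through Connection.write()

    factory = protocol.ServerFactory()
    factory.protocol = Echo
    reactor.listenTCP(8000, factory)      # returns the tcp.Port defined above
    reactor.run()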
diff --git a/ThirdParty/Twisted/twisted/internet/iocpreactor/udp.py b/ThirdParty/Twisted/twisted/internet/iocpreactor/udp.py
new file mode 100644
index 0000000..4dec51f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/iocpreactor/udp.py
@@ -0,0 +1,382 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+UDP support for IOCP reactor
+"""
+
+import socket, operator, struct, warnings, errno
+
+from zope.interface import implements
+
+from twisted.internet import defer, address, error, interfaces
+from twisted.internet.abstract import isIPAddress
+from twisted.python import log, failure
+
+from twisted.internet.iocpreactor.const import ERROR_IO_PENDING
+from twisted.internet.iocpreactor.const import ERROR_CONNECTION_REFUSED
+from twisted.internet.iocpreactor.const import ERROR_PORT_UNREACHABLE
+from twisted.internet.iocpreactor.interfaces import IReadWriteHandle
+from twisted.internet.iocpreactor import iocpsupport as _iocp, abstract
+
+
+
+class Port(abstract.FileHandle):
+    """
+    UDP port, listening for packets.
+    """
+    implements(
+        IReadWriteHandle, interfaces.IListeningPort, interfaces.IUDPTransport,
+        interfaces.ISystemHandle)
+
+    addressFamily = socket.AF_INET
+    socketType = socket.SOCK_DGRAM
+    dynamicReadBuffers = False
+
+    # Actual port number being listened on, only set to a non-None
+    # value when we are actually listening.
+    _realPortNumber = None
+
+
+    def __init__(self, port, proto, interface='', maxPacketSize=8192,
+                 reactor=None):
+        """
+        Initialize with a numeric port to listen on.
+        """
+        self.port = port
+        self.protocol = proto
+        self.readBufferSize = maxPacketSize
+        self.interface = interface
+        self.setLogStr()
+        self._connectedAddr = None
+
+        abstract.FileHandle.__init__(self, reactor)
+
+        skt = socket.socket(self.addressFamily, self.socketType)
+        addrLen = _iocp.maxAddrLen(skt.fileno())
+        self.addressBuffer = _iocp.AllocateReadBuffer(addrLen)
+        # WSARecvFrom takes an int
+        self.addressLengthBuffer = _iocp.AllocateReadBuffer(
+                struct.calcsize('i'))
+
+
+    def __repr__(self):
+        if self._realPortNumber is not None:
+            return ("<%s on %s>" %
+                    (self.protocol.__class__, self._realPortNumber))
+        else:
+            return "<%s not connected>" % (self.protocol.__class__,)
+
+
+    def getHandle(self):
+        """
+        Return a socket object.
+        """
+        return self.socket
+
+
+    def startListening(self):
+        """
+        Create and bind my socket, and begin listening on it.
+
+        This is called on unserialization, and must be called after creating a
+        server to begin listening on the specified port.
+        """
+        self._bindSocket()
+        self._connectToProtocol()
+
+
+    def createSocket(self):
+        return self.reactor.createSocket(self.addressFamily, self.socketType)
+
+
+    def _bindSocket(self):
+        try:
+            skt = self.createSocket()
+            skt.bind((self.interface, self.port))
+        except socket.error, le:
+            raise error.CannotListenError, (self.interface, self.port, le)
+
+        # Make sure that if we listened on port 0, we update that to
+        # reflect what the OS actually assigned us.
+        self._realPortNumber = skt.getsockname()[1]
+
+        log.msg("%s starting on %s" % (
+                self._getLogPrefix(self.protocol), self._realPortNumber))
+
+        self.connected = True
+        self.socket = skt
+        self.getFileHandle = self.socket.fileno
+
+
+    def _connectToProtocol(self):
+        self.protocol.makeConnection(self)
+        self.startReading()
+        self.reactor.addActiveHandle(self)
+
+
+    def cbRead(self, rc, bytes, evt):
+        if self.reading:
+            self.handleRead(rc, bytes, evt)
+            self.doRead()
+
+
+    def handleRead(self, rc, bytes, evt):
+        if rc in (errno.WSAECONNREFUSED, errno.WSAECONNRESET,
+                  ERROR_CONNECTION_REFUSED, ERROR_PORT_UNREACHABLE):
+            if self._connectedAddr:
+                self.protocol.connectionRefused()
+        elif rc:
+            log.msg("error in recvfrom -- %s (%s)" %
+                    (errno.errorcode.get(rc, 'unknown error'), rc))
+        else:
+            try:
+                self.protocol.datagramReceived(str(evt.buff[:bytes]),
+                    _iocp.makesockaddr(evt.addr_buff))
+            except:
+                log.err()
+
+
+    def doRead(self):
+        evt = _iocp.Event(self.cbRead, self)
+
+        evt.buff = buff = self._readBuffers[0]
+        evt.addr_buff = addr_buff = self.addressBuffer
+        evt.addr_len_buff = addr_len_buff = self.addressLengthBuffer
+        rc, bytes = _iocp.recvfrom(self.getFileHandle(), buff,
+                                   addr_buff, addr_len_buff, evt)
+
+        if rc and rc != ERROR_IO_PENDING:
+            self.handleRead(rc, bytes, evt)
+
+
+    def write(self, datagram, addr=None):
+        """
+        Write a datagram.
+
+        @param addr: should be a tuple (ip, port), can be None in connected
+        mode.
+        """
+        if self._connectedAddr:
+            assert addr in (None, self._connectedAddr)
+            try:
+                return self.socket.send(datagram)
+            except socket.error, se:
+                no = se.args[0]
+                if no == errno.WSAEINTR:
+                    return self.write(datagram)
+                elif no == errno.WSAEMSGSIZE:
+                    raise error.MessageLengthError, "message too long"
+                elif no in (errno.WSAECONNREFUSED, errno.WSAECONNRESET,
+                            ERROR_CONNECTION_REFUSED, ERROR_PORT_UNREACHABLE):
+                    self.protocol.connectionRefused()
+                else:
+                    raise
+        else:
+            assert addr != None
+            if not addr[0].replace(".", "").isdigit():
+                warnings.warn("Please only pass IPs to write(), not hostnames",
+                              DeprecationWarning, stacklevel=2)
+            try:
+                return self.socket.sendto(datagram, addr)
+            except socket.error, se:
+                no = se.args[0]
+                if no == errno.WSAEINTR:
+                    return self.write(datagram, addr)
+                elif no == errno.WSAEMSGSIZE:
+                    raise error.MessageLengthError, "message too long"
+                elif no in (errno.WSAECONNREFUSED, errno.WSAECONNRESET,
+                            ERROR_CONNECTION_REFUSED, ERROR_PORT_UNREACHABLE):
+                    # In non-connected UDP, ECONNREFUSED is platform dependent
+                    # (I think) and the info is not necessarily useful.
+                    # Nevertheless maybe we should call connectionRefused? XXX
+                    return
+                else:
+                    raise
+
+
+    def writeSequence(self, seq, addr):
+        self.write("".join(seq), addr)
+
+
+    def connect(self, host, port):
+        """
+        'Connect' to remote server.
+        """
+        if self._connectedAddr:
+            raise RuntimeError(
+                "already connected, reconnecting is not currently supported "
+                "(talk to itamar if you want this)")
+        if not isIPAddress(host):
+            raise ValueError, "please pass only IP addresses, not domain names"
+        self._connectedAddr = (host, port)
+        self.socket.connect((host, port))
+
+
+    def _loseConnection(self):
+        self.stopReading()
+        self.reactor.removeActiveHandle(self)
+        if self.connected: # actually means if we are *listening*
+            self.reactor.callLater(0, self.connectionLost)
+
+
+    def stopListening(self):
+        if self.connected:
+            result = self.d = defer.Deferred()
+        else:
+            result = None
+        self._loseConnection()
+        return result
+
+
+    def loseConnection(self):
+        warnings.warn("Please use stopListening() to disconnect port",
+                      DeprecationWarning, stacklevel=2)
+        self.stopListening()
+
+
+    def connectionLost(self, reason=None):
+        """
+        Cleans up my socket.
+        """
+        log.msg('(UDP Port %s Closed)' % self._realPortNumber)
+        self._realPortNumber = None
+        abstract.FileHandle.connectionLost(self, reason)
+        self.protocol.doStop()
+        self.socket.close()
+        del self.socket
+        del self.getFileHandle
+        if hasattr(self, "d"):
+            self.d.callback(None)
+            del self.d
+
+
+    def setLogStr(self):
+        """
+        Initialize the C{logstr} attribute to be used by C{logPrefix}.
+        """
+        logPrefix = self._getLogPrefix(self.protocol)
+        self.logstr = "%s (UDP)" % logPrefix
+
+
+    def logPrefix(self):
+        """
+        Returns the name of my class, to prefix log entries with.
+        """
+        return self.logstr
+
+
+    def getHost(self):
+        """
+        Returns an IPv4Address.
+
+        This indicates the address from which I am connecting.
+        """
+        return address.IPv4Address('UDP', *self.socket.getsockname())
+
+
+
+class MulticastMixin:
+    """
+    Implement multicast functionality.
+    """
+
+
+    def getOutgoingInterface(self):
+        i = self.socket.getsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF)
+        return socket.inet_ntoa(struct.pack("@i", i))
+
+
+    def setOutgoingInterface(self, addr):
+        """
+        Returns Deferred of success.
+        """
+        return self.reactor.resolve(addr).addCallback(self._setInterface)
+
+
+    def _setInterface(self, addr):
+        i = socket.inet_aton(addr)
+        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, i)
+        return 1
+
+
+    def getLoopbackMode(self):
+        return self.socket.getsockopt(socket.IPPROTO_IP,
+                                      socket.IP_MULTICAST_LOOP)
+
+
+    def setLoopbackMode(self, mode):
+        mode = struct.pack("b", operator.truth(mode))
+        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP,
+                               mode)
+
+
+    def getTTL(self):
+        return self.socket.getsockopt(socket.IPPROTO_IP,
+                                      socket.IP_MULTICAST_TTL)
+
+
+    def setTTL(self, ttl):
+        ttl = struct.pack("B", ttl)
+        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl)
+
+
+    def joinGroup(self, addr, interface=""):
+        """
+        Join a multicast group. Returns Deferred of success.
+        """
+        return self.reactor.resolve(addr).addCallback(self._joinAddr1,
+                                                      interface, 1)
+
+
+    def _joinAddr1(self, addr, interface, join):
+        return self.reactor.resolve(interface).addCallback(self._joinAddr2,
+                                                           addr, join)
+
+
+    def _joinAddr2(self, interface, addr, join):
+        addr = socket.inet_aton(addr)
+        interface = socket.inet_aton(interface)
+        if join:
+            cmd = socket.IP_ADD_MEMBERSHIP
+        else:
+            cmd = socket.IP_DROP_MEMBERSHIP
+        try:
+            self.socket.setsockopt(socket.IPPROTO_IP, cmd, addr + interface)
+        except socket.error, e:
+            return failure.Failure(error.MulticastJoinError(addr, interface,
+                                                            *e.args))
+
+
+    def leaveGroup(self, addr, interface=""):
+        """
+        Leave multicast group, return Deferred of success.
+        """
+        return self.reactor.resolve(addr).addCallback(self._joinAddr1,
+                                                      interface, 0)
+
+
+
+class MulticastPort(MulticastMixin, Port):
+    """
+    UDP Port that supports multicasting.
+    """
+
+    implements(interfaces.IMulticastTransport)
+
+
+    def __init__(self, port, proto, interface='', maxPacketSize=8192,
+                 reactor=None, listenMultiple=False):
+        Port.__init__(self, port, proto, interface, maxPacketSize, reactor)
+        self.listenMultiple = listenMultiple
+
+
+    def createSocket(self):
+        skt = Port.createSocket(self)
+        if self.listenMultiple:
+            skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            if hasattr(socket, "SO_REUSEPORT"):
+                skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+        return skt
+
+
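
The Port and MulticastPort classes above are the transport side of Twisted's
UDP support; applications normally reach them through reactor.listenUDP() or
reactor.listenMulticast() with a DatagramProtocol rather than constructing
them directly. A minimal sketch using the public Twisted API (not part of the
imported sources; the Echo class name is illustrative)::

    from twisted.internet import reactor
    from twisted.internet.protocol import DatagramProtocol

    class Echo(DatagramProtocol):
        def datagramReceived(self, datagram, addr):
            # addr is the (host, port) the datagram came from; write()
            # goes through Port.write() shown above.
            self.transport.write(datagram, addr)

    port = reactor.listenUDP(8000, Echo())   # provides IListeningPort
    reactor.run()

As implemented above, port.stopListening() returns a Deferred that fires once
the underlying socket has been closed.
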
diff --git a/ThirdParty/Twisted/twisted/internet/kqreactor.py b/ThirdParty/Twisted/twisted/internet/kqreactor.py
new file mode 100644
index 0000000..bb1b6a3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/kqreactor.py
@@ -0,0 +1,305 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A kqueue()/kevent() based implementation of the Twisted main loop.
+
+To use this reactor, start your application specifying the kqueue reactor::
+
+   twistd --reactor kqueue ...
+
+To install the event loop from code (and you should do this before any
+connections, listeners or connectors are added)::
+
+   from twisted.internet import kqreactor
+   kqreactor.install()
+
+This implementation depends on Python 2.6 or higher, which has kqueue support
+built into the select module.
+
+Note that you should use Python 2.6.5 or higher, since earlier versions of
+select.kqueue were affected by the bug described in
+U{http://bugs.python.org/issue5910}.
+"""
+
+import errno
+
+from zope.interface import implements
+
+from select import kqueue, kevent
+from select import KQ_FILTER_READ, KQ_FILTER_WRITE
+from select import KQ_EV_DELETE, KQ_EV_ADD, KQ_EV_EOF
+
+from twisted.internet.interfaces import IReactorFDSet, IReactorDaemonize
+
+from twisted.python import log, failure
+from twisted.internet import main, posixbase
+
+
+class KQueueReactor(posixbase.PosixReactorBase):
+    """
+    A reactor that uses kqueue(2)/kevent(2) and relies on Python 2.6 or
+    higher, which has built-in support for kqueue in the select module.
+
+    @ivar _kq: A L{kqueue} which will be used to check for I/O readiness.
+
+    @ivar _selectables: A dictionary mapping integer file descriptors to
+        instances of L{FileDescriptor} which have been registered with the
+        reactor.  All L{FileDescriptors} which are currently receiving read or
+        write readiness notifications will be present as values in this
+        dictionary.
+
+    @ivar _reads: A dictionary mapping integer file descriptors to arbitrary
+        values (this is essentially a set).  Keys in this dictionary will be
+        registered with C{_kq} for read readiness notifications which will be
+        dispatched to the corresponding L{FileDescriptor} instances in
+        C{_selectables}.
+
+    @ivar _writes: A dictionary mapping integer file descriptors to arbitrary
+        values (this is essentially a set).  Keys in this dictionary will be
+        registered with C{_kq} for write readiness notifications which will be
+        dispatched to the corresponding L{FileDescriptor} instances in
+        C{_selectables}.
+    """
+    implements(IReactorFDSet, IReactorDaemonize)
+
+
+    def __init__(self):
+        """
+        Initialize kqueue object, file descriptor tracking dictionaries, and the
+        base class.
+
+        See:
+            - http://docs.python.org/library/select.html
+            - www.freebsd.org/cgi/man.cgi?query=kqueue
+            - people.freebsd.org/~jlemon/papers/kqueue.pdf
+        """
+        self._kq = kqueue()
+        self._reads = {}
+        self._writes = {}
+        self._selectables = {}
+        posixbase.PosixReactorBase.__init__(self)
+
+
+    def _updateRegistration(self, fd, filter, op):
+        """
+        Private method for changing the kqueue registration of a given FD,
+        using the given filter and operation. This never blocks and
+        returns nothing.
+        """
+        self._kq.control([kevent(fd, filter, op)], 0, 0)
+
+
+    def beforeDaemonize(self):
+        """
+        Implement L{IReactorDaemonize.beforeDaemonize}.
+        """
+        # Twisted-internal method called during daemonization (when application
+        # is started via twistd). This is called right before the magic double
+        # forking done for daemonization. We cleanly close the kqueue() and later
+        # recreate it. This is needed since a) kqueue() descriptors are not
+        # inherited across forks and b) twistd creates the reactor before
+        # daemonization (and also adds at least 1 reader to the reactor, an
+        # instance of twisted.internet.posixbase._UnixWaker).
+        #
+        # See: twisted.scripts._twistd_unix.daemonize()
+        self._kq.close()
+        self._kq = None
+
+
+    def afterDaemonize(self):
+        """
+        Implement L{IReactorDaemonize.afterDaemonize}.
+        """
+        # Twisted-internal method called during daemonization. This is called right
+        # after daemonization and recreates the kqueue() and any readers/writers
+        # that were added before. Note that you MUST NOT call any reactor methods
+        # in between beforeDaemonize() and afterDaemonize()!
+        self._kq = kqueue()
+        for fd in self._reads:
+            self._updateRegistration(fd, KQ_FILTER_READ, KQ_EV_ADD)
+        for fd in self._writes:
+            self._updateRegistration(fd, KQ_FILTER_WRITE, KQ_EV_ADD)
+
+
+    def addReader(self, reader):
+        """
+        Implement L{IReactorFDSet.addReader}.
+        """
+        fd = reader.fileno()
+        if fd not in self._reads:
+            try:
+                self._updateRegistration(fd, KQ_FILTER_READ, KQ_EV_ADD)
+            except OSError:
+                pass
+            finally:
+                self._selectables[fd] = reader
+                self._reads[fd] = 1
+
+
+    def addWriter(self, writer):
+        """
+        Implement L{IReactorFDSet.addWriter}.
+        """
+        fd = writer.fileno()
+        if fd not in self._writes:
+            try:
+                self._updateRegistration(fd, KQ_FILTER_WRITE, KQ_EV_ADD)
+            except OSError:
+                pass
+            finally:
+                self._selectables[fd] = writer
+                self._writes[fd] = 1
+
+
+    def removeReader(self, reader):
+        """
+        Implement L{IReactorFDSet.removeReader}.
+        """
+        wasLost = False
+        try:
+            fd = reader.fileno()
+        except:
+            fd = -1
+        if fd == -1:
+            for fd, fdes in self._selectables.items():
+                if reader is fdes:
+                    wasLost = True
+                    break
+            else:
+                return
+        if fd in self._reads:
+            del self._reads[fd]
+            if fd not in self._writes:
+                del self._selectables[fd]
+            if not wasLost:
+                try:
+                    self._updateRegistration(fd, KQ_FILTER_READ, KQ_EV_DELETE)
+                except OSError:
+                    pass
+
+
+    def removeWriter(self, writer):
+        """
+        Implement L{IReactorFDSet.removeWriter}.
+        """
+        wasLost = False
+        try:
+            fd = writer.fileno()
+        except:
+            fd = -1
+        if fd == -1:
+            for fd, fdes in self._selectables.items():
+                if writer is fdes:
+                    wasLost = True
+                    break
+            else:
+                return
+        if fd in self._writes:
+            del self._writes[fd]
+            if fd not in self._reads:
+                del self._selectables[fd]
+            if not wasLost:
+                try:
+                    self._updateRegistration(fd, KQ_FILTER_WRITE, KQ_EV_DELETE)
+                except OSError:
+                    pass
+
+
+    def removeAll(self):
+        """
+        Implement L{IReactorFDSet.removeAll}.
+        """
+        return self._removeAll(
+            [self._selectables[fd] for fd in self._reads],
+            [self._selectables[fd] for fd in self._writes])
+
+
+    def getReaders(self):
+        """
+        Implement L{IReactorFDSet.getReaders}.
+        """
+        return [self._selectables[fd] for fd in self._reads]
+
+
+    def getWriters(self):
+        """
+        Implement L{IReactorFDSet.getWriters}.
+        """
+        return [self._selectables[fd] for fd in self._writes]
+
+
+    def doKEvent(self, timeout):
+        """
+        Poll the kqueue for new events.
+        """
+        if timeout is None:
+            timeout = 1
+
+        try:
+            l = self._kq.control([], len(self._selectables), timeout)
+        except OSError, e:
+            if e[0] == errno.EINTR:
+                return
+            else:
+                raise
+
+        _drdw = self._doWriteOrRead
+        for event in l:
+            fd = event.ident
+            try:
+                selectable = self._selectables[fd]
+            except KeyError:
+                # Handles the infrequent case where one selectable's
+                # handler disconnects another.
+                continue
+            else:
+                log.callWithLogger(selectable, _drdw, selectable, fd, event)
+
+
+    def _doWriteOrRead(self, selectable, fd, event):
+        """
+        Private method called when a FD is ready for reading or writing, or
+        has been lost. Do the work and raise errors where necessary.
+        """
+        why = None
+        inRead = False
+        (filter, flags, data, fflags) = (
+            event.filter, event.flags, event.data, event.fflags)
+
+        if flags & KQ_EV_EOF and data and fflags:
+            why = main.CONNECTION_LOST
+        else:
+            try:
+                if selectable.fileno() == -1:
+                    inRead = False
+                    why = posixbase._NO_FILEDESC
+                else:
+                    if filter == KQ_FILTER_READ:
+                        inRead = True
+                        why = selectable.doRead()
+                    if filter == KQ_FILTER_WRITE:
+                        inRead = False
+                        why = selectable.doWrite()
+            except:
+                # Any exception from application code gets logged and will
+                # cause us to disconnect the selectable.
+                why = failure.Failure()
+                log.err(why, "An exception was raised from application code"
+                             " while processing a reactor selectable")
+
+        if why:
+            self._disconnectSelectable(selectable, why, inRead)
+
+    doIteration = doKEvent
+
+
+def install():
+    """
+    Install the kqueue() reactor.
+    """
+    p = KQueueReactor()
+    from twisted.internet.main import installReactor
+    installReactor(p)
+
+
+__all__ = ["KQueueReactor", "install"]
diff --git a/ThirdParty/Twisted/twisted/internet/main.py b/ThirdParty/Twisted/twisted/internet/main.py
new file mode 100644
index 0000000..f7efeab
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/main.py
@@ -0,0 +1,37 @@
+# -*- test-case-name: twisted.internet.test.test_main -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Backwards compatibility, and utility functions.
+
+In general, this module should not be used, other than by reactor authors
+who need to use the 'installReactor' method.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.internet import error
+
+CONNECTION_DONE = error.ConnectionDone('Connection done')
+CONNECTION_LOST = error.ConnectionLost('Connection lost')
+
+
+
+def installReactor(reactor):
+    """
+    Install reactor C{reactor}.
+
+    @param reactor: An object that provides one or more IReactor* interfaces.
+    """
+    # this stuff should be common to all reactors.
+    import twisted.internet
+    import sys
+    if 'twisted.internet.reactor' in sys.modules:
+        raise error.ReactorAlreadyInstalledError("reactor already installed")
+    twisted.internet.reactor = reactor
+    sys.modules['twisted.internet.reactor'] = reactor
+
+
+__all__ = ["CONNECTION_LOST", "CONNECTION_DONE", "installReactor"]
diff --git a/ThirdParty/Twisted/twisted/internet/pollreactor.py b/ThirdParty/Twisted/twisted/internet/pollreactor.py
new file mode 100644
index 0000000..3613f01
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/pollreactor.py
@@ -0,0 +1,189 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A poll() based implementation of the Twisted main loop.
+
+To install the event loop (and you should do this before any connections,
+listeners or connectors are added)::
+
+    from twisted.internet import pollreactor
+    pollreactor.install()
+"""
+
+from __future__ import division, absolute_import
+
+# System imports
+import errno
+from select import error as SelectError, poll
+from select import POLLIN, POLLOUT, POLLHUP, POLLERR, POLLNVAL
+
+from zope.interface import implementer
+
+# Twisted imports
+from twisted.python import log
+from twisted.internet import posixbase
+from twisted.internet.interfaces import IReactorFDSet
+
+
+
+ at implementer(IReactorFDSet)
+class PollReactor(posixbase.PosixReactorBase, posixbase._PollLikeMixin):
+    """
+    A reactor that uses poll(2).
+
+    @ivar _poller: A L{poll} which will be used to check for I/O
+        readiness.
+
+    @ivar _selectables: A dictionary mapping integer file descriptors to
+        instances of L{FileDescriptor} which have been registered with the
+        reactor.  All L{FileDescriptors} which are currently receiving read or
+        write readiness notifications will be present as values in this
+        dictionary.
+
+    @ivar _reads: A dictionary mapping integer file descriptors to arbitrary
+        values (this is essentially a set).  Keys in this dictionary will be
+        registered with C{_poller} for read readiness notifications which will
+        be dispatched to the corresponding L{FileDescriptor} instances in
+        C{_selectables}.
+
+    @ivar _writes: A dictionary mapping integer file descriptors to arbitrary
+        values (this is essentially a set).  Keys in this dictionary will be
+        registered with C{_poller} for write readiness notifications which will
+        be dispatched to the corresponding L{FileDescriptor} instances in
+        C{_selectables}.
+    """
+
+    _POLL_DISCONNECTED = (POLLHUP | POLLERR | POLLNVAL)
+    _POLL_IN = POLLIN
+    _POLL_OUT = POLLOUT
+
+    def __init__(self):
+        """
+        Initialize polling object, file descriptor tracking dictionaries, and
+        the base class.
+        """
+        self._poller = poll()
+        self._selectables = {}
+        self._reads = {}
+        self._writes = {}
+        posixbase.PosixReactorBase.__init__(self)
+
+
+    def _updateRegistration(self, fd):
+        """Register/unregister an fd with the poller."""
+        try:
+            self._poller.unregister(fd)
+        except KeyError:
+            pass
+
+        mask = 0
+        if fd in self._reads:
+            mask = mask | POLLIN
+        if fd in self._writes:
+            mask = mask | POLLOUT
+        if mask != 0:
+            self._poller.register(fd, mask)
+        else:
+            if fd in self._selectables:
+                del self._selectables[fd]
+
+    def _dictRemove(self, selectable, mdict):
+        try:
+            # the easy way
+            fd = selectable.fileno()
+            # make sure the fd is actually real.  In some situations we can get
+            # -1 here.
+            mdict[fd]
+        except:
+            # the hard way: necessary because fileno() may disappear at any
+            # moment, thanks to Python's underlying socket implementation
+            for fd, fdes in self._selectables.items():
+                if selectable is fdes:
+                    break
+            else:
+                # Possibly not the right course of action, but this method
+                # cannot fail, because it happens inside error detection.
+                return
+        if fd in mdict:
+            del mdict[fd]
+            self._updateRegistration(fd)
+
+    def addReader(self, reader):
+        """Add a FileDescriptor for notification of data available to read.
+        """
+        fd = reader.fileno()
+        if fd not in self._reads:
+            self._selectables[fd] = reader
+            self._reads[fd] = 1
+            self._updateRegistration(fd)
+
+    def addWriter(self, writer):
+        """Add a FileDescriptor for notification of data available to write.
+        """
+        fd = writer.fileno()
+        if fd not in self._writes:
+            self._selectables[fd] = writer
+            self._writes[fd] = 1
+            self._updateRegistration(fd)
+
+    def removeReader(self, reader):
+        """Remove a Selectable for notification of data available to read.
+        """
+        return self._dictRemove(reader, self._reads)
+
+    def removeWriter(self, writer):
+        """Remove a Selectable for notification of data available to write.
+        """
+        return self._dictRemove(writer, self._writes)
+
+    def removeAll(self):
+        """
+        Remove all selectables, and return a list of them.
+        """
+        return self._removeAll(
+            [self._selectables[fd] for fd in self._reads],
+            [self._selectables[fd] for fd in self._writes])
+
+
+    def doPoll(self, timeout):
+        """Poll the poller for new events."""
+        if timeout is not None:
+            timeout = int(timeout * 1000) # convert seconds to milliseconds
+
+        try:
+            l = self._poller.poll(timeout)
+        except SelectError as e:
+            if e.args[0] == errno.EINTR:
+                return
+            else:
+                raise
+        _drdw = self._doReadOrWrite
+        for fd, event in l:
+            try:
+                selectable = self._selectables[fd]
+            except KeyError:
+                # Handles the infrequent case where one selectable's
+                # handler disconnects another.
+                continue
+            log.callWithLogger(selectable, _drdw, selectable, fd, event)
+
+    doIteration = doPoll
+
+    def getReaders(self):
+        return [self._selectables[fd] for fd in self._reads]
+
+
+    def getWriters(self):
+        return [self._selectables[fd] for fd in self._writes]
+
+
+
+def install():
+    """Install the poll() reactor."""
+    p = PollReactor()
+    from twisted.internet.main import installReactor
+    installReactor(p)
+
+
+__all__ = ["PollReactor", "install"]
diff --git a/ThirdParty/Twisted/twisted/internet/posixbase.py b/ThirdParty/Twisted/twisted/internet/posixbase.py
new file mode 100644
index 0000000..37269e0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/posixbase.py
@@ -0,0 +1,640 @@
+# -*- test-case-name: twisted.test.test_internet,twisted.internet.test.test_posixbase -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Posix reactor base class
+"""
+
+from __future__ import division, absolute_import
+
+import socket
+import errno
+import os
+import sys
+
+from zope.interface import implementer, classImplements
+
+from twisted.python.compat import set, _PY3
+from twisted.internet.interfaces import IReactorUNIX, IReactorUNIXDatagram
+from twisted.internet.interfaces import (
+    IReactorTCP, IReactorUDP, IReactorSSL, IReactorSocket)
+from twisted.internet.interfaces import IReactorProcess, IReactorMulticast
+from twisted.internet.interfaces import IHalfCloseableDescriptor
+from twisted.internet import error, udp, tcp
+
+from twisted.python import log, failure, _utilpy3 as util
+from twisted.python.runtime import platformType, platform
+
+from twisted.internet.base import ReactorBase, _SignalReactorMixin
+from twisted.internet.main import CONNECTION_DONE, CONNECTION_LOST
+
+# Exceptions that doSelect might return frequently
+_NO_FILENO = error.ConnectionFdescWentAway('Handler has no fileno method')
+_NO_FILEDESC = error.ConnectionFdescWentAway('File descriptor lost')
+
+
+try:
+    from twisted.protocols import tls
+except ImportError:
+    tls = None
+    try:
+        from twisted.internet import ssl
+    except ImportError:
+        ssl = None
+
+unixEnabled = (platformType == 'posix')
+
+processEnabled = False
+if unixEnabled:
+    from twisted.internet import fdesc
+    # Enable on Python 3 in ticket #5987:
+    if not _PY3:
+        from twisted.internet import process, _signals
+        processEnabled = True
+
+
+if platform.isWindows():
+    try:
+        import win32process
+        processEnabled = True
+    except ImportError:
+        win32process = None
+
+
+class _SocketWaker(log.Logger):
+    """
+    The I{self-pipe trick<http://cr.yp.to/docs/selfpipe.html>}, implemented
+    using a pair of sockets rather than pipes (due to the lack of support in
+    select() on Windows for pipes), used to wake up the main loop from
+    another thread.
+    """
+    disconnected = 0
+
+    def __init__(self, reactor):
+        """Initialize.
+        """
+        self.reactor = reactor
+        # Following the example of select_trigger (from asyncore):
+        server = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        client.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
+        server.bind(('127.0.0.1', 0))
+        server.listen(1)
+        client.connect(server.getsockname())
+        reader, clientaddr = server.accept()
+        client.setblocking(0)
+        reader.setblocking(0)
+        self.r = reader
+        self.w = client
+        self.fileno = self.r.fileno
+
+    def wakeUp(self):
+        """Send a byte to my connection.
+        """
+        try:
+            util.untilConcludes(self.w.send, b'x')
+        except socket.error as e:
+            if e.args[0] != errno.WSAEWOULDBLOCK:
+                raise
+
+    def doRead(self):
+        """Read some data from my connection.
+        """
+        try:
+            self.r.recv(8192)
+        except socket.error:
+            pass
+
+    def connectionLost(self, reason):
+        self.r.close()
+        self.w.close()
+
+
+
+class _FDWaker(log.Logger, object):
+    """
+    The I{self-pipe trick<http://cr.yp.to/docs/selfpipe.html>}, used to wake
+    up the main loop from another thread or a signal handler.
+
+    L{_FDWaker} is a base class for waker implementations based on
+    writing to a pipe being monitored by the reactor.
+
+    @ivar o: The file descriptor for the end of the pipe which can be
+        written to to wake up a reactor monitoring this waker.
+
+    @ivar i: The file descriptor which should be monitored in order to
+        be awoken by this waker.
+    """
+    disconnected = 0
+
+    i = None
+    o = None
+
+    def __init__(self, reactor):
+        """Initialize.
+        """
+        self.reactor = reactor
+        self.i, self.o = os.pipe()
+        fdesc.setNonBlocking(self.i)
+        fdesc._setCloseOnExec(self.i)
+        fdesc.setNonBlocking(self.o)
+        fdesc._setCloseOnExec(self.o)
+        self.fileno = lambda: self.i
+
+
+    def doRead(self):
+        """
+        Read some bytes from the pipe and discard them.
+        """
+        fdesc.readFromFD(self.fileno(), lambda data: None)
+
+
+    def connectionLost(self, reason):
+        """Close both ends of my pipe.
+        """
+        if not hasattr(self, "o"):
+            return
+        for fd in self.i, self.o:
+            try:
+                os.close(fd)
+            except IOError:
+                pass
+        del self.i, self.o
+
+
+
+class _UnixWaker(_FDWaker):
+    """
+    This class provides a simple interface to wake up the event loop.
+
+    This is used by threads or signals to wake up the event loop.
+    """
+
+    def wakeUp(self):
+        """Write one byte to the pipe, and flush it.
+        """
+        # We don't use fdesc.writeToFD since we need to distinguish
+        # between EINTR (try again) and EAGAIN (do nothing).
+        if self.o is not None:
+            try:
+                util.untilConcludes(os.write, self.o, b'x')
+            except OSError as e:
+                # XXX There is no unit test for raising the exception
+                # for other errnos. See #4285.
+                if e.errno != errno.EAGAIN:
+                    raise
+
+
+
+if platformType == 'posix':
+    _Waker = _UnixWaker
+else:
+    # Primarily Windows and Jython.
+    _Waker = _SocketWaker
+
+
+class _SIGCHLDWaker(_FDWaker):
+    """
+    L{_SIGCHLDWaker} can wake up a reactor whenever C{SIGCHLD} is
+    received.
+
+    @see: L{twisted.internet._signals}
+    """
+    def __init__(self, reactor):
+        _FDWaker.__init__(self, reactor)
+
+
+    def install(self):
+        """
+        Install the handler necessary to make this waker active.
+        """
+        _signals.installHandler(self.o)
+
+
+    def uninstall(self):
+        """
+        Remove the handler which makes this waker active.
+        """
+        _signals.installHandler(-1)
+
+
+    def doRead(self):
+        """
+        Having woken up the reactor in response to receipt of
+        C{SIGCHLD}, reap the process which exited.
+
+        This is called whenever the reactor notices the waker pipe is
+        writeable, which happens soon after any call to the C{wakeUp}
+        method.
+        """
+        _FDWaker.doRead(self)
+        process.reapAllProcesses()
+
+
+
+
+class _DisconnectSelectableMixin(object):
+    """
+    Mixin providing the C{_disconnectSelectable} method.
+    """
+
+    def _disconnectSelectable(self, selectable, why, isRead, faildict={
+        error.ConnectionDone: failure.Failure(error.ConnectionDone()),
+        error.ConnectionLost: failure.Failure(error.ConnectionLost())
+        }):
+        """
+        Utility function for disconnecting a selectable.
+
+        Supports half-close notification. C{isRead} should be a boolean
+        indicating whether the error resulted from C{doRead()}.
+        """
+        self.removeReader(selectable)
+        f = faildict.get(why.__class__)
+        if f:
+            if (isRead and why.__class__ == error.ConnectionDone
+                and IHalfCloseableDescriptor.providedBy(selectable)):
+                selectable.readConnectionLost(f)
+            else:
+                self.removeWriter(selectable)
+                selectable.connectionLost(f)
+        else:
+            self.removeWriter(selectable)
+            selectable.connectionLost(failure.Failure(why))
+
+
+
+ at implementer(IReactorTCP, IReactorUDP, IReactorMulticast)
+class PosixReactorBase(_SignalReactorMixin, _DisconnectSelectableMixin,
+                       ReactorBase):
+    """
+    A basis for reactors that use file descriptors.
+
+    @ivar _childWaker: C{None} or a reference to the L{_SIGCHLDWaker}
+        which is used to properly notice child process termination.
+    """
+
+    # Callable that creates a waker, overrideable so that subclasses can
+    # substitute their own implementation:
+    _wakerFactory = _Waker
+
+    def installWaker(self):
+        """
+        Install a `waker' to allow threads and signals to wake up the IO thread.
+
+        We use the self-pipe trick (http://cr.yp.to/docs/selfpipe.html) to wake
+        the reactor. On Windows we use a pair of sockets.
+        """
+        if not self.waker:
+            self.waker = self._wakerFactory(self)
+            self._internalReaders.add(self.waker)
+            self.addReader(self.waker)
+
+
+    _childWaker = None
+    def _handleSignals(self):
+        """
+        Extend the basic signal handling logic to also support
+        handling SIGCHLD to know when to try to reap child processes.
+        """
+        _SignalReactorMixin._handleSignals(self)
+        if platformType == 'posix' and processEnabled:
+            if not self._childWaker:
+                self._childWaker = _SIGCHLDWaker(self)
+                self._internalReaders.add(self._childWaker)
+                self.addReader(self._childWaker)
+            self._childWaker.install()
+            # Also reap all processes right now, in case we missed any
+            # signals before we installed the SIGCHLD waker/handler.
+            # This should only happen if someone used spawnProcess
+            # before calling reactor.run (and the process also exited
+            # already).
+            process.reapAllProcesses()
+
+    def _uninstallHandler(self):
+        """
+        If a child waker was created and installed, uninstall it now.
+
+        Since this disables reactor functionality and is only called
+        when the reactor is stopping, it doesn't provide any directly
+        useful functionality, but the cleanup of reactor-related
+        process-global state that it performs helps in unit tests
+        involving multiple reactors and is generally just a nice
+        thing.
+        """
+        # XXX This would probably be an alright place to put all of
+        # the cleanup code for all internal readers (here and in the
+        # base class, anyway).  See #3063 for that cleanup task.
+        if self._childWaker:
+            self._childWaker.uninstall()
+
+    # IReactorProcess
+
+    def spawnProcess(self, processProtocol, executable, args=(),
+                     env={}, path=None,
+                     uid=None, gid=None, usePTY=0, childFDs=None):
+        args, env = self._checkProcessArgs(args, env)
+        if platformType == 'posix':
+            if usePTY:
+                if childFDs is not None:
+                    raise ValueError("Using childFDs is not supported with usePTY=True.")
+                return process.PTYProcess(self, executable, args, env, path,
+                                          processProtocol, uid, gid, usePTY)
+            else:
+                return process.Process(self, executable, args, env, path,
+                                       processProtocol, uid, gid, childFDs)
+        elif platformType == "win32":
+            if uid is not None:
+                raise ValueError("Setting UID is unsupported on this platform.")
+            if gid is not None:
+                raise ValueError("Setting GID is unsupported on this platform.")
+            if usePTY:
+                raise ValueError("The usePTY parameter is not supported on Windows.")
+            if childFDs:
+                raise ValueError("Customizing childFDs is not supported on Windows.")
+
+            if win32process:
+                from twisted.internet._dumbwin32proc import Process
+                return Process(self, processProtocol, executable, args, env, path)
+            else:
+                raise NotImplementedError(
+                    "spawnProcess not available since pywin32 is not installed.")
+        else:
+            raise NotImplementedError(
+                "spawnProcess only available on Windows or POSIX.")
+
+    # IReactorUDP
+
+    def listenUDP(self, port, protocol, interface='', maxPacketSize=8192):
+        """Connects a given L{DatagramProtocol} to the given numeric UDP port.
+
+        @returns: object conforming to L{IListeningPort}.
+        """
+        p = udp.Port(port, protocol, interface, maxPacketSize, self)
+        p.startListening()
+        return p
+
+    # IReactorMulticast
+
+    def listenMulticast(self, port, protocol, interface='', maxPacketSize=8192, listenMultiple=False):
+        """Connects a given DatagramProtocol to the given numeric UDP port.
+
+        EXPERIMENTAL.
+
+        @returns: object conforming to IListeningPort.
+        """
+        p = udp.MulticastPort(port, protocol, interface, maxPacketSize, self, listenMultiple)
+        p.startListening()
+        return p
+
+
+    # IReactorUNIX
+
+    def connectUNIX(self, address, factory, timeout=30, checkPID=0):
+        """@see: twisted.internet.interfaces.IReactorUNIX.connectUNIX
+        """
+        assert unixEnabled, "UNIX support is not present"
+        # Move this import back up to main level when twisted.internet.unix is
+        # ported to Python 3:
+        from twisted.internet import unix
+        c = unix.Connector(address, factory, timeout, self, checkPID)
+        c.connect()
+        return c
+
+    def listenUNIX(self, address, factory, backlog=50, mode=0o666, wantPID=0):
+        """
+        @see: twisted.internet.interfaces.IReactorUNIX.listenUNIX
+        """
+        assert unixEnabled, "UNIX support is not present"
+        # Move this import back up to main level when twisted.internet.unix is
+        # ported to Python 3:
+        from twisted.internet import unix
+        p = unix.Port(address, factory, backlog, mode, self, wantPID)
+        p.startListening()
+        return p
+
+
+    # IReactorUNIXDatagram
+
+    def listenUNIXDatagram(self, address, protocol, maxPacketSize=8192,
+                           mode=0o666):
+        """
+        Connects a given L{DatagramProtocol} to the given path.
+
+        EXPERIMENTAL.
+
+        @returns: object conforming to L{IListeningPort}.
+        """
+        assert unixEnabled, "UNIX support is not present"
+        # Move this import back up to main level when twisted.internet.unix is
+        # ported to Python 3:
+        from twisted.internet import unix
+        p = unix.DatagramPort(address, protocol, maxPacketSize, mode, self)
+        p.startListening()
+        return p
+
+    def connectUNIXDatagram(self, address, protocol, maxPacketSize=8192,
+                            mode=0o666, bindAddress=None):
+        """
+        Connects a L{ConnectedDatagramProtocol} instance to a path.
+
+        EXPERIMENTAL.
+        """
+        assert unixEnabled, "UNIX support is not present"
+        # Move this import back up to main level when twisted.internet.unix is
+        # ported to Python 3:
+        from twisted.internet import unix
+        p = unix.ConnectedDatagramPort(address, protocol, maxPacketSize, mode, bindAddress, self)
+        p.startListening()
+        return p
+
+
+    # IReactorSocket (but not on Windows)
+
+    def adoptStreamPort(self, fileDescriptor, addressFamily, factory):
+        """
+        Create a new L{IListeningPort} from an already-initialized socket.
+
+        This just dispatches to a suitable port implementation (eg from
+        L{IReactorTCP}, etc) based on the specified C{addressFamily}.
+
+        @see: L{twisted.internet.interfaces.IReactorSocket.adoptStreamPort}
+        """
+        if addressFamily not in (socket.AF_INET, socket.AF_INET6):
+            raise error.UnsupportedAddressFamily(addressFamily)
+
+        p = tcp.Port._fromListeningDescriptor(
+            self, fileDescriptor, addressFamily, factory)
+        p.startListening()
+        return p
+
+    def adoptStreamConnection(self, fileDescriptor, addressFamily, factory):
+        """
+        @see:
+            L{twisted.internet.interfaces.IReactorSocket.adoptStreamConnection}
+        """
+        if addressFamily not in (socket.AF_INET, socket.AF_INET6):
+            raise error.UnsupportedAddressFamily(addressFamily)
+
+        return tcp.Server._fromConnectedSocket(
+            fileDescriptor, addressFamily, factory, self)
+
+
+    # IReactorTCP
+
+    def listenTCP(self, port, factory, backlog=50, interface=''):
+        """@see: twisted.internet.interfaces.IReactorTCP.listenTCP
+        """
+        p = tcp.Port(port, factory, backlog, interface, self)
+        p.startListening()
+        return p
+
+    def connectTCP(self, host, port, factory, timeout=30, bindAddress=None):
+        """@see: twisted.internet.interfaces.IReactorTCP.connectTCP
+        """
+        c = tcp.Connector(host, port, factory, timeout, bindAddress, self)
+        c.connect()
+        return c
+
+    # IReactorSSL (sometimes, not implemented)
+
+    def connectSSL(self, host, port, factory, contextFactory, timeout=30, bindAddress=None):
+        """@see: twisted.internet.interfaces.IReactorSSL.connectSSL
+        """
+        if tls is not None:
+            tlsFactory = tls.TLSMemoryBIOFactory(contextFactory, True, factory)
+            return self.connectTCP(host, port, tlsFactory, timeout, bindAddress)
+        elif ssl is not None:
+            c = ssl.Connector(
+                host, port, factory, contextFactory, timeout, bindAddress, self)
+            c.connect()
+            return c
+        else:
+            assert False, "SSL support is not present"
+
+
+
+    def listenSSL(self, port, factory, contextFactory, backlog=50, interface=''):
+        """@see: twisted.internet.interfaces.IReactorSSL.listenSSL
+        """
+        if tls is not None:
+            tlsFactory = tls.TLSMemoryBIOFactory(contextFactory, False, factory)
+            port = self.listenTCP(port, tlsFactory, backlog, interface)
+            port._type = 'TLS'
+            return port
+        elif ssl is not None:
+            p = ssl.Port(
+                port, factory, contextFactory, backlog, interface, self)
+            p.startListening()
+            return p
+        else:
+            assert False, "SSL support is not present"
+
+
+    def _removeAll(self, readers, writers):
+        """
+        Remove all readers and writers, and return the list of removed
+        L{IReadDescriptor}s and L{IWriteDescriptor}s.
+
+        Meant for calling from subclasses, to implement removeAll, like::
+
+          def removeAll(self):
+              return self._removeAll(self._reads, self._writes)
+
+        where C{self._reads} and C{self._writes} are iterables.
+        """
+        removedReaders = set(readers) - self._internalReaders
+        for reader in removedReaders:
+            self.removeReader(reader)
+
+        removedWriters = set(writers)
+        for writer in removedWriters:
+            self.removeWriter(writer)
+
+        return list(removedReaders | removedWriters)
+
+
+class _PollLikeMixin(object):
+    """
+    Mixin for poll-like reactors.
+
+    Subclasses must define the following attributes::
+
+      - _POLL_DISCONNECTED - Bitmask for events indicating a connection was
+        lost.
+      - _POLL_IN - Bitmask for events indicating there is input to read.
+      - _POLL_OUT - Bitmask for events indicating output can be written.
+
+    Must be mixed in to a subclass of PosixReactorBase (for
+    _disconnectSelectable).
+    """
+
+    def _doReadOrWrite(self, selectable, fd, event):
+        """
+        fd is available for read or write, do the work and raise errors if
+        necessary.
+        """
+        why = None
+        inRead = False
+        if event & self._POLL_DISCONNECTED and not (event & self._POLL_IN):
+            # Handle disconnection.  But only if we finished processing all
+            # the pending input.
+            if fd in self._reads:
+                # If we were reading from the descriptor then this is a
+                # clean shutdown.  We know there are no read events pending
+                # because we just checked above.  It also might be a
+                # half-close (which is why we have to keep track of inRead).
+                inRead = True
+                why = CONNECTION_DONE
+            else:
+                # If we weren't reading, this is an error shutdown of some
+                # sort.
+                why = CONNECTION_LOST
+        else:
+            # Any non-disconnect event turns into a doRead or a doWrite.
+            try:
+                # First check to see if the descriptor is still valid.  This
+                # gives fileno() a chance to raise an exception, too. 
+                # Ideally, disconnection would always be indicated by the
+                # return value of doRead or doWrite (or an exception from
+                # one of those methods), but calling fileno here helps make
+                # buggy applications more transparent.
+                if selectable.fileno() == -1:
+                    # -1 is sort of a historical Python artifact.  Python
+                    # files and sockets used to change their file descriptor
+                    # to -1 when they closed.  For the time being, we'll
+                    # continue to support this anyway in case applications
+                    # replicated it, plus abstract.FileDescriptor.fileno
+                    # returns -1.  Eventually it'd be good to deprecate this
+                    # case.
+                    why = _NO_FILEDESC
+                else:
+                    if event & self._POLL_IN:
+                        # Handle a read event.
+                        why = selectable.doRead()
+                        inRead = True
+                    if not why and event & self._POLL_OUT:
+                        # Handle a write event, as long as doRead didn't
+                        # disconnect us.
+                        why = selectable.doWrite()
+                        inRead = False
+            except:
+                # Any exception from application code gets logged and will
+                # cause us to disconnect the selectable.
+                why = sys.exc_info()[1]
+                log.err()
+        if why:
+            self._disconnectSelectable(selectable, why, inRead)
+
+
+
+if tls is not None or ssl is not None:
+    classImplements(PosixReactorBase, IReactorSSL)
+if unixEnabled:
+    classImplements(PosixReactorBase, IReactorUNIX, IReactorUNIXDatagram)
+if processEnabled:
+    classImplements(PosixReactorBase, IReactorProcess)
+if getattr(socket, 'fromfd', None) is not None:
+    classImplements(PosixReactorBase, IReactorSocket)
+
+__all__ = ["PosixReactorBase"]
diff --git a/ThirdParty/Twisted/twisted/internet/process.py b/ThirdParty/Twisted/twisted/internet/process.py
new file mode 100644
index 0000000..a5f12a4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/process.py
@@ -0,0 +1,1084 @@
+# -*- test-case-name: twisted.test.test_process -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+UNIX Process management.
+
+Do NOT use this module directly - use reactor.spawnProcess() instead.
+
+Maintainer: Itamar Shtull-Trauring
+"""
+
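
As the docstring above says, this module is normally driven through
reactor.spawnProcess(), defined in posixbase.py earlier in this commit. A
minimal usage sketch (not part of the imported sources; the Collector class
name is illustrative)::

    import os
    from twisted.internet import reactor, protocol

    class Collector(protocol.ProcessProtocol):
        output = b""

        def outReceived(self, data):
            # Delivered through ProcessReader.dataReceived below.
            self.output += data

        def processEnded(self, reason):
            # reason.value is ProcessDone or ProcessTerminated, built by
            # _BaseProcess._getReason below.
            reactor.stop()

    reactor.spawnProcess(Collector(), "/bin/ls", args=["ls", "-l"],
                         env=os.environ)
    reactor.run()
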
+# System Imports
+import gc, os, sys, stat, traceback, select, signal, errno
+
+try:
+    import pty
+except ImportError:
+    pty = None
+
+try:
+    import fcntl, termios
+except ImportError:
+    fcntl = None
+
+from zope.interface import implements
+
+from twisted.python import log, failure
+from twisted.python.util import switchUID
+from twisted.internet import fdesc, abstract, error
+from twisted.internet.main import CONNECTION_LOST, CONNECTION_DONE
+from twisted.internet._baseprocess import BaseProcess
+from twisted.internet.interfaces import IProcessTransport
+
+# Some people were importing this, which is incorrect; it is kept here
+# only for backwards compatibility:
+ProcessExitedAlready = error.ProcessExitedAlready
+
+reapProcessHandlers = {}
+
+def reapAllProcesses():
+    """
+    Reap all registered processes.
+    """
+    for process in reapProcessHandlers.values():
+        process.reapProcess()
+
+
+def registerReapProcessHandler(pid, process):
+    """
+    Register a process handler for the given pid, in case L{reapAllProcesses}
+    is called.
+
+    @param pid: the pid of the process.
+    @param process: a process handler.
+    """
+    if pid in reapProcessHandlers:
+        raise RuntimeError("Try to register an already registered process.")
+    try:
+        auxPID, status = os.waitpid(pid, os.WNOHANG)
+    except:
+        log.msg('Failed to reap %d:' % pid)
+        log.err()
+        auxPID = None
+    if auxPID:
+        process.processEnded(status)
+    else:
+        # if auxPID is 0, there are children but none have exited
+        reapProcessHandlers[pid] = process
+
+
+def unregisterReapProcessHandler(pid, process):
+    """
+    Unregister a process handler previously registered with
+    L{registerReapProcessHandler}.
+    """
+    if not (pid in reapProcessHandlers
+            and reapProcessHandlers[pid] == process):
+        raise RuntimeError("Try to unregister a process not registered.")
+    del reapProcessHandlers[pid]
+
+
+def detectLinuxBrokenPipeBehavior():
+    """
+    On some Linux versions, write-only pipes are detected as readable. This
+    function checks whether that bug is present.
+
+    See L{ProcessWriter.doRead} for a more detailed explanation.
+    """
+    global brokenLinuxPipeBehavior
+    r, w = os.pipe()
+    os.write(w, 'a')
+    reads, writes, exes = select.select([w], [], [], 0)
+    if reads:
+        # Linux < 2.6.11 says a write-only pipe is readable.
+        brokenLinuxPipeBehavior = True
+    else:
+        brokenLinuxPipeBehavior = False
+    os.close(r)
+    os.close(w)
+
+# Call at import time
+detectLinuxBrokenPipeBehavior()
+
+
+class ProcessWriter(abstract.FileDescriptor):
+    """
+    (Internal) Helper class to write into a Process's input pipe.
+
+    I am a helper which describes a selectable asynchronous writer to a
+    process's input pipe, including stdin.
+
+    @ivar enableReadHack: A flag which determines how readability on this
+        write descriptor will be handled.  If C{True}, then readability may
+        indicate the reader for this write descriptor has been closed (ie,
+        the connection has been lost).  If C{False}, then readability events
+        are ignored.
+    """
+    connected = 1
+    ic = 0
+    enableReadHack = False
+
+    def __init__(self, reactor, proc, name, fileno, forceReadHack=False):
+        """
+        Initialize, specifying a Process instance to connect to.
+        """
+        abstract.FileDescriptor.__init__(self, reactor)
+        fdesc.setNonBlocking(fileno)
+        self.proc = proc
+        self.name = name
+        self.fd = fileno
+
+        if not stat.S_ISFIFO(os.fstat(self.fileno()).st_mode):
+            # If the fd is not a pipe, then the read hack is never
+            # applicable.  This case arises when ProcessWriter is used by
+            # StandardIO and stdout is redirected to a normal file.
+            self.enableReadHack = False
+        elif forceReadHack:
+            self.enableReadHack = True
+        else:
+            # Detect if this fd is actually a write-only fd. If it's
+            # valid to read, don't try to detect closing via read.
+            # This really only means that we cannot detect a TTY's write
+            # pipe being closed.
+            try:
+                os.read(self.fileno(), 0)
+            except OSError:
+                # It's a write-only pipe end, enable hack
+                self.enableReadHack = True
+
+        if self.enableReadHack:
+            self.startReading()
+
+    def fileno(self):
+        """
+        Return the fileno() of my process's stdin.
+        """
+        return self.fd
+
+    def writeSomeData(self, data):
+        """
+        Write some data to the open process.
+        """
+        rv = fdesc.writeToFD(self.fd, data)
+        if rv == len(data) and self.enableReadHack:
+            # If the send buffer is now empty and it is necessary to monitor
+            # this descriptor for readability to detect close, try detecting
+            # readability now.
+            self.startReading()
+        return rv
+
+    def write(self, data):
+        self.stopReading()
+        abstract.FileDescriptor.write(self, data)
+
+    def doRead(self):
+        """
+        The only way a write pipe can become "readable" is at EOF, because the
+        child has closed it, and we're using a reactor which doesn't
+        distinguish between readable and closed (such as the select reactor).
+
+        Except that's not true on Linux < 2.6.11, which has the following
+        characteristics:
+
+          - write pipe is completely empty => POLLOUT (writable in select)
+          - write pipe is not completely empty => POLLIN (readable in select)
+          - write pipe's reader closed => POLLIN|POLLERR (readable and
+            writable in select)
+
+        That's what this funky code is for. If Linux were not broken, this
+        function could simply "return CONNECTION_LOST".
+
+        BUG: We call select no matter what the reactor is.  If the reactor is
+        pollreactor and the fd is > 1024, this will fail (this only occurs on
+        broken versions of Linux, though).
+        """
+        if self.enableReadHack:
+            if brokenLinuxPipeBehavior:
+                fd = self.fd
+                r, w, x = select.select([fd], [fd], [], 0)
+                if r and w:
+                    return CONNECTION_LOST
+            else:
+                return CONNECTION_LOST
+        else:
+            self.stopReading()
+
+    def connectionLost(self, reason):
+        """
+        See abstract.FileDescriptor.connectionLost.
+        """
+        # At least on OS X 10.4, exiting while stdout is non-blocking can
+        # result in data loss.  For some reason putting the file descriptor
+        # back into blocking mode seems to resolve this issue.
+        fdesc.setBlocking(self.fd)
+
+        abstract.FileDescriptor.connectionLost(self, reason)
+        self.proc.childConnectionLost(self.name, reason)
+
+
+
+class ProcessReader(abstract.FileDescriptor):
+    """
+    ProcessReader
+
+    I am a selectable representation of a process's output pipe, such as
+    stdout and stderr.
+    """
+    connected = 1
+
+    def __init__(self, reactor, proc, name, fileno):
+        """
+        Initialize, specifying a process to connect to.
+        """
+        abstract.FileDescriptor.__init__(self, reactor)
+        fdesc.setNonBlocking(fileno)
+        self.proc = proc
+        self.name = name
+        self.fd = fileno
+        self.startReading()
+
+    def fileno(self):
+        """
+        Return the fileno() of my process's stderr.
+        """
+        return self.fd
+
+    def writeSomeData(self, data):
+        # The only time this is actually called is after .loseConnection().
+        # Any actual write attempt would fail, so we must avoid that. This
+        # hack allows us to use .loseConnection on both readers and writers.
+        assert data == ""
+        return CONNECTION_LOST
+
+    def doRead(self):
+        """
+        This is called when the pipe becomes readable.
+        """
+        return fdesc.readFromFD(self.fd, self.dataReceived)
+
+    def dataReceived(self, data):
+        self.proc.childDataReceived(self.name, data)
+
+    def loseConnection(self):
+        if self.connected and not self.disconnecting:
+            self.disconnecting = 1
+            self.stopReading()
+            self.reactor.callLater(0, self.connectionLost,
+                                   failure.Failure(CONNECTION_DONE))
+
+    def connectionLost(self, reason):
+        """
+        Close my end of the pipe, signal the Process (which signals the
+        ProcessProtocol).
+        """
+        abstract.FileDescriptor.connectionLost(self, reason)
+        self.proc.childConnectionLost(self.name, reason)
+
+
+class _BaseProcess(BaseProcess, object):
+    """
+    Base class for Process and PTYProcess.
+    """
+    status = None
+    pid = None
+
+    def reapProcess(self):
+        """
+        Try to reap a process (without blocking) via waitpid.
+
+        This is called when SIGCHLD is caught or a Process object loses its
+        "connection" (stdout is closed). This ought to result in reaping all
+        zombie processes, since it will be called twice as often as it needs
+        to be.
+
+        (Unfortunately, this is a slightly experimental approach, since
+        UNIX has no way to be really sure that your process is going to
+        go away without blocking.  I don't want to block.)
+        """
+        try:
+            try:
+                pid, status = os.waitpid(self.pid, os.WNOHANG)
+            except OSError, e:
+                if e.errno == errno.ECHILD:
+                    # no child process
+                    pid = None
+                else:
+                    raise
+        except:
+            log.msg('Failed to reap %d:' % self.pid)
+            log.err()
+            pid = None
+        if pid:
+            self.processEnded(status)
+            unregisterReapProcessHandler(pid, self)
+
+
+    def _getReason(self, status):
+        exitCode = sig = None
+        if os.WIFEXITED(status):
+            exitCode = os.WEXITSTATUS(status)
+        else:
+            sig = os.WTERMSIG(status)
+        if exitCode or sig:
+            return error.ProcessTerminated(exitCode, sig, status)
+        return error.ProcessDone(status)
+
+
+    def signalProcess(self, signalID):
+        """
+        Send the given signal C{signalID} to the process. A few signals
+        ('HUP', 'STOP', 'INT', 'KILL', 'TERM') are translated from their
+        string representation to the corresponding int value; any other value
+        is passed through unchanged.
+
+        @type signalID: C{str} or C{int}
+        """
+        if signalID in ('HUP', 'STOP', 'INT', 'KILL', 'TERM'):
+            signalID = getattr(signal, 'SIG%s' % (signalID,))
+        if self.pid is None:
+            raise ProcessExitedAlready()
+        try:
+            os.kill(self.pid, signalID)
+        except OSError, e:
+            if e.errno == errno.ESRCH:
+                raise ProcessExitedAlready()
+            else:
+                raise
+
+
+    def _resetSignalDisposition(self):
+        # The Python interpreter ignores some signals, and our child
+        # process will inherit that behaviour. To have a child process
+        # that responds to signals normally, we need to reset our
+        # child process's signal handling (just) after we fork and
+        # before we execvpe.
+        for signalnum in range(1, signal.NSIG):
+            if signal.getsignal(signalnum) == signal.SIG_IGN:
+                # Reset signal handling to the default
+                signal.signal(signalnum, signal.SIG_DFL)
+
+
+    def _fork(self, path, uid, gid, executable, args, environment, **kwargs):
+        """
+        Fork and then exec sub-process.
+
+        @param path: the path where to run the new process.
+        @type path: C{str}
+        @param uid: if defined, the uid used to run the new process.
+        @type uid: C{int}
+        @param gid: if defined, the gid used to run the new process.
+        @type gid: C{int}
+        @param executable: the executable to run in a new process.
+        @type executable: C{str}
+        @param args: arguments used to create the new process.
+        @type args: C{list}.
+        @param environment: environment used for the new process.
+        @type environment: C{dict}.
+        @param kwargs: keyword arguments to L{_setupChild} method.
+        """
+        settingUID = (uid is not None) or (gid is not None)
+        if settingUID:
+            curegid = os.getegid()
+            currgid = os.getgid()
+            cureuid = os.geteuid()
+            curruid = os.getuid()
+            if uid is None:
+                uid = cureuid
+            if gid is None:
+                gid = curegid
+            # prepare to change UID in subprocess
+            os.setuid(0)
+            os.setgid(0)
+
+        collectorEnabled = gc.isenabled()
+        gc.disable()
+        try:
+            self.pid = os.fork()
+        except:
+            # Still in the parent process
+            if settingUID:
+                os.setregid(currgid, curegid)
+                os.setreuid(curruid, cureuid)
+            if collectorEnabled:
+                gc.enable()
+            raise
+        else:
+            if self.pid == 0: # pid is 0 in the child process
+                # do not put *ANY* code outside the try block. The child process
+                # must either exec or _exit. If it gets outside this block (due
+                # to an exception that is not handled here, but which might be
+                # handled higher up), there will be two copies of the parent
+                # running in parallel, doing all kinds of damage.
+
+                # After each change to this code, review it to make sure there
+                # are no exit paths.
+                try:
+                    # Stop debugging. If I am, I don't care anymore.
+                    sys.settrace(None)
+                    self._setupChild(**kwargs)
+                    self._execChild(path, settingUID, uid, gid,
+                                    executable, args, environment)
+                except:
+                    # If there are errors, bail and try to write something
+                    # descriptive to stderr.
+                    # XXX: The parent's stderr isn't necessarily fd 2 anymore, or
+                    #      even still available
+                    # XXXX: however even libc assumes write(2, err) is a useful
+                    #       thing to attempt
+                    try:
+                        stderr = os.fdopen(2, 'w')
+                        stderr.write("Upon execvpe %s %s in environment %s\n:" %
+                                     (executable, str(args),
+                                      "id %s" % id(environment)))
+                        traceback.print_exc(file=stderr)
+                        stderr.flush()
+                        for fd in range(3):
+                            os.close(fd)
+                    except:
+                        pass # make *sure* the child terminates
+                # Did you read the comment about not adding code here?
+                os._exit(1)
+
+        # we are now in parent process
+        if settingUID:
+            os.setregid(currgid, curegid)
+            os.setreuid(curruid, cureuid)
+        if collectorEnabled:
+            gc.enable()
+        self.status = -1 # this records the exit status of the child
+
+    def _setupChild(self, *args, **kwargs):
+        """
+        Setup the child process. Override in subclasses.
+        """
+        raise NotImplementedError()
+
+    def _execChild(self, path, settingUID, uid, gid,
+                   executable, args, environment):
+        """
+        The exec() which is done in the forked child.
+        """
+        if path:
+            os.chdir(path)
+        # set the UID before I actually exec the process
+        if settingUID:
+            switchUID(uid, gid)
+        os.execvpe(executable, args, environment)
+
+    def __repr__(self):
+        """
+        String representation of a process.
+        """
+        return "<%s pid=%s status=%s>" % (self.__class__.__name__,
+                                          self.pid, self.status)
+
+
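+# A minimal, self-contained sketch, not upstream code, of the fork/exec/reap
+# pattern _BaseProcess implements above: reset inherited SIG_IGN dispositions
+# in the child, exec, and later reap with a non-blocking waitpid.  The helper
+# names _exampleSpawn and _exampleReap are hypothetical.
+def _exampleSpawn(argv):
+    import os, signal
+    pid = os.fork()
+    if pid == 0:
+        # Child: restore default handling for signals the parent ignored,
+        # then exec; _exit() if exec fails so a second copy of the parent
+        # never keeps running.
+        for signum in range(1, signal.NSIG):
+            if signal.getsignal(signum) == signal.SIG_IGN:
+                signal.signal(signum, signal.SIG_DFL)
+        try:
+            os.execvp(argv[0], argv)
+        finally:
+            os._exit(1)
+    return pid
+
+
+def _exampleReap(pid):
+    # Non-blocking reap: returns the exit status, or None if the child is
+    # still running.
+    import os
+    reaped, status = os.waitpid(pid, os.WNOHANG)
+    if reaped == 0:
+        return None
+    return status
+
+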
+class _FDDetector(object):
+    """
+    This class contains the logic necessary to decide which of the available
+    system techniques should be used to detect the open file descriptors for
+    the current process. The chosen technique gets monkey-patched into the
+    _listOpenFDs method of this class so that the detection only needs to occur
+    once.
+
+    @ivar listdir: The implementation of listdir to use. This gets overwritten
+        by the test cases.
+    @ivar getpid: The implementation of getpid to use, returns the PID of the
+        running process.
+    @ivar openfile: The implementation of open() to use, by default the Python
+        builtin.
+    """
+    # So that we can unit test this
+    listdir = os.listdir
+    getpid = os.getpid
+    openfile = open
+
+    def __init__(self):
+        self._implementations = [
+            self._procFDImplementation, self._devFDImplementation,
+            self._fallbackFDImplementation]
+
+
+    def _listOpenFDs(self):
+        """
+        Return an iterable of file descriptors which I{may} be open in this
+        process.
+
+        This will try to return the fewest possible descriptors without missing
+        any.
+        """
+        self._listOpenFDs = self._getImplementation()
+        return self._listOpenFDs()
+
+
+    def _getImplementation(self):
+        """
+        Pick a method which gives correct results for C{_listOpenFDs} in this
+        runtime environment.
+
+        This involves a lot of very platform-specific checks, some of which may
+        be relatively expensive.  Therefore the returned method should be saved
+        and re-used, rather than always calling this method to determine what it
+        is.
+
+        See the implementation for the details of how a method is selected.
+        """
+        for impl in self._implementations:
+            try:
+                before = impl()
+            except:
+                continue
+            try:
+                fp = self.openfile("/dev/null", "r")
+                after = impl()
+            finally:
+                fp.close()
+            if before != after:
+                return impl
+        # If no implementation can detect the newly opened file above, then just
+        # return the last one.  The last one should therefore always be one
+        # which makes a simple static guess which includes all possible open
+        # file descriptors, but perhaps also many other values which do not
+        # correspond to file descriptors.  For example, the scheme implemented
+        # by _fallbackFDImplementation is suitable to be the last entry.
+        return impl
+
+
+    def _devFDImplementation(self):
+        """
+        Simple implementation for systems where /dev/fd actually works.
+        See: http://www.freebsd.org/cgi/man.cgi?fdescfs
+        """
+        dname = "/dev/fd"
+        result = [int(fd) for fd in self.listdir(dname)]
+        return result
+
+
+    def _procFDImplementation(self):
+        """
+        Simple implementation for systems where /proc/pid/fd exists (we assume
+        it works).
+        """
+        dname = "/proc/%d/fd" % (self.getpid(),)
+        return [int(fd) for fd in self.listdir(dname)]
+
+
+    def _fallbackFDImplementation(self):
+        """
+        Fallback implementation where either the resource module can inform us
+        about the upper bound of how many FDs to expect, or where we just guess
+        a constant maximum if there is no resource module.
+
+        All possible file descriptors from 0 to that upper bound are returned
+        with no attempt to exclude invalid file descriptor values.
+        """
+        try:
+            import resource
+        except ImportError:
+            maxfds = 1024
+        else:
+            # OS-X reports 9223372036854775808. That's a lot of fds to close.
+            # OS-X should get the /dev/fd implementation instead, so mostly
+            # this check probably isn't necessary.
+            maxfds = min(1024, resource.getrlimit(resource.RLIMIT_NOFILE)[1])
+        return range(maxfds)
+
+
+
+detector = _FDDetector()
+
+def _listOpenFDs():
+    """
+    Use the global detector object to figure out which FD implementation to
+    use.
+    """
+    return detector._listOpenFDs()
+
+
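+# An illustrative sketch, not upstream code, of the probe _FDDetector relies
+# on above: an implementation is trusted only if it notices a freshly opened
+# file descriptor.  The helper name _exampleProbeImplementation is
+# hypothetical; for instance, _exampleProbeImplementation(_listOpenFDs) checks
+# whichever implementation the detector picked.
+def _exampleProbeImplementation(listOpenFDs):
+    before = set(listOpenFDs())
+    fp = open("/dev/null", "r")
+    try:
+        after = set(listOpenFDs())
+    finally:
+        fp.close()
+    # A usable implementation reports at least one extra descriptor while
+    # /dev/null is open; the static fallback does not.
+    return after != before
+
+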
+class Process(_BaseProcess):
+    """
+    An operating-system Process.
+
+    This represents an operating-system process with arbitrary input/output
+    pipes connected to it. Those pipes may represent standard input,
+    standard output, and standard error, or any other file descriptor.
+
+    On UNIX, this is implemented using fork(), exec(), pipe()
+    and fcntl(). These calls may not exist elsewhere so this
+    code is not cross-platform. (also, windows can only select
+    on sockets...)
+    """
+    implements(IProcessTransport)
+
+    debug = False
+    debug_child = False
+
+    status = -1
+    pid = None
+
+    processWriterFactory = ProcessWriter
+    processReaderFactory = ProcessReader
+
+    def __init__(self,
+                 reactor, executable, args, environment, path, proto,
+                 uid=None, gid=None, childFDs=None):
+        """
+        Spawn an operating-system process.
+
+        This is where the hard work of disconnecting all currently open
+        files / forking / executing the new process happens.  (This is
+        executed automatically when a Process is instantiated.)
+
+        This will also run the subprocess as a given user ID and group ID, if
+        specified.  (Implementation Note: this doesn't support all the arcane
+        nuances of setXXuid on UNIX: it will assume that either your effective
+        or real UID is 0.)
+        """
+        if not proto:
+            assert 'r' not in childFDs.values()
+            assert 'w' not in childFDs.values()
+        _BaseProcess.__init__(self, proto)
+
+        self.pipes = {}
+        # keys are childFDs, we can sense them closing
+        # values are ProcessReader/ProcessWriters
+
+        helpers = {}
+        # keys are childFDs
+        # values are parentFDs
+
+        if childFDs is None:
+            childFDs = {0: "w", # we write to the child's stdin
+                        1: "r", # we read from their stdout
+                        2: "r", # and we read from their stderr
+                        }
+
+        debug = self.debug
+        if debug: print "childFDs", childFDs
+
+        _openedPipes = []
+        def pipe():
+            r, w = os.pipe()
+            _openedPipes.extend([r, w])
+            return r, w
+
+        # fdmap.keys() are filenos of pipes that are used by the child.
+        fdmap = {} # maps childFD to parentFD
+        try:
+            for childFD, target in childFDs.items():
+                if debug: print "[%d]" % childFD, target
+                if target == "r":
+                    # we need a pipe that the parent can read from
+                    readFD, writeFD = pipe()
+                    if debug: print "readFD=%d, writeFD=%d" % (readFD, writeFD)
+                    fdmap[childFD] = writeFD     # child writes to this
+                    helpers[childFD] = readFD    # parent reads from this
+                elif target == "w":
+                    # we need a pipe that the parent can write to
+                    readFD, writeFD = pipe()
+                    if debug: print "readFD=%d, writeFD=%d" % (readFD, writeFD)
+                    fdmap[childFD] = readFD      # child reads from this
+                    helpers[childFD] = writeFD   # parent writes to this
+                else:
+                    assert type(target) == int, '%r should be an int' % (target,)
+                    fdmap[childFD] = target      # parent ignores this
+            if debug: print "fdmap", fdmap
+            if debug: print "helpers", helpers
+            # the child only cares about fdmap.values()
+
+            self._fork(path, uid, gid, executable, args, environment, fdmap=fdmap)
+        except:
+            map(os.close, _openedPipes)
+            raise
+
+        # we are the parent process:
+        self.proto = proto
+
+        # arrange for the parent-side pipes to be read and written
+        for childFD, parentFD in helpers.items():
+            os.close(fdmap[childFD])
+
+            if childFDs[childFD] == "r":
+                reader = self.processReaderFactory(reactor, self, childFD,
+                                        parentFD)
+                self.pipes[childFD] = reader
+
+            if childFDs[childFD] == "w":
+                writer = self.processWriterFactory(reactor, self, childFD,
+                                        parentFD, forceReadHack=True)
+                self.pipes[childFD] = writer
+
+        try:
+            # the 'transport' is used for some compatibility methods
+            if self.proto is not None:
+                self.proto.makeConnection(self)
+        except:
+            log.err()
+
+        # The reactor might not be running yet.  This might call back into
+        # processEnded synchronously, triggering an application-visible
+        # callback.  That's probably not ideal.  The replacement API for
+        # spawnProcess should improve upon this situation.
+        registerReapProcessHandler(self.pid, self)
+
+
+    def _setupChild(self, fdmap):
+        """
+        fdmap[childFD] = parentFD
+
+        The child wants to end up with 'childFD' attached to what used to be
+        the parent's parentFD. As an example, a bash command run like
+        'command 2>&1' would correspond to an fdmap of {0:0, 1:1, 2:1}.
+        'command >foo.txt' would be {0:0, 1:os.open('foo.txt'), 2:2}.
+
+        This is accomplished in two steps::
+
+            1. close all file descriptors that aren't values of fdmap.  This
+               means 0 .. maxfds (or just the open fds within that range, if
+               the platform supports '/proc/<pid>/fd').
+
+            2. for each childFD::
+
+                 - if fdmap[childFD] == childFD, the descriptor is already in
+                   place.  Make sure the CLOEXEC flag is not set, then delete
+                   the entry from fdmap.
+
+                 - if childFD is in fdmap.values(), then the target descriptor
+                   is busy. Use os.dup() to move it elsewhere, update all
+                   fdmap[childFD] items that point to it, then close the
+                   original. Then fall through to the next case.
+
+                 - now fdmap[childFD] is not in fdmap.values(), and is free.
+                   Use os.dup2() to move it to the right place, then close the
+                   original.
+        """
+
+        debug = self.debug_child
+        if debug:
+            errfd = sys.stderr
+            errfd.write("starting _setupChild\n")
+
+        destList = fdmap.values()
+        for fd in _listOpenFDs():
+            if fd in destList:
+                continue
+            if debug and fd == errfd.fileno():
+                continue
+            try:
+                os.close(fd)
+            except:
+                pass
+
+        # at this point, the only fds still open are the ones that need to
+        # be moved to their appropriate positions in the child (the targets
+        # of fdmap, i.e. fdmap.values() )
+
+        if debug: print >>errfd, "fdmap", fdmap
+        childlist = fdmap.keys()
+        childlist.sort()
+
+        for child in childlist:
+            target = fdmap[child]
+            if target == child:
+                # fd is already in place
+                if debug: print >>errfd, "%d already in place" % target
+                fdesc._unsetCloseOnExec(child)
+            else:
+                if child in fdmap.values():
+                    # we can't replace child-fd yet, as some other mapping
+                    # still needs the fd it wants to target. We must preserve
+                    # that old fd by duping it to a new home.
+                    newtarget = os.dup(child) # give it a safe home
+                    if debug: print >>errfd, "os.dup(%d) -> %d" % (child,
+                                                                   newtarget)
+                    os.close(child) # close the original
+                    for c, p in fdmap.items():
+                        if p == child:
+                            fdmap[c] = newtarget # update all pointers
+                # now it should be available
+                if debug: print >>errfd, "os.dup2(%d,%d)" % (target, child)
+                os.dup2(target, child)
+
+        # At this point, the child has everything it needs. We want to close
+        # everything that isn't going to be used by the child, i.e.
+        # everything not in fdmap.keys(). The only remaining fds open are
+        # those in fdmap.values().
+
+        # Any given fd may appear in fdmap.values() multiple times, so we
+        # need to remove duplicates first.
+
+        old = []
+        for fd in fdmap.values():
+            if not fd in old:
+                if not fd in fdmap.keys():
+                    old.append(fd)
+        if debug: print >>errfd, "old", old
+        for fd in old:
+            os.close(fd)
+
+        self._resetSignalDisposition()
+
+
+    def writeToChild(self, childFD, data):
+        self.pipes[childFD].write(data)
+
+    def closeChildFD(self, childFD):
+        # for writer pipes, loseConnection tries to write the remaining data
+        # out to the pipe before closing it
+        # if childFD is not in the list of pipes, assume that it is already
+        # closed
+        if childFD in self.pipes:
+            self.pipes[childFD].loseConnection()
+
+    def pauseProducing(self):
+        for p in self.pipes.itervalues():
+            if isinstance(p, ProcessReader):
+                p.stopReading()
+
+    def resumeProducing(self):
+        for p in self.pipes.itervalues():
+            if isinstance(p, ProcessReader):
+                p.startReading()
+
+    # compatibility
+    def closeStdin(self):
+        """
+        Call this to close standard input on this process.
+        """
+        self.closeChildFD(0)
+
+    def closeStdout(self):
+        self.closeChildFD(1)
+
+    def closeStderr(self):
+        self.closeChildFD(2)
+
+    def loseConnection(self):
+        self.closeStdin()
+        self.closeStderr()
+        self.closeStdout()
+
+    def write(self, data):
+        """
+        Call this to write to standard input on this process.
+
+        NOTE: This will silently lose data if there is no standard input.
+        """
+        if 0 in self.pipes:
+            self.pipes[0].write(data)
+
+    def registerProducer(self, producer, streaming):
+        """
+        Call this to register producer for standard input.
+
+        If there is no standard input, producer.stopProducing() will be
+        called immediately.
+        """
+        if 0 in self.pipes:
+            self.pipes[0].registerProducer(producer, streaming)
+        else:
+            producer.stopProducing()
+
+    def unregisterProducer(self):
+        """
+        Call this to unregister the producer for standard input.
+        """
+        if 0 in self.pipes:
+            self.pipes[0].unregisterProducer()
+
+    def writeSequence(self, seq):
+        """
+        Call this to write to standard input on this process.
+
+        NOTE: This will silently lose data if there is no standard input.
+        """
+        if 0 in self.pipes:
+            self.pipes[0].writeSequence(seq)
+
+
+    def childDataReceived(self, name, data):
+        self.proto.childDataReceived(name, data)
+
+
+    def childConnectionLost(self, childFD, reason):
+        # this is called when one of the helpers (ProcessReader or
+        # ProcessWriter) notices their pipe has been closed
+        os.close(self.pipes[childFD].fileno())
+        del self.pipes[childFD]
+        try:
+            self.proto.childConnectionLost(childFD)
+        except:
+            log.err()
+        self.maybeCallProcessEnded()
+
+    def maybeCallProcessEnded(self):
+        # we don't call ProcessProtocol.processEnded until:
+        #  the child has terminated, AND
+        #  all writers have indicated an error status, AND
+        #  all readers have indicated EOF
+        # This ensures that we've gathered all output from the process.
+        if self.pipes:
+            return
+        if not self.lostProcess:
+            self.reapProcess()
+            return
+        _BaseProcess.maybeCallProcessEnded(self)
+
+
+
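+# A condensed sketch, not upstream code, of the fdmap shuffle documented in
+# Process._setupChild above: given a childFD -> parentFD mapping, each target
+# descriptor is moved into place with dup2(), after dup()ing any descriptor
+# that would otherwise be clobbered.  The helper name _exampleApplyFDMap is
+# hypothetical; CLOEXEC handling and the final cleanup pass are omitted.
+def _exampleApplyFDMap(fdmap):
+    import os
+    fdmap = dict(fdmap)
+    for child in sorted(fdmap.keys()):
+        if fdmap[child] == child:
+            continue                      # already in place
+        if child in fdmap.values():
+            # Another mapping still needs the fd currently occupying 'child';
+            # park it somewhere safe and update the pointers to it.
+            parked = os.dup(child)
+            os.close(child)
+            for c, p in fdmap.items():
+                if p == child:
+                    fdmap[c] = parked
+        os.dup2(fdmap[child], child)
+
+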
+class PTYProcess(abstract.FileDescriptor, _BaseProcess):
+    """
+    An operating-system Process that uses PTY support.
+    """
+    implements(IProcessTransport)
+
+    status = -1
+    pid = None
+
+    def __init__(self, reactor, executable, args, environment, path, proto,
+                 uid=None, gid=None, usePTY=None):
+        """
+        Spawn an operating-system process.
+
+        This is where the hard work of disconnecting all currently open
+        files / forking / executing the new process happens.  (This is
+        executed automatically when a Process is instantiated.)
+
+        This will also run the subprocess as a given user ID and group ID, if
+        specified.  (Implementation Note: this doesn't support all the arcane
+        nuances of setXXuid on UNIX: it will assume that either your effective
+        or real UID is 0.)
+        """
+        if pty is None and not isinstance(usePTY, (tuple, list)):
+            # no pty module and we didn't get a pty to use
+            raise NotImplementedError(
+                "cannot use PTYProcess on platforms without the pty module.")
+        abstract.FileDescriptor.__init__(self, reactor)
+        _BaseProcess.__init__(self, proto)
+
+        if isinstance(usePTY, (tuple, list)):
+            masterfd, slavefd, ttyname = usePTY
+        else:
+            masterfd, slavefd = pty.openpty()
+            ttyname = os.ttyname(slavefd)
+
+        try:
+            self._fork(path, uid, gid, executable, args, environment,
+                       masterfd=masterfd, slavefd=slavefd)
+        except:
+            if not isinstance(usePTY, (tuple, list)):
+                os.close(masterfd)
+                os.close(slavefd)
+            raise
+
+        # we are now in parent process:
+        os.close(slavefd)
+        fdesc.setNonBlocking(masterfd)
+        self.fd = masterfd
+        self.startReading()
+        self.connected = 1
+        self.status = -1
+        try:
+            self.proto.makeConnection(self)
+        except:
+            log.err()
+        registerReapProcessHandler(self.pid, self)
+
+
+    def _setupChild(self, masterfd, slavefd):
+        """
+        Set up child process after C{fork()} but before C{exec()}.
+
+        This involves:
+
+            - closing C{masterfd}, since it is not used in the subprocess
+
+            - creating a new session with C{os.setsid}
+
+            - changing the controlling terminal of the process (and the new
+              session) to point at C{slavefd}
+
+            - duplicating C{slavefd} to standard input, output, and error
+
+            - closing all other open file descriptors (according to
+              L{_listOpenFDs})
+
+            - re-setting all signal handlers to C{SIG_DFL}
+
+        @param masterfd: The master end of a PTY file descriptors opened with
+            C{openpty}.
+        @type masterfd: L{int}
+
+        @param slavefd: The slave end of a PTY opened with C{openpty}.
+        @type slavefd: L{int}
+        """
+        os.close(masterfd)
+        os.setsid()
+        fcntl.ioctl(slavefd, termios.TIOCSCTTY, '')
+
+        for fd in range(3):
+            if fd != slavefd:
+                os.close(fd)
+
+        os.dup2(slavefd, 0) # stdin
+        os.dup2(slavefd, 1) # stdout
+        os.dup2(slavefd, 2) # stderr
+
+        for fd in _listOpenFDs():
+            if fd > 2:
+                try:
+                    os.close(fd)
+                except:
+                    pass
+
+        self._resetSignalDisposition()
+
+
+    # PTYs do not have stdin/stdout/stderr. They only have in and out, just
+    # like sockets. You cannot close one without closing off the entire PTY.
+    def closeStdin(self):
+        pass
+
+    def closeStdout(self):
+        pass
+
+    def closeStderr(self):
+        pass
+
+    def doRead(self):
+        """
+        Called when my standard output stream is ready for reading.
+        """
+        return fdesc.readFromFD(
+            self.fd,
+            lambda data: self.proto.childDataReceived(1, data))
+
+    def fileno(self):
+        """
+        This returns the file number of standard output on this process.
+        """
+        return self.fd
+
+    def maybeCallProcessEnded(self):
+        # two things must happen before we call the ProcessProtocol's
+        # processEnded method. 1: the child process must die and be reaped
+        # (which calls our own processEnded method). 2: the child must close
+        # their stdin/stdout/stderr fds, causing the pty to close, causing
+        # our connectionLost method to be called. #2 can also be triggered
+        # by calling .loseConnection().
+        if self.lostProcess == 2:
+            _BaseProcess.maybeCallProcessEnded(self)
+
+    def connectionLost(self, reason):
+        """
+        I call this to clean up when one or all of my connections has died.
+        """
+        abstract.FileDescriptor.connectionLost(self, reason)
+        os.close(self.fd)
+        self.lostProcess += 1
+        self.maybeCallProcessEnded()
+
+    def writeSomeData(self, data):
+        """
+        Write some data to the open process.
+        """
+        return fdesc.writeToFD(self.fd, data)
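+
+
+# An illustrative sketch, not upstream code, of the controlling-terminal setup
+# performed in PTYProcess._setupChild above: a new session via setsid(),
+# TIOCSCTTY to adopt the slave end as the controlling terminal, and dup2()
+# onto the standard descriptors.  Meant to run in a freshly forked child with
+# the pair returned by pty.openpty() in the parent; the helper name
+# _exampleBecomePTYChild is hypothetical.
+def _exampleBecomePTYChild(masterfd, slavefd):
+    import fcntl, os, termios
+    os.close(masterfd)                    # the child only uses the slave end
+    os.setsid()                           # new session, no controlling tty yet
+    fcntl.ioctl(slavefd, termios.TIOCSCTTY, '')
+    os.dup2(slavefd, 0)                   # stdin
+    os.dup2(slavefd, 1)                   # stdout
+    os.dup2(slavefd, 2)                   # stderr
+    if slavefd > 2:
+        os.close(slavefd)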
diff --git a/ThirdParty/Twisted/twisted/internet/protocol.py b/ThirdParty/Twisted/twisted/internet/protocol.py
new file mode 100644
index 0000000..4c455bd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/protocol.py
@@ -0,0 +1,827 @@
+# -*- test-case-name: twisted.test.test_factories,twisted.internet.test.test_protocol -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Standard implementations of Twisted protocol-related interfaces.
+
+Start here if you are looking to write a new protocol implementation for
+Twisted.  The Protocol class contains some introductory material.
+"""
+
+from __future__ import division, absolute_import
+
+import random
+from zope.interface import implementer
+
+from twisted.python import log, failure, components
+from twisted.internet import interfaces, error, defer
+
+
+@implementer(interfaces.IProtocolFactory, interfaces.ILoggingContext)
+class Factory:
+    """
+    This is a factory which produces protocols.
+
+    By default, buildProtocol will create a protocol of the class given in
+    self.protocol.
+    """
+
+    # put a subclass of Protocol here:
+    protocol = None
+
+    numPorts = 0
+    noisy = True
+
+    def logPrefix(self):
+        """
+        Describe this factory for log messages.
+        """
+        return self.__class__.__name__
+
+
+    def doStart(self):
+        """Make sure startFactory is called.
+
+        Users should not call this function themselves!
+        """
+        if not self.numPorts:
+            if self.noisy:
+                log.msg("Starting factory %r" % self)
+            self.startFactory()
+        self.numPorts = self.numPorts + 1
+
+    def doStop(self):
+        """Make sure stopFactory is called.
+
+        Users should not call this function themselves!
+        """
+        if self.numPorts == 0:
+            # this shouldn't happen, but does sometimes and this is better
+            # than blowing up in assert as we did previously.
+            return
+        self.numPorts = self.numPorts - 1
+        if not self.numPorts:
+            if self.noisy:
+                log.msg("Stopping factory %r" % self)
+            self.stopFactory()
+
+    def startFactory(self):
+        """This will be called before I begin listening on a Port or Connector.
+
+        It will only be called once, even if the factory is connected
+        to multiple ports.
+
+        This can be used to perform 'unserialization' tasks that
+        are best put off until things are actually running, such
+        as connecting to a database, opening files, etcetera.
+        """
+
+    def stopFactory(self):
+        """This will be called before I stop listening on all Ports/Connectors.
+
+        This can be overridden to perform 'shutdown' tasks such as disconnecting
+        database connections, closing files, etc.
+
+        It will be called, for example, before an application shuts down,
+        if it was connected to a port. User code should not call this function
+        directly.
+        """
+
+    def buildProtocol(self, addr):
+        """Create an instance of a subclass of Protocol.
+
+        The returned instance will handle input on an incoming server
+        connection, and will have an attribute \"factory\" pointing to the
+        creating factory.
+
+        Override this method to alter how Protocol instances get created.
+
+        @param addr: an object implementing L{twisted.internet.interfaces.IAddress}
+        """
+        p = self.protocol()
+        p.factory = self
+        return p
+
+
+class ClientFactory(Factory):
+    """A Protocol factory for clients.
+
+    This can be used together with the various connectXXX methods in
+    reactors.
+    """
+
+    def startedConnecting(self, connector):
+        """Called when a connection has been started.
+
+        You can call connector.stopConnecting() to stop the connection attempt.
+
+        @param connector: a Connector object.
+        """
+
+    def clientConnectionFailed(self, connector, reason):
+        """Called when a connection has failed to connect.
+
+        It may be useful to call connector.connect() - this will reconnect.
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+
+    def clientConnectionLost(self, connector, reason):
+        """Called when an established connection is lost.
+
+        It may be useful to call connector.connect() - this will reconnect.
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+
+
+class _InstanceFactory(ClientFactory):
+    """
+    Factory used by ClientCreator.
+
+    @ivar deferred: The L{Deferred} which represents this connection attempt and
+        which will be fired when it succeeds or fails.
+
+    @ivar pending: After a connection attempt succeeds or fails, a delayed call
+        which will fire the L{Deferred} representing this connection attempt.
+    """
+
+    noisy = False
+    pending = None
+
+    def __init__(self, reactor, instance, deferred):
+        self.reactor = reactor
+        self.instance = instance
+        self.deferred = deferred
+
+
+    def __repr__(self):
+        return "<ClientCreator factory: %r>" % (self.instance, )
+
+
+    def buildProtocol(self, addr):
+        """
+        Return the pre-constructed protocol instance and arrange to fire the
+        waiting L{Deferred} to indicate success establishing the connection.
+        """
+        self.pending = self.reactor.callLater(
+            0, self.fire, self.deferred.callback, self.instance)
+        self.deferred = None
+        return self.instance
+
+
+    def clientConnectionFailed(self, connector, reason):
+        """
+        Arrange to fire the waiting L{Deferred} with the given failure to
+        indicate the connection could not be established.
+        """
+        self.pending = self.reactor.callLater(
+            0, self.fire, self.deferred.errback, reason)
+        self.deferred = None
+
+
+    def fire(self, func, value):
+        """
+        Clear C{self.pending} to avoid a reference cycle and then invoke func
+        with the value.
+        """
+        self.pending = None
+        func(value)
+
+
+
+class ClientCreator:
+    """
+    Client connections that do not require a factory.
+
+    The various connect* methods create a protocol instance using the given
+    protocol class and arguments, and connect it, returning a Deferred of the
+    resulting protocol instance.
+
+    Useful for cases when we don't really need a factory.  Mainly this
+    is when there is no shared state between protocol instances, and no need
+    to reconnect.
+
+    The C{connectTCP}, C{connectUNIX}, and C{connectSSL} methods each return a
+    L{Deferred} which will fire with an instance of the protocol class passed to
+    L{ClientCreator.__init__}.  These Deferreds can be cancelled to abort the
+    connection attempt (in a very unlikely case, cancelling the Deferred may not
+    prevent the protocol from being instantiated and connected to a transport;
+    if this happens, it will be disconnected immediately afterwards and the
+    Deferred will still errback with L{CancelledError}).
+    """
+
+    def __init__(self, reactor, protocolClass, *args, **kwargs):
+        self.reactor = reactor
+        self.protocolClass = protocolClass
+        self.args = args
+        self.kwargs = kwargs
+
+
+    def _connect(self, method, *args, **kwargs):
+        """
+        Initiate a connection attempt.
+
+        @param method: A callable which will actually start the connection
+            attempt.  For example, C{reactor.connectTCP}.
+
+        @param *args: Positional arguments to pass to C{method}, excluding the
+            factory.
+
+        @param **kwargs: Keyword arguments to pass to C{method}.
+
+        @return: A L{Deferred} which fires with an instance of the protocol
+            class passed to this L{ClientCreator}'s initializer or fails if the
+            connection cannot be set up for some reason.
+        """
+        def cancelConnect(deferred):
+            connector.disconnect()
+            if f.pending is not None:
+                f.pending.cancel()
+        d = defer.Deferred(cancelConnect)
+        f = _InstanceFactory(
+            self.reactor, self.protocolClass(*self.args, **self.kwargs), d)
+        connector = method(factory=f, *args, **kwargs)
+        return d
+
+
+    def connectTCP(self, host, port, timeout=30, bindAddress=None):
+        """
+        Connect to a TCP server.
+
+        The parameters are all the same as to L{IReactorTCP.connectTCP} except
+        that the factory parameter is omitted.
+
+        @return: A L{Deferred} which fires with an instance of the protocol
+            class passed to this L{ClientCreator}'s initializer or fails if the
+            connection cannot be set up for some reason.
+        """
+        return self._connect(
+            self.reactor.connectTCP, host, port, timeout=timeout,
+            bindAddress=bindAddress)
+
+
+    def connectUNIX(self, address, timeout=30, checkPID=False):
+        """
+        Connect to a Unix socket.
+
+        The parameters are all the same as to L{IReactorUNIX.connectUNIX} except
+        that the factory parameter is omitted.
+
+        @return: A L{Deferred} which fires with an instance of the protocol
+            class passed to this L{ClientCreator}'s initializer or fails if the
+            connection cannot be set up for some reason.
+        """
+        return self._connect(
+            self.reactor.connectUNIX, address, timeout=timeout,
+            checkPID=checkPID)
+
+
+    def connectSSL(self, host, port, contextFactory, timeout=30, bindAddress=None):
+        """
+        Connect to an SSL server.
+
+        The parameters are all the same as to L{IReactorSSL.connectSSL} except
+        that the factory parameter is omitted.
+
+        @return: A L{Deferred} which fires with an instance of the protocol
+            class passed to this L{ClientCreator}'s initializer or fails if the
+            connection cannot be set up for some reason.
+        """
+        return self._connect(
+            self.reactor.connectSSL, host, port,
+            contextFactory=contextFactory, timeout=timeout,
+            bindAddress=bindAddress)
+
+
+
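+# An illustrative use of ClientCreator, not upstream code.  'protocolClass'
+# stands in for any Protocol subclass and the function name is hypothetical.
+def _exampleConnect(reactor, protocolClass, host, port):
+    """
+    Connect C{protocolClass} to C{host}:C{port} without writing a factory,
+    returning a Deferred that fires with the connected protocol instance.
+    """
+    creator = ClientCreator(reactor, protocolClass)
+    d = creator.connectTCP(host, port)
+    def connected(proto):
+        # By now proto.transport is set and connectionMade() has run.
+        return proto
+    d.addCallback(connected)
+    return d
+
+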
+class ReconnectingClientFactory(ClientFactory):
+    """
+    Factory which auto-reconnects clients with an exponential back-off.
+
+    Note that clients should call my resetDelay method after they have
+    connected successfully.
+
+    @ivar maxDelay: Maximum number of seconds between connection attempts.
+    @ivar initialDelay: Delay for the first reconnection attempt.
+    @ivar factor: A multiplicative factor by which the delay grows.
+    @ivar jitter: Percentage of randomness to introduce into the delay length
+        to prevent stampeding.
+    @ivar clock: The clock used to schedule reconnection. It's mainly useful to
+        be parametrized in tests. If the factory is serialized, this attribute
+        will not be serialized, and the default value (the reactor) will be
+        restored when deserialized.
+    @type clock: L{IReactorTime}
+    @ivar maxRetries: Maximum number of consecutive unsuccessful connection
+        attempts, after which no further connection attempts will be made. If
+        this is not explicitly set, no maximum is applied.
+    """
+    maxDelay = 3600
+    initialDelay = 1.0
+    # Note: These highly sensitive factors have been precisely measured by
+    # the National Institute of Science and Technology.  Take extreme care
+    # in altering them, or you may damage your Internet!
+    # (Seriously: <http://physics.nist.gov/cuu/Constants/index.html>)
+    factor = 2.7182818284590451 # (math.e)
+    # Phi = 1.6180339887498948 # (Phi is acceptable for use as a
+    # factor if e is too large for your application.)
+    jitter = 0.11962656472 # molar Planck constant times c, joule meter/mole
+
+    delay = initialDelay
+    retries = 0
+    maxRetries = None
+    _callID = None
+    connector = None
+    clock = None
+
+    continueTrying = 1
+
+
+    def clientConnectionFailed(self, connector, reason):
+        if self.continueTrying:
+            self.connector = connector
+            self.retry()
+
+
+    def clientConnectionLost(self, connector, unused_reason):
+        if self.continueTrying:
+            self.connector = connector
+            self.retry()
+
+
+    def retry(self, connector=None):
+        """
+        Have this connector connect again, after a suitable delay.
+        """
+        if not self.continueTrying:
+            if self.noisy:
+                log.msg("Abandoning %s on explicit request" % (connector,))
+            return
+
+        if connector is None:
+            if self.connector is None:
+                raise ValueError("no connector to retry")
+            else:
+                connector = self.connector
+
+        self.retries += 1
+        if self.maxRetries is not None and (self.retries > self.maxRetries):
+            if self.noisy:
+                log.msg("Abandoning %s after %d retries." %
+                        (connector, self.retries))
+            return
+
+        self.delay = min(self.delay * self.factor, self.maxDelay)
+        if self.jitter:
+            self.delay = random.normalvariate(self.delay,
+                                              self.delay * self.jitter)
+
+        if self.noisy:
+            log.msg("%s will retry in %d seconds" % (connector, self.delay,))
+
+        def reconnector():
+            self._callID = None
+            connector.connect()
+        if self.clock is None:
+            from twisted.internet import reactor
+            self.clock = reactor
+        self._callID = self.clock.callLater(self.delay, reconnector)
+
+
+    def stopTrying(self):
+        """
+        Put a stop to any attempt to reconnect in progress.
+        """
+        # ??? Is this function really stopFactory?
+        if self._callID:
+            self._callID.cancel()
+            self._callID = None
+        self.continueTrying = 0
+        if self.connector:
+            try:
+                self.connector.stopConnecting()
+            except error.NotConnectingError:
+                pass
+
+
+    def resetDelay(self):
+        """
+        Call this method after a successful connection: it resets the delay and
+        the retry counter.
+        """
+        self.delay = self.initialDelay
+        self.retries = 0
+        self._callID = None
+        self.continueTrying = 1
+
+
+    def __getstate__(self):
+        """
+        Remove all of the state which is mutated by connection attempts and
+        failures, returning just the state which describes how reconnections
+        should be attempted.  This will make the unserialized instance
+        behave just as this one did when it was first instantiated.
+        """
+        state = self.__dict__.copy()
+        for key in ['connector', 'retries', 'delay',
+                    'continueTrying', '_callID', 'clock']:
+            if key in state:
+                del state[key]
+        return state
+
+
+
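+# A small sketch, not upstream code, of the back-off schedule
+# ReconnectingClientFactory computes above: each failure multiplies the delay
+# by 'factor', clamps it at 'maxDelay', and optionally spreads it with
+# normally distributed jitter.  The function name is hypothetical.
+def _exampleBackoffDelays(attempts, initialDelay=1.0,
+                          factor=2.7182818284590451, maxDelay=3600,
+                          jitter=0.11962656472):
+    delays = []
+    delay = initialDelay
+    for _ in range(attempts):
+        delay = min(delay * factor, maxDelay)
+        if jitter:
+            delay = random.normalvariate(delay, delay * jitter)
+        delays.append(delay)
+    return delays
+
+
+# For instance, _exampleBackoffDelays(4) yields roughly 2.7, 7.4, 20 and 55
+# seconds, give or take the jitter.
+
+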
+class ServerFactory(Factory):
+    """Subclass this to indicate that your protocol.Factory is only usable for servers.
+    """
+
+
+
+class BaseProtocol:
+    """
+    This is the abstract superclass of all protocols.
+
+    Some methods have helpful default implementations here so that they can
+    easily be shared, but otherwise the direct subclasses of this class are more
+    interesting, L{Protocol} and L{ProcessProtocol}.
+    """
+    connected = 0
+    transport = None
+
+    def makeConnection(self, transport):
+        """Make a connection to a transport and a server.
+
+        This sets the 'transport' attribute of this Protocol, and calls the
+        connectionMade() callback.
+        """
+        self.connected = 1
+        self.transport = transport
+        self.connectionMade()
+
+    def connectionMade(self):
+        """Called when a connection is made.
+
+        This may be considered the initializer of the protocol, because
+        it is called when the connection is completed.  For clients,
+        this is called once the connection to the server has been
+        established; for servers, this is called after an accept() call
+        stops blocking and a socket has been received.  If you need to
+        send any greeting or initial message, do it here.
+        """
+
+connectionDone=failure.Failure(error.ConnectionDone())
+connectionDone.cleanFailure()
+
+
+@implementer(interfaces.IProtocol, interfaces.ILoggingContext)
+class Protocol(BaseProtocol):
+    """
+    This is the base class for streaming connection-oriented protocols.
+
+    If you are going to write a new connection-oriented protocol for Twisted,
+    start here.  Any protocol implementation, either client or server, should
+    be a subclass of this class.
+
+    The API is quite simple.  Implement L{dataReceived} to handle both
+    event-based and synchronous input; output can be sent through the
+    'transport' attribute, which is to be an instance that implements
+    L{twisted.internet.interfaces.ITransport}.  Override C{connectionLost} to be
+    notified when the connection ends.
+
+    Some subclasses exist already to help you write common types of protocols:
+    see the L{twisted.protocols.basic} module for a few of them.
+    """
+
+    def logPrefix(self):
+        """
+        Return a prefix matching the class name, to identify log messages
+        related to this protocol instance.
+        """
+        return self.__class__.__name__
+
+
+    def dataReceived(self, data):
+        """Called whenever data is received.
+
+        Use this method to translate to a higher-level message.  Usually, some
+        callback will be made upon the receipt of each complete protocol
+        message.
+
+        @param data: a string of indeterminate length.  Please keep in mind
+            that you will probably need to buffer some data, as partial
+            (or multiple) protocol messages may be received!  I recommend
+            that unit tests for protocols call through to this method with
+            differing chunk sizes, down to one byte at a time.
+        """
+
+    def connectionLost(self, reason=connectionDone):
+        """Called when the connection is shut down.
+
+        Clear any circular references here, and any external references
+        to this Protocol.  The connection has been closed.
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+
+
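+# A minimal Protocol/Factory pair, not upstream code, showing the pattern
+# described above: implement dataReceived(), write through self.transport,
+# and let a Factory build one protocol instance per connection.  The class
+# names are hypothetical.
+class _ExampleEcho(Protocol):
+    def dataReceived(self, data):
+        # Echo every received chunk straight back to the peer.
+        self.transport.write(data)
+
+
+class _ExampleEchoFactory(ServerFactory):
+    # Factory.buildProtocol() instantiates self.protocol and sets the
+    # 'factory' attribute on the new instance.
+    protocol = _ExampleEcho
+
+
+# Typical use, with a reactor obtained elsewhere:
+#     reactor.listenTCP(8007, _ExampleEchoFactory())
+
+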
+@implementer(interfaces.IConsumer)
+class ProtocolToConsumerAdapter(components.Adapter):
+
+    def write(self, data):
+        self.original.dataReceived(data)
+
+    def registerProducer(self, producer, streaming):
+        pass
+
+    def unregisterProducer(self):
+        pass
+
+components.registerAdapter(ProtocolToConsumerAdapter, interfaces.IProtocol,
+                           interfaces.IConsumer)
+
+@implementer(interfaces.IProtocol)
+class ConsumerToProtocolAdapter(components.Adapter):
+
+    def dataReceived(self, data):
+        self.original.write(data)
+
+    def connectionLost(self, reason):
+        pass
+
+    def makeConnection(self, transport):
+        pass
+
+    def connectionMade(self):
+        pass
+
+components.registerAdapter(ConsumerToProtocolAdapter, interfaces.IConsumer,
+                           interfaces.IProtocol)
+
+@implementer(interfaces.IProcessProtocol)
+class ProcessProtocol(BaseProtocol):
+    """
+    Base process protocol implementation which does simple dispatching for
+    stdin, stdout, and stderr file descriptors.
+    """
+
+    def childDataReceived(self, childFD, data):
+        if childFD == 1:
+            self.outReceived(data)
+        elif childFD == 2:
+            self.errReceived(data)
+
+
+    def outReceived(self, data):
+        """
+        Some data was received from stdout.
+        """
+
+
+    def errReceived(self, data):
+        """
+        Some data was received from stderr.
+        """
+
+
+    def childConnectionLost(self, childFD):
+        if childFD == 0:
+            self.inConnectionLost()
+        elif childFD == 1:
+            self.outConnectionLost()
+        elif childFD == 2:
+            self.errConnectionLost()
+
+
+    def inConnectionLost(self):
+        """
+        This will be called when stdin is closed.
+        """
+
+
+    def outConnectionLost(self):
+        """
+        This will be called when stdout is closed.
+        """
+
+
+    def errConnectionLost(self):
+        """
+        This will be called when stderr is closed.
+        """
+
+
+    def processExited(self, reason):
+        """
+        This will be called when the subprocess exits.
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+
+
+    def processEnded(self, reason):
+        """
+        Called when the child process exits and all file descriptors
+        associated with it have been closed.
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+
+
+
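+# An illustrative ProcessProtocol subclass, not upstream code, showing the
+# dispatching described above: stdout arrives via outReceived(), stderr via
+# errReceived(), and processEnded() fires once the child has exited and every
+# pipe is closed.  The class name and the Deferred wiring are hypothetical.
+class _ExampleCollector(ProcessProtocol):
+    def __init__(self):
+        self.output = []
+        self.done = defer.Deferred()
+
+    def outReceived(self, data):
+        self.output.append(data)
+
+    def errReceived(self, data):
+        log.msg("child stderr: %r" % (data,))
+
+    def processEnded(self, reason):
+        # Fires with everything the child wrote to stdout.
+        self.done.callback("".join(self.output))
+
+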
+class AbstractDatagramProtocol:
+    """
+    Abstract protocol for datagram-oriented transports, e.g. IP, ICMP, ARP, UDP.
+    """
+
+    transport = None
+    numPorts = 0
+    noisy = True
+
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        d['transport'] = None
+        return d
+
+    def doStart(self):
+        """Make sure startProtocol is called.
+
+        This will be called by makeConnection(), users should not call it.
+        """
+        if not self.numPorts:
+            if self.noisy:
+                log.msg("Starting protocol %s" % self)
+            self.startProtocol()
+        self.numPorts = self.numPorts + 1
+
+    def doStop(self):
+        """Make sure stopProtocol is called.
+
+        This will be called by the port, users should not call it.
+        """
+        assert self.numPorts > 0
+        self.numPorts = self.numPorts - 1
+        self.transport = None
+        if not self.numPorts:
+            if self.noisy:
+                log.msg("Stopping protocol %s" % self)
+            self.stopProtocol()
+
+    def startProtocol(self):
+        """Called when a transport is connected to this protocol.
+
+        Will only be called once, even if multiple ports are connected.
+        """
+
+    def stopProtocol(self):
+        """Called when the transport is disconnected.
+
+        Will only be called once, after all ports are disconnected.
+        """
+
+    def makeConnection(self, transport):
+        """Make a connection to a transport and a server.
+
+        This sets the 'transport' attribute of this DatagramProtocol, and calls the
+        doStart() callback.
+        """
+        assert self.transport == None
+        self.transport = transport
+        self.doStart()
+
+    def datagramReceived(self, datagram, addr):
+        """Called when a datagram is received.
+
+        @param datagram: the string received from the transport.
+        @param addr: the source address of the datagram, as a tuple.
+        """
+
+
+@implementer(interfaces.ILoggingContext)
+class DatagramProtocol(AbstractDatagramProtocol):
+    """
+    Protocol for datagram-oriented transport, e.g. UDP.
+
+    @type transport: C{NoneType} or
+        L{IUDPTransport<twisted.internet.interfaces.IUDPTransport>} provider
+    @ivar transport: The transport with which this protocol is associated,
+        if it is associated with one.
+    """
+
+    def logPrefix(self):
+        """
+        Return a prefix matching the class name, to identify log messages
+        related to this protocol instance.
+        """
+        return self.__class__.__name__
+
+
+    def connectionRefused(self):
+        """Called due to error from write in connected mode.
+
+        Note this is a result of an ICMP message generated by a *previous*
+        write.
+        """
+
+
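+# A minimal DatagramProtocol, not upstream code, illustrating
+# datagramReceived() and replying through the transport; the class name is
+# hypothetical.
+class _ExampleUDPEcho(DatagramProtocol):
+    def datagramReceived(self, datagram, addr):
+        # Send the datagram back to whoever sent it.
+        self.transport.write(datagram, addr)
+
+
+# Typical use, with a reactor obtained elsewhere:
+#     reactor.listenUDP(8007, _ExampleUDPEcho())
+
+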
+class ConnectedDatagramProtocol(DatagramProtocol):
+    """Protocol for connected datagram-oriented transport.
+
+    No longer necessary for UDP.
+    """
+
+    def datagramReceived(self, datagram):
+        """Called when a datagram is received.
+
+        @param datagram: the string received from the transport.
+        """
+
+    def connectionFailed(self, failure):
+        """Called if connecting failed.
+
+        Usually this will be due to a DNS lookup failure.
+        """
+
+
+
+@implementer(interfaces.ITransport)
+class FileWrapper:
+    """A wrapper around a file-like object to make it behave as a Transport.
+
+    This doesn't actually stream the file to the attached protocol,
+    and is thus useful mainly as a utility for debugging protocols.
+    """
+
+    closed = 0
+    disconnecting = 0
+    producer = None
+    streamingProducer = 0
+
+    def __init__(self, file):
+        self.file = file
+
+    def write(self, data):
+        try:
+            self.file.write(data)
+        except:
+            self.handleException()
+        # self._checkProducer()
+
+    def _checkProducer(self):
+        # Cheating; this is called at "idle" times to allow producers to be
+        # found and dealt with
+        if self.producer:
+            self.producer.resumeProducing()
+
+    def registerProducer(self, producer, streaming):
+        """From abstract.FileDescriptor
+        """
+        self.producer = producer
+        self.streamingProducer = streaming
+        if not streaming:
+            producer.resumeProducing()
+
+    def unregisterProducer(self):
+        self.producer = None
+
+    def stopConsuming(self):
+        self.unregisterProducer()
+        self.loseConnection()
+
+    def writeSequence(self, iovec):
+        self.write("".join(iovec))
+
+    def loseConnection(self):
+        self.closed = 1
+        try:
+            self.file.close()
+        except (IOError, OSError):
+            self.handleException()
+
+    def getPeer(self):
+        # XXX: According to ITransport, this should return an IAddress!
+        return 'file', 'file'
+
+    def getHost(self):
+        # XXX: According to ITransport, this should return an IAddress!
+        return 'file'
+
+    def handleException(self):
+        pass
+
+    def resumeProducing(self):
+        # Never sends data anyways
+        pass
+
+    def pauseProducing(self):
+        # Never sends data anyways
+        pass
+
+    def stopProducing(self):
+        self.loseConnection()
+
+
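+# An illustrative sketch, not upstream code, of using FileWrapper to drive a
+# protocol by hand while debugging, with an in-memory file standing in for a
+# real transport.  The helper name is hypothetical and assumes Python 2's
+# StringIO module.
+def _exampleDriveProtocol(protocolClass, inputData):
+    from StringIO import StringIO
+    outfile = StringIO()
+    proto = protocolClass()
+    proto.makeConnection(FileWrapper(outfile))   # writes land in outfile
+    proto.dataReceived(inputData)                # feed bytes in by hand
+    return outfile.getvalue()
+
+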
+__all__ = ["Factory", "ClientFactory", "ReconnectingClientFactory", "connectionDone",
+           "Protocol", "ProcessProtocol", "FileWrapper", "ServerFactory",
+           "AbstractDatagramProtocol", "DatagramProtocol", "ConnectedDatagramProtocol",
+           "ClientCreator"]
diff --git a/ThirdParty/Twisted/twisted/internet/pyuisupport.py b/ThirdParty/Twisted/twisted/internet/pyuisupport.py
new file mode 100644
index 0000000..1e7def5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/pyuisupport.py
@@ -0,0 +1,37 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+This module integrates PyUI with twisted.internet's mainloop.
+
+Maintainer: Jp Calderone
+
+See doc/examples/pyuidemo.py for example usage.
+"""
+
+# System imports
+import pyui
+
+def _guiUpdate(reactor, delay):
+    pyui.draw()
+    if pyui.update() == 0:
+        pyui.quit()
+        reactor.stop()
+    else:
+        reactor.callLater(delay, _guiUpdate, reactor, delay)
+
+
+def install(ms=10, reactor=None, args=(), kw={}):
+    """
+    Schedule PyUI's display to be updated approximately every C{ms}
+    milliseconds, and initialize PyUI with the specified arguments.
+    """
+    d = pyui.init(*args, **kw)
+
+    if reactor is None:
+        from twisted.internet import reactor
+    _guiUpdate(reactor, ms / 1000.0)
+    return d
+
+__all__ = ["install"]
diff --git a/ThirdParty/Twisted/twisted/internet/qtreactor.py b/ThirdParty/Twisted/twisted/internet/qtreactor.py
new file mode 100644
index 0000000..a548008
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/qtreactor.py
@@ -0,0 +1,19 @@
+# -*- test-case-name: twisted.internet.test.test_qtreactor -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+try:
+    # 'import qtreactor' would have imported this file instead of the
+    # top-level qtreactor. __import__ does the right thing
+    # (kids, don't repeat this at home)
+    install = __import__('qtreactor').install
+except ImportError:
+    from twisted.plugins.twisted_qtstub import errorMessage
+    raise ImportError(errorMessage)
+else:
+    import warnings
+    warnings.warn("Please use qtreactor instead of twisted.internet.qtreactor",
+                  category=DeprecationWarning)
+
+__all__ = ['install']
+
diff --git a/ThirdParty/Twisted/twisted/internet/reactor.py b/ThirdParty/Twisted/twisted/internet/reactor.py
new file mode 100644
index 0000000..6dd72af
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/reactor.py
@@ -0,0 +1,39 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+The reactor is the Twisted event loop within Twisted, the loop which drives
+applications using Twisted. The reactor provides APIs for networking,
+threading, dispatching events, and more.
+
+The default reactor depends on the platform and will be installed if this
+module is imported without another reactor being explicitly installed
+beforehand. Regardless of which reactor is installed, importing this module is
+the correct way to get a reference to it.
+
+New application code should prefer to pass and accept the reactor as a
+parameter where it is needed, rather than relying on being able to import this
+module to get a reference.  This simplifies unit testing and may make it easier
+to one day support multiple reactors (as a performance enhancement), though
+this is not currently possible.
+
+@see: L{IReactorCore<twisted.internet.interfaces.IReactorCore>}
+@see: L{IReactorTime<twisted.internet.interfaces.IReactorTime>}
+@see: L{IReactorProcess<twisted.internet.interfaces.IReactorProcess>}
+@see: L{IReactorTCP<twisted.internet.interfaces.IReactorTCP>}
+@see: L{IReactorSSL<twisted.internet.interfaces.IReactorSSL>}
+@see: L{IReactorUDP<twisted.internet.interfaces.IReactorUDP>}
+@see: L{IReactorMulticast<twisted.internet.interfaces.IReactorMulticast>}
+@see: L{IReactorUNIX<twisted.internet.interfaces.IReactorUNIX>}
+@see: L{IReactorUNIXDatagram<twisted.internet.interfaces.IReactorUNIXDatagram>}
+@see: L{IReactorFDSet<twisted.internet.interfaces.IReactorFDSet>}
+@see: L{IReactorThreads<twisted.internet.interfaces.IReactorThreads>}
+@see: L{IReactorPluggableResolver<twisted.internet.interfaces.IReactorPluggableResolver>}
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+del sys.modules['twisted.internet.reactor']
+from twisted.internet import default
+default.install()
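+
+# Illustrative sketch of the two styles described in the docstring above.
+# The helper names below are assumptions for illustration, not part of the
+# module's API.
+def _exampleGlobalReactor():
+    # Importing this module installs (or returns) the default reactor.
+    from twisted.internet import reactor
+    reactor.callLater(1.0, reactor.stop)  # stop the loop after one second
+    reactor.run()
+
+def _exampleReactorAsParameter(reactor):
+    # Accepting the reactor as an argument keeps code testable, e.g. with
+    # twisted.internet.task.Clock standing in for the real event loop.
+    return reactor.callLater(1.0, lambda: None)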
diff --git a/ThirdParty/Twisted/twisted/internet/selectreactor.py b/ThirdParty/Twisted/twisted/internet/selectreactor.py
new file mode 100644
index 0000000..d4be832
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/selectreactor.py
@@ -0,0 +1,204 @@
+# -*- test-case-name: twisted.test.test_internet -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Select reactor
+"""
+
+from __future__ import division, absolute_import
+
+from time import sleep
+import sys, select, socket
+from errno import EINTR, EBADF
+
+from zope.interface import implementer
+
+from twisted.internet.interfaces import IReactorFDSet
+from twisted.internet import posixbase
+from twisted.python import log
+from twisted.python.runtime import platformType
+
+
+def win32select(r, w, e, timeout=None):
+    """Win32 select wrapper."""
+    if not (r or w):
+        # Windows select() exits immediately when given no sockets.
+        if timeout is None:
+            timeout = 0.01
+        else:
+            timeout = min(timeout, 0.001)
+        sleep(timeout)
+        return [], [], []
+    # windows doesn't process 'signals' inside select(), so we set a max
+    # time or ctrl-c will never be recognized
+    if timeout is None or timeout > 0.5:
+        timeout = 0.5
+    r, w, e = select.select(r, w, w, timeout)
+    return r, w + e, []
+
+if platformType == "win32":
+    _select = win32select
+else:
+    _select = select.select
+
+
+try:
+    from twisted.internet.win32eventreactor import _ThreadedWin32EventsMixin
+except ImportError:
+    _extraBase = object
+else:
+    _extraBase = _ThreadedWin32EventsMixin
+
+
+@implementer(IReactorFDSet)
+class SelectReactor(posixbase.PosixReactorBase, _extraBase):
+    """
+    A select() based reactor - runs on all POSIX platforms and on Win32.
+
+    @ivar _reads: A dictionary mapping L{FileDescriptor} instances to arbitrary
+        values (this is essentially a set).  Keys in this dictionary will be
+        checked for read events.
+
+    @ivar _writes: A dictionary mapping L{FileDescriptor} instances to
+        arbitrary values (this is essentially a set).  Keys in this dictionary
+        will be checked for writability.
+    """
+
+    def __init__(self):
+        """
+        Initialize file descriptor tracking dictionaries and the base class.
+        """
+        self._reads = {}
+        self._writes = {}
+        posixbase.PosixReactorBase.__init__(self)
+
+
+    def _preenDescriptors(self):
+        log.msg("Malformed file descriptor found.  Preening lists.")
+        readers = list(self._reads.keys())
+        writers = list(self._writes.keys())
+        self._reads.clear()
+        self._writes.clear()
+        for selDict, selList in ((self._reads, readers),
+                                 (self._writes, writers)):
+            for selectable in selList:
+                try:
+                    select.select([selectable], [selectable], [selectable], 0)
+                except Exception as e:
+                    log.msg("bad descriptor %s" % selectable)
+                    self._disconnectSelectable(selectable, e, False)
+                else:
+                    selDict[selectable] = 1
+
+
+    def doSelect(self, timeout):
+        """
+        Run one iteration of the I/O monitor loop.
+
+        This will run all selectables that had input or output readiness
+        waiting for them.
+        """
+        try:
+            r, w, ignored = _select(self._reads.keys(),
+                                    self._writes.keys(),
+                                    [], timeout)
+        except ValueError:
+            # Possibly a file descriptor has gone negative?
+            self._preenDescriptors()
+            return
+        except TypeError:
+            # Something *totally* invalid (object w/o fileno, non-integral
+            # result) was passed
+            log.err()
+            self._preenDescriptors()
+            return
+        except (select.error, socket.error, IOError) as se:
+            # select(2) encountered an error, perhaps while calling the fileno()
+            # method of a socket.  (Python 2.6 socket.error is an IOError
+            # subclass, but on Python 2.5 and earlier it is not.)
+            if se.args[0] in (0, 2):
+                # windows does this if it got an empty list
+                if (not self._reads) and (not self._writes):
+                    return
+                else:
+                    raise
+            elif se.args[0] == EINTR:
+                return
+            elif se.args[0] == EBADF:
+                self._preenDescriptors()
+                return
+            else:
+                # OK, I really don't know what's going on.  Blow up.
+                raise
+
+        _drdw = self._doReadOrWrite
+        _logrun = log.callWithLogger
+        for selectables, method, fdset in ((r, "doRead", self._reads),
+                                           (w,"doWrite", self._writes)):
+            for selectable in selectables:
+                # if this was disconnected in another thread, kill it.
+                # ^^^^ --- what the !@#*?  serious!  -exarkun
+                if selectable not in fdset:
+                    continue
+                # This is for pausing input when we're not ready for more.
+                _logrun(selectable, _drdw, selectable, method, dict)
+
+    doIteration = doSelect
+
+    def _doReadOrWrite(self, selectable, method, dict):
+        try:
+            why = getattr(selectable, method)()
+        except:
+            why = sys.exc_info()[1]
+            log.err()
+        if why:
+            self._disconnectSelectable(selectable, why, method=="doRead")
+
+    def addReader(self, reader):
+        """
+        Add a FileDescriptor for notification of data available to read.
+        """
+        self._reads[reader] = 1
+
+    def addWriter(self, writer):
+        """
+        Add a FileDescriptor for notification of data available to write.
+        """
+        self._writes[writer] = 1
+
+    def removeReader(self, reader):
+        """
+        Remove a Selectable for notification of data available to read.
+        """
+        if reader in self._reads:
+            del self._reads[reader]
+
+    def removeWriter(self, writer):
+        """
+        Remove a Selectable for notification of data available to write.
+        """
+        if writer in self._writes:
+            del self._writes[writer]
+
+    def removeAll(self):
+        return self._removeAll(self._reads, self._writes)
+
+
+    def getReaders(self):
+        return list(self._reads.keys())
+
+
+    def getWriters(self):
+        return list(self._writes.keys())
+
+
+
+def install():
+    """Configure the twisted mainloop to be run using the select() reactor.
+    """
+    reactor = SelectReactor()
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+
+__all__ = ['install']
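+
+# Usage sketch (illustrative): an alternative reactor must be installed
+# before twisted.internet.reactor is imported anywhere in the program.
+def _exampleInstallSelectReactor():
+    from twisted.internet import selectreactor
+    selectreactor.install()
+    # From this point on, twisted.internet.reactor refers to a SelectReactor.
+    from twisted.internet import reactor
+    return reactor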
diff --git a/ThirdParty/Twisted/twisted/internet/serialport.py b/ThirdParty/Twisted/twisted/internet/serialport.py
new file mode 100644
index 0000000..500d8ba
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/serialport.py
@@ -0,0 +1,87 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Serial Port Protocol
+"""
+
+# http://twistedmatrix.com/trac/ticket/3725#comment:24
+# Apparently applications use these names even though they should
+# be imported from pyserial
+__all__ = ["serial", "PARITY_ODD", "PARITY_EVEN", "PARITY_NONE",
+           "STOPBITS_TWO", "STOPBITS_ONE", "FIVEBITS",
+           "EIGHTBITS", "SEVENBITS", "SIXBITS",
+# The name this module is actually trying to export
+           "SerialPort"]
+
+# system imports
+import os, sys
+
+# all of them require pyserial at the moment, so check that first
+import serial
+from serial import PARITY_NONE, PARITY_EVEN, PARITY_ODD
+from serial import STOPBITS_ONE, STOPBITS_TWO
+from serial import FIVEBITS, SIXBITS, SEVENBITS, EIGHTBITS
+
+
+
+class BaseSerialPort:
+    """
+    Base class for Windows and POSIX serial ports.
+
+    @ivar _serialFactory: a pyserial C{serial.Serial} factory, used to create
+        the instance stored in C{self._serial}. Overrideable to enable easier
+        testing.
+
+    @ivar _serial: a pyserial C{serial.Serial} instance used to manage the
+        options on the serial port.
+    """
+
+    _serialFactory = serial.Serial
+
+
+    def setBaudRate(self, baudrate):
+        if hasattr(self._serial, "setBaudrate"):
+            self._serial.setBaudrate(baudrate)
+        else:
+            self._serial.setBaudRate(baudrate)
+
+    def inWaiting(self):
+        return self._serial.inWaiting()
+
+    def flushInput(self):
+        self._serial.flushInput()
+
+    def flushOutput(self):
+        self._serial.flushOutput()
+
+    def sendBreak(self):
+        self._serial.sendBreak()
+
+    def getDSR(self):
+        return self._serial.getDSR()
+
+    def getCD(self):
+        return self._serial.getCD()
+
+    def getRI(self):
+        return self._serial.getRI()
+
+    def getCTS(self):
+        return self._serial.getCTS()
+
+    def setDTR(self, on = 1):
+        self._serial.setDTR(on)
+
+    def setRTS(self, on = 1):
+        self._serial.setRTS(on)
+
+class SerialPort(BaseSerialPort):
+    pass
+
+# replace SerialPort with appropriate serial port
+if os.name == 'posix':
+    from twisted.internet._posixserialport import SerialPort
+elif sys.platform == 'win32':
+    from twisted.internet._win32serialport import SerialPort
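+
+# Usage sketch (illustrative; the device name, baud rate and protocol below
+# are assumptions).  SerialPort attaches a Protocol to a serial device and
+# is then driven by the reactor like any other transport.
+def _exampleOpenSerialPort():
+    from twisted.internet import reactor
+    from twisted.internet.protocol import Protocol
+    from twisted.internet.serialport import SerialPort
+
+    class Dump(Protocol):
+        def dataReceived(self, data):
+            # Print each chunk read from the serial line.
+            print("serial data: %r" % (data,))
+
+    SerialPort(Dump(), '/dev/ttyUSB0', reactor, baudrate=9600)
+    reactor.run()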
diff --git a/ThirdParty/Twisted/twisted/internet/ssl.py b/ThirdParty/Twisted/twisted/internet/ssl.py
new file mode 100644
index 0000000..8f5d989
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/ssl.py
@@ -0,0 +1,205 @@
+# -*- test-case-name: twisted.test.test_ssl -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+SSL transport. Requires PyOpenSSL (http://pypi.python.org/pypi/pyOpenSSL).
+
+SSL connections require a ContextFactory so they can create SSL contexts.
+End users should only use the ContextFactory classes directly; for SSL
+connections, use reactor.connectSSL/listenSSL and so on, as documented
+in IReactorSSL.
+
+All server context factories should inherit from ContextFactory, and all
+client context factories should inherit from ClientContextFactory. At the
+moment this is not enforced, but in the future it might be.
+
+Future Plans:
+    - split module so reactor-specific classes are in a separate module
+"""
+
+from __future__ import division, absolute_import
+
+# System imports
+from OpenSSL import SSL
+supported = True
+
+from zope.interface import implementer, implementer_only, implementedBy
+
+# Twisted imports
+from twisted.internet import tcp, interfaces
+
+
+class ContextFactory:
+    """A factory for SSL context objects, for server SSL connections."""
+
+    isClient = 0
+
+    def getContext(self):
+        """Return a SSL.Context object. override in subclasses."""
+        raise NotImplementedError
+
+
+class DefaultOpenSSLContextFactory(ContextFactory):
+    """
+    L{DefaultOpenSSLContextFactory} is a factory for server-side SSL context
+    objects.  These objects define certain parameters related to SSL
+    handshakes and the subsequent connection.
+
+    @ivar _contextFactory: A callable which will be used to create new
+        context objects.  This is typically L{SSL.Context}.
+    """
+    _context = None
+
+    def __init__(self, privateKeyFileName, certificateFileName,
+                 sslmethod=SSL.SSLv23_METHOD, _contextFactory=SSL.Context):
+        """
+        @param privateKeyFileName: Name of a file containing a private key
+        @param certificateFileName: Name of a file containing a certificate
+        @param sslmethod: The SSL method to use
+        """
+        self.privateKeyFileName = privateKeyFileName
+        self.certificateFileName = certificateFileName
+        self.sslmethod = sslmethod
+        self._contextFactory = _contextFactory
+
+        # Create a context object right now.  This is to force validation of
+        # the given parameters so that errors are detected earlier rather
+        # than later.
+        self.cacheContext()
+
+
+    def cacheContext(self):
+        if self._context is None:
+            ctx = self._contextFactory(self.sslmethod)
+            # Disallow SSLv2!  It's insecure!  SSLv3 has been around since
+            # 1996.  It's time to move on.
+            ctx.set_options(SSL.OP_NO_SSLv2)
+            ctx.use_certificate_file(self.certificateFileName)
+            ctx.use_privatekey_file(self.privateKeyFileName)
+            self._context = ctx
+
+
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        del d['_context']
+        return d
+
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+
+
+    def getContext(self):
+        """
+        Return an SSL context.
+        """
+        return self._context
+
+
+class ClientContextFactory:
+    """A context factory for SSL clients."""
+
+    isClient = 1
+
+    # SSLv23_METHOD allows SSLv2, SSLv3, and TLSv1.  We disable SSLv2 below,
+    # though.
+    method = SSL.SSLv23_METHOD
+
+    _contextFactory = SSL.Context
+
+    def getContext(self):
+        ctx = self._contextFactory(self.method)
+        # See comment in DefaultOpenSSLContextFactory about SSLv2.
+        ctx.set_options(SSL.OP_NO_SSLv2)
+        return ctx
+
+
+
+@implementer_only(interfaces.ISSLTransport,
+                 *[i for i in implementedBy(tcp.Client)
+                   if i != interfaces.ITLSTransport])
+class Client(tcp.Client):
+    """
+    I am an SSL client.
+    """
+
+    def __init__(self, host, port, bindAddress, ctxFactory, connector, reactor=None):
+        # tcp.Client.__init__ depends on self.ctxFactory being set
+        self.ctxFactory = ctxFactory
+        tcp.Client.__init__(self, host, port, bindAddress, connector, reactor)
+
+    def _connectDone(self):
+        self.startTLS(self.ctxFactory)
+        self.startWriting()
+        tcp.Client._connectDone(self)
+
+
+
+ at implementer(interfaces.ISSLTransport)
+class Server(tcp.Server):
+    """
+    I am an SSL server.
+    """
+
+    def __init__(self, *args, **kwargs):
+        tcp.Server.__init__(self, *args, **kwargs)
+        self.startTLS(self.server.ctxFactory)
+
+
+
+class Port(tcp.Port):
+    """
+    I am an SSL port.
+    """
+    transport = Server
+
+    _type = 'TLS'
+
+    def __init__(self, port, factory, ctxFactory, backlog=50, interface='', reactor=None):
+        tcp.Port.__init__(self, port, factory, backlog, interface, reactor)
+        self.ctxFactory = ctxFactory
+
+        # Force some parameter checking in pyOpenSSL.  It's better to fail now
+        # than after we've set up the transport.
+        ctxFactory.getContext()
+
+
+    def _getLogPrefix(self, factory):
+        """
+        Override the normal prefix to include an annotation indicating this is a
+        port for TLS connections.
+        """
+        return tcp.Port._getLogPrefix(self, factory) + ' (TLS)'
+
+
+
+class Connector(tcp.Connector):
+    def __init__(self, host, port, factory, contextFactory, timeout, bindAddress, reactor=None):
+        self.contextFactory = contextFactory
+        tcp.Connector.__init__(self, host, port, factory, timeout, bindAddress, reactor)
+
+        # Force some parameter checking in pyOpenSSL.  It's better to fail now
+        # than after we've set up the transport.
+        contextFactory.getContext()
+
+
+    def _makeTransport(self):
+        return Client(self.host, self.port, self.bindAddress, self.contextFactory, self, self.reactor)
+
+
+
+from twisted.internet._sslverify import DistinguishedName, DN, Certificate
+from twisted.internet._sslverify import CertificateRequest, PrivateCertificate
+from twisted.internet._sslverify import KeyPair
+from twisted.internet._sslverify import OpenSSLCertificateOptions as CertificateOptions
+
+__all__ = [
+    "ContextFactory", "DefaultOpenSSLContextFactory", "ClientContextFactory",
+
+    'DistinguishedName', 'DN',
+    'Certificate', 'CertificateRequest', 'PrivateCertificate',
+    'KeyPair',
+    'CertificateOptions',
+    ]
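+
+# Usage sketch (illustrative; the key/certificate paths, port number and the
+# bare Factory/ClientFactory placeholders are assumptions).  As described in
+# the module docstring, context factories are handed to the reactor's SSL
+# APIs rather than used to open connections themselves.
+def _exampleSSLWithReactor():
+    from twisted.internet import reactor, ssl
+    from twisted.internet.protocol import Factory, ClientFactory
+
+    # Server side: the private key and certificate files define the context.
+    serverContext = ssl.DefaultOpenSSLContextFactory('server.key', 'server.crt')
+    reactor.listenSSL(4433, Factory(), serverContext)
+
+    # Client side: ClientContextFactory suffices when no client certificate
+    # is required.
+    reactor.connectSSL('localhost', 4433, ClientFactory(),
+                       ssl.ClientContextFactory())
+    reactor.run()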
diff --git a/ThirdParty/Twisted/twisted/internet/stdio.py b/ThirdParty/Twisted/twisted/internet/stdio.py
new file mode 100644
index 0000000..201e5f1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/stdio.py
@@ -0,0 +1,35 @@
+# -*- test-case-name: twisted.test.test_stdio -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Standard input/out/err support.
+
+This module exposes one name, StandardIO, which is a factory that takes an
+IProtocol provider as an argument.  It connects that protocol to standard input
+and output on the current process.
+
+It should work on any UNIX and also on Win32 (with some caveats: due to
+platform limitations, it will perform very poorly on Win32).
+
+Future Plans::
+
+    support for stderr, perhaps
+    Rewrite to use the reactor instead of an ad-hoc mechanism for connecting
+        protocols to transport.
+
+
+Maintainer: James Y Knight
+"""
+
+from twisted.python.runtime import platform
+
+if platform.isWindows():
+    from twisted.internet import _win32stdio
+    StandardIO = _win32stdio.StandardIO
+    PipeAddress = _win32stdio.Win32PipeAddress
+
+else:
+    from twisted.internet._posixstdio import StandardIO, PipeAddress 
+
+__all__ = ['StandardIO', 'PipeAddress']
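+
+# Usage sketch (illustrative; the Echo protocol is an assumption).
+# StandardIO attaches a protocol to the process's stdin/stdout.
+def _exampleStandardIO():
+    from twisted.internet import reactor, stdio
+    from twisted.internet.protocol import Protocol
+
+    class Echo(Protocol):
+        def dataReceived(self, data):
+            # Anything read from stdin is written back to stdout.
+            self.transport.write(data)
+
+    stdio.StandardIO(Echo())
+    reactor.run()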
diff --git a/ThirdParty/Twisted/twisted/internet/task.py b/ThirdParty/Twisted/twisted/internet/task.py
new file mode 100644
index 0000000..6e7b908
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/task.py
@@ -0,0 +1,857 @@
+# -*- test-case-name: twisted.test.test_task,twisted.test.test_cooperator -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Scheduling utility methods and classes.
+"""
+
+from __future__ import division, absolute_import
+
+__metaclass__ = type
+
+import sys
+import time
+
+from zope.interface import implementer
+
+from twisted.python import log
+from twisted.python import _reflectpy3 as reflect
+from twisted.python.failure import Failure
+
+from twisted.internet import base, defer
+from twisted.internet.interfaces import IReactorTime
+from twisted.internet.error import ReactorNotRunning
+
+
+class LoopingCall:
+    """Call a function repeatedly.
+
+    If C{f} returns a deferred, rescheduling will not take place until the
+    deferred has fired. The result value is ignored.
+
+    @ivar f: The function to call.
+    @ivar a: A tuple of arguments to pass the function.
+    @ivar kw: A dictionary of keyword arguments to pass to the function.
+    @ivar clock: A provider of
+        L{twisted.internet.interfaces.IReactorTime}.  The default is
+        L{twisted.internet.reactor}. Feel free to set this to
+        something else, but it probably ought to be set *before*
+        calling L{start}.
+
+    @type running: C{bool}
+    @ivar running: A flag which is C{True} while C{f} is scheduled to be called
+        (or is currently being called). It is set to C{True} when L{start} is
+        called and set to C{False} when L{stop} is called or if C{f} raises an
+        exception. In either case, it will be C{False} by the time the
+        C{Deferred} returned by L{start} fires its callback or errback.
+
+    @type _expectNextCallAt: C{float}
+    @ivar _expectNextCallAt: The time at which this instance most recently
+        scheduled itself to run.
+
+    @type _realLastTime: C{float}
+    @ivar _realLastTime: When counting skips, the time at which the skip
+        counter was last invoked.
+
+    @type _runAtStart: C{bool}
+    @ivar _runAtStart: A flag indicating whether the 'now' argument was passed
+        to L{LoopingCall.start}.
+    """
+
+    call = None
+    running = False
+    deferred = None
+    interval = None
+    _expectNextCallAt = 0.0
+    _runAtStart = False
+    starttime = None
+
+    def __init__(self, f, *a, **kw):
+        self.f = f
+        self.a = a
+        self.kw = kw
+        from twisted.internet import reactor
+        self.clock = reactor
+
+
+    def withCount(cls, countCallable):
+        """
+        An alternate constructor for L{LoopingCall} that makes available the
+        number of calls which should have occurred since it was last invoked.
+
+        Note that this number is an C{int} value; it represents the discrete
+        number of calls that should have been made.  For example, if you are
+        using a looping call to display an animation with discrete frames, this
+        number would be the number of frames to advance.
+
+        The count is normally 1, but can be higher. For example, if the reactor
+        is blocked and takes too long to invoke the L{LoopingCall}, if a
+        Deferred returned from a previous call has not fired before an interval
+        has elapsed, or if the callable itself blocks for longer than an
+        interval, preventing I{itself} from being called.
+
+        @param countCallable: A callable that will be invoked each time the
+            resulting LoopingCall is run, with an integer specifying the number
+            of calls that should have been invoked.
+
+        @type countCallable: 1-argument callable which takes an C{int}
+
+        @return: An instance of L{LoopingCall} with call counting enabled,
+            which provides the count as the first positional argument.
+
+        @rtype: L{LoopingCall}
+
+        @since: 9.0
+        """
+
+        def counter():
+            now = self.clock.seconds()
+            lastTime = self._realLastTime
+            if lastTime is None:
+                lastTime = self.starttime
+                if self._runAtStart:
+                    lastTime -= self.interval
+            self._realLastTime = now
+            lastInterval = self._intervalOf(lastTime)
+            thisInterval = self._intervalOf(now)
+            count = thisInterval - lastInterval
+            return countCallable(count)
+
+        self = cls(counter)
+
+        self._realLastTime = None
+
+        return self
+
+    withCount = classmethod(withCount)
+
+
+    def _intervalOf(self, t):
+        """
+        Determine the number of intervals passed as of the given point in
+        time.
+
+        @param t: The specified time (from the start of the L{LoopingCall}) to
+            be measured in intervals
+
+        @return: The C{int} number of intervals which have passed as of the
+            given point in time.
+        """
+        elapsedTime = t - self.starttime
+        intervalNum = int(elapsedTime / self.interval)
+        return intervalNum
+
+
+    def start(self, interval, now=True):
+        """
+        Start running function every interval seconds.
+
+        @param interval: The number of seconds between calls.  May be
+        less than one.  Precision will depend on the underlying
+        platform, the available hardware, and the load on the system.
+
+        @param now: If True, run this call right now.  Otherwise, wait
+        until the interval has elapsed before beginning.
+
+        @return: A Deferred whose callback will be invoked with
+        C{self} when C{self.stop} is called, or whose errback will be
+        invoked when the function raises an exception or returns a
+        deferred that has its errback invoked.
+        """
+        assert not self.running, ("Tried to start an already running "
+                                  "LoopingCall.")
+        if interval < 0:
+            raise ValueError("interval must be >= 0")
+        self.running = True
+        d = self.deferred = defer.Deferred()
+        self.starttime = self.clock.seconds()
+        self._expectNextCallAt = self.starttime
+        self.interval = interval
+        self._runAtStart = now
+        if now:
+            self()
+        else:
+            self._reschedule()
+        return d
+
+    def stop(self):
+        """Stop running function.
+        """
+        assert self.running, ("Tried to stop a LoopingCall that was "
+                              "not running.")
+        self.running = False
+        if self.call is not None:
+            self.call.cancel()
+            self.call = None
+            d, self.deferred = self.deferred, None
+            d.callback(self)
+
+    def reset(self):
+        """
+        Skip the next iteration and reset the timer.
+
+        @since: 11.1
+        """
+        assert self.running, ("Tried to reset a LoopingCall that was "
+                              "not running.")
+        if self.call is not None:
+            self.call.cancel()
+            self.call = None
+            self._expectNextCallAt = self.clock.seconds()
+            self._reschedule()
+
+    def __call__(self):
+        def cb(result):
+            if self.running:
+                self._reschedule()
+            else:
+                d, self.deferred = self.deferred, None
+                d.callback(self)
+
+        def eb(failure):
+            self.running = False
+            d, self.deferred = self.deferred, None
+            d.errback(failure)
+
+        self.call = None
+        d = defer.maybeDeferred(self.f, *self.a, **self.kw)
+        d.addCallback(cb)
+        d.addErrback(eb)
+
+
+    def _reschedule(self):
+        """
+        Schedule the next iteration of this looping call.
+        """
+        if self.interval == 0:
+            self.call = self.clock.callLater(0, self)
+            return
+
+        currentTime = self.clock.seconds()
+        # Find how long is left until the interval comes around again.
+        untilNextTime = (self._expectNextCallAt - currentTime) % self.interval
+        # Make sure it is in the future, in case more than one interval worth
+        # of time passed since the previous call was made.
+        nextTime = max(
+            self._expectNextCallAt + self.interval, currentTime + untilNextTime)
+        # If the interval falls on the current time exactly, skip it and
+        # schedule the call for the next interval.
+        if nextTime == currentTime:
+            nextTime += self.interval
+        self._expectNextCallAt = nextTime
+        self.call = self.clock.callLater(nextTime - currentTime, self)
+
+
+    def __repr__(self):
+        if hasattr(self.f, '__qualname__'):
+            func = self.f.__qualname__
+        elif hasattr(self.f, '__name__'):
+            func = self.f.__name__
+            if hasattr(self.f, 'im_class'):
+                func = self.f.im_class.__name__ + '.' + func
+        else:
+            func = reflect.safe_repr(self.f)
+
+        return 'LoopingCall<%r>(%s, *%s, **%s)' % (
+            self.interval, func, reflect.safe_repr(self.a),
+            reflect.safe_repr(self.kw))
+
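+
+# Usage sketch (illustrative; the poll function, interval and timings are
+# assumptions).  start() returns a Deferred that fires when stop() is called,
+# or errbacks if the wrapped function fails, as documented above.
+def _exampleLoopingCall():
+    from twisted.internet import reactor
+
+    def poll():
+        print("polling...")
+
+    loop = LoopingCall(poll)
+    d = loop.start(5.0, now=True)      # call poll() now, then every 5 seconds
+    reactor.callLater(30, loop.stop)   # stop the loop after 30 seconds
+    d.addCallback(lambda _: reactor.stop())
+    reactor.run()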
+
+
+class SchedulerError(Exception):
+    """
+    The operation could not be completed because the scheduler or one of its
+    tasks was in an invalid state.  This exception should not be raised
+    directly, but is a superclass of various scheduler-state-related
+    exceptions.
+    """
+
+
+
+class SchedulerStopped(SchedulerError):
+    """
+    The operation could not complete because the scheduler was stopped while
+    the operation was in progress, or was already stopped.
+    """
+
+
+
+class TaskFinished(SchedulerError):
+    """
+    The operation could not complete because the task was already completed,
+    stopped, encountered an error or otherwise permanently stopped running.
+    """
+
+
+
+class TaskDone(TaskFinished):
+    """
+    The operation could not complete because the task was already completed.
+    """
+
+
+
+class TaskStopped(TaskFinished):
+    """
+    The operation could not complete because the task was stopped.
+    """
+
+
+
+class TaskFailed(TaskFinished):
+    """
+    The operation could not complete because the task died with an unhandled
+    error.
+    """
+
+
+
+class NotPaused(SchedulerError):
+    """
+    This exception is raised when a task is resumed which was not previously
+    paused.
+    """
+
+
+
+class _Timer(object):
+    MAX_SLICE = 0.01
+    def __init__(self):
+        self.end = time.time() + self.MAX_SLICE
+
+
+    def __call__(self):
+        return time.time() >= self.end
+
+
+
+_EPSILON = 0.00000001
+def _defaultScheduler(x):
+    from twisted.internet import reactor
+    return reactor.callLater(_EPSILON, x)
+
+
+class CooperativeTask(object):
+    """
+    A L{CooperativeTask} is a task object inside a L{Cooperator}, which can be
+    paused, resumed, and stopped.  It can also have its completion (or
+    termination) monitored.
+
+    @see: L{Cooperator.cooperate}
+
+    @ivar _iterator: the iterator to iterate when this L{CooperativeTask} is
+        asked to do work.
+
+    @ivar _cooperator: the L{Cooperator} that this L{CooperativeTask}
+        participates in, which is used to re-insert it upon resume.
+
+    @ivar _deferreds: the list of L{defer.Deferred}s to fire when this task
+        completes, fails, or finishes.
+
+    @type _deferreds: C{list}
+
+    @type _cooperator: L{Cooperator}
+
+    @ivar _pauseCount: the number of times that this L{CooperativeTask} has
+        been paused; if 0, it is running.
+
+    @type _pauseCount: C{int}
+
+    @ivar _completionState: The completion-state of this L{CooperativeTask}.
+        C{None} if the task is not yet completed, an instance of L{TaskStopped}
+        if C{stop} was called to stop this task early, of L{TaskFailed} if the
+        application code in the iterator raised an exception which caused it to
+        terminate, and of L{TaskDone} if it terminated normally via raising
+        C{StopIteration}.
+
+    @type _completionState: L{TaskFinished}
+    """
+
+    def __init__(self, iterator, cooperator):
+        """
+        A private constructor: to create a new L{CooperativeTask}, see
+        L{Cooperator.cooperate}.
+        """
+        self._iterator = iterator
+        self._cooperator = cooperator
+        self._deferreds = []
+        self._pauseCount = 0
+        self._completionState = None
+        self._completionResult = None
+        cooperator._addTask(self)
+
+
+    def whenDone(self):
+        """
+        Get a L{defer.Deferred} notification of when this task is complete.
+
+        @return: a L{defer.Deferred} that fires with the C{iterator} that this
+            L{CooperativeTask} was created with when the iterator has been
+            exhausted (i.e. its C{next} method has raised C{StopIteration}), or
+            fails with the exception raised by C{next} if it raises some other
+            exception.
+
+        @rtype: L{defer.Deferred}
+        """
+        d = defer.Deferred()
+        if self._completionState is None:
+            self._deferreds.append(d)
+        else:
+            d.callback(self._completionResult)
+        return d
+
+
+    def pause(self):
+        """
+        Pause this L{CooperativeTask}.  Stop doing work until
+        L{CooperativeTask.resume} is called.  If C{pause} is called more than
+        once, C{resume} must be called an equal number of times to resume this
+        task.
+
+        @raise TaskFinished: if this task has already finished or completed.
+        """
+        self._checkFinish()
+        self._pauseCount += 1
+        if self._pauseCount == 1:
+            self._cooperator._removeTask(self)
+
+
+    def resume(self):
+        """
+        Resume processing of a paused L{CooperativeTask}.
+
+        @raise NotPaused: if this L{CooperativeTask} is not paused.
+        """
+        if self._pauseCount == 0:
+            raise NotPaused()
+        self._pauseCount -= 1
+        if self._pauseCount == 0 and self._completionState is None:
+            self._cooperator._addTask(self)
+
+
+    def _completeWith(self, completionState, deferredResult):
+        """
+        @param completionState: a L{TaskFinished} exception or a subclass
+            thereof, indicating what exception should be raised when subsequent
+            operations are performed.
+
+        @param deferredResult: the result to fire all the deferreds with.
+        """
+        self._completionState = completionState
+        self._completionResult = deferredResult
+        if not self._pauseCount:
+            self._cooperator._removeTask(self)
+
+        # The Deferreds need to be invoked after all this is completed, because
+        # a Deferred may want to manipulate other tasks in a Cooperator.  For
+        # example, if you call "stop()" on a cooperator in a callback on a
+        # Deferred returned from whenDone(), this CooperativeTask must be gone
+        # from the Cooperator by that point so that _completeWith is not
+        # invoked reentrantly; that would cause these Deferreds to blow up with
+        # an AlreadyCalledError, or the _removeTask to fail with a ValueError.
+        for d in self._deferreds:
+            d.callback(deferredResult)
+
+
+    def stop(self):
+        """
+        Stop further processing of this task.
+
+        @raise TaskFinished: if this L{CooperativeTask} has previously
+            completed, via C{stop}, completion, or failure.
+        """
+        self._checkFinish()
+        self._completeWith(TaskStopped(), Failure(TaskStopped()))
+
+
+    def _checkFinish(self):
+        """
+        If this task has been stopped, raise the appropriate subclass of
+        L{TaskFinished}.
+        """
+        if self._completionState is not None:
+            raise self._completionState
+
+
+    def _oneWorkUnit(self):
+        """
+        Perform one unit of work for this task, retrieving one item from its
+        iterator, stopping if there are no further items in the iterator, and
+        pausing if the result was a L{defer.Deferred}.
+        """
+        try:
+            result = next(self._iterator)
+        except StopIteration:
+            self._completeWith(TaskDone(), self._iterator)
+        except:
+            self._completeWith(TaskFailed(), Failure())
+        else:
+            if isinstance(result, defer.Deferred):
+                self.pause()
+                def failLater(f):
+                    self._completeWith(TaskFailed(), f)
+                result.addCallbacks(lambda result: self.resume(),
+                                    failLater)
+
+
+
+class Cooperator(object):
+    """
+    Cooperative task scheduler.
+    """
+
+    def __init__(self,
+                 terminationPredicateFactory=_Timer,
+                 scheduler=_defaultScheduler,
+                 started=True):
+        """
+        Create a scheduler-like object to which iterators may be added.
+
+        @param terminationPredicateFactory: A no-argument callable which will
+        be invoked at the beginning of each step and should return a
+        no-argument callable which will return True when the step should be
+        terminated.  The default factory is time-based and allows iterators to
+        run for 1/100th of a second at a time.
+
+        @param scheduler: A one-argument callable which takes a no-argument
+        callable and should invoke it at some future point.  This will be used
+        to schedule each step of this Cooperator.
+
+        @param started: A boolean which indicates whether iterators should be
+        stepped as soon as they are added, or if they will be queued up until
+        L{Cooperator.start} is called.
+        """
+        self._tasks = []
+        self._metarator = iter(())
+        self._terminationPredicateFactory = terminationPredicateFactory
+        self._scheduler = scheduler
+        self._delayedCall = None
+        self._stopped = False
+        self._started = started
+
+
+    def coiterate(self, iterator, doneDeferred=None):
+        """
+        Add an iterator to the list of iterators this L{Cooperator} is
+        currently running.
+
+        @param doneDeferred: If specified, this will be the Deferred used as
+            the completion deferred.  It is suggested that you use the default,
+            which creates a new Deferred for you.
+
+        @return: a Deferred that will fire when the iterator finishes.
+        """
+        if doneDeferred is None:
+            doneDeferred = defer.Deferred()
+        CooperativeTask(iterator, self).whenDone().chainDeferred(doneDeferred)
+        return doneDeferred
+
+
+    def cooperate(self, iterator):
+        """
+        Start running the given iterator as a long-running cooperative task, by
+        calling next() on it as a periodic timed event.
+
+        @param iterator: the iterator to invoke.
+
+        @return: a L{CooperativeTask} object representing this task.
+        """
+        return CooperativeTask(iterator, self)
+
+
+    def _addTask(self, task):
+        """
+        Add a L{CooperativeTask} object to this L{Cooperator}.
+        """
+        if self._stopped:
+            self._tasks.append(task) # XXX silly, I know, but _completeWith
+                                     # does the inverse
+            task._completeWith(SchedulerStopped(), Failure(SchedulerStopped()))
+        else:
+            self._tasks.append(task)
+            self._reschedule()
+
+
+    def _removeTask(self, task):
+        """
+        Remove a L{CooperativeTask} from this L{Cooperator}.
+        """
+        self._tasks.remove(task)
+        # If no work left to do, cancel the delayed call:
+        if not self._tasks and self._delayedCall:
+            self._delayedCall.cancel()
+            self._delayedCall = None
+
+
+    def _tasksWhileNotStopped(self):
+        """
+        Yield all L{CooperativeTask} objects in a loop as long as this
+        L{Cooperator}'s termination condition has not been met.
+        """
+        terminator = self._terminationPredicateFactory()
+        while self._tasks:
+            for t in self._metarator:
+                yield t
+                if terminator():
+                    return
+            self._metarator = iter(self._tasks)
+
+
+    def _tick(self):
+        """
+        Run one scheduler tick.
+        """
+        self._delayedCall = None
+        for taskObj in self._tasksWhileNotStopped():
+            taskObj._oneWorkUnit()
+        self._reschedule()
+
+
+    _mustScheduleOnStart = False
+    def _reschedule(self):
+        if not self._started:
+            self._mustScheduleOnStart = True
+            return
+        if self._delayedCall is None and self._tasks:
+            self._delayedCall = self._scheduler(self._tick)
+
+
+    def start(self):
+        """
+        Begin scheduling steps.
+        """
+        self._stopped = False
+        self._started = True
+        if self._mustScheduleOnStart:
+            del self._mustScheduleOnStart
+            self._reschedule()
+
+
+    def stop(self):
+        """
+        Stop scheduling steps.  Errback the completion Deferreds of all
+        iterators which have been added and forget about them.
+        """
+        self._stopped = True
+        for taskObj in self._tasks:
+            taskObj._completeWith(SchedulerStopped(),
+                                  Failure(SchedulerStopped()))
+        self._tasks = []
+        if self._delayedCall is not None:
+            self._delayedCall.cancel()
+            self._delayedCall = None
+
+
+
+_theCooperator = Cooperator()
+
+def coiterate(iterator):
+    """
+    Cooperatively iterate over the given iterator, dividing runtime between it
+    and all other iterators which have been passed to this function and not yet
+    exhausted.
+
+    @param iterator: the iterator to invoke.
+
+    @return: a Deferred that will fire when the iterator finishes.
+    """
+    return _theCooperator.coiterate(iterator)
+
+
+
+def cooperate(iterator):
+    """
+    Start running the given iterator as a long-running cooperative task, by
+    calling next() on it as a periodic timed event.
+
+    @param iterator: the iterator to invoke.
+
+    @return: a L{CooperativeTask} object representing this task.
+    """
+    return _theCooperator.cooperate(iterator)
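+
+# Usage sketch (illustrative; the work generator is an assumption).
+# cooperate() steps the iterator a little at a time so that long-running
+# work does not block the reactor.
+def _exampleCooperate():
+    from twisted.internet import reactor
+
+    def work():
+        for i in range(1000):
+            # Each yield hands control back so other events can run.
+            yield i
+
+    done = cooperate(work()).whenDone()
+    done.addCallback(lambda _: reactor.stop())
+    reactor.run()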
+
+
+
+@implementer(IReactorTime)
+class Clock:
+    """
+    Provide a deterministic, easily-controlled implementation of
+    L{IReactorTime.callLater}.  This is commonly useful for writing
+    deterministic unit tests for code which schedules events using this API.
+    """
+
+    rightNow = 0.0
+
+    def __init__(self):
+        self.calls = []
+
+
+    def seconds(self):
+        """
+        Pretend to be time.time().  This is used internally when an operation
+        such as L{IDelayedCall.reset} needs to determine a time value
+        relative to the current time.
+
+        @rtype: C{float}
+        @return: The time which should be considered the current time.
+        """
+        return self.rightNow
+
+
+    def _sortCalls(self):
+        """
+        Sort the pending calls according to the time they are scheduled.
+        """
+        self.calls.sort(key=lambda a: a.getTime())
+
+
+    def callLater(self, when, what, *a, **kw):
+        """
+        See L{twisted.internet.interfaces.IReactorTime.callLater}.
+        """
+        dc = base.DelayedCall(self.seconds() + when,
+                               what, a, kw,
+                               self.calls.remove,
+                               lambda c: None,
+                               self.seconds)
+        self.calls.append(dc)
+        self._sortCalls()
+        return dc
+
+
+    def getDelayedCalls(self):
+        """
+        See L{twisted.internet.interfaces.IReactorTime.getDelayedCalls}
+        """
+        return self.calls
+
+
+    def advance(self, amount):
+        """
+        Move time on this clock forward by the given amount and run whatever
+        pending calls should be run.
+
+        @type amount: C{float}
+        @param amount: The number of seconds by which to advance this clock's
+        time.
+        """
+        self.rightNow += amount
+        self._sortCalls()
+        while self.calls and self.calls[0].getTime() <= self.seconds():
+            call = self.calls.pop(0)
+            call.called = 1
+            call.func(*call.args, **call.kw)
+            self._sortCalls()
+
+
+    def pump(self, timings):
+        """
+        Advance incrementally by the given set of times.
+
+        @type timings: iterable of C{float}
+        """
+        for amount in timings:
+            self.advance(amount)
+
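+
+# Usage sketch (illustrative) of driving scheduled calls deterministically
+# in a test: callLater() is issued against the Clock and advance() fires it
+# without any real waiting.
+def _exampleClockInTest():
+    clock = Clock()
+    fired = []
+    clock.callLater(2.5, fired.append, "done")
+    clock.advance(2.0)
+    assert fired == []           # not due yet
+    clock.advance(1.0)
+    assert fired == ["done"]     # 3.0 simulated seconds have now passed
+    return fired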
+
+
+def deferLater(clock, delay, callable, *args, **kw):
+    """
+    Call the given function after a certain period of time has passed.
+
+    @type clock: L{IReactorTime} provider
+    @param clock: The object which will be used to schedule the delayed
+        call.
+
+    @type delay: C{float} or C{int}
+    @param delay: The number of seconds to wait before calling the function.
+
+    @param callable: The object to call after the delay.
+
+    @param *args: The positional arguments to pass to C{callable}.
+
+    @param **kw: The keyword arguments to pass to C{callable}.
+
+    @rtype: L{defer.Deferred}
+
+    @return: A deferred that fires with the result of the callable when the
+        specified time has elapsed.
+    """
+    def deferLaterCancel(deferred):
+        delayedCall.cancel()
+    d = defer.Deferred(deferLaterCancel)
+    d.addCallback(lambda ignored: callable(*args, **kw))
+    delayedCall = clock.callLater(delay, d.callback, None)
+    return d
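+
+# Usage sketch (illustrative; the delay and callable are assumptions).
+# deferLater() wraps callLater() in a Deferred so further callbacks can be
+# chained onto the delayed result.
+def _exampleDeferLater():
+    from twisted.internet import reactor
+
+    d = deferLater(reactor, 2.0, lambda: "two seconds later")
+
+    def report(result):
+        print(result)            # "two seconds later"
+        reactor.stop()
+    d.addCallback(report)
+    reactor.run()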
+
+
+
+def react(main, argv, _reactor=None):
+    """
+    Call C{main} and run the reactor until the L{Deferred} it returns fires.
+
+    This is intended as the way to start up an application with a well-defined
+    completion condition.  Use it to write clients or one-off asynchronous
+    operations.  Prefer this to calling C{reactor.run} directly, as this
+    function will also:
+
+      - Take care to call C{reactor.stop} once and only once, and at the right
+        time.
+      - Log any failures from the C{Deferred} returned by C{main}.
+      - Exit the application when done, with exit code 0 in case of success and
+        1 in case of failure. If C{main} fails with a C{SystemExit} error, the
+        code returned is used.
+
+    @param main: A callable which returns a L{Deferred}.  It should take as
+        many arguments as there are elements in the list C{argv}.
+
+    @param argv: A list of arguments to pass to C{main}.
+
+    @param _reactor: An implementation detail to allow easier unit testing.  Do
+        not supply this parameter.
+
+    @since: 12.3
+    """
+    if _reactor is None:
+        from twisted.internet import reactor as _reactor
+    finished = main(_reactor, *argv)
+    codes = [0]
+
+    stopping = []
+    _reactor.addSystemEventTrigger('before', 'shutdown', stopping.append, True)
+
+    def stop(result, stopReactor):
+        if stopReactor:
+            try:
+                _reactor.stop()
+            except ReactorNotRunning:
+                pass
+
+        if isinstance(result, Failure):
+            if result.check(SystemExit) is not None:
+                code = result.value.code
+            else:
+                log.err(result, "main function encountered error")
+                code = 1
+            codes[0] = code
+
+    def cbFinish(result):
+        if stopping:
+            stop(result, False)
+        else:
+            _reactor.callWhenRunning(stop, result, True)
+
+    finished.addBoth(cbFinish)
+    _reactor.run()
+    sys.exit(codes[0])
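+
+# Usage sketch (illustrative; the delay argument handling is an assumption).
+# react() runs the reactor just long enough for the Deferred returned by
+# main() to fire, then exits the process with an appropriate status code.
+def _exampleReact():
+    def main(reactor, delay="1"):
+        # Returning a Deferred tells react() when the program is finished.
+        return deferLater(reactor, float(delay), lambda: None)
+
+    react(main, sys.argv[1:2])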
+
+
+__all__ = [
+    'LoopingCall',
+
+    'Clock',
+
+    'SchedulerStopped', 'Cooperator', 'coiterate',
+
+    'deferLater', 'react']
diff --git a/ThirdParty/Twisted/twisted/internet/tcp.py b/ThirdParty/Twisted/twisted/internet/tcp.py
new file mode 100644
index 0000000..619fba4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/tcp.py
@@ -0,0 +1,1183 @@
+# -*- test-case-name: twisted.test.test_tcp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Various asynchronous TCP/IP classes.
+
+End users shouldn't use this module directly - use the reactor APIs instead.
+"""
+
+from __future__ import division, absolute_import
+
+# System Imports
+import types
+import socket
+import sys
+import operator
+import struct
+
+from zope.interface import implementer
+
+from twisted.python.compat import _PY3, lazyByteSlice
+from twisted.python.runtime import platformType
+from twisted.python import versions, deprecate
+
+try:
+    # Try to get the memory BIO based startTLS implementation, available since
+    # pyOpenSSL 0.10
+    from twisted.internet._newtls import (
+        ConnectionMixin as _TLSConnectionMixin,
+        ClientMixin as _TLSClientMixin,
+        ServerMixin as _TLSServerMixin)
+except ImportError:
+    try:
+        if _PY3:
+            # We're never going to port the old SSL code to Python 3:
+            raise
+        # Try to get the socket BIO based startTLS implementation, available in
+        # all pyOpenSSL versions
+        from twisted.internet._oldtls import (
+            ConnectionMixin as _TLSConnectionMixin,
+            ClientMixin as _TLSClientMixin,
+            ServerMixin as _TLSServerMixin)
+    except ImportError:
+        # There is no version of startTLS available
+        class _TLSConnectionMixin(object):
+            TLS = False
+        class _TLSClientMixin(object):
+            pass
+        class _TLSServerMixin(object):
+            pass
+
+if platformType == 'win32':
+    # no such thing as WSAEPERM or error code 10001 according to winsock.h or MSDN
+    EPERM = object()
+    from errno import WSAEINVAL as EINVAL
+    from errno import WSAEWOULDBLOCK as EWOULDBLOCK
+    from errno import WSAEINPROGRESS as EINPROGRESS
+    from errno import WSAEALREADY as EALREADY
+    from errno import WSAECONNRESET as ECONNRESET
+    from errno import WSAEISCONN as EISCONN
+    from errno import WSAENOTCONN as ENOTCONN
+    from errno import WSAEINTR as EINTR
+    from errno import WSAENOBUFS as ENOBUFS
+    from errno import WSAEMFILE as EMFILE
+    # No such thing as WSAENFILE, either.
+    ENFILE = object()
+    # Nor ENOMEM
+    ENOMEM = object()
+    EAGAIN = EWOULDBLOCK
+    from errno import WSAECONNRESET as ECONNABORTED
+
+    from twisted.python.win32 import formatError as strerror
+else:
+    from errno import EPERM
+    from errno import EINVAL
+    from errno import EWOULDBLOCK
+    from errno import EINPROGRESS
+    from errno import EALREADY
+    from errno import ECONNRESET
+    from errno import EISCONN
+    from errno import ENOTCONN
+    from errno import EINTR
+    from errno import ENOBUFS
+    from errno import EMFILE
+    from errno import ENFILE
+    from errno import ENOMEM
+    from errno import EAGAIN
+    from errno import ECONNABORTED
+
+    from os import strerror
+
+
+from errno import errorcode
+
+# Twisted Imports
+from twisted.internet import base, address, fdesc
+from twisted.internet.task import deferLater
+from twisted.python import log, failure, _reflectpy3 as reflect
+from twisted.python._utilpy3 import unsignedID, untilConcludes
+from twisted.internet.error import CannotListenError
+from twisted.internet import abstract, main, interfaces, error
+
+# Not all platforms have, or support, this flag.
+_AI_NUMERICSERV = getattr(socket, "AI_NUMERICSERV", 0)
+
+
+# The type for service names passed to socket.getservbyname:
+if _PY3:
+    _portNameType = str
+else:
+    _portNameType = types.StringTypes
+
+
+
+class _SocketCloser(object):
+    _socketShutdownMethod = 'shutdown'
+
+    def _closeSocket(self, orderly):
+        # The call to shutdown() before close() isn't really necessary, because
+        # we set FD_CLOEXEC now, which will ensure this is the only process
+        # holding the FD, thus ensuring close() really will shutdown the TCP
+        # socket. However, do it anyway, just to be safe.
+        skt = self.socket
+        try:
+            if orderly:
+                if self._socketShutdownMethod is not None:
+                    getattr(skt, self._socketShutdownMethod)(2)
+            else:
+                # Set SO_LINGER to 1,0 which, by convention, causes a
+                # connection reset to be sent when close is called,
+                # instead of the standard FIN shutdown sequence.
+                self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_LINGER,
+                                       struct.pack("ii", 1, 0))
+
+        except socket.error:
+            pass
+        try:
+            skt.close()
+        except socket.error:
+            pass
+
+
+
+class _AbortingMixin(object):
+    """
+    Common implementation of C{abortConnection}.
+
+    @ivar _aborting: Set to C{True} when C{abortConnection} is called.
+    @type _aborting: C{bool}
+    """
+    _aborting = False
+
+    def abortConnection(self):
+        """
+        Aborts the connection immediately, dropping any buffered data.
+
+        @since: 11.1
+        """
+        if self.disconnected or self._aborting:
+            return
+        self._aborting = True
+        self.stopReading()
+        self.stopWriting()
+        self.doRead = lambda *args, **kwargs: None
+        self.doWrite = lambda *args, **kwargs: None
+        self.reactor.callLater(0, self.connectionLost,
+                               failure.Failure(error.ConnectionAborted()))
+
+
+
+@implementer(interfaces.ITCPTransport, interfaces.ISystemHandle)
+class Connection(_TLSConnectionMixin, abstract.FileDescriptor, _SocketCloser,
+                 _AbortingMixin):
+    """
+    Superclass of all socket-based FileDescriptors.
+
+    This is an abstract superclass of all objects which represent a TCP/IP
+    connection based socket.
+
+    @ivar logstr: prefix used when logging events related to this connection.
+    @type logstr: C{str}
+    """
+
+
+    def __init__(self, skt, protocol, reactor=None):
+        abstract.FileDescriptor.__init__(self, reactor=reactor)
+        self.socket = skt
+        self.socket.setblocking(0)
+        self.fileno = skt.fileno
+        self.protocol = protocol
+
+
+    def getHandle(self):
+        """Return the socket for this connection."""
+        return self.socket
+
+
+    def doRead(self):
+        """Calls self.protocol.dataReceived with all available data.
+
+        This reads up to self.bufferSize bytes of data from its socket, then
+        calls self.dataReceived(data) to process it.  If the connection is not
+        lost through an error in the physical recv(), this function will return
+        the result of the dataReceived call.
+        """
+        try:
+            data = self.socket.recv(self.bufferSize)
+        except socket.error as se:
+            if se.args[0] == EWOULDBLOCK:
+                return
+            else:
+                return main.CONNECTION_LOST
+
+        return self._dataReceived(data)
+
+
+    def _dataReceived(self, data):
+        if not data:
+            return main.CONNECTION_DONE
+        rval = self.protocol.dataReceived(data)
+        if rval is not None:
+            offender = self.protocol.dataReceived
+            warningFormat = (
+                'Returning a value other than None from %(fqpn)s is '
+                'deprecated since %(version)s.')
+            warningString = deprecate.getDeprecationWarningString(
+                offender, versions.Version('Twisted', 11, 0, 0),
+                format=warningFormat)
+            deprecate.warnAboutFunction(offender, warningString)
+        return rval
+
+
+    def writeSomeData(self, data):
+        """
+        Write as much as possible of the given data to this TCP connection.
+
+        This sends up to C{self.SEND_LIMIT} bytes from C{data}.  If the
+        connection is lost, an exception is returned.  Otherwise, the number
+        of bytes successfully written is returned.
+        """
+        # Limit length of buffer to try to send, because some OSes are too
+        # stupid to do so themselves (ahem windows)
+        limitedData = lazyByteSlice(data, 0, self.SEND_LIMIT)
+
+        try:
+            return untilConcludes(self.socket.send, limitedData)
+        except socket.error as se:
+            if se.args[0] in (EWOULDBLOCK, ENOBUFS):
+                return 0
+            else:
+                return main.CONNECTION_LOST
+
+
+    def _closeWriteConnection(self):
+        try:
+            getattr(self.socket, self._socketShutdownMethod)(1)
+        except socket.error:
+            pass
+        p = interfaces.IHalfCloseableProtocol(self.protocol, None)
+        if p:
+            try:
+                p.writeConnectionLost()
+            except:
+                f = failure.Failure()
+                log.err()
+                self.connectionLost(f)
+
+
+    def readConnectionLost(self, reason):
+        p = interfaces.IHalfCloseableProtocol(self.protocol, None)
+        if p:
+            try:
+                p.readConnectionLost()
+            except:
+                log.err()
+                self.connectionLost(failure.Failure())
+        else:
+            self.connectionLost(reason)
+
+
+
+    def connectionLost(self, reason):
+        """See abstract.FileDescriptor.connectionLost().
+        """
+        # Make sure we're not called twice, which can happen e.g. if
+        # abortConnection() is called from protocol's dataReceived and then
+        # code immediately after throws an exception that reaches the
+        # reactor. We can't rely on "disconnected" attribute for this check
+        # since twisted.internet._oldtls does evil things to it:
+        if not hasattr(self, "socket"):
+            return
+        abstract.FileDescriptor.connectionLost(self, reason)
+        self._closeSocket(not reason.check(error.ConnectionAborted))
+        protocol = self.protocol
+        del self.protocol
+        del self.socket
+        del self.fileno
+        protocol.connectionLost(reason)
+
+
+    logstr = "Uninitialized"
+
+    def logPrefix(self):
+        """Return the prefix to log with when I own the logging thread.
+        """
+        return self.logstr
+
+    def getTcpNoDelay(self):
+        return operator.truth(self.socket.getsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY))
+
+    def setTcpNoDelay(self, enabled):
+        self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, enabled)
+
+    def getTcpKeepAlive(self):
+        return operator.truth(self.socket.getsockopt(socket.SOL_SOCKET,
+                                                     socket.SO_KEEPALIVE))
+
+    def setTcpKeepAlive(self, enabled):
+        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_KEEPALIVE, enabled)
+
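+# A minimal sketch of how these per-connection options are typically used
+# from a protocol (illustrative only; the protocol class is hypothetical):
+#
+#     from twisted.internet.protocol import Protocol
+#
+#     class LowLatencyProtocol(Protocol):
+#         def connectionMade(self):
+#             # Disable Nagle's algorithm and enable TCP keepalive probes.
+#             self.transport.setTcpNoDelay(True)
+#             self.transport.setTcpKeepAlive(True)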
+
+
+
+class _BaseBaseClient(object):
+    """
+    Code shared with other (non-POSIX) reactors for management of general
+    outgoing connections.
+
+    Requirements upon subclasses are documented as instance variables rather
+    than abstract methods, in order to avoid MRO confusion, since this base is
+    mixed in to unfortunately weird and distinctive multiple-inheritance
+    hierarchies and many of these attributes are provided by peer classes
+    rather than descendant classes in those hierarchies.
+
+    @ivar addressFamily: The address family constant (C{socket.AF_INET},
+        C{socket.AF_INET6}, C{socket.AF_UNIX}) of the underlying socket of this
+        client connection.
+    @type addressFamily: C{int}
+
+    @ivar socketType: The socket type constant (C{socket.SOCK_STREAM} or
+        C{socket.SOCK_DGRAM}) of the underlying socket.
+    @type socketType: C{int}
+
+    @ivar _requiresResolution: A flag indicating whether the address of this
+        client will require name resolution.  C{True} if the hostname of said
+        address indicates a name that must be resolved by hostname lookup,
+        C{False} if it indicates an IP address literal.
+    @type _requiresResolution: C{bool}
+
+    @cvar _commonConnection: Subclasses must provide this attribute, which
+        indicates the L{Connection}-alike class to invoke C{__init__} and
+        C{connectionLost} on.
+    @type _commonConnection: C{type}
+
+    @ivar _stopReadingAndWriting: Subclasses must implement in order to remove
+        this transport from its reactor's notifications in response to a
+        terminated connection attempt.
+    @type _stopReadingAndWriting: 0-argument callable returning C{None}
+
+    @ivar _closeSocket: Subclasses must implement in order to close the socket
+        in response to a terminated connection attempt.
+    @type _closeSocket: 1-argument callable; see L{_SocketCloser._closeSocket}
+
+    @ivar _collectSocketDetails: Clean up references to the attached socket in
+        its underlying OS resource (such as a file descriptor or file handle),
+        as part of post connection-failure cleanup.
+    @type _collectSocketDetails: 0-argument callable returning C{None}.
+
+    @ivar reactor: The class pointed to by C{_commonConnection} should set this
+        attribute in its constructor.
+    @type reactor: L{twisted.internet.interfaces.IReactorTime},
+        L{twisted.internet.interfaces.IReactorCore},
+        L{twisted.internet.interfaces.IReactorFDSet}
+    """
+
+    addressFamily = socket.AF_INET
+    socketType = socket.SOCK_STREAM
+
+    def _finishInit(self, whenDone, skt, error, reactor):
+        """
+        Called by subclasses to continue to the stage of initialization where
+        the socket connect attempt is made.
+
+        @param whenDone: A 0-argument callable to invoke once the connection is
+            set up.  This is C{None} if the connection could not be prepared
+            due to a previous error.
+
+        @param skt: The socket object to use to perform the connection.
+        @type skt: C{socket._socketobject}
+
+        @param error: The error to fail the connection with.
+
+        @param reactor: The reactor to use for this client.
+        @type reactor: L{twisted.internet.interfaces.IReactorTime}
+        """
+        if whenDone:
+            self._commonConnection.__init__(self, skt, None, reactor)
+            reactor.callLater(0, whenDone)
+        else:
+            reactor.callLater(0, self.failIfNotConnected, error)
+
+
+    def resolveAddress(self):
+        """
+        Resolve the name that was passed to this L{_BaseBaseClient}, if
+        necessary, and then move on to attempting the connection once an
+        address has been determined.  (The connection will be attempted
+        immediately within this function if either name resolution can be
+        synchronous or the address was an IP address literal.)
+
+        @note: You don't want to call this method from outside, as it won't do
+            anything useful; it's just part of the connection bootstrapping
+            process.  Also, although this method is on L{_BaseBaseClient} for
+            historical reasons, it's not used anywhere except for L{Client}
+            itself.
+
+        @return: C{None}
+        """
+        if self._requiresResolution:
+            d = self.reactor.resolve(self.addr[0])
+            d.addCallback(lambda n: (n,) + self.addr[1:])
+            d.addCallbacks(self._setRealAddress, self.failIfNotConnected)
+        else:
+            self._setRealAddress(self.addr)
+
+
+    def _setRealAddress(self, address):
+        """
+        Set the resolved address of this L{_BaseBaseClient} and initiate the
+        connection attempt.
+
+        @param address: Depending on whether this is an IPv4 or IPv6 connection
+            attempt, a 2-tuple of C{(host, port)} or a 4-tuple of C{(host,
+            port, flow, scope)}.  At this point it is a fully resolved address,
+            and the 'host' portion will always be an IP address, not a DNS
+            name.
+        """
+        self.realAddress = address
+        self.doConnect()
+
+
+    def failIfNotConnected(self, err):
+        """
+        Generic method called when an attempt to connect fails.  It cleans up
+        everything it can: calls connectionFailed, stops reading and writing,
+        and deletes the socket-related members.
+        """
+        if (self.connected or self.disconnected or
+            not hasattr(self, "connector")):
+            return
+
+        self._stopReadingAndWriting()
+        try:
+            self._closeSocket(True)
+        except AttributeError:
+            pass
+        else:
+            self._collectSocketDetails()
+        self.connector.connectionFailed(failure.Failure(err))
+        del self.connector
+
+
+    def stopConnecting(self):
+        """
+        If a connection attempt is still outstanding (i.e.  no connection is
+        yet established), immediately stop attempting to connect.
+        """
+        self.failIfNotConnected(error.UserError())
+
+
+    def connectionLost(self, reason):
+        """
+        Invoked by lower-level logic when it's time to clean the socket up.
+        Depending on the state of the connection, either inform the attached
+        L{Connector} that the connection attempt has failed, or inform the
+        connected L{IProtocol} that the established connection has been lost.
+
+        @param reason: the reason that the connection was terminated
+        @type reason: L{Failure}
+        """
+        if not self.connected:
+            self.failIfNotConnected(error.ConnectError(string=reason))
+        else:
+            self._commonConnection.connectionLost(self, reason)
+            self.connector.connectionLost(reason)
+
+
+
+class BaseClient(_BaseBaseClient, _TLSClientMixin, Connection):
+    """
+    A base class for client TCP (and similar) sockets.
+
+    @ivar realAddress: The address object that will be used for socket.connect;
+        this address is an address tuple (the number of elements dependent upon
+        the address family) which does not contain any names which need to be
+        resolved.
+    @type realAddress: C{tuple}
+
+    @ivar _base: L{Connection}, the base class of this class, which has all
+        of the useful file descriptor methods.  This is used by
+        L{_TLSClientMixin} to call the right methods to directly manipulate
+        the transport, as is necessary for writing TLS-encrypted bytes
+        (whereas those methods on L{BaseClient} will go through another layer
+        of TLS if it has been enabled).
+    """
+
+    _base = Connection
+    _commonConnection = Connection
+
+    def _stopReadingAndWriting(self):
+        """
+        Implement the POSIX-ish (i.e.
+        L{twisted.internet.interfaces.IReactorFDSet}) method of detaching this
+        socket from the reactor for L{_BaseBaseClient}.
+        """
+        if hasattr(self, "reactor"):
+            # this doesn't happen if we failed in __init__
+            self.stopReading()
+            self.stopWriting()
+
+
+    def _collectSocketDetails(self):
+        """
+        Clean up references to the socket and its file descriptor.
+
+        @see: L{_BaseBaseClient}
+        """
+        del self.socket, self.fileno
+
+
+    def createInternetSocket(self):
+        """(internal) Create a non-blocking socket using
+        self.addressFamily, self.socketType.
+        """
+        s = socket.socket(self.addressFamily, self.socketType)
+        s.setblocking(0)
+        fdesc._setCloseOnExec(s.fileno())
+        return s
+
+
+    def doConnect(self):
+        """
+        Initiate the outgoing connection attempt.
+
+        @note: Applications do not need to call this method; it will be invoked
+            internally as part of L{IReactorTCP.connectTCP}.
+        """
+        self.doWrite = self.doConnect
+        self.doRead = self.doConnect
+        if not hasattr(self, "connector"):
+            # this happens when connection failed but doConnect
+            # was scheduled via a callLater in self._finishInit
+            return
+
+        err = self.socket.getsockopt(socket.SOL_SOCKET, socket.SO_ERROR)
+        if err:
+            self.failIfNotConnected(error.getConnectError((err, strerror(err))))
+            return
+
+        # doConnect gets called twice.  The first time we actually need to
+        # start the connection attempt.  The second time we don't really
+        # want to (SO_ERROR above will have taken care of any errors, and if
+        # it reported none, the mere fact that doConnect was called again is
+        # sufficient to indicate that the connection has succeeded), but it
+        # is not /particularly/ detrimental to do so.  This should get
+        # cleaned up some day, though.
+        try:
+            connectResult = self.socket.connect_ex(self.realAddress)
+        except socket.error as se:
+            connectResult = se.args[0]
+        if connectResult:
+            if connectResult == EISCONN:
+                pass
+            # on Windows EINVAL means sometimes that we should keep trying:
+            # http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winsock/winsock/connect_2.asp
+            elif ((connectResult in (EWOULDBLOCK, EINPROGRESS, EALREADY)) or
+                  (connectResult == EINVAL and platformType == "win32")):
+                self.startReading()
+                self.startWriting()
+                return
+            else:
+                self.failIfNotConnected(error.getConnectError((connectResult, strerror(connectResult))))
+                return
+
+        # If I have reached this point without raising or returning, that means
+        # that the socket is connected.
+        del self.doWrite
+        del self.doRead
+        # we first stop and then start, to reset any references to the old doRead
+        self.stopReading()
+        self.stopWriting()
+        self._connectDone()
+
+
+    def _connectDone(self):
+        """
+        This is a hook for when a connection attempt has succeeded.
+
+        Here, we build the protocol from the
+        L{twisted.internet.protocol.ClientFactory} that was passed in, compute
+        a log string, begin reading so as to send traffic to the newly built
+        protocol, and finally hook up the protocol itself.
+
+        This hook is overridden by L{ssl.Client} to initiate the TLS protocol.
+        """
+        self.protocol = self.connector.buildProtocol(self.getPeer())
+        self.connected = 1
+        logPrefix = self._getLogPrefix(self.protocol)
+        self.logstr = "%s,client" % logPrefix
+        self.startReading()
+        self.protocol.makeConnection(self)
+
+
+
+_NUMERIC_ONLY = socket.AI_NUMERICHOST | _AI_NUMERICSERV
+
+def _resolveIPv6(ip, port):
+    """
+    Resolve an IPv6 literal into an IPv6 address.
+
+    This is necessary to resolve any embedded scope identifiers to the relevant
+    C{sin6_scope_id} for use with C{socket.connect()}, C{socket.listen()}, or
+    C{socket.bind()}; see U{RFC 3493 <https://tools.ietf.org/html/rfc3493>} for
+    more information.
+
+    @param ip: An IPv6 address literal.
+    @type ip: C{str}
+
+    @param port: A port number.
+    @type port: C{int}
+
+    @return: a 4-tuple of C{(host, port, flow, scope)}, suitable for use as an
+        IPv6 address.
+
+    @raise socket.gaierror: if either the IP or port is not numeric as it
+        should be.
+    """
+    return socket.getaddrinfo(ip, port, 0, 0, 0, _NUMERIC_ONLY)[0][4]
+
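+# A minimal sketch of how this helper is used (illustrative only; the actual
+# scope id depends on the local interfaces):
+#
+#     addr = _resolveIPv6("fe80::1%lo0", 4321)
+#     # addr is a (host, port, flow, scopeid) 4-tuple with the numeric
+#     # sin6_scope_id filled in, suitable for socket.connect() or bind().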
+
+
+class _BaseTCPClient(object):
+    """
+    Code shared with other (non-POSIX) reactors for management of outgoing TCP
+    connections (both TCPv4 and TCPv6).
+
+    @note: In order to be functional, this class must be mixed into the same
+        hierarchy as L{_BaseBaseClient}.  It would subclass L{_BaseBaseClient}
+        directly, but the class hierarchy here is divided in strange ways out
+        of the need to share code along multiple axes; specifically, with the
+        IOCP reactor and also with UNIX clients in other reactors.
+
+    @ivar _addressType: The Twisted _IPAddress implementation for this client.
+    @type _addressType: L{IPv4Address} or L{IPv6Address}
+
+    @ivar connector: The L{Connector} which is driving this L{_BaseTCPClient}'s
+        connection attempt.
+
+    @ivar addr: The address that this socket will be connecting to.
+    @type addr: If IPv4, a 2-C{tuple} of C{(str host, int port)}.  If IPv6, a
+        4-C{tuple} of C{(str host, int port, int ignored, int scope)}.
+
+    @ivar createInternetSocket: Subclasses must implement this as a method to
+        create a python socket object of the appropriate address family and
+        socket type.
+    @type createInternetSocket: 0-argument callable returning
+        C{socket._socketobject}.
+    """
+
+    _addressType = address.IPv4Address
+
+    def __init__(self, host, port, bindAddress, connector, reactor=None):
+        # BaseClient.__init__ is invoked later
+        self.connector = connector
+        self.addr = (host, port)
+
+        whenDone = self.resolveAddress
+        err = None
+        skt = None
+
+        if abstract.isIPAddress(host):
+            self._requiresResolution = False
+        elif abstract.isIPv6Address(host):
+            self._requiresResolution = False
+            self.addr = _resolveIPv6(host, port)
+            self.addressFamily = socket.AF_INET6
+            self._addressType = address.IPv6Address
+        else:
+            self._requiresResolution = True
+        try:
+            skt = self.createInternetSocket()
+        except socket.error as se:
+            err = error.ConnectBindError(se.args[0], se.args[1])
+            whenDone = None
+        if whenDone and bindAddress is not None:
+            try:
+                if abstract.isIPv6Address(bindAddress[0]):
+                    bindinfo = _resolveIPv6(*bindAddress)
+                else:
+                    bindinfo = bindAddress
+                skt.bind(bindinfo)
+            except socket.error as se:
+                err = error.ConnectBindError(se.args[0], se.args[1])
+                whenDone = None
+        self._finishInit(whenDone, skt, err, reactor)
+
+
+    def getHost(self):
+        """
+        Returns an L{IPv4Address} or L{IPv6Address}.
+
+        This indicates the address from which I am connecting.
+        """
+        return self._addressType('TCP', *self.socket.getsockname()[:2])
+
+
+    def getPeer(self):
+        """
+        Returns an L{IPv4Address} or L{IPv6Address}.
+
+        This indicates the address that I am connected to.
+        """
+        # an ipv6 realAddress has more than two elements, but the IPv6Address
+        # constructor still only takes two.
+        return self._addressType('TCP', *self.realAddress[:2])
+
+
+    def __repr__(self):
+        s = '<%s to %s at %x>' % (self.__class__, self.addr, unsignedID(self))
+        return s
+
+
+
+class Client(_BaseTCPClient, BaseClient):
+    """
+    A transport for a TCP protocol; either TCPv4 or TCPv6.
+
+    Do not create these directly; use L{IReactorTCP.connectTCP}.
+    """
+
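+# A minimal sketch of the supported way to end up with a Client transport
+# (illustrative only; EchoFactory stands in for any ClientFactory subclass):
+#
+#     from twisted.internet import reactor
+#
+#     reactor.connectTCP("example.com", 8000, EchoFactory())
+#     reactor.run()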
+
+
+class Server(_TLSServerMixin, Connection):
+    """
+    Serverside socket-stream connection class.
+
+    This is a serverside network connection transport; a socket which came from
+    an accept() on a server.
+
+    @ivar _base: L{Connection}, the base class of this class, which has all
+        of the useful file descriptor methods.  This is used by
+        L{_TLSServerMixin} to call the right methods to directly manipulate
+        the transport, as is necessary for writing TLS-encrypted bytes
+        (whereas those methods on L{Server} will go through another layer of
+        TLS if it has been enabled).
+    """
+    _base = Connection
+
+    _addressType = address.IPv4Address
+
+    def __init__(self, sock, protocol, client, server, sessionno, reactor):
+        """
+        Server(sock, protocol, client, server, sessionno, reactor)
+
+        Initialize it with a socket, a protocol, a descriptor for my peer (a
+        tuple of host, port describing the other end of the connection), an
+        instance of Port, a session number, and a reactor.
+        """
+        Connection.__init__(self, sock, protocol, reactor)
+        if len(client) != 2:
+            self._addressType = address.IPv6Address
+        self.server = server
+        self.client = client
+        self.sessionno = sessionno
+        self.hostname = client[0]
+
+        logPrefix = self._getLogPrefix(self.protocol)
+        self.logstr = "%s,%s,%s" % (logPrefix,
+                                    sessionno,
+                                    self.hostname)
+        if self.server is not None:
+            self.repstr = "<%s #%s on %s>" % (self.protocol.__class__.__name__,
+                                              self.sessionno,
+                                              self.server._realPortNumber)
+        self.startReading()
+        self.connected = 1
+
+    def __repr__(self):
+        """
+        A string representation of this connection.
+        """
+        return self.repstr
+
+
+    @classmethod
+    def _fromConnectedSocket(cls, fileDescriptor, addressFamily, factory,
+                             reactor):
+        """
+        Create a new L{Server} based on an existing connected I{SOCK_STREAM}
+        socket.
+
+        Arguments are the same as to L{Server.__init__}, except where noted.
+
+        @param fileDescriptor: An integer file descriptor associated with a
+            connected socket.  The socket must be in non-blocking mode.  Any
+            additional attributes desired, such as I{FD_CLOEXEC}, must also be
+            set already.
+
+        @param addressFamily: The address family (sometimes called I{domain})
+            of the existing socket.  For example, L{socket.AF_INET}.
+
+        @return: A new instance of C{cls} wrapping the socket given by
+            C{fileDescriptor}.
+        """
+        addressType = address.IPv4Address
+        if addressFamily == socket.AF_INET6:
+            addressType = address.IPv6Address
+        skt = socket.fromfd(fileDescriptor, addressFamily, socket.SOCK_STREAM)
+        addr = skt.getpeername()
+        protocolAddr = addressType('TCP', addr[0], addr[1])
+        localPort = skt.getsockname()[1]
+
+        protocol = factory.buildProtocol(protocolAddr)
+        if protocol is None:
+            skt.close()
+            return
+
+        self = cls(skt, protocol, addr, None, addr[1], reactor)
+        self.repstr = "<%s #%s on %s>" % (
+            self.protocol.__class__.__name__, self.sessionno, localPort)
+        protocol.makeConnection(self)
+        return self
+
+
+    def getHost(self):
+        """
+        Returns an L{IPv4Address} or L{IPv6Address}.
+
+        This indicates the server's address.
+        """
+        host, port = self.socket.getsockname()[:2]
+        return self._addressType('TCP', host, port)
+
+
+    def getPeer(self):
+        """
+        Returns an L{IPv4Address} or L{IPv6Address}.
+
+        This indicates the client's address.
+        """
+        return self._addressType('TCP', *self.client[:2])
+
+
+
+ at implementer(interfaces.IListeningPort)
+class Port(base.BasePort, _SocketCloser):
+    """
+    A TCP server port, listening for connections.
+
+    When a connection is accepted, this will call a factory's buildProtocol
+    with the incoming address as an argument, according to the specification
+    described in L{twisted.internet.interfaces.IProtocolFactory}.
+
+    If you wish to change the sort of transport that will be used, the
+    C{transport} attribute will be called with the signature expected for
+    C{Server.__init__}, so it can be replaced.
+
+    @ivar deferred: a deferred created when L{stopListening} is called, and
+        that will fire when the connection is lost.  It should not be used
+        directly: prefer the deferred returned by L{stopListening} instead.
+    @type deferred: L{defer.Deferred}
+
+    @ivar disconnecting: flag indicating that the L{stopListening} method has
+        been called and that no connections should be accepted anymore.
+    @type disconnecting: C{bool}
+
+    @ivar connected: flag set once C{listen} has successfully been called on
+        the socket.
+    @type connected: C{bool}
+
+    @ivar _type: A string describing the connections which will be created by
+        this port.  Normally this is C{"TCP"}, since this is a TCP port, but
+        when the TLS implementation re-uses this class it overrides the value
+        with C{"TLS"}.  Only used for logging.
+
+    @ivar _preexistingSocket: If not C{None}, a L{socket.socket} instance which
+        was created and initialized outside of the reactor and will be used to
+        listen for connections (instead of a new socket being created by this
+        L{Port}).
+    """
+
+    socketType = socket.SOCK_STREAM
+
+    transport = Server
+    sessionno = 0
+    interface = ''
+    backlog = 50
+
+    _type = 'TCP'
+
+    # Actual port number being listened on, only set to a non-None
+    # value when we are actually listening.
+    _realPortNumber = None
+
+    # An externally initialized socket that we will use, rather than creating
+    # our own.
+    _preexistingSocket = None
+
+    addressFamily = socket.AF_INET
+    _addressType = address.IPv4Address
+
+    def __init__(self, port, factory, backlog=50, interface='', reactor=None):
+        """Initialize with a numeric port to listen on.
+        """
+        base.BasePort.__init__(self, reactor=reactor)
+        self.port = port
+        self.factory = factory
+        self.backlog = backlog
+        if abstract.isIPv6Address(interface):
+            self.addressFamily = socket.AF_INET6
+            self._addressType = address.IPv6Address
+        self.interface = interface
+
+
+    @classmethod
+    def _fromListeningDescriptor(cls, reactor, fd, addressFamily, factory):
+        """
+        Create a new L{Port} based on an existing listening I{SOCK_STREAM}
+        socket.
+
+        Arguments are the same as to L{Port.__init__}, except where noted.
+
+        @param fd: An integer file descriptor associated with a listening
+            socket.  The socket must be in non-blocking mode.  Any additional
+            attributes desired, such as I{FD_CLOEXEC}, must also be set already.
+
+        @param addressFamily: The address family (sometimes called I{domain}) of
+            the existing socket.  For example, L{socket.AF_INET}.
+
+        @return: A new instance of C{cls} wrapping the socket given by C{fd}.
+        """
+        port = socket.fromfd(fd, addressFamily, cls.socketType)
+        interface = port.getsockname()[0]
+        self = cls(None, factory, None, interface, reactor)
+        self._preexistingSocket = port
+        return self
+
+
+    def __repr__(self):
+        if self._realPortNumber is not None:
+            return "<%s of %s on %s>" % (self.__class__,
+                self.factory.__class__, self._realPortNumber)
+        else:
+            return "<%s of %s (not listening)>" % (self.__class__, self.factory.__class__)
+
+    def createInternetSocket(self):
+        s = base.BasePort.createInternetSocket(self)
+        if platformType == "posix" and sys.platform != "cygwin":
+            s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        return s
+
+
+    def startListening(self):
+        """Create and bind my socket, and begin listening on it.
+
+        This is called on unserialization, and must be called after creating a
+        server to begin listening on the specified port.
+        """
+        if self._preexistingSocket is None:
+            # Create a new socket and make it listen
+            try:
+                skt = self.createInternetSocket()
+                if self.addressFamily == socket.AF_INET6:
+                    addr = _resolveIPv6(self.interface, self.port)
+                else:
+                    addr = (self.interface, self.port)
+                skt.bind(addr)
+            except socket.error as le:
+                raise CannotListenError(self.interface, self.port, le)
+            skt.listen(self.backlog)
+        else:
+            # Re-use the externally specified socket
+            skt = self._preexistingSocket
+            self._preexistingSocket = None
+            # Avoid shutting it down at the end.
+            self._socketShutdownMethod = None
+
+        # Make sure that if we listened on port 0, we update that to
+        # reflect what the OS actually assigned us.
+        self._realPortNumber = skt.getsockname()[1]
+
+        log.msg("%s starting on %s" % (
+                self._getLogPrefix(self.factory), self._realPortNumber))
+
+        # The order of the next 5 lines is kind of bizarre.  If no one
+        # can explain it, perhaps we should re-arrange them.
+        self.factory.doStart()
+        self.connected = True
+        self.socket = skt
+        self.fileno = self.socket.fileno
+        self.numberAccepts = 100
+
+        self.startReading()
+
+
+    def _buildAddr(self, address):
+        host, port = address[:2]
+        return self._addressType('TCP', host, port)
+
+
+    def doRead(self):
+        """Called when my socket is ready for reading.
+
+        This accepts a connection and calls self.factory.buildProtocol() to
+        handle the wire-level protocol.
+        """
+        try:
+            if platformType == "posix":
+                numAccepts = self.numberAccepts
+            else:
+                # win32 event loop breaks if we do more than one accept()
+                # in an iteration of the event loop.
+                numAccepts = 1
+            for i in range(numAccepts):
+                # we need this so we can deal with a factory's buildProtocol
+                # calling our loseConnection
+                if self.disconnecting:
+                    return
+                try:
+                    skt, addr = self.socket.accept()
+                except socket.error as e:
+                    if e.args[0] in (EWOULDBLOCK, EAGAIN):
+                        self.numberAccepts = i
+                        break
+                    elif e.args[0] == EPERM:
+                        # Netfilter on Linux may have rejected the
+                        # connection, but we get told to try to accept()
+                        # anyway.
+                        continue
+                    elif e.args[0] in (EMFILE, ENOBUFS, ENFILE, ENOMEM, ECONNABORTED):
+
+                        # Linux gives EMFILE when a process is not allowed
+                        # to allocate any more file descriptors.  *BSD and
+                        # Win32 give (WSA)ENOBUFS.  Linux can also give
+                        # ENFILE if the system is out of inodes, or ENOMEM
+                        # if there is insufficient memory to allocate a new
+                        # dentry.  ECONNABORTED is documented as possible on
+                        # both Linux and Windows, but it is not clear
+                        # whether there are actually any circumstances under
+                        # which it can happen (one might expect it to be
+                        # possible if a client sends a FIN or RST after the
+                        # server sends a SYN|ACK but before application code
+                        # calls accept(2), however at least on Linux this
+                        # _seems_ to be short-circuited by syncookies).
+
+                        log.msg("Could not accept new connection (%s)" % (
+                            errorcode[e.args[0]],))
+                        break
+                    raise
+
+                fdesc._setCloseOnExec(skt.fileno())
+                protocol = self.factory.buildProtocol(self._buildAddr(addr))
+                if protocol is None:
+                    skt.close()
+                    continue
+                s = self.sessionno
+                self.sessionno = s+1
+                transport = self.transport(skt, protocol, addr, self, s, self.reactor)
+                protocol.makeConnection(transport)
+            else:
+                self.numberAccepts = self.numberAccepts+20
+        except:
+            # Note that in TLS mode, this will possibly catch SSL.Errors
+            # raised by self.socket.accept()
+            #
+            # There is no "except SSL.Error:" above because SSL may be
+            # None if there is no SSL support.  In any case, all the
+            # "except SSL.Error:" suite would probably do is log.deferr()
+            # and return, so handling it here works just as well.
+            log.deferr()
+
+    def loseConnection(self, connDone=failure.Failure(main.CONNECTION_DONE)):
+        """
+        Stop accepting connections on this port.
+
+        This will shut down the socket and call self.connectionLost().  It
+        returns a deferred which will fire successfully when the port is
+        actually closed, or with a failure if an error occurs shutting down.
+        """
+        self.disconnecting = True
+        self.stopReading()
+        if self.connected:
+            self.deferred = deferLater(
+                self.reactor, 0, self.connectionLost, connDone)
+            return self.deferred
+
+    stopListening = loseConnection
+
+    def _logConnectionLostMsg(self):
+        """
+        Log message for closing port
+        """
+        log.msg('(%s Port %s Closed)' % (self._type, self._realPortNumber))
+
+
+    def connectionLost(self, reason):
+        """
+        Cleans up the socket.
+        """
+        self._logConnectionLostMsg()
+        self._realPortNumber = None
+
+        base.BasePort.connectionLost(self, reason)
+        self.connected = False
+        self._closeSocket(True)
+        del self.socket
+        del self.fileno
+
+        try:
+            self.factory.doStop()
+        finally:
+            self.disconnecting = False
+
+
+    def logPrefix(self):
+        """Returns the name of my class, to prefix log entries with.
+        """
+        return reflect.qual(self.factory.__class__)
+
+
+    def getHost(self):
+        """
+        Return an L{IPv4Address} or L{IPv6Address} indicating the listening
+        address of this port.
+        """
+        host, port = self.socket.getsockname()[:2]
+        return self._addressType('TCP', host, port)
+
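+# A minimal sketch of the usual way a Port comes into existence (illustrative
+# only; EchoFactory stands in for any ServerFactory subclass):
+#
+#     from twisted.internet import reactor
+#
+#     port = reactor.listenTCP(8000, EchoFactory(), interface="127.0.0.1")
+#     # port.getHost() reports the bound address; stopListening() returns a
+#     # Deferred that fires once the listening socket is actually closed.
+#     reactor.run()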
+
+
+class Connector(base.BaseConnector):
+    """
+    A L{Connector} provides an implementation of
+    L{twisted.internet.interfaces.IConnector} for all POSIX-style reactors.
+
+    @ivar _addressType: the type returned by L{Connector.getDestination}.
+        Either L{IPv4Address} or L{IPv6Address}, depending on the type of
+        address.
+    @type _addressType: C{type}
+    """
+    _addressType = address.IPv4Address
+
+    def __init__(self, host, port, factory, timeout, bindAddress, reactor=None):
+        if isinstance(port, _portNameType):
+            try:
+                port = socket.getservbyname(port, 'tcp')
+            except socket.error as e:
+                raise error.ServiceNameUnknownError(string="%s (%r)" % (e, port))
+        self.host, self.port = host, port
+        if abstract.isIPv6Address(host):
+            self._addressType = address.IPv6Address
+        self.bindAddress = bindAddress
+        base.BaseConnector.__init__(self, factory, timeout, reactor)
+
+
+    def _makeTransport(self):
+        """
+        Create a L{Client} bound to this L{Connector}.
+
+        @return: a new L{Client}
+        @rtype: L{Client}
+        """
+        return Client(self.host, self.port, self.bindAddress, self, self.reactor)
+
+
+    def getDestination(self):
+        """
+        @see: L{twisted.internet.interfaces.IConnector.getDestination}.
+        """
+        return self._addressType('TCP', self.host, self.port)
+
+
diff --git a/ThirdParty/Twisted/twisted/internet/test/__init__.py b/ThirdParty/Twisted/twisted/internet/test/__init__.py
new file mode 100644
index 0000000..cf1de2a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet}.
+"""
diff --git a/ThirdParty/Twisted/twisted/internet/test/_posixifaces.py b/ThirdParty/Twisted/twisted/internet/test/_posixifaces.py
new file mode 100644
index 0000000..1238233
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/_posixifaces.py
@@ -0,0 +1,148 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+POSIX implementation of local network interface enumeration.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, socket
+
+from socket import AF_INET, AF_INET6, inet_ntop
+from ctypes import (
+    CDLL, POINTER, Structure, c_char_p, c_ushort, c_int,
+    c_uint32, c_uint8, c_void_p, c_ubyte, pointer, cast)
+from ctypes.util import find_library
+
+from twisted.python.compat import _PY3, nativeString
+
+if _PY3:
+    # Once #6070 is implemented, this can be replaced with the implementation
+    # from that ticket:
+    def chr(i):
+        """
+        Python 3 implementation of Python 2 chr(), i.e. convert an integer to
+        the corresponding byte.
+        """
+        return bytes([i])
+
+
+libc = CDLL(find_library("c"))
+
+if sys.platform == 'darwin':
+    _sockaddrCommon = [
+        ("sin_len", c_uint8),
+        ("sin_family", c_uint8),
+        ]
+else:
+    _sockaddrCommon = [
+        ("sin_family", c_ushort),
+        ]
+
+
+
+class in_addr(Structure):
+    _fields_ = [
+        ("in_addr", c_ubyte * 4),
+        ]
+
+
+
+class in6_addr(Structure):
+    _fields_ = [
+        ("in_addr", c_ubyte * 16),
+        ]
+
+
+
+class sockaddr(Structure):
+    _fields_ = _sockaddrCommon + [
+        ("sin_port", c_ushort),
+        ]
+
+
+
+class sockaddr_in(Structure):
+    _fields_ = _sockaddrCommon + [
+        ("sin_port", c_ushort),
+        ("sin_addr", in_addr),
+        ]
+
+
+
+class sockaddr_in6(Structure):
+    _fields_ = _sockaddrCommon + [
+        ("sin_port", c_ushort),
+        ("sin_flowinfo", c_uint32),
+        ("sin_addr", in6_addr),
+        ]
+
+
+
+class ifaddrs(Structure):
+    pass
+
+ifaddrs_p = POINTER(ifaddrs)
+ifaddrs._fields_ = [
+    ('ifa_next', ifaddrs_p),
+    ('ifa_name', c_char_p),
+    ('ifa_flags', c_uint32),
+    ('ifa_addr', POINTER(sockaddr)),
+    ('ifa_netmask', POINTER(sockaddr)),
+    ('ifa_dstaddr', POINTER(sockaddr)),
+    ('ifa_data', c_void_p)]
+
+getifaddrs = libc.getifaddrs
+getifaddrs.argtypes = [POINTER(ifaddrs_p)]
+getifaddrs.restype = c_int
+
+freeifaddrs = libc.freeifaddrs
+freeifaddrs.argtypes = [ifaddrs_p]
+
+def _interfaces():
+    """
+    Call C{getifaddrs(3)} and return a list of tuples of interface name, address
+    family, and human-readable address representing its results.
+    """
+    ifaddrs = ifaddrs_p()
+    if getifaddrs(pointer(ifaddrs)) < 0:
+        raise OSError()
+    results = []
+    try:
+        while ifaddrs:
+            if ifaddrs[0].ifa_addr:
+                family = ifaddrs[0].ifa_addr[0].sin_family
+                if family == AF_INET:
+                    addr = cast(ifaddrs[0].ifa_addr, POINTER(sockaddr_in))
+                elif family == AF_INET6:
+                    addr = cast(ifaddrs[0].ifa_addr, POINTER(sockaddr_in6))
+                else:
+                    addr = None
+
+                if addr:
+                    packed = b''.join(map(chr, addr[0].sin_addr.in_addr[:]))
+                    results.append((
+                            ifaddrs[0].ifa_name,
+                            family,
+                            inet_ntop(family, packed)))
+
+            ifaddrs = ifaddrs[0].ifa_next
+    finally:
+        freeifaddrs(ifaddrs)
+    return results
+
+
+
+def posixGetLinkLocalIPv6Addresses():
+    """
+    Return a list of strings in colon-hex format representing all the
+    link-local IPv6 addresses available on the system, as reported by
+    I{getifaddrs(3)}.
+    """
+    retList = []
+    for (interface, family, address) in _interfaces():
+        interface = nativeString(interface)
+        address = nativeString(address)
+        if family == socket.AF_INET6 and address.startswith('fe80:'):
+            retList.append('%s%%%s' % (address, interface))
+    return retList
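+
+# A minimal usage sketch (illustrative only; the output depends entirely on
+# the host's network interfaces):
+#
+#     for scopedAddress in posixGetLinkLocalIPv6Addresses():
+#         print(scopedAddress)   # e.g. 'fe80::1%lo0'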
diff --git a/ThirdParty/Twisted/twisted/internet/test/_win32ifaces.py b/ThirdParty/Twisted/twisted/internet/test/_win32ifaces.py
new file mode 100644
index 0000000..4a1e82b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/_win32ifaces.py
@@ -0,0 +1,119 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Windows implementation of local network interface enumeration.
+"""
+
+from socket import socket, AF_INET6, SOCK_STREAM
+from ctypes import (
+    WinDLL, byref, create_string_buffer, c_int, c_void_p,
+    POINTER, Structure, cast, string_at)
+
+WS2_32 = WinDLL('ws2_32')
+
+SOCKET = c_int
+DWORD = c_int
+LPVOID = c_void_p
+LPSOCKADDR = c_void_p
+LPWSAPROTOCOL_INFO = c_void_p
+LPTSTR = c_void_p
+LPDWORD = c_void_p
+LPWSAOVERLAPPED = c_void_p
+LPWSAOVERLAPPED_COMPLETION_ROUTINE = c_void_p
+
+# http://msdn.microsoft.com/en-us/library/ms741621(v=VS.85).aspx
+# int WSAIoctl(
+#         __in   SOCKET s,
+#         __in   DWORD dwIoControlCode,
+#         __in   LPVOID lpvInBuffer,
+#         __in   DWORD cbInBuffer,
+#         __out  LPVOID lpvOutBuffer,
+#         __in   DWORD cbOutBuffer,
+#         __out  LPDWORD lpcbBytesReturned,
+#         __in   LPWSAOVERLAPPED lpOverlapped,
+#         __in   LPWSAOVERLAPPED_COMPLETION_ROUTINE lpCompletionRoutine
+#       );
+WSAIoctl = WS2_32.WSAIoctl
+WSAIoctl.argtypes = [
+    SOCKET, DWORD, LPVOID, DWORD, LPVOID, DWORD, LPDWORD,
+    LPWSAOVERLAPPED, LPWSAOVERLAPPED_COMPLETION_ROUTINE]
+WSAIoctl.restype = c_int
+
+# http://msdn.microsoft.com/en-us/library/ms741516(VS.85).aspx
+# INT WSAAPI WSAAddressToString(
+#         __in      LPSOCKADDR lpsaAddress,
+#         __in      DWORD dwAddressLength,
+#         __in_opt  LPWSAPROTOCOL_INFO lpProtocolInfo,
+#         __inout   LPTSTR lpszAddressString,
+#         __inout   LPDWORD lpdwAddressStringLength
+#       );
+WSAAddressToString = WS2_32.WSAAddressToStringA
+WSAAddressToString.argtypes = [
+    LPSOCKADDR, DWORD, LPWSAPROTOCOL_INFO, LPTSTR, LPDWORD]
+WSAAddressToString.restype = c_int
+
+
+SIO_ADDRESS_LIST_QUERY = 0x48000016
+WSAEFAULT = 10014
+
+class SOCKET_ADDRESS(Structure):
+    _fields_ = [('lpSockaddr', c_void_p),
+                ('iSockaddrLength', c_int)]
+
+
+
+def make_SAL(ln):
+    class SOCKET_ADDRESS_LIST(Structure):
+        _fields_ = [('iAddressCount', c_int),
+                    ('Address', SOCKET_ADDRESS * ln)]
+    return SOCKET_ADDRESS_LIST
+
+
+
+def win32GetLinkLocalIPv6Addresses():
+    """
+    Return a list of strings in colon-hex format representing all the
+    link-local IPv6 addresses available on the system, as reported by
+    I{WSAIoctl}/C{SIO_ADDRESS_LIST_QUERY}.
+    """
+    s = socket(AF_INET6, SOCK_STREAM)
+    size = 4096
+    retBytes = c_int()
+    for i in range(2):
+        buf = create_string_buffer(size)
+        ret = WSAIoctl(
+            s.fileno(),
+            SIO_ADDRESS_LIST_QUERY, 0, 0, buf, size, byref(retBytes), 0, 0)
+
+        # WSAIoctl might fail with WSAEFAULT, which means there was not enough
+        # space in the buffer we gave it.  There's no way to check the errno
+        # until Python 2.6, so we don't even try. :/ Maybe if retBytes is still
+        # 0 another error happened, though.
+        if ret and retBytes.value:
+            size = retBytes.value
+        else:
+            break
+
+    # If it failed, then we'll just have to give up.  Still no way to see why.
+    if ret:
+        raise RuntimeError("WSAIoctl failure")
+
+    addrList = cast(buf, POINTER(make_SAL(0)))
+    addrCount = addrList[0].iAddressCount
+    addrList = cast(buf, POINTER(make_SAL(addrCount)))
+
+    addressStringBufLength = 1024
+    addressStringBuf = create_string_buffer(addressStringBufLength)
+
+    retList = []
+    for i in range(addrList[0].iAddressCount):
+        retBytes.value = addressStringBufLength
+        addr = addrList[0].Address[i]
+        ret = WSAAddressToString(
+            addr.lpSockaddr, addr.iSockaddrLength, 0, addressStringBuf,
+            byref(retBytes))
+        if ret:
+            raise RuntimeError("WSAAddressToString failure")
+        retList.append(string_at(addressStringBuf))
+    return [addr for addr in retList if '%' in addr]
diff --git a/ThirdParty/Twisted/twisted/internet/test/connectionmixins.py b/ThirdParty/Twisted/twisted/internet/test/connectionmixins.py
new file mode 100644
index 0000000..c1a8ca2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/connectionmixins.py
@@ -0,0 +1,776 @@
+# -*- test-case-name: twisted.internet.test.test_tcp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Various helpers for tests for connection-oriented transports.
+"""
+
+from __future__ import division, absolute_import
+
+import socket
+
+from gc import collect
+from weakref import ref
+
+from zope.interface import implementer
+from zope.interface.verify import verifyObject
+
+from twisted.python import context, log
+from twisted.python.failure import Failure
+from twisted.python.runtime import platform
+from twisted.python.log import ILogContext, msg, err
+from twisted.internet.defer import Deferred, gatherResults, succeed, fail
+from twisted.internet.interfaces import (
+    IConnector, IResolverSimple, IReactorFDSet)
+from twisted.internet.protocol import ClientFactory, Protocol, ServerFactory
+from twisted.trial.unittest import SkipTest
+from twisted.internet.error import DNSLookupError
+from twisted.internet.interfaces import ITLSTransport
+from twisted.internet.test.reactormixins import needsRunningReactor
+from twisted.test.test_tcp import ClosingProtocol
+
+
+
+def serverFactoryFor(protocol):
+    """
+    Helper function which returns a L{ServerFactory} which will build instances
+    of C{protocol}.
+
+    @param protocol: A callable which returns an L{IProtocol} provider to be
+        used to handle connections to the port the returned factory listens on.
+    """
+    factory = ServerFactory()
+    factory.protocol = protocol
+    return factory
+
+# ServerFactory is good enough for client endpoints, too.
+factoryFor = serverFactoryFor
+
+
+
+def findFreePort(interface='127.0.0.1', family=socket.AF_INET,
+                 type=socket.SOCK_STREAM):
+    """
+    Ask the platform to allocate a free port on the specified interface, then
+    release the socket and return the address which was allocated.
+
+    @param interface: The local address to try to bind the port on.
+    @type interface: C{str}
+
+    @param family: The address family of the probe socket, for example
+        C{socket.AF_INET}.
+
+    @param type: The socket type which will use the resulting port.
+
+    @return: A two-tuple of address and port, like that returned by
+        L{socket.getsockname}.
+    """
+    addr = socket.getaddrinfo(interface, 0)[0][4]
+    probe = socket.socket(family, type)
+    try:
+        probe.bind(addr)
+        return probe.getsockname()
+    finally:
+        probe.close()
+
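+# A minimal usage sketch (illustrative only).  Note the inherent race: the
+# port is only known to be free at the moment it is probed, so callers should
+# bind it again as soon as possible:
+#
+#     host, port = findFreePort()[:2]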
+
+
+class ConnectableProtocol(Protocol):
+    """
+    A protocol to be used with L{runProtocolsWithReactor}.
+
+    The protocol and its pair should eventually disconnect from each other.
+
+    @ivar reactor: The reactor used in this test.
+
+    @ivar disconnectReason: The L{Failure} passed to C{connectionLost}.
+
+    @ivar _done: A L{Deferred} which will be fired when the connection is
+        lost.
+    """
+
+    disconnectReason = None
+
+    def _setAttributes(self, reactor, done):
+        """
+        Set attributes on the protocol that are known only externally; this
+        will be called by L{runProtocolsWithReactor} when this protocol is
+        instantiated.
+
+        @param reactor: The reactor used in this test.
+
+        @param done: A L{Deferred} which will be fired when the connection is
+           lost.
+        """
+        self.reactor = reactor
+        self._done = done
+
+
+    def connectionLost(self, reason):
+        self.disconnectReason = reason
+        self._done.callback(None)
+        del self._done
+
+
+
+class EndpointCreator:
+    """
+    Create client and server endpoints that know how to connect to each other.
+    """
+
+    def server(self, reactor):
+        """
+        Return an object providing C{IStreamServerEndpoint} for use in creating
+        a server to use to establish the connection type to be tested.
+        """
+        raise NotImplementedError()
+
+
+    def client(self, reactor, serverAddress):
+        """
+        Return an object providing C{IStreamClientEndpoint} for use in creating
+        a client to use to establish the connection type to be tested.
+        """
+        raise NotImplementedError()
+
+
+
+class _SingleProtocolFactory(ClientFactory):
+    """
+    Factory to be used by L{runProtocolsWithReactor}.
+
+    It always returns the same protocol (i.e. is intended for only a single connection).
+    """
+
+    def __init__(self, protocol):
+        self._protocol = protocol
+
+
+    def buildProtocol(self, addr):
+        return self._protocol
+
+
+
+def runProtocolsWithReactor(reactorBuilder, serverProtocol, clientProtocol,
+                            endpointCreator):
+    """
+    Connect two protocols using endpoints and a new reactor instance.
+
+    A new reactor will be created and run, with the client and server protocol
+    instances connected to each other using the given endpoint creator. The
+    protocols should run through some set of tests, then disconnect; when both
+    have disconnected the reactor will be stopped and the function will
+    return.
+
+    @param reactorBuilder: A L{ReactorBuilder} instance.
+
+    @param serverProtocol: A L{ConnectableProtocol} that will be the server.
+
+    @param clientProtocol: A L{ConnectableProtocol} that will be the client.
+
+    @param endpointCreator: An instance of L{EndpointCreator}.
+
+    @return: The reactor run by this test.
+    """
+    reactor = reactorBuilder.buildReactor()
+    serverProtocol._setAttributes(reactor, Deferred())
+    clientProtocol._setAttributes(reactor, Deferred())
+    serverFactory = _SingleProtocolFactory(serverProtocol)
+    clientFactory = _SingleProtocolFactory(clientProtocol)
+
+    # Listen on a port:
+    serverEndpoint = endpointCreator.server(reactor)
+    d = serverEndpoint.listen(serverFactory)
+
+    # Connect to the port:
+    def gotPort(p):
+        clientEndpoint = endpointCreator.client(
+            reactor, p.getHost())
+        return clientEndpoint.connect(clientFactory)
+    d.addCallback(gotPort)
+
+    # Stop reactor when both connections are lost:
+    def failed(result):
+        log.err(result, "Connection setup failed.")
+    disconnected = gatherResults([serverProtocol._done, clientProtocol._done])
+    d.addCallback(lambda _: disconnected)
+    d.addErrback(failed)
+    d.addCallback(lambda _: needsRunningReactor(reactor, reactor.stop))
+
+    reactorBuilder.runReactor(reactor)
+    return reactor
+
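+# A minimal sketch of how a ReactorBuilder-based test might use this helper
+# (illustrative only; SomeServerProtocol, SomeClientProtocol and TCPCreator
+# are hypothetical ConnectableProtocol/EndpointCreator subclasses):
+#
+#     def test_exchange(self):
+#         server = SomeServerProtocol()
+#         client = SomeClientProtocol()
+#         runProtocolsWithReactor(self, server, client, TCPCreator())
+#         # By the time runProtocolsWithReactor returns, both protocols have
+#         # disconnected and recorded their termination reasons.
+#         self.assertNotIdentical(None, client.disconnectReason)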
+
+
+def _getWriters(reactor):
+    """
+    Like L{IReactorFDSet.getWriters}, but with support for IOCP reactor as
+    well.
+    """
+    if IReactorFDSet.providedBy(reactor):
+        return reactor.getWriters()
+    elif 'IOCP' in reactor.__class__.__name__:
+        return reactor.handles
+    else:
+        # Cannot tell what is going on.
+        raise Exception("Cannot find writers on %r" % (reactor,))
+
+
+
+class _AcceptOneClient(ServerFactory):
+    """
+    This factory fires a L{Deferred} with a protocol instance shortly after it
+    is constructed (hopefully long enough afterwards so that it has been
+    connected to a transport).
+
+    @ivar reactor: The reactor used to schedule the I{shortly}.
+
+    @ivar result: A L{Deferred} which will be fired with the protocol instance.
+    """
+    def __init__(self, reactor, result):
+        self.reactor = reactor
+        self.result = result
+
+
+    def buildProtocol(self, addr):
+        protocol = ServerFactory.buildProtocol(self, addr)
+        self.reactor.callLater(0, self.result.callback, protocol)
+        return protocol
+
+
+
+class _SimplePullProducer(object):
+    """
+    A pull producer which writes one byte whenever it is resumed.  For use by
+    L{test_unregisterProducerAfterDisconnect}.
+    """
+    def __init__(self, consumer):
+        self.consumer = consumer
+
+
+    def stopProducing(self):
+        pass
+
+
+    def resumeProducing(self):
+        log.msg("Producer.resumeProducing")
+        self.consumer.write(b'x')
+
+
+
+class Stop(ClientFactory):
+    """
+    A client factory which stops a reactor when a connection attempt fails.
+    """
+    failReason = None
+
+    def __init__(self, reactor):
+        self.reactor = reactor
+
+
+    def clientConnectionFailed(self, connector, reason):
+        self.failReason = reason
+        msg("Stop(CF) cCFailed: %s" % (reason.getErrorMessage(),))
+        self.reactor.stop()
+
+
+
+ at implementer(IResolverSimple)
+class FakeResolver(object):
+    """
+    A resolver implementation based on a C{dict} mapping names to addresses.
+    """
+
+    def __init__(self, names):
+        self.names = names
+
+
+    def getHostByName(self, name, timeout):
+        try:
+            return succeed(self.names[name])
+        except KeyError:
+            return fail(DNSLookupError("FakeResolver couldn't find " + name))
+
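+# A minimal sketch of how a test might install this resolver (illustrative
+# only; whether installResolver is available depends on the reactor under
+# test):
+#
+#     reactor.installResolver(FakeResolver({"example.com": "127.0.0.1"}))
+#     d = reactor.resolve("example.com")   # fires with '127.0.0.1'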
+
+
+class ClosingLaterProtocol(ConnectableProtocol):
+    """
+    ClosingLaterProtocol exchanges one byte with its peer and then disconnects
+    itself.  This is mostly a work-around for the fact that connectionMade is
+    called before the SSL handshake has completed.
+    """
+    def __init__(self, onConnectionLost):
+        self.lostConnectionReason = None
+        self.onConnectionLost = onConnectionLost
+
+
+    def connectionMade(self):
+        msg("ClosingLaterProtocol.connectionMade")
+
+
+    def dataReceived(self, bytes):
+        msg("ClosingLaterProtocol.dataReceived %r" % (bytes,))
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        msg("ClosingLaterProtocol.connectionLost")
+        self.lostConnectionReason = reason
+        self.onConnectionLost.callback(self)
+
+
+
+class ConnectionTestsMixin(object):
+    """
+    This mixin defines test methods which should apply to most L{ITransport}
+    implementations.
+    """
+
+    # This should be a reactormixins.EndpointCreator instance.
+    endpoints = None
+
+
+    def test_logPrefix(self):
+        """
+        Client and server transports implement L{ILoggingContext.logPrefix} to
+        return a message reflecting the protocol they are running.
+        """
+        class CustomLogPrefixProtocol(ConnectableProtocol):
+            def __init__(self, prefix):
+                self._prefix = prefix
+                self.system = None
+
+            def connectionMade(self):
+                self.transport.write(b"a")
+
+            def logPrefix(self):
+                return self._prefix
+
+            def dataReceived(self, bytes):
+                self.system = context.get(ILogContext)["system"]
+                self.transport.write(b"b")
+                # Only close connection if both sides have received data, so
+                # that both sides have system set.
+                if b"b" in bytes:
+                    self.transport.loseConnection()
+
+        client = CustomLogPrefixProtocol("Custom Client")
+        server = CustomLogPrefixProtocol("Custom Server")
+        runProtocolsWithReactor(self, server, client, self.endpoints)
+        self.assertIn("Custom Client", client.system)
+        self.assertIn("Custom Server", server.system)
+
+
+    def test_writeAfterDisconnect(self):
+        """
+        After a connection is disconnected, L{ITransport.write} and
+        L{ITransport.writeSequence} are no-ops.
+        """
+        reactor = self.buildReactor()
+
+        finished = []
+
+        serverConnectionLostDeferred = Deferred()
+        protocol = lambda: ClosingLaterProtocol(serverConnectionLostDeferred)
+        portDeferred = self.endpoints.server(reactor).listen(
+            serverFactoryFor(protocol))
+        def listening(port):
+            msg("Listening on %r" % (port.getHost(),))
+            endpoint = self.endpoints.client(reactor, port.getHost())
+
+            lostConnectionDeferred = Deferred()
+            protocol = lambda: ClosingLaterProtocol(lostConnectionDeferred)
+            client = endpoint.connect(factoryFor(protocol))
+            def write(proto):
+                msg("About to write to %r" % (proto,))
+                proto.transport.write(b'x')
+            client.addCallbacks(write, lostConnectionDeferred.errback)
+
+            def disconnected(proto):
+                msg("%r disconnected" % (proto,))
+                proto.transport.write(b"some bytes to get lost")
+                proto.transport.writeSequence([b"some", b"more"])
+                finished.append(True)
+
+            lostConnectionDeferred.addCallback(disconnected)
+            serverConnectionLostDeferred.addCallback(disconnected)
+            return gatherResults([
+                    lostConnectionDeferred,
+                    serverConnectionLostDeferred])
+
+        def onListen():
+            portDeferred.addCallback(listening)
+            portDeferred.addErrback(err)
+            portDeferred.addCallback(lambda ignored: reactor.stop())
+        needsRunningReactor(reactor, onListen)
+
+        self.runReactor(reactor)
+        self.assertEqual(finished, [True, True])
+
+
+    def test_protocolGarbageAfterLostConnection(self):
+        """
+        After the connection a protocol is being used for is closed, the
+        reactor discards all of its references to the protocol.
+        """
+        lostConnectionDeferred = Deferred()
+        clientProtocol = ClosingLaterProtocol(lostConnectionDeferred)
+        clientRef = ref(clientProtocol)
+
+        reactor = self.buildReactor()
+        portDeferred = self.endpoints.server(reactor).listen(
+            serverFactoryFor(Protocol))
+        def listening(port):
+            msg("Listening on %r" % (port.getHost(),))
+            endpoint = self.endpoints.client(reactor, port.getHost())
+
+            client = endpoint.connect(factoryFor(lambda: clientProtocol))
+            def disconnect(proto):
+                msg("About to disconnect %r" % (proto,))
+                proto.transport.loseConnection()
+            client.addCallback(disconnect)
+            client.addErrback(lostConnectionDeferred.errback)
+            return lostConnectionDeferred
+
+        def onListening():
+            portDeferred.addCallback(listening)
+            portDeferred.addErrback(err)
+            portDeferred.addBoth(lambda ignored: reactor.stop())
+        needsRunningReactor(reactor, onListening)
+
+        self.runReactor(reactor)
+
+        # Drop the reference and get the garbage collector to tell us if there
+        # are no references to the protocol instance left in the reactor.
+        clientProtocol = None
+        collect()
+        self.assertIdentical(None, clientRef())
+
+
+
+class LogObserverMixin(object):
+    """
+    Mixin for L{TestCase} subclasses which want to observe log events.
+    """
+    def observe(self):
+        loggedMessages = []
+        log.addObserver(loggedMessages.append)
+        self.addCleanup(log.removeObserver, loggedMessages.append)
+        return loggedMessages
+
+
+
+class BrokenContextFactory(object):
+    """
+    A context factory with a broken C{getContext} method, for exercising the
+    error handling for such a case.
+    """
+    message = "Some path was wrong maybe"
+
+    def getContext(self):
+        raise ValueError(self.message)
+
+
+
+class TCPClientTestsMixin(object):
+    """
+    This mixin defines tests applicable to TCP client implementations.  Classes
+    which mix this in must provide all of the documented instance variables in
+    order to specify how the test works.  These are documented as instance
+    variables rather than declared as methods due to some peculiar inheritance
+    ordering concerns, but they are effectively abstract methods.
+
+    This must be mixed in to a L{ReactorBuilder
+    <twisted.internet.test.reactormixins.ReactorBuilder>} subclass, as it
+    depends on several of its methods.
+
+    @ivar endpoints: A L{twisted.internet.test.reactormixins.EndpointCreator}
+      instance.
+
+    @ivar interface: An IP address literal to locally bind a socket to as well
+        as to connect to.  This can be any valid interface for the local host.
+    @type interface: C{str}
+
+    @ivar port: An unused local listening port to listen on and connect to.
+        This will be used in conjunction with the C{interface}.  (Depending on
+        what they're testing, some tests will locate their own port with
+        L{findFreePort} instead.)
+    @type port: C{int}
+
+    @ivar family: an address family constant, such as L{socket.AF_INET},
+        L{socket.AF_INET6}, or L{socket.AF_UNIX}, which indicates the address
+        family of the transport type under test.
+    @type family: C{int}
+
+    @ivar addressClass: the L{twisted.internet.interfaces.IAddress} implementor
+        associated with the transport type under test.  Must also be a
+        3-argument callable which produces an instance of same.
+    @type addressClass: C{type}
+
+    @ivar fakeDomainName: A fake domain name to use, to simulate hostname
+        resolution and to distinguish between hostnames and IP addresses where
+        necessary.
+    @type fakeDomainName: C{str}
+    """
+
+    def test_interface(self):
+        """
+        L{IReactorTCP.connectTCP} returns an object providing L{IConnector}.
+        """
+        reactor = self.buildReactor()
+        connector = reactor.connectTCP(self.interface, self.port,
+                                       ClientFactory())
+        self.assertTrue(verifyObject(IConnector, connector))
+
+
+    def test_clientConnectionFailedStopsReactor(self):
+        """
+        The reactor can be stopped by a client factory's
+        C{clientConnectionFailed} method.
+        """
+        host, port = findFreePort(self.interface, self.family)[:2]
+        reactor = self.buildReactor()
+        needsRunningReactor(
+            reactor, lambda: reactor.connectTCP(host, port, Stop(reactor)))
+        self.runReactor(reactor)
+
+
+    def test_addresses(self):
+        """
+        A client's transport's C{getHost} and C{getPeer} return L{IPv4Address}
+        instances which have the dotted-quad string form of the resolved
+        address of the local and remote endpoints of the connection
+        respectively as their C{host} attribute, not the hostname originally
+        passed in to L{connectTCP
+        <twisted.internet.interfaces.IReactorTCP.connectTCP>}, if a hostname
+        was used.
+        """
+        host, port = findFreePort(self.interface, self.family)[:2]
+        reactor = self.buildReactor()
+        fakeDomain = self.fakeDomainName
+        reactor.installResolver(FakeResolver({fakeDomain: self.interface}))
+
+        server = reactor.listenTCP(
+            0, serverFactoryFor(Protocol), interface=host)
+        serverAddress = server.getHost()
+
+        addresses = {'host': None, 'peer': None}
+        class CheckAddress(Protocol):
+            def makeConnection(self, transport):
+                addresses['host'] = transport.getHost()
+                addresses['peer'] = transport.getPeer()
+                reactor.stop()
+
+        clientFactory = Stop(reactor)
+        clientFactory.protocol = CheckAddress
+
+        def connectMe():
+            reactor.connectTCP(
+                fakeDomain, server.getHost().port, clientFactory,
+                bindAddress=(self.interface, port))
+        needsRunningReactor(reactor, connectMe)
+
+        self.runReactor(reactor)
+
+        if clientFactory.failReason:
+            self.fail(clientFactory.failReason.getTraceback())
+
+        self.assertEqual(
+            addresses['host'],
+            self.addressClass('TCP', self.interface, port))
+        self.assertEqual(
+            addresses['peer'],
+            self.addressClass('TCP', self.interface, serverAddress.port))
+
+
+    def test_connectEvent(self):
+        """
+        This test checks that we correctly get notification events for a
+        client.  This ought to prevent a regression under Windows using the
+        GTK2 reactor.  See #3925.
+        """
+        reactor = self.buildReactor()
+
+        server = reactor.listenTCP(0, serverFactoryFor(Protocol),
+                                   interface=self.interface)
+        connected = []
+
+        class CheckConnection(Protocol):
+            def connectionMade(self):
+                connected.append(self)
+                reactor.stop()
+
+        clientFactory = Stop(reactor)
+        clientFactory.protocol = CheckConnection
+
+        needsRunningReactor(reactor, lambda: reactor.connectTCP(
+            self.interface, server.getHost().port, clientFactory))
+
+        reactor.run()
+
+        self.assertTrue(connected)
+
+
+    def test_unregisterProducerAfterDisconnect(self):
+        """
+        If a producer is unregistered from a L{ITCPTransport} provider after
+        the transport has been disconnected (by the peer) and after
+        L{ITCPTransport.loseConnection} has been called, the transport is not
+        re-added to the reactor as a writer as would be necessary if the
+        transport were still connected.
+        """
+        reactor = self.buildReactor()
+        port = reactor.listenTCP(0, serverFactoryFor(ClosingProtocol),
+                                 interface=self.interface)
+
+        finished = Deferred()
+        finished.addErrback(log.err)
+        finished.addCallback(lambda ign: reactor.stop())
+
+        writing = []
+
+        class ClientProtocol(Protocol):
+            """
+            Protocol to connect, register a producer, try to lose the
+            connection, wait for the server to disconnect from us, and then
+            unregister the producer.
+            """
+            def connectionMade(self):
+                log.msg("ClientProtocol.connectionMade")
+                self.transport.registerProducer(
+                    _SimplePullProducer(self.transport), False)
+                self.transport.loseConnection()
+
+            def connectionLost(self, reason):
+                log.msg("ClientProtocol.connectionLost")
+                self.unregister()
+                writing.append(self.transport in _getWriters(reactor))
+                finished.callback(None)
+
+            def unregister(self):
+                log.msg("ClientProtocol unregister")
+                self.transport.unregisterProducer()
+
+        clientFactory = ClientFactory()
+        clientFactory.protocol = ClientProtocol
+        reactor.connectTCP(self.interface, port.getHost().port, clientFactory)
+        self.runReactor(reactor)
+        self.assertFalse(writing[0],
+                         "Transport was writing after unregisterProducer.")
+
+
+    def test_disconnectWhileProducing(self):
+        """
+        If L{ITCPTransport.loseConnection} is called while a producer is
+        registered with the transport, the connection is closed after the
+        producer is unregistered.
+        """
+        reactor = self.buildReactor()
+
+        # For some reason, pygobject/pygtk will not deliver the close
+        # notification that should happen after the unregisterProducer call in
+        # this test.  The selectable is in the write notification set, but no
+        # notification ever arrives.  Probably for the same reason #5233 led
+        # win32eventreactor to be broken.
+        skippedReactors = ["Glib2Reactor", "Gtk2Reactor"]
+        reactorClassName = reactor.__class__.__name__
+        if reactorClassName in skippedReactors and platform.isWindows():
+            raise SkipTest(
+                "A pygobject/pygtk bug disables this functionality on Windows.")
+
+        class Producer:
+            def resumeProducing(self):
+                log.msg("Producer.resumeProducing")
+
+        port = reactor.listenTCP(0, serverFactoryFor(Protocol),
+            interface=self.interface)
+
+        finished = Deferred()
+        finished.addErrback(log.err)
+        finished.addCallback(lambda ign: reactor.stop())
+
+        class ClientProtocol(Protocol):
+            """
+            Protocol to connect, register a producer, try to lose the
+            connection, unregister the producer, and wait for the connection to
+            actually be lost.
+            """
+            def connectionMade(self):
+                log.msg("ClientProtocol.connectionMade")
+                self.transport.registerProducer(Producer(), False)
+                self.transport.loseConnection()
+                # Let the reactor tick over, in case synchronously calling
+                # loseConnection and then unregisterProducer is the same as
+                # synchronously calling unregisterProducer and then
+                # loseConnection (as it is in several reactors).
+                reactor.callLater(0, reactor.callLater, 0, self.unregister)
+
+            def unregister(self):
+                log.msg("ClientProtocol unregister")
+                self.transport.unregisterProducer()
+                # This should all be pretty quick.  Fail the test
+                # if we don't get a connectionLost event really
+                # soon.
+                reactor.callLater(
+                    1.0, finished.errback,
+                    Failure(Exception("Connection was not lost")))
+
+            def connectionLost(self, reason):
+                log.msg("ClientProtocol.connectionLost")
+                finished.callback(None)
+
+        clientFactory = ClientFactory()
+        clientFactory.protocol = ClientProtocol
+        reactor.connectTCP(self.interface, port.getHost().port, clientFactory)
+        self.runReactor(reactor)
+        # If the test failed, we logged an error already and trial
+        # will catch it.
+
+
+    def test_badContext(self):
+        """
+        If the context factory passed to L{ITCPTransport.startTLS} raises an
+        exception from its C{getContext} method, that exception is raised by
+        L{ITCPTransport.startTLS}.
+        """
+        reactor = self.buildReactor()
+
+        brokenFactory = BrokenContextFactory()
+        results = []
+
+        serverFactory = ServerFactory()
+        serverFactory.protocol = Protocol
+
+        port = reactor.listenTCP(0, serverFactory, interface=self.interface)
+        endpoint = self.endpoints.client(reactor, port.getHost())
+
+        clientFactory = ClientFactory()
+        clientFactory.protocol = Protocol
+        connectDeferred = endpoint.connect(clientFactory)
+
+        def connected(protocol):
+            if not ITLSTransport.providedBy(protocol.transport):
+                results.append("skip")
+            else:
+                results.append(self.assertRaises(ValueError,
+                                                 protocol.transport.startTLS,
+                                                 brokenFactory))
+
+        def connectFailed(failure):
+            results.append(failure)
+
+        def whenRun():
+            connectDeferred.addCallback(connected)
+            connectDeferred.addErrback(connectFailed)
+            connectDeferred.addBoth(lambda ign: reactor.stop())
+        needsRunningReactor(reactor, whenRun)
+
+        self.runReactor(reactor)
+
+        self.assertEqual(len(results), 1,
+                         "more than one callback result: %s" % (results,))
+
+        if isinstance(results[0], Failure):
+            # self.fail(Failure)
+            results[0].raiseException()
+        if results[0] == "skip":
+            raise SkipTest("Reactor does not support ITLSTransport")
+        self.assertEqual(BrokenContextFactory.message, str(results[0]))
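
For orientation, a concrete suite built on these mixins is expected to combine
them with the ReactorBuilder mixin added by reactormixins.py later in this
diff and to fill in the documented instance variables.  The sketch below only
illustrates that shape; _PlaceholderEndpoints and TCPClientTests are
hypothetical names, and the import of TCPClientTestsMixin from the module this
hunk adds is assumed rather than shown:

    # Illustrative sketch only -- not part of the imported sources.
    import socket
    from twisted.internet.address import IPv4Address
    from twisted.internet.endpoints import TCP4ClientEndpoint, TCP4ServerEndpoint
    from twisted.internet.test.reactormixins import ReactorBuilder
    # TCPClientTestsMixin is the mixin defined in the file above; its import
    # path is omitted here.

    class _PlaceholderEndpoints(object):
        # Hypothetical stand-in for the endpoint-creator object the mixins
        # expect via the `endpoints` attribute.
        def server(self, reactor):
            return TCP4ServerEndpoint(reactor, 0, interface="127.0.0.1")

        def client(self, reactor, serverAddress):
            return TCP4ClientEndpoint(
                reactor, serverAddress.host, serverAddress.port)

    class TCPClientTests(ReactorBuilder, TCPClientTestsMixin):  # hypothetical
        endpoints = _PlaceholderEndpoints()
        interface = "127.0.0.1"
        port = 12345                  # any locally unused port
        family = socket.AF_INET
        addressClass = IPv4Address
        fakeDomainName = "some-fake.example.com"

    # ReactorBuilder.makeTestCaseClasses() then multiplies these tests over
    # every reactor implementation known to the builder.
    globals().update(TCPClientTests.makeTestCaseClasses())
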
diff --git a/ThirdParty/Twisted/twisted/internet/test/fake_CAs/not-a-certificate b/ThirdParty/Twisted/twisted/internet/test/fake_CAs/not-a-certificate
new file mode 100644
index 0000000..316453d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/fake_CAs/not-a-certificate
@@ -0,0 +1 @@
+This file is not a certificate; it is present to make sure that it will be skipped.
diff --git a/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing1.pem b/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing1.pem
new file mode 100644
index 0000000..75e47a6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing1.pem
@@ -0,0 +1,26 @@
+
+This is a self-signed certificate authority certificate to be used in tests.
+
+It was created with the following command: 
+certcreate -f thing1.pem -h fake-ca-1.example.com -e noreply@example.com \
+           -S 1234 -o 'Twisted Matrix Labs'
+
+'certcreate' may be obtained from <http://divmod.org/trac/wiki/DivmodEpsilon>
+
+-----BEGIN CERTIFICATE-----
+MIICwjCCAisCAgTSMA0GCSqGSIb3DQEBBAUAMIGoMREwDwYDVQQLEwhTZWN1cml0
+eTEcMBoGA1UEChMTVHdpc3RlZCBNYXRyaXggTGFiczEeMBwGA1UEAxMVZmFrZS1j
+YS0xLmV4YW1wbGUuY29tMREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMx
+IjAgBgkqhkiG9w0BCQEWE25vcmVwbHlAZXhhbXBsZS5jb20xETAPBgNVBAcTCE5l
+dyBZb3JrMB4XDTEwMDkyMTAxMjUxNFoXDTExMDkyMTAxMjUxNFowgagxETAPBgNV
+BAsTCFNlY3VyaXR5MRwwGgYDVQQKExNUd2lzdGVkIE1hdHJpeCBMYWJzMR4wHAYD
+VQQDExVmYWtlLWNhLTEuZXhhbXBsZS5jb20xETAPBgNVBAgTCE5ldyBZb3JrMQsw
+CQYDVQQGEwJVUzEiMCAGCSqGSIb3DQEJARYTbm9yZXBseUBleGFtcGxlLmNvbTER
+MA8GA1UEBxMITmV3IFlvcmswgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBALRb
+VqC0CsaFgq1vbwPfs8zoP3ZYC/0sPMv0RJN+f3Dc7Q6YgNHS7o7TM3uAy/McADeW
+rwVuNJGe9k+4ZBHysmBH1sG64fHT5TlK9saPcUQqkubSWj4cKSDtVbQERWqC5Dy+
+qTQeZGYoPEMlnRXgMpST04DG//Dgzi4PYqUOjwxTAgMBAAEwDQYJKoZIhvcNAQEE
+BQADgYEAqNEdMXWEs8Co76wxL3/cSV3MjiAroVxJdI/3EzlnfPi1JeibbdWw31fC
+bn6428KTjjfhS31zo1yHG3YNXFEJXRscwLAH7ogz5kJwZMy/oS/96EFM10bkNwkK
+v+nWKN8i3t/E5TEIl3BPN8tchtWmH0rycVuzs5LwaewwR1AnUE4=
+-----END CERTIFICATE-----
diff --git a/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing2-duplicate.pem b/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing2-duplicate.pem
new file mode 100644
index 0000000..429e121
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing2-duplicate.pem
@@ -0,0 +1,26 @@
+
+This is a self-signed certificate authority certificate to be used in tests.
+
+It was created with the following command: 
+certcreate -f thing2.pem -h fake-ca-2.example.com -e noreply@example.com \
+           -S 1234 -o 'Twisted Matrix Labs'
+
+'certcreate' may be obtained from <http://divmod.org/trac/wiki/DivmodEpsilon>
+
+-----BEGIN CERTIFICATE-----
+MIICwjCCAisCAgTSMA0GCSqGSIb3DQEBBAUAMIGoMREwDwYDVQQLEwhTZWN1cml0
+eTEcMBoGA1UEChMTVHdpc3RlZCBNYXRyaXggTGFiczEeMBwGA1UEAxMVZmFrZS1j
+YS0yLmV4YW1wbGUuY29tMREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMx
+IjAgBgkqhkiG9w0BCQEWE25vcmVwbHlAZXhhbXBsZS5jb20xETAPBgNVBAcTCE5l
+dyBZb3JrMB4XDTEwMDkyMTAxMjUzMVoXDTExMDkyMTAxMjUzMVowgagxETAPBgNV
+BAsTCFNlY3VyaXR5MRwwGgYDVQQKExNUd2lzdGVkIE1hdHJpeCBMYWJzMR4wHAYD
+VQQDExVmYWtlLWNhLTIuZXhhbXBsZS5jb20xETAPBgNVBAgTCE5ldyBZb3JrMQsw
+CQYDVQQGEwJVUzEiMCAGCSqGSIb3DQEJARYTbm9yZXBseUBleGFtcGxlLmNvbTER
+MA8GA1UEBxMITmV3IFlvcmswgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMNn
+b3EcKqBedQed1qJC4uGVx8PYmn2vxL3QwCVW1w0VjpZXyhCq/2VrYBhJAXRzpfvE
+dCqhtJKcdifwavUrTfr4yXu1MvWA0YuaAkj1TbmlHHQYACf3h+MPOXroYzhT72bO
+FSSLDWuitj0ozR+2Fk15QwLWUxaYLmwylxXAf7vpAgMBAAEwDQYJKoZIhvcNAQEE
+BQADgYEADB2N6VHHhm5M2rJqqGDXMm2dU+7abxiuN+PUygN2LXIsqdGBS6U7/rta
+lJNVeRaM423c8imfuklkIBG9Msn5+xm1xIMIULoi/efActDLbsX1x6IyHQrG5aDP
+/RMKBio9RjS8ajgSwyYVUZiCZBsn/T0/JS8K61YLpiv4Tg8uXmM=
+-----END CERTIFICATE-----
diff --git a/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing2.pem b/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing2.pem
new file mode 100644
index 0000000..429e121
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/fake_CAs/thing2.pem
@@ -0,0 +1,26 @@
+
+This is a self-signed certificate authority certificate to be used in tests.
+
+It was created with the following command: 
+certcreate -f thing2.pem -h fake-ca-2.example.com -e noreply@example.com \
+           -S 1234 -o 'Twisted Matrix Labs'
+
+'certcreate' may be obtained from <http://divmod.org/trac/wiki/DivmodEpsilon>
+
+-----BEGIN CERTIFICATE-----
+MIICwjCCAisCAgTSMA0GCSqGSIb3DQEBBAUAMIGoMREwDwYDVQQLEwhTZWN1cml0
+eTEcMBoGA1UEChMTVHdpc3RlZCBNYXRyaXggTGFiczEeMBwGA1UEAxMVZmFrZS1j
+YS0yLmV4YW1wbGUuY29tMREwDwYDVQQIEwhOZXcgWW9yazELMAkGA1UEBhMCVVMx
+IjAgBgkqhkiG9w0BCQEWE25vcmVwbHlAZXhhbXBsZS5jb20xETAPBgNVBAcTCE5l
+dyBZb3JrMB4XDTEwMDkyMTAxMjUzMVoXDTExMDkyMTAxMjUzMVowgagxETAPBgNV
+BAsTCFNlY3VyaXR5MRwwGgYDVQQKExNUd2lzdGVkIE1hdHJpeCBMYWJzMR4wHAYD
+VQQDExVmYWtlLWNhLTIuZXhhbXBsZS5jb20xETAPBgNVBAgTCE5ldyBZb3JrMQsw
+CQYDVQQGEwJVUzEiMCAGCSqGSIb3DQEJARYTbm9yZXBseUBleGFtcGxlLmNvbTER
+MA8GA1UEBxMITmV3IFlvcmswgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAMNn
+b3EcKqBedQed1qJC4uGVx8PYmn2vxL3QwCVW1w0VjpZXyhCq/2VrYBhJAXRzpfvE
+dCqhtJKcdifwavUrTfr4yXu1MvWA0YuaAkj1TbmlHHQYACf3h+MPOXroYzhT72bO
+FSSLDWuitj0ozR+2Fk15QwLWUxaYLmwylxXAf7vpAgMBAAEwDQYJKoZIhvcNAQEE
+BQADgYEADB2N6VHHhm5M2rJqqGDXMm2dU+7abxiuN+PUygN2LXIsqdGBS6U7/rta
+lJNVeRaM423c8imfuklkIBG9Msn5+xm1xIMIULoi/efActDLbsX1x6IyHQrG5aDP
+/RMKBio9RjS8ajgSwyYVUZiCZBsn/T0/JS8K61YLpiv4Tg8uXmM=
+-----END CERTIFICATE-----
diff --git a/ThirdParty/Twisted/twisted/internet/test/fakeendpoint.py b/ThirdParty/Twisted/twisted/internet/test/fakeendpoint.py
new file mode 100644
index 0000000..dbb7419
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/fakeendpoint.py
@@ -0,0 +1,66 @@
+# -*- test-case-name: twisted.internet.test.test_endpoints -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Fake client and server endpoint string parser plugins for testing purposes.
+"""
+
+from zope.interface.declarations import implements
+from twisted.plugin import IPlugin
+from twisted.internet.interfaces import (IStreamClientEndpoint,
+                                         IStreamServerEndpoint,
+                                         IStreamClientEndpointStringParser,
+                                         IStreamServerEndpointStringParser)
+
+class PluginBase(object):
+    implements(IPlugin)
+
+    def __init__(self, pfx):
+        self.prefix = pfx
+
+
+
+class FakeClientParser(PluginBase):
+
+    implements(IStreamClientEndpointStringParser)
+
+    def parseStreamClient(self, *a, **kw):
+        return StreamClient(self, a, kw)
+
+
+
+class FakeParser(PluginBase):
+
+    implements(IStreamServerEndpointStringParser)
+
+    def parseStreamServer(self, *a, **kw):
+        return StreamServer(self, a, kw)
+
+
+
+class EndpointBase(object):
+
+    def __init__(self, parser, args, kwargs):
+        self.parser = parser
+        self.args = args
+        self.kwargs = kwargs
+
+
+
+class StreamClient(EndpointBase):
+
+    implements(IStreamClientEndpoint)
+
+
+
+class StreamServer(EndpointBase):
+
+    implements(IStreamServerEndpoint)
+
+
+
+# Instantiate plugin interface providers to register them.
+fake = FakeParser('fake')
+fakeClient = FakeClientParser('cfake')
+
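
Since the registration here relies purely on module-level instances, a quick
way to see what twisted.plugin keys on is to check the interfaces those
instances advertise.  This is a sketch for illustration only and is not how
test_endpoints drives the plugins:

    from twisted.plugin import IPlugin
    from twisted.internet.interfaces import (
        IStreamClientEndpointStringParser, IStreamServerEndpointStringParser)
    from twisted.internet.test.fakeendpoint import fake, fakeClient

    # Both instances are IPlugin providers, which is what plugin discovery
    # looks for; the parser interfaces then distinguish server from client.
    assert IPlugin.providedBy(fake)
    assert IStreamServerEndpointStringParser.providedBy(fake)
    assert IPlugin.providedBy(fakeClient)
    assert IStreamClientEndpointStringParser.providedBy(fakeClient)
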
diff --git a/ThirdParty/Twisted/twisted/internet/test/modulehelpers.py b/ThirdParty/Twisted/twisted/internet/test/modulehelpers.py
new file mode 100644
index 0000000..52736e2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/modulehelpers.py
@@ -0,0 +1,43 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Testing helpers related to the module system.
+"""
+
+from __future__ import division, absolute_import
+
+__all__ = ['NoReactor']
+
+import twisted.internet
+from twisted.test.test_twisted import SetAsideModule
+
+
+class NoReactor(SetAsideModule):
+    """
+    Context manager that uninstalls the reactor, if any, and then restores it
+    afterwards.
+    """
+
+    def __init__(self):
+        SetAsideModule.__init__(self, "twisted.internet.reactor")
+
+
+    def __enter__(self):
+        SetAsideModule.__enter__(self)
+        if "twisted.internet.reactor" in self.modules:
+            del twisted.internet.reactor
+
+
+    def __exit__(self, excType, excValue, traceback):
+        SetAsideModule.__exit__(self, excType, excValue, traceback)
+        # Clean up 'reactor' attribute that may have been set on
+        # twisted.internet:
+        reactor = self.modules.get("twisted.internet.reactor", None)
+        if reactor is not None:
+            twisted.internet.reactor = reactor
+        else:
+            try:
+                del twisted.internet.reactor
+            except AttributeError:
+                pass
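
A minimal usage sketch for NoReactor, assuming only that
twisted.internet.selectreactor is importable (it is used here purely as an
example of reactor-installing code to isolate):

    from twisted.internet.test.modulehelpers import NoReactor

    with NoReactor():
        # Inside the block twisted.internet.reactor is unset, so code that
        # installs a specific reactor can run without disturbing the global
        # reactor used by the rest of the process.
        from twisted.internet import selectreactor
        selectreactor.install()
        from twisted.internet import reactor
        assert isinstance(reactor, selectreactor.SelectReactor)
    # On exit, whatever reactor (if any) was installed before is restored.
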
diff --git a/ThirdParty/Twisted/twisted/internet/test/process_gireactornocompat.py b/ThirdParty/Twisted/twisted/internet/test/process_gireactornocompat.py
new file mode 100644
index 0000000..d2ae75c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/process_gireactornocompat.py
@@ -0,0 +1,22 @@
+import sys
+
+# Override theSystemPath so it throws KeyError on gi.pygtkcompat:
+from twisted.python import modules
+modules.theSystemPath = modules.PythonPath([], moduleDict={})
+
+# Now, when we import gireactor it shouldn't use pygtkcompat, and should
+# instead prevent gobject from being importable:
+from twisted.internet import gireactor
+for name in gireactor._PYGTK_MODULES:
+    if sys.modules[name] is not None:
+        sys.stdout.write("failure, sys.modules[%r] is %r, instead of None" %
+                         (name, sys.modules["gobject"]))
+        sys.exit(0)
+
+try:
+    import gobject
+except ImportError:
+    sys.stdout.write("success")
+else:
+    sys.stdout.write("failure: %s was imported" % (gobject.__path__,))
+
diff --git a/ThirdParty/Twisted/twisted/internet/test/process_helper.py b/ThirdParty/Twisted/twisted/internet/test/process_helper.py
new file mode 100644
index 0000000..b921697
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/process_helper.py
@@ -0,0 +1,33 @@
+
+# A program which exits after starting a child which inherits its
+# stdin/stdout/stderr and keeps them open until stdin is closed.
+
+import sys, os
+
+def grandchild():
+    sys.stdout.write('grandchild started')
+    sys.stdout.flush()
+    sys.stdin.read()
+
+def main():
+    if sys.argv[1] == 'child':
+        if sys.argv[2] == 'windows':
+            import win32api as api, win32process as proc
+            info = proc.STARTUPINFO()
+            info.hStdInput = api.GetStdHandle(api.STD_INPUT_HANDLE)
+            info.hStdOutput = api.GetStdHandle(api.STD_OUTPUT_HANDLE)
+            info.hStdError = api.GetStdHandle(api.STD_ERROR_HANDLE)
+            python = sys.executable
+            scriptDir = os.path.dirname(__file__)
+            scriptName = os.path.basename(__file__)
+            proc.CreateProcess(
+                None, " ".join((python, scriptName, "grandchild")), None,
+                None, 1, 0, os.environ, scriptDir, info)
+        else:
+            if os.fork() == 0:
+                grandchild()
+    else:
+        grandchild()
+
+if __name__ == '__main__':
+    main()
diff --git a/ThirdParty/Twisted/twisted/internet/test/reactormixins.py b/ThirdParty/Twisted/twisted/internet/test/reactormixins.py
new file mode 100644
index 0000000..3080ebe
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/reactormixins.py
@@ -0,0 +1,315 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Utilities for unit testing reactor implementations.
+
+The main feature of this module is L{ReactorBuilder}, a base class for use when
+writing interface/blackbox tests for reactor implementations.  Test case classes
+for reactor features should subclass L{ReactorBuilder} instead of
+L{SynchronousTestCase}.  All of the features of L{SynchronousTestCase} will be
+available.  Additionally, the tests will automatically be applied to all
+available reactor implementations.
+"""
+
+from __future__ import division, absolute_import
+
+__metaclass__ = type
+
+__all__ = ['TestTimeoutError', 'ReactorBuilder', 'needsRunningReactor']
+
+import os, signal, time
+
+from twisted.python.compat import _PY3
+from twisted.trial.unittest import SynchronousTestCase, SkipTest
+from twisted.trial.util import DEFAULT_TIMEOUT_DURATION, acquireAttribute
+from twisted.python.runtime import platform
+from twisted.python._reflectpy3 import namedAny
+from twisted.python.deprecate import _fullyQualifiedName as fullyQualifiedName
+
+from twisted.python import log
+from twisted.python.failure import Failure
+
+
+# Access private APIs.
+if platform.isWindows():
+    process = None
+elif _PY3:
+    # Enable this on Python 3 when twisted.internet.process is ported.
+    # See #5968.
+    process = None
+else:
+    from twisted.internet import process
+
+
+
+class TestTimeoutError(Exception):
+    """
+    The reactor was still running after the timeout period elapsed in
+    L{ReactorBuilder.runReactor}.
+    """
+
+
+
+def needsRunningReactor(reactor, thunk):
+    """
+    Various functions within these tests need an already-running reactor at
+    some point.  They need to stop the reactor when the test has completed, and
+    that means calling reactor.stop().  However, reactor.stop() raises an
+    exception if the reactor isn't already running, so if the L{Deferred} that
+    a particular API under test returns fires synchronously (as especially an
+    endpoint's C{connect()} method may do, if the connect is to a local
+    interface address) then the test won't be able to stop the reactor being
+    tested and finish.  So this calls C{thunk} only once C{reactor} is running.
+
+    (This is just an alias for
+    L{twisted.internet.interfaces.IReactorCore.callWhenRunning} on the given
+    reactor parameter, in order to centrally reference the above paragraph
+    rather than repeating it everywhere as a comment.)
+
+    @param reactor: the L{twisted.internet.interfaces.IReactorCore} under test
+
+    @param thunk: a 0-argument callable, which eventually finishes the test in
+        question, probably in a L{Deferred} callback.
+    """
+    reactor.callWhenRunning(thunk)
+
+
+
+class ReactorBuilder:
+    """
+    L{SynchronousTestCase} mixin which provides a reactor-creation API.  This
+    mixin defines C{setUp} and C{tearDown}, so mix it in before
+    L{SynchronousTestCase} or call its methods from the overridden ones in the
+    subclass.
+
+    @cvar skippedReactors: A dict mapping FQPN strings of reactors for
+        which the tests defined by this class will be skipped to strings
+        giving the skip message.
+    @cvar requiredInterfaces: A C{list} of interfaces which the reactor must
+        provide or these tests will be skipped.  The default, C{None}, means
+        that no interfaces are required.
+    @ivar reactorFactory: A no-argument callable which returns the reactor to
+        use for testing.
+    @ivar originalHandler: The SIGCHLD handler which was installed when setUp
+        ran and which will be re-installed when tearDown runs.
+    @ivar _reactors: A list of FQPN strings giving the reactors for which
+        L{SynchronousTestCase}s will be created.
+    """
+
+    _reactors = [
+        # Select works everywhere
+        "twisted.internet.selectreactor.SelectReactor",
+        ]
+
+    if platform.isWindows():
+        # PortableGtkReactor is only really interesting on Windows,
+        # but not really Windows specific; if you want you can
+        # temporarily move this up to the all-platforms list to test
+        # it on other platforms.  It's not there in general because
+        # it's not _really_ worth it to support on other platforms,
+        # since no one really wants to use it on other platforms.
+        _reactors.extend([
+                "twisted.internet.gtk2reactor.PortableGtkReactor",
+                "twisted.internet.gireactor.PortableGIReactor",
+                "twisted.internet.gtk3reactor.PortableGtk3Reactor",
+                "twisted.internet.win32eventreactor.Win32Reactor",
+                "twisted.internet.iocpreactor.reactor.IOCPReactor"])
+    else:
+        _reactors.extend([
+                "twisted.internet.glib2reactor.Glib2Reactor",
+                "twisted.internet.gtk2reactor.Gtk2Reactor",
+                "twisted.internet.gireactor.GIReactor",
+                "twisted.internet.gtk3reactor.Gtk3Reactor"])
+        if platform.isMacOSX():
+            _reactors.append("twisted.internet.cfreactor.CFReactor")
+        else:
+            _reactors.extend([
+                    "twisted.internet.pollreactor.PollReactor",
+                    "twisted.internet.epollreactor.EPollReactor"])
+            if not platform.isLinux():
+                # Presumably Linux is not going to start supporting kqueue, so
+                # skip even trying this configuration.
+                _reactors.extend([
+                        # Support KQueue on non-OS-X POSIX platforms for now.
+                        "twisted.internet.kqreactor.KQueueReactor",
+                        ])
+
+    reactorFactory = None
+    originalHandler = None
+    requiredInterfaces = None
+    skippedReactors = {}
+
+    def setUp(self):
+        """
+        Clear the SIGCHLD handler, if there is one, to ensure an environment
+        like the one which exists prior to a call to L{reactor.run}.
+        """
+        if not platform.isWindows():
+            self.originalHandler = signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+
+
+    def tearDown(self):
+        """
+        Restore the original SIGCHLD handler and reap processes as long as
+        there seem to be any remaining.
+        """
+        if self.originalHandler is not None:
+            signal.signal(signal.SIGCHLD, self.originalHandler)
+        if process is not None:
+            begin = time.time()
+            while process.reapProcessHandlers:
+                log.msg(
+                    "ReactorBuilder.tearDown reaping some processes %r" % (
+                        process.reapProcessHandlers,))
+                process.reapAllProcesses()
+
+                # The process should exit on its own.  However, if it
+                # doesn't, we're stuck in this loop forever.  To avoid
+                # hanging the test suite, eventually give the process some
+                # help exiting and move on.
+                time.sleep(0.001)
+                if time.time() - begin > 60:
+                    for pid in process.reapProcessHandlers:
+                        os.kill(pid, signal.SIGKILL)
+                    raise Exception(
+                        "Timeout waiting for child processes to exit: %r" % (
+                            process.reapProcessHandlers,))
+
+
+    def unbuildReactor(self, reactor):
+        """
+        Clean up any resources which may have been allocated for the given
+        reactor by its creation or by a test which used it.
+        """
+        # Chris says:
+        #
+        # XXX These explicit calls to clean up the waker (and any other
+        # internal readers) should become obsolete when bug #3063 is
+        # fixed. -radix, 2008-02-29. Fortunately it should probably cause an
+        # error when bug #3063 is fixed, so it should be removed in the same
+        # branch that fixes it.
+        #
+        # -exarkun
+        reactor._uninstallHandler()
+        if getattr(reactor, '_internalReaders', None) is not None:
+            for reader in reactor._internalReaders:
+                reactor.removeReader(reader)
+                reader.connectionLost(None)
+            reactor._internalReaders.clear()
+
+        # Here's an extra thing unrelated to wakers but necessary for
+        # cleaning up after the reactors we make.  -exarkun
+        reactor.disconnectAll()
+
+        # It would also be bad if any timed calls left over were allowed to
+        # run.
+        calls = reactor.getDelayedCalls()
+        for c in calls:
+            c.cancel()
+
+
+    def buildReactor(self):
+        """
+        Create and return a reactor using C{self.reactorFactory}.
+        """
+        try:
+            from twisted.internet.cfreactor import CFReactor
+            from twisted.internet import reactor as globalReactor
+        except ImportError:
+            pass
+        else:
+            if (isinstance(globalReactor, CFReactor)
+                and self.reactorFactory is CFReactor):
+                raise SkipTest(
+                    "CFReactor uses APIs which manipulate global state, "
+                    "so it's not safe to run its own reactor-builder tests "
+                    "under itself")
+        try:
+            reactor = self.reactorFactory()
+        except:
+            # Unfortunately, not all errors which result in a reactor
+            # being unusable are detectable without actually
+            # instantiating the reactor.  So we catch some more here
+            # and skip the test if necessary.  We also log it to aid
+            # with debugging, but flush the logged error so the test
+            # doesn't fail.
+            log.err(None, "Failed to install reactor")
+            self.flushLoggedErrors()
+            raise SkipTest(Failure().getErrorMessage())
+        else:
+            if self.requiredInterfaces is not None:
+                missing = [
+                    required for required in self.requiredInterfaces
+                    if not required.providedBy(reactor)]
+                if missing:
+                    self.unbuildReactor(reactor)
+                    raise SkipTest("%s does not provide %s" % (
+                        fullyQualifiedName(reactor.__class__),
+                        ",".join([fullyQualifiedName(x) for x in missing])))
+        self.addCleanup(self.unbuildReactor, reactor)
+        return reactor
+
+
+    def getTimeout(self):
+        """
+        Determine how long to run the test before considering it failed.
+
+        @return: A C{int} or C{float} giving a number of seconds.
+        """
+        return acquireAttribute(self._parents, 'timeout', DEFAULT_TIMEOUT_DURATION)
+
+
+    def runReactor(self, reactor, timeout=None):
+        """
+        Run the reactor for at most the given amount of time.
+
+        @param reactor: The reactor to run.
+
+        @type timeout: C{int} or C{float}
+        @param timeout: The maximum amount of time, specified in seconds, to
+            allow the reactor to run.  If the reactor is still running after
+            this much time has elapsed, it will be stopped and an exception
+            raised.  If C{None}, the default test method timeout imposed by
+            Trial will be used.  This depends on the L{IReactorTime}
+            implementation of C{reactor} for correct operation.
+
+        @raise TestTimeoutError: If the reactor is still running after
+            C{timeout} seconds.
+        """
+        if timeout is None:
+            timeout = self.getTimeout()
+
+        timedOut = []
+        def stop():
+            timedOut.append(None)
+            reactor.stop()
+
+        reactor.callLater(timeout, stop)
+        reactor.run()
+        if timedOut:
+            raise TestTimeoutError(
+                "reactor still running after %s seconds" % (timeout,))
+
+
+    def makeTestCaseClasses(cls):
+        """
+        Create a L{SynchronousTestCase} subclass which mixes in C{cls} for each
+        known reactor and return a dict mapping their names to them.
+        """
+        classes = {}
+        for reactor in cls._reactors:
+            shortReactorName = reactor.split(".")[-1]
+            name = (cls.__name__ + "." + shortReactorName).replace(".", "_")
+            class testcase(cls, SynchronousTestCase):
+                __module__ = cls.__module__
+                if reactor in cls.skippedReactors:
+                    skip = cls.skippedReactors[reactor]
+                try:
+                    reactorFactory = namedAny(reactor)
+                except:
+                    skip = Failure().getErrorMessage()
+            testcase.__name__ = name
+            classes[testcase.__name__] = testcase
+        return classes
+    makeTestCaseClasses = classmethod(makeTestCaseClasses)
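
To make the intended usage concrete, here is a minimal sketch relying only on
what this file defines: a builder subclass with ordinary test_* methods,
multiplied across reactors by makeTestCaseClasses().  The class and test names
are illustrative, not part of the imported sources:

    from twisted.internet.test.reactormixins import ReactorBuilder

    class CallLaterTestsBuilder(ReactorBuilder):
        """
        Example builder; the generated classes also mix in
        SynchronousTestCase, so the usual assertion methods are available.
        """
        def test_callLaterRuns(self):
            # buildReactor() skips the test if this reactor cannot be
            # instantiated on the current platform.
            reactor = self.buildReactor()
            fired = []
            def record():
                fired.append(True)
                reactor.stop()
            reactor.callLater(0, record)
            # runReactor() raises TestTimeoutError if the reactor is still
            # running once the (trial-derived) timeout elapses.
            self.runReactor(reactor)
            self.assertEqual(fired, [True])

    # One SynchronousTestCase subclass per known reactor implementation,
    # e.g. CallLaterTestsBuilder_SelectReactor.
    globals().update(CallLaterTestsBuilder.makeTestCaseClasses())
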
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_abstract.py b/ThirdParty/Twisted/twisted/internet/test/test_abstract.py
new file mode 100644
index 0000000..05e105b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_abstract.py
@@ -0,0 +1,58 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.abstract}, a collection of APIs for implementing
+reactors.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import SynchronousTestCase
+
+from twisted.internet.abstract import isIPv6Address
+
+class IPv6AddressTests(SynchronousTestCase):
+    """
+    Tests for L{isIPv6Address}, a function for determining if a particular
+    string is an IPv6 address literal.
+    """
+    def test_empty(self):
+        """
+        The empty string is not an IPv6 address literal.
+        """
+        self.assertFalse(isIPv6Address(""))
+
+
+    def test_colon(self):
+        """
+        A single C{":"} is not an IPv6 address literal.
+        """
+        self.assertFalse(isIPv6Address(":"))
+
+
+    def test_loopback(self):
+        """
+        C{"::1"} is the IPv6 loopback address literal.
+        """
+        self.assertTrue(isIPv6Address("::1"))
+
+
+    def test_scopeID(self):
+        """
+        An otherwise valid IPv6 address literal may also include a C{"%"}
+        followed by an arbitrary scope identifier.
+        """
+        self.assertTrue(isIPv6Address("fe80::1%eth0"))
+        self.assertTrue(isIPv6Address("fe80::2%1"))
+        self.assertTrue(isIPv6Address("fe80::3%en2"))
+
+
+    def test_invalidWithScopeID(self):
+        """
+        An otherwise invalid IPv6 address literal is still invalid with a
+        trailing scope identifier.
+        """
+        self.assertFalse(isIPv6Address("%eth0"))
+        self.assertFalse(isIPv6Address(":%eth0"))
+        self.assertFalse(isIPv6Address("hello%eth0"))
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_address.py b/ThirdParty/Twisted/twisted/internet/test/test_address.py
new file mode 100644
index 0000000..a0ebc81
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_address.py
@@ -0,0 +1,318 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from __future__ import division, absolute_import
+
+import re
+import os
+
+from twisted.trial import unittest
+from twisted.internet.address import IPv4Address, UNIXAddress, IPv6Address
+
+try:
+    os.symlink
+except AttributeError:
+    symlinkSkip = "Platform does not support symlinks"
+else:
+    symlinkSkip = None
+
+
+class AddressTestCaseMixin(object):
+    def test_addressComparison(self):
+        """
+        Two different address instances sharing the same properties are
+        considered equal by C{==} and are not considered unequal by C{!=}.
+
+        Note: When applied via UNIXAddress class, this uses the same
+        filename for both objects being compared.
+        """
+        self.assertTrue(self.buildAddress() == self.buildAddress())
+        self.assertFalse(self.buildAddress() != self.buildAddress())
+
+
+    def _stringRepresentation(self, stringFunction):
+        """
+        Verify that the string representation of an address object conforms to a
+        simple pattern (the usual one for Python object reprs) and contains
+        values which accurately reflect the attributes of the address.
+        """
+        addr = self.buildAddress()
+        pattern = "".join([
+           "^",
+           "([^\(]+Address)", # class name,
+           "\(",       # opening bracket,
+           "([^)]+)",  # arguments,
+           "\)",       # closing bracket,
+           "$"
+        ])
+        stringValue = stringFunction(addr)
+        m = re.match(pattern, stringValue)
+        self.assertNotEquals(
+            None, m,
+            "%s does not match the standard __str__ pattern "
+            "ClassName(arg1, arg2, etc)" % (stringValue,))
+        self.assertEqual(addr.__class__.__name__, m.group(1))
+
+        args = [x.strip() for x in m.group(2).split(",")]
+        self.assertEqual(
+            args,
+            [argSpec[1] % (getattr(addr, argSpec[0]),)
+             for argSpec in self.addressArgSpec])
+
+
+    def test_str(self):
+        """
+        C{str} can be used to get a string representation of an address instance
+        containing information about that address.
+        """
+        self._stringRepresentation(str)
+
+
+    def test_repr(self):
+        """
+        C{repr} can be used to get a string representation of an address
+        instance containing information about that address.
+        """
+        self._stringRepresentation(repr)
+
+
+    def test_hash(self):
+        """
+        C{__hash__} can be used to get a hash of an address, allowing
+        addresses to be used as keys in dictionaries, for instance.
+        """
+        addr = self.buildAddress()
+        d = {addr: True}
+        self.assertTrue(d[self.buildAddress()])
+
+
+    def test_differentNamesComparison(self):
+        """
+        Check that comparison operators work correctly on address objects
+        when a different name is passed in.
+        """
+        self.assertFalse(self.buildAddress() == self.buildDifferentAddress())
+        self.assertFalse(self.buildDifferentAddress() == self.buildAddress())
+
+        self.assertTrue(self.buildAddress() != self.buildDifferentAddress())
+        self.assertTrue(self.buildDifferentAddress() != self.buildAddress())
+
+
+    def assertDeprecations(self, testMethod, message):
+        """
+        Assert that a DeprecationWarning with the given message was
+        emitted against the given method.
+        """
+        warnings = self.flushWarnings([testMethod])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(warnings[0]['message'], message)
+        self.assertEqual(len(warnings), 1)
+
+
+
+class IPv4AddressTestCaseMixin(AddressTestCaseMixin):
+    addressArgSpec = (("type", "%s"), ("host", "%r"), ("port", "%d"))
+
+
+
+class IPv4AddressTCPTestCase(unittest.SynchronousTestCase,
+                             IPv4AddressTestCaseMixin):
+    def buildAddress(self):
+        """
+        Create an arbitrary new L{IPv4Address} instance with a C{"TCP"}
+        type.  A new instance is created for each call, but always for the
+        same address.
+        """
+        return IPv4Address("TCP", "127.0.0.1", 0)
+
+
+    def buildDifferentAddress(self):
+        """
+        Like L{buildAddress}, but with a different fixed address.
+        """
+        return IPv4Address("TCP", "127.0.0.2", 0)
+
+
+    def test_bwHackDeprecation(self):
+        """
+        If a value is passed for the C{_bwHack} parameter to L{IPv4Address},
+        a deprecation warning is emitted.
+        """
+        # Construct this for warning side-effects, disregard the actual object.
+        IPv4Address("TCP", "127.0.0.3", 0, _bwHack="TCP")
+
+        message = (
+            "twisted.internet.address.IPv4Address._bwHack is deprecated "
+            "since Twisted 11.0")
+        return self.assertDeprecations(self.test_bwHackDeprecation, message)
+
+
+
+class IPv4AddressUDPTestCase(unittest.SynchronousTestCase,
+                             IPv4AddressTestCaseMixin):
+    def buildAddress(self):
+        """
+        Create an arbitrary new L{IPv4Address} instance with a C{"UDP"}
+        type.  A new instance is created for each call, but always for the
+        same address.
+        """
+        return IPv4Address("UDP", "127.0.0.1", 0)
+
+
+    def buildDifferentAddress(self):
+        """
+        Like L{buildAddress}, but with a different fixed address.
+        """
+        return IPv4Address("UDP", "127.0.0.2", 0)
+
+
+    def test_bwHackDeprecation(self):
+        """
+        If a value is passed for the C{_bwHack} parameter to L{IPv4Address},
+        a deprecation warning is emitted.
+        """
+        # Construct this for warning side-effects, disregard the actual object.
+        IPv4Address("UDP", "127.0.0.3", 0, _bwHack="UDP")
+
+        message = (
+            "twisted.internet.address.IPv4Address._bwHack is deprecated "
+            "since Twisted 11.0")
+        return self.assertDeprecations(self.test_bwHackDeprecation, message)
+
+
+
+class IPv6AddressTestCase(unittest.SynchronousTestCase, AddressTestCaseMixin):
+    addressArgSpec = (("type", "%s"), ("host", "%r"), ("port", "%d"))
+
+    def buildAddress(self):
+        """
+        Create an arbitrary new L{IPv6Address} instance with a C{"TCP"}
+        type.  A new instance is created for each call, but always for the
+        same address.
+        """
+        return IPv6Address("TCP", "::1", 0)
+
+
+    def buildDifferentAddress(self):
+        """
+        Like L{buildAddress}, but with a different fixed address.
+        """
+        return IPv6Address("TCP", "::2", 0)
+
+
+
+class UNIXAddressTestCase(unittest.SynchronousTestCase, AddressTestCaseMixin):
+    addressArgSpec = (("name", "%r"),)
+
+    def setUp(self):
+        self._socketAddress = self.mktemp()
+        self._otherAddress = self.mktemp()
+
+
+    def buildAddress(self):
+        """
+        Create an arbitrary new L{UNIXAddress} instance.  A new instance is
+        created for each call, but always for the same address.
+        """
+        return UNIXAddress(self._socketAddress)
+
+
+    def buildDifferentAddress(self):
+        """
+        Like L{buildAddress}, but with a different fixed address.
+        """
+        return UNIXAddress(self._otherAddress)
+
+
+    def test_comparisonOfLinkedFiles(self):
+        """
+        UNIXAddress objects compare as equal if they link to the same file.
+        """
+        linkName = self.mktemp()
+        self.fd = open(self._socketAddress, 'w')
+        os.symlink(os.path.abspath(self._socketAddress), linkName)
+        self.assertTrue(
+            UNIXAddress(self._socketAddress) == UNIXAddress(linkName))
+        self.assertTrue(
+            UNIXAddress(linkName) == UNIXAddress(self._socketAddress))
+    test_comparisonOfLinkedFiles.skip = symlinkSkip
+
+
+    def test_hashOfLinkedFiles(self):
+        """
+        UNIXAddress objects that compare as equal have the same hash value.
+        """
+        linkName = self.mktemp()
+        self.fd = open(self._socketAddress, 'w')
+        os.symlink(os.path.abspath(self._socketAddress), linkName)
+        self.assertEqual(
+            hash(UNIXAddress(self._socketAddress)), hash(UNIXAddress(linkName)))
+    test_hashOfLinkedFiles.skip = symlinkSkip
+
+
+    def test_bwHackDeprecation(self):
+        """
+        If a value is passed for the C{_bwHack} parameter to L{UNIXAddress},
+        a deprecation warning is emitted.
+        """
+        # Construct this for warning side-effects, disregard the actual object.
+        UNIXAddress(self.mktemp(), _bwHack='UNIX')
+
+        message = (
+            "twisted.internet.address.UNIXAddress._bwHack is deprecated "
+            "since Twisted 11.0")
+        return self.assertDeprecations(self.test_bwHackDeprecation, message)
+
+
+
+class EmptyUNIXAddressTestCase(unittest.SynchronousTestCase,
+                               AddressTestCaseMixin):
+    """
+    Tests for L{UNIXAddress} operations involving a C{None} address.
+    """
+    addressArgSpec = (("name", "%r"),)
+
+    def setUp(self):
+        self._socketAddress = self.mktemp()
+
+
+    def buildAddress(self):
+        """
+        Create an arbitrary new L{UNIXAddress} instance.  A new instance is
+        created for each call, but always for the same address.
+        """
+        return UNIXAddress(self._socketAddress)
+
+
+    def buildDifferentAddress(self):
+        """
+        Like L{buildAddress}, but with a fixed address of C{None}.
+        """
+        return UNIXAddress(None)
+
+
+    def test_comparisonOfLinkedFiles(self):
+        """
+        A UNIXAddress referring to a C{None} address does not compare equal to a
+        UNIXAddress referring to a symlink.
+        """
+        linkName = self.mktemp()
+        self.fd = open(self._socketAddress, 'w')
+        os.symlink(os.path.abspath(self._socketAddress), linkName)
+        self.assertTrue(
+            UNIXAddress(self._socketAddress) != UNIXAddress(None))
+        self.assertTrue(
+            UNIXAddress(None) != UNIXAddress(self._socketAddress))
+    test_comparisonOfLinkedFiles.skip = symlinkSkip
+
+
+    def test_emptyHash(self):
+        """
+        C{__hash__} can be used to get a hash of an address, even one referring
+        to C{None} rather than a real path.
+        """
+        addr = self.buildDifferentAddress()
+        d = {addr: True}
+        self.assertTrue(d[self.buildDifferentAddress()])
+
+
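
As a concrete illustration of the pattern _stringRepresentation enforces, the
checks below mirror the same regex and addressArgSpec logic against a real
IPv4Address; exact whitespace inside the parentheses is deliberately not
asserted, since the tests above do not pin it down either:

    import re
    from twisted.internet.address import IPv4Address

    addr = IPv4Address("TCP", "127.0.0.1", 8080)
    m = re.match(r"^([^\(]+Address)\(([^)]+)\)$", str(addr))
    assert m is not None and m.group(1) == "IPv4Address"
    # addressArgSpec: type via %s, host via %r, port via %d.
    args = [piece.strip() for piece in m.group(2).split(",")]
    assert args == ["TCP", "'127.0.0.1'", "8080"]
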
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_base.py b/ThirdParty/Twisted/twisted/internet/test/test_base.py
new file mode 100644
index 0000000..87fbf79
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_base.py
@@ -0,0 +1,279 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.base}.
+"""
+
+import socket
+try:
+    from Queue import Queue
+except ImportError:
+    from queue import Queue
+
+from zope.interface import implementer
+
+from twisted.python.threadpool import ThreadPool
+from twisted.python._utilpy3 import setIDFunction
+from twisted.internet.interfaces import IReactorTime, IReactorThreads
+from twisted.internet.error import DNSLookupError
+from twisted.internet.base import ThreadedResolver, DelayedCall
+from twisted.internet.task import Clock
+from twisted.trial.unittest import TestCase
+
+
+ at implementer(IReactorTime, IReactorThreads)
+class FakeReactor(object):
+    """
+    A fake reactor implementation which just supports enough reactor APIs for
+    L{ThreadedResolver}.
+    """
+
+    def __init__(self):
+        self._clock = Clock()
+        self.callLater = self._clock.callLater
+
+        self._threadpool = ThreadPool()
+        self._threadpool.start()
+        self.getThreadPool = lambda: self._threadpool
+
+        self._threadCalls = Queue()
+
+
+    def callFromThread(self, f, *args, **kwargs):
+        self._threadCalls.put((f, args, kwargs))
+
+
+    def _runThreadCalls(self):
+        f, args, kwargs = self._threadCalls.get()
+        f(*args, **kwargs)
+
+
+    def _stop(self):
+        self._threadpool.stop()
+
+
+
+class ThreadedResolverTests(TestCase):
+    """
+    Tests for L{ThreadedResolver}.
+    """
+    def test_success(self):
+        """
+        L{ThreadedResolver.getHostByName} returns a L{Deferred} which fires
+        with the value returned by the call to L{socket.gethostbyname} in the
+        threadpool of the reactor passed to L{ThreadedResolver.__init__}.
+        """
+        ip = "10.0.0.17"
+        name = "foo.bar.example.com"
+        timeout = 30
+
+        reactor = FakeReactor()
+        self.addCleanup(reactor._stop)
+
+        lookedUp = []
+        resolvedTo = []
+        def fakeGetHostByName(name):
+            lookedUp.append(name)
+            return ip
+
+        self.patch(socket, 'gethostbyname', fakeGetHostByName)
+
+        resolver = ThreadedResolver(reactor)
+        d = resolver.getHostByName(name, (timeout,))
+        d.addCallback(resolvedTo.append)
+
+        reactor._runThreadCalls()
+
+        self.assertEqual(lookedUp, [name])
+        self.assertEqual(resolvedTo, [ip])
+
+        # Make sure that any timeout-related stuff gets cleaned up.
+        reactor._clock.advance(timeout + 1)
+        self.assertEqual(reactor._clock.calls, [])
+
+
+    def test_failure(self):
+        """
+        L{ThreadedResolver.getHostByName} returns a L{Deferred} which fires a
+        L{Failure} if the call to L{socket.gethostbyname} raises an exception.
+        """
+        timeout = 30
+
+        reactor = FakeReactor()
+        self.addCleanup(reactor._stop)
+
+        def fakeGetHostByName(name):
+            raise IOError("ENOBUFS (this is a funny joke)")
+
+        self.patch(socket, 'gethostbyname', fakeGetHostByName)
+
+        failedWith = []
+        resolver = ThreadedResolver(reactor)
+        d = resolver.getHostByName("some.name", (timeout,))
+        self.assertFailure(d, DNSLookupError)
+        d.addCallback(failedWith.append)
+
+        reactor._runThreadCalls()
+
+        self.assertEqual(len(failedWith), 1)
+
+        # Make sure that any timeout-related stuff gets cleaned up.
+        reactor._clock.advance(timeout + 1)
+        self.assertEqual(reactor._clock.calls, [])
+
+
+    def test_timeout(self):
+        """
+        If L{socket.gethostbyname} does not complete before the specified
+        timeout elapses, the L{Deferred} returned by
+        L{ThreadedResolver.getHostByName} fails with L{DNSLookupError}.
+        """
+        timeout = 10
+
+        reactor = FakeReactor()
+        self.addCleanup(reactor._stop)
+
+        result = Queue()
+        def fakeGetHostByName(name):
+            raise result.get()
+
+        self.patch(socket, 'gethostbyname', fakeGetHostByName)
+
+        failedWith = []
+        resolver = ThreadedResolver(reactor)
+        d = resolver.getHostByName("some.name", (timeout,))
+        self.assertFailure(d, DNSLookupError)
+        d.addCallback(failedWith.append)
+
+        reactor._clock.advance(timeout - 1)
+        self.assertEqual(failedWith, [])
+        reactor._clock.advance(1)
+        self.assertEqual(len(failedWith), 1)
+
+        # Eventually the socket.gethostbyname does finish - in this case, with
+        # an exception.  Nobody cares, though.
+        result.put(IOError("The I/O was errorful"))
+
+
+
+def nothing():
+    """
+    Function used by L{DelayedCallTests.test_str}.
+    """
+
+
+class DelayedCallTests(TestCase):
+    """
+    Tests for L{DelayedCall}.
+    """
+    def _getDelayedCallAt(self, time):
+        """
+        Get a L{DelayedCall} instance at a given C{time}.
+
+        @param time: The absolute time at which the returned L{DelayedCall}
+            will be scheduled.
+        """
+        def noop(call):
+            pass
+        return DelayedCall(time, lambda: None, (), {}, noop, noop, None)
+
+
+    def setUp(self):
+        """
+        Create two L{DelayedCall} instances scheduled to run at different
+        times.
+        """
+        self.zero = self._getDelayedCallAt(0)
+        self.one = self._getDelayedCallAt(1)
+
+
+    def test_str(self):
+        """
+        The string representation of a L{DelayedCall} instance, as returned by
+        C{str}, includes the unsigned id of the instance, as well as its state,
+        the function to be called, and the function arguments.
+        """
+        dc = DelayedCall(12, nothing, (3, ), {"A": 5}, None, None, lambda: 1.5)
+        ids = {dc: 200}
+        def fakeID(obj):
+            try:
+                return ids[obj]
+            except (TypeError, KeyError):
+                return id(obj)
+        self.addCleanup(setIDFunction, setIDFunction(fakeID))
+        self.assertEqual(
+            str(dc),
+            "<DelayedCall 0xc8 [10.5s] called=0 cancelled=0 nothing(3, A=5)>")
+
+
+    def test_lt(self):
+        """
+        For two instances of L{DelayedCall} C{a} and C{b}, C{a < b} is true
+        if and only if C{a} is scheduled to run before C{b}.
+        """
+        zero, one = self.zero, self.one
+        self.assertTrue(zero < one)
+        self.assertFalse(one < zero)
+        self.assertFalse(zero < zero)
+        self.assertFalse(one < one)
+
+
+    def test_le(self):
+        """
+        For two instances of L{DelayedCall} C{a} and C{b}, C{a <= b} is true
+        if and only if C{a} is scheduled to run before C{b} or at the same
+        time as C{b}.
+        """
+        zero, one = self.zero, self.one
+        self.assertTrue(zero <= one)
+        self.assertFalse(one <= zero)
+        self.assertTrue(zero <= zero)
+        self.assertTrue(one <= one)
+
+
+    def test_gt(self):
+        """
+        For two instances of L{DelayedCall} C{a} and C{b}, C{a > b} is true
+        if and only if C{a} is scheduled to run after C{b}.
+        """
+        zero, one = self.zero, self.one
+        self.assertTrue(one > zero)
+        self.assertFalse(zero > one)
+        self.assertFalse(zero > zero)
+        self.assertFalse(one > one)
+
+
+    def test_ge(self):
+        """
+        For two instances of L{DelayedCall} C{a} and C{b}, C{a >= b} is true
+        if and only if C{a} is scheduled to run after C{b} or at the same
+        time as C{b}.
+        """
+        zero, one = self.zero, self.one
+        self.assertTrue(one >= zero)
+        self.assertFalse(zero >= one)
+        self.assertTrue(zero >= zero)
+        self.assertTrue(one >= one)
+
+
+    def test_eq(self):
+        """
+        A L{DelayedCall} instance is only equal to itself.
+        """
+        # Explicitly use == here, instead of assertEqual, to be more
+        # confident __eq__ is being tested.
+        self.assertFalse(self.zero == self.one)
+        self.assertTrue(self.zero == self.zero)
+        self.assertTrue(self.one == self.one)
+
+
+    def test_ne(self):
+        """
+        A L{DelayedCall} instance is not equal to any other object.
+        """
+        # Explicitly use != here, instead of assertEqual, to be more
+        # confident __ne__ is being tested.
+        self.assertTrue(self.zero != self.one)
+        self.assertFalse(self.zero != self.zero)
+        self.assertFalse(self.one != self.one)
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_baseprocess.py b/ThirdParty/Twisted/twisted/internet/test/test_baseprocess.py
new file mode 100644
index 0000000..750b660
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_baseprocess.py
@@ -0,0 +1,73 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet._baseprocess} which implements process-related
+functionality that is useful on all platforms supporting L{IReactorProcess}.
+"""
+
+__metaclass__ = type
+
+from twisted.python.deprecate import getWarningMethod, setWarningMethod
+from twisted.trial.unittest import TestCase
+from twisted.internet._baseprocess import BaseProcess
+
+
+class BaseProcessTests(TestCase):
+    """
+    Tests for L{BaseProcess}, a parent class for classes which represent
+    processes and which implements functionality common to many different
+    process implementations.
+    """
+    def test_callProcessExited(self):
+        """
+        L{BaseProcess._callProcessExited} calls the C{processExited} method of
+        its C{proto} attribute and passes it a L{Failure} wrapping the given
+        exception.
+        """
+        class FakeProto:
+            reason = None
+
+            def processExited(self, reason):
+                self.reason = reason
+
+        reason = RuntimeError("fake reason")
+        process = BaseProcess(FakeProto())
+        process._callProcessExited(reason)
+        process.proto.reason.trap(RuntimeError)
+        self.assertIdentical(reason, process.proto.reason.value)
+
+
+    def test_callProcessExitedMissing(self):
+        """
+        L{BaseProcess._callProcessExited} emits a L{DeprecationWarning} if the
+        object referred to by its C{proto} attribute has no C{processExited}
+        method.
+        """
+        class FakeProto:
+            pass
+
+        reason = object()
+        process = BaseProcess(FakeProto())
+
+        self.addCleanup(setWarningMethod, getWarningMethod())
+        warnings = []
+        def collect(message, category, stacklevel):
+            warnings.append((message, category, stacklevel))
+        setWarningMethod(collect)
+
+        process._callProcessExited(reason)
+
+        [(message, category, stacklevel)] = warnings
+        self.assertEqual(
+            message,
+            "Since Twisted 8.2, IProcessProtocol.processExited is required.  "
+            "%s.%s must implement it." % (
+                FakeProto.__module__, FakeProto.__name__))
+        self.assertIdentical(category, DeprecationWarning)
+        # The stacklevel doesn't really make sense for this kind of
+        # deprecation.  Requiring it to be 0 will at least avoid pointing to
+        # any part of Twisted or a random part of the application's code, which
+        # I think would be more misleading than having it point inside the
+        # warning system itself. -exarkun
+        self.assertEqual(stacklevel, 0)
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_core.py b/ThirdParty/Twisted/twisted/internet/test/test_core.py
new file mode 100644
index 0000000..76c1eef
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_core.py
@@ -0,0 +1,333 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorCore}.
+"""
+
+from __future__ import division, absolute_import
+
+__metaclass__ = type
+
+import signal
+import time
+import inspect
+
+from twisted.internet.abstract import FileDescriptor
+from twisted.internet.error import ReactorAlreadyRunning, ReactorNotRestartable
+from twisted.internet.defer import Deferred
+from twisted.internet.test.reactormixins import ReactorBuilder
+
+
+class ObjectModelIntegrationMixin(object):
+    """
+    Helpers for tests about the object model of reactor-related objects.
+    """
+    def assertFullyNewStyle(self, instance):
+        """
+        Assert that the given object is an instance of a new-style class and
+        that there are no classic classes in the inheritance hierarchy of
+        that class.
+
+        This is a beneficial condition because PyPy is better able to
+        optimize attribute lookup on such classes.
+        """
+        self.assertIsInstance(instance, object)
+        mro = inspect.getmro(type(instance))
+        for subclass in mro:
+            self.assertTrue(
+                issubclass(subclass, object),
+                "%r is not new-style" % (subclass,))
+
+
+
+class ObjectModelIntegrationTest(ReactorBuilder, ObjectModelIntegrationMixin):
+    """
+    Test details of object model integration against all reactors.
+    """
+
+    def test_newstyleReactor(self):
+        """
+        Checks that all reactors on a platform have a method resolution order
+        containing only new-style classes.
+        """
+        reactor = self.buildReactor()
+        self.assertFullyNewStyle(reactor)
+
+
+
+class SystemEventTestsBuilder(ReactorBuilder):
+    """
+    Builder defining tests relating to L{IReactorCore.addSystemEventTrigger}
+    and L{IReactorCore.fireSystemEvent}.
+    """
+    def test_stopWhenNotStarted(self):
+        """
+        C{reactor.stop()} raises L{RuntimeError} when called before the
+        reactor has been started.
+        """
+        reactor = self.buildReactor()
+        self.assertRaises(RuntimeError, reactor.stop)
+
+
+    def test_stopWhenAlreadyStopped(self):
+        """
+        C{reactor.stop()} raises L{RuntimeError} when called after the reactor
+        has been stopped.
+        """
+        reactor = self.buildReactor()
+        reactor.callWhenRunning(reactor.stop)
+        self.runReactor(reactor)
+        self.assertRaises(RuntimeError, reactor.stop)
+
+
+    def test_callWhenRunningOrder(self):
+        """
+        Functions are run in the order that they were passed to
+        L{reactor.callWhenRunning}.
+        """
+        reactor = self.buildReactor()
+        events = []
+        reactor.callWhenRunning(events.append, "first")
+        reactor.callWhenRunning(events.append, "second")
+        reactor.callWhenRunning(reactor.stop)
+        self.runReactor(reactor)
+        self.assertEqual(events, ["first", "second"])
+
+
+    def test_runningForStartupEvents(self):
+        """
+        The reactor is not running when C{"before"} C{"startup"} triggers are
+        called and is running when C{"during"} and C{"after"} C{"startup"}
+        triggers are called.
+        """
+        reactor = self.buildReactor()
+        state = {}
+        def beforeStartup():
+            state['before'] = reactor.running
+        def duringStartup():
+            state['during'] = reactor.running
+        def afterStartup():
+            state['after'] = reactor.running
+        reactor.addSystemEventTrigger("before", "startup", beforeStartup)
+        reactor.addSystemEventTrigger("during", "startup", duringStartup)
+        reactor.addSystemEventTrigger("after", "startup", afterStartup)
+        reactor.callWhenRunning(reactor.stop)
+        self.assertEqual(state, {})
+        self.runReactor(reactor)
+        self.assertEqual(
+            state,
+            {"before": False,
+             "during": True,
+             "after": True})
+
+
+    def test_signalHandlersInstalledDuringStartup(self):
+        """
+        Signal handlers are installed in response to the C{"during"}
+        C{"startup"} trigger.
+        """
+        reactor = self.buildReactor()
+        phase = [None]
+        def beforeStartup():
+            phase[0] = "before"
+        def afterStartup():
+            phase[0] = "after"
+        reactor.addSystemEventTrigger("before", "startup", beforeStartup)
+        reactor.addSystemEventTrigger("after", "startup", afterStartup)
+
+        sawPhase = []
+        def fakeSignal(signum, action):
+            sawPhase.append(phase[0])
+        self.patch(signal, 'signal', fakeSignal)
+        reactor.callWhenRunning(reactor.stop)
+        self.assertEqual(phase[0], None)
+        self.assertEqual(sawPhase, [])
+        self.runReactor(reactor)
+        self.assertIn("before", sawPhase)
+        self.assertEqual(phase[0], "after")
+
+
+    def test_stopShutDownEvents(self):
+        """
+        C{reactor.stop()} fires all three phases of shutdown event triggers
+        before it makes C{reactor.run()} return.
+        """
+        reactor = self.buildReactor()
+        events = []
+        reactor.addSystemEventTrigger(
+            "before", "shutdown",
+            lambda: events.append(("before", "shutdown")))
+        reactor.addSystemEventTrigger(
+            "during", "shutdown",
+            lambda: events.append(("during", "shutdown")))
+        reactor.addSystemEventTrigger(
+            "after", "shutdown",
+            lambda: events.append(("after", "shutdown")))
+        reactor.callWhenRunning(reactor.stop)
+        self.runReactor(reactor)
+        self.assertEqual(events, [("before", "shutdown"),
+                                   ("during", "shutdown"),
+                                   ("after", "shutdown")])
+
+
+    def test_shutdownFiresTriggersAsynchronously(self):
+        """
+        C{"before"} C{"shutdown"} triggers are not run synchronously from
+        L{reactor.stop}.
+        """
+        reactor = self.buildReactor()
+        events = []
+        reactor.addSystemEventTrigger(
+            "before", "shutdown", events.append, "before shutdown")
+        def stopIt():
+            reactor.stop()
+            events.append("stopped")
+        reactor.callWhenRunning(stopIt)
+        self.assertEqual(events, [])
+        self.runReactor(reactor)
+        self.assertEqual(events, ["stopped", "before shutdown"])
+
+
+    def test_shutdownDisconnectsCleanly(self):
+        """
+        An L{IFileDescriptor.connectionLost} implementation which raises an
+        exception does not prevent the remaining L{IFileDescriptor}s from
+        having their C{connectionLost} method called.
+        """
+        lostOK = [False]
+
+        # Subclass FileDescriptor to get logPrefix
+        class ProblematicFileDescriptor(FileDescriptor):
+            def connectionLost(self, reason):
+                raise RuntimeError("simulated connectionLost error")
+
+        class OKFileDescriptor(FileDescriptor):
+            def connectionLost(self, reason):
+                lostOK[0] = True
+
+        reactor = self.buildReactor()
+
+        # Unfortunately, it is necessary to patch removeAll to directly control
+        # the order of the returned values.  The test is only valid if
+        # ProblematicFileDescriptor comes first.  Also, return these
+        # descriptors only the first time removeAll is called so that if it is
+        # called again the file descriptors aren't re-disconnected.
+        fds = iter([ProblematicFileDescriptor(), OKFileDescriptor()])
+        reactor.removeAll = lambda: fds
+        reactor.callWhenRunning(reactor.stop)
+        self.runReactor(reactor)
+        self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1)
+        self.assertTrue(lostOK[0])
+
+
+    def test_multipleRun(self):
+        """
+        C{reactor.run()} raises L{ReactorAlreadyRunning} when called while
+        the reactor is already running.
+        """
+        events = []
+        def reentrantRun():
+            self.assertRaises(ReactorAlreadyRunning, reactor.run)
+            events.append("tested")
+        reactor = self.buildReactor()
+        reactor.callWhenRunning(reentrantRun)
+        reactor.callWhenRunning(reactor.stop)
+        self.runReactor(reactor)
+        self.assertEqual(events, ["tested"])
+
+
+    def test_runWithAsynchronousBeforeStartupTrigger(self):
+        """
+        When there is a C{'before'} C{'startup'} trigger which returns an
+        unfired L{Deferred}, C{reactor.run()} starts the reactor and does not
+        return until after C{reactor.stop()} is called
+        """
+        events = []
+        def trigger():
+            events.append('trigger')
+            d = Deferred()
+            d.addCallback(callback)
+            reactor.callLater(0, d.callback, None)
+            return d
+        def callback(ignored):
+            events.append('callback')
+            reactor.stop()
+        reactor = self.buildReactor()
+        reactor.addSystemEventTrigger('before', 'startup', trigger)
+        self.runReactor(reactor)
+        self.assertEqual(events, ['trigger', 'callback'])
+
+
+    def test_iterate(self):
+        """
+        C{reactor.iterate()} does not block.
+        """
+        reactor = self.buildReactor()
+        t = reactor.callLater(5, reactor.crash)
+
+        start = time.time()
+        reactor.iterate(0) # Shouldn't block
+        elapsed = time.time() - start
+
+        self.failUnless(elapsed < 2)
+        t.cancel()
+
+
+    def test_crash(self):
+        """
+        C{reactor.crash()} stops the reactor and does not fire shutdown
+        triggers.
+        """
+        reactor = self.buildReactor()
+        events = []
+        reactor.addSystemEventTrigger(
+            "before", "shutdown",
+            lambda: events.append(("before", "shutdown")))
+        reactor.callWhenRunning(reactor.callLater, 0, reactor.crash)
+        self.runReactor(reactor)
+        self.assertFalse(reactor.running)
+        self.assertFalse(
+            events,
+            "Shutdown triggers invoked but they should not have been.")
+
+
+    def test_runAfterCrash(self):
+        """
+        C{reactor.run()} restarts the reactor after it has been stopped by
+        C{reactor.crash()}.
+        """
+        events = []
+        def crash():
+            events.append('crash')
+            reactor.crash()
+        reactor = self.buildReactor()
+        reactor.callWhenRunning(crash)
+        self.runReactor(reactor)
+        def stop():
+            events.append(('stop', reactor.running))
+            reactor.stop()
+        reactor.callWhenRunning(stop)
+        self.runReactor(reactor)
+        self.assertEqual(events, ['crash', ('stop', True)])
+
+
+    def test_runAfterStop(self):
+        """
+        C{reactor.run()} raises L{ReactorNotRestartable} when called after
+        the reactor has been stopped.
+        """
+        events = []
+        def restart():
+            self.assertRaises(ReactorNotRestartable, reactor.run)
+            events.append('tested')
+        reactor = self.buildReactor()
+        reactor.callWhenRunning(reactor.stop)
+        reactor.addSystemEventTrigger('after', 'shutdown', restart)
+        self.runReactor(reactor)
+        self.assertEqual(events, ['tested'])
+
+
+
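+# makeTestCaseClasses() builds a TestCase subclass for each reactor supported
+# on this platform; injecting them into globals() lets trial discover them.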
+globals().update(SystemEventTestsBuilder.makeTestCaseClasses())
+globals().update(ObjectModelIntegrationTest.makeTestCaseClasses())
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_default.py b/ThirdParty/Twisted/twisted/internet/test/test_default.py
new file mode 100644
index 0000000..fdf2a6a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_default.py
@@ -0,0 +1,120 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.default}.
+"""
+
+from __future__ import division, absolute_import
+
+import select, sys
+from twisted.trial.unittest import SynchronousTestCase
+from twisted.python.runtime import Platform
+from twisted.internet import default
+from twisted.internet.default import _getInstallFunction, install
+from twisted.internet.test.test_main import NoReactor
+from twisted.internet.interfaces import IReactorCore
+
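+# Platform descriptions used to exercise reactor selection for each
+# operating system in the tests below.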
+unix = Platform('posix', 'other')
+linux = Platform('posix', 'linux2')
+windows = Platform('nt', 'win32')
+osx = Platform('posix', 'darwin')
+
+
+class PollReactorTests(SynchronousTestCase):
+    """
+    Tests for the cases of L{twisted.internet.default._getInstallFunction}
+    in which it picks the poll(2) or epoll(7)-based reactors.
+    """
+
+    def assertIsPoll(self, install):
+        """
+        Assert the given function will install the poll() reactor, or select()
+        if poll() is unavailable.
+        """
+        if hasattr(select, "poll"):
+            self.assertEqual(
+                install.__module__, 'twisted.internet.pollreactor')
+        else:
+            self.assertEqual(
+                install.__module__, 'twisted.internet.selectreactor')
+
+
+    def test_unix(self):
+        """
+        L{_getInstallFunction} chooses the poll reactor on arbitrary Unix
+        platforms, falling back to select(2) if it is unavailable.
+        """
+        install = _getInstallFunction(unix)
+        self.assertIsPoll(install)
+
+
+    def test_linux(self):
+        """
+        L{_getInstallFunction} chooses the epoll reactor on Linux, or poll if
+        epoll is unavailable.
+        """
+        install = _getInstallFunction(linux)
+        try:
+            from twisted.internet import epollreactor
+        except ImportError:
+            self.assertIsPoll(install)
+        else:
+            self.assertEqual(
+                install.__module__, 'twisted.internet.epollreactor')
+
+
+
+class SelectReactorTests(SynchronousTestCase):
+    """
+    Tests for the cases of L{twisted.internet.default._getInstallFunction}
+    in which it picks the select(2)-based reactor.
+    """
+    def test_osx(self):
+        """
+        L{_getInstallFunction} chooses the select reactor on OS X.
+        """
+        install = _getInstallFunction(osx)
+        self.assertEqual(
+            install.__module__, 'twisted.internet.selectreactor')
+
+
+    def test_windows(self):
+        """
+        L{_getInstallFunction} chooses the select reactor on Windows.
+        """
+        install = _getInstallFunction(windows)
+        self.assertEqual(
+            install.__module__, 'twisted.internet.selectreactor')
+
+
+
+class InstallationTests(SynchronousTestCase):
+    """
+    Tests for actual installation of the reactor.
+    """
+
+    def test_install(self):
+        """
+        L{install} installs a reactor.
+        """
+        with NoReactor():
+            install()
+            self.assertIn("twisted.internet.reactor", sys.modules)
+
+
+    def test_reactor(self):
+        """
+        Importing L{twisted.internet.reactor} installs the default reactor if
+        none is installed.
+        """
+        installed = []
+        def installer():
+            installed.append(True)
+            return install()
+        self.patch(default, "install", installer)
+
+        with NoReactor():
+            from twisted.internet import reactor
+            self.assertTrue(IReactorCore.providedBy(reactor))
+            self.assertEqual(installed, [True])
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_endpoints.py b/ThirdParty/Twisted/twisted/internet/test/test_endpoints.py
new file mode 100644
index 0000000..b1ff733
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_endpoints.py
@@ -0,0 +1,1029 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+"""
+Test the C{I...Endpoint} implementations that wrap the L{IReactorTCP},
+L{IReactorSSL}, and L{IReactorUNIX} interfaces found in
+L{twisted.internet.endpoints}.
+"""
+from socket import AF_INET, AF_INET6
+from errno import EPERM
+from zope.interface.verify import verifyObject
+
+from twisted.trial import unittest
+from twisted.internet import error, interfaces
+from twisted.internet import endpoints
+from twisted.internet.address import IPv4Address, UNIXAddress
+from twisted.test.proto_helpers import MemoryReactor
+from twisted.python.systemd import ListenFDs
+from twisted.plugin import getPlugins
+
+from twisted import plugins
+from twisted.python.modules import getModule
+from twisted.python.filepath import FilePath
+from twisted.protocols import basic
+from twisted.internet import protocol, reactor, stdio
+from twisted.internet.stdio import PipeAddress
+from twisted.internet.test.test_endpointspy3 import (
+    EndpointTestCaseMixin, ServerEndpointTestCaseMixin, skipSSL,
+    pemPath)
+
+casPath = getModule(__name__).filePath.sibling("fake_CAs")
+escapedPEMPathName = endpoints.quoteStringArgument(pemPath.path)
+escapedCAsPathName = endpoints.quoteStringArgument(casPath.path)
+
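+# Import the OpenSSL-dependent helpers only when pyOpenSSL is available;
+# otherwise the SSL-related tests below are skipped via skipSSL.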
+if not skipSSL:
+    from OpenSSL.SSL import ContextType
+    from twisted.internet.ssl import CertificateOptions, Certificate, \
+        KeyPair, PrivateCertificate
+    from twisted.internet.test.test_endpointspy3 import (testCertificate,
+                                                         testPrivateCertificate)
+
+
+class StdioFactory(protocol.Factory):
+    protocol = basic.LineReceiver
+
+
+
+class StandardIOEndpointsTestCase(unittest.TestCase):
+    """
+    Tests for Standard I/O Endpoints
+    """
+    def setUp(self):
+        self.ep = endpoints.StandardIOEndpoint(reactor)
+
+
+    def test_standardIOInstance(self):
+        """
+        The endpoint creates a L{stdio.StandardIO} instance.
+        """
+        self.d = self.ep.listen(StdioFactory())
+        def checkInstanceAndLoseConnection(stdioOb):
+            self.assertIsInstance(stdioOb, stdio.StandardIO)
+            stdioOb.loseConnection()
+        self.d.addCallback(checkInstanceAndLoseConnection)
+        return self.d
+
+
+    def test_reactor(self):
+        """
+        The reactor passed to the endpoint is set as its _reactor attribute.
+        """
+        self.assertEqual(self.ep._reactor, reactor)
+
+
+    def test_protocol(self):
+        """
+        The protocol used in the endpoint is a L{basic.LineReceiver} instance.
+        """
+        self.d = self.ep.listen(StdioFactory())
+        def checkProtocol(stdioOb):
+            from twisted.python.runtime import platform
+            if platform.isWindows():
+                self.assertIsInstance(stdioOb.proto, basic.LineReceiver)
+            else:
+                self.assertIsInstance(stdioOb.protocol, basic.LineReceiver)
+            stdioOb.loseConnection()
+        self.d.addCallback(checkProtocol)
+        return self.d
+
+
+    def test_address(self):
+        """
+        The address passed to the factory's buildProtocol in the endpoint
+        should be a PipeAddress instance.
+        """
+        class TestAddrFactory(protocol.Factory):
+            protocol = basic.LineReceiver
+            _address = None
+            def buildProtocol(self, addr):
+                self._address = addr
+                p = self.protocol()
+                p.factory = self
+                return p
+            def getAddress(self):
+                return self._address
+
+        myFactory = TestAddrFactory()
+        self.d = self.ep.listen(myFactory)
+        def checkAddress(stdioOb):
+            self.assertIsInstance(myFactory.getAddress(), PipeAddress)
+            stdioOb.loseConnection()
+        self.d.addCallback(checkAddress)
+        return self.d
+
+
+
+class UNIXEndpointsTestCase(EndpointTestCaseMixin,
+                            unittest.TestCase):
+    """
+    Tests for UnixSocket Endpoints.
+    """
+
+    def retrieveConnectedFactory(self, reactor):
+        """
+        Override L{EndpointTestCaseMixin.retrieveConnectedFactory} to account
+        for the different index of 'factory' in C{connectUNIX} args.
+        """
+        return self.expectedClients(reactor)[0][1]
+
+    def expectedServers(self, reactor):
+        """
+        @return: List of calls to L{IReactorUNIX.listenUNIX}
+        """
+        return reactor.unixServers
+
+
+    def expectedClients(self, reactor):
+        """
+        @return: List of calls to L{IReactorUNIX.connectUNIX}
+        """
+        return reactor.unixClients
+
+
+    def assertConnectArgs(self, receivedArgs, expectedArgs):
+        """
+        Compare path, timeout, checkPID in C{receivedArgs} to C{expectedArgs}.
+        We ignore the factory because we only care what protocol comes out of
+        the C{IStreamClientEndpoint.connect} call.
+
+        @param receivedArgs: C{tuple} of (C{path}, C{timeout}, C{checkPID})
+            that was passed to L{IReactorUNIX.connectUNIX}.
+        @param expectedArgs: C{tuple} of (C{path}, C{timeout}, C{checkPID})
+            that we expect to have been passed to L{IReactorUNIX.connectUNIX}.
+        """
+
+        (path, ignoredFactory, timeout, checkPID) = receivedArgs
+
+        (expectedPath, _ignoredFactory, expectedTimeout,
+         expectedCheckPID) = expectedArgs
+
+        self.assertEqual(path, expectedPath)
+        self.assertEqual(timeout, expectedTimeout)
+        self.assertEqual(checkPID, expectedCheckPID)
+
+
+    def connectArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to connect.
+        """
+        return {'timeout': 10, 'checkPID': 1}
+
+
+    def listenArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to listen.
+        """
+        return {'backlog': 100, 'mode': 0600, 'wantPID': 1}
+
+
+    def createServerEndpoint(self, reactor, factory, **listenArgs):
+        """
+        Create an L{UNIXServerEndpoint} and return the tools to verify its
+        behaviour.
+
+        @param reactor: A fake L{IReactorUNIX} that L{UNIXServerEndpoint} can
+            call L{IReactorUNIX.listenUNIX} on.
+        @param factory: The thing that we expect to be passed to our
+            L{IStreamServerEndpoint.listen} implementation.
+        @param listenArgs: Optional dictionary of arguments to
+            L{IReactorUNIX.listenUNIX}.
+        """
+        address = UNIXAddress(self.mktemp())
+
+        return (endpoints.UNIXServerEndpoint(reactor, address.name,
+                                             **listenArgs),
+                (address.name, factory,
+                 listenArgs.get('backlog', 50),
+                 listenArgs.get('mode', 0666),
+                 listenArgs.get('wantPID', 0)),
+                address)
+
+
+    def createClientEndpoint(self, reactor, clientFactory, **connectArgs):
+        """
+        Create an L{UNIXClientEndpoint} and return the values needed to verify
+        its behaviour.
+
+        @param reactor: A fake L{IReactorUNIX} that L{UNIXClientEndpoint} can
+            call L{IReactorUNIX.connectUNIX} on.
+        @param clientFactory: The thing that we expect to be passed to our
+            L{IStreamClientEndpoint.connect} implementation.
+        @param connectArgs: Optional dictionary of arguments to
+            L{IReactorUNIX.connectUNIX}
+        """
+        address = UNIXAddress(self.mktemp())
+
+        return (endpoints.UNIXClientEndpoint(reactor, address.name,
+                                             **connectArgs),
+                (address.name, clientFactory,
+                 connectArgs.get('timeout', 30),
+                 connectArgs.get('checkPID', 0)),
+                address)
+
+
+
+class ParserTestCase(unittest.TestCase):
+    """
+    Tests for L{endpoints._parseServer}, the low-level parsing logic.
+    """
+
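+    # A placeholder stand-in passed wherever _parseServer expects a factory.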
+    f = "Factory"
+
+    def parse(self, *a, **kw):
+        """
+        Provide a hook for test_strports to substitute the deprecated API.
+        """
+        return endpoints._parseServer(*a, **kw)
+
+
+    def test_simpleTCP(self):
+        """
+        Simple strings with a 'tcp:' prefix should be parsed as TCP.
+        """
+        self.assertEqual(self.parse('tcp:80', self.f),
+                         ('TCP', (80, self.f), {'interface':'', 'backlog':50}))
+
+
+    def test_interfaceTCP(self):
+        """
+        TCP port descriptions parse their 'interface' argument as a string.
+        """
+        self.assertEqual(
+             self.parse('tcp:80:interface=127.0.0.1', self.f),
+             ('TCP', (80, self.f), {'interface':'127.0.0.1', 'backlog':50}))
+
+
+    def test_backlogTCP(self):
+        """
+        TCP port descriptions parse their 'backlog' argument as an integer.
+        """
+        self.assertEqual(self.parse('tcp:80:backlog=6', self.f),
+                         ('TCP', (80, self.f),
+                                 {'interface':'', 'backlog':6}))
+
+
+    def test_simpleUNIX(self):
+        """
+        L{endpoints._parseServer} returns a C{'UNIX'} port description with
+        defaults for C{'mode'}, C{'backlog'}, and C{'wantPID'} when passed a
+        string with the C{'unix:'} prefix and no other parameter values.
+        """
+        self.assertEqual(
+            self.parse('unix:/var/run/finger', self.f),
+            ('UNIX', ('/var/run/finger', self.f),
+             {'mode': 0666, 'backlog': 50, 'wantPID': True}))
+
+
+    def test_modeUNIX(self):
+        """
+        C{mode} can be set by including C{"mode=<some integer>"}.
+        """
+        self.assertEqual(
+            self.parse('unix:/var/run/finger:mode=0660', self.f),
+            ('UNIX', ('/var/run/finger', self.f),
+             {'mode': 0660, 'backlog': 50, 'wantPID': True}))
+
+
+    def test_wantPIDUNIX(self):
+        """
+        C{wantPID} can be set to false by including C{"lockfile=0"}.
+        """
+        self.assertEqual(
+            self.parse('unix:/var/run/finger:lockfile=0', self.f),
+            ('UNIX', ('/var/run/finger', self.f),
+             {'mode': 0666, 'backlog': 50, 'wantPID': False}))
+
+
+    def test_escape(self):
+        """
+        Backslash can be used to escape colons and backslashes in port
+        descriptions.
+        """
+        self.assertEqual(
+            self.parse(r'unix:foo\:bar\=baz\:qux\\', self.f),
+            ('UNIX', ('foo:bar=baz:qux\\', self.f),
+             {'mode': 0666, 'backlog': 50, 'wantPID': True}))
+
+
+    def test_quoteStringArgument(self):
+        """
+        L{endpoints.quoteStringArgument} should quote backslashes and colons
+        for interpolation into L{endpoints.serverFromString} and
+        L{endpoints.clientFactory} arguments.
+        """
+        self.assertEqual(endpoints.quoteStringArgument("some : stuff \\"),
+                         "some \\: stuff \\\\")
+
+
+    def test_impliedEscape(self):
+        """
+        In strports descriptions, '=' in a parameter value does not need to be
+        quoted; it will simply be parsed as part of the value.
+        """
+        self.assertEqual(
+            self.parse(r'unix:address=foo=bar', self.f),
+            ('UNIX', ('foo=bar', self.f),
+             {'mode': 0666, 'backlog': 50, 'wantPID': True}))
+
+
+    def test_nonstandardDefault(self):
+        """
+        For compatibility with the old L{twisted.application.strports.parse},
+        the third 'mode' argument may be specified to L{endpoints.parse} to
+        indicate a default other than TCP.
+        """
+        self.assertEqual(
+            self.parse('filename', self.f, 'unix'),
+            ('UNIX', ('filename', self.f),
+             {'mode': 0666, 'backlog': 50, 'wantPID': True}))
+
+
+    def test_unknownType(self):
+        """
+        L{strports.parse} raises C{ValueError} when given an unknown endpoint
+        type.
+        """
+        self.assertRaises(ValueError, self.parse, "bogus-type:nothing", self.f)
+
+
+
+class ServerStringTests(unittest.TestCase):
+    """
+    Tests for L{twisted.internet.endpoints.serverFromString}.
+    """
+
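+    # The descriptions below use the strports form "type:arg:key=value",
+    # for example "tcp:1234:backlog=12:interface=10.0.0.1".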
+    def test_tcp(self):
+        """
+        When passed a TCP strports description, L{endpoints.serverFromString}
+        returns a L{TCP4ServerEndpoint} instance initialized with the values
+        from the string.
+        """
+        reactor = object()
+        server = endpoints.serverFromString(
+            reactor, "tcp:1234:backlog=12:interface=10.0.0.1")
+        self.assertIsInstance(server, endpoints.TCP4ServerEndpoint)
+        self.assertIdentical(server._reactor, reactor)
+        self.assertEqual(server._port, 1234)
+        self.assertEqual(server._backlog, 12)
+        self.assertEqual(server._interface, "10.0.0.1")
+
+
+    def test_ssl(self):
+        """
+        When passed an SSL strports description, L{endpoints.serverFromString}
+        returns a L{SSL4ServerEndpoint} instance initialized with the values
+        from the string.
+        """
+        reactor = object()
+        server = endpoints.serverFromString(
+            reactor,
+            "ssl:1234:backlog=12:privateKey=%s:"
+            "certKey=%s:interface=10.0.0.1" % (escapedPEMPathName,
+                                               escapedPEMPathName))
+        self.assertIsInstance(server, endpoints.SSL4ServerEndpoint)
+        self.assertIdentical(server._reactor, reactor)
+        self.assertEqual(server._port, 1234)
+        self.assertEqual(server._backlog, 12)
+        self.assertEqual(server._interface, "10.0.0.1")
+        ctx = server._sslContextFactory.getContext()
+        self.assertIsInstance(ctx, ContextType)
+
+    if skipSSL:
+        test_ssl.skip = skipSSL
+
+
+    def test_unix(self):
+        """
+        When passed a UNIX strports description, L{endpoints.serverFromString}
+        returns a L{UNIXServerEndpoint} instance initialized with the values
+        from the string.
+        """
+        reactor = object()
+        endpoint = endpoints.serverFromString(
+            reactor,
+            "unix:/var/foo/bar:backlog=7:mode=0123:lockfile=1")
+        self.assertIsInstance(endpoint, endpoints.UNIXServerEndpoint)
+        self.assertIdentical(endpoint._reactor, reactor)
+        self.assertEqual(endpoint._address, "/var/foo/bar")
+        self.assertEqual(endpoint._backlog, 7)
+        self.assertEqual(endpoint._mode, 0123)
+        self.assertEqual(endpoint._wantPID, True)
+
+
+    def test_implicitDefaultNotAllowed(self):
+        """
+        The older service-based API (L{twisted.application.strports.service})
+        allowed an implicit default of 'tcp' so that TCP ports could be
+        specified as a simple integer, but we've since decided that's a bad
+        idea, and the new API does not accept an implicit default argument; you
+        have to say 'tcp:' now.  If you try passing an old implicit port number
+        to the new API, you'll get a C{ValueError}.
+        """
+        value = self.assertRaises(
+            ValueError, endpoints.serverFromString, None, "4321")
+        self.assertEqual(
+            str(value),
+            "Unqualified strport description passed to 'service'."
+            "Use qualified endpoint descriptions; for example, 'tcp:4321'.")
+
+
+    def test_unknownType(self):
+        """
+        L{endpoints.serverFromString} raises C{ValueError} when given an
+        unknown endpoint type.
+        """
+        value = self.assertRaises(
+            # faster-than-light communication not supported
+            ValueError, endpoints.serverFromString, None,
+            "ftl:andromeda/carcosa/hali/2387")
+        self.assertEqual(
+            str(value),
+            "Unknown endpoint type: 'ftl'")
+
+
+    def test_typeFromPlugin(self):
+        """
+        L{endpoints.serverFromString} looks up plugins of type
+        L{IStreamServerEndpoint} and constructs endpoints from them.
+        """
+        # Set up a plugin which will only be accessible for the duration of
+        # this test.
+        addFakePlugin(self)
+        # Plugin is set up: now actually test.
+        notAReactor = object()
+        fakeEndpoint = endpoints.serverFromString(
+            notAReactor, "fake:hello:world:yes=no:up=down")
+        from twisted.plugins.fakeendpoint import fake
+        self.assertIdentical(fakeEndpoint.parser, fake)
+        self.assertEqual(fakeEndpoint.args, (notAReactor, 'hello', 'world'))
+        self.assertEqual(fakeEndpoint.kwargs, dict(yes='no', up='down'))
+
+
+
+def addFakePlugin(testCase, dropinSource="fakeendpoint.py"):
+    """
+    For the duration of C{testCase}, add a fake plugin to twisted.plugins which
+    contains some sample endpoint parsers.
+    """
+    import sys
+    savedModules = sys.modules.copy()
+    savedPluginPath = plugins.__path__
+    def cleanup():
+        sys.modules.clear()
+        sys.modules.update(savedModules)
+        plugins.__path__[:] = savedPluginPath
+    testCase.addCleanup(cleanup)
+    fp = FilePath(testCase.mktemp())
+    fp.createDirectory()
+    getModule(__name__).filePath.sibling(dropinSource).copyTo(
+        fp.child(dropinSource))
+    plugins.__path__.append(fp.path)
+
+
+
+class ClientStringTests(unittest.TestCase):
+    """
+    Tests for L{twisted.internet.endpoints.clientFromString}.
+    """
+
+    def test_tcp(self):
+        """
+        When passed a TCP strports description, L{endpoints.clientFromString}
+        returns a L{TCP4ClientEndpoint} instance initialized with the values
+        from the string.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor,
+            "tcp:host=example.com:port=1234:timeout=7:bindAddress=10.0.0.2")
+        self.assertIsInstance(client, endpoints.TCP4ClientEndpoint)
+        self.assertIdentical(client._reactor, reactor)
+        self.assertEqual(client._host, "example.com")
+        self.assertEqual(client._port, 1234)
+        self.assertEqual(client._timeout, 7)
+        self.assertEqual(client._bindAddress, "10.0.0.2")
+
+
+    def test_tcpPositionalArgs(self):
+        """
+        When passed a TCP strports description using positional arguments,
+        L{endpoints.clientFromString} returns a L{TCP4ClientEndpoint} instance
+        initialized with the values from the string.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor,
+            "tcp:example.com:1234:timeout=7:bindAddress=10.0.0.2")
+        self.assertIsInstance(client, endpoints.TCP4ClientEndpoint)
+        self.assertIdentical(client._reactor, reactor)
+        self.assertEqual(client._host, "example.com")
+        self.assertEqual(client._port, 1234)
+        self.assertEqual(client._timeout, 7)
+        self.assertEqual(client._bindAddress, "10.0.0.2")
+
+
+    def test_tcpHostPositionalArg(self):
+        """
+        When passed a TCP strports description specifying host as a positional
+        argument, L{endpoints.clientFromString} returns a L{TCP4ClientEndpoint}
+        instance initialized with the values from the string.
+        """
+        reactor = object()
+
+        client = endpoints.clientFromString(
+            reactor,
+            "tcp:example.com:port=1234:timeout=7:bindAddress=10.0.0.2")
+        self.assertEqual(client._host, "example.com")
+        self.assertEqual(client._port, 1234)
+
+
+    def test_tcpPortPositionalArg(self):
+        """
+        When passed a TCP strports description specifying port as a positional
+        argument, L{endpoints.clientFromString} returns a L{TCP4ClientEndpoint}
+        instance initialized with the values from the string.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor,
+            "tcp:host=example.com:1234:timeout=7:bindAddress=10.0.0.2")
+        self.assertEqual(client._host, "example.com")
+        self.assertEqual(client._port, 1234)
+
+
+    def test_tcpDefaults(self):
+        """
+        A TCP strports description may omit I{timeout} or I{bindAddress} to
+        allow the default to be used.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor,
+            "tcp:host=example.com:port=1234")
+        self.assertEqual(client._timeout, 30)
+        self.assertEqual(client._bindAddress, None)
+
+
+    def test_unix(self):
+        """
+        When passed a UNIX strports description, L{endpoints.clientFromString}
+        returns a L{UNIXClientEndpoint} instance initialized with the values
+        from the string.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor,
+            "unix:path=/var/foo/bar:lockfile=1:timeout=9")
+        self.assertIsInstance(client, endpoints.UNIXClientEndpoint)
+        self.assertIdentical(client._reactor, reactor)
+        self.assertEqual(client._path, "/var/foo/bar")
+        self.assertEqual(client._timeout, 9)
+        self.assertEqual(client._checkPID, True)
+
+
+    def test_unixDefaults(self):
+        """
+        A UNIX strports description may omit I{lockfile} or I{timeout} to allow
+        the defaults to be used.
+        """
+        client = endpoints.clientFromString(object(), "unix:path=/var/foo/bar")
+        self.assertEqual(client._timeout, 30)
+        self.assertEqual(client._checkPID, False)
+
+
+    def test_unixPathPositionalArg(self):
+        """
+        When passed a UNIX strports description specifying path as a positional
+        argument, L{endpoints.clientFromString} returns a L{UNIXClientEndpoint}
+        instance initialized with the values from the string.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor,
+            "unix:/var/foo/bar:lockfile=1:timeout=9")
+        self.assertIsInstance(client, endpoints.UNIXClientEndpoint)
+        self.assertIdentical(client._reactor, reactor)
+        self.assertEqual(client._path, "/var/foo/bar")
+        self.assertEqual(client._timeout, 9)
+        self.assertEqual(client._checkPID, True)
+
+
+    def test_typeFromPlugin(self):
+        """
+        L{endpoints.clientFromString} looks up plugins of type
+        L{IStreamClientEndpoint} and constructs endpoints from them.
+        """
+        addFakePlugin(self)
+        notAReactor = object()
+        clientEndpoint = endpoints.clientFromString(
+            notAReactor, "cfake:alpha:beta:cee=dee:num=1")
+        from twisted.plugins.fakeendpoint import fakeClient
+        self.assertIdentical(clientEndpoint.parser, fakeClient)
+        self.assertEqual(clientEndpoint.args, ('alpha', 'beta'))
+        self.assertEqual(clientEndpoint.kwargs, dict(cee='dee', num='1'))
+
+
+    def test_unknownType(self):
+        """
+        L{endpoints.clientFromString} raises C{ValueError} when given an
+        unknown endpoint type.
+        """
+        value = self.assertRaises(
+            # faster-than-light communication not supported
+            ValueError, endpoints.clientFromString, None,
+            "ftl:andromeda/carcosa/hali/2387")
+        self.assertEqual(
+            str(value),
+            "Unknown endpoint type: 'ftl'")
+
+
+
+class SSLClientStringTests(unittest.TestCase):
+    """
+    Tests for L{twisted.internet.endpoints.clientFromString} which require SSL.
+    """
+
+    if skipSSL:
+        skip = skipSSL
+
+    def test_ssl(self):
+        """
+        When passed an SSL strports description, L{clientFromString} returns a
+        L{SSL4ClientEndpoint} instance initialized with the values from the
+        string.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor,
+            "ssl:host=example.net:port=4321:privateKey=%s:"
+            "certKey=%s:bindAddress=10.0.0.3:timeout=3:caCertsDir=%s" %
+             (escapedPEMPathName,
+              escapedPEMPathName,
+              escapedCAsPathName))
+        self.assertIsInstance(client, endpoints.SSL4ClientEndpoint)
+        self.assertIdentical(client._reactor, reactor)
+        self.assertEqual(client._host, "example.net")
+        self.assertEqual(client._port, 4321)
+        self.assertEqual(client._timeout, 3)
+        self.assertEqual(client._bindAddress, "10.0.0.3")
+        certOptions = client._sslContextFactory
+        self.assertIsInstance(certOptions, CertificateOptions)
+        ctx = certOptions.getContext()
+        self.assertIsInstance(ctx, ContextType)
+        self.assertEqual(Certificate(certOptions.certificate),
+                          testCertificate)
+        privateCert = PrivateCertificate(certOptions.certificate)
+        privateCert._setPrivateKey(KeyPair(certOptions.privateKey))
+        self.assertEqual(privateCert, testPrivateCertificate)
+        expectedCerts = [
+            Certificate.loadPEM(x.getContent()) for x in
+                [casPath.child("thing1.pem"), casPath.child("thing2.pem")]
+            if x.basename().lower().endswith('.pem')
+        ]
+        self.assertEqual(sorted((Certificate(x) for x in certOptions.caCerts),
+                                key=lambda cert: cert.digest()),
+                         sorted(expectedCerts,
+                                key=lambda cert: cert.digest()))
+
+
+    def test_sslPositionalArgs(self):
+        """
+        When passed an SSL strports description, L{clientFromString} returns a
+        L{SSL4ClientEndpoint} instance initialized with the values from the
+        string.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor,
+            "ssl:example.net:4321:privateKey=%s:"
+            "certKey=%s:bindAddress=10.0.0.3:timeout=3:caCertsDir=%s" %
+             (escapedPEMPathName,
+              escapedPEMPathName,
+              escapedCAsPathName))
+        self.assertIsInstance(client, endpoints.SSL4ClientEndpoint)
+        self.assertIdentical(client._reactor, reactor)
+        self.assertEqual(client._host, "example.net")
+        self.assertEqual(client._port, 4321)
+        self.assertEqual(client._timeout, 3)
+        self.assertEqual(client._bindAddress, "10.0.0.3")
+
+
+    def test_unreadableCertificate(self):
+        """
+        If a certificate in the directory is unreadable,
+        L{endpoints._loadCAsFromDir} will ignore that certificate.
+        """
+        class UnreadableFilePath(FilePath):
+            def getContent(self):
+                data = FilePath.getContent(self)
+                # There is a duplicate of thing2.pem, so ignore anything that
+                # looks like it.
+                if data == casPath.child("thing2.pem").getContent():
+                    raise IOError(EPERM)
+                else:
+                    return data
+        casPathClone = casPath.child("ignored").parent()
+        casPathClone.clonePath = UnreadableFilePath
+        self.assertEqual(
+            [Certificate(x) for x in endpoints._loadCAsFromDir(casPathClone)],
+            [Certificate.loadPEM(casPath.child("thing1.pem").getContent())])
+
+
+    def test_sslSimple(self):
+        """
+        When passed an SSL strports description without any extra parameters,
+        L{clientFromString} returns a simple non-verifying endpoint that will
+        speak SSL.
+        """
+        reactor = object()
+        client = endpoints.clientFromString(
+            reactor, "ssl:host=simple.example.org:port=4321")
+        certOptions = client._sslContextFactory
+        self.assertIsInstance(certOptions, CertificateOptions)
+        self.assertEqual(certOptions.verify, False)
+        ctx = certOptions.getContext()
+        self.assertIsInstance(ctx, ContextType)
+
+
+
+class AdoptedStreamServerEndpointTestCase(ServerEndpointTestCaseMixin,
+                                          unittest.TestCase):
+    """
+    Tests for adopted socket-based stream server endpoints.
+    """
+    def _createStubbedAdoptedEndpoint(self, reactor, fileno, addressFamily):
+        """
+        Create an L{AdoptedStreamServerEndpoint} which may safely be used with
+        an invalid file descriptor.  This is convenient for a number of unit
+        tests.
+        """
+        e = endpoints.AdoptedStreamServerEndpoint(reactor, fileno, addressFamily)
+        # Stub out some syscalls which would fail, given our invalid file
+        # descriptor.
+        e._close = lambda fd: None
+        e._setNonBlocking = lambda fd: None
+        return e
+
+
+    def createServerEndpoint(self, reactor, factory):
+        """
+        Create a new L{AdoptedStreamServerEndpoint} for use by a test.
+
+        @return: A three-tuple:
+            - The endpoint
+            - A tuple of the arguments expected to be passed to the underlying
+              reactor method
+            - An IAddress object which will match the result of
+              L{IListeningPort.getHost} on the port returned by the endpoint.
+        """
+        fileno = 12
+        addressFamily = AF_INET
+        endpoint = self._createStubbedAdoptedEndpoint(
+            reactor, fileno, addressFamily)
+        # Magic numbers come from the implementation of MemoryReactor
+        address = IPv4Address("TCP", "0.0.0.0", 1234)
+        return (endpoint, (fileno, addressFamily, factory), address)
+
+
+    def expectedServers(self, reactor):
+        """
+        @return: The ports which were actually adopted by C{reactor} via calls
+            to its L{IReactorSocket.adoptStreamPort} implementation.
+        """
+        return reactor.adoptedPorts
+
+
+    def listenArgs(self):
+        """
+        @return: A C{dict} of additional keyword arguments to pass to the
+            C{createServerEndpoint}.
+        """
+        return {}
+
+
+    def test_singleUse(self):
+        """
+        L{AdoptedStreamServerEndpoint.listen} can only be used once.  The file
+        descriptor given is closed after the first use, and subsequent calls to
+        C{listen} return a L{Deferred} that fails with L{AlreadyListened}.
+        """
+        reactor = MemoryReactor()
+        endpoint = self._createStubbedAdoptedEndpoint(reactor, 13, AF_INET)
+        endpoint.listen(object())
+        d = self.assertFailure(endpoint.listen(object()), error.AlreadyListened)
+        def listenFailed(ignored):
+            self.assertEqual(1, len(reactor.adoptedPorts))
+        d.addCallback(listenFailed)
+        return d
+
+
+    def test_descriptionNonBlocking(self):
+        """
+        L{AdoptedStreamServerEndpoint.listen} sets the file descriptor given to
+        it to non-blocking.
+        """
+        reactor = MemoryReactor()
+        endpoint = self._createStubbedAdoptedEndpoint(reactor, 13, AF_INET)
+        events = []
+        def setNonBlocking(fileno):
+            events.append(("setNonBlocking", fileno))
+        endpoint._setNonBlocking = setNonBlocking
+
+        d = endpoint.listen(object())
+        def listened(ignored):
+            self.assertEqual([("setNonBlocking", 13)], events)
+        d.addCallback(listened)
+        return d
+
+
+    def test_descriptorClosed(self):
+        """
+        L{AdoptedStreamServerEndpoint.listen} closes its file descriptor after
+        adding it to the reactor with L{IReactorSocket.adoptStreamPort}.
+        """
+        reactor = MemoryReactor()
+        endpoint = self._createStubbedAdoptedEndpoint(reactor, 13, AF_INET)
+        events = []
+        def close(fileno):
+            events.append(("close", fileno, len(reactor.adoptedPorts)))
+        endpoint._close = close
+
+        d = endpoint.listen(object())
+        def listened(ignored):
+            self.assertEqual([("close", 13, 1)], events)
+        d.addCallback(listened)
+        return d
+
+
+
+class SystemdEndpointPluginTests(unittest.TestCase):
+    """
+    Unit tests for the systemd stream server endpoint and endpoint string
+    description parser.
+
+    @see: U{systemd<http://www.freedesktop.org/wiki/Software/systemd>}
+    """
+
+    _parserClass = endpoints._SystemdParser
+
+    def test_pluginDiscovery(self):
+        """
+        L{endpoints._SystemdParser} is found as a plugin for
+        the L{interfaces.IStreamServerEndpointStringParser} interface.
+        """
+        parsers = list(getPlugins(
+                interfaces.IStreamServerEndpointStringParser))
+        for p in parsers:
+            if isinstance(p, self._parserClass):
+                break
+        else:
+            self.fail("Did not find systemd parser in %r" % (parsers,))
+
+
+    def test_interface(self):
+        """
+        L{endpoints._SystemdParser} instances provide
+        L{interfaces.IStreamServerEndpointStringParser}.
+        """
+        parser = self._parserClass()
+        self.assertTrue(verifyObject(
+                interfaces.IStreamServerEndpointStringParser, parser))
+
+
+    def _parseStreamServerTest(self, addressFamily, addressFamilyString):
+        """
+        Helper for unit tests for L{endpoints._SystemdParser.parseStreamServer}
+        for different address families.
+
+        Handling of the given address family will be verified.  If there is a
+        problem, a test-failing exception will be raised.
+
+        @param addressFamily: An address family constant, like L{socket.AF_INET}.
+
+        @param addressFamilyString: A string which should be recognized by the
+            parser as representing C{addressFamily}.
+        """
+        reactor = object()
+        descriptors = [5, 6, 7, 8, 9]
+        index = 3
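+        # index 3 selects descriptors[3] == 8 as the server endpoint's fileno.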
+
+        parser = self._parserClass()
+        parser._sddaemon = ListenFDs(descriptors)
+
+        server = parser.parseStreamServer(
+            reactor, domain=addressFamilyString, index=str(index))
+        self.assertIdentical(server.reactor, reactor)
+        self.assertEqual(server.addressFamily, addressFamily)
+        self.assertEqual(server.fileno, descriptors[index])
+
+
+    def test_parseStreamServerINET(self):
+        """
+        IPv4 can be specified using the string C{"INET"}.
+        """
+        self._parseStreamServerTest(AF_INET, "INET")
+
+
+    def test_parseStreamServerINET6(self):
+        """
+        IPv6 can be specified using the string C{"INET6"}.
+        """
+        self._parseStreamServerTest(AF_INET6, "INET6")
+
+
+    def test_parseStreamServerUNIX(self):
+        """
+        A UNIX domain socket can be specified using the string C{"UNIX"}.
+        """
+        try:
+            from socket import AF_UNIX
+        except ImportError:
+            raise unittest.SkipTest("Platform lacks AF_UNIX support")
+        else:
+            self._parseStreamServerTest(AF_UNIX, "UNIX")
+
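+
+
+# Editor's sketch (not part of the upstream test suite): the kind of
+# description string the parser above is meant to handle.  The
+# "systemd:domain=...:index=..." form is an assumption inferred from the
+# keyword arguments exercised in C{_parseStreamServerTest}.
+def _systemdEndpointSketch(reactor, factory):
+    """
+    Build a systemd-adopted stream server endpoint from a description string
+    and start C{factory} on it.
+    """
+    ep = endpoints.serverFromString(reactor, "systemd:domain=INET:index=0")
+    return ep.listen(factory)
+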
+
+
+class TCP6ServerEndpointPluginTests(unittest.TestCase):
+    """
+    Unit tests for the TCP IPv6 stream server endpoint string description parser.
+    """
+    _parserClass = endpoints._TCP6ServerParser
+
+    def test_pluginDiscovery(self):
+        """
+        L{endpoints._TCP6ServerParser} is found as a plugin for the
+        L{interfaces.IStreamServerEndpointStringParser} interface.
+        """
+        parsers = list(getPlugins(
+                interfaces.IStreamServerEndpointStringParser))
+        for p in parsers:
+            if isinstance(p, self._parserClass):
+                break
+        else:
+            self.fail("Did not find TCP6ServerEndpoint parser in %r" % (parsers,))
+
+
+    def test_interface(self):
+        """
+        L{endpoints._TCP6ServerParser} instances provide
+        L{interfaces.IStreamServerEndpointStringParser}.
+        """
+        parser = self._parserClass()
+        self.assertTrue(verifyObject(
+                interfaces.IStreamServerEndpointStringParser, parser))
+
+
+    def test_stringDescription(self):
+        """
+        L{serverFromString} returns a L{TCP6ServerEndpoint} instance when
+        given a 'tcp6' endpoint string description.
+        """
+        ep = endpoints.serverFromString(MemoryReactor(),
+            "tcp6:8080:backlog=12:interface=\:\:1")
+        self.assertIsInstance(ep, endpoints.TCP6ServerEndpoint)
+        self.assertIsInstance(ep._reactor, MemoryReactor)
+        self.assertEqual(ep._port, 8080)
+        self.assertEqual(ep._backlog, 12)
+        self.assertEqual(ep._interface, '::1')
+
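+
+
+# Editor's sketch (not part of the upstream test suite): the colons of an
+# IPv6 literal have to be escaped inside an endpoint description, as in the
+# test above.  Assuming C{quoteStringArgument} is available in this copy of
+# the endpoints module, the escaping can also be done programmatically.
+def _tcp6DescriptionSketch(host="::1", port=8080):
+    """
+    Return a 'tcp6' endpoint description string with C{host} safely quoted.
+    """
+    return "tcp6:%d:interface=%s" % (
+        port, endpoints.quoteStringArgument(host))
+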
+
+
+class StandardIOEndpointPluginTests(unittest.TestCase):
+    """
+    Unit tests for the Standard I/O endpoint string description parser.
+    """
+    _parserClass = endpoints._StandardIOParser
+
+    def test_pluginDiscovery(self):
+        """
+        L{endpoints._StandardIOParser} is found as a plugin for the
+        L{interfaces.IStreamServerEndpointStringParser} interface.
+        """
+        parsers = list(getPlugins(
+                interfaces.IStreamServerEndpointStringParser))
+        for p in parsers:
+            if isinstance(p, self._parserClass):
+                break
+        else:
+            self.fail("Did not find StandardIOEndpoint parser in %r" % (parsers,))
+
+
+    def test_interface(self):
+        """
+        L{endpoints._StandardIOParser} instances provide
+        L{interfaces.IStreamServerEndpointStringParser}.
+        """
+        parser = self._parserClass()
+        self.assertTrue(verifyObject(
+                interfaces.IStreamServerEndpointStringParser, parser))
+
+
+    def test_stringDescription(self):
+        """
+        L{serverFromString} returns a L{StandardIOEndpoint} instance when
+        given a 'stdio' endpoint string description.
+        """
+        ep = endpoints.serverFromString(MemoryReactor(), "stdio:")
+        self.assertIsInstance(ep, endpoints.StandardIOEndpoint)
+        self.assertIsInstance(ep._reactor, MemoryReactor)
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_endpointspy3.py b/ThirdParty/Twisted/twisted/internet/test/test_endpointspy3.py
new file mode 100644
index 0000000..a1e4fb6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_endpointspy3.py
@@ -0,0 +1,1036 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+"""
+Tests for L{twisted.internet._endpointspy3}.
+"""
+
+from socket import AF_INET6, SOCK_STREAM, IPPROTO_TCP
+
+from zope.interface import implementer
+from zope.interface.verify import verifyObject
+
+from twisted.trial import unittest
+from twisted.python.failure import Failure
+from twisted.python.filepath import FilePath
+from twisted.test.proto_helpers import (
+    MemoryReactor, RaisingMemoryReactor, StringTransport)
+from twisted.internet.protocol import ClientFactory, Protocol
+from twisted.internet.address import IPv4Address, IPv6Address
+from twisted.internet import (_endpointspy3 as endpoints, error, defer,
+                              interfaces, reactor)
+
+from twisted.test import __file__ as testInitPath
+pemPath = FilePath(testInitPath.encode("utf-8")).sibling(b"server.pem")
+
+try:
+    from twisted.test.test_sslverify import makeCertificate
+    from twisted.internet.ssl import (CertificateOptions, Certificate,
+                                      PrivateCertificate)
+    testCertificate = Certificate.loadPEM(pemPath.getContent())
+    testPrivateCertificate = PrivateCertificate.loadPEM(pemPath.getContent())
+
+    skipSSL = False
+except ImportError:
+    skipSSL = "OpenSSL is required to construct SSL Endpoints"
+
+
+class TestProtocol(Protocol):
+    """
+    Protocol used by the endpoint tests; it records the data it receives,
+    the reasons its connection is lost, and how many times C{connectionMade}
+    is called.
+    """
+
+    def __init__(self):
+        self.data = []
+        self.connectionsLost = []
+        self.connectionMadeCalls = 0
+
+
+    def logPrefix(self):
+        return "A Test Protocol"
+
+
+    def connectionMade(self):
+        self.connectionMadeCalls += 1
+
+
+    def dataReceived(self, data):
+        self.data.append(data)
+
+
+    def connectionLost(self, reason):
+        self.connectionsLost.append(reason)
+
+
+
+@implementer(interfaces.IHalfCloseableProtocol)
+class TestHalfCloseableProtocol(TestProtocol):
+    """
+    A Protocol that implements L{IHalfCloseableProtocol} and records whether
+    its C{readConnectionLost} and C{writeConnectionLost} methods are called.
+
+    @ivar readLost: A C{bool} indicating whether C{readConnectionLost} has been
+        called.
+
+    @ivar writeLost: A C{bool} indicating whether C{writeConnectionLost} has
+        been called.
+    """
+
+    def __init__(self):
+        TestProtocol.__init__(self)
+        self.readLost = False
+        self.writeLost = False
+
+
+    def readConnectionLost(self):
+        self.readLost = True
+
+
+    def writeConnectionLost(self):
+        self.writeLost = True
+
+
+
+@implementer(interfaces.IFileDescriptorReceiver)
+class TestFileDescriptorReceiverProtocol(TestProtocol):
+    """
+    A Protocol that implements L{IFileDescriptorReceiver} and records how its
+    C{fileDescriptorReceived} method is called.
+
+    @ivar receivedDescriptors: A C{list} containing all of the file descriptors
+        passed to C{fileDescriptorReceived} calls made on this instance.
+    """
+
+    def connectionMade(self):
+        TestProtocol.connectionMade(self)
+        self.receivedDescriptors = []
+
+
+    def fileDescriptorReceived(self, descriptor):
+        self.receivedDescriptors.append(descriptor)
+
+
+
+class TestFactory(ClientFactory):
+    """
+    Simple factory to be used both when connecting and listening.  It builds
+    L{TestProtocol} instances, which record their connection and data events
+    for the tests.
+    """
+
+    protocol = TestProtocol
+
+
+
+class WrappingFactoryTests(unittest.TestCase):
+    """
+    Test the behaviour of our ugly implementation detail C{_WrappingFactory}.
+    """
+    def test_doStart(self):
+        """
+        L{_WrappingFactory.doStart} passes through to the wrapped factory's
+        C{doStart} method, allowing application-specific setup and logging.
+        """
+        factory = ClientFactory()
+        wf = endpoints._WrappingFactory(factory)
+        wf.doStart()
+        self.assertEqual(1, factory.numPorts)
+
+
+    def test_doStop(self):
+        """
+        L{_WrappingFactory.doStop} passes through to the wrapped factory's
+        C{doStop} method, allowing application-specific cleanup and logging.
+        """
+        factory = ClientFactory()
+        factory.numPorts = 3
+        wf = endpoints._WrappingFactory(factory)
+        wf.doStop()
+        self.assertEqual(2, factory.numPorts)
+
+
+    def test_failedBuildProtocol(self):
+        """
+        An exception raised in C{buildProtocol} of the wrapped factory
+        results in the C{_onConnection} L{Deferred} being errbacked.
+        """
+
+        class BogusFactory(ClientFactory):
+            """
+            A one-off factory whose C{buildProtocol} raises an C{Exception}.
+            """
+
+            def buildProtocol(self, addr):
+                raise ValueError("My protocol is poorly defined.")
+
+
+        wf = endpoints._WrappingFactory(BogusFactory())
+
+        wf.buildProtocol(None)
+
+        d = self.assertFailure(wf._onConnection, ValueError)
+        d.addCallback(lambda e: self.assertEqual(
+                e.args,
+                ("My protocol is poorly defined.",)))
+
+        return d
+
+
+    def test_logPrefixPassthrough(self):
+        """
+        If the wrapped protocol provides L{ILoggingContext}, whatever is
+        returned from the wrapped C{logPrefix} method is returned from
+        L{_WrappingProtocol.logPrefix}.
+        """
+        wf = endpoints._WrappingFactory(TestFactory())
+        wp = wf.buildProtocol(None)
+        self.assertEqual(wp.logPrefix(), "A Test Protocol")
+
+
+    def test_logPrefixDefault(self):
+        """
+        If the wrapped protocol does not provide L{ILoggingContext}, the wrapped
+        protocol's class name is returned from L{_WrappingProtocol.logPrefix}.
+        """
+        class NoProtocol(object):
+            pass
+        factory = TestFactory()
+        factory.protocol = NoProtocol
+        wf = endpoints._WrappingFactory(factory)
+        wp = wf.buildProtocol(None)
+        self.assertEqual(wp.logPrefix(), "NoProtocol")
+
+
+    def test_wrappedProtocolDataReceived(self):
+        """
+        The wrapped C{Protocol}'s C{dataReceived} will get called when our
+        C{_WrappingProtocol}'s C{dataReceived} gets called.
+        """
+        wf = endpoints._WrappingFactory(TestFactory())
+        p = wf.buildProtocol(None)
+        p.makeConnection(None)
+
+        p.dataReceived(b'foo')
+        self.assertEqual(p._wrappedProtocol.data, [b'foo'])
+
+        p.dataReceived(b'bar')
+        self.assertEqual(p._wrappedProtocol.data, [b'foo', b'bar'])
+
+
+    def test_wrappedProtocolTransport(self):
+        """
+        Our transport is properly hooked up to the wrappedProtocol when a
+        connection is made.
+        """
+        wf = endpoints._WrappingFactory(TestFactory())
+        p = wf.buildProtocol(None)
+
+        dummyTransport = object()
+
+        p.makeConnection(dummyTransport)
+
+        self.assertEqual(p.transport, dummyTransport)
+
+        self.assertEqual(p._wrappedProtocol.transport, dummyTransport)
+
+
+    def test_wrappedProtocolConnectionLost(self):
+        """
+        Our wrappedProtocol's connectionLost method is called when
+        L{_WrappingProtocol.connectionLost} is called.
+        """
+        tf = TestFactory()
+        wf = endpoints._WrappingFactory(tf)
+        p = wf.buildProtocol(None)
+
+        p.connectionLost("fail")
+
+        self.assertEqual(p._wrappedProtocol.connectionsLost, ["fail"])
+
+
+    def test_clientConnectionFailed(self):
+        """
+        Calls to L{_WrappingFactory.clientConnectionFailed} should errback the
+        L{_WrappingFactory._onConnection} L{Deferred}.
+        """
+        wf = endpoints._WrappingFactory(TestFactory())
+        expectedFailure = Failure(error.ConnectError(string="fail"))
+
+        wf.clientConnectionFailed(
+            None,
+            expectedFailure)
+
+        errors = []
+        def gotError(f):
+            errors.append(f)
+
+        wf._onConnection.addErrback(gotError)
+
+        self.assertEqual(errors, [expectedFailure])
+
+
+    def test_wrappingProtocolFileDescriptorReceiver(self):
+        """
+        Our L{_WrappingProtocol} should be an L{IFileDescriptorReceiver} if the
+        wrapped protocol is.
+        """
+        connectedDeferred = None
+        applicationProtocol = TestFileDescriptorReceiverProtocol()
+        wrapper = endpoints._WrappingProtocol(
+            connectedDeferred, applicationProtocol)
+        self.assertTrue(interfaces.IFileDescriptorReceiver.providedBy(wrapper))
+        self.assertTrue(verifyObject(interfaces.IFileDescriptorReceiver, wrapper))
+
+
+    def test_wrappingProtocolNotFileDescriptorReceiver(self):
+        """
+        Our L{_WrappingProtocol} does not provide L{IFileDescriptorReceiver}
+        if the wrapped protocol doesn't.
+        """
+        tp = TestProtocol()
+        p = endpoints._WrappingProtocol(None, tp)
+        self.assertFalse(interfaces.IFileDescriptorReceiver.providedBy(p))
+
+
+    def test_wrappedProtocolFileDescriptorReceived(self):
+        """
+        L{_WrappingProtocol.fileDescriptorReceived} calls the wrapped protocol's
+        C{fileDescriptorReceived} method.
+        """
+        wrappedProtocol = TestFileDescriptorReceiverProtocol()
+        wrapper = endpoints._WrappingProtocol(
+            defer.Deferred(), wrappedProtocol)
+        wrapper.makeConnection(StringTransport())
+        wrapper.fileDescriptorReceived(42)
+        self.assertEqual(wrappedProtocol.receivedDescriptors, [42])
+
+
+    def test_wrappingProtocolHalfCloseable(self):
+        """
+        Our L{_WrappingProtocol} should be an L{IHalfCloseableProtocol} if the
+        C{wrappedProtocol} is.
+        """
+        cd = object()
+        hcp = TestHalfCloseableProtocol()
+        p = endpoints._WrappingProtocol(cd, hcp)
+        self.assertEqual(
+            interfaces.IHalfCloseableProtocol.providedBy(p), True)
+
+
+    def test_wrappingProtocolNotHalfCloseable(self):
+        """
+        Our L{_WrappingProtocol} should not provide L{IHalfCloseableProtocol}
+        if the C{wrappedProtocol} doesn't.
+        """
+        tp = TestProtocol()
+        p = endpoints._WrappingProtocol(None, tp)
+        self.assertEqual(
+            interfaces.IHalfCloseableProtocol.providedBy(p), False)
+
+
+    def test_wrappedProtocolReadConnectionLost(self):
+        """
+        L{_WrappingProtocol.readConnectionLost} should proxy to the wrapped
+        protocol's C{readConnectionLost}.
+        """
+        hcp = TestHalfCloseableProtocol()
+        p = endpoints._WrappingProtocol(None, hcp)
+        p.readConnectionLost()
+        self.assertEqual(hcp.readLost, True)
+
+
+    def test_wrappedProtocolWriteConnectionLost(self):
+        """
+        L{_WrappingProtocol.writeConnectionLost} should proxy to the wrapped
+        protocol's C{writeConnectionLost}.
+        """
+        hcp = TestHalfCloseableProtocol()
+        p = endpoints._WrappingProtocol(None, hcp)
+        p.writeConnectionLost()
+        self.assertEqual(hcp.writeLost, True)
+
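+
+
+# Editor's sketch (not part of the upstream test suite): the proxying
+# behaviour checked piecemeal above, strung together in one place.
+def _wrappingProtocolSketch(data=b"foo"):
+    """
+    Build a wrapping protocol around a L{TestProtocol}, connect it to a
+    L{StringTransport}, feed it C{data} and return what the wrapped protocol
+    recorded.
+    """
+    wrappingFactory = endpoints._WrappingFactory(TestFactory())
+    wrappingProtocol = wrappingFactory.buildProtocol(None)
+    wrappingProtocol.makeConnection(StringTransport())
+    wrappingProtocol.dataReceived(data)
+    return wrappingProtocol._wrappedProtocol.data
+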
+
+
+class ClientEndpointTestCaseMixin(object):
+    """
+    Generic test methods to be mixed into all client endpoint test classes.
+    """
+    def test_interface(self):
+        """
+        The endpoint provides L{interfaces.IStreamClientEndpoint}.
+        """
+        clientFactory = object()
+        ep, ignoredArgs, address = self.createClientEndpoint(
+            MemoryReactor(), clientFactory)
+        self.assertTrue(verifyObject(interfaces.IStreamClientEndpoint, ep))
+
+
+    def retrieveConnectedFactory(self, reactor):
+        """
+        Retrieve a single factory that has connected using the given reactor.
+        (This behavior is valid for TCP and SSL but needs to be overridden for
+        UNIX.)
+
+        @param reactor: a L{MemoryReactor}
+        """
+        return self.expectedClients(reactor)[0][2]
+
+
+    def test_endpointConnectSuccess(self):
+        """
+        A client endpoint can connect and returns a deferred which gets
+        called back with a protocol instance.
+        """
+        proto = object()
+        mreactor = MemoryReactor()
+
+        clientFactory = object()
+
+        ep, expectedArgs, ignoredDest = self.createClientEndpoint(
+            mreactor, clientFactory)
+
+        d = ep.connect(clientFactory)
+
+        receivedProtos = []
+
+        def checkProto(p):
+            receivedProtos.append(p)
+
+        d.addCallback(checkProto)
+
+        factory = self.retrieveConnectedFactory(mreactor)
+        factory._onConnection.callback(proto)
+        self.assertEqual(receivedProtos, [proto])
+
+        expectedClients = self.expectedClients(mreactor)
+
+        self.assertEqual(len(expectedClients), 1)
+        self.assertConnectArgs(expectedClients[0], expectedArgs)
+
+
+    def test_endpointConnectFailure(self):
+        """
+        If an endpoint tries to connect to a non-listening port it gets
+        a C{ConnectError} failure.
+        """
+        expectedError = error.ConnectError(string="Connection Failed")
+
+        mreactor = RaisingMemoryReactor(connectException=expectedError)
+
+        clientFactory = object()
+
+        ep, ignoredArgs, ignoredDest = self.createClientEndpoint(
+            mreactor, clientFactory)
+
+        d = ep.connect(clientFactory)
+
+        receivedExceptions = []
+
+        def checkFailure(f):
+            receivedExceptions.append(f.value)
+
+        d.addErrback(checkFailure)
+
+        self.assertEqual(receivedExceptions, [expectedError])
+
+
+    def test_endpointConnectingCancelled(self):
+        """
+        When the L{Deferred} returned from L{IStreamClientEndpoint.connect}
+        is cancelled with L{Deferred.cancel}, it is errbacked with a
+        L{ConnectingCancelledError} exception.
+        """
+        mreactor = MemoryReactor()
+
+        clientFactory = object()
+
+        ep, ignoredArgs, address = self.createClientEndpoint(
+            mreactor, clientFactory)
+
+        d = ep.connect(clientFactory)
+
+        receivedFailures = []
+
+        def checkFailure(f):
+            receivedFailures.append(f)
+
+        d.addErrback(checkFailure)
+
+        d.cancel()
+        # When canceled, the connector will immediately notify its factory that
+        # the connection attempt has failed due to a UserError.
+        attemptFactory = self.retrieveConnectedFactory(mreactor)
+        attemptFactory.clientConnectionFailed(None, Failure(error.UserError()))
+        # This should be a feature of MemoryReactor: <http://tm.tl/5630>.
+
+        self.assertEqual(len(receivedFailures), 1)
+
+        failure = receivedFailures[0]
+
+        self.assertIsInstance(failure.value, error.ConnectingCancelledError)
+        self.assertEqual(failure.value.address, address)
+
+
+    def test_endpointConnectNonDefaultArgs(self):
+        """
+        The endpoint should pass its connectArgs parameter to the reactor's
+        connect methods.
+        """
+        factory = object()
+
+        mreactor = MemoryReactor()
+
+        ep, expectedArgs, ignoredHost = self.createClientEndpoint(
+            mreactor, factory,
+            **self.connectArgs())
+
+        ep.connect(factory)
+
+        expectedClients = self.expectedClients(mreactor)
+
+        self.assertEqual(len(expectedClients), 1)
+        self.assertConnectArgs(expectedClients[0], expectedArgs)
+
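+
+
+# Editor's sketch (not part of the upstream test suite): the pattern the
+# mixin above relies on, namely connecting through an endpoint against a
+# L{MemoryReactor} and inspecting the recorded attempt instead of doing real
+# network I/O.  C{clientFactory} can be any placeholder object.
+def _clientEndpointSketch(clientFactory):
+    """
+    Connect a TCP4 client endpoint against a L{MemoryReactor} and return the
+    (host, port, factory, timeout, bindAddress) tuple the reactor recorded.
+    """
+    memoryReactor = MemoryReactor()
+    endpoint = endpoints.TCP4ClientEndpoint(memoryReactor, "localhost", 80)
+    endpoint.connect(clientFactory)
+    return memoryReactor.tcpClients[0]
+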
+
+
+class ServerEndpointTestCaseMixin(object):
+    """
+    Generic test methods to be mixed into all server endpoint test classes.
+    """
+    def test_interface(self):
+        """
+        The endpoint provides L{interfaces.IStreamServerEndpoint}.
+        """
+        factory = object()
+        ep, ignoredArgs, ignoredDest = self.createServerEndpoint(
+                MemoryReactor(), factory)
+        self.assertTrue(verifyObject(interfaces.IStreamServerEndpoint, ep))
+
+
+    def test_endpointListenSuccess(self):
+        """
+        An endpoint can listen and returns a deferred that gets called back
+        with a port instance.
+        """
+        mreactor = MemoryReactor()
+
+        factory = object()
+
+        ep, expectedArgs, expectedHost = self.createServerEndpoint(
+            mreactor, factory)
+
+        d = ep.listen(factory)
+
+        receivedHosts = []
+
+        def checkPortAndServer(port):
+            receivedHosts.append(port.getHost())
+
+        d.addCallback(checkPortAndServer)
+
+        self.assertEqual(receivedHosts, [expectedHost])
+        self.assertEqual(self.expectedServers(mreactor), [expectedArgs])
+
+
+    def test_endpointListenFailure(self):
+        """
+        When an endpoint tries to listen on an already listening port, a
+        C{CannotListenError} failure is errbacked.
+        """
+        factory = object()
+        exception = error.CannotListenError('', 80, factory)
+        mreactor = RaisingMemoryReactor(listenException=exception)
+
+        ep, ignoredArgs, ignoredDest = self.createServerEndpoint(
+            mreactor, factory)
+
+        d = ep.listen(object())
+
+        receivedExceptions = []
+
+        def checkFailure(f):
+            receivedExceptions.append(f.value)
+
+        d.addErrback(checkFailure)
+
+        self.assertEqual(receivedExceptions, [exception])
+
+
+    def test_endpointListenNonDefaultArgs(self):
+        """
+        The endpoint should pass its listenArgs parameter to the reactor's
+        listen methods.
+        """
+        factory = object()
+
+        mreactor = MemoryReactor()
+
+        ep, expectedArgs, ignoredHost = self.createServerEndpoint(
+            mreactor, factory,
+            **self.listenArgs())
+
+        ep.listen(factory)
+
+        expectedServers = self.expectedServers(mreactor)
+
+        self.assertEqual(expectedServers, [expectedArgs])
+
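+
+
+# Editor's sketch (not part of the upstream test suite): the server-side
+# counterpart of the client sketch above.  C{factory} can be any placeholder
+# object.
+def _serverEndpointSketch(factory):
+    """
+    Listen with a TCP4 server endpoint against a L{MemoryReactor} and return
+    the (port, factory, backlog, interface) tuple the reactor recorded.
+    """
+    memoryReactor = MemoryReactor()
+    endpoint = endpoints.TCP4ServerEndpoint(memoryReactor, 8080)
+    endpoint.listen(factory)
+    return memoryReactor.tcpServers[0]
+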
+
+
+class EndpointTestCaseMixin(ServerEndpointTestCaseMixin,
+                            ClientEndpointTestCaseMixin):
+    """
+    Generic test methods to be mixed into all endpoint test classes.
+    """
+
+
+
+class TCP4EndpointsTestCase(EndpointTestCaseMixin, unittest.TestCase):
+    """
+    Tests for TCP IPv4 Endpoints.
+    """
+
+    def expectedServers(self, reactor):
+        """
+        @return: List of calls to L{IReactorTCP.listenTCP}
+        """
+        return reactor.tcpServers
+
+
+    def expectedClients(self, reactor):
+        """
+        @return: List of calls to L{IReactorTCP.connectTCP}
+        """
+        return reactor.tcpClients
+
+
+    def assertConnectArgs(self, receivedArgs, expectedArgs):
+        """
+        Compare host, port, timeout, and bindAddress in C{receivedArgs}
+        to C{expectedArgs}.  We ignore the factory because we only care
+        what protocol comes out of the C{IStreamClientEndpoint.connect}
+        call.
+
+        @param receivedArgs: C{tuple} of (C{host}, C{port}, C{factory},
+            C{timeout}, C{bindAddress}) that was passed to
+            L{IReactorTCP.connectTCP}.
+        @param expectedArgs: C{tuple} of (C{host}, C{port}, C{factory},
+            C{timeout}, C{bindAddress}) that we expect to have been passed
+            to L{IReactorTCP.connectTCP}.
+        """
+        (host, port, ignoredFactory, timeout, bindAddress) = receivedArgs
+        (expectedHost, expectedPort, _ignoredFactory,
+         expectedTimeout, expectedBindAddress) = expectedArgs
+
+        self.assertEqual(host, expectedHost)
+        self.assertEqual(port, expectedPort)
+        self.assertEqual(timeout, expectedTimeout)
+        self.assertEqual(bindAddress, expectedBindAddress)
+
+
+    def connectArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to connect.
+        """
+        return {'timeout': 10, 'bindAddress': ('localhost', 49595)}
+
+
+    def listenArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to listen
+        """
+        return {'backlog': 100, 'interface': '127.0.0.1'}
+
+
+    def createServerEndpoint(self, reactor, factory, **listenArgs):
+        """
+        Create a L{TCP4ServerEndpoint} and return the values needed to verify
+        its behaviour.
+
+        @param reactor: A fake L{IReactorTCP} that L{TCP4ServerEndpoint} can
+            call L{IReactorTCP.listenTCP} on.
+        @param factory: The thing that we expect to be passed to our
+            L{IStreamServerEndpoint.listen} implementation.
+        @param listenArgs: Optional dictionary of arguments to
+            L{IReactorTCP.listenTCP}.
+        """
+        address = IPv4Address("TCP", "0.0.0.0", 0)
+
+        if listenArgs is None:
+            listenArgs = {}
+
+        return (endpoints.TCP4ServerEndpoint(reactor,
+                                             address.port,
+                                             **listenArgs),
+                (address.port, factory,
+                 listenArgs.get('backlog', 50),
+                 listenArgs.get('interface', '')),
+                address)
+
+
+    def createClientEndpoint(self, reactor, clientFactory, **connectArgs):
+        """
+        Create a L{TCP4ClientEndpoint} and return the values needed to verify
+        its behavior.
+
+        @param reactor: A fake L{IReactorTCP} that L{TCP4ClientEndpoint} can
+            call L{IReactorTCP.connectTCP} on.
+        @param clientFactory: The thing that we expect to be passed to our
+            L{IStreamClientEndpoint.connect} implementation.
+        @param connectArgs: Optional dictionary of arguments to
+            L{IReactorTCP.connectTCP}
+        """
+        address = IPv4Address("TCP", "localhost", 80)
+
+        return (endpoints.TCP4ClientEndpoint(reactor,
+                                             address.host,
+                                             address.port,
+                                             **connectArgs),
+                (address.host, address.port, clientFactory,
+                 connectArgs.get('timeout', 30),
+                 connectArgs.get('bindAddress', None)),
+                address)
+
+
+
+class TCP6EndpointsTestCase(EndpointTestCaseMixin, unittest.TestCase):
+    """
+    Tests for TCP IPv6 Endpoints.
+    """
+
+    def expectedServers(self, reactor):
+        """
+        @return: List of calls to L{IReactorTCP.listenTCP}
+        """
+        return reactor.tcpServers
+
+
+    def expectedClients(self, reactor):
+        """
+        @return: List of calls to L{IReactorTCP.connectTCP}
+        """
+        return reactor.tcpClients
+
+
+    def assertConnectArgs(self, receivedArgs, expectedArgs):
+        """
+        Compare host, port, timeout, and bindAddress in C{receivedArgs}
+        to C{expectedArgs}.  We ignore the factory because we only care
+        what protocol comes out of the C{IStreamClientEndpoint.connect}
+        call.
+
+        @param receivedArgs: C{tuple} of (C{host}, C{port}, C{factory},
+            C{timeout}, C{bindAddress}) that was passed to
+            L{IReactorTCP.connectTCP}.
+        @param expectedArgs: C{tuple} of (C{host}, C{port}, C{factory},
+            C{timeout}, C{bindAddress}) that we expect to have been passed
+            to L{IReactorTCP.connectTCP}.
+        """
+        (host, port, ignoredFactory, timeout, bindAddress) = receivedArgs
+        (expectedHost, expectedPort, _ignoredFactory,
+         expectedTimeout, expectedBindAddress) = expectedArgs
+
+        self.assertEqual(host, expectedHost)
+        self.assertEqual(port, expectedPort)
+        self.assertEqual(timeout, expectedTimeout)
+        self.assertEqual(bindAddress, expectedBindAddress)
+
+
+    def connectArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to connect.
+        """
+        return {'timeout': 10, 'bindAddress': ('localhost', 49595)}
+
+
+    def listenArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to listen
+        """
+        return {'backlog': 100, 'interface': '::1'}
+
+
+    def createServerEndpoint(self, reactor, factory, **listenArgs):
+        """
+        Create a L{TCP6ServerEndpoint} and return the values needed to verify
+        its behaviour.
+
+        @param reactor: A fake L{IReactorTCP} that L{TCP6ServerEndpoint} can
+            call L{IReactorTCP.listenTCP} on.
+        @param factory: The thing that we expect to be passed to our
+            L{IStreamServerEndpoint.listen} implementation.
+        @param listenArgs: Optional dictionary of arguments to
+            L{IReactorTCP.listenTCP}.
+        """
+        interface = listenArgs.get('interface', '::')
+        address = IPv6Address("TCP", interface, 0)
+
+        if listenArgs is None:
+            listenArgs = {}
+
+        return (endpoints.TCP6ServerEndpoint(reactor,
+                                             address.port,
+                                             **listenArgs),
+                (address.port, factory,
+                 listenArgs.get('backlog', 50),
+                 interface),
+                address)
+
+
+    def createClientEndpoint(self, reactor, clientFactory, **connectArgs):
+        """
+        Create a L{TCP6ClientEndpoint} and return the values needed to verify
+        its behavior.
+
+        @param reactor: A fake L{IReactorTCP} that L{TCP6ClientEndpoint} can
+            call L{IReactorTCP.connectTCP} on.
+        @param clientFactory: The thing that we expect to be passed to our
+            L{IStreamClientEndpoint.connect} implementation.
+        @param connectArgs: Optional dictionary of arguments to
+            L{IReactorTCP.connectTCP}
+        """
+        address = IPv6Address("TCP", "::1", 80)
+
+        return (endpoints.TCP6ClientEndpoint(reactor,
+                                             address.host,
+                                             address.port,
+                                             **connectArgs),
+                (address.host, address.port, clientFactory,
+                 connectArgs.get('timeout', 30),
+                 connectArgs.get('bindAddress', None)),
+                address)
+
+
+class TCP6EndpointNameResolutionTestCase(ClientEndpointTestCaseMixin,
+                                         unittest.TestCase):
+    """
+    Tests for a TCP IPv6 Client Endpoint pointed at a hostname instead
+    of an IPv6 address literal.
+    """
+
+
+    def createClientEndpoint(self, reactor, clientFactory, **connectArgs):
+        """
+        Create a L{TCP6ClientEndpoint} and return the values needed to verify
+        its behavior.
+
+        @param reactor: A fake L{IReactorTCP} that L{TCP6ClientEndpoint} can
+            call L{IReactorTCP.connectTCP} on.
+        @param clientFactory: The thing that we expect to be passed to our
+            L{IStreamClientEndpoint.connect} implementation.
+        @param connectArgs: Optional dictionary of arguments to
+            L{IReactorTCP.connectTCP}
+        """
+        address = IPv6Address("TCP", "::2", 80)
+        self.ep = endpoints.TCP6ClientEndpoint(reactor,
+                                             'ipv6.example.com',
+                                             address.port,
+                                             **connectArgs)
+
+        def testNameResolution(host):
+            self.assertEqual("ipv6.example.com", host)
+            data = [(AF_INET6, SOCK_STREAM, IPPROTO_TCP, '', ('::2', 0, 0, 0)),
+                    (AF_INET6, SOCK_STREAM, IPPROTO_TCP, '', ('::3', 0, 0, 0)),
+                    (AF_INET6, SOCK_STREAM, IPPROTO_TCP, '', ('::4', 0, 0, 0))]
+            return defer.succeed(data)
+
+        self.ep._nameResolution = testNameResolution
+
+        return (self.ep,
+                (address.host, address.port, clientFactory,
+                 connectArgs.get('timeout', 30),
+                 connectArgs.get('bindAddress', None)),
+                address)
+
+
+    def connectArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to connect.
+        """
+        return {'timeout': 10, 'bindAddress': ('localhost', 49595)}
+
+
+    def expectedClients(self, reactor):
+        """
+        @return: List of calls to L{IReactorTCP.connectTCP}
+        """
+        return reactor.tcpClients
+
+
+    def assertConnectArgs(self, receivedArgs, expectedArgs):
+        """
+        Compare host, port, timeout, and bindAddress in C{receivedArgs}
+        to C{expectedArgs}.  We ignore the factory because we only care
+        what protocol comes out of the C{IStreamClientEndpoint.connect}
+        call.
+
+        @param receivedArgs: C{tuple} of (C{host}, C{port}, C{factory},
+            C{timeout}, C{bindAddress}) that was passed to
+            L{IReactorTCP.connectTCP}.
+        @param expectedArgs: C{tuple} of (C{host}, C{port}, C{factory},
+            C{timeout}, C{bindAddress}) that we expect to have been passed
+            to L{IReactorTCP.connectTCP}.
+        """
+        (host, port, ignoredFactory, timeout, bindAddress) = receivedArgs
+        (expectedHost, expectedPort, _ignoredFactory,
+         expectedTimeout, expectedBindAddress) = expectedArgs
+
+        self.assertEqual(host, expectedHost)
+        self.assertEqual(port, expectedPort)
+        self.assertEqual(timeout, expectedTimeout)
+        self.assertEqual(bindAddress, expectedBindAddress)
+
+
+    def test_nameResolution(self):
+        """
+        While resolving host names, C{_nameResolution} calls
+        C{_deferToThread} with C{_getaddrinfo}.
+        """
+        calls = []
+
+        def fakeDeferToThread(f, *args, **kwargs):
+            calls.append((f, args, kwargs))
+            return defer.Deferred()
+
+        endpoint = endpoints.TCP6ClientEndpoint(reactor, 'ipv6.example.com',
+            1234)
+        fakegetaddrinfo = object()
+        endpoint._getaddrinfo = fakegetaddrinfo
+        endpoint._deferToThread = fakeDeferToThread
+        endpoint.connect(TestFactory())
+        self.assertEqual(
+            [(fakegetaddrinfo, ("ipv6.example.com", 0, AF_INET6), {})], calls)
+
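+
+
+# Editor's sketch (not part of the upstream test suite): the blocking lookup
+# that the endpoint's C{_nameResolution} hook defers to a thread.  The
+# argument triple mirrors the one asserted in C{test_nameResolution} above.
+def _resolveIPv6Sketch(hostname):
+    """
+    Resolve C{hostname} to IPv6 addresses with the standard library's
+    C{getaddrinfo}, returning (family, socktype, proto, canonname, sockaddr)
+    tuples shaped like the fake data in C{createClientEndpoint} above.
+    """
+    from socket import getaddrinfo
+    return getaddrinfo(hostname, 0, AF_INET6)
+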
+
+
+class SSL4EndpointsTestCase(EndpointTestCaseMixin,
+                            unittest.TestCase):
+    """
+    Tests for SSL Endpoints.
+    """
+    if skipSSL:
+        skip = skipSSL
+
+    def expectedServers(self, reactor):
+        """
+        @return: List of calls to L{IReactorSSL.listenSSL}
+        """
+        return reactor.sslServers
+
+
+    def expectedClients(self, reactor):
+        """
+        @return: List of calls to L{IReactorSSL.connectSSL}
+        """
+        return reactor.sslClients
+
+
+    def assertConnectArgs(self, receivedArgs, expectedArgs):
+        """
+        Compare host, port, contextFactory, timeout, and bindAddress in
+        C{receivedArgs} to C{expectedArgs}.  We ignore the factory because we
+        only care what protocol comes out of the
+        C{IStreamClientEndpoint.connect} call.
+
+        @param receivedArgs: C{tuple} of (C{host}, C{port}, C{factory},
+            C{contextFactory}, C{timeout}, C{bindAddress}) that was passed to
+            L{IReactorSSL.connectSSL}.
+        @param expectedArgs: C{tuple} of (C{host}, C{port}, C{factory},
+            C{contextFactory}, C{timeout}, C{bindAddress}) that we expect to
+            have been passed to L{IReactorSSL.connectSSL}.
+        """
+        (host, port, ignoredFactory, contextFactory, timeout,
+         bindAddress) = receivedArgs
+
+        (expectedHost, expectedPort, _ignoredFactory, expectedContextFactory,
+         expectedTimeout, expectedBindAddress) = expectedArgs
+
+        self.assertEqual(host, expectedHost)
+        self.assertEqual(port, expectedPort)
+        self.assertEqual(contextFactory, expectedContextFactory)
+        self.assertEqual(timeout, expectedTimeout)
+        self.assertEqual(bindAddress, expectedBindAddress)
+
+
+    def connectArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to connect.
+        """
+        return {'timeout': 10, 'bindAddress': ('localhost', 49595)}
+
+
+    def listenArgs(self):
+        """
+        @return: C{dict} of keyword arguments to pass to listen
+        """
+        return {'backlog': 100, 'interface': '127.0.0.1'}
+
+
+    def setUp(self):
+        """
+        Set up client and server SSL contexts for use later.
+        """
+        self.sKey, self.sCert = makeCertificate(
+            O="Server Test Certificate",
+            CN="server")
+        self.cKey, self.cCert = makeCertificate(
+            O="Client Test Certificate",
+            CN="client")
+        self.serverSSLContext = CertificateOptions(
+            privateKey=self.sKey,
+            certificate=self.sCert,
+            requireCertificate=False)
+        self.clientSSLContext = CertificateOptions(
+            requireCertificate=False)
+
+
+    def createServerEndpoint(self, reactor, factory, **listenArgs):
+        """
+        Create an L{SSL4ServerEndpoint} and return the tools to verify its
+        behaviour.
+
+        @param factory: The thing that we expect to be passed to our
+            L{IStreamServerEndpoint.listen} implementation.
+        @param reactor: A fake L{IReactorSSL} that L{SSL4ServerEndpoint} can
+            call L{IReactorSSL.listenSSL} on.
+        @param listenArgs: Optional dictionary of arguments to
+            L{IReactorSSL.listenSSL}.
+        """
+        address = IPv4Address("TCP", "0.0.0.0", 0)
+
+        return (endpoints.SSL4ServerEndpoint(reactor,
+                                             address.port,
+                                             self.serverSSLContext,
+                                             **listenArgs),
+                (address.port, factory, self.serverSSLContext,
+                 listenArgs.get('backlog', 50),
+                 listenArgs.get('interface', '')),
+                address)
+
+
+    def createClientEndpoint(self, reactor, clientFactory, **connectArgs):
+        """
+        Create an L{SSL4ClientEndpoint} and return the values needed to verify
+        its behaviour.
+
+        @param reactor: A fake L{IReactorSSL} that L{SSL4ClientEndpoint} can
+            call L{IReactorSSL.connectSSL} on.
+        @param clientFactory: The thing that we expect to be passed to our
+            L{IStreamClientEndpoint.connect} implementation.
+        @param connectArgs: Optional dictionary of arguments to
+            L{IReactorSSL.connectSSL}
+        """
+        address = IPv4Address("TCP", "localhost", 80)
+
+        if connectArgs is None:
+            connectArgs = {}
+
+        return (endpoints.SSL4ClientEndpoint(reactor,
+                                             address.host,
+                                             address.port,
+                                             self.clientSSLContext,
+                                             **connectArgs),
+                (address.host, address.port, clientFactory,
+                 self.clientSSLContext,
+                 connectArgs.get('timeout', 30),
+                 connectArgs.get('bindAddress', None)),
+                address)
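+
+
+
+# Editor's sketch (not part of the upstream test suite): the shape of the
+# connectSSL call recorded when an SSL4 client endpoint is used against a
+# L{MemoryReactor}.  It assumes pyOpenSSL is installed (see C{skipSSL}
+# above); C{clientFactory} can be any placeholder object.
+def _sslClientEndpointSketch(clientFactory):
+    """
+    Connect an SSL4 client endpoint against a L{MemoryReactor} and return the
+    (host, port, factory, contextFactory, timeout, bindAddress) tuple the
+    reactor recorded.
+    """
+    memoryReactor = MemoryReactor()
+    contextFactory = CertificateOptions(requireCertificate=False)
+    endpoint = endpoints.SSL4ClientEndpoint(
+        memoryReactor, "localhost", 443, contextFactory)
+    endpoint.connect(clientFactory)
+    return memoryReactor.sslClients[0]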
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_epollreactor.py b/ThirdParty/Twisted/twisted/internet/test/test_epollreactor.py
new file mode 100644
index 0000000..91c686f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_epollreactor.py
@@ -0,0 +1,248 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.epollreactor}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import TestCase
+try:
+    from twisted.internet.epollreactor import _ContinuousPolling
+except ImportError:
+    _ContinuousPolling = None
+from twisted.internet.task import Clock
+from twisted.internet.error import ConnectionDone
+
+
+
+class Descriptor(object):
+    """
+    Records reads and writes, as if it were a C{FileDescriptor}.
+    """
+
+    def __init__(self):
+        self.events = []
+
+
+    def fileno(self):
+        return 1
+
+
+    def doRead(self):
+        self.events.append("read")
+
+
+    def doWrite(self):
+        self.events.append("write")
+
+
+    def connectionLost(self, reason):
+        reason.trap(ConnectionDone)
+        self.events.append("lost")
+
+
+
+class ContinuousPollingTests(TestCase):
+    """
+    L{_ContinuousPolling} can be used to read from and write to
+    C{FileDescriptor} objects.
+    """
+
+    def test_addReader(self):
+        """
+        Adding a reader when there was previously no reader starts up a
+        C{LoopingCall}.
+        """
+        poller = _ContinuousPolling(Clock())
+        self.assertEqual(poller._loop, None)
+        reader = object()
+        self.assertFalse(poller.isReading(reader))
+        poller.addReader(reader)
+        self.assertNotEqual(poller._loop, None)
+        self.assertTrue(poller._loop.running)
+        self.assertIdentical(poller._loop.clock, poller._reactor)
+        self.assertTrue(poller.isReading(reader))
+
+
+    def test_addWriter(self):
+        """
+        Adding a writer when there was previously no writer starts up a
+        C{LoopingCall}.
+        """
+        poller = _ContinuousPolling(Clock())
+        self.assertEqual(poller._loop, None)
+        writer = object()
+        self.assertFalse(poller.isWriting(writer))
+        poller.addWriter(writer)
+        self.assertNotEqual(poller._loop, None)
+        self.assertTrue(poller._loop.running)
+        self.assertIdentical(poller._loop.clock, poller._reactor)
+        self.assertTrue(poller.isWriting(writer))
+
+
+    def test_removeReader(self):
+        """
+        Removing a reader stops the C{LoopingCall}.
+        """
+        poller = _ContinuousPolling(Clock())
+        reader = object()
+        poller.addReader(reader)
+        poller.removeReader(reader)
+        self.assertEqual(poller._loop, None)
+        self.assertEqual(poller._reactor.getDelayedCalls(), [])
+        self.assertFalse(poller.isReading(reader))
+
+
+    def test_removeWriter(self):
+        """
+        Removing a writer stops the C{LoopingCall}.
+        """
+        poller = _ContinuousPolling(Clock())
+        writer = object()
+        poller.addWriter(writer)
+        poller.removeWriter(writer)
+        self.assertEqual(poller._loop, None)
+        self.assertEqual(poller._reactor.getDelayedCalls(), [])
+        self.assertFalse(poller.isWriting(writer))
+
+
+    def test_removeUnknown(self):
+        """
+        Removing unknown readers and writers silently does nothing.
+        """
+        poller = _ContinuousPolling(Clock())
+        poller.removeWriter(object())
+        poller.removeReader(object())
+
+
+    def test_multipleReadersAndWriters(self):
+        """
+        Adding multiple readers and writers results in a single
+        C{LoopingCall}.
+        """
+        poller = _ContinuousPolling(Clock())
+        writer = object()
+        poller.addWriter(writer)
+        self.assertNotEqual(poller._loop, None)
+        poller.addWriter(object())
+        self.assertNotEqual(poller._loop, None)
+        poller.addReader(object())
+        self.assertNotEqual(poller._loop, None)
+        poller.addReader(object())
+        poller.removeWriter(writer)
+        self.assertNotEqual(poller._loop, None)
+        self.assertTrue(poller._loop.running)
+        self.assertEqual(len(poller._reactor.getDelayedCalls()), 1)
+
+
+    def test_readerPolling(self):
+        """
+        Adding a reader causes its C{doRead} to be called every millisecond.
+        """
+        reactor = Clock()
+        poller = _ContinuousPolling(reactor)
+        desc = Descriptor()
+        poller.addReader(desc)
+        self.assertEqual(desc.events, [])
+        reactor.advance(0.00001)
+        self.assertEqual(desc.events, ["read"])
+        reactor.advance(0.00001)
+        self.assertEqual(desc.events, ["read", "read"])
+        reactor.advance(0.00001)
+        self.assertEqual(desc.events, ["read", "read", "read"])
+
+
+    def test_writerPolling(self):
+        """
+        Adding a writer causes its C{doWrite} to be called every millisecond.
+        """
+        reactor = Clock()
+        poller = _ContinuousPolling(reactor)
+        desc = Descriptor()
+        poller.addWriter(desc)
+        self.assertEqual(desc.events, [])
+        reactor.advance(0.001)
+        self.assertEqual(desc.events, ["write"])
+        reactor.advance(0.001)
+        self.assertEqual(desc.events, ["write", "write"])
+        reactor.advance(0.001)
+        self.assertEqual(desc.events, ["write", "write", "write"])
+
+
+    def test_connectionLostOnRead(self):
+        """
+        If a C{doRead} returns a value indicating disconnection,
+        C{connectionLost} is called on it.
+        """
+        reactor = Clock()
+        poller = _ContinuousPolling(reactor)
+        desc = Descriptor()
+        desc.doRead = lambda: ConnectionDone()
+        poller.addReader(desc)
+        self.assertEqual(desc.events, [])
+        reactor.advance(0.001)
+        self.assertEqual(desc.events, ["lost"])
+
+
+    def test_connectionLostOnWrite(self):
+        """
+        If a C{doWrite} returns a value indicating disconnection,
+        C{connectionLost} is called on it.
+        """
+        reactor = Clock()
+        poller = _ContinuousPolling(reactor)
+        desc = Descriptor()
+        desc.doWrite = lambda: ConnectionDone()
+        poller.addWriter(desc)
+        self.assertEqual(desc.events, [])
+        reactor.advance(0.001)
+        self.assertEqual(desc.events, ["lost"])
+
+
+    def test_removeAll(self):
+        """
+        L{_ContinuousPolling.removeAll} removes all descriptors and returns
+        the readers and writers.
+        """
+        poller = _ContinuousPolling(Clock())
+        reader = object()
+        writer = object()
+        both = object()
+        poller.addReader(reader)
+        poller.addReader(both)
+        poller.addWriter(writer)
+        poller.addWriter(both)
+        removed = poller.removeAll()
+        self.assertEqual(poller.getReaders(), [])
+        self.assertEqual(poller.getWriters(), [])
+        self.assertEqual(len(removed), 3)
+        self.assertEqual(set(removed), set([reader, writer, both]))
+
+
+    def test_getReaders(self):
+        """
+        L{_ContinuousPolling.getReaders} returns a list of the read
+        descriptors.
+        """
+        poller = _ContinuousPolling(Clock())
+        reader = object()
+        poller.addReader(reader)
+        self.assertIn(reader, poller.getReaders())
+
+
+    def test_getWriters(self):
+        """
+        L{_ContinuousPolling.getWriters} returns a list of the write
+        descriptors.
+        """
+        poller = _ContinuousPolling(Clock())
+        writer = object()
+        poller.addWriter(writer)
+        self.assertIn(writer, poller.getWriters())
+
+    if _ContinuousPolling is None:
+        skip = "epoll not supported in this environment."
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_fdset.py b/ThirdParty/Twisted/twisted/internet/test/test_fdset.py
new file mode 100644
index 0000000..2139cf2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_fdset.py
@@ -0,0 +1,426 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorFDSet}.
+"""
+
+__metaclass__ = type
+
+import os, socket, traceback
+
+from zope.interface import implementer
+
+from twisted.python.runtime import platform
+from twisted.trial.unittest import SkipTest
+from twisted.internet.interfaces import IReactorFDSet, IReadDescriptor
+from twisted.internet.abstract import FileDescriptor
+from twisted.internet.test.reactormixins import ReactorBuilder
+
+# twisted.internet.tcp nicely defines some names with proper values on
+# several different platforms.
+from twisted.internet.tcp import EINPROGRESS, EWOULDBLOCK
+
+
+def socketpair():
+    serverSocket = socket.socket()
+    serverSocket.bind(('127.0.0.1', 0))
+    serverSocket.listen(1)
+    try:
+        client = socket.socket()
+        try:
+            client.setblocking(False)
+            try:
+                client.connect(('127.0.0.1', serverSocket.getsockname()[1]))
+            except socket.error as e:
+                if e.args[0] not in (EINPROGRESS, EWOULDBLOCK):
+                    raise
+            server, addr = serverSocket.accept()
+        except:
+            client.close()
+            raise
+    finally:
+        serverSocket.close()
+
+    return client, server
+
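+
+# Editor's sketch (not part of the upstream test suite): C{socket.socketpair}
+# is not available on every supported platform (notably Windows), which is
+# why the helper above builds a connected pair over loopback TCP instead.
+def _socketpairSketch():
+    """
+    Exercise the C{socketpair} helper: send a byte from the server side and
+    close both ends.
+    """
+    client, server = socketpair()
+    try:
+        server.sendall(b'x')
+    finally:
+        client.close()
+        server.close()
+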
+
+class ReactorFDSetTestsBuilder(ReactorBuilder):
+    """
+    Builder defining tests relating to L{IReactorFDSet}.
+    """
+    requiredInterfaces = [IReactorFDSet]
+
+    def _connectedPair(self):
+        """
+        Return the two sockets which make up a new TCP connection.
+        """
+        client, server = socketpair()
+        self.addCleanup(client.close)
+        self.addCleanup(server.close)
+        return client, server
+
+
+    def _simpleSetup(self):
+        reactor = self.buildReactor()
+
+        client, server = self._connectedPair()
+
+        fd = FileDescriptor(reactor)
+        fd.fileno = client.fileno
+
+        return reactor, fd, server
+
+
+    def test_addReader(self):
+        """
+        C{reactor.addReader()} accepts an L{IReadDescriptor} provider and calls
+        its C{doRead} method when there may be data available on its C{fileno}.
+        """
+        reactor, fd, server = self._simpleSetup()
+
+        def removeAndStop():
+            reactor.removeReader(fd)
+            reactor.stop()
+        fd.doRead = removeAndStop
+        reactor.addReader(fd)
+        server.sendall(b'x')
+
+        # The reactor will only stop if it calls fd.doRead.
+        self.runReactor(reactor)
+        # Nothing to assert, just be glad we got this far.
+
+
+    def test_removeReader(self):
+        """
+        L{reactor.removeReader()} accepts an L{IReadDescriptor} provider
+        previously passed to C{reactor.addReader()} and causes it to no longer
+        be monitored for input events.
+        """
+        reactor, fd, server = self._simpleSetup()
+
+        def fail():
+            self.fail("doRead should not be called")
+        fd.doRead = fail
+
+        reactor.addReader(fd)
+        reactor.removeReader(fd)
+        server.sendall(b'x')
+
+        # Give the reactor two timed event passes to notice that there's I/O
+        # (if it is incorrectly watching for I/O).
+        reactor.callLater(0, reactor.callLater, 0, reactor.stop)
+
+        self.runReactor(reactor)
+        # Getting here probably means the right thing happened.
+
+
+    def test_addWriter(self):
+        """
+        C{reactor.addWriter()} accepts an L{IWriteDescriptor} provider and
+        calls its C{doWrite} method when it may be possible to write to its
+        C{fileno}.
+        """
+        reactor, fd, server = self._simpleSetup()
+
+        def removeAndStop():
+            reactor.removeWriter(fd)
+            reactor.stop()
+        fd.doWrite = removeAndStop
+        reactor.addWriter(fd)
+
+        self.runReactor(reactor)
+        # Getting here is great.
+
+
+    def _getFDTest(self, kind):
+        """
+        Helper for getReaders and getWriters tests.
+        """
+        reactor = self.buildReactor()
+        get = getattr(reactor, 'get' + kind + 's')
+        add = getattr(reactor, 'add' + kind)
+        remove = getattr(reactor, 'remove' + kind)
+
+        client, server = self._connectedPair()
+
+        self.assertNotIn(client, get())
+        self.assertNotIn(server, get())
+
+        add(client)
+        self.assertIn(client, get())
+        self.assertNotIn(server, get())
+
+        remove(client)
+        self.assertNotIn(client, get())
+        self.assertNotIn(server, get())
+
+
+    def test_getReaders(self):
+        """
+        L{IReactorFDSet.getReaders} reflects the additions and removals made
+        with L{IReactorFDSet.addReader} and L{IReactorFDSet.removeReader}.
+        """
+        self._getFDTest('Reader')
+
+
+    def test_removeWriter(self):
+        """
+        L{reactor.removeWriter()} accepts an L{IWriteDescriptor} provider
+        previously passed to C{reactor.addWriter()} and causes it to no longer
+        be monitored for writability.
+        """
+        reactor, fd, server = self._simpleSetup()
+
+        def fail():
+            self.fail("doWrite should not be called")
+        fd.doWrite = fail
+
+        reactor.addWriter(fd)
+        reactor.removeWriter(fd)
+
+        # Give the reactor two timed event passes to notice that there's I/O
+        # (if it is incorrectly watching for I/O).
+        reactor.callLater(0, reactor.callLater, 0, reactor.stop)
+
+        self.runReactor(reactor)
+        # Getting here probably means the right thing happened.
+
+
+    def test_getWriters(self):
+        """
+        L{IReactorFDSet.getWriters} reflects the additions and removals made
+        with L{IReactorFDSet.addWriter} and L{IReactorFDSet.removeWriter}.
+        """
+        self._getFDTest('Writer')
+
+
+    def test_removeAll(self):
+        """
+        C{reactor.removeAll()} removes all registered L{IReadDescriptor}
+        providers and all registered L{IWriteDescriptor} providers and returns
+        them.
+        """
+        reactor, fd, server = self._simpleSetup()
+
+        fd.doRead = lambda: self.fail("doRead should not be called")
+        fd.doWrite = lambda: self.fail("doWrite should not be called")
+
+        server.sendall(b'x')
+
+        reactor.addReader(fd)
+        reactor.addWriter(fd)
+
+        removed = reactor.removeAll()
+
+        # Give the reactor two timed event passes to notice that there's I/O
+        # (if it is incorrectly watching for I/O).
+        reactor.callLater(0, reactor.callLater, 0, reactor.stop)
+
+        self.runReactor(reactor)
+        # Getting here probably means the right thing happened.
+
+        self.assertEqual(removed, [fd])
+
+
+    def test_removedFromReactor(self):
+        """
+        A descriptor's C{fileno} method should not be called after the
+        descriptor has been removed from the reactor.
+        """
+        reactor = self.buildReactor()
+        descriptor = RemovingDescriptor(reactor)
+        reactor.callWhenRunning(descriptor.start)
+        self.runReactor(reactor)
+        self.assertEqual(descriptor.calls, [])
+
+
+    def test_negativeOneFileDescriptor(self):
+        """
+        If L{FileDescriptor.fileno} returns C{-1}, the descriptor is removed
+        from the reactor.
+        """
+        reactor = self.buildReactor()
+
+        client, server = self._connectedPair()
+
+        class DisappearingDescriptor(FileDescriptor):
+            _fileno = server.fileno()
+
+            _received = b""
+
+            def fileno(self):
+                return self._fileno
+
+            def doRead(self):
+                self._fileno = -1
+                self._received += server.recv(1)
+                client.send(b'y')
+
+            def connectionLost(self, reason):
+                reactor.stop()
+
+        descriptor = DisappearingDescriptor(reactor)
+        reactor.addReader(descriptor)
+        client.send(b'x')
+        self.runReactor(reactor)
+        self.assertEqual(descriptor._received, b"x")
+
+
+    def test_lostFileDescriptor(self):
+        """
+        The file descriptor underlying a FileDescriptor may be closed and
+        replaced by another at some point.  Bytes which arrive on the new
+        descriptor must not be delivered to the FileDescriptor which was
+        originally registered with the original descriptor of the same number.
+
+        Practically speaking, this is difficult or impossible to detect.  The
+        implementation relies on C{fileno} raising an exception if the original
+        descriptor has gone away.  If C{fileno} continues to return the original
+        file descriptor value, the reactor may deliver events from that
+        descriptor.  This is a best effort attempt to ease certain debugging
+        situations.  Applications should not intentionally rely on it.
+        """
+        reactor = self.buildReactor()
+
+        name = reactor.__class__.__name__
+        if name in ('EPollReactor', 'KQueueReactor', 'CFReactor'):
+            # Closing a file descriptor immediately removes it from the epoll
+            # set without generating a notification.  That means epollreactor
+            # will not call any methods on Victim after the close, so there's
+            # no chance to notice the socket is no longer valid.
+            raise SkipTest("%r cannot detect lost file descriptors" % (name,))
+
+        client, server = self._connectedPair()
+
+        class Victim(FileDescriptor):
+            """
+            This L{FileDescriptor} will have its socket closed out from under it
+            and another socket will take its place.  It will raise a
+            socket.error from C{fileno} after this happens (because socket
+            objects remember whether they have been closed), so as long as the
+            reactor calls the C{fileno} method the problem will be detected.
+            """
+            def fileno(self):
+                return server.fileno()
+
+            def doRead(self):
+                raise Exception("Victim.doRead should never be called")
+
+            def connectionLost(self, reason):
+                """
+                When the problem is detected, the reactor should disconnect this
+                file descriptor.  When that happens, stop the reactor so the
+                test ends.
+                """
+                reactor.stop()
+
+        reactor.addReader(Victim())
+
+        # Arrange for the socket to be replaced at some unspecified time.
+        # Significantly, this will not be while any I/O processing code is on
+        # the stack.  It is something that happens independently and cannot be
+        # relied upon to happen at a convenient time, such as within a call to
+        # doRead.
+        def messItUp():
+            newC, newS = self._connectedPair()
+            fileno = server.fileno()
+            server.close()
+            os.dup2(newS.fileno(), fileno)
+            newC.send(b"x")
+        reactor.callLater(0, messItUp)
+
+        self.runReactor(reactor)
+
+        # If the implementation feels like logging the exception raised by
+        # MessedUp.fileno, that's fine.
+        self.flushLoggedErrors(socket.error)
+    if platform.isWindows():
+        test_lostFileDescriptor.skip = (
+            "Cannot duplicate socket filenos on Windows")
+
+
+    def test_connectionLostOnShutdown(self):
+        """
+        Any file descriptors added to the reactor have their C{connectionLost}
+        called when C{reactor.stop} is called.
+        """
+        reactor = self.buildReactor()
+
+        class DoNothingDescriptor(FileDescriptor):
+            def doRead(self):
+                return None
+            def doWrite(self):
+                return None
+
+        client, server = self._connectedPair()
+
+        fd1 = DoNothingDescriptor(reactor)
+        fd1.fileno = client.fileno
+        fd2 = DoNothingDescriptor(reactor)
+        fd2.fileno = server.fileno
+        reactor.addReader(fd1)
+        reactor.addWriter(fd2)
+
+        reactor.callWhenRunning(reactor.stop)
+        self.runReactor(reactor)
+        self.assertTrue(fd1.disconnected)
+        self.assertTrue(fd2.disconnected)
+
+
+
+ at implementer(IReadDescriptor)
+class RemovingDescriptor(object):
+    """
+    A read descriptor which removes itself from the reactor as soon as it
+    gets a chance to do a read and keeps track of when its own C{fileno}
+    method is called.
+
+    @ivar insideReactor: A flag which is true as long as the reactor has
+        this descriptor as a reader.
+
+    @ivar calls: A list of the bottom of the call stack for any call to
+        C{fileno} when C{insideReactor} is false.
+    """
+
+
+    def __init__(self, reactor):
+        self.reactor = reactor
+        self.insideReactor = False
+        self.calls = []
+        self.read, self.write = socketpair()
+
+
+    def start(self):
+        self.insideReactor = True
+        self.reactor.addReader(self)
+        self.write.send(b'a')
+
+
+    def logPrefix(self):
+        return 'foo'
+
+
+    def doRead(self):
+        self.reactor.removeReader(self)
+        self.insideReactor = False
+        self.reactor.stop()
+        self.read.close()
+        self.write.close()
+
+
+    def fileno(self):
+        if not self.insideReactor:
+            self.calls.append(traceback.extract_stack(limit=5)[:-1])
+        return self.read.fileno()
+
+
+    def connectionLost(self, reason):
+        # Ideally we'd close the descriptors here... but actually
+        # connectionLost is never called because we remove ourselves from the
+        # reactor before it stops.
+        pass
+
+globals().update(ReactorFDSetTestsBuilder.makeTestCaseClasses())
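
The RemovingDescriptor above doubles as a template for any hand-rolled read
descriptor: implement fileno, doRead, connectionLost and logPrefix, then hand
the object to reactor.addReader.  The sketch below only illustrates that
pattern; the class name and the socketpair plumbing are invented for the
example and it assumes a POSIX reactor.

    from socket import socketpair

    from zope.interface import implementer
    from twisted.internet.interfaces import IReadDescriptor
    from twisted.internet import reactor

    @implementer(IReadDescriptor)
    class OneShotReader(object):
        """
        Read a single byte from a socketpair, then stop the reactor.
        """
        def __init__(self):
            self.readEnd, self.writeEnd = socketpair()

        def logPrefix(self):
            return 'OneShotReader'

        def fileno(self):
            return self.readEnd.fileno()

        def doRead(self):
            # Consume the byte, unregister and clean up before stopping.
            self.readEnd.recv(1)
            reactor.removeReader(self)
            self.readEnd.close()
            self.writeEnd.close()
            reactor.stop()

        def connectionLost(self, reason):
            # Never reached here: the descriptor removes itself in doRead.
            pass

    reader = OneShotReader()
    reactor.addReader(reader)
    reader.writeEnd.send(b'x')
    reactor.run()
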
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_filedescriptor.py b/ThirdParty/Twisted/twisted/internet/test/test_filedescriptor.py
new file mode 100644
index 0000000..7a5465f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_filedescriptor.py
@@ -0,0 +1,99 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Whitebox tests for L{twisted.internet.abstract.FileDescriptor}.
+"""
+
+from __future__ import division, absolute_import
+
+from zope.interface.verify import verifyClass
+
+from twisted.internet.abstract import FileDescriptor
+from twisted.internet.interfaces import IPushProducer
+from twisted.trial.unittest import SynchronousTestCase
+
+
+
+class MemoryFile(FileDescriptor):
+    """
+    A L{FileDescriptor} customization which writes to a Python list in memory
+    with certain limitations.
+
+    @ivar _written: A C{list} of C{bytes} which have been accepted as written.
+
+    @ivar _freeSpace: A C{int} giving the number of bytes which will be accepted
+        by future writes.
+    """
+    connected = True
+
+    def __init__(self):
+        FileDescriptor.__init__(self, reactor=object())
+        self._written = []
+        self._freeSpace = 0
+
+
+    def startWriting(self):
+        pass
+
+
+    def stopWriting(self):
+        pass
+
+
+    def writeSomeData(self, data):
+        """
+        Copy at most C{self._freeSpace} bytes from C{data} into C{self._written}.
+
+        @return: A C{int} indicating how many bytes were copied from C{data}.
+        """
+        acceptLength = min(self._freeSpace, len(data))
+        if acceptLength:
+            self._freeSpace -= acceptLength
+            self._written.append(data[:acceptLength])
+        return acceptLength
+
+
+
+class FileDescriptorTests(SynchronousTestCase):
+    """
+    Tests for L{FileDescriptor}.
+    """
+    def test_writeWithUnicodeRaisesException(self):
+        """
+        L{FileDescriptor.write} doesn't accept unicode data.
+        """
+        fileDescriptor = FileDescriptor(reactor=object())
+        self.assertRaises(TypeError, fileDescriptor.write, u'foo')
+
+
+    def test_writeSequenceWithUnicodeRaisesException(self):
+        """
+        L{FileDescriptor.writeSequence} doesn't accept unicode data.
+        """
+        fileDescriptor = FileDescriptor(reactor=object())
+        self.assertRaises(
+            TypeError, fileDescriptor.writeSequence, [b'foo', u'bar', b'baz'])
+
+
+    def test_implementInterfaceIPushProducer(self):
+        """
+        L{FileDescriptor} should implement L{IPushProducer}.
+        """
+        self.assertTrue(verifyClass(IPushProducer, FileDescriptor))
+
+
+
+class WriteDescriptorTests(SynchronousTestCase):
+    """
+    Tests for L{FileDescriptor}'s implementation of L{IWriteDescriptor}.
+    """
+    def test_kernelBufferFull(self):
+        """
+        When L{FileDescriptor.writeSomeData} returns C{0} to indicate no more
+        data can be written immediately, L{FileDescriptor.doWrite} returns
+        C{None}.
+        """
+        descriptor = MemoryFile()
+        descriptor.write(b"hello, world")
+        self.assertIdentical(None, descriptor.doWrite())
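
The MemoryFile double above makes the writeSomeData contract concrete:
FileDescriptor.write only buffers, and doWrite hands the buffer to
writeSomeData, which reports how much was accepted.  A short follow-on sketch
of that behaviour, assuming the standard FileDescriptor buffering logic (the
_freeSpace manipulation is only for illustration):

    descriptor = MemoryFile()
    descriptor.write(b"hello, world")    # buffered; nothing accepted yet
    assert descriptor.doWrite() is None  # writeSomeData returned 0
    descriptor._freeSpace = 5            # pretend the kernel buffer drained a bit
    descriptor.doWrite()                 # the first five bytes are now accepted
    assert b"".join(descriptor._written) == b"hello"
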
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_gireactor.py b/ThirdParty/Twisted/twisted/internet/test/test_gireactor.py
new file mode 100644
index 0000000..b87c4de
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_gireactor.py
@@ -0,0 +1,251 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+GI/GTK3 reactor tests.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, os
+try:
+    from twisted.internet import gireactor
+    from gi.repository import Gio
+except ImportError:
+    gireactor = None
+    gtk3reactor = None
+else:
+    # gtk3reactor may be unavailable even if gireactor is available; in
+    # particular in pygobject 3.4/gtk 3.6, when no X11 DISPLAY is found.
+    try:
+        from twisted.internet import gtk3reactor
+    except ImportError:
+        gtk3reactor = None
+    else:
+        from gi.repository import Gtk
+
+from twisted.python.filepath import FilePath
+from twisted.python.runtime import platform
+from twisted.internet.defer import Deferred
+from twisted.internet.error import ReactorAlreadyRunning
+from twisted.internet.protocol import ProcessProtocol
+from twisted.trial.unittest import TestCase, SkipTest
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.test.test_twisted import SetAsideModule
+from twisted.internet.interfaces import IReactorProcess
+
+# Skip all tests if gi is unavailable:
+if gireactor is None:
+    skip = "gtk3/gi not importable"
+
+
+class GApplicationRegistration(ReactorBuilder, TestCase):
+    """
+    GtkApplication and GApplication are supported by
+    L{twisted.internet.gtk3reactor} and L{twisted.internet.gireactor}.
+
+    We inherit from L{ReactorBuilder} in order to use some of its
+    reactor-running infrastructure, but don't need its test-creation
+    functionality.
+    """
+    def runReactor(self, app, reactor):
+        """
+        Register the app, run the reactor, make sure app was activated, and
+        that reactor was running, and that reactor can be stopped.
+        """
+        if not hasattr(app, "quit"):
+            raise SkipTest("Version of PyGObject is too old.")
+
+        result = []
+        def stop():
+            result.append("stopped")
+            reactor.stop()
+        def activate(widget):
+            result.append("activated")
+            reactor.callLater(0, stop)
+        app.connect('activate', activate)
+
+        # We want reactor.stop() to *always* stop the event loop, even if
+        # someone has called hold() on the application and never done the
+        # corresponding release() -- for more details see
+        # http://developer.gnome.org/gio/unstable/GApplication.html.
+        app.hold()
+
+        reactor.registerGApplication(app)
+        ReactorBuilder.runReactor(self, reactor)
+        self.assertEqual(result, ["activated", "stopped"])
+
+
+    def test_gApplicationActivate(self):
+        """
+        L{Gio.Application} instances can be registered with a gireactor.
+        """
+        reactor = gireactor.GIReactor(useGtk=False)
+        self.addCleanup(self.unbuildReactor, reactor)
+        app = Gio.Application(
+            application_id='com.twistedmatrix.trial.gireactor',
+            flags=Gio.ApplicationFlags.FLAGS_NONE)
+
+        self.runReactor(app, reactor)
+
+
+    def test_gtkApplicationActivate(self):
+        """
+        L{Gtk.Application} instances can be registered with a gtk3reactor.
+        """
+        reactor = gtk3reactor.Gtk3Reactor()
+        self.addCleanup(self.unbuildReactor, reactor)
+        app = Gtk.Application(
+            application_id='com.twistedmatrix.trial.gtk3reactor',
+            flags=Gio.ApplicationFlags.FLAGS_NONE)
+
+        self.runReactor(app, reactor)
+
+    if gtk3reactor is None:
+        test_gtkApplicationActivate.skip = (
+            "Gtk unavailable (may require running with X11 DISPLAY env set)")
+
+
+    def test_portable(self):
+        """
+        L{gireactor.PortableGIReactor} doesn't support application
+        registration at this time.
+        """
+        reactor = gireactor.PortableGIReactor()
+        self.addCleanup(self.unbuildReactor, reactor)
+        app = Gio.Application(
+            application_id='com.twistedmatrix.trial.gireactor',
+            flags=Gio.ApplicationFlags.FLAGS_NONE)
+        self.assertRaises(NotImplementedError,
+                          reactor.registerGApplication, app)
+
+
+    def test_noQuit(self):
+        """
+        Older versions of PyGObject lack C{Application.quit}, and so won't
+        allow registration.
+        """
+        reactor = gireactor.GIReactor(useGtk=False)
+        self.addCleanup(self.unbuildReactor, reactor)
+        # An app with no "quit" method:
+        app = object()
+        exc = self.assertRaises(RuntimeError, reactor.registerGApplication, app)
+        self.assertTrue(exc.args[0].startswith(
+                "Application registration is not"))
+
+
+    def test_cantRegisterAfterRun(self):
+        """
+        It is not possible to register a C{Application} after the reactor has
+        already started.
+        """
+        reactor = gireactor.GIReactor(useGtk=False)
+        self.addCleanup(self.unbuildReactor, reactor)
+        app = Gio.Application(
+            application_id='com.twistedmatrix.trial.gireactor',
+            flags=Gio.ApplicationFlags.FLAGS_NONE)
+
+        def tryRegister():
+            exc = self.assertRaises(ReactorAlreadyRunning,
+                                    reactor.registerGApplication, app)
+            self.assertEqual(exc.args[0],
+                             "Can't register application after reactor was started.")
+            reactor.stop()
+        reactor.callLater(0, tryRegister)
+        ReactorBuilder.runReactor(self, reactor)
+
+
+    def test_cantRegisterTwice(self):
+        """
+        It is not possible to register more than one C{Application}.
+        """
+        reactor = gireactor.GIReactor(useGtk=False)
+        self.addCleanup(self.unbuildReactor, reactor)
+        app = Gio.Application(
+            application_id='com.twistedmatrix.trial.gireactor',
+            flags=Gio.ApplicationFlags.FLAGS_NONE)
+        reactor.registerGApplication(app)
+        app2 = Gio.Application(
+            application_id='com.twistedmatrix.trial.gireactor2',
+            flags=Gio.ApplicationFlags.FLAGS_NONE)
+        exc = self.assertRaises(RuntimeError,
+                                    reactor.registerGApplication, app2)
+        self.assertEqual(exc.args[0],
+                         "Can't register more than one application instance.")
+
+
+
+class PygtkCompatibilityTests(TestCase):
+    """
+    pygtk imports are either prevented, or a compatibility layer is used if
+    possible.
+    """
+
+    def test_noCompatibilityLayer(self):
+        """
+        If no compatibility layer is present, imports of gobject and friends
+        are disallowed.
+
+        We do this by running a process where we make sure gi.pygtkcompat
+        isn't present.
+        """
+        from twisted.internet import reactor
+        if not IReactorProcess.providedBy(reactor):
+            raise SkipTest("No process support available in this reactor.")
+
+        result = Deferred()
+        class Stdout(ProcessProtocol):
+            data = b""
+
+            def errReceived(self, err):
+                print(err)
+
+            def outReceived(self, data):
+                self.data += data
+
+            def processExited(self, reason):
+                result.callback(self.data)
+
+        path = FilePath(__file__.encode("utf-8")).sibling(
+            b"process_gireactornocompat.py").path
+        reactor.spawnProcess(Stdout(), sys.executable, [sys.executable, path],
+                             env=os.environ)
+        result.addCallback(self.assertEqual, b"success")
+        return result
+
+
+    def test_compatibilityLayer(self):
+        """
+        If the compatibility layer is present, importing gobject uses the gi
+        compatibility layer.
+        """
+        if "gi.pygtkcompat" not in sys.modules:
+            raise SkipTest("This version of gi doesn't include pygtkcompat.")
+        import gobject
+        self.assertTrue(gobject.__name__.startswith("gi."))
+
+
+
+class Gtk3ReactorTests(TestCase):
+    """
+    Tests for L{gtk3reactor}.
+    """
+
+    def test_requiresDISPLAY(self):
+        """
+        On X11, L{gtk3reactor} is unimportable if the C{DISPLAY} environment
+        variable is not set.
+        """
+        display = os.environ.get("DISPLAY", None)
+        if display is not None:
+            self.addCleanup(os.environ.__setitem__, "DISPLAY", display)
+            del os.environ["DISPLAY"]
+        with SetAsideModule("twisted.internet.gtk3reactor"):
+            exc = self.assertRaises(ImportError,
+                                    __import__, "twisted.internet.gtk3reactor")
+            self.assertEqual(
+                exc.args[0],
+                "Gtk3 requires X11, and no DISPLAY environment variable is set")
+
+    if platform.getType() != "posix" or platform.isMacOSX():
+        test_requiresDISPLAY.skip = "This test is only relevant when using X11"
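
Taken together, the application-side recipe these tests exercise looks roughly
as follows; this is a hedged sketch, and the application id is invented.

    from gi.repository import Gio
    from twisted.internet import gireactor

    reactor = gireactor.GIReactor(useGtk=False)

    app = Gio.Application(
        application_id='org.example.hypothetical',
        flags=Gio.ApplicationFlags.FLAGS_NONE)

    def activate(ignored):
        # Real work would start here; stop shortly afterwards for the example.
        reactor.callLater(0, reactor.stop)

    app.connect('activate', activate)
    app.hold()    # so reactor.stop() always ends the GLib main loop
    reactor.registerGApplication(app)
    reactor.run()
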
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_glibbase.py b/ThirdParty/Twisted/twisted/internet/test/test_glibbase.py
new file mode 100644
index 0000000..53524cd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_glibbase.py
@@ -0,0 +1,68 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for twisted.internet.glibbase.
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+from twisted.trial.unittest import TestCase
+from twisted.internet._glibbase import ensureNotImported
+
+
+
+class EnsureNotImportedTests(TestCase):
+    """
+    L{ensureNotImported} protects against unwanted past and future imports.
+    """
+
+    def test_ensureWhenNotImported(self):
+        """
+        If the specified modules have never been imported, and import
+        prevention is requested, L{ensureNotImported} makes sure they will not
+        be imported in the future.
+        """
+        modules = {}
+        self.patch(sys, "modules", modules)
+        ensureNotImported(["m1", "m2"], "A message.",
+                          preventImports=["m1", "m2", "m3"])
+        self.assertEquals(modules, {"m1": None, "m2": None, "m3": None})
+
+
+    def test_ensureWhenNotImportedDontPrevent(self):
+        """
+        If the specified modules have never been imported, and import
+        prevention is not requested, L{ensureNotImported} has no effect.
+        """
+        modules = {}
+        self.patch(sys, "modules", modules)
+        ensureNotImported(["m1", "m2"], "A message.")
+        self.assertEquals(modules, {})
+
+
+    def test_ensureWhenFailedToImport(self):
+        """
+        If the specified modules have been set to C{None} in C{sys.modules},
+        L{ensureNotImported} does not complain.
+        """
+        modules = {"m2": None}
+        self.patch(sys, "modules", modules)
+        ensureNotImported(["m1", "m2"], "A message.", preventImports=["m1", "m2"])
+        self.assertEquals(modules, {"m1": None, "m2": None})
+
+
+    def test_ensureFailsWhenImported(self):
+        """
+        If one of the specified modules has been previously imported,
+        L{ensureNotImported} raises an exception.
+        """
+        module = object()
+        modules = {"m2": module}
+        self.patch(sys, "modules", modules)
+        e = self.assertRaises(ImportError, ensureNotImported,
+                              ["m1", "m2"], "A message.",
+                              preventImports=["m1", "m2"])
+        self.assertEquals(modules, {"m2": module})
+        self.assertEquals(e.args, ("A message.",))
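
The tests above pin down the observable contract of ensureNotImported: raise
ImportError if any named module was genuinely imported, otherwise optionally
poison sys.modules so later imports fail.  A rough illustrative equivalent
under those assumptions (a sketch, not the actual _glibbase implementation):

    import sys

    def ensure_not_imported(moduleNames, errorMessage, preventImports=()):
        # A None entry in sys.modules only marks a blocked import, so it
        # does not count as "already imported".
        for name in moduleNames:
            if sys.modules.get(name) is not None:
                raise ImportError(errorMessage)
        # Poison sys.modules so any future import of these names fails.
        for name in preventImports:
            sys.modules[name] = None
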
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_gtkreactor.py b/ThirdParty/Twisted/twisted/internet/test/test_gtkreactor.py
new file mode 100644
index 0000000..78039c0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_gtkreactor.py
@@ -0,0 +1,95 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests to ensure all attributes of L{twisted.internet.gtkreactor} are 
+deprecated.
+"""
+
+import sys
+
+from twisted.trial.unittest import TestCase
+
+
+class GtkReactorDeprecation(TestCase):
+    """
+    Tests to ensure all attributes of L{twisted.internet.gtkreactor} are 
+    deprecated.
+    """
+
+    class StubGTK:
+        class GDK:
+            INPUT_READ = None
+        def input_add(self, *params):
+            pass
+
+    class StubPyGTK:
+        def require(self, something):
+            pass
+
+    def setUp(self):
+        """
+        Create a stub for the module 'gtk' if it does not exist, so that it can
+        be imported without errors or warnings.
+        """
+        self.mods = sys.modules.copy()
+        sys.modules['gtk'] = self.StubGTK()
+        sys.modules['pygtk'] = self.StubPyGTK()
+
+
+    def tearDown(self):
+        """
+        Return sys.modules to the way it was before the test.
+        """
+        sys.modules.clear()
+        sys.modules.update(self.mods)
+
+
+    def lookForDeprecationWarning(self, testmethod, attributeName):
+        warningsShown = self.flushWarnings([testmethod])
+        self.assertEqual(len(warningsShown), 1)
+        self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warningsShown[0]['message'],
+            "twisted.internet.gtkreactor." + attributeName + " "
+            "was deprecated in Twisted 10.1.0: All new applications should be "
+            "written with gtk 2.x, which is supported by "
+            "twisted.internet.gtk2reactor.")
+
+
+    def test_gtkReactor(self):
+        """
+        Test deprecation of L{gtkreactor.GtkReactor}
+        """
+        from twisted.internet import gtkreactor
+        gtkreactor.GtkReactor()
+        self.lookForDeprecationWarning(self.test_gtkReactor, "GtkReactor")
+
+
+    def test_portableGtkReactor(self):
+        """
+        Test deprecation of L{gtkreactor.PortableGtkReactor}
+        """
+        from twisted.internet import gtkreactor
+        gtkreactor.PortableGtkReactor()
+        self.lookForDeprecationWarning(self.test_portableGtkReactor,
+                                       "PortableGtkReactor")
+
+
+    def test_install(self):
+        """
+        Test deprecation of L{gtkreactor.install}
+        """
+        from twisted.internet import gtkreactor
+        self.assertRaises(AssertionError, gtkreactor.install)
+        self.lookForDeprecationWarning(self.test_install, "install")
+
+
+    def test_portableInstall(self):
+        """
+        Test deprecation of L{gtkreactor.portableInstall}
+        """
+        from twisted.internet import gtkreactor
+        self.assertRaises(AssertionError, gtkreactor.portableInstall)
+        self.lookForDeprecationWarning(self.test_portableInstall,
+                                       "portableInstall")
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_inlinecb.py b/ThirdParty/Twisted/twisted/internet/test/test_inlinecb.py
new file mode 100644
index 0000000..fe5e9af
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_inlinecb.py
@@ -0,0 +1,90 @@
+# -*- test-case-name: twisted.internet.test.test_inlinecb -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.defer.inlineCallbacks}.
+
+Some tests for inlineCallbacks are defined in L{twisted.test.test_defgen} as
+well.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import TestCase
+from twisted.internet.defer import Deferred, returnValue, inlineCallbacks
+
+class NonLocalExitTests(TestCase):
+    """
+    It's possible for L{returnValue} to be (accidentally) invoked at a stack
+    level below the L{inlineCallbacks}-decorated function which it is exiting.
+    If this happens, L{returnValue} should report useful errors.
+
+    If L{returnValue} is invoked from a function not decorated by
+    L{inlineCallbacks}, it will emit a warning if it causes an
+    L{inlineCallbacks} function further up the stack to exit.
+    """
+
+    def mistakenMethod(self):
+        """
+        This method mistakenly invokes L{returnValue}, despite the fact that it
+        is not decorated with L{inlineCallbacks}.
+        """
+        returnValue(1)
+
+
+    def assertMistakenMethodWarning(self, resultList):
+        """
+        Flush the current warnings and assert that we have been told that
+        C{mistakenMethod} was invoked, and that the result from the Deferred
+        that was fired (appended to the given list) is C{mistakenMethod}'s
+        result.  The warning should indicate that an inlineCallbacks function
+        called 'inline' was made to exit.
+        """
+        self.assertEqual(resultList, [1])
+        warnings = self.flushWarnings(offendingFunctions=[self.mistakenMethod])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "returnValue() in 'mistakenMethod' causing 'inline' to exit: "
+            "returnValue should only be invoked by functions decorated with "
+            "inlineCallbacks")
+
+
+    def test_returnValueNonLocalWarning(self):
+        """
+        L{returnValue} will emit a non-local exit warning in the simplest case,
+        where the offending function is invoked immediately.
+        """
+        @inlineCallbacks
+        def inline():
+            self.mistakenMethod()
+            returnValue(2)
+            yield 0
+        d = inline()
+        results = []
+        d.addCallback(results.append)
+        self.assertMistakenMethodWarning(results)
+
+
+    def test_returnValueNonLocalDeferred(self):
+        """
+        L{returnValue} will emit a non-local warning in the case where the
+        L{inlineCallbacks}-decorated function has already yielded a Deferred
+        and therefore moved its generator function along.
+        """
+        cause = Deferred()
+        @inlineCallbacks
+        def inline():
+            yield cause
+            self.mistakenMethod()
+            returnValue(2)
+        effect = inline()
+        results = []
+        effect.addCallback(results.append)
+        self.assertEqual(results, [])
+        cause.callback(1)
+        self.assertMistakenMethodWarning(results)
+
+
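
For contrast with the mistaken non-local exit exercised above, the supported
pattern keeps returnValue inside the inlineCallbacks-decorated generator
itself; a small illustrative sketch (the function name is invented):

    from twisted.internet.defer import inlineCallbacks, returnValue, succeed

    @inlineCallbacks
    def doubled(deferredValue):
        # Wait for an intermediate Deferred, then exit with a result; calling
        # returnValue directly inside the decorated generator emits no warning.
        value = yield deferredValue
        returnValue(value * 2)

    results = []
    doubled(succeed(21)).addCallback(results.append)
    assert results == [42]    # succeed() fires synchronously
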
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_inotify.py b/ThirdParty/Twisted/twisted/internet/test/test_inotify.py
new file mode 100644
index 0000000..a003562
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_inotify.py
@@ -0,0 +1,504 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the inotify wrapper in L{twisted.internet.inotify}.
+"""
+
+from twisted.internet import defer, reactor
+from twisted.python import filepath, runtime
+from twisted.trial import unittest
+
+try:
+    from twisted.python import _inotify
+except ImportError:
+    inotify = None
+else:
+    from twisted.internet import inotify
+
+
+
+class TestINotify(unittest.TestCase):
+    """
+    Define all the tests for the basic functionality exposed by
+    L{inotify.INotify}.
+    """
+    if not runtime.platform.supportsINotify():
+        skip = "This platform doesn't support INotify."
+
+    def setUp(self):
+        self.dirname = filepath.FilePath(self.mktemp())
+        self.dirname.createDirectory()
+        self.inotify = inotify.INotify()
+        self.inotify.startReading()
+        self.addCleanup(self.inotify.loseConnection)
+
+
+    def test_initializationErrors(self):
+        """
+        L{inotify.INotify} emits a C{RuntimeError} when initialized
+        in an environment that doesn't support inotify as we expect it.
+
+        We just try to raise an exception for every possible case in
+        the for loop in L{inotify.INotify._inotify__init__}.
+        """
+        class FakeINotify:
+            def init(self):
+                raise inotify.INotifyError()
+        self.patch(inotify.INotify, '_inotify', FakeINotify())
+        self.assertRaises(inotify.INotifyError, inotify.INotify)
+
+
+    def _notificationTest(self, mask, operation, expectedPath=None):
+        """
+        Test notification from some filesystem operation.
+
+        @param mask: The event mask to use when setting up the watch.
+
+        @param operation: A function which will be called with the
+            name of a file in the watched directory and which should
+            trigger the event.
+
+        @param expectedPath: Optionally, the name of the path which is
+            expected to come back in the notification event; this will
+            also be passed to C{operation} (primarily useful when the
+            operation is being done to the directory itself, not a
+            file in it).
+
+        @return: A L{Deferred} which fires successfully when the
+            expected event has been received or fails otherwise.
+        """
+        if expectedPath is None:
+            expectedPath = self.dirname.child("foo.bar")
+        notified = defer.Deferred()
+        def cbNotified((watch, filename, events)):
+            self.assertEqual(filename, expectedPath)
+            self.assertTrue(events & mask)
+        notified.addCallback(cbNotified)
+
+        self.inotify.watch(
+            self.dirname, mask=mask,
+            callbacks=[lambda *args: notified.callback(args)])
+        operation(expectedPath)
+        return notified
+
+
+    def test_access(self):
+        """
+        Reading from a file in a monitored directory sends an
+        C{inotify.IN_ACCESS} event to the callback.
+        """
+        def operation(path):
+            path.setContent("foo")
+            path.getContent()
+
+        return self._notificationTest(inotify.IN_ACCESS, operation)
+
+
+    def test_modify(self):
+        """
+        Writing to a file in a monitored directory sends an
+        C{inotify.IN_MODIFY} event to the callback.
+        """
+        def operation(path):
+            fObj = path.open("w")
+            fObj.write('foo')
+            fObj.close()
+
+        return self._notificationTest(inotify.IN_MODIFY, operation)
+
+
+    def test_attrib(self):
+        """
+        Changing the metadata of a file in a monitored directory
+        sends an C{inotify.IN_ATTRIB} event to the callback.
+        """
+        def operation(path):
+            path.touch()
+            path.touch()
+
+        return self._notificationTest(inotify.IN_ATTRIB, operation)
+
+
+    def test_closeWrite(self):
+        """
+        Closing a file which was open for writing in a monitored
+        directory sends an C{inotify.IN_CLOSE_WRITE} event to the
+        callback.
+        """
+        def operation(path):
+            fObj = path.open("w")
+            fObj.close()
+
+        return self._notificationTest(inotify.IN_CLOSE_WRITE, operation)
+
+
+    def test_closeNoWrite(self):
+        """
+        Closing a file which was open for reading but not writing in a
+        monitored directory sends an C{inotify.IN_CLOSE_NOWRITE} event
+        to the callback.
+        """
+        def operation(path):
+            path.touch()
+            fObj = path.open("r")
+            fObj.close()
+
+        return self._notificationTest(inotify.IN_CLOSE_NOWRITE, operation)
+
+
+    def test_open(self):
+        """
+        Opening a file in a monitored directory sends an
+        C{inotify.IN_OPEN} event to the callback.
+        """
+        def operation(path):
+            fObj = path.open("w")
+            fObj.close()
+
+        return self._notificationTest(inotify.IN_OPEN, operation)
+
+
+    def test_movedFrom(self):
+        """
+        Moving a file out of a monitored directory sends an
+        C{inotify.IN_MOVED_FROM} event to the callback.
+        """
+        def operation(path):
+            fObj = path.open("w")
+            fObj.close()
+            path.moveTo(filepath.FilePath(self.mktemp()))
+
+        return self._notificationTest(inotify.IN_MOVED_FROM, operation)
+
+
+    def test_movedTo(self):
+        """
+        Moving a file into a monitored directory sends an
+        C{inotify.IN_MOVED_TO} event to the callback.
+        """
+        def operation(path):
+            p = filepath.FilePath(self.mktemp())
+            p.touch()
+            p.moveTo(path)
+
+        return self._notificationTest(inotify.IN_MOVED_TO, operation)
+
+
+    def test_create(self):
+        """
+        Creating a file in a monitored directory sends an
+        C{inotify.IN_CREATE} event to the callback.
+        """
+        def operation(path):
+            fObj = path.open("w")
+            fObj.close()
+
+        return self._notificationTest(inotify.IN_CREATE, operation)
+
+
+    def test_delete(self):
+        """
+        Deleting a file in a monitored directory sends an
+        C{inotify.IN_DELETE} event to the callback.
+        """
+        def operation(path):
+            path.touch()
+            path.remove()
+
+        return self._notificationTest(inotify.IN_DELETE, operation)
+
+
+    def test_deleteSelf(self):
+        """
+        Deleting the monitored directory itself sends an
+        C{inotify.IN_DELETE_SELF} event to the callback.
+        """
+        def operation(path):
+            path.remove()
+
+        return self._notificationTest(
+            inotify.IN_DELETE_SELF, operation, expectedPath=self.dirname)
+
+
+    def test_moveSelf(self):
+        """
+        Renaming the monitored directory itself sends an
+        C{inotify.IN_MOVE_SELF} event to the callback.
+        """
+        def operation(path):
+            path.moveTo(filepath.FilePath(self.mktemp()))
+
+        return self._notificationTest(
+            inotify.IN_MOVE_SELF, operation, expectedPath=self.dirname)
+
+
+    def test_simpleSubdirectoryAutoAdd(self):
+        """
+        L{inotify.INotify}, when initialized with autoAdd==True, also adds
+        newly created subdirectories to the watchlist.
+        """
+        def _callback(wp, filename, mask):
+            # We are notified before we actually process new
+            # directories, so we need to defer this check.
+            def _():
+                try:
+                    self.assertTrue(self.inotify._isWatched(subdir))
+                    d.callback(None)
+                except Exception:
+                    d.errback()
+            reactor.callLater(0, _)
+
+        checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
+        self.inotify.watch(
+            self.dirname, mask=checkMask, autoAdd=True,
+            callbacks=[_callback])
+        subdir = self.dirname.child('test')
+        d = defer.Deferred()
+        subdir.createDirectory()
+        return d
+
+
+    def test_simpleDeleteDirectory(self):
+        """
+        L{inotify.INotify} removes a directory from the watchlist when
+        it's removed from the filesystem.
+        """
+        calls = []
+        def _callback(wp, filename, mask):
+            # We are notified before we actually process new
+            # directories, so we need to defer this check.
+            def _():
+                try:
+                    self.assertTrue(self.inotify._isWatched(subdir))
+                    subdir.remove()
+                except Exception:
+                    d.errback()
+            def _eb():
+                # second call, we have just removed the subdir
+                try:
+                    self.assertTrue(not self.inotify._isWatched(subdir))
+                    d.callback(None)
+                except Exception:
+                    d.errback()
+
+            if not calls:
+                # first call, it's the create subdir
+                calls.append(filename)
+                reactor.callLater(0, _)
+
+            else:
+                reactor.callLater(0, _eb)
+
+        checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
+        self.inotify.watch(
+            self.dirname, mask=checkMask, autoAdd=True,
+            callbacks=[_callback])
+        subdir = self.dirname.child('test')
+        d = defer.Deferred()
+        subdir.createDirectory()
+        return d
+
+
+    def test_ignoreDirectory(self):
+        """
+        L{inotify.INotify.ignore} removes a directory from the watchlist.
+        """
+        self.inotify.watch(self.dirname, autoAdd=True)
+        self.assertTrue(self.inotify._isWatched(self.dirname))
+        self.inotify.ignore(self.dirname)
+        self.assertFalse(self.inotify._isWatched(self.dirname))
+
+
+    def test_humanReadableMask(self):
+        """
+        L{inotify.humanReadableMask} translates all the possible event
+        masks to a human readable string.
+        """
+        for mask, value in inotify._FLAG_TO_HUMAN:
+            self.assertEqual(inotify.humanReadableMask(mask)[0], value)
+
+        checkMask = (
+            inotify.IN_CLOSE_WRITE | inotify.IN_ACCESS | inotify.IN_OPEN)
+        self.assertEqual(
+            set(inotify.humanReadableMask(checkMask)),
+            set(['close_write', 'access', 'open']))
+
+
+    def test_recursiveWatch(self):
+        """
+        L{inotify.INotify.watch} with recursive==True will add all the
+        subdirectories under the given path to the watchlist.
+        """
+        subdir = self.dirname.child('test')
+        subdir2 = subdir.child('test2')
+        subdir3 = subdir2.child('test3')
+        subdir3.makedirs()
+        dirs = [subdir, subdir2, subdir3]
+        self.inotify.watch(self.dirname, recursive=True)
+        # let's even call this twice so that we test that nothing breaks
+        self.inotify.watch(self.dirname, recursive=True)
+        for d in dirs:
+            self.assertTrue(self.inotify._isWatched(d))
+
+
+    def test_connectionLostError(self):
+        """
+        If there's a problem while closing the fd,
+        L{inotify.INotify.connectionLost} shouldn't raise the exception but
+        should log the error.
+        """
+        import os
+        in_ = inotify.INotify()
+        os.close(in_._fd)
+        in_.loseConnection()
+        self.flushLoggedErrors()
+
+    def test_noAutoAddSubdirectory(self):
+        """
+        L{inotify.INotify.watch} with autoAdd==False will stop inotify
+        from watching subdirectories created under the watched one.
+        """
+        def _callback(wp, fp, mask):
+            # We are notified before we actually process new
+            # directories, so we need to defer this check.
+            def _():
+                try:
+                    self.assertFalse(self.inotify._isWatched(subdir.path))
+                    d.callback(None)
+                except Exception:
+                    d.errback()
+            reactor.callLater(0, _)
+
+        checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
+        self.inotify.watch(
+            self.dirname, mask=checkMask, autoAdd=False,
+            callbacks=[_callback])
+        subdir = self.dirname.child('test')
+        d = defer.Deferred()
+        subdir.createDirectory()
+        return d
+
+
+    def test_seriesOfWatchAndIgnore(self):
+        """
+        L{inotify.INotify} will watch a filepath for events even if the same
+        path is repeatedly added/removed/re-added to the watchpoints.
+        """
+        expectedPath = self.dirname.child("foo.bar2")
+        expectedPath.touch()
+
+        notified = defer.Deferred()
+        def cbNotified((ignored, filename, events)):
+            self.assertEqual(filename, expectedPath)
+            self.assertTrue(events & inotify.IN_DELETE_SELF)
+
+        def callIt(*args):
+            notified.callback(args)
+
+        # Watch, ignore, watch again to get into the state being tested.
+        self.assertTrue(self.inotify.watch(expectedPath, callbacks=[callIt]))
+        self.inotify.ignore(expectedPath)
+        self.assertTrue(
+            self.inotify.watch(
+                expectedPath, mask=inotify.IN_DELETE_SELF, callbacks=[callIt]))
+
+        notified.addCallback(cbNotified)
+
+        # Apparently in kernel version < 2.6.25, inotify has a bug in the way
+        # similar events are coalesced.  So, be sure to generate a different
+        # event here than the touch() at the top of this method might have
+        # generated.
+        expectedPath.remove()
+
+        return notified
+
+
+    def test_ignoreFilePath(self):
+        """
+        L{inotify.INotify} will ignore a filepath after it has been removed from
+        the watch list.
+        """
+        expectedPath = self.dirname.child("foo.bar2")
+        expectedPath.touch()
+        expectedPath2 = self.dirname.child("foo.bar3")
+        expectedPath2.touch()
+
+        notified = defer.Deferred()
+        def cbNotified((ignored, filename, events)):
+            self.assertEqual(filename, expectedPath2)
+            self.assertTrue(events & inotify.IN_DELETE_SELF)
+
+        def callIt(*args):
+            notified.callback(args)
+
+        self.assertTrue(
+            self.inotify.watch(
+                expectedPath, inotify.IN_DELETE_SELF, callbacks=[callIt]))
+        notified.addCallback(cbNotified)
+
+        self.assertTrue(
+            self.inotify.watch(
+                expectedPath2, inotify.IN_DELETE_SELF, callbacks=[callIt]))
+
+        self.inotify.ignore(expectedPath)
+
+        expectedPath.remove()
+        expectedPath2.remove()
+
+        return notified
+
+
+    def test_ignoreNonWatchedFile(self):
+        """
+        L{inotify.INotify} will raise KeyError if a non-watched filepath is
+        ignored.
+        """
+        expectedPath = self.dirname.child("foo.ignored")
+        expectedPath.touch()
+
+        self.assertRaises(KeyError, self.inotify.ignore, expectedPath)
+
+
+    def test_complexSubdirectoryAutoAdd(self):
+        """
+        L{inotify.INotify} with autoAdd==True for a watched path
+        generates events for every file or directory already present
+        in a newly created subdirectory under the watched one.
+
+        This tests that we solve a race condition in inotify even though
+        we may generate duplicate events.
+        """
+        calls = set()
+        def _callback(wp, filename, mask):
+            calls.add(filename)
+            if len(calls) == 6:
+                try:
+                    self.assertTrue(self.inotify._isWatched(subdir))
+                    self.assertTrue(self.inotify._isWatched(subdir2))
+                    self.assertTrue(self.inotify._isWatched(subdir3))
+                    created = someFiles + [subdir, subdir2, subdir3]
+                    self.assertEqual(len(calls), len(created))
+                    self.assertEqual(calls, set(created))
+                except Exception:
+                    d.errback()
+                else:
+                    d.callback(None)
+
+        checkMask = inotify.IN_ISDIR | inotify.IN_CREATE
+        self.inotify.watch(
+            self.dirname, mask=checkMask, autoAdd=True,
+            callbacks=[_callback])
+        subdir = self.dirname.child('test')
+        subdir2 = subdir.child('test2')
+        subdir3 = subdir2.child('test3')
+        d = defer.Deferred()
+        subdir3.makedirs()
+
+        someFiles = [subdir.child('file1.dat'),
+                     subdir2.child('file2.dat'),
+                     subdir3.child('file3.dat')]
+        # Add some files in pretty much all the directories so that we
+        # see that we process all of them.
+        for i, filename in enumerate(someFiles):
+            filename.setContent(filename.path)
+        return d
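
Outside of trial the same API is driven straight from the reactor; a minimal,
hypothetical watcher, assuming a Linux host with inotify support (the watched
path and callback are invented for the example):

    from twisted.internet import inotify, reactor
    from twisted.python import filepath

    def onChange(watch, path, mask):
        # path is a FilePath; humanReadableMask(mask) yields names such as
        # ['create'].  Stop after the first event, just for the example.
        reactor.stop()

    notifier = inotify.INotify()
    notifier.startReading()
    notifier.watch(filepath.FilePath("/tmp"), autoAdd=True,
                   callbacks=[onChange])
    reactor.run()
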
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_iocp.py b/ThirdParty/Twisted/twisted/internet/test/test_iocp.py
new file mode 100644
index 0000000..76d7646
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_iocp.py
@@ -0,0 +1,150 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.iocpreactor}.
+"""
+
+import errno
+from array import array
+from struct import pack
+from socket import AF_INET6, AF_INET, SOCK_STREAM, SOL_SOCKET, error, socket
+
+from zope.interface.verify import verifyClass
+
+from twisted.trial import unittest
+from twisted.python.log import msg
+from twisted.internet.interfaces import IPushProducer
+
+try:
+    from twisted.internet.iocpreactor import iocpsupport as _iocp, tcp, udp
+    from twisted.internet.iocpreactor.reactor import IOCPReactor, EVENTS_PER_LOOP, KEY_NORMAL
+    from twisted.internet.iocpreactor.interfaces import IReadWriteHandle
+    from twisted.internet.iocpreactor.const import SO_UPDATE_ACCEPT_CONTEXT
+    from twisted.internet.iocpreactor.abstract import FileHandle
+except ImportError:
+    skip = 'This test only applies to IOCPReactor'
+
+try:
+    socket(AF_INET6, SOCK_STREAM).close()
+except error, e:
+    ipv6Skip = str(e)
+else:
+    ipv6Skip = None
+
+class SupportTests(unittest.TestCase):
+    """
+    Tests for L{twisted.internet.iocpreactor.iocpsupport}, low-level reactor
+    implementation helpers.
+    """
+    def _acceptAddressTest(self, family, localhost):
+        """
+        Create a C{SOCK_STREAM} connection to localhost using a socket with an
+        address family of C{family} and assert that the result of
+        L{iocpsupport.get_accept_addrs} is consistent with the result of
+        C{socket.getsockname} and C{socket.getpeername}.
+        """
+        msg("family = %r" % (family,))
+        port = socket(family, SOCK_STREAM)
+        self.addCleanup(port.close)
+        port.bind(('', 0))
+        port.listen(1)
+        client = socket(family, SOCK_STREAM)
+        self.addCleanup(client.close)
+        client.setblocking(False)
+        try:
+            client.connect((localhost, port.getsockname()[1]))
+        except error, (errnum, message):
+            self.assertIn(errnum, (errno.EINPROGRESS, errno.EWOULDBLOCK))
+
+        server = socket(family, SOCK_STREAM)
+        self.addCleanup(server.close)
+        buff = array('c', '\0' * 256)
+        self.assertEqual(
+            0, _iocp.accept(port.fileno(), server.fileno(), buff, None))
+        server.setsockopt(
+            SOL_SOCKET, SO_UPDATE_ACCEPT_CONTEXT, pack('P', server.fileno()))
+        self.assertEqual(
+            (family, client.getpeername()[:2], client.getsockname()[:2]),
+            _iocp.get_accept_addrs(server.fileno(), buff))
+
+
+    def test_ipv4AcceptAddress(self):
+        """
+        L{iocpsupport.get_accept_addrs} returns a three-tuple of address
+        information about the socket associated with the file descriptor passed
+        to it.  For a connection using IPv4:
+
+          - the first element is C{AF_INET}
+          - the second element is a two-tuple of a dotted decimal notation IPv4
+            address and a port number giving the peer address of the connection
+          - the third element is the same type giving the host address of the
+            connection
+        """
+        self._acceptAddressTest(AF_INET, '127.0.0.1')
+
+
+    def test_ipv6AcceptAddress(self):
+        """
+        Like L{test_ipv4AcceptAddress}, but for IPv6 connections.  In this case:
+
+          - the first element is C{AF_INET6}
+          - the second element is a two-tuple of a hexadecimal IPv6 address
+            literal and a port number giving the peer address of the connection
+          - the third element is the same type giving the host address of the
+            connection
+        """
+        self._acceptAddressTest(AF_INET6, '::1')
+    if ipv6Skip is not None:
+        test_ipv6AcceptAddress.skip = ipv6Skip
+
+
+
+class IOCPReactorTestCase(unittest.TestCase):
+    def test_noPendingTimerEvents(self):
+        """
+        Test reactor behavior (doIteration) when there are no pending timed
+        events.
+        """
+        ir = IOCPReactor()
+        ir.wakeUp()
+        self.failIf(ir.doIteration(None))
+
+
+    def test_reactorInterfaces(self):
+        """
+        Verify that IOCP socket-representing classes implement IReadWriteHandle.
+        """
+        self.assertTrue(verifyClass(IReadWriteHandle, tcp.Connection))
+        self.assertTrue(verifyClass(IReadWriteHandle, udp.Port))
+
+
+    def test_fileHandleInterfaces(self):
+        """
+        Verify that L{FileHandle} implements L{IPushProducer}.
+        """
+        self.assertTrue(verifyClass(IPushProducer, FileHandle))
+
+
+    def test_maxEventsPerIteration(self):
+        """
+        Verify that we don't lose an event when more than EVENTS_PER_LOOP
+        events occur in the same reactor iteration.
+        """
+        class FakeFD:
+            counter = 0
+            def logPrefix(self):
+                return 'FakeFD'
+            def cb(self, rc, bytes, evt):
+                self.counter += 1
+
+        ir = IOCPReactor()
+        fd = FakeFD()
+        event = _iocp.Event(fd.cb, fd)
+        for _ in range(EVENTS_PER_LOOP + 1):
+            ir.port.postEvent(0, KEY_NORMAL, event)
+        ir.doIteration(None)
+        self.assertEqual(fd.counter, EVENTS_PER_LOOP)
+        ir.doIteration(0)
+        self.assertEqual(fd.counter, EVENTS_PER_LOOP + 1)
+
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_main.py b/ThirdParty/Twisted/twisted/internet/test/test_main.py
new file mode 100644
index 0000000..78e1677
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_main.py
@@ -0,0 +1,50 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.main}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial import unittest
+from twisted.internet.error import ReactorAlreadyInstalledError
+from twisted.internet.main import installReactor
+
+from twisted.internet.test.modulehelpers import NoReactor
+
+
+class InstallReactorTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{installReactor}.
+    """
+
+    def test_installReactor(self):
+        """
+        L{installReactor} installs a new reactor if none is present.
+        """
+        with NoReactor():
+            newReactor = object()
+            installReactor(newReactor)
+            from twisted.internet import reactor
+            self.assertIdentical(newReactor, reactor)
+
+
+    def test_alreadyInstalled(self):
+        """
+        If a reactor is already installed, L{installReactor} raises
+        L{ReactorAlreadyInstalledError}.
+        """
+        with NoReactor():
+            installReactor(object())
+            self.assertRaises(ReactorAlreadyInstalledError, installReactor,
+                              object())
+
+
+    def test_errorIsAnAssertionError(self):
+        """
+        For backwards compatibility, L{ReactorAlreadyInstalledError} is an
+        L{AssertionError}.
+        """
+        self.assertTrue(issubclass(ReactorAlreadyInstalledError,
+                        AssertionError))
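
The application-side pattern these tests protect is: install a concrete
reactor exactly once, before anything imports the default, and use the
twisted.internet.reactor name everywhere else.  A brief sketch, assuming a
POSIX host where the poll reactor is available:

    # Must run before anything else imports twisted.internet.reactor.
    from twisted.internet import pollreactor
    pollreactor.install()

    # From here on this name refers to the reactor installed above; trying to
    # install a second reactor now raises ReactorAlreadyInstalledError.
    from twisted.internet import reactor
    reactor.callWhenRunning(reactor.stop)
    reactor.run()
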
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_newtls.py b/ThirdParty/Twisted/twisted/internet/test/test_newtls.py
new file mode 100644
index 0000000..b24befe
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_newtls.py
@@ -0,0 +1,197 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet._newtls}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial import unittest
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.internet.test.connectionmixins import (
+    ConnectableProtocol, runProtocolsWithReactor)
+from twisted.internet.test.test_tls import SSLCreator, TLSMixin
+from twisted.internet.test.test_tls import StartTLSClientCreator
+from twisted.internet.test.test_tls import ContextGeneratingMixin
+from twisted.internet.test.test_tcp import TCPCreator
+try:
+    from twisted.protocols import tls
+    from twisted.internet import _newtls
+except ImportError:
+    _newtls = None
+
+
+class BypassTLSTests(unittest.TestCase):
+    """
+    Tests for the L{_newtls._BypassTLS} class.
+    """
+
+    if not _newtls:
+        skip = "Couldn't import _newtls, perhaps pyOpenSSL is old or missing"
+
+    def test_loseConnectionPassThrough(self):
+        """
+        C{_BypassTLS.loseConnection} calls C{loseConnection} on the base
+        class, while preserving any default argument in the base class'
+        C{loseConnection} implementation.
+        """
+        default = object()
+        result = []
+
+        class FakeTransport(object):
+            def loseConnection(self, _connDone=default):
+                result.append(_connDone)
+
+        bypass = _newtls._BypassTLS(FakeTransport, FakeTransport())
+
+        # The default from FakeTransport is used:
+        bypass.loseConnection()
+        self.assertEqual(result, [default])
+
+        # And we can pass our own:
+        notDefault = object()
+        bypass.loseConnection(notDefault)
+        self.assertEqual(result, [default, notDefault])
+
+
+
+class FakeProducer(object):
+    """
+    A producer that does nothing.
+    """
+
+    def pauseProducing(self):
+        pass
+
+
+    def resumeProducing(self):
+        pass
+
+
+    def stopProducing(self):
+        pass
+
+
+
+class ProducerProtocol(ConnectableProtocol):
+    """
+    Register a producer, unregister it, and verify the producer hooks up to
+    innards of C{TLSMemoryBIOProtocol}.
+    """
+
+    def __init__(self, producer, result):
+        self.producer = producer
+        self.result = result
+
+
+    def connectionMade(self):
+        if not isinstance(self.transport.protocol,
+                          tls.TLSMemoryBIOProtocol):
+            # Either the test or the code have a bug...
+            raise RuntimeError("TLSMemoryBIOProtocol not hooked up.")
+
+        self.transport.registerProducer(self.producer, True)
+        # The producer was registered with the TLSMemoryBIOProtocol:
+        self.result.append(self.transport.protocol._producer._producer)
+
+        self.transport.unregisterProducer()
+        # The producer was unregistered from the TLSMemoryBIOProtocol:
+        self.result.append(self.transport.protocol._producer)
+        self.transport.loseConnection()
+
+
+
+class ProducerTestsMixin(ReactorBuilder, TLSMixin, ContextGeneratingMixin):
+    """
+    Test the new TLS code integrates C{TLSMemoryBIOProtocol} correctly.
+    """
+
+    if not _newtls:
+        skip = "Could not import twisted.internet._newtls"
+
+    def test_producerSSLFromStart(self):
+        """
+        C{registerProducer} and C{unregisterProducer} on TLS transports
+        created as SSL from the get go are passed to the
+        C{TLSMemoryBIOProtocol}, not the underlying transport directly.
+        """
+        result = []
+        producer = FakeProducer()
+
+        runProtocolsWithReactor(self, ConnectableProtocol(),
+                                ProducerProtocol(producer, result),
+                                SSLCreator())
+        self.assertEqual(result, [producer, None])
+
+
+    def test_producerAfterStartTLS(self):
+        """
+        C{registerProducer} and C{unregisterProducer} on TLS transports
+        created by C{startTLS} are passed to the C{TLSMemoryBIOProtocol}, not
+        the underlying transport directly.
+        """
+        result = []
+        producer = FakeProducer()
+
+        runProtocolsWithReactor(self, ConnectableProtocol(),
+                                ProducerProtocol(producer, result),
+                                StartTLSClientCreator())
+        self.assertEqual(result, [producer, None])
+
+
+    def startTLSAfterRegisterProducer(self, streaming):
+        """
+        When a producer is registered, and then startTLS is called,
+        the producer is re-registered with the C{TLSMemoryBIOProtocol}.
+        """
+        clientContext = self.getClientContext()
+        serverContext = self.getServerContext()
+        result = []
+        producer = FakeProducer()
+
+        class RegisterTLSProtocol(ConnectableProtocol):
+            def connectionMade(self):
+                self.transport.registerProducer(producer, streaming)
+                self.transport.startTLS(serverContext)
+                # Store TLSMemoryBIOProtocol and underlying transport producer
+                # status:
+                if streaming:
+                    # _ProducerMembrane -> producer:
+                    result.append(self.transport.protocol._producer._producer)
+                    result.append(self.transport.producer._producer)
+                else:
+                    # _ProducerMembrane -> _PullToPush -> producer:
+                    result.append(
+                        self.transport.protocol._producer._producer._producer)
+                    result.append(self.transport.producer._producer._producer)
+                self.transport.unregisterProducer()
+                self.transport.loseConnection()
+
+        class StartTLSProtocol(ConnectableProtocol):
+            def connectionMade(self):
+                self.transport.startTLS(clientContext)
+
+        runProtocolsWithReactor(self, RegisterTLSProtocol(),
+                                StartTLSProtocol(), TCPCreator())
+        self.assertEqual(result, [producer, producer])
+
+
+    def test_startTLSAfterRegisterProducerStreaming(self):
+        """
+        When a streaming producer is registered, and then startTLS is called,
+        the producer is re-registered with the C{TLSMemoryBIOProtocol}.
+        """
+        self.startTLSAfterRegisterProducer(True)
+
+
+    def test_startTLSAfterRegisterProducerNonStreaming(self):
+        """
+        When a non-streaming producer is registered, and then startTLS is
+        called, the producer is re-registered with the
+        C{TLSMemoryBIOProtocol}.
+        """
+        self.startTLSAfterRegisterProducer(False)
+
+
+globals().update(ProducerTestsMixin.makeTestCaseClasses())
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_pollingfile.py b/ThirdParty/Twisted/twisted/internet/test/test_pollingfile.py
new file mode 100644
index 0000000..75022ad
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_pollingfile.py
@@ -0,0 +1,46 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet._pollingfile}.
+"""
+
+from twisted.python.runtime import platform
+from twisted.trial.unittest import TestCase
+
+if platform.isWindows():
+    from twisted.internet import _pollingfile
+else:
+    _pollingfile = None
+
+
+
+class TestPollableWritePipe(TestCase):
+    """
+    Tests for L{_pollingfile._PollableWritePipe}.
+    """
+
+    def test_writeUnicode(self):
+        """
+        L{_pollingfile._PollableWritePipe.write} raises a C{TypeError} if an
+        attempt is made to append unicode data to the output buffer.
+        """
+        p = _pollingfile._PollableWritePipe(1, lambda: None)
+        self.assertRaises(TypeError, p.write, u"test")
+
+
+    def test_writeSequenceUnicode(self):
+        """
+        L{_pollingfile._PollableWritePipe.writeSequence} raises a C{TypeError}
+        if unicode data is part of the data sequence to be appended to the
+        output buffer.
+        """
+        p = _pollingfile._PollableWritePipe(1, lambda: None)
+        self.assertRaises(TypeError, p.writeSequence, [u"test"])
+        self.assertRaises(TypeError, p.writeSequence, (u"test", ))
+
+
+
+
+if _pollingfile is None:
+    TestPollableWritePipe.skip = "Test will run only on Windows."
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_posixbase.py b/ThirdParty/Twisted/twisted/internet/test/test_posixbase.py
new file mode 100644
index 0000000..2e6cdd0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_posixbase.py
@@ -0,0 +1,320 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.posixbase} and supporting code.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.python.compat import set, _PY3
+from twisted.trial.unittest import TestCase
+from twisted.internet.defer import Deferred
+from twisted.internet.posixbase import PosixReactorBase, _Waker
+from twisted.internet.protocol import ServerFactory
+
+skipSockets = None
+if _PY3:
+    skipSockets = "Re-enable when Python 3 port supports AF_UNIX"
+else:
+    try:
+        from twisted.internet import unix
+        from twisted.test.test_unix import ClientProto
+    except ImportError:
+        skipSockets = "Platform does not support AF_UNIX sockets"
+
+from twisted.internet.tcp import Port
+from twisted.internet import reactor
+
+
+
+
+class TrivialReactor(PosixReactorBase):
+    def __init__(self):
+        self._readers = {}
+        self._writers = {}
+        PosixReactorBase.__init__(self)
+
+
+    def addReader(self, reader):
+        self._readers[reader] = True
+
+
+    def removeReader(self, reader):
+        del self._readers[reader]
+
+
+    def addWriter(self, writer):
+        self._writers[writer] = True
+
+
+    def removeWriter(self, writer):
+        del self._writers[writer]
+
+
+
+class PosixReactorBaseTests(TestCase):
+    """
+    Tests for L{PosixReactorBase}.
+    """
+
+    def _checkWaker(self, reactor):
+        self.assertIsInstance(reactor.waker, _Waker)
+        self.assertIn(reactor.waker, reactor._internalReaders)
+        self.assertIn(reactor.waker, reactor._readers)
+
+
+    def test_wakerIsInternalReader(self):
+        """
+        When L{PosixReactorBase} is instantiated, it creates a waker and adds
+        it to its internal readers set.
+        """
+        reactor = TrivialReactor()
+        self._checkWaker(reactor)
+
+
+    def test_removeAllSkipsInternalReaders(self):
+        """
+        Any L{IReadDescriptors} in L{PosixReactorBase._internalReaders} are
+        left alone by L{PosixReactorBase._removeAll}.
+        """
+        reactor = TrivialReactor()
+        extra = object()
+        reactor._internalReaders.add(extra)
+        reactor.addReader(extra)
+        reactor._removeAll(reactor._readers, reactor._writers)
+        self._checkWaker(reactor)
+        self.assertIn(extra, reactor._internalReaders)
+        self.assertIn(extra, reactor._readers)
+
+
+    def test_removeAllReturnsRemovedDescriptors(self):
+        """
+        L{PosixReactorBase._removeAll} returns a list of removed
+        L{IReadDescriptor} and L{IWriteDescriptor} objects.
+        """
+        reactor = TrivialReactor()
+        reader = object()
+        writer = object()
+        reactor.addReader(reader)
+        reactor.addWriter(writer)
+        removed = reactor._removeAll(
+            reactor._readers, reactor._writers)
+        self.assertEqual(set(removed), set([reader, writer]))
+        self.assertNotIn(reader, reactor._readers)
+        self.assertNotIn(writer, reactor._writers)
+
+
+
+class TCPPortTests(TestCase):
+    """
+    Tests for L{twisted.internet.tcp.Port}.
+    """
+
+    if not isinstance(reactor, PosixReactorBase):
+        skip = "Non-posixbase reactor"
+
+    def test_connectionLostFailed(self):
+        """
+        L{Port.stopListening} returns a L{Deferred} which errbacks if
+        L{Port.connectionLost} raises an exception.
+        """
+        port = Port(12345, ServerFactory())
+        port.connected = True
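+        # 1 // 0 raises ZeroDivisionError, so connectionLost fails when called.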
+        port.connectionLost = lambda reason: 1 // 0
+        return self.assertFailure(port.stopListening(), ZeroDivisionError)
+
+
+
+class TimeoutReportReactor(PosixReactorBase):
+    """
+    A reactor which is just barely runnable and which cannot monitor any
+    readers or writers, and which fires a L{Deferred} with the timeout
+    passed to its C{doIteration} method as soon as that method is invoked.
+    """
+    def __init__(self):
+        PosixReactorBase.__init__(self)
+        self.iterationTimeout = Deferred()
+        self.now = 100
+
+
+    def addReader(self, reader):
+        """
+        Ignore the reader.  This is necessary because the waker will be
+        added.  However, we won't actually monitor it for any events.
+        """
+
+
+    def removeAll(self):
+        """
+        There are no readers or writers, so there is nothing to remove.
+        This will be called when the reactor stops, though, so it must be
+        implemented.
+        """
+        return []
+
+
+    def seconds(self):
+        """
+        Override the real clock with a deterministic one that can be easily
+        controlled in a unit test.
+        """
+        return self.now
+
+
+    def doIteration(self, timeout):
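+        # Report only the first iteration's timeout; later iterations are ignored.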
+        d = self.iterationTimeout
+        if d is not None:
+            self.iterationTimeout = None
+            d.callback(timeout)
+
+
+
+class IterationTimeoutTests(TestCase):
+    """
+    Tests for the timeout argument with which L{PosixReactorBase.run} calls
+    L{PosixReactorBase.doIteration} in the presence of various delayed calls.
+    """
+    def _checkIterationTimeout(self, reactor):
+        timeout = []
+        reactor.iterationTimeout.addCallback(timeout.append)
+        reactor.iterationTimeout.addCallback(lambda ignored: reactor.stop())
+        reactor.run()
+        return timeout[0]
+
+
+    def test_noCalls(self):
+        """
+        If there are no delayed calls, C{doIteration} is called with a
+        timeout of C{None}.
+        """
+        reactor = TimeoutReportReactor()
+        timeout = self._checkIterationTimeout(reactor)
+        self.assertEqual(timeout, None)
+
+
+    def test_delayedCall(self):
+        """
+        If there is a delayed call, C{doIteration} is called with a timeout
+        which is the difference between the current time and the time at
+        which that call is to run.
+        """
+        reactor = TimeoutReportReactor()
+        reactor.callLater(100, lambda: None)
+        timeout = self._checkIterationTimeout(reactor)
+        self.assertEqual(timeout, 100)
+
+
+    def test_timePasses(self):
+        """
+        If a delayed call is scheduled and then some time passes, the
+        timeout passed to C{doIteration} is reduced by the amount of time
+        which passed.
+        """
+        reactor = TimeoutReportReactor()
+        reactor.callLater(100, lambda: None)
+        reactor.now += 25
+        timeout = self._checkIterationTimeout(reactor)
+        self.assertEqual(timeout, 75)
+
+
+    def test_multipleDelayedCalls(self):
+        """
+        If there are several delayed calls, C{doIteration} is called with a
+        timeout which is the difference between the current time and the
+        time at which the earlier of the two calls is to run.
+        """
+        reactor = TimeoutReportReactor()
+        reactor.callLater(50, lambda: None)
+        reactor.callLater(10, lambda: None)
+        reactor.callLater(100, lambda: None)
+        timeout = self._checkIterationTimeout(reactor)
+        self.assertEqual(timeout, 10)
+
+
+    def test_resetDelayedCall(self):
+        """
+        If a delayed call is reset, the timeout passed to C{doIteration} is
+        based on the interval between the time when reset is called and the
+        new delay of the call.
+        """
+        reactor = TimeoutReportReactor()
+        call = reactor.callLater(50, lambda: None)
+        reactor.now += 25
+        call.reset(15)
+        timeout = self._checkIterationTimeout(reactor)
+        self.assertEqual(timeout, 15)
+
+
+    def test_delayDelayedCall(self):
+        """
+        If a delayed call is re-delayed, the timeout passed to
+        C{doIteration} is based on the remaining time before the call would
+        have been made and the additional amount of time passed to the delay
+        method.
+        """
+        reactor = TimeoutReportReactor()
+        call = reactor.callLater(50, lambda: None)
+        reactor.now += 10
+        call.delay(20)
+        timeout = self._checkIterationTimeout(reactor)
+        self.assertEqual(timeout, 60)
+
+
+    def test_cancelDelayedCall(self):
+        """
+        If the only delayed call is canceled, C{None} is the timeout passed
+        to C{doIteration}.
+        """
+        reactor = TimeoutReportReactor()
+        call = reactor.callLater(50, lambda: None)
+        call.cancel()
+        timeout = self._checkIterationTimeout(reactor)
+        self.assertEqual(timeout, None)
+
+
+
+class ConnectedDatagramPortTestCase(TestCase):
+    """
+    Test connected datagram UNIX sockets.
+    """
+    if skipSockets is not None:
+        skip = skipSockets
+
+
+    def test_connectionFailedDoesntCallLoseConnection(self):
+        """
+        L{ConnectedDatagramPort} does not call the deprecated C{loseConnection}
+        in L{ConnectedDatagramPort.connectionFailed}.
+        """
+        def loseConnection():
+            """
+            Dummy C{loseConnection} method. C{loseConnection} is deprecated and
+            should not get called.
+            """
+            self.fail("loseConnection is deprecated and should not get called.")
+
+        port = unix.ConnectedDatagramPort(None, ClientProto())
+        port.loseConnection = loseConnection
+        port.connectionFailed("goodbye")
+
+
+    def test_connectionFailedCallsStopListening(self):
+        """
+        L{ConnectedDatagramPort} calls L{ConnectedDatagramPort.stopListening}
+        instead of the deprecated C{loseConnection} in
+        L{ConnectedDatagramPort.connectionFailed}.
+        """
+        self.called = False
+
+        def stopListening():
+            """
+            Dummy C{stopListening} method.
+            """
+            self.called = True
+
+        port = unix.ConnectedDatagramPort(None, ClientProto())
+        port.stopListening = stopListening
+        port.connectionFailed("goodbye")
+        self.assertEqual(self.called, True)
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_posixprocess.py b/ThirdParty/Twisted/twisted/internet/test/test_posixprocess.py
new file mode 100644
index 0000000..f7abd55
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_posixprocess.py
@@ -0,0 +1,340 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for POSIX-based L{IReactorProcess} implementations.
+"""
+
+import errno, os, sys
+
+try:
+    import fcntl
+except ImportError:
+    platformSkip = "non-POSIX platform"
+else:
+    from twisted.internet import process
+    platformSkip = None
+
+from twisted.trial.unittest import TestCase
+
+
+class FakeFile(object):
+    """
+    A dummy file object which records when it is closed.
+    """
+    def __init__(self, testcase, fd):
+        self.testcase = testcase
+        self.fd = fd
+
+
+    def close(self):
+        self.testcase._files.remove(self.fd)
+
+
+
+class FakeResourceModule(object):
+    """
+    Fake version of L{resource} which hard-codes a particular rlimit for maximum
+    open files.
+
+    @ivar _limit: The value to return for the hard limit on the number of open files.
+    """
+    RLIMIT_NOFILE = 1
+
+    def __init__(self, limit):
+        self._limit = limit
+
+
+    def getrlimit(self, no):
+        """
+        A fake of L{resource.getrlimit} which returns a pre-determined result.
+        """
+        if no == self.RLIMIT_NOFILE:
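+            # Mimic resource.getrlimit's (soft, hard) pair; the hard limit is the fake value.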
+            return [0, self._limit]
+        return [123, 456]
+
+
+
+class FDDetectorTests(TestCase):
+    """
+    Tests for the _FDDetector class in twisted.internet.process, which detects
+    which implementation to install as the _listOpenFDs method.
+
+    @ivar devfs: A flag indicating whether the filesystem fake will indicate
+        that /dev/fd exists.
+
+    @ivar accurateDevFDResults: A flag indicating whether the /dev/fd fake
+        returns accurate open file information.
+
+    @ivar procfs: A flag indicating whether the filesystem fake will indicate
+        that /proc/<pid>/fd exists.
+    """
+    skip = platformSkip
+
+    devfs = False
+    accurateDevFDResults = False
+
+    procfs = False
+
+    def getpid(self):
+        """
+        Fake os.getpid which always returns the same value.
+        """
+        return 123
+
+
+    def listdir(self, arg):
+        """
+        Fake os.listdir whose results depend on which modes are being simulated.
+
+        @param arg: the directory to list
+        """
+        accurate = map(str, self._files)
+        if self.procfs and arg == ('/proc/%d/fd' % (self.getpid(),)):
+            return accurate
+        if self.devfs and arg == '/dev/fd':
+            if self.accurateDevFDResults:
+                return accurate
+            return ["0", "1", "2"]
+        raise OSError()
+
+
+    def openfile(self, fname, mode):
+        """
+        This is a mock for L{open}.  It keeps track of opened files so extra
+        descriptors can be returned from the mock for L{os.listdir} when used on
+        one of the list-of-file-descriptors directories.
+
+        A L{FakeFile} is returned which can be closed to remove the new
+        descriptor from the open list.
+        """
+        # Find the smallest unused file descriptor and give it to the new file.
+        f = FakeFile(self, min(set(range(1024)) - set(self._files)))
+        self._files.append(f.fd)
+        return f
+
+
+    def hideResourceModule(self):
+        """
+        Make the L{resource} module unimportable for the remainder of the
+        current test method.
+        """
+        sys.modules['resource'] = None
+
+
+    def revealResourceModule(self, limit):
+        """
+        Make a L{FakeResourceModule} instance importable at the L{resource}
+        name.
+
+        @param limit: The value which will be returned for the hard limit of
+            number of open files by the fake resource module's C{getrlimit}
+            function.
+        """
+        sys.modules['resource'] = FakeResourceModule(limit)
+
+
+    def replaceResourceModule(self, value):
+        """
+        Restore the original resource module to L{sys.modules}.
+        """
+        if value is None:
+            try:
+                del sys.modules['resource']
+            except KeyError:
+                pass
+        else:
+            sys.modules['resource'] = value
+
+
+    def setUp(self):
+        """
+        Set up the tests, giving ourselves a detector object to play with and
+        setting up its testable knobs to refer to our mocked versions.
+        """
+        self.detector = process._FDDetector()
+        self.detector.listdir = self.listdir
+        self.detector.getpid = self.getpid
+        self.detector.openfile = self.openfile
+        self._files = [0, 1, 2]
+        self.addCleanup(
+            self.replaceResourceModule, sys.modules.get('resource'))
+
+
+    def test_selectFirstWorking(self):
+        """
+        L{FDDetector._getImplementation} returns the first method from its
+        C{_implementations} list which returns results which reflect a newly
+        opened file descriptor.
+        """
+        def failWithException():
+            raise ValueError("This does not work")
+
+        def failWithWrongResults():
+            return [0, 1, 2]
+
+        def correct():
+            return self._files[:]
+
+        self.detector._implementations = [
+            failWithException, failWithWrongResults, correct]
+
+        self.assertIdentical(correct, self.detector._getImplementation())
+
+
+    def test_selectLast(self):
+        """
+        L{FDDetector._getImplementation} returns the last method from its
+        C{_implementations} list if none of the implementations manage to return
+        results which reflect a newly opened file descriptor.
+        """
+        def failWithWrongResults():
+            return [3, 5, 9]
+
+        def failWithOtherWrongResults():
+            return [0, 1, 2]
+
+        self.detector._implementations = [
+            failWithWrongResults, failWithOtherWrongResults]
+
+        self.assertIdentical(
+            failWithOtherWrongResults, self.detector._getImplementation())
+
+
+    def test_identityOfListOpenFDsChanges(self):
+        """
+        Check that the identity of _listOpenFDs changes after running
+        _listOpenFDs the first time, but not after the second time it's run.
+
+        In other words, check that the monkey patching actually works.
+        """
+        # Create a new instance
+        detector = process._FDDetector()
+
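+        # func_name is the Python 2 spelling of a function's __name__ attribute.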
+        first = detector._listOpenFDs.func_name
+        detector._listOpenFDs()
+        second = detector._listOpenFDs.func_name
+        detector._listOpenFDs()
+        third = detector._listOpenFDs.func_name
+
+        self.assertNotEqual(first, second)
+        self.assertEqual(second, third)
+
+
+    def test_devFDImplementation(self):
+        """
+        L{_FDDetector._devFDImplementation} raises L{OSError} if there is no
+        I{/dev/fd} directory, otherwise it returns the basenames of its children
+        interpreted as integers.
+        """
+        self.devfs = False
+        self.assertRaises(OSError, self.detector._devFDImplementation)
+        self.devfs = True
+        self.accurateDevFDResults = False
+        self.assertEqual([0, 1, 2], self.detector._devFDImplementation())
+
+
+    def test_procFDImplementation(self):
+        """
+        L{_FDDetector._procFDImplementation} raises L{OSError} if there is no
+        I{/proc/<pid>/fd} directory, otherwise it returns the basenames of its
+        children interpreted as integers.
+        """
+        self.procfs = False
+        self.assertRaises(OSError, self.detector._procFDImplementation)
+        self.procfs = True
+        self.assertEqual([0, 1, 2], self.detector._procFDImplementation())
+
+
+    def test_resourceFDImplementation(self):
+        """
+        L{_FDDetector._fallbackFDImplementation} uses the L{resource} module if
+        it is available, returning a range of integers from 0 to the
+        minimum of C{1024} and the hard I{NOFILE} limit.
+        """
+        # When the resource module is here, use its value.
+        self.revealResourceModule(512)
+        self.assertEqual(
+            range(512), self.detector._fallbackFDImplementation())
+
+        # But limit its value to the arbitrarily selected value 1024.
+        self.revealResourceModule(2048)
+        self.assertEqual(
+            range(1024), self.detector._fallbackFDImplementation())
+
+
+    def test_fallbackFDImplementation(self):
+        """
+        L{_FDDetector._fallbackFDImplementation}, the implementation of last
+        resort, succeeds with a fixed range of integers from 0 to 1024 when the
+        L{resource} module is not importable.
+        """
+        self.hideResourceModule()
+        self.assertEqual(range(1024), self.detector._fallbackFDImplementation())
+
+
+
+class FileDescriptorTests(TestCase):
+    """
+    Tests for L{twisted.internet.process._listOpenFDs}
+    """
+    skip = platformSkip
+
+    def test_openFDs(self):
+        """
+        File descriptors returned by L{_listOpenFDs} are mostly open.
+
+        This test assumes that zero-length writes fail with EBADF on closed
+        file descriptors.
+        """
+        for fd in process._listOpenFDs():
+            try:
+                fcntl.fcntl(fd, fcntl.F_GETFL)
+            except IOError, err:
+                self.assertEqual(
+                    errno.EBADF, err.errno,
+                    "fcntl(%d, F_GETFL) failed with unexpected errno %d" % (
+                        fd, err.errno))
+
+
+    def test_expectedFDs(self):
+        """
+        L{_listOpenFDs} lists expected file descriptors.
+        """
+        # This is a tricky test.  A priori, there is no way to know what file
+        # descriptors are open now, so there is no way to know what _listOpenFDs
+        # should return.  Work around this by creating some new file descriptors
+        # which we can know the state of and then just making assertions about
+        # their presence or absence in the result.
+
+        # Expect a file we just opened to be listed.
+        f = file(os.devnull)
+        openfds = process._listOpenFDs()
+        self.assertIn(f.fileno(), openfds)
+
+        # Expect a file we just closed not to be listed - with a caveat.  The
+        # implementation may need to open a file to discover the result.  That
+        # open file descriptor will be allocated the same number as the one we
+        # just closed.  So, instead, create a hole in the file descriptor space
+        # to catch that internal descriptor and make the assertion about a
+        # different closed file descriptor.
+
+        # This gets allocated a file descriptor larger than f's, since nothing
+        # has been closed since we opened f.
+        fd = os.dup(f.fileno())
+
+        # But sanity check that; if it fails the test is invalid.
+        self.assertTrue(
+            fd > f.fileno(),
+            "Expected duplicate file descriptor to be greater than original")
+
+        try:
+            # Get rid of the original, creating the hole.  The copy should still
+            # be open, of course.
+            f.close()
+            self.assertIn(fd, process._listOpenFDs())
+        finally:
+            # Get rid of the copy now
+            os.close(fd)
+        # And it should not appear in the result.
+        self.assertNotIn(fd, process._listOpenFDs())
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_process.py b/ThirdParty/Twisted/twisted/internet/test/test_process.py
new file mode 100644
index 0000000..ca7ceab
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_process.py
@@ -0,0 +1,695 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorProcess}.
+"""
+
+__metaclass__ = type
+
+import os, sys, signal, threading
+
+from twisted.trial.unittest import TestCase, SkipTest
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.python.compat import set
+from twisted.python.log import msg, err
+from twisted.python.runtime import platform, platformType
+from twisted.python.filepath import FilePath
+from twisted.internet import utils
+from twisted.internet.interfaces import IReactorProcess, IProcessTransport
+from twisted.internet.defer import Deferred, succeed
+from twisted.internet.protocol import ProcessProtocol
+from twisted.internet.error import ProcessDone, ProcessTerminated
+
+
+
+class _ShutdownCallbackProcessProtocol(ProcessProtocol):
+    """
+    An L{IProcessProtocol} which fires a Deferred when the process it is
+    associated with ends.
+
+    @ivar received: A C{dict} mapping file descriptors to lists of bytes
+        received from the child process on those file descriptors.
+    """
+    def __init__(self, whenFinished):
+        self.whenFinished = whenFinished
+        self.received = {}
+
+
+    def childDataReceived(self, fd, bytes):
+        self.received.setdefault(fd, []).append(bytes)
+
+
+    def processEnded(self, reason):
+        self.whenFinished.callback(None)
+
+
+
+class ProcessTestsBuilderBase(ReactorBuilder):
+    """
+    Base class for L{IReactorProcess} tests, defining tests which can be
+    applied to both PTY and non-PTY uses of C{spawnProcess}.
+
+    Subclasses are expected to set the C{usePTY} attribute to C{True} or
+    C{False}.
+    """
+    requiredInterfaces = [IReactorProcess]
+
+
+    def test_processTransportInterface(self):
+        """
+        L{IReactorProcess.spawnProcess} connects the protocol passed to it
+        to a transport which provides L{IProcessTransport}.
+        """
+        ended = Deferred()
+        protocol = _ShutdownCallbackProcessProtocol(ended)
+
+        reactor = self.buildReactor()
+        transport = reactor.spawnProcess(
+            protocol, sys.executable, [sys.executable, "-c", ""],
+            usePTY=self.usePTY)
+
+        # The transport is available synchronously, so we can check it right
+        # away (unlike many transport-based tests).  This is convenient even
+        # though it's probably not how the spawnProcess interface should really
+        # work.
+        # We're not using verifyObject here because part of
+        # IProcessTransport is a lie - there are no getHost or getPeer
+        # methods.  See #1124.
+        self.assertTrue(IProcessTransport.providedBy(transport))
+
+        # Let the process run and exit so we don't leave a zombie around.
+        ended.addCallback(lambda ignored: reactor.stop())
+        self.runReactor(reactor)
+
+
+    def _writeTest(self, write):
+        """
+        Helper for testing L{IProcessTransport} write functionality.  This
+        method spawns a child process and gives C{write} a chance to write some
+        bytes to it.  It then verifies that the bytes were actually written to
+        it (by relying on the child process to echo them back).
+
+        @param write: A two-argument callable.  This is invoked with a process
+            transport and some bytes to write to it.
+        """
+        reactor = self.buildReactor()
+
+        ended = Deferred()
+        protocol = _ShutdownCallbackProcessProtocol(ended)
+
+        bytes = "hello, world" + os.linesep
+        program = (
+            "import sys\n"
+            "sys.stdout.write(sys.stdin.readline())\n"
+            )
+
+        def startup():
+            transport = reactor.spawnProcess(
+                protocol, sys.executable, [sys.executable, "-c", program])
+            try:
+                write(transport, bytes)
+            except:
+                err(None, "Unhandled exception while writing")
+                transport.signalProcess('KILL')
+        reactor.callWhenRunning(startup)
+
+        ended.addCallback(lambda ignored: reactor.stop())
+
+        self.runReactor(reactor)
+        self.assertEqual(bytes, "".join(protocol.received[1]))
+
+
+    def test_write(self):
+        """
+        L{IProcessTransport.write} writes the specified C{str} to the standard
+        input of the child process.
+        """
+        def write(transport, bytes):
+            transport.write(bytes)
+        self._writeTest(write)
+
+
+    def test_writeSequence(self):
+        """
+        L{IProcessTransport.writeSequence} writes the specified C{list} of
+        C{str} to the standard input of the child process.
+        """
+        def write(transport, bytes):
+            transport.writeSequence(list(bytes))
+        self._writeTest(write)
+
+
+    def test_writeToChild(self):
+        """
+        L{IProcessTransport.writeToChild} writes the specified C{str} to the
+        specified file descriptor of the child process.
+        """
+        def write(transport, bytes):
+            transport.writeToChild(0, bytes)
+        self._writeTest(write)
+
+
+    def test_writeToChildBadFileDescriptor(self):
+        """
+        L{IProcessTransport.writeToChild} raises L{KeyError} if passed a file
+        descriptor which was not set up by L{IReactorProcess.spawnProcess}.
+        """
+        def write(transport, bytes):
+            try:
+                self.assertRaises(KeyError, transport.writeToChild, 13, bytes)
+            finally:
+                # Just get the process to exit so the test can complete
+                transport.write(bytes)
+        self._writeTest(write)
+
+
+    def test_spawnProcessEarlyIsReaped(self):
+        """
+        If, before the reactor is started with L{IReactorCore.run}, a
+        process is started with L{IReactorProcess.spawnProcess} and
+        terminates, the process is reaped once the reactor is started.
+        """
+        reactor = self.buildReactor()
+
+        # Create the process with no shared file descriptors, so that there
+        # are no other events for the reactor to notice and "cheat" with.
+        # We want to be sure it's really dealing with the process exiting,
+        # not some associated event.
+        if self.usePTY:
+            childFDs = None
+        else:
+            childFDs = {}
+
+        # Arrange to notice the SIGCHLD.
+        signaled = threading.Event()
+        def handler(*args):
+            signaled.set()
+        signal.signal(signal.SIGCHLD, handler)
+
+        # Start a process - before starting the reactor!
+        ended = Deferred()
+        reactor.spawnProcess(
+            _ShutdownCallbackProcessProtocol(ended), sys.executable,
+            [sys.executable, "-c", ""], usePTY=self.usePTY, childFDs=childFDs)
+
+        # Wait for the SIGCHLD (which might have been delivered before we got
+        # here, but that's okay because the signal handler was installed above,
+        # before we could have gotten it).
+        signaled.wait(120)
+        if not signaled.isSet():
+            self.fail("Timed out waiting for child process to exit.")
+
+        # Capture the processEnded callback.
+        result = []
+        ended.addCallback(result.append)
+
+        if result:
+            # The synchronous path through spawnProcess / Process.__init__ /
+            # registerReapProcessHandler was encountered.  There's no reason to
+            # start the reactor, because everything is done already.
+            return
+
+        # Otherwise, though, start the reactor so it can tell us the process
+        # exited.
+        ended.addCallback(lambda ignored: reactor.stop())
+        self.runReactor(reactor)
+
+        # Make sure the reactor stopped because the Deferred fired.
+        self.assertTrue(result)
+
+    if getattr(signal, 'SIGCHLD', None) is None:
+        test_spawnProcessEarlyIsReaped.skip = (
+            "Platform lacks SIGCHLD, early-spawnProcess test can't work.")
+
+
+    def test_processExitedWithSignal(self):
+        """
+        The C{reason} argument passed to L{IProcessProtocol.processExited} is a
+        L{ProcessTerminated} instance if the child process exits with a signal.
+        """
+        sigName = 'TERM'
+        sigNum = getattr(signal, 'SIG' + sigName)
+        exited = Deferred()
+        source = (
+            "import sys\n"
+            # Talk so the parent process knows the process is running.  This is
+            # necessary because ProcessProtocol.makeConnection may be called
+            # before this process is exec'd.  It would be unfortunate if we
+            # SIGTERM'd the Twisted process while it was on its way to doing
+            # the exec.
+            "sys.stdout.write('x')\n"
+            "sys.stdout.flush()\n"
+            "sys.stdin.read()\n")
+
+        class Exiter(ProcessProtocol):
+            def childDataReceived(self, fd, data):
+                msg('childDataReceived(%d, %r)' % (fd, data))
+                self.transport.signalProcess(sigName)
+
+            def childConnectionLost(self, fd):
+                msg('childConnectionLost(%d)' % (fd,))
+
+            def processExited(self, reason):
+                msg('processExited(%r)' % (reason,))
+                # Protect the Deferred from the failure so that it follows
+                # the callback chain.  This doesn't use the errback chain
+                # because it wants to make sure reason is a Failure.  An
+                # Exception would also make an errback-based test pass, and
+                # that would be wrong.
+                exited.callback([reason])
+
+            def processEnded(self, reason):
+                msg('processEnded(%r)' % (reason,))
+
+        reactor = self.buildReactor()
+        reactor.callWhenRunning(
+            reactor.spawnProcess, Exiter(), sys.executable,
+            [sys.executable, "-c", source], usePTY=self.usePTY)
+
+        def cbExited((failure,)):
+            # Trapping implicitly verifies that it's a Failure (rather than
+            # an exception) and explicitly makes sure it's the right type.
+            failure.trap(ProcessTerminated)
+            err = failure.value
+            if platform.isWindows():
+                # Windows can't really /have/ signals, so it certainly can't
+                # report them as the reason for termination.  Maybe there's
+                # something better we could be doing here, anyway?  Hard to
+                # say.  Anyway, this inconsistency between different platforms
+                # is extremely unfortunate and I would remove it if I
+                # could. -exarkun
+                self.assertIdentical(err.signal, None)
+                self.assertEqual(err.exitCode, 1)
+            else:
+                self.assertEqual(err.signal, sigNum)
+                self.assertIdentical(err.exitCode, None)
+
+        exited.addCallback(cbExited)
+        exited.addErrback(err)
+        exited.addCallback(lambda ign: reactor.stop())
+
+        self.runReactor(reactor)
+
+
+    def test_systemCallUninterruptedByChildExit(self):
+        """
+        If a child process exits while a system call is in progress, the system
+        call should not be interfered with.  In particular, it should not fail
+        with EINTR.
+
+        Older versions of Twisted installed a SIGCHLD handler on POSIX without
+        using the feature exposed by the SA_RESTART flag to sigaction(2).  The
+        most noticeable problem this caused was that blocking reads and writes
+        would sometimes fail with EINTR.
+        """
+        reactor = self.buildReactor()
+        result = []
+
+        def f():
+            try:
+                f1 = os.popen('%s -c "import time; time.sleep(0.1)"' %
+                    (sys.executable,))
+                f2 = os.popen('%s -c "import time; time.sleep(0.5); print \'Foo\'"' %
+                    (sys.executable,))
+                # The read call below will blow up with an EINTR from the
+                # SIGCHLD from the first process exiting if we install a
+                # SIGCHLD handler without SA_RESTART.  (which we used to do)
+                result.append(f2.read())
+            finally:
+                reactor.stop()
+
+        reactor.callWhenRunning(f)
+        self.runReactor(reactor)
+        self.assertEqual(result, ["Foo\n"])
+
+
+    def test_openFileDescriptors(self):
+        """
+        A spawned process has only stdin, stdout and stderr open
+        (file descriptor 3 is also reported as open, because of the call to
+        'os.listdir()').
+        """
+        here = FilePath(__file__)
+        top = here.parent().parent().parent().parent()
+        source = (
+            "import sys",
+            "sys.path.insert(0, '%s')" % (top.path,),
+            "from twisted.internet import process",
+            "sys.stdout.write(str(process._listOpenFDs()))",
+            "sys.stdout.flush()")
+
+        def checkOutput(output):
+            self.assertEqual('[0, 1, 2, 3]', output)
+
+        reactor = self.buildReactor()
+
+        class Protocol(ProcessProtocol):
+            def __init__(self):
+                self.output = []
+
+            def outReceived(self, data):
+                self.output.append(data)
+
+            def processEnded(self, reason):
+                try:
+                    checkOutput("".join(self.output))
+                finally:
+                    reactor.stop()
+
+        proto = Protocol()
+        reactor.callWhenRunning(
+            reactor.spawnProcess, proto, sys.executable,
+            [sys.executable, "-Wignore", "-c", "\n".join(source)],
+            usePTY=self.usePTY)
+        self.runReactor(reactor)
+
+    if platformType != "posix":
+        test_openFileDescriptors.skip = "Test only applies to POSIX platforms"
+
+
+    def test_timelyProcessExited(self):
+        """
+        If a spawned process exits, C{processExited} will be called in a
+        timely manner.
+        """
+        reactor = self.buildReactor()
+
+        class ExitingProtocol(ProcessProtocol):
+            exited = False
+
+            def processExited(protoSelf, reason):
+                protoSelf.exited = True
+                reactor.stop()
+                self.assertEqual(reason.value.exitCode, 0)
+
+        protocol = ExitingProtocol()
+        reactor.callWhenRunning(
+            reactor.spawnProcess, protocol, sys.executable,
+            [sys.executable, "-c", "raise SystemExit(0)"],
+            usePTY=self.usePTY)
+
+        # This will timeout if processExited isn't called:
+        self.runReactor(reactor, timeout=30)
+        self.assertEqual(protocol.exited, True)
+
+
+
+class ProcessTestsBuilder(ProcessTestsBuilderBase):
+    """
+    Builder defining tests relating to L{IReactorProcess} for child processes
+    which do not have a PTY.
+    """
+    usePTY = False
+
+    keepStdioOpenProgram = FilePath(__file__).sibling('process_helper.py').path
+    if platform.isWindows():
+        keepStdioOpenArg = "windows"
+    else:
+        # Just a value that doesn't equal "windows"
+        keepStdioOpenArg = ""
+
+    # Define this test here because PTY-using processes only have stdin and
+    # stdout and the test would need to be different for that to work.
+    def test_childConnectionLost(self):
+        """
+        L{IProcessProtocol.childConnectionLost} is called each time a file
+        descriptor associated with a child process is closed.
+        """
+        connected = Deferred()
+        lost = {0: Deferred(), 1: Deferred(), 2: Deferred()}
+
+        class Closer(ProcessProtocol):
+            def makeConnection(self, transport):
+                connected.callback(transport)
+
+            def childConnectionLost(self, childFD):
+                lost[childFD].callback(None)
+
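+        # The child closes each fd number read from stdin and exits on a blank line.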
+        source = (
+            "import os, sys\n"
+            "while 1:\n"
+            "    line = sys.stdin.readline().strip()\n"
+            "    if not line:\n"
+            "        break\n"
+            "    os.close(int(line))\n")
+
+        reactor = self.buildReactor()
+        reactor.callWhenRunning(
+            reactor.spawnProcess, Closer(), sys.executable,
+            [sys.executable, "-c", source], usePTY=self.usePTY)
+
+        def cbConnected(transport):
+            transport.write('2\n')
+            return lost[2].addCallback(lambda ign: transport)
+        connected.addCallback(cbConnected)
+
+        def lostSecond(transport):
+            transport.write('1\n')
+            return lost[1].addCallback(lambda ign: transport)
+        connected.addCallback(lostSecond)
+
+        def lostFirst(transport):
+            transport.write('\n')
+        connected.addCallback(lostFirst)
+        connected.addErrback(err)
+
+        def cbEnded(ignored):
+            reactor.stop()
+        connected.addCallback(cbEnded)
+
+        self.runReactor(reactor)
+
+
+    # This test is here because PTYProcess never delivers childConnectionLost.
+    def test_processEnded(self):
+        """
+        L{IProcessProtocol.processEnded} is called after the child process
+        exits and L{IProcessProtocol.childConnectionLost} is called for each of
+        its file descriptors.
+        """
+        ended = Deferred()
+        lost = []
+
+        class Ender(ProcessProtocol):
+            def childDataReceived(self, fd, data):
+                msg('childDataReceived(%d, %r)' % (fd, data))
+                self.transport.loseConnection()
+
+            def childConnectionLost(self, childFD):
+                msg('childConnectionLost(%d)' % (childFD,))
+                lost.append(childFD)
+
+            def processExited(self, reason):
+                msg('processExited(%r)' % (reason,))
+
+            def processEnded(self, reason):
+                msg('processEnded(%r)' % (reason,))
+                ended.callback([reason])
+
+        reactor = self.buildReactor()
+        reactor.callWhenRunning(
+            reactor.spawnProcess, Ender(), sys.executable,
+            [sys.executable, self.keepStdioOpenProgram, "child",
+             self.keepStdioOpenArg],
+            usePTY=self.usePTY)
+
+        def cbEnded((failure,)):
+            failure.trap(ProcessDone)
+            self.assertEqual(set(lost), set([0, 1, 2]))
+        ended.addCallback(cbEnded)
+
+        ended.addErrback(err)
+        ended.addCallback(lambda ign: reactor.stop())
+
+        self.runReactor(reactor)
+
+
+    # This test is here because PTYProcess.loseConnection does not actually
+    # close the file descriptors to the child process.  This test needs to be
+    # written fairly differently for PTYProcess.
+    def test_processExited(self):
+        """
+        L{IProcessProtocol.processExited} is called when the child process
+        exits, even if file descriptors associated with the child are still
+        open.
+        """
+        exited = Deferred()
+        allLost = Deferred()
+        lost = []
+
+        class Waiter(ProcessProtocol):
+            def childDataReceived(self, fd, data):
+                msg('childDataReceived(%d, %r)' % (fd, data))
+
+            def childConnectionLost(self, childFD):
+                msg('childConnectionLost(%d)' % (childFD,))
+                lost.append(childFD)
+                if len(lost) == 3:
+                    allLost.callback(None)
+
+            def processExited(self, reason):
+                msg('processExited(%r)' % (reason,))
+                # See test_processExitedWithSignal
+                exited.callback([reason])
+                self.transport.loseConnection()
+
+        reactor = self.buildReactor()
+        reactor.callWhenRunning(
+            reactor.spawnProcess, Waiter(), sys.executable,
+            [sys.executable, self.keepStdioOpenProgram, "child",
+             self.keepStdioOpenArg],
+            usePTY=self.usePTY)
+
+        def cbExited((failure,)):
+            failure.trap(ProcessDone)
+            msg('cbExited; lost = %s' % (lost,))
+            self.assertEqual(lost, [])
+            return allLost
+        exited.addCallback(cbExited)
+
+        def cbAllLost(ignored):
+            self.assertEqual(set(lost), set([0, 1, 2]))
+        exited.addCallback(cbAllLost)
+
+        exited.addErrback(err)
+        exited.addCallback(lambda ign: reactor.stop())
+
+        self.runReactor(reactor)
+
+
+    def makeSourceFile(self, sourceLines):
+        """
+        Write the given list of lines to a text file and return the absolute
+        path to it.
+        """
+        script = self.mktemp()
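+        # file() is the Python 2 built-in equivalent of open().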
+        scriptFile = file(script, 'wt')
+        scriptFile.write(os.linesep.join(sourceLines) + os.linesep)
+        scriptFile.close()
+        return os.path.abspath(script)
+
+
+    def test_shebang(self):
+        """
+        Spawning a process with an executable which is a script starting
+        with an interpreter definition line (#!) uses that interpreter to
+        evaluate the script.
+        """
+        SHEBANG_OUTPUT = 'this is the shebang output'
+
+        scriptFile = self.makeSourceFile([
+                "#!%s" % (sys.executable,),
+                "import sys",
+                "sys.stdout.write('%s')" % (SHEBANG_OUTPUT,),
+                "sys.stdout.flush()"])
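+        # 0700 is the Python 2 octal literal for 0o700: owner read/write/execute.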
+        os.chmod(scriptFile, 0700)
+
+        reactor = self.buildReactor()
+
+        def cbProcessExited((out, err, code)):
+            msg("cbProcessExited((%r, %r, %d))" % (out, err, code))
+            self.assertEqual(out, SHEBANG_OUTPUT)
+            self.assertEqual(err, "")
+            self.assertEqual(code, 0)
+
+        def shutdown(passthrough):
+            reactor.stop()
+            return passthrough
+
+        def start():
+            d = utils.getProcessOutputAndValue(scriptFile, reactor=reactor)
+            d.addBoth(shutdown)
+            d.addCallback(cbProcessExited)
+            d.addErrback(err)
+
+        reactor.callWhenRunning(start)
+        self.runReactor(reactor)
+
+
+    def test_processCommandLineArguments(self):
+        """
+        Arguments given to spawnProcess are passed to the child process as
+        originally intended.
+        """
+        source = (
+            # On Windows, stdout is not opened in binary mode by default,
+            # so newline characters are munged on writing, interfering with
+            # the tests.
+            'import sys, os\n'
+            'try:\n'
+            '  import msvcrt\n'
+            '  msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)\n'
+            'except ImportError:\n'
+            '  pass\n'
+            'for arg in sys.argv[1:]:\n'
+            '  sys.stdout.write(arg + chr(0))\n'
+            '  sys.stdout.flush()')
+
+        args = ['hello', '"', ' \t|<>^&', r'"\\"hello\\"', r'"foo\ bar baz\""']
+        # Ensure that all non-NUL characters can be passed too.
+        args.append(''.join(map(chr, xrange(1, 256))))
+
+        reactor = self.buildReactor()
+
+        def processFinished(output):
+            output = output.split('\0')
+            # Drop the trailing \0.
+            output.pop()
+            self.assertEqual(args, output)
+
+        def shutdown(result):
+            reactor.stop()
+            return result
+
+        def spawnChild():
+            d = succeed(None)
+            d.addCallback(lambda dummy: utils.getProcessOutput(
+                sys.executable, ['-c', source] + args, reactor=reactor))
+            d.addCallback(processFinished)
+            d.addBoth(shutdown)
+
+        reactor.callWhenRunning(spawnChild)
+        self.runReactor(reactor)
+globals().update(ProcessTestsBuilder.makeTestCaseClasses())
+
+
+
+class PTYProcessTestsBuilder(ProcessTestsBuilderBase):
+    """
+    Builder defining tests relating to L{IReactorProcess} for child processes
+    which have a PTY.
+    """
+    usePTY = True
+
+    if platform.isWindows():
+        skip = "PTYs are not supported on Windows."
+    elif platform.isMacOSX():
+        skippedReactors = {
+            "twisted.internet.pollreactor.PollReactor":
+                "OS X's poll() does not support PTYs"}
+globals().update(PTYProcessTestsBuilder.makeTestCaseClasses())
+
+
+
+class PotentialZombieWarningTests(TestCase):
+    """
+    Tests for L{twisted.internet.error.PotentialZombieWarning}.
+    """
+    def test_deprecated(self):
+        """
+        Accessing L{PotentialZombieWarning} via the
+        I{PotentialZombieWarning} attribute of L{twisted.internet.error}
+        results in a deprecation warning being emitted.
+        """
+        from twisted.internet import error
+        error.PotentialZombieWarning
+
+        warnings = self.flushWarnings([self.test_deprecated])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "twisted.internet.error.PotentialZombieWarning was deprecated in "
+            "Twisted 10.0.0: There is no longer any potential for zombie "
+            "process.")
+        self.assertEqual(len(warnings), 1)
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_protocol.py b/ThirdParty/Twisted/twisted/internet/test/test_protocol.py
new file mode 100644
index 0000000..8d7108b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_protocol.py
@@ -0,0 +1,457 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.protocol}.
+"""
+
+from __future__ import division, absolute_import
+
+from zope.interface.verify import verifyObject
+from zope.interface import implementer
+
+from twisted.python.failure import Failure
+from twisted.internet.interfaces import (IProtocol, ILoggingContext,
+                                         IProtocolFactory, IConsumer)
+from twisted.internet.defer import CancelledError
+from twisted.internet.protocol import (
+    Protocol, ClientCreator, Factory, ProtocolToConsumerAdapter,
+    ConsumerToProtocolAdapter)
+from twisted.internet.task import Clock
+from twisted.trial.unittest import TestCase
+from twisted.test.proto_helpers import MemoryReactor, StringTransport
+
+
+
+class MemoryConnector:
+    _disconnected = False
+
+    def disconnect(self):
+        self._disconnected = True
+
+
+
+class MemoryReactorWithConnectorsAndTime(MemoryReactor, Clock):
+    """
+    An extension of L{MemoryReactor} which returns L{IConnector} providers
+    from its C{connectTCP}, C{connectUNIX}, and C{connectSSL} methods and
+    mixes in L{Clock} for deterministic time.
+    """
+    def __init__(self):
+        MemoryReactor.__init__(self)
+        Clock.__init__(self)
+        self.connectors = []
+
+
+    def connectTCP(self, *a, **kw):
+        MemoryReactor.connectTCP(self, *a, **kw)
+        connector = MemoryConnector()
+        self.connectors.append(connector)
+        return connector
+
+
+    def connectUNIX(self, *a, **kw):
+        MemoryReactor.connectUNIX(self, *a, **kw)
+        connector = MemoryConnector()
+        self.connectors.append(connector)
+        return connector
+
+
+    def connectSSL(self, *a, **kw):
+        MemoryReactor.connectSSL(self, *a, **kw)
+        connector = MemoryConnector()
+        self.connectors.append(connector)
+        return connector
+
+
+
+class ClientCreatorTests(TestCase):
+    """
+    Tests for L{twisted.internet.protocol.ClientCreator}.
+    """
+    def _basicConnectTest(self, check):
+        """
+        Helper for implementing a test to verify that one of the I{connect}
+        methods of L{ClientCreator} passes the right arguments to the right
+        reactor method.
+
+        @param check: A function which will be invoked with a reactor and a
+            L{ClientCreator} instance and which should call one of the
+            L{ClientCreator}'s I{connect} methods and assert that all of its
+            arguments except for the factory are passed on as expected to the
+            reactor.  The factory should be returned.
+        """
+        class SomeProtocol(Protocol):
+            pass
+
+        reactor = MemoryReactorWithConnectorsAndTime()
+        cc = ClientCreator(reactor, SomeProtocol)
+        factory = check(reactor, cc)
+        protocol = factory.buildProtocol(None)
+        self.assertIsInstance(protocol, SomeProtocol)
+
+
+    def test_connectTCP(self):
+        """
+        L{ClientCreator.connectTCP} calls C{reactor.connectTCP} with the host
+        and port information passed to it, and with a factory which will
+        construct the protocol passed to L{ClientCreator.__init__}.
+        """
+        def check(reactor, cc):
+            cc.connectTCP('example.com', 1234, 4321, ('1.2.3.4', 9876))
+            host, port, factory, timeout, bindAddress = reactor.tcpClients.pop()
+            self.assertEqual(host, 'example.com')
+            self.assertEqual(port, 1234)
+            self.assertEqual(timeout, 4321)
+            self.assertEqual(bindAddress, ('1.2.3.4', 9876))
+            return factory
+        self._basicConnectTest(check)
+
+
+    def test_connectUNIX(self):
+        """
+        L{ClientCreator.connectUNIX} calls C{reactor.connectUNIX} with the
+        filename passed to it, and with a factory which will construct the
+        protocol passed to L{ClientCreator.__init__}.
+        """
+        def check(reactor, cc):
+            cc.connectUNIX('/foo/bar', 123, True)
+            address, factory, timeout, checkPID = reactor.unixClients.pop()
+            self.assertEqual(address, '/foo/bar')
+            self.assertEqual(timeout, 123)
+            self.assertEqual(checkPID, True)
+            return factory
+        self._basicConnectTest(check)
+
+
+    def test_connectSSL(self):
+        """
+        L{ClientCreator.connectSSL} calls C{reactor.connectSSL} with the host,
+        port, and context factory passed to it, and with a factory which will
+        construct the protocol passed to L{ClientCreator.__init__}.
+        """
+        def check(reactor, cc):
+            expectedContextFactory = object()
+            cc.connectSSL('example.com', 1234, expectedContextFactory, 4321, ('4.3.2.1', 5678))
+            host, port, factory, contextFactory, timeout, bindAddress = reactor.sslClients.pop()
+            self.assertEqual(host, 'example.com')
+            self.assertEqual(port, 1234)
+            self.assertIdentical(contextFactory, expectedContextFactory)
+            self.assertEqual(timeout, 4321)
+            self.assertEqual(bindAddress, ('4.3.2.1', 5678))
+            return factory
+        self._basicConnectTest(check)
+
+
+    def _cancelConnectTest(self, connect):
+        """
+        Helper for implementing a test to verify that cancellation of the
+        L{Deferred} returned by one of L{ClientCreator}'s I{connect} methods is
+        implemented to cancel the underlying connector.
+
+        @param connect: A function which will be invoked with a L{ClientCreator}
+            instance as an argument and which should call one of its I{connect}
+            methods and return the result.
+
+        @return: A L{Deferred} which fires when the test is complete or fails if
+            there is a problem.
+        """
+        reactor = MemoryReactorWithConnectorsAndTime()
+        cc = ClientCreator(reactor, Protocol)
+        d = connect(cc)
+        connector = reactor.connectors.pop()
+        self.assertFalse(connector._disconnected)
+        d.cancel()
+        self.assertTrue(connector._disconnected)
+        return self.assertFailure(d, CancelledError)
+
+
+    def test_cancelConnectTCP(self):
+        """
+        The L{Deferred} returned by L{ClientCreator.connectTCP} can be cancelled
+        to abort the connection attempt before it completes.
+        """
+        def connect(cc):
+            return cc.connectTCP('example.com', 1234)
+        return self._cancelConnectTest(connect)
+
+
+    def test_cancelConnectUNIX(self):
+        """
+        The L{Deferred} returned by L{ClientCreator.connectUNIX} can be cancelled
+        to abort the connection attempt before it completes.
+        """
+        def connect(cc):
+            return cc.connectUNIX('/foo/bar')
+        return self._cancelConnectTest(connect)
+
+
+    def test_cancelConnectSSL(self):
+        """
+        The L{Deferred} returned by L{ClientCreator.connectSSL} can be cancelled
+        to abort the connection attempt before it completes.
+        """
+        def connect(cc):
+            return cc.connectSSL('example.com', 1234, object())
+        return self._cancelConnectTest(connect)
+
+
+    def _cancelConnectTimeoutTest(self, connect):
+        """
+        Like L{_cancelConnectTest}, but for the case where the L{Deferred} is
+        cancelled after the connection is set up but before it is fired with the
+        resulting protocol instance.
+        """
+        reactor = MemoryReactorWithConnectorsAndTime()
+        cc = ClientCreator(reactor, Protocol)
+        d = connect(reactor, cc)
+        connector = reactor.connectors.pop()
+        # Sanity check - there is an outstanding delayed call to fire the
+        # Deferred.
+        self.assertEqual(len(reactor.getDelayedCalls()), 1)
+
+        # Cancel the Deferred, disconnecting the transport just set up and
+        # cancelling the delayed call.
+        d.cancel()
+
+        self.assertEqual(reactor.getDelayedCalls(), [])
+
+        # A real connector implementation is responsible for disconnecting the
+        # transport as well.  For our purposes, just check that someone told the
+        # connector to disconnect.
+        self.assertTrue(connector._disconnected)
+
+        return self.assertFailure(d, CancelledError)
+
+
+    def test_cancelConnectTCPTimeout(self):
+        """
+        L{ClientCreator.connectTCP} inserts a very short delayed call between
+        the time the connection is established and the time the L{Deferred}
+        returned from one of its connect methods actually fires.  If the
+        L{Deferred} is cancelled in this interval, the established connection is
+        closed, the timeout is cancelled, and the L{Deferred} fails with
+        L{CancelledError}.
+        """
+        def connect(reactor, cc):
+            d = cc.connectTCP('example.com', 1234)
+            host, port, factory, timeout, bindAddress = reactor.tcpClients.pop()
+            protocol = factory.buildProtocol(None)
+            transport = StringTransport()
+            protocol.makeConnection(transport)
+            return d
+        return self._cancelConnectTimeoutTest(connect)
+
+
+    def test_cancelConnectUNIXTimeout(self):
+        """
+        L{ClientCreator.connectUNIX} inserts a very short delayed call between
+        the time the connection is established and the time the L{Deferred}
+        returned from one of its connect methods actually fires.  If the
+        L{Deferred} is cancelled in this interval, the established connection is
+        closed, the timeout is cancelled, and the L{Deferred} fails with
+        L{CancelledError}.
+        """
+        def connect(reactor, cc):
+            d = cc.connectUNIX('/foo/bar')
+            address, factory, timeout, checkPID = reactor.unixClients.pop()
+            protocol = factory.buildProtocol(None)
+            transport = StringTransport()
+            protocol.makeConnection(transport)
+            return d
+        return self._cancelConnectTimeoutTest(connect)
+
+
+    def test_cancelConnectSSLTimeout(self):
+        """
+        L{ClientCreator.connectSSL} inserts a very short delayed call between
+        the time the connection is established and the time the L{Deferred}
+        returned from one of its connect methods actually fires.  If the
+        L{Deferred} is cancelled in this interval, the established connection is
+        closed, the timeout is cancelled, and the L{Deferred} fails with
+        L{CancelledError}.
+        """
+        def connect(reactor, cc):
+            d = cc.connectSSL('example.com', 1234, object())
+            host, port, factory, contextFactory, timeout, bindAddress = reactor.sslClients.pop()
+            protocol = factory.buildProtocol(None)
+            transport = StringTransport()
+            protocol.makeConnection(transport)
+            return d
+        return self._cancelConnectTimeoutTest(connect)
+
+
+    def _cancelConnectFailedTimeoutTest(self, connect):
+        """
+        Like L{_cancelConnectTest}, but for the case where the L{Deferred} is
+        cancelled after the connection attempt has failed but before it is fired
+        with the resulting failure.
+        """
+        reactor = MemoryReactorWithConnectorsAndTime()
+        cc = ClientCreator(reactor, Protocol)
+        d, factory = connect(reactor, cc)
+        connector = reactor.connectors.pop()
+        factory.clientConnectionFailed(
+            connector, Failure(Exception("Simulated failure")))
+
+        # Sanity check - there is an outstanding delayed call to fire the
+        # Deferred.
+        self.assertEqual(len(reactor.getDelayedCalls()), 1)
+
+        # Cancel the Deferred, cancelling the delayed call.
+        d.cancel()
+
+        self.assertEqual(reactor.getDelayedCalls(), [])
+
+        return self.assertFailure(d, CancelledError)
+
+
+    def test_cancelConnectTCPFailedTimeout(self):
+        """
+        Similar to L{test_cancelConnectTCPTimeout}, but for the case where the
+        connection attempt fails.
+        """
+        def connect(reactor, cc):
+            d = cc.connectTCP('example.com', 1234)
+            host, port, factory, timeout, bindAddress = reactor.tcpClients.pop()
+            return d, factory
+        return self._cancelConnectFailedTimeoutTest(connect)
+
+
+    def test_cancelConnectUNIXFailedTimeout(self):
+        """
+        Similar to L{test_cancelConnectUNIXTimeout}, but for the case where the
+        connection attempt fails.
+        """
+        def connect(reactor, cc):
+            d = cc.connectUNIX('/foo/bar')
+            address, factory, timeout, checkPID = reactor.unixClients.pop()
+            return d, factory
+        return self._cancelConnectFailedTimeoutTest(connect)
+
+
+    def test_cancelConnectSSLFailedTimeout(self):
+        """
+        Similar to L{test_cancelConnectSSLTimeout}, but for the case where the
+        connection attempt fails.
+        """
+        def connect(reactor, cc):
+            d = cc.connectSSL('example.com', 1234, object())
+            host, port, factory, contextFactory, timeout, bindAddress = reactor.sslClients.pop()
+            return d, factory
+        return self._cancelConnectFailedTimeoutTest(connect)
+
+
+
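+# A minimal standalone sketch of the two mechanisms the cancellation tests
+# above exercise; _SketchConnector and the use of task.Clock are assumptions
+# for illustration, not part of the upstream implementation.  Cancelling the
+# Deferred (a) disconnects the pending connector and (b) removes the short
+# delayed call the creator scheduled.
+from twisted.internet.defer import Deferred, CancelledError
+from twisted.internet.task import Clock
+
+class _SketchConnector(object):
+    """Stand-in connector that only records disconnect()."""
+    disconnected = False
+
+    def disconnect(self):
+        self.disconnected = True
+
+_connector = _SketchConnector()
+_d = Deferred(canceller=lambda d: _connector.disconnect())
+_d.cancel()                          # disconnects the connector ...
+assert _connector.disconnected       # ... and fails _d with CancelledError
+_d.addErrback(lambda f: f.trap(CancelledError))
+
+_clock = Clock()                     # stands in for the memory reactor's time
+_call = _clock.callLater(0, lambda: None)
+_call.cancel()                       # what cancelling the Deferred triggers
+assert _clock.getDelayedCalls() == []
+
+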
+class ProtocolTests(TestCase):
+    """
+    Tests for L{twisted.internet.protocol.Protocol}.
+    """
+    def test_interfaces(self):
+        """
+        L{Protocol} instances provide L{IProtocol} and L{ILoggingContext}.
+        """
+        proto = Protocol()
+        self.assertTrue(verifyObject(IProtocol, proto))
+        self.assertTrue(verifyObject(ILoggingContext, proto))
+
+
+    def test_logPrefix(self):
+        """
+        L{Protocol.logPrefix} returns the protocol class's name.
+        """
+        class SomeThing(Protocol):
+            pass
+        self.assertEqual("SomeThing", SomeThing().logPrefix())
+
+
+    def test_makeConnection(self):
+        """
+        L{Protocol.makeConnection} sets the given transport on itself, and
+        then calls C{connectionMade}.
+        """
+        result = []
+        class SomeProtocol(Protocol):
+            def connectionMade(self):
+                result.append(self.transport)
+
+        transport = object()
+        protocol = SomeProtocol()
+        protocol.makeConnection(transport)
+        self.assertEqual(result, [transport])
+
+
+
+class FactoryTests(TestCase):
+    """
+    Tests for L{protocol.Factory}.
+    """
+    def test_interfaces(self):
+        """
+        L{Factory} instances provide both L{IProtocolFactory} and
+        L{ILoggingContext}.
+        """
+        factory = Factory()
+        self.assertTrue(verifyObject(IProtocolFactory, factory))
+        self.assertTrue(verifyObject(ILoggingContext, factory))
+
+
+    def test_logPrefix(self):
+        """
+        L{Factory.logPrefix} returns the name of the factory class.
+        """
+        class SomeKindOfFactory(Factory):
+            pass
+
+        self.assertEqual("SomeKindOfFactory", SomeKindOfFactory().logPrefix())
+
+
+    def test_defaultBuildProtocol(self):
+        """
+        L{Factory.buildProtocol} by default constructs a protocol by
+        calling its C{protocol} attribute, and attaches the factory to the
+        result.
+        """
+        class SomeProtocol(Protocol):
+            pass
+        f = Factory()
+        f.protocol = SomeProtocol
+        protocol = f.buildProtocol(None)
+        self.assertIsInstance(protocol, SomeProtocol)
+        self.assertIdentical(protocol.factory, f)
+
+
+
+class AdapterTests(TestCase):
+    """
+    Tests for L{ProtocolToConsumerAdapter} and L{ConsumerToProtocolAdapter}.
+    """
+    def test_protocolToConsumer(self):
+        """
+        L{IProtocol} providers can be adapted to L{IConsumer} providers using
+        L{ProtocolToConsumerAdapter}.
+        """
+        result = []
+        p = Protocol()
+        p.dataReceived = result.append
+        consumer = IConsumer(p)
+        consumer.write(b"hello")
+        self.assertEqual(result, [b"hello"])
+        self.assertIsInstance(consumer, ProtocolToConsumerAdapter)
+
+
+    def test_consumerToProtocol(self):
+        """
+        L{IConsumer} providers can be adapted to L{IProtocol} providers using
+        L{ConsumerToProtocolAdapter}.
+        """
+        result = []
+        @implementer(IConsumer)
+        class Consumer(object):
+            def write(self, d):
+                result.append(d)
+
+        c = Consumer()
+        protocol = IProtocol(c)
+        protocol.dataReceived(b"hello")
+        self.assertEqual(result, [b"hello"])
+        self.assertIsInstance(protocol, ConsumerToProtocolAdapter)
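+
+
+# Sketch of the component-adaptation machinery the adapter tests above rely
+# on: twisted.python.components.registerAdapter() teaches zope.interface how
+# to build a wrapper when an interface is called with an object, which is why
+# IConsumer(protocol) and IProtocol(consumer) work.  The names below are
+# assumptions for illustration, not upstream Twisted classes.
+from zope.interface import Interface, implementer
+from twisted.python.components import registerAdapter
+
+class _ISketchTarget(Interface):
+    def doIt(data):
+        """Do something with the data."""
+
+class _SketchOriginal(object):
+    """A class with no behaviour of its own."""
+
+@implementer(_ISketchTarget)
+class _SketchAdapter(object):
+    def __init__(self, original):
+        self.original = original
+
+    def doIt(self, data):
+        return data
+
+registerAdapter(_SketchAdapter, _SketchOriginal, _ISketchTarget)
+assert isinstance(_ISketchTarget(_SketchOriginal()), _SketchAdapter)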
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_qtreactor.py b/ThirdParty/Twisted/twisted/internet/test/test_qtreactor.py
new file mode 100644
index 0000000..e87b74f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_qtreactor.py
@@ -0,0 +1,35 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+from twisted.trial import unittest
+from twisted.python.runtime import platform
+from twisted.python.util import sibpath
+from twisted.internet.utils import getProcessOutputAndValue
+
+
+skipWindowsNopywin32 = None
+if platform.isWindows():
+    try:
+        import win32process
+    except ImportError:
+        skipWindowsNopywin32 = ("On windows, spawnProcess is not available "
+                                "in the absence of win32process.")
+
+class QtreactorTestCase(unittest.TestCase):
+    """
+    Tests for L{twisted.internet.qtreactor}.
+    """
+    def test_importQtreactor(self):
+        """
+        Attempting to import L{twisted.internet.qtreactor} should raise an
+        C{ImportError} indicating that C{qtreactor} is no longer a part of
+        Twisted.
+        """
+        sys.modules["qtreactor"] = None
+        from twisted.plugins.twisted_qtstub import errorMessage
+        try:
+            import twisted.internet.qtreactor
+        except ImportError as e:
+            self.assertEqual(str(e), errorMessage)
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_serialport.py b/ThirdParty/Twisted/twisted/internet/test/test_serialport.py
new file mode 100644
index 0000000..85b3f3a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_serialport.py
@@ -0,0 +1,72 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.serialport}.
+"""
+
+from twisted.trial import unittest
+from twisted.python.failure import Failure
+from twisted.internet.protocol import Protocol
+from twisted.internet.error import ConnectionDone
+try:
+    from twisted.internet import serialport
+except ImportError:
+    serialport = None
+
+
+
+class DoNothing(object):
+    """
+    Object with methods that do nothing.
+    """
+
+    def __init__(self, *args, **kwargs):
+        pass
+
+
+    def __getattr__(self, attr):
+        return lambda *args, **kwargs: None
+
+
+
+class SerialPortTests(unittest.TestCase):
+    """
+    Minimal testing for Twisted's serial port support.
+
+    See ticket #2462 for the eventual full test suite.
+    """
+
+    if serialport is None:
+        skip = "Serial port support is not available."
+
+
+    def test_connectionMadeLost(self):
+        """
+        C{connectionMade} and C{connectionLost} are called on the protocol by
+        the C{SerialPort}.
+        """
+        # Serial port that doesn't actually connect to anything:
+        class DummySerialPort(serialport.SerialPort):
+            _serialFactory = DoNothing
+
+            def _finishPortSetup(self):
+                pass # override default win32 actions
+
+        events = []
+
+        class SerialProtocol(Protocol):
+            def connectionMade(self):
+                events.append("connectionMade")
+
+            def connectionLost(self, reason):
+                events.append(("connectionLost", reason))
+
+        # Creation of port should result in connectionMade call:
+        port = DummySerialPort(SerialProtocol(), "", reactor=DoNothing())
+        self.assertEqual(events, ["connectionMade"])
+
+        # Simulate reactor calling connectionLost on the SerialPort:
+        f = Failure(ConnectionDone())
+        port.connectionLost(f)
+        self.assertEqual(events, ["connectionMade", ("connectionLost", f)])
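+
+
+# The DoNothing stub above works because __getattr__ hands back a callable
+# that ignores its arguments, so any method the code under test happens to
+# call becomes a silent no-op.  A tiny illustration (the _Stub name is
+# invented, not part of the upstream module):
+class _Stub(object):
+    def __getattr__(self, name):
+        return lambda *args, **kwargs: None
+
+_stub = _Stub()
+_stub.write("ignored")                        # does nothing
+_stub.registerProducer(None, streaming=True)  # likewise a no-op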
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_sigchld.py b/ThirdParty/Twisted/twisted/internet/test/test_sigchld.py
new file mode 100644
index 0000000..68dc286
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_sigchld.py
@@ -0,0 +1,125 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet._sigchld}, an alternate, superior SIGCHLD
+monitoring API.
+"""
+
+from __future__ import division, absolute_import
+
+import os, signal, errno
+
+from twisted.python.runtime import platformType
+from twisted.python.log import msg
+from twisted.trial.unittest import SynchronousTestCase
+if platformType == "posix":
+    from twisted.internet.fdesc import setNonBlocking
+    from twisted.internet._signals import installHandler, isDefaultHandler
+else:
+    skip = "These tests can only run on POSIX platforms."
+
+
+class SetWakeupSIGCHLDTests(SynchronousTestCase):
+    """
+    Tests for the L{signal.set_wakeup_fd} implementation of the
+    L{installHandler} and L{isDefaultHandler} APIs.
+    """
+
+    def pipe(self):
+        """
+        Create a non-blocking pipe which will be closed after the currently
+        running test.
+        """
+        read, write = os.pipe()
+        self.addCleanup(os.close, read)
+        self.addCleanup(os.close, write)
+        setNonBlocking(read)
+        setNonBlocking(write)
+        return read, write
+
+
+    def setUp(self):
+        """
+        Save the current SIGCHLD handler as reported by L{signal.signal} and
+        the current file descriptor registered with L{installHandler}.
+        """
+        handler = signal.getsignal(signal.SIGCHLD)
+        if handler != signal.SIG_DFL:
+            self.signalModuleHandler = handler
+            signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+        else:
+            self.signalModuleHandler = None
+
+        self.oldFD = installHandler(-1)
+
+        if self.signalModuleHandler is not None and self.oldFD != -1:
+            msg("Previous test didn't clean up after its SIGCHLD setup: %r %r"
+                % (self.signalModuleHandler, self.oldFD))
+
+
+    def tearDown(self):
+        """
+        Restore whatever signal handler was present when setUp ran.
+        """
+        # If tests set up any kind of handlers, clear them out.
+        installHandler(-1)
+        signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+
+        # Now restore whatever the setup was before the test ran.
+        if self.signalModuleHandler is not None:
+            signal.signal(signal.SIGCHLD, self.signalModuleHandler)
+        elif self.oldFD != -1:
+            installHandler(self.oldFD)
+
+
+    def test_isDefaultHandler(self):
+        """
+        L{isDefaultHandler} returns true if the SIGCHLD handler is SIG_DFL,
+        false otherwise.
+        """
+        self.assertTrue(isDefaultHandler())
+        signal.signal(signal.SIGCHLD, signal.SIG_IGN)
+        self.assertFalse(isDefaultHandler())
+        signal.signal(signal.SIGCHLD, signal.SIG_DFL)
+        self.assertTrue(isDefaultHandler())
+        signal.signal(signal.SIGCHLD, lambda *args: None)
+        self.assertFalse(isDefaultHandler())
+
+
+    def test_returnOldFD(self):
+        """
+        L{installHandler} returns the previously registered file descriptor.
+        """
+        read, write = self.pipe()
+        oldFD = installHandler(write)
+        self.assertEqual(installHandler(oldFD), write)
+
+
+    def test_uninstallHandler(self):
+        """
+        C{installHandler(-1)} removes the SIGCHLD handler completely.
+        """
+        read, write = self.pipe()
+        self.assertTrue(isDefaultHandler())
+        installHandler(write)
+        self.assertFalse(isDefaultHandler())
+        installHandler(-1)
+        self.assertTrue(isDefaultHandler())
+
+
+    def test_installHandler(self):
+        """
+        The file descriptor passed to L{installHandler} has a byte written to
+        it when SIGCHLD is delivered to the process.
+        """
+        read, write = self.pipe()
+        installHandler(write)
+
+        exc = self.assertRaises(OSError, os.read, read, 1)
+        self.assertEqual(exc.errno, errno.EAGAIN)
+
+        os.kill(os.getpid(), signal.SIGCHLD)
+
+        self.assertEqual(len(os.read(read, 5)), 1)
+
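+
+# A standard-library sketch of the wakeup-fd facility that installHandler
+# builds on: signal.set_wakeup_fd() makes the interpreter write a byte to the
+# given descriptor whenever a handled signal arrives.  This illustrates the
+# underlying mechanism only; it is not the Twisted implementation, and the
+# helper name is invented.
+def _sketchWakeupFd():
+    import fcntl
+    r, w = os.pipe()
+    for fd in (r, w):
+        flags = fcntl.fcntl(fd, fcntl.F_GETFL)
+        fcntl.fcntl(fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
+    # A Python-level handler must be installed for the byte to be written.
+    old = signal.signal(signal.SIGCHLD, lambda *args: None)
+    signal.set_wakeup_fd(w)
+    try:
+        os.kill(os.getpid(), signal.SIGCHLD)
+        return os.read(r, 16)       # typically one byte per delivered signal
+    finally:
+        signal.set_wakeup_fd(-1)
+        signal.signal(signal.SIGCHLD, old)
+        os.close(r)
+        os.close(w)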
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_socket.py b/ThirdParty/Twisted/twisted/internet/test/test_socket.py
new file mode 100644
index 0000000..b31eb91
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_socket.py
@@ -0,0 +1,128 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorSocket}.
+"""
+
+import errno, socket
+
+from twisted.python.log import err
+from twisted.internet.interfaces import IReactorSocket
+from twisted.internet.error import UnsupportedAddressFamily
+from twisted.internet.protocol import ServerFactory
+from twisted.internet.test.reactormixins import (
+    ReactorBuilder, needsRunningReactor)
+
+
+class AdoptStreamPortErrorsTestsBuilder(ReactorBuilder):
+    """
+    Builder for testing L{IReactorSocket.adoptStreamPort} implementations.
+
+    Generally only tests for failure cases are found here.  Success cases for
+    this interface are tested elsewhere.  For example, the success case for
+    I{AF_INET} is in L{twisted.internet.test.test_tcp}, since that case should
+    behave exactly the same as L{IReactorTCP.listenTCP}.
+    """
+    requiredInterfaces = [IReactorSocket]
+
+    def test_invalidDescriptor(self):
+        """
+        An implementation of L{IReactorSocket.adoptStreamPort} raises
+        L{socket.error} if passed an integer which is not associated with a
+        socket.
+        """
+        reactor = self.buildReactor()
+
+        probe = socket.socket()
+        fileno = probe.fileno()
+        probe.close()
+
+        exc = self.assertRaises(
+            socket.error,
+            reactor.adoptStreamPort, fileno, socket.AF_INET, ServerFactory())
+        self.assertEqual(exc.args[0], errno.EBADF)
+
+
+    def test_invalidAddressFamily(self):
+        """
+        An implementation of L{IReactorSocket.adoptStreamPort} raises
+        L{UnsupportedAddressFamily} if passed an address family it does not
+        support.
+        """
+        reactor = self.buildReactor()
+
+        port = socket.socket()
+        port.listen(1)
+        self.addCleanup(port.close)
+
+        arbitrary = 2 ** 16 + 7
+
+        self.assertRaises(
+            UnsupportedAddressFamily,
+            reactor.adoptStreamPort, port.fileno(), arbitrary, ServerFactory())
+
+
+    def test_stopOnlyCloses(self):
+        """
+        When the L{IListeningPort} returned by L{IReactorSocket.adoptStreamPort}
+        is stopped using C{stopListening}, the underlying socket is closed but
+        not shutdown.  This allows another process which still has a reference
+        to it to continue accepting connections over it.
+        """
+        reactor = self.buildReactor()
+
+        portSocket = socket.socket()
+        self.addCleanup(portSocket.close)
+
+        portSocket.listen(1)
+        portSocket.setblocking(False)
+
+        # The file descriptor is duplicated by adoptStreamPort
+        port = reactor.adoptStreamPort(
+            portSocket.fileno(), portSocket.family, ServerFactory())
+        d = port.stopListening()
+        def stopped(ignored):
+            # It should still be possible to accept a connection on
+            # portSocket.  If it had been shut down, the exception would be
+            # EINVAL instead.
+            exc = self.assertRaises(socket.error, portSocket.accept)
+            self.assertEqual(exc.args[0], errno.EAGAIN)
+        d.addCallback(stopped)
+        d.addErrback(err, "Failed to accept on original port.")
+
+        needsRunningReactor(
+            reactor,
+            lambda: d.addCallback(lambda ignored: reactor.stop()))
+
+        reactor.run()
+
+
+
+class AdoptStreamConnectionErrorsTestsBuilder(ReactorBuilder):
+    """
+    Builder for testing L{IReactorSocket.adoptStreamConnection}
+    implementations.
+
+    Generally only tests for failure cases are found here.  Success cases for
+    this interface are tested elsewhere.  For example, the success case for
+    I{AF_INET} is in L{twisted.internet.test.test_tcp}, since that case should
+    behave exactly the same as L{IReactorTCP.listenTCP}.
+    """
+    requiredInterfaces = [IReactorSocket]
+
+    def test_invalidAddressFamily(self):
+        """
+        An implementation of L{IReactorSocket.adoptStreamConnection} raises
+        L{UnsupportedAddressFamily} if passed an address family it does not
+        support.
+        """
+        reactor = self.buildReactor()
+
+        connection = socket.socket()
+        self.addCleanup(connection.close)
+
+        arbitrary = 2 ** 16 + 7
+
+        self.assertRaises(
+            UnsupportedAddressFamily,
+            reactor.adoptStreamConnection, connection.fileno(), arbitrary,
+            ServerFactory())
+
+
+
+globals().update(AdoptStreamPortErrorsTestsBuilder.makeTestCaseClasses())
+globals().update(AdoptStreamConnectionErrorsTestsBuilder.makeTestCaseClasses())
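+
+
+# Sketch of the descriptor-duplication semantics behind adoptStreamPort (the
+# adoptStreamPort tests above note that the reactor duplicates the file
+# descriptor): a listening socket keeps accepting after a dup()ed copy of it
+# is closed, which is why stopping an adopted port only closes, and never
+# shuts down, its own copy.  The helper below is an illustration, not part of
+# the upstream module.
+def _sketchDupKeepsListening():
+    import os
+    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    listener.bind(('127.0.0.1', 0))
+    listener.listen(1)
+    duplicate = os.dup(listener.fileno())   # adoption works on a duplicate
+    os.close(duplicate)                     # "stopListening" on that copy
+    # The original socket is unaffected and can still accept a connection.
+    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    client.connect(listener.getsockname())
+    accepted, _ = listener.accept()
+    for s in (client, accepted, listener):
+        s.close()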
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_stdio.py b/ThirdParty/Twisted/twisted/internet/test/test_stdio.py
new file mode 100644
index 0000000..4163e41
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_stdio.py
@@ -0,0 +1,195 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.stdio}.
+"""
+
+from twisted.python.runtime import platform
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.internet.protocol import Protocol
+if not platform.isWindows():
+    from twisted.internet._posixstdio import StandardIO
+
+
+
+class StdioFilesTests(ReactorBuilder):
+    """
+    L{StandardIO} supports reading and writing to filesystem files.
+    """
+
+    def setUp(self):
+        path = self.mktemp()
+        file(path, "w").close()
+        self.extraFile = file(path, "r+")
+
+
+    def test_addReader(self):
+        """
+        Adding a filesystem file reader to a reactor will make sure it is
+        polled.
+        """
+        reactor = self.buildReactor()
+
+        class DataProtocol(Protocol):
+            data = ""
+            def dataReceived(self, data):
+                self.data += data
+                # It'd be better to stop reactor on connectionLost, but that
+                # fails on FreeBSD, probably due to
+                # http://bugs.python.org/issue9591:
+                if self.data == "hello!":
+                    reactor.stop()
+
+        path = self.mktemp()
+        f = file(path, "w")
+        f.write("hello!")
+        f.close()
+        f = file(path, "r")
+
+        # Read bytes from a file, deliver them to a protocol instance:
+        protocol = DataProtocol()
+        StandardIO(protocol, stdin=f.fileno(),
+                   stdout=self.extraFile.fileno(),
+                   reactor=reactor)
+
+        self.runReactor(reactor)
+        self.assertEqual(protocol.data, "hello!")
+
+
+    def test_addWriter(self):
+        """
+        Adding a filesystem file writer to a reactor will make sure it is
+        polled.
+        """
+        reactor = self.buildReactor()
+
+        class DisconnectProtocol(Protocol):
+            def connectionLost(self, reason):
+                reactor.stop()
+
+        path = self.mktemp()
+        f = file(path, "w")
+
+        # Write bytes to a transport, hopefully have them written to a file:
+        protocol = DisconnectProtocol()
+        StandardIO(protocol, stdout=f.fileno(),
+                   stdin=self.extraFile.fileno(), reactor=reactor)
+        protocol.transport.write("hello")
+        protocol.transport.write(", world")
+        protocol.transport.loseConnection()
+
+        self.runReactor(reactor)
+        f.close()
+        f = file(path, "r")
+        self.assertEqual(f.read(), "hello, world")
+        f.close()
+
+
+    def test_removeReader(self):
+        """
+        Removing a filesystem file reader from a reactor will make sure it is
+        no longer polled.
+        """
+        reactor = self.buildReactor()
+        self.addCleanup(self.unbuildReactor, reactor)
+
+        path = self.mktemp()
+        file(path, "w").close()
+        # Cleanup might fail if file is GCed too soon:
+        self.f = f = file(path, "r")
+
+        # Have the reader added:
+        stdio = StandardIO(Protocol(), stdin=f.fileno(),
+                           stdout=self.extraFile.fileno(),
+                           reactor=reactor)
+        self.assertIn(stdio._reader, reactor.getReaders())
+        stdio._reader.stopReading()
+        self.assertNotIn(stdio._reader, reactor.getReaders())
+
+
+    def test_removeWriter(self):
+        """
+        Removing a filesystem file writer from a reactor will make sure it is
+        no longer polled.
+        """
+        reactor = self.buildReactor()
+        self.addCleanup(self.unbuildReactor, reactor)
+
+        # Cleanup might fail if file is GCed too soon:
+        self.f = f = file(self.mktemp(), "w")
+
+        # Have the reader added:
+        protocol = Protocol()
+        stdio = StandardIO(protocol, stdout=f.fileno(),
+                           stdin=self.extraFile.fileno(),
+                           reactor=reactor)
+        protocol.transport.write("hello")
+        self.assertIn(stdio._writer, reactor.getWriters())
+        stdio._writer.stopWriting()
+        self.assertNotIn(stdio._writer, reactor.getWriters())
+
+
+    def test_removeAll(self):
+        """
+        The list returned by C{removeAll} on a reactor includes descriptors
+        that are filesystem files.
+        """
+        reactor = self.buildReactor()
+        self.addCleanup(self.unbuildReactor, reactor)
+
+        path = self.mktemp()
+        file(path, "w").close()
+        # Cleanup might fail if file is GCed too soon:
+        self.f = f = file(path, "r")
+
+        # Have the reader added:
+        stdio = StandardIO(Protocol(), stdin=f.fileno(),
+                           stdout=self.extraFile.fileno(), reactor=reactor)
+        # And then removed:
+        removed = reactor.removeAll()
+        self.assertIn(stdio._reader, removed)
+        self.assertNotIn(stdio._reader, reactor.getReaders())
+
+
+    def test_getReaders(self):
+        """
+        C{reactor.getReaders} includes descriptors that are filesystem files.
+        """
+        reactor = self.buildReactor()
+        self.addCleanup(self.unbuildReactor, reactor)
+
+        path = self.mktemp()
+        file(path, "w").close()
+        # Cleanup might fail if file is GCed too soon:
+        self.f = f = file(path, "r")
+
+        # Have the reader added:
+        stdio = StandardIO(Protocol(), stdin=f.fileno(),
+                           stdout=self.extraFile.fileno(), reactor=reactor)
+        self.assertIn(stdio._reader, reactor.getReaders())
+
+
+    def test_getWriters(self):
+        """
+        C{reactor.getWriters} includes descriptors that are filesystem files.
+        """
+        reactor = self.buildReactor()
+        self.addCleanup(self.unbuildReactor, reactor)
+
+        # Cleanup might fail if file is GCed too soon:
+        self.f = f = file(self.mktemp(), "w")
+
+        # Have the reader added:
+        stdio = StandardIO(Protocol(), stdout=f.fileno(),
+                           stdin=self.extraFile.fileno(), reactor=reactor)
+        self.assertNotIn(stdio._writer, reactor.getWriters())
+        stdio._writer.startWriting()
+        self.assertIn(stdio._writer, reactor.getWriters())
+
+    if platform.isWindows():
+        skip = ("StandardIO does not accept stdout as an argument to Windows.  "
+                "Testing redirection to a file is therefore harder.")
+
+
+globals().update(StdioFilesTests.makeTestCaseClasses())
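+
+
+# The globals().update(...) idiom above publishes the reactor-specific
+# TestCase subclasses produced by makeTestCaseClasses() as module attributes
+# so the test loader can discover them.  A generic sketch of the pattern (the
+# names here are invented, not the ReactorBuilder implementation):
+def _sketchMakeCases(reactorNames):
+    import unittest
+    cases = {}
+    for name in reactorNames:
+        cls = type('GeneratedTests' + name.capitalize(),
+                   (unittest.TestCase,),
+                   {'test_placeholder': lambda self: None})
+        cases[cls.__name__] = cls
+    return cases
+
+# e.g. globals().update(_sketchMakeCases(['select', 'poll']))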
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_tcp.py b/ThirdParty/Twisted/twisted/internet/test/test_tcp.py
new file mode 100644
index 0000000..f73e721
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_tcp.py
@@ -0,0 +1,2092 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorTCP} and the TCP parts of
+L{IReactorSocket}.
+"""
+
+from __future__ import division, absolute_import
+
+__metaclass__ = type
+
+import socket, errno
+
+from zope.interface import implementer
+
+from twisted.python.compat import _PY3
+from twisted.python.runtime import platform
+from twisted.python.failure import Failure
+from twisted.python import log
+
+from twisted.trial.unittest import SkipTest, TestCase
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.internet.error import (
+    ConnectionLost, UserError, ConnectionRefusedError, ConnectionDone,
+    ConnectionAborted)
+from twisted.internet.interfaces import (
+    ILoggingContext, IConnector, IReactorFDSet, IReactorSocket, IReactorTCP)
+from twisted.internet.address import IPv4Address, IPv6Address
+from twisted.internet.defer import (
+    Deferred, DeferredList, maybeDeferred, gatherResults)
+from twisted.internet._endpointspy3 import (
+    TCP4ServerEndpoint, TCP4ClientEndpoint)
+from twisted.internet.protocol import ServerFactory, ClientFactory, Protocol
+from twisted.internet.interfaces import (
+    IPushProducer, IPullProducer, IHalfCloseableProtocol)
+from twisted.internet.tcp import Connection, Server, _resolveIPv6
+
+from twisted.internet.test.connectionmixins import (
+    LogObserverMixin, ConnectionTestsMixin, TCPClientTestsMixin, findFreePort,
+    ConnectableProtocol, EndpointCreator, runProtocolsWithReactor)
+from twisted.internet.test.test_core import ObjectModelIntegrationMixin
+from twisted.test.test_tcp import MyClientFactory, MyServerFactory
+from twisted.test.test_tcp import ClosingFactory, ClientStartStopFactory
+
+try:
+    from OpenSSL import SSL
+except ImportError:
+    useSSL = False
+else:
+    from twisted.internet.ssl import ClientContextFactory
+    useSSL = True
+
+try:
+    socket.socket(socket.AF_INET6, socket.SOCK_STREAM).close()
+except socket.error as e:
+    ipv6Skip = str(e)
+else:
+    ipv6Skip = None
+
+
+
+if platform.isWindows():
+    from twisted.internet.test import _win32ifaces
+    getLinkLocalIPv6Addresses = _win32ifaces.win32GetLinkLocalIPv6Addresses
+else:
+    try:
+        from twisted.internet.test import _posixifaces
+    except ImportError:
+        getLinkLocalIPv6Addresses = lambda: []
+    else:
+        getLinkLocalIPv6Addresses = _posixifaces.posixGetLinkLocalIPv6Addresses
+
+
+
+def getLinkLocalIPv6Address():
+    """
+    Find and return a configured link local IPv6 address including a scope
+    identifier using the % separation syntax.  If the system has no link local
+    IPv6 addresses, raise L{SkipTest} instead.
+
+    @raise SkipTest: if no link local address can be found or if the
+        C{netifaces} module is not available.
+
+    @return: a C{str} giving the address
+    """
+    addresses = getLinkLocalIPv6Addresses()
+    if addresses:
+        return addresses[0]
+    raise SkipTest("Link local IPv6 address unavailable")
+
+
+
+def connect(client, destination):
+    """
+    Connect a socket to the given destination.
+
+    @param client: A C{socket.socket}.
+
+    @param destination: A tuple of (host, port).  The host is a C{str}, the
+        port an C{int}.  If C{host} is an IPv6 address literal, it is
+        resolved using C{getaddrinfo} and the first result is used.
+    """
+    (host, port) = destination
+    if '%' in host or ':' in host:
+        address = socket.getaddrinfo(host, port)[0][4]
+    else:
+        address = (host, port)
+    client.connect(address)
+
+
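+# Sketch of what the getaddrinfo() branch above returns for IPv6: the
+# sockaddr is a four-tuple whose last element is the numeric scope id needed
+# for link-local addresses written with the '%' syntax.  '::1' is used here
+# only as an example literal; the helper name is invented.
+def _sketchIPv6Sockaddr():
+    info = socket.getaddrinfo('::1', 80, socket.AF_INET6, socket.SOCK_STREAM)
+    family, socktype, proto, canonname, sockaddr = info[0]
+    host, port, flowinfo, scopeID = sockaddr    # 4-tuple for AF_INET6
+    return scopeID
+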
+
+class FakeSocket(object):
+    """
+    A fake for L{socket.socket} objects.
+
+    @ivar data: A C{str} giving the data which will be returned from
+        L{FakeSocket.recv}.
+
+    @ivar sendBuffer: A C{list} of the objects passed to L{FakeSocket.send}.
+    """
+    def __init__(self, data):
+        self.data = data
+        self.sendBuffer = []
+
+    def setblocking(self, blocking):
+        self.blocking = blocking
+
+    def recv(self, size):
+        return self.data
+
+    def send(self, bytes):
+        """
+        I{Send} all of C{bytes} by accumulating it into C{self.sendBuffer}.
+
+        @return: The length of C{bytes}, indicating all the data has been
+            accepted.
+        """
+        self.sendBuffer.append(bytes)
+        return len(bytes)
+
+
+    def shutdown(self, how):
+        """
+        Shutdown is not implemented.  The method is provided since real sockets
+        have it and some code expects it.  No behavior of L{FakeSocket} is
+        affected by a call to it.
+        """
+
+
+    def close(self):
+        """
+        Close is not implemented.  The method is provided since real sockets
+        have it and some code expects it.  No behavior of L{FakeSocket} is
+        affected by a call to it.
+        """
+
+
+    def setsockopt(self, *args):
+        """
+        Setsockopt is not implemented.  The method is provided since
+        real sockets have it and some code expects it.  No behavior of
+        L{FakeSocket} is affected by a call to it.
+        """
+
+
+    def fileno(self):
+        """
+        Return a fake file descriptor.  If actually used, this will have no
+        connection to this L{FakeSocket} and will probably cause surprising
+        results.
+        """
+        return 1
+
+
+
+class TestFakeSocket(TestCase):
+    """
+    Test that the FakeSocket can be used by the doRead method of L{Connection}
+    """
+
+    def test_blocking(self):
+        skt = FakeSocket(b"someData")
+        skt.setblocking(0)
+        self.assertEqual(skt.blocking, 0)
+
+
+    def test_recv(self):
+        skt = FakeSocket(b"someData")
+        self.assertEqual(skt.recv(10), b"someData")
+
+
+    def test_send(self):
+        """
+        L{FakeSocket.send} accepts the entire string passed to it, adds it to
+        its send buffer, and returns its length.
+        """
+        skt = FakeSocket(b"")
+        count = skt.send(b"foo")
+        self.assertEqual(count, 3)
+        self.assertEqual(skt.sendBuffer, [b"foo"])
+
+
+
+class FakeProtocol(Protocol):
+    """
+    An L{IProtocol} that returns a value from its dataReceived method.
+    """
+    def dataReceived(self, data):
+        """
+        Return something other than C{None} to trigger a deprecation warning for
+        that behavior.
+        """
+        return ()
+
+
+
+@implementer(IReactorFDSet)
+class _FakeFDSetReactor(object):
+    """
+    A no-op implementation of L{IReactorFDSet}, which ignores all adds and
+    removes.
+    """
+
+    addReader = addWriter = removeReader = removeWriter = (
+        lambda self, desc: None)
+
+
+
+class TCPServerTests(TestCase):
+    """
+    Whitebox tests for L{twisted.internet.tcp.Server}.
+    """
+    def setUp(self):
+        self.reactor = _FakeFDSetReactor()
+        class FakePort(object):
+            _realPortNumber = 3
+        self.skt = FakeSocket(b"")
+        self.protocol = Protocol()
+        self.server = Server(
+            self.skt, self.protocol, ("", 0), FakePort(), None, self.reactor)
+
+
+    def test_writeAfterDisconnect(self):
+        """
+        L{Server.write} discards bytes passed to it if called after it has lost
+        its connection.
+        """
+        self.server.connectionLost(
+            Failure(Exception("Simulated lost connection")))
+        self.server.write(b"hello world")
+        self.assertEqual(self.skt.sendBuffer, [])
+
+
+    def test_writeAfterDisconnectAfterTLS(self):
+        """
+        L{Server.write} discards bytes passed to it if called after it has lost
+        its connection when the connection had started TLS.
+        """
+        self.server.TLS = True
+        self.test_writeAfterDisconnect()
+
+
+    def test_writeSequenceAfterDisconnect(self):
+        """
+        L{Server.writeSequence} discards bytes passed to it if called after it
+        has lost its connection.
+        """
+        self.server.connectionLost(
+            Failure(Exception("Simulated lost connection")))
+        self.server.writeSequence([b"hello world"])
+        self.assertEqual(self.skt.sendBuffer, [])
+
+
+    def test_writeSequenceAfterDisconnectAfterTLS(self):
+        """
+        L{Server.writeSequence} discards bytes passed to it if called after it
+        has lost its connection when the connection had started TLS.
+        """
+        self.server.TLS = True
+        self.test_writeSequenceAfterDisconnect()
+
+
+
+class TCPConnectionTests(TestCase):
+    """
+    Whitebox tests for L{twisted.internet.tcp.Connection}.
+    """
+    def test_doReadWarningIsRaised(self):
+        """
+        When an L{IProtocol} implementation returns a value from its
+        C{dataReceived} method, a deprecation warning is emitted.
+        """
+        skt = FakeSocket(b"someData")
+        protocol = FakeProtocol()
+        conn = Connection(skt, protocol)
+        conn.doRead()
+        warnings = self.flushWarnings([FakeProtocol.dataReceived])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]["message"],
+            "Returning a value other than None from "
+            "twisted.internet.test.test_tcp.FakeProtocol.dataReceived "
+            "is deprecated since Twisted 11.0.0.")
+        self.assertEqual(len(warnings), 1)
+
+
+    def test_noTLSBeforeStartTLS(self):
+        """
+        The C{TLS} attribute of a L{Connection} instance is C{False} before
+        L{Connection.startTLS} is called.
+        """
+        skt = FakeSocket(b"")
+        protocol = FakeProtocol()
+        conn = Connection(skt, protocol)
+        self.assertFalse(conn.TLS)
+
+
+    def test_tlsAfterStartTLS(self):
+        """
+        The C{TLS} attribute of a L{Connection} instance is C{True} after
+        L{Connection.startTLS} is called.
+        """
+        skt = FakeSocket(b"")
+        protocol = FakeProtocol()
+        conn = Connection(skt, protocol, reactor=_FakeFDSetReactor())
+        conn._tlsClientDefault = True
+        conn.startTLS(ClientContextFactory(), True)
+        self.assertTrue(conn.TLS)
+    if not useSSL:
+        test_tlsAfterStartTLS.skip = "No SSL support available"
+
+
+
+class TCPCreator(EndpointCreator):
+    """
+    Create IPv4 TCP endpoints for L{runProtocolsWithReactor}-based tests.
+    """
+
+    interface = "127.0.0.1"
+
+    def server(self, reactor):
+        """
+        Create a server-side TCP endpoint.
+        """
+        return TCP4ServerEndpoint(reactor, 0, interface=self.interface)
+
+
+    def client(self, reactor, serverAddress):
+        """
+        Create a client end point that will connect to the given address.
+
+        @type serverAddress: L{IPv4Address}
+        """
+        return TCP4ClientEndpoint(reactor, self.interface, serverAddress.port)
+
+
+
+class TCP6Creator(TCPCreator):
+    """
+    Create IPv6 TCP endpoints for
+    C{ReactorBuilder.runProtocolsWithReactor}-based tests.
+
+    The endpoint types in question here are still the TCP4 variety, since
+    these simply pass through IPv6 address literals to the reactor, and we are
+    only testing address literals, not name resolution (as name resolution has
+    not yet been implemented).  See http://twistedmatrix.com/trac/ticket/4470
+    for more specific information about new endpoint classes.  The naming is
+    slightly misleading, but presumably if you're passing an IPv6 literal, you
+    know what you're asking for.
+    """
+    def __init__(self):
+        self.interface = getLinkLocalIPv6Address()
+
+
+
+class TCPClientTestsBase(ReactorBuilder, ConnectionTestsMixin,
+                         TCPClientTestsMixin):
+    """
+    Base class for builders defining tests related to L{IReactorTCP.connectTCP}.
+    """
+    requiredInterfaces = (IReactorTCP,)
+
+    port = 1234
+
+    @property
+    def interface(self):
+        """
+        Return the interface attribute from the endpoints object.
+        """
+        return self.endpoints.interface
+
+
+
+class TCP4ClientTestsBuilder(TCPClientTestsBase):
+    """
+    Builder configured with IPv4 parameters for tests related to
+    L{IReactorTCP.connectTCP}.
+    """
+    fakeDomainName = 'some-fake.domain.example.com'
+    family = socket.AF_INET
+    addressClass = IPv4Address
+
+    endpoints = TCPCreator()
+
+
+
+class TCP6ClientTestsBuilder(TCPClientTestsBase):
+    """
+    Builder configured with IPv6 parameters for tests related to
+    L{IReactorTCP.connectTCP}.
+    """
+    if ipv6Skip:
+        skip = ipv6Skip
+
+    family = socket.AF_INET6
+    addressClass = IPv6Address
+
+    def setUp(self):
+        # Only create this object here, so that it won't be created if tests
+        # are being skipped:
+        self.endpoints = TCP6Creator()
+        # This is used by test_addresses to test the distinction between the
+        # resolved name and the name on the socket itself.  All the same
+        # invariants should hold, but giving back an IPv6 address from a
+        # resolver is not something the reactor can handle, so instead, we make
+        # it so that the connect call for the IPv6 address test simply uses an
+        # address literal.
+        self.fakeDomainName = self.endpoints.interface
+
+
+
+class TCPConnectorTestsBuilder(ReactorBuilder):
+    """
+    Tests for the L{IConnector} provider returned by L{IReactorTCP.connectTCP}.
+    """
+    requiredInterfaces = (IReactorTCP,)
+
+    def test_connectorIdentity(self):
+        """
+        L{IReactorTCP.connectTCP} returns an object which provides
+        L{IConnector}.  The destination of the connector is the address which
+        was passed to C{connectTCP}.  The same connector object is passed to
+        the factory's C{startedConnecting} method as to the factory's
+        C{clientConnectionLost} method.
+        """
+        serverFactory = ClosingFactory()
+        reactor = self.buildReactor()
+        tcpPort = reactor.listenTCP(0, serverFactory, interface=self.interface)
+        serverFactory.port = tcpPort
+        portNumber = tcpPort.getHost().port
+
+        seenConnectors = []
+        seenFailures = []
+
+        clientFactory = ClientStartStopFactory()
+        clientFactory.clientConnectionLost = (
+            lambda connector, reason: (seenConnectors.append(connector),
+                                       seenFailures.append(reason)))
+        clientFactory.startedConnecting = seenConnectors.append
+
+        connector = reactor.connectTCP(self.interface, portNumber,
+                                       clientFactory)
+        self.assertTrue(IConnector.providedBy(connector))
+        dest = connector.getDestination()
+        self.assertEqual(dest.type, "TCP")
+        self.assertEqual(dest.host, self.interface)
+        self.assertEqual(dest.port, portNumber)
+
+        clientFactory.whenStopped.addBoth(lambda _: reactor.stop())
+
+        self.runReactor(reactor)
+
+        seenFailures[0].trap(ConnectionDone)
+        self.assertEqual(seenConnectors, [connector, connector])
+
+
+    def test_userFail(self):
+        """
+        Calling L{IConnector.stopConnecting} in C{Factory.startedConnecting}
+        results in C{Factory.clientConnectionFailed} being called with
+        L{error.UserError} as the reason.
+        """
+        serverFactory = MyServerFactory()
+        reactor = self.buildReactor()
+        tcpPort = reactor.listenTCP(0, serverFactory, interface=self.interface)
+        portNumber = tcpPort.getHost().port
+
+        fatalErrors = []
+
+        def startedConnecting(connector):
+            try:
+                connector.stopConnecting()
+            except Exception:
+                fatalErrors.append(Failure())
+                reactor.stop()
+
+        clientFactory = ClientStartStopFactory()
+        clientFactory.startedConnecting = startedConnecting
+
+        clientFactory.whenStopped.addBoth(lambda _: reactor.stop())
+
+        reactor.callWhenRunning(lambda: reactor.connectTCP(self.interface,
+                                                           portNumber,
+                                                           clientFactory))
+
+        self.runReactor(reactor)
+
+        if fatalErrors:
+            self.fail(fatalErrors[0].getTraceback())
+        clientFactory.reason.trap(UserError)
+        self.assertEqual(clientFactory.failed, 1)
+
+
+    def test_reconnect(self):
+        """
+        Calling L{IConnector.connect} in C{Factory.clientConnectionLost} causes
+        a new connection attempt to be made.
+        """
+        serverFactory = ClosingFactory()
+        reactor = self.buildReactor()
+        tcpPort = reactor.listenTCP(0, serverFactory, interface=self.interface)
+        serverFactory.port = tcpPort
+        portNumber = tcpPort.getHost().port
+
+        clientFactory = MyClientFactory()
+
+        def clientConnectionLost(connector, reason):
+            connector.connect()
+        clientFactory.clientConnectionLost = clientConnectionLost
+        reactor.connectTCP(self.interface, portNumber, clientFactory)
+
+        protocolMadeAndClosed = []
+        def reconnectFailed(ignored):
+            p = clientFactory.protocol
+            protocolMadeAndClosed.append((p.made, p.closed))
+            reactor.stop()
+
+        clientFactory.failDeferred.addCallback(reconnectFailed)
+
+        self.runReactor(reactor)
+
+        clientFactory.reason.trap(ConnectionRefusedError)
+        self.assertEqual(protocolMadeAndClosed, [(1, 1)])
+
+
+
+class TCP4ConnectorTestsBuilder(TCPConnectorTestsBuilder):
+    interface = '127.0.0.1'
+    family = socket.AF_INET
+    addressClass = IPv4Address
+
+
+
+class TCP6ConnectorTestsBuilder(TCPConnectorTestsBuilder):
+    family = socket.AF_INET6
+    addressClass = IPv6Address
+
+    if ipv6Skip:
+        skip = ipv6Skip
+
+    def setUp(self):
+        self.interface = getLinkLocalIPv6Address()
+
+
+
+def createTestSocket(test, addressFamily, socketType):
+    """
+    Create a socket for the duration of the given test.
+
+    @param test: the test to add cleanup to.
+
+    @param addressFamily: an C{AF_*} constant
+
+    @param socketType: a C{SOCK_*} constant.
+
+    @return: a socket object.
+    """
+    skt = socket.socket(addressFamily, socketType)
+    test.addCleanup(skt.close)
+    return skt
+
+
+
+class StreamTransportTestsMixin(LogObserverMixin):
+    """
+    Mixin defining tests which apply to any port/connection based transport.
+    """
+    def test_startedListeningLogMessage(self):
+        """
+        When a port starts, a message including a description of the associated
+        factory is logged.
+        """
+        loggedMessages = self.observe()
+        reactor = self.buildReactor()
+
+        @implementer(ILoggingContext)
+        class SomeFactory(ServerFactory):
+            def logPrefix(self):
+                return "Crazy Factory"
+
+        factory = SomeFactory()
+        p = self.getListeningPort(reactor, factory)
+        expectedMessage = self.getExpectedStartListeningLogMessage(
+            p, "Crazy Factory")
+        self.assertEqual((expectedMessage,), loggedMessages[0]['message'])
+
+
+    def test_connectionLostLogMsg(self):
+        """
+        When a connection is lost, an informative message should be logged
+        (see L{getExpectedConnectionLostLogMsg}): an address identifying
+        the port and the fact that it was closed.
+        """
+
+        loggedMessages = []
+        def logConnectionLostMsg(eventDict):
+            loggedMessages.append(log.textFromEventDict(eventDict))
+
+        reactor = self.buildReactor()
+        p = self.getListeningPort(reactor, ServerFactory())
+        expectedMessage = self.getExpectedConnectionLostLogMsg(p)
+        log.addObserver(logConnectionLostMsg)
+
+        def stopReactor(ignored):
+            log.removeObserver(logConnectionLostMsg)
+            reactor.stop()
+
+        def doStopListening():
+            log.addObserver(logConnectionLostMsg)
+            maybeDeferred(p.stopListening).addCallback(stopReactor)
+
+        reactor.callWhenRunning(doStopListening)
+        reactor.run()
+
+        self.assertIn(expectedMessage, loggedMessages)
+
+
+    def test_allNewStyle(self):
+        """
+        The L{IListeningPort} object is an instance of a class with no
+        classic classes in its hierarchy.
+        """
+        reactor = self.buildReactor()
+        port = self.getListeningPort(reactor, ServerFactory())
+        self.assertFullyNewStyle(port)
+
+
+class ListenTCPMixin(object):
+    """
+    Mixin which uses L{IReactorTCP.listenTCP} to hand out listening TCP ports.
+    """
+    def getListeningPort(self, reactor, factory, port=0, interface=''):
+        """
+        Get a TCP port from a reactor.
+        """
+        return reactor.listenTCP(port, factory, interface=interface)
+
+
+
+class SocketTCPMixin(object):
+    """
+    Mixin which uses L{IReactorSocket.adoptStreamPort} to hand out listening TCP
+    ports.
+    """
+    def getListeningPort(self, reactor, factory, port=0, interface=''):
+        """
+        Get a TCP port from a reactor, wrapping an already-initialized file
+        descriptor.
+        """
+        if IReactorSocket.providedBy(reactor):
+            if ':' in interface:
+                domain = socket.AF_INET6
+                address = socket.getaddrinfo(interface, port)[0][4]
+            else:
+                domain = socket.AF_INET
+                address = (interface, port)
+            portSock = socket.socket(domain)
+            portSock.bind(address)
+            portSock.listen(3)
+            portSock.setblocking(False)
+            try:
+                return reactor.adoptStreamPort(
+                    portSock.fileno(), portSock.family, factory)
+            finally:
+                # The socket should still be open; fileno will raise if it is
+                # not.
+                portSock.fileno()
+                # Now clean it up, because the rest of the test does not need
+                # it.
+                portSock.close()
+        else:
+            raise SkipTest("Reactor does not provide IReactorSocket")
+
+
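+# Sketch of the non-blocking connect dance used by the address helpers in
+# TCPPortTestsMixin below: connect() on a non-blocking socket usually fails
+# straight away with EINPROGRESS (or EWOULDBLOCK), and the connection
+# completes once the descriptor becomes writable.  Illustration only; the
+# helper name is invented.
+def _sketchNonBlockingConnect():
+    import select
+    listener = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    listener.bind(('127.0.0.1', 0))
+    listener.listen(1)
+    client = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+    client.setblocking(False)
+    try:
+        client.connect(listener.getsockname())
+    except socket.error as e:
+        assert e.args[0] in (errno.EINPROGRESS, errno.EWOULDBLOCK)
+    select.select([], [client], [], 5)     # wait until writable (connected)
+    accepted, _ = listener.accept()
+    for s in (client, accepted, listener):
+        s.close()
+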
+
+class TCPPortTestsMixin(object):
+    """
+    Tests for L{IReactorTCP.listenTCP}
+    """
+    requiredInterfaces = (IReactorTCP,)
+
+    def getExpectedStartListeningLogMessage(self, port, factory):
+        """
+        Get the message expected to be logged when a TCP port starts listening.
+        """
+        return "%s starting on %d" % (
+            factory, port.getHost().port)
+
+
+    def getExpectedConnectionLostLogMsg(self, port):
+        """
+        Get the expected connection lost message for a TCP port.
+        """
+        return "(TCP Port %s Closed)" % (port.getHost().port,)
+
+
+    def test_portGetHostOnIPv4(self):
+        """
+        When no interface is passed to L{IReactorTCP.listenTCP}, the returned
+        listening port listens on an IPv4 address.
+        """
+        reactor = self.buildReactor()
+        port = self.getListeningPort(reactor, ServerFactory())
+        address = port.getHost()
+        self.assertIsInstance(address, IPv4Address)
+
+
+    def test_portGetHostOnIPv6(self):
+        """
+        When listening on an IPv6 address, L{IListeningPort.getHost} returns
+        an L{IPv6Address} with C{host} and C{port} attributes reflecting the
+        address the port is bound to.
+        """
+        reactor = self.buildReactor()
+        host, portNumber = findFreePort(
+            family=socket.AF_INET6, interface='::1')[:2]
+        port = self.getListeningPort(
+            reactor, ServerFactory(), portNumber, host)
+        address = port.getHost()
+        self.assertIsInstance(address, IPv6Address)
+        self.assertEqual('::1', address.host)
+        self.assertEqual(portNumber, address.port)
+    if ipv6Skip:
+        test_portGetHostOnIPv6.skip = ipv6Skip
+
+
+    def test_portGetHostOnIPv6ScopeID(self):
+        """
+        When a link-local IPv6 address including a scope identifier is passed as
+        the C{interface} argument to L{IReactorTCP.listenTCP}, the resulting
+        L{IListeningPort} reports its address as an L{IPv6Address} with a host
+        value that includes the scope identifier.
+        """
+        linkLocal = getLinkLocalIPv6Address()
+        reactor = self.buildReactor()
+        port = self.getListeningPort(reactor, ServerFactory(), 0, linkLocal)
+        address = port.getHost()
+        self.assertIsInstance(address, IPv6Address)
+        self.assertEqual(linkLocal, address.host)
+    if ipv6Skip:
+        test_portGetHostOnIPv6ScopeID.skip = ipv6Skip
+
+
+    def _buildProtocolAddressTest(self, client, interface):
+        """
+        Connect C{client} to a server listening on C{interface} started with
+        L{IReactorTCP.listenTCP} and return the address passed to the factory's
+        C{buildProtocol} method.
+
+        @param client: A C{SOCK_STREAM} L{socket.socket} created with an address
+            family such that it will be able to connect to a server listening on
+            C{interface}.
+
+        @param interface: A C{str} giving an address for a server to listen on.
+            This should almost certainly be the loopback address for some
+            address family supported by L{IReactorTCP.listenTCP}.
+
+        @return: Whatever object, probably an L{IAddress} provider, is passed to
+            a server factory's C{buildProtocol} method when C{client}
+            establishes a connection.
+        """
+        class ObserveAddress(ServerFactory):
+            def buildProtocol(self, address):
+                reactor.stop()
+                self.observedAddress = address
+                return Protocol()
+
+        factory = ObserveAddress()
+        reactor = self.buildReactor()
+        port = self.getListeningPort(reactor, factory, 0, interface)
+        client.setblocking(False)
+        try:
+            connect(client, (port.getHost().host, port.getHost().port))
+        except socket.error as e:
+            errnum, message = e.args
+            self.assertIn(errnum, (errno.EINPROGRESS, errno.EWOULDBLOCK))
+
+        self.runReactor(reactor)
+
+        return factory.observedAddress
+
+
+    def test_buildProtocolIPv4Address(self):
+        """
+        When a connection is accepted over IPv4, an L{IPv4Address} is passed
+        to the factory's C{buildProtocol} method giving the peer's address.
+        """
+        interface = '127.0.0.1'
+        client = createTestSocket(self, socket.AF_INET, socket.SOCK_STREAM)
+        observedAddress = self._buildProtocolAddressTest(client, interface)
+        self.assertEqual(
+            IPv4Address('TCP', *client.getsockname()), observedAddress)
+
+
+    def test_buildProtocolIPv6Address(self):
+        """
+        When a connection is accepted to an IPv6 address, an L{IPv6Address} is
+        passed to the factory's C{buildProtocol} method giving the peer's
+        address.
+        """
+        interface = '::1'
+        client = createTestSocket(self, socket.AF_INET6, socket.SOCK_STREAM)
+        observedAddress = self._buildProtocolAddressTest(client, interface)
+        self.assertEqual(
+            IPv6Address('TCP', *client.getsockname()[:2]), observedAddress)
+    if ipv6Skip:
+        test_buildProtocolIPv6Address.skip = ipv6Skip
+
+
+    def test_buildProtocolIPv6AddressScopeID(self):
+        """
+        When a connection is accepted to a link-local IPv6 address, an
+        L{IPv6Address} is passed to the factory's C{buildProtocol} method
+        giving the peer's address, including a scope identifier.
+        """
+        interface = getLinkLocalIPv6Address()
+        client = createTestSocket(self, socket.AF_INET6, socket.SOCK_STREAM)
+        observedAddress = self._buildProtocolAddressTest(client, interface)
+        self.assertEqual(
+            IPv6Address('TCP', *client.getsockname()[:2]), observedAddress)
+    if ipv6Skip:
+        test_buildProtocolIPv6AddressScopeID.skip = ipv6Skip
+
+
+    def _serverGetConnectionAddressTest(self, client, interface, which):
+        """
+        Connect C{client} to a server listening on C{interface} started with
+        L{IReactorTCP.listenTCP} and return the address returned by one of the
+        server transport's address lookup methods, C{getHost} or C{getPeer}.
+
+        @param client: A C{SOCK_STREAM} L{socket.socket} created with an address
+            family such that it will be able to connect to a server listening on
+            C{interface}.
+
+        @param interface: A C{str} giving an address for a server to listen on.
+            This should almost certainly be the loopback address for some
+            address family supported by L{IReactorTCP.listenTCP}.
+
+        @param which: A C{str} equal to either C{"getHost"} or C{"getPeer"}
+            determining which address will be returned.
+
+        @return: Whatever object, probably an L{IAddress} provider, is returned
+            from the method indicated by C{which}.
+        """
+        class ObserveAddress(Protocol):
+            def makeConnection(self, transport):
+                reactor.stop()
+                self.factory.address = getattr(transport, which)()
+
+        reactor = self.buildReactor()
+        factory = ServerFactory()
+        factory.protocol = ObserveAddress
+        port = self.getListeningPort(reactor, factory, 0, interface)
+        client.setblocking(False)
+        try:
+            connect(client, (port.getHost().host, port.getHost().port))
+        except socket.error as e:
+            errnum, message = e.args
+            self.assertIn(errnum, (errno.EINPROGRESS, errno.EWOULDBLOCK))
+        self.runReactor(reactor)
+        return factory.address
+
+
+    def test_serverGetHostOnIPv4(self):
+        """
+        When a connection is accepted over IPv4, the server
+        L{ITransport.getHost} method returns an L{IPv4Address} giving the
+        address on which the server accepted the connection.
+        """
+        interface = '127.0.0.1'
+        client = createTestSocket(self, socket.AF_INET, socket.SOCK_STREAM)
+        hostAddress = self._serverGetConnectionAddressTest(
+            client, interface, 'getHost')
+        self.assertEqual(
+            IPv4Address('TCP', *client.getpeername()), hostAddress)
+
+
+    def test_serverGetHostOnIPv6(self):
+        """
+        When a connection is accepted over IPv6, the server
+        L{ITransport.getHost} method returns an L{IPv6Address} giving the
+        address on which the server accepted the connection.
+        """
+        interface = '::1'
+        client = createTestSocket(self, socket.AF_INET6, socket.SOCK_STREAM)
+        hostAddress = self._serverGetConnectionAddressTest(
+            client, interface, 'getHost')
+        self.assertEqual(
+            IPv6Address('TCP', *client.getpeername()[:2]), hostAddress)
+    if ipv6Skip:
+        test_serverGetHostOnIPv6.skip = ipv6Skip
+
+
+    def test_serverGetHostOnIPv6ScopeID(self):
+        """
+        When a connection is accepted over IPv6, the server
+        L{ITransport.getHost} method returns an L{IPv6Address} giving the
+        address on which the server accepted the connection, including the scope
+        identifier.
+        """
+        interface = getLinkLocalIPv6Address()
+        client = createTestSocket(self, socket.AF_INET6, socket.SOCK_STREAM)
+        hostAddress = self._serverGetConnectionAddressTest(
+            client, interface, 'getHost')
+        self.assertEqual(
+            IPv6Address('TCP', *client.getpeername()[:2]), hostAddress)
+    if ipv6Skip:
+        test_serverGetHostOnIPv6ScopeID.skip = ipv6Skip
+
+
+    def test_serverGetPeerOnIPv4(self):
+        """
+        When a connection is accepted over IPv4, the server
+        L{ITransport.getPeer} method returns an L{IPv4Address} giving the
+        address of the remote end of the connection.
+        """
+        interface = '127.0.0.1'
+        client = createTestSocket(self, socket.AF_INET, socket.SOCK_STREAM)
+        peerAddress = self._serverGetConnectionAddressTest(
+            client, interface, 'getPeer')
+        self.assertEqual(
+            IPv4Address('TCP', *client.getsockname()), peerAddress)
+
+
+    def test_serverGetPeerOnIPv6(self):
+        """
+        When a connection is accepted over IPv6, the server
+        L{ITransport.getPeer} method returns an L{IPv6Address} giving the
+        address on the remote end of the connection.
+        """
+        interface = '::1'
+        client = createTestSocket(self, socket.AF_INET6, socket.SOCK_STREAM)
+        peerAddress = self._serverGetConnectionAddressTest(
+            client, interface, 'getPeer')
+        self.assertEqual(
+            IPv6Address('TCP', *client.getsockname()[:2]), peerAddress)
+    if ipv6Skip:
+        test_serverGetPeerOnIPv6.skip = ipv6Skip
+
+
+    def test_serverGetPeerOnIPv6ScopeID(self):
+        """
+        When a connection is accepted over IPv6, the server
+        L{ITransport.getPeer} method returns an L{IPv6Address} giving the
+        address on the remote end of the connection, including the scope
+        identifier.
+        """
+        interface = getLinkLocalIPv6Address()
+        client = createTestSocket(self, socket.AF_INET6, socket.SOCK_STREAM)
+        peerAddress = self._serverGetConnectionAddressTest(
+            client, interface, 'getPeer')
+        self.assertEqual(
+            IPv6Address('TCP', *client.getsockname()[:2]), peerAddress)
+    if ipv6Skip:
+        test_serverGetPeerOnIPv6ScopeID.skip = ipv6Skip
+
+
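+# The mixin above verifies IListeningPort.getHost() for IPv4, IPv6 and scoped
+# link-local addresses.  The helper below is only an illustrative sketch of
+# that API and is not used by the tests; its name and default interface are
+# chosen here for the example.
+def _exampleGetHostSketch(reactor, interface='127.0.0.1'):
+    # listenTCP(0, ...) binds an ephemeral port; getHost() then reports the
+    # bound address as an IPv4Address (or IPv6Address for an IPv6 interface)
+    # with host and port attributes.
+    port = reactor.listenTCP(0, ServerFactory(), interface=interface)
+    address = port.getHost()
+    return address.host, address.port
+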
+
+class TCPPortTestsBuilder(ReactorBuilder, ListenTCPMixin, TCPPortTestsMixin,
+                          ObjectModelIntegrationMixin,
+                          StreamTransportTestsMixin):
+    pass
+
+
+
+class TCPFDPortTestsBuilder(ReactorBuilder, SocketTCPMixin, TCPPortTestsMixin,
+                            ObjectModelIntegrationMixin,
+                            StreamTransportTestsMixin):
+    pass
+
+
+
+class StopStartReadingProtocol(Protocol):
+    """
+    Protocol that pauses and resumes the transport a few times.
+    """
+
+    def connectionMade(self):
+        self.data = b''
+        self.pauseResumeProducing(3)
+
+
+    def pauseResumeProducing(self, counter):
+        """
+        Toggle transport read state, then count down.
+        """
+        self.transport.pauseProducing()
+        self.transport.resumeProducing()
+        if counter:
+            self.factory.reactor.callLater(0,
+                    self.pauseResumeProducing, counter - 1)
+        else:
+            self.factory.reactor.callLater(0,
+                    self.factory.ready.callback, self)
+
+
+    def dataReceived(self, data):
+        log.msg('got data', len(data))
+        self.data += data
+        if len(self.data) == 4*4096:
+            self.factory.stop.callback(self.data)
+
+
+
+class TCPConnectionTestsBuilder(ReactorBuilder):
+    """
+    Builder defining tests relating to L{twisted.internet.tcp.Connection}.
+    """
+    requiredInterfaces = (IReactorTCP,)
+
+    def test_stopStartReading(self):
+        """
+        This test verifies transport socket read state after multiple
+        pause/resumeProducing calls.
+        """
+        sf = ServerFactory()
+        reactor = sf.reactor = self.buildReactor()
+
+        skippedReactors = ["Glib2Reactor", "Gtk2Reactor"]
+        reactorClassName = reactor.__class__.__name__
+        if reactorClassName in skippedReactors and platform.isWindows():
+            raise SkipTest(
+                "This test is broken on gtk/glib under Windows.")
+
+        sf.protocol = StopStartReadingProtocol
+        sf.ready = Deferred()
+        sf.stop = Deferred()
+        p = reactor.listenTCP(0, sf)
+        port = p.getHost().port
+        def proceed(protos, port):
+            """
+            Send several IOCPReactor's buffers' worth of data.
+            """
+            self.assertTrue(protos[0])
+            self.assertTrue(protos[1])
+            protos = protos[0][1], protos[1][1]
+            protos[0].transport.write(b'x' * (2 * 4096) + b'y' * (2 * 4096))
+            return (sf.stop.addCallback(cleanup, protos, port)
+                           .addCallback(lambda ign: reactor.stop()))
+
+        def cleanup(data, protos, port):
+            """
+            Make sure IOCPReactor didn't start several WSARecv operations
+            that clobbered each other's results.
+            """
+            self.assertEqual(data, b'x'*(2*4096) + b'y'*(2*4096),
+                                 'did not get the right data')
+            return DeferredList([
+                    maybeDeferred(protos[0].transport.loseConnection),
+                    maybeDeferred(protos[1].transport.loseConnection),
+                    maybeDeferred(port.stopListening)])
+
+        cc = TCP4ClientEndpoint(reactor, '127.0.0.1', port)
+        cf = ClientFactory()
+        cf.protocol = Protocol
+        d = DeferredList([cc.connect(cf), sf.ready]).addCallback(proceed, p)
+        d.addErrback(log.err)
+        self.runReactor(reactor)
+
+
+    def test_connectionLostAfterPausedTransport(self):
+        """
+        Alice connects to Bob.  Alice writes some bytes and then shuts down the
+        connection.  Bob receives the bytes from the connection and then pauses
+        the transport object.  Shortly afterwards Bob resumes the transport
+        object.  At that point, Bob is notified that the connection has been
+        closed.
+
+        This is no problem for most reactors.  The underlying event notification
+        API will probably just remind them that the connection has been closed.
+        It is a little tricky for win32eventreactor (MsgWaitForMultipleObjects).
+        MsgWaitForMultipleObjects will only deliver the close notification once.
+        The reactor needs to remember that notification until Bob resumes the
+        transport.
+        """
+        class Pauser(ConnectableProtocol):
+            def __init__(self):
+                self.events = []
+
+            def dataReceived(self, bytes):
+                self.events.append("paused")
+                self.transport.pauseProducing()
+                self.reactor.callLater(0, self.resume)
+
+            def resume(self):
+                self.events.append("resumed")
+                self.transport.resumeProducing()
+
+            def connectionLost(self, reason):
+                # This is the event you have been waiting for.
+                self.events.append("lost")
+                ConnectableProtocol.connectionLost(self, reason)
+
+        class Client(ConnectableProtocol):
+            def connectionMade(self):
+                self.transport.write(b"some bytes for you")
+                self.transport.loseConnection()
+
+        pauser = Pauser()
+        runProtocolsWithReactor(self, pauser, Client(), TCPCreator())
+        self.assertEqual(pauser.events, ["paused", "resumed", "lost"])
+
+
+    def test_doubleHalfClose(self):
+        """
+        If one side half-closes its connection, and then the other side of the
+        connection calls C{loseWriteConnection}, and then C{loseConnection} in
+        C{writeConnectionLost}, the connection is closed correctly.
+
+        This rather obscure case used to fail (see ticket #3037).
+        """
+        @implementer(IHalfCloseableProtocol)
+        class ListenerProtocol(ConnectableProtocol):
+
+            def readConnectionLost(self):
+                self.transport.loseWriteConnection()
+
+            def writeConnectionLost(self):
+                self.transport.loseConnection()
+
+        class Client(ConnectableProtocol):
+            def connectionMade(self):
+                self.transport.loseConnection()
+
+        # If test fails, reactor won't stop and we'll hit timeout:
+        runProtocolsWithReactor(
+            self, ListenerProtocol(), Client(), TCPCreator())
+
+
+
+class WriteSequenceTestsMixin(object):
+    """
+    Test for L{twisted.internet.abstract.FileDescriptor.writeSequence}.
+    """
+    requiredInterfaces = (IReactorTCP,)
+
+    def setWriteBufferSize(self, transport, value):
+        """
+        Set the write buffer size for the given transport, managing possible
+        differences (i.e., IOCP). Bug #4322 should remove the need for this hack.
+        """
+        if getattr(transport, "writeBufferSize", None) is not None:
+            transport.writeBufferSize = value
+        else:
+            transport.bufferSize = value
+
+
+    def test_writeSequenceWithoutWrite(self):
+        """
+        C{writeSequence} sends the data even if C{write} hasn't been called.
+        """
+
+        def connected(protocols):
+            client, server, port = protocols
+
+            def dataReceived(data):
+                log.msg("data received: %r" % data)
+                self.assertEqual(data, b"Some sequence splitted")
+                client.transport.loseConnection()
+
+            server.dataReceived = dataReceived
+
+            client.transport.writeSequence([b"Some ", b"sequence ", b"splitted"])
+
+        reactor = self.buildReactor()
+        d = self.getConnectedClientAndServer(reactor, "127.0.0.1",
+                                             socket.AF_INET)
+        d.addCallback(connected)
+        d.addErrback(log.err)
+        self.runReactor(reactor)
+
+
+    def test_writeSequenceWithUnicodeRaisesException(self):
+        """
+        C{writeSequence} with an element in the sequence of type unicode raises
+        C{TypeError}.
+        """
+
+        def connected(protocols):
+            client, server, port = protocols
+
+            exc = self.assertRaises(
+                TypeError,
+                server.transport.writeSequence, [u"Unicode is not kosher"])
+
+            self.assertEqual(str(exc), "Data must not be unicode")
+
+            server.transport.loseConnection()
+
+        reactor = self.buildReactor()
+        d = self.getConnectedClientAndServer(reactor, "127.0.0.1",
+                                             socket.AF_INET)
+        d.addCallback(connected)
+        d.addErrback(log.err)
+        self.runReactor(reactor)
+
+
+    def test_streamingProducer(self):
+        """
+        C{writeSequence} pauses its streaming producer if too much data is
+        buffered, and then resumes it.
+        """
+        @implementer(IPushProducer)
+        class SaveActionProducer(object):
+            client = None
+            server = None
+
+            def __init__(self):
+                self.actions = []
+
+            def pauseProducing(self):
+                self.actions.append("pause")
+
+            def resumeProducing(self):
+                self.actions.append("resume")
+                # Unregister the producer so the connection can close
+                self.client.transport.unregisterProducer()
+                # This is why the code below waits for the server connection
+                # first - so we have it to close here.  We close the server
+                # side because win32eventreactor cannot reliably observe us
+                # closing the client side (#5285).
+                self.server.transport.loseConnection()
+
+            def stopProducing(self):
+                self.actions.append("stop")
+
+        producer = SaveActionProducer()
+
+        def connected(protocols):
+            client, server = protocols[:2]
+            producer.client = client
+            producer.server = server
+            # Register a streaming producer and verify that it gets paused
+            # after it writes more than the local send buffer can hold.
+            client.transport.registerProducer(producer, True)
+            self.assertEqual(producer.actions, [])
+            self.setWriteBufferSize(client.transport, 500)
+            client.transport.writeSequence([b"x" * 50] * 20)
+            self.assertEqual(producer.actions, ["pause"])
+
+        reactor = self.buildReactor()
+        d = self.getConnectedClientAndServer(reactor, "127.0.0.1",
+                                             socket.AF_INET)
+        d.addCallback(connected)
+        d.addErrback(log.err)
+        self.runReactor(reactor)
+        # After the send buffer gets a chance to empty out a bit, the producer
+        # should be resumed.
+        self.assertEqual(producer.actions, ["pause", "resume"])
+
+
+    def test_nonStreamingProducer(self):
+        """
+        C{writeSequence} pauses its producer if too much data is buffered only
+        if this is a streaming producer.
+        """
+        test = self
+
+        @implementer(IPullProducer)
+        class SaveActionProducer(object):
+            client = None
+
+            def __init__(self):
+                self.actions = []
+
+            def resumeProducing(self):
+                self.actions.append("resume")
+                if self.actions.count("resume") == 2:
+                    self.client.transport.stopConsuming()
+                else:
+                    test.setWriteBufferSize(self.client.transport, 500)
+                    self.client.transport.writeSequence([b"x" * 50] * 20)
+
+            def stopProducing(self):
+                self.actions.append("stop")
+
+
+        producer = SaveActionProducer()
+
+        def connected(protocols):
+            client = protocols[0]
+            producer.client = client
+            # Register a non-streaming producer and verify that it is resumed
+            # immediately.
+            client.transport.registerProducer(producer, False)
+            self.assertEqual(producer.actions, ["resume"])
+
+        reactor = self.buildReactor()
+        d = self.getConnectedClientAndServer(reactor, "127.0.0.1",
+                                             socket.AF_INET)
+        d.addCallback(connected)
+        d.addErrback(log.err)
+        self.runReactor(reactor)
+        # After the local send buffer empties out, the producer should be
+        # resumed again.
+        self.assertEqual(producer.actions, ["resume", "resume"])
+
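+
+# WriteSequenceTestsMixin above exercises the producer/consumer contract of
+# writeSequence: with a streaming (push) producer registered, the transport
+# calls pauseProducing() once its buffers fill and resumeProducing() when they
+# drain.  The function below is only an illustrative sketch of that
+# registration pattern and is not used by the tests.
+def _exampleRegisterStreamingProducer(transport, producer):
+    # streaming=True means the transport, not the producer, decides when to
+    # pause and resume; the producer only has to honour those calls.
+    transport.registerProducer(producer, True)
+    transport.writeSequence([b"chunk one ", b"chunk two"])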
+
+
+class TCPTransportServerAddressTestMixin(object):
+    """
+    Test mixin for TCP server address building and log prefix.
+    """
+
+    def getConnectedClientAndServer(self, reactor, interface, addressFamily):
+        """
+        Helper method returning a L{Deferred} firing with a tuple of a client
+        protocol, a server protocol, and a running TCP port.
+        """
+        raise NotImplementedError()
+
+
+    def _testServerAddress(self, interface, addressFamily, addressClass):
+        """
+        Helper method to test TCP server addresses on either IPv4 or IPv6.
+        """
+
+        def connected(protocols):
+            client, server, port = protocols
+            try:
+                self.assertEqual(
+                    "<AccumulatingProtocol #%s on %s>" %
+                        (server.transport.sessionno, port.getHost().port),
+                    str(server.transport))
+
+                self.assertEqual(
+                    "AccumulatingProtocol,%s,%s" %
+                        (server.transport.sessionno, interface),
+                    server.transport.logstr)
+
+                [peerAddress] = server.factory.peerAddresses
+                self.assertIsInstance(peerAddress, addressClass)
+                self.assertEqual('TCP', peerAddress.type)
+                self.assertEqual(interface, peerAddress.host)
+            finally:
+                # Be certain to drop the connection so the test completes.
+                server.transport.loseConnection()
+
+        reactor = self.buildReactor()
+        d = self.getConnectedClientAndServer(reactor, interface, addressFamily)
+        d.addCallback(connected)
+        d.addErrback(log.err)
+        self.runReactor(reactor)
+
+
+    def test_serverAddressTCP4(self):
+        """
+        L{Server} instances have a string representation indicating on which
+        port they're running, and the connected address is stored on the
+        C{peerAddresses} attribute of the factory.
+        """
+        return self._testServerAddress("127.0.0.1", socket.AF_INET,
+                                       IPv4Address)
+
+
+    def test_serverAddressTCP6(self):
+        """
+        IPv6 L{Server} instances have a string representation indicating on
+        which port they're running, and the connected address is stored on the
+        C{peerAddresses} attribute of the factory.
+        """
+        return self._testServerAddress(getLinkLocalIPv6Address(),
+                                       socket.AF_INET6, IPv6Address)
+
+    if ipv6Skip:
+        test_serverAddressTCP6.skip = ipv6Skip
+
+
+
+class TCPTransportTestsBuilder(TCPTransportServerAddressTestMixin,
+                               WriteSequenceTestsMixin, ReactorBuilder):
+    """
+    Test standard L{ITCPTransport}s built with C{listenTCP} and C{connectTCP}.
+    """
+
+    def getConnectedClientAndServer(self, reactor, interface, addressFamily):
+        """
+        Return a L{Deferred} firing with a L{MyClientFactory} and
+        L{MyServerFactory} connected pair, and the listening C{Port}.
+        """
+        server = MyServerFactory()
+        server.protocolConnectionMade = Deferred()
+        server.protocolConnectionLost = Deferred()
+
+        client = MyClientFactory()
+        client.protocolConnectionMade = Deferred()
+        client.protocolConnectionLost = Deferred()
+
+        port = reactor.listenTCP(0, server, interface=interface)
+
+        lostDeferred = gatherResults([client.protocolConnectionLost,
+                                      server.protocolConnectionLost])
+        def stop(result):
+            reactor.stop()
+            return result
+
+        lostDeferred.addBoth(stop)
+
+        startDeferred = gatherResults([client.protocolConnectionMade,
+                                       server.protocolConnectionMade])
+
+        deferred = Deferred()
+
+        def start(protocols):
+            client, server = protocols
+            log.msg("client connected %s" % client)
+            log.msg("server connected %s" % server)
+            deferred.callback((client, server, port))
+
+        startDeferred.addCallback(start)
+
+        reactor.connectTCP(interface, port.getHost().port, client)
+
+        return deferred
+
+
+
+class AdoptStreamConnectionTestsBuilder(TCPTransportServerAddressTestMixin,
+                                        WriteSequenceTestsMixin,
+                                        ReactorBuilder):
+    """
+    Test server transports built using C{adoptStreamConnection}.
+    """
+    requiredInterfaces = (IReactorFDSet, IReactorSocket)
+
+    def getConnectedClientAndServer(self, reactor, interface, addressFamily):
+        """
+        Return a L{Deferred} firing with a L{MyClientFactory} and
+        L{MyServerFactory} connected pair, and the listening C{Port}. The
+        difference from the plain TCP case is that the server protocol is
+        obtained by calling C{adoptStreamConnection} on the original server
+        connection.
+        """
+        firstServer = MyServerFactory()
+        firstServer.protocolConnectionMade = Deferred()
+
+        server = MyServerFactory()
+        server.protocolConnectionMade = Deferred()
+        server.protocolConnectionLost = Deferred()
+
+        client = MyClientFactory()
+        client.protocolConnectionMade = Deferred()
+        client.protocolConnectionLost = Deferred()
+
+        port = reactor.listenTCP(0, firstServer, interface=interface)
+
+        def firstServerConnected(proto):
+            reactor.removeReader(proto.transport)
+            reactor.removeWriter(proto.transport)
+            reactor.adoptStreamConnection(
+                proto.transport.fileno(), addressFamily, server)
+
+        firstServer.protocolConnectionMade.addCallback(firstServerConnected)
+
+        lostDeferred = gatherResults([client.protocolConnectionLost,
+                                      server.protocolConnectionLost])
+        def stop(result):
+            if reactor.running:
+                reactor.stop()
+            return result
+
+        lostDeferred.addBoth(stop)
+
+        deferred = Deferred()
+        deferred.addErrback(stop)
+
+        startDeferred = gatherResults([client.protocolConnectionMade,
+                                       server.protocolConnectionMade])
+        def start(protocols):
+            client, server = protocols
+            log.msg("client connected %s" % client)
+            log.msg("server connected %s" % server)
+            deferred.callback((client, server, port))
+
+        startDeferred.addCallback(start)
+
+        reactor.connectTCP(interface, port.getHost().port, client)
+        return deferred
+
+
+
+globals().update(TCP4ClientTestsBuilder.makeTestCaseClasses())
+globals().update(TCP6ClientTestsBuilder.makeTestCaseClasses())
+globals().update(TCPPortTestsBuilder.makeTestCaseClasses())
+globals().update(TCPFDPortTestsBuilder.makeTestCaseClasses())
+globals().update(TCPConnectionTestsBuilder.makeTestCaseClasses())
+globals().update(TCP4ConnectorTestsBuilder.makeTestCaseClasses())
+globals().update(TCP6ConnectorTestsBuilder.makeTestCaseClasses())
+globals().update(TCPTransportTestsBuilder.makeTestCaseClasses())
+globals().update(AdoptStreamConnectionTestsBuilder.makeTestCaseClasses())
+
+
+
+class ServerAbortsTwice(ConnectableProtocol):
+    """
+    Call abortConnection() twice.
+    """
+
+    def dataReceived(self, data):
+        self.transport.abortConnection()
+        self.transport.abortConnection()
+
+
+
+class ServerAbortsThenLoses(ConnectableProtocol):
+    """
+    Call abortConnection() followed by loseConnection().
+    """
+
+    def dataReceived(self, data):
+        self.transport.abortConnection()
+        self.transport.loseConnection()
+
+
+
+class AbortServerWritingProtocol(ConnectableProtocol):
+    """
+    Protocol that writes data upon connection.
+    """
+
+    def connectionMade(self):
+        """
+        Tell the client that the connection is set up and it's time to abort.
+        """
+        self.transport.write(b"ready")
+
+
+
+class ReadAbortServerProtocol(AbortServerWritingProtocol):
+    """
+    Server that should never receive any data, except 'X's, which are written
+    by the other side of the connection before abortConnection and so might
+    still arrive.
+    """
+
+    def dataReceived(self, data):
+        if data.replace(b'X', b''):
+            raise Exception("Unexpectedly received data.")
+
+
+
+class NoReadServer(ConnectableProtocol):
+    """
+    Stop reading immediately on connection.
+
+    This simulates a lost connection that will cause the other side to time
+    out, and therefore call abortConnection().
+    """
+
+    def connectionMade(self):
+        self.transport.stopReading()
+
+
+
+class EventualNoReadServer(ConnectableProtocol):
+    """
+    Like NoReadServer, except we wait until some bytes have been delivered
+    before stopping reading. This means the TLS handshake has finished, where
+    applicable.
+    """
+
+    gotData = False
+    stoppedReading = False
+
+
+    def dataReceived(self, data):
+        if not self.gotData:
+            self.gotData = True
+            self.transport.registerProducer(self, False)
+            self.transport.write(b"hello")
+
+
+    def resumeProducing(self):
+        if self.stoppedReading:
+            return
+        self.stoppedReading = True
+        # We've written out the data:
+        self.transport.stopReading()
+
+
+    def pauseProducing(self):
+        pass
+
+
+    def stopProducing(self):
+        pass
+
+
+
+class BaseAbortingClient(ConnectableProtocol):
+    """
+    Base class for abort-testing clients.
+    """
+    inReactorMethod = False
+
+    def connectionLost(self, reason):
+        if self.inReactorMethod:
+            raise RuntimeError("BUG: connectionLost was called re-entrantly!")
+        ConnectableProtocol.connectionLost(self, reason)
+
+
+
+class WritingButNotAbortingClient(BaseAbortingClient):
+    """
+    Write data, but don't abort.
+    """
+
+    def connectionMade(self):
+        self.transport.write(b"hello")
+
+
+
+class AbortingClient(BaseAbortingClient):
+    """
+    Call abortConnection() after writing some data.
+    """
+
+    def dataReceived(self, data):
+        """
+        Some data was received, so the connection is set up.
+        """
+        self.inReactorMethod = True
+        self.writeAndAbort()
+        self.inReactorMethod = False
+
+
+    def writeAndAbort(self):
+        # X is written before abortConnection, and so there is a chance it
+        # might arrive. Y is written after, and so no Ys should ever be
+        # delivered:
+        self.transport.write(b"X" * 10000)
+        self.transport.abortConnection()
+        self.transport.write(b"Y" * 10000)
+
+
+
+class AbortingTwiceClient(AbortingClient):
+    """
+    Call abortConnection() twice, after writing some data.
+    """
+
+    def writeAndAbort(self):
+        AbortingClient.writeAndAbort(self)
+        self.transport.abortConnection()
+
+
+
+class AbortingThenLosingClient(AbortingClient):
+    """
+    Call abortConnection() and then loseConnection().
+    """
+
+    def writeAndAbort(self):
+        AbortingClient.writeAndAbort(self)
+        self.transport.loseConnection()
+
+
+
+class ProducerAbortingClient(ConnectableProtocol):
+    """
+    Call abortConnection from doWrite, via resumeProducing.
+    """
+
+    inReactorMethod = True
+    producerStopped = False
+
+    def write(self):
+        self.transport.write(b"lalala" * 127000)
+        self.inRegisterProducer = True
+        self.transport.registerProducer(self, False)
+        self.inRegisterProducer = False
+
+
+    def connectionMade(self):
+        self.write()
+
+
+    def resumeProducing(self):
+        self.inReactorMethod = True
+        if not self.inRegisterProducer:
+            self.transport.abortConnection()
+        self.inReactorMethod = False
+
+
+    def stopProducing(self):
+        self.producerStopped = True
+
+
+    def connectionLost(self, reason):
+        if not self.producerStopped:
+            raise RuntimeError("BUG: stopProducing() was never called.")
+        if self.inReactorMethod:
+            raise RuntimeError("BUG: connectionLost called re-entrantly!")
+        ConnectableProtocol.connectionLost(self, reason)
+
+
+
+class StreamingProducerClient(ConnectableProtocol):
+    """
+    Call abortConnection() when the other side has stopped reading.
+
+    In particular, we want to call abortConnection() only once our local
+    socket hits a state where it is no longer writeable. This helps emulate
+    the most common use case for abortConnection(), closing a connection after
+    a timeout, with write buffers being full.
+
+    Since it's very difficult to know when this actually happens, we just
+    write a lot of data, and assume at that point no more writes will happen.
+    """
+    paused = False
+    extraWrites = 0
+    inReactorMethod = False
+
+    def connectionMade(self):
+        self.write()
+
+
+    def write(self):
+        """
+        Write large amount to transport, then wait for a while for buffers to
+        fill up.
+        """
+        self.transport.registerProducer(self, True)
+        for i in range(100):
+            self.transport.write(b"1234567890" * 32000)
+
+
+    def resumeProducing(self):
+        self.paused = False
+
+
+    def stopProducing(self):
+        pass
+
+
+    def pauseProducing(self):
+        """
+        Called when local buffer fills up.
+
+        The goal is to hit the point where the local file descriptor is not
+        writeable (or the moral equivalent). The fact that pauseProducing has
+        been called is not sufficient, since that can happen when Twisted's
+        buffers fill up but the OS hasn't gotten any writes yet. We want to be as
+        close as possible to every buffer (including OS buffers) being full.
+
+        So, we wait a bit more after this for Twisted to write out a few
+        chunks, then abortConnection.
+        """
+        if self.paused:
+            return
+        self.paused = True
+        # The amount we wait is arbitrary, we just want to make sure some
+        # writes have happened and outgoing OS buffers filled up -- see
+        # http://twistedmatrix.com/trac/ticket/5303 for details:
+        self.reactor.callLater(0.01, self.doAbort)
+
+
+    def doAbort(self):
+        if not self.paused:
+            log.err(RuntimeError("BUG: We should be paused a this point."))
+        self.inReactorMethod = True
+        self.transport.abortConnection()
+        self.inReactorMethod = False
+
+
+    def connectionLost(self, reason):
+        # Tell server to start reading again so it knows to go away:
+        self.otherProtocol.transport.startReading()
+        ConnectableProtocol.connectionLost(self, reason)
+
+
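+# StreamingProducerClient above emulates the most common real-world use of
+# abortConnection(): giving up on a peer whose write buffers are full.  The
+# class below is only an illustrative sketch of that timeout pattern; it is
+# not used by the tests and assumes (as the tests here do) that the factory
+# exposes the reactor as C{factory.reactor}.
+class _ExampleTimeoutAbortProtocol(Protocol):
+    timeout = 30
+
+    def connectionMade(self):
+        # Unlike loseConnection(), abortConnection() does not wait for
+        # buffered data to be flushed, so it works even when the peer has
+        # stopped reading.
+        self._timeoutCall = self.factory.reactor.callLater(
+            self.timeout, self.transport.abortConnection)
+
+    def dataReceived(self, data):
+        # Any activity from the peer pushes the deadline back.
+        self._timeoutCall.reset(self.timeout)
+
+    def connectionLost(self, reason):
+        if self._timeoutCall.active():
+            self._timeoutCall.cancel()
+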
+
+class StreamingProducerClientLater(StreamingProducerClient):
+    """
+    Call abortConnection() from dataReceived, after bytes have been
+    exchanged.
+    """
+
+    def connectionMade(self):
+        self.transport.write(b"hello")
+        self.gotData = False
+
+
+    def dataReceived(self, data):
+        if not self.gotData:
+            self.gotData = True
+            self.write()
+
+
+class ProducerAbortingClientLater(ProducerAbortingClient):
+    """
+    Call abortConnection from doWrite, via resumeProducing.
+
+    Try to do so after some bytes have already been exchanged, so we
+    don't interrupt the SSL handshake.
+    """
+
+    def connectionMade(self):
+        # Override base class connectionMade().
+        pass
+
+
+    def dataReceived(self, data):
+        self.write()
+
+
+
+class DataReceivedRaisingClient(AbortingClient):
+    """
+    Call abortConnection(), and then throw exception, from dataReceived.
+    """
+
+    def dataReceived(self, data):
+        self.transport.abortConnection()
+        raise ZeroDivisionError("ONO")
+
+
+
+class ResumeThrowsClient(ProducerAbortingClient):
+    """
+    Call abortConnection() and throw exception from resumeProducing().
+    """
+
+    def resumeProducing(self):
+        if not self.inRegisterProducer:
+            self.transport.abortConnection()
+            raise ZeroDivisionError("ono!")
+
+
+    def connectionLost(self, reason):
+        # Base class assertion about stopProducing being called isn't valid;
+        # if we blew up in resumeProducing, consumers are justified in
+        # giving up on the producer and not calling stopProducing.
+        ConnectableProtocol.connectionLost(self, reason)
+
+
+
+class AbortConnectionMixin(object):
+    """
+    Unit tests for L{ITransport.abortConnection}.
+    """
+    # Override in subclasses, should be an EndpointCreator instance:
+    endpoints = None
+
+    def runAbortTest(self, clientClass, serverClass,
+                     clientConnectionLostReason=None):
+        """
+        A test runner utility function, which hooks up a matched pair of client
+        and server protocols.
+
+        We then run the reactor until both sides have disconnected, and then
+        verify that the right exception resulted.
+        """
+        clientExpectedExceptions = (ConnectionAborted, ConnectionLost)
+        serverExpectedExceptions = (ConnectionLost, ConnectionDone)
+        # In TLS tests we may get SSL.Error instead of ConnectionLost,
+        # since we're trashing the TLS protocol layer.
+        if useSSL:
+            clientExpectedExceptions = clientExpectedExceptions + (SSL.Error,)
+            serverExpectedExceptions = serverExpectedExceptions + (SSL.Error,)
+
+        client = clientClass()
+        server = serverClass()
+        client.otherProtocol = server
+        server.otherProtocol = client
+        reactor = runProtocolsWithReactor(self, server, client, self.endpoints)
+
+        # Make sure everything was shutdown correctly:
+        self.assertEqual(reactor.removeAll(), [])
+        # The reactor always has a timeout added in runReactor():
+        delayedCalls = reactor.getDelayedCalls()
+        self.assertEqual(len(delayedCalls), 1, map(str, delayedCalls))
+
+        if clientConnectionLostReason is not None:
+            self.assertIsInstance(
+                client.disconnectReason.value,
+                (clientConnectionLostReason,) + clientExpectedExceptions)
+        else:
+            self.assertIsInstance(client.disconnectReason.value,
+                                  clientExpectedExceptions)
+        self.assertIsInstance(server.disconnectReason.value, serverExpectedExceptions)
+
+
+    def test_dataReceivedAbort(self):
+        """
+        abortConnection() is called in dataReceived. The protocol should be
+        disconnected, but connectionLost should not be called re-entrantly.
+        """
+        return self.runAbortTest(AbortingClient, ReadAbortServerProtocol)
+
+
+    def test_clientAbortsConnectionTwice(self):
+        """
+        abortConnection() is called twice by client.
+
+        No exception should be thrown, and the connection will be closed.
+        """
+        return self.runAbortTest(AbortingTwiceClient, ReadAbortServerProtocol)
+
+
+    def test_clientAbortsConnectionThenLosesConnection(self):
+        """
+        Client calls abortConnection(), followed by loseConnection().
+
+        No exception should be thrown, and the connection will be closed.
+        """
+        return self.runAbortTest(AbortingThenLosingClient,
+                                 ReadAbortServerProtocol)
+
+
+    def test_serverAbortsConnectionTwice(self):
+        """
+        abortConnection() is called twice by server.
+
+        No exception should be thrown, and the connection will be closed.
+        """
+        return self.runAbortTest(WritingButNotAbortingClient, ServerAbortsTwice,
+                                 clientConnectionLostReason=ConnectionLost)
+
+
+    def test_serverAbortsConnectionThenLosesConnection(self):
+        """
+        Server calls abortConnection(), followed by loseConnection().
+
+        No exception should be thrown, and the connection will be closed.
+        """
+        return self.runAbortTest(WritingButNotAbortingClient,
+                                 ServerAbortsThenLoses,
+                                 clientConnectionLostReason=ConnectionLost)
+
+
+    def test_resumeProducingAbort(self):
+        """
+        abortConnection() is called in resumeProducing, before any bytes have
+        been exchanged. The protocol should be disconnected, but
+        connectionLost should not be called re-entrantly.
+        """
+        self.runAbortTest(ProducerAbortingClient,
+                          ConnectableProtocol)
+
+
+    def test_resumeProducingAbortLater(self):
+        """
+        abortConnection() is called in resumeProducing, after some
+        bytes have been exchanged. The protocol should be disconnected.
+        """
+        return self.runAbortTest(ProducerAbortingClientLater,
+                                 AbortServerWritingProtocol)
+
+
+    def test_fullWriteBuffer(self):
+        """
+        abortConnection() triggered by the write buffer being full.
+
+        In particular, the server side stops reading. This is supposed
+        to simulate a realistic timeout scenario where the client
+        notices the server is no longer accepting data.
+
+        The protocol should be disconnected, but connectionLost should not be
+        called re-entrantly.
+        """
+        self.runAbortTest(StreamingProducerClient,
+                          NoReadServer)
+
+
+    def test_fullWriteBufferAfterByteExchange(self):
+        """
+        abortConnection() is triggered by a write buffer being full.
+
+        However, this buffer is filled after some bytes have been exchanged,
+        allowing a TLS handshake if we're testing TLS. The connection will
+        then be lost.
+        """
+        return self.runAbortTest(StreamingProducerClientLater,
+                                 EventualNoReadServer)
+
+
+    def test_dataReceivedThrows(self):
+        """
+        dataReceived calls abortConnection(), and then raises an exception.
+
+        The connection will be lost, with the thrown exception
+        (C{ZeroDivisionError}) as the reason on the client. The idea here is
+        that bugs should not be masked by abortConnection, in particular
+        unexpected exceptions.
+        """
+        self.runAbortTest(DataReceivedRaisingClient,
+                          AbortServerWritingProtocol,
+                          clientConnectionLostReason=ZeroDivisionError)
+        errors = self.flushLoggedErrors(ZeroDivisionError)
+        self.assertEqual(len(errors), 1)
+
+
+    def test_resumeProducingThrows(self):
+        """
+        resumeProducing calls abortConnection(), and then raises an exception.
+
+        The connection will be lost, with the thrown exception
+        (C{ZeroDivisionError}) as the reason on the client. The idea here is
+        that bugs should not be masked by abortConnection, in particular
+        unexpected exceptions.
+        """
+        self.runAbortTest(ResumeThrowsClient,
+                          ConnectableProtocol,
+                          clientConnectionLostReason=ZeroDivisionError)
+        errors = self.flushLoggedErrors(ZeroDivisionError)
+        self.assertEqual(len(errors), 1)
+
+
+
+class AbortConnectionTestCase(ReactorBuilder, AbortConnectionMixin):
+    """
+    TCP-specific L{AbortConnectionMixin} tests.
+    """
+    requiredInterfaces = (IReactorTCP,)
+
+    endpoints = TCPCreator()
+
+globals().update(AbortConnectionTestCase.makeTestCaseClasses())
+
+
+
+class SimpleUtilityTestCase(TestCase):
+    """
+    Simple, direct tests for helpers within L{twisted.internet.tcp}.
+    """
+    if ipv6Skip:
+        skip = ipv6Skip
+
+    def test_resolveNumericHost(self):
+        """
+        L{_resolveIPv6} raises a L{socket.gaierror} (L{socket.EAI_NONAME}) when
+        invoked with a non-numeric host.  (In other words, it is passing
+        L{socket.AI_NUMERICHOST} to L{socket.getaddrinfo} and will not
+        accidentally block if it receives bad input.)
+        """
+        err = self.assertRaises(socket.gaierror, _resolveIPv6, "localhost", 1)
+        self.assertEqual(err.args[0], socket.EAI_NONAME)
+
+
+    def test_resolveNumericService(self):
+        """
+        L{_resolveIPv6} raises a L{socket.gaierror} (L{socket.EAI_NONAME}) when
+        invoked with a non-numeric port.  (In other words, it is passing
+        L{socket.AI_NUMERICSERV} to L{socket.getaddrinfo} and will not
+        accidentally block if it receives bad input.)
+        """
+        err = self.assertRaises(socket.gaierror, _resolveIPv6, "::1", "http")
+        self.assertEqual(err.args[0], socket.EAI_NONAME)
+
+    if platform.isWindows():
+        test_resolveNumericService.skip = ("The AI_NUMERICSERV flag is not "
+                                           "supported by Microsoft providers.")
+        # http://msdn.microsoft.com/en-us/library/windows/desktop/ms738520.aspx
+
+
+    def test_resolveIPv6(self):
+        """
+        L{_resolveIPv6} discovers the flow info and scope ID of an IPv6
+        address.
+        """
+        result = _resolveIPv6("::1", 2)
+        self.assertEqual(len(result), 4)
+        # We can't say anything more useful about these than that they're
+        # integers, because the whole point of getaddrinfo is that you can never
+        # know a-priori know _anything_ about the network interfaces of the
+        # computer that you're on and you have to ask it.
+        self.assertIsInstance(result[2], int) # flow info
+        self.assertIsInstance(result[3], int) # scope id
+        # but, luckily, IP presentation format and what it means to be a port
+        # number are a little better specified.
+        self.assertEqual(result[:2], ("::1", 2))
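+
+
+# As a rough sketch (the exact implementation of _resolveIPv6 may differ), the
+# numeric-only lookup described by the tests above corresponds to a
+# getaddrinfo call like the one below: AI_NUMERICHOST and AI_NUMERICSERV
+# forbid DNS and service-name lookups, and the AF_INET6 sockaddr comes back as
+# a (host, port, flowinfo, scopeid) 4-tuple.  This helper is illustrative only
+# and is not used by the tests; note that AI_NUMERICSERV is unavailable with
+# some Windows providers.
+def _exampleNumericOnlyLookup(host, port):
+    return socket.getaddrinfo(
+        host, port, socket.AF_INET6, socket.SOCK_STREAM, 0,
+        socket.AI_NUMERICHOST | socket.AI_NUMERICSERV)[0][4]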
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_threads.py b/ThirdParty/Twisted/twisted/internet/test/test_threads.py
new file mode 100644
index 0000000..1436918
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_threads.py
@@ -0,0 +1,220 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorThreads}.
+"""
+
+from __future__ import division, absolute_import
+
+__metaclass__ = type
+
+from weakref import ref
+import gc, threading
+
+from twisted.python.threadable import isInIOThread
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.python.threadpool import ThreadPool
+from twisted.internet.interfaces import IReactorThreads
+
+
+class ThreadTestsBuilder(ReactorBuilder):
+    """
+    Builder for defining tests relating to L{IReactorThreads}.
+    """
+    requiredInterfaces = (IReactorThreads,)
+
+    def test_getThreadPool(self):
+        """
+        C{reactor.getThreadPool()} returns an instance of L{ThreadPool} which
+        starts when C{reactor.run()} is called and stops before it returns.
+        """
+        state = []
+        reactor = self.buildReactor()
+
+        pool = reactor.getThreadPool()
+        self.assertIsInstance(pool, ThreadPool)
+        self.assertFalse(
+            pool.started, "Pool should not start before reactor.run")
+
+        def f():
+            # Record the state for later assertions
+            state.append(pool.started)
+            state.append(pool.joined)
+            reactor.stop()
+
+        reactor.callWhenRunning(f)
+        self.runReactor(reactor, 2)
+
+        self.assertTrue(
+            state[0], "Pool should start after reactor.run")
+        self.assertFalse(
+            state[1], "Pool should not be joined before reactor.stop")
+        self.assertTrue(
+            pool.joined,
+            "Pool should be stopped after reactor.run returns")
+
+
+    def test_suggestThreadPoolSize(self):
+        """
+        C{reactor.suggestThreadPoolSize()} sets the maximum size of the reactor
+        threadpool.
+        """
+        reactor = self.buildReactor()
+        reactor.suggestThreadPoolSize(17)
+        pool = reactor.getThreadPool()
+        self.assertEqual(pool.max, 17)
+
+
+    def test_delayedCallFromThread(self):
+        """
+        A function scheduled with L{IReactorThreads.callFromThread} invoked
+        from a delayed call is run immediately in the next reactor iteration.
+
+        When invoked from the reactor thread, previous implementations of
+        L{IReactorThreads.callFromThread} would skip the pipe/socket based wake
+        up step, assuming the reactor would wake up on its own.  However, this
+        resulted in the reactor not noticing an insert into the thread queue at
+        the right time (in this case, after the thread queue has been processed
+        for that reactor iteration).
+        """
+        reactor = self.buildReactor()
+
+        def threadCall():
+            reactor.stop()
+
+        # Set up the use of callFromThread being tested.
+        reactor.callLater(0, reactor.callFromThread, threadCall)
+
+        before = reactor.seconds()
+        self.runReactor(reactor, 60)
+        after = reactor.seconds()
+
+        # We specified a timeout of 60 seconds.  The timeout code in runReactor
+        # probably won't actually work, though.  If the reactor comes out of
+        # the event notification API just a little bit early, say after 59.9999
+        # seconds instead of after 60 seconds, then the queued thread call will
+        # get processed but the timeout delayed call runReactor sets up won't!
+        # Then the reactor will stop and runReactor will return without the
+        # timeout firing.  As it turns out, select() and poll() are quite
+        # likely to return *slightly* earlier than we ask them to, so the
+        # timeout will rarely happen, even if callFromThread is broken.  So,
+        # instead we'll measure the elapsed time and make sure it's something
+        # less than about half of the timeout we specified.  This is heuristic.
+        # It assumes that select() won't ever return after 30 seconds when we
+        # asked it to timeout after 60 seconds.  And of course like all
+        # time-based tests, it's slightly non-deterministic.  If the OS doesn't
+        # schedule this process for 30 seconds, then the test might fail even
+        # if callFromThread is working.
+        self.assertTrue(after - before < 30)
+
+
+    def test_callFromThread(self):
+        """
+        A function scheduled with L{IReactorThreads.callFromThread} invoked
+        from another thread is run in the reactor thread.
+        """
+        reactor = self.buildReactor()
+        result = []
+
+        def threadCall():
+            result.append(threading.currentThread())
+            reactor.stop()
+        reactor.callLater(0, reactor.callInThread,
+                          reactor.callFromThread, threadCall)
+        self.runReactor(reactor, 5)
+
+        self.assertEquals(result, [threading.currentThread()])
+
+
+    def test_stopThreadPool(self):
+        """
+        When the reactor stops, L{ReactorBase._stopThreadPool} drops the
+        reactor's direct reference to its internal threadpool and removes
+        the associated startup and shutdown triggers.
+
+        This is the case of the thread pool being created before the reactor
+        is run.
+        """
+        reactor = self.buildReactor()
+        threadpool = ref(reactor.getThreadPool())
+        reactor.callWhenRunning(reactor.stop)
+        self.runReactor(reactor)
+        gc.collect()
+        self.assertIdentical(threadpool(), None)
+
+
+    def test_stopThreadPoolWhenStartedAfterReactorRan(self):
+        """
+        Shutting down the thread pool requires special handling when the pool
+        was started after the reactor was already running.
+
+        Some implementation background: The thread pool is started with
+        callWhenRunning, which only returns a system trigger ID when it is
+        invoked before the reactor is started.
+
+        This is the case of the thread pool being created after the reactor
+        is started.
+        """
+        reactor = self.buildReactor()
+        threadPoolRefs = []
+        def acquireThreadPool():
+            threadPoolRefs.append(ref(reactor.getThreadPool()))
+            reactor.stop()
+        reactor.callWhenRunning(acquireThreadPool)
+        self.runReactor(reactor)
+        gc.collect()
+        self.assertIdentical(threadPoolRefs[0](), None)
+
+
+    def test_cleanUpThreadPoolEvenBeforeReactorIsRun(self):
+        """
+        When the reactor has its shutdown event fired before it is run, the
+        thread pool is completely destroyed.
+
+        For what it's worth, the reason we support this behavior at all is
+        because Trial does this.
+
+        This is the case of the thread pool being created without the reactor
+        being started at all.
+        """
+        reactor = self.buildReactor()
+        threadPoolRef = ref(reactor.getThreadPool())
+        reactor.fireSystemEvent("shutdown")
+        gc.collect()
+        self.assertIdentical(threadPoolRef(), None)
+
+
+    def test_isInIOThread(self):
+        """
+        The reactor registers itself as the I/O thread when it runs so that
+        L{twisted.python.threadable.isInIOThread} returns C{True} if it is
+        called in the thread the reactor is running in.
+        """
+        results = []
+        reactor = self.buildReactor()
+        def check():
+            results.append(isInIOThread())
+            reactor.stop()
+        reactor.callWhenRunning(check)
+        self.runReactor(reactor)
+        self.assertEqual([True], results)
+
+
+    def test_isNotInIOThread(self):
+        """
+        The reactor registers itself as the I/O thread when it runs so that
+        L{twisted.python.threadable.isInIOThread} returns C{False} if it is
+        called in a different thread than the reactor is running in.
+        """
+        results = []
+        reactor = self.buildReactor()
+        def check():
+            results.append(isInIOThread())
+            reactor.callFromThread(reactor.stop)
+        reactor.callInThread(check)
+        self.runReactor(reactor)
+        self.assertEqual([False], results)
+
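+
+# The callFromThread / isInIOThread tests above verify the two halves of the
+# usual cross-thread pattern: do blocking work on a pool thread, then hand the
+# result back to the reactor (I/O) thread.  The helpers below are only an
+# illustrative sketch of that pattern and are not used by the tests.
+def _exampleRunInThread(reactor, blockingFunction, onResult):
+    def work():
+        result = blockingFunction()
+        # Results must be delivered back to the reactor thread; reactor
+        # methods other than callFromThread are not thread-safe.
+        reactor.callFromThread(onResult, result)
+    reactor.callInThread(work)
+
+
+def _exampleCallInReactorThread(reactor, f, *args):
+    # Safe to call from any thread: invoke directly if we are already in the
+    # I/O thread, otherwise schedule via callFromThread.
+    if isInIOThread():
+        f(*args)
+    else:
+        reactor.callFromThread(f, *args)
+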
+
+globals().update(ThreadTestsBuilder.makeTestCaseClasses())
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_time.py b/ThirdParty/Twisted/twisted/internet/test/test_time.py
new file mode 100644
index 0000000..b6f9e1d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_time.py
@@ -0,0 +1,66 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorTime}.
+"""
+
+__metaclass__ = type
+
+from twisted.python.runtime import platform
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.internet.interfaces import IReactorTime
+
+
+class TimeTestsBuilder(ReactorBuilder):
+    """
+    Builder for defining tests relating to L{IReactorTime}.
+    """
+    requiredInterfaces = (IReactorTime,)
+
+    def test_delayedCallStopsReactor(self):
+        """
+        The reactor can be stopped by a delayed call.
+        """
+        reactor = self.buildReactor()
+        reactor.callLater(0, reactor.stop)
+        reactor.run()
+
+
+
+class GlibTimeTestsBuilder(ReactorBuilder):
+    """
+    Builder for defining tests relating to L{IReactorTime} for reactors based
+    off glib.
+    """
+    requiredInterfaces = (IReactorTime,)
+
+    if platform.isWindows():
+        _reactors = ["twisted.internet.gtk2reactor.PortableGtkReactor"]
+    else:
+        _reactors = ["twisted.internet.glib2reactor.Glib2Reactor",
+                     "twisted.internet.gtk2reactor.Gtk2Reactor"]
+
+    def test_timeout_add(self):
+        """
+        A C{reactor.callLater} call scheduled from a C{gobject.timeout_add}
+        call is run on time.
+        """
+        import gobject
+        reactor = self.buildReactor()
+
+        result = []
+        def gschedule():
+            reactor.callLater(0, callback)
+            return 0
+        def callback():
+            result.append(True)
+            reactor.stop()
+
+        reactor.callWhenRunning(gobject.timeout_add, 10, gschedule)
+        self.runReactor(reactor, 5)
+        self.assertEqual(result, [True])
+
+
+globals().update(TimeTestsBuilder.makeTestCaseClasses())
+globals().update(GlibTimeTestsBuilder.makeTestCaseClasses())
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_tls.py b/ThirdParty/Twisted/twisted/internet/test/test_tls.py
new file mode 100644
index 0000000..080ceb5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_tls.py
@@ -0,0 +1,438 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{ITLSTransport}.
+"""
+
+from __future__ import division, absolute_import
+
+__metaclass__ = type
+
+import sys, operator
+
+from zope.interface import implementer
+
+from twisted.python.compat import _PY3
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.internet.protocol import ServerFactory, ClientFactory, Protocol
+from twisted.internet.interfaces import (
+    IReactorSSL, ITLSTransport, IStreamClientEndpoint)
+from twisted.internet.defer import Deferred, DeferredList
+from twisted.internet._endpointspy3 import (
+    SSL4ServerEndpoint, SSL4ClientEndpoint, TCP4ClientEndpoint)
+from twisted.internet.error import ConnectionClosed
+from twisted.internet.task import Cooperator
+from twisted.trial.unittest import TestCase, SkipTest
+from twisted.python.runtime import platform
+
+from twisted.internet.test.test_core import ObjectModelIntegrationMixin
+from twisted.internet.test.test_tcp import (
+    StreamTransportTestsMixin, AbortConnectionMixin)
+from twisted.internet.test.connectionmixins import (
+    EndpointCreator, ConnectionTestsMixin, BrokenContextFactory)
+
+try:
+    from OpenSSL.crypto import FILETYPE_PEM
+except ImportError:
+    FILETYPE_PEM = None
+else:
+    from twisted.internet.ssl import PrivateCertificate, KeyPair
+    from twisted.internet.ssl import ClientContextFactory
+
+
+class TLSMixin:
+    requiredInterfaces = [IReactorSSL]
+
+    if platform.isWindows():
+        msg = (
+            "For some reason, these reactors don't deal with SSL "
+            "disconnection correctly on Windows.  See #3371.")
+        skippedReactors = {
+            "twisted.internet.glib2reactor.Glib2Reactor": msg,
+            "twisted.internet.gtk2reactor.Gtk2Reactor": msg}
+
+
+class ContextGeneratingMixin(object):
+    _certificateText = (
+        "-----BEGIN CERTIFICATE-----\n"
+        "MIIDBjCCAm+gAwIBAgIBATANBgkqhkiG9w0BAQQFADB7MQswCQYDVQQGEwJTRzER\n"
+        "MA8GA1UEChMITTJDcnlwdG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQD\n"
+        "ExtNMkNyeXB0byBDZXJ0aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5n\n"
+        "cHNAcG9zdDEuY29tMB4XDTAwMDkxMDA5NTEzMFoXDTAyMDkxMDA5NTEzMFowUzEL\n"
+        "MAkGA1UEBhMCU0cxETAPBgNVBAoTCE0yQ3J5cHRvMRIwEAYDVQQDEwlsb2NhbGhv\n"
+        "c3QxHTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tMFwwDQYJKoZIhvcNAQEB\n"
+        "BQADSwAwSAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh\n"
+        "5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAaOCAQQwggEAMAkGA1UdEwQC\n"
+        "MAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRl\n"
+        "MB0GA1UdDgQWBBTPhIKSvnsmYsBVNWjj0m3M2z0qVTCBpQYDVR0jBIGdMIGagBT7\n"
+        "hyNp65w6kxXlxb8pUU/+7Sg4AaF/pH0wezELMAkGA1UEBhMCU0cxETAPBgNVBAoT\n"
+        "CE0yQ3J5cHRvMRQwEgYDVQQLEwtNMkNyeXB0byBDQTEkMCIGA1UEAxMbTTJDcnlw\n"
+        "dG8gQ2VydGlmaWNhdGUgTWFzdGVyMR0wGwYJKoZIhvcNAQkBFg5uZ3BzQHBvc3Qx\n"
+        "LmNvbYIBADANBgkqhkiG9w0BAQQFAAOBgQA7/CqT6PoHycTdhEStWNZde7M/2Yc6\n"
+        "BoJuVwnW8YxGO8Sn6UJ4FeffZNcYZddSDKosw8LtPOeWoK3JINjAk5jiPQ2cww++\n"
+        "7QGG/g5NDjxFZNDJP1dGiLAxPW6JXwov4v0FmdzfLOZ01jDcgQQZqEpYlgpuI5JE\n"
+        "WUQ9Ho4EzbYCOQ==\n"
+        "-----END CERTIFICATE-----\n")
+
+    _privateKeyText = (
+        "-----BEGIN RSA PRIVATE KEY-----\n"
+        "MIIBPAIBAAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh\n"
+        "5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAQJBAIqm/bz4NA1H++Vx5Ewx\n"
+        "OcKp3w19QSaZAwlGRtsUxrP7436QjnREM3Bm8ygU11BjkPVmtrKm6AayQfCHqJoT\n"
+        "ZIECIQDW0BoMoL0HOYM/mrTLhaykYAVqgIeJsPjvkEhTFXWBuQIhAM3deFAvWNu4\n"
+        "nklUQ37XsCT2c9tmNt1LAT+slG2JOTTRAiAuXDtC/m3NYVwyHfFm+zKHRzHkClk2\n"
+        "HjubeEgjpj32AQIhAJqMGTaZVOwevTXvvHwNEH+vRWsAYU/gbx+OQB+7VOcBAiEA\n"
+        "oolb6NMg/R3enNPvS1O4UU1H8wpaF77L4yiSWlE0p4w=\n"
+        "-----END RSA PRIVATE KEY-----\n")
+
+
+    def getServerContext(self):
+        """
+        Return a new SSL context suitable for use in a test server.
+        """
+        cert = PrivateCertificate.load(
+            self._certificateText,
+            KeyPair.load(self._privateKeyText, FILETYPE_PEM),
+            FILETYPE_PEM)
+        return cert.options()
+
+
+    def getClientContext(self):
+        return ClientContextFactory()
+
+
+
+@implementer(IStreamClientEndpoint)
+class StartTLSClientEndpoint(object):
+    """
+    An endpoint which wraps another one and adds a TLS layer immediately when
+    connections are set up.
+
+    @ivar wrapped: An L{IStreamClientEndpoint} provider which will be used to
+        really set up connections.
+
+    @ivar contextFactory: A L{ContextFactory} to use to do TLS.
+    """
+
+    def __init__(self, wrapped, contextFactory):
+        self.wrapped = wrapped
+        self.contextFactory = contextFactory
+
+
+    def connect(self, factory):
+        """
+        Establish a connection using a protocol built by C{factory} and
+        immediately start TLS on it.  Return a L{Deferred} which fires with the
+        protocol instance.
+        """
+        # This would be cleaner when we have ITransport.switchProtocol, which
+        # will be added with ticket #3204:
+        class WrapperFactory(ServerFactory):
+            def buildProtocol(wrapperSelf, addr):
+                protocol = factory.buildProtocol(addr)
+                def connectionMade(orig=protocol.connectionMade):
+                    protocol.transport.startTLS(self.contextFactory)
+                    orig()
+                protocol.connectionMade = connectionMade
+                return protocol
+
+        return self.wrapped.connect(WrapperFactory())
+
+
+
+class StartTLSClientCreator(EndpointCreator, ContextGeneratingMixin):
+    """
+    Create L{ITLSTransport.startTLS} endpoint for the client, and normal SSL
+    for server just because it's easier.
+    """
+    def server(self, reactor):
+        """
+        Construct an SSL server endpoint.  This should be constructing a TCP
+        server endpoint which immediately calls C{startTLS} instead, but that
+        is hard.
+        """
+        return SSL4ServerEndpoint(reactor, 0, self.getServerContext())
+
+
+    def client(self, reactor, serverAddress):
+        """
+        Construct a TCP client endpoint wrapped to immediately start TLS.
+        """
+        return StartTLSClientEndpoint(
+            TCP4ClientEndpoint(
+                reactor, '127.0.0.1', serverAddress.port),
+            ClientContextFactory())
+
+
+
+class BadContextTestsMixin(object):
+    """
+    Mixin for L{ReactorBuilder} subclasses which defines a helper for testing
+    the handling of broken context factories.
+    """
+    def _testBadContext(self, useIt):
+        """
+        Assert that the exception raised by a broken context factory's
+        C{getContext} method is raised by some reactor method.  If it is not, an
+        exception will be raised to fail the test.
+
+        @param useIt: A two-argument callable which will be called with a
+            reactor and a broken context factory and which is expected to raise
+            the same exception as the broken context factory's C{getContext}
+            method.
+        """
+        reactor = self.buildReactor()
+        exc = self.assertRaises(
+            ValueError, useIt, reactor, BrokenContextFactory())
+        self.assertEqual(BrokenContextFactory.message, str(exc))
+
+
+
+class StartTLSClientTestsMixin(TLSMixin, ReactorBuilder, ConnectionTestsMixin):
+    """
+    Tests for TLS connections established using L{ITLSTransport.startTLS} (as
+    opposed to L{IReactorSSL.connectSSL} or L{IReactorSSL.listenSSL}).
+    """
+    endpoints = StartTLSClientCreator()
+
+
+
+class SSLCreator(EndpointCreator, ContextGeneratingMixin):
+    """
+    Create SSL endpoints.
+    """
+    def server(self, reactor):
+        """
+        Create an SSL server endpoint on a TCP/IP-stack allocated port.
+        """
+        return SSL4ServerEndpoint(reactor, 0, self.getServerContext())
+
+
+    def client(self, reactor, serverAddress):
+        """
+        Create an SSL client endpoint which will connect to localhost on
+        the port given by C{serverAddress}.
+
+        @type serverAddress: L{IPv4Address}
+        """
+        return SSL4ClientEndpoint(
+            reactor, '127.0.0.1', serverAddress.port,
+            ClientContextFactory())
+
+
+class SSLClientTestsMixin(TLSMixin, ReactorBuilder, ContextGeneratingMixin,
+                          ConnectionTestsMixin, BadContextTestsMixin):
+    """
+    Mixin defining tests relating to L{ITLSTransport}.
+    """
+    endpoints = SSLCreator()
+
+    def test_badContext(self):
+        """
+        If the context factory passed to L{IReactorSSL.connectSSL} raises an
+        exception from its C{getContext} method, that exception is raised by
+        L{IReactorSSL.connectSSL}.
+        """
+        def useIt(reactor, contextFactory):
+            return reactor.connectSSL(
+                "127.0.0.1", 1234, ClientFactory(), contextFactory)
+        self._testBadContext(useIt)
+
+
+    def test_disconnectAfterWriteAfterStartTLS(self):
+        """
+        L{ITCPTransport.loseConnection} ends a connection which was set up with
+        L{ITLSTransport.startTLS} and which has recently been written to.  This
+        is intended to verify that a socket send error masked by the TLS
+        implementation doesn't prevent the connection from being reported as
+        closed.
+        """
+        class ShortProtocol(Protocol):
+            def connectionMade(self):
+                if not ITLSTransport.providedBy(self.transport):
+                    # Functionality isn't available to be tested.
+                    finished = self.factory.finished
+                    self.factory.finished = None
+                    finished.errback(SkipTest("No ITLSTransport support"))
+                    return
+
+                # Switch the transport to TLS.
+                self.transport.startTLS(self.factory.context)
+                # Force TLS to really get negotiated.  If nobody talks, nothing
+                # will happen.
+                self.transport.write(b"x")
+
+            def dataReceived(self, data):
+                # Stuff some bytes into the socket.  This mostly has the effect
+                # of causing the next write to fail with ENOTCONN or EPIPE.
+                # With the pyOpenSSL implementation of ITLSTransport, the error
+                # is swallowed outside of the control of Twisted.
+                self.transport.write(b"y")
+                # Now close the connection, which requires a TLS close alert to
+                # be sent.
+                self.transport.loseConnection()
+
+            def connectionLost(self, reason):
+                # This is the success case.  The client and the server want to
+                # get here.
+                finished = self.factory.finished
+                if finished is not None:
+                    self.factory.finished = None
+                    finished.callback(reason)
+
+        reactor = self.buildReactor()
+
+        serverFactory = ServerFactory()
+        serverFactory.finished = Deferred()
+        serverFactory.protocol = ShortProtocol
+        serverFactory.context = self.getServerContext()
+
+        clientFactory = ClientFactory()
+        clientFactory.finished = Deferred()
+        clientFactory.protocol = ShortProtocol
+        clientFactory.context = self.getClientContext()
+        clientFactory.context.method = serverFactory.context.method
+
+        lostConnectionResults = []
+        finished = DeferredList(
+            [serverFactory.finished, clientFactory.finished],
+            consumeErrors=True)
+        def cbFinished(results):
+            lostConnectionResults.extend([results[0][1], results[1][1]])
+        finished.addCallback(cbFinished)
+
+        port = reactor.listenTCP(0, serverFactory, interface='127.0.0.1')
+        self.addCleanup(port.stopListening)
+
+        connector = reactor.connectTCP(
+            port.getHost().host, port.getHost().port, clientFactory)
+        self.addCleanup(connector.disconnect)
+
+        finished.addCallback(lambda ign: reactor.stop())
+        self.runReactor(reactor)
+        lostConnectionResults[0].trap(ConnectionClosed)
+        lostConnectionResults[1].trap(ConnectionClosed)
+
+
+
+class TLSPortTestsBuilder(TLSMixin, ContextGeneratingMixin,
+                          ObjectModelIntegrationMixin, BadContextTestsMixin,
+                          StreamTransportTestsMixin, ReactorBuilder):
+    """
+    Tests for L{IReactorSSL.listenSSL}
+    """
+    def getListeningPort(self, reactor, factory):
+        """
+        Get a TLS port from a reactor.
+        """
+        return reactor.listenSSL(0, factory, self.getServerContext())
+
+
+    def getExpectedStartListeningLogMessage(self, port, factory):
+        """
+        Get the message expected to be logged when a TLS port starts listening.
+        """
+        return "%s (TLS) starting on %d" % (factory, port.getHost().port)
+
+
+    def getExpectedConnectionLostLogMsg(self, port):
+        """
+        Get the expected connection lost message for a TLS port.
+        """
+        return "(TLS Port %s Closed)" % (port.getHost().port,)
+
+
+    def test_badContext(self):
+        """
+        If the context factory passed to L{IReactorSSL.listenSSL} raises an
+        exception from its C{getContext} method, that exception is raised by
+        L{IReactorSSL.listenSSL}.
+        """
+        def useIt(reactor, contextFactory):
+            return reactor.listenSSL(0, ServerFactory(), contextFactory)
+        self._testBadContext(useIt)
+
+
+
+globals().update(SSLClientTestsMixin.makeTestCaseClasses())
+globals().update(StartTLSClientTestsMixin.makeTestCaseClasses())
+globals().update(TLSPortTestsBuilder().makeTestCaseClasses())
+
+
+
+class AbortSSLConnectionTest(ReactorBuilder, AbortConnectionMixin, ContextGeneratingMixin):
+    """
+    C{abortConnection} tests using SSL.
+    """
+    requiredInterfaces = (IReactorSSL,)
+    endpoints = SSLCreator()
+
+    def buildReactor(self):
+        reactor = ReactorBuilder.buildReactor(self)
+        try:
+            from twisted.protocols import tls
+        except ImportError:
+            return reactor
+
+        # Patch twisted.protocols.tls to use this reactor, until we get
+        # around to fixing #5206, or the TLS code uses an explicit reactor:
+        cooperator = Cooperator(
+            scheduler=lambda x: reactor.callLater(0.00001, x))
+        self.patch(tls, "cooperate", cooperator.cooperate)
+        return reactor
+
+
+    def setUp(self):
+        if FILETYPE_PEM is None:
+            raise SkipTest("OpenSSL not available.")
+
+globals().update(AbortSSLConnectionTest.makeTestCaseClasses())
+
+class OldTLSDeprecationTest(TestCase):
+    """
+    Tests for the deprecation of L{twisted.internet._oldtls}, the implementation
+    module for L{IReactorSSL} used when only an old version of pyOpenSSL is
+    available.
+    """
+    if _PY3:
+        skip = "_oldtls not supported on Python 3."
+
+    def test_warning(self):
+        """
+        The use of L{twisted.internet._oldtls} is deprecated, and emits a
+        L{DeprecationWarning}.
+        """
+        # Since _oldtls depends on OpenSSL, just skip this test if it isn't
+        # installed on the system.  Faking it would be error prone.
+        try:
+            import OpenSSL
+        except ImportError:
+            raise SkipTest("OpenSSL not available.")
+
+        # Change the apparent version of OpenSSL to one for which support is
+        # deprecated, and have it change back again after the test.
+        self.patch(OpenSSL, '__version__', '0.5')
+
+        # If the module was already imported, the import statement below won't
+        # execute its top-level code.  Take it out of sys.modules so the import
+        # system re-evaluates it.  Arrange to put the original back afterwards.
+        # Also handle the case where it hasn't yet been imported.
+        try:
+            oldtls = sys.modules['twisted.internet._oldtls']
+        except KeyError:
+            self.addCleanup(sys.modules.pop, 'twisted.internet._oldtls')
+        else:
+            del sys.modules['twisted.internet._oldtls']
+            self.addCleanup(
+                operator.setitem, sys.modules, 'twisted.internet._oldtls',
+                oldtls)
+
+        # The actual test.
+        import twisted.internet._oldtls
+        warnings = self.flushWarnings()
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "Support for pyOpenSSL 0.5 is deprecated.  "
+            "Upgrade to pyOpenSSL 0.10 or newer.")
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_udp.py b/ThirdParty/Twisted/twisted/internet/test/test_udp.py
new file mode 100644
index 0000000..97b58e4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_udp.py
@@ -0,0 +1,218 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorUDP}.
+"""
+
+from __future__ import division, absolute_import
+
+__metaclass__ = type
+
+from socket import SOCK_DGRAM
+
+from zope.interface import implementer
+from zope.interface.verify import verifyObject
+
+from twisted.python import context
+from twisted.python.log import ILogContext, err
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.internet.defer import Deferred, maybeDeferred
+from twisted.internet.interfaces import (
+    ILoggingContext, IListeningPort, IReactorUDP)
+from twisted.internet.address import IPv4Address
+from twisted.internet.protocol import DatagramProtocol
+
+from twisted.internet.test.connectionmixins import (LogObserverMixin,
+                                                    findFreePort)
+
+
+class UDPPortMixin(object):
+    def getListeningPort(self, reactor, protocol):
+        """
+        Get a UDP port from a reactor.
+        """
+        return reactor.listenUDP(0, protocol)
+
+
+    def getExpectedStartListeningLogMessage(self, port, protocol):
+        """
+        Get the message expected to be logged when a UDP port starts listening.
+        """
+        return "%s starting on %d" % (protocol, port.getHost().port)
+
+
+    def getExpectedConnectionLostLogMessage(self, port):
+        """
+        Get the expected connection lost message for a UDP port.
+        """
+        return "(UDP Port %s Closed)" % (port.getHost().port,)
+
+
+
+class DatagramTransportTestsMixin(LogObserverMixin):
+    """
+    Mixin defining tests which apply to any port/datagram based transport.
+    """
+    def test_startedListeningLogMessage(self):
+        """
+        When a port starts, a message including a description of the associated
+        protocol is logged.
+        """
+        loggedMessages = self.observe()
+        reactor = self.buildReactor()
+
+        @implementer(ILoggingContext)
+        class SomeProtocol(DatagramProtocol):
+            def logPrefix(self):
+                return "Crazy Protocol"
+        protocol = SomeProtocol()
+
+        p = self.getListeningPort(reactor, protocol)
+        expectedMessage = self.getExpectedStartListeningLogMessage(
+            p, "Crazy Protocol")
+        self.assertEqual((expectedMessage,), loggedMessages[0]['message'])
+
+
+    def test_connectionLostLogMessage(self):
+        """
+        When a connection is lost, an informative message should be logged (see
+        L{getExpectedConnectionLostLogMessage}): an address identifying the port
+        and the fact that it was closed.
+        """
+        loggedMessages = self.observe()
+        reactor = self.buildReactor()
+        p = self.getListeningPort(reactor, DatagramProtocol())
+        expectedMessage = self.getExpectedConnectionLostLogMessage(p)
+
+        def stopReactor(ignored):
+            reactor.stop()
+
+        def doStopListening():
+            del loggedMessages[:]
+            maybeDeferred(p.stopListening).addCallback(stopReactor)
+
+        reactor.callWhenRunning(doStopListening)
+        self.runReactor(reactor)
+
+        self.assertEqual((expectedMessage,), loggedMessages[0]['message'])
+
+
+    def test_stopProtocolScheduling(self):
+        """
+        L{DatagramProtocol.stopProtocol} is called asynchronously (i.e., not
+        re-entrantly) when C{stopListening} is used to stop the datagram
+        transport.
+        """
+        class DisconnectingProtocol(DatagramProtocol):
+
+            started = False
+            stopped = False
+            inStartProtocol = False
+            stoppedInStart = False
+
+            def startProtocol(self):
+                self.started = True
+                self.inStartProtocol = True
+                self.transport.stopListening()
+                self.inStartProtocol = False
+
+            def stopProtocol(self):
+                self.stopped = True
+                self.stoppedInStart = self.inStartProtocol
+                reactor.stop()
+
+        reactor = self.buildReactor()
+        protocol = DisconnectingProtocol()
+        self.getListeningPort(reactor, protocol)
+        self.runReactor(reactor)
+
+        self.assertTrue(protocol.started)
+        self.assertTrue(protocol.stopped)
+        self.assertFalse(protocol.stoppedInStart)
+
+
+
+class UDPServerTestsBuilder(ReactorBuilder, UDPPortMixin,
+                            DatagramTransportTestsMixin):
+    """
+    Builder defining tests relating to L{IReactorUDP.listenUDP}.
+    """
+    requiredInterfaces = (IReactorUDP,)
+
+    def test_interface(self):
+        """
+        L{IReactorUDP.listenUDP} returns an object providing L{IListeningPort}.
+        """
+        reactor = self.buildReactor()
+        port = reactor.listenUDP(0, DatagramProtocol())
+        self.assertTrue(verifyObject(IListeningPort, port))
+
+
+    def test_getHost(self):
+        """
+        L{IListeningPort.getHost} returns an L{IPv4Address} giving a
+        dotted-quad of the IPv4 address the port is listening on as well as
+        the port number.
+        """
+        host, portNumber = findFreePort(type=SOCK_DGRAM)
+        reactor = self.buildReactor()
+        port = reactor.listenUDP(
+            portNumber, DatagramProtocol(), interface=host)
+        self.assertEqual(
+            port.getHost(), IPv4Address('UDP', host, portNumber))
+
+
+    def test_logPrefix(self):
+        """
+        Datagram transports implement L{ILoggingContext.logPrefix} to return a
+        message reflecting the protocol they are running.
+        """
+        class CustomLogPrefixDatagramProtocol(DatagramProtocol):
+            def __init__(self, prefix):
+                self._prefix = prefix
+                self.system = Deferred()
+
+            def logPrefix(self):
+                return self._prefix
+
+            def datagramReceived(self, bytes, addr):
+                if self.system is not None:
+                    system = self.system
+                    self.system = None
+                    system.callback(context.get(ILogContext)["system"])
+
+        reactor = self.buildReactor()
+        protocol = CustomLogPrefixDatagramProtocol("Custom Datagrams")
+        d = protocol.system
+        port = reactor.listenUDP(0, protocol)
+        address = port.getHost()
+
+        def gotSystem(system):
+            self.assertEqual("Custom Datagrams (UDP)", system)
+        d.addCallback(gotSystem)
+        d.addErrback(err)
+        d.addCallback(lambda ignored: reactor.stop())
+
+        port.write(b"some bytes", ('127.0.0.1', address.port))
+        self.runReactor(reactor)
+
+
+    def test_str(self):
+        """
+        C{str()} on the listening port object includes the port number.
+        """
+        reactor = self.buildReactor()
+        port = reactor.listenUDP(0, DatagramProtocol())
+        self.assertIn(str(port.getHost().port), str(port))
+
+
+    def test_repr(self):
+        """
+        C{repr()} on the listening port object includes the port number.
+        """
+        reactor = self.buildReactor()
+        port = reactor.listenUDP(0, DatagramProtocol())
+        self.assertIn(repr(port.getHost().port), str(port))
+
+globals().update(UDPServerTestsBuilder.makeTestCaseClasses())
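As a point of reference for the L{IReactorUDP} API these builders exercise, here is a minimal echo datagram protocol bound with C{reactor.listenUDP}. The sketch is illustrative only and not part of the upstream file; the class name is made up:

    from twisted.internet import reactor
    from twisted.internet.protocol import DatagramProtocol

    class Echo(DatagramProtocol):
        def datagramReceived(self, data, addr):
            # Send each datagram straight back to whoever sent it.
            self.transport.write(data, addr)

    port = reactor.listenUDP(0, Echo())            # 0 picks a free port
    print("listening on UDP port %d" % port.getHost().port)
    reactor.run()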
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_udp_internals.py b/ThirdParty/Twisted/twisted/internet/test/test_udp_internals.py
new file mode 100644
index 0000000..7a17a6f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_udp_internals.py
@@ -0,0 +1,167 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the internal implementation details of L{twisted.internet.udp}.
+"""
+
+from __future__ import division, absolute_import
+
+import socket
+
+from twisted.trial import unittest
+from twisted.internet.protocol import DatagramProtocol
+from twisted.internet import udp
+from twisted.python.runtime import platformType
+
+if platformType == 'win32':
+    from errno import WSAEWOULDBLOCK as EWOULDBLOCK
+    from errno import WSAECONNREFUSED as ECONNREFUSED
+else:
+    from errno import EWOULDBLOCK
+    from errno import ECONNREFUSED
+
+
+
+class StringUDPSocket(object):
+    """
+    A fake UDP socket object, which returns a fixed sequence of strings and/or
+    socket errors.  Useful for testing.
+
+    @ivar retvals: A C{list} containing either strings or C{socket.error}s.
+
+    @ivar connectedAddr: The address the socket is connected to.
+    """
+
+    def __init__(self, retvals):
+        self.retvals = retvals
+        self.connectedAddr = None
+
+
+    def connect(self, addr):
+        self.connectedAddr = addr
+
+
+    def recvfrom(self, size):
+        """
+        Return (or raise) the next value from C{self.retvals}.
+        """
+        ret = self.retvals.pop(0)
+        if isinstance(ret, socket.error):
+            raise ret
+        return ret, None
+
+
+
+class KeepReads(DatagramProtocol):
+    """
+    Accumulate reads in a list.
+    """
+
+    def __init__(self):
+        self.reads = []
+
+
+    def datagramReceived(self, data, addr):
+        self.reads.append(data)
+
+
+
+class ErrorsTestCase(unittest.SynchronousTestCase):
+    """
+    Error handling tests for C{udp.Port}.
+    """
+
+    def test_socketReadNormal(self):
+        """
+        Socket reads returning some good data followed by a socket error which
+        can be ignored cause reading to stop, and no log messages are logged.
+        """
+        # Add a fake error to the list of ignorables:
+        udp._sockErrReadIgnore.append(-7000)
+        self.addCleanup(udp._sockErrReadIgnore.remove, -7000)
+
+        protocol = KeepReads()
+        port = udp.Port(None, protocol)
+
+        # Normal result, no errors
+        port.socket = StringUDPSocket(
+            [b"result", b"123", socket.error(-7000), b"456",
+             socket.error(-7000)])
+        port.doRead()
+        # Read stops on error:
+        self.assertEqual(protocol.reads, [b"result", b"123"])
+        port.doRead()
+        self.assertEqual(protocol.reads, [b"result", b"123", b"456"])
+
+
+    def test_readImmediateError(self):
+        """
+        If the socket is unconnected, socket reads with an immediate
+        connection refusal are ignored, and reading stops. The protocol's
+        C{connectionRefused} method is not called.
+        """
+        # Add a fake error to the list of those that count as connection
+        # refused:
+        udp._sockErrReadRefuse.append(-6000)
+        self.addCleanup(udp._sockErrReadRefuse.remove, -6000)
+
+        protocol = KeepReads()
+        # Fail if connectionRefused is called:
+        protocol.connectionRefused = lambda: 1/0
+
+        port = udp.Port(None, protocol)
+
+        # Try an immediate "connection refused"
+        port.socket = StringUDPSocket([b"a", socket.error(-6000), b"b",
+                                       socket.error(EWOULDBLOCK)])
+        port.doRead()
+        # Read stops on error:
+        self.assertEqual(protocol.reads, [b"a"])
+        # Read again:
+        port.doRead()
+        self.assertEqual(protocol.reads, [b"a", b"b"])
+
+
+    def test_connectedReadImmediateError(self):
+        """
+        If the socket is connected, socket reads with an immediate
+        connection refusal are ignored, and reading stops. The protocol's
+        C{connectionRefused} method is called.
+        """
+        # Add a fake error to the list of those that count as connection
+        # refused:
+        udp._sockErrReadRefuse.append(-6000)
+        self.addCleanup(udp._sockErrReadRefuse.remove, -6000)
+
+        protocol = KeepReads()
+        refused = []
+        protocol.connectionRefused = lambda: refused.append(True)
+
+        port = udp.Port(None, protocol)
+        port.socket = StringUDPSocket([b"a", socket.error(-6000), b"b",
+                                       socket.error(EWOULDBLOCK)])
+        port.connect("127.0.0.1", 9999)
+
+        # Read stops on error:
+        port.doRead()
+        self.assertEqual(protocol.reads, [b"a"])
+        self.assertEqual(refused, [True])
+
+        # Read again:
+        port.doRead()
+        self.assertEqual(protocol.reads, [b"a", b"b"])
+        self.assertEqual(refused, [True])
+
+
+    def test_readUnknownError(self):
+        """
+        A socket read that fails with an unknown socket error raises that error.
+        """
+        protocol = KeepReads()
+        port = udp.Port(None, protocol)
+
+        # Some good data, followed by an unknown error
+        port.socket = StringUDPSocket([b"good", socket.error(-1337)])
+        self.assertRaises(socket.error, port.doRead)
+        self.assertEqual(protocol.reads, [b"good"])
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_unix.py b/ThirdParty/Twisted/twisted/internet/test/test_unix.py
new file mode 100644
index 0000000..fd76ecf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_unix.py
@@ -0,0 +1,559 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorUNIX}.
+"""
+
+from stat import S_IMODE
+from os import stat, close
+from tempfile import mktemp
+from socket import AF_INET, SOCK_STREAM, socket
+from pprint import pformat
+
+try:
+    from socket import AF_UNIX
+except ImportError:
+    AF_UNIX = None
+
+from zope.interface import implements
+from zope.interface.verify import verifyObject
+
+from twisted.python.log import addObserver, removeObserver, err
+from twisted.python.failure import Failure
+from twisted.python.hashlib import md5
+from twisted.python.runtime import platform
+from twisted.internet.interfaces import (
+    IConnector, IFileDescriptorReceiver, IReactorUNIX)
+from twisted.internet.error import ConnectionClosed, FileDescriptorOverrun
+from twisted.internet.address import UNIXAddress
+from twisted.internet.endpoints import UNIXServerEndpoint, UNIXClientEndpoint
+from twisted.internet.defer import Deferred, fail
+from twisted.internet.task import LoopingCall
+from twisted.internet import interfaces
+from twisted.internet.protocol import (
+    ServerFactory, ClientFactory, DatagramProtocol)
+from twisted.internet.test.reactormixins import ReactorBuilder
+from twisted.internet.test.test_core import ObjectModelIntegrationMixin
+from twisted.internet.test.test_tcp import StreamTransportTestsMixin
+from twisted.internet.test.connectionmixins import (
+    EndpointCreator, ConnectableProtocol, runProtocolsWithReactor,
+    ConnectionTestsMixin)
+
+try:
+    from twisted.python import sendmsg
+except ImportError:
+    sendmsgSkip = (
+        "sendmsg extension unavailable, extended UNIX features disabled")
+else:
+    sendmsgSkip = None
+
+
+class UNIXFamilyMixin:
+    """
+    Test-helper defining mixin for things related to AF_UNIX sockets.
+    """
+    def _modeTest(self, methodName, path, factory):
+        """
+        Assert that the mode of the created unix socket is set to the mode
+        specified to the reactor method.
+        """
+        mode = 0600
+        reactor = self.buildReactor()
+        unixPort = getattr(reactor, methodName)(path, factory, mode=mode)
+        unixPort.stopListening()
+        self.assertEqual(S_IMODE(stat(path).st_mode), mode)
+
+
+def _abstractPath(case):
+    """
+    Return a new, unique abstract namespace path to be listened on.
+    """
+    # Use the test case's mktemp to get something unique, but also squash it
+    # down to make sure it fits in the unix socket path limit (something around
+    # 110 bytes).
+    return md5(case.mktemp()).hexdigest()
+
+
+
+class UNIXCreator(EndpointCreator):
+    """
+    Create UNIX socket end points.
+    """
+    requiredInterfaces = (interfaces.IReactorUNIX,)
+
+    def server(self, reactor):
+        """
+        Construct a UNIX server endpoint.
+        """
+        # self.mktemp() often returns a path which is too long to be used.
+        path = mktemp(suffix='.sock', dir='.')
+        return UNIXServerEndpoint(reactor, path)
+
+
+    def client(self, reactor, serverAddress):
+        """
+        Construct a UNIX client endpoint.
+        """
+        return UNIXClientEndpoint(reactor, serverAddress.name)
+
+
+
+class SendFileDescriptor(ConnectableProtocol):
+    """
+    L{SendFileDescriptor} sends a file descriptor and optionally some
+    normal bytes and then closes its connection.
+
+    @ivar reason: The reason the connection was lost, after C{connectionLost}
+        is called.
+    """
+    reason = None
+
+    def __init__(self, fd, data):
+        """
+        @param fd: A C{int} giving a file descriptor to send over the
+            connection.
+
+        @param data: A C{str} giving data to send over the connection, or
+            C{None} if no data is to be sent.
+        """
+        self.fd = fd
+        self.data = data
+
+
+    def connectionMade(self):
+        """
+        Send C{self.fd} and, if it is not C{None}, C{self.data}.  Then close the
+        connection.
+        """
+        self.transport.sendFileDescriptor(self.fd)
+        if self.data:
+            self.transport.write(self.data)
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        ConnectableProtocol.connectionLost(self, reason)
+        self.reason = reason
+
+
+
+class ReceiveFileDescriptor(ConnectableProtocol):
+    """
+    L{ReceiveFileDescriptor} provides an API for waiting for file descriptors to
+    be received.
+
+    @ivar reason: The reason the connection was lost, after C{connectionLost}
+        is called.
+
+    @ivar waiting: A L{Deferred} which fires with a file descriptor once one is
+        received, or with a failure if the connection is lost with no descriptor
+        arriving.
+    """
+    implements(IFileDescriptorReceiver)
+
+    reason = None
+    waiting = None
+
+    def waitForDescriptor(self):
+        """
+        Return a L{Deferred} which will fire with the next file descriptor
+        received, or with a failure if the connection is or has already been
+        lost.
+        """
+        if self.reason is None:
+            self.waiting = Deferred()
+            return self.waiting
+        else:
+            return fail(self.reason)
+
+
+    def fileDescriptorReceived(self, descriptor):
+        """
+        Fire the waiting Deferred, initialized by C{waitForDescriptor}, with the
+        file descriptor just received.
+        """
+        self.waiting.callback(descriptor)
+        self.waiting = None
+
+
+    def dataReceived(self, data):
+        """
+        Fail the waiting Deferred, if it has not already been fired by
+        C{fileDescriptorReceived}.  The bytes sent along with a file descriptor
+        are guaranteed to be delivered to the protocol's C{dataReceived} method
+        only after the file descriptor has been delivered to the protocol's
+        C{fileDescriptorReceived}.
+        """
+        if self.waiting is not None:
+            self.waiting.errback(Failure(Exception(
+                        "Received bytes (%r) before descriptor." % (data,))))
+            self.waiting = None
+
+
+    def connectionLost(self, reason):
+        """
+        Fail the waiting Deferred, initialized by C{waitForDescriptor}, if there
+        is one.
+        """
+        ConnectableProtocol.connectionLost(self, reason)
+        if self.waiting is not None:
+            self.waiting.errback(reason)
+            self.waiting = None
+        self.reason = reason
+
+
+
+class UNIXTestsBuilder(UNIXFamilyMixin, ReactorBuilder, ConnectionTestsMixin):
+    """
+    Builder defining tests relating to L{IReactorUNIX}.
+    """
+    requiredInterfaces = (IReactorUNIX,)
+
+    endpoints = UNIXCreator()
+
+    def test_interface(self):
+        """
+        L{IReactorUNIX.connectUNIX} returns an object providing L{IConnector}.
+        """
+        reactor = self.buildReactor()
+        connector = reactor.connectUNIX(self.mktemp(), ClientFactory())
+        self.assertTrue(verifyObject(IConnector, connector))
+
+
+    def test_mode(self):
+        """
+        The UNIX socket created by L{IReactorUNIX.listenUNIX} is created with
+        the mode specified.
+        """
+        self._modeTest('listenUNIX', self.mktemp(), ServerFactory())
+
+
+    def test_listenOnLinuxAbstractNamespace(self):
+        """
+        On Linux, a UNIX socket path may begin with C{'\0'} to indicate a socket
+        in the abstract namespace.  L{IReactorUNIX.listenUNIX} accepts such a
+        path.
+        """
+        # Don't listen on a path longer than the maximum allowed.
+        path = _abstractPath(self)
+        reactor = self.buildReactor()
+        port = reactor.listenUNIX('\0' + path, ServerFactory())
+        self.assertEqual(port.getHost(), UNIXAddress('\0' + path))
+    if not platform.isLinux():
+        test_listenOnLinuxAbstractNamespace.skip = (
+            'Abstract namespace UNIX sockets only supported on Linux.')
+
+
+    def test_connectToLinuxAbstractNamespace(self):
+        """
+        L{IReactorUNIX.connectUNIX} also accepts a Linux abstract namespace
+        path.
+        """
+        path = _abstractPath(self)
+        reactor = self.buildReactor()
+        connector = reactor.connectUNIX('\0' + path, ClientFactory())
+        self.assertEqual(
+            connector.getDestination(), UNIXAddress('\0' + path))
+    if not platform.isLinux():
+        test_connectToLinuxAbstractNamespace.skip = (
+            'Abstract namespace UNIX sockets only supported on Linux.')
+
+
+    def test_addresses(self):
+        """
+        A client's transport's C{getHost} and C{getPeer} return L{UNIXAddress}
+        instances which have the filesystem path of the host and peer ends of
+        the connection.
+        """
+        class SaveAddress(ConnectableProtocol):
+            def makeConnection(self, transport):
+                self.addresses = dict(
+                    host=transport.getHost(), peer=transport.getPeer())
+                transport.loseConnection()
+
+        server = SaveAddress()
+        client = SaveAddress()
+
+        runProtocolsWithReactor(self, server, client, self.endpoints)
+
+        self.assertEqual(server.addresses['host'], client.addresses['peer'])
+        self.assertEqual(server.addresses['peer'], client.addresses['host'])
+
+
+    def test_sendFileDescriptor(self):
+        """
+        L{IUNIXTransport.sendFileDescriptor} accepts an integer file descriptor
+        and sends a copy of it to the process reading from the connection.
+        """
+        from socket import fromfd
+
+        s = socket()
+        s.bind(('', 0))
+        server = SendFileDescriptor(s.fileno(), "junk")
+
+        client = ReceiveFileDescriptor()
+        d = client.waitForDescriptor()
+        def checkDescriptor(descriptor):
+            received = fromfd(descriptor, AF_INET, SOCK_STREAM)
+            # Thanks for the free dup, fromfd()
+            close(descriptor)
+
+            # If the sockets have the same local address, they're probably the
+            # same.
+            self.assertEqual(s.getsockname(), received.getsockname())
+
+            # But it would be cheating for them to be identified by the same
+            # file descriptor.  The point was to get a copy, as we might get if
+            # there were two processes involved here.
+            self.assertNotEqual(s.fileno(), received.fileno())
+        d.addCallback(checkDescriptor)
+        d.addErrback(err, "Sending file descriptor encountered a problem")
+        d.addBoth(lambda ignored: server.transport.loseConnection())
+
+        runProtocolsWithReactor(self, server, client, self.endpoints)
+    if sendmsgSkip is not None:
+        test_sendFileDescriptor.skip = sendmsgSkip
+
+
+    def test_sendFileDescriptorTriggersPauseProducing(self):
+        """
+        If a L{IUNIXTransport.sendFileDescriptor} call fills up the send buffer,
+        any registered producer is paused.
+        """
+        class DoesNotRead(ConnectableProtocol):
+            def connectionMade(self):
+                self.transport.pauseProducing()
+
+        class SendsManyFileDescriptors(ConnectableProtocol):
+            paused = False
+
+            def connectionMade(self):
+                self.socket = socket()
+                self.transport.registerProducer(self, True)
+                def sender():
+                    self.transport.sendFileDescriptor(self.socket.fileno())
+                    self.transport.write("x")
+                self.task = LoopingCall(sender)
+                self.task.clock = self.transport.reactor
+                self.task.start(0).addErrback(err, "Send loop failure")
+
+            def stopProducing(self):
+                self._disconnect()
+
+            def resumeProducing(self):
+                self._disconnect()
+
+            def pauseProducing(self):
+                self.paused = True
+                self.transport.unregisterProducer()
+                self._disconnect()
+
+            def _disconnect(self):
+                self.task.stop()
+                self.transport.abortConnection()
+                self.other.transport.abortConnection()
+
+        server = SendsManyFileDescriptors()
+        client = DoesNotRead()
+        server.other = client
+        runProtocolsWithReactor(self, server, client, self.endpoints)
+
+        self.assertTrue(
+            server.paused, "sendFileDescriptor producer was not paused")
+    if sendmsgSkip is not None:
+        test_sendFileDescriptorTriggersPauseProducing.skip = sendmsgSkip
+
+
+    def test_fileDescriptorOverrun(self):
+        """
+        If L{IUNIXTransport.sendFileDescriptor} is used to queue a greater
+        number of file descriptors than the number of bytes sent using
+        L{ITransport.write}, the connection is closed and the protocol connected
+        to the transport has its C{connectionLost} method called with a failure
+        wrapping L{FileDescriptorOverrun}.
+        """
+        cargo = socket()
+        server = SendFileDescriptor(cargo.fileno(), None)
+
+        client = ReceiveFileDescriptor()
+        result = []
+        d = client.waitForDescriptor()
+        d.addBoth(result.append)
+        d.addBoth(lambda ignored: server.transport.loseConnection())
+
+        runProtocolsWithReactor(self, server, client, self.endpoints)
+
+        self.assertIsInstance(result[0], Failure)
+        result[0].trap(ConnectionClosed)
+        self.assertIsInstance(server.reason.value, FileDescriptorOverrun)
+    if sendmsgSkip is not None:
+        test_fileDescriptorOverrun.skip = sendmsgSkip
+
+
+    def test_avoidLeakingFileDescriptors(self):
+        """
+        If associated with a protocol which does not provide
+        L{IFileDescriptorReceiver}, file descriptors received by the
+        L{IUNIXTransport} implementation are closed and a warning is emitted.
+        """
+        # To verify this, establish a connection.  Send one end of the
+        # connection over the IUNIXTransport implementation.  After the copy
+        # should no longer exist, close the original.  If the opposite end of
+        # the connection decides the connection is closed, the copy does not
+        # exist.
+        from socket import socketpair
+        probeClient, probeServer = socketpair()
+
+        events = []
+        addObserver(events.append)
+        self.addCleanup(removeObserver, events.append)
+
+        class RecordEndpointAddresses(SendFileDescriptor):
+            def connectionMade(self):
+                self.hostAddress = self.transport.getHost()
+                self.peerAddress = self.transport.getPeer()
+                SendFileDescriptor.connectionMade(self)
+
+        server = RecordEndpointAddresses(probeClient.fileno(), "junk")
+        client = ConnectableProtocol()
+
+        runProtocolsWithReactor(self, server, client, self.endpoints)
+
+        # Get rid of the original reference to the socket.
+        probeClient.close()
+
+        # A non-blocking recv will return "" if the connection is closed, as
+        # desired.  If the connection has not been closed, because the duplicate
+        # file descriptor is still open, it will fail with EAGAIN instead.
+        probeServer.setblocking(False)
+        self.assertEqual("", probeServer.recv(1024))
+
+        # This is a surprising circumstance, so it should be logged.
+        format = (
+            "%(protocolName)s (on %(hostAddress)r) does not "
+            "provide IFileDescriptorReceiver; closing file "
+            "descriptor received (from %(peerAddress)r).")
+        clsName = "ConnectableProtocol"
+
+        # Reverse host and peer, since the log event is from the client
+        # perspective.
+        expectedEvent = dict(hostAddress=server.peerAddress,
+                             peerAddress=server.hostAddress,
+                             protocolName=clsName,
+                             format=format)
+
+        for logEvent in events:
+            for k, v in expectedEvent.iteritems():
+                if v != logEvent.get(k):
+                    break
+            else:
+                # No mismatches were found, stop looking at events
+                break
+        else:
+            # No fully matching events were found, fail the test.
+            self.fail(
+                "Expected event (%s) not found in logged events (%s)" % (
+                    expectedEvent, pformat(events,)))
+    if sendmsgSkip is not None:
+        test_avoidLeakingFileDescriptors.skip = sendmsgSkip
+
+
+    def test_descriptorDeliveredBeforeBytes(self):
+        """
+        L{IUNIXTransport.sendFileDescriptor} sends file descriptors before
+        L{ITransport.write} sends normal bytes.
+        """
+        class RecordEvents(ConnectableProtocol):
+            implements(IFileDescriptorReceiver)
+
+            def connectionMade(self):
+                ConnectableProtocol.connectionMade(self)
+                self.events = []
+
+            def fileDescriptorReceived(innerSelf, descriptor):
+                self.addCleanup(close, descriptor)
+                innerSelf.events.append(type(descriptor))
+
+            def dataReceived(self, data):
+                self.events.extend(data)
+
+        cargo = socket()
+        server = SendFileDescriptor(cargo.fileno(), "junk")
+        client = RecordEvents()
+
+        runProtocolsWithReactor(self, server, client, self.endpoints)
+
+        self.assertEqual([int, "j", "u", "n", "k"], client.events)
+    if sendmsgSkip is not None:
+        test_descriptorDeliveredBeforeBytes.skip = sendmsgSkip
+
+
+
+class UNIXDatagramTestsBuilder(UNIXFamilyMixin, ReactorBuilder):
+    """
+    Builder defining tests relating to L{IReactorUNIXDatagram}.
+    """
+    requiredInterfaces = (interfaces.IReactorUNIXDatagram,)
+
+    # There's no corresponding test_connectMode because the mode parameter to
+    # connectUNIXDatagram has been completely ignored since that API was first
+    # introduced.
+    def test_listenMode(self):
+        """
+        The UNIX socket created by L{IReactorUNIXDatagram.listenUNIXDatagram}
+        is created with the mode specified.
+        """
+        self._modeTest('listenUNIXDatagram', self.mktemp(), DatagramProtocol())
+
+
+    def test_listenOnLinuxAbstractNamespace(self):
+        """
+        On Linux, a UNIX socket path may begin with C{'\0'} to indicate a socket
+        in the abstract namespace.  L{IReactorUNIXDatagram.listenUNIXDatagram}
+        such a path.
+        """
+        path = _abstractPath(self)
+        reactor = self.buildReactor()
+        port = reactor.listenUNIXDatagram('\0' + path, DatagramProtocol())
+        self.assertEqual(port.getHost(), UNIXAddress('\0' + path))
+    if not platform.isLinux():
+        test_listenOnLinuxAbstractNamespace.skip = (
+            'Abstract namespace UNIX sockets only supported on Linux.')
+
+
+
+class UNIXPortTestsBuilder(ReactorBuilder, ObjectModelIntegrationMixin,
+                           StreamTransportTestsMixin):
+    """
+    Tests for L{IReactorUNIX.listenUNIX}
+    """
+    requiredInterfaces = (interfaces.IReactorUNIX,)
+
+    def getListeningPort(self, reactor, factory):
+        """
+        Get a UNIX port from a reactor.
+        """
+        # self.mktemp() often returns a path which is too long to be used.
+        path = mktemp(suffix='.sock', dir='.')
+        return reactor.listenUNIX(path, factory)
+
+
+    def getExpectedStartListeningLogMessage(self, port, factory):
+        """
+        Get the message expected to be logged when a UNIX port starts listening.
+        """
+        return "%s starting on %r" % (factory, port.getHost().name)
+
+
+    def getExpectedConnectionLostLogMsg(self, port):
+        """
+        Get the expected connection lost message for a UNIX port.
+        """
+        return "(UNIX Port %s Closed)" % (repr(port.port),)
+
+
+
+globals().update(UNIXTestsBuilder.makeTestCaseClasses())
+globals().update(UNIXDatagramTestsBuilder.makeTestCaseClasses())
+globals().update(UNIXPortTestsBuilder.makeTestCaseClasses())
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_utilspy3.py b/ThirdParty/Twisted/twisted/internet/test/test_utilspy3.py
new file mode 100644
index 0000000..ed67f10
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_utilspy3.py
@@ -0,0 +1,92 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet._utilspy3}.
+"""
+
+from __future__ import division, absolute_import
+
+import warnings
+
+from twisted.trial import unittest
+from twisted.internet import _utilspy3 as utils
+from twisted.internet.defer import Deferred
+from twisted.python.test.test_utilpy3 import SuppressedWarningsTests
+
+class SuppressWarningsTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{utils.suppressWarnings}.
+    """
+    def test_suppressWarnings(self):
+        """
+        L{utils.suppressWarnings} decorates a function so that the given
+        warnings are suppressed.
+        """
+        result = []
+        def showwarning(self, *a, **kw):
+            result.append((a, kw))
+        self.patch(warnings, "showwarning", showwarning)
+
+        def f(msg):
+            warnings.warn(msg)
+        g = utils.suppressWarnings(f, (('ignore',), dict(message="This is message")))
+
+        # Start off with a sanity check - calling the original function
+        # should emit the warning.
+        f("Sanity check message")
+        self.assertEqual(len(result), 1)
+
+        # Now that that's out of the way, call the wrapped function, and
+        # make sure no new warnings show up.
+        g("This is message")
+        self.assertEqual(len(result), 1)
+
+        # Finally, emit another warning which should not be ignored, and
+        # make sure it is not.
+        g("Unignored message")
+        self.assertEqual(len(result), 2)
+
+
+
+class DeferredSuppressedWarningsTests(SuppressedWarningsTests):
+    """
+    Tests for L{utils.runWithWarningsSuppressed}, the version that supports
+    Deferreds.
+    """
+    # Override the non-Deferred-supporting function from the base class with
+    # the function we are testing in this class:
+    runWithWarningsSuppressed = staticmethod(utils.runWithWarningsSuppressed)
+
+    def test_deferredCallback(self):
+        """
+        If the function called by L{utils.runWithWarningsSuppressed} returns a
+        C{Deferred}, the warning filters aren't removed until the Deferred
+        fires.
+        """
+        filters = [(("ignore", ".*foo.*"), {}),
+                   (("ignore", ".*bar.*"), {})]
+        result = Deferred()
+        self.runWithWarningsSuppressed(filters, lambda: result)
+        warnings.warn("ignore foo")
+        result.callback(3)
+        warnings.warn("ignore foo 2")
+        self.assertEqual(
+            ["ignore foo 2"], [w['message'] for w in self.flushWarnings()])
+
+    def test_deferredErrback(self):
+        """
+        If the function called by L{utils.runWithWarningsSuppressed} returns a
+        C{Deferred}, the warning filters aren't removed until the Deferred
+        fires with an errback.
+        """
+        filters = [(("ignore", ".*foo.*"), {}),
+                   (("ignore", ".*bar.*"), {})]
+        result = Deferred()
+        d = self.runWithWarningsSuppressed(filters, lambda: result)
+        warnings.warn("ignore foo")
+        result.errback(ZeroDivisionError())
+        d.addErrback(lambda f: f.trap(ZeroDivisionError))
+        warnings.warn("ignore foo 2")
+        self.assertEqual(
+            ["ignore foo 2"], [w['message'] for w in self.flushWarnings()])
diff --git a/ThirdParty/Twisted/twisted/internet/test/test_win32events.py b/ThirdParty/Twisted/twisted/internet/test/test_win32events.py
new file mode 100644
index 0000000..f126c60
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/test/test_win32events.py
@@ -0,0 +1,200 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorWin32Events}.
+"""
+
+from thread import get_ident
+
+try:
+    import win32event
+except ImportError:
+    win32event = None
+
+from zope.interface.verify import verifyObject
+
+from twisted.python.failure import Failure
+from twisted.python.threadable import isInIOThread
+from twisted.internet.interfaces import IReactorWin32Events
+from twisted.internet.defer import Deferred
+from twisted.internet.test.reactormixins import ReactorBuilder
+
+
+class Listener(object):
+    """
+    L{Listener} is an object that can be added to a L{IReactorWin32Events}
+    reactor to receive callback notification when a Windows event is set.  It
+    records what thread its callback is invoked in and fires a Deferred.
+
+    @ivar success: A flag which is set to C{True} when the event callback is
+        called.
+
+    @ivar logThreadID: The id of the thread in which the C{logPrefix} method is
+        called.
+
+    @ivar eventThreadID: The id of the thread in which the event callback is
+        called.
+
+    @ivar connLostThreadID: The id of the thread in which the C{connectionLost}
+        method is called.
+
+    @ivar _finished: The L{Deferred} which will be fired when the event callback
+        is called.
+    """
+    success = False
+    logThreadID = eventThreadID = connLostThreadID = None
+
+    def __init__(self, finished):
+        self._finished = finished
+
+
+    def logPrefix(self):
+        self.logThreadID = get_ident()
+        return 'Listener'
+
+
+    def occurred(self):
+        self.success = True
+        self.eventThreadID = get_ident()
+        self._finished.callback(None)
+
+
+    def brokenOccurred(self):
+        raise RuntimeError("Some problem")
+
+
+    def returnValueOccurred(self):
+        return EnvironmentError("Entirely different problem")
+
+
+    def connectionLost(self, reason):
+        self.connLostThreadID = get_ident()
+        self._finished.errback(reason)
+
+
+
+class Win32EventsTestsBuilder(ReactorBuilder):
+    """
+    Builder defining tests relating to L{IReactorWin32Events}.
+    """
+    requiredInterfaces = [IReactorWin32Events]
+
+    def test_interface(self):
+        """
+        An instance of the reactor has all of the methods defined on
+        L{IReactorWin32Events}.
+        """
+        reactor = self.buildReactor()
+        verifyObject(IReactorWin32Events, reactor)
+
+
+    def test_addEvent(self):
+        """
+        When an event which has been added to the reactor is set, the action
+        associated with the event is invoked in the reactor thread.
+        """
+        reactorThreadID = get_ident()
+        reactor = self.buildReactor()
+        event = win32event.CreateEvent(None, False, False, None)
+        finished = Deferred()
+        finished.addCallback(lambda ignored: reactor.stop())
+        listener = Listener(finished)
+        reactor.addEvent(event, listener, 'occurred')
+        reactor.callWhenRunning(win32event.SetEvent, event)
+        self.runReactor(reactor)
+        self.assertTrue(listener.success)
+        self.assertEqual(reactorThreadID, listener.logThreadID)
+        self.assertEqual(reactorThreadID, listener.eventThreadID)
+
+
+    def test_ioThreadDoesNotChange(self):
+        """
+        Using L{IReactorWin32Events.addEvent} does not change which thread is
+        reported as the I/O thread.
+        """
+        results = []
+        def check(ignored):
+            results.append(isInIOThread())
+            reactor.stop()
+        reactor = self.buildReactor()
+        event = win32event.CreateEvent(None, False, False, None)
+        finished = Deferred()
+        listener = Listener(finished)
+        finished.addCallback(check)
+        reactor.addEvent(event, listener, 'occurred')
+        reactor.callWhenRunning(win32event.SetEvent, event)
+        self.runReactor(reactor)
+        self.assertTrue(listener.success)
+        self.assertEqual([True], results)
+
+
+    def test_disconnectedOnError(self):
+        """
+        If the event handler raises an exception, the event is removed from the
+        reactor, the handler's C{connectionLost} method is called in the I/O
+        thread, and the exception is logged.
+        """
+        reactorThreadID = get_ident()
+        reactor = self.buildReactor()
+        event = win32event.CreateEvent(None, False, False, None)
+
+        result = []
+        finished = Deferred()
+        finished.addBoth(result.append)
+        finished.addBoth(lambda ignored: reactor.stop())
+
+        listener = Listener(finished)
+        reactor.addEvent(event, listener, 'brokenOccurred')
+        reactor.callWhenRunning(win32event.SetEvent, event)
+        self.runReactor(reactor)
+
+        self.assertIsInstance(result[0], Failure)
+        result[0].trap(RuntimeError)
+
+        self.assertEqual(reactorThreadID, listener.connLostThreadID)
+        self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError)))
+
+
+    def test_disconnectOnReturnValue(self):
+        """
+        If the event handler returns a value, the event is removed from the
+        reactor and the handler's C{connectionLost} method is called in the I/O
+        thread.
+        """
+        reactorThreadID = get_ident()
+        reactor = self.buildReactor()
+        event = win32event.CreateEvent(None, False, False, None)
+
+        result = []
+        finished = Deferred()
+        finished.addBoth(result.append)
+        finished.addBoth(lambda ignored: reactor.stop())
+
+        listener = Listener(finished)
+        reactor.addEvent(event, listener, 'returnValueOccurred')
+        reactor.callWhenRunning(win32event.SetEvent, event)
+        self.runReactor(reactor)
+
+        self.assertIsInstance(result[0], Failure)
+        result[0].trap(EnvironmentError)
+
+        self.assertEqual(reactorThreadID, listener.connLostThreadID)
+
+
+    def test_notDisconnectedOnShutdown(self):
+        """
+        Event handlers added with L{IReactorWin32Events.addEvent} do not have
+        C{connectionLost} called on them if they are still active when the
+        reactor shuts down.
+        """
+        reactor = self.buildReactor()
+        event = win32event.CreateEvent(None, False, False, None)
+        finished = Deferred()
+        listener = Listener(finished)
+        reactor.addEvent(event, listener, 'occurred')
+        reactor.callWhenRunning(reactor.stop)
+        self.runReactor(reactor)
+        self.assertIdentical(None, listener.connLostThreadID)
+
+globals().update(Win32EventsTestsBuilder.makeTestCaseClasses())
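
Outside of the test harness, the pattern these tests exercise looks roughly like the
following sketch, assuming a reactor that actually provides IReactorWin32Events (the
Handler class and its method names are illustrative, mirroring the Listener above):

    import win32event
    from twisted.internet import reactor

    class Handler:
        def logPrefix(self):
            return "Handler"
        def occurred(self):
            # called in the reactor thread once the event is signalled
            print("event was signalled")
            reactor.stop()
        def connectionLost(self, reason):
            pass

    event = win32event.CreateEvent(None, False, False, None)
    reactor.addEvent(event, Handler(), "occurred")
    reactor.callWhenRunning(win32event.SetEvent, event)
    reactor.run()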
diff --git a/ThirdParty/Twisted/twisted/internet/threads.py b/ThirdParty/Twisted/twisted/internet/threads.py
new file mode 100644
index 0000000..8852d00
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/threads.py
@@ -0,0 +1,127 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Extended thread dispatching support.
+
+For basic support see reactor threading API docs.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.python.compat import _PY3
+if not _PY3:
+    import Queue
+else:
+    import queue as Queue
+
+from twisted.python import failure
+from twisted.internet import defer
+
+
+def deferToThreadPool(reactor, threadpool, f, *args, **kwargs):
+    """
+    Call the function C{f} using a thread from the given threadpool and return
+    the result as a Deferred.
+
+    This function is only used by client code which is maintaining its own
+    threadpool.  To run a function in the reactor's threadpool, use
+    C{deferToThread}.
+
+    @param reactor: The reactor in whose main thread the Deferred will be
+        invoked.
+
+    @param threadpool: An object which supports the C{callInThreadWithCallback}
+        method of C{twisted.python.threadpool.ThreadPool}.
+
+    @param f: The function to call.
+    @param *args: positional arguments to pass to f.
+    @param **kwargs: keyword arguments to pass to f.
+
+    @return: A Deferred which fires a callback with the result of f, or an
+        errback with a L{twisted.python.failure.Failure} if f throws an
+        exception.
+    """
+    d = defer.Deferred()
+
+    def onResult(success, result):
+        if success:
+            reactor.callFromThread(d.callback, result)
+        else:
+            reactor.callFromThread(d.errback, result)
+
+    threadpool.callInThreadWithCallback(onResult, f, *args, **kwargs)
+
+    return d
+
+
+def deferToThread(f, *args, **kwargs):
+    """
+    Run a function in a thread and return the result as a Deferred.
+
+    @param f: The function to call.
+    @param *args: positional arguments to pass to f.
+    @param **kwargs: keyword arguments to pass to f.
+
+    @return: A Deferred which fires a callback with the result of f,
+    or an errback with a L{twisted.python.failure.Failure} if f throws
+    an exception.
+    """
+    from twisted.internet import reactor
+    return deferToThreadPool(reactor, reactor.getThreadPool(),
+                             f, *args, **kwargs)
+
+
+def _runMultiple(tupleList):
+    """
+    Run a list of functions.
+    """
+    for f, args, kwargs in tupleList:
+        f(*args, **kwargs)
+
+
+def callMultipleInThread(tupleList):
+    """
+    Run a list of functions in the same thread.
+
+    tupleList should be a list of (function, argsList, kwargsDict) tuples.
+    """
+    from twisted.internet import reactor
+    reactor.callInThread(_runMultiple, tupleList)
+
+
+def blockingCallFromThread(reactor, f, *a, **kw):
+    """
+    Run a function in the reactor from a thread, and wait for the result
+    synchronously.  If the function returns a L{Deferred}, wait for its
+    result and return that.
+
+    @param reactor: The L{IReactorThreads} provider which will be used to
+        schedule the function call.
+    @param f: the callable to run in the reactor thread
+    @type f: any callable.
+    @param a: the arguments to pass to C{f}.
+    @param kw: the keyword arguments to pass to C{f}.
+
+    @return: the result of the L{Deferred} returned by C{f}, or the result
+        of C{f} if it returns anything other than a L{Deferred}.
+
+    @raise: If C{f} raises a synchronous exception,
+        C{blockingCallFromThread} will raise that exception.  If C{f}
+        returns a L{Deferred} which fires with a L{Failure},
+        C{blockingCallFromThread} will raise that failure's exception (see
+        L{Failure.raiseException}).
+    """
+    queue = Queue.Queue()
+    def _callFromThread():
+        result = defer.maybeDeferred(f, *a, **kw)
+        result.addBoth(queue.put)
+    reactor.callFromThread(_callFromThread)
+    result = queue.get()
+    if isinstance(result, failure.Failure):
+        result.raiseException()
+    return result
+
+
+__all__ = ["deferToThread", "deferToThreadPool", "callMultipleInThread",
+           "blockingCallFromThread"]
diff --git a/ThirdParty/Twisted/twisted/internet/tksupport.py b/ThirdParty/Twisted/twisted/internet/tksupport.py
new file mode 100644
index 0000000..ddec55e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/tksupport.py
@@ -0,0 +1,75 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+This module integrates Tkinter with twisted.internet's mainloop.
+
+Maintainer: Itamar Shtull-Trauring
+
+To use, do::
+
+    | tksupport.install(rootWidget)
+
+and then run your reactor as usual - do *not* call Tk's mainloop(),
+use Twisted's regular mechanism for running the event loop.
+
+Likewise, to stop your program you will need to stop Twisted's
+event loop. For example, if you want closing your root widget to
+stop Twisted::
+
+    | root.protocol('WM_DELETE_WINDOW', reactor.stop)
+
+When using Aqua Tcl/Tk on Mac OS X the standard Quit menu item in
+your application might become unresponsive without the additional
+fix::
+
+    | root.createcommand("::tk::mac::Quit", reactor.stop)
+
+@see: U{Tcl/TkAqua FAQ for more info<http://wiki.tcl.tk/12987>}
+"""
+
+# system imports
+import Tkinter, tkSimpleDialog, tkMessageBox
+
+# twisted imports
+from twisted.python import log
+from twisted.internet import task
+
+
+_task = None
+
+def install(widget, ms=10, reactor=None):
+    """Install a Tkinter.Tk() object into the reactor."""
+    installTkFunctions()
+    global _task
+    _task = task.LoopingCall(widget.update)
+    _task.start(ms / 1000.0, False)
+
+def uninstall():
+    """Remove the root Tk widget from the reactor.
+
+    Call this before destroy()ing the root widget.
+    """
+    global _task
+    _task.stop()
+    _task = None
+
+
+def installTkFunctions():
+    import twisted.python.util
+    twisted.python.util.getPassword = getPassword
+
+
+def getPassword(prompt = '', confirm = 0):
+    while 1:
+        try1 = tkSimpleDialog.askstring('Password Dialog', prompt, show='*')
+        if not confirm:
+            return try1
+        try2 = tkSimpleDialog.askstring('Password Dialog', 'Confirm Password', show='*')
+        if try1 == try2:
+            return try1
+        else:
+            tkMessageBox.showerror('Password Mismatch', 'Passwords did not match, starting over')
+
+__all__ = ["install", "uninstall"]
diff --git a/ThirdParty/Twisted/twisted/internet/udp.py b/ThirdParty/Twisted/twisted/internet/udp.py
new file mode 100644
index 0000000..8d3b864
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/udp.py
@@ -0,0 +1,348 @@
+# -*- test-case-name: twisted.test.test_udp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Various asynchronous UDP classes.
+
+Please do not use this module directly.
+
+@var _sockErrReadIgnore: list of symbolic error constants (from the C{errno}
+    module) representing socket errors where the error is temporary and can be
+    ignored.
+
+@var _sockErrReadRefuse: list of symbolic error constants (from the C{errno}
+    module) representing socket errors that indicate connection refused.
+"""
+
+from __future__ import division, absolute_import
+
+# System Imports
+import socket
+import operator
+import struct
+import warnings
+
+from zope.interface import implementer
+
+from twisted.python.runtime import platformType
+if platformType == 'win32':
+    from errno import WSAEWOULDBLOCK
+    from errno import WSAEINTR, WSAEMSGSIZE, WSAETIMEDOUT
+    from errno import WSAECONNREFUSED, WSAECONNRESET, WSAENETRESET
+    from errno import WSAEINPROGRESS
+
+    # Classify read and write errors
+    _sockErrReadIgnore = [WSAEINTR, WSAEWOULDBLOCK, WSAEMSGSIZE, WSAEINPROGRESS]
+    _sockErrReadRefuse = [WSAECONNREFUSED, WSAECONNRESET, WSAENETRESET,
+                          WSAETIMEDOUT]
+
+    # POSIX-compatible write errors
+    EMSGSIZE = WSAEMSGSIZE
+    ECONNREFUSED = WSAECONNREFUSED
+    EAGAIN = WSAEWOULDBLOCK
+    EINTR = WSAEINTR
+else:
+    from errno import EWOULDBLOCK, EINTR, EMSGSIZE, ECONNREFUSED, EAGAIN
+    _sockErrReadIgnore = [EAGAIN, EINTR, EWOULDBLOCK]
+    _sockErrReadRefuse = [ECONNREFUSED]
+
+# Twisted Imports
+from twisted.internet import base, defer, address
+from twisted.python import log, failure
+from twisted.internet import abstract, error, interfaces
+
+
+@implementer(
+    interfaces.IListeningPort, interfaces.IUDPTransport,
+    interfaces.ISystemHandle)
+class Port(base.BasePort):
+    """
+    UDP port, listening for packets.
+    """
+
+    addressFamily = socket.AF_INET
+    socketType = socket.SOCK_DGRAM
+    maxThroughput = 256 * 1024 # max bytes we read in one eventloop iteration
+
+    # Actual port number being listened on, only set to a non-None
+    # value when we are actually listening.
+    _realPortNumber = None
+
+    def __init__(self, port, proto, interface='', maxPacketSize=8192, reactor=None):
+        """
+        Initialize with a numeric port to listen on.
+        """
+        base.BasePort.__init__(self, reactor)
+        self.port = port
+        self.protocol = proto
+        self.maxPacketSize = maxPacketSize
+        self.interface = interface
+        self.setLogStr()
+        self._connectedAddr = None
+
+    def __repr__(self):
+        if self._realPortNumber is not None:
+            return "<%s on %s>" % (self.protocol.__class__, self._realPortNumber)
+        else:
+            return "<%s not connected>" % (self.protocol.__class__,)
+
+    def getHandle(self):
+        """
+        Return a socket object.
+        """
+        return self.socket
+
+    def startListening(self):
+        """
+        Create and bind my socket, and begin listening on it.
+
+        This is called on unserialization, and must be called after creating a
+        server to begin listening on the specified port.
+        """
+        self._bindSocket()
+        self._connectToProtocol()
+
+    def _bindSocket(self):
+        try:
+            skt = self.createInternetSocket()
+            skt.bind((self.interface, self.port))
+        except socket.error as le:
+            raise error.CannotListenError(self.interface, self.port, le)
+
+        # Make sure that if we listened on port 0, we update that to
+        # reflect what the OS actually assigned us.
+        self._realPortNumber = skt.getsockname()[1]
+
+        log.msg("%s starting on %s" % (
+                self._getLogPrefix(self.protocol), self._realPortNumber))
+
+        self.connected = 1
+        self.socket = skt
+        self.fileno = self.socket.fileno
+
+    def _connectToProtocol(self):
+        self.protocol.makeConnection(self)
+        self.startReading()
+
+
+    def doRead(self):
+        """
+        Called when my socket is ready for reading.
+        """
+        read = 0
+        while read < self.maxThroughput:
+            try:
+                data, addr = self.socket.recvfrom(self.maxPacketSize)
+            except socket.error as se:
+                no = se.args[0]
+                if no in _sockErrReadIgnore:
+                    return
+                if no in _sockErrReadRefuse:
+                    if self._connectedAddr:
+                        self.protocol.connectionRefused()
+                    return
+                raise
+            else:
+                read += len(data)
+                try:
+                    self.protocol.datagramReceived(data, addr)
+                except:
+                    log.err()
+
+
+    def write(self, datagram, addr=None):
+        """
+        Write a datagram.
+
+        @type datagram: C{str}
+        @param datagram: The datagram to be sent.
+
+        @type addr: C{tuple} containing C{str} as first element and C{int} as
+            second element, or C{None}
+        @param addr: A tuple of (I{stringified dotted-quad IP address},
+            I{integer port number}); can be C{None} in connected mode.
+        """
+        if self._connectedAddr:
+            assert addr in (None, self._connectedAddr)
+            try:
+                return self.socket.send(datagram)
+            except socket.error as se:
+                no = se.args[0]
+                if no == EINTR:
+                    return self.write(datagram)
+                elif no == EMSGSIZE:
+                    raise error.MessageLengthError("message too long")
+                elif no == ECONNREFUSED:
+                    self.protocol.connectionRefused()
+                else:
+                    raise
+        else:
+            assert addr != None
+            if not addr[0].replace(".", "").isdigit() and addr[0] != "<broadcast>":
+                warnings.warn("Please only pass IPs to write(), not hostnames",
+                              DeprecationWarning, stacklevel=2)
+            try:
+                return self.socket.sendto(datagram, addr)
+            except socket.error as se:
+                no = se.args[0]
+                if no == EINTR:
+                    return self.write(datagram, addr)
+                elif no == EMSGSIZE:
+                    raise error.MessageLengthError("message too long")
+                elif no == ECONNREFUSED:
+                    # In non-connected UDP, ECONNREFUSED is platform dependent
+                    # and the info is not necessarily useful.  Nevertheless,
+                    # maybe we should call connectionRefused?  XXX
+                    return
+                else:
+                    raise
+
+    def writeSequence(self, seq, addr):
+        self.write("".join(seq), addr)
+
+    def connect(self, host, port):
+        """
+        'Connect' to remote server.
+        """
+        if self._connectedAddr:
+            raise RuntimeError("already connected, reconnecting is not currently supported")
+        if not abstract.isIPAddress(host):
+            raise ValueError("please pass only IP addresses, not domain names")
+        self._connectedAddr = (host, port)
+        self.socket.connect((host, port))
+
+    def _loseConnection(self):
+        self.stopReading()
+        if self.connected: # actually means if we are *listening*
+            self.reactor.callLater(0, self.connectionLost)
+
+    def stopListening(self):
+        if self.connected:
+            result = self.d = defer.Deferred()
+        else:
+            result = None
+        self._loseConnection()
+        return result
+
+    def loseConnection(self):
+        warnings.warn("Please use stopListening() to disconnect port", DeprecationWarning, stacklevel=2)
+        self.stopListening()
+
+    def connectionLost(self, reason=None):
+        """
+        Cleans up my socket.
+        """
+        log.msg('(UDP Port %s Closed)' % self._realPortNumber)
+        self._realPortNumber = None
+        base.BasePort.connectionLost(self, reason)
+        self.protocol.doStop()
+        self.socket.close()
+        del self.socket
+        del self.fileno
+        if hasattr(self, "d"):
+            self.d.callback(None)
+            del self.d
+
+
+    def setLogStr(self):
+        """
+        Initialize the C{logstr} attribute to be used by C{logPrefix}.
+        """
+        logPrefix = self._getLogPrefix(self.protocol)
+        self.logstr = "%s (UDP)" % logPrefix
+
+
+    def logPrefix(self):
+        """
+        Return the prefix to log with.
+        """
+        return self.logstr
+
+
+    def getHost(self):
+        """
+        Returns an IPv4Address.
+
+        This indicates the address from which I am connecting.
+        """
+        return address.IPv4Address('UDP', *self.socket.getsockname())
+
+
+
+class MulticastMixin:
+    """
+    Implement multicast functionality.
+    """
+
+    def getOutgoingInterface(self):
+        i = self.socket.getsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF)
+        return socket.inet_ntoa(struct.pack("@i", i))
+
+    def setOutgoingInterface(self, addr):
+        """Returns Deferred of success."""
+        return self.reactor.resolve(addr).addCallback(self._setInterface)
+
+    def _setInterface(self, addr):
+        i = socket.inet_aton(addr)
+        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_IF, i)
+        return 1
+
+    def getLoopbackMode(self):
+        return self.socket.getsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP)
+
+    def setLoopbackMode(self, mode):
+        mode = struct.pack("b", operator.truth(mode))
+        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, mode)
+
+    def getTTL(self):
+        return self.socket.getsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL)
+
+    def setTTL(self, ttl):
+        ttl = struct.pack("B", ttl)
+        self.socket.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_TTL, ttl)
+
+    def joinGroup(self, addr, interface=""):
+        """Join a multicast group. Returns Deferred of success."""
+        return self.reactor.resolve(addr).addCallback(self._joinAddr1, interface, 1)
+
+    def _joinAddr1(self, addr, interface, join):
+        return self.reactor.resolve(interface).addCallback(self._joinAddr2, addr, join)
+
+    def _joinAddr2(self, interface, addr, join):
+        addr = socket.inet_aton(addr)
+        interface = socket.inet_aton(interface)
+        if join:
+            cmd = socket.IP_ADD_MEMBERSHIP
+        else:
+            cmd = socket.IP_DROP_MEMBERSHIP
+        try:
+            self.socket.setsockopt(socket.IPPROTO_IP, cmd, addr + interface)
+        except socket.error as e:
+            return failure.Failure(error.MulticastJoinError(addr, interface, *e.args))
+
+    def leaveGroup(self, addr, interface=""):
+        """Leave multicast group, return Deferred of success."""
+        return self.reactor.resolve(addr).addCallback(self._joinAddr1, interface, 0)
+
+
+@implementer(interfaces.IMulticastTransport)
+class MulticastPort(MulticastMixin, Port):
+    """
+    UDP Port that supports multicasting.
+    """
+
+    def __init__(self, port, proto, interface='', maxPacketSize=8192, reactor=None, listenMultiple=False):
+        """
+        @see: L{twisted.internet.interfaces.IReactorMulticast.listenMulticast}
+        """
+        Port.__init__(self, port, proto, interface, maxPacketSize, reactor)
+        self.listenMultiple = listenMultiple
+
+    def createInternetSocket(self):
+        skt = Port.createInternetSocket(self)
+        if self.listenMultiple:
+            skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+            if hasattr(socket, "SO_REUSEPORT"):
+                skt.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
+        return skt
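
Although the module asks not to be used directly, the Port class above is what backs
the reactor's public UDP API; a minimal sketch of that public surface (EchoUDP and
port 8000 are illustrative):

    from twisted.internet import reactor
    from twisted.internet.protocol import DatagramProtocol

    class EchoUDP(DatagramProtocol):
        def datagramReceived(self, datagram, addr):
            # send each datagram straight back to its sender
            self.transport.write(datagram, addr)

    reactor.listenUDP(8000, EchoUDP())
    reactor.run()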
diff --git a/ThirdParty/Twisted/twisted/internet/unix.py b/ThirdParty/Twisted/twisted/internet/unix.py
new file mode 100644
index 0000000..77b87cd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/unix.py
@@ -0,0 +1,518 @@
+# -*- test-case-name: twisted.test.test_unix,twisted.internet.test.test_unix,twisted.internet.test.test_posixbase -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Various asynchronous TCP/IP classes.
+
+End users shouldn't use this module directly - use the reactor APIs instead.
+
+Maintainer: Itamar Shtull-Trauring
+"""
+
+# System imports
+import os, sys, stat, socket, struct
+from errno import EINTR, EMSGSIZE, EAGAIN, EWOULDBLOCK, ECONNREFUSED, ENOBUFS
+
+from zope.interface import implements, implementsOnly, implementedBy
+
+if not hasattr(socket, 'AF_UNIX'):
+    raise ImportError("UNIX sockets not supported on this platform")
+
+# Twisted imports
+from twisted.internet import main, base, tcp, udp, error, interfaces, protocol, address
+from twisted.internet.error import CannotListenError
+from twisted.python.util import untilConcludes
+from twisted.python import lockfile, log, reflect, failure
+
+try:
+    from twisted.python import sendmsg
+except ImportError:
+    sendmsg = None
+
+
+def _ancillaryDescriptor(fd):
+    """
+    Pack an integer into an ancillary data structure suitable for use with
+    L{sendmsg.send1msg}.
+    """
+    packed = struct.pack("i", fd)
+    return [(socket.SOL_SOCKET, sendmsg.SCM_RIGHTS, packed)]
+
+
+
+class _SendmsgMixin(object):
+    """
+    Mixin for stream-oriented UNIX transports which uses sendmsg and recvmsg to
+    offer additional functionality, such as copying file descriptors into other
+    processes.
+
+    @ivar _writeSomeDataBase: The class which provides the basic implementation
+        of C{writeSomeData}.  Ultimately this should be a subclass of
+        L{twisted.internet.abstract.FileDescriptor}.  Subclasses which mix in
+        L{_SendmsgMixin} must define this.
+
+    @ivar _sendmsgQueue: A C{list} of C{int} holding file descriptors which are
+        currently buffered before being sent.
+
+    @ivar _fileDescriptorBufferSize: An C{int} giving the maximum number of file
+        descriptors to accept and queue for sending before pausing the
+        registered producer, if there is one.
+    """
+    implements(interfaces.IUNIXTransport)
+
+    _writeSomeDataBase = None
+    _fileDescriptorBufferSize = 64
+
+    def __init__(self):
+        self._sendmsgQueue = []
+
+
+    def _isSendBufferFull(self):
+        """
+        Determine whether the user-space send buffer for this transport is full
+        or not.
+
+        This extends the base determination by adding consideration of how many
+        file descriptors need to be sent using L{sendmsg.send1msg}.  When there
+        are more than C{self._fileDescriptorBufferSize}, the buffer is
+        considered full.
+
+        @return: C{True} if it is full, C{False} otherwise.
+        """
+        # There must be some bytes in the normal send buffer, checked by
+        # _writeSomeDataBase._isSendBufferFull, in order to send file
+        # descriptors from _sendmsgQueue.  That means that the buffer will
+        # eventually be considered full even without this additional logic.
+        # However, since we send only one byte per file descriptor, having lots
+        # of elements in _sendmsgQueue incurs more overhead and perhaps slows
+        # things down.  Anyway, try this for now, maybe rethink it later.
+        return (
+            len(self._sendmsgQueue) > self._fileDescriptorBufferSize
+            or self._writeSomeDataBase._isSendBufferFull(self))
+
+
+    def sendFileDescriptor(self, fileno):
+        """
+        Queue the given file descriptor to be sent and start trying to send it.
+        """
+        self._sendmsgQueue.append(fileno)
+        self._maybePauseProducer()
+        self.startWriting()
+
+
+    def writeSomeData(self, data):
+        """
+        Send as much of C{data} as possible.  Also send any pending file
+        descriptors.
+        """
+        # Make it a programming error to send more file descriptors than you
+        # send regular bytes.  Otherwise, due to the limitation mentioned below,
+        # we could end up with file descriptors left, but no bytes to send with
+        # them, therefore no way to send those file descriptors.
+        if len(self._sendmsgQueue) > len(data):
+            return error.FileDescriptorOverrun()
+
+        # If there are file descriptors to send, try sending them first, using a
+        # little bit of data from the stream-oriented write buffer too.  It is
+        # not possible to send a file descriptor without sending some regular
+        # data.
+        index = 0
+        try:
+            while index < len(self._sendmsgQueue):
+                fd = self._sendmsgQueue[index]
+                try:
+                    untilConcludes(
+                        sendmsg.send1msg, self.socket.fileno(), data[index], 0,
+                        _ancillaryDescriptor(fd))
+                except socket.error, se:
+                    if se.args[0] in (EWOULDBLOCK, ENOBUFS):
+                        return index
+                    else:
+                        return main.CONNECTION_LOST
+                else:
+                    index += 1
+        finally:
+            del self._sendmsgQueue[:index]
+
+        # Hand the remaining data to the base implementation.  Avoid slicing in
+        # favor of a buffer, in case that happens to be any faster.
+        limitedData = buffer(data, index)
+        result = self._writeSomeDataBase.writeSomeData(self, limitedData)
+        try:
+            return index + result
+        except TypeError:
+            return result
+
+
+    def doRead(self):
+        """
+        Calls L{IFileDescriptorReceiver.fileDescriptorReceived} and
+        L{IProtocol.dataReceived} with all available data.
+
+        This reads up to C{self.bufferSize} bytes of data from its socket, then
+        dispatches the data to protocol callbacks to be handled.  If the
+        connection is not lost through an error in the underlying recvmsg(),
+        this function will return the result of the dataReceived call.
+        """
+        try:
+            data, flags, ancillary = untilConcludes(
+                sendmsg.recv1msg, self.socket.fileno(), 0, self.bufferSize)
+        except socket.error, se:
+            if se.args[0] == EWOULDBLOCK:
+                return
+            else:
+                return main.CONNECTION_LOST
+
+        if ancillary:
+            fd = struct.unpack('i', ancillary[0][2])[0]
+            if interfaces.IFileDescriptorReceiver.providedBy(self.protocol):
+                self.protocol.fileDescriptorReceived(fd)
+            else:
+                log.msg(
+                    format=(
+                        "%(protocolName)s (on %(hostAddress)r) does not "
+                        "provide IFileDescriptorReceiver; closing file "
+                        "descriptor received (from %(peerAddress)r)."),
+                    hostAddress=self.getHost(), peerAddress=self.getPeer(),
+                    protocolName=self._getLogPrefix(self.protocol),
+                    )
+                os.close(fd)
+
+        return self._dataReceived(data)
+
+if sendmsg is None:
+    class _SendmsgMixin(object):
+        """
+        Behaviorless placeholder used when L{twisted.python.sendmsg} is not
+        available, preventing L{IUNIXTransport} from being supported.
+        """
+
+
+
+class Server(_SendmsgMixin, tcp.Server):
+
+    _writeSomeDataBase = tcp.Server
+
+    def __init__(self, sock, protocol, client, server, sessionno, reactor):
+        _SendmsgMixin.__init__(self)
+        tcp.Server.__init__(self, sock, protocol, (client, None), server, sessionno, reactor)
+
+
+    def getHost(self):
+        return address.UNIXAddress(self.socket.getsockname())
+
+    def getPeer(self):
+        return address.UNIXAddress(self.hostname or None)
+
+
+
+def _inFilesystemNamespace(path):
+    """
+    Determine whether the given unix socket path is in a filesystem namespace.
+
+    While most PF_UNIX sockets are entries in the filesystem, Linux 2.2 and
+    above support PF_UNIX sockets in an "abstract namespace" that does not
+    correspond to any path. This function returns C{True} if the given socket
+    path is stored in the filesystem and C{False} if the path is in this
+    abstract namespace.
+    """
+    return path[:1] != "\0"
+
+
+class _UNIXPort(object):
+    def getHost(self):
+        """Returns a UNIXAddress.
+
+        This indicates the server's address.
+        """
+        if sys.version_info > (2, 5) or _inFilesystemNamespace(self.port):
+            path = self.socket.getsockname()
+        else:
+            # Abstract namespace sockets aren't well supported on Python 2.4.
+            # getsockname() always returns ''.
+            path = self.port
+        return address.UNIXAddress(path)
+
+
+
+class Port(_UNIXPort, tcp.Port):
+    addressFamily = socket.AF_UNIX
+    socketType = socket.SOCK_STREAM
+
+    transport = Server
+    lockFile = None
+
+    def __init__(self, fileName, factory, backlog=50, mode=0666, reactor=None, wantPID = 0):
+        tcp.Port.__init__(self, fileName, factory, backlog, reactor=reactor)
+        self.mode = mode
+        self.wantPID = wantPID
+
+    def __repr__(self):
+        factoryName = reflect.qual(self.factory.__class__)
+        if hasattr(self, 'socket'):
+            return '<%s on %r>' % (factoryName, self.port)
+        else:
+            return '<%s (not listening)>' % (factoryName,)
+
+    def _buildAddr(self, name):
+        return address.UNIXAddress(name)
+
+    def startListening(self):
+        """
+        Create and bind my socket, and begin listening on it.
+
+        This is called on unserialization, and must be called after creating a
+        server to begin listening on the specified port.
+        """
+        log.msg("%s starting on %r" % (
+                self._getLogPrefix(self.factory), self.port))
+        if self.wantPID:
+            self.lockFile = lockfile.FilesystemLock(self.port + ".lock")
+            if not self.lockFile.lock():
+                raise CannotListenError, (None, self.port, "Cannot acquire lock")
+            else:
+                if not self.lockFile.clean:
+                    try:
+                        # This is a best-attempt at cleaning up
+                        # left-over unix sockets on the filesystem.
+                        # If it fails, there's not much else we can
+                        # do.  The bind() below will fail with an
+                        # exception that actually propagates.
+                        if stat.S_ISSOCK(os.stat(self.port).st_mode):
+                            os.remove(self.port)
+                    except:
+                        pass
+
+        self.factory.doStart()
+        try:
+            skt = self.createInternetSocket()
+            skt.bind(self.port)
+        except socket.error, le:
+            raise CannotListenError, (None, self.port, le)
+        else:
+            if _inFilesystemNamespace(self.port):
+                # Make the socket readable and writable to the world.
+                os.chmod(self.port, self.mode)
+            skt.listen(self.backlog)
+            self.connected = True
+            self.socket = skt
+            self.fileno = self.socket.fileno
+            self.numberAccepts = 100
+            self.startReading()
+
+
+    def _logConnectionLostMsg(self):
+        """
+        Log message for closing socket
+        """
+        log.msg('(UNIX Port %s Closed)' % (repr(self.port),))
+
+
+    def connectionLost(self, reason):
+        if _inFilesystemNamespace(self.port):
+            os.unlink(self.port)
+        if self.lockFile is not None:
+            self.lockFile.unlock()
+        tcp.Port.connectionLost(self, reason)
+
+
+
+class Client(_SendmsgMixin, tcp.BaseClient):
+    """A client for Unix sockets."""
+    addressFamily = socket.AF_UNIX
+    socketType = socket.SOCK_STREAM
+
+    _writeSomeDataBase = tcp.BaseClient
+
+    def __init__(self, filename, connector, reactor=None, checkPID = 0):
+        _SendmsgMixin.__init__(self)
+        self.connector = connector
+        self.realAddress = self.addr = filename
+        if checkPID and not lockfile.isLocked(filename + ".lock"):
+            self._finishInit(None, None, error.BadFileError(filename), reactor)
+        self._finishInit(self.doConnect, self.createInternetSocket(),
+                         None, reactor)
+
+    def getPeer(self):
+        return address.UNIXAddress(self.addr)
+
+    def getHost(self):
+        return address.UNIXAddress(None)
+
+
+class Connector(base.BaseConnector):
+    def __init__(self, address, factory, timeout, reactor, checkPID):
+        base.BaseConnector.__init__(self, factory, timeout, reactor)
+        self.address = address
+        self.checkPID = checkPID
+
+    def _makeTransport(self):
+        return Client(self.address, self, self.reactor, self.checkPID)
+
+    def getDestination(self):
+        return address.UNIXAddress(self.address)
+
+
+class DatagramPort(_UNIXPort, udp.Port):
+    """Datagram UNIX port, listening for packets."""
+
+    implements(interfaces.IUNIXDatagramTransport)
+
+    addressFamily = socket.AF_UNIX
+
+    def __init__(self, addr, proto, maxPacketSize=8192, mode=0666, reactor=None):
+        """Initialize with address to listen on.
+        """
+        udp.Port.__init__(self, addr, proto, maxPacketSize=maxPacketSize, reactor=reactor)
+        self.mode = mode
+
+
+    def __repr__(self):
+        protocolName = reflect.qual(self.protocol.__class__,)
+        if hasattr(self, 'socket'):
+            return '<%s on %r>' % (protocolName, self.port)
+        else:
+            return '<%s (not listening)>' % (protocolName,)
+
+
+    def _bindSocket(self):
+        log.msg("%s starting on %s"%(self.protocol.__class__, repr(self.port)))
+        try:
+            skt = self.createInternetSocket() # XXX: haha misnamed method
+            if self.port:
+                skt.bind(self.port)
+        except socket.error, le:
+            raise error.CannotListenError, (None, self.port, le)
+        if self.port and _inFilesystemNamespace(self.port):
+            # Make the socket readable and writable to the world.
+            os.chmod(self.port, self.mode)
+        self.connected = 1
+        self.socket = skt
+        self.fileno = self.socket.fileno
+
+    def write(self, datagram, address):
+        """Write a datagram."""
+        try:
+            return self.socket.sendto(datagram, address)
+        except socket.error, se:
+            no = se.args[0]
+            if no == EINTR:
+                return self.write(datagram, address)
+            elif no == EMSGSIZE:
+                raise error.MessageLengthError, "message too long"
+            elif no == EAGAIN:
+                # oh, well, drop the data. The only difference from UDP
+                # is that UDP won't ever notice.
+                # TODO: add TCP-like buffering
+                pass
+            else:
+                raise
+
+    def connectionLost(self, reason=None):
+        """Cleans up my socket.
+        """
+        log.msg('(Port %s Closed)' % repr(self.port))
+        base.BasePort.connectionLost(self, reason)
+        if hasattr(self, "protocol"):
+            # we won't have the protocol attribute in ConnectedDatagramPort
+            # when there was an error during the connection process
+            self.protocol.doStop()
+        self.connected = 0
+        self.socket.close()
+        del self.socket
+        del self.fileno
+        if hasattr(self, "d"):
+            self.d.callback(None)
+            del self.d
+
+    def setLogStr(self):
+        self.logstr = reflect.qual(self.protocol.__class__) + " (UDP)"
+
+
+
+class ConnectedDatagramPort(DatagramPort):
+    """
+    A connected datagram UNIX socket.
+    """
+
+    implementsOnly(interfaces.IUNIXDatagramConnectedTransport,
+                   *(implementedBy(base.BasePort)))
+
+    def __init__(self, addr, proto, maxPacketSize=8192, mode=0666,
+                 bindAddress=None, reactor=None):
+        assert isinstance(proto, protocol.ConnectedDatagramProtocol)
+        DatagramPort.__init__(self, bindAddress, proto, maxPacketSize, mode,
+                              reactor)
+        self.remoteaddr = addr
+
+
+    def startListening(self):
+        try:
+            self._bindSocket()
+            self.socket.connect(self.remoteaddr)
+            self._connectToProtocol()
+        except:
+            self.connectionFailed(failure.Failure())
+
+
+    def connectionFailed(self, reason):
+        """
+        Called when a connection fails. Stop listening on the socket.
+
+        @type reason: L{Failure}
+        @param reason: Why the connection failed.
+        """
+        self.stopListening()
+        self.protocol.connectionFailed(reason)
+        del self.protocol
+
+
+    def doRead(self):
+        """
+        Called when my socket is ready for reading.
+        """
+        read = 0
+        while read < self.maxThroughput:
+            try:
+                data, addr = self.socket.recvfrom(self.maxPacketSize)
+                read += len(data)
+                self.protocol.datagramReceived(data)
+            except socket.error, se:
+                no = se.args[0]
+                if no in (EAGAIN, EINTR, EWOULDBLOCK):
+                    return
+                if no == ECONNREFUSED:
+                    self.protocol.connectionRefused()
+                else:
+                    raise
+            except:
+                log.deferr()
+
+
+    def write(self, data):
+        """
+        Write a datagram.
+        """
+        try:
+            return self.socket.send(data)
+        except socket.error, se:
+            no = se.args[0]
+            if no == EINTR:
+                return self.write(data)
+            elif no == EMSGSIZE:
+                raise error.MessageLengthError, "message too long"
+            elif no == ECONNREFUSED:
+                self.protocol.connectionRefused()
+            elif no == EAGAIN:
+                # oh, well, drop the data. The only difference from UDP
+                # is that UDP won't ever notice.
+                # TODO: add TCP-like buffering
+                pass
+            else:
+                raise
+
+
+    def getPeer(self):
+        return address.UNIXAddress(self.remoteaddr)
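
As with the UDP module, these classes sit behind the reactor's public UNIX-socket
methods; a small sketch of that surface (the Echo protocol and the /tmp/echo.sock
path are placeholders):

    from twisted.internet import reactor, protocol

    class Echo(protocol.Protocol):
        def dataReceived(self, data):
            # echo each chunk back on the same stream connection
            self.transport.write(data)

    factory = protocol.Factory()
    factory.protocol = Echo
    reactor.listenUNIX("/tmp/echo.sock", factory)  # served by unix.Port above
    reactor.run()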
diff --git a/ThirdParty/Twisted/twisted/internet/utils.py b/ThirdParty/Twisted/twisted/internet/utils.py
new file mode 100644
index 0000000..7413997
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/utils.py
@@ -0,0 +1,178 @@
+# -*- test-case-name: twisted.test.test_iutils -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Utility methods.
+"""
+
+import sys, warnings
+
+from twisted.internet import protocol, defer
+from twisted.internet._utilspy3 import runWithWarningsSuppressed
+from twisted.internet._utilspy3 import suppressWarnings
+from twisted.python import failure
+
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+
+def _callProtocolWithDeferred(protocol, executable, args, env, path, reactor=None):
+    if reactor is None:
+        from twisted.internet import reactor
+
+    d = defer.Deferred()
+    p = protocol(d)
+    reactor.spawnProcess(p, executable, (executable,)+tuple(args), env, path)
+    return d
+
+
+
+class _UnexpectedErrorOutput(IOError):
+    """
+    Standard error data was received where it was not expected.  This is a
+    subclass of L{IOError} to preserve backward compatibility with the previous
+    error behavior of L{getProcessOutput}.
+
+    @ivar processEnded: A L{Deferred} which will fire when the process which
+        produced the data on stderr has ended (exited and all file descriptors
+        closed).
+    """
+    def __init__(self, text, processEnded):
+        IOError.__init__(self, "got stderr: %r" % (text,))
+        self.processEnded = processEnded
+
+
+
+class _BackRelay(protocol.ProcessProtocol):
+    """
+    Trivial protocol for communicating with a process and turning its output
+    into the result of a L{Deferred}.
+
+    @ivar deferred: A L{Deferred} which will be called back with all of stdout
+        and, if C{errortoo} is true, all of stderr as well (mixed together in
+        one string).  If C{errortoo} is false and any bytes are received over
+        stderr, this will fire with an L{_UnexpectedErrorOutput} instance and
+        the attribute will be set to C{None}.
+
+    @ivar onProcessEnded: If C{errortoo} is false and bytes are received over
+        stderr, this attribute will refer to a L{Deferred} which will be called
+        back when the process ends.  This C{Deferred} is also associated with
+        the L{_UnexpectedErrorOutput} which C{deferred} fires with earlier in
+        this case so that users can determine when the process has actually
+        ended, in addition to knowing when bytes have been received via stderr.
+    """
+
+    def __init__(self, deferred, errortoo=0):
+        self.deferred = deferred
+        self.s = StringIO.StringIO()
+        if errortoo:
+            self.errReceived = self.errReceivedIsGood
+        else:
+            self.errReceived = self.errReceivedIsBad
+
+    def errReceivedIsBad(self, text):
+        if self.deferred is not None:
+            self.onProcessEnded = defer.Deferred()
+            err = _UnexpectedErrorOutput(text, self.onProcessEnded)
+            self.deferred.errback(failure.Failure(err))
+            self.deferred = None
+            self.transport.loseConnection()
+
+    def errReceivedIsGood(self, text):
+        self.s.write(text)
+
+    def outReceived(self, text):
+        self.s.write(text)
+
+    def processEnded(self, reason):
+        if self.deferred is not None:
+            self.deferred.callback(self.s.getvalue())
+        elif self.onProcessEnded is not None:
+            self.onProcessEnded.errback(reason)
+
+
+
+def getProcessOutput(executable, args=(), env={}, path=None, reactor=None,
+                     errortoo=0):
+    """
+    Spawn a process and return its output as a deferred returning a string.
+
+    @param executable: The file name to run and get the output of - the
+                       full path should be used.
+
+    @param args: the command line arguments to pass to the process; a
+                 sequence of strings. The first string should *NOT* be the
+                 executable's name.
+
+    @param env: the environment variables to pass to the process; a
+                dictionary of strings.
+
+    @param path: the path to run the subprocess in - defaults to the
+                 current directory.
+
+    @param reactor: the reactor to use - defaults to the default reactor
+
+    @param errortoo: If true, include stderr in the result.  If false, if
+        stderr is received the returned L{Deferred} will errback with an
+        L{IOError} instance with a C{processEnded} attribute.  The
+        C{processEnded} attribute refers to a L{Deferred} which fires when the
+        executed process ends.
+    """
+    return _callProtocolWithDeferred(lambda d:
+                                        _BackRelay(d, errortoo=errortoo),
+                                     executable, args, env, path,
+                                     reactor)
+
+
+class _ValueGetter(protocol.ProcessProtocol):
+
+    def __init__(self, deferred):
+        self.deferred = deferred
+
+    def processEnded(self, reason):
+        self.deferred.callback(reason.value.exitCode)
+
+
+def getProcessValue(executable, args=(), env={}, path=None, reactor=None):
+    """Spawn a process and return its exit code as a Deferred."""
+    return _callProtocolWithDeferred(_ValueGetter, executable, args, env, path,
+                                    reactor)
+
+
+class _EverythingGetter(protocol.ProcessProtocol):
+
+    def __init__(self, deferred):
+        self.deferred = deferred
+        self.outBuf = StringIO.StringIO()
+        self.errBuf = StringIO.StringIO()
+        self.outReceived = self.outBuf.write
+        self.errReceived = self.errBuf.write
+
+    def processEnded(self, reason):
+        out = self.outBuf.getvalue()
+        err = self.errBuf.getvalue()
+        e = reason.value
+        code = e.exitCode
+        if e.signal:
+            self.deferred.errback((out, err, e.signal))
+        else:
+            self.deferred.callback((out, err, code))
+
+def getProcessOutputAndValue(executable, args=(), env={}, path=None,
+                             reactor=None):
+    """Spawn a process and returns a Deferred that will be called back with
+    its output (from stdout and stderr) and it's exit code as (out, err, code)
+    If a signal is raised, the Deferred will errback with the stdout and
+    stderr up to that point, along with the signal, as (out, err, signalNum)
+    """
+    return _callProtocolWithDeferred(_EverythingGetter, executable, args, env, path,
+                                    reactor)
+
+
+__all__ = [
+    "runWithWarningsSuppressed", "suppressWarnings",
+
+    "getProcessOutput", "getProcessValue", "getProcessOutputAndValue",
+    ]
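
A short usage sketch for the process helpers above (the /bin/ls path assumes a POSIX
host; show is an illustrative callback name):

    from twisted.internet import reactor, utils

    def show(output):
        # output is the combined stdout/stderr text, since errortoo is true
        print(output)
        reactor.stop()

    d = utils.getProcessOutput("/bin/ls", ["-l"], errortoo=True)
    d.addCallback(show)
    reactor.run()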
diff --git a/ThirdParty/Twisted/twisted/internet/win32eventreactor.py b/ThirdParty/Twisted/twisted/internet/win32eventreactor.py
new file mode 100644
index 0000000..3c0e09c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/win32eventreactor.py
@@ -0,0 +1,430 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+A win32event based implementation of the Twisted main loop.
+
+This requires pywin32 (formerly win32all) or ActivePython to be installed.
+
+To install the event loop (and you should do this before any connections,
+listeners or connectors are added)::
+
+    from twisted.internet import win32eventreactor
+    win32eventreactor.install()
+
+LIMITATIONS:
+ 1. WaitForMultipleObjects and thus the event loop can only handle 64 objects.
+ 2. Process running has some problems (see L{Process} docstring).
+
+
+TODO:
+ 1. Event loop handling of writes is *very* problematic (this is causing failed tests).
+    Switch to doing it the correct way, whatever that means (see below).
+ 2. Replace icky socket loopback waker with event based waker (use dummyEvent object)
+ 3. Switch everyone to using Free Software so we don't have to deal with proprietary APIs.
+
+
+ALTERNATIVE SOLUTIONS:
+ - IIRC, sockets can only be registered once. So we switch to a structure
+   like the poll() reactor, thus allowing us to deal with write events in
+   a decent fashion. This should allow us to pass tests, but we're still
+   limited to 64 events.
+
+Or:
+
+ - Instead of doing a reactor, we make this an addon to the select reactor.
+   The WFMO event loop runs in a separate thread. This means no need to maintain
+   separate code for networking, 64 event limit doesn't apply to sockets,
+   we can run processes and other win32 stuff in default event loop. The
+   only problem is that we're stuck with the icky socket based waker.
+   Another benefit is that this could be extended to support >64 events
+   in a simpler manner than the previous solution.
+
+The 2nd solution is probably what will get implemented.
+"""
+
+# System imports
+import time
+import sys
+from threading import Thread
+from weakref import WeakKeyDictionary
+
+from zope.interface import implements
+
+# Win32 imports
+from win32file import FD_READ, FD_CLOSE, FD_ACCEPT, FD_CONNECT, WSAEventSelect
+try:
+    # WSAEnumNetworkEvents was added in pywin32 215
+    from win32file import WSAEnumNetworkEvents
+except ImportError:
+    import warnings
+    warnings.warn(
+        'Reliable disconnection notification requires pywin32 215 or later',
+        category=UserWarning)
+    def WSAEnumNetworkEvents(fd, event):
+        return set([FD_READ])
+
+from win32event import CreateEvent, MsgWaitForMultipleObjects
+from win32event import WAIT_OBJECT_0, WAIT_TIMEOUT, QS_ALLINPUT, QS_ALLEVENTS
+
+import win32gui
+
+# Twisted imports
+from twisted.internet import posixbase
+from twisted.python import log, threadable, failure
+from twisted.internet.interfaces import IReactorFDSet
+from twisted.internet.interfaces import IReactorWin32Events
+from twisted.internet.threads import blockingCallFromThread
+
+
+class Win32Reactor(posixbase.PosixReactorBase):
+    """
+    Reactor that uses Win32 event APIs.
+
+    @ivar _reads: A dictionary mapping L{FileDescriptor} instances to a
+        win32 event object used to check for read events for that descriptor.
+
+    @ivar _writes: A dictionary mapping L{FileDescriptor} instances to a
+        arbitrary value.  Keys in this dictionary will be given a chance to
+        write out their data.
+
+    @ivar _events: A dictionary mapping win32 event object to tuples of
+        L{FileDescriptor} instances and event masks.
+
+    @ivar _closedAndReading: Along with C{_closedAndNotReading}, keeps track of
+        descriptors which have had close notification delivered from the OS but
+        which we have not finished reading data from.  MsgWaitForMultipleObjects
+        will only deliver close notification to us once, so we remember it in
+        these two dictionaries until we're ready to act on it.  The OS has
+        delivered close notification for each descriptor in this dictionary, and
+        the descriptors are marked as allowed to handle read events in the
+        reactor, so they can be processed.  When a descriptor is marked as not
+        allowed to handle read events in the reactor (ie, it is passed to
+        L{IReactorFDSet.removeReader}), it is moved out of this dictionary and
+        into C{_closedAndNotReading}.  The descriptors are keys in this
+        dictionary.  The values are arbitrary.
+    @type _closedAndReading: C{dict}
+
+    @ivar _closedAndNotReading: These descriptors have had close notification
+        delivered from the OS, but are not marked as allowed to handle read
+        events in the reactor.  They are saved here to record their closed
+        state, but not processed at all.  When one of these descriptors is
+        passed to L{IReactorFDSet.addReader}, it is moved out of this dictionary
+        and into C{_closedAndReading}.  The descriptors are keys in this
+        dictionary.  The values are arbitrary.  This is a weak key dictionary so
+        that if an application tells the reactor to stop reading from a
+        descriptor and then forgets about that descriptor itself, the reactor
+        will also forget about it.
+    @type _closedAndNotReading: C{WeakKeyDictionary}
+    """
+    implements(IReactorFDSet, IReactorWin32Events)
+
+    dummyEvent = CreateEvent(None, 0, 0, None)
+
+    def __init__(self):
+        self._reads = {}
+        self._writes = {}
+        self._events = {}
+        self._closedAndReading = {}
+        self._closedAndNotReading = WeakKeyDictionary()
+        posixbase.PosixReactorBase.__init__(self)
+
+
+    def _makeSocketEvent(self, fd, action, why):
+        """
+        Make a win32 event object for a socket.
+        """
+        event = CreateEvent(None, 0, 0, None)
+        WSAEventSelect(fd, event, why)
+        self._events[event] = (fd, action)
+        return event
+
+
+    def addEvent(self, event, fd, action):
+        """
+        Add a new win32 event to the event loop.
+        """
+        self._events[event] = (fd, action)
+
+
+    def removeEvent(self, event):
+        """
+        Remove an event.
+        """
+        del self._events[event]
+
+
+    def addReader(self, reader):
+        """
+        Add a socket FileDescriptor for notification of data available to read.
+        """
+        if reader not in self._reads:
+            self._reads[reader] = self._makeSocketEvent(
+                reader, 'doRead', FD_READ | FD_ACCEPT | FD_CONNECT | FD_CLOSE)
+            # If the reader is closed, move it over to the dictionary of reading
+            # descriptors.
+            if reader in self._closedAndNotReading:
+                self._closedAndReading[reader] = True
+                del self._closedAndNotReading[reader]
+
+
+    def addWriter(self, writer):
+        """
+        Add a socket FileDescriptor for notification of data available to write.
+        """
+        if writer not in self._writes:
+            self._writes[writer] = 1
+
+
+    def removeReader(self, reader):
+        """Remove a Selectable for notification of data available to read.
+        """
+        if reader in self._reads:
+            del self._events[self._reads[reader]]
+            del self._reads[reader]
+
+            # If the descriptor is closed, move it out of the dictionary of
+            # reading descriptors into the dictionary of waiting descriptors.
+            if reader in self._closedAndReading:
+                self._closedAndNotReading[reader] = True
+                del self._closedAndReading[reader]
+
+
+    def removeWriter(self, writer):
+        """Remove a Selectable for notification of data available to write.
+        """
+        if writer in self._writes:
+            del self._writes[writer]
+
+
+    def removeAll(self):
+        """
+        Remove all selectables, and return a list of them.
+        """
+        return self._removeAll(self._reads, self._writes)
+
+
+    def getReaders(self):
+        return self._reads.keys()
+
+
+    def getWriters(self):
+        return self._writes.keys()
+
+
+    def doWaitForMultipleEvents(self, timeout):
+        log.msg(channel='system', event='iteration', reactor=self)
+        if timeout is None:
+            timeout = 100
+
+        # Keep track of whether we run any application code before we get to the
+        # MsgWaitForMultipleObjects.  If so, there's a chance it will schedule a
+        # new timed call or stop the reactor or do something else that means we
+        # shouldn't block in MsgWaitForMultipleObjects for the full timeout.
+        ranUserCode = False
+
+        # If any descriptors are trying to close, try to get them out of the way
+        # first.
+        for reader in self._closedAndReading.keys():
+            ranUserCode = True
+            self._runAction('doRead', reader)
+
+        for fd in self._writes.keys():
+            ranUserCode = True
+            log.callWithLogger(fd, self._runWrite, fd)
+
+        if ranUserCode:
+            # If application code *might* have scheduled an event, assume it
+            # did.  If we're wrong, we'll get back here shortly anyway.  If
+            # we're right, we'll be sure to handle the event (including reactor
+            # shutdown) in a timely manner.
+            timeout = 0
+
+        if not (self._events or self._writes):
+            # sleep so we don't suck up CPU time
+            time.sleep(timeout)
+            return
+
+        handles = self._events.keys() or [self.dummyEvent]
+        timeout = int(timeout * 1000)
+        val = MsgWaitForMultipleObjects(handles, 0, timeout, QS_ALLINPUT)
+        if val == WAIT_TIMEOUT:
+            return
+        elif val == WAIT_OBJECT_0 + len(handles):
+            exit = win32gui.PumpWaitingMessages()
+            if exit:
+                self.callLater(0, self.stop)
+                return
+        elif val >= WAIT_OBJECT_0 and val < WAIT_OBJECT_0 + len(handles):
+            event = handles[val - WAIT_OBJECT_0]
+            fd, action = self._events[event]
+
+            if fd in self._reads:
+                # Before anything, make sure it's still a valid file descriptor.
+                fileno = fd.fileno()
+                if fileno == -1:
+                    self._disconnectSelectable(fd, posixbase._NO_FILEDESC, False)
+                    return
+
+                # Since it's a socket (not another arbitrary event added via
+                # addEvent) and we asked for FD_READ | FD_CLOSE, check to see if
+                # we actually got FD_CLOSE.  This needs a special check because
+                # it only gets delivered once.  If we miss it, it's gone forever
+                # and we'll never know that the connection is closed.
+                events = WSAEnumNetworkEvents(fileno, event)
+                if FD_CLOSE in events:
+                    self._closedAndReading[fd] = True
+            log.callWithLogger(fd, self._runAction, action, fd)
+
+
+    def _runWrite(self, fd):
+        closed = 0
+        try:
+            closed = fd.doWrite()
+        except:
+            closed = sys.exc_info()[1]
+            log.deferr()
+
+        if closed:
+            self.removeReader(fd)
+            self.removeWriter(fd)
+            try:
+                fd.connectionLost(failure.Failure(closed))
+            except:
+                log.deferr()
+        elif closed is None:
+            return 1
+
+    def _runAction(self, action, fd):
+        try:
+            closed = getattr(fd, action)()
+        except:
+            closed = sys.exc_info()[1]
+            log.deferr()
+        if closed:
+            self._disconnectSelectable(fd, closed, action == 'doRead')
+
+    doIteration = doWaitForMultipleEvents
+
+
+
+class _ThreadFDWrapper(object):
+    """
+    This wraps an event handler and translates notification in the helper
+    L{Win32Reactor} thread into a notification in the primary reactor thread.
+
+    @ivar _reactor: The primary reactor, the one to which event notification
+        will be sent.
+
+    @ivar _fd: The L{FileDescriptor} to which the event will be dispatched.
+
+    @ivar _action: A C{str} giving the method of C{_fd} which handles the event.
+
+    @ivar _logPrefix: The pre-fetched log prefix string for C{_fd}, so that
+        C{_fd.logPrefix} does not need to be called in a non-main thread.
+    """
+    def __init__(self, reactor, fd, action, logPrefix):
+        self._reactor = reactor
+        self._fd = fd
+        self._action = action
+        self._logPrefix = logPrefix
+
+
+    def logPrefix(self):
+        """
+        Return the original handler's log prefix, as it was given to
+        C{__init__}.
+        """
+        return self._logPrefix
+
+
+    def _execute(self):
+        """
+        Callback fired when the associated event is set.  Run the C{action}
+        callback on the wrapped descriptor in the main reactor thread and raise
+        or return whatever it raises or returns to cause this event handler to
+        be removed from C{self._reactor} if appropriate.
+        """
+        return blockingCallFromThread(
+            self._reactor, lambda: getattr(self._fd, self._action)())
+
+
+    def connectionLost(self, reason):
+        """
+        Pass through to the wrapped descriptor, but in the main reactor thread
+        instead of the helper C{Win32Reactor} thread.
+        """
+        self._reactor.callFromThread(self._fd.connectionLost, reason)
+
+
+
+class _ThreadedWin32EventsMixin(object):
+    """
+    This mixin implements L{IReactorWin32Events} for another reactor by running
+    a L{Win32Reactor} in a separate thread and dispatching work to it.
+
+    @ivar _reactor: The L{Win32Reactor} running in the other thread.  This is
+        C{None} until it is actually needed.
+
+    @ivar _reactorThread: The L{threading.Thread} which is running the
+        L{Win32Reactor}.  This is C{None} until it is actually needed.
+    """
+    implements(IReactorWin32Events)
+
+    _reactor = None
+    _reactorThread = None
+
+
+    def _unmakeHelperReactor(self):
+        """
+        Stop and discard the reactor started by C{_makeHelperReactor}.
+        """
+        self._reactor.callFromThread(self._reactor.stop)
+        self._reactor = None
+
+
+    def _makeHelperReactor(self):
+        """
+        Create and (in a new thread) start a L{Win32Reactor} instance to use for
+        the implementation of L{IReactorWin32Events}.
+        """
+        self._reactor = Win32Reactor()
+        # This is a helper reactor, it is not the global reactor and its thread
+        # is not "the" I/O thread.  Prevent it from registering it as such.
+        self._reactor._registerAsIOThread = False
+        self._reactorThread = Thread(
+            target=self._reactor.run, args=(False,))
+        self.addSystemEventTrigger(
+            'after', 'shutdown', self._unmakeHelperReactor)
+        self._reactorThread.start()
+
+
+    def addEvent(self, event, fd, action):
+        """
+        @see: L{IReactorWin32Events}
+        """
+        if self._reactor is None:
+            self._makeHelperReactor()
+        self._reactor.callFromThread(
+            self._reactor.addEvent,
+            event, _ThreadFDWrapper(self, fd, action, fd.logPrefix()),
+            "_execute")
+
+
+    def removeEvent(self, event):
+        """
+        @see: L{IReactorWin32Events}
+        """
+        self._reactor.callFromThread(self._reactor.removeEvent, event)
+
+
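+# Illustrative sketch, not part of the imported Twisted source: a reactor on
+# Windows would typically gain IReactorWin32Events support by listing this
+# mixin ahead of its base reactor class, e.g. (names hypothetical):
+#
+#     class MyReactor(_ThreadedWin32EventsMixin, SomeBaseReactor):
+#         pass
+#
+#     reactor = MyReactor()
+#     reactor.addEvent(win32EventHandle, fd, "doRead")
+#
+# The helper Win32Reactor and its thread are only created on the first
+# addEvent() call and are torn down by the 'after shutdown' trigger above.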
+
+def install():
+    threadable.init(1)
+    r = Win32Reactor()
+    import main
+    main.installReactor(r)
+
+
+__all__ = ["Win32Reactor", "install"]
diff --git a/ThirdParty/Twisted/twisted/internet/wxreactor.py b/ThirdParty/Twisted/twisted/internet/wxreactor.py
new file mode 100644
index 0000000..71e861a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/wxreactor.py
@@ -0,0 +1,184 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module provides wxPython event loop support for Twisted.
+
+In order to use this support, simply do the following::
+
+    |  from twisted.internet import wxreactor
+    |  wxreactor.install()
+
+Then, when your root wxApp has been created::
+
+    | from twisted.internet import reactor
+    | reactor.registerWxApp(yourApp)
+    | reactor.run()
+
+Then use twisted.internet APIs as usual. Stop the event loop using
+reactor.stop(), not yourApp.ExitMainLoop().
+
+IMPORTANT: tests will fail when run under this reactor. This is
+expected and probably does not reflect on the reactor's ability to run
+real applications.
+"""
+
+import Queue
+try:
+    from wx import PySimpleApp as wxPySimpleApp, CallAfter as wxCallAfter, \
+         Timer as wxTimer
+except ImportError:
+    # older version of wxPython:
+    from wxPython.wx import wxPySimpleApp, wxCallAfter, wxTimer
+
+from twisted.python import log, runtime
+from twisted.internet import _threadedselect
+
+
+class ProcessEventsTimer(wxTimer):
+    """
+    Timer that tells wx to process pending events.
+
+    This is necessary on OS X, probably due to a bug in wx, if we want
+    wxCallAfters to be handled when modal dialogs, menus, etc.  are open.
+    """
+    def __init__(self, wxapp):
+        wxTimer.__init__(self)
+        self.wxapp = wxapp
+    
+
+    def Notify(self):
+        """
+        Called repeatedly by wx event loop.
+        """
+        self.wxapp.ProcessPendingEvents()
+
+
+
+class WxReactor(_threadedselect.ThreadedSelectReactor):
+    """
+    wxPython reactor.
+
+    wxPython drives the event loop, select() runs in a thread.
+    """
+
+    _stopping = False
+
+    def registerWxApp(self, wxapp):
+        """
+        Register wxApp instance with the reactor.
+        """
+        self.wxapp = wxapp
+
+
+    def _installSignalHandlersAgain(self):
+        """
+        wx sometimes removes our own signal handlers, so re-add them.
+        """
+        try:
+            # make _handleSignals happy:
+            import signal
+            signal.signal(signal.SIGINT, signal.default_int_handler)
+        except ImportError:
+            return
+        self._handleSignals()
+
+
+    def stop(self):
+        """
+        Stop the reactor.
+        """
+        if self._stopping:
+            return
+        self._stopping = True
+        _threadedselect.ThreadedSelectReactor.stop(self)
+
+
+    def _runInMainThread(self, f):
+        """
+        Schedule function to run in main wx/Twisted thread.
+
+        Called by the select() thread.
+        """
+        if hasattr(self, "wxapp"):
+            wxCallAfter(f)
+        else:
+            # wx shutdown but twisted hasn't
+            self._postQueue.put(f)
+
+
+    def _stopWx(self):
+        """
+        Stop the wx event loop if it hasn't already been stopped.
+
+        Called during Twisted event loop shutdown.
+        """
+        if hasattr(self, "wxapp"):
+            self.wxapp.ExitMainLoop()
+
+
+    def run(self, installSignalHandlers=True):
+        """
+        Start the reactor.
+        """
+        self._postQueue = Queue.Queue()
+        if not hasattr(self, "wxapp"):
+            log.msg("registerWxApp() was not called on reactor, "
+                    "registering my own wxApp instance.")
+            self.registerWxApp(wxPySimpleApp())
+
+        # start select() thread:
+        self.interleave(self._runInMainThread,
+                        installSignalHandlers=installSignalHandlers)
+        if installSignalHandlers:
+            self.callLater(0, self._installSignalHandlersAgain)
+
+        # add cleanup events:
+        self.addSystemEventTrigger("after", "shutdown", self._stopWx)
+        self.addSystemEventTrigger("after", "shutdown",
+                                   lambda: self._postQueue.put(None))
+
+        # On Mac OS X, work around wx bug by starting timer to ensure
+        # wxCallAfter calls are always processed. We don't wake up as
+        # often as we could since that uses too much CPU.
+        if runtime.platform.isMacOSX():
+            t = ProcessEventsTimer(self.wxapp)
+            t.Start(2) # wake up every 2ms
+
+        self.wxapp.MainLoop()
+        wxapp = self.wxapp
+        del self.wxapp
+
+        if not self._stopping:
+            # wx event loop exited without reactor.stop() being
+            # called.  At this point events from select() thread will
+            # be added to _postQueue, but some may still be waiting
+            # unprocessed in wx, thus the ProcessPendingEvents()
+            # below.
+            self.stop()
+            wxapp.ProcessPendingEvents() # deal with any queued wxCallAfters
+            while 1:
+                try:
+                    f = self._postQueue.get(timeout=0.01)
+                except Queue.Empty:
+                    continue
+                else:
+                    if f is None:
+                        break
+                    try:
+                        f()
+                    except:
+                        log.err()
+
+
+def install():
+    """
+    Configure the twisted mainloop to be run inside the wxPython mainloop.
+    """
+    reactor = WxReactor()
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
+
+
+__all__ = ['install']
diff --git a/ThirdParty/Twisted/twisted/internet/wxsupport.py b/ThirdParty/Twisted/twisted/internet/wxsupport.py
new file mode 100644
index 0000000..d17c666
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/internet/wxsupport.py
@@ -0,0 +1,61 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+"""Old method of wxPython support for Twisted.
+
+twisted.internet.wxreactor is probably a better choice.
+
+To use::
+
+    | # given a wxApp instance called myWxAppInstance:
+    | from twisted.internet import wxsupport
+    | wxsupport.install(myWxAppInstance)
+    
+Use Twisted's APIs for running and stopping the event loop, don't use
+wxPython's methods.
+
+On Windows the Twisted event loop might block when dialogs are open
+or menus are selected.
+
+Maintainer: Itamar Shtull-Trauring
+"""
+
+import warnings
+warnings.warn("wxsupport is not fully functional on Windows, wxreactor is better.")
+
+# wxPython imports
+from wxPython.wx import wxApp
+
+# twisted imports
+from twisted.internet import reactor
+from twisted.python.runtime import platformType
+
+
+class wxRunner:
+    """Make sure GUI events are handled."""
+    
+    def __init__(self, app):
+        self.app = app
+        
+    def run(self):
+        """
+        Execute pending WX events followed by WX idle events and
+        reschedule.
+        """
+        # run wx events
+        while self.app.Pending():
+            self.app.Dispatch()
+        
+        # run wx idle events
+        self.app.ProcessIdle()
+        reactor.callLater(0.02, self.run)
+
+
+def install(app):
+    """Install the wxPython support, given a wxApp instance"""
+    runner = wxRunner(app)
+    reactor.callLater(0.02, runner.run)
+
+
+__all__ = ["install"]
diff --git a/ThirdParty/Twisted/twisted/lore/__init__.py b/ThirdParty/Twisted/twisted/lore/__init__.py
new file mode 100644
index 0000000..89ab207
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/__init__.py
@@ -0,0 +1,21 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+'''
+The Twisted Documentation Generation System
+
+Maintainer: Andrew Bennetts
+'''
+
+# TODO
+# Abstract
+# Bibliography
+# Index
+# Allow non-web image formats (EPS, specifically)
+# Allow pickle output and input to minimize parses
+# Numbered headers
+# Navigational aides
+
+from twisted.lore._version import version
+__version__ = version.short()
diff --git a/ThirdParty/Twisted/twisted/lore/_version.py b/ThirdParty/Twisted/twisted/lore/_version.py
new file mode 100644
index 0000000..5fdf02d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.lore', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/lore/default.py b/ThirdParty/Twisted/twisted/lore/default.py
new file mode 100644
index 0000000..5b542ad
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/default.py
@@ -0,0 +1,56 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Default processing factory plugin.
+"""
+
+from xml.dom import minidom as dom
+
+from twisted.lore import tree, latex, lint, process
+from twisted.web import sux
+
+htmlDefault = {'template': 'template.tpl', 'baseurl': '%s', 'ext': '.html'}
+
+class ProcessingFunctionFactory:
+
+    def getDoFile(self):
+        return tree.doFile
+
+    def generate_html(self, options, filenameGenerator=tree.getOutputFileName):
+        n = htmlDefault.copy()
+        n.update(options)
+        options = n
+        try:
+            fp = open(options['template'])
+            templ = dom.parse(fp)
+        except IOError, e:
+            raise process.NoProcessorError(e.filename+": "+e.strerror)
+        except sux.ParseError, e:
+            raise process.NoProcessorError(str(e))
+        df = lambda file, linkrel: self.getDoFile()(file, linkrel, options['ext'],
+                                                    options['baseurl'], templ, options, filenameGenerator)
+        return df
+
+    latexSpitters = {None: latex.LatexSpitter,
+                     'section': latex.SectionLatexSpitter,
+                     'chapter': latex.ChapterLatexSpitter,
+                     'book': latex.BookLatexSpitter,
+                     }
+
+    def generate_latex(self, options, filenameGenerator=None):
+        spitter = self.latexSpitters[None]
+        for (key, value) in self.latexSpitters.items():
+            if key and options.get(key):
+               spitter = value
+        df = lambda file, linkrel: latex.convertFile(file, spitter)
+        return df
+
+    def getLintChecker(self):
+        return lint.getDefaultChecker()
+
+    def generate_lint(self, options, filenameGenerator=None):
+        checker = self.getLintChecker()
+        return lambda file, linkrel: lint.doFile(file, checker)
+
+factory = ProcessingFunctionFactory()
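+# Illustrative usage, not part of the imported Twisted source: the lore driver
+# looks up a generate_<output> method on this factory and calls the returned
+# function once per input file (paths below are hypothetical):
+#
+#     df = factory.generate_html({'template': 'template.tpl'})
+#     df('howto/index.xhtml', '')   # second argument is the linkrel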
diff --git a/ThirdParty/Twisted/twisted/lore/docbook.py b/ThirdParty/Twisted/twisted/lore/docbook.py
new file mode 100644
index 0000000..62c8fc6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/docbook.py
@@ -0,0 +1,68 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+DocBook output support for Lore.
+"""
+
+import os, cgi
+from xml.dom import minidom as dom
+
+from twisted.lore import latex
+
+
+class DocbookSpitter(latex.BaseLatexSpitter):
+
+    currentLevel = 1
+
+    def writeNodeData(self, node):
+        self.writer(node.data)
+
+    def visitNode_body(self, node):
+        self.visitNodeDefault(node)
+        self.writer('</section>'*self.currentLevel)
+
+    def visitNodeHeader(self, node):
+        level = int(node.tagName[1])
+        difference, self.currentLevel = level-self.currentLevel, level
+        self.writer('<section>'*difference+'</section>'*-difference)
+        if difference<=0:
+            self.writer('</section>\n<section>')
+        self.writer('<title>')
+        self.visitNodeDefault(node)
+
+    def visitNode_a_listing(self, node):
+        fileName = os.path.join(self.currDir, node.getAttribute('href'))
+        self.writer('<programlisting>\n')
+        self.writer(cgi.escape(open(fileName).read()))
+        self.writer('</programlisting>\n')
+
+    def visitNode_a_href(self, node):
+        self.visitNodeDefault(node)
+
+    def visitNode_a_name(self, node):
+        self.visitNodeDefault(node)
+
+    def visitNode_li(self, node):
+        for child in node.childNodes:
+            if getattr(child, 'tagName', None) != 'p':
+                new = dom.Element('p')
+                new.childNodes = [child]
+                node.replaceChild(new, child)
+        self.visitNodeDefault(node)
+
+    visitNode_h2 = visitNode_h3 = visitNode_h4 = visitNodeHeader
+    end_h2 = end_h3 = end_h4 = '</title><para />'
+    start_title, end_title = '<section><title>', '</title><para />'
+    start_p, end_p = '<para>', '</para>'
+    start_strong, end_strong = start_em, end_em = '<emphasis>', '</emphasis>'
+    start_span_footnote, end_span_footnote = '<footnote><para>', '</para></footnote>'
+    start_q = end_q = '"'
+    start_pre, end_pre = '<programlisting>', '</programlisting>'
+    start_div_note, end_div_note = '<note>', '</note>'
+    start_li, end_li = '<listitem>', '</listitem>'
+    start_ul, end_ul = '<itemizedlist>', '</itemizedlist>'
+    start_ol, end_ol = '<orderedlist>', '</orderedlist>'
+    start_dl, end_dl = '<variablelist>', '</variablelist>'
+    start_dt, end_dt = '<varlistentry><term>', '</term>'
+    start_dd, end_dd = '<listitem><para>', '</para></listitem></varlistentry>'
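+# Illustrative usage, not part of the imported Twisted source: the spitter is
+# driven over a parsed Lore document and emits DocBook markup through the
+# supplied writer callable (names hypothetical):
+#
+#     out = open('index.xml', 'w')
+#     spitter = DocbookSpitter(out.write, 'howto', 'index.xhtml')
+#     spitter.visitNode(document.documentElement)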
diff --git a/ThirdParty/Twisted/twisted/lore/htmlbook.py b/ThirdParty/Twisted/twisted/lore/htmlbook.py
new file mode 100644
index 0000000..3b227c0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/htmlbook.py
@@ -0,0 +1,49 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.python.compat import execfile
+
+
+def getNumber(filename):
+    return None
+
+def getReference(filename):
+    return None
+
+class Book:
+
+    def __init__(self, filename):
+        self.chapters = []
+        self.indexFilename = None
+
+        global Chapter
+        Chapter = self.Chapter
+        global getNumber
+        getNumber = self.getNumber
+        global getReference
+        getReference = self.getNumber
+        global Index
+        Index = self.Index
+
+        if filename:
+            execfile(filename, globals())
+
+    def getFiles(self):
+        return [c[0] for c in self.chapters]
+
+    def getNumber(self, filename):
+        for c in self.chapters:
+            if c[0] == filename:
+                return c[1]
+        return None
+
+    def getIndexFilename(self):
+        return self.indexFilename
+
+    def Chapter(self, filename, number):
+        self.chapters.append((filename, number))
+
+    def Index(self, filename):
+        self.indexFilename = filename
+
+#_book = Book(None)
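+# Illustrative example, not part of the imported Twisted source: the filename
+# handed to Book() is a small Python script run with execfile(), which simply
+# calls the globals installed in __init__ (chapter names hypothetical):
+#
+#     Chapter('intro.xhtml', 1)
+#     Chapter('reactor-basics.xhtml', 2)
+#     Index('bookindex.xhtml')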
diff --git a/ThirdParty/Twisted/twisted/lore/indexer.py b/ThirdParty/Twisted/twisted/lore/indexer.py
new file mode 100644
index 0000000..528e7d6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/indexer.py
@@ -0,0 +1,50 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+def setIndexFilename(filename='index.xhtml'):
+    global indexFilename
+    indexFilename = filename
+
+def getIndexFilename():
+    global indexFilename
+    return indexFilename
+
+def addEntry(filename, anchor, text, reference):
+    global entries
+    if text not in entries:
+        entries[text] = []
+    entries[text].append((filename, anchor, reference))
+
+def clearEntries():
+    global entries
+    entries = {}
+
+def generateIndex():
+    global entries
+    global indexFilename
+
+    if not indexFilename:
+        return
+
+    f = open(indexFilename, 'w')
+    sortedEntries = [(e.lower(), e) for e in entries]
+    sortedEntries.sort()
+    sortedEntries = [e[1] for e in sortedEntries]
+    for text in sortedEntries:
+        refs = []
+        f.write(text.replace('!', ', ') + ': ')
+        for (file, anchor, reference) in entries[text]:
+            refs.append('<a href="%s#%s">%s</a>' % (file, anchor, reference))
+        if text == 'infinite recursion':
+            refs.append('<em>See Also:</em> recursion, infinite\n')
+        if text == 'recursion!infinite':
+            refs.append('<em>See Also:</em> infinite recursion\n')
+        f.write('%s<br />\n' % ", ".join(refs))
+    f.close()
+
+def reset():
+    clearEntries()
+    setIndexFilename()
+
+reset()
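+# Illustrative usage, not part of the imported Twisted source: entries are
+# accumulated in module-level state and written out in one pass (filenames and
+# anchors hypothetical):
+#
+#     setIndexFilename('bookindex.xhtml')
+#     addEntry('intro.xhtml', 'auto0', 'reactor', '1.2')
+#     generateIndex()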
diff --git a/ThirdParty/Twisted/twisted/lore/latex.py b/ThirdParty/Twisted/twisted/lore/latex.py
new file mode 100644
index 0000000..ed843ed
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/latex.py
@@ -0,0 +1,463 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+LaTeX output support for Lore.
+"""
+
+from xml.dom import minidom as dom
+import os.path, re
+from cStringIO import StringIO
+import urlparse
+
+from twisted.web import domhelpers
+from twisted.python import text, procutils
+
+import tree
+
+escapingRE = re.compile(r'([\[\]#$%&_{}^~\\])')
+lowerUpperRE = re.compile(r'([a-z])([A-Z])')
+
+def _escapeMatch(match):
+    c = match.group()
+    if c == '\\':
+        return '$\\backslash$'
+    elif c == '~':
+        return '\\~{}'
+    elif c == '^':
+        return '\\^{}'
+    elif c in '[]':
+        return '{'+c+'}'
+    else:
+        return '\\' + c
+
+def latexEscape(txt):
+    txt = escapingRE.sub(_escapeMatch, txt)
+    return txt.replace('\n', ' ')
+
+entities = {'amp': '\&', 'gt': '>', 'lt': '<', 'quot': '"',
+            'copy': '\\copyright', 'mdash': '---', 'rdquo': '``', 
+            'ldquo': "''"}
+
+
+def realpath(path):
+    # Normalise path
+    cwd = os.getcwd()
+    path = os.path.normpath(os.path.join(cwd, path))
+    return path.replace('\\', '/') # windows slashes make LaTeX blow up
+
+
+def getLatexText(node, writer, filter=lambda x:x, entities=entities):
+    if hasattr(node, 'eref'):
+        return writer(entities.get(node.eref, ''))
+    if hasattr(node, 'data'):
+        if isinstance(node.data, unicode):
+            data = node.data.encode('utf-8')
+        else:
+            data = node.data
+        return writer(filter(data))
+    for child in node.childNodes:
+        getLatexText(child, writer, filter, entities)
+
+class BaseLatexSpitter:
+
+    def __init__(self, writer, currDir='.', filename=''):
+        self.writer = writer
+        self.currDir = currDir
+        self.filename = filename
+
+    def visitNode(self, node):
+        if isinstance(node, dom.Comment):
+            return
+        if not hasattr(node, 'tagName'):
+            self.writeNodeData(node)
+            return
+        getattr(self, 'visitNode_'+node.tagName, self.visitNodeDefault)(node)
+
+    def visitNodeDefault(self, node):
+        self.writer(getattr(self, 'start_'+node.tagName, ''))
+        for child in node.childNodes:
+            self.visitNode(child)
+        self.writer(getattr(self, 'end_'+node.tagName, ''))
+
+    def visitNode_a(self, node):
+        if node.hasAttribute('class'):
+            if node.getAttribute('class').endswith('listing'):
+                return self.visitNode_a_listing(node)
+        if node.hasAttribute('href'):
+            return self.visitNode_a_href(node)
+        if node.hasAttribute('name'):
+            return self.visitNode_a_name(node)
+        self.visitNodeDefault(node)
+
+    def visitNode_span(self, node):
+        if not node.hasAttribute('class'):
+            return self.visitNodeDefault(node)
+        node.tagName += '_'+node.getAttribute('class')
+        self.visitNode(node)
+
+    visitNode_div = visitNode_span
+
+    def visitNode_h1(self, node):
+        pass
+
+    def visitNode_style(self, node):
+        pass
+
+
+class LatexSpitter(BaseLatexSpitter):
+
+    baseLevel = 0
+    diaHack = bool(procutils.which("dia"))
+
+    def writeNodeData(self, node):
+        buf = StringIO()
+        getLatexText(node, buf.write, latexEscape)
+        self.writer(buf.getvalue().replace('<', '$<$').replace('>', '$>$'))
+
+    def visitNode_head(self, node):
+        authorNodes = domhelpers.findElementsWithAttribute(node, 'rel', 'author')
+        authorNodes = [n for n in authorNodes if n.tagName == 'link']
+
+        if authorNodes:
+            self.writer('\\author{')
+            authors = []
+            for aNode in authorNodes:
+                name = aNode.getAttribute('title')
+                href = aNode.getAttribute('href')
+                if href.startswith('mailto:'):
+                    href = href[7:]
+                if href:
+                    if name:
+                        name += ' '
+                    name += '$<$' + href + '$>$'
+                if name:
+                    authors.append(name)
+            
+            self.writer(' \\and '.join(authors))
+            self.writer('}')
+
+        self.visitNodeDefault(node)
+
+    def visitNode_pre(self, node):
+        self.writer('\\begin{verbatim}\n')
+        buf = StringIO()
+        getLatexText(node, buf.write)
+        self.writer(text.removeLeadingTrailingBlanks(buf.getvalue()))
+        self.writer('\\end{verbatim}\n')
+
+    def visitNode_code(self, node):
+        fout = StringIO()
+        getLatexText(node, fout.write, latexEscape)
+        data = lowerUpperRE.sub(r'\1\\linebreak[1]\2', fout.getvalue())
+        data = data[:1] + data[1:].replace('.', '.\\linebreak[1]')
+        self.writer('\\texttt{'+data+'}')
+
+    def visitNode_img(self, node):
+        fileName = os.path.join(self.currDir, node.getAttribute('src'))
+        target, ext = os.path.splitext(fileName)
+        if self.diaHack and os.access(target + '.dia', os.R_OK):
+            ext = '.dia'
+            fileName = target + ext
+        f = getattr(self, 'convert_'+ext[1:], None)
+        if not f:
+            return
+        target = os.path.join(self.currDir, os.path.basename(target)+'.eps')
+        f(fileName, target)
+        target = os.path.basename(target)
+        self._write_img(target)
+
+    def _write_img(self, target):
+        """Write LaTeX for image."""
+        self.writer('\\begin{center}\\includegraphics[%%\n'
+                    'width=1.0\n'
+                    '\\textwidth,height=1.0\\textheight,\nkeepaspectratio]'
+                    '{%s}\\end{center}\n' % target)
+    
+    def convert_png(self, src, target):
+        # XXX there's a *reason* Python comes with the pipes module -
+        # someone fix this to use it.
+        r = os.system('pngtopnm "%s" | pnmtops -noturn > "%s"' % (src, target))
+        if r != 0:
+            raise OSError(r)
+
+    def convert_dia(self, src, target):
+        # EVIL DISGUSTING HACK
+        data = os.popen("gunzip -dc %s" % (src)).read()
+        pre = '<dia:attribute name="scaling">\n          <dia:real val="1"/>'
+        post = '<dia:attribute name="scaling">\n          <dia:real val="0.5"/>'
+        f = open('%s_hacked.dia' % (src), 'wb')
+        f.write(data.replace(pre, post))
+        f.close()
+        os.system('gzip %s_hacked.dia' % (src,))
+        os.system('mv %s_hacked.dia.gz %s_hacked.dia' % (src,src))
+        # Let's pretend we never saw that.
+
+        # Silly dia needs an X server, even though it doesn't display anything.
+        # If this is a problem for you, try using Xvfb.
+        os.system("dia %s_hacked.dia -n -e %s" % (src, target))
+
+    def visitNodeHeader(self, node):
+        level = (int(node.tagName[1])-2)+self.baseLevel
+        self.writer('\n\n\\'+level*'sub'+'section{')
+        spitter = HeadingLatexSpitter(self.writer, self.currDir, self.filename)
+        spitter.visitNodeDefault(node)
+        self.writer('}\n')
+
+    def visitNode_a_listing(self, node):
+        fileName = os.path.join(self.currDir, node.getAttribute('href'))
+        self.writer('\\begin{verbatim}\n')
+        lines = map(str.rstrip, open(fileName).readlines())
+        skipLines = int(node.getAttribute('skipLines') or 0)
+        lines = lines[skipLines:]
+        self.writer(text.removeLeadingTrailingBlanks('\n'.join(lines)))
+        self.writer('\\end{verbatim}')
+
+        # Write a caption for this source listing
+        fileName = os.path.basename(fileName)
+        caption = domhelpers.getNodeText(node)
+        if caption == fileName:
+            caption = 'Source listing'
+        self.writer('\parbox[b]{\linewidth}{\\begin{center}%s --- '
+                    '\\begin{em}%s\\end{em}\\end{center}}'
+                    % (latexEscape(caption), latexEscape(fileName)))
+
+    def visitNode_a_href(self, node):
+        supported_schemes=['http', 'https', 'ftp', 'mailto']
+        href = node.getAttribute('href')
+        if urlparse.urlparse(href)[0] in supported_schemes:
+            text = domhelpers.getNodeText(node)
+            self.visitNodeDefault(node)
+            if text != href:
+                self.writer('\\footnote{%s}' % latexEscape(href))
+        else:
+            path, fragid = (href.split('#', 1) + [None])[:2]
+            if path == '':
+                path = self.filename
+            else:
+                path = os.path.join(os.path.dirname(self.filename), path)
+            #if path == '':
+                #path = os.path.basename(self.filename)
+            #else:
+            #    # Hack for linking to man pages from howtos, i.e.
+            #    # ../doc/foo-man.html -> foo-man.html
+            #    path = os.path.basename(path)
+
+            path = realpath(path)
+
+            if fragid:
+                ref = path + 'HASH' + fragid
+            else:
+                ref = path
+            self.writer('\\textit{')
+            self.visitNodeDefault(node)
+            self.writer('}')
+            self.writer('\\loreref{%s}' % ref)
+
+    def visitNode_a_name(self, node):
+        self.writer('\\label{%sHASH%s}' % (
+                realpath(self.filename), node.getAttribute('name')))
+        self.visitNodeDefault(node)
+
+    def visitNode_table(self, node):
+        rows = [[col for col in row.childNodes
+                     if getattr(col, 'tagName', None) in ('th', 'td')]
+            for row in node.childNodes if getattr(row, 'tagName', None)=='tr']
+        numCols = 1+max([len(row) for row in rows])
+        self.writer('\\begin{table}[ht]\\begin{center}')
+        self.writer('\\begin{tabular}{@{}'+'l'*numCols+'@{}}')
+        for row in rows:
+            th = 0
+            for col in row:
+                self.visitNode(col)
+                self.writer('&')
+                if col.tagName == 'th':
+                    th = 1
+            self.writer('\\\\\n') #\\ ends lines
+            if th:
+                self.writer('\\hline\n')
+        self.writer('\\end{tabular}\n')
+        if node.hasAttribute('title'):
+            self.writer('\\caption{%s}'
+                        % latexEscape(node.getAttribute('title')))
+        self.writer('\\end{center}\\end{table}\n')
+
+    def visitNode_span_footnote(self, node):
+        self.writer('\\footnote{')
+        spitter = FootnoteLatexSpitter(self.writer, self.currDir, self.filename)
+        spitter.visitNodeDefault(node)
+        self.writer('}')
+
+    def visitNode_span_index(self, node):
+        self.writer('\\index{%s}\n' % node.getAttribute('value'))
+        self.visitNodeDefault(node)
+
+    visitNode_h2 = visitNode_h3 = visitNode_h4 = visitNodeHeader
+
+    start_title = '\\title{'
+    end_title = '}\n'
+
+    start_sub = '$_{'
+    end_sub = '}$'
+
+    start_sup = '$^{'
+    end_sup = '}$'
+
+    start_html = '''\\documentclass{article}
+    \\newcommand{\\loreref}[1]{%
+    \\ifthenelse{\\value{page}=\\pageref{#1}}%
+               { (this page)}%
+               { (page \\pageref{#1})}%
+    }'''
+
+    start_body = '\\begin{document}\n\\maketitle\n'
+    end_body = '\\end{document}'
+
+    start_dl = '\\begin{description}\n'
+    end_dl = '\\end{description}\n'
+    start_ul = '\\begin{itemize}\n'
+    end_ul = '\\end{itemize}\n'
+
+    start_ol = '\\begin{enumerate}\n'
+    end_ol = '\\end{enumerate}\n'
+
+    start_li = '\\item '
+    end_li = '\n'
+
+    start_dt = '\\item['
+    end_dt = ']'
+    end_dd = '\n'
+
+    start_p = '\n\n'
+
+    start_strong = start_em = '\\begin{em}'
+    end_strong = end_em = '\\end{em}'
+
+    start_q = "``"
+    end_q = "''"
+
+    start_div_note = '\\begin{quotation}\\textbf{Note:}'
+    end_div_note = '\\end{quotation}'
+
+    start_th = '\\textbf{'
+    end_th = '}'
+
+
+class SectionLatexSpitter(LatexSpitter):
+
+    baseLevel = 1
+
+    start_title = '\\section{'
+
+    def visitNode_title(self, node):
+        self.visitNodeDefault(node)
+        #self.writer('\\label{%s}}\n' % os.path.basename(self.filename))
+        self.writer('\\label{%s}}\n' % realpath(self.filename))
+
+    end_title = end_body = start_body = start_html = ''
+
+
+class ChapterLatexSpitter(SectionLatexSpitter):
+    baseLevel = 0
+    start_title = '\\chapter{'
+
+
+class HeadingLatexSpitter(BaseLatexSpitter):
+    start_q = "``"
+    end_q = "''"
+
+    writeNodeData = LatexSpitter.writeNodeData.im_func
+
+
+class FootnoteLatexSpitter(LatexSpitter):
+    """For multi-paragraph footnotes, this avoids having an empty leading
+    paragraph."""
+
+    start_p = ''
+
+    def visitNode_span_footnote(self, node):
+        self.visitNodeDefault(node)
+
+    def visitNode_p(self, node):
+        self.visitNodeDefault(node)
+        self.start_p = LatexSpitter.start_p
+
+class BookLatexSpitter(LatexSpitter):
+    def visitNode_body(self, node):
+        tocs=domhelpers.locateNodes([node], 'class', 'toc')
+        domhelpers.clearNode(node)
+        if len(tocs):
+            toc=tocs[0]
+            node.appendChild(toc)
+        self.visitNodeDefault(node)
+
+    def visitNode_link(self, node):
+        if not node.hasAttribute('rel'):
+            return self.visitNodeDefault(node)
+        node.tagName += '_'+node.getAttribute('rel')
+        self.visitNode(node)
+
+    def visitNode_link_author(self, node):
+        self.writer('\\author{%s}\n' % node.getAttribute('text'))
+
+    def visitNode_link_stylesheet(self, node):
+        if node.hasAttribute('type') and node.hasAttribute('href'):
+            if node.getAttribute('type')=='application/x-latex':
+                packagename=node.getAttribute('href')
+                packagebase,ext=os.path.splitext(packagename)
+                self.writer('\\usepackage{%s}\n' % packagebase)
+
+    start_html = r'''\documentclass[oneside]{book}
+\usepackage{graphicx}
+\usepackage{times,mathptmx}
+'''
+
+    start_body = r'''\begin{document}
+\maketitle
+\tableofcontents
+'''
+
+    start_li=''
+    end_li=''
+    start_ul=''
+    end_ul=''
+
+
+    def visitNode_a(self, node):
+        if node.hasAttribute('class'):
+            a_class=node.getAttribute('class')
+            if a_class.endswith('listing'):
+                return self.visitNode_a_listing(node)
+            else:
+                return getattr(self, 'visitNode_a_%s' % a_class)(node)
+        if node.hasAttribute('href'):
+            return self.visitNode_a_href(node)
+        if node.hasAttribute('name'):
+            return self.visitNode_a_name(node)
+        self.visitNodeDefault(node)
+
+    def visitNode_a_chapter(self, node):
+        self.writer('\\chapter{')
+        self.visitNodeDefault(node)
+        self.writer('}\n')
+
+    def visitNode_a_sect(self, node):
+        base,ext=os.path.splitext(node.getAttribute('href'))
+        self.writer('\\input{%s}\n' % base)
+
+
+
+def processFile(spitter, fin):
+    # XXX Use Inversion Of Control Pattern to orthogonalize the parsing API
+    # from the Visitor Pattern application. (EnterPrise)
+    dom = tree.parseFileAndReport(fin.name, lambda x: fin).documentElement
+    spitter.visitNode(dom)
+
+
+def convertFile(filename, spitterClass):
+    fout = open(os.path.splitext(filename)[0]+".tex", 'w')
+    spitter = spitterClass(fout.write, os.path.dirname(filename), filename)
+    fin = open(filename)
+    processFile(spitter, fin)
+    fin.close()
+    fout.close()
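+# Illustrative usage, not part of the imported Twisted source: convert a Lore
+# XHTML document to LaTeX next to the input (path hypothetical):
+#
+#     convertFile('howto/index.xhtml', LatexSpitter)   # writes howto/index.tex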
diff --git a/ThirdParty/Twisted/twisted/lore/lint.py b/ThirdParty/Twisted/twisted/lore/lint.py
new file mode 100644
index 0000000..d58d9ad
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/lint.py
@@ -0,0 +1,204 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Checker for common errors in Lore documents.
+"""
+
+from xml.dom import minidom as dom
+import parser, urlparse, os.path
+
+from twisted.lore import tree, process
+from twisted.web import domhelpers
+from twisted.python import reflect
+
+
+# parser.suite in Python 2.3 raises SyntaxError, <2.3 raises parser.ParserError
+parserErrors = (SyntaxError, parser.ParserError)
+
+class TagChecker:
+
+    def check(self, dom, filename):
+        self.hadErrors = 0
+        for method in reflect.prefixedMethods(self, 'check_'):
+            method(dom, filename)
+        if self.hadErrors:
+            raise process.ProcessingFailure("invalid format")
+
+    def _reportError(self, filename, element, error):
+        hlint = element.hasAttribute('hlint') and element.getAttribute('hlint')
+        if hlint != 'off':
+            self.hadErrors = 1
+            pos = getattr(element, '_markpos', None) or (0, 0)
+            print "%s:%s:%s: %s" % ((filename,)+pos+(error,))
+
+
+class DefaultTagChecker(TagChecker):
+
+    def __init__(self, allowedTags, allowedClasses):
+        self.allowedTags = allowedTags
+        self.allowedClasses = allowedClasses
+
+    def check_disallowedElements(self, dom, filename):
+        def m(node, self=self):
+            return not self.allowedTags(node.tagName)
+        for element in domhelpers.findElements(dom, m):
+            self._reportError(filename, element,
+                               'unrecommended tag %s' % element.tagName)
+
+    def check_disallowedClasses(self, dom, filename):
+        def matcher(element, self=self):
+            if not element.hasAttribute('class'):
+                return 0
+            checker = self.allowedClasses.get(element.tagName, lambda x:0)
+            return not checker(element.getAttribute('class'))
+        for element in domhelpers.findElements(dom, matcher):
+            self._reportError(filename, element,
+                              'unknown class %s' %element.getAttribute('class'))
+
+    def check_quote(self, doc, filename):
+        def matcher(node):
+            return ('"' in getattr(node, 'data', '') and
+                    not isinstance(node, dom.Comment) and
+                    not  [1 for n in domhelpers.getParents(node)[1:-1]
+                           if n.tagName in ('pre', 'code')])
+        for node in domhelpers.findNodes(doc, matcher):
+            self._reportError(filename, node.parentNode, 'contains quote')
+
+    def check_styleattr(self, dom, filename):
+        for node in domhelpers.findElementsWithAttribute(dom, 'style'):
+            self._reportError(filename, node, 'explicit style')
+
+    def check_align(self, dom, filename):
+        for node in domhelpers.findElementsWithAttribute(dom, 'align'):
+            self._reportError(filename, node, 'explicit alignment')
+
+    def check_style(self, dom, filename):
+        for node in domhelpers.findNodesNamed(dom, 'style'):
+            if domhelpers.getNodeText(node) != '':
+                self._reportError(filename, node, 'hand hacked style')
+
+    def check_title(self, dom, filename):
+        doc = dom.documentElement
+        title = domhelpers.findNodesNamed(dom, 'title')
+        if len(title)!=1:
+            return self._reportError(filename, doc, 'not exactly one title')
+        h1 = domhelpers.findNodesNamed(dom, 'h1')
+        if len(h1)!=1:
+            return self._reportError(filename, doc, 'not exactly one h1')
+        if domhelpers.getNodeText(h1[0]) != domhelpers.getNodeText(title[0]):
+            self._reportError(filename, h1[0], 'title and h1 text differ')
+
+    def check_80_columns(self, dom, filename):
+        for node in domhelpers.findNodesNamed(dom, 'pre'):
+            # the ps/pdf output is in a font that cuts off at 80 characters,
+            # so this is enforced to make sure the interesting parts (which
+            # are likely to be on the right-hand edge) stay on the printed
+            # page.
+            for line in domhelpers.gatherTextNodes(node, 1).split('\n'):
+                if len(line.rstrip()) > 80:
+                    self._reportError(filename, node,
+                                      'text wider than 80 columns in pre')
+        for node in domhelpers.findNodesNamed(dom, 'a'):
+            if node.getAttribute('class').endswith('listing'):
+                try:
+                    fn = os.path.dirname(filename)
+                    fn = os.path.join(fn, node.getAttribute('href'))
+                    lines = open(fn,'r').readlines()
+                except:
+                    self._reportError(filename, node,
+                                      'bad listing href: %r' %
+                                      node.getAttribute('href'))
+                    continue
+
+                for line in lines:
+                    if len(line.rstrip()) > 80:
+                        self._reportError(filename, node,
+                                          'listing wider than 80 columns')
+
+    def check_pre_py_listing(self, dom, filename):
+        for node in domhelpers.findNodesNamed(dom, 'pre'):
+            if node.getAttribute('class') == 'python':
+                try:
+                    text = domhelpers.getNodeText(node)
+                    # Fix < and >
+                    text = text.replace('&gt;', '>').replace('&lt;', '<')
+                    # Strip blank lines
+                    lines = filter(None,[l.rstrip() for l in text.split('\n')])
+                    # Strip leading space
+                    while not [1 for line in lines if line[:1] not in ('',' ')]:
+                        lines = [line[1:] for line in lines]
+                    text = '\n'.join(lines) + '\n'
+                    try:
+                        parser.suite(text)
+                    except parserErrors, e:
+                        # Pretend the "..." idiom is syntactically valid
+                        text = text.replace("...","'...'")
+                        parser.suite(text)
+                except parserErrors, e:
+                    self._reportError(filename, node,
+                                      'invalid python code:' + str(e))
+
+    def check_anchor_in_heading(self, dom, filename):
+        headingNames = ['h%d' % n for n in range(1,7)]
+        for hname in headingNames:
+            for node in domhelpers.findNodesNamed(dom, hname):
+                if domhelpers.findNodesNamed(node, 'a'):
+                    self._reportError(filename, node, 'anchor in heading')
+
+    def check_texturl_matches_href(self, dom, filename):
+        for node in domhelpers.findNodesNamed(dom, 'a'):
+            if not node.hasAttribute('href'):
+                continue
+            text = domhelpers.getNodeText(node)
+            proto = urlparse.urlparse(text)[0]
+            if proto and ' ' not in text:
+                if text != node.getAttribute('href'):
+                    self._reportError(filename, node,
+                                      'link text does not match href')
+
+    def check_lists(self, dom, filename):
+        for node in (domhelpers.findNodesNamed(dom, 'ul')+
+                     domhelpers.findNodesNamed(dom, 'ol')):
+            if not node.childNodes:
+                self._reportError(filename, node, 'empty list')
+            for child in node.childNodes:
+                if child.nodeName != 'li':
+                    self._reportError(filename, node,
+                                      'only list items allowed in lists')
+
+
+def list2dict(l):
+    d = {}
+    for el in l:
+        d[el] = None
+    return d
+
+classes = list2dict(['shell', 'API', 'python', 'py-prototype', 'py-filename',
+                     'py-src-string', 'py-signature', 'py-src-parameter',
+                     'py-src-identifier', 'py-src-keyword'])
+
+tags = list2dict(["html", "title", "head", "body", "h1", "h2", "h3", "ol", "ul",
+                  "dl", "li", "dt", "dd", "p", "code", "img", "blockquote", "a",
+                  "cite", "div", "span", "strong", "em", "pre", "q", "table",
+                  "tr", "td", "th", "style", "sub", "sup", "link"])
+
+span = list2dict(['footnote', 'manhole-output', 'index'])
+
+div = list2dict(['note', 'boxed', 'doit'])
+
+a = list2dict(['listing', 'py-listing', 'html-listing', 'absolute'])
+
+pre = list2dict(['python', 'shell', 'python-interpreter', 'elisp'])
+
+allowed = {'code': classes.has_key, 'span': span.has_key, 'div': div.has_key,
+           'a': a.has_key, 'pre': pre.has_key, 'ul': lambda x: x=='toc',
+           'ol': lambda x: x=='toc', 'li': lambda x: x=='ignoretoc'}
+
+def getDefaultChecker():
+    return DefaultTagChecker(tags.__contains__, allowed)
+
+def doFile(file, checker):
+    doc = tree.parseFileAndReport(file)
+    if doc:
+        checker.check(doc, file)
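+# Illustrative usage, not part of the imported Twisted source: lint a single
+# document with the default tag/class rules (path hypothetical); problems are
+# printed and a ProcessingFailure is raised if any were found:
+#
+#     doFile('howto/index.xhtml', getDefaultChecker())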
diff --git a/ThirdParty/Twisted/twisted/lore/lmath.py b/ThirdParty/Twisted/twisted/lore/lmath.py
new file mode 100644
index 0000000..fbd7e20
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/lmath.py
@@ -0,0 +1,85 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+LaTeX-defined image support for Lore documents.
+"""
+
+import os, tempfile
+from xml.dom import minidom as dom
+
+from twisted.web import domhelpers
+import latex, tree, lint, default
+
+
+class MathLatexSpitter(latex.LatexSpitter):
+
+    start_html = '\\documentclass{amsart}\n'
+
+    def visitNode_div_latexmacros(self, node):
+        self.writer(domhelpers.getNodeText(node))
+
+    def visitNode_span_latexformula(self, node):
+        self.writer('\[')
+        self.writer(domhelpers.getNodeText(node))
+        self.writer('\]')
+
+def formulaeToImages(document, dir, _system=os.system):
+    # gather all macros
+    macros = ''
+    for node in domhelpers.findElementsWithAttribute(document, 'class',
+                                                     'latexmacros'):
+        macros += domhelpers.getNodeText(node)
+        node.parentNode.removeChild(node)
+    i = 0
+    for node in domhelpers.findElementsWithAttribute(document, 'class',
+                                                    'latexformula'):
+        latexText='''\\documentclass[12pt]{amsart}%s
+                     \\begin{document}\[%s\]
+                     \\end{document}''' % (macros, domhelpers.getNodeText(node))
+        # This file really should be cleaned up by this function, or placed
+        # somewhere such that the calling code can find it and clean it up.
+        file = tempfile.mktemp()
+        f = open(file+'.tex', 'w')
+        f.write(latexText)
+        f.close()
+        _system('latex %s.tex' % file)
+        _system('dvips %s.dvi -o %s.ps' % (os.path.basename(file), file))
+        baseimgname = 'latexformula%d.png' % i
+        imgname = os.path.join(dir, baseimgname)
+        i += 1
+        _system('pstoimg -type png -crop a -trans -interlace -out '
+                  '%s %s.ps' % (imgname, file))
+        newNode = dom.parseString(
+            '<span><br /><img src="%s" /><br /></span>' % (
+                baseimgname,)).documentElement
+        node.parentNode.replaceChild(newNode, node)
+
+
+def doFile(fn, docsdir, ext, url, templ, linkrel='', d=None):
+    d = d or {}
+    doc = tree.parseFileAndReport(fn)
+    formulaeToImages(doc, os.path.dirname(fn))
+    cn = templ.cloneNode(1)
+    tree.munge(doc, cn, linkrel, docsdir, fn, ext, url, d)
+    cn.writexml(open(os.path.splitext(fn)[0]+ext, 'wb'))
+
+
+class ProcessingFunctionFactory(default.ProcessingFunctionFactory):
+
+    latexSpitters = {None: MathLatexSpitter}
+
+    def getDoFile(self):
+        return doFile
+
+    def getLintChecker(self):
+        checker = lint.getDefaultChecker()
+        checker.allowedClasses = checker.allowedClasses.copy()
+        oldDiv = checker.allowedClasses['div']
+        oldSpan = checker.allowedClasses['span']
+        checker.allowedClasses['div'] = lambda x:oldDiv(x) or x=='latexmacros'
+        checker.allowedClasses['span'] = (lambda x:oldSpan(x) or
+                                                     x=='latexformula')
+        return checker
+
+factory = ProcessingFunctionFactory()
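+# Illustrative input, not part of the imported Twisted source: the markup this
+# plugin recognises in a Lore document (formula content hypothetical):
+#
+#     <div class="latexmacros">\newcommand{\R}{\mathbb{R}}</div>
+#     <p>Gaussian integral:
+#        <span class="latexformula">\int_\R e^{-x^2}\,dx = \sqrt{\pi}</span></p>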
diff --git a/ThirdParty/Twisted/twisted/lore/man2lore.py b/ThirdParty/Twisted/twisted/lore/man2lore.py
new file mode 100644
index 0000000..fbcba1c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/man2lore.py
@@ -0,0 +1,295 @@
+# -*- test-case-name: twisted.lore.test.test_man2lore -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+man2lore: Converts man page source (i.e. groff) into lore-compatible html.
+
+This is nasty and hackish (and doesn't support lots of real groff), but is good
+enough for converting fairly simple man pages.
+"""
+
+import re, os
+
+quoteRE = re.compile('"(.*?)"')
+
+
+
+def escape(text):
+    text = text.replace('<', '&lt;').replace('>', '&gt;')
+    text = quoteRE.sub('<q>\\1</q>', text)
+    return text
+
+
+
+def stripQuotes(s):
+    if s[0] == s[-1] == '"':
+        s = s[1:-1]
+    return s
+
+
+
+class ManConverter(object):
+    """
+    Convert a man page to the Lore format.
+
+    @ivar tp: State variable for handling text inside a C{TP} token. It can
+        take values from 0 to 3:
+            - 0: when outside of a C{TP} token.
+            - 1: once a C{TP} token has been encountered. If the previous value
+              was 0, a definition list is started. Then, at the first line of
+              text, a definition term is started.
+            - 2: when the first line after the C{TP} token has been handled.
+              The definition term is closed, and a definition is started with
+              the next line of text.
+            - 3: when the first line of definition data has been handled.
+    @type tp: C{int}
+    """
+    state = 'regular'
+    name = None
+    tp = 0
+    dl = 0
+    para = 0
+
+    def convert(self, inf, outf):
+        self.write = outf.write
+        longline = ''
+        for line in inf.readlines():
+            if line.rstrip() and line.rstrip()[-1] == '\\':
+                longline += line.rstrip()[:-1] + ' '
+                continue
+            if longline:
+                line = longline + line
+                longline = ''
+            self.lineReceived(line)
+        self.closeTags()
+        self.write('</body>\n</html>\n')
+        outf.flush()
+
+
+    def lineReceived(self, line):
+        if line[0] == '.':
+            f = getattr(self, 'macro_' + line[1:3].rstrip().upper(), None)
+            if f:
+                f(line[3:].strip())
+        else:
+            self.text(line)
+
+
+    def continueReceived(self, cont):
+        if not cont:
+            return
+        if cont[0].isupper():
+            f = getattr(self, 'macro_' + cont[:2].rstrip().upper(), None)
+            if f:
+                f(cont[2:].strip())
+        else:
+            self.text(cont)
+
+
+    def closeTags(self):
+        if self.state != 'regular':
+            self.write('</%s>' % self.state)
+        if self.tp == 3:
+            self.write('</dd>\n\n')
+            self.tp = 0
+        if self.dl:
+            self.write('</dl>\n\n')
+            self.dl = 0
+        if self.para:
+            self.write('</p>\n\n')
+            self.para = 0
+
+
+    def paraCheck(self):
+        if not self.tp and not self.para:
+            self.write('<p>')
+            self.para = 1
+
+
+    def macro_TH(self, line):
+        self.write(
+            '<?xml version="1.0"?>\n'
+            '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"\n'
+            '    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">\n')
+        self.write('<html><head>\n')
+        parts = [stripQuotes(x) for x in line.split(' ', 2)] + ['', '']
+        title, manSection = parts[:2]
+        self.write('<title>%s.%s</title>' % (title, manSection))
+        self.write('</head>\n<body>\n\n')
+        self.write('<h1>%s.%s</h1>\n\n' % (title, manSection))
+
+    macro_DT = macro_TH
+
+
+    def macro_SH(self, line):
+        self.closeTags()
+        self.write('<h2>')
+        self.para = 1
+        self.text(stripQuotes(line))
+        self.para = 0
+        self.closeTags()
+        self.write('</h2>\n\n')
+
+
+    def macro_B(self, line):
+        words = line.split()
+        words[0] = '\\fB' + words[0] + '\\fR '
+        self.text(' '.join(words))
+
+
+    def macro_NM(self, line):
+        if not self.name:
+           self.name = line
+        self.text(self.name + ' ')
+
+
+    def macro_NS(self, line):
+        parts = line.split(' Ns ')
+        i = 0
+        for l in parts:
+            i = not i
+            if i:
+                self.text(l)
+            else:
+                self.continueReceived(l)
+
+
+    def macro_OO(self, line):
+        self.text('[')
+        self.continueReceived(line)
+
+
+    def macro_OC(self, line):
+        self.text(']')
+        self.continueReceived(line)
+
+
+    def macro_OP(self, line):
+        self.text('[')
+        self.continueReceived(line)
+        self.text(']')
+
+
+    def macro_FL(self, line):
+        parts = line.split()
+        self.text('\\fB-%s\\fR' % parts[0])
+        self.continueReceived(' '.join(parts[1:]))
+
+
+    def macro_AR(self, line):
+        parts = line.split()
+        self.text('\\fI %s\\fR' % parts[0])
+        self.continueReceived(' '.join(parts[1:]))
+
+
+    def macro_PP(self, line):
+        self.closeTags()
+
+
+    def macro_IC(self, line):
+        cmd = line.split(' ', 1)[0]
+        args = line[line.index(cmd) + len(cmd):]
+        args = args.split(' ')
+        text = cmd
+        while args:
+            arg = args.pop(0)
+            if arg.lower() == "ar":
+                text += " \\fU%s\\fR" % (args.pop(0),)
+            elif arg.lower() == "op":
+                ign = args.pop(0)
+                text += " [\\fU%s\\fR]" % (args.pop(0),)
+
+        self.text(text)
+
+
+    def macro_TP(self, line):
+        """
+        Handle the C{TP} token: start a definition list if it's the first
+        token, or close the previous definition data.
+        """
+        if self.tp == 3:
+            self.write('</dd>\n\n')
+            self.tp = 1
+        else:
+            self.tp = 1
+            self.write('<dl>')
+            self.dl = 1
+
+
+    def macro_BL(self, line):
+        self.write('<dl>')
+        self.tp = 1
+
+
+    def macro_EL(self, line):
+        if self.tp == 3:
+            self.write('</dd>')
+            self.tp = 1
+        self.write('</dl>\n\n')
+        self.tp = 0
+
+
+    def macro_IT(self, line):
+        if self.tp == 3:
+            self.write('</dd>')
+            self.tp = 1
+        self.continueReceived(line)
+
+
+    def text(self, line):
+        """
+        Handle a line of text without a detected token.
+        """
+        if self.tp == 1:
+            self.write('<dt>')
+        if self.tp == 2:
+            self.write('<dd>')
+        self.paraCheck()
+
+        bits = line.split('\\')
+        self.write(escape(bits[0]))
+        for bit in bits[1:]:
+            if bit[:2] == 'fI':
+                self.write('<em>' + escape(bit[2:]))
+                self.state = 'em'
+            elif bit[:2] == 'fB':
+                self.write('<strong>' + escape(bit[2:]))
+                self.state = 'strong'
+            elif bit[:2] == 'fR':
+                self.write('</%s>' % self.state)
+                self.write(escape(bit[2:]))
+                self.state = 'regular'
+            elif bit[:2] == 'fU':
+                # fU doesn't really exist, but it helps us to manage underlined
+                # text.
+                self.write('<u>' + escape(bit[2:]))
+                self.state = 'u'
+            elif bit[:3] == '(co':
+                self.write('©' + escape(bit[3:]))
+            else:
+                self.write(escape(bit))
+
+        if self.tp == 1:
+            self.write('</dt>')
+            self.tp = 2
+        elif self.tp == 2:
+            self.tp = 3
+
+
+
+class ProcessingFunctionFactory:
+
+    def generate_lore(self, d, filenameGenerator=None):
+        ext = d.get('ext', '.html')
+        return lambda file,_: ManConverter().convert(open(file),
+                                    open(os.path.splitext(file)[0]+ext, 'w'))
+
+
+
+factory = ProcessingFunctionFactory()
+
+
+if __name__ == '__main__':
+    import sys
+    mc = ManConverter().convert(open(sys.argv[1]), sys.stdout)
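The converter above maps roff requests such as .TH, .SH and .TP, plus the
font escapes \fB, \fI, \fR and the invented \fU, onto XHTML. A minimal
sketch of driving it directly, assuming the file is importable as
twisted.lore.man2lore and that convert() accepts any pair of file-like
objects, as the __main__ block above suggests::

    from StringIO import StringIO
    from twisted.lore.man2lore import ManConverter  # assumed module path

    src = StringIO('.TH EXAMPLE 1\n'
                   '.SH NAME\n'
                   'example \\- prints a \\fBbold\\fR word\n')
    out = StringIO()
    ManConverter().convert(src, out)
    # out.getvalue() should contain an XHTML fragment with <h1>EXAMPLE.1</h1>,
    # <h2>NAME</h2> and a <strong>bold</strong> run produced by text() above.
    print out.getvalue()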
diff --git a/ThirdParty/Twisted/twisted/lore/numberer.py b/ThirdParty/Twisted/twisted/lore/numberer.py
new file mode 100644
index 0000000..f91cc28
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/numberer.py
@@ -0,0 +1,33 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+def reset():
+    resetFilenum()
+    setNumberSections(False)
+
+def resetFilenum():
+    setFilenum(0)
+
+def setFilenum(arg):
+    global filenum
+    filenum = arg
+
+def getFilenum():
+    global filenum
+    return filenum
+
+def getNextFilenum():
+    global filenum
+    filenum += 1
+    return filenum
+
+def setNumberSections(arg):
+    global numberSections
+    numberSections = arg
+
+def getNumberSections():
+    global numberSections
+    return numberSections
+
+reset()
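numberer just holds two module-level counters used while generating output:
the running file number and the number-sections flag. A quick illustration
of the API defined above::

    from twisted.lore import numberer

    numberer.reset()                    # filenum = 0, numberSections = False
    numberer.setNumberSections(True)
    print numberer.getNextFilenum()     # 1
    print numberer.getNextFilenum()     # 2
    print numberer.getNumberSections()  # True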
diff --git a/ThirdParty/Twisted/twisted/lore/process.py b/ThirdParty/Twisted/twisted/lore/process.py
new file mode 100644
index 0000000..ec5d036
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/process.py
@@ -0,0 +1,120 @@
+# -*- test-case-name: twisted.lore.test.test_lore -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+import sys, os
+import tree #todo: get rid of this later
+import indexer
+
+class NoProcessorError(Exception):
+    pass
+
+class ProcessingFailure(Exception):
+    pass
+
+cols = 79
+
+def dircount(d):
+    return len([1 for el in d.split("/") if el != '.'])
+
+
+class Walker:
+
+    def __init__(self, df, fext, linkrel):
+        self.df = df
+        self.linkrel = linkrel
+        self.fext = fext
+        self.walked = []
+        self.failures = []
+
+    def walkdir(self, topdir, prefix=''):
+        self.basecount = dircount(topdir)
+        os.path.walk(topdir, self.walk, prefix)
+
+    def walk(self, prefix, d, names):
+        linkrel = prefix + '../' * (dircount(d) - self.basecount)
+        for name in names:
+            fullpath = os.path.join(d, name)
+            fext = os.path.splitext(name)[1]
+            if fext == self.fext:
+                self.walked.append((linkrel, fullpath))
+
+    def generate(self):
+        i = 0
+        indexer.clearEntries()
+        tree.filenum = 0
+        for linkrel, fullpath in self.walked:
+            linkrel = self.linkrel + linkrel
+            i += 1
+            fname = os.path.splitext(fullpath)[0]
+            self.percentdone((float(i) / len(self.walked)), fname)
+            try:
+                self.df(fullpath, linkrel)
+            except ProcessingFailure, e:
+                self.failures.append((fullpath, e))
+        indexer.generateIndex()
+        self.percentdone(1., None)
+
+    def percentdone(self, percent, fname):
+        # override for neater progress bars
+        proglen = 40
+        hashes = int(percent * proglen)
+        spaces = proglen - hashes
+        progstat = "[%s%s] (%s)" %('#' * hashes, ' ' * spaces,fname or "*Done*")
+        progstat += (cols - len(progstat)) * ' '
+        progstat += '\r'
+        sys.stdout.write(progstat)
+        sys.stdout.flush()
+        if fname is None:
+            print
+
+class PlainReportingWalker(Walker):
+
+    def percentdone(self, percent, fname):
+        if fname:
+            print fname
+
+class NullReportingWalker(Walker):
+
+    def percentdone(self, percent, fname):
+        pass
+
+def parallelGenerator(originalFileName, outputExtension):
+    return os.path.splitext(originalFileName)[0]+outputExtension
+
+def fooAddingGenerator(originalFileName, outputExtension):
+    return os.path.splitext(originalFileName)[0]+"foo"+outputExtension
+
+def outputdirGenerator(originalFileName, outputExtension, inputdir, outputdir):
+    originalFileName = os.path.abspath(originalFileName)
+    abs_inputdir = os.path.abspath(inputdir)
+    if os.path.commonprefix((originalFileName, abs_inputdir)) != abs_inputdir:
+        raise ValueError("Original file name '" + originalFileName +
+              "' not under input directory '" + abs_inputdir + "'")
+
+    adjustedPath = os.path.join(outputdir, os.path.basename(originalFileName))
+    return tree.getOutputFileName(adjustedPath, outputExtension)
+
+def getFilenameGenerator(config, outputExt):
+    if config.get('outputdir'):
+        return (lambda originalFileName, outputExtension:
+            outputdirGenerator(originalFileName, outputExtension,
+                               os.path.abspath(config.get('inputdir')),
+                               os.path.abspath(config.get('outputdir'))))
+    else:
+        return tree.getOutputFileName
+
+def getProcessor(module, output, config):
+    try:
+        m = getattr(module.factory, 'generate_'+output)
+    except AttributeError:
+        raise NoProcessorError("cannot generate "+output+" output")
+
+    if config.get('ext'):
+        ext = config['ext']
+    else:
+        from default import htmlDefault
+        ext = htmlDefault['ext']
+
+    return m(config, getFilenameGenerator(config, ext))
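The module above has two halves: the Walker classes collect (linkrel,
fullpath) pairs for files matching the extension and hand each one to the
processing function df supplied at construction time, while getProcessor()
builds that df from a module's factory. A rough sketch of driving a walker
by hand, with a stand-in df and a placeholder docs/ directory::

    from twisted.lore import process, indexer

    indexer.setIndexFilename(None)  # no index file, as in runs without --index

    def df(fullpath, linkrel):
        # stand-in processing function; a real one writes the converted output
        print fullpath, linkrel

    walker = process.PlainReportingWalker(df, '.xhtml', '')
    walker.walkdir('docs')   # queue every *.xhtml under docs/
    walker.generate()        # call df(fullpath, linkrel) for each queued file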
diff --git a/ThirdParty/Twisted/twisted/lore/scripts/__init__.py b/ThirdParty/Twisted/twisted/lore/scripts/__init__.py
new file mode 100644
index 0000000..265270e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/scripts/__init__.py
@@ -0,0 +1 @@
+"lore scripts"
diff --git a/ThirdParty/Twisted/twisted/lore/scripts/lore.py b/ThirdParty/Twisted/twisted/lore/scripts/lore.py
new file mode 100644
index 0000000..c82b2c6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/scripts/lore.py
@@ -0,0 +1,155 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+from zope.interface import Interface, Attribute
+
+from twisted.lore import process, indexer, numberer, htmlbook
+
+from twisted.python import usage, reflect
+from twisted import plugin as plugin
+
+class IProcessor(Interface):
+    """
+    """
+
+    name = Attribute("The user-facing name of this processor")
+
+    moduleName = Attribute(
+        "The fully qualified Python name of the object defining "
+        "this processor.  This object (typically a module) should "
+        "have a C{factory} attribute with C{generate_<output>} methods.")
+
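As the attribute documentation above says, an input format is resolved either
through an IProcessor plugin (matched by name and loaded via moduleName) or,
failing that, by importing the input name as a module; either way
process.getProcessor() expects a module whose factory has generate_<output>
methods returning a df(path, linkrel) callable. A minimal hypothetical
processor module, sketched only to show that shape (every name below is made
up)::

    # mylore.py -- selectable with: lore -i mylore -o html ...
    import os, shutil

    class Factory:
        def generate_html(self, config, filenameGenerator=None):
            ext = config.get('ext', '.html')
            namer = filenameGenerator or (
                lambda f, e: os.path.splitext(f)[0] + e)
            def df(fullpath, linkrel):
                # trivial "conversion": copy the input to the generated name
                shutil.copyfile(fullpath, namer(fullpath, ext))
            return df

    factory = Factory()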
+
+class Options(usage.Options):
+
+    longdesc = "lore converts documentation formats."
+
+    optFlags = [["plain", 'p', "Report filenames without progress bar"],
+                ["null", 'n', "Do not report filenames"],
+                ["number", 'N', "Add chapter/section numbers to section headings"],
+]
+
+    optParameters = [
+                     ["input", "i", 'lore'],
+                     ["inputext", "e", ".xhtml", "The extension that your Lore input files have"],
+                     ["docsdir", "d", None],
+                     ["linkrel", "l", ''],
+                     ["output", "o", 'html'],
+                     ["index", "x", None, "The base filename you want to give your index file"],
+                     ["book", "b", None, "The book file to generate a book from"],
+                     ["prefixurl", None, "", "The prefix to stick on to relative links; only useful when processing directories"],
+                    ]
+
+    compData = usage.Completions(
+        extraActions=[usage.CompleteFiles(descr="files", repeat=True)])
+
+    def __init__(self, *args, **kw):
+        usage.Options.__init__(self, *args, **kw)
+        self.config = {}
+
+    def opt_config(self, s):
+        if '=' in s:
+            k, v = s.split('=', 1)
+            self.config[k] = v
+        else:
+            self.config[s] = 1
+
+    def parseArgs(self, *files):
+        self['files'] = files
+
+
+def getProcessor(input, output, config):
+    plugins = plugin.getPlugins(IProcessor)
+    for plug in plugins:
+        if plug.name == input:
+            module = reflect.namedModule(plug.moduleName)
+            break
+    else:
+        # try treating it as a module name
+        try:
+            module = reflect.namedModule(input)
+        except ImportError:
+            print '%s: no such input: %s' % (sys.argv[0], input)
+            return
+    try:
+        return process.getProcessor(module, output, config)
+    except process.NoProcessorError, e:
+        print "%s: %s" % (sys.argv[0], e)
+
+
+def getWalker(df, opt):
+    klass = process.Walker
+    if opt['plain']:
+        klass = process.PlainReportingWalker
+    if opt['null']:
+        klass = process.NullReportingWalker
+    return klass(df, opt['inputext'], opt['linkrel'])
+
+
+def runGivenOptions(opt):
+    """Do everything but parse the options; useful for testing.
+    Returns a descriptive string if there's an error."""
+
+    book = None
+    if opt['book']:
+        book = htmlbook.Book(opt['book'])
+
+    df = getProcessor(opt['input'], opt['output'], opt.config)
+    if not df:
+        return 'getProcessor() failed'
+
+    walker = getWalker(df, opt)
+
+    if opt['files']:
+        for filename in opt['files']:
+            walker.walked.append(('', filename))
+    elif book:
+        for filename in book.getFiles():
+            walker.walked.append(('', filename))
+    else:
+        walker.walkdir(opt['docsdir'] or '.', opt['prefixurl'])
+
+    if opt['index']:
+        indexFilename = opt['index']
+    elif book:
+        indexFilename = book.getIndexFilename()
+    else:
+        indexFilename = None
+
+    if indexFilename:
+        indexer.setIndexFilename("%s.%s" % (indexFilename, opt['output']))
+    else:
+        indexer.setIndexFilename(None)
+
+    ## TODO: get numberSections from book, if any
+    numberer.setNumberSections(opt['number'])
+
+    walker.generate()
+
+    if walker.failures:
+        for (file, errors) in walker.failures:
+            for error in errors:
+                print "%s:%s" % (file, error)
+        return 'Walker failures'
+
+
+def run():
+    opt = Options()
+    try:
+        opt.parseOptions()
+    except usage.UsageError, errortext:
+        print '%s: %s' % (sys.argv[0], errortext)
+        print '%s: Try --help for usage details.' % sys.argv[0]
+        sys.exit(1)
+
+    result = runGivenOptions(opt)
+    if result:
+        print result
+        sys.exit(1)
+
+
+if __name__ == '__main__':
+    run()
+
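Since runGivenOptions() takes an already-parsed Options instance, it can be
called directly from tests or other scripts rather than through run(). A
small sketch; the template and document names are placeholders::

    from twisted.lore.scripts.lore import Options, runGivenOptions

    opt = Options()
    opt.parseOptions(['--null', '-i', 'lore', '-o', 'html',
                      '--config', 'template=template.tpl',
                      'doc.xhtml'])
    error = runGivenOptions(opt)
    if error:
        print error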
diff --git a/ThirdParty/Twisted/twisted/lore/slides.py b/ThirdParty/Twisted/twisted/lore/slides.py
new file mode 100644
index 0000000..fcddbc5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/slides.py
@@ -0,0 +1,359 @@
+# -*- test-case-name: twisted.lore.test.test_slides -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Rudimentary slide support for Lore.
+
+TODO:
+    - Complete mgp output target
+        - syntax highlighting
+        - saner font handling
+        - probably lots more
+    - Add HTML output targets
+        - one slide per page (with navigation links)
+        - all in one page
+
+Example input file::
+    <html>
+
+    <head><title>Title of talk</title></head>
+
+    <body>
+    <h1>Title of talk</h1>
+
+    <h2>First Slide</h2>
+
+    <ul>
+      <li>Bullet point</li>
+      <li>Look ma, I'm <strong>bold</strong>!</li>
+      <li>... etc ...</li>
+    </ul>
+
+
+    <h2>Second Slide</h2>
+
+    <pre class="python">
+    # Sample code.
+    print "Hello, World!"
+    </pre>
+
+    </body>
+
+    </html>
+"""
+
+from xml.dom import minidom as dom
+import os.path, re
+from cStringIO import StringIO
+
+from twisted.lore import default
+from twisted.web import domhelpers
+from twisted.python import text
+# These should be factored out
+from twisted.lore.latex import BaseLatexSpitter, LatexSpitter, processFile
+from twisted.lore.latex import getLatexText, HeadingLatexSpitter
+from twisted.lore.tree import getHeaders
+from twisted.lore.tree import removeH1, fixAPI, fontifyPython
+from twisted.lore.tree import addPyListings, addHTMLListings, setTitle
+
+hacked_entities = { 'amp': ' &', 'gt': ' >', 'lt': ' <', 'quot': ' "',
+                    'copy': ' (c)'}
+
+entities = { 'amp': '&', 'gt': '>', 'lt': '<', 'quot': '"',
+             'copy': '(c)'}
+
+class MagicpointOutput(BaseLatexSpitter):
+    bulletDepth = 0
+
+    def writeNodeData(self, node):
+        buf = StringIO()
+        getLatexText(node, buf.write, entities=hacked_entities)
+        data = buf.getvalue().rstrip().replace('\n', ' ')
+        self.writer(re.sub(' +', ' ', data))
+
+    def visitNode_title(self, node):
+        self.title = domhelpers.getNodeText(node)
+
+    def visitNode_body(self, node):
+        # Adapted from tree.generateToC
+        self.fontStack = [('standard', None)]
+
+        # Title slide
+        self.writer(self.start_h2)
+        self.writer(self.title)
+        self.writer(self.end_h2)
+
+        self.writer('%center\n\n\n\n\n')
+        for authorNode in domhelpers.findElementsWithAttribute(node, 'class', 'author'):
+            getLatexText(authorNode, self.writer, entities=entities)
+            self.writer('\n')
+
+        # Table of contents
+        self.writer(self.start_h2)
+        self.writer(self.title)
+        self.writer(self.end_h2)
+
+        for element in getHeaders(node):
+            level = int(element.tagName[1])-1
+            self.writer(level * '\t')
+            self.writer(domhelpers.getNodeText(element))
+            self.writer('\n')
+
+        self.visitNodeDefault(node)
+
+    def visitNode_div_author(self, node):
+        # Skip this node; it's already been used by visitNode_body
+        pass
+
+    def visitNode_div_pause(self, node):
+        self.writer('%pause\n')
+
+    def visitNode_pre(self, node):
+        # TODO: Syntax highlighting
+        buf = StringIO()
+        getLatexText(node, buf.write, entities=entities)
+        data = buf.getvalue()
+        data = text.removeLeadingTrailingBlanks(data)
+        lines = data.split('\n')
+        self.fontStack.append(('typewriter', 4))
+        self.writer('%' + self.fontName() + '\n')
+        for line in lines:
+            self.writer(' ' + line + '\n')
+        del self.fontStack[-1]
+        self.writer('%' + self.fontName() + '\n')
+
+    def visitNode_ul(self, node):
+        if self.bulletDepth > 0:
+            self.writer(self._start_ul)
+        self.bulletDepth += 1
+        self.start_li = self._start_li * self.bulletDepth
+        self.visitNodeDefault(node)
+        self.bulletDepth -= 1
+        self.start_li = self._start_li * self.bulletDepth
+
+    def visitNode_strong(self, node):
+        self.doFont(node, 'bold')
+
+    def visitNode_em(self, node):
+        self.doFont(node, 'italic')
+
+    def visitNode_code(self, node):
+        self.doFont(node, 'typewriter')
+
+    def doFont(self, node, style):
+        self.fontStack.append((style, None))
+        self.writer(' \n%cont, ' + self.fontName() + '\n')
+        self.visitNodeDefault(node)
+        del self.fontStack[-1]
+        self.writer('\n%cont, ' + self.fontName() + '\n')
+
+    def fontName(self):
+        names = [x[0] for x in self.fontStack]
+        if 'typewriter' in names:
+            name = 'typewriter'
+        else:
+            name = ''
+
+        if 'bold' in names:
+            name += 'bold'
+        if 'italic' in names:
+            name += 'italic'
+
+        if name == '':
+            name = 'standard'
+
+        sizes = [x[1] for x in self.fontStack]
+        sizes.reverse()
+        for size in sizes:
+            if size:
+                return 'font "%s", size %d' % (name, size)
+
+        return 'font "%s"' % name
+
+    start_h2 = "%page\n\n"
+    end_h2 = '\n\n\n'
+
+    _start_ul = '\n'
+
+    _start_li = "\t"
+    end_li = "\n"
+
+
+def convertFile(filename, outputter, template, ext=".mgp"):
+    fout = open(os.path.splitext(filename)[0]+ext, 'w')
+    fout.write(open(template).read())
+    spitter = outputter(fout.write, os.path.dirname(filename), filename)
+    fin = open(filename)
+    processFile(spitter, fin)
+    fin.close()
+    fout.close()
+
+
+# HTML DOM tree stuff
+
+def splitIntoSlides(document):
+    body = domhelpers.findNodesNamed(document, 'body')[0]
+    slides = []
+    slide = []
+    title = '(unset)'
+    for child in body.childNodes:
+        if isinstance(child, dom.Element) and child.tagName == 'h2':
+            if slide:
+                slides.append((title, slide))
+                slide = []
+            title = domhelpers.getNodeText(child)
+        else:
+            slide.append(child)
+    slides.append((title, slide))
+    return slides
+
+def insertPrevNextLinks(slides, filename, ext):
+    for slide in slides:
+        for name, offset in (("previous", -1), ("next", +1)):
+            if (slide.pos > 0 and name == "previous") or \
+               (slide.pos < len(slides)-1 and name == "next"):
+                for node in domhelpers.findElementsWithAttribute(slide.dom, "class", name):
+                    if node.tagName == 'a':
+                        node.setAttribute('href', '%s-%d%s'
+                                          % (filename[0], slide.pos+offset, ext))
+                    else:
+                        text = dom.Text()
+                        text.data = slides[slide.pos+offset].title
+                        node.appendChild(text)
+            else:
+                for node in domhelpers.findElementsWithAttribute(slide.dom, "class", name):
+                    pos = 0
+                    for child in node.parentNode.childNodes:
+                        if child is node:
+                            del node.parentNode.childNodes[pos]
+                            break
+                        pos += 1
+
+
+class HTMLSlide:
+    def __init__(self, dom, title, pos):
+        self.dom = dom
+        self.title = title
+        self.pos = pos
+
+
+def munge(document, template, linkrel, d, fullpath, ext, url, config):
+    # FIXME: This has *way* too much duplicated crap in common with tree.munge
+    #fixRelativeLinks(template, linkrel)
+    removeH1(document)
+    fixAPI(document, url)
+    fontifyPython(document)
+    addPyListings(document, d)
+    addHTMLListings(document, d)
+    #fixLinks(document, ext)
+    #putInToC(template, generateToC(document))
+    template = template.cloneNode(1)
+
+    # Insert the slides into the template
+    slides = []
+    pos = 0
+    for title, slide in splitIntoSlides(document):
+        t = template.cloneNode(1)
+        text = dom.Text()
+        text.data = title
+        setTitle(t, [text])
+        tmplbody = domhelpers.findElementsWithAttribute(t, "class", "body")[0]
+        tmplbody.childNodes = slide
+        tmplbody.setAttribute("class", "content")
+        # FIXME: Next/Prev links
+        # FIXME: Perhaps there should be a "Template" class?  (setTitle/setBody
+        #        could be methods...)
+        slides.append(HTMLSlide(t, title, pos))
+        pos += 1
+
+    insertPrevNextLinks(slides, os.path.splitext(os.path.basename(fullpath)), ext)
+
+    return slides
+
+from tree import makeSureDirectoryExists
+
+def getOutputFileName(originalFileName, outputExtension, index):
+    return os.path.splitext(originalFileName)[0]+'-'+str(index) + outputExtension
+
+def doFile(filename, linkrel, ext, url, templ, options={}, outfileGenerator=getOutputFileName):
+    from tree import parseFileAndReport
+    doc = parseFileAndReport(filename)
+    slides = munge(doc, templ, linkrel, os.path.dirname(filename), filename, ext, url, options)
+    for slide, index in zip(slides, range(len(slides))):
+        newFilename = outfileGenerator(filename, ext, index)
+        makeSureDirectoryExists(newFilename)
+        f = open(newFilename, 'wb')
+        slide.dom.writexml(f)
+        f.close()
+
+# Prosper output
+
+class ProsperSlides(LatexSpitter):
+    firstSlide = 1
+    start_html = '\\documentclass[ps]{prosper}\n'
+    start_body = '\\begin{document}\n'
+    start_div_author = '\\author{'
+    end_div_author = '}'
+
+    def visitNode_h2(self, node):
+        if self.firstSlide:
+            self.firstSlide = 0
+            self.end_body = '\\end{slide}\n\n' + self.end_body
+        else:
+            self.writer('\\end{slide}\n\n')
+        self.writer('\\begin{slide}{')
+        spitter = HeadingLatexSpitter(self.writer, self.currDir, self.filename)
+        spitter.visitNodeDefault(node)
+        self.writer('}')
+
+    def _write_img(self, target):
+        self.writer('\\begin{center}\\includegraphics[%%\nwidth=1.0\n\\textwidth,'
+                    'height=1.0\\textheight,\nkeepaspectratio]{%s}\\end{center}\n' % target)
+
+
+class PagebreakLatex(LatexSpitter):
+
+    everyN = 1
+    currentN = 0
+    seenH2 = 0
+
+    start_html = LatexSpitter.start_html+"\\date{}\n"
+    start_body = '\\begin{document}\n\n'
+
+    def visitNode_h2(self, node):
+        if not self.seenH2:
+            self.currentN = 0
+            self.seenH2 = 1
+        else:
+            self.currentN += 1
+            self.currentN %= self.everyN
+            if not self.currentN:
+                self.writer('\\clearpage\n')
+        level = (int(node.tagName[1])-2)+self.baseLevel
+        self.writer('\n\n\\'+level*'sub'+'section*{')
+        spitter = HeadingLatexSpitter(self.writer, self.currDir, self.filename)
+        spitter.visitNodeDefault(node)
+        self.writer('}\n')
+
+class TwoPagebreakLatex(PagebreakLatex):
+
+    everyN = 2
+
+
+class SlidesProcessingFunctionFactory(default.ProcessingFunctionFactory):
+
+    latexSpitters = default.ProcessingFunctionFactory.latexSpitters.copy()
+    latexSpitters['prosper'] = ProsperSlides
+    latexSpitters['page'] = PagebreakLatex
+    latexSpitters['twopage'] = TwoPagebreakLatex
+
+    def getDoFile(self):
+        return doFile
+
+    def generate_mgp(self, d, fileNameGenerator=None):
+        template = d.get('template', 'template.mgp')
+        df = lambda file, linkrel: convertFile(file, MagicpointOutput, template, ext=".mgp")
+        return df
+
+factory=SlidesProcessingFunctionFactory()
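For an input document shaped like the example in the module docstring above,
the mgp target reduces to convertFile() with MagicpointOutput and a
MagicPoint template such as the template.mgp added below. A minimal sketch
with placeholder file names::

    from twisted.lore.slides import convertFile, MagicpointOutput

    # Writes talk.mgp next to talk.html; the template file is copied in
    # verbatim before the generated slides.
    convertFile('talk.html', MagicpointOutput, 'template.mgp', ext='.mgp')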
diff --git a/ThirdParty/Twisted/twisted/lore/template.mgp b/ThirdParty/Twisted/twisted/lore/template.mgp
new file mode 100644
index 0000000..79fc4d1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/template.mgp
@@ -0,0 +1,24 @@
+%%deffont "standard"   tfont "Arial.ttf"
+%%deffont "bold"       tfont "Arial_Bold.ttf"
+%%deffont "italic"     tfont "Arial_Italic.ttf"
+%%deffont "bolditalic" tfont "Arial_Bold_Italic.ttf"
+%%deffont "typewriter" tfont "Courier_New.ttf"
+%deffont "standard"   xfont "Arial-medium-r"
+%deffont "bold"       xfont "Arial-bold-r"
+%deffont "italic"     xfont "Arial-medium-i"
+%deffont "bolditalic" xfont "Arial-bold-i"
+%deffont "typewriter" xfont "andale mono"
+%%deffont "standard"   tfont "tahoma.ttf"
+%%deffont "thick"      tfont "tahomabd.ttf"
+#%deffont "typewriter" tfont "Andale_Mono.ttf"
+%default 1 area 90 90, leftfill, size 2, fore "white", back "black", font "bold"
+%default 2 size 7, vgap 10, prefix " ", center
+%default 3 size 2, bar "gray70", vgap 10, leftfill
+%default 4 size 1, fore "white", vgap 30, prefix " ", font "standard"
+%default 5 size 5
+%%tab 1 size 5, vgap 40, prefix "  ", icon box "green" 50
+%%tab 2 size 4, vgap 40, prefix "      ", icon arc "yellow" 50
+%%tab 3 size 3, vgap 40, prefix "            ", icon delta3 "white" 40
+%tab 1 size 5, vgap 50, prefix "  ", icon box "green" 50
+%tab 2 size 5, vgap 50, prefix "      ", icon arc "yellow" 50
+%tab 3 size 4, vgap 50, prefix "            ", icon delta3 "white" 40
diff --git a/ThirdParty/Twisted/twisted/lore/test/__init__.py b/ThirdParty/Twisted/twisted/lore/test/__init__.py
new file mode 100644
index 0000000..1641a43
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/__init__.py
@@ -0,0 +1 @@
+"lore tests"
diff --git a/ThirdParty/Twisted/twisted/lore/test/lore_index_file_out.html b/ThirdParty/Twisted/twisted/lore/test/lore_index_file_out.html
new file mode 100644
index 0000000..0490f0c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/lore_index_file_out.html
@@ -0,0 +1,2 @@
+language of programming: <a href="lore_index_test.html#index02">1.3</a><br />
+programming language: <a href="lore_index_test.html#index01">1.2</a><br />
diff --git a/ThirdParty/Twisted/twisted/lore/test/lore_index_file_out_multiple.html b/ThirdParty/Twisted/twisted/lore/test/lore_index_file_out_multiple.html
new file mode 100644
index 0000000..fa0235e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/lore_index_file_out_multiple.html
@@ -0,0 +1,5 @@
+aahz: <a href="lore_index_test2.html#index03">link</a>
+aahz2: <a href="lore_index_test2.html#index02">link</a>
+language of programming: <a href="lore_index_test.html#index02">link</a>
+<a href="lore_index_test2.html#index01">link</a>
+programming language: <a href="lore_index_test.html#index01">link</a>
diff --git a/ThirdParty/Twisted/twisted/lore/test/lore_index_file_unnumbered_out.html b/ThirdParty/Twisted/twisted/lore/test/lore_index_file_unnumbered_out.html
new file mode 100644
index 0000000..fa724d7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/lore_index_file_unnumbered_out.html
@@ -0,0 +1,2 @@
+language of programming: <a href="lore_index_test.html#index02">link</a><br />
+programming language: <a href="lore_index_test.html#index01">link</a><br />
diff --git a/ThirdParty/Twisted/twisted/lore/test/lore_index_test.xhtml b/ThirdParty/Twisted/twisted/lore/test/lore_index_test.xhtml
new file mode 100644
index 0000000..570b411
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/lore_index_test.xhtml
@@ -0,0 +1,21 @@
+<html>
+<head>
+  <title>The way of the program</title>
+</head>
+
+<body>
+
+<h1>The way of the program</h1>
+
+<p>The first paragraph.</p>
+
+
+<h2>The Python programming language</h2>
+<span class="index" value="programming language" />
+<span class="index" value="language of programming" />
+
+<p>The second paragraph.</p>
+
+
+</body>
+</html>
diff --git a/ThirdParty/Twisted/twisted/lore/test/lore_index_test2.xhtml b/ThirdParty/Twisted/twisted/lore/test/lore_index_test2.xhtml
new file mode 100644
index 0000000..4214e48
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/lore_index_test2.xhtml
@@ -0,0 +1,22 @@
+<html>
+<head>
+  <title>The second page to index</title>
+</head>
+
+<body>
+
+<h1>The second page to index</h1>
+
+<p>The first paragraph of the second page.</p>
+
+
+<h2>The Jython programming language</h2>
+<span class="index" value="language of programming" />
+<span class="index" value="aahz2" />
+<span class="index" value="aahz" />
+
+<p>The second paragraph of the second page.</p>
+
+
+</body>
+</html>
diff --git a/ThirdParty/Twisted/twisted/lore/test/lore_numbering_test_out.html b/ThirdParty/Twisted/twisted/lore/test/lore_numbering_test_out.html
new file mode 100644
index 0000000..15bb2b7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/lore_numbering_test_out.html
@@ -0,0 +1,2 @@
+<?xml version="1.0"?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"><html xmlns="http://www.w3.org/1999/xhtml" lang="en"><head><title>Twisted Documentation: 1. The way of the program</title><link href="resources/stylesheet.css" type="text/css" rel="stylesheet" /></head><body bgcolor="white"><h1 class="title">1. The way of the program</h1><div class="toc"><ol><li><a href="#auto0">The Python programming language</a></li><li><a href="#auto1">Section The Second</a></li><li><a href="#auto2">Section [...]
\ No newline at end of file
diff --git a/ThirdParty/Twisted/twisted/lore/test/lore_numbering_test_out2.html b/ThirdParty/Twisted/twisted/lore/test/lore_numbering_test_out2.html
new file mode 100644
index 0000000..33aff77
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/lore_numbering_test_out2.html
@@ -0,0 +1,2 @@
+<?xml version="1.0"?><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"><html xmlns="http://www.w3.org/1999/xhtml" lang="en"><head><title>Twisted Documentation: 2. The second page to index</title><link href="resources/stylesheet.css" type="text/css" rel="stylesheet" /></head><body bgcolor="white"><h1 class="title">2. The second page to index</h1><div class="toc"><ol><li><a href="#auto0">The Jython programming language</a></li><li><a href="#auto1">Second Section</a></li><li><a href="#auto2">Third S [...]
\ No newline at end of file
diff --git a/ThirdParty/Twisted/twisted/lore/test/simple.html b/ThirdParty/Twisted/twisted/lore/test/simple.html
new file mode 100644
index 0000000..8d77609
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/simple.html
@@ -0,0 +1,9 @@
+<html>
+<head>
+<title>My Test Lore Input</title>
+</head>
+<body>
+<h1>My Test Lore Input</h1>
+<p>A Body.</p>
+</body>
+</html>
\ No newline at end of file
diff --git a/ThirdParty/Twisted/twisted/lore/test/simple3.html b/ThirdParty/Twisted/twisted/lore/test/simple3.html
new file mode 100644
index 0000000..8d77609
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/simple3.html
@@ -0,0 +1,9 @@
+<html>
+<head>
+<title>My Test Lore Input</title>
+</head>
+<body>
+<h1>My Test Lore Input</h1>
+<p>A Body.</p>
+</body>
+</html>
\ No newline at end of file
diff --git a/ThirdParty/Twisted/twisted/lore/test/simple4.html b/ThirdParty/Twisted/twisted/lore/test/simple4.html
new file mode 100644
index 0000000..8d77609
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/simple4.html
@@ -0,0 +1,9 @@
+<html>
+<head>
+<title>My Test Lore Input</title>
+</head>
+<body>
+<h1>My Test Lore Input</h1>
+<p>A Body.</p>
+</body>
+</html>
\ No newline at end of file
diff --git a/ThirdParty/Twisted/twisted/lore/test/template.tpl b/ThirdParty/Twisted/twisted/lore/test/template.tpl
new file mode 100644
index 0000000..195f6ca
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/template.tpl
@@ -0,0 +1,13 @@
+<?xml version="1.0"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en">
+  <head><title>Twisted Documentation: </title></head>
+  <body bgcolor="white">
+    <h1 class="title" />
+    <div class="body" />
+    <span class="index-link">Index</span>
+  </body>
+</html>
+
diff --git a/ThirdParty/Twisted/twisted/lore/test/test_docbook.py b/ThirdParty/Twisted/twisted/lore/test/test_docbook.py
new file mode 100644
index 0000000..4bec127
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/test_docbook.py
@@ -0,0 +1,35 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.lore.docbook}.
+"""
+
+from xml.dom.minidom import Element, Text
+
+from twisted.trial.unittest import TestCase
+from twisted.lore.docbook import DocbookSpitter
+
+
+class DocbookSpitterTests(TestCase):
+    """
+    Tests for L{twisted.lore.docbook.DocbookSpitter}.
+    """
+    def test_li(self):
+        """
+        L{DocbookSpitter} wraps any non-I{p} elements found inside any I{li}
+        elements with I{p} elements.
+        """
+        output = []
+        spitter = DocbookSpitter(output.append)
+
+        li = Element('li')
+        li.appendChild(Element('p'))
+        text = Text()
+        text.data = 'foo bar'
+        li.appendChild(text)
+
+        spitter.visitNode(li)
+        self.assertEqual(
+            ''.join(output),
+            '<listitem><para></para><para>foo bar</para></listitem>')
diff --git a/ThirdParty/Twisted/twisted/lore/test/test_latex.py b/ThirdParty/Twisted/twisted/lore/test/test_latex.py
new file mode 100644
index 0000000..21d5029
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/test_latex.py
@@ -0,0 +1,146 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.lore.latex}.
+"""
+
+import os.path
+from xml.dom.minidom import Comment, Element, Text
+
+from twisted.python.filepath import FilePath
+from twisted.trial.unittest import TestCase
+from twisted.lore.latex import LatexSpitter, getLatexText
+
+
+class LatexHelperTests(TestCase):
+    """
+    Tests for free functions in L{twisted.lore.latex}.
+    """
+    def test_getLatexText(self):
+        """
+        L{getLatexText} calls the writer function with all of the text at or
+        beneath the given node.  Non-ASCII characters are encoded using
+        UTF-8.
+        """
+        node = Element('foo')
+        text = Text()
+        text.data = u"foo \N{SNOWMAN}"
+        node.appendChild(text)
+        result = []
+        getLatexText(node, result.append)
+        self.assertEqual(result, [u"foo \N{SNOWMAN}".encode('utf-8')])
+
+
+
+class LatexSpitterTests(TestCase):
+    """
+    Tests for L{LatexSpitter}.
+    """
+    def setUp(self):
+        self.filename = self.mktemp()
+        self.output = []
+        self.spitter = LatexSpitter(self.output.append, filename=self.filename)
+
+
+    def test_head(self):
+        """
+        L{LatexSpitter.visitNode} writes out author information for each
+        I{link} element with a I{rel} attribute set to I{author}.
+        """
+        head = Element('head')
+        first = Element('link')
+        first.setAttribute('rel', 'author')
+        first.setAttribute('title', 'alice')
+        second = Element('link')
+        second.setAttribute('rel', 'author')
+        second.setAttribute('href', 'http://example.com/bob')
+        third = Element('link')
+        third.setAttribute('rel', 'author')
+        third.setAttribute('href', 'mailto:carol at example.com')
+        head.appendChild(first)
+        head.appendChild(second)
+        head.appendChild(third)
+
+        self.spitter.visitNode(head)
+
+        self.assertEqual(
+            ''.join(self.output),
+            '\\author{alice \\and $<$http://example.com/bob$>$ \\and $<$carol at example.com$>$}')
+
+
+    def test_skipComments(self):
+        """
+        L{LatexSpitter.visitNode} writes nothing to its output stream for
+        comments.
+        """
+        self.spitter.visitNode(Comment('foo'))
+        self.assertNotIn('foo', ''.join(self.output))
+
+
+    def test_anchorListing(self):
+        """
+        L{LatexSpitter.visitNode} emits a verbatim block when it encounters a
+        code listing (represented by an I{a} element with a I{listing} class).
+        """
+        path = FilePath(self.mktemp())
+        path.setContent('foo\nbar\n')
+        listing = Element('a')
+        listing.setAttribute('class', 'listing')
+        listing.setAttribute('href', path.path)
+        self.spitter.visitNode(listing)
+        self.assertEqual(
+            ''.join(self.output),
+            "\\begin{verbatim}\n"
+            "foo\n"
+            "bar\n"
+            "\\end{verbatim}\\parbox[b]{\\linewidth}{\\begin{center} --- "
+            "\\begin{em}temp\\end{em}\\end{center}}")
+
+
+    def test_anchorListingSkipLines(self):
+        """
+        When passed an I{a} element with a I{listing} class and a I{skipLines}
+        attribute, L{LatexSpitter.visitNode} emits a verbatim block which skips
+        the indicated number of lines from the beginning of the source listing.
+        """
+        path = FilePath(self.mktemp())
+        path.setContent('foo\nbar\n')
+        listing = Element('a')
+        listing.setAttribute('class', 'listing')
+        listing.setAttribute('skipLines', '1')
+        listing.setAttribute('href', path.path)
+        self.spitter.visitNode(listing)
+        self.assertEqual(
+            ''.join(self.output),
+            "\\begin{verbatim}\n"
+            "bar\n"
+            "\\end{verbatim}\\parbox[b]{\\linewidth}{\\begin{center} --- "
+            "\\begin{em}temp\\end{em}\\end{center}}")
+
+
+    def test_anchorRef(self):
+        """
+        L{LatexSpitter.visitNode} emits a footnote when it encounters an I{a}
+        element with an I{href} attribute with a network scheme.
+        """
+        listing = Element('a')
+        listing.setAttribute('href', 'http://example.com/foo')
+        self.spitter.visitNode(listing)
+        self.assertEqual(
+            ''.join(self.output),
+            "\\footnote{http://example.com/foo}")
+
+
+    def test_anchorName(self):
+        """
+        When passed an I{a} element with a I{name} attribute,
+        L{LatexSpitter.visitNode} emits a label.
+        """
+        listing = Element('a')
+        listing.setAttribute('name', 'foo')
+        self.spitter.visitNode(listing)
+        self.assertEqual(
+            ''.join(self.output),
+            "\\label{%sHASHfoo}" % (
+                os.path.abspath(self.filename).replace('\\', '/'),))
diff --git a/ThirdParty/Twisted/twisted/lore/test/test_lint.py b/ThirdParty/Twisted/twisted/lore/test/test_lint.py
new file mode 100644
index 0000000..ac94df2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/test_lint.py
@@ -0,0 +1,132 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.lore.lint}.
+"""
+
+import sys
+from xml.dom import minidom
+from cStringIO import StringIO
+
+from twisted.trial.unittest import TestCase
+from twisted.lore.lint import getDefaultChecker
+from twisted.lore.process import ProcessingFailure
+
+
+
+class DefaultTagCheckerTests(TestCase):
+    """
+    Tests for L{twisted.lore.lint.DefaultTagChecker}.
+    """
+    def test_quote(self):
+        """
+        If a non-comment node contains a quote (C{'"'}), the checker returned
+        by L{getDefaultChecker} reports an error and raises
+        L{ProcessingFailure}.
+        """
+        documentSource = (
+            '<html>'
+            '<head><title>foo</title></head>'
+            '<body><h1>foo</h1><div>"</div></body>'
+            '</html>')
+        document = minidom.parseString(documentSource)
+        filename = self.mktemp()
+        checker = getDefaultChecker()
+
+        output = StringIO()
+        patch = self.patch(sys, 'stdout', output)
+        self.assertRaises(ProcessingFailure, checker.check, document, filename)
+        patch.restore()
+
+        self.assertIn("contains quote", output.getvalue())
+
+
+    def test_quoteComment(self):
+        """
+        If a comment node contains a quote (C{'"'}), the checker returned by
+        L{getDefaultChecker} does not report an error.
+        """
+        documentSource = (
+            '<html>'
+            '<head><title>foo</title></head>'
+            '<body><h1>foo</h1><!-- " --></body>'
+            '</html>')
+        document = minidom.parseString(documentSource)
+        filename = self.mktemp()
+        checker = getDefaultChecker()
+
+        output = StringIO()
+        patch = self.patch(sys, 'stdout', output)
+        checker.check(document, filename)
+        patch.restore()
+
+        self.assertEqual(output.getvalue(), "")
+
+
+    def test_aNode(self):
+        """
+        If there is an <a> tag in the document, the checker returned by
+        L{getDefaultChecker} does not report an error.
+        """
+        documentSource = (
+            '<html>'
+            '<head><title>foo</title></head>'
+            '<body><h1>foo</h1><a>A link.</a></body>'
+            '</html>')
+
+        self.assertEqual(self._lintCheck(True, documentSource), "")
+
+
+    def test_textMatchesRef(self):
+        """
+        If an I{a} node has a link with a scheme as its contained text, a
+        warning is emitted if that link does not match the value of the
+        I{href} attribute.
+        """
+        documentSource = (
+            '<html>'
+            '<head><title>foo</title></head>'
+            '<body><h1>foo</h1>'
+            '<a href="http://bar/baz">%s</a>'
+            '</body>'
+            '</html>')
+        self.assertEqual(
+            self._lintCheck(True, documentSource % ("http://bar/baz",)), "")
+        self.assertIn(
+            "link text does not match href",
+            self._lintCheck(False, documentSource % ("http://bar/quux",)))
+
+
+    def _lintCheck(self, expectSuccess, source):
+        """
+        Lint the given document source and return the output.
+
+        @param expectSuccess: A flag indicating whether linting is expected
+            to succeed or not.
+
+        @param source: The document source to lint.
+
+        @return: A C{str} of the output of linting.
+        """
+        document = minidom.parseString(source)
+        filename = self.mktemp()
+        checker = getDefaultChecker()
+
+        output = StringIO()
+        patch = self.patch(sys, 'stdout', output)
+        try:
+            try:
+                checker.check(document, filename)
+            finally:
+                patch.restore()
+        except ProcessingFailure, e:
+            if expectSuccess:
+                raise
+        else:
+            if not expectSuccess:
+                self.fail(
+                    "Expected checker to fail, but it did not.  "
+                    "Output was: %r" % (output.getvalue(),))
+
+        return output.getvalue()
diff --git a/ThirdParty/Twisted/twisted/lore/test/test_lmath.py b/ThirdParty/Twisted/twisted/lore/test/test_lmath.py
new file mode 100644
index 0000000..a1e4c09
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/test_lmath.py
@@ -0,0 +1,72 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.lore.lmath}.
+"""
+
+from xml.dom.minidom import Element, Text
+
+from twisted.trial.unittest import TestCase
+from twisted.python.filepath import FilePath
+from twisted.lore.scripts.lore import IProcessor
+
+from twisted.plugin import getPlugins
+
+from twisted.lore.lmath import formulaeToImages
+
+
+class PluginTests(TestCase):
+    """
+    Tests for the plugin which lets L{twisted.lore.lmath} be used from the lore
+    command line tool.
+    """
+    def test_discoverable(self):
+        """
+        The plugin for L{twisted.lore.lmath} can be discovered by querying for
+        L{IProcessor} plugins.
+        """
+        plugins = getPlugins(IProcessor)
+        lmath = [p for p in plugins if p.name == "mlore"]
+        self.assertEqual(len(lmath), 1, "Did not find math lore plugin: %r" % (lmath,))
+
+
+
+class FormulaeTests(TestCase):
+    """
+    Tests for L{formulaeToImages}.
+    """
+    def test_insertImages(self):
+        """
+        L{formulaeToImages} replaces any elements with the I{latexformula}
+        class with I{img} elements which refer to external images generated
+        based on the latex in the original elements.
+        """
+        parent = Element('div')
+        base = FilePath(self.mktemp())
+        base.makedirs()
+
+        macros = Element('span')
+        macros.setAttribute('class', 'latexmacros')
+        text = Text()
+        text.data = 'foo'
+        macros.appendChild(text)
+        parent.appendChild(macros)
+
+        formula = Element('span')
+        formula.setAttribute('class', 'latexformula')
+        text = Text()
+        text.data = 'bar'
+        formula.appendChild(text)
+        parent.appendChild(formula)
+
+        # Avoid actually executing the commands to generate images from the
+        # latex.  It might be nice to have some assertions about what commands
+        # are executed, or perhaps even execute them and make sure an image
+        # file is created, but that is a task for another day.
+        commands = []
+        formulaeToImages(parent, base.path, _system=commands.append)
+
+        self.assertEqual(
+            parent.toxml(),
+            '<div><span><br/><img src="latexformula0.png"/><br/></span></div>')
diff --git a/ThirdParty/Twisted/twisted/lore/test/test_lore.py b/ThirdParty/Twisted/twisted/lore/test/test_lore.py
new file mode 100644
index 0000000..3f399d9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/test_lore.py
@@ -0,0 +1,1198 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# ++ single anchor added to individual output file
+# ++ two anchors added to individual output file
+# ++ anchors added to individual output files
+# ++ entry added to index
+# ++ index entry pointing to correct file and anchor
+# ++ multiple entries added to index
+# ++ multiple index entries pointing to correct files and anchors
+# __ all of above for files in deep directory structure
+#
+# ++ group index entries by indexed term
+# ++ sort index entries by indexed term
+# __ hierarchical index entries (e.g. language!programming)
+#
+# ++ add parameter for what the index filename should be
+# ++ add (default) ability to NOT index (if index not specified)
+#
+# ++ put actual index filename into INDEX link (if any) in the template
+# __ make index links RELATIVE!
+# __ make index pay attention to the outputdir!
+#
+# __ make index look nice
+#
+# ++ add section numbers to headers in lore output
+# ++ make text of index entry links be chapter numbers
+# ++ make text of index entry links be section numbers
+#
+# __ put all of our test files someplace neat and tidy
+#
+
+import os, shutil, errno, time
+from StringIO import StringIO
+from xml.dom import minidom as dom
+
+from twisted.trial import unittest
+from twisted.python.filepath import FilePath
+
+from twisted.lore import tree, process, indexer, numberer, htmlbook, default
+from twisted.lore.default import factory
+from twisted.lore.latex import LatexSpitter
+
+from twisted.python.util import sibpath
+
+from twisted.lore.scripts import lore
+
+from twisted.web import domhelpers
+
+def sp(originalFileName):
+    return sibpath(__file__, originalFileName)
+
+options = {"template" : sp("template.tpl"), 'baseurl': '%s', 'ext': '.xhtml' }
+d = options
+
+
+class _XMLAssertionMixin:
+    """
+    Test mixin defining a method for comparing serialized XML documents.
+    """
+    def assertXMLEqual(self, first, second):
+        """
+        Verify that two strings represent the same XML document.
+        """
+        self.assertEqual(
+            dom.parseString(first).toxml(),
+            dom.parseString(second).toxml())
+
+
+class TestFactory(unittest.TestCase, _XMLAssertionMixin):
+
+    file = sp('simple.html')
+    linkrel = ""
+
+    def assertEqualFiles1(self, exp, act):
+        if (exp == act): return True
+        fact = open(act)
+        self.assertEqualsFile(exp, fact.read())
+
+    def assertEqualFiles(self, exp, act):
+        if (exp == act): return True
+        fact = open(sp(act))
+        self.assertEqualsFile(exp, fact.read())
+
+    def assertEqualsFile(self, exp, act):
+        expected = open(sp(exp)).read()
+        self.assertEqual(expected, act)
+
+    def makeTemp(self, *filenames):
+        tmp = self.mktemp()
+        os.mkdir(tmp)
+        for filename in filenames:
+            tmpFile = os.path.join(tmp, filename)
+            shutil.copyfile(sp(filename), tmpFile)
+        return tmp
+
+########################################
+
+    def setUp(self):
+        indexer.reset()
+        numberer.reset()
+
+    def testProcessingFunctionFactory(self):
+        base = FilePath(self.mktemp())
+        base.makedirs()
+
+        simple = base.child('simple.html')
+        FilePath(__file__).sibling('simple.html').copyTo(simple)
+
+        htmlGenerator = factory.generate_html(options)
+        htmlGenerator(simple.path, self.linkrel)
+
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: My Test Lore Input</title></head>
+  <body bgcolor="white">
+    <h1 class="title">My Test Lore Input</h1>
+    <div class="content">
+<span/>
+<p>A Body.</p>
+</div>
+    <a href="index.xhtml">Index</a>
+  </body>
+</html>""",
+            simple.sibling('simple.xhtml').getContent())
+
+
+    def testProcessingFunctionFactoryWithFilenameGenerator(self):
+        base = FilePath(self.mktemp())
+        base.makedirs()
+
+        def filenameGenerator(originalFileName, outputExtension):
+            name = os.path.splitext(FilePath(originalFileName).basename())[0]
+            return base.child(name + outputExtension).path
+
+        htmlGenerator = factory.generate_html(options, filenameGenerator)
+        htmlGenerator(self.file, self.linkrel)
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: My Test Lore Input</title></head>
+  <body bgcolor="white">
+    <h1 class="title">My Test Lore Input</h1>
+    <div class="content">
+<span/>
+<p>A Body.</p>
+</div>
+    <a href="index.xhtml">Index</a>
+  </body>
+</html>""",
+            base.child("simple.xhtml").getContent())
+
+
+    def test_doFile(self):
+        base = FilePath(self.mktemp())
+        base.makedirs()
+
+        simple = base.child('simple.html')
+        FilePath(__file__).sibling('simple.html').copyTo(simple)
+
+        templ = dom.parse(open(d['template']))
+
+        tree.doFile(simple.path, self.linkrel, d['ext'], d['baseurl'], templ, d)
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: My Test Lore Input</title></head>
+  <body bgcolor="white">
+    <h1 class="title">My Test Lore Input</h1>
+    <div class="content">
+<span/>
+<p>A Body.</p>
+</div>
+    <a href="index.xhtml">Index</a>
+  </body>
+</html>""",
+            base.child("simple.xhtml").getContent())
+
+
+    def test_doFile_withFilenameGenerator(self):
+        base = FilePath(self.mktemp())
+        base.makedirs()
+
+        def filenameGenerator(originalFileName, outputExtension):
+            name = os.path.splitext(FilePath(originalFileName).basename())[0]
+            return base.child(name + outputExtension).path
+
+        templ = dom.parse(open(d['template']))
+        tree.doFile(self.file, self.linkrel, d['ext'], d['baseurl'], templ, d, filenameGenerator)
+
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: My Test Lore Input</title></head>
+  <body bgcolor="white">
+    <h1 class="title">My Test Lore Input</h1>
+    <div class="content">
+<span/>
+<p>A Body.</p>
+</div>
+    <a href="index.xhtml">Index</a>
+  </body>
+</html>""",
+            base.child("simple.xhtml").getContent())
+
+
+    def test_munge(self):
+        indexer.setIndexFilename("lore_index_file.html")
+        doc = dom.parse(open(self.file))
+        node = dom.parse(open(d['template']))
+        tree.munge(doc, node, self.linkrel,
+                   os.path.dirname(self.file),
+                   self.file,
+                   d['ext'], d['baseurl'], d)
+
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: My Test Lore Input</title></head>
+  <body bgcolor="white">
+    <h1 class="title">My Test Lore Input</h1>
+    <div class="content">
+<span/>
+<p>A Body.</p>
+</div>
+    <a href="lore_index_file.html">Index</a>
+  </body>
+</html>""",
+            node.toxml())
+
+
+    def test_mungeAuthors(self):
+        """
+        If there is a node with a I{class} attribute set to C{"authors"},
+        L{tree.munge} adds anchors as children to it, taking the necessary
+        information from any I{link} nodes in the I{head} with their I{rel}
+        attribute set to C{"author"}.
+        """
+        document = dom.parseString(
+            """\
+<html>
+  <head>
+    <title>munge authors</title>
+    <link rel="author" title="foo" href="bar"/>
+    <link rel="author" title="baz" href="quux"/>
+    <link rel="author" title="foobar" href="barbaz"/>
+  </head>
+  <body>
+    <h1>munge authors</h1>
+  </body>
+</html>""")
+        template = dom.parseString(
+            """\
+<html xmlns="http://www.w3.org/1999/xhtml" lang="en">
+  <head>
+    <title />
+  </head>
+
+  <body>
+    <div class="body" />
+    <div class="authors" />
+  </body>
+</html>
+""")
+        tree.munge(
+            document, template, self.linkrel, os.path.dirname(self.file),
+            self.file, d['ext'], d['baseurl'], d)
+
+        self.assertXMLEqual(
+            template.toxml(),
+            """\
+<?xml version="1.0" ?><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head>
+    <title>munge authors</title>
+  <link href="bar" rel="author" title="foo"/><link href="quux" rel="author" title="baz"/><link href="barbaz" rel="author" title="foobar"/></head>
+
+  <body>
+    <div class="content">
+    <span/>
+  </div>
+    <div class="authors"><span><a href="bar">foo</a>, <a href="quux">baz</a>, and <a href="barbaz">foobar</a></span></div>
+  </body>
+</html>""")
+
+
+    def test_getProcessor(self):
+
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        input = base.child("simple3.html")
+        FilePath(__file__).sibling("simple3.html").copyTo(input)
+
+        options = { 'template': sp('template.tpl'), 'ext': '.xhtml', 'baseurl': 'burl',
+                    'filenameMapping': None }
+        p = process.getProcessor(default, "html", options)
+        p(input.path, self.linkrel)
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: My Test Lore Input</title></head>
+  <body bgcolor="white">
+    <h1 class="title">My Test Lore Input</h1>
+    <div class="content">
+<span/>
+<p>A Body.</p>
+</div>
+    <a href="index.xhtml">Index</a>
+  </body>
+</html>""",
+            base.child("simple3.xhtml").getContent())
+
+    def test_outputdirGenerator(self):
+        normp = os.path.normpath; join = os.path.join
+        inputdir  = normp(join("/", 'home', 'joe'))
+        outputdir = normp(join("/", 'away', 'joseph'))
+        actual = process.outputdirGenerator(join("/", 'home', 'joe', "myfile.html"),
+                                            '.xhtml', inputdir, outputdir)
+        expected = normp(join("/", 'away', 'joseph', 'myfile.xhtml'))
+        self.assertEqual(expected, actual)
+
+    def test_outputdirGeneratorBadInput(self):
+        options = {'outputdir': '/away/joseph/', 'inputdir': '/home/joe/' }
+        self.assertRaises(ValueError, process.outputdirGenerator, '.html', '.xhtml', **options)
+
+    def test_makeSureDirectoryExists(self):
+        dirname = os.path.join("tmp", 'nonexistentdir')
+        if os.path.exists(dirname):
+            os.rmdir(dirname)
+        self.failIf(os.path.exists(dirname), "Hey: someone already created the dir")
+        filename = os.path.join(dirname, 'newfile')
+        tree.makeSureDirectoryExists(filename)
+        self.failUnless(os.path.exists(dirname), 'should have created dir')
+        os.rmdir(dirname)
+
+
+    def test_indexAnchorsAdded(self):
+        indexer.setIndexFilename('theIndexFile.html')
+        # generate the output file
+        templ = dom.parse(open(d['template']))
+        tmp = self.makeTemp('lore_index_test.xhtml')
+
+        tree.doFile(os.path.join(tmp, 'lore_index_test.xhtml'),
+                    self.linkrel, '.html', d['baseurl'], templ, d)
+
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: The way of the program</title></head>
+  <body bgcolor="white">
+    <h1 class="title">The way of the program</h1>
+    <div class="content">
+
+<span/>
+
+<p>The first paragraph.</p>
+
+
+<h2>The Python programming language<a name="auto0"/></h2>
+<a name="index01"/>
+<a name="index02"/>
+
+<p>The second paragraph.</p>
+
+
+</div>
+    <a href="theIndexFile.html">Index</a>
+  </body>
+</html>""",
+            FilePath(tmp).child("lore_index_test.html").getContent())
+
+
+    def test_indexEntriesAdded(self):
+        indexer.addEntry('lore_index_test.html', 'index02', 'language of programming', '1.3')
+        indexer.addEntry('lore_index_test.html', 'index01', 'programming language', '1.2')
+        indexer.setIndexFilename("lore_index_file.html")
+        indexer.generateIndex()
+        self.assertEqualFiles1("lore_index_file_out.html", "lore_index_file.html")
+
+    def test_book(self):
+        tmp = self.makeTemp()
+        inputFilename = sp('lore_index_test.xhtml')
+
+        bookFilename = os.path.join(tmp, 'lore_test_book.book')
+        bf = open(bookFilename, 'w')
+        bf.write('Chapter(r"%s", None)\r\n' % inputFilename)
+        bf.close()
+
+        book = htmlbook.Book(bookFilename)
+        expected = {'indexFilename': None,
+                    'chapters': [(inputFilename, None)],
+                    }
+        dct = book.__dict__
+        for k in dct:
+            self.assertEqual(dct[k], expected[k])
+
+    def test_runningLore(self):
+        options = lore.Options()
+        tmp = self.makeTemp('lore_index_test.xhtml')
+
+        templateFilename = sp('template.tpl')
+        inputFilename = os.path.join(tmp, 'lore_index_test.xhtml')
+        indexFilename = 'theIndexFile'
+
+        bookFilename = os.path.join(tmp, 'lore_test_book.book')
+        bf = open(bookFilename, 'w')
+        bf.write('Chapter(r"%s", None)\n' % inputFilename)
+        bf.close()
+
+        options.parseOptions(['--null', '--book=%s' % bookFilename,
+                              '--config', 'template=%s' % templateFilename,
+                              '--index=%s' % indexFilename
+                              ])
+        result = lore.runGivenOptions(options)
+        self.assertEqual(None, result)
+        self.assertEqualFiles1("lore_index_file_unnumbered_out.html", indexFilename + ".html")
+
+
+    def test_runningLoreMultipleFiles(self):
+        tmp = self.makeTemp('lore_index_test.xhtml', 'lore_index_test2.xhtml')
+        templateFilename = sp('template.tpl')
+        inputFilename = os.path.join(tmp, 'lore_index_test.xhtml')
+        inputFilename2 = os.path.join(tmp, 'lore_index_test2.xhtml')
+        indexFilename = 'theIndexFile'
+
+        bookFilename = os.path.join(tmp, 'lore_test_book.book')
+        bf = open(bookFilename, 'w')
+        bf.write('Chapter(r"%s", None)\n' % inputFilename)
+        bf.write('Chapter(r"%s", None)\n' % inputFilename2)
+        bf.close()
+
+        options = lore.Options()
+        options.parseOptions(['--null', '--book=%s' % bookFilename,
+                              '--config', 'template=%s' % templateFilename,
+                              '--index=%s' % indexFilename
+                              ])
+        result = lore.runGivenOptions(options)
+        self.assertEqual(None, result)
+
+        self.assertEqual(
+            # XXX This doesn't seem like a very good index file.
+            """\
+aahz: <a href="lore_index_test2.html#index03">link</a><br />
+aahz2: <a href="lore_index_test2.html#index02">link</a><br />
+language of programming: <a href="lore_index_test.html#index02">link</a>, <a href="lore_index_test2.html#index01">link</a><br />
+programming language: <a href="lore_index_test.html#index01">link</a><br />
+""",
+            file(FilePath(indexFilename + ".html").path).read())
+
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: The way of the program</title></head>
+  <body bgcolor="white">
+    <h1 class="title">The way of the program</h1>
+    <div class="content">
+
+<span/>
+
+<p>The first paragraph.</p>
+
+
+<h2>The Python programming language<a name="auto0"/></h2>
+<a name="index01"/>
+<a name="index02"/>
+
+<p>The second paragraph.</p>
+
+
+</div>
+    <a href="theIndexFile.html">Index</a>
+  </body>
+</html>""",
+            FilePath(tmp).child("lore_index_test.html").getContent())
+
+        self.assertXMLEqual(
+            """\
+<?xml version="1.0" ?><!DOCTYPE html  PUBLIC '-//W3C//DTD XHTML 1.0 Transitional//EN'  'http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd'><html lang="en" xmlns="http://www.w3.org/1999/xhtml">
+  <head><title>Twisted Documentation: The second page to index</title></head>
+  <body bgcolor="white">
+    <h1 class="title">The second page to index</h1>
+    <div class="content">
+
+<span/>
+
+<p>The first paragraph of the second page.</p>
+
+
+<h2>The Jython programming language<a name="auto0"/></h2>
+<a name="index01"/>
+<a name="index02"/>
+<a name="index03"/>
+
+<p>The second paragraph of the second page.</p>
+
+
+</div>
+    <a href="theIndexFile.html">Index</a>
+  </body>
+</html>""",
+            FilePath(tmp).child("lore_index_test2.html").getContent())
+
+
+
+    def XXXtest_NumberedSections(self):
+        # run two files through lore, with numbering turned on
+        # every h2 should be numbered:
+        # first  file's h2s should be 1.1, 1.2
+        # second file's h2s should be 2.1, 2.2
+        templateFilename = sp('template.tpl')
+        inputFilename = sp('lore_numbering_test.xhtml')
+        inputFilename2 = sp('lore_numbering_test2.xhtml')
+        indexFilename = 'theIndexFile'
+
+        # you can number without a book:
+        options = lore.Options()
+        options.parseOptions(['--null',
+                              '--index=%s' % indexFilename,
+                              '--config', 'template=%s' % templateFilename,
+                              '--config', 'ext=%s' % ".tns",
+                              '--number',
+                              inputFilename, inputFilename2])
+        result = lore.runGivenOptions(options)
+
+        self.assertEqual(None, result)
+        #self.assertEqualFiles1("lore_index_file_out_multiple.html", indexFilename + ".tns")
+        #                       VVV change to new, numbered files
+        self.assertEqualFiles("lore_numbering_test_out.html", "lore_numbering_test.tns")
+        self.assertEqualFiles("lore_numbering_test_out2.html", "lore_numbering_test2.tns")
+
+
+    def test_setTitle(self):
+        """
+        L{tree.setTitle} inserts the given title into the first I{title}
+        element and the first element with the I{title} class in the given
+        template.
+        """
+        parent = dom.Element('div')
+        firstTitle = dom.Element('title')
+        parent.appendChild(firstTitle)
+        secondTitle = dom.Element('span')
+        secondTitle.setAttribute('class', 'title')
+        parent.appendChild(secondTitle)
+
+        titleNodes = [dom.Text()]
+        # minidom has issues with cloning documentless-nodes.  See Python issue
+        # 4851.
+        titleNodes[0].ownerDocument = dom.Document()
+        titleNodes[0].data = 'foo bar'
+
+        tree.setTitle(parent, titleNodes, None)
+        self.assertEqual(firstTitle.toxml(), '<title>foo bar</title>')
+        self.assertEqual(
+            secondTitle.toxml(), '<span class="title">foo bar</span>')
+
+
+    def test_setTitleWithChapter(self):
+        """
+        L{tree.setTitle} includes a chapter number if it is passed one.
+        """
+        document = dom.Document()
+
+        parent = dom.Element('div')
+        parent.ownerDocument = document
+
+        title = dom.Element('title')
+        parent.appendChild(title)
+
+        titleNodes = [dom.Text()]
+        titleNodes[0].ownerDocument = document
+        titleNodes[0].data = 'foo bar'
+
+        # Oh yea.  The numberer has to agree to put the chapter number in, too.
+        numberer.setNumberSections(True)
+
+        tree.setTitle(parent, titleNodes, '13')
+        self.assertEqual(title.toxml(), '<title>13. foo bar</title>')
+
+
+    def test_setIndexLink(self):
+        """
+        Tests to make sure that index links are processed when an index page
+        exists and removed when there is none.
+        """
+        templ = dom.parse(open(d['template']))
+        indexFilename = 'theIndexFile'
+        numLinks = len(domhelpers.findElementsWithAttribute(templ,
+                                                            "class",
+                                                            "index-link"))
+
+        # if our testing template has no index-link nodes, complain about it
+        self.assertNotEquals(
+            [],
+            domhelpers.findElementsWithAttribute(templ,
+                                                 "class",
+                                                 "index-link"))
+
+        tree.setIndexLink(templ, indexFilename)
+
+        self.assertEqual(
+            [],
+            domhelpers.findElementsWithAttribute(templ,
+                                                 "class",
+                                                 "index-link"))
+
+        indexLinks = domhelpers.findElementsWithAttribute(templ,
+                                                          "href",
+                                                          indexFilename)
+        self.assertTrue(len(indexLinks) >= numLinks)
+
+        templ = dom.parse(open(d['template']))
+        self.assertNotEquals(
+            [],
+            domhelpers.findElementsWithAttribute(templ,
+                                                 "class",
+                                                 "index-link"))
+        indexFilename = None
+
+        tree.setIndexLink(templ, indexFilename)
+
+        self.assertEqual(
+            [],
+            domhelpers.findElementsWithAttribute(templ,
+                                                 "class",
+                                                 "index-link"))
+
+
+    def test_addMtime(self):
+        """
+        L{tree.addMtime} inserts a text node giving the last modification time
+        of the specified file wherever it encounters an element with the
+        I{mtime} class.
+        """
+        path = FilePath(self.mktemp())
+        path.setContent('')
+        when = time.ctime(path.getModificationTime())
+
+        parent = dom.Element('div')
+        mtime = dom.Element('span')
+        mtime.setAttribute('class', 'mtime')
+        parent.appendChild(mtime)
+
+        tree.addMtime(parent, path.path)
+        self.assertEqual(
+            mtime.toxml(), '<span class="mtime">' + when + '</span>')
+
+
+    def test_makeLineNumbers(self):
+        """
+        L{tree._makeLineNumbers} takes an integer and returns a I{p} tag with
+        that number of line numbers in it.
+        """
+        numbers = tree._makeLineNumbers(1)
+        self.assertEqual(numbers.tagName, 'p')
+        self.assertEqual(numbers.getAttribute('class'), 'py-linenumber')
+        self.assertIsInstance(numbers.firstChild, dom.Text)
+        self.assertEqual(numbers.firstChild.nodeValue, '1\n')
+
+        numbers = tree._makeLineNumbers(10)
+        self.assertEqual(numbers.tagName, 'p')
+        self.assertEqual(numbers.getAttribute('class'), 'py-linenumber')
+        self.assertIsInstance(numbers.firstChild, dom.Text)
+        self.assertEqual(
+            numbers.firstChild.nodeValue,
+            ' 1\n 2\n 3\n 4\n 5\n'
+            ' 6\n 7\n 8\n 9\n10\n')
+
+
+    def test_fontifyPythonNode(self):
+        """
+        L{tree.fontifyPythonNode} accepts a text node and replaces it in its
+        parent with a syntax colored and line numbered version of the Python
+        source it contains.
+        """
+        parent = dom.Element('div')
+        source = dom.Text()
+        source.data = 'def foo():\n    pass\n'
+        parent.appendChild(source)
+
+        tree.fontifyPythonNode(source)
+
+        expected = """\
+<div><pre class="python"><p class="py-linenumber">1
+2
+</p><span class="py-src-keyword">def</span> <span class="py-src-identifier">foo</span>():
+    <span class="py-src-keyword">pass</span>
+</pre></div>"""
+
+        self.assertEqual(parent.toxml(), expected)
+
+
+    def test_addPyListings(self):
+        """
+        L{tree.addPyListings} accepts a document with nodes with their I{class}
+        attribute set to I{py-listing} and replaces those nodes with Python
+        source listings from the file given by the node's I{href} attribute.
+        """
+        listingPath = FilePath(self.mktemp())
+        listingPath.setContent('def foo():\n    pass\n')
+
+        parent = dom.Element('div')
+        listing = dom.Element('a')
+        listing.setAttribute('href', listingPath.basename())
+        listing.setAttribute('class', 'py-listing')
+        parent.appendChild(listing)
+
+        tree.addPyListings(parent, listingPath.dirname())
+
+        expected = """\
+<div><div class="py-listing"><pre><p class="py-linenumber">1
+2
+</p><span class="py-src-keyword">def</span> <span class="py-src-identifier">foo</span>():
+    <span class="py-src-keyword">pass</span>
+</pre><div class="caption"> - <a href="temp"><span class="filename">temp</span></a></div></div></div>"""
+
+        self.assertEqual(parent.toxml(), expected)
+
+
+    def test_addPyListingsSkipLines(self):
+        """
+        If a node with the I{py-listing} class also has a I{skipLines}
+        attribute, that number of lines from the beginning of the source
+        listing are omitted.
+        """
+        listingPath = FilePath(self.mktemp())
+        listingPath.setContent('def foo():\n    pass\n')
+
+        parent = dom.Element('div')
+        listing = dom.Element('a')
+        listing.setAttribute('href', listingPath.basename())
+        listing.setAttribute('class', 'py-listing')
+        listing.setAttribute('skipLines', 1)
+        parent.appendChild(listing)
+
+        tree.addPyListings(parent, listingPath.dirname())
+
+        expected = """\
+<div><div class="py-listing"><pre><p class="py-linenumber">1
+</p>    <span class="py-src-keyword">pass</span>
+</pre><div class="caption"> - <a href="temp"><span class="filename">temp</span></a></div></div></div>"""
+
+        self.assertEqual(parent.toxml(), expected)
+
+
+    def test_fixAPI(self):
+        """
+        The element passed to L{tree.fixAPI} has all of its children with the
+        I{API} class rewritten to contain links to the API which is referred to
+        by the text they contain.
+        """
+        parent = dom.Element('div')
+        link = dom.Element('span')
+        link.setAttribute('class', 'API')
+        text = dom.Text()
+        text.data = 'foo'
+        link.appendChild(text)
+        parent.appendChild(link)
+
+        tree.fixAPI(parent, 'http://example.com/%s')
+        self.assertEqual(
+            parent.toxml(),
+            '<div><span class="API">'
+            '<a href="http://example.com/foo" title="foo">foo</a>'
+            '</span></div>')
+
+
+    def test_fixAPIBase(self):
+        """
+        If a node with the I{API} class and a value for the I{base} attribute
+        is included in the DOM passed to L{tree.fixAPI}, the link added to that
+        node refers to the API formed by joining the value of the I{base}
+        attribute to the text contents of the node.
+        """
+        parent = dom.Element('div')
+        link = dom.Element('span')
+        link.setAttribute('class', 'API')
+        link.setAttribute('base', 'bar')
+        text = dom.Text()
+        text.data = 'baz'
+        link.appendChild(text)
+        parent.appendChild(link)
+
+        tree.fixAPI(parent, 'http://example.com/%s')
+
+        self.assertEqual(
+            parent.toxml(),
+            '<div><span class="API">'
+            '<a href="http://example.com/bar.baz" title="bar.baz">baz</a>'
+            '</span></div>')
+
+
+    def test_fixLinks(self):
+        """
+        Links in the nodes of the DOM passed to L{tree.fixLinks} have their
+        extensions rewritten to the given extension.
+        """
+        parent = dom.Element('div')
+        link = dom.Element('a')
+        link.setAttribute('href', 'foo.html')
+        parent.appendChild(link)
+
+        tree.fixLinks(parent, '.xhtml')
+
+        self.assertEqual(parent.toxml(), '<div><a href="foo.xhtml"/></div>')
+
+
+    def test_setVersion(self):
+        """
+        Nodes of the DOM passed to L{tree.setVersion} which have the I{version}
+        class have the given version added to them as a child.
+        """
+        parent = dom.Element('div')
+        version = dom.Element('span')
+        version.setAttribute('class', 'version')
+        parent.appendChild(version)
+
+        tree.setVersion(parent, '1.2.3')
+
+        self.assertEqual(
+            parent.toxml(), '<div><span class="version">1.2.3</span></div>')
+
+
+    def test_footnotes(self):
+        """
+        L{tree.footnotes} finds all of the nodes with the I{footnote} class in
+        the DOM passed to it and adds a footnotes section to the end of the
+        I{body} element which includes them.  It also inserts links to those
+        footnotes from the original definition location.
+        """
+        parent = dom.Element('div')
+        body = dom.Element('body')
+        footnote = dom.Element('span')
+        footnote.setAttribute('class', 'footnote')
+        text = dom.Text()
+        text.data = 'this is the footnote'
+        footnote.appendChild(text)
+        body.appendChild(footnote)
+        body.appendChild(dom.Element('p'))
+        parent.appendChild(body)
+
+        tree.footnotes(parent)
+
+        self.assertEqual(
+            parent.toxml(),
+            '<div><body>'
+            '<a href="#footnote-1" title="this is the footnote">'
+            '<super>1</super>'
+            '</a>'
+            '<p/>'
+            '<h2>Footnotes</h2>'
+            '<ol><li><a name="footnote-1">'
+            '<span class="footnote">this is the footnote</span>'
+            '</a></li></ol>'
+            '</body></div>')
+
+
+    def test_generateTableOfContents(self):
+        """
+        L{tree.generateToC} returns an element which contains a table of
+        contents generated from the headers in the document passed to it.
+        """
+        parent = dom.Element('body')
+        header = dom.Element('h2')
+        text = dom.Text()
+        text.data = u'header & special character'
+        header.appendChild(text)
+        parent.appendChild(header)
+        subheader = dom.Element('h3')
+        text = dom.Text()
+        text.data = 'subheader'
+        subheader.appendChild(text)
+        parent.appendChild(subheader)
+
+        tableOfContents = tree.generateToC(parent)
+        self.assertEqual(
+            tableOfContents.toxml(),
+            '<ol><li><a href="#auto0">header & special character</a></li><ul><li><a href="#auto1">subheader</a></li></ul></ol>')
+
+        self.assertEqual(
+            header.toxml(),
+            '<h2>header & special character<a name="auto0"/></h2>')
+
+        self.assertEqual(
+            subheader.toxml(),
+            '<h3>subheader<a name="auto1"/></h3>')
+
+
+    def test_putInToC(self):
+        """
+        L{tree.putInToC} replaces all of the children of the first node with
+        the I{toc} class with the given node representing a table of contents.
+        """
+        parent = dom.Element('div')
+        toc = dom.Element('span')
+        toc.setAttribute('class', 'toc')
+        toc.appendChild(dom.Element('foo'))
+        parent.appendChild(toc)
+
+        tree.putInToC(parent, dom.Element('toc'))
+
+        self.assertEqual(toc.toxml(), '<span class="toc"><toc/></span>')
+
+
+    def test_invalidTableOfContents(self):
+        """
+        If passed a document with I{h3} elements before any I{h2} element,
+        L{tree.generateToC} raises L{ValueError} explaining that this is not a
+        valid document.
+        """
+        parent = dom.Element('body')
+        parent.appendChild(dom.Element('h3'))
+        err = self.assertRaises(ValueError, tree.generateToC, parent)
+        self.assertEqual(
+            str(err), "No H3 element is allowed until after an H2 element")
+
+
+    def test_notes(self):
+        """
+        L{tree.notes} inserts some additional markup before the first child of
+        any node with the I{note} class.
+        """
+        parent = dom.Element('div')
+        noteworthy = dom.Element('span')
+        noteworthy.setAttribute('class', 'note')
+        noteworthy.appendChild(dom.Element('foo'))
+        parent.appendChild(noteworthy)
+
+        tree.notes(parent)
+
+        self.assertEqual(
+            noteworthy.toxml(),
+            '<span class="note"><strong>Note: </strong><foo/></span>')
+
+
+    def test_findNodeJustBefore(self):
+        """
+        L{tree.findNodeJustBefore} returns the previous sibling of the node it
+        is passed.  The list of nodes passed in is ignored.
+        """
+        parent = dom.Element('div')
+        result = dom.Element('foo')
+        target = dom.Element('bar')
+        parent.appendChild(result)
+        parent.appendChild(target)
+
+        self.assertIdentical(
+            tree.findNodeJustBefore(target, [parent, result]),
+            result)
+
+        # Also, support other configurations.  This is really not a nice API.
+        newTarget = dom.Element('baz')
+        target.appendChild(newTarget)
+        self.assertIdentical(
+            tree.findNodeJustBefore(newTarget, [parent, result]),
+            result)
+
+
+    def test_getSectionNumber(self):
+        """
+        L{tree.getSectionNumber} accepts an I{H2} element and returns its text
+        content.
+        """
+        header = dom.Element('foo')
+        text = dom.Text()
+        text.data = 'foobar'
+        header.appendChild(text)
+        self.assertEqual(tree.getSectionNumber(header), 'foobar')
+
+
+    def test_numberDocument(self):
+        """
+        L{tree.numberDocument} inserts section numbers into the text of each
+        header.
+        """
+        parent = dom.Element('foo')
+        section = dom.Element('h2')
+        text = dom.Text()
+        text.data = 'foo'
+        section.appendChild(text)
+        parent.appendChild(section)
+
+        tree.numberDocument(parent, '7')
+
+        self.assertEqual(section.toxml(), '<h2>7.1 foo</h2>')
+
+
+    def test_parseFileAndReport(self):
+        """
+        L{tree.parseFileAndReport} parses the contents of the filename passed
+        to it and returns the corresponding DOM.
+        """
+        path = FilePath(self.mktemp())
+        path.setContent('<foo bar="baz">hello</foo>\n')
+
+        document = tree.parseFileAndReport(path.path)
+        self.assertXMLEqual(
+            document.toxml(),
+            '<?xml version="1.0" ?><foo bar="baz">hello</foo>')
+
+
+    def test_parseFileAndReportMismatchedTags(self):
+        """
+        If the contents of the file passed to L{tree.parseFileAndReport}
+        contain a mismatched tag, L{process.ProcessingFailure} is raised
+        indicating the location of the open and close tags which were
+        mismatched.
+        """
+        path = FilePath(self.mktemp())
+        path.setContent('  <foo>\n\n  </bar>')
+
+        err = self.assertRaises(
+            process.ProcessingFailure, tree.parseFileAndReport, path.path)
+        self.assertEqual(
+            str(err),
+            "mismatched close tag at line 3, column 4; expected </foo> "
+            "(from line 1, column 2)")
+
+        # Test a case which involves proper close tag handling.
+        path.setContent('<foo><bar></bar>\n  </baz>')
+
+        err = self.assertRaises(
+            process.ProcessingFailure, tree.parseFileAndReport, path.path)
+        self.assertEqual(
+            str(err),
+            "mismatched close tag at line 2, column 4; expected </foo> "
+            "(from line 1, column 0)")
+
+
+    def test_parseFileAndReportParseError(self):
+        """
+        If the contents of the file passed to L{tree.parseFileAndReport} cannot
+        be parsed for a reason other than mismatched tags,
+        L{process.ProcessingFailure} is raised with a string describing the
+        parse error.
+        """
+        path = FilePath(self.mktemp())
+        path.setContent('\n   foo')
+
+        err = self.assertRaises(
+            process.ProcessingFailure, tree.parseFileAndReport, path.path)
+        self.assertEqual(str(err), 'syntax error at line 2, column 3')
+
+
+    def test_parseFileAndReportIOError(self):
+        """
+        If an L{IOError} is raised while reading from the file specified to
+        L{tree.parseFileAndReport}, a L{process.ProcessingFailure} is raised
+        indicating what the error was.  The file should be closed by the
+        time the exception is raised to the caller.
+        """
+        class FakeFile:
+            _open = True
+            def read(self, bytes=None):
+                raise IOError(errno.ENOTCONN, 'socket not connected')
+
+            def close(self):
+                self._open = False
+
+        theFile = FakeFile()
+        def fakeOpen(filename):
+            return theFile
+
+        err = self.assertRaises(
+            process.ProcessingFailure, tree.parseFileAndReport, "foo", fakeOpen)
+        self.assertEqual(str(err), "socket not connected, filename was 'foo'")
+        self.assertFalse(theFile._open)
+
+
+
+class XMLParsingTests(unittest.TestCase):
+    """
+    Tests for various aspects of parsing a Lore XML input document using
+    L{tree.parseFileAndReport}.
+    """
+    def _parseTest(self, xml):
+        path = FilePath(self.mktemp())
+        path.setContent(xml)
+        return tree.parseFileAndReport(path.path)
+
+
+    def test_withoutDocType(self):
+        """
+        A Lore XML input document may omit a I{DOCTYPE} declaration.  If it
+        does so, the XHTML1 Strict DTD is used.
+        """
+        # Parsing should succeed.
+        document = self._parseTest("<foo>uses an xhtml entity: ©</foo>")
+        # But even more than that, the © entity should be turned into the
+        # appropriate unicode codepoint.
+        self.assertEqual(
+            domhelpers.gatherTextNodes(document.documentElement),
+            u"uses an xhtml entity: \N{COPYRIGHT SIGN}")
+
+
+    def test_withTransitionalDocType(self):
+        """
+        A Lore XML input document may include a I{DOCTYPE} declaration
+        referring to the XHTML1 Transitional DTD.
+        """
+        # Parsing should succeed.
+        document = self._parseTest("""\
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+<foo>uses an xhtml entity: ©</foo>
+""")
+        # But even more than that, the © entity should be turned into the
+        # appropriate unicode codepoint.
+        self.assertEqual(
+            domhelpers.gatherTextNodes(document.documentElement),
+            u"uses an xhtml entity: \N{COPYRIGHT SIGN}")
+
+
+    def test_withStrictDocType(self):
+        """
+        A Lore XML input document may include a I{DOCTYPE} declaration
+        referring to the XHTML1 Strict DTD.
+        """
+        # Parsing should succeed.
+        document = self._parseTest("""\
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<foo>uses an xhtml entity: ©</foo>
+""")
+        # But even more than that, the © entity should be turned into the
+        # appropriate unicode codepoint.
+        self.assertEqual(
+            domhelpers.gatherTextNodes(document.documentElement),
+            u"uses an xhtml entity: \N{COPYRIGHT SIGN}")
+
+
+    def test_withDisallowedDocType(self):
+        """
+        A Lore XML input document may not include a I{DOCTYPE} declaration
+        referring to any DTD other than XHTML1 Transitional or XHTML1 Strict.
+        """
+        self.assertRaises(
+            process.ProcessingFailure,
+            self._parseTest,
+            """\
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd">
+<foo>uses an xhtml entity: ©</foo>
+""")
+
+
+
+class XMLSerializationTests(unittest.TestCase, _XMLAssertionMixin):
+    """
+    Tests for L{tree._writeDocument}.
+    """
+    def test_nonASCIIData(self):
+        """
+        A document which contains non-ascii characters is serialized to a
+        file using UTF-8.
+        """
+        document = dom.Document()
+        parent = dom.Element('foo')
+        text = dom.Text()
+        text.data = u'\N{SNOWMAN}'
+        parent.appendChild(text)
+        document.appendChild(parent)
+        outFile = self.mktemp()
+        tree._writeDocument(outFile, document)
+        self.assertXMLEqual(
+            FilePath(outFile).getContent(),
+            u'<foo>\N{SNOWMAN}</foo>'.encode('utf-8'))
+
+
+
+class LatexSpitterTestCase(unittest.TestCase):
+    """
+    Tests for the Latex output plugin.
+    """
+    def test_indexedSpan(self):
+        """
+        Test that processing a span tag with an index class results in a latex
+        \\index directive with the correct value.
+        """
+        doc = dom.parseString('<span class="index" value="name" />').documentElement
+        out = StringIO()
+        spitter = LatexSpitter(out.write)
+        spitter.visitNode(doc)
+        self.assertEqual(out.getvalue(), u'\\index{name}\n')
+
+
+
+class ScriptTests(unittest.TestCase):
+    """
+    Tests for L{twisted.lore.scripts.lore}, the I{lore} command's
+    implementation.
+    """
+    def test_getProcessor(self):
+        """
+        L{lore.getProcessor} loads the specified output plugin from the
+        specified input plugin.
+        """
+        processor = lore.getProcessor("lore", "html", options)
+        self.assertNotIdentical(processor, None)
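As context for the processor tests above, a minimal sketch of driving the same pipeline by hand with process.getProcessor(); the file names, template, and baseurl below are placeholders for the example (not files from this tree), and the empty linkrel mirrors what these tests pass.

    # Illustrative sketch (Python 2, matching the code above).
    from twisted.lore import default, process

    config = {
        'template': 'template.tpl',             # assumed: an XHTML template like the tests use
        'ext': '.html',                         # extension given to generated files
        'baseurl': 'http://example.invalid/%s', # API references are interpolated into this
        'filenameMapping': None,
    }
    htmlProcessor = process.getProcessor(default, "html", config)
    # Writes doc.html next to doc.xhtml, as test_getProcessor demonstrates.
    htmlProcessor('doc.xhtml', '')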
diff --git a/ThirdParty/Twisted/twisted/lore/test/test_man2lore.py b/ThirdParty/Twisted/twisted/lore/test/test_man2lore.py
new file mode 100644
index 0000000..06ada30
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/test_man2lore.py
@@ -0,0 +1,169 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Tests for L{twisted.lore.man2lore}.
+"""
+
+from StringIO import StringIO
+
+from twisted.trial.unittest import TestCase
+
+from twisted.lore.man2lore import ManConverter
+
+
+_TRANSITIONAL_XHTML_DTD = ("""\
+<?xml version="1.0"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+    "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
+""")
+
+
+class ManConverterTestCase(TestCase):
+    """
+    Tests for L{ManConverter}.
+    """
+
+    def setUp(self):
+        """
+        Build instance variables useful for tests.
+
+        @ivar converter: a L{ManConverter} to be used during tests.
+        """
+        self.converter = ManConverter()
+
+
+    def assertConvert(self, inputLines, expectedOutput):
+        """
+        Helper method to check conversion from a man page to Lore output.
+
+        @param inputLines: lines of the man page.
+        @type inputLines: C{list}
+
+        @param expectedOutput: expected Lore content.
+        @type expectedOutput: C{str}
+        """
+        inputFile = StringIO()
+        for line in inputLines:
+            inputFile.write(line + '\n')
+        inputFile.seek(0)
+        outputFile = StringIO()
+        self.converter.convert(inputFile, outputFile)
+        self.assertEqual(
+            outputFile.getvalue(), _TRANSITIONAL_XHTML_DTD + expectedOutput)
+
+
+    def test_convert(self):
+        """
+        Test convert on a minimal example.
+        """
+        inputLines = ['.TH BAR "1" "Oct 2007" "" ""', "Foo\n"]
+        output = ("<html><head>\n<title>BAR.1</title></head>\n<body>\n\n"
+                  "<h1>BAR.1</h1>\n\n<p>Foo\n\n</p>\n\n</body>\n</html>\n")
+        self.assertConvert(inputLines, output)
+
+
+    def test_TP(self):
+        """
+        Test C{TP} parsing.
+        """
+        inputLines = ['.TH BAR "1" "Oct 2007" "" ""',
+                ".SH HEADER",
+                ".TP",
+                "\\fB-o\\fR, \\fB--option\\fR",
+                "An option"]
+        output = ("<html><head>\n<title>BAR.1</title></head>\n<body>\n\n"
+                  "<h1>BAR.1</h1>\n\n<h2>HEADER</h2>\n\n<dl><dt>"
+                  "<strong>-o</strong>, <strong>--option</strong>\n</dt>"
+                  "<dd>An option\n</dd>\n\n</dl>\n\n</body>\n</html>\n")
+        self.assertConvert(inputLines, output)
+
+
+    def test_TPMultipleOptions(self):
+        """
+        Try to parse multiple C{TP} fields.
+        """
+        inputLines = ['.TH BAR "1" "Oct 2007" "" ""',
+                ".SH HEADER",
+                ".TP",
+                "\\fB-o\\fR, \\fB--option\\fR",
+                "An option",
+                ".TP",
+                "\\fB-n\\fR, \\fB--another\\fR",
+                "Another option",
+                ]
+        output = ("<html><head>\n<title>BAR.1</title></head>\n<body>\n\n"
+                  "<h1>BAR.1</h1>\n\n<h2>HEADER</h2>\n\n<dl><dt>"
+                  "<strong>-o</strong>, <strong>--option</strong>\n</dt>"
+                  "<dd>An option\n</dd>\n\n<dt>"
+                  "<strong>-n</strong>, <strong>--another</strong>\n</dt>"
+                  "<dd>Another option\n</dd>\n\n</dl>\n\n</body>\n</html>\n")
+        self.assertConvert(inputLines, output)
+
+
+    def test_TPMultiLineOptions(self):
+        """
+        Try to parse multiple C{TP} fields, with options text on several lines.
+        """
+        inputLines = ['.TH BAR "1" "Oct 2007" "" ""',
+                ".SH HEADER",
+                ".TP",
+                "\\fB-o\\fR, \\fB--option\\fR",
+                "An option",
+                "on two lines",
+                ".TP",
+                "\\fB-n\\fR, \\fB--another\\fR",
+                "Another option",
+                "on two lines",
+                ]
+        output = ("<html><head>\n<title>BAR.1</title></head>\n<body>\n\n"
+                  "<h1>BAR.1</h1>\n\n<h2>HEADER</h2>\n\n<dl><dt>"
+                  "<strong>-o</strong>, <strong>--option</strong>\n</dt>"
+                  "<dd>An option\non two lines\n</dd>\n\n"
+                  "<dt><strong>-n</strong>, <strong>--another</strong>\n</dt>"
+                  "<dd>Another option\non two lines\n</dd>\n\n</dl>\n\n"
+                  "</body>\n</html>\n")
+        self.assertConvert(inputLines, output)
+
+
+    def test_ITLegacyManagement(self):
+        """
+        Test management of BL/IT/EL used in some man pages.
+        """
+        inputLines = ['.TH BAR "1" "Oct 2007" "" ""',
+                ".SH HEADER",
+                ".BL",
+                ".IT An option",
+                "on two lines",
+                ".IT",
+                "Another option",
+                "on two lines",
+                ".EL"
+                ]
+        output = ("<html><head>\n<title>BAR.1</title></head>\n<body>\n\n"
+                  "<h1>BAR.1</h1>\n\n<h2>HEADER</h2>\n\n<dl>"
+                  "<dt>on two lines\n</dt><dd>Another option\non two lines\n"
+                  "</dd></dl>\n\n</body>\n</html>\n")
+        self.assertConvert(inputLines, output)
+
+
+    def test_interactiveCommand(self):
+        """
+        Test management of the interactive command tag.
+        """
+        inputLines = ['.TH BAR "1" "Oct 2007" "" ""',
+                ".SH HEADER",
+                ".BL",
+                ".IT IC foo AR bar",
+                "option 1",
+                ".IT IC egg AR spam OP AR stuff",
+                "option 2",
+                ".EL"
+                ]
+        output = ("<html><head>\n<title>BAR.1</title></head>\n<body>\n\n"
+                  "<h1>BAR.1</h1>\n\n<h2>HEADER</h2>\n\n<dl>"
+                  "<dt>foo <u>bar</u></dt><dd>option 1\n</dd><dt>egg "
+                  "<u>spam</u> [<u>stuff</u>]</dt><dd>option 2\n</dd></dl>"
+                  "\n\n</body>\n</html>\n")
+        self.assertConvert(inputLines, output)
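A quick sketch of exercising ManConverter outside of these tests, mirroring assertConvert() above; the man page text is invented for the example.

    from StringIO import StringIO
    from twisted.lore.man2lore import ManConverter

    manSource = StringIO('.TH BAR "1" "Oct 2007" "" ""\n.SH HEADER\nFoo\n')
    loreOutput = StringIO()
    # convert() reads the man page and writes the XHTML DTD plus the converted body.
    ManConverter().convert(manSource, loreOutput)
    print loreOutput.getvalue()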
diff --git a/ThirdParty/Twisted/twisted/lore/test/test_scripts.py b/ThirdParty/Twisted/twisted/lore/test/test_scripts.py
new file mode 100644
index 0000000..0a8328b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/test_scripts.py
@@ -0,0 +1,27 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the command-line interface to lore.
+"""
+
+from twisted.trial.unittest import TestCase
+from twisted.scripts.test.test_scripts import ScriptTestsMixin
+from twisted.python.test.test_shellcomp import ZshScriptTestMixin
+
+
+
+class ScriptTests(TestCase, ScriptTestsMixin):
+    """
+    Tests for the I{lore} script.
+    """
+    def test_lore(self):
+        self.scriptTest("lore/lore")
+
+
+
+class ZshIntegrationTestCase(TestCase, ZshScriptTestMixin):
+    """
+    Test that zsh completion functions are generated without error
+    """
+    generateFor = [('lore', 'twisted.lore.scripts.lore.Options')]
diff --git a/ThirdParty/Twisted/twisted/lore/test/test_slides.py b/ThirdParty/Twisted/twisted/lore/test/test_slides.py
new file mode 100644
index 0000000..78d2cbe
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/test/test_slides.py
@@ -0,0 +1,85 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.lore.slides}.
+"""
+
+from xml.dom.minidom import Element, Text
+
+from twisted.trial.unittest import TestCase
+from twisted.lore.slides import HTMLSlide, splitIntoSlides, insertPrevNextLinks
+
+
+class SlidesTests(TestCase):
+    """
+    Tests for functions in L{twisted.lore.slides}.
+    """
+    def test_splitIntoSlides(self):
+        """
+        L{splitIntoSlides} accepts a document and returns a list of two-tuples,
+        each element of which contains the title of a slide taken from an I{h2}
+        element and the body of that slide.
+        """
+        parent = Element('html')
+        body = Element('body')
+        parent.appendChild(body)
+
+        first = Element('h2')
+        text = Text()
+        text.data = 'first slide'
+        first.appendChild(text)
+        body.appendChild(first)
+        body.appendChild(Element('div'))
+        body.appendChild(Element('span'))
+
+        second = Element('h2')
+        text = Text()
+        text.data = 'second slide'
+        second.appendChild(text)
+        body.appendChild(second)
+        body.appendChild(Element('p'))
+        body.appendChild(Element('br'))
+
+        slides = splitIntoSlides(parent)
+
+        self.assertEqual(slides[0][0], 'first slide')
+        firstContent = slides[0][1]
+        self.assertEqual(firstContent[0].tagName, 'div')
+        self.assertEqual(firstContent[1].tagName, 'span')
+        self.assertEqual(len(firstContent), 2)
+
+        self.assertEqual(slides[1][0], 'second slide')
+        secondContent = slides[1][1]
+        self.assertEqual(secondContent[0].tagName, 'p')
+        self.assertEqual(secondContent[1].tagName, 'br')
+        self.assertEqual(len(secondContent), 2)
+
+        self.assertEqual(len(slides), 2)
+
+
+    def test_insertPrevNextText(self):
+        """
+        L{insertPrevNextLinks} appends a text node with the title of the
+        previous slide to each node with a I{previous} class and the title of
+        the next slide to each node with a I{next} class.
+        """
+        next = Element('span')
+        next.setAttribute('class', 'next')
+        container = Element('div')
+        container.appendChild(next)
+        slideWithNext = HTMLSlide(container, 'first', 0)
+
+        previous = Element('span')
+        previous.setAttribute('class', 'previous')
+        container = Element('div')
+        container.appendChild(previous)
+        slideWithPrevious = HTMLSlide(container, 'second', 1)
+
+        insertPrevNextLinks(
+            [slideWithNext, slideWithPrevious], None, None)
+
+        self.assertEqual(
+            next.toxml(), '<span class="next">second</span>')
+        self.assertEqual(
+            previous.toxml(), '<span class="previous">first</span>')
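For reference, a sketch of calling splitIntoSlides() directly on a small document shaped like the fixture in test_splitIntoSlides above; the markup is invented for the example.

    from xml.dom.minidom import parseString
    from twisted.lore.slides import splitIntoSlides

    parent = parseString(
        '<html><body>'
        '<h2>first slide</h2><p>one</p>'
        '<h2>second slide</h2><p>two</p>'
        '</body></html>').documentElement
    # Each entry is (title, [nodes making up that slide's body]).
    for title, body in splitIntoSlides(parent):
        print title, [node.tagName for node in body]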
diff --git a/ThirdParty/Twisted/twisted/lore/texi.py b/ThirdParty/Twisted/twisted/lore/texi.py
new file mode 100644
index 0000000..03f7347
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/texi.py
@@ -0,0 +1,109 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+from cStringIO import StringIO
+import os, re
+from twisted.python import text
+from twisted.web import domhelpers
+import latex, tree
+
+spaceRe = re.compile('\s+')
+
+def texiEscape(text):
+    # Collapse runs of whitespace to single spaces; re.sub takes (repl, string).
+    return spaceRe.sub(' ', text)
+
+entities = latex.entities.copy()
+entities['copy'] = '@copyright{}'
+
+class TexiSpitter(latex.BaseLatexSpitter):
+
+    baseLevel = 1
+
+    def writeNodeData(self, node):
+        buf = StringIO()
+        latex.getLatexText(node, self.writer, texiEscape, entities)
+
+    def visitNode_title(self, node):
+        self.writer('@node ')
+        self.visitNodeDefault(node)
+        self.writer('\n')
+        self.writer('@section ')
+        self.visitNodeDefault(node)
+        self.writer('\n')
+        headers = tree.getHeaders(domhelpers.getParents(node)[-1])
+        if not headers:
+            return
+        self.writer('@menu\n')
+        for header in headers:
+            self.writer('* %s::\n' % domhelpers.getNodeText(header))
+        self.writer('@end menu\n')
+
+    def visitNode_pre(self, node):
+        self.writer('@verbatim\n')
+        buf = StringIO()
+        latex.getLatexText(node, buf.write, entities=entities)
+        self.writer(text.removeLeadingTrailingBlanks(buf.getvalue()))
+        self.writer('@end verbatim\n')
+
+    def visitNode_code(self, node):
+        fout = StringIO()
+        latex.getLatexText(node, fout.write, texiEscape, entities)
+        self.writer('@code{'+fout.getvalue()+'}')
+
+    def visitNodeHeader(self, node):
+        self.writer('\n\n@node ')
+        self.visitNodeDefault(node)
+        self.writer('\n')
+        level = (int(node.tagName[1])-2)+self.baseLevel
+        self.writer('\n\n@'+level*'sub'+'section ')
+        self.visitNodeDefault(node)
+        self.writer('\n')
+
+    def visitNode_a_listing(self, node):
+        fileName = os.path.join(self.currDir, node.getAttribute('href'))
+        self.writer('@verbatim\n')
+        self.writer(open(fileName).read())
+        self.writer('@end verbatim')
+        # Write a caption for this source listing
+
+    def visitNode_a_href(self, node):
+        self.visitNodeDefault(node)
+
+    def visitNode_a_name(self, node):
+        self.visitNodeDefault(node)
+
+    visitNode_h2 = visitNode_h3 = visitNode_h4 = visitNodeHeader
+
+    start_dl = '@itemize\n'
+    end_dl = '@end itemize\n'
+    start_ul = '@itemize\n'
+    end_ul = '@end itemize\n'
+
+    start_ol = '@enumerate\n'
+    end_ol = '@end enumerate\n'
+
+    start_li = '@item\n'
+    end_li = '\n'
+
+    start_dt = '@item\n'
+    end_dt = ': '
+    end_dd = '\n'
+
+    start_p = '\n\n'
+
+    start_strong = start_em = '@emph{'
+    end_strong = end_em = '}'
+
+    start_q = "``"
+    end_q = "''"
+
+    start_span_footnote = '@footnote{'
+    end_span_footnote = '}'
+
+    start_div_note = '@quotation\n@strong{Note:}'
+    end_div_note = '@end quotation\n'
+
+    start_th = '@strong{'
+    end_th = '}'
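A sketch of driving TexiSpitter the same way LatexSpitter is driven in test_indexedSpan above; the <pre> snippet is invented for the example.

    from StringIO import StringIO
    from xml.dom import minidom as dom
    from twisted.lore.texi import TexiSpitter

    doc = dom.parseString('<pre>print "hello"</pre>').documentElement
    out = StringIO()
    spitter = TexiSpitter(out.write)
    spitter.visitNode(doc)   # emits a @verbatim block for the <pre> element
    print out.getvalue()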
diff --git a/ThirdParty/Twisted/twisted/lore/topfiles/NEWS b/ThirdParty/Twisted/twisted/lore/topfiles/NEWS
new file mode 100644
index 0000000..7666373
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/topfiles/NEWS
@@ -0,0 +1,167 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted Lore 12.3.0 (2012-12-20)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Lore 12.2.0 (2012-08-26)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Lore 12.1.0 (2012-06-02)
+================================
+
+Bugfixes
+--------
+ - twisted.plugins.twisted_lore's MathProcessor plugin is now
+   associated with the correct implementation module. (#5326)
+
+
+Twisted Lore 12.0.0 (2012-02-10)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Lore 11.1.0 (2011-11-15)
+================================
+
+Bugfixes
+--------
+ - When run from an unpacked source tarball or a VCS checkout,
+   bin/lore/lore will now use the version of Twisted it is part of.
+   (#3526)
+
+Deprecations and Removals
+-------------------------
+ - Removed compareMarkPos and comparePosition from lore.tree,
+   deprecated in Twisted 9.0. (#5127)
+
+
+Twisted Lore 11.0.0 (2011-04-01)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Lore 10.2.0 (2010-11-29)
+================================
+
+No significant changes have been made for this release.
+
+Other
+-----
+ - #4571
+
+
+Twisted Lore 10.1.0 (2010-06-27)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Lore 10.0.0 (2010-03-01)
+================================
+
+Other
+-----
+ - #4241
+
+
+Twisted Lore 9.0.0 (2009-11-24)
+===============================
+
+Features
+--------
+ - Python source listings now include line numbers (#3486)
+
+Fixes
+-----
+ - Lore now uses minidom instead of Twisted's microdom, which incidentally
+   fixes some Lore bugs such as throwing away certain whitespace
+   (#3560, #414, #3619)
+ - Lore's "lint" command should no longer break on documents with links in them
+   (#4051, #4115)
+
+Deprecations and Removals
+-------------------------
+ - Lore no longer uses the ancient "tml" Twisted plugin system (#1911)
+
+Other
+-----
+ - #3565, #3246, #3540, #3750, #4050
+
+
+Lore 8.2.0 (2008-12-16)
+=======================
+
+Other
+-----
+ - #2207, #2514
+
+
+8.1.0 (2008-05-18)
+==================
+
+Fixes
+-----
+ - The deprecated mktap API is no longer used (#3127)
+
+
+8.0.0 (2008-03-17)
+==================
+
+Fixes
+-----
+ - Change twisted.lore.tree.setIndexLink so that it removes nodes with the index-link
+   class when the specified index filename is None. (#812)
+ - Fix the conversion of the list of options in man pages to Lore format.
+   (#3017)
+ - Fix conch man pages generation. (#3075)
+ - Fix management of the interactive command tag in man2lore. (#3076)
+
+Misc
+----
+ - #2847
+
+
+0.3.0 (2007-01-06)
+==================
+
+Features
+--------
+ - Many docstrings were added to twisted.lore.tree (#2301)
+
+Fixes
+-----
+ - Emitting a span with an index class to latex now works (#2134)
+
+
+0.2.0 (2006-05-24)
+==================
+
+Features
+--------
+ - Docstring improvements.
+
+Fixes
+-----
+ - Embedded Dia support for Latex no longer requires the 'which'
+   command line tool.
+ - Misc: #1142.
+
+Deprecations
+------------
+ - The unused, undocumented, untested and severely crashy 'bookify'
+   functionality was removed.
+
+
+0.1.0
+=====
+ - Use htmlizer mode that doesn't insert extra span tags, thus making
+   it not mess up in Safari.
diff --git a/ThirdParty/Twisted/twisted/lore/topfiles/README b/ThirdParty/Twisted/twisted/lore/topfiles/README
new file mode 100644
index 0000000..5a83fbb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/topfiles/README
@@ -0,0 +1,3 @@
+Twisted Lore 12.3.0
+
+Twisted Lore depends on Twisted and Twisted Web.
diff --git a/ThirdParty/Twisted/twisted/lore/topfiles/setup.py b/ThirdParty/Twisted/twisted/lore/topfiles/setup.py
new file mode 100644
index 0000000..a04f563
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/topfiles/setup.py
@@ -0,0 +1,29 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+try:
+    from twisted.python import dist
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+if __name__ == '__main__':
+    dist.setup(
+        twisted_subproject="lore",
+        scripts=dist.getScripts("lore"),
+        # metadata
+        name="Twisted Lore",
+        description="Twisted documentation system",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python@twistedmatrix.com",
+        maintainer="Andrew Bennetts",
+        url="http://twistedmatrix.com/trac/wiki/TwistedLore",
+        license="MIT",
+        long_description="""\
+Twisted Lore is a documentation generator with HTML and LaTeX support,
+used in the Twisted project.
+""",
+        )
diff --git a/ThirdParty/Twisted/twisted/lore/tree.py b/ThirdParty/Twisted/twisted/lore/tree.py
new file mode 100644
index 0000000..5cc71aa
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/tree.py
@@ -0,0 +1,1122 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from itertools import count
+import re, os, cStringIO, time, cgi, urlparse
+from xml.dom import minidom as dom
+from xml.sax.handler import ErrorHandler, feature_validation
+from xml.dom.pulldom import SAX2DOM
+from xml.sax import make_parser
+from xml.sax.xmlreader import InputSource
+
+from twisted.python import htmlizer, text
+from twisted.python.filepath import FilePath
+from twisted.web import domhelpers
+import process, latex, indexer, numberer, htmlbook
+
+# relative links to html files
+def fixLinks(document, ext):
+    """
+    Rewrite links to XHTML lore input documents so they point to lore XHTML
+    output documents.
+
+    Any node with an C{href} attribute whose value does not start with
+    C{http}, C{https}, C{ftp}, or C{mailto}, whose C{class} attribute is
+    neither C{absolute} nor one containing C{listing}, and whose C{href}
+    points to a URL ending with C{html} will have that attribute value
+    rewritten so that the filename extension is C{ext} instead of C{html}.
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @type ext: C{str}
+    @param ext: The extension to use when selecting an output file name.  This
+    replaces the extension of the input file name.
+
+    @return: C{None}
+    """
+    supported_schemes=['http', 'https', 'ftp', 'mailto']
+    for node in domhelpers.findElementsWithAttribute(document, 'href'):
+        href = node.getAttribute("href")
+        if urlparse.urlparse(href)[0] in supported_schemes:
+            continue
+        if node.getAttribute("class") == "absolute":
+            continue
+        if node.getAttribute("class").find('listing') != -1:
+            continue
+
+        # This is a relative link, so it should be munged.
+        if href.endswith('html') or href[:href.rfind('#')].endswith('html'):
+            fname, fext = os.path.splitext(href)
+            if '#' in fext:
+                fext = ext+'#'+fext.split('#', 1)[1]
+            else:
+                fext = ext
+            node.setAttribute("href", fname + fext)
+
+
+
+def addMtime(document, fullpath):
+    """
+    Set the last modified time of the given document.
+
+    @type document: A DOM Node or Document
+    @param document: The output template which defines the presentation of the
+    last modified time.
+
+    @type fullpath: C{str}
+    @param fullpath: The file name from which to take the last modified time.
+
+    @return: C{None}
+    """
+    for node in domhelpers.findElementsWithAttribute(document, "class","mtime"):
+        txt = dom.Text()
+        txt.data = time.ctime(os.path.getmtime(fullpath))
+        node.appendChild(txt)
+
+
+
+def _getAPI(node):
+    """
+    Retrieve the fully qualified Python name represented by the given node.
+
+    The name is represented by one or two aspects of the node: the value of the
+    node's first child forms the end of the name.  If the node has a C{base}
+    attribute, that attribute's value is prepended to the node's value, with
+    C{.} separating the two parts.
+
+    @rtype: C{str}
+    @return: The fully qualified Python name.
+    """
+    base = ""
+    if node.hasAttribute("base"):
+        base = node.getAttribute("base") + "."
+    return base+node.childNodes[0].nodeValue
+
+
+
+def fixAPI(document, url):
+    """
+    Replace API references with links to API documentation.
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @type url: C{str}
+    @param url: A string which will be interpolated with the fully qualified
+    Python name of any API reference encountered in the input document, the
+    result of which will be used as a link to API documentation for that name
+    in the output document.
+
+    @return: C{None}
+    """
+    # API references
+    for node in domhelpers.findElementsWithAttribute(document, "class", "API"):
+        fullname = _getAPI(node)
+        anchor = dom.Element('a')
+        anchor.setAttribute('href', url % (fullname,))
+        anchor.setAttribute('title', fullname)
+        while node.childNodes:
+            child = node.childNodes[0]
+            node.removeChild(child)
+            anchor.appendChild(child)
+        node.appendChild(anchor)
+        if node.hasAttribute('base'):
+            node.removeAttribute('base')
+
+
+
+def fontifyPython(document):
+    """
+    Syntax color any node in the given document which contains a Python source
+    listing.
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @return: C{None}
+    """
+    def matcher(node):
+        return (node.nodeName == 'pre' and node.hasAttribute('class') and
+                node.getAttribute('class') == 'python')
+    for node in domhelpers.findElements(document, matcher):
+        fontifyPythonNode(node)
+
+
+
+def fontifyPythonNode(node):
+    """
+    Syntax color the given node containing Python source code.
+
+    The node must have a parent.
+
+    @return: C{None}
+    """
+    oldio = cStringIO.StringIO()
+    latex.getLatexText(node, oldio.write,
+                       entities={'lt': '<', 'gt': '>', 'amp': '&'})
+    oldio = cStringIO.StringIO(oldio.getvalue().strip()+'\n')
+    howManyLines = len(oldio.getvalue().splitlines())
+    newio = cStringIO.StringIO()
+    htmlizer.filter(oldio, newio, writer=htmlizer.SmallerHTMLWriter)
+    lineLabels = _makeLineNumbers(howManyLines)
+    newel = dom.parseString(newio.getvalue()).documentElement
+    newel.setAttribute("class", "python")
+    node.parentNode.replaceChild(newel, node)
+    newel.insertBefore(lineLabels, newel.firstChild)
+
+
+
+def addPyListings(document, dir):
+    """
+    Insert Python source listings into the given document from files in the
+    given directory based on C{py-listing} nodes.
+
+    Any node in C{document} with a C{class} attribute set to C{py-listing} will
+    have source lines taken from the file named in that node's C{href}
+    attribute (searched for in C{dir}) inserted in place of that node.
+
+    If a node has a C{skipLines} attribute, its value will be parsed as an
+    integer and that many lines will be skipped at the beginning of the source
+    file.
+
+    @type document: A DOM Node or Document
+    @param document: The document within which to make listing replacements.
+
+    @type dir: C{str}
+    @param dir: The directory in which to find source files containing the
+    referenced Python listings.
+
+    @return: C{None}
+    """
+    for node in domhelpers.findElementsWithAttribute(document, "class",
+                                                     "py-listing"):
+        filename = node.getAttribute("href")
+        outfile = cStringIO.StringIO()
+        lines = map(str.rstrip, open(os.path.join(dir, filename)).readlines())
+
+        skip = node.getAttribute('skipLines') or 0
+        lines = lines[int(skip):]
+        howManyLines = len(lines)
+        data = '\n'.join(lines)
+
+        data = cStringIO.StringIO(text.removeLeadingTrailingBlanks(data))
+        htmlizer.filter(data, outfile, writer=htmlizer.SmallerHTMLWriter)
+        sourceNode = dom.parseString(outfile.getvalue()).documentElement
+        sourceNode.insertBefore(_makeLineNumbers(howManyLines), sourceNode.firstChild)
+        _replaceWithListing(node, sourceNode.toxml(), filename, "py-listing")
+
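+# Markup sketch (file name and line count are hypothetical): a node such as
+#
+#   <div class="py-listing" href="echoserver.py" skipLines="2">
+#     echoserver.py
+#   </div>
+#
+# is replaced by a syntax-colored listing of echoserver.py (looked up in
+# C{dir}) with its first two lines skipped and line numbers added on the left.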
+
+
+def _makeLineNumbers(howMany):
+    """
+    Return an element which will render line numbers for a source listing.
+
+    @param howMany: The number of lines in the source listing.
+    @type howMany: C{int}
+
+    @return: An L{dom.Element} which can be added to the document before
+        the source listing to add line numbers to it.
+    """
+    # Figure out how many digits wide the widest line number label will be.
+    width = len(str(howMany))
+
+    # Render all the line labels with appropriate padding
+    labels = ['%*d' % (width, i) for i in range(1, howMany + 1)]
+
+    # Create a p element with the right style containing the labels
+    p = dom.Element('p')
+    p.setAttribute('class', 'py-linenumber')
+    t = dom.Text()
+    t.data = '\n'.join(labels) + '\n'
+    p.appendChild(t)
+    return p
+
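+# For example (illustrative only), _makeLineNumbers(12) returns a
+# <p class="py-linenumber"> element whose text is " 1\n 2\n...\n12\n"; each
+# label is padded to len(str(12)) == 2 characters so the numbers line up
+# alongside the listing they precede.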
+
+def _replaceWithListing(node, val, filename, class_):
+    captionTitle = domhelpers.getNodeText(node)
+    if captionTitle == os.path.basename(filename):
+        captionTitle = 'Source listing'
+    text = ('<div class="%s">%s<div class="caption">%s - '
+            '<a href="%s"><span class="filename">%s</span></a></div></div>' %
+            (class_, val, captionTitle, filename, filename))
+    newnode = dom.parseString(text).documentElement
+    node.parentNode.replaceChild(newnode, node)
+
+
+
+def addHTMLListings(document, dir):
+    """
+    Insert HTML source listings into the given document from files in the given
+    directory based on C{html-listing} nodes.
+
+    Any node in C{document} with a C{class} attribute set to C{html-listing}
+    will have source lines taken from the file named in that node's C{href}
+    attribute (searched for in C{dir}) inserted in place of that node.
+
+    @type document: A DOM Node or Document
+    @param document: The document within which to make listing replacements.
+
+    @type dir: C{str}
+    @param dir: The directory in which to find source files containing the
+    referenced HTML listings.
+
+    @return: C{None}
+    """
+    for node in domhelpers.findElementsWithAttribute(document, "class",
+                                                     "html-listing"):
+        filename = node.getAttribute("href")
+        val = ('<pre class="htmlsource">\n%s</pre>' %
+               cgi.escape(open(os.path.join(dir, filename)).read()))
+        _replaceWithListing(node, val, filename, "html-listing")
+
+
+
+def addPlainListings(document, dir):
+    """
+    Insert text listings into the given document from files in the given
+    directory based on C{listing} nodes.
+
+    Any node in C{document} with a C{class} attribute set to C{listing} will
+    have source lines taken from the file named in that node's C{href}
+    attribute (searched for in C{dir}) inserted in place of that node.
+
+    @type document: A DOM Node or Document
+    @param document: The document within which to make listing replacements.
+
+    @type dir: C{str}
+    @param dir: The directory in which to find source files containing the
+    referenced text listings.
+
+    @return: C{None}
+    """
+    for node in domhelpers.findElementsWithAttribute(document, "class",
+                                                     "listing"):
+        filename = node.getAttribute("href")
+        val = ('<pre>\n%s</pre>' %
+               cgi.escape(open(os.path.join(dir, filename)).read()))
+        _replaceWithListing(node, val, filename, "listing")
+
+
+
+def getHeaders(document):
+    """
+    Return all H2 and H3 nodes in the given document.
+
+    @type document: A DOM Node or Document
+
+    @rtype: C{list}
+    """
+    return domhelpers.findElements(
+        document,
+        lambda n, m=re.compile('h[23]$').match: m(n.nodeName))
+
+
+
+def generateToC(document):
+    """
+    Create a table of contents for the given document.
+
+    @type document: A DOM Node or Document
+
+    @rtype: A DOM Node
+    @return: a Node containing a table of contents based on the headers of the
+    given document.
+    """
+    subHeaders = None
+    headers = []
+    for element in getHeaders(document):
+        if element.tagName == 'h2':
+            subHeaders = []
+            headers.append((element, subHeaders))
+        elif subHeaders is None:
+            raise ValueError(
+                "No H3 element is allowed until after an H2 element")
+        else:
+            subHeaders.append(element)
+
+    auto = count().next
+
+    def addItem(headerElement, parent):
+        anchor = dom.Element('a')
+        name = 'auto%d' % (auto(),)
+        anchor.setAttribute('href', '#' + name)
+        text = dom.Text()
+        text.data = domhelpers.getNodeText(headerElement)
+        anchor.appendChild(text)
+        headerNameItem = dom.Element('li')
+        headerNameItem.appendChild(anchor)
+        parent.appendChild(headerNameItem)
+        anchor = dom.Element('a')
+        anchor.setAttribute('name', name)
+        headerElement.appendChild(anchor)
+
+    toc = dom.Element('ol')
+    for headerElement, subHeaders in headers:
+        addItem(headerElement, toc)
+        if subHeaders:
+            subtoc = dom.Element('ul')
+            toc.appendChild(subtoc)
+            for subHeaderElement in subHeaders:
+                addItem(subHeaderElement, subtoc)
+
+    return toc
+
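+# Output sketch (heading text is hypothetical): for headers <h2>Intro</h2>,
+# <h3>Goals</h3> and <h2>Usage</h2>, the generated table of contents is
+# roughly
+#
+#   <ol>
+#     <li><a href="#auto0">Intro</a></li>
+#     <ul><li><a href="#auto1">Goals</a></li></ul>
+#     <li><a href="#auto2">Usage</a></li>
+#   </ol>
+#
+# and a matching <a name="autoN"> anchor is appended to each header.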
+
+
+def putInToC(document, toc):
+    """
+    Insert the given table of contents into the given document.
+
+    The node with C{class} attribute set to C{toc} has its children replaced
+    with C{toc}.
+
+    @type document: A DOM Node or Document
+    @type toc: A DOM Node
+    """
+    tocOrig = domhelpers.findElementsWithAttribute(document, 'class', 'toc')
+    if tocOrig:
+        tocOrig = tocOrig[0]
+        tocOrig.childNodes = [toc]
+
+
+
+def removeH1(document):
+    """
+    Replace all C{h1} nodes in the given document with empty C{span} nodes.
+
+    C{h1} nodes mark up document sections and the output template is given an
+    opportunity to present this information in a different way.
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @return: C{None}
+    """
+    h1 = domhelpers.findNodesNamed(document, 'h1')
+    empty = dom.Element('span')
+    for node in h1:
+        node.parentNode.replaceChild(empty, node)
+
+
+
+def footnotes(document):
+    """
+    Find footnotes in the given document, move them to the end of the body, and
+    generate links to them.
+
+    A footnote is any node with a C{class} attribute set to C{footnote}.
+    Footnote links are generated as superscript.  Footnotes are collected in a
+    C{ol} node at the end of the document.
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @return: C{None}
+    """
+    footnotes = domhelpers.findElementsWithAttribute(document, "class",
+                                                     "footnote")
+    if not footnotes:
+        return
+    footnoteElement = dom.Element('ol')
+    id = 1
+    for footnote in footnotes:
+        href = dom.parseString('<a href="#footnote-%(id)d">'
+                               '<super>%(id)d</super></a>'
+                               % vars()).documentElement
+        text = ' '.join(domhelpers.getNodeText(footnote).split())
+        href.setAttribute('title', text)
+        target = dom.Element('a')
+        target.setAttribute('name', 'footnote-%d' % (id,))
+        target.childNodes = [footnote]
+        footnoteContent = dom.Element('li')
+        footnoteContent.childNodes = [target]
+        footnoteElement.childNodes.append(footnoteContent)
+        footnote.parentNode.replaceChild(href, footnote)
+        id += 1
+    body = domhelpers.findNodesNamed(document, "body")[0]
+    header = dom.parseString('<h2>Footnotes</h2>').documentElement
+    body.childNodes.append(header)
+    body.childNodes.append(footnoteElement)
+
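+# Transformation sketch (footnote text is hypothetical): each
+# <span class="footnote">...</span> is replaced in place by a superscript
+# link like <a href="#footnote-1"><super>1</super></a> whose title is the
+# footnote text, while the footnote bodies are collected into an <ol> under
+# an <h2>Footnotes</h2> heading appended to the end of <body>.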
+
+
+def notes(document):
+    """
+    Find notes in the given document and mark them up as such.
+
+    A note is any node with a C{class} attribute set to C{note}.
+
+    (I think this is a very stupid feature.  When I found it I actually
+    exclaimed out loud. -exarkun)
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @return: C{None}
+    """
+    notes = domhelpers.findElementsWithAttribute(document, "class", "note")
+    notePrefix = dom.parseString('<strong>Note: </strong>').documentElement
+    for note in notes:
+        note.childNodes.insert(0, notePrefix)
+
+
+
+def findNodeJustBefore(target, nodes):
+    """
+    Find the last node in C{nodes} which appears before C{target}, looking at
+    the preceding siblings of C{target} and then of each of its ancestors.
+
+    @param target: The node before which to search.
+    @param nodes: A list of nodes which might be the right node.
+
+    @return: The nearest member of C{nodes} which precedes C{target}.
+    """
+    while target is not None:
+        node = target.previousSibling
+        while node is not None:
+            if node in nodes:
+                return node
+            node = node.previousSibling
+        target = target.parentNode
+    raise RuntimeError("Oops")
+
+
+
+def getFirstAncestorWithSectionHeader(entry):
+    """
+    Visit the ancestors of C{entry} until one with at least one C{h2} child
+    node is found, then return all of that node's C{h2} child nodes.
+
+    @type entry: A DOM Node
+    @param entry: The node from which to begin traversal.  This node itself is
+    excluded from consideration.
+
+    @rtype: C{list} of DOM Nodes
+    @return: All C{h2} nodes of the ultimately selected parent node.
+    """
+    for a in domhelpers.getParents(entry)[1:]:
+        headers = domhelpers.findNodesNamed(a, "h2")
+        if len(headers) > 0:
+            return headers
+    return []
+
+
+
+def getSectionNumber(header):
+    """
+    Retrieve the section number of the given node.
+
+    This is probably intended to interact in a rather specific way with
+    L{numberDocument}.
+
+    @type header: A DOM Node or L{None}
+    @param header: The section from which to extract a number.  The section
+        number is the value of this node's first child.
+
+    @return: C{None} or a C{str} giving the section number.
+    """
+    if not header:
+        return None
+    return domhelpers.gatherTextNodes(header.childNodes[0])
+
+
+
+def getSectionReference(entry):
+    """
+    Find the section number which contains the given node.
+
+    This function looks at the given node's ancestry until it finds a node
+    which defines a section, then returns that section's number.
+
+    @type entry: A DOM Node
+    @param entry: The node for which to determine the section.
+
+    @rtype: C{str}
+    @return: The section number, as returned by C{getSectionNumber} of the
+    first ancestor of C{entry} which defines a section, as determined by
+    L{getFirstAncestorWithSectionHeader}.
+    """
+    headers = getFirstAncestorWithSectionHeader(entry)
+    myHeader = findNodeJustBefore(entry, headers)
+    return getSectionNumber(myHeader)
+
+
+
+def index(document, filename, chapterReference):
+    """
+    Extract index entries from the given document and store them for later use
+    and insert named anchors so that the index can link back to those entries.
+
+    Any node with a C{class} attribute set to C{index} is considered an index
+    entry.
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @type filename: C{str}
+    @param filename: A link to the output for the given document which will be
+    included in the index to link to any index entry found here.
+
+    @type chapterReference: ???
+    @param chapterReference: ???
+
+    @return: C{None}
+    """
+    entries = domhelpers.findElementsWithAttribute(document, "class", "index")
+    if not entries:
+        return
+    i = 0
+    for entry in entries:
+        i += 1
+        anchor = 'index%02d' % i
+        if chapterReference:
+            ref = getSectionReference(entry) or chapterReference
+        else:
+            ref = 'link'
+        indexer.addEntry(filename, anchor, entry.getAttribute('value'), ref)
+        # does nodeName even affect anything?
+        entry.nodeName = entry.tagName = entry.endTagName = 'a'
+        for attrName in entry.attributes.keys():
+            entry.removeAttribute(attrName)
+        entry.setAttribute('name', anchor)
+
+
+
+def setIndexLink(template, indexFilename):
+    """
+    Insert a link to an index document.
+
+    Any node with a C{class} attribute set to C{index-link} will have its tag
+    name changed to C{a} and its C{href} attribute set to C{indexFilename}.
+
+    @type template: A DOM Node or Document
+    @param template: The output template which defines the presentation of the
+    version information.
+
+    @type indexFilename: C{str}
+    @param indexFilename: The address of the index document to which to link.
+    If C{None}, this function will remove all index-link nodes.
+
+    @return: C{None}
+    """
+    indexLinks = domhelpers.findElementsWithAttribute(template,
+                                                      "class",
+                                                      "index-link")
+    for link in indexLinks:
+        if indexFilename is None:
+            link.parentNode.removeChild(link)
+        else:
+            link.nodeName = link.tagName = link.endTagName = 'a'
+            for attrName in link.attributes.keys():
+                link.removeAttribute(attrName)
+            link.setAttribute('href', indexFilename)
+
+
+
+def numberDocument(document, chapterNumber):
+    """
+    Number the sections of the given document.
+
+    A dot-separated chapter, section number is added to the beginning of each
+    section, as defined by C{h2} nodes.
+
+    This is probably intended to interact in a rather specific way with
+    L{getSectionNumber}.
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @type chapterNumber: C{int}
+    @param chapterNumber: The chapter number of this content in an overall
+    document.
+
+    @return: C{None}
+    """
+    i = 1
+    for node in domhelpers.findNodesNamed(document, "h2"):
+        label = dom.Text()
+        label.data = "%s.%d " % (chapterNumber, i)
+        node.insertBefore(label, node.firstChild)
+        i += 1
+
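+# For example (illustrative only), numberDocument(document, "4") prepends
+# "4.1 ", "4.2 ", ... to the text of the successive <h2> elements in the
+# document.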
+
+
+def fixRelativeLinks(document, linkrel):
+    """
+    Replace relative links in C{src} and C{href} attributes with links relative
+    to C{linkrel}.
+
+    @type document: A DOM Node or Document
+    @param document: The output template.
+
+    @type linkrel: C{str}
+    @param linkrel: A prefix to apply to all relative links in C{src} or
+    C{href} attributes in the input document when generating the output
+    document.
+    """
+    for attr in 'src', 'href':
+        for node in domhelpers.findElementsWithAttribute(document, attr):
+            href = node.getAttribute(attr)
+            if not href.startswith('http') and not href.startswith('/'):
+                node.setAttribute(attr, linkrel+node.getAttribute(attr))
+
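+# For example (illustrative only), with linkrel="../" an attribute such as
+# href="listings/echoserver.py" becomes href="../listings/echoserver.py",
+# while values starting with "http" or "/" are left untouched.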
+
+
+def setTitle(template, title, chapterNumber):
+    """
+    Add title and chapter number information to the template document.
+
+    The title is added to the end of the first C{title} tag and the end of the
+    first tag with a C{class} attribute set to C{title}.  If specified, the
+    chapter is inserted before the title.
+
+    @type template: A DOM Node or Document
+    @param template: The output template which defines the presentation of the
+    version information.
+
+    @type title: C{list} of DOM Nodes
+    @param title: Nodes from the input document defining its title.
+
+    @type chapterNumber: C{int}
+    @param chapterNumber: The chapter number of this content in an overall
+    document.  If not applicable, any C{False} value will result in this
+    information being omitted.
+
+    @return: C{None}
+    """
+    if numberer.getNumberSections() and chapterNumber:
+        titleNode = dom.Text()
+        # This is necessary in order for cloning below to work.  See Python
+        # issue 4851.
+        titleNode.ownerDocument = template.ownerDocument
+        titleNode.data = '%s. ' % (chapterNumber,)
+        title.insert(0, titleNode)
+
+    for nodeList in (domhelpers.findNodesNamed(template, "title"),
+                     domhelpers.findElementsWithAttribute(template, "class",
+                                                          'title')):
+        if nodeList:
+            for titleNode in title:
+                nodeList[0].appendChild(titleNode.cloneNode(True))
+
+
+
+def setAuthors(template, authors):
+    """
+    Add author information to the template document.
+
+    Names and contact information for authors are added to each node with a
+    C{class} attribute set to C{authors} and to the template head as C{link}
+    nodes.
+
+    @type template: A DOM Node or Document
+    @param template: The output template which defines the presentation of the
+    version information.
+
+    @type authors: C{list} of two-tuples of C{str}
+    @param authors: List of names and contact information for the authors of
+    the input document.
+
+    @return: C{None}
+    """
+
+    for node in domhelpers.findElementsWithAttribute(template,
+                                                     "class", 'authors'):
+
+        # First, similarly to setTitle, insert text into a <div
+        # class="authors">
+        container = dom.Element('span')
+        for name, href in authors:
+            anchor = dom.Element('a')
+            anchor.setAttribute('href', href)
+            anchorText = dom.Text()
+            anchorText.data = name
+            anchor.appendChild(anchorText)
+            if (name, href) == authors[-1]:
+                if len(authors) == 1:
+                    container.appendChild(anchor)
+                else:
+                    andText = dom.Text()
+                    andText.data = 'and '
+                    container.appendChild(andText)
+                    container.appendChild(anchor)
+            else:
+                container.appendChild(anchor)
+                commaText = dom.Text()
+                commaText.data = ', '
+                container.appendChild(commaText)
+
+        node.appendChild(container)
+
+    # Second, add appropriate <link rel="author" ...> tags to the <head>.
+    head = domhelpers.findNodesNamed(template, 'head')[0]
+    authors = [dom.parseString('<link rel="author" href="%s" title="%s"/>'
+                               % (href, name)).childNodes[0]
+               for name, href in authors]
+    head.childNodes.extend(authors)
+
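+# Rendering sketch (names and addresses are hypothetical): for
+# authors = [("Alice", "mailto:alice@example.com"),
+#            ("Bob", "mailto:bob@example.com")], each class="authors" node
+# receives a span reading "Alice, and Bob" with each name linked to its
+# address, and matching <link rel="author" .../> tags are appended to the
+# template's <head>.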
+
+
+def setVersion(template, version):
+    """
+    Add a version indicator to the given template.
+
+    @type template: A DOM Node or Document
+    @param template: The output template which defines the presentation of the
+    version information.
+
+    @type version: C{str}
+    @param version: The version string to add to the template.
+
+    @return: C{None}
+    """
+    for node in domhelpers.findElementsWithAttribute(template, "class",
+                                                               "version"):
+        text = dom.Text()
+        text.data = version
+        node.appendChild(text)
+
+
+
+def getOutputFileName(originalFileName, outputExtension, index=None):
+    """
+    Return a filename which is the same as C{originalFileName} except for the
+    extension, which is replaced with C{outputExtension}.
+
+    For example, if C{originalFileName} is C{'/foo/bar.baz'} and
+    C{outputExtension} is C{'quux'}, the return value will be
+    C{'/foo/bar.quux'}.
+
+    @type originalFileName: C{str}
+    @type outputExtension: C{str}
+    @param index: ignored, never passed.
+    @rtype: C{str}
+    """
+    return os.path.splitext(originalFileName)[0]+outputExtension
+
+
+
+def munge(document, template, linkrel, dir, fullpath, ext, url, config, outfileGenerator=getOutputFileName):
+    """
+    Mutate C{template} until it resembles C{document}.
+
+    @type document: A DOM Node or Document
+    @param document: The input document which contains all of the content to be
+    presented.
+
+    @type template: A DOM Node or Document
+    @param template: The template document which defines the desired
+    presentation format of the content.
+
+    @type linkrel: C{str}
+    @param linkrel: A prefix to apply to all relative links in C{src} or
+    C{href} attributes in the input document when generating the output
+    document.
+
+    @type dir: C{str}
+    @param dir: The directory in which to search for source listing files.
+
+    @type fullpath: C{str}
+    @param fullpath: The file name which contained the input document.
+
+    @type ext: C{str}
+    @param ext: The extension to use when selecting an output file name.  This
+    replaces the extension of the input file name.
+
+    @type url: C{str}
+    @param url: A string which will be interpolated with the fully qualified
+    Python name of any API reference encountered in the input document, the
+    result of which will be used as a link to API documentation for that name
+    in the output document.
+
+    @type config: C{dict}
+    @param config: Further specification of the desired form of the output.
+    Valid keys in this dictionary::
+
+        noapi: If present and set to a True value, links to API documentation
+               will not be generated.
+
+        version: A string which will be included in the output to indicate the
+                 version of this documentation.
+
+    @type outfileGenerator: Callable of C{str}, C{str} returning C{str}
+    @param outfileGenerator: Output filename factory.  This is invoked with the
+    input filename and C{ext}, and the output document is serialized to the
+    file with the name returned.
+
+    @return: C{None}
+    """
+    fixRelativeLinks(template, linkrel)
+    addMtime(template, fullpath)
+    removeH1(document)
+    if not config.get('noapi', False):
+        fixAPI(document, url)
+    fontifyPython(document)
+    fixLinks(document, ext)
+    addPyListings(document, dir)
+    addHTMLListings(document, dir)
+    addPlainListings(document, dir)
+    putInToC(template, generateToC(document))
+    footnotes(document)
+    notes(document)
+
+    setIndexLink(template, indexer.getIndexFilename())
+    setVersion(template, config.get('version', ''))
+
+    # Insert the document into the template
+    chapterNumber = htmlbook.getNumber(fullpath)
+    title = domhelpers.findNodesNamed(document, 'title')[0].childNodes
+    setTitle(template, title, chapterNumber)
+    if numberer.getNumberSections() and chapterNumber:
+        numberDocument(document, chapterNumber)
+    index(document, outfileGenerator(os.path.split(fullpath)[1], ext),
+          htmlbook.getReference(fullpath))
+
+    authors = domhelpers.findNodesNamed(document, 'link')
+    authors = [(node.getAttribute('title') or '',
+                node.getAttribute('href') or '')
+               for node in authors
+               if node.getAttribute('rel') == 'author']
+    setAuthors(template, authors)
+
+    body = domhelpers.findNodesNamed(document, "body")[0]
+    tmplbody = domhelpers.findElementsWithAttribute(template, "class",
+                                                              "body")[0]
+    tmplbody.childNodes = body.childNodes
+    tmplbody.setAttribute("class", "content")
+
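+# Configuration sketch (values are hypothetical): a typical C{config} passed
+# to munge() looks like
+#
+#   {'version': '1.0', 'noapi': False}
+#
+# where a true 'noapi' suppresses the fixAPI() pass and 'version' is handed
+# to setVersion() for any class="version" node in the template.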
+
+class _LocationReportingErrorHandler(ErrorHandler):
+    """
+    Define a SAX error handler which can report the location of fatal
+    errors.
+
+    Unlike the errors reported during parsing by other APIs in the xml
+    package, this one tries to make mismatched tag errors more useful by including the
+    location of both the relevant opening and closing tags.
+    """
+    def __init__(self, contentHandler):
+        self.contentHandler = contentHandler
+
+    def fatalError(self, err):
+        # Unfortunately, the underlying expat error code is only exposed as
+        # a string.  I surely do hope no one ever goes and localizes expat.
+        if err.getMessage() == 'mismatched tag':
+            expect, begLine, begCol = self.contentHandler._locationStack[-1]
+            endLine, endCol = err.getLineNumber(), err.getColumnNumber()
+            raise process.ProcessingFailure(
+                "mismatched close tag at line %d, column %d; expected </%s> "
+                "(from line %d, column %d)" % (
+                    endLine, endCol, expect, begLine, begCol))
+        raise process.ProcessingFailure(
+            '%s at line %d, column %d' % (err.getMessage(),
+                                          err.getLineNumber(),
+                                          err.getColumnNumber()))
+
+
+class _TagTrackingContentHandler(SAX2DOM):
+    """
+    Define a SAX content handler which keeps track of the start location of
+    all open tags.  This information is used by the above defined error
+    handler to report useful locations when a fatal error is encountered.
+    """
+    def __init__(self):
+        SAX2DOM.__init__(self)
+        self._locationStack = []
+
+    def setDocumentLocator(self, locator):
+        self._docLocator = locator
+        SAX2DOM.setDocumentLocator(self, locator)
+
+    def startElement(self, name, attrs):
+        self._locationStack.append(
+            (name,
+             self._docLocator.getLineNumber(),
+             self._docLocator.getColumnNumber()))
+        SAX2DOM.startElement(self, name, attrs)
+
+    def endElement(self, name):
+        self._locationStack.pop()
+        SAX2DOM.endElement(self, name)
+
+
+class _LocalEntityResolver(object):
+    """
+    Implement DTD loading (from a local source) for the limited number of
+    DTDs which are allowed for Lore input documents.
+
+    @ivar filename: The name of the file containing the lore input
+        document.
+
+    @ivar knownDTDs: A mapping from DTD system identifiers to L{FilePath}
+        instances pointing to the corresponding DTD.
+    """
+    s = FilePath(__file__).sibling
+
+    knownDTDs = {
+        None: s("xhtml1-strict.dtd"),
+        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd": s("xhtml1-strict.dtd"),
+        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd": s("xhtml1-transitional.dtd"),
+        "xhtml-lat1.ent": s("xhtml-lat1.ent"),
+        "xhtml-symbol.ent": s("xhtml-symbol.ent"),
+        "xhtml-special.ent": s("xhtml-special.ent"),
+        }
+    del s
+
+    def __init__(self, filename):
+        self.filename = filename
+
+
+    def resolveEntity(self, publicId, systemId):
+        source = InputSource()
+        source.setSystemId(systemId)
+        try:
+            dtdPath = self.knownDTDs[systemId]
+        except KeyError:
+            raise process.ProcessingFailure(
+                "Invalid DTD system identifier (%r) in %s.  Only "
+                "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd "
+                "is allowed." % (systemId, self.filename))
+        source.setByteStream(dtdPath.open())
+        return source
+
+
+
+def parseFileAndReport(filename, _open=file):
+    """
+    Parse and return the contents of the given lore XHTML document.
+
+    @type filename: C{str}
+    @param filename: The name of a file containing a lore XHTML document to
+    load.
+
+    @raise process.ProcessingFailure: When the contents of the specified file
+    cannot be parsed.
+
+    @rtype: A DOM Document
+    @return: The document contained in C{filename}.
+    """
+    content = _TagTrackingContentHandler()
+    error = _LocationReportingErrorHandler(content)
+    parser = make_parser()
+    parser.setContentHandler(content)
+    parser.setErrorHandler(error)
+
+    # In order to call a method on the expat parser which will be used by this
+    # parser, we need the expat parser to be created.  This doesn't happen
+    # until reset is called, normally by the parser's parse method.  That's too
+    # late for us, since it will then go on to parse the document without
+    # letting us do any extra set up.  So, force the expat parser to be created
+    # here, and then disable reset so that the parser created is the one
+    # actually used to parse our document.  Resetting is only needed if more
+    # than one document is going to be parsed, and that isn't the case here.
+    parser.reset()
+    parser.reset = lambda: None
+
+    # This is necessary to make the xhtml1 transitional declaration optional.
+    # It causes LocalEntityResolver.resolveEntity(None, None) to be called.
+    # LocalEntityResolver handles that case by giving out the xhtml1
+    # transitional dtd.  Unfortunately, there is no public API for manipulating
+    # the expat parser when using xml.sax.  Using the private _parser attribute
+    # may break.  It's also possible that make_parser will return a parser
+    # which doesn't use expat, but uses some other parser.  Oh well. :(
+    # -exarkun
+    parser._parser.UseForeignDTD(True)
+    parser.setEntityResolver(_LocalEntityResolver(filename))
+
+    # This is probably a no-op because expat is not a validating parser.  Who
+    # knows though, maybe you figured out a way to not use expat.
+    parser.setFeature(feature_validation, False)
+
+    fObj = _open(filename)
+    try:
+        try:
+            parser.parse(fObj)
+        except IOError, e:
+            raise process.ProcessingFailure(
+                e.strerror + ", filename was '" + filename + "'")
+    finally:
+        fObj.close()
+    return content.document
+
+
+def makeSureDirectoryExists(filename):
+    filename = os.path.abspath(filename)
+    dirname = os.path.dirname(filename)
+    if not os.path.exists(dirname):
+        os.makedirs(dirname)
+
+def doFile(filename, linkrel, ext, url, templ, options={}, outfileGenerator=getOutputFileName):
+    """
+    Process the input document at C{filename} and write an output document.
+
+    @type filename: C{str}
+    @param filename: The path to the input file which will be processed.
+
+    @type linkrel: C{str}
+    @param linkrel: A prefix to apply to all relative links in C{src} or
+    C{href} attributes in the input document when generating the output
+    document.
+
+    @type ext: C{str}
+    @param ext: The extension to use when selecting an output file name.  This
+    replaces the extension of the input file name.
+
+    @type url: C{str}
+    @param url: A string which will be interpolated with the fully qualified
+    Python name of any API reference encountered in the input document, the
+    result of which will be used as a link to API documentation for that name
+    in the output document.
+
+    @type templ: A DOM Node or Document
+    @param templ: The template on which the output document will be based.
+    This is mutated and then serialized to the output file.
+
+    @type options: C{dict}
+    @param options: Further specification of the desired form of the output.
+    Valid keys in this dictionary::
+
+        noapi: If present and set to a True value, links to API documentation
+               will not be generated.
+
+        version: A string which will be included in the output to indicate the
+                 version of this documentation.
+
+    @type outfileGenerator: Callable of C{str}, C{str} returning C{str}
+    @param outfileGenerator: Output filename factory.  This is invoked with the
+    input filename and C{ext}, and the output document is serialized to the
+    file with the name returned.
+
+    @return: C{None}
+    """
+    doc = parseFileAndReport(filename)
+    clonedNode = templ.cloneNode(1)
+    munge(doc, clonedNode, linkrel, os.path.dirname(filename), filename, ext,
+          url, options, outfileGenerator)
+    newFilename = outfileGenerator(filename, ext)
+    _writeDocument(newFilename, clonedNode)
+
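+# Usage sketch (paths and patterns are hypothetical):
+#
+#   doFile("howto/servers.xhtml", linkrel="../", ext=".html",
+#          url="api/%s.html", templ=templateDocument)
+#
+# parses howto/servers.xhtml, fills a clone of templateDocument via munge(),
+# and writes the result to howto/servers.html.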
+
+
+def _writeDocument(newFilename, clonedNode):
+    """
+    Serialize the given node to XML into the named file.
+
+    @param newFilename: The name of the file to which the XML will be
+        written.  If this is in a directory which does not exist, the
+        directory will be created.
+
+    @param clonedNode: The root DOM node which will be serialized.
+
+    @return: C{None}
+    """
+    makeSureDirectoryExists(newFilename)
+    f = open(newFilename, 'w')
+    f.write(clonedNode.toxml('utf-8'))
+    f.close()
diff --git a/ThirdParty/Twisted/twisted/lore/xhtml-lat1.ent b/ThirdParty/Twisted/twisted/lore/xhtml-lat1.ent
new file mode 100644
index 0000000..ffee223
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/xhtml-lat1.ent
@@ -0,0 +1,196 @@
+<!-- Portions (C) International Organization for Standardization 1986
+     Permission to copy in any form is granted for use with
+     conforming SGML systems and applications as defined in
+     ISO 8879, provided this notice is included in all copies.
+-->
+<!-- Character entity set. Typical invocation:
+    <!ENTITY % HTMLlat1 PUBLIC
+       "-//W3C//ENTITIES Latin 1 for XHTML//EN"
+       "http://www.w3.org/TR/xhtml1/DTD/xhtml-lat1.ent">
+    %HTMLlat1;
+-->
+
+<!ENTITY nbsp   "&#160;"> <!-- no-break space = non-breaking space,
+                                  U+00A0 ISOnum -->
+<!ENTITY iexcl  "¡"> <!-- inverted exclamation mark, U+00A1 ISOnum -->
+<!ENTITY cent   "¢"> <!-- cent sign, U+00A2 ISOnum -->
+<!ENTITY pound  "£"> <!-- pound sign, U+00A3 ISOnum -->
+<!ENTITY curren "¤"> <!-- currency sign, U+00A4 ISOnum -->
+<!ENTITY yen    "¥"> <!-- yen sign = yuan sign, U+00A5 ISOnum -->
+<!ENTITY brvbar "¦"> <!-- broken bar = broken vertical bar,
+                                  U+00A6 ISOnum -->
+<!ENTITY sect   "§"> <!-- section sign, U+00A7 ISOnum -->
+<!ENTITY uml    "¨"> <!-- diaeresis = spacing diaeresis,
+                                  U+00A8 ISOdia -->
+<!ENTITY copy   "©"> <!-- copyright sign, U+00A9 ISOnum -->
+<!ENTITY ordf   "ª"> <!-- feminine ordinal indicator, U+00AA ISOnum -->
+<!ENTITY laquo  "«"> <!-- left-pointing double angle quotation mark
+                                  = left pointing guillemet, U+00AB ISOnum -->
+<!ENTITY not    "¬"> <!-- not sign = angled dash,
+                                  U+00AC ISOnum -->
+<!ENTITY shy    "&#173;"> <!-- soft hyphen = discretionary hyphen,
+                                  U+00AD ISOnum -->
+<!ENTITY reg    "®"> <!-- registered sign = registered trade mark sign,
+                                  U+00AE ISOnum -->
+<!ENTITY macr   "¯"> <!-- macron = spacing macron = overline
+                                  = APL overbar, U+00AF ISOdia -->
+<!ENTITY deg    "°"> <!-- degree sign, U+00B0 ISOnum -->
+<!ENTITY plusmn "±"> <!-- plus-minus sign = plus-or-minus sign,
+                                  U+00B1 ISOnum -->
+<!ENTITY sup2   "²"> <!-- superscript two = superscript digit two
+                                  = squared, U+00B2 ISOnum -->
+<!ENTITY sup3   "³"> <!-- superscript three = superscript digit three
+                                  = cubed, U+00B3 ISOnum -->
+<!ENTITY acute  "´"> <!-- acute accent = spacing acute,
+                                  U+00B4 ISOdia -->
+<!ENTITY micro  "µ"> <!-- micro sign, U+00B5 ISOnum -->
+<!ENTITY para   "¶"> <!-- pilcrow sign = paragraph sign,
+                                  U+00B6 ISOnum -->
+<!ENTITY middot "·"> <!-- middle dot = Georgian comma
+                                  = Greek middle dot, U+00B7 ISOnum -->
+<!ENTITY cedil  "¸"> <!-- cedilla = spacing cedilla, U+00B8 ISOdia -->
+<!ENTITY sup1   "¹"> <!-- superscript one = superscript digit one,
+                                  U+00B9 ISOnum -->
+<!ENTITY ordm   "º"> <!-- masculine ordinal indicator,
+                                  U+00BA ISOnum -->
+<!ENTITY raquo  "»"> <!-- right-pointing double angle quotation mark
+                                  = right pointing guillemet, U+00BB ISOnum -->
+<!ENTITY frac14 "¼"> <!-- vulgar fraction one quarter
+                                  = fraction one quarter, U+00BC ISOnum -->
+<!ENTITY frac12 "½"> <!-- vulgar fraction one half
+                                  = fraction one half, U+00BD ISOnum -->
+<!ENTITY frac34 "¾"> <!-- vulgar fraction three quarters
+                                  = fraction three quarters, U+00BE ISOnum -->
+<!ENTITY iquest "¿"> <!-- inverted question mark
+                                  = turned question mark, U+00BF ISOnum -->
+<!ENTITY Agrave "À"> <!-- latin capital letter A with grave
+                                  = latin capital letter A grave,
+                                  U+00C0 ISOlat1 -->
+<!ENTITY Aacute "Á"> <!-- latin capital letter A with acute,
+                                  U+00C1 ISOlat1 -->
+<!ENTITY Acirc  "Â"> <!-- latin capital letter A with circumflex,
+                                  U+00C2 ISOlat1 -->
+<!ENTITY Atilde "Ã"> <!-- latin capital letter A with tilde,
+                                  U+00C3 ISOlat1 -->
+<!ENTITY Auml   "Ä"> <!-- latin capital letter A with diaeresis,
+                                  U+00C4 ISOlat1 -->
+<!ENTITY Aring  "Å"> <!-- latin capital letter A with ring above
+                                  = latin capital letter A ring,
+                                  U+00C5 ISOlat1 -->
+<!ENTITY AElig  "Æ"> <!-- latin capital letter AE
+                                  = latin capital ligature AE,
+                                  U+00C6 ISOlat1 -->
+<!ENTITY Ccedil "Ç"> <!-- latin capital letter C with cedilla,
+                                  U+00C7 ISOlat1 -->
+<!ENTITY Egrave "È"> <!-- latin capital letter E with grave,
+                                  U+00C8 ISOlat1 -->
+<!ENTITY Eacute "É"> <!-- latin capital letter E with acute,
+                                  U+00C9 ISOlat1 -->
+<!ENTITY Ecirc  "Ê"> <!-- latin capital letter E with circumflex,
+                                  U+00CA ISOlat1 -->
+<!ENTITY Euml   "Ë"> <!-- latin capital letter E with diaeresis,
+                                  U+00CB ISOlat1 -->
+<!ENTITY Igrave "Ì"> <!-- latin capital letter I with grave,
+                                  U+00CC ISOlat1 -->
+<!ENTITY Iacute "Í"> <!-- latin capital letter I with acute,
+                                  U+00CD ISOlat1 -->
+<!ENTITY Icirc  "Î"> <!-- latin capital letter I with circumflex,
+                                  U+00CE ISOlat1 -->
+<!ENTITY Iuml   "Ï"> <!-- latin capital letter I with diaeresis,
+                                  U+00CF ISOlat1 -->
+<!ENTITY ETH    "Ð"> <!-- latin capital letter ETH, U+00D0 ISOlat1 -->
+<!ENTITY Ntilde "Ñ"> <!-- latin capital letter N with tilde,
+                                  U+00D1 ISOlat1 -->
+<!ENTITY Ograve "Ò"> <!-- latin capital letter O with grave,
+                                  U+00D2 ISOlat1 -->
+<!ENTITY Oacute "Ó"> <!-- latin capital letter O with acute,
+                                  U+00D3 ISOlat1 -->
+<!ENTITY Ocirc  "Ô"> <!-- latin capital letter O with circumflex,
+                                  U+00D4 ISOlat1 -->
+<!ENTITY Otilde "Õ"> <!-- latin capital letter O with tilde,
+                                  U+00D5 ISOlat1 -->
+<!ENTITY Ouml   "Ö"> <!-- latin capital letter O with diaeresis,
+                                  U+00D6 ISOlat1 -->
+<!ENTITY times  "×"> <!-- multiplication sign, U+00D7 ISOnum -->
+<!ENTITY Oslash "Ø"> <!-- latin capital letter O with stroke
+                                  = latin capital letter O slash,
+                                  U+00D8 ISOlat1 -->
+<!ENTITY Ugrave "Ù"> <!-- latin capital letter U with grave,
+                                  U+00D9 ISOlat1 -->
+<!ENTITY Uacute "Ú"> <!-- latin capital letter U with acute,
+                                  U+00DA ISOlat1 -->
+<!ENTITY Ucirc  "Û"> <!-- latin capital letter U with circumflex,
+                                  U+00DB ISOlat1 -->
+<!ENTITY Uuml   "Ü"> <!-- latin capital letter U with diaeresis,
+                                  U+00DC ISOlat1 -->
+<!ENTITY Yacute "Ý"> <!-- latin capital letter Y with acute,
+                                  U+00DD ISOlat1 -->
+<!ENTITY THORN  "Þ"> <!-- latin capital letter THORN,
+                                  U+00DE ISOlat1 -->
+<!ENTITY szlig  "ß"> <!-- latin small letter sharp s = ess-zed,
+                                  U+00DF ISOlat1 -->
+<!ENTITY agrave "à"> <!-- latin small letter a with grave
+                                  = latin small letter a grave,
+                                  U+00E0 ISOlat1 -->
+<!ENTITY aacute "á"> <!-- latin small letter a with acute,
+                                  U+00E1 ISOlat1 -->
+<!ENTITY acirc  "â"> <!-- latin small letter a with circumflex,
+                                  U+00E2 ISOlat1 -->
+<!ENTITY atilde "ã"> <!-- latin small letter a with tilde,
+                                  U+00E3 ISOlat1 -->
+<!ENTITY auml   "ä"> <!-- latin small letter a with diaeresis,
+                                  U+00E4 ISOlat1 -->
+<!ENTITY aring  "å"> <!-- latin small letter a with ring above
+                                  = latin small letter a ring,
+                                  U+00E5 ISOlat1 -->
+<!ENTITY aelig  "æ"> <!-- latin small letter ae
+                                  = latin small ligature ae, U+00E6 ISOlat1 -->
+<!ENTITY ccedil "ç"> <!-- latin small letter c with cedilla,
+                                  U+00E7 ISOlat1 -->
+<!ENTITY egrave "è"> <!-- latin small letter e with grave,
+                                  U+00E8 ISOlat1 -->
+<!ENTITY eacute "é"> <!-- latin small letter e with acute,
+                                  U+00E9 ISOlat1 -->
+<!ENTITY ecirc  "ê"> <!-- latin small letter e with circumflex,
+                                  U+00EA ISOlat1 -->
+<!ENTITY euml   "ë"> <!-- latin small letter e with diaeresis,
+                                  U+00EB ISOlat1 -->
+<!ENTITY igrave "ì"> <!-- latin small letter i with grave,
+                                  U+00EC ISOlat1 -->
+<!ENTITY iacute "í"> <!-- latin small letter i with acute,
+                                  U+00ED ISOlat1 -->
+<!ENTITY icirc  "î"> <!-- latin small letter i with circumflex,
+                                  U+00EE ISOlat1 -->
+<!ENTITY iuml   "ï"> <!-- latin small letter i with diaeresis,
+                                  U+00EF ISOlat1 -->
+<!ENTITY eth    "ð"> <!-- latin small letter eth, U+00F0 ISOlat1 -->
+<!ENTITY ntilde "ñ"> <!-- latin small letter n with tilde,
+                                  U+00F1 ISOlat1 -->
+<!ENTITY ograve "ò"> <!-- latin small letter o with grave,
+                                  U+00F2 ISOlat1 -->
+<!ENTITY oacute "ó"> <!-- latin small letter o with acute,
+                                  U+00F3 ISOlat1 -->
+<!ENTITY ocirc  "ô"> <!-- latin small letter o with circumflex,
+                                  U+00F4 ISOlat1 -->
+<!ENTITY otilde "õ"> <!-- latin small letter o with tilde,
+                                  U+00F5 ISOlat1 -->
+<!ENTITY ouml   "ö"> <!-- latin small letter o with diaeresis,
+                                  U+00F6 ISOlat1 -->
+<!ENTITY divide "÷"> <!-- division sign, U+00F7 ISOnum -->
+<!ENTITY oslash "ø"> <!-- latin small letter o with stroke,
+                                  = latin small letter o slash,
+                                  U+00F8 ISOlat1 -->
+<!ENTITY ugrave "ù"> <!-- latin small letter u with grave,
+                                  U+00F9 ISOlat1 -->
+<!ENTITY uacute "ú"> <!-- latin small letter u with acute,
+                                  U+00FA ISOlat1 -->
+<!ENTITY ucirc  "û"> <!-- latin small letter u with circumflex,
+                                  U+00FB ISOlat1 -->
+<!ENTITY uuml   "ü"> <!-- latin small letter u with diaeresis,
+                                  U+00FC ISOlat1 -->
+<!ENTITY yacute "ý"> <!-- latin small letter y with acute,
+                                  U+00FD ISOlat1 -->
+<!ENTITY thorn  "þ"> <!-- latin small letter thorn,
+                                  U+00FE ISOlat1 -->
+<!ENTITY yuml   "ÿ"> <!-- latin small letter y with diaeresis,
+                                  U+00FF ISOlat1 -->
diff --git a/ThirdParty/Twisted/twisted/lore/xhtml-special.ent b/ThirdParty/Twisted/twisted/lore/xhtml-special.ent
new file mode 100644
index 0000000..ca358b2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/xhtml-special.ent
@@ -0,0 +1,80 @@
+<!-- Special characters for XHTML -->
+
+<!-- Character entity set. Typical invocation:
+     <!ENTITY % HTMLspecial PUBLIC
+        "-//W3C//ENTITIES Special for XHTML//EN"
+        "http://www.w3.org/TR/xhtml1/DTD/xhtml-special.ent">
+     %HTMLspecial;
+-->
+
+<!-- Portions (C) International Organization for Standardization 1986:
+     Permission to copy in any form is granted for use with
+     conforming SGML systems and applications as defined in
+     ISO 8879, provided this notice is included in all copies.
+-->
+
+<!-- Relevant ISO entity set is given unless names are newly introduced.
+     New names (i.e., not in ISO 8879 list) do not clash with any
+     existing ISO 8879 entity names. ISO 10646 character numbers
+     are given for each character, in hex. values are decimal
+     conversions of the ISO 10646 values and refer to the document
+     character set. Names are Unicode names. 
+-->
+
+<!-- C0 Controls and Basic Latin -->
+<!ENTITY quot    "&#34;"> <!--  quotation mark, U+0022 ISOnum -->
+<!ENTITY amp     "&#38;"> <!--  ampersand, U+0026 ISOnum -->
+<!ENTITY lt      "&#60;"> <!--  less-than sign, U+003C ISOnum -->
+<!ENTITY gt      ">"> <!--  greater-than sign, U+003E ISOnum -->
+<!ENTITY apos	 "'"> <!--  apostrophe = APL quote, U+0027 ISOnum -->
+
+<!-- Latin Extended-A -->
+<!ENTITY OElig   "Œ"> <!--  latin capital ligature OE,
+                                    U+0152 ISOlat2 -->
+<!ENTITY oelig   "œ"> <!--  latin small ligature oe, U+0153 ISOlat2 -->
+<!-- ligature is a misnomer, this is a separate character in some languages -->
+<!ENTITY Scaron  "Š"> <!--  latin capital letter S with caron,
+                                    U+0160 ISOlat2 -->
+<!ENTITY scaron  "š"> <!--  latin small letter s with caron,
+                                    U+0161 ISOlat2 -->
+<!ENTITY Yuml    "Ÿ"> <!--  latin capital letter Y with diaeresis,
+                                    U+0178 ISOlat2 -->
+
+<!-- Spacing Modifier Letters -->
+<!ENTITY circ    "ˆ"> <!--  modifier letter circumflex accent,
+                                    U+02C6 ISOpub -->
+<!ENTITY tilde   "˜"> <!--  small tilde, U+02DC ISOdia -->
+
+<!-- General Punctuation -->
+<!ENTITY ensp    "&#8194;"> <!-- en space, U+2002 ISOpub -->
+<!ENTITY emsp    "&#8195;"> <!-- em space, U+2003 ISOpub -->
+<!ENTITY thinsp  "&#8201;"> <!-- thin space, U+2009 ISOpub -->
+<!ENTITY zwnj    "&#8204;"> <!-- zero width non-joiner,
+                                    U+200C NEW RFC 2070 -->
+<!ENTITY zwj     "&#8205;"> <!-- zero width joiner, U+200D NEW RFC 2070 -->
+<!ENTITY lrm     "&#8206;"> <!-- left-to-right mark, U+200E NEW RFC 2070 -->
+<!ENTITY rlm     "&#8207;"> <!-- right-to-left mark, U+200F NEW RFC 2070 -->
+<!ENTITY ndash   "–"> <!-- en dash, U+2013 ISOpub -->
+<!ENTITY mdash   "—"> <!-- em dash, U+2014 ISOpub -->
+<!ENTITY lsquo   "‘"> <!-- left single quotation mark,
+                                    U+2018 ISOnum -->
+<!ENTITY rsquo   "’"> <!-- right single quotation mark,
+                                    U+2019 ISOnum -->
+<!ENTITY sbquo   "‚"> <!-- single low-9 quotation mark, U+201A NEW -->
+<!ENTITY ldquo   "“"> <!-- left double quotation mark,
+                                    U+201C ISOnum -->
+<!ENTITY rdquo   "”"> <!-- right double quotation mark,
+                                    U+201D ISOnum -->
+<!ENTITY bdquo   "„"> <!-- double low-9 quotation mark, U+201E NEW -->
+<!ENTITY dagger  "†"> <!-- dagger, U+2020 ISOpub -->
+<!ENTITY Dagger  "‡"> <!-- double dagger, U+2021 ISOpub -->
+<!ENTITY permil  "‰"> <!-- per mille sign, U+2030 ISOtech -->
+<!ENTITY lsaquo  "‹"> <!-- single left-pointing angle quotation mark,
+                                    U+2039 ISO proposed -->
+<!-- lsaquo is proposed but not yet ISO standardized -->
+<!ENTITY rsaquo  "›"> <!-- single right-pointing angle quotation mark,
+                                    U+203A ISO proposed -->
+<!-- rsaquo is proposed but not yet ISO standardized -->
+
+<!-- Currency Symbols -->
+<!ENTITY euro   "€"> <!--  euro sign, U+20AC NEW -->
diff --git a/ThirdParty/Twisted/twisted/lore/xhtml-symbol.ent b/ThirdParty/Twisted/twisted/lore/xhtml-symbol.ent
new file mode 100644
index 0000000..63c2abf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/xhtml-symbol.ent
@@ -0,0 +1,237 @@
+<!-- Mathematical, Greek and Symbolic characters for XHTML -->
+
+<!-- Character entity set. Typical invocation:
+     <!ENTITY % HTMLsymbol PUBLIC
+        "-//W3C//ENTITIES Symbols for XHTML//EN"
+        "http://www.w3.org/TR/xhtml1/DTD/xhtml-symbol.ent">
+     %HTMLsymbol;
+-->
+
+<!-- Portions (C) International Organization for Standardization 1986:
+     Permission to copy in any form is granted for use with
+     conforming SGML systems and applications as defined in
+     ISO 8879, provided this notice is included in all copies.
+-->
+
+<!-- Relevant ISO entity set is given unless names are newly introduced.
+     New names (i.e., not in ISO 8879 list) do not clash with any
+     existing ISO 8879 entity names. ISO 10646 character numbers
+     are given for each character, in hex. values are decimal
+     conversions of the ISO 10646 values and refer to the document
+     character set. Names are Unicode names. 
+-->
+
+<!-- Latin Extended-B -->
+<!ENTITY fnof     "ƒ"> <!-- latin small letter f with hook = function
+                                    = florin, U+0192 ISOtech -->
+
+<!-- Greek -->
+<!ENTITY Alpha    "Α"> <!-- greek capital letter alpha, U+0391 -->
+<!ENTITY Beta     "Β"> <!-- greek capital letter beta, U+0392 -->
+<!ENTITY Gamma    "Γ"> <!-- greek capital letter gamma,
+                                    U+0393 ISOgrk3 -->
+<!ENTITY Delta    "Δ"> <!-- greek capital letter delta,
+                                    U+0394 ISOgrk3 -->
+<!ENTITY Epsilon  "Ε"> <!-- greek capital letter epsilon, U+0395 -->
+<!ENTITY Zeta     "Ζ"> <!-- greek capital letter zeta, U+0396 -->
+<!ENTITY Eta      "Η"> <!-- greek capital letter eta, U+0397 -->
+<!ENTITY Theta    "Θ"> <!-- greek capital letter theta,
+                                    U+0398 ISOgrk3 -->
+<!ENTITY Iota     "Ι"> <!-- greek capital letter iota, U+0399 -->
+<!ENTITY Kappa    "Κ"> <!-- greek capital letter kappa, U+039A -->
+<!ENTITY Lambda   "Λ"> <!-- greek capital letter lamda,
+                                    U+039B ISOgrk3 -->
+<!ENTITY Mu       "Μ"> <!-- greek capital letter mu, U+039C -->
+<!ENTITY Nu       "Ν"> <!-- greek capital letter nu, U+039D -->
+<!ENTITY Xi       "Ξ"> <!-- greek capital letter xi, U+039E ISOgrk3 -->
+<!ENTITY Omicron  "Ο"> <!-- greek capital letter omicron, U+039F -->
+<!ENTITY Pi       "Π"> <!-- greek capital letter pi, U+03A0 ISOgrk3 -->
+<!ENTITY Rho      "Ρ"> <!-- greek capital letter rho, U+03A1 -->
+<!-- there is no Sigmaf, and no U+03A2 character either -->
+<!ENTITY Sigma    "Σ"> <!-- greek capital letter sigma,
+                                    U+03A3 ISOgrk3 -->
+<!ENTITY Tau      "Τ"> <!-- greek capital letter tau, U+03A4 -->
+<!ENTITY Upsilon  "Υ"> <!-- greek capital letter upsilon,
+                                    U+03A5 ISOgrk3 -->
+<!ENTITY Phi      "Φ"> <!-- greek capital letter phi,
+                                    U+03A6 ISOgrk3 -->
+<!ENTITY Chi      "Χ"> <!-- greek capital letter chi, U+03A7 -->
+<!ENTITY Psi      "Ψ"> <!-- greek capital letter psi,
+                                    U+03A8 ISOgrk3 -->
+<!ENTITY Omega    "Ω"> <!-- greek capital letter omega,
+                                    U+03A9 ISOgrk3 -->
+
+<!ENTITY alpha    "α"> <!-- greek small letter alpha,
+                                    U+03B1 ISOgrk3 -->
+<!ENTITY beta     "β"> <!-- greek small letter beta, U+03B2 ISOgrk3 -->
+<!ENTITY gamma    "γ"> <!-- greek small letter gamma,
+                                    U+03B3 ISOgrk3 -->
+<!ENTITY delta    "δ"> <!-- greek small letter delta,
+                                    U+03B4 ISOgrk3 -->
+<!ENTITY epsilon  "ε"> <!-- greek small letter epsilon,
+                                    U+03B5 ISOgrk3 -->
+<!ENTITY zeta     "ζ"> <!-- greek small letter zeta, U+03B6 ISOgrk3 -->
+<!ENTITY eta      "η"> <!-- greek small letter eta, U+03B7 ISOgrk3 -->
+<!ENTITY theta    "θ"> <!-- greek small letter theta,
+                                    U+03B8 ISOgrk3 -->
+<!ENTITY iota     "ι"> <!-- greek small letter iota, U+03B9 ISOgrk3 -->
+<!ENTITY kappa    "κ"> <!-- greek small letter kappa,
+                                    U+03BA ISOgrk3 -->
+<!ENTITY lambda   "λ"> <!-- greek small letter lamda,
+                                    U+03BB ISOgrk3 -->
+<!ENTITY mu       "μ"> <!-- greek small letter mu, U+03BC ISOgrk3 -->
+<!ENTITY nu       "ν"> <!-- greek small letter nu, U+03BD ISOgrk3 -->
+<!ENTITY xi       "ξ"> <!-- greek small letter xi, U+03BE ISOgrk3 -->
+<!ENTITY omicron  "ο"> <!-- greek small letter omicron, U+03BF NEW -->
+<!ENTITY pi       "π"> <!-- greek small letter pi, U+03C0 ISOgrk3 -->
+<!ENTITY rho      "ρ"> <!-- greek small letter rho, U+03C1 ISOgrk3 -->
+<!ENTITY sigmaf   "ς"> <!-- greek small letter final sigma,
+                                    U+03C2 ISOgrk3 -->
+<!ENTITY sigma    "σ"> <!-- greek small letter sigma,
+                                    U+03C3 ISOgrk3 -->
+<!ENTITY tau      "τ"> <!-- greek small letter tau, U+03C4 ISOgrk3 -->
+<!ENTITY upsilon  "υ"> <!-- greek small letter upsilon,
+                                    U+03C5 ISOgrk3 -->
+<!ENTITY phi      "φ"> <!-- greek small letter phi, U+03C6 ISOgrk3 -->
+<!ENTITY chi      "χ"> <!-- greek small letter chi, U+03C7 ISOgrk3 -->
+<!ENTITY psi      "ψ"> <!-- greek small letter psi, U+03C8 ISOgrk3 -->
+<!ENTITY omega    "ω"> <!-- greek small letter omega,
+                                    U+03C9 ISOgrk3 -->
+<!ENTITY thetasym "ϑ"> <!-- greek theta symbol,
+                                    U+03D1 NEW -->
+<!ENTITY upsih    "ϒ"> <!-- greek upsilon with hook symbol,
+                                    U+03D2 NEW -->
+<!ENTITY piv      "ϖ"> <!-- greek pi symbol, U+03D6 ISOgrk3 -->
+
+<!-- General Punctuation -->
+<!ENTITY bull     "•"> <!-- bullet = black small circle,
+                                     U+2022 ISOpub  -->
+<!-- bullet is NOT the same as bullet operator, U+2219 -->
+<!ENTITY hellip   "…"> <!-- horizontal ellipsis = three dot leader,
+                                     U+2026 ISOpub  -->
+<!ENTITY prime    "′"> <!-- prime = minutes = feet, U+2032 ISOtech -->
+<!ENTITY Prime    "″"> <!-- double prime = seconds = inches,
+                                     U+2033 ISOtech -->
+<!ENTITY oline    "‾"> <!-- overline = spacing overscore,
+                                     U+203E NEW -->
+<!ENTITY frasl    "⁄"> <!-- fraction slash, U+2044 NEW -->
+
+<!-- Letterlike Symbols -->
+<!ENTITY weierp   "℘"> <!-- script capital P = power set
+                                     = Weierstrass p, U+2118 ISOamso -->
+<!ENTITY image    "ℑ"> <!-- black-letter capital I = imaginary part,
+                                     U+2111 ISOamso -->
+<!ENTITY real     "ℜ"> <!-- black-letter capital R = real part symbol,
+                                     U+211C ISOamso -->
+<!ENTITY trade    "™"> <!-- trade mark sign, U+2122 ISOnum -->
+<!ENTITY alefsym  "ℵ"> <!-- alef symbol = first transfinite cardinal,
+                                     U+2135 NEW -->
+<!-- alef symbol is NOT the same as hebrew letter alef,
+     U+05D0 although the same glyph could be used to depict both characters -->
+
+<!-- Arrows -->
+<!ENTITY larr     "←"> <!-- leftwards arrow, U+2190 ISOnum -->
+<!ENTITY uarr     "↑"> <!-- upwards arrow, U+2191 ISOnum-->
+<!ENTITY rarr     "→"> <!-- rightwards arrow, U+2192 ISOnum -->
+<!ENTITY darr     "↓"> <!-- downwards arrow, U+2193 ISOnum -->
+<!ENTITY harr     "↔"> <!-- left right arrow, U+2194 ISOamsa -->
+<!ENTITY crarr    "↵"> <!-- downwards arrow with corner leftwards
+                                     = carriage return, U+21B5 NEW -->
+<!ENTITY lArr     "⇐"> <!-- leftwards double arrow, U+21D0 ISOtech -->
+<!-- Unicode does not say that lArr is the same as the 'is implied by' arrow
+    but also does not have any other character for that function. So lArr can
+    be used for 'is implied by' as ISOtech suggests -->
+<!ENTITY uArr     "⇑"> <!-- upwards double arrow, U+21D1 ISOamsa -->
+<!ENTITY rArr     "⇒"> <!-- rightwards double arrow,
+                                     U+21D2 ISOtech -->
+<!-- Unicode does not say this is the 'implies' character but does not have 
+     another character with this function so rArr can be used for 'implies'
+     as ISOtech suggests -->
+<!ENTITY dArr     "⇓"> <!-- downwards double arrow, U+21D3 ISOamsa -->
+<!ENTITY hArr     "⇔"> <!-- left right double arrow,
+                                     U+21D4 ISOamsa -->
+
+<!-- Mathematical Operators -->
+<!ENTITY forall   "∀"> <!-- for all, U+2200 ISOtech -->
+<!ENTITY part     "∂"> <!-- partial differential, U+2202 ISOtech  -->
+<!ENTITY exist    "∃"> <!-- there exists, U+2203 ISOtech -->
+<!ENTITY empty    "∅"> <!-- empty set = null set, U+2205 ISOamso -->
+<!ENTITY nabla    "∇"> <!-- nabla = backward difference,
+                                     U+2207 ISOtech -->
+<!ENTITY isin     "∈"> <!-- element of, U+2208 ISOtech -->
+<!ENTITY notin    "∉"> <!-- not an element of, U+2209 ISOtech -->
+<!ENTITY ni       "∋"> <!-- contains as member, U+220B ISOtech -->
+<!ENTITY prod     "∏"> <!-- n-ary product = product sign,
+                                     U+220F ISOamsb -->
+<!-- prod is NOT the same character as U+03A0 'greek capital letter pi' though
+     the same glyph might be used for both -->
+<!ENTITY sum      "∑"> <!-- n-ary summation, U+2211 ISOamsb -->
+<!-- sum is NOT the same character as U+03A3 'greek capital letter sigma'
+     though the same glyph might be used for both -->
+<!ENTITY minus    "−"> <!-- minus sign, U+2212 ISOtech -->
+<!ENTITY lowast   "∗"> <!-- asterisk operator, U+2217 ISOtech -->
+<!ENTITY radic    "√"> <!-- square root = radical sign,
+                                     U+221A ISOtech -->
+<!ENTITY prop     "∝"> <!-- proportional to, U+221D ISOtech -->
+<!ENTITY infin    "∞"> <!-- infinity, U+221E ISOtech -->
+<!ENTITY ang      "∠"> <!-- angle, U+2220 ISOamso -->
+<!ENTITY and      "∧"> <!-- logical and = wedge, U+2227 ISOtech -->
+<!ENTITY or       "∨"> <!-- logical or = vee, U+2228 ISOtech -->
+<!ENTITY cap      "∩"> <!-- intersection = cap, U+2229 ISOtech -->
+<!ENTITY cup      "∪"> <!-- union = cup, U+222A ISOtech -->
+<!ENTITY int      "∫"> <!-- integral, U+222B ISOtech -->
+<!ENTITY there4   "∴"> <!-- therefore, U+2234 ISOtech -->
+<!ENTITY sim      "∼"> <!-- tilde operator = varies with = similar to,
+                                     U+223C ISOtech -->
+<!-- tilde operator is NOT the same character as the tilde, U+007E,
+     although the same glyph might be used to represent both  -->
+<!ENTITY cong     "≅"> <!-- approximately equal to, U+2245 ISOtech -->
+<!ENTITY asymp    "≈"> <!-- almost equal to = asymptotic to,
+                                     U+2248 ISOamsr -->
+<!ENTITY ne       "≠"> <!-- not equal to, U+2260 ISOtech -->
+<!ENTITY equiv    "≡"> <!-- identical to, U+2261 ISOtech -->
+<!ENTITY le       "≤"> <!-- less-than or equal to, U+2264 ISOtech -->
+<!ENTITY ge       "≥"> <!-- greater-than or equal to,
+                                     U+2265 ISOtech -->
+<!ENTITY sub      "⊂"> <!-- subset of, U+2282 ISOtech -->
+<!ENTITY sup      "⊃"> <!-- superset of, U+2283 ISOtech -->
+<!ENTITY nsub     "⊄"> <!-- not a subset of, U+2284 ISOamsn -->
+<!ENTITY sube     "⊆"> <!-- subset of or equal to, U+2286 ISOtech -->
+<!ENTITY supe     "⊇"> <!-- superset of or equal to,
+                                     U+2287 ISOtech -->
+<!ENTITY oplus    "⊕"> <!-- circled plus = direct sum,
+                                     U+2295 ISOamsb -->
+<!ENTITY otimes   "⊗"> <!-- circled times = vector product,
+                                     U+2297 ISOamsb -->
+<!ENTITY perp     "⊥"> <!-- up tack = orthogonal to = perpendicular,
+                                     U+22A5 ISOtech -->
+<!ENTITY sdot     "⋅"> <!-- dot operator, U+22C5 ISOamsb -->
+<!-- dot operator is NOT the same character as U+00B7 middle dot -->
+
+<!-- Miscellaneous Technical -->
+<!ENTITY lceil    "⌈"> <!-- left ceiling = APL upstile,
+                                     U+2308 ISOamsc  -->
+<!ENTITY rceil    "⌉"> <!-- right ceiling, U+2309 ISOamsc  -->
+<!ENTITY lfloor   "⌊"> <!-- left floor = APL downstile,
+                                     U+230A ISOamsc  -->
+<!ENTITY rfloor   "⌋"> <!-- right floor, U+230B ISOamsc  -->
+<!ENTITY lang     "〈"> <!-- left-pointing angle bracket = bra,
+                                     U+2329 ISOtech -->
+<!-- lang is NOT the same character as U+003C 'less than sign' 
+     or U+2039 'single left-pointing angle quotation mark' -->
+<!ENTITY rang     "〉"> <!-- right-pointing angle bracket = ket,
+                                     U+232A ISOtech -->
+<!-- rang is NOT the same character as U+003E 'greater than sign' 
+     or U+203A 'single right-pointing angle quotation mark' -->
+
+<!-- Geometric Shapes -->
+<!ENTITY loz      "◊"> <!-- lozenge, U+25CA ISOpub -->
+
+<!-- Miscellaneous Symbols -->
+<!ENTITY spades   "♠"> <!-- black spade suit, U+2660 ISOpub -->
+<!-- black here seems to mean filled as opposed to hollow -->
+<!ENTITY clubs    "♣"> <!-- black club suit = shamrock,
+                                     U+2663 ISOpub -->
+<!ENTITY hearts   "♥"> <!-- black heart suit = valentine,
+                                     U+2665 ISOpub -->
+<!ENTITY diams    "♦"> <!-- black diamond suit, U+2666 ISOpub -->
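For illustration only, a hypothetical XHTML fragment using a few of the named symbol entities defined above (the sentence content is made up and not part of the upstream file):

    <p>If x &isin; A &cap; B, then x &isin; A &and; x &isin; B;
       hence A &cap; B &sube; A, and the chain of inclusions continues &hellip;</p>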
diff --git a/ThirdParty/Twisted/twisted/lore/xhtml1-strict.dtd b/ThirdParty/Twisted/twisted/lore/xhtml1-strict.dtd
new file mode 100644
index 0000000..2927b9e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/xhtml1-strict.dtd
@@ -0,0 +1,978 @@
+<!--
+   Extensible HTML version 1.0 Strict DTD
+
+   This is the same as HTML 4 Strict except for
+   changes due to the differences between XML and SGML.
+
+   Namespace = http://www.w3.org/1999/xhtml
+
+   For further information, see: http://www.w3.org/TR/xhtml1
+
+   Copyright (c) 1998-2002 W3C (MIT, INRIA, Keio),
+   All Rights Reserved. 
+
+   This DTD module is identified by the PUBLIC and SYSTEM identifiers:
+
+   PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
+   SYSTEM "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd"
+
+   $Revision: 1.1 $
+   $Date: 2002/08/01 13:56:03 $
+
+-->
+
+<!--================ Character mnemonic entities =========================-->
+
+<!ENTITY % HTMLlat1 PUBLIC
+   "-//W3C//ENTITIES Latin 1 for XHTML//EN"
+   "xhtml-lat1.ent">
+%HTMLlat1;
+
+<!ENTITY % HTMLsymbol PUBLIC
+   "-//W3C//ENTITIES Symbols for XHTML//EN"
+   "xhtml-symbol.ent">
+%HTMLsymbol;
+
+<!ENTITY % HTMLspecial PUBLIC
+   "-//W3C//ENTITIES Special for XHTML//EN"
+   "xhtml-special.ent">
+%HTMLspecial;
+
+<!--================== Imported Names ====================================-->
+
+<!ENTITY % ContentType "CDATA">
+    <!-- media type, as per [RFC2045] -->
+
+<!ENTITY % ContentTypes "CDATA">
+    <!-- comma-separated list of media types, as per [RFC2045] -->
+
+<!ENTITY % Charset "CDATA">
+    <!-- a character encoding, as per [RFC2045] -->
+
+<!ENTITY % Charsets "CDATA">
+    <!-- a space separated list of character encodings, as per [RFC2045] -->
+
+<!ENTITY % LanguageCode "NMTOKEN">
+    <!-- a language code, as per [RFC3066] -->
+
+<!ENTITY % Character "CDATA">
+    <!-- a single character, as per section 2.2 of [XML] -->
+
+<!ENTITY % Number "CDATA">
+    <!-- one or more digits -->
+
+<!ENTITY % LinkTypes "CDATA">
+    <!-- space-separated list of link types -->
+
+<!ENTITY % MediaDesc "CDATA">
+    <!-- single or comma-separated list of media descriptors -->
+
+<!ENTITY % URI "CDATA">
+    <!-- a Uniform Resource Identifier, see [RFC2396] -->
+
+<!ENTITY % UriList "CDATA">
+    <!-- a space separated list of Uniform Resource Identifiers -->
+
+<!ENTITY % Datetime "CDATA">
+    <!-- date and time information. ISO date format -->
+
+<!ENTITY % Script "CDATA">
+    <!-- script expression -->
+
+<!ENTITY % StyleSheet "CDATA">
+    <!-- style sheet data -->
+
+<!ENTITY % Text "CDATA">
+    <!-- used for titles etc. -->
+
+<!ENTITY % Length "CDATA">
+    <!-- nn for pixels or nn% for percentage length -->
+
+<!ENTITY % MultiLength "CDATA">
+    <!-- pixel, percentage, or relative -->
+
+<!ENTITY % Pixels "CDATA">
+    <!-- integer representing length in pixels -->
+
+<!-- these are used for image maps -->
+
+<!ENTITY % Shape "(rect|circle|poly|default)">
+
+<!ENTITY % Coords "CDATA">
+    <!-- comma separated list of lengths -->
+
+<!--=================== Generic Attributes ===============================-->
+
+<!-- core attributes common to most elements
+  id       document-wide unique id
+  class    space separated list of classes
+  style    associated style info
+  title    advisory title/amplification
+-->
+<!ENTITY % coreattrs
+ "id          ID             #IMPLIED
+  class       CDATA          #IMPLIED
+  style       %StyleSheet;   #IMPLIED
+  title       %Text;         #IMPLIED"
+  >
+
+<!-- internationalization attributes
+  lang        language code (backwards compatible)
+  xml:lang    language code (as per XML 1.0 spec)
+  dir         direction for weak/neutral text
+-->
+<!ENTITY % i18n
+ "lang        %LanguageCode; #IMPLIED
+  xml:lang    %LanguageCode; #IMPLIED
+  dir         (ltr|rtl)      #IMPLIED"
+  >
+
+<!-- attributes for common UI events
+  onclick     a pointer button was clicked
+  ondblclick  a pointer button was double clicked
+  onmousedown a pointer button was pressed down
+  onmouseup   a pointer button was released
+  onmouseover a pointer was moved onto the element
+  onmousemove a pointer was moved within the element
+  onmouseout  a pointer was moved away from the element
+  onkeypress  a key was pressed and released
+  onkeydown   a key was pressed down
+  onkeyup     a key was released
+-->
+<!ENTITY % events
+ "onclick     %Script;       #IMPLIED
+  ondblclick  %Script;       #IMPLIED
+  onmousedown %Script;       #IMPLIED
+  onmouseup   %Script;       #IMPLIED
+  onmouseover %Script;       #IMPLIED
+  onmousemove %Script;       #IMPLIED
+  onmouseout  %Script;       #IMPLIED
+  onkeypress  %Script;       #IMPLIED
+  onkeydown   %Script;       #IMPLIED
+  onkeyup     %Script;       #IMPLIED"
+  >
+
+<!-- attributes for elements that can get the focus
+  accesskey   accessibility key character
+  tabindex    position in tabbing order
+  onfocus     the element got the focus
+  onblur      the element lost the focus
+-->
+<!ENTITY % focus
+ "accesskey   %Character;    #IMPLIED
+  tabindex    %Number;       #IMPLIED
+  onfocus     %Script;       #IMPLIED
+  onblur      %Script;       #IMPLIED"
+  >
+
+<!ENTITY % attrs "%coreattrs; %i18n; %events;">
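To see what %attrs; amounts to once %coreattrs;, %i18n; and %events; are expanded, here is a hypothetical element carrying one attribute from each group (the id/class values and the toggle() handler are invented, not defined by this DTD):

    <!-- toggle() is a hypothetical script hook, not part of this DTD -->
    <div id="summary" class="note important" title="Key results"
         lang="en" xml:lang="en" dir="ltr" onclick="toggle(this)">
      <p>Collapsible summary text.</p>
    </div>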
+
+<!--=================== Text Elements ====================================-->
+
+<!ENTITY % special.pre
+   "br | span | bdo | map">
+
+
+<!ENTITY % special
+   "%special.pre; | object | img ">
+
+<!ENTITY % fontstyle "tt | i | b | big | small ">
+
+<!ENTITY % phrase "em | strong | dfn | code | q |
+                   samp | kbd | var | cite | abbr | acronym | sub | sup ">
+
+<!ENTITY % inline.forms "input | select | textarea | label | button">
+
+<!-- these can occur at block or inline level -->
+<!ENTITY % misc.inline "ins | del | script">
+
+<!-- these can only occur at block level -->
+<!ENTITY % misc "noscript | %misc.inline;">
+
+<!ENTITY % inline "a | %special; | %fontstyle; | %phrase; | %inline.forms;">
+
+<!-- %Inline; covers inline or "text-level" elements -->
+<!ENTITY % Inline "(#PCDATA | %inline; | %misc.inline;)*">
+
+<!--================== Block level elements ==============================-->
+
+<!ENTITY % heading "h1|h2|h3|h4|h5|h6">
+<!ENTITY % lists "ul | ol | dl">
+<!ENTITY % blocktext "pre | hr | blockquote | address">
+
+<!ENTITY % block
+     "p | %heading; | div | %lists; | %blocktext; | fieldset | table">
+
+<!ENTITY % Block "(%block; | form | %misc;)*">
+
+<!-- %Flow; mixes block and inline and is used for list items etc. -->
+<!ENTITY % Flow "(#PCDATA | %block; | form | %inline; | %misc;)*">
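As a sketch of the difference between these models: a div takes %Flow; and may therefore mix character data, inline markup and block children, whereas a p takes %Inline; and may not contain block elements such as ul. A made-up fragment that is valid under these models:

    <div>
      Introductory text with <em>inline</em> markup,
      <p>followed by a block-level paragraph,</p>
      <ul>
        <li>and a list item, whose own content model is %Flow;.</li>
      </ul>
    </div>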
+
+<!--================== Content models for exclusions =====================-->
+
+<!-- a elements use %Inline; excluding a -->
+
+<!ENTITY % a.content
+   "(#PCDATA | %special; | %fontstyle; | %phrase; | %inline.forms; | %misc.inline;)*">
+
+<!-- pre uses %Inline excluding img, object, big, small, sub or sup -->
+
+<!ENTITY % pre.content
+   "(#PCDATA | a | %fontstyle; | %phrase; | %special.pre; | %misc.inline;
+      | %inline.forms;)*">
+
+<!-- form uses %Block; excluding form -->
+
+<!ENTITY % form.content "(%block; | %misc;)*">
+
+<!-- button uses %Flow; but excludes a, form and form controls -->
+
+<!ENTITY % button.content
+   "(#PCDATA | p | %heading; | div | %lists; | %blocktext; |
+    table | %special; | %fontstyle; | %phrase; | %misc;)*">
+
+<!--================ Document Structure ==================================-->
+
+<!-- the namespace URI designates the document profile -->
+
+<!ELEMENT html (head, body)>
+<!ATTLIST html
+  %i18n;
+  id          ID             #IMPLIED
+  xmlns       %URI;          #FIXED 'http://www.w3.org/1999/xhtml'
+  >
+
+<!--================ Document Head =======================================-->
+
+<!ENTITY % head.misc "(script|style|meta|link|object)*">
+
+<!-- content model is %head.misc; combined with a single
+     title and an optional base element in any order -->
+
+<!ELEMENT head (%head.misc;,
+     ((title, %head.misc;, (base, %head.misc;)?) |
+      (base, %head.misc;, (title, %head.misc;))))>
+
+<!ATTLIST head
+  %i18n;
+  id          ID             #IMPLIED
+  profile     %URI;          #IMPLIED
+  >
+
+<!-- The title element is not considered part of the flow of text.
+       It should be displayed, for example as the page header or
+       window title. Exactly one title is required per document.
+    -->
+<!ELEMENT title (#PCDATA)>
+<!ATTLIST title 
+  %i18n;
+  id          ID             #IMPLIED
+  >
+
+<!-- document base URI -->
+
+<!ELEMENT base EMPTY>
+<!ATTLIST base
+  href        %URI;          #REQUIRED
+  id          ID             #IMPLIED
+  >
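The head content model above reads awkwardly but simply says: any number of script/style/meta/link/object elements may surround exactly one title and at most one base, with title and base in either order. A hypothetical head that satisfies it (the URLs are made up):

    <head>
      <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
      <title>Example page</title>
      <base href="http://example.org/docs/" />
      <link rel="stylesheet" type="text/css" href="site.css" />
    </head>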
+
+<!-- generic metainformation -->
+<!ELEMENT meta EMPTY>
+<!ATTLIST meta
+  %i18n;
+  id          ID             #IMPLIED
+  http-equiv  CDATA          #IMPLIED
+  name        CDATA          #IMPLIED
+  content     CDATA          #REQUIRED
+  scheme      CDATA          #IMPLIED
+  >
+
+<!--
+  Relationship values can be used in principle:
+
+   a) for document specific toolbars/menus when used
+      with the link element in document head e.g.
+        start, contents, previous, next, index, end, help
+   b) to link to a separate style sheet (rel="stylesheet")
+   c) to make a link to a script (rel="script")
+   d) by stylesheets to control how collections of
+      html nodes are rendered into printed documents
+   e) to make a link to a printable version of this document
+      e.g. a PostScript or PDF version (rel="alternate" media="print")
+-->
+
+<!ELEMENT link EMPTY>
+<!ATTLIST link
+  %attrs;
+  charset     %Charset;      #IMPLIED
+  href        %URI;          #IMPLIED
+  hreflang    %LanguageCode; #IMPLIED
+  type        %ContentType;  #IMPLIED
+  rel         %LinkTypes;    #IMPLIED
+  rev         %LinkTypes;    #IMPLIED
+  media       %MediaDesc;    #IMPLIED
+  >
+
+<!-- style info, which may include CDATA sections -->
+<!ELEMENT style (#PCDATA)>
+<!ATTLIST style
+  %i18n;
+  id          ID             #IMPLIED
+  type        %ContentType;  #REQUIRED
+  media       %MediaDesc;    #IMPLIED
+  title       %Text;         #IMPLIED
+  xml:space   (preserve)     #FIXED 'preserve'
+  >
+
+<!-- script statements, which may include CDATA sections -->
+<!ELEMENT script (#PCDATA)>
+<!ATTLIST script
+  id          ID             #IMPLIED
+  charset     %Charset;      #IMPLIED
+  type        %ContentType;  #REQUIRED
+  src         %URI;          #IMPLIED
+  defer       (defer)        #IMPLIED
+  xml:space   (preserve)     #FIXED 'preserve'
+  >
+
+<!-- alternate content container for non script-based rendering -->
+
+<!ELEMENT noscript %Block;>
+<!ATTLIST noscript
+  %attrs;
+  >
+
+<!--=================== Document Body ====================================-->
+
+<!ELEMENT body %Block;>
+<!ATTLIST body
+  %attrs;
+  onload          %Script;   #IMPLIED
+  onunload        %Script;   #IMPLIED
+  >
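Putting the structural declarations together, a minimal document that should validate against this strict DTD looks as follows; note that body takes %Block;, so character data must be wrapped in a block element such as p (the text is, of course, made up):

    <?xml version="1.0" encoding="UTF-8"?>
    <!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
    <html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
      <head>
        <title>Minimal strict document</title>
      </head>
      <body>
        <p>Hello, world.</p>
      </body>
    </html>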
+
+<!ELEMENT div %Flow;>  <!-- generic language/style container -->
+<!ATTLIST div
+  %attrs;
+  >
+
+<!--=================== Paragraphs =======================================-->
+
+<!ELEMENT p %Inline;>
+<!ATTLIST p
+  %attrs;
+  >
+
+<!--=================== Headings =========================================-->
+
+<!--
+  There are six levels of headings from h1 (the most important)
+  to h6 (the least important).
+-->
+
+<!ELEMENT h1  %Inline;>
+<!ATTLIST h1
+   %attrs;
+   >
+
+<!ELEMENT h2 %Inline;>
+<!ATTLIST h2
+   %attrs;
+   >
+
+<!ELEMENT h3 %Inline;>
+<!ATTLIST h3
+   %attrs;
+   >
+
+<!ELEMENT h4 %Inline;>
+<!ATTLIST h4
+   %attrs;
+   >
+
+<!ELEMENT h5 %Inline;>
+<!ATTLIST h5
+   %attrs;
+   >
+
+<!ELEMENT h6 %Inline;>
+<!ATTLIST h6
+   %attrs;
+   >
+
+<!--=================== Lists ============================================-->
+
+<!-- Unordered list -->
+
+<!ELEMENT ul (li)+>
+<!ATTLIST ul
+  %attrs;
+  >
+
+<!-- Ordered (numbered) list -->
+
+<!ELEMENT ol (li)+>
+<!ATTLIST ol
+  %attrs;
+  >
+
+<!-- list item -->
+
+<!ELEMENT li %Flow;>
+<!ATTLIST li
+  %attrs;
+  >
+
+<!-- definition lists - dt for term, dd for its definition -->
+
+<!ELEMENT dl (dt|dd)+>
+<!ATTLIST dl
+  %attrs;
+  >
+
+<!ELEMENT dt %Inline;>
+<!ATTLIST dt
+  %attrs;
+  >
+
+<!ELEMENT dd %Flow;>
+<!ATTLIST dd
+  %attrs;
+  >
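A made-up fragment exercising the three list types; li takes %Flow;, so a list item may itself contain block content such as a nested ordered list:

    <ul>
      <li>First item</li>
      <li>Second item, with a nested list:
        <ol>
          <li>step one</li>
          <li>step two</li>
        </ol>
      </li>
    </ul>
    <dl>
      <dt>DTD</dt>
      <dd>Document Type Definition.</dd>
    </dl>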
+
+<!--=================== Address ==========================================-->
+
+<!-- information on author -->
+
+<!ELEMENT address %Inline;>
+<!ATTLIST address
+  %attrs;
+  >
+
+<!--=================== Horizontal Rule ==================================-->
+
+<!ELEMENT hr EMPTY>
+<!ATTLIST hr
+  %attrs;
+  >
+
+<!--=================== Preformatted Text ================================-->
+
+<!-- content is %Inline; excluding "img|object|big|small|sub|sup" -->
+
+<!ELEMENT pre %pre.content;>
+<!ATTLIST pre
+  %attrs;
+  xml:space (preserve) #FIXED 'preserve'
+  >
+
+<!--=================== Block-like Quotes ================================-->
+
+<!ELEMENT blockquote %Block;>
+<!ATTLIST blockquote
+  %attrs;
+  cite        %URI;          #IMPLIED
+  >
+
+<!--=================== Inserted/Deleted Text ============================-->
+
+<!--
+  ins/del are allowed in block and inline content, but it is
+  inappropriate to include block content within an ins element
+  occurring in inline content.
+-->
+<!ELEMENT ins %Flow;>
+<!ATTLIST ins
+  %attrs;
+  cite        %URI;          #IMPLIED
+  datetime    %Datetime;     #IMPLIED
+  >
+
+<!ELEMENT del %Flow;>
+<!ATTLIST del
+  %attrs;
+  cite        %URI;          #IMPLIED
+  datetime    %Datetime;     #IMPLIED
+  >
+
+<!--================== The Anchor Element ================================-->
+
+<!-- content is %Inline; except that anchors shouldn't be nested -->
+
+<!ELEMENT a %a.content;>
+<!ATTLIST a
+  %attrs;
+  %focus;
+  charset     %Charset;      #IMPLIED
+  type        %ContentType;  #IMPLIED
+  name        NMTOKEN        #IMPLIED
+  href        %URI;          #IMPLIED
+  hreflang    %LanguageCode; #IMPLIED
+  rel         %LinkTypes;    #IMPLIED
+  rev         %LinkTypes;    #IMPLIED
+  shape       %Shape;        "rect"
+  coords      %Coords;       #IMPLIED
+  >
+
+<!--===================== Inline Elements ================================-->
+
+<!ELEMENT span %Inline;> <!-- generic language/style container -->
+<!ATTLIST span
+  %attrs;
+  >
+
+<!ELEMENT bdo %Inline;>  <!-- I18N BiDi over-ride -->
+<!ATTLIST bdo
+  %coreattrs;
+  %events;
+  lang        %LanguageCode; #IMPLIED
+  xml:lang    %LanguageCode; #IMPLIED
+  dir         (ltr|rtl)      #REQUIRED
+  >
+
+<!ELEMENT br EMPTY>   <!-- forced line break -->
+<!ATTLIST br
+  %coreattrs;
+  >
+
+<!ELEMENT em %Inline;>   <!-- emphasis -->
+<!ATTLIST em %attrs;>
+
+<!ELEMENT strong %Inline;>   <!-- strong emphasis -->
+<!ATTLIST strong %attrs;>
+
+<!ELEMENT dfn %Inline;>   <!-- definitional -->
+<!ATTLIST dfn %attrs;>
+
+<!ELEMENT code %Inline;>   <!-- program code -->
+<!ATTLIST code %attrs;>
+
+<!ELEMENT samp %Inline;>   <!-- sample -->
+<!ATTLIST samp %attrs;>
+
+<!ELEMENT kbd %Inline;>  <!-- something user would type -->
+<!ATTLIST kbd %attrs;>
+
+<!ELEMENT var %Inline;>   <!-- variable -->
+<!ATTLIST var %attrs;>
+
+<!ELEMENT cite %Inline;>   <!-- citation -->
+<!ATTLIST cite %attrs;>
+
+<!ELEMENT abbr %Inline;>   <!-- abbreviation -->
+<!ATTLIST abbr %attrs;>
+
+<!ELEMENT acronym %Inline;>   <!-- acronym -->
+<!ATTLIST acronym %attrs;>
+
+<!ELEMENT q %Inline;>   <!-- inlined quote -->
+<!ATTLIST q
+  %attrs;
+  cite        %URI;          #IMPLIED
+  >
+
+<!ELEMENT sub %Inline;> <!-- subscript -->
+<!ATTLIST sub %attrs;>
+
+<!ELEMENT sup %Inline;> <!-- superscript -->
+<!ATTLIST sup %attrs;>
+
+<!ELEMENT tt %Inline;>   <!-- fixed pitch font -->
+<!ATTLIST tt %attrs;>
+
+<!ELEMENT i %Inline;>   <!-- italic font -->
+<!ATTLIST i %attrs;>
+
+<!ELEMENT b %Inline;>   <!-- bold font -->
+<!ATTLIST b %attrs;>
+
+<!ELEMENT big %Inline;>   <!-- bigger font -->
+<!ATTLIST big %attrs;>
+
+<!ELEMENT small %Inline;>   <!-- smaller font -->
+<!ATTLIST small %attrs;>
+
+<!--==================== Object ======================================-->
+<!--
+  object is used to embed objects as part of HTML pages.
+  param elements should precede other content. Parameters
+  can also be expressed as attribute/value pairs on the
+  object element itself when brevity is desired.
+-->
+
+<!ELEMENT object (#PCDATA | param | %block; | form | %inline; | %misc;)*>
+<!ATTLIST object
+  %attrs;
+  declare     (declare)      #IMPLIED
+  classid     %URI;          #IMPLIED
+  codebase    %URI;          #IMPLIED
+  data        %URI;          #IMPLIED
+  type        %ContentType;  #IMPLIED
+  codetype    %ContentType;  #IMPLIED
+  archive     %UriList;      #IMPLIED
+  standby     %Text;         #IMPLIED
+  height      %Length;       #IMPLIED
+  width       %Length;       #IMPLIED
+  usemap      %URI;          #IMPLIED
+  name        NMTOKEN        #IMPLIED
+  tabindex    %Number;       #IMPLIED
+  >
+
+<!--
+  param is used to supply a named property value.
+  In XML it would seem natural to follow RDF and support an
+  abbreviated syntax where the param elements are replaced
+  by attribute value pairs on the object start tag.
+-->
+<!ELEMENT param EMPTY>
+<!ATTLIST param
+  id          ID             #IMPLIED
+  name        CDATA          #IMPLIED
+  value       CDATA          #IMPLIED
+  valuetype   (data|ref|object) "data"
+  type        %ContentType;  #IMPLIED
+  >
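For illustration, a hypothetical object element following the recommendation above that param elements precede other content; the data URL, parameter name and fallback text are invented:

    <object data="movie.svg" type="image/svg+xml" width="400" height="300">
      <param name="autoplay" value="false" />
      <p>The embedded object cannot be displayed;
         <a href="movie.svg">download it</a> instead.</p>
    </object>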
+
+<!--=================== Images ===========================================-->
+
+<!--
+   To avoid accessibility problems for people who aren't
+   able to see the image, you should provide a text
+   description using the alt and longdesc attributes.
+   In addition, avoid the use of server-side image maps.
+   Note that in this DTD there is no name attribute. That
+   is only available in the transitional and frameset DTD.
+-->
+
+<!ELEMENT img EMPTY>
+<!ATTLIST img
+  %attrs;
+  src         %URI;          #REQUIRED
+  alt         %Text;         #REQUIRED
+  longdesc    %URI;          #IMPLIED
+  height      %Length;       #IMPLIED
+  width       %Length;       #IMPLIED
+  usemap      %URI;          #IMPLIED
+  ismap       (ismap)        #IMPLIED
+  >
+
+<!-- usemap points to a map element which may be in this document
+  or an external document, although the latter is not widely supported -->
+
+<!--================== Client-side image maps ============================-->
+
+<!-- These can be placed in the same document or grouped in a
+     separate document although this isn't yet widely supported -->
+
+<!ELEMENT map ((%block; | form | %misc;)+ | area+)>
+<!ATTLIST map
+  %i18n;
+  %events;
+  id          ID             #REQUIRED
+  class       CDATA          #IMPLIED
+  style       %StyleSheet;   #IMPLIED
+  title       %Text;         #IMPLIED
+  name        NMTOKEN        #IMPLIED
+  >
+
+<!ELEMENT area EMPTY>
+<!ATTLIST area
+  %attrs;
+  %focus;
+  shape       %Shape;        "rect"
+  coords      %Coords;       #IMPLIED
+  href        %URI;          #IMPLIED
+  nohref      (nohref)       #IMPLIED
+  alt         %Text;         #REQUIRED
+  >
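A sketch of a client-side image map wired together as described above: the img usemap attribute points at the map element's id, and every area carries the required alt text (file names and coordinates are made up):

    <p>
      <img src="toolbar.png" alt="Navigation toolbar" usemap="#nav" />
      <map id="nav">
        <area shape="rect"   coords="0,0,80,30" href="home.html" alt="Home" />
        <area shape="circle" coords="120,15,14" href="help.html" alt="Help" />
      </map>
    </p>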
+
+<!--================ Forms ===============================================-->
+<!ELEMENT form %form.content;>   <!-- forms shouldn't be nested -->
+
+<!ATTLIST form
+  %attrs;
+  action      %URI;          #REQUIRED
+  method      (get|post)     "get"
+  enctype     %ContentType;  "application/x-www-form-urlencoded"
+  onsubmit    %Script;       #IMPLIED
+  onreset     %Script;       #IMPLIED
+  accept      %ContentTypes; #IMPLIED
+  accept-charset %Charsets;  #IMPLIED
+  >
+
+<!--
+  Each label must not contain more than ONE field.
+  Label elements shouldn't be nested.
+-->
+<!ELEMENT label %Inline;>
+<!ATTLIST label
+  %attrs;
+  for         IDREF          #IMPLIED
+  accesskey   %Character;    #IMPLIED
+  onfocus     %Script;       #IMPLIED
+  onblur      %Script;       #IMPLIED
+  >
+
+<!ENTITY % InputType
+  "(text | password | checkbox |
+    radio | submit | reset |
+    file | hidden | image | button)"
+   >
+
+<!-- the name attribute is required for all but submit & reset -->
+
+<!ELEMENT input EMPTY>     <!-- form control -->
+<!ATTLIST input
+  %attrs;
+  %focus;
+  type        %InputType;    "text"
+  name        CDATA          #IMPLIED
+  value       CDATA          #IMPLIED
+  checked     (checked)      #IMPLIED
+  disabled    (disabled)     #IMPLIED
+  readonly    (readonly)     #IMPLIED
+  size        CDATA          #IMPLIED
+  maxlength   %Number;       #IMPLIED
+  src         %URI;          #IMPLIED
+  alt         CDATA          #IMPLIED
+  usemap      %URI;          #IMPLIED
+  onselect    %Script;       #IMPLIED
+  onchange    %Script;       #IMPLIED
+  accept      %ContentTypes; #IMPLIED
+  >
+
+<!ELEMENT select (optgroup|option)+>  <!-- option selector -->
+<!ATTLIST select
+  %attrs;
+  name        CDATA          #IMPLIED
+  size        %Number;       #IMPLIED
+  multiple    (multiple)     #IMPLIED
+  disabled    (disabled)     #IMPLIED
+  tabindex    %Number;       #IMPLIED
+  onfocus     %Script;       #IMPLIED
+  onblur      %Script;       #IMPLIED
+  onchange    %Script;       #IMPLIED
+  >
+
+<!ELEMENT optgroup (option)+>   <!-- option group -->
+<!ATTLIST optgroup
+  %attrs;
+  disabled    (disabled)     #IMPLIED
+  label       %Text;         #REQUIRED
+  >
+
+<!ELEMENT option (#PCDATA)>     <!-- selectable choice -->
+<!ATTLIST option
+  %attrs;
+  selected    (selected)     #IMPLIED
+  disabled    (disabled)     #IMPLIED
+  label       %Text;         #IMPLIED
+  value       CDATA          #IMPLIED
+  >
+
+<!ELEMENT textarea (#PCDATA)>     <!-- multi-line text field -->
+<!ATTLIST textarea
+  %attrs;
+  %focus;
+  name        CDATA          #IMPLIED
+  rows        %Number;       #REQUIRED
+  cols        %Number;       #REQUIRED
+  disabled    (disabled)     #IMPLIED
+  readonly    (readonly)     #IMPLIED
+  onselect    %Script;       #IMPLIED
+  onchange    %Script;       #IMPLIED
+  >
+
+<!--
+  The fieldset element is used to group form fields.
+  Only one legend element should occur in the content
+  and if present should only be preceded by whitespace.
+-->
+<!ELEMENT fieldset (#PCDATA | legend | %block; | form | %inline; | %misc;)*>
+<!ATTLIST fieldset
+  %attrs;
+  >
+
+<!ELEMENT legend %Inline;>     <!-- fieldset label -->
+<!ATTLIST legend
+  %attrs;
+  accesskey   %Character;    #IMPLIED
+  >
+
+<!--
+ Content is %Flow; excluding a, form and form controls
+--> 
+<!ELEMENT button %button.content;>  <!-- push button -->
+<!ATTLIST button
+  %attrs;
+  %focus;
+  name        CDATA          #IMPLIED
+  value       CDATA          #IMPLIED
+  type        (button|submit|reset) "submit"
+  disabled    (disabled)     #IMPLIED
+  >
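A hypothetical form that respects the content models above: in this strict DTD form content is (%block; | %misc;)*, so controls must sit inside a block element such as fieldset or p, and legend comes first inside fieldset (the action URL and field names are made up):

    <form action="http://example.org/search" method="get">
      <fieldset>
        <legend>Search</legend>
        <p>
          <label for="q">Query</label>
          <input type="text" name="q" id="q" size="30" />
          <input type="submit" value="Go" />
        </p>
      </fieldset>
    </form>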
+
+<!--======================= Tables =======================================-->
+
+<!-- Derived from IETF HTML table standard, see [RFC1942] -->
+
+<!--
+ The border attribute sets the thickness of the frame around the
+ table. The default units are screen pixels.
+
+ The frame attribute specifies which parts of the frame around
+ the table should be rendered. The values are not the same as
+ CALS to avoid a name clash with the valign attribute.
+-->
+<!ENTITY % TFrame "(void|above|below|hsides|lhs|rhs|vsides|box|border)">
+
+<!--
+ The rules attribute defines which rules to draw between cells:
+
+ If rules is absent then assume:
+     "none" if border is absent or border="0" otherwise "all"
+-->
+
+<!ENTITY % TRules "(none | groups | rows | cols | all)">
+  
+<!-- horizontal alignment attributes for cell contents
+
+  char        alignment char, e.g. char=':'
+  charoff     offset for alignment char
+-->
+<!ENTITY % cellhalign
+  "align      (left|center|right|justify|char) #IMPLIED
+   char       %Character;    #IMPLIED
+   charoff    %Length;       #IMPLIED"
+  >
+
+<!-- vertical alignment attributes for cell contents -->
+<!ENTITY % cellvalign
+  "valign     (top|middle|bottom|baseline) #IMPLIED"
+  >
+
+<!ELEMENT table
+     (caption?, (col*|colgroup*), thead?, tfoot?, (tbody+|tr+))>
+<!ELEMENT caption  %Inline;>
+<!ELEMENT thead    (tr)+>
+<!ELEMENT tfoot    (tr)+>
+<!ELEMENT tbody    (tr)+>
+<!ELEMENT colgroup (col)*>
+<!ELEMENT col      EMPTY>
+<!ELEMENT tr       (th|td)+>
+<!ELEMENT th       %Flow;>
+<!ELEMENT td       %Flow;>
+
+<!ATTLIST table
+  %attrs;
+  summary     %Text;         #IMPLIED
+  width       %Length;       #IMPLIED
+  border      %Pixels;       #IMPLIED
+  frame       %TFrame;       #IMPLIED
+  rules       %TRules;       #IMPLIED
+  cellspacing %Length;       #IMPLIED
+  cellpadding %Length;       #IMPLIED
+  >
+
+<!ATTLIST caption
+  %attrs;
+  >
+
+<!--
+colgroup groups a set of col elements. It allows you to group
+several semantically related columns together.
+-->
+<!ATTLIST colgroup
+  %attrs;
+  span        %Number;       "1"
+  width       %MultiLength;  #IMPLIED
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!--
+ col elements define the alignment properties for cells in
+ one or more columns.
+
+ The width attribute specifies the width of the columns, e.g.
+
+     width=64        width in screen pixels
+     width=0.5*      relative width of 0.5
+
+ The span attribute causes the attributes of one
+ col element to apply to more than one column.
+-->
+<!ATTLIST col
+  %attrs;
+  span        %Number;       "1"
+  width       %MultiLength;  #IMPLIED
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!--
+    Use thead to duplicate headers when breaking table
+    across page boundaries, or for static headers when
+    tbody sections are rendered in a scrolling panel.
+
+    Use tfoot to duplicate footers when breaking table
+    across page boundaries, or for static footers when
+    tbody sections are rendered in a scrolling panel.
+
+    Use multiple tbody sections when rules are needed
+    between groups of table rows.
+-->
+<!ATTLIST thead
+  %attrs;
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!ATTLIST tfoot
+  %attrs;
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!ATTLIST tbody
+  %attrs;
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!ATTLIST tr
+  %attrs;
+  %cellhalign;
+  %cellvalign;
+  >
+
+
+<!-- Scope is simpler than headers attribute for common tables -->
+<!ENTITY % Scope "(row|col|rowgroup|colgroup)">
+
+<!-- th is for headers, td for data and for cells acting as both -->
+
+<!ATTLIST th
+  %attrs;
+  abbr        %Text;         #IMPLIED
+  axis        CDATA          #IMPLIED
+  headers     IDREFS         #IMPLIED
+  scope       %Scope;        #IMPLIED
+  rowspan     %Number;       "1"
+  colspan     %Number;       "1"
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!ATTLIST td
+  %attrs;
+  abbr        %Text;         #IMPLIED
+  axis        CDATA          #IMPLIED
+  headers     IDREFS         #IMPLIED
+  scope       %Scope;        #IMPLIED
+  rowspan     %Number;       "1"
+  colspan     %Number;       "1"
+  %cellhalign;
+  %cellvalign;
+  >
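Pulling the table declarations together, a made-up table that matches the content model (caption?, (col*|colgroup*), thead?, tfoot?, (tbody+|tr+)) and uses scope on header cells as suggested above; the figures are invented:

    <table summary="Quarterly revenue by region">
      <caption>Revenue (kUSD)</caption>
      <thead>
        <tr><th scope="col">Region</th><th scope="col">Q1</th><th scope="col">Q2</th></tr>
      </thead>
      <tbody>
        <tr><th scope="row">North</th><td>120</td><td>135</td></tr>
        <tr><th scope="row">South</th><td>98</td><td>102</td></tr>
      </tbody>
    </table>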
+
diff --git a/ThirdParty/Twisted/twisted/lore/xhtml1-transitional.dtd b/ThirdParty/Twisted/twisted/lore/xhtml1-transitional.dtd
new file mode 100644
index 0000000..628f27a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/lore/xhtml1-transitional.dtd
@@ -0,0 +1,1201 @@
+<!--
+   Extensible HTML version 1.0 Transitional DTD
+
+   This is the same as HTML 4 Transitional except for
+   changes due to the differences between XML and SGML.
+
+   Namespace = http://www.w3.org/1999/xhtml
+
+   For further information, see: http://www.w3.org/TR/xhtml1
+
+   Copyright (c) 1998-2002 W3C (MIT, INRIA, Keio),
+   All Rights Reserved. 
+
+   This DTD module is identified by the PUBLIC and SYSTEM identifiers:
+
+   PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
+   SYSTEM "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"
+
+   $Revision: 1.2 $
+   $Date: 2002/08/01 18:37:55 $
+
+-->
+
+<!--================ Character mnemonic entities =========================-->
+
+<!ENTITY % HTMLlat1 PUBLIC
+   "-//W3C//ENTITIES Latin 1 for XHTML//EN"
+   "xhtml-lat1.ent">
+%HTMLlat1;
+
+<!ENTITY % HTMLsymbol PUBLIC
+   "-//W3C//ENTITIES Symbols for XHTML//EN"
+   "xhtml-symbol.ent">
+%HTMLsymbol;
+
+<!ENTITY % HTMLspecial PUBLIC
+   "-//W3C//ENTITIES Special for XHTML//EN"
+   "xhtml-special.ent">
+%HTMLspecial;
+
+<!--================== Imported Names ====================================-->
+
+<!ENTITY % ContentType "CDATA">
+    <!-- media type, as per [RFC2045] -->
+
+<!ENTITY % ContentTypes "CDATA">
+    <!-- comma-separated list of media types, as per [RFC2045] -->
+
+<!ENTITY % Charset "CDATA">
+    <!-- a character encoding, as per [RFC2045] -->
+
+<!ENTITY % Charsets "CDATA">
+    <!-- a space separated list of character encodings, as per [RFC2045] -->
+
+<!ENTITY % LanguageCode "NMTOKEN">
+    <!-- a language code, as per [RFC3066] -->
+
+<!ENTITY % Character "CDATA">
+    <!-- a single character, as per section 2.2 of [XML] -->
+
+<!ENTITY % Number "CDATA">
+    <!-- one or more digits -->
+
+<!ENTITY % LinkTypes "CDATA">
+    <!-- space-separated list of link types -->
+
+<!ENTITY % MediaDesc "CDATA">
+    <!-- single or comma-separated list of media descriptors -->
+
+<!ENTITY % URI "CDATA">
+    <!-- a Uniform Resource Identifier, see [RFC2396] -->
+
+<!ENTITY % UriList "CDATA">
+    <!-- a space separated list of Uniform Resource Identifiers -->
+
+<!ENTITY % Datetime "CDATA">
+    <!-- date and time information. ISO date format -->
+
+<!ENTITY % Script "CDATA">
+    <!-- script expression -->
+
+<!ENTITY % StyleSheet "CDATA">
+    <!-- style sheet data -->
+
+<!ENTITY % Text "CDATA">
+    <!-- used for titles etc. -->
+
+<!ENTITY % FrameTarget "NMTOKEN">
+    <!-- render in this frame -->
+
+<!ENTITY % Length "CDATA">
+    <!-- nn for pixels or nn% for percentage length -->
+
+<!ENTITY % MultiLength "CDATA">
+    <!-- pixel, percentage, or relative -->
+
+<!ENTITY % Pixels "CDATA">
+    <!-- integer representing length in pixels -->
+
+<!-- these are used for image maps -->
+
+<!ENTITY % Shape "(rect|circle|poly|default)">
+
+<!ENTITY % Coords "CDATA">
+    <!-- comma separated list of lengths -->
+
+<!-- used for object, applet, img, input and iframe -->
+<!ENTITY % ImgAlign "(top|middle|bottom|left|right)">
+
+<!-- a color using sRGB: #RRGGBB as Hex values -->
+<!ENTITY % Color "CDATA">
+
+<!-- There are also 16 widely known color names with their sRGB values:
+
+    Black  = #000000    Green  = #008000
+    Silver = #C0C0C0    Lime   = #00FF00
+    Gray   = #808080    Olive  = #808000
+    White  = #FFFFFF    Yellow = #FFFF00
+    Maroon = #800000    Navy   = #000080
+    Red    = #FF0000    Blue   = #0000FF
+    Purple = #800080    Teal   = #008080
+    Fuchsia= #FF00FF    Aqua   = #00FFFF
+-->
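For illustration, a hypothetical use of %Color; values, mixing hex triplets and the named colors listed above (these presentational attributes exist only in this transitional DTD):

    <body bgcolor="#FFFFFF" text="#000000" link="navy" vlink="purple" alink="red">
      <p><font color="maroon" face="Helvetica, sans-serif" size="4">Styled text</font></p>
    </body>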
+
+<!--=================== Generic Attributes ===============================-->
+
+<!-- core attributes common to most elements
+  id       document-wide unique id
+  class    space separated list of classes
+  style    associated style info
+  title    advisory title/amplification
+-->
+<!ENTITY % coreattrs
+ "id          ID             #IMPLIED
+  class       CDATA          #IMPLIED
+  style       %StyleSheet;   #IMPLIED
+  title       %Text;         #IMPLIED"
+  >
+
+<!-- internationalization attributes
+  lang        language code (backwards compatible)
+  xml:lang    language code (as per XML 1.0 spec)
+  dir         direction for weak/neutral text
+-->
+<!ENTITY % i18n
+ "lang        %LanguageCode; #IMPLIED
+  xml:lang    %LanguageCode; #IMPLIED
+  dir         (ltr|rtl)      #IMPLIED"
+  >
+
+<!-- attributes for common UI events
+  onclick     a pointer button was clicked
+  ondblclick  a pointer button was double clicked
+  onmousedown a pointer button was pressed down
+  onmouseup   a pointer button was released
+  onmouseover a pointer was moved onto the element
+  onmousemove a pointer was moved within the element
+  onmouseout  a pointer was moved away from the element
+  onkeypress  a key was pressed and released
+  onkeydown   a key was pressed down
+  onkeyup     a key was released
+-->
+<!ENTITY % events
+ "onclick     %Script;       #IMPLIED
+  ondblclick  %Script;       #IMPLIED
+  onmousedown %Script;       #IMPLIED
+  onmouseup   %Script;       #IMPLIED
+  onmouseover %Script;       #IMPLIED
+  onmousemove %Script;       #IMPLIED
+  onmouseout  %Script;       #IMPLIED
+  onkeypress  %Script;       #IMPLIED
+  onkeydown   %Script;       #IMPLIED
+  onkeyup     %Script;       #IMPLIED"
+  >
+
+<!-- attributes for elements that can get the focus
+  accesskey   accessibility key character
+  tabindex    position in tabbing order
+  onfocus     the element got the focus
+  onblur      the element lost the focus
+-->
+<!ENTITY % focus
+ "accesskey   %Character;    #IMPLIED
+  tabindex    %Number;       #IMPLIED
+  onfocus     %Script;       #IMPLIED
+  onblur      %Script;       #IMPLIED"
+  >
+
+<!ENTITY % attrs "%coreattrs; %i18n; %events;">
+
+<!-- text alignment for p, div, h1-h6. The default is
+     align="left" for ltr headings, "right" for rtl -->
+
+<!ENTITY % TextAlign "align (left|center|right|justify) #IMPLIED">
+
+<!--=================== Text Elements ====================================-->
+
+<!ENTITY % special.extra
+   "object | applet | img | map | iframe">
+	
+<!ENTITY % special.basic
+	"br | span | bdo">
+
+<!ENTITY % special
+   "%special.basic; | %special.extra;">
+
+<!ENTITY % fontstyle.extra "big | small | font | basefont">
+
+<!ENTITY % fontstyle.basic "tt | i | b | u
+                      | s | strike ">
+
+<!ENTITY % fontstyle "%fontstyle.basic; | %fontstyle.extra;">
+
+<!ENTITY % phrase.extra "sub | sup">
+<!ENTITY % phrase.basic "em | strong | dfn | code | q |
+                   samp | kbd | var | cite | abbr | acronym">
+
+<!ENTITY % phrase "%phrase.basic; | %phrase.extra;">
+
+<!ENTITY % inline.forms "input | select | textarea | label | button">
+
+<!-- these can occur at block or inline level -->
+<!ENTITY % misc.inline "ins | del | script">
+
+<!-- these can only occur at block level -->
+<!ENTITY % misc "noscript | %misc.inline;">
+
+<!ENTITY % inline "a | %special; | %fontstyle; | %phrase; | %inline.forms;">
+
+<!-- %Inline; covers inline or "text-level" elements -->
+<!ENTITY % Inline "(#PCDATA | %inline; | %misc.inline;)*">
+
+<!--================== Block level elements ==============================-->
+
+<!ENTITY % heading "h1|h2|h3|h4|h5|h6">
+<!ENTITY % lists "ul | ol | dl | menu | dir">
+<!ENTITY % blocktext "pre | hr | blockquote | address | center | noframes">
+
+<!ENTITY % block
+    "p | %heading; | div | %lists; | %blocktext; | isindex |fieldset | table">
+
+<!-- %Flow; mixes block and inline and is used for list items etc. -->
+<!ENTITY % Flow "(#PCDATA | %block; | form | %inline; | %misc;)*">
+
+<!--================== Content models for exclusions =====================-->
+
+<!-- a elements use %Inline; excluding a -->
+
+<!ENTITY % a.content
+   "(#PCDATA | %special; | %fontstyle; | %phrase; | %inline.forms; | %misc.inline;)*">
+
+<!-- pre uses %Inline excluding img, object, applet, big, small,
+     sub, sup, font, or basefont -->
+
+<!ENTITY % pre.content
+   "(#PCDATA | a | %special.basic; | %fontstyle.basic; | %phrase.basic; |
+	   %inline.forms; | %misc.inline;)*">
+
+<!-- form uses %Flow; excluding form -->
+
+<!ENTITY % form.content "(#PCDATA | %block; | %inline; | %misc;)*">
+
+<!-- button uses %Flow; but excludes a, form, form controls, iframe -->
+
+<!ENTITY % button.content
+   "(#PCDATA | p | %heading; | div | %lists; | %blocktext; |
+      table | br | span | bdo | object | applet | img | map |
+      %fontstyle; | %phrase; | %misc;)*">
+
+<!--================ Document Structure ==================================-->
+
+<!-- the namespace URI designates the document profile -->
+
+<!ELEMENT html (head, body)>
+<!ATTLIST html
+  %i18n;
+  id          ID             #IMPLIED
+  xmlns       %URI;          #FIXED 'http://www.w3.org/1999/xhtml'
+  >
+
+<!--================ Document Head =======================================-->
+
+<!ENTITY % head.misc "(script|style|meta|link|object|isindex)*">
+
+<!-- content model is %head.misc; combined with a single
+     title and an optional base element in any order -->
+
+<!ELEMENT head (%head.misc;,
+     ((title, %head.misc;, (base, %head.misc;)?) |
+      (base, %head.misc;, (title, %head.misc;))))>
+
+<!ATTLIST head
+  %i18n;
+  id          ID             #IMPLIED
+  profile     %URI;          #IMPLIED
+  >
+
+<!-- The title element is not considered part of the flow of text.
+       It should be displayed, for example as the page header or
+       window title. Exactly one title is required per document.
+    -->
+<!ELEMENT title (#PCDATA)>
+<!ATTLIST title 
+  %i18n;
+  id          ID             #IMPLIED
+  >
+
+<!-- document base URI -->
+
+<!ELEMENT base EMPTY>
+<!ATTLIST base
+  id          ID             #IMPLIED
+  href        %URI;          #IMPLIED
+  target      %FrameTarget;  #IMPLIED
+  >
+
+<!-- generic metainformation -->
+<!ELEMENT meta EMPTY>
+<!ATTLIST meta
+  %i18n;
+  id          ID             #IMPLIED
+  http-equiv  CDATA          #IMPLIED
+  name        CDATA          #IMPLIED
+  content     CDATA          #REQUIRED
+  scheme      CDATA          #IMPLIED
+  >
+
+<!--
+  Relationship values can be used in principle:
+
+   a) for document specific toolbars/menus when used
+      with the link element in document head e.g.
+        start, contents, previous, next, index, end, help
+   b) to link to a separate style sheet (rel="stylesheet")
+   c) to make a link to a script (rel="script")
+   d) by stylesheets to control how collections of
+      html nodes are rendered into printed documents
+   e) to make a link to a printable version of this document
+      e.g. a PostScript or PDF version (rel="alternate" media="print")
+-->
+
+<!ELEMENT link EMPTY>
+<!ATTLIST link
+  %attrs;
+  charset     %Charset;      #IMPLIED
+  href        %URI;          #IMPLIED
+  hreflang    %LanguageCode; #IMPLIED
+  type        %ContentType;  #IMPLIED
+  rel         %LinkTypes;    #IMPLIED
+  rev         %LinkTypes;    #IMPLIED
+  media       %MediaDesc;    #IMPLIED
+  target      %FrameTarget;  #IMPLIED
+  >
+
+<!-- style info, which may include CDATA sections -->
+<!ELEMENT style (#PCDATA)>
+<!ATTLIST style
+  %i18n;
+  id          ID             #IMPLIED
+  type        %ContentType;  #REQUIRED
+  media       %MediaDesc;    #IMPLIED
+  title       %Text;         #IMPLIED
+  xml:space   (preserve)     #FIXED 'preserve'
+  >
+
+<!-- script statements, which may include CDATA sections -->
+<!ELEMENT script (#PCDATA)>
+<!ATTLIST script
+  id          ID             #IMPLIED
+  charset     %Charset;      #IMPLIED
+  type        %ContentType;  #REQUIRED
+  language    CDATA          #IMPLIED
+  src         %URI;          #IMPLIED
+  defer       (defer)        #IMPLIED
+  xml:space   (preserve)     #FIXED 'preserve'
+  >
+
+<!-- alternate content container for non script-based rendering -->
+
+<!ELEMENT noscript %Flow;>
+<!ATTLIST noscript
+  %attrs;
+  >
+
+<!--======================= Frames =======================================-->
+
+<!-- inline subwindow -->
+
+<!ELEMENT iframe %Flow;>
+<!ATTLIST iframe
+  %coreattrs;
+  longdesc    %URI;          #IMPLIED
+  name        NMTOKEN        #IMPLIED
+  src         %URI;          #IMPLIED
+  frameborder (1|0)          "1"
+  marginwidth %Pixels;       #IMPLIED
+  marginheight %Pixels;      #IMPLIED
+  scrolling   (yes|no|auto)  "auto"
+  align       %ImgAlign;     #IMPLIED
+  height      %Length;       #IMPLIED
+  width       %Length;       #IMPLIED
+  >
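A sketch of an inline frame with fallback content; iframe content is %Flow; and is rendered only by user agents that do not support frames (the file name is made up):

    <iframe src="report.html" name="report" width="100%" height="400"
            frameborder="0" scrolling="auto">
      Inline frames are not supported;
      <a href="report.html">view the report directly</a>.
    </iframe>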
+
+<!-- alternate content container for non frame-based rendering -->
+
+<!ELEMENT noframes %Flow;>
+<!ATTLIST noframes
+  %attrs;
+  >
+
+<!--=================== Document Body ====================================-->
+
+<!ELEMENT body %Flow;>
+<!ATTLIST body
+  %attrs;
+  onload      %Script;       #IMPLIED
+  onunload    %Script;       #IMPLIED
+  background  %URI;          #IMPLIED
+  bgcolor     %Color;        #IMPLIED
+  text        %Color;        #IMPLIED
+  link        %Color;        #IMPLIED
+  vlink       %Color;        #IMPLIED
+  alink       %Color;        #IMPLIED
+  >
+
+<!ELEMENT div %Flow;>  <!-- generic language/style container -->
+<!ATTLIST div
+  %attrs;
+  %TextAlign;
+  >
+
+<!--=================== Paragraphs =======================================-->
+
+<!ELEMENT p %Inline;>
+<!ATTLIST p
+  %attrs;
+  %TextAlign;
+  >
+
+<!--=================== Headings =========================================-->
+
+<!--
+  There are six levels of headings from h1 (the most important)
+  to h6 (the least important).
+-->
+
+<!ELEMENT h1  %Inline;>
+<!ATTLIST h1
+  %attrs;
+  %TextAlign;
+  >
+
+<!ELEMENT h2 %Inline;>
+<!ATTLIST h2
+  %attrs;
+  %TextAlign;
+  >
+
+<!ELEMENT h3 %Inline;>
+<!ATTLIST h3
+  %attrs;
+  %TextAlign;
+  >
+
+<!ELEMENT h4 %Inline;>
+<!ATTLIST h4
+  %attrs;
+  %TextAlign;
+  >
+
+<!ELEMENT h5 %Inline;>
+<!ATTLIST h5
+  %attrs;
+  %TextAlign;
+  >
+
+<!ELEMENT h6 %Inline;>
+<!ATTLIST h6
+  %attrs;
+  %TextAlign;
+  >
+
+<!--=================== Lists ============================================-->
+
+<!-- Unordered list bullet styles -->
+
+<!ENTITY % ULStyle "(disc|square|circle)">
+
+<!-- Unordered list -->
+
+<!ELEMENT ul (li)+>
+<!ATTLIST ul
+  %attrs;
+  type        %ULStyle;     #IMPLIED
+  compact     (compact)     #IMPLIED
+  >
+
+<!-- Ordered list numbering style
+
+    1   arabic numbers      1, 2, 3, ...
+    a   lower alpha         a, b, c, ...
+    A   upper alpha         A, B, C, ...
+    i   lower roman         i, ii, iii, ...
+    I   upper roman         I, II, III, ...
+
+    The style is applied to the sequence number which by default
+    is reset to 1 for the first list item in an ordered list.
+-->
+<!ENTITY % OLStyle "CDATA">
+
+<!-- Ordered (numbered) list -->
+
+<!ELEMENT ol (li)+>
+<!ATTLIST ol
+  %attrs;
+  type        %OLStyle;      #IMPLIED
+  compact     (compact)      #IMPLIED
+  start       %Number;       #IMPLIED
+  >
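A made-up example of the numbering controls described above: type selects the numbering style and start resets the sequence number, so the first item below renders as "iii":

    <ol type="i" start="3">
      <li>third item, numbered iii</li>
      <li>fourth item, numbered iv</li>
    </ol>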
+
+<!-- single column list (DEPRECATED) --> 
+<!ELEMENT menu (li)+>
+<!ATTLIST menu
+  %attrs;
+  compact     (compact)     #IMPLIED
+  >
+
+<!-- multiple column list (DEPRECATED) --> 
+<!ELEMENT dir (li)+>
+<!ATTLIST dir
+  %attrs;
+  compact     (compact)     #IMPLIED
+  >
+
+<!-- LIStyle is constrained to: "(%ULStyle;|%OLStyle;)" -->
+<!ENTITY % LIStyle "CDATA">
+
+<!-- list item -->
+
+<!ELEMENT li %Flow;>
+<!ATTLIST li
+  %attrs;
+  type        %LIStyle;      #IMPLIED
+  value       %Number;       #IMPLIED
+  >
+
+<!-- definition lists - dt for term, dd for its definition -->
+
+<!ELEMENT dl (dt|dd)+>
+<!ATTLIST dl
+  %attrs;
+  compact     (compact)      #IMPLIED
+  >
+
+<!ELEMENT dt %Inline;>
+<!ATTLIST dt
+  %attrs;
+  >
+
+<!ELEMENT dd %Flow;>
+<!ATTLIST dd
+  %attrs;
+  >
+
+<!--=================== Address ==========================================-->
+
+<!-- information on author -->
+
+<!ELEMENT address (#PCDATA | %inline; | %misc.inline; | p)*>
+<!ATTLIST address
+  %attrs;
+  >
+
+<!--=================== Horizontal Rule ==================================-->
+
+<!ELEMENT hr EMPTY>
+<!ATTLIST hr
+  %attrs;
+  align       (left|center|right) #IMPLIED
+  noshade     (noshade)      #IMPLIED
+  size        %Pixels;       #IMPLIED
+  width       %Length;       #IMPLIED
+  >
+
+<!--=================== Preformatted Text ================================-->
+
+<!-- content is %Inline; excluding 
+        "img|object|applet|big|small|sub|sup|font|basefont" -->
+
+<!ELEMENT pre %pre.content;>
+<!ATTLIST pre
+  %attrs;
+  width       %Number;      #IMPLIED
+  xml:space   (preserve)    #FIXED 'preserve'
+  >
+
+<!--=================== Block-like Quotes ================================-->
+
+<!ELEMENT blockquote %Flow;>
+<!ATTLIST blockquote
+  %attrs;
+  cite        %URI;          #IMPLIED
+  >
+
+<!--=================== Text alignment ===================================-->
+
+<!-- center content -->
+<!ELEMENT center %Flow;>
+<!ATTLIST center
+  %attrs;
+  >
+
+<!--=================== Inserted/Deleted Text ============================-->
+
+<!--
+  ins/del are allowed in block and inline content, but it is
+  inappropriate to include block content within an ins element
+  occurring in inline content.
+-->
+<!ELEMENT ins %Flow;>
+<!ATTLIST ins
+  %attrs;
+  cite        %URI;          #IMPLIED
+  datetime    %Datetime;     #IMPLIED
+  >
+
+<!ELEMENT del %Flow;>
+<!ATTLIST del
+  %attrs;
+  cite        %URI;          #IMPLIED
+  datetime    %Datetime;     #IMPLIED
+  >
+
+<!--================== The Anchor Element ================================-->
+
+<!-- content is %Inline; except that anchors shouldn't be nested -->
+
+<!ELEMENT a %a.content;>
+<!ATTLIST a
+  %attrs;
+  %focus;
+  charset     %Charset;      #IMPLIED
+  type        %ContentType;  #IMPLIED
+  name        NMTOKEN        #IMPLIED
+  href        %URI;          #IMPLIED
+  hreflang    %LanguageCode; #IMPLIED
+  rel         %LinkTypes;    #IMPLIED
+  rev         %LinkTypes;    #IMPLIED
+  shape       %Shape;        "rect"
+  coords      %Coords;       #IMPLIED
+  target      %FrameTarget;  #IMPLIED
+  >
+
+<!--===================== Inline Elements ================================-->
+
+<!ELEMENT span %Inline;> <!-- generic language/style container -->
+<!ATTLIST span
+  %attrs;
+  >
+
+<!ELEMENT bdo %Inline;>  <!-- I18N BiDi over-ride -->
+<!ATTLIST bdo
+  %coreattrs;
+  %events;
+  lang        %LanguageCode; #IMPLIED
+  xml:lang    %LanguageCode; #IMPLIED
+  dir         (ltr|rtl)      #REQUIRED
+  >
+
+<!ELEMENT br EMPTY>   <!-- forced line break -->
+<!ATTLIST br
+  %coreattrs;
+  clear       (left|all|right|none) "none"
+  >
+
+<!ELEMENT em %Inline;>   <!-- emphasis -->
+<!ATTLIST em %attrs;>
+
+<!ELEMENT strong %Inline;>   <!-- strong emphasis -->
+<!ATTLIST strong %attrs;>
+
+<!ELEMENT dfn %Inline;>   <!-- definitional -->
+<!ATTLIST dfn %attrs;>
+
+<!ELEMENT code %Inline;>   <!-- program code -->
+<!ATTLIST code %attrs;>
+
+<!ELEMENT samp %Inline;>   <!-- sample -->
+<!ATTLIST samp %attrs;>
+
+<!ELEMENT kbd %Inline;>  <!-- something user would type -->
+<!ATTLIST kbd %attrs;>
+
+<!ELEMENT var %Inline;>   <!-- variable -->
+<!ATTLIST var %attrs;>
+
+<!ELEMENT cite %Inline;>   <!-- citation -->
+<!ATTLIST cite %attrs;>
+
+<!ELEMENT abbr %Inline;>   <!-- abbreviation -->
+<!ATTLIST abbr %attrs;>
+
+<!ELEMENT acronym %Inline;>   <!-- acronym -->
+<!ATTLIST acronym %attrs;>
+
+<!ELEMENT q %Inline;>   <!-- inlined quote -->
+<!ATTLIST q
+  %attrs;
+  cite        %URI;          #IMPLIED
+  >
+
+<!ELEMENT sub %Inline;> <!-- subscript -->
+<!ATTLIST sub %attrs;>
+
+<!ELEMENT sup %Inline;> <!-- superscript -->
+<!ATTLIST sup %attrs;>
+
+<!ELEMENT tt %Inline;>   <!-- fixed pitch font -->
+<!ATTLIST tt %attrs;>
+
+<!ELEMENT i %Inline;>   <!-- italic font -->
+<!ATTLIST i %attrs;>
+
+<!ELEMENT b %Inline;>   <!-- bold font -->
+<!ATTLIST b %attrs;>
+
+<!ELEMENT big %Inline;>   <!-- bigger font -->
+<!ATTLIST big %attrs;>
+
+<!ELEMENT small %Inline;>   <!-- smaller font -->
+<!ATTLIST small %attrs;>
+
+<!ELEMENT u %Inline;>   <!-- underline -->
+<!ATTLIST u %attrs;>
+
+<!ELEMENT s %Inline;>   <!-- strike-through -->
+<!ATTLIST s %attrs;>
+
+<!ELEMENT strike %Inline;>   <!-- strike-through -->
+<!ATTLIST strike %attrs;>
+
+<!ELEMENT basefont EMPTY>  <!-- base font size -->
+<!ATTLIST basefont
+  id          ID             #IMPLIED
+  size        CDATA          #REQUIRED
+  color       %Color;        #IMPLIED
+  face        CDATA          #IMPLIED
+  >
+
+<!ELEMENT font %Inline;> <!-- local change to font -->
+<!ATTLIST font
+  %coreattrs;
+  %i18n;
+  size        CDATA          #IMPLIED
+  color       %Color;        #IMPLIED
+  face        CDATA          #IMPLIED
+  >
+
+<!--==================== Object ======================================-->
+<!--
+  object is used to embed objects as part of HTML pages.
+  param elements should precede other content. Parameters
+  can also be expressed as attribute/value pairs on the
+  object element itself when brevity is desired.
+-->
+
+<!ELEMENT object (#PCDATA | param | %block; | form | %inline; | %misc;)*>
+<!ATTLIST object
+  %attrs;
+  declare     (declare)      #IMPLIED
+  classid     %URI;          #IMPLIED
+  codebase    %URI;          #IMPLIED
+  data        %URI;          #IMPLIED
+  type        %ContentType;  #IMPLIED
+  codetype    %ContentType;  #IMPLIED
+  archive     %UriList;      #IMPLIED
+  standby     %Text;         #IMPLIED
+  height      %Length;       #IMPLIED
+  width       %Length;       #IMPLIED
+  usemap      %URI;          #IMPLIED
+  name        NMTOKEN        #IMPLIED
+  tabindex    %Number;       #IMPLIED
+  align       %ImgAlign;     #IMPLIED
+  border      %Pixels;       #IMPLIED
+  hspace      %Pixels;       #IMPLIED
+  vspace      %Pixels;       #IMPLIED
+  >
+
+<!--
+  param is used to supply a named property value.
+  In XML it would seem natural to follow RDF and support an
+  abbreviated syntax where the param elements are replaced
+  by attribute value pairs on the object start tag.
+-->
+<!ELEMENT param EMPTY>
+<!ATTLIST param
+  id          ID             #IMPLIED
+  name        CDATA          #REQUIRED
+  value       CDATA          #IMPLIED
+  valuetype   (data|ref|object) "data"
+  type        %ContentType;  #IMPLIED
+  >
+
+<!--=================== Java applet ==================================-->
+<!--
+  One of code or object attributes must be present.
+  Place param elements before other content.
+-->
+<!ELEMENT applet (#PCDATA | param | %block; | form | %inline; | %misc;)*>
+<!ATTLIST applet
+  %coreattrs;
+  codebase    %URI;          #IMPLIED
+  archive     CDATA          #IMPLIED
+  code        CDATA          #IMPLIED
+  object      CDATA          #IMPLIED
+  alt         %Text;         #IMPLIED
+  name        NMTOKEN        #IMPLIED
+  width       %Length;       #REQUIRED
+  height      %Length;       #REQUIRED
+  align       %ImgAlign;     #IMPLIED
+  hspace      %Pixels;       #IMPLIED
+  vspace      %Pixels;       #IMPLIED
+  >
+
+<!--=================== Images ===========================================-->
+
+<!--
+   To avoid accessibility problems for people who aren't
+   able to see the image, you should provide a text
+   description using the alt and longdesc attributes.
+   In addition, avoid the use of server-side image maps.
+-->
+
+<!ELEMENT img EMPTY>
+<!ATTLIST img
+  %attrs;
+  src         %URI;          #REQUIRED
+  alt         %Text;         #REQUIRED
+  name        NMTOKEN        #IMPLIED
+  longdesc    %URI;          #IMPLIED
+  height      %Length;       #IMPLIED
+  width       %Length;       #IMPLIED
+  usemap      %URI;          #IMPLIED
+  ismap       (ismap)        #IMPLIED
+  align       %ImgAlign;     #IMPLIED
+  border      %Length;       #IMPLIED
+  hspace      %Pixels;       #IMPLIED
+  vspace      %Pixels;       #IMPLIED
+  >
+
+<!-- usemap points to a map element which may be in this document
+  or an external document, although the latter is not widely supported -->
+
+<!--================== Client-side image maps ============================-->
+
+<!-- These can be placed in the same document or grouped in a
+     separate document although this isn't yet widely supported -->
+
+<!ELEMENT map ((%block; | form | %misc;)+ | area+)>
+<!ATTLIST map
+  %i18n;
+  %events;
+  id          ID             #REQUIRED
+  class       CDATA          #IMPLIED
+  style       %StyleSheet;   #IMPLIED
+  title       %Text;         #IMPLIED
+  name        CDATA          #IMPLIED
+  >
+
+<!ELEMENT area EMPTY>
+<!ATTLIST area
+  %attrs;
+  %focus;
+  shape       %Shape;        "rect"
+  coords      %Coords;       #IMPLIED
+  href        %URI;          #IMPLIED
+  nohref      (nohref)       #IMPLIED
+  alt         %Text;         #REQUIRED
+  target      %FrameTarget;  #IMPLIED
+  >
+
+<!--================ Forms ===============================================-->
+
+<!ELEMENT form %form.content;>   <!-- forms shouldn't be nested -->
+
+<!ATTLIST form
+  %attrs;
+  action      %URI;          #REQUIRED
+  method      (get|post)     "get"
+  name        NMTOKEN        #IMPLIED
+  enctype     %ContentType;  "application/x-www-form-urlencoded"
+  onsubmit    %Script;       #IMPLIED
+  onreset     %Script;       #IMPLIED
+  accept      %ContentTypes; #IMPLIED
+  accept-charset %Charsets;  #IMPLIED
+  target      %FrameTarget;  #IMPLIED
+  >
+
+<!--
+  Each label must not contain more than ONE field
+  Label elements shouldn't be nested.
+-->
+<!ELEMENT label %Inline;>
+<!ATTLIST label
+  %attrs;
+  for         IDREF          #IMPLIED
+  accesskey   %Character;    #IMPLIED
+  onfocus     %Script;       #IMPLIED
+  onblur      %Script;       #IMPLIED
+  >
+
+<!ENTITY % InputType
+  "(text | password | checkbox |
+    radio | submit | reset |
+    file | hidden | image | button)"
+   >
+
+<!-- the name attribute is required for all but submit & reset -->
+
+<!ELEMENT input EMPTY>     <!-- form control -->
+<!ATTLIST input
+  %attrs;
+  %focus;
+  type        %InputType;    "text"
+  name        CDATA          #IMPLIED
+  value       CDATA          #IMPLIED
+  checked     (checked)      #IMPLIED
+  disabled    (disabled)     #IMPLIED
+  readonly    (readonly)     #IMPLIED
+  size        CDATA          #IMPLIED
+  maxlength   %Number;       #IMPLIED
+  src         %URI;          #IMPLIED
+  alt         CDATA          #IMPLIED
+  usemap      %URI;          #IMPLIED
+  onselect    %Script;       #IMPLIED
+  onchange    %Script;       #IMPLIED
+  accept      %ContentTypes; #IMPLIED
+  align       %ImgAlign;     #IMPLIED
+  >
+
+<!ELEMENT select (optgroup|option)+>  <!-- option selector -->
+<!ATTLIST select
+  %attrs;
+  name        CDATA          #IMPLIED
+  size        %Number;       #IMPLIED
+  multiple    (multiple)     #IMPLIED
+  disabled    (disabled)     #IMPLIED
+  tabindex    %Number;       #IMPLIED
+  onfocus     %Script;       #IMPLIED
+  onblur      %Script;       #IMPLIED
+  onchange    %Script;       #IMPLIED
+  >
+
+<!ELEMENT optgroup (option)+>   <!-- option group -->
+<!ATTLIST optgroup
+  %attrs;
+  disabled    (disabled)     #IMPLIED
+  label       %Text;         #REQUIRED
+  >
+
+<!ELEMENT option (#PCDATA)>     <!-- selectable choice -->
+<!ATTLIST option
+  %attrs;
+  selected    (selected)     #IMPLIED
+  disabled    (disabled)     #IMPLIED
+  label       %Text;         #IMPLIED
+  value       CDATA          #IMPLIED
+  >
+
+<!ELEMENT textarea (#PCDATA)>     <!-- multi-line text field -->
+<!ATTLIST textarea
+  %attrs;
+  %focus;
+  name        CDATA          #IMPLIED
+  rows        %Number;       #REQUIRED
+  cols        %Number;       #REQUIRED
+  disabled    (disabled)     #IMPLIED
+  readonly    (readonly)     #IMPLIED
+  onselect    %Script;       #IMPLIED
+  onchange    %Script;       #IMPLIED
+  >
+
+<!--
+  The fieldset element is used to group form fields.
+  Only one legend element should occur in the content
+  and if present should only be preceded by whitespace.
+-->
+<!ELEMENT fieldset (#PCDATA | legend | %block; | form | %inline; | %misc;)*>
+<!ATTLIST fieldset
+  %attrs;
+  >
+
+<!ENTITY % LAlign "(top|bottom|left|right)">
+
+<!ELEMENT legend %Inline;>     <!-- fieldset label -->
+<!ATTLIST legend
+  %attrs;
+  accesskey   %Character;    #IMPLIED
+  align       %LAlign;       #IMPLIED
+  >
+
+<!--
+ Content is %Flow; excluding a, form, form controls, iframe
+--> 
+<!ELEMENT button %button.content;>  <!-- push button -->
+<!ATTLIST button
+  %attrs;
+  %focus;
+  name        CDATA          #IMPLIED
+  value       CDATA          #IMPLIED
+  type        (button|submit|reset) "submit"
+  disabled    (disabled)     #IMPLIED
+  >
+
+<!-- single-line text input control (DEPRECATED) -->
+<!ELEMENT isindex EMPTY>
+<!ATTLIST isindex
+  %coreattrs;
+  %i18n;
+  prompt      %Text;         #IMPLIED
+  >
+
+<!--======================= Tables =======================================-->
+
+<!-- Derived from IETF HTML table standard, see [RFC1942] -->
+
+<!--
+ The border attribute sets the thickness of the frame around the
+ table. The default units are screen pixels.
+
+ The frame attribute specifies which parts of the frame around
+ the table should be rendered. The values are not the same as
+ CALS to avoid a name clash with the valign attribute.
+-->
+<!ENTITY % TFrame "(void|above|below|hsides|lhs|rhs|vsides|box|border)">
+
+<!--
+ The rules attribute defines which rules to draw between cells:
+
+ If rules is absent then assume:
+     "none" if border is absent or border="0" otherwise "all"
+-->
+
+<!ENTITY % TRules "(none | groups | rows | cols | all)">
+  
+<!-- horizontal placement of table relative to document -->
+<!ENTITY % TAlign "(left|center|right)">
+
+<!-- horizontal alignment attributes for cell contents
+
+  char        alignment char, e.g. char=':'
+  charoff     offset for alignment char
+-->
+<!ENTITY % cellhalign
+  "align      (left|center|right|justify|char) #IMPLIED
+   char       %Character;    #IMPLIED
+   charoff    %Length;       #IMPLIED"
+  >
+
+<!-- vertical alignment attributes for cell contents -->
+<!ENTITY % cellvalign
+  "valign     (top|middle|bottom|baseline) #IMPLIED"
+  >
+
+<!ELEMENT table
+     (caption?, (col*|colgroup*), thead?, tfoot?, (tbody+|tr+))>
+<!ELEMENT caption  %Inline;>
+<!ELEMENT thead    (tr)+>
+<!ELEMENT tfoot    (tr)+>
+<!ELEMENT tbody    (tr)+>
+<!ELEMENT colgroup (col)*>
+<!ELEMENT col      EMPTY>
+<!ELEMENT tr       (th|td)+>
+<!ELEMENT th       %Flow;>
+<!ELEMENT td       %Flow;>
+
+<!ATTLIST table
+  %attrs;
+  summary     %Text;         #IMPLIED
+  width       %Length;       #IMPLIED
+  border      %Pixels;       #IMPLIED
+  frame       %TFrame;       #IMPLIED
+  rules       %TRules;       #IMPLIED
+  cellspacing %Length;       #IMPLIED
+  cellpadding %Length;       #IMPLIED
+  align       %TAlign;       #IMPLIED
+  bgcolor     %Color;        #IMPLIED
+  >
+
+<!ENTITY % CAlign "(top|bottom|left|right)">
+
+<!ATTLIST caption
+  %attrs;
+  align       %CAlign;       #IMPLIED
+  >
+
+<!--
+colgroup groups a set of col elements. It allows you to group
+several semantically related columns together.
+-->
+<!ATTLIST colgroup
+  %attrs;
+  span        %Number;       "1"
+  width       %MultiLength;  #IMPLIED
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!--
+ col elements define the alignment properties for cells in
+ one or more columns.
+
+ The width attribute specifies the width of the columns, e.g.
+
+     width=64        width in screen pixels
+     width=0.5*      relative width of 0.5
+
+ The span attribute causes the attributes of one
+ col element to apply to more than one column.
+-->
+<!ATTLIST col
+  %attrs;
+  span        %Number;       "1"
+  width       %MultiLength;  #IMPLIED
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!--
+    Use thead to duplicate headers when breaking table
+    across page boundaries, or for static headers when
+    tbody sections are rendered in scrolling panel.
+
+    Use tfoot to duplicate footers when breaking table
+    across page boundaries, or for static footers when
+    tbody sections are rendered in scrolling panel.
+
+    Use multiple tbody sections when rules are needed
+    between groups of table rows.
+-->
+<!ATTLIST thead
+  %attrs;
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!ATTLIST tfoot
+  %attrs;
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!ATTLIST tbody
+  %attrs;
+  %cellhalign;
+  %cellvalign;
+  >
+
+<!ATTLIST tr
+  %attrs;
+  %cellhalign;
+  %cellvalign;
+  bgcolor     %Color;        #IMPLIED
+  >
+
+<!-- Scope is simpler than headers attribute for common tables -->
+<!ENTITY % Scope "(row|col|rowgroup|colgroup)">
+
+<!-- th is for headers, td for data and for cells acting as both -->
+
+<!ATTLIST th
+  %attrs;
+  abbr        %Text;         #IMPLIED
+  axis        CDATA          #IMPLIED
+  headers     IDREFS         #IMPLIED
+  scope       %Scope;        #IMPLIED
+  rowspan     %Number;       "1"
+  colspan     %Number;       "1"
+  %cellhalign;
+  %cellvalign;
+  nowrap      (nowrap)       #IMPLIED
+  bgcolor     %Color;        #IMPLIED
+  width       %Length;       #IMPLIED
+  height      %Length;       #IMPLIED
+  >
+
+<!ATTLIST td
+  %attrs;
+  abbr        %Text;         #IMPLIED
+  axis        CDATA          #IMPLIED
+  headers     IDREFS         #IMPLIED
+  scope       %Scope;        #IMPLIED
+  rowspan     %Number;       "1"
+  colspan     %Number;       "1"
+  %cellhalign;
+  %cellvalign;
+  nowrap      (nowrap)       #IMPLIED
+  bgcolor     %Color;        #IMPLIED
+  width       %Length;       #IMPLIED
+  height      %Length;       #IMPLIED
+  >
+
diff --git a/ThirdParty/Twisted/twisted/mail/__init__.py b/ThirdParty/Twisted/twisted/mail/__init__.py
new file mode 100644
index 0000000..b434715
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/__init__.py
@@ -0,0 +1,15 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+
+Twisted Mail: a Twisted E-Mail Server.
+
+Maintainer: Jp Calderone
+
+"""
+
+from twisted.mail._version import version
+__version__ = version.short()
diff --git a/ThirdParty/Twisted/twisted/mail/_version.py b/ThirdParty/Twisted/twisted/mail/_version.py
new file mode 100644
index 0000000..a7d29f5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.mail', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/mail/alias.py b/ThirdParty/Twisted/twisted/mail/alias.py
new file mode 100644
index 0000000..8eccea6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/alias.py
@@ -0,0 +1,435 @@
+# -*- test-case-name: twisted.mail.test.test_mail -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Support for aliases(5) configuration files
+
+@author: Jp Calderone
+
+TODO::
+    Monitor files for reparsing
+    Handle non-local alias targets
+    Handle maildir alias targets
+"""
+
+import os
+import tempfile
+
+from twisted.mail import smtp
+from twisted.internet import reactor
+from twisted.internet import protocol
+from twisted.internet import defer
+from twisted.python import failure
+from twisted.python import log
+from zope.interface import implements, Interface
+
+
+def handle(result, line, filename, lineNo):
+    parts = [p.strip() for p in line.split(':', 1)]
+    if len(parts) != 2:
+        fmt = "Invalid format on line %d of alias file %s."
+        arg = (lineNo, filename)
+        log.err(fmt % arg)
+    else:
+        user, alias = parts
+        result.setdefault(user.strip(), []).extend(map(str.strip, alias.split(',')))
+
+def loadAliasFile(domains, filename=None, fp=None):
+    """Load a file containing email aliases.
+
+    Lines in the file should be formatted like so::
+
+        username: alias1,alias2,...,aliasN
+
+    Aliases beginning with | are treated as programs: the program is run and
+    the message is written to its standard input.
+
+    Aliases without a host part will be assumed to be addresses on localhost.
+
+    If a username is specified multiple times, the aliases for each are joined
+    together as if they had all been on one line.
+
+    @type domains: C{dict} of implementor of C{IDomain}
+    @param domains: The domains to which these aliases will belong.
+
+    @type filename: C{str}
+    @param filename: The filename from which to load aliases.
+
+    @type fp: Any file-like object.
+    @param fp: If specified, overrides C{filename}, and aliases are read from
+    it.
+
+    @rtype: C{dict}
+    @return: A dictionary mapping usernames to C{AliasGroup} objects.
+    """
+    result = {}
+    if fp is None:
+        fp = file(filename)
+    else:
+        filename = getattr(fp, 'name', '<unknown>')
+    i = 0
+    prev = ''
+    for line in fp:
+        i += 1
+        line = line.rstrip()
+        if line.lstrip().startswith('#'):
+            continue
+        elif line.startswith(' ') or line.startswith('\t'):
+            prev = prev + line
+        else:
+            if prev:
+                handle(result, prev, filename, i)
+            prev = line
+    if prev:
+        handle(result, prev, filename, i)
+    for (u, a) in result.items():
+        addr = smtp.Address(u)
+        result[u] = AliasGroup(a, domains, u)
+    return result
+
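A minimal usage sketch for the format described in the docstring above. The
example.com domain table, the sample aliases, and the use of mail.BounceDomain
as a stand-in IDomain implementor are illustrative assumptions, and an
in-memory file object is passed instead of a real aliases(5) file:

    from StringIO import StringIO
    from twisted.mail import alias, mail

    domains = {'example.com': mail.BounceDomain()}   # hypothetical domain table
    fp = StringIO(
        "# lines starting with '#' are ignored\n"
        "postmaster: root\n"
        "support: alice@example.com, bob@example.com\n")
    groups = alias.loadAliasFile(domains, fp=fp)
    # groups maps local user names to AliasGroup instances, e.g.
    # str(groups['support']) -> '<AliasGroup [<Address ...>, <Address ...>]>'
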
+class IAlias(Interface):
+    def createMessageReceiver():
+        pass
+
+class AliasBase:
+    def __init__(self, domains, original):
+        self.domains = domains
+        self.original = smtp.Address(original)
+
+    def domain(self):
+        return self.domains[self.original.domain]
+
+    def resolve(self, aliasmap, memo=None):
+        if memo is None:
+            memo = {}
+        if str(self) in memo:
+            return None
+        memo[str(self)] = None
+        return self.createMessageReceiver()
+
+class AddressAlias(AliasBase):
+    """The simplest alias, translating one email address into another."""
+
+    implements(IAlias)
+
+    def __init__(self, alias, *args):
+        AliasBase.__init__(self, *args)
+        self.alias = smtp.Address(alias)
+
+    def __str__(self):
+        return '<Address %s>' % (self.alias,)
+
+    def createMessageReceiver(self):
+        return self.domain().startMessage(str(self.alias))
+
+    def resolve(self, aliasmap, memo=None):
+        if memo is None:
+            memo = {}
+        if str(self) in memo:
+            return None
+        memo[str(self)] = None
+        try:
+            return self.domain().exists(smtp.User(self.alias, None, None, None), memo)()
+        except smtp.SMTPBadRcpt:
+            pass
+        if self.alias.local in aliasmap:
+            return aliasmap[self.alias.local].resolve(aliasmap, memo)
+        return None
+
+class FileWrapper:
+    implements(smtp.IMessage)
+
+    def __init__(self, filename):
+        self.fp = tempfile.TemporaryFile()
+        self.finalname = filename
+
+    def lineReceived(self, line):
+        self.fp.write(line + '\n')
+
+    def eomReceived(self):
+        self.fp.seek(0, 0)
+        try:
+            f = file(self.finalname, 'a')
+        except:
+            return defer.fail(failure.Failure())
+
+        f.write(self.fp.read())
+        self.fp.close()
+        f.close()
+
+        return defer.succeed(self.finalname)
+
+    def connectionLost(self):
+        self.fp.close()
+        self.fp = None
+
+    def __str__(self):
+        return '<FileWrapper %s>' % (self.finalname,)
+
+
+class FileAlias(AliasBase):
+
+    implements(IAlias)
+
+    def __init__(self, filename, *args):
+        AliasBase.__init__(self, *args)
+        self.filename = filename
+
+    def __str__(self):
+        return '<File %s>' % (self.filename,)
+
+    def createMessageReceiver(self):
+        return FileWrapper(self.filename)
+
+
+
+class ProcessAliasTimeout(Exception):
+    """
+    A timeout occurred while processing aliases.
+    """
+
+
+
+class MessageWrapper:
+    """
+    A message receiver which delivers content to a child process.
+
+    @type completionTimeout: C{int} or C{float}
+    @ivar completionTimeout: The number of seconds to wait for the child
+        process to exit before reporting the delivery as a failure.
+
+    @type _timeoutCallID: C{NoneType} or L{IDelayedCall}
+    @ivar _timeoutCallID: The call used to time out delivery, started when the
+        connection to the child process is closed.
+
+    @type done: C{bool}
+    @ivar done: Flag indicating whether the child process has exited or not.
+
+    @ivar reactor: An L{IReactorTime} provider which will be used to schedule
+        timeouts.
+    """
+    implements(smtp.IMessage)
+
+    done = False
+
+    completionTimeout = 60
+    _timeoutCallID = None
+
+    reactor = reactor
+
+    def __init__(self, protocol, process=None, reactor=None):
+        self.processName = process
+        self.protocol = protocol
+        self.completion = defer.Deferred()
+        self.protocol.onEnd = self.completion
+        self.completion.addBoth(self._processEnded)
+
+        if reactor is not None:
+            self.reactor = reactor
+
+
+    def _processEnded(self, result):
+        """
+        Record process termination and cancel the timeout call if it is active.
+        """
+        self.done = True
+        if self._timeoutCallID is not None:
+            # eomReceived was called, we're actually waiting for the process to
+            # exit.
+            self._timeoutCallID.cancel()
+            self._timeoutCallID = None
+        else:
+            # eomReceived was not called, this is unexpected, propagate the
+            # error.
+            return result
+
+
+    def lineReceived(self, line):
+        if self.done:
+            return
+        self.protocol.transport.write(line + '\n')
+
+
+    def eomReceived(self):
+        """
+        Disconnect from the child process, set up a timeout to wait for it to
+        exit, and return a Deferred which will be called back when the child
+        process exits.
+        """
+        if not self.done:
+            self.protocol.transport.loseConnection()
+            self._timeoutCallID = self.reactor.callLater(
+                self.completionTimeout, self._completionCancel)
+        return self.completion
+
+
+    def _completionCancel(self):
+        """
+        Handle the expiration of the timeout for the child process to exit by
+        terminating the child process forcefully and issuing a failure to the
+        completion deferred returned by L{eomReceived}.
+        """
+        self._timeoutCallID = None
+        self.protocol.transport.signalProcess('KILL')
+        exc = ProcessAliasTimeout(
+            "No answer after %s seconds" % (self.completionTimeout,))
+        self.protocol.onEnd = None
+        self.completion.errback(failure.Failure(exc))
+
+
+    def connectionLost(self):
+        # Heh heh
+        pass
+
+
+    def __str__(self):
+        return '<ProcessWrapper %s>' % (self.processName,)
+
+
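A sketch of the timeout behaviour documented on MessageWrapper above, driving
it with twisted.internet.task.Clock and a hypothetical stub transport in place
of a real child process (_StubTransport and the 'procmail' name are
illustrative only):

    from twisted.internet import task
    from twisted.mail import alias

    class _StubTransport:
        # hypothetical transport that records what MessageWrapper asks of it
        def __init__(self):
            self.written, self.lost, self.signals = [], False, []
        def write(self, data):
            self.written.append(data)
        def loseConnection(self):
            self.lost = True
        def signalProcess(self, signal):
            self.signals.append(signal)

    clock = task.Clock()                       # IReactorTime provider
    proto = alias.ProcessAliasProtocol()
    proto.transport = _StubTransport()
    wrapper = alias.MessageWrapper(proto, 'procmail', reactor=clock)

    wrapper.lineReceived('Subject: hello')     # forwarded to the child's stdin
    d = wrapper.eomReceived()                  # closes stdin, arms the timeout
    d.addErrback(lambda f: f.trap(alias.ProcessAliasTimeout))
    clock.advance(wrapper.completionTimeout)   # child never exits: KILL + errback
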
+
+class ProcessAliasProtocol(protocol.ProcessProtocol):
+    """
+    Trivial process protocol which will callback a Deferred when the associated
+    process ends.
+
+    @ivar onEnd: If not C{None}, a L{Deferred} which will be called back with
+        the failure passed to C{processEnded}, when C{processEnded} is called.
+    """
+
+    onEnd = None
+
+    def processEnded(self, reason):
+        """
+        Call back C{onEnd} if it is set.
+        """
+        if self.onEnd is not None:
+            self.onEnd.errback(reason)
+
+
+
+class ProcessAlias(AliasBase):
+    """
+    An alias which is handled by the execution of a particular program.
+
+    @ivar reactor: An L{IReactorProcess} and L{IReactorTime} provider which
+        will be used to create and timeout the alias child process.
+    """
+    implements(IAlias)
+
+    reactor = reactor
+
+    def __init__(self, path, *args):
+        AliasBase.__init__(self, *args)
+        self.path = path.split()
+        self.program = self.path[0]
+
+
+    def __str__(self):
+        """
+        Build a string representation containing the path.
+        """
+        return '<Process %s>' % (self.path,)
+
+
+    def spawnProcess(self, proto, program, path):
+        """
+        Wrapper around C{reactor.spawnProcess}, to be customized for test
+        purposes.
+        """
+        return self.reactor.spawnProcess(proto, program, path)
+
+
+    def createMessageReceiver(self):
+        """
+        Create a message receiver by launching a process.
+        """
+        p = ProcessAliasProtocol()
+        m = MessageWrapper(p, self.program, self.reactor)
+        fd = self.spawnProcess(p, self.program, self.path)
+        return m
+
+
+
+class MultiWrapper:
+    """
+    Wrapper to deliver a single message to multiple recipients.
+    """
+
+    implements(smtp.IMessage)
+
+    def __init__(self, objs):
+        self.objs = objs
+
+    def lineReceived(self, line):
+        for o in self.objs:
+            o.lineReceived(line)
+
+    def eomReceived(self):
+        return defer.DeferredList([
+            o.eomReceived() for o in self.objs
+        ])
+
+    def connectionLost(self):
+        for o in self.objs:
+            o.connectionLost()
+
+    def __str__(self):
+        return '<GroupWrapper %r>' % (map(str, self.objs),)
+
+
+
+class AliasGroup(AliasBase):
+    """
+    An alias which points to more than one recipient.
+
+    @ivar processAliasFactory: a factory for resolving process aliases.
+    @type processAliasFactory: C{class}
+    """
+
+    implements(IAlias)
+
+    processAliasFactory = ProcessAlias
+
+    def __init__(self, items, *args):
+        AliasBase.__init__(self, *args)
+        self.aliases = []
+        while items:
+            addr = items.pop().strip()
+            if addr.startswith(':'):
+                try:
+                    f = file(addr[1:])
+                except:
+                    log.err("Invalid filename in alias file %r" % (addr[1:],))
+                else:
+                    addr = ' '.join([l.strip() for l in f])
+                    items.extend(addr.split(','))
+            elif addr.startswith('|'):
+                self.aliases.append(self.processAliasFactory(addr[1:], *args))
+            elif addr.startswith('/'):
+                if os.path.isdir(addr):
+                    log.err("Directory delivery not supported")
+                else:
+                    self.aliases.append(FileAlias(addr, *args))
+            else:
+                self.aliases.append(AddressAlias(addr, *args))
+
+    def __len__(self):
+        return len(self.aliases)
+
+    def __str__(self):
+        return '<AliasGroup [%s]>' % (', '.join(map(str, self.aliases)))
+
+    def createMessageReceiver(self):
+        return MultiWrapper([a.createMessageReceiver() for a in self.aliases])
+
+    def resolve(self, aliasmap, memo=None):
+        if memo is None:
+            memo = {}
+        r = []
+        for a in self.aliases:
+            r.append(a.resolve(aliasmap, memo))
+        return MultiWrapper(filter(None, r))
+
diff --git a/ThirdParty/Twisted/twisted/mail/bounce.py b/ThirdParty/Twisted/twisted/mail/bounce.py
new file mode 100644
index 0000000..5d5bde9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/bounce.py
@@ -0,0 +1,60 @@
+# -*- test-case-name: twisted.mail.test.test_bounce -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+import StringIO
+import rfc822
+import time
+import os
+
+
+from twisted.mail import smtp
+
+BOUNCE_FORMAT = """\
+From: postmaster@%(failedDomain)s
+To: %(failedFrom)s
+Subject: Returned Mail: see transcript for details
+Message-ID: %(messageID)s
+Content-Type: multipart/report; report-type=delivery-status;
+    boundary="%(boundary)s"
+
+--%(boundary)s
+
+%(transcript)s
+
+--%(boundary)s
+Content-Type: message/delivery-status
+Arrival-Date: %(ctime)s
+Final-Recipient: RFC822; %(failedTo)s
+"""
+
+def generateBounce(message, failedFrom, failedTo, transcript=''):
+    if not transcript:
+        transcript = '''\
+I'm sorry, the following address has permanent errors: %(failedTo)s.
+I've given up, and I will not retry the message again.
+''' % vars()
+
+    boundary = "%s_%s_%s" % (time.time(), os.getpid(), 'XXXXX')
+    failedAddress = rfc822.AddressList(failedTo)[0][1]
+    failedDomain = failedAddress.split('@', 1)[1]
+    messageID = smtp.messageid(uniq='bounce')
+    ctime = time.ctime(time.time())
+
+    fp = StringIO.StringIO()
+    fp.write(BOUNCE_FORMAT % vars())
+    orig = message.tell()
+    message.seek(0, 2)  # seek to the end to learn the message size
+    sz = message.tell()
+    message.seek(orig, 0)  # restore the original position
+    if sz > 10000:
+        while 1:
+            line = message.readline()
+            if len(line)<=1:
+                break
+            fp.write(line)
+    else:
+        fp.write(message.read())
+    return '', failedFrom, fp.getvalue()
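A minimal sketch of how the bounce is assembled, using an in-memory file and
hypothetical example addresses:

    import StringIO
    from twisted.mail import bounce

    origMessage = StringIO.StringIO(
        "From: alice@example.com\r\n"
        "To: bob@dead.example\r\n"
        "\r\n"
        "Anybody home?\r\n")
    envelopeFrom, envelopeTo, bounceText = bounce.generateBounce(
        origMessage, 'alice@example.com', 'bob@dead.example')
    # envelopeFrom is '' (the null reverse-path), envelopeTo is the original
    # sender, and bounceText is the multipart/report text built from
    # BOUNCE_FORMAT plus the quoted original message.
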
diff --git a/ThirdParty/Twisted/twisted/mail/imap4.py b/ThirdParty/Twisted/twisted/mail/imap4.py
new file mode 100644
index 0000000..398243d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/imap4.py
@@ -0,0 +1,6209 @@
+# -*- test-case-name: twisted.mail.test.test_imap -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An IMAP4 protocol implementation
+
+@author: Jp Calderone
+
+To do::
+  Suspend idle timeout while server is processing
+  Use an async message parser instead of buffering in memory
+  Figure out a way to not queue multi-message client requests (Flow? A simple callback?)
+  Clarify some API docs (Query, etc)
+  Make APPEND recognize (again) non-existent mailboxes before accepting the literal
+"""
+
+import rfc822
+import base64
+import binascii
+import hmac
+import re
+import copy
+import tempfile
+import string
+import time
+import random
+import types
+
+import email.Utils
+
+try:
+    import cStringIO as StringIO
+except:
+    import StringIO
+
+from zope.interface import implements, Interface
+
+from twisted.protocols import basic
+from twisted.protocols import policies
+from twisted.internet import defer
+from twisted.internet import error
+from twisted.internet.defer import maybeDeferred
+from twisted.python import log, text
+from twisted.internet import interfaces
+
+from twisted import cred
+import twisted.cred.error
+import twisted.cred.credentials
+
+
+# locale-independent month names to use instead of strftime's
+_MONTH_NAMES = dict(zip(
+        range(1, 13),
+        "Jan Feb Mar Apr May Jun Jul Aug Sep Oct Nov Dec".split()))
+
+
+class MessageSet(object):
+    """
+    Essentially an infinite bitfield, with some extra features.
+
+    @type getnext: Function taking C{int} returning C{int}
+    @ivar getnext: A function that returns the next message number,
+    used when iterating through the MessageSet. By default, a function
+    returning the next integer is supplied, but as this can be rather
+    inefficient for sparse UID iterations, it is recommended to supply
+    one when messages are requested by UID.  The argument is provided
+    as a hint to the implementation and may be ignored if it makes sense
+    to do so (eg, if an iterator is being used that maintains its own
+    state, it is guaranteed that it will not be called out-of-order).
+    """
+    _empty = []
+
+    def __init__(self, start=_empty, end=_empty):
+        """
+        Create a new MessageSet()
+
+        @type start: Optional C{int}
+        @param start: Start of range, or only message number
+
+        @type end: Optional C{int}
+        @param end: End of range.
+        """
+        self._last = self._empty # Last message/UID in use
+        self.ranges = [] # List of ranges included
+        self.getnext = lambda x: x+1 # A function which will return the next
+                                     # message id. Handy for UID requests.
+
+        if start is self._empty:
+            return
+
+        if isinstance(start, types.ListType):
+            self.ranges = start[:]
+            self.clean()
+        else:
+            self.add(start,end)
+
+    # The 'last' attribute is built as a property from the closures below.
+    def last():
+        def _setLast(self, value):
+            if self._last is not self._empty:
+                raise ValueError("last already set")
+
+            self._last = value
+            for i, (l, h) in enumerate(self.ranges):
+                if l is not None:
+                    break # There are no more Nones after this
+                l = value
+                if h is None:
+                    h = value
+                if l > h:
+                    l, h = h, l
+                self.ranges[i] = (l, h)
+
+            self.clean()
+
+        def _getLast(self):
+            return self._last
+
+        doc = '''
+            "Highest" message number, refered to by "*".
+            Must be set before attempting to use the MessageSet.
+        '''
+        return _getLast, _setLast, None, doc
+    last = property(*last())
+
+    def add(self, start, end=_empty):
+        """
+        Add another range
+
+        @type start: C{int}
+        @param start: Start of range, or only message number
+
+        @type end: Optional C{int}
+        @param end: End of range.
+        """
+        if end is self._empty:
+            end = start
+
+        if self._last is not self._empty:
+            if start is None:
+                start = self.last
+            if end is None:
+                end = self.last
+
+        if start > end:
+            # Try to keep in low, high order if possible
+            # (But we don't know what None means, this will keep
+            # None at the start of the ranges list)
+            start, end = end, start
+
+        self.ranges.append((start, end))
+        self.clean()
+
+    def __add__(self, other):
+        if isinstance(other, MessageSet):
+            ranges = self.ranges + other.ranges
+            return MessageSet(ranges)
+        else:
+            res = MessageSet(self.ranges)
+            try:
+                res.add(*other)
+            except TypeError:
+                res.add(other)
+            return res
+
+
+    def extend(self, other):
+        if isinstance(other, MessageSet):
+            self.ranges.extend(other.ranges)
+            self.clean()
+        else:
+            try:
+                self.add(*other)
+            except TypeError:
+                self.add(other)
+
+        return self
+
+
+    def clean(self):
+        """
+        Clean ranges list, combining adjacent ranges
+        """
+
+        self.ranges.sort()
+
+        oldl, oldh = None, None
+        for i,(l, h) in enumerate(self.ranges):
+            if l is None:
+                continue
+            # l is >= oldl and h is >= oldh due to sort()
+            if oldl is not None and l <= oldh + 1:
+                l = oldl
+                h = max(oldh, h)
+                self.ranges[i - 1] = None
+                self.ranges[i] = (l, h)
+
+            oldl, oldh = l, h
+
+        self.ranges = filter(None, self.ranges)
+
+
+    def __contains__(self, value):
+        """
+        May raise TypeError if we encounter an open-ended range
+        """
+        for l, h in self.ranges:
+            if l is None:
+                raise TypeError(
+                    "Can't determine membership; last value not set")
+            if l <= value <= h:
+                return True
+
+        return False
+
+
+    def _iterator(self):
+        for l, h in self.ranges:
+            l = self.getnext(l-1)
+            while l <= h:
+                yield l
+                l = self.getnext(l)
+                if l is None:
+                    break
+
+    def __iter__(self):
+        if self.ranges and self.ranges[0][0] is None:
+            raise TypeError("Can't iterate; last value not set")
+
+        return self._iterator()
+
+    def __len__(self):
+        res = 0
+        for l, h in self.ranges:
+            if l is None:
+                if h is None:
+                    res += 1
+                else:
+                    raise TypeError("Can't size object; last value not set")
+            else:
+                res += (h - l) + 1
+
+        return res
+
+    def __str__(self):
+        p = []
+        for low, high in self.ranges:
+            if low == high:
+                if low is None:
+                    p.append('*')
+                else:
+                    p.append(str(low))
+            elif low is None:
+                p.append('%d:*' % (high,))
+            else:
+                p.append('%d:%d' % (low, high))
+        return ','.join(p)
+
+    def __repr__(self):
+        return '<MessageSet %s>' % (str(self),)
+
+    def __eq__(self, other):
+        if isinstance(other, MessageSet):
+            return self.ranges == other.ranges
+        return False
+
+
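A short sketch of how MessageSet merges and renders ranges, plus the "last"
behaviour mentioned in its docstring (the message numbers are illustrative):

    from twisted.mail import imap4

    ms = imap4.MessageSet(1, 4)
    ms.add(6)
    ms.add(5)            # adjacent ranges are merged by clean()
    str(ms)              # '1:6'
    len(ms)              # 6
    list(ms)             # [1, 2, 3, 4, 5, 6]
    7 in ms              # False

    openEnded = imap4.MessageSet(3, None)
    str(openEnded)       # '3:*'
    openEnded.last = 10  # '*' now means message 10
    str(openEnded)       # '3:10'
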
+class LiteralString:
+    def __init__(self, size, defered):
+        self.size = size
+        self.data = []
+        self.defer = defered
+
+    def write(self, data):
+        self.size -= len(data)
+        passon = None
+        if self.size > 0:
+            self.data.append(data)
+        else:
+            if self.size:
+                data, passon = data[:self.size], data[self.size:]
+            else:
+                passon = ''
+            if data:
+                self.data.append(data)
+        return passon
+
+    def callback(self, line):
+        """
+        Call the deferred with the collected data and the rest of the line.
+        """
+        self.defer.callback((''.join(self.data), line))
+
+class LiteralFile:
+    _memoryFileLimit = 1024 * 1024 * 10
+
+    def __init__(self, size, defered):
+        self.size = size
+        self.defer = defered
+        if size > self._memoryFileLimit:
+            self.data = tempfile.TemporaryFile()
+        else:
+            self.data = StringIO.StringIO()
+
+    def write(self, data):
+        self.size -= len(data)
+        passon = None
+        if self.size > 0:
+            self.data.write(data)
+        else:
+            if self.size:
+                data, passon = data[:self.size], data[self.size:]
+            else:
+                passon = ''
+            if data:
+                self.data.write(data)
+        return passon
+
+    def callback(self, line):
+        """
+        Call the deferred with the collected data and the rest of the line.
+        """
+        self.data.seek(0,0)
+        self.defer.callback((self.data, line))
+
+
+class WriteBuffer:
+    """Buffer up a bunch of writes before sending them all to a transport at once.
+    """
+    def __init__(self, transport, size=8192):
+        self.bufferSize = size
+        self.transport = transport
+        self._length = 0
+        self._writes = []
+
+    def write(self, s):
+        self._length += len(s)
+        self._writes.append(s)
+        if self._length > self.bufferSize:
+            self.flush()
+
+    def flush(self):
+        if self._writes:
+            self.transport.writeSequence(self._writes)
+            self._writes = []
+            self._length = 0
+
+
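A small sketch of WriteBuffer's batching behaviour, with a hypothetical
list-backed transport standing in for a real one:

    class _ListTransport:
        # hypothetical transport collecting writeSequence() batches
        def __init__(self):
            self.batches = []
        def writeSequence(self, seq):
            self.batches.append(''.join(seq))

    transport = _ListTransport()
    buf = WriteBuffer(transport, size=16)
    buf.write('a' * 10)      # buffered (10 <= 16)
    buf.write('b' * 10)      # 20 > 16: both chunks go out in one writeSequence()
    buf.write('c' * 10)      # buffered again
    buf.flush()              # push out the remainder
    # transport.batches == ['a' * 10 + 'b' * 10, 'c' * 10]
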
+class Command:
+    _1_RESPONSES = ('CAPABILITY', 'FLAGS', 'LIST', 'LSUB', 'STATUS', 'SEARCH', 'NAMESPACE')
+    _2_RESPONSES = ('EXISTS', 'EXPUNGE', 'FETCH', 'RECENT')
+    _OK_RESPONSES = ('UIDVALIDITY', 'UNSEEN', 'READ-WRITE', 'READ-ONLY', 'UIDNEXT', 'PERMANENTFLAGS')
+    defer = None
+
+    def __init__(self, command, args=None, wantResponse=(),
+                 continuation=None, *contArgs, **contKw):
+        self.command = command
+        self.args = args
+        self.wantResponse = wantResponse
+        self.continuation = lambda x: continuation(x, *contArgs, **contKw)
+        self.lines = []
+
+    def format(self, tag):
+        if self.args is None:
+            return ' '.join((tag, self.command))
+        return ' '.join((tag, self.command, self.args))
+
+    def finish(self, lastLine, unusedCallback):
+        send = []
+        unuse = []
+        for L in self.lines:
+            names = parseNestedParens(L)
+            N = len(names)
+            if (N >= 1 and names[0] in self._1_RESPONSES or
+                N >= 2 and names[1] in self._2_RESPONSES or
+                N >= 2 and names[0] == 'OK' and isinstance(names[1], types.ListType) and names[1][0] in self._OK_RESPONSES):
+                send.append(names)
+            else:
+                unuse.append(names)
+        d, self.defer = self.defer, None
+        d.callback((send, lastLine))
+        if unuse:
+            unusedCallback(unuse)
+
+class LOGINCredentials(cred.credentials.UsernamePassword):
+    def __init__(self):
+        self.challenges = ['Password\0', 'User Name\0']
+        self.responses = ['password', 'username']
+        cred.credentials.UsernamePassword.__init__(self, None, None)
+
+    def getChallenge(self):
+        return self.challenges.pop()
+
+    def setResponse(self, response):
+        setattr(self, self.responses.pop(), response)
+
+    def moreChallenges(self):
+        return bool(self.challenges)
+
+class PLAINCredentials(cred.credentials.UsernamePassword):
+    def __init__(self):
+        cred.credentials.UsernamePassword.__init__(self, None, None)
+
+    def getChallenge(self):
+        return ''
+
+    def setResponse(self, response):
+        parts = response.split('\0')
+        if len(parts) != 3:
+            raise IllegalClientResponse("Malformed Response - wrong number of parts")
+        useless, self.username, self.password = parts
+
+    def moreChallenges(self):
+        return False
+
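A sketch of the two challenge/response flows implemented above (the
credentials shown are made up; responses are passed already base64-decoded,
as the server does in its AUTHENTICATE handling):

    plain = PLAINCredentials()
    plain.getChallenge()                  # '' -- PLAIN sends no challenge text
    plain.setResponse('\0alice\0sekrit')  # authzid NUL authcid NUL password
    plain.username, plain.password        # ('alice', 'sekrit')

    login = LOGINCredentials()
    login.getChallenge()                  # 'User Name\0'
    login.setResponse('alice')
    login.getChallenge()                  # 'Password\0'
    login.setResponse('sekrit')
    login.moreChallenges()                # False -- exchange complete
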
+class IMAP4Exception(Exception):
+    def __init__(self, *args):
+        Exception.__init__(self, *args)
+
+class IllegalClientResponse(IMAP4Exception): pass
+
+class IllegalOperation(IMAP4Exception): pass
+
+class IllegalMailboxEncoding(IMAP4Exception): pass
+
+class IMailboxListener(Interface):
+    """Interface for objects interested in mailbox events"""
+
+    def modeChanged(writeable):
+        """Indicates that the write status of a mailbox has changed.
+
+        @type writeable: C{bool}
+        @param writeable: A true value if write is now allowed, false
+        otherwise.
+        """
+
+    def flagsChanged(newFlags):
+        """Indicates that the flags of one or more messages have changed.
+
+        @type newFlags: C{dict}
+        @param newFlags: A mapping of message identifiers to tuples of flags
+        now set on that message.
+        """
+
+    def newMessages(exists, recent):
+        """Indicates that the number of messages in a mailbox has changed.
+
+        @type exists: C{int} or C{None}
+        @param exists: The total number of messages now in this mailbox.
+        If the total number of messages has not changed, this should be
+        C{None}.
+
+        @type recent: C{int}
+        @param recent: The number of messages now flagged \\Recent.
+        If the number of recent messages has not changed, this should be
+        C{None}.
+        """
+
+class IMAP4Server(basic.LineReceiver, policies.TimeoutMixin):
+    """
+    Protocol implementation for an IMAP4rev1 server.
+
+    The server can be in any of four states:
+        - Non-authenticated
+        - Authenticated
+        - Selected
+        - Logout
+    """
+    implements(IMailboxListener)
+
+    # Identifier for this server software
+    IDENT = 'Twisted IMAP4rev1 Ready'
+
+    # Number of seconds before idle timeout
+    # Initially 1 minute.  Raised to 30 minutes after login.
+    timeOut = 60
+
+    POSTAUTH_TIMEOUT = 60 * 30
+
+    # Whether STARTTLS has been issued successfully yet or not.
+    startedTLS = False
+
+    # Whether our transport supports TLS
+    canStartTLS = False
+
+    # Mapping of tags to commands we have received
+    tags = None
+
+    # The object which will handle logins for us
+    portal = None
+
+    # The account object for this connection
+    account = None
+
+    # Logout callback
+    _onLogout = None
+
+    # The currently selected mailbox
+    mbox = None
+
+    # Command data to be processed when literal data is received
+    _pendingLiteral = None
+
+    # Maximum length to accept for a "short" string literal
+    _literalStringLimit = 4096
+
+    # IChallengeResponse factories for AUTHENTICATE command
+    challengers = None
+
+    # Search terms whose implementations need to be passed both the last
+    # message identifier (UID) and the last sequence id.
+    _requiresLastMessageInfo = set(["OR", "NOT", "UID"])
+
+    state = 'unauth'
+
+    parseState = 'command'
+
+    def __init__(self, chal = None, contextFactory = None, scheduler = None):
+        if chal is None:
+            chal = {}
+        self.challengers = chal
+        self.ctx = contextFactory
+        if scheduler is None:
+            scheduler = iterateInReactor
+        self._scheduler = scheduler
+        self._queuedAsync = []
+
+    def capabilities(self):
+        cap = {'AUTH': self.challengers.keys()}
+        if self.ctx and self.canStartTLS:
+            if not self.startedTLS and interfaces.ISSLTransport(self.transport, None) is None:
+                cap['LOGINDISABLED'] = None
+                cap['STARTTLS'] = None
+        cap['NAMESPACE'] = None
+        cap['IDLE'] = None
+        return cap
+
+    def connectionMade(self):
+        self.tags = {}
+        self.canStartTLS = interfaces.ITLSTransport(self.transport, None) is not None
+        self.setTimeout(self.timeOut)
+        self.sendServerGreeting()
+
+    def connectionLost(self, reason):
+        self.setTimeout(None)
+        if self._onLogout:
+            self._onLogout()
+            self._onLogout = None
+
+    def timeoutConnection(self):
+        self.sendLine('* BYE Autologout; connection idle too long')
+        self.transport.loseConnection()
+        if self.mbox:
+            self.mbox.removeListener(self)
+            cmbx = ICloseableMailbox(self.mbox, None)
+            if cmbx is not None:
+                maybeDeferred(cmbx.close).addErrback(log.err)
+            self.mbox = None
+        self.state = 'timeout'
+
+    def rawDataReceived(self, data):
+        self.resetTimeout()
+        passon = self._pendingLiteral.write(data)
+        if passon is not None:
+            self.setLineMode(passon)
+
+    # Avoid processing commands while buffers are being dumped to
+    # our transport
+    blocked = None
+
+    def _unblock(self):
+        commands = self.blocked
+        self.blocked = None
+        while commands and self.blocked is None:
+            self.lineReceived(commands.pop(0))
+        if self.blocked is not None:
+            self.blocked.extend(commands)
+
+    def lineReceived(self, line):
+        if self.blocked is not None:
+            self.blocked.append(line)
+            return
+
+        self.resetTimeout()
+
+        f = getattr(self, 'parse_' + self.parseState)
+        try:
+            f(line)
+        except Exception, e:
+            self.sendUntaggedResponse('BAD Server error: ' + str(e))
+            log.err()
+
+    def parse_command(self, line):
+        args = line.split(None, 2)
+        rest = None
+        if len(args) == 3:
+            tag, cmd, rest = args
+        elif len(args) == 2:
+            tag, cmd = args
+        elif len(args) == 1:
+            tag = args[0]
+            self.sendBadResponse(tag, 'Missing command')
+            return None
+        else:
+            self.sendBadResponse(None, 'Null command')
+            return None
+
+        cmd = cmd.upper()
+        try:
+            return self.dispatchCommand(tag, cmd, rest)
+        except IllegalClientResponse, e:
+            self.sendBadResponse(tag, 'Illegal syntax: ' + str(e))
+        except IllegalOperation, e:
+            self.sendNegativeResponse(tag, 'Illegal operation: ' + str(e))
+        except IllegalMailboxEncoding, e:
+            self.sendNegativeResponse(tag, 'Illegal mailbox name: ' + str(e))
+
+    def parse_pending(self, line):
+        d = self._pendingLiteral
+        self._pendingLiteral = None
+        self.parseState = 'command'
+        d.callback(line)
+
+    def dispatchCommand(self, tag, cmd, rest, uid=None):
+        f = self.lookupCommand(cmd)
+        if f:
+            fn = f[0]
+            parseargs = f[1:]
+            self.__doCommand(tag, fn, [self, tag], parseargs, rest, uid)
+        else:
+            self.sendBadResponse(tag, 'Unsupported command')
+
+    def lookupCommand(self, cmd):
+        return getattr(self, '_'.join((self.state, cmd.upper())), None)
+
+    def __doCommand(self, tag, handler, args, parseargs, line, uid):
+        for (i, arg) in enumerate(parseargs):
+            if callable(arg):
+                parseargs = parseargs[i+1:]
+                maybeDeferred(arg, self, line).addCallback(
+                    self.__cbDispatch, tag, handler, args,
+                    parseargs, uid).addErrback(self.__ebDispatch, tag)
+                return
+            else:
+                args.append(arg)
+
+        if line:
+            # Too many arguments
+            raise IllegalClientResponse("Too many arguments for command: " + repr(line))
+
+        if uid is not None:
+            handler(uid=uid, *args)
+        else:
+            handler(*args)
+
+    def __cbDispatch(self, (arg, rest), tag, fn, args, parseargs, uid):
+        args.append(arg)
+        self.__doCommand(tag, fn, args, parseargs, rest, uid)
+
+    def __ebDispatch(self, failure, tag):
+        if failure.check(IllegalClientResponse):
+            self.sendBadResponse(tag, 'Illegal syntax: ' + str(failure.value))
+        elif failure.check(IllegalOperation):
+            self.sendNegativeResponse(tag, 'Illegal operation: ' +
+                                      str(failure.value))
+        elif failure.check(IllegalMailboxEncoding):
+            self.sendNegativeResponse(tag, 'Illegal mailbox name: ' +
+                                      str(failure.value))
+        else:
+            self.sendBadResponse(tag, 'Server error: ' + str(failure.value))
+            log.err(failure)
+
+    def _stringLiteral(self, size):
+        if size > self._literalStringLimit:
+            raise IllegalClientResponse(
+                "Literal too long! I accept at most %d octets" %
+                (self._literalStringLimit,))
+        d = defer.Deferred()
+        self.parseState = 'pending'
+        self._pendingLiteral = LiteralString(size, d)
+        self.sendContinuationRequest('Ready for %d octets of text' % size)
+        self.setRawMode()
+        return d
+
+    def _fileLiteral(self, size):
+        d = defer.Deferred()
+        self.parseState = 'pending'
+        self._pendingLiteral = LiteralFile(size, d)
+        self.sendContinuationRequest('Ready for %d octets of data' % size)
+        self.setRawMode()
+        return d
+
+    def arg_astring(self, line):
+        """
+        Parse an astring from the line, return (arg, rest), possibly
+        via a deferred (to handle literals)
+        """
+        line = line.strip()
+        if not line:
+            raise IllegalClientResponse("Missing argument")
+        d = None
+        arg, rest = None, None
+        if line[0] == '"':
+            try:
+                spam, arg, rest = line.split('"',2)
+                rest = rest[1:] # Strip space
+            except ValueError:
+                raise IllegalClientResponse("Unmatched quotes")
+        elif line[0] == '{':
+            # literal
+            if line[-1] != '}':
+                raise IllegalClientResponse("Malformed literal")
+            try:
+                size = int(line[1:-1])
+            except ValueError:
+                raise IllegalClientResponse("Bad literal size: " + line[1:-1])
+            d = self._stringLiteral(size)
+        else:
+            arg = line.split(' ',1)
+            if len(arg) == 1:
+                arg.append('')
+            arg, rest = arg
+        return d or (arg, rest)
+
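For the quoted-string and atom cases the parse above is synchronous; only a
{n} literal goes through the continuation machinery. A brief sketch (the
server instance is created only to call the parser; arguments are made up):

    server = IMAP4Server()
    server.arg_astring('"foo bar" baz')   # ('foo bar', 'baz')
    server.arg_astring('foo baz')         # ('foo', 'baz')
    # '{5}' alone would instead return a Deferred and request 5 octets
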
+    # ATOM: Any CHAR except ( ) { % * " \ ] CTL SP (CHAR is 7bit)
+    atomre = re.compile(r'(?P<atom>[^\](){%*"\\\x00-\x20\x80-\xff]+)( (?P<rest>.*$)|$)')
+
+    def arg_atom(self, line):
+        """
+        Parse an atom from the line
+        """
+        if not line:
+            raise IllegalClientResponse("Missing argument")
+        m = self.atomre.match(line)
+        if m:
+            return m.group('atom'), m.group('rest')
+        else:
+            raise IllegalClientResponse("Malformed ATOM")
+
+    def arg_plist(self, line):
+        """
+        Parse a (non-nested) parenthesised list from the line
+        """
+        if not line:
+            raise IllegalClientResponse("Missing argument")
+
+        if line[0] != "(":
+            raise IllegalClientResponse("Missing parenthesis")
+
+        i = line.find(")")
+
+        if i == -1:
+            raise IllegalClientResponse("Mismatched parenthesis")
+
+        return (parseNestedParens(line[1:i],0), line[i+2:])
+
+    def arg_literal(self, line):
+        """
+        Parse a literal from the line
+        """
+        if not line:
+            raise IllegalClientResponse("Missing argument")
+
+        if line[0] != '{':
+            raise IllegalClientResponse("Missing literal")
+
+        if line[-1] != '}':
+            raise IllegalClientResponse("Malformed literal")
+
+        try:
+            size = int(line[1:-1])
+        except ValueError:
+            raise IllegalClientResponse("Bad literal size: " + line[1:-1])
+
+        return self._fileLiteral(size)
+
+    def arg_searchkeys(self, line):
+        """
+        searchkeys
+        """
+        query = parseNestedParens(line)
+        # XXX Should really use list of search terms and parse into
+        # a proper tree
+
+        return (query, '')
+
+    def arg_seqset(self, line):
+        """
+        sequence-set
+        """
+        rest = ''
+        arg = line.split(' ',1)
+        if len(arg) == 2:
+            rest = arg[1]
+        arg = arg[0]
+
+        try:
+            return (parseIdList(arg), rest)
+        except IllegalIdentifierError, e:
+            raise IllegalClientResponse("Bad message number " + str(e))
+
+    def arg_fetchatt(self, line):
+        """
+        fetch-att
+        """
+        p = _FetchParser()
+        p.parseString(line)
+        return (p.result, '')
+
+    def arg_flaglist(self, line):
+        """
+        Flag part of store-att-flag
+        """
+        flags = []
+        if line[0] == '(':
+            if line[-1] != ')':
+                raise IllegalClientResponse("Mismatched parenthesis")
+            line = line[1:-1]
+
+        while line:
+            m = self.atomre.search(line)
+            if not m:
+                raise IllegalClientResponse("Malformed flag")
+            if line[0] == '\\' and m.start() == 1:
+                flags.append('\\' + m.group('atom'))
+            elif m.start() == 0:
+                flags.append(m.group('atom'))
+            else:
+                raise IllegalClientResponse("Malformed flag")
+            line = m.group('rest')
+
+        return (flags, '')
+
+    def arg_line(self, line):
+        """
+        Command line of UID command
+        """
+        return (line, '')
+
+    def opt_plist(self, line):
+        """
+        Optional parenthesised list
+        """
+        if line.startswith('('):
+            return self.arg_plist(line)
+        else:
+            return (None, line)
+
+    def opt_datetime(self, line):
+        """
+        Optional date-time string
+        """
+        if line.startswith('"'):
+            try:
+                spam, date, rest = line.split('"',2)
+            except IndexError:
+                raise IllegalClientResponse("Malformed date-time")
+            return (date, rest[1:])
+        else:
+            return (None, line)
+
+    def opt_charset(self, line):
+        """
+        Optional charset of SEARCH command
+        """
+        if line[:7].upper() == 'CHARSET':
+            arg = line.split(' ',2)
+            if len(arg) == 1:
+                raise IllegalClientResponse("Missing charset identifier")
+            if len(arg) == 2:
+                arg.append('')
+            spam, arg, rest = arg
+            return (arg, rest)
+        else:
+            return (None, line)
+
+    def sendServerGreeting(self):
+        msg = '[CAPABILITY %s] %s' % (' '.join(self.listCapabilities()), self.IDENT)
+        self.sendPositiveResponse(message=msg)
+
+    def sendBadResponse(self, tag = None, message = ''):
+        self._respond('BAD', tag, message)
+
+    def sendPositiveResponse(self, tag = None, message = ''):
+        self._respond('OK', tag, message)
+
+    def sendNegativeResponse(self, tag = None, message = ''):
+        self._respond('NO', tag, message)
+
+    def sendUntaggedResponse(self, message, async=False):
+        if not async or (self.blocked is None):
+            self._respond(message, None, None)
+        else:
+            self._queuedAsync.append(message)
+
+    def sendContinuationRequest(self, msg = 'Ready for additional command text'):
+        if msg:
+            self.sendLine('+ ' + msg)
+        else:
+            self.sendLine('+')
+
+    def _respond(self, state, tag, message):
+        if state in ('OK', 'NO', 'BAD') and self._queuedAsync:
+            lines = self._queuedAsync
+            self._queuedAsync = []
+            for msg in lines:
+                self._respond(msg, None, None)
+        if not tag:
+            tag = '*'
+        if message:
+            self.sendLine(' '.join((tag, state, message)))
+        else:
+            self.sendLine(' '.join((tag, state)))
+
+    def listCapabilities(self):
+        caps = ['IMAP4rev1']
+        for c, v in self.capabilities().iteritems():
+            if v is None:
+                caps.append(c)
+            elif len(v):
+                caps.extend([('%s=%s' % (c, cap)) for cap in v])
+        return caps
+
+    def do_CAPABILITY(self, tag):
+        self.sendUntaggedResponse('CAPABILITY ' + ' '.join(self.listCapabilities()))
+        self.sendPositiveResponse(tag, 'CAPABILITY completed')
+
+    unauth_CAPABILITY = (do_CAPABILITY,)
+    auth_CAPABILITY = unauth_CAPABILITY
+    select_CAPABILITY = unauth_CAPABILITY
+    logout_CAPABILITY = unauth_CAPABILITY
+
+    def do_LOGOUT(self, tag):
+        self.sendUntaggedResponse('BYE Nice talking to you')
+        self.sendPositiveResponse(tag, 'LOGOUT successful')
+        self.transport.loseConnection()
+
+    unauth_LOGOUT = (do_LOGOUT,)
+    auth_LOGOUT = unauth_LOGOUT
+    select_LOGOUT = unauth_LOGOUT
+    logout_LOGOUT = unauth_LOGOUT
+
+    def do_NOOP(self, tag):
+        self.sendPositiveResponse(tag, 'NOOP No operation performed')
+
+    unauth_NOOP = (do_NOOP,)
+    auth_NOOP = unauth_NOOP
+    select_NOOP = unauth_NOOP
+    logout_NOOP = unauth_NOOP
+
+    def do_AUTHENTICATE(self, tag, args):
+        args = args.upper().strip()
+        if args not in self.challengers:
+            self.sendNegativeResponse(tag, 'AUTHENTICATE method unsupported')
+        else:
+            self.authenticate(self.challengers[args](), tag)
+
+    unauth_AUTHENTICATE = (do_AUTHENTICATE, arg_atom)
+
+    def authenticate(self, chal, tag):
+        if self.portal is None:
+            self.sendNegativeResponse(tag, 'Temporary authentication failure')
+            return
+
+        self._setupChallenge(chal, tag)
+
+    def _setupChallenge(self, chal, tag):
+        try:
+            challenge = chal.getChallenge()
+        except Exception, e:
+            self.sendBadResponse(tag, 'Server error: ' + str(e))
+        else:
+            coded = base64.encodestring(challenge)[:-1]
+            self.parseState = 'pending'
+            self._pendingLiteral = defer.Deferred()
+            self.sendContinuationRequest(coded)
+            self._pendingLiteral.addCallback(self.__cbAuthChunk, chal, tag)
+            self._pendingLiteral.addErrback(self.__ebAuthChunk, tag)
+
+    def __cbAuthChunk(self, result, chal, tag):
+        try:
+            uncoded = base64.decodestring(result)
+        except binascii.Error:
+            raise IllegalClientResponse("Malformed Response - not base64")
+
+        chal.setResponse(uncoded)
+        if chal.moreChallenges():
+            self._setupChallenge(chal, tag)
+        else:
+            self.portal.login(chal, None, IAccount).addCallbacks(
+                self.__cbAuthResp,
+                self.__ebAuthResp,
+                (tag,), None, (tag,), None
+            )
+
+    def __cbAuthResp(self, (iface, avatar, logout), tag):
+        assert iface is IAccount, "IAccount is the only supported interface"
+        self.account = avatar
+        self.state = 'auth'
+        self._onLogout = logout
+        self.sendPositiveResponse(tag, 'Authentication successful')
+        self.setTimeout(self.POSTAUTH_TIMEOUT)
+
+    def __ebAuthResp(self, failure, tag):
+        if failure.check(cred.error.UnauthorizedLogin):
+            self.sendNegativeResponse(tag, 'Authentication failed: unauthorized')
+        elif failure.check(cred.error.UnhandledCredentials):
+            self.sendNegativeResponse(tag, 'Authentication failed: server misconfigured')
+        else:
+            self.sendBadResponse(tag, 'Server error: login failed unexpectedly')
+            log.err(failure)
+
+    def __ebAuthChunk(self, failure, tag):
+        self.sendNegativeResponse(tag, 'Authentication failed: ' + str(failure.value))
+
+    def do_STARTTLS(self, tag):
+        if self.startedTLS:
+            self.sendNegativeResponse(tag, 'TLS already negotiated')
+        elif self.ctx and self.canStartTLS:
+            self.sendPositiveResponse(tag, 'Begin TLS negotiation now')
+            self.transport.startTLS(self.ctx)
+            self.startedTLS = True
+            self.challengers = self.challengers.copy()
+            if 'LOGIN' not in self.challengers:
+                self.challengers['LOGIN'] = LOGINCredentials
+            if 'PLAIN' not in self.challengers:
+                self.challengers['PLAIN'] = PLAINCredentials
+        else:
+            self.sendNegativeResponse(tag, 'TLS not available')
+
+    unauth_STARTTLS = (do_STARTTLS,)
+
+    def do_LOGIN(self, tag, user, passwd):
+        if 'LOGINDISABLED' in self.capabilities():
+            self.sendBadResponse(tag, 'LOGIN is disabled before STARTTLS')
+            return
+
+        maybeDeferred(self.authenticateLogin, user, passwd
+            ).addCallback(self.__cbLogin, tag
+            ).addErrback(self.__ebLogin, tag
+            )
+
+    unauth_LOGIN = (do_LOGIN, arg_astring, arg_astring)
+
+    def authenticateLogin(self, user, passwd):
+        """Lookup the account associated with the given parameters
+
+        Override this method to define the desired authentication behavior.
+
+        The default behavior is to defer authentication to C{self.portal}
+        if it is not None, or to deny the login otherwise.
+
+        @type user: C{str}
+        @param user: The username to lookup
+
+        @type passwd: C{str}
+        @param passwd: The password to login with
+        """
+        if self.portal:
+            return self.portal.login(
+                cred.credentials.UsernamePassword(user, passwd),
+                None, IAccount
+            )
+        raise cred.error.UnauthorizedLogin()
+
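+    # Illustrative sketch (hypothetical names): a subclass that bypasses the
+    # portal and accepts a single hard-coded account might override
+    # authenticateLogin like this, assuming SimpleAccount is some class
+    # providing IAccount:
+    #
+    #     class SingleUserServer(IMAP4Server):
+    #         def authenticateLogin(self, user, passwd):
+    #             if user == 'demo' and passwd == 'secret':
+    #                 # (interface, avatar, logout callable)
+    #                 return (IAccount, SimpleAccount(), lambda: None)
+    #             raise cred.error.UnauthorizedLogin()
+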
+    def __cbLogin(self, (iface, avatar, logout), tag):
+        if iface is not IAccount:
+            self.sendBadResponse(tag, 'Server error: login returned unexpected value')
+            log.err("__cbLogin called with %r, IAccount expected" % (iface,))
+        else:
+            self.account = avatar
+            self._onLogout = logout
+            self.sendPositiveResponse(tag, 'LOGIN succeeded')
+            self.state = 'auth'
+            self.setTimeout(self.POSTAUTH_TIMEOUT)
+
+    def __ebLogin(self, failure, tag):
+        if failure.check(cred.error.UnauthorizedLogin):
+            self.sendNegativeResponse(tag, 'LOGIN failed')
+        else:
+            self.sendBadResponse(tag, 'Server error: ' + str(failure.value))
+            log.err(failure)
+
+    def do_NAMESPACE(self, tag):
+        personal = public = shared = None
+        np = INamespacePresenter(self.account, None)
+        if np is not None:
+            personal = np.getPersonalNamespaces()
+            public = np.getSharedNamespaces()
+            shared = np.getSharedNamespaces()
+        self.sendUntaggedResponse('NAMESPACE ' + collapseNestedLists([personal, public, shared]))
+        self.sendPositiveResponse(tag, "NAMESPACE command completed")
+
+    auth_NAMESPACE = (do_NAMESPACE,)
+    select_NAMESPACE = auth_NAMESPACE
+
+    def _parseMbox(self, name):
+        if isinstance(name, unicode):
+            return name
+        try:
+            return name.decode('imap4-utf-7')
+        except:
+            log.err()
+            raise IllegalMailboxEncoding(name)
+
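+    # Mailbox names arrive in the modified UTF-7 encoding from RFC 3501,
+    # handled by the 'imap4-utf-7' codec this module registers.  A rough
+    # sketch of the round trip _parseMbox relies on:
+    #
+    #     name = 'Entw\xc3\xbcrfe'.decode('utf-8')   # u'Entw\xfcrfe'
+    #     wire = name.encode('imap4-utf-7')          # 'Entw&APw-rfe'
+    #     assert wire.decode('imap4-utf-7') == name
+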
+    def _selectWork(self, tag, name, rw, cmdName):
+        if self.mbox:
+            self.mbox.removeListener(self)
+            cmbx = ICloseableMailbox(self.mbox, None)
+            if cmbx is not None:
+                maybeDeferred(cmbx.close).addErrback(log.err)
+            self.mbox = None
+            self.state = 'auth'
+
+        name = self._parseMbox(name)
+        maybeDeferred(self.account.select, name, rw
+            ).addCallback(self._cbSelectWork, cmdName, tag
+            ).addErrback(self._ebSelectWork, cmdName, tag
+            )
+
+    def _ebSelectWork(self, failure, cmdName, tag):
+        self.sendBadResponse(tag, "%s failed: Server error" % (cmdName,))
+        log.err(failure)
+
+    def _cbSelectWork(self, mbox, cmdName, tag):
+        if mbox is None:
+            self.sendNegativeResponse(tag, 'No such mailbox')
+            return
+        if '\\noselect' in [s.lower() for s in mbox.getFlags()]:
+            self.sendNegativeResponse(tag, 'Mailbox cannot be selected')
+            return
+
+        flags = mbox.getFlags()
+        self.sendUntaggedResponse(str(mbox.getMessageCount()) + ' EXISTS')
+        self.sendUntaggedResponse(str(mbox.getRecentCount()) + ' RECENT')
+        self.sendUntaggedResponse('FLAGS (%s)' % ' '.join(flags))
+        self.sendPositiveResponse(None, '[UIDVALIDITY %d]' % mbox.getUIDValidity())
+
+        s = mbox.isWriteable() and 'READ-WRITE' or 'READ-ONLY'
+        mbox.addListener(self)
+        self.sendPositiveResponse(tag, '[%s] %s successful' % (s, cmdName))
+        self.state = 'select'
+        self.mbox = mbox
+
+    auth_SELECT = ( _selectWork, arg_astring, 1, 'SELECT' )
+    select_SELECT = auth_SELECT
+
+    auth_EXAMINE = ( _selectWork, arg_astring, 0, 'EXAMINE' )
+    select_EXAMINE = auth_EXAMINE
+
+
+    def do_IDLE(self, tag):
+        self.sendContinuationRequest(None)
+        self.parseTag = tag
+        self.lastState = self.parseState
+        self.parseState = 'idle'
+
+    def parse_idle(self, *args):
+        self.parseState = self.lastState
+        del self.lastState
+        self.sendPositiveResponse(self.parseTag, "IDLE terminated")
+        del self.parseTag
+
+    select_IDLE = ( do_IDLE, )
+    auth_IDLE = select_IDLE
+
+
+    def do_CREATE(self, tag, name):
+        name = self._parseMbox(name)
+        try:
+            result = self.account.create(name)
+        except MailboxException, c:
+            self.sendNegativeResponse(tag, str(c))
+        except:
+            self.sendBadResponse(tag, "Server error encountered while creating mailbox")
+            log.err()
+        else:
+            if result:
+                self.sendPositiveResponse(tag, 'Mailbox created')
+            else:
+                self.sendNegativeResponse(tag, 'Mailbox not created')
+
+    auth_CREATE = (do_CREATE, arg_astring)
+    select_CREATE = auth_CREATE
+
+    def do_DELETE(self, tag, name):
+        name = self._parseMbox(name)
+        if name.lower() == 'inbox':
+            self.sendNegativeResponse(tag, 'You cannot delete the inbox')
+            return
+        try:
+            self.account.delete(name)
+        except MailboxException, m:
+            self.sendNegativeResponse(tag, str(m))
+        except:
+            self.sendBadResponse(tag, "Server error encountered while deleting mailbox")
+            log.err()
+        else:
+            self.sendPositiveResponse(tag, 'Mailbox deleted')
+
+    auth_DELETE = (do_DELETE, arg_astring)
+    select_DELETE = auth_DELETE
+
+    def do_RENAME(self, tag, oldname, newname):
+        oldname, newname = [self._parseMbox(n) for n in oldname, newname]
+        if oldname.lower() == 'inbox' or newname.lower() == 'inbox':
+            self.sendNegativeResponse(tag, 'You cannot rename the inbox, or rename another mailbox to inbox.')
+            return
+        try:
+            self.account.rename(oldname, newname)
+        except TypeError:
+            self.sendBadResponse(tag, 'Invalid command syntax')
+        except MailboxException, m:
+            self.sendNegativeResponse(tag, str(m))
+        except:
+            self.sendBadResponse(tag, "Server error encountered while renaming mailbox")
+            log.err()
+        else:
+            self.sendPositiveResponse(tag, 'Mailbox renamed')
+
+    auth_RENAME = (do_RENAME, arg_astring, arg_astring)
+    select_RENAME = auth_RENAME
+
+    def do_SUBSCRIBE(self, tag, name):
+        name = self._parseMbox(name)
+        try:
+            self.account.subscribe(name)
+        except MailboxException, m:
+            self.sendNegativeResponse(tag, str(m))
+        except:
+            self.sendBadResponse(tag, "Server error encountered while subscribing to mailbox")
+            log.err()
+        else:
+            self.sendPositiveResponse(tag, 'Subscribed')
+
+    auth_SUBSCRIBE = (do_SUBSCRIBE, arg_astring)
+    select_SUBSCRIBE = auth_SUBSCRIBE
+
+    def do_UNSUBSCRIBE(self, tag, name):
+        name = self._parseMbox(name)
+        try:
+            self.account.unsubscribe(name)
+        except MailboxException, m:
+            self.sendNegativeResponse(tag, str(m))
+        except:
+            self.sendBadResponse(tag, "Server error encountered while unsubscribing from mailbox")
+            log.err()
+        else:
+            self.sendPositiveResponse(tag, 'Unsubscribed')
+
+    auth_UNSUBSCRIBE = (do_UNSUBSCRIBE, arg_astring)
+    select_UNSUBSCRIBE = auth_UNSUBSCRIBE
+
+    def _listWork(self, tag, ref, mbox, sub, cmdName):
+        mbox = self._parseMbox(mbox)
+        maybeDeferred(self.account.listMailboxes, ref, mbox
+            ).addCallback(self._cbListWork, tag, sub, cmdName
+            ).addErrback(self._ebListWork, tag
+            )
+
+    def _cbListWork(self, mailboxes, tag, sub, cmdName):
+        for (name, box) in mailboxes:
+            if not sub or self.account.isSubscribed(name):
+                flags = box.getFlags()
+                delim = box.getHierarchicalDelimiter()
+                resp = (DontQuoteMe(cmdName), map(DontQuoteMe, flags), delim, name.encode('imap4-utf-7'))
+                self.sendUntaggedResponse(collapseNestedLists(resp))
+        self.sendPositiveResponse(tag, '%s completed' % (cmdName,))
+
+    def _ebListWork(self, failure, tag):
+        self.sendBadResponse(tag, "Server error encountered while listing mailboxes.")
+        log.err(failure)
+
+    auth_LIST = (_listWork, arg_astring, arg_astring, 0, 'LIST')
+    select_LIST = auth_LIST
+
+    auth_LSUB = (_listWork, arg_astring, arg_astring, 1, 'LSUB')
+    select_LSUB = auth_LSUB
+
+    def do_STATUS(self, tag, mailbox, names):
+        mailbox = self._parseMbox(mailbox)
+        maybeDeferred(self.account.select, mailbox, 0
+            ).addCallback(self._cbStatusGotMailbox, tag, mailbox, names
+            ).addErrback(self._ebStatusGotMailbox, tag
+            )
+
+    def _cbStatusGotMailbox(self, mbox, tag, mailbox, names):
+        if mbox:
+            maybeDeferred(mbox.requestStatus, names).addCallbacks(
+                self.__cbStatus, self.__ebStatus,
+                (tag, mailbox), None, (tag, mailbox), None
+            )
+        else:
+            self.sendNegativeResponse(tag, "Could not open mailbox")
+
+    def _ebStatusGotMailbox(self, failure, tag):
+        self.sendBadResponse(tag, "Server error encountered while opening mailbox.")
+        log.err(failure)
+
+    auth_STATUS = (do_STATUS, arg_astring, arg_plist)
+    select_STATUS = auth_STATUS
+
+    def __cbStatus(self, status, tag, box):
+        line = ' '.join(['%s %s' % x for x in status.iteritems()])
+        self.sendUntaggedResponse('STATUS %s (%s)' % (box, line))
+        self.sendPositiveResponse(tag, 'STATUS complete')
+
+    def __ebStatus(self, failure, tag, box):
+        self.sendBadResponse(tag, 'STATUS %s failed: %s' % (box, str(failure.value)))
+
+    def do_APPEND(self, tag, mailbox, flags, date, message):
+        mailbox = self._parseMbox(mailbox)
+        maybeDeferred(self.account.select, mailbox
+            ).addCallback(self._cbAppendGotMailbox, tag, flags, date, message
+            ).addErrback(self._ebAppendGotMailbox, tag
+            )
+
+    def _cbAppendGotMailbox(self, mbox, tag, flags, date, message):
+        if not mbox:
+            self.sendNegativeResponse(tag, '[TRYCREATE] No such mailbox')
+            return
+
+        d = mbox.addMessage(message, flags, date)
+        d.addCallback(self.__cbAppend, tag, mbox)
+        d.addErrback(self.__ebAppend, tag)
+
+    def _ebAppendGotMailbox(self, failure, tag):
+        self.sendBadResponse(tag, "Server error encountered while opening mailbox.")
+        log.err(failure)
+
+    auth_APPEND = (do_APPEND, arg_astring, opt_plist, opt_datetime,
+                   arg_literal)
+    select_APPEND = auth_APPEND
+
+    def __cbAppend(self, result, tag, mbox):
+        self.sendUntaggedResponse('%d EXISTS' % mbox.getMessageCount())
+        self.sendPositiveResponse(tag, 'APPEND complete')
+
+    def __ebAppend(self, failure, tag):
+        self.sendBadResponse(tag, 'APPEND failed: ' + str(failure.value))
+
+    def do_CHECK(self, tag):
+        d = self.checkpoint()
+        if d is None:
+            self.__cbCheck(None, tag)
+        else:
+            d.addCallbacks(
+                self.__cbCheck,
+                self.__ebCheck,
+                callbackArgs=(tag,),
+                errbackArgs=(tag,)
+            )
+    select_CHECK = (do_CHECK,)
+
+    def __cbCheck(self, result, tag):
+        self.sendPositiveResponse(tag, 'CHECK completed')
+
+    def __ebCheck(self, failure, tag):
+        self.sendBadResponse(tag, 'CHECK failed: ' + str(failure.value))
+
+    def checkpoint(self):
+        """Called when the client issues a CHECK command.
+
+        This should perform any checkpoint operations required by the server.
+        It may be a long running operation, but may not block.  If it returns
+        a deferred, the client will only be informed of success (or failure)
+        when the deferred's callback (or errback) is invoked.
+        """
+        return None
+
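+    # Illustrative sketch (hypothetical backend): a subclass whose mailbox
+    # store needs an explicit flush could override checkpoint() to return a
+    # Deferred, so that CHECK only completes once the flush has finished:
+    #
+    #     class JournaledIMAP4Server(IMAP4Server):
+    #         def checkpoint(self):
+    #             # self.store is assumed to expose an asynchronous flush()
+    #             return maybeDeferred(self.store.flush)
+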
+    def do_CLOSE(self, tag):
+        d = None
+        if self.mbox.isWriteable():
+            d = maybeDeferred(self.mbox.expunge)
+        cmbx = ICloseableMailbox(self.mbox, None)
+        if cmbx is not None:
+            if d is not None:
+                d.addCallback(lambda result: cmbx.close())
+            else:
+                d = maybeDeferred(cmbx.close)
+        if d is not None:
+            d.addCallbacks(self.__cbClose, self.__ebClose, (tag,), None, (tag,), None)
+        else:
+            self.__cbClose(None, tag)
+
+    select_CLOSE = (do_CLOSE,)
+
+    def __cbClose(self, result, tag):
+        self.sendPositiveResponse(tag, 'CLOSE completed')
+        self.mbox.removeListener(self)
+        self.mbox = None
+        self.state = 'auth'
+
+    def __ebClose(self, failure, tag):
+        self.sendBadResponse(tag, 'CLOSE failed: ' + str(failure.value))
+
+    def do_EXPUNGE(self, tag):
+        if self.mbox.isWriteable():
+            maybeDeferred(self.mbox.expunge).addCallbacks(
+                self.__cbExpunge, self.__ebExpunge, (tag,), None, (tag,), None
+            )
+        else:
+            self.sendNegativeResponse(tag, 'EXPUNGE ignored on read-only mailbox')
+
+    select_EXPUNGE = (do_EXPUNGE,)
+
+    def __cbExpunge(self, result, tag):
+        for e in result:
+            self.sendUntaggedResponse('%d EXPUNGE' % e)
+        self.sendPositiveResponse(tag, 'EXPUNGE completed')
+
+    def __ebExpunge(self, failure, tag):
+        self.sendBadResponse(tag, 'EXPUNGE failed: ' + str(failure.value))
+        log.err(failure)
+
+    def do_SEARCH(self, tag, charset, query, uid=0):
+        sm = ISearchableMailbox(self.mbox, None)
+        if sm is not None:
+            maybeDeferred(sm.search, query, uid=uid
+                          ).addCallback(self.__cbSearch, tag, self.mbox, uid
+                          ).addErrback(self.__ebSearch, tag)
+        else:
+            # This is not the ideal way to get all messages; ideally there
+            # would be a method on mailboxes that returns all of them.
+            s = parseIdList('1:*')
+            maybeDeferred(self.mbox.fetch, s, uid=uid
+                          ).addCallback(self.__cbManualSearch,
+                                        tag, self.mbox, query, uid
+                          ).addErrback(self.__ebSearch, tag)
+
+
+    select_SEARCH = (do_SEARCH, opt_charset, arg_searchkeys)
+
+    def __cbSearch(self, result, tag, mbox, uid):
+        if uid:
+            result = map(mbox.getUID, result)
+        ids = ' '.join([str(i) for i in result])
+        self.sendUntaggedResponse('SEARCH ' + ids)
+        self.sendPositiveResponse(tag, 'SEARCH completed')
+
+
+    def __cbManualSearch(self, result, tag, mbox, query, uid,
+                         searchResults=None):
+        """
+        Apply the search filter to a set of messages. Send the response to the
+        client.
+
+        @type result: C{list} of C{tuple} of (C{int}, provider of
+            L{imap4.IMessage})
+        @param result: A list of two-tuples of messages with their sequence ids,
+            sorted by the ids in descending order.
+
+        @type tag: C{str}
+        @param tag: A command tag.
+
+        @type mbox: Provider of L{imap4.IMailbox}
+        @param mbox: The searched mailbox.
+
+        @type query: C{list}
+        @param query: A list representing the parsed form of the search query.
+
+        @param uid: A flag indicating whether the search is over message
+            sequence numbers or UIDs.
+
+        @type searchResults: C{list}
+        @param searchResults: The search results so far or C{None} if no
+            results yet.
+        """
+        if searchResults is None:
+            searchResults = []
+        i = 0
+
+        # result is a list of tuples (sequenceId, Message)
+        lastSequenceId = result and result[-1][0]
+        lastMessageId = result and result[-1][1].getUID()
+
+        for (i, (id, msg)) in zip(range(5), result):
+            # searchFilter and singleSearchStep will mutate the query.  Dang.
+            # Copy it here or else things will go poorly for subsequent
+            # messages.
+            if self._searchFilter(copy.deepcopy(query), id, msg,
+                                  lastSequenceId, lastMessageId):
+                if uid:
+                    searchResults.append(str(msg.getUID()))
+                else:
+                    searchResults.append(str(id))
+        if i == 4:
+            from twisted.internet import reactor
+            reactor.callLater(
+                0, self.__cbManualSearch, result[5:], tag, mbox, query, uid,
+                searchResults)
+        else:
+            if searchResults:
+                self.sendUntaggedResponse('SEARCH ' + ' '.join(searchResults))
+            self.sendPositiveResponse(tag, 'SEARCH completed')
+
+
+    def _searchFilter(self, query, id, msg, lastSequenceId, lastMessageId):
+        """
+        Pop search terms from the beginning of C{query} until there are none
+        left and apply them to the given message.
+
+        @param query: A list representing the parsed form of the search query.
+
+        @param id: The sequence number of the message being checked.
+
+        @param msg: The message being checked.
+
+        @type lastSequenceId: C{int}
+        @param lastSequenceId: The highest sequence number of any message in
+            the mailbox being searched.
+
+        @type lastMessageId: C{int}
+        @param lastMessageId: The highest UID of any message in the mailbox
+            being searched.
+
+        @return: Boolean indicating whether all of the query terms match the
+            message.
+        """
+        while query:
+            if not self._singleSearchStep(query, id, msg,
+                                          lastSequenceId, lastMessageId):
+                return False
+        return True
+
+
+    def _singleSearchStep(self, query, id, msg, lastSequenceId, lastMessageId):
+        """
+        Pop one search term from the beginning of C{query} (possibly more than
+        one element) and return whether it matches the given message.
+
+        @param query: A list representing the parsed form of the search query.
+
+        @param id: The sequence number of the message being checked.
+
+        @param msg: The message being checked.
+
+        @param lastSequenceId: The highest sequence number of any message in
+            the mailbox being searched.
+
+        @param lastMessageId: The highest UID of any message in the mailbox
+            being searched.
+
+        @return: Boolean indicating whether the query term matched the message.
+        """
+
+        q = query.pop(0)
+        if isinstance(q, list):
+            if not self._searchFilter(q, id, msg,
+                                      lastSequenceId, lastMessageId):
+                return False
+        else:
+            c = q.upper()
+            if not c[:1].isalpha():
+                # A search term may be a word like ALL, ANSWERED, BCC, etc (see
+                # below) or it may be a message sequence set.  Here we
+                # recognize a message sequence set "N:M".
+                messageSet = parseIdList(c, lastSequenceId)
+                return id in messageSet
+            else:
+                f = getattr(self, 'search_' + c, None)
+                if f is None:
+                    raise IllegalQueryError("Invalid search command %s" % c)
+
+                if c in self._requiresLastMessageInfo:
+                    result = f(query, id, msg, (lastSequenceId,
+                                                lastMessageId))
+                else:
+                    result = f(query, id, msg)
+
+                if not result:
+                    return False
+        return True
+
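+    # Worked sketch of how a parsed query is consumed: the search
+    # 'FLAGGED SINCE 01-Feb-2003' arrives as the list
+    #
+    #     query = ['FLAGGED', 'SINCE', '01-Feb-2003']
+    #
+    # _searchFilter pops 'FLAGGED' (search_FLAGGED needs no extra argument),
+    # then 'SINCE', whose handler pops the following date itself; the
+    # message matches only if every step returned True.
+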
+    def search_ALL(self, query, id, msg):
+        """
+        Returns C{True} if the message matches the ALL search key (always).
+
+        @type query: A C{list} of C{str}
+        @param query: A list representing the parsed query string.
+
+        @type id: C{int}
+        @param id: The sequence number of the message being checked.
+
+        @type msg: Provider of L{imap4.IMessage}
+        """
+        return True
+
+    def search_ANSWERED(self, query, id, msg):
+        """
+        Returns C{True} if the message has been answered.
+
+        @type query: A C{list} of C{str}
+        @param query: A list representing the parsed query string.
+
+        @type id: C{int}
+        @param id: The sequence number of the message being checked.
+
+        @type msg: Provider of L{imap4.IMessage}
+        """
+        return '\\Answered' in msg.getFlags()
+
+    def search_BCC(self, query, id, msg):
+        """
+        Returns C{True} if the message has a BCC address matching the query.
+
+        @type query: A C{list} of C{str}
+        @param query: A list whose first element is a BCC C{str}
+
+        @type id: C{int}
+        @param id: The sequence number of the message being checked.
+
+        @type msg: Provider of L{imap4.IMessage}
+        """
+        bcc = msg.getHeaders(False, 'bcc').get('bcc', '')
+        return bcc.lower().find(query.pop(0).lower()) != -1
+
+    def search_BEFORE(self, query, id, msg):
+        date = parseTime(query.pop(0))
+        return rfc822.parsedate(msg.getInternalDate()) < date
+
+    def search_BODY(self, query, id, msg):
+        body = query.pop(0).lower()
+        return text.strFile(body, msg.getBodyFile(), False)
+
+    def search_CC(self, query, id, msg):
+        cc = msg.getHeaders(False, 'cc').get('cc', '')
+        return cc.lower().find(query.pop(0).lower()) != -1
+
+    def search_DELETED(self, query, id, msg):
+        return '\\Deleted' in msg.getFlags()
+
+    def search_DRAFT(self, query, id, msg):
+        return '\\Draft' in msg.getFlags()
+
+    def search_FLAGGED(self, query, id, msg):
+        return '\\Flagged' in msg.getFlags()
+
+    def search_FROM(self, query, id, msg):
+        fm = msg.getHeaders(False, 'from').get('from', '')
+        return fm.lower().find(query.pop(0).lower()) != -1
+
+    def search_HEADER(self, query, id, msg):
+        hdr = query.pop(0).lower()
+        hdr = msg.getHeaders(False, hdr).get(hdr, '')
+        return hdr.lower().find(query.pop(0).lower()) != -1
+
+    def search_KEYWORD(self, query, id, msg):
+        query.pop(0)
+        return False
+
+    def search_LARGER(self, query, id, msg):
+        return int(query.pop(0)) < msg.getSize()
+
+    def search_NEW(self, query, id, msg):
+        return '\\Recent' in msg.getFlags() and '\\Seen' not in msg.getFlags()
+
+    def search_NOT(self, query, id, msg, (lastSequenceId, lastMessageId)):
+        """
+        Returns C{True} if the message does not match the query.
+
+        @type query: A C{list} of C{str}
+        @param query: A list representing the parsed form of the search query.
+
+        @type id: C{int}
+        @param id: The sequence number of the message being checked.
+
+        @type msg: Provider of L{imap4.IMessage}
+        @param msg: The message being checked.
+
+        @type lastSequenceId: C{int}
+        @param lastSequenceId: The highest sequence number of a message in the
+            mailbox.
+
+        @type lastMessageId: C{int}
+        @param lastMessageId: The highest UID of a message in the mailbox.
+        """
+        return not self._singleSearchStep(query, id, msg,
+                                          lastSequenceId, lastMessageId)
+
+    def search_OLD(self, query, id, msg):
+        return '\\Recent' not in msg.getFlags()
+
+    def search_ON(self, query, id, msg):
+        date = parseTime(query.pop(0))
+        return rfc822.parsedate(msg.getInternalDate()) == date
+
+    def search_OR(self, query, id, msg, (lastSequenceId, lastMessageId)):
+        """
+        Returns C{True} if the message matches any of the first two query
+        items.
+
+        @type query: A C{list} of C{str}
+        @param query: A list representing the parsed form of the search query.
+
+        @type id: C{int}
+        @param id: The sequence number of the message being checked.
+
+        @type msg: Provider of L{imap4.IMessage}
+        @param msg: The message being checked.
+
+        @type lastSequenceId: C{int}
+        @param lastSequenceId: The highest sequence number of a message in the
+                               mailbox.
+
+        @type lastMessageId: C{int}
+        @param lastMessageId: The highest UID of a message in the mailbox.
+        """
+        a = self._singleSearchStep(query, id, msg,
+                                   lastSequenceId, lastMessageId)
+        b = self._singleSearchStep(query, id, msg,
+                                   lastSequenceId, lastMessageId)
+        return a or b
+
+    def search_RECENT(self, query, id, msg):
+        return '\\Recent' in msg.getFlags()
+
+    def search_SEEN(self, query, id, msg):
+        return '\\Seen' in msg.getFlags()
+
+    def search_SENTBEFORE(self, query, id, msg):
+        """
+        Returns C{True} if the message date is earlier than the query date.
+
+        @type query: A C{list} of C{str}
+        @param query: A list whose first element starts with a stringified date
+            that is a fragment of an L{imap4.Query()}. The date must be in the
+            format 'DD-Mon-YYYY', for example '03-March-2003' or '03-Mar-2003'.
+
+        @type id: C{int}
+        @param id: The sequence number of the message being checked.
+
+        @type msg: Provider of L{imap4.IMessage}
+        """
+        date = msg.getHeaders(False, 'date').get('date', '')
+        date = rfc822.parsedate(date)
+        return date < parseTime(query.pop(0))
+
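+    # The query date uses the RFC 3501 'DD-Mon-YYYY' form; parseTime turns
+    # it into a time tuple comparable with rfc822.parsedate output, roughly:
+    #
+    #     parseTime('03-Mar-2003')[:3]    # -> (2003, 3, 3)
+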
+    def search_SENTON(self, query, id, msg):
+        """
+        Returns C{True} if the message date is the same as the query date.
+
+        @type query: A C{list} of C{str}
+        @param query: A list whose first element starts with a stringified date
+            that is a fragment of an L{imap4.Query()}. The date must be in the
+            format 'DD-Mon-YYYY', for example '03-March-2003' or '03-Mar-2003'.
+
+        @type msg: Provider of L{imap4.IMessage}
+        """
+        date = msg.getHeaders(False, 'date').get('date', '')
+        date = rfc822.parsedate(date)
+        return date[:3] == parseTime(query.pop(0))[:3]
+
+    def search_SENTSINCE(self, query, id, msg):
+        """
+        Returns C{True} if the message date is later than the query date.
+
+        @type query: A C{list} of C{str}
+        @param query: A list whose first element starts with a stringified date
+            that is a fragment of an L{imap4.Query()}. The date must be in the
+            format 'DD-Mon-YYYY', for example '03-March-2003' or '03-Mar-2003'.
+
+        @type msg: Provider of L{imap4.IMessage}
+        """
+        date = msg.getHeaders(False, 'date').get('date', '')
+        date = rfc822.parsedate(date)
+        return date > parseTime(query.pop(0))
+
+    def search_SINCE(self, query, id, msg):
+        date = parseTime(query.pop(0))
+        return rfc822.parsedate(msg.getInternalDate()) > date
+
+    def search_SMALLER(self, query, id, msg):
+        return int(query.pop(0)) > msg.getSize()
+
+    def search_SUBJECT(self, query, id, msg):
+        subj = msg.getHeaders(False, 'subject').get('subject', '')
+        return subj.lower().find(query.pop(0).lower()) != -1
+
+    def search_TEXT(self, query, id, msg):
+        # XXX - This must search headers too
+        body = query.pop(0).lower()
+        return text.strFile(body, msg.getBodyFile(), False)
+
+    def search_TO(self, query, id, msg):
+        to = msg.getHeaders(False, 'to').get('to', '')
+        return to.lower().find(query.pop(0).lower()) != -1
+
+    def search_UID(self, query, id, msg, (lastSequenceId, lastMessageId)):
+        """
+        Returns C{True} if the message UID is in the range defined by the
+        search query.
+
+        @type query: A C{list} of C{str}
+        @param query: A list representing the parsed form of the search
+            query. Its first element should be a C{str} that can be interpreted
+            as a sequence range, for example '2:4,5:*'.
+
+        @type id: C{int}
+        @param id: The sequence number of the message being checked.
+
+        @type msg: Provider of L{imap4.IMessage}
+        @param msg: The message being checked.
+
+        @type lastSequenceId: C{int}
+        @param lastSequenceId: The highest sequence number of a message in the
+            mailbox.
+
+        @type lastMessageId: C{int}
+        @param lastMessageId: The highest UID of a message in the mailbox.
+        """
+        c = query.pop(0)
+        m = parseIdList(c, lastMessageId)
+        return msg.getUID() in m
+
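+    # Sketch of the sequence-set handling used here: parseIdList expands a
+    # set such as '2:4,6' ('*' is resolved against lastMessageId) into a
+    # MessageSet that supports membership tests, e.g.:
+    #
+    #     uids = parseIdList('2:4,6', lastMessageId)
+    #     3 in uids    # True
+    #     5 in uids    # False
+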
+    def search_UNANSWERED(self, query, id, msg):
+        return '\\Answered' not in msg.getFlags()
+
+    def search_UNDELETED(self, query, id, msg):
+        return '\\Deleted' not in msg.getFlags()
+
+    def search_UNDRAFT(self, query, id, msg):
+        return '\\Draft' not in msg.getFlags()
+
+    def search_UNFLAGGED(self, query, id, msg):
+        return '\\Flagged' not in msg.getFlags()
+
+    def search_UNKEYWORD(self, query, id, msg):
+        query.pop(0)
+        return False
+
+    def search_UNSEEN(self, query, id, msg):
+        return '\\Seen' not in msg.getFlags()
+
+    def __ebSearch(self, failure, tag):
+        self.sendBadResponse(tag, 'SEARCH failed: ' + str(failure.value))
+        log.err(failure)
+
+    def do_FETCH(self, tag, messages, query, uid=0):
+        if query:
+            self._oldTimeout = self.setTimeout(None)
+            maybeDeferred(self.mbox.fetch, messages, uid=uid
+                ).addCallback(iter
+                ).addCallback(self.__cbFetch, tag, query, uid
+                ).addErrback(self.__ebFetch, tag
+                )
+        else:
+            self.sendPositiveResponse(tag, 'FETCH complete')
+
+    select_FETCH = (do_FETCH, arg_seqset, arg_fetchatt)
+
+    def __cbFetch(self, results, tag, query, uid):
+        if self.blocked is None:
+            self.blocked = []
+        try:
+            id, msg = results.next()
+        except StopIteration:
+            # The idle timeout was suspended while we delivered results,
+            # restore it now.
+            self.setTimeout(self._oldTimeout)
+            del self._oldTimeout
+
+            # All results have been processed, deliver completion notification.
+
+            # It's important to run this *after* resetting the timeout to "rig
+            # a race" in some test code. writing to the transport will
+            # synchronously call test code, which synchronously loses the
+            # connection, calling our connectionLost method, which cancels the
+            # timeout. We want to make sure that timeout is cancelled *after*
+            # we reset it above, so that the final state is no pending timed
+            # calls. This avoids reactor uncleanliness errors in the test
+            # suite.
+            # XXX: Perhaps loopback should be fixed to not call the user code
+            # synchronously in transport.write?
+            self.sendPositiveResponse(tag, 'FETCH completed')
+
+            # Instance state is now consistent again (ie, it is as though
+            # the fetch command never ran), so allow any pending blocked
+            # commands to execute.
+            self._unblock()
+        else:
+            self.spewMessage(id, msg, query, uid
+                ).addCallback(lambda _: self.__cbFetch(results, tag, query, uid)
+                ).addErrback(self.__ebSpewMessage
+                )
+
+    def __ebSpewMessage(self, failure):
+        # This indicates a programming error.
+        # There's no reliable way to indicate anything to the client, since we
+        # may have already written an arbitrary amount of data in response to
+        # the command.
+        log.err(failure)
+        self.transport.loseConnection()
+
+    def spew_envelope(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        _w('ENVELOPE ' + collapseNestedLists([getEnvelope(msg)]))
+
+    def spew_flags(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        _w('FLAGS ' + '(%s)' % (' '.join(msg.getFlags())))
+
+    def spew_internaldate(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        idate = msg.getInternalDate()
+        ttup = rfc822.parsedate_tz(idate)
+        if ttup is None:
+            log.msg("%d:%r: unpareseable internaldate: %r" % (id, msg, idate))
+            raise IMAP4Exception("Internal failure generating INTERNALDATE")
+
+        # need to specify the month manually, as strftime depends on locale
+        strdate = time.strftime("%d-%%s-%Y %H:%M:%S ", ttup[:9])
+        odate = strdate % (_MONTH_NAMES[ttup[1]],)
+        if ttup[9] is None:
+            odate = odate + "+0000"
+        else:
+            if ttup[9] >= 0:
+                sign = "+"
+            else:
+                sign = "-"
+            odate = odate + sign + str(((abs(ttup[9]) // 3600) * 100 + (abs(ttup[9]) % 3600) // 60)).zfill(4)
+        _w('INTERNALDATE ' + _quote(odate))
+
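+    # Worked example of the zone arithmetic above: for a parsed offset of
+    # ttup[9] = -18000 seconds (UTC-5), (18000 // 3600) * 100 +
+    # (18000 % 3600) // 60 == 500, which zfill(4) renders as '0500', giving
+    # the suffix '-0500' and an INTERNALDATE such as
+    # '17-May-2014 20:31:24 -0500'.
+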
+    def spew_rfc822header(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        hdrs = _formatHeaders(msg.getHeaders(True))
+        _w('RFC822.HEADER ' + _literal(hdrs))
+
+    def spew_rfc822text(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        _w('RFC822.TEXT ')
+        _f()
+        return FileProducer(msg.getBodyFile()
+            ).beginProducing(self.transport
+            )
+
+    def spew_rfc822size(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        _w('RFC822.SIZE ' + str(msg.getSize()))
+
+    def spew_rfc822(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        _w('RFC822 ')
+        _f()
+        mf = IMessageFile(msg, None)
+        if mf is not None:
+            return FileProducer(mf.open()
+                ).beginProducing(self.transport
+                )
+        return MessageProducer(msg, None, self._scheduler
+            ).beginProducing(self.transport
+            )
+
+    def spew_uid(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        _w('UID ' + str(msg.getUID()))
+
+    def spew_bodystructure(self, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        _w('BODYSTRUCTURE ' + collapseNestedLists([getBodyStructure(msg, True)]))
+
+    def spew_body(self, part, id, msg, _w=None, _f=None):
+        if _w is None:
+            _w = self.transport.write
+        for p in part.part:
+            if msg.isMultipart():
+                msg = msg.getSubPart(p)
+            elif p > 0:
+                # Non-multipart messages have an implicit first part but no
+                # other parts - reject any request for any other part.
+                raise TypeError("Requested subpart of non-multipart message")
+
+        if part.header:
+            hdrs = msg.getHeaders(part.header.negate, *part.header.fields)
+            hdrs = _formatHeaders(hdrs)
+            _w(str(part) + ' ' + _literal(hdrs))
+        elif part.text:
+            _w(str(part) + ' ')
+            _f()
+            return FileProducer(msg.getBodyFile()
+                ).beginProducing(self.transport
+                )
+        elif part.mime:
+            hdrs = _formatHeaders(msg.getHeaders(True))
+            _w(str(part) + ' ' + _literal(hdrs))
+        elif part.empty:
+            _w(str(part) + ' ')
+            _f()
+            if part.part:
+                return FileProducer(msg.getBodyFile()
+                    ).beginProducing(self.transport
+                    )
+            else:
+                mf = IMessageFile(msg, None)
+                if mf is not None:
+                    return FileProducer(mf.open()).beginProducing(self.transport)
+                return MessageProducer(msg, None, self._scheduler).beginProducing(self.transport)
+
+        else:
+            _w('BODY ' + collapseNestedLists([getBodyStructure(msg)]))
+
+    def spewMessage(self, id, msg, query, uid):
+        wbuf = WriteBuffer(self.transport)
+        write = wbuf.write
+        flush = wbuf.flush
+        def start():
+            write('* %d FETCH (' % (id,))
+        def finish():
+            write(')\r\n')
+        def space():
+            write(' ')
+
+        def spew():
+            seenUID = False
+            start()
+            for part in query:
+                if part.type == 'uid':
+                    seenUID = True
+                if part.type == 'body':
+                    yield self.spew_body(part, id, msg, write, flush)
+                else:
+                    f = getattr(self, 'spew_' + part.type)
+                    yield f(id, msg, write, flush)
+                if part is not query[-1]:
+                    space()
+            if uid and not seenUID:
+                space()
+                yield self.spew_uid(id, msg, write, flush)
+            finish()
+            flush()
+        return self._scheduler(spew())
+
+    def __ebFetch(self, failure, tag):
+        self.setTimeout(self._oldTimeout)
+        del self._oldTimeout
+        log.err(failure)
+        self.sendBadResponse(tag, 'FETCH failed: ' + str(failure.value))
+
+    def do_STORE(self, tag, messages, mode, flags, uid=0):
+        mode = mode.upper()
+        silent = mode.endswith('SILENT')
+        if mode.startswith('+'):
+            mode = 1
+        elif mode.startswith('-'):
+            mode = -1
+        else:
+            mode = 0
+
+        maybeDeferred(self.mbox.store, messages, flags, mode, uid=uid).addCallbacks(
+            self.__cbStore, self.__ebStore, (tag, self.mbox, uid, silent), None, (tag,), None
+        )
+
+    select_STORE = (do_STORE, arg_seqset, arg_atom, arg_flaglist)
+
+    def __cbStore(self, result, tag, mbox, uid, silent):
+        if result and not silent:
+              for (k, v) in result.iteritems():
+                  if uid:
+                      uidstr = ' UID %d' % mbox.getUID(k)
+                  else:
+                      uidstr = ''
+                  self.sendUntaggedResponse('%d FETCH (FLAGS (%s)%s)' %
+                                            (k, ' '.join(v), uidstr))
+        self.sendPositiveResponse(tag, 'STORE completed')
+
+    def __ebStore(self, failure, tag):
+        self.sendBadResponse(tag, 'Server error: ' + str(failure.value))
+
+    def do_COPY(self, tag, messages, mailbox, uid=0):
+        mailbox = self._parseMbox(mailbox)
+        maybeDeferred(self.account.select, mailbox
+            ).addCallback(self._cbCopySelectedMailbox, tag, messages, mailbox, uid
+            ).addErrback(self._ebCopySelectedMailbox, tag
+            )
+    select_COPY = (do_COPY, arg_seqset, arg_astring)
+
+    def _cbCopySelectedMailbox(self, mbox, tag, messages, mailbox, uid):
+        if not mbox:
+            self.sendNegativeResponse(tag, 'No such mailbox: ' + mailbox)
+        else:
+            maybeDeferred(self.mbox.fetch, messages, uid
+                ).addCallback(self.__cbCopy, tag, mbox
+                ).addCallback(self.__cbCopied, tag, mbox
+                ).addErrback(self.__ebCopy, tag
+                )
+
+    def _ebCopySelectedMailbox(self, failure, tag):
+        self.sendBadResponse(tag, 'Server error: ' + str(failure.value))
+
+    def __cbCopy(self, messages, tag, mbox):
+        # XXX - This should handle failures with a rollback or something
+        addedDeferreds = []
+        addedIDs = []
+        failures = []
+
+        fastCopyMbox = IMessageCopier(mbox, None)
+        for (id, msg) in messages:
+            if fastCopyMbox is not None:
+                d = maybeDeferred(fastCopyMbox.copy, msg)
+                addedDeferreds.append(d)
+                continue
+
+            # XXX - The following should be an implementation of IMessageCopier.copy
+            # on an IMailbox->IMessageCopier adapter.
+
+            flags = msg.getFlags()
+            date = msg.getInternalDate()
+
+            body = IMessageFile(msg, None)
+            if body is not None:
+                bodyFile = body.open()
+                d = maybeDeferred(mbox.addMessage, bodyFile, flags, date)
+            else:
+                def rewind(f):
+                    f.seek(0)
+                    return f
+                buffer = tempfile.TemporaryFile()
+                d = MessageProducer(msg, buffer, self._scheduler
+                    ).beginProducing(None
+                    ).addCallback(lambda _, b=buffer, f=flags, d=date: mbox.addMessage(rewind(b), f, d)
+                    )
+            addedDeferreds.append(d)
+        return defer.DeferredList(addedDeferreds)
+
+    def __cbCopied(self, deferredIds, tag, mbox):
+        ids = []
+        failures = []
+        for (status, result) in deferredIds:
+            if status:
+                ids.append(result)
+            else:
+                failures.append(result.value)
+        if failures:
+            self.sendNegativeResponse(tag, '[ALERT] Some messages were not copied')
+        else:
+            self.sendPositiveResponse(tag, 'COPY completed')
+
+    def __ebCopy(self, failure, tag):
+        self.sendBadResponse(tag, 'COPY failed: ' + str(failure.value))
+        log.err(failure)
+
+    def do_UID(self, tag, command, line):
+        command = command.upper()
+
+        if command not in ('COPY', 'FETCH', 'STORE', 'SEARCH'):
+            raise IllegalClientResponse(command)
+
+        self.dispatchCommand(tag, command, line, uid=1)
+
+    select_UID = (do_UID, arg_atom, arg_line)
+    #
+    # IMailboxListener implementation
+    #
+    def modeChanged(self, writeable):
+        if writeable:
+            self.sendUntaggedResponse(message='[READ-WRITE]', async=True)
+        else:
+            self.sendUntaggedResponse(message='[READ-ONLY]', async=True)
+
+    def flagsChanged(self, newFlags):
+        for (mId, flags) in newFlags.iteritems():
+            msg = '%d FETCH (FLAGS (%s))' % (mId, ' '.join(flags))
+            self.sendUntaggedResponse(msg, async=True)
+
+    def newMessages(self, exists, recent):
+        if exists is not None:
+            self.sendUntaggedResponse('%d EXISTS' % exists, async=True)
+        if recent is not None:
+            self.sendUntaggedResponse('%d RECENT' % recent, async=True)
+
+
+class UnhandledResponse(IMAP4Exception): pass
+
+class NegativeResponse(IMAP4Exception): pass
+
+class NoSupportedAuthentication(IMAP4Exception):
+    def __init__(self, serverSupports, clientSupports):
+        IMAP4Exception.__init__(self, 'No supported authentication schemes available')
+        self.serverSupports = serverSupports
+        self.clientSupports = clientSupports
+
+    def __str__(self):
+        return (IMAP4Exception.__str__(self)
+            + ': Server supports %r, client supports %r'
+            % (self.serverSupports, self.clientSupports))
+
+class IllegalServerResponse(IMAP4Exception): pass
+
+TIMEOUT_ERROR = error.TimeoutError()
+
+class IMAP4Client(basic.LineReceiver, policies.TimeoutMixin):
+    """IMAP4 client protocol implementation
+
+    @ivar state: A string representing the state the connection is currently
+    in.
+    """
+    implements(IMailboxListener)
+
+    tags = None
+    waiting = None
+    queued = None
+    tagID = 1
+    state = None
+
+    startedTLS = False
+
+    # Number of seconds to wait before timing out a connection.
+    # If the number is <= 0 no timeout checking will be performed.
+    timeout = 0
+
+    # Capabilities are not allowed to change during the session
+    # So cache the first response and use that for all later
+    # lookups
+    _capCache = None
+
+    _memoryFileLimit = 1024 * 1024 * 10
+
+    # Authentication is pluggable.  This maps names to IClientAuthentication
+    # objects.
+    authenticators = None
+
+    STATUS_CODES = ('OK', 'NO', 'BAD', 'PREAUTH', 'BYE')
+
+    STATUS_TRANSFORMATIONS = {
+        'MESSAGES': int, 'RECENT': int, 'UNSEEN': int
+    }
+
+    context = None
+
+    def __init__(self, contextFactory = None):
+        self.tags = {}
+        self.queued = []
+        self.authenticators = {}
+        self.context = contextFactory
+
+        self._tag = None
+        self._parts = None
+        self._lastCmd = None
+
+    def registerAuthenticator(self, auth):
+        """Register a new form of authentication
+
+        When invoking the authenticate() method of IMAP4Client, the first
+        matching authentication scheme found will be used.  The ordering is
+        that in which the server lists its supported authentication schemes.
+
+        @type auth: Implementor of C{IClientAuthentication}
+        @param auth: The object to use to perform the client
+        side of this authentication scheme.
+        """
+        self.authenticators[auth.getName().upper()] = auth
+
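+    # Usage sketch: CramMD5ClientAuthenticator (an IClientAuthentication
+    # implementation defined in this module) is registered under its own
+    # name, so a caller might do, before invoking authenticate():
+    #
+    #     proto = IMAP4Client()
+    #     proto.registerAuthenticator(CramMD5ClientAuthenticator('alice'))
+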
+    def rawDataReceived(self, data):
+        if self.timeout > 0:
+            self.resetTimeout()
+
+        self._pendingSize -= len(data)
+        if self._pendingSize > 0:
+            self._pendingBuffer.write(data)
+        else:
+            passon = ''
+            if self._pendingSize < 0:
+                data, passon = data[:self._pendingSize], data[self._pendingSize:]
+            self._pendingBuffer.write(data)
+            rest = self._pendingBuffer
+            self._pendingBuffer = None
+            self._pendingSize = None
+            rest.seek(0, 0)
+            self._parts.append(rest.read())
+            self.setLineMode(passon.lstrip('\r\n'))
+
+#    def sendLine(self, line):
+#        print 'S:', repr(line)
+#        return basic.LineReceiver.sendLine(self, line)
+
+    def _setupForLiteral(self, rest, octets):
+        self._pendingBuffer = self.messageFile(octets)
+        self._pendingSize = octets
+        if self._parts is None:
+            self._parts = [rest, '\r\n']
+        else:
+            self._parts.extend([rest, '\r\n'])
+        self.setRawMode()
+
+    def connectionMade(self):
+        if self.timeout > 0:
+            self.setTimeout(self.timeout)
+
+    def connectionLost(self, reason):
+        """We are no longer connected"""
+        if self.timeout > 0:
+            self.setTimeout(None)
+        if self.queued is not None:
+            queued = self.queued
+            self.queued = None
+            for cmd in queued:
+                cmd.defer.errback(reason)
+        if self.tags is not None:
+            tags = self.tags
+            self.tags = None
+            for cmd in tags.itervalues():
+                if cmd is not None and cmd.defer is not None:
+                    cmd.defer.errback(reason)
+
+
+    def lineReceived(self, line):
+        """
+        Attempt to parse a single line from the server.
+
+        @type line: C{str}
+        @param line: The line from the server, without the line delimiter.
+
+        @raise IllegalServerResponse: If the line or some part of the line
+            does not represent an allowed message from the server at this time.
+        """
+#        print 'C: ' + repr(line)
+        if self.timeout > 0:
+            self.resetTimeout()
+
+        lastPart = line.rfind('{')
+        if lastPart != -1:
+            lastPart = line[lastPart + 1:]
+            if lastPart.endswith('}'):
+                # It's a literal a-comin' in
+                try:
+                    octets = int(lastPart[:-1])
+                except ValueError:
+                    raise IllegalServerResponse(line)
+                if self._parts is None:
+                    self._tag, parts = line.split(None, 1)
+                else:
+                    parts = line
+                self._setupForLiteral(parts, octets)
+                return
+
+        if self._parts is None:
+            # It isn't a literal at all
+            self._regularDispatch(line)
+        else:
+            # If an expression is in progress, no tag is required here
+            # Since we didn't find a literal indicator, this expression
+            # is done.
+            self._parts.append(line)
+            tag, rest = self._tag, ''.join(self._parts)
+            self._tag = self._parts = None
+            self.dispatchCommand(tag, rest)
+
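+    # Sketch of the literal handling above: an untagged response such as
+    #
+    #     * 1 FETCH (RFC822.TEXT {12}
+    #
+    # ends with a '{12}' literal marker, so lineReceived switches to raw
+    # mode, rawDataReceived buffers exactly 12 octets of message data, and
+    # the buffered parts are rejoined before dispatchCommand finally runs.
+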
+    def timeoutConnection(self):
+        if self._lastCmd and self._lastCmd.defer is not None:
+            d, self._lastCmd.defer = self._lastCmd.defer, None
+            d.errback(TIMEOUT_ERROR)
+
+        if self.queued:
+            for cmd in self.queued:
+                if cmd.defer is not None:
+                    d, cmd.defer = cmd.defer, None
+                    d.errback(TIMEOUT_ERROR)
+
+        self.transport.loseConnection()
+
+    def _regularDispatch(self, line):
+        parts = line.split(None, 1)
+        if len(parts) != 2:
+            parts.append('')
+        tag, rest = parts
+        self.dispatchCommand(tag, rest)
+
+    def messageFile(self, octets):
+        """Create a file to which an incoming message may be written.
+
+        @type octets: C{int}
+        @param octets: The number of octets which will be written to the file
+
+        @rtype: Any object which implements C{write(string)} and
+        C{seek(int, int)}
+        @return: A file-like object
+        """
+        if octets > self._memoryFileLimit:
+            return tempfile.TemporaryFile()
+        else:
+            return StringIO.StringIO()
+
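+    # Illustrative sketch: a client that always wants literals buffered on
+    # disk, regardless of _memoryFileLimit, could override messageFile:
+    #
+    #     class DiskBufferingIMAP4Client(IMAP4Client):
+    #         def messageFile(self, octets):
+    #             return tempfile.TemporaryFile()
+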
+    def makeTag(self):
+        tag = '%0.4X' % self.tagID
+        self.tagID += 1
+        return tag
+
+    def dispatchCommand(self, tag, rest):
+        if self.state is None:
+            f = self.response_UNAUTH
+        else:
+            f = getattr(self, 'response_' + self.state.upper(), None)
+        if f:
+            try:
+                f(tag, rest)
+            except:
+                log.err()
+                self.transport.loseConnection()
+        else:
+            log.err("Cannot dispatch: %s, %s, %s" % (self.state, tag, rest))
+            self.transport.loseConnection()
+
+    def response_UNAUTH(self, tag, rest):
+        if self.state is None:
+            # This is the server greeting.
+            status, rest = rest.split(None, 1)
+            if status.upper() == 'OK':
+                self.state = 'unauth'
+            elif status.upper() == 'PREAUTH':
+                self.state = 'auth'
+            else:
+                # XXX - This is rude.
+                self.transport.loseConnection()
+                raise IllegalServerResponse(tag + ' ' + rest)
+
+            b, e = rest.find('['), rest.find(']')
+            if b != -1 and e != -1:
+                self.serverGreeting(
+                    self.__cbCapabilities(
+                        ([parseNestedParens(rest[b + 1:e])], None)))
+            else:
+                self.serverGreeting(None)
+        else:
+            self._defaultHandler(tag, rest)
+
+    def response_AUTH(self, tag, rest):
+        self._defaultHandler(tag, rest)
+
+    def _defaultHandler(self, tag, rest):
+        if tag == '*' or tag == '+':
+            if not self.waiting:
+                self._extraInfo([parseNestedParens(rest)])
+            else:
+                cmd = self.tags[self.waiting]
+                if tag == '+':
+                    cmd.continuation(rest)
+                else:
+                    cmd.lines.append(rest)
+        else:
+            try:
+                cmd = self.tags[tag]
+            except KeyError:
+                # XXX - This is rude.
+                self.transport.loseConnection()
+                raise IllegalServerResponse(tag + ' ' + rest)
+            else:
+                status, line = rest.split(None, 1)
+                if status == 'OK':
+                    # Give them this last line, too
+                    cmd.finish(rest, self._extraInfo)
+                else:
+                    cmd.defer.errback(IMAP4Exception(line))
+                del self.tags[tag]
+                self.waiting = None
+                self._flushQueue()
+
+    def _flushQueue(self):
+        if self.queued:
+            cmd = self.queued.pop(0)
+            t = self.makeTag()
+            self.tags[t] = cmd
+            self.sendLine(cmd.format(t))
+            self.waiting = t
+
+    def _extraInfo(self, lines):
+        # XXX - This is terrible.
+        # XXX - Also, this should collapse temporally proximate calls into single
+        #       invocations of IMailboxListener methods, where possible.
+        flags = {}
+        recent = exists = None
+        for response in lines:
+            elements = len(response)
+            if elements == 1 and response[0] == ['READ-ONLY']:
+                self.modeChanged(False)
+            elif elements == 1 and response[0] == ['READ-WRITE']:
+                self.modeChanged(True)
+            elif elements == 2 and response[1] == 'EXISTS':
+                exists = int(response[0])
+            elif elements == 2 and response[1] == 'RECENT':
+                recent = int(response[0])
+            elif elements == 3 and response[1] == 'FETCH':
+                mId = int(response[0])
+                values = self._parseFetchPairs(response[2])
+                flags.setdefault(mId, []).extend(values.get('FLAGS', ()))
+            else:
+                log.msg('Unhandled unsolicited response: %s' % (response,))
+
+        if flags:
+            self.flagsChanged(flags)
+        if recent is not None or exists is not None:
+            self.newMessages(exists, recent)
+
+    def sendCommand(self, cmd):
+        cmd.defer = defer.Deferred()
+        if self.waiting:
+            self.queued.append(cmd)
+            return cmd.defer
+        t = self.makeTag()
+        self.tags[t] = cmd
+        self.sendLine(cmd.format(t))
+        self.waiting = t
+        self._lastCmd = cmd
+        return cmd.defer
+
+    def getCapabilities(self, useCache=1):
+        """Request the capabilities available on this server.
+
+        This command is allowed in any state of connection.
+
+        @type useCache: C{bool}
+        @param useCache: Specify whether to use the capability-cache or to
+        re-retrieve the capabilities from the server.  Server capabilities
+        should never change, so for normal use, this flag should never be
+        false.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback will be invoked with a
+        dictionary mapping capability types to lists of supported
+        mechanisms, or to None if a support list is not applicable.
+        """
+        if useCache and self._capCache is not None:
+            return defer.succeed(self._capCache)
+        cmd = 'CAPABILITY'
+        resp = ('CAPABILITY',)
+        d = self.sendCommand(Command(cmd, wantResponse=resp))
+        d.addCallback(self.__cbCapabilities)
+        return d
+
+    def __cbCapabilities(self, (lines, tagline)):
+        caps = {}
+        for rest in lines:
+            for cap in rest[1:]:
+                parts = cap.split('=', 1)
+                if len(parts) == 1:
+                    category, value = parts[0], None
+                else:
+                    category, value = parts
+                caps.setdefault(category, []).append(value)
+
+        # Preserve a non-ideal API for backwards compatibility.  It would
+        # probably be entirely sensible to have an object with a wider API than
+        # dict here so this could be presented less insanely.
+        for category in caps:
+            if caps[category] == [None]:
+                caps[category] = None
+        self._capCache = caps
+        return caps
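+
+    # Usage sketch for getCapabilities() (illustrative only; `proto` stands
+    # for an assumed, already-connected IMAP4Client instance):
+    #
+    #     def cbCaps(caps):
+    #         # e.g. {'IMAP4rev1': None, 'AUTH': ['PLAIN', 'LOGIN']}
+    #         return 'STARTTLS' in caps
+    #     proto.getCapabilities().addCallback(cbCaps)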
+
+    def logout(self):
+        """Inform the server that we are done with the connection.
+
+        This command is allowed in any state of connection.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback will be invoked with None
+        when the proper server acknowledgement has been received.
+        """
+        d = self.sendCommand(Command('LOGOUT', wantResponse=('BYE',)))
+        d.addCallback(self.__cbLogout)
+        return d
+
+    def __cbLogout(self, (lines, tagline)):
+        self.transport.loseConnection()
+        # We don't particularly care what the server said
+        return None
+
+
+    def noop(self):
+        """Perform no operation.
+
+        This command is allowed in any state of connection.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback will be invoked with a list
+        of untagged status updates the server responds with.
+        """
+        d = self.sendCommand(Command('NOOP'))
+        d.addCallback(self.__cbNoop)
+        return d
+
+    def __cbNoop(self, (lines, tagline)):
+        # Conceivably, this is elidable.
+        # It is, after all, a no-op.
+        return lines
+
+    def startTLS(self, contextFactory=None):
+        """
+        Initiates a 'STARTTLS' request and negotiates the TLS / SSL
+        Handshake.
+
+        @param contextFactory: The TLS / SSL Context Factory to
+        leverage.  If the contextFactory is None the IMAP4Client will
+        either use the current TLS / SSL Context Factory or attempt to
+        create a new one.
+
+        @type contextFactory: C{ssl.ClientContextFactory}
+
+        @return: A Deferred which fires when the transport has been
+        secured according to the given contextFactory, or which fails
+        if the transport cannot be secured.
+        """
+        assert not self.startedTLS, "Client and Server are currently communicating via TLS"
+
+        if contextFactory is None:
+            contextFactory = self._getContextFactory()
+
+        if contextFactory is None:
+            return defer.fail(IMAP4Exception(
+                "IMAP4Client requires a TLS context to "
+                "initiate the STARTTLS handshake"))
+
+        if 'STARTTLS' not in self._capCache:
+            return defer.fail(IMAP4Exception(
+                "Server does not support secure communication "
+                "via TLS / SSL"))
+
+        tls = interfaces.ITLSTransport(self.transport, None)
+        if tls is None:
+            return defer.fail(IMAP4Exception(
+                "IMAP4Client transport does not implement "
+                "interfaces.ITLSTransport"))
+
+        d = self.sendCommand(Command('STARTTLS'))
+        d.addCallback(self._startedTLS, contextFactory)
+        d.addCallback(lambda _: self.getCapabilities())
+        return d
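+
+    # Usage sketch for startTLS() (illustrative only; `proto` is an assumed
+    # connected client and the secret value is a placeholder): the Deferred
+    # fires with the re-fetched capability dict once the transport is secured.
+    #
+    #     d = proto.startTLS()
+    #     d.addCallback(lambda caps: proto.authenticate('secret'))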
+
+
+    def authenticate(self, secret):
+        """Attempt to enter the authenticated state with the server
+
+        This command is allowed in the Non-Authenticated state.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if the authentication
+        succeeds and whose errback will be invoked otherwise.
+        """
+        if self._capCache is None:
+            d = self.getCapabilities()
+        else:
+            d = defer.succeed(self._capCache)
+        d.addCallback(self.__cbAuthenticate, secret)
+        return d
+
+    def __cbAuthenticate(self, caps, secret):
+        auths = caps.get('AUTH', ())
+        for scheme in auths:
+            if scheme.upper() in self.authenticators:
+                cmd = Command('AUTHENTICATE', scheme, (),
+                              self.__cbContinueAuth, scheme,
+                              secret)
+                return self.sendCommand(cmd)
+
+        if self.startedTLS:
+            return defer.fail(NoSupportedAuthentication(
+                auths, self.authenticators.keys()))
+        else:
+            def ebStartTLS(err):
+                err.trap(IMAP4Exception)
+                # We couldn't negotiate TLS for some reason
+                return defer.fail(NoSupportedAuthentication(
+                    auths, self.authenticators.keys()))
+
+            d = self.startTLS()
+            d.addErrback(ebStartTLS)
+            d.addCallback(lambda _: self.getCapabilities())
+            d.addCallback(self.__cbAuthTLS, secret)
+            return d
+
+
+    def __cbContinueAuth(self, rest, scheme, secret):
+        try:
+            chal = base64.decodestring(rest + '\n')
+        except binascii.Error:
+            self.sendLine('*')
+            raise IllegalServerResponse(rest)
+        else:
+            auth = self.authenticators[scheme]
+            chal = auth.challengeResponse(secret, chal)
+            self.sendLine(base64.encodestring(chal).strip())
+
+    def __cbAuthTLS(self, caps, secret):
+        auths = caps.get('AUTH', ())
+        for scheme in auths:
+            if scheme.upper() in self.authenticators:
+                cmd = Command('AUTHENTICATE', scheme, (),
+                              self.__cbContinueAuth, scheme,
+                              secret)
+                return self.sendCommand(cmd)
+        raise NoSupportedAuthentication(auths, self.authenticators.keys())
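+
+    # Usage sketch for authenticate() (illustrative only; `proto` and the
+    # password value are assumptions): an authenticator for one of the
+    # server's advertised AUTH schemes must already be present in
+    # self.authenticators for the negotiation above to succeed.
+    #
+    #     d = proto.authenticate('my-password')
+    #     d.addErrback(log.err)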
+
+
+    def login(self, username, password):
+        """Authenticate with the server using a username and password
+
+        This command is allowed in the Non-Authenticated state.  If the
+        server supports the STARTTLS capability and our transport supports
+        TLS, TLS is negotiated before the login command is issued.
+
+        A more secure way to log in is to use C{startTLS} or
+        C{authenticate} or both.
+
+        @type username: C{str}
+        @param username: The username to log in with
+
+        @type password: C{str}
+        @param password: The password to log in with
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if login is successful
+        and whose errback is invoked otherwise.
+        """
+        d = maybeDeferred(self.getCapabilities)
+        d.addCallback(self.__cbLoginCaps, username, password)
+        return d
+
+    def serverGreeting(self, caps):
+        """Called when the server has sent us a greeting.
+
+        @type caps: C{dict}
+        @param caps: Capabilities the server advertised in its greeting.
+        """
+
+    def _getContextFactory(self):
+        if self.context is not None:
+            return self.context
+        try:
+            from twisted.internet import ssl
+        except ImportError:
+            return None
+        else:
+            context = ssl.ClientContextFactory()
+            context.method = ssl.SSL.TLSv1_METHOD
+            return context
+
+    def __cbLoginCaps(self, capabilities, username, password):
+        # If the server advertises STARTTLS, we might want to try to switch to TLS
+        tryTLS = 'STARTTLS' in capabilities
+
+        # If our transport supports switching to TLS, we might want to try to switch to TLS.
+        tlsableTransport = interfaces.ITLSTransport(self.transport, None) is not None
+
+        # If our transport is not already using TLS, we might want to try to switch to TLS.
+        nontlsTransport = interfaces.ISSLTransport(self.transport, None) is None
+
+        if not self.startedTLS and tryTLS and tlsableTransport and nontlsTransport:
+            d = self.startTLS()
+
+            d.addCallbacks(
+                self.__cbLoginTLS,
+                self.__ebLoginTLS,
+                callbackArgs=(username, password),
+                )
+            return d
+        else:
+            if nontlsTransport:
+                log.msg("Server has no TLS support. logging in over cleartext!")
+            args = ' '.join((_quote(username), _quote(password)))
+            return self.sendCommand(Command('LOGIN', args))
+
+    def _startedTLS(self, result, context):
+        self.transport.startTLS(context)
+        self._capCache = None
+        self.startedTLS = True
+        return result
+
+    def __cbLoginTLS(self, result, username, password):
+        args = ' '.join((_quote(username), _quote(password)))
+        return self.sendCommand(Command('LOGIN', args))
+
+    def __ebLoginTLS(self, failure):
+        log.err(failure)
+        return failure
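+
+    # Usage sketch for login() (illustrative only; `proto`, the credentials
+    # and the follow-up call are assumptions): STARTTLS is attempted first
+    # when both peers support it, otherwise the cleartext warning above is
+    # logged and LOGIN is sent anyway.
+    #
+    #     d = proto.login('alice', 'secret')
+    #     d.addCallback(lambda _: proto.select('INBOX'))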
+
+    def namespace(self):
+        """Retrieve information about the namespaces available to this account
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with namespace
+        information.  An example of this information is::
+
+            [[['', '/']], [], []]
+
+        which indicates a single personal namespace called '' with '/'
+        as its hierarchical delimiter, and no shared or user namespaces.
+        """
+        cmd = 'NAMESPACE'
+        resp = ('NAMESPACE',)
+        d = self.sendCommand(Command(cmd, wantResponse=resp))
+        d.addCallback(self.__cbNamespace)
+        return d
+
+    def __cbNamespace(self, (lines, last)):
+        for parts in lines:
+            if len(parts) == 4 and parts[0] == 'NAMESPACE':
+                return [e or [] for e in parts[1:]]
+        log.err("No NAMESPACE response to NAMESPACE command")
+        return [[], [], []]
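+
+    # Usage sketch for namespace() (illustrative only; `proto` is an assumed
+    # authenticated client): the callback value mirrors the NAMESPACE reply,
+    # e.g. [[['', '/']], [], []] for a single personal namespace.
+    #
+    #     def cbNamespace((personal, shared, users)):
+    #         return personal
+    #     proto.namespace().addCallback(cbNamespace)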
+
+
+    def select(self, mailbox):
+        """
+        Select a mailbox
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type mailbox: C{str}
+        @param mailbox: The name of the mailbox to select
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with mailbox
+        information if the select is successful and whose errback is
+        invoked otherwise.  Mailbox information consists of a dictionary
+        with the following keys and values::
+
+                FLAGS: A list of strings containing the flags settable on
+                        messages in this mailbox.
+
+                EXISTS: An integer indicating the number of messages in this
+                        mailbox.
+
+                RECENT: An integer indicating the number of "recent"
+                        messages in this mailbox.
+
+                UNSEEN: The message sequence number (an integer) of the
+                        first unseen message in the mailbox.
+
+                PERMANENTFLAGS: A list of strings containing the flags that
+                        can be permanently set on messages in this mailbox.
+
+                UIDVALIDITY: An integer uniquely identifying this mailbox.
+        """
+        cmd = 'SELECT'
+        args = _prepareMailboxName(mailbox)
+        resp = ('FLAGS', 'EXISTS', 'RECENT', 'UNSEEN', 'PERMANENTFLAGS', 'UIDVALIDITY')
+        d = self.sendCommand(Command(cmd, args, wantResponse=resp))
+        d.addCallback(self.__cbSelect, 1)
+        return d
+
+
+    def examine(self, mailbox):
+        """Select a mailbox in read-only mode
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type mailbox: C{str}
+        @param mailbox: The name of the mailbox to examine
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with mailbox
+        information if the examine is successful and whose errback
+        is invoked otherwise.  Mailbox information consists of a dictionary
+        with the following keys and values::
+
+            'FLAGS': A list of strings containing the flags settable on
+                        messages in this mailbox.
+
+            'EXISTS': An integer indicating the number of messages in this
+                        mailbox.
+
+            'RECENT': An integer indicating the number of \"recent\"
+                        messages in this mailbox.
+
+            'UNSEEN': An integer indicating the number of messages not
+                        flagged \\Seen in this mailbox.
+
+            'PERMANENTFLAGS': A list of strings containing the flags that
+                        can be permanently set on messages in this mailbox.
+
+            'UIDVALIDITY': An integer uniquely identifying this mailbox.
+        """
+        cmd = 'EXAMINE'
+        args = _prepareMailboxName(mailbox)
+        resp = ('FLAGS', 'EXISTS', 'RECENT', 'UNSEEN', 'PERMANENTFLAGS', 'UIDVALIDITY')
+        d = self.sendCommand(Command(cmd, args, wantResponse=resp))
+        d.addCallback(self.__cbSelect, 0)
+        return d
+
+
+    def _intOrRaise(self, value, phrase):
+        """
+        Parse C{value} as an integer and return the result or raise
+        L{IllegalServerResponse} with C{phrase} as an argument if C{value}
+        cannot be parsed as an integer.
+        """
+        try:
+            return int(value)
+        except ValueError:
+            raise IllegalServerResponse(phrase)
+
+
+    def __cbSelect(self, (lines, tagline), rw):
+        """
+        Handle lines received in response to a SELECT or EXAMINE command.
+
+        See RFC 3501, section 6.3.1.
+        """
+        # In the absence of specification, we are free to assume:
+        #   READ-WRITE access
+        datum = {'READ-WRITE': rw}
+        lines.append(parseNestedParens(tagline))
+        for split in lines:
+            if len(split) > 0 and split[0].upper() == 'OK':
+                # Handle all the kinds of OK response.
+                content = split[1]
+                key = content[0].upper()
+                if key == 'READ-ONLY':
+                    datum['READ-WRITE'] = False
+                elif key == 'READ-WRITE':
+                    datum['READ-WRITE'] = True
+                elif key == 'UIDVALIDITY':
+                    datum['UIDVALIDITY'] = self._intOrRaise(
+                        content[1], split)
+                elif key == 'UNSEEN':
+                    datum['UNSEEN'] = self._intOrRaise(content[1], split)
+                elif key == 'UIDNEXT':
+                    datum['UIDNEXT'] = self._intOrRaise(content[1], split)
+                elif key == 'PERMANENTFLAGS':
+                    datum['PERMANENTFLAGS'] = tuple(content[1])
+                else:
+                    log.err('Unhandled SELECT response (2): %s' % (split,))
+            elif len(split) == 2:
+                # Handle FLAGS, EXISTS, and RECENT
+                if split[0].upper() == 'FLAGS':
+                    datum['FLAGS'] = tuple(split[1])
+                elif isinstance(split[1], str):
+                    # Must make sure things are strings before treating them as
+                    # strings since some other forms of response have nesting in
+                    # places which results in lists instead.
+                    if split[1].upper() == 'EXISTS':
+                        datum['EXISTS'] = self._intOrRaise(split[0], split)
+                    elif split[1].upper() == 'RECENT':
+                        datum['RECENT'] = self._intOrRaise(split[0], split)
+                    else:
+                        log.err('Unhandled SELECT response (0): %s' % (split,))
+                else:
+                    log.err('Unhandled SELECT response (1): %s' % (split,))
+            else:
+                log.err('Unhandled SELECT response (4): %s' % (split,))
+        return datum
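+
+    # Usage sketch for select()/examine() (illustrative only; `proto` is an
+    # assumed authenticated client): the callback receives the mailbox-info
+    # dict assembled above.
+    #
+    #     def cbSelected(info):
+    #         return info.get('EXISTS'), info.get('UIDVALIDITY')
+    #     proto.select('INBOX').addCallback(cbSelected)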
+
+
+    def create(self, name):
+        """Create a new mailbox on the server
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type name: C{str}
+        @param name: The name of the mailbox to create.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if the mailbox creation
+        is successful and whose errback is invoked otherwise.
+        """
+        return self.sendCommand(Command('CREATE', _prepareMailboxName(name)))
+
+    def delete(self, name):
+        """Delete a mailbox
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type name: C{str}
+        @param name: The name of the mailbox to delete.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if the mailbox is
+        deleted successfully and whose errback is invoked otherwise.
+        """
+        return self.sendCommand(Command('DELETE', _prepareMailboxName(name)))
+
+    def rename(self, oldname, newname):
+        """Rename a mailbox
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type oldname: C{str}
+        @param oldname: The current name of the mailbox to rename.
+
+        @type newname: C{str}
+        @param newname: The new name to give the mailbox.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if the rename is
+        successful and whose errback is invoked otherwise.
+        """
+        oldname = _prepareMailboxName(oldname)
+        newname = _prepareMailboxName(newname)
+        return self.sendCommand(Command('RENAME', ' '.join((oldname, newname))))
+
+    def subscribe(self, name):
+        """Add a mailbox to the subscription list
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type name: C{str}
+        @param name: The mailbox to mark as 'active' or 'subscribed'
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if the subscription
+        is successful and whose errback is invoked otherwise.
+        """
+        return self.sendCommand(Command('SUBSCRIBE', _prepareMailboxName(name)))
+
+    def unsubscribe(self, name):
+        """Remove a mailbox from the subscription list
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type name: C{str}
+        @param name: The mailbox to unsubscribe
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if the unsubscription
+        is successful and whose errback is invoked otherwise.
+        """
+        return self.sendCommand(Command('UNSUBSCRIBE', _prepareMailboxName(name)))
+
+    def list(self, reference, wildcard):
+        """List a subset of the available mailboxes
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type reference: C{str}
+        @param reference: The context in which to interpret C{wildcard}
+
+        @type wildcard: C{str}
+        @param wildcard: The pattern of mailbox names to match, optionally
+        including either or both of the '*' and '%' wildcards.  '*' will
+        match zero or more characters and cross hierarchical boundaries.
+        '%' will also match zero or more characters, but is limited to a
+        single hierarchical level.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a list of C{tuple}s,
+        the first element of which is a C{tuple} of mailbox flags, the second
+        element of which is the hierarchy delimiter for this mailbox, and the
+        third of which is the mailbox name; if the command is unsuccessful,
+        the deferred's errback is invoked instead.
+        """
+        cmd = 'LIST'
+        args = '"%s" "%s"' % (reference, wildcard.encode('imap4-utf-7'))
+        resp = ('LIST',)
+        d = self.sendCommand(Command(cmd, args, wantResponse=resp))
+        d.addCallback(self.__cbList, 'LIST')
+        return d
+
+    def lsub(self, reference, wildcard):
+        """List a subset of the subscribed available mailboxes
+
+        This command is allowed in the Authenticated and Selected states.
+
+        The parameters and returned object are the same as for the C{list}
+        method, with one slight difference: only mailboxes which have been
+        subscribed are included in the resulting list.
+        """
+        cmd = 'LSUB'
+        args = '"%s" "%s"' % (reference, wildcard.encode('imap4-utf-7'))
+        resp = ('LSUB',)
+        d = self.sendCommand(Command(cmd, args, wantResponse=resp))
+        d.addCallback(self.__cbList, 'LSUB')
+        return d
+
+    def __cbList(self, (lines, last), command):
+        results = []
+        for parts in lines:
+            if len(parts) == 4 and parts[0] == command:
+                parts[1] = tuple(parts[1])
+                results.append(tuple(parts[1:]))
+        return results
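+
+    # Usage sketch for list()/lsub() (illustrative only; `proto` is assumed):
+    # each result tuple is (flags, delimiter, name), as assembled above.
+    #
+    #     def cbList(mailboxes):
+    #         return [name for flags, delim, name in mailboxes]
+    #     proto.list('', '*').addCallback(cbList)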
+
+    def status(self, mailbox, *names):
+        """
+        Retrieve the status of the given mailbox
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type mailbox: C{str}
+        @param mailbox: The name of the mailbox to query
+
+        @type *names: C{str}
+        @param *names: The status names to query.  These may be any number of:
+            C{'MESSAGES'}, C{'RECENT'}, C{'UIDNEXT'}, C{'UIDVALIDITY'}, and
+            C{'UNSEEN'}.
+
+        @rtype: C{Deferred}
+        @return: A deferred which fires with the status information if the
+            command is successful and whose errback is invoked otherwise.  The
+            status information is in the form of a C{dict}.  Each element of
+            C{names} is a key in the dictionary.  The value for each key is the
+            corresponding response from the server.
+        """
+        cmd = 'STATUS'
+        args = "%s (%s)" % (_prepareMailboxName(mailbox), ' '.join(names))
+        resp = ('STATUS',)
+        d = self.sendCommand(Command(cmd, args, wantResponse=resp))
+        d.addCallback(self.__cbStatus)
+        return d
+
+    def __cbStatus(self, (lines, last)):
+        status = {}
+        for parts in lines:
+            if parts[0] == 'STATUS':
+                items = parts[2]
+                items = [items[i:i+2] for i in range(0, len(items), 2)]
+                status.update(dict(items))
+        for k in status.keys():
+            t = self.STATUS_TRANSFORMATIONS.get(k)
+            if t:
+                try:
+                    status[k] = t(status[k])
+                except Exception, e:
+                    raise IllegalServerResponse('(%s %s): %s' % (k, status[k], str(e)))
+        return status
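+
+    # Usage sketch for status() (illustrative only; `proto` is assumed): the
+    # requested status names become the keys of the returned dict.
+    #
+    #     d = proto.status('INBOX', 'MESSAGES', 'UNSEEN')
+    #     d.addCallback(lambda s: (s['MESSAGES'], s['UNSEEN']))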
+
+    def append(self, mailbox, message, flags = (), date = None):
+        """Add the given message to the given mailbox.
+
+        This command is allowed in the Authenticated and Selected states.
+
+        @type mailbox: C{str}
+        @param mailbox: The mailbox to which to add this message.
+
+        @type message: Any file-like object
+        @param message: The message to add, in RFC822 format.  Newlines
+        in this file should be \\r\\n-style.
+
+        @type flags: Any iterable of C{str}
+        @param flags: The flags to associate with this message.
+
+        @type date: C{str}
+        @param date: The date to associate with this message.  This should
+        be of the format DD-MM-YYYY HH:MM:SS +/-HHMM.  For example, in
+        Eastern Standard Time, on July 1st 2004 at half past 1 PM,
+        \"01-07-2004 13:30:00 -0500\".
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked when this command
+        succeeds or whose errback is invoked if it fails.
+        """
+        message.seek(0, 2)
+        L = message.tell()
+        message.seek(0, 0)
+        fmt = '%s (%s)%s {%d}'
+        if date:
+            date = ' "%s"' % date
+        else:
+            date = ''
+        cmd = fmt % (
+            _prepareMailboxName(mailbox), ' '.join(flags),
+            date, L
+        )
+        d = self.sendCommand(Command('APPEND', cmd, (), self.__cbContinueAppend, message))
+        return d
+
+    def __cbContinueAppend(self, lines, message):
+        s = basic.FileSender()
+        return s.beginFileTransfer(message, self.transport, None
+            ).addCallback(self.__cbFinishAppend)
+
+    def __cbFinishAppend(self, foo):
+        self.sendLine('')
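+
+    # Usage sketch for append() (illustrative only; the StringIO message and
+    # flag value are assumptions): the message must be a seekable file-like
+    # object using CRLF line endings.
+    #
+    #     from StringIO import StringIO
+    #     msg = StringIO("From: alice@example.com\r\n\r\nhello\r\n")
+    #     d = proto.append('INBOX', msg, flags=('\\Seen',))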
+
+    def check(self):
+        """Tell the server to perform a checkpoint
+
+        This command is allowed in the Selected state.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked when this command
+        succeeds or whose errback is invoked if it fails.
+        """
+        return self.sendCommand(Command('CHECK'))
+
+    def close(self):
+        """Return the connection to the Authenticated state.
+
+        This command is allowed in the Selected state.
+
+        Issuing this command will also remove all messages flagged \\Deleted
+        from the selected mailbox if it is opened in read-write mode,
+        otherwise it indicates success but no messages are removed.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked when the command
+        completes successfully or whose errback is invoked if it fails.
+        """
+        return self.sendCommand(Command('CLOSE'))
+
+
+    def expunge(self):
+        """Return the connection to the Authenticated state.
+
+        This command is allowed in the Selected state.
+
+        Issuing this command will perform the same actions as issuing the
+        close command, but will also generate an 'expunge' response for
+        every message deleted.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a list of the
+        'expunge' responses when this command is successful or whose errback
+        is invoked otherwise.
+        """
+        cmd = 'EXPUNGE'
+        resp = ('EXPUNGE',)
+        d = self.sendCommand(Command(cmd, wantResponse=resp))
+        d.addCallback(self.__cbExpunge)
+        return d
+
+
+    def __cbExpunge(self, (lines, last)):
+        ids = []
+        for parts in lines:
+            if len(parts) == 2 and parts[1] == 'EXPUNGE':
+                ids.append(self._intOrRaise(parts[0], parts))
+        return ids
+
+
+    def search(self, *queries, **kwarg):
+        """Search messages in the currently selected mailbox
+
+        This command is allowed in the Selected state.
+
+        Any non-zero number of queries is accepted by this method, as
+        returned by the C{Query}, C{Or}, and C{Not} functions.
+
+        One keyword argument is accepted: if uid is passed in with a non-zero
+        value, the server is asked to return message UIDs instead of message
+        sequence numbers.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback will be invoked with a list of all
+        the message sequence numbers returned by the search, or whose errback
+        will be invoked if there is an error.
+        """
+        if kwarg.get('uid'):
+            cmd = 'UID SEARCH'
+        else:
+            cmd = 'SEARCH'
+        args = ' '.join(queries)
+        d = self.sendCommand(Command(cmd, args, wantResponse=(cmd,)))
+        d.addCallback(self.__cbSearch)
+        return d
+
+
+    def __cbSearch(self, (lines, end)):
+        ids = []
+        for parts in lines:
+            if len(parts) > 0 and parts[0] == 'SEARCH':
+                ids.extend([self._intOrRaise(p, parts) for p in parts[1:]])
+        return ids
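+
+    # Usage sketch for search() (illustrative only; `proto` is assumed):
+    # queries are normally built with the module-level Query/Or/Not helpers
+    # defined further down; with uid=1 the result is a list of UIDs.
+    #
+    #     d = proto.search(Query(unseen=1), uid=1)
+    #     # the callback receives e.g. [105, 106, 110]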
+
+
+    def fetchUID(self, messages, uid=0):
+        """Retrieve the unique identifier for one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message sequence numbers to unique message identifiers, or whose
+        errback is invoked if there is an error.
+        """
+        return self._fetch(messages, useUID=uid, uid=1)
+
+
+    def fetchFlags(self, messages, uid=0):
+        """Retrieve the flags for one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: The messages for which to retrieve flags.
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to lists of flags, or whose errback is invoked if
+        there is an error.
+        """
+        return self._fetch(str(messages), useUID=uid, flags=1)
+
+
+    def fetchInternalDate(self, messages, uid=0):
+        """Retrieve the internal date associated with one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: The messages for which to retrieve the internal date.
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to date strings, or whose errback is invoked
+        if there is an error.  Date strings take the format of
+        \"day-month-year time timezone\".
+        """
+        return self._fetch(str(messages), useUID=uid, internaldate=1)
+
+
+    def fetchEnvelope(self, messages, uid=0):
+        """Retrieve the envelope data for one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: The messages for which to retrieve envelope data.
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to envelope data, or whose errback is invoked
+        if there is an error.  Envelope data consists of a sequence of the
+        date, subject, from, sender, reply-to, to, cc, bcc, in-reply-to,
+        and message-id header fields.  The date, subject, in-reply-to, and
+        message-id fields are strings, while the from, sender, reply-to,
+        to, cc, and bcc fields contain address data.  Address data consists
+        of a sequence of name, source route, mailbox name, and hostname.
+        Fields which are not present for a particular address may be C{None}.
+        """
+        return self._fetch(str(messages), useUID=uid, envelope=1)
+
+
+    def fetchBodyStructure(self, messages, uid=0):
+        """Retrieve the structure of the body of one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: The messages for which to retrieve body structure
+        data.
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to body structure data, or whose errback is invoked
+        if there is an error.  Body structure data describes the MIME-IMB
+        format of a message and consists of a sequence of mime type, mime
+        subtype, parameters, content id, description, encoding, and size.
+        The fields following the size field are variable: if the mime
+        type/subtype is message/rfc822, the contained message's envelope
+        information, body structure data, and number of lines of text; if
+        the mime type is text, the number of lines of text.  Extension fields
+        may also be included; if present, they are: the MD5 hash of the body,
+        body disposition, body language.
+        """
+        return self._fetch(messages, useUID=uid, bodystructure=1)
+
+
+    def fetchSimplifiedBody(self, messages, uid=0):
+        """Retrieve the simplified body structure of one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to body data, or whose errback is invoked
+        if there is an error.  The simplified body structure is the same
+        as the body structure, except that extension fields will never be
+        present.
+        """
+        return self._fetch(messages, useUID=uid, body=1)
+
+
+    def fetchMessage(self, messages, uid=0):
+        """Retrieve one or more entire messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: L{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: L{Deferred}
+
+        @return: A L{Deferred} which will fire with a C{dict} mapping message
+            sequence numbers to C{dict}s giving message data for the
+            corresponding message.  If C{uid} is true, the inner dictionaries
+            have a C{'UID'} key mapped to a C{str} giving the UID for the
+            message.  The text of the message is a C{str} associated with the
+            C{'RFC822'} key in each dictionary.
+        """
+        return self._fetch(messages, useUID=uid, rfc822=1)
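+
+    # Usage sketch for fetchMessage() (illustrative only; `proto` and the
+    # sequence set are assumptions): each value dict carries the full text
+    # under the 'RFC822' key (plus 'UID' when uid is true).
+    #
+    #     def cbMessages(result):
+    #         for seq, data in result.iteritems():
+    #             print seq, len(data['RFC822'])
+    #     proto.fetchMessage('1:5').addCallback(cbMessages)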
+
+
+    def fetchHeaders(self, messages, uid=0):
+        """Retrieve headers of one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to dicts of message headers, or whose errback is
+        invoked if there is an error.
+        """
+        return self._fetch(messages, useUID=uid, rfc822header=1)
+
+
+    def fetchBody(self, messages, uid=0):
+        """Retrieve body text of one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to file-like objects containing body text, or whose
+        errback is invoked if there is an error.
+        """
+        return self._fetch(messages, useUID=uid, rfc822text=1)
+
+
+    def fetchSize(self, messages, uid=0):
+        """Retrieve the size, in octets, of one or more messages
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to sizes, or whose errback is invoked if there is
+        an error.
+        """
+        return self._fetch(messages, useUID=uid, rfc822size=1)
+
+
+    def fetchFull(self, messages, uid=0):
+        """Retrieve several different fields of one or more messages
+
+        This command is allowed in the Selected state.  This is equivalent
+        to issuing all of the C{fetchFlags}, C{fetchInternalDate},
+        C{fetchSize}, C{fetchEnvelope}, and C{fetchSimplifiedBody}
+        functions.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to dict of the retrieved data values, or whose
+        errback is invoked if there is an error.  The dictionary keys
+        are "flags", "date", "size", "envelope", and "body".
+        """
+        return self._fetch(
+            messages, useUID=uid, flags=1, internaldate=1,
+            rfc822size=1, envelope=1, body=1)
+
+
+    def fetchAll(self, messages, uid=0):
+        """Retrieve several different fields of one or more messages
+
+        This command is allowed in the Selected state.  This is equivalent
+        to issuing all of the C{fetchFlags}, C{fetchInternalDate},
+        C{fetchSize}, and C{fetchEnvelope} functions.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to dict of the retrieved data values, or whose
+        errback is invoked if there is an error.  The dictionary keys
+        are "flags", "date", "size", and "envelope".
+        """
+        return self._fetch(
+            messages, useUID=uid, flags=1, internaldate=1,
+            rfc822size=1, envelope=1)
+
+
+    def fetchFast(self, messages, uid=0):
+        """Retrieve several different fields of one or more messages
+
+        This command is allowed in the Selected state.  This is equivalent
+        to issuing all of the C{fetchFlags}, C{fetchInternalDate}, and
+        C{fetchSize} functions.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a dict mapping
+        message numbers to dict of the retrieved data values, or whose
+        errback is invoked if there is an error.  The dictionary keys are
+        "flags", "date", and "size".
+        """
+        return self._fetch(
+            messages, useUID=uid, flags=1, internaldate=1, rfc822size=1)
+
+
+    def _parseFetchPairs(self, fetchResponseList):
+        """
+        Given the result of parsing a single I{FETCH} response, construct a
+        C{dict} mapping response keys to response values.
+
+        @param fetchResponseList: The result of parsing a I{FETCH} response
+            with L{parseNestedParens} and extracting just the response data
+            (that is, just the part that comes after C{"FETCH"}).  The form
+            of this input (and therefore the output of this method) is very
+            disagreeable.  A valuable improvement would be to enumerate the
+            possible keys (representing them as structured objects of some
+            sort) rather than using strings and tuples of tuples of strings
+            and so forth.  This would allow the keys to be documented more
+            easily and would allow for a much simpler application-facing API
+            (one not based on looking up somewhat hard to predict keys in a
+            dict).  Since C{fetchResponseList} notionally represents a
+            flattened sequence of pairs (identifying keys followed by their
+            associated values), collapsing such complex elements of this
+            list as C{["BODY", ["HEADER.FIELDS", ["SUBJECT"]]]} into a
+            single object would also greatly simplify the implementation of
+            this method.
+
+        @return: A C{dict} of the response data represented by C{pairs}.  Keys
+            in this dictionary are things like C{"RFC822.TEXT"}, C{"FLAGS"}, or
+            C{("BODY", ("HEADER.FIELDS", ("SUBJECT",)))}.  Values are entirely
+            dependent on the key with which they are associated, but retain the
+            same structure as produced by L{parseNestedParens}.
+        """
+        values = {}
+        responseParts = iter(fetchResponseList)
+        while True:
+            try:
+                key = responseParts.next()
+            except StopIteration:
+                break
+
+            try:
+                value = responseParts.next()
+            except StopIteration:
+                raise IllegalServerResponse(
+                    "Not enough arguments", fetchResponseList)
+
+            # The parsed forms of responses like:
+            #
+            # BODY[] VALUE
+            # BODY[TEXT] VALUE
+            # BODY[HEADER.FIELDS (SUBJECT)] VALUE
+            # BODY[HEADER.FIELDS (SUBJECT)]<N.M> VALUE
+            #
+            # are:
+            #
+            # ["BODY", [], VALUE]
+            # ["BODY", ["TEXT"], VALUE]
+            # ["BODY", ["HEADER.FIELDS", ["SUBJECT"]], VALUE]
+            # ["BODY", ["HEADER.FIELDS", ["SUBJECT"]], "<N.M>", VALUE]
+            #
+            # Here, check for these cases and grab as many extra elements as
+            # necessary to retrieve the body information.
+            if key in ("BODY", "BODY.PEEK") and isinstance(value, list) and len(value) < 3:
+                if len(value) < 2:
+                    key = (key, tuple(value))
+                else:
+                    key = (key, (value[0], tuple(value[1])))
+                try:
+                    value = responseParts.next()
+                except StopIteration:
+                    raise IllegalServerResponse(
+                        "Not enough arguments", fetchResponseList)
+
+                # Handle partial ranges
+                if value.startswith('<') and value.endswith('>'):
+                    try:
+                        int(value[1:-1])
+                    except ValueError:
+                        # This isn't really a range, it's some content.
+                        pass
+                    else:
+                        key = key + (value,)
+                        try:
+                            value = responseParts.next()
+                        except StopIteration:
+                            raise IllegalServerResponse(
+                                "Not enough arguments", fetchResponseList)
+
+            values[key] = value
+        return values
+
+
+    def _cbFetch(self, (lines, last), requestedParts, structured):
+        info = {}
+        for parts in lines:
+            if len(parts) == 3 and parts[1] == 'FETCH':
+                id = self._intOrRaise(parts[0], parts)
+                if id not in info:
+                    info[id] = [parts[2]]
+                else:
+                    info[id][0].extend(parts[2])
+
+        results = {}
+        for (messageId, values) in info.iteritems():
+            mapping = self._parseFetchPairs(values[0])
+            results.setdefault(messageId, {}).update(mapping)
+
+        flagChanges = {}
+        for messageId in results.keys():
+            values = results[messageId]
+            for part in values.keys():
+                if part not in requestedParts and part == 'FLAGS':
+                    flagChanges[messageId] = values['FLAGS']
+                    # Find flags in the result and get rid of them.
+                    for i in range(len(info[messageId][0])):
+                        if info[messageId][0][i] == 'FLAGS':
+                            del info[messageId][0][i:i+2]
+                            break
+                    del values['FLAGS']
+                    if not values:
+                        del results[messageId]
+
+        if flagChanges:
+            self.flagsChanged(flagChanges)
+
+        if structured:
+            return results
+        else:
+            return info
+
+
+    def fetchSpecific(self, messages, uid=0, headerType=None,
+                      headerNumber=None, headerArgs=None, peek=None,
+                      offset=None, length=None):
+        """Retrieve a specific section of one or more messages
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @type headerType: C{str}
+        @param headerType: If specified, must be one of HEADER,
+        HEADER.FIELDS, HEADER.FIELDS.NOT, MIME, or TEXT, and will determine
+        which part of the message is retrieved.  For HEADER.FIELDS and
+        HEADER.FIELDS.NOT, C{headerArgs} must be a sequence of header names.
+        For MIME, C{headerNumber} must be specified.
+
+        @type headerNumber: C{int} or C{int} sequence
+        @param headerNumber: The nested rfc822 index specifying the
+        entity to retrieve.  For example, C{1} retrieves the first
+        entity of the message, and C{(2, 1, 3)} retrieves the 3rd
+        entity inside the first entity inside the second entity of
+        the message.
+
+        @type headerArgs: A sequence of C{str}
+        @param headerArgs: If C{headerType} is HEADER.FIELDS, these are the
+        headers to retrieve.  If it is HEADER.FIELDS.NOT, these are the
+        headers to exclude from retrieval.
+
+        @type peek: C{bool}
+        @param peek: If true, cause the server to not set the \\Seen
+        flag on this message as a result of this command.
+
+        @type offset: C{int}
+        @param offset: The number of octets at the beginning of the result
+        to skip.
+
+        @type length: C{int}
+        @param length: The number of octets to retrieve.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a mapping of
+        message numbers to retrieved data, or whose errback is invoked
+        if there is an error.
+        """
+        fmt = '%s BODY%s[%s%s%s]%s'
+        if headerNumber is None:
+            number = ''
+        elif isinstance(headerNumber, int):
+            number = str(headerNumber)
+        else:
+            number = '.'.join(map(str, headerNumber))
+        if headerType is None:
+            header = ''
+        elif number:
+            header = '.' + headerType
+        else:
+            header = headerType
+        if header and headerType not in ('TEXT', 'MIME'):
+            if headerArgs is not None:
+                payload = ' (%s)' % ' '.join(headerArgs)
+            else:
+                payload = ' ()'
+        else:
+            payload = ''
+        if offset is None:
+            extra = ''
+        else:
+            extra = '<%d.%d>' % (offset, length)
+        fetch = uid and 'UID FETCH' or 'FETCH'
+        cmd = fmt % (messages, peek and '.PEEK' or '', number, header, payload, extra)
+        d = self.sendCommand(Command(fetch, cmd, wantResponse=('FETCH',)))
+        d.addCallback(self._cbFetch, (), False)
+        return d
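+
+    # Usage sketch for fetchSpecific() (illustrative only; `proto` is
+    # assumed): fetch just the Subject header of message 1 without setting
+    # the \Seen flag.
+    #
+    #     d = proto.fetchSpecific('1', headerType='HEADER.FIELDS',
+    #                             headerArgs=['Subject'], peek=1)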
+
+
+    def _fetch(self, messages, useUID=0, **terms):
+        fetch = useUID and 'UID FETCH' or 'FETCH'
+
+        if 'rfc822text' in terms:
+            del terms['rfc822text']
+            terms['rfc822.text'] = True
+        if 'rfc822size' in terms:
+            del terms['rfc822size']
+            terms['rfc822.size'] = True
+        if 'rfc822header' in terms:
+            del terms['rfc822header']
+            terms['rfc822.header'] = True
+
+        cmd = '%s (%s)' % (messages, ' '.join([s.upper() for s in terms.keys()]))
+        d = self.sendCommand(Command(fetch, cmd, wantResponse=('FETCH',)))
+        d.addCallback(self._cbFetch, map(str.upper, terms.keys()), True)
+        return d
+
+    def setFlags(self, messages, flags, silent=1, uid=0):
+        """Set the flags for one or more messages.
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type flags: Any iterable of C{str}
+        @param flags: The flags to set
+
+        @type silent: C{bool}
+        @param silent: If true, cause the server to suppress its verbose
+        response.
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a list of the
+        server's responses (C{[]} if C{silent} is true) or whose
+        errback is invoked if there is an error.
+        """
+        return self._store(str(messages), 'FLAGS', silent, flags, uid)
+
+    def addFlags(self, messages, flags, silent=1, uid=0):
+        """Add to the set of flags for one or more messages.
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type flags: Any iterable of C{str}
+        @param flags: The flags to add
+
+        @type silent: C{bool}
+        @param silent: If true, cause the server to suppress its verbose
+        response.
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a list of the
+        server's responses (C{[]} if C{silent} is true) or whose
+        errback is invoked if there is an error.
+        """
+        return self._store(str(messages),'+FLAGS', silent, flags, uid)
+
+    def removeFlags(self, messages, flags, silent=1, uid=0):
+        """Remove from the set of flags for one or more messages.
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{MessageSet} or C{str}
+        @param messages: A message sequence set
+
+        @type flags: Any iterable of C{str}
+        @param flags: The flags to remove
+
+        @type silent: C{bool}
+        @param silent: If true, cause the server to suppress its verbose
+        response.
+
+        @type uid: C{bool}
+        @param uid: Indicates whether the message sequence set is of message
+        numbers or of unique message IDs.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a list of the
+        server's responses (C{[]} if C{silent} is true) or whose
+        errback is invoked if there is an error.
+        """
+        return self._store(str(messages), '-FLAGS', silent, flags, uid)
+
+
+    def _store(self, messages, cmd, silent, flags, uid):
+        if silent:
+            cmd = cmd + '.SILENT'
+        store = uid and 'UID STORE' or 'STORE'
+        args = ' '.join((messages, cmd, '(%s)' % ' '.join(flags)))
+        d = self.sendCommand(Command(store, args, wantResponse=('FETCH',)))
+        expected = ()
+        if not silent:
+            expected = ('FLAGS',)
+        d.addCallback(self._cbFetch, expected, True)
+        return d
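+
+    # Usage sketch for setFlags()/addFlags()/removeFlags() (illustrative
+    # only; `proto` and the UID range are assumptions): mark messages as
+    # read without requesting the verbose untagged FETCH responses.
+    #
+    #     d = proto.addFlags('1:10', ('\\Seen',), silent=1, uid=1)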
+
+
+    def copy(self, messages, mailbox, uid):
+        """Copy the specified messages to the specified mailbox.
+
+        This command is allowed in the Selected state.
+
+        @type messages: C{str}
+        @param messages: A message sequence set
+
+        @type mailbox: C{str}
+        @param mailbox: The mailbox to which to copy the messages
+
+        @type uid: C{bool}
+        @param uid: If true, the C{messages} refers to message UIDs, rather
+        than message sequence numbers.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with a true value
+        when the copy is successful, or whose errback is invoked if there
+        is an error.
+        """
+        if uid:
+            cmd = 'UID COPY'
+        else:
+            cmd = 'COPY'
+        args = '%s %s' % (messages, _prepareMailboxName(mailbox))
+        return self.sendCommand(Command(cmd, args))
+
+    #
+    # IMailboxListener methods
+    #
+    def modeChanged(self, writeable):
+        """Override me"""
+
+    def flagsChanged(self, newFlags):
+        """Override me"""
+
+    def newMessages(self, exists, recent):
+        """Override me"""
+
+
+class IllegalIdentifierError(IMAP4Exception): pass
+
+def parseIdList(s, lastMessageId=None):
+    """
+    Parse a message set search key into a C{MessageSet}.
+
+    @type s: C{str}
+    @param s: A string description of an id list, for example "1:3, 4:*"
+
+    @type lastMessageId: C{int}
+    @param lastMessageId: The last message sequence id or UID, depending on
+        whether we are parsing the list in UID or sequence id context. The
+        caller should pass in the correct value.
+
+    @rtype: C{MessageSet}
+    @return: A C{MessageSet} that contains the ids defined in the list
+    """
+    res = MessageSet()
+    parts = s.split(',')
+    for p in parts:
+        if ':' in p:
+            low, high = p.split(':', 1)
+            try:
+                if low == '*':
+                    low = None
+                else:
+                    low = long(low)
+                if high == '*':
+                    high = None
+                else:
+                    high = long(high)
+                if low is high is None:
+                    # *:* does not make sense
+                    raise IllegalIdentifierError(p)
+                # non-positive values are illegal according to RFC 3501
+                if ((low is not None and low <= 0) or
+                    (high is not None and high <= 0)):
+                    raise IllegalIdentifierError(p)
+                # star means "highest value of an id in the mailbox"
+                high = high or lastMessageId
+                low = low or lastMessageId
+
+                # RFC says that 2:4 and 4:2 are equivalent
+                if low > high:
+                    low, high = high, low
+                res.extend((low, high))
+            except ValueError:
+                raise IllegalIdentifierError(p)
+        else:
+            try:
+                if p == '*':
+                    p = None
+                else:
+                    p = long(p)
+                if p is not None and p <= 0:
+                    raise IllegalIdentifierError(p)
+            except ValueError:
+                raise IllegalIdentifierError(p)
+            else:
+                res.extend(p or lastMessageId)
+    return res
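+
+# Usage sketch for parseIdList() (illustrative values only): with
+# lastMessageId supplied, '*' maps to the highest id and reversed ranges are
+# normalised, e.g.
+#
+#     parseIdList('1:3,6,8:*', lastMessageId=10)
+#
+# yields a MessageSet covering 1-3, 6 and 8-10.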
+
+class IllegalQueryError(IMAP4Exception): pass
+
+_SIMPLE_BOOL = (
+    'ALL', 'ANSWERED', 'DELETED', 'DRAFT', 'FLAGGED', 'NEW', 'OLD', 'RECENT',
+    'SEEN', 'UNANSWERED', 'UNDELETED', 'UNDRAFT', 'UNFLAGGED', 'UNSEEN'
+)
+
+_NO_QUOTES = (
+    'LARGER', 'SMALLER', 'UID'
+)
+
+def Query(sorted=0, **kwarg):
+    """Create a query string
+
+    Among the accepted keywords are::
+
+        all         : If set to a true value, search all messages in the
+                      current mailbox
+
+        answered    : If set to a true value, search messages flagged with
+                      \\Answered
+
+        bcc         : A substring to search the BCC header field for
+
+        before      : Search messages with an internal date before this
+                      value.  The given date should be a string in the format
+                      of 'DD-Mon-YYYY'.  For example, '03-Mar-2003'.
+
+        body        : A substring to search the body of the messages for
+
+        cc          : A substring to search the CC header field for
+
+        deleted     : If set to a true value, search messages flagged with
+                      \\Deleted
+
+        draft       : If set to a true value, search messages flagged with
+                      \\Draft
+
+        flagged     : If set to a true value, search messages flagged with
+                      \\Flagged
+
+        from        : A substring to search the From header field for
+
+        header      : A two-tuple of a header name and substring to search
+                      for in that header
+
+        keyword     : Search for messages with the given keyword set
+
+        larger      : Search for messages larger than this number of octets
+
+        messages    : Search only the given message sequence set.
+
+        new         : If set to a true value, search messages flagged with
+                      \\Recent but not \\Seen
+
+        old         : If set to a true value, search messages not flagged with
+                      \\Recent
+
+        on          : Search messages with an internal date which is on this
+                      date.  The given date should be a string in the format
+                      of 'DD-Mon-YYYY'.  For example, '03-Mar-2003'.
+
+        recent      : If set to a true value, search for messages flagged with
+                      \\Recent
+
+        seen        : If set to a true value, search for messages flagged with
+                      \\Seen
+
+        sentbefore  : Search for messages with an RFC822 'Date' header before
+                      this date.  The given date should be a string in the format
+                      of 'DD-Mon-YYYY'.  For example, '03-Mar-2003'.
+
+        senton      : Search for messages with an RFC822 'Date' header which is
+                      on this date.  The given date should be a string in the format
+                      of 'DD-Mon-YYYY'.  For example, '03-Mar-2003'.
+
+        sentsince   : Search for messages with an RFC822 'Date' header which is
+                      after this date.  The given date should be a string in the format
+                      of 'DD-Mon-YYYY'.  For example, '03-Mar-2003'.
+
+        since       : Search for messages with an internal date that is after
+                      this date.  The given date should be a string in the format
+                      of 'DD-Mon-YYYY'.  For example, '03-Mar-2003'.
+
+        smaller     : Search for messages smaller than this number of octets
+
+        subject     : A substring to search the 'subject' header for
+
+        text        : A substring to search the entire message for
+
+        to          : A substring to search the 'to' header for
+
+        uid         : Search only the messages in the given message set
+
+        unanswered  : If set to a true value, search for messages not
+                      flagged with \\Answered
+
+        undeleted   : If set to a true value, search for messages not
+                      flagged with \\Deleted
+
+        undraft     : If set to a true value, search for messages not
+                      flagged with \\Draft
+
+        unflagged   : If set to a true value, search for messages not
+                      flagged with \\Flagged
+
+        unkeyword   : Search for messages without the given keyword set
+
+        unseen      : If set to a true value, search for messages not
+                      flagged with \\Seen
+
+    @type sorted: C{bool}
+    @param sorted: If true, the output will be sorted alphabetically.
+    The standard does not require it, but it makes testing this function
+    easier.  The default is zero, and this should be acceptable for any
+    application.
+
+    @rtype: C{str}
+    @return: The formatted query string
+    """
+    cmd = []
+    keys = kwarg.keys()
+    if sorted:
+        keys.sort()
+    for k in keys:
+        v = kwarg[k]
+        k = k.upper()
+        if k in _SIMPLE_BOOL and v:
+            cmd.append(k)
+        elif k == 'HEADER':
+            cmd.extend([k, v[0], '"%s"' % (v[1],)])
+        elif k not in _NO_QUOTES:
+            cmd.extend([k, '"%s"' % (v,)])
+        else:
+            cmd.extend([k, '%s' % (v,)])
+    if len(cmd) > 1:
+        return '(%s)' % ' '.join(cmd)
+    else:
+        return ' '.join(cmd)
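A hedged usage sketch; the search criteria below are arbitrary, and sorted=1
is used only to make the keyword order deterministic:

    q = Query(sorted=1, unseen=1, header=('Subject', 'vtk'), larger=1024)
    # q == '(HEADER Subject "vtk" LARGER 1024 UNSEEN)'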
+
+def Or(*args):
+    """The disjunction of two or more queries"""
+    if len(args) < 2:
+        raise IllegalQueryError, args
+    elif len(args) == 2:
+        return '(OR %s %s)' % args
+    else:
+        return '(OR %s %s)' % (args[0], Or(*args[1:]))
+
+def Not(query):
+    """The negation of a query"""
+    return '(NOT %s)' % (query,)
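For example (the sub-queries are arbitrary), disjunctions of more than two
terms nest to the right:

    Or('UNSEEN', 'FLAGGED', 'RECENT')   # -> '(OR UNSEEN (OR FLAGGED RECENT))'
    Not(Query(deleted=1))               # -> '(NOT DELETED)'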
+
+class MismatchedNesting(IMAP4Exception):
+    pass
+
+class MismatchedQuoting(IMAP4Exception):
+    pass
+
+def wildcardToRegexp(wildcard, delim=None):
+    wildcard = wildcard.replace('*', '(?:.*?)')
+    if delim is None:
+        wildcard = wildcard.replace('%', '(?:.*?)')
+    else:
+        wildcard = wildcard.replace('%', '(?:(?:[^%s])*?)' % re.escape(delim))
+    return re.compile(wildcard, re.I)
+
+def splitQuoted(s):
+    """Split a string into whitespace delimited tokens
+
+    Tokens that would otherwise be separated but are surrounded by \"
+    remain as a single token.  Any token that is not quoted and is
+    equal to \"NIL\" is tokenized as C{None}.
+
+    @type s: C{str}
+    @param s: The string to be split
+
+    @rtype: C{list} of C{str}
+    @return: A list of the resulting tokens
+
+    @raise MismatchedQuoting: Raised if an odd number of quotes are present
+    """
+    s = s.strip()
+    result = []
+    word = []
+    inQuote = inWord = False
+    for i, c in enumerate(s):
+        if c == '"':
+            if i and s[i-1] == '\\':
+                word.pop()
+                word.append('"')
+            elif not inQuote:
+                inQuote = True
+            else:
+                inQuote = False
+                result.append(''.join(word))
+                word = []
+        elif not inWord and not inQuote and c not in ('"' + string.whitespace):
+            inWord = True
+            word.append(c)
+        elif inWord and not inQuote and c in string.whitespace:
+            w = ''.join(word)
+            if w == 'NIL':
+                result.append(None)
+            else:
+                result.append(w)
+            word = []
+            inWord = False
+        elif inWord or inQuote:
+            word.append(c)
+
+    if inQuote:
+        raise MismatchedQuoting(s)
+    if inWord:
+        w = ''.join(word)
+        if w == 'NIL':
+            result.append(None)
+        else:
+            result.append(w)
+
+    return result
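A small sketch of the tokenizer's behaviour on a made-up line: quoted spans
stay together and a bare NIL becomes None:

    splitQuoted('foo "bar baz" NIL')   # -> ['foo', 'bar baz', None]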
+
+
+
+def splitOn(sequence, predicate, transformers):
+    result = []
+    mode = predicate(sequence[0])
+    tmp = [sequence[0]]
+    for e in sequence[1:]:
+        p = predicate(e)
+        if p != mode:
+            result.extend(transformers[mode](tmp))
+            tmp = [e]
+            mode = p
+        else:
+            tmp.append(e)
+    result.extend(transformers[mode](tmp))
+    return result
+
+def collapseStrings(results):
+    """
+    Turns a list of length-one strings and lists into a list of longer
+    strings and lists.  For example,
+
+    ['a', 'b', ['c', 'd']] is returned as ['ab', ['cd']]
+
+    @type results: C{list} of C{str} and C{list}
+    @param results: The list to be collapsed
+
+    @rtype: C{list} of C{str} and C{list}
+    @return: A new list which is the collapsed form of C{results}
+    """
+    copy = []
+    begun = None
+    listsList = [isinstance(s, types.ListType) for s in results]
+
+    pred = lambda e: isinstance(e, types.TupleType)
+    tran = {
+        0: lambda e: splitQuoted(''.join(e)),
+        1: lambda e: [''.join([i[0] for i in e])]
+    }
+    for (i, c, isList) in zip(range(len(results)), results, listsList):
+        if isList:
+            if begun is not None:
+                copy.extend(splitOn(results[begun:i], pred, tran))
+                begun = None
+            copy.append(collapseStrings(c))
+        elif begun is None:
+            begun = i
+    if begun is not None:
+        copy.extend(splitOn(results[begun:], pred, tran))
+    return copy
+
+
+def parseNestedParens(s, handleLiteral = 1):
+    """Parse an s-exp-like string into a more useful data structure.
+
+    @type s: C{str}
+    @param s: The s-exp-like string to parse
+
+    @rtype: C{list} of C{str} and C{list}
+    @return: A list containing the tokens present in the input.
+
+    @raise MismatchedNesting: Raised if the number or placement
+    of opening or closing parentheses is invalid.
+    """
+    s = s.strip()
+    inQuote = 0
+    contentStack = [[]]
+    try:
+        i = 0
+        L = len(s)
+        while i < L:
+            c = s[i]
+            if inQuote:
+                if c == '\\':
+                    contentStack[-1].append(s[i:i+2])
+                    i += 2
+                    continue
+                elif c == '"':
+                    inQuote = not inQuote
+                contentStack[-1].append(c)
+                i += 1
+            else:
+                if c == '"':
+                    contentStack[-1].append(c)
+                    inQuote = not inQuote
+                    i += 1
+                elif handleLiteral and c == '{':
+                    end = s.find('}', i)
+                    if end == -1:
+                        raise ValueError, "Malformed literal"
+                    literalSize = int(s[i+1:end])
+                    contentStack[-1].append((s[end+3:end+3+literalSize],))
+                    i = end + 3 + literalSize
+                elif c == '(' or c == '[':
+                    contentStack.append([])
+                    i += 1
+                elif c == ')' or c == ']':
+                    contentStack[-2].append(contentStack.pop())
+                    i += 1
+                else:
+                    contentStack[-1].append(c)
+                    i += 1
+    except IndexError:
+        raise MismatchedNesting(s)
+    if len(contentStack) != 1:
+        raise MismatchedNesting(s)
+    return collapseStrings(contentStack[0])
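Two made-up inputs showing the shape of the result; parenthesized groups
become nested lists and {n} literals are honoured when handleLiteral is true:

    parseNestedParens('ALPHA (BETA GAMMA)')   # -> ['ALPHA', ['BETA', 'GAMMA']]
    parseNestedParens('{5}\r\nhello')         # -> ['hello']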
+
+def _quote(s):
+    return '"%s"' % (s.replace('\\', '\\\\').replace('"', '\\"'),)
+
+def _literal(s):
+    return '{%d}\r\n%s' % (len(s), s)
+
+class DontQuoteMe:
+    def __init__(self, value):
+        self.value = value
+
+    def __str__(self):
+        return str(self.value)
+
+_ATOM_SPECIALS = '(){ %*"'
+def _needsQuote(s):
+    if s == '':
+        return 1
+    for c in s:
+        if c < '\x20' or c > '\x7f':
+            return 1
+        if c in _ATOM_SPECIALS:
+            return 1
+    return 0
+
+def _prepareMailboxName(name):
+    name = name.encode('imap4-utf-7')
+    if _needsQuote(name):
+        return _quote(name)
+    return name
+
+def _needsLiteral(s):
+    # Change this to "return 1" to wig out stupid clients
+    return '\n' in s or '\r' in s or len(s) > 1000
+
+def collapseNestedLists(items):
+    """Turn a nested list structure into an s-exp-like string.
+
+    Strings in C{items} will be sent as literals if they contain CR or LF,
+    otherwise they will be quoted.  References to None in C{items} will be
+    translated to the atom NIL.  Objects with a 'read' attribute will have
+    it called on them with no arguments and the returned string will be
+    inserted into the output as a literal.  Integers will be converted to
+    strings and inserted into the output unquoted.  Instances of
+    C{DontQuoteMe} will be converted to strings and inserted into the output
+    unquoted.
+
+    This function used to be much nicer, and only quote things that really
+    needed to be quoted (and C{DontQuoteMe} did not exist), however, many
+    broken IMAP4 clients were unable to deal with this level of sophistication,
+    forcing the current behavior to be adopted for practical reasons.
+
+    @type items: Any iterable
+
+    @rtype: C{str}
+    """
+    pieces = []
+    for i in items:
+        if i is None:
+            pieces.extend([' ', 'NIL'])
+        elif isinstance(i, (DontQuoteMe, int, long)):
+            pieces.extend([' ', str(i)])
+        elif isinstance(i, types.StringTypes):
+            if _needsLiteral(i):
+                pieces.extend([' ', '{', str(len(i)), '}', IMAP4Server.delimiter, i])
+            else:
+                pieces.extend([' ', _quote(i)])
+        elif hasattr(i, 'read'):
+            d = i.read()
+            pieces.extend([' ', '{', str(len(d)), '}', IMAP4Server.delimiter, d])
+        else:
+            pieces.extend([' ', '(%s)' % (collapseNestedLists(i),)])
+    return ''.join(pieces[1:])
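A rough round-trip sketch (values invented); this is approximately the
inverse of parseNestedParens, modulo quoting:

    collapseNestedLists([None, 1, 'foo', ['bar', 'baz']])
    # -> 'NIL 1 "foo" ("bar" "baz")'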
+
+
+class IClientAuthentication(Interface):
+    def getName():
+        """Return an identifier associated with this authentication scheme.
+
+        @rtype: C{str}
+        """
+
+    def challengeResponse(secret, challenge):
+        """Generate a challenge response string"""
+
+
+
+class CramMD5ClientAuthenticator:
+    implements(IClientAuthentication)
+
+    def __init__(self, user):
+        self.user = user
+
+    def getName(self):
+        return "CRAM-MD5"
+
+    def challengeResponse(self, secret, chal):
+        response = hmac.HMAC(secret, chal).hexdigest()
+        return '%s %s' % (self.user, response)
+
+
+
+class LOGINAuthenticator:
+    implements(IClientAuthentication)
+
+    def __init__(self, user):
+        self.user = user
+        self.challengeResponse = self.challengeUsername
+
+    def getName(self):
+        return "LOGIN"
+
+    def challengeUsername(self, secret, chal):
+        # Respond to something like "Username:"
+        self.challengeResponse = self.challengeSecret
+        return self.user
+
+    def challengeSecret(self, secret, chal):
+        # Respond to something like "Password:"
+        return secret
+
+class PLAINAuthenticator:
+    implements(IClientAuthentication)
+
+    def __init__(self, user):
+        self.user = user
+
+    def getName(self):
+        return "PLAIN"
+
+    def challengeResponse(self, secret, chal):
+        return '\0%s\0%s' % (self.user, secret)
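A sketch with throwaway credentials; each authenticator maps a server
challenge to the client's response string (LOGIN answers in two steps, first
the username and then the secret):

    PLAINAuthenticator('alice').challengeResponse('sekrit', 'ignored')
    # -> '\x00alice\x00sekrit'
    CramMD5ClientAuthenticator('alice').challengeResponse('sekrit', '<123@host>')
    # -> 'alice ' + hmac.HMAC('sekrit', '<123@host>').hexdigest()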
+
+
+class MailboxException(IMAP4Exception): pass
+
+class MailboxCollision(MailboxException):
+    def __str__(self):
+        return 'Mailbox named %s already exists' % self.args
+
+class NoSuchMailbox(MailboxException):
+    def __str__(self):
+        return 'No mailbox named %s exists' % self.args
+
+class ReadOnlyMailbox(MailboxException):
+    def __str__(self):
+        return 'Mailbox open in read-only state'
+
+
+class IAccount(Interface):
+    """Interface for Account classes
+
+    Implementors of this interface should consider implementing
+    C{INamespacePresenter}.
+    """
+
+    def addMailbox(name, mbox = None):
+        """Add a new mailbox to this account
+
+        @type name: C{str}
+        @param name: The name associated with this mailbox.  It may not
+        contain multiple hierarchical parts.
+
+        @type mbox: An object implementing C{IMailbox}
+        @param mbox: The mailbox to associate with this name.  If C{None},
+        a suitable default is created and used.
+
+        @rtype: C{Deferred} or C{bool}
+        @return: A true value if the creation succeeds, or a deferred whose
+        callback will be invoked when the creation succeeds.
+
+        @raise MailboxException: Raised if this mailbox cannot be added for
+        some reason.  This may also be raised asynchronously, if a C{Deferred}
+        is returned.
+        """
+
+    def create(pathspec):
+        """Create a new mailbox from the given hierarchical name.
+
+        @type pathspec: C{str}
+        @param pathspec: The full hierarchical name of a new mailbox to create.
+        If any of the inferior hierarchical names to this one do not exist,
+        they are created as well.
+
+        @rtype: C{Deferred} or C{bool}
+        @return: A true value if the creation succeeds, or a deferred whose
+        callback will be invoked when the creation succeeds.
+
+        @raise MailboxException: Raised if this mailbox cannot be added.
+        This may also be raised asynchronously, if a C{Deferred} is
+        returned.
+        """
+
+    def select(name, rw=True):
+        """Acquire a mailbox, given its name.
+
+        @type name: C{str}
+        @param name: The mailbox to acquire
+
+        @type rw: C{bool}
+        @param rw: If a true value, request a read-write version of this
+        mailbox.  If a false value, request a read-only version.
+
+        @rtype: Any object implementing C{IMailbox} or C{Deferred}
+        @return: The mailbox object, or a C{Deferred} whose callback will
+        be invoked with the mailbox object.  None may be returned if the
+        specified mailbox cannot be selected for any reason.
+        """
+
+    def delete(name):
+        """Delete the mailbox with the specified name.
+
+        @type name: C{str}
+        @param name: The mailbox to delete.
+
+        @rtype: C{Deferred} or C{bool}
+        @return: A true value if the mailbox is successfully deleted, or a
+        C{Deferred} whose callback will be invoked when the deletion
+        completes.
+
+        @raise MailboxException: Raised if this mailbox cannot be deleted.
+        This may also be raised asynchronously, if a C{Deferred} is returned.
+        """
+
+    def rename(oldname, newname):
+        """Rename a mailbox
+
+        @type oldname: C{str}
+        @param oldname: The current name of the mailbox to rename.
+
+        @type newname: C{str}
+        @param newname: The new name to associate with the mailbox.
+
+        @rtype: C{Deferred} or C{bool}
+        @return: A true value if the mailbox is successfully renamed, or a
+        C{Deferred} whose callback will be invoked when the rename operation
+        is completed.
+
+        @raise MailboxException: Raised if this mailbox cannot be
+        renamed.  This may also be raised asynchronously, if a C{Deferred}
+        is returned.
+        """
+
+    def isSubscribed(name):
+        """Check the subscription status of a mailbox
+
+        @type name: C{str}
+        @param name: The name of the mailbox to check
+
+        @rtype: C{Deferred} or C{bool}
+        @return: A true value if the given mailbox is currently subscribed
+        to, a false value otherwise.  A C{Deferred} may also be returned
+        whose callback will be invoked with one of these values.
+        """
+
+    def subscribe(name):
+        """Subscribe to a mailbox
+
+        @type name: C{str}
+        @param name: The name of the mailbox to subscribe to
+
+        @rtype: C{Deferred} or C{bool}
+        @return: A true value if the mailbox is subscribed to successfully,
+        or a Deferred whose callback will be invoked with this value when
+        the subscription is successful.
+
+        @raise MailboxException: Raised if this mailbox cannot be
+        subscribed to.  This may also be raised asynchronously, if a
+        C{Deferred} is returned.
+        """
+
+    def unsubscribe(name):
+        """Unsubscribe from a mailbox
+
+        @type name: C{str}
+        @param name: The name of the mailbox to unsubscribe from
+
+        @rtype: C{Deferred} or C{bool}
+        @return: A true value if the mailbox is unsubscribed from successfully,
+        or a Deferred whose callback will be invoked with this value when
+        the unsubscription is successful.
+
+        @raise MailboxException: Raised if this mailbox cannot be
+        unsubscribed from.  This may also be raised asynchronously, if a
+        C{Deferred} is returned.
+        """
+
+    def listMailboxes(ref, wildcard):
+        """List all the mailboxes that meet a certain criteria
+
+        @type ref: C{str}
+        @param ref: The context in which to apply the wildcard
+
+        @type wildcard: C{str}
+        @param wildcard: An expression against which to match mailbox names.
+        '*' matches any number of characters in a mailbox name, and '%'
+        matches similarly, but will not match across hierarchical boundaries.
+
+        @rtype: C{list} of C{tuple}
+        @return: A list of C{(mailboxName, mailboxObject)} which meet the
+        given criteria.  C{mailboxObject} should implement either
+        C{IMailboxInfo} or C{IMailbox}.  A Deferred may also be returned.
+        """
+
+class INamespacePresenter(Interface):
+    def getPersonalNamespaces():
+        """Report the available personal namespaces.
+
+        Typically there should be only one personal namespace.  A common
+        name for it is \"\", and its hierarchical delimiter is usually
+        \"/\".
+
+        @rtype: iterable of two-tuples of strings
+        @return: The personal namespaces and their hierarchical delimiters.
+        If no namespaces of this type exist, None should be returned.
+        """
+
+    def getSharedNamespaces():
+        """Report the available shared namespaces.
+
+        Shared namespaces do not belong to any individual user but are
+        usually available to one or more of them.  An example of a shared
+        namespace might be \"#news\" for a usenet gateway.
+
+        @rtype: iterable of two-tuples of strings
+        @return: The shared namespaces and their hierarchical delimiters.
+        If no namespaces of this type exist, None should be returned.
+        """
+
+    def getUserNamespaces():
+        """Report the available user namespaces.
+
+        These are namespaces that contain folders belonging to other users,
+        to which this account has been granted access.
+
+        @rtype: iterable of two-tuples of strings
+        @return: The user namespaces and their hierarchical delimiters.
+        If no namespaces of this type exist, None should be returned.
+        """
+
+
+class MemoryAccount(object):
+    implements(IAccount, INamespacePresenter)
+
+    mailboxes = None
+    subscriptions = None
+    top_id = 0
+
+    def __init__(self, name):
+        self.name = name
+        self.mailboxes = {}
+        self.subscriptions = []
+
+    def allocateID(self):
+        id = self.top_id
+        self.top_id += 1
+        return id
+
+    ##
+    ## IAccount
+    ##
+    def addMailbox(self, name, mbox = None):
+        name = name.upper()
+        if name in self.mailboxes:
+            raise MailboxCollision, name
+        if mbox is None:
+            mbox = self._emptyMailbox(name, self.allocateID())
+        self.mailboxes[name] = mbox
+        return 1
+
+    def create(self, pathspec):
+        paths = filter(None, pathspec.split('/'))
+        for accum in range(1, len(paths)):
+            try:
+                self.addMailbox('/'.join(paths[:accum]))
+            except MailboxCollision:
+                pass
+        try:
+            self.addMailbox('/'.join(paths))
+        except MailboxCollision:
+            if not pathspec.endswith('/'):
+                return False
+        return True
+
+    def _emptyMailbox(self, name, id):
+        raise NotImplementedError
+
+    def select(self, name, readwrite=1):
+        return self.mailboxes.get(name.upper())
+
+    def delete(self, name):
+        name = name.upper()
+        # See if this mailbox exists at all
+        mbox = self.mailboxes.get(name)
+        if not mbox:
+            raise MailboxException("No such mailbox")
+        # See if this box is flagged \Noselect
+        if r'\Noselect' in mbox.getFlags():
+            # Check for hierarchically inferior mailboxes with this one
+            # as part of their root.
+            for others in self.mailboxes.keys():
+                if others != name and others.startswith(name):
+                    raise MailboxException, "Hierarchically inferior mailboxes exist and \\Noselect is set"
+        mbox.destroy()
+
+        # iff there are no hierarchically inferior names, we will
+        # delete it from our ken.
+        if self._inferiorNames(name) > 1:
+            del self.mailboxes[name]
+
+    def rename(self, oldname, newname):
+        oldname = oldname.upper()
+        newname = newname.upper()
+        if oldname not in self.mailboxes:
+            raise NoSuchMailbox, oldname
+
+        inferiors = self._inferiorNames(oldname)
+        inferiors = [(o, o.replace(oldname, newname, 1)) for o in inferiors]
+
+        for (old, new) in inferiors:
+            if new in self.mailboxes:
+                raise MailboxCollision, new
+
+        for (old, new) in inferiors:
+            self.mailboxes[new] = self.mailboxes[old]
+            del self.mailboxes[old]
+
+    def _inferiorNames(self, name):
+        inferiors = []
+        for infname in self.mailboxes.keys():
+            if infname.startswith(name):
+                inferiors.append(infname)
+        return inferiors
+
+    def isSubscribed(self, name):
+        return name.upper() in self.subscriptions
+
+    def subscribe(self, name):
+        name = name.upper()
+        if name not in self.subscriptions:
+            self.subscriptions.append(name)
+
+    def unsubscribe(self, name):
+        name = name.upper()
+        if name not in self.subscriptions:
+            raise MailboxException, "Not currently subscribed to " + name
+        self.subscriptions.remove(name)
+
+    def listMailboxes(self, ref, wildcard):
+        ref = self._inferiorNames(ref.upper())
+        wildcard = wildcardToRegexp(wildcard, '/')
+        return [(i, self.mailboxes[i]) for i in ref if wildcard.match(i)]
+
+    ##
+    ## INamespacePresenter
+    ##
+    def getPersonalNamespaces(self):
+        return [["", "/"]]
+
+    def getSharedNamespaces(self):
+        return None
+
+    def getOtherNamespaces(self):
+        return None
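A minimal sketch of the hierarchy handling in MemoryAccount; the subclass and
mailbox names are hypothetical, and a real account would return an IMailbox
implementation from _emptyMailbox:

    class _SketchAccount(MemoryAccount):
        def _emptyMailbox(self, name, id):
            return object()   # placeholder only, not a usable IMailbox

    acct = _SketchAccount('demo')
    acct.create('projects/vtk/docs')
    sorted(acct.mailboxes)   # -> ['PROJECTS', 'PROJECTS/VTK', 'PROJECTS/VTK/DOCS']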
+
+
+
+_statusRequestDict = {
+    'MESSAGES': 'getMessageCount',
+    'RECENT': 'getRecentCount',
+    'UIDNEXT': 'getUIDNext',
+    'UIDVALIDITY': 'getUIDValidity',
+    'UNSEEN': 'getUnseenCount'
+}
+def statusRequestHelper(mbox, names):
+    r = {}
+    for n in names:
+        r[n] = getattr(mbox, _statusRequestDict[n.upper()])()
+    return r
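For illustration, a hypothetical mailbox stub exposing just the counters a
STATUS request needs:

    class _StatusStub:
        def getMessageCount(self): return 3
        def getRecentCount(self): return 1
        def getUnseenCount(self): return 2
        def getUIDNext(self): return 4
        def getUIDValidity(self): return 42

    statusRequestHelper(_StatusStub(), ['MESSAGES', 'UNSEEN'])
    # -> {'MESSAGES': 3, 'UNSEEN': 2}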
+
+def parseAddr(addr):
+    if addr is None:
+        return [(None, None, None),]
+    addrs = email.Utils.getaddresses([addr])
+    return [[fn or None, None] + addr.split('@') for fn, addr in addrs]
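The address below is made up; parseAddr flattens a header value into the
(name, route, mailbox, host) shape used by the ENVELOPE response:

    parseAddr('Alice Example <alice@example.com>')
    # -> [['Alice Example', None, 'alice', 'example.com']]
    parseAddr(None)   # -> [(None, None, None)]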
+
+def getEnvelope(msg):
+    headers = msg.getHeaders(True)
+    date = headers.get('date')
+    subject = headers.get('subject')
+    from_ = headers.get('from')
+    sender = headers.get('sender', from_)
+    reply_to = headers.get('reply-to', from_)
+    to = headers.get('to')
+    cc = headers.get('cc')
+    bcc = headers.get('bcc')
+    in_reply_to = headers.get('in-reply-to')
+    mid = headers.get('message-id')
+    return (date, subject, parseAddr(from_), parseAddr(sender),
+        reply_to and parseAddr(reply_to), to and parseAddr(to),
+        cc and parseAddr(cc), bcc and parseAddr(bcc), in_reply_to, mid)
+
+def getLineCount(msg):
+    # XXX - Super expensive, CACHE THIS VALUE FOR LATER RE-USE
+    # XXX - This must be the number of lines in the ENCODED version
+    lines = 0
+    for _ in msg.getBodyFile():
+        lines += 1
+    return lines
+
+def unquote(s):
+    if s[0] == s[-1] == '"':
+        return s[1:-1]
+    return s
+
+
+def _getContentType(msg):
+    """
+    Return a two-tuple of the main and subtype of the given message.
+    """
+    attrs = None
+    mm = msg.getHeaders(False, 'content-type').get('content-type', None)
+    if mm:
+        mm = ''.join(mm.splitlines())
+        mimetype = mm.split(';')
+        if mimetype:
+            type = mimetype[0].split('/', 1)
+            if len(type) == 1:
+                major = type[0]
+                minor = None
+            elif len(type) == 2:
+                major, minor = type
+            else:
+                major = minor = None
+            attrs = dict(x.strip().lower().split('=', 1) for x in mimetype[1:])
+        else:
+            major = minor = None
+    else:
+        major = minor = None
+    return major, minor, attrs
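A tiny stand-in message part (hypothetical) is enough to exercise the parser:

    class _FakePart:
        def getHeaders(self, negate, *names):
            return {'content-type': 'text/plain; charset=us-ascii; format=flowed'}

    _getContentType(_FakePart())
    # -> ('text', 'plain', {'charset': 'us-ascii', 'format': 'flowed'})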
+
+
+
+def _getMessageStructure(message):
+    """
+    Construct an appropriate type of message structure object for the given
+    message object.
+
+    @param message: A L{IMessagePart} provider
+
+    @return: A L{_MessageStructure} instance of the most specific type available
+        for the given message, determined by inspecting the MIME type of the
+        message.
+    """
+    main, subtype, attrs = _getContentType(message)
+    if main is not None:
+        main = main.lower()
+    if subtype is not None:
+        subtype = subtype.lower()
+    if main == 'multipart':
+        return _MultipartMessageStructure(message, subtype, attrs)
+    elif (main, subtype) == ('message', 'rfc822'):
+        return _RFC822MessageStructure(message, main, subtype, attrs)
+    elif main == 'text':
+        return _TextMessageStructure(message, main, subtype, attrs)
+    else:
+        return _SinglepartMessageStructure(message, main, subtype, attrs)
+
+
+
+class _MessageStructure(object):
+    """
+    L{_MessageStructure} is a helper base class for message structure classes
+    representing the structure of particular kinds of messages, as defined by
+    their MIME type.
+    """
+    def __init__(self, message, attrs):
+        """
+        @param message: An L{IMessagePart} provider which this structure object
+            reports on.
+
+        @param attrs: A C{dict} giving the parameters of the I{Content-Type}
+            header of the message.
+        """
+        self.message = message
+        self.attrs = attrs
+
+
+    def _disposition(self, disp):
+        """
+        Parse a I{Content-Disposition} header into a two-sequence of the
+        disposition and a flattened list of its parameters.
+
+        @return: C{None} if there is no disposition header value, a C{list} with
+            two elements otherwise.
+        """
+        if disp:
+            disp = disp.split('; ')
+            if len(disp) == 1:
+                disp = (disp[0].lower(), None)
+            elif len(disp) > 1:
+                # XXX Poorly tested parser
+                params = [x for param in disp[1:] for x in param.split('=', 1)]
+                disp = [disp[0].lower(), params]
+            return disp
+        else:
+            return None
+
+
+    def _unquotedAttrs(self):
+        """
+        @return: The I{Content-Type} parameters, unquoted, as a flat list with
+            each Nth element giving a parameter name and N+1th element giving
+            the corresponding parameter value.
+        """
+        if self.attrs:
+            unquoted = [(k, unquote(v)) for (k, v) in self.attrs.iteritems()]
+            return [y for x in sorted(unquoted) for y in x]
+        return None
+
+
+
+class _SinglepartMessageStructure(_MessageStructure):
+    """
+    L{_SinglepartMessageStructure} represents the message structure of a
+    non-I{multipart/*} message.
+    """
+    _HEADERS = [
+        'content-id', 'content-description',
+        'content-transfer-encoding']
+
+    def __init__(self, message, main, subtype, attrs):
+        """
+        @param message: An L{IMessagePart} provider which this structure object
+            reports on.
+
+        @param main: A C{str} giving the main MIME type of the message (for
+            example, C{"text"}).
+
+        @param subtype: A C{str} giving the MIME subtype of the message (for
+            example, C{"plain"}).
+
+        @param attrs: A C{dict} giving the parameters of the I{Content-Type}
+            header of the message.
+        """
+        _MessageStructure.__init__(self, message, attrs)
+        self.main = main
+        self.subtype = subtype
+        self.attrs = attrs
+
+
+    def _basicFields(self):
+        """
+        Return a list of the basic fields for a single-part message.
+        """
+        headers = self.message.getHeaders(False, *self._HEADERS)
+
+        # Number of octets total
+        size = self.message.getSize()
+
+        major, minor = self.main, self.subtype
+
+        # content-type parameter list
+        unquotedAttrs = self._unquotedAttrs()
+
+        return [
+            major, minor, unquotedAttrs,
+            headers.get('content-id'),
+            headers.get('content-description'),
+            headers.get('content-transfer-encoding'),
+            size,
+            ]
+
+
+    def encode(self, extended):
+        """
+        Construct and return a list of the basic and extended fields for a
+        single-part message.  The list is suitable to be encoded into a BODY or
+        BODYSTRUCTURE response.
+        """
+        result = self._basicFields()
+        if extended:
+            result.extend(self._extended())
+        return result
+
+
+    def _extended(self):
+        """
+        The extension data of a non-multipart body part are in the
+        following order:
+
+          1. body MD5
+
+             A string giving the body MD5 value as defined in [MD5].
+
+          2. body disposition
+
+             A parenthesized list with the same content and function as
+             the body disposition for a multipart body part.
+
+          3. body language
+
+             A string or parenthesized list giving the body language
+             value as defined in [LANGUAGE-TAGS].
+
+          4. body location
+
+             A string list giving the body content URI as defined in
+             [LOCATION].
+
+        """
+        result = []
+        headers = self.message.getHeaders(
+            False, 'content-md5', 'content-disposition',
+            'content-language', 'content-location')
+
+        result.append(headers.get('content-md5'))
+        result.append(self._disposition(headers.get('content-disposition')))
+        result.append(headers.get('content-language'))
+        result.append(headers.get('content-location'))
+
+        return result
+
+
+
+class _TextMessageStructure(_SinglepartMessageStructure):
+    """
+    L{_TextMessageStructure} represents the message structure of a I{text/*}
+    message.
+    """
+    def encode(self, extended):
+        """
+        A body type of type TEXT contains, immediately after the basic
+        fields, the size of the body in text lines.  Note that this
+        size is the size in its content transfer encoding and not the
+        resulting size after any decoding.
+        """
+        result = _SinglepartMessageStructure._basicFields(self)
+        result.append(getLineCount(self.message))
+        if extended:
+            result.extend(self._extended())
+        return result
+
+
+
+class _RFC822MessageStructure(_SinglepartMessageStructure):
+    """
+    L{_RFC822MessageStructure} represents the message structure of a
+    I{message/rfc822} message.
+    """
+    def encode(self, extended):
+        """
+        A body type of type MESSAGE and subtype RFC822 contains,
+        immediately after the basic fields, the envelope structure,
+        body structure, and size in text lines of the encapsulated
+        message.
+        """
+        result = _SinglepartMessageStructure.encode(self, extended)
+        contained = self.message.getSubPart(0)
+        result.append(getEnvelope(contained))
+        result.append(getBodyStructure(contained, False))
+        result.append(getLineCount(contained))
+        return result
+
+
+
+class _MultipartMessageStructure(_MessageStructure):
+    """
+    L{_MultipartMessageStructure} represents the message structure of a
+    I{multipart/*} message.
+    """
+    def __init__(self, message, subtype, attrs):
+        """
+        @param message: An L{IMessagePart} provider which this structure object
+            reports on.
+
+        @param subtype: A C{str} giving the MIME subtype of the message (for
+            example, C{"mixed"}).
+
+        @param attrs: A C{dict} giving the parameters of the I{Content-Type}
+            header of the message.
+        """
+        _MessageStructure.__init__(self, message, attrs)
+        self.subtype = subtype
+
+
+    def _getParts(self):
+        """
+        Return an iterator over all of the sub-messages of this message.
+        """
+        i = 0
+        while True:
+            try:
+                part = self.message.getSubPart(i)
+            except IndexError:
+                break
+            else:
+                yield part
+                i += 1
+
+
+    def encode(self, extended):
+        """
+        Encode each sub-message and add the additional I{multipart} fields.
+        """
+        result = [_getMessageStructure(p).encode(extended) for p in self._getParts()]
+        result.append(self.subtype)
+        if extended:
+            result.extend(self._extended())
+        return result
+
+
+    def _extended(self):
+        """
+        The extension data of a multipart body part are in the following order:
+
+          1. body parameter parenthesized list
+               A parenthesized list of attribute/value pairs [e.g., ("foo"
+               "bar" "baz" "rag") where "bar" is the value of "foo", and
+               "rag" is the value of "baz"] as defined in [MIME-IMB].
+
+          2. body disposition
+               A parenthesized list, consisting of a disposition type
+               string, followed by a parenthesized list of disposition
+               attribute/value pairs as defined in [DISPOSITION].
+
+          3. body language
+               A string or parenthesized list giving the body language
+               value as defined in [LANGUAGE-TAGS].
+
+          4. body location
+               A string list giving the body content URI as defined in
+               [LOCATION].
+        """
+        result = []
+        headers = self.message.getHeaders(
+            False, 'content-language', 'content-location',
+            'content-disposition')
+
+        result.append(self._unquotedAttrs())
+        result.append(self._disposition(headers.get('content-disposition')))
+        result.append(headers.get('content-language', None))
+        result.append(headers.get('content-location', None))
+
+        return result
+
+
+
+def getBodyStructure(msg, extended=False):
+    """
+    RFC 3501, 7.4.2, BODYSTRUCTURE::
+
+      A parenthesized list that describes the [MIME-IMB] body structure of a
+      message.  This is computed by the server by parsing the [MIME-IMB] header
+      fields, defaulting various fields as necessary.
+
+        For example, a simple text message of 48 lines and 2279 octets can have
+        a body structure of: ("TEXT" "PLAIN" ("CHARSET" "US-ASCII") NIL NIL
+        "7BIT" 2279 48)
+
+    This is represented as::
+
+        ["TEXT", "PLAIN", ["CHARSET", "US-ASCII"], None, None, "7BIT", 2279, 48]
+
+    These basic fields are documented in the RFC as:
+
+      1. body type
+
+         A string giving the content media type name as defined in
+         [MIME-IMB].
+
+      2. body subtype
+
+         A string giving the content subtype name as defined in
+         [MIME-IMB].
+
+      3. body parameter parenthesized list
+
+         A parenthesized list of attribute/value pairs [e.g., ("foo"
+         "bar" "baz" "rag") where "bar" is the value of "foo" and
+         "rag" is the value of "baz"] as defined in [MIME-IMB].
+
+      4. body id
+
+         A string giving the content id as defined in [MIME-IMB].
+
+      5. body description
+
+         A string giving the content description as defined in
+         [MIME-IMB].
+
+      6. body encoding
+
+         A string giving the content transfer encoding as defined in
+         [MIME-IMB].
+
+      7. body size
+
+         A number giving the size of the body in octets.  Note that this size is
+         the size in its transfer encoding and not the resulting size after any
+         decoding.
+
+    Put another way, the body structure is a list of seven elements.  The
+    semantics of the elements of this list are:
+
+       1. Byte string giving the major MIME type
+       2. Byte string giving the minor MIME type
+       3. A list giving the Content-Type parameters of the message
+       4. A byte string giving the content identifier for the message part, or
+          None if it has no content identifier.
+       5. A byte string giving the content description for the message part, or
+          None if it has no content description.
+       6. A byte string giving the Content-Encoding of the message body
+       7. An integer giving the number of octets in the message body
+
+    The RFC goes on::
+
+        Multiple parts are indicated by parenthesis nesting.  Instead of a body
+        type as the first element of the parenthesized list, there is a sequence
+        of one or more nested body structures.  The second element of the
+        parenthesized list is the multipart subtype (mixed, digest, parallel,
+        alternative, etc.).
+
+        For example, a two part message consisting of a text and a
+        BASE64-encoded text attachment can have a body structure of: (("TEXT"
+        "PLAIN" ("CHARSET" "US-ASCII") NIL NIL "7BIT" 1152 23)("TEXT" "PLAIN"
+        ("CHARSET" "US-ASCII" "NAME" "cc.diff")
+        "<960723163407.20117h at cac.washington.edu>" "Compiler diff" "BASE64" 4554
+        73) "MIXED")
+
+    This is represented as::
+
+        [["TEXT", "PLAIN", ["CHARSET", "US-ASCII"], None, None, "7BIT", 1152,
+          23],
+         ["TEXT", "PLAIN", ["CHARSET", "US-ASCII", "NAME", "cc.diff"],
+          "<960723163407.20117h at cac.washington.edu>", "Compiler diff",
+          "BASE64", 4554, 73],
+         "MIXED"]
+
+    In other words, a list of N + 1 elements, where N is the number of parts in
+    the message.  The first N elements are structures as defined by the previous
+    section.  The last element is the minor MIME subtype of the multipart
+    message.
+
+    Additionally, the RFC describes extension data::
+
+        Extension data follows the multipart subtype.  Extension data is never
+        returned with the BODY fetch, but can be returned with a BODYSTRUCTURE
+        fetch.  Extension data, if present, MUST be in the defined order.
+
+    The C{extended} flag controls whether extension data might be returned with
+    the normal data.
+    """
+    return _getMessageStructure(msg).encode(extended)
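A sketch with a hypothetical single-part text message, just to show the shape
of the non-extended result; the stub returns the same header dict regardless
of the names requested, so the id, description and encoding fields come back
as None:

    import StringIO

    class _FakeTextPart:
        def getHeaders(self, negate, *names):
            return {'content-type': 'text/plain; charset=us-ascii'}
        def getSize(self):
            return 2279
        def getBodyFile(self):
            return StringIO.StringIO('hello\r\nworld\r\n')

    getBodyStructure(_FakeTextPart())
    # -> ['text', 'plain', ['charset', 'us-ascii'], None, None, None, 2279, 2]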
+
+
+
+class IMessagePart(Interface):
+    def getHeaders(negate, *names):
+        """Retrieve a group of message headers.
+
+        @type names: C{tuple} of C{str}
+        @param names: The names of the headers to retrieve or omit.
+
+        @type negate: C{bool}
+        @param negate: If True, indicates that the headers listed in C{names}
+        should be omitted from the return value, rather than included.
+
+        @rtype: C{dict}
+        @return: A mapping of header field names to header field values
+        """
+
+    def getBodyFile():
+        """Retrieve a file object containing only the body of this message.
+        """
+
+    def getSize():
+        """Retrieve the total size, in octets, of this message.
+
+        @rtype: C{int}
+        """
+
+    def isMultipart():
+        """Indicate whether this message has subparts.
+
+        @rtype: C{bool}
+        """
+
+    def getSubPart(part):
+        """Retrieve a MIME sub-message
+
+        @type part: C{int}
+        @param part: The number of the part to retrieve, indexed from 0.
+
+        @raise IndexError: Raised if the specified part does not exist.
+        @raise TypeError: Raised if this message is not multipart.
+
+        @rtype: Any object implementing C{IMessagePart}.
+        @return: The specified sub-part.
+        """
+
+class IMessage(IMessagePart):
+    def getUID():
+        """Retrieve the unique identifier associated with this message.
+        """
+
+    def getFlags():
+        """Retrieve the flags associated with this message.
+
+        @rtype: C{iterable}
+        @return: The flags, represented as strings.
+        """
+
+    def getInternalDate():
+        """Retrieve the date internally associated with this message.
+
+        @rtype: C{str}
+        @return: An RFC822-formatted date string.
+        """
+
+class IMessageFile(Interface):
+    """Optional message interface for representing messages as files.
+
+    If provided by message objects, this interface will be used instead of
+    the more complex MIME-based interface.
+    """
+    def open():
+        """Return an file-like object opened for reading.
+
+        Reading from the returned file will return all the bytes
+        of which this message consists.
+        """
+
+class ISearchableMailbox(Interface):
+    def search(query, uid):
+        """Search for messages that meet the given query criteria.
+
+        If this interface is not implemented by the mailbox, L{IMailbox.fetch}
+        and various methods of L{IMessage} will be used instead.
+
+        Implementations which wish to offer better performance than the
+        default implementation should implement this interface.
+
+        @type query: C{list}
+        @param query: The search criteria
+
+        @type uid: C{bool}
+        @param uid: If true, the IDs specified in the query are UIDs;
+        otherwise they are message sequence IDs.
+
+        @rtype: C{list} or C{Deferred}
+        @return: A list of message sequence numbers or message UIDs which
+        match the search criteria or a C{Deferred} whose callback will be
+        invoked with such a list.
+
+        @raise IllegalQueryError: Raised when query is not valid.
+        """
+
+class IMessageCopier(Interface):
+    def copy(messageObject):
+        """Copy the given message object into this mailbox.
+
+        The message object will be one which was previously returned by
+        L{IMailbox.fetch}.
+
+        Implementations which wish to offer better performance than the
+        default implementation should implement this interface.
+
+        If this interface is not implemented by the mailbox, IMailbox.addMessage
+        will be used instead.
+
+        @rtype: C{Deferred} or C{int}
+        @return: Either the UID of the message or a Deferred which fires
+        with the UID when the copy finishes.
+        """
+
+class IMailboxInfo(Interface):
+    """Interface specifying only the methods required for C{listMailboxes}.
+
+    Implementations can return objects implementing only these methods from
+    C{listMailboxes} if doing so allows them to operate more
+    efficiently.
+    """
+
+    def getFlags():
+        """Return the flags defined in this mailbox
+
+        Flags with the \\ prefix are reserved for use as system flags.
+
+        @rtype: C{list} of C{str}
+        @return: A list of the flags that can be set on messages in this mailbox.
+        """
+
+    def getHierarchicalDelimiter():
+        """Get the character which delimits namespaces for in this mailbox.
+
+        @rtype: C{str}
+        """
+
+class IMailbox(IMailboxInfo):
+    def getUIDValidity():
+        """Return the unique validity identifier for this mailbox.
+
+        @rtype: C{int}
+        """
+
+    def getUIDNext():
+        """Return the likely UID for the next message added to this mailbox.
+
+        @rtype: C{int}
+        """
+
+    def getUID(message):
+        """Return the UID of a message in the mailbox
+
+        @type message: C{int}
+        @param message: The message sequence number
+
+        @rtype: C{int}
+        @return: The UID of the message.
+        """
+
+    def getMessageCount():
+        """Return the number of messages in this mailbox.
+
+        @rtype: C{int}
+        """
+
+    def getRecentCount():
+        """Return the number of messages with the 'Recent' flag.
+
+        @rtype: C{int}
+        """
+
+    def getUnseenCount():
+        """Return the number of messages with the 'Unseen' flag.
+
+        @rtype: C{int}
+        """
+
+    def isWriteable():
+        """Get the read/write status of the mailbox.
+
+        @rtype: C{int}
+        @return: A true value if write permission is allowed, a false value otherwise.
+        """
+
+    def destroy():
+        """Called before this mailbox is deleted, permanently.
+
+        If necessary, all resources held by this mailbox should be cleaned
+        up here.  This function _must_ set the \\Noselect flag on this
+        mailbox.
+        """
+
+    def requestStatus(names):
+        """Return status information about this mailbox.
+
+        For mailboxes which do not intend to do any special processing to
+        generate the return value, C{statusRequestHelper} can be used
+        to build the dictionary by calling the other interface methods
+        which return the data for each name.
+
+        @type names: Any iterable
+        @param names: The status names to return information regarding.
+        The possible values for each name are: MESSAGES, RECENT, UIDNEXT,
+        UIDVALIDITY, UNSEEN.
+
+        @rtype: C{dict} or C{Deferred}
+        @return: A dictionary containing status information about the
+        requested names is returned.  If the process of looking this
+        information up would be costly, a deferred whose callback will
+        eventually be passed this dictionary is returned instead.
+        """
+
+    def addListener(listener):
+        """Add a mailbox change listener
+
+        @type listener: Any object which implements C{IMailboxListener}
+        @param listener: An object to add to the set of those which will
+        be notified when the contents of this mailbox change.
+        """
+
+    def removeListener(listener):
+        """Remove a mailbox change listener
+
+        @type listener: Any object previously added to and not removed from
+        this mailbox as a listener.
+        @param listener: The object to remove from the set of listeners.
+
+        @raise ValueError: Raised when the given object is not a listener for
+        this mailbox.
+        """
+
+    def addMessage(message, flags = (), date = None):
+        """Add the given message to this mailbox.
+
+        @type message: A file-like object
+        @param message: The RFC822 formatted message
+
+        @type flags: Any iterable of C{str}
+        @param flags: The flags to associate with this message
+
+        @type date: C{str}
+        @param date: If specified, the date to associate with this
+        message.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked with the message
+        id if the message is added successfully and whose errback is
+        invoked otherwise.
+
+        @raise ReadOnlyMailbox: Raised if this Mailbox is not open for
+        read-write.
+        """
+
+    def expunge():
+        """Remove all messages flagged \\Deleted.
+
+        @rtype: C{list} or C{Deferred}
+        @return: The list of message sequence numbers which were deleted,
+        or a C{Deferred} whose callback will be invoked with such a list.
+
+        @raise ReadOnlyMailbox: Raised if this Mailbox is not open for
+        read-write.
+        """
+
+    def fetch(messages, uid):
+        """Retrieve one or more messages.
+
+        @type messages: C{MessageSet}
+        @param messages: The identifiers of messages to retrieve information
+        about
+
+        @type uid: C{bool}
+        @param uid: If true, the IDs specified in the query are UIDs;
+        otherwise they are message sequence IDs.
+
+        @rtype: Any iterable of two-tuples of message sequence numbers and
+        implementors of C{IMessage}.
+        """
+
+    def store(messages, flags, mode, uid):
+        """Set the flags of one or more messages.
+
+        @type messages: A MessageSet object with the list of messages requested
+        @param messages: The identifiers of the messages to set the flags of.
+
+        @type flags: sequence of C{str}
+        @param flags: The flags to set, unset, or add.
+
+        @type mode: -1, 0, or 1
+        @param mode: If mode is -1, these flags should be removed from the
+        specified messages.  If mode is 1, these flags should be added to
+        the specified messages.  If mode is 0, all existing flags should be
+        cleared and these flags should be added.
+
+        @type uid: C{bool}
+        @param uid: If true, the IDs specified in the query are UIDs;
+        otherwise they are message sequence IDs.
+
+        @rtype: C{dict} or C{Deferred}
+        @return: A C{dict} mapping message sequence numbers to sequences of C{str}
+        representing the flags set on the message after this operation has
+        been performed, or a C{Deferred} whose callback will be invoked with
+        such a C{dict}.
+
+        @raise ReadOnlyMailbox: Raised if this mailbox is not open for
+        read-write.
+        """
+
+class ICloseableMailbox(Interface):
+    """A supplementary interface for mailboxes which require cleanup on close.
+
+    Implementing this interface is optional.  If it is implemented, the protocol
+    code will call the close method defined whenever a mailbox is closed.
+    """
+    def close():
+        """Close this mailbox.
+
+        @return: A C{Deferred} which fires when this mailbox
+        has been closed, or None if the mailbox can be closed
+        immediately.
+        """
+
+def _formatHeaders(headers):
+    hdrs = [': '.join((k.title(), '\r\n'.join(v.splitlines()))) for (k, v)
+            in headers.iteritems()]
+    hdrs = '\r\n'.join(hdrs) + '\r\n'
+    return hdrs
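For example (header values invented; the dictionary iteration order is not
guaranteed here):

    _formatHeaders({'subject': 'Hello', 'mime-version': '1.0'})
    # -> 'Subject: Hello\r\nMime-Version: 1.0\r\n'  (or the two lines swapped)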
+
+def subparts(m):
+    i = 0
+    try:
+        while True:
+            yield m.getSubPart(i)
+            i += 1
+    except IndexError:
+        pass
+
+def iterateInReactor(i):
+    """Consume an interator at most a single iteration per reactor iteration.
+
+    If the iterator produces a Deferred, the next iteration will not occur
+    until the Deferred fires, otherwise the next iteration will be taken
+    in the next reactor iteration.
+
+    @rtype: C{Deferred}
+    @return: A deferred which fires (with None) when the iterator is
+    exhausted or whose errback is called if there is an exception.
+    """
+    from twisted.internet import reactor
+    d = defer.Deferred()
+    def go(last):
+        try:
+            r = i.next()
+        except StopIteration:
+            d.callback(last)
+        except:
+            d.errback()
+        else:
+            if isinstance(r, defer.Deferred):
+                r.addCallback(go)
+            else:
+                reactor.callLater(0, go, r)
+    go(None)
+    return d
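A hedged sketch of stepping a generator one item per reactor turn; a yielded
Deferred would pause iteration until it fires:

    def _countdown(n):
        while n:
            yield n
            n -= 1

    # d = iterateInReactor(_countdown(3))   # requires a running reactor
    # d fires with the final yielded value once the generator is exhausted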
+
+class MessageProducer:
+    CHUNK_SIZE = 2 ** 2 ** 2 ** 2  # right-associative, i.e. 2 ** 16 == 65536
+
+    def __init__(self, msg, buffer = None, scheduler = None):
+        """Produce this message.
+
+        @param msg: The message I am to produce.
+        @type msg: L{IMessage}
+
+        @param buffer: A buffer to hold the message in.  If None, I will
+            use a L{tempfile.TemporaryFile}.
+        @type buffer: file-like
+        """
+        self.msg = msg
+        if buffer is None:
+            buffer = tempfile.TemporaryFile()
+        self.buffer = buffer
+        if scheduler is None:
+            scheduler = iterateInReactor
+        self.scheduler = scheduler
+        self.write = self.buffer.write
+
+    def beginProducing(self, consumer):
+        self.consumer = consumer
+        return self.scheduler(self._produce())
+
+    def _produce(self):
+        headers = self.msg.getHeaders(True)
+        boundary = None
+        if self.msg.isMultipart():
+            content = headers.get('content-type')
+            parts = [x.split('=', 1) for x in content.split(';')[1:]]
+            parts = dict([(k.lower().strip(), v) for (k, v) in parts])
+            boundary = parts.get('boundary')
+            if boundary is None:
+                # Bastards
+                boundary = '----=_%f_boundary_%f' % (time.time(), random.random())
+                headers['content-type'] += '; boundary="%s"' % (boundary,)
+            else:
+                if boundary.startswith('"') and boundary.endswith('"'):
+                    boundary = boundary[1:-1]
+
+        self.write(_formatHeaders(headers))
+        self.write('\r\n')
+        if self.msg.isMultipart():
+            for p in subparts(self.msg):
+                self.write('\r\n--%s\r\n' % (boundary,))
+                yield MessageProducer(p, self.buffer, self.scheduler
+                    ).beginProducing(None
+                    )
+            self.write('\r\n--%s--\r\n' % (boundary,))
+        else:
+            f = self.msg.getBodyFile()
+            while True:
+                b = f.read(self.CHUNK_SIZE)
+                if b:
+                    self.buffer.write(b)
+                    yield None
+                else:
+                    break
+        if self.consumer:
+            self.buffer.seek(0, 0)
+            yield FileProducer(self.buffer
+                ).beginProducing(self.consumer
+                ).addCallback(lambda _: self
+                )
+
+class _FetchParser:
+    class Envelope:
+        # Response should be a list of fields from the message:
+        #   date, subject, from, sender, reply-to, to, cc, bcc, in-reply-to,
+        #   and message-id.
+        #
+        # from, sender, reply-to, to, cc, and bcc are themselves lists of
+        # address information:
+        #   personal name, source route, mailbox name, host name
+        #
+        # reply-to and sender must not be None.  If not present in a message
+        # they should be defaulted to the value of the from field.
+        type = 'envelope'
+        __str__ = lambda self: 'envelope'
+
+    class Flags:
+        type = 'flags'
+        __str__ = lambda self: 'flags'
+
+    class InternalDate:
+        type = 'internaldate'
+        __str__ = lambda self: 'internaldate'
+
+    class RFC822Header:
+        type = 'rfc822header'
+        __str__ = lambda self: 'rfc822.header'
+
+    class RFC822Text:
+        type = 'rfc822text'
+        __str__ = lambda self: 'rfc822.text'
+
+    class RFC822Size:
+        type = 'rfc822size'
+        __str__ = lambda self: 'rfc822.size'
+
+    class RFC822:
+        type = 'rfc822'
+        __str__ = lambda self: 'rfc822'
+
+    class UID:
+        type = 'uid'
+        __str__ = lambda self: 'uid'
+
+    class Body:
+        type = 'body'
+        peek = False
+        header = None
+        mime = None
+        text = None
+        part = ()
+        empty = False
+        partialBegin = None
+        partialLength = None
+        def __str__(self):
+            base = 'BODY'
+            part = ''
+            separator = ''
+            if self.part:
+                part = '.'.join([str(x + 1) for x in self.part])
+                separator = '.'
+#            if self.peek:
+#                base += '.PEEK'
+            if self.header:
+                base += '[%s%s%s]' % (part, separator, self.header,)
+            elif self.text:
+                base += '[%s%sTEXT]' % (part, separator)
+            elif self.mime:
+                base += '[%s%sMIME]' % (part, separator)
+            elif self.empty:
+                base += '[%s]' % (part,)
+            if self.partialBegin is not None:
+                base += '<%d.%d>' % (self.partialBegin, self.partialLength)
+            return base
+
+    class BodyStructure:
+        type = 'bodystructure'
+        __str__ = lambda self: 'bodystructure'
+
+    # These three aren't top-level; they don't need type indicators
+    class Header:
+        negate = False
+        fields = None
+        part = None
+        def __str__(self):
+            base = 'HEADER'
+            if self.fields:
+                base += '.FIELDS'
+                if self.negate:
+                    base += '.NOT'
+                fields = []
+                for f in self.fields:
+                    f = f.title()
+                    if _needsQuote(f):
+                        f = _quote(f)
+                    fields.append(f)
+                base += ' (%s)' % ' '.join(fields)
+            if self.part:
+                base = '.'.join([str(x + 1) for x in self.part]) + '.' + base
+            return base
+
+    class Text:
+        pass
+
+    class MIME:
+        pass
+
+    parts = None
+
+    _simple_fetch_att = [
+        ('envelope', Envelope),
+        ('flags', Flags),
+        ('internaldate', InternalDate),
+        ('rfc822.header', RFC822Header),
+        ('rfc822.text', RFC822Text),
+        ('rfc822.size', RFC822Size),
+        ('rfc822', RFC822),
+        ('uid', UID),
+        ('bodystructure', BodyStructure),
+    ]
+
+    def __init__(self):
+        self.state = ['initial']
+        self.result = []
+        self.remaining = ''
+
+    def parseString(self, s):
+        s = self.remaining + s
+        try:
+            while s or self.state:
+                if not self.state:
+                    raise IllegalClientResponse("Invalid Argument")
+                # print 'Entering state_' + self.state[-1] + ' with', repr(s)
+                state = self.state.pop()
+                try:
+                    used = getattr(self, 'state_' + state)(s)
+                except:
+                    self.state.append(state)
+                    raise
+                else:
+                    # print state, 'consumed', repr(s[:used])
+                    s = s[used:]
+        finally:
+            self.remaining = s
+
+    def state_initial(self, s):
+        # In the initial state, the literals "ALL", "FULL", and "FAST"
+        # are accepted, as is a "(" opening a parenthesized list of
+        # fetch_att tokens, as is the beginning of a bare fetch_att token.
+        if s == '':
+            return 0
+
+        l = s.lower()
+        if l.startswith('all'):
+            self.result.extend((
+                self.Flags(), self.InternalDate(),
+                self.RFC822Size(), self.Envelope()
+            ))
+            return 3
+        if l.startswith('full'):
+            self.result.extend((
+                self.Flags(), self.InternalDate(),
+                self.RFC822Size(), self.Envelope(),
+                self.Body()
+            ))
+            return 4
+        if l.startswith('fast'):
+            self.result.extend((
+                self.Flags(), self.InternalDate(), self.RFC822Size(),
+            ))
+            return 4
+
+        if l.startswith('('):
+            self.state.extend(('close_paren', 'maybe_fetch_att', 'fetch_att'))
+            return 1
+
+        self.state.append('fetch_att')
+        return 0
+
+    def state_close_paren(self, s):
+        if s.startswith(')'):
+            return 1
+        raise Exception("Missing )")
+
+    def state_whitespace(self, s):
+        # Eat up all the leading whitespace
+        if not s or not s[0].isspace():
+            raise Exception("Whitespace expected, none found")
+        i = 0
+        for i in range(len(s)):
+            if not s[i].isspace():
+                break
+        return i
+
+    def state_maybe_fetch_att(self, s):
+        if not s.startswith(')'):
+            self.state.extend(('maybe_fetch_att', 'fetch_att', 'whitespace'))
+        return 0
+
+    def state_fetch_att(self, s):
+        # Allowed fetch_att tokens are "ENVELOPE", "FLAGS", "INTERNALDATE",
+        # "RFC822", "RFC822.HEADER", "RFC822.SIZE", "RFC822.TEXT", "BODY",
+        # "BODYSTRUCTURE", "UID",
+        # "BODY [".PEEK"] [<section>] ["<" <number> "." <nz_number> ">"]
+
+        l = s.lower()
+        for (name, cls) in self._simple_fetch_att:
+            if l.startswith(name):
+                self.result.append(cls())
+                return len(name)
+
+        b = self.Body()
+        if l.startswith('body.peek'):
+            b.peek = True
+            used = 9
+        elif l.startswith('body'):
+            used = 4
+        else:
+            raise Exception("Nothing recognized in fetch_att: %s" % (l,))
+
+        self.pending_body = b
+        self.state.extend(('got_body', 'maybe_partial', 'maybe_section'))
+        return used
+
+    def state_got_body(self, s):
+        self.result.append(self.pending_body)
+        del self.pending_body
+        return 0
+
+    def state_maybe_section(self, s):
+        if not s.startswith("["):
+            return 0
+
+        self.state.extend(('section', 'part_number'))
+        return 1
+
+    _partExpr = re.compile(r'(\d+(?:\.\d+)*)\.?')
+    def state_part_number(self, s):
+        m = self._partExpr.match(s)
+        if m is not None:
+            self.parts = [int(p) - 1 for p in m.groups()[0].split('.')]
+            return m.end()
+        else:
+            self.parts = []
+            return 0
+
+    def state_section(self, s):
+        # Grab "HEADER]" or "HEADER.FIELDS (Header list)]" or
+        # "HEADER.FIELDS.NOT (Header list)]" or "TEXT]" or "MIME]" or
+        # just "]".
+
+        l = s.lower()
+        used = 0
+        if l.startswith(']'):
+            self.pending_body.empty = True
+            used += 1
+        elif l.startswith('header]'):
+            h = self.pending_body.header = self.Header()
+            h.negate = True
+            h.fields = ()
+            used += 7
+        elif l.startswith('text]'):
+            self.pending_body.text = self.Text()
+            used += 5
+        elif l.startswith('mime]'):
+            self.pending_body.mime = self.MIME()
+            used += 5
+        else:
+            h = self.Header()
+            if l.startswith('header.fields.not'):
+                h.negate = True
+                used += 17
+            elif l.startswith('header.fields'):
+                used += 13
+            else:
+                raise Exception("Unhandled section contents: %r" % (l,))
+
+            self.pending_body.header = h
+            self.state.extend(('finish_section', 'header_list', 'whitespace'))
+        self.pending_body.part = tuple(self.parts)
+        self.parts = None
+        return used
+
+    def state_finish_section(self, s):
+        if not s.startswith(']'):
+            raise Exception("section must end with ]")
+        return 1
+
+    def state_header_list(self, s):
+        if not s.startswith('('):
+            raise Exception("Header list must begin with (")
+        end = s.find(')')
+        if end == -1:
+            raise Exception("Header list must end with )")
+
+        headers = s[1:end].split()
+        self.pending_body.header.fields = map(str.upper, headers)
+        return end + 1
+
+    def state_maybe_partial(self, s):
+        # Grab <number.number> or nothing at all
+        if not s.startswith('<'):
+            return 0
+        end = s.find('>')
+        if end == -1:
+            raise Exception("Found < but not >")
+
+        partial = s[1:end]
+        parts = partial.split('.', 1)
+        if len(parts) != 2:
+            raise Exception("Partial specification did not include two .-delimited integers")
+        begin, length = map(int, parts)
+        self.pending_body.partialBegin = begin
+        self.pending_body.partialLength = length
+
+        return end + 1
+
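+# Hedged usage sketch (not part of the upstream module): parse a parenthesized
+# fetch_att list and render the parsed attributes back to their string form.
+# The fetch string below is illustrative.
+def _exampleFetchParse():
+    p = _FetchParser()
+    p.parseString('(FLAGS BODY.PEEK[HEADER.FIELDS (Subject From)]<0.2048>)')
+    return [str(att) for att in p.result]
+    # expected: ['flags', 'BODY[HEADER.FIELDS (Subject From)]<0.2048>']
+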
+class FileProducer:
+    CHUNK_SIZE = 2 ** 2 ** 2 ** 2
+
+    firstWrite = True
+
+    def __init__(self, f):
+        self.f = f
+
+    def beginProducing(self, consumer):
+        self.consumer = consumer
+        self.produce = consumer.write
+        d = self._onDone = defer.Deferred()
+        self.consumer.registerProducer(self, False)
+        return d
+
+    def resumeProducing(self):
+        b = ''
+        if self.firstWrite:
+            b = '{%d}\r\n' % self._size()
+            self.firstWrite = False
+        if not self.f:
+            return
+        b = b + self.f.read(self.CHUNK_SIZE)
+        if not b:
+            self.consumer.unregisterProducer()
+            self._onDone.callback(self)
+            self._onDone = self.f = self.consumer = None
+        else:
+            self.produce(b)
+
+    def pauseProducing(self):
+        pass
+
+    def stopProducing(self):
+        pass
+
+    def _size(self):
+        b = self.f.tell()
+        self.f.seek(0, 2)
+        e = self.f.tell()
+        self.f.seek(b, 0)
+        return e - b
+
+def parseTime(s):
+    # XXX - This may require localization :(
+    months = [
+        'jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul', 'aug', 'sep', 'oct',
+        'nov', 'dec', 'january', 'february', 'march', 'april', 'may', 'june',
+        'july', 'august', 'september', 'october', 'november', 'december'
+    ]
+    expr = {
+        'day': r"(?P<day>3[0-1]|[1-2]\d|0[1-9]|[1-9]| [1-9])",
+        'mon': r"(?P<mon>\w+)",
+        'year': r"(?P<year>\d\d\d\d)"
+    }
+    m = re.match('%(day)s-%(mon)s-%(year)s' % expr, s)
+    if not m:
+        raise ValueError, "Cannot parse time string %r" % (s,)
+    d = m.groupdict()
+    try:
+        d['mon'] = 1 + (months.index(d['mon'].lower()) % 12)
+        d['year'] = int(d['year'])
+        d['day'] = int(d['day'])
+    except ValueError:
+        raise ValueError, "Cannot parse time string %r" % (s,)
+    else:
+        return time.struct_time(
+            (d['year'], d['mon'], d['day'], 0, 0, 0, -1, -1, -1)
+        )
+
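+# Hedged sketch (not part of the upstream module): the dd-Mon-yyyy format
+# parsed above, with the result sliced down to (year, month, day).
+def _exampleParseTime():
+    return parseTime('17-Apr-2014')[:3]   # (2014, 4, 17)
+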
+import codecs
+def modified_base64(s):
+    s_utf7 = s.encode('utf-7')
+    return s_utf7[1:-1].replace('/', ',')
+
+def modified_unbase64(s):
+    s_utf7 = '+' + s.replace(',', '/') + '-'
+    return s_utf7.decode('utf-7')
+
+def encoder(s, errors=None):
+    """
+    Encode the given C{unicode} string using the IMAP4 specific variation of
+    UTF-7.
+
+    @type s: C{unicode}
+    @param s: The text to encode.
+
+    @param errors: Policy for handling encoding errors.  Currently ignored.
+
+    @return: C{tuple} of a C{str} giving the encoded bytes and an C{int}
+        giving the number of code units consumed from the input.
+    """
+    r = []
+    _in = []
+    for c in s:
+        if ord(c) in (range(0x20, 0x26) + range(0x27, 0x7f)):
+            if _in:
+                r.extend(['&', modified_base64(''.join(_in)), '-'])
+                del _in[:]
+            r.append(str(c))
+        elif c == '&':
+            if _in:
+                r.extend(['&', modified_base64(''.join(_in)), '-'])
+                del _in[:]
+            r.append('&-')
+        else:
+            _in.append(c)
+    if _in:
+        r.extend(['&', modified_base64(''.join(_in)), '-'])
+    return (''.join(r), len(s))
+
+def decoder(s, errors=None):
+    """
+    Decode the given C{str} using the IMAP4 specific variation of UTF-7.
+
+    @type s: C{str}
+    @param s: The bytes to decode.
+
+    @param errors: Policy for handling decoding errors.  Currently ignored.
+
+    @return: a C{tuple} of a C{unicode} string giving the text which was
+        decoded and an C{int} giving the number of bytes consumed from the
+        input.
+    """
+    r = []
+    decode = []
+    for c in s:
+        if c == '&' and not decode:
+            decode.append('&')
+        elif c == '-' and decode:
+            if len(decode) == 1:
+                r.append('&')
+            else:
+                r.append(modified_unbase64(''.join(decode[1:])))
+            decode = []
+        elif decode:
+            decode.append(c)
+        else:
+            r.append(c)
+    if decode:
+        r.append(modified_unbase64(''.join(decode[1:])))
+    return (''.join(r), len(s))
+
+class StreamReader(codecs.StreamReader):
+    def decode(self, s, errors='strict'):
+        return decoder(s)
+
+class StreamWriter(codecs.StreamWriter):
+    def encode(self, s, errors='strict'):
+        return encoder(s)
+
+_codecInfo = (encoder, decoder, StreamReader, StreamWriter)
+try:
+    _codecInfoClass = codecs.CodecInfo
+except AttributeError:
+    pass
+else:
+    _codecInfo = _codecInfoClass(*_codecInfo)
+
+def imap4_utf_7(name):
+    if name == 'imap4-utf-7':
+        return _codecInfo
+codecs.register(imap4_utf_7)
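+
+# Hedged sketch (not part of the upstream module): once the codec is
+# registered, mailbox names round-trip through the IMAP4 UTF-7 variant by
+# codec name.  The sample mailbox name is illustrative.
+def _exampleImap4Utf7():
+    name = u'Entw\xfcrfe'                  # German "Drafts"
+    encoded = name.encode('imap4-utf-7')   # 'Entw&APw-rfe'
+    return encoded.decode('imap4-utf-7') == name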
+
+__all__ = [
+    # Protocol classes
+    'IMAP4Server', 'IMAP4Client',
+
+    # Interfaces
+    'IMailboxListener', 'IClientAuthentication', 'IAccount', 'IMailbox',
+    'INamespacePresenter', 'ICloseableMailbox', 'IMailboxInfo',
+    'IMessage', 'IMessageCopier', 'IMessageFile', 'ISearchableMailbox',
+
+    # Exceptions
+    'IMAP4Exception', 'IllegalClientResponse', 'IllegalOperation',
+    'IllegalMailboxEncoding', 'UnhandledResponse', 'NegativeResponse',
+    'NoSupportedAuthentication', 'IllegalServerResponse',
+    'IllegalIdentifierError', 'IllegalQueryError', 'MismatchedNesting',
+    'MismatchedQuoting', 'MailboxException', 'MailboxCollision',
+    'NoSuchMailbox', 'ReadOnlyMailbox',
+
+    # Auth objects
+    'CramMD5ClientAuthenticator', 'PLAINAuthenticator', 'LOGINAuthenticator',
+    'PLAINCredentials', 'LOGINCredentials',
+
+    # Simple query interface
+    'Query', 'Not', 'Or',
+
+    # Miscellaneous
+    'MemoryAccount',
+    'statusRequestHelper',
+]
diff --git a/ThirdParty/Twisted/twisted/mail/mail.py b/ThirdParty/Twisted/twisted/mail/mail.py
new file mode 100644
index 0000000..07789aa
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/mail.py
@@ -0,0 +1,333 @@
+# -*- test-case-name: twisted.mail.test.test_mail -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Mail support for twisted python.
+"""
+
+# Twisted imports
+from twisted.internet import defer
+from twisted.application import service, internet
+from twisted.python import util
+from twisted.python import log
+
+from twisted import cred
+import twisted.cred.portal
+
+# Sibling imports
+from twisted.mail import protocols, smtp
+
+# System imports
+import os
+from zope.interface import implements, Interface
+
+
+class DomainWithDefaultDict:
+    '''Simulate a dictionary with a default value for non-existing keys.
+    '''
+    def __init__(self, domains, default):
+        self.domains = domains
+        self.default = default
+
+    def setDefaultDomain(self, domain):
+        self.default = domain
+
+    def has_key(self, name):
+        return 1
+
+    def fromkeys(klass, keys, value=None):
+        d = klass()
+        for k in keys:
+            d[k] = value
+        return d
+    fromkeys = classmethod(fromkeys)
+
+    def __contains__(self, name):
+        return 1
+
+    def __getitem__(self, name):
+        return self.domains.get(name, self.default)
+
+    def __setitem__(self, name, value):
+        self.domains[name] = value
+
+    def __delitem__(self, name):
+        del self.domains[name]
+
+    def __iter__(self):
+        return iter(self.domains)
+
+    def __len__(self):
+        return len(self.domains)
+
+
+    def __str__(self):
+        """
+        Return a string describing the underlying domain mapping of this
+        object.
+        """
+        return '<DomainWithDefaultDict %s>' % (self.domains,)
+
+
+    def __repr__(self):
+        """
+        Return a pseudo-executable string describing the underlying domain
+        mapping of this object.
+        """
+        return 'DomainWithDefaultDict(%s)' % (self.domains,)
+
+
+    def get(self, key, default=None):
+        return self.domains.get(key, default)
+
+    def copy(self):
+        return DomainWithDefaultDict(self.domains.copy(), self.default)
+
+    def iteritems(self):
+        return self.domains.iteritems()
+
+    def iterkeys(self):
+        return self.domains.iterkeys()
+
+    def itervalues(self):
+        return self.domains.itervalues()
+
+    def keys(self):
+        return self.domains.keys()
+
+    def values(self):
+        return self.domains.values()
+
+    def items(self):
+        return self.domains.items()
+
+    def popitem(self):
+        return self.domains.popitem()
+
+    def update(self, other):
+        return self.domains.update(other)
+
+    def clear(self):
+        return self.domains.clear()
+
+    def setdefault(self, key, default):
+        return self.domains.setdefault(key, default)
+
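+# Hedged usage sketch (not part of the upstream module): lookups for unknown
+# domains fall back to the default value instead of raising KeyError.  The
+# domain names and values are illustrative.
+def _exampleDomainWithDefault():
+    domains = DomainWithDefaultDict({'example.com': 'local'}, 'bounce')
+    return domains['example.com'], domains['unknown.invalid']   # ('local', 'bounce')
+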
+class IDomain(Interface):
+    """An email domain."""
+
+    def exists(user):
+        """
+        Check whether or not the specified user exists in this domain.
+
+        @type user: C{twisted.protocols.smtp.User}
+        @param user: The user to check
+
+        @rtype: No-argument callable
+        @return: A callable which takes no arguments and returns an object
+        implementing C{IMessage}, or a C{Deferred} which fires with such a
+        callable.
+        This will be called and the returned object used to deliver the
+        message when it arrives.
+
+        @raise twisted.protocols.smtp.SMTPBadRcpt: Raised if the given
+        user does not exist in this domain.
+        """
+
+    def addUser(user, password):
+        """Add a username/password to this domain."""
+
+    def startMessage(user):
+        """Create and return a new message to be delivered to the given user.
+
+        DEPRECATED.  Implement validateTo() correctly instead.
+        """
+
+    def getCredentialsCheckers():
+        """Return a list of ICredentialsChecker implementors for this domain.
+        """
+
+class IAliasableDomain(IDomain):
+    def setAliasGroup(aliases):
+        """Set the group of defined aliases for this domain
+
+        @type aliases: C{dict}
+        @param aliases: Mapping of domain names to objects implementing
+        C{IAlias}
+        """
+
+    def exists(user, memo=None):
+        """
+        Check whether or not the specified user exists in this domain.
+
+        @type user: C{twisted.protocols.smtp.User}
+        @param user: The user to check
+
+        @type memo: C{dict}
+        @param memo: A record of the addresses already considered while
+        resolving aliases.  The default value should be used by all
+        external code.
+
+        @rtype: No-argument callable
+        @return: A callable which takes no arguments and returns an object
+        implementing C{IMessage}, or a C{Deferred} which fires with such a
+        callable.
+        This will be called and the returned object used to deliver the
+        message when it arrives.
+
+        @raise twisted.protocols.smtp.SMTPBadRcpt: Raised if the given
+        user does not exist in this domain.
+        """
+
+class BounceDomain:
+    """A domain in which no user exists.
+
+    This can be used to block off certain domains.
+    """
+
+    implements(IDomain)
+
+    def exists(self, user):
+        raise smtp.SMTPBadRcpt(user)
+
+    def willRelay(self, user, protocol):
+        return False
+
+    def addUser(self, user, password):
+        pass
+
+    def startMessage(self, user):
+        """
+        No code should ever call this function.
+        """
+        raise NotImplementedError(
+                "No code should ever call this method for any reason")
+
+    def getCredentialsCheckers(self):
+        return []
+
+
+class FileMessage:
+    """A file we can write an email too."""
+
+    implements(smtp.IMessage)
+
+    def __init__(self, fp, name, finalName):
+        self.fp = fp
+        self.name = name
+        self.finalName = finalName
+
+    def lineReceived(self, line):
+        self.fp.write(line+'\n')
+
+    def eomReceived(self):
+        self.fp.close()
+        os.rename(self.name, self.finalName)
+        return defer.succeed(self.finalName)
+
+    def connectionLost(self):
+        self.fp.close()
+        os.remove(self.name)
+
+
+class MailService(service.MultiService):
+    """An email service."""
+
+    queue = None
+    domains = None
+    portals = None
+    aliases = None
+    smtpPortal = None
+
+    def __init__(self):
+        service.MultiService.__init__(self)
+        # Domains and portals for "client" protocols - POP3, IMAP4, etc
+        self.domains = DomainWithDefaultDict({}, BounceDomain())
+        self.portals = {}
+
+        self.monitor = FileMonitoringService()
+        self.monitor.setServiceParent(self)
+        self.smtpPortal = cred.portal.Portal(self)
+
+    def getPOP3Factory(self):
+        return protocols.POP3Factory(self)
+
+    def getSMTPFactory(self):
+        return protocols.SMTPFactory(self, self.smtpPortal)
+
+    def getESMTPFactory(self):
+        return protocols.ESMTPFactory(self, self.smtpPortal)
+
+    def addDomain(self, name, domain):
+        portal = cred.portal.Portal(domain)
+        map(portal.registerChecker, domain.getCredentialsCheckers())
+        self.domains[name] = domain
+        self.portals[name] = portal
+        if self.aliases and IAliasableDomain.providedBy(domain):
+            domain.setAliasGroup(self.aliases)
+
+    def setQueue(self, queue):
+        """Set the queue for outgoing emails."""
+        self.queue = queue
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        if smtp.IMessageDelivery in interfaces:
+            a = protocols.ESMTPDomainDelivery(self, avatarId)
+            return smtp.IMessageDelivery, a, lambda: None
+        raise NotImplementedError()
+
+    def lookupPortal(self, name):
+        return self.portals[name]
+
+    def defaultPortal(self):
+        return self.portals['']
+
+
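+# Hedged usage sketch (not part of the upstream module): register a domain
+# with the service and retrieve the cred portal that addDomain built for it.
+# The domain name is illustrative.
+def _exampleMailService():
+    svc = MailService()
+    svc.addDomain('example.com', BounceDomain())
+    return svc.lookupPortal('example.com')
+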
+class FileMonitoringService(internet.TimerService):
+
+    def __init__(self):
+        self.files = []
+        self.intervals = iter(util.IntervalDifferential([], 60))
+
+    def startService(self):
+        service.Service.startService(self)
+        self._setupMonitor()
+
+    def _setupMonitor(self):
+        from twisted.internet import reactor
+        t, self.index = self.intervals.next()
+        self._call = reactor.callLater(t, self._monitor)
+
+    def stopService(self):
+        service.Service.stopService(self)
+        if self._call:
+            self._call.cancel()
+            self._call = None
+
+    def monitorFile(self, name, callback, interval=10):
+        try:
+            mtime = os.path.getmtime(name)
+        except:
+            mtime = 0
+        self.files.append([interval, name, callback, mtime])
+        self.intervals.addInterval(interval)
+
+    def unmonitorFile(self, name):
+        for i in range(len(self.files)):
+            if name == self.files[i][1]:
+                self.intervals.removeInterval(self.files[i][0])
+                del self.files[i]
+                break
+
+    def _monitor(self):
+        self._call = None
+        if self.index is not None:
+            name, callback, mtime = self.files[self.index][1:]
+            try:
+                now = os.path.getmtime(name)
+            except:
+                now = 0
+            if now > mtime:
+                log.msg("%s changed, notifying listener" % (name,))
+                self.files[self.index][3] = now
+                callback(name)
+        self._setupMonitor()
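+
+# Hedged usage sketch (not part of the upstream module): ask a running
+# FileMonitoringService to poll a file's modification time and log when it
+# changes.  The path and interval are illustrative.
+def _exampleMonitorFile(monitor):
+    monitor.monitorFile('/etc/aliases',
+                        lambda name: log.msg('%s changed' % (name,)),
+                        interval=30)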
diff --git a/ThirdParty/Twisted/twisted/mail/maildir.py b/ThirdParty/Twisted/twisted/mail/maildir.py
new file mode 100644
index 0000000..7927b32
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/maildir.py
@@ -0,0 +1,518 @@
+# -*- test-case-name: twisted.mail.test.test_mail -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Maildir-style mailbox support
+"""
+
+import os
+import stat
+import socket
+
+from zope.interface import implements
+
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+
+from twisted.python.compat import set
+from twisted.mail import pop3
+from twisted.mail import smtp
+from twisted.protocols import basic
+from twisted.persisted import dirdbm
+from twisted.python import log, failure
+from twisted.python.hashlib import md5
+from twisted.mail import mail
+from twisted.internet import interfaces, defer, reactor
+from twisted.cred import portal, credentials, checkers
+from twisted.cred.error import UnauthorizedLogin
+
+INTERNAL_ERROR = '''\
+From: Twisted.mail Internals
+Subject: An Error Occurred
+
+  An internal server error has occurred.  Please contact the
+  server administrator.
+'''
+
+class _MaildirNameGenerator:
+    """
+    Utility class to generate a unique maildir name
+
+    @ivar _clock: An L{IReactorTime} provider which will be used to learn
+        the current time to include in names returned by L{generate} so that
+        they sort properly.
+    """
+    n = 0
+    p = os.getpid()
+    s = socket.gethostname().replace('/', r'\057').replace(':', r'\072')
+
+    def __init__(self, clock):
+        self._clock = clock
+
+    def generate(self):
+        """
+        Return a string which is intended to be unique across all calls to this
+        function (across all processes, reboots, etc).
+
+        Strings returned by earlier calls to this method will compare less
+        than strings returned by later calls as long as the clock provided
+        doesn't go backwards.
+        """
+        self.n = self.n + 1
+        t = self._clock.seconds()
+        seconds = str(int(t))
+        microseconds = '%07d' % (int((t - int(t)) * 10e6),)
+        return '%s.M%sP%sQ%s.%s' % (seconds, microseconds,
+                                    self.p, self.n, self.s)
+
+_generateMaildirName = _MaildirNameGenerator(reactor).generate
+
+def initializeMaildir(dir):
+    if not os.path.isdir(dir):
+        os.mkdir(dir, 0700)
+        for subdir in ['new', 'cur', 'tmp', '.Trash']:
+            os.mkdir(os.path.join(dir, subdir), 0700)
+        for subdir in ['new', 'cur', 'tmp']:
+            os.mkdir(os.path.join(dir, '.Trash', subdir), 0700)
+        # touch
+        open(os.path.join(dir, '.Trash', 'maildirfolder'), 'w').close()
+
+
+class MaildirMessage(mail.FileMessage):
+    size = None
+
+    def __init__(self, address, fp, *a, **kw):
+        header = "Delivered-To: %s\n" % address
+        fp.write(header)
+        self.size = len(header)
+        mail.FileMessage.__init__(self, fp, *a, **kw)
+
+    def lineReceived(self, line):
+        mail.FileMessage.lineReceived(self, line)
+        self.size += len(line)+1
+
+    def eomReceived(self):
+        self.finalName = self.finalName+',S=%d' % self.size
+        return mail.FileMessage.eomReceived(self)
+
+class AbstractMaildirDomain:
+    """Abstract maildir-backed domain.
+    """
+    alias = None
+    root = None
+
+    def __init__(self, service, root):
+        """Initialize.
+        """
+        self.root = root
+
+    def userDirectory(self, user):
+        """Get the maildir directory for a given user
+
+        Override to specify where to save mails for users.
+        Return None for non-existing users.
+        """
+        return None
+
+    ##
+    ## IAliasableDomain
+    ##
+
+    def setAliasGroup(self, alias):
+        self.alias = alias
+
+    ##
+    ## IDomain
+    ##
+    def exists(self, user, memo=None):
+        """Check for existence of user in the domain
+        """
+        if self.userDirectory(user.dest.local) is not None:
+            return lambda: self.startMessage(user)
+        try:
+            a = self.alias[user.dest.local]
+        except:
+            raise smtp.SMTPBadRcpt(user)
+        else:
+            aliases = a.resolve(self.alias, memo)
+            if aliases:
+                return lambda: aliases
+            log.err("Bad alias configuration: " + str(user))
+            raise smtp.SMTPBadRcpt(user)
+
+    def startMessage(self, user):
+        """Save a message for a given user
+        """
+        if isinstance(user, str):
+            name, domain = user.split('@', 1)
+        else:
+            name, domain = user.dest.local, user.dest.domain
+        dir = self.userDirectory(name)
+        fname = _generateMaildirName()
+        filename = os.path.join(dir, 'tmp', fname)
+        fp = open(filename, 'w')
+        return MaildirMessage('%s@%s' % (name, domain), fp, filename,
+                              os.path.join(dir, 'new', fname))
+
+    def willRelay(self, user, protocol):
+        return False
+
+    def addUser(self, user, password):
+        raise NotImplementedError
+
+    def getCredentialsCheckers(self):
+        raise NotImplementedError
+    ##
+    ## end of IDomain
+    ##
+
+class _MaildirMailboxAppendMessageTask:
+    implements(interfaces.IConsumer)
+
+    osopen = staticmethod(os.open)
+    oswrite = staticmethod(os.write)
+    osclose = staticmethod(os.close)
+    osrename = staticmethod(os.rename)
+
+    def __init__(self, mbox, msg):
+        self.mbox = mbox
+        self.defer = defer.Deferred()
+        self.openCall = None
+        if not hasattr(msg, "read"):
+            msg = StringIO.StringIO(msg)
+        self.msg = msg
+
+    def startUp(self):
+        self.createTempFile()
+        if self.fh != -1:
+            self.filesender = basic.FileSender()
+            self.filesender.beginFileTransfer(self.msg, self)
+
+    def registerProducer(self, producer, streaming):
+        self.myproducer = producer
+        self.streaming = streaming
+        if not streaming:
+            self.prodProducer()
+
+    def prodProducer(self):
+        self.openCall = None
+        if self.myproducer is not None:
+            self.openCall = reactor.callLater(0, self.prodProducer)
+            self.myproducer.resumeProducing()
+
+    def unregisterProducer(self):
+        self.myproducer = None
+        self.streaming = None
+        self.osclose(self.fh)
+        self.moveFileToNew()
+
+    def write(self, data):
+        try:
+            self.oswrite(self.fh, data)
+        except:
+            self.fail()
+
+    def fail(self, err=None):
+        if err is None:
+            err = failure.Failure()
+        if self.openCall is not None:
+            self.openCall.cancel()
+        self.defer.errback(err)
+        self.defer = None
+
+    def moveFileToNew(self):
+        while True:
+            newname = os.path.join(self.mbox.path, "new", _generateMaildirName())
+            try:
+                self.osrename(self.tmpname, newname)
+                break
+            except OSError, (err, estr):
+                import errno
+                # if the newname exists, retry with a new newname.
+                if err != errno.EEXIST:
+                    self.fail()
+                    newname = None
+                    break
+        if newname is not None:
+            self.mbox.list.append(newname)
+            self.defer.callback(None)
+            self.defer = None
+
+    def createTempFile(self):
+        attr = (os.O_RDWR | os.O_CREAT | os.O_EXCL
+                | getattr(os, "O_NOINHERIT", 0)
+                | getattr(os, "O_NOFOLLOW", 0))
+        tries = 0
+        self.fh = -1
+        while True:
+            self.tmpname = os.path.join(self.mbox.path, "tmp", _generateMaildirName())
+            try:
+                self.fh = self.osopen(self.tmpname, attr, 0600)
+                return None
+            except OSError:
+                tries += 1
+                if tries > 500:
+                    self.defer.errback(RuntimeError("Could not create tmp file for %s" % self.mbox.path))
+                    self.defer = None
+                    return None
+
+class MaildirMailbox(pop3.Mailbox):
+    """Implement the POP3 mailbox semantics for a Maildir mailbox
+    """
+    AppendFactory = _MaildirMailboxAppendMessageTask
+
+    def __init__(self, path):
+        """Initialize with name of the Maildir mailbox
+        """
+        self.path = path
+        self.list = []
+        self.deleted = {}
+        initializeMaildir(path)
+        for name in ('cur', 'new'):
+            for file in os.listdir(os.path.join(path, name)):
+                self.list.append((file, os.path.join(path, name, file)))
+        self.list.sort()
+        self.list = [e[1] for e in self.list]
+
+    def listMessages(self, i=None):
+        """Return a list of lengths of all files in new/ and cur/
+        """
+        if i is None:
+            ret = []
+            for mess in self.list:
+                if mess:
+                    ret.append(os.stat(mess)[stat.ST_SIZE])
+                else:
+                    ret.append(0)
+            return ret
+        return self.list[i] and os.stat(self.list[i])[stat.ST_SIZE] or 0
+
+    def getMessage(self, i):
+        """Return an open file-pointer to a message
+        """
+        return open(self.list[i])
+
+    def getUidl(self, i):
+        """Return a unique identifier for a message
+
+        This is done using the basename of the filename.
+        It is globally unique because this is how Maildirs are designed.
+        """
+        # Returning the actual filename is a mistake.  Hash it.
+        base = os.path.basename(self.list[i])
+        return md5(base).hexdigest()
+
+    def deleteMessage(self, i):
+        """Delete a message
+
+        This only moves a message to the .Trash/ subfolder,
+        so it can be undeleted by an administrator.
+        """
+        trashFile = os.path.join(
+            self.path, '.Trash', 'cur', os.path.basename(self.list[i])
+        )
+        os.rename(self.list[i], trashFile)
+        self.deleted[self.list[i]] = trashFile
+        self.list[i] = 0
+
+    def undeleteMessages(self):
+        """Undelete any deleted messages it is possible to undelete
+
+        This moves any messages from .Trash/ subfolder back to their
+        original position, and empties out the deleted dictionary.
+        """
+        for (real, trash) in self.deleted.items():
+            try:
+                os.rename(trash, real)
+            except OSError, (err, estr):
+                import errno
+                # If the file has been deleted from disk, oh well!
+                if err != errno.ENOENT:
+                    raise
+                # The file is already gone; there is nothing to restore.
+            else:
+                try:
+                    self.list[self.list.index(0)] = real
+                except ValueError:
+                    self.list.append(real)
+        self.deleted.clear()
+
+    def appendMessage(self, txt):
+        """
+        Appends a message into the mailbox.
+
+        @param txt: A C{str} or file-like object giving the message to append.
+
+        @return: A L{Deferred} which fires when the message has been appended to
+            the mailbox.
+        """
+        task = self.AppendFactory(self, txt)
+        result = task.defer
+        task.startUp()
+        return result
+
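+# Hedged usage sketch (not part of the upstream module): append a message to a
+# maildir-backed POP3 mailbox, then list message sizes once the append
+# Deferred fires.  The path and message text are illustrative, and a running
+# reactor is required for the append to complete.
+def _exampleMaildirMailbox(path='/tmp/example-maildir'):
+    box = MaildirMailbox(path)
+    d = box.appendMessage('Subject: hi\r\n\r\nhello\r\n')
+    d.addCallback(lambda ignored: box.listMessages())
+    return d
+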
+class StringListMailbox:
+    """
+    L{StringListMailbox} is an in-memory mailbox.
+
+    @ivar msgs: A C{list} of C{str} giving the contents of each message in the
+        mailbox.
+
+    @ivar _delete: A C{set} of the indexes of messages which have been deleted
+        since the last C{sync} call.
+    """
+    implements(pop3.IMailbox)
+
+    def __init__(self, msgs):
+        self.msgs = msgs
+        self._delete = set()
+
+
+    def listMessages(self, i=None):
+        """
+        Return the length of the message at the given offset, or a list of all
+        message lengths.
+        """
+        if i is None:
+            return [self.listMessages(i) for i in range(len(self.msgs))]
+        if i in self._delete:
+            return 0
+        return len(self.msgs[i])
+
+
+    def getMessage(self, i):
+        """
+        Return an in-memory file-like object for the message content at the
+        given offset.
+        """
+        return StringIO.StringIO(self.msgs[i])
+
+
+    def getUidl(self, i):
+        """
+        Return a hash of the contents of the message at the given offset.
+        """
+        return md5(self.msgs[i]).hexdigest()
+
+
+    def deleteMessage(self, i):
+        """
+        Mark the given message for deletion.
+        """
+        self._delete.add(i)
+
+
+    def undeleteMessages(self):
+        """
+        Reset deletion tracking, undeleting any messages which have been
+        deleted since the last call to C{sync}.
+        """
+        self._delete = set()
+
+
+    def sync(self):
+        """
+        Discard the contents of any message marked for deletion and reset
+        deletion tracking.
+        """
+        for index in self._delete:
+            self.msgs[index] = ""
+        self._delete = set()
+
+
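+# Hedged usage sketch (not part of the upstream module): deleting a message
+# and then syncing discards its contents, so its listed size drops to zero.
+# The message bodies are illustrative.
+def _exampleStringListMailbox():
+    box = StringListMailbox(['Subject: a\r\n\r\nfirst\r\n',
+                             'Subject: b\r\n\r\nsecond\r\n'])
+    box.deleteMessage(0)
+    box.sync()
+    return box.listMessages()   # [0, 22]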
+
+class MaildirDirdbmDomain(AbstractMaildirDomain):
+    """A Maildir Domain where membership is checked by a dirdbm file
+    """
+
+    implements(portal.IRealm, mail.IAliasableDomain)
+
+    portal = None
+    _credcheckers = None
+
+    def __init__(self, service, root, postmaster=0):
+        """Initialize
+
+        The C{root} argument is the directory in which the domain is
+        rooted.  The C{postmaster} argument controls whether mail for
+        non-existent addresses is forwarded to the postmaster instead
+        of being bounced outright.
+
+        The directory structure of a MaildirDirdbmDomain is:
+
+        /passwd <-- a dirdbm file
+        /USER/{cur,new,del} <-- each user has these three directories
+        """
+        AbstractMaildirDomain.__init__(self, service, root)
+        dbm = os.path.join(root, 'passwd')
+        if not os.path.exists(dbm):
+            os.makedirs(dbm)
+        self.dbm = dirdbm.open(dbm)
+        self.postmaster = postmaster
+
+    def userDirectory(self, name):
+        """Get the directory for a user
+
+        If the user exists in the dirdbm file, return the directory
+        os.path.join(root, name), creating it if necessary.
+        Otherwise, return the postmaster's directory if bounces are
+        directed to the postmaster, or None if they are not.
+        """
+        if not self.dbm.has_key(name):
+            if not self.postmaster:
+                return None
+            name = 'postmaster'
+        dir = os.path.join(self.root, name)
+        if not os.path.exists(dir):
+            initializeMaildir(dir)
+        return dir
+
+    ##
+    ## IDomain
+    ##
+    def addUser(self, user, password):
+        self.dbm[user] = password
+        # Ensure it is initialized
+        self.userDirectory(user)
+
+    def getCredentialsCheckers(self):
+        if self._credcheckers is None:
+            self._credcheckers = [DirdbmDatabase(self.dbm)]
+        return self._credcheckers
+
+    ##
+    ## IRealm
+    ##
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        if pop3.IMailbox not in interfaces:
+            raise NotImplementedError("No interface")
+        if avatarId == checkers.ANONYMOUS:
+            mbox = StringListMailbox([INTERNAL_ERROR])
+        else:
+            mbox = MaildirMailbox(os.path.join(self.root, avatarId))
+
+        return (
+            pop3.IMailbox,
+            mbox,
+            lambda: None
+        )
+
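+# Hedged usage sketch (not part of the upstream module): create a
+# dirdbm-backed domain, add a user, and resolve that user's maildir
+# directory.  The root path is illustrative; the service argument is unused
+# by the base class, so None is passed here.
+def _exampleDirdbmDomain(root='/tmp/example-domain'):
+    domain = MaildirDirdbmDomain(None, root)
+    domain.addUser('alice', 'secret')
+    return domain.userDirectory('alice')
+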
+class DirdbmDatabase:
+    implements(checkers.ICredentialsChecker)
+
+    credentialInterfaces = (
+        credentials.IUsernamePassword,
+        credentials.IUsernameHashedPassword
+    )
+
+    def __init__(self, dbm):
+        self.dirdbm = dbm
+
+    def requestAvatarId(self, c):
+        if c.username in self.dirdbm:
+            if c.checkPassword(self.dirdbm[c.username]):
+                return c.username
+        raise UnauthorizedLogin()
diff --git a/ThirdParty/Twisted/twisted/mail/pb.py b/ThirdParty/Twisted/twisted/mail/pb.py
new file mode 100644
index 0000000..8a9417f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/pb.py
@@ -0,0 +1,115 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.spread import pb
+from twisted.spread import banana
+
+import os
+import types
+
+class Maildir(pb.Referenceable):
+
+    def __init__(self, directory, rootDirectory):
+        self.virtualDirectory = directory
+        self.rootDirectory = rootDirectory
+        self.directory = os.path.join(rootDirectory, directory)
+
+    def getFolderMessage(self, folder, name):
+        if '/' in name:
+            raise IOError("can only open files in '%s' directory" % folder)
+        fp = open(os.path.join(self.directory, folder, name))
+        try:
+            return fp.read()
+        finally:
+            fp.close()
+
+    def deleteFolderMessage(self, folder, name):
+        if '/' in name:
+            raise IOError("can only delete files in '%s' directory'" % folder)
+        os.rename(os.path.join(self.directory, folder, name),
+                  os.path.join(self.rootDirectory, '.Trash', folder, name))
+
+    def deleteNewMessage(self, name):
+        return self.deleteFolderMessage('new', name)
+    remote_deleteNewMessage = deleteNewMessage
+
+    def deleteCurMessage(self, name):
+        return self.deleteFolderMessage('cur', name)
+    remote_deleteCurMessage = deleteCurMessage
+
+    def getNewMessages(self):
+        return os.listdir(os.path.join(self.directory, 'new'))
+    remote_getNewMessages = getNewMessages
+
+    def getCurMessages(self):
+        return os.listdir(os.path.join(self.directory, 'cur'))
+    remote_getCurMessages = getCurMessages
+
+    def getNewMessage(self, name):
+        return self.getFolderMessage('new', name)
+    remote_getNewMessage = getNewMessage
+
+    def getCurMessage(self, name):
+        return self.getFolderMessage('cur', name)
+    remote_getCurMessage = getCurMessage
+
+    def getSubFolder(self, name):
+        if name[0] == '.':
+            raise IOError("subfolder name cannot begin with a '.'")
+        name = name.replace('/', ':')
+        if self.virtualDirectory == '.':
+            name = '.'+name
+        else:
+            name = self.virtualDirectory+':'+name
+        if not self._isSubFolder(name):
+            raise IOError("not a subfolder")
+        return Maildir(name, self.rootDirectory)
+    remote_getSubFolder = getSubFolder
+
+    def _isSubFolder(self, name):
+        # A subfolder is a directory which contains a 'maildirfolder' marker.
+        return (os.path.isdir(os.path.join(self.rootDirectory, name)) and
+                os.path.isfile(os.path.join(self.rootDirectory, name,
+                                            'maildirfolder')))
+
+
+class MaildirCollection(pb.Referenceable):
+
+    def __init__(self, root):
+        self.root = root
+
+    def getSubFolders(self):
+        return os.listdir(self.getRoot())
+    remote_getSubFolders = getSubFolders
+
+    def getSubFolder(self, name):
+        if '/' in name or name[0] == '.':
+            raise IOError("invalid name")
+        return Maildir('.', os.path.join(self.getRoot(), name))
+    remote_getSubFolder = getSubFolder
+
+
+class MaildirBroker(pb.Broker):
+
+    def proto_getCollection(self, requestID, name, domain, password):
+        collection = self._getCollection()
+        if collection is None:
+            self.sendError(requestID, "permission denied")
+        else:
+            self.sendAnswer(requestID, collection)
+
+    def getCollection(self, name, domain, password):
+        if not self.domains.has_key(domain):
+            return
+        domain = self.domains[domain]
+        if (domain.dbm.has_key(name) and
+            domain.dbm[name] == password):
+            return MaildirCollection(domain.userDirectory(name))
+
+
+class MaildirClient(pb.Broker):
+
+    def getCollection(self, name, domain, password, callback, errback):
+        requestID = self.newRequestID()
+        self.waitingForAnswers[requestID] = callback, errback
+        self.sendCall("getCollection", requestID, name, domain, password)
diff --git a/ThirdParty/Twisted/twisted/mail/pop3.py b/ThirdParty/Twisted/twisted/mail/pop3.py
new file mode 100644
index 0000000..3b65242
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/pop3.py
@@ -0,0 +1,1071 @@
+# -*- test-case-name: twisted.mail.test.test_pop3 -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Post-office Protocol version 3
+
+@author: Glyph Lefkowitz
+@author: Jp Calderone
+"""
+
+import base64
+import binascii
+import warnings
+
+from zope.interface import implements, Interface
+
+from twisted.mail import smtp
+from twisted.protocols import basic
+from twisted.protocols import policies
+from twisted.internet import task
+from twisted.internet import defer
+from twisted.internet import interfaces
+from twisted.python import log
+from twisted.python.hashlib import md5
+
+from twisted import cred
+import twisted.cred.error
+import twisted.cred.credentials
+
+##
+## Authentication
+##
+class APOPCredentials:
+    implements(cred.credentials.IUsernamePassword)
+
+    def __init__(self, magic, username, digest):
+        self.magic = magic
+        self.username = username
+        self.digest = digest
+
+    def checkPassword(self, password):
+        seed = self.magic + password
+        myDigest = md5(seed).hexdigest()
+        return myDigest == self.digest
+
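+# Hedged sketch (not part of the upstream module), using the APOP example
+# values from RFC 1939: the digest is the MD5 of the server's timestamp
+# banner ("magic") concatenated with the shared secret.
+def _exampleApop():
+    magic = '<1896.697170952@dbc.mtview.ca.us>'
+    digest = md5(magic + 'tanstaaf').hexdigest()
+    return APOPCredentials(magic, 'mrose', digest).checkPassword('tanstaaf')   # True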
+
+class _HeadersPlusNLines:
+    def __init__(self, f, n):
+        self.f = f
+        self.n = n
+        self.linecount = 0
+        self.headers = 1
+        self.done = 0
+        self.buf = ''
+
+    def read(self, bytes):
+        if self.done:
+            return ''
+        data = self.f.read(bytes)
+        if not data:
+            return data
+        if self.headers:
+            df, sz = data.find('\r\n\r\n'), 4
+            if df == -1:
+                df, sz = data.find('\n\n'), 2
+            if df != -1:
+                df += sz
+                val = data[:df]
+                data = data[df:]
+                self.linecount = 1
+                self.headers = 0
+        else:
+            val = ''
+        if self.linecount > 0:
+            dsplit = (self.buf+data).split('\n')
+            self.buf = dsplit[-1]
+            for ln in dsplit[:-1]:
+                if self.linecount > self.n:
+                    self.done = 1
+                    return val
+                val += (ln + '\n')
+                self.linecount += 1
+            return val
+        else:
+            return data
+
+
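+# Hedged sketch (not part of the upstream module): the TOP-style truncation
+# this helper provides -- all headers plus the first n body lines.  The
+# message text is illustrative.
+def _exampleHeadersPlusNLines():
+    import StringIO
+    msg = StringIO.StringIO('Subject: hi\r\n\r\nline one\r\nline two\r\n')
+    return _HeadersPlusNLines(msg, 1).read(4096)
+    # -> 'Subject: hi\r\n\r\nline one\r\n'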
+
+class _POP3MessageDeleted(Exception):
+    """
+    Internal control-flow exception.  Indicates the file of a deleted message
+    was requested.
+    """
+
+
+class POP3Error(Exception):
+    pass
+
+
+
+class _IteratorBuffer(object):
+    bufSize = 0
+
+    def __init__(self, write, iterable, memoryBufferSize=None):
+        """
+        Create a _IteratorBuffer.
+
+        @param write: A one-argument callable which will be invoked with a list
+        of strings which have been buffered.
+
+        @param iterable: The source of input strings as any iterable.
+
+        @param memoryBufferSize: The upper limit on buffered string length,
+        beyond which the buffer will be flushed to the writer.
+        """
+        self.lines = []
+        self.write = write
+        self.iterator = iter(iterable)
+        if memoryBufferSize is None:
+            memoryBufferSize = 2 ** 16
+        self.memoryBufferSize = memoryBufferSize
+
+
+    def __iter__(self):
+        return self
+
+
+    def next(self):
+        try:
+            v = self.iterator.next()
+        except StopIteration:
+            if self.lines:
+                self.write(self.lines)
+            # Drop some references, in case they're edges in a cycle.
+            del self.iterator, self.lines, self.write
+            raise
+        else:
+            if v is not None:
+                self.lines.append(v)
+                self.bufSize += len(v)
+                if self.bufSize > self.memoryBufferSize:
+                    self.write(self.lines)
+                    self.lines = []
+                    self.bufSize = 0
+
+
+
+def iterateLineGenerator(proto, gen):
+    """
+    Hook the given protocol instance up to the given iterator with an
+    _IteratorBuffer and schedule the result to be exhausted via the protocol.
+
+    @type proto: L{POP3}
+    @type gen: iterator
+    @rtype: L{twisted.internet.defer.Deferred}
+    """
+    coll = _IteratorBuffer(proto.transport.writeSequence, gen)
+    return proto.schedule(coll)
+
+
+
+def successResponse(response):
+    """
+    Format the given object as a positive response.
+    """
+    response = str(response)
+    return '+OK %s\r\n' % (response,)
+
+
+
+def formatStatResponse(msgs):
+    """
+    Format the list of message sizes appropriately for a STAT response.
+
+    Yields None until it finishes computing a result, then yields a str
+    instance that is suitable for use as a response to the STAT command.
+    Intended to be used with a L{twisted.internet.task.Cooperator}.
+    """
+    i = 0
+    bytes = 0
+    for size in msgs:
+        i += 1
+        bytes += size
+        yield None
+    yield successResponse('%d %d' % (i, bytes))
+
+
+
+def formatListLines(msgs):
+    """
+    Format a list of message sizes appropriately for the lines of a LIST
+    response.
+
+    Yields str instances formatted appropriately for use as lines in the
+    response to the LIST command.  Does not include the trailing '.'.
+    """
+    i = 0
+    for size in msgs:
+        i += 1
+        yield '%d %d\r\n' % (i, size)
+
+
+
+def formatListResponse(msgs):
+    """
+    Format a list of message sizes appropriately for a complete LIST response.
+
+    Yields str instances formatted appropriately for use as a LIST command
+    response.
+    """
+    yield successResponse(len(msgs))
+    for ele in formatListLines(msgs):
+        yield ele
+    yield '.\r\n'
+
+
+
+def formatUIDListLines(msgs, getUidl):
+    """
+    Format the list of message sizes appropriately for the lines of a UIDL
+    response.
+
+    Yields str instances formatted appropriately for use as lines in the
+    response to the UIDL command.  Does not include the trailing '.'.
+    """
+    for i, m in enumerate(msgs):
+        if m is not None:
+            uid = getUidl(i)
+            yield '%d %s\r\n' % (i + 1, uid)
+
+
+
+def formatUIDListResponse(msgs, getUidl):
+    """
+    Format a list of message sizes appropriately for a complete UIDL response.
+
+    Yields str instances formatted appropriately for use as a UIDL command
+    response.
+    """
+    yield successResponse('')
+    for ele in formatUIDListLines(msgs, getUidl):
+        yield ele
+    yield '.\r\n'
+
+
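+# Hedged sketch (not part of the upstream module): collecting the generator
+# output for a three-message mailbox.  The message sizes are illustrative.
+def _exampleListResponse():
+    return ''.join(formatListResponse([120, 340, 5]))
+    # -> '+OK 3\r\n1 120\r\n2 340\r\n3 5\r\n.\r\n'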
+
+class POP3(basic.LineOnlyReceiver, policies.TimeoutMixin):
+    """
+    POP3 server protocol implementation.
+
+    @ivar portal: A reference to the L{twisted.cred.portal.Portal} instance we
+    will authenticate through.
+
+    @ivar factory: A L{twisted.mail.pop3.IServerFactory} which will be used to
+    determine some extended behavior of the server.
+
+    @ivar timeOut: An integer giving the number of seconds which may elapse
+    without any traffic being received before the client is disconnected.
+
+    @ivar schedule: A one-argument callable which should behave like
+    L{twisted.internet.task.coiterate}.
+    """
+    implements(interfaces.IProducer)
+
+    magic = None
+    _userIs = None
+    _onLogout = None
+
+    AUTH_CMDS = ['CAPA', 'USER', 'PASS', 'APOP', 'AUTH', 'RPOP', 'QUIT']
+
+    portal = None
+    factory = None
+
+    # The mailbox we're serving
+    mbox = None
+
+    # Set this pretty low -- POP3 clients are expected to log in, download
+    # everything, and log out.
+    timeOut = 300
+
+    # Current protocol state
+    state = "COMMAND"
+
+    # PIPELINE
+    blocked = None
+
+    # Cooperate and suchlike.
+    schedule = staticmethod(task.coiterate)
+
+    # Message index of the highest retrieved message.
+    _highest = 0
+
+    def connectionMade(self):
+        if self.magic is None:
+            self.magic = self.generateMagic()
+        self.successResponse(self.magic)
+        self.setTimeout(self.timeOut)
+        if getattr(self.factory, 'noisy', True):
+            log.msg("New connection from " + str(self.transport.getPeer()))
+
+
+    def connectionLost(self, reason):
+        if self._onLogout is not None:
+            self._onLogout()
+            self._onLogout = None
+        self.setTimeout(None)
+
+
+    def generateMagic(self):
+        return smtp.messageid()
+
+
+    def successResponse(self, message=''):
+        self.transport.write(successResponse(message))
+
+    def failResponse(self, message=''):
+        self.sendLine('-ERR ' + str(message))
+
+#    def sendLine(self, line):
+#        print 'S:', repr(line)
+#        basic.LineOnlyReceiver.sendLine(self, line)
+
+    def lineReceived(self, line):
+#        print 'C:', repr(line)
+        self.resetTimeout()
+        getattr(self, 'state_' + self.state)(line)
+
+    def _unblock(self, _):
+        commands = self.blocked
+        self.blocked = None
+        while commands and self.blocked is None:
+            cmd, args = commands.pop(0)
+            self.processCommand(cmd, *args)
+        if self.blocked is not None:
+            self.blocked.extend(commands)
+
+    def state_COMMAND(self, line):
+        try:
+            return self.processCommand(*line.split(' '))
+        except (ValueError, AttributeError, POP3Error, TypeError), e:
+            log.err()
+            self.failResponse('bad protocol or server: %s: %s' % (e.__class__.__name__, e))
+
+    def processCommand(self, command, *args):
+        if self.blocked is not None:
+            self.blocked.append((command, args))
+            return
+
+        command = command.upper()
+        authCmd = command in self.AUTH_CMDS
+        if not self.mbox and not authCmd:
+            raise POP3Error("not authenticated yet: cannot do " + command)
+        f = getattr(self, 'do_' + command, None)
+        if f:
+            return f(*args)
+        raise POP3Error("Unknown protocol command: " + command)
+
+
+    def listCapabilities(self):
+        baseCaps = [
+            "TOP",
+            "USER",
+            "UIDL",
+            "PIPELINE",
+            "CELERITY",
+            "AUSPEX",
+            "POTENCE",
+        ]
+
+        if IServerFactory.providedBy(self.factory):
+            # Oh my god.  We can't just loop over a list of these because
+            # each has spectacularly different return value semantics!
+            try:
+                v = self.factory.cap_IMPLEMENTATION()
+            except NotImplementedError:
+                pass
+            except:
+                log.err()
+            else:
+                baseCaps.append("IMPLEMENTATION " + str(v))
+
+            try:
+                v = self.factory.cap_EXPIRE()
+            except NotImplementedError:
+                pass
+            except:
+                log.err()
+            else:
+                if v is None:
+                    v = "NEVER"
+                if self.factory.perUserExpiration():
+                    if self.mbox:
+                        v = str(self.mbox.messageExpiration)
+                    else:
+                        v = str(v) + " USER"
+                v = str(v)
+                baseCaps.append("EXPIRE " + v)
+
+            try:
+                v = self.factory.cap_LOGIN_DELAY()
+            except NotImplementedError:
+                pass
+            except:
+                log.err()
+            else:
+                if self.factory.perUserLoginDelay():
+                    if self.mbox:
+                        v = str(self.mbox.loginDelay)
+                    else:
+                        v = str(v) + " USER"
+                v = str(v)
+                baseCaps.append("LOGIN-DELAY " + v)
+
+            try:
+                v = self.factory.challengers
+            except AttributeError:
+                pass
+            except:
+                log.err()
+            else:
+                baseCaps.append("SASL " + ' '.join(v.keys()))
+        return baseCaps
+
+    def do_CAPA(self):
+        self.successResponse("I can do the following:")
+        for cap in self.listCapabilities():
+            self.sendLine(cap)
+        self.sendLine(".")
+
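+    # Illustrative exchange (not part of the upstream file); the capability
+    # list shown here is abridged and will vary with the factory's settings:
+    #
+    #   C: CAPA
+    #   S: +OK I can do the following:
+    #   S: TOP
+    #   S: USER
+    #   S: UIDL
+    #   S: PIPELINE
+    #   S: .
+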
+    def do_AUTH(self, args=None):
+        if not getattr(self.factory, 'challengers', None):
+            self.failResponse("AUTH extension unsupported")
+            return
+
+        if args is None:
+            self.successResponse("Supported authentication methods:")
+            for a in self.factory.challengers:
+                self.sendLine(a.upper())
+            self.sendLine(".")
+            return
+
+        auth = self.factory.challengers.get(args.strip().upper())
+        if not self.portal or not auth:
+            self.failResponse("Unsupported SASL selected")
+            return
+
+        self._auth = auth()
+        chal = self._auth.getChallenge()
+
+        self.sendLine('+ ' + base64.encodestring(chal).rstrip('\n'))
+        self.state = 'AUTH'
+
+    def state_AUTH(self, line):
+        self.state = "COMMAND"
+        try:
+            parts = base64.decodestring(line).split(None, 1)
+        except binascii.Error:
+            self.failResponse("Invalid BASE64 encoding")
+        else:
+            if len(parts) != 2:
+                self.failResponse("Invalid AUTH response")
+                return
+            self._auth.username = parts[0]
+            self._auth.response = parts[1]
+            d = self.portal.login(self._auth, None, IMailbox)
+            d.addCallback(self._cbMailbox, parts[0])
+            d.addErrback(self._ebMailbox)
+            d.addErrback(self._ebUnexpected)
+
+    def do_APOP(self, user, digest):
+        d = defer.maybeDeferred(self.authenticateUserAPOP, user, digest)
+        d.addCallbacks(self._cbMailbox, self._ebMailbox, callbackArgs=(user,)
+        ).addErrback(self._ebUnexpected)
+
+    def _cbMailbox(self, (interface, avatar, logout), user):
+        if interface is not IMailbox:
+            self.failResponse('Authentication failed')
+            log.err("_cbMailbox() called with an interface other than IMailbox")
+            return
+
+        self.mbox = avatar
+        self._onLogout = logout
+        self.successResponse('Authentication succeeded')
+        if getattr(self.factory, 'noisy', True):
+            log.msg("Authenticated login for " + user)
+
+    def _ebMailbox(self, failure):
+        failure = failure.trap(cred.error.LoginDenied, cred.error.LoginFailed)
+        if issubclass(failure, cred.error.LoginDenied):
+            self.failResponse("Access denied: " + str(failure))
+        elif issubclass(failure, cred.error.LoginFailed):
+            self.failResponse('Authentication failed')
+        if getattr(self.factory, 'noisy', True):
+            log.msg("Denied login attempt from " + str(self.transport.getPeer()))
+
+    def _ebUnexpected(self, failure):
+        self.failResponse('Server error: ' + failure.getErrorMessage())
+        log.err(failure)
+
+    def do_USER(self, user):
+        self._userIs = user
+        self.successResponse('USER accepted, send PASS')
+
+    def do_PASS(self, password):
+        if self._userIs is None:
+            self.failResponse("USER required before PASS")
+            return
+        user = self._userIs
+        self._userIs = None
+        d = defer.maybeDeferred(self.authenticateUserPASS, user, password)
+        d.addCallbacks(self._cbMailbox, self._ebMailbox, callbackArgs=(user,)
+        ).addErrback(self._ebUnexpected)
+
+
+    def _longOperation(self, d):
+        # Turn off timeouts and block further processing until the Deferred
+        # fires, then reverse those changes.
+        timeOut = self.timeOut
+        self.setTimeout(None)
+        self.blocked = []
+        d.addCallback(self._unblock)
+        d.addCallback(lambda ign: self.setTimeout(timeOut))
+        return d
+
+
+    def _coiterate(self, gen):
+        return self.schedule(_IteratorBuffer(self.transport.writeSequence, gen))
+
+
+    def do_STAT(self):
+        d = defer.maybeDeferred(self.mbox.listMessages)
+        def cbMessages(msgs):
+            return self._coiterate(formatStatResponse(msgs))
+        def ebMessages(err):
+            self.failResponse(err.getErrorMessage())
+            log.msg("Unexpected do_STAT failure:")
+            log.err(err)
+        return self._longOperation(d.addCallbacks(cbMessages, ebMessages))
+
+
+    def do_LIST(self, i=None):
+        if i is None:
+            d = defer.maybeDeferred(self.mbox.listMessages)
+            def cbMessages(msgs):
+                return self._coiterate(formatListResponse(msgs))
+            def ebMessages(err):
+                self.failResponse(err.getErrorMessage())
+                log.msg("Unexpected do_LIST failure:")
+                log.err(err)
+            return self._longOperation(d.addCallbacks(cbMessages, ebMessages))
+        else:
+            try:
+                i = int(i)
+                if i < 1:
+                    raise ValueError()
+            except ValueError:
+                self.failResponse("Invalid message-number: %r" % (i,))
+            else:
+                d = defer.maybeDeferred(self.mbox.listMessages, i - 1)
+                def cbMessage(msg):
+                    self.successResponse('%d %d' % (i, msg))
+                def ebMessage(err):
+                    errcls = err.check(ValueError, IndexError)
+                    if errcls is not None:
+                        if errcls is IndexError:
+                            # IndexError was supported for a while, but really
+                            # shouldn't be.  One error condition, one exception
+                            # type.
+                            warnings.warn(
+                                "twisted.mail.pop3.IMailbox.listMessages may not "
+                                "raise IndexError for out-of-bounds message numbers: "
+                                "raise ValueError instead.",
+                                PendingDeprecationWarning)
+                        self.failResponse("Invalid message-number: %r" % (i,))
+                    else:
+                        self.failResponse(err.getErrorMessage())
+                        log.msg("Unexpected do_LIST failure:")
+                        log.err(err)
+                return self._longOperation(d.addCallbacks(cbMessage, ebMessage))
+
+
+    def do_UIDL(self, i=None):
+        if i is None:
+            d = defer.maybeDeferred(self.mbox.listMessages)
+            def cbMessages(msgs):
+                return self._coiterate(formatUIDListResponse(msgs, self.mbox.getUidl))
+            def ebMessages(err):
+                self.failResponse(err.getErrorMessage())
+                log.msg("Unexpected do_UIDL failure:")
+                log.err(err)
+            return self._longOperation(d.addCallbacks(cbMessages, ebMessages))
+        else:
+            try:
+                i = int(i)
+                if i < 1:
+                    raise ValueError()
+            except ValueError:
+                self.failResponse("Bad message number argument")
+            else:
+                try:
+                    msg = self.mbox.getUidl(i - 1)
+                except IndexError:
+                    # XXX TODO See above comment regarding IndexError.
+                    warnings.warn(
+                        "twisted.mail.pop3.IMailbox.getUidl may not "
+                        "raise IndexError for out-of-bounds message numbers: "
+                        "raise ValueError instead.",
+                        PendingDeprecationWarning)
+                    self.failResponse("Bad message number argument")
+                except ValueError:
+                    self.failResponse("Bad message number argument")
+                else:
+                    self.successResponse(str(msg))
+
+
+    def _getMessageFile(self, i):
+        """
+        Retrieve the size and contents of a given message, as a two-tuple.
+
+        @param i: The number of the message to operate on.  This is a base-ten
+        string representation starting at 1.
+
+        @return: A Deferred which fires with a two-tuple of an integer and a
+        file-like object.
+        """
+        try:
+            msg = int(i) - 1
+            if msg < 0:
+                raise ValueError()
+        except ValueError:
+            self.failResponse("Bad message number argument")
+            return defer.succeed(None)
+
+        sizeDeferred = defer.maybeDeferred(self.mbox.listMessages, msg)
+        def cbMessageSize(size):
+            if not size:
+                return defer.fail(_POP3MessageDeleted())
+            fileDeferred = defer.maybeDeferred(self.mbox.getMessage, msg)
+            fileDeferred.addCallback(lambda fObj: (size, fObj))
+            return fileDeferred
+
+        def ebMessageSomething(err):
+            errcls = err.check(_POP3MessageDeleted, ValueError, IndexError)
+            if errcls is _POP3MessageDeleted:
+                self.failResponse("message deleted")
+            elif errcls in (ValueError, IndexError):
+                if errcls is IndexError:
+                    # XXX TODO See above comment regarding IndexError.
+                    warnings.warn(
+                        "twisted.mail.pop3.IMailbox.listMessages may not "
+                        "raise IndexError for out-of-bounds message numbers: "
+                        "raise ValueError instead.",
+                        PendingDeprecationWarning)
+                self.failResponse("Bad message number argument")
+            else:
+                log.msg("Unexpected _getMessageFile failure:")
+                log.err(err)
+            return None
+
+        sizeDeferred.addCallback(cbMessageSize)
+        sizeDeferred.addErrback(ebMessageSomething)
+        return sizeDeferred
+
+
+    def _sendMessageContent(self, i, fpWrapper, successResponse):
+        d = self._getMessageFile(i)
+        def cbMessageFile(info):
+            if info is None:
+                # Some error occurred - a failure response has been sent
+                # already, just give up.
+                return
+
+            self._highest = max(self._highest, int(i))
+            resp, fp = info
+            fp = fpWrapper(fp)
+            self.successResponse(successResponse(resp))
+            s = basic.FileSender()
+            d = s.beginFileTransfer(fp, self.transport, self.transformChunk)
+
+            def cbFileTransfer(lastsent):
+                if lastsent != '\n':
+                    line = '\r\n.'
+                else:
+                    line = '.'
+                self.sendLine(line)
+
+            def ebFileTransfer(err):
+                self.transport.loseConnection()
+                log.msg("Unexpected error in _sendMessageContent:")
+                log.err(err)
+
+            d.addCallback(cbFileTransfer)
+            d.addErrback(ebFileTransfer)
+            return d
+        return self._longOperation(d.addCallback(cbMessageFile))
+
+
+    def do_TOP(self, i, size):
+        try:
+            size = int(size)
+            if size < 0:
+                raise ValueError
+        except ValueError:
+            self.failResponse("Bad line count argument")
+        else:
+            return self._sendMessageContent(
+                i,
+                lambda fp: _HeadersPlusNLines(fp, size),
+                lambda size: "Top of message follows")
+
+
+    def do_RETR(self, i):
+        return self._sendMessageContent(
+            i,
+            lambda fp: fp,
+            lambda size: "%d" % (size,))
+
+
+    def transformChunk(self, chunk):
+        return chunk.replace('\n', '\r\n').replace('\r\n.', '\r\n..')
+
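+    # Illustrative example (not part of the upstream file) of the byte
+    # stuffing performed above, assuming a POP3 instance ``p``:
+    #
+    #   p.transformChunk('line1\n.line2\n')  ->  'line1\r\n..line2\r\n'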
+
+    def finishedFileTransfer(self, lastsent):
+        if lastsent != '\n':
+            line = '\r\n.'
+        else:
+            line = '.'
+        self.sendLine(line)
+
+
+    def do_DELE(self, i):
+        i = int(i)-1
+        self.mbox.deleteMessage(i)
+        self.successResponse()
+
+
+    def do_NOOP(self):
+        """Perform no operation.  Return a success code"""
+        self.successResponse()
+
+
+    def do_RSET(self):
+        """Unset all deleted message flags"""
+        try:
+            self.mbox.undeleteMessages()
+        except:
+            log.err()
+            self.failResponse()
+        else:
+            self._highest = 0
+            self.successResponse()
+
+
+    def do_LAST(self):
+        """
+        Return the index of the highest message yet downloaded.
+        """
+        self.successResponse(self._highest)
+
+
+    def do_RPOP(self, user):
+        self.failResponse('permission denied, sucker')
+
+
+    def do_QUIT(self):
+        if self.mbox:
+            self.mbox.sync()
+        self.successResponse()
+        self.transport.loseConnection()
+
+
+    def authenticateUserAPOP(self, user, digest):
+        """Perform authentication of an APOP login.
+
+        @type user: C{str}
+        @param user: The name of the user attempting to log in.
+
+        @type digest: C{str}
+        @param digest: The response string with which the user replied.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if the login is
+        successful, and whose errback will be invoked otherwise.  The
+        callback will be passed a 3-tuple of the IMailbox interface, an
+        object implementing IMailbox, and a zero-argument callable to be
+        invoked when this session is terminated.
+        """
+        if self.portal is not None:
+            return self.portal.login(
+                APOPCredentials(self.magic, user, digest),
+                None,
+                IMailbox
+            )
+        raise cred.error.UnauthorizedLogin()
+
+    def authenticateUserPASS(self, user, password):
+        """Perform authentication of a username/password login.
+
+        @type user: C{str}
+        @param user: The name of the user attempting to log in.
+
+        @type password: C{str}
+        @param password: The password to attempt to authenticate with.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback is invoked if the login is
+        successful, and whose errback will be invoked otherwise.  The
+        callback will be passed a 3-tuple of the IMailbox interface, an
+        object implementing IMailbox, and a zero-argument callable to be
+        invoked when this session is terminated.
+        """
+        if self.portal is not None:
+            return self.portal.login(
+                cred.credentials.UsernamePassword(user, password),
+                None,
+                IMailbox
+            )
+        raise cred.error.UnauthorizedLogin()
+
+
+class IServerFactory(Interface):
+    """Interface for querying additional parameters of this POP3 server.
+
+    Any cap_* method may raise NotImplementedError if the particular
+    capability is not supported.  If cap_EXPIRE() does not raise
+    NotImplementedError, perUserExpiration() must be implemented, otherwise
+    they are optional.  If cap_LOGIN_DELAY() is implemented,
+    perUserLoginDelay() must be implemented, otherwise they are optional.
+
+    @ivar challengers: A dictionary mapping challenger names to classes
+    implementing C{IUsernameHashedPassword}.
+    """
+
+    def cap_IMPLEMENTATION():
+        """Return a string describing this POP3 server implementation."""
+
+    def cap_EXPIRE():
+        """Return the minimum number of days messages are retained."""
+
+    def perUserExpiration():
+        """Indicate whether message expiration is per-user.
+
+        @return: True if it is, false otherwise.
+        """
+
+    def cap_LOGIN_DELAY():
+        """Return the minimum number of seconds between client logins."""
+
+    def perUserLoginDelay():
+        """Indicate whether the login delay period is per-user.
+
+        @return: True if it is, false otherwise.
+        """
+
+class IMailbox(Interface):
+    """
+    @type loginDelay: C{int}
+    @ivar loginDelay: The number of seconds between allowed logins for the
+    user associated with this mailbox.
+
+    @type messageExpiration: C{int}
+    @ivar messageExpiration: The number of days messages in this mailbox will
+    remain on the server before being deleted.
+    """
+
+    def listMessages(index=None):
+        """Retrieve the size of one or more messages.
+
+        @type index: C{int} or C{None}
+        @param index: The number of the message for which to retrieve the
+        size (starting at 0), or None to retrieve the size of all messages.
+
+        @rtype: C{int} or any iterable of C{int} or a L{Deferred} which fires
+        with one of these.
+
+        @return: The number of octets in the specified message, or an iterable
+        of integers representing the number of octets in all the messages.  Any
+        value which would have referred to a deleted message should be set to 0.
+
+        @raise ValueError: if C{index} is greater than the index of any message
+        in the mailbox.
+        """
+
+    def getMessage(index):
+        """Retrieve a file-like object for a particular message.
+
+        @type index: C{int}
+        @param index: The number of the message to retrieve
+
+        @rtype: A file-like object
+        @return: A file containing the message data with lines delimited by
+        C{\\n}.
+        """
+
+    def getUidl(index):
+        """Get a unique identifier for a particular message.
+
+        @type index: C{int}
+        @param index: The number of the message for which to retrieve a UIDL
+
+        @rtype: C{str}
+        @return: A string of printable characters uniquely identifying for all
+        time the specified message.
+
+        @raise ValueError: if C{index} is greater than the index of any message
+        in the mailbox.
+        """
+
+    def deleteMessage(index):
+        """Delete a particular message.
+
+        This must not change the number of messages in this mailbox.  Further
+        requests for the size of deleted messages should return 0.  Further
+        requests for the message itself may raise an exception.
+
+        @type index: C{int}
+        @param index: The number of the message to delete.
+        """
+
+    def undeleteMessages():
+        """
+        Undelete any messages which have been marked for deletion since the
+        most recent L{sync} call.
+
+        Any message which can be undeleted should be returned to its
+        original position in the message sequence and retain its original
+        UID.
+        """
+
+    def sync():
+        """Perform checkpointing.
+
+        This method will be called to indicate the mailbox should attempt to
+        clean up any remaining deleted messages.
+        """
+
+
+
+class Mailbox:
+    implements(IMailbox)
+
+    def listMessages(self, i=None):
+        return []
+    def getMessage(self, i):
+        raise ValueError
+    def getUidl(self, i):
+        raise ValueError
+    def deleteMessage(self, i):
+        raise ValueError
+    def undeleteMessages(self):
+        pass
+    def sync(self):
+        pass
+
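+# Illustrative sketch (not part of the upstream file) of a minimal in-memory
+# IMailbox implementation honouring the contract documented above (deleted
+# messages keep their slot and report a size of 0).  It assumes
+# ``from StringIO import StringIO`` and uses the md5 already imported by this
+# module for UIDs; index/range checking is omitted for brevity:
+#
+#   class ListMailbox(Mailbox):
+#       def __init__(self, msgs):
+#           self.msgs = list(msgs)          # list of message strings
+#           self.deleted = set()
+#
+#       def listMessages(self, i=None):
+#           if i is None:
+#               return [0 if n in self.deleted else len(m)
+#                       for n, m in enumerate(self.msgs)]
+#           return 0 if i in self.deleted else len(self.msgs[i])
+#
+#       def getMessage(self, i):
+#           return StringIO(self.msgs[i])
+#
+#       def getUidl(self, i):
+#           return md5(self.msgs[i]).hexdigest()
+#
+#       def deleteMessage(self, i):
+#           self.deleted.add(i)
+#
+#       def undeleteMessages(self):
+#           self.deleted.clear()
+#
+#       def sync(self):
+#           self.msgs = [m for n, m in enumerate(self.msgs)
+#                        if n not in self.deleted]
+#           self.deleted.clear()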
+
+NONE, SHORT, FIRST_LONG, LONG = range(4)
+
+NEXT = {}
+NEXT[NONE] = NONE
+NEXT[SHORT] = NONE
+NEXT[FIRST_LONG] = LONG
+NEXT[LONG] = NONE
+
+class POP3Client(basic.LineOnlyReceiver):
+
+    mode = SHORT
+    command = 'WELCOME'
+    import re
+    welcomeRe = re.compile('<(.*)>')
+
+    def __init__(self):
+        import warnings
+        warnings.warn("twisted.mail.pop3.POP3Client is deprecated, "
+                      "please use twisted.mail.pop3.AdvancedPOP3Client "
+                      "instead.", DeprecationWarning,
+                      stacklevel=3)
+
+    def sendShort(self, command, params=None):
+        if params is not None:
+            self.sendLine('%s %s' % (command, params))
+        else:
+            self.sendLine(command)
+        self.command = command
+        self.mode = SHORT
+
+    def sendLong(self, command, params):
+        if params:
+            self.sendLine('%s %s' % (command, params))
+        else:
+            self.sendLine(command)
+        self.command = command
+        self.mode = FIRST_LONG
+
+    def handle_default(self, line):
+        if line[:4] == '-ERR':
+            self.mode = NONE
+
+    def handle_WELCOME(self, line):
+        code, data = line.split(' ', 1)
+        if code != '+OK':
+            self.transport.loseConnection()
+        else:
+            m = self.welcomeRe.match(line)
+            if m:
+                self.welcomeCode = m.group(1)
+
+    def _dispatch(self, command, default, *args):
+        try:
+            method = getattr(self, 'handle_'+command, default)
+            if method is not None:
+                method(*args)
+        except:
+            log.err()
+
+    def lineReceived(self, line):
+        if self.mode == SHORT or self.mode == FIRST_LONG:
+            self.mode = NEXT[self.mode]
+            self._dispatch(self.command, self.handle_default, line)
+        elif self.mode == LONG:
+            if line == '.':
+                self.mode = NEXT[self.mode]
+                self._dispatch(self.command+'_end', None)
+                return
+            if line[:1] == '.':
+                line = line[1:]
+            self._dispatch(self.command+"_continue", None, line)
+
+    def apopAuthenticate(self, user, password, magic):
+        digest = md5(magic + password).hexdigest()
+        self.apop(user, digest)
+
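+    # Illustrative sketch (not part of the upstream file): the APOP digest is
+    # the hex MD5 of the server's timestamp banner concatenated with the
+    # shared secret.  Using the example values from RFC 1939:
+    #
+    #   md5('<1896.697170952@dbc.mtview.ca.us>' + 'tanstaaf').hexdigest()
+    #   == 'c4c9334bac560ecc979e58001b3e22fb'
+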
+    def apop(self, user, digest):
+        self.sendLong('APOP', ' '.join((user, digest)))
+    def retr(self, i):
+        self.sendLong('RETR', i)
+    def dele(self, i):
+        self.sendShort('DELE', i)
+    def list(self, i=''):
+        self.sendLong('LIST', i)
+    def uidl(self, i=''):
+        self.sendLong('UIDL', i)
+    def user(self, name):
+        self.sendShort('USER', name)
+    def pass_(self, pass_):
+        self.sendShort('PASS', pass_)
+    def quit(self):
+        self.sendShort('QUIT')
+
+from twisted.mail.pop3client import POP3Client as AdvancedPOP3Client
+from twisted.mail.pop3client import POP3ClientError
+from twisted.mail.pop3client import InsecureAuthenticationDisallowed
+from twisted.mail.pop3client import ServerErrorResponse
+from twisted.mail.pop3client import LineTooLong
+
+__all__ = [
+    # Interfaces
+    'IMailbox', 'IServerFactory',
+
+    # Exceptions
+    'POP3Error', 'POP3ClientError', 'InsecureAuthenticationDisallowed',
+    'ServerErrorResponse', 'LineTooLong',
+
+    # Protocol classes
+    'POP3', 'POP3Client', 'AdvancedPOP3Client',
+
+    # Misc
+    'APOPCredentials', 'Mailbox']
diff --git a/ThirdParty/Twisted/twisted/mail/pop3client.py b/ThirdParty/Twisted/twisted/mail/pop3client.py
new file mode 100644
index 0000000..fe8f497
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/pop3client.py
@@ -0,0 +1,706 @@
+# -*- test-case-name: twisted.mail.test.test_pop3client -*-
+# Copyright (c) 2001-2004 Divmod Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+POP3 client protocol implementation
+
+Don't use this module directly.  Use twisted.mail.pop3 instead.
+
+@author: Jp Calderone
+"""
+
+import re
+
+from twisted.python import log
+from twisted.python.hashlib import md5
+from twisted.internet import defer
+from twisted.protocols import basic
+from twisted.protocols import policies
+from twisted.internet import error
+from twisted.internet import interfaces
+
+OK = '+OK'
+ERR = '-ERR'
+
+class POP3ClientError(Exception):
+    """Base class for all exceptions raised by POP3Client.
+    """
+
+class InsecureAuthenticationDisallowed(POP3ClientError):
+    """Secure authentication was required but no mechanism could be found.
+    """
+
+class TLSError(POP3ClientError):
+    """
+    Secure authentication was required but either the transport does
+    not support TLS or no TLS context factory was supplied.
+    """
+
+class TLSNotSupportedError(POP3ClientError):
+    """
+    Secure authentication was required but the server does not support
+    TLS.
+    """
+
+class ServerErrorResponse(POP3ClientError):
+    """The server returned an error response to a request.
+    """
+    def __init__(self, reason, consumer=None):
+        POP3ClientError.__init__(self, reason)
+        self.consumer = consumer
+
+class LineTooLong(POP3ClientError):
+    """The server sent an extremely long line.
+    """
+
+class _ListSetter:
+    # Internal helper.  POP3 responses sometimes occur in the
+    # form of a list of lines containing two pieces of data,
+    # a message index and a value of some sort.  When a message
+    # is deleted, it is omitted from these responses.  The
+    # setitem method of this class is meant to be called with
+    # these two values.  In the cases where indexes are skipped,
+    # it takes care of padding out the missing values with None.
+    def __init__(self, L):
+        self.L = L
+    def setitem(self, (item, value)):
+        diff = item - len(self.L) + 1
+        if diff > 0:
+            self.L.extend([None] * diff)
+        self.L[item] = value
+
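+# Illustrative example (not part of the upstream file): _ListSetter pads
+# indices skipped because of deleted messages with None, e.g.:
+#
+#   L = []
+#   setter = _ListSetter(L)
+#   setter.setitem((0, 120))
+#   setter.setitem((2, 340))    # message 1 was deleted and is omitted
+#   L == [120, None, 340]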
+
+def _statXform(line):
+    # Parse a STAT response
+    numMsgs, totalSize = line.split(None, 1)
+    return int(numMsgs), int(totalSize)
+
+
+def _listXform(line):
+    # Parse a LIST response
+    index, size = line.split(None, 1)
+    return int(index) - 1, int(size)
+
+
+def _uidXform(line):
+    # Parse a UIDL response
+    index, uid = line.split(None, 1)
+    return int(index) - 1, uid
+
+def _codeStatusSplit(line):
+    # Parse an +OK or -ERR response
+    parts = line.split(' ', 1)
+    if len(parts) == 1:
+        return parts[0], ''
+    return parts
+
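+# Illustrative examples (not part of the upstream file) of the parsing
+# helpers above; the UID string is hypothetical:
+#
+#   _statXform('5 10240')            ->  (5, 10240)
+#   _listXform('1 120')              ->  (0, 120)    # 1-based becomes 0-based
+#   _uidXform('2 QhdPYR:00WBw1Ph')   ->  (1, 'QhdPYR:00WBw1Ph')
+#   _codeStatusSplit('+OK 2 10240')  ->  ['+OK', '2 10240']
+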
+def _dotUnquoter(line):
+    """
+    C{'.'} characters which begin a line of a message are doubled to avoid
+    confusion with the terminating C{'.\\r\\n'} sequence.  This function
+    unquotes them.
+    """
+    if line.startswith('..'):
+        return line[1:]
+    return line
+
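+# Illustrative examples (not part of the upstream file) of dot-unstuffing:
+#
+#   _dotUnquoter('..starts with a dot')  ->  '.starts with a dot'
+#   _dotUnquoter('ordinary line')        ->  'ordinary line'
+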
+class POP3Client(basic.LineOnlyReceiver, policies.TimeoutMixin):
+    """POP3 client protocol implementation class
+
+    Instances of this class provide a convenient, efficient API for
+    retrieving and deleting messages from a POP3 server.
+
+    @type startedTLS: C{bool}
+    @ivar startedTLS: Whether TLS has been negotiated successfully.
+
+
+    @type allowInsecureLogin: C{bool}
+    @ivar allowInsecureLogin: Indicate whether login() should be
+    allowed if the server offers no authentication challenge and if
+    our transport does not offer any protection via encryption.
+
+    @type serverChallenge: C{str} or C{None}
+    @ivar serverChallenge: Challenge received from the server
+
+    @type timeout: C{int}
+    @ivar timeout: Number of seconds to wait before timing out a
+    connection.  If the number is <= 0, no timeout checking will be
+    performed.
+    """
+
+    startedTLS = False
+    allowInsecureLogin = False
+    timeout = 0
+    serverChallenge = None
+
+    # Capabilities are not allowed to change during the session
+    # (except when TLS is negotiated), so cache the first response and
+    # use that for all later lookups
+    _capCache = None
+
+    # Regular expression to search for in the challenge string in the server
+    # greeting line.
+    _challengeMagicRe = re.compile('(<[^>]+>)')
+
+    # List of pending calls.
+    # We are a pipelining API but don't actually
+    # support pipelining on the network yet.
+    _blockedQueue = None
+
+    # The Deferred to which the very next result will go.
+    _waiting = None
+
+    # Whether we dropped the connection because of a timeout
+    _timedOut = False
+
+    # If the server sends an initial -ERR, this is the message it sent
+    # with it.
+    _greetingError = None
+
+    def _blocked(self, f, *a):
+        # Internal helper.  If commands are being blocked, append
+        # the given command and arguments to a list and return a Deferred
+        # that will be chained with the return value of the function
+        # when it eventually runs.  Otherwise, set up for commands to be
+        # blocked and return None.
+        if self._blockedQueue is not None:
+            d = defer.Deferred()
+            self._blockedQueue.append((d, f, a))
+            return d
+        self._blockedQueue = []
+        return None
+
+    def _unblock(self):
+        # Internal helper.  Indicate that a function has completed.
+        # If there are blocked commands, run the next one.  If there
+        # are not, set up for the next command to not be blocked.
+        if self._blockedQueue == []:
+            self._blockedQueue = None
+        elif self._blockedQueue is not None:
+            _blockedQueue = self._blockedQueue
+            self._blockedQueue = None
+
+            d, f, a = _blockedQueue.pop(0)
+            d2 = f(*a)
+            d2.chainDeferred(d)
+            # f is a function which uses _blocked (otherwise it wouldn't
+            # have gotten into the blocked queue), which means it will have
+            # re-set _blockedQueue to an empty list, so we can put the rest
+            # of the blocked queue back into it now.
+            self._blockedQueue.extend(_blockedQueue)
+
+
+    def sendShort(self, cmd, args):
+        # Internal helper.  Send a command to which a short response
+        # is expected.  Return a Deferred that fires when the response
+        # is received.  Block all further commands from being sent until
+        # the response is received.  Transition the state to SHORT.
+        d = self._blocked(self.sendShort, cmd, args)
+        if d is not None:
+            return d
+
+        if args:
+            self.sendLine(cmd + ' ' + args)
+        else:
+            self.sendLine(cmd)
+        self.state = 'SHORT'
+        self._waiting = defer.Deferred()
+        return self._waiting
+
+    def sendLong(self, cmd, args, consumer, xform):
+        # Internal helper.  Send a command to which a multiline
+        # response is expected.  Return a Deferred that fires when
+        # the entire response is received.  Block all further commands
+        # from being sent until the entire response is received.
+        # Transition the state to LONG_INITIAL.
+        d = self._blocked(self.sendLong, cmd, args, consumer, xform)
+        if d is not None:
+            return d
+
+        if args:
+            self.sendLine(cmd + ' ' + args)
+        else:
+            self.sendLine(cmd)
+        self.state = 'LONG_INITIAL'
+        self._xform = xform
+        self._consumer = consumer
+        self._waiting = defer.Deferred()
+        return self._waiting
+
+    # Twisted protocol callback
+    def connectionMade(self):
+        if self.timeout > 0:
+            self.setTimeout(self.timeout)
+
+        self.state = 'WELCOME'
+        self._blockedQueue = []
+
+    def timeoutConnection(self):
+        self._timedOut = True
+        self.transport.loseConnection()
+
+    def connectionLost(self, reason):
+        if self.timeout > 0:
+            self.setTimeout(None)
+
+        if self._timedOut:
+            reason = error.TimeoutError()
+        elif self._greetingError:
+            reason = ServerErrorResponse(self._greetingError)
+
+        d = []
+        if self._waiting is not None:
+            d.append(self._waiting)
+            self._waiting = None
+        if self._blockedQueue is not None:
+            d.extend([deferred for (deferred, f, a) in self._blockedQueue])
+            self._blockedQueue = None
+        for w in d:
+            w.errback(reason)
+
+    def lineReceived(self, line):
+        if self.timeout > 0:
+            self.resetTimeout()
+
+        state = self.state
+        self.state = None
+        state = getattr(self, 'state_' + state)(line) or state
+        if self.state is None:
+            self.state = state
+
+    def lineLengthExceeded(self, buffer):
+        # XXX - We need to be smarter about this
+        if self._waiting is not None:
+            waiting, self._waiting = self._waiting, None
+            waiting.errback(LineTooLong())
+        self.transport.loseConnection()
+
+    # POP3 Client state logic - don't touch this.
+    def state_WELCOME(self, line):
+        # WELCOME is the first state.  The server sends one line of text
+        # greeting us, possibly with an APOP challenge.  Transition the
+        # state to WAITING.
+        code, status = _codeStatusSplit(line)
+        if code != OK:
+            self._greetingError = status
+            self.transport.loseConnection()
+        else:
+            m = self._challengeMagicRe.search(status)
+
+            if m is not None:
+                self.serverChallenge = m.group(1)
+
+            self.serverGreeting(status)
+
+        self._unblock()
+        return 'WAITING'
+
+    def state_WAITING(self, line):
+        # The server isn't supposed to send us anything in this state.
+        log.msg("Illegal line from server: " + repr(line))
+
+    def state_SHORT(self, line):
+        # This is the state we are in when waiting for a single
+        # line response.  Parse it and fire the appropriate callback
+        # or errback.  Transition the state back to WAITING.
+        deferred, self._waiting = self._waiting, None
+        self._unblock()
+        code, status = _codeStatusSplit(line)
+        if code == OK:
+            deferred.callback(status)
+        else:
+            deferred.errback(ServerErrorResponse(status))
+        return 'WAITING'
+
+    def state_LONG_INITIAL(self, line):
+        # This is the state we are in when waiting for the first
+        # line of a long response.  Parse it and transition the
+        # state to LONG if it is an okay response; if it is an
+        # error response, fire an errback, clean up the things
+        # waiting for a long response, and transition the state
+        # to WAITING.
+        code, status = _codeStatusSplit(line)
+        if code == OK:
+            return 'LONG'
+        consumer = self._consumer
+        deferred = self._waiting
+        self._consumer = self._waiting = self._xform = None
+        self._unblock()
+        deferred.errback(ServerErrorResponse(status, consumer))
+        return 'WAITING'
+
+    def state_LONG(self, line):
+        # This is the state for each line of a long response.
+        # If it is the last line, finish things, fire the
+        # Deferred, and transition the state to WAITING.
+        # Otherwise, pass the line to the consumer.
+        if line == '.':
+            consumer = self._consumer
+            deferred = self._waiting
+            self._consumer = self._waiting = self._xform = None
+            self._unblock()
+            deferred.callback(consumer)
+            return 'WAITING'
+        else:
+            if self._xform is not None:
+                self._consumer(self._xform(line))
+            else:
+                self._consumer(line)
+            return 'LONG'
+
+
+    # Callbacks - override these
+    def serverGreeting(self, greeting):
+        """Called when the server has sent us a greeting.
+
+        @type greeting: C{str} or C{None}
+        @param greeting: The status message sent with the server
+        greeting.  For servers implementing APOP authentication, this
+        will be a challenge string.
+        """
+
+
+    # External API - call these (most of 'em anyway)
+    def startTLS(self, contextFactory=None):
+        """
+        Initiates a 'STLS' request and negotiates the TLS / SSL
+        Handshake.
+
+        @type contextFactory: C{ssl.ClientContextFactory}
+        @param contextFactory: The context factory with which to negotiate
+        TLS.  If C{None}, try to create a new one.
+
+        @return: A Deferred which fires when the transport has been
+        secured according to the given contextFactory, or which fails
+        if the transport cannot be secured.
+        """
+        tls = interfaces.ITLSTransport(self.transport, None)
+        if tls is None:
+            return defer.fail(TLSError(
+                "POP3Client transport does not implement "
+                "interfaces.ITLSTransport"))
+
+        if contextFactory is None:
+            contextFactory = self._getContextFactory()
+
+        if contextFactory is None:
+            return defer.fail(TLSError(
+                "POP3Client requires a TLS context to "
+                "initiate the STLS handshake"))
+
+        d = self.capabilities()
+        d.addCallback(self._startTLS, contextFactory, tls)
+        return d
+
+
+    def _startTLS(self, caps, contextFactory, tls):
+        assert not self.startedTLS, "Client and Server are currently communicating via TLS"
+
+        if 'STLS' not in caps:
+            return defer.fail(TLSNotSupportedError(
+                "Server does not support secure communication "
+                "via TLS / SSL"))
+
+        d = self.sendShort('STLS', None)
+        d.addCallback(self._startedTLS, contextFactory, tls)
+        d.addCallback(lambda _: self.capabilities())
+        return d
+
+
+    def _startedTLS(self, result, context, tls):
+        self.transport = tls
+        self.transport.startTLS(context)
+        self._capCache = None
+        self.startedTLS = True
+        return result
+
+
+    def _getContextFactory(self):
+        try:
+            from twisted.internet import ssl
+        except ImportError:
+            return None
+        else:
+            context = ssl.ClientContextFactory()
+            context.method = ssl.SSL.TLSv1_METHOD
+            return context
+
+
+    def login(self, username, password):
+        """Log into the server.
+
+        If APOP is available it will be used.  Otherwise, if TLS is
+        available an 'STLS' session will be started and plaintext
+        login will proceed.  Otherwise, if the instance attribute
+        allowInsecureLogin is set to True, insecure plaintext login
+        will proceed.  Otherwise, InsecureAuthenticationDisallowed
+        will be raised (asynchronously).
+
+        @param username: The username with which to log in.
+        @param password: The password with which to log in.
+
+        @rtype: C{Deferred}
+        @return: A deferred which fires when login has
+        completed.
+        """
+        d = self.capabilities()
+        d.addCallback(self._login, username, password)
+        return d
+
+
+    def _login(self, caps, username, password):
+        if self.serverChallenge is not None:
+            return self._apop(username, password, self.serverChallenge)
+
+        tryTLS = 'STLS' in caps
+
+        # If our transport supports switching to TLS, we might want to try
+        # to switch to TLS.
+        tlsableTransport = interfaces.ITLSTransport(self.transport, None) is not None
+
+        # If our transport is not already using TLS, we might want to try to switch to TLS.
+        nontlsTransport = interfaces.ISSLTransport(self.transport, None) is None
+
+        if not self.startedTLS and tryTLS and tlsableTransport and nontlsTransport:
+            d = self.startTLS()
+
+            d.addCallback(self._loginTLS, username, password)
+            return d
+
+        elif self.startedTLS or not nontlsTransport or self.allowInsecureLogin:
+            return self._plaintext(username, password)
+        else:
+            return defer.fail(InsecureAuthenticationDisallowed())
+
+
+    def _loginTLS(self, res, username, password):
+        return self._plaintext(username, password)
+
+    def _plaintext(self, username, password):
+        # Internal helper.  Send a username/password pair, returning a Deferred
+        # that fires when both have succeeded or fails when the server rejects
+        # either.
+        return self.user(username).addCallback(lambda r: self.password(password))
+
+    def _apop(self, username, password, challenge):
+        # Internal helper.  Computes and sends an APOP response.  Returns
+        # a Deferred that fires when the server responds to the response.
+        digest = md5(challenge + password).hexdigest()
+        return self.apop(username, digest)
+
+    def apop(self, username, digest):
+        """Perform APOP login.
+
+        This should be used in special circumstances only, when it is
+        known that the server supports APOP authentication, and APOP
+        authentication is absolutely required.  For the common case,
+        use L{login} instead.
+
+        @param username: The username with which to log in.
+        @param digest: The challenge response to authenticate with.
+        """
+        return self.sendShort('APOP', username + ' ' + digest)
+
+    def user(self, username):
+        """Send the user command.
+
+        This performs the first half of plaintext login.  Unless this
+        is absolutely required, use the L{login} method instead.
+
+        @param username: The username with which to log in.
+        """
+        return self.sendShort('USER', username)
+
+    def password(self, password):
+        """Send the password command.
+
+        This performs the second half of plaintext login.  Unless this
+        is absolutely required, use the L{login} method instead.
+
+        @param password: The plaintext password with which to authenticate.
+        """
+        return self.sendShort('PASS', password)
+
+    def delete(self, index):
+        """Delete a message from the server.
+
+        @type index: C{int}
+        @param index: The index of the message to delete.
+        This is 0-based.
+
+        @rtype: C{Deferred}
+        @return: A deferred which fires when the delete command
+        is successful, or fails if the server returns an error.
+        """
+        return self.sendShort('DELE', str(index + 1))
+
+    def _consumeOrSetItem(self, cmd, args, consumer, xform):
+        # Internal helper.  Send a long command.  If no consumer is
+        # provided, create a consumer that puts results into a list
+        # and return a Deferred that fires with that list when it
+        # is complete.
+        if consumer is None:
+            L = []
+            consumer = _ListSetter(L).setitem
+            return self.sendLong(cmd, args, consumer, xform).addCallback(lambda r: L)
+        return self.sendLong(cmd, args, consumer, xform)
+
+    def _consumeOrAppend(self, cmd, args, consumer, xform):
+        # Internal helper.  Send a long command.  If no consumer is
+        # provided, create a consumer that appends results to a list
+        # and return a Deferred that fires with that list when it is
+        # complete.
+        if consumer is None:
+            L = []
+            consumer = L.append
+            return self.sendLong(cmd, args, consumer, xform).addCallback(lambda r: L)
+        return self.sendLong(cmd, args, consumer, xform)
+
+    def capabilities(self, useCache=True):
+        """Retrieve the capabilities supported by this server.
+
+        Not all servers support this command.  If the server does not
+        support this, it is treated as though it returned a successful
+        response listing no capabilities.  At some future time, this may be
+        changed to instead seek out information about a server's
+        capabilities in some other fashion (only if it proves useful to do
+        so, and only if there are servers still in use which do not support
+        CAPA but which do support POP3 extensions that are useful).
+
+        @type useCache: C{bool}
+        @param useCache: If set, and if capabilities have been
+        retrieved previously, just return the previously retrieved
+        results.
+
+        @return: A Deferred which fires with a C{dict} mapping C{str}
+        to C{None} or C{list}s of C{str}.  For example::
+
+            C: CAPA
+            S: +OK Capability list follows
+            S: TOP
+            S: USER
+            S: SASL CRAM-MD5 KERBEROS_V4
+            S: RESP-CODES
+            S: LOGIN-DELAY 900
+            S: PIPELINING
+            S: EXPIRE 60
+            S: UIDL
+            S: IMPLEMENTATION Shlemazle-Plotz-v302
+            S: .
+
+        will lead to a result of::
+
+            | {'TOP': None,
+            |  'USER': None,
+            |  'SASL': ['CRAM-MD5', 'KERBEROS_V4'],
+            |  'RESP-CODES': None,
+            |  'LOGIN-DELAY': ['900'],
+            |  'PIPELINING': None,
+            |  'EXPIRE': ['60'],
+            |  'UIDL': None,
+            |  'IMPLEMENTATION': ['Shlemazle-Plotz-v302']}
+        """
+        if useCache and self._capCache is not None:
+            return defer.succeed(self._capCache)
+
+        cache = {}
+        def consume(line):
+            tmp = line.split()
+            if len(tmp) == 1:
+                cache[tmp[0]] = None
+            elif len(tmp) > 1:
+                cache[tmp[0]] = tmp[1:]
+
+        def capaNotSupported(err):
+            err.trap(ServerErrorResponse)
+            return None
+
+        def gotCapabilities(result):
+            self._capCache = cache
+            return cache
+
+        d = self._consumeOrAppend('CAPA', None, consume, None)
+        d.addErrback(capaNotSupported).addCallback(gotCapabilities)
+        return d
+
+
+    def noop(self):
+        """Do nothing, with the help of the server.
+
+        No operation is performed.  The returned Deferred fires when
+        the server responds.
+        """
+        return self.sendShort("NOOP", None)
+
+
+    def reset(self):
+        """Remove the deleted flag from any messages which have it.
+
+        The returned Deferred fires when the server responds.
+        """
+        return self.sendShort("RSET", None)
+
+
+    def retrieve(self, index, consumer=None, lines=None):
+        """Retrieve a message from the server.
+
+        If L{consumer} is not None, it will be called with
+        each line of the message as it is received.  Otherwise,
+        the returned Deferred will be fired with a list of all
+        the lines when the message has been completely received.
+        """
+        idx = str(index + 1)
+        if lines is None:
+            return self._consumeOrAppend('RETR', idx, consumer, _dotUnquoter)
+
+        return self._consumeOrAppend('TOP', '%s %d' % (idx, lines), consumer, _dotUnquoter)
+
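+    # Illustrative usage (not part of the upstream file); ``client`` is a
+    # hypothetical connected and authenticated POP3Client instance:
+    #
+    #   d = client.retrieve(0)            # full text of the first message
+    #   d = client.retrieve(0, lines=0)   # headers only, via TOP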
+
+    def stat(self):
+        """Get information about the size of this mailbox.
+
+        The returned Deferred will be fired with a tuple containing
+        the number or messages in the mailbox and the size (in bytes)
+        of the mailbox.
+        """
+        return self.sendShort('STAT', None).addCallback(_statXform)
+
+
+    def listSize(self, consumer=None):
+        """Retrieve a list of the size of all messages on the server.
+
+        If L{consumer} is not None, it will be called with two-tuples
+        of message index number and message size as they are received.
+        Otherwise, a Deferred which will fire with a list of B{only}
+        message sizes will be returned.  For messages which have been
+        deleted, None will be used in place of the message size.
+        """
+        return self._consumeOrSetItem('LIST', None, consumer, _listXform)
+
+
+    def listUID(self, consumer=None):
+        """Retrieve a list of the UIDs of all messages on the server.
+
+        If L{consumer} is not None, it will be called with two-tuples
+        of message index number and message UID as they are received.
+        Otherwise, a Deferred which will fire with a list of B{only}
+        message UIDs will be returned.  For messages which have been
+        deleted, None will be used in place of the message UID.
+        """
+        return self._consumeOrSetItem('UIDL', None, consumer, _uidXform)
+
+
+    def quit(self):
+        """Disconnect from the server.
+        """
+        return self.sendShort('QUIT', None)
+
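+# Illustrative usage sketch (not part of the upstream file); the subclass
+# name, host and credentials are hypothetical:
+#
+#   from twisted.internet import reactor, protocol
+#
+#   class MyPOP3Client(POP3Client):
+#       allowInsecureLogin = True     # only acceptable for a trusted server
+#
+#       def serverGreeting(self, greeting):
+#           d = self.login('alice', 'secret')
+#           d.addCallback(lambda _: self.listSize())
+#           d.addCallback(lambda sizes: log.msg('sizes: %r' % (sizes,)))
+#           d.addCallback(lambda _: self.quit())
+#
+#   factory = protocol.ClientFactory()
+#   factory.protocol = MyPOP3Client
+#   reactor.connectTCP('pop3.example.com', 110, factory)
+#   reactor.run()
+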
+__all__ = [
+    # Exceptions
+    'InsecureAuthenticationDisallowed', 'LineTooLong', 'POP3ClientError',
+    'ServerErrorResponse', 'TLSError', 'TLSNotSupportedError',
+
+    # Protocol classes
+    'POP3Client']
diff --git a/ThirdParty/Twisted/twisted/mail/protocols.py b/ThirdParty/Twisted/twisted/mail/protocols.py
new file mode 100644
index 0000000..446592b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/protocols.py
@@ -0,0 +1,233 @@
+# -*- test-case-name: twisted.mail.test.test_mail -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Protocol support for twisted.mail."""
+
+# twisted imports
+from twisted.mail import pop3
+from twisted.mail import smtp
+from twisted.internet import protocol
+from twisted.internet import defer
+from twisted.copyright import longversion
+from twisted.python import log
+from twisted.python.deprecate import deprecatedModuleAttribute
+from twisted.python.versions import Version
+
+from twisted import cred
+import twisted.cred.error
+import twisted.cred.credentials
+
+from twisted.mail import relay
+
+from zope.interface import implements
+
+
+class DomainDeliveryBase:
+    """A server that uses twisted.mail service's domains."""
+
+    implements(smtp.IMessageDelivery)
+    
+    service = None
+    protocolName = None
+
+    def __init__(self, service, user, host=smtp.DNSNAME):
+        self.service = service
+        self.user = user
+        self.host = host
+    
+    def receivedHeader(self, helo, origin, recipients):
+        authStr = heloStr = ""
+        if self.user:
+            authStr = " auth=%s" % (self.user.encode('xtext'),)
+        if helo[0]:
+            heloStr = " helo=%s" % (helo[0],)
+        from_ = "from %s ([%s]%s%s)" % (helo[0], helo[1], heloStr, authStr)
+        by = "by %s with %s (%s)" % (
+            self.host, self.protocolName, longversion
+        )
+        for_ = "for <%s>; %s" % (' '.join(map(str, recipients)), smtp.rfc822date())
+        return "Received: %s\n\t%s\n\t%s" % (from_, by, for_)
+    
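+    # Illustrative example (not part of the upstream file) of the header this
+    # method builds, with hypothetical helo, host and recipient values:
+    #
+    #   Received: from client.example.org ([192.0.2.1] helo=client.example.org)
+    #           by mail.example.org with smtp (<twisted.copyright.longversion>)
+    #           for <bob@example.org>; <smtp.rfc822date()>
+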
+    def validateTo(self, user):
+        # XXX - Yick.  This needs cleaning up.
+        if self.user and self.service.queue:
+            d = self.service.domains.get(user.dest.domain, None)
+            if d is None:
+                d = relay.DomainQueuer(self.service, True)
+        else:
+            d = self.service.domains[user.dest.domain]
+        return defer.maybeDeferred(d.exists, user)
+
+    def validateFrom(self, helo, origin):
+        if not helo:
+            raise smtp.SMTPBadSender(origin, 503, "Who are you?  Say HELO first.")
+        if origin.local != '' and origin.domain == '':
+            raise smtp.SMTPBadSender(origin, 501, "Sender address must contain domain.")
+        return origin
+
+    def startMessage(self, users):
+        ret = []
+        for user in users:
+            ret.append(self.service.domains[user.dest.domain].startMessage(user))
+        return ret
+
+
+class SMTPDomainDelivery(DomainDeliveryBase):
+    protocolName = 'smtp'
+
+class ESMTPDomainDelivery(DomainDeliveryBase):
+    protocolName = 'esmtp'
+
+class DomainSMTP(SMTPDomainDelivery, smtp.SMTP):
+    service = user = None
+
+    def __init__(self, *args, **kw):
+        import warnings
+        warnings.warn(
+            "DomainSMTP is deprecated.  Use IMessageDelivery objects instead.",
+            DeprecationWarning, stacklevel=2,
+        )
+        smtp.SMTP.__init__(self, *args, **kw)
+        if self.delivery is None:
+            self.delivery = self
+
+class DomainESMTP(ESMTPDomainDelivery, smtp.ESMTP):
+    service = user = None
+
+    def __init__(self, *args, **kw):
+        import warnings
+        warnings.warn(
+            "DomainESMTP is deprecated.  Use IMessageDelivery objects instead.",
+            DeprecationWarning, stacklevel=2,
+        )
+        smtp.ESMTP.__init__(self, *args, **kw)
+        if self.delivery is None:
+            self.delivery = self
+
+class SMTPFactory(smtp.SMTPFactory):
+    """A protocol factory for SMTP."""
+
+    protocol = smtp.SMTP
+    portal = None
+
+    def __init__(self, service, portal = None):
+        smtp.SMTPFactory.__init__(self)
+        self.service = service
+        self.portal = portal
+    
+    def buildProtocol(self, addr):
+        log.msg('Connection from %s' % (addr,))
+        p = smtp.SMTPFactory.buildProtocol(self, addr)
+        p.service = self.service
+        p.portal = self.portal
+        return p
+
+class ESMTPFactory(SMTPFactory):
+    protocol = smtp.ESMTP
+    context = None
+
+    def __init__(self, *args):
+        SMTPFactory.__init__(self, *args)
+        self.challengers = {
+            'CRAM-MD5': cred.credentials.CramMD5Credentials
+        }
+    
+    def buildProtocol(self, addr):
+        p = SMTPFactory.buildProtocol(self, addr)
+        p.challengers = self.challengers
+        p.ctx = self.context
+        return p
+
+class VirtualPOP3(pop3.POP3):
+    """Virtual hosting POP3."""
+
+    service = None
+
+    domainSpecifier = '@' # Gaagh! I hate POP3. No standardized way
+                          # to indicate user at host. '@' doesn't work
+                          # with NS, e.g.
+
+    def authenticateUserAPOP(self, user, digest):
+        # Override the default lookup scheme to allow virtual domains
+        user, domain = self.lookupDomain(user)
+        try:
+            portal = self.service.lookupPortal(domain)
+        except KeyError:
+            return defer.fail(cred.error.UnauthorizedLogin())
+        else:
+            return portal.login(
+                pop3.APOPCredentials(self.magic, user, digest),
+                None,
+                pop3.IMailbox
+            )
+
+    def authenticateUserPASS(self, user, password):
+        user, domain = self.lookupDomain(user)
+        try:
+            portal = self.service.lookupPortal(domain)
+        except KeyError:
+            return defer.fail(cred.error.UnauthorizedLogin())
+        else:
+            return portal.login(
+                cred.credentials.UsernamePassword(user, password),
+                None,
+                pop3.IMailbox
+            )
+
+    def lookupDomain(self, user):
+        try:
+            user, domain = user.split(self.domainSpecifier, 1)
+        except ValueError:
+            domain = ''
+        if domain not in self.service.domains:
+            raise pop3.POP3Error("no such domain %s" % domain)
+        return user, domain
+
+
+class POP3Factory(protocol.ServerFactory):
+    """POP3 protocol factory."""
+
+    protocol = VirtualPOP3
+    service = None
+
+    def __init__(self, service):
+        self.service = service
+    
+    def buildProtocol(self, addr):
+        p = protocol.ServerFactory.buildProtocol(self, addr)
+        p.service = self.service
+        return p
+
+#
+# It is useful to know, perhaps, that the required file for this to work can
+# be created thusly:
+#
+# openssl req -x509 -newkey rsa:2048 -keyout file.key -out file.crt \
+# -days 365 -nodes
+#
+# And then cat file.key and file.crt together.  The number of days and bits
+# can be changed, of course.
+#
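+#
+# A minimal usage sketch (hypothetical file name; as the deprecation notice
+# in the class below says, new code should prefer
+# twisted.internet.ssl.DefaultOpenSSLContextFactory):
+#
+#   cat file.key file.crt > server.pem          (shell)
+#
+#   contextFactory = SSLContextFactory("server.pem")
+#   ctx = contextFactory.getContext()
+#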
+class SSLContextFactory:
+    """
+    An SSL Context Factory.
+
+    This loads a certificate and private key from a specified file.
+    """
+    deprecatedModuleAttribute(
+        Version("Twisted", 12, 2, 0),
+        "Use twisted.internet.ssl.DefaultOpenSSLContextFactory instead.",
+        "twisted.mail.protocols", "SSLContextFactory")
+
+    def __init__(self, filename):
+        self.filename = filename
+
+    def getContext(self):
+        """Create an SSL context."""
+        from OpenSSL import SSL
+        ctx = SSL.Context(SSL.SSLv23_METHOD)
+        ctx.use_certificate_file(self.filename)
+        ctx.use_privatekey_file(self.filename)
+        return ctx
diff --git a/ThirdParty/Twisted/twisted/mail/relay.py b/ThirdParty/Twisted/twisted/mail/relay.py
new file mode 100644
index 0000000..ac68095
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/relay.py
@@ -0,0 +1,114 @@
+# -*- test-case-name: twisted.mail.test.test_mail -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Support for relaying mail for twisted.mail"""
+
+from twisted.mail import smtp
+from twisted.python import log
+from twisted.internet.address import UNIXAddress
+
+import os
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+class DomainQueuer:
+    """An SMTP domain which add messages to a queue intended for relaying."""
+
+    def __init__(self, service, authenticated=False):
+        self.service = service
+        self.authed = authenticated
+
+    def exists(self, user):
+        """Check whether we will relay.
+
+        Calls the overridable willRelay method.
+        """
+        if self.willRelay(user.dest, user.protocol):
+            # The most cursory form of verification of the addresses
+            orig = filter(None, str(user.orig).split('@', 1))
+            dest = filter(None, str(user.dest).split('@', 1))
+            if len(orig) == 2 and len(dest) == 2:
+                return lambda: self.startMessage(user)
+        raise smtp.SMTPBadRcpt(user)
+
+    def willRelay(self, address, protocol):
+        """Check whether we agree to relay
+
+        The default is to relay for all connections over UNIX
+        sockets and all connections from localhost.
+        """
+        peer = protocol.transport.getPeer()
+        return self.authed or isinstance(peer, UNIXAddress) or peer.host == '127.0.0.1'
+
+    def startMessage(self, user):
+        """Add the envelope to the queue and return an ISMTPMessage."""
+        queue = self.service.queue
+        envelopeFile, smtpMessage = queue.createNewMessage()
+        try:
+            log.msg('Queueing mail %r -> %r' % (str(user.orig), str(user.dest)))
+            pickle.dump([str(user.orig), str(user.dest)], envelopeFile)
+        finally:
+            envelopeFile.close()
+        return smtpMessage
+
+class RelayerMixin:
+
+    # XXX - This is -totally- bogus
+    # It opens about a -hundred- -billion- files
+    # and -leaves- them open!
+
+    def loadMessages(self, messagePaths):
+        self.messages = []
+        self.names = []
+        for message in messagePaths:
+            fp = open(message+'-H')
+            try:
+                messageContents = pickle.load(fp)
+            finally:
+                fp.close()
+            fp = open(message+'-D')
+            messageContents.append(fp)
+            self.messages.append(messageContents)
+            self.names.append(message)
+    
+    def getMailFrom(self):
+        if not self.messages:
+            return None
+        return self.messages[0][0]
+
+    def getMailTo(self):
+        if not self.messages:
+            return None
+        return [self.messages[0][1]]
+
+    def getMailData(self):
+        if not self.messages:
+            return None
+        return self.messages[0][2]
+
+    def sentMail(self, code, resp, numOk, addresses, log):
+        """Since we only use one recipient per envelope, this
+        will be called with 0 or 1 addresses. We probably want
+        to do something with the error message if we failed.
+        """
+        if code in smtp.SUCCESS:
+            # At least one, i.e. all, recipients successfully delivered
+            os.remove(self.names[0]+'-D')
+            os.remove(self.names[0]+'-H')
+        del self.messages[0]
+        del self.names[0]
+
+class SMTPRelayer(RelayerMixin, smtp.SMTPClient):
+    def __init__(self, messagePaths, *args, **kw):
+        smtp.SMTPClient.__init__(self, *args, **kw)
+        self.loadMessages(messagePaths)
+
+class ESMTPRelayer(RelayerMixin, smtp.ESMTPClient):
+    def __init__(self, messagePaths, *args, **kw):
+        smtp.ESMTPClient.__init__(self, *args, **kw)
+        self.loadMessages(messagePaths)
diff --git a/ThirdParty/Twisted/twisted/mail/relaymanager.py b/ThirdParty/Twisted/twisted/mail/relaymanager.py
new file mode 100644
index 0000000..66c777a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/relaymanager.py
@@ -0,0 +1,631 @@
+# -*- test-case-name: twisted.mail.test.test_mail -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Infrastructure for relaying mail through a smart host.
+
+Today, internet e-mail has stopped being peer-to-peer in response to many
+problems, spam (unsolicited bulk mail) among them. Instead, most nodes on
+the internet send all e-mail to a single computer, usually the ISP's,
+though sometimes other schemes, such as SMTP-after-POP, are used. This
+computer is supposedly permanently up and traceable, and will do the work
+of figuring out MXs and connecting to them. This kind of configuration is
+usually termed "smart host", since the host we are connecting to is
+"smart" (and will find MXs and connect to them) rather than just accepting
+mail for a small set of domains.
+
+The classes here are meant to facilitate support for such a configuration
+for the twisted.mail SMTP server.
+"""
+
+import rfc822
+import os
+import time
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+from twisted.python import log
+from twisted.python.failure import Failure
+from twisted.python.compat import set
+from twisted.mail import relay
+from twisted.mail import bounce
+from twisted.internet import protocol
+from twisted.internet.defer import Deferred, DeferredList
+from twisted.internet.error import DNSLookupError
+from twisted.mail import smtp
+from twisted.application import internet
+
+class ManagedRelayerMixin:
+    """SMTP Relayer which notifies a manager
+
+    Notify the manager about successful mail, failed mail
+    and broken connections
+    """
+
+    def __init__(self, manager):
+        self.manager = manager
+
+    def sentMail(self, code, resp, numOk, addresses, log):
+        """Called when e-mail has been sent.
+
+        We will always get 0 or 1 addresses.
+        """
+        message = self.names[0]
+        if code in smtp.SUCCESS:
+            self.manager.notifySuccess(self.factory, message)
+        else:
+            self.manager.notifyFailure(self.factory, message)
+        del self.messages[0]
+        del self.names[0]
+
+    def connectionLost(self, reason):
+        """Called when the connection is broken.
+
+        Notify the manager that we will try to send no more e-mail.
+        """
+        self.manager.notifyDone(self.factory)
+
+class SMTPManagedRelayer(ManagedRelayerMixin, relay.SMTPRelayer):
+    def __init__(self, messages, manager, *args, **kw):
+        """
+        @type messages: C{list} of C{str}
+        @param messages: Filenames of messages to relay
+
+        manager should support .notifySuccess, .notifyFailure
+        and .notifyDone
+        """
+        ManagedRelayerMixin.__init__(self, manager)
+        relay.SMTPRelayer.__init__(self, messages, *args, **kw)
+
+class ESMTPManagedRelayer(ManagedRelayerMixin, relay.ESMTPRelayer):
+    def __init__(self, messages, manager, *args, **kw):
+        """
+        @type messages: C{list} of C{str}
+        @param messages: Filenames of messages to relay
+
+        manager should support .notifySuccess, .notifyFailure
+        and .notifyDone
+        """
+        ManagedRelayerMixin.__init__(self, manager)
+        relay.ESMTPRelayer.__init__(self, messages, *args, **kw)
+
+class SMTPManagedRelayerFactory(protocol.ClientFactory):
+    protocol = SMTPManagedRelayer
+
+    def __init__(self, messages, manager, *args, **kw):
+        self.messages = messages
+        self.manager = manager
+        self.pArgs = args
+        self.pKwArgs = kw
+
+    def buildProtocol(self, addr):
+        protocol = self.protocol(self.messages, self.manager, *self.pArgs,
+            **self.pKwArgs)
+        protocol.factory = self
+        return protocol
+
+    def clientConnectionFailed(self, connector, reason):
+        """Called when the connection could not be made.
+
+        Our manager should be notified that this happened;
+        it might prefer some other host in that case.
+        """
+        self.manager.notifyNoConnection(self)
+        self.manager.notifyDone(self)
+
+class ESMTPManagedRelayerFactory(SMTPManagedRelayerFactory):
+    protocol = ESMTPManagedRelayer
+
+    def __init__(self, messages, manager, secret, contextFactory, *args, **kw):
+        self.secret = secret
+        self.contextFactory = contextFactory
+        SMTPManagedRelayerFactory.__init__(self, messages, manager, *args, **kw)
+
+    def buildProtocol(self, addr):
+        s = self.secret and self.secret(addr)
+        protocol = self.protocol(self.messages, self.manager, s,
+            self.contextFactory, *self.pArgs, **self.pKwArgs)
+        protocol.factory = self
+        return protocol
+
+class Queue:
+    """A queue of ougoing emails."""
+
+    noisy = True
+
+    def __init__(self, directory):
+        self.directory = directory
+        self._init()
+
+    def _init(self):
+        self.n = 0
+        self.waiting = {}
+        self.relayed = {}
+        self.readDirectory()
+
+    def __getstate__(self):
+        """(internal) delete volatile state"""
+        return {'directory' : self.directory}
+
+    def __setstate__(self, state):
+        """(internal) restore volatile state"""
+        self.__dict__.update(state)
+        self._init()
+
+    def readDirectory(self):
+        """Read the messages directory.
+
+        look for new messages.
+        """
+        for message in os.listdir(self.directory):
+            # Skip non data files
+            if message[-2:]!='-D':
+                continue
+            self.addMessage(message[:-2])
+
+    def getWaiting(self):
+        return self.waiting.keys()
+
+    def hasWaiting(self):
+        return len(self.waiting) > 0
+
+    def getRelayed(self):
+        return self.relayed.keys()
+
+    def setRelaying(self, message):
+        del self.waiting[message]
+        self.relayed[message] = 1
+
+    def setWaiting(self, message):
+        del self.relayed[message]
+        self.waiting[message] = 1
+
+    def addMessage(self, message):
+        if message not in self.relayed:
+            self.waiting[message] = 1
+            if self.noisy:
+                log.msg('Set ' + message + ' waiting')
+
+    def done(self, message):
+        """Remove a message from the queue."""
+        message = os.path.basename(message)
+        os.remove(self.getPath(message) + '-D')
+        os.remove(self.getPath(message) + '-H')
+        del self.relayed[message]
+
+    def getPath(self, message):
+        """Get the path in the filesystem of a message."""
+        return os.path.join(self.directory, message)
+
+    def getEnvelope(self, message):
+        return pickle.load(self.getEnvelopeFile(message))
+
+    def getEnvelopeFile(self, message):
+        return open(os.path.join(self.directory, message+'-H'), 'rb')
+
+    def createNewMessage(self):
+        """Create a new message in the queue.
+
+        Return a tuple - file-like object for headers, and ISMTPMessage.
+        """
+        fname = "%s_%s_%s_%s" % (os.getpid(), time.time(), self.n, id(self))
+        self.n = self.n + 1
+        headerFile = open(os.path.join(self.directory, fname+'-H'), 'wb')
+        tempFilename = os.path.join(self.directory, fname+'-C')
+        finalFilename = os.path.join(self.directory, fname+'-D')
+        messageFile = open(tempFilename, 'wb')
+
+        from twisted.mail.mail import FileMessage
+        return headerFile,FileMessage(messageFile, tempFilename, finalFilename)
+
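+# A minimal usage sketch (hypothetical spool directory; this mirrors what
+# DomainQueuer.startMessage in twisted.mail.relay does with a Queue):
+#
+#   queue = Queue("/var/spool/twisted-mail")
+#   envelopeFile, message = queue.createNewMessage()
+#   try:
+#       pickle.dump(["alice@example.com", "bob@example.org"], envelopeFile)
+#   finally:
+#       envelopeFile.close()
+#   # Feed the body to message.lineReceived(...) and finish with
+#   # message.eomReceived(); the completed '-D' file is then picked up by
+#   # readDirectory()/getWaiting() on the next checkState() pass.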
+
+class _AttemptManager(object):
+    """
+    Manage the state of a single attempt to flush the relay queue.
+    """
+    def __init__(self, manager):
+        self.manager = manager
+        self._completionDeferreds = []
+
+
+    def getCompletionDeferred(self):
+        self._completionDeferreds.append(Deferred())
+        return self._completionDeferreds[-1]
+
+
+    def _finish(self, relay, message):
+        self.manager.managed[relay].remove(os.path.basename(message))
+        self.manager.queue.done(message)
+
+
+    def notifySuccess(self, relay, message):
+        """A relay sent a message successfully.
+
+        Mark it as sent in our lists.
+        """
+        if self.manager.queue.noisy:
+            log.msg("success sending %s, removing from queue" % message)
+        self._finish(relay, message)
+
+
+    def notifyFailure(self, relay, message):
+        """Relaying the message has failed."""
+        if self.manager.queue.noisy:
+            log.msg("could not relay "+message)
+        # Moshe - Bounce E-mail here
+        # Be careful: if it's a bounced bounce, silently
+        # discard it
+        message = os.path.basename(message)
+        fp = self.manager.queue.getEnvelopeFile(message)
+        from_, to = pickle.load(fp)
+        fp.close()
+        from_, to, bounceMessage = bounce.generateBounce(open(self.manager.queue.getPath(message)+'-D'), from_, to)
+        fp, outgoingMessage = self.manager.queue.createNewMessage()
+        pickle.dump([from_, to], fp)
+        fp.close()
+        for line in bounceMessage.splitlines():
+             outgoingMessage.lineReceived(line)
+        outgoingMessage.eomReceived()
+        self._finish(relay, self.manager.queue.getPath(message))
+
+
+    def notifyDone(self, relay):
+        """A relaying SMTP client is disconnected.
+
+        Unmark all pending messages under this relay's responsibility
+        as being relayed, and remove the relay.
+        """
+        for message in self.manager.managed.get(relay, ()):
+            if self.manager.queue.noisy:
+                log.msg("Setting " + message + " waiting")
+            self.manager.queue.setWaiting(message)
+        try:
+            del self.manager.managed[relay]
+        except KeyError:
+            pass
+        notifications = self._completionDeferreds
+        self._completionDeferreds = None
+        for d in notifications:
+            d.callback(None)
+
+
+    def notifyNoConnection(self, relay):
+        """Relaying SMTP client couldn't connect.
+
+        Useful because it tells us our upstream server is unavailable.
+        """
+        # Back off a bit
+        try:
+            msgs = self.manager.managed[relay]
+        except KeyError:
+            log.msg("notifyNoConnection passed unknown relay!")
+            return
+
+        if self.manager.queue.noisy:
+            log.msg("Backing off on delivery of " + str(msgs))
+        def setWaiting(queue, messages):
+            map(queue.setWaiting, messages)
+        from twisted.internet import reactor
+        reactor.callLater(30, setWaiting, self.manager.queue, msgs)
+        del self.manager.managed[relay]
+
+
+
+class SmartHostSMTPRelayingManager:
+    """Manage SMTP Relayers
+
+    Manage SMTP relayers, keeping track of the existing connections,
+    each connection's responsibility in term of messages. Create
+    more relayers if the need arises.
+
+    Someone should press .checkState periodically
+
+    @ivar fArgs: Additional positional arguments used to instantiate
+    C{factory}.
+
+    @ivar fKwArgs: Additional keyword arguments used to instantiate
+    C{factory}.
+
+    @ivar factory: A callable which returns a ClientFactory suitable for
+    making SMTP connections.
+    """
+
+    factory = SMTPManagedRelayerFactory
+
+    PORT = 25
+
+    mxcalc = None
+
+    def __init__(self, queue, maxConnections=2, maxMessagesPerConnection=10):
+        """
+        @type queue: Any implementor of C{IQueue}
+        @param queue: The object used to queue messages on their way to
+        delivery.
+
+        @type maxConnections: C{int}
+        @param maxConnections: The maximum number of SMTP connections to
+        allow to be opened at any given time.
+
+        @type maxMessagesPerConnection: C{int}
+        @param maxMessagesPerConnection: The maximum number of messages a
+        relayer will be given responsibility for.
+
+        Default values are meant for a small box with 1-5 users.
+        """
+        self.maxConnections = maxConnections
+        self.maxMessagesPerConnection = maxMessagesPerConnection
+        self.managed = {} # SMTP clients we're managing
+        self.queue = queue
+        self.fArgs = ()
+        self.fKwArgs = {}
+
+    def __getstate__(self):
+        """(internal) delete volatile state"""
+        dct = self.__dict__.copy()
+        del dct['managed']
+        return dct
+
+    def __setstate__(self, state):
+        """(internal) restore volatile state"""
+        self.__dict__.update(state)
+        self.managed = {}
+
+    def checkState(self):
+        """
+        Synchronize with the state of the world, and maybe launch a new
+        relay.
+
+        Call me periodically to check I am still up to date.
+
+        @return: None or a Deferred which fires when all of the SMTP clients
+        started by this call have disconnected.
+        """
+        self.queue.readDirectory()
+        if len(self.managed) >= self.maxConnections:
+            return
+        if not self.queue.hasWaiting():
+            return
+
+        return self._checkStateMX()
+
+    def _checkStateMX(self):
+        nextMessages = self.queue.getWaiting()
+        nextMessages.reverse()
+
+        exchanges = {}
+        for msg in nextMessages:
+            from_, to = self.queue.getEnvelope(msg)
+            name, addr = rfc822.parseaddr(to)
+            parts = addr.split('@', 1)
+            if len(parts) != 2:
+                log.err("Illegal message destination: " + to)
+                continue
+            domain = parts[1]
+
+            self.queue.setRelaying(msg)
+            exchanges.setdefault(domain, []).append(self.queue.getPath(msg))
+            if len(exchanges) >= (self.maxConnections - len(self.managed)):
+                break
+
+        if self.mxcalc is None:
+            self.mxcalc = MXCalculator()
+
+        relays = []
+        for (domain, msgs) in exchanges.iteritems():
+            manager = _AttemptManager(self)
+            factory = self.factory(msgs, manager, *self.fArgs, **self.fKwArgs)
+            self.managed[factory] = map(os.path.basename, msgs)
+            relayAttemptDeferred = manager.getCompletionDeferred()
+            connectSetupDeferred = self.mxcalc.getMX(domain)
+            connectSetupDeferred.addCallback(lambda mx: str(mx.name))
+            connectSetupDeferred.addCallback(self._cbExchange, self.PORT, factory)
+            connectSetupDeferred.addErrback(lambda err: (relayAttemptDeferred.errback(err), err)[1])
+            connectSetupDeferred.addErrback(self._ebExchange, factory, domain)
+            relays.append(relayAttemptDeferred)
+        return DeferredList(relays)
+
+
+    def _cbExchange(self, address, port, factory):
+        from twisted.internet import reactor
+        reactor.connectTCP(address, port, factory)
+
+    def _ebExchange(self, failure, factory, domain):
+        log.err('Error setting up managed relay factory for ' + domain)
+        log.err(failure)
+        def setWaiting(queue, messages):
+            map(queue.setWaiting, messages)
+        from twisted.internet import reactor
+        reactor.callLater(30, setWaiting, self.queue, self.managed[factory])
+        del self.managed[factory]
+
+class SmartHostESMTPRelayingManager(SmartHostSMTPRelayingManager):
+    factory = ESMTPManagedRelayerFactory
+
+def _checkState(manager):
+    manager.checkState()
+
+def RelayStateHelper(manager, delay):
+    return internet.TimerService(delay, _checkState, manager)
+
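+# A minimal wiring sketch (hypothetical names; 'application' stands for a
+# twisted.application.service parent service):
+#
+#   queue = Queue("/var/spool/twisted-mail")
+#   manager = SmartHostSMTPRelayingManager(queue)
+#   RelayStateHelper(manager, 60).setServiceParent(application)
+#
+#   # Every 60 seconds the TimerService calls manager.checkState(), which
+#   # re-reads the queue directory and spins up relayers as needed.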
+
+
+class CanonicalNameLoop(Exception):
+    """
+    When trying to look up the MX record for a host, a set of CNAME records was
+    found which form a cycle and resolution was abandoned.
+    """
+
+
+class CanonicalNameChainTooLong(Exception):
+    """
+    When trying to look up the MX record for a host, too many CNAME records
+    which point to other CNAME records were encountered and resolution was
+    abandoned.
+    """
+
+
+class MXCalculator:
+    """
+    A utility for looking up mail exchange hosts and tracking whether they are
+    working or not.
+
+    @ivar clock: L{IReactorTime} provider which will be used to decide when to
+        retry mail exchanges which have not been working.
+    """
+    timeOutBadMX = 60 * 60 # One hour
+    fallbackToDomain = True
+
+    def __init__(self, resolver=None, clock=None):
+        self.badMXs = {}
+        if resolver is None:
+            from twisted.names.client import createResolver
+            resolver = createResolver()
+        self.resolver = resolver
+        if clock is None:
+            from twisted.internet import reactor as clock
+        self.clock = clock
+
+
+    def markBad(self, mx):
+        """Indicate a given mx host is not currently functioning.
+
+        @type mx: C{str}
+        @param mx: The hostname of the host which is down.
+        """
+        self.badMXs[str(mx)] = self.clock.seconds() + self.timeOutBadMX
+
+    def markGood(self, mx):
+        """Indicate a given mx host is back online.
+
+        @type mx: C{str}
+        @param mx: The hostname of the host which is up.
+        """
+        try:
+            del self.badMXs[mx]
+        except KeyError:
+            pass
+
+    def getMX(self, domain, maximumCanonicalChainLength=3):
+        """
+        Find an MX record for the given domain.
+
+        @type domain: C{str}
+        @param domain: The domain name for which to look up an MX record.
+
+        @type maximumCanonicalChainLength: C{int}
+        @param maximumCanonicalChainLength: The maximum number of unique CNAME
+            records to follow while looking up the MX record.
+
+        @return: A L{Deferred} which is called back with the found MX record
+            (from whose C{name} the exchange host can be read) or which is
+            errbacked if no MX record can be found.
+        """
+        mailExchangeDeferred = self.resolver.lookupMailExchange(domain)
+        mailExchangeDeferred.addCallback(self._filterRecords)
+        mailExchangeDeferred.addCallback(
+            self._cbMX, domain, maximumCanonicalChainLength)
+        mailExchangeDeferred.addErrback(self._ebMX, domain)
+        return mailExchangeDeferred
+
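+    # A minimal usage sketch (hypothetical domain; the Deferred fires with a
+    # Record_MX, whose .name holds the exchange host):
+    #
+    #   calc = MXCalculator()
+    #   d = calc.getMX("example.com")
+    #   d.addCallback(lambda mx: log.msg("preferred MX: %s" % (mx.name,)))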
+
+    def _filterRecords(self, records):
+        """
+        Convert a DNS response (a three-tuple of lists of RRHeaders) into a
+        mapping from record names to lists of corresponding record payloads.
+        """
+        recordBag = {}
+        for answer in records[0]:
+            recordBag.setdefault(str(answer.name), []).append(answer.payload)
+        return recordBag
+
+
+    def _cbMX(self, answers, domain, cnamesLeft):
+        """
+        Try to find the MX host from the given DNS information.
+
+        This will attempt to resolve CNAME results.  It can recognize loops
+        and will give up on non-cyclic chains after a specified number of
+        lookups.
+        """
+        # Do this import here so that relaymanager.py doesn't depend on
+        # twisted.names, only MXCalculator will.
+        from twisted.names import dns, error
+
+        seenAliases = set()
+        exchanges = []
+        # Examine the answers for the domain we asked about
+        pertinentRecords = answers.get(domain, [])
+        while pertinentRecords:
+            record = pertinentRecords.pop()
+
+            # If it's a CNAME, we'll need to do some more processing
+            if record.TYPE == dns.CNAME:
+
+                # Remember that this name was an alias.
+                seenAliases.add(domain)
+
+                canonicalName = str(record.name)
+                # See if we have some local records which might be relevant.
+                if canonicalName in answers:
+
+                    # Make sure it isn't a loop contained entirely within the
+                    # results we have here.
+                    if canonicalName in seenAliases:
+                        return Failure(CanonicalNameLoop(record))
+
+                    pertinentRecords = answers[canonicalName]
+                    exchanges = []
+                else:
+                    if cnamesLeft:
+                        # Request more information from the server.
+                        return self.getMX(canonicalName, cnamesLeft - 1)
+                    else:
+                        # Give up.
+                        return Failure(CanonicalNameChainTooLong(record))
+
+            # If it's an MX, collect it.
+            if record.TYPE == dns.MX:
+                exchanges.append((record.preference, record))
+
+        if exchanges:
+            exchanges.sort()
+            for (preference, record) in exchanges:
+                host = str(record.name)
+                if host not in self.badMXs:
+                    return record
+                t = self.clock.seconds() - self.badMXs[host]
+                if t >= 0:
+                    del self.badMXs[host]
+                    return record
+            return exchanges[0][1]
+        else:
+            # Treat no answers the same as an error - jump to the errback to try
+            # to look up an A record.  This provides behavior described as a
+            # special case in RFC 974 in the section headed I{Interpreting the
+            # List of MX RRs}.
+            return Failure(
+                error.DNSNameError("No MX records for %r" % (domain,)))
+
+
+    def _ebMX(self, failure, domain):
+        from twisted.names import error, dns
+
+        if self.fallbackToDomain:
+            failure.trap(error.DNSNameError)
+            log.msg("MX lookup failed; attempting to use hostname (%s) directly" % (domain,))
+
+            # Alright, I admit, this is a bit icky.
+            d = self.resolver.getHostByName(domain)
+            def cbResolved(addr):
+                return dns.Record_MX(name=addr)
+            def ebResolved(err):
+                err.trap(error.DNSNameError)
+                raise DNSLookupError()
+            d.addCallbacks(cbResolved, ebResolved)
+            return d
+        elif failure.check(error.DNSNameError):
+            raise IOError("No MX found for %r" % (domain,))
+        return failure
diff --git a/ThirdParty/Twisted/twisted/mail/scripts/__init__.py b/ThirdParty/Twisted/twisted/mail/scripts/__init__.py
new file mode 100644
index 0000000..f653cc7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/scripts/__init__.py
@@ -0,0 +1 @@
+"mail scripts"
diff --git a/ThirdParty/Twisted/twisted/mail/scripts/mailmail.py b/ThirdParty/Twisted/twisted/mail/scripts/mailmail.py
new file mode 100644
index 0000000..a045e82
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/scripts/mailmail.py
@@ -0,0 +1,366 @@
+# -*- test-case-name: twisted.mail.test.test_mailmail -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation module for the I{mailmail} command.
+"""
+
+import os
+import sys
+import rfc822
+import getpass
+from ConfigParser import ConfigParser
+
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+
+from twisted.copyright import version
+from twisted.internet import reactor
+from twisted.mail import smtp
+
+GLOBAL_CFG = "/etc/mailmail"
+LOCAL_CFG = os.path.expanduser("~/.twisted/mailmail")
+SMARTHOST = '127.0.0.1'
+
+ERROR_FMT = """\
+Subject: Failed Message Delivery
+
+  Message delivery failed.  The following occurred:
+
+  %s
+--
+The Twisted sendmail application.
+"""
+
+def log(message, *args):
+    sys.stderr.write(str(message) % args + '\n')
+
+class Options:
+    """
+    @type to: C{list} of C{str}
+    @ivar to: The addresses to which to deliver this message.
+
+    @type sender: C{str}
+    @ivar sender: The address from which this message is being sent.
+
+    @type body: C{file}
+    @ivar body: The object from which the message is to be read.
+    """
+
+def getlogin():
+    try:
+        return os.getlogin()
+    except:
+        return getpass.getuser()
+
+
+_unsupportedOption = SystemExit("Unsupported option.")
+
+def parseOptions(argv):
+    o = Options()
+    o.to = [e for e in argv if not e.startswith('-')]
+    o.sender = getlogin()
+
+    # Just be very stupid
+
+    # Skip -bm -- it is the default
+
+    # Add a non-standard option for querying the version of this tool.
+    if '--version' in argv:
+        print 'mailmail version:', version
+        raise SystemExit()
+
+    # -bp lists queue information.  Screw that.
+    if '-bp' in argv:
+        raise _unsupportedOption
+
+    # -bs makes sendmail use stdin/stdout as its transport.  Screw that.
+    if '-bs' in argv:
+        raise _unsupportedOption
+
+    # -F sets who the mail is from, but is overridable by the From header
+    if '-F' in argv:
+        o.sender = argv[argv.index('-F') + 1]
+        o.to.remove(o.sender)
+
+    # -i and -oi make us ignore lone "."
+    if ('-i' in argv) or ('-oi' in argv):
+        raise _unsupportedOption
+
+    # -odb is background delivery
+    if '-odb' in argv:
+        o.background = True
+    else:
+        o.background = False
+
+    # -odf is foreground delivery; only override -odb when it is actually
+    # present, otherwise the option above would be a no-op.
+    if '-odf' in argv:
+        o.background = False
+
+    # -oem and -em cause errors to be mailed back to the sender.
+    # It is also the default.
+
+    # -oep and -ep cause errors to be printed to stderr
+    if ('-oep' in argv) or ('-ep' in argv):
+        o.printErrors = True
+    else:
+        o.printErrors = False
+
+    # -om causes a copy of the message to be sent to the sender if the sender
+    # appears in an alias expansion.  We do not support aliases.
+    if '-om' in argv:
+        raise _unsupportedOption
+
+    # -t causes us to pick the recipients of the message from the To, Cc, and Bcc
+    # headers, and to remove the Bcc header if present.
+    if '-t' in argv:
+        o.recipientsFromHeaders = True
+        o.excludeAddresses = o.to
+        o.to = []
+    else:
+        o.recipientsFromHeaders = False
+        o.excludeAddresses = []
+
+    requiredHeaders = {
+        'from': [],
+        'to': [],
+        'cc': [],
+        'bcc': [],
+        'date': [],
+    }
+
+    headers = []
+    buffer = StringIO.StringIO()
+    while 1:
+        write = 1
+        line = sys.stdin.readline()
+        if not line.strip():
+            break
+
+        hdrs = line.split(': ', 1)
+
+        hdr = hdrs[0].lower()
+        if o.recipientsFromHeaders and hdr in ('to', 'cc', 'bcc'):
+            o.to.extend([
+                a[1] for a in rfc822.AddressList(hdrs[1]).addresslist
+            ])
+            if hdr == 'bcc':
+                write = 0
+        elif hdr == 'from':
+            o.sender = rfc822.parseaddr(hdrs[1])[1]
+
+        if hdr in requiredHeaders:
+            requiredHeaders[hdr].append(hdrs[1])
+
+        if write:
+            buffer.write(line)
+
+    if not requiredHeaders['from']:
+        buffer.write('From: %s\r\n' % (o.sender,))
+    if not requiredHeaders['to']:
+        if not o.to:
+            raise SystemExit("No recipients specified.")
+        buffer.write('To: %s\r\n' % (', '.join(o.to),))
+    if not requiredHeaders['date']:
+        buffer.write('Date: %s\r\n' % (smtp.rfc822date(),))
+
+    buffer.write(line)
+
+    if o.recipientsFromHeaders:
+        for a in o.excludeAddresses:
+            try:
+                o.to.remove(a)
+            except ValueError:
+                pass
+
+    buffer.seek(0, 0)
+    o.body = StringIO.StringIO(buffer.getvalue() + sys.stdin.read())
+    return o
+
+class Configuration:
+    """
+    @ivar allowUIDs: A list of UIDs which are allowed to send mail.
+    @ivar allowGIDs: A list of GIDs which are allowed to send mail.
+    @ivar denyUIDs: A list of UIDs which are not allowed to send mail.
+    @ivar denyGIDs: A list of GIDs which are not allowed to send mail.
+
+    @type defaultAccess: C{bool}
+    @ivar defaultAccess: C{True} if access will be allowed when no other access
+    control rule matches or C{False} if it will be denied in that case.
+
+    @ivar useraccess: Either C{'allow'} to check C{allowUIDs} first
+    or C{'deny'} to check C{denyUIDs} first.
+
+    @ivar groupaccess: Either C{'allow'} to check C{allowGIDs} first or
+    C{'deny'} to check C{denyGIDs} first.
+
+    @ivar identities: A C{dict} mapping hostnames to credentials to use when
+    sending mail to that host.
+
+    @ivar smarthost: C{None} or a hostname through which all outgoing mail will
+    be sent.
+
+    @ivar domain: C{None} or the hostname with which to identify ourselves when
+    connecting to an MTA.
+    """
+    def __init__(self):
+        self.allowUIDs = []
+        self.denyUIDs = []
+        self.allowGIDs = []
+        self.denyGIDs = []
+        self.useraccess = 'deny'
+        self.groupaccess = 'deny'
+
+        self.identities = {}
+        self.smarthost = None
+        self.domain = None
+
+        self.defaultAccess = True
+
+
+def loadConfig(path):
+    # [useraccess]
+    # allow=uid1,uid2,...
+    # deny=uid1,uid2,...
+    # order=allow,deny
+    # [groupaccess]
+    # allow=gid1,gid2,...
+    # deny=gid1,gid2,...
+    # order=deny,allow
+    # [identity]
+    # host1=username:password
+    # host2=username:password
+    # [addresses]
+    # smarthost=a.b.c.d
+    # default_domain=x.y.z
+
+    c = Configuration()
+
+    if not os.access(path, os.R_OK):
+        return c
+
+    p = ConfigParser()
+    p.read(path)
+
+    au = c.allowUIDs
+    du = c.denyUIDs
+    ag = c.allowGIDs
+    dg = c.denyGIDs
+    for (section, a, d) in (('useraccess', au, du), ('groupaccess', ag, dg)):
+        if p.has_section(section):
+            for (mode, L) in (('allow', a), ('deny', d)):
+                if p.has_option(section, mode) and p.get(section, mode):
+                    for id in p.get(section, mode).split(','):
+                        try:
+                            id = int(id)
+                        except ValueError:
+                            log("Illegal %sID in [%s] section: %s", section[0].upper(), section, id)
+                        else:
+                            L.append(id)
+            order = p.get(section, 'order')
+            order = map(str.strip, map(str.lower, order.split(',')))
+            if order[0] == 'allow':
+                setattr(c, section, 'allow')
+            else:
+                setattr(c, section, 'deny')
+
+    if p.has_section('identity'):
+        for (host, up) in p.items('identity'):
+            parts = up.split(':', 1)
+            if len(parts) != 2:
+                log("Illegal entry in [identity] section: %s", up)
+                continue
+            c.identities[host] = parts
+
+    if p.has_section('addresses'):
+        if p.has_option('addresses', 'smarthost'):
+            c.smarthost = p.get('addresses', 'smarthost')
+        if p.has_option('addresses', 'default_domain'):
+            c.domain = p.get('addresses', 'default_domain')
+
+    return c
+
+def success(result):
+    reactor.stop()
+
+failed = None
+def failure(f):
+    global failed
+    reactor.stop()
+    failed = f
+
+def sendmail(host, options, ident):
+    d = smtp.sendmail(host, options.sender, options.to, options.body)
+    d.addCallbacks(success, failure)
+    reactor.run()
+
+def senderror(failure, options):
+    recipient = [options.sender]
+    sender = '"Internally Generated Message (%s)"<postmaster@%s>' % (sys.argv[0], smtp.DNSNAME)
+    error = StringIO.StringIO()
+    failure.printTraceback(file=error)
+    body = StringIO.StringIO(ERROR_FMT % error.getvalue())
+
+    d = smtp.sendmail('localhost', sender, recipient, body)
+    d.addBoth(lambda _: reactor.stop())
+
+def deny(conf):
+    uid = os.getuid()
+    gid = os.getgid()
+
+    if conf.useraccess == 'deny':
+        if uid in conf.denyUIDs:
+            return True
+        if uid in conf.allowUIDs:
+            return False
+    else:
+        if uid in conf.allowUIDs:
+            return False
+        if uid in conf.denyUIDs:
+            return True
+
+    if conf.groupaccess == 'deny':
+        if gid in conf.denyGIDs:
+            return True
+        if gid in conf.allowGIDs:
+            return False
+    else:
+        if gid in conf.allowGIDs:
+            return False
+        if gid in conf.denyGIDs:
+            return True
+
+    return not conf.defaultAccess
+
+def run():
+    o = parseOptions(sys.argv[1:])
+    gConf = loadConfig(GLOBAL_CFG)
+    lConf = loadConfig(LOCAL_CFG)
+
+    if deny(gConf) or deny(lConf):
+        log("Permission denied")
+        return
+
+    host = lConf.smarthost or gConf.smarthost or SMARTHOST
+
+    ident = gConf.identities.copy()
+    ident.update(lConf.identities)
+
+    if lConf.domain:
+        smtp.DNSNAME = lConf.domain
+    elif gConf.domain:
+        smtp.DNSNAME = gConf.domain
+
+    sendmail(host, o, ident)
+
+    if failed:
+        if o.printErrors:
+            failed.printTraceback(file=sys.stderr)
+            raise SystemExit(1)
+        else:
+            senderror(failed, o)
diff --git a/ThirdParty/Twisted/twisted/mail/smtp.py b/ThirdParty/Twisted/twisted/mail/smtp.py
new file mode 100644
index 0000000..3b8bd0a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/smtp.py
@@ -0,0 +1,1934 @@
+# -*- test-case-name: twisted.mail.test.test_smtp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Simple Mail Transfer Protocol implementation.
+"""
+
+import time, re, base64, types, socket, os, random, rfc822
+import binascii
+from email.base64MIME import encode as encode_base64
+
+from zope.interface import implements, Interface
+
+from twisted.copyright import longversion
+from twisted.protocols import basic
+from twisted.protocols import policies
+from twisted.internet import protocol
+from twisted.internet import defer
+from twisted.internet import error
+from twisted.internet import reactor
+from twisted.internet.interfaces import ITLSTransport
+from twisted.python import log
+from twisted.python import util
+
+from twisted import cred
+from twisted.python.runtime import platform
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+# Cache the hostname (XXX Yes - this is broken)
+if platform.isMacOSX():
+    # On OS X, getfqdn() is ridiculously slow - use the
+    # probably-identical-but-sometimes-not gethostname() there.
+    DNSNAME = socket.gethostname()
+else:
+    DNSNAME = socket.getfqdn()
+
+# Used for fast success code lookup
+SUCCESS = dict.fromkeys(xrange(200,300))
+
+class IMessageDelivery(Interface):
+    def receivedHeader(helo, origin, recipients):
+        """
+        Generate the Received header for a message
+
+        @type helo: C{(str, str)}
+        @param helo: The argument to the HELO command and the client's IP
+        address.
+
+        @type origin: C{Address}
+        @param origin: The address the message is from
+
+        @type recipients: C{list} of L{User}
+        @param recipients: A list of the addresses for which this message
+        is bound.
+
+        @rtype: C{str}
+        @return: The full \"Received\" header string.
+        """
+
+    def validateTo(user):
+        """
+        Validate the address for which the message is destined.
+
+        @type user: C{User}
+        @param user: The address to validate.
+
+        @rtype: no-argument callable
+        @return: A no-argument callable which returns an object implementing
+        C{IMessage}, or a C{Deferred} which fires with such a callable.
+        The callable will be invoked and the returned object used to deliver
+        the message when it arrives.
+
+        @raise SMTPBadRcpt: Raised if messages to the address are
+        not to be accepted.
+        """
+
+    def validateFrom(helo, origin):
+        """
+        Validate the address from which the message originates.
+
+        @type helo: C{(str, str)}
+        @param helo: The argument to the HELO command and the client's IP
+        address.
+
+        @type origin: C{Address}
+        @param origin: The address the message is from
+
+        @rtype: C{Deferred} or C{Address}
+        @return: C{origin} or a C{Deferred} whose callback will be
+        passed C{origin}.
+
+        @raise SMTPBadSender: Raised if messages from this address are
+        not to be accepted.
+        """
+
+class IMessageDeliveryFactory(Interface):
+    """An alternate interface to implement for handling message delivery.
+
+    It is useful to implement this interface instead of L{IMessageDelivery}
+    directly because it allows the implementor to distinguish between
+    different messages delivered over the same connection.  This can be
+    used to optimize delivery of a single message to multiple recipients,
+    something which cannot be done by L{IMessageDelivery} implementors
+    due to their lack of information.
+    """
+    def getMessageDelivery():
+        """Return an L{IMessageDelivery} object.
+
+        This will be called once per message.
+        """
+
+class SMTPError(Exception):
+    pass
+
+
+
+class SMTPClientError(SMTPError):
+    """Base class for SMTP client errors.
+    """
+    def __init__(self, code, resp, log=None, addresses=None, isFatal=False, retry=False):
+        """
+        @param code: The SMTP response code associated with this error.
+        @param resp: The string response associated with this error.
+
+        @param log: A string log of the exchange leading up to and including
+            the error.
+        @type log: L{str}
+
+        @param isFatal: A boolean indicating whether this connection can
+            proceed or not.  If True, the connection will be dropped.
+
+        @param retry: A boolean indicating whether the delivery should be
+            retried.  If True and the factory indicates further retries are
+            desirable, they will be attempted, otherwise the delivery will
+            be failed.
+        """
+        self.code = code
+        self.resp = resp
+        self.log = log
+        self.addresses = addresses
+        self.isFatal = isFatal
+        self.retry = retry
+
+
+    def __str__(self):
+        if self.code > 0:
+            res = ["%.3d %s" % (self.code, self.resp)]
+        else:
+            res = [self.resp]
+        if self.log:
+            res.append(self.log)
+            res.append('')
+        return '\n'.join(res)
+
+
+class ESMTPClientError(SMTPClientError):
+    """Base class for ESMTP client errors.
+    """
+
+class EHLORequiredError(ESMTPClientError):
+    """The server does not support EHLO.
+
+    This is considered a non-fatal error (the connection will not be
+    dropped).
+    """
+
+class AUTHRequiredError(ESMTPClientError):
+    """Authentication was required but the server does not support it.
+
+    This is considered a non-fatal error (the connection will not be
+    dropped).
+    """
+
+class TLSRequiredError(ESMTPClientError):
+    """Transport security was required but the server does not support it.
+
+    This is considered a non-fatal error (the connection will not be
+    dropped).
+    """
+
+class AUTHDeclinedError(ESMTPClientError):
+    """The server rejected our credentials.
+
+    Either the username, password, or challenge response
+    given to the server was rejected.
+
+    This is considered a non-fatal error (the connection will not be
+    dropped).
+    """
+
+class AuthenticationError(ESMTPClientError):
+    """An error ocurred while authenticating.
+
+    Either the server rejected our request for authentication or the
+    challenge received was malformed.
+
+    This is considered a non-fatal error (the connection will not be
+    dropped).
+    """
+
+class TLSError(ESMTPClientError):
+    """An error occurred while negiotiating for transport security.
+
+    This is considered a non-fatal error (the connection will not be
+    dropped).
+    """
+
+class SMTPConnectError(SMTPClientError):
+    """Failed to connect to the mail exchange host.
+
+    This is considered a fatal error.  A retry will be made.
+    """
+    def __init__(self, code, resp, log=None, addresses=None, isFatal=True, retry=True):
+        SMTPClientError.__init__(self, code, resp, log, addresses, isFatal, retry)
+
+class SMTPTimeoutError(SMTPClientError):
+    """Failed to receive a response from the server in the expected time period.
+
+    This is considered a fatal error.  A retry will be made.
+    """
+    def __init__(self, code, resp, log=None, addresses=None, isFatal=True, retry=True):
+        SMTPClientError.__init__(self, code, resp, log, addresses, isFatal, retry)
+
+class SMTPProtocolError(SMTPClientError):
+    """The server sent a mangled response.
+
+    This is considered a fatal error.  A retry will not be made.
+    """
+    def __init__(self, code, resp, log=None, addresses=None, isFatal=True, retry=False):
+        SMTPClientError.__init__(self, code, resp, log, addresses, isFatal, retry)
+
+class SMTPDeliveryError(SMTPClientError):
+    """Indicates that a delivery attempt has had an error.
+    """
+
+class SMTPServerError(SMTPError):
+    def __init__(self, code, resp):
+        self.code = code
+        self.resp = resp
+
+    def __str__(self):
+        return "%.3d %s" % (self.code, self.resp)
+
+class SMTPAddressError(SMTPServerError):
+    def __init__(self, addr, code, resp):
+        SMTPServerError.__init__(self, code, resp)
+        self.addr = Address(addr)
+
+    def __str__(self):
+        return "%.3d <%s>... %s" % (self.code, self.addr, self.resp)
+
+class SMTPBadRcpt(SMTPAddressError):
+    def __init__(self, addr, code=550,
+                 resp='Cannot receive for specified address'):
+        SMTPAddressError.__init__(self, addr, code, resp)
+
+class SMTPBadSender(SMTPAddressError):
+    def __init__(self, addr, code=550, resp='Sender not acceptable'):
+        SMTPAddressError.__init__(self, addr, code, resp)
+
+def rfc822date(timeinfo=None,local=1):
+    """
+    Format an RFC-2822 compliant date string.
+
+    @param timeinfo: (optional) A sequence as returned by C{time.localtime()}
+        or C{time.gmtime()}. Default is now.
+    @param local: (optional) Indicates if the supplied time is local or
+        universal time, or if no time is given, whether now should be local or
+        universal time. Default is local, as suggested (SHOULD) by rfc-2822.
+
+    @returns: A string representing the time and date in RFC-2822 format.
+    """
+    if not timeinfo:
+        if local:
+            timeinfo = time.localtime()
+        else:
+            timeinfo = time.gmtime()
+    if local:
+        if timeinfo[8]:
+            # DST
+            tz = -time.altzone
+        else:
+            tz = -time.timezone
+
+        (tzhr, tzmin) = divmod(abs(tz), 3600)
+        if tz:
+            tzhr *= int(abs(tz)//tz)
+        (tzmin, tzsec) = divmod(tzmin, 60)
+    else:
+        (tzhr, tzmin) = (0,0)
+
+    return "%s, %02d %s %04d %02d:%02d:%02d %+03d%02d" % (
+        ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun'][timeinfo[6]],
+        timeinfo[2],
+        ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+         'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'][timeinfo[1] - 1],
+        timeinfo[0], timeinfo[3], timeinfo[4], timeinfo[5],
+        tzhr, tzmin)
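+
+# A minimal usage sketch (illustrative; the default call uses local time):
+#
+#   rfc822date(time.gmtime(0), local=0)
+#   # -> 'Thu, 01 Jan 1970 00:00:00 +0000'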
+
+def idGenerator():
+    i = 0
+    while True:
+        yield i
+        i += 1
+
+def messageid(uniq=None, N=idGenerator().next):
+    """Return a globally unique random string in RFC 2822 Message-ID format
+
+    <datetime.pid.random@host.dom.ain>
+
+    Optional uniq string will be added to strengthen uniqueness if given.
+    """
+    datetime = time.strftime('%Y%m%d%H%M%S', time.gmtime())
+    pid = os.getpid()
+    rand = random.randrange(2**31L-1)
+    if uniq is None:
+        uniq = ''
+    else:
+        uniq = '.' + uniq
+
+    return '<%s.%s.%s%s.%s@%s>' % (datetime, pid, rand, uniq, N(), DNSNAME)
+
+def quoteaddr(addr):
+    """Turn an email address, possibly with realname part etc, into
+    a form suitable for and SMTP envelope.
+    """
+
+    if isinstance(addr, Address):
+        return '<%s>' % str(addr)
+
+    res = rfc822.parseaddr(addr)
+
+    if res == (None, None):
+        # It didn't parse, use it as-is
+        return '<%s>' % str(addr)
+    else:
+        return '<%s>' % str(res[1])
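+
+# For example (hypothetical address):
+#
+#   quoteaddr('Alice Example <alice@example.com>')  # -> '<alice@example.com>'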
+
+COMMAND, DATA, AUTH = 'COMMAND', 'DATA', 'AUTH'
+
+class AddressError(SMTPError):
+    "Parse error in address"
+
+# Character classes for parsing addresses
+atom = r"[-A-Za-z0-9!\#$%&'*+/=?^_`{|}~]"
+
+class Address:
+    """Parse and hold an RFC 2821 address.
+
+    Source routes are stripped and ignored, UUCP-style bang-paths
+    and %-style routing are not parsed.
+
+    @type domain: C{str}
+    @ivar domain: The domain within which this address resides.
+
+    @type local: C{str}
+    @ivar local: The local (\"user\") portion of this address.
+    """
+
+    tstring = re.compile(r'''( # A string of
+                          (?:"[^"]*" # quoted string
+                          |\\. # backslash-escaped character
+                          |''' + atom + r''' # atom character
+                          )+|.) # or any single character''',re.X)
+    atomre = re.compile(atom) # match any one atom character
+
+    def __init__(self, addr, defaultDomain=None):
+        if isinstance(addr, User):
+            addr = addr.dest
+        if isinstance(addr, Address):
+            self.__dict__ = addr.__dict__.copy()
+            return
+        elif not isinstance(addr, types.StringTypes):
+            addr = str(addr)
+        self.addrstr = addr
+
+        # Tokenize
+        atl = filter(None,self.tstring.split(addr))
+
+        local = []
+        domain = []
+
+        while atl:
+            if atl[0] == '<':
+                if atl[-1] != '>':
+                    raise AddressError, "Unbalanced <>"
+                atl = atl[1:-1]
+            elif atl[0] == '@':
+                atl = atl[1:]
+                if not local:
+                    # Source route
+                    while atl and atl[0] != ':':
+                        # remove it
+                        atl = atl[1:]
+                    if not atl:
+                        raise AddressError, "Malformed source route"
+                    atl = atl[1:] # remove :
+                elif domain:
+                    raise AddressError, "Too many @"
+                else:
+                    # Now in domain
+                    domain = ['']
+            elif len(atl[0]) == 1 and not self.atomre.match(atl[0]) and atl[0] !=  '.':
+                raise AddressError, "Parse error at %r of %r" % (atl[0], (addr, atl))
+            else:
+                if not domain:
+                    local.append(atl[0])
+                else:
+                    domain.append(atl[0])
+                atl = atl[1:]
+
+        self.local = ''.join(local)
+        self.domain = ''.join(domain)
+        if self.local != '' and self.domain == '':
+            if defaultDomain is None:
+                defaultDomain = DNSNAME
+            self.domain = defaultDomain
+
+    dequotebs = re.compile(r'\\(.)')
+
+    def dequote(self,addr):
+        """Remove RFC-2821 quotes from address."""
+        res = []
+
+        atl = filter(None,self.tstring.split(str(addr)))
+
+        for t in atl:
+            if t[0] == '"' and t[-1] == '"':
+                res.append(t[1:-1])
+            elif '\\' in t:
+                res.append(self.dequotebs.sub(r'\1',t))
+            else:
+                res.append(t)
+
+        return ''.join(res)
+
+    def __str__(self):
+        if self.local or self.domain:
+            return '@'.join((self.local, self.domain))
+        else:
+            return ''
+
+    def __repr__(self):
+        return "%s.%s(%s)" % (self.__module__, self.__class__.__name__,
+                              repr(str(self)))
+
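+# A minimal usage sketch (hypothetical addresses):
+#
+#   str(Address("<bob@example.com>"))   # -> 'bob@example.com'
+#   Address("bob").domain               # -> DNSNAME (the default domain)
+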
+class User:
+    """Hold information about and SMTP message recipient,
+    including information on where the message came from
+    """
+
+    def __init__(self, destination, helo, protocol, orig):
+        host = getattr(protocol, 'host', None)
+        self.dest = Address(destination, host)
+        self.helo = helo
+        self.protocol = protocol
+        if isinstance(orig, Address):
+            self.orig = orig
+        else:
+            self.orig = Address(orig, host)
+
+    def __getstate__(self):
+        """Helper for pickle.
+
+        protocol isn't picklable, but we want User to be, so skip it in
+        the pickle.
+        """
+        return { 'dest' : self.dest,
+                 'helo' : self.helo,
+                 'protocol' : None,
+                 'orig' : self.orig }
+
+    def __str__(self):
+        return str(self.dest)
+
+class IMessage(Interface):
+    """Interface definition for messages that can be sent via SMTP."""
+
+    def lineReceived(line):
+        """handle another line"""
+
+    def eomReceived():
+        """handle end of message
+
+        return a deferred. The deferred should be called with either:
+        callback(string) or errback(error)
+        """
+
+    def connectionLost():
+        """handle message truncated
+
+        semantics should be to discard the message
+        """
+
+class SMTP(basic.LineOnlyReceiver, policies.TimeoutMixin):
+    """SMTP server-side protocol."""
+
+    timeout = 600
+    host = DNSNAME
+    portal = None
+
+    # Control whether we log SMTP events
+    noisy = True
+
+    # A factory for IMessageDelivery objects.  If an
+    # avatar implementing IMessageDeliveryFactory can
+    # be acquired from the portal, it will be used to
+    # create a new IMessageDelivery object for each
+    # message which is received.
+    deliveryFactory = None
+
+    # An IMessageDelivery object.  A new instance is
+    # used for each message received if we can get an
+    # IMessageDeliveryFactory from the portal.  Otherwise,
+    # a single instance is used throughout the lifetime
+    # of the connection.
+    delivery = None
+
+    # Cred cleanup function.
+    _onLogout = None
+
+    def __init__(self, delivery=None, deliveryFactory=None):
+        self.mode = COMMAND
+        self._from = None
+        self._helo = None
+        self._to = []
+        self.delivery = delivery
+        self.deliveryFactory = deliveryFactory
+
+    def timeoutConnection(self):
+        msg = '%s Timeout. Try talking faster next time!' % (self.host,)
+        self.sendCode(421, msg)
+        self.transport.loseConnection()
+
+    def greeting(self):
+        return '%s NO UCE NO UBE NO RELAY PROBES' % (self.host,)
+
+    def connectionMade(self):
+        # Ensure user-code always gets something sane for _helo
+        peer = self.transport.getPeer()
+        try:
+            host = peer.host
+        except AttributeError: # not an IPv4Address
+            host = str(peer)
+        self._helo = (None, host)
+        self.sendCode(220, self.greeting())
+        self.setTimeout(self.timeout)
+
+    def sendCode(self, code, message=''):
+        "Send an SMTP code with a message."
+        lines = message.splitlines()
+        lastline = lines[-1:]
+        for line in lines[:-1]:
+            self.sendLine('%3.3d-%s' % (code, line))
+        self.sendLine('%3.3d %s' % (code,
+                                    lastline and lastline[0] or ''))
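+
+    # For example (illustrative), sendCode(250, 'first line\nsecond line')
+    # writes the following to the transport:
+    #
+    #   250-first line
+    #   250 second line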
+
+    def lineReceived(self, line):
+        self.resetTimeout()
+        return getattr(self, 'state_' + self.mode)(line)
+
+    def state_COMMAND(self, line):
+        # Ignore leading and trailing whitespace, as well as an arbitrary
+        # amount of whitespace between the command and its argument; this
+        # is not required by the protocol, but it is a nice thing to do.
+        line = line.strip()
+
+        parts = line.split(None, 1)
+        if parts:
+            method = self.lookupMethod(parts[0]) or self.do_UNKNOWN
+            if len(parts) == 2:
+                method(parts[1])
+            else:
+                method('')
+        else:
+            self.sendSyntaxError()
+
+    def sendSyntaxError(self):
+        self.sendCode(500, 'Error: bad syntax')
+
+    def lookupMethod(self, command):
+        return getattr(self, 'do_' + command.upper(), None)
+
+    def lineLengthExceeded(self, line):
+        if self.mode is DATA:
+            for message in self.__messages:
+                message.connectionLost()
+            self.mode = COMMAND
+            del self.__messages
+        self.sendCode(500, 'Line too long')
+
+    def do_UNKNOWN(self, rest):
+        self.sendCode(500, 'Command not implemented')
+
+    def do_HELO(self, rest):
+        peer = self.transport.getPeer()
+        try:
+            host = peer.host
+        except AttributeError:
+            host = str(peer)
+        self._helo = (rest, host)
+        self._from = None
+        self._to = []
+        self.sendCode(250, '%s Hello %s, nice to meet you' % (self.host, host))
+
+    def do_QUIT(self, rest):
+        self.sendCode(221, 'See you later')
+        self.transport.loseConnection()
+
+    # A string composed of quoted strings, backslash-escaped characters, or
+    # atom characters plus '@.,:'
+    qstring = r'("[^"]*"|\\.|' + atom + r'|[@.,:])+'
+
+    mail_re = re.compile(r'''\s*FROM:\s*(?P<path><> # Empty <>
+                         |<''' + qstring + r'''> # <addr>
+                         |''' + qstring + r''' # addr
+                         )\s*(\s(?P<opts>.*))? # Optional WS + ESMTP options
+                         $''',re.I|re.X)
+    rcpt_re = re.compile(r'\s*TO:\s*(?P<path><' + qstring + r'''> # <addr>
+                         |''' + qstring + r''' # addr
+                         )\s*(\s(?P<opts>.*))? # Optional WS + ESMTP options
+                         $''',re.I|re.X)
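+    # Worked example (illustrative, addresses are hypothetical): for the
+    # argument "FROM:<alice@example.com> SIZE=1000", mail_re yields
+    # group('path') == '<alice@example.com>' and group('opts') == 'SIZE=1000';
+    # the empty reverse-path 'FROM:<>' is accepted via the explicit '<>'
+    # alternative.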
+
+    def do_MAIL(self, rest):
+        if self._from:
+            self.sendCode(503,"Only one sender per message, please")
+            return
+        # Clear old recipient list
+        self._to = []
+        m = self.mail_re.match(rest)
+        if not m:
+            self.sendCode(501, "Syntax error")
+            return
+
+        try:
+            addr = Address(m.group('path'), self.host)
+        except AddressError, e:
+            self.sendCode(553, str(e))
+            return
+
+        validated = defer.maybeDeferred(self.validateFrom, self._helo, addr)
+        validated.addCallbacks(self._cbFromValidate, self._ebFromValidate)
+
+
+    def _cbFromValidate(self, from_, code=250, msg='Sender address accepted'):
+        self._from = from_
+        self.sendCode(code, msg)
+
+
+    def _ebFromValidate(self, failure):
+        if failure.check(SMTPBadSender):
+            self.sendCode(failure.value.code,
+                          'Cannot receive from specified address %s: %s'
+                          % (quoteaddr(failure.value.addr), failure.value.resp))
+        elif failure.check(SMTPServerError):
+            self.sendCode(failure.value.code, failure.value.resp)
+        else:
+            log.err(failure, "SMTP sender validation failure")
+            self.sendCode(
+                451,
+                'Requested action aborted: local error in processing')
+
+
+    def do_RCPT(self, rest):
+        if not self._from:
+            self.sendCode(503, "Must have sender before recipient")
+            return
+        m = self.rcpt_re.match(rest)
+        if not m:
+            self.sendCode(501, "Syntax error")
+            return
+
+        try:
+            user = User(m.group('path'), self._helo, self, self._from)
+        except AddressError, e:
+            self.sendCode(553, str(e))
+            return
+
+        d = defer.maybeDeferred(self.validateTo, user)
+        d.addCallbacks(
+            self._cbToValidate,
+            self._ebToValidate,
+            callbackArgs=(user,)
+        )
+
+    def _cbToValidate(self, to, user=None, code=250, msg='Recipient address accepted'):
+        if user is None:
+            user = to
+        self._to.append((user, to))
+        self.sendCode(code, msg)
+
+    def _ebToValidate(self, failure):
+        if failure.check(SMTPBadRcpt, SMTPServerError):
+            self.sendCode(failure.value.code, failure.value.resp)
+        else:
+            log.err(failure)
+            self.sendCode(
+                451,
+                'Requested action aborted: local error in processing'
+            )
+
+    def _disconnect(self, msgs):
+        for msg in msgs:
+            try:
+                msg.connectionLost()
+            except:
+                log.msg("msg raised exception from connectionLost")
+                log.err()
+
+    def do_DATA(self, rest):
+        if self._from is None or (not self._to):
+            self.sendCode(503, 'Must have valid receiver and originator')
+            return
+        self.mode = DATA
+        helo, origin = self._helo, self._from
+        recipients = self._to
+
+        self._from = None
+        self._to = []
+        self.datafailed = None
+
+        msgs = []
+        for (user, msgFunc) in recipients:
+            try:
+                msg = msgFunc()
+                rcvdhdr = self.receivedHeader(helo, origin, [user])
+                if rcvdhdr:
+                    msg.lineReceived(rcvdhdr)
+                msgs.append(msg)
+            except SMTPServerError, e:
+                self.sendCode(e.code, e.resp)
+                self.mode = COMMAND
+                self._disconnect(msgs)
+                return
+            except:
+                log.err()
+                self.sendCode(550, "Internal server error")
+                self.mode = COMMAND
+                self._disconnect(msgs)
+                return
+        self.__messages = msgs
+
+        self.__inheader = self.__inbody = 0
+        self.sendCode(354, 'Continue')
+
+        if self.noisy:
+            fmt = 'Receiving message for delivery: from=%s to=%s'
+            log.msg(fmt % (origin, [str(u) for (u, f) in recipients]))
+
+    def connectionLost(self, reason):
+        # self.sendCode(421, 'Dropping connection.') # This does nothing...
+        # Ideally, if we (rather than the other side) lose the connection,
+        # we should be able to tell the other side that we are going away.
+        # RFC-2821 requires that we try.
+        if self.mode is DATA:
+            try:
+                for message in self.__messages:
+                    try:
+                        message.connectionLost()
+                    except:
+                        log.err()
+                del self.__messages
+            except AttributeError:
+                pass
+        if self._onLogout:
+            self._onLogout()
+            self._onLogout = None
+        self.setTimeout(None)
+
+    def do_RSET(self, rest):
+        self._from = None
+        self._to = []
+        self.sendCode(250, 'I remember nothing.')
+
+    def dataLineReceived(self, line):
+        if line[:1] == '.':
+            if line == '.':
+                self.mode = COMMAND
+                if self.datafailed:
+                    self.sendCode(self.datafailed.code,
+                                  self.datafailed.resp)
+                    return
+                if not self.__messages:
+                    self._messageHandled("thrown away")
+                    return
+                defer.DeferredList([
+                    m.eomReceived() for m in self.__messages
+                ], consumeErrors=True).addCallback(self._messageHandled
+                                                   )
+                del self.__messages
+                return
+            line = line[1:]
+
+        if self.datafailed:
+            return
+
+        try:
+            # Add a blank line between the generated Received:-header
+            # and the message body if the message comes in without any
+            # headers
+            if not self.__inheader and not self.__inbody:
+                if ':' in line:
+                    self.__inheader = 1
+                elif line:
+                    for message in self.__messages:
+                        message.lineReceived('')
+                    self.__inbody = 1
+
+            if not line:
+                self.__inbody = 1
+
+            for message in self.__messages:
+                message.lineReceived(line)
+        except SMTPServerError, e:
+            self.datafailed = e
+            for message in self.__messages:
+                message.connectionLost()
+    state_DATA = dataLineReceived
+
+    def _messageHandled(self, resultList):
+        failures = 0
+        for (success, result) in resultList:
+            if not success:
+                failures += 1
+                log.err(result)
+        if failures:
+            msg = 'Could not send e-mail'
+            L = len(resultList)
+            if L > 1:
+                msg += ' (%d failures out of %d recipients)' % (failures, L)
+            self.sendCode(550, msg)
+        else:
+            self.sendCode(250, 'Delivery in progress')
+
+
+    def _cbAnonymousAuthentication(self, (iface, avatar, logout)):
+        """
+        Save the state resulting from a successful anonymous cred login.
+        """
+        if issubclass(iface, IMessageDeliveryFactory):
+            self.deliveryFactory = avatar
+            self.delivery = None
+        elif issubclass(iface, IMessageDelivery):
+            self.deliveryFactory = None
+            self.delivery = avatar
+        else:
+            raise RuntimeError("%s is not a supported interface" % (iface.__name__,))
+        self._onLogout = logout
+        self.challenger = None
+
+
+    # overridable methods:
+    def validateFrom(self, helo, origin):
+        """
+        Validate the address from which the message originates.
+
+        @type helo: C{(str, str)}
+        @param helo: The argument to the HELO command and the client's IP
+        address.
+
+        @type origin: C{Address}
+        @param origin: The address the message is from
+
+        @rtype: C{Deferred} or C{Address}
+        @return: C{origin} or a C{Deferred} whose callback will be
+        passed C{origin}.
+
+        @raise SMTPBadSender: Raised if messages from this address are
+        not to be accepted.
+        """
+        if self.deliveryFactory is not None:
+            self.delivery = self.deliveryFactory.getMessageDelivery()
+
+        if self.delivery is not None:
+            return defer.maybeDeferred(self.delivery.validateFrom,
+                                       helo, origin)
+
+        # No login has been performed, no default delivery object has been
+        # provided: try to perform an anonymous login and then invoke this
+        # method again.
+        if self.portal:
+
+            result = self.portal.login(
+                cred.credentials.Anonymous(),
+                None,
+                IMessageDeliveryFactory, IMessageDelivery)
+
+            def ebAuthentication(err):
+                """
+                Translate cred exceptions into SMTP exceptions so that the
+                protocol code which invokes C{validateFrom} can properly report
+                the failure.
+                """
+                if err.check(cred.error.UnauthorizedLogin):
+                    exc = SMTPBadSender(origin)
+                elif err.check(cred.error.UnhandledCredentials):
+                    exc = SMTPBadSender(
+                        origin, resp="Unauthenticated senders not allowed")
+                else:
+                    return err
+                return defer.fail(exc)
+
+            result.addCallbacks(
+                self._cbAnonymousAuthentication, ebAuthentication)
+
+            def continueValidation(ignored):
+                """
+                Re-attempt from address validation.
+                """
+                return self.validateFrom(helo, origin)
+
+            result.addCallback(continueValidation)
+            return result
+
+        raise SMTPBadSender(origin)
+
+
+    def validateTo(self, user):
+        """
+        Validate the address for which the message is destined.
+
+        @type user: C{User}
+        @param user: The address to validate.
+
+        @rtype: no-argument callable
+        @return: A no-argument callable which returns an object implementing
+        C{IMessage}, or a C{Deferred} which fires with such a callable.  The
+        callable will be invoked and the returned object used to deliver the
+        message when it arrives.
+
+        @raise SMTPBadRcpt: Raised if messages to the address are
+        not to be accepted.
+        """
+        if self.delivery is not None:
+            return self.delivery.validateTo(user)
+        raise SMTPBadRcpt(user)
+
+    def receivedHeader(self, helo, origin, recipients):
+        if self.delivery is not None:
+            return self.delivery.receivedHeader(helo, origin, recipients)
+
+        heloStr = ""
+        if helo[0]:
+            heloStr = " helo=%s" % (helo[0],)
+        domain = self.transport.getHost().host
+        from_ = "from %s ([%s]%s)" % (helo[0], helo[1], heloStr)
+        by = "by %s with %s (%s)" % (domain,
+                                     self.__class__.__name__,
+                                     longversion)
+        for_ = "for %s; %s" % (' '.join(map(str, recipients)),
+                               rfc822date())
+        return "Received: %s\n\t%s\n\t%s" % (from_, by, for_)
+
+    def startMessage(self, recipients):
+        if self.delivery:
+            return self.delivery.startMessage(recipients)
+        return []
+
+
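+# Illustrative sketch (not part of the upstream module): the overridable
+# validateFrom/validateTo hooks above are the intended customisation points
+# for subclasses.  The class below is hypothetical and accepts mail for a
+# single domain only, handing each accepted recipient the in-memory message
+# sketch defined after IMessage above.
+class _ExampleDomainSMTP(SMTP):
+    acceptedDomain = 'example.com'
+
+    def validateFrom(self, helo, origin):
+        # Accept any sender; a Deferred firing with origin would also do.
+        return origin
+
+    def validateTo(self, user):
+        if user.dest.domain == self.acceptedDomain:
+            # Return a no-argument callable producing an IMessage provider.
+            return lambda: _ExampleInMemoryMessage()
+        raise SMTPBadRcpt(user)
+
+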
+class SMTPFactory(protocol.ServerFactory):
+    """Factory for SMTP."""
+
+    # override in instances or subclasses
+    domain = DNSNAME
+    timeout = 600
+    protocol = SMTP
+
+    portal = None
+
+    def __init__(self, portal = None):
+        self.portal = portal
+
+    def buildProtocol(self, addr):
+        p = protocol.ServerFactory.buildProtocol(self, addr)
+        p.portal = self.portal
+        p.host = self.domain
+        return p
+
+class SMTPClient(basic.LineReceiver, policies.TimeoutMixin):
+    """
+    SMTP client for sending emails.
+    
+    After the client has connected to the SMTP server, it repeatedly calls
+    L{SMTPClient.getMailFrom}, L{SMTPClient.getMailTo} and
+    L{SMTPClient.getMailData} and uses this information to send an email.
+    It then calls L{SMTPClient.getMailFrom} again; if it returns C{None}, the
+    client will disconnect; otherwise it will continue as normal, i.e. call
+    L{SMTPClient.getMailTo} and L{SMTPClient.getMailData} and send a new email.
+    """
+
+    # If enabled, log SMTP client/server communication
+    debug = True
+
+    # Number of seconds to wait before timing out a connection.  If
+    # None, perform no timeout checking.
+    timeout = None
+
+    def __init__(self, identity, logsize=10):
+        self.identity = identity or ''
+        self.toAddressesResult = []
+        self.successAddresses = []
+        self._from = None
+        self.resp = []
+        self.code = -1
+        self.log = util.LineLog(logsize)
+
+    def sendLine(self, line):
+        # Log sendLine only if you are in debug mode for performance
+        if self.debug:
+            self.log.append('>>> ' + line)
+
+        basic.LineReceiver.sendLine(self,line)
+
+    def connectionMade(self):
+        self.setTimeout(self.timeout)
+
+        self._expected = [ 220 ]
+        self._okresponse = self.smtpState_helo
+        self._failresponse = self.smtpConnectionFailed
+
+    def connectionLost(self, reason=protocol.connectionDone):
+        """We are no longer connected"""
+        self.setTimeout(None)
+        self.mailFile = None
+
+    def timeoutConnection(self):
+        self.sendError(
+            SMTPTimeoutError(
+                -1, "Timeout waiting for SMTP server response",
+                 self.log.str()))
+
+    def lineReceived(self, line):
+        self.resetTimeout()
+
+        # Log lineReceived only if you are in debug mode for performance
+        if self.debug:
+            self.log.append('<<< ' + line)
+
+        why = None
+
+        try:
+            self.code = int(line[:3])
+        except ValueError:
+            # This is a fatal error and will disconnect the transport; lineReceived will not be called again
+            self.sendError(SMTPProtocolError(-1, "Invalid response from SMTP server: %s" % line, self.log.str()))
+            return
+
+        if line[0] == '0':
+            # Verbose informational message, ignore it
+            return
+
+        self.resp.append(line[4:])
+
+        if line[3:4] == '-':
+            # continuation
+            return
+
+        if self.code in self._expected:
+            why = self._okresponse(self.code,'\n'.join(self.resp))
+        else:
+            why = self._failresponse(self.code,'\n'.join(self.resp))
+
+        self.code = -1
+        self.resp = []
+        return why
+
+    def smtpConnectionFailed(self, code, resp):
+        self.sendError(SMTPConnectError(code, resp, self.log.str()))
+
+    def smtpTransferFailed(self, code, resp):
+        if code < 0:
+            self.sendError(SMTPProtocolError(code, resp, self.log.str()))
+        else:
+            self.smtpState_msgSent(code, resp)
+
+    def smtpState_helo(self, code, resp):
+        self.sendLine('HELO ' + self.identity)
+        self._expected = SUCCESS
+        self._okresponse = self.smtpState_from
+
+    def smtpState_from(self, code, resp):
+        self._from = self.getMailFrom()
+        self._failresponse = self.smtpTransferFailed
+        if self._from is not None:
+            self.sendLine('MAIL FROM:%s' % quoteaddr(self._from))
+            self._expected = [250]
+            self._okresponse = self.smtpState_to
+        else:
+            # All messages have been sent, disconnect
+            self._disconnectFromServer()
+
+    def smtpState_disconnect(self, code, resp):
+        self.transport.loseConnection()
+
+    def smtpState_to(self, code, resp):
+        self.toAddresses = iter(self.getMailTo())
+        self.toAddressesResult = []
+        self.successAddresses = []
+        self._okresponse = self.smtpState_toOrData
+        self._expected = xrange(0,1000)
+        self.lastAddress = None
+        return self.smtpState_toOrData(0, '')
+
+    def smtpState_toOrData(self, code, resp):
+        if self.lastAddress is not None:
+            self.toAddressesResult.append((self.lastAddress, code, resp))
+            if code in SUCCESS:
+                self.successAddresses.append(self.lastAddress)
+        try:
+            self.lastAddress = self.toAddresses.next()
+        except StopIteration:
+            if self.successAddresses:
+                self.sendLine('DATA')
+                self._expected = [ 354 ]
+                self._okresponse = self.smtpState_data
+            else:
+                return self.smtpState_msgSent(code,'No recipients accepted')
+        else:
+            self.sendLine('RCPT TO:%s' % quoteaddr(self.lastAddress))
+
+    def smtpState_data(self, code, resp):
+        s = basic.FileSender()
+        d = s.beginFileTransfer(
+            self.getMailData(), self.transport, self.transformChunk)
+        def ebTransfer(err):
+            self.sendError(err.value)
+        d.addCallbacks(self.finishedFileTransfer, ebTransfer)
+        self._expected = SUCCESS
+        self._okresponse = self.smtpState_msgSent
+
+
+    def smtpState_msgSent(self, code, resp):
+        if self._from is not None:
+            self.sentMail(code, resp, len(self.successAddresses),
+                          self.toAddressesResult, self.log)
+
+        self.toAddressesResult = []
+        self._from = None
+        self.sendLine('RSET')
+        self._expected = SUCCESS
+        self._okresponse = self.smtpState_from
+
+    ##
+    ## Helpers for FileSender
+    ##
+    def transformChunk(self, chunk):
+        """
+        Perform the necessary local to network newline conversion and escape
+        leading periods.
+
+        This method also resets the idle timeout so that as long as progress is
+        being made sending the message body, the client will not time out.
+        """
+        self.resetTimeout()
+        return chunk.replace('\n', '\r\n').replace('\r\n.', '\r\n..')
+
+    def finishedFileTransfer(self, lastsent):
+        if lastsent != '\n':
+            line = '\r\n.'
+        else:
+            line = '.'
+        self.sendLine(line)
+
+    ##
+    # these methods should be overridden in subclasses; see the illustrative
+    # sketch after this class for a minimal example
+    def getMailFrom(self):
+        """Return the email address the mail is from."""
+        raise NotImplementedError
+
+    def getMailTo(self):
+        """Return a list of emails to send to."""
+        raise NotImplementedError
+
+    def getMailData(self):
+        """Return file-like object containing data of message to be sent.
+
+        Lines in the file should be delimited by '\\n'.
+        """
+        raise NotImplementedError
+
+    def sendError(self, exc):
+        """
+        If an error occurs before a mail message is sent, sendError will be
+        called.  This base class method sends a QUIT if the error is
+        non-fatal and disconnects the connection.
+
+        @param exc: The SMTPClientError (or child class) raised
+        @type exc: C{SMTPClientError}
+        """
+        if isinstance(exc, SMTPClientError) and not exc.isFatal:
+            self._disconnectFromServer()
+        else:
+            # If the error was fatal then the communication channel with the
+            # SMTP Server is broken so just close the transport connection
+            self.smtpState_disconnect(-1, None)
+
+
+    def sentMail(self, code, resp, numOk, addresses, log):
+        """Called when an attempt to send an email is completed.
+
+        If some addresses were accepted, code and resp are the response
+        to the DATA command. If no addresses were accepted, code is -1
+        and resp is an informative message.
+
+        @param code: the code returned by the SMTP Server
+        @param resp: The string response returned from the SMTP Server
+        @param numOk: the number of addresses accepted by the remote host.
+        @param addresses: is a list of tuples (address, code, resp) listing
+                          the response to each RCPT command.
+        @param log: is the SMTP session log
+        """
+        raise NotImplementedError
+
+    def _disconnectFromServer(self):
+        self._expected = xrange(0, 1000)
+        self._okresponse = self.smtpState_disconnect
+        self.sendLine('QUIT')
+
+
+
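+# Illustrative sketch (not part of the upstream module): the smallest useful
+# SMTPClient subclass, sending one hard-coded message and then disconnecting
+# (getMailFrom returns None on the second pass).  The class name, addresses
+# and message text are hypothetical.
+class _ExampleOneShotClient(SMTPClient):
+    def __init__(self, *args, **kw):
+        SMTPClient.__init__(self, *args, **kw)
+        self._sent = False
+
+    def getMailFrom(self):
+        if self._sent:
+            return None    # nothing left to send; the client will QUIT
+        self._sent = True
+        return 'sender@example.com'
+
+    def getMailTo(self):
+        return ['recipient@example.com']
+
+    def getMailData(self):
+        return StringIO('Subject: test\n\nHello from twisted.mail.smtp\n')
+
+    def sentMail(self, code, resp, numOk, addresses, log):
+        # A real subclass would record the result or fire a Deferred here.
+        pass
+
+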
+class ESMTPClient(SMTPClient):
+    # Fall back to HELO if the server does not support EHLO
+    heloFallback = True
+
+    # Refuse to proceed if authentication cannot be performed
+    requireAuthentication = False
+
+    # Refuse to proceed if TLS is not available
+    requireTransportSecurity = False
+
+    # Indicate whether or not our transport can be considered secure.
+    tlsMode = False
+
+    # ClientContextFactory to use for STARTTLS
+    context = None
+
+    def __init__(self, secret, contextFactory=None, *args, **kw):
+        SMTPClient.__init__(self, *args, **kw)
+        self.authenticators = []
+        self.secret = secret
+        self.context = contextFactory
+        self.tlsMode = False
+
+
+    def esmtpEHLORequired(self, code=-1, resp=None):
+        self.sendError(EHLORequiredError(502, "Server does not support ESMTP Authentication", self.log.str()))
+
+
+    def esmtpAUTHRequired(self, code=-1, resp=None):
+        tmp = []
+
+        for a in self.authenticators:
+            tmp.append(a.getName().upper())
+
+        auth = "[%s]" % ', '.join(tmp)
+
+        self.sendError(AUTHRequiredError(502, "Server does not support Client Authentication schemes %s" % auth,
+                                         self.log.str()))
+
+
+    def esmtpTLSRequired(self, code=-1, resp=None):
+        self.sendError(TLSRequiredError(502, "Server does not support secure communication via TLS / SSL",
+                                        self.log.str()))
+
+    def esmtpTLSFailed(self, code=-1, resp=None):
+        self.sendError(TLSError(code, "Could not complete the SSL/TLS handshake", self.log.str()))
+
+    def esmtpAUTHDeclined(self, code=-1, resp=None):
+        self.sendError(AUTHDeclinedError(code, resp, self.log.str()))
+
+    def esmtpAUTHMalformedChallenge(self, code=-1, resp=None):
+        str =  "Login failed because the SMTP Server returned a malformed Authentication Challenge"
+        self.sendError(AuthenticationError(501, str, self.log.str()))
+
+    def esmtpAUTHServerError(self, code=-1, resp=None):
+        self.sendError(AuthenticationError(code, resp, self.log.str()))
+
+    def registerAuthenticator(self, auth):
+        """Registers an Authenticator with the ESMTPClient. The ESMTPClient
+           will attempt to login to the SMTP Server in the order the
+           Authenticators are registered. The most secure Authentication
+           mechanism should be registered first.
+
+           @param auth: The Authentication mechanism to register
+           @type auth: class implementing C{IClientAuthentication}
+        """
+
+        self.authenticators.append(auth)
+
+    def connectionMade(self):
+        SMTPClient.connectionMade(self)
+        self._okresponse = self.esmtpState_ehlo
+
+    def esmtpState_ehlo(self, code, resp):
+        self._expected = SUCCESS
+
+        self._okresponse = self.esmtpState_serverConfig
+        self._failresponse = self.esmtpEHLORequired
+
+        if self.heloFallback:
+            self._failresponse = self.smtpState_helo
+
+        self.sendLine('EHLO ' + self.identity)
+
+    def esmtpState_serverConfig(self, code, resp):
+        items = {}
+        for line in resp.splitlines():
+            e = line.split(None, 1)
+            if len(e) > 1:
+                items[e[0]] = e[1]
+            else:
+                items[e[0]] = None
+
+        if self.tlsMode:
+            self.authenticate(code, resp, items)
+        else:
+            self.tryTLS(code, resp, items)
+
+    def tryTLS(self, code, resp, items):
+        if self.context and 'STARTTLS' in items:
+            self._expected = [220]
+            self._okresponse = self.esmtpState_starttls
+            self._failresponse = self.esmtpTLSFailed
+            self.sendLine('STARTTLS')
+        elif self.requireTransportSecurity:
+            self.tlsMode = False
+            self.esmtpTLSRequired()
+        else:
+            self.tlsMode = False
+            self.authenticate(code, resp, items)
+
+    def esmtpState_starttls(self, code, resp):
+        try:
+            self.transport.startTLS(self.context)
+            self.tlsMode = True
+        except:
+            log.err()
+            self.esmtpTLSFailed(451)
+
+        # Send another EHLO once TLS has been started to
+        # get the TLS / AUTH schemes. Some servers only allow AUTH in TLS mode.
+        self.esmtpState_ehlo(code, resp)
+
+    def authenticate(self, code, resp, items):
+        if self.secret and items.get('AUTH'):
+            schemes = items['AUTH'].split()
+            tmpSchemes = {}
+
+            #XXX: May want to come up with a more efficient way to do this
+            for s in schemes:
+                tmpSchemes[s.upper()] = 1
+
+            for a in self.authenticators:
+                auth = a.getName().upper()
+
+                if auth in tmpSchemes:
+                    self._authinfo = a
+
+                    # Special condition handled
+                    if auth  == "PLAIN":
+                        self._okresponse = self.smtpState_from
+                        self._failresponse = self._esmtpState_plainAuth
+                        self._expected = [235]
+                        challenge = encode_base64(self._authinfo.challengeResponse(self.secret, 1), eol="")
+                        self.sendLine('AUTH ' + auth + ' ' + challenge)
+                    else:
+                        self._expected = [334]
+                        self._okresponse = self.esmtpState_challenge
+                        # If some error occurs here, the server declined the AUTH
+                        # before the user / password phase. This would be
+                        # a very rare case
+                        self._failresponse = self.esmtpAUTHServerError
+                        self.sendLine('AUTH ' + auth)
+                    return
+
+        if self.requireAuthentication:
+            self.esmtpAUTHRequired()
+        else:
+            self.smtpState_from(code, resp)
+
+    def _esmtpState_plainAuth(self, code, resp):
+        self._okresponse = self.smtpState_from
+        self._failresponse = self.esmtpAUTHDeclined
+        self._expected = [235]
+        challenge = encode_base64(self._authinfo.challengeResponse(self.secret, 2), eol="")
+        self.sendLine('AUTH PLAIN ' + challenge)
+
+    def esmtpState_challenge(self, code, resp):
+        self._authResponse(self._authinfo, resp)
+
+    def _authResponse(self, auth, challenge):
+        self._failresponse = self.esmtpAUTHDeclined
+        try:
+            challenge = base64.decodestring(challenge)
+        except binascii.Error:
+            # Illegal challenge, give up, then quit
+            self.sendLine('*')
+            self._okresponse = self.esmtpAUTHMalformedChallenge
+            self._failresponse = self.esmtpAUTHMalformedChallenge
+        else:
+            resp = auth.challengeResponse(self.secret, challenge)
+            self._expected = [235, 334]
+            self._okresponse = self.smtpState_maybeAuthenticated
+            self.sendLine(encode_base64(resp, eol=""))
+
+
+    def smtpState_maybeAuthenticated(self, code, resp):
+        """
+        Called to handle the next message from the server after sending a
+        response to a SASL challenge.  The server response might be another
+        challenge or it might indicate authentication has succeeded.
+        """
+        if code == 235:
+            # Yes, authenticated!
+            del self._authinfo
+            self.smtpState_from(code, resp)
+        else:
+            # No, not authenticated yet.  Keep trying.
+            self._authResponse(self._authinfo, resp)
+
+
+
+class ESMTP(SMTP):
+
+    ctx = None
+    canStartTLS = False
+    startedTLS = False
+
+    authenticated = False
+
+    def __init__(self, chal = None, contextFactory = None):
+        SMTP.__init__(self)
+        if chal is None:
+            chal = {}
+        self.challengers = chal
+        self.authenticated = False
+        self.ctx = contextFactory
+
+    def connectionMade(self):
+        SMTP.connectionMade(self)
+        self.canStartTLS = ITLSTransport.providedBy(self.transport)
+        self.canStartTLS = self.canStartTLS and (self.ctx is not None)
+
+
+    def greeting(self):
+        return SMTP.greeting(self) + ' ESMTP'
+
+
+    def extensions(self):
+        ext = {'AUTH': self.challengers.keys()}
+        if self.canStartTLS and not self.startedTLS:
+            ext['STARTTLS'] = None
+        return ext
+
+    def lookupMethod(self, command):
+        m = SMTP.lookupMethod(self, command)
+        if m is None:
+            m = getattr(self, 'ext_' + command.upper(), None)
+        return m
+
+    def listExtensions(self):
+        r = []
+        for (c, v) in self.extensions().iteritems():
+            if v is not None:
+                if v:
+                    # Intentionally omit extensions with empty argument lists
+                    r.append('%s %s' % (c, ' '.join(v)))
+            else:
+                r.append(c)
+        return '\n'.join(r)
+
+    def do_EHLO(self, rest):
+        peer = self.transport.getPeer().host
+        self._helo = (rest, peer)
+        self._from = None
+        self._to = []
+        self.sendCode(
+            250,
+            '%s Hello %s, nice to meet you\n%s' % (
+                self.host, peer,
+                self.listExtensions(),
+            )
+        )
+
+    def ext_STARTTLS(self, rest):
+        if self.startedTLS:
+            self.sendCode(503, 'TLS already negotiated')
+        elif self.ctx and self.canStartTLS:
+            self.sendCode(220, 'Begin TLS negotiation now')
+            self.transport.startTLS(self.ctx)
+            self.startedTLS = True
+        else:
+            self.sendCode(454, 'TLS not available')
+
+    def ext_AUTH(self, rest):
+        if self.authenticated:
+            self.sendCode(503, 'Already authenticated')
+            return
+        parts = rest.split(None, 1)
+        chal = self.challengers.get(parts[0].upper(), lambda: None)()
+        if not chal:
+            self.sendCode(504, 'Unrecognized authentication type')
+            return
+
+        self.mode = AUTH
+        self.challenger = chal
+
+        if len(parts) > 1:
+            chal.getChallenge() # Discard it, apparently the client does not
+                                # care about it.
+            rest = parts[1]
+        else:
+            rest = None
+        self.state_AUTH(rest)
+
+
+    def _cbAuthenticated(self, loginInfo):
+        """
+        Save the state resulting from a successful cred login and mark this
+        connection as authenticated.
+        """
+        result = SMTP._cbAnonymousAuthentication(self, loginInfo)
+        self.authenticated = True
+        return result
+
+
+    def _ebAuthenticated(self, reason):
+        """
+        Handle cred login errors by translating them into an SMTP
+        authentication failure response.  Translate all other errors into a
+        generic SMTP error code and log the failure for inspection.  Stop
+        all errors from propagating.
+        """
+        self.challenge = None
+        if reason.check(cred.error.UnauthorizedLogin):
+            self.sendCode(535, 'Authentication failed')
+        else:
+            log.err(reason, "SMTP authentication failure")
+            self.sendCode(
+                451,
+                'Requested action aborted: local error in processing')
+
+
+    def state_AUTH(self, response):
+        """
+        Handle one step of challenge/response authentication.
+
+        @param response: The text of a response. If None, this
+        function has been called as a result of an AUTH command with
+        no initial response. A response of '*' aborts authentication,
+        as per RFC 2554.
+        """
+        if self.portal is None:
+            self.sendCode(454, 'Temporary authentication failure')
+            self.mode = COMMAND
+            return
+
+        if response is None:
+            challenge = self.challenger.getChallenge()
+            encoded = challenge.encode('base64')
+            self.sendCode(334, encoded)
+            return
+
+        if response == '*':
+            self.sendCode(501, 'Authentication aborted')
+            self.challenger = None
+            self.mode = COMMAND
+            return
+
+        try:
+            uncoded = response.decode('base64')
+        except binascii.Error:
+            self.sendCode(501, 'Syntax error in parameters or arguments')
+            self.challenger = None
+            self.mode = COMMAND
+            return
+
+        self.challenger.setResponse(uncoded)
+        if self.challenger.moreChallenges():
+            challenge = self.challenger.getChallenge()
+            coded = challenge.encode('base64')[:-1]
+            self.sendCode(334, coded)
+            return
+
+        self.mode = COMMAND
+        result = self.portal.login(
+            self.challenger, None,
+            IMessageDeliveryFactory, IMessageDelivery)
+        result.addCallback(self._cbAuthenticated)
+        result.addCallback(lambda ign: self.sendCode(235, 'Authentication successful.'))
+        result.addErrback(self._ebAuthenticated)
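+    # Typical exchange handled by ext_AUTH/state_AUTH (illustrative; the
+    # base64 payloads are placeholders):
+    #   C: AUTH CRAM-MD5
+    #   S: 334 <base64 challenge>
+    #   C: <base64 response>
+    #   S: 235 Authentication successful.
+    # A reply of '*' aborts with a 501, and a reply that is not valid base64
+    # is also answered with a 501, as implemented above.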
+
+
+
+class SenderMixin:
+    """Utility class for sending emails easily.
+
+    Use with SMTPSenderFactory or ESMTPSenderFactory.
+    """
+    done = 0
+
+    def getMailFrom(self):
+        if not self.done:
+            self.done = 1
+            return str(self.factory.fromEmail)
+        else:
+            return None
+
+    def getMailTo(self):
+        return self.factory.toEmail
+
+    def getMailData(self):
+        return self.factory.file
+
+    def sendError(self, exc):
+        # Call the base class to close the connection with the SMTP server
+        SMTPClient.sendError(self, exc)
+
+        #  Do not retry connecting to the SMTP server if:
+        #   1. No more retries are left (this allows the correct error to be returned to the errback)
+        #   2. retry is false
+        #   3. The error code is not in the 4xx range (communication errors)
+
+        if (self.factory.retries >= 0 or
+            (not exc.retry and not (exc.code >= 400 and exc.code < 500))):
+            self.factory.sendFinished = 1
+            self.factory.result.errback(exc)
+
+    def sentMail(self, code, resp, numOk, addresses, log):
+        # Do not retry, the SMTP server acknowledged the request
+        self.factory.sendFinished = 1
+        if code not in SUCCESS:
+            errlog = []
+            for addr, acode, aresp in addresses:
+                if acode not in SUCCESS:
+                    errlog.append("%s: %03d %s" % (addr, acode, aresp))
+
+            errlog.append(log.str())
+
+            exc = SMTPDeliveryError(code, resp, '\n'.join(errlog), addresses)
+            self.factory.result.errback(exc)
+        else:
+            self.factory.result.callback((numOk, addresses))
+
+
+class SMTPSender(SenderMixin, SMTPClient):
+    """
+    SMTP protocol that sends a single email based on information it 
+    gets from its factory, a L{SMTPSenderFactory}.
+    """
+
+
+class SMTPSenderFactory(protocol.ClientFactory):
+    """
+    Utility factory for sending emails easily.
+    """
+
+    domain = DNSNAME
+    protocol = SMTPSender
+
+    def __init__(self, fromEmail, toEmail, file, deferred, retries=5,
+                 timeout=None):
+        """
+        @param fromEmail: The RFC 2821 address from which to send this
+        message.
+
+        @param toEmail: A sequence of RFC 2821 addresses to which to
+        send this message.
+
+        @param file: A file-like object containing the message to send.
+
+        @param deferred: A Deferred to callback or errback when sending
+        of this message completes.
+
+        @param retries: The number of times to retry delivery of this
+        message.
+
+        @param timeout: Period, in seconds, for which to wait for
+        server responses, or None to wait forever.
+        """
+        assert isinstance(retries, (int, long))
+
+        if isinstance(toEmail, types.StringTypes):
+            toEmail = [toEmail]
+        self.fromEmail = Address(fromEmail)
+        self.nEmails = len(toEmail)
+        self.toEmail = toEmail
+        self.file = file
+        self.result = deferred
+        self.result.addBoth(self._removeDeferred)
+        self.sendFinished = 0
+
+        self.retries = -retries
+        self.timeout = timeout
+
+    def _removeDeferred(self, argh):
+        del self.result
+        return argh
+
+    def clientConnectionFailed(self, connector, err):
+        self._processConnectionError(connector, err)
+
+    def clientConnectionLost(self, connector, err):
+        self._processConnectionError(connector, err)
+
+    def _processConnectionError(self, connector, err):
+        if self.retries < self.sendFinished <= 0:
+            log.msg("SMTP Client retrying server. Retry: %s" % -self.retries)
+
+            # Rewind the file in case part of it was read while attempting to
+            # send the message.
+            self.file.seek(0, 0)
+            connector.connect()
+            self.retries += 1
+        elif self.sendFinished <= 0:
+            # If we were unable to communicate with the SMTP server, a ConnectionDone will be
+            # returned. We want a clearer error message for debugging.
+            if err.check(error.ConnectionDone):
+                err.value = SMTPConnectError(-1, "Unable to connect to server.")
+            self.result.errback(err.value)
+
+    def buildProtocol(self, addr):
+        p = self.protocol(self.domain, self.nEmails*2+2)
+        p.factory = self
+        p.timeout = self.timeout
+        return p
+
+
+
+from twisted.mail.imap4 import IClientAuthentication
+from twisted.mail.imap4 import CramMD5ClientAuthenticator, LOGINAuthenticator
+from twisted.mail.imap4 import LOGINCredentials as _lcredentials
+
+class LOGINCredentials(_lcredentials):
+    """
+    L{LOGINCredentials} generates challenges for I{LOGIN} authentication.
+
+    For interoperability with Outlook, the challenge generated does not exactly
+    match the one defined in the
+    U{draft specification<http://sepp.oetiker.ch/sasl-2.1.19-ds/draft-murchison-sasl-login-00.txt>}.
+    """
+
+    def __init__(self):
+        _lcredentials.__init__(self)
+        self.challenges = ['Password:', 'Username:']
+
+
+
+class PLAINAuthenticator:
+    implements(IClientAuthentication)
+
+    def __init__(self, user):
+        self.user = user
+
+    def getName(self):
+        return "PLAIN"
+
+    def challengeResponse(self, secret, chal=1):
+        if chal == 1:
+            return "%s\0%s\0%s" % (self.user, self.user, secret)
+        else:
+            return "%s\0%s" % (self.user, secret)
+
+
+
+class ESMTPSender(SenderMixin, ESMTPClient):
+
+    requireAuthentication = True
+    requireTransportSecurity = True
+
+    def __init__(self, username, secret, contextFactory=None, *args, **kw):
+        self.heloFallback = 0
+        self.username = username
+
+        if contextFactory is None:
+            contextFactory = self._getContextFactory()
+
+        ESMTPClient.__init__(self, secret, contextFactory, *args, **kw)
+
+        self._registerAuthenticators()
+
+    def _registerAuthenticators(self):
+        # Register Authenticator in order from most secure to least secure
+        self.registerAuthenticator(CramMD5ClientAuthenticator(self.username))
+        self.registerAuthenticator(LOGINAuthenticator(self.username))
+        self.registerAuthenticator(PLAINAuthenticator(self.username))
+
+    def _getContextFactory(self):
+        if self.context is not None:
+            return self.context
+        try:
+            from twisted.internet import ssl
+        except ImportError:
+            return None
+        else:
+            try:
+                context = ssl.ClientContextFactory()
+                context.method = ssl.SSL.TLSv1_METHOD
+                return context
+            except AttributeError:
+                return None
+
+
+class ESMTPSenderFactory(SMTPSenderFactory):
+    """
+    Utility factory for sending emails easily.
+    """
+
+    protocol = ESMTPSender
+
+    def __init__(self, username, password, fromEmail, toEmail, file,
+                 deferred, retries=5, timeout=None,
+                 contextFactory=None, heloFallback=False,
+                 requireAuthentication=True,
+                 requireTransportSecurity=True):
+
+        SMTPSenderFactory.__init__(self, fromEmail, toEmail, file, deferred, retries, timeout)
+        self.username = username
+        self.password = password
+        self._contextFactory = contextFactory
+        self._heloFallback = heloFallback
+        self._requireAuthentication = requireAuthentication
+        self._requireTransportSecurity = requireTransportSecurity
+
+    def buildProtocol(self, addr):
+        p = self.protocol(self.username, self.password, self._contextFactory, self.domain, self.nEmails*2+2)
+        p.heloFallback = self._heloFallback
+        p.requireAuthentication = self._requireAuthentication
+        p.requireTransportSecurity = self._requireTransportSecurity
+        p.factory = self
+        p.timeout = self.timeout
+        return p
+
+def sendmail(smtphost, from_addr, to_addrs, msg, senderDomainName=None, port=25):
+    """Send an email
+
+    This interface is intended to be a direct replacement for
+    smtplib.SMTP.sendmail() (with the obvious change that
+    you specify the smtphost as well). Also, ESMTP options
+    are not accepted, as we don't do ESMTP yet. I reserve the
+    right to implement the ESMTP options differently.
+
+    @param smtphost: The host the message should be sent to
+    @param from_addr: The (envelope) address sending this mail.
+    @param to_addrs: A list of addresses to send this mail to.  A string will
+        be treated as a list of one address
+    @param msg: The message, including headers, either as a file or a string.
+        File-like objects need to support read() and close(). Lines must be
+        delimited by '\\n'. If you pass something that doesn't look like a
+        file, we try to convert it to a string (so you should be able to
+        pass an email.Message directly, but doing the conversion with
+        email.Generator manually will give you more control over the
+        process).
+
+    @param senderDomainName: Name by which to identify.  If None, try
+    to pick something sane (but this depends on external configuration
+    and may not succeed).
+
+    @param port: Remote port to which to connect.
+
+    @rtype: L{Deferred}
+    @returns: A L{Deferred}; its callback will be called if a message is sent
+        to ANY address, the errback if no message is sent.
+
+        The callback will be called with a tuple (numOk, addresses) where numOk
+        is the number of successful recipient addresses and addresses is a list
+        of tuples (address, code, resp) giving the response to the RCPT command
+        for each address.
+    """
+    if not hasattr(msg,'read'):
+        # It's not a file
+        msg = StringIO(str(msg))
+
+    d = defer.Deferred()
+    factory = SMTPSenderFactory(from_addr, to_addrs, msg, d)
+
+    if senderDomainName is not None:
+        factory.domain = senderDomainName
+
+    reactor.connectTCP(smtphost, port, factory)
+
+    return d
+
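+# Illustrative sketch (not part of the upstream module) of driving sendmail()
+# from a script; the host, addresses and message text are hypothetical:
+#
+#     body = "Subject: test\n\nHello from twisted.mail.smtp\n"
+#     d = sendmail('mail.example.com', 'sender@example.com',
+#                  ['recipient@example.com'], body)
+#     d.addCallback(lambda (numOk, addresses): log.msg(
+#         "accepted for %d recipient(s)" % (numOk,)))
+#     d.addErrback(log.err)
+#     d.addBoth(lambda ign: reactor.stop())
+#     reactor.run()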
+
+
+##
+## Yerg.  Codecs!
+##
+import codecs
+def xtext_encode(s, errors=None):
+    r = []
+    for ch in s:
+        o = ord(ch)
+        if ch == '+' or ch == '=' or o < 33 or o > 126:
+            r.append('+%02X' % o)
+        else:
+            r.append(chr(o))
+    return (''.join(r), len(s))
+
+
+def xtext_decode(s, errors=None):
+    """
+    Decode the xtext-encoded string C{s}.
+    """
+    r = []
+    i = 0
+    while i < len(s):
+        if s[i] == '+':
+            try:
+                r.append(chr(int(s[i + 1:i + 3], 16)))
+            except ValueError:
+                r.append(s[i:i + 3])
+            i += 3
+        else:
+            r.append(s[i])
+            i += 1
+    return (''.join(r), len(s))
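+# Worked example (illustrative): xtext_encode('a+b=c') returns
+# ('a+2Bb+3Dc', 5); '+', '=' and any byte outside the printable 33-126
+# range are replaced by '+' followed by two uppercase hex digits.
+# xtext_decode('a+2Bb+3Dc') reverses this, returning ('a+b=c', 9).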
+
+class xtextStreamReader(codecs.StreamReader):
+    def decode(self, s, errors='strict'):
+        return xtext_decode(s)
+
+class xtextStreamWriter(codecs.StreamWriter):
+    def decode(self, s, errors='strict'):
+        return xtext_encode(s)
+
+def xtext_codec(name):
+    if name == 'xtext':
+        return (xtext_encode, xtext_decode, xtextStreamReader, xtextStreamWriter)
+codecs.register(xtext_codec)
diff --git a/ThirdParty/Twisted/twisted/mail/tap.py b/ThirdParty/Twisted/twisted/mail/tap.py
new file mode 100644
index 0000000..7b974ab
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/tap.py
@@ -0,0 +1,361 @@
+# -*- test-case-name: twisted.mail.test.test_options -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+I am the support module for creating mail servers with twistd
+"""
+
+import os
+import warnings
+
+from twisted.mail import mail
+from twisted.mail import maildir
+from twisted.mail import relay
+from twisted.mail import relaymanager
+from twisted.mail import alias
+
+from twisted.internet import endpoints
+
+from twisted.python import usage
+
+from twisted.cred import checkers
+from twisted.cred import strcred
+
+from twisted.application import internet
+
+
+class Options(usage.Options, strcred.AuthOptionMixin):
+    synopsis = "[options]"
+
+    optParameters = [
+        ["pop3s", "S", 0,
+         "Port to start the POP3-over-SSL server on (0 to disable). "
+         "DEPRECATED: use "
+         "'--pop3 ssl:port:privateKey=pkey.pem:certKey=cert.pem'"],
+
+        ["certificate", "c", None,
+         "Certificate file to use for SSL connections. "
+         "DEPRECATED: use "
+         "'--pop3 ssl:port:privateKey=pkey.pem:certKey=cert.pem'"],
+
+        ["relay", "R", None,
+         "Relay messages according to their envelope 'To', using "
+         "the given path as a queue directory."],
+
+        ["hostname", "H", None,
+         "The hostname by which to identify this server."],
+    ]
+
+    optFlags = [
+        ["esmtp", "E", "Use RFC 1425/1869 SMTP extensions"],
+        ["disable-anonymous", None,
+         "Disallow non-authenticated SMTP connections"],
+        ["no-pop3", None, "Disable the default POP3 server."],
+        ["no-smtp", None, "Disable the default SMTP server."],
+    ]
+
+    _protoDefaults = {
+        "pop3": 8110,
+        "smtp": 8025,
+    }
+
+    compData = usage.Completions(
+                   optActions={"hostname" : usage.CompleteHostnames(),
+                               "certificate" : usage.CompleteFiles("*.pem")}
+                   )
+
+    longdesc = """
+    An SMTP / POP3 email server plugin for twistd.
+
+    Examples:
+
+    1. SMTP and POP server
+
+    twistd mail --maildirdbmdomain=example.com=/tmp/example.com
+    --user=joe=password
+
+    Starts an SMTP server that only accepts emails to joe@example.com and saves
+    them to /tmp/example.com.
+
+    Also starts a POP mail server which will allow a client to log in using
+    username: joe@example.com and password: password and collect any email that
+    has been saved in /tmp/example.com.
+
+    2. SMTP relay
+
+    twistd mail --relay=/tmp/mail_queue
+
+    Starts an SMTP server that accepts emails to any email address and relays
+    them to an appropriate remote SMTP server. Queued emails will be
+    temporarily stored in /tmp/mail_queue.
+    """
+
+    def __init__(self):
+        usage.Options.__init__(self)
+        self.service = mail.MailService()
+        self.last_domain = None
+        for service in self._protoDefaults:
+            self[service] = []
+
+
+    def addEndpoint(self, service, description, certificate=None):
+        """
+        Given a 'service' (pop3 or smtp), add an endpoint.
+        """
+        self[service].append(
+            _toEndpoint(description, certificate=certificate))
+
+
+    def opt_pop3(self, description):
+        """
+        Add a pop3 port listener on the specified endpoint.  You can listen on
+        multiple ports by specifying multiple --pop3 options.  For backwards
+        compatibility, a bare TCP port number can be specified, but this is
+        deprecated. [SSL Example: ssl:8995:privateKey=mycert.pem] [default:
+        tcp:8110]
+        """
+        self.addEndpoint('pop3', description)
+    opt_p = opt_pop3
+
+
+    def opt_smtp(self, description):
+        """
+        Add an smtp port listener on the specified endpoint.  You can listen on
+        multiple ports by specifying multiple --smtp options.  For backwards
+        compatibility, a bare TCP port number can be specified, but this is
+        deprecated.  [SSL Example: ssl:8465:privateKey=mycert.pem] [default:
+        tcp:8025]
+        """
+        self.addEndpoint('smtp', description)
+    opt_s = opt_smtp
+
+
+    def opt_default(self):
+        """Make the most recently specified domain the default domain."""
+        if self.last_domain:
+            self.service.addDomain('', self.last_domain)
+        else:
+            raise usage.UsageError("Specify a domain before specifying using --default")
+    opt_D = opt_default
+
+
+    def opt_maildirdbmdomain(self, domain):
+        """Generate an SMTP/POP3 virtual domain. This option requires
+        an argument of the form 'NAME=PATH' where NAME is the DNS
+        Domain Name for which email will be accepted and where PATH is
+        the filesystem path to a Maildir folder. [Example:
+        'example.com=/tmp/example.com']
+        """
+        try:
+            name, path = domain.split('=')
+        except ValueError:
+            raise usage.UsageError("Argument to --maildirdbmdomain must be of the form 'name=path'")
+
+        self.last_domain = maildir.MaildirDirdbmDomain(self.service, os.path.abspath(path))
+        self.service.addDomain(name, self.last_domain)
+    opt_d = opt_maildirdbmdomain
+
+    def opt_user(self, user_pass):
+        """add a user/password to the last specified domains
+        """
+        try:
+            user, password = user_pass.split('=', 1)
+        except ValueError:
+            raise usage.UsageError("Argument to --user must be of the form 'user=password'")
+        if self.last_domain:
+            self.last_domain.addUser(user, password)
+        else:
+            raise usage.UsageError("Specify a domain before specifying users")
+    opt_u = opt_user
+
+    def opt_bounce_to_postmaster(self):
+        """undelivered mails are sent to the postmaster
+        """
+        self.last_domain.postmaster = 1
+    opt_b = opt_bounce_to_postmaster
+
+    def opt_aliases(self, filename):
+        """Specify an aliases(5) file to use for this domain"""
+        if self.last_domain is not None:
+            if mail.IAliasableDomain.providedBy(self.last_domain):
+                aliases = alias.loadAliasFile(self.service.domains, filename)
+                self.last_domain.setAliasGroup(aliases)
+                self.service.monitor.monitorFile(
+                    filename,
+                    AliasUpdater(self.service.domains, self.last_domain)
+                )
+            else:
+                raise usage.UsageError(
+                    "%s does not support alias files" % (
+                        self.last_domain.__class__.__name__,
+                    )
+                )
+        else:
+            raise usage.UsageError("Specify a domain before specifying aliases")
+    opt_A = opt_aliases
+
+    def _getEndpoints(self, reactor, service):
+        """
+        Return a list of endpoints for the specified service, constructing
+        defaults if necessary.
+
+        @param reactor: If any endpoints are created, this is the reactor with
+            which they are created.
+
+        @param service: A key into self indicating the type of service to
+            retrieve endpoints for.  This is either C{"pop3"} or C{"smtp"}.
+
+        @return: A C{list} of C{IServerStreamEndpoint} providers corresponding
+            to the command line parameters that were specified for C{service}.
+            If none were and the protocol was not explicitly disabled with a
+            I{--no-*} option, a default endpoint for the service is created
+            using C{self._protoDefaults}.
+        """
+        if service == 'pop3' and self['pop3s'] and len(self[service]) == 1:
+            # The single endpoint here is the POP3S service we added in
+            # postOptions.  Include the default endpoint alongside it.
+            return self[service] + [
+                endpoints.TCP4ServerEndpoint(
+                    reactor, self._protoDefaults[service])]
+        elif self[service]:
+            # For any non-POP3S case, if there are any services set up, just
+            # return those.
+            return self[service]
+        elif self['no-' + service]:
+            # If there are no services, but the service was explicitly disabled,
+            # return nothing.
+            return []
+        else:
+            # Otherwise, return the old default service.
+            return [
+                endpoints.TCP4ServerEndpoint(
+                    reactor, self._protoDefaults[service])]
+
+
+    def postOptions(self):
+        from twisted.internet import reactor
+
+        if self['pop3s']:
+            if not self['certificate']:
+                raise usage.UsageError("Cannot specify --pop3s without "
+                                       "--certificate")
+            elif not os.path.exists(self['certificate']):
+                raise usage.UsageError("Certificate file %r does not exist."
+                                       % self['certificate'])
+            else:
+                self.addEndpoint(
+                    'pop3', self['pop3s'], certificate=self['certificate'])
+
+        if self['esmtp'] and self['hostname'] is None:
+            raise usage.UsageError("--esmtp requires --hostname")
+
+        # If the --auth option was passed, this will be present -- otherwise,
+        # it won't be, which is also a perfectly valid state.
+        if 'credCheckers' in self:
+            for ch in self['credCheckers']:
+                self.service.smtpPortal.registerChecker(ch)
+
+        if not self['disable-anonymous']:
+            self.service.smtpPortal.registerChecker(checkers.AllowAnonymousAccess())
+
+        anything = False
+        for service in self._protoDefaults:
+            self[service] = self._getEndpoints(reactor, service)
+            if self[service]:
+                anything = True
+
+        if not anything:
+            raise usage.UsageError("You cannot disable all protocols")
+
+
+
+class AliasUpdater:
+    def __init__(self, domains, domain):
+        self.domains = domains
+        self.domain = domain
+    def __call__(self, new):
+        self.domain.setAliasGroup(alias.loadAliasFile(self.domains, new))
+
+
+def _toEndpoint(description, certificate=None):
+    """
+    Tries to guess whether a description is a bare TCP port or an endpoint.  If
+    a bare port is specified and a certificate file is present, returns an
+    SSL4ServerEndpoint and otherwise returns a TCP4ServerEndpoint.
+    """
+    from twisted.internet import reactor
+    try:
+        port = int(description)
+    except ValueError:
+        return endpoints.serverFromString(reactor, description)
+
+    warnings.warn(
+        "Specifying plain ports and/or a certificate is deprecated since "
+        "Twisted 11.0; use endpoint descriptions instead.",
+        category=DeprecationWarning, stacklevel=3)
+
+    if certificate:
+        from twisted.internet.ssl import DefaultOpenSSLContextFactory
+        ctx = DefaultOpenSSLContextFactory(certificate, certificate)
+        return endpoints.SSL4ServerEndpoint(reactor, port, ctx)
+    return endpoints.TCP4ServerEndpoint(reactor, port)
+
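+# Editor's note: illustrative usage sketch, not part of upstream Twisted.  The
+# port numbers, interface and certificate path below are assumptions chosen for
+# the example; only the accepted forms of "description" are the point.
+def _exampleToEndpointUsage():
+    # Deprecated bare-port form: parses as an int, yields a TCP4ServerEndpoint.
+    plain = _toEndpoint("8110")
+    # Deprecated bare port plus certificate: yields an SSL4ServerEndpoint
+    # (the certificate file must exist when this is called).
+    secure = _toEndpoint("8995", certificate="server.pem")
+    # Preferred form: a full endpoint description string.
+    described = _toEndpoint("tcp:8110:interface=127.0.0.1")
+    return plain, secure, described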
+
+def makeService(config):
+    """
+    Construct a service for operating a mail server.
+
+    The returned service may include POP3 servers or SMTP servers (or both),
+    depending on the configuration passed in.  If there are multiple servers,
+    they will share all of their non-network state (e.g., the same user accounts
+    are available on all of them).
+
+    @param config: An L{Options} instance specifying what servers to include in
+        the returned service and where they should keep mail data.
+
+    @return: An L{IService} provider which contains the requested mail servers.
+    """
+    if config['esmtp']:
+        rmType = relaymanager.SmartHostESMTPRelayingManager
+        smtpFactory = config.service.getESMTPFactory
+    else:
+        rmType = relaymanager.SmartHostSMTPRelayingManager
+        smtpFactory = config.service.getSMTPFactory
+
+    if config['relay']:
+        dir = config['relay']
+        if not os.path.isdir(dir):
+            os.mkdir(dir)
+
+        config.service.setQueue(relaymanager.Queue(dir))
+        default = relay.DomainQueuer(config.service)
+
+        manager = rmType(config.service.queue)
+        if config['esmtp']:
+            manager.fArgs += (None, None)
+        manager.fArgs += (config['hostname'],)
+
+        helper = relaymanager.RelayStateHelper(manager, 1)
+        helper.setServiceParent(config.service)
+        config.service.domains.setDefaultDomain(default)
+
+    if config['pop3']:
+        f = config.service.getPOP3Factory()
+        for endpoint in config['pop3']:
+            svc = internet.StreamServerEndpointService(endpoint, f)
+            svc.setServiceParent(config.service)
+
+    if config['smtp']:
+        f = smtpFactory()
+        if config['hostname']:
+            f.domain = config['hostname']
+            f.fArgs = (f.domain,)
+        if config['esmtp']:
+            f.fArgs = (None, None) + f.fArgs
+        for endpoint in config['smtp']:
+            svc = internet.StreamServerEndpointService(endpoint, f)
+            svc.setServiceParent(config.service)
+
+    return config.service
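+
+# Editor's note: a hedged usage sketch, not part of upstream Twisted.  In
+# practice twistd drives this module through the "mail" plugin; a roughly
+# equivalent programmatic invocation looks like the following (the endpoint
+# strings and the maildir path are assumptions for the example):
+#
+#     options = Options()
+#     options.parseOptions(["--pop3", "tcp:8110", "--smtp", "tcp:8025",
+#                           "--maildirdbmdomain", "example.com=/tmp/example.com"])
+#     service = makeService(options)
+#     service.setServiceParent(application)   # e.g. inside a .tac file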
diff --git a/ThirdParty/Twisted/twisted/mail/test/__init__.py b/ThirdParty/Twisted/twisted/mail/test/__init__.py
new file mode 100644
index 0000000..f8ec705
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/__init__.py
@@ -0,0 +1 @@
+"Tests for twistd.mail"
diff --git a/ThirdParty/Twisted/twisted/mail/test/pop3testserver.py b/ThirdParty/Twisted/twisted/mail/test/pop3testserver.py
new file mode 100755
index 0000000..c87892c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/pop3testserver.py
@@ -0,0 +1,314 @@
+#!/usr/bin/env python
+# -*- test-case-name: twisted.mail.test.test_pop3client -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.internet.protocol import Factory
+from twisted.protocols import basic
+from twisted.internet import reactor
+import sys, time
+
+USER = "test"
+PASS = "twisted"
+
+PORT = 1100
+
+SSL_SUPPORT = True
+UIDL_SUPPORT = True
+INVALID_SERVER_RESPONSE = False
+INVALID_CAPABILITY_RESPONSE = False
+INVALID_LOGIN_RESPONSE = False
+DENY_CONNECTION = False
+DROP_CONNECTION = False
+BAD_TLS_RESPONSE = False
+TIMEOUT_RESPONSE = False
+TIMEOUT_DEFERRED = False
+SLOW_GREETING = False
+
+"""Commands"""
+CONNECTION_MADE = "+OK POP3 localhost v2003.83 server ready"
+
+CAPABILITIES = [
+"TOP",
+"LOGIN-DELAY 180",
+"USER",
+"SASL LOGIN"
+]
+
+CAPABILITIES_SSL = "STLS"
+CAPABILITIES_UIDL = "UIDL"
+
+
+INVALID_RESPONSE = "-ERR Unknown request"
+VALID_RESPONSE = "+OK Command Completed"
+AUTH_DECLINED = "-ERR LOGIN failed"
+AUTH_ACCEPTED = "+OK Mailbox open, 0 messages"
+TLS_ERROR = "-ERR server side error start TLS handshake"
+LOGOUT_COMPLETE = "+OK quit completed"
+NOT_LOGGED_IN = "-ERR Unknown AUTHORIZATION state command"
+STAT = "+OK 0 0"
+UIDL = "+OK Unique-ID listing follows\r\n."
+LIST = "+OK Mailbox scan listing follows\r\n."
+CAP_START = "+OK Capability list follows:"
+
+
+class POP3TestServer(basic.LineReceiver):
+    def __init__(self, contextFactory = None):
+        self.loggedIn = False
+        self.caps = None
+        self.tmpUser = None
+        self.ctx = contextFactory
+
+    def sendSTATResp(self, req):
+        self.sendLine(STAT)
+
+    def sendUIDLResp(self, req):
+        self.sendLine(UIDL)
+
+    def sendLISTResp(self, req):
+        self.sendLine(LIST)
+
+    def sendCapabilities(self):
+        if self.caps is None:
+            self.caps = [CAP_START]
+
+        if UIDL_SUPPORT:
+            self.caps.append(CAPABILITIES_UIDL)
+
+        if SSL_SUPPORT:
+            self.caps.append(CAPABILITIES_SSL)
+
+        for cap in CAPABILITIES:
+            self.caps.append(cap)
+        resp = '\r\n'.join(self.caps)
+        resp += "\r\n."
+
+        self.sendLine(resp)
+
+
+    def connectionMade(self):
+        if DENY_CONNECTION:
+            self.disconnect()
+            return
+
+        if SLOW_GREETING:
+            reactor.callLater(20, self.sendGreeting)
+
+        else:
+            self.sendGreeting()
+
+    def sendGreeting(self):
+        self.sendLine(CONNECTION_MADE)
+
+    def lineReceived(self, line):
+        """Error Conditions"""
+
+        uline = line.upper()
+        find = lambda s: uline.find(s) != -1
+
+        if TIMEOUT_RESPONSE:
+            # Do not respond to the client's request
+            return
+
+        if DROP_CONNECTION:
+            self.disconnect()
+            return
+
+        elif find("CAPA"):
+            if INVALID_CAPABILITY_RESPONSE:
+                self.sendLine(INVALID_RESPONSE)
+            else:
+                self.sendCapabilities()
+
+        elif find("STLS") and SSL_SUPPORT:
+            self.startTLS()
+
+        elif find("USER"):
+            if INVALID_LOGIN_RESPONSE:
+                self.sendLine(INVALID_RESPONSE)
+                return
+
+            resp = None
+            try:
+                self.tmpUser = line.split(" ")[1]
+                resp = VALID_RESPONSE
+            except:
+                resp = AUTH_DECLINED
+
+            self.sendLine(resp)
+
+        elif find("PASS"):
+            resp = None
+            try:
+                pwd = line.split(" ")[1]
+
+                if self.tmpUser is None or pwd is None:
+                    resp = AUTH_DECLINED
+                elif self.tmpUser == USER and pwd == PASS:
+                    resp = AUTH_ACCEPTED
+                    self.loggedIn = True
+                else:
+                    resp = AUTH_DECLINED
+            except:
+                resp = AUTH_DECLINED
+
+            self.sendLine(resp)
+
+        elif find("QUIT"):
+            self.loggedIn = False
+            self.sendLine(LOGOUT_COMPLETE)
+            self.disconnect()
+
+        elif INVALID_SERVER_RESPONSE:
+            self.sendLine(INVALID_RESPONSE)
+
+        elif not self.loggedIn:
+            self.sendLine(NOT_LOGGED_IN)
+
+        elif find("NOOP"):
+            self.sendLine(VALID_RESPONSE)
+
+        elif find("STAT"):
+            if TIMEOUT_DEFERRED:
+                return
+            self.sendLine(STAT)
+
+        elif find("LIST"):
+            if TIMEOUT_DEFERRED:
+                return
+            self.sendLine(LIST)
+
+        elif find("UIDL"):
+            if TIMEOUT_DEFERRED:
+                return
+            elif not UIDL_SUPPORT:
+                self.sendLine(INVALID_RESPONSE)
+                return
+
+            self.sendLine(UIDL)
+
+    def startTLS(self):
+        if self.ctx is None:
+            self.getContext()
+
+        if SSL_SUPPORT and self.ctx is not None:
+            self.sendLine('+OK Begin TLS negotiation now')
+            self.transport.startTLS(self.ctx)
+        else:
+            self.sendLine('-ERR TLS not available')
+
+    def disconnect(self):
+        self.transport.loseConnection()
+
+    def getContext(self):
+        try:
+            from twisted.internet import ssl
+        except ImportError:
+           self.ctx = None
+        else:
+            self.ctx = ssl.ClientContextFactory()
+            self.ctx.method = ssl.SSL.TLSv1_METHOD
+
+
+usage = """popServer.py [arg] (default is Standard POP Server with no messages)
+no_ssl  - Start with no SSL support
+no_uidl - Start with no UIDL support
+bad_resp - Send a non-RFC compliant response to the Client
+bad_cap_resp - send a non-RFC compliant response when the Client sends a 'CAPABILITY' request
+bad_login_resp - send a non-RFC compliant response when the Client sends a 'LOGIN' request
+deny - Deny the connection
+drop - Drop the connection after sending the greeting
+bad_tls - Send a bad response to a STARTTLS
+timeout - Do not return a response to a Client request
+to_deferred - Do not return a response on a 'Select' request. This
+              will test Deferred callback handling
+slow - Wait 20 seconds after the connection is made to return a Server Greeting
+"""
+
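+# Editor's note: illustrative invocation examples, not part of upstream Twisted.
+# The flags correspond to the modes documented in the usage string above; the
+# particular combinations are arbitrary:
+#
+#     python pop3testserver.py                # plain POP3 server on port 1100
+#     python pop3testserver.py no_uidl slow   # no UIDL support, delayed greeting
+#     python pop3testserver.py bad_resp       # send non-RFC compliant responses
+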
+def printMessage(msg):
+    print "Server Starting in %s mode" % msg
+
+def processArg(arg):
+
+    if arg.lower() == 'no_ssl':
+        global SSL_SUPPORT
+        SSL_SUPPORT = False
+        printMessage("NON-SSL")
+
+    elif arg.lower() == 'no_uidl':
+        global UIDL_SUPPORT
+        UIDL_SUPPORT = False
+        printMessage("NON-UIDL")
+
+    elif arg.lower() == 'bad_resp':
+        global INVALID_SERVER_RESPONSE
+        INVALID_SERVER_RESPONSE = True
+        printMessage("Invalid Server Response")
+
+    elif arg.lower() == 'bad_cap_resp':
+        global INVALID_CAPABILITY_RESPONSE
+        INVALID_CAPABILITY_RESPONSE = True
+        printMessage("Invalid Capability Response")
+
+    elif arg.lower() == 'bad_login_resp':
+        global INVALID_LOGIN_RESPONSE
+        INVALID_LOGIN_RESPONSE = True
+        printMessage("Invalid Capability Response")
+
+    elif arg.lower() == 'deny':
+        global DENY_CONNECTION
+        DENY_CONNECTION = True
+        printMessage("Deny Connection")
+
+    elif arg.lower() == 'drop':
+        global DROP_CONNECTION
+        DROP_CONNECTION = True
+        printMessage("Drop Connection")
+
+
+    elif arg.lower() == 'bad_tls':
+        global BAD_TLS_RESPONSE
+        BAD_TLS_RESPONSE = True
+        printMessage("Bad TLS Response")
+
+    elif arg.lower() == 'timeout':
+        global TIMEOUT_RESPONSE
+        TIMEOUT_RESPONSE = True
+        printMessage("Timeout Response")
+
+    elif arg.lower() == 'to_deferred':
+        global TIMEOUT_DEFERRED
+        TIMEOUT_DEFERRED = True
+        printMessage("Timeout Deferred Response")
+
+    elif arg.lower() == 'slow':
+        global SLOW_GREETING
+        SLOW_GREETING = True
+        printMessage("Slow Greeting")
+
+    elif arg.lower() == '--help':
+        print usage
+        sys.exit()
+
+    else:
+        print usage
+        sys.exit()
+
+def main():
+
+    if len(sys.argv) < 2:
+        printMessage("POP3 with no messages")
+    else:
+        args = sys.argv[1:]
+
+        for arg in args:
+            processArg(arg)
+
+    f = Factory()
+    f.protocol = POP3TestServer
+    reactor.listenTCP(PORT, f)
+    reactor.run()
+
+if __name__ == '__main__':
+    main()
diff --git a/ThirdParty/Twisted/twisted/mail/test/rfc822.message b/ThirdParty/Twisted/twisted/mail/test/rfc822.message
new file mode 100644
index 0000000..ee97ab9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/rfc822.message
@@ -0,0 +1,86 @@
+Return-Path: <twisted-commits-admin at twistedmatrix.com>
+Delivered-To: exarkun at meson.dyndns.org
+Received: from localhost [127.0.0.1]
+	by localhost with POP3 (fetchmail-6.2.1)
+	for exarkun at localhost (single-drop); Thu, 20 Mar 2003 14:50:20 -0500 (EST)
+Received: from pyramid.twistedmatrix.com (adsl-64-123-27-105.dsl.austtx.swbell.net [64.123.27.105])
+	by intarweb.us (Postfix) with ESMTP id 4A4A513EA4
+	for <exarkun at meson.dyndns.org>; Thu, 20 Mar 2003 14:49:27 -0500 (EST)
+Received: from localhost ([127.0.0.1] helo=pyramid.twistedmatrix.com)
+	by pyramid.twistedmatrix.com with esmtp (Exim 3.35 #1 (Debian))
+	id 18w648-0007Vl-00; Thu, 20 Mar 2003 13:51:04 -0600
+Received: from acapnotic by pyramid.twistedmatrix.com with local (Exim 3.35 #1 (Debian))
+	id 18w63j-0007VK-00
+	for <twisted-commits at twistedmatrix.com>; Thu, 20 Mar 2003 13:50:39 -0600
+To: twisted-commits at twistedmatrix.com
+From: etrepum CVS <etrepum at twistedmatrix.com>
+Reply-To: twisted-python at twistedmatrix.com
+X-Mailer: CVSToys
+Message-Id: <E18w63j-0007VK-00 at pyramid.twistedmatrix.com>
+Subject: [Twisted-commits] rebuild now works on python versions from 2.2.0 and up.
+Sender: twisted-commits-admin at twistedmatrix.com
+Errors-To: twisted-commits-admin at twistedmatrix.com
+X-BeenThere: twisted-commits at twistedmatrix.com
+X-Mailman-Version: 2.0.11
+Precedence: bulk
+List-Help: <mailto:twisted-commits-request at twistedmatrix.com?subject=help>
+List-Post: <mailto:twisted-commits at twistedmatrix.com>
+List-Subscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
+	<mailto:twisted-commits-request at twistedmatrix.com?subject=subscribe>
+List-Id: <twisted-commits.twistedmatrix.com>
+List-Unsubscribe: <http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits>,
+	<mailto:twisted-commits-request at twistedmatrix.com?subject=unsubscribe>
+List-Archive: <http://twistedmatrix.com/pipermail/twisted-commits/>
+Date: Thu, 20 Mar 2003 13:50:39 -0600
+
+Modified files:
+Twisted/twisted/python/rebuild.py 1.19 1.20
+
+Log message:
+rebuild now works on python versions from 2.2.0 and up.
+
+
+ViewCVS links:
+http://twistedmatrix.com/users/jh.twistd/viewcvs/cgi/viewcvs.cgi/twisted/python/rebuild.py.diff?r1=text&tr1=1.19&r2=text&tr2=1.20&cvsroot=Twisted
+
+Index: Twisted/twisted/python/rebuild.py
+diff -u Twisted/twisted/python/rebuild.py:1.19 Twisted/twisted/python/rebuild.py:1.20
+--- Twisted/twisted/python/rebuild.py:1.19	Fri Jan 17 13:50:49 2003
++++ Twisted/twisted/python/rebuild.py	Thu Mar 20 11:50:08 2003
+@@ -206,15 +206,27 @@
+             clazz.__dict__.clear()
+             clazz.__getattr__ = __getattr__
+             clazz.__module__ = module.__name__
++    if newclasses:
++        import gc
++        if (2, 2, 0) <= sys.version_info[:3] < (2, 2, 2):
++            hasBrokenRebuild = 1
++            gc_objects = gc.get_objects()
++        else:
++            hasBrokenRebuild = 0
+     for nclass in newclasses:
+         ga = getattr(module, nclass.__name__)
+         if ga is nclass:
+             log.msg("WARNING: new-class %s not replaced by reload!" % reflect.qual(nclass))
+         else:
+-            import gc
+-            for r in gc.get_referrers(nclass):
+-                if isinstance(r, nclass):
++            if hasBrokenRebuild:
++                for r in gc_objects:
++                    if not getattr(r, '__class__', None) is nclass:
++                        continue
+                     r.__class__ = ga
++            else:
++                for r in gc.get_referrers(nclass):
++                    if getattr(r, '__class__', None) is nclass:
++                        r.__class__ = ga
+     if doLog:
+         log.msg('')
+         log.msg('  (fixing   %s): ' % str(module.__name__))
+
+
+_______________________________________________
+Twisted-commits mailing list
+Twisted-commits at twistedmatrix.com
+http://twistedmatrix.com/cgi-bin/mailman/listinfo/twisted-commits
diff --git a/ThirdParty/Twisted/twisted/mail/test/server.pem b/ThirdParty/Twisted/twisted/mail/test/server.pem
new file mode 100644
index 0000000..80ef9dc
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/server.pem
@@ -0,0 +1,36 @@
+-----BEGIN CERTIFICATE-----
+MIIDBjCCAm+gAwIBAgIBATANBgkqhkiG9w0BAQQFADB7MQswCQYDVQQGEwJTRzER
+MA8GA1UEChMITTJDcnlwdG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQD
+ExtNMkNyeXB0byBDZXJ0aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5n
+cHNAcG9zdDEuY29tMB4XDTAwMDkxMDA5NTEzMFoXDTAyMDkxMDA5NTEzMFowUzEL
+MAkGA1UEBhMCU0cxETAPBgNVBAoTCE0yQ3J5cHRvMRIwEAYDVQQDEwlsb2NhbGhv
+c3QxHTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tMFwwDQYJKoZIhvcNAQEB
+BQADSwAwSAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh
+5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAaOCAQQwggEAMAkGA1UdEwQC
+MAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRl
+MB0GA1UdDgQWBBTPhIKSvnsmYsBVNWjj0m3M2z0qVTCBpQYDVR0jBIGdMIGagBT7
+hyNp65w6kxXlxb8pUU/+7Sg4AaF/pH0wezELMAkGA1UEBhMCU0cxETAPBgNVBAoT
+CE0yQ3J5cHRvMRQwEgYDVQQLEwtNMkNyeXB0byBDQTEkMCIGA1UEAxMbTTJDcnlw
+dG8gQ2VydGlmaWNhdGUgTWFzdGVyMR0wGwYJKoZIhvcNAQkBFg5uZ3BzQHBvc3Qx
+LmNvbYIBADANBgkqhkiG9w0BAQQFAAOBgQA7/CqT6PoHycTdhEStWNZde7M/2Yc6
+BoJuVwnW8YxGO8Sn6UJ4FeffZNcYZddSDKosw8LtPOeWoK3JINjAk5jiPQ2cww++
+7QGG/g5NDjxFZNDJP1dGiLAxPW6JXwov4v0FmdzfLOZ01jDcgQQZqEpYlgpuI5JE
+WUQ9Ho4EzbYCOQ==
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIBPAIBAAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh
+5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAQJBAIqm/bz4NA1H++Vx5Ewx
+OcKp3w19QSaZAwlGRtsUxrP7436QjnREM3Bm8ygU11BjkPVmtrKm6AayQfCHqJoT
+ZIECIQDW0BoMoL0HOYM/mrTLhaykYAVqgIeJsPjvkEhTFXWBuQIhAM3deFAvWNu4
+nklUQ37XsCT2c9tmNt1LAT+slG2JOTTRAiAuXDtC/m3NYVwyHfFm+zKHRzHkClk2
+HjubeEgjpj32AQIhAJqMGTaZVOwevTXvvHwNEH+vRWsAYU/gbx+OQB+7VOcBAiEA
+oolb6NMg/R3enNPvS1O4UU1H8wpaF77L4yiSWlE0p4w=
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE REQUEST-----
+MIIBDTCBuAIBADBTMQswCQYDVQQGEwJTRzERMA8GA1UEChMITTJDcnlwdG8xEjAQ
+BgNVBAMTCWxvY2FsaG9zdDEdMBsGCSqGSIb3DQEJARYObmdwc0Bwb3N0MS5jb20w
+XDANBgkqhkiG9w0BAQEFAANLADBIAkEArL57d26W9fNXvOhNlZzlPOACmvwOZ5Ad
+NgLzJ1/MfsQQJ7hHVeHmTAjM664V+fXvwUGJLziCeBo1ysWLRnl8CQIDAQABoAAw
+DQYJKoZIhvcNAQEEBQADQQA7uqbrNTjVWpF6By5ZNPvhZ4YdFgkeXFVWi5ao/TaP
+Vq4BG021fJ9nlHRtr4rotpgHDX1rr+iWeHKsx4+5DRSy
+-----END CERTIFICATE REQUEST-----
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_bounce.py b/ThirdParty/Twisted/twisted/mail/test/test_bounce.py
new file mode 100644
index 0000000..963d21d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_bounce.py
@@ -0,0 +1,32 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Test cases for bounce message generation
+"""
+
+from twisted.trial import unittest
+from twisted.mail import bounce
+import rfc822, cStringIO
+
+class BounceTestCase(unittest.TestCase):
+    """
+    Test cases for bounce message generation.
+    """
+
+    def testBounceFormat(self):
+        from_, to, s = bounce.generateBounce(cStringIO.StringIO('''\
+From: Moshe Zadka <moshez at example.com>
+To: nonexistant at example.org
+Subject: test
+
+'''), 'moshez at example.com', 'nonexistant at example.org')
+        self.assertEqual(from_, '')
+        self.assertEqual(to, 'moshez at example.com')
+        mess = rfc822.Message(cStringIO.StringIO(s))
+        self.assertEqual(mess['To'], 'moshez at example.com')
+        self.assertEqual(mess['From'], 'postmaster at example.org')
+        self.assertEqual(mess['subject'], 'Returned Mail: see transcript for details')
+
+    def testBounceMIME(self):
+        pass
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_imap.py b/ThirdParty/Twisted/twisted/mail/test/test_imap.py
new file mode 100644
index 0000000..857e786
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_imap.py
@@ -0,0 +1,4892 @@
+# -*- test-case-name: twisted.mail.test.test_imap -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Test case for twisted.mail.imap4
+"""
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+import codecs
+import locale
+import os
+import types
+
+from zope.interface import implements
+
+from twisted.mail.imap4 import MessageSet
+from twisted.mail import imap4
+from twisted.protocols import loopback
+from twisted.internet import defer
+from twisted.internet import error
+from twisted.internet import reactor
+from twisted.internet import interfaces
+from twisted.internet.task import Clock
+from twisted.trial import unittest
+from twisted.python import util
+from twisted.python import failure
+
+from twisted import cred
+import twisted.cred.error
+import twisted.cred.checkers
+import twisted.cred.credentials
+import twisted.cred.portal
+
+from twisted.test.proto_helpers import StringTransport, StringTransportWithDisconnection
+
+try:
+    from twisted.test.ssl_helpers import ClientTLSContext, ServerTLSContext
+except ImportError:
+    ClientTLSContext = ServerTLSContext = None
+
+def strip(f):
+    return lambda result, f=f: f()
+
+def sortNest(l):
+    l = l[:]
+    l.sort()
+    for i in range(len(l)):
+        if isinstance(l[i], types.ListType):
+            l[i] = sortNest(l[i])
+        elif isinstance(l[i], types.TupleType):
+            l[i] = tuple(sortNest(list(l[i])))
+    return l
+
+class IMAP4UTF7TestCase(unittest.TestCase):
+    tests = [
+        [u'Hello world', 'Hello world'],
+        [u'Hello & world', 'Hello &- world'],
+        [u'Hello\xffworld', 'Hello&AP8-world'],
+        [u'\xff\xfe\xfd\xfc', '&AP8A,gD9APw-'],
+        [u'~peter/mail/\u65e5\u672c\u8a9e/\u53f0\u5317',
+         '~peter/mail/&ZeVnLIqe-/&U,BTFw-'], # example from RFC 2060
+    ]
+
+    def test_encodeWithErrors(self):
+        """
+        Specifying an error policy to C{unicode.encode} with the
+        I{imap4-utf-7} codec should produce the same result as not
+        specifying the error policy.
+        """
+        text = u'Hello world'
+        self.assertEqual(
+            text.encode('imap4-utf-7', 'strict'),
+            text.encode('imap4-utf-7'))
+
+
+    def test_decodeWithErrors(self):
+        """
+        Similar to L{test_encodeWithErrors}, but for C{str.decode}.
+        """
+        bytes = 'Hello world'
+        self.assertEqual(
+            bytes.decode('imap4-utf-7', 'strict'),
+            bytes.decode('imap4-utf-7'))
+
+
+    def test_getreader(self):
+        """
+        C{codecs.getreader('imap4-utf-7')} returns the I{imap4-utf-7} stream
+        reader class.
+        """
+        reader = codecs.getreader('imap4-utf-7')(StringIO('Hello&AP8-world'))
+        self.assertEqual(reader.read(), u'Hello\xffworld')
+
+
+    def test_getwriter(self):
+        """
+        C{codecs.getwriter('imap4-utf-7')} returns the I{imap4-utf-7} stream
+        writer class.
+        """
+        output = StringIO()
+        writer = codecs.getwriter('imap4-utf-7')(output)
+        writer.write(u'Hello\xffworld')
+        self.assertEqual(output.getvalue(), 'Hello&AP8-world')
+
+
+    def test_encode(self):
+        """
+        The I{imap4-utf-7} codec can be used to encode a unicode string into a
+        byte string according to the IMAP4 modified UTF-7 encoding rules.
+        """
+        for (input, output) in self.tests:
+            self.assertEqual(input.encode('imap4-utf-7'), output)
+
+
+    def test_decode(self):
+        """
+        The I{imap4-utf-7} codec can be used to decode a byte string into a
+        unicode string according to the IMAP4 modified UTF-7 encoding rules.
+        """
+        for (input, output) in self.tests:
+            self.assertEqual(input, output.decode('imap4-utf-7'))
+
+
+    def test_printableSingletons(self):
+        """
+        The IMAP4 modified UTF-7 implementation encodes all printable
+        characters which are in ASCII using the corresponding ASCII byte.
+        """
+        # All printables represent themselves
+        for o in range(0x20, 0x26) + range(0x27, 0x7f):
+            self.assertEqual(chr(o), chr(o).encode('imap4-utf-7'))
+            self.assertEqual(chr(o), chr(o).decode('imap4-utf-7'))
+        self.assertEqual('&'.encode('imap4-utf-7'), '&-')
+        self.assertEqual('&-'.decode('imap4-utf-7'), '&')
+
+
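+# Editor's note: a small illustrative round-trip, not part of upstream Twisted,
+# showing the codec behaviour exercised by IMAP4UTF7TestCase above.  The sample
+# mailbox name is an assumption; the encoded form follows the modified UTF-7
+# rules demonstrated in the test vectors:
+#
+#     name = u'Entw\xfcrfe'
+#     encoded = name.encode('imap4-utf-7')        # -> 'Entw&APw-rfe'
+#     assert encoded.decode('imap4-utf-7') == name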
+
+class BufferingConsumer:
+    def __init__(self):
+        self.buffer = []
+
+    def write(self, bytes):
+        self.buffer.append(bytes)
+        if self.consumer:
+            self.consumer.resumeProducing()
+
+    def registerProducer(self, consumer, streaming):
+        self.consumer = consumer
+        self.consumer.resumeProducing()
+
+    def unregisterProducer(self):
+        self.consumer = None
+
+class MessageProducerTestCase(unittest.TestCase):
+    def testSinglePart(self):
+        body = 'This is body text.  Rar.'
+        headers = util.OrderedDict()
+        headers['from'] = 'sender at host'
+        headers['to'] = 'recipient at domain'
+        headers['subject'] = 'booga booga boo'
+        headers['content-type'] = 'text/plain'
+
+        msg = FakeyMessage(headers, (), None, body, 123, None )
+
+        c = BufferingConsumer()
+        p = imap4.MessageProducer(msg)
+        d = p.beginProducing(c)
+
+        def cbProduced(result):
+            self.assertIdentical(result, p)
+            self.assertEqual(
+                ''.join(c.buffer),
+
+                '{119}\r\n'
+                'From: sender at host\r\n'
+                'To: recipient at domain\r\n'
+                'Subject: booga booga boo\r\n'
+                'Content-Type: text/plain\r\n'
+                '\r\n'
+                + body)
+        return d.addCallback(cbProduced)
+
+
+    def testSingleMultiPart(self):
+        outerBody = ''
+        innerBody = 'Contained body message text.  Squarge.'
+        headers = util.OrderedDict()
+        headers['from'] = 'sender at host'
+        headers['to'] = 'recipient at domain'
+        headers['subject'] = 'booga booga boo'
+        headers['content-type'] = 'multipart/alternative; boundary="xyz"'
+
+        innerHeaders = util.OrderedDict()
+        innerHeaders['subject'] = 'this is subject text'
+        innerHeaders['content-type'] = 'text/plain'
+        msg = FakeyMessage(headers, (), None, outerBody, 123,
+                           [FakeyMessage(innerHeaders, (), None, innerBody,
+                                         None, None)],
+                           )
+
+        c = BufferingConsumer()
+        p = imap4.MessageProducer(msg)
+        d = p.beginProducing(c)
+
+        def cbProduced(result):
+            self.failUnlessIdentical(result, p)
+
+            self.assertEqual(
+                ''.join(c.buffer),
+
+                '{239}\r\n'
+                'From: sender at host\r\n'
+                'To: recipient at domain\r\n'
+                'Subject: booga booga boo\r\n'
+                'Content-Type: multipart/alternative; boundary="xyz"\r\n'
+                '\r\n'
+                '\r\n'
+                '--xyz\r\n'
+                'Subject: this is subject text\r\n'
+                'Content-Type: text/plain\r\n'
+                '\r\n'
+                + innerBody
+                + '\r\n--xyz--\r\n')
+
+        return d.addCallback(cbProduced)
+
+
+    def testMultipleMultiPart(self):
+        outerBody = ''
+        innerBody1 = 'Contained body message text.  Squarge.'
+        innerBody2 = 'Secondary <i>message</i> text of squarge body.'
+        headers = util.OrderedDict()
+        headers['from'] = 'sender at host'
+        headers['to'] = 'recipient at domain'
+        headers['subject'] = 'booga booga boo'
+        headers['content-type'] = 'multipart/alternative; boundary="xyz"'
+        innerHeaders = util.OrderedDict()
+        innerHeaders['subject'] = 'this is subject text'
+        innerHeaders['content-type'] = 'text/plain'
+        innerHeaders2 = util.OrderedDict()
+        innerHeaders2['subject'] = '<b>this is subject</b>'
+        innerHeaders2['content-type'] = 'text/html'
+        msg = FakeyMessage(headers, (), None, outerBody, 123, [
+            FakeyMessage(innerHeaders, (), None, innerBody1, None, None),
+            FakeyMessage(innerHeaders2, (), None, innerBody2, None, None)
+            ],
+        )
+
+        c = BufferingConsumer()
+        p = imap4.MessageProducer(msg)
+        d = p.beginProducing(c)
+
+        def cbProduced(result):
+            self.failUnlessIdentical(result, p)
+
+            self.assertEqual(
+                ''.join(c.buffer),
+
+                '{354}\r\n'
+                'From: sender at host\r\n'
+                'To: recipient at domain\r\n'
+                'Subject: booga booga boo\r\n'
+                'Content-Type: multipart/alternative; boundary="xyz"\r\n'
+                '\r\n'
+                '\r\n'
+                '--xyz\r\n'
+                'Subject: this is subject text\r\n'
+                'Content-Type: text/plain\r\n'
+                '\r\n'
+                + innerBody1
+                + '\r\n--xyz\r\n'
+                'Subject: <b>this is subject</b>\r\n'
+                'Content-Type: text/html\r\n'
+                '\r\n'
+                + innerBody2
+                + '\r\n--xyz--\r\n')
+        return d.addCallback(cbProduced)
+
+
+
+class IMAP4HelperTestCase(unittest.TestCase):
+    """
+    Tests for various helper utilities in the IMAP4 module.
+    """
+
+    def test_fileProducer(self):
+        b = (('x' * 1) + ('y' * 1) + ('z' * 1)) * 10
+        c = BufferingConsumer()
+        f = StringIO(b)
+        p = imap4.FileProducer(f)
+        d = p.beginProducing(c)
+
+        def cbProduced(result):
+            self.failUnlessIdentical(result, p)
+            self.assertEqual(
+                ('{%d}\r\n' % len(b))+ b,
+                ''.join(c.buffer))
+        return d.addCallback(cbProduced)
+
+
+    def test_wildcard(self):
+        cases = [
+            ['foo/%gum/bar',
+                ['foo/bar', 'oo/lalagum/bar', 'foo/gumx/bar', 'foo/gum/baz'],
+                ['foo/xgum/bar', 'foo/gum/bar'],
+            ], ['foo/x%x/bar',
+                ['foo', 'bar', 'fuz fuz fuz', 'foo/*/bar', 'foo/xyz/bar', 'foo/xx/baz'],
+                ['foo/xyx/bar', 'foo/xx/bar', 'foo/xxxxxxxxxxxxxx/bar'],
+            ], ['foo/xyz*abc/bar',
+                ['foo/xyz/bar', 'foo/abc/bar', 'foo/xyzab/cbar', 'foo/xyza/bcbar'],
+                ['foo/xyzabc/bar', 'foo/xyz/abc/bar', 'foo/xyz/123/abc/bar'],
+            ]
+        ]
+
+        for (wildcard, fail, succeed) in cases:
+            wildcard = imap4.wildcardToRegexp(wildcard, '/')
+            for x in fail:
+                self.failIf(wildcard.match(x))
+            for x in succeed:
+                self.failUnless(wildcard.match(x))
+
+
+    def test_wildcardNoDelim(self):
+        cases = [
+            ['foo/%gum/bar',
+                ['foo/bar', 'oo/lalagum/bar', 'foo/gumx/bar', 'foo/gum/baz'],
+                ['foo/xgum/bar', 'foo/gum/bar', 'foo/x/gum/bar'],
+            ], ['foo/x%x/bar',
+                ['foo', 'bar', 'fuz fuz fuz', 'foo/*/bar', 'foo/xyz/bar', 'foo/xx/baz'],
+                ['foo/xyx/bar', 'foo/xx/bar', 'foo/xxxxxxxxxxxxxx/bar', 'foo/x/x/bar'],
+            ], ['foo/xyz*abc/bar',
+                ['foo/xyz/bar', 'foo/abc/bar', 'foo/xyzab/cbar', 'foo/xyza/bcbar'],
+                ['foo/xyzabc/bar', 'foo/xyz/abc/bar', 'foo/xyz/123/abc/bar'],
+            ]
+        ]
+
+        for (wildcard, fail, succeed) in cases:
+            wildcard = imap4.wildcardToRegexp(wildcard, None)
+            for x in fail:
+                self.failIf(wildcard.match(x), x)
+            for x in succeed:
+                self.failUnless(wildcard.match(x), x)
+
+
+    def test_headerFormatter(self):
+        """
+        L{imap4._formatHeaders} accepts a C{dict} of header name/value pairs and
+        returns a string representing those headers in the standard multiline,
+        C{":"}-separated format.
+        """
+        cases = [
+            ({'Header1': 'Value1', 'Header2': 'Value2'}, 'Header2: Value2\r\nHeader1: Value1\r\n'),
+        ]
+
+        for (input, expected) in cases:
+            output = imap4._formatHeaders(input)
+            self.assertEqual(sorted(output.splitlines(True)),
+                             sorted(expected.splitlines(True)))
+
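+    # Editor's note: illustrative only, not part of upstream Twisted.  Under the
+    # format described above, a single header pair renders as one
+    # CRLF-terminated line, e.g.:
+    #
+    #     imap4._formatHeaders({'Subject': 'hi'})   # -> 'Subject: hi\r\n'
+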
+
+    def test_messageSet(self):
+        m1 = MessageSet()
+        m2 = MessageSet()
+
+        self.assertEqual(m1, m2)
+
+        m1 = m1 + (1, 3)
+        self.assertEqual(len(m1), 3)
+        self.assertEqual(list(m1), [1, 2, 3])
+
+        m2 = m2 + (1, 3)
+        self.assertEqual(m1, m2)
+        self.assertEqual(list(m1 + m2), [1, 2, 3])
+
+
+    def test_messageSetStringRepresentationWithWildcards(self):
+        """
+        In a L{MessageSet}, in the presence of wildcards, if the highest message
+        id is known, the wildcard should get replaced by that high value.
+        """
+        inputs = [
+            MessageSet(imap4.parseIdList('*')),
+            MessageSet(imap4.parseIdList('3:*', 6)),
+            MessageSet(imap4.parseIdList('*:2', 6)),
+        ]
+
+        outputs = [
+            "*",
+            "3:6",
+            "2:6",
+        ]
+
+        for i, o in zip(inputs, outputs):
+            self.assertEqual(str(i), o)
+
+
+    def test_messageSetStringRepresentationWithInversion(self):
+        """
+        In a L{MessageSet}, inverting the high and low numbers in a range
+        doesn't affect the meaning of the range. For example, 3:2 displays just
+        like 2:3, because according to the RFC they have the same meaning.
+        """
+        inputs = [
+            MessageSet(imap4.parseIdList('2:3')),
+            MessageSet(imap4.parseIdList('3:2')),
+        ]
+
+        outputs = [
+            "2:3",
+            "2:3",
+        ]
+
+        for i, o in zip(inputs, outputs):
+            self.assertEqual(str(i), o)
+
+
+    def test_quotedSplitter(self):
+        cases = [
+            '''Hello World''',
+            '''Hello "World!"''',
+            '''World "Hello" "How are you?"''',
+            '''"Hello world" How "are you?"''',
+            '''foo bar "baz buz" NIL''',
+            '''foo bar "baz buz" "NIL"''',
+            '''foo NIL "baz buz" bar''',
+            '''foo "NIL" "baz buz" bar''',
+            '''"NIL" bar "baz buz" foo''',
+            'oo \\"oo\\" oo',
+            '"oo \\"oo\\" oo"',
+            'oo \t oo',
+            '"oo \t oo"',
+            'oo \\t oo',
+            '"oo \\t oo"',
+            'oo \o oo',
+            '"oo \o oo"',
+            'oo \\o oo',
+            '"oo \\o oo"',
+        ]
+
+        answers = [
+            ['Hello', 'World'],
+            ['Hello', 'World!'],
+            ['World', 'Hello', 'How are you?'],
+            ['Hello world', 'How', 'are you?'],
+            ['foo', 'bar', 'baz buz', None],
+            ['foo', 'bar', 'baz buz', 'NIL'],
+            ['foo', None, 'baz buz', 'bar'],
+            ['foo', 'NIL', 'baz buz', 'bar'],
+            ['NIL', 'bar', 'baz buz', 'foo'],
+            ['oo', '"oo"', 'oo'],
+            ['oo "oo" oo'],
+            ['oo', 'oo'],
+            ['oo \t oo'],
+            ['oo', '\\t', 'oo'],
+            ['oo \\t oo'],
+            ['oo', '\o', 'oo'],
+            ['oo \o oo'],
+            ['oo', '\\o', 'oo'],
+            ['oo \\o oo'],
+
+        ]
+
+        errors = [
+            '"mismatched quote',
+            'mismatched quote"',
+            'mismatched"quote',
+            '"oops here is" another"',
+        ]
+
+        for s in errors:
+            self.assertRaises(imap4.MismatchedQuoting, imap4.splitQuoted, s)
+
+        for (case, expected) in zip(cases, answers):
+            self.assertEqual(imap4.splitQuoted(case), expected)
+
+
+    def test_stringCollapser(self):
+        cases = [
+            ['a', 'b', 'c', 'd', 'e'],
+            ['a', ' ', '"', 'b', 'c', ' ', '"', ' ', 'd', 'e'],
+            [['a', 'b', 'c'], 'd', 'e'],
+            ['a', ['b', 'c', 'd'], 'e'],
+            ['a', 'b', ['c', 'd', 'e']],
+            ['"', 'a', ' ', '"', ['b', 'c', 'd'], '"', ' ', 'e', '"'],
+            ['a', ['"', ' ', 'b', 'c', ' ', ' ', '"'], 'd', 'e'],
+        ]
+
+        answers = [
+            ['abcde'],
+            ['a', 'bc ', 'de'],
+            [['abc'], 'de'],
+            ['a', ['bcd'], 'e'],
+            ['ab', ['cde']],
+            ['a ', ['bcd'], ' e'],
+            ['a', [' bc  '], 'de'],
+        ]
+
+        for (case, expected) in zip(cases, answers):
+            self.assertEqual(imap4.collapseStrings(case), expected)
+
+
+    def test_parenParser(self):
+        s = '\r\n'.join(['xx'] * 4)
+        cases = [
+            '(BODY.PEEK[HEADER.FIELDS.NOT (subject bcc cc)] {%d}\r\n%s)' % (len(s), s,),
+
+#            '(FLAGS (\Seen) INTERNALDATE "17-Jul-1996 02:44:25 -0700" '
+#            'RFC822.SIZE 4286 ENVELOPE ("Wed, 17 Jul 1996 02:23:25 -0700 (PDT)" '
+#            '"IMAP4rev1 WG mtg summary and minutes" '
+#            '(("Terry Gray" NIL "gray" "cac.washington.edu")) '
+#            '(("Terry Gray" NIL "gray" "cac.washington.edu")) '
+#            '(("Terry Gray" NIL "gray" "cac.washington.edu")) '
+#            '((NIL NIL "imap" "cac.washington.edu")) '
+#            '((NIL NIL "minutes" "CNRI.Reston.VA.US") '
+#            '("John Klensin" NIL "KLENSIN" "INFOODS.MIT.EDU")) NIL NIL '
+#            '"<B27397-0100000 at cac.washington.edu>") '
+#            'BODY ("TEXT" "PLAIN" ("CHARSET" "US-ASCII") NIL NIL "7BIT" 3028 92))',
+
+            '(FLAGS (\Seen) INTERNALDATE "17-Jul-1996 02:44:25 -0700" '
+            'RFC822.SIZE 4286 ENVELOPE ("Wed, 17 Jul 1996 02:23:25 -0700 (PDT)" '
+            '"IMAP4rev1 WG mtg summary and minutes" '
+            '(("Terry Gray" NIL gray cac.washington.edu)) '
+            '(("Terry Gray" NIL gray cac.washington.edu)) '
+            '(("Terry Gray" NIL gray cac.washington.edu)) '
+            '((NIL NIL imap cac.washington.edu)) '
+            '((NIL NIL minutes CNRI.Reston.VA.US) '
+            '("John Klensin" NIL KLENSIN INFOODS.MIT.EDU)) NIL NIL '
+            '<B27397-0100000 at cac.washington.edu>) '
+            'BODY (TEXT PLAIN (CHARSET US-ASCII) NIL NIL 7BIT 3028 92))',
+            '("oo \\"oo\\" oo")',
+            '("oo \\\\ oo")',
+            '("oo \\ oo")',
+            '("oo \\o")',
+            '("oo \o")',
+            '(oo \o)',
+            '(oo \\o)',
+
+        ]
+
+        answers = [
+            ['BODY.PEEK', ['HEADER.FIELDS.NOT', ['subject', 'bcc', 'cc']], s],
+
+            ['FLAGS', [r'\Seen'], 'INTERNALDATE',
+            '17-Jul-1996 02:44:25 -0700', 'RFC822.SIZE', '4286', 'ENVELOPE',
+            ['Wed, 17 Jul 1996 02:23:25 -0700 (PDT)',
+            'IMAP4rev1 WG mtg summary and minutes', [["Terry Gray", None,
+            "gray", "cac.washington.edu"]], [["Terry Gray", None,
+            "gray", "cac.washington.edu"]], [["Terry Gray", None,
+            "gray", "cac.washington.edu"]], [[None, None, "imap",
+            "cac.washington.edu"]], [[None, None, "minutes",
+            "CNRI.Reston.VA.US"], ["John Klensin", None, "KLENSIN",
+            "INFOODS.MIT.EDU"]], None, None,
+            "<B27397-0100000 at cac.washington.edu>"], "BODY", ["TEXT", "PLAIN",
+            ["CHARSET", "US-ASCII"], None, None, "7BIT", "3028", "92"]],
+            ['oo "oo" oo'],
+            ['oo \\\\ oo'],
+            ['oo \\ oo'],
+            ['oo \\o'],
+            ['oo \o'],
+            ['oo', '\o'],
+            ['oo', '\\o'],
+        ]
+
+        for (case, expected) in zip(cases, answers):
+            self.assertEqual(imap4.parseNestedParens(case), [expected])
+
+        # XXX This code used to work, but changes occurred within the
+        # imap4.py module which made it no longer necessary for *all* of it
+        # to work.  In particular, only the part that makes
+        # 'BODY.PEEK[HEADER.FIELDS.NOT (Subject Bcc Cc)]' come out correctly
+        # no longer needs to work.  So, I am loath to delete the entire
+        # section of the test. --exarkun
+        #
+
+#        for (case, expected) in zip(answers, cases):
+#            self.assertEqual('(' + imap4.collapseNestedLists(case) + ')', expected)
+
+
+    def test_fetchParserSimple(self):
+        cases = [
+            ['ENVELOPE', 'Envelope'],
+            ['FLAGS', 'Flags'],
+            ['INTERNALDATE', 'InternalDate'],
+            ['RFC822.HEADER', 'RFC822Header'],
+            ['RFC822.SIZE', 'RFC822Size'],
+            ['RFC822.TEXT', 'RFC822Text'],
+            ['RFC822', 'RFC822'],
+            ['UID', 'UID'],
+            ['BODYSTRUCTURE', 'BodyStructure'],
+        ]
+
+        for (inp, outp) in cases:
+            p = imap4._FetchParser()
+            p.parseString(inp)
+            self.assertEqual(len(p.result), 1)
+            self.failUnless(isinstance(p.result[0], getattr(p, outp)))
+
+
+    def test_fetchParserMacros(self):
+        cases = [
+            ['ALL', (4, ['flags', 'internaldate', 'rfc822.size', 'envelope'])],
+            ['FULL', (5, ['flags', 'internaldate', 'rfc822.size', 'envelope', 'body'])],
+            ['FAST', (3, ['flags', 'internaldate', 'rfc822.size'])],
+        ]
+
+        for (inp, outp) in cases:
+            p = imap4._FetchParser()
+            p.parseString(inp)
+            self.assertEqual(len(p.result), outp[0])
+            p = [str(p).lower() for p in p.result]
+            p.sort()
+            outp[1].sort()
+            self.assertEqual(p, outp[1])
+
+
+    def test_fetchParserBody(self):
+        P = imap4._FetchParser
+
+        p = P()
+        p.parseString('BODY')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, False)
+        self.assertEqual(p.result[0].header, None)
+        self.assertEqual(str(p.result[0]), 'BODY')
+
+        p = P()
+        p.parseString('BODY.PEEK')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, True)
+        self.assertEqual(str(p.result[0]), 'BODY')
+
+        p = P()
+        p.parseString('BODY[]')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].empty, True)
+        self.assertEqual(str(p.result[0]), 'BODY[]')
+
+        p = P()
+        p.parseString('BODY[HEADER]')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, False)
+        self.failUnless(isinstance(p.result[0].header, p.Header))
+        self.assertEqual(p.result[0].header.negate, True)
+        self.assertEqual(p.result[0].header.fields, ())
+        self.assertEqual(p.result[0].empty, False)
+        self.assertEqual(str(p.result[0]), 'BODY[HEADER]')
+
+        p = P()
+        p.parseString('BODY.PEEK[HEADER]')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, True)
+        self.failUnless(isinstance(p.result[0].header, p.Header))
+        self.assertEqual(p.result[0].header.negate, True)
+        self.assertEqual(p.result[0].header.fields, ())
+        self.assertEqual(p.result[0].empty, False)
+        self.assertEqual(str(p.result[0]), 'BODY[HEADER]')
+
+        p = P()
+        p.parseString('BODY[HEADER.FIELDS (Subject Cc Message-Id)]')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, False)
+        self.failUnless(isinstance(p.result[0].header, p.Header))
+        self.assertEqual(p.result[0].header.negate, False)
+        self.assertEqual(p.result[0].header.fields, ['SUBJECT', 'CC', 'MESSAGE-ID'])
+        self.assertEqual(p.result[0].empty, False)
+        self.assertEqual(str(p.result[0]), 'BODY[HEADER.FIELDS (Subject Cc Message-Id)]')
+
+        p = P()
+        p.parseString('BODY.PEEK[HEADER.FIELDS (Subject Cc Message-Id)]')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, True)
+        self.failUnless(isinstance(p.result[0].header, p.Header))
+        self.assertEqual(p.result[0].header.negate, False)
+        self.assertEqual(p.result[0].header.fields, ['SUBJECT', 'CC', 'MESSAGE-ID'])
+        self.assertEqual(p.result[0].empty, False)
+        self.assertEqual(str(p.result[0]), 'BODY[HEADER.FIELDS (Subject Cc Message-Id)]')
+
+        p = P()
+        p.parseString('BODY.PEEK[HEADER.FIELDS.NOT (Subject Cc Message-Id)]')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, True)
+        self.failUnless(isinstance(p.result[0].header, p.Header))
+        self.assertEqual(p.result[0].header.negate, True)
+        self.assertEqual(p.result[0].header.fields, ['SUBJECT', 'CC', 'MESSAGE-ID'])
+        self.assertEqual(p.result[0].empty, False)
+        self.assertEqual(str(p.result[0]), 'BODY[HEADER.FIELDS.NOT (Subject Cc Message-Id)]')
+
+        p = P()
+        p.parseString('BODY[1.MIME]<10.50>')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, False)
+        self.failUnless(isinstance(p.result[0].mime, p.MIME))
+        self.assertEqual(p.result[0].part, (0,))
+        self.assertEqual(p.result[0].partialBegin, 10)
+        self.assertEqual(p.result[0].partialLength, 50)
+        self.assertEqual(p.result[0].empty, False)
+        self.assertEqual(str(p.result[0]), 'BODY[1.MIME]<10.50>')
+
+        p = P()
+        p.parseString('BODY.PEEK[1.3.9.11.HEADER.FIELDS.NOT (Message-Id Date)]<103.69>')
+        self.assertEqual(len(p.result), 1)
+        self.failUnless(isinstance(p.result[0], p.Body))
+        self.assertEqual(p.result[0].peek, True)
+        self.failUnless(isinstance(p.result[0].header, p.Header))
+        self.assertEqual(p.result[0].part, (0, 2, 8, 10))
+        self.assertEqual(p.result[0].header.fields, ['MESSAGE-ID', 'DATE'])
+        self.assertEqual(p.result[0].partialBegin, 103)
+        self.assertEqual(p.result[0].partialLength, 69)
+        self.assertEqual(p.result[0].empty, False)
+        self.assertEqual(str(p.result[0]), 'BODY[1.3.9.11.HEADER.FIELDS.NOT (Message-Id Date)]<103.69>')
+
+
+    def test_files(self):
+        inputStructure = [
+            'foo', 'bar', 'baz', StringIO('this is a file\r\n'), 'buz'
+        ]
+
+        output = '"foo" "bar" "baz" {16}\r\nthis is a file\r\n "buz"'
+
+        self.assertEqual(imap4.collapseNestedLists(inputStructure), output)
+
+
+    def test_quoteAvoider(self):
+        input = [
+            'foo', imap4.DontQuoteMe('bar'), "baz", StringIO('this is a file\r\n'),
+            imap4.DontQuoteMe('buz'), ""
+        ]
+
+        output = '"foo" bar "baz" {16}\r\nthis is a file\r\n buz ""'
+
+        self.assertEqual(imap4.collapseNestedLists(input), output)
+
+
+    def test_literals(self):
+        cases = [
+            ('({10}\r\n0123456789)', [['0123456789']]),
+        ]
+
+        for (case, expected) in cases:
+            self.assertEqual(imap4.parseNestedParens(case), expected)
+
+
+    def test_queryBuilder(self):
+        inputs = [
+            imap4.Query(flagged=1),
+            imap4.Query(sorted=1, unflagged=1, deleted=1),
+            imap4.Or(imap4.Query(flagged=1), imap4.Query(deleted=1)),
+            imap4.Query(before='today'),
+            imap4.Or(
+                imap4.Query(deleted=1),
+                imap4.Query(unseen=1),
+                imap4.Query(new=1)
+            ),
+            imap4.Or(
+                imap4.Not(
+                    imap4.Or(
+                        imap4.Query(sorted=1, since='yesterday', smaller=1000),
+                        imap4.Query(sorted=1, before='tuesday', larger=10000),
+                        imap4.Query(sorted=1, unseen=1, deleted=1, before='today'),
+                        imap4.Not(
+                            imap4.Query(subject='spam')
+                        ),
+                    ),
+                ),
+                imap4.Not(
+                    imap4.Query(uid='1:5')
+                ),
+            )
+        ]
+
+        outputs = [
+            'FLAGGED',
+            '(DELETED UNFLAGGED)',
+            '(OR FLAGGED DELETED)',
+            '(BEFORE "today")',
+            '(OR DELETED (OR UNSEEN NEW))',
+            '(OR (NOT (OR (SINCE "yesterday" SMALLER 1000) ' # Continuing
+            '(OR (BEFORE "tuesday" LARGER 10000) (OR (BEFORE ' # Some more
+            '"today" DELETED UNSEEN) (NOT (SUBJECT "spam")))))) ' # And more
+            '(NOT (UID 1:5)))',
+        ]
+
+        for (query, expected) in zip(inputs, outputs):
+            self.assertEqual(query, expected)
+
+
+    def test_invalidIdListParser(self):
+        """
+        Trying to parse an invalid representation of a sequence range raises an
+        L{IllegalIdentifierError}.
+        """
+        inputs = [
+            '*:*',
+            'foo',
+            '4:',
+            'bar:5'
+        ]
+
+        for input in inputs:
+            self.assertRaises(imap4.IllegalIdentifierError,
+                              imap4.parseIdList, input, 12345)
+
+
+    def test_invalidIdListParserNonPositive(self):
+        """
+        Zeroes and negative values are not accepted in id range expressions.
+        RFC 3501 states that sequence numbers and sequence ranges consist of
+        positive, non-zero numbers (RFC 3501 section 9, the seq-number grammar
+        item).
+        """
+        inputs = [
+            '0:5',
+            '0:0',
+            '*:0',
+            '0',
+            '-3:5',
+            '1:-2',
+            '-1'
+        ]
+
+        for input in inputs:
+            self.assertRaises(imap4.IllegalIdentifierError,
+                              imap4.parseIdList, input, 12345)
+
+
+    def test_parseIdList(self):
+        """
+        The function to parse sequence ranges yields appropriate L{MessageSet}
+        objects.
+        """
+        inputs = [
+            '1:*',
+            '5:*',
+            '1:2,5:*',
+            '*',
+            '1',
+            '1,2',
+            '1,3,5',
+            '1:10',
+            '1:10,11',
+            '1:5,10:20',
+            '1,5:10',
+            '1,5:10,15:20',
+            '1:10,15,20:25',
+            '4:2'
+        ]
+
+        outputs = [
+            MessageSet(1, None),
+            MessageSet(5, None),
+            MessageSet(5, None) + MessageSet(1, 2),
+            MessageSet(None, None),
+            MessageSet(1),
+            MessageSet(1, 2),
+            MessageSet(1) + MessageSet(3) + MessageSet(5),
+            MessageSet(1, 10),
+            MessageSet(1, 11),
+            MessageSet(1, 5) + MessageSet(10, 20),
+            MessageSet(1) + MessageSet(5, 10),
+            MessageSet(1) + MessageSet(5, 10) + MessageSet(15, 20),
+            MessageSet(1, 10) + MessageSet(15) + MessageSet(20, 25),
+            MessageSet(2, 4),
+        ]
+
+        lengths = [
+            None, None, None,
+            1, 1, 2, 3, 10, 11, 16, 7, 13, 17, 3
+        ]
+
+        for (input, expected) in zip(inputs, outputs):
+            self.assertEqual(imap4.parseIdList(input), expected)
+
+        for (input, expected) in zip(inputs, lengths):
+            if expected is None:
+                self.assertRaises(TypeError, len, imap4.parseIdList(input))
+            else:
+                L = len(imap4.parseIdList(input))
+                self.assertEqual(L, expected,
+                                  "len(%r) = %r != %r" % (input, L, expected))
+
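+# Editor's note: an illustrative sketch, not part of upstream Twisted, of the
+# MessageSet arithmetic exercised by the parser tests above.  The particular
+# ranges are arbitrary:
+#
+#     s = MessageSet(1, 5) + MessageSet(10)
+#     list(s) == [1, 2, 3, 4, 5, 10]
+#     len(s) == 6
+#     str(s) == '1:5,10'
+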
+class SimpleMailbox:
+    implements(imap4.IMailboxInfo, imap4.IMailbox, imap4.ICloseableMailbox)
+
+    flags = ('\\Flag1', 'Flag2', '\\AnotherSysFlag', 'LastFlag')
+    messages = []
+    mUID = 0
+    rw = 1
+    closed = False
+
+    def __init__(self):
+        self.listeners = []
+        self.addListener = self.listeners.append
+        self.removeListener = self.listeners.remove
+
+    def getFlags(self):
+        return self.flags
+
+    def getUIDValidity(self):
+        return 42
+
+    def getUIDNext(self):
+        return len(self.messages) + 1
+
+    def getMessageCount(self):
+        return 9
+
+    def getRecentCount(self):
+        return 3
+
+    def getUnseenCount(self):
+        return 4
+
+    def isWriteable(self):
+        return self.rw
+
+    def destroy(self):
+        pass
+
+    def getHierarchicalDelimiter(self):
+        return '/'
+
+    def requestStatus(self, names):
+        r = {}
+        if 'MESSAGES' in names:
+            r['MESSAGES'] = self.getMessageCount()
+        if 'RECENT' in names:
+            r['RECENT'] = self.getRecentCount()
+        if 'UIDNEXT' in names:
+            r['UIDNEXT'] = self.getMessageCount() + 1
+        if 'UIDVALIDITY' in names:
+            r['UIDVALIDITY'] = self.getUIDValidity()
+        if 'UNSEEN' in names:
+            r['UNSEEN'] = self.getUnseenCount()
+        return defer.succeed(r)
+
+    def addMessage(self, message, flags, date = None):
+        self.messages.append((message, flags, date, self.mUID))
+        self.mUID += 1
+        return defer.succeed(None)
+
+    def expunge(self):
+        delete = []
+        for i in self.messages:
+            if '\\Deleted' in i[1]:
+                delete.append(i)
+        for i in delete:
+            self.messages.remove(i)
+        return [i[3] for i in delete]
+
+    def close(self):
+        self.closed = True
+
+class Account(imap4.MemoryAccount):
+    mailboxFactory = SimpleMailbox
+    def _emptyMailbox(self, name, id):
+        return self.mailboxFactory()
+
+    def select(self, name, rw=1):
+        mbox = imap4.MemoryAccount.select(self, name)
+        if mbox is not None:
+            mbox.rw = rw
+        return mbox
+
+class SimpleServer(imap4.IMAP4Server):
+    def __init__(self, *args, **kw):
+        imap4.IMAP4Server.__init__(self, *args, **kw)
+        realm = TestRealm()
+        realm.theAccount = Account('testuser')
+        portal = cred.portal.Portal(realm)
+        c = cred.checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        self.checker = c
+        self.portal = portal
+        portal.registerChecker(c)
+        self.timeoutTest = False
+
+    def lineReceived(self, line):
+        if self.timeoutTest:
+            # Do not send a response
+            return
+
+        imap4.IMAP4Server.lineReceived(self, line)
+
+    _username = 'testuser'
+    _password = 'password-test'
+    def authenticateLogin(self, username, password):
+        if username == self._username and password == self._password:
+            return imap4.IAccount, self.theAccount, lambda: None
+        raise cred.error.UnauthorizedLogin()
+
+
+class SimpleClient(imap4.IMAP4Client):
+    def __init__(self, deferred, contextFactory = None):
+        imap4.IMAP4Client.__init__(self, contextFactory)
+        self.deferred = deferred
+        self.events = []
+
+    def serverGreeting(self, caps):
+        self.deferred.callback(None)
+
+    def modeChanged(self, writeable):
+        self.events.append(['modeChanged', writeable])
+        self.transport.loseConnection()
+
+    def flagsChanged(self, newFlags):
+        self.events.append(['flagsChanged', newFlags])
+        self.transport.loseConnection()
+
+    def newMessages(self, exists, recent):
+        self.events.append(['newMessages', exists, recent])
+        self.transport.loseConnection()
+
+
+
+class IMAP4HelperMixin:
+
+    serverCTX = None
+    clientCTX = None
+
+    def setUp(self):
+        d = defer.Deferred()
+        self.server = SimpleServer(contextFactory=self.serverCTX)
+        self.client = SimpleClient(d, contextFactory=self.clientCTX)
+        self.connected = d
+
+        SimpleMailbox.messages = []
+        theAccount = Account('testuser')
+        theAccount.mboxType = SimpleMailbox
+        SimpleServer.theAccount = theAccount
+
+    def tearDown(self):
+        del self.server
+        del self.client
+        del self.connected
+
+    def _cbStopClient(self, ignore):
+        self.client.transport.loseConnection()
+
+    def _ebGeneral(self, failure):
+        self.client.transport.loseConnection()
+        self.server.transport.loseConnection()
+        failure.raiseException()
+
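+    # Run the client and server against one another over an in-memory
+    # loopback transport; the returned Deferred fires once the connection
+    # has been torn down.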
+    def loopback(self):
+        return loopback.loopbackAsync(self.server, self.client)
+
+class IMAP4ServerTestCase(IMAP4HelperMixin, unittest.TestCase):
+    def testCapability(self):
+        caps = {}
+        def getCaps():
+            def gotCaps(c):
+                caps.update(c)
+                self.server.transport.loseConnection()
+            return self.client.getCapabilities().addCallback(gotCaps)
+        d1 = self.connected.addCallback(strip(getCaps)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        expected = {'IMAP4rev1': None, 'NAMESPACE': None, 'IDLE': None}
+        return d.addCallback(lambda _: self.assertEqual(expected, caps))
+
+    def testCapabilityWithAuth(self):
+        caps = {}
+        self.server.challengers['CRAM-MD5'] = cred.credentials.CramMD5Credentials
+        def getCaps():
+            def gotCaps(c):
+                caps.update(c)
+                self.server.transport.loseConnection()
+            return self.client.getCapabilities().addCallback(gotCaps)
+        d1 = self.connected.addCallback(strip(getCaps)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+
+        expCap = {'IMAP4rev1': None, 'NAMESPACE': None,
+                  'IDLE': None, 'AUTH': ['CRAM-MD5']}
+
+        return d.addCallback(lambda _: self.assertEqual(expCap, caps))
+
+    def testLogout(self):
+        self.loggedOut = 0
+        def logout():
+            def setLoggedOut():
+                self.loggedOut = 1
+            self.client.logout().addCallback(strip(setLoggedOut))
+        self.connected.addCallback(strip(logout)).addErrback(self._ebGeneral)
+        d = self.loopback()
+        return d.addCallback(lambda _: self.assertEqual(self.loggedOut, 1))
+
+    def testNoop(self):
+        self.responses = None
+        def noop():
+            def setResponses(responses):
+                self.responses = responses
+                self.server.transport.loseConnection()
+            self.client.noop().addCallback(setResponses)
+        self.connected.addCallback(strip(noop)).addErrback(self._ebGeneral)
+        d = self.loopback()
+        return d.addCallback(lambda _: self.assertEqual(self.responses, []))
+
+    def testLogin(self):
+        def login():
+            d = self.client.login('testuser', 'password-test')
+            d.addCallback(self._cbStopClient)
+        d1 = self.connected.addCallback(strip(login)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([d1, self.loopback()])
+        return d.addCallback(self._cbTestLogin)
+
+    def _cbTestLogin(self, ignored):
+        self.assertEqual(self.server.account, SimpleServer.theAccount)
+        self.assertEqual(self.server.state, 'auth')
+
+    def testFailedLogin(self):
+        def login():
+            d = self.client.login('testuser', 'wrong-password')
+            d.addBoth(self._cbStopClient)
+
+        d1 = self.connected.addCallback(strip(login)).addErrback(self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        return d.addCallback(self._cbTestFailedLogin)
+
+    def _cbTestFailedLogin(self, ignored):
+        self.assertEqual(self.server.account, None)
+        self.assertEqual(self.server.state, 'unauth')
+
+
+    def testLoginRequiringQuoting(self):
+        self.server._username = '{test}user'
+        self.server._password = '{test}password'
+
+        def login():
+            d = self.client.login('{test}user', '{test}password')
+            d.addBoth(self._cbStopClient)
+
+        d1 = self.connected.addCallback(strip(login)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestLoginRequiringQuoting)
+
+    def _cbTestLoginRequiringQuoting(self, ignored):
+        self.assertEqual(self.server.account, SimpleServer.theAccount)
+        self.assertEqual(self.server.state, 'auth')
+
+
+    def testNamespace(self):
+        self.namespaceArgs = None
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def namespace():
+            def gotNamespace(args):
+                self.namespaceArgs = args
+                self._cbStopClient(None)
+            return self.client.namespace().addCallback(gotNamespace)
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(namespace))
+        d1.addErrback(self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _: self.assertEqual(self.namespaceArgs,
+                                                  [[['', '/']], [], []]))
+        return d
+
+    def testSelect(self):
+        SimpleServer.theAccount.addMailbox('test-mailbox')
+        self.selectedArgs = None
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def select():
+            def selected(args):
+                self.selectedArgs = args
+                self._cbStopClient(None)
+            d = self.client.select('test-mailbox')
+            d.addCallback(selected)
+            return d
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(select))
+        d1.addErrback(self._ebGeneral)
+        d2 = self.loopback()
+        return defer.gatherResults([d1, d2]).addCallback(self._cbTestSelect)
+
+    def _cbTestSelect(self, ignored):
+        mbox = SimpleServer.theAccount.mailboxes['TEST-MAILBOX']
+        self.assertEqual(self.server.mbox, mbox)
+        self.assertEqual(self.selectedArgs, {
+            'EXISTS': 9, 'RECENT': 3, 'UIDVALIDITY': 42,
+            'FLAGS': ('\\Flag1', 'Flag2', '\\AnotherSysFlag', 'LastFlag'),
+            'READ-WRITE': 1
+        })
+
+
+    def test_examine(self):
+        """
+        L{IMAP4Client.examine} issues an I{EXAMINE} command to the server and
+        returns a L{Deferred} which fires with a C{dict} with as many of the
+        following keys as the server includes in its response: C{'FLAGS'},
+        C{'EXISTS'}, C{'RECENT'}, C{'UNSEEN'}, C{'READ-WRITE'}, C{'READ-ONLY'},
+        C{'UIDVALIDITY'}, and C{'PERMANENTFLAGS'}.
+
+        Unfortunately the server doesn't generate all of these so it's hard to
+        test the client's handling of them here.  See
+        L{IMAP4ClientExamineTests} below.
+
+        See U{RFC 3501<http://www.faqs.org/rfcs/rfc3501.html>}, section 6.3.2,
+        for details.
+        """
+        SimpleServer.theAccount.addMailbox('test-mailbox')
+        self.examinedArgs = None
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def examine():
+            def examined(args):
+                self.examinedArgs = args
+                self._cbStopClient(None)
+            d = self.client.examine('test-mailbox')
+            d.addCallback(examined)
+            return d
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(examine))
+        d1.addErrback(self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        return d.addCallback(self._cbTestExamine)
+
+
+    def _cbTestExamine(self, ignored):
+        mbox = SimpleServer.theAccount.mailboxes['TEST-MAILBOX']
+        self.assertEqual(self.server.mbox, mbox)
+        self.assertEqual(self.examinedArgs, {
+            'EXISTS': 9, 'RECENT': 3, 'UIDVALIDITY': 42,
+            'FLAGS': ('\\Flag1', 'Flag2', '\\AnotherSysFlag', 'LastFlag'),
+            'READ-WRITE': False})
+
+
+    def testCreate(self):
+        succeed = ('testbox', 'test/box', 'test/', 'test/box/box', 'INBOX')
+        fail = ('testbox', 'test/box')
+
+        def cb(): self.result.append(1)
+        def eb(failure): self.result.append(0)
+
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def create():
+            for name in succeed + fail:
+                d = self.client.create(name)
+                d.addCallback(strip(cb)).addErrback(eb)
+            d.addCallbacks(self._cbStopClient, self._ebGeneral)
+
+        self.result = []
+        d1 = self.connected.addCallback(strip(login)).addCallback(strip(create))
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        return d.addCallback(self._cbTestCreate, succeed, fail)
+
+    def _cbTestCreate(self, ignored, succeed, fail):
+        self.assertEqual(self.result, [1] * len(succeed) + [0] * len(fail))
+        mbox = SimpleServer.theAccount.mailboxes.keys()
+        answers = ['inbox', 'testbox', 'test/box', 'test', 'test/box/box']
+        mbox.sort()
+        answers.sort()
+        self.assertEqual(mbox, [a.upper() for a in answers])
+
+    def testDelete(self):
+        SimpleServer.theAccount.addMailbox('delete/me')
+
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def delete():
+            return self.client.delete('delete/me')
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(delete), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _:
+                      self.assertEqual(SimpleServer.theAccount.mailboxes.keys(), []))
+        return d
+
+    def testIllegalInboxDelete(self):
+        self.stashed = None
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def delete():
+            return self.client.delete('inbox')
+        def stash(result):
+            self.stashed = result
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(delete), self._ebGeneral)
+        d1.addBoth(stash)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _: self.failUnless(isinstance(self.stashed,
+                                                           failure.Failure)))
+        return d
+
+
+    def testNonExistentDelete(self):
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def delete():
+            return self.client.delete('delete/me')
+        def deleteFailed(failure):
+            self.failure = failure
+
+        self.failure = None
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(delete)).addErrback(deleteFailed)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _: self.assertEqual(str(self.failure.value),
+                                                  'No such mailbox'))
+        return d
+
+
+    def testIllegalDelete(self):
+        m = SimpleMailbox()
+        m.flags = (r'\Noselect',)
+        SimpleServer.theAccount.addMailbox('delete', m)
+        SimpleServer.theAccount.addMailbox('delete/me')
+
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def delete():
+            return self.client.delete('delete')
+        def deleteFailed(failure):
+            self.failure = failure
+
+        self.failure = None
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(delete)).addErrback(deleteFailed)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        expected = "Hierarchically inferior mailboxes exist and \\Noselect is set"
+        d.addCallback(lambda _:
+                      self.assertEqual(str(self.failure.value), expected))
+        return d
+
+    def testRename(self):
+        SimpleServer.theAccount.addMailbox('oldmbox')
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def rename():
+            return self.client.rename('oldmbox', 'newname')
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(rename), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _:
+                      self.assertEqual(SimpleServer.theAccount.mailboxes.keys(),
+                                        ['NEWNAME']))
+        return d
+
+    def testIllegalInboxRename(self):
+        self.stashed = None
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def rename():
+            return self.client.rename('inbox', 'frotz')
+        def stash(stuff):
+            self.stashed = stuff
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(rename), self._ebGeneral)
+        d1.addBoth(stash)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _:
+                      self.failUnless(isinstance(self.stashed, failure.Failure)))
+        return d
+
+    def testHierarchicalRename(self):
+        SimpleServer.theAccount.create('oldmbox/m1')
+        SimpleServer.theAccount.create('oldmbox/m2')
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def rename():
+            return self.client.rename('oldmbox', 'newname')
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(rename), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        return d.addCallback(self._cbTestHierarchicalRename)
+
+    def _cbTestHierarchicalRename(self, ignored):
+        mboxes = SimpleServer.theAccount.mailboxes.keys()
+        expected = ['newname', 'newname/m1', 'newname/m2']
+        mboxes.sort()
+        self.assertEqual(mboxes, [s.upper() for s in expected])
+
+    def testSubscribe(self):
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def subscribe():
+            return self.client.subscribe('this/mbox')
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(subscribe), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _:
+                      self.assertEqual(SimpleServer.theAccount.subscriptions,
+                                        ['THIS/MBOX']))
+        return d
+
+    def testUnsubscribe(self):
+        SimpleServer.theAccount.subscriptions = ['THIS/MBOX', 'THAT/MBOX']
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def unsubscribe():
+            return self.client.unsubscribe('this/mbox')
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(unsubscribe), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _:
+                      self.assertEqual(SimpleServer.theAccount.subscriptions,
+                                        ['THAT/MBOX']))
+        return d
+
+    def _listSetup(self, f):
+        SimpleServer.theAccount.addMailbox('root/subthing')
+        SimpleServer.theAccount.addMailbox('root/another-thing')
+        SimpleServer.theAccount.addMailbox('non-root/subthing')
+
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def listed(answers):
+            self.listed = answers
+
+        self.listed = None
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(f), self._ebGeneral)
+        d1.addCallbacks(listed, self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        return defer.gatherResults([d1, d2]).addCallback(lambda _: self.listed)
+
+    def testList(self):
+        def list():
+            return self.client.list('root', '%')
+        d = self._listSetup(list)
+        d.addCallback(lambda listed: self.assertEqual(
+            sortNest(listed),
+            sortNest([
+                (SimpleMailbox.flags, "/", "ROOT/SUBTHING"),
+                (SimpleMailbox.flags, "/", "ROOT/ANOTHER-THING")
+            ])
+        ))
+        return d
+
+    def testLSub(self):
+        SimpleServer.theAccount.subscribe('ROOT/SUBTHING')
+        def lsub():
+            return self.client.lsub('root', '%')
+        d = self._listSetup(lsub)
+        d.addCallback(self.assertEqual,
+                      [(SimpleMailbox.flags, "/", "ROOT/SUBTHING")])
+        return d
+
+    def testStatus(self):
+        SimpleServer.theAccount.addMailbox('root/subthing')
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def status():
+            return self.client.status('root/subthing', 'MESSAGES', 'UIDNEXT', 'UNSEEN')
+        def statused(result):
+            self.statused = result
+
+        self.statused = None
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(status), self._ebGeneral)
+        d1.addCallbacks(statused, self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        d.addCallback(lambda _: self.assertEqual(
+            self.statused,
+            {'MESSAGES': 9, 'UIDNEXT': '10', 'UNSEEN': 4}
+        ))
+        return d
+
+    def testFailedStatus(self):
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def status():
+            return self.client.status('root/nonexistent', 'MESSAGES', 'UIDNEXT', 'UNSEEN')
+        def statused(result):
+            self.statused = result
+        def failed(failure):
+            self.failure = failure
+
+        self.statused = self.failure = None
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(status), self._ebGeneral)
+        d1.addCallbacks(statused, failed)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        return defer.gatherResults([d1, d2]).addCallback(self._cbTestFailedStatus)
+
+    def _cbTestFailedStatus(self, ignored):
+        self.assertEqual(
+            self.statused, None
+        )
+        self.assertEqual(
+            self.failure.value.args,
+            ('Could not open mailbox',)
+        )
+
+    def testFullAppend(self):
+        infile = util.sibpath(__file__, 'rfc822.message')
+        message = open(infile)
+        SimpleServer.theAccount.addMailbox('root/subthing')
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def append():
+            return self.client.append(
+                'root/subthing',
+                message,
+                ('\\SEEN', '\\DELETED'),
+                'Tue, 17 Jun 2003 11:22:16 -0600 (MDT)',
+            )
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(append), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        return d.addCallback(self._cbTestFullAppend, infile)
+
+    def _cbTestFullAppend(self, ignored, infile):
+        mb = SimpleServer.theAccount.mailboxes['ROOT/SUBTHING']
+        self.assertEqual(1, len(mb.messages))
+        self.assertEqual(
+            (['\\SEEN', '\\DELETED'], 'Tue, 17 Jun 2003 11:22:16 -0600 (MDT)', 0),
+            mb.messages[0][1:]
+        )
+        self.assertEqual(open(infile).read(), mb.messages[0][0].getvalue())
+
+    def testPartialAppend(self):
+        infile = util.sibpath(__file__, 'rfc822.message')
+        message = open(infile)
+        SimpleServer.theAccount.addMailbox('PARTIAL/SUBTHING')
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def append():
+            message = open(infile)
+            return self.client.sendCommand(
+                imap4.Command(
+                    'APPEND',
+                    'PARTIAL/SUBTHING (\\SEEN) "Right now" {%d}' % os.path.getsize(infile),
+                    (), self.client._IMAP4Client__cbContinueAppend, message
+                )
+            )
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(append), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        return d.addCallback(self._cbTestPartialAppend, infile)
+
+    def _cbTestPartialAppend(self, ignored, infile):
+        mb = SimpleServer.theAccount.mailboxes['PARTIAL/SUBTHING']
+        self.assertEqual(1, len(mb.messages))
+        self.assertEqual(
+            (['\\SEEN'], 'Right now', 0),
+            mb.messages[0][1:]
+        )
+        self.assertEqual(open(infile).read(), mb.messages[0][0].getvalue())
+
+    def testCheck(self):
+        SimpleServer.theAccount.addMailbox('root/subthing')
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def select():
+            return self.client.select('root/subthing')
+        def check():
+            return self.client.check()
+
+        d = self.connected.addCallback(strip(login))
+        d.addCallbacks(strip(select), self._ebGeneral)
+        d.addCallbacks(strip(check), self._ebGeneral)
+        d.addCallbacks(self._cbStopClient, self._ebGeneral)
+        return self.loopback()
+
+        # Okay, that was fun
+
+    def testClose(self):
+        m = SimpleMailbox()
+        m.messages = [
+            ('Message 1', ('\\Deleted', 'AnotherFlag'), None, 0),
+            ('Message 2', ('AnotherFlag',), None, 1),
+            ('Message 3', ('\\Deleted',), None, 2),
+        ]
+        SimpleServer.theAccount.addMailbox('mailbox', m)
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def select():
+            return self.client.select('mailbox')
+        def close():
+            return self.client.close()
+
+        d = self.connected.addCallback(strip(login))
+        d.addCallbacks(strip(select), self._ebGeneral)
+        d.addCallbacks(strip(close), self._ebGeneral)
+        d.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        return defer.gatherResults([d, d2]).addCallback(self._cbTestClose, m)
+
+    def _cbTestClose(self, ignored, m):
+        self.assertEqual(len(m.messages), 1)
+        self.assertEqual(m.messages[0], ('Message 2', ('AnotherFlag',), None, 1))
+        self.failUnless(m.closed)
+
+    def testExpunge(self):
+        m = SimpleMailbox()
+        m.messages = [
+            ('Message 1', ('\\Deleted', 'AnotherFlag'), None, 0),
+            ('Message 2', ('AnotherFlag',), None, 1),
+            ('Message 3', ('\\Deleted',), None, 2),
+        ]
+        SimpleServer.theAccount.addMailbox('mailbox', m)
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def select():
+            return self.client.select('mailbox')
+        def expunge():
+            return self.client.expunge()
+        def expunged(results):
+            self.failIf(self.server.mbox is None)
+            self.results = results
+
+        self.results = None
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallbacks(strip(select), self._ebGeneral)
+        d1.addCallbacks(strip(expunge), self._ebGeneral)
+        d1.addCallbacks(expunged, self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        return d.addCallback(self._cbTestExpunge, m)
+
+    def _cbTestExpunge(self, ignored, m):
+        self.assertEqual(len(m.messages), 1)
+        self.assertEqual(m.messages[0], ('Message 2', ('AnotherFlag',), None, 1))
+
+        self.assertEqual(self.results, [0, 2])
+
+
+
+class IMAP4ServerSearchTestCase(IMAP4HelperMixin, unittest.TestCase):
+    """
+    Tests for the behavior of the search_* functions in L{imap4.IMAP4Server}.
+    """
+    def setUp(self):
+        IMAP4HelperMixin.setUp(self)
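+        # The fixture message below is dated 13 Dec 2009, so the three query
+        # dates bracket it: earlierQuery falls before it, sameDateQuery on
+        # the same day, and laterQuery after it.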
+        self.earlierQuery = ["10-Dec-2009"]
+        self.sameDateQuery = ["13-Dec-2009"]
+        self.laterQuery = ["16-Dec-2009"]
+        self.seq = 0
+        self.msg = FakeyMessage({"date" : "Mon, 13 Dec 2009 21:25:10 GMT"}, [],
+                                '', '', 1234, None)
+
+
+    def test_searchSentBefore(self):
+        """
+        L{imap4.IMAP4Server.search_SENTBEFORE} returns True if the message date
+        is earlier than the query date.
+        """
+        self.assertFalse(
+            self.server.search_SENTBEFORE(self.earlierQuery, self.seq, self.msg))
+        self.assertTrue(
+            self.server.search_SENTBEFORE(self.laterQuery, self.seq, self.msg))
+
+    def test_searchWildcard(self):
+        """
+        L{imap4.IMAP4Server.search_UID} returns True if the message UID is in
+        the search range.
+        """
+        self.assertFalse(
+            self.server.search_UID(['2:3'], self.seq, self.msg, (1, 1234)))
+        # 2:* should get translated to 2:<max UID> and then to 1:2
+        self.assertTrue(
+            self.server.search_UID(['2:*'], self.seq, self.msg, (1, 1234)))
+        self.assertTrue(
+            self.server.search_UID(['*'], self.seq, self.msg, (1, 1234)))
+
+    def test_searchWildcardHigh(self):
+        """
+        L{imap4.IMAP4Server.search_UID} should return True if there is a
+        wildcard, because a wildcard means "highest UID in the mailbox".
+        """
+        self.assertTrue(
+            self.server.search_UID(['1235:*'], self.seq, self.msg, (1234, 1)))
+
+    def test_reversedSearchTerms(self):
+        """
+        L{imap4.parseIdList} normalizes a reversed range such as C{'4:2'} so
+        that iterating over the resulting set yields the sequence numbers in
+        ascending order.
+        """
+        msgset = imap4.parseIdList('4:2')
+        self.assertEqual(list(msgset), [2, 3, 4])
+
+    def test_searchSentOn(self):
+        """
+        L{imap4.IMAP4Server.search_SENTON} returns True if the message date is
+        the same as the query date.
+        """
+        self.assertFalse(
+            self.server.search_SENTON(self.earlierQuery, self.seq, self.msg))
+        self.assertTrue(
+            self.server.search_SENTON(self.sameDateQuery, self.seq, self.msg))
+        self.assertFalse(
+            self.server.search_SENTON(self.laterQuery, self.seq, self.msg))
+
+
+    def test_searchSentSince(self):
+        """
+        L{imap4.IMAP4Server.search_SENTSINCE} returns True if the message date
+        is later than the query date.
+        """
+        self.assertTrue(
+            self.server.search_SENTSINCE(self.earlierQuery, self.seq, self.msg))
+        self.assertFalse(
+            self.server.search_SENTSINCE(self.laterQuery, self.seq, self.msg))
+
+
+    def test_searchOr(self):
+        """
+        L{imap4.IMAP4Server.search_OR} returns true if either of the two
+        expressions supplied to it returns true and returns false if neither
+        does.
+        """
+        self.assertTrue(
+            self.server.search_OR(
+                ["SENTSINCE"] + self.earlierQuery +
+                ["SENTSINCE"] + self.laterQuery,
+            self.seq, self.msg, (None, None)))
+        self.assertTrue(
+            self.server.search_OR(
+                ["SENTSINCE"] + self.laterQuery +
+                ["SENTSINCE"] + self.earlierQuery,
+            self.seq, self.msg, (None, None)))
+        self.assertFalse(
+            self.server.search_OR(
+                ["SENTON"] + self.laterQuery +
+                ["SENTSINCE"] + self.laterQuery,
+            self.seq, self.msg, (None, None)))
+
+
+    def test_searchNot(self):
+        """
+        L{imap4.IMAP4Server.search_NOT} returns the negation of the result
+        of the expression supplied to it.
+        """
+        self.assertFalse(self.server.search_NOT(
+                ["SENTSINCE"] + self.earlierQuery, self.seq, self.msg,
+                (None, None)))
+        self.assertTrue(self.server.search_NOT(
+                ["SENTON"] + self.laterQuery, self.seq, self.msg,
+                (None, None)))
+
+
+
+class TestRealm:
+    theAccount = None
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        return imap4.IAccount, self.theAccount, lambda: None
+
+class TestChecker:
+    credentialInterfaces = (cred.credentials.IUsernameHashedPassword, cred.credentials.IUsernamePassword)
+
+    users = {
+        'testuser': 'secret'
+    }
+
+    def requestAvatarId(self, credentials):
+        if credentials.username in self.users:
+            return defer.maybeDeferred(
+                credentials.checkPassword, self.users[credentials.username]
+        ).addCallback(self._cbCheck, credentials.username)
+
+    def _cbCheck(self, result, username):
+        if result:
+            return username
+        raise cred.error.UnauthorizedLogin()
+
+class AuthenticatorTestCase(IMAP4HelperMixin, unittest.TestCase):
+    def setUp(self):
+        IMAP4HelperMixin.setUp(self)
+
+        realm = TestRealm()
+        realm.theAccount = Account('testuser')
+        portal = cred.portal.Portal(realm)
+        portal.registerChecker(TestChecker())
+        self.server.portal = portal
+
+        self.authenticated = 0
+        self.account = realm.theAccount
+
+    def testCramMD5(self):
+        self.server.challengers['CRAM-MD5'] = cred.credentials.CramMD5Credentials
+        cAuth = imap4.CramMD5ClientAuthenticator('testuser')
+        self.client.registerAuthenticator(cAuth)
+
+        def auth():
+            return self.client.authenticate('secret')
+        def authed():
+            self.authenticated = 1
+
+        d1 = self.connected.addCallback(strip(auth))
+        d1.addCallbacks(strip(authed), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d2 = self.loopback()
+        d = defer.gatherResults([d1, d2])
+        return d.addCallback(self._cbTestCramMD5)
+
+    def _cbTestCramMD5(self, ignored):
+        self.assertEqual(self.authenticated, 1)
+        self.assertEqual(self.server.account, self.account)
+
+    def testFailedCramMD5(self):
+        self.server.challengers['CRAM-MD5'] = cred.credentials.CramMD5Credentials
+        cAuth = imap4.CramMD5ClientAuthenticator('testuser')
+        self.client.registerAuthenticator(cAuth)
+
+        def misauth():
+            return self.client.authenticate('not the secret')
+        def authed():
+            self.authenticated = 1
+        def misauthed():
+            self.authenticated = -1
+
+        d1 = self.connected.addCallback(strip(misauth))
+        d1.addCallbacks(strip(authed), strip(misauthed))
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestFailedCramMD5)
+
+    def _cbTestFailedCramMD5(self, ignored):
+        self.assertEqual(self.authenticated, -1)
+        self.assertEqual(self.server.account, None)
+
+    def testLOGIN(self):
+        self.server.challengers['LOGIN'] = imap4.LOGINCredentials
+        cAuth = imap4.LOGINAuthenticator('testuser')
+        self.client.registerAuthenticator(cAuth)
+
+        def auth():
+            return self.client.authenticate('secret')
+        def authed():
+            self.authenticated = 1
+
+        d1 = self.connected.addCallback(strip(auth))
+        d1.addCallbacks(strip(authed), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestLOGIN)
+
+    def _cbTestLOGIN(self, ignored):
+        self.assertEqual(self.authenticated, 1)
+        self.assertEqual(self.server.account, self.account)
+
+    def testFailedLOGIN(self):
+        self.server.challengers['LOGIN'] = imap4.LOGINCredentials
+        cAuth = imap4.LOGINAuthenticator('testuser')
+        self.client.registerAuthenticator(cAuth)
+
+        def misauth():
+            return self.client.authenticate('not the secret')
+        def authed():
+            self.authenticated = 1
+        def misauthed():
+            self.authenticated = -1
+
+        d1 = self.connected.addCallback(strip(misauth))
+        d1.addCallbacks(strip(authed), strip(misauthed))
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestFailedLOGIN)
+
+    def _cbTestFailedLOGIN(self, ignored):
+        self.assertEqual(self.authenticated, -1)
+        self.assertEqual(self.server.account, None)
+
+    def testPLAIN(self):
+        self.server.challengers['PLAIN'] = imap4.PLAINCredentials
+        cAuth = imap4.PLAINAuthenticator('testuser')
+        self.client.registerAuthenticator(cAuth)
+
+        def auth():
+            return self.client.authenticate('secret')
+        def authed():
+            self.authenticated = 1
+
+        d1 = self.connected.addCallback(strip(auth))
+        d1.addCallbacks(strip(authed), self._ebGeneral)
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestPLAIN)
+
+    def _cbTestPLAIN(self, ignored):
+        self.assertEqual(self.authenticated, 1)
+        self.assertEqual(self.server.account, self.account)
+
+    def testFailedPLAIN(self):
+        self.server.challengers['PLAIN'] = imap4.PLAINCredentials
+        cAuth = imap4.PLAINAuthenticator('testuser')
+        self.client.registerAuthenticator(cAuth)
+
+        def misauth():
+            return self.client.authenticate('not the secret')
+        def authed():
+            self.authenticated = 1
+        def misauthed():
+            self.authenticated = -1
+
+        d1 = self.connected.addCallback(strip(misauth))
+        d1.addCallbacks(strip(authed), strip(misauthed))
+        d1.addCallbacks(self._cbStopClient, self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestFailedPLAIN)
+
+    def _cbTestFailedPLAIN(self, ignored):
+        self.assertEqual(self.authenticated, -1)
+        self.assertEqual(self.server.account, None)
+
+
+
+class SASLPLAINTestCase(unittest.TestCase):
+    """
+    Tests for I{SASL PLAIN} authentication, as implemented by
+    L{imap4.PLAINAuthenticator} and L{imap4.PLAINCredentials}.
+
+    @see: U{http://www.faqs.org/rfcs/rfc2595.html}
+    @see: U{http://www.faqs.org/rfcs/rfc4616.html}
+    """
+    def test_authenticatorChallengeResponse(self):
+        """
+        L{PLAINAuthenticator.challengeResponse} returns response strings of
+        the form::
+
+            NUL<authn-id>NUL<secret>
+        """
+        username = 'testuser'
+        secret = 'secret'
+        chal = 'challenge'
+        cAuth = imap4.PLAINAuthenticator(username)
+        response = cAuth.challengeResponse(secret, chal)
+        self.assertEqual(response, '\0%s\0%s' % (username, secret))
+
+
+    def test_credentialsSetResponse(self):
+        """
+        L{PLAINCredentials.setResponse} parses challenge strings of the
+        form::
+
+            NUL<authn-id>NUL<secret>
+        """
+        cred = imap4.PLAINCredentials()
+        cred.setResponse('\0testuser\0secret')
+        self.assertEqual(cred.username, 'testuser')
+        self.assertEqual(cred.password, 'secret')
+
+
+    def test_credentialsInvalidResponse(self):
+        """
+        L{PLAINCredentials.setResponse} raises L{imap4.IllegalClientResponse}
+        when passed a string not of the expected form.
+        """
+        cred = imap4.PLAINCredentials()
+        self.assertRaises(
+            imap4.IllegalClientResponse, cred.setResponse, 'hello')
+        self.assertRaises(
+            imap4.IllegalClientResponse, cred.setResponse, 'hello\0world')
+        self.assertRaises(
+            imap4.IllegalClientResponse, cred.setResponse,
+            'hello\0world\0Zoom!\0')
+
+
+
+class UnsolicitedResponseTestCase(IMAP4HelperMixin, unittest.TestCase):
+    def testReadWrite(self):
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def loggedIn():
+            self.server.modeChanged(1)
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(loggedIn)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestReadWrite)
+
+    def _cbTestReadWrite(self, ignored):
+        E = self.client.events
+        self.assertEqual(E, [['modeChanged', 1]])
+
+    def testReadOnly(self):
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def loggedIn():
+            self.server.modeChanged(0)
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(loggedIn)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestReadOnly)
+
+    def _cbTestReadOnly(self, ignored):
+        E = self.client.events
+        self.assertEqual(E, [['modeChanged', 0]])
+
+    def testFlagChange(self):
+        flags = {
+            1: ['\\Answered', '\\Deleted'],
+            5: [],
+            10: ['\\Recent']
+        }
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def loggedIn():
+            self.server.flagsChanged(flags)
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(loggedIn)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestFlagChange, flags)
+
+    def _cbTestFlagChange(self, ignored, flags):
+        E = self.client.events
+        expect = [['flagsChanged', {x[0]: x[1]}] for x in flags.items()]
+        E.sort()
+        expect.sort()
+        self.assertEqual(E, expect)
+
+    def testNewMessages(self):
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def loggedIn():
+            self.server.newMessages(10, None)
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(loggedIn)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestNewMessages)
+
+    def _cbTestNewMessages(self, ignored):
+        E = self.client.events
+        self.assertEqual(E, [['newMessages', 10, None]])
+
+    def testNewRecentMessages(self):
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def loggedIn():
+            self.server.newMessages(None, 10)
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(loggedIn)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestNewRecentMessages)
+
+    def _cbTestNewRecentMessages(self, ignored):
+        E = self.client.events
+        self.assertEqual(E, [['newMessages', None, 10]])
+
+    def testNewMessagesAndRecent(self):
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def loggedIn():
+            self.server.newMessages(20, 10)
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(loggedIn)).addErrback(self._ebGeneral)
+        d = defer.gatherResults([self.loopback(), d1])
+        return d.addCallback(self._cbTestNewMessagesAndRecent)
+
+    def _cbTestNewMessagesAndRecent(self, ignored):
+        E = self.client.events
+        self.assertEqual(E, [['newMessages', 20, None], ['newMessages', None, 10]])
+
+
+class ClientCapabilityTests(unittest.TestCase):
+    """
+    Tests for issuance of the CAPABILITY command and handling of its response.
+    """
+    def setUp(self):
+        """
+        Create an L{imap4.IMAP4Client} connected to a L{StringTransport}.
+        """
+        self.transport = StringTransport()
+        self.protocol = imap4.IMAP4Client()
+        self.protocol.makeConnection(self.transport)
+        self.protocol.dataReceived('* OK [IMAP4rev1]\r\n')
+
+
+    def test_simpleAtoms(self):
+        """
+        A capability response consisting only of atoms without C{'='} in them
+        should result in a dict mapping those atoms to C{None}.
+        """
+        capabilitiesResult = self.protocol.getCapabilities(useCache=False)
+        self.protocol.dataReceived('* CAPABILITY IMAP4rev1 LOGINDISABLED\r\n')
+        self.protocol.dataReceived('0001 OK Capability completed.\r\n')
+        def gotCapabilities(capabilities):
+            self.assertEqual(
+                capabilities, {'IMAP4rev1': None, 'LOGINDISABLED': None})
+        capabilitiesResult.addCallback(gotCapabilities)
+        return capabilitiesResult
+
+
+    def test_categoryAtoms(self):
+        """
+        A capability response consisting of atoms including C{'='} should have
+        those atoms split on that byte and have capabilities in the same
+        category aggregated into lists in the resulting dictionary.
+
+        (n.b. - I made up the word "category atom"; the protocol has no notion
+        of structure here, but rather allows each capability to define the
+        semantics of its entry in the capability response in a freeform manner.
+        If I had realized this earlier, the API for capabilities would look
+        different.  As it is, we can hope that no one defines any crazy
+        semantics which are incompatible with this API, or try to figure out a
+        better API when someone does. -exarkun)
+        """
+        capabilitiesResult = self.protocol.getCapabilities(useCache=False)
+        self.protocol.dataReceived('* CAPABILITY IMAP4rev1 AUTH=LOGIN AUTH=PLAIN\r\n')
+        self.protocol.dataReceived('0001 OK Capability completed.\r\n')
+        def gotCapabilities(capabilities):
+            self.assertEqual(
+                capabilities, {'IMAP4rev1': None, 'AUTH': ['LOGIN', 'PLAIN']})
+        capabilitiesResult.addCallback(gotCapabilities)
+        return capabilitiesResult
+
+
+    def test_mixedAtoms(self):
+        """
+        A capability response consisting of both simple and category atoms of
+        the same type should result in a list containing C{None} as well as the
+        values for the category.
+        """
+        capabilitiesResult = self.protocol.getCapabilities(useCache=False)
+        # Exercise codepath for both orderings of =-having and =-missing
+        # capabilities.
+        self.protocol.dataReceived(
+            '* CAPABILITY IMAP4rev1 FOO FOO=BAR BAR=FOO BAR\r\n')
+        self.protocol.dataReceived('0001 OK Capability completed.\r\n')
+        def gotCapabilities(capabilities):
+            self.assertEqual(capabilities, {'IMAP4rev1': None,
+                                            'FOO': [None, 'BAR'],
+                                            'BAR': ['FOO', None]})
+        capabilitiesResult.addCallback(gotCapabilities)
+        return capabilitiesResult
+
+
+
+class StillSimplerClient(imap4.IMAP4Client):
+    """
+    An IMAP4 client which keeps track of unsolicited flag changes.
+    """
+    def __init__(self):
+        imap4.IMAP4Client.__init__(self)
+        self.flags = {}
+
+
+    def flagsChanged(self, newFlags):
+        self.flags.update(newFlags)
+
+
+
+class HandCraftedTestCase(IMAP4HelperMixin, unittest.TestCase):
+    def testTrailingLiteral(self):
+        transport = StringTransport()
+        c = imap4.IMAP4Client()
+        c.makeConnection(transport)
+        c.lineReceived('* OK [IMAP4rev1]')
+
+        def cbSelect(ignored):
+            d = c.fetchMessage('1')
+            c.dataReceived('* 1 FETCH (RFC822 {10}\r\n0123456789\r\n RFC822.SIZE 10)\r\n')
+            c.dataReceived('0003 OK FETCH\r\n')
+            return d
+
+        def cbLogin(ignored):
+            d = c.select('inbox')
+            c.lineReceived('0002 OK SELECT')
+            d.addCallback(cbSelect)
+            return d
+
+        d = c.login('blah', 'blah')
+        c.dataReceived('0001 OK LOGIN\r\n')
+        d.addCallback(cbLogin)
+        return d
+
+
+    def testPathologicalScatteringOfLiterals(self):
+        self.server.checker.addUser('testuser', 'password-test')
+        transport = StringTransport()
+        self.server.makeConnection(transport)
+
+        transport.clear()
+        self.server.dataReceived("01 LOGIN {8}\r\n")
+        self.assertEqual(transport.value(), "+ Ready for 8 octets of text\r\n")
+
+        transport.clear()
+        self.server.dataReceived("testuser {13}\r\n")
+        self.assertEqual(transport.value(), "+ Ready for 13 octets of text\r\n")
+
+        transport.clear()
+        self.server.dataReceived("password-test\r\n")
+        self.assertEqual(transport.value(), "01 OK LOGIN succeeded\r\n")
+        self.assertEqual(self.server.state, 'auth')
+
+        self.server.connectionLost(error.ConnectionDone("Connection done."))
+
+
+    def test_unsolicitedResponseMixedWithSolicitedResponse(self):
+        """
+        If unsolicited data is received along with solicited data in the
+        response to a I{FETCH} command issued by L{IMAP4Client.fetchSpecific},
+        the unsolicited data is passed to the appropriate callback and not
+        included in the result with which the L{Deferred} returned by
+        L{IMAP4Client.fetchSpecific} fires.
+        """
+        transport = StringTransport()
+        c = StillSimplerClient()
+        c.makeConnection(transport)
+        c.lineReceived('* OK [IMAP4rev1]')
+
+        def login():
+            d = c.login('blah', 'blah')
+            c.dataReceived('0001 OK LOGIN\r\n')
+            return d
+        def select():
+            d = c.select('inbox')
+            c.lineReceived('0002 OK SELECT')
+            return d
+        def fetch():
+            d = c.fetchSpecific('1:*',
+                headerType='HEADER.FIELDS',
+                headerArgs=['SUBJECT'])
+            c.dataReceived('* 1 FETCH (BODY[HEADER.FIELDS ("SUBJECT")] {38}\r\n')
+            c.dataReceived('Subject: Suprise for your woman...\r\n')
+            c.dataReceived('\r\n')
+            c.dataReceived(')\r\n')
+            c.dataReceived('* 1 FETCH (FLAGS (\Seen))\r\n')
+            c.dataReceived('* 2 FETCH (BODY[HEADER.FIELDS ("SUBJECT")] {75}\r\n')
+            c.dataReceived('Subject: What you been doing. Order your meds here . ,. handcuff madsen\r\n')
+            c.dataReceived('\r\n')
+            c.dataReceived(')\r\n')
+            c.dataReceived('0003 OK FETCH completed\r\n')
+            return d
+        def test(res):
+            self.assertEqual(res, {
+                1: [['BODY', ['HEADER.FIELDS', ['SUBJECT']],
+                    'Subject: Suprise for your woman...\r\n\r\n']],
+                2: [['BODY', ['HEADER.FIELDS', ['SUBJECT']],
+                    'Subject: What you been doing. Order your meds here . ,. handcuff madsen\r\n\r\n']]
+            })
+
+            self.assertEqual(c.flags, {1: ['\\Seen']})
+
+        return login(
+            ).addCallback(strip(select)
+            ).addCallback(strip(fetch)
+            ).addCallback(test)
+
+
+    def test_literalWithoutPrecedingWhitespace(self):
+        """
+        Literals should be recognized even when they are not preceded by
+        whitespace.
+        """
+        transport = StringTransport()
+        protocol = imap4.IMAP4Client()
+
+        protocol.makeConnection(transport)
+        protocol.lineReceived('* OK [IMAP4rev1]')
+
+        def login():
+            d = protocol.login('blah', 'blah')
+            protocol.dataReceived('0001 OK LOGIN\r\n')
+            return d
+        def select():
+            d = protocol.select('inbox')
+            protocol.lineReceived('0002 OK SELECT')
+            return d
+        def fetch():
+            d = protocol.fetchSpecific('1:*',
+                headerType='HEADER.FIELDS',
+                headerArgs=['SUBJECT'])
+            protocol.dataReceived(
+                '* 1 FETCH (BODY[HEADER.FIELDS ({7}\r\nSUBJECT)] "Hello")\r\n')
+            protocol.dataReceived('0003 OK FETCH completed\r\n')
+            return d
+        def test(result):
+            self.assertEqual(
+                result,  {1: [['BODY', ['HEADER.FIELDS', ['SUBJECT']], 'Hello']]})
+
+        d = login()
+        d.addCallback(strip(select))
+        d.addCallback(strip(fetch))
+        d.addCallback(test)
+        return d
+
+
+    def test_nonIntegerLiteralLength(self):
+        """
+        If the server sends a literal length which cannot be parsed as an
+        integer, L{IMAP4Client.lineReceived} should cause the protocol to be
+        disconnected by raising L{imap4.IllegalServerResponse}.
+        """
+        transport = StringTransport()
+        protocol = imap4.IMAP4Client()
+
+        protocol.makeConnection(transport)
+        protocol.lineReceived('* OK [IMAP4rev1]')
+
+        def login():
+            d = protocol.login('blah', 'blah')
+            protocol.dataReceived('0001 OK LOGIN\r\n')
+            return d
+        def select():
+            d = protocol.select('inbox')
+            protocol.lineReceived('0002 OK SELECT')
+            return d
+        def fetch():
+            d = protocol.fetchSpecific('1:*',
+                headerType='HEADER.FIELDS',
+                headerArgs=['SUBJECT'])
+            self.assertRaises(
+                imap4.IllegalServerResponse,
+                protocol.dataReceived,
+                '* 1 FETCH {xyz}\r\n...')
+        d = login()
+        d.addCallback(strip(select))
+        d.addCallback(strip(fetch))
+        return d
+
+
+    def test_flagsChangedInsideFetchSpecificResponse(self):
+        """
+        Any unrequested flag information received along with other requested
+        information in an untagged I{FETCH} received in response to a request
+        issued with L{IMAP4Client.fetchSpecific} is passed to the
+        C{flagsChanged} callback.
+        """
+        transport = StringTransport()
+        c = StillSimplerClient()
+        c.makeConnection(transport)
+        c.lineReceived('* OK [IMAP4rev1]')
+
+        def login():
+            d = c.login('blah', 'blah')
+            c.dataReceived('0001 OK LOGIN\r\n')
+            return d
+        def select():
+            d = c.select('inbox')
+            c.lineReceived('0002 OK SELECT')
+            return d
+        def fetch():
+            d = c.fetchSpecific('1:*',
+                headerType='HEADER.FIELDS',
+                headerArgs=['SUBJECT'])
+            # This response includes FLAGS after the requested data.
+            c.dataReceived('* 1 FETCH (BODY[HEADER.FIELDS ("SUBJECT")] {22}\r\n')
+            c.dataReceived('Subject: subject one\r\n')
+            c.dataReceived(' FLAGS (\\Recent))\r\n')
+            # And this one includes it before!  Either is possible.
+            c.dataReceived('* 2 FETCH (FLAGS (\\Seen) BODY[HEADER.FIELDS ("SUBJECT")] {22}\r\n')
+            c.dataReceived('Subject: subject two\r\n')
+            c.dataReceived(')\r\n')
+            c.dataReceived('0003 OK FETCH completed\r\n')
+            return d
+
+        def test(res):
+            self.assertEqual(res, {
+                1: [['BODY', ['HEADER.FIELDS', ['SUBJECT']],
+                    'Subject: subject one\r\n']],
+                2: [['BODY', ['HEADER.FIELDS', ['SUBJECT']],
+                    'Subject: subject two\r\n']]
+            })
+
+            self.assertEqual(c.flags, {1: ['\\Recent'], 2: ['\\Seen']})
+
+        return login(
+            ).addCallback(strip(select)
+            ).addCallback(strip(fetch)
+            ).addCallback(test)
+
+
+    def test_flagsChangedInsideFetchMessageResponse(self):
+        """
+        Any unrequested flag information received along with other requested
+        information in an untagged I{FETCH} received in response to a request
+        issued with L{IMAP4Client.fetchMessage} is passed to the
+        C{flagsChanged} callback.
+        """
+        transport = StringTransport()
+        c = StillSimplerClient()
+        c.makeConnection(transport)
+        c.lineReceived('* OK [IMAP4rev1]')
+
+        def login():
+            d = c.login('blah', 'blah')
+            c.dataReceived('0001 OK LOGIN\r\n')
+            return d
+        def select():
+            d = c.select('inbox')
+            c.lineReceived('0002 OK SELECT')
+            return d
+        def fetch():
+            d = c.fetchMessage('1:*')
+            c.dataReceived('* 1 FETCH (RFC822 {24}\r\n')
+            c.dataReceived('Subject: first subject\r\n')
+            c.dataReceived(' FLAGS (\Seen))\r\n')
+            c.dataReceived('* 2 FETCH (FLAGS (\Recent \Seen) RFC822 {25}\r\n')
+            c.dataReceived('Subject: second subject\r\n')
+            c.dataReceived(')\r\n')
+            c.dataReceived('0003 OK FETCH completed\r\n')
+            return d
+
+        def test(res):
+            self.assertEqual(res, {
+                1: {'RFC822': 'Subject: first subject\r\n'},
+                2: {'RFC822': 'Subject: second subject\r\n'}})
+
+            self.assertEqual(
+                c.flags, {1: ['\\Seen'], 2: ['\\Recent', '\\Seen']})
+
+        return login(
+            ).addCallback(strip(select)
+            ).addCallback(strip(fetch)
+            ).addCallback(test)
+
+
+    def test_authenticationChallengeDecodingException(self):
+        """
+        When decoding a base64 encoded authentication message from the server,
+        decoding errors are logged and then the client closes the connection.
+        """
+        transport = StringTransportWithDisconnection()
+        protocol = imap4.IMAP4Client()
+        transport.protocol = protocol
+
+        protocol.makeConnection(transport)
+        protocol.lineReceived(
+            '* OK [CAPABILITY IMAP4rev1 IDLE NAMESPACE AUTH=CRAM-MD5] '
+            'Twisted IMAP4rev1 Ready')
+        cAuth = imap4.CramMD5ClientAuthenticator('testuser')
+        protocol.registerAuthenticator(cAuth)
+
+        d = protocol.authenticate('secret')
+        # Should really be something describing the base64 decode error.  See
+        # #6021.
+        self.assertFailure(d, error.ConnectionDone)
+
+        protocol.dataReceived('+ Something bad! and bad\r\n')
+
+        # This should not really be logged.  See #6021.
+        logged = self.flushLoggedErrors(imap4.IllegalServerResponse)
+        self.assertEqual(len(logged), 1)
+        self.assertEqual(logged[0].value.args[0], "Something bad! and bad")
+        return d
+
+
+
+class PreauthIMAP4ClientMixin:
+    """
+    Mixin for L{unittest.TestCase} subclasses which provides a C{setUp} method
+    that creates an L{IMAP4Client} connected to a L{StringTransport} and puts
+    it into the I{authenticated} state.
+
+    @ivar transport: A L{StringTransport} to which C{client} is connected.
+    @ivar client: An L{IMAP4Client} which is connected to C{transport}.
+    """
+    clientProtocol = imap4.IMAP4Client
+
+    def setUp(self):
+        """
+        Create an IMAP4Client connected to a fake transport and in the
+        authenticated state.
+        """
+        self.transport = StringTransport()
+        self.client = self.clientProtocol()
+        self.client.makeConnection(self.transport)
+        self.client.dataReceived('* PREAUTH Hello unittest\r\n')
+
+
+    def _extractDeferredResult(self, d):
+        """
+        Synchronously extract the result of the given L{Deferred}.  Fail the
+        test if that is not possible.
+        """
+        result = []
+        error = []
+        d.addCallbacks(result.append, error.append)
+        if result:
+            return result[0]
+        elif error:
+            error[0].raiseException()
+        else:
+            self.fail("Expected result not available")
+
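+    # A minimal usage sketch (an illustration only, assuming the PREAUTH setUp
+    # above has run): deliver the server's bytes to the client first, then
+    # read the result synchronously, e.g.:
+    #
+    #     d = self.client.examine('foobox')
+    #     self.client.dataReceived('* 3 EXISTS\r\n')
+    #     self.client.dataReceived('0001 OK [READ-ONLY] EXAMINE completed\r\n')
+    #     self._extractDeferredResult(d)  # {'READ-WRITE': False, 'EXISTS': 3}
+    #
+    # _extractDeferredResult fails the test if the Deferred has not yet fired.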
+
+
+class SelectionTestsMixin(PreauthIMAP4ClientMixin):
+    """
+    Mixin for test cases which defines tests that apply to both I{EXAMINE} and
+    I{SELECT} support.
+    """
+    def _examineOrSelect(self):
+        """
+        Issue either an I{EXAMINE} or I{SELECT} command (depending on
+        C{self.method}), assert that the correct bytes are written to the
+        transport, and return the L{Deferred} returned by whichever method was
+        called.
+        """
+        d = getattr(self.client, self.method)('foobox')
+        self.assertEqual(
+            self.transport.value(), '0001 %s foobox\r\n' % (self.command,))
+        return d
+
+
+    def _response(self, *lines):
+        """
+        Deliver the given (unterminated) response lines to C{self.client} and
+        then deliver a tagged SELECT or EXAMINE completion line to finish the
+        SELECT or EXAMINE response.
+        """
+        for line in lines:
+            self.client.dataReceived(line + '\r\n')
+        self.client.dataReceived(
+            '0001 OK [READ-ONLY] %s completed\r\n' % (self.command,))
+
+
+    def test_exists(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes an
+        I{EXISTS} response, the L{Deferred} returned by L{IMAP4Client.select} or
+        L{IMAP4Client.examine} fires with a C{dict} including the value
+        associated with the C{'EXISTS'} key.
+        """
+        d = self._examineOrSelect()
+        self._response('* 3 EXISTS')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {'READ-WRITE': False, 'EXISTS': 3})
+
+
+    def test_nonIntegerExists(self):
+        """
+        If the server returns a non-integer EXISTS value in its response to a
+        I{SELECT} or I{EXAMINE} command, the L{Deferred} returned by
+        L{IMAP4Client.select} or L{IMAP4Client.examine} fails with
+        L{IllegalServerResponse}.
+        """
+        d = self._examineOrSelect()
+        self._response('* foo EXISTS')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_recent(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes a
+        I{RECENT} response, the L{Deferred} returned by L{IMAP4Client.select} or
+        L{IMAP4Client.examine} fires with a C{dict} including the value
+        associated with the C{'RECENT'} key.
+        """
+        d = self._examineOrSelect()
+        self._response('* 5 RECENT')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {'READ-WRITE': False, 'RECENT': 5})
+
+
+    def test_nonIntegerRecent(self):
+        """
+        If the server returns a non-integer RECENT value in its response to a
+        I{SELECT} or I{EXAMINE} command, the L{Deferred} returned by
+        L{IMAP4Client.select} or L{IMAP4Client.examine} fails with
+        L{IllegalServerResponse}.
+        """
+        d = self._examineOrSelect()
+        self._response('* foo RECENT')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_unseen(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes an
+        I{UNSEEN} response, the L{Deferred} returned by L{IMAP4Client.select} or
+        L{IMAP4Client.examine} fires with a C{dict} including the value
+        associated with the C{'UNSEEN'} key.
+        """
+        d = self._examineOrSelect()
+        self._response('* OK [UNSEEN 8] Message 8 is first unseen')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {'READ-WRITE': False, 'UNSEEN': 8})
+
+
+    def test_nonIntegerUnseen(self):
+        """
+        If the server returns a non-integer UNSEEN value in its response to a
+        I{SELECT} or I{EXAMINE} command, the L{Deferred} returned by
+        L{IMAP4Client.select} or L{IMAP4Client.examine} fails with
+        L{IllegalServerResponse}.
+        """
+        d = self._examineOrSelect()
+        self._response('* OK [UNSEEN foo] Message foo is first unseen')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_uidvalidity(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes an
+        I{UIDVALIDITY} response, the L{Deferred} returned by
+        L{IMAP4Client.select} or L{IMAP4Client.examine} fires with a C{dict}
+        including the value associated with the C{'UIDVALIDITY'} key.
+        """
+        d = self._examineOrSelect()
+        self._response('* OK [UIDVALIDITY 12345] UIDs valid')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {'READ-WRITE': False, 'UIDVALIDITY': 12345})
+
+
+    def test_nonIntegerUIDVALIDITY(self):
+        """
+        If the server returns a non-integer UIDVALIDITY value in its response to
+        a I{SELECT} or I{EXAMINE} command, the L{Deferred} returned by
+        L{IMAP4Client.select} or L{IMAP4Client.examine} fails with
+        L{IllegalServerResponse}.
+        """
+        d = self._examineOrSelect()
+        self._response('* OK [UIDVALIDITY foo] UIDs valid')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_uidnext(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes an
+        I{UIDNEXT} response, the L{Deferred} returned by L{IMAP4Client.select}
+        or L{IMAP4Client.examine} fires with a C{dict} including the value
+        associated with the C{'UIDNEXT'} key.
+        """
+        d = self._examineOrSelect()
+        self._response('* OK [UIDNEXT 4392] Predicted next UID')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {'READ-WRITE': False, 'UIDNEXT': 4392})
+
+
+    def test_nonIntegerUIDNEXT(self):
+        """
+        If the server returns a non-integer UIDNEXT value in its response to a
+        I{SELECT} or I{EXAMINE} command, the L{Deferred} returned by
+        L{IMAP4Client.select} or L{IMAP4Client.examine} fails with
+        L{IllegalServerResponse}.
+        """
+        d = self._examineOrSelect()
+        self._response('* OK [UIDNEXT foo] Predicted next UID')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_flags(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes an
+        I{FLAGS} response, the L{Deferred} returned by L{IMAP4Client.select} or
+        L{IMAP4Client.examine} fires with a C{dict} including the value
+        associated with the C{'FLAGS'} key.
+        """
+        d = self._examineOrSelect()
+        self._response(
+            '* FLAGS (\\Answered \\Flagged \\Deleted \\Seen \\Draft)')
+        self.assertEqual(
+            self._extractDeferredResult(d), {
+                'READ-WRITE': False,
+                'FLAGS': ('\\Answered', '\\Flagged', '\\Deleted', '\\Seen',
+                          '\\Draft')})
+
+
+    def test_permanentflags(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes a
+        I{PERMANENTFLAGS} response, the L{Deferred} returned by
+        L{IMAP4Client.select} or L{IMAP4Client.examine} fires with a C{dict}
+        including the value associated with the C{'PERMANENTFLAGS'} key.
+        """
+        d = self._examineOrSelect()
+        self._response(
+            '* OK [PERMANENTFLAGS (\\Starred)] Just one permanent flag in '
+            'that list up there')
+        self.assertEqual(
+            self._extractDeferredResult(d), {
+                'READ-WRITE': False,
+                'PERMANENTFLAGS': ('\\Starred',)})
+
+
+    def test_unrecognizedOk(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes an
+        I{OK} with unrecognized response code text, parsing does not fail.
+        """
+        d = self._examineOrSelect()
+        self._response(
+            '* OK [X-MADE-UP] I just made this response text up.')
+        # The value won't show up in the result.  It would be okay if it did
+        # someday, perhaps.  This shouldn't ever happen, though.
+        self.assertEqual(
+            self._extractDeferredResult(d), {'READ-WRITE': False})
+
+
+    def test_bareOk(self):
+        """
+        If the server response to a I{SELECT} or I{EXAMINE} command includes an
+        I{OK} with no response code text, parsing does not fail.
+        """
+        d = self._examineOrSelect()
+        self._response('* OK')
+        self.assertEqual(
+            self._extractDeferredResult(d), {'READ-WRITE': False})
+
+
+
+class IMAP4ClientExamineTests(SelectionTestsMixin, unittest.TestCase):
+    """
+    Tests for the L{IMAP4Client.examine} method.
+
+    An example of usage of the EXAMINE command from RFC 3501, section 6.3.2::
+
+        S: * 17 EXISTS
+        S: * 2 RECENT
+        S: * OK [UNSEEN 8] Message 8 is first unseen
+        S: * OK [UIDVALIDITY 3857529045] UIDs valid
+        S: * OK [UIDNEXT 4392] Predicted next UID
+        S: * FLAGS (\\Answered \\Flagged \\Deleted \\Seen \\Draft)
+        S: * OK [PERMANENTFLAGS ()] No permanent flags permitted
+        S: A932 OK [READ-ONLY] EXAMINE completed
+    """
+    method = 'examine'
+    command = 'EXAMINE'
+
+
+
+class IMAP4ClientSelectTests(SelectionTestsMixin, unittest.TestCase):
+    """
+    Tests for the L{IMAP4Client.select} method.
+
+    An example of usage of the SELECT command from RFC 3501, section 6.3.1::
+
+        C: A142 SELECT INBOX
+        S: * 172 EXISTS
+        S: * 1 RECENT
+        S: * OK [UNSEEN 12] Message 12 is first unseen
+        S: * OK [UIDVALIDITY 3857529045] UIDs valid
+        S: * OK [UIDNEXT 4392] Predicted next UID
+        S: * FLAGS (\Answered \Flagged \Deleted \Seen \Draft)
+        S: * OK [PERMANENTFLAGS (\Deleted \Seen \*)] Limited
+        S: A142 OK [READ-WRITE] SELECT completed
+    """
+    method = 'select'
+    command = 'SELECT'
+
+
+
+class IMAP4ClientExpungeTests(PreauthIMAP4ClientMixin, unittest.TestCase):
+    """
+    Tests for the L{IMAP4Client.expunge} method.
+
+    An example of usage of the EXPUNGE command from RFC 3501, section 6.4.3::
+
+        C: A202 EXPUNGE
+        S: * 3 EXPUNGE
+        S: * 3 EXPUNGE
+        S: * 5 EXPUNGE
+        S: * 8 EXPUNGE
+        S: A202 OK EXPUNGE completed
+    """
+    def _expunge(self):
+        d = self.client.expunge()
+        self.assertEqual(self.transport.value(), '0001 EXPUNGE\r\n')
+        self.transport.clear()
+        return d
+
+
+    def _response(self, sequenceNumbers):
+        for number in sequenceNumbers:
+            self.client.lineReceived('* %s EXPUNGE' % (number,))
+        self.client.lineReceived('0001 OK EXPUNGE COMPLETED')
+
+
+    def test_expunge(self):
+        """
+        L{IMAP4Client.expunge} sends the I{EXPUNGE} command and returns a
+        L{Deferred} which fires with a C{list} of message sequence numbers
+        given by the server's response.
+        """
+        d = self._expunge()
+        self._response([3, 3, 5, 8])
+        self.assertEqual(self._extractDeferredResult(d), [3, 3, 5, 8])
+
+
+    def test_nonIntegerExpunged(self):
+        """
+        If the server responds with a non-integer where a message sequence
+        number is expected, the L{Deferred} returned by L{IMAP4Client.expunge}
+        fails with L{IllegalServerResponse}.
+        """
+        d = self._expunge()
+        self._response([3, 3, 'foo', 8])
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+
+class IMAP4ClientSearchTests(PreauthIMAP4ClientMixin, unittest.TestCase):
+    """
+    Tests for the L{IMAP4Client.search} method.
+
+    An example of usage of the SEARCH command from RFC 3501, section 6.4.4::
+
+        C: A282 SEARCH FLAGGED SINCE 1-Feb-1994 NOT FROM "Smith"
+        S: * SEARCH 2 84 882
+        S: A282 OK SEARCH completed
+        C: A283 SEARCH TEXT "string not in mailbox"
+        S: * SEARCH
+        S: A283 OK SEARCH completed
+        C: A284 SEARCH CHARSET UTF-8 TEXT {6}
+        C: XXXXXX
+        S: * SEARCH 43
+        S: A284 OK SEARCH completed
+    """
+    def _search(self):
+        d = self.client.search(imap4.Query(text="ABCDEF"))
+        self.assertEqual(
+            self.transport.value(), '0001 SEARCH (TEXT "ABCDEF")\r\n')
+        return d
+
+
+    def _response(self, messageNumbers):
+        self.client.lineReceived(
+            "* SEARCH " + " ".join(map(str, messageNumbers)))
+        self.client.lineReceived("0001 OK SEARCH completed")
+
+
+    def test_search(self):
+        """
+        L{IMAP4Client.search} sends the I{SEARCH} command and returns a
+        L{Deferred} which fires with a C{list} of message sequence numbers
+        given by the server's response.
+        """
+        d = self._search()
+        self._response([2, 5, 10])
+        self.assertEqual(self._extractDeferredResult(d), [2, 5, 10])
+
+
+    def test_nonIntegerFound(self):
+        """
+        If the server responds with a non-integer where a message sequence
+        number is expected, the L{Deferred} returned by L{IMAP4Client.search}
+        fails with L{IllegalServerResponse}.
+        """
+        d = self._search()
+        self._response([2, "foo", 10])
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+
+class IMAP4ClientFetchTests(PreauthIMAP4ClientMixin, unittest.TestCase):
+    """
+    Tests for the L{IMAP4Client.fetch} method.
+
+    See RFC 3501, section 6.4.5.
+    """
+    def test_fetchUID(self):
+        """
+        L{IMAP4Client.fetchUID} sends the I{FETCH UID} command and returns a
+        L{Deferred} which fires with a C{dict} mapping message sequence numbers
+        to C{dict}s mapping C{'UID'} to that message's I{UID} in the server's
+        response.
+        """
+        d = self.client.fetchUID('1:7')
+        self.assertEqual(self.transport.value(), '0001 FETCH 1:7 (UID)\r\n')
+        self.client.lineReceived('* 2 FETCH (UID 22)')
+        self.client.lineReceived('* 3 FETCH (UID 23)')
+        self.client.lineReceived('* 4 FETCH (UID 24)')
+        self.client.lineReceived('* 5 FETCH (UID 25)')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d), {
+                2: {'UID': '22'},
+                3: {'UID': '23'},
+                4: {'UID': '24'},
+                5: {'UID': '25'}})
+
+
+    def test_fetchUIDNonIntegerFound(self):
+        """
+        If the server responds with a non-integer where a message sequence
+        number is expected, the L{Deferred} returned by L{IMAP4Client.fetchUID}
+        fails with L{IllegalServerResponse}.
+        """
+        d = self.client.fetchUID('1')
+        self.assertEqual(self.transport.value(), '0001 FETCH 1 (UID)\r\n')
+        self.client.lineReceived('* foo FETCH (UID 22)')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_incompleteFetchUIDResponse(self):
+        """
+        If the server responds with an incomplete I{FETCH} response line, the
+        L{Deferred} returned by L{IMAP4Client.fetchUID} fails with
+        L{IllegalServerResponse}.
+        """
+        d = self.client.fetchUID('1:7')
+        self.assertEqual(self.transport.value(), '0001 FETCH 1:7 (UID)\r\n')
+        self.client.lineReceived('* 2 FETCH (UID 22)')
+        self.client.lineReceived('* 3 FETCH (UID)')
+        self.client.lineReceived('* 4 FETCH (UID 24)')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_fetchBody(self):
+        """
+        L{IMAP4Client.fetchBody} sends the I{FETCH BODY} command and returns a
+        L{Deferred} which fires with a C{dict} mapping message sequence numbers
+        to C{dict}s mapping C{'RFC822.TEXT'} to that message's body as given in
+        the server's response.
+        """
+        d = self.client.fetchBody('3')
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 3 (RFC822.TEXT)\r\n')
+        self.client.lineReceived('* 3 FETCH (RFC822.TEXT "Message text")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {3: {'RFC822.TEXT': 'Message text'}})
+
+
+    def test_fetchSpecific(self):
+        """
+        L{IMAP4Client.fetchSpecific} sends the I{BODY[]} command if no
+        parameters beyond the message set to retrieve are given.  It returns a
+        L{Deferred} which fires with a C{dict} mapping message sequence numbers
+        to C{list}s of corresponding message data given by the server's
+        response.
+        """
+        d = self.client.fetchSpecific('7')
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 7 BODY[]\r\n')
+        self.client.lineReceived('* 7 FETCH (BODY[] "Some body")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d), {7: [['BODY', [], "Some body"]]})
+
+
+    def test_fetchSpecificPeek(self):
+        """
+        L{IMAP4Client.fetchSpecific} issues a I{BODY.PEEK[]} command if passed
+        C{True} for the C{peek} parameter.
+        """
+        d = self.client.fetchSpecific('6', peek=True)
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 6 BODY.PEEK[]\r\n')
+        # BODY.PEEK responses are just BODY
+        self.client.lineReceived('* 6 FETCH (BODY[] "Some body")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d), {6: [['BODY', [], "Some body"]]})
+
+
+    def test_fetchSpecificNumbered(self):
+        """
+        L{IMAP4Client.fetchSpecific}, when passed a sequence for
+        C{headerNumber}, sends the I{BODY[N.M]} command.  It returns a
+        L{Deferred} which fires with a C{dict} mapping message sequence numbers
+        to C{list}s of corresponding message data given by the server's
+        response.
+        """
+        d = self.client.fetchSpecific('7', headerNumber=(1, 2, 3))
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 7 BODY[1.2.3]\r\n')
+        self.client.lineReceived('* 7 FETCH (BODY[1.2.3] "Some body")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {7: [['BODY', ['1.2.3'], "Some body"]]})
+
+
+    def test_fetchSpecificText(self):
+        """
+        L{IMAP4Client.fetchSpecific}, when passed C{'TEXT'} for C{headerType},
+        sends the I{BODY[TEXT]} command.  It returns a L{Deferred} which fires
+        with a C{dict} mapping message sequence numbers to C{list}s of
+        corresponding message data given by the server's response.
+        """
+        d = self.client.fetchSpecific('8', headerType='TEXT')
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 8 BODY[TEXT]\r\n')
+        self.client.lineReceived('* 8 FETCH (BODY[TEXT] "Some body")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {8: [['BODY', ['TEXT'], "Some body"]]})
+
+
+    def test_fetchSpecificNumberedText(self):
+        """
+        If passed a value for the C{headerNumber} parameter and C{'TEXT'} for
+        the C{headerType} parameter, L{IMAP4Client.fetchSpecific} sends a
+        I{BODY[number.TEXT]} request and returns a L{Deferred} which fires with
+        a C{dict} mapping message sequence numbers to C{list}s of message data
+        given by the server's response.
+        """
+        d = self.client.fetchSpecific('4', headerType='TEXT', headerNumber=7)
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 4 BODY[7.TEXT]\r\n')
+        self.client.lineReceived('* 4 FETCH (BODY[7.TEXT] "Some body")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {4: [['BODY', ['7.TEXT'], "Some body"]]})
+
+
+    def test_incompleteFetchSpecificTextResponse(self):
+        """
+        If the server responds to a I{BODY[TEXT]} request with a I{FETCH} line
+        which is truncated after the I{BODY[TEXT]} tokens, the L{Deferred}
+        returned by L{IMAP4Client.fetchSpecific} fails with
+        L{IllegalServerResponse}.
+        """
+        d = self.client.fetchSpecific('8', headerType='TEXT')
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 8 BODY[TEXT]\r\n')
+        self.client.lineReceived('* 8 FETCH (BODY[TEXT])')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_fetchSpecificMIME(self):
+        """
+        L{IMAP4Client.fetchSpecific}, when passed C{'MIME'} for C{headerType},
+        sends the I{BODY[MIME]} command.  It returns a L{Deferred} which fires
+        with a C{dict} mapping message sequence numbers to C{list}s of
+        corresponding message data given by the server's response.
+        """
+        d = self.client.fetchSpecific('8', headerType='MIME')
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 8 BODY[MIME]\r\n')
+        self.client.lineReceived('* 8 FETCH (BODY[MIME] "Some body")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {8: [['BODY', ['MIME'], "Some body"]]})
+
+
+    def test_fetchSpecificPartial(self):
+        """
+        L{IMAP4Client.fetchSpecific}, when passed C{offset} and C{length},
+        sends a partial content request (like I{BODY[TEXT]<offset.length>}).
+        It returns a L{Deferred} which fires with a C{dict} mapping message
+        sequence numbers to C{list}s of corresponding message data given by the
+        server's response.
+        """
+        d = self.client.fetchSpecific(
+            '9', headerType='TEXT', offset=17, length=3)
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 9 BODY[TEXT]<17.3>\r\n')
+        self.client.lineReceived('* 9 FETCH (BODY[TEXT]<17> "foo")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {9: [['BODY', ['TEXT'], '<17>', 'foo']]})
+
+
+    def test_incompleteFetchSpecificPartialResponse(self):
+        """
+        If the server responds to a I{BODY[TEXT]} request with a I{FETCH} line
+        which is truncated after the I{BODY[TEXT]<offset>} tokens, the
+        L{Deferred} returned by L{IMAP4Client.fetchSpecific} fails with
+        L{IllegalServerResponse}.
+        """
+        d = self.client.fetchSpecific('8', headerType='TEXT')
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 8 BODY[TEXT]\r\n')
+        self.client.lineReceived('* 8 FETCH (BODY[TEXT]<17>)')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertRaises(
+            imap4.IllegalServerResponse, self._extractDeferredResult, d)
+
+
+    def test_fetchSpecificHTML(self):
+        """
+        If the body of a message begins with I{<} and ends with I{>} (as,
+        for example, HTML bodies typically will), this is still interpreted
+        as the body by L{IMAP4Client.fetchSpecific} (and particularly, not
+        as a length indicator for a response to a request for a partial
+        body).
+        """
+        d = self.client.fetchSpecific('7')
+        self.assertEqual(
+            self.transport.value(), '0001 FETCH 7 BODY[]\r\n')
+        self.client.lineReceived('* 7 FETCH (BODY[] "<html>test</html>")')
+        self.client.lineReceived('0001 OK FETCH completed')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {7: [['BODY', [], "<html>test</html>"]]})
+
+
+
+class IMAP4ClientStoreTests(PreauthIMAP4ClientMixin, unittest.TestCase):
+    """
+    Tests for the L{IMAP4Client.setFlags}, L{IMAP4Client.addFlags}, and
+    L{IMAP4Client.removeFlags} methods.
+
+    An example of usage of the STORE command, in terms of which these three
+    methods are implemented, from RFC 3501, section 6.4.6::
+
+        C: A003 STORE 2:4 +FLAGS (\Deleted)
+        S: * 2 FETCH (FLAGS (\Deleted \Seen))
+        S: * 3 FETCH (FLAGS (\Deleted))
+        S: * 4 FETCH (FLAGS (\Deleted \Flagged \Seen))
+        S: A003 OK STORE completed
+    """
+    clientProtocol = StillSimplerClient
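+
+    # Reading aid, mirroring the tests below rather than specifying new
+    # behaviour: setFlags, addFlags and removeFlags use the FLAGS, +FLAGS and
+    # -FLAGS data items respectively, and a true value for the silent argument
+    # selects the corresponding .SILENT variant, e.g.:
+    #
+    #     d = self.client.addFlags('3', ('\\Seen',), silent=True)
+    #     # first command on a fresh connection writes:
+    #     #     0001 STORE 3 +FLAGS.SILENT (\Seen)\r\n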
+
+    def _flagsTest(self, method, item):
+        """
+        Test a non-silent flag modifying method.  Call the method, assert that
+        the correct bytes are sent, deliver a I{FETCH} response, and assert
+        that the result of the Deferred returned by the method is correct.
+
+        @param method: The name of the method to test.
+        @param item: The data item which is expected to be specified.
+        """
+        d = getattr(self.client, method)('3', ('\\Read', '\\Seen'), False)
+        self.assertEqual(
+            self.transport.value(),
+            '0001 STORE 3 ' + item + ' (\\Read \\Seen)\r\n')
+        self.client.lineReceived('* 3 FETCH (FLAGS (\\Read \\Seen))')
+        self.client.lineReceived('0001 OK STORE completed')
+        self.assertEqual(
+            self._extractDeferredResult(d),
+            {3: {'FLAGS': ['\\Read', '\\Seen']}})
+
+
+    def _flagsSilentlyTest(self, method, item):
+        """
+        Test a silent flag modifying method.  Call the method, assert that the
+        correct bytes are sent, deliver an I{OK} response, and assert that the
+        result of the Deferred returned by the method is correct.
+
+        @param method: The name of the method to test.
+        @param item: The data item which is expected to be specified.
+        """
+        d = getattr(self.client, method)('3', ('\\Read', '\\Seen'), True)
+        self.assertEqual(
+            self.transport.value(),
+            '0001 STORE 3 ' + item + ' (\\Read \\Seen)\r\n')
+        self.client.lineReceived('0001 OK STORE completed')
+        self.assertEqual(self._extractDeferredResult(d), {})
+
+
+    def _flagsSilentlyWithUnsolicitedDataTest(self, method, item):
+        """
+        Test unsolicited data received in response to a silent flag modifying
+        method.  Call the method, assert that the correct bytes are sent,
+        deliver the unsolicited I{FETCH} response, and assert that the result
+        of the Deferred returned by the method is correct.
+
+        @param method: The name of the method to test.
+        @param item: The data item which is expected to be specified.
+        """
+        d = getattr(self.client, method)('3', ('\\Read', '\\Seen'), True)
+        self.assertEqual(
+            self.transport.value(),
+            '0001 STORE 3 ' + item + ' (\\Read \\Seen)\r\n')
+        self.client.lineReceived('* 2 FETCH (FLAGS (\\Read \\Seen))')
+        self.client.lineReceived('0001 OK STORE completed')
+        self.assertEqual(self._extractDeferredResult(d), {})
+        self.assertEqual(self.client.flags, {2: ['\\Read', '\\Seen']})
+
+
+    def test_setFlags(self):
+        """
+        When passed a C{False} value for the C{silent} parameter,
+        L{IMAP4Client.setFlags} sends the I{STORE} command with a I{FLAGS} data
+        item and returns a L{Deferred} which fires with a C{dict} mapping
+        message sequence numbers to C{dict}s mapping C{'FLAGS'} to the new
+        flags of those messages.
+        """
+        self._flagsTest('setFlags', 'FLAGS')
+
+
+    def test_setFlagsSilently(self):
+        """
+        When passed a C{True} value for the C{silent} parameter,
+        L{IMAP4Client.setFlags} sends the I{STORE} command with a
+        I{FLAGS.SILENT} data item and returns a L{Deferred} which fires with an
+        empty dictionary.
+        """
+        self._flagsSilentlyTest('setFlags', 'FLAGS.SILENT')
+
+
+    def test_setFlagsSilentlyWithUnsolicitedData(self):
+        """
+        If unsolicited flag data is received in response to a I{STORE}
+        I{FLAGS.SILENT} request, that data is passed to the C{flagsChanged}
+        callback.
+        """
+        self._flagsSilentlyWithUnsolicitedDataTest('setFlags', 'FLAGS.SILENT')
+
+
+    def test_addFlags(self):
+        """
+        L{IMAP4Client.addFlags} is like L{IMAP4Client.setFlags}, but sends
+        I{+FLAGS} instead of I{FLAGS}.
+        """
+        self._flagsTest('addFlags', '+FLAGS')
+
+
+    def test_addFlagsSilently(self):
+        """
+        L{IMAP4Client.addFlags} with a C{True} value for C{silent} behaves like
+        L{IMAP4Client.setFlags} with a C{True} value for C{silent}, but it
+        sends I{+FLAGS.SILENT} instead of I{FLAGS.SILENT}.
+        """
+        self._flagsSilentlyTest('addFlags', '+FLAGS.SILENT')
+
+
+    def test_addFlagsSilentlyWithUnsolicitedData(self):
+        """
+        L{IMAP4Client.addFlags} behaves like L{IMAP4Client.setFlags} when used
+        in silent mode and unsolicited data is received.
+        """
+        self._flagsSilentlyWithUnsolicitedDataTest('addFlags', '+FLAGS.SILENT')
+
+
+    def test_removeFlags(self):
+        """
+        L{IMAP4Client.removeFlags} is like L{IMAP4Client.setFlags}, but sends
+        I{-FLAGS} instead of I{FLAGS}.
+        """
+        self._flagsTest('removeFlags', '-FLAGS')
+
+
+    def test_removeFlagsSilently(self):
+        """
+        L{IMAP4Client.removeFlags} with a C{True} value for C{silent} behaves
+        like L{IMAP4Client.setFlags} with a C{True} value for C{silent}, but it
+        sends I{-FLAGS.SILENT} instead of I{FLAGS.SILENT}.
+        """
+        self._flagsSilentlyTest('removeFlags', '-FLAGS.SILENT')
+
+
+    def test_removeFlagsSilentlyWithUnsolicitedData(self):
+        """
+        L{IMAP4Client.removeFlags} behaves like L{IMAP4Client.setFlags} when
+        used in silent mode and unsolicited data is received.
+        """
+        self._flagsSilentlyWithUnsolicitedDataTest(
+            'removeFlags', '-FLAGS.SILENT')
+
+
+
+class FakeyServer(imap4.IMAP4Server):
+    state = 'select'
+    timeout = None
+
+    def sendServerGreeting(self):
+        pass
+
+class FakeyMessage(util.FancyStrMixin):
+    implements(imap4.IMessage)
+
+    showAttributes = ('headers', 'flags', 'date', 'body', 'uid')
+
+    def __init__(self, headers, flags, date, body, uid, subpart):
+        self.headers = headers
+        self.flags = flags
+        self._body = body
+        self.size = len(body)
+        self.date = date
+        self.uid = uid
+        self.subpart = subpart
+
+    def getHeaders(self, negate, *names):
+        self.got_headers = negate, names
+        return self.headers
+
+    def getFlags(self):
+        return self.flags
+
+    def getInternalDate(self):
+        return self.date
+
+    def getBodyFile(self):
+        return StringIO(self._body)
+
+    def getSize(self):
+        return self.size
+
+    def getUID(self):
+        return self.uid
+
+    def isMultipart(self):
+        return self.subpart is not None
+
+    def getSubPart(self, part):
+        self.got_subpart = part
+        return self.subpart[part]
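+
+# Reading aid for the fetch and body-structure tests below: FakeyMessage takes
+# its arguments positionally as (headers, flags, date, body, uid, subpart).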
+
+class NewStoreTestCase(unittest.TestCase, IMAP4HelperMixin):
+    result = None
+    storeArgs = None
+
+    def setUp(self):
+        self.received_messages = self.received_uid = None
+
+        self.server = imap4.IMAP4Server()
+        self.server.state = 'select'
+        self.server.mbox = self
+        self.connected = defer.Deferred()
+        self.client = SimpleClient(self.connected)
+
+    def addListener(self, x):
+        pass
+    def removeListener(self, x):
+        pass
+
+    def store(self, *args, **kw):
+        self.storeArgs = args, kw
+        return self.response
+
+    def _storeWork(self):
+        def connected():
+            return self.function(self.messages, self.flags, self.silent, self.uid)
+        def result(R):
+            self.result = R
+
+        self.connected.addCallback(strip(connected)
+        ).addCallback(result
+        ).addCallback(self._cbStopClient
+        ).addErrback(self._ebGeneral)
+
+        def check(ignored):
+            self.assertEqual(self.result, self.expected)
+            self.assertEqual(self.storeArgs, self.expectedArgs)
+        d = loopback.loopbackTCP(self.server, self.client, noisy=False)
+        d.addCallback(check)
+        return d
+
+    def testSetFlags(self, uid=0):
+        self.function = self.client.setFlags
+        self.messages = '1,5,9'
+        self.flags = ['\\A', '\\B', 'C']
+        self.silent = False
+        self.uid = uid
+        self.response = {
+            1: ['\\A', '\\B', 'C'],
+            5: ['\\A', '\\B', 'C'],
+            9: ['\\A', '\\B', 'C'],
+        }
+        self.expected = {
+            1: {'FLAGS': ['\\A', '\\B', 'C']},
+            5: {'FLAGS': ['\\A', '\\B', 'C']},
+            9: {'FLAGS': ['\\A', '\\B', 'C']},
+        }
+        msg = imap4.MessageSet()
+        msg.add(1)
+        msg.add(5)
+        msg.add(9)
+        self.expectedArgs = ((msg, ['\\A', '\\B', 'C'], 0), {'uid': 0})
+        return self._storeWork()
+
+
+
+class GetBodyStructureTests(unittest.TestCase):
+    """
+    Tests for L{imap4.getBodyStructure}, a helper for constructing a list which
+    directly corresponds to the wire information needed for a I{BODY} or
+    I{BODYSTRUCTURE} response.
+    """
+    def test_singlePart(self):
+        """
+        L{imap4.getBodyStructure} accepts a L{IMessagePart} provider and returns
+        a list giving the basic fields for the I{BODY} response for that
+        message.
+        """
+        body = 'hello, world'
+        major = 'image'
+        minor = 'jpeg'
+        charset = 'us-ascii'
+        identifier = 'some kind of id'
+        description = 'great justice'
+        encoding = 'maximum'
+        msg = FakeyMessage({
+                'content-type': '%s/%s; charset=%s; x=y' % (
+                    major, minor, charset),
+                'content-id': identifier,
+                'content-description': description,
+                'content-transfer-encoding': encoding,
+                }, (), '', body, 123, None)
+        structure = imap4.getBodyStructure(msg)
+        self.assertEqual(
+            [major, minor, ["charset", charset, 'x', 'y'], identifier,
+             description, encoding, len(body)],
+            structure)
+
+
+    def test_singlePartExtended(self):
+        """
+        L{imap4.getBodyStructure} returns a list giving the basic and extended
+        fields for a I{BODYSTRUCTURE} response if passed C{True} for the
+        C{extended} parameter.
+        """
+        body = 'hello, world'
+        major = 'image'
+        minor = 'jpeg'
+        charset = 'us-ascii'
+        identifier = 'some kind of id'
+        description = 'great justice'
+        encoding = 'maximum'
+        md5 = 'abcdefabcdef'
+        msg = FakeyMessage({
+                'content-type': '%s/%s; charset=%s; x=y' % (
+                    major, minor, charset),
+                'content-id': identifier,
+                'content-description': description,
+                'content-transfer-encoding': encoding,
+                'content-md5': md5,
+                'content-disposition': 'attachment; name=foo; size=bar',
+                'content-language': 'fr',
+                'content-location': 'France',
+                }, (), '', body, 123, None)
+        structure = imap4.getBodyStructure(msg, extended=True)
+        self.assertEqual(
+            [major, minor, ["charset", charset, 'x', 'y'], identifier,
+             description, encoding, len(body), md5,
+             ['attachment', ['name', 'foo', 'size', 'bar']], 'fr', 'France'],
+            structure)
+
+
+    def test_singlePartWithMissing(self):
+        """
+        For fields with no information contained in the message headers,
+        L{imap4.getBodyStructure} fills in C{None} values in its result.
+        """
+        major = 'image'
+        minor = 'jpeg'
+        body = 'hello, world'
+        msg = FakeyMessage({
+                'content-type': '%s/%s' % (major, minor),
+                }, (), '', body, 123, None)
+        structure = imap4.getBodyStructure(msg, extended=True)
+        self.assertEqual(
+            [major, minor, None, None, None, None, len(body), None, None,
+             None, None],
+            structure)
+
+
+    def test_textPart(self):
+        """
+        For a I{text/*} message, the number of lines in the message body is
+        included after the common single-part basic fields.
+        """
+        body = 'hello, world\nhow are you?\ngoodbye\n'
+        major = 'text'
+        minor = 'jpeg'
+        charset = 'us-ascii'
+        identifier = 'some kind of id'
+        description = 'great justice'
+        encoding = 'maximum'
+        msg = FakeyMessage({
+                'content-type': '%s/%s; charset=%s; x=y' % (
+                    major, minor, charset),
+                'content-id': identifier,
+                'content-description': description,
+                'content-transfer-encoding': encoding,
+                }, (), '', body, 123, None)
+        structure = imap4.getBodyStructure(msg)
+        self.assertEqual(
+            [major, minor, ["charset", charset, 'x', 'y'], identifier,
+             description, encoding, len(body), len(body.splitlines())],
+            structure)
+
+
+    def test_rfc822Message(self):
+        """
+        For a I{message/rfc822} message, the common basic fields are followed
+        by information about the contained message.
+        """
+        body = 'hello, world\nhow are you?\ngoodbye\n'
+        major = 'text'
+        minor = 'jpeg'
+        charset = 'us-ascii'
+        identifier = 'some kind of id'
+        description = 'great justice'
+        encoding = 'maximum'
+        msg = FakeyMessage({
+                'content-type': '%s/%s; charset=%s; x=y' % (
+                    major, minor, charset),
+                'from': 'Alice <alice at example.com>',
+                'to': 'Bob <bob at example.com>',
+                'content-id': identifier,
+                'content-description': description,
+                'content-transfer-encoding': encoding,
+                }, (), '', body, 123, None)
+
+        container = FakeyMessage({
+                'content-type': 'message/rfc822',
+                }, (), '', '', 123, [msg])
+
+        structure = imap4.getBodyStructure(container)
+        self.assertEqual(
+            ['message', 'rfc822', None, None, None, None, 0,
+             imap4.getEnvelope(msg), imap4.getBodyStructure(msg), 3],
+            structure)
+
+
+    def test_multiPart(self):
+        """
+        For a I{multipart/*} message, L{imap4.getBodyStructure} returns a list
+        containing the body structure information for each part of the message
+        followed by an element giving the MIME subtype of the message.
+        """
+        oneSubPart = FakeyMessage({
+                'content-type': 'image/jpeg; x=y',
+                'content-id': 'some kind of id',
+                'content-description': 'great justice',
+                'content-transfer-encoding': 'maximum',
+                }, (), '', 'hello world', 123, None)
+
+        anotherSubPart = FakeyMessage({
+                'content-type': 'text/plain; charset=us-ascii',
+                }, (), '', 'some stuff', 321, None)
+
+        container = FakeyMessage({
+                'content-type': 'multipart/related',
+                }, (), '', '', 555, [oneSubPart, anotherSubPart])
+
+        self.assertEqual(
+            [imap4.getBodyStructure(oneSubPart),
+             imap4.getBodyStructure(anotherSubPart),
+             'related'],
+            imap4.getBodyStructure(container))
+
+
+    def test_multiPartExtended(self):
+        """
+        When passed a I{multipart/*} message and C{True} for the C{extended}
+        argument, L{imap4.getBodyStructure} includes extended structure
+        information from the parts of the multipart message and extended
+        structure information about the multipart message itself.
+        """
+        oneSubPart = FakeyMessage({
+                'content-type': 'image/jpeg; x=y',
+                'content-id': 'some kind of id',
+                'content-description': 'great justice',
+                'content-transfer-encoding': 'maximum',
+                }, (), '', 'hello world', 123, None)
+
+        anotherSubPart = FakeyMessage({
+                'content-type': 'text/plain; charset=us-ascii',
+                }, (), '', 'some stuff', 321, None)
+
+        container = FakeyMessage({
+                'content-type': 'multipart/related; foo=bar',
+                'content-language': 'es',
+                'content-location': 'Spain',
+                'content-disposition': 'attachment; name=monkeys',
+                }, (), '', '', 555, [oneSubPart, anotherSubPart])
+
+        self.assertEqual(
+            [imap4.getBodyStructure(oneSubPart, extended=True),
+             imap4.getBodyStructure(anotherSubPart, extended=True),
+             'related', ['foo', 'bar'], ['attachment', ['name', 'monkeys']],
+             'es', 'Spain'],
+            imap4.getBodyStructure(container, extended=True))
+
+
+
+class NewFetchTestCase(unittest.TestCase, IMAP4HelperMixin):
+    def setUp(self):
+        self.received_messages = self.received_uid = None
+        self.result = None
+
+        self.server = imap4.IMAP4Server()
+        self.server.state = 'select'
+        self.server.mbox = self
+        self.connected = defer.Deferred()
+        self.client = SimpleClient(self.connected)
+
+    def addListener(self, x):
+        pass
+    def removeListener(self, x):
+        pass
+
+    def fetch(self, messages, uid):
+        self.received_messages = messages
+        self.received_uid = uid
+        return iter(zip(range(len(self.msgObjs)), self.msgObjs))
+
+    def _fetchWork(self, uid):
+        if uid:
+            for (i, msg) in zip(range(len(self.msgObjs)), self.msgObjs):
+                self.expected[i]['UID'] = str(msg.getUID())
+
+        def result(R):
+            self.result = R
+
+        self.connected.addCallback(lambda _: self.function(self.messages, uid)
+        ).addCallback(result
+        ).addCallback(self._cbStopClient
+        ).addErrback(self._ebGeneral)
+
+        d = loopback.loopbackTCP(self.server, self.client, noisy=False)
+        d.addCallback(lambda x : self.assertEqual(self.result, self.expected))
+        return d
+
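+    # Pattern used by the tests below (a reading aid, not new behaviour): each
+    # test sets self.function, self.messages, self.msgObjs (served back by the
+    # fetch method above) and self.expected, then returns _fetchWork(uid),
+    # which drives a loopback connection and compares the parsed result, e.g.:
+    #
+    #     self.function = self.client.fetchFlags
+    #     self.messages = '9'
+    #     self.msgObjs = [FakeyMessage({}, ['\\Seen'], '', '', 1, None)]
+    #     self.expected = {0: {'FLAGS': ['\\Seen']}}
+    #     return self._fetchWork(uid)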
+    def testFetchUID(self):
+        self.function = lambda m, u: self.client.fetchUID(m)
+
+        self.messages = '7'
+        self.msgObjs = [
+            FakeyMessage({}, (), '', '', 12345, None),
+            FakeyMessage({}, (), '', '', 999, None),
+            FakeyMessage({}, (), '', '', 10101, None),
+        ]
+        self.expected = {
+            0: {'UID': '12345'},
+            1: {'UID': '999'},
+            2: {'UID': '10101'},
+        }
+        return self._fetchWork(0)
+
+    def testFetchFlags(self, uid=0):
+        self.function = self.client.fetchFlags
+        self.messages = '9'
+        self.msgObjs = [
+            FakeyMessage({}, ['FlagA', 'FlagB', '\\FlagC'], '', '', 54321, None),
+            FakeyMessage({}, ['\\FlagC', 'FlagA', 'FlagB'], '', '', 12345, None),
+        ]
+        self.expected = {
+            0: {'FLAGS': ['FlagA', 'FlagB', '\\FlagC']},
+            1: {'FLAGS': ['\\FlagC', 'FlagA', 'FlagB']},
+        }
+        return self._fetchWork(uid)
+
+    def testFetchFlagsUID(self):
+        return self.testFetchFlags(1)
+
+    def testFetchInternalDate(self, uid=0):
+        self.function = self.client.fetchInternalDate
+        self.messages = '13'
+        self.msgObjs = [
+            FakeyMessage({}, (), 'Fri, 02 Nov 2003 21:25:10 GMT', '', 23232, None),
+            FakeyMessage({}, (), 'Thu, 29 Dec 2013 11:31:52 EST', '', 101, None),
+            FakeyMessage({}, (), 'Mon, 10 Mar 1992 02:44:30 CST', '', 202, None),
+            FakeyMessage({}, (), 'Sat, 11 Jan 2000 14:40:24 PST', '', 303, None),
+        ]
+        self.expected = {
+            0: {'INTERNALDATE': '02-Nov-2003 21:25:10 +0000'},
+            1: {'INTERNALDATE': '29-Dec-2013 11:31:52 -0500'},
+            2: {'INTERNALDATE': '10-Mar-1992 02:44:30 -0600'},
+            3: {'INTERNALDATE': '11-Jan-2000 14:40:24 -0800'},
+        }
+        return self._fetchWork(uid)
+
+    def testFetchInternalDateUID(self):
+        return self.testFetchInternalDate(1)
+
+
+    def test_fetchInternalDateLocaleIndependent(self):
+        """
+        The month name in the date is locale independent.
+        """
+        # Fake that we're in a language where December is not Dec
+        currentLocale = locale.setlocale(locale.LC_ALL, None)
+        locale.setlocale(locale.LC_ALL, "es_AR.UTF8")
+        self.addCleanup(locale.setlocale, locale.LC_ALL, currentLocale)
+        return self.testFetchInternalDate(1)
+
+    # If the alternate locale is not available, the previous test will be
+    # skipped; install that locale for it to run.  Avoid using locale.getlocale
+    # to learn the current locale; its values don't round-trip well on all
+    # platforms.  Fortunately setlocale returns a value which does round-trip
+    # well.
+    currentLocale = locale.setlocale(locale.LC_ALL, None)
+    try:
+        locale.setlocale(locale.LC_ALL, "es_AR.UTF8")
+    except locale.Error:
+        test_fetchInternalDateLocaleIndependent.skip = (
+            "The es_AR.UTF8 locale is not installed.")
+    else:
+        locale.setlocale(locale.LC_ALL, currentLocale)
+
+
+    def testFetchEnvelope(self, uid=0):
+        self.function = self.client.fetchEnvelope
+        self.messages = '15'
+        self.msgObjs = [
+            FakeyMessage({
+                'from': 'user at domain', 'to': 'resu at domain',
+                'date': 'thursday', 'subject': 'it is a message',
+                'message-id': 'id-id-id-yayaya'}, (), '', '', 65656,
+                None),
+        ]
+        self.expected = {
+            0: {'ENVELOPE':
+                ['thursday', 'it is a message',
+                    [[None, None, 'user', 'domain']],
+                    [[None, None, 'user', 'domain']],
+                    [[None, None, 'user', 'domain']],
+                    [[None, None, 'resu', 'domain']],
+                    None, None, None, 'id-id-id-yayaya']
+            }
+        }
+        return self._fetchWork(uid)
+
+    def testFetchEnvelopeUID(self):
+        return self.testFetchEnvelope(1)
+
+
+    def test_fetchBodyStructure(self, uid=0):
+        """
+        L{IMAP4Client.fetchBodyStructure} issues a I{FETCH BODYSTRUCTURE}
+        command and returns a Deferred which fires with a structure giving the
+        result of parsing the server's response.  The structure is a list
+        reflecting the parenthesized data sent by the server, as described by
+        RFC 3501, section 7.4.2.
+        """
+        self.function = self.client.fetchBodyStructure
+        self.messages = '3:9,10:*'
+        self.msgObjs = [FakeyMessage({
+                'content-type': 'text/plain; name=thing; key="value"',
+                'content-id': 'this-is-the-content-id',
+                'content-description': 'describing-the-content-goes-here!',
+                'content-transfer-encoding': '8BIT',
+                'content-md5': 'abcdef123456',
+                'content-disposition': 'attachment; filename=monkeys',
+                'content-language': 'es',
+                'content-location': 'http://example.com/monkeys',
+            }, (), '', 'Body\nText\nGoes\nHere\n', 919293, None)]
+        self.expected = {0: {'BODYSTRUCTURE': [
+            'text', 'plain', ['key', 'value', 'name', 'thing'],
+            'this-is-the-content-id', 'describing-the-content-goes-here!',
+            '8BIT', '20', '4', 'abcdef123456',
+            ['attachment', ['filename', 'monkeys']], 'es',
+             'http://example.com/monkeys']}}
+        return self._fetchWork(uid)
+
+
+    def testFetchBodyStructureUID(self):
+        """
+        If passed C{True} for the C{uid} argument, C{fetchBodyStructure} can
+        also issue a I{UID FETCH BODYSTRUCTURE} command.
+        """
+        return self.test_fetchBodyStructure(1)
+
+
+    def test_fetchBodyStructureMultipart(self, uid=0):
+        """
+        L{IMAP4Client.fetchBodyStructure} can also parse the response to a
+        I{FETCH BODYSTRUCTURE} command for a multipart message.
+        """
+        self.function = self.client.fetchBodyStructure
+        self.messages = '3:9,10:*'
+        innerMessage = FakeyMessage({
+                'content-type': 'text/plain; name=thing; key="value"',
+                'content-id': 'this-is-the-content-id',
+                'content-description': 'describing-the-content-goes-here!',
+                'content-transfer-encoding': '8BIT',
+                'content-language': 'fr',
+                'content-md5': '123456abcdef',
+                'content-disposition': 'inline',
+                'content-location': 'outer space',
+            }, (), '', 'Body\nText\nGoes\nHere\n', 919293, None)
+        self.msgObjs = [FakeyMessage({
+                'content-type': 'multipart/mixed; boundary="xyz"',
+                'content-language': 'en',
+                'content-location': 'nearby',
+            }, (), '', '', 919293, [innerMessage])]
+        self.expected = {0: {'BODYSTRUCTURE': [
+            ['text', 'plain', ['key', 'value', 'name', 'thing'],
+             'this-is-the-content-id', 'describing-the-content-goes-here!',
+             '8BIT', '20', '4', '123456abcdef', ['inline', None], 'fr',
+             'outer space'],
+            'mixed', ['boundary', 'xyz'], None, 'en', 'nearby'
+            ]}}
+        return self._fetchWork(uid)
+
+
+    def testFetchSimplifiedBody(self, uid=0):
+        self.function = self.client.fetchSimplifiedBody
+        self.messages = '21'
+        self.msgObjs = [FakeyMessage({}, (), '', 'Yea whatever', 91825,
+            [FakeyMessage({'content-type': 'image/jpg'}, (), '',
+                'Body Body Body', None, None
+            )]
+        )]
+        self.expected = {0:
+            {'BODY':
+                [None, None, None, None, None, None,
+                    '12'
+                ]
+            }
+        }
+
+        return self._fetchWork(uid)
+
+    def testFetchSimplifiedBodyUID(self):
+        return self.testFetchSimplifiedBody(1)
+
+    def testFetchSimplifiedBodyText(self, uid=0):
+        self.function = self.client.fetchSimplifiedBody
+        self.messages = '21'
+        self.msgObjs = [FakeyMessage({'content-type': 'text/plain'},
+            (), '', 'Yea whatever', 91825, None)]
+        self.expected = {0:
+            {'BODY':
+                ['text', 'plain', None, None, None, None,
+                    '12', '1'
+                ]
+            }
+        }
+
+        return self._fetchWork(uid)
+
+    def testFetchSimplifiedBodyTextUID(self):
+        return self.testFetchSimplifiedBodyText(1)
+
+    def testFetchSimplifiedBodyRFC822(self, uid=0):
+        self.function = self.client.fetchSimplifiedBody
+        self.messages = '21'
+        self.msgObjs = [FakeyMessage({'content-type': 'message/rfc822'},
+            (), '', 'Yea whatever', 91825,
+            [FakeyMessage({'content-type': 'image/jpg'}, (), '',
+                'Body Body Body', None, None
+            )]
+        )]
+        self.expected = {0:
+            {'BODY':
+                ['message', 'rfc822', None, None, None, None,
+                    '12', [None, None, [[None, None, None]],
+                    [[None, None, None]], None, None, None,
+                    None, None, None], ['image', 'jpg', None,
+                    None, None, None, '14'], '1'
+                ]
+            }
+        }
+
+        return self._fetchWork(uid)
+
+    def testFetchSimplifiedBodyRFC822UID(self):
+        return self.testFetchSimplifiedBodyRFC822(1)
+
+    def testFetchMessage(self, uid=0):
+        self.function = self.client.fetchMessage
+        self.messages = '1,3,7,10101'
+        self.msgObjs = [
+            FakeyMessage({'Header': 'Value'}, (), '', 'BODY TEXT\r\n', 91, None),
+        ]
+        self.expected = {
+            0: {'RFC822': 'Header: Value\r\n\r\nBODY TEXT\r\n'}
+        }
+        return self._fetchWork(uid)
+
+    def testFetchMessageUID(self):
+        return self.testFetchMessage(1)
+
+    def testFetchHeaders(self, uid=0):
+        self.function = self.client.fetchHeaders
+        self.messages = '9,6,2'
+        self.msgObjs = [
+            FakeyMessage({'H1': 'V1', 'H2': 'V2'}, (), '', '', 99, None),
+        ]
+        self.expected = {
+            0: {'RFC822.HEADER': imap4._formatHeaders({'H1': 'V1', 'H2': 'V2'})},
+        }
+        return self._fetchWork(uid)
+
+    def testFetchHeadersUID(self):
+        return self.testFetchHeaders(1)
+
+    def testFetchBody(self, uid=0):
+        self.function = self.client.fetchBody
+        self.messages = '1,2,3,4,5,6,7'
+        self.msgObjs = [
+            FakeyMessage({'Header': 'Value'}, (), '', 'Body goes here\r\n', 171, None),
+        ]
+        self.expected = {
+            0: {'RFC822.TEXT': 'Body goes here\r\n'},
+        }
+        return self._fetchWork(uid)
+
+    def testFetchBodyUID(self):
+        return self.testFetchBody(1)
+
+    def testFetchBodyParts(self):
+        """
+        Test the server's handling of requests for specific body sections.
+        """
+        self.function = self.client.fetchSpecific
+        self.messages = '1'
+        outerBody = ''
+        innerBody1 = 'Contained body message text.  Squarge.'
+        innerBody2 = 'Secondary <i>message</i> text of squarge body.'
+        headers = util.OrderedDict()
+        headers['from'] = 'sender at host'
+        headers['to'] = 'recipient at domain'
+        headers['subject'] = 'booga booga boo'
+        headers['content-type'] = 'multipart/alternative; boundary="xyz"'
+        innerHeaders = util.OrderedDict()
+        innerHeaders['subject'] = 'this is subject text'
+        innerHeaders['content-type'] = 'text/plain'
+        innerHeaders2 = util.OrderedDict()
+        innerHeaders2['subject'] = '<b>this is subject</b>'
+        innerHeaders2['content-type'] = 'text/html'
+        self.msgObjs = [FakeyMessage(
+            headers, (), None, outerBody, 123,
+            [FakeyMessage(innerHeaders, (), None, innerBody1, None, None),
+             FakeyMessage(innerHeaders2, (), None, innerBody2, None, None)])]
+        self.expected = {
+            0: [['BODY', ['1'], 'Contained body message text.  Squarge.']]}
+
+        def result(R):
+            self.result = R
+
+        self.connected.addCallback(
+            lambda _: self.function(self.messages, headerNumber=1))
+        self.connected.addCallback(result)
+        self.connected.addCallback(self._cbStopClient)
+        self.connected.addErrback(self._ebGeneral)
+
+        d = loopback.loopbackTCP(self.server, self.client, noisy=False)
+        d.addCallback(lambda ign: self.assertEqual(self.result, self.expected))
+        return d
+
+
+    def test_fetchBodyPartOfNonMultipart(self):
+        """
+        Single-part messages have an implicit first part which clients
+        should be able to retrieve explicitly.  Test that a client
+        requesting part 1 of a text/plain message receives the body of the
+        text/plain part.
+        """
+        self.function = self.client.fetchSpecific
+        self.messages = '1'
+        parts = [1]
+        outerBody = 'DA body'
+        headers = util.OrderedDict()
+        headers['from'] = 'sender at host'
+        headers['to'] = 'recipient at domain'
+        headers['subject'] = 'booga booga boo'
+        headers['content-type'] = 'text/plain'
+        self.msgObjs = [FakeyMessage(
+            headers, (), None, outerBody, 123, None)]
+
+        self.expected = {0: [['BODY', ['1'], 'DA body']]}
+
+        def result(R):
+            self.result = R
+
+        self.connected.addCallback(
+            lambda _: self.function(self.messages, headerNumber=parts))
+        self.connected.addCallback(result)
+        self.connected.addCallback(self._cbStopClient)
+        self.connected.addErrback(self._ebGeneral)
+
+        d = loopback.loopbackTCP(self.server, self.client, noisy=False)
+        d.addCallback(lambda ign: self.assertEqual(self.result, self.expected))
+        return d
+
+
+    def testFetchSize(self, uid=0):
+        self.function = self.client.fetchSize
+        self.messages = '1:100,2:*'
+        self.msgObjs = [
+            FakeyMessage({}, (), '', 'x' * 20, 123, None),
+        ]
+        self.expected = {
+            0: {'RFC822.SIZE': '20'},
+        }
+        return self._fetchWork(uid)
+
+    def testFetchSizeUID(self):
+        return self.testFetchSize(1)
+
+    def testFetchFull(self, uid=0):
+        self.function = self.client.fetchFull
+        self.messages = '1,3'
+        self.msgObjs = [
+            FakeyMessage({}, ('\\XYZ', '\\YZX', 'Abc'),
+                'Sun, 25 Jul 2010 06:20:30 -0400 (EDT)',
+                'xyz' * 2, 654, None),
+            FakeyMessage({}, ('\\One', '\\Two', 'Three'),
+                'Mon, 14 Apr 2003 19:43:44 -0400',
+                'abc' * 4, 555, None),
+        ]
+        self.expected = {
+            0: {'FLAGS': ['\\XYZ', '\\YZX', 'Abc'],
+                'INTERNALDATE': '25-Jul-2010 06:20:30 -0400',
+                'RFC822.SIZE': '6',
+                'ENVELOPE': [None, None, [[None, None, None]], [[None, None, None]], None, None, None, None, None, None],
+                'BODY': [None, None, None, None, None, None, '6']},
+            1: {'FLAGS': ['\\One', '\\Two', 'Three'],
+                'INTERNALDATE': '14-Apr-2003 19:43:44 -0400',
+                'RFC822.SIZE': '12',
+                'ENVELOPE': [None, None, [[None, None, None]], [[None, None, None]], None, None, None, None, None, None],
+                'BODY': [None, None, None, None, None, None, '12']},
+        }
+        return self._fetchWork(uid)
+
+    def testFetchFullUID(self):
+        return self.testFetchFull(1)
+
+    def testFetchAll(self, uid=0):
+        self.function = self.client.fetchAll
+        self.messages = '1,2:3'
+        self.msgObjs = [
+            FakeyMessage({}, (), 'Mon, 14 Apr 2003 19:43:44 +0400',
+                'Lalala', 10101, None),
+            FakeyMessage({}, (), 'Tue, 15 Apr 2003 19:43:44 +0200',
+                'Alalal', 20202, None),
+        ]
+        self.expected = {
+            0: {'ENVELOPE': [None, None, [[None, None, None]], [[None, None, None]], None, None, None, None, None, None],
+                'RFC822.SIZE': '6',
+                'INTERNALDATE': '14-Apr-2003 19:43:44 +0400',
+                'FLAGS': []},
+            1: {'ENVELOPE': [None, None, [[None, None, None]], [[None, None, None]], None, None, None, None, None, None],
+                'RFC822.SIZE': '6',
+                'INTERNALDATE': '15-Apr-2003 19:43:44 +0200',
+                'FLAGS': []},
+        }
+        return self._fetchWork(uid)
+
+    def testFetchAllUID(self):
+        return self.testFetchAll(1)
+
+    def testFetchFast(self, uid=0):
+        self.function = self.client.fetchFast
+        self.messages = '1'
+        self.msgObjs = [
+            FakeyMessage({}, ('\\X',), '19 Mar 2003 19:22:21 -0500', '', 9, None),
+        ]
+        self.expected = {
+            0: {'FLAGS': ['\\X'],
+                'INTERNALDATE': '19-Mar-2003 19:22:21 -0500',
+                'RFC822.SIZE': '0'},
+        }
+        return self._fetchWork(uid)
+
+    def testFetchFastUID(self):
+        return self.testFetchFast(1)
+
+
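+# Illustrative sketch (not part of the upstream tests): the fetchSpecific
+# results asserted above map message indices to lists of
+# [name, section-list, payload] entries, e.g. {0: [['BODY', ['1'], 'DA body']]}.
+# A caller could pull the payload of one section out of such a structure with
+# a helper like this (the helper name is hypothetical).
+def _bodySectionText(fetchResult, index, section):
+    """
+    Return the payload for C{section} (e.g. '1') of the message at C{index}
+    in a parsed fetchSpecific result, or C{None} if it is absent.
+    """
+    for entry in fetchResult.get(index, []):
+        if entry[0] == 'BODY' and entry[1] == [section]:
+            return entry[2]
+    return None
+
+# e.g. _bodySectionText({0: [['BODY', ['1'], 'DA body']]}, 0, '1') == 'DA body'
+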
+
+class DefaultSearchTestCase(IMAP4HelperMixin, unittest.TestCase):
+    """
+    Test the behavior of the server's SEARCH implementation, particularly in
+    the face of unhandled search terms.
+    """
+    def setUp(self):
+        self.server = imap4.IMAP4Server()
+        self.server.state = 'select'
+        self.server.mbox = self
+        self.connected = defer.Deferred()
+        self.client = SimpleClient(self.connected)
+        self.msgObjs = [
+            FakeyMessage({}, (), '', '', 999, None),
+            FakeyMessage({}, (), '', '', 10101, None),
+            FakeyMessage({}, (), '', '', 12345, None),
+            FakeyMessage({}, (), '', '', 20001, None),
+            FakeyMessage({}, (), '', '', 20002, None),
+        ]
+
+
+    def fetch(self, messages, uid):
+        """
+        Pretend to be a mailbox and let C{self.server} look up messages on me.
+        """
+        return zip(range(1, len(self.msgObjs) + 1), self.msgObjs)
+
+
+    def _messageSetSearchTest(self, queryTerms, expectedMessages):
+        """
+        Issue a search with the given query and verify that the returned
+        messages match the expected messages.
+
+        @param queryTerms: A string giving the search query.
+        @param expectedMessages: A list of the message sequence numbers
+            expected as the result of the search.
+        @return: A L{Deferred} which fires when the test is complete.
+        """
+        def search():
+            return self.client.search(queryTerms)
+
+        d = self.connected.addCallback(strip(search))
+        def searched(results):
+            self.assertEqual(results, expectedMessages)
+        d.addCallback(searched)
+        d.addCallback(self._cbStopClient)
+        d.addErrback(self._ebGeneral)
+        self.loopback()
+        return d
+
+
+    def test_searchMessageSet(self):
+        """
+        Test that a search which starts with a message set properly limits
+        the search results to messages in that set.
+        """
+        return self._messageSetSearchTest('1', [1])
+
+
+    def test_searchMessageSetWithStar(self):
+        """
+        If the search filter ends with a star, all the messages from the
+        starting point are returned.
+        """
+        return self._messageSetSearchTest('2:*', [2, 3, 4, 5])
+
+
+    def test_searchMessageSetWithStarFirst(self):
+        """
+        If the search filter starts with a star, the result should be the same
+        as if the filter ended with a star.
+        """
+        return self._messageSetSearchTest('*:2', [2, 3, 4, 5])
+
+
+    def test_searchMessageSetUIDWithStar(self):
+        """
+        If the search filter ends with a star, all the messages from the
+        starting point are returned (also for the SEARCH UID case).
+        """
+        return self._messageSetSearchTest('UID 10000:*', [2, 3, 4, 5])
+
+
+    def test_searchMessageSetUIDWithStarFirst(self):
+        """
+        If the search filter starts with a star, the result should be the same
+        as if the filter ended with a star (also for the SEARCH UID case).
+        """
+        return self._messageSetSearchTest('UID *:10000', [2, 3, 4, 5])
+
+
+    def test_searchMessageSetUIDWithStarAndHighStart(self):
+        """
+        A UID search range such as 30000:* should include the last message in
+        the mailbox, even if its UID is less than 30000.
+        """
+        # in our fake mbox the highest message UID is 20002
+        return self._messageSetSearchTest('UID 30000:*', [5])
+
+
+    def test_searchMessageSetWithList(self):
+        """
+        If the search filter contains nested terms, one of which includes a
+        message sequence set with a wildcard, the search still evaluates
+        correctly.
+        """
+        # 6 is bigger than the biggest message sequence number, but that's
+        # okay, because N:* includes the biggest message sequence number even
+        # if N is bigger than that (see the sequence-set syntax in RFC 3501;
+        # an illustrative sketch of this follows the test case).
+        return self._messageSetSearchTest('(6:*)', [5])
+
+
+    def test_searchOr(self):
+        """
+        If the search filter contains an I{OR} term, all messages
+        which match either subexpression are returned.
+        """
+        return self._messageSetSearchTest('OR 1 2', [1, 2])
+
+
+    def test_searchOrMessageSet(self):
+        """
+        If the search filter contains an I{OR} term with a
+        subexpression which includes a message sequence set wildcard,
+        all messages in that set are considered for inclusion in the
+        results.
+        """
+        return self._messageSetSearchTest('OR 2:* 2:*', [2, 3, 4, 5])
+
+
+    def test_searchNot(self):
+        """
+        If the search filter contains a I{NOT} term, all messages
+        which do not match the subexpression are returned.
+        """
+        return self._messageSetSearchTest('NOT 3', [1, 2, 4, 5])
+
+
+    def test_searchNotMessageSet(self):
+        """
+        If the search filter contains a I{NOT} term with a
+        subexpression which includes a message sequence set wildcard,
+        no messages in that set are considered for inclusion in the
+        result.
+        """
+        return self._messageSetSearchTest('NOT 2:*', [1])
+
+
+    def test_searchAndMessageSet(self):
+        """
+        If the search filter contains multiple terms implicitly
+        conjoined with a message sequence set wildcard, only the
+        intersection of the results of each term is returned.
+        """
+        return self._messageSetSearchTest('2:* 3', [3])
+
+    def test_searchInvalidCriteria(self):
+        """
+        If the search criterion is not a valid key, a NO result is returned to
+        the client (resulting in an error callback), and an IllegalQueryError is
+        logged on the server side.
+        """
+        queryTerms = 'FOO'
+        def search():
+            return self.client.search(queryTerms)
+
+        d = self.connected.addCallback(strip(search))
+        d = self.assertFailure(d, imap4.IMAP4Exception)
+
+        def errorReceived(results):
+            """
+            Verify that the server logs an IllegalQueryError and the
+            client raises an IMAP4Exception with 'SEARCH failed:...'
+            """
+            self.client.transport.loseConnection()
+            self.server.transport.loseConnection()
+
+            # Check what the server logs
+            errors = self.flushLoggedErrors(imap4.IllegalQueryError)
+            self.assertEqual(len(errors), 1)
+
+            # Verify exception given to client has the correct message
+            self.assertEqual(
+                "SEARCH failed: Invalid search command FOO", str(results))
+
+        d.addCallback(errorReceived)
+        d.addErrback(self._ebGeneral)
+        self.loopback()
+        return d
+
+
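+# Illustrative sketch (not part of the upstream tests) of the message-set
+# wildcard semantics exercised by DefaultSearchTestCase above: '*' stands for
+# the largest sequence number (or UID) in use, and a range written against
+# '*' always reaches that largest value, even when the other endpoint is
+# bigger than it.  The helper is hypothetical and only mirrors what the tests
+# assert.
+def _expandRange(spec, largest):
+    """
+    Expand a single range such as '2:*', '*:2' or '6:*' against C{largest},
+    the highest number currently in use, and return the matching numbers.
+    """
+    if ':' not in spec:
+        return [largest if spec == '*' else int(spec)]
+    low, high = spec.split(':')
+    low = largest if low == '*' else int(low)
+    high = largest if high == '*' else int(high)
+    if low > high:
+        low, high = high, low
+    return list(range(min(low, largest), min(high, largest) + 1))
+
+# e.g. _expandRange('2:*', 5) == [2, 3, 4, 5] and _expandRange('6:*', 5) == [5]
+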
+
+class FetchSearchStoreTestCase(unittest.TestCase, IMAP4HelperMixin):
+    implements(imap4.ISearchableMailbox)
+
+    def setUp(self):
+        self.expected = self.result = None
+        self.server_received_query = None
+        self.server_received_uid = None
+        self.server_received_parts = None
+        self.server_received_messages = None
+
+        self.server = imap4.IMAP4Server()
+        self.server.state = 'select'
+        self.server.mbox = self
+        self.connected = defer.Deferred()
+        self.client = SimpleClient(self.connected)
+
+    def search(self, query, uid):
+        # Look for a specific bad query, so we can verify we handle it properly
+        if query == ['FOO']:
+            raise imap4.IllegalQueryError("FOO is not a valid search criteria")
+
+        self.server_received_query = query
+        self.server_received_uid = uid
+        return self.expected
+
+    def addListener(self, *a, **kw):
+        pass
+    removeListener = addListener
+
+    def _searchWork(self, uid):
+        def search():
+            return self.client.search(self.query, uid=uid)
+        def result(R):
+            self.result = R
+
+        self.connected.addCallback(strip(search)
+        ).addCallback(result
+        ).addCallback(self._cbStopClient
+        ).addErrback(self._ebGeneral)
+
+        def check(ignored):
+            # Ensure no short-circuiting weirdness is going on
+            self.failIf(self.result is self.expected)
+
+            self.assertEqual(self.result, self.expected)
+            self.assertEqual(self.uid, self.server_received_uid)
+            self.assertEqual(
+                imap4.parseNestedParens(self.query),
+                self.server_received_query
+            )
+        d = loopback.loopbackTCP(self.server, self.client, noisy=False)
+        d.addCallback(check)
+        return d
+
+    def testSearch(self):
+        self.query = imap4.Or(
+            imap4.Query(header=('subject', 'substring')),
+            imap4.Query(larger=1024, smaller=4096),
+        )
+        self.expected = [1, 4, 5, 7]
+        self.uid = 0
+        return self._searchWork(0)
+
+    def testUIDSearch(self):
+        self.query = imap4.Or(
+            imap4.Query(header=('subject', 'substring')),
+            imap4.Query(larger=1024, smaller=4096),
+        )
+        self.uid = 1
+        self.expected = [1, 2, 3]
+        return self._searchWork(1)
+
+    def getUID(self, msg):
+        try:
+            return self.expected[msg]['UID']
+        except (TypeError, IndexError):
+            return self.expected[msg-1]
+        except KeyError:
+            return 42
+
+    def fetch(self, messages, uid):
+        self.server_received_uid = uid
+        self.server_received_messages = str(messages)
+        return self.expected
+
+    def _fetchWork(self, fetch):
+        def result(R):
+            self.result = R
+
+        self.connected.addCallback(strip(fetch)
+        ).addCallback(result
+        ).addCallback(self._cbStopClient
+        ).addErrback(self._ebGeneral)
+
+        def check(ignored):
+            # Ensure no short-circuiting weirdness is going on
+            self.failIf(self.result is self.expected)
+
+            self.parts and self.parts.sort()
+            self.server_received_parts and self.server_received_parts.sort()
+
+            if self.uid:
+                for (k, v) in self.expected.items():
+                    v['UID'] = str(k)
+
+            self.assertEqual(self.result, self.expected)
+            self.assertEqual(self.uid, self.server_received_uid)
+            self.assertEqual(self.parts, self.server_received_parts)
+            self.assertEqual(imap4.parseIdList(self.messages),
+                              imap4.parseIdList(self.server_received_messages))
+
+        d = loopback.loopbackTCP(self.server, self.client, noisy=False)
+        d.addCallback(check)
+        return d
+
+
+    def test_invalidTerm(self):
+        """
+        If, as part of a search, an ISearchableMailbox raises an
+        IllegalQueryError (e.g. due to invalid search criteria), the client sees a
+        failure response, and an IllegalQueryError is logged on the server.
+        """
+        query = 'FOO'
+
+        def search():
+            return self.client.search(query)
+
+        d = self.connected.addCallback(strip(search))
+        d = self.assertFailure(d, imap4.IMAP4Exception)
+
+        def errorReceived(results):
+            """
+            Verify that the server logs an IllegalQueryError and the
+            client raises an IMAP4Exception with 'SEARCH failed:...'
+            """
+            self.client.transport.loseConnection()
+            self.server.transport.loseConnection()
+
+            # Check what the server logs
+            errors = self.flushLoggedErrors(imap4.IllegalQueryError)
+            self.assertEqual(len(errors), 1)
+
+            # Verify exception given to client has the correct message
+            self.assertEqual(
+                "SEARCH failed: FOO is not a valid search criteria",
+                str(results))
+
+        d.addCallback(errorReceived)
+        d.addErrback(self._ebGeneral)
+        self.loopback()
+        return d
+
+
+
+class FakeMailbox:
+    def __init__(self):
+        self.args = []
+    def addMessage(self, body, flags, date):
+        self.args.append((body, flags, date))
+        return defer.succeed(None)
+
+class FeaturefulMessage:
+    implements(imap4.IMessageFile)
+
+    def getFlags(self):
+        return 'flags'
+
+    def getInternalDate(self):
+        return 'internaldate'
+
+    def open(self):
+        return StringIO("open")
+
+class MessageCopierMailbox:
+    implements(imap4.IMessageCopier)
+
+    def __init__(self):
+        self.msgs = []
+
+    def copy(self, msg):
+        self.msgs.append(msg)
+        return len(self.msgs)
+
+class CopyWorkerTestCase(unittest.TestCase):
+    def testFeaturefulMessage(self):
+        s = imap4.IMAP4Server()
+
+        # This grabs a non-public method in order to test it directly: it is
+        # complex enough to need its own coverage.  Perhaps it should be
+        # refactored, simplified, or split up into not-so-private components,
+        # but that is a task for another day.
+
+        # Once it is split up, this test should be rewritten to use the
+        # default adapter from IMailbox to IMessageCopier and call .copy on
+        # that adapter.
+        f = s._IMAP4Server__cbCopy
+
+        m = FakeMailbox()
+        d = f([(i, FeaturefulMessage()) for i in range(1, 11)], 'tag', m)
+
+        def cbCopy(results):
+            for a in m.args:
+                self.assertEqual(a[0].read(), "open")
+                self.assertEqual(a[1], "flags")
+                self.assertEqual(a[2], "internaldate")
+
+            for (status, result) in results:
+                self.failUnless(status)
+                self.assertEqual(result, None)
+
+        return d.addCallback(cbCopy)
+
+
+    def testUnfeaturefulMessage(self):
+        s = imap4.IMAP4Server()
+
+        # See above comment
+        f = s._IMAP4Server__cbCopy
+
+        m = FakeMailbox()
+        msgs = [FakeyMessage({'Header-Counter': str(i)}, (), 'Date', 'Body %d' % (i,), i + 10, None) for i in range(1, 11)]
+        d = f([im for im in zip(range(1, 11), msgs)], 'tag', m)
+
+        def cbCopy(results):
+            seen = []
+            for a in m.args:
+                seen.append(a[0].read())
+                self.assertEqual(a[1], ())
+                self.assertEqual(a[2], "Date")
+
+            seen.sort()
+            exp = ["Header-Counter: %d\r\n\r\nBody %d" % (i, i) for i in range(1, 11)]
+            exp.sort()
+            self.assertEqual(seen, exp)
+
+            for (status, result) in results:
+                self.failUnless(status)
+                self.assertEqual(result, None)
+
+        return d.addCallback(cbCopy)
+
+    def testMessageCopier(self):
+        s = imap4.IMAP4Server()
+
+        # See above comment
+        f = s._IMAP4Server__cbCopy
+
+        m = MessageCopierMailbox()
+        msgs = [object() for i in range(1, 11)]
+        d = f([im for im in zip(range(1, 11), msgs)], 'tag', m)
+
+        def cbCopy(results):
+            self.assertEqual(results, zip([1] * 10, range(1, 11)))
+            for (orig, new) in zip(msgs, m.msgs):
+                self.assertIdentical(orig, new)
+
+        return d.addCallback(cbCopy)
+
+
+class TLSTestCase(IMAP4HelperMixin, unittest.TestCase):
+    serverCTX = ServerTLSContext and ServerTLSContext()
+    clientCTX = ClientTLSContext and ClientTLSContext()
+
+    def loopback(self):
+        return loopback.loopbackTCP(self.server, self.client, noisy=False)
+
+    def testAPileOfThings(self):
+        SimpleServer.theAccount.addMailbox('inbox')
+        called = []
+        def login():
+            called.append(None)
+            return self.client.login('testuser', 'password-test')
+        def list():
+            called.append(None)
+            return self.client.list('inbox', '%')
+        def status():
+            called.append(None)
+            return self.client.status('inbox', 'UIDNEXT')
+        def examine():
+            called.append(None)
+            return self.client.examine('inbox')
+        def logout():
+            called.append(None)
+            return self.client.logout()
+
+        self.client.requireTransportSecurity = True
+
+        methods = [login, list, status, examine, logout]
+        map(self.connected.addCallback, map(strip, methods))
+        self.connected.addCallbacks(self._cbStopClient, self._ebGeneral)
+        def check(ignored):
+            self.assertEqual(self.server.startedTLS, True)
+            self.assertEqual(self.client.startedTLS, True)
+            self.assertEqual(len(called), len(methods))
+        d = self.loopback()
+        d.addCallback(check)
+        return d
+
+    def testLoginLogin(self):
+        self.server.checker.addUser('testuser', 'password-test')
+        success = []
+        self.client.registerAuthenticator(imap4.LOGINAuthenticator('testuser'))
+        self.connected.addCallback(
+                lambda _: self.client.authenticate('password-test')
+            ).addCallback(
+                lambda _: self.client.logout()
+            ).addCallback(success.append
+            ).addCallback(self._cbStopClient
+            ).addErrback(self._ebGeneral)
+
+        d = self.loopback()
+        d.addCallback(lambda x : self.assertEqual(len(success), 1))
+        return d
+
+
+    def test_startTLS(self):
+        """
+        L{IMAP4Client.startTLS} triggers TLS negotiation and returns a
+        L{Deferred} which fires after the client's transport is using
+        encryption.
+        """
+        success = []
+        self.connected.addCallback(lambda _: self.client.startTLS())
+        def checkSecure(ignored):
+            self.assertTrue(
+                interfaces.ISSLTransport.providedBy(self.client.transport))
+        self.connected.addCallback(checkSecure)
+        self.connected.addCallback(self._cbStopClient)
+        self.connected.addCallback(success.append)
+        self.connected.addErrback(self._ebGeneral)
+
+        d = self.loopback()
+        d.addCallback(lambda x : self.failUnless(success))
+        return defer.gatherResults([d, self.connected])
+
+
+    def testFailedStartTLS(self):
+        failure = []
+        def breakServerTLS(ign):
+            self.server.canStartTLS = False
+
+        self.connected.addCallback(breakServerTLS)
+        self.connected.addCallback(lambda ign: self.client.startTLS())
+        self.connected.addErrback(lambda err: failure.append(err.trap(imap4.IMAP4Exception)))
+        self.connected.addCallback(self._cbStopClient)
+        self.connected.addErrback(self._ebGeneral)
+
+        def check(ignored):
+            self.failUnless(failure)
+            self.assertIdentical(failure[0], imap4.IMAP4Exception)
+        return self.loopback().addCallback(check)
+
+
+
+class SlowMailbox(SimpleMailbox):
+    howSlow = 2
+    callLater = None
+    fetchDeferred = None
+
+    # Not a very nice implementation of fetch(), but it'll
+    # do for the purposes of testing.
+    def fetch(self, messages, uid):
+        d = defer.Deferred()
+        self.callLater(self.howSlow, d.callback, ())
+        self.fetchDeferred.callback(None)
+        return d
+
+class Timeout(IMAP4HelperMixin, unittest.TestCase):
+
+    def test_serverTimeout(self):
+        """
+        The *client* has a timeout mechanism which will close connections that
+        are inactive for a period.
+        """
+        c = Clock()
+        self.server.timeoutTest = True
+        self.client.timeout = 5 #seconds
+        self.client.callLater = c.callLater
+        self.selectedArgs = None
+
+        def login():
+            d = self.client.login('testuser', 'password-test')
+            c.advance(5)
+            d.addErrback(timedOut)
+            return d
+
+        def timedOut(failure):
+            self._cbStopClient(None)
+            failure.trap(error.TimeoutError)
+
+        d = self.connected.addCallback(strip(login))
+        d.addErrback(self._ebGeneral)
+        return defer.gatherResults([d, self.loopback()])
+
+
+    def test_longFetchDoesntTimeout(self):
+        """
+        The connection timeout does not take effect during fetches.
+        """
+        c = Clock()
+        SlowMailbox.callLater = c.callLater
+        SlowMailbox.fetchDeferred = defer.Deferred()
+        self.server.callLater = c.callLater
+        SimpleServer.theAccount.mailboxFactory = SlowMailbox
+        SimpleServer.theAccount.addMailbox('mailbox-test')
+
+        self.server.setTimeout(1)
+
+        def login():
+            return self.client.login('testuser', 'password-test')
+        def select():
+            self.server.setTimeout(1)
+            return self.client.select('mailbox-test')
+        def fetch():
+            return self.client.fetchUID('1:*')
+        def stillConnected():
+            self.assertNotEquals(self.server.state, 'timeout')
+
+        def cbAdvance(ignored):
+            for i in xrange(4):
+                c.advance(.5)
+
+        SlowMailbox.fetchDeferred.addCallback(cbAdvance)
+
+        d1 = self.connected.addCallback(strip(login))
+        d1.addCallback(strip(select))
+        d1.addCallback(strip(fetch))
+        d1.addCallback(strip(stillConnected))
+        d1.addCallback(self._cbStopClient)
+        d1.addErrback(self._ebGeneral)
+        d = defer.gatherResults([d1, self.loopback()])
+        return d
+
+
+    def test_idleClientDoesDisconnect(self):
+        """
+        The *server* has a timeout mechanism which will close connections that
+        are inactive for a period.
+        """
+        c = Clock()
+        # Hook up our server protocol
+        transport = StringTransportWithDisconnection()
+        transport.protocol = self.server
+        self.server.callLater = c.callLater
+        self.server.makeConnection(transport)
+
+        # Make sure we can notice when the connection goes away
+        lost = []
+        connLost = self.server.connectionLost
+        self.server.connectionLost = lambda reason: (lost.append(None), connLost(reason))[1]
+
+        # 2/3rds of the idle timeout elapses...
+        c.pump([0.0] + [self.server.timeOut / 3.0] * 2)
+        self.failIf(lost, lost)
+
+        # Now some more
+        c.pump([0.0, self.server.timeOut / 2.0])
+        self.failUnless(lost)
+
+
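+# Illustrative sketch (not part of the upstream tests) of the fake-time
+# pattern the Timeout tests above rely on: work is scheduled against a
+# twisted.internet.task.Clock (standing in for the protocol's callLater) and
+# the clock is advanced by hand, so timeouts fire deterministically without
+# any real waiting.
+def _demoFakeTimeout():
+    from twisted.internet.task import Clock
+    fired = []
+    clock = Clock()
+    clock.callLater(5, fired.append, 'timed out')  # what protocol.callLater would do
+    clock.advance(4.9)       # not yet due
+    assert fired == []
+    clock.advance(0.2)       # now past the 5 second mark
+    assert fired == ['timed out']
+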
+
+class Disconnection(unittest.TestCase):
+    def testClientDisconnectFailsDeferreds(self):
+        c = imap4.IMAP4Client()
+        t = StringTransportWithDisconnection()
+        c.makeConnection(t)
+        d = self.assertFailure(c.login('testuser', 'example.com'), error.ConnectionDone)
+        c.connectionLost(error.ConnectionDone("Connection closed"))
+        return d
+
+
+
+class SynchronousMailbox(object):
+    """
+    Trivial, in-memory mailbox implementation which can produce a message
+    synchronously.
+    """
+    def __init__(self, messages):
+        self.messages = messages
+
+
+    def fetch(self, msgset, uid):
+        assert not uid, "Cannot handle uid requests."
+        for msg in msgset:
+            yield msg, self.messages[msg - 1]
+
+
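+# Illustrative usage (not part of the upstream tests): because
+# SynchronousMailbox.fetch is a generator, its whole result can be consumed
+# synchronously, without waiting on a Deferred.
+def _demoSynchronousMailbox():
+    mbox = SynchronousMailbox(['first message', 'second message'])
+    # fetch yields (message number, message) pairs for the requested numbers.
+    assert list(mbox.fetch([1, 2], uid=False)) == [
+        (1, 'first message'), (2, 'second message')]
+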
+
+class StringTransportConsumer(StringTransport):
+    producer = None
+    streaming = None
+
+    def registerProducer(self, producer, streaming):
+        self.producer = producer
+        self.streaming = streaming
+
+
+
+class Pipelining(unittest.TestCase):
+    """
+    Tests for various aspects of the IMAP4 server's pipelining support.
+    """
+    messages = [
+        FakeyMessage({}, [], '', '0', None, None),
+        FakeyMessage({}, [], '', '1', None, None),
+        FakeyMessage({}, [], '', '2', None, None),
+        ]
+
+    def setUp(self):
+        self.iterators = []
+
+        self.transport = StringTransportConsumer()
+        self.server = imap4.IMAP4Server(None, None, self.iterateInReactor)
+        self.server.makeConnection(self.transport)
+
+
+    def iterateInReactor(self, iterator):
+        d = defer.Deferred()
+        self.iterators.append((iterator, d))
+        return d
+
+
+    def tearDown(self):
+        self.server.connectionLost(failure.Failure(error.ConnectionDone()))
+
+
+    def test_synchronousFetch(self):
+        """
+        Test that pipelined FETCH commands which can be responded to
+        synchronously are responded to correctly.
+        """
+        mailbox = SynchronousMailbox(self.messages)
+
+        # Skip over authentication and folder selection
+        self.server.state = 'select'
+        self.server.mbox = mailbox
+
+        # Get rid of any greeting junk
+        self.transport.clear()
+
+        # Send several pipelined FETCH commands at once.
+        self.server.dataReceived(
+            '01 FETCH 1 BODY[]\r\n'
+            '02 FETCH 2 BODY[]\r\n'
+            '03 FETCH 3 BODY[]\r\n')
+
+        # Flush anything the server has scheduled to run
+        while self.iterators:
+            for e in self.iterators[0][0]:
+                break
+            else:
+                self.iterators.pop(0)[1].callback(None)
+
+        # The bodies are empty because we aren't simulating a transport
+        # exactly correctly (we have StringTransportConsumer but we never
+        # call resumeProducing on its producer).  It doesn't matter: just
+        # make sure the surrounding structure is okay, and that no
+        # exceptions occurred.
+        self.assertEqual(
+            self.transport.value(),
+            '* 1 FETCH (BODY[] )\r\n'
+            '01 OK FETCH completed\r\n'
+            '* 2 FETCH (BODY[] )\r\n'
+            '02 OK FETCH completed\r\n'
+            '* 3 FETCH (BODY[] )\r\n'
+            '03 OK FETCH completed\r\n')
+
+
+
+if ClientTLSContext is None:
+    for case in (TLSTestCase,):
+        case.skip = "OpenSSL not present"
+elif interfaces.IReactorSSL(reactor, None) is None:
+    for case in (TLSTestCase,):
+        case.skip = "Reactor doesn't support SSL"
+
+
+
+class IMAP4ServerFetchTestCase(unittest.TestCase):
+    """
+    This test case is for the FETCH tests that require
+    a C{StringTransport}.
+    """
+
+    def setUp(self):
+        self.transport = StringTransport()
+        self.server = imap4.IMAP4Server()
+        self.server.state = 'select'
+        self.server.makeConnection(self.transport)
+
+
+    def test_fetchWithPartialValidArgument(self):
+        """
+        If extra bytes are appended to the end of an otherwise valid FETCH
+        argument list, the client should get a BAD - arguments invalid
+        response.
+
+        See U{RFC 3501<http://tools.ietf.org/html/rfc3501#section-6.4.5>},
+        section 6.4.5.
+        """
+        # We need to clear out the welcome message.
+        self.transport.clear()
+        # Let's send out the faulty command.
+        self.server.dataReceived("0001 FETCH 1 FULLL\r\n")
+        expected = "0001 BAD Illegal syntax: Invalid Argument\r\n"
+        self.assertEqual(self.transport.value(), expected)
+        self.transport.clear()
+        self.server.connectionLost(error.ConnectionDone("Connection closed"))
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_mail.py b/ThirdParty/Twisted/twisted/mail/test/test_mail.py
new file mode 100644
index 0000000..ba91544
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_mail.py
@@ -0,0 +1,2060 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for large portions of L{twisted.mail}.
+"""
+
+import os
+import errno
+import shutil
+import pickle
+import StringIO
+import rfc822
+import tempfile
+import signal
+
+from zope.interface import Interface, implements
+
+from twisted.trial import unittest
+from twisted.mail import smtp
+from twisted.mail import pop3
+from twisted.names import dns
+from twisted.internet import protocol
+from twisted.internet import defer
+from twisted.internet.defer import Deferred
+from twisted.internet import reactor
+from twisted.internet import interfaces
+from twisted.internet import task
+from twisted.internet.error import DNSLookupError, CannotListenError
+from twisted.internet.error import ProcessDone, ProcessTerminated
+from twisted.internet import address
+from twisted.python import failure
+from twisted.python.filepath import FilePath
+from twisted.python.hashlib import md5
+
+from twisted import mail
+import twisted.mail.mail
+import twisted.mail.maildir
+import twisted.mail.relay
+import twisted.mail.relaymanager
+import twisted.mail.protocols
+import twisted.mail.alias
+
+from twisted.names.error import DNSNameError
+from twisted.names.dns import RRHeader, Record_CNAME, Record_MX
+
+from twisted import cred
+import twisted.cred.credentials
+import twisted.cred.checkers
+import twisted.cred.portal
+
+from twisted.test.proto_helpers import LineSendingProtocol
+
+class DomainWithDefaultsTestCase(unittest.TestCase):
+    def testMethods(self):
+        d = dict([(x, x + 10) for x in range(10)])
+        d = mail.mail.DomainWithDefaultDict(d, 'Default')
+
+        self.assertEqual(len(d), 10)
+        self.assertEqual(list(iter(d)), range(10))
+        self.assertEqual(list(d.iterkeys()), list(iter(d)))
+
+        items = list(d.iteritems())
+        items.sort()
+        self.assertEqual(items, [(x, x + 10) for x in range(10)])
+
+        values = list(d.itervalues())
+        values.sort()
+        self.assertEqual(values, range(10, 20))
+
+        items = d.items()
+        items.sort()
+        self.assertEqual(items, [(x, x + 10) for x in range(10)])
+
+        values = d.values()
+        values.sort()
+        self.assertEqual(values, range(10, 20))
+
+        for x in range(10):
+            self.assertEqual(d[x], x + 10)
+            self.assertEqual(d.get(x), x + 10)
+            self.failUnless(x in d)
+            self.failUnless(d.has_key(x))
+
+        del d[2], d[4], d[6]
+
+        self.assertEqual(len(d), 7)
+        self.assertEqual(d[2], 'Default')
+        self.assertEqual(d[4], 'Default')
+        self.assertEqual(d[6], 'Default')
+
+        d.update({'a': None, 'b': (), 'c': '*'})
+        self.assertEqual(len(d), 10)
+        self.assertEqual(d['a'], None)
+        self.assertEqual(d['b'], ())
+        self.assertEqual(d['c'], '*')
+
+        d.clear()
+        self.assertEqual(len(d), 0)
+
+        self.assertEqual(d.setdefault('key', 'value'), 'value')
+        self.assertEqual(d['key'], 'value')
+
+        self.assertEqual(d.popitem(), ('key', 'value'))
+        self.assertEqual(len(d), 0)
+
+        dcopy = d.copy()
+        self.assertEqual(d.domains, dcopy.domains)
+        self.assertEqual(d.default, dcopy.default)
+
+
+    def _stringificationTest(self, stringifier):
+        """
+        Assert that the class name of a L{mail.mail.DomainWithDefaultDict}
+        instance and the string-formatted underlying domain dictionary both
+        appear in the string produced by the given string-returning function.
+
+        @type stringifier: one-argument callable
+        @param stringifier: either C{str} or C{repr}, to be used to get a
+            string to make assertions against.
+        """
+        domain = mail.mail.DomainWithDefaultDict({}, 'Default')
+        self.assertIn(domain.__class__.__name__, stringifier(domain))
+        domain['key'] = 'value'
+        self.assertIn(str({'key': 'value'}), stringifier(domain))
+
+
+    def test_str(self):
+        """
+        L{DomainWithDefaultDict.__str__} should return a string including
+        the class name and the domain mapping held by the instance.
+        """
+        self._stringificationTest(str)
+
+
+    def test_repr(self):
+        """
+        L{DomainWithDefaultDict.__repr__} should return a string including
+        the class name and the domain mapping held by the instance.
+        """
+        self._stringificationTest(repr)
+
+
+
+class BounceTestCase(unittest.TestCase):
+    def setUp(self):
+        self.domain = mail.mail.BounceDomain()
+
+    def testExists(self):
+        self.assertRaises(smtp.AddressError, self.domain.exists, "any user")
+
+    def testRelay(self):
+        self.assertEqual(
+            self.domain.willRelay("random q emailer", "protocol"),
+            False
+        )
+
+    def testMessage(self):
+        self.assertRaises(NotImplementedError, self.domain.startMessage, "whomever")
+
+    def testAddUser(self):
+        self.domain.addUser("bob", "password")
+        self.assertRaises(smtp.SMTPBadRcpt, self.domain.exists, "bob")
+
+class FileMessageTestCase(unittest.TestCase):
+    def setUp(self):
+        self.name = "fileMessage.testFile"
+        self.final = "final.fileMessage.testFile"
+        self.f = file(self.name, 'w')
+        self.fp = mail.mail.FileMessage(self.f, self.name, self.final)
+
+    def tearDown(self):
+        try:
+            self.f.close()
+        except:
+            pass
+        try:
+            os.remove(self.name)
+        except:
+            pass
+        try:
+            os.remove(self.final)
+        except:
+            pass
+
+    def testFinalName(self):
+        return self.fp.eomReceived().addCallback(self._cbFinalName)
+
+    def _cbFinalName(self, result):
+        self.assertEqual(result, self.final)
+        self.failUnless(self.f.closed)
+        self.failIf(os.path.exists(self.name))
+
+    def testContents(self):
+        contents = "first line\nsecond line\nthird line\n"
+        for line in contents.splitlines():
+            self.fp.lineReceived(line)
+        self.fp.eomReceived()
+        self.assertEqual(file(self.final).read(), contents)
+
+    def testInterrupted(self):
+        contents = "first line\nsecond line\n"
+        for line in contents.splitlines():
+            self.fp.lineReceived(line)
+        self.fp.connectionLost()
+        self.failIf(os.path.exists(self.name))
+        self.failIf(os.path.exists(self.final))
+
+class MailServiceTestCase(unittest.TestCase):
+    def setUp(self):
+        self.service = mail.mail.MailService()
+
+    def testFactories(self):
+        f = self.service.getPOP3Factory()
+        self.failUnless(isinstance(f, protocol.ServerFactory))
+        self.failUnless(f.buildProtocol(('127.0.0.1', 12345)), pop3.POP3)
+
+        f = self.service.getSMTPFactory()
+        self.failUnless(isinstance(f, protocol.ServerFactory))
+        self.failUnless(f.buildProtocol(('127.0.0.1', 12345)), smtp.SMTP)
+
+        f = self.service.getESMTPFactory()
+        self.failUnless(isinstance(f, protocol.ServerFactory))
+        self.failUnless(f.buildProtocol(('127.0.0.1', 12345)), smtp.ESMTP)
+
+    def testPortals(self):
+        o1 = object()
+        o2 = object()
+        self.service.portals['domain'] = o1
+        self.service.portals[''] = o2
+
+        self.failUnless(self.service.lookupPortal('domain') is o1)
+        self.failUnless(self.service.defaultPortal() is o2)
+
+
+class StringListMailboxTests(unittest.TestCase):
+    """
+    Tests for L{StringListMailbox}, an in-memory only implementation of
+    L{pop3.IMailbox}.
+    """
+    def test_listOneMessage(self):
+        """
+        L{StringListMailbox.listMessages} returns the length of the message at
+        the offset into the mailbox passed to it.
+        """
+        mailbox = mail.maildir.StringListMailbox(["abc", "ab", "a"])
+        self.assertEqual(mailbox.listMessages(0), 3)
+        self.assertEqual(mailbox.listMessages(1), 2)
+        self.assertEqual(mailbox.listMessages(2), 1)
+
+
+    def test_listAllMessages(self):
+        """
+        L{StringListMailbox.listMessages} returns a list of the lengths of all
+        messages if not passed an index.
+        """
+        mailbox = mail.maildir.StringListMailbox(["a", "abc", "ab"])
+        self.assertEqual(mailbox.listMessages(), [1, 3, 2])
+
+
+    def test_getMessage(self):
+        """
+        L{StringListMailbox.getMessage} returns a file-like object from which
+        the contents of the message at the given offset into the mailbox can be
+        read.
+        """
+        mailbox = mail.maildir.StringListMailbox(["foo", "real contents"])
+        self.assertEqual(mailbox.getMessage(1).read(), "real contents")
+
+
+    def test_getUidl(self):
+        """
+        L{StringListMailbox.getUidl} returns a unique identifier for the
+        message at the given offset into the mailbox.
+        """
+        mailbox = mail.maildir.StringListMailbox(["foo", "bar"])
+        self.assertNotEqual(mailbox.getUidl(0), mailbox.getUidl(1))
+
+
+    def test_deleteMessage(self):
+        """
+        L{StringListMailbox.deleteMessage} marks a message for deletion,
+        causing further requests for its length to return 0.
+        """
+        mailbox = mail.maildir.StringListMailbox(["foo"])
+        mailbox.deleteMessage(0)
+        self.assertEqual(mailbox.listMessages(0), 0)
+        self.assertEqual(mailbox.listMessages(), [0])
+
+
+    def test_undeleteMessages(self):
+        """
+        L{StringListMailbox.undeleteMessages} causes any messages marked for
+        deletion to be returned to their original state.
+        """
+        mailbox = mail.maildir.StringListMailbox(["foo"])
+        mailbox.deleteMessage(0)
+        mailbox.undeleteMessages()
+        self.assertEqual(mailbox.listMessages(0), 3)
+        self.assertEqual(mailbox.listMessages(), [3])
+
+
+    def test_sync(self):
+        """
+        L{StringListMailbox.sync} causes any messages marked for deletion to
+        be permanently deleted.
+        """
+        mailbox = mail.maildir.StringListMailbox(["foo"])
+        mailbox.deleteMessage(0)
+        mailbox.sync()
+        mailbox.undeleteMessages()
+        self.assertEqual(mailbox.listMessages(0), 0)
+        self.assertEqual(mailbox.listMessages(), [0])
+
+
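+# Minimal sketch (not part of the upstream code) of the mailbox contract the
+# StringListMailboxTests above describe: deleted messages report a length of
+# 0, undeleteMessages() restores them, and sync() makes deletions permanent.
+# The real implementation is mail.maildir.StringListMailbox; this class only
+# restates the behaviour asserted by the tests.
+class _SketchStringMailbox(object):
+    def __init__(self, msgs):
+        self.msgs = list(msgs)
+        self.deleted = set()
+
+    def listMessages(self, i=None):
+        if i is None:
+            return [self.listMessages(j) for j in range(len(self.msgs))]
+        return 0 if i in self.deleted else len(self.msgs[i])
+
+    def getMessage(self, i):
+        return StringIO.StringIO(self.msgs[i])
+
+    def getUidl(self, i):
+        return i  # anything unique per message will do for this sketch
+
+    def deleteMessage(self, i):
+        self.deleted.add(i)
+
+    def undeleteMessages(self):
+        self.deleted = set()
+
+    def sync(self):
+        # Permanently blank out deleted messages, matching the "length 0"
+        # behaviour asserted in test_sync above.
+        for i in self.deleted:
+            self.msgs[i] = ''
+        self.deleted = set()
+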
+
+class FailingMaildirMailboxAppendMessageTask(mail.maildir._MaildirMailboxAppendMessageTask):
+    _openstate = True
+    _writestate = True
+    _renamestate = True
+    def osopen(self, fn, attr, mode):
+        if self._openstate:
+            return os.open(fn, attr, mode)
+        else:
+            raise OSError(errno.EPERM, "Faked Permission Problem")
+    def oswrite(self, fh, data):
+        if self._writestate:
+            return os.write(fh, data)
+        else:
+            raise OSError(errno.ENOSPC, "Faked Space problem")
+    def osrename(self, oldname, newname):
+        if self._renamestate:
+            return os.rename(oldname, newname)
+        else:
+            raise OSError(errno.EPERM, "Faked Permission Problem")
+
+
+class _AppendTestMixin(object):
+    """
+    Mixin for L{MaildirMailbox.appendMessage} test cases which defines a helper
+    for serially appending multiple messages to a mailbox.
+    """
+    def _appendMessages(self, mbox, messages):
+        """
+        Deliver the given messages one at a time.  Delivery is serialized to
+        guarantee a predictable order in the mailbox (overlapped message
+        delivery makes no guarantee about which message will appear first).
+        """
+        results = []
+        def append():
+            for m in messages:
+                d = mbox.appendMessage(m)
+                d.addCallback(results.append)
+                yield d
+        d = task.cooperate(append()).whenDone()
+        d.addCallback(lambda ignored: results)
+        return d
+
+
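+# Illustrative sketch (not part of the upstream code) of the serialization
+# trick _AppendTestMixin uses above: task.cooperate() steps a generator, and
+# each yielded Deferred pauses iteration until it has fired, so the
+# deliveries run strictly one after another.  The names are hypothetical.
+def _deliverSerially(deliver, items):
+    """
+    Call C{deliver} (a one-argument function returning a Deferred) for each
+    item in C{items}, never starting a call before the previous one has
+    finished.  Return a Deferred which fires with the list of results.
+    """
+    results = []
+    def steps():
+        for item in items:
+            d = deliver(item)
+            d.addCallback(results.append)
+            yield d
+    done = task.cooperate(steps()).whenDone()
+    return done.addCallback(lambda ignored: results)
+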
+
+class MaildirAppendStringTestCase(unittest.TestCase, _AppendTestMixin):
+    """
+    Tests for L{MaildirMailbox.appendMessage} when invoked with a C{str}.
+    """
+    def setUp(self):
+        self.d = self.mktemp()
+        mail.maildir.initializeMaildir(self.d)
+
+
+    def _append(self, ignored, mbox):
+        d = mbox.appendMessage('TEST')
+        return self.assertFailure(d, Exception)
+
+
+    def _setState(self, ignored, mbox, rename=None, write=None, open=None):
+        """
+        Change the behavior of future C{rename}, C{write}, or C{open} calls made
+        by the mailbox C{mbox}.
+
+        @param rename: If not C{None}, a new value for the C{_renamestate}
+            attribute of the mailbox's append factory.  The original value will
+            be restored at the end of the test.
+
+        @param write: Like C{rename}, but for the C{_writestate} attribute.
+
+        @param open: Like C{rename}, but for the C{_openstate} attribute.
+        """
+        if rename is not None:
+            self.addCleanup(
+                setattr, mbox.AppendFactory, '_renamestate',
+                mbox.AppendFactory._renamestate)
+            mbox.AppendFactory._renamestate = rename
+        if write is not None:
+            self.addCleanup(
+                setattr, mbox.AppendFactory, '_writestate',
+                mbox.AppendFactory._writestate)
+            mbox.AppendFactory._writestate = write
+        if open is not None:
+            self.addCleanup(
+                setattr, mbox.AppendFactory, '_openstate',
+                mbox.AppendFactory._openstate)
+            mbox.AppendFactory._openstate = open
+
+
+    def test_append(self):
+        """
+        L{MaildirMailbox.appendMessage} returns a L{Deferred} which fires when
+        the message has been added to the end of the mailbox.
+        """
+        mbox = mail.maildir.MaildirMailbox(self.d)
+        mbox.AppendFactory = FailingMaildirMailboxAppendMessageTask
+
+        d = self._appendMessages(mbox, ["X" * i for i in range(1, 11)])
+        d.addCallback(self.assertEqual, [None] * 10)
+        d.addCallback(self._cbTestAppend, mbox)
+        return d
+
+
+    def _cbTestAppend(self, ignored, mbox):
+        """
+        Check that the mailbox has the expected number (ten) of messages in it,
+        and that each has the expected contents, and that they are in the same
+        order as that in which they were appended.
+        """
+        self.assertEqual(len(mbox.listMessages()), 10)
+        self.assertEqual(
+            [len(mbox.getMessage(i).read()) for i in range(10)],
+            range(1, 11))
+        # Exercise the failure modes in order, from the last error location
+        # (rename) down to the first (open).
+        self._setState(None, mbox, rename=False)
+        d = self._append(None, mbox)
+        d.addCallback(self._setState, mbox, rename=True, write=False)
+        d.addCallback(self._append, mbox)
+        d.addCallback(self._setState, mbox, write=True, open=False)
+        d.addCallback(self._append, mbox)
+        d.addCallback(self._setState, mbox, open=True)
+        return d
+
+
+
+class MaildirAppendFileTestCase(unittest.TestCase, _AppendTestMixin):
+    """
+    Tests for L{MaildirMailbox.appendMessage} when invoked with a file-like
+    object.
+    """
+    def setUp(self):
+        self.d = self.mktemp()
+        mail.maildir.initializeMaildir(self.d)
+
+
+    def test_append(self):
+        """
+        L{MaildirMailbox.appendMessage} returns a L{Deferred} which fires when
+        the message has been added to the end of the mailbox.
+        """
+        mbox = mail.maildir.MaildirMailbox(self.d)
+        messages = []
+        for i in xrange(1, 11):
+            temp = tempfile.TemporaryFile()
+            temp.write("X" * i)
+            temp.seek(0, 0)
+            messages.append(temp)
+            self.addCleanup(temp.close)
+
+        d = self._appendMessages(mbox, messages)
+        d.addCallback(self._cbTestAppend, mbox)
+        return d
+
+
+    def _cbTestAppend(self, result, mbox):
+        """
+        Check that the mailbox has the expected number (ten) of messages in it,
+        and that each has the expected contents, and that they are in the same
+        order as that in which they were appended.
+        """
+        self.assertEqual(len(mbox.listMessages()), 10)
+        self.assertEqual(
+            [len(mbox.getMessage(i).read()) for i in range(10)],
+            range(1, 11))
+
+
+
+class MaildirTestCase(unittest.TestCase):
+    def setUp(self):
+        self.d = self.mktemp()
+        mail.maildir.initializeMaildir(self.d)
+
+    def tearDown(self):
+        shutil.rmtree(self.d)
+
+    def testInitializer(self):
+        d = self.d
+        trash = os.path.join(d, '.Trash')
+
+        self.failUnless(os.path.exists(d) and os.path.isdir(d))
+        self.failUnless(os.path.exists(os.path.join(d, 'new')))
+        self.failUnless(os.path.exists(os.path.join(d, 'cur')))
+        self.failUnless(os.path.exists(os.path.join(d, 'tmp')))
+        self.failUnless(os.path.isdir(os.path.join(d, 'new')))
+        self.failUnless(os.path.isdir(os.path.join(d, 'cur')))
+        self.failUnless(os.path.isdir(os.path.join(d, 'tmp')))
+
+        self.failUnless(os.path.exists(os.path.join(trash, 'new')))
+        self.failUnless(os.path.exists(os.path.join(trash, 'cur')))
+        self.failUnless(os.path.exists(os.path.join(trash, 'tmp')))
+        self.failUnless(os.path.isdir(os.path.join(trash, 'new')))
+        self.failUnless(os.path.isdir(os.path.join(trash, 'cur')))
+        self.failUnless(os.path.isdir(os.path.join(trash, 'tmp')))
+
+
+    def test_nameGenerator(self):
+        """
+        Each call to L{_MaildirNameGenerator.generate} returns a unique
+        string suitable for use as the basename of a new message file.  The
+        names are ordered such that those generated earlier sort less than
+        those generated later.
+        """
+        clock = task.Clock()
+        clock.advance(0.05)
+        generator = mail.maildir._MaildirNameGenerator(clock)
+
+        firstName = generator.generate()
+        clock.advance(0.05)
+        secondName = generator.generate()
+
+        self.assertTrue(firstName < secondName)
+
+
+    def test_mailbox(self):
+        """
+        Exercise the methods of L{IMailbox} as implemented by
+        L{MaildirMailbox}.
+        """
+        j = os.path.join
+        n = mail.maildir._generateMaildirName
+        msgs = [j(b, n()) for b in ('cur', 'new') for x in range(5)]
+
+        # Toss a few files into the mailbox
+        i = 1
+        for f in msgs:
+            fObj = file(j(self.d, f), 'w')
+            fObj.write('x' * i)
+            fObj.close()
+            i = i + 1
+
+        mb = mail.maildir.MaildirMailbox(self.d)
+        self.assertEqual(mb.listMessages(), range(1, 11))
+        self.assertEqual(mb.listMessages(1), 2)
+        self.assertEqual(mb.listMessages(5), 6)
+
+        self.assertEqual(mb.getMessage(6).read(), 'x' * 7)
+        self.assertEqual(mb.getMessage(1).read(), 'x' * 2)
+
+        d = {}
+        for i in range(10):
+            u = mb.getUidl(i)
+            self.failIf(u in d)
+            d[u] = None
+
+        p, f = os.path.split(msgs[5])
+
+        mb.deleteMessage(5)
+        self.assertEqual(mb.listMessages(5), 0)
+        self.failUnless(os.path.exists(j(self.d, '.Trash', 'cur', f)))
+        self.failIf(os.path.exists(j(self.d, msgs[5])))
+
+        mb.undeleteMessages()
+        self.assertEqual(mb.listMessages(5), 6)
+        self.failIf(os.path.exists(j(self.d, '.Trash', 'cur', f)))
+        self.failUnless(os.path.exists(j(self.d, msgs[5])))
+
+class MaildirDirdbmDomainTestCase(unittest.TestCase):
+    def setUp(self):
+        self.P = self.mktemp()
+        self.S = mail.mail.MailService()
+        self.D = mail.maildir.MaildirDirdbmDomain(self.S, self.P)
+
+    def tearDown(self):
+        shutil.rmtree(self.P)
+
+    def testAddUser(self):
+        toAdd = (('user1', 'pwd1'), ('user2', 'pwd2'), ('user3', 'pwd3'))
+        for (u, p) in toAdd:
+            self.D.addUser(u, p)
+
+        for (u, p) in toAdd:
+            self.failUnless(u in self.D.dbm)
+            self.assertEqual(self.D.dbm[u], p)
+            self.failUnless(os.path.exists(os.path.join(self.P, u)))
+
+    def testCredentials(self):
+        creds = self.D.getCredentialsCheckers()
+
+        self.assertEqual(len(creds), 1)
+        self.failUnless(cred.checkers.ICredentialsChecker.providedBy(creds[0]))
+        self.failUnless(cred.credentials.IUsernamePassword in creds[0].credentialInterfaces)
+
+    def testRequestAvatar(self):
+        class ISomething(Interface):
+            pass
+
+        self.D.addUser('user', 'password')
+        self.assertRaises(
+            NotImplementedError,
+            self.D.requestAvatar, 'user', None, ISomething
+        )
+
+        t = self.D.requestAvatar('user', None, pop3.IMailbox)
+        self.assertEqual(len(t), 3)
+        self.failUnless(t[0] is pop3.IMailbox)
+        self.failUnless(pop3.IMailbox.providedBy(t[1]))
+
+        t[2]()
+
+    def testRequestAvatarId(self):
+        self.D.addUser('user', 'password')
+        database = self.D.getCredentialsCheckers()[0]
+
+        creds = cred.credentials.UsernamePassword('user', 'wrong password')
+        self.assertRaises(
+            cred.error.UnauthorizedLogin,
+            database.requestAvatarId, creds
+        )
+
+        creds = cred.credentials.UsernamePassword('user', 'password')
+        self.assertEqual(database.requestAvatarId(creds), 'user')
+
+
+class StubAliasableDomain(object):
+    """
+    Minimal testable implementation of IAliasableDomain.
+    """
+    implements(mail.mail.IAliasableDomain)
+
+    def exists(self, user):
+        """
+        No test coverage for invocations of this method on domain objects,
+        so we just won't implement it.
+        """
+        raise NotImplementedError()
+
+
+    def addUser(self, user, password):
+        """
+        No test coverage for invocations of this method on domain objects,
+        so we just won't implement it.
+        """
+        raise NotImplementedError()
+
+
+    def getCredentialsCheckers(self):
+        """
+        This needs to succeed in order for other tests to complete
+        successfully, but we don't actually assert anything about its
+        behavior.  Return an empty list.  Sometime later we should return
+        something else and assert that a portal got set up properly.
+        """
+        return []
+
+
+    def setAliasGroup(self, aliases):
+        """
+        Just record the value so the test can check it later.
+        """
+        self.aliasGroup = aliases
+
+
+class ServiceDomainTestCase(unittest.TestCase):
+    def setUp(self):
+        self.S = mail.mail.MailService()
+        self.D = mail.protocols.DomainDeliveryBase(self.S, None)
+        self.D.service = self.S
+        self.D.protocolName = 'TEST'
+        self.D.host = 'hostname'
+
+        self.tmpdir = self.mktemp()
+        domain = mail.maildir.MaildirDirdbmDomain(self.S, self.tmpdir)
+        domain.addUser('user', 'password')
+        self.S.addDomain('test.domain', domain)
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
+
+
+    def testAddAliasableDomain(self):
+        """
+        Test that adding an IAliasableDomain to a mail service properly sets
+        up the domain's alias group reference.
+        """
+        aliases = object()
+        domain = StubAliasableDomain()
+        self.S.aliases = aliases
+        self.S.addDomain('example.com', domain)
+        self.assertIdentical(domain.aliasGroup, aliases)
+
+
+    def testReceivedHeader(self):
+        hdr = self.D.receivedHeader(
+            ('remotehost', '123.232.101.234'),
+            smtp.Address('<someguy@somplace>'),
+            ['user@host.name']
+        )
+        fp = StringIO.StringIO(hdr)
+        m = rfc822.Message(fp)
+        self.assertEqual(len(m.items()), 1)
+        self.failUnless(m.has_key('Received'))
+
+    def testValidateTo(self):
+        user = smtp.User('user@test.domain', 'helo', None, 'wherever@whatever')
+        return defer.maybeDeferred(self.D.validateTo, user
+            ).addCallback(self._cbValidateTo
+            )
+
+    def _cbValidateTo(self, result):
+        self.failUnless(callable(result))
+
+    def testValidateToBadUsername(self):
+        user = smtp.User('resu@test.domain', 'helo', None, 'wherever@whatever')
+        return self.assertFailure(
+            defer.maybeDeferred(self.D.validateTo, user),
+            smtp.SMTPBadRcpt)
+
+    def testValidateToBadDomain(self):
+        user = smtp.User('user@domain.test', 'helo', None, 'wherever@whatever')
+        return self.assertFailure(
+            defer.maybeDeferred(self.D.validateTo, user),
+            smtp.SMTPBadRcpt)
+
+    def testValidateFrom(self):
+        helo = ('hostname', '127.0.0.1')
+        origin = smtp.Address('<user@hostname>')
+        self.failUnless(self.D.validateFrom(helo, origin) is origin)
+
+        helo = ('hostname', '1.2.3.4')
+        origin = smtp.Address('<user@hostname>')
+        self.failUnless(self.D.validateFrom(helo, origin) is origin)
+
+        helo = ('hostname', '1.2.3.4')
+        origin = smtp.Address('<>')
+        self.failUnless(self.D.validateFrom(helo, origin) is origin)
+
+        self.assertRaises(
+            smtp.SMTPBadSender,
+            self.D.validateFrom, None, origin
+        )
+
+class VirtualPOP3TestCase(unittest.TestCase):
+    def setUp(self):
+        self.tmpdir = self.mktemp()
+        self.S = mail.mail.MailService()
+        self.D = mail.maildir.MaildirDirdbmDomain(self.S, self.tmpdir)
+        self.D.addUser('user', 'password')
+        self.S.addDomain('test.domain', self.D)
+
+        portal = cred.portal.Portal(self.D)
+        map(portal.registerChecker, self.D.getCredentialsCheckers())
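+        # Use the same portal as the default ('') portal and for
+        # 'test.domain', so lookups for either authenticate against the
+        # maildir domain's checkers.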
+        self.S.portals[''] = self.S.portals['test.domain'] = portal
+
+        self.P = mail.protocols.VirtualPOP3()
+        self.P.service = self.S
+        self.P.magic = '<unit test magic>'
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
+
+    def testAuthenticateAPOP(self):
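+        # The APOP response is the hex MD5 digest of the server's greeting
+        # banner (here self.P.magic) concatenated with the password.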
+        resp = md5(self.P.magic + 'password').hexdigest()
+        return self.P.authenticateUserAPOP('user', resp
+            ).addCallback(self._cbAuthenticateAPOP
+            )
+
+    def _cbAuthenticateAPOP(self, result):
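+        # Successful logins yield an (interface, avatar, logout) triple;
+        # invoke the logout callable to clean up.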
+        self.assertEqual(len(result), 3)
+        self.assertEqual(result[0], pop3.IMailbox)
+        self.failUnless(pop3.IMailbox.providedBy(result[1]))
+        result[2]()
+
+    def testAuthenticateIncorrectUserAPOP(self):
+        resp = md5(self.P.magic + 'password').hexdigest()
+        return self.assertFailure(
+            self.P.authenticateUserAPOP('resu', resp),
+            cred.error.UnauthorizedLogin)
+
+    def testAuthenticateIncorrectResponseAPOP(self):
+        resp = md5('wrong digest').hexdigest()
+        return self.assertFailure(
+            self.P.authenticateUserAPOP('user', resp),
+            cred.error.UnauthorizedLogin)
+
+    def testAuthenticatePASS(self):
+        return self.P.authenticateUserPASS('user', 'password'
+            ).addCallback(self._cbAuthenticatePASS
+            )
+
+    def _cbAuthenticatePASS(self, result):
+        self.assertEqual(len(result), 3)
+        self.assertEqual(result[0], pop3.IMailbox)
+        self.failUnless(pop3.IMailbox.providedBy(result[1]))
+        result[2]()
+
+    def testAuthenticateBadUserPASS(self):
+        return self.assertFailure(
+            self.P.authenticateUserPASS('resu', 'password'),
+            cred.error.UnauthorizedLogin)
+
+    def testAuthenticateBadPasswordPASS(self):
+        return self.assertFailure(
+            self.P.authenticateUserPASS('user', 'wrong password'),
+            cred.error.UnauthorizedLogin)
+
+class empty(smtp.User):
+    def __init__(self):
+        pass
+
+class RelayTestCase(unittest.TestCase):
+    def testExists(self):
+        service = mail.mail.MailService()
+        domain = mail.relay.DomainQueuer(service)
+
+        doRelay = [
+            address.UNIXAddress('/var/run/mail-relay'),
+            address.IPv4Address('TCP', '127.0.0.1', 12345),
+        ]
+
+        dontRelay = [
+            address.IPv4Address('TCP', '192.168.2.1', 62),
+            address.IPv4Address('TCP', '1.2.3.4', 1943),
+        ]
+
+        for peer in doRelay:
+            user = empty()
+            user.orig = 'user@host'
+            user.dest = 'tsoh@resu'
+            user.protocol = empty()
+            user.protocol.transport = empty()
+            user.protocol.transport.getPeer = lambda: peer
+
+            self.failUnless(callable(domain.exists(user)))
+
+        for peer in dontRelay:
+            user = empty()
+            user.orig = 'some@place'
+            user.protocol = empty()
+            user.protocol.transport = empty()
+            user.protocol.transport.getPeer = lambda: peer
+            user.dest = 'who@cares'
+
+            self.assertRaises(smtp.SMTPBadRcpt, domain.exists, user)
+
+class RelayerTestCase(unittest.TestCase):
+    def setUp(self):
+        self.tmpdir = self.mktemp()
+        os.mkdir(self.tmpdir)
+        self.messageFiles = []
+        for i in range(10):
+            name = os.path.join(self.tmpdir, 'body-%d' % (i,))
+            f = file(name + '-H', 'w')
+            pickle.dump(['from-%d' % (i,), 'to-%d' % (i,)], f)
+            f.close()
+
+            f = file(name + '-D', 'w')
+            f.write(name)
+            f.seek(0, 0)
+            self.messageFiles.append(name)
+
+        self.R = mail.relay.RelayerMixin()
+        self.R.loadMessages(self.messageFiles)
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
+
+    def testMailFrom(self):
+        for i in range(10):
+            self.assertEqual(self.R.getMailFrom(), 'from-%d' % (i,))
+            self.R.sentMail(250, None, None, None, None)
+        self.assertEqual(self.R.getMailFrom(), None)
+
+    def testMailTo(self):
+        for i in range(10):
+            self.assertEqual(self.R.getMailTo(), ['to-%d' % (i,)])
+            self.R.sentMail(250, None, None, None, None)
+        self.assertEqual(self.R.getMailTo(), None)
+
+    def testMailData(self):
+        for i in range(10):
+            name = os.path.join(self.tmpdir, 'body-%d' % (i,))
+            self.assertEqual(self.R.getMailData().read(), name)
+            self.R.sentMail(250, None, None, None, None)
+        self.assertEqual(self.R.getMailData(), None)
+
+class Manager:
+    def __init__(self):
+        self.success = []
+        self.failure = []
+        self.done = []
+
+    def notifySuccess(self, factory, message):
+        self.success.append((factory, message))
+
+    def notifyFailure(self, factory, message):
+        self.failure.append((factory, message))
+
+    def notifyDone(self, factory):
+        self.done.append(factory)
+
+class ManagedRelayerTestCase(unittest.TestCase):
+    def setUp(self):
+        self.manager = Manager()
+        self.messages = range(0, 20, 2)
+        self.factory = object()
+        self.relay = mail.relaymanager.ManagedRelayerMixin(self.manager)
+        self.relay.messages = self.messages[:]
+        self.relay.names = self.messages[:]
+        self.relay.factory = self.factory
+
+    def testSuccessfulSentMail(self):
+        for i in self.messages:
+            self.relay.sentMail(250, None, None, None, None)
+
+        self.assertEqual(
+            self.manager.success,
+            [(self.factory, m) for m in self.messages]
+        )
+
+    def testFailedSentMail(self):
+        for i in self.messages:
+            self.relay.sentMail(550, None, None, None, None)
+
+        self.assertEqual(
+            self.manager.failure,
+            [(self.factory, m) for m in self.messages]
+        )
+
+    def testConnectionLost(self):
+        self.relay.connectionLost(failure.Failure(Exception()))
+        self.assertEqual(self.manager.done, [self.factory])
+
+class DirectoryQueueTestCase(unittest.TestCase):
+    def setUp(self):
+        # This is almost a test case itself.
+        self.tmpdir = self.mktemp()
+        os.mkdir(self.tmpdir)
+        self.queue = mail.relaymanager.Queue(self.tmpdir)
+        self.queue.noisy = False
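+        # Populate the queue with 25 messages, each with a pickled envelope
+        # header and a one-line body, then rescan the spool directory so they
+        # all show up as waiting.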
+        for m in range(25):
+            hdrF, msgF = self.queue.createNewMessage()
+            pickle.dump(['header', m], hdrF)
+            hdrF.close()
+            msgF.lineReceived('body: %d' % (m,))
+            msgF.eomReceived()
+        self.queue.readDirectory()
+
+    def tearDown(self):
+        shutil.rmtree(self.tmpdir)
+
+    def testWaiting(self):
+        self.failUnless(self.queue.hasWaiting())
+        self.assertEqual(len(self.queue.getWaiting()), 25)
+
+        waiting = self.queue.getWaiting()
+        self.queue.setRelaying(waiting[0])
+        self.assertEqual(len(self.queue.getWaiting()), 24)
+
+        self.queue.setWaiting(waiting[0])
+        self.assertEqual(len(self.queue.getWaiting()), 25)
+
+    def testRelaying(self):
+        for m in self.queue.getWaiting():
+            self.queue.setRelaying(m)
+            self.assertEqual(
+                len(self.queue.getRelayed()),
+                25 - len(self.queue.getWaiting())
+            )
+
+        self.failIf(self.queue.hasWaiting())
+
+        relayed = self.queue.getRelayed()
+        self.queue.setWaiting(relayed[0])
+        self.assertEqual(len(self.queue.getWaiting()), 1)
+        self.assertEqual(len(self.queue.getRelayed()), 24)
+
+    def testDone(self):
+        msg = self.queue.getWaiting()[0]
+        self.queue.setRelaying(msg)
+        self.queue.done(msg)
+
+        self.assertEqual(len(self.queue.getWaiting()), 24)
+        self.assertEqual(len(self.queue.getRelayed()), 0)
+
+        self.failIf(msg in self.queue.getWaiting())
+        self.failIf(msg in self.queue.getRelayed())
+
+    def testEnvelope(self):
+        envelopes = []
+
+        for msg in self.queue.getWaiting():
+            envelopes.append(self.queue.getEnvelope(msg))
+
+        envelopes.sort()
+        for i in range(25):
+            self.assertEqual(
+                envelopes.pop(0),
+                ['header', i]
+            )
+
+from twisted.names import server
+from twisted.names import client
+from twisted.names import common
+
+class TestAuthority(common.ResolverBase):
+    def __init__(self):
+        common.ResolverBase.__init__(self)
+        self.addresses = {}
+
+    def _lookup(self, name, cls, type, timeout = None):
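+        # Answer MX queries for configured names with one zero-preference
+        # record per configured exchange; fail everything else with
+        # DomainError.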
+        if name in self.addresses and type == dns.MX:
+            results = []
+            for a in self.addresses[name]:
+                hdr = dns.RRHeader(
+                    name, dns.MX, dns.IN, 60, dns.Record_MX(0, a)
+                )
+                results.append(hdr)
+            return defer.succeed((results, [], []))
+        return defer.fail(failure.Failure(dns.DomainError(name)))
+
+def setUpDNS(self):
+    self.auth = TestAuthority()
+    factory = server.DNSServerFactory([self.auth])
+    protocol = dns.DNSDatagramProtocol(factory)
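+    # Bind the DNS server to an arbitrary free TCP port, then try to claim
+    # the same port number for UDP; if the UDP bind fails, release the TCP
+    # port and retry until a port usable for both is found.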
+    while 1:
+        self.port = reactor.listenTCP(0, factory, interface='127.0.0.1')
+        portNumber = self.port.getHost().port
+
+        try:
+            self.udpPort = reactor.listenUDP(portNumber, protocol, interface='127.0.0.1')
+        except CannotListenError:
+            self.port.stopListening()
+        else:
+            break
+    self.resolver = client.Resolver(servers=[('127.0.0.1', portNumber)])
+
+
+def tearDownDNS(self):
+    dl = []
+    dl.append(defer.maybeDeferred(self.port.stopListening))
+    dl.append(defer.maybeDeferred(self.udpPort.stopListening))
+    try:
+        self.resolver._parseCall.cancel()
+    except:
+        pass
+    return defer.DeferredList(dl)
+
+class MXTestCase(unittest.TestCase):
+    """
+    Tests for L{mail.relaymanager.MXCalculator}.
+    """
+    def setUp(self):
+        setUpDNS(self)
+        self.clock = task.Clock()
+        self.mx = mail.relaymanager.MXCalculator(self.resolver, self.clock)
+
+    def tearDown(self):
+        return tearDownDNS(self)
+
+
+    def test_defaultClock(self):
+        """
+        L{MXCalculator}'s default clock is C{twisted.internet.reactor}.
+        """
+        self.assertIdentical(
+            mail.relaymanager.MXCalculator(self.resolver).clock,
+            reactor)
+
+
+    def testSimpleSuccess(self):
+        self.auth.addresses['test.domain'] = ['the.email.test.domain']
+        return self.mx.getMX('test.domain').addCallback(self._cbSimpleSuccess)
+
+    def _cbSimpleSuccess(self, mx):
+        self.assertEqual(mx.preference, 0)
+        self.assertEqual(str(mx.name), 'the.email.test.domain')
+
+    def testSimpleFailure(self):
+        self.mx.fallbackToDomain = False
+        return self.assertFailure(self.mx.getMX('test.domain'), IOError)
+
+    def testSimpleFailureWithFallback(self):
+        return self.assertFailure(self.mx.getMX('test.domain'), DNSLookupError)
+
+
+    def _exchangeTest(self, domain, records, correctMailExchange):
+        """
+        Issue an MX request for the given domain and arrange for it to be
+        responded to with the given records.  Verify that the resulting mail
+        exchange is the indicated host.
+
+        @type domain: C{str}
+        @type records: C{list} of L{RRHeader}
+        @type correctMailExchange: C{str}
+        @rtype: L{Deferred}
+        """
+        class DummyResolver(object):
+            def lookupMailExchange(self, name):
+                if name == domain:
+                    return defer.succeed((
+                            records,
+                            [],
+                            []))
+                return defer.fail(DNSNameError(domain))
+
+        self.mx.resolver = DummyResolver()
+        d = self.mx.getMX(domain)
+        def gotMailExchange(record):
+            self.assertEqual(str(record.name), correctMailExchange)
+        d.addCallback(gotMailExchange)
+        return d
+
+
+    def test_mailExchangePreference(self):
+        """
+        The MX record with the lowest preference is returned by
+        L{MXCalculator.getMX}.
+        """
+        domain = "example.com"
+        good = "good.example.com"
+        bad = "bad.example.com"
+
+        records = [
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(1, bad)),
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(0, good)),
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(2, bad))]
+        return self._exchangeTest(domain, records, good)
+
+
+    def test_badExchangeExcluded(self):
+        """
+        L{MXCalculator.getMX} returns the MX record with the lowest preference
+        which is not also marked as bad.
+        """
+        domain = "example.com"
+        good = "good.example.com"
+        bad = "bad.example.com"
+
+        records = [
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(0, bad)),
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(1, good))]
+        self.mx.markBad(bad)
+        return self._exchangeTest(domain, records, good)
+
+
+    def test_fallbackForAllBadExchanges(self):
+        """
+        L{MXCalculator.getMX} returns the MX record with the lowest preference
+        if all the MX records in the response have been marked bad.
+        """
+        domain = "example.com"
+        bad = "bad.example.com"
+        worse = "worse.example.com"
+
+        records = [
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(0, bad)),
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(1, worse))]
+        self.mx.markBad(bad)
+        self.mx.markBad(worse)
+        return self._exchangeTest(domain, records, bad)
+
+
+    def test_badExchangeExpires(self):
+        """
+        L{MXCalculator.getMX} returns the MX record with the lowest preference
+        if it was last marked bad longer than L{MXCalculator.timeOutBadMX}
+        seconds ago.
+        """
+        domain = "example.com"
+        good = "good.example.com"
+        previouslyBad = "bad.example.com"
+
+        records = [
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(0, previouslyBad)),
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(1, good))]
+        self.mx.markBad(previouslyBad)
+        self.clock.advance(self.mx.timeOutBadMX)
+        return self._exchangeTest(domain, records, previouslyBad)
+
+
+    def test_goodExchangeUsed(self):
+        """
+        L{MXCalculator.getMX} returns the MX record with the lowest preference
+        if it was marked good after it was marked bad.
+        """
+        domain = "example.com"
+        good = "good.example.com"
+        previouslyBad = "bad.example.com"
+
+        records = [
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(0, previouslyBad)),
+            RRHeader(name=domain,
+                     type=Record_MX.TYPE,
+                     payload=Record_MX(1, good))]
+        self.mx.markBad(previouslyBad)
+        self.mx.markGood(previouslyBad)
+        self.clock.advance(self.mx.timeOutBadMX)
+        return self._exchangeTest(domain, records, previouslyBad)
+
+
+    def test_successWithoutResults(self):
+        """
+        If an MX lookup succeeds but the result set is empty,
+        L{MXCalculator.getMX} should try to look up an I{A} record for the
+        requested name and call back its returned Deferred with that
+        address.
+        """
+        ip = '1.2.3.4'
+        domain = 'example.org'
+
+        class DummyResolver(object):
+            """
+            Fake resolver which will respond to an MX lookup with an empty
+            result set.
+
+            @ivar mx: A dictionary mapping hostnames to three-tuples of
+                results to be returned from I{MX} lookups.
+
+            @ivar a: A dictionary mapping hostnames to addresses to be
+                returned from I{A} lookups.
+            """
+            mx = {domain: ([], [], [])}
+            a = {domain: ip}
+
+            def lookupMailExchange(self, domain):
+                return defer.succeed(self.mx[domain])
+
+            def getHostByName(self, domain):
+                return defer.succeed(self.a[domain])
+
+        self.mx.resolver = DummyResolver()
+        d = self.mx.getMX(domain)
+        d.addCallback(self.assertEqual, Record_MX(name=ip))
+        return d
+
+
+    def test_failureWithSuccessfulFallback(self):
+        """
+        Test that if the MX record lookup fails, fallback is enabled, and an A
+        record is available for the name, then the Deferred returned by
+        L{MXCalculator.getMX} ultimately fires with a Record_MX instance which
+        gives the address in the A record for the name.
+        """
+        class DummyResolver(object):
+            """
+            Fake resolver which will fail an MX lookup but then succeed a
+            getHostByName call.
+            """
+            def lookupMailExchange(self, domain):
+                return defer.fail(DNSNameError())
+
+            def getHostByName(self, domain):
+                return defer.succeed("1.2.3.4")
+
+        self.mx.resolver = DummyResolver()
+        d = self.mx.getMX("domain")
+        d.addCallback(self.assertEqual, Record_MX(name="1.2.3.4"))
+        return d
+
+
+    def test_cnameWithoutGlueRecords(self):
+        """
+        If an MX lookup returns a single CNAME record as a result, MXCalculator
+        will perform an MX lookup for the canonical name indicated and return
+        the MX record which results.
+        """
+        alias = "alias.example.com"
+        canonical = "canonical.example.com"
+        exchange = "mail.example.com"
+
+        class DummyResolver(object):
+            """
+            Fake resolver which will return a CNAME for an MX lookup of a name
+            which is an alias and an MX for an MX lookup of the canonical name.
+            """
+            def lookupMailExchange(self, domain):
+                if domain == alias:
+                    return defer.succeed((
+                            [RRHeader(name=domain,
+                                      type=Record_CNAME.TYPE,
+                                      payload=Record_CNAME(canonical))],
+                            [], []))
+                elif domain == canonical:
+                    return defer.succeed((
+                            [RRHeader(name=domain,
+                                      type=Record_MX.TYPE,
+                                      payload=Record_MX(0, exchange))],
+                            [], []))
+                else:
+                    return defer.fail(DNSNameError(domain))
+
+        self.mx.resolver = DummyResolver()
+        d = self.mx.getMX(alias)
+        d.addCallback(self.assertEqual, Record_MX(name=exchange))
+        return d
+
+
+    def test_cnameChain(self):
+        """
+        If L{MXCalculator.getMX} encounters a CNAME chain which is longer than
+        the length specified, the returned L{Deferred} should errback with
+        L{CanonicalNameChainTooLong}.
+        """
+        class DummyResolver(object):
+            """
+            Fake resolver which generates a CNAME chain of infinite length in
+            response to MX lookups.
+            """
+            chainCounter = 0
+
+            def lookupMailExchange(self, domain):
+                self.chainCounter += 1
+                name = 'x-%d.example.com' % (self.chainCounter,)
+                return defer.succeed((
+                        [RRHeader(name=domain,
+                                  type=Record_CNAME.TYPE,
+                                  payload=Record_CNAME(name))],
+                        [], []))
+
+        cnameLimit = 3
+        self.mx.resolver = DummyResolver()
+        d = self.mx.getMX("mail.example.com", cnameLimit)
+        self.assertFailure(
+            d, twisted.mail.relaymanager.CanonicalNameChainTooLong)
+        def cbChainTooLong(error):
+            self.assertEqual(error.args[0], Record_CNAME("x-%d.example.com" % (cnameLimit + 1,)))
+            self.assertEqual(self.mx.resolver.chainCounter, cnameLimit + 1)
+        d.addCallback(cbChainTooLong)
+        return d
+
+
+    def test_cnameWithGlueRecords(self):
+        """
+        If an MX lookup returns a CNAME and the MX record for the CNAME, the
+        L{Deferred} returned by L{MXCalculator.getMX} should be called back
+        with the name from the MX record without further lookups being
+        attempted.
+        """
+        lookedUp = []
+        alias = "alias.example.com"
+        canonical = "canonical.example.com"
+        exchange = "mail.example.com"
+
+        class DummyResolver(object):
+            def lookupMailExchange(self, domain):
+                if domain != alias or lookedUp:
+                    # Don't give back any results for anything except the alias
+                    # or on any request after the first.
+                    return ([], [], [])
+                return defer.succeed((
+                        [RRHeader(name=alias,
+                                  type=Record_CNAME.TYPE,
+                                  payload=Record_CNAME(canonical)),
+                         RRHeader(name=canonical,
+                                  type=Record_MX.TYPE,
+                                  payload=Record_MX(name=exchange))],
+                        [], []))
+
+        self.mx.resolver = DummyResolver()
+        d = self.mx.getMX(alias)
+        d.addCallback(self.assertEqual, Record_MX(name=exchange))
+        return d
+
+
+    def test_cnameLoopWithGlueRecords(self):
+        """
+        If an MX lookup returns two CNAME records which point to each other,
+        the loop should be detected and the L{Deferred} returned by
+        L{MXCalculator.getMX} should be errbacked with L{CanonicalNameLoop}.
+        """
+        firstAlias = "cname1.example.com"
+        secondAlias = "cname2.example.com"
+
+        class DummyResolver(object):
+            def lookupMailExchange(self, domain):
+                return defer.succeed((
+                        [RRHeader(name=firstAlias,
+                                  type=Record_CNAME.TYPE,
+                                  payload=Record_CNAME(secondAlias)),
+                         RRHeader(name=secondAlias,
+                                  type=Record_CNAME.TYPE,
+                                  payload=Record_CNAME(firstAlias))],
+                        [], []))
+
+        self.mx.resolver = DummyResolver()
+        d = self.mx.getMX(firstAlias)
+        self.assertFailure(d, twisted.mail.relaymanager.CanonicalNameLoop)
+        return d
+
+
+    def testManyRecords(self):
+        self.auth.addresses['test.domain'] = [
+            'mx1.test.domain', 'mx2.test.domain', 'mx3.test.domain'
+        ]
+        return self.mx.getMX('test.domain'
+            ).addCallback(self._cbManyRecordsSuccessfulLookup
+            )
+
+    def _cbManyRecordsSuccessfulLookup(self, mx):
+        self.failUnless(str(mx.name).split('.', 1)[0] in ('mx1', 'mx2', 'mx3'))
+        self.mx.markBad(str(mx.name))
+        return self.mx.getMX('test.domain'
+            ).addCallback(self._cbManyRecordsDifferentResult, mx
+            )
+
+    def _cbManyRecordsDifferentResult(self, nextMX, mx):
+        self.assertNotEqual(str(mx.name), str(nextMX.name))
+        self.mx.markBad(str(nextMX.name))
+
+        return self.mx.getMX('test.domain'
+            ).addCallback(self._cbManyRecordsLastResult, mx, nextMX
+            )
+
+    def _cbManyRecordsLastResult(self, lastMX, mx, nextMX):
+        self.assertNotEqual(str(mx.name), str(lastMX.name))
+        self.assertNotEqual(str(nextMX.name), str(lastMX.name))
+
+        self.mx.markBad(str(lastMX.name))
+        self.mx.markGood(str(nextMX.name))
+
+        return self.mx.getMX('test.domain'
+            ).addCallback(self._cbManyRecordsRepeatSpecificResult, nextMX
+            )
+
+    def _cbManyRecordsRepeatSpecificResult(self, againMX, nextMX):
+        self.assertEqual(str(againMX.name), str(nextMX.name))
+
+class LiveFireExercise(unittest.TestCase):
+    if interfaces.IReactorUDP(reactor, None) is None:
+        skip = "UDP support is required to determine MX records"
+
+    def setUp(self):
+        setUpDNS(self)
+        self.tmpdirs = [
+            'domainDir', 'insertionDomain', 'insertionQueue',
+            'destinationDomain', 'destinationQueue'
+        ]
+
+    def tearDown(self):
+        for d in self.tmpdirs:
+            if os.path.exists(d):
+                shutil.rmtree(d)
+        return tearDownDNS(self)
+
+    def testLocalDelivery(self):
+        service = mail.mail.MailService()
+        service.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
+        domain = mail.maildir.MaildirDirdbmDomain(service, 'domainDir')
+        domain.addUser('user', 'password')
+        service.addDomain('test.domain', domain)
+        service.portals[''] = service.portals['test.domain']
+        map(service.portals[''].registerChecker, domain.getCredentialsCheckers())
+
+        service.setQueue(mail.relay.DomainQueuer(service))
+        manager = mail.relaymanager.SmartHostSMTPRelayingManager(service.queue, None)
+        helper = mail.relaymanager.RelayStateHelper(manager, 1)
+
+        f = service.getSMTPFactory()
+
+        self.smtpServer = reactor.listenTCP(0, f, interface='127.0.0.1')
+
+        client = LineSendingProtocol([
+            'HELO meson',
+            'MAIL FROM: <user@hostname>',
+            'RCPT TO: <user@test.domain>',
+            'DATA',
+            'This is the message',
+            '.',
+            'QUIT'
+        ])
+
+        done = Deferred()
+        f = protocol.ClientFactory()
+        f.protocol = lambda: client
+        f.clientConnectionLost = lambda *args: done.callback(None)
+        reactor.connectTCP('127.0.0.1', self.smtpServer.getHost().port, f)
+
+        def finished(ign):
+            mbox = domain.requestAvatar('user', None, pop3.IMailbox)[1]
+            msg = mbox.getMessage(0).read()
+            self.failIfEqual(msg.find('This is the message'), -1)
+
+            return self.smtpServer.stopListening()
+        done.addCallback(finished)
+        return done
+
+
+    def testRelayDelivery(self):
+        # Here is the service we will connect to and send mail from
+        insServ = mail.mail.MailService()
+        insServ.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
+        domain = mail.maildir.MaildirDirdbmDomain(insServ, 'insertionDomain')
+        insServ.addDomain('insertion.domain', domain)
+        os.mkdir('insertionQueue')
+        insServ.setQueue(mail.relaymanager.Queue('insertionQueue'))
+        insServ.domains.setDefaultDomain(mail.relay.DomainQueuer(insServ))
+        manager = mail.relaymanager.SmartHostSMTPRelayingManager(insServ.queue)
+        manager.fArgs += ('test.identity.hostname',)
+        helper = mail.relaymanager.RelayStateHelper(manager, 1)
+        # Yoink!  Now the internet obeys OUR every whim!
+        manager.mxcalc = mail.relaymanager.MXCalculator(self.resolver)
+        # And this is our whim.
+        self.auth.addresses['destination.domain'] = ['127.0.0.1']
+
+        f = insServ.getSMTPFactory()
+        self.insServer = reactor.listenTCP(0, f, interface='127.0.0.1')
+
+        # Here is the service the previous one will connect to for final
+        # delivery
+        destServ = mail.mail.MailService()
+        destServ.smtpPortal.registerChecker(cred.checkers.AllowAnonymousAccess())
+        domain = mail.maildir.MaildirDirdbmDomain(destServ, 'destinationDomain')
+        domain.addUser('user', 'password')
+        destServ.addDomain('destination.domain', domain)
+        os.mkdir('destinationQueue')
+        destServ.setQueue(mail.relaymanager.Queue('destinationQueue'))
+        manager2 = mail.relaymanager.SmartHostSMTPRelayingManager(destServ.queue)
+        helper = mail.relaymanager.RelayStateHelper(manager, 1)
+        helper.startService()
+
+        f = destServ.getSMTPFactory()
+        self.destServer = reactor.listenTCP(0, f, interface='127.0.0.1')
+
+        # Update the port number the *first* relay will connect to, because we can't use
+        # port 25
+        manager.PORT = self.destServer.getHost().port
+
+        client = LineSendingProtocol([
+            'HELO meson',
+            'MAIL FROM: <user@wherever>',
+            'RCPT TO: <user@destination.domain>',
+            'DATA',
+            'This is the message',
+            '.',
+            'QUIT'
+        ])
+
+        done = Deferred()
+        f = protocol.ClientFactory()
+        f.protocol = lambda: client
+        f.clientConnectionLost = lambda *args: done.callback(None)
+        reactor.connectTCP('127.0.0.1', self.insServer.getHost().port, f)
+
+        def finished(ign):
+            # First part of the delivery is done.  Poke the queue manually now
+            # so we don't have to wait for the queue to be flushed.
+            delivery = manager.checkState()
+            def delivered(ign):
+                mbox = domain.requestAvatar('user', None, pop3.IMailbox)[1]
+                msg = mbox.getMessage(0).read()
+                self.failIfEqual(msg.find('This is the message'), -1)
+
+                self.insServer.stopListening()
+                self.destServer.stopListening()
+                helper.stopService()
+            delivery.addCallback(delivered)
+            return delivery
+        done.addCallback(finished)
+        return done
+
+
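+# Sample aliases(5)-style data driving AliasTestCase below: comment lines, a
+# whitespace-continued entry, plain addresses, a file target ("/filename"), a
+# program target ("|/program") and an entry of the ":file" form.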
+aliasFile = StringIO.StringIO("""\
+# Here's a comment
+   # woop another one
+testuser:                   address1,address2, address3,
+    continuation@address, |/bin/process/this
+
+usertwo:thisaddress,thataddress, lastaddress
+lastuser:       :/includable, /filename, |/program, address
+""")
+
+class LineBufferMessage:
+    def __init__(self):
+        self.lines = []
+        self.eom = False
+        self.lost = False
+
+    def lineReceived(self, line):
+        self.lines.append(line)
+
+    def eomReceived(self):
+        self.eom = True
+        return defer.succeed('<Whatever>')
+
+    def connectionLost(self):
+        self.lost = True
+
+class AliasTestCase(unittest.TestCase):
+    lines = [
+        'First line',
+        'Next line',
+        '',
+        'After a blank line',
+        'Last line'
+    ]
+
+    def setUp(self):
+        aliasFile.seek(0)
+
+    def testHandle(self):
+        result = {}
+        lines = [
+            'user:  another@host\n',
+            'nextuser:  |/bin/program\n',
+            'user:  me@again\n',
+            'moreusers: :/etc/include/filename\n',
+            'multiuser: first@host, second@host,last@anotherhost',
+        ]
+
+        for l in lines:
+            mail.alias.handle(result, l, 'TestCase', None)
+
+        self.assertEqual(result['user'], ['another@host', 'me@again'])
+        self.assertEqual(result['nextuser'], ['|/bin/program'])
+        self.assertEqual(result['moreusers'], [':/etc/include/filename'])
+        self.assertEqual(result['multiuser'], ['first@host', 'second@host', 'last@anotherhost'])
+
+    def testFileLoader(self):
+        domains = {'': object()}
+        result = mail.alias.loadAliasFile(domains, fp=aliasFile)
+
+        self.assertEqual(len(result), 3)
+
+        group = result['testuser']
+        s = str(group)
+        for a in ('address1', 'address2', 'address3', 'continuation@address', '/bin/process/this'):
+            self.failIfEqual(s.find(a), -1)
+        self.assertEqual(len(group), 5)
+
+        group = result['usertwo']
+        s = str(group)
+        for a in ('thisaddress', 'thataddress', 'lastaddress'):
+            self.failIfEqual(s.find(a), -1)
+        self.assertEqual(len(group), 3)
+
+        group = result['lastuser']
+        s = str(group)
+        self.assertEqual(s.find('/includable'), -1)
+        for a in ('/filename', 'program', 'address'):
+            self.failIfEqual(s.find(a), -1, '%s not found' % a)
+        self.assertEqual(len(group), 3)
+
+    def testMultiWrapper(self):
+        msgs = LineBufferMessage(), LineBufferMessage(), LineBufferMessage()
+        msg = mail.alias.MultiWrapper(msgs)
+
+        for L in self.lines:
+            msg.lineReceived(L)
+        return msg.eomReceived().addCallback(self._cbMultiWrapper, msgs)
+
+    def _cbMultiWrapper(self, ignored, msgs):
+        for m in msgs:
+            self.failUnless(m.eom)
+            self.failIf(m.lost)
+            self.assertEqual(self.lines, m.lines)
+
+    def testFileAlias(self):
+        tmpfile = self.mktemp()
+        a = mail.alias.FileAlias(tmpfile, None, None)
+        m = a.createMessageReceiver()
+
+        for l in self.lines:
+            m.lineReceived(l)
+        return m.eomReceived().addCallback(self._cbTestFileAlias, tmpfile)
+
+    def _cbTestFileAlias(self, ignored, tmpfile):
+        lines = file(tmpfile).readlines()
+        self.assertEqual([L[:-1] for L in lines], self.lines)
+
+
+
+class DummyProcess(object):
+    __slots__ = ['onEnd']
+
+
+
+class MockProcessAlias(mail.alias.ProcessAlias):
+    """
+    An alias processor that doesn't actually launch processes.
+    """
+
+    def spawnProcess(self, proto, program, path):
+        """
+        Don't spawn a process.
+        """
+
+
+
+class MockAliasGroup(mail.alias.AliasGroup):
+    """
+    An alias group using C{MockProcessAlias}.
+    """
+    processAliasFactory = MockProcessAlias
+
+
+
+class StubProcess(object):
+    """
+    Fake implementation of L{IProcessTransport}.
+
+    @ivar signals: A list of all the signals which have been sent to this fake
+        process.
+    """
+    def __init__(self):
+        self.signals = []
+
+
+    def loseConnection(self):
+        """
+        No-op implementation of disconnection.
+        """
+
+
+    def signalProcess(self, signal):
+        """
+        Record a signal sent to this process for later inspection.
+        """
+        self.signals.append(signal)
+
+
+
+class ProcessAliasTestCase(unittest.TestCase):
+    """
+    Tests for alias resolution.
+    """
+    if interfaces.IReactorProcess(reactor, None) is None:
+        skip = "IReactorProcess not supported"
+
+    lines = [
+        'First line',
+        'Next line',
+        '',
+        'After a blank line',
+        'Last line'
+    ]
+
+    def exitStatus(self, code):
+        """
+        Construct a status from the given exit code.
+
+        @type code: L{int} between 0 and 255 inclusive.
+        @param code: The exit status which the code will represent.
+
+        @rtype: L{int}
+        @return: A status integer for the given exit code.
+        """
+        # /* Macros for constructing status values.  */
+        # #define __W_EXITCODE(ret, sig)  ((ret) << 8 | (sig))
+        status = (code << 8) | 0
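+        # e.g. an exit code of 1 yields status 1 << 8 == 256, for which
+        # os.WIFEXITED is true and os.WEXITSTATUS returns 1.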
+
+        # Sanity check
+        self.assertTrue(os.WIFEXITED(status))
+        self.assertEqual(os.WEXITSTATUS(status), code)
+        self.assertFalse(os.WIFSIGNALED(status))
+
+        return status
+
+
+    def signalStatus(self, signal):
+        """
+        Construct a status from the given signal.
+
+        @type signal: L{int} between 0 and 255 inclusive.
+        @param signal: The signal number which the status will represent.
+
+        @rtype: L{int}
+        @return: A status integer for the given signal.
+        """
+        # /* If WIFSIGNALED(STATUS), the terminating signal.  */
+        # #define __WTERMSIG(status)      ((status) & 0x7f)
+        # /* Nonzero if STATUS indicates termination by a signal.  */
+        # #define __WIFSIGNALED(status) \
+        #    (((signed char) (((status) & 0x7f) + 1) >> 1) > 0)
+        status = signal
+
+        # Sanity check
+        self.assertTrue(os.WIFSIGNALED(status))
+        self.assertEqual(os.WTERMSIG(status), signal)
+        self.assertFalse(os.WIFEXITED(status))
+
+        return status
+
+
+    def setUp(self):
+        """
+        Replace L{smtp.DNSNAME} with a well-known value.
+        """
+        self.DNSNAME = smtp.DNSNAME
+        smtp.DNSNAME = ''
+
+
+    def tearDown(self):
+        """
+        Restore the original value of L{smtp.DNSNAME}.
+        """
+        smtp.DNSNAME = self.DNSNAME
+
+
+    def test_processAlias(self):
+        """
+        Standard call to C{mail.alias.ProcessAlias}: check that the specified
+        script is called, and that the input is correctly transferred to it.
+        """
+        sh = FilePath(self.mktemp())
+        sh.setContent("""\
+#!/bin/sh
+rm -f process.alias.out
+while read i; do
+    echo $i >> process.alias.out
+done""")
+        os.chmod(sh.path, 0700)
+        a = mail.alias.ProcessAlias(sh.path, None, None)
+        m = a.createMessageReceiver()
+
+        for l in self.lines:
+            m.lineReceived(l)
+
+        def _cbProcessAlias(ignored):
+            lines = file('process.alias.out').readlines()
+            self.assertEqual([L[:-1] for L in lines], self.lines)
+
+        return m.eomReceived().addCallback(_cbProcessAlias)
+
+
+    def test_processAliasTimeout(self):
+        """
+        If the alias child process does not exit within a particular period of
+        time, the L{Deferred} returned by L{MessageWrapper.eomReceived} should
+        fail with L{ProcessAliasTimeout} and the I{KILL} signal should be sent
+        to the child process.
+        """
+        reactor = task.Clock()
+        transport = StubProcess()
+        proto = mail.alias.ProcessAliasProtocol()
+        proto.makeConnection(transport)
+
+        receiver = mail.alias.MessageWrapper(proto, None, reactor)
+        d = receiver.eomReceived()
+        reactor.advance(receiver.completionTimeout)
+        def timedOut(ignored):
+            self.assertEqual(transport.signals, ['KILL'])
+            # Now that it has been killed, disconnect the protocol associated
+            # with it.
+            proto.processEnded(
+                ProcessTerminated(self.signalStatus(signal.SIGKILL)))
+        self.assertFailure(d, mail.alias.ProcessAliasTimeout)
+        d.addCallback(timedOut)
+        return d
+
+
+    def test_earlyProcessTermination(self):
+        """
+        If the process associated with an L{mail.alias.MessageWrapper} exits
+        before I{eomReceived} is called, the L{Deferred} returned by
+        I{eomReceived} should fail.
+        """
+        transport = StubProcess()
+        protocol = mail.alias.ProcessAliasProtocol()
+        protocol.makeConnection(transport)
+        receiver = mail.alias.MessageWrapper(protocol, None, None)
+        protocol.processEnded(failure.Failure(ProcessDone(0)))
+        return self.assertFailure(receiver.eomReceived(), ProcessDone)
+
+
+    def _terminationTest(self, status):
+        """
+        Verify that if the process associated with an
+        L{mail.alias.MessageWrapper} exits with the given status, the
+        L{Deferred} returned by I{eomReceived} fails with L{ProcessTerminated}.
+        """
+        transport = StubProcess()
+        protocol = mail.alias.ProcessAliasProtocol()
+        protocol.makeConnection(transport)
+        receiver = mail.alias.MessageWrapper(protocol, None, None)
+        protocol.processEnded(
+            failure.Failure(ProcessTerminated(status)))
+        return self.assertFailure(receiver.eomReceived(), ProcessTerminated)
+
+
+    def test_errorProcessTermination(self):
+        """
+        If the process associated with an L{mail.alias.MessageWrapper} exits
+        with a non-zero exit code, the L{Deferred} returned by I{eomReceived}
+        should fail.
+        """
+        return self._terminationTest(self.exitStatus(1))
+
+
+    def test_signalProcessTermination(self):
+        """
+        If the process associated with an L{mail.alias.MessageWrapper} exits
+        because it received a signal, the L{Deferred} returned by
+        I{eomReceived} should fail.
+        """
+        return self._terminationTest(self.signalStatus(signal.SIGHUP))
+
+
+    def test_aliasResolution(self):
+        """
+        Check that the C{resolve} method of alias processors produces the
+        correct set of objects:
+            - a direct alias with L{mail.alias.AddressAlias} if a simple
+              address is passed
+            - aliases in a file with L{mail.alias.FileWrapper} if an input in
+              the format '/file' is given
+            - aliases resulting from a process call wrapped by
+              L{mail.alias.MessageWrapper} if the format is '|process'
+        """
+        aliases = {}
+        domain = {'': TestDomain(aliases, ['user1', 'user2', 'user3'])}
+        A1 = MockAliasGroup(['user1', '|echo', '/file'], domain, 'alias1')
+        A2 = MockAliasGroup(['user2', 'user3'], domain, 'alias2')
+        A3 = mail.alias.AddressAlias('alias1', domain, 'alias3')
+        aliases.update({
+            'alias1': A1,
+            'alias2': A2,
+            'alias3': A3,
+        })
+
+        res1 = A1.resolve(aliases)
+        r1 = map(str, res1.objs)
+        r1.sort()
+        expected = map(str, [
+            mail.alias.AddressAlias('user1', None, None),
+            mail.alias.MessageWrapper(DummyProcess(), 'echo'),
+            mail.alias.FileWrapper('/file'),
+        ])
+        expected.sort()
+        self.assertEqual(r1, expected)
+
+        res2 = A2.resolve(aliases)
+        r2 = map(str, res2.objs)
+        r2.sort()
+        expected = map(str, [
+            mail.alias.AddressAlias('user2', None, None),
+            mail.alias.AddressAlias('user3', None, None)
+        ])
+        expected.sort()
+        self.assertEqual(r2, expected)
+
+        res3 = A3.resolve(aliases)
+        r3 = map(str, res3.objs)
+        r3.sort()
+        expected = map(str, [
+            mail.alias.AddressAlias('user1', None, None),
+            mail.alias.MessageWrapper(DummyProcess(), 'echo'),
+            mail.alias.FileWrapper('/file'),
+        ])
+        expected.sort()
+        self.assertEqual(r3, expected)
+
+
+    def test_cyclicAlias(self):
+        """
+        Check that a cycle in alias resolution is correctly handled.
+        """
+        aliases = {}
+        domain = {'': TestDomain(aliases, [])}
+        A1 = mail.alias.AddressAlias('alias2', domain, 'alias1')
+        A2 = mail.alias.AddressAlias('alias3', domain, 'alias2')
+        A3 = mail.alias.AddressAlias('alias1', domain, 'alias3')
+        aliases.update({
+            'alias1': A1,
+            'alias2': A2,
+            'alias3': A3
+        })
+
+        self.assertEqual(aliases['alias1'].resolve(aliases), None)
+        self.assertEqual(aliases['alias2'].resolve(aliases), None)
+        self.assertEqual(aliases['alias3'].resolve(aliases), None)
+
+        A4 = MockAliasGroup(['|echo', 'alias1'], domain, 'alias4')
+        aliases['alias4'] = A4
+
+        res = A4.resolve(aliases)
+        r = map(str, res.objs)
+        r.sort()
+        expected = map(str, [
+            mail.alias.MessageWrapper(DummyProcess(), 'echo')
+        ])
+        expected.sort()
+        self.assertEqual(r, expected)
+
+
+
+
+
+
+class TestDomain:
+    def __init__(self, aliases, users):
+        self.aliases = aliases
+        self.users = users
+
+    def exists(self, user, memo=None):
+        user = user.dest.local
+        if user in self.users:
+            return lambda: mail.alias.AddressAlias(user, None, None)
+        try:
+            a = self.aliases[user]
+        except:
+            raise smtp.SMTPBadRcpt(user)
+        else:
+            aliases = a.resolve(self.aliases, memo)
+            if aliases:
+                return lambda: aliases
+            raise smtp.SMTPBadRcpt(user)
+
+
+
+class SSLContextFactoryTests(unittest.TestCase):
+    """
+    Tests for twisted.mail.protocols.SSLContextFactory.
+    """
+    def test_deprecation(self):
+        """
+        Accessing L{twisted.mail.protocols.SSLContextFactory} emits a
+        deprecation warning recommending the use of the more general SSL context
+        factory from L{twisted.internet.ssl}.
+        """
+        mail.protocols.SSLContextFactory
+        warningsShown = self.flushWarnings([self.test_deprecation])
+        self.assertEqual(len(warningsShown), 1)
+        self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warningsShown[0]['message'],
+            'twisted.mail.protocols.SSLContextFactory was deprecated in '
+            'Twisted 12.2.0: Use twisted.internet.ssl.'
+            'DefaultOpenSSLContextFactory instead.')
+
+
+
+from twisted.python.runtime import platformType
+import types
+if platformType != "posix":
+    for o in locals().values():
+        if isinstance(o, (types.ClassType, type)) and issubclass(o, unittest.TestCase):
+            o.skip = "twisted.mail only works on posix"
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_mailmail.py b/ThirdParty/Twisted/twisted/mail/test/test_mailmail.py
new file mode 100644
index 0000000..8b9e4d8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_mailmail.py
@@ -0,0 +1,75 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.mail.scripts.mailmail}, the implementation of the
+command line program I{mailmail}.
+"""
+
+import sys
+from StringIO import StringIO
+
+from twisted.trial.unittest import TestCase
+from twisted.mail.scripts.mailmail import parseOptions
+
+
+class OptionsTests(TestCase):
+    """
+    Tests for L{parseOptions} which parses command line arguments and reads
+    message text from stdin to produce an L{Options} instance which can be
+    used to send a message.
+    """
+    def test_unspecifiedRecipients(self):
+        """
+        If no recipients are given in the argument list and there is no
+        recipient header in the message text, L{parseOptions} raises
+        L{SystemExit} with a string describing the problem.
+        """
+        self.addCleanup(setattr, sys, 'stdin', sys.stdin)
+        sys.stdin = StringIO(
+            'Subject: foo\n'
+            '\n'
+            'Hello, goodbye.\n')
+        exc = self.assertRaises(SystemExit, parseOptions, [])
+        self.assertEqual(exc.args, ('No recipients specified.',))
+
+
+    def test_listQueueInformation(self):
+        """
+        The I{-bp} option for listing queue information is unsupported and
+        if it is passed to L{parseOptions}, L{SystemExit} is raised.
+        """
+        exc = self.assertRaises(SystemExit, parseOptions, ['-bp'])
+        self.assertEqual(exc.args, ("Unsupported option.",))
+
+
+    def test_stdioTransport(self):
+        """
+        The I{-bs} option for using stdin and stdout as the SMTP transport
+        is unsupported and if it is passed to L{parseOptions}, L{SystemExit}
+        is raised.
+        """
+        exc = self.assertRaises(SystemExit, parseOptions, ['-bs'])
+        self.assertEqual(exc.args, ("Unsupported option.",))
+
+
+    def test_ignoreFullStop(self):
+        """
+        The I{-i} and I{-oi} options for ignoring C{"."} by itself on a line
+        are unsupported and if either is passed to L{parseOptions},
+        L{SystemExit} is raised.
+        """
+        exc = self.assertRaises(SystemExit, parseOptions, ['-i'])
+        self.assertEqual(exc.args, ("Unsupported option.",))
+        exc = self.assertRaises(SystemExit, parseOptions, ['-oi'])
+        self.assertEqual(exc.args, ("Unsupported option.",))
+
+
+    def test_copyAliasedSender(self):
+        """
+        The I{-om} option for copying the sender if they appear in an alias
+        expansion is unsupported and if it is passed to L{parseOptions},
+        L{SystemExit} is raised.
+        """
+        exc = self.assertRaises(SystemExit, parseOptions, ['-om'])
+        self.assertEqual(exc.args, ("Unsupported option.",))
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_options.py b/ThirdParty/Twisted/twisted/mail/test/test_options.py
new file mode 100644
index 0000000..daee5d2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_options.py
@@ -0,0 +1,247 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.mail.tap}.
+"""
+
+from twisted.trial.unittest import TestCase
+
+from twisted.python.usage import UsageError
+from twisted.mail import protocols
+from twisted.mail.tap import Options, makeService
+from twisted.python.filepath import FilePath
+from twisted.internet import endpoints, defer
+from twisted.python import util
+
+try:
+    import OpenSSL
+except ImportError, e:
+    sslSkip = str(e)
+else:
+    sslSkip = None
+
+
+class OptionsTestCase(TestCase):
+    """
+    Tests for the command line option parser used for I{twistd mail}.
+    """
+    def setUp(self):
+        self.aliasFilename = self.mktemp()
+        aliasFile = file(self.aliasFilename, 'w')
+        aliasFile.write('someuser:\tdifferentuser\n')
+        aliasFile.close()
+
+
+    def testAliasesWithoutDomain(self):
+        """
+        Test that adding an aliases(5) file before adding a domain raises a
+        UsageError.
+        """
+        self.assertRaises(
+            UsageError,
+            Options().parseOptions,
+            ['--aliases', self.aliasFilename])
+
+
+    def testAliases(self):
+        """
+        Test that adding an aliases(5) file to an IAliasableDomain at least
+        doesn't raise an unhandled exception.
+        """
+        Options().parseOptions([
+            '--maildirdbmdomain', 'example.com=example.com',
+            '--aliases', self.aliasFilename])
+
+
+    def test_barePort(self):
+        """
+        A bare port passed to I{--pop3} results in a deprecation warning in
+        addition to a TCP4ServerEndpoint.
+        """
+        options = Options()
+        options.parseOptions(['--pop3', '8110'])
+        self.assertEqual(len(options['pop3']), 1)
+        self.assertIsInstance(
+            options['pop3'][0], endpoints.TCP4ServerEndpoint)
+        warnings = self.flushWarnings([options.opt_pop3])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "Specifying plain ports and/or a certificate is deprecated since "
+            "Twisted 11.0; use endpoint descriptions instead.")
+
+
+    def _endpointTest(self, service):
+        """
+        Use L{Options} to parse a single service configuration parameter and
+        verify that an endpoint of the correct type is added to the list for
+        that service.
+        """
+        options = Options()
+        options.parseOptions(['--' + service, 'tcp:1234'])
+        self.assertEqual(len(options[service]), 1)
+        self.assertIsInstance(
+            options[service][0], endpoints.TCP4ServerEndpoint)
+
+
+    def test_endpointSMTP(self):
+        """
+        When I{--smtp} is given a TCP endpoint description as an argument, a
+        TCPServerEndpoint is added to the list of SMTP endpoints.
+        """
+        self._endpointTest('smtp')
+
+
+    def test_endpointPOP3(self):
+        """
+        When I{--pop3} is given a TCP endpoint description as an argument, a
+        TCPServerEndpoint is added to the list of POP3 endpoints.
+        """
+        self._endpointTest('pop3')
+
+
+    def test_protoDefaults(self):
+        """
+        POP3 and SMTP each listen on a TCP4ServerEndpoint by default.
+        """
+        options = Options()
+        options.parseOptions([])
+
+        self.assertEqual(len(options['pop3']), 1)
+        self.assertIsInstance(
+            options['pop3'][0], endpoints.TCP4ServerEndpoint)
+
+        self.assertEqual(len(options['smtp']), 1)
+        self.assertIsInstance(
+            options['smtp'][0], endpoints.TCP4ServerEndpoint)
+
+
+    def test_protoDisable(self):
+        """
+        The I{--no-pop3} and I{--no-smtp} options disable POP3 and SMTP
+        respectively.
+        """
+        options = Options()
+        options.parseOptions(['--no-pop3'])
+        self.assertEqual(options._getEndpoints(None, 'pop3'), [])
+        self.assertNotEquals(options._getEndpoints(None, 'smtp'), [])
+
+        options = Options()
+        options.parseOptions(['--no-smtp'])
+        self.assertNotEquals(options._getEndpoints(None, 'pop3'), [])
+        self.assertEqual(options._getEndpoints(None, 'smtp'), [])
+
+
+    def test_allProtosDisabledError(self):
+        """
+        If all protocols are disabled, L{UsageError} is raised.
+        """
+        options = Options()
+        self.assertRaises(
+            UsageError, options.parseOptions, (['--no-pop3', '--no-smtp']))
+
+
+    def test_pop3sBackwardCompatibility(self):
+        """
+        The deprecated I{--pop3s} and I{--certificate} options set up a POP3 SSL
+        server.
+        """
+        cert = FilePath(__file__).sibling("server.pem")
+        options = Options()
+        options.parseOptions(['--pop3s', '8995',
+                              '--certificate', cert.path])
+        self.assertEqual(len(options['pop3']), 2)
+        self.assertIsInstance(
+            options['pop3'][0], endpoints.SSL4ServerEndpoint)
+        self.assertIsInstance(
+            options['pop3'][1], endpoints.TCP4ServerEndpoint)
+
+        warnings = self.flushWarnings([options.postOptions])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "Specifying plain ports and/or a certificate is deprecated since "
+            "Twisted 11.0; use endpoint descriptions instead.")
+    if sslSkip is not None:
+        test_pop3sBackwardCompatibility.skip = sslSkip
+
+
+    def test_esmtpWithoutHostname(self):
+        """
+        If I{--esmtp} is given without I{--hostname}, L{Options.parseOptions}
+        raises L{UsageError}.
+        """
+        options = Options()
+        exc = self.assertRaises(UsageError, options.parseOptions, ['--esmtp'])
+        self.assertEqual("--esmtp requires --hostname", str(exc))
+
+
+    def test_auth(self):
+        """
+        Tests that the --auth option registers a checker.
+        """
+        options = Options()
+        options.parseOptions(['--auth', 'memory:admin:admin:bob:password'])
+        self.assertEqual(len(options['credCheckers']), 1)
+        checker = options['credCheckers'][0]
+        interfaces = checker.credentialInterfaces
+        registered_checkers = options.service.smtpPortal.checkers
+        for iface in interfaces:
+            self.assertEqual(checker, registered_checkers[iface])
+
+
+
+class SpyEndpoint(object):
+    """
+    SpyEndpoint remembers what factory it is told to listen with.
+    """
+    listeningWith = None
+    def listen(self, factory):
+        self.listeningWith = factory
+        return defer.succeed(None)
+
+
+
+class MakeServiceTests(TestCase):
+    """
+    Tests for L{twisted.mail.tap.makeService}
+    """
+    def _endpointServerTest(self, key, factoryClass):
+        """
+        Configure a service with two endpoints for the protocol associated with
+        C{key} and verify that when the service is started a factory of type
+        C{factoryClass} is used to listen on each of them.
+        """
+        cleartext = SpyEndpoint()
+        secure = SpyEndpoint()
+        config = Options()
+        config[key] = [cleartext, secure]
+        service = makeService(config)
+        service.privilegedStartService()
+        service.startService()
+        self.addCleanup(service.stopService)
+        self.assertIsInstance(cleartext.listeningWith, factoryClass)
+        self.assertIsInstance(secure.listeningWith, factoryClass)
+
+
+    def test_pop3(self):
+        """
+        If one or more endpoints is included in the configuration passed to
+        L{makeService} for the C{"pop3"} key, a service for starting a POP3
+        server is constructed for each of them and attached to the returned
+        service.
+        """
+        self._endpointServerTest("pop3", protocols.POP3Factory)
+
+
+    def test_smtp(self):
+        """
+        If one or more endpoints is included in the configuration passed to
+        L{makeService} for the C{"smtp"} key, a service for starting an SMTP
+        server is constructed for each of them and attached to the returned
+        service.
+        """
+        self._endpointServerTest("smtp", protocols.SMTPFactory)
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_pop3.py b/ThirdParty/Twisted/twisted/mail/test/test_pop3.py
new file mode 100644
index 0000000..4379a1e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_pop3.py
@@ -0,0 +1,1071 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for L{twisted.mail.pop3} module.
+"""
+
+import StringIO
+import hmac
+import base64
+import itertools
+
+from zope.interface import implements
+
+from twisted.internet import defer
+
+from twisted.trial import unittest, util
+from twisted import mail
+import twisted.mail.protocols
+import twisted.mail.pop3
+import twisted.internet.protocol
+from twisted import internet
+from twisted.mail import pop3
+from twisted.protocols import loopback
+from twisted.python import failure
+from twisted.python.util import OrderedDict
+
+from twisted import cred
+import twisted.cred.portal
+import twisted.cred.checkers
+import twisted.cred.credentials
+
+from twisted.test.proto_helpers import LineSendingProtocol
+
+
+class UtilityTestCase(unittest.TestCase):
+    """
+    Test the various helper functions and classes used by the POP3 server
+    protocol implementation.
+    """
+
+    def testLineBuffering(self):
+        """
+        Test creating a LineBuffer and feeding it some lines.  The lines should
+        build up in its internal buffer for a while and then get spat out to
+        the writer.
+        """
+        output = []
+        input = iter(itertools.cycle(['012', '345', '6', '7', '8', '9']))
+        c = pop3._IteratorBuffer(output.extend, input, 6)
+        i = iter(c)
+        self.assertEqual(output, []) # nothing is buffered yet
+        i.next()
+        self.assertEqual(output, []) # '012' is buffered
+        i.next()
+        self.assertEqual(output, []) # '012345' is buffered
+        i.next()
+        self.assertEqual(output, ['012', '345', '6']) # nothing is buffered
+        for n in range(5):
+            i.next()
+        self.assertEqual(output, ['012', '345', '6', '7', '8', '9', '012', '345'])
+
+
+    def testFinishLineBuffering(self):
+        """
+        Test that a LineBuffer flushes everything when its iterator is
+        exhausted, and itself raises StopIteration.
+        """
+        output = []
+        input = iter(['a', 'b', 'c'])
+        c = pop3._IteratorBuffer(output.extend, input, 5)
+        for i in c:
+            pass
+        self.assertEqual(output, ['a', 'b', 'c'])
+
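The two buffering tests above pin down the flushing rule being exercised: items accumulate until their combined length exceeds the size hint, at which point the whole batch is handed to the writer, and whatever remains is flushed once the source iterator is exhausted. A minimal standalone sketch of that idea (not Twisted's _IteratorBuffer, just an illustration of the behavior the assertions describe):

    def iteratorBuffer(write, iterator, sizeHint):
        # Accumulate items until their total length exceeds sizeHint, then
        # hand the batch to write() and start over.  Whatever is left when
        # the source iterator runs dry is flushed on the way out.
        buffered = []
        bufferedSize = 0
        for item in iterator:
            buffered.append(item)
            bufferedSize += len(item)
            if bufferedSize > sizeHint:
                write(buffered)
                buffered = []
                bufferedSize = 0
            yield
        if buffered:
            write(buffered)

Driving this the same way as testLineBuffering (write=output.extend, sizeHint=6) reproduces the flush points asserted above.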
+
+    def testSuccessResponseFormatter(self):
+        """
+        Test that the function which formats POP3 success responses does so
+        correctly.
+        """
+        self.assertEqual(
+            pop3.successResponse('Great.'),
+            '+OK Great.\r\n')
+
+
+    def testStatLineFormatter(self):
+        """
+        Test that the function which formats stat lines does so appropriately.
+        """
+        statLine = list(pop3.formatStatResponse([]))[-1]
+        self.assertEqual(statLine, '+OK 0 0\r\n')
+
+        statLine = list(pop3.formatStatResponse([10, 31, 0, 10101]))[-1]
+        self.assertEqual(statLine, '+OK 4 10142\r\n')
+
+
+    def testListLineFormatter(self):
+        """
+        Test that the function which formats the lines in response to a LIST
+        command does so appropriately.
+        """
+        listLines = list(pop3.formatListResponse([]))
+        self.assertEqual(
+            listLines,
+            ['+OK 0\r\n', '.\r\n'])
+
+        listLines = list(pop3.formatListResponse([1, 2, 3, 100]))
+        self.assertEqual(
+            listLines,
+            ['+OK 4\r\n', '1 1\r\n', '2 2\r\n', '3 3\r\n', '4 100\r\n', '.\r\n'])
+
+
+
+    def testUIDListLineFormatter(self):
+        """
+        Test that the function which formats lines in response to a UIDL
+        command does so appropriately.
+        """
+        UIDs = ['abc', 'def', 'ghi']
+        listLines = list(pop3.formatUIDListResponse([], UIDs.__getitem__))
+        self.assertEqual(
+            listLines,
+            ['+OK \r\n', '.\r\n'])
+
+        listLines = list(pop3.formatUIDListResponse([123, 431, 591], UIDs.__getitem__))
+        self.assertEqual(
+            listLines,
+            ['+OK \r\n', '1 abc\r\n', '2 def\r\n', '3 ghi\r\n', '.\r\n'])
+
+        listLines = list(pop3.formatUIDListResponse([0, None, 591], UIDs.__getitem__))
+        self.assertEqual(
+            listLines,
+            ['+OK \r\n', '1 abc\r\n', '3 ghi\r\n', '.\r\n'])
+
+
+
+class MyVirtualPOP3(mail.protocols.VirtualPOP3):
+
+    magic = '<moshez>'
+
+    def authenticateUserAPOP(self, user, digest):
+        user, domain = self.lookupDomain(user)
+        return self.service.domains['baz.com'].authenticateUserAPOP(user, digest, self.magic, domain)
+
+class DummyDomain:
+
+   def __init__(self):
+       self.users = {}
+
+   def addUser(self, name):
+       self.users[name] = []
+
+   def addMessage(self, name, message):
+       self.users[name].append(message)
+
+   def authenticateUserAPOP(self, name, digest, magic, domain):
+       return pop3.IMailbox, ListMailbox(self.users[name]), lambda: None
+
+
+class ListMailbox:
+
+    def __init__(self, list):
+        self.list = list
+
+    def listMessages(self, i=None):
+        if i is None:
+            return map(len, self.list)
+        return len(self.list[i])
+
+    def getMessage(self, i):
+        return StringIO.StringIO(self.list[i])
+
+    def getUidl(self, i):
+        return i
+
+    def deleteMessage(self, i):
+        self.list[i] = ''
+
+    def sync(self):
+        pass
+
+class MyPOP3Downloader(pop3.POP3Client):
+
+    def handle_WELCOME(self, line):
+        pop3.POP3Client.handle_WELCOME(self, line)
+        self.apop('hello@baz.com', 'world')
+
+    def handle_APOP(self, line):
+        parts = line.split()
+        code = parts[0]
+        data = (parts[1:] or ['NONE'])[0]
+        if code != '+OK':
+            print parts
+            raise AssertionError, 'code is ' + code
+        self.lines = []
+        self.retr(1)
+
+    def handle_RETR_continue(self, line):
+        self.lines.append(line)
+
+    def handle_RETR_end(self):
+        self.message = '\n'.join(self.lines) + '\n'
+        self.quit()
+
+    def handle_QUIT(self, line):
+        if line[:3] != '+OK':
+            raise AssertionError, 'code is ' + line
+
+
+class POP3TestCase(unittest.TestCase):
+
+    message = '''\
+Subject: urgent
+
+Someone set up us the bomb!
+'''
+
+    expectedOutput = '''\
++OK <moshez>\015
++OK Authentication succeeded\015
++OK \015
+1 0\015
+.\015
++OK %d\015
+Subject: urgent\015
+\015
+Someone set up us the bomb!\015
+.\015
++OK \015
+''' % len(message)
+
+    def setUp(self):
+        self.factory = internet.protocol.Factory()
+        self.factory.domains = {}
+        self.factory.domains['baz.com'] = DummyDomain()
+        self.factory.domains['baz.com'].addUser('hello')
+        self.factory.domains['baz.com'].addMessage('hello', self.message)
+
+    def testMessages(self):
+        client = LineSendingProtocol([
+            'APOP hello@baz.com world',
+            'UIDL',
+            'RETR 1',
+            'QUIT',
+        ])
+        server =  MyVirtualPOP3()
+        server.service = self.factory
+        def check(ignored):
+            output = '\r\n'.join(client.response) + '\r\n'
+            self.assertEqual(output, self.expectedOutput)
+        return loopback.loopbackTCP(server, client).addCallback(check)
+
+    def testLoopback(self):
+        protocol =  MyVirtualPOP3()
+        protocol.service = self.factory
+        clientProtocol = MyPOP3Downloader()
+        def check(ignored):
+            self.assertEqual(clientProtocol.message, self.message)
+            protocol.connectionLost(
+                failure.Failure(Exception("Test harness disconnect")))
+        d = loopback.loopbackAsync(protocol, clientProtocol)
+        return d.addCallback(check)
+    testLoopback.suppress = [util.suppress(message="twisted.mail.pop3.POP3Client is deprecated")]
+
+
+
+class DummyPOP3(pop3.POP3):
+
+    magic = '<moshez>'
+
+    def authenticateUserAPOP(self, user, password):
+        return pop3.IMailbox, DummyMailbox(ValueError), lambda: None
+
+
+
+class DummyMailbox(pop3.Mailbox):
+
+    messages = ['From: moshe\nTo: moshe\n\nHow are you, friend?\n']
+
+    def __init__(self, exceptionType):
+        self.messages = DummyMailbox.messages[:]
+        self.exceptionType = exceptionType
+
+    def listMessages(self, i=None):
+        if i is None:
+            return map(len, self.messages)
+        if i >= len(self.messages):
+            raise self.exceptionType()
+        return len(self.messages[i])
+
+    def getMessage(self, i):
+        return StringIO.StringIO(self.messages[i])
+
+    def getUidl(self, i):
+        if i >= len(self.messages):
+            raise self.exceptionType()
+        return str(i)
+
+    def deleteMessage(self, i):
+        self.messages[i] = ''
+
+
+class AnotherPOP3TestCase(unittest.TestCase):
+
+    def runTest(self, lines, expectedOutput):
+        dummy = DummyPOP3()
+        client = LineSendingProtocol(lines)
+        d = loopback.loopbackAsync(dummy, client)
+        return d.addCallback(self._cbRunTest, client, dummy, expectedOutput)
+
+
+    def _cbRunTest(self, ignored, client, dummy, expectedOutput):
+        self.assertEqual('\r\n'.join(expectedOutput),
+                             '\r\n'.join(client.response))
+        dummy.connectionLost(failure.Failure(Exception("Test harness disconnect")))
+        return ignored
+
+
+    def test_buffer(self):
+        """
+        Test a lot of different POP3 commands in an extremely pipelined
+        scenario.
+
+        This test may cover legitimate behavior, but the intent and
+        granularity are not very good.  It would likely be an improvement to
+        split it into a number of smaller, more focused tests.
+        """
+        return self.runTest(
+            ["APOP moshez dummy",
+             "LIST",
+             "UIDL",
+             "RETR 1",
+             "RETR 2",
+             "DELE 1",
+             "RETR 1",
+             "QUIT"],
+            ['+OK <moshez>',
+             '+OK Authentication succeeded',
+             '+OK 1',
+             '1 44',
+             '.',
+             '+OK ',
+             '1 0',
+             '.',
+             '+OK 44',
+             'From: moshe',
+             'To: moshe',
+             '',
+             'How are you, friend?',
+             '.',
+             '-ERR Bad message number argument',
+             '+OK ',
+             '-ERR message deleted',
+             '+OK '])
+
+
+    def test_noop(self):
+        """
+        Test the no-op command.
+        """
+        return self.runTest(
+            ['APOP spiv dummy',
+             'NOOP',
+             'QUIT'],
+            ['+OK <moshez>',
+             '+OK Authentication succeeded',
+             '+OK ',
+             '+OK '])
+
+
+    def testAuthListing(self):
+        p = DummyPOP3()
+        p.factory = internet.protocol.Factory()
+        p.factory.challengers = {'Auth1': None, 'secondAuth': None, 'authLast': None}
+        client = LineSendingProtocol([
+            "AUTH",
+            "QUIT",
+        ])
+
+        d = loopback.loopbackAsync(p, client)
+        return d.addCallback(self._cbTestAuthListing, client)
+
+    def _cbTestAuthListing(self, ignored, client):
+        self.failUnless(client.response[1].startswith('+OK'))
+        self.assertEqual(sorted(client.response[2:5]),
+                         ["AUTH1", "AUTHLAST", "SECONDAUTH"])
+        self.assertEqual(client.response[5], ".")
+
+    def testIllegalPASS(self):
+        dummy = DummyPOP3()
+        client = LineSendingProtocol([
+            "PASS fooz",
+            "QUIT"
+        ])
+        d = loopback.loopbackAsync(dummy, client)
+        return d.addCallback(self._cbTestIllegalPASS, client, dummy)
+
+    def _cbTestIllegalPASS(self, ignored, client, dummy):
+        expected_output = '+OK <moshez>\r\n-ERR USER required before PASS\r\n+OK \r\n'
+        self.assertEqual(expected_output, '\r\n'.join(client.response) + '\r\n')
+        dummy.connectionLost(failure.Failure(Exception("Test harness disconnect")))
+
+    def testEmptyPASS(self):
+        dummy = DummyPOP3()
+        client = LineSendingProtocol([
+            "PASS ",
+            "QUIT"
+        ])
+        d = loopback.loopbackAsync(dummy, client)
+        return d.addCallback(self._cbTestEmptyPASS, client, dummy)
+
+    def _cbTestEmptyPASS(self, ignored, client, dummy):
+        expected_output = '+OK <moshez>\r\n-ERR USER required before PASS\r\n+OK \r\n'
+        self.assertEqual(expected_output, '\r\n'.join(client.response) + '\r\n')
+        dummy.connectionLost(failure.Failure(Exception("Test harness disconnect")))
+
+
+class TestServerFactory:
+    implements(pop3.IServerFactory)
+
+    def cap_IMPLEMENTATION(self):
+        return "Test Implementation String"
+
+    def cap_EXPIRE(self):
+        return 60
+
+    challengers = OrderedDict([("SCHEME_1", None), ("SCHEME_2", None)])
+
+    def cap_LOGIN_DELAY(self):
+        return 120
+
+    pue = True
+    def perUserExpiration(self):
+        return self.pue
+
+    puld = True
+    def perUserLoginDelay(self):
+        return self.puld
+
+
+class TestMailbox:
+    loginDelay = 100
+    messageExpiration = 25
+
+
+class CapabilityTestCase(unittest.TestCase):
+    def setUp(self):
+        s = StringIO.StringIO()
+        p = pop3.POP3()
+        p.factory = TestServerFactory()
+        p.transport = internet.protocol.FileWrapper(s)
+        p.connectionMade()
+        p.do_CAPA()
+
+        self.caps = p.listCapabilities()
+        self.pcaps = s.getvalue().splitlines()
+
+        s = StringIO.StringIO()
+        p.mbox = TestMailbox()
+        p.transport = internet.protocol.FileWrapper(s)
+        p.do_CAPA()
+
+        self.lpcaps = s.getvalue().splitlines()
+        p.connectionLost(failure.Failure(Exception("Test harness disconnect")))
+
+    def contained(self, s, *caps):
+        for c in caps:
+            self.assertIn(s, c)
+
+    def testUIDL(self):
+        self.contained("UIDL", self.caps, self.pcaps, self.lpcaps)
+
+    def testTOP(self):
+        self.contained("TOP", self.caps, self.pcaps, self.lpcaps)
+
+    def testUSER(self):
+        self.contained("USER", self.caps, self.pcaps, self.lpcaps)
+
+    def testEXPIRE(self):
+        self.contained("EXPIRE 60 USER", self.caps, self.pcaps)
+        self.contained("EXPIRE 25", self.lpcaps)
+
+    def testIMPLEMENTATION(self):
+        self.contained(
+            "IMPLEMENTATION Test Implementation String",
+            self.caps, self.pcaps, self.lpcaps
+        )
+
+    def testSASL(self):
+        self.contained(
+            "SASL SCHEME_1 SCHEME_2",
+            self.caps, self.pcaps, self.lpcaps
+        )
+
+    def testLOGIN_DELAY(self):
+        self.contained("LOGIN-DELAY 120 USER", self.caps, self.pcaps)
+        self.assertIn("LOGIN-DELAY 100", self.lpcaps)
+
+
+
+class GlobalCapabilitiesTestCase(unittest.TestCase):
+    def setUp(self):
+        s = StringIO.StringIO()
+        p = pop3.POP3()
+        p.factory = TestServerFactory()
+        p.factory.pue = p.factory.puld = False
+        p.transport = internet.protocol.FileWrapper(s)
+        p.connectionMade()
+        p.do_CAPA()
+
+        self.caps = p.listCapabilities()
+        self.pcaps = s.getvalue().splitlines()
+
+        s = StringIO.StringIO()
+        p.mbox = TestMailbox()
+        p.transport = internet.protocol.FileWrapper(s)
+        p.do_CAPA()
+
+        self.lpcaps = s.getvalue().splitlines()
+        p.connectionLost(failure.Failure(Exception("Test harness disconnect")))
+
+    def contained(self, s, *caps):
+        for c in caps:
+            self.assertIn(s, c)
+
+    def testEXPIRE(self):
+        self.contained("EXPIRE 60", self.caps, self.pcaps, self.lpcaps)
+
+    def testLOGIN_DELAY(self):
+        self.contained("LOGIN-DELAY 120", self.caps, self.pcaps, self.lpcaps)
+
+
+
+class TestRealm:
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        if avatarId == 'testuser':
+            return pop3.IMailbox, DummyMailbox(ValueError), lambda: None
+        assert False
+
+
+
+class SASLTestCase(unittest.TestCase):
+    def testValidLogin(self):
+        p = pop3.POP3()
+        p.factory = TestServerFactory()
+        p.factory.challengers = {'CRAM-MD5': cred.credentials.CramMD5Credentials}
+        p.portal = cred.portal.Portal(TestRealm())
+        ch = cred.checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        ch.addUser('testuser', 'testpassword')
+        p.portal.registerChecker(ch)
+
+        s = StringIO.StringIO()
+        p.transport = internet.protocol.FileWrapper(s)
+        p.connectionMade()
+
+        p.lineReceived("CAPA")
+        self.failUnless(s.getvalue().find("SASL CRAM-MD5") >= 0)
+
+        p.lineReceived("AUTH CRAM-MD5")
+        chal = s.getvalue().splitlines()[-1][2:]
+        chal = base64.decodestring(chal)
+        response = hmac.HMAC('testpassword', chal).hexdigest()
+
+        p.lineReceived(base64.encodestring('testuser ' + response).rstrip('\n'))
+        self.failUnless(p.mbox)
+        self.failUnless(s.getvalue().splitlines()[-1].find("+OK") >= 0)
+        p.connectionLost(failure.Failure(Exception("Test harness disconnect")))
+
+
+
+class CommandMixin:
+    """
+    Tests for all the commands a POP3 server is allowed to receive.
+    """
+
+    extraMessage = '''\
+From: guy
+To: fellow
+
+More message text for you.
+'''
+
+
+    def setUp(self):
+        """
+        Make a POP3 server protocol instance hooked up to a simple mailbox and
+        a transport that buffers output to a StringIO.
+        """
+        p = pop3.POP3()
+        p.mbox = self.mailboxType(self.exceptionType)
+        p.schedule = list
+        self.pop3Server = p
+
+        s = StringIO.StringIO()
+        p.transport = internet.protocol.FileWrapper(s)
+        p.connectionMade()
+        s.truncate(0)
+        self.pop3Transport = s
+
+
+    def tearDown(self):
+        """
+        Disconnect the server protocol so it can clean up anything it might
+        need to clean up.
+        """
+        self.pop3Server.connectionLost(failure.Failure(Exception("Test harness disconnect")))
+
+
+    def _flush(self):
+        """
+        Do some of the things that the reactor would take care of, if the
+        reactor were actually running.
+        """
+        # FileWrapper does not drive its producer on its own; poke it manually.
+        self.pop3Server.transport._checkProducer()
+
+
+    def testLIST(self):
+        """
+        Test the two forms of list: with a message index number, which should
+        return a short-form response, and without a message index number, which
+        should return a long-form response, one line per message.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+
+        p.lineReceived("LIST 1")
+        self._flush()
+        self.assertEqual(s.getvalue(), "+OK 1 44\r\n")
+        s.truncate(0)
+
+        p.lineReceived("LIST")
+        self._flush()
+        self.assertEqual(s.getvalue(), "+OK 1\r\n1 44\r\n.\r\n")
+
+
+    def testLISTWithBadArgument(self):
+        """
+        Test that non-integers and out-of-bounds integers produce the
+        appropriate error responses.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+
+        p.lineReceived("LIST a")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Invalid message-number: 'a'\r\n")
+        s.truncate(0)
+
+        p.lineReceived("LIST 0")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Invalid message-number: 0\r\n")
+        s.truncate(0)
+
+        p.lineReceived("LIST 2")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Invalid message-number: 2\r\n")
+        s.truncate(0)
+
+
+    def testUIDL(self):
+        """
+        Test the two forms of the UIDL command.  These are just like the two
+        forms of the LIST command.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+
+        p.lineReceived("UIDL 1")
+        self.assertEqual(s.getvalue(), "+OK 0\r\n")
+        s.truncate(0)
+
+        p.lineReceived("UIDL")
+        self._flush()
+        self.assertEqual(s.getvalue(), "+OK \r\n1 0\r\n.\r\n")
+
+
+    def testUIDLWithBadArgument(self):
+        """
+        Test that UIDL with a non-integer or an out-of-bounds integer produces
+        the appropriate error response.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+
+        p.lineReceived("UIDL a")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+        p.lineReceived("UIDL 0")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+        p.lineReceived("UIDL 2")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+
+    def testSTAT(self):
+        """
+        Test the single form of the STAT command, which returns a short-form
+        response of the number of messages in the mailbox and their total size.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+
+        p.lineReceived("STAT")
+        self._flush()
+        self.assertEqual(s.getvalue(), "+OK 1 44\r\n")
+
+
+    def testRETR(self):
+        """
+        Test downloading a message.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+
+        p.lineReceived("RETR 1")
+        self._flush()
+        self.assertEqual(
+            s.getvalue(),
+            "+OK 44\r\n"
+            "From: moshe\r\n"
+            "To: moshe\r\n"
+            "\r\n"
+            "How are you, friend?\r\n"
+            ".\r\n")
+        s.truncate(0)
+
+
+    def testRETRWithBadArgument(self):
+        """
+        Test that trying to download a message with a bad argument, either not
+        an integer or an out-of-bounds integer, fails with the appropriate
+        error response.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+
+        p.lineReceived("RETR a")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+        p.lineReceived("RETR 0")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+        p.lineReceived("RETR 2")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+
+    def testTOP(self):
+        """
+        Test downloading the headers and part of the body of a message.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+        p.mbox.messages.append(self.extraMessage)
+
+        p.lineReceived("TOP 1 0")
+        self._flush()
+        self.assertEqual(
+            s.getvalue(),
+            "+OK Top of message follows\r\n"
+            "From: moshe\r\n"
+            "To: moshe\r\n"
+            "\r\n"
+            ".\r\n")
+
+
+    def testTOPWithBadArgument(self):
+        """
+        Test that TOP fails with the appropriate error response when given a
+        bad argument: a message number which isn't an integer or is out of
+        bounds, or a line count which isn't an integer or is negative.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+        p.mbox.messages.append(self.extraMessage)
+
+        p.lineReceived("TOP 1 a")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad line count argument\r\n")
+        s.truncate(0)
+
+        p.lineReceived("TOP 1 -1")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad line count argument\r\n")
+        s.truncate(0)
+
+        p.lineReceived("TOP a 1")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+        p.lineReceived("TOP 0 1")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+        p.lineReceived("TOP 3 1")
+        self.assertEqual(
+            s.getvalue(),
+            "-ERR Bad message number argument\r\n")
+        s.truncate(0)
+
+
+    def testLAST(self):
+        """
+        Test the exceedingly pointless LAST command, which tells you the
+        highest message index which you have already downloaded.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+        p.mbox.messages.append(self.extraMessage)
+
+        p.lineReceived('LAST')
+        self.assertEqual(
+            s.getvalue(),
+            "+OK 0\r\n")
+        s.truncate(0)
+
+
+    def testRetrieveUpdatesHighest(self):
+        """
+        Test that issuing a RETR command updates the LAST response.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+        p.mbox.messages.append(self.extraMessage)
+
+        p.lineReceived('RETR 2')
+        self._flush()
+        s.truncate(0)
+        p.lineReceived('LAST')
+        self.assertEqual(
+            s.getvalue(),
+            '+OK 2\r\n')
+        s.truncate(0)
+
+
+    def testTopUpdatesHighest(self):
+        """
+        Test that issuing a TOP command updates the LAST response.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+        p.mbox.messages.append(self.extraMessage)
+
+        p.lineReceived('TOP 2 10')
+        self._flush()
+        s.truncate(0)
+        p.lineReceived('LAST')
+        self.assertEqual(
+            s.getvalue(),
+            '+OK 2\r\n')
+
+
+    def testHighestOnlyProgresses(self):
+        """
+        Test that downloading a message with a smaller index than the current
+        LAST response doesn't change the LAST response.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+        p.mbox.messages.append(self.extraMessage)
+
+        p.lineReceived('RETR 2')
+        self._flush()
+        p.lineReceived('TOP 1 10')
+        self._flush()
+        s.truncate(0)
+        p.lineReceived('LAST')
+        self.assertEqual(
+            s.getvalue(),
+            '+OK 2\r\n')
+
+
+    def testResetClearsHighest(self):
+        """
+        Test that issuing RSET changes the LAST response to 0.
+        """
+        p = self.pop3Server
+        s = self.pop3Transport
+        p.mbox.messages.append(self.extraMessage)
+
+        p.lineReceived('RETR 2')
+        self._flush()
+        p.lineReceived('RSET')
+        s.truncate(0)
+        p.lineReceived('LAST')
+        self.assertEqual(
+            s.getvalue(),
+            '+OK 0\r\n')
+
+
+
+_listMessageDeprecation = (
+    "twisted.mail.pop3.IMailbox.listMessages may not "
+    "raise IndexError for out-of-bounds message numbers: "
+    "raise ValueError instead.")
+_listMessageSuppression = util.suppress(
+    message=_listMessageDeprecation,
+    category=PendingDeprecationWarning)
+
+_getUidlDeprecation = (
+    "twisted.mail.pop3.IMailbox.getUidl may not "
+    "raise IndexError for out-of-bounds message numbers: "
+    "raise ValueError instead.")
+_getUidlSuppression = util.suppress(
+    message=_getUidlDeprecation,
+    category=PendingDeprecationWarning)
+
+class IndexErrorCommandTestCase(CommandMixin, unittest.TestCase):
+    """
+    Run all of the command tests against a mailbox which raises IndexError
+    when an out of bounds request is made.  This behavior will be deprecated
+    shortly and then removed.
+    """
+    exceptionType = IndexError
+    mailboxType = DummyMailbox
+
+    def testLISTWithBadArgument(self):
+        return CommandMixin.testLISTWithBadArgument(self)
+    testLISTWithBadArgument.suppress = [_listMessageSuppression]
+
+
+    def testUIDLWithBadArgument(self):
+        return CommandMixin.testUIDLWithBadArgument(self)
+    testUIDLWithBadArgument.suppress = [_getUidlSuppression]
+
+
+    def testTOPWithBadArgument(self):
+        return CommandMixin.testTOPWithBadArgument(self)
+    testTOPWithBadArgument.suppress = [_listMessageSuppression]
+
+
+    def testRETRWithBadArgument(self):
+        return CommandMixin.testRETRWithBadArgument(self)
+    testRETRWithBadArgument.suppress = [_listMessageSuppression]
+
+
+
+class ValueErrorCommandTestCase(CommandMixin, unittest.TestCase):
+    """
+    Run all of the command tests against a mailbox which raises ValueError
+    when an out of bounds request is made.  This is the correct behavior and
+    after support for mailboxes which raise IndexError is removed, this will
+    become just C{CommandTestCase}.
+    """
+    exceptionType = ValueError
+    mailboxType = DummyMailbox
+
+
+
+class SyncDeferredMailbox(DummyMailbox):
+    """
+    Mailbox which has a listMessages implementation which returns a Deferred
+    which has already fired.
+    """
+    def listMessages(self, n=None):
+        return defer.succeed(DummyMailbox.listMessages(self, n))
+
+
+
+class IndexErrorSyncDeferredCommandTestCase(IndexErrorCommandTestCase):
+    """
+    Run all of the L{IndexErrorCommandTestCase} tests with a
+    synchronous-Deferred returning IMailbox implementation.
+    """
+    mailboxType = SyncDeferredMailbox
+
+
+
+class ValueErrorSyncDeferredCommandTestCase(ValueErrorCommandTestCase):
+    """
+    Run all of the L{ValueErrorCommandTestCase} tests with a
+    synchronous-Deferred returning IMailbox implementation.
+    """
+    mailboxType = SyncDeferredMailbox
+
+
+
+class AsyncDeferredMailbox(DummyMailbox):
+    """
+    Mailbox which has a listMessages implementation which returns a Deferred
+    which has not yet fired.
+    """
+    def __init__(self, *a, **kw):
+        self.waiting = []
+        DummyMailbox.__init__(self, *a, **kw)
+
+
+    def listMessages(self, n=None):
+        d = defer.Deferred()
+        # See AsyncDeferredMailbox._flush
+        self.waiting.append((d, DummyMailbox.listMessages(self, n)))
+        return d
+
+
+
+class IndexErrorAsyncDeferredCommandTestCase(IndexErrorCommandTestCase):
+    """
+    Run all of the L{IndexErrorCommandTestCase} tests with an asynchronous-Deferred
+    returning IMailbox implementation.
+    """
+    mailboxType = AsyncDeferredMailbox
+
+    def _flush(self):
+        """
+        Fire whatever Deferreds we've built up in our mailbox.
+        """
+        while self.pop3Server.mbox.waiting:
+            d, a = self.pop3Server.mbox.waiting.pop()
+            d.callback(a)
+        IndexErrorCommandTestCase._flush(self)
+
+
+
+class ValueErrorAsyncDeferredCommandTestCase(ValueErrorCommandTestCase):
+    """
+    Run all of the L{ValueErrorCommandTestCase} tests with an asynchronous-Deferred
+    returning IMailbox implementation.
+    """
+    mailboxType = AsyncDeferredMailbox
+
+    def _flush(self):
+        """
+        Fire whatever Deferreds we've built up in our mailbox.
+        """
+        while self.pop3Server.mbox.waiting:
+            d, a = self.pop3Server.mbox.waiting.pop()
+            d.callback(a)
+        ValueErrorCommandTestCase._flush(self)
+
+class POP3MiscTestCase(unittest.TestCase):
+    """
+    Miscellaneous tests more to do with module/package structure than
+    anything to do with the Post Office Protocol.
+    """
+    def test_all(self):
+        """
+        This test checks that all names listed in
+        twisted.mail.pop3.__all__ are actually present in the module.
+        """
+        mod = twisted.mail.pop3
+        for attr in mod.__all__:
+            self.failUnless(hasattr(mod, attr))
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_pop3client.py b/ThirdParty/Twisted/twisted/mail/test/test_pop3client.py
new file mode 100644
index 0000000..502aae8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_pop3client.py
@@ -0,0 +1,582 @@
+# -*- test-case-name: twisted.mail.test.test_pop3client -*-
+# Copyright (c) 2001-2004 Divmod Inc.
+# See LICENSE for details.
+
+from zope.interface import directlyProvides
+
+from twisted.mail.pop3 import AdvancedPOP3Client as POP3Client
+from twisted.mail.pop3 import InsecureAuthenticationDisallowed
+from twisted.mail.pop3 import ServerErrorResponse
+from twisted.protocols import loopback
+from twisted.internet import reactor, defer, error, protocol, interfaces
+from twisted.python import log
+
+from twisted.trial import unittest
+from twisted.test.proto_helpers import StringTransport
+from twisted.protocols import basic
+
+from twisted.mail.test import pop3testserver
+
+try:
+    from twisted.test.ssl_helpers import ClientTLSContext, ServerTLSContext
+except ImportError:
+    ClientTLSContext = ServerTLSContext = None
+
+
+class StringTransportWithConnectionLosing(StringTransport):
+    def loseConnection(self):
+        self.protocol.connectionLost(error.ConnectionDone())
+
+
+capCache = {"TOP": None, "LOGIN-DELAY": "180", "UIDL": None, \
+            "STLS": None, "USER": None, "SASL": "LOGIN"}
+def setUp(greet=True):
+    p = POP3Client()
+
+    # Skip the CAPA request that login would issue if it didn't already
+    # have a capability cache.
+    p._capCache = capCache
+
+    t = StringTransportWithConnectionLosing()
+    t.protocol = p
+    p.makeConnection(t)
+
+    if greet:
+        p.dataReceived('+OK Hello!\r\n')
+
+    return p, t
+
+def strip(f):
+    return lambda result, f=f: f()
+
+class POP3ClientLoginTestCase(unittest.TestCase):
+    def testNegativeGreeting(self):
+        p, t = setUp(greet=False)
+        p.allowInsecureLogin = True
+        d = p.login("username", "password")
+        p.dataReceived('-ERR Offline for maintenance\r\n')
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "Offline for maintenance"))
+
+
+    def testOkUser(self):
+        p, t = setUp()
+        d = p.user("username")
+        self.assertEqual(t.value(), "USER username\r\n")
+        p.dataReceived("+OK send password\r\n")
+        return d.addCallback(self.assertEqual, "send password")
+
+    def testBadUser(self):
+        p, t = setUp()
+        d = p.user("username")
+        self.assertEqual(t.value(), "USER username\r\n")
+        p.dataReceived("-ERR account suspended\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "account suspended"))
+
+    def testOkPass(self):
+        p, t = setUp()
+        d = p.password("password")
+        self.assertEqual(t.value(), "PASS password\r\n")
+        p.dataReceived("+OK you're in!\r\n")
+        return d.addCallback(self.assertEqual, "you're in!")
+
+    def testBadPass(self):
+        p, t = setUp()
+        d = p.password("password")
+        self.assertEqual(t.value(), "PASS password\r\n")
+        p.dataReceived("-ERR go away\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "go away"))
+
+    def testOkLogin(self):
+        p, t = setUp()
+        p.allowInsecureLogin = True
+        d = p.login("username", "password")
+        self.assertEqual(t.value(), "USER username\r\n")
+        p.dataReceived("+OK go ahead\r\n")
+        self.assertEqual(t.value(), "USER username\r\nPASS password\r\n")
+        p.dataReceived("+OK password accepted\r\n")
+        return d.addCallback(self.assertEqual, "password accepted")
+
+    def testBadPasswordLogin(self):
+        p, t = setUp()
+        p.allowInsecureLogin = True
+        d = p.login("username", "password")
+        self.assertEqual(t.value(), "USER username\r\n")
+        p.dataReceived("+OK waiting on you\r\n")
+        self.assertEqual(t.value(), "USER username\r\nPASS password\r\n")
+        p.dataReceived("-ERR bogus login\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "bogus login"))
+
+    def testBadUsernameLogin(self):
+        p, t = setUp()
+        p.allowInsecureLogin = True
+        d = p.login("username", "password")
+        self.assertEqual(t.value(), "USER username\r\n")
+        p.dataReceived("-ERR bogus login\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "bogus login"))
+
+    def testServerGreeting(self):
+        p, t = setUp(greet=False)
+        p.dataReceived("+OK lalala this has no challenge\r\n")
+        self.assertEqual(p.serverChallenge, None)
+
+    def testServerGreetingWithChallenge(self):
+        p, t = setUp(greet=False)
+        p.dataReceived("+OK <here is the challenge>\r\n")
+        self.assertEqual(p.serverChallenge, "<here is the challenge>")
+
+    def testAPOP(self):
+        p, t = setUp(greet=False)
+        p.dataReceived("+OK <challenge string goes here>\r\n")
+        d = p.login("username", "password")
+        self.assertEqual(t.value(), "APOP username f34f1e464d0d7927607753129cabe39a\r\n")
+        p.dataReceived("+OK Welcome!\r\n")
+        return d.addCallback(self.assertEqual, "Welcome!")
+
+    def testInsecureLoginRaisesException(self):
+        p, t = setUp(greet=False)
+        p.dataReceived("+OK Howdy\r\n")
+        d = p.login("username", "password")
+        self.failIf(t.value())
+        return self.assertFailure(
+            d, InsecureAuthenticationDisallowed)
+
+
+    def testSSLTransportConsideredSecure(self):
+        """
+        If a server doesn't offer APOP but the transport is secured using
+        SSL or TLS, a plaintext login should be allowed, not rejected with
+        an InsecureAuthenticationDisallowed exception.
+        """
+        p, t = setUp(greet=False)
+        directlyProvides(t, interfaces.ISSLTransport)
+        p.dataReceived("+OK Howdy\r\n")
+        d = p.login("username", "password")
+        self.assertEqual(t.value(), "USER username\r\n")
+        t.clear()
+        p.dataReceived("+OK\r\n")
+        self.assertEqual(t.value(), "PASS password\r\n")
+        p.dataReceived("+OK\r\n")
+        return d
+
+
+
+class ListConsumer:
+    def __init__(self):
+        self.data = {}
+
+    def consume(self, (item, value)):
+        self.data.setdefault(item, []).append(value)
+
+class MessageConsumer:
+    def __init__(self):
+        self.data = []
+
+    def consume(self, line):
+        self.data.append(line)
+
+class POP3ClientListTestCase(unittest.TestCase):
+    def testListSize(self):
+        p, t = setUp()
+        d = p.listSize()
+        self.assertEqual(t.value(), "LIST\r\n")
+        p.dataReceived("+OK Here it comes\r\n")
+        p.dataReceived("1 3\r\n2 2\r\n3 1\r\n.\r\n")
+        return d.addCallback(self.assertEqual, [3, 2, 1])
+
+    def testListSizeWithConsumer(self):
+        p, t = setUp()
+        c = ListConsumer()
+        f = c.consume
+        d = p.listSize(f)
+        self.assertEqual(t.value(), "LIST\r\n")
+        p.dataReceived("+OK Here it comes\r\n")
+        p.dataReceived("1 3\r\n2 2\r\n3 1\r\n")
+        self.assertEqual(c.data, {0: [3], 1: [2], 2: [1]})
+        p.dataReceived("5 3\r\n6 2\r\n7 1\r\n")
+        self.assertEqual(c.data, {0: [3], 1: [2], 2: [1], 4: [3], 5: [2], 6: [1]})
+        p.dataReceived(".\r\n")
+        return d.addCallback(self.assertIdentical, f)
+
+    def testFailedListSize(self):
+        p, t = setUp()
+        d = p.listSize()
+        self.assertEqual(t.value(), "LIST\r\n")
+        p.dataReceived("-ERR Fatal doom server exploded\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "Fatal doom server exploded"))
+
+    def testListUID(self):
+        p, t = setUp()
+        d = p.listUID()
+        self.assertEqual(t.value(), "UIDL\r\n")
+        p.dataReceived("+OK Here it comes\r\n")
+        p.dataReceived("1 abc\r\n2 def\r\n3 ghi\r\n.\r\n")
+        return d.addCallback(self.assertEqual, ["abc", "def", "ghi"])
+
+    def testListUIDWithConsumer(self):
+        p, t = setUp()
+        c = ListConsumer()
+        f = c.consume
+        d = p.listUID(f)
+        self.assertEqual(t.value(), "UIDL\r\n")
+        p.dataReceived("+OK Here it comes\r\n")
+        p.dataReceived("1 xyz\r\n2 abc\r\n5 mno\r\n")
+        self.assertEqual(c.data, {0: ["xyz"], 1: ["abc"], 4: ["mno"]})
+        p.dataReceived(".\r\n")
+        return d.addCallback(self.assertIdentical, f)
+
+    def testFailedListUID(self):
+        p, t = setUp()
+        d = p.listUID()
+        self.assertEqual(t.value(), "UIDL\r\n")
+        p.dataReceived("-ERR Fatal doom server exploded\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "Fatal doom server exploded"))
+
+class POP3ClientMessageTestCase(unittest.TestCase):
+    def testRetrieve(self):
+        p, t = setUp()
+        d = p.retrieve(7)
+        self.assertEqual(t.value(), "RETR 8\r\n")
+        p.dataReceived("+OK Message incoming\r\n")
+        p.dataReceived("La la la here is message text\r\n")
+        p.dataReceived("..Further message text tra la la\r\n")
+        p.dataReceived(".\r\n")
+        return d.addCallback(
+            self.assertEqual,
+            ["La la la here is message text",
+             ".Further message text tra la la"])
+
+    def testRetrieveWithConsumer(self):
+        p, t = setUp()
+        c = MessageConsumer()
+        f = c.consume
+        d = p.retrieve(7, f)
+        self.assertEqual(t.value(), "RETR 8\r\n")
+        p.dataReceived("+OK Message incoming\r\n")
+        p.dataReceived("La la la here is message text\r\n")
+        p.dataReceived("..Further message text\r\n.\r\n")
+        return d.addCallback(self._cbTestRetrieveWithConsumer, f, c)
+
+    def _cbTestRetrieveWithConsumer(self, result, f, c):
+        self.assertIdentical(result, f)
+        self.assertEqual(c.data, ["La la la here is message text",
+                                   ".Further message text"])
+
+    def testPartialRetrieve(self):
+        p, t = setUp()
+        d = p.retrieve(7, lines=2)
+        self.assertEqual(t.value(), "TOP 8 2\r\n")
+        p.dataReceived("+OK 2 lines on the way\r\n")
+        p.dataReceived("Line the first!  Woop\r\n")
+        p.dataReceived("Line the last!  Bye\r\n")
+        p.dataReceived(".\r\n")
+        return d.addCallback(
+            self.assertEqual,
+            ["Line the first!  Woop",
+             "Line the last!  Bye"])
+
+    def testPartialRetrieveWithConsumer(self):
+        p, t = setUp()
+        c = MessageConsumer()
+        f = c.consume
+        d = p.retrieve(7, f, lines=2)
+        self.assertEqual(t.value(), "TOP 8 2\r\n")
+        p.dataReceived("+OK 2 lines on the way\r\n")
+        p.dataReceived("Line the first!  Woop\r\n")
+        p.dataReceived("Line the last!  Bye\r\n")
+        p.dataReceived(".\r\n")
+        return d.addCallback(self._cbTestPartialRetrieveWithConsumer, f, c)
+
+    def _cbTestPartialRetrieveWithConsumer(self, result, f, c):
+        self.assertIdentical(result, f)
+        self.assertEqual(c.data, ["Line the first!  Woop",
+                                   "Line the last!  Bye"])
+
+    def testFailedRetrieve(self):
+        p, t = setUp()
+        d = p.retrieve(0)
+        self.assertEqual(t.value(), "RETR 1\r\n")
+        p.dataReceived("-ERR Fatal doom server exploded\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "Fatal doom server exploded"))
+
+
+    def test_concurrentRetrieves(self):
+        """
+        Issue three retrieve calls immediately without waiting for any to
+        succeed and make sure they all do succeed eventually.
+        """
+        p, t = setUp()
+        messages = [
+            p.retrieve(i).addCallback(
+                self.assertEqual,
+                ["First line of %d." % (i + 1,),
+                 "Second line of %d." % (i + 1,)])
+            for i
+            in range(3)]
+
+        for i in range(1, 4):
+            self.assertEqual(t.value(), "RETR %d\r\n" % (i,))
+            t.clear()
+            p.dataReceived("+OK 2 lines on the way\r\n")
+            p.dataReceived("First line of %d.\r\n" % (i,))
+            p.dataReceived("Second line of %d.\r\n" % (i,))
+            self.assertEqual(t.value(), "")
+            p.dataReceived(".\r\n")
+
+        return defer.DeferredList(messages, fireOnOneErrback=True)
+
+
+
+class POP3ClientMiscTestCase(unittest.TestCase):
+    def testCapability(self):
+        p, t = setUp()
+        d = p.capabilities(useCache=0)
+        self.assertEqual(t.value(), "CAPA\r\n")
+        p.dataReceived("+OK Capabilities on the way\r\n")
+        p.dataReceived("X\r\nY\r\nZ\r\nA 1 2 3\r\nB 1 2\r\nC 1\r\n.\r\n")
+        return d.addCallback(
+            self.assertEqual,
+            {"X": None, "Y": None, "Z": None,
+             "A": ["1", "2", "3"],
+             "B": ["1", "2"],
+             "C": ["1"]})
+
+    def testCapabilityError(self):
+        p, t = setUp()
+        d = p.capabilities(useCache=0)
+        self.assertEqual(t.value(), "CAPA\r\n")
+        p.dataReceived("-ERR This server is lame!\r\n")
+        return d.addCallback(self.assertEqual, {})
+
+    def testStat(self):
+        p, t = setUp()
+        d = p.stat()
+        self.assertEqual(t.value(), "STAT\r\n")
+        p.dataReceived("+OK 1 1212\r\n")
+        return d.addCallback(self.assertEqual, (1, 1212))
+
+    def testStatError(self):
+        p, t = setUp()
+        d = p.stat()
+        self.assertEqual(t.value(), "STAT\r\n")
+        p.dataReceived("-ERR This server is lame!\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "This server is lame!"))
+
+    def testNoop(self):
+        p, t = setUp()
+        d = p.noop()
+        self.assertEqual(t.value(), "NOOP\r\n")
+        p.dataReceived("+OK No-op to you too!\r\n")
+        return d.addCallback(self.assertEqual, "No-op to you too!")
+
+    def testNoopError(self):
+        p, t = setUp()
+        d = p.noop()
+        self.assertEqual(t.value(), "NOOP\r\n")
+        p.dataReceived("-ERR This server is lame!\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "This server is lame!"))
+
+    def testRset(self):
+        p, t = setUp()
+        d = p.reset()
+        self.assertEqual(t.value(), "RSET\r\n")
+        p.dataReceived("+OK Reset state\r\n")
+        return d.addCallback(self.assertEqual, "Reset state")
+
+    def testRsetError(self):
+        p, t = setUp()
+        d = p.reset()
+        self.assertEqual(t.value(), "RSET\r\n")
+        p.dataReceived("-ERR This server is lame!\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "This server is lame!"))
+
+    def testDelete(self):
+        p, t = setUp()
+        d = p.delete(3)
+        self.assertEqual(t.value(), "DELE 4\r\n")
+        p.dataReceived("+OK Hasta la vista\r\n")
+        return d.addCallback(self.assertEqual, "Hasta la vista")
+
+    def testDeleteError(self):
+        p, t = setUp()
+        d = p.delete(3)
+        self.assertEqual(t.value(), "DELE 4\r\n")
+        p.dataReceived("-ERR Winner is not you.\r\n")
+        return self.assertFailure(
+            d, ServerErrorResponse).addCallback(
+            lambda exc: self.assertEqual(exc.args[0], "Winner is not you."))
+
+
+class SimpleClient(POP3Client):
+    def __init__(self, deferred, contextFactory = None):
+        self.deferred = deferred
+        self.allowInsecureLogin = True
+
+    def serverGreeting(self, challenge):
+        self.deferred.callback(None)
+
+class POP3HelperMixin:
+    serverCTX = None
+    clientCTX = None
+
+    def setUp(self):
+        d = defer.Deferred()
+        self.server = pop3testserver.POP3TestServer(contextFactory=self.serverCTX)
+        self.client = SimpleClient(d, contextFactory=self.clientCTX)
+        self.client.timeout = 30
+        self.connected = d
+
+    def tearDown(self):
+        del self.server
+        del self.client
+        del self.connected
+
+    def _cbStopClient(self, ignore):
+        self.client.transport.loseConnection()
+
+    def _ebGeneral(self, failure):
+        self.client.transport.loseConnection()
+        self.server.transport.loseConnection()
+        return failure
+
+    def loopback(self):
+        return loopback.loopbackTCP(self.server, self.client, noisy=False)
+
+
+class TLSServerFactory(protocol.ServerFactory):
+    class protocol(basic.LineReceiver):
+        context = None
+        output = []
+        def connectionMade(self):
+            self.factory.input = []
+            self.output = self.output[:]
+            map(self.sendLine, self.output.pop(0))
+        def lineReceived(self, line):
+            self.factory.input.append(line)
+            map(self.sendLine, self.output.pop(0))
+            if line == 'STLS':
+                self.transport.startTLS(self.context)
+
+
+class POP3TLSTestCase(unittest.TestCase):
+    """
+    Tests for POP3Client's support for TLS connections.
+    """
+
+    def test_startTLS(self):
+        """
+        POP3Client.startTLS starts a TLS session over its existing TCP
+        connection.
+        """
+        sf = TLSServerFactory()
+        sf.protocol.output = [
+            ['+OK'], # Server greeting
+            ['+OK', 'STLS', '.'], # CAPA response
+            ['+OK'], # STLS response
+            ['+OK', '.'], # Second CAPA response
+            ['+OK'] # QUIT response
+            ]
+        sf.protocol.context = ServerTLSContext()
+        port = reactor.listenTCP(0, sf, interface='127.0.0.1')
+        self.addCleanup(port.stopListening)
+        H = port.getHost().host
+        P = port.getHost().port
+
+        connLostDeferred = defer.Deferred()
+        cp = SimpleClient(defer.Deferred(), ClientTLSContext())
+        def connectionLost(reason):
+            SimpleClient.connectionLost(cp, reason)
+            connLostDeferred.callback(None)
+        cp.connectionLost = connectionLost
+        cf = protocol.ClientFactory()
+        cf.protocol = lambda: cp
+
+        conn = reactor.connectTCP(H, P, cf)
+
+        def cbConnected(ignored):
+            log.msg("Connected to server; starting TLS")
+            return cp.startTLS()
+
+        def cbStartedTLS(ignored):
+            log.msg("Started TLS; disconnecting")
+            return cp.quit()
+
+        def cbDisconnected(ign):
+            log.msg("Disconnected; asserting correct input received")
+            self.assertEqual(
+                sf.input,
+                ['CAPA', 'STLS', 'CAPA', 'QUIT'])
+
+        def cleanup(result):
+            log.msg("Asserted correct input; disconnecting client and shutting down server")
+            conn.disconnect()
+            return connLostDeferred
+
+        cp.deferred.addCallback(cbConnected)
+        cp.deferred.addCallback(cbStartedTLS)
+        cp.deferred.addCallback(cbDisconnected)
+        cp.deferred.addBoth(cleanup)
+
+        return cp.deferred
+
+
+class POP3TimeoutTestCase(POP3HelperMixin, unittest.TestCase):
+    def testTimeout(self):
+        def login():
+            d = self.client.login('test', 'twisted')
+            d.addCallback(loggedIn)
+            d.addErrback(timedOut)
+            return d
+
+        def loggedIn(result):
+            self.fail("Successfully logged in!?  Impossible!")
+
+
+        def timedOut(failure):
+            failure.trap(error.TimeoutError)
+            self._cbStopClient(None)
+
+        def quit():
+            return self.client.quit()
+
+        self.client.timeout = 0.01
+
+        # Tell the server to not return a response to client.  This
+        # will trigger a timeout.
+        pop3testserver.TIMEOUT_RESPONSE = True
+
+        methods = [login, quit]
+        map(self.connected.addCallback, map(strip, methods))
+        self.connected.addCallback(self._cbStopClient)
+        self.connected.addErrback(self._ebGeneral)
+        return self.loopback()
+
+
+if ClientTLSContext is None:
+    for case in (POP3TLSTestCase,):
+        case.skip = "OpenSSL not present"
+elif interfaces.IReactorSSL(reactor, None) is None:
+    for case in (POP3TLSTestCase,):
+        case.skip = "Reactor doesn't support SSL"
+
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_scripts.py b/ThirdParty/Twisted/twisted/mail/test/test_scripts.py
new file mode 100644
index 0000000..cc14061
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_scripts.py
@@ -0,0 +1,18 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the command-line mailer tool provided by Twisted Mail.
+"""
+
+from twisted.trial.unittest import TestCase
+from twisted.scripts.test.test_scripts import ScriptTestsMixin
+
+
+
+class ScriptTests(TestCase, ScriptTestsMixin):
+    """
+    Tests for mail's scripts.
+    """
+    def test_mailmail(self):
+        self.scriptTest("mail/mailmail")
diff --git a/ThirdParty/Twisted/twisted/mail/test/test_smtp.py b/ThirdParty/Twisted/twisted/mail/test/test_smtp.py
new file mode 100644
index 0000000..058bb8e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/test/test_smtp.py
@@ -0,0 +1,1520 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for twisted.mail.smtp module.
+"""
+
+from zope.interface import implements
+
+from twisted.python.util import LineLog
+from twisted.trial import unittest, util
+from twisted.protocols import basic, loopback
+from twisted.mail import smtp
+from twisted.internet import defer, protocol, reactor, interfaces
+from twisted.internet import address, error, task
+from twisted.test.proto_helpers import StringTransport
+
+from twisted import cred
+import twisted.cred.error
+import twisted.cred.portal
+import twisted.cred.checkers
+import twisted.cred.credentials
+
+from twisted.cred.portal import IRealm, Portal
+from twisted.cred.checkers import ICredentialsChecker, AllowAnonymousAccess
+from twisted.cred.credentials import IAnonymous
+from twisted.cred.error import UnauthorizedLogin
+
+from twisted.mail import imap4
+
+
+try:
+    from twisted.test.ssl_helpers import ClientTLSContext, ServerTLSContext
+except ImportError:
+    ClientTLSContext = ServerTLSContext = None
+
+import re
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+
+def spameater(*spam, **eggs):
+    return None
+
+
+
+class BrokenMessage(object):
+    """
+    L{BrokenMessage} is an L{IMessage} which raises an unexpected exception
+    from its C{eomReceived} method.  This is useful for creating a server which
+    can be used to test client retry behavior.
+    """
+    implements(smtp.IMessage)
+
+    def __init__(self, user):
+        pass
+
+
+    def lineReceived(self, line):
+        pass
+
+
+    def eomReceived(self):
+        raise RuntimeError("Some problem, delivery is failing.")
+
+
+    def connectionLost(self):
+        pass
+
+
+
+class DummyMessage(object):
+    """
+    L{DummyMessage} is an L{IMessage} which saves the message delivered to it
+    to its domain object.
+
+    @ivar domain: A L{DummyDomain} which will be used to store the message once
+        it is received.
+    """
+    def __init__(self, domain, user):
+        self.domain = domain
+        self.user = user
+        self.buffer = []
+
+
+    def lineReceived(self, line):
+        # Throw away the generated Received: header
+        if not re.match('Received: From yyy.com \(\[.*\]\) by localhost;', line):
+            self.buffer.append(line)
+
+
+    def eomReceived(self):
+        message = '\n'.join(self.buffer) + '\n'
+        self.domain.messages[self.user.dest.local].append(message)
+        deferred = defer.Deferred()
+        deferred.callback("saved")
+        return deferred
+
+
+
+class DummyDomain(object):
+    """
+    L{DummyDomain} is an L{IDomain} which keeps track of messages delivered to
+    it in memory.
+    """
+    def __init__(self, names):
+        self.messages = {}
+        for name in names:
+            self.messages[name] = []
+
+
+    def exists(self, user):
+        if user.dest.local in self.messages:
+            return defer.succeed(lambda: self.startMessage(user))
+        return defer.fail(smtp.SMTPBadRcpt(user))
+
+
+    def startMessage(self, user):
+        return DummyMessage(self, user)
+
+
+
+class SMTPTestCase(unittest.TestCase):
+
+    messages = [('foo@bar.com', ['foo@baz.com', 'qux@baz.com'], '''\
+Subject: urgent\015
+\015
+Someone set up us the bomb!\015
+''')]
+
+    mbox = {'foo': ['Subject: urgent\n\nSomeone set up us the bomb!\n']}
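+    # '\015' is a carriage return, so each line of the test message above ends
+    # in CRLF as it would on the wire; the expected copy in mbox is the same
+    # text with bare LF endings, as produced when DummyMessage joins the
+    # received lines with '\n'.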
+
+    def setUp(self):
+        """
+        Create an in-memory mail domain to which messages may be delivered by
+        tests and create a factory and transport to do the delivering.
+        """
+        self.factory = smtp.SMTPFactory()
+        self.factory.domains = {}
+        self.factory.domains['baz.com'] = DummyDomain(['foo'])
+        self.transport = StringTransport()
+
+
+    def testMessages(self):
+        from twisted.mail import protocols
+        protocol = protocols.DomainSMTP()
+        protocol.service = self.factory
+        protocol.factory = self.factory
+        protocol.receivedHeader = spameater
+        protocol.makeConnection(self.transport)
+        protocol.lineReceived('HELO yyy.com')
+        for message in self.messages:
+            protocol.lineReceived('MAIL FROM:<%s>' % message[0])
+            for target in message[1]:
+                protocol.lineReceived('RCPT TO:<%s>' % target)
+            protocol.lineReceived('DATA')
+            protocol.dataReceived(message[2])
+            protocol.lineReceived('.')
+        protocol.lineReceived('QUIT')
+        if self.mbox != self.factory.domains['baz.com'].messages:
+            raise AssertionError(self.factory.domains['baz.com'].messages)
+        protocol.setTimeout(None)
+
+    testMessages.suppress = [util.suppress(message='DomainSMTP', category=DeprecationWarning)]
+
+mail = '''\
+Subject: hello
+
+Goodbye
+'''
+
+class MyClient:
+    def __init__(self, messageInfo=None):
+        if messageInfo is None:
+            messageInfo = (
+                'moshez@foo.bar', ['moshez@foo.bar'], StringIO(mail))
+        self._sender = messageInfo[0]
+        self._recipient = messageInfo[1]
+        self._data = messageInfo[2]
+
+
+    def getMailFrom(self):
+        return self._sender
+
+
+    def getMailTo(self):
+        return self._recipient
+
+
+    def getMailData(self):
+        return self._data
+
+
+    def sendError(self, exc):
+        self._error = exc
+
+
+    def sentMail(self, code, resp, numOk, addresses, log):
+        # Prevent another mail from being sent.
+        self._sender = None
+        self._recipient = None
+        self._data = None
+
+
+
+class MySMTPClient(MyClient, smtp.SMTPClient):
+    def __init__(self, messageInfo=None):
+        smtp.SMTPClient.__init__(self, 'foo.baz')
+        MyClient.__init__(self, messageInfo)
+
+class MyESMTPClient(MyClient, smtp.ESMTPClient):
+    def __init__(self, secret = '', contextFactory = None):
+        smtp.ESMTPClient.__init__(self, secret, contextFactory, 'foo.baz')
+        MyClient.__init__(self)
+
+class LoopbackMixin:
+    def loopback(self, server, client):
+        return loopback.loopbackTCP(server, client)
+
+class LoopbackTestCase(LoopbackMixin):
+    def testMessages(self):
+        factory = smtp.SMTPFactory()
+        factory.domains = {}
+        factory.domains['foo.bar'] = DummyDomain(['moshez'])
+        from twisted.mail.protocols import DomainSMTP
+        protocol = DomainSMTP()
+        protocol.service = factory
+        protocol.factory = factory
+        clientProtocol = self.clientClass()
+        return self.loopback(protocol, clientProtocol)
+    testMessages.suppress = [util.suppress(message='DomainSMTP', category=DeprecationWarning)]
+
+class LoopbackSMTPTestCase(LoopbackTestCase, unittest.TestCase):
+    clientClass = MySMTPClient
+
+class LoopbackESMTPTestCase(LoopbackTestCase, unittest.TestCase):
+    clientClass = MyESMTPClient
+
+
+class FakeSMTPServer(basic.LineReceiver):
+
+    clientData = [
+        '220 hello', '250 nice to meet you',
+        '250 great', '250 great', '354 go on, lad'
+    ]
+
+    def connectionMade(self):
+        self.buffer = []
+        self.clientData = self.clientData[:]
+        self.clientData.reverse()
+        self.sendLine(self.clientData.pop())
+
+    def lineReceived(self, line):
+        self.buffer.append(line)
+        if line == "QUIT":
+            self.transport.write("221 see ya around\r\n")
+            self.transport.loseConnection()
+        elif line == ".":
+            self.transport.write("250 gotcha\r\n")
+        elif line == "RSET":
+            self.transport.loseConnection()
+
+        if self.clientData:
+            self.sendLine(self.clientData.pop())
+
+
+class SMTPClientTestCase(unittest.TestCase, LoopbackMixin):
+    """
+    Tests for L{smtp.SMTPClient}.
+    """
+
+    def test_timeoutConnection(self):
+        """
+        L{smtp.SMTPClient.timeoutConnection} calls the C{sendError} hook with a
+        fatal L{SMTPTimeoutError} with the current line log.
+        """
+        error = []
+        client = MySMTPClient()
+        client.sendError = error.append
+        client.makeConnection(StringTransport())
+        client.lineReceived("220 hello")
+        client.timeoutConnection()
+        self.assertIsInstance(error[0], smtp.SMTPTimeoutError)
+        self.assertTrue(error[0].isFatal)
+        self.assertEqual(
+            str(error[0]),
+            "Timeout waiting for SMTP server response\n"
+            "<<< 220 hello\n"
+            ">>> HELO foo.baz\n")
+
+
+    expected_output = [
+        'HELO foo.baz', 'MAIL FROM:<moshez@foo.bar>',
+        'RCPT TO:<moshez@foo.bar>', 'DATA',
+        'Subject: hello', '', 'Goodbye', '.', 'RSET'
+    ]
+
+    def test_messages(self):
+        """
+        L{smtp.SMTPClient} sends I{HELO}, I{MAIL FROM}, I{RCPT TO}, and I{DATA}
+        commands based on the return values of its C{getMailFrom},
+        C{getMailTo}, and C{getMailData} methods.
+        """
+        client = MySMTPClient()
+        server = FakeSMTPServer()
+        d = self.loopback(server, client)
+        d.addCallback(lambda x :
+                      self.assertEqual(server.buffer, self.expected_output))
+        return d
+
+
+    def test_transferError(self):
+        """
+        If there is an error while producing the message body to the
+        connection, the C{sendError} callback is invoked.
+        """
+        client = MySMTPClient(
+            ('alice@example.com', ['bob@example.com'], StringIO("foo")))
+        transport = StringTransport()
+        client.makeConnection(transport)
+        client.dataReceived(
+            '220 Ok\r\n' # Greeting
+            '250 Ok\r\n' # EHLO response
+            '250 Ok\r\n' # MAIL FROM response
+            '250 Ok\r\n' # RCPT TO response
+            '354 Ok\r\n' # DATA response
+            )
+
+        # Sanity check - a pull producer should be registered now.
+        self.assertNotIdentical(transport.producer, None)
+        self.assertFalse(transport.streaming)
+
+        # Now stop the producer prematurely, meaning the message was not sent.
+        transport.producer.stopProducing()
+
+        # The sendError hook should have been invoked as a result.
+        self.assertIsInstance(client._error, Exception)
+
+
+    def test_sendFatalError(self):
+        """
+        If L{smtp.SMTPClient.sendError} is called with an L{SMTPClientError}
+        which is fatal, it disconnects its transport without writing anything
+        more to it.
+        """
+        client = smtp.SMTPClient(None)
+        transport = StringTransport()
+        client.makeConnection(transport)
+        client.sendError(smtp.SMTPClientError(123, "foo", isFatal=True))
+        self.assertEqual(transport.value(), "")
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_sendNonFatalError(self):
+        """
+        If L{smtp.SMTPClient.sendError} is called with an L{SMTPClientError}
+        which is not fatal, it sends C{"QUIT"} and waits for the server to
+        close the connection.
+        """
+        client = smtp.SMTPClient(None)
+        transport = StringTransport()
+        client.makeConnection(transport)
+        client.sendError(smtp.SMTPClientError(123, "foo", isFatal=False))
+        self.assertEqual(transport.value(), "QUIT\r\n")
+        self.assertFalse(transport.disconnecting)
+
+
+    def test_sendOtherError(self):
+        """
+        If L{smtp.SMTPClient.sendError} is called with an exception which is
+        not an L{SMTPClientError}, it disconnects its transport without
+        writing anything more to it.
+        """
+        client = smtp.SMTPClient(None)
+        transport = StringTransport()
+        client.makeConnection(transport)
+        client.sendError(Exception("foo"))
+        self.assertEqual(transport.value(), "")
+        self.assertTrue(transport.disconnecting)
+
+
+
+class DummySMTPMessage:
+
+    def __init__(self, protocol, users):
+        self.protocol = protocol
+        self.users = users
+        self.buffer = []
+
+    def lineReceived(self, line):
+        self.buffer.append(line)
+
+    def eomReceived(self):
+        message = '\n'.join(self.buffer) + '\n'
+        helo, origin = self.users[0].helo[0], str(self.users[0].orig)
+        recipients = []
+        for user in self.users:
+            recipients.append(str(user))
+        self.protocol.message[tuple(recipients)] = (helo, origin, recipients, message)
+        return defer.succeed("saved")
+
+
+
+class DummyProto:
+    def connectionMade(self):
+        self.dummyMixinBase.connectionMade(self)
+        self.message = {}
+
+    def startMessage(self, users):
+        return DummySMTPMessage(self, users)
+
+    def receivedHeader(*spam):
+        return None
+
+    def validateTo(self, user):
+        self.delivery = SimpleDelivery(None)
+        return lambda: self.startMessage([user])
+
+    def validateFrom(self, helo, origin):
+        return origin
+
+
+
+class DummySMTP(DummyProto, smtp.SMTP):
+    dummyMixinBase = smtp.SMTP
+
+class DummyESMTP(DummyProto, smtp.ESMTP):
+    dummyMixinBase = smtp.ESMTP
+
+class AnotherTestCase:
+    serverClass = None
+    clientClass = None
+
+    messages = [ ('foo.com', 'moshez@foo.com', ['moshez@bar.com'],
+                  'moshez@foo.com', ['moshez@bar.com'], '''\
+From: Moshe
+To: Moshe
+
+Hi,
+how are you?
+'''),
+                 ('foo.com', 'tttt@rrr.com', ['uuu@ooo', 'yyy@eee'],
+                  'tttt@rrr.com', ['uuu@ooo', 'yyy@eee'], '''\
+Subject: pass
+
+..rrrr..
+'''),
+                 ('foo.com', '@this,@is,@ignored:foo@bar.com',
+                  ['@ignore,@this,@too:bar@foo.com'],
+                  'foo@bar.com', ['bar@foo.com'], '''\
+Subject: apa
+To: foo
+
+123
+.
+456
+'''),
+              ]
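+    # These fixtures exercise two corner cases: bodies containing lines that
+    # begin with '.' (which must be dot-stuffed while in the DATA state), and,
+    # in the third entry, obsolete source-route addresses of the form
+    # '@a,@b:user@host', which are expected to be reduced to the bare mailbox
+    # (hence the separate "real" sender and recipient columns).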
+
+    data = [
+        ('', '220.*\r\n$', None, None),
+        ('HELO foo.com\r\n', '250.*\r\n$', None, None),
+        ('RSET\r\n', '250.*\r\n$', None, None),
+        ]
+    for helo_, from_, to_, realfrom, realto, msg in messages:
+        data.append(('MAIL FROM:<%s>\r\n' % from_, '250.*\r\n',
+                     None, None))
+        for rcpt in to_:
+            data.append(('RCPT TO:<%s>\r\n' % rcpt, '250.*\r\n',
+                         None, None))
+
+        data.append(('DATA\r\n','354.*\r\n',
+                     msg, ('250.*\r\n',
+                           (helo_, realfrom, realto, msg))))
+
+
+    def test_buffer(self):
+        """
+        Exercise a lot of the SMTP client code.  This is a "shotgun" style unit
+        test.  It does a lot of things and hopes that something will go really
+        wrong if it is going to go wrong.  This test should be replaced with a
+        suite of nicer tests.
+        """
+        transport = StringTransport()
+        a = self.serverClass()
+        class fooFactory:
+            domain = 'foo.com'
+
+        a.factory = fooFactory()
+        a.makeConnection(transport)
+        for (send, expect, msg, msgexpect) in self.data:
+            if send:
+                a.dataReceived(send)
+            data = transport.value()
+            transport.clear()
+            if not re.match(expect, data):
+                raise AssertionError((send, expect, data))
+            if data[:3] == '354':
+                for line in msg.splitlines():
+                    if line and line[0] == '.':
+                        line = '.' + line
+                    a.dataReceived(line + '\r\n')
+                a.dataReceived('.\r\n')
+                # Special case for DATA. Now we want a 250, and then
+                # we compare the messages
+                data = transport.value()
+                transport.clear()
+                resp, msgdata = msgexpect
+                if not re.match(resp, data):
+                    raise AssertionError((resp, data))
+                for recip in msgdata[2]:
+                    expected = list(msgdata[:])
+                    expected[2] = [recip]
+                    self.assertEqual(
+                        a.message[(recip,)],
+                        tuple(expected)
+                    )
+        a.setTimeout(None)
+
+
+class AnotherESMTPTestCase(AnotherTestCase, unittest.TestCase):
+    serverClass = DummyESMTP
+    clientClass = MyESMTPClient
+
+class AnotherSMTPTestCase(AnotherTestCase, unittest.TestCase):
+    serverClass = DummySMTP
+    clientClass = MySMTPClient
+
+
+
+class DummyChecker:
+    implements(cred.checkers.ICredentialsChecker)
+
+    users = {
+        'testuser': 'testpassword'
+    }
+
+    credentialInterfaces = (cred.credentials.IUsernamePassword,
+                            cred.credentials.IUsernameHashedPassword)
+
+    def requestAvatarId(self, credentials):
+        return defer.maybeDeferred(
+            credentials.checkPassword, self.users[credentials.username]
+        ).addCallback(self._cbCheck, credentials.username)
+
+    def _cbCheck(self, result, username):
+        if result:
+            return username
+        raise cred.error.UnauthorizedLogin()
+
+
+
+class SimpleDelivery(object):
+    """
+    L{SimpleDelivery} is a message delivery factory with no interesting
+    behavior.
+    """
+    implements(smtp.IMessageDelivery)
+
+    def __init__(self, messageFactory):
+        self._messageFactory = messageFactory
+
+
+    def receivedHeader(self, helo, origin, recipients):
+        return None
+
+
+    def validateFrom(self, helo, origin):
+        return origin
+
+
+    def validateTo(self, user):
+        return lambda: self._messageFactory(user)
+
+
+
+class DummyRealm:
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        return smtp.IMessageDelivery, SimpleDelivery(None), lambda: None
+
+
+
+class AuthTestCase(unittest.TestCase, LoopbackMixin):
+    def test_crammd5Auth(self):
+        """
+        L{ESMTPClient} can authenticate using the I{CRAM-MD5} SASL mechanism.
+
+        @see: U{http://tools.ietf.org/html/rfc2195}
+        """
+        realm = DummyRealm()
+        p = cred.portal.Portal(realm)
+        p.registerChecker(DummyChecker())
+
+        server = DummyESMTP({'CRAM-MD5': cred.credentials.CramMD5Credentials})
+        server.portal = p
+        client = MyESMTPClient('testpassword')
+
+        cAuth = smtp.CramMD5ClientAuthenticator('testuser')
+        client.registerAuthenticator(cAuth)
+
+        d = self.loopback(server, client)
+        d.addCallback(lambda x : self.assertEqual(server.authenticated, 1))
+        return d
+
+
+    def test_loginAuth(self):
+        """
+        L{ESMTPClient} can authenticate using the I{LOGIN} SASL mechanism.
+
+        @see: U{http://sepp.oetiker.ch/sasl-2.1.19-ds/draft-murchison-sasl-login-00.txt}
+        """
+        realm = DummyRealm()
+        p = cred.portal.Portal(realm)
+        p.registerChecker(DummyChecker())
+
+        server = DummyESMTP({'LOGIN': imap4.LOGINCredentials})
+        server.portal = p
+        client = MyESMTPClient('testpassword')
+
+        cAuth = smtp.LOGINAuthenticator('testuser')
+        client.registerAuthenticator(cAuth)
+
+        d = self.loopback(server, client)
+        d.addCallback(lambda x: self.assertTrue(server.authenticated))
+        return d
+
+
+    def test_loginAgainstWeirdServer(self):
+        """
+        When communicating with a server which implements the I{LOGIN} SASL
+        mechanism using C{"Username:"} as the challenge (rather than C{"User
+        Name\\0"}), L{ESMTPClient} can still authenticate successfully using
+        the I{LOGIN} mechanism.
+        """
+        realm = DummyRealm()
+        p = cred.portal.Portal(realm)
+        p.registerChecker(DummyChecker())
+
+        server = DummyESMTP({'LOGIN': smtp.LOGINCredentials})
+        server.portal = p
+
+        client = MyESMTPClient('testpassword')
+        cAuth = smtp.LOGINAuthenticator('testuser')
+        client.registerAuthenticator(cAuth)
+
+        d = self.loopback(server, client)
+        d.addCallback(lambda x: self.assertTrue(server.authenticated))
+        return d
+
+
+
+class SMTPHelperTestCase(unittest.TestCase):
+    def testMessageID(self):
+        d = {}
+        for i in range(1000):
+            m = smtp.messageid('testcase')
+            self.failIf(m in d)
+            d[m] = None
+
+    def testQuoteAddr(self):
+        cases = [
+            ['user@host.name', '<user@host.name>'],
+            ['"User Name" <user@host.name>', '<user@host.name>'],
+            [smtp.Address('someguy@someplace'), '<someguy@someplace>'],
+            ['', '<>'],
+            [smtp.Address(''), '<>'],
+        ]
+
+        for (c, e) in cases:
+            self.assertEqual(smtp.quoteaddr(c), e)
+
+    def testUser(self):
+        u = smtp.User('user@host', 'helo.host.name', None, None)
+        self.assertEqual(str(u), 'user@host')
+
+    def testXtextEncoding(self):
+        cases = [
+            ('Hello world', 'Hello+20world'),
+            ('Hello+world', 'Hello+2Bworld'),
+            ('\0\1\2\3\4\5', '+00+01+02+03+04+05'),
+            ('e=mc2@example.com', 'e+3Dmc2@example.com')
+        ]
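+        # The xtext rule behind these pairs (RFC 3461): printable US-ASCII
+        # characters in the range 33-126 other than '+' and '=' pass through
+        # unchanged, and every other byte is encoded as '+' followed by two
+        # uppercase hexadecimal digits.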
+
+        for (case, expected) in cases:
+            self.assertEqual(smtp.xtext_encode(case), (expected, len(case)))
+            self.assertEqual(case.encode('xtext'), expected)
+            self.assertEqual(
+                smtp.xtext_decode(expected), (case, len(expected)))
+            self.assertEqual(expected.decode('xtext'), case)
+
+
+    def test_encodeWithErrors(self):
+        """
+        Specifying an error policy to C{unicode.encode} with the
+        I{xtext} codec should produce the same result as not
+        specifying the error policy.
+        """
+        text = u'Hello world'
+        self.assertEqual(
+            smtp.xtext_encode(text, 'strict'),
+            (text.encode('xtext'), len(text)))
+        self.assertEqual(
+            text.encode('xtext', 'strict'),
+            text.encode('xtext'))
+
+
+    def test_decodeWithErrors(self):
+        """
+        Similar to L{test_encodeWithErrors}, but for C{str.decode}.
+        """
+        bytes = 'Hello world'
+        self.assertEqual(
+            smtp.xtext_decode(bytes, 'strict'),
+            (bytes.decode('xtext'), len(bytes)))
+        self.assertEqual(
+            bytes.decode('xtext', 'strict'),
+            bytes.decode('xtext'))
+
+
+
+class NoticeTLSClient(MyESMTPClient):
+    tls = False
+
+    def esmtpState_starttls(self, code, resp):
+        MyESMTPClient.esmtpState_starttls(self, code, resp)
+        self.tls = True
+
+class TLSTestCase(unittest.TestCase, LoopbackMixin):
+    def testTLS(self):
+        clientCTX = ClientTLSContext()
+        serverCTX = ServerTLSContext()
+
+        client = NoticeTLSClient(contextFactory=clientCTX)
+        server = DummyESMTP(contextFactory=serverCTX)
+
+        def check(ignored):
+            self.assertEqual(client.tls, True)
+            self.assertEqual(server.startedTLS, True)
+
+        return self.loopback(server, client).addCallback(check)
+
+if ClientTLSContext is None:
+    for case in (TLSTestCase,):
+        case.skip = "OpenSSL not present"
+
+if not interfaces.IReactorSSL.providedBy(reactor):
+    for case in (TLSTestCase,):
+        case.skip = "Reactor doesn't support SSL"
+
+class EmptyLineTestCase(unittest.TestCase):
+    def test_emptyLineSyntaxError(self):
+        """
+        If L{smtp.SMTP} receives an empty line, it responds with a 500 error
+        response code and a message about a syntax error.
+        """
+        proto = smtp.SMTP()
+        transport = StringTransport()
+        proto.makeConnection(transport)
+        proto.lineReceived('')
+        proto.setTimeout(None)
+
+        out = transport.value().splitlines()
+        self.assertEqual(len(out), 2)
+        self.failUnless(out[0].startswith('220'))
+        self.assertEqual(out[1], "500 Error: bad syntax")
+
+
+
+class TimeoutTestCase(unittest.TestCase, LoopbackMixin):
+    """
+    Check that SMTP client factories correctly use the timeout.
+    """
+
+    def _timeoutTest(self, onDone, clientFactory):
+        """
+        Connect the clientFactory, and check the timeout on the request.
+        """
+        clock = task.Clock()
+        client = clientFactory.buildProtocol(
+            address.IPv4Address('TCP', 'example.net', 25))
+        client.callLater = clock.callLater
+        t = StringTransport()
+        client.makeConnection(t)
+        t.protocol = client
+        def check(ign):
+            self.assertEqual(clock.seconds(), 0.5)
+        d = self.assertFailure(onDone, smtp.SMTPTimeoutError
+            ).addCallback(check)
+        # The first call should not trigger the timeout
+        clock.advance(0.1)
+        # But this one should
+        clock.advance(0.4)
+        return d
+
+
+    def test_SMTPClient(self):
+        """
+        Test timeout for L{smtp.SMTPSenderFactory}: the response L{Deferred}
+        should errback with a L{smtp.SMTPTimeoutError}.
+        """
+        onDone = defer.Deferred()
+        clientFactory = smtp.SMTPSenderFactory(
+            'source@address', 'recipient@address',
+            StringIO("Message body"), onDone,
+            retries=0, timeout=0.5)
+        return self._timeoutTest(onDone, clientFactory)
+
+
+    def test_ESMTPClient(self):
+        """
+        Test timeout for L{smtp.ESMTPSenderFactory}: the response L{Deferred}
+        should errback with a L{smtp.SMTPTimeoutError}.
+        """
+        onDone = defer.Deferred()
+        clientFactory = smtp.ESMTPSenderFactory(
+            'username', 'password',
+            'source@address', 'recipient@address',
+            StringIO("Message body"), onDone,
+            retries=0, timeout=0.5)
+        return self._timeoutTest(onDone, clientFactory)
+
+
+    def test_resetTimeoutWhileSending(self):
+        """
+        The timeout is not allowed to expire after the server has accepted a
+        DATA command and the client is actively sending data to it.
+        """
+        class SlowFile:
+            """
+            A file-like which returns one byte from each read call until the
+            specified number of bytes have been returned.
+            """
+            def __init__(self, size):
+                self._size = size
+
+            def read(self, max=None):
+                if self._size:
+                    self._size -= 1
+                    return 'x'
+                return ''
+
+        failed = []
+        onDone = defer.Deferred()
+        onDone.addErrback(failed.append)
+        clientFactory = smtp.SMTPSenderFactory(
+            'source@address', 'recipient@address',
+            SlowFile(1), onDone, retries=0, timeout=3)
+        clientFactory.domain = "example.org"
+        clock = task.Clock()
+        client = clientFactory.buildProtocol(
+            address.IPv4Address('TCP', 'example.net', 25))
+        client.callLater = clock.callLater
+        transport = StringTransport()
+        client.makeConnection(transport)
+
+        client.dataReceived(
+            "220 Ok\r\n" # Greet the client
+            "250 Ok\r\n" # Respond to HELO
+            "250 Ok\r\n" # Respond to MAIL FROM
+            "250 Ok\r\n" # Respond to RCPT TO
+            "354 Ok\r\n" # Respond to DATA
+            )
+
+        # Now the client is producing data to the server.  Any time
+        # resumeProducing is called on the producer, the timeout should be
+        # extended.  First, a sanity check.  This test is only written to
+        # handle pull producers.
+        self.assertNotIdentical(transport.producer, None)
+        self.assertFalse(transport.streaming)
+
+        # Now, allow 2 seconds (1 less than the timeout of 3 seconds) to
+        # elapse.
+        clock.advance(2)
+
+        # The timeout has not expired, so the failure should not have happened.
+        self.assertEqual(failed, [])
+
+        # Let some bytes be produced, extending the timeout.  Then advance the
+        # clock some more and verify that the timeout still hasn't happened.
+        transport.producer.resumeProducing()
+        clock.advance(2)
+        self.assertEqual(failed, [])
+
+        # The file has been completely produced - the next resume producing
+        # finishes the upload, successfully.
+        transport.producer.resumeProducing()
+        client.dataReceived("250 Ok\r\n")
+        self.assertEqual(failed, [])
+
+        # Verify that the client actually did send the things expected.
+        self.assertEqual(
+            transport.value(),
+            "HELO example.org\r\n"
+            "MAIL FROM:<source at address>\r\n"
+            "RCPT TO:<recipient at address>\r\n"
+            "DATA\r\n"
+            "x\r\n"
+            ".\r\n"
+            # This RSET is just an implementation detail.  It's nice, but this
+            # test doesn't really care about it.
+            "RSET\r\n")
+
+
+
+class MultipleDeliveryFactorySMTPServerFactory(protocol.ServerFactory):
+    """
+    L{MultipleDeliveryFactorySMTPServerFactory} creates SMTP server protocol
+    instances with message delivery factory objects supplied to it.  Each
+    factory is used for one connection and then discarded.  Factories are used
+    in the order they are supplied.
+    """
+    def __init__(self, messageFactories):
+        self._messageFactories = messageFactories
+
+
+    def buildProtocol(self, addr):
+        p = protocol.ServerFactory.buildProtocol(self, addr)
+        p.delivery = SimpleDelivery(self._messageFactories.pop(0))
+        return p
+
+
+
+class SMTPSenderFactoryRetryTestCase(unittest.TestCase):
+    """
+    Tests for the retry behavior of L{smtp.SMTPSenderFactory}.
+    """
+    def test_retryAfterDisconnect(self):
+        """
+        If the protocol created by L{SMTPSenderFactory} loses its connection
+        before receiving confirmation of message delivery, it reconnects and
+        tries to deliver the message again.
+        """
+        recipient = 'alice'
+        message = "some message text"
+        domain = DummyDomain([recipient])
+
+        class CleanSMTP(smtp.SMTP):
+            """
+            An SMTP subclass which ensures that its transport will be
+            disconnected before the test ends.
+            """
+            def makeConnection(innerSelf, transport):
+                self.addCleanup(transport.loseConnection)
+                smtp.SMTP.makeConnection(innerSelf, transport)
+
+        # Create a server which will fail the first message delivery attempt
+        # to it with a 500 and a disconnect, but which will accept a message
+        # delivered over the second connection to it.
+        serverFactory = MultipleDeliveryFactorySMTPServerFactory([
+                BrokenMessage,
+                lambda user: DummyMessage(domain, user)])
+        serverFactory.protocol = CleanSMTP
+        serverPort = reactor.listenTCP(0, serverFactory, interface='127.0.0.1')
+        serverHost = serverPort.getHost()
+        self.addCleanup(serverPort.stopListening)
+
+        # Set up a client to try to deliver a message to the above created
+        # server.
+        sentDeferred = defer.Deferred()
+        clientFactory = smtp.SMTPSenderFactory(
+            "bob at example.org", recipient + "@example.com",
+            StringIO(message), sentDeferred)
+        clientFactory.domain = "example.org"
+        clientConnector = reactor.connectTCP(
+            serverHost.host, serverHost.port, clientFactory)
+        self.addCleanup(clientConnector.disconnect)
+
+        def cbSent(ignored):
+            """
+            Verify that the message was successfully delivered and flush the
+            error which caused the first attempt to fail.
+            """
+            self.assertEqual(
+                domain.messages,
+                {recipient: ["\n%s\n" % (message,)]})
+            # Flush the RuntimeError that BrokenMessage caused to be logged.
+            self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1)
+        sentDeferred.addCallback(cbSent)
+        return sentDeferred
+
+
+
+class SingletonRealm(object):
+    """
+    Trivial realm implementation which is constructed with an interface and an
+    avatar and returns that avatar when asked for that interface.
+    """
+    implements(IRealm)
+
+    def __init__(self, interface, avatar):
+        self.interface = interface
+        self.avatar = avatar
+
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        for iface in interfaces:
+            if iface is self.interface:
+                return iface, self.avatar, lambda: None
+
+
+
+class NotImplementedDelivery(object):
+    """
+    Non-implementation of L{smtp.IMessageDelivery} which only has methods which
+    raise L{NotImplementedError}.  Subclassed by various tests to provide the
+    particular behavior being tested.
+    """
+    def validateFrom(self, helo, origin):
+        raise NotImplementedError("This oughtn't be called in the course of this test.")
+
+
+    def validateTo(self, user):
+        raise NotImplementedError("This oughtn't be called in the course of this test.")
+
+
+    def receivedHeader(self, helo, origin, recipients):
+        raise NotImplementedError("This oughtn't be called in the course of this test.")
+
+
+
+class SMTPServerTestCase(unittest.TestCase):
+    """
+    Test various behaviors of L{twisted.mail.smtp.SMTP} and
+    L{twisted.mail.smtp.ESMTP}.
+    """
+    def testSMTPGreetingHost(self, serverClass=smtp.SMTP):
+        """
+        Test that the specified hostname shows up in the SMTP server's
+        greeting.
+        """
+        s = serverClass()
+        s.host = "example.com"
+        t = StringTransport()
+        s.makeConnection(t)
+        s.connectionLost(error.ConnectionDone())
+        self.assertIn("example.com", t.value())
+
+
+    def testSMTPGreetingNotExtended(self):
+        """
+        Test that the string "ESMTP" does not appear in the SMTP server's
+        greeting since that string strongly suggests the presence of support
+        for various SMTP extensions which are not supported by L{smtp.SMTP}.
+        """
+        s = smtp.SMTP()
+        t = StringTransport()
+        s.makeConnection(t)
+        s.connectionLost(error.ConnectionDone())
+        self.assertNotIn("ESMTP", t.value())
+
+
+    def testESMTPGreetingHost(self):
+        """
+        Similar to testSMTPGreetingHost, but for the L{smtp.ESMTP} class.
+        """
+        self.testSMTPGreetingHost(smtp.ESMTP)
+
+
+    def testESMTPGreetingExtended(self):
+        """
+        Test that the string "ESMTP" does appear in the ESMTP server's
+        greeting since L{smtp.ESMTP} does support the SMTP extensions which
+        that string advertises to the client.
+        """
+        s = smtp.ESMTP()
+        t = StringTransport()
+        s.makeConnection(t)
+        s.connectionLost(error.ConnectionDone())
+        self.assertIn("ESMTP", t.value())
+
+
+    def test_acceptSenderAddress(self):
+        """
+        Test that a C{MAIL FROM} command with an acceptable address is
+        responded to with the correct success code.
+        """
+        class AcceptanceDelivery(NotImplementedDelivery):
+            """
+            Delivery object which accepts all senders as valid.
+            """
+            def validateFrom(self, helo, origin):
+                return origin
+
+        realm = SingletonRealm(smtp.IMessageDelivery, AcceptanceDelivery())
+        portal = Portal(realm, [AllowAnonymousAccess()])
+        proto = smtp.SMTP()
+        proto.portal = portal
+        trans = StringTransport()
+        proto.makeConnection(trans)
+
+        # Deal with the necessary preliminaries
+        proto.dataReceived('HELO example.com\r\n')
+        trans.clear()
+
+        # Try to specify our sender address
+        proto.dataReceived('MAIL FROM:<alice@example.com>\r\n')
+
+        # Clean up the protocol before doing anything that might raise an
+        # exception.
+        proto.connectionLost(error.ConnectionLost())
+
+        # Make sure that we received exactly the correct response
+        self.assertEqual(
+            trans.value(),
+            '250 Sender address accepted\r\n')
+
+
+    def test_deliveryRejectedSenderAddress(self):
+        """
+        Test that a C{MAIL FROM} command with an address rejected by a
+        L{smtp.IMessageDelivery} instance is responded to with the correct
+        error code.
+        """
+        class RejectionDelivery(NotImplementedDelivery):
+            """
+            Delivery object which rejects all senders as invalid.
+            """
+            def validateFrom(self, helo, origin):
+                raise smtp.SMTPBadSender(origin)
+
+        realm = SingletonRealm(smtp.IMessageDelivery, RejectionDelivery())
+        portal = Portal(realm, [AllowAnonymousAccess()])
+        proto = smtp.SMTP()
+        proto.portal = portal
+        trans = StringTransport()
+        proto.makeConnection(trans)
+
+        # Deal with the necessary preliminaries
+        proto.dataReceived('HELO example.com\r\n')
+        trans.clear()
+
+        # Try to specify our sender address
+        proto.dataReceived('MAIL FROM:<alice@example.com>\r\n')
+
+        # Clean up the protocol before doing anything that might raise an
+        # exception.
+        proto.connectionLost(error.ConnectionLost())
+
+        # Make sure that we received exactly the correct response
+        self.assertEqual(
+            trans.value(),
+            '550 Cannot receive from specified address '
+            '<alice@example.com>: Sender not acceptable\r\n')
+
+
+    def test_portalRejectedSenderAddress(self):
+        """
+        Test that a C{MAIL FROM} command with an address rejected by an
+        L{smtp.SMTP} instance's portal is responded to with the correct error
+        code.
+        """
+        class DisallowAnonymousAccess(object):
+            """
+            Checker for L{IAnonymous} which rejects authentication attempts.
+            """
+            implements(ICredentialsChecker)
+
+            credentialInterfaces = (IAnonymous,)
+
+            def requestAvatarId(self, credentials):
+                return defer.fail(UnauthorizedLogin())
+
+        realm = SingletonRealm(smtp.IMessageDelivery, NotImplementedDelivery())
+        portal = Portal(realm, [DisallowAnonymousAccess()])
+        proto = smtp.SMTP()
+        proto.portal = portal
+        trans = StringTransport()
+        proto.makeConnection(trans)
+
+        # Deal with the necessary preliminaries
+        proto.dataReceived('HELO example.com\r\n')
+        trans.clear()
+
+        # Try to specify our sender address
+        proto.dataReceived('MAIL FROM:<alice@example.com>\r\n')
+
+        # Clean up the protocol before doing anything that might raise an
+        # exception.
+        proto.connectionLost(error.ConnectionLost())
+
+        # Make sure that we received exactly the correct response
+        self.assertEqual(
+            trans.value(),
+            '550 Cannot receive from specified address '
+            '<alice@example.com>: Sender not acceptable\r\n')
+
+
+    def test_portalRejectedAnonymousSender(self):
+        """
+        Test that a C{MAIL FROM} command issued without first authenticating
+        when a portal has been configured to disallow anonymous logins is
+        responded to with the correct error code.
+        """
+        realm = SingletonRealm(smtp.IMessageDelivery, NotImplementedDelivery())
+        portal = Portal(realm, [])
+        proto = smtp.SMTP()
+        proto.portal = portal
+        trans = StringTransport()
+        proto.makeConnection(trans)
+
+        # Deal with the necessary preliminaries
+        proto.dataReceived('HELO example.com\r\n')
+        trans.clear()
+
+        # Try to specify our sender address
+        proto.dataReceived('MAIL FROM:<alice@example.com>\r\n')
+
+        # Clean up the protocol before doing anything that might raise an
+        # exception.
+        proto.connectionLost(error.ConnectionLost())
+
+        # Make sure that we received exactly the correct response
+        self.assertEqual(
+            trans.value(),
+            '550 Cannot receive from specified address '
+            '<alice@example.com>: Unauthenticated senders not allowed\r\n')
+
+
+
+class ESMTPAuthenticationTestCase(unittest.TestCase):
+    def assertServerResponse(self, bytes, response):
+        """
+        Assert that when the given bytes are delivered to the ESMTP server
+        instance, it responds with the indicated lines.
+
+        @type bytes: str
+        @type response: list of str
+        """
+        self.transport.clear()
+        self.server.dataReceived(bytes)
+        self.assertEqual(
+            response,
+            self.transport.value().splitlines())
+
+
+    def assertServerAuthenticated(self, loginArgs, username="username", password="password"):
+        """
+        Assert that a login attempt has been made, that the credentials and
+        interfaces passed to it are correct, and that when the login request
+        is satisfied, a successful response is sent by the ESMTP server
+        instance.
+
+        @param loginArgs: A C{list} previously passed to L{portalFactory}.
+        """
+        d, credentials, mind, interfaces = loginArgs.pop()
+        self.assertEqual(loginArgs, [])
+        self.failUnless(twisted.cred.credentials.IUsernamePassword.providedBy(credentials))
+        self.assertEqual(credentials.username, username)
+        self.failUnless(credentials.checkPassword(password))
+        self.assertIn(smtp.IMessageDeliveryFactory, interfaces)
+        self.assertIn(smtp.IMessageDelivery, interfaces)
+        d.callback((smtp.IMessageDeliveryFactory, None, lambda: None))
+
+        self.assertEqual(
+            ["235 Authentication successful."],
+            self.transport.value().splitlines())
+
+
+    def setUp(self):
+        """
+        Create an ESMTP instance attached to a StringTransport.
+        """
+        self.server = smtp.ESMTP({
+                'LOGIN': imap4.LOGINCredentials})
+        self.server.host = 'localhost'
+        self.transport = StringTransport(
+            peerAddress=address.IPv4Address('TCP', '127.0.0.1', 12345))
+        self.server.makeConnection(self.transport)
+
+
+    def tearDown(self):
+        """
+        Disconnect the ESMTP instance to clean up its timeout DelayedCall.
+        """
+        self.server.connectionLost(error.ConnectionDone())
+
+
+    def portalFactory(self, loginList):
+        class DummyPortal:
+            def login(self, credentials, mind, *interfaces):
+                d = defer.Deferred()
+                loginList.append((d, credentials, mind, interfaces))
+                return d
+        return DummyPortal()
+
+
+    def test_authenticationCapabilityAdvertised(self):
+        """
+        Test that AUTH is advertised to clients which issue an EHLO command.
+        """
+        self.transport.clear()
+        self.server.dataReceived('EHLO\r\n')
+        responseLines = self.transport.value().splitlines()
+        self.assertEqual(
+            responseLines[0],
+            "250-localhost Hello 127.0.0.1, nice to meet you")
+        self.assertEqual(
+            responseLines[1],
+            "250 AUTH LOGIN")
+        self.assertEqual(len(responseLines), 2)
+
+
+    def test_plainAuthentication(self):
+        """
+        Test that the LOGIN authentication mechanism can be used.
+        """
+        loginArgs = []
+        self.server.portal = self.portalFactory(loginArgs)
+
+        self.server.dataReceived('EHLO\r\n')
+        self.transport.clear()
+
+        self.assertServerResponse(
+            'AUTH LOGIN\r\n',
+            ["334 " + "User Name\0".encode('base64').strip()])
+
+        self.assertServerResponse(
+            'username'.encode('base64') + '\r\n',
+            ["334 " + "Password\0".encode('base64').strip()])
+
+        self.assertServerResponse(
+            'password'.encode('base64').strip() + '\r\n',
+            [])
+
+        self.assertServerAuthenticated(loginArgs)
+
+
+    def test_plainAuthenticationEmptyPassword(self):
+        """
+        Test that giving an empty password for plain auth succeeds.
+        """
+        loginArgs = []
+        self.server.portal = self.portalFactory(loginArgs)
+
+        self.server.dataReceived('EHLO\r\n')
+        self.transport.clear()
+
+        self.assertServerResponse(
+            'AUTH LOGIN\r\n',
+            ["334 " + "User Name\0".encode('base64').strip()])
+
+        self.assertServerResponse(
+            'username'.encode('base64') + '\r\n',
+            ["334 " + "Password\0".encode('base64').strip()])
+
+        self.assertServerResponse('\r\n', [])
+        self.assertServerAuthenticated(loginArgs, password='')
+
+
+    def test_plainAuthenticationInitialResponse(self):
+        """
+        The response to the first challenge may be included on the AUTH command
+        line.  Test that this is also supported.
+        """
+        loginArgs = []
+        self.server.portal = self.portalFactory(loginArgs)
+
+        self.server.dataReceived('EHLO\r\n')
+        self.transport.clear()
+
+        self.assertServerResponse(
+            'AUTH LOGIN ' + "username".encode('base64').strip() + '\r\n',
+            ["334 " + "Password\0".encode('base64').strip()])
+
+        self.assertServerResponse(
+            'password'.encode('base64').strip() + '\r\n',
+            [])
+
+        self.assertServerAuthenticated(loginArgs)
+
+
+    def test_abortAuthentication(self):
+        """
+        Test that a challenge/response sequence can be aborted by the client.
+        """
+        loginArgs = []
+        self.server.portal = self.portalFactory(loginArgs)
+
+        self.server.dataReceived('EHLO\r\n')
+        self.server.dataReceived('AUTH LOGIN\r\n')
+
+        self.assertServerResponse(
+            '*\r\n',
+            ['501 Authentication aborted'])
+
+
+    def test_invalidBase64EncodedResponse(self):
+        """
+        Test that a response which is not properly Base64 encoded results in
+        the appropriate error code.
+        """
+        loginArgs = []
+        self.server.portal = self.portalFactory(loginArgs)
+
+        self.server.dataReceived('EHLO\r\n')
+        self.server.dataReceived('AUTH LOGIN\r\n')
+
+        self.assertServerResponse(
+            'x\r\n',
+            ['501 Syntax error in parameters or arguments'])
+
+        self.assertEqual(loginArgs, [])
+
+
+    def test_invalidBase64EncodedInitialResponse(self):
+        """
+        Like L{test_invalidBase64EncodedResponse} but for the case of an
+        initial response included with the C{AUTH} command.
+        """
+        loginArgs = []
+        self.server.portal = self.portalFactory(loginArgs)
+
+        self.server.dataReceived('EHLO\r\n')
+        self.assertServerResponse(
+            'AUTH LOGIN x\r\n',
+            ['501 Syntax error in parameters or arguments'])
+
+        self.assertEqual(loginArgs, [])
+
+
+    def test_unexpectedLoginFailure(self):
+        """
+        If the L{Deferred} returned by L{Portal.login} fires with an
+        exception of any type other than L{UnauthorizedLogin}, the exception
+        is logged and the client is informed that the authentication attempt
+        has failed.
+        """
+        loginArgs = []
+        self.server.portal = self.portalFactory(loginArgs)
+
+        self.server.dataReceived('EHLO\r\n')
+        self.transport.clear()
+
+        self.assertServerResponse(
+            'AUTH LOGIN ' + 'username'.encode('base64').strip() + '\r\n',
+            ['334 ' + 'Password\0'.encode('base64').strip()])
+        self.assertServerResponse(
+            'password'.encode('base64').strip() + '\r\n',
+            [])
+
+        d, credentials, mind, interfaces = loginArgs.pop()
+        d.errback(RuntimeError("Something wrong with the server"))
+
+        self.assertEqual(
+            '451 Requested action aborted: local error in processing\r\n',
+            self.transport.value())
+
+        self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1)
+
+
+
+class SMTPClientErrorTestCase(unittest.TestCase):
+    """
+    Tests for L{smtp.SMTPClientError}.
+    """
+    def test_str(self):
+        """
+        The string representation of a L{SMTPClientError} instance includes
+        the response code and response string.
+        """
+        err = smtp.SMTPClientError(123, "some text")
+        self.assertEqual(str(err), "123 some text")
+
+
+    def test_strWithNegativeCode(self):
+        """
+        If the response code supplied to L{SMTPClientError} is negative, it
+        is excluded from the string representation.
+        """
+        err = smtp.SMTPClientError(-1, "foo bar")
+        self.assertEqual(str(err), "foo bar")
+
+
+    def test_strWithLog(self):
+        """
+        If a line log is supplied to L{SMTPClientError}, its contents are
+        included in the string representation of the exception instance.
+        """
+        log = LineLog(10)
+        log.append("testlog")
+        log.append("secondline")
+        err = smtp.SMTPClientError(100, "test error", log=log.str())
+        self.assertEqual(
+            str(err),
+            "100 test error\n"
+            "testlog\n"
+            "secondline\n")
+
+
+
+class SenderMixinSentMailTests(unittest.TestCase):
+    """
+    Tests for L{smtp.SenderMixin.sentMail}, used in particular by
+    L{smtp.SMTPSenderFactory} and L{smtp.ESMTPSenderFactory}.
+    """
+    def test_onlyLogFailedAddresses(self):
+        """
+        L{smtp.SenderMixin.sentMail} adds only the addresses with failing
+        SMTP response codes to the log passed to the factory's errback.
+        """
+        onDone = self.assertFailure(defer.Deferred(), smtp.SMTPDeliveryError)
+        onDone.addCallback(lambda e: self.assertEqual(
+                e.log, "bob at example.com: 199 Error in sending.\n"))
+
+        clientFactory = smtp.SMTPSenderFactory(
+            'source@address', 'recipient@address',
+            StringIO("Message body"), onDone,
+            retries=0, timeout=0.5)
+
+        client = clientFactory.buildProtocol(
+            address.IPv4Address('TCP', 'example.net', 25))
+
+        addresses = [("alice at example.com", 200, "No errors here!"),
+                     ("bob at example.com", 199, "Error in sending.")]
+        client.sentMail(199, "Test response", 1, addresses, client.log)
+
+        return onDone
diff --git a/ThirdParty/Twisted/twisted/mail/topfiles/NEWS b/ThirdParty/Twisted/twisted/mail/topfiles/NEWS
new file mode 100644
index 0000000..c9cbbdb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/topfiles/NEWS
@@ -0,0 +1,328 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted Mail 12.3.0 (2012-12-20)
+================================
+
+Bugfixes
+--------
+ - twisted.mail.imap4._FetchParser now raises
+   IllegalClientResponse("Invalid Argument") when protocol encounters
+   extra bytes at the end of a valid FETCH command. (#4000)
+
+Improved Documentation
+----------------------
+ - twisted.mail.tap now documents example usage in its longdesc
+   output for the 'mail' plugin (#5922)
+
+Other
+-----
+ - #3751
+
+
+Twisted Mail 12.2.0 (2012-08-26)
+================================
+
+Bugfixes
+--------
+ - twisted.mail.imap4.IMAP4Server will now generate an error response
+   when it receives an illegal SEARCH term from a client. (#4080)
+ - twisted.mail.imap4 now serves BODYSTRUCTURE responses which provide
+   more information and conform to the IMAP4 RFC more closely. (#5763)
+
+Deprecations and Removals
+-------------------------
+ - twisted.mail.protocols.SSLContextFactory is now deprecated. (#4963)
+ - The --passwordfile option to twistd mail is now removed. (#5541)
+
+Other
+-----
+ - #5697, #5750, #5751, #5783
+
+
+Twisted Mail 12.1.0 (2012-06-02)
+================================
+
+Bugfixes
+--------
+ - twistd mail --auth, broken in 11.0, now correctly connects
+   authentication to the portal being used (#5219)
+
+Other
+-----
+ - #5686
+
+
+Twisted Mail 12.0.0 (2012-02-10)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Mail 11.1.0 (2011-11-15)
+================================
+
+Features
+--------
+ - twisted.mail.smtp.LOGINCredentials now generates challenges with
+   ":" instead of "\0" for interoperability with Microsoft Outlook.
+   (#4692)
+
+Bugfixes
+--------
+ - When run from an unpacked source tarball or a VCS checkout,
+   bin/mail/mailmail will now use the version of Twisted it is part
+   of. (#3526)
+
+Other
+-----
+ - #4796, #5006
+
+
+Twisted Mail 11.0.0 (2011-04-01)
+================================
+
+Features
+--------
+ - The `twistd mail` command line now accepts endpoint descriptions
+   for POP3 and SMTP servers. (#4739)
+ - The twistd mail plugin now accepts new authentication options via
+   strcred.AuthOptionMixin.  These include --auth, --auth-help, and
+   authentication type-specific help options. (#4740)
+
+Bugfixes
+--------
+ - twisted.mail.imap4.IMAP4Server now generates INTERNALDATE strings
+   which do not consider the locale. (#4937)
+
+Improved Documentation
+----------------------
+ - Added a simple SMTP example, showing how to use sendmail. (#4042)
+
+Other
+-----
+
+ - #4162
+
+
+Twisted Mail 10.2.0 (2010-11-29)
+================================
+
+Improved Documentation
+----------------------
+ - The email server example now demonstrates how to set up
+   authentication and authorization using twisted.cred. (#4609)
+
+Deprecations and Removals
+-------------------------
+ - twisted.mail.smtp.sendEmail, deprecated since mid 2003 (before
+   Twisted 2.0), has been removed. (#4529)
+
+Other
+-----
+ - #4038, #4572
+
+
+Twisted Mail 10.1.0 (2010-06-27)
+================================
+
+Bugfixes
+--------
+ - twisted.mail.imap4.IMAP4Server no longer fails on search queries
+   that contain wildcards. (#2278)
+ - A case which would cause twisted.mail.imap4.IMAP4Server to loop
+   indefinitely when handling a search command has been fixed. (#4385)
+
+Other
+-----
+ - #4069, #4271, #4467
+
+
+Twisted Mail 10.0.0 (2010-03-01)
+================================
+
+Bugfixes
+--------
+ - twisted.mail.smtp.ESMTPClient and
+   twisted.mail.smtp.LOGINAuthenticator now implement the (obsolete)
+   LOGIN SASL mechanism according to the draft specification. (#4031)
+
+ - twisted.mail.imap4.IMAP4Client will no longer misparse all html-
+   formatted message bodies received in response to a fetch command.
+   (#4049)
+
+ - The regression in IMAP4 search handling of "OR" and "NOT" terms has
+   been fixed. (#4178)
+
+Other
+-----
+ - #4028, #4170, #4200
+
+
+Twisted Mail 9.0.0 (2009-11-24)
+===============================
+
+Features
+--------
+ - maildir.StringListMailbox, an in-memory maildir mailbox, now supports
+   deletion, undeletion, and syncing (#3547)
+ - SMTPClient's callbacks are now more completely documented (#684)
+
+Fixes
+-----
+ - Parse UNSEEN response data and include it in the result of
+   IMAP4Client.examine (#3550)
+ - The IMAP4 client now delivers more unsolicited server responses to callbacks
+   rather than ignoring them, and also won't ignore solicited responses that
+   arrive on the same line as an unsolicited one (#1105)
+ - Several bugs in the SMTP client's idle timeout support were fixed (#3641,
+   #1219)
+ - A case where the SMTP client could skip some recipients when retrying
+   delivery has been fixed (#3638)
+ - Errors during certain data transfers will no longer be swallowed. They will
+   now bubble up to the higher-level API (such as the sendmail function) (#3642)
+ - Escape sequences inside quoted strings in IMAP4 should now be parsed
+   correctly by the IMAP4 server protocol (#3659)
+ - The "imap4-utf-7" codec that is registered by twisted.mail.imap4 had a number
+   of fixes that allow it to work better with the Python codecs system, and to
+   actually work (#3663)
+ - The Maildir implementation now ensures time-based ordering of filenames so
+   that the lexical sorting of messages matches the order in which they were
+   received (#3812)
+ - SASL PLAIN credentials generated by the IMAP4 protocol implementations
+   (client and server) should now be RFC-compliant (#3939)
+ - Searching for a set of sequences using the IMAP4 "SEARCH" command should 
+   now work on the IMAP4 server protocol implementation. This at least improves
+   support for the Pine mail client (#1977)
+
+Other
+-----
+ - #2763, #3647, #3750, #3819, #3540, #3846, #2023, #4050
+
+
+Mail 8.2.0 (2008-12-16)
+=======================
+
+Fixes
+-----
+ - The mailmail tool now provides better error messages for usage errors (#3339)
+ - The SMTP protocol implementation now works on PyPy (#2976)
+
+Other
+-----
+ - #3475
+
+
+8.1.0 (2008-05-18)
+==================
+
+Fixes
+-----
+ - The deprecated mktap API is no longer used (#3127)
+
+
+8.0.0 (2008-03-17)
+==================
+
+Features
+--------
+ - Support CAPABILITY responses that include atoms of the form "FOO" and
+   "FOO=BAR" in IMAP4 (#2695)
+ - Parameterize error handling behavior of imap4.encoder and imap4.decoder.
+   (#2929)
+
+Fixes
+-----
+ - Handle empty passwords in SMTP auth. (#2521)
+ - Fix IMAP4Client's parsing of literals which are not preceded by whitespace.
+   (#2700)
+ - Handle MX lookup succeeding without answers. (#2807)
+ - Fix issues with aliases(5) process support. (#2729)
+
+Misc
+----
+ - #2371, #2123, #2378, #739, #2640, #2746, #1917, #2266, #2864, #2832, #2063,
+   #2865, #2847
+
+
+0.4.0 (2007-01-06)
+==================
+
+Features
+--------
+ - Plaintext POP3 logins are now possible over SSL or TLS (#1809)
+
+Fixes
+-----
+ - ESMTP servers now greet with an "ESMTP" string (#1891)
+ - The POP3 client can now correctly deal with concurrent POP3
+   retrievals (#1988, #1691)
+ - In the IMAP4 server, a bug involving retrieving the first part
+   of a single-part message was fixed. This improves compatibility
+   with Pine (#1978)
+ - A bug in the IMAP4 server which caused corruption under heavy
+   pipelining was fixed (#1992)
+ - More strict support for the AUTH command was added to the SMTP
+   server, to support the AUTH <mechanism>
+   <initial-authentication-data> form of the command (#1552)
+ - An SMTP bug involving the interaction with validateFrom, which
+   caused multiple conflicting SMTP messages to be sent over the wire,
+   was fixed (#2158)
+
+Misc
+----
+ - #1648, #1801, #1636, #2003, #1936, #1202, #2051, #2072, #2248, #2250
+
+0.3.0 (2006-05-21)
+==================
+
+Features
+--------
+  - Support Deferred results from POP3's IMailbox.listMessages (#1701).
+
+Fixes
+-----
+  - Quote usernames and passwords automatically in the IMAP client (#1411).
+  - Improved parsing of literals in IMAP4 client and server (#1417).
+  - Recognize unsolicited FLAGS response in IMAP4 client (#1105).
+  - Parse and respond to requests with multiple BODY arguments in IMAP4
+    server (#1307).
+  - Misc: #1356, #1290, #1602
+
+0.2.0:
+ - SMTP server:
+   - Now gives application-level code opportunity to set a different
+     Received header for each recipient of a multi-recipient message.
+ - IMAP client:
+   - New `startTLS' method to allow explicit negotiation of transport
+     security.
+ - POP client:
+   - Support for per-command timeouts
+   - New `startTLS' method, similar to the one added to the IMAP
+     client.
+   - NOOP, RSET, and STAT support added
+ - POP server:
+   - Bug handling passwords of "" fixed
+
+
+0.1.0:
+ - Tons of bugfixes in IMAP4, POP3, and SMTP protocols
+ - Maildir append support
+ - Brand new, improved POP3 client (twisted.mail.pop3.AdvancedPOP3Client)
+ - Deprecated the old POP3 client (twisted.mail.pop3.POP3Client)
+ - SMTP client:
+   - Support SMTP AUTH
+   - Allow user to supply SSL context
+   - Improved error handling, via new exception classes and an overridable
+     hook to customize handling.
+   - The order in which authentication schemes are tried is user-definable.
+   - Timeout support.
+ - SMTP server:
+   - Properly understand <> sender.
+   - Parameterize remote port
+ - IMAP4:
+   - LOGIN authentication compatibility improved
+   - Improved unicode mailbox support
+   - Fix parsing/handling of "FETCH BODY[HEADER]"
+   - Many many quoting fixes
+   - Timeout support on client
diff --git a/ThirdParty/Twisted/twisted/mail/topfiles/README b/ThirdParty/Twisted/twisted/mail/topfiles/README
new file mode 100644
index 0000000..1f548cf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/topfiles/README
@@ -0,0 +1,6 @@
+Twisted Mail 12.3.0
+
+Twisted Mail depends on Twisted Core and (sometimes) Twisted Names.  For TLS
+support, pyOpenSSL (<http://launchpad.net/pyopenssl>) is also required.  Aside
+from the protocol implementations, much of Twisted Mail runs only on POSIX
+platforms.
diff --git a/ThirdParty/Twisted/twisted/mail/topfiles/setup.py b/ThirdParty/Twisted/twisted/mail/topfiles/setup.py
new file mode 100644
index 0000000..d14fb6b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/mail/topfiles/setup.py
@@ -0,0 +1,50 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+try:
+    from twisted.python import dist
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+if __name__ == '__main__':
+    if sys.version_info[:2] >= (2, 4):
+        extraMeta = dict(
+            classifiers=[
+                "Development Status :: 4 - Beta",
+                "Environment :: No Input/Output (Daemon)",
+                "Intended Audience :: Developers",
+                "License :: OSI Approved :: MIT License",
+                "Programming Language :: Python",
+                "Topic :: Communications :: Email :: Post-Office :: IMAP",
+                "Topic :: Communications :: Email :: Post-Office :: POP3",
+                "Topic :: Software Development :: Libraries :: Python Modules",
+            ])
+    else:
+        extraMeta = {}
+
+    dist.setup(
+        twisted_subproject="mail",
+        scripts=dist.getScripts("mail"),
+        # metadata
+        name="Twisted Mail",
+        description="A Twisted Mail library, server and client.",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python at twistedmatrix.com",
+        maintainer="Jp Calderone",
+        url="http://twistedmatrix.com/trac/wiki/TwistedMail",
+        license="MIT",
+        long_description="""\
+An SMTP, IMAP and POP protocol implementation together with clients
+and servers.
+
+Twisted Mail contains high-level, efficient protocol implementations
+for both clients and servers of SMTP, POP3, and IMAP4. Additionally,
+it contains an "out of the box" combination SMTP/POP3 virtual-hosting
+mail server. Also included is a read/write Maildir implementation and
+a basic Mail Exchange calculator.
+""",
+        **extraMeta)
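+
+# Editor's note -- an illustrative sketch, not part of the upstream file: this
+# script assumes a Twisted source checkout with the core package already
+# installed (see the ImportError message above), and is then run with the
+# standard distutils invocation:
+#
+#   python setup.py install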
diff --git a/ThirdParty/Twisted/twisted/manhole/__init__.py b/ThirdParty/Twisted/twisted/manhole/__init__.py
new file mode 100644
index 0000000..64e2bbc
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/__init__.py
@@ -0,0 +1,8 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Twisted Manhole: interactive interpreter and direct manipulation support for Twisted.
+"""
diff --git a/ThirdParty/Twisted/twisted/manhole/_inspectro.py b/ThirdParty/Twisted/twisted/manhole/_inspectro.py
new file mode 100644
index 0000000..430ae7b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/_inspectro.py
@@ -0,0 +1,369 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""An input/output window for the glade reactor inspector.
+"""
+
+import time
+import gtk
+import gobject
+import gtk.glade
+from twisted.python.util import sibpath
+from twisted.python import reflect
+
+from twisted.manhole.ui import gtk2manhole
+from twisted.python.components import Adapter, registerAdapter
+from twisted.python import log
+from twisted.protocols import policies
+from zope.interface import implements, Interface
+
+# the glade file uses stock icons, which requires gnome to be installed
+import gnome
+version = "$Revision: 1.1 $"[11:-2]
+gnome.init("gladereactor Inspector", version)
+
+class ConsoleOutput(gtk2manhole.ConsoleOutput):
+    def _captureLocalLog(self):
+        self.fobs = log.FileLogObserver(gtk2manhole._Notafile(self, "log"))
+        self.fobs.start()
+
+    def stop(self):
+        self.fobs.stop()
+        del self.fobs
+
+class ConsoleInput(gtk2manhole.ConsoleInput):
+    def sendMessage(self):
+        buffer = self.textView.get_buffer()
+        iter1, iter2 = buffer.get_bounds()
+        text = buffer.get_text(iter1, iter2, False)
+        self.do(text)
+
+    def do(self, text):
+        self.toplevel.do(text)
+
+class INode(Interface):
+    """A node in the inspector tree model.
+    """
+
+    def __adapt__(adaptable, default):
+        if hasattr(adaptable, "__dict__"):
+            return InstanceNode(adaptable)
+        return AttributesNode(adaptable)
+
+class InspectorNode(Adapter):
+    implements(INode)
+
+    def postInit(self, offset, parent, slot):
+        self.offset = offset
+        self.parent = parent
+        self.slot = slot
+
+    def getPath(self):
+        L = []
+        x = self
+        while x.parent is not None:
+            L.append(x.offset)
+            x = x.parent
+        L.reverse()
+        return L
+
+    def __getitem__(self, index):
+        slot, o = self.get(index)
+        n = INode(o, persist=False)
+        n.postInit(index, self, slot)
+        return n
+
+    def origstr(self):
+        return str(self.original)
+
+    def format(self):
+        return (self.slot, self.origstr())
+
+
+class ConstantNode(InspectorNode):
+    def __len__(self):
+        return 0
+
+class DictionaryNode(InspectorNode):
+    def get(self, index):
+        L = self.original.items()
+        L.sort()
+        return L[index]
+
+    def __len__(self):
+        return len(self.original)
+
+    def origstr(self):
+        return "Dictionary"
+
+class ListNode(InspectorNode):
+    def get(self, index):
+        return index, self.original[index]
+
+    def origstr(self):
+        return "List"
+
+    def __len__(self):
+        return len(self.original)
+
+class AttributesNode(InspectorNode):
+    def __len__(self):
+        return len(dir(self.original))
+
+    def get(self, index):
+        L = dir(self.original)
+        L.sort()
+        return L[index], getattr(self.original, L[index])
+
+class InstanceNode(InspectorNode):
+    def __len__(self):
+        return len(self.original.__dict__) + 1
+
+    def get(self, index):
+        if index == 0:
+            if hasattr(self.original, "__class__"):
+                v = self.original.__class__
+            else:
+                v = type(self.original)
+            return "__class__", v
+        else:
+            index -= 1
+            L = self.original.__dict__.items()
+            L.sort()
+            return L[index]
+
+import types
+
+for x in dict, types.DictProxyType:
+    registerAdapter(DictionaryNode, x, INode)
+for x in list, tuple:
+    registerAdapter(ListNode, x, INode)
+for x in int, str:
+    registerAdapter(ConstantNode, x, INode)
+
+
+class InspectorTreeModel(gtk.GenericTreeModel):
+    def __init__(self, root):
+        gtk.GenericTreeModel.__init__(self)
+        self.root = INode(root, persist=False)
+        self.root.postInit(0, None, 'root')
+
+    def on_get_flags(self):
+        return 0
+
+    def on_get_n_columns(self):
+        return 1
+
+    def on_get_column_type(self, index):
+        return gobject.TYPE_STRING
+
+    def on_get_path(self, node):
+        return node.getPath()
+
+    def on_get_iter(self, path):
+        x = self.root
+        for elem in path:
+            x = x[elem]
+        return x
+
+    def on_get_value(self, node, column):
+        return node.format()[column]
+
+    def on_iter_next(self, node):
+        try:
+            return node.parent[node.offset + 1]
+        except IndexError:
+            return None
+
+    def on_iter_children(self, node):
+        return node[0]
+
+    def on_iter_has_child(self, node):
+        return len(node)
+
+    def on_iter_n_children(self, node):
+        return len(node)
+
+    def on_iter_nth_child(self, node, n):
+        if node is None:
+            return None
+        return node[n]
+
+    def on_iter_parent(self, node):
+        return node.parent
+
+
+class Inspectro:
+    selected = None
+    def __init__(self, o=None):
+        self.xml = x = gtk.glade.XML(sibpath(__file__, "inspectro.glade"))
+        self.tree_view = x.get_widget("treeview")
+        colnames = ["Name", "Value"]
+        for i in range(len(colnames)):
+            self.tree_view.append_column(
+                gtk.TreeViewColumn(
+                colnames[i], gtk.CellRendererText(), text=i))
+        d = {}
+        for m in reflect.prefixedMethods(self, "on_"):
+            d[m.im_func.__name__] = m
+        self.xml.signal_autoconnect(d)
+        if o is not None:
+            self.inspect(o)
+        self.ns = {'inspect': self.inspect}
+        iwidget = x.get_widget('input')
+        self.input = ConsoleInput(iwidget)
+        self.input.toplevel = self
+        iwidget.connect("key_press_event", self.input._on_key_press_event)
+        self.output = ConsoleOutput(x.get_widget('output'))
+
+    def select(self, o):
+        self.selected = o
+        self.ns['it'] = o
+        self.xml.get_widget("itname").set_text(repr(o))
+        self.xml.get_widget("itpath").set_text("???")
+
+    def inspect(self, o):
+        self.model = InspectorTreeModel(o)
+        self.tree_view.set_model(self.model)
+        self.inspected = o
+
+    def do(self, command):
+        filename = '<inspector>'
+        try:
+            print repr(command)
+            try:
+                code = compile(command, filename, 'eval')
+            except:
+                code = compile(command, filename, 'single')
+            val = eval(code, self.ns, self.ns)
+            if val is not None:
+                print repr(val)
+            self.ns['_'] = val
+        except:
+            log.err()
+
+    def on_inspect(self, *a):
+        self.inspect(self.selected)
+
+    def on_inspect_new(self, *a):
+        Inspectro(self.selected)
+
+    def on_row_activated(self, tv, path, column):
+        self.select(self.model.on_get_iter(path).original)
+
+
+class LoggingProtocol(policies.ProtocolWrapper):
+    """Log network traffic."""
+
+    logging = True
+    logViewer = None
+    
+    def __init__(self, *args):
+        policies.ProtocolWrapper.__init__(self, *args)
+        self.inLog = []
+        self.outLog = []
+
+    def write(self, data):
+        if self.logging:
+            self.outLog.append((time.time(), data))
+            if self.logViewer:
+                self.logViewer.updateOut(self.outLog[-1])
+        policies.ProtocolWrapper.write(self, data)
+
+    def dataReceived(self, data):
+        if self.logging:
+            self.inLog.append((time.time(), data))
+            if self.logViewer:
+                self.logViewer.updateIn(self.inLog[-1])
+        policies.ProtocolWrapper.dataReceived(self, data)
+
+    def __repr__(self):
+        r = "wrapped " + repr(self.wrappedProtocol)
+        if self.logging:
+            r += " (logging)"
+        return r
+
+
+class LoggingFactory(policies.WrappingFactory):
+    """Wrap protocols with logging wrappers."""
+
+    protocol = LoggingProtocol
+    logging = True
+    
+    def buildProtocol(self, addr):
+        p = self.protocol(self, self.wrappedFactory.buildProtocol(addr))    
+        p.logging = self.logging
+        return p
+
+    def __repr__(self):
+        r = "wrapped " + repr(self.wrappedFactory)
+        if self.logging:
+            r += " (logging)"
+        return r
+
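+# Illustrative usage sketch (editor's addition, not upstream code; the wrapped
+# server factory below is hypothetical): any factory can be wrapped so that the
+# traffic of its connections is recorded, which is how gladereactor.py uses
+# these classes:
+#
+#   from twisted.internet import reactor
+#   logged = LoggingFactory(myServerFactory)
+#   reactor.listenTCP(8000, logged)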
+
+class LogViewer:
+    """Display log of network traffic."""
+    
+    def __init__(self, p):
+        self.p = p
+        vals = [time.time()]
+        if p.inLog:
+            vals.append(p.inLog[0][0])
+        if p.outLog:
+            vals.append(p.outLog[0][0])
+        self.startTime = min(vals)
+        p.logViewer = self
+        self.xml = x = gtk.glade.XML(sibpath(__file__, "logview.glade"))
+        self.xml.signal_autoconnect(self)
+        self.loglist = self.xml.get_widget("loglist")
+        # setup model, connect it to my treeview
+        self.model = gtk.ListStore(str, str, str)
+        self.loglist.set_model(self.model)
+        self.loglist.set_reorderable(1)
+        self.loglist.set_headers_clickable(1)
+        # self.servers.set_headers_draggable(1)
+        # add a column
+        for col in [
+            gtk.TreeViewColumn('Time',
+                               gtk.CellRendererText(),
+                               text=0),
+            gtk.TreeViewColumn('D',
+                               gtk.CellRendererText(),
+                               text=1),
+            gtk.TreeViewColumn('Data',
+                               gtk.CellRendererText(),
+                               text=2)]:
+            self.loglist.append_column(col)
+            col.set_resizable(1)
+        r = []
+        for t, data in p.inLog:
+            r.append(((str(t - self.startTime), "R", repr(data)[1:-1])))
+        for t, data in p.outLog:
+            r.append(((str(t - self.startTime), "S", repr(data)[1:-1])))
+        r.sort()
+        for i in r:
+            self.model.append(i)
+    
+    def updateIn(self, (time, data)):
+        self.model.append((str(time - self.startTime), "R", repr(data)[1:-1]))
+
+    def updateOut(self, (time, data)):
+        self.model.append((str(time - self.startTime), "S", repr(data)[1:-1]))
+
+    def on_logview_destroy(self, w):
+        self.p.logViewer = None
+        del self.p
+
+
+def main():
+    x = Inspectro()
+    x.inspect(x)
+    gtk.main()
+
+if __name__ == '__main__':
+    import sys
+    log.startLogging(sys.stdout)
+    main()
+
diff --git a/ThirdParty/Twisted/twisted/manhole/explorer.py b/ThirdParty/Twisted/twisted/manhole/explorer.py
new file mode 100644
index 0000000..30db8e2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/explorer.py
@@ -0,0 +1,654 @@
+# -*- test-case-name: twisted.test.test_explorer -*-
+# $Id: explorer.py,v 1.6 2003/02/18 21:15:30 acapnotic Exp $
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Support for python object introspection and exploration.
+
+Note that Explorers, what with their list of attributes, are much like
+manhole.coil.Configurables.  Someone should investigate this further. (TODO)
+
+Also TODO: Determine how much code in here (particularly the function
+signature stuff) can be replaced with functions available in the
+L{inspect} module available in Python 2.1.
+"""
+
+# System Imports
+import inspect, string, sys, types
+import UserDict
+
+# Twisted Imports
+from twisted.spread import pb
+from twisted.python import reflect
+
+
+True=(1==1)
+False=not True
+
+class Pool(UserDict.UserDict):
+    def getExplorer(self, object, identifier):
+        oid = id(object)
+        if oid in self.data:
+            # XXX: This potentially returns something with
+            # 'identifier' set to a different value.
+            return self.data[oid]
+        else:
+            klass = typeTable.get(type(object), ExplorerGeneric)
+            e = types.InstanceType(klass, {})
+            self.data[oid] = e
+            klass.__init__(e, object, identifier)
+            return e
+
+explorerPool = Pool()
+
+class Explorer(pb.Cacheable):
+    properties = ["id", "identifier"]
+    attributeGroups = []
+    accessors = ["get_refcount"]
+
+    id = None
+    identifier = None
+
+    def __init__(self, object, identifier):
+        self.object = object
+        self.identifier = identifier
+        self.id = id(object)
+
+        self.properties = []
+        reflect.accumulateClassList(self.__class__, 'properties',
+                                    self.properties)
+
+        self.attributeGroups = []
+        reflect.accumulateClassList(self.__class__, 'attributeGroups',
+                                    self.attributeGroups)
+
+        self.accessors = []
+        reflect.accumulateClassList(self.__class__, 'accessors',
+                                    self.accessors)
+
+    def getStateToCopyFor(self, perspective):
+        all = ["properties", "attributeGroups", "accessors"]
+        all.extend(self.properties)
+        all.extend(self.attributeGroups)
+
+        state = {}
+        for key in all:
+            state[key] = getattr(self, key)
+
+        state['view'] = pb.ViewPoint(perspective, self)
+        state['explorerClass'] = self.__class__.__name__
+        return state
+
+    def view_get_refcount(self, perspective):
+        return sys.getrefcount(self)
+
+class ExplorerGeneric(Explorer):
+    properties = ["str", "repr", "typename"]
+
+    def __init__(self, object, identifier):
+        Explorer.__init__(self, object, identifier)
+        self.str = str(object)
+        self.repr = repr(object)
+        self.typename = type(object).__name__
+
+
+class ExplorerImmutable(Explorer):
+    properties = ["value"]
+
+    def __init__(self, object, identifier):
+        Explorer.__init__(self, object, identifier)
+        self.value = object
+
+
+class ExplorerSequence(Explorer):
+    properties = ["len"]
+    attributeGroups = ["elements"]
+    accessors = ["get_elements"]
+
+    def __init__(self, seq, identifier):
+        Explorer.__init__(self, seq, identifier)
+        self.seq = seq
+        self.len = len(seq)
+
+        # Use accessor method to fill me in.
+        self.elements = []
+
+    def get_elements(self):
+        self.len = len(self.seq)
+        l = []
+        for i in xrange(self.len):
+            identifier = "%s[%s]" % (self.identifier, i)
+
+            # GLOBAL: using global explorerPool
+            l.append(explorerPool.getExplorer(self.seq[i], identifier))
+
+        return l
+
+    def view_get_elements(self, perspective):
+        # XXX: set the .elements member of all my remoteCaches
+        return self.get_elements()
+
+
+class ExplorerMapping(Explorer):
+    properties = ["len"]
+    attributeGroups = ["keys"]
+    accessors = ["get_keys", "get_item"]
+
+    def __init__(self, dct, identifier):
+        Explorer.__init__(self, dct, identifier)
+
+        self.dct = dct
+        self.len = len(dct)
+
+        # Use accessor method to fill me in.
+        self.keys = []
+
+    def get_keys(self):
+        keys = self.dct.keys()
+        self.len = len(keys)
+        l = []
+        for i in xrange(self.len):
+            identifier = "%s.keys()[%s]" % (self.identifier, i)
+
+            # GLOBAL: using global explorerPool
+            l.append(explorerPool.getExplorer(keys[i], identifier))
+
+        return l
+
+    def view_get_keys(self, perspective):
+        # XXX: set the .keys member of all my remoteCaches
+        return self.get_keys()
+
+    def view_get_item(self, perspective, key):
+        if type(key) is types.InstanceType:
+            key = key.object
+
+        item = self.dct[key]
+
+        identifier = "%s[%s]" % (self.identifier, repr(key))
+        # GLOBAL: using global explorerPool
+        item = explorerPool.getExplorer(item, identifier)
+        return item
+
+
+class ExplorerBuiltin(Explorer):
+    """
+    @ivar name: the name the function was defined as
+    @ivar doc: function's docstring, or C{None} if unavailable
+    @ivar self: if not C{None}, the function is a method of this object.
+    """
+    properties = ["doc", "name", "self"]
+    def __init__(self, function, identifier):
+        Explorer.__init__(self, function, identifier)
+        self.doc = function.__doc__
+        self.name = function.__name__
+        self.self = function.__self__
+
+
+class ExplorerInstance(Explorer):
+    """
+    Attribute groups:
+        - B{methods} -- dictionary of methods
+        - B{data} -- dictionary of data members
+
+    Note these are only the *instance* methods and members --
+    if you want the class methods, you'll have to look up the class.
+
+    TODO: Detail levels (me, me & class, me & class ancestry)
+
+    @ivar klass: the class this is an instance of.
+    """
+    properties = ["klass"]
+    attributeGroups = ["methods", "data"]
+
+    def __init__(self, instance, identifier):
+        Explorer.__init__(self, instance, identifier)
+        members = {}
+        methods = {}
+        for i in dir(instance):
+            # TODO: Make screening of private attributes configurable.
+            if i[0] == '_':
+                continue
+            mIdentifier = string.join([identifier, i], ".")
+            member = getattr(instance, i)
+            mType = type(member)
+
+            if mType is types.MethodType:
+                methods[i] = explorerPool.getExplorer(member, mIdentifier)
+            else:
+                members[i] = explorerPool.getExplorer(member, mIdentifier)
+
+        self.klass = explorerPool.getExplorer(instance.__class__,
+                                              self.identifier +
+                                              '.__class__')
+        self.data = members
+        self.methods = methods
+
+
+class ExplorerClass(Explorer):
+    """
+    @ivar name: the name the class was defined with
+    @ivar doc: the class's docstring
+    @ivar bases: a list of this class's base classes.
+    @ivar module: the module the class is defined in
+
+    Attribute groups:
+        - B{methods} -- class methods
+        - B{data} -- other members of the class
+    """
+    properties = ["name", "doc", "bases", "module"]
+    attributeGroups = ["methods", "data"]
+    def __init__(self, theClass, identifier):
+        Explorer.__init__(self, theClass, identifier)
+        if not identifier:
+            identifier = theClass.__name__
+        members = {}
+        methods = {}
+        for i in dir(theClass):
+            if (i[0] == '_') and (i != '__init__'):
+                continue
+
+            mIdentifier = string.join([identifier, i], ".")
+            member = getattr(theClass, i)
+            mType = type(member)
+
+            if mType is types.MethodType:
+                methods[i] = explorerPool.getExplorer(member, mIdentifier)
+            else:
+                members[i] = explorerPool.getExplorer(member, mIdentifier)
+
+        self.name = theClass.__name__
+        self.doc = inspect.getdoc(theClass)
+        self.data = members
+        self.methods = methods
+        self.bases = explorerPool.getExplorer(theClass.__bases__,
+                                              identifier + ".__bases__")
+        self.module = getattr(theClass, '__module__', None)
+
+
+class ExplorerFunction(Explorer):
+    properties = ["name", "doc", "file", "line","signature"]
+    """
+        name -- the name the function was defined as
+        signature -- the function's calling signature (Signature instance)
+        doc -- the function's docstring
+        file -- the file the function is defined in
+        line -- the line in the file the function begins on
+    """
+    def __init__(self, function, identifier):
+        Explorer.__init__(self, function, identifier)
+        code = function.func_code
+        argcount = code.co_argcount
+        takesList = (code.co_flags & 0x04) and 1
+        takesKeywords = (code.co_flags & 0x08) and 1
+
+        n = (argcount + takesList + takesKeywords)
+        signature = Signature(code.co_varnames[:n])
+
+        if function.func_defaults:
+            i_d = 0
+            for i in xrange(argcount - len(function.func_defaults),
+                            argcount):
+                default = function.func_defaults[i_d]
+                default = explorerPool.getExplorer(
+                    default, '%s.func_defaults[%d]' % (identifier, i_d))
+                signature.set_default(i, default)
+
+                i_d = i_d + 1
+
+        if takesKeywords:
+            signature.set_keyword(n - 1)
+
+        if takesList:
+            signature.set_varlist(n - 1 - takesKeywords)
+
+        # maybe also: function.func_globals,
+        # or at least func_globals.__name__?
+        # maybe the bytecode, for disassembly-view?
+
+        self.name = function.__name__
+        self.signature = signature
+        self.doc = inspect.getdoc(function)
+        self.file = code.co_filename
+        self.line = code.co_firstlineno
+
+
+class ExplorerMethod(ExplorerFunction):
+    properties = ["self", "klass"]
+    """
+    In addition to ExplorerFunction properties:
+        self -- the object I am bound to, or None if unbound
+        klass -- the class I am a method of
+    """
+    def __init__(self, method, identifier):
+
+        function = method.im_func
+        if type(function) is types.InstanceType:
+            function = function.__call__.im_func
+
+        ExplorerFunction.__init__(self, function, identifier)
+        self.id = id(method)
+        self.klass = explorerPool.getExplorer(method.im_class,
+                                              identifier + '.im_class')
+        self.self = explorerPool.getExplorer(method.im_self,
+                                             identifier + '.im_self')
+
+        if method.im_self:
+            # I'm a bound method -- eat the 'self' arg.
+            self.signature.discardSelf()
+
+
+class ExplorerModule(Explorer):
+    """
+    @ivar name: the name the module was defined as
+    @ivar doc: documentation string for the module
+    @ivar file: the file the module is defined in
+
+    Attribute groups:
+        - B{classes} -- the public classes provided by the module
+        - B{functions} -- the public functions provided by the module
+        - B{data} -- the public data members provided by the module
+
+    (\"Public\" is taken to be \"anything that doesn't start with _\")
+    """
+    properties = ["name","doc","file"]
+    attributeGroups = ["classes", "functions", "data"]
+
+    def __init__(self, module, identifier):
+        Explorer.__init__(self, module, identifier)
+        functions = {}
+        classes = {}
+        data = {}
+        for key, value in module.__dict__.items():
+            if key[0] == '_':
+                continue
+
+            mIdentifier = "%s.%s" % (identifier, key)
+
+            if type(value) is types.ClassType:
+                classes[key] = explorerPool.getExplorer(value,
+                                                        mIdentifier)
+            elif type(value) is types.FunctionType:
+                functions[key] = explorerPool.getExplorer(value,
+                                                          mIdentifier)
+            elif type(value) is types.ModuleType:
+                pass # pass on imported modules
+            else:
+                data[key] = explorerPool.getExplorer(value, mIdentifier)
+
+        self.name = module.__name__
+        self.doc = inspect.getdoc(module)
+        self.file = getattr(module, '__file__', None)
+        self.classes = classes
+        self.functions = functions
+        self.data = data
+
+typeTable = {types.InstanceType: ExplorerInstance,
+             types.ClassType: ExplorerClass,
+             types.MethodType: ExplorerMethod,
+             types.FunctionType: ExplorerFunction,
+             types.ModuleType: ExplorerModule,
+             types.BuiltinFunctionType: ExplorerBuiltin,
+             types.ListType: ExplorerSequence,
+             types.TupleType: ExplorerSequence,
+             types.DictType: ExplorerMapping,
+             types.StringType: ExplorerImmutable,
+             types.NoneType: ExplorerImmutable,
+             types.IntType: ExplorerImmutable,
+             types.FloatType: ExplorerImmutable,
+             types.LongType: ExplorerImmutable,
+             types.ComplexType: ExplorerImmutable,
+             }
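+
+# Illustrative sketch (editor's addition, not upstream code): the pool defined
+# above picks an Explorer subclass from typeTable based on the explored
+# object's type, e.g.
+#
+#   ex = explorerPool.getExplorer({'answer': 42}, 'myDict')
+#   isinstance(ex, ExplorerMapping)   # True
+#   ex.len, ex.identifier             # (1, 'myDict')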
+
+class Signature(pb.Copyable):
+    """I represent the signature of a callable.
+
+    Signatures are immutable, so don't expect my contents to change once
+    they've been set.
+    """
+    _FLAVOURLESS = None
+    _HAS_DEFAULT = 2
+    _VAR_LIST = 4
+    _KEYWORD_DICT = 8
+
+    def __init__(self, argNames):
+        self.name = argNames
+        self.default = [None] * len(argNames)
+        self.flavour = [None] * len(argNames)
+
+    def get_name(self, arg):
+        return self.name[arg]
+
+    def get_default(self, arg):
+        if arg is types.StringType:
+            arg = self.name.index(arg)
+
+        # Wouldn't it be nice if we just returned "None" when there
+        # wasn't a default?  Well, yes, but often times "None" *is*
+        # the default, so return a tuple instead.
+        if self.flavour[arg] == self._HAS_DEFAULT:
+            return (True, self.default[arg])
+        else:
+            return (False, None)
+
+    def set_default(self, arg, value):
+        if arg is types.StringType:
+            arg = self.name.index(arg)
+
+        self.flavour[arg] = self._HAS_DEFAULT
+        self.default[arg] = value
+
+    def set_varlist(self, arg):
+        if arg is types.StringType:
+            arg = self.name.index(arg)
+
+        self.flavour[arg] = self._VAR_LIST
+
+    def set_keyword(self, arg):
+        if arg is types.StringType:
+            arg = self.name.index(arg)
+
+        self.flavour[arg] = self._KEYWORD_DICT
+
+    def is_varlist(self, arg):
+        if arg is types.StringType:
+            arg = self.name.index(arg)
+
+        return (self.flavour[arg] == self._VAR_LIST)
+
+    def is_keyword(self, arg):
+        if arg is types.StringType:
+            arg = self.name.index(arg)
+
+        return (self.flavour[arg] == self._KEYWORD_DICT)
+
+    def discardSelf(self):
+        """Invoke me to discard the first argument if this is a bound method.
+        """
+        ## if self.name[0] != 'self':
+        ##    log.msg("Warning: Told to discard self, but name is %s" %
+        ##            self.name[0])
+        self.name = self.name[1:]
+        self.default.pop(0)
+        self.flavour.pop(0)
+
+    def getStateToCopy(self):
+        return {'name': tuple(self.name),
+                'flavour': tuple(self.flavour),
+                'default': tuple(self.default)}
+
+    def __len__(self):
+        return len(self.name)
+
+    def __str__(self):
+        arglist = []
+        for arg in xrange(len(self)):
+            name = self.get_name(arg)
+            hasDefault, default = self.get_default(arg)
+            if hasDefault:
+                a = "%s=%s" % (name, default)
+            elif self.is_varlist(arg):
+                a = "*%s" % (name,)
+            elif self.is_keyword(arg):
+                a = "**%s" % (name,)
+            else:
+                a = name
+            arglist.append(a)
+
+        return string.join(arglist,", ")
+
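+# Illustrative sketch (editor's addition, not upstream code): defaults are kept
+# as (flag, value) pairs so that a default of None can be told apart from "no
+# default at all":
+#
+#   sig = Signature(['a', 'b'])
+#   sig.set_default(1, None)
+#   sig.get_default(0)   # (False, None) -- 'a' has no default
+#   sig.get_default(1)   # (True, None)  -- 'b' defaults to None
+#   str(sig)             # 'a, b=None'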
+
+
+
+
+class CRUFT_WatchyThingie:
+    # TODO:
+    #
+    #  * an exclude mechanism for the watcher's browser, to avoid
+    #    sending back large and uninteresting data structures.
+    #
+    #  * an exclude mechanism for the watcher's trigger, to avoid
+    #    triggering on some frequently-called-method-that-doesn't-
+    #    actually-change-anything.
+    #
+    #  * XXX! need removeWatch()
+
+    def watchIdentifier(self, identifier, callback):
+        """Watch the object returned by evaluating the identifier.
+
+        Whenever I think the object might have changed, I'll send an
+        ObjectLink of it to the callback.
+
+        WARNING: This calls eval() on its argument!
+        """
+        object = eval(identifier,
+                      self.globalNamespace,
+                      self.localNamespace)
+        return self.watchObject(object, identifier, callback)
+
+    def watchObject(self, object, identifier, callback):
+        """Watch the given object.
+
+        Whenever I think the object might have changed, I'll send an
+        ObjectLink of it to the callback.
+
+        The identifier argument is used to generate identifiers for
+        objects which are members of this one.
+        """
+        if type(object) is not types.InstanceType:
+            raise TypeError, "Sorry, can only place a watch on Instances."
+
+        # uninstallers = []
+
+        dct = {}
+        reflect.addMethodNamesToDict(object.__class__, dct, '')
+        for k in object.__dict__.keys():
+            dct[k] = 1
+
+        members = dct.keys()
+
+        clazzNS = {}
+        clazz = types.ClassType('Watching%s%X' %
+                                (object.__class__.__name__, id(object)),
+                                (_MonkeysSetattrMixin, object.__class__,),
+                                clazzNS)
+
+        clazzNS['_watchEmitChanged'] = types.MethodType(
+            lambda slf, i=identifier, b=self, cb=callback:
+            cb(b.browseObject(slf, i)),
+            None, clazz)
+
+        # orig_class = object.__class__
+        object.__class__ = clazz
+
+        for name in members:
+            m = getattr(object, name)
+            # Only hook bound methods.
+            if ((type(m) is types.MethodType)
+                and (m.im_self is not None)):
+                # What's the use of putting watch monkeys on methods
+                # in addition to __setattr__?  Well, um, uh, if the
+                # methods modify their attributes (i.e. add a key to
+                # a dictionary) instead of [re]setting them, then
+                # we wouldn't know about it unless we did this.
+                # (Is that convincing?)
+
+                monkey = _WatchMonkey(object)
+                monkey.install(name)
+                # uninstallers.append(monkey.uninstall)
+
+        # XXX: This probably prevents these objects from ever having a
+        # zero refcount.  Leak, Leak!
+        ## self.watchUninstallers[object] = uninstallers
+
+
+class _WatchMonkey:
+    """I hang on a method and tell you what I see.
+
+    TODO: Aya!  Now I just do browseObject all the time, but I could
+    tell you what got called with what when and returning what.
+    """
+    oldMethod = None
+
+    def __init__(self, instance):
+        """Make a monkey to hang on this instance object.
+        """
+        self.instance = instance
+
+    def install(self, methodIdentifier):
+        """Install myself on my instance in place of this method.
+        """
+        oldMethod = getattr(self.instance, methodIdentifier, None)
+
+        # XXX: this conditional probably isn't effective.
+        if oldMethod is not self:
+            # avoid triggering __setattr__
+            self.instance.__dict__[methodIdentifier] = types.MethodType(
+                self, self.instance, self.instance.__class__)
+            self.oldMethod = (methodIdentifier, oldMethod)
+
+    def uninstall(self):
+        """Remove myself from this instance and restore the original method.
+
+        (I hope.)
+        """
+        if self.oldMethod is None:
+            return
+
+        # XXX: This probably doesn't work if multiple monkeys are hanging
+        # on a method and they're not removed in order.
+        if self.oldMethod[1] is None:
+            delattr(self.instance, self.oldMethod[0])
+        else:
+            setattr(self.instance, self.oldMethod[0], self.oldMethod[1])
+
+    def __call__(self, instance, *a, **kw):
+        """Pretend to be the method I replaced, and ring the bell.
+        """
+        if self.oldMethod[1]:
+            rval = apply(self.oldMethod[1], a, kw)
+        else:
+            rval = None
+
+        instance._watchEmitChanged()
+        return rval
+
+
+class _MonkeysSetattrMixin:
+    """A mix-in class providing __setattr__ for objects being watched.
+    """
+    def __setattr__(self, k, v):
+        """Set the attribute and ring the bell.
+        """
+        if hasattr(self.__class__.__bases__[1], '__setattr__'):
+            # Hack!  Using __bases__[1] is Bad, but since we created
+            # this class, we can be reasonably sure it'll work.
+            self.__class__.__bases__[1].__setattr__(self, k, v)
+        else:
+            self.__dict__[k] = v
+
+        # XXX: Hey, waitasec, did someone just hang a new method on me?
+        #  Do I need to put a monkey on it?
+
+        self._watchEmitChanged()
diff --git a/ThirdParty/Twisted/twisted/manhole/gladereactor.glade b/ThirdParty/Twisted/twisted/manhole/gladereactor.glade
new file mode 100644
index 0000000..c78dd5a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/gladereactor.glade
@@ -0,0 +1,342 @@
+<?xml version="1.0" standalone="no"?> <!--*- mode: xml -*-->
+<!DOCTYPE glade-interface SYSTEM "http://glade.gnome.org/glade-2.0.dtd">
+
+<glade-interface>
+
+<widget class="GtkWindow" id="window1">
+  <property name="visible">True</property>
+  <property name="title" translatable="yes">Twisted Daemon</property>
+  <property name="type">GTK_WINDOW_TOPLEVEL</property>
+  <property name="window_position">GTK_WIN_POS_NONE</property>
+  <property name="modal">False</property>
+  <property name="default_width">256</property>
+  <property name="default_height">300</property>
+  <property name="resizable">True</property>
+  <property name="destroy_with_parent">False</property>
+
+  <child>
+    <widget class="GtkVBox" id="vbox1">
+      <property name="visible">True</property>
+      <property name="homogeneous">False</property>
+      <property name="spacing">0</property>
+
+      <child>
+	<widget class="GtkScrolledWindow" id="scrolledwindow2">
+	  <property name="visible">True</property>
+	  <property name="can_focus">True</property>
+	  <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+	  <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+	  <property name="shadow_type">GTK_SHADOW_NONE</property>
+	  <property name="window_placement">GTK_CORNER_TOP_LEFT</property>
+
+	  <child>
+	    <widget class="GtkTreeView" id="servertree">
+	      <property name="visible">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="headers_visible">True</property>
+	      <property name="rules_hint">False</property>
+	      <property name="reorderable">True</property>
+	      <property name="enable_search">True</property>
+	    </widget>
+	  </child>
+	</widget>
+	<packing>
+	  <property name="padding">0</property>
+	  <property name="expand">True</property>
+	  <property name="fill">True</property>
+	</packing>
+      </child>
+
+      <child>
+	<widget class="GtkHButtonBox" id="hbuttonbox1">
+	  <property name="visible">True</property>
+	  <property name="layout_style">GTK_BUTTONBOX_DEFAULT_STYLE</property>
+	  <property name="spacing">0</property>
+
+	  <child>
+	    <widget class="GtkButton" id="suspend">
+	      <property name="visible">True</property>
+	      <property name="can_default">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="relief">GTK_RELIEF_NORMAL</property>
+	      <signal name="clicked" handler="on_suspend_clicked" last_modification_time="Sun, 22 Jun 2003 05:09:20 GMT"/>
+
+	      <child>
+		<widget class="GtkAlignment" id="alignment2">
+		  <property name="visible">True</property>
+		  <property name="xalign">0.5</property>
+		  <property name="yalign">0.5</property>
+		  <property name="xscale">0</property>
+		  <property name="yscale">0</property>
+
+		  <child>
+		    <widget class="GtkHBox" id="hbox3">
+		      <property name="visible">True</property>
+		      <property name="homogeneous">False</property>
+		      <property name="spacing">2</property>
+
+		      <child>
+			<widget class="GtkImage" id="image2">
+			  <property name="visible">True</property>
+			  <property name="stock">gtk-undo</property>
+			  <property name="icon_size">4</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+
+		      <child>
+			<widget class="GtkLabel" id="label11">
+			  <property name="visible">True</property>
+			  <property name="label" translatable="yes">Suspend</property>
+			  <property name="use_underline">True</property>
+			  <property name="use_markup">False</property>
+			  <property name="justify">GTK_JUSTIFY_LEFT</property>
+			  <property name="wrap">False</property>
+			  <property name="selectable">False</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+
+	  <child>
+	    <widget class="GtkButton" id="disconnect">
+	      <property name="visible">True</property>
+	      <property name="can_default">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="relief">GTK_RELIEF_NORMAL</property>
+	      <signal name="clicked" handler="on_disconnect_clicked" last_modification_time="Sun, 22 Jun 2003 05:09:27 GMT"/>
+
+	      <child>
+		<widget class="GtkAlignment" id="alignment1">
+		  <property name="visible">True</property>
+		  <property name="xalign">0.5</property>
+		  <property name="yalign">0.5</property>
+		  <property name="xscale">0</property>
+		  <property name="yscale">0</property>
+
+		  <child>
+		    <widget class="GtkHBox" id="hbox2">
+		      <property name="visible">True</property>
+		      <property name="homogeneous">False</property>
+		      <property name="spacing">2</property>
+
+		      <child>
+			<widget class="GtkImage" id="image1">
+			  <property name="visible">True</property>
+			  <property name="stock">gtk-dialog-warning</property>
+			  <property name="icon_size">4</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+
+		      <child>
+			<widget class="GtkLabel" id="label10">
+			  <property name="visible">True</property>
+			  <property name="label" translatable="yes">Disconnect</property>
+			  <property name="use_underline">True</property>
+			  <property name="use_markup">False</property>
+			  <property name="justify">GTK_JUSTIFY_LEFT</property>
+			  <property name="wrap">False</property>
+			  <property name="selectable">False</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+
+	  <child>
+	    <widget class="GtkButton" id="inspect">
+	      <property name="visible">True</property>
+	      <property name="can_default">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="relief">GTK_RELIEF_NORMAL</property>
+	      <signal name="clicked" handler="on_inspect_clicked" last_modification_time="Wed, 17 Dec 2003 06:14:18 GMT"/>
+
+	      <child>
+		<widget class="GtkAlignment" id="alignment3">
+		  <property name="visible">True</property>
+		  <property name="xalign">0.5</property>
+		  <property name="yalign">0.5</property>
+		  <property name="xscale">0</property>
+		  <property name="yscale">0</property>
+
+		  <child>
+		    <widget class="GtkHBox" id="hbox4">
+		      <property name="visible">True</property>
+		      <property name="homogeneous">False</property>
+		      <property name="spacing">2</property>
+
+		      <child>
+			<widget class="GtkImage" id="image3">
+			  <property name="visible">True</property>
+			  <property name="stock">gtk-open</property>
+			  <property name="icon_size">4</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+
+		      <child>
+			<widget class="GtkLabel" id="label12">
+			  <property name="visible">True</property>
+			  <property name="label" translatable="yes">Inspect</property>
+			  <property name="use_underline">True</property>
+			  <property name="use_markup">False</property>
+			  <property name="justify">GTK_JUSTIFY_LEFT</property>
+			  <property name="wrap">False</property>
+			  <property name="selectable">False</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+
+	  <child>
+	    <widget class="GtkButton" id="viewlog">
+	      <property name="visible">True</property>
+	      <property name="can_default">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="relief">GTK_RELIEF_NORMAL</property>
+	      <signal name="clicked" handler="on_viewlog_clicked" last_modification_time="Sun, 04 Jan 2004 22:28:19 GMT"/>
+
+	      <child>
+		<widget class="GtkAlignment" id="alignment4">
+		  <property name="visible">True</property>
+		  <property name="xalign">0.5</property>
+		  <property name="yalign">0.5</property>
+		  <property name="xscale">0</property>
+		  <property name="yscale">0</property>
+
+		  <child>
+		    <widget class="GtkHBox" id="hbox5">
+		      <property name="visible">True</property>
+		      <property name="homogeneous">False</property>
+		      <property name="spacing">2</property>
+
+		      <child>
+			<widget class="GtkImage" id="image4">
+			  <property name="visible">True</property>
+			  <property name="stock">gtk-dialog-info</property>
+			  <property name="icon_size">4</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+
+		      <child>
+			<widget class="GtkLabel" id="label13">
+			  <property name="visible">True</property>
+			  <property name="label" translatable="yes">View Log</property>
+			  <property name="use_underline">True</property>
+			  <property name="use_markup">False</property>
+			  <property name="justify">GTK_JUSTIFY_LEFT</property>
+			  <property name="wrap">False</property>
+			  <property name="selectable">False</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+
+	  <child>
+	    <widget class="GtkButton" id="quit">
+	      <property name="visible">True</property>
+	      <property name="can_default">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="label">gtk-quit</property>
+	      <property name="use_stock">True</property>
+	      <property name="relief">GTK_RELIEF_NORMAL</property>
+	      <signal name="clicked" handler="on_quit_clicked" last_modification_time="Sun, 04 Jan 2004 22:26:43 GMT"/>
+	    </widget>
+	  </child>
+	</widget>
+	<packing>
+	  <property name="padding">0</property>
+	  <property name="expand">False</property>
+	  <property name="fill">True</property>
+	</packing>
+      </child>
+    </widget>
+  </child>
+</widget>
+
+</glade-interface>
diff --git a/ThirdParty/Twisted/twisted/manhole/gladereactor.py b/ThirdParty/Twisted/twisted/manhole/gladereactor.py
new file mode 100644
index 0000000..148fc5e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/gladereactor.py
@@ -0,0 +1,219 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+A modified gtk2 reactor with a Glade dialog in-process that allows you to stop,
+suspend, resume and inspect transports interactively.
+"""
+
+__all__ = ['install']
+
+# Twisted Imports
+from twisted.python import log, threadable, runtime, failure, util, reflect
+from twisted.internet.gtk2reactor import Gtk2Reactor as sup
+
+import gtk
+import gobject
+import gtk.glade
+
+COLUMN_DESCRIPTION = 0
+COLUMN_TRANSPORT = 1
+COLUMN_READING = 2
+COLUMN_WRITING = 3
+
+
+class GladeReactor(sup):
+    """GTK+-2 event loop reactor with GUI.
+    """
+
+    def listenTCP(self, port, factory, backlog=50, interface=''):
+        from _inspectro import LoggingFactory
+        factory = LoggingFactory(factory)
+        return sup.listenTCP(self, port, factory, backlog, interface)
+    
+    def connectTCP(self, host, port, factory, timeout=30, bindAddress=None):
+        from _inspectro import LoggingFactory
+        factory = LoggingFactory(factory)
+        return sup.connectTCP(self, host, port, factory, timeout, bindAddress)
+
+    def listenSSL(self, port, factory, contextFactory, backlog=50, interface=''):
+        from _inspectro import LoggingFactory
+        factory = LoggingFactory(factory)
+        return sup.listenSSL(self, port, factory, contextFactory, backlog, interface)
+
+    def connectSSL(self, host, port, factory, contextFactory, timeout=30, bindAddress=None):
+        from _inspectro import LoggingFactory
+        factory = LoggingFactory(factory)
+        return sup.connectSSL(self, host, port, factory, contextFactory, timeout, bindAddress)
+
+    def connectUNIX(self, address, factory, timeout=30):
+        from _inspectro import LoggingFactory
+        factory = LoggingFactory(factory)
+        return sup.connectUNIX(self, address, factory, timeout)
+
+    def listenUNIX(self, address, factory, backlog=50, mode=0666):
+        from _inspectro import LoggingFactory
+        factory = LoggingFactory(factory)
+        return sup.listenUNIX(self, address, factory, backlog, mode)
+
+    def on_disconnect_clicked(self, w):
+        store, iter = self.servers.get_selection().get_selected()
+        store[iter][COLUMN_TRANSPORT].loseConnection()
+
+    def on_viewlog_clicked(self, w):
+        store, iter = self.servers.get_selection().get_selected()
+        data = store[iter][1]
+        from _inspectro import LogViewer
+        if hasattr(data, "protocol") and not data.protocol.logViewer:
+            LogViewer(data.protocol)
+    
+    def on_inspect_clicked(self, w):
+        store, iter = self.servers.get_selection().get_selected()
+        data = store[iter]
+        from _inspectro import Inspectro
+        Inspectro(data[1])
+
+    def on_suspend_clicked(self, w):
+        store, iter = self.servers.get_selection().get_selected()
+        data = store[iter]
+        sup.removeReader(self, data[1])
+        sup.removeWriter(self, data[1])
+        if data[COLUMN_DESCRIPTION].endswith('(suspended)'):
+            if data[COLUMN_READING]:
+                sup.addReader(self, data[COLUMN_TRANSPORT])
+            if data[COLUMN_WRITING]:
+                sup.addWriter(self, data[COLUMN_TRANSPORT])
+            data[COLUMN_DESCRIPTION] = str(data[COLUMN_TRANSPORT])
+            self.toggle_suspend(1)
+        else:
+            data[0] += ' (suspended)'
+            self.toggle_suspend(0)
+
+    def toggle_suspend(self, suspending=0):
+        stock, nonstock = [('gtk-redo', 'Resume'),
+                           ('gtk-undo', 'Suspend')][suspending]
+        b = self.xml.get_widget("suspend")
+        b.set_use_stock(1)
+        b.set_label(stock)
+        b.get_child().get_child().get_children()[1].set_label(nonstock)
+
+    def servers_selection_changed(self, w):
+        store, iter = w.get_selected()
+        if iter is None:
+            self.xml.get_widget("suspend").set_sensitive(0)
+            self.xml.get_widget('disconnect').set_sensitive(0)
+        else:
+            data = store[iter]
+            self.toggle_suspend(not 
+                data[COLUMN_DESCRIPTION].endswith('(suspended)'))
+            self.xml.get_widget("suspend").set_sensitive(1)
+            self.xml.get_widget('disconnect').set_sensitive(1)
+
+    def on_quit_clicked(self, w):
+        self.stop()
+
+    def __init__(self):
+        self.xml = gtk.glade.XML(util.sibpath(__file__,"gladereactor.glade"))
+        d = {}
+        for m in reflect.prefixedMethods(self, "on_"):
+            d[m.im_func.__name__] = m
+        self.xml.signal_autoconnect(d)
+        self.xml.get_widget('window1').connect('destroy',
+                                               lambda w: self.stop())
+        self.servers = self.xml.get_widget("servertree")
+        sel = self.servers.get_selection()
+        sel.set_mode(gtk.SELECTION_SINGLE)
+        sel.connect("changed",
+                    self.servers_selection_changed)
+        ## argh coredump: self.servers_selection_changed(sel)
+        self.xml.get_widget('suspend').set_sensitive(0)
+        self.xml.get_widget('disconnect').set_sensitive(0)
+        # setup model, connect it to my treeview
+        self.model = gtk.ListStore(str, object, gobject.TYPE_BOOLEAN,
+                                   gobject.TYPE_BOOLEAN)
+        self.servers.set_model(self.model)
+        self.servers.set_reorderable(1)
+        self.servers.set_headers_clickable(1)
+        # self.servers.set_headers_draggable(1)
+        # add a column
+        for col in [
+            gtk.TreeViewColumn('Server',
+                               gtk.CellRendererText(),
+                               text=0),
+            gtk.TreeViewColumn('Reading',
+                               gtk.CellRendererToggle(),
+                               active=2),
+            gtk.TreeViewColumn('Writing',
+                               gtk.CellRendererToggle(),
+                               active=3)]:
+            
+            self.servers.append_column(col)
+            col.set_resizable(1)
+        sup.__init__(self)
+
+    def addReader(self, reader):
+        sup.addReader(self, reader)
+##      gtk docs suggest this - but it's stupid
+##         self.model.set(self.model.append(),
+##                        0, str(reader),
+##                        1, reader)
+        self._maybeAddServer(reader, read=1)
+
+    def _goAway(self,reader):
+        for p in range(len(self.model)):
+            if self.model[p][1] == reader:
+                self.model.remove(self.model.get_iter_from_string(str(p)))
+                return
+
+
+    def _maybeAddServer(self, reader, read=0, write=0):
+        p = 0
+        for x in self.model:
+            if x[1] == reader:
+                if reader == 0:
+                    reader += 1
+                x[2] += read
+                x[3] += write
+                x[2] = max(x[2],0)
+                x[3] = max(x[3],0)
+                
+                if not (x[2] or x[3]):
+                    x[0] = x[0] + '(disconnected)'
+                    self.callLater(5, self._goAway, reader)
+                return
+            p += 1
+        else:
+            read = max(read,0)
+            write = max(write, 0)
+            if read or write:
+                self.model.append((reader,reader,read,write))
+
+    def addWriter(self, writer):
+        sup.addWriter(self, writer)
+        self._maybeAddServer(writer, write=1)
+
+    def removeReader(self, reader):
+        sup.removeReader(self, reader)
+        self._maybeAddServer(reader, read=-1)
+
+    def removeWriter(self, writer):
+        sup.removeWriter(self, writer)
+        self._maybeAddServer(writer, write=-1)
+
+    def crash(self):
+        gtk.main_quit()
+
+    def run(self, installSignalHandlers=1):
+        self.startRunning(installSignalHandlers=installSignalHandlers)
+        self.simulate()
+        gtk.main()
+
+
+def install():
+    """Configure the twisted mainloop to be run inside the gtk mainloop.
+    """
+    reactor = GladeReactor()
+    from twisted.internet.main import installReactor
+    installReactor(reactor)
+    return reactor
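+
+# Usage sketch (illustrative, not part of the module): install this reactor
+# before anything else imports twisted.internet.reactor, then run as usual.
+#
+#     from twisted.manhole import gladereactor
+#     gladereactor.install()
+#     from twisted.internet import reactor
+#     reactor.run()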
diff --git a/ThirdParty/Twisted/twisted/manhole/inspectro.glade b/ThirdParty/Twisted/twisted/manhole/inspectro.glade
new file mode 100644
index 0000000..94b8717
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/inspectro.glade
@@ -0,0 +1,510 @@
+<?xml version="1.0" standalone="no"?> <!--*- mode: xml -*-->
+<!DOCTYPE glade-interface SYSTEM "http://glade.gnome.org/glade-2.0.dtd">
+
+<glade-interface>
+<requires lib="gnome"/>
+<requires lib="bonobo"/>
+
+<widget class="GnomeApp" id="app1">
+  <property name="visible">True</property>
+  <property name="title" translatable="yes">Inspectro</property>
+  <property name="type">GTK_WINDOW_TOPLEVEL</property>
+  <property name="window_position">GTK_WIN_POS_NONE</property>
+  <property name="modal">False</property>
+  <property name="default_width">640</property>
+  <property name="default_height">480</property>
+  <property name="resizable">True</property>
+  <property name="destroy_with_parent">False</property>
+  <property name="enable_layout_config">True</property>
+
+  <child internal-child="dock">
+    <widget class="BonoboDock" id="bonobodock1">
+      <property name="visible">True</property>
+      <property name="allow_floating">True</property>
+
+      <child>
+	<widget class="BonoboDockItem" id="bonobodockitem1">
+	  <property name="visible">True</property>
+	  <property name="shadow_type">GTK_SHADOW_NONE</property>
+
+	  <child>
+	    <widget class="GtkMenuBar" id="menubar1">
+	      <property name="visible">True</property>
+
+	      <child>
+		<widget class="GtkMenuItem" id="inspector1">
+		  <property name="visible">True</property>
+		  <property name="label" translatable="yes">Inspector</property>
+		  <property name="use_underline">True</property>
+
+		  <child>
+		    <widget class="GtkMenu" id="inspector1_menu">
+
+		      <child>
+			<widget class="GtkMenuItem" id="select1">
+			  <property name="visible">True</property>
+			  <property name="label" translatable="yes">Select</property>
+			  <property name="use_underline">True</property>
+			  <signal name="activate" handler="on_select" last_modification_time="Wed, 17 Dec 2003 05:05:34 GMT"/>
+			</widget>
+		      </child>
+
+		      <child>
+			<widget class="GtkMenuItem" id="inspect1">
+			  <property name="visible">True</property>
+			  <property name="label" translatable="yes">Inspect</property>
+			  <property name="use_underline">True</property>
+			  <signal name="activate" handler="on_inspect" last_modification_time="Wed, 17 Dec 2003 05:05:34 GMT"/>
+			</widget>
+		      </child>
+
+		      <child>
+			<widget class="GtkMenuItem" id="inspect_new1">
+			  <property name="visible">True</property>
+			  <property name="label" translatable="yes">Inspect New</property>
+			  <property name="use_underline">True</property>
+			  <signal name="activate" handler="on_inspect_new" last_modification_time="Wed, 17 Dec 2003 05:05:34 GMT"/>
+			</widget>
+		      </child>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+
+	      <child>
+		<widget class="GtkMenuItem" id="help1">
+		  <property name="visible">True</property>
+		  <property name="stock_item">GNOMEUIINFO_MENU_HELP_TREE</property>
+
+		  <child>
+		    <widget class="GtkMenu" id="help1_menu">
+
+		      <child>
+			<widget class="GtkImageMenuItem" id="about1">
+			  <property name="visible">True</property>
+			  <property name="stock_item">GNOMEUIINFO_MENU_ABOUT_ITEM</property>
+			  <signal name="activate" handler="on_about1_activate" last_modification_time="Wed, 17 Dec 2003 04:48:59 GMT"/>
+			</widget>
+		      </child>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+	</widget>
+	<packing>
+	  <property name="placement">BONOBO_DOCK_TOP</property>
+	  <property name="band">0</property>
+	  <property name="position">0</property>
+	  <property name="offset">0</property>
+	  <property name="behavior">BONOBO_DOCK_ITEM_BEH_EXCLUSIVE|BONOBO_DOCK_ITEM_BEH_NEVER_VERTICAL|BONOBO_DOCK_ITEM_BEH_LOCKED</property>
+	</packing>
+      </child>
+
+      <child>
+	<widget class="BonoboDockItem" id="bonobodockitem2">
+	  <property name="visible">True</property>
+	  <property name="shadow_type">GTK_SHADOW_OUT</property>
+
+	  <child>
+	    <widget class="GtkToolbar" id="toolbar2">
+	      <property name="visible">True</property>
+	      <property name="orientation">GTK_ORIENTATION_HORIZONTAL</property>
+	      <property name="toolbar_style">GTK_TOOLBAR_BOTH</property>
+	      <property name="tooltips">True</property>
+
+	      <child>
+		<widget class="button" id="button13">
+		  <property name="visible">True</property>
+		  <property name="label" translatable="yes">Select</property>
+		  <property name="use_underline">True</property>
+		  <property name="stock_pixmap">gtk-convert</property>
+		  <signal name="clicked" handler="on_select" last_modification_time="Wed, 17 Dec 2003 05:05:14 GMT"/>
+		</widget>
+	      </child>
+
+	      <child>
+		<widget class="button" id="button14">
+		  <property name="visible">True</property>
+		  <property name="label" translatable="yes">Inspect</property>
+		  <property name="use_underline">True</property>
+		  <property name="stock_pixmap">gtk-jump-to</property>
+		  <signal name="clicked" handler="on_inspect" last_modification_time="Wed, 17 Dec 2003 05:05:02 GMT"/>
+		</widget>
+	      </child>
+
+	      <child>
+		<widget class="button" id="button15">
+		  <property name="visible">True</property>
+		  <property name="label" translatable="yes">Inspect New</property>
+		  <property name="use_underline">True</property>
+		  <property name="stock_pixmap">gtk-redo</property>
+		  <signal name="clicked" handler="on_inspect_new" last_modification_time="Wed, 17 Dec 2003 05:04:50 GMT"/>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+	</widget>
+	<packing>
+	  <property name="placement">BONOBO_DOCK_TOP</property>
+	  <property name="band">1</property>
+	  <property name="position">0</property>
+	  <property name="offset">0</property>
+	  <property name="behavior">BONOBO_DOCK_ITEM_BEH_EXCLUSIVE</property>
+	</packing>
+      </child>
+
+      <child>
+	<widget class="GtkHPaned" id="hpaned1">
+	  <property name="width_request">350</property>
+	  <property name="visible">True</property>
+	  <property name="can_focus">True</property>
+	  <property name="position">250</property>
+
+	  <child>
+	    <widget class="GtkVBox" id="vbox1">
+	      <property name="visible">True</property>
+	      <property name="homogeneous">False</property>
+	      <property name="spacing">0</property>
+
+	      <child>
+		<widget class="GtkScrolledWindow" id="scrolledwindow4">
+		  <property name="visible">True</property>
+		  <property name="can_focus">True</property>
+		  <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+		  <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+		  <property name="shadow_type">GTK_SHADOW_NONE</property>
+		  <property name="window_placement">GTK_CORNER_TOP_LEFT</property>
+
+		  <child>
+		    <widget class="GtkTreeView" id="treeview">
+		      <property name="visible">True</property>
+		      <property name="can_focus">True</property>
+		      <property name="headers_visible">True</property>
+		      <property name="rules_hint">False</property>
+		      <property name="reorderable">False</property>
+		      <property name="enable_search">True</property>
+		      <signal name="row_activated" handler="on_row_activated" last_modification_time="Wed, 17 Dec 2003 05:07:55 GMT"/>
+		    </widget>
+		  </child>
+		</widget>
+		<packing>
+		  <property name="padding">0</property>
+		  <property name="expand">True</property>
+		  <property name="fill">True</property>
+		</packing>
+	      </child>
+
+	      <child>
+		<widget class="GtkTable" id="table1">
+		  <property name="visible">True</property>
+		  <property name="n_rows">2</property>
+		  <property name="n_columns">2</property>
+		  <property name="homogeneous">False</property>
+		  <property name="row_spacing">0</property>
+		  <property name="column_spacing">0</property>
+
+		  <child>
+		    <widget class="GtkLabel" id="itname">
+		      <property name="visible">True</property>
+		      <property name="label" translatable="yes">None</property>
+		      <property name="use_underline">False</property>
+		      <property name="use_markup">False</property>
+		      <property name="justify">GTK_JUSTIFY_LEFT</property>
+		      <property name="wrap">False</property>
+		      <property name="selectable">False</property>
+		      <property name="xalign">0</property>
+		      <property name="yalign">0.5</property>
+		      <property name="xpad">0</property>
+		      <property name="ypad">0</property>
+		    </widget>
+		    <packing>
+		      <property name="left_attach">1</property>
+		      <property name="right_attach">2</property>
+		      <property name="top_attach">0</property>
+		      <property name="bottom_attach">1</property>
+		      <property name="y_options"></property>
+		    </packing>
+		  </child>
+
+		  <child>
+		    <widget class="GtkLabel" id="itpath">
+		      <property name="visible">True</property>
+		      <property name="label" translatable="yes">[]</property>
+		      <property name="use_underline">False</property>
+		      <property name="use_markup">False</property>
+		      <property name="justify">GTK_JUSTIFY_LEFT</property>
+		      <property name="wrap">False</property>
+		      <property name="selectable">False</property>
+		      <property name="xalign">0</property>
+		      <property name="yalign">0.5</property>
+		      <property name="xpad">0</property>
+		      <property name="ypad">0</property>
+		    </widget>
+		    <packing>
+		      <property name="left_attach">1</property>
+		      <property name="right_attach">2</property>
+		      <property name="top_attach">1</property>
+		      <property name="bottom_attach">2</property>
+		      <property name="y_options"></property>
+		    </packing>
+		  </child>
+
+		  <child>
+		    <widget class="GtkLabel" id="label1">
+		      <property name="visible">True</property>
+		      <property name="label" translatable="yes">It: </property>
+		      <property name="use_underline">False</property>
+		      <property name="use_markup">False</property>
+		      <property name="justify">GTK_JUSTIFY_RIGHT</property>
+		      <property name="wrap">False</property>
+		      <property name="selectable">False</property>
+		      <property name="xalign">0</property>
+		      <property name="yalign">0.5</property>
+		      <property name="xpad">0</property>
+		      <property name="ypad">0</property>
+		    </widget>
+		    <packing>
+		      <property name="left_attach">0</property>
+		      <property name="right_attach">1</property>
+		      <property name="top_attach">0</property>
+		      <property name="bottom_attach">1</property>
+		      <property name="x_padding">3</property>
+		      <property name="x_options">fill</property>
+		      <property name="y_options"></property>
+		    </packing>
+		  </child>
+
+		  <child>
+		    <widget class="GtkLabel" id="label2">
+		      <property name="visible">True</property>
+		      <property name="label" translatable="yes">Path: </property>
+		      <property name="use_underline">False</property>
+		      <property name="use_markup">False</property>
+		      <property name="justify">GTK_JUSTIFY_RIGHT</property>
+		      <property name="wrap">False</property>
+		      <property name="selectable">False</property>
+		      <property name="xalign">0</property>
+		      <property name="yalign">0.5</property>
+		      <property name="xpad">0</property>
+		      <property name="ypad">0</property>
+		    </widget>
+		    <packing>
+		      <property name="left_attach">0</property>
+		      <property name="right_attach">1</property>
+		      <property name="top_attach">1</property>
+		      <property name="bottom_attach">2</property>
+		      <property name="x_padding">3</property>
+		      <property name="x_options">fill</property>
+		      <property name="y_options"></property>
+		    </packing>
+		  </child>
+		</widget>
+		<packing>
+		  <property name="padding">3</property>
+		  <property name="expand">False</property>
+		  <property name="fill">True</property>
+		</packing>
+	      </child>
+	    </widget>
+	    <packing>
+	      <property name="shrink">True</property>
+	      <property name="resize">False</property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkVPaned" id="vpaned1">
+	      <property name="visible">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="position">303</property>
+
+	      <child>
+		<widget class="GtkScrolledWindow" id="scrolledwindow3">
+		  <property name="visible">True</property>
+		  <property name="can_focus">True</property>
+		  <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+		  <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+		  <property name="shadow_type">GTK_SHADOW_NONE</property>
+		  <property name="window_placement">GTK_CORNER_TOP_LEFT</property>
+
+		  <child>
+		    <widget class="GtkTextView" id="output">
+		      <property name="visible">True</property>
+		      <property name="can_focus">True</property>
+		      <property name="editable">False</property>
+		      <property name="justification">GTK_JUSTIFY_LEFT</property>
+		      <property name="wrap_mode">GTK_WRAP_NONE</property>
+		      <property name="cursor_visible">True</property>
+		      <property name="pixels_above_lines">0</property>
+		      <property name="pixels_below_lines">0</property>
+		      <property name="pixels_inside_wrap">0</property>
+		      <property name="left_margin">0</property>
+		      <property name="right_margin">0</property>
+		      <property name="indent">0</property>
+		      <property name="text" translatable="yes"></property>
+		    </widget>
+		  </child>
+		</widget>
+		<packing>
+		  <property name="shrink">False</property>
+		  <property name="resize">True</property>
+		</packing>
+	      </child>
+
+	      <child>
+		<widget class="GtkScrolledWindow" id="scrolledwindow2">
+		  <property name="visible">True</property>
+		  <property name="can_focus">True</property>
+		  <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+		  <property name="vscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+		  <property name="shadow_type">GTK_SHADOW_NONE</property>
+		  <property name="window_placement">GTK_CORNER_TOP_LEFT</property>
+
+		  <child>
+		    <widget class="GtkViewport" id="viewport1">
+		      <property name="visible">True</property>
+		      <property name="shadow_type">GTK_SHADOW_IN</property>
+
+		      <child>
+			<widget class="GtkHBox" id="hbox1">
+			  <property name="visible">True</property>
+			  <property name="homogeneous">False</property>
+			  <property name="spacing">0</property>
+
+			  <child>
+			    <widget class="GtkButton" id="button16">
+			      <property name="visible">True</property>
+			      <property name="can_focus">True</property>
+			      <property name="relief">GTK_RELIEF_NORMAL</property>
+			      <signal name="clicked" handler="on_execute" last_modification_time="Wed, 17 Dec 2003 05:06:44 GMT"/>
+
+			      <child>
+				<widget class="GtkAlignment" id="alignment2">
+				  <property name="visible">True</property>
+				  <property name="xalign">0.5</property>
+				  <property name="yalign">0.5</property>
+				  <property name="xscale">0</property>
+				  <property name="yscale">0</property>
+
+				  <child>
+				    <widget class="GtkHBox" id="hbox3">
+				      <property name="visible">True</property>
+				      <property name="homogeneous">False</property>
+				      <property name="spacing">2</property>
+
+				      <child>
+					<widget class="GtkImage" id="image2">
+					  <property name="visible">True</property>
+					  <property name="stock">gtk-execute</property>
+					  <property name="icon_size">4</property>
+					  <property name="xalign">0.5</property>
+					  <property name="yalign">0.5</property>
+					  <property name="xpad">0</property>
+					  <property name="ypad">0</property>
+					</widget>
+					<packing>
+					  <property name="padding">0</property>
+					  <property name="expand">False</property>
+					  <property name="fill">False</property>
+					</packing>
+				      </child>
+
+				      <child>
+					<widget class="GtkLabel" id="label6">
+					  <property name="visible">True</property>
+					  <property name="label" translatable="yes">>>></property>
+					  <property name="use_underline">True</property>
+					  <property name="use_markup">False</property>
+					  <property name="justify">GTK_JUSTIFY_LEFT</property>
+					  <property name="wrap">False</property>
+					  <property name="selectable">False</property>
+					  <property name="xalign">0.5</property>
+					  <property name="yalign">0.5</property>
+					  <property name="xpad">0</property>
+					  <property name="ypad">0</property>
+					</widget>
+					<packing>
+					  <property name="padding">0</property>
+					  <property name="expand">False</property>
+					  <property name="fill">False</property>
+					</packing>
+				      </child>
+				    </widget>
+				  </child>
+				</widget>
+			      </child>
+			    </widget>
+			    <packing>
+			      <property name="padding">0</property>
+			      <property name="expand">False</property>
+			      <property name="fill">False</property>
+			    </packing>
+			  </child>
+
+			  <child>
+			    <widget class="GtkTextView" id="input">
+			      <property name="height_request">25</property>
+			      <property name="visible">True</property>
+			      <property name="can_focus">True</property>
+			      <property name="has_focus">True</property>
+			      <property name="editable">True</property>
+			      <property name="justification">GTK_JUSTIFY_LEFT</property>
+			      <property name="wrap_mode">GTK_WRAP_NONE</property>
+			      <property name="cursor_visible">True</property>
+			      <property name="pixels_above_lines">0</property>
+			      <property name="pixels_below_lines">0</property>
+			      <property name="pixels_inside_wrap">0</property>
+			      <property name="left_margin">0</property>
+			      <property name="right_margin">0</property>
+			      <property name="indent">0</property>
+			      <property name="text" translatable="yes"></property>
+			    </widget>
+			    <packing>
+			      <property name="padding">0</property>
+			      <property name="expand">True</property>
+			      <property name="fill">True</property>
+			    </packing>
+			  </child>
+			</widget>
+		      </child>
+		    </widget>
+		  </child>
+		</widget>
+		<packing>
+		  <property name="shrink">False</property>
+		  <property name="resize">True</property>
+		</packing>
+	      </child>
+	    </widget>
+	    <packing>
+	      <property name="shrink">True</property>
+	      <property name="resize">True</property>
+	    </packing>
+	  </child>
+	</widget>
+      </child>
+    </widget>
+    <packing>
+      <property name="padding">0</property>
+      <property name="expand">True</property>
+      <property name="fill">True</property>
+    </packing>
+  </child>
+
+  <child internal-child="appbar">
+    <widget class="GnomeAppBar" id="appbar1">
+      <property name="visible">True</property>
+      <property name="has_progress">False</property>
+      <property name="has_status">True</property>
+    </widget>
+    <packing>
+      <property name="padding">0</property>
+      <property name="expand">True</property>
+      <property name="fill">True</property>
+    </packing>
+  </child>
+</widget>
+
+</glade-interface>
diff --git a/ThirdParty/Twisted/twisted/manhole/logview.glade b/ThirdParty/Twisted/twisted/manhole/logview.glade
new file mode 100644
index 0000000..1ec0b1f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/logview.glade
@@ -0,0 +1,39 @@
+<?xml version="1.0" standalone="no"?> <!--*- mode: xml -*-->
+<!DOCTYPE glade-interface SYSTEM "http://glade.gnome.org/glade-2.0.dtd">
+
+<glade-interface>
+
+<widget class="GtkWindow" id="logview">
+  <property name="visible">True</property>
+  <property name="title" translatable="yes">Log</property>
+  <property name="type">GTK_WINDOW_TOPLEVEL</property>
+  <property name="window_position">GTK_WIN_POS_NONE</property>
+  <property name="modal">False</property>
+  <property name="resizable">True</property>
+  <property name="destroy_with_parent">False</property>
+  <signal name="destroy" handler="on_logview_destroy" last_modification_time="Sun, 04 Jan 2004 22:16:59 GMT"/>
+
+  <child>
+    <widget class="GtkScrolledWindow" id="scrolledwindow">
+      <property name="visible">True</property>
+      <property name="can_focus">True</property>
+      <property name="hscrollbar_policy">GTK_POLICY_ALWAYS</property>
+      <property name="vscrollbar_policy">GTK_POLICY_ALWAYS</property>
+      <property name="shadow_type">GTK_SHADOW_NONE</property>
+      <property name="window_placement">GTK_CORNER_TOP_LEFT</property>
+
+      <child>
+	<widget class="GtkTreeView" id="loglist">
+	  <property name="visible">True</property>
+	  <property name="can_focus">True</property>
+	  <property name="headers_visible">True</property>
+	  <property name="rules_hint">False</property>
+	  <property name="reorderable">False</property>
+	  <property name="enable_search">True</property>
+	</widget>
+      </child>
+    </widget>
+  </child>
+</widget>
+
+</glade-interface>
diff --git a/ThirdParty/Twisted/twisted/manhole/service.py b/ThirdParty/Twisted/twisted/manhole/service.py
new file mode 100644
index 0000000..c9d4679
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/service.py
@@ -0,0 +1,399 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""L{twisted.manhole} L{PB<twisted.spread.pb>} service implementation.
+"""
+
+# twisted imports
+from twisted import copyright
+from twisted.spread import pb
+from twisted.python import log, failure
+from twisted.cred import portal
+from twisted.application import service
+from zope.interface import implements, Interface
+
+# sibling imports
+import explorer
+
+# system imports
+from cStringIO import StringIO
+
+import string
+import sys
+import traceback
+import types
+
+
+class FakeStdIO:
+    def __init__(self, type_, list):
+        self.type = type_
+        self.list = list
+
+    def write(self, text):
+        log.msg("%s: %s" % (self.type, string.strip(str(text))))
+        self.list.append((self.type, text))
+
+    def flush(self):
+        pass
+
+    def consolidate(self):
+        """Concatenate adjacent messages of same type into one.
+
+        Greatly cuts down on the number of elements, increasing
+        network transport friendliness considerably.
+        """
+        if not self.list:
+            return
+
+        inlist = self.list
+        outlist = []
+        # Track the type of the current run of messages, not the whole tuple.
+        last_type = inlist[0][0]
+        block_begin = 0
+        for i in xrange(1, len(self.list)):
+            (mtype, message) = inlist[i]
+            if mtype == last_type:
+                continue
+            else:
+                if (i - block_begin) == 1:
+                    outlist.append(inlist[block_begin])
+                else:
+                    messages = map(lambda l: l[1],
+                                   inlist[block_begin:i])
+                    message = string.join(messages, '')
+                    outlist.append((last_type, message))
+                last_type = mtype
+                block_begin = i
+        # Flush the final run and replace the contents in place, so callers
+        # holding a reference to the original list see the consolidation.
+        if (len(self.list) - block_begin) == 1:
+            outlist.append(inlist[block_begin])
+        else:
+            messages = map(lambda l: l[1], inlist[block_begin:])
+            outlist.append((last_type, string.join(messages, '')))
+        self.list[:] = outlist
+
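+# For instance (illustrative data), FakeStdIO.consolidate() turns
+#     [('stdout', 'a'), ('stdout', 'b\n'), ('stderr', 'oops\n')]
+# into
+#     [('stdout', 'ab\n'), ('stderr', 'oops\n')].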
+
+class IManholeClient(Interface):
+    def console(list_of_messages):
+        """Takes a list of (type, message) pairs to display.
+
+        Types include:
+            - \"stdout\" -- string sent to sys.stdout
+
+            - \"stderr\" -- string sent to sys.stderr
+
+            - \"result\" -- string repr of the resulting value
+                 of the expression
+
+            - \"exception\" -- a L{failure.Failure}
+        """
+
+    def receiveExplorer(xplorer):
+        """Receives an explorer.Explorer
+        """
+
+    def listCapabilities():
+        """List what manholey things I am capable of doing.
+
+        i.e. C{\"Explorer\"}, C{\"Failure\"}
+        """
+
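+# A minimal sketch of an object providing IManholeClient (illustrative only,
+# not part of this module): it echoes console messages to stdout and claims
+# the capabilities a typical client would advertise.
+class _EchoManholeClient:
+    implements(IManholeClient)
+
+    def console(self, list_of_messages):
+        for mtype, message in list_of_messages:
+            print "%s: %s" % (mtype, message)
+
+    def receiveExplorer(self, xplorer):
+        print "explorer: %r" % (xplorer,)
+
+    def listCapabilities(self):
+        return ["Explorer", "Failure"]
+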
+def runInConsole(command, console, globalNS=None, localNS=None,
+                 filename=None, args=None, kw=None, unsafeTracebacks=False):
+    """Run this, directing all output to the specified console.
+
+    If command is callable, it will be called with the args and keywords
+    provided.  Otherwise, command will be compiled and eval'd.
+    (Wouldn't you like a macro?)
+
+    Returns the command's return value.
+
+    The console is called with a list of (type, message) pairs for
+    display, see L{IManholeClient.console}.
+    """
+    output = []
+    fakeout = FakeStdIO("stdout", output)
+    fakeerr = FakeStdIO("stderr", output)
+    errfile = FakeStdIO("exception", output)
+    code = None
+    val = None
+    if filename is None:
+        filename = str(console)
+    if args is None:
+        args = ()
+    if kw is None:
+        kw = {}
+    if localNS is None:
+        localNS = globalNS
+    if (globalNS is None) and (not callable(command)):
+        raise ValueError("Need a namespace to evaluate the command in.")
+
+    try:
+        out = sys.stdout
+        err = sys.stderr
+        sys.stdout = fakeout
+        sys.stderr = fakeerr
+        try:
+            if callable(command):
+                val = apply(command, args, kw)
+            else:
+                try:
+                    code = compile(command, filename, 'eval')
+                except:
+                    code = compile(command, filename, 'single')
+
+                if code:
+                    val = eval(code, globalNS, localNS)
+        finally:
+            sys.stdout = out
+            sys.stderr = err
+    except:
+        (eType, eVal, tb) = sys.exc_info()
+        fail = failure.Failure(eVal, eType, tb)
+        del tb
+        # In CVS revision 1.35, there was some code here to fill in the
+        # source lines in the traceback for frames in the local command
+        # buffer.  But I can't figure out when that's triggered, so it's
+        # going away in the conversion to Failure, until you bring it back.
+        errfile.write(pb.failure2Copyable(fail, unsafeTracebacks))
+
+    if console:
+        fakeout.consolidate()
+        console(output)
+
+    return val
+
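+# Usage sketch (illustrative helper, not part of the public API): run a
+# callable with its output captured, collecting the (type, message) pairs
+# that would be sent to a manhole client's console.
+def _exampleRunInConsole():
+    collected = []
+    def command():
+        print "hello from the console"
+        return 42
+    value = runInConsole(command, collected.append)
+    # value is 42; collected[0] is a list of ('stdout', text) pairs.
+    return value, collected
+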
+def _failureOldStyle(fail):
+    """Pre-Failure manhole representation of exceptions.
+
+    For compatibility with manhole clients without the \"Failure\"
+    capability.
+
+    A dictionary with two members:
+        - \'traceback\' -- traceback.extract_tb output; a list of tuples
+             (filename, line number, function name, text) suitable for
+             feeding to traceback.format_list.
+
+        - \'exception\' -- a list of one or more strings, each
+             ending in a newline. (traceback.format_exception_only output)
+    """
+    import linecache
+    tb = []
+    for f in fail.frames:
+        # (filename, line number, function name, text)
+        tb.append((f[1], f[2], f[0], linecache.getline(f[1], f[2])))
+
+    return {
+        'traceback': tb,
+        'exception': traceback.format_exception_only(fail.type, fail.value)
+        }
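+
+# For example (illustrative values), a ZeroDivisionError would be rendered
+# for such clients roughly as:
+#     {'traceback': [('example.py', 3, 'divide', '    return 1 / 0\n')],
+#      'exception': ['ZeroDivisionError: integer division or modulo by zero\n']}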
+
+# Capabilities clients are likely to have before they knew how to answer a
+# "listCapabilities" query.
+_defaultCapabilities = {
+    "Explorer": 'Set'
+    }
+
+class Perspective(pb.Avatar):
+    lastDeferred = 0
+    def __init__(self, service):
+        self.localNamespace = {
+            "service": service,
+            "avatar": self,
+            "_": None,
+            }
+        self.clients = {}
+        self.service = service
+
+    def __getstate__(self):
+        state = self.__dict__.copy()
+        state['clients'] = {}
+        if state['localNamespace'].has_key("__builtins__"):
+            del state['localNamespace']['__builtins__']
+        return state
+
+    def attached(self, client, identity):
+        """A client has attached -- welcome them and add them to the list.
+        """
+        self.clients[client] = identity
+
+        host = ':'.join(map(str, client.broker.transport.getHost()[1:]))
+
+        msg = self.service.welcomeMessage % {
+            'you': getattr(identity, 'name', str(identity)),
+            'host': host,
+            'longversion': copyright.longversion,
+            }
+
+        client.callRemote('console', [("stdout", msg)])
+
+        client.capabilities = _defaultCapabilities
+        client.callRemote('listCapabilities').addCallbacks(
+            self._cbClientCapable, self._ebClientCapable,
+            callbackArgs=(client,),errbackArgs=(client,))
+
+    def detached(self, client, identity):
+        try:
+            del self.clients[client]
+        except KeyError:
+            pass
+
+    def runInConsole(self, command, *args, **kw):
+        """Convience method to \"runInConsole with my stuff\".
+        """
+        return runInConsole(command,
+                            self.console,
+                            self.service.namespace,
+                            self.localNamespace,
+                            str(self.service),
+                            args=args,
+                            kw=kw,
+                            unsafeTracebacks=self.service.unsafeTracebacks)
+
+
+    ### Methods for communicating to my clients.
+
+    def console(self, message):
+        """Pass a message to my clients' console.
+        """
+        clients = self.clients.keys()
+        origMessage = message
+        compatMessage = None
+        for client in clients:
+            try:
+                if "Failure" not in client.capabilities:
+                    if compatMessage is None:
+                        compatMessage = origMessage[:]
+                        for i in xrange(len(message)):
+                            if ((message[i][0] == "exception") and
+                                isinstance(message[i][1], failure.Failure)):
+                                compatMessage[i] = (
+                                    message[i][0],
+                                    _failureOldStyle(message[i][1]))
+                    client.callRemote('console', compatMessage)
+                else:
+                    client.callRemote('console', message)
+            except pb.ProtocolError:
+                # Stale broker.
+                self.detached(client, None)
+
+    def receiveExplorer(self, objectLink):
+        """Pass an Explorer on to my clients.
+        """
+        clients = self.clients.keys()
+        for client in clients:
+            try:
+                client.callRemote('receiveExplorer', objectLink)
+            except pb.ProtocolError:
+                # Stale broker.
+                self.detached(client, None)
+
+
+    def _cbResult(self, val, dnum):
+        self.console([('result', "Deferred #%s Result: %r\n" %(dnum, val))])
+        return val
+
+    def _cbClientCapable(self, capabilities, client):
+        log.msg("client %x has %s" % (id(client), capabilities))
+        client.capabilities = capabilities
+
+    def _ebClientCapable(self, reason, client):
+        reason.trap(AttributeError)
+        log.msg("Couldn't get capabilities from %s, assuming defaults." %
+                (client,))
+
+    ### perspective_ methods, commands used by the client.
+
+    def perspective_do(self, expr):
+        """Evaluate the given expression, with output to the console.
+
+        The result is stored in the local variable '_', and its repr()
+        string is sent to the console as a \"result\" message.
+        """
+        log.msg(">>> %s" % expr)
+        val = self.runInConsole(expr)
+        if val is not None:
+            self.localNamespace["_"] = val
+            from twisted.internet.defer import Deferred
+            # TODO: client support for Deferred.
+            if isinstance(val, Deferred):
+                self.lastDeferred += 1
+                self.console([('result', "Waiting for Deferred #%s...\n" % self.lastDeferred)])
+                val.addBoth(self._cbResult, self.lastDeferred)
+            else:
+                self.console([("result", repr(val) + '\n')])
+        log.msg("<<<")
+
+    def perspective_explore(self, identifier):
+        """Browse the object obtained by evaluating the identifier.
+
+        The resulting ObjectLink is passed back through the client's
+        receiveBrowserObject method.
+        """
+        object = self.runInConsole(identifier)
+        if object:
+            expl = explorer.explorerPool.getExplorer(object, identifier)
+            self.receiveExplorer(expl)
+
+    def perspective_watch(self, identifier):
+        """Watch the object obtained by evaluating the identifier.
+
+        Whenever I think this object might have changed, I will pass
+        an ObjectLink of it back to the client's receiveBrowserObject
+        method.
+        """
+        raise NotImplementedError
+        object = self.runInConsole(identifier)
+        if object:
+            # Return an ObjectLink of this right away, before the watch.
+            oLink = self.runInConsole(self.browser.browseObject,
+                                      object, identifier)
+            self.receiveExplorer(oLink)
+
+            self.runInConsole(self.browser.watchObject,
+                              object, identifier,
+                              self.receiveExplorer)
+
+
+class Realm:
+
+    implements(portal.IRealm)
+
+    def __init__(self, service):
+        self.service = service
+        self._cache = {}
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        if pb.IPerspective not in interfaces:
+            raise NotImplementedError("no interface")
+        if avatarId in self._cache:
+            p = self._cache[avatarId]
+        else:
+            p = Perspective(self.service)
+        p.attached(mind, avatarId)
+        def detached():
+            p.detached(mind, avatarId)
+        return (pb.IPerspective, p, detached)
+
+
+class Service(service.Service):
+
+    welcomeMessage = (
+        "\nHello %(you)s, welcome to Manhole "
+        "on %(host)s.\n"
+        "%(longversion)s.\n\n")
+
+    def __init__(self, unsafeTracebacks=False, namespace=None):
+        self.unsafeTracebacks = unsafeTracebacks
+        self.namespace = {
+            '__name__': '__manhole%x__' % (id(self),),
+            'sys': sys
+            }
+        if namespace:
+            self.namespace.update(namespace)
+
+    def __getstate__(self):
+        """This returns the persistent state of this shell factory.
+        """
+        # TODO -- refactor this and twisted.reality.author.Author to
+        # use common functionality (perhaps the 'code' module?)
+        dict = self.__dict__.copy()
+        ns = dict['namespace'].copy()
+        dict['namespace'] = ns
+        if ns.has_key('__builtins__'):
+            del ns['__builtins__']
+        return dict
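+
+# Wiring sketch (illustrative, not part of this module): expose the manhole
+# service over PB with an in-memory credentials checker.
+#
+#     from twisted.cred import checkers, portal
+#     from twisted.internet import reactor
+#     from twisted.spread import pb
+#
+#     svc = Service(namespace={'answer': 42})
+#     p = portal.Portal(Realm(svc), [
+#         checkers.InMemoryUsernamePasswordDatabaseDontUse(admin='admin')])
+#     reactor.listenTCP(8787, pb.PBServerFactory(p))
+#     reactor.run()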
diff --git a/ThirdParty/Twisted/twisted/manhole/telnet.py b/ThirdParty/Twisted/twisted/manhole/telnet.py
new file mode 100644
index 0000000..d63b3a6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/telnet.py
@@ -0,0 +1,117 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Telnet-based shell."""
+
+# twisted imports
+from twisted.protocols import telnet
+from twisted.internet import protocol
+from twisted.python import log, failure
+
+# system imports
+import string, copy, sys
+from cStringIO import StringIO
+
+
+class Shell(telnet.Telnet):
+    """A Python command-line shell."""
+    
+    def connectionMade(self):
+        telnet.Telnet.connectionMade(self)
+        self.lineBuffer = []
+    
+    def loggedIn(self):
+        self.transport.write(">>> ")
+    
+    def checkUserAndPass(self, username, password):
+        return ((self.factory.username == username) and (password == self.factory.password))
+
+    def write(self, data):
+        """Write some data to the transport.
+        """
+        self.transport.write(data)
+
+    def telnet_Command(self, cmd):
+        if self.lineBuffer:
+            if not cmd:
+                cmd = string.join(self.lineBuffer, '\n') + '\n\n\n'
+                self.doCommand(cmd)
+                self.lineBuffer = []
+                return "Command"
+            else:
+                self.lineBuffer.append(cmd)
+                self.transport.write("... ")
+                return "Command"
+        else:
+            self.doCommand(cmd)
+            return "Command"
+    
+    def doCommand(self, cmd):
+
+        # TODO -- refactor this, Reality.author.Author, and the manhole shell
+        # to use common functionality (perhaps a twisted.python.code module?)
+        fn = '$telnet$'
+        result = None
+        try:
+            out = sys.stdout
+            sys.stdout = self
+            try:
+                code = compile(cmd,fn,'eval')
+                result = eval(code, self.factory.namespace)
+            except:
+                try:
+                    code = compile(cmd, fn, 'exec')
+                    exec code in self.factory.namespace
+                except SyntaxError, e:
+                    if not self.lineBuffer and str(e)[:14] == "unexpected EOF":
+                        self.lineBuffer.append(cmd)
+                        self.transport.write("... ")
+                        return
+                    else:
+                        failure.Failure().printTraceback(file=self)
+                        log.deferr()
+                        self.write('\r\n>>> ')
+                        return
+                except:
+                    io = StringIO()
+                    failure.Failure().printTraceback(file=self)
+                    log.deferr()
+                    self.write('\r\n>>> ')
+                    return
+        finally:
+            sys.stdout = out
+        
+        self.factory.namespace['_'] = result
+        if result is not None:
+            self.transport.write(repr(result))
+            self.transport.write('\r\n')
+        self.transport.write(">>> ")
+
+
+
+class ShellFactory(protocol.Factory):
+    username = "admin"
+    password = "admin"
+    protocol = Shell
+    service = None
+
+    def __init__(self):
+        self.namespace = {
+            'factory': self,
+            'service': None,
+            '_': None
+        }
+
+    def setService(self, service):
+        self.namespace['service'] = self.service = service
+
+    def __getstate__(self):
+        """This returns the persistent state of this shell factory.
+        """
+        dict = self.__dict__
+        ns = copy.copy(dict['namespace'])
+        dict['namespace'] = ns
+        if ns.has_key('__builtins__'):
+            del ns['__builtins__']
+        return dict
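+
+# Usage sketch (illustrative, not part of this module): serve the telnet
+# shell on a local port.
+#
+#     from twisted.internet import reactor
+#     factory = ShellFactory()
+#     factory.username, factory.password = "admin", "secret"
+#     reactor.listenTCP(4040, factory, interface="127.0.0.1")
+#     reactor.run()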
diff --git a/ThirdParty/Twisted/twisted/manhole/test/__init__.py b/ThirdParty/Twisted/twisted/manhole/test/__init__.py
new file mode 100644
index 0000000..83c9ea1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.manhole}.
+"""
diff --git a/ThirdParty/Twisted/twisted/manhole/test/test_explorer.py b/ThirdParty/Twisted/twisted/manhole/test/test_explorer.py
new file mode 100644
index 0000000..a52d3c1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/test/test_explorer.py
@@ -0,0 +1,102 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.manhole.explorer}.
+"""
+
+from twisted.trial import unittest
+from twisted.manhole.explorer import (
+    CRUFT_WatchyThingie,
+    ExplorerImmutable,
+    Pool,
+    _WatchMonkey,
+    )
+
+
+class Foo:
+    """
+    Test helper.
+    """
+
+
+class PoolTestCase(unittest.TestCase):
+    """
+    Tests for the Pool class.
+    """
+
+    def test_instanceBuilding(self):
+        """
+        If the object is not in the pool a new instance is created and
+        returned.
+        """
+        p = Pool()
+        e = p.getExplorer(123, 'id')
+        self.assertIsInstance(e, ExplorerImmutable)
+        self.assertEqual(e.value, 123)
+        self.assertEqual(e.identifier, 'id')
+
+
+
+class CRUFTWatchyThingieTestCase(unittest.TestCase):
+    """
+    Tests for the CRUFT_WatchyThingie class.
+    """
+    def test_watchObjectConstructedClass(self):
+        """
+        L{CRUFT_WatchyThingie.watchObject} changes the class of its
+        first argument to a custom watching class.
+        """
+        foo = Foo()
+        cwt = CRUFT_WatchyThingie()
+        cwt.watchObject(foo, 'id', 'cback')
+
+        # check new constructed class
+        newClassName = foo.__class__.__name__
+        self.assertEqual(newClassName, "WatchingFoo%X" % (id(foo),))
+
+
+    def test_watchObjectConstructedInstanceMethod(self):
+        """
+        L{CRUFT_WatchyThingie.watchingfoo} adds a C{_watchEmitChanged}
+        attribute which refers to a bound method on the instance
+        passed to it.
+        """
+        foo = Foo()
+        cwt = CRUFT_WatchyThingie()
+        cwt.watchObject(foo, 'id', 'cback')
+
+        # check new constructed instance method
+        self.assertIdentical(foo._watchEmitChanged.im_self, foo)
+
+
+
+class WatchMonkeyTestCase(unittest.TestCase):
+    """
+    Tests for the _WatchMonkey class.
+    """
+    def test_install(self):
+        """
+        When _WatchMonkey is installed on a method, calling that
+        method calls the _WatchMonkey.
+        """
+        class Foo:
+            """
+            Helper.
+            """
+            def someMethod(self):
+                """
+                Just a method.
+                """
+
+        foo = Foo()
+        wm = _WatchMonkey(foo)
+        wm.install('someMethod')
+
+        # patch wm's method to check that the method was exchanged
+        called = []
+        wm.__call__ = lambda s: called.append(True)
+
+        # call and check
+        foo.someMethod()
+        self.assertTrue(called)
diff --git a/ThirdParty/Twisted/twisted/manhole/ui/__init__.py b/ThirdParty/Twisted/twisted/manhole/ui/__init__.py
new file mode 100644
index 0000000..14af615
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/ui/__init__.py
@@ -0,0 +1,7 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Twisted Manhole UI: User interface for direct manipulation in Twisted.
+"""
diff --git a/ThirdParty/Twisted/twisted/manhole/ui/gtk2manhole.glade b/ThirdParty/Twisted/twisted/manhole/ui/gtk2manhole.glade
new file mode 100644
index 0000000..423b3fb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/ui/gtk2manhole.glade
@@ -0,0 +1,268 @@
+<?xml version="1.0" standalone="no"?> <!--*- mode: xml -*-->
+<!DOCTYPE glade-interface SYSTEM "http://glade.gnome.org/glade-2.0.dtd">
+
+<glade-interface>
+
+<widget class="GtkWindow" id="manholeWindow">
+  <property name="visible">True</property>
+  <property name="title" translatable="yes">Manhole</property>
+  <property name="type">GTK_WINDOW_TOPLEVEL</property>
+  <property name="window_position">GTK_WIN_POS_NONE</property>
+  <property name="modal">False</property>
+  <property name="default_width">620</property>
+  <property name="default_height">320</property>
+  <property name="resizable">True</property>
+  <property name="destroy_with_parent">False</property>
+  <property name="decorated">True</property>
+  <property name="skip_taskbar_hint">False</property>
+  <property name="skip_pager_hint">False</property>
+  <property name="type_hint">GDK_WINDOW_TYPE_HINT_NORMAL</property>
+  <property name="gravity">GDK_GRAVITY_NORTH_WEST</property>
+  <signal name="delete_event" handler="_on_manholeWindow_delete_event" last_modification_time="Mon, 27 Jan 2003 05:14:26 GMT"/>
+
+  <child>
+    <widget class="GtkVBox" id="vbox1">
+      <property name="visible">True</property>
+      <property name="homogeneous">False</property>
+      <property name="spacing">0</property>
+
+      <child>
+	<widget class="GtkMenuBar" id="menubar1">
+	  <property name="visible">True</property>
+
+	  <child>
+	    <widget class="GtkMenuItem" id="menuitem4">
+	      <property name="visible">True</property>
+	      <property name="label" translatable="yes">_File</property>
+	      <property name="use_underline">True</property>
+
+	      <child>
+		<widget class="GtkMenu" id="menuitem4_menu">
+
+		  <child>
+		    <widget class="GtkImageMenuItem" id="openMenuItem">
+		      <property name="visible">True</property>
+		      <property name="label">gtk-open</property>
+		      <property name="use_stock">True</property>
+		      <signal name="activate" handler="_on_openMenuItem_activate" last_modification_time="Sun, 02 Feb 2003 18:44:51 GMT"/>
+		    </widget>
+		  </child>
+
+		  <child>
+		    <widget class="GtkImageMenuItem" id="reload_self">
+		      <property name="visible">True</property>
+		      <property name="tooltip" translatable="yes">Reload the manhole client code.  (Only useful for client development.)</property>
+		      <property name="label" translatable="yes">_Reload self</property>
+		      <property name="use_underline">True</property>
+		      <signal name="activate" handler="on_reload_self_activate" last_modification_time="Mon, 24 Feb 2003 00:15:10 GMT"/>
+
+		      <child internal-child="image">
+			<widget class="GtkImage" id="image1">
+			  <property name="visible">True</property>
+			  <property name="stock">gtk-revert-to-saved</property>
+			  <property name="icon_size">1</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+		      </child>
+		    </widget>
+		  </child>
+
+		  <child>
+		    <widget class="GtkMenuItem" id="separatormenuitem1">
+		      <property name="visible">True</property>
+		    </widget>
+		  </child>
+
+		  <child>
+		    <widget class="GtkImageMenuItem" id="quitMenuItem">
+		      <property name="visible">True</property>
+		      <property name="label">gtk-quit</property>
+		      <property name="use_stock">True</property>
+		      <signal name="activate" handler="_on_quitMenuItem_activate" last_modification_time="Sun, 02 Feb 2003 18:48:12 GMT"/>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+
+	  <child>
+	    <widget class="GtkMenuItem" id="menuitem5">
+	      <property name="visible">True</property>
+	      <property name="label" translatable="yes">_Edit</property>
+	      <property name="use_underline">True</property>
+
+	      <child>
+		<widget class="GtkMenu" id="menuitem5_menu">
+
+		  <child>
+		    <widget class="GtkImageMenuItem" id="cut1">
+		      <property name="visible">True</property>
+		      <property name="label">gtk-cut</property>
+		      <property name="use_stock">True</property>
+		      <signal name="activate" handler="on_cut1_activate" last_modification_time="Mon, 27 Jan 2003 04:50:50 GMT"/>
+		    </widget>
+		  </child>
+
+		  <child>
+		    <widget class="GtkImageMenuItem" id="copy1">
+		      <property name="visible">True</property>
+		      <property name="label">gtk-copy</property>
+		      <property name="use_stock">True</property>
+		      <signal name="activate" handler="on_copy1_activate" last_modification_time="Mon, 27 Jan 2003 04:50:50 GMT"/>
+		    </widget>
+		  </child>
+
+		  <child>
+		    <widget class="GtkImageMenuItem" id="paste1">
+		      <property name="visible">True</property>
+		      <property name="label">gtk-paste</property>
+		      <property name="use_stock">True</property>
+		      <signal name="activate" handler="on_paste1_activate" last_modification_time="Mon, 27 Jan 2003 04:50:50 GMT"/>
+		    </widget>
+		  </child>
+
+		  <child>
+		    <widget class="GtkImageMenuItem" id="delete1">
+		      <property name="visible">True</property>
+		      <property name="label">gtk-delete</property>
+		      <property name="use_stock">True</property>
+		      <signal name="activate" handler="on_delete1_activate" last_modification_time="Mon, 27 Jan 2003 04:50:50 GMT"/>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+
+	  <child>
+	    <widget class="GtkMenuItem" id="menuitem7">
+	      <property name="visible">True</property>
+	      <property name="label" translatable="yes">_Help</property>
+	      <property name="use_underline">True</property>
+
+	      <child>
+		<widget class="GtkMenu" id="menuitem7_menu">
+
+		  <child>
+		    <widget class="GtkMenuItem" id="aboutMenuItem">
+		      <property name="visible">True</property>
+		      <property name="label" translatable="yes">_About</property>
+		      <property name="use_underline">True</property>
+		      <signal name="activate" handler="_on_aboutMenuItem_activate" last_modification_time="Thu, 06 Feb 2003 19:49:53 GMT"/>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+	</widget>
+	<packing>
+	  <property name="padding">0</property>
+	  <property name="expand">False</property>
+	  <property name="fill">False</property>
+	</packing>
+      </child>
+
+      <child>
+	<widget class="GtkVPaned" id="vpaned1">
+	  <property name="visible">True</property>
+	  <property name="can_focus">True</property>
+
+	  <child>
+	    <widget class="GtkScrolledWindow" id="scrolledwindow1">
+	      <property name="visible">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+	      <property name="vscrollbar_policy">GTK_POLICY_ALWAYS</property>
+	      <property name="shadow_type">GTK_SHADOW_NONE</property>
+	      <property name="window_placement">GTK_CORNER_TOP_LEFT</property>
+
+	      <child>
+		<widget class="GtkTextView" id="output">
+		  <property name="visible">True</property>
+		  <property name="can_focus">True</property>
+		  <property name="editable">False</property>
+		  <property name="overwrite">False</property>
+		  <property name="accepts_tab">True</property>
+		  <property name="justification">GTK_JUSTIFY_LEFT</property>
+		  <property name="wrap_mode">GTK_WRAP_WORD</property>
+		  <property name="cursor_visible">True</property>
+		  <property name="pixels_above_lines">0</property>
+		  <property name="pixels_below_lines">0</property>
+		  <property name="pixels_inside_wrap">0</property>
+		  <property name="left_margin">0</property>
+		  <property name="right_margin">0</property>
+		  <property name="indent">0</property>
+		  <property name="text" translatable="yes"></property>
+		</widget>
+	      </child>
+	    </widget>
+	    <packing>
+	      <property name="shrink">True</property>
+	      <property name="resize">True</property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkScrolledWindow" id="scrolledwindow2">
+	      <property name="visible">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="hscrollbar_policy">GTK_POLICY_AUTOMATIC</property>
+	      <property name="vscrollbar_policy">GTK_POLICY_ALWAYS</property>
+	      <property name="shadow_type">GTK_SHADOW_NONE</property>
+	      <property name="window_placement">GTK_CORNER_TOP_LEFT</property>
+
+	      <child>
+		<widget class="GtkTextView" id="input">
+		  <property name="visible">True</property>
+		  <property name="can_focus">True</property>
+		  <property name="has_focus">True</property>
+		  <property name="editable">True</property>
+		  <property name="overwrite">False</property>
+		  <property name="accepts_tab">True</property>
+		  <property name="justification">GTK_JUSTIFY_LEFT</property>
+		  <property name="wrap_mode">GTK_WRAP_NONE</property>
+		  <property name="cursor_visible">True</property>
+		  <property name="pixels_above_lines">0</property>
+		  <property name="pixels_below_lines">0</property>
+		  <property name="pixels_inside_wrap">0</property>
+		  <property name="left_margin">0</property>
+		  <property name="right_margin">0</property>
+		  <property name="indent">0</property>
+		  <property name="text" translatable="yes"></property>
+		</widget>
+	      </child>
+	    </widget>
+	    <packing>
+	      <property name="shrink">True</property>
+	      <property name="resize">False</property>
+	    </packing>
+	  </child>
+	</widget>
+	<packing>
+	  <property name="padding">0</property>
+	  <property name="expand">True</property>
+	  <property name="fill">True</property>
+	</packing>
+      </child>
+
+      <child>
+	<widget class="GtkStatusbar" id="statusbar1">
+	  <property name="visible">True</property>
+	  <property name="has_resize_grip">True</property>
+	</widget>
+	<packing>
+	  <property name="padding">0</property>
+	  <property name="expand">False</property>
+	  <property name="fill">False</property>
+	</packing>
+      </child>
+    </widget>
+  </child>
+</widget>
+
+</glade-interface>
diff --git a/ThirdParty/Twisted/twisted/manhole/ui/gtk2manhole.py b/ThirdParty/Twisted/twisted/manhole/ui/gtk2manhole.py
new file mode 100644
index 0000000..2c6a532
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/ui/gtk2manhole.py
@@ -0,0 +1,375 @@
+# -*- test-case-name: twisted.manhole.ui.test.test_gtk2manhole -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Manhole client with a GTK v2.x front-end.
+"""
+
+__version__ = '$Revision: 1.9 $'[11:-2]
+
+from twisted import copyright
+from twisted.internet import reactor
+from twisted.python import components, failure, log, util
+from twisted.python.reflect import prefixedMethodNames
+from twisted.spread import pb
+from twisted.spread.ui import gtk2util
+
+from twisted.manhole.service import IManholeClient
+from zope.interface import implements
+
+# The pygtk.require for version 2.0 has already been done by the reactor.
+import gtk
+
+import code, types, inspect
+
+# TODO:
+#  Make wrap-mode a run-time option.
+#  Explorer.
+#  Code doesn't cleanly handle opening a second connection.  Fix that.
+#  Make some acknowledgement of when a command has completed, even if
+#     it has no return value so it doesn't print anything to the console.
+
+class OfflineError(Exception):
+    pass
+
+class ManholeWindow(components.Componentized, gtk2util.GladeKeeper):
+    gladefile = util.sibpath(__file__, "gtk2manhole.glade")
+
+    _widgets = ('input','output','manholeWindow')
+
+    def __init__(self):
+        self.defaults = {}
+        gtk2util.GladeKeeper.__init__(self)
+        components.Componentized.__init__(self)
+
+        self.input = ConsoleInput(self._input)
+        self.input.toplevel = self
+        self.output = ConsoleOutput(self._output)
+
+        # Ugh.  GladeKeeper actually isn't so good for composite objects.
+        # I want this connected to the ConsoleInput's handler, not something
+        # on this class.
+        self._input.connect("key_press_event", self.input._on_key_press_event)
+
+    def setDefaults(self, defaults):
+        self.defaults = defaults
+
+    def login(self):
+        client = self.getComponent(IManholeClient)
+        d = gtk2util.login(client, **self.defaults)
+        d.addCallback(self._cbLogin)
+        d.addCallback(client._cbLogin)
+        d.addErrback(self._ebLogin)
+
+    def _cbDisconnected(self, perspective):
+        self.output.append("%s went away. :(\n" % (perspective,), "local")
+        self._manholeWindow.set_title("Manhole")
+
+    def _cbLogin(self, perspective):
+        peer = perspective.broker.transport.getPeer()
+        self.output.append("Connected to %s\n" % (peer,), "local")
+        perspective.notifyOnDisconnect(self._cbDisconnected)
+        self._manholeWindow.set_title("Manhole - %s" % (peer))
+        return perspective
+
+    def _ebLogin(self, reason):
+        self.output.append("Login FAILED %s\n" % (reason.value,), "exception")
+
+    def _on_aboutMenuItem_activate(self, widget, *unused):
+        import sys
+        from os import path
+        self.output.append("""\
+a Twisted Manhole client
+  Versions:
+    %(twistedVer)s
+    Python %(pythonVer)s on %(platform)s
+    GTK %(gtkVer)s / PyGTK %(pygtkVer)s
+    %(module)s %(modVer)s
+http://twistedmatrix.com/
+""" % {'twistedVer': copyright.longversion,
+       'pythonVer': sys.version.replace('\n', '\n      '),
+       'platform': sys.platform,
+       'gtkVer': ".".join(map(str, gtk.gtk_version)),
+       'pygtkVer': ".".join(map(str, gtk.pygtk_version)),
+       'module': path.basename(__file__),
+       'modVer': __version__,
+       }, "local")
+
+    def _on_openMenuItem_activate(self, widget, userdata=None):
+        self.login()
+
+    def _on_manholeWindow_delete_event(self, widget, *unused):
+        reactor.stop()
+
+    def _on_quitMenuItem_activate(self, widget, *unused):
+        reactor.stop()
+
+    def on_reload_self_activate(self, *unused):
+        from twisted.python import rebuild
+        rebuild.rebuild(inspect.getmodule(self.__class__))
+
+
+tagdefs = {
+    'default': {"family": "monospace"},
+    # These are message types we get from the server.
+    'stdout': {"foreground": "black"},
+    'stderr': {"foreground": "#AA8000"},
+    'result': {"foreground": "blue"},
+    'exception': {"foreground": "red"},
+    # Messages generated locally.
+    'local': {"foreground": "#008000"},
+    'log': {"foreground": "#000080"},
+    'command': {"foreground": "#666666"},
+    }
+
+# TODO: Factor Python console stuff back out to pywidgets.
+
+class ConsoleOutput:
+    _willScroll = None
+    def __init__(self, textView):
+        self.textView = textView
+        self.buffer = textView.get_buffer()
+
+        # TODO: Make this a singleton tag table.
+        for name, props in tagdefs.iteritems():
+            tag = self.buffer.create_tag(name)
+            # This can be done in the constructor in newer pygtk (post 1.99.14)
+            for k, v in props.iteritems():
+                tag.set_property(k, v)
+
+        self.buffer.tag_table.lookup("default").set_priority(0)
+
+        self._captureLocalLog()
+
+    def _captureLocalLog(self):
+        return log.startLogging(_Notafile(self, "log"), setStdout=False)
+
+    def append(self, text, kind=None):
+        # XXX: It seems weird to have to do this thing with always applying
+        # a 'default' tag.  Can't we change the fundamental look instead?
+        tags = ["default"]
+        if kind is not None:
+            tags.append(kind)
+
+        self.buffer.insert_with_tags_by_name(self.buffer.get_end_iter(),
+                                             text, *tags)
+        # Silly things, the TextView needs to update itself before it knows
+        # where the bottom is.
+        if self._willScroll is None:
+            self._willScroll = gtk.idle_add(self._scrollDown)
+
+    def _scrollDown(self, *unused):
+        self.textView.scroll_to_iter(self.buffer.get_end_iter(), 0,
+                                     True, 1.0, 1.0)
+        self._willScroll = None
+        return False
+
+class History:
+    def __init__(self, maxhist=10000):
+        self.ringbuffer = ['']
+        self.maxhist = maxhist
+        self.histCursor = 0
+
+    def append(self, htext):
+        self.ringbuffer.insert(-1, htext)
+        if len(self.ringbuffer) > self.maxhist:
+            self.ringbuffer.pop(0)
+        self.histCursor = len(self.ringbuffer) - 1
+        self.ringbuffer[-1] = ''
+
+    def move(self, prevnext=1):
+        '''
+        Return next/previous item in the history, stopping at top/bottom.
+        '''
+        hcpn = self.histCursor + prevnext
+        if hcpn >= 0 and hcpn < len(self.ringbuffer):
+            self.histCursor = hcpn
+            return self.ringbuffer[hcpn]
+        else:
+            return None
+
+    def histup(self, textbuffer):
+        if self.histCursor == len(self.ringbuffer) - 1:
+            si, ei = textbuffer.get_start_iter(), textbuffer.get_end_iter()
+            self.ringbuffer[-1] = textbuffer.get_text(si,ei)
+        newtext = self.move(-1)
+        if newtext is None:
+            return
+        textbuffer.set_text(newtext)
+
+    def histdown(self, textbuffer):
+        newtext = self.move(1)
+        if newtext is None:
+            return
+        textbuffer.set_text(newtext)
+
+
+class ConsoleInput:
+    toplevel, rkeymap = None, None
+    __debug = False
+
+    def __init__(self, textView):
+        self.textView=textView
+        self.rkeymap = {}
+        self.history = History()
+        for name in prefixedMethodNames(self.__class__, "key_"):
+            keysymName = name.split("_")[-1]
+            self.rkeymap[getattr(gtk.keysyms, keysymName)] = keysymName
+
+    def _on_key_press_event(self, entry, event):
+        rvalue = None  # returned unchanged when no handler matches the key
+        ksym = self.rkeymap.get(event.keyval, None)
+
+        mods = []
+        for prefix, mask in [('ctrl', gtk.gdk.CONTROL_MASK), ('shift', gtk.gdk.SHIFT_MASK)]:
+            if event.state & mask:
+                mods.append(prefix)
+
+        if ksym and mods:
+            ksym = '_'.join(mods + [ksym])
+
+        if ksym:
+            rvalue = getattr(
+                self, 'key_%s' % ksym, lambda *a, **kw: None)(entry, event)
+
+        if self.__debug:
+            print ksym
+        return rvalue
+
+    def getText(self):
+        buffer = self.textView.get_buffer()
+        iter1, iter2 = buffer.get_bounds()
+        text = buffer.get_text(iter1, iter2, False)
+        return text
+
+    def setText(self, text):
+        self.textView.get_buffer().set_text(text)
+
+    def key_Return(self, entry, event):
+        text = self.getText()
+        # Figure out if that Return meant "next line" or "execute."
+        try:
+            c = code.compile_command(text)
+        except SyntaxError, e:
+            # This could conceivably piss you off if the client's python
+            # doesn't accept keywords that are known to the manhole's
+            # python.
+            buffer = self.textView.get_buffer()
+            # SyntaxError positions are 1-based; TextBuffer iterators are 0-based.
+            point = buffer.get_iter_at_line_offset(e.lineno - 1, (e.offset or 1) - 1)
+            buffer.place_cursor(point)
+            # TODO: Componentize!
+            self.toplevel.output.append(str(e), "exception")
+        except (OverflowError, ValueError), e:
+            self.toplevel.output.append(str(e), "exception")
+        else:
+            if c is not None:
+                self.sendMessage()
+                # Don't insert Return as a newline in the buffer.
+                self.history.append(text)
+                self.clear()
+                # entry.emit_stop_by_name("key_press_event")
+                return True
+            else:
+                # not a complete code block
+                return False
+
+        return False
+
+    def key_Up(self, entry, event):
+        # if I'm at the top, previous history item.
+        textbuffer = self.textView.get_buffer()
+        if textbuffer.get_iter_at_mark(textbuffer.get_insert()).get_line() == 0:
+            self.history.histup(textbuffer)
+            return True
+        return False
+
+    def key_Down(self, entry, event):
+        textbuffer = self.textView.get_buffer()
+        if textbuffer.get_iter_at_mark(textbuffer.get_insert()).get_line() == (
+            textbuffer.get_line_count() - 1):
+            self.history.histdown(textbuffer)
+            return True
+        return False
+
+    key_ctrl_p = key_Up
+    key_ctrl_n = key_Down
+
+    def key_ctrl_shift_F9(self, entry, event):
+        if self.__debug:
+            import pdb; pdb.set_trace()
+
+    def clear(self):
+        buffer = self.textView.get_buffer()
+        buffer.delete(*buffer.get_bounds())
+
+    def sendMessage(self):
+        buffer = self.textView.get_buffer()
+        iter1, iter2 = buffer.get_bounds()
+        text = buffer.get_text(iter1, iter2, False)
+        self.toplevel.output.append(pythonify(text), 'command')
+        # TODO: Componentize better!
+        try:
+            return self.toplevel.getComponent(IManholeClient).do(text)
+        except OfflineError:
+            self.toplevel.output.append("Not connected, command not sent.\n",
+                                        "exception")
+
+
+def pythonify(text):
+    '''
+    Make some text appear as though it was typed in at a Python prompt.
+    '''
+    lines = text.split('\n')
+    lines[0] = '>>> ' + lines[0]
+    return '\n... '.join(lines) + '\n'
+
+class _Notafile:
+    """Curry to make failure.printTraceback work with the output widget."""
+    def __init__(self, output, kind):
+        self.output = output
+        self.kind = kind
+
+    def write(self, txt):
+        self.output.append(txt, self.kind)
+
+    def flush(self):
+        pass
+
+class ManholeClient(components.Adapter, pb.Referenceable):
+    implements(IManholeClient)
+
+    capabilities = {
+#        "Explorer": 'Set',
+        "Failure": 'Set'
+        }
+
+    def _cbLogin(self, perspective):
+        self.perspective = perspective
+        perspective.notifyOnDisconnect(self._cbDisconnected)
+        return perspective
+
+    def remote_console(self, messages):
+        for kind, content in messages:
+            if isinstance(content, types.StringTypes):
+                self.original.output.append(content, kind)
+            elif (kind == "exception") and isinstance(content, failure.Failure):
+                content.printTraceback(_Notafile(self.original.output,
+                                                 "exception"))
+            else:
+                self.original.output.append(str(content), kind)
+
+    def remote_receiveExplorer(self, xplorer):
+        pass
+
+    def remote_listCapabilities(self):
+        return self.capabilities
+
+    def _cbDisconnected(self, perspective):
+        self.perspective = None
+
+    def do(self, text):
+        if self.perspective is None:
+            raise OfflineError
+        return self.perspective.callRemote("do", text)
+
+components.registerAdapter(ManholeClient, ManholeWindow, IManholeClient)
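
The registerAdapter call just above is what makes the window's login path work:
ManholeWindow.login calls getComponent(IManholeClient), which hands back a
ManholeClient wrapping the window.  A minimal sketch of driving this front-end,
assuming pygtk, a display and a reachable manhole service; the host, port and
credential values below are placeholders:

    from twisted.internet import gtk2reactor
    gtk2reactor.install()                      # install the GTK2 reactor before importing reactor
    from twisted.internet import reactor
    from twisted.manhole.service import IManholeClient
    from twisted.manhole.ui.gtk2manhole import ManholeWindow

    win = ManholeWindow()
    win.setDefaults({'host': 'localhost', 'port': '8787',    # placeholder endpoint
                     'user': 'guest', 'password': 'guest'})  # placeholder credentials
    client = win.getComponent(IManholeClient)  # the ManholeClient adapter registered above
    win.login()                                # opens the PB login dialog with those defaults
    reactor.run()
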
diff --git a/ThirdParty/Twisted/twisted/manhole/ui/test/__init__.py b/ThirdParty/Twisted/twisted/manhole/ui/test/__init__.py
new file mode 100644
index 0000000..36214ba
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/ui/test/__init__.py
@@ -0,0 +1,4 @@
+# Copyright (c) 2009 Twisted Matrix Laboratories.
+"""
+Tests for the L{twisted.manhole.ui} package.
+"""
diff --git a/ThirdParty/Twisted/twisted/manhole/ui/test/test_gtk2manhole.py b/ThirdParty/Twisted/twisted/manhole/ui/test/test_gtk2manhole.py
new file mode 100644
index 0000000..b59f937
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/manhole/ui/test/test_gtk2manhole.py
@@ -0,0 +1,48 @@
+# Copyright (c) 2009 Twisted Matrix Laboratories.
+"""
+Tests for GTK2 GUI manhole.
+"""
+
+skip = False
+
+try:
+    import pygtk
+    pygtk.require("2.0")
+except:
+    skip = "GTK 2.0 not available"
+else:
+    try:
+        import gtk
+    except ImportError:
+        skip = "GTK 2.0 not available"
+    except RuntimeError:
+        skip = "Old version of GTK 2.0 requires DISPLAY, and we don't have one."
+    else:
+        if gtk.gtk_version[0] == 1:
+            skip = "Requested GTK 2.0, but 1.0 was already imported."
+        else:
+            from twisted.manhole.ui.gtk2manhole import ConsoleInput
+
+from twisted.trial.unittest import TestCase
+
+from twisted.python.reflect import prefixedMethodNames
+
+class ConsoleInputTests(TestCase):
+    """
+    Tests for L{ConsoleInput}.
+    """
+
+    def test_reverseKeymap(self):
+        """
+        Verify that a L{ConsoleInput} has a reverse mapping of the keysym names
+        it needs for event handling to their corresponding keysym.
+        """
+        ci = ConsoleInput(None)
+        for eventName in prefixedMethodNames(ConsoleInput, 'key_'):
+            keysymName = eventName.split("_")[-1]
+            keysymValue = getattr(gtk.keysyms, keysymName)
+            self.assertEqual(ci.rkeymap[keysymValue], keysymName)
+
+
+    skip = skip
+
diff --git a/ThirdParty/Twisted/twisted/names/__init__.py b/ThirdParty/Twisted/twisted/names/__init__.py
new file mode 100644
index 0000000..4c1d7c9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/__init__.py
@@ -0,0 +1,7 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""Resolving Internet Names"""
+
+from twisted.names._version import version
+__version__ = version.short()
diff --git a/ThirdParty/Twisted/twisted/names/_version.py b/ThirdParty/Twisted/twisted/names/_version.py
new file mode 100644
index 0000000..9a3c071
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.names', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/names/authority.py b/ThirdParty/Twisted/twisted/names/authority.py
new file mode 100644
index 0000000..0dc7c1c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/authority.py
@@ -0,0 +1,334 @@
+# -*- test-case-name: twisted.names.test.test_names -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Authoritative resolvers.
+"""
+
+import os
+import time
+
+from twisted.names import dns
+from twisted.internet import defer
+from twisted.python import failure
+from twisted.python.compat import execfile
+
+import common
+
+def getSerial(filename = '/tmp/twisted-names.serial'):
+    """Return a monotonically increasing (across program runs) integer.
+
+    State is stored in the given file.  If it does not exist, it is
+    created with rw-/---/--- permissions.
+    """
+    serial = time.strftime('%Y%m%d')
+
+    o = os.umask(0177)
+    try:
+        if not os.path.exists(filename):
+            f = file(filename, 'w')
+            f.write(serial + ' 0')
+            f.close()
+    finally:
+        os.umask(o)
+
+    serialFile = file(filename, 'r')
+    lastSerial, ID = serialFile.readline().split()
+    ID = (lastSerial == serial) and (int(ID) + 1) or 0
+    serialFile.close()
+    serialFile = file(filename, 'w')
+    serialFile.write('%s %d' % (serial, ID))
+    serialFile.close()
+    serial = serial + ('%02d' % (ID,))
+    return serial
+
+
+#class LookupCacherMixin(object):
+#    _cache = None
+#
+#    def _lookup(self, name, cls, type, timeout = 10):
+#        if not self._cache:
+#            self._cache = {}
+#            self._meth = super(LookupCacherMixin, self)._lookup
+#
+#        if self._cache.has_key((name, cls, type)):
+#            return self._cache[(name, cls, type)]
+#        else:
+#            r = self._meth(name, cls, type, timeout)
+#            self._cache[(name, cls, type)] = r
+#            return r
+
+
+class FileAuthority(common.ResolverBase):
+    """An Authority that is loaded from a file."""
+
+    soa = None
+    records = None
+
+    def __init__(self, filename):
+        common.ResolverBase.__init__(self)
+        self.loadFile(filename)
+        self._cache = {}
+
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+#        print 'setstate ', self.soa
+
+    def _lookup(self, name, cls, type, timeout = None):
+        cnames = []
+        results = []
+        authority = []
+        additional = []
+        default_ttl = max(self.soa[1].minimum, self.soa[1].expire)
+
+        domain_records = self.records.get(name.lower())
+
+        if domain_records:
+            for record in domain_records:
+                if record.ttl is not None:
+                    ttl = record.ttl
+                else:
+                    ttl = default_ttl
+
+                if record.TYPE == dns.NS and name.lower() != self.soa[0].lower():
+                    # NS records belonging to a child zone constitute a referral.  As
+                    # NS records are authoritative in the child zone, ours here
+                    # are not.  RFC 2181, section 6.1.
+                    authority.append(
+                        dns.RRHeader(name, record.TYPE, dns.IN, ttl, record, auth=False)
+                    )
+                elif record.TYPE == type or type == dns.ALL_RECORDS:
+                    results.append(
+                        dns.RRHeader(name, record.TYPE, dns.IN, ttl, record, auth=True)
+                    )
+                if record.TYPE == dns.CNAME:
+                    cnames.append(
+                        dns.RRHeader(name, record.TYPE, dns.IN, ttl, record, auth=True)
+                    )
+            if not results:
+                results = cnames
+
+            for record in results + authority:
+                section = {dns.NS: additional, dns.CNAME: results, dns.MX: additional}.get(record.type)
+                if section is not None:
+                    n = str(record.payload.name)
+                    for rec in self.records.get(n.lower(), ()):
+                        if rec.TYPE == dns.A:
+                            section.append(
+                                dns.RRHeader(n, dns.A, dns.IN, rec.ttl or default_ttl, rec, auth=True)
+                            )
+
+            if not results and not authority:
+                # Empty response. Include SOA record to allow clients to cache
+                # this response.  RFC 1034, sections 3.7 and 4.3.4, and RFC 2181
+                # section 7.1.
+                authority.append(
+                    dns.RRHeader(self.soa[0], dns.SOA, dns.IN, ttl, self.soa[1], auth=True)
+                    )
+            return defer.succeed((results, authority, additional))
+        else:
+            if name.lower().endswith(self.soa[0].lower()):
+                # We are the authority and we didn't find it.  Goodbye.
+                return defer.fail(failure.Failure(dns.AuthoritativeDomainError(name)))
+            return defer.fail(failure.Failure(dns.DomainError(name)))
+
+
+    def lookupZone(self, name, timeout = 10):
+        if self.soa[0].lower() == name.lower():
+            # Wee hee hee hooo yea
+            default_ttl = max(self.soa[1].minimum, self.soa[1].expire)
+            if self.soa[1].ttl is not None:
+                soa_ttl = self.soa[1].ttl
+            else:
+                soa_ttl = default_ttl
+            results = [dns.RRHeader(self.soa[0], dns.SOA, dns.IN, soa_ttl, self.soa[1], auth=True)]
+            for (k, r) in self.records.items():
+                for rec in r:
+                    if rec.ttl is not None:
+                        ttl = rec.ttl
+                    else:
+                        ttl = default_ttl
+                    if rec.TYPE != dns.SOA:
+                        results.append(dns.RRHeader(k, rec.TYPE, dns.IN, ttl, rec, auth=True))
+            results.append(results[0])
+            return defer.succeed((results, (), ()))
+        return defer.fail(failure.Failure(dns.DomainError(name)))
+
+    def _cbAllRecords(self, results):
+        ans, auth, add = [], [], []
+        for res in results:
+            if res[0]:
+                ans.extend(res[1][0])
+                auth.extend(res[1][1])
+                add.extend(res[1][2])
+        return ans, auth, add
+
+
+class PySourceAuthority(FileAuthority):
+    """A FileAuthority that is built up from Python source code."""
+
+    def loadFile(self, filename):
+        g, l = self.setupConfigNamespace(), {}
+        execfile(filename, g, l)
+        if not l.has_key('zone'):
+            raise ValueError, "No zone defined in " + filename
+
+        self.records = {}
+        for rr in l['zone']:
+            if isinstance(rr[1], dns.Record_SOA):
+                self.soa = rr
+            self.records.setdefault(rr[0].lower(), []).append(rr[1])
+
+
+    def wrapRecord(self, type):
+        return lambda name, *arg, **kw: (name, type(*arg, **kw))
+
+
+    def setupConfigNamespace(self):
+        r = {}
+        items = dns.__dict__.iterkeys()
+        for record in [x for x in items if x.startswith('Record_')]:
+            type = getattr(dns, record)
+            f = self.wrapRecord(type)
+            r[record[len('Record_'):]] = f
+        return r
+
+
+class BindAuthority(FileAuthority):
+    """An Authority that loads BIND configuration files"""
+
+    def loadFile(self, filename):
+        self.origin = os.path.basename(filename) + '.' # XXX - this might suck
+        lines = open(filename).readlines()
+        lines = self.stripComments(lines)
+        lines = self.collapseContinuations(lines)
+        self.parseLines(lines)
+
+
+    def stripComments(self, lines):
+        return [
+            a.find(';') == -1 and a or a[:a.find(';')] for a in [
+                b.strip() for b in lines
+            ]
+        ]
+
+
+    def collapseContinuations(self, lines):
+        L = []
+        state = 0
+        for line in lines:
+            if state == 0:
+                if line.find('(') == -1:
+                    L.append(line)
+                else:
+                    L.append(line[:line.find('(')])
+                    state = 1
+            else:
+                if line.find(')') != -1:
+                    L[-1] += ' ' + line[:line.find(')')]
+                    state = 0
+                else:
+                    L[-1] += ' ' + line
+        lines = L
+        L = []
+        for line in lines:
+            L.append(line.split())
+        return filter(None, L)
+
+
+    def parseLines(self, lines):
+        TTL = 60 * 60 * 3
+        ORIGIN = self.origin
+
+        self.records = {}
+
+        for (line, index) in zip(lines, range(len(lines))):
+            if line[0] == '$TTL':
+                TTL = dns.str2time(line[1])
+            elif line[0] == '$ORIGIN':
+                ORIGIN = line[1]
+            elif line[0] == '$INCLUDE': # XXX - oh, fuck me
+                raise NotImplementedError('$INCLUDE directive not implemented')
+            elif line[0] == '$GENERATE':
+                raise NotImplementedError('$GENERATE directive not implemented')
+            else:
+                self.parseRecordLine(ORIGIN, TTL, line)
+
+
+    def addRecord(self, owner, ttl, type, domain, cls, rdata):
+        if not domain.endswith('.'):
+            domain = domain + '.' + owner
+        else:
+            domain = domain[:-1]
+        f = getattr(self, 'class_%s' % cls, None)
+        if f:
+            f(ttl, type, domain, rdata)
+        else:
+            raise NotImplementedError, "Record class %r not supported" % cls
+
+
+    def class_IN(self, ttl, type, domain, rdata):
+        record = getattr(dns, 'Record_%s' % type, None)
+        if record:
+            r = record(*rdata)
+            r.ttl = ttl
+            self.records.setdefault(domain.lower(), []).append(r)
+
+            print 'Adding IN Record', domain, ttl, r
+            if type == 'SOA':
+                self.soa = (domain, r)
+        else:
+            raise NotImplementedError, "Record type %r not supported" % type
+
+
+    #
+    # This file ends here.  Read no further.
+    #
+    def parseRecordLine(self, origin, ttl, line):
+        MARKERS = dns.QUERY_CLASSES.values() + dns.QUERY_TYPES.values()
+        cls = 'IN'
+        owner = origin
+
+        if line[0] == '@':
+            line = line[1:]
+            owner = origin
+#            print 'default owner'
+        elif not line[0].isdigit() and line[0] not in MARKERS:
+            owner = line[0]
+            line = line[1:]
+#            print 'owner is ', owner
+
+        if line[0].isdigit() or line[0] in MARKERS:
+            domain = owner
+            owner = origin
+#            print 'woops, owner is ', owner, ' domain is ', domain
+        else:
+            domain = line[0]
+            line = line[1:]
+#            print 'domain is ', domain
+
+        if line[0] in dns.QUERY_CLASSES.values():
+            cls = line[0]
+            line = line[1:]
+#            print 'cls is ', cls
+            if line[0].isdigit():
+                ttl = int(line[0])
+                line = line[1:]
+#                print 'ttl is ', ttl
+        elif line[0].isdigit():
+            ttl = int(line[0])
+            line = line[1:]
+#            print 'ttl is ', ttl
+            if line[0] in dns.QUERY_CLASSES.values():
+                cls = line[0]
+                line = line[1:]
+#                print 'cls is ', cls
+
+        type = line[0]
+#        print 'type is ', type
+        rdata = line[1:]
+#        print 'rdata is ', rdata
+
+        self.addRecord(owner, ttl, type, domain, cls, rdata)
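
PySourceAuthority.loadFile above exec's a Python source file inside the namespace
built by setupConfigNamespace, so every dns.Record_* class is reachable under its
short name and yields a (name, record) pair; the file only has to bind a list named
zone.  A minimal sketch of such a file, with placeholder names and addresses:

    # example-zone.py (hypothetical input for PySourceAuthority)
    zone = [
        SOA('example-domain.com',
            mname='ns1.example-domain.com',
            rname='hostmaster.example-domain.com',
            serial=2014051700,
            refresh='1H', retry='1H', expire='1W', minimum='1H'),
        NS('example-domain.com', 'ns1.example-domain.com'),
        A('example-domain.com', '192.0.2.1'),
        CNAME('www.example-domain.com', 'example-domain.com'),
    ]

Loading it is then just PySourceAuthority('example-zone.py'): the SOA entry becomes
self.soa and every record is appended to self.records under its lowercased name.
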
diff --git a/ThirdParty/Twisted/twisted/names/cache.py b/ThirdParty/Twisted/twisted/names/cache.py
new file mode 100644
index 0000000..bbb29be
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/cache.py
@@ -0,0 +1,127 @@
+# -*- test-case-name: twisted.names.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An in-memory caching resolver.
+"""
+
+from __future__ import division, absolute_import
+
+from zope.interface import implementer
+
+from twisted.names import dns, common
+from twisted.python import failure, log
+from twisted.internet import interfaces, defer
+
+
+@implementer(interfaces.IResolver)
+class CacheResolver(common.ResolverBase):
+    """
+    A resolver that serves records from a local, memory cache.
+
+    @ivar _reactor: A provider of L{interfaces.IReactorTime}.
+    """
+    cache = None
+
+    def __init__(self, cache=None, verbose=0, reactor=None):
+        common.ResolverBase.__init__(self)
+
+        self.cache = {}
+        self.verbose = verbose
+        self.cancel = {}
+        if reactor is None:
+            from twisted.internet import reactor
+        self._reactor = reactor
+
+        if cache:
+            for query, (seconds, payload) in cache.items():
+                self.cacheResult(query, payload, seconds)
+
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+
+        now = self._reactor.seconds()
+        for (k, (when, (ans, add, ns))) in self.cache.items():
+            diff = now - when
+            for rec in ans + add + ns:
+                if rec.ttl < diff:
+                    del self.cache[k]
+                    break
+
+
+    def __getstate__(self):
+        for c in self.cancel.values():
+            c.cancel()
+        self.cancel.clear()
+        return self.__dict__
+
+
+    def _lookup(self, name, cls, type, timeout):
+        now = self._reactor.seconds()
+        q = dns.Query(name, type, cls)
+        try:
+            when, (ans, auth, add) = self.cache[q]
+        except KeyError:
+            if self.verbose > 1:
+                log.msg('Cache miss for ' + repr(name))
+            return defer.fail(failure.Failure(dns.DomainError(name)))
+        else:
+            if self.verbose:
+                log.msg('Cache hit for ' + repr(name))
+            diff = now - when
+
+            try:
+                result = (
+                    [dns.RRHeader(r.name.name, r.type, r.cls, r.ttl - diff,
+                                  r.payload) for r in ans],
+                    [dns.RRHeader(r.name.name, r.type, r.cls, r.ttl - diff,
+                                  r.payload) for r in auth],
+                    [dns.RRHeader(r.name.name, r.type, r.cls, r.ttl - diff,
+                                  r.payload) for r in add])
+            except ValueError:
+                return defer.fail(failure.Failure(dns.DomainError(name)))
+            else:
+                return defer.succeed(result)
+
+
+    def lookupAllRecords(self, name, timeout = None):
+        return defer.fail(failure.Failure(dns.DomainError(name)))
+
+
+    def cacheResult(self, query, payload, cacheTime=None):
+        """
+        Cache a DNS entry.
+
+        @param query: a L{dns.Query} instance.
+
+        @param payload: a 3-tuple of lists of L{dns.RRHeader} records, the
+            matching result of the query (answers, authority and additional).
+
+        @param cacheTime: The time (seconds since epoch) at which the entry is
+            considered to have been added to the cache. If C{None} is given,
+            the current time is used.
+        """
+        if self.verbose > 1:
+            log.msg('Adding %r to cache' % query)
+
+        self.cache[query] = (cacheTime or self._reactor.seconds(), payload)
+
+        if query in self.cancel:
+            self.cancel[query].cancel()
+
+        s = list(payload[0]) + list(payload[1]) + list(payload[2])
+        if s:
+            m = s[0].ttl
+            for r in s:
+                m = min(m, r.ttl)
+        else:
+            m = 0
+
+        self.cancel[query] = self._reactor.callLater(m, self.clearEntry, query)
+
+
+    def clearEntry(self, query):
+        del self.cache[query]
+        del self.cancel[query]
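
A short sketch of how cacheResult and _lookup above cooperate, using
twisted.internet.task.Clock as the IReactorTime provider so the TTL-driven
eviction can be stepped by hand; the name and address are placeholders:

    from twisted.internet.task import Clock
    from twisted.names import cache, dns

    clock = Clock()
    c = cache.CacheResolver(reactor=clock)

    query = dns.Query('example-domain.com', dns.A, dns.IN)
    answer = dns.RRHeader('example-domain.com', dns.A, dns.IN, 60,
                          dns.Record_A('192.0.2.1', 60))
    c.cacheResult(query, ([answer], [], []))    # schedules clearEntry after the minimum TTL, 60s

    d = c.lookupAddress('example-domain.com')   # answered from the cache, TTLs reduced by elapsed time
    clock.advance(60)                           # the delayed call fires and the entry is evicted
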
diff --git a/ThirdParty/Twisted/twisted/names/client.py b/ThirdParty/Twisted/twisted/names/client.py
new file mode 100644
index 0000000..27e4828
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/client.py
@@ -0,0 +1,932 @@
+# -*- test-case-name: twisted.names.test.test_names -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Asynchronous client DNS
+
+The functions exposed in this module can be used for asynchronous name
+resolution and dns queries.
+
+If you need to create a resolver with specific requirements, such as needing to
+do queries against a particular host, the L{createResolver} function will
+return an C{IResolver}.
+
+Future plans: Proper nameserver acquisition on Windows/MacOS,
+better caching, respect timeouts
+"""
+
+import os
+import errno
+import warnings
+
+from zope.interface import implementer
+
+# Twisted imports
+from twisted.python.compat import nativeString
+from twisted.python.runtime import platform
+from twisted.python.filepath import FilePath
+from twisted.internet import error, defer, protocol, interfaces
+from twisted.python import log, failure
+from twisted.names import dns, common
+
+
+@implementer(interfaces.IResolver)
+class Resolver(common.ResolverBase):
+    """
+    @ivar _waiting: A C{dict} mapping tuple keys of query name/type/class to
+        Deferreds which will be called back with the result of those queries.
+        This is used to avoid issuing the same query more than once in
+        parallel.  This is more efficient on the network and helps avoid a
+        "birthday paradox" attack by keeping the number of outstanding requests
+        for a particular query fixed at one instead of allowing the attacker to
+        raise it to an arbitrary number.
+
+    @ivar _reactor: A provider of L{IReactorTCP}, L{IReactorUDP}, and
+        L{IReactorTime} which will be used to set up network resources and
+        track timeouts.
+    """
+    index = 0
+    timeout = None
+
+    factory = None
+    servers = None
+    dynServers = ()
+    pending = None
+    connections = None
+
+    resolv = None
+    _lastResolvTime = None
+    _resolvReadInterval = 60
+
+    def __init__(self, resolv=None, servers=None, timeout=(1, 3, 11, 45), reactor=None):
+        """
+        Construct a resolver which will query domain name servers listed in
+        the C{resolv.conf(5)}-format file given by C{resolv} as well as
+        those in the given C{servers} list.  Servers are queried in a
+        round-robin fashion.  If given, C{resolv} is periodically checked
+        for modification and re-parsed if it is noticed to have changed.
+
+        @type servers: C{list} of C{(str, int)} or C{None}
+        @param servers: If not None, interpreted as a list of (host, port)
+            pairs specifying addresses of domain name servers to attempt to use
+            for this lookup.  Host addresses should be in IPv4 dotted-quad
+            form.  If specified, overrides C{resolv}.
+
+        @type resolv: C{str}
+        @param resolv: Filename to read and parse as a resolver(5)
+            configuration file.
+
+        @type timeout: Sequence of C{int}
+        @param timeout: Default number of seconds after which to reissue the
+            query.  When the last timeout expires, the query is considered
+            failed.
+
+        @param reactor: A provider of L{IReactorTime}, L{IReactorUDP}, and
+            L{IReactorTCP} which will be used to establish connections, listen
+            for DNS datagrams, and enforce timeouts.  If not provided, the
+            global reactor will be used.
+
+        @raise ValueError: Raised if no nameserver addresses can be found.
+        """
+        common.ResolverBase.__init__(self)
+
+        if reactor is None:
+            from twisted.internet import reactor
+        self._reactor = reactor
+
+        self.timeout = timeout
+
+        if servers is None:
+            self.servers = []
+        else:
+            self.servers = servers
+
+        self.resolv = resolv
+
+        if not len(self.servers) and not resolv:
+            raise ValueError("No nameservers specified")
+
+        self.factory = DNSClientFactory(self, timeout)
+        self.factory.noisy = 0   # Be quiet by default
+
+        self.connections = []
+        self.pending = []
+
+        self._waiting = {}
+
+        self.maybeParseConfig()
+
+
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        d['connections'] = []
+        d['_parseCall'] = None
+        return d
+
+
+    def __setstate__(self, state):
+        self.__dict__.update(state)
+        self.maybeParseConfig()
+
+
+    def maybeParseConfig(self):
+        if self.resolv is None:
+            # Don't try to parse it, don't set up a call loop
+            return
+
+        try:
+            resolvConf = FilePath(self.resolv).open()
+        except IOError as e:
+            if e.errno == errno.ENOENT:
+                # Missing resolv.conf is treated the same as an empty resolv.conf
+                self.parseConfig(())
+            else:
+                raise
+        else:
+            mtime = os.fstat(resolvConf.fileno()).st_mtime
+            if mtime != self._lastResolvTime:
+                log.msg('%s changed, reparsing' % (self.resolv,))
+                self._lastResolvTime = mtime
+                self.parseConfig(resolvConf)
+
+        # Check again in a little while
+        self._parseCall = self._reactor.callLater(
+            self._resolvReadInterval, self.maybeParseConfig)
+
+
+    def parseConfig(self, resolvConf):
+        servers = []
+        for L in resolvConf:
+            L = L.strip()
+            if L.startswith(b'nameserver'):
+                resolver = (nativeString(L.split()[1]), dns.PORT)
+                servers.append(resolver)
+                log.msg("Resolver added %r to server list" % (resolver,))
+            elif L.startswith(b'domain'):
+                try:
+                    self.domain = L.split()[1]
+                except IndexError:
+                    self.domain = b''
+                self.search = None
+            elif L.startswith(b'search'):
+                self.search = L.split()[1:]
+                self.domain = None
+        if not servers:
+            servers.append(('127.0.0.1', dns.PORT))
+        self.dynServers = servers
+
+
+    def pickServer(self):
+        """
+        Return the address of a nameserver.
+
+        TODO: Weight servers for response time so faster ones can be
+        preferred.
+        """
+        if not self.servers and not self.dynServers:
+            return None
+        serverL = len(self.servers)
+        dynL = len(self.dynServers)
+
+        self.index += 1
+        self.index %= (serverL + dynL)
+        if self.index < serverL:
+            return self.servers[self.index]
+        else:
+            return self.dynServers[self.index - serverL]
+
+
+    def _connectedProtocol(self):
+        """
+        Return a new L{DNSDatagramProtocol} bound to a randomly selected port
+        number.
+        """
+        proto = dns.DNSDatagramProtocol(self)
+        while True:
+            try:
+                self._reactor.listenUDP(dns.randomSource(), proto)
+            except error.CannotListenError:
+                pass
+            else:
+                return proto
+
+
+    def connectionMade(self, protocol):
+        """
+        Called by associated L{dns.DNSProtocol} instances when they connect.
+        """
+        self.connections.append(protocol)
+        for (d, q, t) in self.pending:
+            self.queryTCP(q, t).chainDeferred(d)
+        del self.pending[:]
+
+
+    def connectionLost(self, protocol):
+        """
+        Called by associated L{dns.DNSProtocol} instances when they disconnect.
+        """
+        if protocol in self.connections:
+            self.connections.remove(protocol)
+
+
+    def messageReceived(self, message, protocol, address = None):
+        log.msg("Unexpected message (%d) received from %r" % (message.id, address))
+
+
+    def _query(self, *args):
+        """
+        Get a new L{DNSDatagramProtocol} instance from L{_connectedProtocol},
+        issue a query to it using C{*args}, and arrange for it to be
+        disconnected from its transport after the query completes.
+
+        @param *args: Positional arguments to be passed to
+            L{DNSDatagramProtocol.query}.
+
+        @return: A L{Deferred} which will be called back with the result of the
+            query.
+        """
+        protocol = self._connectedProtocol()
+        d = protocol.query(*args)
+        def cbQueried(result):
+            protocol.transport.stopListening()
+            return result
+        d.addBoth(cbQueried)
+        return d
+
+
+    def queryUDP(self, queries, timeout = None):
+        """
+        Make a number of DNS queries via UDP.
+
+        @type queries: A C{list} of C{dns.Query} instances
+        @param queries: The queries to make.
+
+        @type timeout: Sequence of C{int}
+        @param timeout: Number of seconds after which to reissue the query.
+        When the last timeout expires, the query is considered failed.
+
+        @rtype: C{Deferred}
+        @raise C{twisted.internet.defer.TimeoutError}: When the query times
+        out.
+        """
+        if timeout is None:
+            timeout = self.timeout
+
+        addresses = self.servers + list(self.dynServers)
+        if not addresses:
+            return defer.fail(IOError("No domain name servers available"))
+
+        # Make sure we go through servers in the list in the order they were
+        # specified.
+        addresses.reverse()
+
+        used = addresses.pop()
+        d = self._query(used, queries, timeout[0])
+        d.addErrback(self._reissue, addresses, [used], queries, timeout)
+        return d
+
+
+    def _reissue(self, reason, addressesLeft, addressesUsed, query, timeout):
+        reason.trap(dns.DNSQueryTimeoutError)
+
+        # If there are no servers left to be tried, adjust the timeout
+        # to the next longest timeout period and move all the
+        # "used" addresses back to the list of addresses to try.
+        if not addressesLeft:
+            addressesLeft = addressesUsed
+            addressesLeft.reverse()
+            addressesUsed = []
+            timeout = timeout[1:]
+
+        # If all timeout values have been used this query has failed.  Tell the
+        # protocol we're giving up on it and return a terminal timeout failure
+        # to our caller.
+        if not timeout:
+            return failure.Failure(defer.TimeoutError(query))
+
+        # Get an address to try.  Take it out of the list of addresses
+        # to try and put it into the list of already tried addresses.
+        address = addressesLeft.pop()
+        addressesUsed.append(address)
+
+        # Issue a query to a server.  Use the current timeout.  Add this
+        # function as a timeout errback in case another retry is required.
+        d = self._query(address, query, timeout[0], reason.value.id)
+        d.addErrback(self._reissue, addressesLeft, addressesUsed, query, timeout)
+        return d
+
+
+    def queryTCP(self, queries, timeout = 10):
+        """
+        Make a number of DNS queries via TCP.
+
+        @type queries: Any non-zero number of C{dns.Query} instances
+        @param queries: The queries to make.
+
+        @type timeout: C{int}
+        @param timeout: The number of seconds after which to fail.
+
+        @rtype: C{Deferred}
+        """
+        if not len(self.connections):
+            address = self.pickServer()
+            if address is None:
+                return defer.fail(IOError("No domain name servers available"))
+            host, port = address
+            self._reactor.connectTCP(host, port, self.factory)
+            self.pending.append((defer.Deferred(), queries, timeout))
+            return self.pending[-1][0]
+        else:
+            return self.connections[0].query(queries, timeout)
+
+
+    def filterAnswers(self, message):
+        """
+        Extract results from the given message.
+
+        If the message was truncated, re-attempt the query over TCP and return
+        a Deferred which will fire with the results of that query.
+
+        If the message's result code is not L{dns.OK}, return a Failure
+        indicating the type of error which occurred.
+
+        Otherwise, return a three-tuple of lists containing the results from
+        the answers section, the authority section, and the additional section.
+        """
+        if message.trunc:
+            return self.queryTCP(message.queries).addCallback(self.filterAnswers)
+        if message.rCode != dns.OK:
+            return failure.Failure(self.exceptionForCode(message.rCode)(message))
+        return (message.answers, message.authority, message.additional)
+
+
+    def _lookup(self, name, cls, type, timeout):
+        """
+        Build a L{dns.Query} for the given parameters and dispatch it via UDP.
+
+        If this query is already outstanding, it will not be re-issued.
+        Instead, when the outstanding query receives a response, that response
+        will be re-used for this query as well.
+
+        @type name: C{str}
+        @type type: C{int}
+        @type cls: C{int}
+
+        @return: A L{Deferred} which fires with a three-tuple giving the
+            answer, authority, and additional sections of the response or with
+            a L{Failure} if the response code is anything other than C{dns.OK}.
+        """
+        key = (name, type, cls)
+        waiting = self._waiting.get(key)
+        if waiting is None:
+            self._waiting[key] = []
+            d = self.queryUDP([dns.Query(name, type, cls)], timeout)
+            def cbResult(result):
+                for d in self._waiting.pop(key):
+                    d.callback(result)
+                return result
+            d.addCallback(self.filterAnswers)
+            d.addBoth(cbResult)
+        else:
+            d = defer.Deferred()
+            waiting.append(d)
+        return d
+
+
+    # This one doesn't ever belong on UDP
+    def lookupZone(self, name, timeout = 10):
+        """
+        Perform an AXFR request. This is quite different from usual
+        DNS requests. See http://cr.yp.to/djbdns/axfr-notes.html for
+        more information.
+        """
+        address = self.pickServer()
+        if address is None:
+            return defer.fail(IOError('No domain name servers available'))
+        host, port = address
+        d = defer.Deferred()
+        controller = AXFRController(name, d)
+        factory = DNSClientFactory(controller, timeout)
+        factory.noisy = False #stfu
+
+        connector = self._reactor.connectTCP(host, port, factory)
+        controller.timeoutCall = self._reactor.callLater(
+            timeout or 10, self._timeoutZone, d, controller,
+            connector, timeout or 10)
+        return d.addCallback(self._cbLookupZone, connector)
+
+    def _timeoutZone(self, d, controller, connector, seconds):
+        connector.disconnect()
+        controller.timeoutCall = None
+        controller.deferred = None
+        d.errback(error.TimeoutError("Zone lookup timed out after %d seconds" % (seconds,)))
+
+    def _cbLookupZone(self, result, connector):
+        connector.disconnect()
+        return (result, [], [])
+
+
+class AXFRController:
+    timeoutCall = None
+
+    def __init__(self, name, deferred):
+        self.name = name
+        self.deferred = deferred
+        self.soa = None
+        self.records = []
+
+    def connectionMade(self, protocol):
+        # dig sets recursion-desired to 0, so I will too
+        message = dns.Message(protocol.pickID(), recDes=0)
+        message.queries = [dns.Query(self.name, dns.AXFR, dns.IN)]
+        protocol.writeMessage(message)
+
+
+    def connectionLost(self, protocol):
+        # XXX Do something here - see #3428
+        pass
+
+
+    def messageReceived(self, message, protocol):
+        # Caveat: We have to handle two cases: All records are in 1
+        # message, or all records are in N messages.
+
+        # According to http://cr.yp.to/djbdns/axfr-notes.html,
+        # 'authority' and 'additional' are always empty, and only
+        # 'answers' is present.
+        self.records.extend(message.answers)
+        if not self.records:
+            return
+        if not self.soa:
+            if self.records[0].type == dns.SOA:
+                #print "first SOA!"
+                self.soa = self.records[0]
+        if len(self.records) > 1 and self.records[-1].type == dns.SOA:
+            #print "It's the second SOA! We're done."
+            if self.timeoutCall is not None:
+                self.timeoutCall.cancel()
+                self.timeoutCall = None
+            if self.deferred is not None:
+                self.deferred.callback(self.records)
+                self.deferred = None
+
+
+
+from twisted.internet.base import ThreadedResolver as _ThreadedResolverImpl
+
+class ThreadedResolver(_ThreadedResolverImpl):
+    def __init__(self, reactor=None):
+        if reactor is None:
+            from twisted.internet import reactor
+        _ThreadedResolverImpl.__init__(self, reactor)
+        warnings.warn(
+            "twisted.names.client.ThreadedResolver is deprecated since "
+            "Twisted 9.0, use twisted.internet.base.ThreadedResolver "
+            "instead.",
+            category=DeprecationWarning, stacklevel=2)
+
+class DNSClientFactory(protocol.ClientFactory):
+    def __init__(self, controller, timeout = 10):
+        self.controller = controller
+        self.timeout = timeout
+
+
+    def clientConnectionLost(self, connector, reason):
+        pass
+
+
+    def buildProtocol(self, addr):
+        p = dns.DNSProtocol(self.controller)
+        p.factory = self
+        return p
+
+
+
+def createResolver(servers=None, resolvconf=None, hosts=None):
+    """
+    Create and return a Resolver.
+
+    @type servers: C{list} of C{(str, int)} or C{None}
+
+    @param servers: If not C{None}, interpreted as a list of domain name servers
+    to attempt to use. Each server is a tuple of address in C{str} dotted-quad
+    form and C{int} port number.
+
+    @type resolvconf: C{str} or C{None}
+    @param resolvconf: If not C{None}, on posix systems will be interpreted as
+    an alternate resolv.conf to use. Will do nothing on windows systems. If
+    C{None}, /etc/resolv.conf will be used.
+
+    @type hosts: C{str} or C{None}
+    @param hosts: If not C{None}, an alternate hosts file to use. If C{None}
+    on posix systems, /etc/hosts will be used. On windows, C:\windows\hosts
+    will be used.
+
+    @rtype: C{IResolver}
+    """
+    from twisted.names import resolve, cache, root, hosts as hostsModule
+    if platform.getType() == 'posix':
+        if resolvconf is None:
+            resolvconf = b'/etc/resolv.conf'
+        if hosts is None:
+            hosts = b'/etc/hosts'
+        theResolver = Resolver(resolvconf, servers)
+        hostResolver = hostsModule.Resolver(hosts)
+    else:
+        if hosts is None:
+            hosts = r'c:\windows\hosts'
+        from twisted.internet import reactor
+        bootstrap = _ThreadedResolverImpl(reactor)
+        hostResolver = hostsModule.Resolver(hosts)
+        theResolver = root.bootstrap(bootstrap)
+
+    L = [hostResolver, cache.CacheResolver(), theResolver]
+    return resolve.ResolverChain(L)
+
+theResolver = None
+def getResolver():
+    """
+    Get a Resolver instance.
+
+    Create twisted.names.client.theResolver if it is C{None}, and then return
+    that value.
+
+    @rtype: C{IResolver}
+    """
+    global theResolver
+    if theResolver is None:
+        try:
+            theResolver = createResolver()
+        except ValueError:
+            theResolver = createResolver(servers=[('127.0.0.1', 53)])
+    return theResolver
+
+def getHostByName(name, timeout=None, effort=10):
+    """
+    Resolve a name to a valid IPv4 or IPv6 address.
+
+    Will errback with C{DNSQueryTimeoutError} on a timeout, C{DomainError} or
+    C{AuthoritativeDomainError} (or subclasses) on other errors.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @type effort: C{int}
+    @param effort: How many CNAME and NS records to follow while
+    resolving this name.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().getHostByName(name, timeout, effort)
+
+def lookupAddress(name, timeout=None):
+    """
+    Perform an A record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupAddress(name, timeout)
+
+def lookupIPV6Address(name, timeout=None):
+    """
+    Perform an AAAA record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupIPV6Address(name, timeout)
+
+def lookupAddress6(name, timeout=None):
+    """
+    Perform an A6 record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupAddress6(name, timeout)
+
+def lookupMailExchange(name, timeout=None):
+    """
+    Perform an MX record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupMailExchange(name, timeout)
+
+def lookupNameservers(name, timeout=None):
+    """
+    Perform an NS record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupNameservers(name, timeout)
+
+def lookupCanonicalName(name, timeout=None):
+    """
+    Perform a CNAME record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupCanonicalName(name, timeout)
+
+def lookupMailBox(name, timeout=None):
+    """
+    Perform an MB record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupMailBox(name, timeout)
+
+def lookupMailGroup(name, timeout=None):
+    """
+    Perform an MG record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupMailGroup(name, timeout)
+
+def lookupMailRename(name, timeout=None):
+    """
+    Perform an MR record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupMailRename(name, timeout)
+
+def lookupPointer(name, timeout=None):
+    """
+    Perform a PTR record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupPointer(name, timeout)
+
+def lookupAuthority(name, timeout=None):
+    """
+    Perform an SOA record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupAuthority(name, timeout)
+
+def lookupNull(name, timeout=None):
+    """
+    Perform a NULL record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupNull(name, timeout)
+
+def lookupWellKnownServices(name, timeout=None):
+    """
+    Perform a WKS record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupWellKnownServices(name, timeout)
+
+def lookupService(name, timeout=None):
+    """
+    Perform an SRV record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupService(name, timeout)
+
+def lookupHostInfo(name, timeout=None):
+    """
+    Perform a HINFO record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupHostInfo(name, timeout)
+
+def lookupMailboxInfo(name, timeout=None):
+    """
+    Perform an MINFO record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupMailboxInfo(name, timeout)
+
+def lookupText(name, timeout=None):
+    """
+    Perform a TXT record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupText(name, timeout)
+
+def lookupSenderPolicy(name, timeout=None):
+    """
+    Perform an SPF record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupSenderPolicy(name, timeout)
+
+def lookupResponsibility(name, timeout=None):
+    """
+    Perform an RP record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupResponsibility(name, timeout)
+
+def lookupAFSDatabase(name, timeout=None):
+    """
+    Perform an AFSDB record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupAFSDatabase(name, timeout)
+
+def lookupZone(name, timeout=None):
+    """
+    Perform an AXFR record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: C{int}
+    @param timeout: When this timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    # XXX: timeout here is not a list of ints, it is a single int.
+    return getResolver().lookupZone(name, timeout)
+
+def lookupAllRecords(name, timeout=None):
+    """
+    Perform an ALL_RECORDS lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+    When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupAllRecords(name, timeout)
+
+
+
+def lookupNamingAuthorityPointer(name, timeout=None):
+    """
+    Perform a NAPTR record lookup.
+
+    @type name: C{str}
+    @param name: DNS name to resolve.
+
+    @type timeout: Sequence of C{int}
+    @param timeout: Number of seconds after which to reissue the query.
+        When the last timeout expires, the query is considered failed.
+
+    @rtype: C{Deferred}
+    """
+    return getResolver().lookupNamingAuthorityPointer(name, timeout)
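+
+
+# Illustrative sketch (added for exposition, not part of the API above): a
+# typical way to drive these helpers from a script.  The queried name and
+# the (1, 3, 11) timeout sequence are arbitrary choices.
+def _examplePointerLookup():
+    from twisted.internet import reactor
+
+    def printAnswers(result):
+        # Every lookup Deferred fires with (answers, authority, additional).
+        answers, authority, additional = result
+        for record in answers:
+            print(record)
+        reactor.stop()
+
+    d = lookupPointer('4.4.8.8.in-addr.arpa', timeout=(1, 3, 11))
+    d.addCallback(printAnswers)
+    d.addErrback(lambda err: reactor.stop())
+    reactor.run()
+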
diff --git a/ThirdParty/Twisted/twisted/names/common.py b/ThirdParty/Twisted/twisted/names/common.py
new file mode 100644
index 0000000..27cac35
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/common.py
@@ -0,0 +1,307 @@
+# -*- test-case-name: twisted.names.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Base functionality useful to various parts of Twisted Names.
+"""
+
+from __future__ import division, absolute_import
+
+import socket
+
+from twisted.names import dns
+from twisted.names.error import DNSFormatError, DNSServerError, DNSNameError
+from twisted.names.error import DNSNotImplementedError, DNSQueryRefusedError
+from twisted.names.error import DNSUnknownError
+
+from twisted.internet import defer, error
+from twisted.python import failure
+
+# Helpers for indexing the three-tuples that get thrown around by this code a
+# lot.
+_ANS, _AUTH, _ADD = range(3)
+
+EMPTY_RESULT = (), (), ()
+
+class ResolverBase:
+    """
+    L{ResolverBase} is a base class for implementations of
+    L{IResolver<twisted.internet.interfaces.IResolver>} which deals with a lot
+    of the boilerplate of implementing all of the lookup methods.
+
+    @cvar _errormap: A C{dict} mapping DNS protocol failure response codes
+        to exception classes which will be used to represent those failures.
+    """
+    _errormap = {
+        dns.EFORMAT: DNSFormatError,
+        dns.ESERVER: DNSServerError,
+        dns.ENAME: DNSNameError,
+        dns.ENOTIMP: DNSNotImplementedError,
+        dns.EREFUSED: DNSQueryRefusedError}
+
+    typeToMethod = None
+
+    def __init__(self):
+        self.typeToMethod = {}
+        for (k, v) in typeToMethod.items():
+            self.typeToMethod[k] = getattr(self, v)
+
+
+    def exceptionForCode(self, responseCode):
+        """
+        Convert a response code (one of the possible values of
+        L{dns.Message.rCode}) to an exception class representing it.
+
+        @since: 10.0
+        """
+        return self._errormap.get(responseCode, DNSUnknownError)
+
+
+    def query(self, query, timeout=None):
+        """
+        Dispatch C{query} to the method which can handle its type.
+
+        @param query: The DNS query being issued, to which a response is to be
+            generated.
+        @type query: L{twisted.names.dns.Query}
+
+        @return: A L{Deferred} which fires with a three-tuple of lists of
+            L{twisted.names.dns.RRHeader} instances.  The first element of the
+            tuple gives answers.  The second element of the tuple gives
+            authorities.  The third element of the tuple gives additional
+            information.  The L{Deferred} may instead fail with one of the
+            exceptions defined in L{twisted.names.error} or with
+            C{NotImplementedError}.
+        """
+        try:
+            method = self.typeToMethod[query.type]
+        except KeyError:
+            return defer.fail(failure.Failure(NotImplementedError(
+                        str(self.__class__) + " " + str(query.type))))
+        else:
+            return defer.maybeDeferred(method, query.name.name, timeout)
+
+
+    def _lookup(self, name, cls, type, timeout):
+        return defer.fail(NotImplementedError("ResolverBase._lookup"))
+
+    def lookupAddress(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupAddress
+        """
+        return self._lookup(name, dns.IN, dns.A, timeout)
+
+    def lookupIPV6Address(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupIPV6Address
+        """
+        return self._lookup(name, dns.IN, dns.AAAA, timeout)
+
+    def lookupAddress6(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupAddress6
+        """
+        return self._lookup(name, dns.IN, dns.A6, timeout)
+
+    def lookupMailExchange(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupMailExchange
+        """
+        return self._lookup(name, dns.IN, dns.MX, timeout)
+
+    def lookupNameservers(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupNameservers
+        """
+        return self._lookup(name, dns.IN, dns.NS, timeout)
+
+    def lookupCanonicalName(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupCanonicalName
+        """
+        return self._lookup(name, dns.IN, dns.CNAME, timeout)
+
+    def lookupMailBox(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupMailBox
+        """
+        return self._lookup(name, dns.IN, dns.MB, timeout)
+
+    def lookupMailGroup(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupMailGroup
+        """
+        return self._lookup(name, dns.IN, dns.MG, timeout)
+
+    def lookupMailRename(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupMailRename
+        """
+        return self._lookup(name, dns.IN, dns.MR, timeout)
+
+    def lookupPointer(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupPointer
+        """
+        return self._lookup(name, dns.IN, dns.PTR, timeout)
+
+    def lookupAuthority(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupAuthority
+        """
+        return self._lookup(name, dns.IN, dns.SOA, timeout)
+
+    def lookupNull(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupNull
+        """
+        return self._lookup(name, dns.IN, dns.NULL, timeout)
+
+    def lookupWellKnownServices(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupWellKnownServices
+        """
+        return self._lookup(name, dns.IN, dns.WKS, timeout)
+
+    def lookupService(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupService
+        """
+        return self._lookup(name, dns.IN, dns.SRV, timeout)
+
+    def lookupHostInfo(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupHostInfo
+        """
+        return self._lookup(name, dns.IN, dns.HINFO, timeout)
+
+    def lookupMailboxInfo(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupMailboxInfo
+        """
+        return self._lookup(name, dns.IN, dns.MINFO, timeout)
+
+    def lookupText(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupText
+        """
+        return self._lookup(name, dns.IN, dns.TXT, timeout)
+
+    def lookupSenderPolicy(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupSenderPolicy
+        """
+        return self._lookup(name, dns.IN, dns.SPF, timeout)
+
+    def lookupResponsibility(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupResponsibility
+        """
+        return self._lookup(name, dns.IN, dns.RP, timeout)
+
+    def lookupAFSDatabase(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupAFSDatabase
+        """
+        return self._lookup(name, dns.IN, dns.AFSDB, timeout)
+
+    def lookupZone(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupZone
+        """
+        return self._lookup(name, dns.IN, dns.AXFR, timeout)
+
+
+    def lookupNamingAuthorityPointer(self, name, timeout=None):
+        """
+        @see: twisted.names.client.lookupNamingAuthorityPointer
+        """
+        return self._lookup(name, dns.IN, dns.NAPTR, timeout)
+
+
+    def lookupAllRecords(self, name, timeout = None):
+        """
+        @see: twisted.names.client.lookupAllRecords
+        """
+        return self._lookup(name, dns.IN, dns.ALL_RECORDS, timeout)
+
+    def getHostByName(self, name, timeout = None, effort = 10):
+        """
+        @see: twisted.names.client.getHostByName
+        """
+        # XXX - respect timeout
+        return self.lookupAllRecords(name, timeout
+            ).addCallback(self._cbRecords, name, effort
+            )
+
+    def _cbRecords(self, records, name, effort):
+        (ans, auth, add) = records
+        result = extractRecord(self, dns.Name(name), ans + auth + add, effort)
+        if not result:
+            raise error.DNSLookupError(name)
+        return result
+
+
+def extractRecord(resolver, name, answers, level=10):
+    """
+    Pick the most useful address for C{name} out of C{answers}: prefer
+    A6/AAAA results when IPv6 formatting is available, fall back to A
+    records, follow CNAME chains, and as a last resort re-query a
+    nameserver mentioned in the answers.  C{level} bounds the recursion.
+    """
+    if not level:
+        return None
+    if hasattr(socket, 'inet_ntop'):
+        for r in answers:
+            if r.name == name and r.type == dns.A6:
+                return socket.inet_ntop(socket.AF_INET6, r.payload.address)
+        for r in answers:
+            if r.name == name and r.type == dns.AAAA:
+                return socket.inet_ntop(socket.AF_INET6, r.payload.address)
+    for r in answers:
+        if r.name == name and r.type == dns.A:
+            return socket.inet_ntop(socket.AF_INET, r.payload.address)
+    for r in answers:
+        if r.name == name and r.type == dns.CNAME:
+            result = extractRecord(
+                resolver, r.payload.name, answers, level - 1)
+            if not result:
+                return resolver.getHostByName(
+                    str(r.payload.name), effort=level - 1)
+            return result
+    # No answers, but maybe there's a hint at who we should be asking about
+    # this
+    for r in answers:
+        if r.type == dns.NS:
+            from twisted.names import client
+            r = client.Resolver(servers=[(str(r.payload.name), dns.PORT)])
+            return r.lookupAddress(str(name)
+                ).addCallback(
+                    lambda records: extractRecord(
+                        r, name,
+                        records[_ANS] + records[_AUTH] + records[_ADD],
+                        level - 1))
+
+
+
+typeToMethod = {
+    dns.A:     'lookupAddress',
+    dns.AAAA:  'lookupIPV6Address',
+    dns.A6:    'lookupAddress6',
+    dns.NS:    'lookupNameservers',
+    dns.CNAME: 'lookupCanonicalName',
+    dns.SOA:   'lookupAuthority',
+    dns.MB:    'lookupMailBox',
+    dns.MG:    'lookupMailGroup',
+    dns.MR:    'lookupMailRename',
+    dns.NULL:  'lookupNull',
+    dns.WKS:   'lookupWellKnownServices',
+    dns.PTR:   'lookupPointer',
+    dns.HINFO: 'lookupHostInfo',
+    dns.MINFO: 'lookupMailboxInfo',
+    dns.MX:    'lookupMailExchange',
+    dns.TXT:   'lookupText',
+    dns.SPF:   'lookupSenderPolicy',
+    dns.RP:    'lookupResponsibility',
+    dns.AFSDB: 'lookupAFSDatabase',
+    dns.SRV:   'lookupService',
+    dns.NAPTR: 'lookupNamingAuthorityPointer',
+    dns.AXFR:  'lookupZone',
+    dns.ALL_RECORDS: 'lookupAllRecords',
+}
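+
+
+# Illustrative sketch (added for exposition): a minimal ResolverBase
+# subclass only needs to supply _lookup(); query() then dispatches every
+# record type to it through the typeToMethod table above.  The fixed
+# 127.0.0.1 answer is an assumption for the example.
+class _ExampleStaticResolver(ResolverBase):
+    def _lookup(self, name, cls, type, timeout):
+        if type != dns.A:
+            return defer.fail(error.DNSLookupError(name))
+        answer = dns.RRHeader(name, type=dns.A, cls=cls, ttl=60,
+                              payload=dns.Record_A('127.0.0.1', ttl=60))
+        return defer.succeed(([answer], [], []))
+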
diff --git a/ThirdParty/Twisted/twisted/names/dns.py b/ThirdParty/Twisted/twisted/names/dns.py
new file mode 100644
index 0000000..f7d4221
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/dns.py
@@ -0,0 +1,2049 @@
+# -*- test-case-name: twisted.names.test.test_dns -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+DNS protocol implementation.
+
+Future Plans:
+    - Get rid of some toplevels, maybe.
+"""
+
+from __future__ import division, absolute_import
+
+__all__ = [
+    'IEncodable', 'IRecord',
+
+    'A', 'A6', 'AAAA', 'AFSDB', 'CNAME', 'DNAME', 'HINFO',
+    'MAILA', 'MAILB', 'MB', 'MD', 'MF', 'MG', 'MINFO', 'MR', 'MX',
+    'NAPTR', 'NS', 'NULL', 'PTR', 'RP', 'SOA', 'SPF', 'SRV', 'TXT', 'WKS',
+
+    'ANY', 'CH', 'CS', 'HS', 'IN',
+
+    'ALL_RECORDS', 'AXFR', 'IXFR',
+
+    'EFORMAT', 'ENAME', 'ENOTIMP', 'EREFUSED', 'ESERVER',
+
+    'Record_A', 'Record_A6', 'Record_AAAA', 'Record_AFSDB', 'Record_CNAME',
+    'Record_DNAME', 'Record_HINFO', 'Record_MB', 'Record_MD', 'Record_MF',
+    'Record_MG', 'Record_MINFO', 'Record_MR', 'Record_MX', 'Record_NAPTR',
+    'Record_NS', 'Record_NULL', 'Record_PTR', 'Record_RP', 'Record_SOA',
+    'Record_SPF', 'Record_SRV', 'Record_TXT', 'Record_WKS', 'UnknownRecord',
+
+    'QUERY_CLASSES', 'QUERY_TYPES', 'REV_CLASSES', 'REV_TYPES', 'EXT_QUERIES',
+
+    'Charstr', 'Message', 'Name', 'Query', 'RRHeader', 'SimpleRecord',
+    'DNSDatagramProtocol', 'DNSMixin', 'DNSProtocol',
+
+    'OK', 'OP_INVERSE', 'OP_NOTIFY', 'OP_QUERY', 'OP_STATUS', 'OP_UPDATE',
+    'PORT',
+
+    'AuthoritativeDomainError', 'DNSQueryTimeoutError', 'DomainError',
+    ]
+
+
+# System imports
+import warnings
+
+import struct, random, types, socket
+from itertools import chain
+
+from io import BytesIO
+
+AF_INET6 = socket.AF_INET6
+
+from zope.interface import implementer, Interface, Attribute
+
+
+# Twisted imports
+from twisted.internet import protocol, defer
+from twisted.internet.error import CannotListenError
+from twisted.python import log, failure
+from twisted.python import _utilpy3 as tputil
+from twisted.python import randbytes
+from twisted.python.compat import _PY3, unicode, comparable, cmp, nativeString
+
+
+if _PY3:
+    def _ord2bytes(ordinal):
+        """
+        Construct a bytes object representing a single byte with the given
+        ordinal value.
+
+        @type ordinal: C{int}
+        @rtype: C{bytes}
+        """
+        return bytes([ordinal])
+
+
+    def _nicebytes(bytes):
+        """
+        Represent a mostly textual bytes object in a way suitable for presentation
+        to an end user.
+
+        @param bytes: The bytes to represent.
+        @rtype: C{str}
+        """
+        return repr(bytes)[1:]
+
+
+    def _nicebyteslist(list):
+        """
+        Represent a list of mostly textual bytes objects in a way suitable for
+        presentation to an end user.
+
+        @param list: The list of bytes to represent.
+        @rtype: C{str}
+        """
+        return '[%s]' % (
+            ', '.join([_nicebytes(b) for b in list]),)
+else:
+    _ord2bytes = chr
+    _nicebytes = _nicebyteslist = repr
+
+
+
+def randomSource():
+    """
+    Wrapper around L{randbytes.secureRandom} which returns a random unsigned
+    16-bit integer built from two random bytes.
+    """
+    return struct.unpack('H', randbytes.secureRandom(2, fallback=True))[0]
+
+
+PORT = 53
+
+(A, NS, MD, MF, CNAME, SOA, MB, MG, MR, NULL, WKS, PTR, HINFO, MINFO, MX, TXT,
+ RP, AFSDB) = range(1, 19)
+AAAA = 28
+SRV = 33
+NAPTR = 35
+A6 = 38
+DNAME = 39
+SPF = 99
+
+QUERY_TYPES = {
+    A: 'A',
+    NS: 'NS',
+    MD: 'MD',
+    MF: 'MF',
+    CNAME: 'CNAME',
+    SOA: 'SOA',
+    MB: 'MB',
+    MG: 'MG',
+    MR: 'MR',
+    NULL: 'NULL',
+    WKS: 'WKS',
+    PTR: 'PTR',
+    HINFO: 'HINFO',
+    MINFO: 'MINFO',
+    MX: 'MX',
+    TXT: 'TXT',
+    RP: 'RP',
+    AFSDB: 'AFSDB',
+
+    # Type codes 19 through 27 are not included here.
+
+    AAAA: 'AAAA',
+    SRV: 'SRV',
+    NAPTR: 'NAPTR',
+    A6: 'A6',
+    DNAME: 'DNAME',
+    SPF: 'SPF'
+}
+
+IXFR, AXFR, MAILB, MAILA, ALL_RECORDS = range(251, 256)
+
+# "Extended" queries (Hey, half of these are deprecated, good job)
+EXT_QUERIES = {
+    IXFR: 'IXFR',
+    AXFR: 'AXFR',
+    MAILB: 'MAILB',
+    MAILA: 'MAILA',
+    ALL_RECORDS: 'ALL_RECORDS'
+}
+
+REV_TYPES = dict([
+    (v, k) for (k, v) in chain(QUERY_TYPES.items(), EXT_QUERIES.items())
+])
+
+IN, CS, CH, HS = range(1, 5)
+ANY = 255
+
+QUERY_CLASSES = {
+    IN: 'IN',
+    CS: 'CS',
+    CH: 'CH',
+    HS: 'HS',
+    ANY: 'ANY'
+}
+REV_CLASSES = dict([
+    (v, k) for (k, v) in QUERY_CLASSES.items()
+])
+
+
+# Opcodes
+OP_QUERY, OP_INVERSE, OP_STATUS = range(3)
+OP_NOTIFY = 4 # RFC 1996
+OP_UPDATE = 5 # RFC 2136
+
+
+# Response Codes
+OK, EFORMAT, ESERVER, ENAME, ENOTIMP, EREFUSED = range(6)
+
+class IRecord(Interface):
+    """
+    A single entry in a zone of authority.
+    """
+
+    TYPE = Attribute("An indicator of what kind of record this is.")
+
+
+# Backwards compatibility aliases - these should be deprecated or something I
+# suppose. -exarkun
+from twisted.names.error import DomainError, AuthoritativeDomainError
+from twisted.names.error import DNSQueryTimeoutError
+
+
+def str2time(s):
+    """
+    Parse a string description of an interval into an integer number of seconds.
+
+    @param s: An interval definition constructed as an interval duration
+        followed by an interval unit.  An interval duration is a base ten
+        representation of an integer.  An interval unit is one of the following
+        letters: S (seconds), M (minutes), H (hours), D (days), W (weeks), or Y
+        (years).  For example: C{"3S"} indicates an interval of three seconds;
+        C{"5D"} indicates an interval of five days.  Alternatively, C{s} may be
+        any non-string and it will be returned unmodified.
+    @type s: text string (C{str}) for parsing; anything else for passthrough.
+
+    @return: an C{int} giving the interval represented by the string C{s}, or
+        whatever C{s} is if it is not a string.
+    """
+    suffixes = (
+        ('S', 1), ('M', 60), ('H', 60 * 60), ('D', 60 * 60 * 24),
+        ('W', 60 * 60 * 24 * 7), ('Y', 60 * 60 * 24 * 365)
+    )
+    if isinstance(s, str):
+        s = s.upper().strip()
+        for (suff, mult) in suffixes:
+            if s.endswith(suff):
+                return int(float(s[:-1]) * mult)
+        try:
+            s = int(s)
+        except ValueError:
+            raise ValueError("Invalid time interval specifier: " + s)
+    return s
+
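+
+# For example, str2time("1H") == 3600, str2time("2D") == 172800, and a
+# non-string argument such as 300 is returned unchanged.
+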
+
+def readPrecisely(file, l):
+    buff = file.read(l)
+    if len(buff) < l:
+        raise EOFError
+    return buff
+
+
+class IEncodable(Interface):
+    """
+    Interface for something which can be encoded to and decoded
+    from a file object.
+    """
+
+    def encode(strio, compDict = None):
+        """
+        Write a representation of this object to the given
+        file object.
+
+        @type strio: File-like object
+        @param strio: The stream to which to write bytes
+
+        @type compDict: C{dict} or C{None}
+        @param compDict: A dictionary of backreference addresses that have
+        already been written to this stream and that may be used for
+        compression.
+        """
+
+    def decode(strio, length = None):
+        """
+        Reconstruct an object from data read from the given
+        file object.
+
+        @type strio: File-like object
+        @param strio: The stream from which bytes may be read
+
+        @type length: C{int} or C{None}
+        @param length: The number of bytes in this RDATA field.  Most
+        implementations can ignore this value.  Only in the case of
+        records similar to TXT where the total length is in no way
+        encoded in the data is it necessary.
+        """
+
+
+
+@implementer(IEncodable)
+class Charstr(object):
+
+    def __init__(self, string=b''):
+        if not isinstance(string, bytes):
+            raise ValueError("%r is not a byte string" % (string,))
+        self.string = string
+
+
+    def encode(self, strio, compDict=None):
+        """
+        Encode this Character string into the appropriate byte format.
+
+        @type strio: file
+        @param strio: The byte representation of this Charstr will be written
+            to this file.
+        """
+        string = self.string
+        ind = len(string)
+        strio.write(_ord2bytes(ind))
+        strio.write(string)
+
+
+    def decode(self, strio, length=None):
+        """
+        Decode a byte string into this Charstr.
+
+        @type strio: file
+        @param strio: Bytes will be read from this file until the full string
+            is decoded.
+
+        @raise EOFError: Raised when there are not enough bytes available from
+            C{strio}.
+        """
+        self.string = b''
+        l = ord(readPrecisely(strio, 1))
+        self.string = readPrecisely(strio, l)
+
+
+    def __eq__(self, other):
+        if isinstance(other, Charstr):
+            return self.string == other.string
+        return NotImplemented
+
+
+    def __ne__(self, other):
+        if isinstance(other, Charstr):
+            return self.string != other.string
+        return NotImplemented
+
+
+    def __hash__(self):
+        return hash(self.string)
+
+
+    def __str__(self):
+        """
+        Represent this L{Charstr} instance by its string value.
+        """
+        return nativeString(self.string)
+
+
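+# Illustrative sketch (added for exposition): a Charstr round-trips through
+# the wire format via an in-memory buffer; b'example' is an arbitrary
+# payload.
+def _exampleCharstrRoundTrip():
+    buf = BytesIO()
+    Charstr(b'example').encode(buf)    # one length byte, then the payload
+    buf.seek(0)
+    decoded = Charstr()
+    decoded.decode(buf)                # reads the length byte, then the payload
+    return decoded.string              # b'example'
+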
+
+@implementer(IEncodable)
+class Name:
+    """
+    A name in the domain name system, made up of multiple labels.  For example,
+    I{twistedmatrix.com}.
+
+    @ivar name: A byte string giving the name.
+    @type name: C{bytes}
+    """
+    def __init__(self, name=b''):
+        if not isinstance(name, bytes):
+            raise TypeError("%r is not a byte string" % (name,))
+        self.name = name
+
+
+    def encode(self, strio, compDict=None):
+        """
+        Encode this Name into the appropriate byte format.
+
+        @type strio: file
+        @param strio: The byte representation of this Name will be written to
+        this file.
+
+        @type compDict: dict
+        @param compDict: dictionary of Names that have already been encoded
+        and whose addresses may be backreferenced by this Name (for the purpose
+        of reducing the message size).
+        """
+        name = self.name
+        while name:
+            if compDict is not None:
+                if name in compDict:
+                    strio.write(
+                        struct.pack("!H", 0xc000 | compDict[name]))
+                    return
+                else:
+                    compDict[name] = strio.tell() + Message.headerSize
+            ind = name.find(b'.')
+            if ind > 0:
+                label, name = name[:ind], name[ind + 1:]
+            else:
+                # This is the last label, end the loop after handling it.
+                label = name
+                name = None
+                ind = len(label)
+            strio.write(_ord2bytes(ind))
+            strio.write(label)
+        strio.write(b'\x00')
+
+
+    def decode(self, strio, length=None):
+        """
+        Decode a byte string into this Name.
+
+        @type strio: file
+        @param strio: Bytes will be read from this file until the full Name
+        is decoded.
+
+        @raise EOFError: Raised when there are not enough bytes available
+        from C{strio}.
+
+        @raise ValueError: Raised when the name cannot be decoded (for example,
+            because it contains a loop).
+        """
+        visited = set()
+        self.name = b''
+        off = 0
+        while 1:
+            l = ord(readPrecisely(strio, 1))
+            if l == 0:
+                if off > 0:
+                    strio.seek(off)
+                return
+            if (l >> 6) == 3:
+                new_off = ((l&63) << 8
+                            | ord(readPrecisely(strio, 1)))
+                if new_off in visited:
+                    raise ValueError("Compression loop in encoded name")
+                visited.add(new_off)
+                if off == 0:
+                    off = strio.tell()
+                strio.seek(new_off)
+                continue
+            label = readPrecisely(strio, l)
+            if self.name == b'':
+                self.name = label
+            else:
+                self.name = self.name + b'.' + label
+
+    def __eq__(self, other):
+        if isinstance(other, Name):
+            return self.name == other.name
+        return NotImplemented
+
+
+    def __ne__(self, other):
+        if isinstance(other, Name):
+            return self.name != other.name
+        return NotImplemented
+
+
+    def __hash__(self):
+        return hash(self.name)
+
+
+    def __str__(self):
+        """
+        Represent this L{Name} instance by its string name.
+        """
+        return nativeString(self.name)
+
+
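+# Illustrative sketch (added for exposition): a Name encodes as a sequence
+# of length-prefixed labels terminated by a zero byte and decodes back to
+# the same dotted byte string.
+def _exampleNameRoundTrip():
+    buf = BytesIO()
+    Name(b'www.example.com').encode(buf)
+    buf.seek(0)
+    decoded = Name()
+    decoded.decode(buf)
+    return decoded.name                # b'www.example.com'
+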
+
+@comparable
+@implementer(IEncodable)
+class Query:
+    """
+    Represent a single DNS query.
+
+    @ivar name: The name about which this query is requesting information.
+    @ivar type: The query type.
+    @ivar cls: The query class.
+    """
+    name = None
+    type = None
+    cls = None
+
+    def __init__(self, name=b'', type=A, cls=IN):
+        """
+        @type name: C{bytes}
+        @param name: The name about which to request information.
+
+        @type type: C{int}
+        @param type: The query type.
+
+        @type cls: C{int}
+        @param cls: The query class.
+        """
+        self.name = Name(name)
+        self.type = type
+        self.cls = cls
+
+
+    def encode(self, strio, compDict=None):
+        self.name.encode(strio, compDict)
+        strio.write(struct.pack("!HH", self.type, self.cls))
+
+
+    def decode(self, strio, length = None):
+        self.name.decode(strio)
+        buff = readPrecisely(strio, 4)
+        self.type, self.cls = struct.unpack("!HH", buff)
+
+
+    def __hash__(self):
+        return hash((str(self.name).lower(), self.type, self.cls))
+
+
+    def __cmp__(self, other):
+        if isinstance(other, Query):
+            return cmp(
+                (str(self.name).lower(), self.type, self.cls),
+                (str(other.name).lower(), other.type, other.cls))
+        return NotImplemented
+
+
+    def __str__(self):
+        t = QUERY_TYPES.get(self.type, EXT_QUERIES.get(self.type, 'UNKNOWN (%d)' % self.type))
+        c = QUERY_CLASSES.get(self.cls, 'UNKNOWN (%d)' % self.cls)
+        return '<Query %s %s %s>' % (self.name, t, c)
+
+
+    def __repr__(self):
+        return 'Query(%r, %r, %r)' % (str(self.name), self.type, self.cls)
+
+
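+# Illustrative sketch (added for exposition): a Query is just a Name plus a
+# type and a class packed as two unsigned shorts; example.com/A/IN is an
+# arbitrary choice.
+def _exampleQueryEncode():
+    buf = BytesIO()
+    Query(b'example.com', A, IN).encode(buf)
+    return buf.getvalue()              # encoded name followed by '!HH' type, class
+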
+
+@implementer(IEncodable)
+class RRHeader(tputil.FancyEqMixin):
+    """
+    A resource record header.
+
+    @cvar fmt: C{str} specifying the byte format of an RR.
+
+    @ivar name: The name about which this reply contains information.
+    @ivar type: The query type of the original request.
+    @ivar cls: The query class of the original request.
+    @ivar ttl: The time-to-live for this record.
+    @ivar payload: An object that implements the IEncodable interface
+
+    @ivar auth: A C{bool} indicating whether this C{RRHeader} was parsed from an
+        authoritative message.
+    """
+    compareAttributes = ('name', 'type', 'cls', 'ttl', 'payload', 'auth')
+
+    fmt = "!HHIH"
+
+    name = None
+    type = None
+    cls = None
+    ttl = None
+    payload = None
+    rdlength = None
+
+    cachedResponse = None
+
+    def __init__(self, name=b'', type=A, cls=IN, ttl=0, payload=None, auth=False):
+        """
+        @type name: C{bytes}
+        @param name: The name about which this reply contains information.
+
+        @type type: C{int}
+        @param type: The query type.
+
+        @type cls: C{int}
+        @param cls: The query class.
+
+        @type ttl: C{int}
+        @param ttl: Time to live for this record.
+
+        @type payload: An object implementing C{IEncodable}
+        @param payload: A Query Type specific data object.
+
+        @raises ValueError: if the ttl is negative.
+        """
+        assert (payload is None) or isinstance(payload, UnknownRecord) or (payload.TYPE == type)
+
+        if ttl < 0:
+            raise ValueError("TTL cannot be negative")
+
+        self.name = Name(name)
+        self.type = type
+        self.cls = cls
+        self.ttl = ttl
+        self.payload = payload
+        self.auth = auth
+
+
+    def encode(self, strio, compDict=None):
+        self.name.encode(strio, compDict)
+        strio.write(struct.pack(self.fmt, self.type, self.cls, self.ttl, 0))
+        if self.payload:
+            prefix = strio.tell()
+            self.payload.encode(strio, compDict)
+            aft = strio.tell()
+            strio.seek(prefix - 2, 0)
+            strio.write(struct.pack('!H', aft - prefix))
+            strio.seek(aft, 0)
+
+
+    def decode(self, strio, length = None):
+        self.name.decode(strio)
+        l = struct.calcsize(self.fmt)
+        buff = readPrecisely(strio, l)
+        r = struct.unpack(self.fmt, buff)
+        self.type, self.cls, self.ttl, self.rdlength = r
+
+
+    def isAuthoritative(self):
+        return self.auth
+
+
+    def __str__(self):
+        t = QUERY_TYPES.get(self.type, EXT_QUERIES.get(self.type, 'UNKNOWN (%d)' % self.type))
+        c = QUERY_CLASSES.get(self.cls, 'UNKNOWN (%d)' % self.cls)
+        return '<RR name=%s type=%s class=%s ttl=%ds auth=%s>' % (self.name, t, c, self.ttl, self.auth and 'True' or 'False')
+
+
+    __repr__ = __str__
+
+
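+# Illustrative sketch (added for exposition): an RRHeader pairs a name,
+# type, class and TTL with a payload record whose TYPE must match the
+# header type; the address and TTL below are arbitrary.
+def _exampleRRHeader():
+    return RRHeader(b'example.com', type=A, cls=IN, ttl=300,
+                    payload=Record_A('93.184.216.34', ttl=300))
+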
+
+@implementer(IEncodable, IRecord)
+class SimpleRecord(tputil.FancyStrMixin, tputil.FancyEqMixin):
+    """
+    A Resource Record which consists of a single RFC 1035 domain-name.
+
+    @type name: L{Name}
+    @ivar name: The name associated with this record.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+    """
+    showAttributes = (('name', 'name', '%s'), 'ttl')
+    compareAttributes = ('name', 'ttl')
+
+    TYPE = None
+    name = None
+
+    def __init__(self, name=b'', ttl=None):
+        self.name = Name(name)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        self.name.encode(strio, compDict)
+
+
+    def decode(self, strio, length = None):
+        self.name = Name()
+        self.name.decode(strio)
+
+
+    def __hash__(self):
+        return hash(self.name)
+
+
+# Kinds of RRs - oh my!
+class Record_NS(SimpleRecord):
+    """
+    An authoritative nameserver.
+    """
+    TYPE = NS
+    fancybasename = 'NS'
+
+
+
+class Record_MD(SimpleRecord):
+    """
+    A mail destination.
+
+    This record type is obsolete.
+
+    @see: L{Record_MX}
+    """
+    TYPE = MD
+    fancybasename = 'MD'
+
+
+
+class Record_MF(SimpleRecord):
+    """
+    A mail forwarder.
+
+    This record type is obsolete.
+
+    @see: L{Record_MX}
+    """
+    TYPE = MF
+    fancybasename = 'MF'
+
+
+
+class Record_CNAME(SimpleRecord):
+    """
+    The canonical name for an alias.
+    """
+    TYPE = CNAME
+    fancybasename = 'CNAME'
+
+
+
+class Record_MB(SimpleRecord):
+    """
+    A mailbox domain name.
+
+    This is an experimental record type.
+    """
+    TYPE = MB
+    fancybasename = 'MB'
+
+
+
+class Record_MG(SimpleRecord):
+    """
+    A mail group member.
+
+    This is an experimental record type.
+    """
+    TYPE = MG
+    fancybasename = 'MG'
+
+
+
+class Record_MR(SimpleRecord):
+    """
+    A mail rename domain name.
+
+    This is an experimental record type.
+    """
+    TYPE = MR
+    fancybasename = 'MR'
+
+
+
+class Record_PTR(SimpleRecord):
+    """
+    A domain name pointer.
+    """
+    TYPE = PTR
+    fancybasename = 'PTR'
+
+
+
+class Record_DNAME(SimpleRecord):
+    """
+    A non-terminal DNS name redirection.
+
+    This record type provides the capability to map an entire subtree of the
+    DNS name space to another domain.  It differs from the CNAME record which
+    maps a single node of the name space.
+
+    @see: U{http://www.faqs.org/rfcs/rfc2672.html}
+    @see: U{http://www.faqs.org/rfcs/rfc3363.html}
+    """
+    TYPE = DNAME
+    fancybasename = 'DNAME'
+
+
+
+@implementer(IEncodable, IRecord)
+class Record_A(tputil.FancyEqMixin):
+    """
+    An IPv4 host address.
+
+    @type address: C{str}
+    @ivar address: The packed network-order representation of the IPv4 address
+        associated with this record.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+    """
+    compareAttributes = ('address', 'ttl')
+
+    TYPE = A
+    address = None
+
+    def __init__(self, address='0.0.0.0', ttl=None):
+        address = socket.inet_aton(address)
+        self.address = address
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        strio.write(self.address)
+
+
+    def decode(self, strio, length = None):
+        self.address = readPrecisely(strio, 4)
+
+
+    def __hash__(self):
+        return hash(self.address)
+
+
+    def __str__(self):
+        return '<A address=%s ttl=%s>' % (self.dottedQuad(), self.ttl)
+    __repr__ = __str__
+
+
+    def dottedQuad(self):
+        return socket.inet_ntoa(self.address)
+
+
+
+@implementer(IEncodable, IRecord)
+class Record_SOA(tputil.FancyEqMixin, tputil.FancyStrMixin):
+    """
+    Marks the start of a zone of authority.
+
+    This record describes parameters which are shared by all records within a
+    particular zone.
+
+    @type mname: L{Name}
+    @ivar mname: The domain-name of the name server that was the original or
+        primary source of data for this zone.
+
+    @type rname: L{Name}
+    @ivar rname: A domain-name which specifies the mailbox of the person
+        responsible for this zone.
+
+    @type serial: C{int}
+    @ivar serial: The unsigned 32 bit version number of the original copy of
+        the zone.  Zone transfers preserve this value.  This value wraps and
+        should be compared using sequence space arithmetic.
+
+    @type refresh: C{int}
+    @ivar refresh: A 32 bit time interval before the zone should be refreshed.
+
+    @type minimum: C{int}
+    @ivar minimum: The unsigned 32 bit minimum TTL field that should be
+        exported with any RR from this zone.
+
+    @type expire: C{int}
+    @ivar expire: A 32 bit time value that specifies the upper limit on the
+        time interval that can elapse before the zone is no longer
+        authoritative.
+
+    @type retry: C{int}
+    @ivar retry: A 32 bit time interval that should elapse before a failed
+        refresh should be retried.
+
+    @type ttl: C{int}
+    @ivar ttl: The default TTL to use for records served from this zone.
+    """
+    fancybasename = 'SOA'
+    compareAttributes = ('serial', 'mname', 'rname', 'refresh', 'expire', 'retry', 'minimum', 'ttl')
+    showAttributes = (('mname', 'mname', '%s'), ('rname', 'rname', '%s'), 'serial', 'refresh', 'retry', 'expire', 'minimum', 'ttl')
+
+    TYPE = SOA
+
+    def __init__(self, mname=b'', rname=b'', serial=0, refresh=0, retry=0,
+                 expire=0, minimum=0, ttl=None):
+        self.mname, self.rname = Name(mname), Name(rname)
+        self.serial, self.refresh = str2time(serial), str2time(refresh)
+        self.minimum, self.expire = str2time(minimum), str2time(expire)
+        self.retry = str2time(retry)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        self.mname.encode(strio, compDict)
+        self.rname.encode(strio, compDict)
+        strio.write(
+            struct.pack(
+                '!LlllL',
+                self.serial, self.refresh, self.retry, self.expire,
+                self.minimum
+            )
+        )
+
+
+    def decode(self, strio, length = None):
+        self.mname, self.rname = Name(), Name()
+        self.mname.decode(strio)
+        self.rname.decode(strio)
+        r = struct.unpack('!LlllL', readPrecisely(strio, 20))
+        self.serial, self.refresh, self.retry, self.expire, self.minimum = r
+
+
+    def __hash__(self):
+        return hash((
+            self.serial, self.mname, self.rname,
+            self.refresh, self.expire, self.retry
+        ))
+
+
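+# Illustrative sketch (added for exposition): SOA intervals accept the
+# str2time() suffixes, so refresh/retry/expire/minimum can be given as
+# readable strings; all values below are arbitrary.
+def _exampleSOA():
+    return Record_SOA(mname=b'ns1.example.com',
+                      rname=b'hostmaster.example.com',
+                      serial=2014051701, refresh='1H', retry='15M',
+                      expire='1W', minimum='1H', ttl='1D')
+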
+
+@implementer(IEncodable, IRecord)
+class Record_NULL(tputil.FancyStrMixin, tputil.FancyEqMixin):
+    """
+    A null record.
+
+    This is an experimental record type.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+    """
+    fancybasename = 'NULL'
+    showAttributes = (('payload', _nicebytes), 'ttl')
+    compareAttributes = ('payload', 'ttl')
+
+    TYPE = NULL
+
+    def __init__(self, payload=None, ttl=None):
+        self.payload = payload
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        strio.write(self.payload)
+
+
+    def decode(self, strio, length = None):
+        self.payload = readPrecisely(strio, length)
+
+
+    def __hash__(self):
+        return hash(self.payload)
+
+
+
+@implementer(IEncodable, IRecord)
+class Record_WKS(tputil.FancyEqMixin, tputil.FancyStrMixin):
+    """
+    A well known service description.
+
+    This record type is obsolete.  See L{Record_SRV}.
+
+    @type address: C{str}
+    @ivar address: The packed network-order representation of the IPv4 address
+        associated with this record.
+
+    @type protocol: C{int}
+    @ivar protocol: The 8 bit IP protocol number for which this service map is
+        relevant.
+
+    @type map: C{str}
+    @ivar map: A bitvector indicating the services available at the specified
+        address.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+    """
+    fancybasename = "WKS"
+    compareAttributes = ('address', 'protocol', 'map', 'ttl')
+    showAttributes = [('_address', 'address', '%s'), 'protocol', 'ttl']
+
+    TYPE = WKS
+
+    _address = property(lambda self: socket.inet_ntoa(self.address))
+
+    def __init__(self, address='0.0.0.0', protocol=0, map='', ttl=None):
+        self.address = socket.inet_aton(address)
+        self.protocol, self.map = protocol, map
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        strio.write(self.address)
+        strio.write(struct.pack('!B', self.protocol))
+        strio.write(self.map)
+
+
+    def decode(self, strio, length = None):
+        self.address = readPrecisely(strio, 4)
+        self.protocol = struct.unpack('!B', readPrecisely(strio, 1))[0]
+        self.map = readPrecisely(strio, length - 5)
+
+
+    def __hash__(self):
+        return hash((self.address, self.protocol, self.map))
+
+
+
+@implementer(IEncodable, IRecord)
+class Record_AAAA(tputil.FancyEqMixin, tputil.FancyStrMixin):
+    """
+    An IPv6 host address.
+
+    @type address: C{str}
+    @ivar address: The packed network-order representation of the IPv6 address
+        associated with this record.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+
+    @see: U{http://www.faqs.org/rfcs/rfc1886.html}
+    """
+    TYPE = AAAA
+
+    fancybasename = 'AAAA'
+    showAttributes = (('_address', 'address', '%s'), 'ttl')
+    compareAttributes = ('address', 'ttl')
+
+    _address = property(lambda self: socket.inet_ntop(AF_INET6, self.address))
+
+    def __init__(self, address='::', ttl=None):
+        self.address = socket.inet_pton(AF_INET6, address)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        strio.write(self.address)
+
+
+    def decode(self, strio, length = None):
+        self.address = readPrecisely(strio, 16)
+
+
+    def __hash__(self):
+        return hash(self.address)
+
+
+
+@implementer(IEncodable, IRecord)
+class Record_A6(tputil.FancyStrMixin, tputil.FancyEqMixin):
+    """
+    An IPv6 address.
+
+    This is an experimental record type.
+
+    @type prefixLen: C{int}
+    @ivar prefixLen: The length of the suffix.
+
+    @type suffix: C{str}
+    @ivar suffix: An IPv6 address suffix in network order.
+
+    @type prefix: L{Name}
+    @ivar prefix: If specified, a name which will be used as a prefix for other
+        A6 records.
+
+    @type bytes: C{int}
+    @ivar bytes: The length of the prefix.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+
+    @see: U{http://www.faqs.org/rfcs/rfc2874.html}
+    @see: U{http://www.faqs.org/rfcs/rfc3363.html}
+    @see: U{http://www.faqs.org/rfcs/rfc3364.html}
+    """
+    TYPE = A6
+
+    fancybasename = 'A6'
+    showAttributes = (('_suffix', 'suffix', '%s'), ('prefix', 'prefix', '%s'), 'ttl')
+    compareAttributes = ('prefixLen', 'prefix', 'suffix', 'ttl')
+
+    _suffix = property(lambda self: socket.inet_ntop(AF_INET6, self.suffix))
+
+    def __init__(self, prefixLen=0, suffix='::', prefix=b'', ttl=None):
+        self.prefixLen = prefixLen
+        self.suffix = socket.inet_pton(AF_INET6, suffix)
+        self.prefix = Name(prefix)
+        self.bytes = int((128 - self.prefixLen) / 8.0)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        strio.write(struct.pack('!B', self.prefixLen))
+        if self.bytes:
+            strio.write(self.suffix[-self.bytes:])
+        if self.prefixLen:
+            # This may not be compressed
+            self.prefix.encode(strio, None)
+
+
+    def decode(self, strio, length = None):
+        self.prefixLen = struct.unpack('!B', readPrecisely(strio, 1))[0]
+        self.bytes = int((128 - self.prefixLen) / 8.0)
+        if self.bytes:
+            self.suffix = b'\x00' * (16 - self.bytes) + readPrecisely(strio, self.bytes)
+        if self.prefixLen:
+            self.prefix.decode(strio)
+
+
+    def __eq__(self, other):
+        if isinstance(other, Record_A6):
+            return (self.prefixLen == other.prefixLen and
+                    self.suffix[-self.bytes:] == other.suffix[-self.bytes:] and
+                    self.prefix == other.prefix and
+                    self.ttl == other.ttl)
+        return NotImplemented
+
+
+    def __hash__(self):
+        return hash((self.prefixLen, self.suffix[-self.bytes:], self.prefix))
+
+
+    def __str__(self):
+        return '<A6 %s %s (%d) ttl=%s>' % (
+            self.prefix,
+            socket.inet_ntop(AF_INET6, self.suffix),
+            self.prefixLen, self.ttl
+        )
+
+
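+# Illustrative sketch (added for exposition): with prefixLen=64 only the low
+# eight suffix bytes go on the wire and the upper half is taken from the
+# record named by the prefix; all values below are arbitrary.
+def _exampleA6():
+    return Record_A6(prefixLen=64, suffix='::1:2:3:4',
+                     prefix=b'subnet.example.com', ttl='1H')
+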
+
+@implementer(IEncodable, IRecord)
+class Record_SRV(tputil.FancyEqMixin, tputil.FancyStrMixin):
+    """
+    The location of the server(s) for a specific protocol and domain.
+
+    This is an experimental record type.
+
+    @type priority: C{int}
+    @ivar priority: The priority of this target host.  A client MUST attempt to
+        contact the target host with the lowest-numbered priority it can reach;
+        target hosts with the same priority SHOULD be tried in an order defined
+        by the weight field.
+
+    @type weight: C{int}
+    @ivar weight: Specifies a relative weight for entries with the same
+        priority. Larger weights SHOULD be given a proportionately higher
+        probability of being selected.
+
+    @type port: C{int}
+    @ivar port: The port on this target host of this service.
+
+    @type target: L{Name}
+    @ivar target: The domain name of the target host.  There MUST be one or
+        more address records for this name, the name MUST NOT be an alias (in
+        the sense of RFC 1034 or RFC 2181).  Implementors are urged, but not
+        required, to return the address record(s) in the Additional Data
+        section.  Unless and until permitted by future standards action, name
+        compression is not to be used for this field.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+
+    @see: U{http://www.faqs.org/rfcs/rfc2782.html}
+    """
+    TYPE = SRV
+
+    fancybasename = 'SRV'
+    compareAttributes = ('priority', 'weight', 'target', 'port', 'ttl')
+    showAttributes = ('priority', 'weight', ('target', 'target', '%s'), 'port', 'ttl')
+
+    def __init__(self, priority=0, weight=0, port=0, target=b'', ttl=None):
+        self.priority = int(priority)
+        self.weight = int(weight)
+        self.port = int(port)
+        self.target = Name(target)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        strio.write(struct.pack('!HHH', self.priority, self.weight, self.port))
+        # This can't be compressed
+        self.target.encode(strio, None)
+
+
+    def decode(self, strio, length = None):
+        r = struct.unpack('!HHH', readPrecisely(strio, struct.calcsize('!HHH')))
+        self.priority, self.weight, self.port = r
+        self.target = Name()
+        self.target.decode(strio)
+
+
+    def __hash__(self):
+        return hash((self.priority, self.weight, self.port, self.target))
+
+
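+# Illustrative sketch (added for exposition): an SRV record for a SIP
+# service; clients try the lowest priority first and use weight to balance
+# among equal priorities.  All values below are arbitrary.
+def _exampleSRV():
+    return Record_SRV(priority=10, weight=60, port=5060,
+                      target=b'sipserver.example.com', ttl='5M')
+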
+
+@implementer(IEncodable, IRecord)
+class Record_NAPTR(tputil.FancyEqMixin, tputil.FancyStrMixin):
+    """
+    The location of the server(s) for a specific protocol and domain.
+
+    @type order: C{int}
+    @ivar order: An integer specifying the order in which the NAPTR records
+        MUST be processed to ensure the correct ordering of rules.  Low numbers
+        are processed before high numbers.
+
+    @type preference: C{int}
+    @ivar preference: An integer that specifies the order in which NAPTR
+        records with equal "order" values SHOULD be processed, low numbers
+        being processed before high numbers.
+
+    @type flag: L{Charstr}
+    @ivar flag: A <character-string> containing flags to control aspects of the
+        rewriting and interpretation of the fields in the record.  Flags
+        are single characters from the set [A-Z0-9].  The case of the alphabetic
+        characters is not significant.
+
+        At this time only four flags, "S", "A", "U", and "P", are defined.
+
+    @type service: L{Charstr}
+    @ivar service: Specifies the service(s) available down this rewrite path.
+        It may also specify the particular protocol that is used to talk with a
+        service.  A protocol MUST be specified if the flags field states that
+        the NAPTR is terminal.
+
+    @type regexp: L{Charstr}
+    @ivar regexp: A STRING containing a substitution expression that is applied
+        to the original string held by the client in order to construct the
+        next domain name to lookup.
+
+    @type replacement: L{Name}
+    @ivar replacement: The next NAME to query for NAPTR, SRV, or address
+        records depending on the value of the flags field.  This MUST be a
+        fully qualified domain-name.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+
+    @see: U{http://www.faqs.org/rfcs/rfc2915.html}
+    """
+    TYPE = NAPTR
+
+    compareAttributes = ('order', 'preference', 'flags', 'service', 'regexp',
+                         'replacement')
+    fancybasename = 'NAPTR'
+
+    showAttributes = ('order', 'preference', ('flags', 'flags', '%s'),
+                      ('service', 'service', '%s'), ('regexp', 'regexp', '%s'),
+                      ('replacement', 'replacement', '%s'), 'ttl')
+
+    def __init__(self, order=0, preference=0, flags=b'', service=b'', regexp=b'',
+                 replacement=b'', ttl=None):
+        self.order = int(order)
+        self.preference = int(preference)
+        self.flags = Charstr(flags)
+        self.service = Charstr(service)
+        self.regexp = Charstr(regexp)
+        self.replacement = Name(replacement)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict=None):
+        strio.write(struct.pack('!HH', self.order, self.preference))
+        # This can't be compressed
+        self.flags.encode(strio, None)
+        self.service.encode(strio, None)
+        self.regexp.encode(strio, None)
+        self.replacement.encode(strio, None)
+
+
+    def decode(self, strio, length=None):
+        r = struct.unpack('!HH', readPrecisely(strio, struct.calcsize('!HH')))
+        self.order, self.preference = r
+        self.flags = Charstr()
+        self.service = Charstr()
+        self.regexp = Charstr()
+        self.replacement = Name()
+        self.flags.decode(strio)
+        self.service.decode(strio)
+        self.regexp.decode(strio)
+        self.replacement.decode(strio)
+
+
+    def __hash__(self):
+        return hash((
+            self.order, self.preference, self.flags,
+            self.service, self.regexp, self.replacement))
+
+
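+# Illustrative sketch (added for exposition): a terminal ("u") ENUM-style
+# NAPTR rule whose regexp rewrites the queried name into a SIP URI; all
+# values below are arbitrary.
+def _exampleNAPTR():
+    return Record_NAPTR(order=100, preference=10, flags=b'u',
+                        service=b'E2U+sip',
+                        regexp=b'!^.*$!sip:info@example.com!',
+                        replacement=b'', ttl='1H')
+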
+
+@implementer(IEncodable, IRecord)
+class Record_AFSDB(tputil.FancyStrMixin, tputil.FancyEqMixin):
+    """
+    Map from a domain name to the name of an AFS cell database server.
+
+    @type subtype: C{int}
+    @ivar subtype: In the case of subtype 1, the host has an AFS version 3.0
+        Volume Location Server for the named AFS cell.  In the case of subtype
+        2, the host has an authenticated name server holding the cell-root
+        directory node for the named DCE/NCA cell.
+
+    @type hostname: L{Name}
+    @ivar hostname: The domain name of a host that has a server for the cell
+        named by this record.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+
+    @see: U{http://www.faqs.org/rfcs/rfc1183.html}
+    """
+    TYPE = AFSDB
+
+    fancybasename = 'AFSDB'
+    compareAttributes = ('subtype', 'hostname', 'ttl')
+    showAttributes = ('subtype', ('hostname', 'hostname', '%s'), 'ttl')
+
+    def __init__(self, subtype=0, hostname=b'', ttl=None):
+        self.subtype = int(subtype)
+        self.hostname = Name(hostname)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        strio.write(struct.pack('!H', self.subtype))
+        self.hostname.encode(strio, compDict)
+
+
+    def decode(self, strio, length = None):
+        r = struct.unpack('!H', readPrecisely(strio, struct.calcsize('!H')))
+        self.subtype, = r
+        self.hostname.decode(strio)
+
+
+    def __hash__(self):
+        return hash((self.subtype, self.hostname))
+
+
+
+@implementer(IEncodable, IRecord)
+class Record_RP(tputil.FancyEqMixin, tputil.FancyStrMixin):
+    """
+    The responsible person for a domain.
+
+    @type mbox: L{Name}
+    @ivar mbox: A domain name that specifies the mailbox for the responsible
+        person.
+
+    @type txt: L{Name}
+    @ivar txt: A domain name for which TXT RR's exist (indirection through
+        which allows information sharing about the contents of this RP record).
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+
+    @see: U{http://www.faqs.org/rfcs/rfc1183.html}
+    """
+    TYPE = RP
+
+    fancybasename = 'RP'
+    compareAttributes = ('mbox', 'txt', 'ttl')
+    showAttributes = (('mbox', 'mbox', '%s'), ('txt', 'txt', '%s'), 'ttl')
+
+    def __init__(self, mbox=b'', txt=b'', ttl=None):
+        self.mbox = Name(mbox)
+        self.txt = Name(txt)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        self.mbox.encode(strio, compDict)
+        self.txt.encode(strio, compDict)
+
+
+    def decode(self, strio, length = None):
+        self.mbox = Name()
+        self.txt = Name()
+        self.mbox.decode(strio)
+        self.txt.decode(strio)
+
+
+    def __hash__(self):
+        return hash((self.mbox, self.txt))
+
+
+
+@implementer(IEncodable, IRecord)
+class Record_HINFO(tputil.FancyStrMixin, tputil.FancyEqMixin):
+    """
+    Host information.
+
+    @type cpu: C{str}
+    @ivar cpu: Specifies the CPU type.
+
+    @type os: C{str}
+    @ivar os: Specifies the OS.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+    """
+    TYPE = HINFO
+
+    fancybasename = 'HINFO'
+    showAttributes = (('cpu', _nicebytes), ('os', _nicebytes), 'ttl')
+    compareAttributes = ('cpu', 'os', 'ttl')
+
+    def __init__(self, cpu='', os='', ttl=None):
+        self.cpu, self.os = cpu, os
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        strio.write(struct.pack('!B', len(self.cpu)) + self.cpu)
+        strio.write(struct.pack('!B', len(self.os)) + self.os)
+
+
+    def decode(self, strio, length = None):
+        cpu = struct.unpack('!B', readPrecisely(strio, 1))[0]
+        self.cpu = readPrecisely(strio, cpu)
+        os = struct.unpack('!B', readPrecisely(strio, 1))[0]
+        self.os = readPrecisely(strio, os)
+
+
+    def __eq__(self, other):
+        if isinstance(other, Record_HINFO):
+            return (self.os.lower() == other.os.lower() and
+                    self.cpu.lower() == other.cpu.lower() and
+                    self.ttl == other.ttl)
+        return NotImplemented
+
+
+    def __hash__(self):
+        return hash((self.os.lower(), self.cpu.lower()))
+
+
+
+@implementer(IEncodable, IRecord)
+class Record_MINFO(tputil.FancyEqMixin, tputil.FancyStrMixin):
+    """
+    Mailbox or mail list information.
+
+    This is an experimental record type.
+
+    @type rmailbx: L{Name}
+    @ivar rmailbx: A domain-name which specifies a mailbox which is responsible
+        for the mailing list or mailbox.  If this domain name names the root,
+        the owner of the MINFO RR is responsible for itself.
+
+    @type emailbx: L{Name}
+    @ivar emailbx: A domain-name which specifies a mailbox which is to receive
+        error messages related to the mailing list or mailbox specified by the
+        owner of the MINFO record.  If this domain name names the root, errors
+        should be returned to the sender of the message.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+    """
+    TYPE = MINFO
+
+    rmailbx = None
+    emailbx = None
+
+    fancybasename = 'MINFO'
+    compareAttributes = ('rmailbx', 'emailbx', 'ttl')
+    showAttributes = (('rmailbx', 'responsibility', '%s'),
+                      ('emailbx', 'errors', '%s'),
+                      'ttl')
+
+    def __init__(self, rmailbx=b'', emailbx=b'', ttl=None):
+        self.rmailbx, self.emailbx = Name(rmailbx), Name(emailbx)
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict = None):
+        self.rmailbx.encode(strio, compDict)
+        self.emailbx.encode(strio, compDict)
+
+
+    def decode(self, strio, length = None):
+        self.rmailbx, self.emailbx = Name(), Name()
+        self.rmailbx.decode(strio)
+        self.emailbx.decode(strio)
+
+
+    def __hash__(self):
+        return hash((self.rmailbx, self.emailbx))
+
+
+
+ at implementer(IEncodable, IRecord)
+class Record_MX(tputil.FancyStrMixin, tputil.FancyEqMixin):
+    """
+    Mail exchange.
+
+    @type preference: C{int}
+    @ivar preference: Specifies the preference given to this RR among others at
+        the same owner.  Lower values are preferred.
+
+    @type name: L{Name}
+    @ivar name: A domain-name which specifies a host willing to act as a mail
+        exchange.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be
+        cached.
+    """
+    TYPE = MX
+
+    fancybasename = 'MX'
+    compareAttributes = ('preference', 'name', 'ttl')
+    showAttributes = ('preference', ('name', 'name', '%s'), 'ttl')
+
+    def __init__(self, preference=0, name=b'', ttl=None, **kwargs):
+        self.preference, self.name = int(preference), Name(kwargs.get('exchange', name))
+        self.ttl = str2time(ttl)
+
+    def encode(self, strio, compDict = None):
+        strio.write(struct.pack('!H', self.preference))
+        self.name.encode(strio, compDict)
+
+
+    def decode(self, strio, length = None):
+        self.preference = struct.unpack('!H', readPrecisely(strio, 2))[0]
+        self.name = Name()
+        self.name.decode(strio)
+
+    def __hash__(self):
+        return hash((self.preference, self.name))
+
+
+
+ at implementer(IEncodable, IRecord)
+class Record_TXT(tputil.FancyEqMixin, tputil.FancyStrMixin):
+    """
+    Freeform text.
+
+    @type data: C{list} of C{bytes}
+    @ivar data: Freeform text which makes up this record.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be cached.
+    """
+    TYPE = TXT
+
+    fancybasename = 'TXT'
+    showAttributes = (('data', _nicebyteslist), 'ttl')
+    compareAttributes = ('data', 'ttl')
+
+    def __init__(self, *data, **kw):
+        self.data = list(data)
+        # Python 2 does not support keyword-only arguments after *data, so
+        # the ttl has to be pulled out of the keyword dict by hand.
+        self.ttl = str2time(kw.get('ttl', None))
+
+
+    def encode(self, strio, compDict=None):
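+        # Each element of C{data} is written as a DNS character-string: a
+        # one-byte length prefix followed by the data itself (at most 255
+        # bytes per string in the wire format).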
+        for d in self.data:
+            strio.write(struct.pack('!B', len(d)) + d)
+
+
+    def decode(self, strio, length=None):
+        soFar = 0
+        self.data = []
+        while soFar < length:
+            L = struct.unpack('!B', readPrecisely(strio, 1))[0]
+            self.data.append(readPrecisely(strio, L))
+            soFar += L + 1
+        if soFar != length:
+            log.msg(
+                "Decoded %d bytes in %s record, but rdlength is %d" % (
+                    soFar, self.fancybasename, length
+                )
+            )
+
+
+    def __hash__(self):
+        return hash(tuple(self.data))
+
+
+ at implementer(IEncodable, IRecord)
+class UnknownRecord(tputil.FancyEqMixin, tputil.FancyStrMixin, object):
+    """
+    Encapsulate the wire data for unknown record types so that they can
+    pass through the system unchanged.
+
+    @type data: C{bytes}
+    @ivar data: Wire data which makes up this record.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be cached.
+
+    @since: 11.1
+    """
+    fancybasename = 'UNKNOWN'
+    compareAttributes = ('data', 'ttl')
+    showAttributes = (('data', _nicebytes), 'ttl')
+
+    def __init__(self, data=b'', ttl=None):
+        self.data = data
+        self.ttl = str2time(ttl)
+
+
+    def encode(self, strio, compDict=None):
+        """
+        Write the raw bytes corresponding to this record's payload to the
+        stream.
+        """
+        strio.write(self.data)
+
+
+    def decode(self, strio, length=None):
+        """
+        Load the bytes which are part of this record from the stream and store
+        them unparsed and unmodified.
+        """
+        if length is None:
+            raise Exception('must know length for unknown record types')
+        self.data = readPrecisely(strio, length)
+
+
+    def __hash__(self):
+        return hash((self.data, self.ttl))
+
+
+
+class Record_SPF(Record_TXT):
+    """
+    Structurally, freeform text. Semantically, a policy definition, formatted
+    as defined in U{rfc 4408<http://www.faqs.org/rfcs/rfc4408.html>}.
+
+    @type data: C{list} of C{str}
+    @ivar data: Freeform text which makes up this record.
+
+    @type ttl: C{int}
+    @ivar ttl: The maximum number of seconds which this record should be cached.
+    """
+    TYPE = SPF
+    fancybasename = 'SPF'
+
+
+
+class Message:
+    """
+    L{Message} contains all the information represented by a single
+    DNS request or response.
+
+    @ivar rCode: A response code, used to indicate success or failure in a
+        message which is a response from a server to a client request.
+    @type rCode: C{0 <= int < 16}
+    """
+    headerFmt = "!H2B4H"
+    headerSize = struct.calcsize(headerFmt)
+
+    # Question, answer, additional, and nameserver lists
+    queries = answers = add = ns = None
+
+    def __init__(self, id=0, answer=0, opCode=0, recDes=0, recAv=0,
+                       auth=0, rCode=OK, trunc=0, maxSize=512):
+        self.maxSize = maxSize
+        self.id = id
+        self.answer = answer
+        self.opCode = opCode
+        self.auth = auth
+        self.trunc = trunc
+        self.recDes = recDes
+        self.recAv = recAv
+        self.rCode = rCode
+        self.queries = []
+        self.answers = []
+        self.authority = []
+        self.additional = []
+
+
+    def addQuery(self, name, type=ALL_RECORDS, cls=IN):
+        """
+        Add another query to this Message.
+
+        @type name: C{bytes}
+        @param name: The name to query.
+
+        @type type: C{int}
+        @param type: Query type
+
+        @type cls: C{int}
+        @param cls: Query class
+        """
+        self.queries.append(Query(name, type, cls))
+
+
+    def encode(self, strio):
+        compDict = {}
+        body_tmp = BytesIO()
+        for q in self.queries:
+            q.encode(body_tmp, compDict)
+        for q in self.answers:
+            q.encode(body_tmp, compDict)
+        for q in self.authority:
+            q.encode(body_tmp, compDict)
+        for q in self.additional:
+            q.encode(body_tmp, compDict)
+        body = body_tmp.getvalue()
+        size = len(body) + self.headerSize
+        if self.maxSize and size > self.maxSize:
+            self.trunc = 1
+            body = body[:self.maxSize - self.headerSize]
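+        # Pack the header flag bits: QR, OPCODE, AA, TC and RD go into the
+        # third header byte, RA and RCODE into the fourth (RFC 1035, 4.1.1).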
+        byte3 = (( ( self.answer & 1 ) << 7 )
+                 | ((self.opCode & 0xf ) << 3 )
+                 | ((self.auth & 1 ) << 2 )
+                 | ((self.trunc & 1 ) << 1 )
+                 | ( self.recDes & 1 ) )
+        byte4 = ( ( (self.recAv & 1 ) << 7 )
+                  | (self.rCode & 0xf ) )
+
+        strio.write(struct.pack(self.headerFmt, self.id, byte3, byte4,
+                                len(self.queries), len(self.answers),
+                                len(self.authority), len(self.additional)))
+        strio.write(body)
+
+
+    def decode(self, strio, length=None):
+        self.maxSize = 0
+        header = readPrecisely(strio, self.headerSize)
+        r = struct.unpack(self.headerFmt, header)
+        self.id, byte3, byte4, nqueries, nans, nns, nadd = r
+        self.answer = ( byte3 >> 7 ) & 1
+        self.opCode = ( byte3 >> 3 ) & 0xf
+        self.auth = ( byte3 >> 2 ) & 1
+        self.trunc = ( byte3 >> 1 ) & 1
+        self.recDes = byte3 & 1
+        self.recAv = ( byte4 >> 7 ) & 1
+        self.rCode = byte4 & 0xf
+
+        self.queries = []
+        for i in range(nqueries):
+            q = Query()
+            try:
+                q.decode(strio)
+            except EOFError:
+                return
+            self.queries.append(q)
+
+        items = ((self.answers, nans), (self.authority, nns), (self.additional, nadd))
+        for (l, n) in items:
+            self.parseRecords(l, n, strio)
+
+
+    def parseRecords(self, list, num, strio):
+        for i in range(num):
+            header = RRHeader(auth=self.auth)
+            try:
+                header.decode(strio)
+            except EOFError:
+                return
+            t = self.lookupRecordType(header.type)
+            if not t:
+                continue
+            header.payload = t(ttl=header.ttl)
+            try:
+                header.payload.decode(strio, header.rdlength)
+            except EOFError:
+                return
+            list.append(header)
+
+
+    # Create a mapping from record types to their corresponding Record_*
+    # classes.  This relies on the global state which has been created so
+    # far in initializing this module (so don't define Record classes after
+    # this).
+    _recordTypes = {}
+    for name in globals():
+        if name.startswith('Record_'):
+            _recordTypes[globals()[name].TYPE] = globals()[name]
+
+    # Clear the iteration variable out of the class namespace so it
+    # doesn't become an attribute.
+    del name
+
+
+    def lookupRecordType(self, type):
+        """
+        Retrieve the L{IRecord} implementation for the given record type.
+
+        @param type: A record type, such as L{A} or L{NS}.
+        @type type: C{int}
+
+        @return: A class implementing L{IRecord} for the given record type,
+            or L{UnknownRecord} if the type is not recognized.
+        @rtype: L{types.ClassType}
+        """
+        return self._recordTypes.get(type, UnknownRecord)
+
+
+    def toStr(self):
+        """
+        Encode this L{Message} into a byte string in the format described by RFC
+        1035.
+
+        @rtype: C{bytes}
+        """
+        strio = BytesIO()
+        self.encode(strio)
+        return strio.getvalue()
+
+
+    def fromStr(self, str):
+        """
+        Decode a byte string in the format described by RFC 1035 into this
+        L{Message}.
+
+        @param str: L{bytes}
+        """
+        strio = BytesIO(str)
+        self.decode(strio)
+
+
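+# An illustrative sketch (not part of the upstream module) of how a Message
+# round-trips through the RFC 1035 wire format via toStr()/fromStr():
+#
+#     msg = Message(id=1234, recDes=1)
+#     msg.addQuery(b'example.com', A, IN)
+#     wire = msg.toStr()        # encode to RFC 1035 bytes
+#     parsed = Message()
+#     parsed.fromStr(wire)      # decode the bytes back into a Message
+#     assert parsed.id == 1234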
+
+class DNSMixin(object):
+    """
+    DNS protocol mixin shared by UDP and TCP implementations.
+
+    @ivar _reactor: A L{IReactorTime} and L{IReactorUDP} provider which will
+        be used to issue DNS queries and manage request timeouts.
+    """
+    id = None
+    liveMessages = None
+
+    def __init__(self, controller, reactor=None):
+        self.controller = controller
+        self.id = random.randrange(2 ** 10, 2 ** 15)
+        if reactor is None:
+            from twisted.internet import reactor
+        self._reactor = reactor
+
+
+    def pickID(self):
+        """
+        Return a unique ID for queries.
+        """
+        while True:
+            id = randomSource()
+            if id not in self.liveMessages:
+                return id
+
+
+    def callLater(self, period, func, *args):
+        """
+        Wrapper around reactor.callLater, mainly for test purposes.
+        """
+        return self._reactor.callLater(period, func, *args)
+
+
+    def _query(self, queries, timeout, id, writeMessage):
+        """
+        Send out a message with the given queries.
+
+        @type queries: C{list} of C{Query} instances
+        @param queries: The queries to transmit
+
+        @type timeout: C{int} or C{float}
+        @param timeout: How long to wait before giving up
+
+        @type id: C{int}
+        @param id: Unique key for this request
+
+        @type writeMessage: C{callable}
+        @param writeMessage: One-parameter callback which writes the message
+
+        @rtype: C{Deferred}
+        @return: a C{Deferred} which will be fired with the result of the
+            query, or errbacked with any errors that could happen (exceptions
+            during writing of the query, timeout errors, ...).
+        """
+        m = Message(id, recDes=1)
+        m.queries = queries
+
+        try:
+            writeMessage(m)
+        except:
+            return defer.fail()
+
+        resultDeferred = defer.Deferred()
+        cancelCall = self.callLater(timeout, self._clearFailed, resultDeferred, id)
+        self.liveMessages[id] = (resultDeferred, cancelCall)
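+        # The Deferred is stored together with its timeout call so that an
+        # incoming reply can cancel the timeout and a timeout can errback the
+        # Deferred (see _clearFailed below).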
+
+        return resultDeferred
+
+    def _clearFailed(self, deferred, id):
+        """
+        Clean the Deferred after a timeout.
+        """
+        try:
+            del self.liveMessages[id]
+        except KeyError:
+            pass
+        deferred.errback(failure.Failure(DNSQueryTimeoutError(id)))
+
+
+class DNSDatagramProtocol(DNSMixin, protocol.DatagramProtocol):
+    """
+    DNS protocol over UDP.
+    """
+    resends = None
+
+    def stopProtocol(self):
+        """
+        Stop protocol: reset state variables.
+        """
+        self.liveMessages = {}
+        self.resends = {}
+        self.transport = None
+
+    def startProtocol(self):
+        """
+        Upon start, reset internal state.
+        """
+        self.liveMessages = {}
+        self.resends = {}
+
+    def writeMessage(self, message, address):
+        """
+        Send a message holding DNS queries.
+
+        @type message: L{Message}
+        """
+        self.transport.write(message.toStr(), address)
+
+    def startListening(self):
+        self._reactor.listenUDP(0, self, maxPacketSize=512)
+
+    def datagramReceived(self, data, addr):
+        """
+        Read a datagram, extract the message in it and trigger the associated
+        Deferred.
+        """
+        m = Message()
+        try:
+            m.fromStr(data)
+        except EOFError:
+            log.msg("Truncated packet (%d bytes) from %s" % (len(data), addr))
+            return
+        except:
+            # Nothing should trigger this, but since we're potentially
+            # invoking a lot of different decoding methods, we might as well
+            # be extra cautious.  Anything that triggers this is itself
+            # buggy.
+            log.err(failure.Failure(), "Unexpected decoding error")
+            return
+
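+        # A transaction ID present in liveMessages means this datagram answers
+        # one of our own outstanding queries; anything else is handed to the
+        # controller, unless it is a suppressed resend.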
+        if m.id in self.liveMessages:
+            d, canceller = self.liveMessages[m.id]
+            del self.liveMessages[m.id]
+            canceller.cancel()
+            # XXX we shouldn't need this hack of catching exception on callback()
+            try:
+                d.callback(m)
+            except:
+                log.err()
+        else:
+            if m.id not in self.resends:
+                self.controller.messageReceived(m, self, addr)
+
+
+    def removeResend(self, id):
+        """
+        Mark message ID as no longer having duplication suppression.
+        """
+        try:
+            del self.resends[id]
+        except KeyError:
+            pass
+
+    def query(self, address, queries, timeout=10, id=None):
+        """
+        Send out a message with the given queries.
+
+        @type address: C{tuple} of C{str} and C{int}
+        @param address: The address to which to send the query
+
+        @type queries: C{list} of C{Query} instances
+        @param queries: The queries to transmit
+
+        @rtype: C{Deferred}
+        """
+        if not self.transport:
+            # XXX transport might not get created automatically, use callLater?
+            try:
+                self.startListening()
+            except CannotListenError:
+                return defer.fail()
+
+        if id is None:
+            id = self.pickID()
+        else:
+            self.resends[id] = 1
+
+        def writeMessage(m):
+            self.writeMessage(m, address)
+
+        return self._query(queries, timeout, id, writeMessage)
+
+
+class DNSProtocol(DNSMixin, protocol.Protocol):
+    """
+    DNS protocol over TCP.
+    """
+    length = None
+    buffer = b''
+
+    def writeMessage(self, message):
+        """
+        Send a message holding DNS queries.
+
+        @type message: L{Message}
+        """
+        s = message.toStr()
+        self.transport.write(struct.pack('!H', len(s)) + s)
+
+    def connectionMade(self):
+        """
+        Connection is made: reset internal state, and notify the controller.
+        """
+        self.liveMessages = {}
+        self.controller.connectionMade(self)
+
+
+    def connectionLost(self, reason):
+        """
+        Notify the controller that this protocol is no longer
+        connected.
+        """
+        self.controller.connectionLost(self)
+
+
+    def dataReceived(self, data):
+        self.buffer += data
+
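+        # TCP DNS messages are framed with a two-byte big-endian length prefix
+        # (RFC 1035, section 4.2.2); parse as many complete messages as the
+        # buffer currently holds.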
+        while self.buffer:
+            if self.length is None and len(self.buffer) >= 2:
+                self.length = struct.unpack('!H', self.buffer[:2])[0]
+                self.buffer = self.buffer[2:]
+
+            if len(self.buffer) >= self.length:
+                myChunk = self.buffer[:self.length]
+                m = Message()
+                m.fromStr(myChunk)
+
+                try:
+                    d, canceller = self.liveMessages[m.id]
+                except KeyError:
+                    self.controller.messageReceived(m, self)
+                else:
+                    del self.liveMessages[m.id]
+                    canceller.cancel()
+                    # XXX we shouldn't need this hack
+                    try:
+                        d.callback(m)
+                    except:
+                        log.err()
+
+                self.buffer = self.buffer[self.length:]
+                self.length = None
+            else:
+                break
+
+
+    def query(self, queries, timeout=60):
+        """
+        Send out a message with the given queries.
+
+        @type queries: C{list} of C{Query} instances
+        @param queries: The queries to transmit
+
+        @rtype: C{Deferred}
+        """
+        id = self.pickID()
+        return self._query(queries, timeout, id, self.writeMessage)
diff --git a/ThirdParty/Twisted/twisted/names/error.py b/ThirdParty/Twisted/twisted/names/error.py
new file mode 100644
index 0000000..db11ee9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/error.py
@@ -0,0 +1,97 @@
+# -*- test-case-name: twisted.names.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Exception class definitions for Twisted Names.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.internet.defer import TimeoutError
+
+
+class DomainError(ValueError):
+    """
+    Indicates a lookup failed because there were no records matching the given
+    C{name, class, type} triple.
+    """
+
+
+
+class AuthoritativeDomainError(ValueError):
+    """
+    Indicates a lookup failed for a name for which this server is authoritative
+    because there were no records matching the given C{name, class, type}
+    triple.
+    """
+
+
+
+class DNSQueryTimeoutError(TimeoutError):
+    """
+    Indicates a lookup failed due to a timeout.
+
+    @ivar id: The id of the message which timed out.
+    """
+    def __init__(self, id):
+        TimeoutError.__init__(self)
+        self.id = id
+
+
+
+class DNSFormatError(DomainError):
+    """
+    Indicates a query failed with a result of L{twisted.names.dns.EFORMAT}.
+    """
+
+
+
+class DNSServerError(DomainError):
+    """
+    Indicates a query failed with a result of L{twisted.names.dns.ESERVER}.
+    """
+
+
+
+class DNSNameError(DomainError):
+    """
+    Indicates a query failed with a result of L{twisted.names.dns.ENAME}.
+    """
+
+
+
+class DNSNotImplementedError(DomainError):
+    """
+    Indicates a query failed with a result of L{twisted.names.dns.ENOTIMP}.
+    """
+
+
+
+class DNSQueryRefusedError(DomainError):
+    """
+    Indicates a query failed with a result of L{twisted.names.dns.EREFUSED}.
+    """
+
+
+
+class DNSUnknownError(DomainError):
+    """
+    Indicates a query failed with an unknown result.
+    """
+
+
+
+class ResolverError(Exception):
+    """
+    Indicates a query failed because of a decision made by the local
+    resolver object.
+    """
+
+
+__all__ = [
+    'DomainError', 'AuthoritativeDomainError', 'DNSQueryTimeoutError',
+    'DNSFormatError', 'DNSServerError', 'DNSNameError',
+    'DNSNotImplementedError', 'DNSQueryRefusedError',
+    'DNSUnknownError', 'ResolverError']
diff --git a/ThirdParty/Twisted/twisted/names/hosts.py b/ThirdParty/Twisted/twisted/names/hosts.py
new file mode 100644
index 0000000..64674a3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/hosts.py
@@ -0,0 +1,149 @@
+# -*- test-case-name: twisted.names.test.test_hosts -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+hosts(5) support.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.python.compat import nativeString
+from twisted.names import dns
+from twisted.python import failure
+from twisted.python.filepath import FilePath
+from twisted.internet import defer
+from twisted.internet.abstract import isIPAddress
+
+from twisted.names import common
+
+def searchFileForAll(hostsFile, name):
+    """
+    Search the given file, which is in hosts(5) standard format, for an address
+    entry with a given name.
+
+    @param hostsFile: The name of the hosts(5)-format file to search.
+    @type hostsFile: L{FilePath}
+
+    @param name: The name to search for.
+    @type name: C{str}
+
+    @return: A C{list} of C{str} giving all of the addresses in the file
+        associated with the name.  The list is empty if the name is not found.
+    """
+    results = []
+    try:
+        lines = hostsFile.getContent().splitlines()
+    except:
+        return results
+
+    name = name.lower()
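+    # hosts(5) lines have the form "address canonical-name [aliases...]";
+    # anything after '#' is a comment, and names are matched
+    # case-insensitively.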
+    for line in lines:
+        idx = line.find(b'#')
+        if idx != -1:
+            line = line[:idx]
+        if not line:
+            continue
+        parts = line.split()
+
+        if name.lower() in [s.lower() for s in parts[1:]]:
+            results.append(nativeString(parts[0]))
+    return results
+
+
+
+def searchFileFor(file, name):
+    """
+    Search the given file, which is in hosts(5) standard format, for an address
+    entry with a given name.
+
+    @param file: The name of the hosts(5)-format file to search.
+
+    @param name: The name to search for.
+    @type name: C{str}
+
+    @return: C{None} if the name is not found in the file, otherwise a
+        C{str} giving the address in the file associated with the name.
+    """
+    addresses = searchFileForAll(FilePath(file), name)
+    if addresses:
+        return addresses[0]
+    return None
+
+
+
+class Resolver(common.ResolverBase):
+    """
+    A resolver that services hosts(5) format files.
+    """
+    def __init__(self, file=b'/etc/hosts', ttl = 60 * 60):
+        common.ResolverBase.__init__(self)
+        self.file = file
+        self.ttl = ttl
+
+
+    def _aRecords(self, name):
+        """
+        Return a tuple of L{dns.RRHeader} instances for all of the IPv4
+        addresses in the hosts file.
+        """
+        return tuple([
+            dns.RRHeader(name, dns.A, dns.IN, self.ttl,
+                         dns.Record_A(addr, self.ttl))
+            for addr
+            in searchFileForAll(FilePath(self.file), name)
+            if isIPAddress(addr)])
+
+
+    def _aaaaRecords(self, name):
+        """
+        Return a tuple of L{dns.RRHeader} instances for all of the IPv6
+        addresses in the hosts file.
+        """
+        return tuple([
+            dns.RRHeader(name, dns.AAAA, dns.IN, self.ttl,
+                         dns.Record_AAAA(addr, self.ttl))
+            for addr
+            in searchFileForAll(FilePath(self.file), name)
+            if not isIPAddress(addr)])
+
+
+    def _respond(self, name, records):
+        """
+        Generate a response for the given name containing the given result
+        records, or a failure if there are no result records.
+
+        @param name: The DNS name the response is for.
+        @type name: C{str}
+
+        @param records: A tuple of L{dns.RRHeader} instances giving the results
+            that will go into the response.
+
+        @return: A L{Deferred} which will fire with a three-tuple of result
+            records, authority records, and additional records, or which will
+            fail with L{dns.DomainError} if there are no result records.
+        """
+        if records:
+            return defer.succeed((records, (), ()))
+        return defer.fail(failure.Failure(dns.DomainError(name)))
+
+
+    def lookupAddress(self, name, timeout=None):
+        """
+        Read any IPv4 addresses from C{self.file} and return them as L{Record_A}
+        instances.
+        """
+        return self._respond(name, self._aRecords(name))
+
+
+    def lookupIPV6Address(self, name, timeout=None):
+        """
+        Read any IPv6 addresses from C{self.file} and return them as
+        L{Record_AAAA} instances.
+        """
+        return self._respond(name, self._aaaaRecords(name))
+
+    # Someday this should include IPv6 addresses too, but that will cause
+    # problems if users of the API (mainly via getHostByName) aren't updated to
+    # know about IPv6 first.
+    lookupAllRecords = lookupAddress
diff --git a/ThirdParty/Twisted/twisted/names/resolve.py b/ThirdParty/Twisted/twisted/names/resolve.py
new file mode 100644
index 0000000..8af447a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/resolve.py
@@ -0,0 +1,59 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Look up a name using multiple resolvers.
+
+Future Plans: This needs some way to specify which resolver answered
+the query, or some way to specify (authority|ttl|cache behavior|more?).
+"""
+
+from __future__ import division, absolute_import
+
+from zope.interface import implementer
+
+from twisted.internet import defer, interfaces
+from twisted.names import dns, common
+
+
+class FailureHandler:
+    def __init__(self, resolver, query, timeout):
+        self.resolver = resolver
+        self.query = query
+        self.timeout = timeout
+
+
+    def __call__(self, failure):
+        # AuthoritativeDomainErrors should halt resolution attempts
+        failure.trap(dns.DomainError, defer.TimeoutError, NotImplementedError)
+        return self.resolver(self.query, self.timeout)
+
+
+
+ at implementer(interfaces.IResolver)
+class ResolverChain(common.ResolverBase):
+    """
+    Look up an address using multiple C{IResolver}s.
+    """
+    def __init__(self, resolvers):
+        common.ResolverBase.__init__(self)
+        self.resolvers = resolvers
+
+
+    def _lookup(self, name, cls, type, timeout):
+        q = dns.Query(name, type, cls)
+        d = self.resolvers[0].query(q, timeout)
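+        # Each additional resolver is consulted only if the previous one fails
+        # with DomainError, TimeoutError or NotImplementedError (see
+        # FailureHandler.__call__ above).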
+        for r in self.resolvers[1:]:
+            d = d.addErrback(
+                FailureHandler(r.query, q, timeout)
+            )
+        return d
+
+
+    def lookupAllRecords(self, name, timeout = None):
+        d = self.resolvers[0].lookupAllRecords(name, timeout)
+        for r in self.resolvers[1:]:
+            d = d.addErrback(
+                FailureHandler(r.lookupAllRecords, name, timeout)
+            )
+        return d
diff --git a/ThirdParty/Twisted/twisted/names/root.py b/ThirdParty/Twisted/twisted/names/root.py
new file mode 100644
index 0000000..a6a5eb3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/root.py
@@ -0,0 +1,448 @@
+# -*- test-case-name: twisted.names.test.test_rootresolve -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Resolver implementation for querying successive authoritative servers to
+look up a record, starting from the root nameservers.
+
+ at author: Jp Calderone
+
+todo::
+    robustify it
+    documentation
+"""
+
+import warnings
+
+from twisted.python.failure import Failure
+from twisted.internet import defer
+from twisted.names import dns, common, error
+
+
+def retry(t, p, *args):
+    """
+    Issue a query one or more times.
+
+    This function is deprecated.  Use one of the resolver classes for retry
+    logic, or implement it yourself.
+    """
+    warnings.warn(
+        "twisted.names.root.retry is deprecated since Twisted 10.0.  Use a "
+        "Resolver object for retry logic.", category=DeprecationWarning,
+        stacklevel=2)
+
+    assert t, "Timeout is required"
+    t = list(t)
+    def errback(failure):
+        failure.trap(defer.TimeoutError)
+        if not t:
+            return failure
+        return p.query(timeout=t.pop(0), *args
+            ).addErrback(errback
+            )
+    return p.query(timeout=t.pop(0), *args
+        ).addErrback(errback
+        )
+
+
+
+class _DummyController:
+    """
+    A do-nothing DNS controller.  This is useful when all messages received
+    will be responses to previously issued queries.  Anything else received
+    will be ignored.
+    """
+    def messageReceived(self, *args):
+        pass
+
+
+
+class Resolver(common.ResolverBase):
+    """
+    L{Resolver} implements recursive lookup starting from a specified list of
+    root servers.
+
+    @ivar hints: A C{list} of C{str} giving the dotted quad representation
+        of IP addresses of root servers at which to begin resolving names.
+
+    @ivar _maximumQueries: A C{int} giving the maximum number of queries
+        which will be attempted to resolve a single name.
+
+    @ivar _reactor: A L{IReactorTime} and L{IReactorUDP} provider to use to
+        bind UDP ports and manage timeouts.
+    """
+    def __init__(self, hints, maximumQueries=10, reactor=None):
+        common.ResolverBase.__init__(self)
+        self.hints = hints
+        self._maximumQueries = maximumQueries
+        self._reactor = reactor
+
+
+    def _roots(self):
+        """
+        Return a list of two-tuples representing the addresses of the root
+        servers, as defined by C{self.hints}.
+        """
+        return [(ip, dns.PORT) for ip in self.hints]
+
+
+    def _query(self, query, servers, timeout, filter):
+        """
+        Issue one query and return a L{Deferred} which fires with its response.
+
+        @param query: The query to issue.
+        @type query: L{dns.Query}
+
+        @param servers: The servers which might have an answer for this
+            query.
+        @type servers: L{list} of L{tuple} of L{str} and L{int}
+
+        @param timeout: A timeout on how long to wait for the response.
+        @type timeout: L{tuple} of L{int}
+
+        @param filter: A flag indicating whether to filter the results.  If
+            C{True}, the returned L{Deferred} will fire with a three-tuple of
+            lists of L{RRHeaders} (like the return value of the I{lookup*}
+            methods of L{IResolver}).  If C{False}, the result will be a
+            L{Message} instance.
+        @type filter: L{bool}
+
+        @return: A L{Deferred} which fires with the response or a timeout
+            error.
+        @rtype: L{Deferred}
+        """
+        from twisted.names import client
+        r = client.Resolver(servers=servers, reactor=self._reactor)
+        d = r.queryUDP([query], timeout)
+        if filter:
+            d.addCallback(r.filterAnswers)
+        return d
+
+
+    def _lookup(self, name, cls, type, timeout):
+        """
+        Implement name lookup by recursively discovering the authoritative
+        server for the name and then asking it, starting at one of the servers
+        in C{self.hints}.
+        """
+        if timeout is None:
+            # A series of timeouts for semi-exponential backoff, summing to an
+            # arbitrary total of 60 seconds.
+            timeout = (1, 3, 11, 45)
+        return self._discoverAuthority(
+            dns.Query(name, type, cls), self._roots(), timeout,
+            self._maximumQueries)
+
+
+    def _discoverAuthority(self, query, servers, timeout, queriesLeft):
+        """
+        Issue a query to a server and follow a delegation if necessary.
+
+        @param query: The query to issue.
+        @type query: L{dns.Query}
+
+        @param servers: The servers which might have an answer for this
+            query.
+        @type servers: L{list} of L{tuple} of L{str} and L{int}
+
+        @param timeout: A C{tuple} of C{int} giving the timeout to use for this
+            query.
+
+        @param queriesLeft: A C{int} giving the number of queries which may
+            yet be attempted to answer this query before the attempt will be
+            abandoned.
+
+        @return: A L{Deferred} which fires with a three-tuple of lists of
+            L{RRHeaders} giving the response, or with a L{Failure} if there is
+            a timeout or response error.
+        """
+        # Stop now if we've hit the query limit.
+        if queriesLeft <= 0:
+            return Failure(
+                error.ResolverError("Query limit reached without result"))
+
+        d = self._query(query, servers, timeout, False)
+        d.addCallback(
+            self._discoveredAuthority, query, timeout, queriesLeft - 1)
+        return d
+
+
+    def _discoveredAuthority(self, response, query, timeout, queriesLeft):
+        """
+        Interpret the response to a query, checking for error codes and
+        following delegations if necessary.
+
+        @param response: The L{Message} received in response to issuing C{query}.
+        @type response: L{Message}
+
+        @param query: The L{dns.Query} which was issued.
+        @type query: L{dns.Query}.
+
+        @param timeout: The timeout to use if another query is indicated by
+            this response.
+        @type timeout: L{tuple} of L{int}
+
+        @param queriesLeft: A C{int} giving the number of queries which may
+            yet be attempted to answer this query before the attempt will be
+            abandoned.
+
+        @return: A L{Failure} indicating a response error, a three-tuple of
+            lists of L{RRHeaders} giving the response to C{query} or a
+            L{Deferred} which will fire with one of those.
+        """
+        if response.rCode != dns.OK:
+            return Failure(self.exceptionForCode(response.rCode)(response))
+
+        # Turn the answers into a structure that's a little easier to work with.
+        records = {}
+        for answer in response.answers:
+            records.setdefault(answer.name, []).append(answer)
+
+        def findAnswerOrCName(name, type, cls):
+            cname = None
+            for record in records.get(name, []):
+                if record.cls == cls:
+                    if record.type == type:
+                        return record
+                    elif record.type == dns.CNAME:
+                        cname = record
+            # If there were any CNAME records, return the last one.  There's
+            # only supposed to be zero or one, though.
+            return cname
+
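+        # Follow any CNAME chain present in this response, using `seen` to
+        # detect cycles; if the original name has no answer at all, fall
+        # through to the delegation handling below.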
+        seen = set()
+        name = query.name
+        record = None
+        while True:
+            seen.add(name)
+            previous = record
+            record = findAnswerOrCName(name, query.type, query.cls)
+            if record is None:
+                if name == query.name:
+                    # If there's no answer for the original name, then this may
+                    # be a delegation.  Code below handles it.
+                    break
+                else:
+                    # Try to resolve the CNAME with another query.
+                    d = self._discoverAuthority(
+                        dns.Query(str(name), query.type, query.cls),
+                        self._roots(), timeout, queriesLeft)
+                    # We also want to include the CNAME in the ultimate result,
+                    # otherwise this will be pretty confusing.
+                    def cbResolved(results):
+                        answers, authority, additional = results
+                        answers.insert(0, previous)
+                        return (answers, authority, additional)
+                    d.addCallback(cbResolved)
+                    return d
+            elif record.type == query.type:
+                return (
+                    response.answers,
+                    response.authority,
+                    response.additional)
+            else:
+                # It's a CNAME record.  Try to resolve it from the records
+                # in this response with another iteration around the loop.
+                if record.payload.name in seen:
+                    raise error.ResolverError("Cycle in CNAME processing")
+                name = record.payload.name
+
+
+        # Build a map to use to convert NS names into IP addresses.
+        addresses = {}
+        for rr in response.additional:
+            if rr.type == dns.A:
+                addresses[rr.name.name] = rr.payload.dottedQuad()
+
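+        # Prefer name servers whose addresses appear as glue in the additional
+        # section; if none do, resolve one of the remaining names first.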
+        hints = []
+        traps = []
+        for rr in response.authority:
+            if rr.type == dns.NS:
+                ns = rr.payload.name.name
+                if ns in addresses:
+                    hints.append((addresses[ns], dns.PORT))
+                else:
+                    traps.append(ns)
+        if hints:
+            return self._discoverAuthority(
+                query, hints, timeout, queriesLeft)
+        elif traps:
+            d = self.lookupAddress(traps[0], timeout)
+            def getOneAddress(results):
+                answers, authority, additional = results
+                return answers[0].payload.dottedQuad()
+            d.addCallback(getOneAddress)
+            d.addCallback(
+                lambda hint: self._discoverAuthority(
+                    query, [(hint, dns.PORT)], timeout, queriesLeft - 1))
+            return d
+        else:
+            return Failure(error.ResolverError(
+                    "Stuck at response without answers or delegation"))
+
+
+    def discoveredAuthority(self, auth, name, cls, type, timeout):
+        warnings.warn(
+            'twisted.names.root.Resolver.discoveredAuthority is deprecated since '
+            'Twisted 10.0.  Use twisted.names.client.Resolver directly, instead.',
+            category=DeprecationWarning, stacklevel=2)
+        from twisted.names import client
+        q = dns.Query(name, type, cls)
+        r = client.Resolver(servers=[(auth, dns.PORT)])
+        d = r.queryUDP([q], timeout)
+        d.addCallback(r.filterAnswers)
+        return d
+
+
+
+def lookupNameservers(host, atServer, p=None):
+    warnings.warn(
+        'twisted.names.root.lookupNameservers is deprecated since Twisted '
+        '10.0.  Use twisted.names.root.Resolver.lookupNameservers instead.',
+        category=DeprecationWarning, stacklevel=2)
+    # print 'Nameserver lookup for', host, 'at', atServer, 'with', p
+    if p is None:
+        p = dns.DNSDatagramProtocol(_DummyController())
+        p.noisy = False
+    return retry(
+        (1, 3, 11, 45),                     # Timeouts
+        p,                                  # Protocol instance
+        (atServer, dns.PORT),               # Server to query
+        [dns.Query(host, dns.NS, dns.IN)]   # Question to ask
+    )
+
+def lookupAddress(host, atServer, p=None):
+    warnings.warn(
+        'twisted.names.root.lookupAddress is deprecated since Twisted '
+        '10.0.  Use twisted.names.root.Resolver.lookupAddress instead.',
+        category=DeprecationWarning, stacklevel=2)
+    # print 'Address lookup for', host, 'at', atServer, 'with', p
+    if p is None:
+        p = dns.DNSDatagramProtocol(_DummyController())
+        p.noisy = False
+    return retry(
+        (1, 3, 11, 45),                     # Timeouts
+        p,                                  # Protocol instance
+        (atServer, dns.PORT),               # Server to query
+        [dns.Query(host, dns.A, dns.IN)]    # Question to ask
+    )
+
+def extractAuthority(msg, cache):
+    warnings.warn(
+        'twisted.names.root.extractAuthority is deprecated since Twisted '
+        '10.0.  Please inspect the Message object directly.',
+        category=DeprecationWarning, stacklevel=2)
+    records = msg.answers + msg.authority + msg.additional
+    nameservers = [r for r in records if r.type == dns.NS]
+
+    # print 'Records for', soFar, ':', records
+    # print 'NS for', soFar, ':', nameservers
+
+    if not nameservers:
+        return None, nameservers
+    if not records:
+        raise IOError("No records")
+    for r in records:
+        if r.type == dns.A:
+            cache[str(r.name)] = r.payload.dottedQuad()
+    for r in records:
+        if r.type == dns.NS:
+            if str(r.payload.name) in cache:
+                return cache[str(r.payload.name)], nameservers
+    for addr in records:
+        if addr.type == dns.A and addr.name == r.name:
+            return addr.payload.dottedQuad(), nameservers
+    return None, nameservers
+
+def discoverAuthority(host, roots, cache=None, p=None):
+    warnings.warn(
+        'twisted.names.root.discoverAuthority is deprecated since Twisted '
+        '10.0.  Use twisted.names.root.Resolver.lookupNameservers instead.',
+        category=DeprecationWarning, stacklevel=4)
+
+    if cache is None:
+        cache = {}
+
+    rootAuths = list(roots)
+
+    parts = host.rstrip('.').split('.')
+    parts.reverse()
+
+    authority = rootAuths.pop()
+
+    soFar = ''
+    for part in parts:
+        soFar = part + '.' + soFar
+        # print '///////',  soFar, authority, p
+        msg = defer.waitForDeferred(lookupNameservers(soFar, authority, p))
+        yield msg
+        msg = msg.getResult()
+
+        newAuth, nameservers = extractAuthority(msg, cache)
+
+        if newAuth is not None:
+            # print "newAuth is not None"
+            authority = newAuth
+        else:
+            if nameservers:
+                r = str(nameservers[0].payload.name)
+                # print 'Recursively discovering authority for', r
+                authority = defer.waitForDeferred(discoverAuthority(r, roots, cache, p))
+                yield authority
+                authority = authority.getResult()
+                # print 'Discovered to be', authority, 'for', r
+##            else:
+##                # print 'Doing address lookup for', soFar, 'at', authority
+##                msg = defer.waitForDeferred(lookupAddress(soFar, authority, p))
+##                yield msg
+##                msg = msg.getResult()
+##                records = msg.answers + msg.authority + msg.additional
+##                addresses = [r for r in records if r.type == dns.A]
+##                if addresses:
+##                    authority = addresses[0].payload.dottedQuad()
+##                else:
+##                    raise IOError("Resolution error")
+    # print "Yielding authority", authority
+    yield authority
+
+discoverAuthority = defer.deferredGenerator(discoverAuthority)
+
+def makePlaceholder(deferred, name):
+    def placeholder(*args, **kw):
+        deferred.addCallback(lambda r: getattr(r, name)(*args, **kw))
+        return deferred
+    return placeholder
+
+class DeferredResolver:
+    def __init__(self, resolverDeferred):
+        self.waiting = []
+        resolverDeferred.addCallback(self.gotRealResolver)
+
+    def gotRealResolver(self, resolver):
+        w = self.waiting
+        self.__dict__ = resolver.__dict__
+        self.__class__ = resolver.__class__
+        for d in w:
+            d.callback(resolver)
+
+    def __getattr__(self, name):
+        if name.startswith('lookup') or name in ('getHostByName', 'query'):
+            self.waiting.append(defer.Deferred())
+            return makePlaceholder(self.waiting[-1], name)
+        raise AttributeError(name)
+
+def bootstrap(resolver):
+    """Lookup the root nameserver addresses using the given resolver
+
+    Return a Resolver which will eventually become a C{root.Resolver}
+    instance that has references to all the root servers that we were able
+    to look up.
+    """
+    domains = [chr(ord('a') + i) for i in range(13)]
+    # f = lambda r: (log.msg('Root server address: ' + str(r)), r)[1]
+    f = lambda r: r
+    L = [resolver.getHostByName('%s.root-servers.net' % d).addCallback(f) for d in domains]
+    d = defer.DeferredList(L)
+    d.addCallback(lambda r: Resolver([e[1] for e in r if e[0]]))
+    return DeferredResolver(d)
diff --git a/ThirdParty/Twisted/twisted/names/secondary.py b/ThirdParty/Twisted/twisted/names/secondary.py
new file mode 100644
index 0000000..c7c098c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/secondary.py
@@ -0,0 +1,179 @@
+# -*- test-case-name: twisted.names.test.test_names -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+__all__ = ['SecondaryAuthority', 'SecondaryAuthorityService']
+
+from twisted.internet import task, defer
+from twisted.names import dns
+from twisted.names import common
+from twisted.names import client
+from twisted.names import resolve
+from twisted.names.authority import FileAuthority
+
+from twisted.python import log, failure
+from twisted.application import service
+
+class SecondaryAuthorityService(service.Service):
+    calls = None
+
+    _port = 53
+
+    def __init__(self, primary, domains):
+        """
+        @param primary: The IP address of the server from which to perform
+        zone transfers.
+
+        @param domains: A sequence of domain names for which to perform
+        zone transfers.
+        """
+        self.primary = primary
+        self.domains = [SecondaryAuthority(primary, d) for d in domains]
+
+
+    @classmethod
+    def fromServerAddressAndDomains(cls, serverAddress, domains):
+        """
+        Construct a new L{SecondaryAuthorityService} from a tuple giving a
+        server address and a sequence of C{str} giving the names of the
+        domains for which this service is an authority.
+
+        @param serverAddress: A two-tuple, the first element of which is a
+            C{str} giving an IP address and the second element of which is a
+            C{int} giving a port number.  Together, these define where zone
+            transfers will be attempted from.
+
+        @param domains: A sequence of C{str} giving the domains to transfer.
+
+        @return: A new instance of L{SecondaryAuthorityService}.
+        """
+        service = cls(None, [])
+        service.primary = serverAddress[0]
+        service._port = serverAddress[1]
+        service.domains = [
+            SecondaryAuthority.fromServerAddressAndDomain(serverAddress, d)
+            for d in domains]
+        return service
+
+
+    def getAuthority(self):
+        return resolve.ResolverChain(self.domains)
+
+    def startService(self):
+        service.Service.startService(self)
+        self.calls = [task.LoopingCall(d.transfer) for d in self.domains]
+        i = 0
+        from twisted.internet import reactor
+        for c in self.calls:
+            # XXX Add errbacks, respect proper timeouts
+            reactor.callLater(i, c.start, 60 * 60)
+            i += 1
+
+    def stopService(self):
+        service.Service.stopService(self)
+        for c in self.calls:
+            c.stop()
+
+
+
+class SecondaryAuthority(common.ResolverBase):
+    """
+    An Authority that keeps itself updated by performing zone transfers.
+
+    @ivar primary: The IP address of the server from which zone transfers will
+        be attempted.
+    @type primary: C{str}
+
+    @ivar _port: The port number of the server from which zone transfers will be
+        attempted.
+    @type _port: C{int}
+
+    @ivar _reactor: The reactor to use to perform the zone transfers, or C{None}
+        to use the global reactor.
+    """
+
+    transferring = False
+    soa = records = None
+    _port = 53
+    _reactor = None
+
+    def __init__(self, primaryIP, domain):
+        common.ResolverBase.__init__(self)
+        self.primary = primaryIP
+        self.domain = domain
+
+
+    @classmethod
+    def fromServerAddressAndDomain(cls, serverAddress, domain):
+        """
+        Construct a new L{SecondaryAuthority} from a tuple giving a server
+        address and a C{str} giving the name of a domain for which this is an
+        authority.
+
+        @param serverAddress: A two-tuple, the first element of which is a
+            C{str} giving an IP address and the second element of which is a
+            C{int} giving a port number.  Together, these define where zone
+            transfers will be attempted from.
+
+        @param domain: A C{str} giving the domain to transfer.
+
+        @return: A new instance of L{SecondaryAuthority}.
+        """
+        secondary = cls(None, None)
+        secondary.primary = serverAddress[0]
+        secondary._port = serverAddress[1]
+        secondary.domain = domain
+        return secondary
+
+
+    def transfer(self):
+        if self.transferring:
+            return
+        self.transferring = True
+
+        reactor = self._reactor
+        if reactor is None:
+            from twisted.internet import reactor
+
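+        # Ask the configured primary for the whole zone (lookupZone performs
+        # an AXFR-style transfer) and rebuild the in-memory records from the
+        # result.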
+        resolver = client.Resolver(
+            servers=[(self.primary, self._port)], reactor=reactor)
+        return resolver.lookupZone(self.domain
+            ).addCallback(self._cbZone
+            ).addErrback(self._ebZone
+            )
+
+
+    def _lookup(self, name, cls, type, timeout=None):
+        if not self.soa or not self.records:
+            return defer.fail(failure.Failure(dns.DomainError(name)))
+
+
+        return FileAuthority.__dict__['_lookup'](self, name, cls, type, timeout)
+
+    # Reuse FileAuthority's lookupZone implementation directly rather than
+    # subclassing.
+
+    lookupZone = FileAuthority.__dict__['lookupZone']
+
+    def _cbZone(self, zone):
+        ans, _, _ = zone
+        self.records = r = {}
+        for rec in ans:
+            if not self.soa and rec.type == dns.SOA:
+                self.soa = (str(rec.name).lower(), rec.payload)
+            else:
+                r.setdefault(str(rec.name).lower(), []).append(rec.payload)
+
+    def _ebZone(self, failure):
+        log.msg("Updating %s from %s failed during zone transfer" % (self.domain, self.primary))
+        log.err(failure)
+
+    def update(self):
+        self.transfer().addCallbacks(self._cbTransferred, self._ebTransferred)
+
+    def _cbTransferred(self, result):
+        self.transferring = False
+
+    def _ebTransferred(self, failure):
+        self.transferring = False
+        log.msg("Transferring %s from %s failed after zone transfer" % (self.domain, self.primary))
+        log.err(failure)
diff --git a/ThirdParty/Twisted/twisted/names/server.py b/ThirdParty/Twisted/twisted/names/server.py
new file mode 100644
index 0000000..0da6acd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/server.py
@@ -0,0 +1,205 @@
+# -*- test-case-name: twisted.names.test.test_names -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Async DNS server
+
+Future plans:
+    - Better config file format maybe
+    - Make sure to differentiate between different classes
+    - notice truncation bit
+
+Important: No additional processing is done on some of the record types.
+This violates the most basic RFC and is just plain annoying
+for resolvers to deal with.  Fix it.
+
+ at author: Jp Calderone
+"""
+
+import time
+
+from twisted.internet import protocol
+from twisted.names import dns, resolve
+from twisted.python import log
+
+
+class DNSServerFactory(protocol.ServerFactory):
+    """
+    Server factory and tracker for L{DNSProtocol} connections.  This
+    class also provides records for responses to DNS queries.
+
+    @ivar connections: A list of all the connected L{DNSProtocol}
+        instances using this object as their controller.
+    @type connections: C{list} of L{DNSProtocol}
+    """
+
+    protocol = dns.DNSProtocol
+    cache = None
+
+    def __init__(self, authorities = None, caches = None, clients = None, verbose = 0):
+        resolvers = []
+        if authorities is not None:
+            resolvers.extend(authorities)
+        if caches is not None:
+            resolvers.extend(caches)
+        if clients is not None:
+            resolvers.extend(clients)
+
+        self.canRecurse = not not clients
+        self.resolver = resolve.ResolverChain(resolvers)
+        self.verbose = verbose
+        if caches:
+            self.cache = caches[-1]
+        self.connections = []
+
+
+    def buildProtocol(self, addr):
+        p = self.protocol(self)
+        p.factory = self
+        return p
+
+
+    def connectionMade(self, protocol):
+        """
+        Track a newly connected L{DNSProtocol}.
+        """
+        self.connections.append(protocol)
+
+
+    def connectionLost(self, protocol):
+        """
+        Stop tracking a no-longer connected L{DNSProtocol}.
+        """
+        self.connections.remove(protocol)
+
+
+    def sendReply(self, protocol, message, address):
+        if self.verbose > 1:
+            s = ' '.join([str(a.payload) for a in message.answers])
+            auth = ' '.join([str(a.payload) for a in message.authority])
+            add = ' '.join([str(a.payload) for a in message.additional])
+            if not s:
+                log.msg("Replying with no answers")
+            else:
+                log.msg("Answers are " + s)
+                log.msg("Authority is " + auth)
+                log.msg("Additional is " + add)
+
+        if address is None:
+            protocol.writeMessage(message)
+        else:
+            protocol.writeMessage(message, address)
+
+        if self.verbose > 1:
+            log.msg("Processed query in %0.3f seconds" % (time.time() - message.timeReceived))
+
+
+    def gotResolverResponse(self, (ans, auth, add), protocol, message, address):
+        message.rCode = dns.OK
+        message.answers = ans
+        for x in ans:
+            if x.isAuthoritative():
+                message.auth = 1
+                break
+        message.authority = auth
+        message.additional = add
+        self.sendReply(protocol, message, address)
+
+        l = len(ans) + len(auth) + len(add)
+        if self.verbose:
+            log.msg("Lookup found %d record%s" % (l, l != 1 and "s" or ""))
+
+        if self.cache and l:
+            self.cache.cacheResult(
+                message.queries[0], (ans, auth, add)
+            )
+
+
+    def gotResolverError(self, failure, protocol, message, address):
+        if failure.check(dns.DomainError, dns.AuthoritativeDomainError):
+            message.rCode = dns.ENAME
+        else:
+            message.rCode = dns.ESERVER
+            log.err(failure)
+
+        self.sendReply(protocol, message, address)
+        if self.verbose:
+            log.msg("Lookup failed")
+
+
+    def handleQuery(self, message, protocol, address):
+        # Discard all but the first query.  (No other servers implement
+        # multi-query messages, so we won't either.)
+        query = message.queries[0]
+
+        return self.resolver.query(query).addCallback(
+            self.gotResolverResponse, protocol, message, address
+        ).addErrback(
+            self.gotResolverError, protocol, message, address
+        )
+
+
+    def handleInverseQuery(self, message, protocol, address):
+        message.rCode = dns.ENOTIMP
+        self.sendReply(protocol, message, address)
+        if self.verbose:
+            log.msg("Inverse query from %r" % (address,))
+
+
+    def handleStatus(self, message, protocol, address):
+        message.rCode = dns.ENOTIMP
+        self.sendReply(protocol, message, address)
+        if self.verbose:
+            log.msg("Status request from %r" % (address,))
+
+
+    def handleNotify(self, message, protocol, address):
+        message.rCode = dns.ENOTIMP
+        self.sendReply(protocol, message, address)
+        if self.verbose:
+            log.msg("Notify message from %r" % (address,))
+
+
+    def handleOther(self, message, protocol, address):
+        message.rCode = dns.ENOTIMP
+        self.sendReply(protocol, message, address)
+        if self.verbose:
+            log.msg("Unknown op code (%d) from %r" % (message.opCode, address))
+
+
+    def messageReceived(self, message, proto, address = None):
+        message.timeReceived = time.time()
+
+        if self.verbose:
+            if self.verbose > 1:
+                s = ' '.join([str(q) for q in message.queries])
+            elif self.verbose > 0:
+                s = ' '.join([dns.QUERY_TYPES.get(q.type, 'UNKNOWN') for q in message.queries])
+
+            if not len(s):
+                log.msg("Empty query from %r" % ((address or proto.transport.getPeer()),))
+            else:
+                log.msg("%s query from %r" % (s, address or proto.transport.getPeer()))
+
+        message.recAv = self.canRecurse
+        message.answer = 1
+
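+        # Dispatch on the opcode: queries, inverse queries, status and notify
+        # messages each have a handler; anything unrecognized falls through to
+        # handleOther, and disallowed queries are refused outright.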
+        if not self.allowQuery(message, proto, address):
+            message.rCode = dns.EREFUSED
+            self.sendReply(proto, message, address)
+        elif message.opCode == dns.OP_QUERY:
+            self.handleQuery(message, proto, address)
+        elif message.opCode == dns.OP_INVERSE:
+            self.handleInverseQuery(message, proto, address)
+        elif message.opCode == dns.OP_STATUS:
+            self.handleStatus(message, proto, address)
+        elif message.opCode == dns.OP_NOTIFY:
+            self.handleNotify(message, proto, address)
+        else:
+            self.handleOther(message, proto, address)
+
+
+    def allowQuery(self, message, protocol, address):
+        # Allow anything but empty queries
+        return len(message.queries)
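The allowQuery() hook above is the natural extension point for access control. A minimal sketch, assuming the enclosing class is twisted.names.server.DNSServerFactory (as the tap.py hunk later in this diff suggests); the LocalOnlyFactory name and the loopback-only policy are purely illustrative:

    from twisted.names import server

    class LocalOnlyFactory(server.DNSServerFactory):
        """Refuse queries that do not come from the loopback address."""

        def allowQuery(self, message, protocol, address):
            # For UDP, address is the client's (host, port) pair; for TCP it
            # may be None (messageReceived above then falls back to getPeer()).
            if address is not None and address[0] != "127.0.0.1":
                return False
            return server.DNSServerFactory.allowQuery(
                self, message, protocol, address)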
diff --git a/ThirdParty/Twisted/twisted/names/srvconnect.py b/ThirdParty/Twisted/twisted/names/srvconnect.py
new file mode 100644
index 0000000..34434db
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/srvconnect.py
@@ -0,0 +1,211 @@
+# -*- test-case-name: twisted.names.test.test_srvconnect -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import random
+
+from zope.interface import implements
+
+from twisted.internet import error, interfaces
+
+from twisted.names import client, dns
+from twisted.names.error import DNSNameError
+from twisted.python.compat import reduce
+
+class _SRVConnector_ClientFactoryWrapper:
+    def __init__(self, connector, wrappedFactory):
+        self.__connector = connector
+        self.__wrappedFactory = wrappedFactory
+
+    def startedConnecting(self, connector):
+        self.__wrappedFactory.startedConnecting(self.__connector)
+
+    def clientConnectionFailed(self, connector, reason):
+        self.__connector.connectionFailed(reason)
+
+    def clientConnectionLost(self, connector, reason):
+        self.__connector.connectionLost(reason)
+
+    def __getattr__(self, key):
+        return getattr(self.__wrappedFactory, key)
+
+class SRVConnector:
+    """A connector that looks up DNS SRV records. See RFC2782."""
+
+    implements(interfaces.IConnector)
+
+    stopAfterDNS=0
+
+    def __init__(self, reactor, service, domain, factory,
+                 protocol='tcp', connectFuncName='connectTCP',
+                 connectFuncArgs=(),
+                 connectFuncKwArgs={},
+                 defaultPort=None,
+                 ):
+        """
+        @ivar defaultPort: Optional default port number to be used when SRV
+            lookup fails and the service name is unknown. This should be the
+            port number associated with the service name as defined by the IANA
+            registry.
+        @type defaultPort: C{int}
+        """
+        self.reactor = reactor
+        self.service = service
+        self.domain = domain
+        self.factory = factory
+
+        self.protocol = protocol
+        self.connectFuncName = connectFuncName
+        self.connectFuncArgs = connectFuncArgs
+        self.connectFuncKwArgs = connectFuncKwArgs
+        self._defaultPort = defaultPort
+
+        self.connector = None
+        self.servers = None
+        self.orderedServers = None # list of servers already used in this round
+
+    def connect(self):
+        """Start connection to remote server."""
+        self.factory.doStart()
+        self.factory.startedConnecting(self)
+
+        if not self.servers:
+            if self.domain is None:
+                self.connectionFailed(error.DNSLookupError("Domain is not defined."))
+                return
+            d = client.lookupService('_%s._%s.%s' % (self.service,
+                                                     self.protocol,
+                                                     self.domain))
+            d.addCallbacks(self._cbGotServers, self._ebGotServers)
+            d.addCallback(lambda x, self=self: self._reallyConnect())
+            if self._defaultPort:
+                d.addErrback(self._ebServiceUnknown)
+            d.addErrback(self.connectionFailed)
+        elif self.connector is None:
+            self._reallyConnect()
+        else:
+            self.connector.connect()
+
+    def _ebGotServers(self, failure):
+        failure.trap(DNSNameError)
+
+        # Some DNS servers reply with NXDOMAIN when in fact there are
+        # just no SRV records for that domain. Act as if we just got an
+        # empty response and use fallback.
+
+        self.servers = []
+        self.orderedServers = []
+
+    def _cbGotServers(self, (answers, auth, add)):
+        if len(answers) == 1 and answers[0].type == dns.SRV \
+                             and answers[0].payload \
+                             and answers[0].payload.target == dns.Name('.'):
+            # decidedly not available
+            raise error.DNSLookupError("Service %s not available for domain %s."
+                                       % (repr(self.service), repr(self.domain)))
+
+        self.servers = []
+        self.orderedServers = []
+        for a in answers:
+            if a.type != dns.SRV or not a.payload:
+                continue
+
+            self.orderedServers.append((a.payload.priority, a.payload.weight,
+                                        str(a.payload.target), a.payload.port))
+
+    def _ebServiceUnknown(self, failure):
+        """
+        Connect to the default port when the service name is unknown.
+
+        If no SRV records were found, the service name will be passed as the
+        port. If resolving the name fails with
+        L{error.ServiceNameUnknownError}, a final attempt is done using the
+        default port.
+        """
+        failure.trap(error.ServiceNameUnknownError)
+        self.servers = [(0, 0, self.domain, self._defaultPort)]
+        self.orderedServers = []
+        self.connect()
+
+    def _serverCmp(self, a, b):
+        if a[0]!=b[0]:
+            return cmp(a[0], b[0])
+        else:
+            return cmp(a[1], b[1])
+
+    def pickServer(self):
+        assert self.servers is not None
+        assert self.orderedServers is not None
+
+        if not self.servers and not self.orderedServers:
+            # no SRV record, fall back..
+            return self.domain, self.service
+
+        if not self.servers and self.orderedServers:
+            # start new round
+            self.servers = self.orderedServers
+            self.orderedServers = []
+
+        assert self.servers
+
+        self.servers.sort(self._serverCmp)
+        minPriority=self.servers[0][0]
+
+        weightIndex = zip(xrange(len(self.servers)), [x[1] for x in self.servers
+                                                      if x[0]==minPriority])
+        weightSum = reduce(lambda x, y: (None, x[1]+y[1]), weightIndex, (None, 0))[1]
+        rand = random.randint(0, weightSum)
+
+        for index, weight in weightIndex:
+            weightSum -= weight
+            if weightSum <= 0:
+                chosen = self.servers[index]
+                del self.servers[index]
+                self.orderedServers.append(chosen)
+
+                p, w, host, port = chosen
+                return host, port
+
+        raise RuntimeError, 'Impossible %s pickServer result.' % self.__class__.__name__
+
+    def _reallyConnect(self):
+        if self.stopAfterDNS:
+            self.stopAfterDNS=0
+            return
+
+        self.host, self.port = self.pickServer()
+        assert self.host is not None, 'Must have a host to connect to.'
+        assert self.port is not None, 'Must have a port to connect to.'
+
+        connectFunc = getattr(self.reactor, self.connectFuncName)
+        self.connector=connectFunc(
+            self.host, self.port,
+            _SRVConnector_ClientFactoryWrapper(self, self.factory),
+            *self.connectFuncArgs, **self.connectFuncKwArgs)
+
+    def stopConnecting(self):
+        """Stop attempting to connect."""
+        if self.connector:
+            self.connector.stopConnecting()
+        else:
+            self.stopAfterDNS=1
+
+    def disconnect(self):
+        """Disconnect whatever our are state is."""
+        if self.connector is not None:
+            self.connector.disconnect()
+        else:
+            self.stopConnecting()
+
+    def getDestination(self):
+        assert self.connector
+        return self.connector.getDestination()
+
+    def connectionFailed(self, reason):
+        self.factory.clientConnectionFailed(self, reason)
+        self.factory.doStop()
+
+    def connectionLost(self, reason):
+        self.factory.clientConnectionLost(self, reason)
+        self.factory.doStop()
+
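A minimal usage sketch for the SRVConnector defined above; the xmpp-client service, the example.org domain, and EchoFactory are placeholders, and the fallback to port 5222 follows the _ebServiceUnknown docstring:

    from twisted.internet import reactor
    from twisted.internet.protocol import ClientFactory, Protocol
    from twisted.names.srvconnect import SRVConnector

    class EchoFactory(ClientFactory):
        protocol = Protocol    # placeholder; a real client would subclass this

    # Resolve _xmpp-client._tcp.example.org, pick a target by priority and
    # weight as in pickServer() above, and fall back to example.org:5222 when
    # the lookup finds no SRV records and the service name is unknown.
    connector = SRVConnector(reactor, 'xmpp-client', 'example.org',
                             EchoFactory(), defaultPort=5222)
    connector.connect()
    reactor.run()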
diff --git a/ThirdParty/Twisted/twisted/names/tap.py b/ThirdParty/Twisted/twisted/names/tap.py
new file mode 100644
index 0000000..d0e3b1d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/tap.py
@@ -0,0 +1,150 @@
+# -*- test-case-name: twisted.names.test.test_tap -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Domain Name Server
+"""
+
+import os, traceback
+
+from twisted.python import usage
+from twisted.names import dns
+from twisted.application import internet, service
+
+from twisted.names import server
+from twisted.names import authority
+from twisted.names import secondary
+
+class Options(usage.Options):
+    optParameters = [
+        ["interface", "i", "",   "The interface to which to bind"],
+        ["port",      "p", "53", "The port on which to listen"],
+        ["resolv-conf", None, None,
+            "Override location of resolv.conf (implies --recursive)"],
+        ["hosts-file", None, None, "Perform lookups with a hosts file"],
+    ]
+
+    optFlags = [
+        ["cache",       "c", "Enable record caching"],
+        ["recursive",   "r", "Perform recursive lookups"],
+        ["verbose",     "v", "Log verbosely"],
+    ]
+
+    compData = usage.Completions(
+        optActions={"interface" : usage.CompleteNetInterfaces()}
+        )
+
+    zones = None
+    zonefiles = None
+
+    def __init__(self):
+        usage.Options.__init__(self)
+        self['verbose'] = 0
+        self.bindfiles = []
+        self.zonefiles = []
+        self.secondaries = []
+
+
+    def opt_pyzone(self, filename):
+        """Specify the filename of a Python syntax zone definition"""
+        if not os.path.exists(filename):
+            raise usage.UsageError(filename + ": No such file")
+        self.zonefiles.append(filename)
+
+    def opt_bindzone(self, filename):
+        """Specify the filename of a BIND9 syntax zone definition"""
+        if not os.path.exists(filename):
+            raise usage.UsageError(filename + ": No such file")
+        self.bindfiles.append(filename)
+
+
+    def opt_secondary(self, ip_domain):
+        """Act as secondary for the specified domain, performing
+        zone transfers from the specified IP (IP[:port]/domain).
+        """
+        args = ip_domain.split('/', 1)
+        if len(args) != 2:
+            raise usage.UsageError("Argument must be of the form IP[:port]/domain")
+        address = args[0].split(':')
+        if len(address) == 1:
+            address = (address[0], dns.PORT)
+        else:
+            try:
+                port = int(address[1])
+            except ValueError:
+                raise usage.UsageError(
+                    "Specify an integer port number, not %r" % (address[1],))
+            address = (address[0], port)
+        self.secondaries.append((address, [args[1]]))
+
+
+    def opt_verbose(self):
+        """Increment verbosity level"""
+        self['verbose'] += 1
+
+
+    def postOptions(self):
+        if self['resolv-conf']:
+            self['recursive'] = True
+
+        self.svcs = []
+        self.zones = []
+        for f in self.zonefiles:
+            try:
+                self.zones.append(authority.PySourceAuthority(f))
+            except Exception:
+                traceback.print_exc()
+                raise usage.UsageError("Invalid syntax in " + f)
+        for f in self.bindfiles:
+            try:
+                self.zones.append(authority.BindAuthority(f))
+            except Exception:
+                traceback.print_exc()
+                raise usage.UsageError("Invalid syntax in " + f)
+        for f in self.secondaries:
+            svc = secondary.SecondaryAuthorityService.fromServerAddressAndDomains(*f)
+            self.svcs.append(svc)
+            self.zones.append(self.svcs[-1].getAuthority())
+        try:
+            self['port'] = int(self['port'])
+        except ValueError:
+            raise usage.UsageError("Invalid port: %r" % (self['port'],))
+
+
+def _buildResolvers(config):
+    """
+    Build DNS resolver instances in an order which leaves recursive
+    resolving as a last resort.
+
+    @type config: L{Options} instance
+    @param config: Parsed command-line configuration
+
+    @return: Two-item tuple of a list of cache resolvers and a list of client
+        resolvers
+    """
+    from twisted.names import client, cache, hosts
+
+    ca, cl = [], []
+    if config['cache']:
+        ca.append(cache.CacheResolver(verbose=config['verbose']))
+    if config['hosts-file']:
+        cl.append(hosts.Resolver(file=config['hosts-file']))
+    if config['recursive']:
+        cl.append(client.createResolver(resolvconf=config['resolv-conf']))
+    return ca, cl
+
+
+def makeService(config):
+    ca, cl = _buildResolvers(config)
+
+    f = server.DNSServerFactory(config.zones, ca, cl, config['verbose'])
+    p = dns.DNSDatagramProtocol(f)
+    f.noisy = 0
+    ret = service.MultiService()
+    for (klass, arg) in [(internet.TCPServer, f), (internet.UDPServer, p)]:
+        s = klass(config['port'], arg, interface=config['interface'])
+        s.setServiceParent(ret)
+    for svc in config.svcs:
+        svc.setServiceParent(ret)
+    return ret
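A short sketch of driving the Options and makeService() code above without twistd; the port number 10053 is arbitrary and parseOptions() is the standard twisted.python.usage entry point:

    from twisted.names import tap

    config = tap.Options()
    # Roughly equivalent to: twistd dns --port 10053 --cache --recursive -v
    config.parseOptions(["--port", "10053", "--cache", "--recursive", "-v"])

    # makeService() returns a MultiService wrapping one TCP and one UDP DNS
    # server that share a single DNSServerFactory.
    dnsService = tap.makeService(config)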
diff --git a/ThirdParty/Twisted/twisted/names/test/__init__.py b/ThirdParty/Twisted/twisted/names/test/__init__.py
new file mode 100644
index 0000000..f6b7e3a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/__init__.py
@@ -0,0 +1 @@
+"Tests for twisted.names"
diff --git a/ThirdParty/Twisted/twisted/names/test/test_cache.py b/ThirdParty/Twisted/twisted/names/test/test_cache.py
new file mode 100644
index 0000000..06245c3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_cache.py
@@ -0,0 +1,135 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.names.cache}.
+"""
+
+from __future__ import division, absolute_import
+
+import time
+
+from twisted.trial import unittest
+
+from twisted.names import dns, cache
+from twisted.internet import task
+
+
+class Caching(unittest.TestCase):
+    """
+    Tests for L{cache.CacheResolver}.
+    """
+
+    def test_lookup(self):
+        c = cache.CacheResolver({
+            dns.Query(name=b'example.com', type=dns.MX, cls=dns.IN):
+                (time.time(), ([], [], []))})
+        return c.lookupMailExchange(b'example.com').addCallback(
+            self.assertEqual, ([], [], []))
+
+
+    def test_constructorExpires(self):
+        """
+        Cache entries passed into L{cache.CacheResolver.__init__} get
+        cancelled just like entries added with C{cacheResult}.
+        """
+        r = ([dns.RRHeader(b"example.com", dns.A, dns.IN, 60,
+                           dns.Record_A("127.0.0.1", 60))],
+             [dns.RRHeader(b"example.com", dns.A, dns.IN, 50,
+                           dns.Record_A("127.0.0.1", 50))],
+             [dns.RRHeader(b"example.com", dns.A, dns.IN, 40,
+                           dns.Record_A("127.0.0.1", 40))])
+
+        clock = task.Clock()
+        query = dns.Query(name=b"example.com", type=dns.A, cls=dns.IN)
+
+        c = cache.CacheResolver({ query : (clock.seconds(), r)}, reactor=clock)
+
+        # 40 seconds is enough to expire the entry because expiration is based
+        # on the minimum TTL.
+        clock.advance(40)
+
+        self.assertNotIn(query, c.cache)
+
+        return self.assertFailure(
+            c.lookupAddress(b"example.com"), dns.DomainError)
+
+
+    def test_normalLookup(self):
+        """
+        When a cache lookup finds a cached entry from 1 second ago, it is
+        returned with a TTL of original TTL minus the elapsed 1 second.
+        """
+        r = ([dns.RRHeader(b"example.com", dns.A, dns.IN, 60,
+                           dns.Record_A("127.0.0.1", 60))],
+             [dns.RRHeader(b"example.com", dns.A, dns.IN, 50,
+                           dns.Record_A("127.0.0.1", 50))],
+             [dns.RRHeader(b"example.com", dns.A, dns.IN, 40,
+                           dns.Record_A("127.0.0.1", 40))])
+
+        clock = task.Clock()
+
+        c = cache.CacheResolver(reactor=clock)
+        c.cacheResult(dns.Query(name=b"example.com", type=dns.A, cls=dns.IN), r)
+
+        clock.advance(1)
+
+        def cbLookup(result):
+            self.assertEquals(result[0][0].ttl, 59)
+            self.assertEquals(result[1][0].ttl, 49)
+            self.assertEquals(result[2][0].ttl, 39)
+            self.assertEquals(result[0][0].name.name, b"example.com")
+
+        return c.lookupAddress(b"example.com").addCallback(cbLookup)
+
+
+    def test_cachedResultExpires(self):
+        """
+        Once the TTL has been exceeded, the result is removed from the cache.
+        """
+        r = ([dns.RRHeader(b"example.com", dns.A, dns.IN, 60,
+                           dns.Record_A("127.0.0.1", 60))],
+             [dns.RRHeader(b"example.com", dns.A, dns.IN, 50,
+                           dns.Record_A("127.0.0.1", 50))],
+             [dns.RRHeader(b"example.com", dns.A, dns.IN, 40,
+                           dns.Record_A("127.0.0.1", 40))])
+
+        clock = task.Clock()
+
+        c = cache.CacheResolver(reactor=clock)
+        query = dns.Query(name=b"example.com", type=dns.A, cls=dns.IN)
+        c.cacheResult(query, r)
+
+        clock.advance(40)
+
+        self.assertNotIn(query, c.cache)
+
+        return self.assertFailure(
+            c.lookupAddress(b"example.com"), dns.DomainError)
+
+
+    def test_expiredTTLLookup(self):
+        """
+        When the cache is queried exactly as the cached entry should expire but
+        before it has actually been cleared, the cache does not return the
+        expired entry.
+        """
+        r = ([dns.RRHeader(b"example.com", dns.A, dns.IN, 60,
+                           dns.Record_A("127.0.0.1", 60))],
+             [dns.RRHeader(b"example.com", dns.A, dns.IN, 50,
+                           dns.Record_A("127.0.0.1", 50))],
+             [dns.RRHeader(b"example.com", dns.A, dns.IN, 40,
+                           dns.Record_A("127.0.0.1", 40))])
+
+        clock = task.Clock()
+        # Make sure timeouts never happen, so entries won't get cleared:
+        clock.callLater = lambda *args, **kwargs: None
+
+        c = cache.CacheResolver({
+            dns.Query(name=b"example.com", type=dns.A, cls=dns.IN) :
+                (clock.seconds(), r)}, reactor=clock)
+
+        clock.advance(60.1)
+
+        return self.assertFailure(
+            c.lookupAddress(b"example.com"), dns.DomainError)
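The TTL arithmetic exercised by test_normalLookup above can also be tried directly; a small sketch under the same task.Clock-as-reactor assumption:

    from twisted.internet import task
    from twisted.names import cache, dns

    clock = task.Clock()
    resolver = cache.CacheResolver(reactor=clock)
    resolver.cacheResult(
        dns.Query(name=b"example.com", type=dns.A, cls=dns.IN),
        ([dns.RRHeader(b"example.com", dns.A, dns.IN, 60,
                       dns.Record_A("127.0.0.1", 60))], [], []))

    clock.advance(10)

    def check(result):
        answers, authority, additional = result
        # The cached 60 second TTL is reduced by the 10 elapsed seconds.
        assert answers[0].ttl == 50

    resolver.lookupAddress(b"example.com").addCallback(check)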
diff --git a/ThirdParty/Twisted/twisted/names/test/test_client.py b/ThirdParty/Twisted/twisted/names/test/test_client.py
new file mode 100644
index 0000000..2960bbc
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_client.py
@@ -0,0 +1,1047 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for L{twisted.names.client}.
+"""
+
+import sys
+
+from zope.interface.verify import verifyObject
+
+from twisted.python.compat import set
+from twisted.python import failure
+from twisted.python.runtime import platform
+
+from twisted.internet import defer
+from twisted.internet.error import CannotListenError
+from twisted.internet.interfaces import IResolver
+from twisted.internet.test.modulehelpers import NoReactor
+from twisted.internet.task import Clock
+
+from twisted.names import error, client, dns, hosts, cache
+from twisted.names.error import DNSQueryTimeoutError
+from twisted.names.common import ResolverBase
+
+from twisted.names.test.test_hosts import GoodTempPathMixin
+
+from twisted.trial import unittest
+
+if platform.isWindows():
+    windowsSkip = "These tests need more work before they'll work on Windows."
+else:
+    windowsSkip = None
+
+class AlternateReactor(NoReactor):
+    """
+    A context manager which temporarily installs a different object as the
+    global reactor.
+    """
+    def __init__(self, reactor):
+        """
+        @param reactor: Any object to install as the global reactor.
+        """
+        NoReactor.__init__(self)
+        self.alternate = reactor
+
+
+    def __enter__(self):
+        NoReactor.__enter__(self)
+        import twisted.internet
+        twisted.internet.reactor = self.alternate
+        sys.modules['twisted.internet.reactor'] = self.alternate
+
+
+
+class FakeResolver(ResolverBase):
+
+    def _lookup(self, name, cls, qtype, timeout):
+        """
+        The getHostByNameTest does a different type of query that requires it
+        to return an A record from an ALL_RECORDS lookup, so we accommodate
+        that here.
+        """
+        if name == b'getHostByNameTest':
+            rr = dns.RRHeader(name=name, type=dns.A, cls=cls, ttl=60,
+                    payload=dns.Record_A(address='127.0.0.1', ttl=60))
+        else:
+            rr = dns.RRHeader(name=name, type=qtype, cls=cls, ttl=60)
+
+        results = [rr]
+        authority = []
+        additional = []
+        return defer.succeed((results, authority, additional))
+
+
+
+class StubPort(object):
+    """
+    A partial implementation of L{IListeningPort} which only keeps track of
+    whether it has been stopped.
+
+    @ivar disconnected: A C{bool} which is C{False} until C{stopListening} is
+        called, C{True} afterwards.
+    """
+    disconnected = False
+
+    def stopListening(self):
+        self.disconnected = True
+
+
+
+class StubDNSDatagramProtocol(object):
+    """
+    L{dns.DNSDatagramProtocol}-alike.
+
+    @ivar queries: A C{list} of tuples giving the arguments passed to
+        C{query} along with the L{defer.Deferred} which was returned from
+        the call.
+    """
+    def __init__(self):
+        self.queries = []
+        self.transport = StubPort()
+
+
+    def query(self, address, queries, timeout=10, id=None):
+        """
+        Record the given arguments and return a Deferred which will not be
+        called back by this code.
+        """
+        result = defer.Deferred()
+        self.queries.append((address, queries, timeout, id, result))
+        return result
+
+
+
+class GetResolverTests(unittest.TestCase):
+    """
+    Tests for L{client.getResolver}.
+    """
+    if windowsSkip:
+        skip = windowsSkip
+
+    def test_interface(self):
+        """
+        L{client.getResolver} returns an object providing L{IResolver}.
+        """
+        with AlternateReactor(Clock()):
+            resolver = client.getResolver()
+        self.assertTrue(verifyObject(IResolver, resolver))
+
+
+    def test_idempotent(self):
+        """
+        Multiple calls to L{client.getResolver} return the same L{IResolver}
+        implementation.
+        """
+        with AlternateReactor(Clock()):
+            a = client.getResolver()
+            b = client.getResolver()
+        self.assertIdentical(a, b)
+
+
+
+class CreateResolverTests(unittest.TestCase, GoodTempPathMixin):
+    """
+    Tests for L{client.createResolver}.
+    """
+    if windowsSkip:
+        skip = windowsSkip
+
+    def _hostsTest(self, resolver, filename):
+        res = [r for r in resolver.resolvers if isinstance(r, hosts.Resolver)]
+        self.assertEqual(1, len(res))
+        self.assertEqual(res[0].file, filename)
+
+
+    def test_defaultHosts(self):
+        """
+        L{client.createResolver} returns a L{resolve.ResolverChain} including a
+        L{hosts.Resolver} using I{/etc/hosts} if no alternate hosts file is
+        specified.
+        """
+        with AlternateReactor(Clock()):
+            sys.modules["twisted.internet.reactor"] = Clock()
+            resolver = client.createResolver()
+        self._hostsTest(resolver, b"/etc/hosts")
+
+
+    def test_overrideHosts(self):
+        """
+        The I{hosts} parameter to L{client.createResolver} overrides the hosts
+        file used by the L{hosts.Resolver} in the L{resolve.ResolverChain} it
+        returns.
+        """
+        with AlternateReactor(Clock()):
+            resolver = client.createResolver(hosts=b"/foo/bar")
+        self._hostsTest(resolver, b"/foo/bar")
+
+
+    def _resolvConfTest(self, resolver, filename):
+        """
+        Verify that C{resolver} has a L{client.Resolver} with a configuration
+        filename set to C{filename}.
+        """
+        res = [r for r in resolver.resolvers if isinstance(r, client.Resolver)]
+        self.assertEqual(1, len(res))
+        self.assertEqual(res[0].resolv, filename)
+
+
+    def test_reactor(self):
+        """
+        The L{client.Resolver} included in the L{resolve.ResolverChain} returned
+        by L{client.createResolver} uses the global reactor.
+        """
+        reactor = Clock()
+        with AlternateReactor(reactor):
+            resolver = client.createResolver()
+        res = [r for r in resolver.resolvers if isinstance(r, client.Resolver)]
+        self.assertEqual(1, len(res))
+        self.assertIdentical(reactor, res[0]._reactor)
+
+
+    def test_defaultResolvConf(self):
+        """
+        L{client.createResolver} returns a L{resolve.ResolverChain} including a
+        L{client.Resolver} using I{/etc/resolv.conf} if no alternate resolver
+        configuration file is specified.
+        """
+        with AlternateReactor(Clock()):
+            resolver = client.createResolver()
+        self._resolvConfTest(resolver, b"/etc/resolv.conf")
+
+
+    def test_overrideResolvConf(self):
+        """
+        The I{resolvconf} parameter to L{client.createResolver} overrides the
+        resolver configuration file used by the L{client.Resolver} in the
+        L{resolve.ResolverChain} it returns.
+        """
+        with AlternateReactor(Clock()):
+            resolver = client.createResolver(resolvconf=b"/foo/bar")
+        self._resolvConfTest(resolver, b"/foo/bar")
+
+
+    def test_defaultServers(self):
+        """
+        If no servers are given, addresses are taken from the file given by the
+        I{resolvconf} parameter to L{client.createResolver}.
+        """
+        resolvconf = self.path()
+        resolvconf.setContent(b"nameserver 127.1.2.3\n")
+        with AlternateReactor(Clock()):
+            resolver = client.createResolver(resolvconf=resolvconf.path)
+        res = [r for r in resolver.resolvers if isinstance(r, client.Resolver)]
+        self.assertEqual(1, len(res))
+        self.assertEqual([], res[0].servers)
+        self.assertEqual([("127.1.2.3", 53)], res[0].dynServers)
+
+
+    def test_overrideServers(self):
+        """
+        Servers passed to L{client.createResolver} are used in addition to any
+        found in the file given by the I{resolvconf} parameter.
+        """
+        resolvconf = self.path()
+        resolvconf.setContent(b"nameserver 127.1.2.3\n")
+        with AlternateReactor(Clock()):
+            resolver = client.createResolver(
+                servers=[("127.3.2.1", 53)], resolvconf=resolvconf.path)
+        res = [r for r in resolver.resolvers if isinstance(r, client.Resolver)]
+        self.assertEqual(1, len(res))
+        self.assertEqual([("127.3.2.1", 53)], res[0].servers)
+        self.assertEqual([("127.1.2.3", 53)], res[0].dynServers)
+
+
+    def test_cache(self):
+        """
+        L{client.createResolver} returns a L{resolve.ResolverChain} including a
+        L{cache.CacheResolver}.
+        """
+        with AlternateReactor(Clock()):
+            resolver = client.createResolver()
+        res = [r for r in resolver.resolvers if isinstance(r, cache.CacheResolver)]
+        self.assertEqual(1, len(res))
+
+
+
+
+class ResolverTests(unittest.TestCase):
+    """
+    Tests for L{client.Resolver}.
+    """
+    def test_noServers(self):
+        """
+        L{client.Resolver} raises L{ValueError} if constructed with neither
+        servers nor a nameserver configuration file.
+        """
+        self.assertRaises(ValueError, client.Resolver)
+
+
+    def test_missingConfiguration(self):
+        """
+        A missing nameserver configuration file results in no server information
+        being loaded from it (i.e., not an exception) and a default server being
+        provided.
+        """
+        resolver = client.Resolver(resolv=self.mktemp(), reactor=Clock())
+        self.assertEqual([("127.0.0.1", 53)], resolver.dynServers)
+
+
+    def test_domainEmptyArgument(self):
+        """
+        L{client.Resolver.parseConfig} treats a I{domain} line without an
+        argument as indicating a domain of C{b""}.
+        """
+        resolver = client.Resolver(servers=[("127.0.0.1", 53)])
+        resolver.parseConfig([b"domain\n"])
+        self.assertEqual(b"", resolver.domain)
+
+
+    def test_searchEmptyArgument(self):
+        """
+        L{client.Resolver.parseConfig} treats a I{search} line without an
+        argument as indicating an empty search suffix.
+        """
+        resolver = client.Resolver(servers=[("127.0.0.1", 53)])
+        resolver.parseConfig([b"search\n"])
+        self.assertEqual([], resolver.search)
+
+
+    def test_datagramQueryServerOrder(self):
+        """
+        L{client.Resolver.queryUDP} should issue queries to its
+        L{dns.DNSDatagramProtocol} with server addresses taken from its own
+        C{servers} and C{dynServers} lists, proceeding through them in order
+        as L{DNSQueryTimeoutError}s occur.
+        """
+        protocol = StubDNSDatagramProtocol()
+
+        servers = [object(), object()]
+        dynServers = [object(), object()]
+        resolver = client.Resolver(servers=servers)
+        resolver.dynServers = dynServers
+        resolver._connectedProtocol = lambda: protocol
+
+        expectedResult = object()
+        queryResult = resolver.queryUDP(None)
+        queryResult.addCallback(self.assertEqual, expectedResult)
+
+        self.assertEqual(len(protocol.queries), 1)
+        self.assertIdentical(protocol.queries[0][0], servers[0])
+        protocol.queries[0][-1].errback(DNSQueryTimeoutError(0))
+        self.assertEqual(len(protocol.queries), 2)
+        self.assertIdentical(protocol.queries[1][0], servers[1])
+        protocol.queries[1][-1].errback(DNSQueryTimeoutError(1))
+        self.assertEqual(len(protocol.queries), 3)
+        self.assertIdentical(protocol.queries[2][0], dynServers[0])
+        protocol.queries[2][-1].errback(DNSQueryTimeoutError(2))
+        self.assertEqual(len(protocol.queries), 4)
+        self.assertIdentical(protocol.queries[3][0], dynServers[1])
+        protocol.queries[3][-1].callback(expectedResult)
+
+        return queryResult
+
+
+    def test_singleConcurrentRequest(self):
+        """
+        L{client.Resolver.query} only issues one request at a time per query.
+        Subsequent requests made before responses to prior ones are received
+        are queued and given the same response as is given to the first one.
+        """
+        protocol = StubDNSDatagramProtocol()
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        resolver._connectedProtocol = lambda: protocol
+        queries = protocol.queries
+
+        query = dns.Query(b'foo.example.com', dns.A, dns.IN)
+        # The first query should be passed to the underlying protocol.
+        firstResult = resolver.query(query)
+        self.assertEqual(len(queries), 1)
+
+        # The same query again should not be passed to the underlying protocol.
+        secondResult = resolver.query(query)
+        self.assertEqual(len(queries), 1)
+
+        # The response to the first query should be sent in response to both
+        # queries.
+        answer = object()
+        response = dns.Message()
+        response.answers.append(answer)
+        queries.pop()[-1].callback(response)
+
+        d = defer.gatherResults([firstResult, secondResult])
+        def cbFinished(responses):
+            firstResponse, secondResponse = responses
+            self.assertEqual(firstResponse, ([answer], [], []))
+            self.assertEqual(secondResponse, ([answer], [], []))
+        d.addCallback(cbFinished)
+        return d
+
+
+    def test_multipleConcurrentRequests(self):
+        """
+        L{client.Resolver.query} issues a request for each different concurrent
+        query.
+        """
+        protocol = StubDNSDatagramProtocol()
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        resolver._connectedProtocol = lambda: protocol
+        queries = protocol.queries
+
+        # The first query should be passed to the underlying protocol.
+        firstQuery = dns.Query(b'foo.example.com', dns.A)
+        resolver.query(firstQuery)
+        self.assertEqual(len(queries), 1)
+
+        # A query for a different name is also passed to the underlying
+        # protocol.
+        secondQuery = dns.Query(b'bar.example.com', dns.A)
+        resolver.query(secondQuery)
+        self.assertEqual(len(queries), 2)
+
+        # A query for a different type is also passed to the underlying
+        # protocol.
+        thirdQuery = dns.Query(b'foo.example.com', dns.A6)
+        resolver.query(thirdQuery)
+        self.assertEqual(len(queries), 3)
+
+
+    def test_multipleSequentialRequests(self):
+        """
+        After a response is received to a query issued with
+        L{client.Resolver.query}, another query with the same parameters
+        results in a new network request.
+        """
+        protocol = StubDNSDatagramProtocol()
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        resolver._connectedProtocol = lambda: protocol
+        queries = protocol.queries
+
+        query = dns.Query(b'foo.example.com', dns.A)
+
+        # The first query should be passed to the underlying protocol.
+        resolver.query(query)
+        self.assertEqual(len(queries), 1)
+
+        # Deliver the response.
+        queries.pop()[-1].callback(dns.Message())
+
+        # Repeating the first query should touch the protocol again.
+        resolver.query(query)
+        self.assertEqual(len(queries), 1)
+
+
+    def test_multipleConcurrentFailure(self):
+        """
+        If the result of a request is an error response, the Deferreds for all
+        concurrently issued requests associated with that result fire with the
+        L{Failure}.
+        """
+        protocol = StubDNSDatagramProtocol()
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        resolver._connectedProtocol = lambda: protocol
+        queries = protocol.queries
+
+        query = dns.Query(b'foo.example.com', dns.A)
+        firstResult = resolver.query(query)
+        secondResult = resolver.query(query)
+
+        class ExpectedException(Exception):
+            pass
+
+        queries.pop()[-1].errback(failure.Failure(ExpectedException()))
+
+        return defer.gatherResults([
+                self.assertFailure(firstResult, ExpectedException),
+                self.assertFailure(secondResult, ExpectedException)])
+
+
+    def test_connectedProtocol(self):
+        """
+        L{client.Resolver._connectedProtocol} returns a new
+        L{DNSDatagramProtocol} connected to a new address with a
+        cryptographically secure random port number.
+        """
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        firstProto = resolver._connectedProtocol()
+        secondProto = resolver._connectedProtocol()
+
+        self.assertNotIdentical(firstProto.transport, None)
+        self.assertNotIdentical(secondProto.transport, None)
+        self.assertNotEqual(
+            firstProto.transport.getHost().port,
+            secondProto.transport.getHost().port)
+
+        return defer.gatherResults([
+                defer.maybeDeferred(firstProto.transport.stopListening),
+                defer.maybeDeferred(secondProto.transport.stopListening)])
+
+
+    def test_differentProtocol(self):
+        """
+        L{client.Resolver._connectedProtocol} is called once each time a UDP
+        request needs to be issued and the resulting protocol instance is used
+        for that request.
+        """
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        protocols = []
+
+        class FakeProtocol(object):
+            def __init__(self):
+                self.transport = StubPort()
+
+            def query(self, address, query, timeout=10, id=None):
+                protocols.append(self)
+                return defer.succeed(dns.Message())
+
+        resolver._connectedProtocol = FakeProtocol
+        resolver.query(dns.Query(b'foo.example.com'))
+        resolver.query(dns.Query(b'bar.example.com'))
+        self.assertEqual(len(set(protocols)), 2)
+
+
+    def test_disallowedPort(self):
+        """
+        If a port number is initially selected which cannot be bound, the
+        L{CannotListenError} is handled and another port number is attempted.
+        """
+        ports = []
+
+        class FakeReactor(object):
+            def listenUDP(self, port, *args):
+                ports.append(port)
+                if len(ports) == 1:
+                    raise CannotListenError(None, port, None)
+
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        resolver._reactor = FakeReactor()
+
+        resolver._connectedProtocol()
+        self.assertEqual(len(set(ports)), 2)
+
+
+    def test_differentProtocolAfterTimeout(self):
+        """
+        When a query issued by L{client.Resolver.query} times out, the retry
+        uses a new protocol instance.
+        """
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        protocols = []
+        results = [defer.fail(failure.Failure(DNSQueryTimeoutError(None))),
+                   defer.succeed(dns.Message())]
+
+        class FakeProtocol(object):
+            def __init__(self):
+                self.transport = StubPort()
+
+            def query(self, address, query, timeout=10, id=None):
+                protocols.append(self)
+                return results.pop(0)
+
+        resolver._connectedProtocol = FakeProtocol
+        resolver.query(dns.Query(b'foo.example.com'))
+        self.assertEqual(len(set(protocols)), 2)
+
+
+    def test_protocolShutDown(self):
+        """
+        After the L{Deferred} returned by L{DNSDatagramProtocol.query} is
+        called back, the L{DNSDatagramProtocol} is disconnected from its
+        transport.
+        """
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        protocols = []
+        result = defer.Deferred()
+
+        class FakeProtocol(object):
+            def __init__(self):
+                self.transport = StubPort()
+
+            def query(self, address, query, timeout=10, id=None):
+                protocols.append(self)
+                return result
+
+        resolver._connectedProtocol = FakeProtocol
+        resolver.query(dns.Query(b'foo.example.com'))
+
+        self.assertFalse(protocols[0].transport.disconnected)
+        result.callback(dns.Message())
+        self.assertTrue(protocols[0].transport.disconnected)
+
+
+    def test_protocolShutDownAfterTimeout(self):
+        """
+        The L{DNSDatagramProtocol} created when an interim timeout occurs is
+        also disconnected from its transport after the Deferred returned by its
+        query method completes.
+        """
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        protocols = []
+        result = defer.Deferred()
+        results = [defer.fail(failure.Failure(DNSQueryTimeoutError(None))),
+                   result]
+
+        class FakeProtocol(object):
+            def __init__(self):
+                self.transport = StubPort()
+
+            def query(self, address, query, timeout=10, id=None):
+                protocols.append(self)
+                return results.pop(0)
+
+        resolver._connectedProtocol = FakeProtocol
+        resolver.query(dns.Query(b'foo.example.com'))
+
+        self.assertFalse(protocols[1].transport.disconnected)
+        result.callback(dns.Message())
+        self.assertTrue(protocols[1].transport.disconnected)
+
+
+    def test_protocolShutDownAfterFailure(self):
+        """
+        If the L{Deferred} returned by L{DNSDatagramProtocol.query} fires with
+        a failure, the L{DNSDatagramProtocol} is still disconnected from its
+        transport.
+        """
+        class ExpectedException(Exception):
+            pass
+
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        protocols = []
+        result = defer.Deferred()
+
+        class FakeProtocol(object):
+            def __init__(self):
+                self.transport = StubPort()
+
+            def query(self, address, query, timeout=10, id=None):
+                protocols.append(self)
+                return result
+
+        resolver._connectedProtocol = FakeProtocol
+        queryResult = resolver.query(dns.Query(b'foo.example.com'))
+
+        self.assertFalse(protocols[0].transport.disconnected)
+        result.errback(failure.Failure(ExpectedException()))
+        self.assertTrue(protocols[0].transport.disconnected)
+
+        return self.assertFailure(queryResult, ExpectedException)
+
+
+    def test_tcpDisconnectRemovesFromConnections(self):
+        """
+        When a TCP DNS protocol associated with a Resolver disconnects, it is
+        removed from the Resolver's connection list.
+        """
+        resolver = client.Resolver(servers=[('example.com', 53)])
+        protocol = resolver.factory.buildProtocol(None)
+        protocol.makeConnection(None)
+        self.assertIn(protocol, resolver.connections)
+
+        # Disconnecting should remove the protocol from the connection list:
+        protocol.connectionLost(None)
+        self.assertNotIn(protocol, resolver.connections)
+
+
+
+class ClientTestCase(unittest.TestCase):
+
+    def setUp(self):
+        """
+        Replace the resolver with a FakeResolver
+        """
+        client.theResolver = FakeResolver()
+        self.hostname = b'example.com'
+        self.hostnameForGetHostByName = b'getHostByNameTest'
+
+    def tearDown(self):
+        """
+        Set the resolver to None so that it will be recreated the next time a
+        name lookup is done.
+        """
+        client.theResolver = None
+
+    def checkResult(self, results, qtype):
+        """
+        Verify that the result is the same query type as what is expected.
+        """
+        answers, authority, additional = results
+        result = answers[0]
+        self.assertEqual(result.name.name, self.hostname)
+        self.assertEqual(result.type, qtype)
+
+    def checkGetHostByName(self, result):
+        """
+        Test that the getHostByName query returns the 127.0.0.1 address.
+        """
+        self.assertEqual(result, '127.0.0.1')
+
+    def test_getHostByName(self):
+        """
+        Do a getHostByName lookup of a value that should return 127.0.0.1.
+        """
+        d = client.getHostByName(self.hostnameForGetHostByName)
+        d.addCallback(self.checkGetHostByName)
+        return d
+
+    def test_lookupAddress(self):
+        """
+        Do a lookup and test that the resolver issues the correct query type.
+        We check this by verifying that FakeResolver returns a result record
+        with the same query type as the one we issued.
+        """
+        d = client.lookupAddress(self.hostname)
+        d.addCallback(self.checkResult, dns.A)
+        return d
+
+    def test_lookupIPV6Address(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupIPV6Address(self.hostname)
+        d.addCallback(self.checkResult, dns.AAAA)
+        return d
+
+    def test_lookupAddress6(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupAddress6(self.hostname)
+        d.addCallback(self.checkResult, dns.A6)
+        return d
+
+    def test_lookupNameservers(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupNameservers(self.hostname)
+        d.addCallback(self.checkResult, dns.NS)
+        return d
+
+    def test_lookupCanonicalName(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupCanonicalName(self.hostname)
+        d.addCallback(self.checkResult, dns.CNAME)
+        return d
+
+    def test_lookupAuthority(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupAuthority(self.hostname)
+        d.addCallback(self.checkResult, dns.SOA)
+        return d
+
+    def test_lookupMailBox(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupMailBox(self.hostname)
+        d.addCallback(self.checkResult, dns.MB)
+        return d
+
+    def test_lookupMailGroup(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupMailGroup(self.hostname)
+        d.addCallback(self.checkResult, dns.MG)
+        return d
+
+    def test_lookupMailRename(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupMailRename(self.hostname)
+        d.addCallback(self.checkResult, dns.MR)
+        return d
+
+    def test_lookupNull(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupNull(self.hostname)
+        d.addCallback(self.checkResult, dns.NULL)
+        return d
+
+    def test_lookupWellKnownServices(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupWellKnownServices(self.hostname)
+        d.addCallback(self.checkResult, dns.WKS)
+        return d
+
+    def test_lookupPointer(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupPointer(self.hostname)
+        d.addCallback(self.checkResult, dns.PTR)
+        return d
+
+    def test_lookupHostInfo(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupHostInfo(self.hostname)
+        d.addCallback(self.checkResult, dns.HINFO)
+        return d
+
+    def test_lookupMailboxInfo(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupMailboxInfo(self.hostname)
+        d.addCallback(self.checkResult, dns.MINFO)
+        return d
+
+    def test_lookupMailExchange(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupMailExchange(self.hostname)
+        d.addCallback(self.checkResult, dns.MX)
+        return d
+
+    def test_lookupText(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupText(self.hostname)
+        d.addCallback(self.checkResult, dns.TXT)
+        return d
+
+    def test_lookupSenderPolicy(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupSenderPolicy(self.hostname)
+        d.addCallback(self.checkResult, dns.SPF)
+        return d
+
+    def test_lookupResponsibility(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupResponsibility(self.hostname)
+        d.addCallback(self.checkResult, dns.RP)
+        return d
+
+    def test_lookupAFSDatabase(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupAFSDatabase(self.hostname)
+        d.addCallback(self.checkResult, dns.AFSDB)
+        return d
+
+    def test_lookupService(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupService(self.hostname)
+        d.addCallback(self.checkResult, dns.SRV)
+        return d
+
+
+    def test_lookupZone(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupZone(self.hostname)
+        d.addCallback(self.checkResult, dns.AXFR)
+        return d
+
+
+    def test_lookupAllRecords(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupAllRecords(self.hostname)
+        d.addCallback(self.checkResult, dns.ALL_RECORDS)
+        return d
+
+
+    def test_lookupNamingAuthorityPointer(self):
+        """
+        See L{test_lookupAddress}
+        """
+        d = client.lookupNamingAuthorityPointer(self.hostname)
+        d.addCallback(self.checkResult, dns.NAPTR)
+        return d
+
+
+
+class FilterAnswersTests(unittest.TestCase):
+    """
+    Test L{twisted.names.client.Resolver.filterAnswers}'s handling of various
+    error conditions it might encounter.
+    """
+    def setUp(self):
+        # Create a resolver pointed at an invalid server - we won't be hitting
+        # the network in any of these tests.
+        self.resolver = client.Resolver(servers=[('0.0.0.0', 0)])
+
+
+    def test_truncatedMessage(self):
+        """
+        Test that a truncated message results in an equivalent request made via
+        TCP.
+        """
+        m = dns.Message(trunc=True)
+        m.addQuery(b'example.com')
+
+        def queryTCP(queries):
+            self.assertEqual(queries, m.queries)
+            response = dns.Message()
+            response.answers = ['answer']
+            response.authority = ['authority']
+            response.additional = ['additional']
+            return defer.succeed(response)
+        self.resolver.queryTCP = queryTCP
+        d = self.resolver.filterAnswers(m)
+        d.addCallback(
+            self.assertEqual, (['answer'], ['authority'], ['additional']))
+        return d
+
+
+    def _rcodeTest(self, rcode, exc):
+        m = dns.Message(rCode=rcode)
+        err = self.resolver.filterAnswers(m)
+        err.trap(exc)
+
+
+    def test_formatError(self):
+        """
+        Test that a message with a result code of C{EFORMAT} results in a
+        failure wrapped around L{DNSFormatError}.
+        """
+        return self._rcodeTest(dns.EFORMAT, error.DNSFormatError)
+
+
+    def test_serverError(self):
+        """
+        Like L{test_formatError} but for C{ESERVER}/L{DNSServerError}.
+        """
+        return self._rcodeTest(dns.ESERVER, error.DNSServerError)
+
+
+    def test_nameError(self):
+        """
+        Like L{test_formatError} but for C{ENAME}/L{DNSNameError}.
+        """
+        return self._rcodeTest(dns.ENAME, error.DNSNameError)
+
+
+    def test_notImplementedError(self):
+        """
+        Like L{test_formatError} but for C{ENOTIMP}/L{DNSNotImplementedError}.
+        """
+        return self._rcodeTest(dns.ENOTIMP, error.DNSNotImplementedError)
+
+
+    def test_refusedError(self):
+        """
+        Like L{test_formatError} but for C{EREFUSED}/L{DNSQueryRefusedError}.
+        """
+        return self._rcodeTest(dns.EREFUSED, error.DNSQueryRefusedError)
+
+
+    def test_refusedErrorUnknown(self):
+        """
+        Like L{test_formatError} but for an unrecognized error code and
+        L{DNSUnknownError}.
+        """
+        return self._rcodeTest(dns.EREFUSED + 1, error.DNSUnknownError)
+
+
+
+class FakeDNSDatagramProtocol(object):
+    def __init__(self):
+        self.queries = []
+        self.transport = StubPort()
+
+    def query(self, address, queries, timeout=10, id=None):
+        self.queries.append((address, queries, timeout, id))
+        return defer.fail(error.DNSQueryTimeoutError(queries))
+
+    def removeResend(self, id):
+        # Ignore this for the time being.
+        pass
+
+
+
+class RetryLogic(unittest.TestCase):
+    """
+    Tests for query retrying implemented by L{client.Resolver}.
+    """
+    testServers = [
+        '1.2.3.4',
+        '4.3.2.1',
+        'a.b.c.d',
+        'z.y.x.w']
+
+    def test_roundRobinBackoff(self):
+        """
+        When timeouts occur waiting for responses to queries, the next
+        configured server is issued the query.  When the query has been issued
+        to all configured servers, the timeout is increased and the process
+        begins again at the beginning.
+        """
+        addrs = [(x, 53) for x in self.testServers]
+        r = client.Resolver(resolv=None, servers=addrs)
+        proto = FakeDNSDatagramProtocol()
+        r._connectedProtocol = lambda: proto
+        return r.lookupAddress(b"foo.example.com"
+            ).addCallback(self._cbRoundRobinBackoff
+            ).addErrback(self._ebRoundRobinBackoff, proto
+            )
+
+
+    def _cbRoundRobinBackoff(self, result):
+        self.fail("Lookup address succeeded, should have timed out")
+
+
+    def _ebRoundRobinBackoff(self, failure, fakeProto):
+        failure.trap(defer.TimeoutError)
+
+        # Assert that each server is tried with a particular timeout
+        # before the timeout is increased and the attempts are repeated.
+
+        for t in (1, 3, 11, 45):
+            tries = fakeProto.queries[:len(self.testServers)]
+            del fakeProto.queries[:len(self.testServers)]
+
+            tries.sort()
+            expected = list(self.testServers)
+            expected.sort()
+
+            for ((addr, query, timeout, id), expectedAddr) in zip(tries, expected):
+                self.assertEqual(addr, (expectedAddr, 53))
+                self.assertEqual(timeout, t)
+
+        self.assertFalse(fakeProto.queries)
+
+
+
+class ThreadedResolverTests(unittest.TestCase):
+    """
+    Tests for L{client.ThreadedResolver}.
+    """
+    def test_deprecated(self):
+        """
+        L{client.ThreadedResolver} is deprecated.  Instantiating it emits a
+        deprecation warning pointing at the code that does the instantiation.
+        """
+        client.ThreadedResolver()
+        warnings = self.flushWarnings(offendingFunctions=[self.test_deprecated])
+        self.assertEqual(
+            warnings[0]['message'],
+            "twisted.names.client.ThreadedResolver is deprecated since "
+            "Twisted 9.0, use twisted.internet.base.ThreadedResolver "
+            "instead.")
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(len(warnings), 1)
diff --git a/ThirdParty/Twisted/twisted/names/test/test_common.py b/ThirdParty/Twisted/twisted/names/test/test_common.py
new file mode 100644
index 0000000..0f711f4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_common.py
@@ -0,0 +1,126 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.names.common}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import SynchronousTestCase
+from twisted.python.failure import Failure
+from twisted.names.common import ResolverBase
+from twisted.names.dns import EFORMAT, ESERVER, ENAME, ENOTIMP, EREFUSED, Query
+from twisted.names.error import DNSFormatError, DNSServerError, DNSNameError
+from twisted.names.error import DNSNotImplementedError, DNSQueryRefusedError
+from twisted.names.error import DNSUnknownError
+
+
+class ExceptionForCodeTests(SynchronousTestCase):
+    """
+    Tests for L{ResolverBase.exceptionForCode}.
+    """
+    def setUp(self):
+        self.exceptionForCode = ResolverBase().exceptionForCode
+
+
+    def test_eformat(self):
+        """
+        L{ResolverBase.exceptionForCode} converts L{EFORMAT} to
+        L{DNSFormatError}.
+        """
+        self.assertIdentical(self.exceptionForCode(EFORMAT), DNSFormatError)
+
+
+    def test_eserver(self):
+        """
+        L{ResolverBase.exceptionForCode} converts L{ESERVER} to
+        L{DNSServerError}.
+        """
+        self.assertIdentical(self.exceptionForCode(ESERVER), DNSServerError)
+
+
+    def test_ename(self):
+        """
+        L{ResolverBase.exceptionForCode} converts L{ENAME} to L{DNSNameError}.
+        """
+        self.assertIdentical(self.exceptionForCode(ENAME), DNSNameError)
+
+
+    def test_enotimp(self):
+        """
+        L{ResolverBase.exceptionForCode} converts L{ENOTIMP} to
+        L{DNSNotImplementedError}.
+        """
+        self.assertIdentical(
+            self.exceptionForCode(ENOTIMP), DNSNotImplementedError)
+
+
+    def test_erefused(self):
+        """
+        L{ResolverBase.exceptionForCode} converts L{EREFUSED} to
+        L{DNSQueryRefusedError}.
+        """
+        self.assertIdentical(
+            self.exceptionForCode(EREFUSED), DNSQueryRefusedError)
+
+
+    def test_other(self):
+        """
+        L{ResolverBase.exceptionForCode} converts any other response code to
+        L{DNSUnknownError}.
+        """
+        self.assertIdentical(
+            self.exceptionForCode(object()), DNSUnknownError)
+
+
+
+class QueryTests(SynchronousTestCase):
+    """
+    Tests for L{ResolverBase.query}.
+    """
+    def test_typeToMethodDispatch(self):
+        """
+        L{ResolverBase.query} looks up a method to invoke using the type of the
+        query passed to it and the C{typeToMethod} mapping on itself.
+        """
+        results = []
+        resolver = ResolverBase()
+        resolver.typeToMethod = {
+            12345: lambda query, timeout: results.append((query, timeout))}
+        query = Query(name=b"example.com", type=12345)
+        resolver.query(query, 123)
+        self.assertEqual([(b"example.com", 123)], results)
+
+
+    def test_typeToMethodResult(self):
+        """
+        L{ResolverBase.query} returns a L{Deferred} which fires with the result
+        of the method found in the C{typeToMethod} mapping for the type of the
+        query passed to it.
+        """
+        expected = object()
+        resolver = ResolverBase()
+        resolver.typeToMethod = {54321: lambda query, timeout: expected}
+        query = Query(name=b"example.com", type=54321)
+        queryDeferred = resolver.query(query, 123)
+        result = []
+        queryDeferred.addBoth(result.append)
+        self.assertEqual(expected, result[0])
+
+
+    def test_unknownQueryType(self):
+        """
+        L{ResolverBase.query} returns a L{Deferred} which fails with
+        L{NotImplementedError} when called with a query of a type not present in
+        its C{typeToMethod} dictionary.
+        """
+        resolver = ResolverBase()
+        resolver.typeToMethod = {}
+        query = Query(name=b"example.com", type=12345)
+        queryDeferred = resolver.query(query, 123)
+        result = []
+        queryDeferred.addBoth(result.append)
+        self.assertIsInstance(result[0], Failure)
+        result[0].trap(NotImplementedError)
+
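+
+
+# Illustrative sketch, not part of upstream Twisted: the behaviour asserted by
+# QueryTests above amounts to a dictionary dispatch keyed on the query type.
+# The hypothetical helper below restates that contract for documentation only
+# (a known type calls the mapped method with the name bytes and the timeout;
+# an unknown type fails with NotImplementedError).
+def _exampleDispatch(typeToMethod, query, timeout):
+    from twisted.internet import defer
+    method = typeToMethod.get(query.type)
+    if method is None:
+        # Mirrors test_unknownQueryType: unknown types fail the Deferred.
+        return defer.fail(NotImplementedError(query.type))
+    # Mirrors test_typeToMethodDispatch and test_typeToMethodResult: the
+    # mapped method receives the name bytes and the timeout, and its return
+    # value fires the resulting Deferred.
+    return defer.maybeDeferred(method, query.name.name, timeout)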
diff --git a/ThirdParty/Twisted/twisted/names/test/test_dns.py b/ThirdParty/Twisted/twisted/names/test/test_dns.py
new file mode 100644
index 0000000..32635ef
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_dns.py
@@ -0,0 +1,1790 @@
+# test-case-name: twisted.names.test.test_dns
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for twisted.names.dns.
+"""
+
+from __future__ import division, absolute_import
+
+from io import BytesIO
+
+import struct
+
+from twisted.python.failure import Failure
+from twisted.internet import address, task
+from twisted.internet.error import CannotListenError, ConnectionDone
+from twisted.trial import unittest
+from twisted.names import dns
+
+from twisted.test import proto_helpers
+
+RECORD_TYPES = [
+    dns.Record_NS, dns.Record_MD, dns.Record_MF, dns.Record_CNAME,
+    dns.Record_MB, dns.Record_MG, dns.Record_MR, dns.Record_PTR,
+    dns.Record_DNAME, dns.Record_A, dns.Record_SOA, dns.Record_NULL,
+    dns.Record_WKS, dns.Record_SRV, dns.Record_AFSDB, dns.Record_RP,
+    dns.Record_HINFO, dns.Record_MINFO, dns.Record_MX, dns.Record_TXT,
+    dns.Record_AAAA, dns.Record_A6, dns.Record_NAPTR, dns.UnknownRecord,
+    ]
+
+
+class Ord2ByteTests(unittest.TestCase):
+    """
+    Tests for L{dns._ord2bytes}.
+    """
+    def test_ord2byte(self):
+        """
+        L{dns._ord2bytes} accepts an integer and returns a byte string of length
+        one with an ordinal value equal to the given integer.
+        """
+        self.assertEqual(b'\x10', dns._ord2bytes(0x10))
+
+
+
+class Str2TimeTests(unittest.TestCase):
+    """
+    Tests for L{dns.str2time}.
+    """
+    def test_nonString(self):
+        """
+        When passed a non-string object, L{dns.str2time} returns it unmodified.
+        """
+        time = object()
+        self.assertIdentical(time, dns.str2time(time))
+
+
+    def test_seconds(self):
+        """
+        Passed a string giving a number of seconds, L{dns.str2time} returns the
+        number of seconds represented.  For example, C{"10S"} represents C{10}
+        seconds.
+        """
+        self.assertEqual(10, dns.str2time("10S"))
+
+
+    def test_minutes(self):
+        """
+        Like C{test_seconds}, but for the C{"M"} suffix which multiplies the
+        time value by C{60} (the number of seconds in a minute!).
+        """
+        self.assertEqual(2 * 60, dns.str2time("2M"))
+
+
+    def test_hours(self):
+        """
+        Like C{test_seconds}, but for the C{"H"} suffix which multiplies the
+        time value by C{3600}, the number of seconds in an hour.
+        """
+        self.assertEqual(3 * 3600, dns.str2time("3H"))
+
+
+    def test_days(self):
+        """
+        Like L{test_seconds}, but for the C{"D"} suffix which multiplies the
+        time value by C{86400}, the number of seconds in a day.
+        """
+        self.assertEqual(4 * 86400, dns.str2time("4D"))
+
+
+    def test_weeks(self):
+        """
+        Like L{test_seconds}, but for the C{"W"} suffix which multiplies the
+        time value by C{604800}, the number of seconds in a week.
+        """
+        self.assertEqual(5 * 604800, dns.str2time("5W"))
+
+
+    def test_years(self):
+        """
+        Like L{test_seconds}, but for the C{"Y"} suffix which multiplies the
+        time value by C{31536000}, the number of seconds in a year.
+        """
+        self.assertEqual(6 * 31536000, dns.str2time("6Y"))
+
+
+    def test_invalidPrefix(self):
+        """
+        If a non-integer prefix is given, L{dns.str2time} raises L{ValueError}.
+        """
+        self.assertRaises(ValueError, dns.str2time, "fooS")
+
+
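+
+# Illustrative sketch, not part of upstream Twisted: the conversions asserted
+# by Str2TimeTests above boil down to a suffix-to-seconds lookup.  The
+# hypothetical helper below restates only the cases the tests cover and is
+# kept here purely as documentation.
+_EXAMPLE_TIME_SUFFIXES = {
+    'S': 1, 'M': 60, 'H': 3600, 'D': 86400, 'W': 604800, 'Y': 31536000}
+
+def _exampleStr2Seconds(value):
+    # Non-string values pass through unchanged, as in test_nonString.
+    if not isinstance(value, str):
+        return value
+    # "2M" -> 2 * 60, "3H" -> 3 * 3600, and so on; a non-integer prefix
+    # raises ValueError from int(), as in test_invalidPrefix.
+    return int(value[:-1]) * _EXAMPLE_TIME_SUFFIXES[value[-1].upper()]
+
+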
+
+class NameTests(unittest.TestCase):
+    """
+    Tests for L{Name}, the representation of a single domain name with support
+    for encoding into and decoding from DNS message format.
+    """
+    def test_nonStringName(self):
+        """
+        When constructed with a name which is not a byte string, L{Name}
+        raises L{TypeError}.
+        """
+        self.assertRaises(TypeError, dns.Name, 123)
+        self.assertRaises(TypeError, dns.Name, object())
+        self.assertRaises(TypeError, dns.Name, [])
+        self.assertRaises(TypeError, dns.Name, u"text")
+
+
+    def test_decode(self):
+        """
+        L{Name.decode} populates the L{Name} instance with name information read
+        from the file-like object passed to it.
+        """
+        n = dns.Name()
+        n.decode(BytesIO(b"\x07example\x03com\x00"))
+        self.assertEqual(n.name, b"example.com")
+
+
+    def test_encode(self):
+        """
+        L{Name.encode} encodes its name information and writes it to the
+        file-like object passed to it.
+        """
+        name = dns.Name(b"foo.example.com")
+        stream = BytesIO()
+        name.encode(stream)
+        self.assertEqual(stream.getvalue(), b"\x03foo\x07example\x03com\x00")
+
+
+    def test_encodeWithCompression(self):
+        """
+        If a compression dictionary is passed to it, L{Name.encode} uses offset
+        information from it to encode its name with references to existing
+        labels in the stream instead of including another copy of them in the
+        output.  It also updates the compression dictionary with the location of
+        the name it writes to the stream.
+        """
+        name = dns.Name(b"foo.example.com")
+        compression = {b"example.com": 0x17}
+
+        # Some bytes already encoded into the stream for this message
+        previous = b"some prefix to change .tell()"
+        stream = BytesIO()
+        stream.write(previous)
+
+        # The position at which the encoded form of this new name will appear in
+        # the stream.
+        expected = len(previous) + dns.Message.headerSize
+        name.encode(stream, compression)
+        self.assertEqual(
+            b"\x03foo\xc0\x17",
+            stream.getvalue()[len(previous):])
+        self.assertEqual(
+            {b"example.com": 0x17, b"foo.example.com": expected},
+            compression)
+
+
+    def test_unknown(self):
+        """
+        A resource record of unknown type and class is parsed into an
+        L{UnknownRecord} instance with its data preserved, and an
+        L{UnknownRecord} instance is serialized to a string equal to the one it
+        was parsed from.
+        """
+        wire = (
+            b'\x01\x00' # Message ID
+            b'\x00' # answer bit, opCode nibble, auth bit, trunc bit, recursive
+                    # bit
+            b'\x00' # recursion bit, empty bit, empty bit, empty bit, response
+                    # code nibble
+            b'\x00\x01' # number of queries
+            b'\x00\x01' # number of answers
+            b'\x00\x00' # number of authorities
+            b'\x00\x01' # number of additionals
+
+            # query
+            b'\x03foo\x03bar\x00'    # foo.bar
+            b'\xde\xad'              # type=0xdead
+            b'\xbe\xef'              # cls=0xbeef
+
+            # 1st answer
+            b'\xc0\x0c'              # foo.bar - compressed
+            b'\xde\xad'              # type=0xdead
+            b'\xbe\xef'              # cls=0xbeef
+            b'\x00\x00\x01\x01'      # ttl=257
+            b'\x00\x08somedata'      # some payload data
+
+            # 1st additional
+            b'\x03baz\x03ban\x00'    # baz.ban
+            b'\x00\x01'              # type=A
+            b'\x00\x01'              # cls=IN
+            b'\x00\x00\x01\x01'      # ttl=257
+            b'\x00\x04'              # len=4
+            b'\x01\x02\x03\x04'      # 1.2.3.4
+            )
+
+        msg = dns.Message()
+        msg.fromStr(wire)
+
+        self.assertEqual(msg.queries, [
+                dns.Query(b'foo.bar', type=0xdead, cls=0xbeef),
+                ])
+        self.assertEqual(msg.answers, [
+                dns.RRHeader(b'foo.bar', type=0xdead, cls=0xbeef, ttl=257,
+                             payload=dns.UnknownRecord(b'somedata', ttl=257)),
+                ])
+        self.assertEqual(msg.additional, [
+                dns.RRHeader(b'baz.ban', type=dns.A, cls=dns.IN, ttl=257,
+                             payload=dns.Record_A('1.2.3.4', ttl=257)),
+                ])
+
+        enc = msg.toStr()
+
+        self.assertEqual(enc, wire)
+
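+
+    # Illustrative sketch, not part of upstream Twisted: the hand-built
+    # twelve-byte DNS header in test_unknown above can be unpacked with the
+    # stdlib struct module.  This hypothetical helper exists only to document
+    # that layout (message ID, two flag bytes, then four section counts).
+    def _exampleParseHeader(self, wire):
+        (messageID, flagsA, flagsB, nQueries, nAnswers, nAuthority,
+         nAdditional) = struct.unpack('!HBBHHHH', wire[:12])
+        # flagsA carries the answer bit, opCode nibble, auth bit, trunc bit
+        # and recursion-desired bit; flagsB carries the recursion-available
+        # bit, three reserved bits and the response code nibble.
+        return messageID, nQueries, nAnswers, nAuthority, nAdditional
+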
+
+    def test_decodeWithCompression(self):
+        """
+        If the leading byte of an encoded label (in bytes read from a stream
+        passed to L{Name.decode}) has its two high bits set, the next byte is
+        treated as a pointer to another label in the stream and that label is
+        included in the name being decoded.
+        """
+        # Slightly modified version of the example from RFC 1035, section 4.1.4.
+        stream = BytesIO(
+            b"x" * 20 +
+            b"\x01f\x03isi\x04arpa\x00"
+            b"\x03foo\xc0\x14"
+            b"\x03bar\xc0\x20")
+        stream.seek(20)
+        name = dns.Name()
+        name.decode(stream)
+        # Verify we found the first name in the stream and that the stream
+        # position is left at the first byte after the decoded name.
+        self.assertEqual(b"f.isi.arpa", name.name)
+        self.assertEqual(32, stream.tell())
+
+        # Get the second name from the stream and make the same assertions.
+        name.decode(stream)
+        self.assertEqual(name.name, b"foo.f.isi.arpa")
+        self.assertEqual(38, stream.tell())
+
+        # Get the third and final name
+        name.decode(stream)
+        self.assertEqual(name.name, b"bar.foo.f.isi.arpa")
+        self.assertEqual(44, stream.tell())
+
+
+    def test_rejectCompressionLoop(self):
+        """
+        L{Name.decode} raises L{ValueError} if the stream passed to it includes
+        a compression pointer which forms a loop, causing the name to be
+        undecodable.
+        """
+        name = dns.Name()
+        stream = BytesIO(b"\xc0\x00")
+        self.assertRaises(ValueError, name.decode, stream)
+
+
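+
+# Illustrative sketch, not part of upstream Twisted: the compression pointer
+# exercised by test_encodeWithCompression and test_decodeWithCompression above
+# is a two-byte field whose top two bits are set and whose remaining fourteen
+# bits give the offset of the earlier labels within the message.  A
+# hypothetical encoder for such a pointer:
+def _exampleCompressionPointer(offset):
+    # 0xC000 sets the two high bits that mark this as a pointer rather than an
+    # ordinary length-prefixed label; offset 0x17 therefore encodes to the
+    # bytes c0 17 seen in test_encodeWithCompression.
+    return struct.pack('!H', 0xC000 | offset)
+
+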
+
+class RoundtripDNSTestCase(unittest.TestCase):
+    """
+    Encoding and then decoding various objects.
+    """
+
+    names = [b"example.org", b"go-away.fish.tv", b"23strikesback.net"]
+
+    def testName(self):
+        for n in self.names:
+            # encode the name
+            f = BytesIO()
+            dns.Name(n).encode(f)
+
+            # decode the name
+            f.seek(0, 0)
+            result = dns.Name()
+            result.decode(f)
+            self.assertEqual(result.name, n)
+
+    def test_query(self):
+        """
+        L{dns.Query.encode} writes a byte string representing the fields of
+        the query to the file-like object passed to it; those bytes can be
+        decoded into a new L{dns.Query} instance using L{dns.Query.decode}.
+        """
+        for n in self.names:
+            for dnstype in range(1, 17):
+                for dnscls in range(1, 5):
+                    # encode the query
+                    f = BytesIO()
+                    dns.Query(n, dnstype, dnscls).encode(f)
+
+                    # decode the result
+                    f.seek(0, 0)
+                    result = dns.Query()
+                    result.decode(f)
+                    self.assertEqual(result.name.name, n)
+                    self.assertEqual(result.type, dnstype)
+                    self.assertEqual(result.cls, dnscls)
+
+    def test_resourceRecordHeader(self):
+        """
+        L{dns.RRHeader.encode} encodes the record header's information and
+        writes it to the file-like object passed to it and
+        L{dns.RRHeader.decode} reads from a file-like object to re-construct a
+        L{dns.RRHeader} instance.
+        """
+        # encode the RR
+        f = BytesIO()
+        dns.RRHeader(b"test.org", 3, 4, 17).encode(f)
+
+        # decode the result
+        f.seek(0, 0)
+        result = dns.RRHeader()
+        result.decode(f)
+        self.assertEqual(result.name, dns.Name(b"test.org"))
+        self.assertEqual(result.type, 3)
+        self.assertEqual(result.cls, 4)
+        self.assertEqual(result.ttl, 17)
+
+
+    def test_resources(self):
+        """
+        L{dns.SimpleRecord.encode} encodes the record's name information and
+        writes it to the file-like object passed to it and
+        L{dns.SimpleRecord.decode} reads from a file-like object to re-construct
+        a L{dns.SimpleRecord} instance.
+        """
+        names = (
+            b"this.are.test.name",
+            b"will.compress.will.this.will.name.will.hopefully",
+            b"test.CASE.preSErVatIOn.YeAH",
+            b"a.s.h.o.r.t.c.a.s.e.t.o.t.e.s.t",
+            b"singleton"
+        )
+        for s in names:
+            f = BytesIO()
+            dns.SimpleRecord(s).encode(f)
+            f.seek(0, 0)
+            result = dns.SimpleRecord()
+            result.decode(f)
+            self.assertEqual(result.name, dns.Name(s))
+
+
+    def test_hashable(self):
+        """
+        Instances of all record types are hashable.
+        """
+        for k in RECORD_TYPES:
+            k1, k2 = k(), k()
+            hk1 = hash(k1)
+            hk2 = hash(k2)
+            self.assertEqual(hk1, hk2, "%s != %s (for %s)" % (hk1, hk2, k))
+
+
+    def test_Charstr(self):
+        """
+        Test L{dns.Charstr} encode and decode.
+        """
+        for n in self.names:
+            # encode the name
+            f = BytesIO()
+            dns.Charstr(n).encode(f)
+
+            # decode the name
+            f.seek(0, 0)
+            result = dns.Charstr()
+            result.decode(f)
+            self.assertEqual(result.string, n)
+
+
+    def _recordRoundtripTest(self, record):
+        """
+        Assert that encoding C{record} and then decoding the resulting bytes
+        creates a record which compares equal to C{record}.
+        """
+        stream = BytesIO()
+        record.encode(stream)
+
+        length = stream.tell()
+        stream.seek(0, 0)
+        replica = record.__class__()
+        replica.decode(stream, length)
+        self.assertEqual(record, replica)
+
+
+    def test_SOA(self):
+        """
+        The byte stream written by L{dns.Record_SOA.encode} can be used by
+        L{dns.Record_SOA.decode} to reconstruct the state of the original
+        L{dns.Record_SOA} instance.
+        """
+        self._recordRoundtripTest(
+            dns.Record_SOA(
+                mname=b'foo', rname=b'bar', serial=12, refresh=34,
+                retry=56, expire=78, minimum=90))
+
+
+    def test_A(self):
+        """
+        The byte stream written by L{dns.Record_A.encode} can be used by
+        L{dns.Record_A.decode} to reconstruct the state of the original
+        L{dns.Record_A} instance.
+        """
+        self._recordRoundtripTest(dns.Record_A('1.2.3.4'))
+
+
+    def test_NULL(self):
+        """
+        The byte stream written by L{dns.Record_NULL.encode} can be used by
+        L{dns.Record_NULL.decode} to reconstruct the state of the original
+        L{dns.Record_NULL} instance.
+        """
+        self._recordRoundtripTest(dns.Record_NULL(b'foo bar'))
+
+
+    def test_WKS(self):
+        """
+        The byte stream written by L{dns.Record_WKS.encode} can be used by
+        L{dns.Record_WKS.decode} to reconstruct the state of the original
+        L{dns.Record_WKS} instance.
+        """
+        self._recordRoundtripTest(dns.Record_WKS('1.2.3.4', 3, b'xyz'))
+
+
+    def test_AAAA(self):
+        """
+        The byte stream written by L{dns.Record_AAAA.encode} can be used by
+        L{dns.Record_AAAA.decode} to reconstruct the state of the original
+        L{dns.Record_AAAA} instance.
+        """
+        self._recordRoundtripTest(dns.Record_AAAA('::1'))
+
+
+    def test_A6(self):
+        """
+        The byte stream written by L{dns.Record_A6.encode} can be used by
+        L{dns.Record_A6.decode} to reconstruct the state of the original
+        L{dns.Record_A6} instance.
+        """
+        self._recordRoundtripTest(dns.Record_A6(8, '::1:2', b'foo'))
+
+
+    def test_SRV(self):
+        """
+        The byte stream written by L{dns.Record_SRV.encode} can be used by
+        L{dns.Record_SRV.decode} to reconstruct the state of the original
+        L{dns.Record_SRV} instance.
+        """
+        self._recordRoundtripTest(dns.Record_SRV(
+                priority=1, weight=2, port=3, target=b'example.com'))
+
+
+    def test_NAPTR(self):
+        """
+        Test L{dns.Record_NAPTR} encode and decode.
+        """
+        naptrs = [
+            (100, 10, b"u", b"sip+E2U",
+             b"!^.*$!sip:information at domain.tld!", b""),
+            (100, 50, b"s", b"http+I2L+I2C+I2R",
+             b"", b"_http._tcp.gatech.edu")]
+
+        for (order, preference, flags, service, regexp, replacement) in naptrs:
+            rin = dns.Record_NAPTR(order, preference, flags, service, regexp,
+                                   replacement)
+            e = BytesIO()
+            rin.encode(e)
+            e.seek(0, 0)
+            rout = dns.Record_NAPTR()
+            rout.decode(e)
+            self.assertEqual(rin.order, rout.order)
+            self.assertEqual(rin.preference, rout.preference)
+            self.assertEqual(rin.flags, rout.flags)
+            self.assertEqual(rin.service, rout.service)
+            self.assertEqual(rin.regexp, rout.regexp)
+            self.assertEqual(rin.replacement.name, rout.replacement.name)
+            self.assertEqual(rin.ttl, rout.ttl)
+
+
+    def test_AFSDB(self):
+        """
+        The byte stream written by L{dns.Record_AFSDB.encode} can be used by
+        L{dns.Record_AFSDB.decode} to reconstruct the state of the original
+        L{dns.Record_AFSDB} instance.
+        """
+        self._recordRoundtripTest(dns.Record_AFSDB(
+                subtype=3, hostname=b'example.com'))
+
+
+    def test_RP(self):
+        """
+        The byte stream written by L{dns.Record_RP.encode} can be used by
+        L{dns.Record_RP.decode} to reconstruct the state of the original
+        L{dns.Record_RP} instance.
+        """
+        self._recordRoundtripTest(dns.Record_RP(
+                mbox=b'alice.example.com', txt=b'example.com'))
+
+
+    def test_HINFO(self):
+        """
+        The byte stream written by L{dns.Record_HINFO.encode} can be used by
+        L{dns.Record_HINFO.decode} to reconstruct the state of the original
+        L{dns.Record_HINFO} instance.
+        """
+        self._recordRoundtripTest(dns.Record_HINFO(cpu=b'fast', os=b'great'))
+
+
+    def test_MINFO(self):
+        """
+        The byte stream written by L{dns.Record_MINFO.encode} can be used by
+        L{dns.Record_MINFO.decode} to reconstruct the state of the original
+        L{dns.Record_MINFO} instance.
+        """
+        self._recordRoundtripTest(dns.Record_MINFO(
+                rmailbx=b'foo', emailbx=b'bar'))
+
+
+    def test_MX(self):
+        """
+        The byte stream written by L{dns.Record_MX.encode} can be used by
+        L{dns.Record_MX.decode} to reconstruct the state of the original
+        L{dns.Record_MX} instance.
+        """
+        self._recordRoundtripTest(dns.Record_MX(
+                preference=1, name=b'example.com'))
+
+
+    def test_TXT(self):
+        """
+        The byte stream written by L{dns.Record_TXT.encode} can be used by
+        L{dns.Record_TXT.decode} to reconstruct the state of the original
+        L{dns.Record_TXT} instance.
+        """
+        self._recordRoundtripTest(dns.Record_TXT(b'foo', b'bar'))
+
+
+
+class MessageTestCase(unittest.SynchronousTestCase):
+    """
+    Tests for L{twisted.names.dns.Message}.
+    """
+
+    def testEmptyMessage(self):
+        """
+        Test that a message which has been truncated causes an EOFError to
+        be raised when it is parsed.
+        """
+        msg = dns.Message()
+        self.assertRaises(EOFError, msg.fromStr, b'')
+
+
+    def test_emptyQuery(self):
+        """
+        Test that bytes representing an empty query message can be decoded
+        as such.
+        """
+        msg = dns.Message()
+        msg.fromStr(
+            b'\x01\x00' # Message ID
+            b'\x00' # answer bit, opCode nibble, auth bit, trunc bit, recursive bit
+            b'\x00' # recursion bit, empty bit, empty bit, empty bit, response code nibble
+            b'\x00\x00' # number of queries
+            b'\x00\x00' # number of answers
+            b'\x00\x00' # number of authorities
+            b'\x00\x00' # number of additionals
+            )
+        self.assertEqual(msg.id, 256)
+        self.failIf(msg.answer, "Message was not supposed to be an answer.")
+        self.assertEqual(msg.opCode, dns.OP_QUERY)
+        self.failIf(msg.auth, "Message was not supposed to be authoritative.")
+        self.failIf(msg.trunc, "Message was not supposed to be truncated.")
+        self.assertEqual(msg.queries, [])
+        self.assertEqual(msg.answers, [])
+        self.assertEqual(msg.authority, [])
+        self.assertEqual(msg.additional, [])
+
+
+    def test_NULL(self):
+        """
+        A I{NULL} record with an arbitrary payload can be encoded and decoded as
+        part of a L{dns.Message}.
+        """
+        bytes = b''.join([dns._ord2bytes(i) for i in range(256)])
+        rec = dns.Record_NULL(bytes)
+        rr = dns.RRHeader(b'testname', dns.NULL, payload=rec)
+        msg1 = dns.Message()
+        msg1.answers.append(rr)
+        s = BytesIO()
+        msg1.encode(s)
+        s.seek(0, 0)
+        msg2 = dns.Message()
+        msg2.decode(s)
+
+        self.failUnless(isinstance(msg2.answers[0].payload, dns.Record_NULL))
+        self.assertEqual(msg2.answers[0].payload.payload, bytes)
+
+
+    def test_lookupRecordTypeDefault(self):
+        """
+        L{Message.lookupRecordType} returns C{dns.UnknownRecord} if it is
+        called with an integer which doesn't correspond to any known record
+        type.
+        """
+        # 65280 is the first value in the range reserved for private
+        # use, so it shouldn't ever conflict with an officially
+        # allocated value.
+        self.assertIdentical(
+            dns.Message().lookupRecordType(65280), dns.UnknownRecord)
+
+
+    def test_nonAuthoritativeMessage(self):
+        """
+        The L{RRHeader} instances created by L{Message} from a non-authoritative
+        message are marked as not authoritative.
+        """
+        buf = BytesIO()
+        answer = dns.RRHeader(payload=dns.Record_A('1.2.3.4', ttl=0))
+        answer.encode(buf)
+        message = dns.Message()
+        message.fromStr(
+            b'\x01\x00' # Message ID
+            # answer bit, opCode nibble, auth bit, trunc bit, recursive bit
+            b'\x00'
+            # recursion bit, empty bit, empty bit, empty bit, response code
+            # nibble
+            b'\x00'
+            b'\x00\x00' # number of queries
+            b'\x00\x01' # number of answers
+            b'\x00\x00' # number of authorities
+            b'\x00\x00' # number of additionals
+            + buf.getvalue()
+            )
+        self.assertEqual(message.answers, [answer])
+        self.assertFalse(message.answers[0].auth)
+
+
+    def test_authoritativeMessage(self):
+        """
+        The L{RRHeader} instances created by L{Message} from an authoritative
+        message are marked as authoritative.
+        """
+        buf = BytesIO()
+        answer = dns.RRHeader(payload=dns.Record_A('1.2.3.4', ttl=0))
+        answer.encode(buf)
+        message = dns.Message()
+        message.fromStr(
+            b'\x01\x00' # Message ID
+            # answer bit, opCode nibble, auth bit, trunc bit, recursive bit
+            b'\x04'
+            # recursion bit, empty bit, empty bit, empty bit, response code
+            # nibble
+            b'\x00'
+            b'\x00\x00' # number of queries
+            b'\x00\x01' # number of answers
+            b'\x00\x00' # number of authorities
+            b'\x00\x00' # number of additionals
+            + buf.getvalue()
+            )
+        answer.auth = True
+        self.assertEqual(message.answers, [answer])
+        self.assertTrue(message.answers[0].auth)
+
+
+
+class TestController(object):
+    """
+    Pretend to be a DNS query processor for a DNSDatagramProtocol.
+
+    @ivar messages: the list of received messages.
+    @type messages: C{list} of (msg, protocol, address)
+    """
+
+    def __init__(self):
+        """
+        Initialize the controller: create a list of messages.
+        """
+        self.messages = []
+
+
+    def messageReceived(self, msg, proto, addr):
+        """
+        Save the message so that it can be checked during the tests.
+        """
+        self.messages.append((msg, proto, addr))
+
+
+
+class DatagramProtocolTestCase(unittest.TestCase):
+    """
+    Test various aspects of L{dns.DNSDatagramProtocol}.
+    """
+
+    def setUp(self):
+        """
+        Create a L{dns.DNSDatagramProtocol} with a deterministic clock.
+        """
+        self.clock = task.Clock()
+        self.controller = TestController()
+        self.proto = dns.DNSDatagramProtocol(self.controller)
+        transport = proto_helpers.FakeDatagramTransport()
+        self.proto.makeConnection(transport)
+        self.proto.callLater = self.clock.callLater
+
+
+    def test_truncatedPacket(self):
+        """
+        Test that when a short datagram is received, datagramReceived does
+        not raise an exception while processing it.
+        """
+        self.proto.datagramReceived(
+            b'', address.IPv4Address('UDP', '127.0.0.1', 12345))
+        self.assertEqual(self.controller.messages, [])
+
+
+    def test_simpleQuery(self):
+        """
+        Test content received after a query.
+        """
+        d = self.proto.query(('127.0.0.1', 21345), [dns.Query(b'foo')])
+        self.assertEqual(len(self.proto.liveMessages.keys()), 1)
+        m = dns.Message()
+        m.id = next(iter(self.proto.liveMessages.keys()))
+        m.answers = [dns.RRHeader(payload=dns.Record_A(address='1.2.3.4'))]
+        def cb(result):
+            self.assertEqual(result.answers[0].payload.dottedQuad(), '1.2.3.4')
+        d.addCallback(cb)
+        self.proto.datagramReceived(m.toStr(), ('127.0.0.1', 21345))
+        return d
+
+
+    def test_queryTimeout(self):
+        """
+        Test that the query times out after some seconds.
+        """
+        d = self.proto.query(('127.0.0.1', 21345), [dns.Query(b'foo')])
+        self.assertEqual(len(self.proto.liveMessages), 1)
+        self.clock.advance(10)
+        self.assertFailure(d, dns.DNSQueryTimeoutError)
+        self.assertEqual(len(self.proto.liveMessages), 0)
+        return d
+
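+    # Note (illustrative, not part of upstream Twisted): the timeout test
+    # above is deterministic because setUp replaces the protocol's callLater
+    # with task.Clock.callLater, so clock.advance(10) fires the pending
+    # timeout immediately instead of waiting on a real reactor.
+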
+
+    def test_writeError(self):
+        """
+        Exceptions raised by the transport's write method should be turned into
+        C{Failure}s passed to errbacks of the C{Deferred} returned by
+        L{DNSDatagramProtocol.query}.
+        """
+        def writeError(message, addr):
+            raise RuntimeError("bar")
+        self.proto.transport.write = writeError
+
+        d = self.proto.query(('127.0.0.1', 21345), [dns.Query(b'foo')])
+        return self.assertFailure(d, RuntimeError)
+
+
+    def test_listenError(self):
+        """
+        Exception L{CannotListenError} raised by C{listenUDP} should be turned
+        into a C{Failure} passed to errback of the C{Deferred} returned by
+        L{DNSDatagramProtocol.query}.
+        """
+        def startListeningError():
+            raise CannotListenError(None, None, None)
+        self.proto.startListening = startListeningError
+        # Clean up transport so that the protocol calls startListening again
+        self.proto.transport = None
+
+        d = self.proto.query(('127.0.0.1', 21345), [dns.Query(b'foo')])
+        return self.assertFailure(d, CannotListenError)
+
+
+
+class TestTCPController(TestController):
+    """
+    Pretend to be a DNS query processor for a DNSProtocol.
+
+    @ivar connections: A list of L{DNSProtocol} instances which have
+        notified this controller that they are connected and have not
+        yet notified it that their connection has been lost.
+    """
+    def __init__(self):
+        TestController.__init__(self)
+        self.connections = []
+
+
+    def connectionMade(self, proto):
+        self.connections.append(proto)
+
+
+    def connectionLost(self, proto):
+        self.connections.remove(proto)
+
+
+
+class DNSProtocolTestCase(unittest.TestCase):
+    """
+    Test various aspects of L{dns.DNSProtocol}.
+    """
+
+    def setUp(self):
+        """
+        Create a L{dns.DNSProtocol} with a deterministic clock.
+        """
+        self.clock = task.Clock()
+        self.controller = TestTCPController()
+        self.proto = dns.DNSProtocol(self.controller)
+        self.proto.makeConnection(proto_helpers.StringTransport())
+        self.proto.callLater = self.clock.callLater
+
+
+    def test_connectionTracking(self):
+        """
+        L{dns.DNSProtocol} calls its controller's C{connectionMade}
+        method with itself when it is connected to a transport and its
+        controller's C{connectionLost} method when it is disconnected.
+        """
+        self.assertEqual(self.controller.connections, [self.proto])
+        self.proto.connectionLost(
+            Failure(ConnectionDone("Fake Connection Done")))
+        self.assertEqual(self.controller.connections, [])
+
+
+    def test_queryTimeout(self):
+        """
+        Test that the query times out after some seconds.
+        """
+        d = self.proto.query([dns.Query(b'foo')])
+        self.assertEqual(len(self.proto.liveMessages), 1)
+        self.clock.advance(60)
+        self.assertFailure(d, dns.DNSQueryTimeoutError)
+        self.assertEqual(len(self.proto.liveMessages), 0)
+        return d
+
+
+    def test_simpleQuery(self):
+        """
+        Test content received after a query.
+        """
+        d = self.proto.query([dns.Query(b'foo')])
+        self.assertEqual(len(self.proto.liveMessages.keys()), 1)
+        m = dns.Message()
+        m.id = next(iter(self.proto.liveMessages.keys()))
+        m.answers = [dns.RRHeader(payload=dns.Record_A(address='1.2.3.4'))]
+        def cb(result):
+            self.assertEqual(result.answers[0].payload.dottedQuad(), '1.2.3.4')
+        d.addCallback(cb)
+        s = m.toStr()
+        s = struct.pack('!H', len(s)) + s
+        self.proto.dataReceived(s)
+        return d
+
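+    # Note (illustrative, not part of upstream Twisted): DNS over TCP frames
+    # each message with a two-byte network-order length prefix, which is why
+    # test_simpleQuery above prepends struct.pack('!H', len(s)) before handing
+    # the bytes to dataReceived.
+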
+
+    def test_writeError(self):
+        """
+        Exceptions raised by the transport's write method should be turned into
+        C{Failure}s passed to errbacks of the C{Deferred} returned by
+        L{DNSProtocol.query}.
+        """
+        def writeError(message):
+            raise RuntimeError("bar")
+        self.proto.transport.write = writeError
+
+        d = self.proto.query([dns.Query(b'foo')])
+        return self.assertFailure(d, RuntimeError)
+
+
+
+class ReprTests(unittest.TestCase):
+    """
+    Tests for the C{__repr__} implementation of record classes.
+    """
+    def test_ns(self):
+        """
+        The repr of a L{dns.Record_NS} instance includes the name of the
+        nameserver and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_NS(b'example.com', 4321)),
+            "<NS name=example.com ttl=4321>")
+
+
+    def test_md(self):
+        """
+        The repr of a L{dns.Record_MD} instance includes the name of the
+        mail destination and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_MD(b'example.com', 4321)),
+            "<MD name=example.com ttl=4321>")
+
+
+    def test_mf(self):
+        """
+        The repr of a L{dns.Record_MF} instance includes the name of the
+        mail forwarder and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_MF(b'example.com', 4321)),
+            "<MF name=example.com ttl=4321>")
+
+
+    def test_cname(self):
+        """
+        The repr of a L{dns.Record_CNAME} instance includes the canonical
+        name and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_CNAME(b'example.com', 4321)),
+            "<CNAME name=example.com ttl=4321>")
+
+
+    def test_mb(self):
+        """
+        The repr of a L{dns.Record_MB} instance includes the name of the
+        mailbox and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_MB(b'example.com', 4321)),
+            "<MB name=example.com ttl=4321>")
+
+
+    def test_mg(self):
+        """
+        The repr of a L{dns.Record_MG} instance includes the name of the
+        mail group member and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_MG(b'example.com', 4321)),
+            "<MG name=example.com ttl=4321>")
+
+
+    def test_mr(self):
+        """
+        The repr of a L{dns.Record_MR} instance includes the name of the
+        mail rename domain and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_MR(b'example.com', 4321)),
+            "<MR name=example.com ttl=4321>")
+
+
+    def test_ptr(self):
+        """
+        The repr of a L{dns.Record_PTR} instance includes the name of the
+        pointer and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_PTR(b'example.com', 4321)),
+            "<PTR name=example.com ttl=4321>")
+
+
+    def test_dname(self):
+        """
+        The repr of a L{dns.Record_DNAME} instance includes the name of the
+        non-terminal DNS name redirection and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_DNAME(b'example.com', 4321)),
+            "<DNAME name=example.com ttl=4321>")
+
+
+    def test_a(self):
+        """
+        The repr of a L{dns.Record_A} instance includes the dotted-quad
+        string representation of the address it is for and the TTL of the
+        record.
+        """
+        self.assertEqual(
+            repr(dns.Record_A('1.2.3.4', 567)),
+            '<A address=1.2.3.4 ttl=567>')
+
+
+    def test_soa(self):
+        """
+        The repr of a L{dns.Record_SOA} instance includes all of the
+        authority fields.
+        """
+        self.assertEqual(
+            repr(dns.Record_SOA(mname=b'mName', rname=b'rName', serial=123,
+                                refresh=456, retry=789, expire=10,
+                                minimum=11, ttl=12)),
+            "<SOA mname=mName rname=rName serial=123 refresh=456 "
+            "retry=789 expire=10 minimum=11 ttl=12>")
+
+
+    def test_null(self):
+        """
+        The repr of a L{dns.Record_NULL} instance includes the repr of its
+        payload and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_NULL(b'abcd', 123)),
+            "<NULL payload='abcd' ttl=123>")
+
+
+    def test_wks(self):
+        """
+        The repr of a L{dns.Record_WKS} instance includes the dotted-quad
+        string representation of the address it is for, the IP protocol
+        number it is for, and the TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_WKS('2.3.4.5', 7, ttl=8)),
+            "<WKS address=2.3.4.5 protocol=7 ttl=8>")
+
+
+    def test_aaaa(self):
+        """
+        The repr of a L{dns.Record_AAAA} instance includes the colon-separated
+        hex string representation of the address it is for and the TTL of the
+        record.
+        """
+        self.assertEqual(
+            repr(dns.Record_AAAA('8765::1234', ttl=10)),
+            "<AAAA address=8765::1234 ttl=10>")
+
+
+    def test_a6(self):
+        """
+        The repr of a L{dns.Record_A6} instance includes the colon-separated
+        hex string representation of the address it is for and the TTL of the
+        record.
+        """
+        self.assertEqual(
+            repr(dns.Record_A6(0, '1234::5678', b'foo.bar', ttl=10)),
+            "<A6 suffix=1234::5678 prefix=foo.bar ttl=10>")
+
+
+    def test_srv(self):
+        """
+        The repr of a L{dns.Record_SRV} instance includes the name and port of
+        the target and the priority, weight, and TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_SRV(1, 2, 3, b'example.org', 4)),
+            "<SRV priority=1 weight=2 target=example.org port=3 ttl=4>")
+
+
+    def test_naptr(self):
+        """
+        The repr of a L{dns.Record_NAPTR} instance includes the order,
+        preference, flags, service, regular expression, replacement, and TTL of
+        the record.
+        """
+        record = dns.Record_NAPTR(
+            5, 9, b"S", b"http", b"/foo/bar/i", b"baz", 3)
+        self.assertEqual(
+            repr(record),
+            "<NAPTR order=5 preference=9 flags=S service=http "
+            "regexp=/foo/bar/i replacement=baz ttl=3>")
+
+
+    def test_afsdb(self):
+        """
+        The repr of a L{dns.Record_AFSDB} instance includes the subtype,
+        hostname, and TTL of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_AFSDB(3, b'example.org', 5)),
+            "<AFSDB subtype=3 hostname=example.org ttl=5>")
+
+
+    def test_rp(self):
+        """
+        The repr of a L{dns.Record_RP} instance includes the mbox, txt, and TTL
+        fields of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_RP(b'alice.example.com', b'admin.example.com', 3)),
+            "<RP mbox=alice.example.com txt=admin.example.com ttl=3>")
+
+
+    def test_hinfo(self):
+        """
+        The repr of a L{dns.Record_HINFO} instance includes the cpu, os, and
+        TTL fields of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_HINFO(b'sparc', b'minix', 12)),
+            "<HINFO cpu='sparc' os='minix' ttl=12>")
+
+
+    def test_minfo(self):
+        """
+        The repr of a L{dns.Record_MINFO} instance includes the rmailbx,
+        emailbx, and TTL fields of the record.
+        """
+        record = dns.Record_MINFO(
+            b'alice.example.com', b'bob.example.com', 15)
+        self.assertEqual(
+            repr(record),
+            "<MINFO responsibility=alice.example.com "
+            "errors=bob.example.com ttl=15>")
+
+
+    def test_mx(self):
+        """
+        The repr of a L{dns.Record_MX} instance includes the preference, name,
+        and TTL fields of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_MX(13, b'mx.example.com', 2)),
+            "<MX preference=13 name=mx.example.com ttl=2>")
+
+
+    def test_txt(self):
+        """
+        The repr of a L{dns.Record_TXT} instance includes the data and ttl
+        fields of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_TXT(b"foo", b"bar", ttl=15)),
+            "<TXT data=['foo', 'bar'] ttl=15>")
+
+
+    def test_spf(self):
+        """
+        The repr of a L{dns.Record_SPF} instance includes the data and ttl
+        fields of the record.
+        """
+        self.assertEqual(
+            repr(dns.Record_SPF(b"foo", b"bar", ttl=15)),
+            "<SPF data=['foo', 'bar'] ttl=15>")
+
+
+    def test_unknown(self):
+        """
+        The repr of a L{dns.UnknownRecord} instance includes the data and ttl
+        fields of the record.
+        """
+        self.assertEqual(
+            repr(dns.UnknownRecord(b"foo\x1fbar", 12)),
+            "<UNKNOWN data='foo\\x1fbar' ttl=12>")
+
+
+
+class _Equal(object):
+    """
+    A class the instances of which are equal to anything and everything.
+    """
+    def __eq__(self, other):
+        return True
+
+
+    def __ne__(self, other):
+        return False
+
+
+
+class _NotEqual(object):
+    """
+    A class the instances of which are equal to nothing.
+    """
+    def __eq__(self, other):
+        return False
+
+
+    def __ne__(self, other):
+        return True
+
+
+
+class EqualityTests(unittest.TestCase):
+    """
+    Tests for the equality and non-equality behavior of record classes.
+    """
+    def _equalityTest(self, firstValueOne, secondValueOne, valueTwo):
+        """
+        Assert that C{firstValueOne} is equal to C{secondValueOne} but not
+        equal to C{valueTwo} and that it defines equality cooperatively with
+        other types it doesn't know about.
+        """
+        # This doesn't use assertEqual and assertNotEqual because the exact
+        # operator those functions use is not very well defined.  The point
+        # of these assertions is to check the results of the use of specific
+        # operators (precisely to ensure that using different permutations
+        # (eg "x == y" or "not (x != y)") which should yield the same results
+        # actually does yield the same result). -exarkun
+        self.assertTrue(firstValueOne == firstValueOne)
+        self.assertTrue(firstValueOne == secondValueOne)
+        self.assertFalse(firstValueOne == valueTwo)
+        self.assertFalse(firstValueOne != firstValueOne)
+        self.assertFalse(firstValueOne != secondValueOne)
+        self.assertTrue(firstValueOne != valueTwo)
+        self.assertTrue(firstValueOne == _Equal())
+        self.assertFalse(firstValueOne != _Equal())
+        self.assertFalse(firstValueOne == _NotEqual())
+        self.assertTrue(firstValueOne != _NotEqual())
+
+
+    def test_charstr(self):
+        """
+        Two L{dns.Charstr} instances compare equal if and only if they have the
+        same string value.
+        """
+        self._equalityTest(
+            dns.Charstr(b'abc'), dns.Charstr(b'abc'), dns.Charstr(b'def'))
+
+
+    def test_name(self):
+        """
+        Two L{dns.Name} instances compare equal if and only if they have the
+        same name value.
+        """
+        self._equalityTest(
+            dns.Name(b'abc'), dns.Name(b'abc'), dns.Name(b'def'))
+
+
+    def _simpleEqualityTest(self, cls):
+        """
+        Assert that instances of C{cls} with the same attributes compare equal
+        to each other and instances with different attributes compare as not
+        equal.
+
+        @param cls: A L{dns.SimpleRecord} subclass.
+        """
+        # Vary the TTL
+        self._equalityTest(
+            cls(b'example.com', 123),
+            cls(b'example.com', 123),
+            cls(b'example.com', 321))
+        # Vary the name
+        self._equalityTest(
+            cls(b'example.com', 123),
+            cls(b'example.com', 123),
+            cls(b'example.org', 123))
+
+
+    def test_rrheader(self):
+        """
+        Two L{dns.RRHeader} instances compare equal if and only if they have
+        the same name, type, class, time to live, payload, and authoritative
+        bit.
+        """
+        # Vary the name
+        self._equalityTest(
+            dns.RRHeader(b'example.com', payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.org', payload=dns.Record_A('1.2.3.4')))
+
+        # Vary the payload
+        self._equalityTest(
+            dns.RRHeader(b'example.com', payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', payload=dns.Record_A('1.2.3.5')))
+
+        # Vary the type.  Leave the payload as None so that we don't have to
+        # provide non-equal values.
+        self._equalityTest(
+            dns.RRHeader(b'example.com', dns.A),
+            dns.RRHeader(b'example.com', dns.A),
+            dns.RRHeader(b'example.com', dns.MX))
+
+        # Probably not likely to come up.  Most people use the internet.
+        self._equalityTest(
+            dns.RRHeader(b'example.com', cls=dns.IN, payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', cls=dns.IN, payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', cls=dns.CS, payload=dns.Record_A('1.2.3.4')))
+
+        # Vary the ttl
+        self._equalityTest(
+            dns.RRHeader(b'example.com', ttl=60, payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', ttl=60, payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', ttl=120, payload=dns.Record_A('1.2.3.4')))
+
+        # Vary the auth bit
+        self._equalityTest(
+            dns.RRHeader(b'example.com', auth=1, payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', auth=1, payload=dns.Record_A('1.2.3.4')),
+            dns.RRHeader(b'example.com', auth=0, payload=dns.Record_A('1.2.3.4')))
+
+
+    def test_ns(self):
+        """
+        Two L{dns.Record_NS} instances compare equal if and only if they have
+        the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_NS)
+
+
+    def test_md(self):
+        """
+        Two L{dns.Record_MD} instances compare equal if and only if they have
+        the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_MD)
+
+
+    def test_mf(self):
+        """
+        Two L{dns.Record_MF} instances compare equal if and only if they have
+        the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_MF)
+
+
+    def test_cname(self):
+        """
+        Two L{dns.Record_CNAME} instances compare equal if and only if they
+        have the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_CNAME)
+
+
+    def test_mb(self):
+        """
+        Two L{dns.Record_MB} instances compare equal if and only if they have
+        the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_MB)
+
+
+    def test_mg(self):
+        """
+        Two L{dns.Record_MG} instances compare equal if and only if they have
+        the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_MG)
+
+
+    def test_mr(self):
+        """
+        Two L{dns.Record_MR} instances compare equal if and only if they have
+        the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_MR)
+
+
+    def test_ptr(self):
+        """
+        Two L{dns.Record_PTR} instances compare equal if and only if they have
+        the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_PTR)
+
+
+    def test_dname(self):
+        """
+        Two L{dns.Record_DNAME} instances compare equal if and only if they have
+        the same name and TTL.
+        """
+        self._simpleEqualityTest(dns.Record_DNAME)
+
+
+    def test_a(self):
+        """
+        Two L{dns.Record_A} instances compare equal if and only if they have
+        the same address and TTL.
+        """
+        # Vary the TTL
+        self._equalityTest(
+            dns.Record_A('1.2.3.4', 5),
+            dns.Record_A('1.2.3.4', 5),
+            dns.Record_A('1.2.3.4', 6))
+        # Vary the address
+        self._equalityTest(
+            dns.Record_A('1.2.3.4', 5),
+            dns.Record_A('1.2.3.4', 5),
+            dns.Record_A('1.2.3.5', 5))
+
+
+    def test_soa(self):
+        """
+        Two L{dns.Record_SOA} instances compare equal if and only if they have
+        the same mname, rname, serial, refresh, minimum, expire, retry, and
+        ttl.
+        """
+        # Vary the mname
+        self._equalityTest(
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'xname', b'rname', 123, 456, 789, 10, 20, 30))
+        # Vary the rname
+        self._equalityTest(
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'xname', 123, 456, 789, 10, 20, 30))
+        # Vary the serial
+        self._equalityTest(
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 1, 456, 789, 10, 20, 30))
+        # Vary the refresh
+        self._equalityTest(
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 1, 789, 10, 20, 30))
+        # Vary the minimum
+        self._equalityTest(
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 1, 10, 20, 30))
+        # Vary the expire
+        self._equalityTest(
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 1, 20, 30))
+        # Vary the retry
+        self._equalityTest(
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 1, 30))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 30),
+            dns.Record_SOA(b'mname', b'rname', 123, 456, 789, 10, 20, 1))
+
+
+    def test_null(self):
+        """
+        Two L{dns.Record_NULL} instances compare equal if and only if they have
+        the same payload and ttl.
+        """
+        # Vary the payload
+        self._equalityTest(
+            dns.Record_NULL('foo bar', 10),
+            dns.Record_NULL('foo bar', 10),
+            dns.Record_NULL('bar foo', 10))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_NULL('foo bar', 10),
+            dns.Record_NULL('foo bar', 10),
+            dns.Record_NULL('foo bar', 100))
+
+
+    def test_wks(self):
+        """
+        Two L{dns.Record_WKS} instances compare equal if and only if they have
+        the same address, protocol, map, and ttl.
+        """
+        # Vary the address
+        self._equalityTest(
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 2),
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 2),
+            dns.Record_WKS('4.3.2.1', 1, 'foo', 2))
+        # Vary the protocol
+        self._equalityTest(
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 2),
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 2),
+            dns.Record_WKS('1.2.3.4', 100, 'foo', 2))
+        # Vary the map
+        self._equalityTest(
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 2),
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 2),
+            dns.Record_WKS('1.2.3.4', 1, 'bar', 2))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 2),
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 2),
+            dns.Record_WKS('1.2.3.4', 1, 'foo', 200))
+
+
+    def test_aaaa(self):
+        """
+        Two L{dns.Record_AAAA} instances compare equal if and only if they have
+        the same address and ttl.
+        """
+        # Vary the address
+        self._equalityTest(
+            dns.Record_AAAA('1::2', 1),
+            dns.Record_AAAA('1::2', 1),
+            dns.Record_AAAA('2::1', 1))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_AAAA('1::2', 1),
+            dns.Record_AAAA('1::2', 1),
+            dns.Record_AAAA('1::2', 10))
+
+
+    def test_a6(self):
+        """
+        Two L{dns.Record_A6} instances compare equal if and only if they have
+        the same prefix, prefix length, suffix, and ttl.
+        """
+        # Note, A6 is crazy, I'm not sure these values are actually legal.
+        # Hopefully that doesn't matter for this test. -exarkun
+
+        # Vary the prefix length
+        self._equalityTest(
+            dns.Record_A6(16, '::abcd', b'example.com', 10),
+            dns.Record_A6(16, '::abcd', b'example.com', 10),
+            dns.Record_A6(32, '::abcd', b'example.com', 10))
+        # Vary the suffix
+        self._equalityTest(
+            dns.Record_A6(16, '::abcd', b'example.com', 10),
+            dns.Record_A6(16, '::abcd', b'example.com', 10),
+            dns.Record_A6(16, '::abcd:0', b'example.com', 10))
+        # Vary the prefix
+        self._equalityTest(
+            dns.Record_A6(16, '::abcd', b'example.com', 10),
+            dns.Record_A6(16, '::abcd', b'example.com', 10),
+            dns.Record_A6(16, '::abcd', b'example.org', 10))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_A6(16, '::abcd', b'example.com', 10),
+            dns.Record_A6(16, '::abcd', b'example.com', 10),
+            dns.Record_A6(16, '::abcd', b'example.com', 100))
+
+
+    def test_srv(self):
+        """
+        Two L{dns.Record_SRV} instances compare equal if and only if they have
+        the same priority, weight, port, target, and ttl.
+        """
+        # Vary the priority
+        self._equalityTest(
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(100, 20, 30, b'example.com', 40))
+        # Vary the weight
+        self._equalityTest(
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 200, 30, b'example.com', 40))
+        # Vary the port
+        self._equalityTest(
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 20, 300, b'example.com', 40))
+        # Vary the target
+        self._equalityTest(
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 20, 30, b'example.org', 40))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 20, 30, b'example.com', 40),
+            dns.Record_SRV(10, 20, 30, b'example.com', 400))
+
+
+    def test_naptr(self):
+        """
+        Two L{dns.Record_NAPTR} instances compare equal if and only if they
+        have the same order, preference, flags, service, regexp, replacement,
+        and ttl.
+        """
+        # Vary the order
+        self._equalityTest(
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(2, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12))
+        # Vary the preference
+        self._equalityTest(
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 3, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12))
+        # Vary the flags
+        self._equalityTest(
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"p", b"sip+E2U", b"/foo/bar/", b"baz", 12))
+        # Vary the service
+        self._equalityTest(
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"http", b"/foo/bar/", b"baz", 12))
+        # Vary the regexp
+        self._equalityTest(
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/bar/foo/", b"baz", 12))
+        # Vary the replacement
+        self._equalityTest(
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/bar/foo/", b"quux", 12))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/foo/bar/", b"baz", 12),
+            dns.Record_NAPTR(1, 2, b"u", b"sip+E2U", b"/bar/foo/", b"baz", 5))
+
+
+    def test_afsdb(self):
+        """
+        Two L{dns.Record_AFSDB} instances compare equal if and only if they
+        have the same subtype, hostname, and ttl.
+        """
+        # Vary the subtype
+        self._equalityTest(
+            dns.Record_AFSDB(1, b'example.com', 2),
+            dns.Record_AFSDB(1, b'example.com', 2),
+            dns.Record_AFSDB(2, b'example.com', 2))
+        # Vary the hostname
+        self._equalityTest(
+            dns.Record_AFSDB(1, b'example.com', 2),
+            dns.Record_AFSDB(1, b'example.com', 2),
+            dns.Record_AFSDB(1, b'example.org', 2))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_AFSDB(1, b'example.com', 2),
+            dns.Record_AFSDB(1, b'example.com', 2),
+            dns.Record_AFSDB(1, b'example.com', 3))
+
+
+    def test_rp(self):
+        """
+        Two L{dns.Record_RP} instances compare equal if and only if they have
+        the same mbox, txt, and ttl.
+        """
+        # Vary the mbox
+        self._equalityTest(
+            dns.Record_RP(b'alice.example.com', b'alice is nice', 10),
+            dns.Record_RP(b'alice.example.com', b'alice is nice', 10),
+            dns.Record_RP(b'bob.example.com', b'alice is nice', 10))
+        # Vary the txt
+        self._equalityTest(
+            dns.Record_RP(b'alice.example.com', b'alice is nice', 10),
+            dns.Record_RP(b'alice.example.com', b'alice is nice', 10),
+            dns.Record_RP(b'alice.example.com', b'alice is not nice', 10))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_RP(b'alice.example.com', b'alice is nice', 10),
+            dns.Record_RP(b'alice.example.com', b'alice is nice', 10),
+            dns.Record_RP(b'alice.example.com', b'alice is nice', 100))
+
+
+    def test_hinfo(self):
+        """
+        Two L{dns.Record_HINFO} instances compare equal if and only if they
+        have the same cpu, os, and ttl.
+        """
+        # Vary the cpu
+        self._equalityTest(
+            dns.Record_HINFO('x86-64', 'plan9', 10),
+            dns.Record_HINFO('x86-64', 'plan9', 10),
+            dns.Record_HINFO('i386', 'plan9', 10))
+        # Vary the os
+        self._equalityTest(
+            dns.Record_HINFO('x86-64', 'plan9', 10),
+            dns.Record_HINFO('x86-64', 'plan9', 10),
+            dns.Record_HINFO('x86-64', 'plan11', 10))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_HINFO('x86-64', 'plan9', 10),
+            dns.Record_HINFO('x86-64', 'plan9', 10),
+            dns.Record_HINFO('x86-64', 'plan9', 100))
+
+
+    def test_minfo(self):
+        """
+        Two L{dns.Record_MINFO} instances compare equal if and only if they
+        have the same rmailbx, emailbx, and ttl.
+        """
+        # Vary the rmailbx
+        self._equalityTest(
+            dns.Record_MINFO(b'rmailbox', b'emailbox', 10),
+            dns.Record_MINFO(b'rmailbox', b'emailbox', 10),
+            dns.Record_MINFO(b'someplace', b'emailbox', 10))
+        # Vary the emailbx
+        self._equalityTest(
+            dns.Record_MINFO(b'rmailbox', b'emailbox', 10),
+            dns.Record_MINFO(b'rmailbox', b'emailbox', 10),
+            dns.Record_MINFO(b'rmailbox', b'something', 10))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_MINFO(b'rmailbox', b'emailbox', 10),
+            dns.Record_MINFO(b'rmailbox', b'emailbox', 10),
+            dns.Record_MINFO(b'rmailbox', b'emailbox', 100))
+
+
+    def test_mx(self):
+        """
+        Two L{dns.Record_MX} instances compare equal if and only if they have
+        the same preference, name, and ttl.
+        """
+        # Vary the preference
+        self._equalityTest(
+            dns.Record_MX(10, b'example.org', 20),
+            dns.Record_MX(10, b'example.org', 20),
+            dns.Record_MX(100, b'example.org', 20))
+        # Vary the name
+        self._equalityTest(
+            dns.Record_MX(10, b'example.org', 20),
+            dns.Record_MX(10, b'example.org', 20),
+            dns.Record_MX(10, b'example.net', 20))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_MX(10, b'example.org', 20),
+            dns.Record_MX(10, b'example.org', 20),
+            dns.Record_MX(10, b'example.org', 200))
+
+
+    def test_txt(self):
+        """
+        Two L{dns.Record_TXT} instances compare equal if and only if they have
+        the same data and ttl.
+        """
+        # Vary the length of the data
+        self._equalityTest(
+            dns.Record_TXT('foo', 'bar', ttl=10),
+            dns.Record_TXT('foo', 'bar', ttl=10),
+            dns.Record_TXT('foo', 'bar', 'baz', ttl=10))
+        # Vary the value of the data
+        self._equalityTest(
+            dns.Record_TXT('foo', 'bar', ttl=10),
+            dns.Record_TXT('foo', 'bar', ttl=10),
+            dns.Record_TXT('bar', 'foo', ttl=10))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_TXT('foo', 'bar', ttl=10),
+            dns.Record_TXT('foo', 'bar', ttl=10),
+            dns.Record_TXT('foo', 'bar', ttl=100))
+
+
+    def test_spf(self):
+        """
+        Two L{dns.Record_SPF} instances compare equal if and only if they have
+        the same data and ttl.
+        """
+        # Vary the length of the data
+        self._equalityTest(
+            dns.Record_SPF('foo', 'bar', ttl=10),
+            dns.Record_SPF('foo', 'bar', ttl=10),
+            dns.Record_SPF('foo', 'bar', 'baz', ttl=10))
+        # Vary the value of the data
+        self._equalityTest(
+            dns.Record_SPF('foo', 'bar', ttl=10),
+            dns.Record_SPF('foo', 'bar', ttl=10),
+            dns.Record_SPF('bar', 'foo', ttl=10))
+        # Vary the ttl
+        self._equalityTest(
+            dns.Record_SPF('foo', 'bar', ttl=10),
+            dns.Record_SPF('foo', 'bar', ttl=10),
+            dns.Record_SPF('foo', 'bar', ttl=100))
+
+
+    def test_unknown(self):
+        """
+        Two L{dns.UnknownRecord} instances compare equal if and only if they
+        have the same data and ttl.
+        """
+        # Vary the length of the data
+        self._equalityTest(
+            dns.UnknownRecord('foo', ttl=10),
+            dns.UnknownRecord('foo', ttl=10),
+            dns.UnknownRecord('foobar', ttl=10))
+        # Vary the value of the data
+        self._equalityTest(
+            dns.UnknownRecord('foo', ttl=10),
+            dns.UnknownRecord('foo', ttl=10),
+            dns.UnknownRecord('bar', ttl=10))
+        # Vary the ttl
+        self._equalityTest(
+            dns.UnknownRecord('foo', ttl=10),
+            dns.UnknownRecord('foo', ttl=10),
+            dns.UnknownRecord('foo', ttl=100))
+
+
+
+class RRHeaderTests(unittest.TestCase):
+    """
+    Tests for L{twisted.names.dns.RRHeader}.
+    """
+
+    def test_negativeTTL(self):
+        """
+        Attempting to create a L{dns.RRHeader} instance with a negative TTL
+        causes L{ValueError} to be raised.
+        """
+        self.assertRaises(
+            ValueError, dns.RRHeader, "example.com", dns.A,
+            dns.IN, -1, dns.Record_A("127.0.0.1"))
diff --git a/ThirdParty/Twisted/twisted/names/test/test_hosts.py b/ThirdParty/Twisted/twisted/names/test/test_hosts.py
new file mode 100644
index 0000000..5b6b986
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_hosts.py
@@ -0,0 +1,258 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the I{hosts(5)}-based resolver, L{twisted.names.hosts}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import TestCase
+from twisted.python.filepath import FilePath
+from twisted.internet.defer import gatherResults
+
+from twisted.names.dns import (
+    A, AAAA, IN, DomainError, RRHeader, Query, Record_A, Record_AAAA)
+from twisted.names.hosts import Resolver, searchFileFor, searchFileForAll
+
+
+class GoodTempPathMixin(object):
+    def path(self):
+        return FilePath(self.mktemp().encode('utf-8'))
+
+
+
+class SearchHostsFileTests(TestCase, GoodTempPathMixin):
+    """
+    Tests for L{searchFileFor}, a helper which finds the first address for a
+    particular hostname in a I{hosts(5)}-style file.
+    """
+    def test_findAddress(self):
+        """
+        If there is an IPv4 address for the hostname passed to L{searchFileFor},
+        it is returned.
+        """
+        hosts = self.path()
+        hosts.setContent(
+            b"10.2.3.4 foo.example.com\n")
+        self.assertEqual(
+            "10.2.3.4", searchFileFor(hosts.path, b"foo.example.com"))
+
+
+    def test_notFoundAddress(self):
+        """
+        If there is no address information for the hostname passed to
+        L{searchFileFor}, C{None} is returned.
+        """
+        hosts = self.path()
+        hosts.setContent(
+            b"10.2.3.4 foo.example.com\n")
+        self.assertIdentical(
+            None, searchFileFor(hosts.path, b"bar.example.com"))
+
+
+    def test_firstAddress(self):
+        """
+        The first address associated with the given hostname is returned.
+        """
+        hosts = self.path()
+        hosts.setContent(
+            b"::1 foo.example.com\n"
+            b"10.1.2.3 foo.example.com\n"
+            b"fe80::21b:fcff:feee:5a1d foo.example.com\n")
+        self.assertEqual("::1", searchFileFor(hosts.path, b"foo.example.com"))
+
+
+    def test_searchFileForAliases(self):
+        """
+        For a host with a canonical name and one or more aliases,
+        L{searchFileFor} can find an address given any of the names.
+        """
+        hosts = self.path()
+        hosts.setContent(
+            b"127.0.1.1\thelmut.example.org\thelmut\n"
+            b"# a comment\n"
+            b"::1 localhost ip6-localhost ip6-loopback\n")
+        self.assertEqual(searchFileFor(hosts.path, b'helmut'), '127.0.1.1')
+        self.assertEqual(
+            searchFileFor(hosts.path, b'helmut.example.org'), '127.0.1.1')
+        self.assertEqual(searchFileFor(hosts.path, b'ip6-localhost'), '::1')
+        self.assertEqual(searchFileFor(hosts.path, b'ip6-loopback'), '::1')
+        self.assertEqual(searchFileFor(hosts.path, b'localhost'), '::1')
+
+
+
+class SearchHostsFileForAllTests(TestCase, GoodTempPathMixin):
+    """
+    Tests for L{searchFileForAll}, a helper which finds all addresses for a
+    particular hostname in a I{hosts(5)}-style file.
+    """
+    def test_allAddresses(self):
+        """
+        L{searchFileForAll} returns a list of all addresses associated with the
+        name passed to it.
+        """
+        hosts = self.path()
+        hosts.setContent(
+            b"127.0.0.1     foobar.example.com\n"
+            b"127.0.0.2     foobar.example.com\n"
+            b"::1           foobar.example.com\n")
+        self.assertEqual(
+            ["127.0.0.1", "127.0.0.2", "::1"],
+            searchFileForAll(hosts, b"foobar.example.com"))
+
+
+    def test_caseInsensitively(self):
+        """
+        L{searchFileForAll} searches for names case-insensitively.
+        """
+        hosts = self.path()
+        hosts.setContent(b"127.0.0.1     foobar.EXAMPLE.com\n")
+        self.assertEqual(
+            ["127.0.0.1"], searchFileForAll(hosts, b"FOOBAR.example.com"))
+
+
+    def test_readError(self):
+        """
+        If there is an error reading the contents of the hosts file,
+        L{searchFileForAll} returns an empty list.
+        """
+        self.assertEqual(
+            [], searchFileForAll(self.path(), b"example.com"))
+
+
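+# Illustrative sketch only, not part of the upstream module: a minimal
+# hosts(5) search with the behaviour exercised above -- comments are
+# ignored, aliases are honoured, matching is case-insensitive, and every
+# matching address is collected in file order.  The real implementations
+# live in twisted.names.hosts; the name below is local to this sketch.
+def _sketchSearchFileForAll(path, name):
+    addresses = []
+    try:
+        with open(path) as hostsFile:
+            lines = hostsFile.read().splitlines()
+    except (IOError, OSError):
+        # An unreadable file yields no addresses rather than an error.
+        return addresses
+    for line in lines:
+        # Drop any comment, then split into an address followed by hostnames.
+        parts = line.split('#', 1)[0].split()
+        if len(parts) > 1 and name.lower() in [n.lower() for n in parts[1:]]:
+            addresses.append(parts[0])
+    return addresses
+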
+
+class HostsTestCase(TestCase, GoodTempPathMixin):
+    """
+    Tests for the I{hosts(5)}-based L{twisted.names.hosts.Resolver}.
+    """
+    def setUp(self):
+        f = self.path()
+        f.setContent(b'''
+1.1.1.1    EXAMPLE EXAMPLE.EXAMPLETHING
+::2        mixed
+1.1.1.2    MIXED
+::1        ip6thingy
+1.1.1.3    multiple
+1.1.1.4    multiple
+::3        ip6-multiple
+::4        ip6-multiple
+''')
+        self.ttl = 4200
+        self.resolver = Resolver(f.path, self.ttl)
+
+
+    def test_defaultPath(self):
+        """
+        The default hosts file used by L{Resolver} is I{/etc/hosts} if no value
+        is given for the C{file} initializer parameter.
+        """
+        resolver = Resolver()
+        self.assertEqual(b"/etc/hosts", resolver.file)
+
+
+    def test_getHostByName(self):
+        """
+        L{hosts.Resolver.getHostByName} returns a L{Deferred} which fires with a
+        string giving the address of the queried name as found in the resolver's
+        hosts file.
+        """
+        data = [(b'EXAMPLE', '1.1.1.1'),
+                (b'EXAMPLE.EXAMPLETHING', '1.1.1.1'),
+                (b'MIXED', '1.1.1.2'),
+                ]
+        ds = [self.resolver.getHostByName(n).addCallback(self.assertEqual, ip)
+              for n, ip in data]
+        return gatherResults(ds)
+
+
+    def test_lookupAddress(self):
+        """
+        L{hosts.Resolver.lookupAddress} returns a L{Deferred} which fires with A
+        records from the hosts file.
+        """
+        d = self.resolver.lookupAddress(b'multiple')
+        def resolved(results):
+            answers, authority, additional = results
+            self.assertEqual(
+                (RRHeader(b"multiple", A, IN, self.ttl,
+                          Record_A("1.1.1.3", self.ttl)),
+                 RRHeader(b"multiple", A, IN, self.ttl,
+                          Record_A("1.1.1.4", self.ttl))),
+                answers)
+        d.addCallback(resolved)
+        return d
+
+
+    def test_lookupIPV6Address(self):
+        """
+        L{hosts.Resolver.lookupIPV6Address} returns a L{Deferred} which fires
+        with AAAA records from the hosts file.
+        """
+        d = self.resolver.lookupIPV6Address(b'ip6-multiple')
+        def resolved(results):
+            answers, authority, additional = results
+            self.assertEqual(
+                (RRHeader(b"ip6-multiple", AAAA, IN, self.ttl,
+                          Record_AAAA("::3", self.ttl)),
+                 RRHeader(b"ip6-multiple", AAAA, IN, self.ttl,
+                          Record_AAAA("::4", self.ttl))),
+                answers)
+        d.addCallback(resolved)
+        return d
+
+
+    def test_lookupAllRecords(self):
+        """
+        L{hosts.Resolver.lookupAllRecords} returns a L{Deferred} which fires
+        with A records from the hosts file.
+        """
+        d = self.resolver.lookupAllRecords(b'mixed')
+        def resolved(results):
+            answers, authority, additional = results
+            self.assertEqual(
+                (RRHeader(b"mixed", A, IN, self.ttl,
+                          Record_A("1.1.1.2", self.ttl)),),
+                answers)
+        d.addCallback(resolved)
+        return d
+
+
+    def testNotImplemented(self):
+        return self.assertFailure(self.resolver.lookupMailExchange(b'EXAMPLE'),
+                                  NotImplementedError)
+
+
+    def testQuery(self):
+        d = self.resolver.query(Query(b'EXAMPLE'))
+        d.addCallback(lambda x: self.assertEqual(x[0][0].payload.dottedQuad(),
+                                                 '1.1.1.1'))
+        return d
+
+
+    def test_lookupAddressNotFound(self):
+        """
+        L{hosts.Resolver.lookupAddress} returns a L{Deferred} which fires with
+        L{dns.DomainError} if the name passed in has no addresses in the hosts
+        file.
+        """
+        return self.assertFailure(self.resolver.lookupAddress(b'foueoa'),
+                                  DomainError)
+
+
+    def test_lookupIPV6AddressNotFound(self):
+        """
+        Like L{test_lookupAddressNotFound}, but for
+        L{hosts.Resolver.lookupIPV6Address}.
+        """
+        return self.assertFailure(self.resolver.lookupIPV6Address(b'foueoa'),
+                                  DomainError)
+
+
+    def test_lookupAllRecordsNotFound(self):
+        """
+        Like L{test_lookupAddressNotFound}, but for
+        L{hosts.Resolver.lookupAllRecords}.
+        """
+        return self.assertFailure(self.resolver.lookupAllRecords(b'foueoa'),
+                                  DomainError)
diff --git a/ThirdParty/Twisted/twisted/names/test/test_names.py b/ThirdParty/Twisted/twisted/names/test/test_names.py
new file mode 100644
index 0000000..49ec40f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_names.py
@@ -0,0 +1,817 @@
+# -*- test-case-name: twisted.names.test.test_names -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for twisted.names.
+"""
+
+import socket, operator, copy
+from StringIO import StringIO
+
+from twisted.trial import unittest
+
+from twisted.internet import reactor, defer, error
+from twisted.internet.task import Clock
+from twisted.internet.defer import succeed
+from twisted.names import client, server, common, authority, dns
+from twisted.python import failure
+from twisted.names.dns import Message
+from twisted.names.client import Resolver
+from twisted.names.secondary import (
+    SecondaryAuthorityService, SecondaryAuthority)
+
+from twisted.python.compat import reduce
+from twisted.test.proto_helpers import StringTransport, MemoryReactor
+
+def justPayload(results):
+    return [r.payload for r in results[0]]
+
+class NoFileAuthority(authority.FileAuthority):
+    def __init__(self, soa, records):
+        # Yes, skip FileAuthority
+        common.ResolverBase.__init__(self)
+        self.soa, self.records = soa, records
+
+
+soa_record = dns.Record_SOA(
+                    mname = 'test-domain.com',
+                    rname = 'root.test-domain.com',
+                    serial = 100,
+                    refresh = 1234,
+                    minimum = 7654,
+                    expire = 19283784,
+                    retry = 15,
+                    ttl=1
+                )
+
+reverse_soa = dns.Record_SOA(
+                     mname = '93.84.28.in-addr.arpa',
+                     rname = '93.84.28.in-addr.arpa',
+                     serial = 120,
+                     refresh = 54321,
+                     minimum = 382,
+                     expire = 11193983,
+                     retry = 30,
+                     ttl=3
+                )
+
+my_soa = dns.Record_SOA(
+    mname = 'my-domain.com',
+    rname = 'postmaster.test-domain.com',
+    serial = 130,
+    refresh = 12345,
+    minimum = 1,
+    expire = 999999,
+    retry = 100,
+    )
+
+test_domain_com = NoFileAuthority(
+    soa = ('test-domain.com', soa_record),
+    records = {
+        'test-domain.com': [
+            soa_record,
+            dns.Record_A('127.0.0.1'),
+            dns.Record_NS('39.28.189.39'),
+            dns.Record_SPF('v=spf1 mx/30 mx:example.org/30 -all'),
+            dns.Record_SPF('v=spf1 +mx a:\0colo', '.example.com/28 -all not valid'),
+            dns.Record_MX(10, 'host.test-domain.com'),
+            dns.Record_HINFO(os='Linux', cpu='A Fast One, Dontcha know'),
+            dns.Record_CNAME('canonical.name.com'),
+            dns.Record_MB('mailbox.test-domain.com'),
+            dns.Record_MG('mail.group.someplace'),
+            dns.Record_TXT('A First piece of Text', 'a SecoNd piece'),
+            dns.Record_A6(0, 'ABCD::4321', ''),
+            dns.Record_A6(12, '0:0069::0', 'some.network.tld'),
+            dns.Record_A6(8, '0:5634:1294:AFCB:56AC:48EF:34C3:01FF', 'tra.la.la.net'),
+            dns.Record_TXT('Some more text, haha!  Yes.  \0  Still here?'),
+            dns.Record_MR('mail.redirect.or.whatever'),
+            dns.Record_MINFO(rmailbx='r mail box', emailbx='e mail box'),
+            dns.Record_AFSDB(subtype=1, hostname='afsdb.test-domain.com'),
+            dns.Record_RP(mbox='whatever.i.dunno', txt='some.more.text'),
+            dns.Record_WKS('12.54.78.12', socket.IPPROTO_TCP,
+                           '\x12\x01\x16\xfe\xc1\x00\x01'),
+            dns.Record_NAPTR(100, 10, "u", "sip+E2U",
+                             "!^.*$!sip:information at domain.tld!"),
+            dns.Record_AAAA('AF43:5634:1294:AFCB:56AC:48EF:34C3:01FF')],
+        'http.tcp.test-domain.com': [
+            dns.Record_SRV(257, 16383, 43690, 'some.other.place.fool')
+        ],
+        'host.test-domain.com': [
+            dns.Record_A('123.242.1.5'),
+            dns.Record_A('0.255.0.255'),
+        ],
+        'host-two.test-domain.com': [
+#
+#  Python bug
+#           dns.Record_A('255.255.255.255'),
+#
+            dns.Record_A('255.255.255.254'),
+            dns.Record_A('0.0.0.0')
+        ],
+        'cname.test-domain.com': [
+            dns.Record_CNAME('test-domain.com')
+        ],
+        'anothertest-domain.com': [
+            dns.Record_A('1.2.3.4')],
+    }
+)
+
+reverse_domain = NoFileAuthority(
+    soa = ('93.84.28.in-addr.arpa', reverse_soa),
+    records = {
+        '123.93.84.28.in-addr.arpa': [
+             dns.Record_PTR('test.host-reverse.lookup.com'),
+             reverse_soa
+        ]
+    }
+)
+
+
+my_domain_com = NoFileAuthority(
+    soa = ('my-domain.com', my_soa),
+    records = {
+        'my-domain.com': [
+            my_soa,
+            dns.Record_A('1.2.3.4', ttl='1S'),
+            dns.Record_NS('ns1.domain', ttl='2M'),
+            dns.Record_NS('ns2.domain', ttl='3H'),
+            dns.Record_SRV(257, 16383, 43690, 'some.other.place.fool', ttl='4D')
+            ]
+        }
+    )
+
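+
+# Illustrative note only, not part of the upstream module: the my_domain_com
+# zone above uses string TTLs such as '1S', '2M', '3H' and '4D', which
+# twisted.names treats as seconds, minutes, hours and days respectively.  A
+# rough equivalent conversion, just to make those fixture values easy to
+# read (the names below are local to this sketch):
+_TTL_UNITS = {'S': 1, 'M': 60, 'H': 3600, 'D': 86400}
+
+def _ttlToSeconds(ttl):
+    # Accept either an integer TTL or a string with a trailing unit letter.
+    if isinstance(ttl, str) and ttl[-1:].upper() in _TTL_UNITS:
+        return int(ttl[:-1]) * _TTL_UNITS[ttl[-1:].upper()]
+    return int(ttl)
+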
+
+class ServerDNSTestCase(unittest.TestCase):
+    """
+    Test cases for DNS server and client.
+    """
+
+    def setUp(self):
+        self.factory = server.DNSServerFactory([
+            test_domain_com, reverse_domain, my_domain_com
+        ], verbose=2)
+
+        p = dns.DNSDatagramProtocol(self.factory)
+
+        while 1:
+            listenerTCP = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
+            # It's simpler to do the stop listening with addCleanup,
+            # even though we might not end up using this TCP port in
+            # the test (if the listenUDP below fails).  Cleaning up
+            # this TCP port sooner than "cleanup time" would mean
+            # adding more code to keep track of the Deferred returned
+            # by stopListening.
+            self.addCleanup(listenerTCP.stopListening)
+            port = listenerTCP.getHost().port
+
+            try:
+                listenerUDP = reactor.listenUDP(port, p, interface="127.0.0.1")
+            except error.CannotListenError:
+                pass
+            else:
+                self.addCleanup(listenerUDP.stopListening)
+                break
+
+        self.listenerTCP = listenerTCP
+        self.listenerUDP = listenerUDP
+        self.resolver = client.Resolver(servers=[('127.0.0.1', port)])
+
+
+    def tearDown(self):
+        """
+        Clean up any server connections associated with the
+        L{DNSServerFactory} created in L{setUp}
+        """
+        # It'd be great if DNSServerFactory had a method that
+        # encapsulated this task.  At least the necessary data is
+        # available, though.
+        for conn in self.factory.connections[:]:
+            conn.transport.loseConnection()
+
+
+    def namesTest(self, d, r):
+        self.response = None
+        def setDone(response):
+            self.response = response
+
+        def checkResults(ignored):
+            if isinstance(self.response, failure.Failure):
+                raise self.response
+            results = justPayload(self.response)
+            self.assertEqual(
+                len(results), len(r),
+                "%s != %s" % (map(str, results), map(str, r)))
+            for rec in results:
+                self.assertIn(rec, r, "%s not in %s" % (rec, map(str, r)))
+
+        d.addBoth(setDone)
+        d.addCallback(checkResults)
+        return d
+
+    def testAddressRecord1(self):
+        """Test simple DNS 'A' record queries"""
+        return self.namesTest(
+            self.resolver.lookupAddress('test-domain.com'),
+            [dns.Record_A('127.0.0.1', ttl=19283784)]
+        )
+
+
+    def testAddressRecord2(self):
+        """Test DNS 'A' record queries with multiple answers"""
+        return self.namesTest(
+            self.resolver.lookupAddress('host.test-domain.com'),
+            [dns.Record_A('123.242.1.5', ttl=19283784), dns.Record_A('0.255.0.255', ttl=19283784)]
+        )
+
+
+    def testAddressRecord3(self):
+        """Test DNS 'A' record queries with edge cases"""
+        return self.namesTest(
+            self.resolver.lookupAddress('host-two.test-domain.com'),
+            [dns.Record_A('255.255.255.254', ttl=19283784), dns.Record_A('0.0.0.0', ttl=19283784)]
+        )
+
+
+    def testAuthority(self):
+        """Test DNS 'SOA' record queries"""
+        return self.namesTest(
+            self.resolver.lookupAuthority('test-domain.com'),
+            [soa_record]
+        )
+
+
+    def testMailExchangeRecord(self):
+        """Test DNS 'MX' record queries"""
+        return self.namesTest(
+            self.resolver.lookupMailExchange('test-domain.com'),
+            [dns.Record_MX(10, 'host.test-domain.com', ttl=19283784)]
+        )
+
+
+    def testNameserver(self):
+        """Test DNS 'NS' record queries"""
+        return self.namesTest(
+            self.resolver.lookupNameservers('test-domain.com'),
+            [dns.Record_NS('39.28.189.39', ttl=19283784)]
+        )
+
+
+    def testHINFO(self):
+        """Test DNS 'HINFO' record queries"""
+        return self.namesTest(
+            self.resolver.lookupHostInfo('test-domain.com'),
+            [dns.Record_HINFO(os='Linux', cpu='A Fast One, Dontcha know', ttl=19283784)]
+        )
+
+    def testPTR(self):
+        """Test DNS 'PTR' record queries"""
+        return self.namesTest(
+            self.resolver.lookupPointer('123.93.84.28.in-addr.arpa'),
+            [dns.Record_PTR('test.host-reverse.lookup.com', ttl=11193983)]
+        )
+
+
+    def testCNAME(self):
+        """Test DNS 'CNAME' record queries"""
+        return self.namesTest(
+            self.resolver.lookupCanonicalName('test-domain.com'),
+            [dns.Record_CNAME('canonical.name.com', ttl=19283784)]
+        )
+
+    def testCNAMEAdditional(self):
+        """Test additional processing for CNAME records"""
+        return self.namesTest(
+            self.resolver.lookupAddress('cname.test-domain.com'),
+            [dns.Record_CNAME('test-domain.com', ttl=19283784),
+             dns.Record_A('127.0.0.1', ttl=19283784)]
+        )
+
+    def testMB(self):
+        """Test DNS 'MB' record queries"""
+        return self.namesTest(
+            self.resolver.lookupMailBox('test-domain.com'),
+            [dns.Record_MB('mailbox.test-domain.com', ttl=19283784)]
+        )
+
+
+    def testMG(self):
+        """Test DNS 'MG' record queries"""
+        return self.namesTest(
+            self.resolver.lookupMailGroup('test-domain.com'),
+            [dns.Record_MG('mail.group.someplace', ttl=19283784)]
+        )
+
+
+    def testMR(self):
+        """Test DNS 'MR' record queries"""
+        return self.namesTest(
+            self.resolver.lookupMailRename('test-domain.com'),
+            [dns.Record_MR('mail.redirect.or.whatever', ttl=19283784)]
+        )
+
+
+    def testMINFO(self):
+        """Test DNS 'MINFO' record queries"""
+        return self.namesTest(
+            self.resolver.lookupMailboxInfo('test-domain.com'),
+            [dns.Record_MINFO(rmailbx='r mail box', emailbx='e mail box', ttl=19283784)]
+        )
+
+
+    def testSRV(self):
+        """Test DNS 'SRV' record queries"""
+        return self.namesTest(
+            self.resolver.lookupService('http.tcp.test-domain.com'),
+            [dns.Record_SRV(257, 16383, 43690, 'some.other.place.fool', ttl=19283784)]
+        )
+
+    def testAFSDB(self):
+        """Test DNS 'AFSDB' record queries"""
+        return self.namesTest(
+            self.resolver.lookupAFSDatabase('test-domain.com'),
+            [dns.Record_AFSDB(subtype=1, hostname='afsdb.test-domain.com', ttl=19283784)]
+        )
+
+
+    def testRP(self):
+        """Test DNS 'RP' record queries"""
+        return self.namesTest(
+            self.resolver.lookupResponsibility('test-domain.com'),
+            [dns.Record_RP(mbox='whatever.i.dunno', txt='some.more.text', ttl=19283784)]
+        )
+
+
+    def testTXT(self):
+        """Test DNS 'TXT' record queries"""
+        return self.namesTest(
+            self.resolver.lookupText('test-domain.com'),
+            [dns.Record_TXT('A First piece of Text', 'a SecoNd piece', ttl=19283784),
+             dns.Record_TXT('Some more text, haha!  Yes.  \0  Still here?', ttl=19283784)]
+        )
+
+
+    def test_spf(self):
+        """
+        L{DNSServerFactory} can serve I{SPF} resource records.
+        """
+        return self.namesTest(
+            self.resolver.lookupSenderPolicy('test-domain.com'),
+            [dns.Record_SPF('v=spf1 mx/30 mx:example.org/30 -all', ttl=19283784),
+            dns.Record_SPF('v=spf1 +mx a:\0colo', '.example.com/28 -all not valid', ttl=19283784)]
+        )
+
+
+    def testWKS(self):
+        """Test DNS 'WKS' record queries"""
+        return self.namesTest(
+            self.resolver.lookupWellKnownServices('test-domain.com'),
+            [dns.Record_WKS('12.54.78.12', socket.IPPROTO_TCP, '\x12\x01\x16\xfe\xc1\x00\x01', ttl=19283784)]
+        )
+
+
+    def testSomeRecordsWithTTLs(self):
+        result_soa = copy.copy(my_soa)
+        result_soa.ttl = my_soa.expire
+        return self.namesTest(
+            self.resolver.lookupAllRecords('my-domain.com'),
+            [result_soa,
+             dns.Record_A('1.2.3.4', ttl='1S'),
+             dns.Record_NS('ns1.domain', ttl='2M'),
+             dns.Record_NS('ns2.domain', ttl='3H'),
+             dns.Record_SRV(257, 16383, 43690, 'some.other.place.fool', ttl='4D')]
+            )
+
+
+    def testAAAA(self):
+        """Test DNS 'AAAA' record queries (IPv6)"""
+        return self.namesTest(
+            self.resolver.lookupIPV6Address('test-domain.com'),
+            [dns.Record_AAAA('AF43:5634:1294:AFCB:56AC:48EF:34C3:01FF', ttl=19283784)]
+        )
+
+    def testA6(self):
+        """Test DNS 'A6' record queries (IPv6)"""
+        return self.namesTest(
+            self.resolver.lookupAddress6('test-domain.com'),
+            [dns.Record_A6(0, 'ABCD::4321', '', ttl=19283784),
+             dns.Record_A6(12, '0:0069::0', 'some.network.tld', ttl=19283784),
+             dns.Record_A6(8, '0:5634:1294:AFCB:56AC:48EF:34C3:01FF', 'tra.la.la.net', ttl=19283784)]
+        )
+
+
+    def test_zoneTransfer(self):
+        """
+        Test DNS 'AXFR' queries (Zone transfer)
+        """
+        default_ttl = soa_record.expire
+        results = [copy.copy(r) for r in reduce(operator.add, test_domain_com.records.values())]
+        for r in results:
+            if r.ttl is None:
+                r.ttl = default_ttl
+        return self.namesTest(
+            self.resolver.lookupZone('test-domain.com').addCallback(lambda r: (r[0][:-1],)),
+            results
+        )
+
+
+    def testSimilarZonesDontInterfere(self):
+        """Tests that unrelated zones don't mess with each other."""
+        return self.namesTest(
+            self.resolver.lookupAddress("anothertest-domain.com"),
+            [dns.Record_A('1.2.3.4', ttl=19283784)]
+        )
+
+
+    def test_NAPTR(self):
+        """
+        Test DNS 'NAPTR' record queries.
+        """
+        return self.namesTest(
+            self.resolver.lookupNamingAuthorityPointer('test-domain.com'),
+            [dns.Record_NAPTR(100, 10, "u", "sip+E2U",
+                              "!^.*$!sip:information at domain.tld!",
+                              ttl=19283784)])
+
+
+
+class DNSServerFactoryTests(unittest.TestCase):
+    """
+    Tests for L{server.DNSServerFactory}.
+    """
+    def _messageReceivedTest(self, methodName, message):
+        """
+        Assert that the named method is called with the given message when
+        it is passed to L{DNSServerFactory.messageReceived}.
+        """
+        # Make it appear to have some queries so that
+        # DNSServerFactory.allowQuery allows it.
+        message.queries = [None]
+
+        receivedMessages = []
+        def fakeHandler(message, protocol, address):
+            receivedMessages.append((message, protocol, address))
+
+        class FakeProtocol(object):
+            def writeMessage(self, message):
+                pass
+
+        protocol = FakeProtocol()
+        factory = server.DNSServerFactory(None)
+        setattr(factory, methodName, fakeHandler)
+        factory.messageReceived(message, protocol)
+        self.assertEqual(receivedMessages, [(message, protocol, None)])
+
+
+    def test_notifyMessageReceived(self):
+        """
+        L{DNSServerFactory.messageReceived} passes messages with an opcode
+        of C{OP_NOTIFY} on to L{DNSServerFactory.handleNotify}.
+        """
+        # RFC 1996, section 4.5
+        opCode = 4
+        self._messageReceivedTest('handleNotify', Message(opCode=opCode))
+
+
+    def test_updateMessageReceived(self):
+        """
+        L{DNSServerFactory.messageReceived} passes messages with an opcode
+        of C{OP_UPDATE} on to L{DNSServerFactory.handleOther}.
+
+        This may change if the implementation ever covers update messages.
+        """
+        # RFC 2136, section 1.3
+        opCode = 5
+        self._messageReceivedTest('handleOther', Message(opCode=opCode))
+
+
+    def test_connectionTracking(self):
+        """
+        The C{connectionMade} and C{connectionLost} methods of
+        L{DNSServerFactory} cooperate to keep track of all
+        L{DNSProtocol} objects created by a factory which are
+        connected.
+        """
+        protoA, protoB = object(), object()
+        factory = server.DNSServerFactory()
+        factory.connectionMade(protoA)
+        self.assertEqual(factory.connections, [protoA])
+        factory.connectionMade(protoB)
+        self.assertEqual(factory.connections, [protoA, protoB])
+        factory.connectionLost(protoA)
+        self.assertEqual(factory.connections, [protoB])
+        factory.connectionLost(protoB)
+        self.assertEqual(factory.connections, [])
+
+
+class HelperTestCase(unittest.TestCase):
+    def testSerialGenerator(self):
+        f = self.mktemp()
+        a = authority.getSerial(f)
+        for i in range(20):
+            b = authority.getSerial(f)
+            self.assertTrue(a < b)
+            a = b
+
+
+class AXFRTest(unittest.TestCase):
+    def setUp(self):
+        self.results = None
+        self.d = defer.Deferred()
+        self.d.addCallback(self._gotResults)
+        self.controller = client.AXFRController('fooby.com', self.d)
+
+        self.soa = dns.RRHeader(name='fooby.com', type=dns.SOA, cls=dns.IN, ttl=86400, auth=False,
+                                payload=dns.Record_SOA(mname='fooby.com',
+                                                       rname='hooj.fooby.com',
+                                                       serial=100,
+                                                       refresh=200,
+                                                       retry=300,
+                                                       expire=400,
+                                                       minimum=500,
+                                                       ttl=600))
+
+        self.records = [
+            self.soa,
+            dns.RRHeader(name='fooby.com', type=dns.NS, cls=dns.IN, ttl=700, auth=False,
+                         payload=dns.Record_NS(name='ns.twistedmatrix.com', ttl=700)),
+
+            dns.RRHeader(name='fooby.com', type=dns.MX, cls=dns.IN, ttl=700, auth=False,
+                         payload=dns.Record_MX(preference=10, exchange='mail.mv3d.com', ttl=700)),
+
+            dns.RRHeader(name='fooby.com', type=dns.A, cls=dns.IN, ttl=700, auth=False,
+                         payload=dns.Record_A(address='64.123.27.105', ttl=700)),
+            self.soa
+            ]
+
+    def _makeMessage(self):
+        # hooray they all have the same message format
+        return dns.Message(id=999, answer=1, opCode=0, recDes=0, recAv=1, auth=1, rCode=0, trunc=0, maxSize=0)
+
+    def testBindAndTNamesStyle(self):
+        # Bind style = One big single message
+        m = self._makeMessage()
+        m.queries = [dns.Query('fooby.com', dns.AXFR, dns.IN)]
+        m.answers = self.records
+        self.controller.messageReceived(m, None)
+        self.assertEqual(self.results, self.records)
+
+    def _gotResults(self, result):
+        self.results = result
+
+    def testDJBStyle(self):
+        # DJB style = message per record
+        records = self.records[:]
+        while records:
+            m = self._makeMessage()
+            m.queries = [] # DJB *doesn't* specify any queries.. hmm..
+            m.answers = [records.pop(0)]
+            self.controller.messageReceived(m, None)
+        self.assertEqual(self.results, self.records)
+
+
+
+class ResolvConfHandling(unittest.TestCase):
+    def testMissing(self):
+        resolvConf = self.mktemp()
+        r = client.Resolver(resolv=resolvConf)
+        self.assertEqual(r.dynServers, [('127.0.0.1', 53)])
+        r._parseCall.cancel()
+
+    def testEmpty(self):
+        resolvConf = self.mktemp()
+        fObj = open(resolvConf, 'w')
+        fObj.close()
+        r = client.Resolver(resolv=resolvConf)
+        self.assertEqual(r.dynServers, [('127.0.0.1', 53)])
+        r._parseCall.cancel()
+
+
+
+class AuthorityTests(unittest.TestCase):
+    """
+    Tests for the basic response record selection code in L{FileAuthority}
+    (independent of its fileness).
+    """
+    def test_recordMissing(self):
+        """
+        If a L{FileAuthority} has a zone which includes an I{NS} record for a
+        particular name and that authority is asked for another record for the
+        same name which does not exist, the I{NS} record is not included in the
+        authority section of the response.
+        """
+        authority = NoFileAuthority(
+            soa=(str(soa_record.mname), soa_record),
+            records={
+                str(soa_record.mname): [
+                    soa_record,
+                    dns.Record_NS('1.2.3.4'),
+                    ]})
+        d = authority.lookupAddress(str(soa_record.mname))
+        result = []
+        d.addCallback(result.append)
+        answer, authority, additional = result[0]
+        self.assertEqual(answer, [])
+        self.assertEqual(
+            authority, [
+                dns.RRHeader(
+                    str(soa_record.mname), soa_record.TYPE,
+                    ttl=soa_record.expire, payload=soa_record,
+                    auth=True)])
+        self.assertEqual(additional, [])
+
+
+    def _referralTest(self, method):
+        """
+        Create an authority and make a request against it.  Then verify that the
+        result is a referral, including no records in the answers or additional
+        sections, but with an I{NS} record in the authority section.
+        """
+        subdomain = 'example.' + str(soa_record.mname)
+        nameserver = dns.Record_NS('1.2.3.4')
+        authority = NoFileAuthority(
+            soa=(str(soa_record.mname), soa_record),
+            records={
+                subdomain: [
+                    nameserver,
+                    ]})
+        d = getattr(authority, method)(subdomain)
+        result = []
+        d.addCallback(result.append)
+        answer, authority, additional = result[0]
+        self.assertEqual(answer, [])
+        self.assertEqual(
+            authority, [dns.RRHeader(
+                    subdomain, dns.NS, ttl=soa_record.expire,
+                    payload=nameserver, auth=False)])
+        self.assertEqual(additional, [])
+
+
+    def test_referral(self):
+        """
+        When an I{NS} record is found for a child zone, it is included in the
+        authority section of the response.  It is marked as non-authoritative if
+        the authority is not also authoritative for the child zone (RFC 2181,
+        section 6.1).
+        """
+        self._referralTest('lookupAddress')
+
+
+    def test_allRecordsReferral(self):
+        """
+        A referral is also generated for a request of type C{ALL_RECORDS}.
+        """
+        self._referralTest('lookupAllRecords')
+
+
+
+class NoInitialResponseTestCase(unittest.TestCase):
+
+    def test_no_answer(self):
+        """
+        If a request returns a L{dns.NS} response, but we can't connect to the
+        given server, the request fails with the error returned at connection.
+        """
+
+        def query(self, *args):
+            # Pop from the message list, so that it blows up if more queries
+            # are run than expected.
+            return succeed(messages.pop(0))
+
+        def queryProtocol(self, *args, **kwargs):
+            return defer.fail(socket.gaierror("Couldn't connect"))
+
+        resolver = Resolver(servers=[('0.0.0.0', 0)])
+        resolver._query = query
+        messages = []
+        # Let's patch dns.DNSDatagramProtocol.query, as there is no easy way to
+        # customize it.
+        self.patch(dns.DNSDatagramProtocol, "query", queryProtocol)
+
+        records = [
+            dns.RRHeader(name='fooba.com', type=dns.NS, cls=dns.IN, ttl=700,
+                         auth=False,
+                         payload=dns.Record_NS(name='ns.twistedmatrix.com',
+                         ttl=700))]
+        m = dns.Message(id=999, answer=1, opCode=0, recDes=0, recAv=1, auth=1,
+                        rCode=0, trunc=0, maxSize=0)
+        m.answers = records
+        messages.append(m)
+        return self.assertFailure(
+            resolver.getHostByName("fooby.com"), socket.gaierror)
+
+
+
+class SecondaryAuthorityServiceTests(unittest.TestCase):
+    """
+    Tests for L{SecondaryAuthorityService}, a service which keeps one or more
+    authorities up to date by doing zone transfers from a master.
+    """
+
+    def test_constructAuthorityFromHost(self):
+        """
+        L{SecondaryAuthorityService} can be constructed with a C{str} giving a
+        master server address and several domains, causing the creation of a
+        secondary authority for each domain using that master server address
+        and the default DNS port.
+        """
+        primary = '192.168.1.2'
+        service = SecondaryAuthorityService(
+            primary, ['example.com', 'example.org'])
+        self.assertEqual(service.primary, primary)
+        self.assertEqual(service._port, 53)
+
+        self.assertEqual(service.domains[0].primary, primary)
+        self.assertEqual(service.domains[0]._port, 53)
+        self.assertEqual(service.domains[0].domain, 'example.com')
+
+        self.assertEqual(service.domains[1].primary, primary)
+        self.assertEqual(service.domains[1]._port, 53)
+        self.assertEqual(service.domains[1].domain, 'example.org')
+
+
+    def test_constructAuthorityFromHostAndPort(self):
+        """
+        L{SecondaryAuthorityService.fromServerAddressAndDomains} constructs a
+        new L{SecondaryAuthorityService} from a two-tuple giving a master
+        server address and DNS port, plus several domains, causing the
+        creation of a secondary authority for each domain using that master
+        server address and the given DNS port.
+        """
+        primary = '192.168.1.3'
+        port = 5335
+        service = SecondaryAuthorityService.fromServerAddressAndDomains(
+            (primary, port), ['example.net', 'example.edu'])
+        self.assertEqual(service.primary, primary)
+        self.assertEqual(service._port, 5335)
+
+        self.assertEqual(service.domains[0].primary, primary)
+        self.assertEqual(service.domains[0]._port, port)
+        self.assertEqual(service.domains[0].domain, 'example.net')
+
+        self.assertEqual(service.domains[1].primary, primary)
+        self.assertEqual(service.domains[1]._port, port)
+        self.assertEqual(service.domains[1].domain, 'example.edu')
+
+
+
+class SecondaryAuthorityTests(unittest.TestCase):
+    """
+    L{twisted.names.secondary.SecondaryAuthority} correctly constructs objects
+    with a specified IP address and optionally specified DNS port.
+    """
+
+    def test_defaultPort(self):
+        """
+        When constructed using L{SecondaryAuthority.__init__}, the default port
+        of 53 is used.
+        """
+        secondary = SecondaryAuthority('192.168.1.1', 'inside.com')
+        self.assertEqual(secondary.primary, '192.168.1.1')
+        self.assertEqual(secondary._port, 53)
+        self.assertEqual(secondary.domain, 'inside.com')
+
+
+    def test_explicitPort(self):
+        """
+        When constructed using L{SecondaryAuthority.fromServerAddressAndDomain},
+        the specified port is used.
+        """
+        secondary = SecondaryAuthority.fromServerAddressAndDomain(
+            ('192.168.1.1', 5353), 'inside.com')
+        self.assertEqual(secondary.primary, '192.168.1.1')
+        self.assertEqual(secondary._port, 5353)
+        self.assertEqual(secondary.domain, 'inside.com')
+
+
+    def test_transfer(self):
+        """
+        An attempt is made to transfer the zone for the domain the
+        L{SecondaryAuthority} was constructed with from the server address it
+        was constructed with when L{SecondaryAuthority.transfer} is called.
+        """
+        class ClockMemoryReactor(Clock, MemoryReactor):
+            def __init__(self):
+                Clock.__init__(self)
+                MemoryReactor.__init__(self)
+
+        secondary = SecondaryAuthority.fromServerAddressAndDomain(
+            ('192.168.1.2', 1234), 'example.com')
+        secondary._reactor = reactor = ClockMemoryReactor()
+
+        secondary.transfer()
+
+        # Verify a connection attempt to the server address above
+        host, port, factory, timeout, bindAddress = reactor.tcpClients.pop(0)
+        self.assertEqual(host, '192.168.1.2')
+        self.assertEqual(port, 1234)
+
+        # See if a zone transfer query is issued.
+        proto = factory.buildProtocol((host, port))
+        transport = StringTransport()
+        proto.makeConnection(transport)
+
+        msg = Message()
+        # DNSProtocol.writeMessage length encodes the message by prepending a
+        # 2 byte message length to the buffered value.
+        msg.decode(StringIO(transport.value()[2:]))
+
+        self.assertEqual(
+            [dns.Query('example.com', dns.AXFR, dns.IN)], msg.queries)
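+
+
+# Illustrative sketch only, not part of the upstream module: test_transfer
+# above strips a 2-byte prefix before decoding the captured bytes because
+# DNS over TCP (RFC 1035, section 4.2.2) frames each message with a
+# big-endian 16-bit length.  The framing can be undone like this (the name
+# below is local to this sketch):
+def _stripTCPLengthPrefix(raw):
+    import struct
+    # The first two bytes give the length of the DNS message that follows.
+    (length,) = struct.unpack('!H', raw[:2])
+    return raw[2:2 + length]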
diff --git a/ThirdParty/Twisted/twisted/names/test/test_rootresolve.py b/ThirdParty/Twisted/twisted/names/test/test_rootresolve.py
new file mode 100644
index 0000000..713e366
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_rootresolve.py
@@ -0,0 +1,725 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for Twisted.names' root resolver.
+"""
+
+from random import randrange
+
+from zope.interface import implementer
+from zope.interface.verify import verifyClass
+
+from twisted.python.log import msg
+from twisted.trial import util
+from twisted.trial.unittest import TestCase
+from twisted.internet.defer import Deferred, succeed, gatherResults
+from twisted.internet.task import Clock
+from twisted.internet.address import IPv4Address
+from twisted.internet.interfaces import IReactorUDP, IUDPTransport
+from twisted.names.root import Resolver, lookupNameservers, lookupAddress
+from twisted.names.root import extractAuthority, discoverAuthority, retry
+from twisted.names.dns import (
+    IN, HS, A, NS, CNAME, OK, ENAME, Record_CNAME,
+    Name, Query, Message, RRHeader, Record_A, Record_NS)
+from twisted.names.error import DNSNameError, ResolverError
+
+
+def getOnePayload(results):
+    """
+    From the result of a L{Deferred} returned by L{IResolver.lookupAddress},
+    return the payload of the first record in the answer section.
+    """
+    ans, auth, add = results
+    return ans[0].payload
+
+
+def getOneAddress(results):
+    """
+    From the result of a L{Deferred} returned by L{IResolver.lookupAddress},
+    return the first IPv4 address from the answer section.
+    """
+    return getOnePayload(results).dottedQuad()
+
+
+
+@implementer(IUDPTransport)
+class MemoryDatagramTransport(object):
+    """
+    This L{IUDPTransport} implementation enforces the usual connection rules
+    and captures sent traffic in a list for later inspection.
+
+    @ivar _host: The host address to which this transport is bound.
+    @ivar _protocol: The protocol connected to this transport.
+    @ivar _sentPackets: A C{list} of two-tuples of the datagrams passed to
+        C{write} and the addresses to which they are destined.
+
+    @ivar _connectedTo: C{None} if this transport is unconnected, otherwise an
+        address to which all traffic is supposedly sent.
+
+    @ivar _maxPacketSize: An C{int} giving the maximum length of a datagram
+        which will be successfully handled by C{write}.
+    """
+    def __init__(self, host, protocol, maxPacketSize):
+        self._host = host
+        self._protocol = protocol
+        self._sentPackets = []
+        self._connectedTo = None
+        self._maxPacketSize = maxPacketSize
+
+
+    def getHost(self):
+        """
+        Return the address which this transport is pretending to be bound
+        to.
+        """
+        return IPv4Address('UDP', *self._host)
+
+
+    def connect(self, host, port):
+        """
+        Connect this transport to the given address.
+        """
+        if self._connectedTo is not None:
+            raise ValueError("Already connected")
+        self._connectedTo = (host, port)
+
+
+    def write(self, datagram, addr=None):
+        """
+        Send the given datagram.
+        """
+        if addr is None:
+            addr = self._connectedTo
+        if addr is None:
+            raise ValueError("Need an address")
+        if len(datagram) > self._maxPacketSize:
+            raise ValueError("Packet too big")
+        self._sentPackets.append((datagram, addr))
+
+
+    def stopListening(self):
+        """
+        Shut down this transport.
+        """
+        self._protocol.stopProtocol()
+        return succeed(None)
+
+verifyClass(IUDPTransport, MemoryDatagramTransport)
+
+
+
+@implementer(IReactorUDP)
+class MemoryReactor(Clock):
+    """
+    An L{IReactorTime} and L{IReactorUDP} provider.
+
+    Time is controlled deterministically via the base class, L{Clock}.  UDP is
+    handled in-memory by connecting protocols to instances of
+    L{MemoryDatagramTransport}.
+
+    @ivar udpPorts: A C{dict} mapping port numbers to instances of
+        L{MemoryDatagramTransport}.
+    """
+    def __init__(self):
+        Clock.__init__(self)
+        self.udpPorts = {}
+
+
+    def listenUDP(self, port, protocol, interface='', maxPacketSize=8192):
+        """
+        Pretend to bind a UDP port and connect the given protocol to it.
+        """
+        if port == 0:
+            while True:
+                port = randrange(1, 2 ** 16)
+                if port not in self.udpPorts:
+                    break
+        if port in self.udpPorts:
+            raise ValueError("Address in use")
+        transport = MemoryDatagramTransport(
+            (interface, port), protocol, maxPacketSize)
+        self.udpPorts[port] = transport
+        protocol.makeConnection(transport)
+        return transport
+
+verifyClass(IReactorUDP, MemoryReactor)
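+
+
+# Illustrative sketch only, not part of the upstream module: the two fakes
+# above are meant to be combined roughly as follows (RootResolverTests below
+# does the same thing with real DNS traffic).  The name below is local to
+# this sketch.
+def _demoMemoryUDP():
+    from twisted.internet.protocol import DatagramProtocol
+    reactor = MemoryReactor()
+    transport = reactor.listenUDP(0, DatagramProtocol())
+    # Datagrams written through the transport never touch the network; they
+    # are recorded so a test can assert on exactly what was sent and where.
+    transport.write('hello', ('10.0.0.1', 53))
+    return transport._sentPackets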
+
+
+
+class RootResolverTests(TestCase):
+    """
+    Tests for L{twisted.names.root.Resolver}.
+    """
+    def _queryTest(self, filter):
+        """
+        Invoke L{Resolver._query} and verify that it sends the correct DNS
+        query.  Deliver a canned response to the query and return whatever the
+        L{Deferred} returned by L{Resolver._query} fires with.
+
+        @param filter: The value to pass for the C{filter} parameter to
+            L{Resolver._query}.
+        """
+        reactor = MemoryReactor()
+        resolver = Resolver([], reactor=reactor)
+        d = resolver._query(
+            Query(b'foo.example.com', A, IN), [('1.1.2.3', 1053)], (30,),
+            filter)
+
+        # A UDP port should have been started.
+        portNumber, transport = reactor.udpPorts.popitem()
+
+        # And a DNS packet sent.
+        [(packet, address)] = transport._sentPackets
+
+        msg = Message()
+        msg.fromStr(packet)
+
+        # It should be a query with the parameters used above.
+        self.assertEqual(msg.queries, [Query(b'foo.example.com', A, IN)])
+        self.assertEqual(msg.answers, [])
+        self.assertEqual(msg.authority, [])
+        self.assertEqual(msg.additional, [])
+
+        response = []
+        d.addCallback(response.append)
+        self.assertEqual(response, [])
+
+        # Once a reply is received, the Deferred should fire.
+        del msg.queries[:]
+        msg.answer = 1
+        msg.answers.append(RRHeader(b'foo.example.com', payload=Record_A('5.8.13.21')))
+        transport._protocol.datagramReceived(msg.toStr(), ('1.1.2.3', 1053))
+        return response[0]
+
+
+    def test_filteredQuery(self):
+        """
+        L{Resolver._query} accepts a L{Query} instance and an address, issues
+        the query, and returns a L{Deferred} which fires with the response to
+        the query.  If a true value is passed for the C{filter} parameter, the
+        result is a three-tuple of lists of records.
+        """
+        answer, authority, additional = self._queryTest(True)
+        self.assertEqual(
+            answer,
+            [RRHeader(b'foo.example.com', payload=Record_A('5.8.13.21', ttl=0))])
+        self.assertEqual(authority, [])
+        self.assertEqual(additional, [])
+
+
+    def test_unfilteredQuery(self):
+        """
+        Similar to L{test_filteredQuery}, but for the case where a false value
+        is passed for the C{filter} parameter.  In this case, the result is a
+        L{Message} instance.
+        """
+        message = self._queryTest(False)
+        self.assertIsInstance(message, Message)
+        self.assertEqual(message.queries, [])
+        self.assertEqual(
+            message.answers,
+            [RRHeader(b'foo.example.com', payload=Record_A('5.8.13.21', ttl=0))])
+        self.assertEqual(message.authority, [])
+        self.assertEqual(message.additional, [])
+
+
+    def _respond(self, answers=[], authority=[], additional=[], rCode=OK):
+        """
+        Create a L{Message} suitable for use as a response to a query.
+
+        @param answers: A C{list} of two-tuples giving data for the answers
+            section of the message.  The first element of each tuple is a name
+            for the L{RRHeader}.  The second element is the payload.
+        @param authority: A C{list} like C{answers}, but for the authority
+            section of the response.
+        @param additional: A C{list} like C{answers}, but for the
+            additional section of the response.
+        @param rCode: The response code the message will be created with.
+
+        @return: A new L{Message} initialized with the given values.
+        """
+        response = Message(rCode=rCode)
+        for (section, data) in [(response.answers, answers),
+                                (response.authority, authority),
+                                (response.additional, additional)]:
+            section.extend([
+                    RRHeader(name, record.TYPE, getattr(record, 'CLASS', IN),
+                             payload=record)
+                    for (name, record) in data])
+        return response
+
+
+    def _getResolver(self, serverResponses, maximumQueries=10):
+        """
+        Create and return a new L{root.Resolver} modified to resolve queries
+        against the record data represented by C{serverResponses}.
+
+        @param serverResponses: A mapping from dns server addresses to
+            mappings.  The inner mappings are from query two-tuples (name,
+            type) to dictionaries suitable for use as **arguments to
+            L{_respond}.  See that method for details.
+        """
+        roots = ['1.1.2.3']
+        resolver = Resolver(roots, maximumQueries)
+
+        def query(query, serverAddresses, timeout, filter):
+            msg("Query for QNAME %s at %r" % (query.name, serverAddresses))
+            for addr in serverAddresses:
+                try:
+                    server = serverResponses[addr]
+                except KeyError:
+                    continue
+                records = server[query.name.name, query.type]
+                return succeed(self._respond(**records))
+        resolver._query = query
+        return resolver
+
+
+    def test_lookupAddress(self):
+        """
+        L{root.Resolver.lookupAddress} looks up the I{A} records for the
+        specified hostname by first querying one of the root servers the
+        resolver was created with and then following the authority delegations
+        until a result is received.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                (b'foo.example.com', A): {
+                    'authority': [(b'foo.example.com', Record_NS(b'ns1.example.com'))],
+                    'additional': [(b'ns1.example.com', Record_A('34.55.89.144'))],
+                    },
+                },
+            ('34.55.89.144', 53): {
+                (b'foo.example.com', A): {
+                    'answers': [(b'foo.example.com', Record_A('10.0.0.1'))],
+                    }
+                },
+            }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress(b'foo.example.com')
+        d.addCallback(getOneAddress)
+        d.addCallback(self.assertEqual, '10.0.0.1')
+        return d
+
+
+    def test_lookupChecksClass(self):
+        """
+        If a response includes a record with a class different from the one
+        in the query, it is ignored and lookup continues until a record with
+        the right class is found.
+        """
+        badClass = Record_A('10.0.0.1')
+        badClass.CLASS = HS
+        servers = {
+            ('1.1.2.3', 53): {
+                ('foo.example.com', A): {
+                    'answers': [('foo.example.com', badClass)],
+                    'authority': [('foo.example.com', Record_NS('ns1.example.com'))],
+                    'additional': [('ns1.example.com', Record_A('10.0.0.2'))],
+                },
+            },
+            ('10.0.0.2', 53): {
+                ('foo.example.com', A): {
+                    'answers': [('foo.example.com', Record_A('10.0.0.3'))],
+                },
+            },
+        }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress('foo.example.com')
+        d.addCallback(getOnePayload)
+        d.addCallback(self.assertEqual, Record_A('10.0.0.3'))
+        return d
+
+
+    def test_missingGlue(self):
+        """
+        If an intermediate response includes no glue records for the
+        authorities, separate queries are made to find those addresses.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                (b'foo.example.com', A): {
+                    'authority': [(b'foo.example.com', Record_NS(b'ns1.example.org'))],
+                    # Conspicuous lack of an additional section naming ns1.example.org
+                    },
+                (b'ns1.example.org', A): {
+                    'answers': [(b'ns1.example.org', Record_A('10.0.0.1'))],
+                    },
+                },
+            ('10.0.0.1', 53): {
+                (b'foo.example.com', A): {
+                    'answers': [(b'foo.example.com', Record_A('10.0.0.2'))],
+                    },
+                },
+            }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress(b'foo.example.com')
+        d.addCallback(getOneAddress)
+        d.addCallback(self.assertEqual, '10.0.0.2')
+        return d
+
+
+    def test_missingName(self):
+        """
+        If a name is missing, L{Resolver.lookupAddress} returns a L{Deferred}
+        which fails with L{DNSNameError}.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                (b'foo.example.com', A): {
+                    'rCode': ENAME,
+                    },
+                },
+            }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress(b'foo.example.com')
+        return self.assertFailure(d, DNSNameError)
+
+
+    def test_answerless(self):
+        """
+        If a query is responded to with no answers or nameserver records, the
+        L{Deferred} returned by L{Resolver.lookupAddress} fires with
+        L{ResolverError}.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                ('example.com', A): {
+                    },
+                },
+            }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress('example.com')
+        return self.assertFailure(d, ResolverError)
+
+
+    def test_delegationLookupError(self):
+        """
+        If there is an error resolving the nameserver in a delegation response,
+        the L{Deferred} returned by L{Resolver.lookupAddress} fires with that
+        error.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                ('example.com', A): {
+                    'authority': [('example.com', Record_NS('ns1.example.com'))],
+                    },
+                ('ns1.example.com', A): {
+                    'rCode': ENAME,
+                    },
+                },
+            }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress('example.com')
+        return self.assertFailure(d, DNSNameError)
+
+
+    def test_delegationLookupEmpty(self):
+        """
+        If there are no records in the response to a lookup of a delegation
+        nameserver, the L{Deferred} returned by L{Resolver.lookupAddress} fires
+        with L{ResolverError}.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                ('example.com', A): {
+                    'authority': [('example.com', Record_NS('ns1.example.com'))],
+                    },
+                ('ns1.example.com', A): {
+                    },
+                },
+            }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress('example.com')
+        return self.assertFailure(d, ResolverError)
+
+
+    def test_lookupNameservers(self):
+        """
+        L{Resolver.lookupNameservers} is like L{Resolver.lookupAddress}, except
+        it queries for I{NS} records instead of I{A} records.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                (b'example.com', A): {
+                    'rCode': ENAME,
+                    },
+                (b'example.com', NS): {
+                    'answers': [(b'example.com', Record_NS(b'ns1.example.com'))],
+                    },
+                },
+            }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupNameservers(b'example.com')
+        def getOneName(results):
+            ans, auth, add = results
+            return ans[0].payload.name
+        d.addCallback(getOneName)
+        d.addCallback(self.assertEqual, Name(b'ns1.example.com'))
+        return d
+
+
+    def test_returnCanonicalName(self):
+        """
+        If a I{CNAME} record is encountered as the answer to a query for
+        another record type, that record is returned as the answer.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                (b'example.com', A): {
+                    'answers': [(b'example.com', Record_CNAME(b'example.net')),
+                                (b'example.net', Record_A('10.0.0.7'))],
+                    },
+                },
+            }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress(b'example.com')
+        d.addCallback(lambda results: results[0]) # Get the answer section
+        d.addCallback(
+            self.assertEqual,
+            [RRHeader(b'example.com', CNAME, payload=Record_CNAME(b'example.net')),
+             RRHeader(b'example.net', A, payload=Record_A('10.0.0.7'))])
+        return d
+
+
+    def test_followCanonicalName(self):
+        """
+        If no record of the requested type is included in a response, but a
+        I{CNAME} record for the query name is included, queries are made to
+        resolve the value of the I{CNAME}.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                ('example.com', A): {
+                    'answers': [('example.com', Record_CNAME('example.net'))],
+                },
+                ('example.net', A): {
+                    'answers': [('example.net', Record_A('10.0.0.5'))],
+                },
+            },
+        }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress('example.com')
+        d.addCallback(lambda results: results[0]) # Get the answer section
+        d.addCallback(
+            self.assertEqual,
+            [RRHeader('example.com', CNAME, payload=Record_CNAME('example.net')),
+             RRHeader('example.net', A, payload=Record_A('10.0.0.5'))])
+        return d
+
+
+    def test_detectCanonicalNameLoop(self):
+        """
+        If there is a cycle between I{CNAME} records in a response, this is
+        detected and the L{Deferred} returned by the lookup method fails
+        with L{ResolverError}.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                ('example.com', A): {
+                    'answers': [('example.com', Record_CNAME('example.net')),
+                                ('example.net', Record_CNAME('example.com'))],
+                },
+            },
+        }
+        resolver = self._getResolver(servers)
+        d = resolver.lookupAddress('example.com')
+        return self.assertFailure(d, ResolverError)
+
+
+    def test_boundedQueries(self):
+        """
+        L{Resolver.lookupAddress} won't issue more queries following
+        delegations than the limit passed to its initializer.
+        """
+        servers = {
+            ('1.1.2.3', 53): {
+                # First query - force it to start over with a name lookup of
+                # ns1.example.com
+                ('example.com', A): {
+                    'authority': [('example.com', Record_NS('ns1.example.com'))],
+                },
+                # Second query - let it resume the original lookup with the
+                # address of the nameserver handling the delegation.
+                ('ns1.example.com', A): {
+                    'answers': [('ns1.example.com', Record_A('10.0.0.2'))],
+                },
+            },
+            ('10.0.0.2', 53): {
+                # Third query - let it jump straight to asking the
+                # delegation server by including its address here (different
+                # case from the first query).
+                ('example.com', A): {
+                    'authority': [('example.com', Record_NS('ns2.example.com'))],
+                    'additional': [('ns2.example.com', Record_A('10.0.0.3'))],
+                },
+            },
+            ('10.0.0.3', 53): {
+                # Fourth query - give it the answer, we're done.
+                ('example.com', A): {
+                    'answers': [('example.com', Record_A('10.0.0.4'))],
+                },
+            },
+        }
+
+        # Make two resolvers.  One which is allowed to make 3 queries
+        # maximum, and so will fail, and one which may make 4, and so should
+        # succeed.
+        failer = self._getResolver(servers, 3)
+        failD = self.assertFailure(
+            failer.lookupAddress('example.com'), ResolverError)
+
+        succeeder = self._getResolver(servers, 4)
+        succeedD = succeeder.lookupAddress('example.com')
+        succeedD.addCallback(getOnePayload)
+        succeedD.addCallback(self.assertEqual, Record_A('10.0.0.4'))
+
+        return gatherResults([failD, succeedD])
+
+
+    def test_discoveredAuthorityDeprecated(self):
+        """
+        Calling L{Resolver.discoveredAuthority} produces a deprecation warning.
+        """
+        resolver = Resolver([])
+        d = resolver.discoveredAuthority('127.0.0.1', 'example.com', IN, A, (0,))
+
+        warnings = self.flushWarnings([
+                self.test_discoveredAuthorityDeprecated])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            'twisted.names.root.Resolver.discoveredAuthority is deprecated since '
+            'Twisted 10.0.  Use twisted.names.client.Resolver directly, instead.')
+        self.assertEqual(len(warnings), 1)
+
+        # This will time out quickly, but we need to wait for it because there
+        # are resources associated with it.
+        d.addErrback(lambda ignored: None)
+        return d
+
+
+
+class StubDNSDatagramProtocol:
+    """
+    A do-nothing stand-in for L{DNSDatagramProtocol} which can be used to avoid
+    network traffic in tests where that kind of thing doesn't matter.
+    """
+    def query(self, *a, **kw):
+        return Deferred()
+
+
+
+_retrySuppression = util.suppress(
+    category=DeprecationWarning,
+    message=(
+        'twisted.names.root.retry is deprecated since Twisted 10.0.  Use a '
+        'Resolver object for retry logic.'))
+
+
+class DiscoveryToolsTests(TestCase):
+    """
+    Tests for the free functions in L{twisted.names.root} which help out with
+    authority discovery.  Since these functions are mostly deprecated, these
+    are mostly deprecation tests.
+    """
+    def test_lookupNameserversDeprecated(self):
+        """
+        Calling L{root.lookupNameservers} produces a deprecation warning.
+        """
+        # Don't care about the return value, since it will never have a result,
+        # because StubDNSDatagramProtocol doesn't actually work.
+        lookupNameservers('example.com', '127.0.0.1', StubDNSDatagramProtocol())
+
+        warnings = self.flushWarnings([
+                self.test_lookupNameserversDeprecated])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            'twisted.names.root.lookupNameservers is deprecated since Twisted '
+            '10.0.  Use twisted.names.root.Resolver.lookupNameservers '
+            'instead.')
+        self.assertEqual(len(warnings), 1)
+    test_lookupNameserversDeprecated.suppress = [_retrySuppression]
+
+
+    def test_lookupAddressDeprecated(self):
+        """
+        Calling L{root.lookupAddress} produces a deprecation warning.
+        """
+        # Don't care about the return value, since it will never have a result,
+        # because StubDNSDatagramProtocol doesn't actually work.
+        lookupAddress('example.com', '127.0.0.1', StubDNSDatagramProtocol())
+
+        warnings = self.flushWarnings([
+                self.test_lookupAddressDeprecated])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            'twisted.names.root.lookupAddress is deprecated since Twisted '
+            '10.0.  Use twisted.names.root.Resolver.lookupAddress '
+            'instead.')
+        self.assertEqual(len(warnings), 1)
+    test_lookupAddressDeprecated.suppress = [_retrySuppression]
+
+
+    def test_extractAuthorityDeprecated(self):
+        """
+        Calling L{root.extractAuthority} produces a deprecation warning.
+        """
+        extractAuthority(Message(), {})
+
+        warnings = self.flushWarnings([
+                self.test_extractAuthorityDeprecated])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            'twisted.names.root.extractAuthority is deprecated since Twisted '
+            '10.0.  Please inspect the Message object directly.')
+        self.assertEqual(len(warnings), 1)
+
+
+    def test_discoverAuthorityDeprecated(self):
+        """
+        Calling L{root.discoverAuthority} produces a deprecation warning.
+        """
+        discoverAuthority(
+            'example.com', ['10.0.0.1'], p=StubDNSDatagramProtocol())
+
+        warnings = self.flushWarnings([
+                self.test_discoverAuthorityDeprecated])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            'twisted.names.root.discoverAuthority is deprecated since Twisted '
+            '10.0.  Use twisted.names.root.Resolver.lookupNameservers '
+            'instead.')
+        self.assertEqual(len(warnings), 1)
+
+    # discoverAuthority is implemented in terms of deprecated functions,
+    # too.  Ignore those.
+    test_discoverAuthorityDeprecated.suppress = [
+        util.suppress(
+            category=DeprecationWarning,
+            message=(
+                'twisted.names.root.lookupNameservers is deprecated since '
+                'Twisted 10.0.  Use '
+                'twisted.names.root.Resolver.lookupNameservers instead.')),
+        _retrySuppression]
+
+
+    def test_retryDeprecated(self):
+        """
+        Calling L{root.retry} produces a deprecation warning.
+        """
+        retry([0], StubDNSDatagramProtocol())
+
+        warnings = self.flushWarnings([
+                self.test_retryDeprecated])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            'twisted.names.root.retry is deprecated since Twisted '
+            '10.0.  Use a Resolver object for retry logic.')
+        self.assertEqual(len(warnings), 1)
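
The serverResponses mapping consumed by _getResolver above doubles as a compact
model of a delegation chain: each server address maps (name, type) query tuples
to the keyword arguments that _respond turns into a DNS response.  A minimal
standalone sketch of walking such a mapping, in plain Python with no Twisted
dependencies (follow_delegation and its simplified record tuples are
hypothetical names invented only to illustrate the fixture's shape, not
anything in twisted.names.root):

# Standalone sketch: follow NS delegations through a serverResponses-style
# mapping until an answer turns up or the query budget runs out.
def follow_delegation(servers, name, root, max_queries=10):
    address = root
    for _ in range(max_queries):
        response = servers.get(address, {}).get((name, 'A'), {})
        answers = response.get('answers', [])
        if answers:
            return answers
        # No answer yet: find glue giving the next nameserver's address.
        authority = response.get('authority', [])
        additional = dict(response.get('additional', []))
        if not authority:
            raise LookupError("answerless response from %r" % (address,))
        ns_name = authority[0][1]
        if ns_name not in additional:
            raise LookupError("missing glue for %r" % (ns_name,))
        address = (additional[ns_name], 53)
    raise LookupError("query limit of %d exceeded" % max_queries)

servers = {
    ('1.1.2.3', 53): {
        ('example.com', 'A'): {
            'authority': [('example.com', 'ns1.example.com')],
            'additional': [('ns1.example.com', '10.0.0.3')],
        },
    },
    ('10.0.0.3', 53): {
        ('example.com', 'A'): {
            'answers': [('example.com', '10.0.0.4')],
        },
    },
}

print(follow_delegation(servers, 'example.com', root=('1.1.2.3', 53)))
# [('example.com', '10.0.0.4')]
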
diff --git a/ThirdParty/Twisted/twisted/names/test/test_srvconnect.py b/ThirdParty/Twisted/twisted/names/test/test_srvconnect.py
new file mode 100644
index 0000000..2eabdfd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_srvconnect.py
@@ -0,0 +1,169 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for L{twisted.names.srvconnect}.
+"""
+
+from twisted.internet import defer, protocol
+from twisted.names import client, dns, srvconnect
+from twisted.names.common import ResolverBase
+from twisted.names.error import DNSNameError
+from twisted.internet.error import DNSLookupError, ServiceNameUnknownError
+from twisted.trial import unittest
+from twisted.test.proto_helpers import MemoryReactor
+
+
+class FakeResolver(ResolverBase):
+    """
+    Resolver that only gives out one given result.
+
+    Either L{results} or L{failure} must be set and will be used for
+    the return value of L{_lookup}.
+
+    @ivar results: List of L{dns.RRHeader} for the desired result.
+    @type results: C{list}
+    @ivar failure: Failure with an exception from L{twisted.names.error}.
+    @type failure: L{Failure<twisted.python.failure.Failure>}
+    """
+
+    def __init__(self, results=None, failure=None):
+        self.results = results
+        self.failure = failure
+
+    def _lookup(self, name, cls, qtype, timeout):
+        """
+        Return the result or failure on lookup.
+        """
+        if self.results is not None:
+            return defer.succeed((self.results, [], []))
+        else:
+            return defer.fail(self.failure)
+
+
+
+class DummyFactory(protocol.ClientFactory):
+    """
+    Dummy client factory that stores the reason of connection failure.
+    """
+    def __init__(self):
+        self.reason = None
+
+    def clientConnectionFailed(self, connector, reason):
+        self.reason = reason
+
+
+
+class SRVConnectorTest(unittest.TestCase):
+    """
+    Tests for L{srvconnect.SRVConnector}.
+    """
+
+    def setUp(self):
+        self.patch(client, 'theResolver', FakeResolver())
+        self.reactor = MemoryReactor()
+        self.factory = DummyFactory()
+        self.connector = srvconnect.SRVConnector(self.reactor, 'xmpp-server',
+                                                 'example.org', self.factory)
+
+
+    def test_SRVPresent(self):
+        """
+        Test connectTCP gets called with the address from the SRV record.
+        """
+        payload = dns.Record_SRV(port=6269, target='host.example.org', ttl=60)
+        client.theResolver.results = [dns.RRHeader(name='example.org',
+                                                   type=dns.SRV,
+                                                   cls=dns.IN, ttl=60,
+                                                   payload=payload)]
+        self.connector.connect()
+
+        self.assertIdentical(None, self.factory.reason)
+        self.assertEqual(
+            self.reactor.tcpClients.pop()[:2], ('host.example.org', 6269))
+
+
+    def test_SRVNotPresent(self):
+        """
+        Test connectTCP gets called with fallback parameters on NXDOMAIN.
+        """
+        client.theResolver.failure = DNSNameError('example.org')
+        self.connector.connect()
+
+        self.assertIdentical(None, self.factory.reason)
+        self.assertEqual(
+            self.reactor.tcpClients.pop()[:2], ('example.org', 'xmpp-server'))
+
+
+    def test_SRVNoResult(self):
+        """
+        Test connectTCP gets called with fallback parameters on empty result.
+        """
+        client.theResolver.results = []
+        self.connector.connect()
+
+        self.assertIdentical(None, self.factory.reason)
+        self.assertEqual(
+            self.reactor.tcpClients.pop()[:2], ('example.org', 'xmpp-server'))
+
+
+    def test_SRVNoResultUnknownServiceDefaultPort(self):
+        """
+        connectTCP gets called with default port if the service is not defined.
+        """
+        self.connector = srvconnect.SRVConnector(self.reactor,
+                                                 'thisbetternotexist',
+                                                 'example.org', self.factory,
+                                                 defaultPort=5222)
+
+        client.theResolver.failure = ServiceNameUnknownError()
+        self.connector.connect()
+
+        self.assertIdentical(None, self.factory.reason)
+        self.assertEqual(
+            self.reactor.tcpClients.pop()[:2], ('example.org', 5222))
+
+
+    def test_SRVNoResultUnknownServiceNoDefaultPort(self):
+        """
+        Connect fails on no result, unknown service and no default port.
+        """
+        self.connector = srvconnect.SRVConnector(self.reactor,
+                                                 'thisbetternotexist',
+                                                 'example.org', self.factory)
+
+        client.theResolver.failure = ServiceNameUnknownError()
+        self.connector.connect()
+
+        self.assertTrue(self.factory.reason.check(ServiceNameUnknownError))
+
+
+    def test_SRVBadResult(self):
+        """
+        Test connectTCP gets called with fallback parameters on bad result.
+        """
+        client.theResolver.results = [dns.RRHeader(name='example.org',
+                                                   type=dns.CNAME,
+                                                   cls=dns.IN, ttl=60,
+                                                   payload=None)]
+        self.connector.connect()
+
+        self.assertIdentical(None, self.factory.reason)
+        self.assertEqual(
+            self.reactor.tcpClients.pop()[:2], ('example.org', 'xmpp-server'))
+
+
+    def test_SRVNoService(self):
+        """
+        Test that connecting fails when no service is present.
+        """
+        payload = dns.Record_SRV(port=5269, target='.', ttl=60)
+        client.theResolver.results = [dns.RRHeader(name='example.org',
+                                                   type=dns.SRV,
+                                                   cls=dns.IN, ttl=60,
+                                                   payload=payload)]
+        self.connector.connect()
+
+        self.assertNotIdentical(None, self.factory.reason)
+        self.factory.reason.trap(DNSLookupError)
+        self.assertEqual(self.reactor.tcpClients, [])
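
The SRVConnector tests above pin down a fallback policy: use the SRV target and
port when a usable record comes back; fall back to connecting to the domain
itself (with the service name, or an explicit defaultPort) on NXDOMAIN, empty,
or malformed results; and treat a lone record whose target is "." as the
service being explicitly unavailable.  A minimal sketch of that policy in plain
Python, with no Twisted dependencies (choose_endpoint and its tuple inputs are
hypothetical, invented for illustration; this is not the
twisted.names.srvconnect implementation):

# Standalone sketch of the fallback behaviour exercised by the tests above.
def choose_endpoint(domain, service, srv_records, default_port=None):
    """Pick a (host, port) pair from SRV results, falling back when needed."""
    if not srv_records:
        # NXDOMAIN, an empty answer, or no usable records: connect to the
        # domain itself, using the service name unless a default port is set.
        return (domain, default_port if default_port is not None else service)
    target, port = srv_records[0]
    if target == '.':
        # RFC 2782: a record whose target is "." means the service is
        # decidedly not available at this domain.
        raise LookupError("service %r not provided by %r" % (service, domain))
    return (target, port)

print(choose_endpoint('example.org', 'xmpp-server', [('host.example.org', 6269)]))
# ('host.example.org', 6269)
print(choose_endpoint('example.org', 'xmpp-server', None))
# ('example.org', 'xmpp-server')
print(choose_endpoint('example.org', 'thisbetternotexist', None, default_port=5222))
# ('example.org', 5222)
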
diff --git a/ThirdParty/Twisted/twisted/names/test/test_tap.py b/ThirdParty/Twisted/twisted/names/test/test_tap.py
new file mode 100644
index 0000000..0858d26
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/test/test_tap.py
@@ -0,0 +1,99 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.names.tap}.
+"""
+
+from twisted.trial.unittest import TestCase
+from twisted.python.usage import UsageError
+from twisted.names.tap import Options, _buildResolvers
+from twisted.names.dns import PORT
+from twisted.names.secondary import SecondaryAuthorityService
+from twisted.names.resolve import ResolverChain
+from twisted.names.client import Resolver
+
+class OptionsTests(TestCase):
+    """
+    Tests for L{Options}, defining how command line arguments for the DNS server
+    are parsed.
+    """
+    def test_malformedSecondary(self):
+        """
+        If the value supplied for an I{--secondary} option does not provide a
+        server IP address, optional port number, and domain name,
+        L{Options.parseOptions} raises L{UsageError}.
+        """
+        options = Options()
+        self.assertRaises(
+            UsageError, options.parseOptions, ['--secondary', ''])
+        self.assertRaises(
+            UsageError, options.parseOptions, ['--secondary', '1.2.3.4'])
+        self.assertRaises(
+            UsageError, options.parseOptions, ['--secondary', '1.2.3.4:hello'])
+        self.assertRaises(
+            UsageError, options.parseOptions,
+            ['--secondary', '1.2.3.4:hello/example.com'])
+
+
+    def test_secondary(self):
+        """
+        An argument of the form C{"ip/domain"} is parsed by L{Options} for the
+        I{--secondary} option and added to its list of secondaries, using the
+        default DNS port number.
+        """
+        options = Options()
+        options.parseOptions(['--secondary', '1.2.3.4/example.com'])
+        self.assertEqual(
+            [(('1.2.3.4', PORT), ['example.com'])], options.secondaries)
+
+
+    def test_secondaryExplicitPort(self):
+        """
+        An argument of the form C{"ip:port/domain"} can be used to specify an
+        alternate port number for the server for which to act as a secondary.
+        """
+        options = Options()
+        options.parseOptions(['--secondary', '1.2.3.4:5353/example.com'])
+        self.assertEqual(
+            [(('1.2.3.4', 5353), ['example.com'])], options.secondaries)
+
+
+    def test_secondaryAuthorityServices(self):
+        """
+        After parsing I{--secondary} options, L{Options} constructs a
+        L{SecondaryAuthorityService} instance for each configured secondary.
+        """
+        options = Options()
+        options.parseOptions(['--secondary', '1.2.3.4:5353/example.com',
+                              '--secondary', '1.2.3.5:5354/example.com'])
+        self.assertEqual(len(options.svcs), 2)
+        secondary = options.svcs[0]
+        self.assertIsInstance(options.svcs[0], SecondaryAuthorityService)
+        self.assertEqual(secondary.primary, '1.2.3.4')
+        self.assertEqual(secondary._port, 5353)
+        secondary = options.svcs[1]
+        self.assertIsInstance(options.svcs[1], SecondaryAuthorityService)
+        self.assertEqual(secondary.primary, '1.2.3.5')
+        self.assertEqual(secondary._port, 5354)
+
+
+    def test_recursiveConfiguration(self):
+        """
+        Recursive DNS lookups, if enabled, should be a last-resort option.
+        Any other lookup method (cache, local lookup, etc.) should take
+        precedence over recursive lookups.
+        """
+        options = Options()
+        options.parseOptions(['--hosts-file', 'hosts.txt', '--recursive'])
+        ca, cl = _buildResolvers(options)
+
+        # Extra cleanup, necessary on POSIX because client.Resolver doesn't know
+        # when to stop parsing resolv.conf.  See #NNN for improving this.
+        for x in cl:
+            if isinstance(x, ResolverChain):
+                recurser = x.resolvers[-1]
+                if isinstance(recurser, Resolver):
+                    recurser._parseCall.cancel()
+
+        self.assertIsInstance(cl[-1], ResolverChain)
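
test_secondary and test_secondaryExplicitPort above fix the accepted shape of a
--secondary value: "ip/domain" using the standard DNS port, or "ip:port/domain"
with an explicit one, with anything else rejected as a UsageError.  A rough
standalone sketch of that parsing in plain Python (parse_secondary is a
hypothetical helper; the real logic lives in twisted.names.tap.Options, and the
default port of 53 matches twisted.names.dns.PORT):

# Standalone sketch of parsing an "ip[:port]/domain" secondary description.
DEFAULT_DNS_PORT = 53

def parse_secondary(value):
    """Parse "ip[:port]/domain" into ((ip, port), [domain])."""
    try:
        address, domain = value.split('/', 1)
    except ValueError:
        raise ValueError("%r is not of the form ip[:port]/domain" % (value,))
    if ':' in address:
        ip, port = address.split(':', 1)
        if not port.isdigit():
            raise ValueError("%r has a non-numeric port" % (value,))
        port = int(port)
    else:
        ip, port = address, DEFAULT_DNS_PORT
    if not ip or not domain:
        raise ValueError("%r is missing an address or a domain" % (value,))
    return ((ip, port), [domain])

print(parse_secondary('1.2.3.4/example.com'))       # (('1.2.3.4', 53), ['example.com'])
print(parse_secondary('1.2.3.4:5353/example.com'))  # (('1.2.3.4', 5353), ['example.com'])
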
diff --git a/ThirdParty/Twisted/twisted/names/topfiles/NEWS b/ThirdParty/Twisted/twisted/names/topfiles/NEWS
new file mode 100644
index 0000000..60e36e7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/topfiles/NEWS
@@ -0,0 +1,258 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted Names 12.3.0 (2012-12-20)
+=================================
+
+Deprecations and Removals
+-------------------------
+ - The `protocol` attribute of twisted.names.client.Resolver,
+   deprecated since Twisted 8.2, has been removed. (#6045)
+ - twisted.names.hosts.Resolver is no longer a
+   `twisted.persisted.styles.Versioned` subclass. (#6092)
+
+Other
+-----
+ - #5594, #6056, #6057, #6058, #6059, #6093
+
+
+Twisted Names 12.2.0 (2012-08-26)
+=================================
+
+Features
+--------
+ - twisted.names.srvconnect.SRVConnector now takes a default port to
+   use when SRV lookup fails. (#3456)
+
+Other
+-----
+ - #5647
+
+
+Twisted Names 12.1.0 (2012-06-02)
+=================================
+
+Features
+--------
+ - "twistd dns" secondary server functionality and
+   twisted.names.secondary now support retrieving zone information
+   from a master running on a non-standard DNS port. (#5468)
+
+Bugfixes
+--------
+ - twisted.names.dns.DNSProtocol instances no longer throw an
+   exception when disconnecting. (#5471)
+ - twisted.names.tap.makeService (thus also "twistd dns") now makes a
+   DNS server which gives precedence to the hosts file from its
+   configuration over the remote DNS servers from its configuration.
+   (#5524)
+ - twisted.names.cache.CacheResolver now makes sure TTLs on returned
+   results are never negative. (#5579)
+ - twisted.names.cache.CacheResolver entries added via the initializer
+   are now timed out correctly. (#5638)
+
+Improved Documentation
+----------------------
+ - The examples now contain instructions on how to run them and
+   descriptions in the examples index. (#5588)
+
+Deprecations and Removals
+-------------------------
+ - The deprecated twisted.names.dns.Record_mx.exchange attribute was
+   removed. (#4549)
+
+
+Twisted Names 12.0.0 (2012-02-10)
+=================================
+
+Bugfixes
+--------
+ - twisted.names.dns.Message now sets the `auth` flag on RRHeader
+   instances it creates to reflect the authority of the message
+   itself. (#5421)
+
+
+Twisted Names 11.1.0 (2011-11-15)
+=================================
+
+Features
+--------
+ - twisted.names.dns.Message now parses records of unknown type into
+   instances of a new `UnknownType` class. (#4603)
+
+Bugfixes
+--------
+ - twisted.names.dns.Name now detects loops in names it is decoding
+   and raises an exception.  Previously it would follow the loop
+   forever, allowing a remote denial of service attack against any
+   twisted.names client or server. (#5064)
+ - twisted.names.hosts.Resolver now supports IPv6 addresses; its
+   lookupAddress method now filters them out and its lookupIPV6Address
+   method is now implemented. (#5098)
+
+
+Twisted Names 11.0.0 (2011-04-01)
+=================================
+
+No significant changes have been made for this release.
+
+
+Twisted Names 10.2.0 (2010-11-29)
+=================================
+
+Features
+--------
+ - twisted.names.server can now serve SPF resource records using
+   twisted.names.dns.Record_SPF.  twisted.names.client can query for
+   them using lookupSenderPolicy.   (#3928)
+
+Bugfixes
+--------
+ - twisted.names.common.extractRecords doesn't try to close the
+   transport anymore in case of recursion, as it's done by the
+   Resolver itself now. (#3998)
+
+Improved Documentation
+----------------------
+ - Tidied up the Twisted Names documentation for easier conversion.
+   (#4573)
+
+
+Twisted Names 10.1.0 (2010-06-27)
+=================================
+
+Features
+--------
+ - twisted.names.dns.Message now uses a specially constructed
+   dictionary for looking up record types.  This yields a significant
+   performance improvement on PyPy. (#4283)
+
+
+Twisted Names 10.0.0 (2010-03-01)
+=================================
+
+Bugfixes
+--------
+ - twisted.names.root.Resolver no longer leaks UDP sockets while
+   resolving names. (#970)
+
+Deprecations and Removals
+-------------------------
+ - Several top-level functions in twisted.names.root are now
+   deprecated. (#970)
+
+Other
+-----
+ - #4066
+
+
+Twisted Names 9.0.0 (2009-11-24)
+================================
+
+Deprecations and Removals
+-------------------------
+ - client.ThreadedResolver is deprecated in favor of
+   twisted.internet.base.ThreadedResolver (#3710)
+
+Other
+-----
+ - #3540, #3560, #3712, #3750, #3990
+
+
+Names 8.2.0 (2008-12-16)
+========================
+
+Features
+--------
+ - The NAPTR record type is now supported (#2276)
+
+Fixes
+-----
+ - Make client.Resolver less vulnerable to the Birthday Paradox attack by
+   avoiding sending duplicate queries when it's not necessary (#3347)
+ - client.Resolver now uses a random source port for each DNS request (#3342)
+ - client.Resolver now uses a full 16 bits of randomness for message IDs,
+   instead of 10 which it previously used (#3342)
+ - All record types now have value-based equality and a string representation
+   (#2935)
+
+Other
+-----
+ - #1622, #3424
+
+
+8.1.0 (2008-05-18)
+==================
+
+Fixes
+-----
+ - The deprecated mktap API is no longer used (#3127)
+
+
+8.0.0 (2008-03-17)
+==================
+
+Fixes
+-----
+
+ - Refactor DNSDatagramProtocol and DNSProtocol to use same base class (#2414)
+ - Change Resolver to query specified nameservers in specified order, instead
+   of reverse order. (#2290)
+ - Make SRVConnector work with bad results and NXDOMAIN responses.
+   (#1908, #2777)
+ - Handle write errors happening in dns queries, to have correct deferred
+   failures. (#2492)
+ - Fix the value of OP_NOTIFY and add a definition for OP_UPDATE. (#2945)
+
+Misc
+----
+ - #2685, #2936, #2581, #2847
+
+
+0.4.0 (2007-01-06)
+==================
+
+Features
+--------
+
+ - In the twisted.names client, DNS responses which represent errors
+   are now translated to informative exception objects, rather than
+   empty lists. This means that client requests which fail will now
+   errback their Deferreds (#2248)
+
+Fixes
+-----
+ - A major DoS vulnerability in the UDP DNS server was fixed (#1708)
+
+Misc
+----
+ - #1799, #1636, #2149, #2181
+
+
+0.3.0 (2006-05-21)
+==================
+
+Features
+--------
+ - Some docstring improvements
+
+Fixes
+-----
+ - Fix a problem where the response for the first query with a
+   newly-created Resolver object would be dropped. (#1447)
+ - Misc: #1581, #1583
+
+
+0.2.0
+=====
+ - Fix occasional TCP connection leak in gethostbyname()
+ - Fix TCP connection leak in recursive lookups
+ - Remove deprecated use of Deferred.setTimeout
+ - Improved test coverage for zone transfers
+
+0.1.0
+=====
+ - Fix TCP connection leak in zone transfers
+ - Handle empty or missing resolv.conf as if 127.0.0.1 was specified
+ - Don't use blocking kernel entropy sources
+ - Retry logic now properly tries all specified servers.
diff --git a/ThirdParty/Twisted/twisted/names/topfiles/README b/ThirdParty/Twisted/twisted/names/topfiles/README
new file mode 100644
index 0000000..261a178
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/topfiles/README
@@ -0,0 +1,3 @@
+Twisted Names 12.3.0
+
+Twisted Names depends on Twisted Core.
diff --git a/ThirdParty/Twisted/twisted/names/topfiles/setup.py b/ThirdParty/Twisted/twisted/names/topfiles/setup.py
new file mode 100644
index 0000000..9a694c9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/names/topfiles/setup.py
@@ -0,0 +1,50 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+try:
+    from twisted.python import dist
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+if __name__ == '__main__':
+    if sys.version_info[:2] >= (2, 4):
+        extraMeta = dict(
+            classifiers=[
+                "Development Status :: 4 - Beta",
+                "Environment :: No Input/Output (Daemon)",
+                "Intended Audience :: Developers",
+                "License :: OSI Approved :: MIT License",
+                "Programming Language :: Python",
+                "Topic :: Internet :: Name Service (DNS)",
+                "Topic :: Software Development :: Libraries :: Python Modules",
+            ])
+    else:
+        extraMeta = {}
+
+    dist.setup(
+        twisted_subproject="names",
+        # metadata
+        name="Twisted Names",
+        description="A Twisted DNS implementation.",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python at twistedmatrix.com",
+        maintainer="Jp Calderone",
+        url="http://twistedmatrix.com/trac/wiki/TwistedNames",
+        license="MIT",
+        long_description="""\
+Twisted Names is both a domain name server and a client
+resolver library. Twisted Names comes with an "out of the box"
+nameserver which can read most BIND-syntax zone files as well as a
+simple Python-based configuration format. Twisted Names can act as an
+authoritative server, perform zone transfers from a master to act as a
+secondary, act as a caching nameserver, or any combination of
+these. Twisted Names' client resolver library provides functions to
+query for all commonly used record types as well as a replacement for
+the blocking gethostbyname() function provided by the Python stdlib
+socket module.
+""",
+        **extraMeta)
diff --git a/ThirdParty/Twisted/twisted/news/__init__.py b/ThirdParty/Twisted/twisted/news/__init__.py
new file mode 100644
index 0000000..d70440c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/__init__.py
@@ -0,0 +1,11 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+
+Twisted News: an NNTP-based news service.
+
+"""
+
+from twisted.news._version import version
+__version__ = version.short()
diff --git a/ThirdParty/Twisted/twisted/news/_version.py b/ThirdParty/Twisted/twisted/news/_version.py
new file mode 100644
index 0000000..e7b219e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.news', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/news/database.py b/ThirdParty/Twisted/twisted/news/database.py
new file mode 100644
index 0000000..137736a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/database.py
@@ -0,0 +1,1051 @@
+# -*- test-case-name: twisted.news.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+News server backend implementations.
+"""
+
+import getpass, pickle, time, socket
+import os
+import StringIO
+from email.Message import Message
+from email.Generator import Generator
+from zope.interface import implements, Interface
+
+from twisted.news.nntp import NNTPError
+from twisted.mail import smtp
+from twisted.internet import defer
+from twisted.enterprise import adbapi
+from twisted.persisted import dirdbm
+from twisted.python.hashlib import md5
+
+
+
+ERR_NOGROUP, ERR_NOARTICLE = range(2, 4)  # XXX - put NNTP values here (I guess?)
+
+OVERVIEW_FMT = [
+    'Subject', 'From', 'Date', 'Message-ID', 'References',
+    'Bytes', 'Lines', 'Xref'
+]
+
+def hexdigest(md5): #XXX: argh. 1.5.2 doesn't have this.
+    return ''.join(map(lambda x: hex(ord(x))[2:], md5.digest()))
+
+class Article:
+    def __init__(self, head, body):
+        self.body = body
+        self.headers = {}
+        header = None
+        for line in head.split('\r\n'):
+            if line[0] in ' \t':
+                i = list(self.headers[header])
+                i[1] += '\r\n' + line
+            else:
+                i = line.split(': ', 1)
+                header = i[0].lower()
+            self.headers[header] = tuple(i)
+
+        if not self.getHeader('Message-ID'):
+            s = str(time.time()) + self.body
+            id = hexdigest(md5(s)) + '@' + socket.gethostname()
+            self.putHeader('Message-ID', '<%s>' % id)
+
+        if not self.getHeader('Bytes'):
+            self.putHeader('Bytes', str(len(self.body)))
+
+        if not self.getHeader('Lines'):
+            self.putHeader('Lines', str(self.body.count('\n')))
+
+        if not self.getHeader('Date'):
+            self.putHeader('Date', time.ctime(time.time()))
+
+
+    def getHeader(self, header):
+        h = header.lower()
+        if h in self.headers:
+            return self.headers[h][1]
+        else:
+            return ''
+
+
+    def putHeader(self, header, value):
+        self.headers[header.lower()] = (header, value)
+
+
+    def textHeaders(self):
+        headers = []
+        for i in self.headers.values():
+            headers.append('%s: %s' % i)
+        return '\r\n'.join(headers) + '\r\n'
+
+    def overview(self):
+        xover = []
+        for i in OVERVIEW_FMT:
+            xover.append(self.getHeader(i))
+        return xover
+
+
+class NewsServerError(Exception):
+    pass
+
+
+class INewsStorage(Interface):
+    """
+    An interface for storing and requesting news articles
+    """
+
+    def listRequest():
+        """
+        Returns a deferred whose callback will be passed a list of 4-tuples
+        containing (name, max index, min index, flags) for each news group
+        """
+
+
+    def subscriptionRequest():
+        """
+        Returns a deferred whose callback will be passed the list of
+        recommended subscription groups for new server users
+        """
+
+
+    def postRequest(message):
+        """
+        Returns a deferred whose callback will be invoked if 'message'
+        is successfully posted to one or more specified groups and
+        whose errback will be invoked otherwise.
+        """
+
+
+    def overviewRequest():
+        """
+        Returns a deferred whose callback will be passed a list of
+        headers describing this server's overview format.
+        """
+
+
+    def xoverRequest(group, low, high):
+        """
+        Returns a deferred whose callback will be passed a list of xover
+        headers for the given group over the given range.  If low is None,
+        the range starts at the first article.  If high is None, the range
+        ends at the last article.
+        """
+
+
+    def xhdrRequest(group, low, high, header):
+        """
+        Returns a deferred whose callback will be passed a list of XHDR data
+        for the given group over the given range.  If low is None,
+        the range starts at the first article.  If high is None, the range
+        ends at the last article.
+        """
+
+
+    def listGroupRequest(group):
+        """
+        Returns a deferred whose callback will be passed a two-tuple of
+        (group name, [article indices])
+        """
+
+
+    def groupRequest(group):
+        """
+        Returns a deferred whose callback will be passed a five-tuple of
+        (group name, article count, highest index, lowest index, group flags)
+        """
+
+
+    def articleExistsRequest(id):
+        """
+        Returns a deferred whose callback will be passed a true value
+        if a message with the specified Message-ID exists in the database
+        and a false value otherwise.
+        """
+
+
+    def articleRequest(group, index, id = None):
+        """
+        Returns a deferred whose callback will be passed a file-like object
+        containing the full article text (headers and body) for the article
+        of the specified index in the specified group, and whose errback
+        will be invoked if the article or group does not exist.  If id is
+        not None, index is ignored and the article with the given Message-ID
+        will be returned instead, along with its index in the specified
+        group.
+        """
+
+
+    def headRequest(group, index):
+        """
+        Returns a deferred whose callback will be passed the header for
+        the article of the specified index in the specified group, and
+        whose errback will be invoked if the article or group does not
+        exist.
+        """
+
+
+    def bodyRequest(group, index):
+        """
+        Returns a deferred whose callback will be passed the body for
+        the article of the specified index in the specified group, and
+        whose errback will be invoked if the article or group does not
+        exist.
+        """
+
+class NewsStorage:
+    """
+    Backwards compatibility class -- There is no reason to inherit from this,
+    just implement INewsStorage instead.
+    """
+    def listRequest(self):
+        raise NotImplementedError()
+    def subscriptionRequest(self):
+        raise NotImplementedError()
+    def postRequest(self, message):
+        raise NotImplementedError()
+    def overviewRequest(self):
+        return defer.succeed(OVERVIEW_FMT)
+    def xoverRequest(self, group, low, high):
+        raise NotImplementedError()
+    def xhdrRequest(self, group, low, high, header):
+        raise NotImplementedError()
+    def listGroupRequest(self, group):
+        raise NotImplementedError()
+    def groupRequest(self, group):
+        raise NotImplementedError()
+    def articleExistsRequest(self, id):
+        raise NotImplementedError()
+    def articleRequest(self, group, index, id = None):
+        raise NotImplementedError()
+    def headRequest(self, group, index):
+        raise NotImplementedError()
+    def bodyRequest(self, group, index):
+        raise NotImplementedError()
+
+
+
+class _ModerationMixin:
+    """
+    Storage implementations can inherit from this class to get the easy-to-use
+    C{notifyModerators} method which will take care of sending messages which
+    require moderation to a list of moderators.
+    """
+    sendmail = staticmethod(smtp.sendmail)
+
+    def notifyModerators(self, moderators, article):
+        """
+        Send an article to a list of group moderators to be moderated.
+
+        @param moderators: A C{list} of C{str} giving RFC 2821 addresses of
+            group moderators to notify.
+
+        @param article: The article requiring moderation.
+        @type article: L{Article}
+
+        @return: A L{Deferred} which fires with the result of sending the email.
+        """
+        # Moderated postings go through as long as they have an Approved
+        # header, regardless of what the value is
+        group = article.getHeader('Newsgroups')
+        subject = article.getHeader('Subject')
+
+        if self._sender is None:
+            # This case should really go away.  This isn't a good default.
+            sender = 'twisted-news@' + socket.gethostname()
+        else:
+            sender = self._sender
+
+        msg = Message()
+        msg['Message-ID'] = smtp.messageid()
+        msg['From'] = sender
+        msg['To'] = ', '.join(moderators)
+        msg['Subject'] = 'Moderate new %s message: %s' % (group, subject)
+        msg['Content-Type'] = 'message/rfc822'
+
+        payload = Message()
+        for header, value in article.headers.values():
+            payload.add_header(header, value)
+        payload.set_payload(article.body)
+
+        msg.attach(payload)
+
+        out = StringIO.StringIO()
+        gen = Generator(out, False)
+        gen.flatten(msg)
+        msg = out.getvalue()
+
+        return self.sendmail(self._mailhost, sender, moderators, msg)
+
+
+
+class PickleStorage(_ModerationMixin):
+    """
+    A trivial NewsStorage implementation using pickles
+
+    Contains numerous flaws and is generally unsuitable for any
+    real applications.  Consider yourself warned!
+    """
+
+    implements(INewsStorage)
+
+    sharedDBs = {}
+
+    def __init__(self, filename, groups=None, moderators=(),
+                 mailhost=None, sender=None):
+        """
+        @param mailhost: A C{str} giving the mail exchange host which will
+            accept moderation emails from this server.  Must accept emails
+            destined for any address specified as a moderator.
+
+        @param sender: A C{str} giving the address which will be used as the
+            sender of any moderation email generated by this server.
+        """
+        self.datafile = filename
+        self.load(filename, groups, moderators)
+        self._mailhost = mailhost
+        self._sender = sender
+
+
+    def getModerators(self, groups):
+        # first see if any groups are moderated.  if so, nothing gets posted,
+        # but the whole message gets forwarded to the moderator address
+        moderators = []
+        for group in groups:
+            moderators.append(self.db['moderators'].get(group, None))
+        return filter(None, moderators)
+
+
+    def listRequest(self):
+        "Returns a list of 4-tuples: (name, max index, min index, flags)"
+        l = self.db['groups']
+        r = []
+        for i in l:
+            if len(self.db[i].keys()):
+                low = min(self.db[i].keys())
+                high = max(self.db[i].keys()) + 1
+            else:
+                low = high = 0
+            if self.db['moderators'].has_key(i):
+                flags = 'm'
+            else:
+                flags = 'y'
+            r.append((i, high, low, flags))
+        return defer.succeed(r)
+
+    def subscriptionRequest(self):
+        return defer.succeed(['alt.test'])
+
+    def postRequest(self, message):
+        cleave = message.find('\r\n\r\n')
+        headers, article = message[:cleave], message[cleave + 4:]
+
+        a = Article(headers, article)
+        groups = a.getHeader('Newsgroups').split()
+        xref = []
+
+        # Check moderated status
+        moderators = self.getModerators(groups)
+        if moderators and not a.getHeader('Approved'):
+            return self.notifyModerators(moderators, a)
+
+        for group in groups:
+            if group in self.db:
+                if len(self.db[group].keys()):
+                    index = max(self.db[group].keys()) + 1
+                else:
+                    index = 1
+                xref.append((group, str(index)))
+                self.db[group][index] = a
+
+        if len(xref) == 0:
+            return defer.fail(None)
+
+        a.putHeader('Xref', '%s %s' % (
+            socket.gethostname().split()[0],
+            ' '.join(map(lambda x: ':'.join(x), xref))
+        ))
+
+        self.flush()
+        return defer.succeed(None)
+
+
+    def overviewRequest(self):
+        return defer.succeed(OVERVIEW_FMT)
+
+
+    def xoverRequest(self, group, low, high):
+        if not self.db.has_key(group):
+            return defer.succeed([])
+        r = []
+        for i in self.db[group].keys():
+            if (low is None or i >= low) and (high is None or i <= high):
+                r.append([str(i)] + self.db[group][i].overview())
+        return defer.succeed(r)
+
+
+    def xhdrRequest(self, group, low, high, header):
+        if not self.db.has_key(group):
+            return defer.succeed([])
+        r = []
+        for i in self.db[group].keys():
+            if (low is None or i >= low) and (high is None or i <= high):
+                r.append((i, self.db[group][i].getHeader(header)))
+        return defer.succeed(r)
+
+
+    def listGroupRequest(self, group):
+        if self.db.has_key(group):
+            return defer.succeed((group, self.db[group].keys()))
+        else:
+            return defer.fail(None)
+
+    def groupRequest(self, group):
+        if self.db.has_key(group):
+            if len(self.db[group].keys()):
+                num = len(self.db[group].keys())
+                low = min(self.db[group].keys())
+                high = max(self.db[group].keys())
+            else:
+                num = low = high = 0
+            flags = 'y'
+            return defer.succeed((group, num, high, low, flags))
+        else:
+            return defer.fail(ERR_NOGROUP)
+
+
+    def articleExistsRequest(self, id):
+        for group in self.db['groups']:
+            for a in self.db[group].values():
+                if a.getHeader('Message-ID') == id:
+                    return defer.succeed(1)
+        return defer.succeed(0)
+
+
+    def articleRequest(self, group, index, id = None):
+        if id is not None:
+            raise NotImplementedError
+
+        if self.db.has_key(group):
+            if self.db[group].has_key(index):
+                a = self.db[group][index]
+                return defer.succeed((
+                    index,
+                    a.getHeader('Message-ID'),
+                    StringIO.StringIO(a.textHeaders() + '\r\n' + a.body)
+                ))
+            else:
+                return defer.fail(ERR_NOARTICLE)
+        else:
+            return defer.fail(ERR_NOGROUP)
+
+
+    def headRequest(self, group, index):
+        if self.db.has_key(group):
+            if self.db[group].has_key(index):
+                a = self.db[group][index]
+                return defer.succeed((index, a.getHeader('Message-ID'), a.textHeaders()))
+            else:
+                return defer.fail(ERR_NOARTICLE)
+        else:
+            return defer.fail(ERR_NOGROUP)
+
+
+    def bodyRequest(self, group, index):
+        if self.db.has_key(group):
+            if self.db[group].has_key(index):
+                a = self.db[group][index]
+                return defer.succeed((index, a.getHeader('Message-ID'), StringIO.StringIO(a.body)))
+            else:
+                return defer.fail(ERR_NOARTICLE)
+        else:
+            return defer.fail(ERR_NOGROUP)
+
+
+    def flush(self):
+        f = open(self.datafile, 'w')
+        pickle.dump(self.db, f)
+        f.close()
+
+
+    def load(self, filename, groups = None, moderators = ()):
+        if filename in PickleStorage.sharedDBs:
+            self.db = PickleStorage.sharedDBs[filename]
+        else:
+            try:
+                self.db = pickle.load(open(filename))
+                PickleStorage.sharedDBs[filename] = self.db
+            except IOError:
+                self.db = PickleStorage.sharedDBs[filename] = {}
+                self.db['groups'] = groups
+                if groups is not None:
+                    for i in groups:
+                        self.db[i] = {}
+                self.db['moderators'] = dict(moderators)
+                self.flush()
+
+
+class Group:
+    name = None
+    flags = ''
+    minArticle = 1
+    maxArticle = 0
+    articles = None
+
+    def __init__(self, name, flags = 'y'):
+        self.name = name
+        self.flags = flags
+        self.articles = {}
+
+
+class NewsShelf(_ModerationMixin):
+    """
+    A NewsStorage implementation using Twisted's dirdbm persistence module.
+    """
+
+    implements(INewsStorage)
+
+    def __init__(self, mailhost, path, sender=None):
+        """
+        @param mailhost: A C{str} giving the mail exchange host which will
+            accept moderation emails from this server.  Must accept emails
+            destined for any address specified as a moderator.
+
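+        @param path: A C{str} giving the path of the directory in which the
+            shelf's backing dirdbm databases will be created.
+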
+        @param sender: A C{str} giving the address which will be used as the
+            sender of any moderation email generated by this server.
+        """
+        self.path = path
+        self._mailhost = self.mailhost = mailhost
+        self._sender = sender
+
+        if not os.path.exists(path):
+            os.mkdir(path)
+
+        self.dbm = dirdbm.Shelf(os.path.join(path, "newsshelf"))
+        if not len(self.dbm.keys()):
+            self.initialize()
+
+
+    def initialize(self):
+        # A dictionary of group name/Group instance items
+        self.dbm['groups'] = dirdbm.Shelf(os.path.join(self.path, 'groups'))
+
+        # A dictionary of group name/email address
+        self.dbm['moderators'] = dirdbm.Shelf(os.path.join(self.path, 'moderators'))
+
+        # A list of group names
+        self.dbm['subscriptions'] = []
+
+        # A dictionary of MessageID strings/xref lists
+        self.dbm['Message-IDs'] = dirdbm.Shelf(os.path.join(self.path, 'Message-IDs'))
+
+
+    def addGroup(self, name, flags):
+        self.dbm['groups'][name] = Group(name, flags)
+
+
+    def addSubscription(self, name):
+        self.dbm['subscriptions'] = self.dbm['subscriptions'] + [name]
+
+
+    def addModerator(self, group, email):
+        self.dbm['moderators'][group] = email
+
+
+    def listRequest(self):
+        result = []
+        for g in self.dbm['groups'].values():
+            result.append((g.name, g.maxArticle, g.minArticle, g.flags))
+        return defer.succeed(result)
+
+
+    def subscriptionRequest(self):
+        return defer.succeed(self.dbm['subscriptions'])
+
+
+    def getModerator(self, groups):
+        # first see if any groups are moderated.  if so, nothing gets posted,
+        # but the whole message gets forwarded to the moderator address
+        for group in groups:
+            try:
+                return self.dbm['moderators'][group]
+            except KeyError:
+                pass
+        return None
+
+
+    def notifyModerator(self, moderator, article):
+        """
+        Notify a single moderator about an article requiring moderation.
+
+        C{notifyModerators} should be preferred.
+        """
+        return self.notifyModerators([moderator], article)
+
+
+    def postRequest(self, message):
+        cleave = message.find('\r\n\r\n')
+        headers, article = message[:cleave], message[cleave + 4:]
+
+        article = Article(headers, article)
+        groups = article.getHeader('Newsgroups').split()
+        xref = []
+
+        # Check for moderated status
+        moderator = self.getModerator(groups)
+        if moderator and not article.getHeader('Approved'):
+            return self.notifyModerators([moderator], article)
+
+
+        for group in groups:
+            try:
+                g = self.dbm['groups'][group]
+            except KeyError:
+                pass
+            else:
+                index = g.maxArticle + 1
+                g.maxArticle += 1
+                g.articles[index] = article
+                xref.append((group, str(index)))
+                self.dbm['groups'][group] = g
+
+        if not xref:
+            return defer.fail(NewsServerError("No groups carried: " + ' '.join(groups)))
+
+        article.putHeader('Xref', '%s %s' % (socket.gethostname().split()[0], ' '.join(map(lambda x: ':'.join(x), xref))))
+        self.dbm['Message-IDs'][article.getHeader('Message-ID')] = xref
+        return defer.succeed(None)
+
+
+    def overviewRequest(self):
+        return defer.succeed(OVERVIEW_FMT)
+
+
+    def xoverRequest(self, group, low, high):
+        if not self.dbm['groups'].has_key(group):
+            return defer.succeed([])
+
+        if low is None:
+            low = 0
+        if high is None:
+            high = self.dbm['groups'][group].maxArticle
+        r = []
+        for i in range(low, high + 1):
+            if self.dbm['groups'][group].articles.has_key(i):
+                r.append([str(i)] + self.dbm['groups'][group].articles[i].overview())
+        return defer.succeed(r)
+
+
+    def xhdrRequest(self, group, low, high, header):
+        if group not in self.dbm['groups']:
+            return defer.succeed([])
+
+        if low is None:
+            low = 0
+        if high is None:
+            high = self.dbm['groups'][group].maxArticle
+        r = []
+        for i in range(low, high + 1):
+            if i in self.dbm['groups'][group].articles:
+                r.append((i, self.dbm['groups'][group].articles[i].getHeader(header)))
+        return defer.succeed(r)
+
+
+    def listGroupRequest(self, group):
+        if self.dbm['groups'].has_key(group):
+            return defer.succeed((group, self.dbm['groups'][group].articles.keys()))
+        return defer.fail(NewsServerError("No such group: " + group))
+
+
+    def groupRequest(self, group):
+        try:
+            g = self.dbm['groups'][group]
+        except KeyError:
+            return defer.fail(NewsServerError("No such group: " + group))
+        else:
+            flags = g.flags
+            low = g.minArticle
+            high = g.maxArticle
+            num = high - low + 1
+            return defer.succeed((group, num, high, low, flags))
+
+
+    def articleExistsRequest(self, id):
+        return defer.succeed(id in self.dbm['Message-IDs'])
+
+
+    def articleRequest(self, group, index, id = None):
+        if id is not None:
+            try:
+                xref = self.dbm['Message-IDs'][id]
+            except KeyError:
+                return defer.fail(NewsServerError("No such article: " + id))
+            else:
+                group, index = xref[0]
+                index = int(index)
+
+        try:
+            a = self.dbm['groups'][group].articles[index]
+        except KeyError:
+            return defer.fail(NewsServerError("No such group: " + group))
+        else:
+            return defer.succeed((
+                index,
+                a.getHeader('Message-ID'),
+                StringIO.StringIO(a.textHeaders() + '\r\n' + a.body)
+            ))
+
+
+    def headRequest(self, group, index, id = None):
+        if id is not None:
+            try:
+                xref = self.dbm['Message-IDs'][id]
+            except KeyError:
+                return defer.fail(NewsServerError("No such article: " + id))
+            else:
+                group, index = xref[0]
+                index = int(index)
+
+        try:
+            a = self.dbm['groups'][group].articles[index]
+        except KeyError:
+            return defer.fail(NewsServerError("No such group: " + group))
+        else:
+            return defer.succeed((index, a.getHeader('Message-ID'), a.textHeaders()))
+
+
+    def bodyRequest(self, group, index, id = None):
+        if id is not None:
+            try:
+                xref = self.dbm['Message-IDs'][id]
+            except KeyError:
+                return defer.fail(NewsServerError("No such article: " + id))
+            else:
+                group, index = xref[0]
+                index = int(index)
+
+        try:
+            a = self.dbm['groups'][group].articles[index]
+        except KeyError:
+            return defer.fail(NewsServerError("No such group: " + group))
+        else:
+            return defer.succeed((index, a.getHeader('Message-ID'), StringIO.StringIO(a.body)))
+
+
+class NewsStorageAugmentation:
+    """
+    A NewsStorage implementation using Twisted's asynchronous DB-API
+    """
+
+    implements(INewsStorage)
+
+    schema = """
+
+    CREATE TABLE groups (
+        group_id      SERIAL,
+        name          VARCHAR(80) NOT NULL,
+
+        flags         INTEGER DEFAULT 0 NOT NULL
+    );
+
+    CREATE UNIQUE INDEX group_id_index ON groups (group_id);
+    CREATE UNIQUE INDEX name_id_index ON groups (name);
+
+    CREATE TABLE articles (
+        article_id    SERIAL,
+        message_id    TEXT,
+
+        header        TEXT,
+        body          TEXT
+    );
+
+    CREATE UNIQUE INDEX article_id_index ON articles (article_id);
+    CREATE UNIQUE INDEX article_message_index ON articles (message_id);
+
+    CREATE TABLE postings (
+        group_id      INTEGER,
+        article_id    INTEGER,
+        article_index INTEGER NOT NULL
+    );
+
+    CREATE UNIQUE INDEX posting_article_index ON postings (article_id);
+
+    CREATE TABLE subscriptions (
+        group_id    INTEGER
+    );
+
+    CREATE TABLE overview (
+        header      TEXT
+    );
+    """
+
+    def __init__(self, info):
+        self.info = info
+        self.dbpool = adbapi.ConnectionPool(**self.info)
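+        # 'info' is passed straight to adbapi.ConnectionPool, so it should
+        # hold a 'dbapiName' key plus whatever keyword arguments that DB-API
+        # module's connect() accepts.  For example (illustrative values only):
+        #
+        #   NewsStorageAugmentation({
+        #       'dbapiName': 'pyPgSQL.PgSQL', 'host': 'localhost',
+        #       'user': 'news', 'database': 'news', 'password': 'secret',
+        #   })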
+
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+        self.info['password'] = getpass.getpass('Database password for %s: ' % (self.info['user'],))
+        self.dbpool = adbapi.ConnectionPool(**self.info)
+        del self.info['password']
+
+
+    def listRequest(self):
+        # COALESCE may not be totally portable
+        # it is shorthand for
+        # CASE WHEN (first parameter) IS NOT NULL then (first parameter) ELSE (second parameter) END
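+        # e.g. COALESCE(MAX(postings.article_index), 0) yields 0 instead of
+        # NULL for groups that have no postings.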
+        sql = """
+            SELECT groups.name,
+                COALESCE(MAX(postings.article_index), 0),
+                COALESCE(MIN(postings.article_index), 0),
+                groups.flags
+            FROM groups LEFT OUTER JOIN postings
+            ON postings.group_id = groups.group_id
+            GROUP BY groups.name, groups.flags
+            ORDER BY groups.name
+        """
+        return self.dbpool.runQuery(sql)
+
+
+    def subscriptionRequest(self):
+        sql = """
+            SELECT groups.name FROM groups,subscriptions WHERE groups.group_id = subscriptions.group_id
+        """
+        return self.dbpool.runQuery(sql)
+
+
+    def postRequest(self, message):
+        cleave = message.find('\r\n\r\n')
+        headers, article = message[:cleave], message[cleave + 4:]
+        article = Article(headers, article)
+        return self.dbpool.runInteraction(self._doPost, article)
+
+
+    def _doPost(self, transaction, article):
+        # Get the group ids
+        groups = article.getHeader('Newsgroups').split()
+        if not len(groups):
+            raise NNTPError('Missing Newsgroups header')
+
+        sql = """
+            SELECT name, group_id FROM groups
+            WHERE name IN (%s)
+        """ % (', '.join([("'%s'" % (adbapi.safe(group),)) for group in groups]),)
+
+        transaction.execute(sql)
+        result = transaction.fetchall()
+
+        # No relevant groups, bye bye!
+        if not len(result):
+            raise NNTPError('None of groups in Newsgroup header carried')
+
+        # Got some groups, now find the indices this article will have in each
+        sql = """
+            SELECT groups.group_id, COALESCE(MAX(postings.article_index), 0) + 1
+            FROM groups LEFT OUTER JOIN postings
+            ON postings.group_id = groups.group_id
+            WHERE groups.group_id IN (%s)
+            GROUP BY groups.group_id
+        """ % (', '.join([("%d" % (id,)) for (group, id) in result]),)
+
+        transaction.execute(sql)
+        indices = transaction.fetchall()
+
+        if not len(indices):
+            raise NNTPError('Internal server error - no indices found')
+
+        # Associate indices with group names
+        gidToName = dict([(b, a) for (a, b) in result])
+        gidToIndex = dict(indices)
+
+        nameIndex = []
+        for i in gidToName:
+            nameIndex.append((gidToName[i], gidToIndex[i]))
+
+        # Build xrefs
+        xrefs = socket.gethostname().split()[0]
+        xrefs = xrefs + ' ' + ' '.join([('%s:%d' % (group, id)) for (group, id) in nameIndex])
+        article.putHeader('Xref', xrefs)
+
+        # Hey!  The article is ready to be posted!  God damn f'in finally.
+        sql = """
+            INSERT INTO articles (message_id, header, body)
+            VALUES ('%s', '%s', '%s')
+        """ % (
+            adbapi.safe(article.getHeader('Message-ID')),
+            adbapi.safe(article.textHeaders()),
+            adbapi.safe(article.body)
+        )
+
+        transaction.execute(sql)
+
+        # Now update the posting to reflect the groups to which this belongs
+        for gid in gidToName:
+            sql = """
+                INSERT INTO postings (group_id, article_id, article_index)
+                VALUES (%d, (SELECT last_value FROM articles_article_id_seq), %d)
+            """ % (gid, gidToIndex[gid])
+            transaction.execute(sql)
+
+        return len(nameIndex)
+
+
+    def overviewRequest(self):
+        sql = """
+            SELECT header FROM overview
+        """
+        return self.dbpool.runQuery(sql).addCallback(lambda result: [header[0] for header in result])
+
+
+    def xoverRequest(self, group, low, high):
+        sql = """
+            SELECT postings.article_index, articles.header
+            FROM articles,postings,groups
+            WHERE postings.group_id = groups.group_id
+            AND groups.name = '%s'
+            AND postings.article_id = articles.article_id
+            %s
+            %s
+        """ % (
+            adbapi.safe(group),
+            low is not None and "AND postings.article_index >= %d" % (low,) or "",
+            high is not None and "AND postings.article_index <= %d" % (high,) or ""
+        )
+
+        return self.dbpool.runQuery(sql).addCallback(
+            lambda results: [
+                [id] + Article(header, None).overview() for (id, header) in results
+            ]
+        )
+
+
+    def xhdrRequest(self, group, low, high, header):
+        sql = """
+            SELECT postings.article_index, articles.header
+            FROM groups,postings,articles
+            WHERE groups.name = '%s' AND postings.group_id = groups.group_id
+            AND postings.article_id = articles.article_id
+            AND postings.article_index >= %d
+            AND postings.article_index <= %d
+        """ % (adbapi.safe(group), low, high)
+
+        return self.dbpool.runQuery(sql).addCallback(
+            lambda results: [
+                (i, Article(h, None).getHeader(header)) for (i, h) in results
+            ]
+        )
+
+
+    def listGroupRequest(self, group):
+        sql = """
+            SELECT postings.article_index FROM postings,groups
+            WHERE postings.group_id = groups.group_id
+            AND groups.name = '%s'
+        """ % (adbapi.safe(group),)
+
+        return self.dbpool.runQuery(sql).addCallback(
+            lambda results, group = group: (group, [res[0] for res in results])
+        )
+
+
+    def groupRequest(self, group):
+        sql = """
+            SELECT groups.name,
+                COUNT(postings.article_index),
+                COALESCE(MAX(postings.article_index), 0),
+                COALESCE(MIN(postings.article_index), 0),
+                groups.flags
+            FROM groups LEFT OUTER JOIN postings
+            ON postings.group_id = groups.group_id
+            WHERE groups.name = '%s'
+            GROUP BY groups.name, groups.flags
+        """ % (adbapi.safe(group),)
+
+        return self.dbpool.runQuery(sql).addCallback(
+            lambda results: tuple(results[0])
+        )
+
+
+    def articleExistsRequest(self, id):
+        sql = """
+            SELECT COUNT(message_id) FROM articles
+            WHERE message_id = '%s'
+        """ % (adbapi.safe(id),)
+
+        return self.dbpool.runQuery(sql).addCallback(
+            lambda result: bool(result[0][0])
+        )
+
+
+    def articleRequest(self, group, index, id = None):
+        if id is not None:
+            sql = """
+                SELECT postings.article_index, articles.message_id, articles.header, articles.body
+                FROM groups,postings LEFT OUTER JOIN articles
+                ON articles.message_id = '%s'
+                WHERE groups.name = '%s'
+                AND groups.group_id = postings.group_id
+            """ % (adbapi.safe(id), adbapi.safe(group))
+        else:
+            sql = """
+                SELECT postings.article_index, articles.message_id, articles.header, articles.body
+                FROM groups,articles LEFT OUTER JOIN postings
+                ON postings.article_id = articles.article_id
+                WHERE postings.article_index = %d
+                AND postings.group_id = groups.group_id
+                AND groups.name = '%s'
+            """ % (index, adbapi.safe(group))
+
+        return self.dbpool.runQuery(sql).addCallback(
+            lambda result: (
+                result[0][0],
+                result[0][1],
+                StringIO.StringIO(result[0][2] + '\r\n' + result[0][3])
+            )
+        )
+
+
+    def headRequest(self, group, index):
+        sql = """
+            SELECT postings.article_index, articles.message_id, articles.header
+            FROM groups,articles LEFT OUTER JOIN postings
+            ON postings.article_id = articles.article_id
+            WHERE postings.article_index = %d
+            AND postings.group_id = groups.group_id
+            AND groups.name = '%s'
+        """ % (index, adbapi.safe(group))
+
+        return self.dbpool.runQuery(sql).addCallback(lambda result: result[0])
+
+
+    def bodyRequest(self, group, index):
+        sql = """
+            SELECT postings.article_index, articles.message_id, articles.body
+            FROM groups,articles LEFT OUTER JOIN postings
+            ON postings.article_id = articles.article_id
+            WHERE postings.article_index = %d
+            AND postings.group_id = groups.group_id
+            AND groups.name = '%s'
+        """ % (index, adbapi.safe(group))
+
+        return self.dbpool.runQuery(sql).addCallback(
+            lambda result: result[0]
+        ).addCallback(
+            lambda (index, id, body): (index, id, StringIO.StringIO(body))
+        )
+
+####
+#### XXX - make these static methods some day
+####
+def makeGroupSQL(groups):
+    res = ''
+    for g in groups:
+        res = res + """\n    INSERT INTO groups (name) VALUES ('%s');\n""" % (adbapi.safe(g),)
+    return res
+
+
+def makeOverviewSQL():
+    res = ''
+    for o in OVERVIEW_FMT:
+        res = res + """\n    INSERT INTO overview (header) VALUES ('%s');\n""" % (adbapi.safe(o),)
+    return res
diff --git a/ThirdParty/Twisted/twisted/news/news.py b/ThirdParty/Twisted/twisted/news/news.py
new file mode 100644
index 0000000..8165171
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/news.py
@@ -0,0 +1,90 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Maintainer: Jp Calderone
+"""
+
+from twisted.news import nntp
+from twisted.internet import protocol, reactor
+
+import time
+
+class NNTPFactory(protocol.ServerFactory):
+    """A factory for NNTP server protocols."""
+    
+    protocol = nntp.NNTPServer
+    
+    def __init__(self, backend):
+        self.backend = backend
+    
+    def buildProtocol(self, connection):
+        p = self.protocol()
+        p.factory = self
+        return p
+
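+# A minimal usage sketch for this factory (illustrative only; the
+# PickleStorage constructor arguments are assumed from twisted.news.database,
+# and port 119 is simply the conventional NNTP port):
+#
+#   from twisted.internet import reactor
+#   from twisted.news import database, news
+#
+#   storage = database.PickleStorage('news.pickle', ['comp.lang.python'])
+#   reactor.listenTCP(119, news.NNTPFactory(storage))
+#   reactor.run()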
+
+class UsenetClientFactory(protocol.ClientFactory):
+    def __init__(self, groups, storage):
+        self.lastChecks = {}
+        self.groups = groups
+        self.storage = storage
+
+
+    def clientConnectionLost(self, connector, reason):
+        pass
+
+
+    def clientConnectionFailed(self, connector, reason):
+        print 'Connection failed: ', reason
+    
+    
+    def updateChecks(self, addr):
+        self.lastChecks[addr] = time.mktime(time.gmtime())
+
+
+    def buildProtocol(self, addr):
+        last = self.lastChecks.setdefault(addr, time.mktime(time.gmtime()) - (60 * 60 * 24 * 7))
+        p = nntp.UsenetClientProtocol(self.groups, last, self.storage)
+        p.factory = self
+        return p
+
+
+# XXX - Maybe this inheritance doesn't make so much sense?
+class UsenetServerFactory(NNTPFactory):
+    """A factory for NNTP Usenet server protocols."""
+
+    protocol = nntp.NNTPServer
+
+    def __init__(self, backend, remoteHosts = None, updatePeriod = 60):
+        NNTPFactory.__init__(self, backend)
+        self.updatePeriod = updatePeriod
+        self.remoteHosts = remoteHosts or []
+        self.clientFactory = UsenetClientFactory(self.remoteHosts, self.backend)
+
+
+    def startFactory(self):
+        self._updateCall = reactor.callLater(0, self.syncWithRemotes)
+
+
+    def stopFactory(self):
+        if self._updateCall:
+            self._updateCall.cancel()
+            self._updateCall = None
+
+
+    def buildProtocol(self, connection):
+        p = self.protocol()
+        p.factory = self
+        return p
+
+
+    def syncWithRemotes(self):
+        for remote in self.remoteHosts:
+            reactor.connectTCP(remote, 119, self.clientFactory)
+        self._updateCall = reactor.callLater(self.updatePeriod, self.syncWithRemotes)
+
+
+# backwards compatibility
+Factory = UsenetServerFactory
diff --git a/ThirdParty/Twisted/twisted/news/nntp.py b/ThirdParty/Twisted/twisted/news/nntp.py
new file mode 100644
index 0000000..864bd53
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/nntp.py
@@ -0,0 +1,1036 @@
+# -*- test-case-name: twisted.news.test.test_nntp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+NNTP protocol support.
+
+The following protocol commands are currently understood::
+
+    LIST        LISTGROUP                  XOVER        XHDR
+    POST        GROUP        ARTICLE       STAT         HEAD
+    BODY        NEXT         MODE STREAM   MODE READER  SLAVE
+    LAST        QUIT         HELP          IHAVE        XPATH
+    XINDEX      XROVER       TAKETHIS      CHECK
+
+The following protocol commands require implementation::
+
+                             NEWNEWS
+                             XGTITLE                XPAT
+                             XTHREAD       AUTHINFO NEWGROUPS
+
+
+Other desired features:
+
+   - A real backend
+   - More robust client input handling
+   - A control protocol
+"""
+
+import time
+
+from twisted.protocols import basic
+from twisted.python import log
+
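+# A rough sketch of the lock-step exchange this module speaks (server replies
+# are formatted as the NNTPServer class below sends them; the group name,
+# article numbers and Message-ID are illustrative):
+#
+#   S: 200 server ready - posting allowed
+#   C: GROUP comp.lang.python
+#   S: 211 10 1 10 comp.lang.python group selected
+#   C: ARTICLE 1
+#   S: 220 1 <some-id@example.com> article
+#   S: ...headers, a blank line, the body...
+#   S: .
+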
+def parseRange(text):
+    articles = text.split('-')
+    if len(articles) == 1:
+        try:
+            a = int(articles[0])
+            return a, a
+        except ValueError:
+            return None, None
+    elif len(articles) == 2:
+        try:
+            if len(articles[0]):
+                l = int(articles[0])
+            else:
+                l = None
+            if len(articles[1]):
+                h = int(articles[1])
+            else:
+                h = None
+        except ValueError:
+            return None, None
+    else:
+        # A range with more than one '-' is not valid
+        return None, None
+    return l, h
+
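+# Examples of what parseRange returns for typical NNTP range arguments:
+#   parseRange('5')     -> (5, 5)
+#   parseRange('2-10')  -> (2, 10)
+#   parseRange('7-')    -> (7, None)    # open-ended upper bound
+#   parseRange('-12')   -> (None, 12)   # open-ended lower bound
+#   parseRange('junk')  -> (None, None)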
+
+def extractCode(line):
+    line = line.split(' ', 1)
+    if len(line) != 2:
+        return None
+    try:
+        return int(line[0]), line[1]
+    except ValueError:
+        return None
+
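+# For example:
+#   extractCode('200 server ready - posting allowed')
+#       -> (200, 'server ready - posting allowed')
+#   extractCode('garbage') -> None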
+
+class NNTPError(Exception):
+    def __init__(self, string):
+        self.string = string
+
+    def __str__(self):
+        return 'NNTPError: %s' % self.string
+
+
+class NNTPClient(basic.LineReceiver):
+    MAX_COMMAND_LENGTH = 510
+
+    def __init__(self):
+        self.currentGroup = None
+
+        self._state = []
+        self._error = []
+        self._inputBuffers = []
+        self._responseCodes = []
+        self._responseHandlers = []
+
+        self._postText = []
+
+        self._newState(self._statePassive, None, self._headerInitial)
+
+
+    def gotAllGroups(self, groups):
+        "Override for notification when fetchGroups() action is completed"
+
+
+    def getAllGroupsFailed(self, error):
+        "Override for notification when fetchGroups() action fails"
+
+
+    def gotOverview(self, overview):
+        "Override for notification when fetchOverview() action is completed"
+
+
+    def getOverviewFailed(self, error):
+        "Override for notification when fetchOverview() action fails"
+
+
+    def gotSubscriptions(self, subscriptions):
+        "Override for notification when fetchSubscriptions() action is completed"
+
+
+    def getSubscriptionsFailed(self, error):
+        "Override for notification when fetchSubscriptions() action fails"
+
+
+    def gotGroup(self, group):
+        "Override for notification when fetchGroup() action is completed"
+
+
+    def getGroupFailed(self, error):
+        "Override for notification when fetchGroup() action fails"
+
+
+    def gotArticle(self, article):
+        "Override for notification when fetchArticle() action is completed"
+
+
+    def getArticleFailed(self, error):
+        "Override for notification when fetchArticle() action fails"
+
+
+    def gotHead(self, head):
+        "Override for notification when fetchHead() action is completed"
+
+
+    def getHeadFailed(self, error):
+        "Override for notification when fetchHead() action fails"
+
+
+    def gotBody(self, info):
+        "Override for notification when fetchBody() action is completed"
+
+
+    def getBodyFailed(self, body):
+        "Override for notification when fetchBody() action fails"
+
+
+    def postedOk(self):
+        "Override for notification when postArticle() action is successful"
+
+
+    def postFailed(self, error):
+        "Override for notification when postArticle() action fails"
+
+
+    def gotXHeader(self, headers):
+        "Override for notification when fetchXHeader() action is successful"
+
+
+    def getXHeaderFailed(self, error):
+        "Override for notification when fetchXHeader() action fails"
+
+
+    def gotNewNews(self, news):
+        "Override for notification when fetchNewNews() action is successful"
+
+
+    def getNewNewsFailed(self, error):
+        "Override for notification when fetchNewNews() action fails"
+
+
+    def gotNewGroups(self, groups):
+        "Override for notification when fetchNewGroups() action is successful"
+
+
+    def getNewGroupsFailed(self, error):
+        "Override for notification when fetchNewGroups() action fails"
+
+
+    def setStreamSuccess(self):
+        "Override for notification when setStream() action is successful"
+
+
+    def setStreamFailed(self, error):
+        "Override for notification when setStream() action fails"
+
+
+    def fetchGroups(self):
+        """
+        Request a list of all news groups from the server.  gotAllGroups()
+        is called on success, getAllGroupsFailed() on failure
+        """
+        self.sendLine('LIST')
+        self._newState(self._stateList, self.getAllGroupsFailed)
+
+
+    def fetchOverview(self):
+        """
+        Request the overview format from the server.  gotOverview() is called
+        on success, getOverviewFailed() on failure
+        """
+        self.sendLine('LIST OVERVIEW.FMT')
+        self._newState(self._stateOverview, self.getOverviewFailed)
+
+
+    def fetchSubscriptions(self):
+        """
+        Request a list of the groups it is recommended a new user subscribe to.
+        gotSubscriptions() is called on success, getSubscriptionsFailed() on
+        failure
+        """
+        self.sendLine('LIST SUBSCRIPTIONS')
+        self._newState(self._stateSubscriptions, self.getSubscriptionsFailed)
+
+
+    def fetchGroup(self, group):
+        """
+        Get group information for the specified group from the server.  gotGroup()
+        is called on success, getGroupFailed() on failure.
+        """
+        self.sendLine('GROUP %s' % (group,))
+        self._newState(None, self.getGroupFailed, self._headerGroup)
+
+
+    def fetchHead(self, index = ''):
+        """
+        Get the header for the specified article (or the currently selected
+        article if index is '') from the server.  gotHead() is called on
+        success, getHeadFailed() on failure
+        """
+        self.sendLine('HEAD %s' % (index,))
+        self._newState(self._stateHead, self.getHeadFailed)
+
+
+    def fetchBody(self, index = ''):
+        """
+        Get the body for the specified article (or the currently selected
+        article if index is '') from the server.  gotBody() is called on
+        success, getBodyFailed() on failure
+        """
+        self.sendLine('BODY %s' % (index,))
+        self._newState(self._stateBody, self.getBodyFailed)
+
+
+    def fetchArticle(self, index = ''):
+        """
+        Get the complete article with the specified index (or the currently
+        selected article if index is '') or Message-ID from the server.
+        gotArticle() is called on success, getArticleFailed() on failure.
+        """
+        self.sendLine('ARTICLE %s' % (index,))
+        self._newState(self._stateArticle, self.getArticleFailed)
+
+
+    def postArticle(self, text):
+        """
+        Attempt to post an article with the specified text to the server.  'text'
+        must consist of both head and body data, as specified by RFC 850.  If the
+        article is posted successfully, postedOk() is called, otherwise postFailed()
+        is called.
+        """
+        self.sendLine('POST')
+        self._newState(None, self.postFailed, self._headerPost)
+        self._postText.append(text)
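+        # For example (message text is illustrative only; lines end with '\n',
+        # which the posting machinery converts to '\r\n' on the wire):
+        #
+        #   client.postArticle(
+        #       'From: user@example.com\n'
+        #       'Newsgroups: comp.lang.python\n'
+        #       'Subject: hello\n'
+        #       '\n'
+        #       'Body text.\n')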
+
+
+    def fetchNewNews(self, groups, date, distributions = ''):
+        """
+        Get the Message-IDs for all new news posted to any of the given
+        groups since the specified date - in seconds since the epoch, GMT -
+        optionally restricted to the given distributions.  gotNewNews() is
+        called on success, getNewNewsFailed() on failure.
+
+        One invocation of this function may result in multiple invocations
+        of gotNewNews()/getNewNewsFailed().
+        """
+        date, timeStr = time.strftime('%y%m%d %H%M%S', time.gmtime(date)).split()
+        line = 'NEWNEWS %%s %s %s %s' % (date, timeStr, distributions)
+        groupPart = ''
+        while len(groups) and len(line) + len(groupPart) + len(groups[-1]) + 1 < NNTPClient.MAX_COMMAND_LENGTH:
+            group = groups.pop()
+            groupPart = groupPart + ',' + group
+
+        self.sendLine(line % (groupPart,))
+        self._newState(self._stateNewNews, self.getNewNewsFailed)
+
+        if len(groups):
+            self.fetchNewNews(groups, date, distributions)
+
+
+    def fetchNewGroups(self, date, distributions):
+        """
+        Get the names of all new groups created/added to the server since
+        the specified date - in seconds since the epoch, GMT - optionally
+        restricted to the given distributions.  gotNewGroups() is called
+        on success, getNewGroupsFailed() on failure.
+        """
+        date, timeStr = time.strftime('%y%m%d %H%M%S', time.gmtime(date)).split()
+        self.sendLine('NEWGROUPS %s %s %s' % (date, timeStr, distributions))
+        self._newState(self._stateNewGroups, self.getNewGroupsFailed)
+
+
+    def fetchXHeader(self, header, low = None, high = None, id = None):
+        """
+        Request a specific header from the server for an article or range
+        of articles.  If 'id' is not None, a header for only the article
+        with that Message-ID will be requested.  If both low and high are
+        None, a header for the currently selected article will be selected;
+        If both low and high are zero-length strings, headers for all articles
+        in the currently selected group will be requested;  Otherwise, high
+        and low will be used as bounds - if one is None the first or last
+        article index will be substituted, as appropriate.
+        """
+        if id is not None:
+            r = header + ' <%s>' % (id,)
+        elif low is high is None:
+            r = header
+        elif high is None:
+            r = header + ' %d-' % (low,)
+        elif low is None:
+            r = header + ' -%d' % (high,)
+        else:
+            r = header + ' %d-%d' % (low, high)
+        self.sendLine('XHDR ' + r)
+        self._newState(self._stateXHDR, self.getXHeaderFailed)
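+        # For example:
+        #   fetchXHeader('Subject')                    sends  XHDR Subject
+        #   fetchXHeader('Subject', low=1, high=5)     sends  XHDR Subject 1-5
+        #   fetchXHeader('Subject', id='foo@example')  sends  XHDR Subject <foo@example>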
+
+
+    def setStream(self):
+        """
+        Set the mode to STREAM, suspending the normal "lock-step" mode of
+        communications.  setStreamSuccess() is called on success,
+        setStreamFailed() on failure.
+        """
+        self.sendLine('MODE STREAM')
+        self._newState(None, self.setStreamFailed, self._headerMode)
+
+
+    def quit(self):
+        self.sendLine('QUIT')
+        self.transport.loseConnection()
+
+
+    def _newState(self, method, error, responseHandler = None):
+        self._inputBuffers.append([])
+        self._responseCodes.append(None)
+        self._state.append(method)
+        self._error.append(error)
+        self._responseHandlers.append(responseHandler)
+
+
+    def _endState(self):
+        buf = self._inputBuffers[0]
+        del self._responseCodes[0]
+        del self._inputBuffers[0]
+        del self._state[0]
+        del self._error[0]
+        del self._responseHandlers[0]
+        return buf
+
+
+    def _newLine(self, line, check = 1):
+        if check and line and line[0] == '.':
+            line = line[1:]
+        self._inputBuffers[0].append(line)
+
+
+    def _setResponseCode(self, code):
+        self._responseCodes[0] = code
+
+
+    def _getResponseCode(self):
+        return self._responseCodes[0]
+
+
+    def lineReceived(self, line):
+        if not len(self._state):
+            self._statePassive(line)
+        elif self._getResponseCode() is None:
+            code = extractCode(line)
+            if code is None or not (200 <= code[0] < 400):    # An error!
+                self._error[0](line)
+                self._endState()
+            else:
+                self._setResponseCode(code)
+                if self._responseHandlers[0]:
+                    self._responseHandlers[0](code)
+        else:
+            self._state[0](line)
+
+
+    def _statePassive(self, line):
+        log.msg('Server said: %s' % line)
+
+
+    def _passiveError(self, error):
+        log.err('Passive Error: %s' % (error,))
+
+
+    def _headerInitial(self, (code, message)):
+        if code == 200:
+            self.canPost = 1
+        else:
+            self.canPost = 0
+        self._endState()
+
+
+    def _stateList(self, line):
+        if line != '.':
+            data = filter(None, line.strip().split())
+            self._newLine((data[0], int(data[1]), int(data[2]), data[3]), 0)
+        else:
+            self.gotAllGroups(self._endState())
+
+
+    def _stateOverview(self, line):
+        if line != '.':
+            self._newLine(filter(None, line.strip().split()), 0)
+        else:
+            self.gotOverview(self._endState())
+
+
+    def _stateSubscriptions(self, line):
+        if line != '.':
+            self._newLine(line.strip(), 0)
+        else:
+            self.gotSubscriptions(self._endState())
+
+
+    def _headerGroup(self, (code, line)):
+        self.gotGroup(tuple(line.split()))
+        self._endState()
+
+
+    def _stateArticle(self, line):
+        if line != '.':
+            if line.startswith('.'):
+                line = line[1:]
+            self._newLine(line, 0)
+        else:
+            self.gotArticle('\n'.join(self._endState())+'\n')
+
+
+    def _stateHead(self, line):
+        if line != '.':
+            self._newLine(line, 0)
+        else:
+            self.gotHead('\n'.join(self._endState()))
+
+
+    def _stateBody(self, line):
+        if line != '.':
+            if line.startswith('.'):
+                line = line[1:]
+            self._newLine(line, 0)
+        else:
+            self.gotBody('\n'.join(self._endState())+'\n')
+
+
+    def _headerPost(self, (code, message)):
+        if code == 340:
+            self.transport.write(self._postText[0].replace('\n', '\r\n').replace('\r\n.', '\r\n..'))
+            if self._postText[0][-1:] != '\n':
+                self.sendLine('')
+            self.sendLine('.')
+            del self._postText[0]
+            self._newState(None, self.postFailed, self._headerPosted)
+        else:
+            self.postFailed('%d %s' % (code, message))
+        self._endState()
+
+
+    def _headerPosted(self, (code, message)):
+        if code == 240:
+            self.postedOk()
+        else:
+            self.postFailed('%d %s' % (code, message))
+        self._endState()
+
+
+    def _stateXHDR(self, line):
+        if line != '.':
+            self._newLine(line.split(), 0)
+        else:
+            self.gotXHeader(self._endState())
+
+
+    def _stateNewNews(self, line):
+        if line != '.':
+            self._newLine(line, 0)
+        else:
+            self.gotNewNews(self._endState())
+
+
+    def _stateNewGroups(self, line):
+        if line != '.':
+            self._newLine(line, 0)
+        else:
+            self.gotNewGroups(self._endState())
+
+
+    def _headerMode(self, (code, message)):
+        if code == 203:
+            self.setStreamSuccess()
+        else:
+            self.setStreamFailed((code, message))
+        self._endState()
+
+
+class NNTPServer(basic.LineReceiver):
+    COMMANDS = [
+        'LIST', 'GROUP', 'ARTICLE', 'STAT', 'MODE', 'LISTGROUP', 'XOVER',
+        'XHDR', 'HEAD', 'BODY', 'NEXT', 'LAST', 'POST', 'QUIT', 'IHAVE',
+        'HELP', 'SLAVE', 'XPATH', 'XINDEX', 'XROVER', 'TAKETHIS', 'CHECK'
+    ]
+
+    def __init__(self):
+        self.servingSlave = 0
+
+
+    def connectionMade(self):
+        self.inputHandler = None
+        self.currentGroup = None
+        self.currentIndex = None
+        self.sendLine('200 server ready - posting allowed')
+
+    def lineReceived(self, line):
+        if self.inputHandler is not None:
+            self.inputHandler(line)
+        else:
+            parts = line.strip().split()
+            if len(parts):
+                cmd, parts = parts[0].upper(), parts[1:]
+                if cmd in NNTPServer.COMMANDS:
+                    func = getattr(self, 'do_%s' % cmd)
+                    try:
+                        func(*parts)
+                    except TypeError:
+                        self.sendLine('501 command syntax error')
+                        log.msg("501 command syntax error")
+                        log.msg("command was", line)
+                        log.deferr()
+                    except:
+                        self.sendLine('503 program fault - command not performed')
+                        log.msg("503 program fault")
+                        log.msg("command was", line)
+                        log.deferr()
+                else:
+                    self.sendLine('500 command not recognized')
+
+
+    def do_LIST(self, subcmd = '', *dummy):
+        subcmd = subcmd.strip().lower()
+        if subcmd == 'newsgroups':
+            # XXX - this could use a real implementation, eh?
+            self.sendLine('215 Descriptions in form "group description"')
+            self.sendLine('.')
+        elif subcmd == 'overview.fmt':
+            defer = self.factory.backend.overviewRequest()
+            defer.addCallbacks(self._gotOverview, self._errOverview)
+            log.msg('overview')
+        elif subcmd == 'subscriptions':
+            defer = self.factory.backend.subscriptionRequest()
+            defer.addCallbacks(self._gotSubscription, self._errSubscription)
+            log.msg('subscriptions')
+        elif subcmd == '':
+            defer = self.factory.backend.listRequest()
+            defer.addCallbacks(self._gotList, self._errList)
+        else:
+            self.sendLine('500 command not recognized')
+
+
+    def _gotList(self, list):
+        self.sendLine('215 newsgroups in form "group high low flags"')
+        for i in list:
+            self.sendLine('%s %d %d %s' % tuple(i))
+        self.sendLine('.')
+
+
+    def _errList(self, failure):
+        print 'LIST failed: ', failure
+        self.sendLine('503 program fault - command not performed')
+
+
+    def _gotSubscription(self, parts):
+        self.sendLine('215 information follows')
+        for i in parts:
+            self.sendLine(i)
+        self.sendLine('.')
+
+
+    def _errSubscription(self, failure):
+        print 'SUBSCRIPTIONS failed: ', failure
+        self.sendLine('503 program fault - command not performed')
+
+
+    def _gotOverview(self, parts):
+        self.sendLine('215 Order of fields in overview database.')
+        for i in parts:
+            self.sendLine(i + ':')
+        self.sendLine('.')
+
+
+    def _errOverview(self, failure):
+        print 'LIST OVERVIEW.FMT failed: ', failure
+        self.sendLine('503 program fault - command not performed')
+
+
+    def do_LISTGROUP(self, group = None):
+        group = group or self.currentGroup
+        if group is None:
+            self.sendLine('412 Not currently in newsgroup')
+        else:
+            defer = self.factory.backend.listGroupRequest(group)
+            defer.addCallbacks(self._gotListGroup, self._errListGroup)
+
+
+    def _gotListGroup(self, (group, articles)):
+        self.currentGroup = group
+        if len(articles):
+            self.currentIndex = int(articles[0])
+        else:
+            self.currentIndex = None
+
+        self.sendLine('211 list of article numbers follow')
+        for i in articles:
+            self.sendLine(str(i))
+        self.sendLine('.')
+
+
+    def _errListGroup(self, failure):
+        print 'LISTGROUP failed: ', failure
+        self.sendLine('502 no permission')
+
+
+    def do_XOVER(self, range):
+        if self.currentGroup is None:
+            self.sendLine('412 No news group currently selected')
+        else:
+            l, h = parseRange(range)
+            defer = self.factory.backend.xoverRequest(self.currentGroup, l, h)
+            defer.addCallbacks(self._gotXOver, self._errXOver)
+
+
+    def _gotXOver(self, parts):
+        self.sendLine('224 Overview information follows')
+        for i in parts:
+            self.sendLine('\t'.join(map(str, i)))
+        self.sendLine('.')
+
+
+    def _errXOver(self, failure):
+        print 'XOVER failed: ', failure
+        self.sendLine('420 No article(s) selected')
+
+
+    def xhdrWork(self, header, range):
+        if self.currentGroup is None:
+            self.sendLine('412 No news group currently selected')
+        else:
+            if range is None:
+                if self.currentIndex is None:
+                    self.sendLine('420 No current article selected')
+                    return
+                else:
+                    l = h = self.currentIndex
+            else:
+                # FIXME: articles may be a message-id
+                l, h = parseRange(range)
+
+            if l is h is None:
+                self.sendLine('430 no such article')
+            else:
+                return self.factory.backend.xhdrRequest(self.currentGroup, l, h, header)
+
+
+    def do_XHDR(self, header, range = None):
+        d = self.xhdrWork(header, range)
+        if d:
+            d.addCallbacks(self._gotXHDR, self._errXHDR)
+
+
+    def _gotXHDR(self, parts):
+        self.sendLine('221 Header follows')
+        for i in parts:
+            self.sendLine('%d %s' % i)
+        self.sendLine('.')
+
+    def _errXHDR(self, failure):
+        print 'XHDR failed: ', failure
+        self.sendLine('502 no permission')
+
+
+    def do_POST(self):
+        self.inputHandler = self._doingPost
+        self.message = ''
+        self.sendLine('340 send article to be posted.  End with <CR-LF>.<CR-LF>')
+
+
+    def _doingPost(self, line):
+        if line == '.':
+            self.inputHandler = None
+            group, article = self.currentGroup, self.message
+            self.message = ''
+
+            defer = self.factory.backend.postRequest(article)
+            defer.addCallbacks(self._gotPost, self._errPost)
+        else:
+            self.message = self.message + line + '\r\n'
+
+
+    def _gotPost(self, parts):
+        self.sendLine('240 article posted ok')
+
+
+    def _errPost(self, failure):
+        print 'POST failed: ', failure
+        self.sendLine('441 posting failed')
+
+
+    def do_CHECK(self, id):
+        d = self.factory.backend.articleExistsRequest(id)
+        d.addCallbacks(self._gotCheck, self._errCheck)
+
+
+    def _gotCheck(self, result):
+        if result:
+            self.sendLine("438 already have it, please don't send it to me")
+        else:
+            self.sendLine('238 no such article found, please send it to me')
+
+
+    def _errCheck(self, failure):
+        print 'CHECK failed: ', failure
+        self.sendLine('431 try sending it again later')
+
+
+    def do_TAKETHIS(self, id):
+        self.inputHandler = self._doingTakeThis
+        self.message = ''
+
+
+    def _doingTakeThis(self, line):
+        if line == '.':
+            self.inputHandler = None
+            article = self.message
+            self.message = ''
+            d = self.factory.backend.postRequest(article)
+            d.addCallbacks(self._didTakeThis, self._errTakeThis)
+        else:
+            self.message = self.message + line + '\r\n'
+
+
+    def _didTakeThis(self, result):
+        self.sendLine('239 article transferred ok')
+
+
+    def _errTakeThis(self, failure):
+        print 'TAKETHIS failed: ', failure
+        self.sendLine('439 article transfer failed')
+
+
+    def do_GROUP(self, group):
+        defer = self.factory.backend.groupRequest(group)
+        defer.addCallbacks(self._gotGroup, self._errGroup)
+
+
+    def _gotGroup(self, (name, num, high, low, flags)):
+        self.currentGroup = name
+        self.currentIndex = low
+        self.sendLine('211 %d %d %d %s group selected' % (num, low, high, name))
+
+
+    def _errGroup(self, failure):
+        print 'GROUP failed: ', failure
+        self.sendLine('411 no such group')
+
+
+    def articleWork(self, article, cmd, func):
+        if self.currentGroup is None:
+            self.sendLine('412 no newsgroup has been selected')
+        else:
+            if not article:
+                if self.currentIndex is None:
+                    self.sendLine('420 no current article has been selected')
+                    return
+                article = self.currentIndex
+            elif article[0] == '<':
+                return func(self.currentGroup, index = None, id = article)
+            try:
+                article = int(article)
+                return func(self.currentGroup, article)
+            except ValueError:
+                self.sendLine('501 command syntax error')
+
+
+    def do_ARTICLE(self, article = None):
+        defer = self.articleWork(article, 'ARTICLE', self.factory.backend.articleRequest)
+        if defer:
+            defer.addCallbacks(self._gotArticle, self._errArticle)
+
+
+    def _gotArticle(self, (index, id, article)):
+        self.currentIndex = index
+        self.sendLine('220 %d %s article' % (index, id))
+        s = basic.FileSender()
+        d = s.beginFileTransfer(article, self.transport)
+        d.addCallback(self.finishedFileTransfer)
+
+    ##
+    ## Helper for FileSender
+    ##
+    def finishedFileTransfer(self, lastsent):
+        if lastsent != '\n':
+            line = '\r\n.'
+        else:
+            line = '.'
+        self.sendLine(line)
+    ##
+
+    def _errArticle(self, failure):
+        print 'ARTICLE failed: ', failure
+        self.sendLine('423 bad article number')
+
+
+    def do_STAT(self, article = None):
+        defer = self.articleWork(article, 'STAT', self.factory.backend.articleRequest)
+        if defer:
+            defer.addCallbacks(self._gotStat, self._errStat)
+
+
+    def _gotStat(self, (index, id, article)):
+        self.currentIndex = index
+        self.sendLine('223 %d %s article retrieved - request text separately' % (index, id))
+
+
+    def _errStat(self, failure):
+        print 'STAT failed: ', failure
+        self.sendLine('423 bad article number')
+
+
+    def do_HEAD(self, article = None):
+        defer = self.articleWork(article, 'HEAD', self.factory.backend.headRequest)
+        if defer:
+            defer.addCallbacks(self._gotHead, self._errHead)
+
+
+    def _gotHead(self, (index, id, head)):
+        self.currentIndex = index
+        self.sendLine('221 %d %s article retrieved' % (index, id))
+        self.transport.write(head + '\r\n')
+        self.sendLine('.')
+
+
+    def _errHead(self, failure):
+        print 'HEAD failed: ', failure
+        self.sendLine('423 no such article number in this group')
+
+
+    def do_BODY(self, article = None):
+        defer = self.articleWork(article, 'BODY', self.factory.backend.bodyRequest)
+        if defer:
+            defer.addCallbacks(self._gotBody, self._errBody)
+
+
+    def _gotBody(self, (index, id, body)):
+        self.currentIndex = index
+        self.sendLine('222 %d %s article retrieved - body follows' % (index, id))
+        self.lastsent = ''
+        s = basic.FileSender()
+        d = s.beginFileTransfer(body, self.transport)
+        d.addCallback(self.finishedFileTransfer)
+
+    def _errBody(self, failure):
+        print 'BODY failed: ', failure
+        self.sendLine('423 no such article number in this group')
+
+
+    # NEXT and LAST are just STATs that increment currentIndex first.
+    # Accordingly, use the STAT callbacks.
+    def do_NEXT(self):
+        i = self.currentIndex + 1
+        defer = self.factory.backend.articleRequest(self.currentGroup, i)
+        defer.addCallbacks(self._gotStat, self._errStat)
+
+
+    def do_LAST(self):
+        i = self.currentIndex - 1
+        defer = self.factory.backend.articleRequest(self.currentGroup, i)
+        defer.addCallbacks(self._gotStat, self._errStat)
+
+
+    def do_MODE(self, cmd):
+        cmd = cmd.strip().upper()
+        if cmd == 'READER':
+            self.servingSlave = 0
+            self.sendLine('200 Hello, you can post')
+        elif cmd == 'STREAM':
+            self.sendLine('500 Command not understood')
+        else:
+            # This is not a mistake
+            self.sendLine('500 Command not understood')
+
+
+    def do_QUIT(self):
+        self.sendLine('205 goodbye')
+        self.transport.loseConnection()
+
+
+    def do_HELP(self):
+        self.sendLine('100 help text follows')
+        self.sendLine('Read the RFC.')
+        self.sendLine('.')
+
+
+    def do_SLAVE(self):
+        self.sendLine('202 slave status noted')
+        self.servingSlave = 1
+
+
+    def do_XPATH(self, article):
+        # XPATH is a silly thing to have.  No client has the right to ask
+        # for this piece of information from me, and so that is what I'll
+        # tell them.
+        self.sendLine('502 access restriction or permission denied')
+
+
+    def do_XINDEX(self, article):
+        # XINDEX is another silly command.  The RFC suggests it be relegated
+        # to the history books, and who am I to disagree?
+        self.sendLine('502 access restriction or permission denied')
+
+
+    def do_XROVER(self, range=None):
+        """
+        Handle a request for references of all messages in the currently
+        selected group.
+
+        This generates the same response as an I{XHDR References} request would
+        generate.
+        """
+        self.do_XHDR('References', range)
+
+
+    def do_IHAVE(self, id):
+        self.factory.backend.articleExistsRequest(id).addCallback(self._foundArticle)
+
+
+    def _foundArticle(self, result):
+        if result:
+            self.sendLine('437 article rejected - do not try again')
+        else:
+            self.sendLine('335 send article to be transferred.  End with <CR-LF>.<CR-LF>')
+            self.inputHandler = self._handleIHAVE
+            self.message = ''
+
+
+    def _handleIHAVE(self, line):
+        if line == '.':
+            self.inputHandler = None
+            self.factory.backend.postRequest(
+                self.message
+            ).addCallbacks(self._gotIHAVE, self._errIHAVE)
+
+            self.message = ''
+        else:
+            self.message = self.message + line + '\r\n'
+
+
+    def _gotIHAVE(self, result):
+        self.sendLine('235 article transferred ok')
+
+
+    def _errIHAVE(self, failure):
+        print 'IHAVE failed: ', failure
+        self.sendLine('436 transfer failed - try again later')
+
+
+class UsenetClientProtocol(NNTPClient):
+    """
+    A client that connects to an NNTP server and asks for articles new
+    since a certain time.
+    """
+
+    def __init__(self, groups, date, storage):
+        """
+        Fetch all new articles from the given groups since the
+        given date and dump them into the given storage.  groups
+        is a list of group names.  date is an integer or floating
+        point representing seconds since the epoch (GMT).  storage is
+        any object that implements the NewsStorage interface.
+        """
+        NNTPClient.__init__(self)
+        self.groups, self.date, self.storage = groups, date, storage
+
+
+    def connectionMade(self):
+        NNTPClient.connectionMade(self)
+        log.msg("Initiating update with remote host: " + str(self.transport.getPeer()))
+        self.setStream()
+        self.fetchNewNews(self.groups, self.date, '')
+
+
+    def articleExists(self, exists, article):
+        if exists:
+            self.fetchArticle(article)
+        else:
+            self.count = self.count - 1
+            self.disregard = self.disregard + 1
+
+
+    def gotNewNews(self, news):
+        self.disregard = 0
+        self.count = len(news)
+        log.msg("Transfering " + str(self.count) + " articles from remote host: " + str(self.transport.getPeer()))
+        for i in news:
+            self.storage.articleExistsRequest(i).addCallback(self.articleExists, i)
+
+
+    def getNewNewsFailed(self, reason):
+        log.msg("Updated failed (" + reason + ") with remote host: " + str(self.transport.getPeer()))
+        self.quit()
+
+
+    def gotArticle(self, article):
+        self.storage.postRequest(article)
+        self.count = self.count - 1
+        if not self.count:
+            log.msg("Completed update with remote host: " + str(self.transport.getPeer()))
+            if self.disregard:
+                log.msg("Disregarded %d articles." % (self.disregard,))
+            self.factory.updateChecks(self.transport.getPeer())
+            self.quit()
diff --git a/ThirdParty/Twisted/twisted/news/tap.py b/ThirdParty/Twisted/twisted/news/tap.py
new file mode 100644
index 0000000..a4cf542
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/tap.py
@@ -0,0 +1,138 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.news import news, database
+from twisted.application import strports
+from twisted.python import usage, log
+
+class DBOptions(usage.Options):
+    optParameters = [
+        ['module',   None, 'pyPgSQL.PgSQL', "DB-API 2.0 module to use"],
+        ['dbhost',   None, 'localhost',     "Host where database manager is listening"],
+        ['dbuser',   None, 'news',          "Username with which to connect to database"],
+        ['database', None, 'news',          "Database name to use"],
+        ['schema',   None, 'schema.sql',    "File to which to write SQL schema initialisation"],
+
+        # XXX - Hrm.
+        ["groups",     "g", "groups.list",   "File containing group list"],
+        ["servers",    "s", "servers.list",  "File containing server list"]
+    ]
+    
+    def postOptions(self):
+        # XXX - Hmmm.
+        self['groups'] = [g.strip() for g in open(self['groups']).readlines() if not g.startswith('#')]
+        self['servers'] = [s.strip() for s in open(self['servers']).readlines() if not s.startswith('#')]
+
+        try:
+            __import__(self['module'])
+        except ImportError:
+            log.msg("Warning: Cannot import %s" % (self['module'],))
+        
+        f = open(self['schema'], 'w')
+        f.write(
+            database.NewsStorageAugmentation.schema + '\n' +
+            database.makeGroupSQL(self['groups']) + '\n' +
+            database.makeOverviewSQL()
+        )
+        f.close()
+        
+        info = {
+            'host': self['dbhost'], 'user': self['dbuser'],
+            'database': self['database'], 'dbapiName': self['module']
+        }
+        self.db = database.NewsStorageAugmentation(info)
+
+
+class PickleOptions(usage.Options):
+    optParameters = [
+        ['file', None, 'news.pickle', "File to which to save pickle"],
+
+        # XXX - Hrm.
+        ["groups",     "g", "groups.list",   "File containing group list"],
+        ["servers",    "s", "servers.list",  "File containing server list"],
+        ["moderators", "m", "moderators.list",
+         "File containing moderators list"],
+    ]
+    
+    subCommands = None
+
+    def postOptions(self):
+        # XXX - Hmmm.
+        filename = self['file']
+        self['groups'] = [g.strip() for g in open(self['groups']).readlines()
+                          if not g.startswith('#')]
+        self['servers'] = [s.strip() for s in open(self['servers']).readlines()
+                           if not s.startswith('#')]
+        self['moderators'] = [s.split()
+                              for s in open(self['moderators']).readlines()
+                              if not s.startswith('#')]
+        self.db = database.PickleStorage(filename, self['groups'],
+                                         self['moderators'])
+
+
+class Options(usage.Options):
+    synopsis = "[options]"
+    
+    groups = None
+    servers = None
+    subscriptions = None
+
+    optParameters = [
+        ["port",       "p", "119",           "Listen port"],
+        ["interface",  "i", "",              "Interface to which to bind"],
+        ["datadir",    "d", "news.db",       "Root data storage path"],
+        ["mailhost",   "m", "localhost",     "Host of SMTP server to use"]
+    ]
+    compData = usage.Completions(
+                   optActions={"datadir" : usage.CompleteDirs(),
+                               "mailhost" : usage.CompleteHostnames(),
+                               "interface" : usage.CompleteNetInterfaces()}
+                   )
+
+    def __init__(self):
+        usage.Options.__init__(self)
+        self.groups = []
+        self.servers = []
+        self.subscriptions = []
+
+
+    def opt_group(self, group):
+        """The name of a newsgroup to carry."""
+        self.groups.append([group, None])
+
+
+    def opt_moderator(self, moderator):
+        """The email of the moderator for the most recently passed group."""
+        self.groups[-1][1] = moderator
+
+
+    def opt_subscription(self, group):
+        """A newsgroup to list as a recommended subscription."""
+        self.subscriptions.append(group)
+
+
+    def opt_server(self, server):
+        """The address of a Usenet server to pass messages to and receive messages from."""
+        self.servers.append(server)
+
+
+def makeService(config):
+    if not len(config.groups):
+        raise usage.UsageError("No newsgroups specified")
+    
+    db = database.NewsShelf(config['mailhost'], config['datadir'])
+    for (g, m) in config.groups:
+        if m:
+            db.addGroup(g, 'm')
+            db.addModerator(g, m)
+        else:
+            db.addGroup(g, 'y')
+    for s in config.subscriptions:
+        print s
+        db.addSubscription(s)
+    s = config['port']
+    if config['interface']:
+        # Add a warning here
+        s += ':interface='+config['interface']
+    return strports.service(s, news.UsenetServerFactory(db, config.servers))
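
The Options/makeService pair above follows the usual twistd tap convention; a
minimal sketch of driving it directly (the group name and port number below
are illustrative placeholders, not values taken from this file):

    # Minimal sketch only: builds the NNTP service the same way twistd would,
    # using the Options/makeService pair defined above.
    from twisted.news import tap

    config = tap.Options()
    config.parseOptions(["--group", "example.test", "--port", "1119"])
    service = tap.makeService(config)   # strports TCP service wrapping
                                        # news.UsenetServerFactory on port 1119
    # Under twistd the application framework would call startService();
    # it is omitted here.
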
diff --git a/ThirdParty/Twisted/twisted/news/test/__init__.py b/ThirdParty/Twisted/twisted/news/test/__init__.py
new file mode 100644
index 0000000..677518d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/test/__init__.py
@@ -0,0 +1 @@
+"""News Tests"""
diff --git a/ThirdParty/Twisted/twisted/news/test/test_database.py b/ThirdParty/Twisted/twisted/news/test/test_database.py
new file mode 100644
index 0000000..42900a2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/test/test_database.py
@@ -0,0 +1,224 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.news.database}.
+"""
+
+__metaclass__ = type
+
+from email.Parser import Parser
+from socket import gethostname
+
+from twisted.trial.unittest import TestCase
+from twisted.internet.defer import succeed
+from twisted.mail.smtp import messageid
+from twisted.news.database import Article, PickleStorage, NewsShelf
+
+
+
+class ModerationTestsMixin:
+    """
+    Tests for the moderation features of L{INewsStorage} implementations.
+    """
+    def setUp(self):
+        self._email = []
+
+
+    def sendmail(self, smtphost, from_addr, to_addrs, msg,
+                 senderDomainName=None, port=25):
+        """
+        Fake of L{twisted.mail.smtp.sendmail} which records attempts to send
+        email and immediately pretends success.
+
+        Subclasses should arrange for their storage implementation to call this
+        instead of the real C{sendmail} function.
+        """
+        self._email.append((
+                smtphost, from_addr, to_addrs, msg, senderDomainName, port))
+        return succeed(None)
+
+
+    _messageTemplate = """\
+From: some dude
+To: another person
+Subject: activities etc
+Message-ID: %(articleID)s
+Newsgroups: %(newsgroup)s
+%(approved)s
+Body of the message is such.
+""".replace('\n', '\r\n')
+
+
+    def getApprovedMessage(self, articleID, group):
+        """
+        Return a C{str} containing an RFC 2822 formatted message including an
+        I{Approved} header indicating it has passed through moderation.
+        """
+        return self._messageTemplate % {
+            'articleID': articleID,
+            'newsgroup': group,
+            'approved': 'Approved: yup\r\n'}
+
+
+    def getUnapprovedMessage(self, articleID, group):
+        """
+        Return a C{str} containing an RFC 2822 formatted message with no
+        I{Approved} header indicating it may require moderation.
+        """
+        return self._messageTemplate % {
+            'articleID': articleID,
+            'newsgroup': group,
+            'approved': '\r\n'}
+
+
+    def getStorage(self, groups, moderators, mailhost, sender):
+        """
+        Override in a subclass to return a L{INewsStorage} provider to test for
+        correct moderation behavior.
+
+        @param groups: A C{list} of C{str} naming the groups which should exist
+            in the resulting storage object.
+
+        @param moderators: A C{dict} mapping each group name (a C{str}) to a
+            C{list} of C{str} giving moderator email (RFC 2821) addresses.
+        """
+        raise NotImplementedError()
+
+
+    def test_postApproved(self):
+        """
+        L{INewsStorage.postRequest} posts the message if it includes an
+        I{Approved} header.
+        """
+        group = "example.group"
+        moderator = "alice at example.com"
+        mailhost = "127.0.0.1"
+        sender = "bob at example.org"
+        articleID = messageid()
+        storage = self.getStorage(
+            [group], {group: [moderator]}, mailhost, sender)
+        message = self.getApprovedMessage(articleID, group)
+        result = storage.postRequest(message)
+
+        def cbPosted(ignored):
+            self.assertEqual(self._email, [])
+            exists = storage.articleExistsRequest(articleID)
+            exists.addCallback(self.assertTrue)
+            return exists
+        result.addCallback(cbPosted)
+        return result
+
+
+    def test_postModerated(self):
+        """
+        L{INewsStorage.postRequest} forwards a message to the moderator if it
+        does not include an I{Approved} header.
+        """
+        group = "example.group"
+        moderator = "alice at example.com"
+        mailhost = "127.0.0.1"
+        sender = "bob at example.org"
+        articleID = messageid()
+        storage = self.getStorage(
+            [group], {group: [moderator]}, mailhost, sender)
+        message = self.getUnapprovedMessage(articleID, group)
+        result = storage.postRequest(message)
+
+        def cbModerated(ignored):
+            self.assertEqual(len(self._email), 1)
+            self.assertEqual(self._email[0][0], mailhost)
+            self.assertEqual(self._email[0][1], sender)
+            self.assertEqual(self._email[0][2], [moderator])
+            self._checkModeratorMessage(
+                self._email[0][3], sender, moderator, group, message)
+            self.assertEqual(self._email[0][4], None)
+            self.assertEqual(self._email[0][5], 25)
+            exists = storage.articleExistsRequest(articleID)
+            exists.addCallback(self.assertFalse)
+            return exists
+        result.addCallback(cbModerated)
+        return result
+
+
+    def _checkModeratorMessage(self, messageText, sender, moderator, group, postingText):
+        p = Parser()
+        msg = p.parsestr(messageText)
+        headers = dict(msg.items())
+        del headers['Message-ID']
+        self.assertEqual(
+            headers,
+            {'From': sender,
+             'To': moderator,
+             'Subject': 'Moderate new %s message: activities etc' % (group,),
+             'Content-Type': 'message/rfc822'})
+
+        posting = p.parsestr(postingText)
+        attachment = msg.get_payload()[0]
+
+        for header in ['from', 'to', 'subject', 'message-id', 'newsgroups']:
+            self.assertEqual(posting[header], attachment[header])
+
+        self.assertEqual(posting.get_payload(), attachment.get_payload())
+
+
+
+class PickleStorageTests(ModerationTestsMixin, TestCase):
+    """
+    Tests for L{PickleStorage}.
+    """
+    def getStorage(self, groups, moderators, mailhost, sender):
+        """
+        Create and return a L{PickleStorage} instance configured to require
+        moderation.
+        """
+        storageFilename = self.mktemp()
+        storage = PickleStorage(
+            storageFilename, groups, moderators, mailhost, sender)
+        storage.sendmail = self.sendmail
+        self.addCleanup(PickleStorage.sharedDBs.pop, storageFilename)
+        return storage
+
+
+
+class NewsShelfTests(ModerationTestsMixin, TestCase):
+    """
+    Tests for L{NewsShelf}.
+    """
+    def getStorage(self, groups, moderators, mailhost, sender):
+        """
+        Create and return a L{NewsShelf} instance configured to require
+        moderation.
+        """
+        storageFilename = self.mktemp()
+        shelf = NewsShelf(mailhost, storageFilename, sender)
+        for name in groups:
+            shelf.addGroup(name, 'm') # Dial 'm' for moderator
+            for address in moderators.get(name, []):
+                shelf.addModerator(name, address)
+        shelf.sendmail = self.sendmail
+        return shelf
+
+
+    def test_notifyModerator(self):
+        """
+        L{NewsShelf.notifyModerator} sends a moderation email to a single
+        moderator.
+        """
+        shelf = NewsShelf('example.com', self.mktemp(), 'alice at example.com')
+        shelf.sendmail = self.sendmail
+        shelf.notifyModerator('bob at example.org', Article('Foo: bar', 'Some text'))
+        self.assertEqual(len(self._email), 1)
+
+
+    def test_defaultSender(self):
+        """
+        If no sender is specified to L{NewsShelf.notifyModerators}, a default
+        address based on the system hostname is used for both the envelope and
+        RFC 2822 sender addresses.
+        """
+        shelf = NewsShelf('example.com', self.mktemp())
+        shelf.sendmail = self.sendmail
+        shelf.notifyModerators(['bob at example.org'], Article('Foo: bar', 'Some text'))
+        self.assertEqual(self._email[0][1], 'twisted-news@' + gethostname())
+        self.assertIn('From: twisted-news@' + gethostname(), self._email[0][3])
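
The moderation behaviour pinned down by these tests reduces to a shelf with an
'm' group and at least one moderator; a hedged sketch (addresses, mailhost and
group name are placeholders, and a real SMTP delivery would be attempted here
unless sendmail is stubbed the way the tests do):

    # Sketch of the moderated-post path covered by test_postModerated above.
    # All names below are placeholders; sendmail() would really be invoked.
    from twisted.news.database import NewsShelf

    shelf = NewsShelf('smtp.example.com', 'moderated.db', 'news@example.com')
    shelf.addGroup('example.moderated', 'm')            # 'm' == moderated
    shelf.addModerator('example.moderated', 'mod@example.com')

    # No Approved: header, so the article is mailed to the moderator instead
    # of being stored; postRequest() returns a Deferred.
    d = shelf.postRequest(
        'From: poster@example.org\r\n'
        'Message-ID: <1@example.org>\r\n'
        'Newsgroups: example.moderated\r\n'
        'Subject: needs review\r\n'
        '\r\n'
        'Body text.\r\n')
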
diff --git a/ThirdParty/Twisted/twisted/news/test/test_news.py b/ThirdParty/Twisted/twisted/news/test/test_news.py
new file mode 100644
index 0000000..35ac7d7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/test/test_news.py
@@ -0,0 +1,107 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys, types
+from pprint import pformat
+
+from twisted.trial import unittest
+from twisted.news import database
+from twisted.internet import reactor
+
+MESSAGE_ID = "f83ba57450ed0fd8ac9a472b847e830e"
+
+POST_STRING = """Path: not-for-mail
+From: <exarkun at somehost.domain.com>
+Subject: a test
+Newsgroups: alt.test.nntp
+Organization: 
+Summary: 
+Keywords: 
+Message-Id: %s
+User-Agent: tin/1.4.5-20010409 ("One More Nightmare") (UNIX) (Linux/2.4.17 (i686))
+
+this is a test
+...
+lala
+moo
+-- 
+"One World, one Web, one Program." - Microsoft(R) promotional ad
+"Ein Volk, ein Reich, ein Fuhrer." - Adolf Hitler
+--
+ 10:56pm up 4 days, 4:42, 1 user, load average: 0.08, 0.08, 0.12
+""" % (MESSAGE_ID)
+
+class NewsTestCase(unittest.TestCase):
+    def setUp(self):
+        self.backend = database.NewsShelf(None, 'news2.db')
+        self.backend.addGroup('alt.test.nntp', 'y')
+        self.backend.postRequest(POST_STRING.replace('\n', '\r\n'))
+
+
+    def testArticleExists(self):
+        d = self.backend.articleExistsRequest(MESSAGE_ID)
+        d.addCallback(self.failUnless)
+        return d
+
+
+    def testArticleRequest(self):
+        d = self.backend.articleRequest(None, None, MESSAGE_ID)
+
+        def cbArticle(result):
+            self.failUnless(isinstance(result, tuple),
+                            'callback result is wrong type: ' + str(result))
+            self.assertEqual(len(result), 3,
+                              'callback result list should have three entries: ' +
+                              str(result))
+            self.assertEqual(result[1], MESSAGE_ID,
+                              "callback result Message-Id doesn't match: %s vs %s" %
+                              (MESSAGE_ID, result[1]))
+            body = result[2].read()
+            self.failIfEqual(body.find('\r\n\r\n'), -1,
+                             "Can't find \\r\\n\\r\\n between header and body")
+            return result
+
+        d.addCallback(cbArticle)
+        return d
+
+
+    def testHeadRequest(self):
+        d = self.testArticleRequest()
+
+        def cbArticle(result):
+            index = result[0]
+
+            d = self.backend.headRequest("alt.test.nntp", index)
+            d.addCallback(cbHead)
+            return d
+
+        def cbHead(result):
+            self.assertEqual(result[1], MESSAGE_ID,
+                              "callback result Message-Id doesn't match: %s vs %s" %
+                              (MESSAGE_ID, result[1]))
+
+            self.assertEqual(result[2][-2:], '\r\n',
+                              "headers must be \\r\\n terminated.")
+
+        d.addCallback(cbArticle)
+        return d
+
+
+    def testBodyRequest(self):
+        d = self.testArticleRequest()
+
+        def cbArticle(result):
+            index = result[0]
+
+            d = self.backend.bodyRequest("alt.test.nntp", index)
+            d.addCallback(cbBody)
+            return d
+
+        def cbBody(result):
+            body = result[2].read()
+            self.assertEqual(body[0:4], 'this',
+                              "message body has been altered: " +
+                              pformat(body[0:4]))
+
+        d.addCallback(cbArticle)
+        return d
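
The storage calls exercised above translate directly to application code; a
minimal sketch with a placeholder group, database path and message (each
request returns a Deferred):

    # Sketch of the NewsShelf request API used by NewsTestCase above.
    from twisted.news import database

    backend = database.NewsShelf(None, 'example.db')
    backend.addGroup('example.test', 'y')               # 'y' == unmoderated
    backend.postRequest(
        'From: someone@example.org\r\n'
        'Message-Id: <42@example.org>\r\n'
        'Newsgroups: example.test\r\n'
        'Subject: hello\r\n'
        '\r\n'
        'body text\r\n')

    def printArticle(result):
        index, messageID, body = result                 # (index, id, file)
        print messageID, body.read()
    backend.articleRequest(None, None, '<42@example.org>').addCallback(printArticle)
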
diff --git a/ThirdParty/Twisted/twisted/news/test/test_nntp.py b/ThirdParty/Twisted/twisted/news/test/test_nntp.py
new file mode 100644
index 0000000..987546b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/test/test_nntp.py
@@ -0,0 +1,197 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.trial import unittest
+from twisted.news import database
+from twisted.news import nntp
+from twisted.protocols import loopback
+from twisted.test import proto_helpers
+
+ALL_GROUPS = ('alt.test.nntp', 0, 1, 'y'),
+GROUP = ('0', '1', '0', 'alt.test.nntp', 'group', 'selected')
+SUBSCRIPTIONS = ['alt.test.nntp', 'news.testgroup']
+
+POST_STRING = """Path: not-for-mail
+From: <exarkun at somehost.domain.com>
+Subject: a test
+Newsgroups: alt.test.nntp
+Organization: 
+Summary: 
+Keywords: 
+User-Agent: tin/1.4.5-20010409 ("One More Nightmare") (UNIX) (Linux/2.4.17 (i686))
+
+this is a test
+.
+..
+...
+lala
+moo
+-- 
+"One World, one Web, one Program." - Microsoft(R) promotional ad
+"Ein Volk, ein Reich, ein Fuhrer." - Adolf Hitler
+--
+ 10:56pm up 4 days, 4:42, 1 user, load average: 0.08, 0.08, 0.12
+"""
+
+class TestNNTPClient(nntp.NNTPClient):
+    def __init__(self):
+        nntp.NNTPClient.__init__(self)
+
+    def assertEqual(self, foo, bar):
+        if foo != bar: raise AssertionError("%r != %r!" % (foo, bar))
+
+    def connectionMade(self):
+        nntp.NNTPClient.connectionMade(self)
+        self.fetchSubscriptions()
+
+
+    def gotSubscriptions(self, subscriptions):
+        self.assertEqual(len(subscriptions), len(SUBSCRIPTIONS))
+        for s in subscriptions:
+            assert s in SUBSCRIPTIONS
+
+        self.fetchGroups()
+
+    def gotAllGroups(self, info):
+        self.assertEqual(len(info), len(ALL_GROUPS))
+        self.assertEqual(info[0], ALL_GROUPS[0])
+
+        self.fetchGroup('alt.test.nntp')
+
+
+    def getAllGroupsFailed(self, error):
+        raise AssertionError("fetchGroups() failed: %s" % (error,))
+
+
+    def gotGroup(self, info):
+        self.assertEqual(len(info), 6)
+        self.assertEqual(info, GROUP)
+
+        self.postArticle(POST_STRING)
+
+
+    def getSubscriptionsFailed(self, error):
+        raise AssertionError("fetchSubscriptions() failed: %s" % (error,))
+
+
+    def getGroupFailed(self, error):
+        raise AssertionError("fetchGroup() failed: %s" % (error,))
+
+
+    def postFailed(self, error):
+        raise AssertionError("postArticle() failed: %s" % (error,))
+
+
+    def postedOk(self):
+        self.fetchArticle(1)
+
+
+    def gotArticle(self, info):
+        origBody = POST_STRING.split('\n\n')[1]
+        newBody = info.split('\n\n', 1)[1]
+
+        self.assertEqual(origBody, newBody)
+
+        # We're done
+        self.transport.loseConnection()
+
+
+    def getArticleFailed(self, error):
+        raise AssertionError("fetchArticle() failed: %s" % (error,))
+
+
+class NNTPTestCase(unittest.TestCase):
+    def setUp(self):
+        self.server = nntp.NNTPServer()
+        self.server.factory = self
+        self.backend = database.NewsShelf(None, 'news.db')
+        self.backend.addGroup('alt.test.nntp', 'y')
+
+        for s in SUBSCRIPTIONS:
+            self.backend.addSubscription(s)
+
+        self.transport = proto_helpers.StringTransport()
+        self.server.makeConnection(self.transport)
+        self.client = TestNNTPClient()
+
+    def testLoopback(self):
+        return loopback.loopbackAsync(self.server, self.client)
+
+        # XXX This test is woefully incomplete.  It tests the single
+        # most common code path and nothing else.  Expand it and the
+        # test fairy will leave you a surprise.
+
+        #         reactor.iterate(1) # fetchGroups()
+        #         reactor.iterate(1) # fetchGroup()
+        #         reactor.iterate(1) # postArticle()
+
+
+    def test_connectionMade(self):
+        """
+        When L{NNTPServer} is connected, it sends a server greeting to the
+        client.
+        """
+        self.assertEqual(
+            self.transport.value().split('\r\n'), [
+                '200 server ready - posting allowed',
+                ''])
+
+
+    def test_LIST(self):
+        """
+        When L{NNTPServer} receives a I{LIST} command, it sends a list of news
+        groups to the client (RFC 3977, section 7.6.1.1).
+        """
+        self.transport.clear()
+        self.server.do_LIST()
+        self.assertEqual(
+            self.transport.value().split('\r\n'), [
+                '215 newsgroups in form "group high low flags"',
+                'alt.test.nntp 0 1 y',
+                '.',
+                ''])
+
+
+    def test_GROUP(self):
+        """
+        When L{NNTPServer} receives a I{GROUP} command, it sends a line of
+        information about that group to the client (RFC 3977, section 6.1.1.1).
+        """
+        self.transport.clear()
+        self.server.do_GROUP('alt.test.nntp')
+        self.assertEqual(
+            self.transport.value().split('\r\n'), [
+                '211 0 1 0 alt.test.nntp group selected',
+                ''])
+
+
+    def test_LISTGROUP(self):
+        """
+        When L{NNTPServer} receives a I{LISTGROUP} command, it sends a list of
+        message numbers for the messages in a particular group (RFC 3977,
+        section 6.1.2.1).
+        """
+        self.transport.clear()
+        self.server.do_LISTGROUP('alt.test.nntp')
+        self.assertEqual(
+            self.transport.value().split('\r\n'), [
+                '211 list of article numbers follow',
+                '.',
+                ''])
+
+
+    def test_XROVER(self):
+        """
+        When L{NNTPServer} receives an I{XROVER} command, it sends a list of
+        I{References} header values for the messages in a particular group (RFC
+        2980, section 2.11).
+        """
+        self.server.do_GROUP('alt.test.nntp')
+        self.transport.clear()
+
+        self.server.do_XROVER()
+        self.assertEqual(
+            self.transport.value().split('\r\n'), [
+                '221 Header follows',
+                '.',
+                ''])
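
Serving the same protocol over a real socket only needs a factory around the
shelf; a sketch with an arbitrary local port (UsenetServerFactory is the
factory tap.py earlier in this patch hands to strports):

    # Sketch: expose a NewsShelf over NNTP, much as tap.makeService() does.
    # The port number and database path are placeholders.
    from twisted.internet import reactor
    from twisted.news import news, database

    shelf = database.NewsShelf(None, 'serve.db')
    shelf.addGroup('example.test', 'y')
    reactor.listenTCP(1119, news.UsenetServerFactory(shelf, []))
    reactor.run()
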
diff --git a/ThirdParty/Twisted/twisted/news/topfiles/NEWS b/ThirdParty/Twisted/twisted/news/topfiles/NEWS
new file mode 100644
index 0000000..8e56024
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/topfiles/NEWS
@@ -0,0 +1,118 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted News 12.3.0 (2012-12-20)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted News 12.2.0 (2012-08-26)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted News 12.1.0 (2012-06-02)
+================================
+
+Bugfixes
+--------
+ - twisted.news.nntp.NNTPServer now has additional test coverage and
+   less redundant implementation code. (#5537)
+
+Deprecations and Removals
+-------------------------
+ - The ability to pass a string article to NNTPServer._gotBody and
+   NNTPServer._gotArticle in t.news.nntp has been deprecated for years
+   and is now removed. (#4548)
+
+
+Twisted News 12.0.0 (2012-02-10)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted News 11.1.0 (2011-11-15)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted News 11.0.0 (2011-04-01)
+================================
+
+No significant changes have been made for this release.
+
+Other
+-----
+ - #4580
+
+
+Twisted News 10.2.0 (2010-11-29)
+================================
+
+Bugfixes
+--------
+ - twisted.news.database.PickleStorage now invokes the email APIs
+   correctly, allowing it to actually send moderation emails. (#4528)
+
+
+Twisted News 10.1.0 (2010-06-27)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted News 10.0.0 (2010-03-01)
+================================
+
+No interesting changes since Twisted 9.0.
+
+
+Twisted News 9.0.0 (2009-11-24)
+===============================
+
+Other
+-----
+ - #2763, #3540
+
+
+News 8.2.0 (2008-12-16)
+=======================
+
+No interesting changes since Twisted 8.0.
+
+
+8.1.0 (2008-05-18)
+==================
+
+Fixes
+-----
+ - The deprecated mktap API is no longer used (#3127)
+
+
+8.0.0 (2008-03-17)
+==================
+
+Misc
+----
+ - Remove all "API Stability" markers (#2847)
+
+
+0.3.0 (2007-01-06)
+==================
+Fixes
+-----
+ - News was updated to work with the latest twisted.components changes
+   to Twisted (#1636)
+ - The 'ip' attribute is no longer available on NNTP protocols (#1936)
+
+
+0.2.0 (2006-05-24)
+==================
+
+Fixes:
+ - Fixed a critical bug in moderation support.
+
diff --git a/ThirdParty/Twisted/twisted/news/topfiles/README b/ThirdParty/Twisted/twisted/news/topfiles/README
new file mode 100644
index 0000000..02652c2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/topfiles/README
@@ -0,0 +1,4 @@
+Twisted News 12.3.0
+
+News depends on Twisted, and, if you want to use the moderation
+features, Twisted Mail.
diff --git a/ThirdParty/Twisted/twisted/news/topfiles/setup.py b/ThirdParty/Twisted/twisted/news/topfiles/setup.py
new file mode 100644
index 0000000..d776f30
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/news/topfiles/setup.py
@@ -0,0 +1,28 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+try:
+    from twisted.python import dist
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+if __name__ == '__main__':
+    dist.setup(
+        twisted_subproject="news",
+        # metadata
+        name="Twisted News",
+        description="Twisted News is an NNTP server and programming library.",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python at twistedmatrix.com",
+        maintainer="Jp Calderone",
+        url="http://twistedmatrix.com/trac/wiki/TwistedNews",
+        license="MIT",
+        long_description="""\
+Twisted News is an NNTP protocol (Usenet) programming library. The
+library contains server and client protocol implementations. A simple
+NNTP server is also provided.
+""",
+    )
+
diff --git a/ThirdParty/Twisted/twisted/pair/__init__.py b/ThirdParty/Twisted/twisted/pair/__init__.py
new file mode 100644
index 0000000..6d3f5aa
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/__init__.py
@@ -0,0 +1,20 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+
+Twisted Pair: The framework of your ethernet.
+
+Low-level networking transports and utilities.
+
+See also twisted.pair.ethernet, twisted.pair.ip,
+twisted.pair.raw and twisted.pair.rawudp.
+
+Maintainer: Tommi Virtanen
+
+"""
+
+from twisted.pair._version import version
+__version__ = version.short()
diff --git a/ThirdParty/Twisted/twisted/pair/_version.py b/ThirdParty/Twisted/twisted/pair/_version.py
new file mode 100644
index 0000000..3163d8f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.pair', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/pair/ethernet.py b/ThirdParty/Twisted/twisted/pair/ethernet.py
new file mode 100644
index 0000000..b432c6f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/ethernet.py
@@ -0,0 +1,56 @@
+# -*- test-case-name: twisted.pair.test.test_ethernet -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+
+"""Support for working directly with ethernet frames"""
+
+import struct
+
+
+from twisted.internet import protocol
+from twisted.pair import raw
+from zope.interface import implements, Interface
+
+
+class IEthernetProtocol(Interface):
+    """An interface for protocols that handle Ethernet frames"""
+    def addProto():
+        """Add an IRawPacketProtocol protocol"""
+
+    def datagramReceived():
+        """An Ethernet frame has been received"""
+
+class EthernetHeader:
+    def __init__(self, data):
+
+        (self.dest, self.source, self.proto) \
+                    = struct.unpack("!6s6sH", data[:6+6+2])
+
+class EthernetProtocol(protocol.AbstractDatagramProtocol):
+
+    implements(IEthernetProtocol)
+    
+    def __init__(self):
+        self.etherProtos = {}
+
+    def addProto(self, num, proto):
+        proto = raw.IRawPacketProtocol(proto)
+        if num < 0:
+            raise TypeError, 'Added protocol must be positive or zero'
+        if num >= 2**16:
+            raise TypeError, 'Added protocol must fit in 16 bits'
+        if num not in self.etherProtos:
+            self.etherProtos[num] = []
+        self.etherProtos[num].append(proto)
+
+    def datagramReceived(self, data, partial=0):
+        header = EthernetHeader(data[:14])
+        for proto in self.etherProtos.get(header.proto, ()):
+            proto.datagramReceived(data=data[14:],
+                                   partial=partial,
+                                   dest=header.dest,
+                                   source=header.source,
+                                   protocol=header.proto)
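
Frames are demultiplexed purely on the EtherType field; a minimal sketch with
made-up MAC addresses and payload:

    # Sketch: route one Ethernet frame by EtherType (0x0800 == IPv4).
    # The MAC addresses and payload bytes are illustrative only.
    from zope.interface import implements
    from twisted.pair import ethernet, raw

    class PrintPackets:
        implements(raw.IRawPacketProtocol)
        def addProto(self, num, proto):
            pass
        def datagramReceived(self, data, **kw):
            print 'payload %r from %r' % (data, kw['source'])

    ether = ethernet.EthernetProtocol()
    ether.addProto(0x0800, PrintPackets())
    ether.datagramReceived("\x01\x02\x03\x04\x05\x06"   # destination MAC
                           "\x0a\x0b\x0c\x0d\x0e\x0f"   # source MAC
                           "\x08\x00"                    # EtherType: IPv4
                           "payload bytes",
                           partial=0)
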
diff --git a/ThirdParty/Twisted/twisted/pair/ip.py b/ThirdParty/Twisted/twisted/pair/ip.py
new file mode 100644
index 0000000..de03bd4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/ip.py
@@ -0,0 +1,72 @@
+# -*- test-case-name: twisted.pair.test.test_ip -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+
+"""Support for working directly with IP packets"""
+
+import struct
+import socket
+
+from twisted.internet import protocol
+from twisted.pair import raw
+from zope.interface import implements
+
+
+class IPHeader:
+    def __init__(self, data):
+
+        (ihlversion, self.tos, self.tot_len, self.fragment_id, frag_off,
+         self.ttl, self.protocol, self.check, saddr, daddr) \
+         = struct.unpack("!BBHHHBBH4s4s", data[:20])
+        self.saddr = socket.inet_ntoa(saddr)
+        self.daddr = socket.inet_ntoa(daddr)
+        self.version = ihlversion & 0x0F
+        self.ihl = ((ihlversion & 0xF0) >> 4) << 2
+        self.fragment_offset = frag_off & 0x1FFF
+        self.dont_fragment = (frag_off & 0x4000 != 0)
+        self.more_fragments = (frag_off & 0x2000 != 0)
+
+MAX_SIZE = 2L**32
+
+class IPProtocol(protocol.AbstractDatagramProtocol):
+    implements(raw.IRawPacketProtocol)
+
+    def __init__(self):
+        self.ipProtos = {}
+
+    def addProto(self, num, proto):
+        proto = raw.IRawDatagramProtocol(proto)
+        if num < 0:
+            raise TypeError, 'Added protocol must be positive or zero'
+        if num >= MAX_SIZE:
+            raise TypeError, 'Added protocol must fit in 32 bits'
+        if num not in self.ipProtos:
+            self.ipProtos[num] = []
+        self.ipProtos[num].append(proto)
+
+    def datagramReceived(self,
+                         data,
+                         partial,
+                         dest,
+                         source,
+                         protocol):
+        header = IPHeader(data)
+        for proto in self.ipProtos.get(header.protocol, ()):
+            proto.datagramReceived(data=data[20:],
+                                   partial=partial,
+                                   source=header.saddr,
+                                   dest=header.daddr,
+                                   protocol=header.protocol,
+                                   version=header.version,
+                                   ihl=header.ihl,
+                                   tos=header.tos,
+                                   tot_len=header.tot_len,
+                                   fragment_id=header.fragment_id,
+                                   fragment_offset=header.fragment_offset,
+                                   dont_fragment=header.dont_fragment,
+                                   more_fragments=header.more_fragments,
+                                   ttl=header.ttl,
+                                   )
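
IPHeader does the field extraction; a sketch decoding the same 20-byte header
the tests below construct:

    # Sketch: decode a 20-byte IPv4 header with IPHeader.  The byte string
    # matches the one built in twisted/pair/test/test_ip.py below.
    from twisted.pair import ip

    header = ip.IPHeader("\x54"                # ihl/version byte
                         "\x07"                # tos
                         "\x00\x1a"            # tot_len
                         "\xDE\xAD"            # fragment id
                         "\xBE\xEF"            # flags + fragment offset
                         "\xC0"                # ttl
                         "\x0F"                # protocol
                         "FE"                  # checksum bytes
                         "\x05\x06\x07\x08"    # source address
                         "\x01\x02\x03\x04")   # destination address
    print header.saddr, header.daddr           # 5.6.7.8 1.2.3.4
    print header.version, header.ihl           # 4 20
    print header.fragment_offset, header.more_fragments   # 7919 True
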
diff --git a/ThirdParty/Twisted/twisted/pair/raw.py b/ThirdParty/Twisted/twisted/pair/raw.py
new file mode 100644
index 0000000..0d3875b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/raw.py
@@ -0,0 +1,35 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+"""Interface definitions for working with raw packets"""
+
+from twisted.internet import protocol
+from zope.interface import Interface
+
+class IRawDatagramProtocol(Interface):
+    """An interface for protocols such as UDP, ICMP and TCP."""
+
+    def addProto():
+        """
+        Add a protocol on top of this one.
+        """
+
+    def datagramReceived():
+        """
+        An IP datagram has been received. Parse and process it.
+        """
+
+class IRawPacketProtocol(Interface):
+    """An interface for low-level protocols such as IP and ARP."""
+
+    def addProto():
+        """
+        Add a protocol on top of this one.
+        """
+
+    def datagramReceived():
+        """
+        An IP datagram has been received. Parse and process it.
+        """
diff --git a/ThirdParty/Twisted/twisted/pair/rawudp.py b/ThirdParty/Twisted/twisted/pair/rawudp.py
new file mode 100644
index 0000000..1425e6b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/rawudp.py
@@ -0,0 +1,55 @@
+# -*- test-case-name: twisted.pair.test.test_rawudp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+"""Implementation of raw packet interfaces for UDP"""
+
+import struct
+
+from twisted.internet import protocol
+from twisted.pair import raw
+from zope.interface import implements
+
+class UDPHeader:
+    def __init__(self, data):
+
+        (self.source, self.dest, self.len, self.check) \
+                 = struct.unpack("!HHHH", data[:8])
+
+class RawUDPProtocol(protocol.AbstractDatagramProtocol):
+    implements(raw.IRawDatagramProtocol)
+    def __init__(self):
+        self.udpProtos = {}
+
+    def addProto(self, num, proto):
+        if not isinstance(proto, protocol.DatagramProtocol):
+            raise TypeError, 'Added protocol must be an instance of DatagramProtocol'
+        if num < 0:
+            raise TypeError, 'Added protocol must be positive or zero'
+        if num >= 2**16:
+            raise TypeError, 'Added protocol must fit in 16 bits'
+        if num not in self.udpProtos:
+            self.udpProtos[num] = []
+        self.udpProtos[num].append(proto)
+
+    def datagramReceived(self,
+                         data,
+                         partial,
+                         source,
+                         dest,
+                         protocol,
+                         version,
+                         ihl,
+                         tos,
+                         tot_len,
+                         fragment_id,
+                         fragment_offset,
+                         dont_fragment,
+                         more_fragments,
+                         ttl):
+        header = UDPHeader(data)
+        for proto in self.udpProtos.get(header.dest, ()):
+            proto.datagramReceived(data[8:],
+                                   (source, header.source))
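
The three layers compose through the raw interfaces: EthernetProtocol hands IP
packets to IPProtocol, which hands UDP datagrams to RawUDPProtocol, which in
turn dispatches to ordinary DatagramProtocols keyed on destination port. A
sketch of the wiring (the handler protocol and port number are placeholders):

    # Sketch: stack the three twisted.pair layers end to end.
    from twisted.internet import protocol
    from twisted.pair import ethernet, ip, rawudp

    class Printer(protocol.DatagramProtocol):
        def datagramReceived(self, data, (host, port)):
            print 'UDP from %s:%d: %r' % (host, port, data)

    udp = rawudp.RawUDPProtocol()
    udp.addProto(5353, Printer())               # dispatch on destination port

    ipProto = ip.IPProtocol()
    ipProto.addProto(17, udp)                   # 17 == IPPROTO_UDP

    ether = ethernet.EthernetProtocol()
    ether.addProto(0x0800, ipProto)             # 0x0800 == IPv4 EtherType
    # ether.datagramReceived(<raw frame>, partial=0) would now walk the stack.
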
diff --git a/ThirdParty/Twisted/twisted/pair/test/__init__.py b/ThirdParty/Twisted/twisted/pair/test/__init__.py
new file mode 100644
index 0000000..5aa286e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/test/__init__.py
@@ -0,0 +1 @@
+'pair tests'
diff --git a/ThirdParty/Twisted/twisted/pair/test/test_ethernet.py b/ThirdParty/Twisted/twisted/pair/test/test_ethernet.py
new file mode 100644
index 0000000..2b675fe
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/test/test_ethernet.py
@@ -0,0 +1,226 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+from twisted.trial import unittest
+
+from twisted.internet import protocol, reactor, error
+from twisted.python import failure, components
+from twisted.pair import ethernet, raw
+from zope.interface import implements
+
+class MyProtocol:
+    implements(raw.IRawPacketProtocol)
+    
+    def __init__(self, expecting):
+        self.expecting = list(expecting)
+
+    def datagramReceived(self, data, **kw):
+        assert self.expecting, 'Got a packet when not expecting anymore.'
+        expect = self.expecting.pop(0)
+        assert expect == (data, kw), \
+               "Expected %r, got %r" % (
+            expect, (data, kw),
+            )
+
+class EthernetTestCase(unittest.TestCase):
+    def testPacketParsing(self):
+        proto = ethernet.EthernetProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': "123456",
+            'source': "987654",
+            'protocol': 0x0800,
+            }),
+
+            ])
+        proto.addProto(0x0800, p1)
+
+        proto.datagramReceived("123456987654\x08\x00foobar",
+                               partial=0)
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+
+
+    def testMultiplePackets(self):
+        proto = ethernet.EthernetProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': "123456",
+            'source': "987654",
+            'protocol': 0x0800,
+            }),
+
+            ('quux', {
+            'partial': 1,
+            'dest': "012345",
+            'source': "abcdef",
+            'protocol': 0x0800,
+            }),
+
+            ])
+        proto.addProto(0x0800, p1)
+
+        proto.datagramReceived("123456987654\x08\x00foobar",
+                               partial=0)
+        proto.datagramReceived("012345abcdef\x08\x00quux",
+                               partial=1)
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+
+
+    def testMultipleSameProtos(self):
+        proto = ethernet.EthernetProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': "123456",
+            'source': "987654",
+            'protocol': 0x0800,
+            }),
+
+            ])
+
+        p2 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': "123456",
+            'source': "987654",
+            'protocol': 0x0800,
+            }),
+
+            ])
+
+        proto.addProto(0x0800, p1)
+        proto.addProto(0x0800, p2)
+
+        proto.datagramReceived("123456987654\x08\x00foobar",
+                               partial=0)
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+        assert not p2.expecting, \
+               'Should not expect any more packets, but still want %r' % p2.expecting
+
+    def testWrongProtoNotSeen(self):
+        proto = ethernet.EthernetProtocol()
+        p1 = MyProtocol([])
+        proto.addProto(0x0801, p1)
+
+        proto.datagramReceived("123456987654\x08\x00foobar",
+                               partial=0)
+        proto.datagramReceived("012345abcdef\x08\x00quux",
+                               partial=1)
+
+    def testDemuxing(self):
+        proto = ethernet.EthernetProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': "123456",
+            'source': "987654",
+            'protocol': 0x0800,
+            }),
+
+            ('quux', {
+            'partial': 1,
+            'dest': "012345",
+            'source': "abcdef",
+            'protocol': 0x0800,
+            }),
+
+            ])
+        proto.addProto(0x0800, p1)
+
+        p2 = MyProtocol([
+
+            ('quux', {
+            'partial': 1,
+            'dest': "012345",
+            'source': "abcdef",
+            'protocol': 0x0806,
+            }),
+
+            ('foobar', {
+            'partial': 0,
+            'dest': "123456",
+            'source': "987654",
+            'protocol': 0x0806,
+            }),
+
+            ])
+        proto.addProto(0x0806, p2)
+
+        proto.datagramReceived("123456987654\x08\x00foobar",
+                               partial=0)
+        proto.datagramReceived("012345abcdef\x08\x06quux",
+                               partial=1)
+        proto.datagramReceived("123456987654\x08\x06foobar",
+                               partial=0)
+        proto.datagramReceived("012345abcdef\x08\x00quux",
+                               partial=1)
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+        assert not p2.expecting, \
+               'Should not expect any more packets, but still want %r' % p2.expecting
+
+    def testAddingBadProtos_WrongLevel(self):
+        """Adding a wrong level protocol raises an exception."""
+        e = ethernet.EthernetProtocol()
+        try:
+            e.addProto(42, "silliness")
+        except components.CannotAdapt:
+            pass
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+
+    def testAddingBadProtos_TooSmall(self):
+        """Adding a protocol with a negative number raises an exception."""
+        e = ethernet.EthernetProtocol()
+        try:
+            e.addProto(-1, MyProtocol([]))
+        except TypeError, e:
+            if e.args == ('Added protocol must be positive or zero',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+
+    def testAddingBadProtos_TooBig(self):
+        """Adding a protocol with a number >=2**16 raises an exception."""
+        e = ethernet.EthernetProtocol()
+        try:
+            e.addProto(2**16, MyProtocol([]))
+        except TypeError, e:
+            if e.args == ('Added protocol must fit in 16 bits',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+    def testAddingBadProtos_TooBig2(self):
+        """Adding a protocol with a number >=2**16 raises an exception."""
+        e = ethernet.EthernetProtocol()
+        try:
+            e.addProto(2**16+1, MyProtocol([]))
+        except TypeError, e:
+            if e.args == ('Added protocol must fit in 16 bits',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
diff --git a/ThirdParty/Twisted/twisted/pair/test/test_ip.py b/ThirdParty/Twisted/twisted/pair/test/test_ip.py
new file mode 100644
index 0000000..ed1623b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/test/test_ip.py
@@ -0,0 +1,417 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+from twisted.trial import unittest
+
+from twisted.internet import protocol, reactor, error
+from twisted.python import failure, components
+from twisted.pair import ip, raw
+from zope import interface
+
+class MyProtocol:
+    interface.implements(raw.IRawDatagramProtocol)
+    
+    def __init__(self, expecting):
+        self.expecting = list(expecting)
+
+    def datagramReceived(self, data, **kw):
+        assert self.expecting, 'Got a packet when not expecting anymore.'
+        expectData, expectKw = self.expecting.pop(0)
+
+        expectKwKeys = expectKw.keys(); expectKwKeys.sort()
+        kwKeys = kw.keys(); kwKeys.sort()
+        assert expectKwKeys == kwKeys, "Expected %r, got %r" % (expectKwKeys, kwKeys)
+
+        for k in expectKwKeys:
+            assert expectKw[k] == kw[k], "Expected %s=%r, got %r" % (k, expectKw[k], kw[k])
+        assert expectKw == kw, "Expected %r, got %r" % (expectKw, kw)
+        assert expectData == data, "Expected %r, got %r" % (expectData, data)
+
+class IPTestCase(unittest.TestCase):
+    def testPacketParsing(self):
+        proto = ip.IPProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': '1.2.3.4',
+            'source': '5.6.7.8',
+            'protocol': 0x0F,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+            ])
+        proto.addProto(0x0F, p1)
+
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0F" #protocol
+                               + "FE" #checksum
+                               + "\x05\x06\x07\x08" + "\x01\x02\x03\x04" + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+
+    def testMultiplePackets(self):
+        proto = ip.IPProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': '1.2.3.4',
+            'source': '5.6.7.8',
+            'protocol': 0x0F,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+            ('quux', {
+            'partial': 1,
+            'dest': '5.4.3.2',
+            'source': '6.7.8.9',
+            'protocol': 0x0F,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+            ])
+        proto.addProto(0x0F, p1)
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0F" #protocol
+                               + "FE" #checksum
+                               + "\x05\x06\x07\x08" + "\x01\x02\x03\x04" + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0F" #protocol
+                               + "FE" #checksum
+                               + "\x06\x07\x08\x09" + "\x05\x04\x03\x02" + "quux",
+                               partial=1,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+
+
+    def testMultipleSameProtos(self):
+        proto = ip.IPProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': '1.2.3.4',
+            'source': '5.6.7.8',
+            'protocol': 0x0F,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+            ])
+
+        p2 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': '1.2.3.4',
+            'source': '5.6.7.8',
+            'protocol': 0x0F,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+            ])
+
+        proto.addProto(0x0F, p1)
+        proto.addProto(0x0F, p2)
+
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0F" #protocol
+                               + "FE" #checksum
+                               + "\x05\x06\x07\x08" + "\x01\x02\x03\x04" + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+        assert not p2.expecting, \
+               'Should not expect any more packets, but still want %r' % p2.expecting
+
+    def testWrongProtoNotSeen(self):
+        proto = ip.IPProtocol()
+        p1 = MyProtocol([])
+        proto.addProto(1, p1)
+
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0F" #protocol
+                               + "FE" #checksum
+                               + "\x05\x06\x07\x08" + "\x01\x02\x03\x04" + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+
+    def testDemuxing(self):
+        proto = ip.IPProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', {
+            'partial': 0,
+            'dest': '1.2.3.4',
+            'source': '5.6.7.8',
+            'protocol': 0x0F,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+            ('quux', {
+            'partial': 1,
+            'dest': '5.4.3.2',
+            'source': '6.7.8.9',
+            'protocol': 0x0F,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+            ])
+        proto.addProto(0x0F, p1)
+
+        p2 = MyProtocol([
+
+            ('quux', {
+            'partial': 1,
+            'dest': '5.4.3.2',
+            'source': '6.7.8.9',
+            'protocol': 0x0A,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+            ('foobar', {
+            'partial': 0,
+            'dest': '1.2.3.4',
+            'source': '5.6.7.8',
+            'protocol': 0x0A,
+            'version': 4,
+            'ihl': 20,
+            'tos': 7,
+            'tot_len': 20+6,
+            'fragment_id': 0xDEAD,
+            'fragment_offset': 0x1EEF,
+            'dont_fragment': 0,
+            'more_fragments': 1,
+            'ttl': 0xC0,
+            }),
+
+
+            ])
+        proto.addProto(0x0A, p2)
+
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0A" #protocol
+                               + "FE" #checksum
+                               + "\x06\x07\x08\x09" + "\x05\x04\x03\x02" + "quux",
+                               partial=1,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0F" #protocol
+                               + "FE" #checksum
+                               + "\x05\x06\x07\x08" + "\x01\x02\x03\x04" + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0F" #protocol
+                               + "FE" #checksum
+                               + "\x06\x07\x08\x09" + "\x05\x04\x03\x02" + "quux",
+                               partial=1,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+        proto.datagramReceived("\x54" #ihl version
+                               + "\x07" #tos
+                               + "\x00\x1a" #tot_len
+                               + "\xDE\xAD" #id
+                               + "\xBE\xEF" #frag_off
+                               + "\xC0" #ttl
+                               + "\x0A" #protocol
+                               + "FE" #checksum
+                               + "\x05\x06\x07\x08" + "\x01\x02\x03\x04" + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='dummy',
+                               protocol='dummy',
+                               )
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+        assert not p2.expecting, \
+               'Should not expect any more packets, but still want %r' % p2.expecting
+
+    def testAddingBadProtos_WrongLevel(self):
+        """Adding a wrong level protocol raises an exception."""
+        e = ip.IPProtocol()
+        try:
+            e.addProto(42, "silliness")
+        except components.CannotAdapt:
+            pass
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+
+    def testAddingBadProtos_TooSmall(self):
+        """Adding a protocol with a negative number raises an exception."""
+        e = ip.IPProtocol()
+        try:
+            e.addProto(-1, MyProtocol([]))
+        except TypeError, e:
+            if e.args == ('Added protocol must be positive or zero',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+
+    def testAddingBadProtos_TooBig(self):
+        """Adding a protocol with a number >=2**32 raises an exception."""
+        e = ip.IPProtocol()
+        try:
+            e.addProto(2L**32, MyProtocol([]))
+        except TypeError, e:
+            if e.args == ('Added protocol must fit in 32 bits',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+    def testAddingBadProtos_TooBig2(self):
+        """Adding a protocol with a number >=2**32 raises an exception."""
+        e = ip.IPProtocol()
+        try:
+            e.addProto(2L**32+1, MyProtocol([]))
+        except TypeError, e:
+            if e.args == ('Added protocol must fit in 32 bits',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
diff --git a/ThirdParty/Twisted/twisted/pair/test/test_rawudp.py b/ThirdParty/Twisted/twisted/pair/test/test_rawudp.py
new file mode 100644
index 0000000..f53f078
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/test/test_rawudp.py
@@ -0,0 +1,327 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+from twisted.trial import unittest
+
+from twisted.internet import protocol, reactor, error
+from twisted.python import failure
+from twisted.pair import rawudp
+
+class MyProtocol(protocol.DatagramProtocol):
+    def __init__(self, expecting):
+        self.expecting = list(expecting)
+
+    def datagramReceived(self, data, (host, port)):
+        assert self.expecting, 'Got a packet when not expecting anymore.'
+        expectData, expectHost, expectPort = self.expecting.pop(0)
+
+        assert expectData == data, "Expected data %r, got %r" % (expectData, data)
+        assert expectHost == host, "Expected host %r, got %r" % (expectHost, host)
+        assert expectPort == port, "Expected port %d=0x%04x, got %d=0x%04x" % (expectPort, expectPort, port, port)
+
+class RawUDPTestCase(unittest.TestCase):
+    def testPacketParsing(self):
+        proto = rawudp.RawUDPProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', 'testHost', 0x43A2),
+
+            ])
+        proto.addProto(0xF00F, p1)
+
+        proto.datagramReceived("\x43\xA2" #source
+                               + "\xf0\x0f" #dest
+                               + "\x00\x06" #len
+                               + "\xDE\xAD" #check
+                               + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='testHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+
+    def testMultiplePackets(self):
+        proto = rawudp.RawUDPProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', 'testHost', 0x43A2),
+            ('quux', 'otherHost', 0x33FE),
+
+            ])
+        proto.addProto(0xF00F, p1)
+        proto.datagramReceived("\x43\xA2" #source
+                               + "\xf0\x0f" #dest
+                               + "\x00\x06" #len
+                               + "\xDE\xAD" #check
+                               + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='testHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+        proto.datagramReceived("\x33\xFE" #source
+                               + "\xf0\x0f" #dest
+                               + "\x00\x05" #len
+                               + "\xDE\xAD" #check
+                               + "quux",
+                               partial=0,
+                               dest='dummy',
+                               source='otherHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+
+
+    def testMultipleSameProtos(self):
+        proto = rawudp.RawUDPProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', 'testHost', 0x43A2),
+
+            ])
+
+        p2 = MyProtocol([
+
+            ('foobar', 'testHost', 0x43A2),
+
+            ])
+
+        proto.addProto(0xF00F, p1)
+        proto.addProto(0xF00F, p2)
+
+        proto.datagramReceived("\x43\xA2" #source
+                               + "\xf0\x0f" #dest
+                               + "\x00\x06" #len
+                               + "\xDE\xAD" #check
+                               + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='testHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+        assert not p2.expecting, \
+               'Should not expect any more packets, but still want %r' % p2.expecting
+
+    def testWrongProtoNotSeen(self):
+        proto = rawudp.RawUDPProtocol()
+        p1 = MyProtocol([])
+        proto.addProto(1, p1)
+
+        proto.datagramReceived("\x43\xA2" #source
+                               + "\xf0\x0f" #dest
+                               + "\x00\x06" #len
+                               + "\xDE\xAD" #check
+                               + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='testHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+
+    def testDemuxing(self):
+        proto = rawudp.RawUDPProtocol()
+        p1 = MyProtocol([
+
+            ('foobar', 'testHost', 0x43A2),
+            ('quux', 'otherHost', 0x33FE),
+
+            ])
+        proto.addProto(0xF00F, p1)
+
+        p2 = MyProtocol([
+
+            ('quux', 'otherHost', 0xA401),
+            ('foobar', 'testHost', 0xA302),
+
+            ])
+        proto.addProto(0xB050, p2)
+
+        proto.datagramReceived("\xA4\x01" #source
+                               + "\xB0\x50" #dest
+                               + "\x00\x05" #len
+                               + "\xDE\xAD" #check
+                               + "quux",
+                               partial=0,
+                               dest='dummy',
+                               source='otherHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+        proto.datagramReceived("\x43\xA2" #source
+                               + "\xf0\x0f" #dest
+                               + "\x00\x06" #len
+                               + "\xDE\xAD" #check
+                               + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='testHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+        proto.datagramReceived("\x33\xFE" #source
+                               + "\xf0\x0f" #dest
+                               + "\x00\x05" #len
+                               + "\xDE\xAD" #check
+                               + "quux",
+                               partial=0,
+                               dest='dummy',
+                               source='otherHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+        proto.datagramReceived("\xA3\x02" #source
+                               + "\xB0\x50" #dest
+                               + "\x00\x06" #len
+                               + "\xDE\xAD" #check
+                               + "foobar",
+                               partial=0,
+                               dest='dummy',
+                               source='testHost',
+                               protocol='dummy',
+                               version='dummy',
+                               ihl='dummy',
+                               tos='dummy',
+                               tot_len='dummy',
+                               fragment_id='dummy',
+                               fragment_offset='dummy',
+                               dont_fragment='dummy',
+                               more_fragments='dummy',
+                               ttl='dummy',
+                               )
+
+        assert not p1.expecting, \
+               'Should not expect any more packets, but still want %r' % p1.expecting
+        assert not p2.expecting, \
+               'Should not expect any more packets, but still want %r' % p2.expecting
+
+    def testAddingBadProtos_WrongLevel(self):
+        """Adding a wrong level protocol raises an exception."""
+        e = rawudp.RawUDPProtocol()
+        try:
+            e.addProto(42, "silliness")
+        except TypeError, e:
+            if e.args == ('Added protocol must be an instance of DatagramProtocol',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+
+    def testAddingBadProtos_TooSmall(self):
+        """Adding a protocol with a negative number raises an exception."""
+        e = rawudp.RawUDPProtocol()
+        try:
+            e.addProto(-1, protocol.DatagramProtocol())
+        except TypeError, e:
+            if e.args == ('Added protocol must be positive or zero',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+
+    def testAddingBadProtos_TooBig(self):
+        """Adding a protocol with a number >=2**16 raises an exception."""
+        e = rawudp.RawUDPProtocol()
+        try:
+            e.addProto(2**16, protocol.DatagramProtocol())
+        except TypeError, e:
+            if e.args == ('Added protocol must fit in 16 bits',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
+
+    def testAddingBadProtos_TooBig2(self):
+        """Adding a protocol with a number >=2**16 raises an exception."""
+        e = rawudp.RawUDPProtocol()
+        try:
+            e.addProto(2**16+1, protocol.DatagramProtocol())
+        except TypeError, e:
+            if e.args == ('Added protocol must fit in 16 bits',):
+                pass
+            else:
+                raise
+        else:
+            raise AssertionError, 'addProto must raise an exception for bad protocols'
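
The escaped byte strings these tests feed to datagramReceived are hand-built 8-byte UDP headers; the inline comments name the four 16-bit big-endian fields (source port, destination port, length, checksum) followed by the payload. A small sketch showing that the bytes in testPacketParsing are just those fields packed with struct (Python 2; the values mirror the test's dummy data rather than a wire-correct UDP length or checksum):

    import struct

    # source port, destination port, length, checksum -- four big-endian
    # 16-bit fields, exactly the bytes spelled out in testPacketParsing
    header = struct.pack('!HHHH', 0x43A2, 0xF00F, 0x0006, 0xDEAD)
    assert header == '\x43\xa2\xf0\x0f\x00\x06\xde\xad'

    packet = header + 'foobar'   # what RawUDPProtocol.datagramReceived parses

The demultiplexing test then shows that RawUDPProtocol dispatches on the destination-port field: the handler added with addProto(0xF00F, p1) receives the packets sent to port 0xF00F, the one added with addProto(0xB050, p2) the packets sent to 0xB050.
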
diff --git a/ThirdParty/Twisted/twisted/pair/topfiles/NEWS b/ThirdParty/Twisted/twisted/pair/topfiles/NEWS
new file mode 100644
index 0000000..56c3acb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/topfiles/NEWS
@@ -0,0 +1,68 @@
+Twisted Pair 12.3.0 (2012-12-20)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Pair 12.2.0 (2012-08-26)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Pair 12.1.0 (2012-06-02)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Pair 12.0.0 (2012-02-10)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Pair 11.1.0 (2011-11-15)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Pair 11.0.0 (2011-04-01)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Pair 10.2.0 (2010-11-29)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Pair 10.1.0 (2010-06-27)
+================================
+
+No significant changes have been made for this release.
+
+
+Twisted Pair 10.0.0 (2010-03-01)
+================================
+
+Other
+-----
+ - #4170
+
+
+Twisted Pair 9.0.0 (2009-11-24)
+===============================
+
+Other
+-----
+ - #3540, #4050
+
+
+Pair 8.2.0 (2008-12-16)
+=======================
+
+No interesting changes since Twisted 8.0.
diff --git a/ThirdParty/Twisted/twisted/pair/topfiles/README b/ThirdParty/Twisted/twisted/pair/topfiles/README
new file mode 100644
index 0000000..a628903
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/topfiles/README
@@ -0,0 +1,4 @@
+Twisted Pair 12.3.0
+
+Twisted Pair depends on Twisted Core.  For TUN/TAP access, python-eunuchs
+(<http://pypi.python.org/pypi/python-eunuchs/0.0.0>) is also required.
diff --git a/ThirdParty/Twisted/twisted/pair/topfiles/setup.py b/ThirdParty/Twisted/twisted/pair/topfiles/setup.py
new file mode 100644
index 0000000..c42754f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/topfiles/setup.py
@@ -0,0 +1,28 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+try:
+    from twisted.python import dist
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+if __name__ == '__main__':
+    dist.setup(
+        twisted_subproject="pair",
+        # metadata
+        name="Twisted Pair",
+        description="Twisted Pair contains low-level networking support.",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python at twistedmatrix.com",
+        maintainer="Tommi Virtanen",
+        url="http://twistedmatrix.com/trac/wiki/TwistedPair",
+        license="MIT",
+        long_description="""
+Raw network packet parsing routines, including ethernet, IP and UDP
+packets, and tuntap support.
+""",
+        )
diff --git a/ThirdParty/Twisted/twisted/pair/tuntap.py b/ThirdParty/Twisted/twisted/pair/tuntap.py
new file mode 100644
index 0000000..e3ece5e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/pair/tuntap.py
@@ -0,0 +1,170 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+import errno, os
+from twisted.python import log, reflect, components
+from twisted.internet import base, fdesc, error
+from twisted.pair import ethernet, ip
+
+"""
+You need Eunuchs for twisted.pair.tuntap to work.
+
+Eunuchs is a library containing the missing manly parts of
+UNIX API for Python.
+
+Eunuchs is a library of Python extensions that complement the standard
+libraries in parts where full support for the UNIX API (or the Linux
+API) is missing.
+
+Most of the functions wrapped by Eunuchs are low-level, dirty, but
+absolutely necessary functions for real systems programming. The aim is
+to have the functions added to mainstream Python libraries.
+
+Current list of functions included:
+
+ - fchdir(2)
+ - recvmsg(2) and sendmsg(2), including use of cmsg(3)
+ - socketpair(2)
+ - support for TUN/TAP virtual network interfaces
+
+Eunuchs doesn't have a proper web home right now, but you can fetch
+the source from http://ftp.debian.org/debian/pool/main/e/eunuch
+-- debian users can just use 'apt-get install python-eunuchs'.
+
+"""
+from eunuchs.tuntap import opentuntap, TuntapPacketInfo, makePacketInfo
+
+class TuntapPort(base.BasePort):
+    """A Port that reads and writes packets from/to a TUN/TAP-device.
+
+    TODO: Share general start/stop etc implementation details with
+    twisted.internet.udp.Port.
+    """
+    maxThroughput = 256 * 1024 # max bytes we read in one eventloop iteration
+
+    def __init__(self, interface, proto, maxPacketSize=8192, reactor=None):
+        if components.implements(proto, ethernet.IEthernetProtocol):
+            self.ethernet = 1
+        else:
+            self.ethernet = 0
+            assert components.implements(proto, ip.IIPProtocol) # XXX: fix me
+        base.BasePort.__init__(self, reactor)
+        self.interface = interface
+        self.protocol = proto
+        self.maxPacketSize = maxPacketSize
+        self.setLogStr()
+
+    def __repr__(self):
+        return "<%s on %s>" % (self.protocol.__class__, self.interface)
+
+    def startListening(self):
+        """Create and bind my socket, and begin listening on it.
+
+        This is called on unserialization, and must be called after creating a
+        server to begin listening on the specified port.
+        """
+        self._bindSocket()
+        self._connectToProtocol()
+
+    def _bindSocket(self):
+        log.msg("%s starting on %s"%(self.protocol.__class__, self.interface))
+        try:
+            fd, name = opentuntap(name=self.interface,
+                                  ethernet=self.ethernet,
+                                  packetinfo=0)
+        except OSError, e:
+            raise error.CannotListenError, (None, self.interface, e)
+        fdesc.setNonBlocking(fd)
+        self.interface = name
+        self.connected = 1
+        self.fd = fd
+
+    def fileno(self):
+        return self.fd
+
+    def _connectToProtocol(self):
+        self.protocol.makeConnection(self)
+        self.startReading()
+
+    def doRead(self):
+        """Called when my socket is ready for reading."""
+        read = 0
+        while read < self.maxThroughput:
+            try:
+                data = os.read(self.fd, self.maxPacketSize)
+                read += len(data)
+#                pkt = TuntapPacketInfo(data)
+                self.protocol.datagramReceived(data,
+                                               partial=0 # pkt.isPartial(),
+                                               )
+            except OSError, e:
+                if e.errno in (errno.EWOULDBLOCK,):
+                    return
+                else:
+                    raise
+            except IOError, e:
+                if e.errno in (errno.EAGAIN, errno.EINTR):
+                    return
+                else:
+                    raise
+            except:
+                log.deferr()
+
+    def write(self, datagram):
+        """Write a datagram."""
+#        header = makePacketInfo(0, 0)
+        try:
+            return os.write(self.fd, datagram)
+        except IOError, e:
+            if e.errno == errno.EINTR:
+                return self.write(datagram)
+            elif e.errno == errno.EMSGSIZE:
+                raise error.MessageLengthError, "message too long"
+            elif e.errno == errno.ECONNREFUSED:
+                raise error.ConnectionRefusedError
+            else:
+                raise
+
+    def writeSequence(self, seq):
+        self.write("".join(seq))
+
+    def loseConnection(self):
+        """Stop accepting connections on this port.
+
+        This will shut down my socket and call self.connectionLost().
+        """
+        self.stopReading()
+        if self.connected:
+            from twisted.internet import reactor
+            reactor.callLater(0, self.connectionLost)
+
+    stopListening = loseConnection
+
+    def connectionLost(self, reason=None):
+        """Cleans up my socket.
+        """
+        log.msg('(Tuntap %s Closed)' % self.interface)
+        base.BasePort.connectionLost(self, reason)
+        if hasattr(self, "protocol"):
+            # we won't have the attribute in ConnectedPort, in cases
+            # where there was an error in the connection process
+            self.protocol.doStop()
+        self.connected = 0
+        os.close(self.fd)
+        del self.fd
+
+    def setLogStr(self):
+        self.logstr = reflect.qual(self.protocol.__class__) + " (TUNTAP)"
+
+    def logPrefix(self):
+        """Returns the name of my class, to prefix log entries with.
+        """
+        return self.logstr
+
+    def getHost(self):
+        """
+        Returns a tuple of ('TUNTAP', interface), indicating
+        the server's address.
+        """
+        return ('TUNTAP', self.interface)
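
TuntapPort above follows the usual reactor port pattern: construct it with an interface name and a protocol, call startListening() to open the TUN/TAP device, and doRead()/write() then shuttle raw packets between the file descriptor and the protocol. A minimal wiring sketch (a hypothetical example, not taken from this tree: it assumes python-eunuchs is installed, that a 'tap0' device exists and is accessible, and that ethernet.EthernetProtocol satisfies the IEthernetProtocol check in the constructor):

    from twisted.internet import reactor
    from twisted.pair import ethernet, tuntap

    eth = ethernet.EthernetProtocol()
    port = tuntap.TuntapPort(interface='tap0', proto=eth, reactor=reactor)
    port.startListening()    # opens the device, sets it non-blocking, starts reading
    reactor.run()
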
diff --git a/ThirdParty/Twisted/twisted/persisted/__init__.py b/ThirdParty/Twisted/twisted/persisted/__init__.py
new file mode 100644
index 0000000..a8a918b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/persisted/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Twisted Persisted: utilities for managing persistence.
+"""
diff --git a/ThirdParty/Twisted/twisted/persisted/aot.py b/ThirdParty/Twisted/twisted/persisted/aot.py
new file mode 100644
index 0000000..c0e0282
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/persisted/aot.py
@@ -0,0 +1,560 @@
+# -*- test-case-name: twisted.test.test_persisted -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+
+"""
+AOT: Abstract Object Trees
+The source-code-marshallin'est abstract-object-serializin'est persister
+this side of Marmalade!
+"""
+
+import types, copy_reg, tokenize, re
+
+from twisted.python import reflect, log
+from twisted.persisted import crefutil
+
+###########################
+# Abstract Object Classes #
+###########################
+
+#"\0" in a getSource means "insert variable-width indention here".
+#see `indentify'.
+
+class Named:
+    def __init__(self, name):
+        self.name = name
+
+class Class(Named):
+    def getSource(self):
+        return "Class(%r)" % self.name
+
+class Function(Named):
+    def getSource(self):
+        return "Function(%r)" % self.name
+
+class Module(Named):
+    def getSource(self):
+        return "Module(%r)" % self.name
+
+
+class InstanceMethod:
+    def __init__(self, name, klass, inst):
+        if not (isinstance(inst, Ref) or isinstance(inst, Instance) or isinstance(inst, Deref)):
+            raise TypeError("%s isn't an Instance, Ref, or Deref!" % inst)
+        self.name = name
+        self.klass = klass
+        self.instance = inst
+
+    def getSource(self):
+        return "InstanceMethod(%r, %r, \n\0%s)" % (self.name, self.klass, prettify(self.instance))
+
+
+class _NoStateObj:
+    pass
+NoStateObj = _NoStateObj()
+
+_SIMPLE_BUILTINS = [
+    types.StringType, types.UnicodeType, types.IntType, types.FloatType,
+    types.ComplexType, types.LongType, types.NoneType, types.SliceType,
+    types.EllipsisType]
+
+try:
+    _SIMPLE_BUILTINS.append(types.BooleanType)
+except AttributeError:
+    pass
+
+class Instance:
+    def __init__(self, className, __stateObj__=NoStateObj, **state):
+        if not isinstance(className, types.StringType):
+            raise TypeError("%s isn't a string!" % className)
+        self.klass = className
+        if __stateObj__ is not NoStateObj:
+            self.state = __stateObj__
+            self.stateIsDict = 0
+        else:
+            self.state = state
+            self.stateIsDict = 1
+
+    def getSource(self):
+        #XXX make state be foo=bar instead of a dict.
+        if self.stateIsDict:
+            stateDict = self.state
+        elif isinstance(self.state, Ref) and isinstance(self.state.obj, types.DictType):
+            stateDict = self.state.obj
+        else:
+            stateDict = None
+        if stateDict is not None:
+            try:
+                return "Instance(%r, %s)" % (self.klass, dictToKW(stateDict))
+            except NonFormattableDict:
+                return "Instance(%r, %s)" % (self.klass, prettify(stateDict))
+        return "Instance(%r, %s)" % (self.klass, prettify(self.state))
+
+class Ref:
+
+    def __init__(self, *args):
+        #blargh, lame.
+        if len(args) == 2:
+            self.refnum = args[0]
+            self.obj = args[1]
+        elif not args:
+            self.refnum = None
+            self.obj = None
+
+    def setRef(self, num):
+        if self.refnum:
+            raise ValueError("Error setting id %s, I already have %s" % (num, self.refnum))
+        self.refnum = num
+
+    def setObj(self, obj):
+        if self.obj:
+            raise ValueError("Error setting obj %s, I already have %s" % (obj, self.obj))
+        self.obj = obj
+
+    def getSource(self):
+        if self.obj is None:
+            raise RuntimeError("Don't try to display me before setting an object on me!")
+        if self.refnum:
+            return "Ref(%d, \n\0%s)" % (self.refnum, prettify(self.obj))
+        return prettify(self.obj)
+
+
+class Deref:
+    def __init__(self, num):
+        self.refnum = num
+
+    def getSource(self):
+        return "Deref(%d)" % self.refnum
+
+    __repr__ = getSource
+
+
+class Copyreg:
+    def __init__(self, loadfunc, state):
+        self.loadfunc = loadfunc
+        self.state = state
+
+    def getSource(self):
+        return "Copyreg(%r, %s)" % (self.loadfunc, prettify(self.state))
+
+
+
+###############
+# Marshalling #
+###############
+
+
+def getSource(ao):
+    """Pass me an AO, I'll return a nicely-formatted source representation."""
+    return indentify("app = " + prettify(ao))
+
+
+class NonFormattableDict(Exception):
+    """A dictionary was not formattable.
+    """
+
+r = re.compile('[a-zA-Z_][a-zA-Z0-9_]*$')
+
+def dictToKW(d):
+    out = []
+    items = d.items()
+    items.sort()
+    for k,v in items:
+        if not isinstance(k, types.StringType):
+            raise NonFormattableDict("%r ain't a string" % k)
+        if not r.match(k):
+            raise NonFormattableDict("%r ain't an identifier" % k)
+        out.append(
+            "\n\0%s=%s," % (k, prettify(v))
+            )
+    return ''.join(out)
+
+
+def prettify(obj):
+    if hasattr(obj, 'getSource'):
+        return obj.getSource()
+    else:
+        #basic type
+        t = type(obj)
+
+        if t in _SIMPLE_BUILTINS:
+            return repr(obj)
+
+        elif t is types.DictType:
+            out = ['{']
+            for k,v in obj.items():
+                out.append('\n\0%s: %s,' % (prettify(k), prettify(v)))
+            out.append(len(obj) and '\n\0}' or '}')
+            return ''.join(out)
+
+        elif t is types.ListType:
+            out = ["["]
+            for x in obj:
+                out.append('\n\0%s,' % prettify(x))
+            out.append(len(obj) and '\n\0]' or ']')
+            return ''.join(out)
+
+        elif t is types.TupleType:
+            out = ["("]
+            for x in obj:
+                out.append('\n\0%s,' % prettify(x))
+            out.append(len(obj) and '\n\0)' or ')')
+            return ''.join(out)
+        else:
+            raise TypeError("Unsupported type %s when trying to prettify %s." % (t, obj))
+
+def indentify(s):
+    out = []
+    stack = []
+    def eater(type, val, r, c, l, out=out, stack=stack):
+        #import sys
+        #sys.stdout.write(val)
+        if val in ['[', '(', '{']:
+            stack.append(val)
+        elif val in [']', ')', '}']:
+            stack.pop()
+        if val == '\0':
+            out.append('  '*len(stack))
+        else:
+            out.append(val)
+    l = ['', s]
+    tokenize.tokenize(l.pop, eater)
+    return ''.join(out)
+
+
+
+
+
+###########
+# Unjelly #
+###########
+
+def unjellyFromAOT(aot):
+    """
+    Pass me an Abstract Object Tree, and I'll unjelly it for you.
+    """
+    return AOTUnjellier().unjelly(aot)
+
+def unjellyFromSource(stringOrFile):
+    """
+    Pass me a string of code or a filename that defines an 'app' variable (in
+    terms of Abstract Objects!), and I'll execute it and unjelly the resulting
+    AOT for you, returning a newly unpersisted Application object!
+    """
+
+    ns = {"Instance": Instance,
+          "InstanceMethod": InstanceMethod,
+          "Class": Class,
+          "Function": Function,
+          "Module": Module,
+          "Ref": Ref,
+          "Deref": Deref,
+          "Copyreg": Copyreg,
+          }
+
+    if hasattr(stringOrFile, "read"):
+        exec stringOrFile.read() in ns
+    else:
+        exec stringOrFile in ns
+
+    if 'app' in ns:
+        return unjellyFromAOT(ns['app'])
+    else:
+        raise ValueError("%s needs to define an 'app', it didn't!" % stringOrFile)
+
+
+class AOTUnjellier:
+    """I handle the unjellying of an Abstract Object Tree.
+    See AOTUnjellier.unjellyAO
+    """
+    def __init__(self):
+        self.references = {}
+        self.stack = []
+        self.afterUnjelly = []
+
+    ##
+    # unjelly helpers (copied pretty much directly from (now deleted) marmalade)
+    ##
+    def unjellyLater(self, node):
+        """Unjelly a node, later.
+        """
+        d = crefutil._Defer()
+        self.unjellyInto(d, 0, node)
+        return d
+
+    def unjellyInto(self, obj, loc, ao):
+        """Utility method for unjellying one object into another.
+        This automates the handling of backreferences.
+        """
+        o = self.unjellyAO(ao)
+        obj[loc] = o
+        if isinstance(o, crefutil.NotKnown):
+            o.addDependant(obj, loc)
+        return o
+
+    def callAfter(self, callable, result):
+        if isinstance(result, crefutil.NotKnown):
+            l = [None]
+            result.addDependant(l, 1)
+        else:
+            l = [result]
+        self.afterUnjelly.append((callable, l))
+
+    def unjellyAttribute(self, instance, attrName, ao):
+        #XXX this is unused????
+        """Utility method for unjellying into instances of attributes.
+
+        Use this rather than unjellyAO unless you like surprising bugs!
+        Alternatively, you can use unjellyInto on your instance's __dict__.
+        """
+        self.unjellyInto(instance.__dict__, attrName, ao)
+
+    def unjellyAO(self, ao):
+        """Unjelly an Abstract Object and everything it contains.
+        I return the real object.
+        """
+        self.stack.append(ao)
+        t = type(ao)
+        if t is types.InstanceType:
+            #Abstract Objects
+            c = ao.__class__
+            if c is Module:
+                return reflect.namedModule(ao.name)
+
+            elif c in [Class, Function] or issubclass(c, type):
+                return reflect.namedObject(ao.name)
+
+            elif c is InstanceMethod:
+                im_name = ao.name
+                im_class = reflect.namedObject(ao.klass)
+                im_self = self.unjellyAO(ao.instance)
+                if im_name in im_class.__dict__:
+                    if im_self is None:
+                        return getattr(im_class, im_name)
+                    elif isinstance(im_self, crefutil.NotKnown):
+                        return crefutil._InstanceMethod(im_name, im_self, im_class)
+                    else:
+                        return types.MethodType(im_class.__dict__[im_name],
+                                                im_self,
+                                                im_class)
+                else:
+                    raise TypeError("instance method changed")
+
+            elif c is Instance:
+                klass = reflect.namedObject(ao.klass)
+                state = self.unjellyAO(ao.state)
+                if hasattr(klass, "__setstate__"):
+                    inst = types.InstanceType(klass, {})
+                    self.callAfter(inst.__setstate__, state)
+                else:
+                    inst = types.InstanceType(klass, state)
+                return inst
+
+            elif c is Ref:
+                o = self.unjellyAO(ao.obj) #THIS IS CHANGING THE REF OMG
+                refkey = ao.refnum
+                ref = self.references.get(refkey)
+                if ref is None:
+                    self.references[refkey] = o
+                elif isinstance(ref, crefutil.NotKnown):
+                    ref.resolveDependants(o)
+                    self.references[refkey] = o
+                elif refkey is None:
+                    # This happens when you're unjellying from an AOT not read from source
+                    pass
+                else:
+                    raise ValueError("Multiple references with the same ID: %s, %s, %s!" % (ref, refkey, ao))
+                return o
+
+            elif c is Deref:
+                num = ao.refnum
+                ref = self.references.get(num)
+                if ref is None:
+                    der = crefutil._Dereference(num)
+                    self.references[num] = der
+                    return der
+                return ref
+
+            elif c is Copyreg:
+                loadfunc = reflect.namedObject(ao.loadfunc)
+                d = self.unjellyLater(ao.state).addCallback(
+                    lambda result, _l: _l(*result), loadfunc)
+                return d
+
+        #Types
+
+        elif t in _SIMPLE_BUILTINS:
+            return ao
+
+        elif t is types.ListType:
+            l = []
+            for x in ao:
+                l.append(None)
+                self.unjellyInto(l, len(l)-1, x)
+            return l
+
+        elif t is types.TupleType:
+            l = []
+            tuple_ = tuple
+            for x in ao:
+                l.append(None)
+                if isinstance(self.unjellyInto(l, len(l)-1, x), crefutil.NotKnown):
+                    tuple_ = crefutil._Tuple
+            return tuple_(l)
+
+        elif t is types.DictType:
+            d = {}
+            for k,v in ao.items():
+                kvd = crefutil._DictKeyAndValue(d)
+                self.unjellyInto(kvd, 0, k)
+                self.unjellyInto(kvd, 1, v)
+            return d
+
+        else:
+            raise TypeError("Unsupported AOT type: %s" % t)
+
+        del self.stack[-1]
+
+
+    def unjelly(self, ao):
+        try:
+            l = [None]
+            self.unjellyInto(l, 0, ao)
+            for func, v in self.afterUnjelly:
+                func(v[0])
+            return l[0]
+        except:
+            log.msg("Error jellying object! Stacktrace follows::")
+            log.msg("\n".join(map(repr, self.stack)))
+            raise
+#########
+# Jelly #
+#########
+
+
+def jellyToAOT(obj):
+    """Convert an object to an Abstract Object Tree."""
+    return AOTJellier().jelly(obj)
+
+def jellyToSource(obj, file=None):
+    """
+    Pass me an object and, optionally, a file object.
+    I'll convert the object to an AOT and either return it (if no file was
+    specified) or write it to the file.
+    """
+
+    aot = jellyToAOT(obj)
+    if file:
+        file.write(getSource(aot))
+    else:
+        return getSource(aot)
+
+
+class AOTJellier:
+    def __init__(self):
+        # dict of {id(obj): (obj, node)}
+        self.prepared = {}
+        self._ref_id = 0
+        self.stack = []
+
+    def prepareForRef(self, aoref, object):
+        """I prepare an object for later referencing, by storing its id()
+        and its _AORef in a cache."""
+        self.prepared[id(object)] = aoref
+
+    def jellyToAO(self, obj):
+        """I turn an object into an AOT and return it."""
+        objType = type(obj)
+        self.stack.append(repr(obj))
+
+        #immutable: We don't care if these have multiple refs!
+        if objType in _SIMPLE_BUILTINS:
+            retval = obj
+
+        elif objType is types.MethodType:
+            # TODO: make methods 'prefer' not to jelly the object internally,
+            # so that the object will show up where it's referenced first NOT
+            # by a method.
+            retval = InstanceMethod(obj.im_func.__name__, reflect.qual(obj.im_class),
+                                    self.jellyToAO(obj.im_self))
+
+        elif objType is types.ModuleType:
+            retval = Module(obj.__name__)
+
+        elif objType is types.ClassType:
+            retval = Class(reflect.qual(obj))
+
+        elif issubclass(objType, type):
+            retval = Class(reflect.qual(obj))
+
+        elif objType is types.FunctionType:
+            retval = Function(reflect.fullFuncName(obj))
+
+        else: #mutable! gotta watch for refs.
+
+#Marmalade had the nicety of being able to just stick a 'reference' attribute
+#on any Node object that was referenced, but in AOT, the referenced object
+#is *inside* of a Ref call (Ref(num, obj) instead of
+#<objtype ... reference="1">). The problem is, especially for built-in types,
+#I can't just assign some attribute to them to give them a refnum. So, I have
+#to "wrap" a Ref(..) around them later -- that's why I put *everything* that's
+#mutable inside one. The Ref() class will only print the "Ref(..)" around an
+#object if it has a Reference explicitly attached.
+
+            if id(obj) in self.prepared:
+                oldRef = self.prepared[id(obj)]
+                if oldRef.refnum:
+                    # it's been referenced already
+                    key = oldRef.refnum
+                else:
+                    # it hasn't been referenced yet
+                    self._ref_id = self._ref_id + 1
+                    key = self._ref_id
+                    oldRef.setRef(key)
+                return Deref(key)
+
+            retval = Ref()
+            self.prepareForRef(retval, obj)
+
+            if objType is types.ListType:
+                retval.setObj(map(self.jellyToAO, obj)) #hah!
+
+            elif objType is types.TupleType:
+                retval.setObj(tuple(map(self.jellyToAO, obj)))
+
+            elif objType is types.DictionaryType:
+                d = {}
+                for k,v in obj.items():
+                    d[self.jellyToAO(k)] = self.jellyToAO(v)
+                retval.setObj(d)
+
+            elif objType is types.InstanceType:
+                if hasattr(obj, "__getstate__"):
+                    state = self.jellyToAO(obj.__getstate__())
+                else:
+                    state = self.jellyToAO(obj.__dict__)
+                retval.setObj(Instance(reflect.qual(obj.__class__), state))
+
+            elif objType in copy_reg.dispatch_table:
+                unpickleFunc, state = copy_reg.dispatch_table[objType](obj)
+
+                retval.setObj(Copyreg( reflect.fullFuncName(unpickleFunc),
+                                       self.jellyToAO(state)))
+
+            else:
+                raise TypeError("Unsupported type: %s" % objType.__name__)
+
+        del self.stack[-1]
+        return retval
+
+    def jelly(self, obj):
+        try:
+            ao = self.jellyToAO(obj)
+            return ao
+        except:
+            log.msg("Error jellying object! Stacktrace follows::")
+            log.msg('\n'.join(self.stack))
+            raise
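
The public entry points in aot.py are jellyToAOT()/jellyToSource() on the way out and unjellyFromAOT()/unjellyFromSource() on the way back; the source form is a Python snippet that assigns the tree to a variable named 'app', which is why unjellyFromSource insists on finding one. A small round trip using only the functions defined above (Python 2; the sample dict is arbitrary):

    from twisted.persisted import aot

    original = {'name': 'example', 'values': [1, 2, 3], 'nested': {'ok': 1}}

    src = aot.jellyToSource(original)     # returns "app = ..." source text
    copy = aot.unjellyFromSource(src)     # exec the source and rebuild the object

    assert copy == original
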
diff --git a/ThirdParty/Twisted/twisted/persisted/crefutil.py b/ThirdParty/Twisted/twisted/persisted/crefutil.py
new file mode 100644
index 0000000..39d7eb9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/persisted/crefutil.py
@@ -0,0 +1,163 @@
+# -*- test-case-name: twisted.test.test_persisted -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Utility classes for dealing with circular references.
+"""
+
+import types
+
+from twisted.python import log, reflect
+
+
+class NotKnown:
+    def __init__(self):
+        self.dependants = []
+        self.resolved = 0
+
+    def addDependant(self, mutableObject, key):
+        assert not self.resolved
+        self.dependants.append( (mutableObject, key) )
+
+    resolvedObject = None
+
+    def resolveDependants(self, newObject):
+        self.resolved = 1
+        self.resolvedObject = newObject
+        for mut, key in self.dependants:
+            mut[key] = newObject
+            if isinstance(newObject, NotKnown):
+                newObject.addDependant(mut, key)
+
+    def __hash__(self):
+        assert 0, "I am not to be used as a dictionary key."
+
+
+
+class _Container(NotKnown):
+    """
+    Helper class to resolve circular references on container objects.
+    """
+
+    def __init__(self, l, containerType):
+        """
+        @param l: The list of objects which may contain some not yet referenced
+        objects.
+
+        @param containerType: A type of container objects (e.g., C{tuple} or
+            C{set}).
+        """
+        NotKnown.__init__(self)
+        self.containerType = containerType
+        self.l = l
+        self.locs = range(len(l))
+        for idx in xrange(len(l)):
+            if not isinstance(l[idx], NotKnown):
+                self.locs.remove(idx)
+            else:
+                l[idx].addDependant(self, idx)
+        if not self.locs:
+            self.resolveDependants(self.containerType(self.l))
+
+
+    def __setitem__(self, n, obj):
+        """
+        Change the value of one contained object, and resolve references if
+        all objects have been referenced.
+        """
+        self.l[n] = obj
+        if not isinstance(obj, NotKnown):
+            self.locs.remove(n)
+            if not self.locs:
+                self.resolveDependants(self.containerType(self.l))
+
+
+
+class _Tuple(_Container):
+    """
+    Manage a tuple containing circular references. Deprecated: use C{_Container}
+    instead.
+    """
+
+    def __init__(self, l):
+        """
+        @param l: The list of objects which may contain some not yet referenced
+        objects.
+        """
+        _Container.__init__(self, l, tuple)
+
+
+
+class _InstanceMethod(NotKnown):
+    def __init__(self, im_name, im_self, im_class):
+        NotKnown.__init__(self)
+        self.my_class = im_class
+        self.name = im_name
+        # im_self _must_ be a NotKnown here, since we register as one of its dependants
+        im_self.addDependant(self, 0)
+
+    def __call__(self, *args, **kw):
+        import traceback
+        log.msg('instance method %s.%s' % (reflect.qual(self.my_class), self.name))
+        log.msg('being called with %r %r' % (args, kw))
+        traceback.print_stack(file=log.logfile)
+        assert 0
+
+    def __setitem__(self, n, obj):
+        assert n == 0, "only zero index allowed"
+        if not isinstance(obj, NotKnown):
+            method = types.MethodType(self.my_class.__dict__[self.name],
+                                      obj, self.my_class)
+            self.resolveDependants(method)
+
+class _DictKeyAndValue:
+    def __init__(self, dict):
+        self.dict = dict
+    def __setitem__(self, n, obj):
+        if n not in (1, 0):
+            raise RuntimeError("DictKeyAndValue should only ever be called with 0 or 1")
+        if n: # value
+            self.value = obj
+        else:
+            self.key = obj
+        if hasattr(self, "key") and hasattr(self, "value"):
+            self.dict[self.key] = self.value
+
+
+class _Dereference(NotKnown):
+    def __init__(self, id):
+        NotKnown.__init__(self)
+        self.id = id
+
+
+from twisted.internet.defer import Deferred
+
+class _Catcher:
+    def catch(self, value):
+        self.value = value
+
+class _Defer(Deferred, NotKnown):
+    def __init__(self):
+        Deferred.__init__(self)
+        NotKnown.__init__(self)
+        self.pause()
+
+    wasset = 0
+
+    def __setitem__(self, n, obj):
+        if self.wasset:
+            raise RuntimeError('setitem should only be called once, setting %r to %r' % (n, obj))
+        else:
+            self.wasset = 1
+        self.callback(obj)
+
+    def addDependant(self, dep, key):
+        # by the time I'm adding a dependant, I'm *not* adding any more
+        # callbacks
+        NotKnown.addDependant(self, dep, key)
+        self.unpause()
+        resolved = self.result
+        self.resolveDependants(resolved)
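
The mechanism throughout crefutil is the same: a NotKnown placeholder sits in a container slot until the real object shows up, and resolveDependants() then patches every slot that registered itself via addDependant(). A toy illustration of just that mechanism, using the base class directly rather than the way aot.py drives it:

    from twisted.persisted.crefutil import NotKnown

    pending = NotKnown()
    container = [None, 'b', None]
    pending.addDependant(container, 0)   # slot 0 wants the eventual value
    pending.addDependant(container, 2)   # so does slot 2

    pending.resolveDependants('a')       # fill in every registered slot
    assert container == ['a', 'b', 'a']
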
diff --git a/ThirdParty/Twisted/twisted/persisted/dirdbm.py b/ThirdParty/Twisted/twisted/persisted/dirdbm.py
new file mode 100644
index 0000000..26bbc1b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/persisted/dirdbm.py
@@ -0,0 +1,358 @@
+# -*- test-case-name: twisted.test.test_dirdbm -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+
+"""
+DBM-style interface to a directory.
+
+Each key is stored as a single file.  This is not expected to be very fast or
+efficient, but it's good for easy debugging.
+
+DirDBMs are *not* thread-safe, they should only be accessed by one thread at
+a time.
+
+No files should be placed in the working directory of a DirDBM save those
+created by the DirDBM itself!
+
+Maintainer: Itamar Shtull-Trauring
+"""
+
+
+import os
+import types
+import base64
+import glob
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+try:
+    _open
+except NameError:
+    _open = open
+
+
+class DirDBM:
+    """A directory with a DBM interface.
+    
+    This class presents a hash-like interface to a directory of small,
+    flat files. It can only use strings as keys or values.
+    """
+    
+    def __init__(self, name):
+        """
+        @type name: str
+        @param name: Base path to use for the directory storage.
+        """
+        self.dname = os.path.abspath(name)
+        if not os.path.isdir(self.dname):
+            os.mkdir(self.dname)
+        else:
+            # Run recovery, in case we crashed. we delete all files ending
+            # with ".new". Then we find all files who end with ".rpl". If a
+            # corresponding file exists without ".rpl", we assume the write
+            # failed and delete the ".rpl" file. If only a ".rpl" exist we
+            # assume the program crashed right after deleting the old entry
+            # but before renaming the replacement entry.
+            #
+            # NOTE: '.' is NOT in the base64 alphabet!
+            for f in glob.glob(os.path.join(self.dname, "*.new")):
+                os.remove(f)
+            replacements = glob.glob(os.path.join(self.dname, "*.rpl"))
+            for f in replacements:
+                old = f[:-4]
+                if os.path.exists(old):
+                    os.remove(f)
+                else:
+                    os.rename(f, old)
+    
+    def _encode(self, k):
+        """Encode a key so it can be used as a filename.
+        """
+        # NOTE: '_' is NOT in the base64 alphabet!
+        return base64.encodestring(k).replace('\n', '_').replace("/", "-")
+    
+    def _decode(self, k):
+        """Decode a filename to get the key.
+        """
+        return base64.decodestring(k.replace('_', '\n').replace("-", "/"))
+    
+    def _readFile(self, path):
+        """Read in the contents of a file.
+        
+        Override in subclasses to e.g. provide transparently encrypted dirdbm.
+        """
+        f = _open(path, "rb")
+        s = f.read()
+        f.close()
+        return s
+    
+    def _writeFile(self, path, data):
+        """Write data to a file.
+        
+        Override in subclasses to e.g. provide transparently encrypted dirdbm.
+        """
+        f = _open(path, "wb")
+        f.write(data)
+        f.flush()
+        f.close()
+    
+    def __len__(self):
+        """
+        @return: The number of key/value pairs in this dirdbm
+        """
+        return len(os.listdir(self.dname))
+
+    def __setitem__(self, k, v):
+        """
+        C{dirdbm[k] = v}
+        Create or modify a textfile in this directory
+
+        @type k: str
+        @param k: key to set
+        
+        @type v: str
+        @param v: value to associate with C{k}
+        """
+        assert type(k) == types.StringType, "DirDBM key must be a string"
+        assert type(v) == types.StringType, "DirDBM value must be a string"
+        k = self._encode(k)
+        
+        # we create a new file with extension .new, write the data to it, and
+        # if the write succeeds delete the old file and rename the new one.
+        old = os.path.join(self.dname, k)
+        if os.path.exists(old):
+            new = old + ".rpl" # replacement entry
+        else:
+            new = old + ".new" # new entry
+        try:
+            self._writeFile(new, v)
+        except:
+            os.remove(new)
+            raise
+        else:
+            if os.path.exists(old): os.remove(old)
+            os.rename(new, old)
+
+    def __getitem__(self, k):
+        """
+        C{dirdbm[k]}
+        Get the contents of a file in this directory as a string.
+        
+        @type k: str
+        @param k: key to lookup
+        
+        @return: The value associated with C{k}
+        @raise KeyError: Raised when there is no such key
+        """
+        assert type(k) == types.StringType, "DirDBM key must be a string"
+        path = os.path.join(self.dname, self._encode(k))
+        try:
+            return self._readFile(path)
+        except:
+            raise KeyError, k
+
+    def __delitem__(self, k):
+        """
+        C{del dirdbm[foo]}
+        Delete a file in this directory.
+        
+        @type k: str
+        @param k: key to delete
+        
+        @raise KeyError: Raised when there is no such key
+        """
+        assert type(k) == types.StringType, "DirDBM key must be a string"
+        k = self._encode(k)
+        try:    os.remove(os.path.join(self.dname, k))
+        except (OSError, IOError): raise KeyError(self._decode(k))
+
+    def keys(self):
+        """
+        @return: a C{list} of filenames (keys).
+        """
+        return map(self._decode, os.listdir(self.dname))
+
+    def values(self):
+        """
+        @return: a C{list} of file-contents (values).
+        """
+        vals = []
+        keys = self.keys()
+        for key in keys:
+            vals.append(self[key])
+        return vals
+
+    def items(self):
+        """
+        @return: a C{list} of 2-tuples containing key/value pairs.
+        """
+        items = []
+        keys = self.keys()
+        for key in keys:
+            items.append((key, self[key]))
+        return items
+
+    def has_key(self, key):
+        """
+        @type key: str
+        @param key: The key to test
+        
+        @return: A true value if this dirdbm has the specified key, a false
+        value otherwise.
+        """
+        assert type(key) == types.StringType, "DirDBM key must be a string"
+        key = self._encode(key)
+        return os.path.isfile(os.path.join(self.dname, key))
+
+    def setdefault(self, key, value):
+        """
+        @type key: str
+        @param key: The key to lookup
+        
+        @param value: The value to associate with key if key is not already
+        associated with a value.
+        """
+        if not self.has_key(key):
+            self[key] = value
+            return value
+        return self[key]
+
+    def get(self, key, default = None):
+        """
+        @type key: str
+        @param key: The key to lookup
+        
+        @param default: The value to return if the given key does not exist
+        
+        @return: The value associated with C{key} or C{default} if not
+        C{self.has_key(key)}
+        """
+        if self.has_key(key):
+            return self[key]
+        else:
+            return default
+
+    def __contains__(self, key):
+        """
+        C{key in dirdbm}
+
+        @type key: str
+        @param key: The key to test
+                
+        @return: A true value if C{self.has_key(key)}, a false value otherwise.
+        """
+        assert type(key) == types.StringType, "DirDBM key must be a string"
+        key = self._encode(key)
+        return os.path.isfile(os.path.join(self.dname, key))
+
+    def update(self, dict):
+        """
+        Add all the key/value pairs in C{dict} to this dirdbm.  Any conflicting
+        keys will be overwritten with the values from C{dict}.
+
+        @type dict: mapping
+        @param dict: A mapping of key/value pairs to add to this dirdbm.
+        """
+        for key, val in dict.items():
+            self[key]=val
+            
+    def copyTo(self, path):
+        """
+        Copy the contents of this dirdbm to the dirdbm at C{path}.
+        
+        @type path: C{str}
+        @param path: The path of the dirdbm to copy to.  If a dirdbm
+        exists at the destination path, it is cleared first.
+        
+        @rtype: C{DirDBM}
+        @return: The dirdbm this dirdbm was copied to.
+        """
+        path = os.path.abspath(path)
+        assert path != self.dname
+        
+        d = self.__class__(path)
+        d.clear()
+        for k in self.keys():
+            d[k] = self[k]
+        return d
+
+    def clear(self):
+        """
+        Delete all key/value pairs in this dirdbm.
+        """
+        for k in self.keys():
+            del self[k]
+
+    def close(self):
+        """
+        Close this dbm: no-op, for dbm-style interface compliance.
+        """
+
+    def getModificationTime(self, key):
+        """
+        Returns modification time of an entry.
+        
+        @return: Last modification date (seconds since epoch) of entry C{key}
+        @raise KeyError: Raised when there is no such key
+        """
+        assert type(key) == types.StringType, "DirDBM key must be a string"
+        path = os.path.join(self.dname, self._encode(key))
+        if os.path.isfile(path):
+            return os.path.getmtime(path)
+        else:
+            raise KeyError, key
+
+
+class Shelf(DirDBM):
+    """A directory with a DBM shelf interface.
+    
+    This class presents a hash-like interface to a directory of small,
+    flat files. Keys must be strings, but values can be any given object.
+    """
+    
+    def __setitem__(self, k, v):
+        """
+        C{shelf[foo] = bar}
+        Create or modify a textfile in this directory.
+
+        @type k: str
+        @param k: The key to set
+
+        @param v: The value to associate with C{key}
+        """
+        v = pickle.dumps(v)
+        DirDBM.__setitem__(self, k, v)
+
+    def __getitem__(self, k):
+        """
+        C{dirdbm[foo]}
+        Get and unpickle the contents of a file in this directory.
+        
+        @type k: str
+        @param k: The key to lookup
+        
+        @return: The value associated with the given key
+        @raise KeyError: Raised if the given key does not exist
+        """
+        return pickle.loads(DirDBM.__getitem__(self, k))
+
+
+def open(file, flag = None, mode = None):
+    """
+    This is for 'anydbm' compatibility.
+    
+    @param file: The parameter to pass to the DirDBM constructor.
+
+    @param flag: ignored
+    @param mode: ignored
+    """
+    return DirDBM(file)
+
+
+__all__ = ["open", "DirDBM", "Shelf"]
diff --git a/ThirdParty/Twisted/twisted/persisted/sob.py b/ThirdParty/Twisted/twisted/persisted/sob.py
new file mode 100644
index 0000000..2ba2e49
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/persisted/sob.py
@@ -0,0 +1,227 @@
+# -*- test-case-name: twisted.test.test_sob -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+"""
+Save and load Small OBjects to and from files, using various formats.
+
+Maintainer: Moshe Zadka
+"""
+
+import os, sys
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+from twisted.python import log, runtime
+from twisted.python.hashlib import md5
+from twisted.persisted import styles
+from zope.interface import implements, Interface
+
+# Note:
+# These encrypt/decrypt functions only work for data formats
+# which are immune to having spaces tucked at the end.
+# All of the data formats that this module saves satisfy that condition.
+def _encrypt(passphrase, data):
+    from Crypto.Cipher import AES as cipher
+    leftover = len(data) % cipher.block_size
+    if leftover:
+        data += ' '*(cipher.block_size - leftover)
+    return cipher.new(md5(passphrase).digest()[:16]).encrypt(data)
+
+def _decrypt(passphrase, data):
+    from Crypto.Cipher import AES
+    return AES.new(md5(passphrase).digest()[:16]).decrypt(data)
+
+
+class IPersistable(Interface):
+
+    """An object which can be saved in several formats to a file"""
+
+    def setStyle(style):
+        """Set desired format.
+
+        @type style: string (one of 'pickle' or 'source')
+        """
+
+    def save(tag=None, filename=None, passphrase=None):
+        """Save object to file.
+
+        @type tag: string
+        @type filename: string
+        @type passphrase: string
+        """
+
+
+class Persistent:
+
+    implements(IPersistable)
+
+    style = "pickle"
+
+    def __init__(self, original, name):
+        self.original = original
+        self.name = name
+
+    def setStyle(self, style):
+        """Set desired format.
+
+        @type style: string (one of 'pickle' or 'source')
+        """
+        self.style = style
+
+    def _getFilename(self, filename, ext, tag):
+        if filename:
+            finalname = filename
+            filename = finalname + "-2"
+        elif tag:
+            filename = "%s-%s-2.%s" % (self.name, tag, ext)
+            finalname = "%s-%s.%s" % (self.name, tag, ext)
+        else:
+            filename = "%s-2.%s" % (self.name, ext)
+            finalname = "%s.%s" % (self.name, ext)
+        return finalname, filename
+
+    def _saveTemp(self, filename, passphrase, dumpFunc):
+        f = open(filename, 'wb')
+        if passphrase is None:
+            dumpFunc(self.original, f)
+        else:
+            s = StringIO.StringIO()
+            dumpFunc(self.original, s)
+            f.write(_encrypt(passphrase, s.getvalue()))
+        f.close()
+
+    def _getStyle(self):
+        if self.style == "source":
+            from twisted.persisted.aot import jellyToSource as dumpFunc
+            ext = "tas"
+        else:
+            def dumpFunc(obj, file):
+                pickle.dump(obj, file, 2)
+            ext = "tap"
+        return ext, dumpFunc
+
+    def save(self, tag=None, filename=None, passphrase=None):
+        """Save object to file.
+
+        @type tag: string
+        @type filename: string
+        @type passphrase: string
+        """
+        ext, dumpFunc = self._getStyle()
+        if passphrase:
+            ext = 'e' + ext
+        finalname, filename = self._getFilename(filename, ext, tag)
+        log.msg("Saving "+self.name+" application to "+finalname+"...")
+        self._saveTemp(filename, passphrase, dumpFunc)
+        if runtime.platformType == "win32" and os.path.isfile(finalname):
+            os.remove(finalname)
+        os.rename(filename, finalname)
+        log.msg("Saved.")
+
+# "Persistant" has been present since 1.0.7, so retain it for compatibility
+Persistant = Persistent
+
+class _EverythingEphemeral(styles.Ephemeral):
+
+    initRun = 0
+
+    def __init__(self, mainMod):
+        """
+        @param mainMod: The '__main__' module that this class will proxy.
+        """
+        self.mainMod = mainMod
+
+    def __getattr__(self, key):
+        try:
+            return getattr(self.mainMod, key)
+        except AttributeError:
+            if self.initRun:
+                raise
+            else:
+                log.msg("Warning!  Loading from __main__: %s" % key)
+                return styles.Ephemeral()
+
+
+def load(filename, style, passphrase=None):
+    """Load an object from a file.
+
+    Deserialize an object from a file. The file can be encrypted.
+
+    @param filename: string
+    @param style: string (one of 'pickle' or 'source')
+    @param passphrase: string
+    """
+    mode = 'r'
+    if style=='source':
+        from twisted.persisted.aot import unjellyFromSource as _load
+    else:
+        _load, mode = pickle.load, 'rb'
+    if passphrase:
+        fp = StringIO.StringIO(_decrypt(passphrase,
+                                        open(filename, 'rb').read()))
+    else:
+        fp = open(filename, mode)
+    ee = _EverythingEphemeral(sys.modules['__main__'])
+    sys.modules['__main__'] = ee
+    ee.initRun = 1
+    try:
+        value = _load(fp)
+    finally:
+        # Restore __main__ whether or not an exception was raised.
+        sys.modules['__main__'] = ee.mainMod
+
+    styles.doUpgrade()
+    ee.initRun = 0
+    persistable = IPersistable(value, None)
+    if persistable is not None:
+        persistable.setStyle(style)
+    return value
+
+
+def loadValueFromFile(filename, variable, passphrase=None):
+    """Load the value of a variable in a Python file.
+
+    Run the contents of the file, after decrypting if C{passphrase} is
+    given, in a namespace and return the value of the variable
+    named C{variable}.
+
+    @param filename: string
+    @param variable: string
+    @param passphrase: string
+    """
+    if passphrase:
+        mode = 'rb'
+    else:
+        mode = 'r'
+    fileObj = open(filename, mode)
+    d = {'__file__': filename}
+    if passphrase:
+        data = fileObj.read()
+        data = _decrypt(passphrase, data)
+        exec data in d, d
+    else:
+        exec fileObj in d, d
+    value = d[variable]
+    return value
+
+def guessType(filename):
+    ext = os.path.splitext(filename)[1]
+    return {
+        '.tac':  'python',
+        '.etac':  'python',
+        '.py':  'python',
+        '.tap': 'pickle',
+        '.etap': 'pickle',
+        '.tas': 'source',
+        '.etas': 'source',
+    }[ext]
+
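+# A minimal usage sketch tying the pieces above together; the name 'myapp',
+# the persisted mapping and 'settings.py' are illustrative:
+#
+#     p = Persistent({'counter': 1}, 'myapp')
+#     p.setStyle('pickle')
+#     p.save()                                    # writes 'myapp.tap'
+#     style = guessType('myapp.tap')              # -> 'pickle'
+#     restored = load('myapp.tap', style)         # -> {'counter': 1}
+#     timeout = loadValueFromFile('settings.py', 'timeout')   # runs the file,
+#                                                 # returns its 'timeout' binding
+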
+__all__ = ['loadValueFromFile', 'load', 'Persistent', 'Persistant',
+           'IPersistable', 'guessType']
diff --git a/ThirdParty/Twisted/twisted/persisted/styles.py b/ThirdParty/Twisted/twisted/persisted/styles.py
new file mode 100644
index 0000000..e3ca39b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/persisted/styles.py
@@ -0,0 +1,262 @@
+# -*- test-case-name: twisted.test.test_persisted -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+
+"""
+Different styles of persisted objects.
+"""
+
+# System Imports
+import types
+import copy_reg
+import copy
+import inspect
+import sys
+
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+
+# Twisted Imports
+from twisted.python import log
+from twisted.python import reflect
+
+oldModules = {}
+
+## First, let's register support for some stuff that really ought to
+## be registerable...
+
+def pickleMethod(method):
+    'support function for copy_reg to pickle method refs'
+    return unpickleMethod, (method.im_func.__name__,
+                             method.im_self,
+                             method.im_class)
+
+def unpickleMethod(im_name,
+                    im_self,
+                    im_class):
+    'support function for copy_reg to unpickle method refs'
+    try:
+        unbound = getattr(im_class,im_name)
+        if im_self is None:
+            return unbound
+        bound = types.MethodType(unbound.im_func, im_self, im_class)
+        return bound
+    except AttributeError:
+        log.msg("Method",im_name,"not on class",im_class)
+        assert im_self is not None,"No recourse: no instance to guess from."
+        # Attempt a common fix before bailing -- if classes have
+        # changed around since we pickled this method, we may still be
+        # able to get it by looking on the instance's current class.
+        unbound = getattr(im_self.__class__,im_name)
+        log.msg("Attempting fixup with",unbound)
+        if im_self is None:
+            return unbound
+        bound = types.MethodType(unbound.im_func, im_self, im_self.__class__)
+        return bound
+
+copy_reg.pickle(types.MethodType,
+                pickleMethod,
+                unpickleMethod)
+
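+# With the registration above in place, bound methods round-trip through
+# pickle; a small sketch (Counter is an illustrative class that must be
+# importable when unpickling):
+#
+#     import pickle
+#
+#     class Counter:
+#         def bump(self):
+#             pass
+#
+#     c = Counter()
+#     blob = pickle.dumps(c.bump)     # handled by pickleMethod above
+#     bump = pickle.loads(blob)       # rebuilt by unpickleMethod, bound to an
+#                                     # unpickled copy of c
+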
+def pickleModule(module):
+    'support function for copy_reg to pickle module refs'
+    return unpickleModule, (module.__name__,)
+
+def unpickleModule(name):
+    'support function for copy_reg to unpickle module refs'
+    if name in oldModules:
+        log.msg("Module has moved: %s" % name)
+        name = oldModules[name]
+        log.msg(name)
+    return __import__(name,{},{},'x')
+
+
+copy_reg.pickle(types.ModuleType,
+                pickleModule,
+                unpickleModule)
+
+def pickleStringO(stringo):
+    'support function for copy_reg to pickle StringIO.OutputTypes'
+    return unpickleStringO, (stringo.getvalue(), stringo.tell())
+
+def unpickleStringO(val, sek):
+    x = StringIO.StringIO()
+    x.write(val)
+    x.seek(sek)
+    return x
+
+if hasattr(StringIO, 'OutputType'):
+    copy_reg.pickle(StringIO.OutputType,
+                    pickleStringO,
+                    unpickleStringO)
+
+def pickleStringI(stringi):
+    return unpickleStringI, (stringi.getvalue(), stringi.tell())
+
+def unpickleStringI(val, sek):
+    x = StringIO.StringIO(val)
+    x.seek(sek)
+    return x
+
+
+if hasattr(StringIO, 'InputType'):
+    copy_reg.pickle(StringIO.InputType,
+                pickleStringI,
+                unpickleStringI)
+
+class Ephemeral:
+    """
+    This type of object is never persisted; if possible, even references to it
+    are eliminated.
+    """
+
+    def __getstate__(self):
+        log.msg( "WARNING: serializing ephemeral %s" % self )
+        import gc
+        if '__pypy__' not in sys.builtin_module_names:
+            if getattr(gc, 'get_referrers', None):
+                for r in gc.get_referrers(self):
+                    log.msg( " referred to by %s" % (r,))
+        return None
+
+    def __setstate__(self, state):
+        log.msg( "WARNING: unserializing ephemeral %s" % self.__class__ )
+        self.__class__ = Ephemeral
+
+
+versionedsToUpgrade = {}
+upgraded = {}
+
+def doUpgrade():
+    global versionedsToUpgrade, upgraded
+    for versioned in versionedsToUpgrade.values():
+        requireUpgrade(versioned)
+    versionedsToUpgrade = {}
+    upgraded = {}
+
+def requireUpgrade(obj):
+    """Require that a Versioned instance be upgraded completely first.
+    """
+    objID = id(obj)
+    if objID in versionedsToUpgrade and objID not in upgraded:
+        upgraded[objID] = 1
+        obj.versionUpgrade()
+        return obj
+
+def _aybabtu(c):
+    """
+    Get all of the parent classes of C{c}, not including C{c} itself, which are
+    strict subclasses of L{Versioned}.
+
+    The name comes from "all your base are belong to us", from the deprecated
+    L{twisted.python.reflect.allYourBase} function.
+
+    @param c: a class
+    @returns: list of classes
+    """
+    # begin with two classes that should *not* be included in the
+    # final result
+    l = [c, Versioned]
+    for b in inspect.getmro(c):
+        if b not in l and issubclass(b, Versioned):
+            l.append(b)
+    # return all except the unwanted classes
+    return l[2:]
+
+class Versioned:
+    """
+    This type of object is persisted with versioning information.
+
+    I have a single class attribute, the int persistenceVersion.  After I am
+    unserialized (and styles.doUpgrade() is called), self.upgradeToVersionX()
+    will be called for each version upgrade I must undergo.
+
+    For example, if I serialize an instance of a Foo(Versioned) at version 4
+    and then unserialize it when the code is at version 9, the calls::
+
+      self.upgradeToVersion5()
+      self.upgradeToVersion6()
+      self.upgradeToVersion7()
+      self.upgradeToVersion8()
+      self.upgradeToVersion9()
+
+    will be made.  If any of these methods are undefined, a warning message
+    will be printed.
+    """
+    persistenceVersion = 0
+    persistenceForgets = ()
+
+    def __setstate__(self, state):
+        versionedsToUpgrade[id(self)] = self
+        self.__dict__ = state
+
+    def __getstate__(self, dict=None):
+        """Get state, adding a version number to it on its way out.
+        """
+        dct = copy.copy(dict or self.__dict__)
+        bases = _aybabtu(self.__class__)
+        bases.reverse()
+        bases.append(self.__class__) # don't forget me!!
+        for base in bases:
+            if 'persistenceForgets' in base.__dict__:
+                for slot in base.persistenceForgets:
+                    if slot in dct:
+                        del dct[slot]
+            if 'persistenceVersion' in base.__dict__:
+                dct['%s.persistenceVersion' % reflect.qual(base)] = base.persistenceVersion
+        return dct
+
+    def versionUpgrade(self):
+        """(internal) Do a version upgrade.
+        """
+        bases = _aybabtu(self.__class__)
+        # put the bases in order so superclasses' persistenceVersion methods
+        # will be called first.
+        bases.reverse()
+        bases.append(self.__class__) # don't forget me!!
+        # first let's look for old-skool versioned's
+        if "persistenceVersion" in self.__dict__:
+
+            # Hacky heuristic: if more than one class subclasses Versioned,
+            # we'll assume that the higher version number wins for the older
+            # class, so we'll consider the attribute the version of the older
+            # class.  This assumption will sometimes be wrong, but hopefully
+            # old-school persistenceVersion usage won't make it that far into
+            # multiple classes inheriting from Versioned.
+
+            pver = self.__dict__['persistenceVersion']
+            del self.__dict__['persistenceVersion']
+            highestVersion = 0
+            highestBase = None
+            for base in bases:
+                if not base.__dict__.has_key('persistenceVersion'):
+                    continue
+                if base.persistenceVersion > highestVersion:
+                    highestBase = base
+                    highestVersion = base.persistenceVersion
+            if highestBase:
+                self.__dict__['%s.persistenceVersion' % reflect.qual(highestBase)] = pver
+        for base in bases:
+            # ugly hack, but it's what the user expects, really
+            if (Versioned not in base.__bases__ and
+                'persistenceVersion' not in base.__dict__):
+                continue
+            currentVers = base.persistenceVersion
+            pverName = '%s.persistenceVersion' % reflect.qual(base)
+            persistVers = (self.__dict__.get(pverName) or 0)
+            if persistVers:
+                del self.__dict__[pverName]
+            assert persistVers <=  currentVers, "Sorry, can't go backwards in time."
+            while persistVers < currentVers:
+                persistVers = persistVers + 1
+                method = base.__dict__.get('upgradeToVersion%s' % persistVers, None)
+                if method:
+                    log.msg( "Upgrading %s (of %s @ %s) to version %s" % (reflect.qual(base), reflect.qual(self.__class__), id(self), persistVers) )
+                    method(self)
+                else:
+                    log.msg( 'Warning: cannot upgrade %s to version %s' % (base, persistVers) )
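+
+
+# A minimal sketch of how Versioned is meant to be used; MyRecord and its
+# attributes are illustrative:
+#
+#     class MyRecord(Versioned):
+#         persistenceVersion = 2
+#
+#         def upgradeToVersion2(self):
+#             # Invoked (via doUpgrade) after unpickling a version-1 instance.
+#             self.size = getattr(self, 'length', 0)
+#
+# After unpickling old instances, call doUpgrade() to run any pending
+# upgradeToVersionN() methods on them.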
diff --git a/ThirdParty/Twisted/twisted/persisted/test/__init__.py b/ThirdParty/Twisted/twisted/persisted/test/__init__.py
new file mode 100644
index 0000000..01ae065
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/persisted/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.persisted}.
+"""
diff --git a/ThirdParty/Twisted/twisted/persisted/test/test_styles.py b/ThirdParty/Twisted/twisted/persisted/test/test_styles.py
new file mode 100644
index 0000000..29647a9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/persisted/test/test_styles.py
@@ -0,0 +1,55 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.persisted.styles}.
+"""
+
+from twisted.trial import unittest
+from twisted.persisted.styles import unpickleMethod
+
+
+class Foo:
+    """
+    Helper class.
+    """
+    def method(self):
+        """
+        Helper method.
+        """
+
+
+
+class Bar:
+    """
+    Helper class.
+    """
+
+
+
+class UnpickleMethodTestCase(unittest.TestCase):
+    """
+    Tests for the unpickleMethod function.
+    """
+
+    def test_instanceBuildingNamePresent(self):
+        """
+        L{unpickleMethod} returns an instance method bound to the
+        instance passed to it.
+        """
+        foo = Foo()
+        m = unpickleMethod('method', foo, Foo)
+        self.assertEqual(m, foo.method)
+        self.assertNotIdentical(m, foo.method)
+
+
+    def test_instanceBuildingNameNotPresent(self):
+        """
+        If the named method is not present in the class,
+        L{unpickleMethod} finds a method on the class of the instance
+        and returns a bound method from there.
+        """
+        foo = Foo()
+        m = unpickleMethod('method', foo, Bar)
+        self.assertEqual(m, foo.method)
+        self.assertNotIdentical(m, foo.method)
diff --git a/ThirdParty/Twisted/twisted/plugin.py b/ThirdParty/Twisted/twisted/plugin.py
new file mode 100644
index 0000000..a4f8334
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugin.py
@@ -0,0 +1,255 @@
+# -*- test-case-name: twisted.test.test_plugin -*-
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Plugin system for Twisted.
+
+@author: Jp Calderone
+@author: Glyph Lefkowitz
+"""
+
+import os
+import sys
+
+from zope.interface import Interface, providedBy
+
+def _determinePickleModule():
+    """
+    Determine which 'pickle' API module to use.
+    """
+    try:
+        import cPickle
+        return cPickle
+    except ImportError:
+        import pickle
+        return pickle
+
+pickle = _determinePickleModule()
+
+from twisted.python.components import getAdapterFactory
+from twisted.python.reflect import namedAny
+from twisted.python import log
+from twisted.python.modules import getModule
+
+
+
+class IPlugin(Interface):
+    """
+    Interface that must be implemented by all plugins.
+
+    Only objects which implement this interface will be considered for return
+    by C{getPlugins}.  To be useful, plugins should also implement some other
+    application-specific interface.
+    """
+
+
+
+class CachedPlugin(object):
+    def __init__(self, dropin, name, description, provided):
+        self.dropin = dropin
+        self.name = name
+        self.description = description
+        self.provided = provided
+        self.dropin.plugins.append(self)
+
+    def __repr__(self):
+        return '<CachedPlugin %r/%r (provides %r)>' % (
+            self.name, self.dropin.moduleName,
+            ', '.join([i.__name__ for i in self.provided]))
+
+    def load(self):
+        return namedAny(self.dropin.moduleName + '.' + self.name)
+
+    def __conform__(self, interface, registry=None, default=None):
+        for providedInterface in self.provided:
+            if providedInterface.isOrExtends(interface):
+                return self.load()
+            if getAdapterFactory(providedInterface, interface, None) is not None:
+                return interface(self.load(), default)
+        return default
+
+    # backwards compat HOORJ
+    getComponent = __conform__
+
+
+
+class CachedDropin(object):
+    """
+    A collection of L{CachedPlugin} instances from a particular module in a
+    plugin package.
+
+    @type moduleName: C{str}
+    @ivar moduleName: The fully qualified name of the plugin module this
+        represents.
+
+    @type description: C{str} or C{NoneType}
+    @ivar description: A brief explanation of this collection of plugins
+        (probably the plugin module's docstring).
+
+    @type plugins: C{list}
+    @ivar plugins: The L{CachedPlugin} instances which were loaded from this
+        dropin.
+    """
+    def __init__(self, moduleName, description):
+        self.moduleName = moduleName
+        self.description = description
+        self.plugins = []
+
+
+
+def _generateCacheEntry(provider):
+    dropin = CachedDropin(provider.__name__,
+                          provider.__doc__)
+    for k, v in provider.__dict__.iteritems():
+        plugin = IPlugin(v, None)
+        if plugin is not None:
+            # Instantiated for its side-effects.
+            CachedPlugin(dropin, k, v.__doc__, list(providedBy(plugin)))
+    return dropin
+
+try:
+    fromkeys = dict.fromkeys
+except AttributeError:
+    def fromkeys(keys, value=None):
+        d = {}
+        for k in keys:
+            d[k] = value
+        return d
+
+
+
+def getCache(module):
+    """
+    Compute all the possible loadable plugins, while loading as few as
+    possible and hitting the filesystem as little as possible.
+
+    @param module: a Python module object.  This represents a package to search
+    for plugins.
+
+    @return: a dictionary mapping module names to L{CachedDropin} instances.
+    """
+    allCachesCombined = {}
+    mod = getModule(module.__name__)
+    # don't want to walk deep, only immediate children.
+    buckets = {}
+    # Fill buckets with modules by related entry on the given package's
+    # __path__.  There's an abstraction inversion going on here, because this
+    # information is already represented internally in twisted.python.modules,
+    # but it's simple enough that I'm willing to live with it.  If anyone else
+    # wants to fix up this iteration so that it's one path segment at a time,
+    # be my guest.  --glyph
+    for plugmod in mod.iterModules():
+        fpp = plugmod.filePath.parent()
+        if fpp not in buckets:
+            buckets[fpp] = []
+        bucket = buckets[fpp]
+        bucket.append(plugmod)
+    for pseudoPackagePath, bucket in buckets.iteritems():
+        dropinPath = pseudoPackagePath.child('dropin.cache')
+        try:
+            lastCached = dropinPath.getModificationTime()
+            dropinDotCache = pickle.load(dropinPath.open('r'))
+        except:
+            dropinDotCache = {}
+            lastCached = 0
+
+        needsWrite = False
+        existingKeys = {}
+        for pluginModule in bucket:
+            pluginKey = pluginModule.name.split('.')[-1]
+            existingKeys[pluginKey] = True
+            if ((pluginKey not in dropinDotCache) or
+                (pluginModule.filePath.getModificationTime() >= lastCached)):
+                needsWrite = True
+                try:
+                    provider = pluginModule.load()
+                except:
+                    # dropinDotCache.pop(pluginKey, None)
+                    log.err()
+                else:
+                    entry = _generateCacheEntry(provider)
+                    dropinDotCache[pluginKey] = entry
+        # Make sure that the cache doesn't contain any stale plugins.
+        for pluginKey in dropinDotCache.keys():
+            if pluginKey not in existingKeys:
+                del dropinDotCache[pluginKey]
+                needsWrite = True
+        if needsWrite:
+            try:
+                dropinPath.setContent(pickle.dumps(dropinDotCache))
+            except OSError, e:
+                log.msg(
+                    format=(
+                        "Unable to write to plugin cache %(path)s: error "
+                        "number %(errno)d"),
+                    path=dropinPath.path, errno=e.errno)
+            except:
+                log.err(None, "Unexpected error while writing cache file")
+        allCachesCombined.update(dropinDotCache)
+    return allCachesCombined
+
+
+
+def getPlugins(interface, package=None):
+    """
+    Retrieve all plugins implementing the given interface beneath the given module.
+
+    @param interface: An interface class.  Only plugins which implement this
+    interface will be returned.
+
+    @param package: A package beneath which plugins are installed.  For
+    most uses, the default value is correct.
+
+    @return: An iterator of plugins.
+    """
+    if package is None:
+        import twisted.plugins as package
+    allDropins = getCache(package)
+    for dropin in allDropins.itervalues():
+        for plugin in dropin.plugins:
+            try:
+                adapted = interface(plugin, None)
+            except:
+                log.err()
+            else:
+                if adapted is not None:
+                    yield adapted
+
+
+# Old, backwards compatible name.  Don't use this.
+getPlugIns = getPlugins
+
+
+def pluginPackagePaths(name):
+    """
+    Return a list of additional directories which should be searched for
+    modules to be included as part of the named plugin package.
+
+    @type name: C{str}
+    @param name: The fully-qualified Python name of a plugin package, eg
+        C{'twisted.plugins'}.
+
+    @rtype: C{list} of C{str}
+    @return: The absolute paths to other directories which may contain plugin
+        modules for the named plugin package.
+    """
+    package = name.split('.')
+    # Note that this may include directories which do not exist.  It may be
+    # preferable to remove such directories at this point, rather than allow
+    # them to be searched later on.
+    #
+    # Note as well that only '__init__.py' will be considered to make a
+    # directory a package (and thus exclude it from this list).  This means
+    # that if you create a master plugin package which has some other kind of
+    # __init__ (eg, __init__.pyc) it will be incorrectly treated as a
+    # supplementary plugin directory.
+    return [
+        os.path.abspath(os.path.join(x, *package))
+        for x
+        in sys.path
+        if
+        not os.path.exists(os.path.join(x, *package + ['__init__.py']))]
+
+__all__ = ['getPlugins', 'pluginPackagePaths']
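+
+# A minimal sketch of querying plugins; IMyExtension is an illustrative
+# application interface, and the plugin objects themselves would live in
+# modules under a twisted/plugins directory and provide both IPlugin and
+# IMyExtension:
+#
+#     from zope.interface import Interface
+#     from twisted.plugin import getPlugins
+#
+#     class IMyExtension(Interface):
+#         """Interface my application's plugins must provide."""
+#
+#     extensions = list(getPlugins(IMyExtension))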
diff --git a/ThirdParty/Twisted/twisted/plugins/__init__.py b/ThirdParty/Twisted/twisted/plugins/__init__.py
new file mode 100644
index 0000000..0c11760
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/__init__.py
@@ -0,0 +1,17 @@
+# -*- test-case-name: twisted.test.test_plugin -*-
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Plugins go in directories on your PYTHONPATH named twisted/plugins:
+this is the only place where an __init__.py is necessary, thanks to
+the __path__ variable.
+
+@author: Jp Calderone
+@author: Glyph Lefkowitz
+"""
+
+from twisted.plugin import pluginPackagePaths
+__path__.extend(pluginPackagePaths(__name__))
+__all__ = []                    # nothing to see here, move along, move along
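+
+# A sketch of where external plugin modules are picked up from (the names are
+# illustrative): any entry on sys.path may contain
+#
+#     twisted/plugins/myproject_plugin.py
+#
+# with no __init__.py of its own in that twisted/plugins directory; the
+# pluginPackagePaths() call above splices such directories into __path__ so
+# that twisted.plugin.getPlugins() scans their modules.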
diff --git a/ThirdParty/Twisted/twisted/plugins/cred_anonymous.py b/ThirdParty/Twisted/twisted/plugins/cred_anonymous.py
new file mode 100644
index 0000000..ad0ea9e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/cred_anonymous.py
@@ -0,0 +1,40 @@
+# -*- test-case-name: twisted.test.test_strcred -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Cred plugin for anonymous logins.
+"""
+
+from zope.interface import implements
+
+from twisted import plugin
+from twisted.cred.checkers import AllowAnonymousAccess
+from twisted.cred.strcred import ICheckerFactory
+from twisted.cred.credentials import IAnonymous
+
+
+anonymousCheckerFactoryHelp = """
+This allows anonymous authentication for servers that support it.
+"""
+
+
+class AnonymousCheckerFactory(object):
+    """
+    Generates checkers that will authenticate an anonymous request.
+    """
+    implements(ICheckerFactory, plugin.IPlugin)
+    authType = 'anonymous'
+    authHelp = anonymousCheckerFactoryHelp
+    argStringFormat = 'No argstring required.'
+    credentialInterfaces = (IAnonymous,)
+
+
+    def generateChecker(self, argstring=''):
+        return AllowAnonymousAccess()
+
+
+
+theAnonymousCheckerFactory = AnonymousCheckerFactory()
+
diff --git a/ThirdParty/Twisted/twisted/plugins/cred_file.py b/ThirdParty/Twisted/twisted/plugins/cred_file.py
new file mode 100644
index 0000000..3ff9b37
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/cred_file.py
@@ -0,0 +1,60 @@
+# -*- test-case-name: twisted.test.test_strcred -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Cred plugin for a file of the format 'username:password'.
+"""
+
+import sys
+
+from zope.interface import implements
+
+from twisted import plugin
+from twisted.cred.checkers import FilePasswordDB
+from twisted.cred.strcred import ICheckerFactory
+from twisted.cred.credentials import IUsernamePassword, IUsernameHashedPassword
+
+
+
+fileCheckerFactoryHelp = """
+This checker expects to receive the location of a file that
+conforms to the FilePasswordDB format. Each line in the file
+should be of the format 'username:password', in plain text.
+"""
+
+invalidFileWarning = 'Warning: not a valid file'
+
+
+
+class FileCheckerFactory(object):
+    """
+    A factory for instances of L{FilePasswordDB}.
+    """
+    implements(ICheckerFactory, plugin.IPlugin)
+    authType = 'file'
+    authHelp = fileCheckerFactoryHelp
+    argStringFormat = 'Location of a FilePasswordDB-formatted file.'
+    # Explicitly defined here because FilePasswordDB doesn't do it for us
+    credentialInterfaces = (IUsernamePassword, IUsernameHashedPassword)
+
+    errorOutput = sys.stderr
+
+    def generateChecker(self, argstring):
+        """
+        This checker factory expects to get the location of a file.
+        The file should conform to the format required by
+        L{FilePasswordDB} (using defaults for all
+        initialization parameters).
+        """
+        from twisted.python.filepath import FilePath
+        if not argstring.strip():
+            raise ValueError, '%r requires a filename' % self.authType
+        elif not FilePath(argstring).isfile():
+            self.errorOutput.write('%s: %s\n' % (invalidFileWarning, argstring))
+        return FilePasswordDB(argstring)
+
+
+
+theFileCheckerFactory = FileCheckerFactory()
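+
+# A minimal sketch, assuming a password file at an illustrative path whose
+# lines look like 'alice:secret':
+#
+#     checker = theFileCheckerFactory.generateChecker('/tmp/users.passwd')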
diff --git a/ThirdParty/Twisted/twisted/plugins/cred_memory.py b/ThirdParty/Twisted/twisted/plugins/cred_memory.py
new file mode 100644
index 0000000..0ed9083
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/cred_memory.py
@@ -0,0 +1,68 @@
+# -*- test-case-name: twisted.test.test_strcred -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Cred plugin for an in-memory user database.
+"""
+
+from zope.interface import implements
+
+from twisted import plugin
+from twisted.cred.strcred import ICheckerFactory
+from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
+from twisted.cred.credentials import IUsernamePassword, IUsernameHashedPassword
+
+
+
+inMemoryCheckerFactoryHelp = """
+A checker that uses an in-memory user database.
+
+This is only of use in one-off test programs or examples which
+don't want to focus too much on how credentials are verified. You
+really don't want to use this for anything else. It is a toy.
+"""
+
+
+
+class InMemoryCheckerFactory(object):
+    """
+    A factory for in-memory credentials checkers.
+
+    This is only of use in one-off test programs or examples which don't
+    want to focus too much on how credentials are verified.
+
+    You really don't want to use this for anything else.  It is, at best, a
+    toy.  If you need a simple credentials checker for a real application,
+    see L{cred_passwd.PasswdCheckerFactory}.
+    """
+    implements(ICheckerFactory, plugin.IPlugin)
+    authType = 'memory'
+    authHelp = inMemoryCheckerFactoryHelp
+    argStringFormat = 'A colon-separated list (name:password:...)'
+    credentialInterfaces = (IUsernamePassword,
+                            IUsernameHashedPassword)
+
+    def generateChecker(self, argstring):
+        """
+        This checker factory expects to get a list of
+        username:password pairs, with each pair also separated by a
+        colon. For example, the string 'alice:f:bob:g' would generate
+        two users, one named 'alice' and one named 'bob'.
+        """
+        checker = InMemoryUsernamePasswordDatabaseDontUse()
+        if argstring:
+            pieces = argstring.split(':')
+            if len(pieces) % 2:
+                from twisted.cred.strcred import InvalidAuthArgumentString
+                raise InvalidAuthArgumentString(
+                    "argstring must be in format U:P:...")
+            for i in range(0, len(pieces), 2):
+                username, password = pieces[i], pieces[i+1]
+                checker.addUser(username, password)
+        return checker
+
+
+
+theInMemoryCheckerFactory = InMemoryCheckerFactory()
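+
+# A minimal sketch of the argstring format accepted above (the user names and
+# passwords are illustrative):
+#
+#     checker = theInMemoryCheckerFactory.generateChecker('alice:f:bob:g')
+#     # 'checker' now knows two users: 'alice' (password 'f') and 'bob' ('g').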
diff --git a/ThirdParty/Twisted/twisted/plugins/cred_sshkeys.py b/ThirdParty/Twisted/twisted/plugins/cred_sshkeys.py
new file mode 100644
index 0000000..226b34a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/cred_sshkeys.py
@@ -0,0 +1,51 @@
+# -*- test-case-name: twisted.test.test_strcred -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Cred plugin for ssh key login
+"""
+
+from zope.interface import implements
+
+from twisted import plugin
+from twisted.cred.strcred import ICheckerFactory
+from twisted.cred.credentials import ISSHPrivateKey
+
+
+sshKeyCheckerFactoryHelp = """
+This allows SSH public key authentication, based on public keys listed in
+authorized_keys and authorized_keys2 files in user .ssh/ directories.
+"""
+
+
+try:
+    from twisted.conch.checkers import SSHPublicKeyDatabase
+
+    class SSHKeyCheckerFactory(object):
+        """
+        Generates checkers that will authenticate a SSH public key
+        """
+        implements(ICheckerFactory, plugin.IPlugin)
+        authType = 'sshkey'
+        authHelp = sshKeyCheckerFactoryHelp
+        argStringFormat = 'No argstring required.'
+        credentialInterfaces = SSHPublicKeyDatabase.credentialInterfaces
+
+
+        def generateChecker(self, argstring=''):
+            """
+            This checker factory ignores the argument string. Everything
+            needed to authenticate users is pulled out of the public keys
+            listed in user .ssh/ directories.
+            """
+            return SSHPublicKeyDatabase()
+
+
+
+    theSSHKeyCheckerFactory = SSHKeyCheckerFactory()
+
+except ImportError:
+    # if checkers can't be imported, then there should be no SSH cred plugin
+    pass
diff --git a/ThirdParty/Twisted/twisted/plugins/cred_unix.py b/ThirdParty/Twisted/twisted/plugins/cred_unix.py
new file mode 100644
index 0000000..a636497
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/cred_unix.py
@@ -0,0 +1,138 @@
+# -*- test-case-name: twisted.test.test_strcred -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Cred plugin for UNIX user accounts.
+"""
+
+from zope.interface import implements
+
+from twisted import plugin
+from twisted.cred.strcred import ICheckerFactory
+from twisted.cred.checkers import ICredentialsChecker
+from twisted.cred.credentials import IUsernamePassword
+from twisted.cred.error import UnauthorizedLogin
+from twisted.internet import defer
+
+
+
+def verifyCryptedPassword(crypted, pw):
+    if crypted[0] == '$': # md5_crypt encrypted
+        salt = '$1$' + crypted.split('$')[2]
+    else:
+        salt = crypted[:2]
+    try:
+        import crypt
+    except ImportError:
+        crypt = None
+
+    if crypt is None:
+        raise NotImplementedError("cred_unix not supported on this platform")
+    return crypt.crypt(pw, salt) == crypted
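+
+# A minimal sketch, assuming a platform where the crypt module is available;
+# the password and two-character salt are illustrative:
+#
+#     import crypt
+#     crypted = crypt.crypt('secret', 'ab')
+#     verifyCryptedPassword(crypted, 'secret')    # -> True
+#     verifyCryptedPassword(crypted, 'wrong')     # -> False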
+
+
+
+class UNIXChecker(object):
+    """
+    A credentials checker for a UNIX server. This will check that
+    an authenticating username/password is a valid user on the system.
+
+    Does not work on Windows.
+
+    Right now this supports Python's pwd and spwd modules, if they are
+    installed. It does not support PAM.
+    """
+    implements(ICredentialsChecker)
+    credentialInterfaces = (IUsernamePassword,)
+
+
+    def checkPwd(self, pwd, username, password):
+        try:
+            cryptedPass = pwd.getpwnam(username)[1]
+        except KeyError:
+            return defer.fail(UnauthorizedLogin())
+        else:
+            if cryptedPass in ('*', 'x'):
+                # Allow checkSpwd to take over
+                return None
+            elif verifyCryptedPassword(cryptedPass, password):
+                return defer.succeed(username)
+
+
+    def checkSpwd(self, spwd, username, password):
+        try:
+            cryptedPass = spwd.getspnam(username)[1]
+        except KeyError:
+            return defer.fail(UnauthorizedLogin())
+        else:
+            if verifyCryptedPassword(cryptedPass, password):
+                return defer.succeed(username)
+
+
+    def requestAvatarId(self, credentials):
+        username, password = credentials.username, credentials.password
+
+        try:
+            import pwd
+        except ImportError:
+            pwd = None
+
+        if pwd is not None:
+            checked = self.checkPwd(pwd, username, password)
+            if checked is not None:
+                return checked
+
+        try:
+            import spwd
+        except ImportError:
+            spwd = None
+
+        if spwd is not None:
+            checked = self.checkSpwd(spwd, username, password)
+            if checked is not None:
+                return checked
+        # TODO: check_pam?
+        # TODO: check_shadow?
+        return defer.fail(UnauthorizedLogin())
+
+
+
+unixCheckerFactoryHelp = """
+This checker will attempt to use every resource available to
+authenticate against the list of users on the local UNIX system.
+(This does not support Windows servers for very obvious reasons.)
+
+Right now, this includes support for:
+
+  * Python's pwd module (which checks /etc/passwd)
+  * Python's spwd module (which checks /etc/shadow)
+
+Future versions may include support for PAM authentication.
+"""
+
+
+
+class UNIXCheckerFactory(object):
+    """
+    A factory for L{UNIXChecker}.
+    """
+    implements(ICheckerFactory, plugin.IPlugin)
+    authType = 'unix'
+    authHelp = unixCheckerFactoryHelp
+    argStringFormat = 'No argstring required.'
+    credentialInterfaces = UNIXChecker.credentialInterfaces
+
+    def generateChecker(self, argstring):
+        """
+        This checker factory ignores the argument string. Everything
+        needed to generate a user database is pulled out of the local
+        UNIX environment.
+        """
+        return UNIXChecker()
+
+
+
+theUnixCheckerFactory = UNIXCheckerFactory()
+
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_conch.py b/ThirdParty/Twisted/twisted/plugins/twisted_conch.py
new file mode 100644
index 0000000..4b37e0b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_conch.py
@@ -0,0 +1,18 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedSSH = ServiceMaker(
+    "Twisted Conch Server",
+    "twisted.conch.tap",
+    "A Conch SSH service.",
+    "conch")
+
+TwistedManhole = ServiceMaker(
+    "Twisted Manhole (new)",
+    "twisted.conch.manhole_tap",
+    ("An interactive remote debugger service accessible via telnet "
+     "and ssh and providing syntax coloring and basic line editing "
+     "functionality."),
+    "manhole")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_core.py b/ThirdParty/Twisted/twisted/plugins/twisted_core.py
new file mode 100644
index 0000000..3907d42
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_core.py
@@ -0,0 +1,9 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.internet.endpoints import _SystemdParser, _TCP6ServerParser, _StandardIOParser
+
+systemdEndpointParser = _SystemdParser()
+tcp6ServerEndpointParser = _TCP6ServerParser()
+stdioEndpointParser = _StandardIOParser()
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_ftp.py b/ThirdParty/Twisted/twisted/plugins/twisted_ftp.py
new file mode 100644
index 0000000..474a9c7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_ftp.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedFTP = ServiceMaker(
+    "Twisted FTP",
+    "twisted.tap.ftp",
+    "An FTP server.",
+    "ftp")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_inet.py b/ThirdParty/Twisted/twisted/plugins/twisted_inet.py
new file mode 100644
index 0000000..1196343
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_inet.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedINETD = ServiceMaker(
+    "Twisted INETD Server",
+    "twisted.runner.inetdtap",
+    "An inetd(8) replacement.",
+    "inetd")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_lore.py b/ThirdParty/Twisted/twisted/plugins/twisted_lore.py
new file mode 100644
index 0000000..1ab57a5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_lore.py
@@ -0,0 +1,38 @@
+
+from zope.interface import implements
+
+from twisted.lore.scripts.lore import IProcessor
+from twisted.plugin import IPlugin
+
+class _LorePlugin(object):
+    implements(IPlugin, IProcessor)
+
+    def __init__(self, name, moduleName, description):
+        self.name = name
+        self.moduleName = moduleName
+        self.description = description
+
+DefaultProcessor = _LorePlugin(
+    "lore",
+    "twisted.lore.default",
+    "Lore format")
+
+MathProcessor = _LorePlugin(
+    "mlore",
+    "twisted.lore.lmath",
+    "Lore format with LaTeX formula")
+
+SlideProcessor = _LorePlugin(
+    "lore-slides",
+    "twisted.lore.slides",
+    "Lore for slides")
+
+ManProcessor = _LorePlugin(
+    "man",
+    "twisted.lore.man2lore",
+    "UNIX Man pages")
+
+NevowProcessor = _LorePlugin(
+    "nevow",
+    "twisted.lore.nevowlore",
+    "Nevow for Lore")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_mail.py b/ThirdParty/Twisted/twisted/plugins/twisted_mail.py
new file mode 100644
index 0000000..7e9a5bd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_mail.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedMail = ServiceMaker(
+    "Twisted Mail",
+    "twisted.mail.tap",
+    "An email service",
+    "mail")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_manhole.py b/ThirdParty/Twisted/twisted/plugins/twisted_manhole.py
new file mode 100644
index 0000000..2481890
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_manhole.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedManhole = ServiceMaker(
+    "Twisted Manhole (old)",
+    "twisted.tap.manhole",
+    "An interactive remote debugger service.",
+    "manhole-old")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_names.py b/ThirdParty/Twisted/twisted/plugins/twisted_names.py
new file mode 100644
index 0000000..7123bf0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_names.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedNames = ServiceMaker(
+    "Twisted DNS Server",
+    "twisted.names.tap",
+    "A domain name server.",
+    "dns")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_news.py b/ThirdParty/Twisted/twisted/plugins/twisted_news.py
new file mode 100644
index 0000000..0fc88d8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_news.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedNews = ServiceMaker(
+    "Twisted News",
+    "twisted.news.tap",
+    "A news server.",
+    "news")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_portforward.py b/ThirdParty/Twisted/twisted/plugins/twisted_portforward.py
new file mode 100644
index 0000000..1969434
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_portforward.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedPortForward = ServiceMaker(
+    "Twisted Port-Forwarding",
+    "twisted.tap.portforward",
+    "A simple port-forwarder.",
+    "portforward")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_qtstub.py b/ThirdParty/Twisted/twisted/plugins/twisted_qtstub.py
new file mode 100644
index 0000000..ddf8843
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_qtstub.py
@@ -0,0 +1,45 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Backwards-compatibility plugin for the Qt reactor.
+
+This provides a Qt reactor plugin named C{qt} which emits a deprecation
+warning and a pointer to the separately distributed Qt reactor plugins.
+"""
+
+import warnings
+
+from twisted.application.reactors import Reactor, NoSuchReactor
+
+wikiURL = 'http://twistedmatrix.com/trac/wiki/QTReactor'
+errorMessage = ('qtreactor is no longer a part of Twisted due to licensing '
+                'issues. Please see %s for details.' % (wikiURL,))
+
+class QTStub(Reactor):
+    """
+    Reactor plugin which emits a deprecation warning on the successful
+    installation of its reactor or a pointer to further information if an
+    ImportError occurs while attempting to install it.
+    """
+    def __init__(self):
+        super(QTStub, self).__init__(
+            'qt', 'qtreactor', 'QT integration reactor')
+
+
+    def install(self):
+        """
+        Install the Qt reactor with a deprecation warning or try to point
+        the user to further information if it cannot be installed.
+        """
+        try:
+            super(QTStub, self).install()
+        except (ValueError, ImportError):
+            raise NoSuchReactor(errorMessage)
+        else:
+            warnings.warn(
+                "Please use -r qt3 to import qtreactor",
+                category=DeprecationWarning)
+
+
+qt = QTStub()
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_reactors.py b/ThirdParty/Twisted/twisted/plugins/twisted_reactors.py
new file mode 100644
index 0000000..8562aa9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_reactors.py
@@ -0,0 +1,42 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.reactors import Reactor
+
+default = Reactor(
+    'default', 'twisted.internet.default',
+    'A reasonable default: poll(2) if available, otherwise select(2).')
+
+select = Reactor(
+    'select', 'twisted.internet.selectreactor', 'select(2)-based reactor.')
+wx = Reactor(
+    'wx', 'twisted.internet.wxreactor', 'wxPython integration reactor.')
+gi = Reactor(
+    'gi', 'twisted.internet.gireactor', 'GObject Introspection integration reactor.')
+gtk3 = Reactor(
+    'gtk3', 'twisted.internet.gtk3reactor', 'Gtk3 integration reactor.')
+gtk = Reactor(
+    'gtk', 'twisted.internet.gtkreactor', 'Gtk1 integration reactor.')
+gtk2 = Reactor(
+    'gtk2', 'twisted.internet.gtk2reactor', 'Gtk2 integration reactor.')
+glib2 = Reactor(
+    'glib2', 'twisted.internet.glib2reactor',
+    'GLib2 event-loop integration reactor.')
+glade = Reactor(
+    'debug-gui', 'twisted.manhole.gladereactor',
+    'Semi-functional debugging/introspection reactor.')
+win32er = Reactor(
+    'win32', 'twisted.internet.win32eventreactor',
+    'Win32 WaitForMultipleObjects-based reactor.')
+poll = Reactor(
+    'poll', 'twisted.internet.pollreactor', 'poll(2)-based reactor.')
+epoll = Reactor(
+    'epoll', 'twisted.internet.epollreactor', 'epoll(4)-based reactor.')
+cf = Reactor(
+    'cf' , 'twisted.internet.cfreactor',
+    'CoreFoundation integration reactor.')
+kqueue = Reactor(
+    'kqueue', 'twisted.internet.kqreactor', 'kqueue(2)-based reactor.')
+iocp = Reactor(
+    'iocp', 'twisted.internet.iocpreactor',
+    'Win32 IO Completion Ports-based reactor.')
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_runner.py b/ThirdParty/Twisted/twisted/plugins/twisted_runner.py
new file mode 100644
index 0000000..dc63028
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_runner.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedProcmon = ServiceMaker(
+    "Twisted Process Monitor",
+    "twisted.runner.procmontap",
+    ("A process watchdog / supervisor"),
+    "procmon")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_socks.py b/ThirdParty/Twisted/twisted/plugins/twisted_socks.py
new file mode 100644
index 0000000..5a94f87
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_socks.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedSOCKS = ServiceMaker(
+    "Twisted SOCKS",
+    "twisted.tap.socks",
+    "A SOCKSv4 proxy service.",
+    "socks")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_telnet.py b/ThirdParty/Twisted/twisted/plugins/twisted_telnet.py
new file mode 100644
index 0000000..4cb1f98
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_telnet.py
@@ -0,0 +1,10 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedTelnet = ServiceMaker(
+    "Twisted Telnet Shell Server",
+    "twisted.tap.telnet",
+    "A simple, telnet-based remote debugging service.",
+    "telnet")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_trial.py b/ThirdParty/Twisted/twisted/plugins/twisted_trial.py
new file mode 100644
index 0000000..debc8af
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_trial.py
@@ -0,0 +1,59 @@
+
+from zope.interface import implements
+
+from twisted.trial.itrial import IReporter
+from twisted.plugin import IPlugin
+
+class _Reporter(object):
+    implements(IPlugin, IReporter)
+
+    def __init__(self, name, module, description, longOpt, shortOpt, klass):
+        self.name = name
+        self.module = module
+        self.description = description
+        self.longOpt = longOpt
+        self.shortOpt = shortOpt
+        self.klass = klass
+
+
+Tree = _Reporter("Tree Reporter",
+                 "twisted.trial.reporter",
+                 description="verbose color output (default reporter)",
+                 longOpt="verbose",
+                 shortOpt="v",
+                 klass="TreeReporter")
+
+BlackAndWhite = _Reporter("Black-And-White Reporter",
+                          "twisted.trial.reporter",
+                          description="Colorless verbose output",
+                          longOpt="bwverbose",
+                          shortOpt="o",
+                          klass="VerboseTextReporter")
+
+Minimal = _Reporter("Minimal Reporter",
+                    "twisted.trial.reporter",
+                    description="minimal summary output",
+                    longOpt="summary",
+                    shortOpt="s",
+                    klass="MinimalReporter")
+
+Classic = _Reporter("Classic Reporter",
+                    "twisted.trial.reporter",
+                    description="terse text output",
+                    longOpt="text",
+                    shortOpt="t",
+                    klass="TextReporter")
+
+Timing = _Reporter("Timing Reporter",
+                   "twisted.trial.reporter",
+                   description="Timing output",
+                   longOpt="timing",
+                   shortOpt=None,
+                   klass="TimingTextReporter")
+
+Subunit = _Reporter("Subunit Reporter",
+                    "twisted.trial.reporter",
+                    description="subunit output",
+                    longOpt="subunit",
+                    shortOpt=None,
+                    klass="SubunitReporter")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_web.py b/ThirdParty/Twisted/twisted/plugins/twisted_web.py
new file mode 100644
index 0000000..c7655a6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_web.py
@@ -0,0 +1,11 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application.service import ServiceMaker
+
+TwistedWeb = ServiceMaker(
+    "Twisted Web",
+    "twisted.web.tap",
+    ("A general-purpose web server which can serve from a "
+     "filesystem or application resource."),
+    "web")
diff --git a/ThirdParty/Twisted/twisted/plugins/twisted_words.py b/ThirdParty/Twisted/twisted/plugins/twisted_words.py
new file mode 100644
index 0000000..6f14aef
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/plugins/twisted_words.py
@@ -0,0 +1,43 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from zope.interface import classProvides
+
+from twisted.plugin import IPlugin
+
+from twisted.application.service import ServiceMaker
+from twisted.words import iwords
+
+
+NewTwistedWords = ServiceMaker(
+    "New Twisted Words",
+    "twisted.words.tap",
+    "A modern words server",
+    "words")
+
+TwistedXMPPRouter = ServiceMaker(
+    "XMPP Router",
+    "twisted.words.xmpproutertap",
+    "An XMPP Router server",
+    "xmpp-router")
+
+class RelayChatInterface(object):
+    classProvides(IPlugin, iwords.IProtocolPlugin)
+
+    name = 'irc'
+
+    def getFactory(cls, realm, portal):
+        from twisted.words import service
+        return service.IRCFactory(realm, portal)
+    getFactory = classmethod(getFactory)
+
+class PBChatInterface(object):
+    classProvides(IPlugin, iwords.IProtocolPlugin)
+
+    name = 'pb'
+
+    def getFactory(cls, realm, portal):
+        from twisted.spread import pb
+        return pb.PBServerFactory(portal, True)
+    getFactory = classmethod(getFactory)
+
diff --git a/ThirdParty/Twisted/twisted/protocols/__init__.py b/ThirdParty/Twisted/twisted/protocols/__init__.py
new file mode 100644
index 0000000..a079651
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/__init__.py
@@ -0,0 +1,7 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Twisted Protocols: a collection of internet protocol implementations.
+"""
diff --git a/ThirdParty/Twisted/twisted/protocols/amp.py b/ThirdParty/Twisted/twisted/protocols/amp.py
new file mode 100644
index 0000000..72a3e7a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/amp.py
@@ -0,0 +1,2705 @@
+# -*- test-case-name: twisted.test.test_amp -*-
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module implements AMP, the Asynchronous Messaging Protocol.
+
+AMP is a protocol for sending multiple asynchronous request/response pairs over
+the same connection.  Requests and responses are both collections of key/value
+pairs.
+
+AMP is a very simple protocol which is not an application.  This module is a
+"protocol construction kit" of sorts; it attempts to be the simplest wire-level
+implementation of Deferreds.  AMP provides the following base-level features:
+
+    - Asynchronous request/response handling (hence the name)
+
+    - Requests and responses are both key/value pairs
+
+    - Binary transfer of all data: all data is length-prefixed.  Your
+      application will never need to worry about quoting.
+
+    - Command dispatching (like HTTP Verbs): the protocol is extensible, and
+      multiple AMP sub-protocols can be grouped together easily.
+
+The protocol implementation also provides a few additional features which are
+not part of the core wire protocol, but are nevertheless very useful:
+
+    - Tight TLS integration, with an included StartTLS command.
+
+    - Handshaking to other protocols: because AMP has well-defined message
+      boundaries and maintains all incoming and outgoing requests for you, you
+      can start a connection over AMP and then switch to another protocol.
+      This makes it ideal for firewall-traversal applications where you may
+      have only one forwarded port but multiple applications that want to use
+      it.
+
+Using AMP with Twisted is simple.  Each message is a command, with a response.
+You begin by defining a command type.  Commands specify their input and output
+in terms of the types that they expect to see in the request and response
+key-value pairs.  Here's an example of a command that adds two integers, 'a'
+and 'b'::
+
+    class Sum(amp.Command):
+        arguments = [('a', amp.Integer()),
+                     ('b', amp.Integer())]
+        response = [('total', amp.Integer())]
+
+Once you have specified a command, you need to make it part of a protocol, and
+define a responder for it.  Here's a 'JustSum' protocol that includes a
+responder for our 'Sum' command::
+
+    class JustSum(amp.AMP):
+        def sum(self, a, b):
+            total = a + b
+            print 'Did a sum: %d + %d = %d' % (a, b, total)
+            return {'total': total}
+        Sum.responder(sum)
+
+Later, when you want to actually do a sum, the following expression will return
+a L{Deferred} which will fire with the result::
+
+    ClientCreator(reactor, amp.AMP).connectTCP(...).addCallback(
+        lambda p: p.callRemote(Sum, a=13, b=81)).addCallback(
+            lambda result: result['total'])
+
+Command responders may also return Deferreds, causing the response to be
+sent only once the Deferred fires::
+
+    class DelayedSum(amp.AMP):
+        def slowSum(self, a, b):
+            total = a + b
+            result = defer.Deferred()
+            reactor.callLater(3, result.callback, {'total': total})
+            return result
+        Sum.responder(slowSum)
+
+This is transparent to the caller.
+
+You can also define the propagation of specific errors in AMP.  For example,
+for the slightly more complicated case of division, we might have to deal with
+division by zero::
+
+    class Divide(amp.Command):
+        arguments = [('numerator', amp.Integer()),
+                     ('denominator', amp.Integer())]
+        response = [('result', amp.Float())]
+        errors = {ZeroDivisionError: 'ZERO_DIVISION'}
+
+The 'errors' mapping here tells AMP that if a responder to Divide emits a
+L{ZeroDivisionError}, then the other side should be informed that an error of
+the type 'ZERO_DIVISION' has occurred.  Writing a responder which takes
+advantage of this is very simple - just raise your exception normally::
+
+    class JustDivide(amp.AMP):
+        def divide(self, numerator, denominator):
+            result = numerator / denominator
+            print 'Divided: %d / %d = %d' % (numerator, denominator, result)
+            return {'result': result}
+        Divide.responder(divide)
+
+On the client side, the errors mapping will be used to determine what the
+'ZERO_DIVISION' error means, and translated into an asynchronous exception,
+which can be handled normally as any L{Deferred} would be::
+
+    def trapZero(result):
+        result.trap(ZeroDivisionError)
+        print "Divided by zero: returning INF"
+        return 1e1000
+    ClientCreator(reactor, amp.AMP).connectTCP(...).addCallback(
+        lambda p: p.callRemote(Divide, numerator=1234,
+                               denominator=0)
+        ).addErrback(trapZero)
+
+For a complete, runnable example of both of these commands, see the files in
+the Twisted repository::
+
+    doc/core/examples/ampserver.py
+    doc/core/examples/ampclient.py
+
+On the wire, AMP is a protocol which uses 2-byte lengths to prefix keys and
+values, and empty keys to separate messages::
+
+    <2-byte length><key><2-byte length><value>
+    <2-byte length><key><2-byte length><value>
+    ...
+    <2-byte length><key><2-byte length><value>
+    <NUL><NUL>                  # Empty Key == End of Message
+
+And so on.  Because it's tedious to refer to lengths and NULs constantly, the
+documentation will refer to packets as if they were newline delimited, like
+so::
+
+    C: _command: sum
+    C: _ask: ef639e5c892ccb54
+    C: a: 13
+    C: b: 81
+
+    S: _answer: ef639e5c892ccb54
+    S: total: 94
+
+Notes:
+
+In general, the order of keys is arbitrary.  Specific uses of AMP may impose an
+ordering requirement, but unless this is specified explicitly, any ordering may
+be generated and any ordering must be accepted.  This applies to the
+command-related keys I{_command} and I{_ask} as well as any other keys.
+
+Values are limited to the maximum encodable size in a 16-bit length, 65535
+bytes.
+
+Keys are limited to the maximum encodable size in an 8-bit length, 255 bytes.
+Note that we still use 2-byte lengths to encode keys.  This small redundancy
+has several features:
+
+    - If an implementation becomes confused and starts emitting corrupt data,
+      or gets keys confused with values, many common errors will be signalled
+      immediately instead of delivering obviously corrupt packets.
+
+    - A single NUL will separate every key, and a double NUL separates
+      messages.  This provides some redundancy when debugging traffic dumps.
+
+    - NULs will be present at regular intervals along the protocol, providing
+      some padding for otherwise braindead C implementations of the protocol,
+      so that <stdio.h> string functions will see the NUL and stop.
+
+    - This makes it possible to run an AMP server on a port also used by a
+      plain-text protocol, and easily distinguish between non-AMP clients (like
+      web browsers) which issue non-NUL as the first byte, and AMP clients,
+      which always issue NUL as the first byte.
+"""
+
+__metaclass__ = type
+
+import types, warnings
+
+from cStringIO import StringIO
+from struct import pack
+import decimal, datetime
+from itertools import count
+
+from zope.interface import Interface, implements
+
+from twisted.python.compat import set
+from twisted.python.util import unsignedID
+from twisted.python.reflect import accumulateClassDict
+from twisted.python.failure import Failure
+from twisted.python import log, filepath
+
+from twisted.internet.interfaces import IFileDescriptorReceiver
+from twisted.internet.main import CONNECTION_LOST
+from twisted.internet.error import PeerVerifyError, ConnectionLost
+from twisted.internet.error import ConnectionClosed
+from twisted.internet.defer import Deferred, maybeDeferred, fail
+from twisted.protocols.basic import Int16StringReceiver, StatefulStringProtocol
+
+try:
+    from twisted.internet import ssl
+except ImportError:
+    ssl = None
+
+if ssl and not ssl.supported:
+    ssl = None
+
+if ssl is not None:
+    from twisted.internet.ssl import CertificateOptions, Certificate, DN, KeyPair
+
+ASK = '_ask'
+ANSWER = '_answer'
+COMMAND = '_command'
+ERROR = '_error'
+ERROR_CODE = '_error_code'
+ERROR_DESCRIPTION = '_error_description'
+UNKNOWN_ERROR_CODE = 'UNKNOWN'
+UNHANDLED_ERROR_CODE = 'UNHANDLED'
+
+MAX_KEY_LENGTH = 0xff
+MAX_VALUE_LENGTH = 0xffff
+
+
+class IArgumentType(Interface):
+    """
+    An L{IArgumentType} can serialize a Python object into an AMP box and
+    deserialize information from an AMP box back into a Python object.
+
+    @since: 9.0
+    """
+    def fromBox(name, strings, objects, proto):
+        """
+        Given an argument name and an AMP box containing serialized values,
+        extract one or more Python objects and add them to the C{objects}
+        dictionary.
+
+        @param name: The name associated with this argument.  Most commonly,
+            this is the key which can be used to find a serialized value in
+            C{strings} and which should be used as the key in C{objects} to
+            associate with a structured Python object.
+        @type name: C{str}
+
+        @param strings: The AMP box from which to extract one or more
+            values.
+        @type strings: C{dict}
+
+        @param objects: The output dictionary to populate with the value for
+            this argument.
+        @type objects: C{dict}
+
+        @param proto: The protocol instance which received the AMP box being
+            interpreted.  Most likely this is an instance of L{AMP}, but
+            this is not guaranteed.
+
+        @return: C{None}
+        """
+
+
+    def toBox(name, strings, objects, proto):
+        """
+        Given an argument name and a dictionary containing structured Python
+        objects, serialize values into one or more strings and add them to
+        the C{strings} dictionary.
+
+        @param name: The name associated with this argument.  Most commonly,
+            this is the key which can be used to find an object in
+            C{objects} and which should be used as the key in C{strings} to
+            associate with a C{str} giving the serialized form of that
+            object.
+        @type name: C{str}
+
+        @param strings: The AMP box into which to insert one or more
+            strings.
+        @type strings: C{dict}
+
+        @param objects: The input dictionary from which to extract Python
+            objects to serialize.
+        @type objects: C{dict}
+
+        @param proto: The protocol instance which will send the AMP box once
+            it is fully populated.  Most likely this is an instance of
+            L{AMP}, but this is not guaranteed.
+
+        @return: C{None}
+        """
+
+
+
+class IBoxSender(Interface):
+    """
+    A transport which can send L{AmpBox} objects.
+    """
+
+    def sendBox(box):
+        """
+        Send an L{AmpBox}.
+
+        @raise ProtocolSwitched: if the underlying protocol has been
+        switched.
+
+        @raise ConnectionLost: if the underlying connection has already been
+        lost.
+        """
+
+    def unhandledError(failure):
+        """
+        An unhandled error occurred in response to a box.  Log it
+        appropriately.
+
+        @param failure: a L{Failure} describing the error that occurred.
+        """
+
+
+
+class IBoxReceiver(Interface):
+    """
+    An application object which can receive L{AmpBox} objects and dispatch them
+    appropriately.
+    """
+
+    def startReceivingBoxes(boxSender):
+        """
+        The L{ampBoxReceived} method will start being called; boxes may be
+        responded to by sending boxes via the given L{IBoxSender}.
+
+        @param boxSender: an L{IBoxSender} provider.
+        """
+
+
+    def ampBoxReceived(box):
+        """
+        A box was received from the transport; dispatch it appropriately.
+        """
+
+
+    def stopReceivingBoxes(reason):
+        """
+        No further boxes will be received on this connection.
+
+        @type reason: L{Failure}
+        """
+
+
+
+class IResponderLocator(Interface):
+    """
+    An application object which can look up appropriate responder methods for
+    AMP commands.
+    """
+
+    def locateResponder(name):
+        """
+        Locate a responder method appropriate for the named command.
+
+        @param name: the wire-level name (commandName) of the AMP command to be
+        responded to.
+
+        @return: a 1-argument callable that takes an L{AmpBox} with argument
+        values for the given command, and returns an L{AmpBox} containing
+        argument values for the named command, or a L{Deferred} that fires the
+        same.
+        """
+
+
+
+class AmpError(Exception):
+    """
+    Base class of all Amp-related exceptions.
+    """
+
+
+
+class ProtocolSwitched(Exception):
+    """
+    Connections which have been switched to other protocols can no longer
+    accept traffic at the AMP level.  This is raised when you try to send it.
+    """
+
+
+
+class OnlyOneTLS(AmpError):
+    """
+    This is an implementation limitation; TLS may only be started once per
+    connection.
+    """
+
+
+
+class NoEmptyBoxes(AmpError):
+    """
+    You can't have empty boxes on the connection.  This is raised when you
+    receive or attempt to send one.
+    """
+
+
+
+class InvalidSignature(AmpError):
+    """
+    You didn't pass all the required arguments.
+    """
+
+
+
+class TooLong(AmpError):
+    """
+    One of the protocol's length limitations was violated.
+
+    @ivar isKey: true if the string being encoded was in a key position, false
+    if it was in a value position.
+
+    @ivar isLocal: Was the string encoded locally, or received too long from
+    the network?  (It's only physically possible to encode "too long" values on
+    the network for keys.)
+
+    @ivar value: The string that was too long.
+
+    @ivar keyName: If the string being encoded was in a value position, what
+    key was it being encoded for?
+    """
+
+    def __init__(self, isKey, isLocal, value, keyName=None):
+        AmpError.__init__(self)
+        self.isKey = isKey
+        self.isLocal = isLocal
+        self.value = value
+        self.keyName = keyName
+
+
+    def __repr__(self):
+        hdr = self.isKey and "key" or "value"
+        if not self.isKey:
+            hdr += ' ' + repr(self.keyName)
+        lcl = self.isLocal and "local" or "remote"
+        return "%s %s too long: %d" % (lcl, hdr, len(self.value))
+
+
+
+class BadLocalReturn(AmpError):
+    """
+    A bad value was returned from a local command; we were unable to coerce it.
+    """
+    def __init__(self, message, enclosed):
+        AmpError.__init__(self)
+        self.message = message
+        self.enclosed = enclosed
+
+
+    def __repr__(self):
+        return self.message + " " + self.enclosed.getBriefTraceback()
+
+    __str__ = __repr__
+
+
+
+class RemoteAmpError(AmpError):
+    """
+    This error indicates that something went wrong on the remote end of the
+    connection, and the error was serialized and transmitted to you.
+    """
+    def __init__(self, errorCode, description, fatal=False, local=None):
+        """Create a remote error with an error code and description.
+
+        @param errorCode: the AMP error code of this error.
+
+        @param description: some text to show to the user.
+
+        @param fatal: a boolean, true if this error should terminate the
+        connection.
+
+        @param local: a local Failure, if one exists.
+        """
+        if local:
+            localwhat = ' (local)'
+            othertb = local.getBriefTraceback()
+        else:
+            localwhat = ''
+            othertb = ''
+        Exception.__init__(self, "Code<%s>%s: %s%s" % (
+                errorCode, localwhat,
+                description, othertb))
+        self.local = local
+        self.errorCode = errorCode
+        self.description = description
+        self.fatal = fatal
+
+
+
+class UnknownRemoteError(RemoteAmpError):
+    """
+    This means that an error whose type we can't identify was raised from the
+    other side.
+    """
+    def __init__(self, description):
+        errorCode = UNKNOWN_ERROR_CODE
+        RemoteAmpError.__init__(self, errorCode, description)
+
+
+
+class MalformedAmpBox(AmpError):
+    """
+    This error indicates that the wire-level protocol was malformed.
+    """
+
+
+
+class UnhandledCommand(AmpError):
+    """
+    A command received via amp could not be dispatched.
+    """
+
+
+
+class IncompatibleVersions(AmpError):
+    """
+    It was impossible to negotiate a compatible version of the protocol with
+    the other end of the connection.
+    """
+
+
+PROTOCOL_ERRORS = {UNHANDLED_ERROR_CODE: UnhandledCommand}
+
+class AmpBox(dict):
+    """
+    I am a packet in the AMP protocol, much like a regular str:str dictionary.
+    """
+    __slots__ = []              # be like a regular dictionary, don't magically
+                                # acquire a __dict__...
+
+
+    def copy(self):
+        """
+        Return another AmpBox just like me.
+        """
+        newBox = self.__class__()
+        newBox.update(self)
+        return newBox
+
+
+    def serialize(self):
+        """
+        Convert me into a wire-encoded string.
+
+        @return: a str encoded according to the rules described in the module
+        docstring.
+        """
+        i = self.items()
+        i.sort()
+        L = []
+        w = L.append
+        for k, v in i:
+            if type(k) == unicode:
+                raise TypeError("Unicode key not allowed: %r" % k)
+            if type(v) == unicode:
+                raise TypeError(
+                    "Unicode value for key %r not allowed: %r" % (k, v))
+            if len(k) > MAX_KEY_LENGTH:
+                raise TooLong(True, True, k, None)
+            if len(v) > MAX_VALUE_LENGTH:
+                raise TooLong(False, True, v, k)
+            for kv in k, v:
+                w(pack("!H", len(kv)))
+                w(kv)
+        w(pack("!H", 0))
+        return ''.join(L)
+
+
+    def _sendTo(self, proto):
+        """
+        Serialize and send this box to an Amp instance.  By the time it is being
+        sent, several keys are required.  I must have exactly ONE of::
+
+            _ask
+            _answer
+            _error
+
+        If the '_ask' key is set, then the '_command' key must also be
+        set.
+
+        @param proto: an AMP instance.
+        """
+        proto.sendBox(self)
+
+    def __repr__(self):
+        return 'AmpBox(%s)' % (dict.__repr__(self),)
+
+# amp.Box => AmpBox
+
+Box = AmpBox
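+
+
+# Editor's illustrative sketch -- not part of upstream Twisted.  It shows how
+# the 'sum' request from the module docstring maps onto the serialization
+# defined above; the function name is hypothetical and nothing calls it.
+def _exampleSumRequestBytes():
+    """
+    Return the wire encoding of the docstring's example 'sum' request:
+    length-prefixed keys and values, terminated by an empty key.
+    """
+    box = Box(_command='sum', _ask='ef639e5c892ccb54', a='13', b='81')
+    return box.serialize()
+
+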
+
+class QuitBox(AmpBox):
+    """
+    I am an AmpBox that, upon being sent, terminates the connection.
+    """
+    __slots__ = []
+
+
+    def __repr__(self):
+        return 'QuitBox(**%s)' % (super(QuitBox, self).__repr__(),)
+
+
+    def _sendTo(self, proto):
+        """
+        Immediately call loseConnection after sending.
+        """
+        super(QuitBox, self)._sendTo(proto)
+        proto.transport.loseConnection()
+
+
+
+class _SwitchBox(AmpBox):
+    """
+    Implementation detail of ProtocolSwitchCommand: I am an AmpBox which sets
+    up state for the protocol to switch.
+    """
+
+    # DON'T set __slots__ here; we do have an attribute.
+
+    def __init__(self, innerProto, **kw):
+        """
+        Create a _SwitchBox with the protocol to switch to after being sent.
+
+        @param innerProto: the protocol instance to switch to.
+        @type innerProto: an IProtocol provider.
+        """
+        super(_SwitchBox, self).__init__(**kw)
+        self.innerProto = innerProto
+
+
+    def __repr__(self):
+        return '_SwitchBox(%r, **%s)' % (self.innerProto,
+                                         dict.__repr__(self),)
+
+
+    def _sendTo(self, proto):
+        """
+        Send me; I am the last box on the connection.  All further traffic
+        will be over the new protocol.
+        """
+        super(_SwitchBox, self)._sendTo(proto)
+        proto._lockForSwitch()
+        proto._switchTo(self.innerProto)
+
+
+
+class BoxDispatcher:
+    """
+    A L{BoxDispatcher} dispatches '_ask', '_answer', and '_error' L{AmpBox}es,
+    both incoming and outgoing, to their appropriate destinations.
+
+    Outgoing commands are converted into L{Deferred}s and outgoing boxes, and
+    associated tracking state to fire those L{Deferred}s when '_answer' boxes
+    come back.  Incoming '_answer' and '_error' boxes are converted into
+    callbacks and errbacks on those L{Deferred}s, respectively.
+
+    Incoming '_ask' boxes are converted into method calls on a supplied method
+    locator.
+
+    @ivar _outstandingRequests: a dictionary mapping request IDs to
+    L{Deferred}s which were returned for those requests.
+
+    @ivar locator: an object with a L{locateResponder} method that locates a
+    responder function that takes a Box and returns a result (either a Box or a
+    Deferred which fires one).
+
+    @ivar boxSender: an object which can send boxes, via the L{_sendBox}
+    method, such as an L{AMP} instance.
+    @type boxSender: L{IBoxSender}
+    """
+
+    implements(IBoxReceiver)
+
+    _failAllReason = None
+    _outstandingRequests = None
+    _counter = 0L
+    boxSender = None
+
+    def __init__(self, locator):
+        self._outstandingRequests = {}
+        self.locator = locator
+
+
+    def startReceivingBoxes(self, boxSender):
+        """
+        The given boxSender is going to start calling boxReceived on this
+        L{BoxDispatcher}.
+
+        @param boxSender: The L{IBoxSender} to send command responses to.
+        """
+        self.boxSender = boxSender
+
+
+    def stopReceivingBoxes(self, reason):
+        """
+        No further boxes will be received here.  Terminate all currently
+        outstanding command deferreds with the given reason.
+        """
+        self.failAllOutgoing(reason)
+
+
+    def failAllOutgoing(self, reason):
+        """
+        Call the errback on all outstanding requests awaiting responses.
+
+        @param reason: the Failure instance to pass to those errbacks.
+        """
+        self._failAllReason = reason
+        OR = self._outstandingRequests.items()
+        self._outstandingRequests = None # we can never send another request
+        for key, value in OR:
+            value.errback(reason)
+
+
+    def _nextTag(self):
+        """
+        Generate protocol-local serial numbers for _ask keys.
+
+        @return: a string that has not yet been used on this connection.
+        """
+        self._counter += 1
+        return '%x' % (self._counter,)
+
+
+    def _sendBoxCommand(self, command, box, requiresAnswer=True):
+        """
+        Send a command across the wire with the given C{amp.Box}.
+
+        Mutate the given box to give it any additional keys (_command, _ask)
+        required for the command and request/response machinery, then send it.
+
+        If requiresAnswer is True, returns a C{Deferred} which fires when a
+        response is received. The C{Deferred} is fired with an C{amp.Box} on
+        success, or with an C{amp.RemoteAmpError} if an error is received.
+
+        If the Deferred fails and the error is not handled by the caller of
+        this method, the failure will be logged and the connection dropped.
+
+        @param command: a str, the name of the command to issue.
+
+        @param box: an AmpBox with the arguments for the command.
+
+        @param requiresAnswer: a boolean.  Defaults to True.  If True, return a
+        Deferred which will fire when the other side responds to this command.
+        If False, return None and do not ask the other side for acknowledgement.
+
+        @return: a Deferred which fires the AmpBox that holds the response to
+        this command, or None, as specified by requiresAnswer.
+
+        @raise ProtocolSwitched: if the protocol has been switched.
+        """
+        if self._failAllReason is not None:
+            return fail(self._failAllReason)
+        box[COMMAND] = command
+        tag = self._nextTag()
+        if requiresAnswer:
+            box[ASK] = tag
+        box._sendTo(self.boxSender)
+        if requiresAnswer:
+            result = self._outstandingRequests[tag] = Deferred()
+        else:
+            result = None
+        return result
+
+
+    def callRemoteString(self, command, requiresAnswer=True, **kw):
+        """
+        This is a low-level API, designed only for optimizing simple messages
+        for which the overhead of parsing is too great.
+
+        @param command: a str naming the command.
+
+        @param kw: arguments to the amp box.
+
+        @param requiresAnswer: a boolean.  Defaults to True.  If True, return a
+        Deferred which will fire when the other side responds to this command.
+        If False, return None and do not ask the other side for acknowledgement.
+
+        @return: a Deferred which fires the AmpBox that holds the response to
+        this command, or None, as specified by requiresAnswer.
+        """
+        box = Box(kw)
+        return self._sendBoxCommand(command, box, requiresAnswer)
+
+
+    def callRemote(self, commandType, *a, **kw):
+        """
+        This is the primary high-level API for sending messages via AMP.
+        Invoke it with a command and appropriate arguments to send a message
+        to this connection's peer.
+
+        @param commandType: a subclass of Command.
+        @type commandType: L{type}
+
+        @param a: Positional (special) parameters taken by the command.
+        Positional parameters will typically not be sent over the wire.  The
+        only command included with AMP which uses positional parameters is
+        L{ProtocolSwitchCommand}, which takes the protocol that will be
+        switched to as its first argument.
+
+        @param kw: Keyword arguments taken by the command.  These are the
+        arguments declared in the command's 'arguments' attribute.  They will
+        be encoded and sent to the peer as arguments for the L{commandType}.
+
+        @return: If L{commandType} has a C{requiresAnswer} attribute set to
+        L{False}, then return L{None}.  Otherwise, return a L{Deferred} which
+        fires with a dictionary of objects representing the result of this
+        call.  Additionally, this L{Deferred} may fail with an exception
+        representing a connection failure, with L{UnknownRemoteError} if the
+        other end of the connection fails for an unknown reason, or with any
+        error specified as a key in L{commandType}'s C{errors} dictionary.
+        """
+
+        # XXX this takes command subclasses and not command objects on purpose.
+        # There's really no reason to have all this back-and-forth between
+        # command objects and the protocol, and the extra object being created
+        # (the Command instance) is pointless.  Command is kind of like
+        # Interface, and should be more like it.
+
+        # In other words, the fact that commandType is instantiated here is an
+        # implementation detail.  Don't rely on it.
+
+        try:
+            co = commandType(*a, **kw)
+        except:
+            return fail()
+        return co._doCommand(self)
+
+
+    def unhandledError(self, failure):
+        """
+        This is a terminal callback called after application code has had a
+        chance to quash any errors.
+        """
+        return self.boxSender.unhandledError(failure)
+
+
+    def _answerReceived(self, box):
+        """
+        An AMP box was received that answered a command previously sent with
+        L{callRemote}.
+
+        @param box: an AmpBox with a value for its L{ANSWER} key.
+        """
+        question = self._outstandingRequests.pop(box[ANSWER])
+        question.addErrback(self.unhandledError)
+        question.callback(box)
+
+
+    def _errorReceived(self, box):
+        """
+        An AMP box was received that answered a command previously sent with
+        L{callRemote}, with an error.
+
+        @param box: an L{AmpBox} with a value for its L{ERROR}, L{ERROR_CODE},
+        and L{ERROR_DESCRIPTION} keys.
+        """
+        question = self._outstandingRequests.pop(box[ERROR])
+        question.addErrback(self.unhandledError)
+        errorCode = box[ERROR_CODE]
+        description = box[ERROR_DESCRIPTION]
+        if errorCode in PROTOCOL_ERRORS:
+            exc = PROTOCOL_ERRORS[errorCode](errorCode, description)
+        else:
+            exc = RemoteAmpError(errorCode, description)
+        question.errback(Failure(exc))
+
+
+    def _commandReceived(self, box):
+        """
+        @param box: an L{AmpBox} with a value for its L{COMMAND} and L{ASK}
+        keys.
+        """
+        def formatAnswer(answerBox):
+            answerBox[ANSWER] = box[ASK]
+            return answerBox
+        def formatError(error):
+            if error.check(RemoteAmpError):
+                code = error.value.errorCode
+                desc = error.value.description
+                if error.value.fatal:
+                    errorBox = QuitBox()
+                else:
+                    errorBox = AmpBox()
+            else:
+                errorBox = QuitBox()
+                log.err(error) # here is where server-side logging happens
+                               # if the error isn't handled
+                code = UNKNOWN_ERROR_CODE
+                desc = "Unknown Error"
+            errorBox[ERROR] = box[ASK]
+            errorBox[ERROR_DESCRIPTION] = desc
+            errorBox[ERROR_CODE] = code
+            return errorBox
+        deferred = self.dispatchCommand(box)
+        if ASK in box:
+            deferred.addCallbacks(formatAnswer, formatError)
+            deferred.addCallback(self._safeEmit)
+        deferred.addErrback(self.unhandledError)
+
+
+    def ampBoxReceived(self, box):
+        """
+        An AmpBox was received, representing a command, or an answer to a
+        previously issued command (either successful or erroneous).  Respond to
+        it according to its contents.
+
+        @param box: an AmpBox
+
+        @raise NoEmptyBoxes: when a box is received that does not contain an
+        '_answer', '_command' / '_ask', or '_error' key; i.e. one which does not
+        fit into the command / response protocol defined by AMP.
+        """
+        if ANSWER in box:
+            self._answerReceived(box)
+        elif ERROR in box:
+            self._errorReceived(box)
+        elif COMMAND in box:
+            self._commandReceived(box)
+        else:
+            raise NoEmptyBoxes(box)
+
+
+    def _safeEmit(self, aBox):
+        """
+        Emit a box, ignoring L{ProtocolSwitched} and L{ConnectionLost} errors
+        which cannot be usefully handled.
+        """
+        try:
+            aBox._sendTo(self.boxSender)
+        except (ProtocolSwitched, ConnectionLost):
+            pass
+
+
+    def dispatchCommand(self, box):
+        """
+        A box with a _command key was received.
+
+        Dispatch it to a local handler and call it.
+
+        @param box: an AmpBox to be dispatched.
+        """
+        cmd = box[COMMAND]
+        responder = self.locator.locateResponder(cmd)
+        if responder is None:
+            return fail(RemoteAmpError(
+                    UNHANDLED_ERROR_CODE,
+                    "Unhandled Command: %r" % (cmd,),
+                    False,
+                    local=Failure(UnhandledCommand())))
+        return maybeDeferred(responder, box)
+
+
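+# Editor's illustrative sketch -- not part of upstream Twisted; the function
+# name is hypothetical and nothing calls it.  It shows the wiring that the
+# AMP protocol class defined further below normally performs for you.
+def _exampleDispatch(sender, locator, box):
+    """
+    Feed one incoming box through a BoxDispatcher hooked up to a box sender.
+    """
+    dispatcher = BoxDispatcher(locator)
+    dispatcher.startReceivingBoxes(sender)  # sender must provide IBoxSender
+    dispatcher.ampBoxReceived(box)          # routes on _ask/_answer/_error
+    return dispatcher
+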
+
+class CommandLocator:
+    """
+    A L{CommandLocator} is a collection of responders to AMP L{Command}s, with
+    the help of the L{Command.responder} decorator.
+    """
+
+    class __metaclass__(type):
+        """
+        This metaclass keeps track of all of the Command.responder-decorated
+        methods defined since the last CommandLocator subclass was defined.  It
+        assumes (usually correctly, but unfortunately not necessarily so) that
+        those commands responders were all declared as methods of the class
+        being defined.  Note that this list can be incorrect if users use the
+        Command.responder decorator outside the context of a CommandLocator
+        class declaration.
+
+        Command responders defined on subclasses are given precedence over
+        those inherited from a base class.
+
+        The Command.responder decorator explicitly cooperates with this
+        metaclass.
+        """
+
+        _currentClassCommands = []
+        def __new__(cls, name, bases, attrs):
+            commands = cls._currentClassCommands[:]
+            cls._currentClassCommands[:] = []
+            cd = attrs['_commandDispatch'] = {}
+            subcls = type.__new__(cls, name, bases, attrs)
+            ancestors = list(subcls.__mro__[1:])
+            ancestors.reverse()
+            for ancestor in ancestors:
+                cd.update(getattr(ancestor, '_commandDispatch', {}))
+            for commandClass, responderFunc in commands:
+                cd[commandClass.commandName] = (commandClass, responderFunc)
+            if (bases and (
+                    subcls.lookupFunction != CommandLocator.lookupFunction)):
+                def locateResponder(self, name):
+                    warnings.warn(
+                        "Override locateResponder, not lookupFunction.",
+                        category=PendingDeprecationWarning,
+                        stacklevel=2)
+                    return self.lookupFunction(name)
+                subcls.locateResponder = locateResponder
+            return subcls
+
+
+    implements(IResponderLocator)
+
+
+    def _wrapWithSerialization(self, aCallable, command):
+        """
+        Wrap aCallable with its command's argument de-serialization
+        and result serialization logic.
+
+        @param aCallable: a callable with a 'command' attribute, designed to be
+        called with keyword arguments.
+
+        @param command: the command class whose serialization to use.
+
+        @return: a 1-arg callable which, when invoked with an AmpBox, will
+        deserialize the argument list and invoke appropriate user code for the
+        callable's command, returning a Deferred which fires with the result or
+        fails with an error.
+        """
+        def doit(box):
+            kw = command.parseArguments(box, self)
+            def checkKnownErrors(error):
+                key = error.trap(*command.allErrors)
+                code = command.allErrors[key]
+                desc = str(error.value)
+                return Failure(RemoteAmpError(
+                        code, desc, key in command.fatalErrors, local=error))
+            def makeResponseFor(objects):
+                try:
+                    return command.makeResponse(objects, self)
+                except:
+                    # let's helpfully log this.
+                    originalFailure = Failure()
+                    raise BadLocalReturn(
+                        "%r returned %r and %r could not serialize it" % (
+                            aCallable,
+                            objects,
+                            command),
+                        originalFailure)
+            return maybeDeferred(aCallable, **kw).addCallback(
+                makeResponseFor).addErrback(
+                checkKnownErrors)
+        return doit
+
+
+    def lookupFunction(self, name):
+        """
+        Deprecated synonym for L{locateResponder}.
+        """
+        if self.__class__.lookupFunction != CommandLocator.lookupFunction:
+            return CommandLocator.locateResponder(self, name)
+        else:
+            warnings.warn("Call locateResponder, not lookupFunction.",
+                          category=PendingDeprecationWarning,
+                          stacklevel=2)
+        return self.locateResponder(name)
+
+
+    def locateResponder(self, name):
+        """
+        Locate a callable to invoke when executing the named command.
+
+        @param name: the normalized name (from the wire) of the command.
+
+        @return: a 1-argument function that takes a Box and returns a box or a
+        Deferred which fires a Box, for handling the command identified by the
+        given name, or None, if no appropriate responder can be found.
+        """
+        # Try to find a high-level method to invoke, and if we can't find one,
+        # fall back to a low-level one.
+        cd = self._commandDispatch
+        if name in cd:
+            commandClass, responderFunc = cd[name]
+            responderMethod = types.MethodType(
+                responderFunc, self, self.__class__)
+            return self._wrapWithSerialization(responderMethod, commandClass)
+
+
+
+class SimpleStringLocator(object):
+    """
+    Implement the L{locateResponder} method to do simple, string-based
+    dispatch.
+    """
+
+    implements(IResponderLocator)
+
+    baseDispatchPrefix = 'amp_'
+
+    def locateResponder(self, name):
+        """
+        Locate a callable to invoke when executing the named command.
+
+        @return: a function with the name C{"amp_" + name} on L{self}, or None
+        if no such function exists.  This function will then be called with the
+        L{AmpBox} itself as an argument.
+
+        @param name: the normalized name (from the wire) of the command.
+        """
+        fName = self.baseDispatchPrefix + (name.upper())
+        return getattr(self, fName, None)
+
+
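+# Editor's illustrative sketch -- not part of upstream Twisted.  The class and
+# command names below are hypothetical; they only demonstrate the
+# C{"amp_" + NAME} dispatch convention documented above.
+class _ExampleStringLocator(SimpleStringLocator):
+    def amp_SUM(self, box):
+        # Arguments arrive as strings in the box; reply with a box of strings.
+        return AmpBox(total=str(int(box['a']) + int(box['b'])))
+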
+
+PYTHON_KEYWORDS = [
+    'and', 'del', 'for', 'is', 'raise', 'assert', 'elif', 'from', 'lambda',
+    'return', 'break', 'else', 'global', 'not', 'try', 'class', 'except',
+    'if', 'or', 'while', 'continue', 'exec', 'import', 'pass', 'yield',
+    'def', 'finally', 'in', 'print']
+
+
+
+def _wireNameToPythonIdentifier(key):
+    """
+    (Private) Normalize an argument name from the wire for use with Python
+    code.  If the return value is going to be a python keyword it will be
+    capitalized.  If it contains any dashes they will be replaced with
+    underscores.
+
+    The rationale behind this method is that AMP should be an inherently
+    multi-language protocol, so message keys may contain all manner of bizarre
+    bytes.  This is not a complete solution; there are still forms of arguments
+    that this implementation will be unable to parse.  However, Python
+    identifiers share a huge raft of properties with identifiers from many
+    other languages, so this is a 'good enough' effort for now.  We deal
+    explicitly with dashes because that is the most likely departure: Lisps
+    commonly use dashes to separate method names, so protocols initially
+    implemented in a lisp amp dialect may use dashes in argument or command
+    names.
+
+    @param key: a str, looking something like 'foo-bar-baz' or 'from'
+
+    @return: a str which is a valid python identifier, looking something like
+    'foo_bar_baz' or 'From'.
+    """
+    lkey = key.replace("-", "_")
+    if lkey in PYTHON_KEYWORDS:
+        return lkey.title()
+    return lkey
+
+
+
+class Argument:
+    """
+    Base-class of all objects that take values from Amp packets and convert
+    them into objects for Python functions.
+
+    This implementation of L{IArgumentType} provides several higher-level
+    hooks for subclasses to override.  See L{toString} and L{fromString}
+    which will be used to define the behavior of L{IArgumentType.toBox} and
+    L{IArgumentType.fromBox}, respectively.
+    """
+    implements(IArgumentType)
+
+    optional = False
+
+
+    def __init__(self, optional=False):
+        """
+        Create an Argument.
+
+        @param optional: a boolean indicating whether this argument can be
+        omitted in the protocol.
+        """
+        self.optional = optional
+
+
+    def retrieve(self, d, name, proto):
+        """
+        Retrieve the given key from the given dictionary, removing it if found.
+
+        @param d: a dictionary.
+
+        @param name: a key in L{d}.
+
+        @param proto: an instance of an AMP.
+
+        @raise KeyError: if I am not optional and no value was found.
+
+        @return: d[name].
+        """
+        if self.optional:
+            value = d.get(name)
+            if value is not None:
+                del d[name]
+        else:
+            value = d.pop(name)
+        return value
+
+
+    def fromBox(self, name, strings, objects, proto):
+        """
+        Populate an 'out' dictionary, mapping names to Python values
+        decoded from an 'in' AmpBox mapping strings to string values.
+
+        @param name: the argument name to retrieve
+        @type name: str
+
+        @param strings: The AmpBox to read string(s) from, a mapping of
+        argument names to string values.
+        @type strings: AmpBox
+
+        @param objects: The dictionary to write object(s) to, a mapping of
+        names to Python objects.
+        @type objects: dict
+
+        @param proto: an AMP instance.
+        """
+        st = self.retrieve(strings, name, proto)
+        nk = _wireNameToPythonIdentifier(name)
+        if self.optional and st is None:
+            objects[nk] = None
+        else:
+            objects[nk] = self.fromStringProto(st, proto)
+
+
+    def toBox(self, name, strings, objects, proto):
+        """
+        Populate an 'out' AmpBox with strings encoded from an 'in' dictionary
+        mapping names to Python values.
+
+        @param name: the argument name to retrieve
+        @type name: str
+
+        @param strings: The AmpBox to write string(s) to, a mapping of
+        argument names to string values.
+        @type strings: AmpBox
+
+        @param objects: The dictionary to read object(s) from, a mapping of
+        names to Python objects.
+
+        @type objects: dict
+
+        @param proto: the protocol we are converting for.
+        @type proto: AMP
+        """
+        obj = self.retrieve(objects, _wireNameToPythonIdentifier(name), proto)
+        if self.optional and obj is None:
+            # strings[name] = None
+            pass
+        else:
+            strings[name] = self.toStringProto(obj, proto)
+
+
+    def fromStringProto(self, inString, proto):
+        """
+        Convert a string to a Python value.
+
+        @param inString: the string to convert.
+
+        @param proto: the protocol we are converting for.
+        @type proto: AMP
+
+        @return: a Python object.
+        """
+        return self.fromString(inString)
+
+
+    def toStringProto(self, inObject, proto):
+        """
+        Convert a Python object to a string.
+
+        @param inObject: the object to convert.
+
+        @param proto: the protocol we are converting for.
+        @type proto: AMP
+        """
+        return self.toString(inObject)
+
+
+    def fromString(self, inString):
+        """
+        Convert a string to a Python object.  Subclasses must implement this.
+
+        @param inString: the string to convert.
+        @type inString: str
+
+        @return: the decoded value from inString
+        """
+
+
+    def toString(self, inObject):
+        """
+        Convert a Python object into a string for passing over the network.
+
+        @param inObject: an object of the type that this Argument is intended
+        to deal with.
+
+        @return: the wire encoding of inObject
+        @rtype: str
+        """
+
+
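+# Editor's illustrative sketch -- not part of upstream Twisted.  A minimal
+# Argument subclass only needs fromString/toString, as described above; the
+# class name is hypothetical and nothing in this module uses it.
+class _ExampleCommaSeparated(Argument):
+    def toString(self, inObject):
+        # Encode a list of plain byte strings as one comma-separated string.
+        return ','.join(inObject)
+
+    def fromString(self, inString):
+        return inString.split(',')
+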
+
+class Integer(Argument):
+    """
+    Encode integer values of any size on the wire as their string
+    representation.
+
+    Example: C{123} becomes C{"123"}
+    """
+    fromString = int
+    def toString(self, inObject):
+        return str(int(inObject))
+
+
+
+class String(Argument):
+    """
+    Don't do any conversion at all; just pass through 'str'.
+    """
+    def toString(self, inObject):
+        return inObject
+
+
+    def fromString(self, inString):
+        return inString
+
+
+
+class Float(Argument):
+    """
+    Encode floating-point values on the wire as their repr.
+    """
+    fromString = float
+    toString = repr
+
+
+
+class Boolean(Argument):
+    """
+    Encode True or False as "True" or "False" on the wire.
+    """
+    def fromString(self, inString):
+        if inString == 'True':
+            return True
+        elif inString == 'False':
+            return False
+        else:
+            raise TypeError("Bad boolean value: %r" % (inString,))
+
+
+    def toString(self, inObject):
+        if inObject:
+            return 'True'
+        else:
+            return 'False'
+
+
+
+class Unicode(String):
+    """
+    Encode a unicode string on the wire as UTF-8.
+    """
+
+    def toString(self, inObject):
+        # assert isinstance(inObject, unicode)
+        return String.toString(self, inObject.encode('utf-8'))
+
+
+    def fromString(self, inString):
+        # assert isinstance(inString, str)
+        return String.fromString(self, inString).decode('utf-8')
+
+
+
+class Path(Unicode):
+    """
+    Encode and decode L{filepath.FilePath} instances as paths on the wire.
+
+    This is really intended for use with subprocess communication tools:
+    exchanging pathnames on different machines over a network is not generally
+    meaningful, but neither is it disallowed; you can use this to communicate
+    about NFS paths, for example.
+    """
+    def fromString(self, inString):
+        return filepath.FilePath(Unicode.fromString(self, inString))
+
+
+    def toString(self, inObject):
+        return Unicode.toString(self, inObject.path)
+
+
+
+class ListOf(Argument):
+    """
+    Encode and decode lists of instances of a single other argument type.
+
+    For example, if you want to pass::
+
+        [3, 7, 9, 15]
+
+    You can create an argument like this::
+
+        ListOf(Integer())
+
+    The serialized form of the entire list is subject to the limit imposed by
+    L{MAX_VALUE_LENGTH}.  List elements are represented as 16-bit length
+    prefixed strings.  The argument type passed to the L{ListOf} initializer is
+    responsible for producing the serialized form of each element.
+
+    @ivar elementType: The L{Argument} instance used to encode and decode list
+        elements (note, not an arbitrary L{IArgumentType} implementation:
+        arguments must be implemented using only the C{fromString} and
+        C{toString} methods, not the C{fromBox} and C{toBox} methods).
+
+    @param optional: a boolean indicating whether this argument can be
+        omitted in the protocol.
+
+    @since: 10.0
+    """
+    def __init__(self, elementType, optional=False):
+        self.elementType = elementType
+        Argument.__init__(self, optional)
+
+
+    def fromString(self, inString):
+        """
+        Convert the serialized form of a list of instances of some type back
+        into that list.
+        """
+        strings = []
+        parser = Int16StringReceiver()
+        parser.stringReceived = strings.append
+        parser.dataReceived(inString)
+        return map(self.elementType.fromString, strings)
+
+
+    def toString(self, inObject):
+        """
+        Serialize the given list of objects to a single string.
+        """
+        strings = []
+        for obj in inObject:
+            serialized = self.elementType.toString(obj)
+            strings.append(pack('!H', len(serialized)))
+            strings.append(serialized)
+        return ''.join(strings)
+
+
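+# Editor's illustrative sketch -- not part of upstream Twisted; the function
+# name is hypothetical and nothing calls it.
+def _exampleListOfRoundTrip():
+    """
+    Round-trip the docstring's example list through ListOf(Integer()).
+    """
+    arg = ListOf(Integer())
+    wire = arg.toString([3, 7, 9, 15])  # four 16-bit length-prefixed strings
+    return arg.fromString(wire)         # == [3, 7, 9, 15]
+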
+
+class AmpList(Argument):
+    """
+    Convert a list of dictionaries into a list of AMP boxes on the wire.
+
+    For example, if you want to pass::
+
+        [{'a': 7, 'b': u'hello'}, {'a': 9, 'b': u'goodbye'}]
+
+    You might use an AmpList like this in your arguments or response list::
+
+        AmpList([('a', Integer()),
+                 ('b', Unicode())])
+    """
+    def __init__(self, subargs, optional=False):
+        """
+        Create an AmpList.
+
+        @param subargs: a list of 2-tuples of ('name', argument) describing the
+        schema of the dictionaries in the sequence of amp boxes.
+
+        @param optional: a boolean indicating whether this argument can be
+        omitted in the protocol.
+        """
+        self.subargs = subargs
+        Argument.__init__(self, optional)
+
+
+    def fromStringProto(self, inString, proto):
+        boxes = parseString(inString)
+        values = [_stringsToObjects(box, self.subargs, proto)
+                  for box in boxes]
+        return values
+
+
+    def toStringProto(self, inObject, proto):
+        return ''.join([_objectsToStrings(
+                    objects, self.subargs, Box(), proto
+                    ).serialize() for objects in inObject])
+
+
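+# Editor's illustrative sketch -- not part of upstream Twisted; the function
+# name is hypothetical and nothing calls it.  Passing C{proto=None} is fine
+# here because Integer and Unicode ignore the protocol argument.
+def _exampleAmpListRoundTrip(proto=None):
+    """
+    Round-trip the AmpList docstring's example payload (note that
+    toStringProto pops the keys out of the dictionaries it is given).
+    """
+    arg = AmpList([('a', Integer()), ('b', Unicode())])
+    wire = arg.toStringProto(
+        [{'a': 7, 'b': u'hello'}, {'a': 9, 'b': u'goodbye'}], proto)
+    return arg.fromStringProto(wire, proto)
+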
+
+class Descriptor(Integer):
+    """
+    Encode and decode file descriptors for exchange over a UNIX domain socket.
+
+    This argument type requires an AMP connection set up over an
+    L{IUNIXTransport<twisted.internet.interfaces.IUNIXTransport>} provider (for
+    example, the kind of connection created by
+    L{IReactorUNIX.connectUNIX<twisted.internet.interfaces.IReactorUNIX.connectUNIX>}
+    and L{UNIXClientEndpoint<twisted.internet.endpoints.UNIXClientEndpoint>}).
+
+    There is no correspondence between the integer value of the file descriptor
+    on the sending and receiving sides, therefore an alternate approach is taken
+    to matching up received descriptors with particular L{Descriptor}
+    parameters.  The argument is encoded to an ordinal (unique per connection)
+    for inclusion in the AMP command or response box.  The descriptor itself is
+    sent using
+    L{IUNIXTransport.sendFileDescriptor<twisted.internet.interfaces.IUNIXTransport.sendFileDescriptor>}.
+    The receiver uses the order in which file descriptors are received and the
+    ordinal value to come up with the received copy of the descriptor.
+    """
+    def fromStringProto(self, inString, proto):
+        """
+        Take a unique identifier associated with a file descriptor which must
+        have been received by now and use it to look up that descriptor in a
+        dictionary where they are kept.
+
+        @param inString: The base representation (as a byte string) of an
+            ordinal indicating which file descriptor corresponds to this usage
+            of this argument.
+        @type inString: C{str}
+
+        @param proto: The protocol used to receive this descriptor.  This
+            protocol must be connected via a transport providing
+            L{IUNIXTransport<twisted.internet.interfaces.IUNIXTransport>}.
+        @type proto: L{BinaryBoxProtocol}
+
+        @return: The file descriptor represented by C{inString}.
+        @rtype: C{int}
+        """
+        return proto._getDescriptor(int(inString))
+
+
+    def toStringProto(self, inObject, proto):
+        """
+        Send C{inObject}, an integer file descriptor, over C{proto}'s connection
+        and return a unique identifier which will allow the receiver to
+        associate the file descriptor with this argument.
+
+        @param inObject: A file descriptor to duplicate over an AMP connection
+            as the value for this argument.
+        @type inObject: C{int}
+
+        @param proto: The protocol which will be used to send this descriptor.
+            This protocol must be connected via a transport providing
+            L{IUNIXTransport<twisted.internet.interfaces.IUNIXTransport>}.
+
+        @return: A byte string which can be used by the receiver to reconstruct
+            the file descriptor.
+        @rtype: C{str}
+        """
+        identifier = proto._sendFileDescriptor(inObject)
+        outString = Integer.toStringProto(self, identifier, proto)
+        return outString
+
+
+
+class Command:
+    """
+    Subclass me to specify an AMP Command.
+
+    @cvar arguments: A list of 2-tuples of (name, Argument-subclass-instance),
+    specifying the names and values of the parameters which are required for
+    this command.
+
+    @cvar response: A list like L{arguments}, but instead used for the return
+    value.
+
+    @cvar errors: A mapping of subclasses of L{Exception} to wire-protocol tags
+    for errors represented as L{str}s.  Responders which raise keys from this
+    dictionary will have the error translated to the corresponding tag on the
+    wire.  Invokers which receive Deferreds from invoking this command with
+    L{AMP.callRemote} will potentially receive Failures with keys from this
+    mapping as their value.  This mapping is inherited; if you declare a
+    command which handles C{FooError} as 'FOO_ERROR', then subclass it and
+    specify C{BarError} as 'BAR_ERROR', responders to the subclass may raise
+    either C{FooError} or C{BarError}, and invokers must be able to deal with
+    either of those exceptions.
+
+    @cvar fatalErrors: like 'errors', but errors in this list will always
+    terminate the connection, despite being of a recognizable error type.
+
+    @cvar commandType: The type of Box used to issue commands; useful only for
+    protocol-modifying behavior like startTLS or protocol switching.  Defaults
+    to a plain vanilla L{Box}.
+
+    @cvar responseType: The type of Box used to respond to this command; only
+    useful for protocol-modifying behavior like startTLS or protocol switching.
+    Defaults to a plain vanilla L{Box}.
+
+    @ivar requiresAnswer: a boolean; defaults to True.  Set it to False on your
+    subclass if you want callRemote to return None.  Note: this is a hint only
+    to the client side of the protocol.  The return-type of a command responder
+    method must always be a dictionary adhering to the contract specified by
+    L{response}, because clients are always free to request a response if they
+    want one.
+    """
+
+    class __metaclass__(type):
+        """
+        Metaclass hack to establish reverse-mappings for 'errors' and
+        'fatalErrors' as class vars.
+        """
+        def __new__(cls, name, bases, attrs):
+            reverseErrors = attrs['reverseErrors'] = {}
+            er = attrs['allErrors'] = {}
+            if 'commandName' not in attrs:
+                attrs['commandName'] = name
+            newtype = type.__new__(cls, name, bases, attrs)
+            errors = {}
+            fatalErrors = {}
+            accumulateClassDict(newtype, 'errors', errors)
+            accumulateClassDict(newtype, 'fatalErrors', fatalErrors)
+            for v, k in errors.iteritems():
+                reverseErrors[k] = v
+                er[v] = k
+            for v, k in fatalErrors.iteritems():
+                reverseErrors[k] = v
+                er[v] = k
+            return newtype
+
+    arguments = []
+    response = []
+    extra = []
+    errors = {}
+    fatalErrors = {}
+
+    commandType = Box
+    responseType = Box
+
+    requiresAnswer = True
+
+
+    def __init__(self, **kw):
+        """
+        Create an instance of this command with specified values for its
+        parameters.
+
+        @param kw: a dict containing an appropriate value for each name
+        specified in the L{arguments} attribute of my class.
+
+        @raise InvalidSignature: if you forgot any required arguments.
+        """
+        self.structured = kw
+        givenArgs = kw.keys()
+        forgotten = []
+        for name, arg in self.arguments:
+            pythonName = _wireNameToPythonIdentifier(name)
+            if pythonName not in givenArgs and not arg.optional:
+                forgotten.append(pythonName)
+        if forgotten:
+            raise InvalidSignature("forgot %s for %s" % (
+                    ', '.join(forgotten), self.commandName))
+        forgotten = []
+
+
+    def makeResponse(cls, objects, proto):
+        """
+        Serialize a mapping of arguments using this L{Command}'s
+        response schema.
+
+        @param objects: a dict with keys matching the names specified in
+        self.response, having values of the types that the Argument objects in
+        self.response can format.
+
+        @param proto: an L{AMP}.
+
+        @return: an L{AmpBox}.
+        """
+        try:
+            responseType = cls.responseType()
+        except:
+            return fail()
+        return _objectsToStrings(objects, cls.response, responseType, proto)
+    makeResponse = classmethod(makeResponse)
+
+
+    def makeArguments(cls, objects, proto):
+        """
+        Serialize a mapping of arguments using this L{Command}'s
+        argument schema.
+
+        @param objects: a dict with keys similar to the names specified in
+        self.arguments, having values of the types that the Argument objects in
+        self.arguments can parse.
+
+        @param proto: an L{AMP}.
+
+        @return: An instance of this L{Command}'s C{commandType}.
+        """
+        allowedNames = set()
+        for (argName, ignored) in cls.arguments:
+            allowedNames.add(_wireNameToPythonIdentifier(argName))
+
+        for intendedArg in objects:
+            if intendedArg not in allowedNames:
+                raise InvalidSignature(
+                    "%s is not a valid argument" % (intendedArg,))
+        return _objectsToStrings(objects, cls.arguments, cls.commandType(),
+                                 proto)
+    makeArguments = classmethod(makeArguments)
+
+
+    def parseResponse(cls, box, protocol):
+        """
+        Parse a mapping of serialized arguments using this
+        L{Command}'s response schema.
+
+        @param box: A mapping of response-argument names to the
+        serialized forms of those arguments.
+        @param protocol: The L{AMP} protocol.
+
+        @return: A mapping of response-argument names to the parsed
+        forms.
+        """
+        return _stringsToObjects(box, cls.response, protocol)
+    parseResponse = classmethod(parseResponse)
+
+
+    def parseArguments(cls, box, protocol):
+        """
+        Parse a mapping of serialized arguments using this
+        L{Command}'s argument schema.
+
+        @param box: A mapping of argument names to the serialized forms
+        of those arguments.
+        @param protocol: The L{AMP} protocol.
+
+        @return: A mapping of argument names to the parsed forms.
+        """
+        return _stringsToObjects(box, cls.arguments, protocol)
+    parseArguments = classmethod(parseArguments)
+
+
+    def responder(cls, methodfunc):
+        """
+        Declare a method to be a responder for a particular command.
+
+        This is a decorator.
+
+        Use like so::
+
+            class MyCommand(Command):
+                arguments = [('a', ...), ('b', ...)]
+
+            class MyProto(AMP):
+                def myFunMethod(self, a, b):
+                    ...
+                MyCommand.responder(myFunMethod)
+
+        Notes: Although decorator syntax is not used within Twisted, this
+        function returns its argument and is therefore safe to use with
+        decorator syntax.
+
+        This is not thread safe.  Don't declare AMP subclasses in other
+        threads.  Don't declare responders outside the scope of AMP subclasses;
+        the behavior is undefined.
+
+        @param methodfunc: A function which will later become a method, which
+        has a keyword signature compatible with this command's L{arguments} list
+        and returns a dictionary with a set of keys compatible with this
+        command's L{response} list.
+
+        @return: the methodfunc parameter.
+        """
+        CommandLocator._currentClassCommands.append((cls, methodfunc))
+        return methodfunc
+    responder = classmethod(responder)
+
+
+    # Our only instance method
+    def _doCommand(self, proto):
+        """
+        Encode and send this Command to the given protocol.
+
+        @param proto: an AMP, representing the connection to send to.
+
+        @return: a Deferred which will fire or error appropriately when the
+        other side responds to the command (or error if the connection is lost
+        before it is responded to).
+        """
+
+        def _massageError(error):
+            error.trap(RemoteAmpError)
+            rje = error.value
+            errorType = self.reverseErrors.get(rje.errorCode,
+                                               UnknownRemoteError)
+            return Failure(errorType(rje.description))
+
+        d = proto._sendBoxCommand(self.commandName,
+                                  self.makeArguments(self.structured, proto),
+                                  self.requiresAnswer)
+
+        if self.requiresAnswer:
+            d.addCallback(self.parseResponse, proto)
+            d.addErrback(_massageError)
+
+        return d
+
+
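As a point of reference, the Command machinery above is normally used by
declaring a schema and attaching a responder; a minimal sketch (the Sum and
Math names are illustrative placeholders, not part of this patch):

    from twisted.protocols import amp

    class Sum(amp.Command):
        # Wire schema: two integer arguments, one integer result.
        arguments = [('a', amp.Integer()),
                     ('b', amp.Integer())]
        response = [('total', amp.Integer())]

    class Math(amp.AMP):
        # Attach the responder exactly as described in Command.responder above.
        def sum(self, a, b):
            return {'total': a + b}
        Sum.responder(sum)

Such a protocol would typically be served by a
twisted.internet.protocol.ServerFactory whose protocol attribute is Math; the
client side is sketched after the AMP class further below.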
+
+class _NoCertificate:
+    """
+    This is for peers which don't want to use a local certificate.  Used by
+    AMP because AMP's internal language is all about certificates and this
+    duck-types in the appropriate place; this API isn't really stable though,
+    so it's not exposed anywhere public.
+
+    For clients, it will use ephemeral DH keys, or whatever the default is for
+    certificate-less clients in OpenSSL.  For servers, it will generate a
+    temporary self-signed certificate with garbage values in the DN and use
+    that.
+    """
+
+    def __init__(self, client):
+        """
+        Create a _NoCertificate which either is or isn't for the client side of
+        the connection.
+
+        @param client: True if we are a client and should truly have no
+        certificate and be anonymous, False if we are a server and actually
+        have to generate a temporary certificate.
+
+        @type client: bool
+        """
+        self.client = client
+
+
+    def options(self, *authorities):
+        """
+        Behaves like L{twisted.internet.ssl.PrivateCertificate.options}().
+        """
+        if not self.client:
+            # do some crud with sslverify to generate a temporary self-signed
+            # certificate.  This is SLOOOWWWWW so it is only in the absolute
+            # worst, most naive case.
+
+            # We have to do this because OpenSSL will not let both the server
+            # and client be anonymous.
+            sharedDN = DN(CN='TEMPORARY CERTIFICATE')
+            key = KeyPair.generate()
+            cr = key.certificateRequest(sharedDN)
+            sscrd = key.signCertificateRequest(sharedDN, cr, lambda dn: True, 1)
+            cert = key.newCertificate(sscrd)
+            return cert.options(*authorities)
+        options = dict()
+        if authorities:
+            options.update(dict(verify=True,
+                                requireCertificate=True,
+                                caCerts=[auth.original for auth in authorities]))
+        occo = CertificateOptions(**options)
+        return occo
+
+
+
+class _TLSBox(AmpBox):
+    """
+    I am an AmpBox that, upon being sent, initiates a TLS connection.
+    """
+    __slots__ = []
+
+    def __init__(self):
+        if ssl is None:
+            raise RemoteAmpError("TLS_ERROR", "TLS not available")
+        AmpBox.__init__(self)
+
+
+    def _keyprop(k, default):
+        return property(lambda self: self.get(k, default))
+
+
+    # These properties are described in startTLS
+    certificate = _keyprop('tls_localCertificate', _NoCertificate(False))
+    verify = _keyprop('tls_verifyAuthorities', None)
+
+    def _sendTo(self, proto):
+        """
+        Send my encoded value to the protocol, then initiate TLS.
+        """
+        ab = AmpBox(self)
+        for k in ['tls_localCertificate',
+                  'tls_verifyAuthorities']:
+            ab.pop(k, None)
+        ab._sendTo(proto)
+        proto._startTLS(self.certificate, self.verify)
+
+
+
+class _LocalArgument(String):
+    """
+    Local arguments are never actually relayed across the wire.  This is just a
+    shim so that StartTLS can pretend to have some arguments: if arguments
+    acquire documentation properties, replace this with something nicer later.
+    """
+
+    def fromBox(self, name, strings, objects, proto):
+        pass
+
+
+
+class StartTLS(Command):
+    """
+    Use, or subclass, me to implement a command that starts TLS.
+
+    Callers of StartTLS may pass several special arguments, which affect the
+    TLS negotiation:
+
+        - tls_localCertificate: This is a
+        twisted.internet.ssl.PrivateCertificate which will be used to secure
+        the side of the connection it is returned on.
+
+        - tls_verifyAuthorities: This is a list of
+        twisted.internet.ssl.Certificate objects that will be used as the
+        certificate authorities to verify our peer's certificate.
+
+    Each of those special parameters may also be present as a key in the
+    response dictionary.
+    """
+
+    arguments = [("tls_localCertificate", _LocalArgument(optional=True)),
+                 ("tls_verifyAuthorities", _LocalArgument(optional=True))]
+
+    response = [("tls_localCertificate", _LocalArgument(optional=True)),
+                ("tls_verifyAuthorities", _LocalArgument(optional=True))]
+
+    responseType = _TLSBox
+
+    def __init__(self, **kw):
+        """
+        Create a StartTLS command.  (This is private.  Use AMP.callRemote.)
+
+        @param tls_localCertificate: the PrivateCertificate object to use to
+        secure the connection.  If it's None, or unspecified, an ephemeral DH
+        key is used instead.
+
+        @param tls_verifyAuthorities: a list of Certificate objects which
+        represent root certificates to verify our peer with.
+        """
+        if ssl is None:
+            raise RuntimeError("TLS not available.")
+        self.certificate = kw.pop('tls_localCertificate', _NoCertificate(True))
+        self.authorities = kw.pop('tls_verifyAuthorities', None)
+        Command.__init__(self, **kw)
+
+
+    def _doCommand(self, proto):
+        """
+        When a StartTLS command is sent, prepare to start TLS, but don't actually
+        do it; wait for the acknowledgement, then initiate the TLS handshake.
+        """
+        d = Command._doCommand(self, proto)
+        proto._prepareTLS(self.certificate, self.authorities)
+        # XXX before we get back to user code we are going to start TLS...
+        def actuallystart(response):
+            proto._startTLS(self.certificate, self.authorities)
+            return response
+        d.addCallback(actuallystart)
+        return d
+
+
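Callers usually reach StartTLS through AMP.callRemote.  A rough sketch of a
helper that upgrades an established connection (the secure/myCert/trustRoots
names are placeholders, not part of this patch):

    from twisted.protocols import amp

    def secure(proto, myCert=None, trustRoots=None):
        # proto is a connected amp.AMP instance.  myCert would be a
        # twisted.internet.ssl.PrivateCertificate and trustRoots a list of
        # Certificate objects; with neither supplied, the anonymous/ephemeral
        # configuration described in _NoCertificate above is negotiated.
        kw = {}
        if myCert is not None:
            kw['tls_localCertificate'] = myCert
        if trustRoots is not None:
            kw['tls_verifyAuthorities'] = trustRoots
        return proto.callRemote(amp.StartTLS, **kw)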
+
+class ProtocolSwitchCommand(Command):
+    """
+    Use this command to switch from something Amp-derived to a different
+    protocol mid-connection.  This can be useful to use amp as the
+    connection-startup negotiation phase.  Since TLS is a different layer
+    entirely, you can use Amp to negotiate the security parameters of your
+    connection, then switch to a different protocol, and the connection will
+    remain secured.
+    """
+
+    def __init__(self, _protoToSwitchToFactory, **kw):
+        """
+        Create a ProtocolSwitchCommand.
+
+        @param _protoToSwitchToFactory: a ProtocolFactory which will generate
+        the Protocol to switch to.
+
+        @param kw: Keyword arguments, encoded and handled normally as
+        L{Command} would.
+        """
+
+        self.protoToSwitchToFactory = _protoToSwitchToFactory
+        super(ProtocolSwitchCommand, self).__init__(**kw)
+
+
+    def makeResponse(cls, innerProto, proto):
+        return _SwitchBox(innerProto)
+    makeResponse = classmethod(makeResponse)
+
+
+    def _doCommand(self, proto):
+        """
+        When we emit a ProtocolSwitchCommand, lock the protocol, but don't actually
+        switch to the new protocol unless an acknowledgement is received.  If
+        an error is received, switch back.
+        """
+        d = super(ProtocolSwitchCommand, self)._doCommand(proto)
+        proto._lockForSwitch()
+        def switchNow(ign):
+            innerProto = self.protoToSwitchToFactory.buildProtocol(
+                proto.transport.getPeer())
+            proto._switchTo(innerProto, self.protoToSwitchToFactory)
+            return ign
+        def handle(ign):
+            proto._unlockFromSwitch()
+            self.protoToSwitchToFactory.clientConnectionFailed(
+                None, Failure(CONNECTION_LOST))
+            return ign
+        return d.addCallbacks(switchNow, handle)
+
+
+
+class _DescriptorExchanger(object):
+    """
+    L{_DescriptorExchanger} is a mixin for L{BinaryBoxProtocol} which adds
+    support for receiving file descriptors, a feature offered by
+    L{IUNIXTransport<twisted.internet.interfaces.IUNIXTransport>}.
+
+    @ivar _descriptors: Temporary storage for all file descriptors received.
+        Values in this dictionary are the file descriptors (as integers).  Keys
+        in this dictionary are ordinals giving the order in which each
+        descriptor was received.  The ordering information is used to allow
+        L{Descriptor} to determine which is the correct descriptor for any
+        particular usage of that argument type.
+    @type _descriptors: C{dict}
+
+    @ivar _sendingDescriptorCounter: A no-argument callable which returns the
+        ordinals, starting from 0.  This is used to construct values for
+        C{_sendFileDescriptor}.
+
+    @ivar _receivingDescriptorCounter: A no-argument callable which returns the
+        ordinals, starting from 0.  This is used to construct values for
+        C{fileDescriptorReceived}.
+    """
+    implements(IFileDescriptorReceiver)
+
+    def __init__(self):
+        self._descriptors = {}
+        self._getDescriptor = self._descriptors.pop
+        self._sendingDescriptorCounter = count().next
+        self._receivingDescriptorCounter = count().next
+
+
+    def _sendFileDescriptor(self, descriptor):
+        """
+        Assign and return the next ordinal to the given descriptor after sending
+        the descriptor over this protocol's transport.
+        """
+        self.transport.sendFileDescriptor(descriptor)
+        return self._sendingDescriptorCounter()
+
+
+    def fileDescriptorReceived(self, descriptor):
+        """
+        Collect received file descriptors to be claimed later by L{Descriptor}.
+
+        @param descriptor: The received file descriptor.
+        @type descriptor: C{int}
+        """
+        self._descriptors[self._receivingDescriptorCounter()] = descriptor
+
+
+
+class BinaryBoxProtocol(StatefulStringProtocol, Int16StringReceiver,
+                        _DescriptorExchanger):
+    """
+    A protocol for receiving L{AmpBox}es - key/value pairs - via length-prefixed
+    strings.  A box is composed of:
+
+        - any number of key-value pairs, described by:
+            - a 2-byte network-endian packed key length (of which the first
+              byte must be null, and the second must be non-null: i.e. the
+              value of the length must be 1-255)
+            - a key, comprised of that many bytes
+            - a 2-byte network-endian unsigned value length (up to the maximum
+              of 65535)
+            - a value, comprised of that many bytes
+        - 2 null bytes
+
+    In other words, an even number of strings prefixed with packed unsigned
+    16-bit integers, and then a 0-length string to indicate the end of the box.
+
+    This protocol also implements 2 extra private bits of functionality related
+    to the byte boundaries between messages; it can start TLS between two given
+    boxes or switch to an entirely different protocol.  However, due to some
+    tricky elements of the implementation, the public interface to this
+    functionality is L{ProtocolSwitchCommand} and L{StartTLS}.
+
+    @ivar _keyLengthLimitExceeded: A flag which is only true when the
+        connection is being closed because a key length prefix which was longer
+        than allowed by the protocol was received.
+
+    @ivar boxReceiver: an L{IBoxReceiver} provider, whose L{ampBoxReceived}
+    method will be invoked for each L{AmpBox} that is received.
+    """
+
+    implements(IBoxSender)
+
+    _justStartedTLS = False
+    _startingTLSBuffer = None
+    _locked = False
+    _currentKey = None
+    _currentBox = None
+
+    _keyLengthLimitExceeded = False
+
+    hostCertificate = None
+    noPeerCertificate = False   # for tests
+    innerProtocol = None
+    innerProtocolClientFactory = None
+
+    def __init__(self, boxReceiver):
+        _DescriptorExchanger.__init__(self)
+        self.boxReceiver = boxReceiver
+
+
+    def _switchTo(self, newProto, clientFactory=None):
+        """
+        Switch this BinaryBoxProtocol's transport to a new protocol.  You need
+        to do this 'simultaneously' on both ends of a connection; the easiest
+        way to do this is to use a subclass of ProtocolSwitchCommand.
+
+        @param newProto: the new protocol instance to switch to.
+
+        @param clientFactory: the ClientFactory to send the
+        L{clientConnectionLost} notification to.
+        """
+        # All the data that Int16StringReceiver has not yet dealt with belongs to our
+        # new protocol: luckily it's keeping that in a handy (although
+        # ostensibly internal) variable for us:
+        newProtoData = self.recvd
+        # We're quite possibly in the middle of a 'dataReceived' loop in
+        # Int16StringReceiver: let's make sure that the next iteration, the
+        # loop will break and not attempt to look at something that isn't a
+        # length prefix.
+        self.recvd = ''
+        # Finally, do the actual work of setting up the protocol and delivering
+        # its first chunk of data, if one is available.
+        self.innerProtocol = newProto
+        self.innerProtocolClientFactory = clientFactory
+        newProto.makeConnection(self.transport)
+        if newProtoData:
+            newProto.dataReceived(newProtoData)
+
+
+    def sendBox(self, box):
+        """
+        Send an amp.Box to my peer.
+
+        Note: transport.write is never called outside of this method.
+
+        @param box: an AmpBox.
+
+        @raise ProtocolSwitched: if the protocol has previously been switched.
+
+        @raise ConnectionLost: if the connection has previously been lost.
+        """
+        if self._locked:
+            raise ProtocolSwitched(
+                "This connection has switched: no AMP traffic allowed.")
+        if self.transport is None:
+            raise ConnectionLost()
+        if self._startingTLSBuffer is not None:
+            self._startingTLSBuffer.append(box)
+        else:
+            self.transport.write(box.serialize())
+
+
+    def makeConnection(self, transport):
+        """
+        Notify L{boxReceiver} that it is about to receive boxes from this
+        protocol by invoking L{startReceivingBoxes}.
+        """
+        self.transport = transport
+        self.boxReceiver.startReceivingBoxes(self)
+        self.connectionMade()
+
+
+    def dataReceived(self, data):
+        """
+        Either parse incoming data as L{AmpBox}es or relay it to our nested
+        protocol.
+        """
+        if self._justStartedTLS:
+            self._justStartedTLS = False
+        # If we already have an inner protocol, then we don't deliver data to
+        # the protocol parser any more; we just hand it off.
+        if self.innerProtocol is not None:
+            self.innerProtocol.dataReceived(data)
+            return
+        return Int16StringReceiver.dataReceived(self, data)
+
+
+    def connectionLost(self, reason):
+        """
+        The connection was lost; notify any nested protocol.
+        """
+        if self.innerProtocol is not None:
+            self.innerProtocol.connectionLost(reason)
+            if self.innerProtocolClientFactory is not None:
+                self.innerProtocolClientFactory.clientConnectionLost(None, reason)
+        if self._keyLengthLimitExceeded:
+            failReason = Failure(TooLong(True, False, None, None))
+        elif reason.check(ConnectionClosed) and self._justStartedTLS:
+            # We just started TLS and haven't received any data.  This means
+            # the other connection didn't like our cert (although they may not
+            # have told us why - later Twisted should make 'reason' into a TLS
+            # error.)
+            failReason = PeerVerifyError(
+                "Peer rejected our certificate for an unknown reason.")
+        else:
+            failReason = reason
+        self.boxReceiver.stopReceivingBoxes(failReason)
+
+
+    # The longest key allowed
+    _MAX_KEY_LENGTH = 255
+
+    # The longest value allowed (this is somewhat redundant, as longer values
+    # cannot be encoded - ah well).
+    _MAX_VALUE_LENGTH = 65535
+
+    # The first thing received is a key.
+    MAX_LENGTH = _MAX_KEY_LENGTH
+
+    def proto_init(self, string):
+        """
+        String received in the 'init' state.
+        """
+        self._currentBox = AmpBox()
+        return self.proto_key(string)
+
+
+    def proto_key(self, string):
+        """
+        String received in the 'key' state.  If the key is empty, a complete
+        box has been received.
+        """
+        if string:
+            self._currentKey = string
+            self.MAX_LENGTH = self._MAX_VALUE_LENGTH
+            return 'value'
+        else:
+            self.boxReceiver.ampBoxReceived(self._currentBox)
+            self._currentBox = None
+            return 'init'
+
+
+    def proto_value(self, string):
+        """
+        String received in the 'value' state.
+        """
+        self._currentBox[self._currentKey] = string
+        self._currentKey = None
+        self.MAX_LENGTH = self._MAX_KEY_LENGTH
+        return 'key'
+
+
+    def lengthLimitExceeded(self, length):
+        """
+        The key length limit was exceeded.  Disconnect the transport and make
+        sure a meaningful exception is reported.
+        """
+        self._keyLengthLimitExceeded = True
+        self.transport.loseConnection()
+
+
+    def _lockForSwitch(self):
+        """
+        Lock this binary protocol so that no further boxes may be sent.  This
+        is used when sending a request to switch underlying protocols.  You
+        probably want to subclass ProtocolSwitchCommand rather than calling
+        this directly.
+        """
+        self._locked = True
+
+
+    def _unlockFromSwitch(self):
+        """
+        Unlock this locked binary protocol so that further boxes may be sent
+        again.  This is used after an attempt to switch protocols has failed
+        for some reason.
+        """
+        if self.innerProtocol is not None:
+            raise ProtocolSwitched("Protocol already switched.  Cannot unlock.")
+        self._locked = False
+
+
+    def _prepareTLS(self, certificate, verifyAuthorities):
+        """
+        Used by L{StartTLS} to put us into the state where outgoing boxes are
+        buffered rather than written to the transport; see L{sendBox}.
+        """
+        self._startingTLSBuffer = []
+        if self.hostCertificate is not None:
+            raise OnlyOneTLS(
+                "Previously authenticated connection between %s and %s "
+                "is trying to re-establish as %s" % (
+                    self.hostCertificate,
+                    self.peerCertificate,
+                    (certificate, verifyAuthorities)))
+
+
+    def _startTLS(self, certificate, verifyAuthorities):
+        """
+        Used by TLSBox to initiate the SSL handshake.
+
+        @param certificate: a L{twisted.internet.ssl.PrivateCertificate} for
+        use locally.
+
+        @param verifyAuthorities: L{twisted.internet.ssl.Certificate} instances
+        representing certificate authorities which will verify our peer.
+        """
+        self.hostCertificate = certificate
+        self._justStartedTLS = True
+        if verifyAuthorities is None:
+            verifyAuthorities = ()
+        self.transport.startTLS(certificate.options(*verifyAuthorities))
+        stlsb = self._startingTLSBuffer
+        if stlsb is not None:
+            self._startingTLSBuffer = None
+            for box in stlsb:
+                self.sendBox(box)
+
+
+    def _getPeerCertificate(self):
+        if self.noPeerCertificate:
+            return None
+        return Certificate.peerFromTransport(self.transport)
+    peerCertificate = property(_getPeerCertificate)
+
+
+    def unhandledError(self, failure):
+        """
+        The buck stops here.  This error was completely unhandled, time to
+        terminate the connection.
+        """
+        log.err(
+            failure,
+            "Amp server or network failure unhandled by client application.  "
+            "Dropping connection!  To avoid, add errbacks to ALL remote "
+            "commands!")
+        if self.transport is not None:
+            self.transport.loseConnection()
+
+
+    def _defaultStartTLSResponder(self):
+        """
+        The default TLS responder specifies no local certificate and no
+        verification authorities.
+
+        From a security perspective, it's little better than a plain-text
+        connection - but it is still a *bit* better, so it's included for
+        convenience.
+
+        You probably want to override this by providing your own StartTLS.responder.
+        """
+        return {}
+    StartTLS.responder(_defaultStartTLSResponder)
+
+
+
+class AMP(BinaryBoxProtocol, BoxDispatcher,
+          CommandLocator, SimpleStringLocator):
+    """
+    This protocol is an AMP connection.  See the module docstring for protocol
+    details.
+    """
+
+    _ampInitialized = False
+
+    def __init__(self, boxReceiver=None, locator=None):
+        # For backwards compatibility.  When AMP did not separate parsing logic
+        # (L{BinaryBoxProtocol}), request-response logic (L{BoxDispatcher}) and
+        # command routing (L{CommandLocator}), it did not have a constructor.
+        # Now it does, so old subclasses might have defined their own that did
+        # not upcall.  If this flag isn't set, we'll call the constructor in
+        # makeConnection before anything actually happens.
+        self._ampInitialized = True
+        if boxReceiver is None:
+            boxReceiver = self
+        if locator is None:
+            locator = self
+        BoxDispatcher.__init__(self, locator)
+        BinaryBoxProtocol.__init__(self, boxReceiver)
+
+
+    def locateResponder(self, name):
+        """
+        Unify the implementations of L{CommandLocator} and
+        L{SimpleStringLocator} to perform both kinds of dispatch, preferring
+        L{CommandLocator}.
+        """
+        firstResponder = CommandLocator.locateResponder(self, name)
+        if firstResponder is not None:
+            return firstResponder
+        secondResponder = SimpleStringLocator.locateResponder(self, name)
+        return secondResponder
+
+
+    def __repr__(self):
+        """
+        A verbose string representation which gives us information about this
+        AMP connection.
+        """
+        if self.innerProtocol is not None:
+            innerRepr = ' inner %r' % (self.innerProtocol,)
+        else:
+            innerRepr = ''
+        return '<%s%s at 0x%x>' % (
+            self.__class__.__name__, innerRepr, unsignedID(self))
+
+
+    def makeConnection(self, transport):
+        """
+        Emit a helpful log message when the connection is made.
+        """
+        if not self._ampInitialized:
+            # See comment in the constructor re: backward compatibility.  I
+            # should probably emit a deprecation warning here.
+            AMP.__init__(self)
+        # Save these so we can emit a similar log message in L{connectionLost}.
+        self._transportPeer = transport.getPeer()
+        self._transportHost = transport.getHost()
+        log.msg("%s connection established (HOST:%s PEER:%s)" % (
+                self.__class__.__name__,
+                self._transportHost,
+                self._transportPeer))
+        BinaryBoxProtocol.makeConnection(self, transport)
+
+
+    def connectionLost(self, reason):
+        """
+        Emit a helpful log message when the connection is lost.
+        """
+        log.msg("%s connection lost (HOST:%s PEER:%s)" %
+                (self.__class__.__name__,
+                 self._transportHost,
+                 self._transportPeer))
+        BinaryBoxProtocol.connectionLost(self, reason)
+        self.transport = None
+
+
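From the client side, an AMP instance is usually connected with ClientCreator
and driven with callRemote; a minimal sketch (Sum refers to a Command subclass
such as the illustrative one sketched earlier, and the address is a
placeholder):

    from twisted.internet import reactor
    from twisted.internet.protocol import ClientCreator
    from twisted.protocols import amp

    def connected(proto):
        # callRemote serializes the keywords through Sum.arguments and fires
        # with a dict parsed through Sum.response.
        return proto.callRemote(Sum, a=13, b=81)

    def gotResult(result):
        print('total: %s' % (result['total'],))
        reactor.stop()

    d = ClientCreator(reactor, amp.AMP).connectTCP('127.0.0.1', 1234)
    d.addCallback(connected).addCallback(gotResult)
    reactor.run()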
+
+class _ParserHelper:
+    """
+    A box receiver which records all boxes received.
+    """
+    def __init__(self):
+        self.boxes = []
+
+
+    def getPeer(self):
+        return 'string'
+
+
+    def getHost(self):
+        return 'string'
+
+    disconnecting = False
+
+
+    def startReceivingBoxes(self, sender):
+        """
+        No initialization is required.
+        """
+
+
+    def ampBoxReceived(self, box):
+        self.boxes.append(box)
+
+
+    # Synchronous helpers
+    def parse(cls, fileObj):
+        """
+        Parse some amp data stored in a file.
+
+        @param fileObj: a file-like object.
+
+        @return: a list of AmpBoxes encoded in the given file.
+        """
+        parserHelper = cls()
+        bbp = BinaryBoxProtocol(boxReceiver=parserHelper)
+        bbp.makeConnection(parserHelper)
+        bbp.dataReceived(fileObj.read())
+        return parserHelper.boxes
+    parse = classmethod(parse)
+
+
+    def parseString(cls, data):
+        """
+        Parse some amp data stored in a string.
+
+        @param data: a str holding some amp-encoded data.
+
+        @return: a list of AmpBoxes encoded in the given string.
+        """
+        return cls.parse(StringIO(data))
+    parseString = classmethod(parseString)
+
+
+
+parse = _ParserHelper.parse
+parseString = _ParserHelper.parseString
+
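The framing these helpers understand is the one described in the
BinaryBoxProtocol docstring: a 16-bit big-endian length before every key and
value, and an empty key to close the box.  A small sketch of building one box
by hand and parsing it back (the frame helper is hypothetical):

    import struct
    from twisted.protocols.amp import parseString

    def frame(pairs):
        # Encode (key, value) byte-string pairs as a single AMP box.
        out = b''
        for key, value in pairs:
            out += struct.pack('!H', len(key)) + key
            out += struct.pack('!H', len(value)) + value
        return out + b'\x00\x00'   # zero-length key terminates the box

    wire = frame([(b'_command', b'sum'), (b'a', b'13')])
    boxes = parseString(wire)
    # boxes is a one-element list; boxes[0] maps '_command' to 'sum'
    # and 'a' to '13'.
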
+def _stringsToObjects(strings, arglist, proto):
+    """
+    Convert an AmpBox to a dictionary of python objects, converting through a
+    given arglist.
+
+    @param strings: an AmpBox (or dict of strings)
+
+    @param arglist: a list of 2-tuples of strings and Argument objects, as
+    described in L{Command.arguments}.
+
+    @param proto: an L{AMP} instance.
+
+    @return: the converted dictionary mapping names to parsed python objects.
+    """
+    objects = {}
+    myStrings = strings.copy()
+    for argname, argparser in arglist:
+        argparser.fromBox(argname, myStrings, objects, proto)
+    return objects
+
+
+
+def _objectsToStrings(objects, arglist, strings, proto):
+    """
+    Convert a dictionary of python objects to an AmpBox, converting through a
+    given arglist.
+
+    @param objects: a dict mapping names to python objects
+
+    @param arglist: a list of 2-tuples of strings and Argument objects, as
+    described in L{Command.arguments}.
+
+    @param strings: [OUT PARAMETER] An object providing the L{dict}
+    interface which will be populated with serialized data.
+
+    @param proto: an L{AMP} instance.
+
+    @return: The converted dictionary mapping names to encoded argument
+    strings (identical to C{strings}).
+    """
+    myObjects = objects.copy()
+    for argname, argparser in arglist:
+        argparser.toBox(argname, strings, myObjects, proto)
+    return strings
+
+
+
+class _FixedOffsetTZInfo(datetime.tzinfo):
+    """
+    Represents a fixed timezone offset (without daylight saving time).
+
+    @ivar name: A C{str} giving the name of this timezone; the name just
+        includes how much time this offset represents.
+
+    @ivar offset: A C{datetime.timedelta} giving the amount of time this
+        timezone is offset.
+    """
+
+    def __init__(self, sign, hours, minutes):
+        self.name = '%s%02i:%02i' % (sign, hours, minutes)
+        if sign == '-':
+            hours = -hours
+            minutes = -minutes
+        elif sign != '+':
+            raise ValueError('invalid sign for timezone %r' % (sign,))
+        self.offset = datetime.timedelta(hours=hours, minutes=minutes)
+
+
+    def utcoffset(self, dt):
+        """
+        Return this timezone's offset from UTC.
+        """
+        return self.offset
+
+
+    def dst(self, dt):
+        """
+        Return a zero C{datetime.timedelta} for the daylight saving time offset,
+        since there is never one.
+        """
+        return datetime.timedelta(0)
+
+
+    def tzname(self, dt):
+        """
+        Return a string describing this timezone.
+        """
+        return self.name
+
+
+
+utc = _FixedOffsetTZInfo('+', 0, 0)
+
+
+
+class Decimal(Argument):
+    """
+    Encodes C{decimal.Decimal} instances.
+
+    There are several ways in which a decimal value might be encoded.
+
+    Special values are encoded as special strings::
+
+      - Positive infinity is encoded as C{"Infinity"}
+      - Negative infinity is encoded as C{"-Infinity"}
+      - Quiet not-a-number is encoded as either C{"NaN"} or C{"-NaN"}
+      - Signalling not-a-number is encoded as either C{"sNaN"} or C{"-sNaN"}
+
+    Normal values are encoded using the base ten string representation, using
+    engineering notation to indicate magnitude without precision, and "normal"
+    digits to indicate precision.  For example::
+
+      - C{"1"} represents the value I{1} with precision to one place.
+      - C{"-1"} represents the value I{-1} with precision to one place.
+      - C{"1.0"} represents the value I{1} with precision to two places.
+      - C{"10"} represents the value I{10} with precision to two places.
+      - C{"1E+2"} represents the value I{10} with precision to one place.
+      - C{"1E-1"} represents the value I{0.1} with precision to one place.
+      - C{"1.5E+2"} represents the value I{15} with precision to two places.
+
+    U{http://speleotrove.com/decimal/} should be considered the authoritative
+    specification for the format.
+    """
+    fromString = decimal.Decimal
+
+    def toString(self, inObject):
+        """
+        Serialize a C{decimal.Decimal} instance to the specified wire format.
+        """
+        if isinstance(inObject, decimal.Decimal):
+            # Hopefully decimal.Decimal.__str__ actually does what we want.
+            return str(inObject)
+        raise ValueError(
+            "amp.Decimal can only encode instances of decimal.Decimal")
+
+
+
+class DateTime(Argument):
+    """
+    Encodes C{datetime.datetime} instances.
+
+    Wire format: '%04i-%02i-%02iT%02i:%02i:%02i.%06i%s%02i:%02i'. Fields in
+    order are: year, month, day, hour, minute, second, microsecond, timezone
+    direction (+ or -), timezone hour, timezone minute. Encoded string is
+    always exactly 32 characters long. This format is compatible with ISO 8601,
+    but that does not mean all ISO 8601 dates can be accepted.
+
+    Also, note that the datetime module's notion of a "timezone" can be
+    complex, but the wire format includes only a fixed offset, so the
+    conversion is not lossless. A lossless transmission of a C{datetime} instance
+    is not feasible since the receiving end would require a Python interpreter.
+
+    @ivar _positions: A sequence of slices giving the positions of various
+        interesting parts of the wire format.
+    """
+
+    _positions = [
+        slice(0, 4), slice(5, 7), slice(8, 10), # year, month, day
+        slice(11, 13), slice(14, 16), slice(17, 19), # hour, minute, second
+        slice(20, 26), # microsecond
+        # intentionally skip timezone direction, as it is not an integer
+        slice(27, 29), slice(30, 32) # timezone hour, timezone minute
+        ]
+
+    def fromString(self, s):
+        """
+        Parse a string containing a date and time in the wire format into a
+        C{datetime.datetime} instance.
+        """
+        if len(s) != 32:
+            raise ValueError('invalid date format %r' % (s,))
+
+        values = [int(s[p]) for p in self._positions]
+        sign = s[26]
+        timezone = _FixedOffsetTZInfo(sign, *values[7:])
+        values[7:] = [timezone]
+        return datetime.datetime(*values)
+
+
+    def toString(self, i):
+        """
+        Serialize a C{datetime.datetime} instance to a string in the specified
+        wire format.
+        """
+        offset = i.utcoffset()
+        if offset is None:
+            raise ValueError(
+                'amp.DateTime cannot serialize naive datetime instances.  '
+                'You may find amp.utc useful.')
+
+        minutesOffset = (offset.days * 86400 + offset.seconds) // 60
+
+        if minutesOffset > 0:
+            sign = '+'
+        else:
+            sign = '-'
+
+        # strftime has no way to format the microseconds, or put a ':' in the
+        # timezone. Surprise!
+
+        return '%04i-%02i-%02iT%02i:%02i:%02i.%06i%s%02i:%02i' % (
+            i.year,
+            i.month,
+            i.day,
+            i.hour,
+            i.minute,
+            i.second,
+            i.microsecond,
+            sign,
+            abs(minutesOffset) // 60,
+            abs(minutesOffset) % 60)
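
As a concrete illustration of the 32-character wire format above (the
timestamp is arbitrary; note that a zero offset takes the '-' branch of the
sign selection):

    import datetime
    from twisted.protocols.amp import DateTime, utc

    argument = DateTime()
    when = datetime.datetime(2014, 5, 17, 20, 31, 24, 500000, tzinfo=utc)
    encoded = argument.toString(when)
    assert encoded == '2014-05-17T20:31:24.500000-00:00'
    # Parsing recovers an equal (timezone-aware) datetime.
    assert argument.fromString(encoded) == when
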
diff --git a/ThirdParty/Twisted/twisted/protocols/basic.py b/ThirdParty/Twisted/twisted/protocols/basic.py
new file mode 100644
index 0000000..191cf6e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/basic.py
@@ -0,0 +1,963 @@
+# -*- test-case-name: twisted.protocols.test.test_basic -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Basic protocols, such as line-oriented, netstring, and int prefixed strings.
+"""
+
+from __future__ import absolute_import, division
+
+# System imports
+import re
+from struct import pack, unpack, calcsize
+from io import BytesIO
+import math
+
+from zope.interface import implementer
+
+# Twisted imports
+from twisted.python.compat import _PY3
+from twisted.internet import protocol, defer, interfaces, error
+from twisted.python import log, deprecate, versions
+
+
+# Unfortunately we cannot use regular string formatting on Python 3; see
+# http://bugs.python.org/issue3982 for details.
+if _PY3:
+    def _formatNetstring(data):
+        return b''.join([str(len(data)).encode("ascii"), b':', data, b','])
+else:
+    def _formatNetstring(data):
+        return b'%d:%s,' % (len(data), data)
+_formatNetstring.__doc__ = """
+Convert some C{bytes} into netstring format.
+
+@param data: C{bytes} that will be reformatted.
+"""
+
+
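Either branch produces the same framing; for example, with the module-private
helper defined above:

    assert _formatNetstring(b'hello') == b'5:hello,'
    assert _formatNetstring(b'') == b'0:,'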
+
+LENGTH, DATA, COMMA = range(3)
+NUMBER = re.compile(b'(\d*)(:?)')
+
+deprecatedSince = versions.Version("Twisted", 10, 2, 0)
+message = "NetstringReceiver parser state is private."
+for attr in ["LENGTH", "DATA", "COMMA", "NUMBER"]:
+    deprecate.deprecatedModuleAttribute(
+        deprecatedSince, message, __name__, attr)
+del deprecatedSince, message, attr
+
+DEBUG = 0
+
+class NetstringParseError(ValueError):
+    """
+    The incoming data is not in valid Netstring format.
+    """
+
+
+
+class IncompleteNetstring(Exception):
+    """
+    Not enough data to complete a netstring.
+    """
+
+
+class NetstringReceiver(protocol.Protocol):
+    """
+    A protocol that sends and receives netstrings.
+
+    See U{http://cr.yp.to/proto/netstrings.txt} for the specification of
+    netstrings. Every netstring starts with digits that specify the length
+    of the data. This length specification is separated from the data by
+    a colon. The data is terminated with a comma.
+
+    Override L{stringReceived} to handle received netstrings. This
+    method is called with the netstring payload as a single argument
+    whenever a complete netstring is received.
+
+    Security features:
+        1. Messages are limited in size, useful if you don't want
+           someone sending you a 500MB netstring (change C{self.MAX_LENGTH}
+           to the maximum length you wish to accept).
+        2. The connection is lost if an illegal message is received.
+
+    @ivar MAX_LENGTH: Defines the maximum length of netstrings that can be
+        received.
+    @type MAX_LENGTH: C{int}
+
+    @ivar _LENGTH: A pattern describing all strings that contain a netstring
+        length specification. Examples for length specifications are C{b'0:'},
+        C{b'12:'}, and C{b'179:'}. C{b'007:'} is not a valid length
+        specification, since leading zeros are not allowed.
+    @type _LENGTH: C{re.Match}
+
+    @ivar _LENGTH_PREFIX: A pattern describing all strings that contain
+        the first part of a netstring length specification (without the
+        trailing comma). Examples are '0', '12', and '179'. '007' does not
+        start a netstring length specification, since leading zeros are
+        not allowed.
+    @type _LENGTH_PREFIX: C{re.Match}
+
+    @ivar _PARSING_LENGTH: Indicates that the C{NetstringReceiver} is in
+        the state of parsing the length portion of a netstring.
+    @type _PARSING_LENGTH: C{int}
+
+    @ivar _PARSING_PAYLOAD: Indicates that the C{NetstringReceiver} is in
+        the state of parsing the payload portion (data and trailing comma)
+        of a netstring.
+    @type _PARSING_PAYLOAD: C{int}
+
+    @ivar brokenPeer: Indicates if the connection is still functional
+    @type brokenPeer: C{int}
+
+    @ivar _state: Indicates if the protocol is consuming the length portion
+        (C{PARSING_LENGTH}) or the payload (C{PARSING_PAYLOAD}) of a netstring
+    @type _state: C{int}
+
+    @ivar _remainingData: Holds the chunk of data that has not yet been consumed
+    @type _remainingData: C{string}
+
+    @ivar _payload: Holds the payload portion of a netstring including the
+        trailing comma
+    @type _payload: C{BytesIO}
+
+    @ivar _expectedPayloadSize: Holds the payload size plus one for the trailing
+        comma.
+    @type _expectedPayloadSize: C{int}
+    """
+    MAX_LENGTH = 99999
+    _LENGTH = re.compile(b'(0|[1-9]\d*)(:)')
+
+    _LENGTH_PREFIX = re.compile(b'(0|[1-9]\d*)$')
+
+    # Some error information for NetstringParseError instances.
+    _MISSING_LENGTH = ("The received netstring does not start with a "
+                                "length specification.")
+    _OVERFLOW = ("The length specification of the received netstring "
+                          "cannot be represented in Python - it causes an "
+                          "OverflowError!")
+    _TOO_LONG = ("The received netstring is longer than the maximum %s "
+                          "specified by self.MAX_LENGTH")
+    _MISSING_COMMA = "The received netstring is not terminated by a comma."
+
+    # The following constants are used for determining if the NetstringReceiver
+    # is parsing the length portion of a netstring, or the payload.
+    _PARSING_LENGTH, _PARSING_PAYLOAD = range(2)
+
+    def makeConnection(self, transport):
+        """
+        Initializes the protocol.
+        """
+        protocol.Protocol.makeConnection(self, transport)
+        self._remainingData = b""
+        self._currentPayloadSize = 0
+        self._payload = BytesIO()
+        self._state = self._PARSING_LENGTH
+        self._expectedPayloadSize = 0
+        self.brokenPeer = 0
+
+
+    def sendString(self, string):
+        """
+        Sends a netstring.
+
+        Wraps up C{string} by adding length information and a
+        trailing comma; writes the result to the transport.
+
+        @param string: The string to send.  The necessary framing (length
+            prefix, etc) will be added.
+        @type string: C{bytes}
+        """
+        self.transport.write(_formatNetstring(string))
+
+
+    def dataReceived(self, data):
+        """
+        Receives some characters of a netstring.
+
+        Whenever a complete netstring is received, this method extracts
+        its payload and calls L{stringReceived} to process it.
+
+        @param data: A chunk of data representing a (possibly partial)
+            netstring
+        @type data: C{bytes}
+        """
+        self._remainingData += data
+        while self._remainingData:
+            try:
+                self._consumeData()
+            except IncompleteNetstring:
+                break
+            except NetstringParseError:
+                self._handleParseError()
+                break
+
+
+    def stringReceived(self, string):
+        """
+        Override this for notification when each complete string is received.
+
+        @param string: The complete string which was received with all
+            framing (length prefix, etc) removed.
+        @type string: C{bytes}
+
+        @raise NotImplementedError: because the method has to be implemented
+            by the child class.
+        """
+        raise NotImplementedError()
+
+
+    def _maxLengthSize(self):
+        """
+        Calculate and return the string size of C{self.MAX_LENGTH}.
+
+        @return: The size of the string representation for C{self.MAX_LENGTH}
+        @rtype: C{float}
+        """
+        return math.ceil(math.log10(self.MAX_LENGTH)) + 1
+
+
+    def _consumeData(self):
+        """
+        Consumes the content of C{self._remainingData}.
+
+        @raise IncompleteNetstring: if C{self._remainingData} does not
+            contain enough data to complete the current netstring.
+        @raise NetstringParseError: if the received data do not
+            form a valid netstring.
+        """
+        if self._state == self._PARSING_LENGTH:
+            self._consumeLength()
+            self._prepareForPayloadConsumption()
+        if self._state == self._PARSING_PAYLOAD:
+            self._consumePayload()
+
+
+    def _consumeLength(self):
+        """
+        Consumes the length portion of C{self._remainingData}.
+
+        @raise IncompleteNetstring: if C{self._remainingData} contains
+            a partial length specification (digits without trailing
+            comma).
+        @raise NetstringParseError: if the received data do not form a valid
+            netstring.
+        """
+        lengthMatch = self._LENGTH.match(self._remainingData)
+        if not lengthMatch:
+            self._checkPartialLengthSpecification()
+            raise IncompleteNetstring()
+        self._processLength(lengthMatch)
+
+
+    def _checkPartialLengthSpecification(self):
+        """
+        Makes sure that the received data represents a valid number.
+
+        Checks if C{self._remainingData} represents a number smaller or
+        equal to C{self.MAX_LENGTH}.
+
+        @raise NetstringParseError: if C{self._remainingData} is not a
+            number or is too big (checked by L{_extractLength}).
+        """
+        partialLengthMatch = self._LENGTH_PREFIX.match(self._remainingData)
+        if not partialLengthMatch:
+            raise NetstringParseError(self._MISSING_LENGTH)
+        lengthSpecification = (partialLengthMatch.group(1))
+        self._extractLength(lengthSpecification)
+
+
+    def _processLength(self, lengthMatch):
+        """
+        Processes the length definition of a netstring.
+
+        Extracts and stores in C{self._expectedPayloadSize} the number
+        representing the netstring size.  Removes the prefix
+        representing the length specification from
+        C{self._remainingData}.
+
+        @raise NetstringParseError: if the received netstring does not
+            start with a number or the number is bigger than
+            C{self.MAX_LENGTH}.
+        @param lengthMatch: A regular expression match object matching
+            a netstring length specification
+        @type lengthMatch: C{re.Match}
+        """
+        endOfNumber = lengthMatch.end(1)
+        startOfData = lengthMatch.end(2)
+        lengthString = self._remainingData[:endOfNumber]
+        # Expect payload plus trailing comma:
+        self._expectedPayloadSize = self._extractLength(lengthString) + 1
+        self._remainingData = self._remainingData[startOfData:]
+
+
+    def _extractLength(self, lengthAsString):
+        """
+        Attempts to extract the length information of a netstring.
+
+        @raise NetstringParseError: if the number is bigger than
+            C{self.MAX_LENGTH}.
+        @param lengthAsString: A chunk of data starting with a length
+            specification
+        @type lengthAsString: C{bytes}
+        @return: The length of the netstring
+        @rtype: C{int}
+        """
+        self._checkStringSize(lengthAsString)
+        length = int(lengthAsString)
+        if length > self.MAX_LENGTH:
+            raise NetstringParseError(self._TOO_LONG % (self.MAX_LENGTH,))
+        return length
+
+
+    def _checkStringSize(self, lengthAsString):
+        """
+        Checks the sanity of lengthAsString.
+
+        Checks if the size of the length specification exceeds the
+        size of the string representing self.MAX_LENGTH. If it does,
+        the number represented by lengthAsString is certainly bigger
+        than self.MAX_LENGTH, and a NetstringParseError is raised.
+
+        This method should make sure that netstrings with extremely
+        long length specifications are refused before even attempting
+        to convert them to an integer (which might trigger a
+        MemoryError).
+        """
+        if len(lengthAsString) > self._maxLengthSize():
+            raise NetstringParseError(self._TOO_LONG % (self.MAX_LENGTH,))
+
+
+    def _prepareForPayloadConsumption(self):
+        """
+        Sets up variables necessary for consuming the payload of a netstring.
+        """
+        self._state = self._PARSING_PAYLOAD
+        self._currentPayloadSize = 0
+        self._payload.seek(0)
+        self._payload.truncate()
+
+
+    def _consumePayload(self):
+        """
+        Consumes the payload portion of C{self._remainingData}.
+
+        If the payload is complete, checks for the trailing comma and
+        processes the payload. If not, raises an L{IncompleteNetstring}
+        exception.
+
+        @raise IncompleteNetstring: if the payload received so far
+            contains fewer characters than expected.
+        @raise NetstringParseError: if the payload does not end with a
+            comma.
+        """
+        self._extractPayload()
+        if self._currentPayloadSize < self._expectedPayloadSize:
+            raise IncompleteNetstring()
+        self._checkForTrailingComma()
+        self._state = self._PARSING_LENGTH
+        self._processPayload()
+
+
+    def _extractPayload(self):
+        """
+        Extracts payload information from C{self._remainingData}.
+
+        Splits C{self._remainingData} at the end of the netstring.  The
+        first part becomes C{self._payload}, the second part is stored
+        in C{self._remainingData}.
+
+        If the netstring is not yet complete, the whole content of
+        C{self._remainingData} is moved to C{self._payload}.
+        """
+        if self._payloadComplete():
+            remainingPayloadSize = (self._expectedPayloadSize -
+                                    self._currentPayloadSize)
+            self._payload.write(self._remainingData[:remainingPayloadSize])
+            self._remainingData = self._remainingData[remainingPayloadSize:]
+            self._currentPayloadSize = self._expectedPayloadSize
+        else:
+            self._payload.write(self._remainingData)
+            self._currentPayloadSize += len(self._remainingData)
+            self._remainingData = b""
+
+
+    def _payloadComplete(self):
+        """
+        Checks if enough data have been received to complete the netstring.
+
+        @return: C{True} iff the received data contain at least as many
+            characters as specified in the length section of the
+            netstring
+        @rtype: C{bool}
+        """
+        return (len(self._remainingData) + self._currentPayloadSize >=
+                self._expectedPayloadSize)
+
+
+    def _processPayload(self):
+        """
+        Processes the actual payload with L{stringReceived}.
+
+        Strips C{self._payload} of the trailing comma and calls
+        L{stringReceived} with the result.
+        """
+        self.stringReceived(self._payload.getvalue()[:-1])
+
+
+    def _checkForTrailingComma(self):
+        """
+        Checks if the netstring has a trailing comma at the expected position.
+
+        @raise NetstringParseError: if the last payload character is
+            anything but a comma.
+        """
+        if self._payload.getvalue()[-1:] != b",":
+            raise NetstringParseError(self._MISSING_COMMA)
+
+
+    def _handleParseError(self):
+        """
+        Terminates the connection and sets the flag C{self.brokenPeer}.
+        """
+        self.transport.loseConnection()
+        self.brokenPeer = 1
+
+
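A minimal sketch of using the receiver with a fake transport (the
PrintNetstrings name is illustrative, not part of this patch):

    from twisted.test.proto_helpers import StringTransport
    from twisted.protocols.basic import NetstringReceiver

    class PrintNetstrings(NetstringReceiver):
        def stringReceived(self, string):
            # Called once per complete netstring, framing already removed.
            print('got: %r' % (string,))

    proto = PrintNetstrings()
    proto.makeConnection(StringTransport())
    proto.dataReceived(b'5:hello,3:')   # delivers 'hello'; '3:' is buffered
    proto.dataReceived(b'amp,')         # completes the second netstring: 'amp'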
+
+class LineOnlyReceiver(protocol.Protocol):
+    """
+    A protocol that receives only lines.
+
+    This is purely a speed optimisation over LineReceiver, for the
+    cases that raw mode is known to be unnecessary.
+
+    @cvar delimiter: The line-ending delimiter to use. By default this is
+                     C{b'\\r\\n'}.
+    @cvar MAX_LENGTH: The maximum length of a line to allow (If a
+                      sent line is longer than this, the connection is dropped).
+                      Default is 16384.
+    """
+    _buffer = b''
+    delimiter = b'\r\n'
+    MAX_LENGTH = 16384
+
+    def dataReceived(self, data):
+        """
+        Translates bytes into lines, and calls lineReceived.
+        """
+        lines = (self._buffer + data).split(self.delimiter)
+        self._buffer = lines.pop(-1)
+        for line in lines:
+            if self.transport.disconnecting:
+                # this is necessary because the transport may be told to lose
+                # the connection by a line within a larger packet, and it is
+                # important to disregard all the lines in that packet following
+                # the one that told it to close.
+                return
+            if len(line) > self.MAX_LENGTH:
+                return self.lineLengthExceeded(line)
+            else:
+                self.lineReceived(line)
+        if len(self._buffer) > self.MAX_LENGTH:
+            return self.lineLengthExceeded(self._buffer)
+
+
+    def lineReceived(self, line):
+        """
+        Override this for when each line is received.
+
+        @param line: The line which was received with the delimiter removed.
+        @type line: C{bytes}
+        """
+        raise NotImplementedError
+
+
+    def sendLine(self, line):
+        """
+        Sends a line to the other end of the connection.
+
+        @param line: The line to send, not including the delimiter.
+        @type line: C{bytes}
+        """
+        return self.transport.writeSequence((line, self.delimiter))
+
+
+    def lineLengthExceeded(self, line):
+        """
+        Called when the maximum line length has been reached.
+        Override if it needs to be dealt with in some special way.
+        """
+        return error.ConnectionLost('Line length exceeded')
+
+
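A minimal sketch of a line-only protocol (the EchoLines name is illustrative):

    from twisted.protocols.basic import LineOnlyReceiver

    class EchoLines(LineOnlyReceiver):
        delimiter = b'\n'    # accept bare newlines instead of the CRLF default

        def lineReceived(self, line):
            # Echo each complete line back; sendLine re-adds the delimiter.
            self.sendLine(b'echo: ' + line)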
+
+class _PauseableMixin:
+    paused = False
+
+    def pauseProducing(self):
+        self.paused = True
+        self.transport.pauseProducing()
+
+
+    def resumeProducing(self):
+        self.paused = False
+        self.transport.resumeProducing()
+        self.dataReceived(b'')
+
+
+    def stopProducing(self):
+        self.paused = True
+        self.transport.stopProducing()
+
+
+
+class LineReceiver(protocol.Protocol, _PauseableMixin):
+    """
+    A protocol that receives lines and/or raw data, depending on mode.
+
+    In line mode, each line that's received becomes a callback to
+    L{lineReceived}.  In raw data mode, each chunk of raw data becomes a
+    callback to L{rawDataReceived}.  The L{setLineMode} and L{setRawMode}
+    methods switch between the two modes.
+
+    This is useful for line-oriented protocols such as IRC, HTTP, POP, etc.
+
+    @cvar delimiter: The line-ending delimiter to use. By default this is
+                     C{b'\\r\\n'}.
+    @cvar MAX_LENGTH: The maximum length of a line to allow (If a
+                      sent line is longer than this, the connection is dropped).
+                      Default is 16384.
+    """
+    line_mode = 1
+    _buffer = b''
+    _busyReceiving = False
+    delimiter = b'\r\n'
+    MAX_LENGTH = 16384
+
+    def clearLineBuffer(self):
+        """
+        Clear buffered data.
+
+        @return: All of the cleared buffered data.
+        @rtype: C{bytes}
+        """
+        b, self._buffer = self._buffer, b""
+        return b
+
+
+    def dataReceived(self, data):
+        """
+        Protocol.dataReceived.
+        Translates bytes into lines, and calls lineReceived (or
+        rawDataReceived, depending on mode.)
+        """
+        if self._busyReceiving:
+            self._buffer += data
+            return
+
+        try:
+            self._busyReceiving = True
+            self._buffer += data
+            while self._buffer and not self.paused:
+                if self.line_mode:
+                    try:
+                        line, self._buffer = self._buffer.split(
+                            self.delimiter, 1)
+                    except ValueError:
+                        if len(self._buffer) > self.MAX_LENGTH:
+                            line, self._buffer = self._buffer, b''
+                            return self.lineLengthExceeded(line)
+                        return
+                    else:
+                        lineLength = len(line)
+                        if lineLength > self.MAX_LENGTH:
+                            exceeded = line + self._buffer
+                            self._buffer = b''
+                            return self.lineLengthExceeded(exceeded)
+                        why = self.lineReceived(line)
+                        if (why or self.transport and
+                            self.transport.disconnecting):
+                            return why
+                else:
+                    data = self._buffer
+                    self._buffer = b''
+                    why = self.rawDataReceived(data)
+                    if why:
+                        return why
+        finally:
+            self._busyReceiving = False
+
+
+    def setLineMode(self, extra=b''):
+        """
+        Sets the line-mode of this receiver.
+
+        If you are calling this from a rawDataReceived callback,
+        you can pass in extra unhandled data, and that data will
+        be parsed for lines.  Further data received will be sent
+        to lineReceived rather than rawDataReceived.
+
+        Do not pass extra data if calling this function from
+        within a lineReceived callback.
+        """
+        self.line_mode = 1
+        if extra:
+            return self.dataReceived(extra)
+
+
+    def setRawMode(self):
+        """
+        Sets the raw mode of this receiver.
+        Further data received will be sent to rawDataReceived rather
+        than lineReceived.
+        """
+        self.line_mode = 0
+
+
+    def rawDataReceived(self, data):
+        """
+        Override this for when raw data is received.
+        """
+        raise NotImplementedError
+
+
+    def lineReceived(self, line):
+        """
+        Override this for when each line is received.
+
+        @param line: The line which was received with the delimiter removed.
+        @type line: C{bytes}
+        """
+        raise NotImplementedError
+
+
+    def sendLine(self, line):
+        """
+        Sends a line to the other end of the connection.
+
+        @param line: The line to send, not including the delimiter.
+        @type line: C{bytes}
+        """
+        return self.transport.write(line + self.delimiter)
+
+
+    def lineLengthExceeded(self, line):
+        """
+        Called when the maximum line length has been reached.
+        Override if it needs to be dealt with in some special way.
+
+        The argument 'line' contains the remainder of the buffer, starting
+        with (at least some part of) the line which is too long.  This may
+        be more than one line, or may be only the initial portion of the
+        line.
+        """
+        return self.transport.loseConnection()
+
+
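+# A minimal usage sketch (illustrative only; the Echo class, the factory
+# wiring and the port number below are hypothetical, not part of this module):
+#
+#     from twisted.internet import protocol, reactor
+#     from twisted.protocols.basic import LineReceiver
+#
+#     class Echo(LineReceiver):
+#         def lineReceived(self, line):
+#             # Each delimiter-terminated line arrives with the delimiter
+#             # already stripped.
+#             self.sendLine(b"echo: " + line)
+#
+#     factory = protocol.ServerFactory()
+#     factory.protocol = Echo
+#     reactor.listenTCP(8007, factory)
+#     reactor.run()
+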
+
+class StringTooLongError(AssertionError):
+    """
+    Raised when trying to send a string too long for a length prefixed
+    protocol.
+    """
+
+
+
+class _RecvdCompatHack(object):
+    """
+    Emulates the to-be-deprecated C{IntNStringReceiver.recvd} attribute.
+
+    The C{recvd} attribute was where the working buffer for buffering and
+    parsing netstrings was kept.  It was updated each time new data arrived and
+    each time some of that data was parsed and delivered to application code.
+    The piecemeal updates to its string value were expensive and have been
+    removed from C{IntNStringReceiver} in the normal case.  However, for
+    applications directly reading this attribute, this descriptor restores that
+    behavior.  It only copies the working buffer when necessary (i.e., when
+    accessed).  This avoids the cost for applications not using the data.
+
+    This is a custom descriptor rather than a property, because we still need
+    the default __set__ behavior in both new-style and old-style subclasses.
+    """
+    def __get__(self, oself, type=None):
+        return oself._unprocessed[oself._compatibilityOffset:]
+
+
+
+class IntNStringReceiver(protocol.Protocol, _PauseableMixin):
+    """
+    Generic class for length prefixed protocols.
+
+    @ivar _unprocessed: bytes received, but not yet broken up into messages /
+        sent to stringReceived.  _compatibilityOffset must be updated when this
+        value is updated so that the C{recvd} attribute can be generated
+        correctly.
+    @type _unprocessed: C{bytes}
+
+    @ivar structFormat: format used for struct packing/unpacking. Define it in
+        subclass.
+    @type structFormat: C{str}
+
+    @ivar prefixLength: length of the prefix, in bytes. Define it in subclass,
+        using C{struct.calcsize(structFormat)}
+    @type prefixLength: C{int}
+
+    @ivar _compatibilityOffset: the offset within C{_unprocessed} to the next
+        message to be parsed. (used to generate the recvd attribute)
+    @type _compatibilityOffset: C{int}
+    """
+
+    MAX_LENGTH = 99999
+    _unprocessed = b""
+    _compatibilityOffset = 0
+
+    # Backwards compatibility support for applications which directly touch the
+    # "internal" parse buffer.
+    recvd = _RecvdCompatHack()
+
+    def stringReceived(self, string):
+        """
+        Override this for notification when each complete string is received.
+
+        @param string: The complete string which was received with all
+            framing (length prefix, etc) removed.
+        @type string: C{bytes}
+        """
+        raise NotImplementedError
+
+
+    def lengthLimitExceeded(self, length):
+        """
+        Callback invoked when a length prefix greater than C{MAX_LENGTH} is
+        received.  The default implementation disconnects the transport.
+        Override this for different behavior.
+
+        @param length: The length prefix which was received.
+        @type length: C{int}
+        """
+        self.transport.loseConnection()
+
+
+    def dataReceived(self, data):
+        """
+        Convert int prefixed strings into calls to stringReceived.
+        """
+        # Try to minimize string copying (via slices) by keeping one buffer
+        # containing all the data we have so far and a separate offset into that
+        # buffer.
+        alldata = self._unprocessed + data
+        currentOffset = 0
+        prefixLength = self.prefixLength
+        fmt = self.structFormat
+        self._unprocessed = alldata
+
+        while len(alldata) >= (currentOffset + prefixLength) and not self.paused:
+            messageStart = currentOffset + prefixLength
+            length, = unpack(fmt, alldata[currentOffset:messageStart])
+            if length > self.MAX_LENGTH:
+                self._unprocessed = alldata
+                self._compatibilityOffset = currentOffset
+                self.lengthLimitExceeded(length)
+                return
+            messageEnd = messageStart + length
+            if len(alldata) < messageEnd:
+                break
+
+            # Here we have to slice the working buffer so we can send just the
+            # netstring into the stringReceived callback.
+            packet = alldata[messageStart:messageEnd]
+            currentOffset = messageEnd
+            self._compatibilityOffset = currentOffset
+            self.stringReceived(packet)
+
+            # Check to see if the backwards compat "recvd" attribute got written
+            # to by application code.  If so, drop the current data buffer and
+            # switch to the new buffer given by that attribute's value.
+            if 'recvd' in self.__dict__:
+                alldata = self.__dict__.pop('recvd')
+                self._unprocessed = alldata
+                self._compatibilityOffset = currentOffset = 0
+                if alldata:
+                    continue
+                return
+
+        # Slice off all the data that has been processed, avoiding holding onto
+        # memory to store it, and update the compatibility attributes to reflect
+        # that change.
+        self._unprocessed = alldata[currentOffset:]
+        self._compatibilityOffset = 0
+
+
+    def sendString(self, string):
+        """
+        Send a prefixed string to the other end of the connection.
+
+        @param string: The string to send.  The necessary framing (length
+            prefix, etc) will be added.
+        @type string: C{bytes}
+        """
+        if len(string) >= 2 ** (8 * self.prefixLength):
+            raise StringTooLongError(
+                "Try to send %s bytes whereas maximum is %s" % (
+                len(string), 2 ** (8 * self.prefixLength)))
+        self.transport.write(
+            pack(self.structFormat, len(string)) + string)
+
+
+
+class Int32StringReceiver(IntNStringReceiver):
+    """
+    A receiver for int32-prefixed strings.
+
+    An int32 string is a string prefixed by 4 bytes, the 32-bit length of
+    the string encoded in network byte order.
+
+    This class publishes the same interface as NetstringReceiver.
+    """
+    structFormat = "!I"
+    prefixLength = calcsize(structFormat)
+
+
+
+class Int16StringReceiver(IntNStringReceiver):
+    """
+    A receiver for int16-prefixed strings.
+
+    An int16 string is a string prefixed by 2 bytes, the 16-bit length of
+    the string encoded in network byte order.
+
+    This class publishes the same interface as NetstringReceiver.
+    """
+    structFormat = "!H"
+    prefixLength = calcsize(structFormat)
+
+
+
+class Int8StringReceiver(IntNStringReceiver):
+    """
+    A receiver for int8-prefixed strings.
+
+    An int8 string is a string prefixed by 1 byte, the 8-bit length of
+    the string.
+
+    This class publishes the same interface as NetstringReceiver.
+    """
+    structFormat = "!B"
+    prefixLength = calcsize(structFormat)
+
+
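+# A brief sketch of the length-prefixed framing (illustrative only; the
+# KeepLast class name is hypothetical):
+#
+#     from twisted.protocols.basic import Int32StringReceiver
+#
+#     class KeepLast(Int32StringReceiver):
+#         def stringReceived(self, string):
+#             # One complete message, with the 4-byte prefix already removed.
+#             self.last = string
+#
+#     # On the sending side, sendString(b"hi") writes pack("!I", 2) + b"hi",
+#     # i.e. b"\x00\x00\x00\x02hi", to the transport.
+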
+
+class StatefulStringProtocol:
+    """
+    A stateful string protocol.
+
+    This is a mixin for string protocols (Int32StringReceiver,
+    NetstringReceiver) which translates stringReceived into a callback
+    (prefixed with 'proto_') depending on state.
+
+    The state 'done' is special; if a proto_* method returns it, the
+    connection will be closed immediately.
+    """
+
+    state = 'init'
+
+    def stringReceived(self, string):
+        """
+        Choose a protocol phase function and call it.
+
+        Call back to the appropriate protocol phase; this begins with
+        the function proto_init and moves on to proto_* depending on
+        what each proto_* function returns.  (For example, if
+        self.proto_init returns 'foo', then self.proto_foo will be the
+        next function called when a protocol message is received.)
+        """
+        try:
+            pto = 'proto_' + self.state
+            statehandler = getattr(self, pto)
+        except AttributeError:
+            log.msg('callback', self.state, 'not found')
+        else:
+            self.state = statehandler(string)
+            if self.state == 'done':
+                self.transport.loseConnection()
+
+
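+# A minimal sketch of the proto_* dispatch (illustrative only; the Greeter
+# class and its single state are hypothetical):
+#
+#     from twisted.protocols.basic import (
+#         Int16StringReceiver, StatefulStringProtocol)
+#
+#     class Greeter(StatefulStringProtocol, Int16StringReceiver):
+#         def proto_init(self, string):
+#             # Called for the first string; the return value names the
+#             # next state ('done' closes the connection).
+#             self.sendString(b"hello " + string)
+#             return 'done'
+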
+
+@implementer(interfaces.IProducer)
+class FileSender:
+    """
+    A producer that sends the contents of a file to a consumer.
+
+    This is a helper for protocols that, at some point, will take a
+    file-like object, read its contents, and write them out to the network,
+    optionally performing some transformation on the bytes in between.
+    """
+
+    CHUNK_SIZE = 2 ** 14
+
+    lastSent = ''
+    deferred = None
+
+    def beginFileTransfer(self, file, consumer, transform = None):
+        """
+        Begin transferring a file
+
+        @type file: Any file-like object
+        @param file: The file object to read data from
+
+        @type consumer: Any implementor of IConsumer
+        @param consumer: The object to write data to
+
+        @param transform: A callable taking one string argument and returning
+        the same.  All bytes read from the file are passed through this before
+        being written to the consumer.
+
+        @rtype: C{Deferred}
+        @return: A deferred whose callback will be invoked when the file has
+        been completely written to the consumer. The last byte written to the
+        consumer is passed to the callback.
+        """
+        self.file = file
+        self.consumer = consumer
+        self.transform = transform
+
+        self.deferred = deferred = defer.Deferred()
+        self.consumer.registerProducer(self, False)
+        return deferred
+
+
+    def resumeProducing(self):
+        chunk = ''
+        if self.file:
+            chunk = self.file.read(self.CHUNK_SIZE)
+        if not chunk:
+            self.file = None
+            self.consumer.unregisterProducer()
+            if self.deferred:
+                self.deferred.callback(self.lastSent)
+                self.deferred = None
+            return
+
+        if self.transform:
+            chunk = self.transform(chunk)
+        self.consumer.write(chunk)
+        self.lastSent = chunk[-1]
+
+
+    def pauseProducing(self):
+        pass
+
+
+    def stopProducing(self):
+        if self.deferred:
+            self.deferred.errback(
+                Exception("Consumer asked us to stop producing"))
+            self.deferred = None
+
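+# A brief usage sketch for FileSender (illustrative only; the file name and
+# the transport passed in are hypothetical):
+#
+#     from twisted.protocols.basic import FileSender
+#
+#     def sendIt(transport):
+#         f = open("payload.bin", "rb")
+#         d = FileSender().beginFileTransfer(f, transport)
+#         def done(result):
+#             f.close()
+#             return result
+#         d.addBoth(done)
+#         return d   # fires with the last byte written once the file is sent
+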
+if _PY3:
+    # Add it back as part of ticket #6026:
+    del FileSender
diff --git a/ThirdParty/Twisted/twisted/protocols/dict.py b/ThirdParty/Twisted/twisted/protocols/dict.py
new file mode 100644
index 0000000..c3af402
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/dict.py
@@ -0,0 +1,362 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Dict client protocol implementation.
+
+@author: Pavel Pergamenshchik
+"""
+
+from twisted.protocols import basic
+from twisted.internet import defer, protocol
+from twisted.python import log
+from StringIO import StringIO
+
+def parseParam(line):
+    """Chew one dqstring or atom from beginning of line and return (param, remaningline)"""
+    if line == '':
+        return (None, '')
+    elif line[0] != '"': # atom
+        mode = 1
+    else: # dqstring
+        mode = 2
+    res = ""
+    io = StringIO(line)
+    if mode == 2: # skip the opening quote
+        io.read(1)
+    while 1:
+        a = io.read(1)
+        if a == '"':
+            if mode == 2:
+                io.read(1) # skip the separating space
+                return (res, io.read())
+        elif a == '\\':
+            a = io.read(1)
+            if a == '':
+                return (None, line) # unexpected end of string
+        elif a == '':
+            if mode == 1:
+                return (res, io.read())
+            else:
+                return (None, line) # unexpected end of string
+        elif a == ' ':
+            if mode == 1:
+                return (res, io.read())
+        res += a
+
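+# A few illustrative calls (a sketch of the expected results):
+#
+#     parseParam('word database')      => ('word', 'database')
+#     parseParam('"two words" rest')   => ('two words', 'rest')
+#     parseParam('')                   => (None, '')
+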
+def makeAtom(line):
+    """Munch a string into an 'atom'"""
+    # FIXME: proper quoting
+    return filter(lambda x: not (x in map(chr, range(33)+[34, 39, 92])), line)
+
+def makeWord(s):
+    mustquote = range(33)+[34, 39, 92]
+    result = []
+    for c in s:
+        if ord(c) in mustquote:
+            result.append("\\")
+        result.append(c)
+    s = "".join(result)
+    return s
+
+def parseText(line):
+    if len(line) == 1 and line == '.':
+        return None
+    else:
+        if len(line) > 1 and line[0:2] == '..':
+            line = line[1:]
+        return line
+
+class Definition:
+    """A word definition"""
+    def __init__(self, name, db, dbdesc, text):
+        self.name = name
+        self.db = db
+        self.dbdesc = dbdesc
+        self.text = text # list of strings not terminated by newline
+
+class DictClient(basic.LineReceiver):
+    """dict (RFC2229) client"""
+
+    data = None # multiline data
+    MAX_LENGTH = 1024
+    state = None
+    mode = None
+    result = None
+    factory = None
+
+    def __init__(self):
+        self.data = None
+        self.result = None
+
+    def connectionMade(self):
+        self.state = "conn"
+        self.mode = "command"
+
+    def sendLine(self, line):
+        """Throw up if the line is longer than 1022 characters"""
+        if len(line) > self.MAX_LENGTH - 2:
+            raise ValueError("DictClient tried to send a too long line")
+        basic.LineReceiver.sendLine(self, line)
+
+    def lineReceived(self, line):
+        try:
+            line = line.decode("UTF-8")
+        except UnicodeError: # garbage received, skip
+            return
+        if self.mode == "text": # we are receiving textual data
+            code = "text"
+        else:
+            if len(line) < 4:
+                log.msg("DictClient got invalid line from server -- %s" % line)
+                self.protocolError("Invalid line from server")
+                self.transport.loseConnection()
+                return
+            code = int(line[:3])
+            line = line[4:]
+        method = getattr(self, 'dictCode_%s_%s' % (code, self.state), self.dictCode_default)
+        method(line)
+
+    def dictCode_default(self, line):
+        """Unkown message"""
+        log.msg("DictClient got unexpected message from server -- %s" % line)
+        self.protocolError("Unexpected server message")
+        self.transport.loseConnection()
+
+    def dictCode_221_ready(self, line):
+        """We are about to get kicked off, do nothing"""
+        pass
+
+    def dictCode_220_conn(self, line):
+        """Greeting message"""
+        self.state = "ready"
+        self.dictConnected()
+
+    def dictCode_530_conn(self, line):
+        self.protocolError("Access denied")
+        self.transport.loseConnection()
+
+    def dictCode_420_conn(self, line):
+        self.protocolError("Server temporarily unavailable")
+        self.transport.loseConnection()
+
+    def dictCode_421_conn(self, line):
+        self.protocolError("Server shutting down at operator request")
+        self.transport.loseConnection()
+
+    def sendDefine(self, database, word):
+        """Send a dict DEFINE command"""
+        assert self.state == "ready", "DictClient.sendDefine called when not in ready state"
+        self.result = None  # these two are just in case. In "ready" state, result and data
+        self.data = None    # should be None
+        self.state = "define"
+        command = "DEFINE %s %s" % (makeAtom(database.encode("UTF-8")), makeWord(word.encode("UTF-8")))
+        self.sendLine(command)
+
+    def sendMatch(self, database, strategy, word):
+        """Send a dict MATCH command"""
+        assert self.state == "ready", "DictClient.sendMatch called when not in ready state"
+        self.result = None
+        self.data = None
+        self.state = "match"
+        command = "MATCH %s %s %s" % (makeAtom(database), makeAtom(strategy), makeAtom(word))
+        self.sendLine(command.encode("UTF-8"))
+
+    def dictCode_550_define(self, line):
+        """Invalid database"""
+        self.state = "ready"
+        self.defineFailed("Invalid database")
+
+    def dictCode_550_match(self, line):
+        """Invalid database"""
+        self.state = "ready"
+        self.matchFailed("Invalid database")
+
+    def dictCode_551_match(self, line):
+        """Invalid strategy"""
+        self.state = "ready"
+        self.matchFailed("Invalid strategy")
+
+    def dictCode_552_define(self, line):
+        """No match"""
+        self.state = "ready"
+        self.defineFailed("No match")
+
+    def dictCode_552_match(self, line):
+        """No match"""
+        self.state = "ready"
+        self.matchFailed("No match")
+
+    def dictCode_150_define(self, line):
+        """n definitions retrieved"""
+        self.result = []
+
+    def dictCode_151_define(self, line):
+        """Definition text follows"""
+        self.mode = "text"
+        (word, line) = parseParam(line)
+        (db, line) = parseParam(line)
+        (dbdesc, line) = parseParam(line)
+        if not (word and db and dbdesc):
+            self.protocolError("Invalid server response")
+            self.transport.loseConnection()
+        else:
+            self.result.append(Definition(word, db, dbdesc, []))
+            self.data = []
+
+    def dictCode_152_match(self, line):
+        """n matches found, text follows"""
+        self.mode = "text"
+        self.result = []
+        self.data = []
+
+    def dictCode_text_define(self, line):
+        """A line of definition text received"""
+        res = parseText(line)
+        if res is None:
+            self.mode = "command"
+            self.result[-1].text = self.data
+            self.data = None
+        else:
+            self.data.append(line)
+
+    def dictCode_text_match(self, line):
+        """One line of match text received"""
+        def l(s):
+            p1, t = parseParam(s)
+            p2, t = parseParam(t)
+            return (p1, p2)
+        res = parseText(line)
+        if res is None:
+            self.mode = "command"
+            self.result = map(l, self.data)
+            self.data = None
+        else:
+            self.data.append(line)
+
+    def dictCode_250_define(self, line):
+        """ok"""
+        t = self.result
+        self.result = None
+        self.state = "ready"
+        self.defineDone(t)
+
+    def dictCode_250_match(self, line):
+        """ok"""
+        t = self.result
+        self.result = None
+        self.state = "ready"
+        self.matchDone(t)
+    
+    def protocolError(self, reason):
+        """override to catch unexpected dict protocol conditions"""
+        pass
+
+    def dictConnected(self):
+        """override to be notified when the server is ready to accept commands"""
+        pass
+
+    def defineFailed(self, reason):
+        """override to catch reasonable failure responses to DEFINE"""
+        pass
+
+    def defineDone(self, result):
+        """override to catch successful DEFINE"""
+        pass
+
+    def matchFailed(self, reason):
+        """override to catch reasonable failure responses to MATCH"""
+        pass
+
+    def matchDone(self, result):
+        """override to catch successful MATCH"""
+        pass
+
+
+class InvalidResponse(Exception):
+    pass
+
+
+class DictLookup(DictClient):
+    """Utility class for a single dict transaction. To be used with DictLookupFactory"""
+
+    def protocolError(self, reason):
+        if not self.factory.done:
+            self.factory.d.errback(InvalidResponse(reason))
+            self.factory.clientDone()
+
+    def dictConnected(self):
+        if self.factory.queryType == "define":
+            apply(self.sendDefine, self.factory.param)
+        elif self.factory.queryType == "match":
+            apply(self.sendMatch, self.factory.param)
+
+    def defineFailed(self, reason):
+        self.factory.d.callback([])
+        self.factory.clientDone()
+        self.transport.loseConnection()
+
+    def defineDone(self, result):
+        self.factory.d.callback(result)
+        self.factory.clientDone()
+        self.transport.loseConnection()
+
+    def matchFailed(self, reason):
+        self.factory.d.callback([])
+        self.factory.clientDone()
+        self.transport.loseConnection()
+
+    def matchDone(self, result):
+        self.factory.d.callback(result)
+        self.factory.clientDone()
+        self.transport.loseConnection()
+
+
+class DictLookupFactory(protocol.ClientFactory):
+    """Utility factory for a single dict transaction"""
+    protocol = DictLookup
+    done = None
+
+    def __init__(self, queryType, param, d):
+        self.queryType = queryType
+        self.param = param
+        self.d = d
+        self.done = 0
+
+    def clientDone(self):
+        """Called by client when done."""
+        self.done = 1
+        del self.d
+    
+    def clientConnectionFailed(self, connector, error):
+        self.d.errback(error)
+
+    def clientConnectionLost(self, connector, error):
+        if not self.done:
+            self.d.errback(error)
+
+    def buildProtocol(self, addr):
+        p = self.protocol()
+        p.factory = self
+        return p
+
+
+def define(host, port, database, word):
+    """Look up a word using a dict server"""
+    d = defer.Deferred()
+    factory = DictLookupFactory("define", (database, word), d)
+    
+    from twisted.internet import reactor
+    reactor.connectTCP(host, port, factory)
+    return d
+
+def match(host, port, database, strategy, word):
+    """Match a word using a dict server"""
+    d = defer.Deferred()
+    factory = DictLookupFactory("match", (database, strategy, word), d)
+
+    from twisted.internet import reactor
+    reactor.connectTCP(host, port, factory)
+    return d
+
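+# A short usage sketch for define() (illustrative only; the host name and the
+# "wn" database are hypothetical, 2628 is the standard DICT port):
+#
+#     from twisted.internet import reactor
+#     from twisted.protocols.dict import define
+#
+#     def printDefinitions(definitions):
+#         for definition in definitions:
+#             print "\n".join(definition.text)
+#
+#     d = define("dict.example.org", 2628, "wn", "python")
+#     d.addCallback(printDefinitions)
+#     d.addBoth(lambda ign: reactor.stop())
+#     reactor.run()
+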
diff --git a/ThirdParty/Twisted/twisted/protocols/finger.py b/ThirdParty/Twisted/twisted/protocols/finger.py
new file mode 100644
index 0000000..fcb9396
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/finger.py
@@ -0,0 +1,42 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""The Finger User Information Protocol (RFC 1288)"""
+
+from twisted.protocols import basic
+
+class Finger(basic.LineReceiver):
+
+    def lineReceived(self, line):
+        parts = line.split()
+        if not parts:
+            parts = ['']
+        if len(parts) == 1:
+            slash_w = 0
+        else:
+            slash_w = 1
+        user = parts[-1]
+        if '@' in user:
+            host_place = user.rfind('@')
+            host = user[host_place+1:]
+            user = user[:host_place]
+            return self.forwardQuery(slash_w, user, host)
+        if user:
+            return self.getUser(slash_w, user)
+        else:
+            return self.getDomain(slash_w)
+
+    def _refuseMessage(self, message):
+        self.transport.write(message+"\n")
+        self.transport.loseConnection()
+
+    def forwardQuery(self, slash_w, user, host):
+        self._refuseMessage('Finger forwarding service denied')
+
+    def getDomain(self, slash_w):
+        self._refuseMessage('Finger online list denied')
+
+    def getUser(self, slash_w, user):
+        self.transport.write('Login: '+user+'\n')
+        self._refuseMessage('No such user')
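+
+
+# A minimal subclass sketch (illustrative only; the LocalFinger class and its
+# user table are hypothetical):
+#
+#     class LocalFinger(Finger):
+#         users = {'alice': 'Alice is logged in on tty1.'}
+#
+#         def getUser(self, slash_w, user):
+#             info = self.users.get(user)
+#             if info is None:
+#                 return Finger.getUser(self, slash_w, user)
+#             self.transport.write('Login: ' + user + '\n' + info + '\n')
+#             self.transport.loseConnection()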
diff --git a/ThirdParty/Twisted/twisted/protocols/ftp.py b/ThirdParty/Twisted/twisted/protocols/ftp.py
new file mode 100644
index 0000000..110dfe6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/ftp.py
@@ -0,0 +1,3059 @@
+# -*- test-case-name: twisted.test.test_ftp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An FTP protocol implementation
+"""
+
+# System Imports
+import os
+import time
+import re
+import operator
+import stat
+import errno
+import fnmatch
+import warnings
+
+try:
+    import pwd, grp
+except ImportError:
+    pwd = grp = None
+
+from zope.interface import Interface, implements
+
+# Twisted Imports
+from twisted import copyright
+from twisted.internet import reactor, interfaces, protocol, error, defer
+from twisted.protocols import basic, policies
+
+from twisted.python import log, failure, filepath
+from twisted.python.compat import reduce
+
+from twisted.cred import error as cred_error, portal, credentials, checkers
+
+# constants
+# response codes
+
+RESTART_MARKER_REPLY                    = "100"
+SERVICE_READY_IN_N_MINUTES              = "120"
+DATA_CNX_ALREADY_OPEN_START_XFR         = "125"
+FILE_STATUS_OK_OPEN_DATA_CNX            = "150"
+
+CMD_OK                                  = "200.1"
+TYPE_SET_OK                             = "200.2"
+ENTERING_PORT_MODE                      = "200.3"
+CMD_NOT_IMPLMNTD_SUPERFLUOUS            = "202"
+SYS_STATUS_OR_HELP_REPLY                = "211.1"
+FEAT_OK                                 = '211.2'
+DIR_STATUS                              = "212"
+FILE_STATUS                             = "213"
+HELP_MSG                                = "214"
+NAME_SYS_TYPE                           = "215"
+SVC_READY_FOR_NEW_USER                  = "220.1"
+WELCOME_MSG                             = "220.2"
+SVC_CLOSING_CTRL_CNX                    = "221.1"
+GOODBYE_MSG                             = "221.2"
+DATA_CNX_OPEN_NO_XFR_IN_PROGRESS        = "225"
+CLOSING_DATA_CNX                        = "226.1"
+TXFR_COMPLETE_OK                        = "226.2"
+ENTERING_PASV_MODE                      = "227"
+ENTERING_EPSV_MODE                      = "229"
+USR_LOGGED_IN_PROCEED                   = "230.1"     # v1 of code 230
+GUEST_LOGGED_IN_PROCEED                 = "230.2"     # v2 of code 230
+REQ_FILE_ACTN_COMPLETED_OK              = "250"
+PWD_REPLY                               = "257.1"
+MKD_REPLY                               = "257.2"
+
+USR_NAME_OK_NEED_PASS                   = "331.1"     # v1 of Code 331
+GUEST_NAME_OK_NEED_EMAIL                = "331.2"     # v2 of code 331
+NEED_ACCT_FOR_LOGIN                     = "332"
+REQ_FILE_ACTN_PENDING_FURTHER_INFO      = "350"
+
+SVC_NOT_AVAIL_CLOSING_CTRL_CNX          = "421.1"
+TOO_MANY_CONNECTIONS                    = "421.2"
+CANT_OPEN_DATA_CNX                      = "425"
+CNX_CLOSED_TXFR_ABORTED                 = "426"
+REQ_ACTN_ABRTD_FILE_UNAVAIL             = "450"
+REQ_ACTN_ABRTD_LOCAL_ERR                = "451"
+REQ_ACTN_ABRTD_INSUFF_STORAGE           = "452"
+
+SYNTAX_ERR                              = "500"
+SYNTAX_ERR_IN_ARGS                      = "501"
+CMD_NOT_IMPLMNTD                        = "502.1"
+OPTS_NOT_IMPLEMENTED                    = '502.2'
+BAD_CMD_SEQ                             = "503"
+CMD_NOT_IMPLMNTD_FOR_PARAM              = "504"
+NOT_LOGGED_IN                           = "530.1"     # v1 of code 530 - please log in
+AUTH_FAILURE                            = "530.2"     # v2 of code 530 - authorization failure
+NEED_ACCT_FOR_STOR                      = "532"
+FILE_NOT_FOUND                          = "550.1"     # no such file or directory
+PERMISSION_DENIED                       = "550.2"     # permission denied
+ANON_USER_DENIED                        = "550.3"     # anonymous users can't alter filesystem
+IS_NOT_A_DIR                            = "550.4"     # rmd called on a path that is not a directory
+REQ_ACTN_NOT_TAKEN                      = "550.5"
+FILE_EXISTS                             = "550.6"
+IS_A_DIR                                = "550.7"
+PAGE_TYPE_UNK                           = "551"
+EXCEEDED_STORAGE_ALLOC                  = "552"
+FILENAME_NOT_ALLOWED                    = "553"
+
+
+RESPONSE = {
+    # -- 100's --
+    RESTART_MARKER_REPLY:               '110 MARK yyyy-mmmm', # TODO: this must be fixed
+    SERVICE_READY_IN_N_MINUTES:         '120 service ready in %s minutes',
+    DATA_CNX_ALREADY_OPEN_START_XFR:    '125 Data connection already open, starting transfer',
+    FILE_STATUS_OK_OPEN_DATA_CNX:       '150 File status okay; about to open data connection.',
+
+    # -- 200's --
+    CMD_OK:                             '200 Command OK',
+    TYPE_SET_OK:                        '200 Type set to %s.',
+    ENTERING_PORT_MODE:                 '200 PORT OK',
+    CMD_NOT_IMPLMNTD_SUPERFLUOUS:       '202 Command not implemented, superfluous at this site',
+    SYS_STATUS_OR_HELP_REPLY:           '211 System status reply',
+    FEAT_OK:                            ['211-Features:','211 End'],
+    DIR_STATUS:                         '212 %s',
+    FILE_STATUS:                        '213 %s',
+    HELP_MSG:                           '214 help: %s',
+    NAME_SYS_TYPE:                      '215 UNIX Type: L8',
+    WELCOME_MSG:                        "220 %s",
+    SVC_READY_FOR_NEW_USER:             '220 Service ready',
+    SVC_CLOSING_CTRL_CNX:               '221 Service closing control connection',
+    GOODBYE_MSG:                        '221 Goodbye.',
+    DATA_CNX_OPEN_NO_XFR_IN_PROGRESS:   '225 data connection open, no transfer in progress',
+    CLOSING_DATA_CNX:                   '226 Abort successful',
+    TXFR_COMPLETE_OK:                   '226 Transfer Complete.',
+    ENTERING_PASV_MODE:                 '227 Entering Passive Mode (%s).',
+    ENTERING_EPSV_MODE:                 '229 Entering Extended Passive Mode (|||%s|).', # EPSV is defined in RFC 2428
+    USR_LOGGED_IN_PROCEED:              '230 User logged in, proceed',
+    GUEST_LOGGED_IN_PROCEED:            '230 Anonymous login ok, access restrictions apply.',
+    REQ_FILE_ACTN_COMPLETED_OK:         '250 Requested File Action Completed OK', #i.e. CWD completed ok
+    PWD_REPLY:                          '257 "%s"',
+    MKD_REPLY:                          '257 "%s" created',
+
+    # -- 300's --
+    USR_NAME_OK_NEED_PASS:              '331 Password required for %s.',
+    GUEST_NAME_OK_NEED_EMAIL:           '331 Guest login ok, type your email address as password.',
+    NEED_ACCT_FOR_LOGIN:                '332 Need account for login.',
+
+    REQ_FILE_ACTN_PENDING_FURTHER_INFO: '350 Requested file action pending further information.',
+
+    # -- 400's --
+    SVC_NOT_AVAIL_CLOSING_CTRL_CNX:     '421 Service not available, closing control connection.',
+    TOO_MANY_CONNECTIONS:               '421 Too many users right now, try again in a few minutes.',
+    CANT_OPEN_DATA_CNX:                 "425 Can't open data connection.",
+    CNX_CLOSED_TXFR_ABORTED:            '426 Transfer aborted.  Data connection closed.',
+
+    REQ_ACTN_ABRTD_FILE_UNAVAIL:        '450 Requested action aborted. File unavailable.',
+    REQ_ACTN_ABRTD_LOCAL_ERR:           '451 Requested action aborted. Local error in processing.',
+    REQ_ACTN_ABRTD_INSUFF_STORAGE:      '452 Requested action aborted. Insufficient storage.',
+
+    # -- 500's --
+    SYNTAX_ERR:                         "500 Syntax error: %s",
+    SYNTAX_ERR_IN_ARGS:                 '501 syntax error in argument(s) %s.',
+    CMD_NOT_IMPLMNTD:                   "502 Command '%s' not implemented",
+    OPTS_NOT_IMPLEMENTED:               "502 Option '%s' not implemented.",
+    BAD_CMD_SEQ:                        '503 Incorrect sequence of commands: %s',
+    CMD_NOT_IMPLMNTD_FOR_PARAM:         "504 Not implemented for parameter '%s'.",
+    NOT_LOGGED_IN:                      '530 Please login with USER and PASS.',
+    AUTH_FAILURE:                       '530 Sorry, Authentication failed.',
+    NEED_ACCT_FOR_STOR:                 '532 Need an account for storing files',
+    FILE_NOT_FOUND:                     '550 %s: No such file or directory.',
+    PERMISSION_DENIED:                  '550 %s: Permission denied.',
+    ANON_USER_DENIED:                   '550 Anonymous users are forbidden to change the filesystem',
+    IS_NOT_A_DIR:                       '550 Cannot rmd, %s is not a directory',
+    FILE_EXISTS:                        '550 %s: File exists',
+    IS_A_DIR:                           '550 %s: is a directory',
+    REQ_ACTN_NOT_TAKEN:                 '550 Requested action not taken: %s',
+    PAGE_TYPE_UNK:                      '551 Page type unknown',
+    EXCEEDED_STORAGE_ALLOC:             '552 Requested file action aborted, exceeded file storage allocation',
+    FILENAME_NOT_ALLOWED:               '553 Requested action not taken, file name not allowed'
+}
+
+
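+# The dotted keys above ("200.1", "230.2", ...) distinguish several reply
+# texts that share one numeric FTP code; the string actually sent always
+# starts with the plain three-digit code.  Illustrative formatting:
+#
+#     RESPONSE[TYPE_SET_OK] % 'I'
+#         => '200 Type set to I.'
+#     RESPONSE[USR_NAME_OK_NEED_PASS] % 'alice'
+#         => '331 Password required for alice.'
+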
+
+class InvalidPath(Exception):
+    """
+    Internal exception used to signify an error while parsing a path.
+    """
+
+
+
+def toSegments(cwd, path):
+    """
+    Normalize a path, as represented by a list of strings each
+    representing one segment of the path.
+    """
+    if path.startswith('/'):
+        segs = []
+    else:
+        segs = cwd[:]
+
+    for s in path.split('/'):
+        if s == '.' or s == '':
+            continue
+        elif s == '..':
+            if segs:
+                segs.pop()
+            else:
+                raise InvalidPath(cwd, path)
+        elif '\0' in s or '/' in s:
+            raise InvalidPath(cwd, path)
+        else:
+            segs.append(s)
+    return segs
+
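+# A sketch of how toSegments() resolves a path against the working directory
+# (illustrative values):
+#
+#     toSegments(['pub'], 'docs/readme.txt')  => ['pub', 'docs', 'readme.txt']
+#     toSegments(['pub'], '/docs')            => ['docs']
+#     toSegments(['pub'], '..')               => []
+#     toSegments([], '..')                    raises InvalidPath
+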
+
+def errnoToFailure(e, path):
+    """
+    Map C{OSError} and C{IOError} to standard FTP errors.
+    """
+    if e == errno.ENOENT:
+        return defer.fail(FileNotFoundError(path))
+    elif e == errno.EACCES or e == errno.EPERM:
+        return defer.fail(PermissionDeniedError(path))
+    elif e == errno.ENOTDIR:
+        return defer.fail(IsNotADirectoryError(path))
+    elif e == errno.EEXIST:
+        return defer.fail(FileExistsError(path))
+    elif e == errno.EISDIR:
+        return defer.fail(IsADirectoryError(path))
+    else:
+        return defer.fail()
+
+
+
+class FTPCmdError(Exception):
+    """
+    Generic exception for FTP commands.
+    """
+    def __init__(self, *msg):
+        Exception.__init__(self, *msg)
+        self.errorMessage = msg
+
+
+    def response(self):
+        """
+        Generate an FTP response message for this error.
+        """
+        return RESPONSE[self.errorCode] % self.errorMessage
+
+
+
+class FileNotFoundError(FTPCmdError):
+    """
+    Raised when trying to access a non-existent file or directory.
+    """
+    errorCode = FILE_NOT_FOUND
+
+
+
+class AnonUserDeniedError(FTPCmdError):
+    """
+    Raised when an anonymous user issues a command that will alter the
+    filesystem.
+    """
+
+    errorCode = ANON_USER_DENIED
+
+
+
+class PermissionDeniedError(FTPCmdError):
+    """
+    Raised when access is attempted to a resource to which access is
+    not allowed.
+    """
+    errorCode = PERMISSION_DENIED
+
+
+
+class IsNotADirectoryError(FTPCmdError):
+    """
+    Raised when RMD is called on a path that isn't a directory.
+    """
+    errorCode = IS_NOT_A_DIR
+
+
+
+class FileExistsError(FTPCmdError):
+    """
+    Raised when attempting to overwrite an existing resource.
+    """
+    errorCode = FILE_EXISTS
+
+
+
+class IsADirectoryError(FTPCmdError):
+    """
+    Raised when DELE is called on a path that is a directory.
+    """
+    errorCode = IS_A_DIR
+
+
+
+class CmdSyntaxError(FTPCmdError):
+    """
+    Raised when a command's syntax is wrong.
+    """
+    errorCode = SYNTAX_ERR
+
+
+
+class CmdArgSyntaxError(FTPCmdError):
+    """
+    Raised when a command is called with a wrong value or a wrong number of
+    arguments.
+    """
+    errorCode = SYNTAX_ERR_IN_ARGS
+
+
+
+class CmdNotImplementedError(FTPCmdError):
+    """
+    Raised when an unimplemented command is given to the server.
+    """
+    errorCode = CMD_NOT_IMPLMNTD
+
+
+
+class CmdNotImplementedForArgError(FTPCmdError):
+    """
+    Raised when the handling of a parameter for a command is not implemented by
+    the server.
+    """
+    errorCode = CMD_NOT_IMPLMNTD_FOR_PARAM
+
+
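+# Each FTPCmdError subclass pairs an errorCode with a RESPONSE template, so
+# the reply text can be produced directly from the exception.  A sketch (the
+# file name is made up):
+#
+#     FileNotFoundError('pub/missing.txt').response()
+#         => '550 pub/missing.txt: No such file or directory.'
+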
+
+class FTPError(Exception):
+    pass
+
+
+
+class PortConnectionError(Exception):
+    pass
+
+
+
+class BadCmdSequenceError(FTPCmdError):
+    """
+    Raised when a client sends a series of commands in an illogical sequence.
+    """
+    errorCode = BAD_CMD_SEQ
+
+
+
+class AuthorizationError(FTPCmdError):
+    """
+    Raised when client authentication fails.
+    """
+    errorCode = AUTH_FAILURE
+
+
+
+def debugDeferred(self, *_):
+    log.msg('debugDeferred(): %s' % str(_), debug=True)
+
+
+# -- DTP Protocol --
+
+
+_months = [
+    None,
+    'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+    'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+
+
+class DTP(object, protocol.Protocol):
+    implements(interfaces.IConsumer)
+
+    isConnected = False
+
+    _cons = None
+    _onConnLost = None
+    _buffer = None
+
+    def connectionMade(self):
+        self.isConnected = True
+        self.factory.deferred.callback(None)
+        self._buffer = []
+
+    def connectionLost(self, reason):
+        self.isConnected = False
+        if self._onConnLost is not None:
+            self._onConnLost.callback(None)
+
+    def sendLine(self, line):
+        self.transport.write(line + '\r\n')
+
+
+    def _formatOneListResponse(self, name, size, directory, permissions, hardlinks, modified, owner, group):
+        def formatMode(mode):
+            return ''.join([mode & (256 >> n) and 'rwx'[n % 3] or '-' for n in range(9)])
+
+        def formatDate(mtime):
+            now = time.gmtime()
+            info = {
+                'month': _months[mtime.tm_mon],
+                'day': mtime.tm_mday,
+                'year': mtime.tm_year,
+                'hour': mtime.tm_hour,
+                'minute': mtime.tm_min
+                }
+            if now.tm_year != mtime.tm_year:
+                return '%(month)s %(day)02d %(year)5d' % info
+            else:
+                return '%(month)s %(day)02d %(hour)02d:%(minute)02d' % info
+
+        format = ('%(directory)s%(permissions)s%(hardlinks)4d '
+                  '%(owner)-9s %(group)-9s %(size)15d %(date)12s '
+                  '%(name)s')
+
+        return format % {
+            'directory': directory and 'd' or '-',
+            'permissions': formatMode(permissions),
+            'hardlinks': hardlinks,
+            'owner': owner[:8],
+            'group': group[:8],
+            'size': size,
+            'date': formatDate(time.gmtime(modified)),
+            'name': name}
+
+    def sendListResponse(self, name, response):
+        self.sendLine(self._formatOneListResponse(name, *response))
+
+
+    # Proxy IConsumer to our transport
+    def registerProducer(self, producer, streaming):
+        return self.transport.registerProducer(producer, streaming)
+
+    def unregisterProducer(self):
+        self.transport.unregisterProducer()
+        self.transport.loseConnection()
+
+    def write(self, data):
+        if self.isConnected:
+            return self.transport.write(data)
+        raise Exception("Crap damn crap damn crap damn")
+
+
+    # Pretend to be a producer, too.
+    def _conswrite(self, bytes):
+        try:
+            self._cons.write(bytes)
+        except:
+            self._onConnLost.errback()
+
+    def dataReceived(self, bytes):
+        if self._cons is not None:
+            self._conswrite(bytes)
+        else:
+            self._buffer.append(bytes)
+
+    def _unregConsumer(self, ignored):
+        self._cons.unregisterProducer()
+        self._cons = None
+        del self._onConnLost
+        return ignored
+
+    def registerConsumer(self, cons):
+        assert self._cons is None
+        self._cons = cons
+        self._cons.registerProducer(self, True)
+        for chunk in self._buffer:
+            self._conswrite(chunk)
+        self._buffer = None
+        if self.isConnected:
+            self._onConnLost = d = defer.Deferred()
+            d.addBoth(self._unregConsumer)
+            return d
+        else:
+            self._cons.unregisterProducer()
+            self._cons = None
+            return defer.succeed(None)
+
+    def resumeProducing(self):
+        self.transport.resumeProducing()
+
+    def pauseProducing(self):
+        self.transport.pauseProducing()
+
+    def stopProducing(self):
+        self.transport.stopProducing()
+
+class DTPFactory(protocol.ClientFactory):
+    """
+    Client factory for I{data transfer process} protocols.
+
+    @ivar peerCheck: perform checks to make sure the ftp-pi's peer is the same
+        as the dtp's
+    @ivar pi: a reference to this factory's protocol interpreter
+
+    @ivar _state: Indicates the current state of the DTPFactory.  Initially,
+        this is L{_IN_PROGRESS}.  If the connection fails or times out, it is
+        L{_FAILED}.  If the connection succeeds before the timeout, it is
+        L{_FINISHED}.
+    """
+
+    _IN_PROGRESS = object()
+    _FAILED = object()
+    _FINISHED = object()
+
+    _state = _IN_PROGRESS
+
+    # -- configuration variables --
+    peerCheck = False
+
+    # -- class variables --
+    def __init__(self, pi, peerHost=None, reactor=None):
+        """
+        Constructor
+
+        @param pi: this factory's protocol interpreter
+        @param peerHost: if peerCheck is True, this is the tuple that the
+            generated instance will use to perform security checks
+        """
+        self.pi = pi                        # the protocol interpreter that is using this factory
+        self.peerHost = peerHost            # the peer host, from FTP.transport.getPeer()
+        self.deferred = defer.Deferred()    # deferred will fire when instance is connected
+        self.delayedCall = None
+        if reactor is None:
+            from twisted.internet import reactor
+        self._reactor = reactor
+
+
+    def buildProtocol(self, addr):
+        log.msg('DTPFactory.buildProtocol', debug=True)
+
+        if self._state is not self._IN_PROGRESS:
+            return None
+        self._state = self._FINISHED
+
+        self.cancelTimeout()
+        p = DTP()
+        p.factory = self
+        p.pi = self.pi
+        self.pi.dtpInstance = p
+        return p
+
+
+    def stopFactory(self):
+        log.msg('dtpFactory.stopFactory', debug=True)
+        self.cancelTimeout()
+
+
+    def timeoutFactory(self):
+        log.msg('timed out waiting for DTP connection')
+        if self._state is not self._IN_PROGRESS:
+            return
+        self._state = self._FAILED
+
+        d = self.deferred
+        self.deferred = None
+        d.errback(
+            PortConnectionError(defer.TimeoutError("DTPFactory timeout")))
+
+
+    def cancelTimeout(self):
+        if self.delayedCall is not None and self.delayedCall.active():
+            log.msg('cancelling DTP timeout', debug=True)
+            self.delayedCall.cancel()
+
+
+    def setTimeout(self, seconds):
+        log.msg('DTPFactory.setTimeout set to %s seconds' % seconds)
+        self.delayedCall = self._reactor.callLater(seconds, self.timeoutFactory)
+
+
+    def clientConnectionFailed(self, connector, reason):
+        if self._state is not self._IN_PROGRESS:
+            return
+        self._state = self._FAILED
+        d = self.deferred
+        self.deferred = None
+        d.errback(PortConnectionError(reason))
+
+
+# -- FTP-PI (Protocol Interpreter) --
+
+class ASCIIConsumerWrapper(object):
+    def __init__(self, cons):
+        self.cons = cons
+        self.registerProducer = cons.registerProducer
+        self.unregisterProducer = cons.unregisterProducer
+
+        assert os.linesep == "\r\n" or len(os.linesep) == 1, "Unsupported platform (yea right like this even exists)"
+
+        if os.linesep == "\r\n":
+            self.write = cons.write
+
+    def write(self, bytes):
+        return self.cons.write(bytes.replace(os.linesep, "\r\n"))
+
+
+
+class FileConsumer(object):
+    """
+    A consumer for FTP input that writes data to a file.
+
+    @ivar fObj: a file object opened for writing, used to write data received.
+    @type fObj: C{file}
+    """
+
+    implements(interfaces.IConsumer)
+
+    def __init__(self, fObj):
+        self.fObj = fObj
+
+
+    def registerProducer(self, producer, streaming):
+        self.producer = producer
+        assert streaming
+
+
+    def unregisterProducer(self):
+        self.producer = None
+        self.fObj.close()
+
+
+    def write(self, bytes):
+        self.fObj.write(bytes)
+
+
+
+class FTPOverflowProtocol(basic.LineReceiver):
+    """FTP mini-protocol for when there are too many connections."""
+    def connectionMade(self):
+        self.sendLine(RESPONSE[TOO_MANY_CONNECTIONS])
+        self.transport.loseConnection()
+
+
+class FTP(object, basic.LineReceiver, policies.TimeoutMixin):
+    """
+    Protocol Interpreter for the File Transfer Protocol
+
+    @ivar state: The current server state.  One of L{UNAUTH},
+        L{INAUTH}, L{AUTHED}, L{RENAMING}.
+
+    @ivar shell: The connected avatar
+    @ivar binary: The transfer mode.  If false, ASCII.
+    @ivar dtpFactory: Generates a single DTP for this session
+    @ivar dtpPort: Port returned from listenTCP
+    @ivar listenFactory: A callable with the signature of
+        L{twisted.internet.interfaces.IReactorTCP.listenTCP} which will be used
+        to create Ports for passive connections (mainly for testing).
+
+    @ivar passivePortRange: iterator used as source of passive port numbers.
+    @type passivePortRange: C{iterator}
+    """
+
+    disconnected = False
+
+    # States an FTP can be in
+    UNAUTH, INAUTH, AUTHED, RENAMING = range(4)
+
+    # how long the DTP waits for a connection
+    dtpTimeout = 10
+
+    portal = None
+    shell = None
+    dtpFactory = None
+    dtpPort = None
+    dtpInstance = None
+    binary = True
+    PUBLIC_COMMANDS = ['FEAT', 'QUIT']
+    FEATURES = ['FEAT', 'MDTM', 'PASV', 'SIZE', 'TYPE A;I']
+
+    passivePortRange = xrange(0, 1)
+
+    listenFactory = reactor.listenTCP
+
+    def reply(self, key, *args):
+        msg = RESPONSE[key] % args
+        self.sendLine(msg)
+
+
+    def connectionMade(self):
+        self.state = self.UNAUTH
+        self.setTimeout(self.timeOut)
+        self.reply(WELCOME_MSG, self.factory.welcomeMessage)
+
+    def connectionLost(self, reason):
+        # if we have a DTP protocol instance running and
+        # we lose connection to the client's PI, kill the
+        # DTP connection and close the port
+        if self.dtpFactory:
+            self.cleanupDTP()
+        self.setTimeout(None)
+        if hasattr(self.shell, 'logout') and self.shell.logout is not None:
+            self.shell.logout()
+        self.shell = None
+        self.transport = None
+
+    def timeoutConnection(self):
+        self.transport.loseConnection()
+
+    def lineReceived(self, line):
+        self.resetTimeout()
+        self.pauseProducing()
+
+        def processFailed(err):
+            if err.check(FTPCmdError):
+                self.sendLine(err.value.response())
+            elif (err.check(TypeError) and
+                  err.value.args[0].find('takes exactly') != -1):
+                self.reply(SYNTAX_ERR, "%s requires an argument." % (cmd,))
+            else:
+                log.msg("Unexpected FTP error")
+                log.err(err)
+                self.reply(REQ_ACTN_NOT_TAKEN, "internal server error")
+
+        def processSucceeded(result):
+            if isinstance(result, tuple):
+                self.reply(*result)
+            elif result is not None:
+                self.reply(result)
+
+        def allDone(ignored):
+            if not self.disconnected:
+                self.resumeProducing()
+
+        spaceIndex = line.find(' ')
+        if spaceIndex != -1:
+            cmd = line[:spaceIndex]
+            args = (line[spaceIndex + 1:],)
+        else:
+            cmd = line
+            args = ()
+        d = defer.maybeDeferred(self.processCommand, cmd, *args)
+        d.addCallbacks(processSucceeded, processFailed)
+        d.addErrback(log.err)
+
+        # XXX It burnsss
+        # LineReceiver doesn't let you resumeProducing inside
+        # lineReceived atm
+        from twisted.internet import reactor
+        reactor.callLater(0, d.addBoth, allDone)
+
+
+    def processCommand(self, cmd, *params):
+
+        def call_ftp_command(command):
+            method = getattr(self, "ftp_" + command, None)
+            if method is not None:
+                return method(*params)
+            return defer.fail(CmdNotImplementedError(command))
+
+        cmd = cmd.upper()
+
+        if cmd in self.PUBLIC_COMMANDS:
+            return call_ftp_command(cmd)
+
+        elif self.state == self.UNAUTH:
+            if cmd == 'USER':
+                return self.ftp_USER(*params)
+            elif cmd == 'PASS':
+                return BAD_CMD_SEQ, "USER required before PASS"
+            else:
+                return NOT_LOGGED_IN
+
+        elif self.state == self.INAUTH:
+            if cmd == 'PASS':
+                return self.ftp_PASS(*params)
+            else:
+                return BAD_CMD_SEQ, "PASS required after USER"
+
+        elif self.state == self.AUTHED:
+            return call_ftp_command(cmd)
+
+        elif self.state == self.RENAMING:
+            if cmd == 'RNTO':
+                return self.ftp_RNTO(*params)
+            else:
+                return BAD_CMD_SEQ, "RNTO required after RNFR"
+
+
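+    # Once a session is AUTHED, processCommand dispatches "FOO arg" to
+    # self.ftp_FOO("arg"), so a subclass can add verbs simply by defining
+    # ftp_* methods.  A hypothetical sketch (NoOpFTP is not an upstream
+    # class):
+    #
+    #     class NoOpFTP(FTP):
+    #         def ftp_NOOP(self):
+    #             return (CMD_OK,)
+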
+    def getDTPPort(self, factory):
+        """
+        Return a port for passive access, using C{self.passivePortRange}
+        attribute.
+        """
+        for portn in self.passivePortRange:
+            try:
+                dtpPort = self.listenFactory(portn, factory)
+            except error.CannotListenError:
+                continue
+            else:
+                return dtpPort
+        raise error.CannotListenError('', portn,
+            "No port available in range %s" %
+            (self.passivePortRange,))
+
+
+    def ftp_USER(self, username):
+        """
+        First part of login.  Get the username the peer wants to
+        authenticate as.
+        """
+        if not username:
+            return defer.fail(CmdSyntaxError('USER requires an argument'))
+
+        self._user = username
+        self.state = self.INAUTH
+        if self.factory.allowAnonymous and self._user == self.factory.userAnonymous:
+            return GUEST_NAME_OK_NEED_EMAIL
+        else:
+            return (USR_NAME_OK_NEED_PASS, username)
+
+    # TODO: add max auth try before timeout from ip...
+    # TODO: need to implement minimal ABOR command
+
+    def ftp_PASS(self, password):
+        """
+        Second part of login.  Get the password the peer wants to
+        authenticate with.
+        """
+        if self.factory.allowAnonymous and self._user == self.factory.userAnonymous:
+            # anonymous login
+            creds = credentials.Anonymous()
+            reply = GUEST_LOGGED_IN_PROCEED
+        else:
+            # user login
+            creds = credentials.UsernamePassword(self._user, password)
+            reply = USR_LOGGED_IN_PROCEED
+        del self._user
+
+        def _cbLogin((interface, avatar, logout)):
+            assert interface is IFTPShell, "The realm is busted, jerk."
+            self.shell = avatar
+            self.logout = logout
+            self.workingDirectory = []
+            self.state = self.AUTHED
+            return reply
+
+        def _ebLogin(failure):
+            failure.trap(cred_error.UnauthorizedLogin, cred_error.UnhandledCredentials)
+            self.state = self.UNAUTH
+            raise AuthorizationError
+
+        d = self.portal.login(creds, None, IFTPShell)
+        d.addCallbacks(_cbLogin, _ebLogin)
+        return d
+
+
+    def ftp_PASV(self):
+        """
+        Request for a passive connection
+
+        from the rfc::
+
+            This command requests the server-DTP to \"listen\" on a data port
+            (which is not its default data port) and to wait for a connection
+            rather than initiate one upon receipt of a transfer command.  The
+            response to this command includes the host and port address this
+            server is listening on.
+        """
+        # if we have a DTP port set up, lose it.
+        if self.dtpFactory is not None:
+            # cleanupDTP sets dtpFactory to none.  Later we'll do
+            # cleanup here or something.
+            self.cleanupDTP()
+        self.dtpFactory = DTPFactory(pi=self)
+        self.dtpFactory.setTimeout(self.dtpTimeout)
+        self.dtpPort = self.getDTPPort(self.dtpFactory)
+
+        host = self.transport.getHost().host
+        port = self.dtpPort.getHost().port
+        self.reply(ENTERING_PASV_MODE, encodeHostPort(host, port))
+        return self.dtpFactory.deferred.addCallback(lambda ign: None)
+
+
+    def ftp_PORT(self, address):
+        addr = map(int, address.split(','))
+        ip = '%d.%d.%d.%d' % tuple(addr[:4])
+        port = addr[4] << 8 | addr[5]
+
+        # if we have a DTP port set up, lose it.
+        if self.dtpFactory is not None:
+            self.cleanupDTP()
+
+        self.dtpFactory = DTPFactory(pi=self, peerHost=self.transport.getPeer().host)
+        self.dtpFactory.setTimeout(self.dtpTimeout)
+        self.dtpPort = reactor.connectTCP(ip, port, self.dtpFactory)
+
+        def connected(ignored):
+            return ENTERING_PORT_MODE
+        def connFailed(err):
+            err.trap(PortConnectionError)
+            return CANT_OPEN_DATA_CNX
+        return self.dtpFactory.deferred.addCallbacks(connected, connFailed)
+
+
+    def ftp_LIST(self, path=''):
+        """ This command causes a list to be sent from the server to the
+        passive DTP.  If the pathname specifies a directory or other
+        group of files, the server should transfer a list of files
+        in the specified directory.  If the pathname specifies a
+        file then the server should send current information on the
+        file.  A null argument implies the user's current working or
+        default directory.
+        """
+        # For now, require an established data connection before LIST.
+        if self.dtpInstance is None or not self.dtpInstance.isConnected:
+            return defer.fail(BadCmdSequenceError('must send PORT or PASV before LIST'))
+
+        # bug in konqueror
+        if path == "-a":
+            path = ''
+        # bug in gFTP 2.0.15
+        if path == "-aL":
+            path = ''
+        # bug in Nautilus 2.10.0
+        if path == "-L":
+            path = ''
+        # bug in ange-ftp
+        if path == "-la":
+            path = ''
+
+        def gotListing(results):
+            self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
+            for (name, attrs) in results:
+                self.dtpInstance.sendListResponse(name, attrs)
+            self.dtpInstance.transport.loseConnection()
+            return (TXFR_COMPLETE_OK,)
+
+        try:
+            segments = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+
+        d = self.shell.list(
+            segments,
+            ('size', 'directory', 'permissions', 'hardlinks',
+             'modified', 'owner', 'group'))
+        d.addCallback(gotListing)
+        return d
+
+
+    def ftp_NLST(self, path):
+        """
+        This command causes a directory listing to be sent from the server to
+        the client. The pathname should specify a directory or other
+        system-specific file group descriptor. An empty path implies the current
+        working directory. If the path is non-existent, send nothing. If the
+        path is to a file, send only the file name.
+
+        @type path: C{str}
+        @param path: The path for which a directory listing should be returned.
+
+        @rtype: L{Deferred}
+        @return: a L{Deferred} which will be fired when the listing request
+            is finished.
+        """
+        # XXX: why is this check different from ftp_RETR/ftp_STOR? See #4180
+        if self.dtpInstance is None or not self.dtpInstance.isConnected:
+            return defer.fail(
+                BadCmdSequenceError('must send PORT or PASV before NLST'))
+
+        try:
+            segments = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+
+        def cbList(results):
+            """
+            Send, line by line, each file in the directory listing, and then
+            close the connection.
+
+            @type results: A C{list} of C{tuple}. The first element of each
+                C{tuple} is a C{str} and the second element is a C{list}.
+            @param results: The names of the files in the directory.
+
+            @rtype: C{tuple}
+            @return: A C{tuple} containing the status code for a successful
+                transfer.
+            """
+            self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
+            for (name, ignored) in results:
+                self.dtpInstance.sendLine(name)
+            self.dtpInstance.transport.loseConnection()
+            return (TXFR_COMPLETE_OK,)
+
+        def cbGlob(results):
+            self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
+            for (name, ignored) in results:
+                if fnmatch.fnmatch(name, segments[-1]):
+                    self.dtpInstance.sendLine(name)
+            self.dtpInstance.transport.loseConnection()
+            return (TXFR_COMPLETE_OK,)
+
+        def listErr(results):
+            """
+            RFC 959 specifies that an NLST request may only return directory
+            listings. Thus, send nothing and just close the connection.
+
+            @type results: L{Failure}
+            @param results: The L{Failure} wrapping a L{FileNotFoundError} that
+                occurred while trying to list the contents of a nonexistent
+                directory.
+
+            @rtype: C{tuple}
+            @return: A C{tuple} containing the status code for a successful
+                transfer.
+            """
+            self.dtpInstance.transport.loseConnection()
+            return (TXFR_COMPLETE_OK,)
+
+        # XXX This globbing may be incomplete: see #4181
+        if segments and (
+            '*' in segments[-1] or '?' in segments[-1] or
+            ('[' in segments[-1] and ']' in segments[-1])):
+            d = self.shell.list(segments[:-1])
+            d.addCallback(cbGlob)
+        else:
+            d = self.shell.list(segments)
+            d.addCallback(cbList)
+            # self.shell.list will generate an error if the path is invalid
+            d.addErrback(listErr)
+        return d
+
+
+    def ftp_CWD(self, path):
+        try:
+            segments = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            # XXX Eh, what to fail with here?
+            return defer.fail(FileNotFoundError(path))
+
+        def accessGranted(result):
+            self.workingDirectory = segments
+            return (REQ_FILE_ACTN_COMPLETED_OK,)
+
+        return self.shell.access(segments).addCallback(accessGranted)
+
+
+    def ftp_CDUP(self):
+        return self.ftp_CWD('..')
+
+
+    def ftp_PWD(self):
+        return (PWD_REPLY, '/' + '/'.join(self.workingDirectory))
+
+
+    def ftp_RETR(self, path):
+        """
+        This command causes the content of a file to be sent over the data
+        transfer channel. If the path is to a directory, an error will be raised.
+
+        @type path: C{str}
+        @param path: The path to the file which should be transferred over the
+        data transfer channel.
+
+        @rtype: L{Deferred}
+        @return: a L{Deferred} which will be fired when the transfer is done.
+        """
+        if self.dtpInstance is None:
+            raise BadCmdSequenceError('PORT or PASV required before RETR')
+
+        try:
+            newsegs = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+
+        # XXX For now, just disable the timeout.  Later we'll want to
+        # leave it active and have the DTP connection reset it
+        # periodically.
+        self.setTimeout(None)
+
+        # Put it back later
+        def enableTimeout(result):
+            self.setTimeout(self.factory.timeOut)
+            return result
+
+        # Choose the data consumer: wrap the DTP instance in an ASCII
+        # converter unless we are in binary (image) mode.
+        if not self.binary:
+            cons = ASCIIConsumerWrapper(self.dtpInstance)
+        else:
+            cons = self.dtpInstance
+
+        def cbSent(result):
+            return (TXFR_COMPLETE_OK,)
+
+        def ebSent(err):
+            log.msg("Unexpected error attempting to transmit file to client:")
+            log.err(err)
+            if err.check(FTPCmdError):
+                return err
+            return (CNX_CLOSED_TXFR_ABORTED,)
+
+        def cbOpened(file):
+            # Tell the client whether the data connection is already open,
+            # or is about to be opened.
+            if self.dtpInstance.isConnected:
+                self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
+            else:
+                self.reply(FILE_STATUS_OK_OPEN_DATA_CNX)
+
+            d = file.send(cons)
+            d.addCallbacks(cbSent, ebSent)
+            return d
+
+        def ebOpened(err):
+            if not err.check(PermissionDeniedError, FileNotFoundError, IsADirectoryError):
+                log.msg("Unexpected error attempting to open file for transmission:")
+                log.err(err)
+            if err.check(FTPCmdError):
+                return (err.value.errorCode, '/'.join(newsegs))
+            return (FILE_NOT_FOUND, '/'.join(newsegs))
+
+        d = self.shell.openForReading(newsegs)
+        d.addCallbacks(cbOpened, ebOpened)
+        d.addBoth(enableTimeout)
+
+        # Pass back Deferred that fires when the transfer is done
+        return d
+
+
+    def ftp_STOR(self, path):
+        if self.dtpInstance is None:
+            raise BadCmdSequenceError('PORT or PASV required before STOR')
+
+        try:
+            newsegs = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+
+        # XXX For now, just disable the timeout.  Later we'll want to
+        # leave it active and have the DTP connection reset it
+        # periodically.
+        self.setTimeout(None)
+
+        # Put it back later
+        def enableTimeout(result):
+            self.setTimeout(self.factory.timeOut)
+            return result
+
+        def cbSent(result):
+            return (TXFR_COMPLETE_OK,)
+
+        def ebSent(err):
+            log.msg("Unexpected error receiving file from client:")
+            log.err(err)
+            if err.check(FTPCmdError):
+                return err
+            return (CNX_CLOSED_TXFR_ABORTED,)
+
+        def cbConsumer(cons):
+            if not self.binary:
+                cons = ASCIIConsumerWrapper(cons)
+
+            d = self.dtpInstance.registerConsumer(cons)
+
+            # Tell the client whether the data connection is already open,
+            # or is about to be opened.
+            if self.dtpInstance.isConnected:
+                self.reply(DATA_CNX_ALREADY_OPEN_START_XFR)
+            else:
+                self.reply(FILE_STATUS_OK_OPEN_DATA_CNX)
+
+            return d
+
+        def cbOpened(file):
+            d = file.receive()
+            d.addCallback(cbConsumer)
+            d.addCallback(lambda ignored: file.close())
+            d.addCallbacks(cbSent, ebSent)
+            return d
+
+        def ebOpened(err):
+            if not err.check(PermissionDeniedError, FileNotFoundError, IsNotADirectoryError):
+                log.msg("Unexpected error attempting to open file for upload:")
+                log.err(err)
+            if isinstance(err.value, FTPCmdError):
+                return (err.value.errorCode, '/'.join(newsegs))
+            return (FILE_NOT_FOUND, '/'.join(newsegs))
+
+        d = self.shell.openForWriting(newsegs)
+        d.addCallbacks(cbOpened, ebOpened)
+        d.addBoth(enableTimeout)
+
+        # Pass back Deferred that fires when the transfer is done
+        return d
+
+
+    def ftp_SIZE(self, path):
+        """
+        File SIZE
+
+        The FTP command, SIZE OF FILE (SIZE), is used to obtain the transfer
+        size of a file from the server-FTP process.  This is the exact number
+        of octets (8 bit bytes) that would be transmitted over the data
+        connection should that file be transmitted.  This value will change
+        depending on the current STRUcture, MODE, and TYPE of the data
+        connection or of a data connection that would be created were one
+        created now.  Thus, the result of the SIZE command is dependent on
+        the currently established STRU, MODE, and TYPE parameters.
+
+        The SIZE command returns how many octets would be transferred if the
+        file were to be transferred using the current transfer structure,
+        mode, and type.  This command is normally used in conjunction with
+        the RESTART (REST) command when STORing a file to a remote server in
+        STREAM mode, to determine the restart point.  The server-PI might
+        need to read the partially transferred file, do any appropriate
+        conversion, and count the number of octets that would be generated
+        when sending the file in order to correctly respond to this command.
+        Estimates of the file transfer size MUST NOT be returned; only
+        precise information is acceptable.
+
+        http://tools.ietf.org/html/rfc3659
+        """
+        try:
+            newsegs = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+
+        def cbStat((size,)):
+            return (FILE_STATUS, str(size))
+
+        return self.shell.stat(newsegs, ('size',)).addCallback(cbStat)
+
+
+    def ftp_MDTM(self, path):
+        """
+        File Modification Time (MDTM)
+
+        The FTP command, MODIFICATION TIME (MDTM), can be used to determine
+        when a file in the server NVFS was last modified.  This command has
+        existed in many FTP servers for many years, as an adjunct to the REST
+        command for STREAM mode, thus is widely available.  However, where
+        supported, the "modify" fact that can be provided in the result from
+        the new MLST command is recommended as a superior alternative.
+
+        http://tools.ietf.org/html/rfc3659
+        """
+        try:
+            newsegs = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+
+        def cbStat((modified,)):
+            return (FILE_STATUS, time.strftime('%Y%m%d%H%M%S', time.gmtime(modified)))
+
+        return self.shell.stat(newsegs, ('modified',)).addCallback(cbStat)
+
+
+    def ftp_TYPE(self, type):
+        """
+        REPRESENTATION TYPE (TYPE)
+
+        The argument specifies the representation type as described
+        in the Section on Data Representation and Storage.  Several
+        types take a second parameter.  The first parameter is
+        denoted by a single Telnet character, as is the second
+        Format parameter for ASCII and EBCDIC; the second parameter
+        for local byte is a decimal integer to indicate Bytesize.
+        The parameters are separated by a <SP> (Space, ASCII code
+        32).
+        """
+        p = type.upper()
+        if p:
+            f = getattr(self, 'type_' + p[0], None)
+            if f is not None:
+                return f(p[1:])
+            return self.type_UNKNOWN(p)
+        return (SYNTAX_ERR,)
+
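+    # Dispatch examples for ftp_TYPE above: "TYPE I" calls type_I('') and
+    # selects binary mode, "TYPE A" calls type_A('') and selects ASCII mode,
+    # and any unrecognized representation type is rejected via type_UNKNOWN.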
+    def type_A(self, code):
+        if code == '' or code == 'N':
+            self.binary = False
+            return (TYPE_SET_OK, 'A' + code)
+        else:
+            return defer.fail(CmdArgSyntaxError(code))
+
+    def type_I(self, code):
+        if code == '':
+            self.binary = True
+            return (TYPE_SET_OK, 'I')
+        else:
+            return defer.fail(CmdArgSyntaxError(code))
+
+    def type_UNKNOWN(self, code):
+        return defer.fail(CmdNotImplementedForArgError(code))
+
+
+
+    def ftp_SYST(self):
+        return NAME_SYS_TYPE
+
+
+    def ftp_STRU(self, structure):
+        p = structure.upper()
+        if p == 'F':
+            return (CMD_OK,)
+        return defer.fail(CmdNotImplementedForArgError(structure))
+
+
+    def ftp_MODE(self, mode):
+        p = mode.upper()
+        if p == 'S':
+            return (CMD_OK,)
+        return defer.fail(CmdNotImplementedForArgError(mode))
+
+
+    def ftp_MKD(self, path):
+        try:
+            newsegs = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+        return self.shell.makeDirectory(newsegs).addCallback(lambda ign: (MKD_REPLY, path))
+
+
+    def ftp_RMD(self, path):
+        try:
+            newsegs = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+        return self.shell.removeDirectory(newsegs).addCallback(lambda ign: (REQ_FILE_ACTN_COMPLETED_OK,))
+
+
+    def ftp_DELE(self, path):
+        try:
+            newsegs = toSegments(self.workingDirectory, path)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(path))
+        return self.shell.removeFile(newsegs).addCallback(lambda ign: (REQ_FILE_ACTN_COMPLETED_OK,))
+
+
+    def ftp_NOOP(self):
+        return (CMD_OK,)
+
+
+    def ftp_RNFR(self, fromName):
+        self._fromName = fromName
+        self.state = self.RENAMING
+        return (REQ_FILE_ACTN_PENDING_FURTHER_INFO,)
+
+
+    def ftp_RNTO(self, toName):
+        fromName = self._fromName
+        del self._fromName
+        self.state = self.AUTHED
+
+        try:
+            fromsegs = toSegments(self.workingDirectory, fromName)
+            tosegs = toSegments(self.workingDirectory, toName)
+        except InvalidPath:
+            return defer.fail(FileNotFoundError(fromName))
+        return self.shell.rename(fromsegs, tosegs).addCallback(lambda ign: (REQ_FILE_ACTN_COMPLETED_OK,))
+
+
+    def ftp_FEAT(self):
+        """
+        Advertise the features supported by the server.
+
+        http://tools.ietf.org/html/rfc2389
+        """
+        self.sendLine(RESPONSE[FEAT_OK][0])
+        for feature in self.FEATURES:
+            self.sendLine(' ' + feature)
+        self.sendLine(RESPONSE[FEAT_OK][1])
+
+    def ftp_OPTS(self, option):
+        """
+        Handle OPTS command.
+
+        http://tools.ietf.org/html/draft-ietf-ftpext-utf-8-option-00
+        """
+        return self.reply(OPTS_NOT_IMPLEMENTED, option)
+
+    def ftp_QUIT(self):
+        self.reply(GOODBYE_MSG)
+        self.transport.loseConnection()
+        self.disconnected = True
+
+    def cleanupDTP(self):
+        """
+        Clean up the DTP state: stop the DTP port or connector, stop the
+        DTP factory, and discard any DTP protocol instance.
+        """
+        log.msg('cleanupDTP', debug=True)
+
+        log.msg(self.dtpPort)
+        dtpPort, self.dtpPort = self.dtpPort, None
+        if interfaces.IListeningPort.providedBy(dtpPort):
+            dtpPort.stopListening()
+        elif interfaces.IConnector.providedBy(dtpPort):
+            dtpPort.disconnect()
+        else:
+            assert False, "dtpPort should be an IListeningPort or IConnector, instead is %r" % (dtpPort,)
+
+        self.dtpFactory.stopFactory()
+        self.dtpFactory = None
+
+        if self.dtpInstance is not None:
+            self.dtpInstance = None
+
+
+class FTPFactory(policies.LimitTotalConnectionsFactory):
+    """
+    A factory for producing FTP protocol instances.
+
+    @ivar timeOut: the protocol interpreter's idle timeout, in seconds;
+        defaults to 600 seconds.
+
+    @ivar passivePortRange: value forwarded to C{protocol.passivePortRange}.
+    @type passivePortRange: C{iterator}
+    """
+    protocol = FTP
+    overflowProtocol = FTPOverflowProtocol
+    allowAnonymous = True
+    userAnonymous = 'anonymous'
+    timeOut = 600
+
+    welcomeMessage = "Twisted %s FTP Server" % (copyright.version,)
+
+    passivePortRange = xrange(0, 1)
+
+    def __init__(self, portal=None, userAnonymous='anonymous'):
+        self.portal = portal
+        self.userAnonymous = userAnonymous
+        self.instances = []
+
+    def buildProtocol(self, addr):
+        p = policies.LimitTotalConnectionsFactory.buildProtocol(self, addr)
+        if p is not None:
+            p.wrappedProtocol.portal = self.portal
+            p.wrappedProtocol.timeOut = self.timeOut
+            p.wrappedProtocol.passivePortRange = self.passivePortRange
+        return p
+
+    def stopFactory(self):
+        # Make sure the FTP instances' timeouts are set to None to avoid
+        # reactor complaints about pending delayed calls.
+        [p.setTimeout(None) for p in self.instances if p.timeOut is not None]
+        policies.LimitTotalConnectionsFactory.stopFactory(self)
+
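+# Example of wiring the factory into a running server (an illustrative
+# sketch, not part of this module; the root path and port number below are
+# placeholders):
+#
+#     from twisted.cred.portal import Portal
+#     from twisted.cred.checkers import AllowAnonymousAccess
+#     from twisted.internet import reactor
+#
+#     portal = Portal(FTPRealm('/srv/ftp'), [AllowAnonymousAccess()])
+#     reactor.listenTCP(2121, FTPFactory(portal))
+#     reactor.run()
+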
+# -- Cred Objects --
+
+
+class IFTPShell(Interface):
+    """
+    An abstraction of the shell commands used by the FTP protocol for
+    a given user account.
+
+    All path names must be absolute.
+    """
+
+    def makeDirectory(path):
+        """
+        Create a directory.
+
+        @param path: The path, as a list of segments, to create
+        @type path: C{list} of C{unicode}
+
+        @return: A Deferred which fires when the directory has been
+        created, or which fails if the directory cannot be created.
+        """
+
+
+    def removeDirectory(path):
+        """
+        Remove a directory.
+
+        @param path: The path, as a list of segments, to remove
+        @type path: C{list} of C{unicode}
+
+        @return: A Deferred which fires when the directory has been
+        removed, or which fails if the directory cannot be removed.
+        """
+
+
+    def removeFile(path):
+        """
+        Remove a file.
+
+        @param path: The path, as a list of segments, to remove
+        @type path: C{list} of C{unicode}
+
+        @return: A Deferred which fires when the file has been
+        removed, or which fails if the file cannot be removed.
+        """
+
+
+    def rename(fromPath, toPath):
+        """
+        Rename a file or directory.
+
+        @param fromPath: The current name of the path.
+        @type fromPath: C{list} of C{unicode}
+
+        @param toPath: The desired new name of the path.
+        @type toPath: C{list} of C{unicode}
+
+        @return: A Deferred which fires when the path has been
+        renamed, or which fails if the path cannot be renamed.
+        """
+
+
+    def access(path):
+        """
+        Determine whether access to the given path is allowed.
+
+        @param path: The path, as a list of segments
+
+        @return: A Deferred which fires with None if access is allowed
+        or which fails with a specific exception type if access is
+        denied.
+        """
+
+
+    def stat(path, keys=()):
+        """
+        Retrieve information about the given path.
+
+        This is like list, except it will never return results about
+        child paths.
+        """
+
+
+    def list(path, keys=()):
+        """
+        Retrieve information about the given path.
+
+        If the path represents a non-directory, the result list should
+        have only one entry with information about that non-directory.
+        Otherwise, the result list should have an element for each
+        child of the directory.
+
+        @param path: The path, as a list of segments, to list
+        @type path: C{list} of C{unicode}
+
+        @param keys: A tuple of keys desired in the resulting
+        dictionaries.
+
+        @return: A Deferred which fires with a list of (name, list),
+        where the name is the name of the entry as a unicode string
+        and each list contains values corresponding to the requested
+        keys.  The following are possible elements of keys, and the
+        values which should be returned for them:
+
+            - C{'size'}: size in bytes, as an integer (this key is
+              effectively required)
+
+            - C{'directory'}: a boolean indicating whether this entry is a
+              directory
+
+            - C{'permissions'}: an integer permission bitmask (as in the
+              C{st_mode} field of C{os.stat} results)
+
+            - C{'hardlinks'}: the number of hard links to this entry
+
+            - C{'modified'}: the time the entry was last modified, as
+              seconds since the epoch
+
+            - C{'owner'}: a string naming the user that owns this entry
+
+            - C{'group'}: a string naming the group that owns this entry
+        """
+
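+    # For illustration only (how a typical provider might respond; this is
+    # an assumption, not a requirement of the interface): a call such as
+    #   shell.list(['pub'], ('size', 'directory'))
+    # could fire with
+    #   [('README', [1024, False]), ('incoming', [4096, True])]
+    # i.e. one (name, values) pair per entry, with the values ordered to
+    # match the requested keys.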
+
+    def openForReading(path):
+        """
+        @param path: The path, as a list of segments, to open
+        @type path: C{list} of C{unicode}
+
+        @rtype: C{Deferred} which will fire with L{IReadFile}
+        """
+
+
+    def openForWriting(path):
+        """
+        @param path: The path, as a list of segments, to open
+        @type path: C{list} of C{unicode}
+
+        @rtype: C{Deferred} which will fire with L{IWriteFile}
+        """
+
+
+
+class IReadFile(Interface):
+    """
+    A file out of which bytes may be read.
+    """
+
+    def send(consumer):
+        """
+        Produce the contents of the given path to the given consumer.  This
+        method may only be invoked once on each provider.
+
+        @type consumer: C{IConsumer}
+
+        @return: A Deferred which fires when the file has been
+        consumed completely.
+        """
+
+
+
+class IWriteFile(Interface):
+    """
+    A file into which bytes may be written.
+    """
+
+    def receive():
+        """
+        Create a consumer which will write to this file.  This method may
+        only be invoked once on each provider.
+
+        @rtype: C{Deferred} of C{IConsumer}
+        """
+
+    def close():
+        """
+        Perform any post-write work that needs to be done. This method may
+        only be invoked once on each provider, and will always be invoked
+        after receive().
+
+        @rtype: C{Deferred} of anything: the value is ignored. The FTP client
+        will not see their upload request complete until this Deferred has
+        been fired.
+        """
+
+def _getgroups(uid):
+    """
+    Return the primary and supplementary groups for the given UID.
+
+    @type uid: C{int}
+    """
+    result = []
+    pwent = pwd.getpwuid(uid)
+
+    result.append(pwent.pw_gid)
+
+    for grent in grp.getgrall():
+        if pwent.pw_name in grent.gr_mem:
+            result.append(grent.gr_gid)
+
+    return result
+
+
+def _testPermissions(uid, gid, spath, mode='r'):
+    """
+    Check whether the given UID has permission to access C{spath} with the
+    given mode.
+
+    @type uid: C{int}
+    @param uid: numeric user id
+
+    @type gid: C{int}
+    @param gid: numeric group id
+
+    @type spath: C{str}
+    @param spath: the path on the server to test
+
+    @type mode: C{str}
+    @param mode: 'r' or 'w' (read or write)
+
+    @rtype: C{bool}
+    @return: True if the given credentials have the specified form of
+        access to the given path
+    """
+    if mode == 'r':
+        usr = stat.S_IRUSR
+        grp = stat.S_IRGRP
+        oth = stat.S_IROTH
+        amode = os.R_OK
+    elif mode == 'w':
+        usr = stat.S_IWUSR
+        grp = stat.S_IWGRP
+        oth = stat.S_IWOTH
+        amode = os.W_OK
+    else:
+        raise ValueError("Invalid mode %r: must specify 'r' or 'w'" % (mode,))
+
+    access = False
+    if os.path.exists(spath):
+        if uid == 0:
+            access = True
+        else:
+            s = os.stat(spath)
+            if usr & s.st_mode and uid == s.st_uid:
+                access = True
+            elif grp & s.st_mode and gid in _getgroups(uid):
+                access = True
+            elif oth & s.st_mode:
+                access = True
+
+    if access:
+        if not os.access(spath, amode):
+            access = False
+            log.msg("Filesystem grants permission to UID %d but it is inaccessible to me running as UID %d" % (
+                uid, os.getuid()))
+    return access
+
+
+
+class FTPAnonymousShell(object):
+    """
+    An anonymous implementation of L{IFTPShell}.
+
+    @type filesystemRoot: L{twisted.python.filepath.FilePath}
+    @ivar filesystemRoot: The path which is considered the root of
+    this shell.
+    """
+    implements(IFTPShell)
+
+    def __init__(self, filesystemRoot):
+        self.filesystemRoot = filesystemRoot
+
+
+    def _path(self, path):
+        return reduce(filepath.FilePath.child, path, self.filesystemRoot)
+
+
+    def makeDirectory(self, path):
+        return defer.fail(AnonUserDeniedError())
+
+
+    def removeDirectory(self, path):
+        return defer.fail(AnonUserDeniedError())
+
+
+    def removeFile(self, path):
+        return defer.fail(AnonUserDeniedError())
+
+
+    def rename(self, fromPath, toPath):
+        return defer.fail(AnonUserDeniedError())
+
+
+    def receive(self, path):
+        path = self._path(path)
+        return defer.fail(AnonUserDeniedError())
+
+
+    def openForReading(self, path):
+        """
+        Open C{path} for reading.
+
+        @param path: The path, as a list of segments, to open.
+        @type path: C{list} of C{unicode}
+        @return: A L{Deferred} is returned that will fire with an object
+            implementing L{IReadFile} if the file is successfully opened.  If
+            C{path} is a directory, or if an exception is raised while trying
+            to open the file, the L{Deferred} will fire with an error.
+        """
+        p = self._path(path)
+        if p.isdir():
+            # Normally, we would only check for EISDIR in open, but win32
+            # returns EACCES in this case, so we check before
+            return defer.fail(IsADirectoryError(path))
+        try:
+            f = p.open('r')
+        except (IOError, OSError), e:
+            return errnoToFailure(e.errno, path)
+        except:
+            return defer.fail()
+        else:
+            return defer.succeed(_FileReader(f))
+
+
+    def openForWriting(self, path):
+        """
+        Reject write attempts by anonymous users with
+        L{PermissionDeniedError}.
+        """
+        return defer.fail(PermissionDeniedError("STOR not allowed"))
+
+
+    def access(self, path):
+        p = self._path(path)
+        if not p.exists():
+            # Again, win32 doesn't report a useful error in this case, so
+            # fail early if we can
+            return defer.fail(FileNotFoundError(path))
+        # For now, just see if we can os.listdir() it
+        try:
+            p.listdir()
+        except (IOError, OSError), e:
+            return errnoToFailure(e.errno, path)
+        except:
+            return defer.fail()
+        else:
+            return defer.succeed(None)
+
+
+    def stat(self, path, keys=()):
+        p = self._path(path)
+        if p.isdir():
+            try:
+                statResult = self._statNode(p, keys)
+            except (IOError, OSError), e:
+                return errnoToFailure(e.errno, path)
+            except:
+                return defer.fail()
+            else:
+                return defer.succeed(statResult)
+        else:
+            return self.list(path, keys).addCallback(lambda res: res[0][1])
+
+
+    def list(self, path, keys=()):
+        """
+        Return the list of files at the given C{path}, adding the stat
+        information requested by C{keys} if specified.
+
+        @param path: the directory or file to check.
+        @type path: C{list} of C{unicode}
+
+        @param keys: the list of desired metadata
+        @type keys: C{list} of C{str}
+        """
+        filePath = self._path(path)
+        if filePath.isdir():
+            entries = filePath.listdir()
+            fileEntries = [filePath.child(p) for p in entries]
+        elif filePath.isfile():
+            entries = [os.path.join(*filePath.segmentsFrom(self.filesystemRoot))]
+            fileEntries = [filePath]
+        else:
+            return defer.fail(FileNotFoundError(path))
+
+        results = []
+        for fileName, filePath in zip(entries, fileEntries):
+            ent = []
+            results.append((fileName, ent))
+            if keys:
+                try:
+                    ent.extend(self._statNode(filePath, keys))
+                except (IOError, OSError), e:
+                    return errnoToFailure(e.errno, fileName)
+                except:
+                    return defer.fail()
+
+        return defer.succeed(results)
+
+
+    def _statNode(self, filePath, keys):
+        """
+        Shortcut method to get stat info on a node.
+
+        @param filePath: the node to stat.
+        @type filePath: C{filepath.FilePath}
+
+        @param keys: the stat keys to get.
+        @type keys: C{iterable}
+        """
+        filePath.restat()
+        return [getattr(self, '_stat_' + k)(filePath.statinfo) for k in keys]
+
+    _stat_size = operator.attrgetter('st_size')
+    _stat_permissions = operator.attrgetter('st_mode')
+    _stat_hardlinks = operator.attrgetter('st_nlink')
+    _stat_modified = operator.attrgetter('st_mtime')
+
+
+    def _stat_owner(self, st):
+        if pwd is not None:
+            try:
+                return pwd.getpwuid(st.st_uid)[0]
+            except KeyError:
+                pass
+        return str(st.st_uid)
+
+
+    def _stat_group(self, st):
+        if grp is not None:
+            try:
+                return grp.getgrgid(st.st_gid)[0]
+            except KeyError:
+                pass
+        return str(st.st_gid)
+
+
+    def _stat_directory(self, st):
+        # Use S_ISDIR rather than a raw bitmask test against S_IFDIR, since
+        # other file types (e.g. sockets) share bits with S_IFDIR.
+        return stat.S_ISDIR(st.st_mode)
+
+
+
+class _FileReader(object):
+    implements(IReadFile)
+
+    def __init__(self, fObj):
+        self.fObj = fObj
+        self._send = False
+
+    def _close(self, passthrough):
+        self._send = True
+        self.fObj.close()
+        return passthrough
+
+    def send(self, consumer):
+        assert not self._send, "Can only call IReadFile.send *once* per instance"
+        self._send = True
+        d = basic.FileSender().beginFileTransfer(self.fObj, consumer)
+        d.addBoth(self._close)
+        return d
+
+
+
+class FTPShell(FTPAnonymousShell):
+    """
+    An authenticated implementation of L{IFTPShell}.
+    """
+
+    def makeDirectory(self, path):
+        p = self._path(path)
+        try:
+            p.makedirs()
+        except (IOError, OSError), e:
+            return errnoToFailure(e.errno, path)
+        except:
+            return defer.fail()
+        else:
+            return defer.succeed(None)
+
+
+    def removeDirectory(self, path):
+        p = self._path(path)
+        if p.isfile():
+            # Win32 returns the wrong errno when rmdir is called on a file
+            # instead of a directory, so since we have the information here,
+            # fail early with a pertinent error
+            return defer.fail(IsNotADirectoryError(path))
+        try:
+            os.rmdir(p.path)
+        except (IOError, OSError), e:
+            return errnoToFailure(e.errno, path)
+        except:
+            return defer.fail()
+        else:
+            return defer.succeed(None)
+
+
+    def removeFile(self, path):
+        p = self._path(path)
+        if p.isdir():
+            # Win32 returns the wrong errno when remove is called on a
+            # directory instead of a file, so since we have the information
+            # here, fail early with a pertinent error
+            return defer.fail(IsADirectoryError(path))
+        try:
+            p.remove()
+        except (IOError, OSError), e:
+            return errnoToFailure(e.errno, path)
+        except:
+            return defer.fail()
+        else:
+            return defer.succeed(None)
+
+
+    def rename(self, fromPath, toPath):
+        fp = self._path(fromPath)
+        tp = self._path(toPath)
+        try:
+            os.rename(fp.path, tp.path)
+        except (IOError, OSError), e:
+            return errnoToFailure(e.errno, fromPath)
+        except:
+            return defer.fail()
+        else:
+            return defer.succeed(None)
+
+
+    def openForWriting(self, path):
+        """
+        Open C{path} for writing.
+
+        @param path: The path, as a list of segments, to open.
+        @type path: C{list} of C{unicode}
+        @return: A L{Deferred} is returned that will fire with an object
+            implementing L{IWriteFile} if the file is successfully opened.  If
+            C{path} is a directory, or if an exception is raised while trying
+            to open the file, the L{Deferred} will fire with an error.
+        """
+        p = self._path(path)
+        if p.isdir():
+            # Normally, we would only check for EISDIR in open, but win32
+            # returns EACCES in this case, so we check before
+            return defer.fail(IsADirectoryError(path))
+        try:
+            fObj = p.open('w')
+        except (IOError, OSError), e:
+            return errnoToFailure(e.errno, path)
+        except:
+            return defer.fail()
+        return defer.succeed(_FileWriter(fObj))
+
+
+
+class _FileWriter(object):
+    implements(IWriteFile)
+
+    def __init__(self, fObj):
+        self.fObj = fObj
+        self._receive = False
+
+    def receive(self):
+        assert not self._receive, "Can only call IWriteFile.receive *once* per instance"
+        self._receive = True
+        # FileConsumer will close the file object
+        return defer.succeed(FileConsumer(self.fObj))
+
+    def close(self):
+        return defer.succeed(None)
+
+
+
+class BaseFTPRealm:
+    """
+    Base class for simple FTP realms which provides an easy hook for specifying
+    the home directory for each user.
+    """
+    implements(portal.IRealm)
+
+    def __init__(self, anonymousRoot):
+        self.anonymousRoot = filepath.FilePath(anonymousRoot)
+
+
+    def getHomeDirectory(self, avatarId):
+        """
+        Return a L{FilePath} representing the home directory of the given
+        avatar.  Override this in a subclass.
+
+        @param avatarId: A user identifier returned from a credentials checker.
+        @type avatarId: C{str}
+
+        @rtype: L{FilePath}
+        """
+        raise NotImplementedError(
+            "%r did not override getHomeDirectory" % (self.__class__,))
+
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        for iface in interfaces:
+            if iface is IFTPShell:
+                if avatarId is checkers.ANONYMOUS:
+                    avatar = FTPAnonymousShell(self.anonymousRoot)
+                else:
+                    avatar = FTPShell(self.getHomeDirectory(avatarId))
+                return (IFTPShell, avatar,
+                        getattr(avatar, 'logout', lambda: None))
+        raise NotImplementedError(
+            "Only IFTPShell interface is supported by this realm")
+
+
+
+class FTPRealm(BaseFTPRealm):
+    """
+    @type anonymousRoot: L{twisted.python.filepath.FilePath}
+    @ivar anonymousRoot: Root of the filesystem to which anonymous
+        users will be granted access.
+
+    @type userHome: L{filepath.FilePath}
+    @ivar userHome: Root of the filesystem containing user home directories.
+    """
+    def __init__(self, anonymousRoot, userHome='/home'):
+        BaseFTPRealm.__init__(self, anonymousRoot)
+        self.userHome = filepath.FilePath(userHome)
+
+
+    def getHomeDirectory(self, avatarId):
+        """
+        Use C{avatarId} as a single path segment to construct a child of
+        C{self.userHome} and return that child.
+        """
+        return self.userHome.child(avatarId)
+
+
+
+class SystemFTPRealm(BaseFTPRealm):
+    """
+    L{SystemFTPRealm} uses system user account information to decide what the
+    home directory for a particular avatarId is.
+
+    This works on POSIX but probably is not reliable on Windows.
+    """
+    def getHomeDirectory(self, avatarId):
+        """
+        Return the system-defined home directory of the system user account with
+        the name C{avatarId}.
+        """
+        path = os.path.expanduser('~' + avatarId)
+        if path.startswith('~'):
+            raise cred_error.UnauthorizedLogin()
+        return filepath.FilePath(path)
+
+
+
+# --- FTP CLIENT  -------------------------------------------------------------
+
+####
+# And now for the client...
+
+# Notes:
+#   * Reference: http://cr.yp.to/ftp.html
+#   * FIXME: Does not support pipelining (which is not supported by all
+#     servers anyway).  This isn't a functionality limitation, just a
+#     small performance issue.
+#   * Only has a rudimentary understanding of FTP response codes (although
+#     the full response is passed to the caller if they so choose).
+#   * Assumes that USER and PASS should always be sent
+#   * Always sets TYPE I  (binary mode)
+#   * Doesn't understand any of the weird, obscure TELNET stuff (\377...)
+#   * FIXME: Doesn't share any code with the FTPServer
+
+class ConnectionLost(FTPError):
+    pass
+
+class CommandFailed(FTPError):
+    pass
+
+class BadResponse(FTPError):
+    pass
+
+class UnexpectedResponse(FTPError):
+    pass
+
+class UnexpectedData(FTPError):
+    pass
+
+class FTPCommand:
+    def __init__(self, text=None, public=0):
+        self.text = text
+        self.deferred = defer.Deferred()
+        self.ready = 1
+        self.public = public
+        self.transferDeferred = None
+
+    def fail(self, failure):
+        if self.public:
+            self.deferred.errback(failure)
+
+
+class ProtocolWrapper(protocol.Protocol):
+    def __init__(self, original, deferred):
+        self.original = original
+        self.deferred = deferred
+    def makeConnection(self, transport):
+        self.original.makeConnection(transport)
+    def dataReceived(self, data):
+        self.original.dataReceived(data)
+    def connectionLost(self, reason):
+        self.original.connectionLost(reason)
+        # Signal that transfer has completed
+        self.deferred.callback(None)
+
+
+
+class IFinishableConsumer(interfaces.IConsumer):
+    """
+    A Consumer for producers that finish.
+
+    @since: 11.0
+    """
+
+    def finish():
+        """
+        The producer has finished producing.
+        """
+
+
+
+class SenderProtocol(protocol.Protocol):
+    implements(IFinishableConsumer)
+
+    def __init__(self):
+        # Fired upon connection
+        self.connectedDeferred = defer.Deferred()
+
+        # Fired upon disconnection
+        self.deferred = defer.Deferred()
+
+    # Protocol stuff
+    def dataReceived(self, data):
+        raise UnexpectedData(
+            "Received data from the server on a "
+            "send-only data-connection"
+        )
+
+    def makeConnection(self, transport):
+        protocol.Protocol.makeConnection(self, transport)
+        self.connectedDeferred.callback(self)
+
+    def connectionLost(self, reason):
+        if reason.check(error.ConnectionDone):
+            self.deferred.callback('connection done')
+        else:
+            self.deferred.errback(reason)
+
+    # IFinishableConsumer stuff
+    def write(self, data):
+        self.transport.write(data)
+
+    def registerProducer(self, producer, streaming):
+        """
+        Register the given producer with our transport.
+        """
+        self.transport.registerProducer(producer, streaming)
+
+    def unregisterProducer(self):
+        """
+        Unregister the previously registered producer.
+        """
+        self.transport.unregisterProducer()
+
+    def finish(self):
+        self.transport.loseConnection()
+
+
+def decodeHostPort(line):
+    """
+    Decode an FTP response specifying a host and port.
+
+    @return: a 2-tuple of (host, port).
+    """
+    abcdef = re.sub('[^0-9, ]', '', line)
+    parsed = [int(p.strip()) for p in abcdef.split(',')]
+    for x in parsed:
+        if x < 0 or x > 255:
+            raise ValueError("Out of range", line, x)
+    a, b, c, d, e, f = parsed
+    host = "%s.%s.%s.%s" % (a, b, c, d)
+    port = (int(e) << 8) + int(f)
+    return host, port
+
+def encodeHostPort(host, port):
+    numbers = host.split('.') + [str(port >> 8), str(port % 256)]
+    return ','.join(numbers)
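+
+# Worked example of the host/port encoding shared by PASV and PORT:
+# encodeHostPort('127.0.0.1', 1025) returns '127,0,0,1,4,1' (since
+# 1025 == 4 * 256 + 1), and decodeHostPort('127,0,0,1,4,1') returns
+# ('127.0.0.1', 1025).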
+
+def _unwrapFirstError(failure):
+    failure.trap(defer.FirstError)
+    return failure.value.subFailure
+
+class FTPDataPortFactory(protocol.ServerFactory):
+    """
+    Factory for data connections that use the PORT command
+
+    (i.e. "active" transfers)
+    """
+    noisy = 0
+    def buildProtocol(self, addr):
+        # This is a bit hackish -- we already have a Protocol instance,
+        # so just return it instead of making a new one
+        # FIXME: Reject connections from the wrong address/port
+        #        (potential security problem)
+        self.protocol.factory = self
+        self.port.loseConnection()
+        return self.protocol
+
+
+class FTPClientBasic(basic.LineReceiver):
+    """
+    Foundations of an FTP client.
+    """
+    debug = False
+
+    def __init__(self):
+        self.actionQueue = []
+        self.greeting = None
+        self.nextDeferred = defer.Deferred().addCallback(self._cb_greeting)
+        self.nextDeferred.addErrback(self.fail)
+        self.response = []
+        self._failed = 0
+
+    def fail(self, error):
+        """
+        Give an error to any queued deferreds.
+        """
+        self._fail(error)
+
+    def _fail(self, error):
+        """
+        Errback all queued deferreds.
+        """
+        if self._failed:
+            # We're recursing; bail out here for simplicity
+            return error
+        self._failed = 1
+        if self.nextDeferred:
+            try:
+                self.nextDeferred.errback(failure.Failure(ConnectionLost('FTP connection lost', error)))
+            except defer.AlreadyCalledError:
+                pass
+        for ftpCommand in self.actionQueue:
+            ftpCommand.fail(failure.Failure(ConnectionLost('FTP connection lost', error)))
+        return error
+
+    def _cb_greeting(self, greeting):
+        self.greeting = greeting
+
+    def sendLine(self, line):
+        """
+        (Private) Sends a line, unless line is None.
+        """
+        if line is None:
+            return
+        basic.LineReceiver.sendLine(self, line)
+
+    def sendNextCommand(self):
+        """
+        (Private) Processes the next command in the queue.
+        """
+        ftpCommand = self.popCommandQueue()
+        if ftpCommand is None:
+            self.nextDeferred = None
+            return
+        if not ftpCommand.ready:
+            self.actionQueue.insert(0, ftpCommand)
+            reactor.callLater(1.0, self.sendNextCommand)
+            self.nextDeferred = None
+            return
+
+        # FIXME: this if block doesn't belong in FTPClientBasic, it belongs in
+        #        FTPClient.
+        if ftpCommand.text == 'PORT':
+            self.generatePortCommand(ftpCommand)
+
+        if self.debug:
+            log.msg('<-- %s' % ftpCommand.text)
+        self.nextDeferred = ftpCommand.deferred
+        self.sendLine(ftpCommand.text)
+
+    def queueCommand(self, ftpCommand):
+        """
+        Add an FTPCommand object to the queue.
+
+        If it's the only thing in the queue, and we are connected and we aren't
+        waiting for a response to an earlier command, the command will be sent
+        immediately.
+
+        @param ftpCommand: an L{FTPCommand}
+        """
+        self.actionQueue.append(ftpCommand)
+        if (len(self.actionQueue) == 1 and self.transport is not None and
+            self.nextDeferred is None):
+            self.sendNextCommand()
+
+    def queueStringCommand(self, command, public=1):
+        """
+        Queues a string to be issued as an FTP command
+
+        @param command: string of an FTP command to queue
+        @param public: a flag intended for internal use by FTPClient.  Don't
+            change it unless you know what you're doing.
+
+        @return: a L{Deferred} that will be called when the response to the
+            command has been received.
+        """
+        ftpCommand = FTPCommand(command, public)
+        self.queueCommand(ftpCommand)
+        return ftpCommand.deferred
+
+    def popCommandQueue(self):
+        """
+        Return the front element of the command queue, or None if empty.
+        """
+        if self.actionQueue:
+            return self.actionQueue.pop(0)
+        else:
+            return None
+
+    def queueLogin(self, username, password):
+        """
+        Login: send the username, send the password.
+
+        If the password is C{None}, the PASS command won't be sent.  Also, if
+        the response to the USER command has a response code of 230 (User logged
+        in), then PASS won't be sent either.
+        """
+        # Prepare the USER command
+        deferreds = []
+        userDeferred = self.queueStringCommand('USER ' + username, public=0)
+        deferreds.append(userDeferred)
+
+        # Prepare the PASS command (if a password is given)
+        if password is not None:
+            passwordCmd = FTPCommand('PASS ' + password, public=0)
+            self.queueCommand(passwordCmd)
+            deferreds.append(passwordCmd.deferred)
+
+            # Avoid sending PASS if the response to USER is 230.
+            # (ref: http://cr.yp.to/ftp/user.html#user)
+            def cancelPasswordIfNotNeeded(response):
+                if response[0].startswith('230'):
+                    # No password needed!
+                    self.actionQueue.remove(passwordCmd)
+                return response
+            userDeferred.addCallback(cancelPasswordIfNotNeeded)
+
+        # Error handling.
+        for deferred in deferreds:
+            # If something goes wrong, call fail
+            deferred.addErrback(self.fail)
+            # But also swallow the error, so we don't cause spurious errors
+            deferred.addErrback(lambda x: None)
+
+    def lineReceived(self, line):
+        """
+        (Private) Parses the response messages from the FTP server.
+        """
+        # Add this line to the current response
+        if self.debug:
+            log.msg('--> %s' % line)
+        self.response.append(line)
+
+        # Bail out if this isn't the last line of a response
+        # The last line of response starts with 3 digits followed by a space
+        codeIsValid = re.match(r'\d{3} ', line)
+        if not codeIsValid:
+            return
+
+        code = line[0:3]
+
+        # Ignore marks
+        if code[0] == '1':
+            return
+
+        # Check that we were expecting a response
+        if self.nextDeferred is None:
+            self.fail(UnexpectedResponse(self.response))
+            return
+
+        # Reset the response
+        response = self.response
+        self.response = []
+
+        # Look for a success or error code, and call the appropriate callback
+        if code[0] in ('2', '3'):
+            # Success
+            self.nextDeferred.callback(response)
+        elif code[0] in ('4', '5'):
+            # Failure
+            self.nextDeferred.errback(failure.Failure(CommandFailed(response)))
+        else:
+            # This shouldn't happen unless something screwed up.
+            log.msg('Server sent invalid response code %s' % (code,))
+            self.nextDeferred.errback(failure.Failure(BadResponse(response)))
+
+        # Run the next command
+        self.sendNextCommand()
+
+    def connectionLost(self, reason):
+        self._fail(reason)
+
+
+
+class _PassiveConnectionFactory(protocol.ClientFactory):
+    noisy = False
+
+    def __init__(self, protoInstance):
+        self.protoInstance = protoInstance
+
+    def buildProtocol(self, ignored):
+        self.protoInstance.factory = self
+        return self.protoInstance
+
+    def clientConnectionFailed(self, connector, reason):
+        e = FTPError('Connection Failed', reason)
+        self.protoInstance.deferred.errback(e)
+
+
+
+class FTPClient(FTPClientBasic):
+    """
+    L{FTPClient} is a client implementation of the FTP protocol which
+    exposes FTP commands as methods which return L{Deferred}s.
+
+    Each command method returns a L{Deferred} which is called back when a
+    successful response code (2xx or 3xx) is received from the server or
+    which is errbacked if an error response code (4xx or 5xx) is received
+    from the server or if a protocol violation occurs.  If an error response
+    code is received, the L{Deferred} fires with a L{Failure} wrapping a
+    L{CommandFailed} instance.  The L{CommandFailed} instance is created
+    with a list of the response lines received from the server.
+
+    See U{RFC 959<http://www.ietf.org/rfc/rfc959.txt>} for error code
+    definitions.
+
+    Both active and passive transfers are supported.
+
+    @ivar passive: See description in __init__.
+    """
+    connectFactory = reactor.connectTCP
+
+    def __init__(self, username='anonymous',
+                 password='twisted@twistedmatrix.com',
+                 passive=1):
+        """
+        Constructor.
+
+        I will log in as soon as I receive the welcome message from the server.
+
+        @param username: FTP username
+        @param password: FTP password
+        @param passive: flag that controls if I use active or passive data
+            connections.  You can also change this after construction by
+            assigning to C{self.passive}.
+        """
+        FTPClientBasic.__init__(self)
+        self.queueLogin(username, password)
+
+        self.passive = passive
+
+    def fail(self, error):
+        """
+        Disconnect, and also give an error to any queued deferreds.
+        """
+        self.transport.loseConnection()
+        self._fail(error)
+
+    def receiveFromConnection(self, commands, protocol):
+        """
+        Retrieves a file or listing generated by the given command,
+        feeding it to the given protocol.
+
+        @param commands: list of strings of FTP commands to execute then receive
+            the results of (e.g. C{LIST}, C{RETR})
+        @param protocol: A L{Protocol} B{instance} e.g. an
+            L{FTPFileListProtocol}, or something that can be adapted to one.
+            Typically this will be an L{IConsumer} implementation.
+
+        @return: L{Deferred}.
+        """
+        protocol = interfaces.IProtocol(protocol)
+        wrapper = ProtocolWrapper(protocol, defer.Deferred())
+        return self._openDataConnection(commands, wrapper)
+
+    def queueLogin(self, username, password):
+        """
+        Login: send the username, send the password, and
+        set retrieval mode to binary
+        """
+        FTPClientBasic.queueLogin(self, username, password)
+        d = self.queueStringCommand('TYPE I', public=0)
+        # If something goes wrong, call fail
+        d.addErrback(self.fail)
+        # But also swallow the error, so we don't cause spurious errors
+        d.addErrback(lambda x: None)
+
+    def sendToConnection(self, commands):
+        """
+        Open a data connection for sending data to the server with the
+        given commands (e.g. C{STOR}).
+
+        @return: A tuple of two L{Deferred}s:
+                  - L{Deferred} L{IFinishableConsumer}. You must call
+                    the C{finish} method on the IFinishableConsumer when the file
+                    is completely transferred.
+                  - L{Deferred} list of control-connection responses.
+        """
+        s = SenderProtocol()
+        r = self._openDataConnection(commands, s)
+        return (s.connectedDeferred, r)
+
+    def _openDataConnection(self, commands, protocol):
+        """
+        Open a data connection (passive or active, depending on
+        C{self.passive}), queue the given commands, and return a
+        L{DeferredList} that fires once the commands have completed and the
+        data connection has been closed.
+        """
+        cmds = [FTPCommand(command, public=1) for command in commands]
+        cmdsDeferred = defer.DeferredList([cmd.deferred for cmd in cmds],
+                                    fireOnOneErrback=True, consumeErrors=True)
+        cmdsDeferred.addErrback(_unwrapFirstError)
+
+        if self.passive:
+            # Hack: use a mutable object to sneak a variable out of the
+            # scope of doPassive
+            _mutable = [None]
+            def doPassive(response):
+                """Connect to the port specified in the response to PASV"""
+                host, port = decodeHostPort(response[-1][4:])
+
+                f = _PassiveConnectionFactory(protocol)
+                _mutable[0] = self.connectFactory(host, port, f)
+
+            pasvCmd = FTPCommand('PASV')
+            self.queueCommand(pasvCmd)
+            pasvCmd.deferred.addCallback(doPassive).addErrback(self.fail)
+
+            results = [cmdsDeferred, pasvCmd.deferred, protocol.deferred]
+            d = defer.DeferredList(results, fireOnOneErrback=True, consumeErrors=True)
+            d.addErrback(_unwrapFirstError)
+
+            # Ensure the connection is always closed
+            def close(x, m=_mutable):
+                m[0] and m[0].disconnect()
+                return x
+            d.addBoth(close)
+
+        else:
+            # We just place a marker command in the queue, and will fill in
+            # the host and port numbers later (see generatePortCommand)
+            portCmd = FTPCommand('PORT')
+
+            # Ok, now we jump through a few hoops here.
+            # This is the problem: a transfer is not to be trusted as complete
+            # until we get both the "226 Transfer complete" message on the
+            # control connection, and the data socket is closed.  Thus, we use
+            # a DeferredList to make sure we only fire the callback at the
+            # right time.
+
+            portCmd.transferDeferred = protocol.deferred
+            portCmd.protocol = protocol
+            portCmd.deferred.addErrback(portCmd.transferDeferred.errback)
+            self.queueCommand(portCmd)
+
+            # Create dummy functions for the next callback to call.
+            # These will also be replaced with real functions in
+            # generatePortCommand.
+            portCmd.loseConnection = lambda result: result
+            portCmd.fail = lambda error: error
+
+            # Ensure that the connection always gets closed
+            cmdsDeferred.addErrback(lambda e, pc=portCmd: pc.fail(e) or e)
+
+            results = [cmdsDeferred, portCmd.deferred, portCmd.transferDeferred]
+            d = defer.DeferredList(results, fireOnOneErrback=True, consumeErrors=True)
+            d.addErrback(_unwrapFirstError)
+
+        for cmd in cmds:
+            self.queueCommand(cmd)
+        return d
+
+    def generatePortCommand(self, portCmd):
+        """
+        (Private) Generates the text of a given PORT command.
+        """
+
+        # The problem is that we don't create the listening port until we need
+        # it for various reasons, and so we have to muck about to figure out
+        # what interface and port it's listening on, and then finally we can
+        # create the text of the PORT command to send to the FTP server.
+
+        # FIXME: This method is far too ugly.
+
+        # FIXME: The best solution is probably to only create the data port
+        #        once per FTPClient, and just recycle it for each new download.
+        #        This should be ok, because we don't pipeline commands.
+
+        # Start listening on a port
+        factory = FTPDataPortFactory()
+        factory.protocol = portCmd.protocol
+        listener = reactor.listenTCP(0, factory)
+        factory.port = listener
+
+        # Ensure we close the listening port if something goes wrong
+        def listenerFail(error, listener=listener):
+            if listener.connected:
+                listener.loseConnection()
+            return error
+        portCmd.fail = listenerFail
+
+        # Construct crufty FTP magic numbers that represent host & port
+        host = self.transport.getHost().host
+        port = listener.getHost().port
+        portCmd.text = 'PORT ' + encodeHostPort(host, port)
+
+    def escapePath(self, path):
+        """
+        Returns an FTP-escaped path (newlines replaced with null bytes).
+        """
+        # Escape newline characters
+        return path.replace('\n', '\0')
+
+    def retrieveFile(self, path, protocol, offset=0):
+        """
+        Retrieve a file from the given path
+
+        This method issues the 'RETR' FTP command.
+
+        The file is fed into the given Protocol instance.  The data connection
+        will be passive if self.passive is set.
+
+        @param path: path to file that you wish to receive.
+        @param protocol: a L{Protocol} instance.
+        @param offset: offset to start downloading from
+
+        @return: L{Deferred}
+        """
+        cmds = ['RETR ' + self.escapePath(path)]
+        if offset:
+            cmds.insert(0, ('REST ' + str(offset)))
+        return self.receiveFromConnection(cmds, protocol)
+
+    retr = retrieveFile
+
+    def storeFile(self, path, offset=0):
+        """
+        Store a file at the given path.
+
+        This method issues the 'STOR' FTP command.
+
+        @return: A tuple of two L{Deferred}s:
+                  - L{Deferred} L{IFinishableConsumer}. You must call
+                    the C{finish} method on the IFinishableConsumer when the file
+                    is completely transferred.
+                  - L{Deferred} list of control-connection responses.
+        """
+        cmds = ['STOR ' + self.escapePath(path)]
+        if offset:
+            cmds.insert(0, ('REST ' + str(offset)))
+        return self.sendToConnection(cmds)
+
+    stor = storeFile
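+
+    # Editor-added sketch (not upstream code): a typical upload pairs
+    # storeFile() with basic.FileSender, pushing local data into the
+    # IFinishableConsumer and calling finish() once the transfer completes.
+    # 'ftpClient' stands for an already-connected FTPClient instance.
+    #
+    #     dConsumer, dResponses = ftpClient.storeFile('remote.txt')
+    #
+    #     def cbStore(consumer, fObj=open('local.txt', 'rb')):
+    #         sender = basic.FileSender()
+    #         d = sender.beginFileTransfer(fObj, consumer)
+    #         return d.addCallback(lambda ignored: consumer.finish())
+    #
+    #     dConsumer.addCallback(cbStore)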
+
+
+    def rename(self, pathFrom, pathTo):
+        """
+        Rename a file.
+
+        This method issues the I{RNFR}/I{RNTO} command sequence to rename
+        C{pathFrom} to C{pathTo}.
+
+        @param pathFrom: the absolute path to the file to be renamed
+        @type pathFrom: C{str}
+
+        @param pathTo: the absolute path to rename the file to.
+        @type pathTo: C{str}
+
+        @return: A L{Deferred} which fires when the rename operation has
+            succeeded or failed.  If it succeeds, the L{Deferred} is called
+            back with a two-tuple of lists.  The first list contains the
+            responses to the I{RNFR} command.  The second list contains the
+            responses to the I{RNTO} command.  If either I{RNFR} or I{RNTO}
+            fails, the L{Deferred} is errbacked with L{CommandFailed} or
+            L{BadResponse}.
+        @rtype: L{Deferred}
+
+        @since: 8.2
+        """
+        renameFrom = self.queueStringCommand('RNFR ' + self.escapePath(pathFrom))
+        renameTo = self.queueStringCommand('RNTO ' + self.escapePath(pathTo))
+
+        fromResponse = []
+
+        # Use a separate Deferred for the ultimate result so that Deferred
+        # chaining can't interfere with its result.
+        result = defer.Deferred()
+        # Bundle up all the responses
+        result.addCallback(lambda toResponse: (fromResponse, toResponse))
+
+        def ebFrom(failure):
+            # Make sure the RNTO doesn't run if the RNFR failed.
+            self.popCommandQueue()
+            result.errback(failure)
+
+        # Save the RNFR response to pass to the result Deferred later
+        renameFrom.addCallbacks(fromResponse.extend, ebFrom)
+
+        # Hook up the RNTO to the result Deferred as well
+        renameTo.chainDeferred(result)
+
+        return result
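+
+    # Editor-added sketch (not upstream code): the two response lists arrive
+    # as a single tuple, so a caller can unpack them like this ('ftpClient'
+    # is an assumed, connected FTPClient instance):
+    #
+    #     d = ftpClient.rename('old-name.txt', 'new-name.txt')
+    #     d.addCallback(lambda (rnfrLines, rntoLines): rntoLines[-1])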
+
+
+    def list(self, path, protocol):
+        """
+        Retrieve a file listing into the given protocol instance.
+
+        This method issues the 'LIST' FTP command.
+
+        @param path: path to get a file listing for.
+        @param protocol: a L{Protocol} instance, probably a
+            L{FTPFileListProtocol} instance.  It can cope with most common file
+            listing formats.
+
+        @return: L{Deferred}
+        """
+        if path is None:
+            path = ''
+        return self.receiveFromConnection(['LIST ' + self.escapePath(path)], protocol)
+
+
+    def nlst(self, path, protocol):
+        """
+        Retrieve a short file listing into the given protocol instance.
+
+        This method issues the 'NLST' FTP command.
+
+        NLST (should) return a list of filenames, one per line.
+
+        @param path: path to get short file listing for.
+        @param protocol: a L{Protocol} instance.
+        """
+        if path is None:
+            path = ''
+        return self.receiveFromConnection(['NLST ' + self.escapePath(path)], protocol)
+
+
+    def cwd(self, path):
+        """
+        Issues the CWD (Change Working Directory) command. It's also
+        available as changeDirectory, which parses the result.
+
+        @return: a L{Deferred} that will be called when done.
+        """
+        return self.queueStringCommand('CWD ' + self.escapePath(path))
+
+
+    def changeDirectory(self, path):
+        """
+        Change the directory on the server and parse the result to determine
+        if it was successful or not.
+
+        @type path: C{str}
+        @param path: The path to which to change.
+
+        @return: a L{Deferred} which will be called back when the directory
+            change has succeeded or errbacked if an error occurs.
+        """
+        warnings.warn(
+            "FTPClient.changeDirectory is deprecated in Twisted 8.2 and "
+            "newer.  Use FTPClient.cwd instead.",
+            category=DeprecationWarning,
+            stacklevel=2)
+
+        def cbResult(result):
+            if result[-1][:3] != '250':
+                return failure.Failure(CommandFailed(result))
+            return True
+        return self.cwd(path).addCallback(cbResult)
+
+
+    def makeDirectory(self, path):
+        """
+        Make a directory
+
+        This method issues the MKD command.
+
+        @param path: The path to the directory to create.
+        @type path: C{str}
+
+        @return: A L{Deferred} which fires when the server responds.  If the
+            directory is created, the L{Deferred} is called back with the
+            server response.  If the server response indicates the directory
+            was not created, the L{Deferred} is errbacked with a L{Failure}
+            wrapping L{CommandFailed} or L{BadResponse}.
+        @rtype: L{Deferred}
+
+        @since: 8.2
+        """
+        return self.queueStringCommand('MKD ' + self.escapePath(path))
+
+
+    def removeFile(self, path):
+        """
+        Delete a file on the server.
+
+        L{removeFile} issues a I{DELE} command to the server to remove the
+        indicated file.  Note that this command cannot remove a directory.
+
+        @param path: The path to the file to delete. May be relative to the
+            current dir.
+        @type path: C{str}
+
+        @return: A L{Deferred} which fires when the server responds.  On error,
+            it is errbacked with either L{CommandFailed} or L{BadResponse}.  On
+            success, it is called back with a list of response lines.
+        @rtype: L{Deferred}
+
+        @since: 8.2
+        """
+        return self.queueStringCommand('DELE ' + self.escapePath(path))
+
+
+    def removeDirectory(self, path):
+        """
+        Delete a directory on the server.
+
+        L{removeDirectory} issues a I{RMD} command to the server to remove the
+        indicated directory. Described in RFC959.
+
+        @param path: The path to the directory to delete. May be relative to
+            the current working directory.
+        @type path: C{str}
+
+        @return: A L{Deferred} which fires when the server responds. On error,
+            it is errbacked with either L{CommandFailed} or L{BadResponse}. On
+            success, it is called back with a list of response lines.
+        @rtype: L{Deferred}
+
+        @since: 11.1
+        """
+        return self.queueStringCommand('RMD ' + self.escapePath(path))
+
+
+    def cdup(self):
+        """
+        Issues the CDUP (Change Directory UP) command.
+
+        @return: a L{Deferred} that will be called when done.
+        """
+        return self.queueStringCommand('CDUP')
+
+
+    def pwd(self):
+        """
+        Issues the PWD (Print Working Directory) command.
+
+        L{getDirectory} does the same job but automatically parses the
+        result.
+
+        @return: a L{Deferred} that will be called when done.  It is up to the
+            caller to interpret the response, but the L{parsePWDResponse} method
+            in this module should work.
+        """
+        return self.queueStringCommand('PWD')
+
+
+    def getDirectory(self):
+        """
+        Returns the current remote directory.
+
+        @return: a L{Deferred} that will be called back with a C{str} giving
+            the remote directory or which will errback with L{CommandFailed}
+            if an error response is returned.
+        """
+        def cbParse(result):
+            try:
+                # The only valid code is 257
+                if int(result[0].split(' ', 1)[0]) != 257:
+                    raise ValueError
+            except (IndexError, ValueError):
+                return failure.Failure(CommandFailed(result))
+            path = parsePWDResponse(result[0])
+            if path is None:
+                return failure.Failure(CommandFailed(result))
+            return path
+        return self.pwd().addCallback(cbParse)
+
+
+    def quit(self):
+        """
+        Issues the I{QUIT} command.
+
+        @return: A L{Deferred} that fires when the server acknowledges the
+            I{QUIT} command.  The transport should not be disconnected until
+            this L{Deferred} fires.
+        """
+        return self.queueStringCommand('QUIT')
+
+
+
+class FTPFileListProtocol(basic.LineReceiver):
+    """
+    Parser for standard FTP file listings
+
+    This is the evil required to match::
+
+        -rw-r--r--   1 root     other        531 Jan 29 03:26 README
+
+    If you need different evil for a wacky FTP server, you can
+    override either C{fileLinePattern} or C{parseDirectoryLine()}.
+
+    It populates the instance attribute self.files, which is a list containing
+    dicts with the following keys (examples from the above line):
+        - filetype:   e.g. 'd' for directories, or '-' for an ordinary file
+        - perms:      e.g. 'rw-r--r--'
+        - nlinks:     e.g. 1
+        - owner:      e.g. 'root'
+        - group:      e.g. 'other'
+        - size:       e.g. 531
+        - date:       e.g. 'Jan 29 03:26'
+        - filename:   e.g. 'README'
+        - linktarget: e.g. 'some/file'
+
+    Note that the 'date' value will be formatted differently depending on the
+    date.  Check U{http://cr.yp.to/ftp.html} if you really want to try to parse
+    it.
+
+    It also matches the following::
+        -rw-r--r--   1 root     other        531 Jan 29 03:26 I HAVE\ SPACE
+           - filename:   e.g. 'I HAVE SPACE'
+
+        -rw-r--r--   1 root     other        531 Jan 29 03:26 LINK -> TARGET
+           - filename:   e.g. 'LINK'
+           - linktarget: e.g. 'TARGET'
+
+        -rw-r--r--   1 root     other        531 Jan 29 03:26 N S -> L S
+           - filename:   e.g. 'N S'
+           - linktarget: e.g. 'L S'
+
+    @ivar files: list of dicts describing the files in this listing
+    """
+    fileLinePattern = re.compile(
+        r'^(?P<filetype>.)(?P<perms>.{9})\s+(?P<nlinks>\d*)\s*'
+        r'(?P<owner>\S+)\s+(?P<group>\S+)\s+(?P<size>\d+)\s+'
+        r'(?P<date>...\s+\d+\s+[\d:]+)\s+(?P<filename>.{1,}?)'
+        r'( -> (?P<linktarget>[^\r]*))?\r?$'
+    )
+    delimiter = '\n'
+
+    def __init__(self):
+        self.files = []
+
+    def lineReceived(self, line):
+        d = self.parseDirectoryLine(line)
+        if d is None:
+            self.unknownLine(line)
+        else:
+            self.addFile(d)
+
+    def parseDirectoryLine(self, line):
+        """
+        Return a dictionary of fields, or None if line cannot be parsed.
+
+        @param line: line of text expected to contain a directory entry
+        @type line: str
+
+        @return: dict
+        """
+        match = self.fileLinePattern.match(line)
+        if match is None:
+            return None
+        else:
+            d = match.groupdict()
+            d['filename'] = d['filename'].replace(r'\ ', ' ')
+            d['nlinks'] = int(d['nlinks'])
+            d['size'] = int(d['size'])
+            if d['linktarget']:
+                d['linktarget'] = d['linktarget'].replace(r'\ ', ' ')
+            return d
+
+    def addFile(self, info):
+        """
+        Append file information dictionary to the list of known files.
+
+        Subclasses can override or extend this method to handle file
+        information differently without affecting the parsing of data
+        from the server.
+
+        @param info: dictionary containing the parsed representation
+                     of the file information
+        @type info: dict
+        """
+        self.files.append(info)
+
+    def unknownLine(self, line):
+        """
+        Deal with received lines which could not be parsed as file
+        information.
+
+        Subclasses can override this to perform any special processing
+        needed.
+
+        @param line: unparsable line as received
+        @type line: str
+        """
+        pass
+
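+# Editor-added sketch (not part of upstream Twisted): FTPFileListProtocol is
+# normally handed to FTPClient.list(); once the returned Deferred fires, the
+# parsed entries are available on the protocol's C{files} attribute.
+# 'ftpClient' stands for an already-connected FTPClient instance.
+def _exampleListDirectory(ftpClient, path=''):
+    fileList = FTPFileListProtocol()
+    d = ftpClient.list(path, fileList)
+    # Each entry is a dict with 'filename', 'size', 'date', etc.
+    return d.addCallback(
+        lambda ignored: [f['filename'] for f in fileList.files])
+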
+def parsePWDResponse(response):
+    """
+    Returns the path from a response to a PWD command.
+
+    Responses typically look like::
+
+        257 "/home/andrew" is current directory.
+
+    For this example, I will return C{'/home/andrew'}.
+
+    If I can't find the path, I return C{None}.
+    """
+    match = re.search('"(.*)"', response)
+    if match:
+        return match.groups()[0]
+    else:
+        return None
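+
+# Editor-added example:
+#   parsePWDResponse('257 "/home/andrew" is current directory.')
+# returns '/home/andrew'; None is returned when no quoted path is found.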
diff --git a/ThirdParty/Twisted/twisted/protocols/gps/__init__.py b/ThirdParty/Twisted/twisted/protocols/gps/__init__.py
new file mode 100644
index 0000000..278648c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/gps/__init__.py
@@ -0,0 +1 @@
+"""Global Positioning System protocols."""
diff --git a/ThirdParty/Twisted/twisted/protocols/gps/nmea.py b/ThirdParty/Twisted/twisted/protocols/gps/nmea.py
new file mode 100644
index 0000000..71d37ea
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/gps/nmea.py
@@ -0,0 +1,209 @@
+# -*- test-case-name: twisted.test.test_nmea -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""NMEA 0183 implementation
+
+Maintainer: Bob Ippolito
+
+The following NMEA 0183 sentences are currently understood::
+    GPGGA (fix)
+    GPGLL (position)
+    GPRMC (position and time)
+    GPGSA (active satellites)
+ 
+The following NMEA 0183 sentences require implementation::
+    None really, the others aren't generally useful or implemented in most devices anyhow
+
+Other desired features::
+    - A NMEA 0183 producer to emulate GPS devices (?)
+"""
+
+import operator
+from twisted.protocols import basic
+from twisted.python.compat import reduce
+
+POSFIX_INVALID, POSFIX_SPS, POSFIX_DGPS, POSFIX_PPS = 0, 1, 2, 3
+MODE_AUTO, MODE_FORCED = 'A', 'M'
+MODE_NOFIX, MODE_2D, MODE_3D = 1, 2, 3
+
+class InvalidSentence(Exception):
+    pass
+
+class InvalidChecksum(Exception):
+    pass
+
+class NMEAReceiver(basic.LineReceiver):
+    """This parses most common NMEA-0183 messages, presumably from a serial GPS device at 4800 bps
+    """
+    delimiter = '\r\n'
+    dispatch = {
+        'GPGGA': 'fix',
+        'GPGLL': 'position',
+        'GPGSA': 'activesatellites',
+        'GPRMC': 'positiontime',
+        'GPGSV': 'viewsatellites',    # not implemented
+        'GPVTG': 'course',            # not implemented
+        'GPALM': 'almanac',           # not implemented
+        'GPGRS': 'range',             # not implemented
+        'GPGST': 'noise',             # not implemented
+        'GPMSS': 'beacon',            # not implemented
+        'GPZDA': 'time',              # not implemented
+    }
+    # generally you may miss the beginning of the first message
+    ignore_invalid_sentence = 1
+    # checksums shouldn't be invalid
+    ignore_checksum_mismatch = 0
+    # ignore unknown sentence types
+    ignore_unknown_sentencetypes = 0
+    # do we want to even bother checking to see if it's from the 20th century?
+    convert_dates_before_y2k = 1
+
+    def lineReceived(self, line):
+        if not line.startswith('$'):
+            if self.ignore_invalid_sentence:
+                return
+            raise InvalidSentence("%r does not begin with $" % (line,))
+        # message is everything between $ and *, checksum is xor of all ASCII values of the message
+        strmessage, checksum = line[1:].strip().split('*')
+        message = strmessage.split(',')
+        sentencetype, message = message[0], message[1:]
+        dispatch = self.dispatch.get(sentencetype, None)
+        if (not dispatch) and (not self.ignore_unknown_sentencetypes):
+            raise InvalidSentence("sentencetype %r" % (sentencetype,))
+        if not self.ignore_checksum_mismatch:
+            checksum, calculated_checksum = int(checksum, 16), reduce(operator.xor, map(ord, strmessage))
+            if checksum != calculated_checksum:
+                raise InvalidChecksum("Given 0x%02X != 0x%02X" % (checksum, calculated_checksum))
+        handler = getattr(self, "handle_%s" % dispatch, None)
+        decoder = getattr(self, "decode_%s" % dispatch, None)
+        if not (dispatch and handler and decoder):
+            # missing dispatch, handler, or decoder
+            return
+        # return handler(*decoder(*message))
+        try:
+            decoded = decoder(*message)
+        except Exception, e:
+            raise InvalidSentence("%r is not a valid %s (%s) sentence" % (line, sentencetype, dispatch))
+        return handler(*decoded)
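+
+    # Editor-added sketch (not upstream code): applications subclass
+    # NMEAReceiver and supply handle_* methods named after the dispatch
+    # table; each handler receives the positional values produced by the
+    # matching decode_* method, e.g.:
+    #
+    #     class PositionReceiver(NMEAReceiver):
+    #         def handle_position(self, latitude, longitude, utc, status):
+    #             self.lastPosition = (latitude, longitude, utc, status)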
+
+    def decode_position(self, latitude, ns, longitude, ew, utc, status):
+        latitude, longitude = self._decode_latlon(latitude, ns, longitude, ew)
+        utc = self._decode_utc(utc)
+        if status == 'A':
+            status = 1
+        else:
+            status = 0
+        return (
+            latitude,
+            longitude,
+            utc,
+            status,
+        )
+
+    def decode_positiontime(self, utc, status, latitude, ns, longitude, ew, speed, course, utcdate, magvar, magdir):
+        utc = self._decode_utc(utc)
+        latitude, longitude = self._decode_latlon(latitude, ns, longitude, ew)
+        if speed != '':
+            speed = float(speed)
+        else:
+            speed = None
+        if course != '':
+            course = float(course)
+        else:
+            course = None
+        utcdate = 2000+int(utcdate[4:6]), int(utcdate[2:4]), int(utcdate[0:2])
+        if self.convert_dates_before_y2k and utcdate[0] > 2073:
+            # GPS was invented by the US DoD in 1973, but NMEA uses 2 digit year.
+            # Highly unlikely that we'll be using NMEA or this twisted module in 70 years,
+            # but remotely possible that you'll be using it to play back data from the 20th century.
+            utcdate = (utcdate[0] - 100, utcdate[1], utcdate[2])
+        if magvar != '':
+            magvar = float(magvar)
+            if magdir == 'W':
+                magvar = -magvar
+        else:
+            magvar = None
+        return (
+            latitude,
+            longitude,
+            speed,
+            course,
+            # UTC seconds past utcdate
+            utc,
+            # UTC (year, month, day)
+            utcdate,
+            # None or magnetic variation in degrees (west is negative)
+            magvar,
+        )
+
+    def _decode_utc(self, utc):
+        utc_hh, utc_mm, utc_ss = map(float, (utc[:2], utc[2:4], utc[4:]))
+        return utc_hh * 3600.0 + utc_mm * 60.0 + utc_ss
+
+    def _decode_latlon(self, latitude, ns, longitude, ew):
+        latitude = float(latitude[:2]) + float(latitude[2:])/60.0
+        if ns == 'S':
+            latitude = -latitude
+        longitude = float(longitude[:3]) + float(longitude[3:])/60.0
+        if ew == 'W':
+            longitude = -longitude
+        return (latitude, longitude)
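+        # Editor-added note: NMEA encodes latitude as ddmm.mmmm and longitude
+        # as dddmm.mmmm, so ('3751.65', 'S', '14507.36', 'E') decodes to
+        # approximately (-37.8608, 145.1227) degrees.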
+
+    def decode_activesatellites(self, mode1, mode2, *args):
+        satellites, (pdop, hdop, vdop) = args[:12], map(float, args[12:])
+        satlist = []
+        for n in satellites:
+            if n:
+                satlist.append(int(n))
+            else:
+                satlist.append(None)
+        mode = (mode1, int(mode2))
+        return (
+            # satellite list by channel
+            tuple(satlist),
+            # (MODE_AUTO/MODE_FORCED, MODE_NOFIX/MODE_2DFIX/MODE_3DFIX)
+            mode,
+            # position dilution of precision
+            pdop,
+            # horizontal dilution of precision
+            hdop,
+            # vertical dilution of precision
+            vdop,
+        )
+    
+    def decode_fix(self, utc, latitude, ns, longitude, ew, posfix, satellites, hdop, altitude, altitude_units, geoid_separation, geoid_separation_units, dgps_age, dgps_station_id):
+        latitude, longitude = self._decode_latlon(latitude, ns, longitude, ew)
+        utc = self._decode_utc(utc)
+        posfix = int(posfix)
+        satellites = int(satellites)
+        hdop = float(hdop)
+        altitude = (float(altitude), altitude_units)
+        if geoid_separation != '':
+            geoid = (float(geoid_separation), geoid_separation_units)
+        else:
+            geoid = None
+        if dgps_age != '':
+            dgps = (float(dgps_age), dgps_station_id)
+        else:
+            dgps = None
+        return (
+            # seconds since 00:00 UTC
+            utc,                 
+            # latitude (degrees)
+            latitude,       
+            # longitude (degrees)
+            longitude,     
+            # position fix status (POSFIX_INVALID, POSFIX_SPS, POSFIX_DGPS, POSFIX_PPS)
+            posfix,           
+            # number of satellites used for fix 0 <= satellites <= 12 
+            satellites,   
+            # horizontal dilution of precision
+            hdop,               
+            # None or (altitude according to WGS-84 ellipsoid, units (typically 'M' for meters)) 
+            altitude,
+            # None or (geoid separation according to WGS-84 ellipsoid, units (typically 'M' for meters))
+            geoid,
+            # (age of dgps data in seconds, dgps station id)
+            dgps,
+        )
diff --git a/ThirdParty/Twisted/twisted/protocols/gps/rockwell.py b/ThirdParty/Twisted/twisted/protocols/gps/rockwell.py
new file mode 100644
index 0000000..7c1d2ad
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/gps/rockwell.py
@@ -0,0 +1,268 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+ 
+
+"""Rockwell Semiconductor Zodiac Serial Protocol
+Coded from official protocol specs (Order No. GPS-25, 09/24/1996, Revision 11)
+
+Maintainer: Bob Ippolito
+
+The following Rockwell Zodiac messages are currently understood::
+    EARTHA\\r\\n (a hack to "turn on" a DeLorme Earthmate)
+    1000 (Geodesic Position Status Output)
+    1002 (Channel Summary)
+    1003 (Visible Satellites)
+    1011 (Receiver ID)
+
+The following Rockwell Zodiac messages require implementation::
+    None really, the others aren't quite so useful and require bidirectional communication w/ the device
+
+Other desired features::
+    - Compatibility with the DeLorme Tripmate and other devices with this chipset (?)
+"""
+
+import struct, operator, math
+from twisted.internet import protocol
+from twisted.python import log
+
+DEBUG = 1
+
+class ZodiacParseError(ValueError):
+  pass
+
+class Zodiac(protocol.Protocol):
+  dispatch = {
+    # Output Messages (* means they get sent by the receiver by default periodically)
+    1000: 'fix',          # *Geodesic Position Status Output
+    1001: 'ecef',         # ECEF Position Status Output
+    1002: 'channels',     # *Channel Summary
+    1003: 'satellites',   # *Visible Satellites
+    1005: 'dgps',         # Differential GPS Status
+    1007: 'channelmeas',  # Channel Measurement
+    1011: 'id',           # *Receiver ID
+    1012: 'usersettings', # User-Settings Output
+    1100: 'testresults',  # Built-In Test Results
+    1102: 'meastimemark', # Measurement Time Mark
+    1108: 'utctimemark',  # UTC Time Mark Pulse Output
+    1130: 'serial',       # Serial Port Communication Parameters In Use
+    1135: 'eepromupdate', # EEPROM Update
+    1136: 'eepromstatus', # EEPROM Status
+  }
+  # these aren't used for anything yet, just sitting here for reference
+  messages = {
+    # Input Messages
+    'fix':      1200,     # Geodesic Position and Velocity Initialization
+    'udatum':   1210,     # User-Defined Datum Definition
+    'mdatum':   1211,     # Map Datum Select
+    'smask':    1212,     # Satellite Elevation Mask Control
+    'sselect':  1213,     # Satellite Candidate Select
+    'dgpsc':    1214,     # Differential GPS Control
+    'startc':   1216,     # Cold Start Control
+    'svalid':   1217,     # Solution Validity Control
+    'antenna':  1218,     # Antenna Type Select
+    'altinput': 1219,     # User-Entered Altitude Input
+    'appctl':   1220,     # Application Platform Control
+    'navcfg':   1221,     # Nav Configuration
+    'test':     1300,     # Perform Built-In Test Command
+    'restart':  1303,     # Restart Command
+    'serial':   1330,     # Serial Port Communications Parameters
+    'msgctl':   1331,     # Message Protocol Control
+    'dgpsd':    1351,     # Raw DGPS RTCM SC-104 Data
+  }  
+  MAX_LENGTH = 296
+  allow_earthmate_hack = 1
+  recvd = ""
+  
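+  # Editor-added summary (not upstream code): each Zodiac frame is a 10-byte
+  # little-endian header (sync 0x81FF, message id, data length in 16-bit
+  # words, ACK/NAK word, header checksum), optionally followed by length*2
+  # data bytes plus a 2-byte data checksum.  dataReceived buffers input until
+  # a complete frame is available, then dispatches it.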
+  def dataReceived(self, recd):
+    self.recvd = self.recvd + recd
+    while len(self.recvd) >= 10:
+
+      # hack for DeLorme EarthMate
+      if self.recvd[:8] == 'EARTHA\r\n':
+        if self.allow_earthmate_hack:
+          self.allow_earthmate_hack = 0
+          self.transport.write('EARTHA\r\n')
+        self.recvd = self.recvd[8:]
+        continue
+      
+      if self.recvd[0:2] != '\xFF\x81':
+        if DEBUG:
+          raise ZodiacParseError('Invalid Sync %r' % self.recvd)
+        else:
+          raise ZodiacParseError
+      sync, msg_id, length, acknak, checksum = struct.unpack('<HHHHh', self.recvd[:10])
+      
+      # verify checksum
+      cksum = -(reduce(operator.add, (sync, msg_id, length, acknak)) & 0xFFFF)
+      cksum, = struct.unpack('<h', struct.pack('<h', cksum))
+      if cksum != checksum:
+        if DEBUG:
+          raise ZodiacParseError('Invalid Header Checksum %r != %r %r' % (checksum, cksum, self.recvd[:8]))
+        else:
+          raise ZodiacParseError
+      
+      # length was in words, now it's bytes
+      length = length * 2
+
+      # do we need more data ?
+      neededBytes = 10
+      if length:
+        neededBytes += length + 2
+      if len(self.recvd) < neededBytes:
+        break
+      
+      if neededBytes > self.MAX_LENGTH:
+        raise ZodiacParseError("Invalid Header??")
+
+      # empty messages pass empty strings
+      message = ''
+
+      # does this message have data ?
+      if length:
+        message, checksum = self.recvd[10:10+length], struct.unpack('<h', self.recvd[10+length:neededBytes])[0]
+        cksum = 0x10000 - (reduce(operator.add, struct.unpack('<%dH' % (length/2), message)) & 0xFFFF)
+        cksum, = struct.unpack('<h', struct.pack('<h', cksum))
+        if cksum != checksum:
+          if DEBUG:
+            log.msg('msg_id = %r length = %r' % (msg_id, length), debug=True)
+            raise ZodiacParseError('Invalid Data Checksum %r != %r %r' % (checksum, cksum, message))
+          else:
+            raise ZodiacParseError
+      
+      # discard used buffer, dispatch message
+      self.recvd = self.recvd[neededBytes:]
+      self.receivedMessage(msg_id, message, acknak)
+  
+  def receivedMessage(self, msg_id, message, acknak):
+    dispatch = self.dispatch.get(msg_id, None)
+    if not dispatch:
+      raise ZodiacParseError('Unknown msg_id = %r' % msg_id)
+    handler = getattr(self, 'handle_%s' % dispatch, None)
+    decoder = getattr(self, 'decode_%s' % dispatch, None)
+    if not (handler and decoder):
+      # missing handler or decoder
+      #if DEBUG:
+      #  log.msg('MISSING HANDLER/DECODER PAIR FOR: %r' % (dispatch,), debug=True)
+      return
+    decoded = decoder(message)
+    return handler(*decoded)
+  
+  def decode_fix(self, message):
+    assert len(message) == 98, "Geodesic Position Status Output should be 55 words total (98 byte message)"
+    (ticks, msgseq, satseq, navstatus, navtype, nmeasure, polar, gpswk, gpses, gpsns, utcdy, utcmo, utcyr, utchr, utcmn, utcsc, utcns, latitude, longitude, height, geoidalsep, speed, course, magvar, climb, mapdatum, exhposerr, exvposerr, extimeerr, exphvelerr, clkbias, clkbiasdev, clkdrift, clkdriftdev) = struct.unpack('<LhhHHHHHLLHHHHHHLlllhLHhhHLLLHllll', message)
+
+    # There is a lot of data packed into this message; just pull out the
+    # important fields and return them in the same shape as the NMEA decoder.
+    utc = (utchr * 3600.0) + (utcmn * 60.0) + utcsc + (float(utcns) * 0.000000001)
+    
+    log.msg('utchr, utcmn, utcsc, utcns = ' + repr((utchr, utcmn, utcsc, utcns)), debug=True)
+    
+    latitude = float(latitude)   * 0.00000180 / math.pi
+    longitude = float(longitude) * 0.00000180 / math.pi
+    posfix = not (navstatus & 0x001c)
+    satellites = nmeasure
+    hdop = float(exhposerr) * 0.01
+    altitude = float(height) * 0.01, 'M'
+    geoid = float(geoidalsep) * 0.01, 'M'
+    dgps = None
+    return (
+      # seconds since 00:00 UTC
+      utc,                 
+      # latitude (degrees)
+      latitude,
+      # longitude (degrees)
+      longitude,
+      # position fix status (invalid = False, valid = True)
+      posfix,
+      # number of satellites [measurements] used for fix 0 <= satellites <= 12 
+      satellites,
+      # horizontal dilution of precision
+      hdop,
+      # (altitude according to WGS-84 ellipsoid, units (always 'M' for meters)) 
+      altitude,
+      # (geoid separation according to WGS-84 ellipsoid, units (always 'M' for meters))
+      geoid,
+      # None, for compatibility w/ NMEA code
+      dgps,
+    )
+
+  def decode_id(self, message):
+    assert len(message) == 106, "Receiver ID Message should be 59 words total (106 byte message)"
+    ticks, msgseq, channels, software_version, software_date, options_list, reserved = struct.unpack('<Lh20s20s20s20s20s', message)
+    channels, software_version, software_date, options_list = map(lambda s: s.split('\0')[0], (channels, software_version, software_date, options_list))
+    software_version = float(software_version)
+    channels = int(channels) # 0-12 .. but ALWAYS 12, so we ignore.
+    options_list = int(options_list[:4], 16) # only two bitflags, others are reserved
+    minimize_rom = (options_list & 0x01) > 0
+    minimize_ram = (options_list & 0x02) > 0
+    # (version info), (options info)
+    return ((software_version, software_date), (minimize_rom, minimize_ram))
+
+  def decode_channels(self, message):
+    assert len(message) == 90, "Channel Summary Message should be 51 words total (90 byte message)"
+    ticks, msgseq, satseq, gpswk, gpsws, gpsns = struct.unpack('<LhhHLL', message[:18])
+    channels = []
+    message = message[18:]
+    for i in range(12):
+      flags, prn, cno = struct.unpack('<HHH', message[6 * i:6 * (i + 1)])
+      # measurement used, ephemeris available, measurement valid, dgps corrections available
+      flags = (flags & 0x01, flags & 0x02, flags & 0x04, flags & 0x08)
+      channels.append((flags, prn, cno))
+    # ((flags, satellite PRN, C/No in dbHz)) for 12 channels
+    # satellite message sequence number
+    # gps week number, gps seconds in week (??), gps nanoseconds from Epoch
+    return (tuple(channels),) #, satseq, (gpswk, gpsws, gpsns))
+
+  def decode_satellites(self, message):
+    assert len(message) == 90, "Visible Satellites Message should be 51 words total (90 byte message)"
+    ticks, msgseq, gdop, pdop, hdop, vdop, tdop, numsatellites = struct.unpack('<LhhhhhhH', message[:18])
+    gdop, pdop, hdop, vdop, tdop = map(lambda n: float(n) * 0.01, (gdop, pdop, hdop, vdop, tdop))
+    satellites = []
+    message = message[18:]
+    for i in range(numsatellites):
+      prn, azi, elev = struct.unpack('<Hhh', message[6 * i:6 * (i + 1)])
+      azi, elev = map(lambda n: (float(n) * 0.0180 / math.pi), (azi, elev))
+      satellites.append((prn, azi, elev))
+    # ((PRN [0, 32], azimuth +-[0.0, 180.0] deg, elevation +-[0.0, 90.0] deg)) satellite info (0-12)
+    # (geometric, position, horizontal, vertical, time) dilution of precision 
+    return (tuple(satellites), (gdop, pdop, hdop, vdop, tdop))
+
+  def decode_dgps(self, message):
+    assert len(message) == 38, "Differential GPS Status Message should be 25 words total (38 byte message)"
+    raise NotImplementedError
+
+  def decode_ecef(self, message):
+    assert len(message) == 96, "ECEF Position Status Output Message should be 54 words total (96 byte message)"
+    raise NotImplementedError
+
+  def decode_channelmeas(self, message):
+    assert len(message) == 296, "Channel Measurement Message should be 154 words total (296 byte message)"
+    raise NotImplementedError
+
+  def decode_usersettings(self, message):
+    assert len(message) == 32, "User-Settings Output Message should be 22 words total (32 byte message)"
+    raise NotImplementedError
+
+  def decode_testresults(self, message):
+    assert len(message) == 28, "Built-In Test Results Message should be 20 words total (28 byte message)"
+    raise NotImplementedError
+
+  def decode_meastimemark(self, message):
+    assert len(message) == 494, "Measurement Time Mark Message should be 253 words total (494 byte message)"
+    raise NotImplementedError
+
+  def decode_utctimemark(self, message):
+    assert len(message) == 28, "UTC Time Mark Pulse Output Message should be 20 words total (28 byte message)"
+    raise NotImplementedError
+
+  def decode_serial(self, message):
+    assert len(message) == 30, "Serial Port Communication Parameters In Use Message should be 21 words total (30 byte message)"
+    raise NotImplementedError
+
+  def decode_eepromupdate(self, message):
+    assert len(message) == 8, "EEPROM Update Message should be 10 words total (8 byte message)"
+    raise NotImplementedError
+
+  def decode_eepromstatus(self, message):
+    assert len(message) == 24, "EEPROM Status Message should be 18 words total (24 byte message)"
+    raise NotImplementedError
diff --git a/ThirdParty/Twisted/twisted/protocols/htb.py b/ThirdParty/Twisted/twisted/protocols/htb.py
new file mode 100644
index 0000000..10008cf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/htb.py
@@ -0,0 +1,297 @@
+# -*- test-case-name: twisted.test.test_htb -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Hierarchical Token Bucket traffic shaping.
+
+Patterned after U{Martin Devera's Hierarchical Token Bucket traffic
+shaper for the Linux kernel<http://luxik.cdi.cz/~devik/qos/htb/>}.
+
+ at seealso: U{HTB Linux queuing discipline manual - user guide
+  <http://luxik.cdi.cz/~devik/qos/htb/manual/userg.htm>}
+ at seealso: U{Token Bucket Filter in Linux Advanced Routing & Traffic Control
+    HOWTO<http://lartc.org/howto/lartc.qdisc.classless.html#AEN682>}
+"""
+
+
+# TODO: Investigate whether we should be using os.times()[-1] instead of
+# time.time.  time.time, it has been pointed out, can go backwards.  Is
+# the same true of os.times?
+from time import time
+from zope.interface import implements, Interface
+
+from twisted.protocols import pcp
+
+
+class Bucket:
+    """
+    Implementation of a Token bucket.
+
+    A bucket can hold a certain number of tokens and it drains over time.
+
+    @cvar maxburst: The maximum number of tokens that the bucket can
+        hold at any given time. If this is C{None}, the bucket has
+        an infinite size.
+    @type maxburst: C{int}
+    @cvar rate: The rate at which the bucket drains, in number
+        of tokens per second. If the rate is C{None}, the bucket
+        drains instantaneously.
+    @type rate: C{int}
+    """
+
+    maxburst = None
+    rate = None
+
+    _refcount = 0
+
+    def __init__(self, parentBucket=None):
+        """
+        Create a L{Bucket} that may have a parent L{Bucket}.
+
+        @param parentBucket: If a parent Bucket is specified,
+            all L{add} and L{drip} operations on this L{Bucket}
+            will be applied on the parent L{Bucket} as well.
+        @type parentBucket: L{Bucket}
+        """
+        self.content = 0
+        self.parentBucket = parentBucket
+        self.lastDrip = time()
+
+
+    def add(self, amount):
+        """
+        Adds tokens to the L{Bucket} and its C{parentBucket}.
+
+        This will add as many of the C{amount} tokens as will fit into both
+        this L{Bucket} and its C{parentBucket}.
+
+        @param amount: The number of tokens to try to add.
+        @type amount: C{int}
+
+        @returns: The number of tokens that actually fit.
+        @returntype: C{int}
+        """
+        self.drip()
+        if self.maxburst is None:
+            allowable = amount
+        else:
+            allowable = min(amount, self.maxburst - self.content)
+
+        if self.parentBucket is not None:
+            allowable = self.parentBucket.add(allowable)
+        self.content += allowable
+        return allowable
+
+
+    def drip(self):
+        """
+        Let some of the bucket drain.
+
+        The L{Bucket} drains at the rate specified by the class
+        variable C{rate}.
+
+        @returns: C{True} if the bucket is empty after this drip.
+        @returntype: C{bool}
+        """
+        if self.parentBucket is not None:
+            self.parentBucket.drip()
+
+        if self.rate is None:
+            self.content = 0
+        else:
+            now = time()
+            deltaTime = now - self.lastDrip
+            deltaTokens = deltaTime * self.rate
+            self.content = max(0, self.content - deltaTokens)
+            self.lastDrip = now
+        return self.content == 0
+
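+    # Editor-added sketch (not upstream code): a concrete bucket is normally
+    # defined by subclassing and setting the class variables, e.g. roughly
+    # 8 kB/s sustained with bursts of up to 32 kB:
+    #
+    #     class ModemBucket(Bucket):
+    #         maxburst = 32 * 1024
+    #         rate = 8 * 1024
+    #
+    # ShapedConsumer (below) calls add(len(data)) before each write and only
+    # sends the number of bytes the bucket actually accepted.
+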
+
+class IBucketFilter(Interface):
+    def getBucketFor(*somethings, **some_kw):
+        """
+        Return a L{Bucket} corresponding to the provided parameters.
+
+        @returntype: L{Bucket}
+        """
+
+class HierarchicalBucketFilter:
+    """
+    Filter things into buckets that can be nested.
+
+    @cvar bucketFactory: Class of buckets to make.
+    @type bucketFactory: L{Bucket}
+    @cvar sweepInterval: Seconds between sweeping out the bucket cache.
+    @type sweepInterval: C{int}
+    """
+
+    implements(IBucketFilter)
+
+    bucketFactory = Bucket
+    sweepInterval = None
+
+    def __init__(self, parentFilter=None):
+        self.buckets = {}
+        self.parentFilter = parentFilter
+        self.lastSweep = time()
+
+    def getBucketFor(self, *a, **kw):
+        """
+        Find or create a L{Bucket} corresponding to the provided parameters.
+
+        Any parameters are passed on to L{getBucketKey}, from them it
+        decides which bucket you get.
+
+        @returntype: L{Bucket}
+        """
+        if ((self.sweepInterval is not None)
+            and ((time() - self.lastSweep) > self.sweepInterval)):
+            self.sweep()
+
+        if self.parentFilter:
+            parentBucket = self.parentFilter.getBucketFor(self, *a, **kw)
+        else:
+            parentBucket = None
+
+        key = self.getBucketKey(*a, **kw)
+        bucket = self.buckets.get(key)
+        if bucket is None:
+            bucket = self.bucketFactory(parentBucket)
+            self.buckets[key] = bucket
+        return bucket
+
+    def getBucketKey(self, *a, **kw):
+        """
+        Construct a key based on the input parameters to choose a L{Bucket}.
+
+        The default implementation returns the same key for all
+        arguments. Override this method to provide L{Bucket} selection.
+
+        @returns: Something to be used as a key in the bucket cache.
+        """
+        return None
+
+    def sweep(self):
+        """
+        Remove empty buckets.
+        """
+        for key, bucket in self.buckets.items():
+            bucket_is_empty = bucket.drip()
+            if (bucket._refcount == 0) and bucket_is_empty:
+                del self.buckets[key]
+
+        self.lastSweep = time()
+
+
+class FilterByHost(HierarchicalBucketFilter):
+    """
+    A Hierarchical Bucket filter with a L{Bucket} for each host.
+    """
+    sweepInterval = 60 * 20
+
+    def getBucketKey(self, transport):
+        return transport.getPeer()[1]
+
+
+class FilterByServer(HierarchicalBucketFilter):
+    """
+    A Hierarchical Bucket filter with a L{Bucket} for each service.
+    """
+    sweepInterval = None
+
+    def getBucketKey(self, transport):
+        return transport.getHost()[2]
+
+
+class ShapedConsumer(pcp.ProducerConsumerProxy):
+    """
+    Wraps a C{Consumer} and shapes the rate at which it receives data.
+    """
+    # Providing a Pull interface means I don't have to try to schedule
+    # traffic with callLaters.
+    iAmStreaming = False
+
+    def __init__(self, consumer, bucket):
+        pcp.ProducerConsumerProxy.__init__(self, consumer)
+        self.bucket = bucket
+        self.bucket._refcount += 1
+
+    def _writeSomeData(self, data):
+        # In practice, this actually results in obscene amounts of
+        # overhead, as a result of generating lots and lots of packets
+        # with twelve-byte payloads.  We may need to do a version of
+        # this with scheduled writes after all.
+        amount = self.bucket.add(len(data))
+        return pcp.ProducerConsumerProxy._writeSomeData(self, data[:amount])
+
+    def stopProducing(self):
+        pcp.ProducerConsumerProxy.stopProducing(self)
+        self.bucket._refcount -= 1
+
+
+class ShapedTransport(ShapedConsumer):
+    """
+    Wraps a C{Transport} and shapes the rate at which it receives data.
+
+    This is a L{ShapedConsumer} with a little bit of magic to provide for
+    the case where the consumer it wraps is also a C{Transport} and people
+    will be attempting to access attributes this does not proxy as a
+    C{Consumer} (e.g. C{loseConnection}).
+    """
+    # Ugh.  We only wanted to filter IConsumer, not ITransport.
+
+    iAmStreaming = False
+    def __getattr__(self, name):
+        # Because people will be doing things like .getPeer and
+        # .loseConnection on me.
+        return getattr(self.consumer, name)
+
+
+class ShapedProtocolFactory:
+    """
+    Dispense C{Protocols} with traffic shaping on their transports.
+
+    Usage::
+
+        myserver = SomeFactory()
+        myserver.protocol = ShapedProtocolFactory(myserver.protocol,
+                                                  bucketFilter)
+
+    Where C{SomeFactory} is a L{twisted.internet.protocol.Factory}, and
+    C{bucketFilter} is an instance of L{HierarchicalBucketFilter}.
+    """
+    def __init__(self, protoClass, bucketFilter):
+        """
+        Tell me what to wrap and where to get buckets.
+
+        @param protoClass: The class of C{Protocol} this will generate
+          wrapped instances of.
+        @type protoClass: L{Protocol<twisted.internet.interfaces.IProtocol>}
+          class
+        @param bucketFilter: The filter which will determine how
+          traffic is shaped.
+        @type bucketFilter: L{HierarchicalBucketFilter}.
+        """
+        # More precisely, protoClass can be any callable that will return
+        # instances of something that implements IProtocol.
+        self.protocol = protoClass
+        self.bucketFilter = bucketFilter
+
+    def __call__(self, *a, **kw):
+        """
+        Make a C{Protocol} instance with a shaped transport.
+
+        Any parameters will be passed on to the protocol's initializer.
+
+        @returns: A C{Protocol} instance with a L{ShapedTransport}.
+        """
+        proto = self.protocol(*a, **kw)
+        origMakeConnection = proto.makeConnection
+        def makeConnection(transport):
+            bucket = self.bucketFilter.getBucketFor(transport)
+            shapedTransport = ShapedTransport(transport, bucket)
+            return origMakeConnection(shapedTransport)
+        proto.makeConnection = makeConnection
+        return proto
diff --git a/ThirdParty/Twisted/twisted/protocols/ident.py b/ThirdParty/Twisted/twisted/protocols/ident.py
new file mode 100644
index 0000000..985322d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/ident.py
@@ -0,0 +1,231 @@
+# -*- test-case-name: twisted.test.test_ident -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Ident protocol implementation.
+"""
+
+import struct
+
+from twisted.internet import defer
+from twisted.protocols import basic
+from twisted.python import log, failure
+
+_MIN_PORT = 1
+_MAX_PORT = 2 ** 16 - 1
+
+class IdentError(Exception):
+    """
+    Can't determine connection owner; reason unknown.
+    """
+
+    identDescription = 'UNKNOWN-ERROR'
+
+    def __str__(self):
+        return self.identDescription
+
+
+class NoUser(IdentError):
+    """
+    The connection specified by the port pair is not currently in use or
+    currently not owned by an identifiable entity.
+    """
+    identDescription = 'NO-USER'
+
+
+class InvalidPort(IdentError):
+    """
+    Either the local or foreign port was improperly specified. This should
+    be returned if either or both of the port ids were out of range (TCP
+    port numbers are from 1-65535), negative integers, reals or in any
+    fashion not recognized as a non-negative integer.
+    """
+    identDescription = 'INVALID-PORT'
+
+
+class HiddenUser(IdentError):
+    """
+    The server was able to identify the user of this port, but the
+    information was not returned at the request of the user.
+    """
+    identDescription = 'HIDDEN-USER'
+
+
+class IdentServer(basic.LineOnlyReceiver):
+    """
+    The Identification Protocol (a.k.a., "ident", a.k.a., "the Ident
+    Protocol") provides a means to determine the identity of a user of a
+    particular TCP connection. Given a TCP port number pair, it returns a
+    character string which identifies the owner of that connection on the
+    server's system.
+
+    Server authors should subclass this class and override the lookup method.
+    The default implementation returns an UNKNOWN-ERROR response for every
+    query.
+    """
+
+    def lineReceived(self, line):
+        parts = line.split(',')
+        if len(parts) != 2:
+            self.invalidQuery()
+        else:
+            try:
+                portOnServer, portOnClient = map(int, parts)
+            except ValueError:
+                self.invalidQuery()
+            else:
+                if _MIN_PORT <= portOnServer <= _MAX_PORT and _MIN_PORT <= portOnClient <= _MAX_PORT:
+                    self.validQuery(portOnServer, portOnClient)
+                else:
+                    self._ebLookup(failure.Failure(InvalidPort()), portOnServer, portOnClient)
+
+    def invalidQuery(self):
+        self.transport.loseConnection()
+
+
+    def validQuery(self, portOnServer, portOnClient):
+        """
+        Called when a valid query is received to look up and deliver the
+        response.
+
+        @param portOnServer: The server port from the query.
+        @param portOnClient: The client port from the query.
+        """
+        serverAddr = self.transport.getHost().host, portOnServer
+        clientAddr = self.transport.getPeer().host, portOnClient
+        defer.maybeDeferred(self.lookup, serverAddr, clientAddr
+            ).addCallback(self._cbLookup, portOnServer, portOnClient
+            ).addErrback(self._ebLookup, portOnServer, portOnClient
+            )
+
+
+    def _cbLookup(self, (sysName, userId), sport, cport):
+        self.sendLine('%d, %d : USERID : %s : %s' % (sport, cport, sysName, userId))
+
+    def _ebLookup(self, failure, sport, cport):
+        if failure.check(IdentError):
+            self.sendLine('%d, %d : ERROR : %s' % (sport, cport, failure.value))
+        else:
+            log.err(failure)
+            self.sendLine('%d, %d : ERROR : %s' % (sport, cport, IdentError(failure.value)))
+
+    def lookup(self, serverAddress, clientAddress):
+        """Lookup user information about the specified address pair.
+
+        Return value should be a two-tuple of system name and username.
+        Acceptable values for the system name may be found online at::
+
+            U{http://www.iana.org/assignments/operating-system-names}
+
+        This method may also raise any IdentError subclass (or IdentError
+        itself) to indicate user information will not be provided for the
+        given query.
+
+        A Deferred may also be returned.
+
+        @param serverAddress: A two-tuple representing the server endpoint
+        of the address being queried.  The first element is a string holding
+        a dotted-quad IP address.  The second element is an integer
+        representing the port.
+
+        @param clientAddress: Like L{serverAddress}, but represents the
+        client endpoint of the address being queried.
+        """
+        raise IdentError()
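+
+    # Editor-added sketch (not upstream code): a minimal server that answers
+    # every query with a fixed identity overrides lookup, for example:
+    #
+    #     class FixedIdentServer(IdentServer):
+    #         def lookup(self, serverAddress, clientAddress):
+    #             return ('UNIX', 'nobody')
+    #
+    # lookup may also return a Deferred firing with such a tuple, or raise
+    # an IdentError subclass (NoUser, HiddenUser, ...) to refuse the query.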
+
+class ProcServerMixin:
+    """Implements lookup() to grab entries for responses from /proc/net/tcp
+    """
+
+    SYSTEM_NAME = 'LINUX'
+
+    try:
+        from pwd import getpwuid
+        def getUsername(self, uid, getpwuid=getpwuid):
+            return getpwuid(uid)[0]
+        del getpwuid
+    except ImportError:
+        def getUsername(self, uid):
+            raise IdentError()
+
+    def entries(self):
+        f = file('/proc/net/tcp')
+        f.readline()
+        for L in f:
+            yield L.strip()
+
+    def dottedQuadFromHexString(self, hexstr):
+        return '.'.join(map(str, struct.unpack('4B', struct.pack('=L', int(hexstr, 16)))))
+
+    def unpackAddress(self, packed):
+        addr, port = packed.split(':')
+        addr = self.dottedQuadFromHexString(addr)
+        port = int(port, 16)
+        return addr, port
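+        # Editor-added note: /proc/net/tcp stores the address as hex in host
+        # byte order, so on a little-endian machine
+        # unpackAddress('0100007F:0050') returns ('127.0.0.1', 80).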
+
+    def parseLine(self, line):
+        parts = line.strip().split()
+        localAddr, localPort = self.unpackAddress(parts[1])
+        remoteAddr, remotePort = self.unpackAddress(parts[2])
+        uid = int(parts[7])
+        return (localAddr, localPort), (remoteAddr, remotePort), uid
+
+    def lookup(self, serverAddress, clientAddress):
+        for ent in self.entries():
+            localAddr, remoteAddr, uid = self.parseLine(ent)
+            if remoteAddr == clientAddress and localAddr[1] == serverAddress[1]:
+                return (self.SYSTEM_NAME, self.getUsername(uid))
+
+        raise NoUser()
+
+
+class IdentClient(basic.LineOnlyReceiver):
+
+    errorTypes = (IdentError, NoUser, InvalidPort, HiddenUser)
+
+    def __init__(self):
+        self.queries = []
+
+    def lookup(self, portOnServer, portOnClient):
+        """Lookup user information about the specified address pair.
+        """
+        self.queries.append((defer.Deferred(), portOnServer, portOnClient))
+        if len(self.queries) > 1:
+            return self.queries[-1][0]
+
+        self.sendLine('%d, %d' % (portOnServer, portOnClient))
+        return self.queries[-1][0]
+
+    def lineReceived(self, line):
+        if not self.queries:
+            log.msg("Unexpected server response: %r" % (line,))
+        else:
+            d, _, _ = self.queries.pop(0)
+            self.parseResponse(d, line)
+            if self.queries:
+                self.sendLine('%d, %d' % (self.queries[0][1], self.queries[0][2]))
+
+    def connectionLost(self, reason):
+        for q in self.queries:
+            q[0].errback(IdentError(reason))
+        self.queries = []
+
+    def parseResponse(self, deferred, line):
+        parts = line.split(':', 2)
+        if len(parts) != 3:
+            deferred.errback(IdentError(line))
+        else:
+            ports, type, addInfo = map(str.strip, parts)
+            if type == 'ERROR':
+                for et in self.errorTypes:
+                    if et.identDescription == addInfo:
+                        deferred.errback(et(line))
+                        return
+                deferred.errback(IdentError(line))
+            else:
+                deferred.callback((type, addInfo))
+
+__all__ = ['IdentError', 'NoUser', 'InvalidPort', 'HiddenUser',
+           'IdentServer', 'IdentClient',
+           'ProcServerMixin']
diff --git a/ThirdParty/Twisted/twisted/protocols/loopback.py b/ThirdParty/Twisted/twisted/protocols/loopback.py
new file mode 100644
index 0000000..0e0ba3b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/loopback.py
@@ -0,0 +1,377 @@
+# -*- test-case-name: twisted.test.test_loopback -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Testing support for protocols -- loopback between client and server.
+"""
+
+from __future__ import division, absolute_import
+
+# system imports
+import tempfile
+
+from zope.interface import implementer
+
+# Twisted Imports
+from twisted.protocols import policies
+from twisted.internet import interfaces, protocol, main, defer
+from twisted.internet.task import deferLater
+from twisted.python import failure
+from twisted.internet.interfaces import IAddress
+
+
+class _LoopbackQueue(object):
+    """
+    Trivial wrapper around a list to give it an interface like a queue, with
+    the addition that it also sends a notification by way of a Deferred
+    whenever the list has something added to it.
+    """
+
+    _notificationDeferred = None
+    disconnect = False
+
+    def __init__(self):
+        self._queue = []
+
+
+    def put(self, v):
+        self._queue.append(v)
+        if self._notificationDeferred is not None:
+            d, self._notificationDeferred = self._notificationDeferred, None
+            d.callback(None)
+
+
+    def __nonzero__(self):
+        return bool(self._queue)
+    __bool__ = __nonzero__
+
+
+    def get(self):
+        return self._queue.pop(0)
+
+
+
+ at implementer(IAddress)
+class _LoopbackAddress(object):
+    pass
+
+
+
+ at implementer(interfaces.ITransport, interfaces.IConsumer)
+class _LoopbackTransport(object):
+    disconnecting = False
+    producer = None
+
+    # ITransport
+    def __init__(self, q):
+        self.q = q
+
+    def write(self, data):
+        if not isinstance(data, bytes):
+            raise TypeError("Can only write bytes to ITransport")
+        self.q.put(data)
+
+    def writeSequence(self, iovec):
+        self.q.put(b''.join(iovec))
+
+    def loseConnection(self):
+        self.q.disconnect = True
+        self.q.put(None)
+
+    def getPeer(self):
+        return _LoopbackAddress()
+
+    def getHost(self):
+        return _LoopbackAddress()
+
+    # IConsumer
+    def registerProducer(self, producer, streaming):
+        assert self.producer is None
+        self.producer = producer
+        self.streamingProducer = streaming
+        self._pollProducer()
+
+    def unregisterProducer(self):
+        assert self.producer is not None
+        self.producer = None
+
+    def _pollProducer(self):
+        if self.producer is not None and not self.streamingProducer:
+            self.producer.resumeProducing()
+
+
+
+def identityPumpPolicy(queue, target):
+    """
+    L{identityPumpPolicy} is a policy which delivers each chunk of data written
+    to the given queue as-is to the target.
+
+    This isn't a particularly realistic policy.
+
+    @see: L{loopbackAsync}
+    """
+    while queue:
+        bytes = queue.get()
+        if bytes is None:
+            break
+        target.dataReceived(bytes)
+
+
+
+def collapsingPumpPolicy(queue, target):
+    """
+    L{collapsingPumpPolicy} is a policy which collapses all outstanding chunks
+    into a single string and delivers it to the target.
+
+    @see: L{loopbackAsync}
+    """
+    bytes = []
+    while queue:
+        chunk = queue.get()
+        if chunk is None:
+            break
+        bytes.append(chunk)
+    if bytes:
+        target.dataReceived(b''.join(bytes))
+
+
+
+def loopbackAsync(server, client, pumpPolicy=identityPumpPolicy):
+    """
+    Establish a connection between C{server} and C{client} then transfer data
+    between them until the connection is closed. This is often useful for
+    testing a protocol.
+
+    @param server: The protocol instance representing the server-side of this
+        connection.
+
+    @param client: The protocol instance representing the client-side of this
+        connection.
+
+    @param pumpPolicy: When either C{server} or C{client} writes to its
+        transport, the string passed in is added to a queue of data for the
+        other protocol.  Eventually, C{pumpPolicy} will be called with one such
+        queue and the corresponding protocol object.  The pump policy callable
+        is responsible for emptying the queue and passing the strings it
+        contains to the given protocol's C{dataReceived} method.  The signature
+        of C{pumpPolicy} is C{(queue, protocol)}.  C{queue} is an object with a
+        C{get} method which will return the next string written to the
+        transport, or C{None} if the transport has been disconnected, and which
+        evaluates to C{True} if and only if there are more items to be
+        retrieved via C{get}.
+
+    @return: A L{Deferred} which fires when the connection has been closed and
+        both sides have received notification of this.
+    """
+    serverToClient = _LoopbackQueue()
+    clientToServer = _LoopbackQueue()
+
+    server.makeConnection(_LoopbackTransport(serverToClient))
+    client.makeConnection(_LoopbackTransport(clientToServer))
+
+    return _loopbackAsyncBody(
+        server, serverToClient, client, clientToServer, pumpPolicy)
+
+
+
+def _loopbackAsyncBody(server, serverToClient, client, clientToServer,
+                       pumpPolicy):
+    """
+    Transfer bytes from the output queue of each protocol to the input of the other.
+
+    @param server: The protocol instance representing the server-side of this
+    connection.
+
+    @param serverToClient: The L{_LoopbackQueue} holding the server's output.
+
+    @param client: The protocol instance representing the client-side of this
+    connection.
+
+    @param clientToServer: The L{_LoopbackQueue} holding the client's output.
+
+    @param pumpPolicy: See L{loopbackAsync}.
+
+    @return: A L{Deferred} which fires when the connection has been closed and
+        both sides have received notification of this.
+    """
+    def pump(source, q, target):
+        sent = False
+        if q:
+            pumpPolicy(q, target)
+            sent = True
+        if sent and not q:
+            # A write buffer has now been emptied.  Give any producer on that
+            # side an opportunity to produce more data.
+            source.transport._pollProducer()
+
+        return sent
+
+    while 1:
+        disconnect = clientSent = serverSent = False
+
+        # Deliver the data which has been written.
+        serverSent = pump(server, serverToClient, client)
+        clientSent = pump(client, clientToServer, server)
+
+        if not clientSent and not serverSent:
+            # Neither side wrote any data.  Wait for some new data to be added
+            # before trying to do anything further.
+            d = defer.Deferred()
+            clientToServer._notificationDeferred = d
+            serverToClient._notificationDeferred = d
+            d.addCallback(
+                _loopbackAsyncContinue,
+                server, serverToClient, client, clientToServer, pumpPolicy)
+            return d
+        if serverToClient.disconnect:
+            # The server wants to drop the connection.  Flush any remaining
+            # data it has.
+            disconnect = True
+            pump(server, serverToClient, client)
+        elif clientToServer.disconnect:
+            # The client wants to drop the connection.  Flush any remaining
+            # data it has.
+            disconnect = True
+            pump(client, clientToServer, server)
+        if disconnect:
+            # Someone wanted to disconnect, so okay, the connection is gone.
+            server.connectionLost(failure.Failure(main.CONNECTION_DONE))
+            client.connectionLost(failure.Failure(main.CONNECTION_DONE))
+            return defer.succeed(None)
+
+
+
+def _loopbackAsyncContinue(ignored, server, serverToClient, client,
+                           clientToServer, pumpPolicy):
+    # Clear the Deferred from each message queue, since it has already fired
+    # and cannot be used again.
+    clientToServer._notificationDeferred = None
+    serverToClient._notificationDeferred = None
+
+    # Schedule some more byte-pushing to happen.  This isn't done
+    # synchronously because no actual transport can re-enter dataReceived as
+    # a result of calling write, and doing this synchronously could result
+    # in that.
+    from twisted.internet import reactor
+    return deferLater(
+        reactor, 0,
+        _loopbackAsyncBody,
+        server, serverToClient, client, clientToServer, pumpPolicy)
+
+
+
+@implementer(interfaces.ITransport, interfaces.IConsumer)
+class LoopbackRelay:
+    buffer = ''
+    shouldLose = 0
+    disconnecting = 0
+    producer = None
+
+    def __init__(self, target, logFile=None):
+        self.target = target
+        self.logFile = logFile
+
+    def write(self, data):
+        self.buffer = self.buffer + data
+        if self.logFile:
+            self.logFile.write("loopback writing %s\n" % repr(data))
+
+    def writeSequence(self, iovec):
+        self.write("".join(iovec))
+
+    def clearBuffer(self):
+        if self.shouldLose == -1:
+            return
+
+        if self.producer:
+            self.producer.resumeProducing()
+        if self.buffer:
+            if self.logFile:
+                self.logFile.write("loopback receiving %s\n" % repr(self.buffer))
+            buffer = self.buffer
+            self.buffer = ''
+            self.target.dataReceived(buffer)
+        if self.shouldLose == 1:
+            self.shouldLose = -1
+            self.target.connectionLost(failure.Failure(main.CONNECTION_DONE))
+
+    def loseConnection(self):
+        if self.shouldLose != -1:
+            self.shouldLose = 1
+
+    def getHost(self):
+        return 'loopback'
+
+    def getPeer(self):
+        return 'loopback'
+
+    def registerProducer(self, producer, streaming):
+        self.producer = producer
+
+    def unregisterProducer(self):
+        self.producer = None
+
+    def logPrefix(self):
+        return 'Loopback(%r)' % (self.target.__class__.__name__,)
+
+
+
+class LoopbackClientFactory(protocol.ClientFactory):
+
+    def __init__(self, protocol):
+        self.disconnected = 0
+        self.deferred = defer.Deferred()
+        self.protocol = protocol
+
+    def buildProtocol(self, addr):
+        return self.protocol
+
+    def clientConnectionLost(self, connector, reason):
+        self.disconnected = 1
+        self.deferred.callback(None)
+
+
+class _FireOnClose(policies.ProtocolWrapper):
+    def __init__(self, protocol, factory):
+        policies.ProtocolWrapper.__init__(self, protocol, factory)
+        self.deferred = defer.Deferred()
+
+    def connectionLost(self, reason):
+        policies.ProtocolWrapper.connectionLost(self, reason)
+        self.deferred.callback(None)
+
+
+def loopbackTCP(server, client, port=0, noisy=True):
+    """Run session between server and client protocol instances over TCP."""
+    from twisted.internet import reactor
+    f = policies.WrappingFactory(protocol.Factory())
+    serverWrapper = _FireOnClose(f, server)
+    f.noisy = noisy
+    f.buildProtocol = lambda addr: serverWrapper
+    serverPort = reactor.listenTCP(port, f, interface='127.0.0.1')
+    clientF = LoopbackClientFactory(client)
+    clientF.noisy = noisy
+    reactor.connectTCP('127.0.0.1', serverPort.getHost().port, clientF)
+    d = clientF.deferred
+    d.addCallback(lambda x: serverWrapper.deferred)
+    d.addCallback(lambda x: serverPort.stopListening())
+    return d
+
+
+def loopbackUNIX(server, client, noisy=True):
+    """Run session between server and client protocol instances over UNIX socket."""
+    path = tempfile.mktemp()
+    from twisted.internet import reactor
+    f = policies.WrappingFactory(protocol.Factory())
+    serverWrapper = _FireOnClose(f, server)
+    f.noisy = noisy
+    f.buildProtocol = lambda addr: serverWrapper
+    serverPort = reactor.listenUNIX(path, f)
+    clientF = LoopbackClientFactory(client)
+    clientF.noisy = noisy
+    reactor.connectUNIX(path, clientF)
+    d = clientF.deferred
+    d.addCallback(lambda x: serverWrapper.deferred)
+    d.addCallback(lambda x: serverPort.stopListening())
+    return d
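+
+
+# A minimal usage sketch, not part of the upstream module: wire a trivial echo
+# server to a throwaway client over the in-memory loopback transport.  The
+# protocol classes and helper name below are illustrative only; `protocol` is
+# the twisted.internet.protocol module already used elsewhere in this file.
+def _exampleLoopbackEcho():
+    class _Echo(protocol.Protocol):
+        def dataReceived(self, data):
+            # Send every received chunk straight back.
+            self.transport.write(data)
+
+    class _Hello(protocol.Protocol):
+        def connectionMade(self):
+            self.transport.write(b"hello")
+
+        def dataReceived(self, data):
+            # The echoed bytes arrive here; hang up to end the session.
+            self.received = data
+            self.transport.loseConnection()
+
+    # The returned Deferred fires once both protocols have been told the
+    # connection is gone.
+    return loopbackAsync(_Echo(), _Hello())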
diff --git a/ThirdParty/Twisted/twisted/protocols/memcache.py b/ThirdParty/Twisted/twisted/protocols/memcache.py
new file mode 100644
index 0000000..a5e987d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/memcache.py
@@ -0,0 +1,758 @@
+# -*- test-case-name: twisted.test.test_memcache -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Memcache client protocol. Memcached is a caching server, storing data in the
+form of key/value pairs, and memcache is the protocol used to talk to it.
+
+To connect to a server, create a factory for L{MemCacheProtocol}::
+
+    from twisted.internet import reactor, protocol
+    from twisted.protocols.memcache import MemCacheProtocol, DEFAULT_PORT
+    d = protocol.ClientCreator(reactor, MemCacheProtocol
+        ).connectTCP("localhost", DEFAULT_PORT)
+    def doSomething(proto):
+        # Here you call the memcache operations
+        return proto.set("mykey", "a lot of data")
+    d.addCallback(doSomething)
+    reactor.run()
+
+All the operations of the memcache protocol are present, but
+L{MemCacheProtocol.set} and L{MemCacheProtocol.get} are the more important.
+
+See U{http://code.sixapart.com/svn/memcached/trunk/server/doc/protocol.txt} for
+more information about the protocol.
+"""
+
+try:
+    from collections import deque
+except ImportError:
+    class deque(list):
+        def popleft(self):
+            return self.pop(0)
+
+
+from twisted.protocols.basic import LineReceiver
+from twisted.protocols.policies import TimeoutMixin
+from twisted.internet.defer import Deferred, fail, TimeoutError
+from twisted.python import log
+
+
+
+DEFAULT_PORT = 11211
+
+
+
+class NoSuchCommand(Exception):
+    """
+    Exception raised when a non-existent command is called.
+    """
+
+
+
+class ClientError(Exception):
+    """
+    Error caused by an invalid client call.
+    """
+
+
+
+class ServerError(Exception):
+    """
+    Problem happening on the server.
+    """
+
+
+
+class Command(object):
+    """
+    Wrap a client action into an object that holds the values used in the
+    protocol.
+
+    @ivar _deferred: the L{Deferred} object that will be fired when the result
+        arrives.
+    @type _deferred: L{Deferred}
+
+    @ivar command: name of the command sent to the server.
+    @type command: C{str}
+    """
+
+    def __init__(self, command, **kwargs):
+        """
+        Create a command.
+
+        @param command: the name of the command.
+        @type command: C{str}
+
+        @param kwargs: these values will be stored as attributes of the object
+            for future use.
+        """
+        self.command = command
+        self._deferred = Deferred()
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+
+
+    def success(self, value):
+        """
+        Shortcut method to fire the underlying deferred.
+        """
+        self._deferred.callback(value)
+
+
+    def fail(self, error):
+        """
+        Make the underlying deferred fail.
+        """
+        self._deferred.errback(error)
+
+
+
+class MemCacheProtocol(LineReceiver, TimeoutMixin):
+    """
+    MemCache protocol: connect to a memcached server to store/retrieve values.
+
+    @ivar persistentTimeOut: the timeout period used to wait for a response.
+    @type persistentTimeOut: C{int}
+
+    @ivar _current: current list of requests waiting for an answer from the
+        server.
+    @type _current: C{deque} of L{Command}
+
+    @ivar _lenExpected: amount of data expected in raw mode, when reading for
+        a value.
+    @type _lenExpected: C{int}
+
+    @ivar _getBuffer: current buffer of data, used to store temporary data
+        when reading in raw mode.
+    @type _getBuffer: C{list}
+
+    @ivar _bufferLength: the total amount of bytes in C{_getBuffer}.
+    @type _bufferLength: C{int}
+
+    @ivar _disconnected: indicates whether connectionLost has been called or not.
+    @type _disconnected: C{bool}
+    """
+    MAX_KEY_LENGTH = 250
+    _disconnected = False
+
+    def __init__(self, timeOut=60):
+        """
+        Create the protocol.
+
+        @param timeOut: the timeout, in seconds, to wait before deciding that
+            the connection is dead and closing it.
+        @type timeOut: C{int}
+        """
+        self._current = deque()
+        self._lenExpected = None
+        self._getBuffer = None
+        self._bufferLength = None
+        self.persistentTimeOut = self.timeOut = timeOut
+
+
+    def _cancelCommands(self, reason):
+        """
+        Cancel all the outstanding commands, making them fail with C{reason}.
+        """
+        while self._current:
+            cmd = self._current.popleft()
+            cmd.fail(reason)
+
+
+    def timeoutConnection(self):
+        """
+        Close the connection in case of timeout.
+        """
+        self._cancelCommands(TimeoutError("Connection timeout"))
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        """
+        Cause any outstanding commands to fail.
+        """
+        self._disconnected = True
+        self._cancelCommands(reason)
+        LineReceiver.connectionLost(self, reason)
+
+
+    def sendLine(self, line):
+        """
+        Override sendLine to add a timeout to response.
+        """
+        if not self._current:
+            self.setTimeout(self.persistentTimeOut)
+        LineReceiver.sendLine(self, line)
+
+
+    def rawDataReceived(self, data):
+        """
+        Collect data for a get.
+        """
+        self.resetTimeout()
+        self._getBuffer.append(data)
+        self._bufferLength += len(data)
+        if self._bufferLength >= self._lenExpected + 2:
+            data = "".join(self._getBuffer)
+            buf = data[:self._lenExpected]
+            rem = data[self._lenExpected + 2:]
+            val = buf
+            self._lenExpected = None
+            self._getBuffer = None
+            self._bufferLength = None
+            cmd = self._current[0]
+            if cmd.multiple:
+                flags, cas = cmd.values[cmd.currentKey]
+                cmd.values[cmd.currentKey] = (flags, cas, val)
+            else:
+                cmd.value = val
+            self.setLineMode(rem)
+
+
+    def cmd_STORED(self):
+        """
+        Manage a success response to a set operation.
+        """
+        self._current.popleft().success(True)
+
+
+    def cmd_NOT_STORED(self):
+        """
+        Manage a specific 'not stored' response to a set operation: this is not
+        an error, but some condition wasn't met.
+        """
+        self._current.popleft().success(False)
+
+
+    def cmd_END(self):
+        """
+        This is the end token of a get or a stats operation.
+        """
+        cmd = self._current.popleft()
+        if cmd.command == "get":
+            if cmd.multiple:
+                values = dict([(key, val[::2]) for key, val in
+                               cmd.values.iteritems()])
+                cmd.success(values)
+            else:
+                cmd.success((cmd.flags, cmd.value))
+        elif cmd.command == "gets":
+            if cmd.multiple:
+                cmd.success(cmd.values)
+            else:
+                cmd.success((cmd.flags, cmd.cas, cmd.value))
+        elif cmd.command == "stats":
+            cmd.success(cmd.values)
+
+
+    def cmd_NOT_FOUND(self):
+        """
+        Manage error response for incr/decr/delete.
+        """
+        self._current.popleft().success(False)
+
+
+    def cmd_VALUE(self, line):
+        """
+        Prepare to read a value after a get.
+        """
+        cmd = self._current[0]
+        if cmd.command == "get":
+            key, flags, length = line.split()
+            cas = ""
+        else:
+            key, flags, length, cas = line.split()
+        self._lenExpected = int(length)
+        self._getBuffer = []
+        self._bufferLength = 0
+        if cmd.multiple:
+            if key not in cmd.keys:
+                raise RuntimeError("Unexpected commands answer.")
+            cmd.currentKey = key
+            cmd.values[key] = [int(flags), cas]
+        else:
+            if cmd.key != key:
+                raise RuntimeError("Unexpected commands answer.")
+            cmd.flags = int(flags)
+            cmd.cas = cas
+        self.setRawMode()
+
+
+    def cmd_STAT(self, line):
+        """
+        Reception of one stat line.
+        """
+        cmd = self._current[0]
+        key, val = line.split(" ", 1)
+        cmd.values[key] = val
+
+
+    def cmd_VERSION(self, versionData):
+        """
+        Read version token.
+        """
+        self._current.popleft().success(versionData)
+
+
+    def cmd_ERROR(self):
+        """
+        A non-existent command has been sent.
+        """
+        log.err("Non-existent command sent.")
+        cmd = self._current.popleft()
+        cmd.fail(NoSuchCommand())
+
+
+    def cmd_CLIENT_ERROR(self, errText):
+        """
+        An invalid input has been sent.
+        """
+        log.err("Invalid input: %s" % (errText,))
+        cmd = self._current.popleft()
+        cmd.fail(ClientError(errText))
+
+
+    def cmd_SERVER_ERROR(self, errText):
+        """
+        An error has happened server-side.
+        """
+        log.err("Server error: %s" % (errText,))
+        cmd = self._current.popleft()
+        cmd.fail(ServerError(errText))
+
+
+    def cmd_DELETED(self):
+        """
+        A delete command has completed successfully.
+        """
+        self._current.popleft().success(True)
+
+
+    def cmd_OK(self):
+        """
+        The last command has been completed.
+        """
+        self._current.popleft().success(True)
+
+
+    def cmd_EXISTS(self):
+        """
+        A C{checkAndSet} update has failed.
+        """
+        self._current.popleft().success(False)
+
+
+    def lineReceived(self, line):
+        """
+        Receive line commands from the server.
+        """
+        self.resetTimeout()
+        token = line.split(" ", 1)[0]
+        # First manage standard commands without space
+        cmd = getattr(self, "cmd_%s" % (token,), None)
+        if cmd is not None:
+            args = line.split(" ", 1)[1:]
+            if args:
+                cmd(args[0])
+            else:
+                cmd()
+        else:
+            # Then manage commands with space in it
+            line = line.replace(" ", "_")
+            cmd = getattr(self, "cmd_%s" % (line,), None)
+            if cmd is not None:
+                cmd()
+            else:
+                # Increment/Decrement response
+                cmd = self._current.popleft()
+                val = int(line)
+                cmd.success(val)
+        if not self._current:
+            # No pending request, remove timeout
+            self.setTimeout(None)
+
+
+    def increment(self, key, val=1):
+        """
+        Increment the value of C{key} by the given value (default to 1).
+        The value stored at C{key} must be parseable as an int. Return the
+        new value.
+
+        @param key: the key to modify.
+        @type key: C{str}
+
+        @param val: the value to increment.
+        @type val: C{int}
+
+        @return: a deferred which will be called back with the new value
+            associated with the key (after the increment).
+        @rtype: L{Deferred}
+        """
+        return self._incrdecr("incr", key, val)
+
+
+    def decrement(self, key, val=1):
+        """
+        Decrement the value of C{key} by the given value (default to 1).
+        The value stored at C{key} must be parseable as an int. Return the new
+        value, coerced to 0 if negative.
+
+        @param key: the key to modify.
+        @type key: C{str}
+
+        @param val: the value to decrement.
+        @type val: C{int}
+
+        @return: a deferred which will be called back with the new value
+            associated with the key (after the decrement).
+        @rtype: L{Deferred}
+        """
+        return self._incrdecr("decr", key, val)
+
+
+    def _incrdecr(self, cmd, key, val):
+        """
+        Internal wrapper for incr/decr.
+        """
+        if self._disconnected:
+            return fail(RuntimeError("not connected"))
+        if not isinstance(key, str):
+            return fail(ClientError(
+                "Invalid type for key: %s, expecting a string" % (type(key),)))
+        if len(key) > self.MAX_KEY_LENGTH:
+            return fail(ClientError("Key too long"))
+        fullcmd = "%s %s %d" % (cmd, key, int(val))
+        self.sendLine(fullcmd)
+        cmdObj = Command(cmd, key=key)
+        self._current.append(cmdObj)
+        return cmdObj._deferred
+
+
+    def replace(self, key, val, flags=0, expireTime=0):
+        """
+        Replace the given C{key}. It must already exist in the server.
+
+        @param key: the key to replace.
+        @type key: C{str}
+
+        @param val: the new value associated with the key.
+        @type val: C{str}
+
+        @param flags: the flags to store with the key.
+        @type flags: C{int}
+
+        @param expireTime: if different from 0, the relative time in seconds
+            when the key will be deleted from the store.
+        @type expireTime: C{int}
+
+        @return: a deferred that will fire with C{True} if the operation has
+            succeeded, and C{False} if the key didn't previously exist.
+        @rtype: L{Deferred}
+        """
+        return self._set("replace", key, val, flags, expireTime, "")
+
+
+    def add(self, key, val, flags=0, expireTime=0):
+        """
+        Add the given C{key}. It must not exist in the server.
+
+        @param key: the key to add.
+        @type key: C{str}
+
+        @param val: the value associated with the key.
+        @type val: C{str}
+
+        @param flags: the flags to store with the key.
+        @type flags: C{int}
+
+        @param expireTime: if different from 0, the relative time in seconds
+            when the key will be deleted from the store.
+        @type expireTime: C{int}
+
+        @return: a deferred that will fire with C{True} if the operation has
+            succeeded, and C{False} if the key already exists.
+        @rtype: L{Deferred}
+        """
+        return self._set("add", key, val, flags, expireTime, "")
+
+
+    def set(self, key, val, flags=0, expireTime=0):
+        """
+        Set the given C{key}.
+
+        @param key: the key to set.
+        @type key: C{str}
+
+        @param val: the value associated with the key.
+        @type val: C{str}
+
+        @param flags: the flags to store with the key.
+        @type flags: C{int}
+
+        @param expireTime: if different from 0, the relative time in seconds
+            when the key will be deleted from the store.
+        @type expireTime: C{int}
+
+        @return: a deferred that will fire with C{True} if the operation has
+            succeeded.
+        @rtype: L{Deferred}
+        """
+        return self._set("set", key, val, flags, expireTime, "")
+
+
+    def checkAndSet(self, key, val, cas, flags=0, expireTime=0):
+        """
+        Change the content of C{key} only if the C{cas} value matches the
+        current one associated with the key. Use this to store a value which
+        hasn't been modified since last time you fetched it.
+
+        @param key: The key to set.
+        @type key: C{str}
+
+        @param val: The value associated with the key.
+        @type val: C{str}
+
+        @param cas: Unique 64-bit value returned by a previous C{get} call made
+            with C{withIdentifier=True} (i.e. a C{gets}).
+        @type cas: C{str}
+
+        @param flags: The flags to store with the key.
+        @type flags: C{int}
+
+        @param expireTime: If different from 0, the relative time in seconds
+            when the key will be deleted from the store.
+        @type expireTime: C{int}
+
+        @return: A deferred that will fire with C{True} if the operation has
+            succeeded, C{False} otherwise.
+        @rtype: L{Deferred}
+        """
+        return self._set("cas", key, val, flags, expireTime, cas)
+
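+    # A usage sketch of the gets/checkAndSet flow (illustrative only, not
+    # part of the upstream API): fetch the value together with its cas
+    # identifier, then write it back only if nobody modified it in between.
+    # C{proto} stands for any connected MemCacheProtocol instance.
+    #
+    #     def bumpCounter(proto):
+    #         d = proto.get("counter", withIdentifier=True)
+    #         def update(result):
+    #             flags, cas, value = result
+    #             return proto.checkAndSet("counter", value + "!", cas)
+    #         return d.addCallback(update)
+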
+
+    def _set(self, cmd, key, val, flags, expireTime, cas):
+        """
+        Internal wrapper for setting values.
+        """
+        if self._disconnected:
+            return fail(RuntimeError("not connected"))
+        if not isinstance(key, str):
+            return fail(ClientError(
+                "Invalid type for key: %s, expecting a string" % (type(key),)))
+        if len(key) > self.MAX_KEY_LENGTH:
+            return fail(ClientError("Key too long"))
+        if not isinstance(val, str):
+            return fail(ClientError(
+                "Invalid type for value: %s, expecting a string" %
+                (type(val),)))
+        if cas:
+            cas = " " + cas
+        length = len(val)
+        fullcmd = "%s %s %d %d %d%s" % (
+            cmd, key, flags, expireTime, length, cas)
+        self.sendLine(fullcmd)
+        self.sendLine(val)
+        cmdObj = Command(cmd, key=key, flags=flags, length=length)
+        self._current.append(cmdObj)
+        return cmdObj._deferred
+
+
+    def append(self, key, val):
+        """
+        Append given data to the value of an existing key.
+
+        @param key: The key to modify.
+        @type key: C{str}
+
+        @param val: The value to append to the current value associated with
+            the key.
+        @type val: C{str}
+
+        @return: A deferred that will fire with C{True} if the operation has
+            succeeded, C{False} otherwise.
+        @rtype: L{Deferred}
+        """
+        # Even if flags and expTime values are ignored, we have to pass them
+        return self._set("append", key, val, 0, 0, "")
+
+
+    def prepend(self, key, val):
+        """
+        Prepend given data to the value of an existing key.
+
+        @param key: The key to modify.
+        @type key: C{str}
+
+        @param val: The value to prepend to the current value associated with
+            the key.
+        @type val: C{str}
+
+        @return: A deferred that will fire with C{True} if the operation has
+            succeeded, C{False} otherwise.
+        @rtype: L{Deferred}
+        """
+        # Even if flags and expTime values are ignored, we have to pass them
+        return self._set("prepend", key, val, 0, 0, "")
+
+
+    def get(self, key, withIdentifier=False):
+        """
+        Get the given C{key}. It doesn't support multiple keys. If
+        C{withIdentifier} is set to C{True}, the command issued is a C{gets},
+        which will return the current identifier associated with the value.
+        This identifier has to be used when issuing a C{checkAndSet} update
+        later, using the corresponding method.
+
+        @param key: The key to retrieve.
+        @type key: C{str}
+
+        @param withIdentifier: If set to C{True}, retrieve the current
+            identifier along with the value and the flags.
+        @type withIdentifier: C{bool}
+
+        @return: A deferred that will fire with the tuple (flags, value) if
+            C{withIdentifier} is C{False}, or (flags, cas identifier, value)
+            if C{True}.  If the server indicates there is no value
+            associated with C{key}, the returned value will be C{None} and
+            the returned flags will be C{0}.
+        @rtype: L{Deferred}
+        """
+        return self._get([key], withIdentifier, False)
+
+
+    def getMultiple(self, keys, withIdentifier=False):
+        """
+        Get the given list of C{keys}.  If C{withIdentifier} is set to C{True},
+        the command issued is a C{gets}, which will return the identifier
+        associated with each value. These identifiers have to be used when
+        issuing C{checkAndSet} updates later, using the corresponding method.
+
+        @param keys: The keys to retrieve.
+        @type keys: C{list} of C{str}
+
+        @param withIdentifier: If set to C{True}, retrieve the identifiers
+            along with the values and the flags.
+        @type withIdentifier: C{bool}
+
+        @return: A deferred that will fire with a dictionary with the elements
+            of C{keys} as keys and the tuples (flags, value) as values if
+            C{withIdentifier} is C{False}, or (flags, cas identifier, value) if
+            C{True}.  If the server indicates there is no value associated with
+            C{key}, the returned values will be C{None} and the returned flags
+            will be C{0}.
+        @rtype: L{Deferred}
+
+        @since: 9.0
+        """
+        return self._get(keys, withIdentifier, True)
+
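+    # Illustrative sketch only: with C{keys=["a", "b"]} the returned Deferred
+    # fires with a dict shaped like {"a": (flags, value), "b": (flags, value)}
+    # (or (flags, cas, value) tuples when C{withIdentifier} is C{True});
+    # unknown keys map to a value of C{None} and flags of 0.
+    #
+    #     d = proto.getMultiple(["a", "b"])
+    #     d.addCallback(lambda result: result["a"][1])
+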
+    def _get(self, keys, withIdentifier, multiple):
+        """
+        Helper method for C{get} and C{getMultiple}.
+        """
+        if self._disconnected:
+            return fail(RuntimeError("not connected"))
+        for key in keys:
+            if not isinstance(key, str):
+                return fail(ClientError(
+                    "Invalid type for key: %s, expecting a string" % (type(key),)))
+            if len(key) > self.MAX_KEY_LENGTH:
+                return fail(ClientError("Key too long"))
+        if withIdentifier:
+            cmd = "gets"
+        else:
+            cmd = "get"
+        fullcmd = "%s %s" % (cmd, " ".join(keys))
+        self.sendLine(fullcmd)
+        if multiple:
+            values = dict([(key, (0, "", None)) for key in keys])
+            cmdObj = Command(cmd, keys=keys, values=values, multiple=True)
+        else:
+            cmdObj = Command(cmd, key=keys[0], value=None, flags=0, cas="",
+                             multiple=False)
+        self._current.append(cmdObj)
+        return cmdObj._deferred
+
+    def stats(self, arg=None):
+        """
+        Get some stats from the server. They will be available as a dict.
+
+        @param arg: An optional additional string which will be sent along
+            with the I{stats} command.  The interpretation of this value by
+            the server is left undefined by the memcache protocol
+            specification.
+        @type arg: L{NoneType} or L{str}
+
+        @return: a deferred that will fire with a C{dict} of the available
+            statistics.
+        @rtype: L{Deferred}
+        """
+        if arg:
+            cmd = "stats " + arg
+        else:
+            cmd = "stats"
+        if self._disconnected:
+            return fail(RuntimeError("not connected"))
+        self.sendLine(cmd)
+        cmdObj = Command("stats", values={})
+        self._current.append(cmdObj)
+        return cmdObj._deferred
+
+
+    def version(self):
+        """
+        Get the version of the server.
+
+        @return: a deferred that will fire with the string value of the
+            version.
+        @rtype: L{Deferred}
+        """
+        if self._disconnected:
+            return fail(RuntimeError("not connected"))
+        self.sendLine("version")
+        cmdObj = Command("version")
+        self._current.append(cmdObj)
+        return cmdObj._deferred
+
+
+    def delete(self, key):
+        """
+        Delete an existing C{key}.
+
+        @param key: the key to delete.
+        @type key: C{str}
+
+        @return: a deferred that will be called back with C{True} if the key
+            was successfully deleted, or C{False} if not.
+        @rtype: L{Deferred}
+        """
+        if self._disconnected:
+            return fail(RuntimeError("not connected"))
+        if not isinstance(key, str):
+            return fail(ClientError(
+                "Invalid type for key: %s, expecting a string" % (type(key),)))
+        self.sendLine("delete %s" % key)
+        cmdObj = Command("delete", key=key)
+        self._current.append(cmdObj)
+        return cmdObj._deferred
+
+
+    def flushAll(self):
+        """
+        Flush all cached values.
+
+        @return: a deferred that will be called back with C{True} when the
+            operation has succeeded.
+        @rtype: L{Deferred}
+        """
+        if self._disconnected:
+            return fail(RuntimeError("not connected"))
+        self.sendLine("flush_all")
+        cmdObj = Command("flush_all")
+        self._current.append(cmdObj)
+        return cmdObj._deferred
+
+
+
+__all__ = ["MemCacheProtocol", "DEFAULT_PORT", "NoSuchCommand", "ClientError",
+           "ServerError"]
diff --git a/ThirdParty/Twisted/twisted/protocols/mice/__init__.py b/ThirdParty/Twisted/twisted/protocols/mice/__init__.py
new file mode 100644
index 0000000..fda89c5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/mice/__init__.py
@@ -0,0 +1 @@
+"""Mice Protocols."""
diff --git a/ThirdParty/Twisted/twisted/protocols/mice/mouseman.py b/ThirdParty/Twisted/twisted/protocols/mice/mouseman.py
new file mode 100644
index 0000000..4071b20
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/mice/mouseman.py
@@ -0,0 +1,127 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+"""Logictech MouseMan serial protocol.
+
+http://www.softnco.demon.co.uk/SerialMouse.txt
+"""
+
+from twisted.internet import protocol
+
+class MouseMan(protocol.Protocol):
+    """
+
+    Parser for Logitech MouseMan serial mouse protocol (compatible
+    with Microsoft Serial Mouse).
+
+    """
+
+    state = 'initial'
+
+    leftbutton=None
+    rightbutton=None
+    middlebutton=None
+
+    leftold=None
+    rightold=None
+    middleold=None
+
+    horiz=None
+    vert=None
+    horizold=None
+    vertold=None
+
+    def down_left(self):
+        pass
+
+    def up_left(self):
+        pass
+
+    def down_middle(self):
+        pass
+
+    def up_middle(self):
+        pass
+
+    def down_right(self):
+        pass
+
+    def up_right(self):
+        pass
+
+    def move(self, x, y):
+        pass
+
+    horiz=None
+    vert=None
+
+    def state_initial(self, byte):
+        if byte & 1<<6:
+            self.word1=byte
+            self.leftbutton = byte & 1<<5
+            self.rightbutton = byte & 1<<4
+            return 'horiz'
+        else:
+            return 'initial'
+
+    def state_horiz(self, byte):
+        if byte & 1<<6:
+            return self.state_initial(byte)
+        else:
+            x=(self.word1 & 0x03)<<6 | (byte & 0x3f)
+            if x>=128:
+                x=-256+x
+            self.horiz = x
+            return 'vert'
+
+    def state_vert(self, byte):
+        if byte & 1<<6:
+            # short packet
+            return self.state_initial(byte)
+        else:
+            x = (self.word1 & 0x0c)<<4 | (byte & 0x3f)
+            if x>=128:
+                x=-256+x
+            self.vert = x
+            self.snapshot()
+            return 'maybemiddle'
+
+    def state_maybemiddle(self, byte):
+        if byte & 1<<6:
+            self.snapshot()
+            return self.state_initial(byte)
+        else:
+            self.middlebutton=byte & 1<<5
+            self.snapshot()
+            return 'initial'
+
+    def snapshot(self):
+        if self.leftbutton and not self.leftold:
+            self.down_left()
+            self.leftold=1
+        if not self.leftbutton and self.leftold:
+            self.up_left()
+            self.leftold=0
+
+        if self.middlebutton and not self.middleold:
+            self.down_middle()
+            self.middleold=1
+        if not self.middlebutton and self.middleold:
+            self.up_middle()
+            self.middleold=0
+
+        if self.rightbutton and not self.rightold:
+            self.down_right()
+            self.rightold=1
+        if not self.rightbutton and self.rightold:
+            self.up_right()
+            self.rightold=0
+
+        if self.horiz or self.vert:
+            self.move(self.horiz, self.vert)
+
+    def dataReceived(self, data):
+        for c in data:
+            byte = ord(c)
+            self.state = getattr(self, 'state_'+self.state)(byte)
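+
+
+# An illustrative sketch, not part of the upstream module: subclass MouseMan,
+# override the event hooks of interest, and feed it raw serial bytes.  The
+# class and function names are made up; the three-byte packet below encodes
+# "left button down, x moved by +2, y unchanged".
+class _PrintingMouseMan(MouseMan):
+    def down_left(self):
+        print('left button pressed')
+
+    def move(self, x, y):
+        print('moved by (%d, %d)' % (x, y))
+
+
+def _exampleMousePacket():
+    mouse = _PrintingMouseMan()
+    # 0x60 = sync bit | left button, 0x02 = x low bits, 0x00 = y low bits;
+    # parsing this packet calls down_left() and move(2, 0).
+    mouse.dataReceived('\x60\x02\x00')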
diff --git a/ThirdParty/Twisted/twisted/protocols/pcp.py b/ThirdParty/Twisted/twisted/protocols/pcp.py
new file mode 100644
index 0000000..8970f90
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/pcp.py
@@ -0,0 +1,204 @@
+# -*- test-case-name: twisted.test.test_pcp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Producer-Consumer Proxy.
+"""
+
+from zope.interface import implements
+
+from twisted.internet import interfaces
+
+
+class BasicProducerConsumerProxy:
+    """
+    I can act as a man in the middle between any Producer and Consumer.
+
+    @ivar producer: the Producer I subscribe to.
+    @type producer: L{IProducer<interfaces.IProducer>}
+    @ivar consumer: the Consumer I publish to.
+    @type consumer: L{IConsumer<interfaces.IConsumer>}
+    @ivar paused: As a Producer, am I paused?
+    @type paused: bool
+    """
+    implements(interfaces.IProducer, interfaces.IConsumer)
+
+    consumer = None
+    producer = None
+    producerIsStreaming = None
+    iAmStreaming = True
+    outstandingPull = False
+    paused = False
+    stopped = False
+
+    def __init__(self, consumer):
+        self._buffer = []
+        if consumer is not None:
+            self.consumer = consumer
+            consumer.registerProducer(self, self.iAmStreaming)
+
+    # Producer methods:
+
+    def pauseProducing(self):
+        self.paused = True
+        if self.producer:
+            self.producer.pauseProducing()
+
+    def resumeProducing(self):
+        self.paused = False
+        if self._buffer:
+            # TODO: Check to see if consumer supports writeSeq.
+            self.consumer.write(''.join(self._buffer))
+            self._buffer[:] = []
+        else:
+            if not self.iAmStreaming:
+                self.outstandingPull = True
+
+        if self.producer is not None:
+            self.producer.resumeProducing()
+
+    def stopProducing(self):
+        if self.producer is not None:
+            self.producer.stopProducing()
+        if self.consumer is not None:
+            del self.consumer
+
+    # Consumer methods:
+
+    def write(self, data):
+        if self.paused or (not self.iAmStreaming and not self.outstandingPull):
+            # We could use that fifo queue here.
+            self._buffer.append(data)
+
+        elif self.consumer is not None:
+            self.consumer.write(data)
+            self.outstandingPull = False
+
+    def finish(self):
+        if self.consumer is not None:
+            self.consumer.finish()
+        self.unregisterProducer()
+
+    def registerProducer(self, producer, streaming):
+        self.producer = producer
+        self.producerIsStreaming = streaming
+
+    def unregisterProducer(self):
+        if self.producer is not None:
+            del self.producer
+            del self.producerIsStreaming
+        if self.consumer:
+            self.consumer.unregisterProducer()
+
+    def __repr__(self):
+        return '<%s@%x around %s>' % (self.__class__, id(self), self.consumer)
+
+
+class ProducerConsumerProxy(BasicProducerConsumerProxy):
+    """ProducerConsumerProxy with a finite buffer.
+
+    When my buffer fills up, I have my parent Producer pause until my buffer
+    has room in it again.
+    """
+    # Copies much from abstract.FileDescriptor
+    bufferSize = 2**2**2**2  # 2 ** (2 ** (2 ** 2)) == 65536 bytes
+
+    producerPaused = False
+    unregistered = False
+
+    def pauseProducing(self):
+        # Does *not* call up to ProducerConsumerProxy to relay the pause
+        # message through to my parent Producer.
+        self.paused = True
+
+    def resumeProducing(self):
+        self.paused = False
+        if self._buffer:
+            data = ''.join(self._buffer)
+            bytesSent = self._writeSomeData(data)
+            if bytesSent < len(data):
+                unsent = data[bytesSent:]
+                assert not self.iAmStreaming, (
+                    "Streaming producer did not write all its data.")
+                self._buffer[:] = [unsent]
+            else:
+                self._buffer[:] = []
+        else:
+            bytesSent = 0
+
+        if (self.unregistered and bytesSent and not self._buffer and
+            self.consumer is not None):
+            self.consumer.unregisterProducer()
+
+        if not self.iAmStreaming:
+            self.outstandingPull = not bytesSent
+
+        if self.producer is not None:
+            bytesBuffered = sum([len(s) for s in self._buffer])
+            # TODO: You can see here the potential for high and low
+            # watermarks, where bufferSize would be the high mark when we
+            # ask the upstream producer to pause, and we wouldn't have
+            # it resume again until it hit the low mark.  Or if producer
+            # is Pull, maybe we'd like to pull from it as much as necessary
+            # to keep our buffer full to the low mark, so we're never caught
+            # without something to send.
+            if self.producerPaused and (bytesBuffered < self.bufferSize):
+                # Now that our buffer is empty,
+                self.producerPaused = False
+                self.producer.resumeProducing()
+            elif self.outstandingPull:
+                # I did not have any data to write in response to a pull,
+                # so I'd better pull some myself.
+                self.producer.resumeProducing()
+
+    def write(self, data):
+        if self.paused or (not self.iAmStreaming and not self.outstandingPull):
+            # We could use that fifo queue here.
+            self._buffer.append(data)
+
+        elif self.consumer is not None:
+            assert not self._buffer, (
+                "Writing fresh data to consumer before my buffer is empty!")
+            # I'm going to use _writeSomeData here so that there is only one
+            # path to self.consumer.write.  But it doesn't actually make sense,
+            # if I am streaming, for some data to not be all data.  But maybe I
+            # am not streaming, but I am writing here anyway, because there was
+            # an earlier request for data which was not answered.
+            bytesSent = self._writeSomeData(data)
+            self.outstandingPull = False
+            if not bytesSent == len(data):
+                assert not self.iAmStreaming, (
+                    "Streaming producer did not write all its data.")
+                self._buffer.append(data[bytesSent:])
+
+        if (self.producer is not None) and self.producerIsStreaming:
+            bytesBuffered = sum([len(s) for s in self._buffer])
+            if bytesBuffered >= self.bufferSize:
+
+                self.producer.pauseProducing()
+                self.producerPaused = True
+
+    def registerProducer(self, producer, streaming):
+        self.unregistered = False
+        BasicProducerConsumerProxy.registerProducer(self, producer, streaming)
+        if not streaming:
+            producer.resumeProducing()
+
+    def unregisterProducer(self):
+        if self.producer is not None:
+            del self.producer
+            del self.producerIsStreaming
+        self.unregistered = True
+        if self.consumer and not self._buffer:
+            self.consumer.unregisterProducer()
+
+    def _writeSomeData(self, data):
+        """Write as much of this data as possible.
+
+        @returns: The number of bytes written.
+        """
+        if self.consumer is None:
+            return 0
+        self.consumer.write(data)
+        return len(data)
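+
+
+# An illustrative sketch, not part of the upstream module: a trivial in-memory
+# consumer placed behind a ProducerConsumerProxy.  Data written to the proxy
+# while it is paused is buffered and only flushed once the proxy is resumed.
+# All names below are made up for the example.
+def _exampleProxyBuffering():
+    class _ListConsumer:
+        def __init__(self):
+            self.chunks = []
+            self.producer = None
+
+        def registerProducer(self, producer, streaming):
+            self.producer = producer
+
+        def unregisterProducer(self):
+            self.producer = None
+
+        def write(self, data):
+            self.chunks.append(data)
+
+    consumer = _ListConsumer()
+    proxy = ProducerConsumerProxy(consumer)  # registers itself as the producer
+    proxy.pauseProducing()                   # e.g. the consumer applied back-pressure
+    proxy.write('spam')                      # buffered, not delivered yet
+    proxy.resumeProducing()                  # flushes the buffer to the consumer
+    return consumer.chunks                   # == ['spam']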
diff --git a/ThirdParty/Twisted/twisted/protocols/policies.py b/ThirdParty/Twisted/twisted/protocols/policies.py
new file mode 100644
index 0000000..b9c0661
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/policies.py
@@ -0,0 +1,727 @@
+# -*- test-case-name: twisted.test.test_policies -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Resource limiting policies.
+
+@seealso: See also L{twisted.protocols.htb} for rate limiting.
+"""
+
+from __future__ import division, absolute_import
+
+# system imports
+import sys, operator
+
+from zope.interface import directlyProvides, providedBy
+
+# twisted imports
+from twisted.internet.protocol import ServerFactory, Protocol, ClientFactory
+from twisted.internet import error
+from twisted.internet.interfaces import ILoggingContext
+from twisted.python import log
+
+
+def _wrappedLogPrefix(wrapper, wrapped):
+    """
+    Compute a log prefix for a wrapper and the object it wraps.
+
+    @rtype: C{str}
+    """
+    if ILoggingContext.providedBy(wrapped):
+        logPrefix = wrapped.logPrefix()
+    else:
+        logPrefix = wrapped.__class__.__name__
+    return "%s (%s)" % (logPrefix, wrapper.__class__.__name__)
+
+
+
+class ProtocolWrapper(Protocol):
+    """
+    Wraps protocol instances and acts as their transport as well.
+
+    @ivar wrappedProtocol: An L{IProtocol<twisted.internet.interfaces.IProtocol>}
+        provider to which L{IProtocol<twisted.internet.interfaces.IProtocol>}
+        method calls onto this L{ProtocolWrapper} will be proxied.
+
+    @ivar factory: The L{WrappingFactory} which created this
+        L{ProtocolWrapper}.
+    """
+
+    disconnecting = 0
+
+    def __init__(self, factory, wrappedProtocol):
+        self.wrappedProtocol = wrappedProtocol
+        self.factory = factory
+
+
+    def logPrefix(self):
+        """
+        Use a customized log prefix mentioning both the wrapped protocol and
+        the current one.
+        """
+        return _wrappedLogPrefix(self, self.wrappedProtocol)
+
+
+    def makeConnection(self, transport):
+        """
+        When a connection is made, register this wrapper with its factory,
+        save the real transport, and connect the wrapped protocol to this
+        L{ProtocolWrapper} to intercept any transport calls it makes.
+        """
+        directlyProvides(self, providedBy(transport))
+        Protocol.makeConnection(self, transport)
+        self.factory.registerProtocol(self)
+        self.wrappedProtocol.makeConnection(self)
+
+
+    # Transport relaying
+
+    def write(self, data):
+        self.transport.write(data)
+
+
+    def writeSequence(self, data):
+        self.transport.writeSequence(data)
+
+
+    def loseConnection(self):
+        self.disconnecting = 1
+        self.transport.loseConnection()
+
+
+    def getPeer(self):
+        return self.transport.getPeer()
+
+
+    def getHost(self):
+        return self.transport.getHost()
+
+
+    def registerProducer(self, producer, streaming):
+        self.transport.registerProducer(producer, streaming)
+
+
+    def unregisterProducer(self):
+        self.transport.unregisterProducer()
+
+
+    def stopConsuming(self):
+        self.transport.stopConsuming()
+
+
+    def __getattr__(self, name):
+        return getattr(self.transport, name)
+
+
+    # Protocol relaying
+
+    def dataReceived(self, data):
+        self.wrappedProtocol.dataReceived(data)
+
+
+    def connectionLost(self, reason):
+        self.factory.unregisterProtocol(self)
+        self.wrappedProtocol.connectionLost(reason)
+
+
+
+class WrappingFactory(ClientFactory):
+    """
+    Wraps a factory and its protocols, and keeps track of them.
+    """
+
+    protocol = ProtocolWrapper
+
+    def __init__(self, wrappedFactory):
+        self.wrappedFactory = wrappedFactory
+        self.protocols = {}
+
+
+    def logPrefix(self):
+        """
+        Generate a log prefix mentioning both the wrapped factory and this one.
+        """
+        return _wrappedLogPrefix(self, self.wrappedFactory)
+
+
+    def doStart(self):
+        self.wrappedFactory.doStart()
+        ClientFactory.doStart(self)
+
+
+    def doStop(self):
+        self.wrappedFactory.doStop()
+        ClientFactory.doStop(self)
+
+
+    def startedConnecting(self, connector):
+        self.wrappedFactory.startedConnecting(connector)
+
+
+    def clientConnectionFailed(self, connector, reason):
+        self.wrappedFactory.clientConnectionFailed(connector, reason)
+
+
+    def clientConnectionLost(self, connector, reason):
+        self.wrappedFactory.clientConnectionLost(connector, reason)
+
+
+    def buildProtocol(self, addr):
+        return self.protocol(self, self.wrappedFactory.buildProtocol(addr))
+
+
+    def registerProtocol(self, p):
+        """
+        Called by protocol to register itself.
+        """
+        self.protocols[p] = 1
+
+
+    def unregisterProtocol(self, p):
+        """
+        Called by protocols when they go away.
+        """
+        del self.protocols[p]
+
+
+
+class ThrottlingProtocol(ProtocolWrapper):
+    """Protocol for ThrottlingFactory."""
+
+    # wrap API for tracking bandwidth
+
+    def write(self, data):
+        self.factory.registerWritten(len(data))
+        ProtocolWrapper.write(self, data)
+
+    def writeSequence(self, seq):
+        self.factory.registerWritten(reduce(operator.add, map(len, seq)))
+        ProtocolWrapper.writeSequence(self, seq)
+
+    def dataReceived(self, data):
+        self.factory.registerRead(len(data))
+        ProtocolWrapper.dataReceived(self, data)
+
+    def registerProducer(self, producer, streaming):
+        self.producer = producer
+        ProtocolWrapper.registerProducer(self, producer, streaming)
+
+    def unregisterProducer(self):
+        del self.producer
+        ProtocolWrapper.unregisterProducer(self)
+
+
+    def throttleReads(self):
+        self.transport.pauseProducing()
+
+    def unthrottleReads(self):
+        self.transport.resumeProducing()
+
+    def throttleWrites(self):
+        if hasattr(self, "producer"):
+            self.producer.pauseProducing()
+
+    def unthrottleWrites(self):
+        if hasattr(self, "producer"):
+            self.producer.resumeProducing()
+
+
+class ThrottlingFactory(WrappingFactory):
+    """
+    Throttles bandwidth and number of connections.
+
+    Write bandwidth will only be throttled if there is a producer
+    registered.
+    """
+
+    protocol = ThrottlingProtocol
+
+    def __init__(self, wrappedFactory, maxConnectionCount=sys.maxsize,
+                 readLimit=None, writeLimit=None):
+        WrappingFactory.__init__(self, wrappedFactory)
+        self.connectionCount = 0
+        self.maxConnectionCount = maxConnectionCount
+        self.readLimit = readLimit # max bytes we should read per second
+        self.writeLimit = writeLimit # max bytes we should write per second
+        self.readThisSecond = 0
+        self.writtenThisSecond = 0
+        self.unthrottleReadsID = None
+        self.checkReadBandwidthID = None
+        self.unthrottleWritesID = None
+        self.checkWriteBandwidthID = None
+
+
+    def callLater(self, period, func):
+        """
+        Wrapper around L{reactor.callLater} for test purpose.
+        """
+        from twisted.internet import reactor
+        return reactor.callLater(period, func)
+
+
+    def registerWritten(self, length):
+        """
+        Called by protocol to tell us more bytes were written.
+        """
+        self.writtenThisSecond += length
+
+
+    def registerRead(self, length):
+        """
+        Called by protocol to tell us more bytes were read.
+        """
+        self.readThisSecond += length
+
+
+    def checkReadBandwidth(self):
+        """
+        Checks if we've passed bandwidth limits.
+        """
+        if self.readThisSecond > self.readLimit:
+            self.throttleReads()
+            throttleTime = (float(self.readThisSecond) / self.readLimit) - 1.0
+            self.unthrottleReadsID = self.callLater(throttleTime,
+                                                    self.unthrottleReads)
+        self.readThisSecond = 0
+        self.checkReadBandwidthID = self.callLater(1, self.checkReadBandwidth)
+
+
+    def checkWriteBandwidth(self):
+        if self.writtenThisSecond > self.writeLimit:
+            self.throttleWrites()
+            throttleTime = (float(self.writtenThisSecond) / self.writeLimit) - 1.0
+            self.unthrottleWritesID = self.callLater(throttleTime,
+                                                        self.unthrottleWrites)
+        # reset for next round
+        self.writtenThisSecond = 0
+        self.checkWriteBandwidthID = self.callLater(1, self.checkWriteBandwidth)
+
+
+    def throttleReads(self):
+        """
+        Throttle reads on all protocols.
+        """
+        log.msg("Throttling reads on %s" % self)
+        for p in self.protocols.keys():
+            p.throttleReads()
+
+
+    def unthrottleReads(self):
+        """
+        Stop throttling reads on all protocols.
+        """
+        self.unthrottleReadsID = None
+        log.msg("Stopped throttling reads on %s" % self)
+        for p in self.protocols.keys():
+            p.unthrottleReads()
+
+
+    def throttleWrites(self):
+        """
+        Throttle writes on all protocols.
+        """
+        log.msg("Throttling writes on %s" % self)
+        for p in self.protocols.keys():
+            p.throttleWrites()
+
+
+    def unthrottleWrites(self):
+        """
+        Stop throttling writes on all protocols.
+        """
+        self.unthrottleWritesID = None
+        log.msg("Stopped throttling writes on %s" % self)
+        for p in self.protocols.keys():
+            p.unthrottleWrites()
+
+
+    def buildProtocol(self, addr):
+        if self.connectionCount == 0:
+            if self.readLimit is not None:
+                self.checkReadBandwidth()
+            if self.writeLimit is not None:
+                self.checkWriteBandwidth()
+
+        if self.connectionCount < self.maxConnectionCount:
+            self.connectionCount += 1
+            return WrappingFactory.buildProtocol(self, addr)
+        else:
+            log.msg("Max connection count reached!")
+            return None
+
+
+    def unregisterProtocol(self, p):
+        WrappingFactory.unregisterProtocol(self, p)
+        self.connectionCount -= 1
+        if self.connectionCount == 0:
+            if self.unthrottleReadsID is not None:
+                self.unthrottleReadsID.cancel()
+            if self.checkReadBandwidthID is not None:
+                self.checkReadBandwidthID.cancel()
+            if self.unthrottleWritesID is not None:
+                self.unthrottleWritesID.cancel()
+            if self.checkWriteBandwidthID is not None:
+                self.checkWriteBandwidthID.cancel()
+
+
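+# A hedged usage sketch, not part of the upstream module: wrap an existing
+# server factory so that it accepts at most 100 concurrent connections and
+# throttles traffic to roughly 10 kB/s in each direction.  The echo factory
+# and port number are illustrative only.
+#
+#     from twisted.internet import protocol, reactor
+#     from twisted.protocols import policies, wire
+#
+#     echoFactory = protocol.ServerFactory()
+#     echoFactory.protocol = wire.Echo
+#     throttled = policies.ThrottlingFactory(
+#         echoFactory, maxConnectionCount=100,
+#         readLimit=10000, writeLimit=10000)
+#     reactor.listenTCP(8000, throttled)
+#     reactor.run()
+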
+
+class SpewingProtocol(ProtocolWrapper):
+    def dataReceived(self, data):
+        log.msg("Received: %r" % data)
+        ProtocolWrapper.dataReceived(self,data)
+
+    def write(self, data):
+        log.msg("Sending: %r" % data)
+        ProtocolWrapper.write(self,data)
+
+
+
+class SpewingFactory(WrappingFactory):
+    protocol = SpewingProtocol
+
+
+
+class LimitConnectionsByPeer(WrappingFactory):
+
+    maxConnectionsPerPeer = 5
+
+    def startFactory(self):
+        self.peerConnections = {}
+
+    def buildProtocol(self, addr):
+        peerHost = addr[0]
+        connectionCount = self.peerConnections.get(peerHost, 0)
+        if connectionCount >= self.maxConnectionsPerPeer:
+            return None
+        self.peerConnections[peerHost] = connectionCount + 1
+        return WrappingFactory.buildProtocol(self, addr)
+
+    def unregisterProtocol(self, p):
+        peerHost = p.getPeer()[1]
+        self.peerConnections[peerHost] -= 1
+        if self.peerConnections[peerHost] == 0:
+            del self.peerConnections[peerHost]
+
+
+class LimitTotalConnectionsFactory(ServerFactory):
+    """
+    Factory that limits the number of simultaneous connections.
+
+    @type connectionCount: C{int}
+    @ivar connectionCount: number of current connections.
+    @type connectionLimit: C{int} or C{None}
+    @cvar connectionLimit: maximum number of connections.
+    @type overflowProtocol: L{Protocol} or C{None}
+    @cvar overflowProtocol: Protocol to use for new connections when
+        connectionLimit is exceeded.  If C{None} (the default value), excess
+        connections will be closed immediately.
+    """
+    connectionCount = 0
+    connectionLimit = None
+    overflowProtocol = None
+
+    def buildProtocol(self, addr):
+        if (self.connectionLimit is None or
+            self.connectionCount < self.connectionLimit):
+                # Build the normal protocol
+                wrappedProtocol = self.protocol()
+        elif self.overflowProtocol is None:
+            # Just drop the connection
+            return None
+        else:
+            # Too many connections, so build the overflow protocol
+            wrappedProtocol = self.overflowProtocol()
+
+        wrappedProtocol.factory = self
+        protocol = ProtocolWrapper(self, wrappedProtocol)
+        self.connectionCount += 1
+        return protocol
+
+    def registerProtocol(self, p):
+        pass
+
+    def unregisterProtocol(self, p):
+        self.connectionCount -= 1
+
+
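+# A hedged usage sketch, not part of the upstream module: serve at most two
+# clients at a time and politely turn away any extra ones.  SomeProtocol is a
+# placeholder for the real protocol class; everything else is illustrative.
+#
+#     from twisted.internet import protocol, reactor
+#     from twisted.protocols import policies
+#
+#     class Busy(protocol.Protocol):
+#         def connectionMade(self):
+#             self.transport.write(b"server is full\r\n")
+#             self.transport.loseConnection()
+#
+#     factory = policies.LimitTotalConnectionsFactory()
+#     factory.protocol = SomeProtocol
+#     factory.connectionLimit = 2
+#     factory.overflowProtocol = Busy
+#     reactor.listenTCP(8000, factory)
+#     reactor.run()
+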
+
+class TimeoutProtocol(ProtocolWrapper):
+    """
+    Protocol that automatically disconnects when the connection is idle.
+    """
+
+    def __init__(self, factory, wrappedProtocol, timeoutPeriod):
+        """
+        Constructor.
+
+        @param factory: An L{IFactory}.
+        @param wrappedProtocol: A L{Protocol} to wrap.
+        @param timeoutPeriod: Number of seconds to wait for activity before
+            timing out.
+        """
+        ProtocolWrapper.__init__(self, factory, wrappedProtocol)
+        self.timeoutCall = None
+        self.setTimeout(timeoutPeriod)
+
+
+    def setTimeout(self, timeoutPeriod=None):
+        """
+        Set a timeout.
+
+        This will cancel any existing timeouts.
+
+        @param timeoutPeriod: If not C{None}, change the timeout period.
+            Otherwise, use the existing value.
+        """
+        self.cancelTimeout()
+        if timeoutPeriod is not None:
+            self.timeoutPeriod = timeoutPeriod
+        self.timeoutCall = self.factory.callLater(self.timeoutPeriod, self.timeoutFunc)
+
+
+    def cancelTimeout(self):
+        """
+        Cancel the timeout.
+
+        If the timeout was already cancelled, this does nothing.
+        """
+        if self.timeoutCall:
+            try:
+                self.timeoutCall.cancel()
+            except error.AlreadyCalled:
+                pass
+            self.timeoutCall = None
+
+
+    def resetTimeout(self):
+        """
+        Reset the timeout, usually because some activity just happened.
+        """
+        if self.timeoutCall:
+            self.timeoutCall.reset(self.timeoutPeriod)
+
+
+    def write(self, data):
+        self.resetTimeout()
+        ProtocolWrapper.write(self, data)
+
+
+    def writeSequence(self, seq):
+        self.resetTimeout()
+        ProtocolWrapper.writeSequence(self, seq)
+
+
+    def dataReceived(self, data):
+        self.resetTimeout()
+        ProtocolWrapper.dataReceived(self, data)
+
+
+    def connectionLost(self, reason):
+        self.cancelTimeout()
+        ProtocolWrapper.connectionLost(self, reason)
+
+
+    def timeoutFunc(self):
+        """
+        This method is called when the timeout is triggered.
+
+        By default it calls L{loseConnection}.  Override this if you want
+        something else to happen.
+        """
+        self.loseConnection()
+
+
+
+class TimeoutFactory(WrappingFactory):
+    """
+    Factory for L{TimeoutProtocol}.
+    """
+    protocol = TimeoutProtocol
+
+
+    def __init__(self, wrappedFactory, timeoutPeriod=30*60):
+        self.timeoutPeriod = timeoutPeriod
+        WrappingFactory.__init__(self, wrappedFactory)
+
+
+    def buildProtocol(self, addr):
+        return self.protocol(self, self.wrappedFactory.buildProtocol(addr),
+                             timeoutPeriod=self.timeoutPeriod)
+
+
+    def callLater(self, period, func):
+        """
+        Wrapper around L{reactor.callLater} for test purposes.
+        """
+        from twisted.internet import reactor
+        return reactor.callLater(period, func)
+
+
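TimeoutFactory, by contrast, wraps an existing factory. A minimal sketch, with a made-up protocol and port:

    from twisted.internet import protocol, reactor
    from twisted.protocols import policies

    class EchoProtocol(protocol.Protocol):
        def dataReceived(self, data):
            self.transport.write(data)

    wrapped = protocol.ServerFactory()
    wrapped.protocol = EchoProtocol
    # disconnect clients that stay idle for more than ten minutes
    reactor.listenTCP(8007, policies.TimeoutFactory(wrapped, timeoutPeriod=600))
    reactor.run()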
+
+class TrafficLoggingProtocol(ProtocolWrapper):
+
+    def __init__(self, factory, wrappedProtocol, logfile, lengthLimit=None,
+                 number=0):
+        """
+        @param factory: factory which created this protocol.
+        @type factory: C{protocol.Factory}.
+        @param wrappedProtocol: the underlying protocol.
+        @type wrappedProtocol: C{protocol.Protocol}.
+        @param logfile: file opened for writing used to write log messages.
+        @type logfile: C{file}
+        @param lengthLimit: maximum size of a logged data chunk; longer chunks are elided.
+        @type lengthLimit: C{int}
+        @param number: identifier of the connection.
+        @type number: C{int}.
+        """
+        ProtocolWrapper.__init__(self, factory, wrappedProtocol)
+        self.logfile = logfile
+        self.lengthLimit = lengthLimit
+        self._number = number
+
+
+    def _log(self, line):
+        self.logfile.write(line + '\n')
+        self.logfile.flush()
+
+
+    def _mungeData(self, data):
+        if self.lengthLimit and len(data) > self.lengthLimit:
+            data = data[:self.lengthLimit - 12] + '<... elided>'
+        return data
+
+
+    # IProtocol
+    def connectionMade(self):
+        self._log('*')
+        return ProtocolWrapper.connectionMade(self)
+
+
+    def dataReceived(self, data):
+        self._log('C %d: %r' % (self._number, self._mungeData(data)))
+        return ProtocolWrapper.dataReceived(self, data)
+
+
+    def connectionLost(self, reason):
+        self._log('C %d: %r' % (self._number, reason))
+        return ProtocolWrapper.connectionLost(self, reason)
+
+
+    # ITransport
+    def write(self, data):
+        self._log('S %d: %r' % (self._number, self._mungeData(data)))
+        return ProtocolWrapper.write(self, data)
+
+
+    def writeSequence(self, iovec):
+        self._log('SV %d: %r' % (self._number, [self._mungeData(d) for d in iovec]))
+        return ProtocolWrapper.writeSequence(self, iovec)
+
+
+    def loseConnection(self):
+        self._log('S %d: *' % (self._number,))
+        return ProtocolWrapper.loseConnection(self)
+
+
+
+class TrafficLoggingFactory(WrappingFactory):
+    protocol = TrafficLoggingProtocol
+
+    _counter = 0
+
+    def __init__(self, wrappedFactory, logfilePrefix, lengthLimit=None):
+        self.logfilePrefix = logfilePrefix
+        self.lengthLimit = lengthLimit
+        WrappingFactory.__init__(self, wrappedFactory)
+
+
+    def open(self, name):
+        return file(name, 'w')
+
+
+    def buildProtocol(self, addr):
+        self._counter += 1
+        logfile = self.open(self.logfilePrefix + '-' + str(self._counter))
+        return self.protocol(self, self.wrappedFactory.buildProtocol(addr),
+                             logfile, self.lengthLimit, self._counter)
+
+
+    def resetCounter(self):
+        """
+        Reset the value of the counter used to identify connections.
+        """
+        self._counter = 0
+
+
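TrafficLoggingFactory follows the same wrapping pattern; each connection gets its own numbered log file. A sketch with an invented prefix and port:

    from twisted.internet import protocol, reactor
    from twisted.protocols import policies

    wrapped = protocol.ServerFactory()
    wrapped.protocol = protocol.Protocol      # accept connections, ignore traffic
    # logs go to echo-log-1, echo-log-2, ...; chunks over 1024 bytes are elided
    logged = policies.TrafficLoggingFactory(wrapped, 'echo-log', lengthLimit=1024)
    reactor.listenTCP(8007, logged)
    reactor.run()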
+
+class TimeoutMixin:
+    """
+    Mixin for protocols which wish to time out connections.
+
+    Protocols that mix this in have a single timeout, set using L{setTimeout}.
+    When the timeout is hit, L{timeoutConnection} is called, which, by
+    default, closes the connection.
+
+    @cvar timeOut: The number of seconds after which to time out the connection.
+    """
+    timeOut = None
+
+    __timeoutCall = None
+
+    def callLater(self, period, func):
+        """
+        Wrapper around L{reactor.callLater} for test purposes.
+        """
+        from twisted.internet import reactor
+        return reactor.callLater(period, func)
+
+
+    def resetTimeout(self):
+        """
+        Reset the timeout count down.
+
+        If the connection has already timed out, then do nothing.  If the
+        timeout has been cancelled (probably using C{setTimeout(None)}), also
+        do nothing.
+
+        It's often a good idea to call this when the protocol has received
+        some meaningful input from the other end of the connection.  "I've got
+        some data, they're still there, reset the timeout".
+        """
+        if self.__timeoutCall is not None and self.timeOut is not None:
+            self.__timeoutCall.reset(self.timeOut)
+
+    def setTimeout(self, period):
+        """
+        Change the timeout period.
+
+        @type period: C{int} or C{NoneType}
+        @param period: The period, in seconds, to change the timeout to, or
+        C{None} to disable the timeout.
+        """
+        prev = self.timeOut
+        self.timeOut = period
+
+        if self.__timeoutCall is not None:
+            if period is None:
+                self.__timeoutCall.cancel()
+                self.__timeoutCall = None
+            else:
+                self.__timeoutCall.reset(period)
+        elif period is not None:
+            self.__timeoutCall = self.callLater(period, self.__timedOut)
+
+        return prev
+
+    def __timedOut(self):
+        self.__timeoutCall = None
+        self.timeoutConnection()
+
+    def timeoutConnection(self):
+        """
+        Called when the connection times out.
+
+        Override to define behavior other than dropping the connection.
+        """
+        self.transport.loseConnection()
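Unlike the wrappers above, TimeoutMixin is mixed directly into a protocol class (PostfixTCPMapServer later in this patch does exactly that). A minimal sketch with a hypothetical line-based protocol:

    from twisted.protocols import basic, policies

    class IdleLineEcho(basic.LineReceiver, policies.TimeoutMixin):
        def connectionMade(self):
            self.setTimeout(30)        # drop clients idle for 30 seconds

        def lineReceived(self, line):
            self.resetTimeout()        # activity seen, restart the countdown
            self.sendLine(line)

        def connectionLost(self, reason):
            self.setTimeout(None)      # cancel the pending timeout call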
diff --git a/ThirdParty/Twisted/twisted/protocols/portforward.py b/ThirdParty/Twisted/twisted/protocols/portforward.py
new file mode 100644
index 0000000..626d5aa
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/portforward.py
@@ -0,0 +1,87 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A simple port forwarder.
+"""
+
+# Twisted imports
+from twisted.internet import protocol
+from twisted.python import log
+
+class Proxy(protocol.Protocol):
+    noisy = True
+
+    peer = None
+
+    def setPeer(self, peer):
+        self.peer = peer
+
+    def connectionLost(self, reason):
+        if self.peer is not None:
+            self.peer.transport.loseConnection()
+            self.peer = None
+        elif self.noisy:
+            log.msg("Unable to connect to peer: %s" % (reason,))
+
+    def dataReceived(self, data):
+        self.peer.transport.write(data)
+
+class ProxyClient(Proxy):
+    def connectionMade(self):
+        self.peer.setPeer(self)
+
+        # Wire this and the peer transport together to enable
+        # flow control (this stops connections from filling
+        # this proxy's memory when one side produces data at a
+        # higher rate than the other can consume).
+        self.transport.registerProducer(self.peer.transport, True)
+        self.peer.transport.registerProducer(self.transport, True)
+
+        # We're connected, everybody can read to their hearts' content.
+        self.peer.transport.resumeProducing()
+
+class ProxyClientFactory(protocol.ClientFactory):
+
+    protocol = ProxyClient
+
+    def setServer(self, server):
+        self.server = server
+
+    def buildProtocol(self, *args, **kw):
+        prot = protocol.ClientFactory.buildProtocol(self, *args, **kw)
+        prot.setPeer(self.server)
+        return prot
+
+    def clientConnectionFailed(self, connector, reason):
+        self.server.transport.loseConnection()
+
+
+class ProxyServer(Proxy):
+
+    clientProtocolFactory = ProxyClientFactory
+    reactor = None
+
+    def connectionMade(self):
+        # Don't read anything from the connecting client until we have
+        # somewhere to send it to.
+        self.transport.pauseProducing()
+
+        client = self.clientProtocolFactory()
+        client.setServer(self)
+
+        if self.reactor is None:
+            from twisted.internet import reactor
+            self.reactor = reactor
+        self.reactor.connectTCP(self.factory.host, self.factory.port, client)
+
+
+class ProxyFactory(protocol.Factory):
+    """Factory for port forwarder."""
+
+    protocol = ProxyServer
+
+    def __init__(self, host, port):
+        self.host = host
+        self.port = port
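The whole module is driven through ProxyFactory. A sketch of forwarding a local port to a remote host; the addresses and ports are placeholders:

    from twisted.internet import reactor
    from twisted.protocols import portforward

    # relay everything arriving on local port 8080 to example.org:80
    reactor.listenTCP(8080, portforward.ProxyFactory('example.org', 80))
    reactor.run()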
diff --git a/ThirdParty/Twisted/twisted/protocols/postfix.py b/ThirdParty/Twisted/twisted/protocols/postfix.py
new file mode 100644
index 0000000..7a2079d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/postfix.py
@@ -0,0 +1,112 @@
+# -*- test-case-name: twisted.test.test_postfix -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Postfix mail transport agent related protocols.
+"""
+
+import sys
+import UserDict
+import urllib
+
+from twisted.protocols import basic
+from twisted.protocols import policies
+from twisted.internet import protocol, defer
+from twisted.python import log
+
+# urllib's quote functions just happen to match
+# the postfix semantics.
+def quote(s):
+    return urllib.quote(s)
+
+def unquote(s):
+    return urllib.unquote(s)
+
+class PostfixTCPMapServer(basic.LineReceiver, policies.TimeoutMixin):
+    """Postfix mail transport agent TCP map protocol implementation.
+
+    Receives requests for data matching a given key via lineReceived,
+    asks its factory for the data with self.factory.get(key), and
+    returns the data to the requester. None means no entry was found.
+
+    You can use postfix's postmap to test the map service::
+
+    /usr/sbin/postmap -q KEY tcp:localhost:4242
+
+    """
+
+    timeout = 600
+    delimiter = '\n'
+
+    def connectionMade(self):
+        self.setTimeout(self.timeout)
+
+    def sendCode(self, code, message=''):
+        "Send an SMTP-like code with a message."
+        self.sendLine('%3.3d %s' % (code, message or ''))
+
+    def lineReceived(self, line):
+        self.resetTimeout()
+        try:
+            request, params = line.split(None, 1)
+        except ValueError:
+            request = line
+            params = None
+        try:
+            f = getattr(self, 'do_' + request)
+        except AttributeError:
+            self.sendCode(400, 'unknown command')
+        else:
+            try:
+                f(params)
+            except:
+                self.sendCode(400, 'Command %r failed: %s.' % (request, sys.exc_info()[1]))
+
+    def do_get(self, key):
+        if key is None:
+            self.sendCode(400, 'Command %r takes 1 parameters.' % 'get')
+        else:
+            d = defer.maybeDeferred(self.factory.get, key)
+            d.addCallbacks(self._cbGot, self._cbNot)
+            d.addErrback(log.err)
+
+    def _cbNot(self, fail):
+        self.sendCode(400, fail.getErrorMessage())
+
+    def _cbGot(self, value):
+        if value is None:
+            self.sendCode(500)
+        else:
+            self.sendCode(200, quote(value))
+
+    def do_put(self, keyAndValue):
+        if keyAndValue is None:
+            self.sendCode(400, 'Command %r takes 2 parameters.' % 'put')
+        else:
+            try:
+                key, value = keyAndValue.split(None, 1)
+            except ValueError:
+                self.sendCode(400, 'Command %r takes 2 parameters.' % 'put')
+            else:
+                self.sendCode(500, 'put is not implemented yet.')
+
+
+class PostfixTCPMapDictServerFactory(protocol.ServerFactory,
+                                     UserDict.UserDict):
+    """An in-memory dictionary factory for PostfixTCPMapServer."""
+
+    protocol = PostfixTCPMapServer
+
+class PostfixTCPMapDeferringDictServerFactory(protocol.ServerFactory):
+    """An in-memory dictionary factory for PostfixTCPMapServer."""
+
+    protocol = PostfixTCPMapServer
+
+    def __init__(self, data=None):
+        self.data = {}
+        if data is not None:
+            self.data.update(data)
+
+    def get(self, key):
+        return defer.succeed(self.data.get(key))
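A sketch of serving a static map with the deferring dictionary factory and querying it with postmap, as the class docstring above suggests; the key, value and port are examples:

    from twisted.internet import reactor
    from twisted.protocols import postfix

    table = {'alias@example.org': 'user@example.org'}
    reactor.listenTCP(4242, postfix.PostfixTCPMapDeferringDictServerFactory(data=table))
    reactor.run()
    # then, from a shell:  postmap -q alias@example.org tcp:localhost:4242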
diff --git a/ThirdParty/Twisted/twisted/protocols/shoutcast.py b/ThirdParty/Twisted/twisted/protocols/shoutcast.py
new file mode 100644
index 0000000..317d5e8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/shoutcast.py
@@ -0,0 +1,111 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Chop up a Shoutcast stream into MP3 data and metadata, if available.
+"""
+
+from twisted.web import http
+from twisted import copyright
+
+
+class ShoutcastClient(http.HTTPClient):
+    """
+    Shoutcast HTTP stream.
+
+    Modes can be 'length', 'meta' and 'mp3'.
+
+    See U{http://www.smackfu.com/stuff/programming/shoutcast.html}
+    for details on the protocol.
+    """
+
+    userAgent = "Twisted Shoutcast client " + copyright.version
+
+    def __init__(self, path="/"):
+        self.path = path
+        self.got_metadata = False
+        self.metaint = None
+        self.metamode = "mp3"
+        self.databuffer = ""
+        
+    def connectionMade(self):
+        self.sendCommand("GET", self.path)
+        self.sendHeader("User-Agent", self.userAgent)
+        self.sendHeader("Icy-MetaData", "1")
+        self.endHeaders()
+        
+    def lineReceived(self, line):
+        # fix shoutcast crappiness
+        if not self.firstLine and line:
+            if len(line.split(": ", 1)) == 1:
+                line = line.replace(":", ": ", 1)
+        http.HTTPClient.lineReceived(self, line)
+    
+    def handleHeader(self, key, value):
+        if key.lower() == 'icy-metaint':
+            self.metaint = int(value)
+            self.got_metadata = True
+
+    def handleEndHeaders(self):
+        # Let's check if we got metadata, and set the
+        # appropriate handleResponsePart method.
+        if self.got_metadata:
+            # if we have metadata, then it has to be parsed out of the data stream
+            self.handleResponsePart = self.handleResponsePart_with_metadata
+        else:
+            # otherwise, all the data is MP3 data
+            self.handleResponsePart = self.gotMP3Data
+
+    def handleResponsePart_with_metadata(self, data):
+        self.databuffer += data
+        while self.databuffer:
+            stop = getattr(self, "handle_%s" % self.metamode)()
+            if stop:
+                return
+
+    def handle_length(self):
+        self.remaining = ord(self.databuffer[0]) * 16
+        self.databuffer = self.databuffer[1:]
+        self.metamode = "meta"
+    
+    def handle_mp3(self):
+        if len(self.databuffer) > self.metaint:
+            self.gotMP3Data(self.databuffer[:self.metaint])
+            self.databuffer = self.databuffer[self.metaint:]
+            self.metamode = "length"
+        else:
+            return 1
+    
+    def handle_meta(self):
+        if len(self.databuffer) >= self.remaining:
+            if self.remaining:
+                data = self.databuffer[:self.remaining]
+                self.gotMetaData(self.parseMetadata(data))
+            self.databuffer = self.databuffer[self.remaining:]
+            self.metamode = "mp3"
+        else:
+            return 1
+
+    def parseMetadata(self, data):
+        meta = []
+        for chunk in data.split(';'):
+            chunk = chunk.strip().replace("\x00", "")
+            if not chunk:
+                continue
+            key, value = chunk.split('=', 1)
+            if value.startswith("'") and value.endswith("'"):
+                value = value[1:-1]
+            meta.append((key, value))
+        return meta
+    
+    def gotMetaData(self, metadata):
+        """Called with a list of (key, value) pairs of metadata,
+        if metadata is available on the server.
+
+        Will only be called on non-empty metadata.
+        """
+        raise NotImplementedError, "implement in subclass"
+    
+    def gotMP3Data(self, data):
+        """Called with chunk of MP3 data."""
+        raise NotImplementedError, "implement in subclass"
diff --git a/ThirdParty/Twisted/twisted/protocols/sip.py b/ThirdParty/Twisted/twisted/protocols/sip.py
new file mode 100644
index 0000000..8a3f05c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/sip.py
@@ -0,0 +1,1347 @@
+# -*- test-case-name: twisted.test.test_sip -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Session Initialization Protocol.
+
+Documented in RFC 2543.
+[Superceded by 3261]
+
+
+This module contains a deprecated implementation of HTTP Digest authentication.
+See L{twisted.cred.credentials} and L{twisted.cred._digest} for its new home.
+"""
+
+# system imports
+import socket, time, sys, random, warnings
+from zope.interface import implements, Interface
+
+# twisted imports
+from twisted.python import log, util
+from twisted.python.deprecate import deprecated
+from twisted.python.versions import Version
+from twisted.python.hashlib import md5
+from twisted.internet import protocol, defer, reactor
+
+from twisted import cred
+import twisted.cred.error
+from twisted.cred.credentials import UsernameHashedPassword, UsernamePassword
+
+
+# sibling imports
+from twisted.protocols import basic
+
+PORT = 5060
+
+# SIP headers have short forms
+shortHeaders = {"call-id": "i",
+                "contact": "m",
+                "content-encoding": "e",
+                "content-length": "l",
+                "content-type": "c",
+                "from": "f",
+                "subject": "s",
+                "to": "t",
+                "via": "v",
+                }
+
+longHeaders = {}
+for k, v in shortHeaders.items():
+    longHeaders[v] = k
+del k, v
+
+statusCodes = {
+    100: "Trying",
+    180: "Ringing",
+    181: "Call Is Being Forwarded",
+    182: "Queued",
+    183: "Session Progress",
+
+    200: "OK",
+
+    300: "Multiple Choices",
+    301: "Moved Permanently",
+    302: "Moved Temporarily",
+    303: "See Other",
+    305: "Use Proxy",
+    380: "Alternative Service",
+
+    400: "Bad Request",
+    401: "Unauthorized",
+    402: "Payment Required",
+    403: "Forbidden",
+    404: "Not Found",
+    405: "Method Not Allowed",
+    406: "Not Acceptable",
+    407: "Proxy Authentication Required",
+    408: "Request Timeout",
+    409: "Conflict", # Not in RFC3261
+    410: "Gone",
+    411: "Length Required", # Not in RFC3261
+    413: "Request Entity Too Large",
+    414: "Request-URI Too Large",
+    415: "Unsupported Media Type",
+    416: "Unsupported URI Scheme",
+    420: "Bad Extension",
+    421: "Extension Required",
+    423: "Interval Too Brief",
+    480: "Temporarily Unavailable",
+    481: "Call/Transaction Does Not Exist",
+    482: "Loop Detected",
+    483: "Too Many Hops",
+    484: "Address Incomplete",
+    485: "Ambiguous",
+    486: "Busy Here",
+    487: "Request Terminated",
+    488: "Not Acceptable Here",
+    491: "Request Pending",
+    493: "Undecipherable",
+
+    500: "Internal Server Error",
+    501: "Not Implemented",
+    502: "Bad Gateway", # no donut
+    503: "Service Unavailable",
+    504: "Server Time-out",
+    505: "SIP Version not supported",
+    513: "Message Too Large",
+
+    600: "Busy Everywhere",
+    603: "Decline",
+    604: "Does not exist anywhere",
+    606: "Not Acceptable",
+}
+
+specialCases = {
+    'cseq': 'CSeq',
+    'call-id': 'Call-ID',
+    'www-authenticate': 'WWW-Authenticate',
+}
+
+
+def dashCapitalize(s):
+    ''' Capitalize a string, making sure to treat - as a word separator '''
+    return '-'.join([ x.capitalize() for x in s.split('-')])
+
+def unq(s):
+    if s[0] == s[-1] == '"':
+        return s[1:-1]
+    return s
+
+def DigestCalcHA1(
+    pszAlg,
+    pszUserName,
+    pszRealm,
+    pszPassword,
+    pszNonce,
+    pszCNonce,
+):
+    m = md5()
+    m.update(pszUserName)
+    m.update(":")
+    m.update(pszRealm)
+    m.update(":")
+    m.update(pszPassword)
+    HA1 = m.digest()
+    if pszAlg == "md5-sess":
+        m = md5()
+        m.update(HA1)
+        m.update(":")
+        m.update(pszNonce)
+        m.update(":")
+        m.update(pszCNonce)
+        HA1 = m.digest()
+    return HA1.encode('hex')
+
+
+DigestCalcHA1 = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcHA1)
+
+def DigestCalcResponse(
+    HA1,
+    pszNonce,
+    pszNonceCount,
+    pszCNonce,
+    pszQop,
+    pszMethod,
+    pszDigestUri,
+    pszHEntity,
+):
+    m = md5()
+    m.update(pszMethod)
+    m.update(":")
+    m.update(pszDigestUri)
+    if pszQop == "auth-int":
+        m.update(":")
+        m.update(pszHEntity)
+    HA2 = m.digest().encode('hex')
+
+    m = md5()
+    m.update(HA1)
+    m.update(":")
+    m.update(pszNonce)
+    m.update(":")
+    if pszNonceCount and pszCNonce: # pszQop:
+        m.update(pszNonceCount)
+        m.update(":")
+        m.update(pszCNonce)
+        m.update(":")
+        m.update(pszQop)
+        m.update(":")
+    m.update(HA2)
+    hash = m.digest().encode('hex')
+    return hash
+
+
+DigestCalcResponse = deprecated(Version("Twisted", 9, 0, 0))(DigestCalcResponse)
+
+_absent = object()
+
+class Via(object):
+    """
+    A L{Via} is a SIP Via header, representing a segment of the path taken by
+    the request.
+
+    See RFC 3261, sections 8.1.1.7, 18.2.2, and 20.42.
+
+    @ivar transport: Network protocol used for this leg. (Probably either "TCP"
+    or "UDP".)
+    @type transport: C{str}
+    @ivar branch: Unique identifier for this request.
+    @type branch: C{str}
+    @ivar host: Hostname or IP for this leg.
+    @type host: C{str}
+    @ivar port: Port used for this leg.
+    @type port: C{int}, or None.
+    @ivar rportRequested: Whether to request RFC 3581 client processing or not.
+    @type rportRequested: C{bool}
+    @ivar rportValue: Servers wishing to honor requests for RFC 3581 processing
+    should set this parameter to the source port the request was received
+    from.
+    @type rportValue: C{int}, or None.
+
+    @ivar ttl: Time-to-live for requests on multicast paths.
+    @type ttl: C{int}, or None.
+    @ivar maddr: The destination multicast address, if any.
+    @type maddr: C{str}, or None.
+    @ivar hidden: Obsolete in SIP 2.0.
+    @type hidden: C{bool}
+    @ivar otherParams: Any other parameters in the header.
+    @type otherParams: C{dict}
+    """
+
+    def __init__(self, host, port=PORT, transport="UDP", ttl=None,
+                 hidden=False, received=None, rport=_absent, branch=None,
+                 maddr=None, **kw):
+        """
+        Set parameters of this Via header. All arguments correspond to
+        attributes of the same name.
+
+        To maintain compatibility with old SIP
+        code, the 'rport' argument is used to determine the values of
+        C{rportRequested} and C{rportValue}. If None, C{rportRequested} is set
+        to True. (The deprecated method for doing this is to pass True.) If an
+        integer, C{rportValue} is set to the given value.
+
+        Any arguments not explicitly named here are collected into the
+        C{otherParams} dict.
+        """
+        self.transport = transport
+        self.host = host
+        self.port = port
+        self.ttl = ttl
+        self.hidden = hidden
+        self.received = received
+        if rport is True:
+            warnings.warn(
+                "rport=True is deprecated since Twisted 9.0.",
+                DeprecationWarning,
+                stacklevel=2)
+            self.rportValue = None
+            self.rportRequested = True
+        elif rport is None:
+            self.rportValue = None
+            self.rportRequested = True
+        elif rport is _absent:
+            self.rportValue = None
+            self.rportRequested = False
+        else:
+            self.rportValue = rport
+            self.rportRequested = False
+
+        self.branch = branch
+        self.maddr = maddr
+        self.otherParams = kw
+
+
+    def _getrport(self):
+        """
+        Returns the rport value expected by the old SIP code.
+        """
+        if self.rportRequested == True:
+            return True
+        elif self.rportValue is not None:
+            return self.rportValue
+        else:
+            return None
+
+
+    def _setrport(self, newRPort):
+        """
+        L{Base._fixupNAT} sets C{rport} directly, so this method sets
+        C{rportValue} based on that.
+
+        @param newRPort: The new rport value.
+        @type newRPort: C{int}
+        """
+        self.rportValue = newRPort
+        self.rportRequested = False
+
+
+    rport = property(_getrport, _setrport)
+
+    def toString(self):
+        """
+        Serialize this header for use in a request or response.
+        """
+        s = "SIP/2.0/%s %s:%s" % (self.transport, self.host, self.port)
+        if self.hidden:
+            s += ";hidden"
+        for n in "ttl", "branch", "maddr", "received":
+            value = getattr(self, n)
+            if value is not None:
+                s += ";%s=%s" % (n, value)
+        if self.rportRequested:
+            s += ";rport"
+        elif self.rportValue is not None:
+            s += ";rport=%s" % (self.rport,)
+
+        etc = self.otherParams.items()
+        etc.sort()
+        for k, v in etc:
+            if v is None:
+                s += ";" + k
+            else:
+                s += ";%s=%s" % (k, v)
+        return s
+
+
+def parseViaHeader(value):
+    """
+    Parse a Via header.
+
+    @return: The parsed version of this header.
+    @rtype: L{Via}
+    """
+    parts = value.split(";")
+    sent, params = parts[0], parts[1:]
+    protocolinfo, by = sent.split(" ", 1)
+    by = by.strip()
+    result = {}
+    pname, pversion, transport = protocolinfo.split("/")
+    if pname != "SIP" or pversion != "2.0":
+        raise ValueError, "wrong protocol or version: %r" % value
+    result["transport"] = transport
+    if ":" in by:
+        host, port = by.split(":")
+        result["port"] = int(port)
+        result["host"] = host
+    else:
+        result["host"] = by
+    for p in params:
+        # it's the comment-stripping dance!
+        p = p.strip().split(" ", 1)
+        if len(p) == 1:
+            p, comment = p[0], ""
+        else:
+            p, comment = p
+        if p == "hidden":
+            result["hidden"] = True
+            continue
+        parts = p.split("=", 1)
+        if len(parts) == 1:
+            name, value = parts[0], None
+        else:
+            name, value = parts
+            if name in ("rport", "ttl"):
+                value = int(value)
+        result[name] = value
+    return Via(**result)
+
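A round-trip sketch showing how parseViaHeader and Via.toString relate; the header value is a made-up example:

    from twisted.protocols import sip

    v = sip.parseViaHeader("SIP/2.0/UDP pc33.example.org:5060;branch=z9hG4bK776asdhds")
    # v.host == "pc33.example.org", v.port == 5060, v.branch == "z9hG4bK776asdhds"
    print v.toString()    # serializes back to the same header value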
+
+class URL:
+    """A SIP URL."""
+
+    def __init__(self, host, username=None, password=None, port=None,
+                 transport=None, usertype=None, method=None,
+                 ttl=None, maddr=None, tag=None, other=None, headers=None):
+        self.username = username
+        self.host = host
+        self.password = password
+        self.port = port
+        self.transport = transport
+        self.usertype = usertype
+        self.method = method
+        self.tag = tag
+        self.ttl = ttl
+        self.maddr = maddr
+        if other == None:
+            self.other = []
+        else:
+            self.other = other
+        if headers == None:
+            self.headers = {}
+        else:
+            self.headers = headers
+
+    def toString(self):
+        l = []; w = l.append
+        w("sip:")
+        if self.username != None:
+            w(self.username)
+            if self.password != None:
+                w(":%s" % self.password)
+            w("@")
+        w(self.host)
+        if self.port != None:
+            w(":%d" % self.port)
+        if self.usertype != None:
+            w(";user=%s" % self.usertype)
+        for n in ("transport", "ttl", "maddr", "method", "tag"):
+            v = getattr(self, n)
+            if v != None:
+                w(";%s=%s" % (n, v))
+        for v in self.other:
+            w(";%s" % v)
+        if self.headers:
+            w("?")
+            w("&".join([("%s=%s" % (specialCases.get(h) or dashCapitalize(h), v)) for (h, v) in self.headers.items()]))
+        return "".join(l)
+
+    def __str__(self):
+        return self.toString()
+
+    def __repr__(self):
+        return '<URL %s:%s@%s:%r/%s>' % (self.username, self.password, self.host, self.port, self.transport)
+
+
+def parseURL(url, host=None, port=None):
+    """Return string into URL object.
+
+    URIs are of of form 'sip:user at example.com'.
+    """
+    d = {}
+    if not url.startswith("sip:"):
+        raise ValueError("unsupported scheme: " + url[:4])
+    parts = url[4:].split(";")
+    userdomain, params = parts[0], parts[1:]
+    udparts = userdomain.split("@", 1)
+    if len(udparts) == 2:
+        userpass, hostport = udparts
+        upparts = userpass.split(":", 1)
+        if len(upparts) == 1:
+            d["username"] = upparts[0]
+        else:
+            d["username"] = upparts[0]
+            d["password"] = upparts[1]
+    else:
+        hostport = udparts[0]
+    hpparts = hostport.split(":", 1)
+    if len(hpparts) == 1:
+        d["host"] = hpparts[0]
+    else:
+        d["host"] = hpparts[0]
+        d["port"] = int(hpparts[1])
+    if host != None:
+        d["host"] = host
+    if port != None:
+        d["port"] = port
+    for p in params:
+        if p == params[-1] and "?" in p:
+            d["headers"] = h = {}
+            p, headers = p.split("?", 1)
+            for header in headers.split("&"):
+                k, v = header.split("=")
+                h[k] = v
+        nv = p.split("=", 1)
+        if len(nv) == 1:
+            d.setdefault("other", []).append(p)
+            continue
+        name, value = nv
+        if name == "user":
+            d["usertype"] = value
+        elif name in ("transport", "ttl", "maddr", "method", "tag"):
+            if name == "ttl":
+                value = int(value)
+            d[name] = value
+        else:
+            d.setdefault("other", []).append(p)
+    return URL(**d)
+
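A short sketch of what parseURL produces and how it round-trips through URL.toString; the address is illustrative:

    from twisted.protocols import sip

    url = sip.parseURL("sip:bob@proxy.example.org:5060;transport=udp")
    # url.username == "bob", url.host == "proxy.example.org", url.port == 5060
    print url.toString()    # "sip:bob@proxy.example.org:5060;transport=udp"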
+
+def cleanRequestURL(url):
+    """Clean a URL from a Request line."""
+    url.transport = None
+    url.maddr = None
+    url.ttl = None
+    url.headers = {}
+
+
+def parseAddress(address, host=None, port=None, clean=0):
+    """Return (name, uri, params) for From/To/Contact header.
+
+    @param clean: remove unnecessary info, usually for From and To headers.
+    """
+    address = address.strip()
+    # simple 'sip:foo' case
+    if address.startswith("sip:"):
+        return "", parseURL(address, host=host, port=port), {}
+    params = {}
+    name, url = address.split("<", 1)
+    name = name.strip()
+    if name.startswith('"'):
+        name = name[1:]
+    if name.endswith('"'):
+        name = name[:-1]
+    url, paramstring = url.split(">", 1)
+    url = parseURL(url, host=host, port=port)
+    paramstring = paramstring.strip()
+    if paramstring:
+        for l in paramstring.split(";"):
+            if not l:
+                continue
+            k, v = l.split("=")
+            params[k] = v
+    if clean:
+        # rfc 2543 6.21
+        url.ttl = None
+        url.headers = {}
+        url.transport = None
+        url.maddr = None
+    return name, url, params
+
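parseAddress handles the full name-addr form used in From/To/Contact headers. A sketch with a made-up header value:

    from twisted.protocols import sip

    name, url, params = sip.parseAddress('"Bob" <sip:bob@example.org>;tag=456248')
    # name == "Bob", url.toString() == "sip:bob@example.org", params == {"tag": "456248"}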
+
+class SIPError(Exception):
+    def __init__(self, code, phrase=None):
+        if phrase is None:
+            phrase = statusCodes[code]
+        Exception.__init__(self, "SIP error (%d): %s" % (code, phrase))
+        self.code = code
+        self.phrase = phrase
+
+
+class RegistrationError(SIPError):
+    """Registration was not possible."""
+
+
+class Message:
+    """A SIP message."""
+
+    length = None
+
+    def __init__(self):
+        self.headers = util.OrderedDict() # map name to list of values
+        self.body = ""
+        self.finished = 0
+
+    def addHeader(self, name, value):
+        name = name.lower()
+        name = longHeaders.get(name, name)
+        if name == "content-length":
+            self.length = int(value)
+        self.headers.setdefault(name,[]).append(value)
+
+    def bodyDataReceived(self, data):
+        self.body += data
+
+    def creationFinished(self):
+        if (self.length != None) and (self.length != len(self.body)):
+            raise ValueError, "wrong body length"
+        self.finished = 1
+
+    def toString(self):
+        s = "%s\r\n" % self._getHeaderLine()
+        for n, vs in self.headers.items():
+            for v in vs:
+                s += "%s: %s\r\n" % (specialCases.get(n) or dashCapitalize(n), v)
+        s += "\r\n"
+        s += self.body
+        return s
+
+    def _getHeaderLine(self):
+        raise NotImplementedError
+
+
+class Request(Message):
+    """A Request for a URI"""
+
+
+    def __init__(self, method, uri, version="SIP/2.0"):
+        Message.__init__(self)
+        self.method = method
+        if isinstance(uri, URL):
+            self.uri = uri
+        else:
+            self.uri = parseURL(uri)
+            cleanRequestURL(self.uri)
+
+    def __repr__(self):
+        return "<SIP Request %d:%s %s>" % (id(self), self.method, self.uri.toString())
+
+    def _getHeaderLine(self):
+        return "%s %s SIP/2.0" % (self.method, self.uri.toString())
+
+
+class Response(Message):
+    """A Response to a URI Request"""
+
+    def __init__(self, code, phrase=None, version="SIP/2.0"):
+        Message.__init__(self)
+        self.code = code
+        if phrase == None:
+            phrase = statusCodes[code]
+        self.phrase = phrase
+
+    def __repr__(self):
+        return "<SIP Response %d:%s>" % (id(self), self.code)
+
+    def _getHeaderLine(self):
+        return "SIP/2.0 %s %s" % (self.code, self.phrase)
+
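A sketch of building a request by hand with the classes above; the method, URI and headers are examples. addHeader stores names lower-cased, and toString re-capitalizes them on output:

    from twisted.protocols import sip

    r = sip.Request("OPTIONS", "sip:alice@example.org")
    r.addHeader("via", sip.Via("10.0.0.1").toString())
    r.addHeader("to", "<sip:alice@example.org>")
    r.addHeader("content-length", "0")
    print r.toString()    # "OPTIONS sip:alice@example.org SIP/2.0" plus the headers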
+
+class MessagesParser(basic.LineReceiver):
+    """A SIP messages parser.
+
+    Expects dataReceived, dataDone repeatedly,
+    in that order. Shouldn't be connected to actual transport.
+    """
+
+    version = "SIP/2.0"
+    acceptResponses = 1
+    acceptRequests = 1
+    state = "firstline" # or "headers", "body" or "invalid"
+
+    debug = 0
+
+    def __init__(self, messageReceivedCallback):
+        self.messageReceived = messageReceivedCallback
+        self.reset()
+
+    def reset(self, remainingData=""):
+        self.state = "firstline"
+        self.length = None # body length
+        self.bodyReceived = 0 # how much of the body we received
+        self.message = None
+        self.header = None
+        self.setLineMode(remainingData)
+
+    def invalidMessage(self):
+        self.state = "invalid"
+        self.setRawMode()
+
+    def dataDone(self):
+        # clear out any buffered data that may be hanging around
+        self.clearLineBuffer()
+        if self.state == "firstline":
+            return
+        if self.state != "body":
+            self.reset()
+            return
+        if self.length == None:
+            # no content-length header, so end of data signals message done
+            self.messageDone()
+        elif self.length < self.bodyReceived:
+            # aborted in the middle
+            self.reset()
+        else:
+            # we have enough data but the message wasn't finished? something is wrong
+            raise RuntimeError, "this should never happen"
+
+    def dataReceived(self, data):
+        try:
+            basic.LineReceiver.dataReceived(self, data)
+        except:
+            log.err()
+            self.invalidMessage()
+
+    def handleFirstLine(self, line):
+        """Expected to create self.message."""
+        raise NotImplementedError
+
+    def lineLengthExceeded(self, line):
+        self.invalidMessage()
+
+    def lineReceived(self, line):
+        if self.state == "firstline":
+            while line.startswith("\n") or line.startswith("\r"):
+                line = line[1:]
+            if not line:
+                return
+            try:
+                a, b, c = line.split(" ", 2)
+            except ValueError:
+                self.invalidMessage()
+                return
+            if a == "SIP/2.0" and self.acceptResponses:
+                # response
+                try:
+                    code = int(b)
+                except ValueError:
+                    self.invalidMessage()
+                    return
+                self.message = Response(code, c)
+            elif c == "SIP/2.0" and self.acceptRequests:
+                self.message = Request(a, b)
+            else:
+                self.invalidMessage()
+                return
+            self.state = "headers"
+            return
+        else:
+            assert self.state == "headers"
+        if line:
+            # multiline header
+            if line.startswith(" ") or line.startswith("\t"):
+                name, value = self.header
+                self.header = name, (value + line.lstrip())
+            else:
+                # new header
+                if self.header:
+                    self.message.addHeader(*self.header)
+                    self.header = None
+                try:
+                    name, value = line.split(":", 1)
+                except ValueError:
+                    self.invalidMessage()
+                    return
+                self.header = name, value.lstrip()
+                # XXX we assume content-length won't be multiline
+                if name.lower() == "content-length":
+                    try:
+                        self.length = int(value.lstrip())
+                    except ValueError:
+                        self.invalidMessage()
+                        return
+        else:
+            # CRLF, we now have message body until self.length bytes,
+            # or if no length was given, until there is no more data
+            # from the connection sending us data.
+            self.state = "body"
+            if self.header:
+                self.message.addHeader(*self.header)
+                self.header = None
+            if self.length == 0:
+                self.messageDone()
+                return
+            self.setRawMode()
+
+    def messageDone(self, remainingData=""):
+        assert self.state == "body"
+        self.message.creationFinished()
+        self.messageReceived(self.message)
+        self.reset(remainingData)
+
+    def rawDataReceived(self, data):
+        assert self.state in ("body", "invalid")
+        if self.state == "invalid":
+            return
+        if self.length == None:
+            self.message.bodyDataReceived(data)
+        else:
+            dataLen = len(data)
+            expectedLen = self.length - self.bodyReceived
+            if dataLen > expectedLen:
+                self.message.bodyDataReceived(data[:expectedLen])
+                self.messageDone(data[expectedLen:])
+                return
+            else:
+                self.bodyReceived += dataLen
+                self.message.bodyDataReceived(data)
+                if self.bodyReceived == self.length:
+                    self.messageDone()
+
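The parser is callback-driven. A self-contained sketch feeding it one complete request; the message text is a made-up example:

    from twisted.protocols import sip

    received = []
    parser = sip.MessagesParser(received.append)
    parser.dataReceived("OPTIONS sip:bob@example.org SIP/2.0\r\n"
                        "Via: SIP/2.0/UDP 10.0.0.1:5060\r\n"
                        "Content-Length: 0\r\n"
                        "\r\n")
    parser.dataDone()
    # received[0] is now a Request with method "OPTIONS" and an empty body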
+
+class Base(protocol.DatagramProtocol):
+    """Base class for SIP clients and servers."""
+
+    PORT = PORT
+    debug = False
+
+    def __init__(self):
+        self.messages = []
+        self.parser = MessagesParser(self.addMessage)
+
+    def addMessage(self, msg):
+        self.messages.append(msg)
+
+    def datagramReceived(self, data, addr):
+        self.parser.dataReceived(data)
+        self.parser.dataDone()
+        for m in self.messages:
+            self._fixupNAT(m, addr)
+            if self.debug:
+                log.msg("Received %r from %r" % (m.toString(), addr))
+            if isinstance(m, Request):
+                self.handle_request(m, addr)
+            else:
+                self.handle_response(m, addr)
+        self.messages[:] = []
+
+    def _fixupNAT(self, message, (srcHost, srcPort)):
+        # RFC 2543 6.40.2,
+        senderVia = parseViaHeader(message.headers["via"][0])
+        if senderVia.host != srcHost:
+            senderVia.received = srcHost
+            if senderVia.port != srcPort:
+                senderVia.rport = srcPort
+            message.headers["via"][0] = senderVia.toString()
+        elif senderVia.rport == True:
+            senderVia.received = srcHost
+            senderVia.rport = srcPort
+            message.headers["via"][0] = senderVia.toString()
+
+    def deliverResponse(self, responseMessage):
+        """Deliver response.
+
+        Destination is based on topmost Via header."""
+        destVia = parseViaHeader(responseMessage.headers["via"][0])
+        # XXX we don't do multicast yet
+        host = destVia.received or destVia.host
+        port = destVia.rport or destVia.port or self.PORT
+        destAddr = URL(host=host, port=port)
+        self.sendMessage(destAddr, responseMessage)
+
+    def responseFromRequest(self, code, request):
+        """Create a response to a request message."""
+        response = Response(code)
+        for name in ("via", "to", "from", "call-id", "cseq"):
+            response.headers[name] = request.headers.get(name, [])[:]
+
+        return response
+
+    def sendMessage(self, destURL, message):
+        """Send a message.
+
+        @param destURL: C{URL}. This should be a *physical* URL, not a logical one.
+        @param message: The message to send.
+        """
+        if destURL.transport not in ("udp", None):
+            raise RuntimeError, "only UDP currently supported"
+        if self.debug:
+            log.msg("Sending %r to %r" % (message.toString(), destURL))
+        self.transport.write(message.toString(), (destURL.host, destURL.port or self.PORT))
+
+    def handle_request(self, message, addr):
+        """Override to define behavior for requests received
+
+        @type message: C{Message}
+        @type addr: C{tuple}
+        """
+        raise NotImplementedError
+
+    def handle_response(self, message, addr):
+        """Override to define behavior for responses received.
+
+        @type message: C{Message}
+        @type addr: C{tuple}
+        """
+        raise NotImplementedError
+
+
+class IContact(Interface):
+    """A user of a registrar or proxy"""
+
+
+class Registration:
+    def __init__(self, secondsToExpiry, contactURL):
+        self.secondsToExpiry = secondsToExpiry
+        self.contactURL = contactURL
+
+class IRegistry(Interface):
+    """Allows registration of logical->physical URL mapping."""
+
+    def registerAddress(domainURL, logicalURL, physicalURL):
+        """Register the physical address of a logical URL.
+
+        @return: Deferred of C{Registration} or failure with RegistrationError.
+        """
+
+    def unregisterAddress(domainURL, logicalURL, physicalURL):
+        """Unregister the physical address of a logical URL.
+
+        @return: Deferred of C{Registration} or failure with RegistrationError.
+        """
+
+    def getRegistrationInfo(logicalURL):
+        """Get registration info for logical URL.
+
+        @return: Deferred of C{Registration} object or failure of LookupError.
+        """
+
+
+class ILocator(Interface):
+    """Allow looking up physical address for logical URL."""
+
+    def getAddress(logicalURL):
+        """Return physical URL of server for logical URL of user.
+
+        @param logicalURL: a logical C{URL}.
+        @return: Deferred which becomes URL or fails with LookupError.
+        """
+
+
+class Proxy(Base):
+    """SIP proxy."""
+
+    PORT = PORT
+
+    locator = None # object implementing ILocator
+
+    def __init__(self, host=None, port=PORT):
+        """Create new instance.
+
+        @param host: our hostname/IP as set in Via headers.
+        @param port: our port as set in Via headers.
+        """
+        self.host = host or socket.getfqdn()
+        self.port = port
+        Base.__init__(self)
+
+    def getVia(self):
+        """Return value of Via header for this proxy."""
+        return Via(host=self.host, port=self.port)
+
+    def handle_request(self, message, addr):
+        # send immediate 100/trying message before processing
+        #self.deliverResponse(self.responseFromRequest(100, message))
+        f = getattr(self, "handle_%s_request" % message.method, None)
+        if f is None:
+            f = self.handle_request_default
+        try:
+            d = f(message, addr)
+        except SIPError, e:
+            self.deliverResponse(self.responseFromRequest(e.code, message))
+        except:
+            log.err()
+            self.deliverResponse(self.responseFromRequest(500, message))
+        else:
+            if d is not None:
+                d.addErrback(lambda e:
+                    self.deliverResponse(self.responseFromRequest(e.code, message))
+                )
+
+    def handle_request_default(self, message, (srcHost, srcPort)):
+        """Default request handler.
+
+        The default behaviour for OPTIONS and unknown methods for proxies
+        is to forward the message on to the client.
+
+        Since at the moment we are a stateless proxy, that's basically
+        everything.
+        """
+        def _mungContactHeader(uri, message):
+            message.headers['contact'][0] = uri.toString()
+            return self.sendMessage(uri, message)
+
+        viaHeader = self.getVia()
+        if viaHeader.toString() in message.headers["via"]:
+            # must be a loop, so drop message
+            log.msg("Dropping looped message.")
+            return
+
+        message.headers["via"].insert(0, viaHeader.toString())
+        name, uri, tags = parseAddress(message.headers["to"][0], clean=1)
+
+        # this is broken and needs refactoring to use cred
+        d = self.locator.getAddress(uri)
+        d.addCallback(self.sendMessage, message)
+        d.addErrback(self._cantForwardRequest, message)
+
+    def _cantForwardRequest(self, error, message):
+        error.trap(LookupError)
+        del message.headers["via"][0] # this'll be us
+        self.deliverResponse(self.responseFromRequest(404, message))
+
+    def deliverResponse(self, responseMessage):
+        """Deliver response.
+
+        Destination is based on topmost Via header."""
+        destVia = parseViaHeader(responseMessage.headers["via"][0])
+        # XXX we don't do multicast yet
+        host = destVia.received or destVia.host
+        port = destVia.rport or destVia.port or self.PORT
+
+        destAddr = URL(host=host, port=port)
+        self.sendMessage(destAddr, responseMessage)
+
+    def responseFromRequest(self, code, request):
+        """Create a response to a request message."""
+        response = Response(code)
+        for name in ("via", "to", "from", "call-id", "cseq"):
+            response.headers[name] = request.headers.get(name, [])[:]
+        return response
+
+    def handle_response(self, message, addr):
+        """Default response handler."""
+        v = parseViaHeader(message.headers["via"][0])
+        if (v.host, v.port) != (self.host, self.port):
+            # we got a message not intended for us?
+            # XXX note this check breaks if we have multiple external IPs
+            # yay for suck protocols
+            log.msg("Dropping incorrectly addressed message")
+            return
+        del message.headers["via"][0]
+        if not message.headers["via"]:
+            # this message is addressed to us
+            self.gotResponse(message, addr)
+            return
+        self.deliverResponse(message)
+
+    def gotResponse(self, message, addr):
+        """Called with responses that are addressed at this server."""
+        pass
+
+class IAuthorizer(Interface):
+    def getChallenge(peer):
+        """Generate a challenge the client may respond to.
+
+        @type peer: C{tuple}
+        @param peer: The client's address
+
+        @rtype: C{str}
+        @return: The challenge string
+        """
+
+    def decode(response):
+        """Create a credentials object from the given response.
+
+        @type response: C{str}
+        """
+
+class BasicAuthorizer:
+    """Authorizer for insecure Basic (base64-encoded plaintext) authentication.
+
+    This form of authentication is broken and insecure.  Do not use it.
+    """
+
+    implements(IAuthorizer)
+
+    def __init__(self):
+        """
+        This method exists solely to issue a deprecation warning.
+        """
+        warnings.warn(
+            "twisted.protocols.sip.BasicAuthorizer was deprecated "
+            "in Twisted 9.0.0",
+            category=DeprecationWarning,
+            stacklevel=2)
+
+
+    def getChallenge(self, peer):
+        return None
+
+    def decode(self, response):
+        # At least one SIP client improperly pads its Base64 encoded messages
+        for i in range(3):
+            try:
+                creds = (response + ('=' * i)).decode('base64')
+            except:
+                pass
+            else:
+                break
+        else:
+            # Totally bogus
+            raise SIPError(400)
+        p = creds.split(':', 1)
+        if len(p) == 2:
+            return UsernamePassword(*p)
+        raise SIPError(400)
+
+
+
+class DigestedCredentials(UsernameHashedPassword):
+    """Yet Another Simple Digest-MD5 authentication scheme"""
+
+    def __init__(self, username, fields, challenges):
+        warnings.warn(
+            "twisted.protocols.sip.DigestedCredentials was deprecated "
+            "in Twisted 9.0.0",
+            category=DeprecationWarning,
+            stacklevel=2)
+        self.username = username
+        self.fields = fields
+        self.challenges = challenges
+
+    def checkPassword(self, password):
+        method = 'REGISTER'
+        response = self.fields.get('response')
+        uri = self.fields.get('uri')
+        nonce = self.fields.get('nonce')
+        cnonce = self.fields.get('cnonce')
+        nc = self.fields.get('nc')
+        algo = self.fields.get('algorithm', 'MD5')
+        qop = self.fields.get('qop-options', 'auth')
+        opaque = self.fields.get('opaque')
+
+        if opaque not in self.challenges:
+            return False
+        del self.challenges[opaque]
+
+        user, domain = self.username.split('@', 1)
+        if uri is None:
+            uri = 'sip:' + domain
+
+        expected = DigestCalcResponse(
+            DigestCalcHA1(algo, user, domain, password, nonce, cnonce),
+            nonce, nc, cnonce, qop, method, uri, None,
+        )
+
+        return expected == response
+
+class DigestAuthorizer:
+    CHALLENGE_LIFETIME = 15
+
+    implements(IAuthorizer)
+
+    def __init__(self):
+        warnings.warn(
+            "twisted.protocols.sip.DigestAuthorizer was deprecated "
+            "in Twisted 9.0.0",
+            category=DeprecationWarning,
+            stacklevel=2)
+
+        self.outstanding = {}
+
+
+
+    def generateNonce(self):
+        c = tuple([random.randrange(sys.maxint) for _ in range(3)])
+        c = '%d%d%d' % c
+        return c
+
+    def generateOpaque(self):
+        return str(random.randrange(sys.maxint))
+
+    def getChallenge(self, peer):
+        c = self.generateNonce()
+        o = self.generateOpaque()
+        self.outstanding[o] = c
+        return ','.join((
+            'nonce="%s"' % c,
+            'opaque="%s"' % o,
+            'qop-options="auth"',
+            'algorithm="MD5"',
+        ))
+
+    def decode(self, response):
+        response = ' '.join(response.splitlines())
+        parts = response.split(',')
+        auth = dict([(k.strip(), unq(v.strip())) for (k, v) in [p.split('=', 1) for p in parts]])
+        try:
+            username = auth['username']
+        except KeyError:
+            raise SIPError(401)
+        try:
+            return DigestedCredentials(username, auth, self.outstanding)
+        except:
+            raise SIPError(400)
+
+
+class RegisterProxy(Proxy):
+    """A proxy that allows registration for a specific domain.
+
+    Unregistered users won't be handled.
+    """
+
+    portal = None
+
+    registry = None # should implement IRegistry
+
+    authorizers = {}
+
+    def __init__(self, *args, **kw):
+        Proxy.__init__(self, *args, **kw)
+        self.liveChallenges = {}
+        if "digest" not in self.authorizers:
+            self.authorizers["digest"] = DigestAuthorizer()
+
+    def handle_ACK_request(self, message, (host, port)):
+        # XXX
+        # ACKs are a client's way of indicating they got the last message
+        # Responding to them is not a good idea.
+        # However, we should keep track of terminal messages and re-transmit
+        # if no ACK is received.
+        pass
+
+    def handle_REGISTER_request(self, message, (host, port)):
+        """Handle a registration request.
+
+        Currently registration is not proxied.
+        """
+        if self.portal is None:
+            # There is no portal.  Let anyone in.
+            self.register(message, host, port)
+        else:
+            # There is a portal.  Check for credentials.
+            if not message.headers.has_key("authorization"):
+                return self.unauthorized(message, host, port)
+            else:
+                return self.login(message, host, port)
+
+    def unauthorized(self, message, host, port):
+        m = self.responseFromRequest(401, message)
+        for (scheme, auth) in self.authorizers.iteritems():
+            chal = auth.getChallenge((host, port))
+            if chal is None:
+                value = '%s realm="%s"' % (scheme.title(), self.host)
+            else:
+                value = '%s %s,realm="%s"' % (scheme.title(), chal, self.host)
+            m.headers.setdefault('www-authenticate', []).append(value)
+        self.deliverResponse(m)
+
+
+    def login(self, message, host, port):
+        parts = message.headers['authorization'][0].split(None, 1)
+        a = self.authorizers.get(parts[0].lower())
+        if a:
+            try:
+                c = a.decode(parts[1])
+            except SIPError:
+                raise
+            except:
+                log.err()
+                self.deliverResponse(self.responseFromRequest(500, message))
+            else:
+                c.username += '@' + self.host
+                self.portal.login(c, None, IContact
+                    ).addCallback(self._cbLogin, message, host, port
+                    ).addErrback(self._ebLogin, message, host, port
+                    ).addErrback(log.err
+                    )
+        else:
+            self.deliverResponse(self.responseFromRequest(501, message))
+
+    def _cbLogin(self, (i, a, l), message, host, port):
+        # It's stateless, matey.  What a joke.
+        self.register(message, host, port)
+
+    def _ebLogin(self, failure, message, host, port):
+        failure.trap(cred.error.UnauthorizedLogin)
+        self.unauthorized(message, host, port)
+
+    def register(self, message, host, port):
+        """Allow all users to register"""
+        name, toURL, params = parseAddress(message.headers["to"][0], clean=1)
+        contact = None
+        if message.headers.has_key("contact"):
+            contact = message.headers["contact"][0]
+
+        if message.headers.get("expires", [None])[0] == "0":
+            self.unregister(message, toURL, contact)
+        else:
+            # XXX Check expires on appropriate URL, and pass it to registry
+            # instead of having registry hardcode it.
+            if contact is not None:
+                name, contactURL, params = parseAddress(contact, host=host, port=port)
+                d = self.registry.registerAddress(message.uri, toURL, contactURL)
+            else:
+                d = self.registry.getRegistrationInfo(toURL)
+            d.addCallbacks(self._cbRegister, self._ebRegister,
+                callbackArgs=(message,),
+                errbackArgs=(message,)
+            )
+
+    def _cbRegister(self, registration, message):
+        response = self.responseFromRequest(200, message)
+        if registration.contactURL != None:
+            response.addHeader("contact", registration.contactURL.toString())
+            response.addHeader("expires", "%d" % registration.secondsToExpiry)
+        response.addHeader("content-length", "0")
+        self.deliverResponse(response)
+
+    def _ebRegister(self, error, message):
+        error.trap(RegistrationError, LookupError)
+        # XXX return error message, and alter tests to deal with
+        # this, currently tests assume no message sent on failure
+
+    def unregister(self, message, toURL, contact):
+        try:
+            expires = int(message.headers["expires"][0])
+        except ValueError:
+            self.deliverResponse(self.responseFromRequest(400, message))
+        else:
+            if expires == 0:
+                if contact == "*":
+                    contactURL = "*"
+                else:
+                    name, contactURL, params = parseAddress(contact)
+                d = self.registry.unregisterAddress(message.uri, toURL, contactURL)
+                d.addCallback(self._cbUnregister, message
+                    ).addErrback(self._ebUnregister, message
+                    )
+
+    def _cbUnregister(self, registration, message):
+        msg = self.responseFromRequest(200, message)
+        msg.headers.setdefault('contact', []).append(registration.contactURL.toString())
+        msg.addHeader("expires", "0")
+        self.deliverResponse(msg)
+
+    def _ebUnregister(self, registration, message):
+        pass
+
+
+class InMemoryRegistry:
+    """A simplistic registry for a specific domain."""
+
+    implements(IRegistry, ILocator)
+
+    def __init__(self, domain):
+        self.domain = domain # the domain we handle registration for
+        self.users = {} # map username to (IDelayedCall for expiry, address URI)
+
+    def getAddress(self, userURI):
+        if userURI.host != self.domain:
+            return defer.fail(LookupError("unknown domain"))
+        if userURI.username in self.users:
+            dc, url = self.users[userURI.username]
+            return defer.succeed(url)
+        else:
+            return defer.fail(LookupError("no such user"))
+
+    def getRegistrationInfo(self, userURI):
+        if userURI.host != self.domain:
+            return defer.fail(LookupError("unknown domain"))
+        if self.users.has_key(userURI.username):
+            dc, url = self.users[userURI.username]
+            return defer.succeed(Registration(int(dc.getTime() - time.time()), url))
+        else:
+            return defer.fail(LookupError("no such user"))
+
+    def _expireRegistration(self, username):
+        try:
+            dc, url = self.users[username]
+        except KeyError:
+            return defer.fail(LookupError("no such user"))
+        else:
+            dc.cancel()
+            del self.users[username]
+        return defer.succeed(Registration(0, url))
+
+    def registerAddress(self, domainURL, logicalURL, physicalURL):
+        if domainURL.host != self.domain:
+            log.msg("Registration for domain we don't handle.")
+            return defer.fail(RegistrationError(404))
+        if logicalURL.host != self.domain:
+            log.msg("Registration for domain we don't handle.")
+            return defer.fail(RegistrationError(404))
+        if logicalURL.username in self.users:
+            dc, old = self.users[logicalURL.username]
+            dc.reset(3600)
+        else:
+            dc = reactor.callLater(3600, self._expireRegistration, logicalURL.username)
+        log.msg("Registered %s at %s" % (logicalURL.toString(), physicalURL.toString()))
+        self.users[logicalURL.username] = (dc, physicalURL)
+        return defer.succeed(Registration(int(dc.getTime() - time.time()), physicalURL))
+
+    def unregisterAddress(self, domainURL, logicalURL, physicalURL):
+        return self._expireRegistration(logicalURL.username)
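+
+
+# A minimal wiring sketch (illustrative only, not part of the upstream module;
+# the domain, the port, and the use of the registry as the proxy's locator are
+# assumptions): serve SIP registrations for one domain by pairing a
+# RegisterProxy with an InMemoryRegistry.
+#
+#     registry = InMemoryRegistry("example.org")
+#     proxy = RegisterProxy(host="example.org")
+#     proxy.registry = registry
+#     proxy.locator = registry
+#     reactor.listenUDP(5060, proxy)
+#     reactor.run()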
diff --git a/ThirdParty/Twisted/twisted/protocols/socks.py b/ThirdParty/Twisted/twisted/protocols/socks.py
new file mode 100644
index 0000000..445b9f3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/socks.py
@@ -0,0 +1,240 @@
+# -*- test-case-name: twisted.test.test_socks -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation of the SOCKSv4 protocol.
+"""
+
+# python imports
+import struct
+import string
+import socket
+import time
+
+# twisted imports
+from twisted.internet import reactor, protocol, defer
+from twisted.python import log
+
+
+class SOCKSv4Outgoing(protocol.Protocol):
+
+    def __init__(self,socks):
+        self.socks=socks
+
+    def connectionMade(self):
+        peer = self.transport.getPeer()
+        self.socks.makeReply(90, 0, port=peer.port, ip=peer.host)
+        self.socks.otherConn=self
+
+    def connectionLost(self, reason):
+        self.socks.transport.loseConnection()
+
+    def dataReceived(self,data):
+        self.socks.write(data)
+
+    def write(self,data):
+        self.socks.log(self,data)
+        self.transport.write(data)
+
+
+
+class SOCKSv4Incoming(protocol.Protocol):
+
+    def __init__(self,socks):
+        self.socks=socks
+        self.socks.otherConn=self
+
+    def connectionLost(self, reason):
+        self.socks.transport.loseConnection()
+
+    def dataReceived(self,data):
+        self.socks.write(data)
+
+    def write(self,data):
+        self.socks.log(self,data)
+        self.transport.write(data)
+
+
+class SOCKSv4(protocol.Protocol):
+    """
+    An implementation of the SOCKSv4 protocol.
+
+    @type logging: C{str} or C{None}
+    @ivar logging: If not C{None}, the name of the logfile to which connection
+        information will be written.
+
+    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
+    @ivar reactor: The reactor used to create connections.
+
+    @type buf: C{str}
+    @ivar buf: Part of a SOCKSv4 connection request.
+
+    @type otherConn: C{SOCKSv4Incoming}, C{SOCKSv4Outgoing} or C{None}
+    @ivar otherConn: Until the connection has been established, C{otherConn} is
+        C{None}. After that, it is the proxy-to-destination protocol instance
+        along which the client's connection is being forwarded.
+    """
+    def __init__(self, logging=None, reactor=reactor):
+        self.logging = logging
+        self.reactor = reactor
+
+    def connectionMade(self):
+        self.buf = ""
+        self.otherConn = None
+
+    def dataReceived(self, data):
+        """
+        Called whenever data is received.
+
+        @type data: C{str}
+        @param data: Part or all of a SOCKSv4 packet.
+        """
+        if self.otherConn:
+            self.otherConn.write(data)
+            return
+        self.buf = self.buf + data
+        completeBuffer = self.buf
+        if "\000" in self.buf[8:]:
+            head, self.buf = self.buf[:8], self.buf[8:]
+            version, code, port = struct.unpack("!BBH", head[:4])
+            user, self.buf = self.buf.split("\000", 1)
+            if head[4:7] == "\000\000\000" and head[7] != "\000":
+                # An IP address of the form 0.0.0.X, where X is non-zero,
+                # signifies that this is a SOCKSv4a packet.
+                # If the complete packet hasn't been received, restore the
+                # buffer and wait for it.
+                if "\000" not in self.buf:
+                    self.buf = completeBuffer
+                    return
+                server, self.buf = self.buf.split("\000", 1)
+                d = self.reactor.resolve(server)
+                d.addCallback(self._dataReceived2, user,
+                              version, code, port)
+                d.addErrback(lambda result, self = self: self.makeReply(91))
+                return
+            else:
+                server = socket.inet_ntoa(head[4:8])
+
+            self._dataReceived2(server, user, version, code, port)
+
+    def _dataReceived2(self, server, user, version, code, port):
+        """
+        The second half of the SOCKS connection setup. For a SOCKSv4 packet this
+        is after the server address has been extracted from the header. For a
+        SOCKSv4a packet this is after the host name has been resolved.
+
+        @type server: C{str}
+        @param server: The IP address of the destination, represented as a
+            dotted quad.
+
+        @type user: C{str}
+        @param user: The username associated with the connection.
+
+        @type version: C{int}
+        @param version: The SOCKS protocol version number.
+
+        @type code: C{int}
+        @param code: The command code. 1 means establish a TCP/IP stream
+            connection, and 2 means establish a TCP/IP port binding.
+
+        @type port: C{int}
+        @param port: The port number associated with the connection.
+        """
+        assert version == 4, "Bad version code: %s" % version
+        if not self.authorize(code, server, port, user):
+            self.makeReply(91)
+            return
+        if code == 1: # CONNECT
+            d = self.connectClass(server, port, SOCKSv4Outgoing, self)
+            d.addErrback(lambda result, self = self: self.makeReply(91))
+        elif code == 2: # BIND
+            d = self.listenClass(0, SOCKSv4IncomingFactory, self, server)
+            d.addCallback(lambda (h, p),
+                          self = self: self.makeReply(90, 0, p, h))
+        else:
+            raise RuntimeError, "Bad Connect Code: %s" % code
+        assert self.buf == "", "hmm, still stuff in buffer... %s" % repr(
+            self.buf)
+
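+    # Illustrative note (not part of the upstream code): the request parsed
+    # above is the 8-byte SOCKSv4 header followed by a NUL-terminated user ID,
+    # e.g. a CONNECT to 10.1.2.3:80 for user "bob" would arrive as
+    #
+    #     struct.pack("!BBH", 4, 1, 80)      # version, command, dest port
+    #     + socket.inet_aton("10.1.2.3")     # dest address, 4 bytes
+    #     + "bob\x00"                        # user ID, NUL-terminated
+    #
+    # For SOCKSv4a the address is 0.0.0.x (x != 0) and a NUL-terminated host
+    # name follows the user ID.  makeReply() answers with the same 8-byte
+    # layout: code 90 on success, 91 on failure.
+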
+    def connectionLost(self, reason):
+        if self.otherConn:
+            self.otherConn.transport.loseConnection()
+
+    def authorize(self,code,server,port,user):
+        log.msg("code %s connection to %s:%s (user %s) authorized" % (code,server,port,user))
+        return 1
+
+    def connectClass(self, host, port, klass, *args):
+        return protocol.ClientCreator(reactor, klass, *args).connectTCP(host,port)
+
+    def listenClass(self, port, klass, *args):
+        serv = reactor.listenTCP(port, klass(*args))
+        return defer.succeed(serv.getHost()[1:])
+
+    def makeReply(self,reply,version=0,port=0,ip="0.0.0.0"):
+        self.transport.write(struct.pack("!BBH",version,reply,port)+socket.inet_aton(ip))
+        if reply!=90: self.transport.loseConnection()
+
+    def write(self,data):
+        self.log(self,data)
+        self.transport.write(data)
+
+    def log(self,proto,data):
+        if not self.logging: return
+        peer = self.transport.getPeer()
+        their_peer = self.otherConn.transport.getPeer()
+        f=open(self.logging,"a")
+        f.write("%s\t%s:%d %s %s:%d\n"%(time.ctime(),
+                                        peer.host,peer.port,
+                                        ((proto==self and '<') or '>'),
+                                        their_peer.host,their_peer.port))
+        while data:
+            p,data=data[:16],data[16:]
+            f.write(string.join(map(lambda x:'%02X'%ord(x),p),' ')+' ')
+            f.write((16-len(p))*3*' ')
+            for c in p:
+                if len(repr(c))>3: f.write('.')
+                else: f.write(c)
+            f.write('\n')
+        f.write('\n')
+        f.close()
+
+
+
+class SOCKSv4Factory(protocol.Factory):
+    """
+    A factory for a SOCKSv4 proxy.
+
+    Constructor accepts one argument, a log file name.
+    """
+
+    def __init__(self, log):
+        self.logging = log
+
+    def buildProtocol(self, addr):
+        return SOCKSv4(self.logging, reactor)
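+
+    # Illustrative usage sketch (an assumption, not part of the upstream
+    # code): run a SOCKSv4 proxy on the conventional port, logging each
+    # connection to a file.
+    #
+    #     from twisted.internet import reactor
+    #     reactor.listenTCP(1080, SOCKSv4Factory("socks.log"))
+    #     reactor.run()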
+
+
+
+class SOCKSv4IncomingFactory(protocol.Factory):
+    """
+    A utility class for building protocols for incoming connections.
+    """
+
+    def __init__(self, socks, ip):
+        self.socks = socks
+        self.ip = ip
+
+
+    def buildProtocol(self, addr):
+        if addr[0] == self.ip:
+            self.ip = ""
+            self.socks.makeReply(90, 0)
+            return SOCKSv4Incoming(self.socks)
+        elif self.ip == "":
+            return None
+        else:
+            self.socks.makeReply(91, 0)
+            self.ip = ""
+            return None
diff --git a/ThirdParty/Twisted/twisted/protocols/stateful.py b/ThirdParty/Twisted/twisted/protocols/stateful.py
new file mode 100644
index 0000000..7b82ae3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/stateful.py
@@ -0,0 +1,52 @@
+# -*- test-case-name: twisted.test.test_stateful -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.internet import protocol
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+class StatefulProtocol(protocol.Protocol):
+    """A Protocol that stores state for you.
+
+    The state is a pair (function, num_bytes). When num_bytes bytes of data
+    arrive from the network, function is called with them. It is expected to
+    return the next state, or None to keep the same state. The initial state
+    is returned by getInitialState (override it).
+    """
+    _sful_data = None, None, 0
+
+    def makeConnection(self, transport):
+        protocol.Protocol.makeConnection(self, transport)
+        self._sful_data = self.getInitialState(), StringIO(), 0
+
+    def getInitialState(self):
+        raise NotImplementedError
+
+    def dataReceived(self, data):
+        state, buffer, offset = self._sful_data
+        buffer.seek(0, 2)
+        buffer.write(data)
+        blen = buffer.tell() # how many bytes in total are in the buffer
+        buffer.seek(offset)
+        while blen - offset >= state[1]:
+            d = buffer.read(state[1])
+            offset += state[1]
+            next = state[0](d)
+            if self.transport.disconnecting: # XXX: argh stupid hack borrowed right from LineReceiver
+                return # dataReceived won't be called again, so who cares about consistent state
+            if next:
+                state = next
+        if offset != 0:
+            b = buffer.read()
+            buffer.seek(0)
+            buffer.truncate()
+            buffer.write(b)
+            offset = 0
+        self._sful_data = state, buffer, offset
+
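+
+# Illustrative subclass sketch (an assumption for documentation only, not part
+# of the upstream module; 'struct' would need to be imported and
+# messageReceived() is a hypothetical application hook): read a 4-byte
+# big-endian length prefix, then that many bytes of payload, using the
+# (handler, num_bytes) state pairs described above.
+#
+#     class LengthPrefixed(StatefulProtocol):
+#         def getInitialState(self):
+#             return self.proto_length, 4
+#
+#         def proto_length(self, data):
+#             length, = struct.unpack("!I", data)
+#             return self.proto_body, length
+#
+#         def proto_body(self, data):
+#             self.messageReceived(data)
+#             return self.getInitialState()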
diff --git a/ThirdParty/Twisted/twisted/protocols/telnet.py b/ThirdParty/Twisted/twisted/protocols/telnet.py
new file mode 100644
index 0000000..ba1c826
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/telnet.py
@@ -0,0 +1,325 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""TELNET implementation, with line-oriented command handling.
+"""
+
+import warnings
+warnings.warn(
+    "As of Twisted 2.1, twisted.protocols.telnet is deprecated.  "
+    "See twisted.conch.telnet for the current, supported API.",
+    DeprecationWarning,
+    stacklevel=2)
+
+
+# System Imports
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+# Twisted Imports
+from twisted import copyright
+from twisted.internet import protocol
+
+# Some utility chars.
+ESC =            chr(27) # ESC for doing fanciness
+BOLD_MODE_ON =   ESC+"[1m" # turn bold on
+BOLD_MODE_OFF=   ESC+"[m"  # no char attributes
+
+
+# Characters gleaned from the various (and conflicting) RFCs.  Not all of these are correct.
+
+NULL =            chr(0)  # No operation.
+LF   =           chr(10)  # Moves the printer to the
+                          # next print line, keeping the
+                          # same horizontal position.
+CR =             chr(13)  # Moves the printer to the left
+                          # margin of the current line.
+BEL =             chr(7)  # Produces an audible or
+                          # visible signal (which does
+                          # NOT move the print head).
+BS  =             chr(8)  # Moves the print head one
+                          # character position towards
+                          # the left margin.
+HT  =             chr(9)  # Moves the printer to the
+                          # next horizontal tab stop.
+                          # It remains unspecified how
+                          # either party determines or
+                          # establishes where such tab
+                          # stops are located.
+VT =             chr(11)  # Moves the printer to the
+                          # next vertical tab stop.  It
+                          # remains unspecified how
+                          # either party determines or
+                          # establishes where such tab
+                          # stops are located.
+FF =             chr(12)  # Moves the printer to the top
+                          # of the next page, keeping
+                          # the same horizontal position.
+SE =            chr(240)  # End of subnegotiation parameters.
+NOP=            chr(241)  # No operation.
+DM =            chr(242)  # "Data Mark": The data stream portion
+                          # of a Synch.  This should always be
+                          # accompanied by a TCP Urgent
+                          # notification.
+BRK=            chr(243)  # NVT character Break.
+IP =            chr(244)  # The function Interrupt Process.
+AO =            chr(245)  # The function Abort Output
+AYT=            chr(246)  # The function Are You There.
+EC =            chr(247)  # The function Erase Character.
+EL =            chr(248)  # The function Erase Line
+GA =            chr(249)  # The Go Ahead signal.
+SB =            chr(250)  # Indicates that what follows is
+                          # subnegotiation of the indicated
+                          # option.
+WILL =          chr(251)  # Indicates the desire to begin
+                          # performing, or confirmation that
+                          # you are now performing, the
+                          # indicated option.
+WONT =          chr(252)  # Indicates the refusal to perform,
+                          # or continue performing, the
+                          # indicated option.
+DO =            chr(253)  # Indicates the request that the
+                          # other party perform, or
+                          # confirmation that you are expecting
+                          # the other party to perform, the
+                          # indicated option.
+DONT =          chr(254)  # Indicates the demand that the
+                          # other party stop performing,
+                          # or confirmation that you are no
+                          # longer expecting the other party
+                          # to perform, the indicated option.
+IAC =           chr(255)  # Data Byte 255.
+
+# features
+
+ECHO  =           chr(1)  # User-to-Server:  Asks the server to send
+                          # Echos of the transmitted data.
+
+                          # Server-to User:  States that the server is
+                          # sending echos of the transmitted data.
+                          # Sent only as a reply to ECHO or NO ECHO.
+
+SUPGA =           chr(3)  # Suppress Go Ahead...? "Modern" telnet servers
+                          # are supposed to do this.
+
+LINEMODE =       chr(34)  # I don't care that Jon Postel is dead.
+
+HIDE  =         chr(133)  # The intention is that a server will send
+                          # this signal to a user system which is
+                          # echoing locally (to the user) when the user
+                          # is about to type something secret (e.g. a
+                          # password).  In this case, the user system
+                          # is to suppress local echoing or overprint
+                          # the input (or something) until the server
+                          # sends a NOECHO signal.  In situations where
+                          # the user system is not echoing locally,
+                          # this signal must not be sent by the server.
+
+
+NOECHO=         chr(131)  # User-to-Server:  Asks the server not to
+                          # return Echos of the transmitted data.
+                          # 
+                          # Server-to-User:  States that the server is
+                          # not sending echos of the transmitted data.
+                          # Sent only as a reply to ECHO or NO ECHO,
+                          # or to end hiding of the user's input.
+
+
+
+iacBytes = {
+    DO:   'DO',
+    DONT: 'DONT',
+    WILL: 'WILL',
+    WONT: 'WONT',
+    IP:   'IP'
+    }
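+
+# Illustrative note (not part of the upstream code): option negotiation
+# travels as three-byte IAC sequences built from the constants above, e.g.
+#
+#     IAC + WILL + ECHO    # '\xff\xfb\x01', "I will echo"
+#     IAC + DONT + ECHO    # '\xff\xfe\x01', "don't echo"
+#
+# IAC + WILL + ECHO is what telnet_User() below prepends to the password
+# prompt so that the client stops echoing locally while the password is typed.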
+
+def multireplace(st, dct):
+    for k, v in dct.items():
+        st = st.replace(k, v)
+    return st
+
+class Telnet(protocol.Protocol):
+    """I am a Protocol for handling Telnet connections. I have two
+    sets of special methods, telnet_* and iac_*.
+
+    telnet_* methods get called on every line sent to me. The method
+    to call is decided by the current mode. The initial mode is 'User';
+    this means that telnet_User is the first telnet_* method to be called.
+    All telnet_* methods should return a string which specifies the mode
+    to go into next; thus dictating which telnet_* method to call next.
+    For example, the default telnet_User method returns 'Password' to go
+    into Password mode, and the default telnet_Password method returns
+    'Command' to go into Command mode.
+
+    The iac_* methods are less-used; they are called when an IAC telnet
+    byte is received. You can define iac_DO, iac_DONT, iac_WILL, iac_WONT,
+    and iac_IP methods to do what you want when one of these bytes is
+    received."""
+
+
+    gotIAC = 0
+    iacByte = None
+    lastLine = None
+    buffer = ''
+    echo = 0
+    delimiters = ['\r\n', '\r\000']
+    mode = "User"
+
+    def write(self, data):
+        """Send the given data over my transport."""
+        self.transport.write(data)
+
+
+    def connectionMade(self):
+        """I will write a welcomeMessage and loginPrompt to the client."""
+        self.write(self.welcomeMessage() + self.loginPrompt())
+
+    def welcomeMessage(self):
+        """Override me to return a string which will be sent to the client
+        before login."""
+        x = self.factory.__class__
+        return ("\r\n" + x.__module__ + '.' + x.__name__ +
+                '\r\nTwisted %s\r\n' % copyright.version
+                )
+
+    def loginPrompt(self):
+        """Override me to return a 'login:'-type prompt."""
+        return "username: "
+
+    def iacSBchunk(self, chunk):
+        pass
+
+    def iac_DO(self, feature):
+        pass
+
+    def iac_DONT(self, feature):
+        pass
+
+    def iac_WILL(self, feature):
+        pass
+
+    def iac_WONT(self, feature):
+        pass
+
+    def iac_IP(self, feature):
+        pass
+
+    def processLine(self, line):
+        """I call a method that looks like 'telnet_*' where '*' is filled
+        in by the current mode. telnet_* methods should return a string which
+        will become the new mode.  If None is returned, the mode will not change.
+        """
+        mode = getattr(self, "telnet_"+self.mode)(line)
+        if mode is not None:
+            self.mode = mode
+
+    def telnet_User(self, user):
+        """I take a username, set it to the 'self.username' attribute,
+        print out a password prompt, and switch to 'Password' mode. If
+        you want to do something else when the username is received (i.e.,
+        create a new user if the user doesn't exist), override me."""
+        self.username = user
+        self.write(IAC+WILL+ECHO+"password: ")
+        return "Password"
+
+    def telnet_Password(self, paswd):
+        """I accept a password as an argument, and check it with the
+        checkUserAndPass method. If the login is successful, I call
+        loggedIn()."""
+        self.write(IAC+WONT+ECHO+"*****\r\n")
+        try:
+            checked = self.checkUserAndPass(self.username, paswd)
+        except:
+            return "Done"
+        if not checked:
+            return "Done"
+        self.loggedIn()
+        return "Command"
+
+    def telnet_Command(self, cmd):
+        """The default 'command processing' mode. You probably want to
+        override me."""
+        return "Command"
+
+    def processChunk(self, chunk):
+        """I take a chunk of data and delegate out to telnet_* methods
+        by way of processLine. If the current mode is 'Done', I'll close
+        the connection. """
+        self.buffer = self.buffer + chunk
+
+        #yech.
+        for delim in self.delimiters:
+            idx = self.buffer.find(delim)
+            if idx != -1:
+                break
+            
+        while idx != -1:
+            buf, self.buffer = self.buffer[:idx], self.buffer[idx+2:]
+            self.processLine(buf)
+            if self.mode == 'Done':
+                self.transport.loseConnection()
+
+            for delim in self.delimiters:
+                idx = self.buffer.find(delim)
+                if idx != -1:
+                    break
+
+    def dataReceived(self, data):
+        chunk = StringIO()
+        # silly little IAC state-machine
+        for char in data:
+            if self.gotIAC:
+                # working on an IAC request state
+                if self.iacByte:
+                    # we're in SB mode, getting a chunk
+                    if self.iacByte == SB:
+                        if char == SE:
+                            self.iacSBchunk(chunk.getvalue())
+                            chunk = StringIO()
+                            del self.iacByte
+                            del self.gotIAC
+                        else:
+                            chunk.write(char)
+                    else:
+                        # got all I need to know state
+                        try:
+                            getattr(self, 'iac_%s' % iacBytes[self.iacByte])(char)
+                        except KeyError:
+                            pass
+                        del self.iacByte
+                        del self.gotIAC
+                else:
+                    # got IAC, this is my W/W/D/D (or perhaps sb)
+                    self.iacByte = char
+            elif char == IAC:
+                # Process what I've got so far before going into
+                # the IAC state; don't want to process characters
+                # in an inconsistent state with what they were
+                # received in.
+                c = chunk.getvalue()
+                if c:
+                    why = self.processChunk(c)
+                    if why:
+                        return why
+                    chunk = StringIO()
+                self.gotIAC = 1
+            else:
+                chunk.write(char)
+        # chunks are of a relatively indeterminate size.
+        c = chunk.getvalue()
+        if c:
+            why = self.processChunk(c)
+            if why:
+                return why
+
+    def loggedIn(self):
+        """Called after the user succesfully logged in.
+        
+        Override in subclasses.
+        """
+        pass
diff --git a/ThirdParty/Twisted/twisted/protocols/test/__init__.py b/ThirdParty/Twisted/twisted/protocols/test/__init__.py
new file mode 100644
index 0000000..fd1e058
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Unit tests for L{twisted.protocols}.
+"""
diff --git a/ThirdParty/Twisted/twisted/protocols/test/test_basic.py b/ThirdParty/Twisted/twisted/protocols/test/test_basic.py
new file mode 100644
index 0000000..5a1468e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/test/test_basic.py
@@ -0,0 +1,1061 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for L{twisted.protocols.basic}.
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+import struct
+
+from twisted.python.compat import _PY3, iterbytes
+from twisted.trial import unittest
+from twisted.protocols import basic
+from twisted.internet import protocol, error, task
+from twisted.test import proto_helpers
+
+_PY3NEWSTYLESKIP = "All classes are new style on Python 3."
+
+
+
+class FlippingLineTester(basic.LineReceiver):
+    """
+    A line receiver that flips between line and raw data modes after one byte.
+    """
+
+    delimiter = b'\n'
+
+    def __init__(self):
+        self.lines = []
+
+
+    def lineReceived(self, line):
+        """
+        Set the mode to raw.
+        """
+        self.lines.append(line)
+        self.setRawMode()
+
+
+    def rawDataReceived(self, data):
+        """
+        Set the mode back to line.
+        """
+        self.setLineMode(data[1:])
+
+
+
+class LineTester(basic.LineReceiver):
+    """
+    A line receiver that parses received data and acts on certain tokens.
+
+    @type delimiter: C{bytes}
+    @ivar delimiter: the character sequence used to separate received lines.
+    @type MAX_LENGTH: C{int}
+    @ivar MAX_LENGTH: the line length above which C{lineLengthExceeded} is
+        called.
+    @type clock: L{twisted.internet.task.Clock}
+    @ivar clock: clock simulating the reactor's C{callLater}. Pass it to the
+        constructor if you want to use the pause/rawpause functionality.
+    """
+
+    delimiter = b'\n'
+    MAX_LENGTH = 64
+
+    def __init__(self, clock=None):
+        """
+        If given, use a clock to make callLater calls.
+        """
+        self.clock = clock
+
+
+    def connectionMade(self):
+        """
+        Create/reset the list of received data when the connection is made.
+        """
+        self.received = []
+
+
+    def lineReceived(self, line):
+        """
+        Receive a line and perform an action for recognized tokens: pause,
+        rawpause, stop, len, produce, unproduce.
+        """
+        self.received.append(line)
+        if line == b'':
+            self.setRawMode()
+        elif line == b'pause':
+            self.pauseProducing()
+            self.clock.callLater(0, self.resumeProducing)
+        elif line == b'rawpause':
+            self.pauseProducing()
+            self.setRawMode()
+            self.received.append(b'')
+            self.clock.callLater(0, self.resumeProducing)
+        elif line == b'stop':
+            self.stopProducing()
+        elif line[:4] == b'len ':
+            self.length = int(line[4:])
+        elif line.startswith(b'produce'):
+            self.transport.registerProducer(self, False)
+        elif line.startswith(b'unproduce'):
+            self.transport.unregisterProducer()
+
+
+    def rawDataReceived(self, data):
+        """
+        Read raw data, until the quantity specified by a previous 'len' line is
+        reached.
+        """
+        data, rest = data[:self.length], data[self.length:]
+        self.length = self.length - len(data)
+        self.received[-1] = self.received[-1] + data
+        if self.length == 0:
+            self.setLineMode(rest)
+
+
+    def lineLengthExceeded(self, line):
+        """
+        Adjust line mode when a long line is received.
+        """
+        if len(line) > self.MAX_LENGTH + 1:
+            self.setLineMode(line[self.MAX_LENGTH + 1:])
+
+
+
+class LineOnlyTester(basic.LineOnlyReceiver):
+    """
+    A buffering line only receiver.
+    """
+    delimiter = b'\n'
+    MAX_LENGTH = 64
+
+    def connectionMade(self):
+        """
+        Create/reset the list of received data when the connection is made.
+        """
+        self.received = []
+
+
+    def lineReceived(self, line):
+        """
+        Save received data.
+        """
+        self.received.append(line)
+
+
+class LineReceiverTestCase(unittest.SynchronousTestCase):
+    """
+    Test LineReceiver, using the C{LineTester} wrapper.
+    """
+    buffer = b'''\
+len 10
+
+0123456789len 5
+
+1234
+len 20
+foo 123
+
+0123456789
+012345678len 0
+foo 5
+
+1234567890123456789012345678901234567890123456789012345678901234567890
+len 1
+
+a'''
+
+    output = [b'len 10', b'0123456789', b'len 5', b'1234\n',
+              b'len 20', b'foo 123', b'0123456789\n012345678',
+              b'len 0', b'foo 5', b'', b'67890', b'len 1', b'a']
+
+    def test_buffer(self):
+        """
+        Test buffering for different packet sizes, checking that the received
+        data matches the expected output.
+        """
+        for packet_size in range(1, 10):
+            t = proto_helpers.StringIOWithoutClosing()
+            a = LineTester()
+            a.makeConnection(protocol.FileWrapper(t))
+            for i in range(len(self.buffer) // packet_size + 1):
+                s = self.buffer[i * packet_size:(i + 1) * packet_size]
+                a.dataReceived(s)
+            self.assertEqual(self.output, a.received)
+
+
+    pauseBuf = b'twiddle1\ntwiddle2\npause\ntwiddle3\n'
+
+    pauseOutput1 = [b'twiddle1', b'twiddle2', b'pause']
+    pauseOutput2 = pauseOutput1 + [b'twiddle3']
+
+
+    def test_pausing(self):
+        """
+        Test pausing inside data receiving, using a fake clock to verify that
+        pausing/resuming works.
+        """
+        for packet_size in range(1, 10):
+            t = proto_helpers.StringIOWithoutClosing()
+            clock = task.Clock()
+            a = LineTester(clock)
+            a.makeConnection(protocol.FileWrapper(t))
+            for i in range(len(self.pauseBuf) // packet_size + 1):
+                s = self.pauseBuf[i * packet_size:(i + 1) * packet_size]
+                a.dataReceived(s)
+            self.assertEqual(self.pauseOutput1, a.received)
+            clock.advance(0)
+            self.assertEqual(self.pauseOutput2, a.received)
+
+    rawpauseBuf = b'twiddle1\ntwiddle2\nlen 5\nrawpause\n12345twiddle3\n'
+
+    rawpauseOutput1 = [b'twiddle1', b'twiddle2', b'len 5', b'rawpause', b'']
+    rawpauseOutput2 = [b'twiddle1', b'twiddle2', b'len 5', b'rawpause',
+                        b'12345', b'twiddle3']
+
+
+    def test_rawPausing(self):
+        """
+        Test pausing inside raw data receiving.
+        """
+        for packet_size in range(1, 10):
+            t = proto_helpers.StringIOWithoutClosing()
+            clock = task.Clock()
+            a = LineTester(clock)
+            a.makeConnection(protocol.FileWrapper(t))
+            for i in range(len(self.rawpauseBuf) // packet_size + 1):
+                s = self.rawpauseBuf[i * packet_size:(i + 1) * packet_size]
+                a.dataReceived(s)
+            self.assertEqual(self.rawpauseOutput1, a.received)
+            clock.advance(0)
+            self.assertEqual(self.rawpauseOutput2, a.received)
+
+    stop_buf = b'twiddle1\ntwiddle2\nstop\nmore\nstuff\n'
+
+    stop_output = [b'twiddle1', b'twiddle2', b'stop']
+
+
+    def test_stopProducing(self):
+        """
+        Test calling stopProducing from inside data receiving.
+        """
+        for packet_size in range(1, 10):
+            t = proto_helpers.StringIOWithoutClosing()
+            a = LineTester()
+            a.makeConnection(protocol.FileWrapper(t))
+            for i in range(len(self.stop_buf) // packet_size + 1):
+                s = self.stop_buf[i * packet_size:(i + 1) * packet_size]
+                a.dataReceived(s)
+            self.assertEqual(self.stop_output, a.received)
+
+
+    def test_lineReceiverAsProducer(self):
+        """
+        Test produce/unproduce in receiving.
+        """
+        a = LineTester()
+        t = proto_helpers.StringIOWithoutClosing()
+        a.makeConnection(protocol.FileWrapper(t))
+        a.dataReceived(b'produce\nhello world\nunproduce\ngoodbye\n')
+        self.assertEqual(a.received,
+                          [b'produce', b'hello world', b'unproduce', b'goodbye'])
+
+
+    def test_clearLineBuffer(self):
+        """
+        L{LineReceiver.clearLineBuffer} removes all buffered data and returns
+        it as a C{bytes} and can be called from beneath C{dataReceived}.
+        """
+        class ClearingReceiver(basic.LineReceiver):
+            def lineReceived(self, line):
+                self.line = line
+                self.rest = self.clearLineBuffer()
+
+        protocol = ClearingReceiver()
+        protocol.dataReceived(b'foo\r\nbar\r\nbaz')
+        self.assertEqual(protocol.line, b'foo')
+        self.assertEqual(protocol.rest, b'bar\r\nbaz')
+
+        # Deliver another line to make sure the previously buffered data is
+        # really gone.
+        protocol.dataReceived(b'quux\r\n')
+        self.assertEqual(protocol.line, b'quux')
+        self.assertEqual(protocol.rest, b'')
+
+
+    def test_stackRecursion(self):
+        """
+        Test switching modes many times on the same data.
+        """
+        proto = FlippingLineTester()
+        transport = proto_helpers.StringIOWithoutClosing()
+        proto.makeConnection(protocol.FileWrapper(transport))
+        limit = sys.getrecursionlimit()
+        proto.dataReceived(b'x\nx' * limit)
+        self.assertEqual(b'x' * limit, b''.join(proto.lines))
+
+
+    def test_maximumLineLength(self):
+        """
+        C{LineReceiver} disconnects the transport if it receives a line longer
+        than its C{MAX_LENGTH}.
+        """
+        proto = basic.LineReceiver()
+        transport = proto_helpers.StringTransport()
+        proto.makeConnection(transport)
+        proto.dataReceived(b'x' * (proto.MAX_LENGTH + 1) + b'\r\nr')
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_maximumLineLengthRemaining(self):
+        """
+        C{LineReceiver} disconnects the transport if it receives an unfinished
+        line longer than its C{MAX_LENGTH}.
+        """
+        proto = basic.LineReceiver()
+        transport = proto_helpers.StringTransport()
+        proto.makeConnection(transport)
+        proto.dataReceived(b'x' * (proto.MAX_LENGTH + 1))
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_rawDataError(self):
+        """
+        C{LineReceiver.dataReceived} forwards errors returned by
+        C{rawDataReceived}.
+        """
+        proto = basic.LineReceiver()
+        proto.rawDataReceived = lambda data: RuntimeError("oops")
+        transport = proto_helpers.StringTransport()
+        proto.makeConnection(transport)
+        proto.setRawMode()
+        why = proto.dataReceived(b'data')
+        self.assertIsInstance(why, RuntimeError)
+
+
+
+class LineOnlyReceiverTestCase(unittest.SynchronousTestCase):
+    """
+    Test the line-only receiver.
+    """
+    buffer = b"""foo
+    bleakness
+    desolation
+    plastic forks
+    """
+
+    def test_buffer(self):
+        """
+        Test buffering over the line protocol: received data should match the buffer.
+        """
+        t = proto_helpers.StringTransport()
+        a = LineOnlyTester()
+        a.makeConnection(t)
+        for c in iterbytes(self.buffer):
+            a.dataReceived(c)
+        self.assertEqual(a.received, self.buffer.split(b'\n')[:-1])
+
+
+    def test_lineTooLong(self):
+        """
+        Test sending a line that is too long: it should close the connection.
+        """
+        t = proto_helpers.StringTransport()
+        a = LineOnlyTester()
+        a.makeConnection(t)
+        res = a.dataReceived(b'x' * 200)
+        self.assertIsInstance(res, error.ConnectionLost)
+
+
+
+class TestMixin:
+
+    def connectionMade(self):
+        self.received = []
+
+
+    def stringReceived(self, s):
+        self.received.append(s)
+
+    MAX_LENGTH = 50
+    closed = 0
+
+
+    def connectionLost(self, reason):
+        self.closed = 1
+
+
+
+class TestNetstring(TestMixin, basic.NetstringReceiver):
+
+    def stringReceived(self, s):
+        self.received.append(s)
+        self.transport.write(s)
+
+
+
+class LPTestCaseMixin:
+
+    illegalStrings = []
+    protocol = None
+
+
+    def getProtocol(self):
+        """
+        Return a new instance of C{self.protocol} connected to a new instance
+        of L{proto_helpers.StringTransport}.
+        """
+        t = proto_helpers.StringTransport()
+        a = self.protocol()
+        a.makeConnection(t)
+        return a
+
+
+    def test_illegal(self):
+        """
+        Assert that illegal strings cause the transport to be closed.
+        """
+        for s in self.illegalStrings:
+            r = self.getProtocol()
+            for c in iterbytes(s):
+                r.dataReceived(c)
+            self.assertTrue(r.transport.disconnecting)
+
+
+
+class NetstringReceiverTestCase(unittest.SynchronousTestCase, LPTestCaseMixin):
+
+    strings = [b'hello', b'world', b'how', b'are', b'you123', b':today',
+               b"a" * 515]
+
+    illegalStrings = [
+        b'9999999999999999999999', b'abc', b'4:abcde',
+        b'51:aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaab,',]
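+    # (Illegal because, in order: the length prefix exceeds MAX_LENGTH, there
+    # are no leading digits, there is more payload than the declared length,
+    # and a declared length of 51 exceeds the MAX_LENGTH of 50 inherited from
+    # TestMixin.)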
+
+    protocol = TestNetstring
+
+    def setUp(self):
+        self.transport = proto_helpers.StringTransport()
+        self.netstringReceiver = TestNetstring()
+        self.netstringReceiver.makeConnection(self.transport)
+
+
+    def test_buffer(self):
+        """
+        Strings can be received in chunks of different lengths.
+        """
+        for packet_size in range(1, 10):
+            t = proto_helpers.StringTransport()
+            a = TestNetstring()
+            a.MAX_LENGTH = 699
+            a.makeConnection(t)
+            for s in self.strings:
+                a.sendString(s)
+            out = t.value()
+            for i in range(len(out) // packet_size + 1):
+                s = out[i * packet_size:(i + 1) * packet_size]
+                if s:
+                    a.dataReceived(s)
+            self.assertEqual(a.received, self.strings)
+
+
+    def test_receiveEmptyNetstring(self):
+        """
+        Empty netstrings (with length '0') can be received.
+        """
+        self.netstringReceiver.dataReceived(b"0:,")
+        self.assertEqual(self.netstringReceiver.received, [b""])
+
+
+    def test_receiveOneCharacter(self):
+        """
+        One-character netstrings can be received.
+        """
+        self.netstringReceiver.dataReceived(b"1:a,")
+        self.assertEqual(self.netstringReceiver.received, [b"a"])
+
+
+    def test_receiveTwoCharacters(self):
+        """
+        Two-character netstrings can be received.
+        """
+        self.netstringReceiver.dataReceived(b"2:ab,")
+        self.assertEqual(self.netstringReceiver.received, [b"ab"])
+
+
+    def test_receiveNestedNetstring(self):
+        """
+        Netstrings with embedded netstrings. This test makes sure that
+        the parser does not become confused about the ',' and ':'
+        characters appearing inside the data portion of the netstring.
+        """
+        self.netstringReceiver.dataReceived(b"4:1:a,,")
+        self.assertEqual(self.netstringReceiver.received, [b"1:a,"])
+
+
+    def test_moreDataThanSpecified(self):
+        """
+        Netstrings containing more data than expected are refused.
+        """
+        self.netstringReceiver.dataReceived(b"2:aaa,")
+        self.assertTrue(self.transport.disconnecting)
+
+
+    def test_moreDataThanSpecifiedBorderCase(self):
+        """
+        Netstrings that should be empty according to their length
+        specification are refused if they contain data.
+        """
+        self.netstringReceiver.dataReceived(b"0:a,")
+        self.assertTrue(self.transport.disconnecting)
+
+
+    def test_missingNumber(self):
+        """
+        Netstrings without leading digits that specify the length
+        are refused.
+        """
+        self.netstringReceiver.dataReceived(b":aaa,")
+        self.assertTrue(self.transport.disconnecting)
+
+
+    def test_missingColon(self):
+        """
+        Netstrings without a colon between length specification and
+        data are refused.
+        """
+        self.netstringReceiver.dataReceived(b"3aaa,")
+        self.assertTrue(self.transport.disconnecting)
+
+
+    def test_missingNumberAndColon(self):
+        """
+        Netstrings that have no leading digits nor a colon are
+        refused.
+        """
+        self.netstringReceiver.dataReceived(b"aaa,")
+        self.assertTrue(self.transport.disconnecting)
+
+
+    def test_onlyData(self):
+        """
+        Netstrings consisting only of data are refused.
+        """
+        self.netstringReceiver.dataReceived(b"aaa")
+        self.assertTrue(self.transport.disconnecting)
+
+
+    def test_receiveNetstringPortions_1(self):
+        """
+        Netstrings can be received in two portions.
+        """
+        self.netstringReceiver.dataReceived(b"4:aa")
+        self.netstringReceiver.dataReceived(b"aa,")
+        self.assertEqual(self.netstringReceiver.received, [b"aaaa"])
+        self.assertTrue(self.netstringReceiver._payloadComplete())
+
+
+    def test_receiveNetstringPortions_2(self):
+        """
+        Netstrings can be received in more than two portions, even if
+        the length specification is split across two portions.
+        """
+        for part in [b"1", b"0:01234", b"56789", b","]:
+            self.netstringReceiver.dataReceived(part)
+        self.assertEqual(self.netstringReceiver.received, [b"0123456789"])
+
+
+    def test_receiveNetstringPortions_3(self):
+        """
+        Netstrings can be received one character at a time.
+        """
+        for part in [b"2", b":", b"a", b"b", b","]:
+            self.netstringReceiver.dataReceived(part)
+        self.assertEqual(self.netstringReceiver.received, [b"ab"])
+
+
+    def test_receiveTwoNetstrings(self):
+        """
+        A stream of two netstrings can be received in two portions,
+        where the first portion contains the complete first netstring
+        and the length specification of the second netstring.
+        """
+        self.netstringReceiver.dataReceived(b"1:a,1")
+        self.assertTrue(self.netstringReceiver._payloadComplete())
+        self.assertEqual(self.netstringReceiver.received, [b"a"])
+        self.netstringReceiver.dataReceived(b":b,")
+        self.assertEqual(self.netstringReceiver.received, [b"a", b"b"])
+
+
+    def test_maxReceiveLimit(self):
+        """
+        Netstrings with a length specification exceeding the specified
+        C{MAX_LENGTH} are refused.
+        """
+        tooLong = self.netstringReceiver.MAX_LENGTH + 1
+        self.netstringReceiver.dataReceived(b"".join(
+                (bytes(tooLong), b":", b"a" * tooLong)))
+        self.assertTrue(self.transport.disconnecting)
+
+
+    def test_consumeLength(self):
+        """
+        C{_consumeLength} computes the expected length of the netstring,
+        including the trailing comma, and stores it as the expected payload
+        size.
+        """
+        self.netstringReceiver._remainingData = b"12:"
+        self.netstringReceiver._consumeLength()
+        self.assertEqual(self.netstringReceiver._expectedPayloadSize, 13)
+
+
+    def test_consumeLengthBorderCase1(self):
+        """
+        C{_consumeLength} works as expected if the length specification
+        contains the value of C{MAX_LENGTH} (border case).
+        """
+        self.netstringReceiver._remainingData = b"12:"
+        self.netstringReceiver.MAX_LENGTH = 12
+        self.netstringReceiver._consumeLength()
+        self.assertEqual(self.netstringReceiver._expectedPayloadSize, 13)
+
+
+    def test_consumeLengthBorderCase2(self):
+        """
+        C{_consumeLength} raises a L{basic.NetstringParseError} if
+        the length specification exceeds the value of C{MAX_LENGTH}
+        by 1 (border case).
+        """
+        self.netstringReceiver._remainingData = b"12:"
+        self.netstringReceiver.MAX_LENGTH = 11
+        self.assertRaises(basic.NetstringParseError,
+                          self.netstringReceiver._consumeLength)
+
+
+    def test_consumeLengthBorderCase3(self):
+        """
+        C{_consumeLength} raises a L{basic.NetstringParseError} if
+        the length specification exceeds the value of C{MAX_LENGTH}
+        by more than 1.
+        """
+        self.netstringReceiver._remainingData = b"1000:"
+        self.netstringReceiver.MAX_LENGTH = 11
+        self.assertRaises(basic.NetstringParseError,
+                          self.netstringReceiver._consumeLength)
+
+
+    def test_deprecatedModuleAttributes(self):
+        """
+        Accessing one of the old module attributes used by the
+        NetstringReceiver parser emits a deprecation warning.
+        """
+        basic.LENGTH, basic.DATA, basic.COMMA, basic.NUMBER
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_deprecatedModuleAttributes])
+
+        self.assertEqual(len(warnings), 4)
+        for warning in warnings:
+            self.assertEqual(warning['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            ("twisted.protocols.basic.LENGTH was deprecated in Twisted 10.2.0: "
+             "NetstringReceiver parser state is private."))
+        self.assertEqual(
+            warnings[1]['message'],
+            ("twisted.protocols.basic.DATA was deprecated in Twisted 10.2.0: "
+             "NetstringReceiver parser state is private."))
+        self.assertEqual(
+            warnings[2]['message'],
+            ("twisted.protocols.basic.COMMA was deprecated in Twisted 10.2.0: "
+             "NetstringReceiver parser state is private."))
+        self.assertEqual(
+            warnings[3]['message'],
+            ("twisted.protocols.basic.NUMBER was deprecated in Twisted 10.2.0: "
+             "NetstringReceiver parser state is private."))
+
+
+
+class IntNTestCaseMixin(LPTestCaseMixin):
+    """
+    TestCase mixin for int-prefixed protocols.
+    """
+
+    protocol = None
+    strings = None
+    illegalStrings = None
+    partialStrings = None
+
+    def test_receive(self):
+        """
+        Test that received data matches the data that was sent.
+        """
+        r = self.getProtocol()
+        for s in self.strings:
+            for c in iterbytes(struct.pack(r.structFormat,len(s)) + s):
+                r.dataReceived(c)
+        self.assertEqual(r.received, self.strings)
+
+
+    def test_partial(self):
+        """
+        Send partial data; nothing should be received yet.
+        """
+        for s in self.partialStrings:
+            r = self.getProtocol()
+            for c in iterbytes(s):
+                r.dataReceived(c)
+            self.assertEqual(r.received, [])
+
+
+    def test_send(self):
+        """
+        Test sending data over the protocol.
+        """
+        r = self.getProtocol()
+        r.sendString(b"b" * 16)
+        self.assertEqual(r.transport.value(),
+            struct.pack(r.structFormat, 16) + b"b" * 16)
+
+
+    def test_lengthLimitExceeded(self):
+        """
+        When a length prefix is received which is greater than the protocol's
+        C{MAX_LENGTH} attribute, the C{lengthLimitExceeded} method is called
+        with the received length prefix.
+        """
+        length = []
+        r = self.getProtocol()
+        r.lengthLimitExceeded = length.append
+        r.MAX_LENGTH = 10
+        r.dataReceived(struct.pack(r.structFormat, 11))
+        self.assertEqual(length, [11])
+
+
+    def test_longStringNotDelivered(self):
+        """
+        If a length prefix for a string longer than C{MAX_LENGTH} is delivered
+        to C{dataReceived} at the same time as the entire string, the string is
+        not passed to C{stringReceived}.
+        """
+        r = self.getProtocol()
+        r.MAX_LENGTH = 10
+        r.dataReceived(
+            struct.pack(r.structFormat, 11) + b'x' * 11)
+        self.assertEqual(r.received, [])
+
+
+
+class RecvdAttributeMixin(object):
+    """
+    Mixin defining tests for string receiving protocols with a C{recvd}
+    attribute which should be settable by application code, to be combined with
+    L{IntNTestCaseMixin} on a L{TestCase} subclass
+    """
+
+    def makeMessage(self, protocol, data):
+        """
+        Return C{data} prefixed with message length in C{protocol.structFormat}
+        form.
+        """
+        return struct.pack(protocol.structFormat, len(data)) + data
+
+
+    def test_recvdContainsRemainingData(self):
+        """
+        In stringReceived, recvd contains the remaining data that was passed to
+        dataReceived that was not part of the current message.
+        """
+        result = []
+        r = self.getProtocol()
+        def stringReceived(receivedString):
+            result.append(r.recvd)
+        r.stringReceived = stringReceived
+        completeMessage = (struct.pack(r.structFormat, 5) + (b'a' * 5))
+        incompleteMessage = (struct.pack(r.structFormat, 5) + (b'b' * 4))
+        # Receive a complete message, followed by an incomplete one
+        r.dataReceived(completeMessage + incompleteMessage)
+        self.assertEquals(result, [incompleteMessage])
+
+
+    def test_recvdChanged(self):
+        """
+        In stringReceived, if recvd is changed, messages should be parsed from
+        it rather than the input to dataReceived.
+        """
+        r = self.getProtocol()
+        result = []
+        payloadC = b'c' * 5
+        messageC = self.makeMessage(r, payloadC)
+        def stringReceived(receivedString):
+            if not result:
+                r.recvd = messageC
+            result.append(receivedString)
+        r.stringReceived = stringReceived
+        payloadA = b'a' * 5
+        payloadB = b'b' * 5
+        messageA = self.makeMessage(r, payloadA)
+        messageB = self.makeMessage(r, payloadB)
+        r.dataReceived(messageA + messageB)
+        self.assertEquals(result, [payloadA, payloadC])
+
+
+    def test_switching(self):
+        """
+        Data already parsed by L{IntNStringReceiver.dataReceived} is not
+        reparsed if C{stringReceived} consumes some of the
+        L{IntNStringReceiver.recvd} buffer.
+        """
+        proto = self.getProtocol()
+        mix = []
+        SWITCH = b"\x00\x00\x00\x00"
+        for s in self.strings:
+            mix.append(self.makeMessage(proto, s))
+            mix.append(SWITCH)
+
+        result = []
+        def stringReceived(receivedString):
+            result.append(receivedString)
+            proto.recvd = proto.recvd[len(SWITCH):]
+
+        proto.stringReceived = stringReceived
+        proto.dataReceived(b"".join(mix))
+        # Just another byte, to trigger processing of anything that might have
+        # been left in the buffer (should be nothing).
+        proto.dataReceived(b"\x01")
+        self.assertEqual(result, self.strings)
+        # And verify that another way
+        self.assertEqual(proto.recvd, b"\x01")
+
+
+    def test_recvdInLengthLimitExceeded(self):
+        """
+        The L{IntNStringReceiver.recvd} buffer contains all data not yet
+        processed by L{IntNStringReceiver.dataReceived} if the
+        C{lengthLimitExceeded} event occurs.
+        """
+        proto = self.getProtocol()
+        DATA = b"too long"
+        proto.MAX_LENGTH = len(DATA) - 1
+        message = self.makeMessage(proto, DATA)
+
+        result = []
+        def lengthLimitExceeded(length):
+            result.append(length)
+            result.append(proto.recvd)
+
+        proto.lengthLimitExceeded = lengthLimitExceeded
+        proto.dataReceived(message)
+        self.assertEqual(result[0], len(DATA))
+        self.assertEqual(result[1], message)
+
+
+
+class TestInt32(TestMixin, basic.Int32StringReceiver):
+    """
+    A L{basic.Int32StringReceiver} storing received strings in an array.
+
+    @ivar received: array holding received strings.
+    """
+
+
+
+class Int32TestCase(unittest.SynchronousTestCase, IntNTestCaseMixin, RecvdAttributeMixin):
+    """
+    Test case for int32-prefixed protocol
+    """
+    protocol = TestInt32
+    strings = [b"a", b"b" * 16]
+    illegalStrings = [b"\x10\x00\x00\x00aaaaaa"]
+    partialStrings = [b"\x00\x00\x00", b"hello there", b""]
+
+    def test_data(self):
+        """
+        Test specific behavior of the 32-bits length.
+        """
+        r = self.getProtocol()
+        r.sendString(b"foo")
+        self.assertEqual(r.transport.value(), b"\x00\x00\x00\x03foo")
+        r.dataReceived(b"\x00\x00\x00\x04ubar")
+        self.assertEqual(r.received, [b"ubar"])
+
+
+
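+# Editor's illustrative sketch (not part of the upstream module): an
+# int32-prefixed frame is a 4-byte big-endian length followed by the payload,
+# so sendString(b"foo") is expected to put b"\x00\x00\x00\x03foo" on the wire,
+# as asserted in Int32TestCase.test_data above.  The helper name below is
+# hypothetical and exists only for illustration.
+def _exampleInt32Frame(payload):
+    """
+    Build the int32-prefixed frame for C{payload}, mirroring the wire format
+    asserted in L{Int32TestCase.test_data}.
+    """
+    import struct
+    return struct.pack("!I", len(payload)) + payload
+
+
+assert _exampleInt32Frame(b"foo") == b"\x00\x00\x00\x03foo"
+
+
+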
+class TestInt16(TestMixin, basic.Int16StringReceiver):
+    """
+    A L{basic.Int16StringReceiver} storing received strings in an array.
+
+    @ivar received: array holding received strings.
+    """
+
+
+
+class Int16TestCase(unittest.SynchronousTestCase, IntNTestCaseMixin, RecvdAttributeMixin):
+    """
+    Test case for the int16-prefixed protocol.
+    """
+    protocol = TestInt16
+    strings = [b"a", b"b" * 16]
+    illegalStrings = [b"\x10\x00aaaaaa"]
+    partialStrings = [b"\x00", b"hello there", b""]
+
+    def test_data(self):
+        """
+        Test the specific behavior of the 16-bit length prefix.
+        """
+        r = self.getProtocol()
+        r.sendString(b"foo")
+        self.assertEqual(r.transport.value(), b"\x00\x03foo")
+        r.dataReceived(b"\x00\x04ubar")
+        self.assertEqual(r.received, [b"ubar"])
+
+
+    def test_tooLongSend(self):
+        """
+        Sending a string longer than the length prefix can encode raises an error.
+        """
+        r = self.getProtocol()
+        tooSend = b"b" * (2**(r.prefixLength * 8) + 1)
+        self.assertRaises(AssertionError, r.sendString, tooSend)
+
+
+
+class NewStyleTestInt16(TestInt16, object):
+    """
+    A new-style class version of TestInt16
+    """
+
+
+
+class NewStyleInt16TestCase(Int16TestCase):
+    """
+    This test case verifies that IntNStringReceiver still works when inherited
+    by a new-style class.
+    """
+    if _PY3:
+        skip = _PY3NEWSTYLESKIP
+
+    protocol = NewStyleTestInt16
+
+
+
+class TestInt8(TestMixin, basic.Int8StringReceiver):
+    """
+    A L{basic.Int8StringReceiver} storing received strings in an array.
+
+    @ivar received: array holding received strings.
+    """
+
+
+
+class Int8TestCase(unittest.SynchronousTestCase, IntNTestCaseMixin, RecvdAttributeMixin):
+    """
+    Test case for the int8-prefixed protocol.
+    """
+    protocol = TestInt8
+    strings = [b"a", b"b" * 16]
+    illegalStrings = [b"\x00\x00aaaaaa"]
+    partialStrings = [b"\x08", b"dzadz", b""]
+
+
+    def test_data(self):
+        """
+        Test the specific behavior of the 8-bit length prefix.
+        """
+        r = self.getProtocol()
+        r.sendString(b"foo")
+        self.assertEqual(r.transport.value(), b"\x03foo")
+        r.dataReceived(b"\x04ubar")
+        self.assertEqual(r.received, [b"ubar"])
+
+
+    def test_tooLongSend(self):
+        """
+        Sending a string longer than the length prefix can encode raises an
+        error (see the capacity sketch below).
+        """
+        r = self.getProtocol()
+        tooSend = b"b" * (2**(r.prefixLength * 8) + 1)
+        self.assertRaises(AssertionError, r.sendString, tooSend)
+
+
+
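+# Editor's illustrative sketch (not part of the upstream module): an N-byte
+# length prefix can describe at most 2**(8*N) - 1 payload bytes, which is why
+# the tooLongSend tests above expect sendString to reject longer payloads.
+# The helper name below is hypothetical and exists only for illustration.
+def _examplePrefixCapacity(prefixLength):
+    """
+    Return the largest payload length a C{prefixLength}-byte prefix can frame.
+    """
+    return 2 ** (8 * prefixLength) - 1
+
+
+assert _examplePrefixCapacity(1) == 255          # Int8StringReceiver
+assert _examplePrefixCapacity(2) == 65535        # Int16StringReceiver
+assert _examplePrefixCapacity(4) == 4294967295   # Int32StringReceiver
+
+
+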
+class OnlyProducerTransport(object):
+    # Transport which isn't really a transport, just looks like one to
+    # someone not looking very hard.
+
+    paused = False
+    disconnecting = False
+
+    def __init__(self):
+        self.data = []
+
+
+    def pauseProducing(self):
+        self.paused = True
+
+
+    def resumeProducing(self):
+        self.paused = False
+
+
+    def write(self, bytes):
+        self.data.append(bytes)
+
+
+
+class ConsumingProtocol(basic.LineReceiver):
+    # Protocol that really, really doesn't want any more bytes.
+
+    def lineReceived(self, line):
+        self.transport.write(line)
+        self.pauseProducing()
+
+
+
+class ProducerTestCase(unittest.SynchronousTestCase):
+
+    def testPauseResume(self):
+        p = ConsumingProtocol()
+        t = OnlyProducerTransport()
+        p.makeConnection(t)
+
+        p.dataReceived(b'hello, ')
+        self.assertFalse(t.data)
+        self.assertFalse(t.paused)
+        self.assertFalse(p.paused)
+
+        p.dataReceived(b'world\r\n')
+
+        self.assertEqual(t.data, [b'hello, world'])
+        self.assertTrue(t.paused)
+        self.assertTrue(p.paused)
+
+        p.resumeProducing()
+
+        self.assertFalse(t.paused)
+        self.assertFalse(p.paused)
+
+        p.dataReceived(b'hello\r\nworld\r\n')
+
+        self.assertEqual(t.data, [b'hello, world', b'hello'])
+        self.assertTrue(t.paused)
+        self.assertTrue(p.paused)
+
+        p.resumeProducing()
+        p.dataReceived(b'goodbye\r\n')
+
+        self.assertEqual(t.data, [b'hello, world', b'hello', b'world'])
+        self.assertTrue(t.paused)
+        self.assertTrue(p.paused)
+
+        p.resumeProducing()
+
+        self.assertEqual(t.data, [b'hello, world', b'hello', b'world', b'goodbye'])
+        self.assertTrue(t.paused)
+        self.assertTrue(p.paused)
+
+        p.resumeProducing()
+
+        self.assertEqual(t.data, [b'hello, world', b'hello', b'world', b'goodbye'])
+        self.assertFalse(t.paused)
+        self.assertFalse(p.paused)
diff --git a/ThirdParty/Twisted/twisted/protocols/test/test_tls.py b/ThirdParty/Twisted/twisted/protocols/test/test_tls.py
new file mode 100644
index 0000000..49e3a79
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/test/test_tls.py
@@ -0,0 +1,1522 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.protocols.tls}.
+"""
+
+from __future__ import division, absolute_import
+
+from zope.interface.verify import verifyObject
+from zope.interface import Interface, directlyProvides
+
+from twisted.python.compat import intToBytes, iterbytes
+try:
+    from twisted.protocols.tls import TLSMemoryBIOProtocol, TLSMemoryBIOFactory
+    from twisted.protocols.tls import _PullToPush, _ProducerMembrane
+except ImportError:
+    # Skip the whole test module if it can't be imported.
+    skip = "pyOpenSSL 0.10 or newer required for twisted.protocol.tls"
+else:
+    # Otherwise, the pyOpenSSL dependency must be satisfied, so all these
+    # imports will work.
+    from OpenSSL.crypto import X509Type
+    from OpenSSL.SSL import (TLSv1_METHOD, Error, Context, ConnectionType,
+                             WantReadError)
+    from twisted.internet.ssl import PrivateCertificate
+    from twisted.test.ssl_helpers import (ClientTLSContext, ServerTLSContext,
+                                          certPath)
+
+from twisted.python.filepath import FilePath
+from twisted.python.failure import Failure
+from twisted.python import log
+from twisted.internet.interfaces import ISystemHandle, ISSLTransport
+from twisted.internet.interfaces import IPushProducer
+from twisted.internet.error import ConnectionDone, ConnectionLost
+from twisted.internet.defer import Deferred, gatherResults
+from twisted.internet.protocol import Protocol, ClientFactory, ServerFactory
+from twisted.internet.task import TaskStopped
+from twisted.protocols.loopback import loopbackAsync, collapsingPumpPolicy
+from twisted.trial.unittest import TestCase
+from twisted.test.test_tcp import ConnectionLostNotifyingProtocol
+from twisted.test.proto_helpers import StringTransport
+
+
+class HandshakeCallbackContextFactory:
+    """
+    L{HandshakeCallbackContextFactory} is a factory for SSL contexts which
+    allows applications to get notification when the SSL handshake completes.
+
+    @ivar _finished: A L{Deferred} which will be called back when the handshake
+        is done.
+    """
+    # pyOpenSSL needs to expose this.
+    # https://bugs.launchpad.net/pyopenssl/+bug/372832
+    SSL_CB_HANDSHAKE_DONE = 0x20
+
+    def __init__(self):
+        self._finished = Deferred()
+
+
+    @classmethod
+    def factoryAndDeferred(cls):
+        """
+        Create a new L{HandshakeCallbackContextFactory} and return a two-tuple
+        of it and a L{Deferred} which will fire when a connection created with
+        it completes a TLS handshake.
+        """
+        contextFactory = cls()
+        return contextFactory, contextFactory._finished
+
+
+    def _info(self, connection, where, ret):
+        """
+        This is the "info callback" on the context.  It will be called
+        periodically by pyOpenSSL with information about the state of a
+        connection.  When it indicates the handshake is complete, it will fire
+        C{self._finished}.
+        """
+        if where & self.SSL_CB_HANDSHAKE_DONE:
+            self._finished.callback(None)
+
+
+    def getContext(self):
+        """
+        Create and return an SSL context configured to use L{self._info} as the
+        info callback.
+        """
+        context = Context(TLSv1_METHOD)
+        context.set_info_callback(self._info)
+        return context
+
+
+
+class AccumulatingProtocol(Protocol):
+    """
+    A protocol which collects the bytes it receives and closes its connection
+    after receiving a certain minimum of data.
+
+    @ivar howMany: The number of bytes of data to wait for before closing the
+        connection.
+
+    @ivar received: A C{list} of the C{bytes} received so far.
+    """
+    def __init__(self, howMany):
+        self.howMany = howMany
+
+
+    def connectionMade(self):
+        self.received = []
+
+
+    def dataReceived(self, bytes):
+        self.received.append(bytes)
+        if sum(map(len, self.received)) >= self.howMany:
+            self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        if not reason.check(ConnectionDone):
+            log.err(reason)
+
+
+
+def buildTLSProtocol(server=False, transport=None):
+    """
+    Create a protocol hooked up to a TLS transport hooked up to a
+    StringTransport.
+    """
+    # We want to accumulate bytes without disconnecting, so set a high limit:
+    clientProtocol = AccumulatingProtocol(999999999999)
+    clientFactory = ClientFactory()
+    clientFactory.protocol = lambda: clientProtocol
+
+    if server:
+        contextFactory = ServerTLSContext()
+    else:
+        contextFactory = ClientTLSContext()
+    wrapperFactory = TLSMemoryBIOFactory(
+        contextFactory, not server, clientFactory)
+    sslProtocol = wrapperFactory.buildProtocol(None)
+
+    if transport is None:
+        transport = StringTransport()
+    sslProtocol.makeConnection(transport)
+    return clientProtocol, sslProtocol
+
+
+
+class TLSMemoryBIOFactoryTests(TestCase):
+    """
+    Ensure TLSMemoryBIOFactory logging acts correctly.
+    """
+
+    def test_quiet(self):
+        """
+        L{TLSMemoryBIOFactory.doStart} and L{TLSMemoryBIOFactory.doStop} do
+        not log any messages.
+        """
+        contextFactory = ServerTLSContext()
+
+        logs = []
+        logger = logs.append
+        log.addObserver(logger)
+        self.addCleanup(log.removeObserver, logger)
+        wrappedFactory = ServerFactory()
+        # Disable logging on the wrapped factory:
+        wrappedFactory.doStart = lambda: None
+        wrappedFactory.doStop = lambda: None
+        factory = TLSMemoryBIOFactory(contextFactory, False, wrappedFactory)
+        factory.doStart()
+        factory.doStop()
+        self.assertEqual(logs, [])
+
+
+    def test_logPrefix(self):
+        """
+        L{TLSMemoryBIOFactory.logPrefix} amends the wrapped factory's log prefix
+        with a short string (C{"TLS"}) indicating the wrapping, rather than its
+        full class name.
+        """
+        contextFactory = ServerTLSContext()
+        factory = TLSMemoryBIOFactory(contextFactory, False, ServerFactory())
+        self.assertEqual("ServerFactory (TLS)", factory.logPrefix())
+
+
+    def test_logPrefixFallback(self):
+        """
+        If the wrapped factory does not provide L{ILoggingContext},
+        L{TLSMemoryBIOFactory.logPrefix} uses the wrapped factory's class name.
+        """
+        class NoFactory(object):
+            pass
+
+        contextFactory = ServerTLSContext()
+        factory = TLSMemoryBIOFactory(contextFactory, False, NoFactory())
+        self.assertEqual("NoFactory (TLS)", factory.logPrefix())
+
+
+
+class TLSMemoryBIOTests(TestCase):
+    """
+    Tests for the implementation of L{ISSLTransport} which runs over another
+    L{ITransport}.
+    """
+
+    def test_interfaces(self):
+        """
+        L{TLSMemoryBIOProtocol} instances provide L{ISSLTransport} and
+        L{ISystemHandle}.
+        """
+        proto = TLSMemoryBIOProtocol(None, None)
+        self.assertTrue(ISSLTransport.providedBy(proto))
+        self.assertTrue(ISystemHandle.providedBy(proto))
+
+
+    def test_wrappedProtocolInterfaces(self):
+        """
+        L{TLSMemoryBIOProtocol} instances provide the interfaces provided by
+        the transport they wrap.
+        """
+        class ITransport(Interface):
+            pass
+
+        class MyTransport(object):
+            def write(self, bytes):
+                pass
+
+        clientFactory = ClientFactory()
+        contextFactory = ClientTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            contextFactory, True, clientFactory)
+
+        transport = MyTransport()
+        directlyProvides(transport, ITransport)
+        tlsProtocol = TLSMemoryBIOProtocol(wrapperFactory, Protocol())
+        tlsProtocol.makeConnection(transport)
+        self.assertTrue(ITransport.providedBy(tlsProtocol))
+
+
+    def test_getHandle(self):
+        """
+        L{TLSMemoryBIOProtocol.getHandle} returns the L{OpenSSL.SSL.Connection}
+        instance it uses to actually implement TLS.
+
+        This may seem odd.  In fact, it is.  The L{OpenSSL.SSL.Connection} is
+        not actually the "system handle" here, nor even an object the reactor
+        knows about directly.  However, L{twisted.internet.ssl.Certificate}'s
+        C{peerFromTransport} and C{hostFromTransport} methods depend on being
+        able to get an L{OpenSSL.SSL.Connection} object in order to work
+        properly.  Implementing L{ISystemHandle.getHandle} like this is the
+        easiest way for those APIs to be made to work.  If they are changed,
+        then it may make sense to get rid of this implementation of
+        L{ISystemHandle} and return the underlying socket instead.
+        """
+        factory = ClientFactory()
+        contextFactory = ClientTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(contextFactory, True, factory)
+        proto = TLSMemoryBIOProtocol(wrapperFactory, Protocol())
+        transport = StringTransport()
+        proto.makeConnection(transport)
+        self.assertIsInstance(proto.getHandle(), ConnectionType)
+
+
+    def test_makeConnection(self):
+        """
+        When L{TLSMemoryBIOProtocol} is connected to a transport, it connects
+        the protocol it wraps to a transport.
+        """
+        clientProtocol = Protocol()
+        clientFactory = ClientFactory()
+        clientFactory.protocol = lambda: clientProtocol
+
+        contextFactory = ClientTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            contextFactory, True, clientFactory)
+        sslProtocol = wrapperFactory.buildProtocol(None)
+
+        transport = StringTransport()
+        sslProtocol.makeConnection(transport)
+
+        self.assertNotIdentical(clientProtocol.transport, None)
+        self.assertNotIdentical(clientProtocol.transport, transport)
+        self.assertIdentical(clientProtocol.transport, sslProtocol)
+
+
+    def handshakeProtocols(self):
+        """
+        Start handshake between TLS client and server.
+        """
+        clientFactory = ClientFactory()
+        clientFactory.protocol = Protocol
+
+        clientContextFactory, handshakeDeferred = (
+            HandshakeCallbackContextFactory.factoryAndDeferred())
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        serverFactory = ServerFactory()
+        serverFactory.protocol = Protocol
+
+        serverContextFactory = ServerTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            serverContextFactory, False, serverFactory)
+        sslServerProtocol = wrapperFactory.buildProtocol(None)
+
+        connectionDeferred = loopbackAsync(sslServerProtocol, sslClientProtocol)
+        return (sslClientProtocol, sslServerProtocol, handshakeDeferred,
+                connectionDeferred)
+
+
+    def test_handshake(self):
+        """
+        The TLS handshake is performed when L{TLSMemoryBIOProtocol} is
+        connected to a transport.
+        """
+        tlsClient, tlsServer, handshakeDeferred, _ = self.handshakeProtocols()
+
+        # Only wait for the handshake to complete.  Anything after that isn't
+        # important here.
+        return handshakeDeferred
+
+
+    def test_handshakeFailure(self):
+        """
+        L{TLSMemoryBIOProtocol} reports errors in the handshake process to the
+        application-level protocol object using its C{connectionLost} method
+        and disconnects the underlying transport.
+        """
+        clientConnectionLost = Deferred()
+        clientFactory = ClientFactory()
+        clientFactory.protocol = (
+            lambda: ConnectionLostNotifyingProtocol(
+                clientConnectionLost))
+
+        clientContextFactory = HandshakeCallbackContextFactory()
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        serverConnectionLost = Deferred()
+        serverFactory = ServerFactory()
+        serverFactory.protocol = (
+            lambda: ConnectionLostNotifyingProtocol(
+                serverConnectionLost))
+
+        # This context factory rejects any clients which do not present a
+        # certificate.
+        certificateData = FilePath(certPath).getContent()
+        certificate = PrivateCertificate.loadPEM(certificateData)
+        serverContextFactory = certificate.options(certificate)
+        wrapperFactory = TLSMemoryBIOFactory(
+            serverContextFactory, False, serverFactory)
+        sslServerProtocol = wrapperFactory.buildProtocol(None)
+
+        connectionDeferred = loopbackAsync(sslServerProtocol, sslClientProtocol)
+
+        def cbConnectionLost(protocol):
+            # The connection should close on its own in response to the error
+            # induced by the client not supplying the required certificate.
+            # After that, check to make sure the protocol's connectionLost was
+            # called with the right thing.
+            protocol.lostConnectionReason.trap(Error)
+        clientConnectionLost.addCallback(cbConnectionLost)
+        serverConnectionLost.addCallback(cbConnectionLost)
+
+        # Additionally, the underlying transport should have been told to
+        # go away.
+        return gatherResults([
+                clientConnectionLost, serverConnectionLost,
+                connectionDeferred])
+
+
+    def test_getPeerCertificate(self):
+        """
+        L{TLSMemoryBIOProtocol.getPeerCertificate} returns the
+        L{OpenSSL.crypto.X509Type} instance representing the peer's
+        certificate.
+        """
+        # Set up a client and server so there's a certificate to grab.
+        clientFactory = ClientFactory()
+        clientFactory.protocol = Protocol
+
+        clientContextFactory, handshakeDeferred = (
+            HandshakeCallbackContextFactory.factoryAndDeferred())
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        serverFactory = ServerFactory()
+        serverFactory.protocol = Protocol
+
+        serverContextFactory = ServerTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            serverContextFactory, False, serverFactory)
+        sslServerProtocol = wrapperFactory.buildProtocol(None)
+
+        loopbackAsync(sslServerProtocol, sslClientProtocol)
+
+        # Wait for the handshake
+        def cbHandshook(ignored):
+            # Grab the server's certificate and check it out
+            cert = sslClientProtocol.getPeerCertificate()
+            self.assertIsInstance(cert, X509Type)
+            self.assertEqual(
+                cert.digest('md5'),
+                b'9B:A4:AB:43:10:BE:82:AE:94:3E:6B:91:F2:F3:40:E8')
+        handshakeDeferred.addCallback(cbHandshook)
+        return handshakeDeferred
+
+
+    def test_writeAfterHandshake(self):
+        """
+        Bytes written to L{TLSMemoryBIOProtocol} before the handshake is
+        complete are received by the protocol on the other side of the
+        connection once the handshake succeeds.
+        """
+        bytes = b"some bytes"
+
+        clientProtocol = Protocol()
+        clientFactory = ClientFactory()
+        clientFactory.protocol = lambda: clientProtocol
+
+        clientContextFactory, handshakeDeferred = (
+            HandshakeCallbackContextFactory.factoryAndDeferred())
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        serverProtocol = AccumulatingProtocol(len(bytes))
+        serverFactory = ServerFactory()
+        serverFactory.protocol = lambda: serverProtocol
+
+        serverContextFactory = ServerTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            serverContextFactory, False, serverFactory)
+        sslServerProtocol = wrapperFactory.buildProtocol(None)
+
+        connectionDeferred = loopbackAsync(sslServerProtocol, sslClientProtocol)
+
+        # Wait for the handshake to finish before writing anything.
+        def cbHandshook(ignored):
+            clientProtocol.transport.write(bytes)
+
+            # The server will drop the connection once it gets the bytes.
+            return connectionDeferred
+        handshakeDeferred.addCallback(cbHandshook)
+
+        # Once the connection is lost, make sure the server received the
+        # expected bytes.
+        def cbDisconnected(ignored):
+            self.assertEqual(b"".join(serverProtocol.received), bytes)
+        handshakeDeferred.addCallback(cbDisconnected)
+
+        return handshakeDeferred
+
+
+    def writeBeforeHandshakeTest(self, sendingProtocol, bytes):
+        """
+        Run a test where the client sends data before the handshake, given the
+        sending protocol and the expected bytes.
+        """
+        clientFactory = ClientFactory()
+        clientFactory.protocol = sendingProtocol
+
+        clientContextFactory, handshakeDeferred = (
+            HandshakeCallbackContextFactory.factoryAndDeferred())
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        serverProtocol = AccumulatingProtocol(len(bytes))
+        serverFactory = ServerFactory()
+        serverFactory.protocol = lambda: serverProtocol
+
+        serverContextFactory = ServerTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            serverContextFactory, False, serverFactory)
+        sslServerProtocol = wrapperFactory.buildProtocol(None)
+
+        connectionDeferred = loopbackAsync(sslServerProtocol, sslClientProtocol)
+
+        # Wait for the connection to end, then make sure the server received
+        # the bytes sent by the client.
+        def cbConnectionDone(ignored):
+            self.assertEqual(b"".join(serverProtocol.received), bytes)
+        connectionDeferred.addCallback(cbConnectionDone)
+        return connectionDeferred
+
+
+    def test_writeBeforeHandshake(self):
+        """
+        Bytes written to L{TLSMemoryBIOProtocol} before the handshake is
+        complete are received by the protocol on the other side of the
+        connection once the handshake succeeds.
+        """
+        bytes = b"some bytes"
+
+        class SimpleSendingProtocol(Protocol):
+            def connectionMade(self):
+                self.transport.write(bytes)
+
+        return self.writeBeforeHandshakeTest(SimpleSendingProtocol, bytes)
+
+
+    def test_writeSequence(self):
+        """
+        Bytes written to L{TLSMemoryBIOProtocol} with C{writeSequence} are
+        received by the protocol on the other side of the connection.
+        """
+        bytes = b"some bytes"
+        class SimpleSendingProtocol(Protocol):
+            def connectionMade(self):
+                self.transport.writeSequence(list(iterbytes(bytes)))
+
+        return self.writeBeforeHandshakeTest(SimpleSendingProtocol, bytes)
+
+
+    def test_writeAfterLoseConnection(self):
+        """
+        Bytes written to L{TLSMemoryBIOProtocol} after C{loseConnection} is
+        called are not transmitted (unless there is a registered producer,
+        which will be tested elsewhere).
+        """
+        bytes = b"some bytes"
+        class SimpleSendingProtocol(Protocol):
+            def connectionMade(self):
+                self.transport.write(bytes)
+                self.transport.loseConnection()
+                self.transport.write(b"hello")
+                self.transport.writeSequence([b"world"])
+        return self.writeBeforeHandshakeTest(SimpleSendingProtocol, bytes)
+
+
+    def test_writeUnicodeRaisesTypeError(self):
+        """
+        Writing C{unicode} to L{TLSMemoryBIOProtocol} throws a C{TypeError}.
+        """
+        notBytes = u"hello"
+        result = []
+        class SimpleSendingProtocol(Protocol):
+            def connectionMade(self):
+                try:
+                    self.transport.write(notBytes)
+                except TypeError:
+                    result.append(True)
+                self.transport.write(b"bytes")
+                self.transport.loseConnection()
+        d = self.writeBeforeHandshakeTest(SimpleSendingProtocol, b"bytes")
+        return d.addCallback(lambda ign: self.assertEqual(result, [True]))
+
+
+    def test_multipleWrites(self):
+        """
+        If multiple separate TLS messages are received in a single chunk from
+        the underlying transport, all of the application bytes from each
+        message are delivered to the application-level protocol.
+        """
+        bytes = [b'a', b'b', b'c', b'd', b'e', b'f', b'g', b'h', b'i']
+        class SimpleSendingProtocol(Protocol):
+            def connectionMade(self):
+                for b in bytes:
+                    self.transport.write(b)
+
+        clientFactory = ClientFactory()
+        clientFactory.protocol = SimpleSendingProtocol
+
+        clientContextFactory = HandshakeCallbackContextFactory()
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        serverProtocol = AccumulatingProtocol(sum(map(len, bytes)))
+        serverFactory = ServerFactory()
+        serverFactory.protocol = lambda: serverProtocol
+
+        serverContextFactory = ServerTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            serverContextFactory, False, serverFactory)
+        sslServerProtocol = wrapperFactory.buildProtocol(None)
+
+        connectionDeferred = loopbackAsync(sslServerProtocol, sslClientProtocol, collapsingPumpPolicy)
+
+        # Wait for the connection to end, then make sure the server received
+        # the bytes sent by the client.
+        def cbConnectionDone(ignored):
+            self.assertEqual(b"".join(serverProtocol.received), b''.join(bytes))
+        connectionDeferred.addCallback(cbConnectionDone)
+        return connectionDeferred
+
+
+    def test_hugeWrite(self):
+        """
+        If a very long string is passed to L{TLSMemoryBIOProtocol.write}, any
+        trailing part of it which cannot be sent immediately is buffered and
+        sent later.
+        """
+        bytes = b"some bytes"
+        factor = 8192
+        class SimpleSendingProtocol(Protocol):
+            def connectionMade(self):
+                self.transport.write(bytes * factor)
+
+        clientFactory = ClientFactory()
+        clientFactory.protocol = SimpleSendingProtocol
+
+        clientContextFactory = HandshakeCallbackContextFactory()
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        serverProtocol = AccumulatingProtocol(len(bytes) * factor)
+        serverFactory = ServerFactory()
+        serverFactory.protocol = lambda: serverProtocol
+
+        serverContextFactory = ServerTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            serverContextFactory, False, serverFactory)
+        sslServerProtocol = wrapperFactory.buildProtocol(None)
+
+        connectionDeferred = loopbackAsync(sslServerProtocol, sslClientProtocol)
+
+        # Wait for the connection to end, then make sure the server received
+        # the bytes sent by the client.
+        def cbConnectionDone(ignored):
+            self.assertEqual(b"".join(serverProtocol.received), bytes * factor)
+        connectionDeferred.addCallback(cbConnectionDone)
+        return connectionDeferred
+
+
+    def test_disorderlyShutdown(self):
+        """
+        If a L{TLSMemoryBIOProtocol} loses its connection unexpectedly, this is
+        reported to the application.
+        """
+        clientConnectionLost = Deferred()
+        clientFactory = ClientFactory()
+        clientFactory.protocol = (
+            lambda: ConnectionLostNotifyingProtocol(
+                clientConnectionLost))
+
+        clientContextFactory = HandshakeCallbackContextFactory()
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        # Client speaks first, so the server can be dumb.
+        serverProtocol = Protocol()
+
+        loopbackAsync(serverProtocol, sslClientProtocol)
+
+        # Now destroy the connection.
+        serverProtocol.transport.loseConnection()
+
+        # And when the connection completely dies, check the reason.
+        def cbDisconnected(clientProtocol):
+            clientProtocol.lostConnectionReason.trap(Error)
+        clientConnectionLost.addCallback(cbDisconnected)
+        return clientConnectionLost
+
+
+    def test_loseConnectionAfterHandshake(self):
+        """
+        L{TLSMemoryBIOProtocol.loseConnection} sends a TLS close alert and
+        shuts down the underlying connection cleanly on both sides, after
+        transmitting all buffered data.
+        """
+        class NotifyingProtocol(ConnectionLostNotifyingProtocol):
+            def __init__(self, onConnectionLost):
+                ConnectionLostNotifyingProtocol.__init__(self,
+                                                         onConnectionLost)
+                self.data = []
+
+            def dataReceived(self, bytes):
+                self.data.append(bytes)
+
+        clientConnectionLost = Deferred()
+        clientFactory = ClientFactory()
+        clientProtocol = NotifyingProtocol(clientConnectionLost)
+        clientFactory.protocol = lambda: clientProtocol
+
+        clientContextFactory, handshakeDeferred = (
+            HandshakeCallbackContextFactory.factoryAndDeferred())
+        wrapperFactory = TLSMemoryBIOFactory(
+            clientContextFactory, True, clientFactory)
+        sslClientProtocol = wrapperFactory.buildProtocol(None)
+
+        serverConnectionLost = Deferred()
+        serverProtocol = NotifyingProtocol(serverConnectionLost)
+        serverFactory = ServerFactory()
+        serverFactory.protocol = lambda: serverProtocol
+
+        serverContextFactory = ServerTLSContext()
+        wrapperFactory = TLSMemoryBIOFactory(
+            serverContextFactory, False, serverFactory)
+        sslServerProtocol = wrapperFactory.buildProtocol(None)
+
+        loopbackAsync(sslServerProtocol, sslClientProtocol)
+        chunkOfBytes = b"123456890" * 100000
+
+        # Wait for the handshake before dropping the connection.
+        def cbHandshake(ignored):
+            # Write more than a single bio_read, to ensure client will still
+            # have some data it needs to write when it receives the TLS close
+            # alert, and that simply doing a single bio_read won't be
+            # sufficient. Thus we will verify that any amount of buffered data
+            # will be written out before the connection is closed, rather than
+            # just small amounts that can be returned in a single bio_read:
+            clientProtocol.transport.write(chunkOfBytes)
+            serverProtocol.transport.loseConnection()
+
+            # Now wait for the client and server to notice.
+            return gatherResults([clientConnectionLost, serverConnectionLost])
+        handshakeDeferred.addCallback(cbHandshake)
+
+        # Wait for the connection to end, then make sure the client and server
+        # weren't notified of a handshake failure that would cause the test to
+        # fail.
+        def cbConnectionDone(result):
+            (clientProtocol, serverProtocol) = result
+            clientProtocol.lostConnectionReason.trap(ConnectionDone)
+            serverProtocol.lostConnectionReason.trap(ConnectionDone)
+
+            # The server should have received all bytes sent by the client:
+            self.assertEqual(b"".join(serverProtocol.data), chunkOfBytes)
+
+            # The server should have closed its underlying transport, in
+            # addition to whatever it did to shut down the TLS layer.
+            self.assertTrue(serverProtocol.transport.q.disconnect)
+
+            # The client should also have closed its underlying transport once
+            # it saw the server shut down the TLS layer, so as to avoid relying
+            # on the server to close the underlying connection.
+            self.assertTrue(clientProtocol.transport.q.disconnect)
+        handshakeDeferred.addCallback(cbConnectionDone)
+        return handshakeDeferred
+
+
+    def test_connectionLostOnlyAfterUnderlyingCloses(self):
+        """
+        The user protocol's connectionLost is only called when the transport
+        underlying the TLS layer is disconnected.
+        """
+        class LostProtocol(Protocol):
+            disconnected = None
+            def connectionLost(self, reason):
+                self.disconnected = reason
+        wrapperFactory = TLSMemoryBIOFactory(ClientTLSContext(),
+                                             True, ClientFactory())
+        protocol = LostProtocol()
+        tlsProtocol = TLSMemoryBIOProtocol(wrapperFactory, protocol)
+        transport = StringTransport()
+        tlsProtocol.makeConnection(transport)
+
+        # Pretend TLS shutdown finished cleanly; the underlying transport
+        # should be told to close, but the user protocol should not yet be
+        # notified:
+        tlsProtocol._tlsShutdownFinished(None)
+        self.assertEqual(transport.disconnecting, True)
+        self.assertEqual(protocol.disconnected, None)
+
+        # Now close the underlying connection; the user protocol should be
+        # notified with the given reason (since TLS closed cleanly):
+        tlsProtocol.connectionLost(Failure(ConnectionLost("ono")))
+        self.assertTrue(protocol.disconnected.check(ConnectionLost))
+        self.assertEqual(protocol.disconnected.value.args, ("ono",))
+
+
+    def test_loseConnectionTwice(self):
+        """
+        If TLSMemoryBIOProtocol.loseConnection is called multiple times, all
+        but the first call have no effect.
+        """
+        wrapperFactory = TLSMemoryBIOFactory(ClientTLSContext(),
+                                             True, ClientFactory())
+        tlsProtocol = TLSMemoryBIOProtocol(wrapperFactory, Protocol())
+        transport = StringTransport()
+        tlsProtocol.makeConnection(transport)
+        self.assertEqual(tlsProtocol.disconnecting, False)
+
+        # Make sure loseConnection calls _shutdownTLS the first time (mostly
+        # to make sure we're overriding it correctly):
+        calls = []
+        def _shutdownTLS(shutdown=tlsProtocol._shutdownTLS):
+            calls.append(1)
+            return shutdown()
+        tlsProtocol._shutdownTLS = _shutdownTLS
+        tlsProtocol.loseConnection()
+        self.assertEqual(tlsProtocol.disconnecting, True)
+        self.assertEqual(calls, [1])
+
+        # Make sure _shutdownTLS isn't called a second time:
+        tlsProtocol.loseConnection()
+        self.assertEqual(calls, [1])
+
+
+    def test_unexpectedEOF(self):
+        """
+        Unexpected disconnects get converted to ConnectionLost errors.
+        """
+        tlsClient, tlsServer, handshakeDeferred, disconnectDeferred = (
+            self.handshakeProtocols())
+        serverProtocol = tlsServer.wrappedProtocol
+        data = []
+        reason = []
+        serverProtocol.dataReceived = data.append
+        serverProtocol.connectionLost = reason.append
+
+        # Write data, then disconnect *underlying* transport, resulting in an
+        # unexpected TLS disconnect:
+        def handshakeDone(ign):
+            tlsClient.write(b"hello")
+            tlsClient.transport.loseConnection()
+        handshakeDeferred.addCallback(handshakeDone)
+
+        # Receiver should be disconnected, with ConnectionLost notification
+        # (masking the Unexpected EOF SSL error):
+        def disconnected(ign):
+            self.assertTrue(reason[0].check(ConnectionLost), reason[0])
+        disconnectDeferred.addCallback(disconnected)
+        return disconnectDeferred
+
+
+    def test_errorWriting(self):
+        """
+        Errors while writing cause the protocols to be disconnected.
+        """
+        tlsClient, tlsServer, handshakeDeferred, disconnectDeferred = (
+            self.handshakeProtocols())
+        reason = []
+        tlsClient.wrappedProtocol.connectionLost = reason.append
+
+        # Pretend TLS connection is unhappy sending:
+        class Wrapper(object):
+            def __init__(self, wrapped):
+                self._wrapped = wrapped
+            def __getattr__(self, attr):
+                return getattr(self._wrapped, attr)
+            def send(self, *args):
+                raise Error("ONO!")
+        tlsClient._tlsConnection = Wrapper(tlsClient._tlsConnection)
+
+        # Write some data:
+        def handshakeDone(ign):
+            tlsClient.write(b"hello")
+        handshakeDeferred.addCallback(handshakeDone)
+
+        # Failed writer should be disconnected with SSL error:
+        def disconnected(ign):
+            self.assertTrue(reason[0].check(Error), reason[0])
+        disconnectDeferred.addCallback(disconnected)
+        return disconnectDeferred
+
+
+
+class TLSProducerTests(TestCase):
+    """
+    The TLS transport must support the IConsumer interface.
+    """
+
+    def setupStreamingProducer(self, transport=None):
+        class HistoryStringTransport(StringTransport):
+            def __init__(self):
+                StringTransport.__init__(self)
+                self.producerHistory = []
+
+            def pauseProducing(self):
+                self.producerHistory.append("pause")
+                StringTransport.pauseProducing(self)
+
+            def resumeProducing(self):
+                self.producerHistory.append("resume")
+                StringTransport.resumeProducing(self)
+
+            def stopProducing(self):
+                self.producerHistory.append("stop")
+                StringTransport.stopProducing(self)
+
+        clientProtocol, tlsProtocol = buildTLSProtocol(transport=transport)
+        producer = HistoryStringTransport()
+        clientProtocol.transport.registerProducer(producer, True)
+        self.assertEqual(tlsProtocol.transport.streaming, True)
+        return clientProtocol, tlsProtocol, producer
+
+
+    def flushTwoTLSProtocols(self, tlsProtocol, serverTLSProtocol):
+        """
+        Transfer bytes back and forth between two TLS protocols.
+        """
+        # We want to make sure all bytes are passed back and forth; JP
+        # estimated that 3 rounds should be enough:
+        for i in range(3):
+            clientData = tlsProtocol.transport.value()
+            if clientData:
+                serverTLSProtocol.dataReceived(clientData)
+                tlsProtocol.transport.clear()
+            serverData = serverTLSProtocol.transport.value()
+            if serverData:
+                tlsProtocol.dataReceived(serverData)
+                serverTLSProtocol.transport.clear()
+            if not serverData and not clientData:
+                break
+        self.assertEqual(tlsProtocol.transport.value(), b"")
+        self.assertEqual(serverTLSProtocol.transport.value(), b"")
+
+
+    def test_streamingProducerPausedInNormalMode(self):
+        """
+        When the TLS transport is not blocked on reads, it correctly calls
+        pauseProducing on the registered producer.
+        """
+        _, tlsProtocol, producer = self.setupStreamingProducer()
+
+        # The TLS protocol's transport pretends to be full, pausing its
+        # producer:
+        tlsProtocol.transport.producer.pauseProducing()
+        self.assertEqual(producer.producerState, 'paused')
+        self.assertEqual(producer.producerHistory, ['pause'])
+        self.assertEqual(tlsProtocol._producer._producerPaused, True)
+
+
+    def test_streamingProducerResumedInNormalMode(self):
+        """
+        When the TLS transport is not blocked on reads, it correctly calls
+        resumeProducing on the registered producer.
+        """
+        _, tlsProtocol, producer = self.setupStreamingProducer()
+        tlsProtocol.transport.producer.pauseProducing()
+        self.assertEqual(producer.producerHistory, ['pause'])
+
+        # The TLS protocol's transport pretends to have written everything
+        # out, so it resumes its producer:
+        tlsProtocol.transport.producer.resumeProducing()
+        self.assertEqual(producer.producerState, 'producing')
+        self.assertEqual(producer.producerHistory, ['pause', 'resume'])
+        self.assertEqual(tlsProtocol._producer._producerPaused, False)
+
+
+    def test_streamingProducerPausedInWriteBlockedOnReadMode(self):
+        """
+        When the TLS transport is blocked on reads, it correctly calls
+        pauseProducing on the registered producer.
+        """
+        clientProtocol, tlsProtocol, producer = self.setupStreamingProducer()
+
+        # Write to TLS transport. Because we do this before the initial TLS
+        # handshake is finished, writing bytes triggers a WantReadError,
+        # indicating that until bytes are read for the handshake, more bytes
+        # cannot be written. Thus writing bytes before the handshake should
+        # cause the producer to be paused:
+        clientProtocol.transport.write(b"hello")
+        self.assertEqual(producer.producerState, 'paused')
+        self.assertEqual(producer.producerHistory, ['pause'])
+        self.assertEqual(tlsProtocol._producer._producerPaused, True)
+
+
+    def test_streamingProducerResumedInWriteBlockedOnReadMode(self):
+        """
+        When the TLS transport is blocked on reads, it correctly calls
+        resumeProducing on the registered producer.
+        """
+        clientProtocol, tlsProtocol, producer = self.setupStreamingProducer()
+
+        # Write to TLS transport, triggering WantReadError; this should cause
+        # the producer to be paused. We use a large chunk of data to make sure
+        # large writes don't trigger multiple pauses:
+        clientProtocol.transport.write(b"hello world" * 320000)
+        self.assertEqual(producer.producerHistory, ['pause'])
+
+        # Now deliver bytes that will fix the WantRead condition; this should
+        # unpause the producer:
+        serverProtocol, serverTLSProtocol = buildTLSProtocol(server=True)
+        self.flushTwoTLSProtocols(tlsProtocol, serverTLSProtocol)
+        self.assertEqual(producer.producerHistory, ['pause', 'resume'])
+        self.assertEqual(tlsProtocol._producer._producerPaused, False)
+
+        # Make sure we haven't disconnected for some reason:
+        self.assertEqual(tlsProtocol.transport.disconnecting, False)
+        self.assertEqual(producer.producerState, 'producing')
+
+
+    def test_streamingProducerTwice(self):
+        """
+        Registering a streaming producer twice throws an exception.
+        """
+        clientProtocol, tlsProtocol, producer = self.setupStreamingProducer()
+        originalProducer = tlsProtocol._producer
+        producer2 = object()
+        self.assertRaises(RuntimeError,
+            clientProtocol.transport.registerProducer, producer2, True)
+        self.assertIdentical(tlsProtocol._producer, originalProducer)
+
+
+    def test_streamingProducerUnregister(self):
+        """
+        Unregistering a streaming producer removes it, reverting to the
+        initial state.
+        """
+        clientProtocol, tlsProtocol, producer = self.setupStreamingProducer()
+        clientProtocol.transport.unregisterProducer()
+        self.assertEqual(tlsProtocol._producer, None)
+        self.assertEqual(tlsProtocol.transport.producer, None)
+
+
+    def loseConnectionWithProducer(self, writeBlockedOnRead):
+        """
+        Common code for tests involving writes by producer after
+        loseConnection is called.
+        """
+        clientProtocol, tlsProtocol, producer = self.setupStreamingProducer()
+        serverProtocol, serverTLSProtocol = buildTLSProtocol(server=True)
+
+        if not writeBlockedOnRead:
+            # Do the initial handshake before write:
+            self.flushTwoTLSProtocols(tlsProtocol, serverTLSProtocol)
+        else:
+            # In this case the write below will trigger the write-blocked-on-read
+            # condition...
+            pass
+
+        # Now write, then lose connection:
+        clientProtocol.transport.write(b"x ")
+        clientProtocol.transport.loseConnection()
+        self.flushTwoTLSProtocols(tlsProtocol, serverTLSProtocol)
+
+        # Underlying transport should not have loseConnection called yet, nor
+        # should producer be stopped:
+        self.assertEqual(tlsProtocol.transport.disconnecting, False)
+        self.assertFalse("stop" in producer.producerHistory)
+
+        # Writes from client to server should continue to go through, since we
+        # haven't unregistered producer yet:
+        clientProtocol.transport.write(b"hello")
+        clientProtocol.transport.writeSequence([b" ", b"world"])
+
+        # Unregister producer; this should trigger TLS shutdown:
+        clientProtocol.transport.unregisterProducer()
+        self.assertNotEqual(tlsProtocol.transport.value(), b"")
+        self.assertEqual(tlsProtocol.transport.disconnecting, False)
+
+        # Additional writes should not go through:
+        clientProtocol.transport.write(b"won't")
+        clientProtocol.transport.writeSequence([b"won't!"])
+
+        # Finish TLS close handshake:
+        self.flushTwoTLSProtocols(tlsProtocol, serverTLSProtocol)
+        self.assertEqual(tlsProtocol.transport.disconnecting, True)
+
+        # Bytes made it through, as long as they were written before producer
+        # was unregistered:
+        self.assertEqual(b"".join(serverProtocol.received), b"x hello world")
+
+
+    def test_streamingProducerLoseConnectionWithProducer(self):
+        """
+        loseConnection() waits for the producer to unregister itself, then
+        does a clean TLS close alert, then closes the underlying connection.
+        """
+        return self.loseConnectionWithProducer(False)
+
+
+    def test_streamingProducerLoseConnectionWithProducerWBOR(self):
+        """
+        Even when writes are blocked on reading, loseConnection() waits for
+        the producer to unregister itself, then does a clean TLS close alert,
+        then closes the underlying connection.
+        """
+        return self.loseConnectionWithProducer(True)
+
+
+    def test_streamingProducerBothTransportsDecideToPause(self):
+        """
+        pauseProducing() events can come from both the TLS transport layer and
+        the underlying transport. In this case, both decide to pause,
+        with the underlying transport pausing first.
+        """
+        class PausingStringTransport(StringTransport):
+            _didPause = False
+
+            def write(self, data):
+                if not self._didPause and self.producer is not None:
+                    self._didPause = True
+                    self.producer.pauseProducing()
+                StringTransport.write(self, data)
+
+
+        class TLSConnection(object):
+            def __init__(self):
+                self.l = []
+
+            def send(self, bytes):
+                # on first write, don't send all bytes:
+                if not self.l:
+                    bytes = bytes[:-1]
+                # pause on second write:
+                if len(self.l) == 1:
+                    self.l.append("paused")
+                    raise WantReadError()
+                # otherwise just take in data:
+                self.l.append(bytes)
+                return len(bytes)
+
+            def bio_write(self, data):
+                pass
+
+            def bio_read(self, size):
+                return b'X'
+
+            def recv(self, size):
+                raise WantReadError()
+
+        transport = PausingStringTransport()
+        clientProtocol, tlsProtocol, producer = self.setupStreamingProducer(
+            transport)
+        self.assertEqual(producer.producerState, 'producing')
+
+        # Shove in fake TLSConnection that will raise WantReadError the second
+        # time send() is called. This will allow us to have bytes written
+        # to the PausingStringTransport, so it will pause the producer. Then,
+        # WantReadError will be thrown, triggering the TLS transport's
+        # producer code path.
+        tlsProtocol._tlsConnection = TLSConnection()
+        clientProtocol.transport.write(b"hello")
+        self.assertEqual(producer.producerState, 'paused')
+        self.assertEqual(producer.producerHistory, ['pause'])
+
+        # Now, underlying transport resumes, and then we deliver some data to
+        # TLS transport so that it will resume:
+        tlsProtocol.transport.producer.resumeProducing()
+        self.assertEqual(producer.producerState, 'producing')
+        self.assertEqual(producer.producerHistory, ['pause', 'resume'])
+        tlsProtocol.dataReceived(b"hello")
+        self.assertEqual(producer.producerState, 'producing')
+        self.assertEqual(producer.producerHistory, ['pause', 'resume'])
+
+
+    def test_streamingProducerStopProducing(self):
+        """
+        If the underlying transport tells its producer to stopProducing(),
+        this is passed on to the high-level producer.
+        """
+        _, tlsProtocol, producer = self.setupStreamingProducer()
+        tlsProtocol.transport.producer.stopProducing()
+        self.assertEqual(producer.producerState, 'stopped')
+
+
+    def test_nonStreamingProducer(self):
+        """
+        Non-streaming producers get wrapped as streaming producers.
+        """
+        clientProtocol, tlsProtocol = buildTLSProtocol()
+        producer = NonStreamingProducer(clientProtocol.transport)
+
+        # Register non-streaming producer:
+        clientProtocol.transport.registerProducer(producer, False)
+        streamingProducer = tlsProtocol.transport.producer._producer
+
+        # Verify it was wrapped into streaming producer:
+        self.assertIsInstance(streamingProducer, _PullToPush)
+        self.assertEqual(streamingProducer._producer, producer)
+        self.assertEqual(streamingProducer._consumer, clientProtocol.transport)
+        self.assertEqual(tlsProtocol.transport.streaming, True)
+
+        # Verify the streaming producer was started, and ran until the end:
+        def done(ignore):
+            # Our own producer is done:
+            self.assertEqual(producer.consumer, None)
+            # The producer has been unregistered:
+            self.assertEqual(tlsProtocol.transport.producer, None)
+            # The streaming producer wrapper knows it's done:
+            self.assertEqual(streamingProducer._finished, True)
+        producer.result.addCallback(done)
+
+        serverProtocol, serverTLSProtocol = buildTLSProtocol(server=True)
+        self.flushTwoTLSProtocols(tlsProtocol, serverTLSProtocol)
+        return producer.result
+
+
+    def test_interface(self):
+        """
+        L{_ProducerMembrane} implements L{IPushProducer}.
+        """
+        producer = StringTransport()
+        membrane = _ProducerMembrane(producer)
+        self.assertTrue(verifyObject(IPushProducer, membrane))
+
+
+    def registerProducerAfterConnectionLost(self, streaming):
+        """
+        If a producer is registered after the transport has disconnected, the
+        producer is not used, and its stopProducing method is called.
+        """
+        clientProtocol, tlsProtocol = buildTLSProtocol()
+        clientProtocol.connectionLost = lambda reason: reason.trap(Error)
+
+        class Producer(object):
+            stopped = False
+
+            def resumeProducing(self):
+                return 1/0 # this should never be called
+
+            def stopProducing(self):
+                self.stopped = True
+
+        # Disconnect the transport:
+        tlsProtocol.connectionLost(Failure(ConnectionDone()))
+
+        # Register the producer; resumeProducing should not be called, but
+        # stopProducing will be:
+        producer = Producer()
+        tlsProtocol.registerProducer(producer, False)
+        self.assertIdentical(tlsProtocol.transport.producer, None)
+        self.assertEqual(producer.stopped, True)
+
+
+    def test_streamingProducerAfterConnectionLost(self):
+        """
+        If a streaming producer is registered after the transport has
+        disconnected, the producer is not used, and its stopProducing method
+        is called.
+        """
+        self.registerProducerAfterConnectionLost(True)
+
+
+    def test_nonStreamingProducerAfterConnectionLost(self):
+        """
+        If a non-streaming producer is registered after the transport has
+        disconnected, the producer is not used, and its stopProducing method
+        is called.
+        """
+        self.registerProducerAfterConnectionLost(False)
+
+
+
+class NonStreamingProducer(object):
+    """
+    A pull producer which writes 10 times only.
+    """
+
+    counter = 0
+    stopped = False
+
+    def __init__(self, consumer):
+        self.consumer = consumer
+        self.result = Deferred()
+
+    def resumeProducing(self):
+        if self.counter < 10:
+            self.consumer.write(intToBytes(self.counter))
+            self.counter += 1
+            if self.counter == 10:
+                self.consumer.unregisterProducer()
+                self._done()
+        else:
+            if self.consumer is None:
+                raise RuntimeError("BUG: resume after unregister/stop.")
+
+
+    def pauseProducing(self):
+        raise RuntimeError("BUG: pause should never be called.")
+
+
+    def _done(self):
+        self.consumer = None
+        d = self.result
+        del self.result
+        d.callback(None)
+
+
+    def stopProducing(self):
+        self.stopped = True
+        self._done()
+
+
+
+class NonStreamingProducerTests(TestCase):
+    """
+    Non-streaming producers can be adapted into streaming producers.
+    """
+
+    def streamUntilEnd(self, consumer):
+        """
+        Verify that the producer writes all of its data to the consumer and is
+        not called again after that.
+        """
+        nsProducer = NonStreamingProducer(consumer)
+        streamingProducer = _PullToPush(nsProducer, consumer)
+        consumer.registerProducer(streamingProducer, True)
+
+        # The producer will call unregisterProducer(), and we need to hook
+        # that up so the streaming wrapper is notified; the
+        # TLSMemoryBIOProtocol will have to do this itself, which is tested
+        # elsewhere:
+        def unregister(orig=consumer.unregisterProducer):
+            orig()
+            streamingProducer.stopStreaming()
+        consumer.unregisterProducer = unregister
+
+        done = nsProducer.result
+        def doneStreaming(_):
+            # All data was streamed, and the producer unregistered itself:
+            self.assertEqual(consumer.value(), b"0123456789")
+            self.assertEqual(consumer.producer, None)
+            # And the streaming wrapper stopped:
+            self.assertEqual(streamingProducer._finished, True)
+        done.addCallback(doneStreaming)
+
+        # Now, start streaming:
+        streamingProducer.startStreaming()
+        return done
+
+
+    def test_writeUntilDone(self):
+        """
+        When converted to a streaming producer, the non-streaming producer
+        writes out all its data, but is not called after that.
+        """
+        consumer = StringTransport()
+        return self.streamUntilEnd(consumer)
+
+
+    def test_pause(self):
+        """
+        When the streaming producer is paused, the underlying producer stops
+        getting resumeProducing calls.
+        """
+        class PausingStringTransport(StringTransport):
+            writes = 0
+
+            def __init__(self):
+                StringTransport.__init__(self)
+                self.paused = Deferred()
+
+            def write(self, data):
+                self.writes += 1
+                StringTransport.write(self, data)
+                if self.writes == 3:
+                    self.producer.pauseProducing()
+                    d = self.paused
+                    del self.paused
+                    d.callback(None)
+
+
+        consumer = PausingStringTransport()
+        nsProducer = NonStreamingProducer(consumer)
+        streamingProducer = _PullToPush(nsProducer, consumer)
+        consumer.registerProducer(streamingProducer, True)
+
+        # Make sure the consumer does not continue:
+        def shouldNotBeCalled(ignore):
+            self.fail("BUG: The producer should not finish!")
+        nsProducer.result.addCallback(shouldNotBeCalled)
+
+        done = consumer.paused
+        def paused(ignore):
+            # The CooperatorTask driving the producer was paused:
+            self.assertEqual(streamingProducer._coopTask._pauseCount, 1)
+        done.addCallback(paused)
+
+        # Now, start streaming:
+        streamingProducer.startStreaming()
+        return done
+
+
+    def test_resume(self):
+        """
+        When the streaming producer is paused and then resumed, the underlying
+        producer starts getting resumeProducing calls again after the resume.
+
+        The test will never finish (or rather, time out) if the resume
+        producing call is not working.
+        """
+        class PausingStringTransport(StringTransport):
+            writes = 0
+
+            def write(self, data):
+                self.writes += 1
+                StringTransport.write(self, data)
+                if self.writes == 3:
+                    self.producer.pauseProducing()
+                    self.producer.resumeProducing()
+
+        consumer = PausingStringTransport()
+        return self.streamUntilEnd(consumer)
+
+
+    def test_stopProducing(self):
+        """
+        When the streaming producer is stopped by the consumer, the underlying
+        producer is stopped, and streaming is stopped.
+        """
+        class StoppingStringTransport(StringTransport):
+            writes = 0
+
+            def write(self, data):
+                self.writes += 1
+                StringTransport.write(self, data)
+                if self.writes == 3:
+                    self.producer.stopProducing()
+
+        consumer = StoppingStringTransport()
+        nsProducer = NonStreamingProducer(consumer)
+        streamingProducer = _PullToPush(nsProducer, consumer)
+        consumer.registerProducer(streamingProducer, True)
+
+        done = nsProducer.result
+        def doneStreaming(_):
+            # Not all data was streamed, and the producer was stopped:
+            self.assertEqual(consumer.value(), b"012")
+            self.assertEqual(nsProducer.stopped, True)
+            # And the streaming wrapper stopped:
+            self.assertEqual(streamingProducer._finished, True)
+        done.addCallback(doneStreaming)
+
+        # Now, start streaming:
+        streamingProducer.startStreaming()
+        return done
+
+
+    def resumeProducingRaises(self, consumer, expectedExceptions):
+        """
+        Common implementation for tests where the underlying producer throws
+        an exception when its resumeProducing is called.
+        """
+        class ThrowingProducer(NonStreamingProducer):
+
+            def resumeProducing(self):
+                if self.counter == 2:
+                    return 1/0
+                else:
+                    NonStreamingProducer.resumeProducing(self)
+
+        nsProducer = ThrowingProducer(consumer)
+        streamingProducer = _PullToPush(nsProducer, consumer)
+        consumer.registerProducer(streamingProducer, True)
+
+        # Register log observer:
+        loggedMsgs = []
+        log.addObserver(loggedMsgs.append)
+        self.addCleanup(log.removeObserver, loggedMsgs.append)
+
+        # Make consumer unregister do what TLSMemoryBIOProtocol would do:
+        def unregister(orig=consumer.unregisterProducer):
+            orig()
+            streamingProducer.stopStreaming()
+        consumer.unregisterProducer = unregister
+
+        # Start streaming:
+        streamingProducer.startStreaming()
+
+        done = streamingProducer._coopTask.whenDone()
+        done.addErrback(lambda reason: reason.trap(TaskStopped))
+        def stopped(ign):
+            self.assertEqual(consumer.value(), b"01")
+            # Any errors from resumeProducing were logged:
+            errors = self.flushLoggedErrors()
+            self.assertEqual(len(errors), len(expectedExceptions))
+            for f, (expected, msg), logMsg in zip(
+                errors, expectedExceptions, loggedMsgs):
+                self.assertTrue(f.check(expected))
+                self.assertIn(msg, logMsg['why'])
+            # And the streaming wrapper stopped:
+            self.assertEqual(streamingProducer._finished, True)
+        done.addCallback(stopped)
+        return done
+
+
+    def test_resumeProducingRaises(self):
+        """
+        If the underlying producer raises an exception when resumeProducing is
+        called, the streaming wrapper should log the error, unregister from
+        the consumer and stop streaming.
+        """
+        consumer = StringTransport()
+        done = self.resumeProducingRaises(
+            consumer,
+            [(ZeroDivisionError, "failed, producing will be stopped")])
+        def cleanShutdown(ignore):
+            # Producer was unregistered from consumer:
+            self.assertEqual(consumer.producer, None)
+        done.addCallback(cleanShutdown)
+        return done
+
+
+    def test_resumeProducingRaiseAndUnregisterProducerRaises(self):
+        """
+        If the underlying producer raises an exception when resumeProducing is
+        called, the streaming wrapper should log the error, unregister from
+        the consumer and stop streaming even if the unregisterProducer call
+        also raises.
+        """
+        consumer = StringTransport()
+        def raiser():
+            raise RuntimeError()
+        consumer.unregisterProducer = raiser
+        return self.resumeProducingRaises(
+            consumer,
+            [(ZeroDivisionError, "failed, producing will be stopped"),
+             (RuntimeError, "failed to unregister producer")])
+
+
+    def test_stopStreamingTwice(self):
+        """
+        stopStreaming() can be called more than once without blowing
+        up. This is useful for error-handling paths.
+        """
+        consumer = StringTransport()
+        nsProducer = NonStreamingProducer(consumer)
+        streamingProducer = _PullToPush(nsProducer, consumer)
+        streamingProducer.startStreaming()
+        streamingProducer.stopStreaming()
+        streamingProducer.stopStreaming()
+        self.assertEqual(streamingProducer._finished, True)
+
+
+    def test_interface(self):
+        """
+        L{_PullToPush} implements L{IPushProducer}.
+        """
+        consumer = StringTransport()
+        nsProducer = NonStreamingProducer(consumer)
+        streamingProducer = _PullToPush(nsProducer, consumer)
+        self.assertTrue(verifyObject(IPushProducer, streamingProducer))
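
A minimal sketch of the adaptation pattern these tests exercise (illustrative
only; it reuses the NonStreamingProducer helper defined above and the
StringTransport from twisted.test.proto_helpers):

    from twisted.protocols.tls import _PullToPush
    from twisted.test.proto_helpers import StringTransport

    consumer = StringTransport()
    producer = NonStreamingProducer(consumer)   # pull producer defined above
    streaming = _PullToPush(producer, consumer)
    consumer.registerProducer(streaming, True)  # registered as streaming

    # The consumer must notify the wrapper when the producer unregisters:
    def unregister(orig=consumer.unregisterProducer):
        orig()
        streaming.stopStreaming()
    consumer.unregisterProducer = unregister

    streaming.startStreaming()  # drives resumeProducing() via cooperate()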
diff --git a/ThirdParty/Twisted/twisted/protocols/tls.py b/ThirdParty/Twisted/twisted/protocols/tls.py
new file mode 100644
index 0000000..c05b6c2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/tls.py
@@ -0,0 +1,617 @@
+# -*- test-case-name: twisted.protocols.test.test_tls,twisted.internet.test.test_tls,twisted.test.test_sslverify -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation of a TLS transport (L{ISSLTransport}) as an
+L{IProtocol<twisted.internet.interfaces.IProtocol>} layered on top of any
+L{ITransport<twisted.internet.interfaces.ITransport>} implementation, based on
+U{OpenSSL<http://www.openssl.org>}'s memory BIO features.
+
+L{TLSMemoryBIOFactory} is a L{WrappingFactory} which wraps protocols created by
+the factory it wraps with L{TLSMemoryBIOProtocol}.  L{TLSMemoryBIOProtocol}
+intercedes between the underlying transport and the wrapped protocol to
+implement SSL and TLS.  Typical usage of this module looks like this::
+
+    from twisted.protocols.tls import TLSMemoryBIOFactory
+    from twisted.internet.protocol import ServerFactory
+    from twisted.internet.ssl import PrivateCertificate
+    from twisted.internet import reactor
+
+    from someapplication import ApplicationProtocol
+
+    serverFactory = ServerFactory()
+    serverFactory.protocol = ApplicationProtocol
+    certificate = PrivateCertificate.loadPEM(certPEMData)
+    contextFactory = certificate.options()
+    tlsFactory = TLSMemoryBIOFactory(contextFactory, False, serverFactory)
+    reactor.listenTCP(12345, tlsFactory)
+    reactor.run()
+
+This API offers somewhat more flexibility than
+L{twisted.internet.interfaces.IReactorSSL}; for example, a L{TLSMemoryBIOProtocol}
+instance can use another instance of L{TLSMemoryBIOProtocol} as its transport,
+yielding TLS over TLS - useful to implement onion routing.  It can also be used
+to run TLS over unusual transports, such as UNIX sockets and stdio.
+"""
+
+from __future__ import division, absolute_import
+
+from OpenSSL.SSL import Error, ZeroReturnError, WantReadError
+from OpenSSL.SSL import TLSv1_METHOD, Context, Connection
+
+try:
+    Connection(Context(TLSv1_METHOD), None)
+except TypeError as e:
+    if str(e) != "argument must be an int, or have a fileno() method.":
+        raise
+    raise ImportError("twisted.protocols.tls requires pyOpenSSL 0.10 or newer.")
+
+from zope.interface import implementer, providedBy, directlyProvides
+
+from twisted.python.compat import unicode
+from twisted.python.failure import Failure
+from twisted.python import log
+from twisted.python._reflectpy3 import safe_str
+from twisted.internet.interfaces import ISystemHandle, ISSLTransport
+from twisted.internet.interfaces import IPushProducer, ILoggingContext
+from twisted.internet.main import CONNECTION_LOST
+from twisted.internet.protocol import Protocol
+from twisted.internet.task import cooperate
+from twisted.protocols.policies import ProtocolWrapper, WrappingFactory
+
+
+@implementer(IPushProducer)
+class _PullToPush(object):
+    """
+    An adapter that converts a non-streaming to a streaming producer.
+
+    Because of limitations of the producer API, this adapter requires the
+    cooperation of the consumer. When the consumer's C{registerProducer} is
+    called with a non-streaming producer, it must wrap it with L{_PullToPush}
+    and then call C{startStreaming} on the resulting object. When the
+    consumer's C{unregisterProducer} is called, it must call
+    C{stopStreaming} on the L{_PullToPush} instance.
+
+    If the underlying producer throws an exception from C{resumeProducing},
+    the producer will be unregistered from the consumer.
+
+    @ivar _producer: the underlying non-streaming producer.
+
+    @ivar _consumer: the consumer with which the underlying producer was
+                     registered.
+
+    @ivar _finished: C{bool} indicating whether the producer has finished.
+
+    @ivar _coopTask: the result of calling L{cooperate}, the task driving the
+                     streaming producer.
+    """
+
+    _finished = False
+
+
+    def __init__(self, pullProducer, consumer):
+        self._producer = pullProducer
+        self._consumer = consumer
+
+
+    def _pull(self):
+        """
+        A generator that calls C{resumeProducing} on the underlying producer
+        forever.
+
+        If C{resumeProducing} throws an exception, the producer is
+        unregistered, which should result in streaming stopping.
+        """
+        while True:
+            try:
+                self._producer.resumeProducing()
+            except:
+                log.err(None, "%s failed, producing will be stopped:" %
+                        (safe_str(self._producer),))
+                try:
+                    self._consumer.unregisterProducer()
+                    # The consumer should now call stopStreaming() on us,
+                    # thus stopping the streaming.
+                except:
+                    # Since the consumer blew up, we may not have had
+                    # stopStreaming() called, so we just stop on our own:
+                    log.err(None, "%s failed to unregister producer:" %
+                            (safe_str(self._consumer),))
+                    self._finished = True
+                    return
+            yield None
+
+
+    def startStreaming(self):
+        """
+        This should be called by the consumer when the producer is registered.
+
+        Start streaming data to the consumer.
+        """
+        self._coopTask = cooperate(self._pull())
+
+
+    def stopStreaming(self):
+        """
+        This should be called by the consumer when the producer is unregistered.
+
+        Stop streaming data to the consumer.
+        """
+        if self._finished:
+            return
+        self._finished = True
+        self._coopTask.stop()
+
+
+    # IPushProducer implementation:
+    def pauseProducing(self):
+        self._coopTask.pause()
+
+
+    def resumeProducing(self):
+        self._coopTask.resume()
+
+
+    def stopProducing(self):
+        self.stopStreaming()
+        self._producer.stopProducing()
+
+
+
+@implementer(IPushProducer)
+class _ProducerMembrane(object):
+    """
+    Stand-in for producer registered with a L{TLSMemoryBIOProtocol} transport.
+
+    Ensures that producer pause/resume events from the underlying transport are
+    coordinated with pause/resume events from the TLS layer.
+
+    @ivar _producer: The application-layer producer.
+    """
+
+    _producerPaused = False
+
+    def __init__(self, producer):
+        self._producer = producer
+
+
+    def pauseProducing(self):
+        """
+        C{pauseProducing} the underlying producer, if it's not paused.
+        """
+        if self._producerPaused:
+            return
+        self._producerPaused = True
+        self._producer.pauseProducing()
+
+
+    def resumeProducing(self):
+        """
+        C{resumeProducing} the underlying producer, if it's paused.
+        """
+        if not self._producerPaused:
+            return
+        self._producerPaused = False
+        self._producer.resumeProducing()
+
+
+    def stopProducing(self):
+        """
+        C{stopProducing} the underlying producer.
+
+        There is only a single source for this event, so it's simply passed
+        on.
+        """
+        self._producer.stopProducing()
+
+
+
+@implementer(ISystemHandle, ISSLTransport)
+class TLSMemoryBIOProtocol(ProtocolWrapper):
+    """
+    L{TLSMemoryBIOProtocol} is a protocol wrapper which uses OpenSSL via a
+    memory BIO to encrypt bytes written to it before sending them on to the
+    underlying transport and decrypts bytes received from the underlying
+    transport before delivering them to the wrapped protocol.
+
+    In addition to producer events from the underlying transport, the need to
+    wait for reads before a write can proceed means the
+    L{TLSMemoryBIOProtocol} may also want to pause a producer. Pause/resume
+    events are therefore merged using the L{_ProducerMembrane}
+    wrapper. Non-streaming (pull) producers are supported by wrapping them
+    with L{_PullToPush}.
+
+    @ivar _tlsConnection: The L{OpenSSL.SSL.Connection} instance which is
+        encrypting and decrypting this connection.
+
+    @ivar _lostTLSConnection: A flag indicating whether connection loss has
+        already been dealt with (C{True}) or not (C{False}). TLS disconnection
+        is distinct from the underlying connection being lost.
+
+    @ivar _writeBlockedOnRead: A flag indicating whether further writing must
+        wait for data to be received (C{True}) or not (C{False}).
+
+    @ivar _appSendBuffer: A C{list} of C{str} of application-level (cleartext)
+        data which is waiting for C{_writeBlockedOnRead} to be reset to
+        C{False} so it can be passed to and perhaps accepted by
+        C{_tlsConnection.send}.
+
+    @ivar _connectWrapped: A flag indicating whether or not to call
+        C{makeConnection} on the wrapped protocol.  This is for the reactor's
+        L{twisted.internet.interfaces.ITLSTransport.startTLS} implementation,
+        since it has a protocol which it has already called C{makeConnection}
+        on, and which has no interest in a new transport.  See #3821.
+
+    @ivar _handshakeDone: A flag indicating whether or not the handshake is
+        known to have completed successfully (C{True}) or not (C{False}).  This
+        is used to control error reporting behavior.  If the handshake has not
+        completed, the underlying L{OpenSSL.SSL.Error} will be passed to the
+        application's C{connectionLost} method.  If it has completed, any
+        unexpected L{OpenSSL.SSL.Error} will be turned into a
+        L{ConnectionLost}.  This is weird; however, it is simply an attempt at
+        a faithful re-implementation of the behavior provided by
+        L{twisted.internet.ssl}.
+
+    @ivar _reason: If an unexpected L{OpenSSL.SSL.Error} occurs which causes
+        the connection to be lost, it is saved here.  If appropriate, this may
+        be used as the reason passed to the application protocol's
+        C{connectionLost} method.
+
+    @ivar _producer: The current producer registered via C{registerProducer},
+        or C{None} if no producer has been registered or a previous one was
+        unregistered.
+    """
+
+    _reason = None
+    _handshakeDone = False
+    _lostTLSConnection = False
+    _writeBlockedOnRead = False
+    _producer = None
+
+    def __init__(self, factory, wrappedProtocol, _connectWrapped=True):
+        ProtocolWrapper.__init__(self, factory, wrappedProtocol)
+        self._connectWrapped = _connectWrapped
+
+
+    def getHandle(self):
+        """
+        Return the L{OpenSSL.SSL.Connection} object being used to encrypt and
+        decrypt this connection.
+
+        This is done for the benefit of L{twisted.internet.ssl.Certificate}'s
+        C{peerFromTransport} and C{hostFromTransport} methods only.  A
+        different system handle may be returned by future versions of this
+        method.
+        """
+        return self._tlsConnection
+
+
+    def makeConnection(self, transport):
+        """
+        Connect this wrapper to the given transport and initialize the
+        necessary L{OpenSSL.SSL.Connection} with a memory BIO.
+        """
+        tlsContext = self.factory._contextFactory.getContext()
+        self._tlsConnection = Connection(tlsContext, None)
+        if self.factory._isClient:
+            self._tlsConnection.set_connect_state()
+        else:
+            self._tlsConnection.set_accept_state()
+        self._appSendBuffer = []
+
+        # Add interfaces provided by the transport we are wrapping:
+        for interface in providedBy(transport):
+            directlyProvides(self, interface)
+
+        # Intentionally skip ProtocolWrapper.makeConnection - it might call
+        # wrappedProtocol.makeConnection, which we want to make conditional.
+        Protocol.makeConnection(self, transport)
+        self.factory.registerProtocol(self)
+        if self._connectWrapped:
+            # Now that the TLS layer is initialized, notify the application of
+            # the connection.
+            ProtocolWrapper.makeConnection(self, transport)
+
+        # Now that we ourselves have a transport (initialized by the
+        # ProtocolWrapper.makeConnection call above), kick off the TLS
+        # handshake.
+        try:
+            self._tlsConnection.do_handshake()
+        except WantReadError:
+            # This is the expected case - there's no data in the connection's
+            # input buffer yet, so it won't be able to complete the whole
+            # handshake now.  If this is the speak-first side of the
+            # connection, then some bytes will be in the send buffer now; flush
+            # them.
+            self._flushSendBIO()
+
+
+    def _flushSendBIO(self):
+        """
+        Read any bytes out of the send BIO and write them to the underlying
+        transport.
+        """
+        try:
+            bytes = self._tlsConnection.bio_read(2 ** 15)
+        except WantReadError:
+            # There may be nothing in the send BIO right now.
+            pass
+        else:
+            self.transport.write(bytes)
+
+
+    def _flushReceiveBIO(self):
+        """
+        Try to receive any application-level bytes which are now available
+        because of a previous write into the receive BIO.  This will take
+        care of delivering any application-level bytes which are received to
+        the protocol, as well as handling of the various exceptions which
+        can come from trying to get such bytes.
+        """
+        # Keep trying this until an error indicates we should stop or we
+        # close the connection.  Looping is necessary to make sure we
+        # process all of the data which was put into the receive BIO, as
+        # there is no guarantee that a single recv call will do it all.
+        while not self._lostTLSConnection:
+            try:
+                bytes = self._tlsConnection.recv(2 ** 15)
+            except WantReadError:
+                # The newly received bytes might not have been enough to produce
+                # any application data.
+                break
+            except ZeroReturnError:
+                # TLS has shut down and no more TLS data will be received over
+                # this connection.
+                self._shutdownTLS()
+                # Passing in None means the user protocol's connectionLost
+                # will get called with reason from underlying transport:
+                self._tlsShutdownFinished(None)
+            except Error as e:
+                # Something went pretty wrong.  For example, this might be a
+                # handshake failure (because there were no shared ciphers, because
+                # a certificate failed to verify, etc).  TLS can no longer proceed.
+
+                # Squash EOF in violation of protocol into ConnectionLost; we
+                # create Failure before calling _flushSendBIO so that no new
+                # exception will get thrown in the interim.
+                if e.args[0] == -1 and e.args[1] == 'Unexpected EOF':
+                    failure = Failure(CONNECTION_LOST)
+                else:
+                    failure = Failure()
+
+                self._flushSendBIO()
+                self._tlsShutdownFinished(failure)
+            else:
+                # If we got application bytes, the handshake must be done by
+                # now.  Keep track of this to control error reporting later.
+                self._handshakeDone = True
+                ProtocolWrapper.dataReceived(self, bytes)
+
+        # The received bytes might have generated a response which needs to be
+        # sent now.  For example, the handshake involves several round-trip
+        # exchanges without ever producing application-bytes.
+        self._flushSendBIO()
+
+
+    def dataReceived(self, bytes):
+        """
+        Deliver any received bytes to the receive BIO and then read and deliver
+        to the application any application-level data which becomes available
+        as a result of this.
+        """
+        self._tlsConnection.bio_write(bytes)
+
+        if self._writeBlockedOnRead:
+            # A read just happened, so we might not be blocked anymore.  Try to
+            # flush all the pending application bytes.
+            self._writeBlockedOnRead = False
+            appSendBuffer = self._appSendBuffer
+            self._appSendBuffer = []
+            for bytes in appSendBuffer:
+                self._write(bytes)
+            if (not self._writeBlockedOnRead and self.disconnecting and
+                self.producer is None):
+                self._shutdownTLS()
+            if self._producer is not None:
+                self._producer.resumeProducing()
+
+        self._flushReceiveBIO()
+
+
+    def _shutdownTLS(self):
+        """
+        Initiate, or reply to, the shutdown handshake of the TLS layer.
+        """
+        shutdownSuccess = self._tlsConnection.shutdown()
+        self._flushSendBIO()
+        if shutdownSuccess:
+            # Both sides have shutdown, so we can start closing lower-level
+            # transport. This will also happen if we haven't started
+            # negotiation at all yet, in which case shutdown succeeds
+            # immediately.
+            self.transport.loseConnection()
+
+
+    def _tlsShutdownFinished(self, reason):
+        """
+        Called when TLS connection has gone away; tell underlying transport to
+        disconnect.
+        """
+        self._reason = reason
+        self._lostTLSConnection = True
+        # Using loseConnection causes the application protocol's
+        # connectionLost method to be invoked non-reentrantly, which is always
+        # a nice feature. However, for error cases (reason != None) we might
+        # want to use abortConnection when it becomes available. The
+        # loseConnection call is basically tested by test_handshakeFailure.
+        # At least one side will need to do it or the test never finishes.
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        """
+        Handle the possible repetition of calls to this method (due to either
+        the underlying transport going away or due to an error at the TLS
+        layer) and make sure the base implementation only gets invoked once.
+        """
+        if not self._lostTLSConnection:
+            # Tell the TLS connection that it's not going to get any more data
+            # and give it a chance to finish reading.
+            self._tlsConnection.bio_shutdown()
+            self._flushReceiveBIO()
+            self._lostTLSConnection = True
+        reason = self._reason or reason
+        self._reason = None
+        ProtocolWrapper.connectionLost(self, reason)
+
+
+    def loseConnection(self):
+        """
+        Send a TLS close alert and close the underlying connection.
+        """
+        if self.disconnecting:
+            return
+        self.disconnecting = True
+        if not self._writeBlockedOnRead and self._producer is None:
+            self._shutdownTLS()
+
+
+    def write(self, bytes):
+        """
+        Process the given application bytes and send any resulting TLS traffic
+        which arrives in the send BIO.
+
+        If C{loseConnection} was called, subsequent calls to C{write} will
+        drop the bytes on the floor.
+        """
+        if isinstance(bytes, unicode):
+            raise TypeError("Must write bytes to a TLS transport, not unicode.")
+        # Writes after loseConnection are not supported, unless a producer has
+        # been registered, in which case writes can happen until the producer
+        # is unregistered:
+        if self.disconnecting and self._producer is None:
+            return
+        self._write(bytes)
+
+
+    def _write(self, bytes):
+        """
+        Process the given application bytes and send any resulting TLS traffic
+        which arrives in the send BIO.
+
+        This may be called by C{dataReceived} with bytes that were buffered
+        before C{loseConnection} was called, which is why this function
+        doesn't check for disconnection but accepts the bytes regardless.
+        """
+        if self._lostTLSConnection:
+            return
+
+        leftToSend = bytes
+        while leftToSend:
+            try:
+                sent = self._tlsConnection.send(leftToSend)
+            except WantReadError:
+                self._writeBlockedOnRead = True
+                self._appSendBuffer.append(leftToSend)
+                if self._producer is not None:
+                    self._producer.pauseProducing()
+                break
+            except Error:
+                # Pretend TLS connection disconnected, which will trigger
+                # disconnect of underlying transport. The error will be passed
+                # to the application protocol's connectionLost method.  The
+                # other SSL implementation doesn't, but losing helpful
+                # debugging information is a bad idea.
+                self._tlsShutdownFinished(Failure())
+                break
+            else:
+                # If we sent some bytes, the handshake must be done.  Keep
+                # track of this to control error reporting behavior.
+                self._handshakeDone = True
+                self._flushSendBIO()
+                leftToSend = leftToSend[sent:]
+
+
+    def writeSequence(self, iovec):
+        """
+        Write a sequence of application bytes by joining them into one string
+        and passing them to L{write}.
+        """
+        self.write(b"".join(iovec))
+
+
+    def getPeerCertificate(self):
+        return self._tlsConnection.get_peer_certificate()
+
+
+    def registerProducer(self, producer, streaming):
+        # If we've already disconnected, nothing to do here:
+        if self._lostTLSConnection:
+            producer.stopProducing()
+            return
+
+        # If we received a non-streaming producer, wrap it so it becomes a
+        # streaming producer:
+        if not streaming:
+            producer = streamingProducer = _PullToPush(producer, self)
+        producer = _ProducerMembrane(producer)
+        # This will raise an exception if a producer is already registered:
+        self.transport.registerProducer(producer, True)
+        self._producer = producer
+        # If we received a non-streaming producer, we need to start the
+        # streaming wrapper:
+        if not streaming:
+            streamingProducer.startStreaming()
+
+
+    def unregisterProducer(self):
+        # If we received a non-streaming producer, we need to stop the
+        # streaming wrapper:
+        if isinstance(self._producer._producer, _PullToPush):
+            self._producer._producer.stopStreaming()
+        self._producer = None
+        self._producerPaused = False
+        self.transport.unregisterProducer()
+        if self.disconnecting and not self._writeBlockedOnRead:
+            self._shutdownTLS()
+
+
+
+class TLSMemoryBIOFactory(WrappingFactory):
+    """
+    L{TLSMemoryBIOFactory} adds TLS to connections.
+
+    @ivar _contextFactory: The TLS context factory which will be used to define
+        certain TLS connection parameters.
+
+    @ivar _isClient: A flag which is C{True} if this is a client TLS
+        connection, C{False} if it is a server TLS connection.
+    """
+    protocol = TLSMemoryBIOProtocol
+
+    noisy = False  # disable unnecessary logging.
+
+    def __init__(self, contextFactory, isClient, wrappedFactory):
+        WrappingFactory.__init__(self, wrappedFactory)
+        self._contextFactory = contextFactory
+        self._isClient = isClient
+
+        # Force some parameter checking in pyOpenSSL.  It's better to fail now
+        # than after we've set up the transport.
+        contextFactory.getContext()
+
+
+    def logPrefix(self):
+        """
+        Annotate the wrapped factory's log prefix with some text indicating TLS
+        is in use.
+
+        @rtype: C{str}
+        """
+        if ILoggingContext.providedBy(self.wrappedFactory):
+            logPrefix = self.wrappedFactory.logPrefix()
+        else:
+            logPrefix = self.wrappedFactory.__class__.__name__
+        return "%s (TLS)" % (logPrefix,)
+
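
The module docstring above shows the server-side setup; a client-side
connection can be wrapped the same way.  A minimal sketch, assuming a
placeholder ApplicationClientProtocol and host:

    from twisted.internet import reactor
    from twisted.internet.protocol import ClientFactory
    from twisted.internet.ssl import ClientContextFactory
    from twisted.protocols.tls import TLSMemoryBIOFactory

    from someapplication import ApplicationClientProtocol  # placeholder

    clientFactory = ClientFactory()
    clientFactory.protocol = ApplicationClientProtocol
    contextFactory = ClientContextFactory()
    # isClient=True makes the wrapper call set_connect_state() on the Connection.
    tlsFactory = TLSMemoryBIOFactory(contextFactory, True, clientFactory)
    reactor.connectTCP("example.com", 443, tlsFactory)
    reactor.run()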
diff --git a/ThirdParty/Twisted/twisted/protocols/wire.py b/ThirdParty/Twisted/twisted/protocols/wire.py
new file mode 100644
index 0000000..dddf215
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/protocols/wire.py
@@ -0,0 +1,90 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""Implement standard (and unused) TCP protocols.
+
+These protocols are either provided by inetd, or are not provided at all.
+"""
+
+# system imports
+import time, struct
+from zope.interface import implements
+
+# twisted import
+from twisted.internet import protocol, interfaces
+
+
+class Echo(protocol.Protocol):
+    """As soon as any data is received, write it back (RFC 862)"""
+    
+    def dataReceived(self, data):
+        self.transport.write(data)
+
+
+class Discard(protocol.Protocol):
+    """Discard any received data (RFC 863)"""
+    
+    def dataReceived(self, data):
+        # I'm ignoring you, nyah-nyah
+        pass
+
+
+class Chargen(protocol.Protocol):
+    """Generate repeating noise (RFC 864)"""
+    noise = r'@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`abcdefghijklmnopqrstuvwxyz{|}~ !"#$%&?'
+
+    implements(interfaces.IProducer)
+
+    def connectionMade(self):
+        self.transport.registerProducer(self, 0)
+
+    def resumeProducing(self):
+        self.transport.write(self.noise)
+
+    def pauseProducing(self):
+        pass
+
+    def stopProducing(self):
+        pass
+
+
+class QOTD(protocol.Protocol):
+    """Return a quote of the day (RFC 865)"""
+    
+    def connectionMade(self):
+        self.transport.write(self.getQuote())
+        self.transport.loseConnection()
+
+    def getQuote(self):
+        """Return a quote. May be overrriden in subclasses."""
+        return "An apple a day keeps the doctor away.\r\n"
+
+class Who(protocol.Protocol):
+    """Return list of active users (RFC 866)"""
+    
+    def connectionMade(self):
+        self.transport.write(self.getUsers())
+        self.transport.loseConnection()
+    
+    def getUsers(self):
+        """Return active users. Override in subclasses."""
+        return "root\r\n"
+
+
+class Daytime(protocol.Protocol):
+    """Send back the daytime in ASCII form (RFC 867)"""
+    
+    def connectionMade(self):
+        self.transport.write(time.asctime(time.gmtime(time.time())) + '\r\n')
+        self.transport.loseConnection()
+
+
+class Time(protocol.Protocol):
+    """Send back the time in machine readable form (RFC 868)"""
+    
+    def connectionMade(self):
+        # is this correct only for 32-bit machines?
+        result = struct.pack("!i", int(time.time()))
+        self.transport.write(result)
+        self.transport.loseConnection()
+
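
These inetd-style protocols are stateless, so serving one only requires a
factory with its protocol attribute set.  A minimal sketch for the Echo
protocol (port 8007 is an arbitrary unprivileged choice; the RFC 862 port
is 7):

    from twisted.internet import protocol, reactor
    from twisted.protocols.wire import Echo

    factory = protocol.ServerFactory()
    factory.protocol = Echo
    reactor.listenTCP(8007, factory)
    reactor.run()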
diff --git a/ThirdParty/Twisted/twisted/python/__init__.py b/ThirdParty/Twisted/twisted/python/__init__.py
new file mode 100644
index 0000000..ae78c7b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/__init__.py
@@ -0,0 +1,13 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+
+Twisted Python: Utilities and Enhancements for Python.
+
+"""
+
+    
+
diff --git a/ThirdParty/Twisted/twisted/python/_epoll.c b/ThirdParty/Twisted/twisted/python/_epoll.c
new file mode 100644
index 0000000..dffbe25
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/_epoll.c
@@ -0,0 +1,3348 @@
+/* Generated by Cython 0.15.1 on Fri Feb 17 23:33:28 2012 */
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+#ifndef Py_PYTHON_H
+    #error Python headers needed to compile C extensions, please install development version of Python.
+#else
+
+#include <stddef.h> /* For offsetof */
+#ifndef offsetof
+#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+  #ifndef __stdcall
+    #define __stdcall
+  #endif
+  #ifndef __cdecl
+    #define __cdecl
+  #endif
+  #ifndef __fastcall
+    #define __fastcall
+  #endif
+#endif
+
+#ifndef DL_IMPORT
+  #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+  #define DL_EXPORT(t) t
+#endif
+
+#ifndef PY_LONG_LONG
+  #define PY_LONG_LONG LONG_LONG
+#endif
+
+#if PY_VERSION_HEX < 0x02040000
+  #define METH_COEXIST 0
+  #define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type)
+  #define PyDict_Contains(d,o)   PySequence_Contains(d,o)
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  typedef int Py_ssize_t;
+  #define PY_SSIZE_T_MAX INT_MAX
+  #define PY_SSIZE_T_MIN INT_MIN
+  #define PY_FORMAT_SIZE_T ""
+  #define PyInt_FromSsize_t(z) PyInt_FromLong(z)
+  #define PyInt_AsSsize_t(o)   __Pyx_PyInt_AsInt(o)
+  #define PyNumber_Index(o)    PyNumber_Int(o)
+  #define PyIndex_Check(o)     PyNumber_Check(o)
+  #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message)
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt)
+  #define Py_TYPE(ob)   (((PyObject*)(ob))->ob_type)
+  #define Py_SIZE(ob)   (((PyVarObject*)(ob))->ob_size)
+  #define PyVarObject_HEAD_INIT(type, size) \
+          PyObject_HEAD_INIT(type) size,
+  #define PyType_Modified(t)
+
+  typedef struct {
+     void *buf;
+     PyObject *obj;
+     Py_ssize_t len;
+     Py_ssize_t itemsize;
+     int readonly;
+     int ndim;
+     char *format;
+     Py_ssize_t *shape;
+     Py_ssize_t *strides;
+     Py_ssize_t *suboffsets;
+     void *internal;
+  } Py_buffer;
+
+  #define PyBUF_SIMPLE 0
+  #define PyBUF_WRITABLE 0x0001
+  #define PyBUF_FORMAT 0x0004
+  #define PyBUF_ND 0x0008
+  #define PyBUF_STRIDES (0x0010 | PyBUF_ND)
+  #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES)
+  #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES)
+  #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES)
+  #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES)
+
+#endif
+
+#if PY_MAJOR_VERSION < 3
+  #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+#else
+  #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define Py_TPFLAGS_CHECKTYPES 0
+  #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+
+#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3)
+  #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyBaseString_Type            PyUnicode_Type
+  #define PyStringObject               PyUnicodeObject
+  #define PyString_Type                PyUnicode_Type
+  #define PyString_Check               PyUnicode_Check
+  #define PyString_CheckExact          PyUnicode_CheckExact
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define PyBytesObject                PyStringObject
+  #define PyBytes_Type                 PyString_Type
+  #define PyBytes_Check                PyString_Check
+  #define PyBytes_CheckExact           PyString_CheckExact
+  #define PyBytes_FromString           PyString_FromString
+  #define PyBytes_FromStringAndSize    PyString_FromStringAndSize
+  #define PyBytes_FromFormat           PyString_FromFormat
+  #define PyBytes_DecodeEscape         PyString_DecodeEscape
+  #define PyBytes_AsString             PyString_AsString
+  #define PyBytes_AsStringAndSize      PyString_AsStringAndSize
+  #define PyBytes_Size                 PyString_Size
+  #define PyBytes_AS_STRING            PyString_AS_STRING
+  #define PyBytes_GET_SIZE             PyString_GET_SIZE
+  #define PyBytes_Repr                 PyString_Repr
+  #define PyBytes_Concat               PyString_Concat
+  #define PyBytes_ConcatAndDel         PyString_ConcatAndDel
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define PySet_Check(obj)             PyObject_TypeCheck(obj, &PySet_Type)
+  #define PyFrozenSet_Check(obj)       PyObject_TypeCheck(obj, &PyFrozenSet_Type)
+#endif
+#ifndef PySet_CheckExact
+  #define PySet_CheckExact(obj)        (Py_TYPE(obj) == &PySet_Type)
+#endif
+
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyIntObject                  PyLongObject
+  #define PyInt_Type                   PyLong_Type
+  #define PyInt_Check(op)              PyLong_Check(op)
+  #define PyInt_CheckExact(op)         PyLong_CheckExact(op)
+  #define PyInt_FromString             PyLong_FromString
+  #define PyInt_FromUnicode            PyLong_FromUnicode
+  #define PyInt_FromLong               PyLong_FromLong
+  #define PyInt_FromSize_t             PyLong_FromSize_t
+  #define PyInt_FromSsize_t            PyLong_FromSsize_t
+  #define PyInt_AsLong                 PyLong_AsLong
+  #define PyInt_AS_LONG                PyLong_AS_LONG
+  #define PyInt_AsSsize_t              PyLong_AsSsize_t
+  #define PyInt_AsUnsignedLongMask     PyLong_AsUnsignedLongMask
+  #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyBoolObject                 PyLongObject
+#endif
+
+#if PY_VERSION_HEX < 0x03020000
+  typedef long Py_hash_t;
+  #define __Pyx_PyInt_FromHash_t PyInt_FromLong
+  #define __Pyx_PyInt_AsHash_t   PyInt_AsLong
+#else
+  #define __Pyx_PyInt_FromHash_t PyInt_FromSsize_t
+  #define __Pyx_PyInt_AsHash_t   PyInt_AsSsize_t
+#endif
+
+
+#if PY_MAJOR_VERSION >= 3
+  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_TrueDivide(x,y)
+  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceTrueDivide(x,y)
+#else
+  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_Divide(x,y)
+  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceDivide(x,y)
+#endif
+
+#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300)
+  #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b)
+  #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value)
+  #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b)
+#else
+  #define __Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0)))
+  #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1)))
+  #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1)))
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func))
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  #define __Pyx_GetAttrString(o,n)   PyObject_GetAttrString((o),((char *)(n)))
+  #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a))
+  #define __Pyx_DelAttrString(o,n)   PyObject_DelAttrString((o),((char *)(n)))
+#else
+  #define __Pyx_GetAttrString(o,n)   PyObject_GetAttrString((o),(n))
+  #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a))
+  #define __Pyx_DelAttrString(o,n)   PyObject_DelAttrString((o),(n))
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  #define __Pyx_NAMESTR(n) ((char *)(n))
+  #define __Pyx_DOCSTR(n)  ((char *)(n))
+#else
+  #define __Pyx_NAMESTR(n) (n)
+  #define __Pyx_DOCSTR(n)  (n)
+#endif
+
+#ifndef __PYX_EXTERN_C
+  #ifdef __cplusplus
+    #define __PYX_EXTERN_C extern "C"
+  #else
+    #define __PYX_EXTERN_C extern
+  #endif
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+#define _USE_MATH_DEFINES
+#endif
+#include <math.h>
+#define __PYX_HAVE__twisted__python___epoll
+#define __PYX_HAVE_API__twisted__python___epoll
+#include "stdio.h"
+#include "errno.h"
+#include "string.h"
+#include "stdint.h"
+#include "sys/epoll.h"
+#ifdef _OPENMP
+#include <omp.h>
+#endif /* _OPENMP */
+
+#ifdef PYREX_WITHOUT_ASSERTIONS
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+
+/* inline attribute */
+#ifndef CYTHON_INLINE
+  #if defined(__GNUC__)
+    #define CYTHON_INLINE __inline__
+  #elif defined(_MSC_VER)
+    #define CYTHON_INLINE __inline
+  #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+    #define CYTHON_INLINE inline
+  #else
+    #define CYTHON_INLINE
+  #endif
+#endif
+
+/* unused attribute */
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+#   if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+#     define CYTHON_UNUSED __attribute__ ((__unused__))
+#   else
+#     define CYTHON_UNUSED
+#   endif
+# elif defined(__ICC) || defined(__INTEL_COMPILER)
+#   define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+#   define CYTHON_UNUSED
+# endif
+#endif
+
+typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/
+
+
+/* Type Conversion Predeclarations */
+
+#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s)
+#define __Pyx_PyBytes_AsUString(s)   ((unsigned char*) PyBytes_AsString(s))
+
+#define __Pyx_Owned_Py_None(b) (Py_INCREF(Py_None), Py_None)
+#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False))
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x);
+
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*);
+
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+
+
+#ifdef __GNUC__
+  /* Test for GCC > 2.95 */
+  #if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))
+    #define likely(x)   __builtin_expect(!!(x), 1)
+    #define unlikely(x) __builtin_expect(!!(x), 0)
+  #else /* __GNUC__ > 2 ... */
+    #define likely(x)   (x)
+    #define unlikely(x) (x)
+  #endif /* __GNUC__ > 2 ... */
+#else /* __GNUC__ */
+  #define likely(x)   (x)
+  #define unlikely(x) (x)
+#endif /* __GNUC__ */
+    
+static PyObject *__pyx_m;
+static PyObject *__pyx_b;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+
+static const char *__pyx_f[] = {
+  "_epoll.pyx",
+};
+
+/*--- Type declarations ---*/
+struct __pyx_obj_7twisted_6python_6_epoll_epoll;
+
+/* "twisted/python/_epoll.pyx":106
+ *         free(events)
+ * 
+ * cdef class epoll:             # <<<<<<<<<<<<<<
+ *     """
+ *     Represent a set of file descriptors being monitored for events.
+ */
+struct __pyx_obj_7twisted_6python_6_epoll_epoll {
+  PyObject_HEAD
+  int fd;
+  int initialized;
+};
+
+
+#ifndef CYTHON_REFNANNY
+  #define CYTHON_REFNANNY 0
+#endif
+
+#if CYTHON_REFNANNY
+  typedef struct {
+    void (*INCREF)(void*, PyObject*, int);
+    void (*DECREF)(void*, PyObject*, int);
+    void (*GOTREF)(void*, PyObject*, int);
+    void (*GIVEREF)(void*, PyObject*, int);
+    void* (*SetupContext)(const char*, int, const char*);
+    void (*FinishContext)(void**);
+  } __Pyx_RefNannyAPIStruct;
+  static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+  static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname); /*proto*/
+  #define __Pyx_RefNannyDeclarations void *__pyx_refnanny = NULL;
+  #define __Pyx_RefNannySetupContext(name)           __pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+  #define __Pyx_RefNannyFinishContext()           __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+  #define __Pyx_INCREF(r)  __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_DECREF(r)  __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_GOTREF(r)  __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_XINCREF(r)  do { if((r) != NULL) {__Pyx_INCREF(r); }} while(0)
+  #define __Pyx_XDECREF(r)  do { if((r) != NULL) {__Pyx_DECREF(r); }} while(0)
+  #define __Pyx_XGOTREF(r)  do { if((r) != NULL) {__Pyx_GOTREF(r); }} while(0)
+  #define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);}} while(0)
+#else
+  #define __Pyx_RefNannyDeclarations
+  #define __Pyx_RefNannySetupContext(name)
+  #define __Pyx_RefNannyFinishContext()
+  #define __Pyx_INCREF(r) Py_INCREF(r)
+  #define __Pyx_DECREF(r) Py_DECREF(r)
+  #define __Pyx_GOTREF(r)
+  #define __Pyx_GIVEREF(r)
+  #define __Pyx_XINCREF(r) Py_XINCREF(r)
+  #define __Pyx_XDECREF(r) Py_XDECREF(r)
+  #define __Pyx_XGOTREF(r)
+  #define __Pyx_XGIVEREF(r)
+#endif /* CYTHON_REFNANNY */
+
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/
+
+static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause); /*proto*/
+
+static void __Pyx_RaiseDoubleKeywordsError(
+    const char* func_name, PyObject* kw_name); /*proto*/
+
+static int __Pyx_ParseOptionalKeywords(PyObject *kwds, PyObject **argnames[],     PyObject *kwds2, PyObject *values[], Py_ssize_t num_pos_args,     const char* function_name); /*proto*/
+
+static void __Pyx_RaiseArgtupleInvalid(const char* func_name, int exact,
+    Py_ssize_t num_min, Py_ssize_t num_max, Py_ssize_t num_found); /*proto*/
+
+static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject *);
+
+static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *);
+
+static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *);
+
+static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *);
+
+static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *);
+
+static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *);
+
+static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *);
+
+static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *);
+
+static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *);
+
+static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *);
+
+static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *);
+
+static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *);
+
+static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *);
+
+static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *);
+
+static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *);
+
+static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *);
+
+static int __Pyx_check_binary_version(void);
+
+static void __Pyx_AddTraceback(const char *funcname, int __pyx_clineno,
+                               int __pyx_lineno, const char *__pyx_filename); /*proto*/
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
+
+/* Module declarations from 'twisted.python._epoll' */
+static PyTypeObject *__pyx_ptype_7twisted_6python_6_epoll_epoll = 0;
+static PyObject *__pyx_f_7twisted_6python_6_epoll_call_epoll_wait(int, unsigned int, int); /*proto*/
+#define __Pyx_MODULE_NAME "twisted.python._epoll"
+int __pyx_module_is_main_twisted__python___epoll = 0;
+
+/* Implementation of 'twisted.python._epoll' */
+static PyObject *__pyx_builtin_IOError;
+static char __pyx_k_1[] = "\nInterface to epoll I/O event notification facility.\n";
+static char __pyx_k__ET[] = "ET";
+static char __pyx_k__IN[] = "IN";
+static char __pyx_k__fd[] = "fd";
+static char __pyx_k__op[] = "op";
+static char __pyx_k__ERR[] = "ERR";
+static char __pyx_k__HUP[] = "HUP";
+static char __pyx_k__MSG[] = "MSG";
+static char __pyx_k__OUT[] = "OUT";
+static char __pyx_k__PRI[] = "PRI";
+static char __pyx_k__size[] = "size";
+static char __pyx_k__RDBAND[] = "RDBAND";
+static char __pyx_k__RDNORM[] = "RDNORM";
+static char __pyx_k__WRBAND[] = "WRBAND";
+static char __pyx_k__WRNORM[] = "WRNORM";
+static char __pyx_k__events[] = "events";
+static char __pyx_k__CTL_ADD[] = "CTL_ADD";
+static char __pyx_k__CTL_DEL[] = "CTL_DEL";
+static char __pyx_k__CTL_MOD[] = "CTL_MOD";
+static char __pyx_k__EPOLLET[] = "EPOLLET";
+static char __pyx_k__EPOLLIN[] = "EPOLLIN";
+static char __pyx_k__IOError[] = "IOError";
+static char __pyx_k__timeout[] = "timeout";
+static char __pyx_k__EPOLLERR[] = "EPOLLERR";
+static char __pyx_k__EPOLLHUP[] = "EPOLLHUP";
+static char __pyx_k__EPOLLMSG[] = "EPOLLMSG";
+static char __pyx_k__EPOLLOUT[] = "EPOLLOUT";
+static char __pyx_k__EPOLLPRI[] = "EPOLLPRI";
+static char __pyx_k____main__[] = "__main__";
+static char __pyx_k____test__[] = "__test__";
+static char __pyx_k__maxevents[] = "maxevents";
+static char __pyx_k__EPOLLRDBAND[] = "EPOLLRDBAND";
+static char __pyx_k__EPOLLRDNORM[] = "EPOLLRDNORM";
+static char __pyx_k__EPOLLWRBAND[] = "EPOLLWRBAND";
+static char __pyx_k__EPOLLWRNORM[] = "EPOLLWRNORM";
+static PyObject *__pyx_n_s__CTL_ADD;
+static PyObject *__pyx_n_s__CTL_DEL;
+static PyObject *__pyx_n_s__CTL_MOD;
+static PyObject *__pyx_n_s__EPOLLERR;
+static PyObject *__pyx_n_s__EPOLLET;
+static PyObject *__pyx_n_s__EPOLLHUP;
+static PyObject *__pyx_n_s__EPOLLIN;
+static PyObject *__pyx_n_s__EPOLLMSG;
+static PyObject *__pyx_n_s__EPOLLOUT;
+static PyObject *__pyx_n_s__EPOLLPRI;
+static PyObject *__pyx_n_s__EPOLLRDBAND;
+static PyObject *__pyx_n_s__EPOLLRDNORM;
+static PyObject *__pyx_n_s__EPOLLWRBAND;
+static PyObject *__pyx_n_s__EPOLLWRNORM;
+static PyObject *__pyx_n_s__ERR;
+static PyObject *__pyx_n_s__ET;
+static PyObject *__pyx_n_s__HUP;
+static PyObject *__pyx_n_s__IN;
+static PyObject *__pyx_n_s__IOError;
+static PyObject *__pyx_n_s__MSG;
+static PyObject *__pyx_n_s__OUT;
+static PyObject *__pyx_n_s__PRI;
+static PyObject *__pyx_n_s__RDBAND;
+static PyObject *__pyx_n_s__RDNORM;
+static PyObject *__pyx_n_s__WRBAND;
+static PyObject *__pyx_n_s__WRNORM;
+static PyObject *__pyx_n_s____main__;
+static PyObject *__pyx_n_s____test__;
+static PyObject *__pyx_n_s__events;
+static PyObject *__pyx_n_s__fd;
+static PyObject *__pyx_n_s__maxevents;
+static PyObject *__pyx_n_s__op;
+static PyObject *__pyx_n_s__size;
+static PyObject *__pyx_n_s__timeout;
+
+/* "twisted/python/_epoll.pyx":68
+ *     cdef extern void PyEval_RestoreThread(PyThreadState*)
+ * 
+ * cdef call_epoll_wait(int fd, unsigned int maxevents, int timeout_msec):             # <<<<<<<<<<<<<<
+ *     """
+ *     Wait for an I/O event, wrap epoll_wait(2).
+ */
+
+static PyObject *__pyx_f_7twisted_6python_6_epoll_call_epoll_wait(int __pyx_v_fd, unsigned int __pyx_v_maxevents, int __pyx_v_timeout_msec) {
+  struct epoll_event *__pyx_v_events;
+  int __pyx_v_result;
+  int __pyx_v_nbytes;
+  PyThreadState *__pyx_v__save;
+  PyObject *__pyx_v_results = NULL;
+  long __pyx_v_i;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  int __pyx_t_5;
+  int __pyx_t_6;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("call_epoll_wait");
+
+  /* "twisted/python/_epoll.pyx":89
+ *     cdef PyThreadState *_save
+ * 
+ *     nbytes = sizeof(epoll_event) * maxevents             # <<<<<<<<<<<<<<
+ *     events = <epoll_event*>malloc(nbytes)
+ *     memset(events, 0, nbytes)
+ */
+  __pyx_v_nbytes = ((sizeof(struct epoll_event)) * __pyx_v_maxevents);
+
+  /* "twisted/python/_epoll.pyx":90
+ * 
+ *     nbytes = sizeof(epoll_event) * maxevents
+ *     events = <epoll_event*>malloc(nbytes)             # <<<<<<<<<<<<<<
+ *     memset(events, 0, nbytes)
+ *     try:
+ */
+  __pyx_v_events = ((struct epoll_event *)malloc(__pyx_v_nbytes));
+
+  /* "twisted/python/_epoll.pyx":91
+ *     nbytes = sizeof(epoll_event) * maxevents
+ *     events = <epoll_event*>malloc(nbytes)
+ *     memset(events, 0, nbytes)             # <<<<<<<<<<<<<<
+ *     try:
+ *         _save = PyEval_SaveThread()
+ */
+  memset(__pyx_v_events, 0, __pyx_v_nbytes);
+
+  /* "twisted/python/_epoll.pyx":92
+ *     events = <epoll_event*>malloc(nbytes)
+ *     memset(events, 0, nbytes)
+ *     try:             # <<<<<<<<<<<<<<
+ *         _save = PyEval_SaveThread()
+ *         result = epoll_wait(fd, events, maxevents, timeout_msec)
+ */
+  /*try:*/ {
+
+    /* "twisted/python/_epoll.pyx":93
+ *     memset(events, 0, nbytes)
+ *     try:
+ *         _save = PyEval_SaveThread()             # <<<<<<<<<<<<<<
+ *         result = epoll_wait(fd, events, maxevents, timeout_msec)
+ *         PyEval_RestoreThread(_save)
+ */
+    __pyx_v__save = PyEval_SaveThread();
+
+    /* "twisted/python/_epoll.pyx":94
+ *     try:
+ *         _save = PyEval_SaveThread()
+ *         result = epoll_wait(fd, events, maxevents, timeout_msec)             # <<<<<<<<<<<<<<
+ *         PyEval_RestoreThread(_save)
+ * 
+ */
+    __pyx_v_result = epoll_wait(__pyx_v_fd, __pyx_v_events, __pyx_v_maxevents, __pyx_v_timeout_msec);
+
+    /* "twisted/python/_epoll.pyx":95
+ *         _save = PyEval_SaveThread()
+ *         result = epoll_wait(fd, events, maxevents, timeout_msec)
+ *         PyEval_RestoreThread(_save)             # <<<<<<<<<<<<<<
+ * 
+ *         if result == -1:
+ */
+    PyEval_RestoreThread(__pyx_v__save);
+
+    /* "twisted/python/_epoll.pyx":97
+ *         PyEval_RestoreThread(_save)
+ * 
+ *         if result == -1:             # <<<<<<<<<<<<<<
+ *             raise IOError(errno, strerror(errno))
+ *         results = []
+ */
+    __pyx_t_1 = (__pyx_v_result == -1);
+    if (__pyx_t_1) {
+
+      /* "twisted/python/_epoll.pyx":98
+ * 
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))             # <<<<<<<<<<<<<<
+ *         results = []
+ *         for i from 0 <= i < result:
+ */
+      __pyx_t_2 = PyInt_FromLong(errno); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      __Pyx_GOTREF(__pyx_t_2);
+      __pyx_t_3 = PyBytes_FromString(strerror(errno)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      __Pyx_GOTREF(((PyObject *)__pyx_t_3));
+      __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+      PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2);
+      __Pyx_GIVEREF(__pyx_t_2);
+      PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_t_3));
+      __Pyx_GIVEREF(((PyObject *)__pyx_t_3));
+      __pyx_t_2 = 0;
+      __pyx_t_3 = 0;
+      __pyx_t_3 = PyObject_Call(__pyx_builtin_IOError, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      __Pyx_GOTREF(__pyx_t_3);
+      __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0;
+      __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+      {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      goto __pyx_L6;
+    }
+    __pyx_L6:;
+
+    /* "twisted/python/_epoll.pyx":99
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))
+ *         results = []             # <<<<<<<<<<<<<<
+ *         for i from 0 <= i < result:
+ *             results.append((events[i].data.fd, <int>events[i].events))
+ */
+    __pyx_t_3 = PyList_New(0); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 99; __pyx_clineno = __LINE__; goto __pyx_L4;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_3));
+    __pyx_v_results = __pyx_t_3;
+    __pyx_t_3 = 0;
+
+    /* "twisted/python/_epoll.pyx":100
+ *             raise IOError(errno, strerror(errno))
+ *         results = []
+ *         for i from 0 <= i < result:             # <<<<<<<<<<<<<<
+ *             results.append((events[i].data.fd, <int>events[i].events))
+ *         return results
+ */
+    __pyx_t_5 = __pyx_v_result;
+    for (__pyx_v_i = 0; __pyx_v_i < __pyx_t_5; __pyx_v_i++) {
+
+      /* "twisted/python/_epoll.pyx":101
+ *         results = []
+ *         for i from 0 <= i < result:
+ *             results.append((events[i].data.fd, <int>events[i].events))             # <<<<<<<<<<<<<<
+ *         return results
+ *     finally:
+ */
+      if (unlikely(((PyObject *)__pyx_v_results) == Py_None)) {
+        PyErr_Format(PyExc_AttributeError, "'NoneType' object has no attribute '%s'", "append"); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L4;} 
+      }
+      __pyx_t_3 = PyInt_FromLong((__pyx_v_events[__pyx_v_i]).data.fd); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      __Pyx_GOTREF(__pyx_t_3);
+      __pyx_t_4 = PyInt_FromLong(((int)(__pyx_v_events[__pyx_v_i]).events)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      __Pyx_GOTREF(__pyx_t_4);
+      __pyx_t_2 = PyTuple_New(2); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      __Pyx_GOTREF(((PyObject *)__pyx_t_2));
+      PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_t_3);
+      __Pyx_GIVEREF(__pyx_t_3);
+      PyTuple_SET_ITEM(__pyx_t_2, 1, __pyx_t_4);
+      __Pyx_GIVEREF(__pyx_t_4);
+      __pyx_t_3 = 0;
+      __pyx_t_4 = 0;
+      __pyx_t_6 = PyList_Append(__pyx_v_results, ((PyObject *)__pyx_t_2)); if (unlikely(__pyx_t_6 == -1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 101; __pyx_clineno = __LINE__; goto __pyx_L4;}
+      __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0;
+    }
+
+    /* "twisted/python/_epoll.pyx":102
+ *         for i from 0 <= i < result:
+ *             results.append((events[i].data.fd, <int>events[i].events))
+ *         return results             # <<<<<<<<<<<<<<
+ *     finally:
+ *         free(events)
+ */
+    __Pyx_XDECREF(__pyx_r);
+    __Pyx_INCREF(((PyObject *)__pyx_v_results));
+    __pyx_r = ((PyObject *)__pyx_v_results);
+    goto __pyx_L3;
+  }
+
+  /* "twisted/python/_epoll.pyx":104
+ *         return results
+ *     finally:
+ *         free(events)             # <<<<<<<<<<<<<<
+ * 
+ * cdef class epoll:
+ */
+  /*finally:*/ {
+    int __pyx_why;
+    PyObject *__pyx_exc_type, *__pyx_exc_value, *__pyx_exc_tb;
+    int __pyx_exc_lineno;
+    __pyx_exc_type = 0; __pyx_exc_value = 0; __pyx_exc_tb = 0; __pyx_exc_lineno = 0;
+    __pyx_why = 0; goto __pyx_L5;
+    __pyx_L3: __pyx_exc_type = 0; __pyx_exc_value = 0; __pyx_exc_tb = 0; __pyx_exc_lineno = 0;
+    __pyx_why = 3; goto __pyx_L5;
+    __pyx_L4: {
+      __pyx_why = 4;
+      __Pyx_XDECREF(__pyx_t_3); __pyx_t_3 = 0;
+      __Pyx_XDECREF(__pyx_t_4); __pyx_t_4 = 0;
+      __Pyx_XDECREF(__pyx_t_2); __pyx_t_2 = 0;
+      __Pyx_ErrFetch(&__pyx_exc_type, &__pyx_exc_value, &__pyx_exc_tb);
+      __pyx_exc_lineno = __pyx_lineno;
+      goto __pyx_L5;
+    }
+    __pyx_L5:;
+    free(__pyx_v_events);
+    switch (__pyx_why) {
+      case 3: goto __pyx_L0;
+      case 4: {
+        __Pyx_ErrRestore(__pyx_exc_type, __pyx_exc_value, __pyx_exc_tb);
+        __pyx_lineno = __pyx_exc_lineno;
+        __pyx_exc_type = 0;
+        __pyx_exc_value = 0;
+        __pyx_exc_tb = 0;
+        goto __pyx_L1_error;
+      }
+    }
+  }
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_AddTraceback("twisted.python._epoll.call_epoll_wait", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = 0;
+  __pyx_L0:;
+  __Pyx_XDECREF(__pyx_v_results);
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
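
The generated helper above follows the Cython source quoted in its comments: it allocates an epoll_event buffer, releases the GIL with PyEval_SaveThread() so other Python threads can run while epoll_wait(2) blocks, restores the thread state, raises IOError(errno, strerror(errno)) on failure, and otherwise returns a list of (fd, events) tuples, with the finally block freeing the buffer on every path. For readers not used to Cython output, a minimal plain-C sketch of the same epoll_wait pattern (minus the Python object handling) could look like the following; wait_for_events and its diagnostics are illustrative names, not part of the upstream sources.

    /* Sketch only: the epoll_wait(2) pattern used by call_epoll_wait above,
     * without the CPython reference counting and GIL handling. */
    #include <errno.h>
    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>
    #include <sys/epoll.h>
    #include <unistd.h>

    /* Wait up to timeout_msec for events, print each ready (fd, mask) pair
     * and return the event count, or -1 on error. */
    static int wait_for_events(int epfd, unsigned int maxevents, int timeout_msec)
    {
        struct epoll_event *events = calloc(maxevents, sizeof *events);
        if (events == NULL)
            return -1;

        int n = epoll_wait(epfd, events, (int)maxevents, timeout_msec);
        if (n == -1)
            fprintf(stderr, "epoll_wait: %s\n", strerror(errno));

        for (int i = 0; i < n; i++)
            printf("fd %d ready, events 0x%x\n",
                   events[i].data.fd, (unsigned)events[i].events);

        free(events);                 /* mirrors the finally: free(events) */
        return n;
    }

    int main(void)
    {
        int epfd = epoll_create(1024);   /* the size argument is only a hint */
        if (epfd == -1) {
            perror("epoll_create");
            return 1;
        }
        wait_for_events(epfd, 8, 0);     /* timeout 0: return immediately */
        close(epfd);
        return 0;
    }
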
+
+/* "twisted/python/_epoll.pyx":114
+ *     cdef int initialized
+ * 
+ *     def __init__(self, int size=1023):             # <<<<<<<<<<<<<<
+ *         """
+ *         The constructor arguments are compatible with select.poll.__init__.
+ */
+
+static int __pyx_pf_7twisted_6python_6_epoll_5epoll___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll___init__[] = "\n        The constructor arguments are compatible with select.poll.__init__.\n        ";
+struct wrapperbase __pyx_wrapperbase_7twisted_6python_6_epoll_5epoll___init__;
+static int __pyx_pf_7twisted_6python_6_epoll_5epoll___init__(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  int __pyx_v_size;
+  int __pyx_r;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__size,0};
+  __Pyx_RefNannySetupContext("__init__");
+  {
+    PyObject* values[1] = {0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        if (kw_args > 0) {
+          PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__size);
+          if (value) { values[0] = value; kw_args--; }
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "__init__") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 114; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else {
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+    }
+    if (values[0]) {
+      __pyx_v_size = __Pyx_PyInt_AsInt(values[0]); if (unlikely((__pyx_v_size == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 114; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    } else {
+      __pyx_v_size = ((int)1023);
+    }
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("__init__", 0, 0, 1, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 114; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return -1;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "twisted/python/_epoll.pyx":118
+ *         The constructor arguments are compatible with select.poll.__init__.
+ *         """
+ *         self.fd = epoll_create(size)             # <<<<<<<<<<<<<<
+ *         if self.fd == -1:
+ *             raise IOError(errno, strerror(errno))
+ */
+  ((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd = epoll_create(__pyx_v_size);
+
+  /* "twisted/python/_epoll.pyx":119
+ *         """
+ *         self.fd = epoll_create(size)
+ *         if self.fd == -1:             # <<<<<<<<<<<<<<
+ *             raise IOError(errno, strerror(errno))
+ *         self.initialized = 1
+ */
+  __pyx_t_1 = (((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd == -1);
+  if (__pyx_t_1) {
+
+    /* "twisted/python/_epoll.pyx":120
+ *         self.fd = epoll_create(size)
+ *         if self.fd == -1:
+ *             raise IOError(errno, strerror(errno))             # <<<<<<<<<<<<<<
+ *         self.initialized = 1
+ * 
+ */
+    __pyx_t_2 = PyInt_FromLong(errno); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 120; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __pyx_t_3 = PyBytes_FromString(strerror(errno)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 120; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_3));
+    __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 120; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+    PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2);
+    __Pyx_GIVEREF(__pyx_t_2);
+    PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_t_3));
+    __Pyx_GIVEREF(((PyObject *)__pyx_t_3));
+    __pyx_t_2 = 0;
+    __pyx_t_3 = 0;
+    __pyx_t_3 = PyObject_Call(__pyx_builtin_IOError, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 120; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_3);
+    __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0;
+    __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 120; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  /* "twisted/python/_epoll.pyx":121
+ *         if self.fd == -1:
+ *             raise IOError(errno, strerror(errno))
+ *         self.initialized = 1             # <<<<<<<<<<<<<<
+ * 
+ *     def __dealloc__(self):
+ */
+  ((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->initialized = 1;
+
+  __pyx_r = 0;
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.__init__", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = -1;
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "twisted/python/_epoll.pyx":123
+ *         self.initialized = 1
+ * 
+ *     def __dealloc__(self):             # <<<<<<<<<<<<<<
+ *         if self.initialized:
+ *             close(self.fd)
+ */
+
+static void __pyx_pf_7twisted_6python_6_epoll_5epoll_1__dealloc__(PyObject *__pyx_v_self); /*proto*/
+static void __pyx_pf_7twisted_6python_6_epoll_5epoll_1__dealloc__(PyObject *__pyx_v_self) {
+  __Pyx_RefNannyDeclarations
+  __Pyx_RefNannySetupContext("__dealloc__");
+
+  /* "twisted/python/_epoll.pyx":124
+ * 
+ *     def __dealloc__(self):
+ *         if self.initialized:             # <<<<<<<<<<<<<<
+ *             close(self.fd)
+ *             self.initialized = 0
+ */
+  if (((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->initialized) {
+
+    /* "twisted/python/_epoll.pyx":125
+ *     def __dealloc__(self):
+ *         if self.initialized:
+ *             close(self.fd)             # <<<<<<<<<<<<<<
+ *             self.initialized = 0
+ * 
+ */
+    close(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd);
+
+    /* "twisted/python/_epoll.pyx":126
+ *         if self.initialized:
+ *             close(self.fd)
+ *             self.initialized = 0             # <<<<<<<<<<<<<<
+ * 
+ *     def close(self):
+ */
+    ((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->initialized = 0;
+    goto __pyx_L5;
+  }
+  __pyx_L5:;
+
+  __Pyx_RefNannyFinishContext();
+}
+
+/* "twisted/python/_epoll.pyx":128
+ *             self.initialized = 0
+ * 
+ *     def close(self):             # <<<<<<<<<<<<<<
+ *         """
+ *         Close the epoll file descriptor.
+ */
+
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_2close(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll_2close[] = "\n        Close the epoll file descriptor.\n        ";
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_2close(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("close");
+
+  /* "twisted/python/_epoll.pyx":132
+ *         Close the epoll file descriptor.
+ *         """
+ *         if self.initialized:             # <<<<<<<<<<<<<<
+ *             if close(self.fd) == -1:
+ *                 raise IOError(errno, strerror(errno))
+ */
+  if (((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->initialized) {
+
+    /* "twisted/python/_epoll.pyx":133
+ *         """
+ *         if self.initialized:
+ *             if close(self.fd) == -1:             # <<<<<<<<<<<<<<
+ *                 raise IOError(errno, strerror(errno))
+ *             self.initialized = 0
+ */
+    __pyx_t_1 = (close(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd) == -1);
+    if (__pyx_t_1) {
+
+      /* "twisted/python/_epoll.pyx":134
+ *         if self.initialized:
+ *             if close(self.fd) == -1:
+ *                 raise IOError(errno, strerror(errno))             # <<<<<<<<<<<<<<
+ *             self.initialized = 0
+ * 
+ */
+      __pyx_t_2 = PyInt_FromLong(errno); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 134; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_2);
+      __pyx_t_3 = PyBytes_FromString(strerror(errno)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 134; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(((PyObject *)__pyx_t_3));
+      __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 134; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+      PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2);
+      __Pyx_GIVEREF(__pyx_t_2);
+      PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_t_3));
+      __Pyx_GIVEREF(((PyObject *)__pyx_t_3));
+      __pyx_t_2 = 0;
+      __pyx_t_3 = 0;
+      __pyx_t_3 = PyObject_Call(__pyx_builtin_IOError, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 134; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      __Pyx_GOTREF(__pyx_t_3);
+      __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0;
+      __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+      __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+      {__pyx_filename = __pyx_f[0]; __pyx_lineno = 134; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+      goto __pyx_L6;
+    }
+    __pyx_L6:;
+
+    /* "twisted/python/_epoll.pyx":135
+ *             if close(self.fd) == -1:
+ *                 raise IOError(errno, strerror(errno))
+ *             self.initialized = 0             # <<<<<<<<<<<<<<
+ * 
+ *     def fileno(self):
+ */
+    ((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->initialized = 0;
+    goto __pyx_L5;
+  }
+  __pyx_L5:;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.close", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
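
Taken together, __init__, close() and __dealloc__ above maintain one small invariant: initialized is set only after epoll_create(size) succeeds, and it is cleared again once the descriptor has been closed, so the fd is closed at most once whether close() is called explicitly or the object is simply deallocated. A compressed C sketch of that guard, using a hypothetical ep_handle struct and ep_open/ep_close names that do not appear in the upstream sources:

    #include <sys/epoll.h>
    #include <unistd.h>

    struct ep_handle {
        int fd;
        int initialized;   /* non-zero only while fd is a live epoll descriptor */
    };

    /* Mirrors __init__: mark the handle initialized only on success. */
    static int ep_open(struct ep_handle *h, int size_hint)
    {
        h->initialized = 0;
        h->fd = epoll_create(size_hint);
        if (h->fd == -1)
            return -1;      /* the wrapper raises IOError(errno, ...) here */
        h->initialized = 1;
        return 0;
    }

    /* Mirrors close(): idempotent thanks to the flag; error reporting is
     * left to the caller, just as close() raises and __dealloc__ cannot. */
    static int ep_close(struct ep_handle *h)
    {
        if (!h->initialized)
            return 0;
        if (close(h->fd) == -1)
            return -1;
        h->initialized = 0;
        return 0;
    }
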
+
+/* "twisted/python/_epoll.pyx":137
+ *             self.initialized = 0
+ * 
+ *     def fileno(self):             # <<<<<<<<<<<<<<
+ *         """
+ *         Return the epoll file descriptor number.
+ */
+
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_3fileno(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll_3fileno[] = "\n        Return the epoll file descriptor number.\n        ";
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_3fileno(PyObject *__pyx_v_self, CYTHON_UNUSED PyObject *unused) {
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("fileno");
+
+  /* "twisted/python/_epoll.pyx":141
+ *         Return the epoll file descriptor number.
+ *         """
+ *         return self.fd             # <<<<<<<<<<<<<<
+ * 
+ *     def register(self, int fd, int events):
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_1 = PyInt_FromLong(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 141; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_r = __pyx_t_1;
+  __pyx_t_1 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.fileno", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "twisted/python/_epoll.pyx":143
+ *         return self.fd
+ * 
+ *     def register(self, int fd, int events):             # <<<<<<<<<<<<<<
+ *         """
+ *         Add (register) a file descriptor to be monitored by self.
+ */
+
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_4register(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll_4register[] = "\n        Add (register) a file descriptor to be monitored by self.\n\n        This method is compatible with select.epoll.register in Python 2.6.\n\n        Wrap epoll_ctl(2).\n\n        @type fd: C{int}\n        @param fd: File descriptor to modify\n\n        @type events: C{int}\n        @param events: A bit set of IN, OUT, PRI, ERR, HUP, and ET.\n\n        @raise IOError: Raised if the underlying epoll_ctl() call fa [...]
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_4register(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  int __pyx_v_fd;
+  int __pyx_v_events;
+  int __pyx_v_result;
+  struct epoll_event __pyx_v_evt;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_t_2;
+  int __pyx_t_3;
+  PyObject *__pyx_t_4 = NULL;
+  PyObject *__pyx_t_5 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__fd,&__pyx_n_s__events,0};
+  __Pyx_RefNannySetupContext("register");
+  {
+    PyObject* values[2] = {0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__fd);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__events);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("register", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "register") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 2) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+    }
+    __pyx_v_fd = __Pyx_PyInt_AsInt(values[0]); if (unlikely((__pyx_v_fd == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_events = __Pyx_PyInt_AsInt(values[1]); if (unlikely((__pyx_v_events == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("register", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 143; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.register", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "twisted/python/_epoll.pyx":161
+ *         cdef int result
+ *         cdef epoll_event evt
+ *         evt.events = events             # <<<<<<<<<<<<<<
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, CTL_ADD, fd, &evt)
+ */
+  __pyx_v_evt.events = __pyx_v_events;
+
+  /* "twisted/python/_epoll.pyx":162
+ *         cdef epoll_event evt
+ *         evt.events = events
+ *         evt.data.fd = fd             # <<<<<<<<<<<<<<
+ *         result = epoll_ctl(self.fd, CTL_ADD, fd, &evt)
+ *         if result == -1:
+ */
+  __pyx_v_evt.data.fd = __pyx_v_fd;
+
+  /* "twisted/python/_epoll.pyx":163
+ *         evt.events = events
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, CTL_ADD, fd, &evt)             # <<<<<<<<<<<<<<
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))
+ */
+  __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__CTL_ADD); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 163; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_t_2 = __Pyx_PyInt_AsInt(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 163; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_v_result = epoll_ctl(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd, __pyx_t_2, __pyx_v_fd, (&__pyx_v_evt));
+
+  /* "twisted/python/_epoll.pyx":164
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, CTL_ADD, fd, &evt)
+ *         if result == -1:             # <<<<<<<<<<<<<<
+ *             raise IOError(errno, strerror(errno))
+ * 
+ */
+  __pyx_t_3 = (__pyx_v_result == -1);
+  if (__pyx_t_3) {
+
+    /* "twisted/python/_epoll.pyx":165
+ *         result = epoll_ctl(self.fd, CTL_ADD, fd, &evt)
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))             # <<<<<<<<<<<<<<
+ * 
+ *     def unregister(self, int fd):
+ */
+    __pyx_t_1 = PyInt_FromLong(errno); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_1);
+    __pyx_t_4 = PyBytes_FromString(strerror(errno)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+    __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_5));
+    PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1);
+    __Pyx_GIVEREF(__pyx_t_1);
+    PyTuple_SET_ITEM(__pyx_t_5, 1, ((PyObject *)__pyx_t_4));
+    __Pyx_GIVEREF(((PyObject *)__pyx_t_4));
+    __pyx_t_1 = 0;
+    __pyx_t_4 = 0;
+    __pyx_t_4 = PyObject_Call(__pyx_builtin_IOError, ((PyObject *)__pyx_t_5), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_4);
+    __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0;
+    __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 165; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.register", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
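
register() above, like the modify() and _control() methods that follow, boils down to the same three steps: fill an epoll_event with the requested event mask and file descriptor, look the CTL_* constant up on the module and pass it to epoll_ctl(2), and raise IOError(errno, strerror(errno)) when the call returns -1. A minimal C rendering of the registration step, using the raw EPOLL_CTL_ADD constant directly; add_fd is an illustrative name, not something defined by this module:

    #include <errno.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/epoll.h>

    /* Start monitoring fd for the given mask (EPOLLIN | EPOLLOUT | ...). */
    static int add_fd(int epfd, int fd, unsigned int event_mask)
    {
        struct epoll_event evt;
        memset(&evt, 0, sizeof evt);
        evt.events = event_mask;
        evt.data.fd = fd;            /* echoed back by epoll_wait() */

        if (epoll_ctl(epfd, EPOLL_CTL_ADD, fd, &evt) == -1) {
            fprintf(stderr, "epoll_ctl(ADD): %s\n", strerror(errno));
            return -1;
        }
        return 0;
    }

modify() is the same call with EPOLL_CTL_MOD, and _control() simply accepts the op as an argument instead of hard-coding it.
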
+
+/* "twisted/python/_epoll.pyx":167
+ *             raise IOError(errno, strerror(errno))
+ * 
+ *     def unregister(self, int fd):             # <<<<<<<<<<<<<<
+ *         """
+ *         Remove (unregister) a file descriptor monitored by self.
+ */
+
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_5unregister(PyObject *__pyx_v_self, PyObject *__pyx_arg_fd); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll_5unregister[] = "\n        Remove (unregister) a file descriptor monitored by self.\n\n        This method is compatible with select.epoll.unregister in Python 2.6.\n\n        Wrap epoll_ctl(2).\n\n        @type fd: C{int}\n        @param fd: File descriptor to modify\n\n        @raise IOError: Raised if the underlying epoll_ctl() call fails.\n        ";
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_5unregister(PyObject *__pyx_v_self, PyObject *__pyx_arg_fd) {
+  int __pyx_v_fd;
+  int __pyx_v_result;
+  struct epoll_event __pyx_v_evt;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_t_2;
+  int __pyx_t_3;
+  PyObject *__pyx_t_4 = NULL;
+  PyObject *__pyx_t_5 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  __Pyx_RefNannySetupContext("unregister");
+  assert(__pyx_arg_fd); {
+    __pyx_v_fd = __Pyx_PyInt_AsInt(__pyx_arg_fd); if (unlikely((__pyx_v_fd == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 167; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.unregister", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "twisted/python/_epoll.pyx":183
+ *         cdef epoll_event evt
+ *         # We don't have to fill evt.events for CTL_DEL.
+ *         evt.data.fd = fd             # <<<<<<<<<<<<<<
+ *         result = epoll_ctl(self.fd, CTL_DEL, fd, &evt)
+ *         if result == -1:
+ */
+  __pyx_v_evt.data.fd = __pyx_v_fd;
+
+  /* "twisted/python/_epoll.pyx":184
+ *         # We don't have to fill evt.events for CTL_DEL.
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, CTL_DEL, fd, &evt)             # <<<<<<<<<<<<<<
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))
+ */
+  __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__CTL_DEL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_t_2 = __Pyx_PyInt_AsInt(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 184; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_v_result = epoll_ctl(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd, __pyx_t_2, __pyx_v_fd, (&__pyx_v_evt));
+
+  /* "twisted/python/_epoll.pyx":185
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, CTL_DEL, fd, &evt)
+ *         if result == -1:             # <<<<<<<<<<<<<<
+ *             raise IOError(errno, strerror(errno))
+ * 
+ */
+  __pyx_t_3 = (__pyx_v_result == -1);
+  if (__pyx_t_3) {
+
+    /* "twisted/python/_epoll.pyx":186
+ *         result = epoll_ctl(self.fd, CTL_DEL, fd, &evt)
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))             # <<<<<<<<<<<<<<
+ * 
+ *     def modify(self, int fd, int events):
+ */
+    __pyx_t_1 = PyInt_FromLong(errno); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 186; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_1);
+    __pyx_t_4 = PyBytes_FromString(strerror(errno)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 186; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+    __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 186; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_5));
+    PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1);
+    __Pyx_GIVEREF(__pyx_t_1);
+    PyTuple_SET_ITEM(__pyx_t_5, 1, ((PyObject *)__pyx_t_4));
+    __Pyx_GIVEREF(((PyObject *)__pyx_t_4));
+    __pyx_t_1 = 0;
+    __pyx_t_4 = 0;
+    __pyx_t_4 = PyObject_Call(__pyx_builtin_IOError, ((PyObject *)__pyx_t_5), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 186; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_4);
+    __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0;
+    __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 186; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L5;
+  }
+  __pyx_L5:;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.unregister", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
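
unregister() only sets evt.data.fd since, as the quoted source notes, the event mask is not needed for CTL_DEL; it still passes &evt rather than NULL, which per epoll_ctl(2) keeps the call working on kernels older than 2.6.9, where a non-NULL event pointer was required even for EPOLL_CTL_DEL. A small C sketch of the same removal step, with del_fd as an illustrative name:

    #include <errno.h>
    #include <stdio.h>
    #include <string.h>
    #include <sys/epoll.h>

    static int del_fd(int epfd, int fd)
    {
        /* The event argument is ignored for EPOLL_CTL_DEL, but a dummy
         * struct is passed so pre-2.6.9 kernels do not reject the call. */
        struct epoll_event dummy;
        memset(&dummy, 0, sizeof dummy);

        if (epoll_ctl(epfd, EPOLL_CTL_DEL, fd, &dummy) == -1) {
            fprintf(stderr, "epoll_ctl(DEL): %s\n", strerror(errno));
            return -1;
        }
        return 0;
    }
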
+
+/* "twisted/python/_epoll.pyx":188
+ *             raise IOError(errno, strerror(errno))
+ * 
+ *     def modify(self, int fd, int events):             # <<<<<<<<<<<<<<
+ *         """
+ *         Modify the modified state of a file descriptor monitored by self.
+ */
+
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_6modify(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll_6modify[] = "\n        Modify the modified state of a file descriptor monitored by self.\n\n        This method is compatible with select.epoll.modify in Python 2.6.\n\n        Wrap epoll_ctl(2).\n\n        @type fd: C{int}\n        @param fd: File descriptor to modify\n\n        @type events: C{int}\n        @param events: A bit set of IN, OUT, PRI, ERR, HUP, and ET.\n\n        @raise IOError: Raised if the underlying epoll_ctl() cal [...]
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_6modify(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  int __pyx_v_fd;
+  int __pyx_v_events;
+  int __pyx_v_result;
+  struct epoll_event __pyx_v_evt;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_t_2;
+  int __pyx_t_3;
+  PyObject *__pyx_t_4 = NULL;
+  PyObject *__pyx_t_5 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__fd,&__pyx_n_s__events,0};
+  __Pyx_RefNannySetupContext("modify");
+  {
+    PyObject* values[2] = {0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__fd);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__events);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("modify", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "modify") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 2) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+    }
+    __pyx_v_fd = __Pyx_PyInt_AsInt(values[0]); if (unlikely((__pyx_v_fd == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_events = __Pyx_PyInt_AsInt(values[1]); if (unlikely((__pyx_v_events == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("modify", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 188; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.modify", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "twisted/python/_epoll.pyx":206
+ *         cdef int result
+ *         cdef epoll_event evt
+ *         evt.events = events             # <<<<<<<<<<<<<<
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, CTL_MOD, fd, &evt)
+ */
+  __pyx_v_evt.events = __pyx_v_events;
+
+  /* "twisted/python/_epoll.pyx":207
+ *         cdef epoll_event evt
+ *         evt.events = events
+ *         evt.data.fd = fd             # <<<<<<<<<<<<<<
+ *         result = epoll_ctl(self.fd, CTL_MOD, fd, &evt)
+ *         if result == -1:
+ */
+  __pyx_v_evt.data.fd = __pyx_v_fd;
+
+  /* "twisted/python/_epoll.pyx":208
+ *         evt.events = events
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, CTL_MOD, fd, &evt)             # <<<<<<<<<<<<<<
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))
+ */
+  __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__CTL_MOD); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 208; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_t_2 = __Pyx_PyInt_AsInt(__pyx_t_1); if (unlikely((__pyx_t_2 == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 208; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_v_result = epoll_ctl(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd, __pyx_t_2, __pyx_v_fd, (&__pyx_v_evt));
+
+  /* "twisted/python/_epoll.pyx":209
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, CTL_MOD, fd, &evt)
+ *         if result == -1:             # <<<<<<<<<<<<<<
+ *             raise IOError(errno, strerror(errno))
+ * 
+ */
+  __pyx_t_3 = (__pyx_v_result == -1);
+  if (__pyx_t_3) {
+
+    /* "twisted/python/_epoll.pyx":210
+ *         result = epoll_ctl(self.fd, CTL_MOD, fd, &evt)
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))             # <<<<<<<<<<<<<<
+ * 
+ *     def _control(self, int op, int fd, int events):
+ */
+    __pyx_t_1 = PyInt_FromLong(errno); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 210; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_1);
+    __pyx_t_4 = PyBytes_FromString(strerror(errno)); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 210; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+    __pyx_t_5 = PyTuple_New(2); if (unlikely(!__pyx_t_5)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 210; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_5));
+    PyTuple_SET_ITEM(__pyx_t_5, 0, __pyx_t_1);
+    __Pyx_GIVEREF(__pyx_t_1);
+    PyTuple_SET_ITEM(__pyx_t_5, 1, ((PyObject *)__pyx_t_4));
+    __Pyx_GIVEREF(((PyObject *)__pyx_t_4));
+    __pyx_t_1 = 0;
+    __pyx_t_4 = 0;
+    __pyx_t_4 = PyObject_Call(__pyx_builtin_IOError, ((PyObject *)__pyx_t_5), NULL); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 210; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_4);
+    __Pyx_DECREF(((PyObject *)__pyx_t_5)); __pyx_t_5 = 0;
+    __Pyx_Raise(__pyx_t_4, 0, 0, 0);
+    __Pyx_DECREF(__pyx_t_4); __pyx_t_4 = 0;
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 210; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_XDECREF(__pyx_t_5);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.modify", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "twisted/python/_epoll.pyx":212
+ *             raise IOError(errno, strerror(errno))
+ * 
+ *     def _control(self, int op, int fd, int events):             # <<<<<<<<<<<<<<
+ *         """
+ *         Modify the monitored state of a particular file descriptor.
+ */
+
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_7_control(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll_7_control[] = "\n        Modify the monitored state of a particular file descriptor.\n        \n        Wrap epoll_ctl(2).\n\n        @type op: C{int}\n        @param op: One of CTL_ADD, CTL_DEL, or CTL_MOD\n\n        @type fd: C{int}\n        @param fd: File descriptor to modify\n\n        @type events: C{int}\n        @param events: A bit set of IN, OUT, PRI, ERR, HUP, and ET.\n\n        @raise IOError: Raised if the underlying epol [...]
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_7_control(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  int __pyx_v_op;
+  int __pyx_v_fd;
+  int __pyx_v_events;
+  int __pyx_v_result;
+  struct epoll_event __pyx_v_evt;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  int __pyx_t_1;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  PyObject *__pyx_t_4 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__op,&__pyx_n_s__fd,&__pyx_n_s__events,0};
+  __Pyx_RefNannySetupContext("_control");
+  {
+    PyObject* values[3] = {0,0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  3: values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__op);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__fd);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("_control", 1, 3, 3, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+        case  2:
+        values[2] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__events);
+        if (likely(values[2])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("_control", 1, 3, 3, 2); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "_control") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 3) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+      values[2] = PyTuple_GET_ITEM(__pyx_args, 2);
+    }
+    __pyx_v_op = __Pyx_PyInt_AsInt(values[0]); if (unlikely((__pyx_v_op == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_fd = __Pyx_PyInt_AsInt(values[1]); if (unlikely((__pyx_v_fd == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_events = __Pyx_PyInt_AsInt(values[2]); if (unlikely((__pyx_v_events == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("_control", 1, 3, 3, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 212; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("twisted.python._epoll.epoll._control", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "twisted/python/_epoll.pyx":231
+ *         cdef int result
+ *         cdef epoll_event evt
+ *         evt.events = events             # <<<<<<<<<<<<<<
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, op, fd, &evt)
+ */
+  __pyx_v_evt.events = __pyx_v_events;
+
+  /* "twisted/python/_epoll.pyx":232
+ *         cdef epoll_event evt
+ *         evt.events = events
+ *         evt.data.fd = fd             # <<<<<<<<<<<<<<
+ *         result = epoll_ctl(self.fd, op, fd, &evt)
+ *         if result == -1:
+ */
+  __pyx_v_evt.data.fd = __pyx_v_fd;
+
+  /* "twisted/python/_epoll.pyx":233
+ *         evt.events = events
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, op, fd, &evt)             # <<<<<<<<<<<<<<
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))
+ */
+  __pyx_v_result = epoll_ctl(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd, __pyx_v_op, __pyx_v_fd, (&__pyx_v_evt));
+
+  /* "twisted/python/_epoll.pyx":234
+ *         evt.data.fd = fd
+ *         result = epoll_ctl(self.fd, op, fd, &evt)
+ *         if result == -1:             # <<<<<<<<<<<<<<
+ *             raise IOError(errno, strerror(errno))
+ * 
+ */
+  __pyx_t_1 = (__pyx_v_result == -1);
+  if (__pyx_t_1) {
+
+    /* "twisted/python/_epoll.pyx":235
+ *         result = epoll_ctl(self.fd, op, fd, &evt)
+ *         if result == -1:
+ *             raise IOError(errno, strerror(errno))             # <<<<<<<<<<<<<<
+ * 
+ *     def wait(self, unsigned int maxevents, int timeout):
+ */
+    __pyx_t_2 = PyInt_FromLong(errno); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 235; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_2);
+    __pyx_t_3 = PyBytes_FromString(strerror(errno)); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 235; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_3));
+    __pyx_t_4 = PyTuple_New(2); if (unlikely(!__pyx_t_4)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 235; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(((PyObject *)__pyx_t_4));
+    PyTuple_SET_ITEM(__pyx_t_4, 0, __pyx_t_2);
+    __Pyx_GIVEREF(__pyx_t_2);
+    PyTuple_SET_ITEM(__pyx_t_4, 1, ((PyObject *)__pyx_t_3));
+    __Pyx_GIVEREF(((PyObject *)__pyx_t_3));
+    __pyx_t_2 = 0;
+    __pyx_t_3 = 0;
+    __pyx_t_3 = PyObject_Call(__pyx_builtin_IOError, ((PyObject *)__pyx_t_4), NULL); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 235; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    __Pyx_GOTREF(__pyx_t_3);
+    __Pyx_DECREF(((PyObject *)__pyx_t_4)); __pyx_t_4 = 0;
+    __Pyx_Raise(__pyx_t_3, 0, 0, 0);
+    __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+    {__pyx_filename = __pyx_f[0]; __pyx_lineno = 235; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    goto __pyx_L6;
+  }
+  __pyx_L6:;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  __Pyx_XDECREF(__pyx_t_4);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll._control", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "twisted/python/_epoll.pyx":237
+ *             raise IOError(errno, strerror(errno))
+ * 
+ *     def wait(self, unsigned int maxevents, int timeout):             # <<<<<<<<<<<<<<
+ *         """
+ *         Wait for an I/O event, wrap epoll_wait(2).
+ */
+
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_8wait(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll_8wait[] = "\n        Wait for an I/O event, wrap epoll_wait(2).\n\n        @type maxevents: C{int}\n        @param maxevents: Maximum number of events returned.\n\n        @type timeout: C{int}\n        @param timeout: Maximum time in milliseconds waiting for events. 0\n            makes it return immediately whereas -1 makes it wait indefinitely.\n        \n        @raise IOError: Raised if the underlying epoll_wait() call fails.\n   [...]
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_8wait(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  unsigned int __pyx_v_maxevents;
+  int __pyx_v_timeout;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__maxevents,&__pyx_n_s__timeout,0};
+  __Pyx_RefNannySetupContext("wait");
+  {
+    PyObject* values[2] = {0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        values[0] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__maxevents);
+        if (likely(values[0])) kw_args--;
+        else goto __pyx_L5_argtuple_error;
+        case  1:
+        values[1] = PyDict_GetItem(__pyx_kwds, __pyx_n_s__timeout);
+        if (likely(values[1])) kw_args--;
+        else {
+          __Pyx_RaiseArgtupleInvalid("wait", 1, 2, 2, 1); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "wait") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else if (PyTuple_GET_SIZE(__pyx_args) != 2) {
+      goto __pyx_L5_argtuple_error;
+    } else {
+      values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+      values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+    }
+    __pyx_v_maxevents = __Pyx_PyInt_AsUnsignedInt(values[0]); if (unlikely((__pyx_v_maxevents == (unsigned int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    __pyx_v_timeout = __Pyx_PyInt_AsInt(values[1]); if (unlikely((__pyx_v_timeout == (int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("wait", 1, 2, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 237; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.wait", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "twisted/python/_epoll.pyx":250
+ *         @raise IOError: Raised if the underlying epoll_wait() call fails.
+ *         """
+ *         return call_epoll_wait(self.fd, maxevents, timeout)             # <<<<<<<<<<<<<<
+ * 
+ *     def poll(self, float timeout=-1, unsigned int maxevents=1024):
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_1 = __pyx_f_7twisted_6python_6_epoll_call_epoll_wait(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd, __pyx_v_maxevents, __pyx_v_timeout); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 250; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_r = __pyx_t_1;
+  __pyx_t_1 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.wait", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+/* "twisted/python/_epoll.pyx":252
+ *         return call_epoll_wait(self.fd, maxevents, timeout)
+ * 
+ *     def poll(self, float timeout=-1, unsigned int maxevents=1024):             # <<<<<<<<<<<<<<
+ *         """
+ *         Wait for an I/O event, wrap epoll_wait(2).
+ */
+
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_9poll(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds); /*proto*/
+static char __pyx_doc_7twisted_6python_6_epoll_5epoll_9poll[] = "\n        Wait for an I/O event, wrap epoll_wait(2).\n\n        This method is compatible with select.epoll.poll in Python 2.6.\n\n        @type maxevents: C{int}\n        @param maxevents: Maximum number of events returned.\n\n        @type timeout: C{int}\n        @param timeout: Maximum time waiting for events. 0 makes it return\n            immediately whereas -1 makes it wait indefinitely.\n        \n        @raise IOE [...]
+static PyObject *__pyx_pf_7twisted_6python_6_epoll_5epoll_9poll(PyObject *__pyx_v_self, PyObject *__pyx_args, PyObject *__pyx_kwds) {
+  float __pyx_v_timeout;
+  unsigned int __pyx_v_maxevents;
+  PyObject *__pyx_r = NULL;
+  __Pyx_RefNannyDeclarations
+  PyObject *__pyx_t_1 = NULL;
+  int __pyx_lineno = 0;
+  const char *__pyx_filename = NULL;
+  int __pyx_clineno = 0;
+  static PyObject **__pyx_pyargnames[] = {&__pyx_n_s__timeout,&__pyx_n_s__maxevents,0};
+  __Pyx_RefNannySetupContext("poll");
+  {
+    PyObject* values[2] = {0,0};
+    if (unlikely(__pyx_kwds)) {
+      Py_ssize_t kw_args;
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+      kw_args = PyDict_Size(__pyx_kwds);
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  0:
+        if (kw_args > 0) {
+          PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__timeout);
+          if (value) { values[0] = value; kw_args--; }
+        }
+        case  1:
+        if (kw_args > 0) {
+          PyObject* value = PyDict_GetItem(__pyx_kwds, __pyx_n_s__maxevents);
+          if (value) { values[1] = value; kw_args--; }
+        }
+      }
+      if (unlikely(kw_args > 0)) {
+        if (unlikely(__Pyx_ParseOptionalKeywords(__pyx_kwds, __pyx_pyargnames, 0, values, PyTuple_GET_SIZE(__pyx_args), "poll") < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 252; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+      }
+    } else {
+      switch (PyTuple_GET_SIZE(__pyx_args)) {
+        case  2: values[1] = PyTuple_GET_ITEM(__pyx_args, 1);
+        case  1: values[0] = PyTuple_GET_ITEM(__pyx_args, 0);
+        case  0: break;
+        default: goto __pyx_L5_argtuple_error;
+      }
+    }
+    if (values[0]) {
+      __pyx_v_timeout = __pyx_PyFloat_AsDouble(values[0]); if (unlikely((__pyx_v_timeout == (float)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 252; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    } else {
+      __pyx_v_timeout = ((float)-1.0);
+    }
+    if (values[1]) {
+      __pyx_v_maxevents = __Pyx_PyInt_AsUnsignedInt(values[1]); if (unlikely((__pyx_v_maxevents == (unsigned int)-1) && PyErr_Occurred())) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 252; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+    } else {
+      __pyx_v_maxevents = ((unsigned int)1024);
+    }
+  }
+  goto __pyx_L4_argument_unpacking_done;
+  __pyx_L5_argtuple_error:;
+  __Pyx_RaiseArgtupleInvalid("poll", 0, 0, 2, PyTuple_GET_SIZE(__pyx_args)); {__pyx_filename = __pyx_f[0]; __pyx_lineno = 252; __pyx_clineno = __LINE__; goto __pyx_L3_error;}
+  __pyx_L3_error:;
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.poll", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __Pyx_RefNannyFinishContext();
+  return NULL;
+  __pyx_L4_argument_unpacking_done:;
+
+  /* "twisted/python/_epoll.pyx":267
+ *         @raise IOError: Raised if the underlying epoll_wait() call fails.
+ *         """
+ *         return call_epoll_wait(self.fd, maxevents, <int>(timeout * 1000.0))             # <<<<<<<<<<<<<<
+ * 
+ * 
+ */
+  __Pyx_XDECREF(__pyx_r);
+  __pyx_t_1 = __pyx_f_7twisted_6python_6_epoll_call_epoll_wait(((struct __pyx_obj_7twisted_6python_6_epoll_epoll *)__pyx_v_self)->fd, __pyx_v_maxevents, ((int)(__pyx_v_timeout * 1000.0))); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 267; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_r = __pyx_t_1;
+  __pyx_t_1 = 0;
+  goto __pyx_L0;
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_AddTraceback("twisted.python._epoll.epoll.poll", __pyx_clineno, __pyx_lineno, __pyx_filename);
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
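`poll()` above differs from `wait()` only in its interface: it takes a float timeout in seconds (default -1, i.e. block indefinitely) plus `maxevents`, and multiplies the timeout by 1000 before calling `call_epoll_wait()`, so that it matches `select.epoll.poll` from Python 2.6. A hedged equivalence sketch, reusing the `ep` instance from the earlier example:

    # Both calls should reach call_epoll_wait() with a 2500 ms timeout and
    # room for 64 events; only the Python-level argument conventions differ.
    events_a = ep.poll(timeout=2.5, maxevents=64)  # seconds, select.epoll style
    events_b = ep.wait(64, 2500)                   # maxevents, milliseconds
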
+static PyObject *__pyx_tp_new_7twisted_6python_6_epoll_epoll(PyTypeObject *t, PyObject *a, PyObject *k) {
+  PyObject *o = (*t->tp_alloc)(t, 0);
+  if (!o) return 0;
+  return o;
+}
+
+static void __pyx_tp_dealloc_7twisted_6python_6_epoll_epoll(PyObject *o) {
+  {
+    PyObject *etype, *eval, *etb;
+    PyErr_Fetch(&etype, &eval, &etb);
+    ++Py_REFCNT(o);
+    __pyx_pf_7twisted_6python_6_epoll_5epoll_1__dealloc__(o);
+    if (PyErr_Occurred()) PyErr_WriteUnraisable(o);
+    --Py_REFCNT(o);
+    PyErr_Restore(etype, eval, etb);
+  }
+  (*Py_TYPE(o)->tp_free)(o);
+}
+
+static PyMethodDef __pyx_methods_7twisted_6python_6_epoll_epoll[] = {
+  {__Pyx_NAMESTR("close"), (PyCFunction)__pyx_pf_7twisted_6python_6_epoll_5epoll_2close, METH_NOARGS, __Pyx_DOCSTR(__pyx_doc_7twisted_6python_6_epoll_5epoll_2close)},
+  {__Pyx_NAMESTR("fileno"), (PyCFunction)__pyx_pf_7twisted_6python_6_epoll_5epoll_3fileno, METH_NOARGS, __Pyx_DOCSTR(__pyx_doc_7twisted_6python_6_epoll_5epoll_3fileno)},
+  {__Pyx_NAMESTR("register"), (PyCFunction)__pyx_pf_7twisted_6python_6_epoll_5epoll_4register, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(__pyx_doc_7twisted_6python_6_epoll_5epoll_4register)},
+  {__Pyx_NAMESTR("unregister"), (PyCFunction)__pyx_pf_7twisted_6python_6_epoll_5epoll_5unregister, METH_O, __Pyx_DOCSTR(__pyx_doc_7twisted_6python_6_epoll_5epoll_5unregister)},
+  {__Pyx_NAMESTR("modify"), (PyCFunction)__pyx_pf_7twisted_6python_6_epoll_5epoll_6modify, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(__pyx_doc_7twisted_6python_6_epoll_5epoll_6modify)},
+  {__Pyx_NAMESTR("_control"), (PyCFunction)__pyx_pf_7twisted_6python_6_epoll_5epoll_7_control, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(__pyx_doc_7twisted_6python_6_epoll_5epoll_7_control)},
+  {__Pyx_NAMESTR("wait"), (PyCFunction)__pyx_pf_7twisted_6python_6_epoll_5epoll_8wait, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(__pyx_doc_7twisted_6python_6_epoll_5epoll_8wait)},
+  {__Pyx_NAMESTR("poll"), (PyCFunction)__pyx_pf_7twisted_6python_6_epoll_5epoll_9poll, METH_VARARGS|METH_KEYWORDS, __Pyx_DOCSTR(__pyx_doc_7twisted_6python_6_epoll_5epoll_9poll)},
+  {0, 0, 0, 0}
+};
+
+static PyNumberMethods __pyx_tp_as_number_epoll = {
+  0, /*nb_add*/
+  0, /*nb_subtract*/
+  0, /*nb_multiply*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_divide*/
+  #endif
+  0, /*nb_remainder*/
+  0, /*nb_divmod*/
+  0, /*nb_power*/
+  0, /*nb_negative*/
+  0, /*nb_positive*/
+  0, /*nb_absolute*/
+  0, /*nb_nonzero*/
+  0, /*nb_invert*/
+  0, /*nb_lshift*/
+  0, /*nb_rshift*/
+  0, /*nb_and*/
+  0, /*nb_xor*/
+  0, /*nb_or*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_coerce*/
+  #endif
+  0, /*nb_int*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_long*/
+  #else
+  0, /*reserved*/
+  #endif
+  0, /*nb_float*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_oct*/
+  #endif
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_hex*/
+  #endif
+  0, /*nb_inplace_add*/
+  0, /*nb_inplace_subtract*/
+  0, /*nb_inplace_multiply*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*nb_inplace_divide*/
+  #endif
+  0, /*nb_inplace_remainder*/
+  0, /*nb_inplace_power*/
+  0, /*nb_inplace_lshift*/
+  0, /*nb_inplace_rshift*/
+  0, /*nb_inplace_and*/
+  0, /*nb_inplace_xor*/
+  0, /*nb_inplace_or*/
+  0, /*nb_floor_divide*/
+  0, /*nb_true_divide*/
+  0, /*nb_inplace_floor_divide*/
+  0, /*nb_inplace_true_divide*/
+  #if PY_VERSION_HEX >= 0x02050000
+  0, /*nb_index*/
+  #endif
+};
+
+static PySequenceMethods __pyx_tp_as_sequence_epoll = {
+  0, /*sq_length*/
+  0, /*sq_concat*/
+  0, /*sq_repeat*/
+  0, /*sq_item*/
+  0, /*sq_slice*/
+  0, /*sq_ass_item*/
+  0, /*sq_ass_slice*/
+  0, /*sq_contains*/
+  0, /*sq_inplace_concat*/
+  0, /*sq_inplace_repeat*/
+};
+
+static PyMappingMethods __pyx_tp_as_mapping_epoll = {
+  0, /*mp_length*/
+  0, /*mp_subscript*/
+  0, /*mp_ass_subscript*/
+};
+
+static PyBufferProcs __pyx_tp_as_buffer_epoll = {
+  #if PY_MAJOR_VERSION < 3
+  0, /*bf_getreadbuffer*/
+  #endif
+  #if PY_MAJOR_VERSION < 3
+  0, /*bf_getwritebuffer*/
+  #endif
+  #if PY_MAJOR_VERSION < 3
+  0, /*bf_getsegcount*/
+  #endif
+  #if PY_MAJOR_VERSION < 3
+  0, /*bf_getcharbuffer*/
+  #endif
+  #if PY_VERSION_HEX >= 0x02060000
+  0, /*bf_getbuffer*/
+  #endif
+  #if PY_VERSION_HEX >= 0x02060000
+  0, /*bf_releasebuffer*/
+  #endif
+};
+
+static PyTypeObject __pyx_type_7twisted_6python_6_epoll_epoll = {
+  PyVarObject_HEAD_INIT(0, 0)
+  __Pyx_NAMESTR("twisted.python._epoll.epoll"), /*tp_name*/
+  sizeof(struct __pyx_obj_7twisted_6python_6_epoll_epoll), /*tp_basicsize*/
+  0, /*tp_itemsize*/
+  __pyx_tp_dealloc_7twisted_6python_6_epoll_epoll, /*tp_dealloc*/
+  0, /*tp_print*/
+  0, /*tp_getattr*/
+  0, /*tp_setattr*/
+  #if PY_MAJOR_VERSION < 3
+  0, /*tp_compare*/
+  #else
+  0, /*reserved*/
+  #endif
+  0, /*tp_repr*/
+  &__pyx_tp_as_number_epoll, /*tp_as_number*/
+  &__pyx_tp_as_sequence_epoll, /*tp_as_sequence*/
+  &__pyx_tp_as_mapping_epoll, /*tp_as_mapping*/
+  0, /*tp_hash*/
+  0, /*tp_call*/
+  0, /*tp_str*/
+  0, /*tp_getattro*/
+  0, /*tp_setattro*/
+  &__pyx_tp_as_buffer_epoll, /*tp_as_buffer*/
+  Py_TPFLAGS_DEFAULT|Py_TPFLAGS_CHECKTYPES|Py_TPFLAGS_HAVE_NEWBUFFER|Py_TPFLAGS_BASETYPE, /*tp_flags*/
+  __Pyx_DOCSTR("\n    Represent a set of file descriptors being monitored for events.\n    "), /*tp_doc*/
+  0, /*tp_traverse*/
+  0, /*tp_clear*/
+  0, /*tp_richcompare*/
+  0, /*tp_weaklistoffset*/
+  0, /*tp_iter*/
+  0, /*tp_iternext*/
+  __pyx_methods_7twisted_6python_6_epoll_epoll, /*tp_methods*/
+  0, /*tp_members*/
+  0, /*tp_getset*/
+  0, /*tp_base*/
+  0, /*tp_dict*/
+  0, /*tp_descr_get*/
+  0, /*tp_descr_set*/
+  0, /*tp_dictoffset*/
+  __pyx_pf_7twisted_6python_6_epoll_5epoll___init__, /*tp_init*/
+  0, /*tp_alloc*/
+  __pyx_tp_new_7twisted_6python_6_epoll_epoll, /*tp_new*/
+  0, /*tp_free*/
+  0, /*tp_is_gc*/
+  0, /*tp_bases*/
+  0, /*tp_mro*/
+  0, /*tp_cache*/
+  0, /*tp_subclasses*/
+  0, /*tp_weaklist*/
+  0, /*tp_del*/
+  #if PY_VERSION_HEX >= 0x02060000
+  0, /*tp_version_tag*/
+  #endif
+};
+
+static PyMethodDef __pyx_methods[] = {
+  {0, 0, 0, 0}
+};
+
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef __pyx_moduledef = {
+    PyModuleDef_HEAD_INIT,
+    __Pyx_NAMESTR("_epoll"),
+    __Pyx_DOCSTR(__pyx_k_1), /* m_doc */
+    -1, /* m_size */
+    __pyx_methods /* m_methods */,
+    NULL, /* m_reload */
+    NULL, /* m_traverse */
+    NULL, /* m_clear */
+    NULL /* m_free */
+};
+#endif
+
+static __Pyx_StringTabEntry __pyx_string_tab[] = {
+  {&__pyx_n_s__CTL_ADD, __pyx_k__CTL_ADD, sizeof(__pyx_k__CTL_ADD), 0, 0, 1, 1},
+  {&__pyx_n_s__CTL_DEL, __pyx_k__CTL_DEL, sizeof(__pyx_k__CTL_DEL), 0, 0, 1, 1},
+  {&__pyx_n_s__CTL_MOD, __pyx_k__CTL_MOD, sizeof(__pyx_k__CTL_MOD), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLERR, __pyx_k__EPOLLERR, sizeof(__pyx_k__EPOLLERR), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLET, __pyx_k__EPOLLET, sizeof(__pyx_k__EPOLLET), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLHUP, __pyx_k__EPOLLHUP, sizeof(__pyx_k__EPOLLHUP), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLIN, __pyx_k__EPOLLIN, sizeof(__pyx_k__EPOLLIN), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLMSG, __pyx_k__EPOLLMSG, sizeof(__pyx_k__EPOLLMSG), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLOUT, __pyx_k__EPOLLOUT, sizeof(__pyx_k__EPOLLOUT), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLPRI, __pyx_k__EPOLLPRI, sizeof(__pyx_k__EPOLLPRI), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLRDBAND, __pyx_k__EPOLLRDBAND, sizeof(__pyx_k__EPOLLRDBAND), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLRDNORM, __pyx_k__EPOLLRDNORM, sizeof(__pyx_k__EPOLLRDNORM), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLWRBAND, __pyx_k__EPOLLWRBAND, sizeof(__pyx_k__EPOLLWRBAND), 0, 0, 1, 1},
+  {&__pyx_n_s__EPOLLWRNORM, __pyx_k__EPOLLWRNORM, sizeof(__pyx_k__EPOLLWRNORM), 0, 0, 1, 1},
+  {&__pyx_n_s__ERR, __pyx_k__ERR, sizeof(__pyx_k__ERR), 0, 0, 1, 1},
+  {&__pyx_n_s__ET, __pyx_k__ET, sizeof(__pyx_k__ET), 0, 0, 1, 1},
+  {&__pyx_n_s__HUP, __pyx_k__HUP, sizeof(__pyx_k__HUP), 0, 0, 1, 1},
+  {&__pyx_n_s__IN, __pyx_k__IN, sizeof(__pyx_k__IN), 0, 0, 1, 1},
+  {&__pyx_n_s__IOError, __pyx_k__IOError, sizeof(__pyx_k__IOError), 0, 0, 1, 1},
+  {&__pyx_n_s__MSG, __pyx_k__MSG, sizeof(__pyx_k__MSG), 0, 0, 1, 1},
+  {&__pyx_n_s__OUT, __pyx_k__OUT, sizeof(__pyx_k__OUT), 0, 0, 1, 1},
+  {&__pyx_n_s__PRI, __pyx_k__PRI, sizeof(__pyx_k__PRI), 0, 0, 1, 1},
+  {&__pyx_n_s__RDBAND, __pyx_k__RDBAND, sizeof(__pyx_k__RDBAND), 0, 0, 1, 1},
+  {&__pyx_n_s__RDNORM, __pyx_k__RDNORM, sizeof(__pyx_k__RDNORM), 0, 0, 1, 1},
+  {&__pyx_n_s__WRBAND, __pyx_k__WRBAND, sizeof(__pyx_k__WRBAND), 0, 0, 1, 1},
+  {&__pyx_n_s__WRNORM, __pyx_k__WRNORM, sizeof(__pyx_k__WRNORM), 0, 0, 1, 1},
+  {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1},
+  {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1},
+  {&__pyx_n_s__events, __pyx_k__events, sizeof(__pyx_k__events), 0, 0, 1, 1},
+  {&__pyx_n_s__fd, __pyx_k__fd, sizeof(__pyx_k__fd), 0, 0, 1, 1},
+  {&__pyx_n_s__maxevents, __pyx_k__maxevents, sizeof(__pyx_k__maxevents), 0, 0, 1, 1},
+  {&__pyx_n_s__op, __pyx_k__op, sizeof(__pyx_k__op), 0, 0, 1, 1},
+  {&__pyx_n_s__size, __pyx_k__size, sizeof(__pyx_k__size), 0, 0, 1, 1},
+  {&__pyx_n_s__timeout, __pyx_k__timeout, sizeof(__pyx_k__timeout), 0, 0, 1, 1},
+  {0, 0, 0, 0, 0, 0, 0}
+};
+static int __Pyx_InitCachedBuiltins(void) {
+  __pyx_builtin_IOError = __Pyx_GetName(__pyx_b, __pyx_n_s__IOError); if (!__pyx_builtin_IOError) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 98; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  return 0;
+  __pyx_L1_error:;
+  return -1;
+}
+
+static int __Pyx_InitCachedConstants(void) {
+  __Pyx_RefNannyDeclarations
+  __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants");
+  __Pyx_RefNannyFinishContext();
+  return 0;
+}
+
+static int __Pyx_InitGlobals(void) {
+  if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  return 0;
+  __pyx_L1_error:;
+  return -1;
+}
+
+#if PY_MAJOR_VERSION < 3
+PyMODINIT_FUNC init_epoll(void); /*proto*/
+PyMODINIT_FUNC init_epoll(void)
+#else
+PyMODINIT_FUNC PyInit__epoll(void); /*proto*/
+PyMODINIT_FUNC PyInit__epoll(void)
+#endif
+{
+  PyObject *__pyx_t_1 = NULL;
+  __Pyx_RefNannyDeclarations
+  #if CYTHON_REFNANNY
+  __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+  if (!__Pyx_RefNanny) {
+      PyErr_Clear();
+      __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+      if (!__Pyx_RefNanny)
+          Py_FatalError("failed to import 'refnanny' module");
+  }
+  #endif
+  __Pyx_RefNannySetupContext("PyMODINIT_FUNC PyInit__epoll(void)");
+  if ( __Pyx_check_binary_version() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  #ifdef __pyx_binding_PyCFunctionType_USED
+  if (__pyx_binding_PyCFunctionType_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  #endif
+  /*--- Library function declarations ---*/
+  /*--- Threads initialization code ---*/
+  #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
+  #ifdef WITH_THREAD /* Python build with threading support? */
+  PyEval_InitThreads();
+  #endif
+  #endif
+  /*--- Module creation code ---*/
+  #if PY_MAJOR_VERSION < 3
+  __pyx_m = Py_InitModule4(__Pyx_NAMESTR("_epoll"), __pyx_methods, __Pyx_DOCSTR(__pyx_k_1), 0, PYTHON_API_VERSION);
+  #else
+  __pyx_m = PyModule_Create(&__pyx_moduledef);
+  #endif
+  if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  #if PY_MAJOR_VERSION < 3
+  Py_INCREF(__pyx_m);
+  #endif
+  __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME));
+  if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  /*--- Initialize various global constants etc. ---*/
+  if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  if (__pyx_module_is_main_twisted__python___epoll) {
+    if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  }
+  /*--- Builtin init code ---*/
+  if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  /*--- Constants init code ---*/
+  if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  /*--- Global init code ---*/
+  /*--- Variable export code ---*/
+  /*--- Function export code ---*/
+  /*--- Type init code ---*/
+  if (PyType_Ready(&__pyx_type_7twisted_6python_6_epoll_epoll) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 106; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  {
+    PyObject *wrapper = __Pyx_GetAttrString((PyObject *)&__pyx_type_7twisted_6python_6_epoll_epoll, "__init__"); if (unlikely(!wrapper)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 106; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+    if (Py_TYPE(wrapper) == &PyWrapperDescr_Type) {
+      __pyx_wrapperbase_7twisted_6python_6_epoll_5epoll___init__ = *((PyWrapperDescrObject *)wrapper)->d_base;
+      __pyx_wrapperbase_7twisted_6python_6_epoll_5epoll___init__.doc = __pyx_doc_7twisted_6python_6_epoll_5epoll___init__;
+      ((PyWrapperDescrObject *)wrapper)->d_base = &__pyx_wrapperbase_7twisted_6python_6_epoll_5epoll___init__;
+    }
+  }
+  if (__Pyx_SetAttrString(__pyx_m, "epoll", (PyObject *)&__pyx_type_7twisted_6python_6_epoll_epoll) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 106; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_ptype_7twisted_6python_6_epoll_epoll = &__pyx_type_7twisted_6python_6_epoll_epoll;
+  /*--- Type import code ---*/
+  /*--- Variable import code ---*/
+  /*--- Function import code ---*/
+  /*--- Execution code ---*/
+
+  /* "twisted/python/_epoll.pyx":270
+ * 
+ * 
+ * CTL_ADD = EPOLL_CTL_ADD             # <<<<<<<<<<<<<<
+ * CTL_DEL = EPOLL_CTL_DEL
+ * CTL_MOD = EPOLL_CTL_MOD
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLL_CTL_ADD); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__CTL_ADD, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 270; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":271
+ * 
+ * CTL_ADD = EPOLL_CTL_ADD
+ * CTL_DEL = EPOLL_CTL_DEL             # <<<<<<<<<<<<<<
+ * CTL_MOD = EPOLL_CTL_MOD
+ * 
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLL_CTL_DEL); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 271; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__CTL_DEL, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 271; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":272
+ * CTL_ADD = EPOLL_CTL_ADD
+ * CTL_DEL = EPOLL_CTL_DEL
+ * CTL_MOD = EPOLL_CTL_MOD             # <<<<<<<<<<<<<<
+ * 
+ * IN = EPOLLIN = c_EPOLLIN
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLL_CTL_MOD); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 272; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__CTL_MOD, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 272; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":274
+ * CTL_MOD = EPOLL_CTL_MOD
+ * 
+ * IN = EPOLLIN = c_EPOLLIN             # <<<<<<<<<<<<<<
+ * OUT = EPOLLOUT = c_EPOLLOUT
+ * PRI = EPOLLPRI = c_EPOLLPRI
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLIN); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 274; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__IN, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 274; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLIN); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 274; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLIN, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 274; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":275
+ * 
+ * IN = EPOLLIN = c_EPOLLIN
+ * OUT = EPOLLOUT = c_EPOLLOUT             # <<<<<<<<<<<<<<
+ * PRI = EPOLLPRI = c_EPOLLPRI
+ * ERR = EPOLLERR = c_EPOLLERR
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLOUT); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 275; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__OUT, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 275; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLOUT); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 275; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLOUT, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 275; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":276
+ * IN = EPOLLIN = c_EPOLLIN
+ * OUT = EPOLLOUT = c_EPOLLOUT
+ * PRI = EPOLLPRI = c_EPOLLPRI             # <<<<<<<<<<<<<<
+ * ERR = EPOLLERR = c_EPOLLERR
+ * HUP = EPOLLHUP = c_EPOLLHUP
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLPRI); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__PRI, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLPRI); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLPRI, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 276; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":277
+ * OUT = EPOLLOUT = c_EPOLLOUT
+ * PRI = EPOLLPRI = c_EPOLLPRI
+ * ERR = EPOLLERR = c_EPOLLERR             # <<<<<<<<<<<<<<
+ * HUP = EPOLLHUP = c_EPOLLHUP
+ * ET = EPOLLET = c_EPOLLET
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLERR); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 277; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ERR, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 277; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLERR); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 277; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLERR, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 277; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":278
+ * PRI = EPOLLPRI = c_EPOLLPRI
+ * ERR = EPOLLERR = c_EPOLLERR
+ * HUP = EPOLLHUP = c_EPOLLHUP             # <<<<<<<<<<<<<<
+ * ET = EPOLLET = c_EPOLLET
+ * 
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLHUP); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 278; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__HUP, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 278; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLHUP); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 278; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLHUP, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 278; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":279
+ * ERR = EPOLLERR = c_EPOLLERR
+ * HUP = EPOLLHUP = c_EPOLLHUP
+ * ET = EPOLLET = c_EPOLLET             # <<<<<<<<<<<<<<
+ * 
+ * RDNORM = EPOLLRDNORM = c_EPOLLRDNORM
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLET); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 279; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__ET, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 279; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLET); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 279; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLET, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 279; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":281
+ * ET = EPOLLET = c_EPOLLET
+ * 
+ * RDNORM = EPOLLRDNORM = c_EPOLLRDNORM             # <<<<<<<<<<<<<<
+ * RDBAND = EPOLLRDBAND = c_EPOLLRDBAND
+ * WRNORM = EPOLLWRNORM = c_EPOLLWRNORM
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLRDNORM); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 281; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__RDNORM, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 281; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLRDNORM); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 281; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLRDNORM, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 281; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":282
+ * 
+ * RDNORM = EPOLLRDNORM = c_EPOLLRDNORM
+ * RDBAND = EPOLLRDBAND = c_EPOLLRDBAND             # <<<<<<<<<<<<<<
+ * WRNORM = EPOLLWRNORM = c_EPOLLWRNORM
+ * WRBAND = EPOLLWRBAND = c_EPOLLWRBAND
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLRDBAND); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 282; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__RDBAND, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 282; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLRDBAND); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 282; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLRDBAND, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 282; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":283
+ * RDNORM = EPOLLRDNORM = c_EPOLLRDNORM
+ * RDBAND = EPOLLRDBAND = c_EPOLLRDBAND
+ * WRNORM = EPOLLWRNORM = c_EPOLLWRNORM             # <<<<<<<<<<<<<<
+ * WRBAND = EPOLLWRBAND = c_EPOLLWRBAND
+ * MSG = EPOLLMSG = c_EPOLLMSG
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLWRNORM); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 283; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__WRNORM, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 283; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLWRNORM); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 283; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLWRNORM, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 283; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":284
+ * RDBAND = EPOLLRDBAND = c_EPOLLRDBAND
+ * WRNORM = EPOLLWRNORM = c_EPOLLWRNORM
+ * WRBAND = EPOLLWRBAND = c_EPOLLWRBAND             # <<<<<<<<<<<<<<
+ * MSG = EPOLLMSG = c_EPOLLMSG
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLWRBAND); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 284; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__WRBAND, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 284; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLWRBAND); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 284; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLWRBAND, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 284; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":285
+ * WRNORM = EPOLLWRNORM = c_EPOLLWRNORM
+ * WRBAND = EPOLLWRBAND = c_EPOLLWRBAND
+ * MSG = EPOLLMSG = c_EPOLLMSG             # <<<<<<<<<<<<<<
+ */
+  __pyx_t_1 = PyInt_FromLong(EPOLLMSG); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 285; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__MSG, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 285; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __pyx_t_1 = PyInt_FromLong(EPOLLMSG); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 285; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__EPOLLMSG, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 285; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/python/_epoll.pyx":1
+ * # Copyright (c) Twisted Matrix Laboratories.             # <<<<<<<<<<<<<<
+ * # See LICENSE for details.
+ * 
+ */
+  __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_1));
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_1)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0;
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  if (__pyx_m) {
+    __Pyx_AddTraceback("init twisted.python._epoll", __pyx_clineno, __pyx_lineno, __pyx_filename);
+    Py_DECREF(__pyx_m); __pyx_m = 0;
+  } else if (!PyErr_Occurred()) {
+    PyErr_SetString(PyExc_ImportError, "init twisted.python._epoll");
+  }
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+  #if PY_MAJOR_VERSION < 3
+  return;
+  #else
+  return __pyx_m;
+  #endif
+}
+
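The tail of the module initialiser above exports every event flag twice, once under a short alias (`IN`, `OUT`, `PRI`, ...) and once under its kernel-style name (`EPOLLIN`, `EPOLLOUT`, `EPOLLPRI`, ...), alongside the `CTL_ADD`/`CTL_DEL`/`CTL_MOD` opcodes. Since each pair is built from the same C constant, the two spellings compare equal; a small sketch (attribute names taken from the assignments above, values depend on the platform headers):

    from twisted.python import _epoll

    # Short aliases and kernel-style names carry the same bit values.
    assert _epoll.IN == _epoll.EPOLLIN
    assert _epoll.OUT == _epoll.EPOLLOUT
    assert _epoll.CTL_ADD != _epoll.CTL_DEL   # distinct control opcodes

    # A typical edge-triggered read/write interest mask.
    mask = _epoll.IN | _epoll.OUT | _epoll.ET
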
+/* Runtime support code */
+
+#if CYTHON_REFNANNY
+static __Pyx_RefNannyAPIStruct *__Pyx_RefNannyImportAPI(const char *modname) {
+    PyObject *m = NULL, *p = NULL;
+    void *r = NULL;
+    m = PyImport_ImportModule((char *)modname);
+    if (!m) goto end;
+    p = PyObject_GetAttrString(m, (char *)"RefNannyAPI");
+    if (!p) goto end;
+    r = PyLong_AsVoidPtr(p);
+end:
+    Py_XDECREF(p);
+    Py_XDECREF(m);
+    return (__Pyx_RefNannyAPIStruct *)r;
+}
+#endif /* CYTHON_REFNANNY */
+
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) {
+    PyObject *result;
+    result = PyObject_GetAttr(dict, name);
+    if (!result) {
+        if (dict != __pyx_b) {
+            PyErr_Clear();
+            result = PyObject_GetAttr(__pyx_b, name);
+        }
+        if (!result) {
+            PyErr_SetObject(PyExc_NameError, name);
+        }
+    }
+    return result;
+}
+
+static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) {
+    PyObject *tmp_type, *tmp_value, *tmp_tb;
+    PyThreadState *tstate = PyThreadState_GET();
+
+    tmp_type = tstate->curexc_type;
+    tmp_value = tstate->curexc_value;
+    tmp_tb = tstate->curexc_traceback;
+    tstate->curexc_type = type;
+    tstate->curexc_value = value;
+    tstate->curexc_traceback = tb;
+    Py_XDECREF(tmp_type);
+    Py_XDECREF(tmp_value);
+    Py_XDECREF(tmp_tb);
+}
+
+static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) {
+    PyThreadState *tstate = PyThreadState_GET();
+    *type = tstate->curexc_type;
+    *value = tstate->curexc_value;
+    *tb = tstate->curexc_traceback;
+
+    tstate->curexc_type = 0;
+    tstate->curexc_value = 0;
+    tstate->curexc_traceback = 0;
+}
+
+
+#if PY_MAJOR_VERSION < 3
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
+    /* cause is unused */
+    Py_XINCREF(type);
+    Py_XINCREF(value);
+    Py_XINCREF(tb);
+    /* First, check the traceback argument, replacing None with NULL. */
+    if (tb == Py_None) {
+        Py_DECREF(tb);
+        tb = 0;
+    }
+    else if (tb != NULL && !PyTraceBack_Check(tb)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: arg 3 must be a traceback or None");
+        goto raise_error;
+    }
+    /* Next, replace a missing value with None */
+    if (value == NULL) {
+        value = Py_None;
+        Py_INCREF(value);
+    }
+    #if PY_VERSION_HEX < 0x02050000
+    if (!PyClass_Check(type))
+    #else
+    if (!PyType_Check(type))
+    #endif
+    {
+        /* Raising an instance.  The value should be a dummy. */
+        if (value != Py_None) {
+            PyErr_SetString(PyExc_TypeError,
+                "instance exception may not have a separate value");
+            goto raise_error;
+        }
+        /* Normalize to raise <class>, <instance> */
+        Py_DECREF(value);
+        value = type;
+        #if PY_VERSION_HEX < 0x02050000
+            if (PyInstance_Check(type)) {
+                type = (PyObject*) ((PyInstanceObject*)type)->in_class;
+                Py_INCREF(type);
+            }
+            else {
+                type = 0;
+                PyErr_SetString(PyExc_TypeError,
+                    "raise: exception must be an old-style class or instance");
+                goto raise_error;
+            }
+        #else
+            type = (PyObject*) Py_TYPE(type);
+            Py_INCREF(type);
+            if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
+                PyErr_SetString(PyExc_TypeError,
+                    "raise: exception class must be a subclass of BaseException");
+                goto raise_error;
+            }
+        #endif
+    }
+
+    __Pyx_ErrRestore(type, value, tb);
+    return;
+raise_error:
+    Py_XDECREF(value);
+    Py_XDECREF(type);
+    Py_XDECREF(tb);
+    return;
+}
+
+#else /* Python 3+ */
+
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb, PyObject *cause) {
+    if (tb == Py_None) {
+        tb = 0;
+    } else if (tb && !PyTraceBack_Check(tb)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: arg 3 must be a traceback or None");
+        goto bad;
+    }
+    if (value == Py_None)
+        value = 0;
+
+    if (PyExceptionInstance_Check(type)) {
+        if (value) {
+            PyErr_SetString(PyExc_TypeError,
+                "instance exception may not have a separate value");
+            goto bad;
+        }
+        value = type;
+        type = (PyObject*) Py_TYPE(value);
+    } else if (!PyExceptionClass_Check(type)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: exception class must be a subclass of BaseException");
+        goto bad;
+    }
+
+    if (cause) {
+        PyObject *fixed_cause;
+        if (PyExceptionClass_Check(cause)) {
+            fixed_cause = PyObject_CallObject(cause, NULL);
+            if (fixed_cause == NULL)
+                goto bad;
+        }
+        else if (PyExceptionInstance_Check(cause)) {
+            fixed_cause = cause;
+            Py_INCREF(fixed_cause);
+        }
+        else {
+            PyErr_SetString(PyExc_TypeError,
+                            "exception causes must derive from "
+                            "BaseException");
+            goto bad;
+        }
+        if (!value) {
+            value = PyObject_CallObject(type, NULL);
+        }
+        PyException_SetCause(value, fixed_cause);
+    }
+
+    PyErr_SetObject(type, value);
+
+    if (tb) {
+        PyThreadState *tstate = PyThreadState_GET();
+        PyObject* tmp_tb = tstate->curexc_traceback;
+        if (tb != tmp_tb) {
+            Py_INCREF(tb);
+            tstate->curexc_traceback = tb;
+            Py_XDECREF(tmp_tb);
+        }
+    }
+
+bad:
+    return;
+}
+#endif
+
+static void __Pyx_RaiseDoubleKeywordsError(
+    const char* func_name,
+    PyObject* kw_name)
+{
+    PyErr_Format(PyExc_TypeError,
+        #if PY_MAJOR_VERSION >= 3
+        "%s() got multiple values for keyword argument '%U'", func_name, kw_name);
+        #else
+        "%s() got multiple values for keyword argument '%s'", func_name,
+        PyString_AS_STRING(kw_name));
+        #endif
+}
+
+static int __Pyx_ParseOptionalKeywords(
+    PyObject *kwds,
+    PyObject **argnames[],
+    PyObject *kwds2,
+    PyObject *values[],
+    Py_ssize_t num_pos_args,
+    const char* function_name)
+{
+    PyObject *key = 0, *value = 0;
+    Py_ssize_t pos = 0;
+    PyObject*** name;
+    PyObject*** first_kw_arg = argnames + num_pos_args;
+
+    while (PyDict_Next(kwds, &pos, &key, &value)) {
+        name = first_kw_arg;
+        while (*name && (**name != key)) name++;
+        if (*name) {
+            values[name-argnames] = value;
+        } else {
+            #if PY_MAJOR_VERSION < 3
+            if (unlikely(!PyString_CheckExact(key)) && unlikely(!PyString_Check(key))) {
+            #else
+            if (unlikely(!PyUnicode_CheckExact(key)) && unlikely(!PyUnicode_Check(key))) {
+            #endif
+                goto invalid_keyword_type;
+            } else {
+                for (name = first_kw_arg; *name; name++) {
+                    #if PY_MAJOR_VERSION >= 3
+                    if (PyUnicode_GET_SIZE(**name) == PyUnicode_GET_SIZE(key) &&
+                        PyUnicode_Compare(**name, key) == 0) break;
+                    #else
+                    if (PyString_GET_SIZE(**name) == PyString_GET_SIZE(key) &&
+                        _PyString_Eq(**name, key)) break;
+                    #endif
+                }
+                if (*name) {
+                    values[name-argnames] = value;
+                } else {
+                    /* unexpected keyword found */
+                    for (name=argnames; name != first_kw_arg; name++) {
+                        if (**name == key) goto arg_passed_twice;
+                        #if PY_MAJOR_VERSION >= 3
+                        if (PyUnicode_GET_SIZE(**name) == PyUnicode_GET_SIZE(key) &&
+                            PyUnicode_Compare(**name, key) == 0) goto arg_passed_twice;
+                        #else
+                        if (PyString_GET_SIZE(**name) == PyString_GET_SIZE(key) &&
+                            _PyString_Eq(**name, key)) goto arg_passed_twice;
+                        #endif
+                    }
+                    if (kwds2) {
+                        if (unlikely(PyDict_SetItem(kwds2, key, value))) goto bad;
+                    } else {
+                        goto invalid_keyword;
+                    }
+                }
+            }
+        }
+    }
+    return 0;
+arg_passed_twice:
+    __Pyx_RaiseDoubleKeywordsError(function_name, **name);
+    goto bad;
+invalid_keyword_type:
+    PyErr_Format(PyExc_TypeError,
+        "%s() keywords must be strings", function_name);
+    goto bad;
+invalid_keyword:
+    PyErr_Format(PyExc_TypeError,
+    #if PY_MAJOR_VERSION < 3
+        "%s() got an unexpected keyword argument '%s'",
+        function_name, PyString_AsString(key));
+    #else
+        "%s() got an unexpected keyword argument '%U'",
+        function_name, key);
+    #endif
+bad:
+    return -1;
+}
+
+static void __Pyx_RaiseArgtupleInvalid(
+    const char* func_name,
+    int exact,
+    Py_ssize_t num_min,
+    Py_ssize_t num_max,
+    Py_ssize_t num_found)
+{
+    Py_ssize_t num_expected;
+    const char *more_or_less;
+
+    if (num_found < num_min) {
+        num_expected = num_min;
+        more_or_less = "at least";
+    } else {
+        num_expected = num_max;
+        more_or_less = "at most";
+    }
+    if (exact) {
+        more_or_less = "exactly";
+    }
+    PyErr_Format(PyExc_TypeError,
+                 "%s() takes %s %"PY_FORMAT_SIZE_T"d positional argument%s (%"PY_FORMAT_SIZE_T"d given)",
+                 func_name, more_or_less, num_expected,
+                 (num_expected == 1) ? "" : "s", num_found);
+}
+
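`__Pyx_RaiseArgtupleInvalid()` is the helper behind the TypeError raised when a wrapped method receives the wrong number of positional arguments; for `poll()` it is invoked with `exact=0, num_min=0, num_max=2`, so three positional arguments trip the "at most" branch. A hedged illustration, again using an `ep` instance as above (the message text comes from the format string in this helper):

    # Three positional arguments hit the __pyx_L5_argtuple_error path and
    # should raise something like:
    #   TypeError: poll() takes at most 2 positional arguments (3 given)
    try:
        ep.poll(1.0, 64, 99)
    except TypeError as exc:
        print(exc)
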
+static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) {
+    const unsigned char neg_one = (unsigned char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned char" :
+                    "value too large to convert to unsigned char");
+            }
+            return (unsigned char)-1;
+        }
+        return (unsigned char)val;
+    }
+    return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
+static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) {
+    const unsigned short neg_one = (unsigned short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned short" :
+                    "value too large to convert to unsigned short");
+            }
+            return (unsigned short)-1;
+        }
+        return (unsigned short)val;
+    }
+    return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
+static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) {
+    const unsigned int neg_one = (unsigned int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned int" :
+                    "value too large to convert to unsigned int");
+            }
+            return (unsigned int)-1;
+        }
+        return (unsigned int)val;
+    }
+    return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
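`__Pyx_PyInt_AsUnsignedInt()` above is the conversion applied to the `maxevents` keyword during argument unpacking; a negative Python integer is rejected with an OverflowError instead of being wrapped around, so `epoll_wait()` is never reached. A hedged sketch of the observable behaviour (the exact wording may differ on 32-bit builds, which fall through to the unsigned-long path):

    # Negative maxevents cannot be represented as unsigned int.
    try:
        ep.poll(timeout=0, maxevents=-5)
    except OverflowError as exc:
        print(exc)  # e.g. "can't convert negative value to unsigned int"
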
+static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) {
+    const char neg_one = (char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to char" :
+                    "value too large to convert to char");
+            }
+            return (char)-1;
+        }
+        return (char)val;
+    }
+    return (char)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) {
+    const short neg_one = (short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to short" :
+                    "value too large to convert to short");
+            }
+            return (short)-1;
+        }
+        return (short)val;
+    }
+    return (short)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) {
+    const int neg_one = (int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to int" :
+                    "value too large to convert to int");
+            }
+            return (int)-1;
+        }
+        return (int)val;
+    }
+    return (int)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) {
+    const signed char neg_one = (signed char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed char" :
+                    "value too large to convert to signed char");
+            }
+            return (signed char)-1;
+        }
+        return (signed char)val;
+    }
+    return (signed char)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) {
+    const signed short neg_one = (signed short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed short" :
+                    "value too large to convert to signed short");
+            }
+            return (signed short)-1;
+        }
+        return (signed short)val;
+    }
+    return (signed short)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) {
+    const signed int neg_one = (signed int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed int" :
+                    "value too large to convert to signed int");
+            }
+            return (signed int)-1;
+        }
+        return (signed int)val;
+    }
+    return (signed int)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) {
+    const int neg_one = (int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to int" :
+                    "value too large to convert to int");
+            }
+            return (int)-1;
+        }
+        return (int)val;
+    }
+    return (int)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) {
+    const unsigned long neg_one = (unsigned long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to unsigned long");
+            return (unsigned long)-1;
+        }
+        return (unsigned long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to unsigned long");
+                return (unsigned long)-1;
+            }
+            return (unsigned long)PyLong_AsUnsignedLong(x);
+        } else {
+            return (unsigned long)PyLong_AsLong(x);
+        }
+    } else {
+        unsigned long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (unsigned long)-1;
+        val = __Pyx_PyInt_AsUnsignedLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) {
+    const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to unsigned PY_LONG_LONG");
+            return (unsigned PY_LONG_LONG)-1;
+        }
+        return (unsigned PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to unsigned PY_LONG_LONG");
+                return (unsigned PY_LONG_LONG)-1;
+            }
+            return (unsigned PY_LONG_LONG)PyLong_AsUnsignedLongLong(x);
+        } else {
+            return (unsigned PY_LONG_LONG)PyLong_AsLongLong(x);
+        }
+    } else {
+        unsigned PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (unsigned PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsUnsignedLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) {
+    const long neg_one = (long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to long");
+            return (long)-1;
+        }
+        return (long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to long");
+                return (long)-1;
+            }
+            return (long)PyLong_AsUnsignedLong(x);
+        } else {
+            return (long)PyLong_AsLong(x);
+        }
+    } else {
+        long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (long)-1;
+        val = __Pyx_PyInt_AsLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) {
+    const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to PY_LONG_LONG");
+            return (PY_LONG_LONG)-1;
+        }
+        return (PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to PY_LONG_LONG");
+                return (PY_LONG_LONG)-1;
+            }
+            return (PY_LONG_LONG)PyLong_AsUnsignedLongLong(x);
+        } else {
+            return (PY_LONG_LONG)PyLong_AsLongLong(x);
+        }
+    } else {
+        PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) {
+    const signed long neg_one = (signed long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to signed long");
+            return (signed long)-1;
+        }
+        return (signed long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to signed long");
+                return (signed long)-1;
+            }
+            return (signed long)PyLong_AsUnsignedLong(x);
+        } else {
+            return (signed long)PyLong_AsLong(x);
+        }
+    } else {
+        signed long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (signed long)-1;
+        val = __Pyx_PyInt_AsSignedLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) {
+    const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to signed PY_LONG_LONG");
+            return (signed PY_LONG_LONG)-1;
+        }
+        return (signed PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to signed PY_LONG_LONG");
+                return (signed PY_LONG_LONG)-1;
+            }
+            return (signed PY_LONG_LONG)PyLong_AsUnsignedLongLong(x);
+        } else {
+            return (signed PY_LONG_LONG)PyLong_AsLongLong(x);
+        }
+    } else {
+        signed PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (signed PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsSignedLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static int __Pyx_check_binary_version(void) {
+    char ctversion[4], rtversion[4];
+    PyOS_snprintf(ctversion, 4, "%d.%d", PY_MAJOR_VERSION, PY_MINOR_VERSION);
+    PyOS_snprintf(rtversion, 4, "%s", Py_GetVersion());
+    if (ctversion[0] != rtversion[0] || ctversion[2] != rtversion[2]) {
+        char message[200];
+        PyOS_snprintf(message, sizeof(message),
+                      "compiletime version %s of module '%.100s' "
+                      "does not match runtime version %s",
+                      ctversion, __Pyx_MODULE_NAME, rtversion);
+        #if PY_VERSION_HEX < 0x02050000
+        return PyErr_Warn(NULL, message);
+        #else
+        return PyErr_WarnEx(NULL, message, 1);
+        #endif
+    }
+    return 0;
+}
+
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+
+static void __Pyx_AddTraceback(const char *funcname, int __pyx_clineno,
+                               int __pyx_lineno, const char *__pyx_filename) {
+    PyObject *py_srcfile = 0;
+    PyObject *py_funcname = 0;
+    PyObject *py_globals = 0;
+    PyCodeObject *py_code = 0;
+    PyFrameObject *py_frame = 0;
+
+    #if PY_MAJOR_VERSION < 3
+    py_srcfile = PyString_FromString(__pyx_filename);
+    #else
+    py_srcfile = PyUnicode_FromString(__pyx_filename);
+    #endif
+    if (!py_srcfile) goto bad;
+    if (__pyx_clineno) {
+        #if PY_MAJOR_VERSION < 3
+        py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno);
+        #else
+        py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno);
+        #endif
+    }
+    else {
+        #if PY_MAJOR_VERSION < 3
+        py_funcname = PyString_FromString(funcname);
+        #else
+        py_funcname = PyUnicode_FromString(funcname);
+        #endif
+    }
+    if (!py_funcname) goto bad;
+    py_globals = PyModule_GetDict(__pyx_m);
+    if (!py_globals) goto bad;
+    py_code = PyCode_New(
+        0,            /*int argcount,*/
+        #if PY_MAJOR_VERSION >= 3
+        0,            /*int kwonlyargcount,*/
+        #endif
+        0,            /*int nlocals,*/
+        0,            /*int stacksize,*/
+        0,            /*int flags,*/
+        __pyx_empty_bytes, /*PyObject *code,*/
+        __pyx_empty_tuple,  /*PyObject *consts,*/
+        __pyx_empty_tuple,  /*PyObject *names,*/
+        __pyx_empty_tuple,  /*PyObject *varnames,*/
+        __pyx_empty_tuple,  /*PyObject *freevars,*/
+        __pyx_empty_tuple,  /*PyObject *cellvars,*/
+        py_srcfile,   /*PyObject *filename,*/
+        py_funcname,  /*PyObject *name,*/
+        __pyx_lineno,   /*int firstlineno,*/
+        __pyx_empty_bytes  /*PyObject *lnotab*/
+    );
+    if (!py_code) goto bad;
+    py_frame = PyFrame_New(
+        PyThreadState_GET(), /*PyThreadState *tstate,*/
+        py_code,             /*PyCodeObject *code,*/
+        py_globals,          /*PyObject *globals,*/
+        0                    /*PyObject *locals*/
+    );
+    if (!py_frame) goto bad;
+    py_frame->f_lineno = __pyx_lineno;
+    PyTraceBack_Here(py_frame);
+bad:
+    Py_XDECREF(py_srcfile);
+    Py_XDECREF(py_funcname);
+    Py_XDECREF(py_code);
+    Py_XDECREF(py_frame);
+}
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+    while (t->p) {
+        #if PY_MAJOR_VERSION < 3
+        if (t->is_unicode) {
+            *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+        } else if (t->intern) {
+            *t->p = PyString_InternFromString(t->s);
+        } else {
+            *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+        }
+        #else  /* Python 3+ has unicode identifiers */
+        if (t->is_unicode | t->is_str) {
+            if (t->intern) {
+                *t->p = PyUnicode_InternFromString(t->s);
+            } else if (t->encoding) {
+                *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+            } else {
+                *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+            }
+        } else {
+            *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+        }
+        #endif
+        if (!*t->p)
+            return -1;
+        ++t;
+    }
+    return 0;
+}
+
+/* Type Conversion Functions */
+
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+   int is_true = x == Py_True;
+   if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+   else return PyObject_IsTrue(x);
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) {
+  PyNumberMethods *m;
+  const char *name = NULL;
+  PyObject *res = NULL;
+#if PY_VERSION_HEX < 0x03000000
+  if (PyInt_Check(x) || PyLong_Check(x))
+#else
+  if (PyLong_Check(x))
+#endif
+    return Py_INCREF(x), x;
+  m = Py_TYPE(x)->tp_as_number;
+#if PY_VERSION_HEX < 0x03000000
+  if (m && m->nb_int) {
+    name = "int";
+    res = PyNumber_Int(x);
+  }
+  else if (m && m->nb_long) {
+    name = "long";
+    res = PyNumber_Long(x);
+  }
+#else
+  if (m && m->nb_int) {
+    name = "int";
+    res = PyNumber_Long(x);
+  }
+#endif
+  if (res) {
+#if PY_VERSION_HEX < 0x03000000
+    if (!PyInt_Check(res) && !PyLong_Check(res)) {
+#else
+    if (!PyLong_Check(res)) {
+#endif
+      PyErr_Format(PyExc_TypeError,
+                   "__%s__ returned non-%s (type %.200s)",
+                   name, name, Py_TYPE(res)->tp_name);
+      Py_DECREF(res);
+      return NULL;
+    }
+  }
+  else if (!PyErr_Occurred()) {
+    PyErr_SetString(PyExc_TypeError,
+                    "an integer is required");
+  }
+  return res;
+}
+
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
+  Py_ssize_t ival;
+  PyObject* x = PyNumber_Index(b);
+  if (!x) return -1;
+  ival = PyInt_AsSsize_t(x);
+  Py_DECREF(x);
+  return ival;
+}
+
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+#if PY_VERSION_HEX < 0x02050000
+   if (ival <= LONG_MAX)
+       return PyInt_FromLong((long)ival);
+   else {
+       unsigned char *bytes = (unsigned char *) &ival;
+       int one = 1; int little = (int)*(unsigned char*)&one;
+       return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0);
+   }
+#else
+   return PyInt_FromSize_t(ival);
+#endif
+}
+
+static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) {
+   unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x);
+   if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) {
+       return (size_t)-1;
+   } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) {
+       PyErr_SetString(PyExc_OverflowError,
+                       "value too large to convert to size_t");
+       return (size_t)-1;
+   }
+   return (size_t)val;
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/ThirdParty/Twisted/twisted/python/_epoll.pyx b/ThirdParty/Twisted/twisted/python/_epoll.pyx
new file mode 100644
index 0000000..b8d6aa7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/_epoll.pyx
@@ -0,0 +1,285 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Interface to epoll I/O event notification facility.
+"""
+
+# NOTE: The version of Pyrex you are using probably _does not work_ with
+# Python 2.5.  If you need to recompile this file, _make sure you are using
+# a version of Pyrex which works with Python 2.5_.  I am using 0.9.4.1 from
+# <http://codespeak.net/svn/lxml/pyrex/>. -exarkun
+
+cdef extern from "stdio.h":
+    cdef extern void *malloc(int)
+    cdef extern void free(void *)
+    cdef extern int close(int)
+
+cdef extern from "errno.h":
+    cdef extern int errno
+    cdef extern char *strerror(int)
+
+cdef extern from "string.h":
+    cdef extern void *memset(void* s, int c, int n)
+
+cdef extern from "stdint.h":
+    ctypedef unsigned long uint32_t
+    ctypedef unsigned long long uint64_t
+
+cdef extern from "sys/epoll.h":
+
+    cdef enum:
+        EPOLL_CTL_ADD = 1
+        EPOLL_CTL_DEL = 2
+        EPOLL_CTL_MOD = 3
+
+    cdef enum EPOLL_EVENTS:
+        c_EPOLLIN "EPOLLIN" = 0x001
+        c_EPOLLPRI "EPOLLPRI" = 0x002
+        c_EPOLLOUT "EPOLLOUT" = 0x004
+        c_EPOLLRDNORM "EPOLLRDNORM" = 0x040
+        c_EPOLLRDBAND "EPOLLRDBAND" = 0x080
+        c_EPOLLWRNORM "EPOLLWRNORM" = 0x100
+        c_EPOLLWRBAND "EPOLLWRBAND" = 0x200
+        c_EPOLLMSG "EPOLLMSG" = 0x400
+        c_EPOLLERR "EPOLLERR" = 0x008
+        c_EPOLLHUP "EPOLLHUP" = 0x010
+        c_EPOLLET "EPOLLET" = (1 << 31)
+
+    ctypedef union epoll_data_t:
+        void *ptr
+        int fd
+        uint32_t u32
+        uint64_t u64
+
+    cdef struct epoll_event:
+        uint32_t events
+        epoll_data_t data
+
+    int epoll_create(int size)
+    int epoll_ctl(int epfd, int op, int fd, epoll_event *event)
+    int epoll_wait(int epfd, epoll_event *events, int maxevents, int timeout)
+
+cdef extern from "Python.h":
+    ctypedef struct PyThreadState
+    cdef extern PyThreadState *PyEval_SaveThread()
+    cdef extern void PyEval_RestoreThread(PyThreadState*)
+
+cdef call_epoll_wait(int fd, unsigned int maxevents, int timeout_msec):
+    """
+    Wait for an I/O event, wrap epoll_wait(2).
+
+    @type fd: C{int}
+    @param fd: The epoll file descriptor number.
+
+    @type maxevents: C{int}
+    @param maxevents: Maximum number of events returned.
+
+    @type timeout_msec: C{int}
+    @param timeout_msec: Maximum time in milliseconds waiting for events. 0
+        makes it return immediately whereas -1 makes it wait indefinitely.
+
+    @raise IOError: Raised if the underlying epoll_wait() call fails.
+    """
+    cdef epoll_event *events
+    cdef int result
+    cdef int nbytes
+    cdef PyThreadState *_save
+
+    nbytes = sizeof(epoll_event) * maxevents
+    events = <epoll_event*>malloc(nbytes)
+    memset(events, 0, nbytes)
+    try:
+        _save = PyEval_SaveThread()
+        result = epoll_wait(fd, events, maxevents, timeout_msec)
+        PyEval_RestoreThread(_save)
+
+        if result == -1:
+            raise IOError(errno, strerror(errno))
+        results = []
+        for i from 0 <= i < result:
+            results.append((events[i].data.fd, <int>events[i].events))
+        return results
+    finally:
+        free(events)
+
+cdef class epoll:
+    """
+    Represent a set of file descriptors being monitored for events.
+    """
+
+    cdef int fd
+    cdef int initialized
+
+    def __init__(self, int size=1023):
+        """
+        The constructor arguments are compatible with select.poll.__init__.
+        """
+        self.fd = epoll_create(size)
+        if self.fd == -1:
+            raise IOError(errno, strerror(errno))
+        self.initialized = 1
+
+    def __dealloc__(self):
+        if self.initialized:
+            close(self.fd)
+            self.initialized = 0
+
+    def close(self):
+        """
+        Close the epoll file descriptor.
+        """
+        if self.initialized:
+            if close(self.fd) == -1:
+                raise IOError(errno, strerror(errno))
+            self.initialized = 0
+
+    def fileno(self):
+        """
+        Return the epoll file descriptor number.
+        """
+        return self.fd
+
+    def register(self, int fd, int events):
+        """
+        Add (register) a file descriptor to be monitored by self.
+
+        This method is compatible with select.epoll.register in Python 2.6.
+
+        Wrap epoll_ctl(2).
+
+        @type fd: C{int}
+        @param fd: File descriptor to modify
+
+        @type events: C{int}
+        @param events: A bit set of IN, OUT, PRI, ERR, HUP, and ET.
+
+        @raise IOError: Raised if the underlying epoll_ctl() call fails.
+        """
+        cdef int result
+        cdef epoll_event evt
+        evt.events = events
+        evt.data.fd = fd
+        result = epoll_ctl(self.fd, CTL_ADD, fd, &evt)
+        if result == -1:
+            raise IOError(errno, strerror(errno))
+
+    def unregister(self, int fd):
+        """
+        Remove (unregister) a file descriptor monitored by self.
+
+        This method is compatible with select.epoll.unregister in Python 2.6.
+
+        Wrap epoll_ctl(2).
+
+        @type fd: C{int}
+        @param fd: File descriptor to modify
+
+        @raise IOError: Raised if the underlying epoll_ctl() call fails.
+        """
+        cdef int result
+        cdef epoll_event evt
+        # We don't have to fill evt.events for CTL_DEL.
+        evt.data.fd = fd
+        result = epoll_ctl(self.fd, CTL_DEL, fd, &evt)
+        if result == -1:
+            raise IOError(errno, strerror(errno))
+
+    def modify(self, int fd, int events):
+        """
+        Modify the monitored state of a file descriptor monitored by self.
+
+        This method is compatible with select.epoll.modify in Python 2.6.
+
+        Wrap epoll_ctl(2).
+
+        @type fd: C{int}
+        @param fd: File descriptor to modify
+
+        @type events: C{int}
+        @param events: A bit set of IN, OUT, PRI, ERR, HUP, and ET.
+
+        @raise IOError: Raised if the underlying epoll_ctl() call fails.
+        """
+        cdef int result
+        cdef epoll_event evt
+        evt.events = events
+        evt.data.fd = fd
+        result = epoll_ctl(self.fd, CTL_MOD, fd, &evt)
+        if result == -1:
+            raise IOError(errno, strerror(errno))
+
+    def _control(self, int op, int fd, int events):
+        """
+        Modify the monitored state of a particular file descriptor.
+
+        Wrap epoll_ctl(2).
+
+        @type op: C{int}
+        @param op: One of CTL_ADD, CTL_DEL, or CTL_MOD
+
+        @type fd: C{int}
+        @param fd: File descriptor to modify
+
+        @type events: C{int}
+        @param events: A bit set of IN, OUT, PRI, ERR, HUP, and ET.
+
+        @raise IOError: Raised if the underlying epoll_ctl() call fails.
+        """
+        cdef int result
+        cdef epoll_event evt
+        evt.events = events
+        evt.data.fd = fd
+        result = epoll_ctl(self.fd, op, fd, &evt)
+        if result == -1:
+            raise IOError(errno, strerror(errno))
+
+    def wait(self, unsigned int maxevents, int timeout):
+        """
+        Wait for an I/O event, wrap epoll_wait(2).
+
+        @type maxevents: C{int}
+        @param maxevents: Maximum number of events returned.
+
+        @type timeout: C{int}
+        @param timeout: Maximum time in milliseconds waiting for events. 0
+            makes it return immediately whereas -1 makes it wait indefinitely.
+
+        @raise IOError: Raised if the underlying epoll_wait() call fails.
+        """
+        return call_epoll_wait(self.fd, maxevents, timeout)
+
+    def poll(self, float timeout=-1, unsigned int maxevents=1024):
+        """
+        Wait for an I/O event, wrap epoll_wait(2).
+
+        This method is compatible with select.epoll.poll in Python 2.6.
+
+        @type maxevents: C{int}
+        @param maxevents: Maximum number of events returned.
+
+        @type timeout: C{float}
+        @param timeout: Maximum time, in seconds, waiting for events. 0 makes it
+            return immediately whereas -1 makes it wait indefinitely.
+
+        @raise IOError: Raised if the underlying epoll_wait() call fails.
+        """
+        return call_epoll_wait(self.fd, maxevents, <int>(timeout * 1000.0))
+
+
+CTL_ADD = EPOLL_CTL_ADD
+CTL_DEL = EPOLL_CTL_DEL
+CTL_MOD = EPOLL_CTL_MOD
+
+IN = EPOLLIN = c_EPOLLIN
+OUT = EPOLLOUT = c_EPOLLOUT
+PRI = EPOLLPRI = c_EPOLLPRI
+ERR = EPOLLERR = c_EPOLLERR
+HUP = EPOLLHUP = c_EPOLLHUP
+ET = EPOLLET = c_EPOLLET
+
+RDNORM = EPOLLRDNORM = c_EPOLLRDNORM
+RDBAND = EPOLLRDBAND = c_EPOLLRDBAND
+WRNORM = EPOLLWRNORM = c_EPOLLWRNORM
+WRBAND = EPOLLWRBAND = c_EPOLLWRBAND
+MSG = EPOLLMSG = c_EPOLLMSG
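
For reference, a minimal usage sketch of the wrapper defined above, assuming the
extension has been compiled as twisted.python._epoll on a Linux host; the pipe is
only there to give the poller something readable, and the values are illustrative:

    # Minimal usage sketch for the _epoll wrapper (Linux only).
    import os
    from twisted.python import _epoll

    poller = _epoll.epoll()              # wraps epoll_create(2)
    r, w = os.pipe()
    try:
        poller.register(r, _epoll.IN)    # epoll_ctl(EPOLL_CTL_ADD)
        os.write(w, b"x")                # make the read end readable
        # wait(maxevents, timeout_msec) returns [(fd, eventmask), ...]
        for fd, events in poller.wait(4, 1000):
            print(fd, events & _epoll.IN)
        poller.unregister(r)             # epoll_ctl(EPOLL_CTL_DEL)
    finally:
        poller.close()
        os.close(r)
        os.close(w)
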
diff --git a/ThirdParty/Twisted/twisted/python/_initgroups.c b/ThirdParty/Twisted/twisted/python/_initgroups.c
new file mode 100644
index 0000000..93500b5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/_initgroups.c
@@ -0,0 +1,66 @@
+/*****************************************************************************
+
+  Copyright (c) 2002 Zope Corporation and Contributors. All Rights Reserved.
+  
+  This software is subject to the provisions of the Zope Public License,
+  Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+  THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+  WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+  WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+  FOR A PARTICULAR PURPOSE
+  
+ ****************************************************************************/
+
+/* 
+ * This has been reported for inclusion in Python here: http://bugs.python.org/issue7333
+ * Hopefully we may be able to remove this file in some years.
+ */
+
+#include "Python.h"
+
+#if defined(__unix__) || defined(unix) || defined(__NetBSD__) || defined(__MACH__) /* Mac OS X */
+
+#include <grp.h>
+#include <sys/types.h>
+#include <unistd.h>
+
+static PyObject *
+initgroups_initgroups(PyObject *self, PyObject *args)
+{
+	char *username;
+	unsigned int igid;
+	gid_t gid;
+
+	if (!PyArg_ParseTuple(args, "sI:initgroups", &username, &igid))
+		return NULL;
+
+	gid = igid;
+
+	if (initgroups(username, gid) == -1)
+		return PyErr_SetFromErrno(PyExc_OSError);
+
+	Py_INCREF(Py_None);
+	return Py_None;
+}
+
+static PyMethodDef InitgroupsMethods[] = {
+	{"initgroups",	initgroups_initgroups,	METH_VARARGS},
+	{NULL,		NULL}
+};
+
+#else
+
+/* This module is empty on non-UNIX systems. */
+
+static PyMethodDef InitgroupsMethods[] = {
+	{NULL,		NULL}
+};
+
+#endif /* defined(__unix__) || defined(unix) */
+
+void
+init_initgroups(void)
+{
+	Py_InitModule("_initgroups", InitgroupsMethods);
+}
+
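
A short usage sketch for the extension above, assuming it is built as
twisted.python._initgroups on a POSIX system; the user name is purely
illustrative and the call needs sufficient privileges to take effect:

    # Set supplementary groups for a user before dropping privileges.
    import pwd
    from twisted.python import _initgroups

    entry = pwd.getpwnam("nobody")                  # hypothetical target user
    _initgroups.initgroups("nobody", entry.pw_gid)
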
diff --git a/ThirdParty/Twisted/twisted/python/_inotify.py b/ThirdParty/Twisted/twisted/python/_inotify.py
new file mode 100644
index 0000000..b4692ba
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/_inotify.py
@@ -0,0 +1,101 @@
+# -*- test-case-name: twisted.internet.test.test_inotify -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Very low-level ctypes-based interface to Linux inotify(7).
+
+ctypes and a version of libc which supports inotify system calls are
+required.
+"""
+
+import ctypes
+import ctypes.util
+
+
+
+class INotifyError(Exception):
+    """
+    Unify all the possible exceptions that can be raised by the INotify API.
+    """
+
+
+
+def init():
+    """
+    Create an inotify instance and return the associated file descriptor.
+    """
+    fd = libc.inotify_init()
+    if fd < 0:
+        raise INotifyError("INotify initialization error.")
+    return fd
+
+
+
+def add(fd, path, mask):
+    """
+    Add a watch for the given path to the inotify file descriptor, and return
+    the watch descriptor.
+    """
+    wd = libc.inotify_add_watch(fd, path, mask)
+    if wd < 0:
+        raise INotifyError("Failed to add watch on '%r' - (%r)" % (path, wd))
+    return wd
+
+
+
+def remove(fd, wd):
+    """
+    Remove the given watch descriptor from the inotify file descriptor.
+    """
+    # When inotify_rm_watch returns -1 there's an error:
+    # The errno for this call can be either one of the following:
+    #  EBADF: fd is not a valid file descriptor.
+    #  EINVAL: The watch descriptor wd is not valid; or fd is
+    #          not an inotify file descriptor.
+    #
+    # if we can't access the errno here we cannot even raise
+    # an exception and we need to ignore the problem, one of
+    # the most common cases is when you remove a directory from
+    # the filesystem and that directory is observed. When inotify
+    # tries to call inotify_rm_watch with a non existing directory
+    # either of the 2 errors might come up because the files inside
+    # it might have events generated way before they were handled.
+    # Unfortunately only ctypes in Python 2.6 supports accessing errno:
+    #  http://bugs.python.org/issue1798 and in order to solve
+    # the problem for previous versions we need to introduce
+    # code that is quite complex:
+    #  http://stackoverflow.com/questions/661017/access-to-errno-from-python
+    #
+    # See #4310 for future resolution of this issue.
+    libc.inotify_rm_watch(fd, wd)
+
+
+
+def initializeModule(libc):
+    """
+    Initialize the module, checking that the expected APIs exist and setting
+    the argtypes and restype for C{inotify_init}, C{inotify_add_watch}, and
+    C{inotify_rm_watch}.
+    """
+    for function in ("inotify_add_watch", "inotify_init", "inotify_rm_watch"):
+        if getattr(libc, function, None) is None:
+            raise ImportError("libc6 2.4 or higher needed")
+    libc.inotify_init.argtypes = []
+    libc.inotify_init.restype = ctypes.c_int
+
+    libc.inotify_rm_watch.argtypes = [
+        ctypes.c_int, ctypes.c_int]
+    libc.inotify_rm_watch.restype = ctypes.c_int
+
+    libc.inotify_add_watch.argtypes = [
+        ctypes.c_int, ctypes.c_char_p, ctypes.c_uint32]
+    libc.inotify_add_watch.restype = ctypes.c_int
+
+
+
+name = ctypes.util.find_library('c')
+if not name:
+    raise ImportError("Can't find C library.")
+libc = ctypes.cdll.LoadLibrary(name)
+initializeModule(libc)
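
A sketch of driving this low-level wrapper directly; the higher-level
twisted.internet.inotify module is the usual consumer, and the mask value 0x2
(IN_MODIFY) and the watched path are only illustrative:

    # Low-level usage sketch for twisted.python._inotify.
    import os
    from twisted.python import _inotify

    fd = _inotify.init()                      # inotify_init(2)
    try:
        wd = _inotify.add(fd, b"/tmp", 0x2)   # 0x2 == IN_MODIFY
        # ... reading from fd would deliver struct inotify_event records ...
        _inotify.remove(fd, wd)
    finally:
        os.close(fd)
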
diff --git a/ThirdParty/Twisted/twisted/python/_reflectpy3.py b/ThirdParty/Twisted/twisted/python/_reflectpy3.py
new file mode 100644
index 0000000..c0451e9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/_reflectpy3.py
@@ -0,0 +1,325 @@
+# -*- test-case-name: twisted.python.test.test_reflectpy3 -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Reflection APIs which have been ported to Python 3.
+"""
+
+from __future__ import division, absolute_import
+
+import types, sys, os, traceback
+from twisted.python.compat import reraise, nativeString, NativeStringIO
+
+from twisted.python._utilpy3 import unsignedID
+
+
+def prefixedMethodNames(classObj, prefix):
+    """
+    A list of method names with a given prefix in a given class.
+    """
+    dct = {}
+    addMethodNamesToDict(classObj, dct, prefix)
+    return list(dct.keys())
+
+
+def addMethodNamesToDict(classObj, dict, prefix, baseClass=None):
+    """
+    addMethodNamesToDict(classObj, dict, prefix, baseClass=None) -> dict
+    This goes through 'classObj' (and its bases) and puts method names
+    starting with 'prefix' in 'dict' with a value of 1.  If baseClass isn't
+    None, methods will only be added if classObj is-a baseClass.
+
+    If the class in question has the methods 'prefix_methodname' and
+    'prefix_methodname2', the resulting dict should look something like:
+    {"methodname": 1, "methodname2": 1}.
+    """
+    for base in classObj.__bases__:
+        addMethodNamesToDict(base, dict, prefix, baseClass)
+
+    if baseClass is None or baseClass in classObj.__bases__:
+        for name, method in classObj.__dict__.items():
+            optName = name[len(prefix):]
+            if ((type(method) is types.FunctionType)
+                and (name[:len(prefix)] == prefix)
+                and (len(optName))):
+                dict[optName] = 1
+
+
+def prefixedMethods(obj, prefix=''):
+    """
+    A list of methods with a given prefix on a given instance.
+    """
+    dct = {}
+    accumulateMethods(obj, dct, prefix)
+    return list(dct.values())
+
+
+def accumulateMethods(obj, dict, prefix='', curClass=None):
+    """
+    accumulateMethods(instance, dict, prefix)
+    I recurse through the bases of instance.__class__, and add methods
+    beginning with 'prefix' to 'dict', in the form of
+    {'methodname':*instance*method_object}.
+    """
+    if not curClass:
+        curClass = obj.__class__
+    for base in curClass.__bases__:
+        accumulateMethods(obj, dict, prefix, base)
+
+    for name, method in curClass.__dict__.items():
+        optName = name[len(prefix):]
+        if ((type(method) is types.FunctionType)
+            and (name[:len(prefix)] == prefix)
+            and (len(optName))):
+            dict[optName] = getattr(obj, name)
+
+
+def namedModule(name):
+    """
+    Return a module given its name.
+    """
+    topLevel = __import__(name)
+    packages = name.split(".")[1:]
+    m = topLevel
+    for p in packages:
+        m = getattr(m, p)
+    return m
+
+
+def namedObject(name):
+    """
+    Get a fully named module-global object.
+    """
+    classSplit = name.split('.')
+    module = namedModule('.'.join(classSplit[:-1]))
+    return getattr(module, classSplit[-1])
+
+namedClass = namedObject # backwards compat
+
+
+
+class _NoModuleFound(Exception):
+    """
+    No module was found because none exists.
+    """
+
+
+class InvalidName(ValueError):
+    """
+    The given name is not a dot-separated list of Python objects.
+    """
+
+
+class ModuleNotFound(InvalidName):
+    """
+    The module associated with the given name doesn't exist and it can't be
+    imported.
+    """
+
+
+class ObjectNotFound(InvalidName):
+    """
+    The object associated with the given name doesn't exist and it can't be
+    imported.
+    """
+
+
+def _importAndCheckStack(importName):
+    """
+    Import the given name as a module, then walk the stack to determine whether
+    the failure was the module not existing, or some code in the module (for
+    example a dependent import) failing.  This can be helpful to determine
+    whether any actual application code was run.  For example, to distiguish
+    administrative error (entering the wrong module name), from programmer
+    error (writing buggy code in a module that fails to import).
+
+    @raise Exception: if something bad happens.  This can be any type of
+    exception, since nobody knows what loading some arbitrary code might do.
+
+    @raise _NoModuleFound: if no module was found.
+    """
+    try:
+        try:
+            return __import__(importName)
+        except ImportError:
+            excType, excValue, excTraceback = sys.exc_info()
+            while excTraceback:
+                execName = excTraceback.tb_frame.f_globals["__name__"]
+                if (execName is None or # python 2.4+, post-cleanup
+                    execName == importName): # python 2.3, no cleanup
+                    reraise(excValue, excTraceback)
+                excTraceback = excTraceback.tb_next
+            raise _NoModuleFound()
+    except:
+        # Necessary for cleaning up modules in 2.3.
+        sys.modules.pop(importName, None)
+        raise
+
+
+
+def namedAny(name):
+    """
+    Retrieve a Python object by its fully qualified name from the global Python
+    module namespace.  The first part of the name, that describes a module,
+    will be discovered and imported.  Each subsequent part of the name is
+    treated as the name of an attribute of the object specified by all of the
+    name which came before it.  For example, the fully-qualified name of this
+    object is 'twisted.python.reflect.namedAny'.
+
+    @type name: L{str}
+    @param name: The name of the object to return.
+
+    @raise InvalidName: If the name is an empty string, starts or ends with
+        a '.', or is otherwise syntactically incorrect.
+
+    @raise ModuleNotFound: If the name is syntactically correct but the
+        module it specifies cannot be imported because it does not appear to
+        exist.
+
+    @raise ObjectNotFound: If the name is syntactically correct, includes at
+        least one '.', but the module it specifies cannot be imported because
+        it does not appear to exist.
+
+    @raise AttributeError: If an attribute of an object along the way cannot be
+        accessed, or a module along the way is not found.
+
+    @return: the Python object identified by 'name'.
+    """
+    if not name:
+        raise InvalidName('Empty module name')
+
+    names = name.split('.')
+
+    # if the name starts or ends with a '.' or contains '..', the __import__
+    # will raise an 'Empty module name' error. This will provide a better error
+    # message.
+    if '' in names:
+        raise InvalidName(
+            "name must be a string giving a '.'-separated list of Python "
+            "identifiers, not %r" % (name,))
+
+    topLevelPackage = None
+    moduleNames = names[:]
+    while not topLevelPackage:
+        if moduleNames:
+            trialname = '.'.join(moduleNames)
+            try:
+                topLevelPackage = _importAndCheckStack(trialname)
+            except _NoModuleFound:
+                moduleNames.pop()
+        else:
+            if len(names) == 1:
+                raise ModuleNotFound("No module named %r" % (name,))
+            else:
+                raise ObjectNotFound('%r does not name an object' % (name,))
+
+    obj = topLevelPackage
+    for n in names[1:]:
+        obj = getattr(obj, n)
+
+    return obj
+
+
+
+def filenameToModuleName(fn):
+    """
+    Convert a filesystem path to the name of the Python module it represents.
+
+    This is aggressive about getting a module name back from a file; it will
+    always return a string.  Aggressive means 'sometimes wrong'; it won't look
+    at the Python path or try to do any error checking: don't use this method
+    unless you already know that the filename you're talking about is a Python
+    module.
+
+    @param fn: A filesystem path to a module or package; C{bytes} on Python 2,
+        C{bytes} or C{unicode} on Python 3.
+
+    @return: A hopefully importable module name.
+    @rtype: C{str}
+    """
+    if isinstance(fn, bytes):
+        initPy = b"__init__.py"
+    else:
+        initPy = "__init__.py"
+    fullName = os.path.abspath(fn)
+    base = os.path.basename(fn)
+    if not base:
+        # This happens when fn ends with a path separator; just skip it.
+        base = os.path.basename(fn[:-1])
+    modName = nativeString(os.path.splitext(base)[0])
+    while 1:
+        fullName = os.path.dirname(fullName)
+        if os.path.exists(os.path.join(fullName, initPy)):
+            modName = "%s.%s" % (
+                nativeString(os.path.basename(fullName)),
+                nativeString(modName))
+        else:
+            break
+    return modName
+
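
A worked example of the behaviour described above, assuming a hypothetical
layout in which each parent directory up to /src/mypkg contains an __init__.py
(the function only checks the filesystem, so the result depends on what is
actually on disk):

    # Hypothetical layout:
    #   /src/mypkg/__init__.py
    #   /src/mypkg/sub/__init__.py
    #   /src/mypkg/sub/mod.py
    from twisted.python._reflectpy3 import filenameToModuleName

    filenameToModuleName("/src/mypkg/sub/mod.py")   # -> "mypkg.sub.mod"
    filenameToModuleName("/src/mypkg/sub/")         # -> "mypkg.sub"
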
+
+
+def qual(clazz):
+    """
+    Return full import path of a class.
+    """
+    return clazz.__module__ + '.' + clazz.__name__
+
+
+
+def _determineClass(x):
+    try:
+        return x.__class__
+    except:
+        return type(x)
+
+
+def _determineClassName(x):
+    c = _determineClass(x)
+    try:
+        return c.__name__
+    except:
+        try:
+            return str(c)
+        except:
+            return '<BROKEN CLASS AT 0x%x>' % unsignedID(c)
+
+
+def _safeFormat(formatter, o):
+    """
+    Helper function for L{safe_repr} and L{safe_str}.
+    """
+    try:
+        return formatter(o)
+    except:
+        io = NativeStringIO()
+        traceback.print_exc(file=io)
+        className = _determineClassName(o)
+        tbValue = io.getvalue()
+        return "<%s instance at 0x%x with %s error:\n %s>" % (
+            className, unsignedID(o), formatter.__name__, tbValue)
+
+
+def safe_repr(o):
+    """
+    Returns a string representation of an object, or a string containing a
+    traceback, if that object's __repr__ raised an exception.
+
+    @param o: Any object.
+
+    @rtype: C{str}
+    """
+    return _safeFormat(repr, o)
+
+
+def safe_str(o):
+    """
+    Returns a string representation of an object, or a string containing a
+    traceback, if that object's __str__ raised an exception.
+
+    @param o: Any object.
+
+    @rtype: C{str}
+    """
+    return _safeFormat(str, o)
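
A short sketch of the lookup and safe-formatting helpers defined in this module
(importing from the private module added here; the class is a made-up example
whose __repr__ deliberately fails):

    from twisted.python._reflectpy3 import namedAny, qual, safe_repr

    join = namedAny("os.path.join")   # imports os.path, returns the function
    print(join("/tmp", "x"))          # "/tmp/x"

    class Broken(object):
        def __repr__(self):
            raise RuntimeError("boom")

    print(qual(Broken))               # "__main__.Broken" when run as a script
    print(safe_repr(Broken()))        # traceback text instead of an exception
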
diff --git a/ThirdParty/Twisted/twisted/python/_release.py b/ThirdParty/Twisted/twisted/python/_release.py
new file mode 100644
index 0000000..78d84f6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/_release.py
@@ -0,0 +1,1371 @@
+# -*- test-case-name: twisted.python.test.test_release -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Twisted's automated release system.
+
+This module is only for use within Twisted's release system. If you are anyone
+else, do not use it. The interface and behaviour will change without notice.
+
+Only Linux is supported by this code.  It should not be used by any tools
+which must run on multiple platforms (e.g. the setup.py script).
+"""
+
+import textwrap
+from datetime import date
+import re
+import sys
+import os
+from tempfile import mkdtemp
+import tarfile
+
+from subprocess import PIPE, STDOUT, Popen
+
+from twisted.python.versions import Version
+from twisted.python.filepath import FilePath
+from twisted.python.dist import twisted_subprojects
+from twisted.python.compat import execfile
+
+# This import is an example of why you shouldn't use this module unless you're
+# radix
+try:
+    from twisted.lore.scripts import lore
+except ImportError:
+    pass
+
+# The offset between a year and the corresponding major version number.
+VERSION_OFFSET = 2000
+
+
+def runCommand(args):
+    """
+    Execute a vector of arguments.
+
+    @type args: C{list} of C{str}
+    @param args: A list of arguments, the first of which will be used as the
+        executable to run.
+
+    @rtype: C{str}
+    @return: All of the standard output.
+
+    @raise CommandFailed: when the program exited with a non-0 exit code.
+    """
+    process = Popen(args, stdout=PIPE, stderr=STDOUT)
+    stdout = process.stdout.read()
+    exitCode = process.wait()
+    if exitCode < 0:
+        raise CommandFailed(None, -exitCode, stdout)
+    elif exitCode > 0:
+        raise CommandFailed(exitCode, None, stdout)
+    return stdout
+
+
+class CommandFailed(Exception):
+    """
+    Raised when a child process exits unsuccessfully.
+
+    @type exitStatus: C{int}
+    @ivar exitStatus: The exit status for the child process.
+
+    @type exitSignal: C{int}
+    @ivar exitSignal: The exit signal for the child process.
+
+    @type output: C{str}
+    @ivar output: The bytes read from stdout and stderr of the child process.
+    """
+    def __init__(self, exitStatus, exitSignal, output):
+        Exception.__init__(self, exitStatus, exitSignal, output)
+        self.exitStatus = exitStatus
+        self.exitSignal = exitSignal
+        self.output = output
+
+
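
For illustration, how runCommand and CommandFailed are typically used; the
commands shown are arbitrary, and this private module is, as noted above, only
meant for Twisted's own release tooling:

    from twisted.python._release import runCommand, CommandFailed

    print(runCommand(["echo", "hello"]))     # prints the captured stdout
    try:
        runCommand(["ls", "/no/such/path"])
    except CommandFailed as e:
        print(e.exitStatus, e.output)        # non-zero status plus ls's message
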
+
+def _changeVersionInFile(old, new, filename):
+    """
+    Replace the C{old} version number with the C{new} one in the given
+    C{filename}.
+    """
+    replaceInFile(filename, {old.base(): new.base()})
+
+
+
+def getNextVersion(version, now=None):
+    """
+    Calculate the version number for a new release of Twisted based on
+    the previous version number.
+
+    @param version: The previous version number.
+    @param now: (optional) The current date.
+    """
+    # XXX: This has no way of incrementing the patch number. Currently, we
+    # don't need it. See bug 2915. Jonathan Lange, 2007-11-20.
+    if now is None:
+        now = date.today()
+    major = now.year - VERSION_OFFSET
+    if major != version.major:
+        minor = 0
+    else:
+        minor = version.minor + 1
+    return Version(version.package, major, minor, 0)
+
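
A worked example of the scheme above, where the VERSION_OFFSET of 2000 makes
the major number track the two-digit year (dates and versions are illustrative):

    from datetime import date
    from twisted.python.versions import Version
    from twisted.python._release import getNextVersion

    # Same year as the previous release: bump the minor number.
    getNextVersion(Version("twisted", 13, 0, 0), now=date(2013, 5, 1))
    # -> Version("twisted", 13, 1, 0)

    # A later year: major becomes year - 2000 and minor resets to 0.
    getNextVersion(Version("twisted", 13, 1, 0), now=date(2014, 5, 17))
    # -> Version("twisted", 14, 0, 0)
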
+
+def changeAllProjectVersions(root, versionTemplate, today=None):
+    """
+    Change the version of all projects (including core and all subprojects).
+
+    If the current version of a project is pre-release, then also change the
+    versions in the current NEWS entries for that project.
+
+    @type root: L{FilePath}
+    @param root: The root of the Twisted source tree.
+    @type versionTemplate: L{Version}
+    @param versionTemplate: The version of all projects.  The name will be
+        replaced for each respective project.
+    @type today: C{str}
+    @param today: A YYYY-MM-DD formatted string. If not provided, defaults to
+        the current day, according to the system clock.
+    """
+    if not today:
+        today = date.today().strftime('%Y-%m-%d')
+    for project in findTwistedProjects(root):
+        if project.directory.basename() == "twisted":
+            packageName = "twisted"
+        else:
+            packageName = "twisted." + project.directory.basename()
+        oldVersion = project.getVersion()
+        newVersion = Version(packageName, versionTemplate.major,
+                             versionTemplate.minor, versionTemplate.micro,
+                             prerelease=versionTemplate.prerelease)
+
+        if oldVersion.prerelease:
+            builder = NewsBuilder()
+            builder._changeNewsVersion(
+                root.child("NEWS"), builder._getNewsName(project),
+                oldVersion, newVersion, today)
+            builder._changeNewsVersion(
+                project.directory.child("topfiles").child("NEWS"),
+                builder._getNewsName(project), oldVersion, newVersion,
+                today)
+
+        # The placement of the top-level README with respect to other files (e.g.
+        # _version.py) is sufficiently different from the others that we just
+        # have to handle it specially.
+        if packageName == "twisted":
+            _changeVersionInFile(
+                oldVersion, newVersion, root.child('README').path)
+
+        project.updateVersion(newVersion)
+
+
+
+
+class Project(object):
+    """
+    A representation of a project that has a version.
+
+    @ivar directory: A L{twisted.python.filepath.FilePath} pointing to the base
+        directory of a Twisted-style Python package. The package should contain
+        a C{_version.py} file and a C{topfiles} directory that contains a
+        C{README} file.
+    """
+
+    def __init__(self, directory):
+        self.directory = directory
+
+
+    def __repr__(self):
+        return '%s(%r)' % (
+            self.__class__.__name__, self.directory)
+
+
+    def getVersion(self):
+        """
+        @return: A L{Version} specifying the version number of the project
+        based on live python modules.
+        """
+        namespace = {}
+        execfile(self.directory.child("_version.py").path, namespace)
+        return namespace["version"]
+
+
+    def updateVersion(self, version):
+        """
+        Replace the existing version numbers in _version.py and README files
+        with the specified version.
+        """
+        oldVersion = self.getVersion()
+        replaceProjectVersion(self.directory.child("_version.py").path,
+                              version)
+        _changeVersionInFile(
+            oldVersion, version,
+            self.directory.child("topfiles").child("README").path)
+
+
+
+def findTwistedProjects(baseDirectory):
+    """
+    Find all Twisted-style projects beneath a base directory.
+
+    @param baseDirectory: A L{twisted.python.filepath.FilePath} to look inside.
+    @return: A list of L{Project}.
+    """
+    projects = []
+    for filePath in baseDirectory.walk():
+        if filePath.basename() == 'topfiles':
+            projectDirectory = filePath.parent()
+            projects.append(Project(projectDirectory))
+    return projects
+
+
+
+def updateTwistedVersionInformation(baseDirectory, now):
+    """
+    Update the version information for Twisted and all subprojects to the
+    date-based version number.
+
+    @param baseDirectory: Where to look for Twisted. If None, the function
+        infers the information from C{twisted.__file__}.
+    @param now: The current date (as L{datetime.date}). If None, it defaults
+        to today.
+    """
+    for project in findTwistedProjects(baseDirectory):
+        project.updateVersion(getNextVersion(project.getVersion(), now=now))
+
+
+def generateVersionFileData(version):
+    """
+    Generate the data to be placed into a _version.py file.
+
+    @param version: A version object.
+    """
+    if version.prerelease is not None:
+        prerelease = ", prerelease=%r" % (version.prerelease,)
+    else:
+        prerelease = ""
+    data = '''\
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version(%r, %s, %s, %s%s)
+''' % (version.package, version.major, version.minor, version.micro, prerelease)
+    return data
+
+
+def replaceProjectVersion(filename, newversion):
+    """
+    Write version specification code into the given filename, which
+    sets the version to the given version number.
+
+    @param filename: A filename which is most likely a "_version.py"
+        under some Twisted project.
+    @param newversion: A version object.
+    """
+    # XXX - this should be moved to Project and renamed to writeVersionFile.
+    # jml, 2007-11-15.
+    f = open(filename, 'w')
+    f.write(generateVersionFileData(newversion))
+    f.close()
+
+
+
+def replaceInFile(filename, oldToNew):
+    """
+    I replace each occurrence of the keys of `oldToNew' with the corresponding
+    values in `filename', using science.
+    """
+    os.rename(filename, filename+'.bak')
+    f = open(filename+'.bak')
+    d = f.read()
+    f.close()
+    for k,v in oldToNew.items():
+        d = d.replace(k, v)
+    f = open(filename + '.new', 'w')
+    f.write(d)
+    f.close()
+    os.rename(filename+'.new', filename)
+    os.unlink(filename+'.bak')
+
+
+
+class NoDocumentsFound(Exception):
+    """
+    Raised when no input documents are found.
+    """
+
+
+
+class LoreBuilderMixin(object):
+    """
+    Base class for builders which invoke lore.
+    """
+    def lore(self, arguments):
+        """
+        Run lore with the given arguments.
+
+        @param arguments: A C{list} of C{str} giving command line arguments to
+            lore which should be used.
+        """
+        options = lore.Options()
+        options.parseOptions(["--null"] + arguments)
+        lore.runGivenOptions(options)
+
+
+
+class DocBuilder(LoreBuilderMixin):
+    """
+    Generate HTML documentation for projects.
+    """
+
+    def build(self, version, resourceDir, docDir, template, apiBaseURL=None,
+              deleteInput=False):
+        """
+        Build the documentation in C{docDir} with Lore.
+
+        Input files ending in .xhtml will be considered. Output will be
+        written as .html files.
+
+        @param version: the version of the documentation to pass to lore.
+        @type version: C{str}
+
+        @param resourceDir: The directory which contains the toplevel index and
+            stylesheet file for this section of documentation.
+        @type resourceDir: L{twisted.python.filepath.FilePath}
+
+        @param docDir: The directory of the documentation.
+        @type docDir: L{twisted.python.filepath.FilePath}
+
+        @param template: The template used to generate the documentation.
+        @type template: L{twisted.python.filepath.FilePath}
+
+        @type apiBaseURL: C{str} or C{NoneType}
+        @param apiBaseURL: A format string which will be interpolated with the
+            fully-qualified Python name for each API link.  For example, to
+            generate the Twisted 8.0.0 documentation, pass
+            C{"http://twistedmatrix.com/documents/8.0.0/api/%s.html"}.
+
+        @param deleteInput: If True, the input documents will be deleted after
+            their output is generated.
+        @type deleteInput: C{bool}
+
+        @raise NoDocumentsFound: When there are no .xhtml files in the given
+            C{docDir}.
+        """
+        linkrel = self.getLinkrel(resourceDir, docDir)
+        inputFiles = docDir.globChildren("*.xhtml")
+        filenames = [x.path for x in inputFiles]
+        if not filenames:
+            raise NoDocumentsFound("No input documents found in %s" % (docDir,))
+        if apiBaseURL is not None:
+            arguments = ["--config", "baseurl=" + apiBaseURL]
+        else:
+            arguments = []
+        arguments.extend(["--config", "template=%s" % (template.path,),
+                          "--config", "ext=.html",
+                          "--config", "version=%s" % (version,),
+                          "--linkrel", linkrel] + filenames)
+        self.lore(arguments)
+        if deleteInput:
+            for inputFile in inputFiles:
+                inputFile.remove()
+
+
+    def getLinkrel(self, resourceDir, docDir):
+        """
+        Calculate a value appropriate for Lore's --linkrel option.
+
+        Lore's --linkrel option defines how to 'find' documents that are
+        linked to from TEMPLATE files (NOT document bodies). That is, it's a
+        prefix for links ('a' and 'link') in the template.
+
+        @param resourceDir: The directory which contains the toplevel index and
+            stylesheet file for this section of documentation.
+        @type resourceDir: L{twisted.python.filepath.FilePath}
+
+        @param docDir: The directory containing documents that must link to
+            C{resourceDir}.
+        @type docDir: L{twisted.python.filepath.FilePath}
+        """
+        if resourceDir != docDir:
+            return '/'.join(filePathDelta(docDir, resourceDir)) + "/"
+        else:
+            return ""
+
+
+
+class ManBuilder(LoreBuilderMixin):
+    """
+    Generate man pages of the different existing scripts.
+    """
+
+    def build(self, manDir):
+        """
+        Generate Lore input files from the man pages in C{manDir}.
+
+        Input files ending in .1 will be considered. Output will be written
+        as -man.xhtml files.
+
+        @param manDir: The directory of the man pages.
+        @type manDir: L{twisted.python.filepath.FilePath}
+
+        @raise NoDocumentsFound: When there are no .1 files in the given
+            C{manDir}.
+        """
+        inputFiles = manDir.globChildren("*.1")
+        filenames = [x.path for x in inputFiles]
+        if not filenames:
+            raise NoDocumentsFound("No manual pages found in %s" % (manDir,))
+        arguments = ["--input", "man",
+                     "--output", "lore",
+                     "--config", "ext=-man.xhtml"] + filenames
+        self.lore(arguments)
+
+
+
+class APIBuilder(object):
+    """
+    Generate API documentation from source files using
+    U{pydoctor<http://codespeak.net/~mwh/pydoctor/>}.  This requires
+    pydoctor to be installed and usable (which means you won't be able to
+    use it with Python 2.3).
+    """
+    def build(self, projectName, projectURL, sourceURL, packagePath,
+              outputPath):
+        """
+        Call pydoctor's entry point with options which will generate HTML
+        documentation for the specified package's API.
+
+        @type projectName: C{str}
+        @param projectName: The name of the package for which to generate
+            documentation.
+
+        @type projectURL: C{str}
+        @param projectURL: The location (probably an HTTP URL) of the project
+            on the web.
+
+        @type sourceURL: C{str}
+        @param sourceURL: The location (probably an HTTP URL) of the root of
+            the source browser for the project.
+
+        @type packagePath: L{FilePath}
+        @param packagePath: The path to the top-level of the package named by
+            C{projectName}.
+
+        @type outputPath: L{FilePath}
+        @param outputPath: An existing directory to which the generated API
+            documentation will be written.
+        """
+        from pydoctor.driver import main
+        main(
+            ["--project-name", projectName,
+             "--project-url", projectURL,
+             "--system-class", "pydoctor.twistedmodel.TwistedSystem",
+             "--project-base-dir", packagePath.parent().path,
+             "--html-viewsource-base", sourceURL,
+             "--add-package", packagePath.path,
+             "--html-output", outputPath.path,
+             "--html-write-function-pages", "--quiet", "--make-html"])
+
+
+
+class BookBuilder(LoreBuilderMixin):
+    """
+    Generate the LaTeX and PDF documentation.
+
+    The book is built by assembling a number of LaTeX documents.  Only the
+    overall document which describes how to assemble the documents is stored
+    in LaTeX in the source.  The rest of the documentation is generated from
+    Lore input files.  These are primarily XHTML files (of the particular
+    Lore subset), but man pages are stored in GROFF format.  BookBuilder
+    expects all of its input to be Lore XHTML format, so L{ManBuilder}
+    should be invoked first if the man pages are to be included in the
+    result (this is determined by the book LaTeX definition file).
+    Therefore, a sample usage of BookBuilder may look something like this::
+
+        man = ManBuilder()
+        man.build(FilePath("doc/core/man"))
+        book = BookBuilder()
+        book.build(
+            FilePath('doc/core/howto'),
+            [FilePath('doc/core/howto'), FilePath('doc/core/howto/tutorial'),
+             FilePath('doc/core/man'), FilePath('doc/core/specifications')],
+            FilePath('doc/core/howto/book.tex'), FilePath('/tmp/book.pdf'))
+    """
+    def run(self, command):
+        """
+        Execute a command in a child process and return the output.
+
+        @type command: C{str}
+        @param command: The shell command to run.
+
+        @raise CommandFailed: If the child process exits with an error.
+        """
+        return runCommand(command)
+
+
+    def buildTeX(self, howtoDir):
+        """
+        Build LaTeX files for lore input files in the given directory.
+
+        Input files ending in .xhtml will be considered. Output will be
+        written as .tex files.
+
+        @type howtoDir: L{FilePath}
+        @param howtoDir: A directory containing lore input files.
+
+        @raise ValueError: If C{howtoDir} does not exist.
+        """
+        if not howtoDir.exists():
+            raise ValueError("%r does not exist." % (howtoDir.path,))
+        self.lore(
+            ["--output", "latex",
+             "--config", "section"] +
+            [child.path for child in howtoDir.globChildren("*.xhtml")])
+
+
+    def buildPDF(self, bookPath, inputDirectory, outputPath):
+        """
+        Build a PDF from the given LaTeX book document.
+
+        @type bookPath: L{FilePath}
+        @param bookPath: The location of a LaTeX document defining a book.
+
+        @type inputDirectory: L{FilePath}
+        @param inputDirectory: The directory which the inputs of the book are
+            relative to.
+
+        @type outputPath: L{FilePath}
+        @param outputPath: The location to which to write the resulting book.
+        """
+        if not bookPath.basename().endswith(".tex"):
+            raise ValueError("Book filename must end with .tex")
+
+        workPath = FilePath(mkdtemp())
+        try:
+            startDir = os.getcwd()
+            try:
+                os.chdir(inputDirectory.path)
+
+                texToDVI = [
+                    "latex", "-interaction=nonstopmode",
+                    "-output-directory=" + workPath.path,
+                    bookPath.path]
+
+                # What I tell you three times is true!
+                # The first two invocations of latex on the book file allow it to
+                # correctly create page numbers for in-text references.  Why this is
+                # the case, I could not tell you. -exarkun
+                for i in range(3):
+                    self.run(texToDVI)
+
+                bookBaseWithoutExtension = bookPath.basename()[:-4]
+                dviPath = workPath.child(bookBaseWithoutExtension + ".dvi")
+                psPath = workPath.child(bookBaseWithoutExtension + ".ps")
+                pdfPath = workPath.child(bookBaseWithoutExtension + ".pdf")
+                self.run([
+                    "dvips", "-o", psPath.path, "-t", "letter", "-Ppdf",
+                    dviPath.path])
+                self.run(["ps2pdf13", psPath.path, pdfPath.path])
+                pdfPath.moveTo(outputPath)
+                workPath.remove()
+            finally:
+                os.chdir(startDir)
+        except:
+            workPath.moveTo(bookPath.parent().child(workPath.basename()))
+            raise
+
+
+    def build(self, baseDirectory, inputDirectories, bookPath, outputPath):
+        """
+        Build a PDF book from the given TeX book definition and directories
+        containing lore inputs.
+
+        @type baseDirectory: L{FilePath}
+        @param baseDirectory: The directory which the inputs of the book are
+            relative to.
+
+        @type inputDirectories: C{list} of L{FilePath}
+        @param inputDirectories: The paths which contain lore inputs to be
+            converted to LaTeX.
+
+        @type bookPath: L{FilePath}
+        @param bookPath: The location of a LaTeX document defining a book.
+
+        @type outputPath: L{FilePath}
+        @param outputPath: The location to which to write the resulting book.
+        """
+        for inputDir in inputDirectories:
+            self.buildTeX(inputDir)
+        self.buildPDF(bookPath, baseDirectory, outputPath)
+        for inputDirectory in inputDirectories:
+            for child in inputDirectory.children():
+                if child.splitext()[1] == ".tex" and child != bookPath:
+                    child.remove()
+
+
+
+class NewsBuilder(object):
+    """
+    Generate the new section of a NEWS file.
+
+    The C{_FEATURE}, C{_BUGFIX}, C{_DOC}, C{_REMOVAL}, and C{_MISC}
+    attributes of this class are symbolic names for the news entry types
+    which are supported.  Conveniently, they each also take on the value of
+    the file name extension which indicates a news entry of that type.
+
+    @cvar _headings: A C{dict} mapping one of the news entry types to the
+        heading to write out for that type of news entry.
+
+    @cvar _NO_CHANGES: A C{str} giving the text which appears when there are
+        no significant changes in a release.
+
+    @cvar _TICKET_HINT: A C{str} giving the text which appears at the top of
+        each news file and which should be kept at the top, not shifted down
+        with all the other content.  Put another way, this is the text after
+        which the new news text is inserted.
+    """
+
+    _FEATURE = ".feature"
+    _BUGFIX = ".bugfix"
+    _DOC = ".doc"
+    _REMOVAL = ".removal"
+    _MISC = ".misc"
+
+    _headings = {
+        _FEATURE: "Features",
+        _BUGFIX: "Bugfixes",
+        _DOC: "Improved Documentation",
+        _REMOVAL: "Deprecations and Removals",
+        _MISC: "Other",
+        }
+
+    _NO_CHANGES = "No significant changes have been made for this release.\n"
+
+    _TICKET_HINT = (
+        'Ticket numbers in this file can be looked up by visiting\n'
+        'http://twistedmatrix.com/trac/ticket/<number>\n'
+        '\n')
+
+    def _today(self):
+        """
+        Return today's date as a string in YYYY-MM-DD format.
+        """
+        return date.today().strftime('%Y-%m-%d')
+
+
+    def _findChanges(self, path, ticketType):
+        """
+        Load all the feature ticket summaries.
+
+        @param path: A L{FilePath} the direct children of which to search
+            for news entries.
+
+        @param ticketType: The type of news entries to search for.  One of
+            L{NewsBuilder._FEATURE}, L{NewsBuilder._BUGFIX},
+            L{NewsBuilder._REMOVAL}, or L{NewsBuilder._MISC}.
+
+        @return: A C{list} of two-tuples.  The first element is the ticket
+            number as an C{int}.  The second element of each tuple is the
+            description of the feature.
+        """
+        results = []
+        for child in path.children():
+            base, ext = os.path.splitext(child.basename())
+            if ext == ticketType:
+                results.append((
+                        int(base),
+                        ' '.join(child.getContent().splitlines())))
+        results.sort()
+        return results
+
+
+    def _formatHeader(self, header):
+        """
+        Format a header for a NEWS file.
+
+        A header is a title with '=' signs underlining it.
+
+        @param header: The header string to format.
+        @type header: C{str}
+        @return: A C{str} containing C{header}.
+        """
+        return header + '\n' + '=' * len(header) + '\n\n'
+
+
+    def _writeHeader(self, fileObj, header):
+        """
+        Write a version header to the given file.
+
+        @param fileObj: A file-like object to which to write the header.
+        @param header: The header to write to the file.
+        @type header: C{str}
+        """
+        fileObj.write(self._formatHeader(header))
+
+
+    def _writeSection(self, fileObj, header, tickets):
+        """
+        Write out one section (features, bug fixes, etc) to the given file.
+
+        @param fileObj: A file-like object to which to write the news section.
+
+        @param header: The header for the section to write.
+        @type header: C{str}
+
+        @param tickets: A C{list} of ticket information of the sort returned
+            by L{NewsBuilder._findChanges}.
+        """
+        if not tickets:
+            return
+
+        reverse = {}
+        for (ticket, description) in tickets:
+            reverse.setdefault(description, []).append(ticket)
+        for description in reverse:
+            reverse[description].sort()
+        reverse = reverse.items()
+        reverse.sort(key=lambda (descr, tickets): tickets[0])
+
+        fileObj.write(header + '\n' + '-' * len(header) + '\n')
+        for (description, relatedTickets) in reverse:
+            ticketList = ', '.join([
+                    '#' + str(ticket) for ticket in relatedTickets])
+            entry = ' - %s (%s)' % (description, ticketList)
+            entry = textwrap.fill(entry, subsequent_indent='   ')
+            fileObj.write(entry + '\n')
+        fileObj.write('\n')
+
+
+    def _writeMisc(self, fileObj, header, tickets):
+        """
+        Write out a miscellaneous-changes section to the given file.
+
+        @param fileObj: A file-like object to which to write the news section.
+
+        @param header: The header for the section to write.
+        @type header: C{str}
+
+        @param tickets: A C{list} of ticket information of the sort returned
+            by L{NewsBuilder._findChanges}.
+        """
+        if not tickets:
+            return
+
+        fileObj.write(header + '\n' + '-' * len(header) + '\n')
+        formattedTickets = []
+        for (ticket, ignored) in tickets:
+            formattedTickets.append('#' + str(ticket))
+        entry = ' - ' + ', '.join(formattedTickets)
+        entry = textwrap.fill(entry, subsequent_indent='   ')
+        fileObj.write(entry + '\n\n')
+
+
+    def build(self, path, output, header):
+        """
+        Load all of the change information from the given directory and write
+        it out to the given output file.
+
+        @param path: A directory (probably a I{topfiles} directory) containing
+            change information in the form of <ticket>.<change type> files.
+        @type path: L{FilePath}
+
+        @param output: The NEWS file to which the results will be prepended.
+        @type output: L{FilePath}
+
+        @param header: The top-level header to use when writing the news.
+        @type header: C{str}
+        """
+        changes = []
+        for part in (self._FEATURE, self._BUGFIX, self._DOC, self._REMOVAL):
+            tickets = self._findChanges(path, part)
+            if tickets:
+                changes.append((part, tickets))
+        misc = self._findChanges(path, self._MISC)
+
+        oldNews = output.getContent()
+        newNews = output.sibling('NEWS.new').open('w')
+        if oldNews.startswith(self._TICKET_HINT):
+            newNews.write(self._TICKET_HINT)
+            oldNews = oldNews[len(self._TICKET_HINT):]
+
+        self._writeHeader(newNews, header)
+        if changes:
+            for (part, tickets) in changes:
+                self._writeSection(newNews, self._headings.get(part), tickets)
+        else:
+            newNews.write(self._NO_CHANGES)
+            newNews.write('\n')
+        self._writeMisc(newNews, self._headings.get(self._MISC), misc)
+        newNews.write('\n')
+        newNews.write(oldNews)
+        newNews.close()
+        output.sibling('NEWS.new').moveTo(output)
+
+
+    def _getNewsName(self, project):
+        """
+        Return the name of C{project} that should appear in NEWS.
+
+        @param project: A L{Project}
+        @return: The name of C{project}.
+        """
+        name = project.directory.basename().title()
+        if name == 'Twisted':
+            name = 'Core'
+        return name
+
+
+    def _iterProjects(self, baseDirectory):
+        """
+        Iterate through the Twisted projects in C{baseDirectory}, yielding
+        everything we need to know to build news for them.
+
+        Yields C{topfiles}, C{news}, C{name}, C{version} for each sub-project
+        in reverse-alphabetical order. C{topfiles} is the L{FilePath} for the
+        topfiles directory, C{news} is the L{FilePath} for the NEWS file,
+        C{name} is the nice name of the project (as it should appear in the
+        NEWS file), and C{version} is the current version string for that
+        project.  Each project is yielded twice: once paired with the NEWS
+        file in its own I{topfiles} directory and once paired with the
+        aggregate NEWS file in C{baseDirectory}.
+
+        @param baseDirectory: A L{FilePath} representing the root directory
+            beneath which to find Twisted projects for which to generate
+            news (see L{findTwistedProjects}).
+        @type baseDirectory: L{FilePath}
+        """
+        # Get all the subprojects to generate news for
+        projects = findTwistedProjects(baseDirectory)
+        # And order them alphabetically for ease of reading
+        projects.sort(key=lambda proj: proj.directory.path)
+        # And generate them backwards since we write news by prepending to
+        # files.
+        projects.reverse()
+
+        for aggregateNews in [False, True]:
+            for project in projects:
+                topfiles = project.directory.child("topfiles")
+                if aggregateNews:
+                    news = baseDirectory.child("NEWS")
+                else:
+                    news = topfiles.child("NEWS")
+                name = self._getNewsName(project)
+                version = project.getVersion()
+                yield topfiles, news, name, version
+
+
+    def buildAll(self, baseDirectory):
+        """
+        Find all of the Twisted subprojects beneath C{baseDirectory}, update
+        their news files from the ticket change description files in their
+        I{topfiles} directories, and update the news file in C{baseDirectory}
+        with all of the news.
+
+        @param baseDirectory: A L{FilePath} representing the root directory
+            beneath which to find Twisted projects for which to generate
+            news (see L{findTwistedProjects}).
+        """
+        today = self._today()
+        for topfiles, news, name, version in self._iterProjects(baseDirectory):
+            self.build(
+                topfiles, news,
+                "Twisted %s %s (%s)" % (name, version.base(), today))
+
+
+    def _changeNewsVersion(self, news, name, oldVersion, newVersion, today):
+        """
+        Change all references to the current version number in a NEWS file to
+        refer to C{newVersion} instead.
+
+        @param news: The NEWS file to change.
+        @type news: L{FilePath}
+        @param name: The name of the project to change.
+        @type name: C{str}
+        @param oldVersion: The old version of the project.
+        @type oldVersion: L{Version}
+        @param newVersion: The new version of the project.
+        @type newVersion: L{Version}
+        @param today: A YYYY-MM-DD string representing today's date.
+        @type today: C{str}
+        """
+        newHeader = self._formatHeader(
+            "Twisted %s %s (%s)" % (name, newVersion.base(), today))
+        expectedHeaderRegex = re.compile(
+            r"Twisted %s %s \(\d{4}-\d\d-\d\d\)\n=+\n\n" % (
+                re.escape(name), re.escape(oldVersion.base())))
+        oldNews = news.getContent()
+        match = expectedHeaderRegex.search(oldNews)
+        if match:
+            oldHeader = match.group()
+            replaceInFile(news.path, {oldHeader: newHeader})
+
+
+    def main(self, args):
+        """
+        Build all news files.
+
+        @param args: The command line arguments to process.  This must contain
+            one string, the path to the base of the Twisted checkout for which
+            to build the news.
+        @type args: C{list} of C{str}
+        """
+        if len(args) != 1:
+            sys.exit("Must specify one argument: the path to the Twisted checkout")
+        self.buildAll(FilePath(args[0]))
+
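+# A minimal sketch of driving NewsBuilder.build() (the scratch directory,
+# ticket numbers and summaries below are invented purely for illustration):
+def _exampleBuildNews():
+    scratch = FilePath(mkdtemp())
+    topfiles = scratch.child("topfiles")
+    topfiles.createDirectory()
+    # One news fragment per ticket, named <ticket>.<change type>.
+    topfiles.child("5678.feature").setContent("Added a shiny new thing.")
+    topfiles.child("1234.bugfix").setContent("Fixed a crash on startup.")
+    news = scratch.child("NEWS")
+    news.setContent("")
+    # Prepends a dated header plus "Features" and "Bugfixes" sections.
+    NewsBuilder().build(topfiles, news, "Example 1.2.3 (2013-01-01)")
+    return news.getContent()
+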
+
+
+def filePathDelta(origin, destination):
+    """
+    Return a list of strings that represent C{destination} as a path relative
+    to C{origin}.
+
+    It is assumed that both paths represent directories, not files. That is to
+    say, the delta of L{twisted.python.filepath.FilePath} /foo/bar to
+    L{twisted.python.filepath.FilePath} /foo/baz will be C{../baz},
+    not C{baz}.
+
+    @type origin: L{twisted.python.filepath.FilePath}
+    @param origin: The origin of the relative path.
+
+    @type destination: L{twisted.python.filepath.FilePath}
+    @param destination: The destination of the relative path.
+    """
+    commonItems = 0
+    path1 = origin.path.split(os.sep)
+    path2 = destination.path.split(os.sep)
+    for elem1, elem2 in zip(path1, path2):
+        if elem1 == elem2:
+            commonItems += 1
+        else:
+            break
+    path = [".."] * (len(path1) - commonItems)
+    return path + path2[commonItems:]
+
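+# A quick worked example of the computation above (paths invented for
+# illustration; assumes a POSIX os.sep of "/"):
+def _exampleFilePathDelta():
+    # The common prefix is ["", "foo"], so we climb out of "bar/baz" and
+    # descend into "spam".
+    assert filePathDelta(FilePath("/foo/bar/baz"),
+                         FilePath("/foo/spam")) == ["..", "..", "spam"]
+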
+
+
+class DistributionBuilder(object):
+    """
+    A builder of Twisted distributions.
+
+    This knows how to build tarballs for Twisted and all of its subprojects.
+    """
+    from twisted.python.dist import twisted_subprojects as subprojects
+
+    def __init__(self, rootDirectory, outputDirectory, templatePath=None,
+                 apiBaseURL=None):
+        """
+        Create a distribution builder.
+
+        @param rootDirectory: root of a Twisted export which will populate
+            subsequent tarballs.
+        @type rootDirectory: L{FilePath}.
+
+        @param outputDirectory: The directory in which to create the tarballs.
+        @type outputDirectory: L{FilePath}
+
+        @param templatePath: Path to the template file that is used for the
+            howto documentation.
+        @type templatePath: L{FilePath}
+
+        @type apiBaseURL: C{str} or C{NoneType}
+        @param apiBaseURL: A format string which will be interpolated with the
+            fully-qualified Python name for each API link.  For example, to
+            generate the Twisted 8.0.0 documentation, pass
+            C{"http://twistedmatrix.com/documents/8.0.0/api/%s.html"}.
+        """
+        self.rootDirectory = rootDirectory
+        self.outputDirectory = outputDirectory
+        self.templatePath = templatePath
+        self.apiBaseURL = apiBaseURL
+        self.manBuilder = ManBuilder()
+        self.docBuilder = DocBuilder()
+
+
+    def _buildDocInDir(self, path, version, howtoPath):
+        """
+        Generate documentation in the given path, building man pages first if
+        necessary and swallowing errors (so that directories without lore
+        documentation in them are ignored).
+
+        @param path: The path containing documentation to build.
+        @type path: L{FilePath}
+        @param version: The version of the project to include in all generated
+            pages.
+        @type version: C{str}
+        @param howtoPath: The "resource path" as L{DocBuilder} describes it.
+        @type howtoPath: L{FilePath}
+        """
+        if self.templatePath is None:
+            self.templatePath = self.rootDirectory.descendant(
+                ["doc", "core", "howto", "template.tpl"])
+        if path.basename() == "man":
+            self.manBuilder.build(path)
+        if path.isdir():
+            try:
+                self.docBuilder.build(version, howtoPath, path,
+                    self.templatePath, self.apiBaseURL, True)
+            except NoDocumentsFound:
+                pass
+
+
+    def buildTwisted(self, version):
+        """
+        Build the main Twisted distribution in C{Twisted-<version>.tar.bz2}.
+
+        bin/admin is excluded.
+
+        @type version: C{str}
+        @param version: The version of Twisted to build.
+
+        @return: The tarball file.
+        @rtype: L{FilePath}.
+        """
+        releaseName = "Twisted-%s" % (version,)
+        buildPath = lambda *args: '/'.join((releaseName,) + args)
+
+        outputFile = self.outputDirectory.child(releaseName + ".tar.bz2")
+        tarball = tarfile.TarFile.open(outputFile.path, 'w:bz2')
+
+        docPath = self.rootDirectory.child("doc")
+
+        # Generate docs!
+        if docPath.isdir():
+            for subProjectDir in docPath.children():
+                if subProjectDir.isdir():
+                    for child in subProjectDir.walk():
+                        self._buildDocInDir(child, version,
+                            subProjectDir.child("howto"))
+
+        for binthing in self.rootDirectory.child("bin").children():
+            # bin/admin should not be included.
+            if binthing.basename() != "admin":
+                tarball.add(binthing.path,
+                            buildPath("bin", binthing.basename()))
+
+        for submodule in self.rootDirectory.child("twisted").children():
+            if submodule.basename() == "plugins":
+                for plugin in submodule.children():
+                    tarball.add(plugin.path, buildPath("twisted", "plugins",
+                                                       plugin.basename()))
+            else:
+                tarball.add(submodule.path, buildPath("twisted",
+                                                      submodule.basename()))
+
+        for docDir in self.rootDirectory.child("doc").children():
+            if docDir.basename() != "historic":
+                tarball.add(docDir.path, buildPath("doc", docDir.basename()))
+
+        for toplevel in self.rootDirectory.children():
+            if not toplevel.isdir():
+                tarball.add(toplevel.path, buildPath(toplevel.basename()))
+
+        tarball.close()
+
+        return outputFile
+
+
+    def buildCore(self, version):
+        """
+        Build a core distribution in C{TwistedCore-<version>.tar.bz2}.
+
+        This is very similar to L{buildSubProject}, but core tarballs and the
+        input are laid out slightly differently.
+
+         - scripts are in the top level of the C{bin} directory.
+         - code is included directly from the C{twisted} directory, excluding
+           subprojects.
+         - all plugins except the subproject plugins are included.
+
+        @type version: C{str}
+        @param version: The version of Twisted to build.
+
+        @return: The tarball file.
+        @rtype: L{FilePath}.
+        """
+        releaseName = "TwistedCore-%s" % (version,)
+        outputFile = self.outputDirectory.child(releaseName + ".tar.bz2")
+        buildPath = lambda *args: '/'.join((releaseName,) + args)
+        tarball = self._createBasicSubprojectTarball(
+            "core", version, outputFile)
+
+        # Include the bin directory for the subproject.
+        for path in self.rootDirectory.child("bin").children():
+            if not path.isdir():
+                tarball.add(path.path, buildPath("bin", path.basename()))
+
+        # Include all files within twisted/ that aren't part of a subproject.
+        for path in self.rootDirectory.child("twisted").children():
+            if path.basename() == "plugins":
+                for plugin in path.children():
+                    for subproject in self.subprojects:
+                        if plugin.basename() == "twisted_%s.py" % (subproject,):
+                            break
+                    else:
+                        tarball.add(plugin.path,
+                                    buildPath("twisted", "plugins",
+                                              plugin.basename()))
+            elif not path.basename() in self.subprojects + ["topfiles"]:
+                tarball.add(path.path, buildPath("twisted", path.basename()))
+
+        tarball.add(self.rootDirectory.child("twisted").child("topfiles").path,
+                    releaseName)
+        tarball.close()
+
+        return outputFile
+
+
+    def buildSubProject(self, projectName, version):
+        """
+        Build a subproject distribution in
+        C{Twisted<Projectname>-<version>.tar.bz2}.
+
+        @type projectName: C{str}
+        @param projectName: The lowercase name of the subproject to build.
+        @type version: C{str}
+        @param version: The version of Twisted to build.
+
+        @return: The tarball file.
+        @rtype: L{FilePath}.
+        """
+        releaseName = "Twisted%s-%s" % (projectName.capitalize(), version)
+        outputFile = self.outputDirectory.child(releaseName + ".tar.bz2")
+        buildPath = lambda *args: '/'.join((releaseName,) + args)
+        subProjectDir = self.rootDirectory.child("twisted").child(projectName)
+
+        tarball = self._createBasicSubprojectTarball(projectName, version,
+                                                     outputFile)
+
+        tarball.add(subProjectDir.child("topfiles").path, releaseName)
+
+        # Include all files in the subproject package except for topfiles.
+        for child in subProjectDir.children():
+            name = child.basename()
+            if name != "topfiles":
+                tarball.add(
+                    child.path,
+                    buildPath("twisted", projectName, name))
+
+        pluginsDir = self.rootDirectory.child("twisted").child("plugins")
+        # Include the plugin for the subproject.
+        pluginFileName = "twisted_%s.py" % (projectName,)
+        pluginFile = pluginsDir.child(pluginFileName)
+        if pluginFile.exists():
+            tarball.add(pluginFile.path,
+                        buildPath("twisted", "plugins", pluginFileName))
+
+        # Include the bin directory for the subproject.
+        binPath = self.rootDirectory.child("bin").child(projectName)
+        if binPath.isdir():
+            tarball.add(binPath.path, buildPath("bin"))
+        tarball.close()
+
+        return outputFile
+
+
+    def _createBasicSubprojectTarball(self, projectName, version, outputFile):
+        """
+        Helper method to create and fill a tarball with things common between
+        subprojects and core.
+
+        @param projectName: The subproject's name.
+        @type projectName: C{str}
+        @param version: The version of the release.
+        @type version: C{str}
+        @param outputFile: The location of the tar file to create.
+        @type outputFile: L{FilePath}
+        """
+        releaseName = "Twisted%s-%s" % (projectName.capitalize(), version)
+        buildPath = lambda *args: '/'.join((releaseName,) + args)
+
+        tarball = tarfile.TarFile.open(outputFile.path, 'w:bz2')
+
+        tarball.add(self.rootDirectory.child("LICENSE").path,
+                    buildPath("LICENSE"))
+
+        docPath = self.rootDirectory.child("doc").child(projectName)
+
+        if docPath.isdir():
+            for child in docPath.walk():
+                self._buildDocInDir(child, version, docPath.child("howto"))
+            tarball.add(docPath.path, buildPath("doc"))
+
+        return tarball
+
+
+
+class UncleanWorkingDirectory(Exception):
+    """
+    Raised when the working directory of an SVN checkout is unclean.
+    """
+
+
+
+class NotWorkingDirectory(Exception):
+    """
+    Raised when a directory does not appear to be an SVN working directory.
+    """
+
+
+
+def buildAllTarballs(checkout, destination, templatePath=None):
+    """
+    Build complete tarballs (including documentation) for Twisted and all
+    subprojects.
+
+    This should be called after the version numbers have been updated and
+    NEWS files created.
+
+    @type checkout: L{FilePath}
+    @param checkout: The SVN working copy from which a pristine source tree
+        will be exported.
+    @type destination: L{FilePath}
+    @param destination: The directory in which tarballs will be placed.
+    @type templatePath: L{FilePath}
+    @param templatePath: Location of the template file that is used for the
+        howto documentation.
+
+    @raise UncleanWorkingDirectory: If there are modifications to the
+        working directory of C{checkout}.
+    @raise NotWorkingDirectory: If the C{checkout} path is not an SVN checkout.
+    """
+    if not checkout.child(".svn").exists():
+        raise NotWorkingDirectory(
+            "%s does not appear to be an SVN working directory."
+            % (checkout.path,))
+    if runCommand(["svn", "st", checkout.path]).strip():
+        raise UncleanWorkingDirectory(
+            "There are local modifications to the SVN checkout in %s."
+            % (checkout.path,))
+
+    workPath = FilePath(mkdtemp())
+    export = workPath.child("export")
+    runCommand(["svn", "export", checkout.path, export.path])
+    twistedPath = export.child("twisted")
+    version = Project(twistedPath).getVersion()
+    versionString = version.base()
+
+    apiBaseURL = "http://twistedmatrix.com/documents/%s/api/%%s.html" % (
+        versionString)
+    if not destination.exists():
+        destination.createDirectory()
+    db = DistributionBuilder(export, destination, templatePath=templatePath,
+        apiBaseURL=apiBaseURL)
+
+    db.buildCore(versionString)
+    for subproject in twisted_subprojects:
+        if twistedPath.child(subproject).exists():
+            db.buildSubProject(subproject, versionString)
+
+    db.buildTwisted(versionString)
+    workPath.remove()
+
+
+class ChangeVersionsScript(object):
+    """
+    A thing for changing version numbers. See L{main}.
+    """
+    changeAllProjectVersions = staticmethod(changeAllProjectVersions)
+
+    def main(self, args):
+        """
+        Given a list of command-line arguments, change all the Twisted versions
+        in the current directory.
+
+        @type args: list of str
+        @param args: List of command line arguments.  This should only
+            contain the version number.
+        """
+        version_format = (
+            "Version should be of the form '1.2.3[pre4]'")
+        if len(args) != 1:
+            sys.exit("Must specify exactly one argument to change-versions")
+        version = args[0]
+        try:
+            major, minor, micro_and_pre = version.split(".")
+        except ValueError:
+            raise SystemExit(version_format)
+        if "pre" in micro_and_pre:
+            micro, pre = micro_and_pre.split("pre")
+        else:
+            micro = micro_and_pre
+            pre = None
+        try:
+            major = int(major)
+            minor = int(minor)
+            micro = int(micro)
+            if pre is not None:
+                pre = int(pre)
+        except ValueError:
+            raise SystemExit(version_format)
+        version_template = Version("Whatever",
+                                   major, minor, micro, prerelease=pre)
+        self.changeAllProjectVersions(FilePath("."), version_template)
+
+
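+# A small sketch mirroring the version-string parsing accepted above (the
+# helper and the example string are for illustration only): "11.0.0pre1"
+# becomes major 11, minor 0, micro 0, prerelease 1.
+def _exampleParseVersionString():
+    major, minor, micro_and_pre = "11.0.0pre1".split(".")
+    micro, pre = micro_and_pre.split("pre")
+    return Version("Whatever",
+                   int(major), int(minor), int(micro), prerelease=int(pre))
+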
+
+class BuildTarballsScript(object):
+    """
+    A thing for building release tarballs. See L{main}.
+    """
+    buildAllTarballs = staticmethod(buildAllTarballs)
+
+    def main(self, args):
+        """
+        Build all release tarballs.
+
+        @type args: list of C{str}
+        @param args: The command line arguments to process.  This must contain
+            at least two strings: the checkout directory and the destination
+            directory. An optional third string can be specified for the website
+            template file, used for building the howto documentation. If this
+            string isn't specified, the default template included in Twisted
+            will be used.
+        """
+        if len(args) < 2 or len(args) > 3:
+            sys.exit("Must specify at least two arguments: "
+                     "Twisted checkout and destination path. The optional third "
+                     "argument is the website template path.")
+        if len(args) == 2:
+            self.buildAllTarballs(FilePath(args[0]), FilePath(args[1]))
+        elif len(args) == 3:
+            self.buildAllTarballs(FilePath(args[0]), FilePath(args[1]),
+                                  FilePath(args[2]))
+
+
+
+class BuildAPIDocsScript(object):
+    """
+    A thing for building API documentation. See L{main}.
+    """
+
+    def buildAPIDocs(self, projectRoot, output):
+        """
+        Build the API documentation of Twisted, with our project policy.
+
+        @param projectRoot: A L{FilePath} representing the root of the Twisted
+            checkout.
+        @param output: A L{FilePath} pointing to the desired output directory.
+        """
+        version = Project(projectRoot.child("twisted")).getVersion()
+        versionString = version.base()
+        sourceURL = ("http://twistedmatrix.com/trac/browser/tags/releases/"
+                     "twisted-%s" % (versionString,))
+        apiBuilder = APIBuilder()
+        apiBuilder.build(
+            "Twisted",
+            "http://twistedmatrix.com/",
+            sourceURL,
+            projectRoot.child("twisted"),
+            output)
+
+
+    def main(self, args):
+        """
+        Build API documentation.
+
+        @type args: list of str
+        @param args: The command line arguments to process.  This must contain
+            two strings: the path to the root of the Twisted checkout, and a
+            path to an output directory.
+        """
+        if len(args) != 2:
+            sys.exit("Must specify two arguments: "
+                     "Twisted checkout and destination path")
+        self.buildAPIDocs(FilePath(args[0]), FilePath(args[1]))
diff --git a/ThirdParty/Twisted/twisted/python/_shellcomp.py b/ThirdParty/Twisted/twisted/python/_shellcomp.py
new file mode 100644
index 0000000..b776802
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/_shellcomp.py
@@ -0,0 +1,668 @@
+# -*- test-case-name: twisted.python.test.test_shellcomp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+No public APIs are provided by this module. Internal use only.
+
+This module implements dynamic tab-completion for any command that uses
+twisted.python.usage. Currently, only zsh is supported. Bash support may
+be added in the future.
+
+Maintainer: Eric P. Mangold - twisted AT teratorn DOT org
+
+In order for zsh completion to take place the shell must be able to find an
+appropriate "stub" file ("completion function") that invokes this code and
+displays the results to the user.
+
+The stub used for Twisted commands is in the file C{twisted-completion.zsh},
+which is also included in the official Zsh distribution at
+C{Completion/Unix/Command/_twisted}. Use this file as a basis for completion
+functions for your own commands. You should only need to change the first line
+to something like C{#compdef mycommand}.
+
+The main public documentation exists in the L{twisted.python.usage.Options}
+docstring, the L{twisted.python.usage.Completions} docstring, and the 
+Options howto.
+"""
+import itertools, getopt, inspect
+
+from twisted.python import reflect, util, usage
+
+
+
+def shellComplete(config, cmdName, words, shellCompFile):
+    """
+    Perform shell completion.
+
+    A completion function (shell script) is generated for the requested
+    shell and written to C{shellCompFile}, typically C{stdout}. The result
+    is then eval'd by the shell to produce the desired completions.
+
+    @type config: L{twisted.python.usage.Options}
+    @param config: The L{twisted.python.usage.Options} instance to generate
+        completions for.
+
+    @type cmdName: C{str}
+    @param cmdName: The name of the command we're generating completions for.
+        In the case of zsh, this is used to print an appropriate
+        "#compdef $CMD" line at the top of the output. This is
+        not necessary for the functionality of the system, but it
+        helps in debugging, since the output we produce is properly
+        formed and may be saved in a file and used as a stand-alone
+        completion function.
+
+    @type words: C{list} of C{str}
+    @param words: The raw command-line words passed to us by the shell
+        stub function. argv[0] has already been stripped off.
+
+    @type shellCompFile: C{file}
+    @param shellCompFile: The file to write completion data to.
+    """
+    # shellName is provided for forward-compatibility. It is not used,
+    # since we currently only support zsh.
+    shellName, position = words[-1].split(":")
+    position = int(position)
+    # zsh gives the completion position ($CURRENT) as a 1-based index,
+    # and argv[0] has already been stripped off, so we subtract 2 to
+    # get the real 0-based index.
+    position -= 2
+    cWord = words[position]
+
+    # since the user may hit TAB at any time, we may have been called with an
+    # incomplete command-line that would generate getopt errors if parsed
+    # verbatim. However, we must do *some* parsing in order to determine if
+    # there is a specific subcommand that we need to provide completion for.
+    # So, to make the command-line more sane we work backwards from the
+    # current completion position and strip off all words until we find one
+    # that "looks" like a subcommand. It may in fact be the argument to a
+    # normal command-line option, but that won't matter for our purposes.
+    while position >= 1:
+        if words[position - 1].startswith("-"):
+            position -= 1
+        else:
+            break
+    words = words[:position]
+
+    subCommands = getattr(config, 'subCommands', None)
+    if subCommands:
+        # OK, this command supports sub-commands, so let's see if we have been
+        # given one.
+
+        # If the command-line arguments are not valid then we won't be able to
+        # sanely detect the sub-command, so just generate completions as if no
+        # sub-command was found.
+        args = None
+        try:
+            opts, args = getopt.getopt(words,
+                                       config.shortOpt, config.longOpt)
+        except getopt.error:
+            pass
+
+        if args:
+            # yes, we have a subcommand. Try to find it.
+            for (cmd, short, parser, doc) in config.subCommands:
+                if args[0] == cmd or args[0] == short:
+                    subOptions = parser()
+                    subOptions.parent = config
+
+                    gen = ZshSubcommandBuilder(subOptions, config, cmdName,
+                                               shellCompFile)
+                    gen.write()
+                    return
+
+        # sub-command not given, or did not match any known sub-command names
+        genSubs = True
+        if cWord.startswith("-"):
+            # optimization: if the current word being completed starts
+            # with a hyphen then it can't be a sub-command, so skip
+            # the expensive generation of the sub-command list
+            genSubs = False
+        gen = ZshBuilder(config, cmdName, shellCompFile)
+        gen.write(genSubs=genSubs)
+    else:
+        gen = ZshBuilder(config, cmdName, shellCompFile)
+        gen.write()
+
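+# A rough sketch of how the zsh stub drives shellComplete() (the command
+# name, options and word list here are invented): argv[0] is already
+# stripped off, and the final word carries "<shell>:<$CURRENT>", so
+# completing the third word of "mycmd --verbose <TAB>" arrives as position 3.
+def _exampleShellComplete():
+    import sys
+
+    class ExampleOptions(usage.Options):
+        optFlags = [["verbose", "v", "Be noisy"]]
+        optParameters = [["logfile", "l", None, "Log file name"]]
+
+    shellComplete(ExampleOptions(), "mycmd",
+                  ["--verbose", "", "zsh:3"], sys.stdout)
+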
+
+
+class SubcommandAction(usage.Completer):
+    def _shellCode(self, optName, shellType):
+        if shellType == usage._ZSH:
+            return '*::subcmd:->subcmd'
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class ZshBuilder(object):
+    """
+    Constructs zsh code that will complete options for a given usage.Options
+    instance, possibly including a list of subcommand names.
+
+    Completions for options to subcommands won't be generated because this
+    class will never be used if the user is completing options for a specific
+    subcommand. (See L{ZshSubcommandBuilder} below)
+
+    @type options: L{twisted.python.usage.Options}
+    @ivar options: The L{twisted.python.usage.Options} instance defined for this
+        command.
+
+    @type cmdName: C{str}
+    @ivar cmdName: The name of the command we're generating completions for.
+
+    @type file: C{file}
+    @ivar file: The C{file} to write the completion function to.
+    """
+    def __init__(self, options, cmdName, file):
+        self.options = options
+        self.cmdName = cmdName
+        self.file = file
+
+
+    def write(self, genSubs=True):
+        """
+        Generate the completion function and write it to the output file
+        @return: C{None}
+
+        @type genSubs: C{bool}
+        @param genSubs: Flag indicating whether or not completions for the list
+            of subcommands should be generated. Only has an effect
+            if the C{subCommands} attribute has been defined on the
+            L{twisted.python.usage.Options} instance.
+        """
+        if genSubs and getattr(self.options, 'subCommands', None) is not None:
+            gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
+            gen.extraActions.insert(0, SubcommandAction())
+            gen.write()
+            self.file.write('local _zsh_subcmds_array\n_zsh_subcmds_array=(\n')
+            for (cmd, short, parser, desc) in self.options.subCommands:
+                self.file.write('"%s:%s"\n' % (cmd, desc))
+            self.file.write(")\n\n")
+            self.file.write('_describe "sub-command" _zsh_subcmds_array\n')
+        else:
+            gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
+            gen.write()
+
+
+
+class ZshSubcommandBuilder(ZshBuilder):
+    """
+    Constructs zsh code that will complete options for a given usage.Options
+    instance, and also for a single sub-command. This will only be used in
+    the case where the user is completing options for a specific subcommand.
+
+    @type subOptions: L{twisted.python.usage.Options}
+    @ivar subOptions: The L{twisted.python.usage.Options} instance defined for
+        the sub command.
+    """
+    def __init__(self, subOptions, *args):
+        self.subOptions = subOptions
+        ZshBuilder.__init__(self, *args)
+
+
+    def write(self):
+        """
+        Generate the completion function and write it to the output file
+        @return: C{None}
+        """
+        gen = ZshArgumentsGenerator(self.options, self.cmdName, self.file)
+        gen.extraActions.insert(0, SubcommandAction())
+        gen.write()
+
+        gen = ZshArgumentsGenerator(self.subOptions, self.cmdName, self.file)
+        gen.write()
+
+
+
+class ZshArgumentsGenerator(object):
+    """
+    Generate a call to the zsh _arguments completion function
+    based on data in a usage.Options instance
+
+    @type options: L{twisted.python.usage.Options}
+    @ivar options: The L{twisted.python.usage.Options} instance to generate for
+
+    @type cmdName: C{str}
+    @ivar cmdName: The name of the command we're generating completions for.
+
+    @type file: C{file}
+    @ivar file: The C{file} to write the completion function to
+
+    The following non-constructor variables are populated by this class
+    with data gathered from the C{Options} instance passed in, and its
+    base classes.
+
+    @type descriptions: C{dict}
+    @ivar descriptions: A dict mapping long option names to alternate
+        descriptions. When this variable is defined, the descriptions
+        contained here will override those descriptions provided in the
+        optFlags and optParameters variables.
+
+    @type multiUse: C{list}
+    @ivar multiUse: An iterable containing those long option names which may
+        appear on the command line more than once. By default, options will
+        only be completed one time.
+
+    @type mutuallyExclusive: C{list} of C{tuple}
+    @ivar mutuallyExclusive: A sequence of sequences, with each sub-sequence
+        containing those long option names that are mutually exclusive. That is,
+        those options that cannot appear on the command line together.
+
+    @type optActions: C{dict}
+    @ivar optActions: A dict mapping long option names to shell "actions".
+        These actions define what may be completed as the argument to the
+        given option, and should be given as instances of
+        L{twisted.python.usage.Completer}.
+
+        Callables may instead be given for the values in this dict. The
+        callable should accept no arguments, and return a C{Completer}
+        instance used as the action.
+
+    @type extraActions: C{list} of C{twisted.python.usage.Completer}
+    @ivar extraActions: Extra arguments are those arguments typically
+        appearing at the end of the command-line, which are not associated
+        with any particular named option. That is, the arguments that are
+        given to the parseArgs() method of your usage.Options subclass.
+    """
+    def __init__(self, options, cmdName, file):
+        self.options = options
+        self.cmdName = cmdName
+        self.file = file
+
+        self.descriptions = {}
+        self.multiUse = set()
+        self.mutuallyExclusive = []
+        self.optActions = {}
+        self.extraActions = []
+
+        for cls in reversed(inspect.getmro(options.__class__)):
+            data = getattr(cls, 'compData', None)
+            if data:
+                self.descriptions.update(data.descriptions)
+                self.optActions.update(data.optActions)
+                self.multiUse.update(data.multiUse)
+
+                self.mutuallyExclusive.extend(data.mutuallyExclusive)
+
+                # I don't see any sane way to aggregate extraActions, so just
+                # take the one at the top of the MRO (nearest the `options'
+                # instance).
+                if data.extraActions:
+                    self.extraActions = data.extraActions
+
+        aCL = reflect.accumulateClassList
+        aCD = reflect.accumulateClassDict
+
+        optFlags = []
+        optParams = []
+
+        aCL(options.__class__, 'optFlags', optFlags)
+        aCL(options.__class__, 'optParameters', optParams)
+
+        for i, optList in enumerate(optFlags):
+            if len(optList) != 3:
+                optFlags[i] = util.padTo(3, optList)
+
+        for i, optList in enumerate(optParams):
+            if len(optList) != 5:
+                optParams[i] = util.padTo(5, optList)
+
+
+        self.optFlags = optFlags
+        self.optParams = optParams
+
+        paramNameToDefinition = {}
+        for optList in optParams:
+            paramNameToDefinition[optList[0]] = optList[1:]
+        self.paramNameToDefinition = paramNameToDefinition
+
+        flagNameToDefinition = {}
+        for optList in optFlags:
+            flagNameToDefinition[optList[0]] = optList[1:]
+        self.flagNameToDefinition = flagNameToDefinition
+
+        allOptionsNameToDefinition = {}
+        allOptionsNameToDefinition.update(paramNameToDefinition)
+        allOptionsNameToDefinition.update(flagNameToDefinition)
+        self.allOptionsNameToDefinition = allOptionsNameToDefinition
+
+        self.addAdditionalOptions()
+
+        # makes sure none of the Completions metadata references
+        # option names that don't exist. (great for catching typos)
+        self.verifyZshNames()
+
+        self.excludes = self.makeExcludesDict()
+
+
+    def write(self):
+        """
+        Write the zsh completion code to the file given to __init__
+        @return: C{None}
+        """
+        self.writeHeader()
+        self.writeExtras()
+        self.writeOptions()
+        self.writeFooter()
+
+
+    def writeHeader(self):
+        """
+        This is the start of the code that calls _arguments
+        @return: C{None}
+        """
+        self.file.write('#compdef %s\n\n'
+                        '_arguments -s -A "-*" \\\n' % (self.cmdName,))
+
+
+    def writeOptions(self):
+        """
+        Write out zsh code for each option in this command
+        @return: C{None}
+        """
+        optNames = self.allOptionsNameToDefinition.keys()
+        optNames.sort()
+        for longname in optNames:
+            self.writeOpt(longname)
+
+
+    def writeExtras(self):
+        """
+        Write out completion information for extra arguments appearing on the
+        command-line. These are extra positional arguments not associated
+        with a named option. That is, the stuff that gets passed to
+        Options.parseArgs().
+
+        @return: C{None}
+
+        @raise ValueError: If a C{Completer} with C{repeat=True} is found
+            and is not the last item in the C{extraActions} list.
+        """
+        for i, action in enumerate(self.extraActions):
+            descr = ""
+            if action._descr:
+                descr = action._descr
+            # a repeatable action must be the last action in the list
+            if action._repeat and i != len(self.extraActions) - 1:
+                raise ValueError("Completer with repeat=True must be "
+                                 "last item in Options.extraActions")
+            self.file.write(escape(action._shellCode('', usage._ZSH)))
+            self.file.write(' \\\n')
+
+
+    def writeFooter(self):
+        """
+        Write the last bit of code that finishes the call to _arguments
+        @return: C{None}
+        """
+        self.file.write('&& return 0\n')
+
+
+    def verifyZshNames(self):
+        """
+        Ensure that none of the option names given in the metadata are typoed
+        @return: C{None}
+        @raise ValueError: Raised if unknown option names have been found.
+        """
+        def err(name):
+            raise ValueError("Unknown option name \"%s\" found while\n"
+                "examining Completions instances on %s" % (
+                    name, self.options))
+
+        for name in itertools.chain(self.descriptions, self.optActions,
+                                    self.multiUse):
+            if name not in self.allOptionsNameToDefinition:
+                err(name)
+
+        for seq in self.mutuallyExclusive:
+            for name in seq:
+                if name not in self.allOptionsNameToDefinition:
+                    err(name)
+
+
+    def excludeStr(self, longname, buildShort=False):
+        """
+        Generate an "exclusion string" for the given option
+
+        @type longname: C{str}
+        @param longname: The long option name (e.g. "verbose" instead of "v")
+
+        @type buildShort: C{bool}
+        @param buildShort: May be True to indicate we're building an excludes
+            string for the short option that corresponds to the given long opt.
+
+        @return: The generated C{str}
+        """
+        if longname in self.excludes:
+            exclusions = self.excludes[longname].copy()
+        else:
+            exclusions = set()
+
+        # if longname isn't a multiUse option (can't appear on the cmd line more
+        # than once), then we have to exclude the short option if we're
+        # building for the long option, and vice versa.
+        if longname not in self.multiUse:
+            if buildShort is False:
+                short = self.getShortOption(longname)
+                if short is not None:
+                    exclusions.add(short)
+            else:
+                exclusions.add(longname)
+
+        if not exclusions:
+            return ''
+
+        strings = []
+        for optName in exclusions:
+            if len(optName) == 1:
+                # short option
+                strings.append("-" + optName)
+            else:
+                strings.append("--" + optName)
+        strings.sort() # need deterministic order for reliable unit-tests
+        return "(%s)" % " ".join(strings)
+
+
+    def makeExcludesDict(self):
+        """
+        @return: A C{dict} that maps each option name appearing in
+            self.mutuallyExclusive to a C{set} of the option names that it is
+            mutually exclusive with (i.e. that cannot appear on the cmd line
+            with it).
+        """
+
+        #create a mapping of long option name -> single character name
+        longToShort = {}
+        for optList in itertools.chain(self.optParams, self.optFlags):
+            if optList[1] != None:
+                longToShort[optList[0]] = optList[1]
+
+        excludes = {}
+        for lst in self.mutuallyExclusive:
+            for i, longname in enumerate(lst):
+                tmp = set(lst[:i] + lst[i+1:])
+                for name in tmp.copy():
+                    if name in longToShort:
+                        tmp.add(longToShort[name])
+
+                if longname in excludes:
+                    excludes[longname] = excludes[longname].union(tmp)
+                else:
+                    excludes[longname] = tmp
+        return excludes
+
+
+    def writeOpt(self, longname):
+        """
+        Write out the zsh code for the given argument. This is just part of the
+        one big call to _arguments
+
+        @type longname: C{str}
+        @param longname: The long option name (e.g. "verbose" instead of "v")
+
+        @return: C{None}
+        """
+        if longname in self.flagNameToDefinition:
+            # It's a flag option. Not one that takes a parameter.
+            longField = "--%s" % longname
+        else:
+            longField = "--%s=" % longname
+
+        short = self.getShortOption(longname)
+        if short != None:
+            shortField = "-" + short
+        else:
+            shortField = ''
+
+        descr = self.getDescription(longname)
+        descriptionField = descr.replace("[", "\[")
+        descriptionField = descriptionField.replace("]", "\]")
+        descriptionField = '[%s]' % descriptionField
+
+        actionField = self.getAction(longname)
+        if longname in self.multiUse:
+            multiField = '*'
+        else:
+            multiField = ''
+
+        longExclusionsField = self.excludeStr(longname)
+
+        if short:
+            #we have to write an extra line for the short option if we have one
+            shortExclusionsField = self.excludeStr(longname, buildShort=True)
+            self.file.write(escape('%s%s%s%s%s' % (shortExclusionsField,
+                multiField, shortField, descriptionField, actionField)))
+            self.file.write(' \\\n')
+
+        self.file.write(escape('%s%s%s%s%s' % (longExclusionsField,
+            multiField, longField, descriptionField, actionField)))
+        self.file.write(' \\\n')
+
+
+    def getAction(self, longname):
+        """
+        Return a zsh "action" string for the given argument
+        @return: C{str}
+        """
+        if longname in self.optActions:
+            if callable(self.optActions[longname]):
+                action = self.optActions[longname]()
+            else:
+                action = self.optActions[longname]
+            return action._shellCode(longname, usage._ZSH)
+
+        if longname in self.paramNameToDefinition:
+            return ':%s:_files' % (longname,)
+        return ''
+
+
+    def getDescription(self, longname):
+        """
+        Return the description to be used for this argument
+        @return: C{str}
+        """
+        #check if we have an alternate descr for this arg, and if so use it
+        if longname in self.descriptions:
+            return self.descriptions[longname]
+
+        #otherwise we have to get it from the optFlags or optParams
+        try:
+            descr = self.flagNameToDefinition[longname][1]
+        except KeyError:
+            try:
+                descr = self.paramNameToDefinition[longname][2]
+            except KeyError:
+                descr = None
+
+        if descr is not None:
+            return descr
+
+        # let's try to get it from the opt_foo method doc string if there is one
+        longMangled = longname.replace('-', '_') # this is what t.p.usage does
+        obj = getattr(self.options, 'opt_%s' % longMangled, None)
+        if obj is not None:
+            descr = descrFromDoc(obj)
+            if descr is not None:
+                return descr
+
+        return longname # we really ought to have a good description to use
+
+
+    def getShortOption(self, longname):
+        """
+        Return the short option letter or None
+        @return: C{str} or C{None}
+        """
+        optList = self.allOptionsNameToDefinition[longname]
+        return optList[0] or None
+
+
+    def addAdditionalOptions(self):
+        """
+        Add additional options to the optFlags and optParams lists.
+        These will be defined by 'opt_foo' methods of the Options subclass
+        @return: C{None}
+        """
+        methodsDict = {}
+        reflect.accumulateMethods(self.options, methodsDict, 'opt_')
+        methodToShort = {}
+        for name in methodsDict.copy():
+            if len(name) == 1:
+                methodToShort[methodsDict[name]] = name
+                del methodsDict[name]
+
+        for methodName, methodObj in methodsDict.items():
+            longname = methodName.replace('_', '-') # t.p.usage does this
+            # if this option is already defined by the optFlags or
+            # optParameters then we don't want to override that data
+            if longname in self.allOptionsNameToDefinition:
+                continue
+
+            descr = self.getDescription(longname)
+
+            short = None
+            if methodObj in methodToShort:
+                short = methodToShort[methodObj]
+
+            reqArgs = methodObj.im_func.func_code.co_argcount
+            if reqArgs == 2:
+                self.optParams.append([longname, short, None, descr])
+                self.paramNameToDefinition[longname] = [short, None, descr]
+                self.allOptionsNameToDefinition[longname] = [short, None, descr]
+            else:
+                # reqArgs must equal 1. self.options would have failed
+                # to instantiate if it had opt_ methods with bad signatures.
+                self.optFlags.append([longname, short, descr])
+                self.flagNameToDefinition[longname] = [short, descr]
+                self.allOptionsNameToDefinition[longname] = [short, None, descr]
+
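+# A sketch of the Completions metadata this generator consumes (the option
+# names are invented, and the keyword form of the usage.Completions
+# constructor is assumed from its documented attributes):
+class _ExampleOptions(usage.Options):
+    optFlags = [["quiet", "q", "Silence output"],
+                ["verbose", "v", "Be noisy"]]
+    optParameters = [["logfile", "l", None, "Log file name"]]
+
+    compData = usage.Completions(
+        descriptions={"logfile": "Where to write log output"},
+        multiUse=["verbose"],
+        mutuallyExclusive=[["quiet", "verbose"]])
+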
+
+
+def descrFromDoc(obj):
+    """
+    Generate an appropriate description from the docstring of the given
+    object.
+    """
+    if obj.__doc__ is None or obj.__doc__.isspace():
+        return None
+
+    lines = [x.strip() for x in obj.__doc__.split("\n")
+                             if x and not x.isspace()]
+    return " ".join(lines)
+
+
+
+def escape(x):
+    """
+    Shell-escape the given string.
+
+    Implementation borrowed from the now-deprecated commands.mkarg() in the
+    stdlib.
+    """
+    if '\'' not in x:
+        return '\'' + x + '\''
+    s = '"'
+    for c in x:
+        if c in '\\$"`':
+            s = s + '\\'
+        s = s + c
+    s = s + '"'
+    return s
+
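+# A couple of worked cases for escape() above (sketch only): strings with
+# no single quote are wrapped in single quotes; strings containing one are
+# double-quoted with backslashes before \, $, " and `.
+def _exampleEscape():
+    assert escape("simple") == "'simple'"
+    assert escape('say "hi"') == "'say \"hi\"'"
+    assert escape("don't $HOME") == '"don\'t \\$HOME"'
+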
diff --git a/ThirdParty/Twisted/twisted/python/_utilpy3.py b/ThirdParty/Twisted/twisted/python/_utilpy3.py
new file mode 100644
index 0000000..af8138a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/_utilpy3.py
@@ -0,0 +1,337 @@
+# -*- test-case-name: twisted.python.test.test_utilpy3 -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+The subset of L{twisted.python.util} which has been ported to Python 3.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, errno, warnings
+
+from twisted.python.compat import unicode
+
+class FancyEqMixin:
+    """
+    Mixin that implements C{__eq__} and C{__ne__}.
+
+    Comparison is done using the list of attributes defined in
+    C{compareAttributes}.
+    """
+    compareAttributes = ()
+
+    def __eq__(self, other):
+        if not self.compareAttributes:
+            return self is other
+        if isinstance(self, other.__class__):
+            return (
+                [getattr(self, name) for name in self.compareAttributes] ==
+                [getattr(other, name) for name in self.compareAttributes])
+        return NotImplemented
+
+
+    def __ne__(self, other):
+        result = self.__eq__(other)
+        if result is NotImplemented:
+            return result
+        return not result
+
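+# A small sketch of FancyEqMixin in use (the class and attribute names are
+# invented): instances compare equal when every attribute named in
+# compareAttributes matches, e.g. _ExamplePoint(1, 2) == _ExamplePoint(1, 2).
+class _ExamplePoint(FancyEqMixin):
+    compareAttributes = ("x", "y")
+
+    def __init__(self, x, y):
+        self.x = x
+        self.y = y
+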
+
+_idFunction = id
+
+def setIDFunction(idFunction):
+    """
+    Change the function used by L{unsignedID} to determine the integer id value
+    of an object.  This is largely useful for testing to give L{unsignedID}
+    deterministic, easily-controlled behavior.
+
+    @param idFunction: A function with the signature of L{id}.
+    @return: The previous function being used by L{unsignedID}.
+    """
+    global _idFunction
+    oldIDFunction = _idFunction
+    _idFunction = idFunction
+    return oldIDFunction
+
+
+# A value about twice as large as any Python int, to which negative values
+# from id() will be added, moving them into a range which should begin just
+# above where positive values from id() leave off.
+_HUGEINT = (sys.maxsize + 1) * 2
+def unsignedID(obj):
+    """
+    Return the id of an object as an unsigned number so that its hex
+    representation makes sense.
+
+    This is mostly necessary in Python 2.4 which implements L{id} to sometimes
+    return a negative value.  Python 2.3 shares this behavior, but also
+    implements hex and the %x format specifier to represent negative values as
+    though they were positive ones, obscuring the behavior of L{id}.  Python
+    2.5's implementation of L{id} always returns positive values.
+    """
+    rval = _idFunction(obj)
+    if rval < 0:
+        rval += _HUGEINT
+    return rval
+
+
+
+def untilConcludes(f, *a, **kw):
+    """
+    Call C{f} with the given arguments, handling C{EINTR} by retrying.
+
+    @param f: A function to call.
+
+    @param *a: Positional arguments to pass to C{f}.
+
+    @param **kw: Keyword arguments to pass to C{f}.
+
+    @return: Whatever C{f} returns.
+
+    @raise: Whatever C{f} raises, except for C{IOError} or C{OSError} with
+        C{errno} set to C{EINTR}.
+    """
+    while True:
+        try:
+            return f(*a, **kw)
+        except (IOError, OSError) as e:
+            if e.args[0] == errno.EINTR:
+                continue
+            raise
+
+
+def runWithWarningsSuppressed(suppressedWarnings, f, *args, **kwargs):
+    """
+    Run C{f(*args, **kwargs)}, but with some warnings suppressed.
+
+    Unlike L{twisted.internet.utils.runWithWarningsSuppressed}, it has no
+    special support for L{twisted.internet.defer.Deferred}.
+
+    @param suppressedWarnings: A list of arguments to pass to filterwarnings.
+        Must be a sequence of 2-tuples (args, kwargs).
+
+    @param f: A callable.
+
+    @param args: Arguments for C{f}.
+
+    @param kwargs: Keyword arguments for C{f}
+
+    @return: The result of C{f(*args, **kwargs)}.
+    """
+    with warnings.catch_warnings():
+        for a, kw in suppressedWarnings:
+            warnings.filterwarnings(*a, **kw)
+        return f(*args, **kwargs)
+
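+# A brief sketch of the suppressedWarnings format described above: each
+# entry is an (args, kwargs) pair handed straight to warnings.filterwarnings
+# (the callable here is a trivial stand-in).
+def _exampleRunWithWarningsSuppressed():
+    return runWithWarningsSuppressed(
+        [(("ignore",), {"category": DeprecationWarning})],
+        lambda: 6 * 7)
+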
+
+
+class FancyStrMixin:
+    """
+    Mixin providing a flexible implementation of C{__str__}.
+
+    C{__str__} output will begin with the name of the class, or the contents
+    of the attribute C{fancybasename} if it is set.
+
+    The body of C{__str__} can be controlled by overriding C{showAttributes} in
+    a subclass.  Set C{showAttributes} to a sequence of strings naming
+    attributes, or sequences of C{(attributeName, callable)}, or sequences of
+    C{(attributeName, displayName, formatCharacter)}. In the second case, the
+    callable is passed the value of the attribute and its return value used in
+    the output of C{__str__}.  In the final case, the attribute is looked up
+    using C{attributeName}, but the output uses C{displayName} instead, and
+    renders the value of the attribute using C{formatCharacter}, e.g. C{"%.3f"}
+    might be used for a float.
+    """
+    # Override in subclasses:
+    showAttributes = ()
+
+
+    def __str__(self):
+        r = ['<', (hasattr(self, 'fancybasename') and self.fancybasename)
+             or self.__class__.__name__]
+        for attr in self.showAttributes:
+            if isinstance(attr, str):
+                r.append(' %s=%r' % (attr, getattr(self, attr)))
+            elif len(attr) == 2:
+                r.append((' %s=' % (attr[0],)) + attr[1](getattr(self, attr[0])))
+            else:
+                r.append((' %s=' + attr[2]) % (attr[1], getattr(self, attr[0])))
+        r.append('>')
+        return ''.join(r)
+
+    __repr__ = __str__
+
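+# A small sketch of the three showAttributes forms described above (the
+# class, attribute names and values are invented for illustration);
+# str(_ExampleFancyStr()) yields
+# "<_ExampleFancyStr count=3 label=SPAM ratio=0.500>".
+class _ExampleFancyStr(FancyStrMixin):
+    showAttributes = (
+        "count",                                 # plain attribute name
+        ("label", lambda value: value.upper()),  # (name, callable)
+        ("ratio", "ratio", "%.3f"),              # (name, displayName, format)
+        )
+
+    def __init__(self):
+        self.count = 3
+        self.label = "spam"
+        self.ratio = 0.5
+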
+
+
+def nameToLabel(mname):
+    """
+    Convert a string like a variable name into a slightly more human-friendly
+    string with spaces and capitalized letters.
+
+    @type mname: C{str}
+    @param mname: The name to convert to a label.  This must be a string
+    which could be used as a Python identifier.  Strings which do not take
+    this form will result in unpredictable behavior.
+
+    @rtype: C{str}
+    """
+    labelList = []
+    word = ''
+    lastWasUpper = False
+    for letter in mname:
+        if letter.isupper() == lastWasUpper:
+            # Continuing a word.
+            word += letter
+        else:
+            # breaking a word OR beginning a word
+            if lastWasUpper:
+                # could be either
+                if len(word) == 1:
+                    # keep going
+                    word += letter
+                else:
+                    # acronym
+                    # we're processing the lowercase letter after the acronym-then-capital
+                    lastWord = word[:-1]
+                    firstLetter = word[-1]
+                    labelList.append(lastWord)
+                    word = firstLetter + letter
+            else:
+                # definitely breaking: lower to upper
+                labelList.append(word)
+                word = letter
+        lastWasUpper = letter.isupper()
+    if labelList:
+        labelList[0] = labelList[0].capitalize()
+    else:
+        return mname.capitalize()
+    labelList.append(word)
+    return ' '.join(labelList)
+
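+
+# A small sketch of nameToLabel in action; the helper name is hypothetical and
+# the expected strings follow from the algorithm above.
+def _exampleNameToLabel():
+    assert nameToLabel('hello') == 'Hello'
+    assert nameToLabel('fooBarBaz') == 'Foo Bar Baz'
+    # A trailing run of capitals is treated as an acronym and kept together:
+    assert nameToLabel('hasAPI') == 'Has API'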
+
+
+class InsensitiveDict:
+    """Dictionary, that has case-insensitive keys.
+
+    Normally keys are retained in their original form when queried with
+    .keys() or .items().  If initialized with preserveCase=0, keys are both
+    looked up in lowercase and returned in lowercase by .keys() and .items().
+    """
+    """
+    Modified recipe at
+    http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66315 originally
+    contributed by Sami Hangaslammi.
+    """
+
+    def __init__(self, dict=None, preserve=1):
+        """Create an empty dictionary, or update from 'dict'."""
+        self.data = {}
+        self.preserve=preserve
+        if dict:
+            self.update(dict)
+
+    def __delitem__(self, key):
+        k=self._lowerOrReturn(key)
+        del self.data[k]
+
+    def _lowerOrReturn(self, key):
+        if isinstance(key, bytes) or isinstance(key, unicode):
+            return key.lower()
+        else:
+            return key
+
+    def __getitem__(self, key):
+        """Retrieve the value associated with 'key' (in any case)."""
+        k = self._lowerOrReturn(key)
+        return self.data[k][1]
+
+    def __setitem__(self, key, value):
+        """Associate 'value' with 'key'. If 'key' already exists, but
+        in different case, it will be replaced."""
+        k = self._lowerOrReturn(key)
+        self.data[k] = (key, value)
+
+    def has_key(self, key):
+        """Case insensitive test whether 'key' exists."""
+        k = self._lowerOrReturn(key)
+        return k in self.data
+
+    __contains__=has_key
+
+    def _doPreserve(self, key):
+        if not self.preserve and (isinstance(key, bytes)
+                                  or isinstance(key, unicode)):
+            return key.lower()
+        else:
+            return key
+
+    def keys(self):
+        """List of keys in their original case."""
+        return list(self.iterkeys())
+
+    def values(self):
+        """List of values."""
+        return list(self.itervalues())
+
+    def items(self):
+        """List of (key,value) pairs."""
+        return list(self.iteritems())
+
+    def get(self, key, default=None):
+        """Retrieve value associated with 'key' or return default value
+        if 'key' doesn't exist."""
+        try:
+            return self[key]
+        except KeyError:
+            return default
+
+    def setdefault(self, key, default):
+        """If 'key' doesn't exists, associate it with the 'default' value.
+        Return value associated with 'key'."""
+        if not self.has_key(key):
+            self[key] = default
+        return self[key]
+
+    def update(self, dict):
+        """Copy (key,value) pairs from 'dict'."""
+        for k,v in dict.items():
+            self[k] = v
+
+    def __repr__(self):
+        """String representation of the dictionary."""
+        items = ", ".join([("%r: %r" % (k,v)) for k,v in self.items()])
+        return "InsensitiveDict({%s})" % items
+
+    def iterkeys(self):
+        for v in self.data.values():
+            yield self._doPreserve(v[0])
+
+    def itervalues(self):
+        for v in self.data.values():
+            yield v[1]
+
+    def iteritems(self):
+        for (k, v) in self.data.values():
+            yield self._doPreserve(k), v
+
+    def popitem(self):
+        i=self.items()[0]
+        del self[i[0]]
+        return i
+
+    def clear(self):
+        for k in self.keys():
+            del self[k]
+
+    def copy(self):
+        return InsensitiveDict(self, self.preserve)
+
+    def __len__(self):
+        return len(self.data)
+
+    def __eq__(self, other):
+        for k,v in self.items():
+            if not (k in other) or not (other[k]==v):
+                return 0
+        return len(self)==len(other)
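+
+
+# A short illustrative sketch of InsensitiveDict; the helper and keys below
+# are hypothetical.
+def _exampleInsensitiveDict():
+    headers = InsensitiveDict({'Content-Type': 'text/plain'})
+    assert headers['content-type'] == 'text/plain'
+    assert 'CONTENT-TYPE' in headers
+    # With the default preserve=1, the original key casing is kept:
+    assert list(headers.keys()) == ['Content-Type']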
diff --git a/ThirdParty/Twisted/twisted/python/compat.py b/ThirdParty/Twisted/twisted/python/compat.py
new file mode 100644
index 0000000..fd29224
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/compat.py
@@ -0,0 +1,432 @@
+# -*- test-case-name: twisted.test.test_compat -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Compatibility module to provide backwards compatibility for useful Python
+features.
+
+This is mainly for use by internal Twisted code. We encourage you to use
+the latest version of Python directly from your code, if possible.
+
+ at var unicode: The type of Unicode strings, C{unicode} on Python 2 and C{str}
+    on Python 3.
+
+ at var NativeStringIO: An in-memory file-like object that operates on the native
+    string type (bytes in Python 2, unicode in Python 3).
+"""
+
+from __future__ import division
+
+import sys, string, socket, struct
+
+
+if sys.version_info < (3, 0):
+    _PY3 = False
+else:
+    _PY3 = True
+
+
+
+def inet_pton(af, addr):
+    if af == socket.AF_INET:
+        return socket.inet_aton(addr)
+    elif af == getattr(socket, 'AF_INET6', 'AF_INET6'):
+        bad = [x for x in addr if x not in string.hexdigits + ':.']
+        if bad:
+            raise ValueError("Illegal characters: %r" % (''.join(bad),))
+
+        parts = addr.split(':')
+        elided = parts.count('')
+        ipv4Component = '.' in parts[-1]
+
+        if len(parts) > (8 - ipv4Component) or elided > 3:
+            raise ValueError("Syntactically invalid address")
+
+        if elided == 3:
+            return '\x00' * 16
+
+        if elided:
+            zeros = ['0'] * (8 - len(parts) - ipv4Component + elided)
+
+            if addr.startswith('::'):
+                parts[:2] = zeros
+            elif addr.endswith('::'):
+                parts[-2:] = zeros
+            else:
+                idx = parts.index('')
+                parts[idx:idx+1] = zeros
+
+            if len(parts) != 8 - ipv4Component:
+                raise ValueError("Syntactically invalid address")
+        else:
+            if len(parts) != (8 - ipv4Component):
+                raise ValueError("Syntactically invalid address")
+
+        if ipv4Component:
+            if parts[-1].count('.') != 3:
+                raise ValueError("Syntactically invalid address")
+            rawipv4 = socket.inet_aton(parts[-1])
+            unpackedipv4 = struct.unpack('!HH', rawipv4)
+            parts[-1:] = [hex(x)[2:] for x in unpackedipv4]
+
+        parts = [int(x, 16) for x in parts]
+        return struct.pack('!8H', *parts)
+    else:
+        raise socket.error(97, 'Address family not supported by protocol')
+
+def inet_ntop(af, addr):
+    if af == socket.AF_INET:
+        return socket.inet_ntoa(addr)
+    elif af == socket.AF_INET6:
+        if len(addr) != 16:
+            raise ValueError("address length incorrect")
+        parts = struct.unpack('!8H', addr)
+        curBase = bestBase = None
+        for i in range(8):
+            if not parts[i]:
+                if curBase is None:
+                    curBase = i
+                    curLen = 0
+                curLen += 1
+            else:
+                if curBase is not None:
+                    if bestBase is None or curLen > bestLen:
+                        bestBase = curBase
+                        bestLen = curLen
+                    curBase = None
+        if curBase is not None and (bestBase is None or curLen > bestLen):
+            bestBase = curBase
+            bestLen = curLen
+        parts = [hex(x)[2:] for x in parts]
+        if bestBase is not None:
+            parts[bestBase:bestBase + bestLen] = ['']
+        if parts[0] == '':
+            parts.insert(0, '')
+        if parts[-1] == '':
+            parts.insert(len(parts) - 1, '')
+        return ':'.join(parts)
+    else:
+        raise socket.error(97, 'Address family not supported by protocol')
+
+try:
+    socket.AF_INET6
+except AttributeError:
+    socket.AF_INET6 = 'AF_INET6'
+
+try:
+    socket.inet_pton(socket.AF_INET6, "::")
+except (AttributeError, NameError, socket.error):
+    socket.inet_pton = inet_pton
+    socket.inet_ntop = inet_ntop
+
+
+adict = dict
+
+
+
+if _PY3:
+    # These are actually useless in Python 2 as well, but we need to go
+    # through deprecation process there (ticket #5895):
+    del adict, inet_pton, inet_ntop
+
+
+
+try:
+    set = set
+except NameError:
+    from sets import Set as set
+
+
+try:
+    frozenset = frozenset
+except NameError:
+    from sets import ImmutableSet as frozenset
+
+
+try:
+    from functools import reduce
+except ImportError:
+    reduce = reduce
+
+
+
+def execfile(filename, globals, locals=None):
+    """
+    Execute a Python script in the given namespaces.
+
+    Similar to the execfile builtin, but a namespace is mandatory, partly
+    because that's a sensible thing to require, and because otherwise we'd
+    have to do some frame hacking.
+
+    This is a compatibility implementation for Python 3 porting, to avoid the
+    use of the deprecated builtin C{execfile} function.
+    """
+    if locals is None:
+        locals = globals
+    fin = open(filename, "rbU")
+    try:
+        source = fin.read()
+    finally:
+        fin.close()
+    code = compile(source, filename, "exec")
+    exec(code, globals, locals)
+
+
+try:
+    cmp = cmp
+except NameError:
+    def cmp(a, b):
+        """
+        Compare two objects.
+
+        Returns a negative number if C{a < b}, zero if they are equal, and a
+        positive number if C{a > b}.
+        """
+        if a < b:
+            return -1
+        elif a == b:
+            return 0
+        else:
+            return 1
+
+
+
+def comparable(klass):
+    """
+    Class decorator that ensures support for the special C{__cmp__} method.
+
+    On Python 2 this does nothing.
+
+    On Python 3, C{__eq__}, C{__lt__}, etc. methods are added to the class,
+    relying on C{__cmp__} to implement their comparisons.
+    """
+    # On Python 2, __cmp__ will just work, so no need to add extra methods:
+    if not _PY3:
+        return klass
+
+    def __eq__(self, other):
+        c = self.__cmp__(other)
+        if c is NotImplemented:
+            return c
+        return c == 0
+
+
+    def __ne__(self, other):
+        c = self.__cmp__(other)
+        if c is NotImplemented:
+            return c
+        return c != 0
+
+
+    def __lt__(self, other):
+        c = self.__cmp__(other)
+        if c is NotImplemented:
+            return c
+        return c < 0
+
+
+    def __le__(self, other):
+        c = self.__cmp__(other)
+        if c is NotImplemented:
+            return c
+        return c <= 0
+
+
+    def __gt__(self, other):
+        c = self.__cmp__(other)
+        if c is NotImplemented:
+            return c
+        return c > 0
+
+
+    def __ge__(self, other):
+        c = self.__cmp__(other)
+        if c is NotImplemented:
+            return c
+        return c >= 0
+
+    klass.__lt__ = __lt__
+    klass.__gt__ = __gt__
+    klass.__le__ = __le__
+    klass.__ge__ = __ge__
+    klass.__eq__ = __eq__
+    klass.__ne__ = __ne__
+    return klass
+
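+
+# A compact sketch of the comparable decorator; the _Ordered class below is
+# hypothetical.
+@comparable
+class _Ordered(object):
+    def __init__(self, n):
+        self.n = n
+    def __cmp__(self, other):
+        if not isinstance(other, _Ordered):
+            return NotImplemented
+        return cmp(self.n, other.n)
+
+# _Ordered(1) < _Ordered(2) holds on both Python 2 and Python 3.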
+
+
+if _PY3:
+    unicode = str
+else:
+    unicode = unicode
+
+
+
+def nativeString(s):
+    """
+    Convert C{bytes} or C{unicode} to the native C{str} type, using ASCII
+    encoding if conversion is necessary.
+
+    @raise UnicodeError: The input string is not ASCII encodable/decodable.
+    @raise TypeError: The input is neither C{bytes} nor C{unicode}.
+    """
+    if not isinstance(s, (bytes, unicode)):
+        raise TypeError("%r is neither bytes nor unicode" % s)
+    if _PY3:
+        if isinstance(s, bytes):
+            return s.decode("ascii")
+        else:
+            # Ensure we're limited to ASCII subset:
+            s.encode("ascii")
+    else:
+        if isinstance(s, unicode):
+            return s.encode("ascii")
+        else:
+            # Ensure we're limited to ASCII subset:
+            s.decode("ascii")
+    return s
+
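+
+# An illustrative sketch of nativeString; the helper name is hypothetical.
+def _exampleNativeString():
+    # Both byte strings and text strings come back as the native str type:
+    assert nativeString(b"abc") == "abc"
+    assert nativeString(u"abc") == "abc"
+    try:
+        nativeString(u"\N{SNOWMAN}")
+    except UnicodeError:
+        pass  # non-ASCII input is rejected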
+
+
+if _PY3:
+    def reraise(exception, traceback):
+        raise exception.with_traceback(traceback)
+else:
+    exec("""def reraise(exception, traceback):
+        raise exception.__class__, exception, traceback""")
+
+reraise.__doc__ = """
+Re-raise an exception, with an optional traceback, in a way that is compatible
+with both Python 2 and Python 3.
+
+Note that on Python 3, re-raised exceptions will be mutated, with their
+C{__traceback__} attribute being set.
+
+ at param exception: The exception instance.
+ at param traceback: The traceback to use, or C{None} indicating a new traceback.
+"""
+
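+
+# A small sketch of reraise; the helper name is hypothetical.
+def _exampleReraise():
+    try:
+        raise ValueError("original failure")
+    except ValueError:
+        _, exception, tb = sys.exc_info()
+        try:
+            reraise(exception, tb)  # re-raises with the captured traceback
+        except ValueError as caught:
+            assert caught is exception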
+
+
+if _PY3:
+    from io import StringIO as NativeStringIO
+else:
+    from io import BytesIO as NativeStringIO
+
+
+
+# Functions for dealing with Python 3's bytes type, which is somewhat
+# different than Python 2's:
+if _PY3:
+    def iterbytes(originalBytes):
+        for i in range(len(originalBytes)):
+            yield originalBytes[i:i+1]
+
+
+    def intToBytes(i):
+        return ("%d" % i).encode("ascii")
+
+
+    # Ideally we would use memoryview, but it has a number of differences from
+    # the Python 2 buffer() that make that impractical
+    # (http://bugs.python.org/issue15945, incompatibility with pyOpenSSL due to
+    # PyArg_ParseTuple differences.)
+    def lazyByteSlice(object, offset=0, size=None):
+        """
+        Return a copy of the given bytes-like object.
+
+        If an offset is given, the copy starts at that offset. If a size is
+        given, the copy will only be of that length.
+
+        @param object: C{bytes} to be copied.
+
+        @param offset: C{int}, starting index of copy.
+
+        @param size: Optional, if an C{int} is given limit the length of copy
+            to this size.
+        """
+        if size is None:
+            return object[offset:]
+        else:
+            return object[offset:(offset + size)]
+
+
+    def networkString(s):
+        if not isinstance(s, unicode):
+            raise TypeError("Can only convert text to bytes on Python 3")
+        return s.encode('ascii')
+else:
+    def iterbytes(originalBytes):
+        return originalBytes
+
+
+    def intToBytes(i):
+        return b"%d" % i
+
+
+    lazyByteSlice = buffer
+
+    def networkString(s):
+        if not isinstance(s, str):
+            raise TypeError("Can only pass-through bytes on Python 2")
+        # Ensure we're limited to ASCII subset:
+        s.decode('ascii')
+        return s
+
+iterbytes.__doc__ = """
+Return an iterable wrapper for a C{bytes} object that provides the behavior of
+iterating over C{bytes} on Python 2.
+
+In particular, the results of iteration are the individual bytes (rather than
+integers as on Python 3).
+
+ at param originalBytes: A C{bytes} object that will be wrapped.
+"""
+
+intToBytes.__doc__ = """
+Convert the given integer into C{bytes}, as ASCII-encoded Arabic numerals.
+
+In other words, this is equivalent to calling C{bytes} in Python 2 on an
+integer.
+
+ at param i: The C{int} to convert to C{bytes}.
+ at rtype: C{bytes}
+"""
+
+networkString.__doc__ = """
+Convert the native string type to C{bytes} if it is not already C{bytes},
+using ASCII encoding if conversion is necessary.
+
+This is useful for sending text-like bytes that are constructed using string
+interpolation.  For example, this is safe on Python 2 and Python 3:
+
+    networkString("Hello %d" % (n,))
+
+ at param s: A native string to convert to bytes if necessary.
+ at type s: C{str}
+
+ at raise UnicodeError: The input string is not ASCII encodable/decodable.
+ at raise TypeError: The input is neither C{bytes} nor C{unicode}.
+
+ at rtype: C{bytes}
+"""
+
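+
+# A tiny sketch of the bytes helpers defined above; the helper name is
+# hypothetical.
+def _exampleBytesHelpers():
+    assert list(iterbytes(b"hi")) == [b"h", b"i"]   # bytes, never ints
+    assert intToBytes(42) == b"42"
+    assert networkString("GET / HTTP/1.1") == b"GET / HTTP/1.1"
+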
+
+__all__ = [
+    "reraise",
+    "execfile",
+    "frozenset",
+    "reduce",
+    "set",
+    "cmp",
+    "comparable",
+    "nativeString",
+    "NativeStringIO",
+    "networkString",
+    "unicode",
+    "iterbytes",
+    "intToBytes",
+    "lazyByteSlice",
+    ]
diff --git a/ThirdParty/Twisted/twisted/python/components.py b/ThirdParty/Twisted/twisted/python/components.py
new file mode 100644
index 0000000..53eb879
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/components.py
@@ -0,0 +1,443 @@
+# -*- test-case-name: twisted.python.test.test_components -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Component architecture for Twisted, based on Zope3 components.
+
+Using the Zope3 API directly is strongly recommended. Everything
+you need is in the top-level of the zope.interface package, e.g.::
+
+   from zope.interface import Interface, implements
+
+   class IFoo(Interface):
+       pass
+
+   class Foo:
+       implements(IFoo)
+
+   print IFoo.implementedBy(Foo) # True
+   print IFoo.providedBy(Foo()) # True
+
+L{twisted.python.components.registerAdapter} from this module may be used to
+add to Twisted's global adapter registry.
+
+L{twisted.python.components.proxyForInterface} is a factory for classes
+which allow access to only the parts of another class defined by a specified
+interface.
+"""
+
+from __future__ import division, absolute_import
+
+# zope3 imports
+from zope.interface import interface, declarations
+from zope.interface.adapter import AdapterRegistry
+
+# twisted imports
+from twisted.python.compat import NativeStringIO
+from twisted.python import _reflectpy3 as reflect
+
+
+
+# Twisted's global adapter registry
+globalRegistry = AdapterRegistry()
+
+# Attribute that registerAdapter looks at. Is this supposed to be public?
+ALLOW_DUPLICATES = 0
+
+# Define a function to find the registered adapter factory, using either a
+# version of Zope Interface which has the `registered' method or an older
+# version which does not.
+if getattr(AdapterRegistry, 'registered', None) is None:
+    def _registered(registry, required, provided):
+        """
+        Return the adapter factory for the given parameters in the given
+        registry, or None if there is not one.
+        """
+        return registry.get(required).selfImplied.get(provided, {}).get('')
+else:
+    def _registered(registry, required, provided):
+        """
+        Return the adapter factory for the given parameters in the given
+        registry, or None if there is not one.
+        """
+        return registry.registered([required], provided)
+
+
+def registerAdapter(adapterFactory, origInterface, *interfaceClasses):
+    """Register an adapter class.
+
+    An adapter class is expected to implement the given interface, by
+    adapting instances implementing 'origInterface'. An adapter class's
+    __init__ method should accept one parameter, an instance implementing
+    'origInterface'.
+    """
+    self = globalRegistry
+    assert interfaceClasses, "You need to pass an Interface"
+    global ALLOW_DUPLICATES
+
+    # deal with class->interface adapters:
+    if not isinstance(origInterface, interface.InterfaceClass):
+        origInterface = declarations.implementedBy(origInterface)
+
+    for interfaceClass in interfaceClasses:
+        factory = _registered(self, origInterface, interfaceClass)
+        if factory is not None and not ALLOW_DUPLICATES:
+            raise ValueError("an adapter (%s) was already registered." % (factory, ))
+    for interfaceClass in interfaceClasses:
+        self.register([origInterface], interfaceClass, '', adapterFactory)
+
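+
+# An illustrative sketch of registerAdapter; IHello, Greeter and HelloAdapter
+# below are hypothetical.
+def _exampleRegisterAdapter():
+    from zope.interface import Interface
+
+    class IHello(Interface):
+        def hello():
+            "Return a greeting."
+
+    class Greeter(object):
+        pass
+
+    class HelloAdapter(object):
+        def __init__(self, original):
+            self.original = original
+        def hello(self):
+            return "hello from %r" % (self.original,)
+
+    registerAdapter(HelloAdapter, Greeter, IHello)
+    # With the global adapter hook installed below, adapting a Greeter works:
+    assert isinstance(IHello(Greeter()), HelloAdapter)
+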
+
+def getAdapterFactory(fromInterface, toInterface, default):
+    """Return registered adapter for a given class and interface.
+
+    Note that is tied to the *Twisted* global registry, and will
+    thus not find adapters registered elsewhere.
+    """
+    self = globalRegistry
+    if not isinstance(fromInterface, interface.InterfaceClass):
+        fromInterface = declarations.implementedBy(fromInterface)
+    factory = self.lookup1(fromInterface, toInterface)
+    if factory is None:
+        factory = default
+    return factory
+
+
+def _addHook(registry):
+    """
+    Add an adapter hook which will attempt to look up adapters in the given
+    registry.
+
+    @type registry: L{zope.interface.adapter.AdapterRegistry}
+
+    @return: The hook which was added, for later use with L{_removeHook}.
+    """
+    lookup = registry.lookup1
+    def _hook(iface, ob):
+        factory = lookup(declarations.providedBy(ob), iface)
+        if factory is None:
+            return None
+        else:
+            return factory(ob)
+    interface.adapter_hooks.append(_hook)
+    return _hook
+
+
+def _removeHook(hook):
+    """
+    Remove a previously added adapter hook.
+
+    @param hook: An object previously returned by a call to L{_addHook}.  This
+        will be removed from the list of adapter hooks.
+    """
+    interface.adapter_hooks.remove(hook)
+
+# add global adapter lookup hook for our newly created registry
+_addHook(globalRegistry)
+
+
+def getRegistry():
+    """Returns the Twisted global
+    C{zope.interface.adapter.AdapterRegistry} instance.
+    """
+    return globalRegistry
+
+# FIXME: deprecate attribute somehow?
+CannotAdapt = TypeError
+
+class Adapter:
+    """I am the default implementation of an Adapter for some interface.
+
+    This docstring contains a limerick, by popular demand::
+
+        Subclassing made Zope and TR
+        much harder to work with by far.
+            So before you inherit,
+            be sure to declare it
+        Adapter, not PyObject*
+
+    @cvar temporaryAdapter: If this is True, the adapter will not be
+          persisted on the Componentized.
+    @cvar multiComponent: If this adapter is persistent, should it be
+          automatically registered for all appropriate interfaces.
+    """
+
+    # These attributes are used with Componentized.
+
+    temporaryAdapter = 0
+    multiComponent = 1
+
+    def __init__(self, original):
+        """Set my 'original' attribute to be the object I am adapting.
+        """
+        self.original = original
+
+    def __conform__(self, interface):
+        """
+        I forward __conform__ to self.original if it has it, otherwise I
+        simply return None.
+        """
+        if hasattr(self.original, "__conform__"):
+            return self.original.__conform__(interface)
+        return None
+
+    def isuper(self, iface, adapter):
+        """
+        Forward isuper to self.original
+        """
+        return self.original.isuper(iface, adapter)
+
+
+class Componentized:
+    """I am a mixin to allow you to be adapted in various ways persistently.
+
+    I define a list of persistent adapters.  This is to allow adapter classes
+    to store system-specific state, and to be initialized on demand.  The
+    getComponent method implements this.  You must also register adapters for
+    this class for the interfaces that you wish to pass to getComponent.
+
+    Many other classes and utilities listed here are present in Zope3; this one
+    is specific to Twisted.
+    """
+
+    persistenceVersion = 1
+
+    def __init__(self):
+        self._adapterCache = {}
+
+    def locateAdapterClass(self, klass, interfaceClass, default):
+        return getAdapterFactory(klass, interfaceClass, default)
+
+    def setAdapter(self, interfaceClass, adapterClass):
+        """
+        Cache a provider for the given interface, by adapting C{self} using
+        the given adapter class.
+        """
+        self.setComponent(interfaceClass, adapterClass(self))
+
+    def addAdapter(self, adapterClass, ignoreClass=0):
+        """Utility method that calls addComponent.  I take an adapter class and
+        instantiate it with myself as the first argument.
+
+        @return: The adapter instantiated.
+        """
+        adapt = adapterClass(self)
+        self.addComponent(adapt, ignoreClass)
+        return adapt
+
+    def setComponent(self, interfaceClass, component):
+        """
+        Cache a provider of the given interface.
+        """
+        self._adapterCache[reflect.qual(interfaceClass)] = component
+
+    def addComponent(self, component, ignoreClass=0):
+        """
+        Add a component to me, for all appropriate interfaces.
+
+        In order to determine which interfaces are appropriate, the component's
+        provided interfaces will be scanned.
+
+        If the argument 'ignoreClass' is True, then all interfaces are
+        considered appropriate.
+
+        Otherwise, an 'appropriate' interface is one for which its class has
+        been registered as an adapter for my class according to the rules of
+        getComponent.
+
+        @return: the list of appropriate interfaces
+        """
+        for iface in declarations.providedBy(component):
+            if (ignoreClass or
+                (self.locateAdapterClass(self.__class__, iface, None)
+                 == component.__class__)):
+                self._adapterCache[reflect.qual(iface)] = component
+
+    def unsetComponent(self, interfaceClass):
+        """Remove my component specified by the given interface class."""
+        del self._adapterCache[reflect.qual(interfaceClass)]
+
+    def removeComponent(self, component):
+        """
+        Remove the given component from me entirely, for all interfaces for which
+        it has been registered.
+
+        @return: a list of the interfaces that were removed.
+        """
+        l = []
+        for k, v in list(self._adapterCache.items()):
+            if v is component:
+                del self._adapterCache[k]
+                l.append(reflect.namedObject(k))
+        return l
+
+    def getComponent(self, interface, default=None):
+        """Create or retrieve an adapter for the given interface.
+
+        If such an adapter has already been created, retrieve it from the cache
+        that this instance keeps of all its adapters.  Adapters created through
+        this mechanism may safely store system-specific state.
+
+        If you want to register an adapter that will be created through
+        getComponent, but you don't require (or don't want) your adapter to be
+        cached and kept alive for the lifetime of this Componentized object,
+        set the attribute 'temporaryAdapter' to True on your adapter class.
+
+        If you want to automatically register an adapter for all appropriate
+        interfaces (with addComponent), set the attribute 'multiComponent' to
+        True on your adapter class.
+        """
+        k = reflect.qual(interface)
+        if k in self._adapterCache:
+            return self._adapterCache[k]
+        else:
+            adapter = interface.__adapt__(self)
+            if adapter is not None and not (
+                hasattr(adapter, "temporaryAdapter") and
+                adapter.temporaryAdapter):
+                self._adapterCache[k] = adapter
+                if (hasattr(adapter, "multiComponent") and
+                    adapter.multiComponent):
+                    self.addComponent(adapter)
+            if adapter is None:
+                return default
+            return adapter
+
+
+    def __conform__(self, interface):
+        return self.getComponent(interface)
+
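+
+# A short sketch of Componentized caching; ILog and LogAdapter below are
+# hypothetical.
+def _exampleComponentized():
+    from zope.interface import Interface, implementer
+
+    class ILog(Interface):
+        def log(message):
+            "Record a message."
+
+    @implementer(ILog)
+    class LogAdapter(Adapter):
+        def log(self, message):
+            return "logged: %s" % (message,)
+
+    c = Componentized()
+    c.setAdapter(ILog, LogAdapter)
+    # The provider is cached, so repeated lookups return the same adapter:
+    assert c.getComponent(ILog) is c.getComponent(ILog)
+    assert c.getComponent(ILog).log("hi") == "logged: hi"
+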
+
+class ReprableComponentized(Componentized):
+    def __init__(self):
+        Componentized.__init__(self)
+
+    def __repr__(self):
+        from pprint import pprint
+        sio = NativeStringIO()
+        pprint(self._adapterCache, sio)
+        return sio.getvalue()
+
+
+
+def proxyForInterface(iface, originalAttribute='original'):
+    """
+    Create a class which proxies all method calls which adhere to an interface
+    to another provider of that interface.
+
+    This function is intended for creating specialized proxies. The typical way
+    to use it is by subclassing the result::
+
+      class MySpecializedProxy(proxyForInterface(IFoo)):
+          def someInterfaceMethod(self, arg):
+              if arg == 3:
+                  return 3
+              return self.original.someInterfaceMethod(arg)
+
+    @param iface: The Interface to which the resulting object will conform, and
+        which the wrapped object must provide.
+
+    @param originalAttribute: name of the attribute used to save the original
+        object in the resulting class. Defaults to C{original}.
+    @type originalAttribute: C{str}
+
+    @return: A class whose constructor takes the original object as its only
+        argument. Constructing the class creates the proxy.
+    """
+    def __init__(self, original):
+        setattr(self, originalAttribute, original)
+    contents = {"__init__": __init__}
+    for name in iface:
+        contents[name] = _ProxyDescriptor(name, originalAttribute)
+    proxy = type("(Proxy for %s)"
+                 % (reflect.qual(iface),), (object,), contents)
+    declarations.classImplements(proxy, iface)
+    return proxy
+
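+
+# A self-contained variant of the docstring example above; IPing and Pinger
+# are hypothetical.
+def _exampleProxyForInterface():
+    from zope.interface import Interface, implementer
+
+    class IPing(Interface):
+        def ping():
+            "Return a pong."
+
+    @implementer(IPing)
+    class Pinger(object):
+        def ping(self):
+            return "pong"
+
+    # Calls on the proxy are forwarded to the wrapped IPing provider:
+    Proxy = proxyForInterface(IPing)
+    assert Proxy(Pinger()).ping() == "pong"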
+
+
+class _ProxiedClassMethod(object):
+    """
+    A proxied class method.
+
+    @ivar methodName: the name of the method which this should invoke when
+        called.
+    @type methodName: C{str}
+
+    @ivar originalAttribute: name of the attribute of the proxy where the
+        original object is stored.
+    @type originalAttribute: C{str}
+    """
+    def __init__(self, methodName, originalAttribute):
+        self.methodName = methodName
+        self.originalAttribute = originalAttribute
+
+
+    def __call__(self, oself, *args, **kw):
+        """
+        Invoke the method named by C{methodName} on the original object stored
+        in the proxy's C{originalAttribute}.
+
+        @param oself: an instance of a L{proxyForInterface} object.
+
+        @return: the result of the underlying method.
+        """
+        original = getattr(oself, self.originalAttribute)
+        actualMethod = getattr(original, self.methodName)
+        return actualMethod(*args, **kw)
+
+
+
+class _ProxyDescriptor(object):
+    """
+    A descriptor which will proxy attribute access, mutation, and
+    deletion to the L{original} attribute of the object it is being accessed
+    from.
+
+    @ivar attributeName: the name of the attribute which this descriptor will
+        retrieve from instances' C{original} attribute.
+    @type attributeName: C{str}
+
+    @ivar originalAttribute: name of the attribute of the proxy where the
+        original object is stored.
+    @type originalAttribute: C{str}
+    """
+    def __init__(self, attributeName, originalAttribute):
+        self.attributeName = attributeName
+        self.originalAttribute = originalAttribute
+
+
+    def __get__(self, oself, type=None):
+        """
+        Retrieve the C{self.attributeName} property from L{oself}.
+        """
+        if oself is None:
+            return _ProxiedClassMethod(self.attributeName,
+                                       self.originalAttribute)
+        original = getattr(oself, self.originalAttribute)
+        return getattr(original, self.attributeName)
+
+
+    def __set__(self, oself, value):
+        """
+        Set the C{self.attributeName} property of L{oself}.
+        """
+        original = getattr(oself, self.originalAttribute)
+        setattr(original, self.attributeName, value)
+
+
+    def __delete__(self, oself):
+        """
+        Delete the C{self.attributeName} property of L{oself}.
+        """
+        original = getattr(oself, self.originalAttribute)
+        delattr(original, self.attributeName)
+
+
+
+__all__ = [
+    "registerAdapter", "getAdapterFactory",
+    "Adapter", "Componentized", "ReprableComponentized", "getRegistry",
+    "proxyForInterface",
+]
diff --git a/ThirdParty/Twisted/twisted/python/constants.py b/ThirdParty/Twisted/twisted/python/constants.py
new file mode 100644
index 0000000..db708d6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/constants.py
@@ -0,0 +1,377 @@
+# -*- test-case-name: twisted.python.test.test_constants -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Symbolic constant support, including collections and constants with text,
+numeric, and bit flag values.
+"""
+
+__all__ = [
+    'NamedConstant', 'ValueConstant', 'FlagConstant',
+    'Names', 'Values', 'Flags']
+
+from itertools import count
+from operator import and_, or_, xor
+
+_unspecified = object()
+_constantOrder = count().next
+
+
+class _Constant(object):
+    """
+    @ivar _index: A C{int} allocated from a shared counter in order to keep
+        track of the order in which L{_Constant}s are instantiated.
+
+    @ivar name: A C{str} giving the name of this constant; only set once the
+        constant is initialized by L{_ConstantsContainer}.
+
+    @ivar _container: The L{_ConstantsContainer} subclass this constant belongs
+        to; only set once the constant is initialized by that subclass.
+    """
+    def __init__(self):
+        self._index = _constantOrder()
+
+
+    def __get__(self, oself, cls):
+        """
+        Ensure this constant has been initialized before returning it.
+        """
+        cls._initializeEnumerants()
+        return self
+
+
+    def __repr__(self):
+        """
+        Return text identifying both which constant this is and which collection
+        it belongs to.
+        """
+        return "<%s=%s>" % (self._container.__name__, self.name)
+
+
+    def _realize(self, container, name, value):
+        """
+        Complete the initialization of this L{_Constant}.
+
+        @param container: The L{_ConstantsContainer} subclass this constant is
+            part of.
+
+        @param name: The name of this constant in its container.
+
+        @param value: The value of this constant; not used, as named constants
+            have no value apart from their identity.
+        """
+        self._container = container
+        self.name = name
+
+
+
+class _EnumerantsInitializer(object):
+    """
+    L{_EnumerantsInitializer} is a descriptor used to initialize a cache of
+    objects representing named constants for a particular L{_ConstantsContainer}
+    subclass.
+    """
+    def __get__(self, oself, cls):
+        """
+        Trigger the initialization of the enumerants cache on C{cls} and then
+        return it.
+        """
+        cls._initializeEnumerants()
+        return cls._enumerants
+
+
+
+class _ConstantsContainer(object):
+    """
+    L{_ConstantsContainer} is a class with attributes used as symbolic
+    constants.  It is up to subclasses to specify what kind of constants are
+    allowed.
+
+    @cvar _constantType: Specified by a L{_ConstantsContainer} subclass to
+        specify the type of constants allowed by that subclass.
+
+    @cvar _enumerantsInitialized: A C{bool} tracking whether C{_enumerants} has
+        been initialized yet or not.
+
+    @cvar _enumerants: A C{dict} mapping the names of constants (e.g.
+        L{NamedConstant} instances) found in the class definition to those
+        instances.  This is initialized via the L{_EnumerantsInitializer}
+        descriptor the first time it is accessed.
+    """
+    _constantType = None
+
+    _enumerantsInitialized = False
+    _enumerants = _EnumerantsInitializer()
+
+    def __new__(cls):
+        """
+        Classes representing constants containers are not intended to be
+        instantiated.
+
+        The class object itself is used directly.
+        """
+        raise TypeError("%s may not be instantiated." % (cls.__name__,))
+
+
+    def _initializeEnumerants(cls):
+        """
+        Find all of the L{NamedConstant} instances in the definition of C{cls},
+        initialize them with constant values, and build a mapping from their
+        names to them to attach to C{cls}.
+        """
+        if not cls._enumerantsInitialized:
+            constants = []
+            for (name, descriptor) in cls.__dict__.iteritems():
+                if isinstance(descriptor, cls._constantType):
+                    constants.append((descriptor._index, name, descriptor))
+            enumerants = {}
+            constants.sort()
+            for (index, enumerant, descriptor) in constants:
+                value = cls._constantFactory(enumerant, descriptor)
+                descriptor._realize(cls, enumerant, value)
+                enumerants[enumerant] = descriptor
+            # Replace the _enumerants descriptor with the result so future
+            # access will go directly to the values.  The _enumerantsInitialized
+            # flag is still necessary because NamedConstant.__get__ may also
+            # call this method.
+            cls._enumerants = enumerants
+            cls._enumerantsInitialized = True
+    _initializeEnumerants = classmethod(_initializeEnumerants)
+
+
+    def _constantFactory(cls, name, descriptor):
+        """
+        Construct the value for a new constant to add to this container.
+
+        @param name: The name of the constant to create.
+
+        @return: L{NamedConstant} instances have no value apart from identity,
+            so return a meaningless dummy value.
+        """
+        return _unspecified
+    _constantFactory = classmethod(_constantFactory)
+
+
+    def lookupByName(cls, name):
+        """
+        Retrieve a constant by its name or raise a C{ValueError} if there is no
+        constant associated with that name.
+
+        @param name: A C{str} giving the name of one of the constants defined by
+            C{cls}.
+
+        @raise ValueError: If C{name} is not the name of one of the constants
+            defined by C{cls}.
+
+        @return: The L{NamedConstant} associated with C{name}.
+        """
+        if name in cls._enumerants:
+            return getattr(cls, name)
+        raise ValueError(name)
+    lookupByName = classmethod(lookupByName)
+
+
+    def iterconstants(cls):
+        """
+        Iteration over a L{Names} subclass results in all of the constants it
+        contains.
+
+        @return: an iterator the elements of which are the L{NamedConstant}
+            instances defined in the body of this L{Names} subclass.
+        """
+        constants = cls._enumerants.values()
+        constants.sort(key=lambda descriptor: descriptor._index)
+        return iter(constants)
+    iterconstants = classmethod(iterconstants)
+
+
+
+class NamedConstant(_Constant):
+    """
+    L{NamedConstant} defines an attribute to be a named constant within a
+    collection defined by a L{Names} subclass.
+
+    L{NamedConstant} is only for use in the definition of L{Names}
+    subclasses.  Do not instantiate L{NamedConstant} elsewhere and do not
+    subclass it.
+    """
+
+
+
+class Names(_ConstantsContainer):
+    """
+    A L{Names} subclass contains constants which differ only in their names and
+    identities.
+    """
+    _constantType = NamedConstant
+
+
+
+class ValueConstant(_Constant):
+    """
+    L{ValueConstant} defines an attribute to be a named constant within a
+    collection defined by a L{Values} subclass.
+
+    L{ValueConstant} is only for use in the definition of L{Values} subclasses.
+    Do not instantiate L{ValueConstant} elsewhere and do not subclass it.
+    """
+    def __init__(self, value):
+        _Constant.__init__(self)
+        self.value = value
+
+
+
+class Values(_ConstantsContainer):
+    """
+    A L{Values} subclass contains constants which are associated with arbitrary
+    values.
+    """
+    _constantType = ValueConstant
+
+    def lookupByValue(cls, value):
+        """
+        Retrieve a constant by its value or raise a C{ValueError} if there is no
+        constant associated with that value.
+
+        @param value: The value of one of the constants defined by C{cls}.
+
+        @raise ValueError: If C{value} is not the value of one of the constants
+            defined by C{cls}.
+
+        @return: The L{ValueConstant} associated with C{value}.
+        """
+        for constant in cls.iterconstants():
+            if constant.value == value:
+                return constant
+        raise ValueError(value)
+    lookupByValue = classmethod(lookupByValue)
+
+
+
+def _flagOp(op, left, right):
+    """
+    Implement a binary operator for a L{FlagConstant} instance.
+
+    @param op: A two-argument callable implementing the binary operation.  For
+        example, C{operator.or_}.
+
+    @param left: The left-hand L{FlagConstant} instance.
+    @param right: The right-hand L{FlagConstant} instance.
+
+    @return: A new L{FlagConstant} instance representing the result of the
+        operation.
+    """
+    value = op(left.value, right.value)
+    names = op(left.names, right.names)
+    result = FlagConstant()
+    result._realize(left._container, names, value)
+    return result
+
+
+
+class FlagConstant(_Constant):
+    """
+    L{FlagConstant} defines an attribute to be a flag constant within a
+    collection defined by a L{Flags} subclass.
+
+    L{FlagConstant} is only for use in the definition of L{Flags} subclasses.
+    Do not instantiate L{FlagConstant} elsewhere and do not subclass it.
+    """
+    def __init__(self, value=_unspecified):
+        _Constant.__init__(self)
+        self.value = value
+
+
+    def _realize(self, container, names, value):
+        """
+        Complete the initialization of this L{FlagConstant}.
+
+        This implementation differs from other C{_realize} implementations in
+        that a L{FlagConstant} may have several names which apply to it, due to
+        flags being combined with various operators.
+
+        @param container: The L{Flags} subclass this constant is part of.
+
+        @param names: When a single-flag value is being initialized, a C{str}
+            giving the name of that flag.  This is the case which happens when a
+            L{Flags} subclass is being initialized and L{FlagConstant} instances
+            from its body are being realized.  Otherwise, a C{set} of C{str}
+            giving names of all the flags set on this L{FlagConstant} instance.
+            This is the case when two flags are combined using C{|}, for
+            example.
+        """
+        if isinstance(names, str):
+            name = names
+            names = set([names])
+        elif len(names) == 1:
+            (name,) = names
+        else:
+            name = "{" + ",".join(sorted(names)) + "}"
+        _Constant._realize(self, container, name, value)
+        self.value = value
+        self.names = names
+
+
+    def __or__(self, other):
+        """
+        Define C{|} on two L{FlagConstant} instances to create a new
+        L{FlagConstant} instance with all flags set in either instance set.
+        """
+        return _flagOp(or_, self, other)
+
+
+    def __and__(self, other):
+        """
+        Define C{&} on two L{FlagConstant} instances to create a new
+        L{FlagConstant} instance with only flags set in both instances set.
+        """
+        return _flagOp(and_, self, other)
+
+
+    def __xor__(self, other):
+        """
+        Define C{^} on two L{FlagConstant} instances to create a new
+        L{FlagConstant} instance with only flags set on exactly one instance
+        set.
+        """
+        return _flagOp(xor, self, other)
+
+
+    def __invert__(self):
+        """
+        Define C{~} on a L{FlagConstant} instance to create a new
+        L{FlagConstant} instance with all flags not set on this instance set.
+        """
+        result = FlagConstant()
+        result._realize(self._container, set(), 0)
+        for flag in self._container.iterconstants():
+            if flag.value & self.value == 0:
+                result |= flag
+        return result
+
+
+
+class Flags(Values):
+    """
+    A L{Flags} subclass contains constants which can be combined using the
+    common bitwise operators (C{|}, C{&}, etc) similar to a I{bitvector} from a
+    language like C.
+    """
+    _constantType = FlagConstant
+
+    _value = 1
+
+    def _constantFactory(cls, name, descriptor):
+        """
+        For L{FlagConstant} instances with no explicitly defined value, assign
+        the next power of two as its value.
+        """
+        if descriptor.value is _unspecified:
+            value = cls._value
+            cls._value <<= 1
+        else:
+            value = descriptor.value
+            cls._value = value << 1
+        return value
+    _constantFactory = classmethod(_constantFactory)
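+
+
+
+# A compact illustrative sketch of the three container types; the subclasses
+# below are hypothetical.
+class _Method(Names):
+    GET = NamedConstant()
+    POST = NamedConstant()
+
+class _Status(Values):
+    OK = ValueConstant(200)
+    NOT_FOUND = ValueConstant(404)
+
+class _Permission(Flags):
+    READ = FlagConstant()
+    WRITE = FlagConstant()
+
+# _Method.lookupByName('GET') is _Method.GET
+# _Status.lookupByValue(404) is _Status.NOT_FOUND
+# (_Permission.READ | _Permission.WRITE).names == set(['READ', 'WRITE'])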
diff --git a/ThirdParty/Twisted/twisted/python/context.py b/ThirdParty/Twisted/twisted/python/context.py
new file mode 100644
index 0000000..cb32be7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/context.py
@@ -0,0 +1,133 @@
+# -*- test-case-name: twisted.test.test_context -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Dynamic pseudo-scoping for Python.
+
+Call functions with context.call({key: value}, func); func and
+functions that it calls will be able to use 'context.get(key)' to
+retrieve 'value'.
+
+This is thread-safe.
+"""
+
+from __future__ import division, absolute_import
+
+from threading import local
+
+defaultContextDict = {}
+
+setDefault = defaultContextDict.__setitem__
+
+class ContextTracker:
+    """
+    A L{ContextTracker} provides a way to pass arbitrary key/value data up and
+    down a call stack without passing them as parameters to the functions on
+    that call stack.
+
+    This can be useful when functions on the top and bottom of the call stack
+    need to cooperate but the functions in between them do not allow passing the
+    necessary state.  For example::
+
+        from twisted.python.context import call, get
+
+        def handleRequest(request):
+            call({'request-id': request.id}, renderRequest, request.url)
+
+        def renderRequest(url):
+            renderHeader(url)
+            renderBody(url)
+
+        def renderHeader(url):
+            return "the header"
+
+        def renderBody(url):
+            return "the body (request id=%r)" % (get("request-id"),)
+
+    This should be used sparingly, since the lack of a clear connection between
+    the two halves can result in code which is difficult to understand and
+    maintain.
+
+    @ivar contexts: A C{list} of C{dict}s tracking the context state.  Each new
+        L{ContextTracker.callWithContext} pushes a new C{dict} onto this stack
+        for the duration of the call, making the data available to the function
+        called and restoring the previous data once it is complete.
+    """
+    def __init__(self):
+        self.contexts = [defaultContextDict]
+
+
+    def callWithContext(self, newContext, func, *args, **kw):
+        """
+        Call C{func(*args, **kw)} such that the contents of C{newContext} will
+        be available for it to retrieve using L{getContext}.
+
+        @param newContext: A C{dict} of data to push onto the context for the
+            duration of the call to C{func}.
+
+        @param func: A callable which will be called.
+
+        @param *args: Any additional positional arguments to pass to C{func}.
+
+        @param **kw: Any additional keyword arguments to pass to C{func}.
+
+        @return: Whatever is returned by C{func}
+
+        @raise: Whatever is raised by C{func}.
+        """
+        self.contexts.append(newContext)
+        try:
+            return func(*args,**kw)
+        finally:
+            self.contexts.pop()
+
+
+    def getContext(self, key, default=None):
+        """
+        Retrieve the value for a key from the context.
+
+        @param key: The key to look up in the context.
+
+        @param default: The value to return if C{key} is not found in the
+            context.
+
+        @return: The value most recently remembered in the context for C{key}.
+        """
+        for ctx in reversed(self.contexts):
+            try:
+                return ctx[key]
+            except KeyError:
+                pass
+        return default
+
+
+
+class ThreadedContextTracker(object):
+    def __init__(self):
+        self.storage = local()
+
+    def currentContext(self):
+        try:
+            return self.storage.ct
+        except AttributeError:
+            ct = self.storage.ct = ContextTracker()
+            return ct
+
+    def callWithContext(self, ctx, func, *args, **kw):
+        return self.currentContext().callWithContext(ctx, func, *args, **kw)
+
+    def getContext(self, key, default=None):
+        return self.currentContext().getContext(key, default)
+
+
+def installContextTracker(ctr):
+    global theContextTracker
+    global call
+    global get
+
+    theContextTracker = ctr
+    call = theContextTracker.callWithContext
+    get = theContextTracker.getContext
+
+installContextTracker(ThreadedContextTracker())
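+
+
+# A tiny runnable sketch of the module-level call/get helpers installed above;
+# the helper name and the 'request-id' key are hypothetical.
+def _exampleContext():
+    def inner():
+        return get('request-id', 'missing')
+    assert call({'request-id': 42}, inner) == 42
+    # The pushed context is popped again once the call completes:
+    assert get('request-id', 'missing') == 'missing'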
diff --git a/ThirdParty/Twisted/twisted/python/deprecate.py b/ThirdParty/Twisted/twisted/python/deprecate.py
new file mode 100644
index 0000000..7d71b2a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/deprecate.py
@@ -0,0 +1,539 @@
+# -*- test-case-name: twisted.python.test.test_deprecate -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Deprecation framework for Twisted.
+
+To mark a method or function as being deprecated do this::
+
+    from twisted.python.versions import Version
+    from twisted.python.deprecate import deprecated
+
+    @deprecated(Version("Twisted", 8, 0, 0))
+    def badAPI(self, first, second):
+        '''
+        Docstring for badAPI.
+        '''
+        ...
+
+The newly-decorated badAPI will issue a warning when called. It will also have
+a deprecation notice appended to its docstring.
+
+To mark module-level attributes as being deprecated you can use::
+
+    badAttribute = "someValue"
+
+    ...
+
+    deprecatedModuleAttribute(
+        Version("Twisted", 8, 0, 0),
+        "Use goodAttribute instead.",
+        "your.full.module.name",
+        "badAttribute")
+
+The deprecated attributes will issue a warning whenever they are accessed. If
+the attributes being deprecated are in the same module as the
+L{deprecatedModuleAttribute} call is being made from, the C{__name__} global
+can be used as the C{moduleName} parameter.
+
+See also L{Version}.
+
+ at type DEPRECATION_WARNING_FORMAT: C{str}
+ at var DEPRECATION_WARNING_FORMAT: The default deprecation warning string format
+    to use when one is not provided by the user.
+"""
+
+from __future__ import division, absolute_import
+
+__all__ = [
+    'deprecated',
+    'getDeprecationWarningString',
+    'getWarningMethod',
+    'setWarningMethod',
+    'deprecatedModuleAttribute',
+    ]
+
+
+import sys, inspect
+from warnings import warn, warn_explicit
+from dis import findlinestarts
+from functools import wraps
+
+from twisted.python.versions import getVersionString
+
+DEPRECATION_WARNING_FORMAT = '%(fqpn)s was deprecated in %(version)s'
+
+# Notionally, part of twisted.python.reflect, but defining it there causes a
+# cyclic dependency between this module and that module.  Define it here,
+# instead, and let reflect import it to re-expose to the public.
+def _fullyQualifiedName(obj):
+    """
+    Return the fully qualified name of a module, class, method or function.
+    Classes and functions need to be module level ones to be correctly
+    qualified.
+
+    @rtype: C{str}.
+    """
+    try:
+        name = obj.__qualname__
+    except AttributeError:
+        name = obj.__name__
+
+    if inspect.isclass(obj) or inspect.isfunction(obj):
+        moduleName = obj.__module__
+        return "%s.%s" % (moduleName, name)
+    elif inspect.ismethod(obj):
+        try:
+            cls = obj.im_class
+        except AttributeError:
+            # Python 3 eliminates im_class, substitutes __module__ and
+            # __qualname__ to provide similar information.
+            return "%s.%s" % (obj.__module__, obj.__qualname__)
+        else:
+            className = _fullyQualifiedName(cls)
+            return "%s.%s" % (className, name)
+    return name
+# Try to keep it looking like something in twisted.python.reflect.
+_fullyQualifiedName.__module__ = 'twisted.python.reflect'
+_fullyQualifiedName.__name__ = 'fullyQualifiedName'
+_fullyQualifiedName.__qualname__ = 'fullyQualifiedName'
+
+
+def _getReplacementString(replacement):
+    """
+    Surround a replacement for a deprecated API with some polite text exhorting
+    the user to consider it as an alternative.
+
+    @type replacement: C{str} or callable
+
+    @return: a string like "please use twisted.python.modules.getModule
+        instead".
+    """
+    if callable(replacement):
+        replacement = _fullyQualifiedName(replacement)
+    return "please use %s instead" % (replacement,)
+
+
+
+def _getDeprecationDocstring(version, replacement=None):
+    """
+    Generate an addition to a deprecated object's docstring that explains its
+    deprecation.
+
+    @param version: the version in which it was deprecated.
+    @type version: L{Version}
+
+    @param replacement: The replacement, if specified.
+    @type replacement: C{str} or callable
+
+    @return: a string like "Deprecated in Twisted 27.2.0; please use
+        twisted.timestream.tachyon.flux instead."
+    """
+    doc = "Deprecated in %s" % (getVersionString(version),)
+    if replacement:
+        doc = "%s; %s" % (doc, _getReplacementString(replacement))
+    return doc + "."
+
+
+
+def _getDeprecationWarningString(fqpn, version, format=None, replacement=None):
+    """
+    Return a string indicating that the Python name was deprecated in the given
+    version.
+
+    @param fqpn: Fully qualified Python name of the thing being deprecated
+    @type fqpn: C{str}
+
+    @param version: Version that C{fqpn} was deprecated in.
+    @type version: L{twisted.python.versions.Version}
+
+    @param format: A user-provided format to interpolate warning values into, or
+        L{DEPRECATION_WARNING_FORMAT
+        <twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if C{None} is
+        given.
+    @type format: C{str}
+
+    @param replacement: what should be used in place of C{fqpn}. Either pass in
+        a string, which will be inserted into the warning message, or a
+        callable, which will be expanded to its full import path.
+    @type replacement: C{str} or callable
+
+    @return: A textual description of the deprecation
+    @rtype: C{str}
+    """
+    if format is None:
+        format = DEPRECATION_WARNING_FORMAT
+    warningString = format % {
+        'fqpn': fqpn,
+        'version': getVersionString(version)}
+    if replacement:
+        warningString = "%s; %s" % (
+            warningString, _getReplacementString(replacement))
+    return warningString
+
+
+
+def getDeprecationWarningString(callableThing, version, format=None,
+                                replacement=None):
+    """
+    Return a string indicating that the callable was deprecated in the given
+    version.
+
+    @type callableThing: C{callable}
+    @param callableThing: Callable object to be deprecated
+
+    @type version: L{twisted.python.versions.Version}
+    @param version: Version that C{callableThing} was deprecated in
+
+    @type format: C{str}
+    @param format: A user-provided format to interpolate warning values into,
+        or L{DEPRECATION_WARNING_FORMAT
+        <twisted.python.deprecate.DEPRECATION_WARNING_FORMAT>} if C{None} is
+        given
+
+    @param replacement: what should be used in place of the callable. Either
+        pass in a string, which will be inserted into the warning message,
+        or a callable, which will be expanded to its full import path.
+    @type replacement: C{str} or callable
+
+    @return: A string describing the deprecation.
+    @rtype: C{str}
+    """
+    return _getDeprecationWarningString(
+        _fullyQualifiedName(callableThing), version, format, replacement)
+
+
+
+def _appendToDocstring(thingWithDoc, textToAppend):
+    """
+    Append the given text to the docstring of C{thingWithDoc}.
+
+    If C{thingWithDoc} has no docstring, then the text just replaces the
+    docstring. If it has a single-line docstring then it appends a blank line
+    and the message text. If it has a multi-line docstring, then it appends a
+    blank line and the message text, and also does the indentation correctly.
+    """
+    if thingWithDoc.__doc__:
+        docstringLines = thingWithDoc.__doc__.splitlines()
+    else:
+        docstringLines = []
+
+    if len(docstringLines) == 0:
+        docstringLines.append(textToAppend)
+    elif len(docstringLines) == 1:
+        docstringLines.extend(['', textToAppend, ''])
+    else:
+        spaces = docstringLines.pop()
+        docstringLines.extend(['',
+                               spaces + textToAppend,
+                               spaces])
+    thingWithDoc.__doc__ = '\n'.join(docstringLines)
+
+
+
+def deprecated(version, replacement=None):
+    """
+    Return a decorator that marks callables as deprecated.
+
+    @type version: L{twisted.python.versions.Version}
+    @param version: The version in which the callable will be marked as
+        having been deprecated.  The decorated function will be annotated
+        with this version, having it set as its C{deprecatedVersion}
+        attribute.
+
+    @param replacement: what should be used in place of the callable. Either
+        pass in a string, which will be inserted into the warning message,
+        or a callable, which will be expanded to its full import path.
+    @type replacement: C{str} or callable
+    """
+    def deprecationDecorator(function):
+        """
+        Decorator that marks C{function} as deprecated.
+        """
+        warningString = getDeprecationWarningString(
+            function, version, None, replacement)
+
+        @wraps(function)
+        def deprecatedFunction(*args, **kwargs):
+            warn(
+                warningString,
+                DeprecationWarning,
+                stacklevel=2)
+            return function(*args, **kwargs)
+
+        _appendToDocstring(deprecatedFunction,
+                           _getDeprecationDocstring(version, replacement))
+        deprecatedFunction.deprecatedVersion = version
+        return deprecatedFunction
+
+    return deprecationDecorator
+
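+# Illustrative usage sketch, assuming the decorator behaves as documented
+# above; the version and replacement named here are hypothetical and this
+# helper is never called by the module itself.
+def _exampleDeprecatedDecorator():
+    """
+    Define and call a sample deprecated function.
+    """
+    from twisted.python.versions import Version
+
+    @deprecated(Version("Twisted", 12, 0, 0), replacement="newApi")
+    def oldApi():
+        return 42
+
+    # Calling oldApi() emits a DeprecationWarning that mentions "newApi"
+    # and returns the wrapped function's result unchanged.
+    return oldApi()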
+
+
+def getWarningMethod():
+    """
+    Return the warning method currently used to record deprecation warnings.
+    """
+    return warn
+
+
+
+def setWarningMethod(newMethod):
+    """
+    Set the warning method to use to record deprecation warnings.
+
+    The callable should take message, category and stacklevel. The return
+    value is ignored.
+    """
+    global warn
+    warn = newMethod
+
+
+
+class _InternalState(object):
+    """
+    An L{_InternalState} is a helper object for a L{_ModuleProxy}, so that it
+    can easily access its own attributes, bypassing its logic for delegating to
+    another object that it's proxying for.
+
+    @ivar proxy: a L{_ModuleProxy}
+    """
+    def __init__(self, proxy):
+        object.__setattr__(self, 'proxy', proxy)
+
+
+    def __getattribute__(self, name):
+        return object.__getattribute__(object.__getattribute__(self, 'proxy'),
+                                       name)
+
+
+    def __setattr__(self, name, value):
+        return object.__setattr__(object.__getattribute__(self, 'proxy'),
+                                  name, value)
+
+
+
+class _ModuleProxy(object):
+    """
+    Python module wrapper to hook module-level attribute access.
+
+    Access to deprecated attributes first checks
+    L{_ModuleProxy._deprecatedAttributes}; if the attribute does not appear
+    there, access falls through to L{_ModuleProxy._module}, the wrapped
+    module object.
+
+    @ivar _module: Module on which to hook attribute access.
+    @type _module: C{module}
+
+    @ivar _deprecatedAttributes: Mapping of attribute names to objects that
+        retrieve the module attribute's original value.
+    @type _deprecatedAttributes: C{dict} mapping C{str} to
+        L{_DeprecatedAttribute}
+
+    @ivar _lastWasPath: Heuristic guess as to whether warnings about this
+        package should be ignored for the next call.  If the last attribute
+        access of this module was a C{getattr} of C{__path__}, we will assume
+        that it was the import system doing it and we won't emit a warning for
+        the next access, even if it is to a deprecated attribute.  The CPython
+        import system always tries to access C{__path__}, then the attribute
+        itself, then the attribute itself again, in both successful and failed
+        cases.
+    @type _lastWasPath: C{bool}
+    """
+    def __init__(self, module):
+        state = _InternalState(self)
+        state._module = module
+        state._deprecatedAttributes = {}
+        state._lastWasPath = False
+
+
+    def __repr__(self):
+        """
+        Get a string containing the type of the module proxy and a
+        representation of the wrapped module object.
+        """
+        state = _InternalState(self)
+        return '<%s module=%r>' % (type(self).__name__, state._module)
+
+
+    def __setattr__(self, name, value):
+        """
+        Set an attribute on the wrapped module object.
+        """
+        state = _InternalState(self)
+        state._lastWasPath = False
+        setattr(state._module, name, value)
+
+
+    def __getattribute__(self, name):
+        """
+        Get an attribute from the module object, possibly emitting a warning.
+
+        If the specified name has been deprecated, then a warning is issued.
+        (Unless certain obscure conditions are met; see
+        L{_ModuleProxy._lastWasPath} for more information about what might quash
+        such a warning.)
+        """
+        state = _InternalState(self)
+        if state._lastWasPath:
+            deprecatedAttribute = None
+        else:
+            deprecatedAttribute = state._deprecatedAttributes.get(name)
+
+        if deprecatedAttribute is not None:
+            # If we have a _DeprecatedAttribute object from the earlier lookup,
+            # allow it to issue the warning.
+            value = deprecatedAttribute.get()
+        else:
+            # Otherwise, just retrieve the underlying value directly; it's not
+            # deprecated, there's no warning to issue.
+            value = getattr(state._module, name)
+        if name == '__path__':
+            state._lastWasPath = True
+        else:
+            state._lastWasPath = False
+        return value
+
+
+
+class _DeprecatedAttribute(object):
+    """
+    Wrapper for deprecated attributes.
+
+    This is intended to be used by L{_ModuleProxy}. Calling
+    L{_DeprecatedAttribute.get} will issue a warning and retrieve the
+    underlying attribute's value.
+
+    @type module: C{module}
+    @ivar module: The original module instance containing this attribute
+
+    @type fqpn: C{str}
+    @ivar fqpn: Fully qualified Python name for the deprecated attribute
+
+    @type version: L{twisted.python.versions.Version}
+    @ivar version: Version that the attribute was deprecated in
+
+    @type message: C{str}
+    @ivar message: Deprecation message
+    """
+    def __init__(self, module, name, version, message):
+        """
+        Initialise a deprecated name wrapper.
+        """
+        self.module = module
+        self.__name__ = name
+        self.fqpn = module.__name__ + '.' + name
+        self.version = version
+        self.message = message
+
+
+    def get(self):
+        """
+        Get the underlying attribute value and issue a deprecation warning.
+        """
+        # This might fail if the deprecated thing is a module inside a package.
+        # In that case, don't emit the warning this time.  The import system
+        # will come back again when it's not an AttributeError and we can emit
+        # the warning then.
+        result = getattr(self.module, self.__name__)
+        message = _getDeprecationWarningString(self.fqpn, self.version,
+            DEPRECATION_WARNING_FORMAT + ': ' + self.message)
+        warn(message, DeprecationWarning, stacklevel=3)
+        return result
+
+
+
+def _deprecateAttribute(proxy, name, version, message):
+    """
+    Mark a module-level attribute as being deprecated.
+
+    @type proxy: L{_ModuleProxy}
+    @param proxy: The module proxy instance proxying the deprecated attributes
+
+    @type name: C{str}
+    @param name: Attribute name
+
+    @type version: L{twisted.python.versions.Version}
+    @param version: Version that the attribute was deprecated in
+
+    @type message: C{str}
+    @param message: Deprecation message
+    """
+    _module = object.__getattribute__(proxy, '_module')
+    attr = _DeprecatedAttribute(_module, name, version, message)
+    # Add a deprecated attribute marker for this module's attribute. When this
+    # attribute is accessed via _ModuleProxy a warning is emitted.
+    _deprecatedAttributes = object.__getattribute__(
+        proxy, '_deprecatedAttributes')
+    _deprecatedAttributes[name] = attr
+
+
+
+def deprecatedModuleAttribute(version, message, moduleName, name):
+    """
+    Declare a module-level attribute as being deprecated.
+
+    @type version: L{twisted.python.versions.Version}
+    @param version: Version that the attribute was deprecated in
+
+    @type message: C{str}
+    @param message: Deprecation message
+
+    @type moduleName: C{str}
+    @param moduleName: Fully-qualified Python name of the module containing
+        the deprecated attribute; if called from the same module as the
+        attributes are being deprecated in, using the C{__name__} global can
+        be helpful
+
+    @type name: C{str}
+    @param name: Attribute name to deprecate
+    """
+    module = sys.modules[moduleName]
+    if not isinstance(module, _ModuleProxy):
+        module = _ModuleProxy(module)
+        sys.modules[moduleName] = module
+
+    _deprecateAttribute(module, name, version, message)
+
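+# Illustrative usage sketch, assuming the API documented above; the module
+# attribute, message and version are hypothetical and this helper is never
+# called by the module itself.
+def _exampleDeprecatedModuleAttribute():
+    """
+    Mark a sample attribute of this module as deprecated; afterwards,
+    reading it through the module emits a DeprecationWarning.
+    """
+    from twisted.python.versions import Version
+
+    deprecatedModuleAttribute(
+        Version("Twisted", 12, 0, 0),
+        "Use somethingNew instead.",
+        __name__,
+        "somethingOld")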
+
+def warnAboutFunction(offender, warningString):
+    """
+    Issue a warning string, identifying C{offender} as the responsible code.
+
+    This function is used to deprecate some behavior of a function.  It differs
+    from L{warnings.warn} in that it is not limited to deprecating the behavior
+    of a function currently on the call stack.
+
+    @param offender: The function that is being deprecated.
+
+    @param warningString: The string that should be emitted by this warning.
+    @type warningString: C{str}
+
+    @since: 11.0
+    """
+    # inspect.getmodule() is attractive, but somewhat
+    # broken in Python < 2.6.  See Python bug 4845.
+    offenderModule = sys.modules[offender.__module__]
+    filename = inspect.getabsfile(offenderModule)
+    lineStarts = list(findlinestarts(offender.__code__))
+    lastLineNo = lineStarts[-1][1]
+    globals = offender.__globals__
+
+    kwargs = dict(
+        category=DeprecationWarning,
+        filename=filename,
+        lineno=lastLineNo,
+        module=offenderModule.__name__,
+        registry=globals.setdefault("__warningregistry__", {}),
+        module_globals=None)
+
+    warn_explicit(warningString, **kwargs)
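+
+
+# Illustrative usage sketch, assuming the API documented above; the
+# offending function and message are hypothetical and this helper is never
+# called by the module itself.
+def _exampleWarnAboutFunction():
+    """
+    Emit a warning attributed to a sample function rather than to the
+    current call stack.
+    """
+    def misbehavingFunction():
+        pass
+
+    warnAboutFunction(
+        misbehavingFunction,
+        "misbehavingFunction relies on behavior that will be removed.")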
diff --git a/ThirdParty/Twisted/twisted/python/dist.py b/ThirdParty/Twisted/twisted/python/dist.py
new file mode 100644
index 0000000..8d16466
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/dist.py
@@ -0,0 +1,461 @@
+# -*- test-case-name: twisted.python.test.test_dist -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Distutils convenience functionality.
+
+Don't use this outside of Twisted.
+
+Maintainer: Christopher Armstrong
+"""
+
+from distutils.command import build_scripts, install_data, build_ext
+from distutils.errors import CompileError
+from distutils import core
+from distutils.core import Extension
+import fnmatch
+import os
+import platform
+import sys
+
+from twisted import copyright
+from twisted.python.compat import execfile
+
+STATIC_PACKAGE_METADATA = dict(
+    name="Twisted",
+    version=copyright.version,
+    description="An asynchronous networking framework written in Python",
+    author="Twisted Matrix Laboratories",
+    author_email="twisted-python at twistedmatrix.com",
+    maintainer="Glyph Lefkowitz",
+    maintainer_email="glyph at twistedmatrix.com",
+    url="http://twistedmatrix.com/",
+    license="MIT",
+    long_description="""\
+An extensible framework for Python programming, with special focus
+on event-based network programming and multiprotocol integration.
+""",
+    classifiers=[
+        "Programming Language :: Python :: 2.6",
+        "Programming Language :: Python :: 2.7",
+        ],
+    )
+
+
+twisted_subprojects = ["conch", "lore", "mail", "names",
+                       "news", "pair", "runner", "web",
+                       "words"]
+
+
+
+class ConditionalExtension(Extension):
+    """
+    An extension module that will only be compiled if certain conditions are
+    met.
+
+    @param condition: A callable of one argument which returns True or False to
+        indicate whether the extension should be built. The argument is an
+        instance of L{build_ext_twisted}, which has useful methods for checking
+        things about the platform.
+    """
+    def __init__(self, *args, **kwargs):
+        self.condition = kwargs.pop("condition", lambda builder: True)
+        Extension.__init__(self, *args, **kwargs)
+
+
+
+def setup(**kw):
+    """
+    An alternative to distutils' setup() which is specially designed
+    for Twisted subprojects.
+
+    Pass twisted_subproject=projname if you want package and data
+    files to automatically be found for you.
+
+    @param conditionalExtensions: Extensions to optionally build.
+    @type conditionalExtensions: C{list} of L{ConditionalExtension}
+    """
+    return core.setup(**get_setup_args(**kw))
+
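+# Illustrative usage sketch, assuming the helpers defined in this module;
+# the subproject name, extension and header check are hypothetical and this
+# helper is never called by the module itself.
+def _exampleSetupCall():
+    """
+    Invoke setup() the way a Twisted subproject's setup.py might.
+    """
+    extensions = [
+        ConditionalExtension(
+            "twisted.example._fastpath",
+            ["twisted/example/_fastpath.c"],
+            condition=lambda builder: builder._check_header("sys/epoll.h")),
+        ]
+    setup(
+        twisted_subproject="example",
+        conditionalExtensions=extensions,
+        scripts=getScripts("example"))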
+
+def get_setup_args(**kw):
+    if 'twisted_subproject' in kw:
+        if 'twisted' not in os.listdir('.'):
+            raise RuntimeError("Sorry, you need to run setup.py from the "
+                               "toplevel source directory.")
+        projname = kw['twisted_subproject']
+        projdir = os.path.join('twisted', projname)
+
+        kw['packages'] = getPackages(projdir, parent='twisted')
+        kw['version'] = getVersion(projname)
+
+        plugin = "twisted/plugins/twisted_" + projname + ".py"
+        if os.path.exists(plugin):
+            kw.setdefault('py_modules', []).append(
+                plugin.replace("/", ".")[:-3])
+
+        kw['data_files'] = getDataFiles(projdir, parent='twisted')
+
+        del kw['twisted_subproject']
+    else:
+        if 'plugins' in kw:
+            py_modules = []
+            for plg in kw['plugins']:
+                py_modules.append("twisted.plugins." + plg)
+            kw.setdefault('py_modules', []).extend(py_modules)
+            del kw['plugins']
+
+    if 'cmdclass' not in kw:
+        kw['cmdclass'] = {
+            'install_data': install_data_twisted,
+            'build_scripts': build_scripts_twisted}
+
+    if "conditionalExtensions" in kw:
+        extensions = kw["conditionalExtensions"]
+        del kw["conditionalExtensions"]
+
+        if 'ext_modules' not in kw:
+            # This is a workaround for distutils behavior; ext_modules isn't
+            # actually used by our custom builder.  distutils deep-down checks
+            # to see if there are any ext_modules defined before invoking
+            # the build_ext command.  We need to trigger build_ext regardless
+            # because it is the thing that does the conditional checks to see
+            # if it should build any extensions.  The reason we have to delay
+            # the conditional checks until then is that the compiler objects
+            # are not yet set up when this code is executed.
+            kw["ext_modules"] = extensions
+
+        class my_build_ext(build_ext_twisted):
+            conditionalExtensions = extensions
+        kw.setdefault('cmdclass', {})['build_ext'] = my_build_ext
+    return kw
+
+
+def getVersion(proj, base="twisted"):
+    """
+    Extract the version number for a given project.
+
+    @param proj: the name of the project. Examples are "core",
+    "conch", "words", "mail".
+
+    @rtype: str
+    @returns: The version number of the project, as a string like
+    "2.0.0".
+    """
+    if proj == 'core':
+        vfile = os.path.join(base, '_version.py')
+    else:
+        vfile = os.path.join(base, proj, '_version.py')
+    ns = {'__name__': 'Nothing to see here'}
+    execfile(vfile, ns)
+    return ns['version'].base()
+
+
+# Names that are excluded from globbing results:
+EXCLUDE_NAMES = ["{arch}", "CVS", ".cvsignore", "_darcs",
+                 "RCS", "SCCS", ".svn"]
+EXCLUDE_PATTERNS = ["*.py[cdo]", "*.s[ol]", ".#*", "*~", "*.py"]
+
+
+def _filterNames(names):
+    """
+    Given a list of file names, return those names that should be copied.
+    """
+    names = [n for n in names
+             if n not in EXCLUDE_NAMES]
+    # This is needed when building a distro from a working
+    # copy (likely a checkout) rather than a pristine export:
+    for pattern in EXCLUDE_PATTERNS:
+        names = [n for n in names
+                 if (not fnmatch.fnmatch(n, pattern))
+                 and (not n.endswith('.py'))]
+    return names
+
+
+def relativeTo(base, relativee):
+    """
+    Gets 'relativee' relative to 'basepath'.
+
+    i.e.,
+
+    >>> relativeTo('/home/', '/home/radix/')
+    'radix'
+    >>> relativeTo('.', '/home/radix/Projects/Twisted') # curdir is /home/radix
+    'Projects/Twisted'
+
+    The 'relativee' must be a child of 'basepath'.
+    """
+    basepath = os.path.abspath(base)
+    relativee = os.path.abspath(relativee)
+    if relativee.startswith(basepath):
+        relative = relativee[len(basepath):]
+        if relative.startswith(os.sep):
+            relative = relative[1:]
+        return os.path.join(base, relative)
+    raise ValueError("%s is not a subpath of %s" % (relativee, basepath))
+
+
+def getDataFiles(dname, ignore=None, parent=None):
+    """
+    Get all the data files that should be included in this distutils Project.
+
+    'dname' should be the path to the package that you're distributing.
+
+    'ignore' is a list of sub-packages to ignore.  This facilitates
+    disparate package hierarchies.  That's a fancy way of saying that
+    the 'twisted' package doesn't want to include the 'twisted.conch'
+    package, so it will pass ['conch'] as the value.
+
+    'parent' is necessary if you're distributing a subpackage like
+    twisted.conch.  'dname' should point to 'twisted/conch' and 'parent'
+    should point to 'twisted'.  This ensures that your data_files are
+    generated correctly, only using relative paths for the first element
+    of the tuple ('twisted/conch/*').
+    The default 'parent' is the current working directory.
+    """
+    parent = parent or "."
+    ignore = ignore or []
+    result = []
+    for directory, subdirectories, filenames in os.walk(dname):
+        resultfiles = []
+        for exname in EXCLUDE_NAMES:
+            if exname in subdirectories:
+                subdirectories.remove(exname)
+        for ig in ignore:
+            if ig in subdirectories:
+                subdirectories.remove(ig)
+        for filename in _filterNames(filenames):
+            resultfiles.append(filename)
+        if resultfiles:
+            result.append((relativeTo(parent, directory),
+                           [relativeTo(parent,
+                                       os.path.join(directory, filename))
+                            for filename in resultfiles]))
+    return result
+
+
+def getExtensions():
+    """
+    Get all extensions from core and all subprojects.
+    """
+    extensions = []
+
+    if not sys.platform.startswith('java'):
+        for dir in os.listdir("twisted") + [""]:
+            topfiles = os.path.join("twisted", dir, "topfiles")
+            if os.path.isdir(topfiles):
+                ns = {}
+                setup_py = os.path.join(topfiles, "setup.py")
+                execfile(setup_py, ns, ns)
+                if "extensions" in ns:
+                    extensions.extend(ns["extensions"])
+
+    return extensions
+
+
+def getPackages(dname, pkgname=None, results=None, ignore=None, parent=None):
+    """
+    Get all packages which are under dname. This is necessary for
+    Python 2.2's distutils. Pretty similar arguments to getDataFiles,
+    including 'parent'.
+    """
+    parent = parent or ""
+    prefix = []
+    if parent:
+        prefix = [parent]
+    bname = os.path.basename(dname)
+    ignore = ignore or []
+    if bname in ignore:
+        return []
+    if results is None:
+        results = []
+    if pkgname is None:
+        pkgname = []
+    subfiles = os.listdir(dname)
+    abssubfiles = [os.path.join(dname, x) for x in subfiles]
+    if '__init__.py' in subfiles:
+        results.append(prefix + pkgname + [bname])
+        for subdir in filter(os.path.isdir, abssubfiles):
+            getPackages(subdir, pkgname=pkgname + [bname],
+                        results=results, ignore=ignore,
+                        parent=parent)
+    res = ['.'.join(result) for result in results]
+    return res
+
+
+
+def getAllScripts():
+    # "" is included because core scripts are directly in bin/
+    projects = [''] + [x for x in os.listdir('bin')
+                       if os.path.isdir(os.path.join("bin", x))
+                       and x in twisted_subprojects]
+    scripts = []
+    for i in projects:
+        scripts.extend(getScripts(i))
+    return scripts
+
+
+
+def getScripts(projname, basedir=''):
+    """
+    Returns a list of scripts for a Twisted subproject; this works both in
+    an SVN checkout and in a project-specific tarball.
+    """
+    scriptdir = os.path.join(basedir, 'bin', projname)
+    if not os.path.isdir(scriptdir):
+        # Probably a project-specific tarball, in which case only this
+        # project's bins are included in 'bin'
+        scriptdir = os.path.join(basedir, 'bin')
+        if not os.path.isdir(scriptdir):
+            return []
+    thingies = os.listdir(scriptdir)
+    for specialExclusion in ['.svn', '_preamble.py', '_preamble.pyc']:
+        if specialExclusion in thingies:
+            thingies.remove(specialExclusion)
+    return filter(os.path.isfile,
+                  [os.path.join(scriptdir, x) for x in thingies])
+
+
+## Helpers and distutils tweaks
+
+class build_scripts_twisted(build_scripts.build_scripts):
+    """
+    Renames scripts so they end with '.py' on Windows.
+    """
+    def run(self):
+        build_scripts.build_scripts.run(self)
+        if not os.name == "nt":
+            return
+        for f in os.listdir(self.build_dir):
+            fpath = os.path.join(self.build_dir, f)
+            if not fpath.endswith(".py"):
+                pypath = fpath + ".py"
+                if os.path.exists(pypath):
+                    os.unlink(pypath)
+                os.rename(fpath, pypath)
+
+
+
+class install_data_twisted(install_data.install_data):
+    """
+    I make sure data files are installed in the package directory.
+    """
+    def finalize_options(self):
+        self.set_undefined_options('install',
+            ('install_lib', 'install_dir')
+        )
+        install_data.install_data.finalize_options(self)
+
+
+
+class build_ext_twisted(build_ext.build_ext):
+    """
+    Allow subclasses to easily detect and customize Extensions to
+    build at install-time.
+    """
+
+    def prepare_extensions(self):
+        """
+        Prepare the C{self.extensions} attribute (used by
+        L{build_ext.build_ext}) by checking which extensions in
+        L{conditionalExtensions} should be built.  In addition, if we are
+        building on NT, define the WIN32 macro to 1.
+        """
+        # always define WIN32 under Windows
+        if os.name == 'nt':
+            self.define_macros = [("WIN32", 1)]
+        else:
+            self.define_macros = []
+        self.extensions = [x for x in self.conditionalExtensions
+                           if x.condition(self)]
+        for ext in self.extensions:
+            ext.define_macros.extend(self.define_macros)
+
+
+    def build_extensions(self):
+        """
+        Check to see which extension modules to build and then build them.
+        """
+        self.prepare_extensions()
+        build_ext.build_ext.build_extensions(self)
+
+
+    def _remove_conftest(self):
+        for filename in ("conftest.c", "conftest.o", "conftest.obj"):
+            try:
+                os.unlink(filename)
+            except EnvironmentError:
+                pass
+
+
+    def _compile_helper(self, content):
+        conftest = open("conftest.c", "w")
+        try:
+            conftest.write(content)
+            conftest.close()
+
+            try:
+                self.compiler.compile(["conftest.c"], output_dir='')
+            except CompileError:
+                return False
+            return True
+        finally:
+            self._remove_conftest()
+
+
+    def _check_header(self, header_name):
+        """
+        Check if the given header can be included by trying to compile a file
+        that contains only an #include line.
+        """
+        self.compiler.announce("checking for %s ..." % header_name, 0)
+        return self._compile_helper("#include <%s>\n" % header_name)
+
+
+
+def _checkCPython(sys=sys, platform=platform):
+    """
+    Checks if this implementation is CPython.
+
+    On recent versions of Python, will use C{platform.python_implementation}.
+    On 2.5, it will try to extract the implementation from sys.subversion. On
+    older versions (currently the only supported older version is 2.4), it checks
+    if C{__pypy__} is in C{sys.modules}, since PyPy is the implementation we
+    really care about. If it isn't, assumes CPython.
+
+    This takes C{sys} and C{platform} kwargs that by default use the real
+    modules. You shouldn't care about these -- they are for testing purposes
+    only.
+
+    @return: C{False} if the implementation is definitely not CPython, C{True}
+        otherwise.
+    """
+    try:
+        return platform.python_implementation() == "CPython"
+    except AttributeError:
+        # For 2.5:
+        try:
+            implementation, _, _ = sys.subversion
+            return implementation == "CPython"
+        except AttributeError:
+            pass
+
+        # Are we on Pypy?
+        if "__pypy__" in sys.modules:
+            return False
+
+        # No? Well, then we're *probably* on CPython.
+        return True
+
+
+_isCPython = _checkCPython()
+
+
+def _hasEpoll(builder):
+    """
+    Checks if the header for building epoll (C{sys/epoll.h}) is available.
+
+    @return: C{True} if the header is available, C{False} otherwise.
+    """
+    return builder._check_header("sys/epoll.h")
diff --git a/ThirdParty/Twisted/twisted/python/failure.py b/ThirdParty/Twisted/twisted/python/failure.py
new file mode 100644
index 0000000..e79862d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/failure.py
@@ -0,0 +1,654 @@
+# -*- test-case-name: twisted.test.test_failure -*-
+# See also test suite twisted.test.test_pbfailure
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Asynchronous-friendly error mechanism.
+
+See L{Failure}.
+"""
+
+from __future__ import division, absolute_import
+
+# System Imports
+import sys
+import linecache
+import inspect
+import opcode
+from inspect import getmro
+
+from twisted.python.compat import _PY3, NativeStringIO as StringIO
+from twisted.python import _reflectpy3 as reflect
+
+count = 0
+traceupLength = 4
+
+class DefaultException(Exception):
+    pass
+
+def format_frames(frames, write, detail="default"):
+    """Format and write frames.
+
+    @param frames: a list of frames as used by Failure.frames, with
+        each frame being a list of
+        (funcName, fileName, lineNumber, locals.items(), globals.items())
+    @type frames: list
+    @param write: this will be called with formatted strings.
+    @type write: callable
+    @param detail: Four detail levels are available:
+        default, brief, verbose, and verbose-vars-not-captured.
+        C{Failure.printDetailedTraceback} uses the latter when the caller asks
+        for verbose, but no vars were captured, so that an explicit warning
+        about the missing data is shown.
+    @type detail: string
+    """
+    if detail not in ('default', 'brief', 'verbose',
+                      'verbose-vars-not-captured'):
+        raise ValueError(
+            "Detail must be default, brief, verbose, or "
+            "verbose-vars-not-captured. (not %r)" % (detail,))
+    w = write
+    if detail == "brief":
+        for method, filename, lineno, localVars, globalVars in frames:
+            w('%s:%s:%s\n' % (filename, lineno, method))
+    elif detail == "default":
+        for method, filename, lineno, localVars, globalVars in frames:
+            w( '  File "%s", line %s, in %s\n' % (filename, lineno, method))
+            w( '    %s\n' % linecache.getline(filename, lineno).strip())
+    elif detail == "verbose-vars-not-captured":
+        for method, filename, lineno, localVars, globalVars in frames:
+            w("%s:%d: %s(...)\n" % (filename, lineno, method))
+        w(' [Capture of Locals and Globals disabled (use captureVars=True)]\n')
+    elif detail == "verbose":
+        for method, filename, lineno, localVars, globalVars in frames:
+            w("%s:%d: %s(...)\n" % (filename, lineno, method))
+            w(' [ Locals ]\n')
+            # Note: the repr(val) was (self.pickled and val) or repr(val)))
+            for name, val in localVars:
+                w("  %s : %s\n" %  (name, repr(val)))
+            w(' ( Globals )\n')
+            for name, val in globalVars:
+                w("  %s : %s\n" %  (name, repr(val)))
+
+# slyphon: i have a need to check for this value in trial
+#          so I made it a module-level constant
+EXCEPTION_CAUGHT_HERE = "--- <exception caught here> ---"
+
+
+
+class NoCurrentExceptionError(Exception):
+    """
+    Raised when trying to create a Failure from the current interpreter
+    exception state and there is no current exception state.
+    """
+
+
+class _Traceback(object):
+    """
+    Fake traceback object which can be passed to functions in the standard
+    library L{traceback} module.
+    """
+
+    def __init__(self, frames):
+        """
+        Construct a fake traceback object using a list of frames. Note that
+        although frames generally include locals and globals, this information
+        is not kept by this object, since locals and globals are not used in
+        standard tracebacks.
+
+        @param frames: [(methodname, filename, lineno, locals, globals), ...]
+        """
+        assert len(frames) > 0, "Must pass some frames"
+        head, frames = frames[0], frames[1:]
+        name, filename, lineno, localz, globalz = head
+        self.tb_frame = _Frame(name, filename)
+        self.tb_lineno = lineno
+        if len(frames) == 0:
+            self.tb_next = None
+        else:
+            self.tb_next = _Traceback(frames)
+
+
+class _Frame(object):
+    """
+    A fake frame object, used by L{_Traceback}.
+
+    @ivar f_code: fake L{code<types.CodeType>} object
+    @ivar f_globals: fake f_globals dictionary (usually empty)
+    @ivar f_locals: fake f_locals dictionary (usually empty)
+    """
+
+    def __init__(self, name, filename):
+        """
+        @param name: method/function name for this frame.
+        @type name: C{str}
+        @param filename: filename for this frame.
+        @type filename: C{str}
+        """
+        self.f_code = _Code(name, filename)
+        self.f_globals = {}
+        self.f_locals = {}
+
+
+class _Code(object):
+    """
+    A fake code object, used by L{_Traceback} via L{_Frame}.
+    """
+    def __init__(self, name, filename):
+        self.co_name = name
+        self.co_filename = filename
+
+
+class Failure:
+    """
+    A basic abstraction for an error that has occurred.
+
+    This is necessary because Python's built-in error mechanisms are
+    inconvenient for asynchronous communication.
+
+    The C{stack} and C{frames} attributes contain frames.  Each frame is a tuple
+    of (funcName, fileName, lineNumber, localsItems, globalsItems), where
+    localsItems and globalsItems are the contents of
+    C{locals().items()}/C{globals().items()} for that frame, or an empty tuple
+    if those details were not captured.
+
+    @ivar value: The exception instance responsible for this failure.
+    @ivar type: The exception's class.
+    @ivar stack: list of frames, innermost last, excluding C{Failure.__init__}.
+    @ivar frames: list of frames, innermost first.
+    """
+
+    pickled = 0
+    stack = None
+
+    # The opcode of "yield" in Python bytecode. We need this in _findFailure in
+    # order to identify whether an exception was thrown by a
+    # throwExceptionIntoGenerator.
+    _yieldOpcode = chr(opcode.opmap["YIELD_VALUE"])
+
+    def __init__(self, exc_value=None, exc_type=None, exc_tb=None,
+                 captureVars=False):
+        """
+        Initialize me with an explanation of the error.
+
+        By default, this will use the current C{exception}
+        (L{sys.exc_info}()).  However, if you want to specify a
+        particular kind of failure, you can pass an exception as an
+        argument.
+
+        If no C{exc_value} is passed, then an "original" C{Failure} will
+        be searched for. If the current exception handler that this
+        C{Failure} is being constructed in is handling an exception
+        raised by L{raiseException}, then this C{Failure} will act like
+        the original C{Failure}.
+
+        For C{exc_tb} only L{traceback} instances or C{None} are allowed.
+        If C{None} is supplied for C{exc_value}, the value of C{exc_tb} is
+        ignored, otherwise if C{exc_tb} is C{None}, it will be found from
+        execution context (ie, L{sys.exc_info}).
+
+        @param captureVars: if set, capture locals and globals of stack
+            frames.  This is pretty slow, and makes no difference unless you
+            are going to use L{printDetailedTraceback}.
+        """
+        global count
+        count = count + 1
+        self.count = count
+        self.type = self.value = tb = None
+        self.captureVars = captureVars
+
+        if isinstance(exc_value, str) and exc_type is None:
+            raise TypeError("Strings are not supported by Failure")
+
+        stackOffset = 0
+
+        if exc_value is None:
+            exc_value = self._findFailure()
+
+        if exc_value is None:
+            self.type, self.value, tb = sys.exc_info()
+            if self.type is None:
+                raise NoCurrentExceptionError()
+            stackOffset = 1
+        elif exc_type is None:
+            if isinstance(exc_value, Exception):
+                self.type = exc_value.__class__
+            else: #allow arbitrary objects.
+                self.type = type(exc_value)
+            self.value = exc_value
+        else:
+            self.type = exc_type
+            self.value = exc_value
+        if isinstance(self.value, Failure):
+            self.__dict__ = self.value.__dict__
+            return
+        if tb is None:
+            if exc_tb:
+                tb = exc_tb
+            elif _PY3:
+                tb = self.value.__traceback__
+
+        frames = self.frames = []
+        stack = self.stack = []
+
+        # added 2003-06-23 by Chris Armstrong. Yes, I actually have a
+        # use case where I need this traceback object, and I've made
+        # sure that it'll be cleaned up.
+        self.tb = tb
+
+        if tb:
+            f = tb.tb_frame
+        elif not isinstance(self.value, Failure):
+            # we don't do frame introspection since it's expensive,
+            # and if we were passed a plain exception with no
+            # traceback, it's not useful anyway
+            f = stackOffset = None
+
+        while stackOffset and f:
+            # This excludes this Failure.__init__ frame from the
+            # stack, leaving it to start with our caller instead.
+            f = f.f_back
+            stackOffset -= 1
+
+        # Keeps the *full* stack.  Formerly in spread.pb.print_excFullStack:
+        #
+        #   The need for this function arises from the fact that several
+        #   PB classes have the peculiar habit of discarding exceptions
+        #   with bareword "except:"s.  This premature exception
+        #   catching means tracebacks generated here don't tend to show
+        #   what called upon the PB object.
+
+        while f:
+            if captureVars:
+                localz = f.f_locals.copy()
+                if f.f_locals is f.f_globals:
+                    globalz = {}
+                else:
+                    globalz = f.f_globals.copy()
+                for d in globalz, localz:
+                    if "__builtins__" in d:
+                        del d["__builtins__"]
+                localz = localz.items()
+                globalz = globalz.items()
+            else:
+                localz = globalz = ()
+            stack.insert(0, (
+                f.f_code.co_name,
+                f.f_code.co_filename,
+                f.f_lineno,
+                localz,
+                globalz,
+                ))
+            f = f.f_back
+
+        while tb is not None:
+            f = tb.tb_frame
+            if captureVars:
+                localz = f.f_locals.copy()
+                if f.f_locals is f.f_globals:
+                    globalz = {}
+                else:
+                    globalz = f.f_globals.copy()
+                for d in globalz, localz:
+                    if "__builtins__" in d:
+                        del d["__builtins__"]
+                localz = list(localz.items())
+                globalz = list(globalz.items())
+            else:
+                localz = globalz = ()
+            frames.append((
+                f.f_code.co_name,
+                f.f_code.co_filename,
+                tb.tb_lineno,
+                localz,
+                globalz,
+                ))
+            tb = tb.tb_next
+        if inspect.isclass(self.type) and issubclass(self.type, Exception):
+            parentCs = getmro(self.type)
+            self.parents = list(map(reflect.qual, parentCs))
+        else:
+            self.parents = [self.type]
+
+    def trap(self, *errorTypes):
+        """Trap this failure if its type is in a predetermined list.
+
+        This allows you to trap a Failure in an error callback.  It will be
+        automatically re-raised if it is not a type that you expect.
+
+        The reason for having this particular API is because it's very useful
+        in Deferred errback chains::
+
+            def _ebFoo(self, failure):
+                r = failure.trap(Spam, Eggs)
+                print 'The Failure is due to either Spam or Eggs!'
+                if r == Spam:
+                    print 'Spam did it!'
+                elif r == Eggs:
+                    print 'Eggs did it!'
+
+        If the failure is not a Spam or an Eggs, then the Failure will be
+        'passed on' to the next errback. In Python 2 the Failure will be
+        raised; in Python 3 the underlying exception will be re-raised.
+
+        @type errorTypes: L{Exception}
+        """
+        error = self.check(*errorTypes)
+        if not error:
+            if _PY3:
+                self.raiseException()
+            else:
+                raise self
+        return error
+
+    def check(self, *errorTypes):
+        """Check if this failure's type is in a predetermined list.
+
+        @type errorTypes: list of L{Exception} classes or
+                          fully-qualified class names.
+        @returns: the matching L{Exception} type, or None if no match.
+        """
+        for error in errorTypes:
+            err = error
+            if inspect.isclass(error) and issubclass(error, Exception):
+                err = reflect.qual(error)
+            if err in self.parents:
+                return error
+        return None
+
+
+    # It would be nice to use twisted.python.compat.reraise, but that breaks
+    # the stack exploration in _findFailure; possibly this can be fixed in
+    # #5931.
+    if _PY3:
+        def raiseException(self):
+            raise self.value.with_traceback(self.tb)
+    else:
+        exec("""def raiseException(self):
+    raise self.type, self.value, self.tb""")
+
+    raiseException.__doc__ = (
+        """
+        raise the original exception, preserving traceback
+        information if available.
+        """)
+
+
+    def throwExceptionIntoGenerator(self, g):
+        """
+        Throw the original exception into the given generator,
+        preserving traceback information if available.
+
+        @return: The next value yielded from the generator.
+        @raise StopIteration: If there are no more values in the generator.
+        @raise anything else: Anything that the generator raises.
+        """
+        return g.throw(self.type, self.value, self.tb)
+
+
+    def _findFailure(cls):
+        """
+        Find the failure that represents the exception currently in context.
+        """
+        tb = sys.exc_info()[-1]
+        if not tb:
+            return
+
+        secondLastTb = None
+        lastTb = tb
+        while lastTb.tb_next:
+            secondLastTb = lastTb
+            lastTb = lastTb.tb_next
+
+        lastFrame = lastTb.tb_frame
+
+        # NOTE: f_locals.get('self') is used rather than
+        # f_locals['self'] because psyco frames do not contain
+        # anything in their locals() dicts.  psyco makes debugging
+        # difficult anyhow, so losing the Failure objects (and thus
+        # the tracebacks) here when it is used is not that big a deal.
+
+        # handle raiseException-originated exceptions
+        if lastFrame.f_code is cls.raiseException.__code__:
+            return lastFrame.f_locals.get('self')
+
+        # handle throwExceptionIntoGenerator-originated exceptions
+        # this is tricky, and differs if the exception was caught
+        # inside the generator, or above it:
+
+        # it is only really originating from
+        # throwExceptionIntoGenerator if the bottom of the traceback
+        # is a yield.
+        # Pyrex and Cython extensions create traceback frames with no
+        # co_code, but they can't yield so we know it's okay to just
+        # return here.
+        if ((not lastFrame.f_code.co_code) or
+            lastFrame.f_code.co_code[lastTb.tb_lasti] != cls._yieldOpcode):
+            return
+
+        # if the exception was caught above the generator.throw
+        # (outside the generator), it will appear in the tb (as the
+        # second last item):
+        if secondLastTb:
+            frame = secondLastTb.tb_frame
+            if frame.f_code is cls.throwExceptionIntoGenerator.__code__:
+                return frame.f_locals.get('self')
+
+        # if the exception was caught below the generator.throw
+        # (inside the generator), it will appear in the frames' linked
+        # list, above the top-level traceback item (which must be the
+        # generator frame itself, thus its caller is
+        # throwExceptionIntoGenerator).
+        frame = tb.tb_frame.f_back
+        if frame and frame.f_code is cls.throwExceptionIntoGenerator.__code__:
+            return frame.f_locals.get('self')
+
+    _findFailure = classmethod(_findFailure)
+
+    def __repr__(self):
+        return "<%s %s>" % (self.__class__, self.type)
+
+    def __str__(self):
+        return "[Failure instance: %s]" % self.getBriefTraceback()
+
+    def __getstate__(self):
+        """Avoid pickling objects in the traceback.
+        """
+        if self.pickled:
+            return self.__dict__
+        c = self.__dict__.copy()
+
+        c['frames'] = [
+            [
+                v[0], v[1], v[2],
+                _safeReprVars(v[3]),
+                _safeReprVars(v[4]),
+            ] for v in self.frames
+        ]
+
+        # added 2003-06-23. See comment above in __init__
+        c['tb'] = None
+
+        if self.stack is not None:
+            # XXX: This is a band-aid.  I can't figure out where these
+            # (failure.stack is None) instances are coming from.
+            c['stack'] = [
+                [
+                    v[0], v[1], v[2],
+                    _safeReprVars(v[3]),
+                    _safeReprVars(v[4]),
+                ] for v in self.stack
+            ]
+
+        c['pickled'] = 1
+        return c
+
+
+    def cleanFailure(self):
+        """
+        Remove references to other objects, replacing them with strings.
+
+        On Python 3, this will also set the C{__traceback__} attribute of the
+        exception instance to C{None}.
+        """
+        self.__dict__ = self.__getstate__()
+        if _PY3:
+            self.value.__traceback__ = None
+
+
+    def getTracebackObject(self):
+        """
+        Get an object that represents this Failure's stack that can be passed
+        to traceback.extract_tb.
+
+        If the original traceback object is still present, return that. If this
+        traceback object has been lost but we still have the information,
+        return a fake traceback object (see L{_Traceback}). If there is no
+        traceback information at all, return None.
+        """
+        if self.tb is not None:
+            return self.tb
+        elif len(self.frames) > 0:
+            return _Traceback(self.frames)
+        else:
+            return None
+
+    def getErrorMessage(self):
+        """Get a string of the exception which caused this Failure."""
+        if isinstance(self.value, Failure):
+            return self.value.getErrorMessage()
+        return reflect.safe_str(self.value)
+
+    def getBriefTraceback(self):
+        io = StringIO()
+        self.printBriefTraceback(file=io)
+        return io.getvalue()
+
+    def getTraceback(self, elideFrameworkCode=0, detail='default'):
+        io = StringIO()
+        self.printTraceback(file=io, elideFrameworkCode=elideFrameworkCode, detail=detail)
+        return io.getvalue()
+
+
+    def printTraceback(self, file=None, elideFrameworkCode=False, detail='default'):
+        """
+        Emulate Python's standard error reporting mechanism.
+
+        @param file: If specified, a file-like object to which to write the
+            traceback.
+
+        @param elideFrameworkCode: A flag indicating whether to attempt to
+            remove uninteresting frames from within Twisted itself from the
+            output.
+
+        @param detail: A string indicating how much information to include
+            in the traceback.  Must be one of C{'brief'}, C{'default'}, or
+            C{'verbose'}.
+        """
+        if file is None:
+            from twisted.python import log
+            file = log.logerr
+        w = file.write
+
+        if detail == 'verbose' and not self.captureVars:
+            # We don't have any locals or globals, so rather than show them as
+            # empty make the output explicitly say that we don't have them at
+            # all.
+            formatDetail = 'verbose-vars-not-captured'
+        else:
+            formatDetail = detail
+
+        # Preamble
+        if detail == 'verbose':
+            w( '*--- Failure #%d%s---\n' %
+               (self.count,
+                (self.pickled and ' (pickled) ') or ' '))
+        elif detail == 'brief':
+            if self.frames:
+                hasFrames = 'Traceback'
+            else:
+                hasFrames = 'Traceback (failure with no frames)'
+            w("%s: %s: %s\n" % (
+                    hasFrames,
+                    reflect.safe_str(self.type),
+                    reflect.safe_str(self.value)))
+        else:
+            w( 'Traceback (most recent call last):\n')
+
+        # Frames, formatted in appropriate style
+        if self.frames:
+            if not elideFrameworkCode:
+                format_frames(self.stack[-traceupLength:], w, formatDetail)
+                w("%s\n" % (EXCEPTION_CAUGHT_HERE,))
+            format_frames(self.frames, w, formatDetail)
+        elif not detail == 'brief':
+            # Yeah, it's not really a traceback, despite looking like one...
+            w("Failure: ")
+
+        # postamble, if any
+        if not detail == 'brief':
+            w("%s: %s\n" % (reflect.qual(self.type),
+                            reflect.safe_str(self.value)))
+
+        # chaining
+        if isinstance(self.value, Failure):
+            # TODO: indentation for chained failures?
+            file.write(" (chained Failure)\n")
+            self.value.printTraceback(file, elideFrameworkCode, detail)
+        if detail == 'verbose':
+            w('*--- End of Failure #%d ---\n' % self.count)
+
+
+    def printBriefTraceback(self, file=None, elideFrameworkCode=0):
+        """Print a traceback as densely as possible.
+        """
+        self.printTraceback(file, elideFrameworkCode, detail='brief')
+
+    def printDetailedTraceback(self, file=None, elideFrameworkCode=0):
+        """Print a traceback with detailed locals and globals information.
+        """
+        self.printTraceback(file, elideFrameworkCode, detail='verbose')
+
+
+def _safeReprVars(varsDictItems):
+    """
+    Convert a list of (name, object) pairs into (name, repr) pairs.
+
+    L{twisted.python.reflect.safe_repr} is used to generate the repr, so no
+    exceptions will be raised by faulty C{__repr__} methods.
+
+    @param varsDictItems: a sequence of (name, value) pairs as returned by e.g.
+        C{locals().items()}.
+    @returns: a sequence of (name, repr) pairs.
+    """
+    return [(name, reflect.safe_repr(obj)) for (name, obj) in varsDictItems]
+
+
+# slyphon: make post-morteming exceptions tweakable
+
+DO_POST_MORTEM = True
+
+def _debuginit(self, exc_value=None, exc_type=None, exc_tb=None,
+               captureVars=False,
+               Failure__init__=Failure.__init__):
+    """
+    Initialize failure object, possibly spawning pdb.
+    """
+    if (exc_value, exc_type, exc_tb) == (None, None, None):
+        exc = sys.exc_info()
+        if not exc[0] == self.__class__ and DO_POST_MORTEM:
+            try:
+                strrepr = str(exc[1])
+            except:
+                strrepr = "broken str"
+            print("Jumping into debugger for post-mortem of exception '%s':" % (strrepr,))
+            import pdb
+            pdb.post_mortem(exc[2])
+    Failure__init__(self, exc_value, exc_type, exc_tb, captureVars)
+
+
+def startDebugMode():
+    """Enable debug hooks for Failures."""
+    Failure.__init__ = _debuginit
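+
+
+# Illustrative usage sketch, assuming the Failure API defined above; the
+# exception and handling shown are hypothetical and this helper is never
+# called by the module itself.
+def _exampleFailureUsage():
+    """
+    Capture the current exception as a Failure and inspect it.
+    """
+    try:
+        raise ValueError("something went wrong")
+    except ValueError:
+        f = Failure()  # captures sys.exc_info() automatically
+
+    f.trap(ValueError)  # re-raises unless the type matches
+    return f.getErrorMessage(), f.getBriefTraceback()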
diff --git a/ThirdParty/Twisted/twisted/python/fakepwd.py b/ThirdParty/Twisted/twisted/python/fakepwd.py
new file mode 100644
index 0000000..183b30c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/fakepwd.py
@@ -0,0 +1,219 @@
+# -*- test-case-name: twisted.python.test.test_fakepwd -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+L{twisted.python.fakepwd} provides a fake implementation of the L{pwd} API.
+"""
+
+
+__all__ = ['UserDatabase', 'ShadowDatabase']
+
+
+class _UserRecord(object):
+    """
+    L{_UserRecord} holds the user data for a single user in L{UserDatabase}.
+    It corresponds to L{pwd.struct_passwd}.  See that class for attribute
+    documentation.
+    """
+    def __init__(self, name, password, uid, gid, gecos, home, shell):
+        self.pw_name = name
+        self.pw_passwd = password
+        self.pw_uid = uid
+        self.pw_gid = gid
+        self.pw_gecos = gecos
+        self.pw_dir = home
+        self.pw_shell = shell
+
+
+    def __len__(self):
+        return 7
+
+
+    def __getitem__(self, index):
+        return (
+            self.pw_name, self.pw_passwd, self.pw_uid,
+            self.pw_gid, self.pw_gecos, self.pw_dir, self.pw_shell)[index]
+
+
+
+class UserDatabase(object):
+    """
+    L{UserDatabase} holds traditional POSIX user data in memory and makes it
+    available via the same API as L{pwd}.
+
+    @ivar _users: A C{list} of L{_UserRecord} instances holding all user data
+        added to this database.
+    """
+    def __init__(self):
+        self._users = []
+
+
+    def addUser(self, username, password, uid, gid, gecos, home, shell):
+        """
+        Add a new user record to this database.
+
+        @param username: The value for the C{pw_name} field of the user
+            record to add.
+        @type username: C{str}
+
+        @param password: The value for the C{pw_passwd} field of the user
+            record to add.
+        @type password: C{str}
+
+        @param uid: The value for the C{pw_uid} field of the user record to
+            add.
+        @type uid: C{int}
+
+        @param gid: The value for the C{pw_gid} field of the user record to
+            add.
+        @type gid: C{int}
+
+        @param gecos: The value for the C{pw_gecos} field of the user record
+            to add.
+        @type gecos: C{str}
+
+        @param home: The value for the C{pw_dir} field of the user record to
+            add.
+        @type home: C{str}
+
+        @param shell: The value for the C{pw_shell} field of the user record to
+            add.
+        @type shell: C{str}
+        """
+        self._users.append(_UserRecord(
+            username, password, uid, gid, gecos, home, shell))
+
+
+    def getpwuid(self, uid):
+        """
+        Return the user record corresponding to the given uid.
+        """
+        for entry in self._users:
+            if entry.pw_uid == uid:
+                return entry
+        raise KeyError()
+
+
+    def getpwnam(self, name):
+        """
+        Return the user record corresponding to the given username.
+        """
+        for entry in self._users:
+            if entry.pw_name == name:
+                return entry
+        raise KeyError()
+
+
+    def getpwall(self):
+        """
+        Return a list of all user records.
+        """
+        return self._users
+
+
+
+class _ShadowRecord(object):
+    """
+    L{_ShadowRecord} holds the shadow user data for a single user in
+    L{ShadowDatabase}.  It corresponds to C{spwd.struct_spwd}.  See that class
+    for attribute documentation.
+    """
+    def __init__(self, username, password, lastChange, min, max, warn, inact,
+                 expire, flag):
+        self.sp_nam = username
+        self.sp_pwd = password
+        self.sp_lstchg = lastChange
+        self.sp_min = min
+        self.sp_max = max
+        self.sp_warn = warn
+        self.sp_inact = inact
+        self.sp_expire = expire
+        self.sp_flag = flag
+
+
+    def __len__(self):
+        return 9
+
+
+    def __getitem__(self, index):
+        return (
+            self.sp_nam, self.sp_pwd, self.sp_lstchg, self.sp_min,
+            self.sp_max, self.sp_warn, self.sp_inact, self.sp_expire,
+            self.sp_flag)[index]
+
+
+
+class ShadowDatabase(object):
+    """
+    L{ShadowDatabase} holds a shadow user database in memory and makes it
+    available via the same API as C{spwd}.
+
+    @ivar _users: A C{list} of L{_ShadowRecord} instances holding all user data
+        added to this database.
+
+    @since: 12.0
+    """
+    def __init__(self):
+        self._users = []
+
+
+    def addUser(self, username, password, lastChange, min, max, warn, inact,
+                expire, flag):
+        """
+        Add a new user record to this database.
+
+        @param username: The value for the C{sp_nam} field of the user record to
+            add.
+        @type username: C{str}
+
+        @param password: The value for the C{sp_pwd} field of the user record to
+            add.
+        @type password: C{str}
+
+        @param lastChange: The value for the C{sp_lstchg} field of the user
+            record to add.
+        @type lastChange: C{int}
+
+        @param min: The value for the C{sp_min} field of the user record to add.
+        @type min: C{int}
+
+        @param max: The value for the C{sp_max} field of the user record to add.
+        @type max: C{int}
+
+        @param warn: The value for the C{sp_warn} field of the user record to
+            add.
+        @type warn: C{int}
+
+        @param inact: The value for the C{sp_inact} field of the user record to
+            add.
+        @type inact: C{int}
+
+        @param expire: The value for the C{sp_expire} field of the user record
+            to add.
+        @type expire: C{int}
+
+        @param flag: The value for the C{sp_flag} field of the user record to
+            add.
+        @type flag: C{int}
+        """
+        self._users.append(_ShadowRecord(
+                username, password, lastChange,
+                min, max, warn, inact, expire, flag))
+
+
+    def getspnam(self, username):
+        """
+        Return the shadow user record corresponding to the given username.
+        """
+        for entry in self._users:
+            if entry.sp_nam == username:
+                return entry
+        raise KeyError
+
+
+    def getspall(self):
+        """
+        Return a list of all shadow user records.
+        """
+        return self._users
diff --git a/ThirdParty/Twisted/twisted/python/filepath.py b/ThirdParty/Twisted/twisted/python/filepath.py
new file mode 100644
index 0000000..e5d819c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/filepath.py
@@ -0,0 +1,1429 @@
+# -*- test-case-name: twisted.test.test_paths -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Object-oriented filesystem path representation.
+"""
+
+from __future__ import division, absolute_import
+
+import os
+import errno
+import random
+import base64
+from hashlib import sha1
+
+from os.path import isabs, exists, normpath, abspath, splitext
+from os.path import basename, dirname
+from os.path import join as joinpath
+from os import sep as slash
+from os import listdir, utime, stat
+
+from stat import S_ISREG, S_ISDIR, S_IMODE, S_ISBLK, S_ISSOCK
+from stat import S_IRUSR, S_IWUSR, S_IXUSR
+from stat import S_IRGRP, S_IWGRP, S_IXGRP
+from stat import S_IROTH, S_IWOTH, S_IXOTH
+
+from zope.interface import Interface, Attribute, implementer
+
+# Please keep this as light as possible on other Twisted imports; many, many
+# things import this module, and it would be good if it could easily be
+# modified for inclusion in the standard library.  --glyph
+
+from twisted.python.compat import comparable, cmp
+from twisted.python.runtime import platform
+
+from twisted.python.win32 import ERROR_FILE_NOT_FOUND, ERROR_PATH_NOT_FOUND
+from twisted.python.win32 import ERROR_INVALID_NAME, ERROR_DIRECTORY, O_BINARY
+from twisted.python.win32 import WindowsError
+
+from twisted.python._utilpy3 import FancyEqMixin
+
+
+_CREATE_FLAGS = (os.O_EXCL |
+                 os.O_CREAT |
+                 os.O_RDWR |
+                 O_BINARY)
+
+
+def _stub_islink(path):
+    """
+    Always return C{False} if the operating system does not support symlinks.
+
+    @param path: a path string.
+    @type path: L{str}
+    @return: C{False}
+    """
+    return False
+
+
+islink = getattr(os.path, 'islink', _stub_islink)
+randomBytes = os.urandom
+armor = base64.urlsafe_b64encode
+
+
+
+class IFilePath(Interface):
+    """
+    File path object.
+
+    A file path represents a location for a file-like object and can be
+    organized into a hierarchy; a file path can have children which are
+    themselves file paths.
+
+    A file path has a name which uniquely identifies it in the context of its
+    parent (if it has one); a file path cannot have two children with the same
+    name.  This name is referred to as the file path's "base name".
+
+    A series of such names can be used to locate nested children of a file path;
+    such a series is referred to as the child's "path", relative to the parent.
+    In this case, each name in the path is referred to as a "path segment"; the
+    child's base name is the last segment in the path.
+
+    When representing a file path as a string, a "path separator" is used to
+    delimit the path segments within the string.  For a file system path, that
+    would be C{os.sep}.
+
+    Note that the values of child names may be restricted.  For example, a file
+    system path will not allow the use of the path separator in a name, and
+    certain names (e.g. C{"."} and C{".."}) may be reserved or have special
+    meanings.
+
+    @since: 12.1
+    """
+    sep = Attribute("The path separator to use in string representations")
+
+    def child(name):
+        """
+        Obtain a direct child of this file path.  The child may or may not
+        exist.
+
+        @param name: the name of a child of this path. C{name} must be a direct
+            child of this path and may not contain a path separator.
+        @return: the child of this path with the given C{name}.
+        @raise InsecurePath: if C{name} describes a file path that is not a
+            direct child of this file path.
+        """
+
+    def open(mode="r"):
+        """
+        Opens this file path with the given mode.
+        @return: a file-like object.
+        @raise Exception: if this file path cannot be opened.
+        """
+
+    def changed():
+        """
+        Clear any cached information about the state of this path on disk.
+        """
+
+    def getsize():
+        """
+        @return: the size of the file at this file path in bytes.
+        @raise Exception: if the size cannot be obtained.
+        """
+
+    def getModificationTime():
+        """
+        Retrieve the time of the last modification to this file.
+
+        @return: a number of seconds from the epoch.
+        @rtype: float
+        """
+
+    def getStatusChangeTime():
+        """
+        Retrieve the time of the last status change for this file.
+
+        @return: a number of seconds from the epoch.
+        @rtype: float
+        """
+
+    def getAccessTime():
+        """
+        Retrieve the time that this file was last accessed.
+
+        @return: a number of seconds from the epoch.
+        @rtype: float
+        """
+
+    def exists():
+        """
+        @return: C{True} if the file at this file path exists, C{False}
+            otherwise.
+        """
+
+    def isdir():
+        """
+        @return: C{True} if the file at this file path is a directory, C{False}
+            otherwise.
+        """
+
+    def isfile():
+        """
+        @return: C{True} if the file at this file path is a regular file,
+            C{False} otherwise.
+        """
+
+    def children():
+        """
+        @return: a sequence of the children of the directory at this file path.
+        @raise Exception: if the file at this file path is not a directory.
+        """
+
+    def basename():
+        """
+        @return: the base name of this file path.
+        """
+
+    def parent():
+        """
+        A file path for the directory containing the file at this file path.
+        """
+
+    def sibling(name):
+        """
+        A file path for a sibling of this file path: a path with the same
+        parent directory but a base name of C{name}.
+
+        @param name: the name of a sibling of this path. C{name} must be a direct
+            sibling of this path and may not contain a path separator.
+
+        @return: a sibling file path of this one.
+        """
+
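+# [Editor's sketch, not part of upstream Twisted]  Code written against
+# L{IFilePath} stays agnostic about whether it is handed a filesystem path, a
+# zip path, or a test double.  For example, a hypothetical helper (kept as a
+# comment so it does not execute at import time):
+#
+#     def totalSize(path):
+#         """Sum the sizes of all regular files under an IFilePath."""
+#         if path.isfile():
+#             return path.getsize()
+#         if path.isdir():
+#             return sum(totalSize(child) for child in path.children())
+#         return 0
+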
+class InsecurePath(Exception):
+    """
+    Error that is raised when the path provided to FilePath is invalid.
+    """
+
+
+
+class LinkError(Exception):
+    """
+    An error with symlinks - either that there are cyclical symlinks or that
+    symlinks are not supported on this platform.
+    """
+
+
+
+class UnlistableError(OSError):
+    """
+    An exception which is used to distinguish between errors which mean 'this
+    is not a directory you can list' and other, more catastrophic errors.
+
+    This error will try to look as much like the original error as possible,
+    while still being catchable as an independent type.
+
+    @ivar originalException: the actual original exception instance, either an
+    L{OSError} or a L{WindowsError}.
+    """
+    def __init__(self, originalException):
+        """
+        Create an UnlistableError exception.
+
+        @param originalException: an instance of OSError.
+        """
+        self.__dict__.update(originalException.__dict__)
+        self.originalException = originalException
+
+
+
+class _WindowsUnlistableError(UnlistableError, WindowsError):
+    """
+    This exception is raised on Windows, for compatibility with previous
+    releases of FilePath where unportable programs may have done "except
+    WindowsError:" around a call to children().
+
+    It is private because all application code may portably catch
+    L{UnlistableError} instead.
+    """
+
+
+
+def _secureEnoughString():
+    """
+    Create a pseudorandom, 16-character string for use in secure filenames.
+
+    @rtype: C{bytes}
+    """
+    return armor(sha1(randomBytes(64)).digest())[:16]
+
+
+
+class AbstractFilePath(object):
+    """
+    Abstract implementation of an IFilePath; must be completed by a subclass.
+
+    This class primarily exists to provide common implementations of certain
+    methods in IFilePath. It is *not* a required parent class for IFilePath
+    implementations, just a useful starting point.
+    """
+
+    def getContent(self):
+        """
+        Return the contents of the file at this path, read in binary mode.
+        """
+        fp = self.open()
+        try:
+            return fp.read()
+        finally:
+            fp.close()
+
+
+    def parents(self):
+        """
+        @return: an iterator of all the ancestors of this path, from the most
+        recent (its immediate parent) to the root of its filesystem.
+        """
+        path = self
+        parent = path.parent()
+        # root.parent() == root, so this means "are we the root"
+        while path != parent:
+            yield parent
+            path = parent
+            parent = parent.parent()
+
+
+    def children(self):
+        """
+        List the children of this path object.
+
+        @raise OSError: If an error occurs while listing the directory.  If the
+        error is 'serious', meaning that the operation failed due to an access
+        violation, exhaustion of some kind of resource (file descriptors or
+        memory), OSError or a platform-specific variant will be raised.
+
+        @raise UnlistableError: If the inability to list the directory is due
+        to this path not existing or not being a directory, the more specific
+        OSError subclass L{UnlistableError} is raised instead.
+
+        @return: an iterable of all currently-existing children of this object
+        accessible with the C{child} method.
+        """
+        try:
+            subnames = self.listdir()
+        except WindowsError as winErrObj:
+            # WindowsError is an OSError subclass, so if not for this clause
+            # the OSError clause below would be handling these.  Windows error
+            # codes aren't the same as POSIX error codes, so we need to handle
+            # them differently.
+
+            # Under Python 2.5 on Windows, WindowsError has a winerror
+            # attribute and an errno attribute.  The winerror attribute is
+            # bound to the Windows error code while the errno attribute is
+            # bound to a translation of that code to a perhaps equivalent POSIX
+            # error number.
+
+            # Under Python 2.4 on Windows, WindowsError only has an errno
+            # attribute.  It is bound to the Windows error code.
+
+            # For simplicity of code and to keep the number of paths through
+            # this suite minimal, we grab the Windows error code under either
+            # version.
+
+            # Furthermore, attempting to use os.listdir on a non-existent path
+            # in Python 2.4 will result in a Windows error code of
+            # ERROR_PATH_NOT_FOUND.  However, in Python 2.5,
+            # ERROR_FILE_NOT_FOUND results instead. -exarkun
+            winerror = getattr(winErrObj, 'winerror', winErrObj.errno)
+            if winerror not in (ERROR_PATH_NOT_FOUND,
+                                ERROR_FILE_NOT_FOUND,
+                                ERROR_INVALID_NAME,
+                                ERROR_DIRECTORY):
+                raise
+            raise _WindowsUnlistableError(winErrObj)
+        except OSError as ose:
+            if ose.errno not in (errno.ENOENT, errno.ENOTDIR):
+                # Other possible errors here, according to linux manpages:
+                # EACCES, EMIFLE, ENFILE, ENOMEM.  None of these seem like the
+                # sort of thing which should be handled normally. -glyph
+                raise
+            raise UnlistableError(ose)
+        return map(self.child, subnames)
+
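+    # [Editor's sketch, not part of upstream Twisted]  Callers of children()
+    # that only care about "there was nothing to list" can catch the narrower
+    # exception, e.g. (somePath is a placeholder):
+    #
+    #     try:
+    #         names = [c.basename() for c in somePath.children()]
+    #     except UnlistableError:
+    #         names = []
+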
+    def walk(self, descend=None):
+        """
+        Yield myself, then each of my children, and each of those children's
+        children in turn.  The optional argument C{descend} is a predicate that
+        takes a FilePath, and determines whether or not that FilePath is
+        traversed/descended into.  It will be called with each path for which
+        C{isdir} returns C{True}.  If C{descend} is not specified, all
+        directories will be traversed (including symbolic links which refer to
+        directories).
+
+        @param descend: A one-argument callable that will return True for
+            FilePaths that should be traversed, False otherwise.
+
+        @return: a generator yielding FilePath-like objects.
+        """
+        yield self
+        if self.isdir():
+            for c in self.children():
+                # we should first see if it's what we want, then we
+                # can walk through the directory
+                if (descend is None or descend(c)):
+                    for subc in c.walk(descend):
+                        if os.path.realpath(self.path).startswith(
+                            os.path.realpath(subc.path)):
+                            raise LinkError("Cycle in file graph.")
+                        yield subc
+                else:
+                    yield c
+
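+    # [Editor's sketch, not part of upstream Twisted]  walk() pairs naturally
+    # with a predicate that prunes traversal, e.g. skipping VCS metadata
+    # (path and predicate below are illustrative):
+    #
+    #     def notDotGit(p):
+    #         return p.basename() != b'.git'
+    #
+    #     for p in FilePath(b"/src/project").walk(descend=notDotGit):
+    #         print(p.path)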
+
+    def sibling(self, path):
+        """
+        Return a L{FilePath} with the same directory as this instance but with a
+        basename of C{path}.
+
+        @param path: The basename of the L{FilePath} to return.
+        @type path: C{str}
+
+        @rtype: L{FilePath}
+        """
+        return self.parent().child(path)
+
+
+    def descendant(self, segments):
+        """
+        Retrieve a child or child's child of this path.
+
+        @param segments: A sequence of path segments as C{str} instances.
+
+        @return: A L{FilePath} constructed by looking up the C{segments[0]}
+            child of this path, the C{segments[1]} child of that path, and so
+            on.
+
+        @since: 10.2
+        """
+        path = self
+        for name in segments:
+            path = path.child(name)
+        return path
+
+
+    def segmentsFrom(self, ancestor):
+        """
+        Return a list of segments between a child and its ancestor.
+
+        For example, in the case of a path X representing /a/b/c/d and a path Y
+        representing /a/b, C{X.segmentsFrom(Y)} will return C{['c',
+        'd']}.
+
+        @param ancestor: an instance of the same class as self, ostensibly an
+        ancestor of self.
+
+        @raise: ValueError if the 'ancestor' parameter is not actually an
+        ancestor, i.e. a path for /x/y/z is passed as an ancestor for /a/b/c/d.
+
+        @return: a list of strs
+        """
+        # this might be an unnecessarily inefficient implementation but it will
+        # work on win32 and for zipfiles; later I will determine if the
+        # obvious fast implementation does the right thing too
+        f = self
+        p = f.parent()
+        segments = []
+        while f != ancestor and p != f:
+            segments[0:0] = [f.basename()]
+            f = p
+            p = p.parent()
+        if f == ancestor and segments:
+            return segments
+        raise ValueError("%r not parent of %r" % (ancestor, self))
+
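+    # [Editor's sketch, not part of upstream Twisted]  segmentsFrom() recovers
+    # the relative path between two related paths (values illustrative):
+    #
+    #     root = FilePath(b"/srv/www")
+    #     leaf = root.descendant([b"static", b"css", b"site.css"])
+    #     leaf.segmentsFrom(root)   # [b'static', b'css', b'site.css']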
+
+    # new in 8.0
+    def __hash__(self):
+        """
+        Hash the same as another FilePath with the same path as mine.
+        """
+        return hash((self.__class__, self.path))
+
+
+    # pending deprecation in 8.0
+    def getmtime(self):
+        """
+        Deprecated.  Use getModificationTime instead.
+        """
+        return int(self.getModificationTime())
+
+
+    def getatime(self):
+        """
+        Deprecated.  Use getAccessTime instead.
+        """
+        return int(self.getAccessTime())
+
+
+    def getctime(self):
+        """
+        Deprecated.  Use getStatusChangeTime instead.
+        """
+        return int(self.getStatusChangeTime())
+
+
+
+class RWX(FancyEqMixin, object):
+    """
+    A class representing read/write/execute permissions for a single user
+    category (i.e. user/owner, group, or other/world).  Instantiate with
+    three boolean values: readable, writable, and executable.
+
+    @type read: C{bool}
+    @ivar read: Whether permission to read is given
+
+    @type write: C{bool}
+    @ivar write: Whether permission to write is given
+
+    @type execute: C{bool}
+    @ivar execute: Whether permission to execute is given
+
+    @since: 11.1
+    """
+    compareAttributes = ('read', 'write', 'execute')
+    def __init__(self, readable, writable, executable):
+        self.read = readable
+        self.write = writable
+        self.execute = executable
+
+
+    def __repr__(self):
+        return "RWX(read=%s, write=%s, execute=%s)" % (
+            self.read, self.write, self.execute)
+
+
+    def shorthand(self):
+        """
+        Returns a short string representing the permission bits.  Looks like
+        part of what is printed by command line utilities such as 'ls -l'
+        (e.g. 'rwx')
+        """
+        returnval = ['r', 'w', 'x']
+        i = 0
+        for val in (self.read, self.write, self.execute):
+            if not val:
+                returnval[i] = '-'
+            i += 1
+        return ''.join(returnval)
+
+
+
+class Permissions(FancyEqMixin, object):
+    """
+    A class representing read/write/execute permissions.  Instantiate with any
+    portion of the file's mode that includes the permission bits.
+
+    @type user: L{RWX}
+    @ivar user: User/Owner permissions
+
+    @type group: L{RWX}
+    @ivar group: Group permissions
+
+    @type other: L{RWX}
+    @ivar other: Other/World permissions
+
+    @since: 11.1
+    """
+
+    compareAttributes = ('user', 'group', 'other')
+
+    def __init__(self, statModeInt):
+        self.user, self.group, self.other = (
+            [RWX(*[statModeInt & bit > 0 for bit in bitGroup]) for bitGroup in
+             [[S_IRUSR, S_IWUSR, S_IXUSR],
+              [S_IRGRP, S_IWGRP, S_IXGRP],
+              [S_IROTH, S_IWOTH, S_IXOTH]]]
+        )
+
+
+    def __repr__(self):
+        return "[%s | %s | %s]" % (
+            str(self.user), str(self.group), str(self.other))
+
+
+    def shorthand(self):
+        """
+        Returns a short string representing the permission bits.  Looks like
+        what is printed by command line utilities such as 'ls -l'
+        (e.g. 'rwx-wx--x')
+        """
+        return "".join(
+            [x.shorthand() for x in (self.user, self.group, self.other)])
+
+
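+# [Editor's sketch, not part of upstream Twisted]  Permissions decodes the
+# mode bits from os.stat into nested RWX values (mode value illustrative):
+#
+#     perms = Permissions(0o754)
+#     perms.user.shorthand()    # 'rwx'
+#     perms.group.shorthand()   # 'r-x'
+#     perms.shorthand()         # 'rwxr-xr--'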
+
+@comparable
+@implementer(IFilePath)
+class FilePath(AbstractFilePath):
+    """
+    I am a path on the filesystem that only permits 'downwards' access.
+
+    Instantiate me with a pathname (for example,
+    FilePath('/home/myuser/public_html')) and I will attempt to only provide
+    access to files which reside inside that path.  I may be a path to a file,
+    a directory, or a file which does not exist.
+
+    The correct way to use me is to instantiate me, and then do ALL filesystem
+    access through me.  In other words, do not import the 'os' module; if you
+    need to open a file, call my 'open' method.  If you need to list a
+    directory, call my 'children' method.
+
+    Even if you pass me a relative path, I will convert that to an absolute
+    path internally.
+
+    Note: although time-related methods do return floating-point results, they
+    may still be only second resolution depending on the platform and the last
+    value passed to L{os.stat_float_times}.  If you want greater-than-second
+    precision, call C{os.stat_float_times(True)}, or use Python 2.5.
+    Greater-than-second precision is only available in Windows on Python2.5 and
+    later.
+
+    On both Python 2 and Python 3, paths can only be bytes.
+
+    @type alwaysCreate: C{bool}
+    @ivar alwaysCreate: When opening this file, only succeed if the file does
+        not already exist.
+
+    @type path: C{bytes}
+    @ivar path: The path from which 'downward' traversal is permitted.
+
+    @ivar statinfo: The currently cached status information about the file on
+        the filesystem that this L{FilePath} points to.  This attribute is
+        C{None} if the file is in an indeterminate state (either this
+        L{FilePath} has not yet had cause to call C{stat()} yet or
+        L{FilePath.changed} indicated that new information is required), 0 if
+        C{stat()} was called and returned an error (i.e. the path did not exist
+        when C{stat()} was called), or a C{stat_result} object that describes
+        the last known status of the underlying file (or directory, as the case
+        may be).  Trust me when I tell you that you do not want to use this
+        attribute.  Instead, use the methods on L{FilePath} which give you
+        information about it, like C{getsize()}, C{isdir()},
+        C{getModificationTime()}, and so on.
+    @type statinfo: C{int} or L{types.NoneType} or L{os.stat_result}
+    """
+
+    statinfo = None
+    path = None
+
+    sep = slash.encode("ascii")
+
+    def __init__(self, path, alwaysCreate=False):
+        """
+        Convert a path string to an absolute path if necessary and initialize
+        the L{FilePath} with the result.
+        """
+        self.path = abspath(path)
+        self.alwaysCreate = alwaysCreate
+
+    def __getstate__(self):
+        """
+        Support serialization by discarding cached L{os.stat} results and
+        returning everything else.
+        """
+        d = self.__dict__.copy()
+        if 'statinfo' in d:
+            del d['statinfo']
+        return d
+
+
+    def child(self, path):
+        """
+        Create and return a new L{FilePath} representing a path contained by
+        C{self}.
+
+        @param path: The base name of the new L{FilePath}.  If this contains
+            directory separators or parent references it will be rejected.
+        @type path: C{bytes}
+
+        @raise InsecurePath: If the result of combining this path with C{path}
+            would result in a path which is not a direct child of this path.
+        """
+        if platform.isWindows() and path.count(b":"):
+            # Catch paths like C:blah that don't have a slash
+            raise InsecurePath("%r contains a colon." % (path,))
+        norm = normpath(path)
+        if self.sep in norm:
+            raise InsecurePath("%r contains one or more directory separators" % (path,))
+        newpath = abspath(joinpath(self.path, norm))
+        if not newpath.startswith(self.path):
+            raise InsecurePath("%r is not a child of %s" % (newpath, self.path))
+        return self.clonePath(newpath)
+
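+    # [Editor's sketch, not part of upstream Twisted]  child() refuses
+    # anything that would escape this path (paths illustrative):
+    #
+    #     base = FilePath(b"/srv/data")
+    #     base.child(b"report.txt")     # FilePath for /srv/data/report.txt
+    #     base.child(b"../etc/passwd")  # raises InsecurePath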
+
+    def preauthChild(self, path):
+        """
+        Use me if `path' might have slashes in it, but you know they're safe.
+
+        (NOT slashes at the beginning. It still needs to be a _child_).
+        """
+        newpath = abspath(joinpath(self.path, normpath(path)))
+        if not newpath.startswith(self.path):
+            raise InsecurePath("%s is not a child of %s" % (newpath, self.path))
+        return self.clonePath(newpath)
+
+    def childSearchPreauth(self, *paths):
+        """Return my first existing child with a name in 'paths'.
+
+        paths is expected to be a list of *pre-secured* path fragments; in most
+        cases this will be specified by a system administrator and not an
+        arbitrary user.
+
+        If no appropriately-named children exist, this will return None.
+        """
+        p = self.path
+        for child in paths:
+            jp = joinpath(p, child)
+            if exists(jp):
+                return self.clonePath(jp)
+
+    def siblingExtensionSearch(self, *exts):
+        """Attempt to return a path with my name, given multiple possible
+        extensions.
+
+        Each extension in exts will be tested and the first path which exists
+        will be returned.  If no path exists, None will be returned.  If '' is
+        in exts, then if the file referred to by this path exists, 'self' will
+        be returned.
+
+        The extension '*' has a magic meaning, which means "any path that
+        begins with self.path+'.' is acceptable".
+        """
+        p = self.path
+        for ext in exts:
+            if not ext and self.exists():
+                return self
+            if ext == b'*':
+                basedot = basename(p) + b'.'
+                for fn in listdir(dirname(p)):
+                    if fn.startswith(basedot):
+                        return self.clonePath(joinpath(dirname(p), fn))
+            p2 = p + ext
+            if exists(p2):
+                return self.clonePath(p2)
+
+
+    def realpath(self):
+        """
+        Returns the absolute target as a FilePath if self is a link, self
+        otherwise.  The absolute link is the ultimate file or directory the
+        link refers to (for instance, if the link refers to another link, and
+        another...).  If the filesystem does not support symlinks, or
+        if the link is cyclical, raises a LinkError.
+
+        Behaves like L{os.path.realpath} in that it does not resolve link
+        names in the middle (ex. /x/y/z, y is a link to w - realpath on z
+        will return /x/y/z, not /x/w/z).
+
+        @return: FilePath of the target path
+        @raises LinkError: if links are not supported or links are cyclical.
+        """
+        if self.islink():
+            result = os.path.realpath(self.path)
+            if result == self.path:
+                raise LinkError("Cyclical link - will loop forever")
+            return self.clonePath(result)
+        return self
+
+
+    def siblingExtension(self, ext):
+        return self.clonePath(self.path+ext)
+
+
+    def linkTo(self, linkFilePath):
+        """
+        Creates a symlink to self at the path given by the L{FilePath}
+        C{linkFilePath}.  Only works on posix systems due to its dependence on
+        C{os.symlink}.  Propagates C{OSError}s up from C{os.symlink} if
+        C{linkFilePath.parent()} does not exist, or C{linkFilePath} already
+        exists.
+
+        @param linkFilePath: a FilePath representing the link to be created
+        @type linkFilePath: L{FilePath}
+        """
+        os.symlink(self.path, linkFilePath.path)
+
+
+    def open(self, mode='r'):
+        """
+        Open this file using C{mode} or for writing if C{alwaysCreate} is
+        C{True}.
+
+        In all cases the file is opened in binary mode, so it is not necessary
+        to include C{b} in C{mode}.
+
+        @param mode: The mode to open the file in.  Default is C{r}.
+        @type mode: C{str}
+        @raises AssertionError: If C{a} is included in the mode and
+            C{alwaysCreate} is C{True}.
+        @rtype: C{file}
+        @return: An open C{file} object.
+        """
+        if self.alwaysCreate:
+            assert 'a' not in mode, ("Appending not supported when "
+                                     "alwaysCreate == True")
+            return self.create()
+        # This hack is necessary because of a bug in Python 2.7 on Windows:
+        # http://bugs.python.org/issue7686
+        mode = mode.replace('b', '')
+        return open(self.path, mode + 'b')
+
+    # stat methods below
+
+    def restat(self, reraise=True):
+        """
+        Re-calculate cached effects of 'stat'.  To refresh information on this path
+        after you know the filesystem may have changed, call this method.
+
+        @param reraise: a boolean.  If true, re-raise exceptions from
+        L{os.stat}; otherwise, mark this path as not existing, and remove any
+        cached stat information.
+
+        @raise Exception: if C{reraise} is C{True} and an exception occurs while
+            reloading metadata.
+        """
+        try:
+            self.statinfo = stat(self.path)
+        except OSError:
+            self.statinfo = 0
+            if reraise:
+                raise
+
+
+    def changed(self):
+        """
+        Clear any cached information about the state of this path on disk.
+
+        @since: 10.1.0
+        """
+        self.statinfo = None
+
+
+    def chmod(self, mode):
+        """
+        Changes the permissions on self, if possible.  Propagates errors from
+        C{os.chmod} up.
+
+        @param mode: integer representing the new permissions desired (same as
+            the command line chmod)
+        @type mode: C{int}
+        """
+        os.chmod(self.path, mode)
+
+
+    def getsize(self):
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return st.st_size
+
+
+    def getModificationTime(self):
+        """
+        Retrieve the time of the last modification to this file.
+
+        @return: a number of seconds from the epoch.
+        @rtype: float
+        """
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return float(st.st_mtime)
+
+
+    def getStatusChangeTime(self):
+        """
+        Retrieve the time of the last status change for this file.
+
+        @return: a number of seconds from the epoch.
+        @rtype: float
+        """
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return float(st.st_ctime)
+
+
+    def getAccessTime(self):
+        """
+        Retrieve the time that this file was last accessed.
+
+        @return: a number of seconds from the epoch.
+        @rtype: float
+        """
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return float(st.st_atime)
+
+
+    def getInodeNumber(self):
+        """
+        Retrieve the file serial number, also called inode number, which
+        distinguishes this file from all other files on the same device.
+
+        @raise: NotImplementedError if the platform is Windows, since the
+                inode number would be a dummy value for all files in Windows
+        @return: a number representing the file serial number
+        @rtype: C{int}
+        @since: 11.0
+        """
+        if platform.isWindows():
+            raise NotImplementedError
+
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return st.st_ino
+
+
+    def getDevice(self):
+        """
+        Retrieves the device containing the file.  The inode number and device
+        number together uniquely identify the file, but the device number is
+        not necessarily consistent across reboots or system crashes.
+
+        @raise: NotImplementedError if the platform is Windows, since the
+                device number would be 0 for all partitions on a Windows
+                platform
+        @return: a number representing the device
+        @rtype: C{int}
+        @since: 11.0
+        """
+        if platform.isWindows():
+            raise NotImplementedError
+
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return st.st_dev
+
+
+    def getNumberOfHardLinks(self):
+        """
+        Retrieves the number of hard links to the file.  This count keeps
+        track of how many directories have entries for this file.  If the
+        count is ever decremented to zero then the file itself is discarded
+        as soon as no process still holds it open.  Symbolic links are not
+        counted in the total.
+
+        @raise: NotImplementedError if the platform is Windows, since Windows
+                doesn't maintain a link count for directories, and os.stat
+                does not set st_nlink on Windows anyway.
+        @return: the number of hard links to the file
+        @rtype: C{int}
+        @since: 11.0
+        """
+        if platform.isWindows():
+            raise NotImplementedError
+
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return st.st_nlink
+
+
+    def getUserID(self):
+        """
+        Returns the user ID of the file's owner.
+
+        @raise: NotImplementedError if the platform is Windows, since the UID
+                is always 0 on Windows
+        @return: the user ID of the file's owner
+        @rtype: C{int}
+        @since: 11.0
+        """
+        if platform.isWindows():
+            raise NotImplementedError
+
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return st.st_uid
+
+
+    def getGroupID(self):
+        """
+        Returns the group ID of the file.
+
+        @raise: NotImplementedError if the platform is Windows, since the GID
+                is always 0 on Windows
+        @return: the group ID of the file
+        @rtype: C{int}
+        @since: 11.0
+        """
+        if platform.isWindows():
+            raise NotImplementedError
+
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return st.st_gid
+
+
+    def getPermissions(self):
+        """
+        Returns the permissions of the file.  This should also work on
+        Windows; however, those permissions may not be what is expected there.
+
+        @return: the permissions for the file
+        @rtype: L{Permissions}
+        @since: 11.1
+        """
+        st = self.statinfo
+        if not st:
+            self.restat()
+            st = self.statinfo
+        return Permissions(S_IMODE(st.st_mode))
+
+
+    def exists(self):
+        """
+        Check if this L{FilePath} exists.
+
+        @return: C{True} if the stats of C{path} can be retrieved successfully,
+            C{False} in the other cases.
+        @rtype: C{bool}
+        """
+        if self.statinfo:
+            return True
+        else:
+            self.restat(False)
+            if self.statinfo:
+                return True
+            else:
+                return False
+
+
+    def isdir(self):
+        """
+        @return: C{True} if this L{FilePath} refers to a directory, C{False}
+            otherwise.
+        """
+        st = self.statinfo
+        if not st:
+            self.restat(False)
+            st = self.statinfo
+            if not st:
+                return False
+        return S_ISDIR(st.st_mode)
+
+
+    def isfile(self):
+        """
+        @return: C{True} if this L{FilePath} points to a regular file (not a
+            directory, socket, named pipe, etc), C{False} otherwise.
+        """
+        st = self.statinfo
+        if not st:
+            self.restat(False)
+            st = self.statinfo
+            if not st:
+                return False
+        return S_ISREG(st.st_mode)
+
+
+    def isBlockDevice(self):
+        """
+        Returns whether the underlying path is a block device.
+
+        @return: C{True} if it is a block device, C{False} otherwise
+        @rtype: C{bool}
+        @since: 11.1
+        """
+        st = self.statinfo
+        if not st:
+            self.restat(False)
+            st = self.statinfo
+            if not st:
+                return False
+        return S_ISBLK(st.st_mode)
+
+
+    def isSocket(self):
+        """
+        Returns whether the underlying path is a socket.
+
+        @return: C{True} if it is a socket, C{False} otherwise
+        @rtype: C{bool}
+        @since: 11.1
+        """
+        st = self.statinfo
+        if not st:
+            self.restat(False)
+            st = self.statinfo
+            if not st:
+                return False
+        return S_ISSOCK(st.st_mode)
+
+
+    def islink(self):
+        """
+        @return: C{True} if this L{FilePath} points to a symbolic link.
+        """
+        # We can't use cached stat results here, because that is the stat of
+        # the destination - (see #1773) which in *every case* but this one is
+        # the right thing to use.  We could call lstat here and use that, but
+        # it seems unlikely we'd actually save any work that way.  -glyph
+        return islink(self.path)
+
+
+    def isabs(self):
+        """
+        @return: C{True}, always.
+        """
+        return isabs(self.path)
+
+
+    def listdir(self):
+        """
+        List the base names of the direct children of this L{FilePath}.
+
+        @return: a C{list} of C{bytes} giving the names of the contents of the
+            directory this L{FilePath} refers to.  These names are relative to
+            this L{FilePath}.
+
+        @raise: Anything the platform C{os.listdir} implementation might raise
+            (typically OSError).
+        """
+        return listdir(self.path)
+
+
+    def splitext(self):
+        """
+        @return: tuple where the first item is the filename and second item is
+            the file extension. See Python docs for C{os.path.splitext}
+        """
+        return splitext(self.path)
+
+
+    def __repr__(self):
+        return 'FilePath(%r)' % (self.path,)
+
+
+    def touch(self):
+        """
+        Updates the access and last modification times of the file at this
+        file path to the current time. Also creates the file if it does not
+        already exist.
+
+        @raise Exception: if unable to create or modify the last modification
+            time of the file.
+        """
+        try:
+            self.open('a').close()
+        except IOError:
+            pass
+        utime(self.path, None)
+
+
+    def remove(self):
+        """
+        Removes the file or directory that is represented by self.  If
+        C{self.path} is a directory, recursively remove all its children
+        before removing the directory. If it's a file or link, just delete it.
+        """
+        if self.isdir() and not self.islink():
+            for child in self.children():
+                child.remove()
+            os.rmdir(self.path)
+        else:
+            os.remove(self.path)
+        self.changed()
+
+
+    def makedirs(self):
+        """
+        Create all directories not yet existing in C{path} segments, using
+        C{os.makedirs}.
+        """
+        return os.makedirs(self.path)
+
+
+    def globChildren(self, pattern):
+        """
+        Assuming I am representing a directory, return a list of
+        FilePaths representing my children that match the given
+        pattern.
+        """
+        import glob
+        path = (self.path[-1:] == b'/' and self.path + pattern
+                or self.sep.join([self.path, pattern]))
+        return map(self.clonePath, glob.glob(path))
+
+
+    def basename(self):
+        """
+        @return: The final component of the L{FilePath}'s path (Everything after
+            the final path separator).
+        @rtype: C{bytes}
+        """
+        return basename(self.path)
+
+
+    def dirname(self):
+        """
+        @return: All of the components of the L{FilePath}'s path except the last
+            one (everything up to the final path separator).
+        @rtype: C{bytes}
+        """
+        return dirname(self.path)
+
+
+    def parent(self):
+        """
+        @return: A L{FilePath} representing the path which directly contains
+            this L{FilePath}.
+        """
+        return self.clonePath(self.dirname())
+
+
+    def setContent(self, content, ext=b'.new'):
+        """
+        Replace the file at this path with a new file that contains the given
+        bytes, trying to avoid data-loss in the meanwhile.
+
+        On UNIX-like platforms, this method does its best to ensure that by the
+        time this method returns, either the old contents I{or} the new contents
+        of the file will be present at this path for subsequent readers
+        regardless of premature device removal, program crash, or power loss,
+        making the following assumptions:
+
+            - your filesystem is journaled (i.e. your filesystem will not
+              I{itself} lose data due to power loss)
+
+            - your filesystem's C{rename()} is atomic
+
+            - your filesystem will not discard new data while preserving new
+              metadata (see U{http://mjg59.livejournal.com/108257.html} for more
+              detail)
+
+        On most versions of Windows there is no atomic C{rename()} (see
+        U{http://bit.ly/win32-overwrite} for more information), so this method
+        is slightly less helpful.  There is a small window where the file at
+        this path may be deleted before the new file is moved to replace it:
+        however, the new file will be fully written and flushed beforehand so in
+        the unlikely event that there is a crash at that point, it should be
+        possible for the user to manually recover the new version of their data.
+        In the future, Twisted will support atomic file moves on those versions
+        of Windows which I{do} support them: see U{Twisted ticket
+        3004<http://twistedmatrix.com/trac/ticket/3004>}.
+
+        This method should be safe for use by multiple concurrent processes, but
+        note that it is not easy to predict which process's contents will
+        ultimately end up on disk if they invoke this method at close to the
+        same time.
+
+        @param content: The desired contents of the file at this path.
+
+        @type content: L{bytes}
+
+        @param ext: An extension to append to the temporary filename used to
+            store the bytes while they are being written.  This can be used to
+            make sure that temporary files can be identified by their suffix,
+            for cleanup in case of crashes.
+
+        @type ext: C{bytes}
+        """
+        sib = self.temporarySibling(ext)
+        f = sib.open('w')
+        try:
+            f.write(content)
+        finally:
+            f.close()
+        if platform.isWindows() and exists(self.path):
+            os.unlink(self.path)
+        os.rename(sib.path, self.path)
+
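+    # [Editor's sketch, not part of upstream Twisted]  setContent() writes to
+    # a temporary sibling and then renames it into place, so readers see
+    # either the old bytes or the new bytes, never a partial write.  A
+    # round-trip with getContent() (path illustrative):
+    #
+    #     p = FilePath(b"/tmp/example.txt")
+    #     p.setContent(b"hello world")
+    #     p.getContent()   # b'hello world'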
+
+    def __cmp__(self, other):
+        if not isinstance(other, FilePath):
+            return NotImplemented
+        return cmp(self.path, other.path)
+
+
+    def createDirectory(self):
+        """
+        Create the directory the L{FilePath} refers to.
+
+        @see: L{makedirs}
+
+        @raise OSError: If the directory cannot be created.
+        """
+        os.mkdir(self.path)
+
+
+    def requireCreate(self, val=1):
+        self.alwaysCreate = val
+
+
+    def create(self):
+        """
+        Exclusively create a file, only if this file previously did not exist.
+        """
+        fdint = os.open(self.path, _CREATE_FLAGS)
+
+        # XXX TODO: 'name' attribute of returned files is not mutable or
+        # settable via fdopen, so this file is slightly less functional than the
+        # one returned from 'open' by default.  send a patch to Python...
+
+        return os.fdopen(fdint, 'w+b')
+
+
+    def temporarySibling(self, extension=b""):
+        """
+        Construct a path referring to a sibling of this path.
+
+        The resulting path will be unpredictable, so that other processes
+        should neither accidentally attempt to refer to the same path before it
+        is created, nor be able to guess its name in advance.
+
+        @param extension: A suffix to append to the created filename.  (Note
+            that if you want an extension with a '.' you must include the '.'
+            yourself.)
+
+        @type extension: C{bytes}
+
+        @return: a path object with the given extension suffix, C{alwaysCreate}
+            set to True.
+
+        @rtype: L{FilePath}
+        """
+        sib = self.sibling(_secureEnoughString() + self.basename() + extension)
+        sib.requireCreate()
+        return sib
+
+
+    _chunkSize = 2 ** 2 ** 2 ** 2  # 2 ** 16 == 65536 bytes per read in copyTo
+
+    def copyTo(self, destination, followLinks=True):
+        """
+        Copies self to destination.
+
+        If self doesn't exist, an OSError is raised.
+
+        If self is a directory, this method copies its children (but not
+        itself) recursively to destination - if destination does not exist as a
+        directory, this method creates it.  If destination is a file, an
+        IOError will be raised.
+
+        If self is a file, this method copies it to destination.  If
+        destination is a file, this method overwrites it.  If destination is a
+        directory, an IOError will be raised.
+
+        If self is a link (and followLinks is False), self will be copied
+        over as a new symlink with the same target as returned by os.readlink.
+        That means that if it is absolute, both the old and new symlink will
+        link to the same thing.  If it's relative, then perhaps not (and
+        it's also possible that this relative link will be broken).
+
+        File/directory permissions and ownership will NOT be copied over.
+
+        If followLinks is True, symlinks are followed so that they're treated
+        as their targets.  In other words, if self is a link, the link's target
+        will be copied.  If destination is a link, self will be copied to the
+        destination's target (the actual destination will be destination's
+        target).  Symlinks under self (if self is a directory) will be
+        followed and their targets' children copied recursively.
+
+        If followLinks is False, symlinks will be copied over as symlinks.
+
+        @param destination: the destination (a FilePath) to which self
+            should be copied
+        @param followLinks: whether symlinks in self should be treated as links
+            or as their targets
+        """
+        if self.islink() and not followLinks:
+            os.symlink(os.readlink(self.path), destination.path)
+            return
+        # XXX TODO: *thorough* audit and documentation of the exact desired
+        # semantics of this code.  Right now the behavior of existent
+        # destination symlinks is convenient, and quite possibly correct, but
+        # its security properties need to be explained.
+        if self.isdir():
+            if not destination.exists():
+                destination.createDirectory()
+            for child in self.children():
+                destChild = destination.child(child.basename())
+                child.copyTo(destChild, followLinks)
+        elif self.isfile():
+            writefile = destination.open('w')
+            try:
+                readfile = self.open()
+                try:
+                    while 1:
+                        # XXX TODO: optionally use os.open, os.read and O_DIRECT
+                        # and use os.fstatvfs to determine chunk sizes and make
+                        # *****sure**** copy is page-atomic; the following is
+                        # good enough for 99.9% of everybody and won't take a
+                        # week to audit though.
+                        chunk = readfile.read(self._chunkSize)
+                        writefile.write(chunk)
+                        if len(chunk) < self._chunkSize:
+                            break
+                finally:
+                    readfile.close()
+            finally:
+                writefile.close()
+        elif not self.exists():
+            raise OSError(errno.ENOENT, "No such file or directory")
+        else:
+            # If you see the following message because you want to copy
+            # symlinks, fifos, block devices, character devices, or unix
+            # sockets, please feel free to add support to do sensible things in
+            # reaction to those types!
+            raise NotImplementedError(
+                "Only copying of files and directories supported")
+
+
+    def moveTo(self, destination, followLinks=True):
+        """
+        Move self to destination - basically renaming self to whatever
+        destination is named.  If destination is an already-existing directory,
+        moves all children to destination if destination is empty.  If
+        destination is a non-empty directory, or destination is a file, an
+        OSError will be raised.
+
+        If moving between filesystems, self needs to be copied, and everything
+        that applies to copyTo applies to moveTo.
+
+        @param destination: the destination (a FilePath) to which self
+            should be copied
+        @param followLinks: whether symlinks in self should be treated as links
+            or as their targets (only applicable when moving between
+            filesystems)
+        """
+        try:
+            os.rename(self.path, destination.path)
+        except OSError as ose:
+            if ose.errno == errno.EXDEV:
+                # man 2 rename, ubuntu linux 5.10 "breezy":
+
+                #   oldpath and newpath are not on the same mounted filesystem.
+                #   (Linux permits a filesystem to be mounted at multiple
+                #   points, but rename(2) does not work across different mount
+                #   points, even if the same filesystem is mounted on both.)
+
+                # that means it's time to copy trees of directories!
+                secsib = destination.temporarySibling()
+                self.copyTo(secsib, followLinks) # slow
+                secsib.moveTo(destination, followLinks) # visible
+
+                # done creating new stuff.  let's clean me up.
+                mysecsib = self.temporarySibling()
+                self.moveTo(mysecsib, followLinks) # visible
+                mysecsib.remove() # slow
+            else:
+                raise
+        else:
+            self.changed()
+            destination.changed()
+
+
+FilePath.clonePath = FilePath
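+
+# [Editor's sketch, not part of upstream Twisted]  copyTo() and moveTo()
+# operate on whole trees, and moveTo() falls back to copy-then-delete when
+# the rename crosses filesystems (EXDEV).  Illustrative use:
+#
+#     src = FilePath(b"/tmp/build")
+#     dst = FilePath(b"/srv/releases/build")
+#     src.copyTo(dst)                          # recursive; creates dst if missing
+#     src.moveTo(FilePath(b"/tmp/build.old"))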
diff --git a/ThirdParty/Twisted/twisted/python/finalize.py b/ThirdParty/Twisted/twisted/python/finalize.py
new file mode 100644
index 0000000..8b99bf6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/finalize.py
@@ -0,0 +1,46 @@
+
+"""
+A module for externalized finalizers.
+"""
+
+import weakref
+
+garbageKey = 0
+
+def callbackFactory(num, fins):
+    def _cb(w):
+        del refs[num]
+        for fx in fins:
+            fx()
+    return _cb
+
+refs = {}
+
+def register(inst):
+    global garbageKey
+    garbageKey += 1
+    r = weakref.ref(inst, callbackFactory(garbageKey, inst.__finalizers__()))
+    refs[garbageKey] = r
+
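+# [Editor's sketch, not part of upstream Twisted]  A class opting in to
+# externalized finalizers returns its cleanup callables from __finalizers__();
+# those callables must not reference the instance itself, or the weakref
+# callback would keep it alive forever (names below are hypothetical):
+#
+#     cleanupLog = []
+#
+#     def makeFinalizer(name):
+#         # Closes over only the name, never the instance.
+#         return lambda: cleanupLog.append(name)
+#
+#     class Resource(object):
+#         def __init__(self, name):
+#             self._fin = makeFinalizer(name)
+#         def __finalizers__(self):
+#             return [self._fin]
+#
+#     res = Resource('scratch')
+#     register(res)
+#     del res    # callback runs; cleanupLog == ['scratch']
+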
+if __name__ == '__main__':
+    def fin():
+        print('I am _so_ dead.')
+
+    class Finalizeable:
+        """
+        An un-sucky __del__
+        """
+
+        def __finalizers__(self):
+            """
+            I'm going away.
+            """
+            return [fin]
+
+    f = Finalizeable()
+    f.f2 = f
+    register(f)
+    del f
+    import gc
+    gc.collect()
+    print('deled')
diff --git a/ThirdParty/Twisted/twisted/python/formmethod.py b/ThirdParty/Twisted/twisted/python/formmethod.py
new file mode 100644
index 0000000..b4d905e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/formmethod.py
@@ -0,0 +1,363 @@
+# -*- test-case-name: twisted.test.test_formmethod -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Form-based method objects.
+
+This module contains support for descriptive method signatures that can be used
+to format methods.
+"""
+
+import calendar
+
+class FormException(Exception):
+    """An error occurred calling the form method.
+    """
+    def __init__(self, *args, **kwargs):
+        Exception.__init__(self, *args)
+        self.descriptions = kwargs
+
+
+class InputError(FormException):
+    """
+    An error occurred with some input.
+    """
+
+
+class Argument:
+    """Base class for form arguments."""
+
+    # default value for argument, if no other default is given
+    defaultDefault = None
+
+    def __init__(self, name, default=None, shortDesc=None,
+                 longDesc=None, hints=None, allowNone=1):
+        self.name = name
+        self.allowNone = allowNone
+        if default is None:
+            default = self.defaultDefault
+        self.default = default
+        self.shortDesc = shortDesc
+        self.longDesc = longDesc
+        if not hints:
+            hints = {}
+        self.hints = hints
+
+    def addHints(self, **kwargs):
+        self.hints.update(kwargs)
+
+    def getHint(self, name, default=None):
+        return self.hints.get(name, default)
+
+    def getShortDescription(self):
+        return self.shortDesc or self.name.capitalize()
+
+    def getLongDescription(self):
+        return self.longDesc or '' #self.shortDesc or "The %s." % self.name
+
+    def coerce(self, val):
+        """Convert the value to the correct format."""
+        raise NotImplementedError("implement in subclass")
+
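+# [Editor's sketch, not part of upstream Twisted]  A concrete Argument only
+# needs to implement coerce(); for instance, a hypothetical comma-separated
+# list argument:
+#
+#     class CommaList(Argument):
+#         defaultDefault = ()
+#         def coerce(self, val):
+#             return tuple(part.strip() for part in str(val).split(','))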
+
+class String(Argument):
+    """A single string.
+    """
+    defaultDefault = ''
+    min = 0
+    max = None
+    
+    def __init__(self, name, default=None, shortDesc=None,
+                 longDesc=None, hints=None, allowNone=1, min=0, max=None):
+        Argument.__init__(self, name, default=default, shortDesc=shortDesc,
+                          longDesc=longDesc, hints=hints, allowNone=allowNone)
+        self.min = min
+        self.max = max
+    
+    def coerce(self, val):
+        s = str(val)
+        if len(s) < self.min:
+            raise InputError("Value must be at least %s characters long" % self.min)
+        if self.max != None and len(s) > self.max:
+            raise InputError("Value must be at most %s characters long" % self.max)
+        return str(val)
+
+
+class Text(String):
+    """A long string.
+    """
+
+
+class Password(String):
+    """A string which should be obscured when input.
+    """
+
+
+class VerifiedPassword(String):
+    """A string that should be obscured when input and needs verification."""
+    
+    def coerce(self, vals):
+        if len(vals) != 2 or vals[0] != vals[1]:
+            raise InputError("Please enter the same password twice.")
+        s = str(vals[0])
+        if len(s) < self.min:
+            raise InputError("Value must be at least %s characters long" % self.min)
+        if self.max != None and len(s) > self.max:
+            raise InputError("Value must be at most %s characters long" % self.max)
+        return s
+
+
+class Hidden(String):
+    """A string which is not displayed.
+
+    The passed default is used as the value.
+    """
+
+
+class Integer(Argument):
+    """A single integer.
+    """
+    defaultDefault = None
+
+    def __init__(self, name, allowNone=1, default=None, shortDesc=None,
+                 longDesc=None, hints=None):
+        #although Argument now has allowNone, that was recently added, and
+        #putting it at the end kept things which relied on argument order
+        #from breaking.  However, allowNone originally was in here, so
+        #I have to keep the same order, to prevent breaking code that
+        #depends on argument order only
+        Argument.__init__(self, name, default, shortDesc, longDesc, hints,
+                          allowNone)
+
+    def coerce(self, val):
+        if not val.strip() and self.allowNone:
+            return None
+        try:
+            return int(val)
+        except ValueError:
+            raise InputError("%s is not valid, please enter a whole number, e.g. 10" % val)
+
+
+class IntegerRange(Integer):
+
+    def __init__(self, name, min, max, allowNone=1, default=None, shortDesc=None,
+                 longDesc=None, hints=None):
+        self.min = min
+        self.max = max
+        Integer.__init__(self, name, allowNone=allowNone, default=default, shortDesc=shortDesc,
+                         longDesc=longDesc, hints=hints)
+
+    def coerce(self, val):
+        result = Integer.coerce(self, val)
+        if self.allowNone and result == None:
+            return result
+        if result < self.min:
+            raise InputError("Value %s is too small, it should be at least %s" % (result, self.min))
+        if result > self.max:
+            raise InputError("Value %s is too large, it should be at most %s" % (result, self.max))
+        return result
+
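+# [Editor's sketch, not part of upstream Twisted]  IntegerRange.coerce()
+# layers bounds checking on Integer.coerce() (values illustrative):
+#
+#     port = IntegerRange('port', 1, 65535)
+#     port.coerce('8080')    # 8080
+#     port.coerce('70000')   # raises InputError: too large
+#     port.coerce('   ')     # None, because allowNone defaults to 1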
+
+class Float(Argument):
+
+    defaultDefault = None
+
+    def __init__(self, name, allowNone=1, default=None, shortDesc=None,
+                 longDesc=None, hints=None):
+        #although Argument now has allowNone, that was recently added, and
+        #putting it at the end kept things which relied on argument order
+        #from breaking.  However, allowNone originally was in here, so
+        #I have to keep the same order, to prevent breaking code that
+        #depends on argument order only
+        Argument.__init__(self, name, default, shortDesc, longDesc, hints,
+                          allowNone)
+
+
+    def coerce(self, val):
+        if not val.strip() and self.allowNone:
+            return None
+        try:
+            return float(val)
+        except ValueError:
+            raise InputError("Invalid float: %s" % val)
+
+
+class Choice(Argument):
+    """
+    The result of a choice between enumerated types.  The choices should
+    be a list of tuples of tag, value, and description.  The tag will be
+    the value returned if the user hits "Submit", and the description
+    is the label for the enumerated type.  default is a list of all the
+    values (second elements in choices).  If no defaults are specified,
+    initially the first item will be selected.  Only one item can (should)
+    be selected at once.
+    """
+    def __init__(self, name, choices=[], default=[], shortDesc=None,
+                 longDesc=None, hints=None, allowNone=1):
+        self.choices = choices
+        if choices and not default:
+            # Copy instead of appending, so the shared mutable default list
+            # argument is never modified in place.
+            default = [choices[0][1]]
+        Argument.__init__(self, name, default, shortDesc, longDesc, hints, allowNone=allowNone)
+
+    def coerce(self, inIdent):
+        for ident, val, desc in self.choices:
+            if ident == inIdent:
+                return val
+        else:
+            raise InputError("Invalid Choice: %s" % inIdent)
+
+
+class Flags(Argument):
+    """
+    The result of a checkbox group or multi-menu.  The flags should be a
+    list of tuples of tag, value, and description. The tag will be
+    the value returned if the user hits "Submit", and the description
+    is the label for the enumerated type.  default is a list of all the
+    values (second elements in flags).  If no defaults are specified,
+    initially nothing will be selected.  Several items may be selected at
+    once.
+    """
+    def __init__(self, name, flags=(), default=(), shortDesc=None,
+                 longDesc=None, hints=None, allowNone=1):
+        self.flags = flags
+        Argument.__init__(self, name, default, shortDesc, longDesc, hints, allowNone=allowNone)
+
+    def coerce(self, inFlagKeys):
+        if not inFlagKeys:
+            return []
+        outFlags = []
+        for inFlagKey in inFlagKeys:
+            for flagKey, flagVal, flagDesc in self.flags:
+                if inFlagKey == flagKey:
+                    outFlags.append(flagVal)
+                    break
+            else:
+                raise InputError("Invalid Flag: %s" % inFlagKey)
+        return outFlags
+
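+# A brief coercion sketch (hypothetical field names), illustrating how Choice
+# maps a submitted tag to its value and how Flags maps a list of tags::
+#
+#     color = Choice("color", choices=[("r", "red", "Red"),
+#                                      ("g", "green", "Green")])
+#     color.coerce("g")            # -> "green"
+#
+#     toppings = Flags("toppings", flags=[("c", "cheese", "Cheese"),
+#                                         ("m", "mushroom", "Mushroom")])
+#     toppings.coerce(["c", "m"])  # -> ["cheese", "mushroom"]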
+
+class CheckGroup(Flags):
+    pass
+
+
+class RadioGroup(Choice):
+    pass
+
+
+class Boolean(Argument):
+    def coerce(self, inVal):
+        if not inVal:
+            return 0
+        lInVal = str(inVal).lower()
+        if lInVal in ('no', 'n', 'f', 'false', '0'):
+            return 0
+        return 1
+
+class File(Argument):
+    def __init__(self, name, allowNone=1, shortDesc=None, longDesc=None,
+                 hints=None):
+        Argument.__init__(self, name, None, shortDesc, longDesc, hints,
+                          allowNone=allowNone)
+
+    def coerce(self, file):
+        if not file and self.allowNone:
+            return None
+        elif file:
+            return file
+        else:
+            raise InputError("Invalid File")
+
+def positiveInt(x):
+    x = int(x)
+    if x <= 0: raise ValueError
+    return x
+
+class Date(Argument):
+    """A date -- (year, month, day) tuple."""
+
+    defaultDefault = None
+
+    def __init__(self, name, allowNone=1, default=None, shortDesc=None,
+                 longDesc=None, hints=None):
+        Argument.__init__(self, name, default, shortDesc, longDesc, hints)
+        self.allowNone = allowNone
+        if not allowNone:
+            self.defaultDefault = (1970, 1, 1)
+    
+    def coerce(self, args):
+        """Return tuple of ints (year, month, day)."""
+        if tuple(args) == ("", "", "") and self.allowNone:
+            return None
+        
+        try:
+            year, month, day = map(positiveInt, args)
+        except ValueError:
+            raise InputError("Invalid date")
+        if (month, day) == (2, 29):
+            if not calendar.isleap(year):
+                raise InputError("%d was not a leap year" % year)
+            else:
+                return year, month, day
+        try:
+            mdays = calendar.mdays[month]
+        except IndexError:
+            raise InputError("Invalid date")
+        if day > mdays:
+            raise InputError("Invalid date")
+        return year, month, day
+
+
+class Submit(Choice):
+    """Submit button or a reasonable facsimile thereof."""
+
+    def __init__(self, name, choices=[("Submit", "submit", "Submit form")],
+                 reset=0, shortDesc=None, longDesc=None, allowNone=0, hints=None):
+        Choice.__init__(self, name, choices=choices, shortDesc=shortDesc,
+                        longDesc=longDesc, hints=hints)
+        self.allowNone = allowNone
+        self.reset = reset
+
+    def coerce(self, value):
+        if self.allowNone and not value:
+            return None
+        else:
+            return Choice.coerce(self, value)
+
+
+class PresentationHint:
+    """
+    A hint to a particular system.
+    """
+
+
+class MethodSignature:
+
+    def __init__(self, *sigList):
+        """
+        """
+        self.methodSignature = sigList
+
+    def getArgument(self, name):
+        for a in self.methodSignature:
+            if a.name == name:
+                return a
+
+    def method(self, callable, takesRequest=False):
+        return FormMethod(self, callable, takesRequest)
+
+
+class FormMethod:
+    """A callable object with a signature."""
+
+    def __init__(self, signature, callable, takesRequest=False):
+        self.signature = signature
+        self.callable = callable
+        self.takesRequest = takesRequest
+
+    def getArgs(self):
+        return tuple(self.signature.methodSignature)
+
+    def call(self,*args,**kw):
+        return self.callable(*args,**kw)
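+
+# A minimal end-to-end sketch (hypothetical signature and callable), wiring a
+# MethodSignature to a plain function and coercing submitted strings::
+#
+#     def addUser(age, newsletter):
+#         return (age, newsletter)
+#
+#     signature = MethodSignature(
+#         IntegerRange("age", 0, 150),
+#         Boolean("newsletter"),
+#         Submit("submit"))
+#     method = signature.method(addUser)
+#     age = signature.getArgument("age").coerce("42")             # -> 42
+#     wantsMail = signature.getArgument("newsletter").coerce("y") # -> 1
+#     method.call(age, wantsMail)                                 # -> (42, 1)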
diff --git a/ThirdParty/Twisted/twisted/python/hashlib.py b/ThirdParty/Twisted/twisted/python/hashlib.py
new file mode 100644
index 0000000..f3ee0fe
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/hashlib.py
@@ -0,0 +1,24 @@
+# -*- test-case-name: twisted.python.test.test_hashlib -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+L{twisted.python.hashlib} presents a subset of the interface provided by
+U{hashlib<http://docs.python.org/library/hashlib.html>}.  The subset is the
+interface required by various parts of Twisted.  This allows application code
+to transparently use APIs which existed before C{hashlib} was introduced or to
+use C{hashlib} if it is available.
+"""
+
+
+try:
+    _hashlib = __import__("hashlib")
+except ImportError:
+    from md5 import md5
+    from sha import sha as sha1
+else:
+    md5  = _hashlib.md5
+    sha1 = _hashlib.sha1
+
+
+__all__ = ["md5", "sha1"]
diff --git a/ThirdParty/Twisted/twisted/python/hook.py b/ThirdParty/Twisted/twisted/python/hook.py
new file mode 100644
index 0000000..4142b0a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/hook.py
@@ -0,0 +1,176 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+
+"""
+I define support for hookable instance methods.
+
+These are methods on which you can register pre-call and post-call external
+functions to augment their functionality.  People familiar with more esoteric
+languages may think of these as \"method combinations\".
+
+This could be used to add optional preconditions, user-extensible callbacks
+(a-la emacs) or a thread-safety mechanism.
+
+The four exported calls are:
+
+   - L{addPre}
+   - L{addPost}
+   - L{removePre}
+   - L{removePost}
+
+All have the signature (class, methodName, callable), and the callable they
+take must always have the signature (instance, *args, **kw) unless the
+particular signature of the method they hook is known.
+
+Hooks should typically not throw exceptions; however, no effort will be made by
+this module to prevent them from doing so.  Pre-hooks will always be called,
+but post-hooks will only be called if the pre-hooks do not raise any exceptions
+(they will still be called if the main method raises an exception).  The return
+values and exception status of the main method will be propagated (assuming
+none of the hooks raise an exception).  Hooks will be executed in the order in
+which they are added.
+"""
+
+
+### Public Interface
+
+class HookError(Exception):
+    "An error which will fire when an invariant is violated."
+
+def addPre(klass, name, func):
+    """hook.addPre(klass, name, func) -> None
+
+    Add a function to be called before the method klass.name is invoked.
+    """
+
+    _addHook(klass, name, PRE, func)
+
+def addPost(klass, name, func):
+    """hook.addPost(klass, name, func) -> None
+
+    Add a function to be called after the method klass.name is invoked.
+    """
+    _addHook(klass, name, POST, func)
+
+def removePre(klass, name, func):
+    """hook.removePre(klass, name, func) -> None
+
+    Remove a function (previously registered with addPre) so that it
+    is no longer executed before klass.name.
+    """
+
+    _removeHook(klass, name, PRE, func)
+
+def removePost(klass, name, func):
+    """hook.removePre(klass, name, func) -> None
+
+    Remove a function (previously registered with addPost) so that it
+    is no longer executed after klass.name.
+    """
+    _removeHook(klass, name, POST, func)
+
+### "Helper" functions.
+
+hooked_func = """
+
+import %(module)s
+
+def %(name)s(*args, **kw):
+    klazz = %(module)s.%(klass)s
+    for preMethod in klazz.%(preName)s:
+        preMethod(*args, **kw)
+    try:
+        return klazz.%(originalName)s(*args, **kw)
+    finally:
+        for postMethod in klazz.%(postName)s:
+            postMethod(*args, **kw)
+"""
+
+_PRE = '__hook_pre_%s_%s_%s__'
+_POST = '__hook_post_%s_%s_%s__'
+_ORIG = '__hook_orig_%s_%s_%s__'
+
+
+def _XXX(k,n,s):
+    """
+    String manipulation garbage.
+    """
+    x = s % (k.__module__.replace('.', '_'), k.__name__, n)
+    return x
+
+def PRE(k,n):
+    "(private) munging to turn a method name into a pre-hook-method-name"
+    return _XXX(k,n,_PRE)
+
+def POST(k,n):
+    "(private) munging to turn a method name into a post-hook-method-name"
+    return _XXX(k,n,_POST)
+
+def ORIG(k,n):
+    "(private) munging to turn a method name into an `original' identifier"
+    return _XXX(k,n,_ORIG)
+
+
+def _addHook(klass, name, phase, func):
+    "(private) adds a hook to a method on a class"
+    _enhook(klass, name)
+
+    if not hasattr(klass, phase(klass, name)):
+        setattr(klass, phase(klass, name), [])
+
+    phaselist = getattr(klass, phase(klass, name))
+    phaselist.append(func)
+
+
+def _removeHook(klass, name, phase, func):
+    "(private) removes a hook from a method on a class"
+    phaselistname = phase(klass, name)
+    if not hasattr(klass, ORIG(klass,name)):
+        raise HookError("no hooks present!")
+
+    phaselist = getattr(klass, phase(klass, name))
+    try: phaselist.remove(func)
+    except ValueError:
+        raise HookError("hook %s not found in removal list for %s"%
+                    (name,klass))
+
+    if not getattr(klass, PRE(klass,name)) and not getattr(klass, POST(klass, name)):
+        _dehook(klass, name)
+
+def _enhook(klass, name):
+    "(private) causes a certain method name to be hooked on a class"
+    if hasattr(klass, ORIG(klass, name)):
+        return
+
+    def newfunc(*args, **kw):
+        for preMethod in getattr(klass, PRE(klass, name)):
+            preMethod(*args, **kw)
+        try:
+            return getattr(klass, ORIG(klass, name))(*args, **kw)
+        finally:
+            for postMethod in getattr(klass, POST(klass, name)):
+                postMethod(*args, **kw)
+    try:
+        newfunc.func_name = name
+    except TypeError:
+        # Older Pythons don't let you do this
+        pass
+
+    oldfunc = getattr(klass, name).im_func
+    setattr(klass, ORIG(klass, name), oldfunc)
+    setattr(klass, PRE(klass, name), [])
+    setattr(klass, POST(klass, name), [])
+    setattr(klass, name, newfunc)
+
+def _dehook(klass, name):
+    "(private) causes a certain method name no longer to be hooked on a class"
+
+    if not hasattr(klass, ORIG(klass, name)):
+        raise HookError("Cannot unhook!")
+    setattr(klass, name, getattr(klass, ORIG(klass,name)))
+    delattr(klass, PRE(klass,name))
+    delattr(klass, POST(klass,name))
+    delattr(klass, ORIG(klass,name))
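+
+# A minimal usage sketch (hypothetical class and hook functions), wrapping a
+# method with a pre-hook and a post-hook::
+#
+#     class Counter:
+#         def bump(self, amount):
+#             print 'bumping by', amount
+#
+#     def before(instance, amount):
+#         print 'pre-hook:', amount
+#
+#     def after(instance, amount):
+#         print 'post-hook:', amount
+#
+#     addPre(Counter, 'bump', before)
+#     addPost(Counter, 'bump', after)
+#     Counter().bump(2)    # pre-hook, the original bump, then post-hook
+#     removePre(Counter, 'bump', before)
+#     removePost(Counter, 'bump', after)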
diff --git a/ThirdParty/Twisted/twisted/python/htmlizer.py b/ThirdParty/Twisted/twisted/python/htmlizer.py
new file mode 100644
index 0000000..c95fb00
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/htmlizer.py
@@ -0,0 +1,91 @@
+# -*- test-case-name: twisted.python.test.test_htmlizer -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+HTML rendering of Python source.
+"""
+
+import tokenize, cgi, keyword
+from twisted.python import reflect
+
+class TokenPrinter:
+
+    currentCol, currentLine = 0, 1
+    lastIdentifier = parameters = 0
+
+    def __init__(self, writer):
+        self.writer = writer
+
+    def printtoken(self, type, token, (srow, scol), (erow, ecol), line):
+        #print "printtoken(%r,%r,%r,(%r,%r),(%r,%r),%r), row=%r,col=%r" % (
+        #    self, type, token, srow,scol, erow,ecol, line,
+        #    self.currentLine, self.currentCol)
+        if self.currentLine < srow:
+            self.writer('\n'*(srow-self.currentLine))
+            self.currentLine, self.currentCol = srow, 0
+        self.writer(' '*(scol-self.currentCol))
+        if self.lastIdentifier:
+            type = "identifier"
+            self.parameters = 1
+        elif type == tokenize.NAME:
+            if keyword.iskeyword(token):
+                type = 'keyword'
+            elif self.parameters:
+                type = 'parameter'
+            else:
+                type = 'variable'
+        else:
+            type = tokenize.tok_name.get(type).lower()
+        self.writer(token, type)
+        self.currentCol = ecol
+        self.currentLine += token.count('\n')
+        if self.currentLine != erow:
+            self.currentCol = 0
+        self.lastIdentifier = token in ('def', 'class')
+        if token == ':':
+            self.parameters = 0
+
+
+class HTMLWriter:
+
+    noSpan = []
+
+    def __init__(self, writer):
+        self.writer = writer
+        noSpan = []
+        reflect.accumulateClassList(self.__class__, "noSpan", noSpan)
+        self.noSpan = noSpan
+
+    def write(self, token, type=None):
+        token = cgi.escape(token)
+        if (type is None) or (type in self.noSpan):
+            self.writer(token)
+        else:
+            self.writer('<span class="py-src-%s">%s</span>' %
+                        (type, token))
+
+
+class SmallerHTMLWriter(HTMLWriter):
+    """HTMLWriter that doesn't generate spans for some junk.
+
+    Results in much smaller HTML output.
+    """
+    noSpan = ["endmarker", "indent", "dedent", "op", "newline", "nl"]
+
+def filter(inp, out, writer=HTMLWriter):
+    out.write('<pre>')
+    printer = TokenPrinter(writer(out.write).write).printtoken
+    try:
+        tokenize.tokenize(inp.readline, printer)
+    except tokenize.TokenError:
+        pass
+    out.write('</pre>\n')
+
+def main():
+    import sys
+    filter(open(sys.argv[1]), sys.stdout)
+
+if __name__ == '__main__':
+    main()
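+
+# Example (hypothetical file names): render a module's source to HTML with the
+# span-light writer::
+#
+#     from twisted.python import htmlizer
+#     inp = open('some_module.py')
+#     out = open('some_module.html', 'w')
+#     htmlizer.filter(inp, out, writer=htmlizer.SmallerHTMLWriter)
+#     inp.close()
+#     out.close()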
diff --git a/ThirdParty/Twisted/twisted/python/lockfile.py b/ThirdParty/Twisted/twisted/python/lockfile.py
new file mode 100644
index 0000000..a044957
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/lockfile.py
@@ -0,0 +1,214 @@
+# -*- test-case-name: twisted.test.test_lockfile -*-
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Filesystem-based interprocess mutex.
+"""
+
+__metaclass__ = type
+
+import errno, os
+
+from time import time as _uniquefloat
+
+from twisted.python.runtime import platform
+
+def unique():
+    return str(long(_uniquefloat() * 1000))
+
+from os import rename
+if not platform.isWindows():
+    from os import kill
+    from os import symlink
+    from os import readlink
+    from os import remove as rmlink
+    _windows = False
+else:
+    _windows = True
+
+    try:
+        from win32api import OpenProcess
+        import pywintypes
+    except ImportError:
+        kill = None
+    else:
+        ERROR_ACCESS_DENIED = 5
+        ERROR_INVALID_PARAMETER = 87
+
+        def kill(pid, signal):
+            try:
+                OpenProcess(0, 0, pid)
+            except pywintypes.error, e:
+                if e.args[0] == ERROR_ACCESS_DENIED:
+                    return
+                elif e.args[0] == ERROR_INVALID_PARAMETER:
+                    raise OSError(errno.ESRCH, None)
+                raise
+            else:
+                raise RuntimeError("OpenProcess is required to fail.")
+
+    _open = file
+
+    # XXX Implement an atomic thingamajig for win32
+    def symlink(value, filename):
+        newlinkname = filename+"."+unique()+'.newlink'
+        newvalname = os.path.join(newlinkname,"symlink")
+        os.mkdir(newlinkname)
+        f = _open(newvalname,'wcb')
+        f.write(value)
+        f.flush()
+        f.close()
+        try:
+            rename(newlinkname, filename)
+        except:
+            os.remove(newvalname)
+            os.rmdir(newlinkname)
+            raise
+
+    def readlink(filename):
+        try:
+            fObj = _open(os.path.join(filename,'symlink'), 'rb')
+        except IOError, e:
+            if e.errno == errno.ENOENT or e.errno == errno.EIO:
+                raise OSError(e.errno, None)
+            raise
+        else:
+            result = fObj.read()
+            fObj.close()
+            return result
+
+    def rmlink(filename):
+        os.remove(os.path.join(filename, 'symlink'))
+        os.rmdir(filename)
+
+
+
+class FilesystemLock:
+    """
+    A mutex.
+
+    This relies on the filesystem property that creating
+    a symlink is an atomic operation and that it will
+    fail if the symlink already exists.  Deleting the
+    symlink will release the lock.
+
+    @ivar name: The name of the file associated with this lock.
+
+    @ivar clean: Indicates whether this lock was released cleanly by its
+        last owner.  Only meaningful after C{lock} has been called and
+        returns True.
+
+    @ivar locked: Indicates whether the lock is currently held by this
+        object.
+    """
+
+    clean = None
+    locked = False
+
+    def __init__(self, name):
+        self.name = name
+
+
+    def lock(self):
+        """
+        Acquire this lock.
+
+        @rtype: C{bool}
+        @return: True if the lock is acquired, false otherwise.
+
+        @raise: Any exception os.symlink() may raise, other than
+        EEXIST.
+        """
+        clean = True
+        while True:
+            try:
+                symlink(str(os.getpid()), self.name)
+            except OSError, e:
+                if _windows and e.errno in (errno.EACCES, errno.EIO):
+                    # The lock is in the middle of being deleted because we're
+                    # on Windows where lock removal isn't atomic.  Give up, we
+                    # don't know how long this is going to take.
+                    return False
+                if e.errno == errno.EEXIST:
+                    try:
+                        pid = readlink(self.name)
+                    except OSError, e:
+                        if e.errno == errno.ENOENT:
+                            # The lock has vanished, try to claim it in the
+                            # next iteration through the loop.
+                            continue
+                        raise
+                    except IOError, e:
+                        if _windows and e.errno == errno.EACCES:
+                            # The lock is in the middle of being
+                            # deleted because we're on Windows where
+                            # lock removal isn't atomic.  Give up, we
+                            # don't know how long this is going to
+                            # take.
+                            return False
+                        raise
+                    try:
+                        if kill is not None:
+                            kill(int(pid), 0)
+                    except OSError, e:
+                        if e.errno == errno.ESRCH:
+                            # The owner has vanished, try to claim it in the next
+                            # iteration through the loop.
+                            try:
+                                rmlink(self.name)
+                            except OSError, e:
+                                if e.errno == errno.ENOENT:
+                                    # Another process cleaned up the lock.
+                                    # Race them to acquire it in the next
+                                    # iteration through the loop.
+                                    continue
+                                raise
+                            clean = False
+                            continue
+                        raise
+                    return False
+                raise
+            self.locked = True
+            self.clean = clean
+            return True
+
+
+    def unlock(self):
+        """
+        Release this lock.
+
+        This removes the lock symlink (on Windows, the lock directory) with the given name.
+
+        @raise: Any exception os.readlink() may raise, or
+        ValueError if the lock is not owned by this process.
+        """
+        pid = readlink(self.name)
+        if int(pid) != os.getpid():
+            raise ValueError("Lock %r not owned by this process" % (self.name,))
+        rmlink(self.name)
+        self.locked = False
+
+
+def isLocked(name):
+    """Determine if the lock of the given name is held or not.
+
+    @type name: C{str}
+    @param name: The filesystem path to the lock to test
+
+    @rtype: C{bool}
+    @return: True if the lock is held, False otherwise.
+    """
+    l = FilesystemLock(name)
+    result = None
+    try:
+        result = l.lock()
+    finally:
+        if result:
+            l.unlock()
+    return not result
+
+
+__all__ = ['FilesystemLock', 'isLocked']
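+
+# A minimal usage sketch (hypothetical lock name)::
+#
+#     from twisted.python.lockfile import FilesystemLock
+#     lock = FilesystemLock('myservice.lock')
+#     if lock.lock():
+#         try:
+#             pass  # ... do the work that needs mutual exclusion ...
+#         finally:
+#             lock.unlock()
+#     else:
+#         print 'another process holds myservice.lock'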
+
diff --git a/ThirdParty/Twisted/twisted/python/log.py b/ThirdParty/Twisted/twisted/python/log.py
new file mode 100644
index 0000000..53a0ef2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/log.py
@@ -0,0 +1,629 @@
+# -*- test-case-name: twisted.test.test_log -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Logging and metrics infrastructure.
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+import time
+import warnings
+from datetime import datetime
+import logging
+
+from zope.interface import Interface
+
+from twisted.python.compat import unicode, _PY3
+from twisted.python import context
+from twisted.python import _reflectpy3 as reflect
+from twisted.python import _utilpy3 as util
+from twisted.python import failure
+from twisted.python.threadable import synchronize
+
+
+class ILogContext:
+    """
+    Actually, this interface is just a synonym for the dictionary interface,
+    but it serves as a key for the default information in a log.
+
+    I do not inherit from C{Interface} because the world is a cruel place.
+    """
+
+
+
+class ILogObserver(Interface):
+    """
+    An observer which can do something with log events.
+
+    Given that most log observers are actually bound methods, it's okay to not
+    explicitly declare provision of this interface.
+    """
+    def __call__(eventDict):
+        """
+        Log an event.
+
+        @type eventDict: C{dict} with C{str} keys.
+        @param eventDict: A dictionary with arbitrary keys.  However, these
+            keys are often available:
+              - C{message}: A C{tuple} of C{str} containing messages to be
+                logged.
+              - C{system}: A C{str} which indicates the "system" which is
+                generating this event.
+              - C{isError}: A C{bool} indicating whether this event represents
+                an error.
+              - C{failure}: A L{failure.Failure} instance
+              - C{why}: Used as header of the traceback in case of errors.
+              - C{format}: A string format used in place of C{message} to
+                customize the event.  The intent is for the observer to format
+                a message by doing something like C{format % eventDict}.
+        """
+
+
+
+context.setDefault(ILogContext,
+                   {"isError": 0,
+                    "system": "-"})
+
+def callWithContext(ctx, func, *args, **kw):
+    newCtx = context.get(ILogContext).copy()
+    newCtx.update(ctx)
+    return context.call({ILogContext: newCtx}, func, *args, **kw)
+
+def callWithLogger(logger, func, *args, **kw):
+    """
+    Utility method which wraps a function in a try:/except:, logs a failure if
+    one occurs, and uses the system's logPrefix.
+    """
+    try:
+        lp = logger.logPrefix()
+    except KeyboardInterrupt:
+        raise
+    except:
+        lp = '(buggy logPrefix method)'
+        err(system=lp)
+    try:
+        return callWithContext({"system": lp}, func, *args, **kw)
+    except KeyboardInterrupt:
+        raise
+    except:
+        err(system=lp)
+
+
+
+_keepErrors = 0
+_keptErrors = []
+_ignoreErrors = []
+
+
+def err(_stuff=None, _why=None, **kw):
+    """
+    Write a failure to the log.
+
+    The C{_stuff} and C{_why} parameters use an underscore prefix to lessen
+    the chance of colliding with a keyword argument the application wishes
+    to pass.  It is intended that they be supplied with arguments passed
+    positionally, not by keyword.
+
+    @param _stuff: The failure to log.  If C{_stuff} is C{None} a new
+        L{Failure} will be created from the current exception state.  If
+        C{_stuff} is an C{Exception} instance it will be wrapped in a
+        L{Failure}.
+    @type _stuff: C{NoneType}, C{Exception}, or L{Failure}.
+
+    @param _why: The source of this failure.  This will be logged along with
+        C{_stuff} and should describe the context in which the failure
+        occurred.
+    @type _why: C{str}
+    """
+    if _stuff is None:
+        _stuff = failure.Failure()
+    if isinstance(_stuff, failure.Failure):
+        if _keepErrors:
+            if _ignoreErrors:
+                keep = 0
+                for err in _ignoreErrors:
+                    r = _stuff.check(err)
+                    if r:
+                        keep = 0
+                        break
+                    else:
+                        keep = 1
+                if keep:
+                    _keptErrors.append(_stuff)
+            else:
+                _keptErrors.append(_stuff)
+        msg(failure=_stuff, why=_why, isError=1, **kw)
+    elif isinstance(_stuff, Exception):
+        msg(failure=failure.Failure(_stuff), why=_why, isError=1, **kw)
+    else:
+        msg(repr(_stuff), why=_why, isError=1, **kw)
+
+deferr = err
+
+
+class Logger:
+    """
+    This represents a class which may 'own' a log. Used by subclassing.
+    """
+    def logPrefix(self):
+        """
+        Override this method to insert custom logging behavior.  Its
+        return value will be inserted in front of every line.  It may
+        be called more times than the number of output lines.
+        """
+        return '-'
+
+
+
+class LogPublisher:
+    """
+    Class for singleton log message publishing.
+    """
+
+    synchronized = ['msg']
+
+    def __init__(self):
+        self.observers = []
+
+    def addObserver(self, other):
+        """
+        Add a new observer.
+
+        @type other: Provider of L{ILogObserver}
+        @param other: A callable object that will be called with each new log
+            message (a dict).
+        """
+        assert callable(other)
+        self.observers.append(other)
+
+    def removeObserver(self, other):
+        """
+        Remove an observer.
+        """
+        self.observers.remove(other)
+
+    def msg(self, *message, **kw):
+        """
+        Log a new message.
+
+        The message should be a native string, i.e. bytes on Python 2 and
+        Unicode on Python 3. For compatibility with both, use the native string
+        syntax, for example::
+
+        >>> log.msg('Hello, world.')
+
+        You MUST avoid passing in Unicode on Python 2.  Also avoid the
+        multiple-argument form::
+
+        >>> log.msg('Hello ', 'world.')
+
+        This form only works (sometimes) by accident.
+        """
+        actualEventDict = (context.get(ILogContext) or {}).copy()
+        actualEventDict.update(kw)
+        actualEventDict['message'] = message
+        actualEventDict['time'] = time.time()
+        for i in range(len(self.observers) - 1, -1, -1):
+            try:
+                self.observers[i](actualEventDict)
+            except KeyboardInterrupt:
+                # Don't swallow keyboard interrupt!
+                raise
+            except UnicodeEncodeError:
+                raise
+            except:
+                observer = self.observers[i]
+                self.observers[i] = lambda event: None
+                try:
+                    self._err(failure.Failure(),
+                        "Log observer %s failed." % (observer,))
+                except:
+                    # Sometimes err() will throw an exception,
+                    # e.g. RuntimeError due to blowing the stack; if that
+                    # happens, there's not much we can do...
+                    pass
+                self.observers[i] = observer
+
+
+    def _err(self, failure, why):
+        """
+        Log a failure.
+
+        Similar in functionality to the global L{err} function, but the failure
+        gets published only to observers attached to this publisher.
+
+        @param failure: The failure to log.
+        @type failure: L{Failure}.
+
+        @param why: The source of this failure.  This will be logged along with
+            the C{failure} and should describe the context in which the failure
+            occurred.
+        @type why: C{str}
+        """
+        self.msg(failure=failure, why=why, isError=1)
+
+
+    def showwarning(self, message, category, filename, lineno, file=None,
+                    line=None):
+        """
+        Twisted-enabled wrapper around L{warnings.showwarning}.
+
+        If C{file} is C{None}, the default behaviour is to emit the warning to
+        the log system, otherwise the original L{warnings.showwarning} Python
+        function is called.
+        """
+        if file is None:
+            self.msg(warning=message, category=reflect.qual(category),
+                     filename=filename, lineno=lineno,
+                     format="%(filename)s:%(lineno)s: %(category)s: %(warning)s")
+        else:
+            if sys.version_info < (2, 6):
+                _oldshowwarning(message, category, filename, lineno, file)
+            else:
+                _oldshowwarning(message, category, filename, lineno, file, line)
+
+synchronize(LogPublisher)
+
+
+
+try:
+    theLogPublisher
+except NameError:
+    theLogPublisher = LogPublisher()
+    addObserver = theLogPublisher.addObserver
+    removeObserver = theLogPublisher.removeObserver
+    msg = theLogPublisher.msg
+    showwarning = theLogPublisher.showwarning
+
+
+
+def _safeFormat(fmtString, fmtDict):
+    """
+    Try to format the string C{fmtString} using C{fmtDict} arguments,
+    swallowing all errors to always return a string.
+    """
+    # There's a way we could make this, if not safer, at least more
+    # informative: perhaps some sort of str/repr wrapper objects
+    # could be wrapped around the things inside of C{fmtDict}.  That way,
+    # if the event dict contains an object with a bad __repr__, we
+    # would only complain about that individual object instead of the
+    # entire event dict.
+    try:
+        text = fmtString % fmtDict
+    except KeyboardInterrupt:
+        raise
+    except:
+        try:
+            text = ('Invalid format string or unformattable object in log message: %r, %s' % (fmtString, fmtDict))
+        except:
+            try:
+                text = 'UNFORMATTABLE OBJECT WRITTEN TO LOG with fmt %r, MESSAGE LOST' % (fmtString,)
+            except:
+                text = 'PATHOLOGICAL ERROR IN BOTH FORMAT STRING AND MESSAGE DETAILS, MESSAGE LOST'
+    return text
+
+
+def textFromEventDict(eventDict):
+    """
+    Extract text from an event dict passed to a log observer. If it cannot
+    handle the dict, it returns None.
+
+    The possible keys of eventDict are:
+     - C{message}: by default, it holds the final text. It's required, but can
+       be empty if either C{isError} or C{format} is provided (the first
+       having the priority).
+     - C{isError}: boolean indicating the nature of the event.
+     - C{failure}: L{failure.Failure} instance, required if the event is an
+       error.
+     - C{why}: if defined, used as header of the traceback in case of errors.
+     - C{format}: string format used in place of C{message} to customize
+       the event. It uses all keys present in C{eventDict} to format
+       the text.
+    Other keys will be used when applying the C{format}, or ignored.
+    """
+    edm = eventDict['message']
+    if not edm:
+        if eventDict['isError'] and 'failure' in eventDict:
+            text = ((eventDict.get('why') or 'Unhandled Error')
+                    + '\n' + eventDict['failure'].getTraceback())
+        elif 'format' in eventDict:
+            text = _safeFormat(eventDict['format'], eventDict)
+        else:
+            # we don't know how to log this
+            return
+    else:
+        text = ' '.join(map(reflect.safe_str, edm))
+    return text
+
+
+class FileLogObserver:
+    """
+    Log observer that writes to a file-like object.
+
+    @type timeFormat: C{str} or C{NoneType}
+    @ivar timeFormat: If not C{None}, the format string passed to strftime().
+    """
+    timeFormat = None
+
+    def __init__(self, f):
+        self.write = f.write
+        self.flush = f.flush
+
+    def getTimezoneOffset(self, when):
+        """
+        Return the current local timezone offset from UTC.
+
+        @type when: C{int}
+        @param when: POSIX (ie, UTC) timestamp for which to find the offset.
+
+        @rtype: C{int}
+        @return: The number of seconds offset from UTC.  West is positive,
+        east is negative.
+        """
+        offset = datetime.utcfromtimestamp(when) - datetime.fromtimestamp(when)
+        return offset.days * (60 * 60 * 24) + offset.seconds
+
+    def formatTime(self, when):
+        """
+        Format the given UTC value as a string representing that time in the
+        local timezone.
+
+        By default it's formatted as an ISO8601-like string (ISO8601 date and
+        ISO8601 time separated by a space). It can be customized using the
+        C{timeFormat} attribute, which will be used as input for the underlying
+        C{time.strftime} call.
+
+        @type when: C{int}
+        @param when: POSIX (ie, UTC) timestamp for which to find the offset.
+
+        @rtype: C{str}
+        """
+        if self.timeFormat is not None:
+            return time.strftime(self.timeFormat, time.localtime(when))
+
+        tzOffset = -self.getTimezoneOffset(when)
+        when = datetime.utcfromtimestamp(when + tzOffset)
+        tzHour = abs(int(tzOffset / 60 / 60))
+        tzMin = abs(int(tzOffset / 60 % 60))
+        if tzOffset < 0:
+            tzSign = '-'
+        else:
+            tzSign = '+'
+        return '%d-%02d-%02d %02d:%02d:%02d%s%02d%02d' % (
+            when.year, when.month, when.day,
+            when.hour, when.minute, when.second,
+            tzSign, tzHour, tzMin)
+
+    def emit(self, eventDict):
+        text = textFromEventDict(eventDict)
+        if text is None:
+            return
+
+        timeStr = self.formatTime(eventDict['time'])
+        fmtDict = {'system': eventDict['system'], 'text': text.replace("\n", "\n\t")}
+        msgStr = _safeFormat("[%(system)s] %(text)s\n", fmtDict)
+
+        util.untilConcludes(self.write, timeStr + " " + msgStr)
+        util.untilConcludes(self.flush)  # Hoorj!
+
+    def start(self):
+        """
+        Start observing log events.
+        """
+        addObserver(self.emit)
+
+    def stop(self):
+        """
+        Stop observing log events.
+        """
+        removeObserver(self.emit)
+
+
+class PythonLoggingObserver(object):
+    """
+    Output twisted messages to Python standard library L{logging} module.
+
+    WARNING: specific logging configurations (example: network) can lead to
+    a blocking system.  Nothing is done here to prevent that, so be careful
+    when using this observer: code within Twisted, such as twisted.web,
+    assumes that logging does not block.
+    """
+
+    def __init__(self, loggerName="twisted"):
+        """
+        @param loggerName: identifier used for getting logger.
+        @type loggerName: C{str}
+        """
+        self.logger = logging.getLogger(loggerName)
+
+    def emit(self, eventDict):
+        """
+        Receive a twisted log entry, format it and bridge it to python.
+
+        By default the logging level used is info; log.err produces error
+        level, and you can customize the level by using the C{logLevel} key::
+
+        >>> log.msg('debugging', logLevel=logging.DEBUG)
+
+        """
+        if 'logLevel' in eventDict:
+            level = eventDict['logLevel']
+        elif eventDict['isError']:
+            level = logging.ERROR
+        else:
+            level = logging.INFO
+        text = textFromEventDict(eventDict)
+        if text is None:
+            return
+        self.logger.log(level, text)
+
+    def start(self):
+        """
+        Start observing log events.
+        """
+        addObserver(self.emit)
+
+    def stop(self):
+        """
+        Stop observing log events.
+        """
+        removeObserver(self.emit)
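+
+# A small bridging sketch (assumes the stdlib logging module has already been
+# configured, e.g. via logging.basicConfig)::
+#
+#     import logging
+#     from twisted.python import log
+#     logging.basicConfig(level=logging.DEBUG)
+#     observer = log.PythonLoggingObserver(loggerName="twisted")
+#     observer.start()
+#     log.msg('forwarded to the stdlib logger', logLevel=logging.DEBUG)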
+
+
+class StdioOnnaStick:
+    """
+    Class that pretends to be stdout/err, and turns writes into log messages.
+
+    @ivar isError: boolean indicating whether this is stderr, in which case
+                   log messages will be logged as errors.
+
+    @ivar encoding: unicode encoding used to encode any unicode strings
+                    written to this object.
+    """
+
+    closed = 0
+    softspace = 0
+    mode = 'wb'
+    name = '<stdio (log)>'
+
+    def __init__(self, isError=0, encoding=None):
+        self.isError = isError
+        if encoding is None:
+            encoding = sys.getdefaultencoding()
+        self.encoding = encoding
+        self.buf = ''
+  
+    def close(self):
+        pass
+
+    def fileno(self):
+        return -1
+
+    def flush(self):
+        pass
+
+    def read(self):
+        raise IOError("can't read from the log!")
+
+    readline = read
+    readlines = read
+    seek = read
+    tell = read
+
+    def write(self, data):
+        if not _PY3 and isinstance(data, unicode):
+            data = data.encode(self.encoding)
+        d = (self.buf + data).split('\n')
+        self.buf = d[-1]
+        messages = d[0:-1]
+        for message in messages:
+            msg(message, printed=1, isError=self.isError)
+
+    def writelines(self, lines):
+        for line in lines:
+            if not _PY3 and isinstance(line, unicode):
+                line = line.encode(self.encoding)
+            msg(line, printed=1, isError=self.isError)
+
+
+try:
+    _oldshowwarning
+except NameError:
+    _oldshowwarning = None
+
+
+def startLogging(file, *a, **kw):
+    """
+    Initialize logging to a specified file.
+
+    @return: A L{FileLogObserver} if a new observer is added, None otherwise.
+    """
+    if isinstance(file, StdioOnnaStick):
+        return
+    flo = FileLogObserver(file)
+    startLoggingWithObserver(flo.emit, *a, **kw)
+    return flo
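+
+# A typical startup sketch from application code (assumes sys.stdout is an
+# acceptable destination)::
+#
+#     import sys
+#     from twisted.python import log
+#     log.startLogging(sys.stdout)
+#     log.msg('Service starting up.')
+#     try:
+#         1 / 0
+#     except ZeroDivisionError:
+#         log.err(None, 'arithmetic went wrong')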
+
+
+
+def startLoggingWithObserver(observer, setStdout=1):
+    """
+    Initialize logging to a specified observer. If setStdout is true
+    (defaults to yes), also redirect sys.stdout and sys.stderr
+    to the log system.
+    """
+    global defaultObserver, _oldshowwarning
+    if not _oldshowwarning:
+        _oldshowwarning = warnings.showwarning
+        warnings.showwarning = showwarning
+    if defaultObserver:
+        defaultObserver.stop()
+        defaultObserver = None
+    addObserver(observer)
+    msg("Log opened.")
+    if setStdout:
+        sys.stdout = logfile
+        sys.stderr = logerr
+
+
+class NullFile:
+    softspace = 0
+    def read(self): pass
+    def write(self, bytes): pass
+    def flush(self): pass
+    def close(self): pass
+
+
+def discardLogs():
+    """
+    Throw away all logs.
+    """
+    global logfile
+    logfile = NullFile()
+
+
+# Prevent logfile from being erased on reload.  This only works in cpython.
+try:
+    logfile
+except NameError:
+    logfile = StdioOnnaStick(0, getattr(sys.stdout, "encoding", None))
+    logerr = StdioOnnaStick(1, getattr(sys.stderr, "encoding", None))
+
+
+
+class DefaultObserver:
+    """
+    Default observer.
+
+    Will ignore all non-error messages and send error messages to sys.stderr.
+    Will be removed when startLogging() is called for the first time.
+    """
+    stderr = sys.stderr
+
+    def _emit(self, eventDict):
+        if eventDict["isError"]:
+            if 'failure' in eventDict:
+                text = ((eventDict.get('why') or 'Unhandled Error')
+                        + '\n' + eventDict['failure'].getTraceback())
+            else:
+                text = " ".join([str(m) for m in eventDict["message"]]) + "\n"
+
+            self.stderr.write(text)
+            self.stderr.flush()
+
+    def start(self):
+        addObserver(self._emit)
+
+    def stop(self):
+        removeObserver(self._emit)
+
+
+
+try:
+    defaultObserver
+except NameError:
+    defaultObserver = DefaultObserver()
+    defaultObserver.start()
+
diff --git a/ThirdParty/Twisted/twisted/python/logfile.py b/ThirdParty/Twisted/twisted/python/logfile.py
new file mode 100644
index 0000000..f652271
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/logfile.py
@@ -0,0 +1,323 @@
+# -*- test-case-name: twisted.test.test_logfile -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A rotating, browsable log file.
+"""
+
+# System Imports
+import os, glob, time, stat
+
+from twisted.python import threadable
+
+
+
+class BaseLogFile:
+    """
+    The base class for a log file that can be rotated.
+    """
+
+    synchronized = ["write", "rotate"]
+
+    def __init__(self, name, directory, defaultMode=None):
+        """
+        Create a log file.
+
+        @param name: name of the file
+        @param directory: directory holding the file
+        @param defaultMode: permissions used to create the file. Defaults to
+        the current permissions of the file if the file exists.
+        """
+        self.directory = directory
+        self.name = name
+        self.path = os.path.join(directory, name)
+        if defaultMode is None and os.path.exists(self.path):
+            self.defaultMode = stat.S_IMODE(os.stat(self.path)[stat.ST_MODE])
+        else:
+            self.defaultMode = defaultMode
+        self._openFile()
+
+    def fromFullPath(cls, filename, *args, **kwargs):
+        """
+        Construct a log file from a full file path.
+        """
+        logPath = os.path.abspath(filename)
+        return cls(os.path.basename(logPath),
+                   os.path.dirname(logPath), *args, **kwargs)
+    fromFullPath = classmethod(fromFullPath)
+
+    def shouldRotate(self):
+        """
+        Override with a method that returns true if the log
+        should be rotated.
+        """
+        raise NotImplementedError
+
+    def _openFile(self):
+        """
+        Open the log file.
+        """
+        self.closed = False
+        if os.path.exists(self.path):
+            self._file = file(self.path, "r+", 1)
+            self._file.seek(0, 2)
+        else:
+            if self.defaultMode is not None:
+                # Set the lowest permissions
+                oldUmask = os.umask(0777)
+                try:
+                    self._file = file(self.path, "w+", 1)
+                finally:
+                    os.umask(oldUmask)
+            else:
+                self._file = file(self.path, "w+", 1)
+        if self.defaultMode is not None:
+            try:
+                os.chmod(self.path, self.defaultMode)
+            except OSError:
+                # Probably /dev/null or something?
+                pass
+
+    def __getstate__(self):
+        state = self.__dict__.copy()
+        del state["_file"]
+        return state
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+        self._openFile()
+
+    def write(self, data):
+        """
+        Write some data to the file.
+        """
+        if self.shouldRotate():
+            self.flush()
+            self.rotate()
+        self._file.write(data)
+
+    def flush(self):
+        """
+        Flush the file.
+        """
+        self._file.flush()
+
+    def close(self):
+        """
+        Close the file.
+
+        The file cannot be used once it has been closed.
+        """
+        self.closed = True
+        self._file.close()
+        self._file = None
+
+
+    def reopen(self):
+        """
+        Reopen the log file. This is mainly useful if you use an external log
+        rotation tool, which moves the file out from under you.
+
+        Note that on Windows you probably need a specific API to rename the
+        file, as simply calling os.rename on an open file is not supported.
+        """
+        self.close()
+        self._openFile()
+
+
+    def getCurrentLog(self):
+        """
+        Return a LogReader for the current log file.
+        """
+        return LogReader(self.path)
+
+
+class LogFile(BaseLogFile):
+    """
+    A log file that can be rotated.
+
+    A rotateLength of None disables automatic log rotation.
+    """
+    def __init__(self, name, directory, rotateLength=1000000, defaultMode=None,
+                 maxRotatedFiles=None):
+        """
+        Create a log file rotating on length.
+
+        @param name: file name.
+        @type name: C{str}
+        @param directory: directory holding the log file.
+        @type directory: C{str}
+        @param rotateLength: size, in bytes, at which the log file rotates.
+            Defaults to 1000000 (about 1MB).
+        @type rotateLength: C{int}
+        @param defaultMode: mode used to create the file.
+        @type defaultMode: C{int}
+        @param maxRotatedFiles: if not None, max number of log files the class
+            creates. Warning: it removes all log files above this number.
+        @type maxRotatedFiles: C{int}
+        """
+        BaseLogFile.__init__(self, name, directory, defaultMode)
+        self.rotateLength = rotateLength
+        self.maxRotatedFiles = maxRotatedFiles
+
+    def _openFile(self):
+        BaseLogFile._openFile(self)
+        self.size = self._file.tell()
+
+    def shouldRotate(self):
+        """
+        Rotate when the log file size is larger than rotateLength.
+        """
+        return self.rotateLength and self.size >= self.rotateLength
+
+    def getLog(self, identifier):
+        """
+        Given an integer, return a LogReader for an old log file.
+        """
+        filename = "%s.%d" % (self.path, identifier)
+        if not os.path.exists(filename):
+            raise ValueError, "no such logfile exists"
+        return LogReader(filename)
+
+    def write(self, data):
+        """
+        Write some data to the file.
+        """
+        BaseLogFile.write(self, data)
+        self.size += len(data)
+
+    def rotate(self):
+        """
+        Rotate the file and create a new one.
+
+        If it's not possible to open a new logfile, this will fail silently
+        and continue logging to the old logfile.
+        """
+        if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
+            return
+        logs = self.listLogs()
+        logs.reverse()
+        for i in logs:
+            if self.maxRotatedFiles is not None and i >= self.maxRotatedFiles:
+                os.remove("%s.%d" % (self.path, i))
+            else:
+                os.rename("%s.%d" % (self.path, i), "%s.%d" % (self.path, i + 1))
+        self._file.close()
+        os.rename(self.path, "%s.1" % self.path)
+        self._openFile()
+
+    def listLogs(self):
+        """
+        Return sorted list of integers - the old logs' identifiers.
+        """
+        result = []
+        for name in glob.glob("%s.*" % self.path):
+            try:
+                counter = int(name.split('.')[-1])
+                if counter:
+                    result.append(counter)
+            except ValueError:
+                pass
+        result.sort()
+        return result
+
+    def __getstate__(self):
+        state = BaseLogFile.__getstate__(self)
+        del state["size"]
+        return state
+
+threadable.synchronize(LogFile)
+
+
+class DailyLogFile(BaseLogFile):
+    """A log file that is rotated daily (at or after midnight localtime)
+    """
+    def _openFile(self):
+        BaseLogFile._openFile(self)
+        self.lastDate = self.toDate(os.stat(self.path)[8])
+
+    def shouldRotate(self):
+        """Rotate when the date has changed since last write"""
+        return self.toDate() > self.lastDate
+
+    def toDate(self, *args):
+        """Convert a unixtime to (year, month, day) localtime tuple,
+        or return the current (year, month, day) localtime tuple.
+
+        This function primarily exists so you may overload it with
+        gmtime, or some cruft to make unit testing possible.
+        """
+        # primarily so this can be unit tested easily
+        return time.localtime(*args)[:3]
+
+    def suffix(self, tupledate):
+        """Return the suffix given a (year, month, day) tuple or unixtime"""
+        try:
+            return '_'.join(map(str, tupledate))
+        except:
+            # try taking a float unixtime
+            return '_'.join(map(str, self.toDate(tupledate)))
+
+    def getLog(self, identifier):
+        """Given a unix time, return a LogReader for an old log file."""
+        if self.toDate(identifier) == self.lastDate:
+            return self.getCurrentLog()
+        filename = "%s.%s" % (self.path, self.suffix(identifier))
+        if not os.path.exists(filename):
+            raise ValueError, "no such logfile exists"
+        return LogReader(filename)
+
+    def write(self, data):
+        """Write some data to the log file"""
+        BaseLogFile.write(self, data)
+        # Guard against a corner case where time.time()
+        # could potentially run backwards to yesterday.
+        # Primarily due to network time.
+        self.lastDate = max(self.lastDate, self.toDate())
+
+    def rotate(self):
+        """Rotate the file and create a new one.
+
+        If it's not possible to open a new logfile, this will fail silently
+        and continue logging to the old logfile.
+        """
+        if not (os.access(self.directory, os.W_OK) and os.access(self.path, os.W_OK)):
+            return
+        newpath = "%s.%s" % (self.path, self.suffix(self.lastDate))
+        if os.path.exists(newpath):
+            return
+        self._file.close()
+        os.rename(self.path, newpath)
+        self._openFile()
+
+    def __getstate__(self):
+        state = BaseLogFile.__getstate__(self)
+        del state["lastDate"]
+        return state
+
+threadable.synchronize(DailyLogFile)
+
+
+class LogReader:
+    """Read from a log file."""
+
+    def __init__(self, name):
+        self._file = file(name, "r")
+
+    def readLines(self, lines=10):
+        """Read a list of lines from the log file.
+
+        This doesn't return all of the file's lines - call it multiple times.
+        """
+        result = []
+        for i in range(lines):
+            line = self._file.readline()
+            if not line:
+                break
+            result.append(line)
+        return result
+
+    def close(self):
+        self._file.close()
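+
+# A small rotation sketch (hypothetical directory and sizes)::
+#
+#     from twisted.python.logfile import LogFile, LogReader
+#     log = LogFile('app.log', '/tmp', rotateLength=1000, maxRotatedFiles=3)
+#     for i in range(500):
+#         log.write('line %d\n' % i)   # rotates roughly every 1000 bytes
+#     log.close()
+#     reader = LogReader('/tmp/app.log.1')
+#     print ''.join(reader.readLines(5))
+#     reader.close()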
diff --git a/ThirdParty/Twisted/twisted/python/modules.py b/ThirdParty/Twisted/twisted/python/modules.py
new file mode 100644
index 0000000..307970c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/modules.py
@@ -0,0 +1,758 @@
+# -*- test-case-name: twisted.test.test_modules -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module aims to provide a unified, object-oriented view of Python's
+runtime hierarchy.
+
+Python is a very dynamic language with a wide variety of introspection
+utilities.  However, these utilities can be hard to use, because there is no
+consistent API.  The introspection API in Python is made up of attributes (__name__,
+__module__, func_name, etc) on instances, modules, classes and functions which
+vary between those four types, utility modules such as 'inspect' which provide
+some functionality, the 'imp' module, the "compiler" module, the semantics of
+PEP 302 support, and setuptools, among other things.
+
+At the top, you have "PythonPath", an abstract representation of sys.path which
+includes methods to locate top-level modules, with or without loading them.
+The top-level exposed functions in this module for accessing the system path
+are "walkModules", "iterModules", and "getModule".
+
+From most to least specific, here are the objects provided::
+
+                  PythonPath  # sys.path
+                      |
+                      v
+                  PathEntry   # one entry on sys.path: an importer
+                      |
+                      v
+                 PythonModule # a module or package that can be loaded
+                      |
+                      v
+                 PythonAttribute # an attribute of a module (function or class)
+                      |
+                      v
+                 PythonAttribute # an attribute of a function or class
+                      |
+                      v
+                     ...
+
+Here's an example of idiomatic usage: this is what you would do to list all of
+the modules outside the standard library's python-files directory::
+
+    import os
+    stdlibdir = os.path.dirname(os.__file__)
+
+    from twisted.python.modules import iterModules
+
+    for modinfo in iterModules():
+        if (modinfo.pathEntry.filePath.path != stdlibdir
+            and not modinfo.isPackage()):
+            print 'unpackaged: %s: %s' % (
+                modinfo.name, modinfo.filePath.path)
+"""
+
+__metaclass__ = type
+
+# let's try to keep path imports to a minimum...
+from os.path import dirname, split as splitpath
+
+import sys
+import zipimport
+import inspect
+import warnings
+from zope.interface import Interface, implements
+
+from twisted.python.components import registerAdapter
+from twisted.python.filepath import FilePath, UnlistableError
+from twisted.python.zippath import ZipArchive
+from twisted.python.reflect import namedAny
+
+_nothing = object()
+
+PYTHON_EXTENSIONS = ['.py']
+OPTIMIZED_MODE = __doc__ is None
+if OPTIMIZED_MODE:
+    PYTHON_EXTENSIONS.append('.pyo')
+else:
+    PYTHON_EXTENSIONS.append('.pyc')
+
+def _isPythonIdentifier(string):
+    """
+    cheezy fake test for proper identifier-ness.
+
+    @param string: a str which might or might not be a valid python identifier.
+
+    @return: True or False
+    """
+    return (' ' not in string and
+            '.' not in string and
+            '-' not in string)
+
+
+
+def _isPackagePath(fpath):
+    # Determine if a FilePath-like object is a Python package.  TODO: deal with
+    # __init__module.(so|dll|pyd)?
+    extless = fpath.splitext()[0]
+    basend = splitpath(extless)[1]
+    return basend == "__init__"
+
+
+
+class _ModuleIteratorHelper:
+    """
+    This mixin provides common behavior between Python modules and path entries,
+    since the mechanism for searching sys.path and __path__ attributes is
+    remarkably similar.
+    """
+
+    def iterModules(self):
+        """
+        Loop over the modules present below this entry or package on PYTHONPATH.
+
+        For modules which are not packages, this will yield nothing.
+
+        For packages and path entries, this will only yield modules one level
+        down; i.e. if there is a package a.b.c, iterModules on a will only
+        return a.b.  If you want to descend deeply, use walkModules.
+
+        @return: a generator which yields PythonModule instances that describe
+        modules which can be, or have been, imported.
+        """
+        yielded = {}
+        if not self.filePath.exists():
+            return
+
+        for placeToLook in self._packagePaths():
+            try:
+                children = placeToLook.children()
+            except UnlistableError:
+                continue
+
+            children.sort()
+            for potentialTopLevel in children:
+                ext = potentialTopLevel.splitext()[1]
+                potentialBasename = potentialTopLevel.basename()[:-len(ext)]
+                if ext in PYTHON_EXTENSIONS:
+                    # TODO: this should be a little choosier about which path entry
+                    # it selects first, and it should do all the .so checking and
+                    # crud
+                    if not _isPythonIdentifier(potentialBasename):
+                        continue
+                    modname = self._subModuleName(potentialBasename)
+                    if modname.split(".")[-1] == '__init__':
+                        # This marks the directory as a package so it can't be
+                        # a module.
+                        continue
+                    if modname not in yielded:
+                        yielded[modname] = True
+                        pm = PythonModule(modname, potentialTopLevel, self._getEntry())
+                        assert pm != self
+                        yield pm
+                else:
+                    if (ext or not _isPythonIdentifier(potentialBasename)
+                        or not potentialTopLevel.isdir()):
+                        continue
+                    modname = self._subModuleName(potentialTopLevel.basename())
+                    for ext in PYTHON_EXTENSIONS:
+                        initpy = potentialTopLevel.child("__init__"+ext)
+                        if initpy.exists() and modname not in yielded:
+                            yielded[modname] = True
+                            pm = PythonModule(modname, initpy, self._getEntry())
+                            assert pm != self
+                            yield pm
+                            break
+
+    def walkModules(self, importPackages=False):
+        """
+        Similar to L{iterModules}, this yields self, and then every module in my
+        package or entry, and every submodule in each package or entry.
+
+        In other words, this is deep, and L{iterModules} is shallow.
+        """
+        yield self
+        for package in self.iterModules():
+            for module in package.walkModules(importPackages=importPackages):
+                yield module
+
+    def _subModuleName(self, mn):
+        """
+        This is a hook to provide packages with the ability to specify their names
+        as a prefix to submodules here.
+        """
+        return mn
+
+    def _packagePaths(self):
+        """
+        Implement in subclasses to specify where to look for modules.
+
+        @return: iterable of FilePath-like objects.
+        """
+        raise NotImplementedError()
+
+    def _getEntry(self):
+        """
+        Implement in subclasses to specify what path entry submodules will come
+        from.
+
+        @return: a PathEntry instance.
+        """
+        raise NotImplementedError()
+
+
+    def __getitem__(self, modname):
+        """
+        Retrieve a module from below this path or package.
+
+        @param modname: a str naming a module to be loaded.  For entries, this
+        is a top-level, undotted package name, and for packages it is the name
+        of the module without the package prefix.  For example, if you have a
+        PythonModule representing the 'twisted' package, you could use::
+
+            twistedPackageObj['python']['modules']
+
+        to retrieve this module.
+
+        @raise: KeyError if the module is not found.
+
+        @return: a PythonModule.
+        """
+        for module in self.iterModules():
+            if module.name == self._subModuleName(modname):
+                return module
+        raise KeyError(modname)
+
+    def __iter__(self):
+        """
+        Implemented to raise NotImplementedError for clarity, so that attempting to
+        loop over this object won't call __getitem__.
+
+        Note: in the future there might be some sensible default for iteration,
+        like 'walkEverything', so this is deliberately untested and undefined
+        behavior.
+        """
+        raise NotImplementedError()
+
+class PythonAttribute:
+    """
+    I represent a function, class, or other object that is present as an
+    attribute of a module or of another PythonAttribute.
+
+    @ivar name: the fully-qualified python name of this attribute.
+
+    @ivar onObject: a reference to a PythonModule or other PythonAttribute that
+    is this attribute's logical parent.
+
+    @ivar pythonValue: the value of the attribute this object points to.
+    """
+    def __init__(self, name, onObject, loaded, pythonValue):
+        """
+        Create a PythonAttribute.  This is a private constructor.  Do not construct
+        me directly, use PythonModule.iterAttributes.
+
+        @param name: the FQPN
+        @param onObject: see ivar
+        @param loaded: always True, for now
+        @param pythonValue: the value of the attribute we're pointing to.
+        """
+        self.name = name
+        self.onObject = onObject
+        self._loaded = loaded
+        self.pythonValue = pythonValue
+
+    def __repr__(self):
+        return 'PythonAttribute<%r>'%(self.name,)
+
+    def isLoaded(self):
+        """
+        Return a boolean describing whether the attribute this describes has
+        actually been loaded into memory by importing its module.
+
+        Note: this currently always returns true; there is no Python parser
+        support in this module yet.
+        """
+        return self._loaded
+
+    def load(self, default=_nothing):
+        """
+        Load the value associated with this attribute.
+
+        @return: an arbitrary Python object, or 'default' if there is an error
+        loading it.
+        """
+        return self.pythonValue
+
+    def iterAttributes(self):
+        for name, val in inspect.getmembers(self.load()):
+            yield PythonAttribute(self.name+'.'+name, self, True, val)
+
+class PythonModule(_ModuleIteratorHelper):
+    """
+    Representation of a module which could be imported from sys.path.
+
+    @ivar name: the fully qualified python name of this module.
+
+    @ivar filePath: a FilePath-like object which points to the location of this
+    module.
+
+    @ivar pathEntry: a L{PathEntry} instance which this module was located
+    from.
+    """
+
+    def __init__(self, name, filePath, pathEntry):
+        """
+        Create a PythonModule.  Do not construct this directly, instead inspect a
+        PythonPath or other PythonModule instances.
+
+        @param name: see ivar
+        @param filePath: see ivar
+        @param pathEntry: see ivar
+        """
+        assert not name.endswith(".__init__")
+        self.name = name
+        self.filePath = filePath
+        self.parentPath = filePath.parent()
+        self.pathEntry = pathEntry
+
+    def _getEntry(self):
+        return self.pathEntry
+
+    def __repr__(self):
+        """
+        Return a string representation including the module name.
+        """
+        return 'PythonModule<%r>' % (self.name,)
+
+
+    def isLoaded(self):
+        """
+        Determine if the module is loaded into sys.modules.
+
+        @return: a boolean: true if loaded, false if not.
+        """
+        return self.pathEntry.pythonPath.moduleDict.get(self.name) is not None
+
+
+    def iterAttributes(self):
+        """
+        List all the attributes defined in this module.
+
+        Note: Future work is planned here to make it possible to list python
+        attributes on a module without loading the module by inspecting ASTs or
+        bytecode, but currently any iteration of PythonModule objects insists
+        they must be loaded, and will use inspect.getmembers.
+
+        @raise NotImplementedError: if this module is not loaded.
+
+        @return: a generator yielding PythonAttribute instances describing the
+        attributes of this module.
+        """
+        if not self.isLoaded():
+            raise NotImplementedError(
+                "You can't load attributes from non-loaded modules yet.")
+        for name, val in inspect.getmembers(self.load()):
+            yield PythonAttribute(self.name+'.'+name, self, True, val)
+
+    def isPackage(self):
+        """
+        Returns true if this module is also a package, and might yield something
+        from iterModules.
+        """
+        return _isPackagePath(self.filePath)
+
+    def load(self, default=_nothing):
+        """
+        Load this module.
+
+        @param default: if specified, the value to return in case of an error.
+
+        @return: a genuine python module.
+
+        @raise: any type of exception.  Importing modules is a risky business;
+        the errors of any code run at module scope may be raised from here, as
+        well as ImportError if something bizarre happened to the system path
+        between the discovery of this PythonModule object and the attempt to
+        import it.  If you specify a default, the error will be swallowed
+        entirely, and not logged.
+
+        @rtype: types.ModuleType.
+        """
+        try:
+            return self.pathEntry.pythonPath.moduleLoader(self.name)
+        except:                 # this needs more thought...
+            if default is not _nothing:
+                return default
+            raise
+
+    def __eq__(self, other):
+        """
+        PythonModules with the same name are equal.
+        """
+        if not isinstance(other, PythonModule):
+            return False
+        return other.name == self.name
+
+    def __ne__(self, other):
+        """
+        PythonModules with different names are not equal.
+        """
+        if not isinstance(other, PythonModule):
+            return True
+        return other.name != self.name
+
+    def walkModules(self, importPackages=False):
+        if importPackages and self.isPackage():
+            self.load()
+        return super(PythonModule, self).walkModules(importPackages=importPackages)
+
+    def _subModuleName(self, mn):
+        """
+        submodules of this module are prefixed with our name.
+        """
+        return self.name + '.' + mn
+
+    def _packagePaths(self):
+        """
+        Yield a sequence of FilePath-like objects which represent path segments.
+        """
+        if not self.isPackage():
+            return
+        if self.isLoaded():
+            load = self.load()
+            if hasattr(load, '__path__'):
+                for fn in load.__path__:
+                    if fn == self.parentPath.path:
+                        # this should _really_ exist.
+                        assert self.parentPath.exists()
+                        yield self.parentPath
+                    else:
+                        smp = self.pathEntry.pythonPath._smartPath(fn)
+                        if smp.exists():
+                            yield smp
+        else:
+            yield self.parentPath
+
+
+class PathEntry(_ModuleIteratorHelper):
+    """
+    I am a proxy for a single entry on sys.path.
+
+    @ivar filePath: a FilePath-like object pointing at the filesystem location
+    or archive file where this path entry is stored.
+
+    @ivar pythonPath: a PythonPath instance.
+    """
+    def __init__(self, filePath, pythonPath):
+        """
+        Create a PathEntry.  This is a private constructor.
+        """
+        self.filePath = filePath
+        self.pythonPath = pythonPath
+
+    def _getEntry(self):
+        return self
+
+    def __repr__(self):
+        return 'PathEntry<%r>' % (self.filePath,)
+
+    def _packagePaths(self):
+        yield self.filePath
+
+class IPathImportMapper(Interface):
+    """
+    This is an internal interface, used to map importers to factories for
+    FilePath-like objects.
+    """
+    def mapPath(self, pathLikeString):
+        """
+        Return a FilePath-like object.
+
+        @param pathLikeString: a path-like string, like one that might be
+        passed to an import hook.
+
+        @return: a L{FilePath}, or something like it (currently only a
+        L{ZipPath}, but more might be added later).
+        """
+
+class _DefaultMapImpl:
+    """ Wrapper for the default importer, i.e. None.  """
+    implements(IPathImportMapper)
+    def mapPath(self, fsPathString):
+        return FilePath(fsPathString)
+_theDefaultMapper = _DefaultMapImpl()
+
+class _ZipMapImpl:
+    """ IPathImportMapper implementation for zipimport.ZipImporter.  """
+    implements(IPathImportMapper)
+    def __init__(self, importer):
+        self.importer = importer
+
+    def mapPath(self, fsPathString):
+        """
+        Map the given FS path to a ZipPath, by looking at the ZipImporter's
+        "archive" attribute and using it as our ZipArchive root, then walking
+        down into the archive from there.
+
+        @return: a L{zippath.ZipPath} or L{zippath.ZipArchive} instance.
+        """
+        za = ZipArchive(self.importer.archive)
+        myPath = FilePath(self.importer.archive)
+        itsPath = FilePath(fsPathString)
+        if myPath == itsPath:
+            return za
+        # This is NOT a general-purpose rule for sys.path or __file__:
+        # zipimport specifically uses regular OS path syntax in its pathnames,
+        # even though zip files specify that slashes are always the separator,
+        # regardless of platform.
+        segs = itsPath.segmentsFrom(myPath)
+        zp = za
+        for seg in segs:
+            zp = zp.child(seg)
+        return zp
+
+registerAdapter(_ZipMapImpl, zipimport.zipimporter, IPathImportMapper)
+
+def _defaultSysPathFactory():
+    """
+    Provide the default behavior of PythonPath's sys.path factory, which is to
+    return the current value of sys.path.
+
+    @return: L{sys.path}
+    """
+    return sys.path
+
+
+class PythonPath:
+    """
+    I represent the very top of the Python object-space, the module list in
+    sys.path and the modules list in sys.modules.
+
+    @ivar _sysPath: a sequence of strings like sys.path.  This attribute is
+    read-only.
+
+    @ivar moduleDict: a dictionary mapping string module names to module
+    objects, like sys.modules.
+
+    @ivar sysPathHooks: a list of PEP-302 path hooks, like sys.path_hooks.
+
+    @ivar moduleLoader: a function that takes a fully-qualified python name and
+    returns a module, like twisted.python.reflect.namedAny.
+    """
+
+    def __init__(self,
+                 sysPath=None,
+                 moduleDict=sys.modules,
+                 sysPathHooks=sys.path_hooks,
+                 importerCache=sys.path_importer_cache,
+                 moduleLoader=namedAny,
+                 sysPathFactory=None):
+        """
+        Create a PythonPath.  You almost certainly want to use
+        modules.theSystemPath, or its aliased methods, rather than creating a
+        new instance yourself, though.
+
+        All parameters are optional, and if unspecified, will use 'system'
+        equivalents that make this PythonPath like the global L{theSystemPath}
+        instance.
+
+        @param sysPath: a sys.path-like list to use for this PythonPath, to
+        specify where to load modules from.
+
+        @param moduleDict: a sys.modules-like dictionary to use for keeping
+        track of what modules this PythonPath has loaded.
+
+        @param sysPathHooks: sys.path_hooks-like list of PEP-302 path hooks to
+        be used for this PythonPath, to determine which importers should be
+        used.
+
+        @param importerCache: a sys.path_importer_cache-like list of PEP-302
+        importers.  This will be used in conjunction with the given
+        sysPathHooks.
+
+        @param moduleLoader: a module loader function which takes a string and
+        returns a module.  That is to say, it is like L{namedAny} - *not* like
+        L{__import__}.
+
+        @param sysPathFactory: a 0-argument callable which returns the current
+        value of a sys.path-like list of strings.  Specify either this, or
+        sysPath, not both.  This alternative interface is provided because the
+        way the Python import mechanism works, you can re-bind the 'sys.path'
+        name and that is what is used for current imports, so it must be a
+        factory rather than a value to deal with modification by rebinding
+        rather than modification by mutation.  Note: it is not recommended to
+        rebind sys.path.  Although this mechanism can deal with that, it is a
+        subtle point which is easy for tools that interact with sys.path to
+        miss.
+        """
+        if sysPath is not None:
+            sysPathFactory = lambda : sysPath
+        elif sysPathFactory is None:
+            sysPathFactory = _defaultSysPathFactory
+        self._sysPathFactory = sysPathFactory
+        self._sysPath = sysPath
+        self.moduleDict = moduleDict
+        self.sysPathHooks = sysPathHooks
+        self.importerCache = importerCache
+        self.moduleLoader = moduleLoader
+
+
+    def _getSysPath(self):
+        """
+        Retrieve the current value of the module search path list.
+        """
+        return self._sysPathFactory()
+
+    sysPath = property(_getSysPath)
+
+    def _findEntryPathString(self, modobj):
+        """
+        Determine where a given Python module object came from by looking at path
+        entries.
+        """
+        topPackageObj = modobj
+        while '.' in topPackageObj.__name__:
+            topPackageObj = self.moduleDict['.'.join(
+                    topPackageObj.__name__.split('.')[:-1])]
+        if _isPackagePath(FilePath(topPackageObj.__file__)):
+            # if package 'foo' is on sys.path at /a/b/foo, package 'foo's
+            # __file__ will be /a/b/foo/__init__.py, and we are looking for
+            # /a/b here, the path-entry; so go up two steps.
+            rval = dirname(dirname(topPackageObj.__file__))
+        else:
+            # the module is completely top-level, not within any packages.  The
+            # path entry it's on is just its dirname.
+            rval = dirname(topPackageObj.__file__)
+
+        # There are probably some awful tricks that an importer could pull
+        # which would break this, so let's just make sure... it's a loaded
+        # module after all, which means that its path MUST be in
+        # path_importer_cache according to PEP 302 -glyph
+        if rval not in self.importerCache:
+            warnings.warn(
+                "%s (for module %s) not in path importer cache "
+                "(PEP 302 violation - check your local configuration)." % (
+                    rval, modobj.__name__),
+                stacklevel=3)
+
+        return rval
+
+    def _smartPath(self, pathName):
+        """
+        Given a path entry from sys.path which may refer to an importer,
+        return the appropriate FilePath-like instance.
+
+        @param pathName: a str describing the path.
+
+        @return: a FilePath-like object.
+        """
+        importr = self.importerCache.get(pathName, _nothing)
+        if importr is _nothing:
+            for hook in self.sysPathHooks:
+                try:
+                    importr = hook(pathName)
+                except ImportError:
+                    pass
+            if importr is _nothing: # still
+                importr = None
+        return IPathImportMapper(importr, _theDefaultMapper).mapPath(pathName)
+
+    def iterEntries(self):
+        """
+        Iterate the entries on my sysPath.
+
+        @return: a generator yielding PathEntry objects
+        """
+        for pathName in self.sysPath:
+            fp = self._smartPath(pathName)
+            yield PathEntry(fp, self)
+
+
+    def __getitem__(self, modname):
+        """
+        Get a python module by its given fully-qualified name.
+
+        @param modname: The fully-qualified Python module name to load.
+
+        @type modname: C{str}
+
+        @return: an object representing the module identified by C{modname}
+
+        @rtype: L{PythonModule}
+
+        @raise KeyError: if the module name is not a valid module name, or no
+            such module can be identified as loadable.
+        """
+        # See if the module is already somewhere in Python-land.
+        moduleObject = self.moduleDict.get(modname)
+        if moduleObject is not None:
+            # we need 2 paths; one for the path entry and one for the module.
+            pe = PathEntry(
+                self._smartPath(
+                    self._findEntryPathString(moduleObject)),
+                self)
+            mp = self._smartPath(moduleObject.__file__)
+            return PythonModule(modname, mp, pe)
+
+        # Recurse if we're trying to get a submodule.
+        if '.' in modname:
+            pkg = self
+            for name in modname.split('.'):
+                pkg = pkg[name]
+            return pkg
+
+        # Finally do the slowest possible thing and iterate
+        for module in self.iterModules():
+            if module.name == modname:
+                return module
+        raise KeyError(modname)
+
+
+    def __repr__(self):
+        """
+        Display my sysPath and moduleDict in a string representation.
+        """
+        return "PythonPath(%r,%r)" % (self.sysPath, self.moduleDict)
+
+    def iterModules(self):
+        """
+        Yield all top-level modules on my sysPath.
+        """
+        for entry in self.iterEntries():
+            for module in entry.iterModules():
+                yield module
+
+    def walkModules(self, importPackages=False):
+        """
+        Similar to L{iterModules}, this yields every module on the path, then every
+        submodule in each package or entry.
+        """
+        for package in self.iterModules():
+            for module in package.walkModules(importPackages=False):
+                yield module
+
+theSystemPath = PythonPath()
+
+def walkModules(importPackages=False):
+    """
+    Deeply iterate all modules on the global python path.
+
+    @param importPackages: Import packages as they are seen.
+    """
+    return theSystemPath.walkModules(importPackages=importPackages)
+
+def iterModules():
+    """
+    Iterate all modules and top-level packages on the global Python path, but
+    do not descend into packages.
+    """
+    return theSystemPath.iterModules()
+
+def getModule(moduleName):
+    """
+    Retrieve a module from the system path.
+    """
+    return theSystemPath[moduleName]
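+
+
+# A minimal, illustrative sketch (not part of the upstream API): running this
+# file directly prints where the stdlib 'os' module lives and then lists the
+# top-level packages visible on sys.path, using only the helpers defined
+# above (getModule, iterModules).
+if __name__ == '__main__':
+    print 'os lives at:', getModule('os').filePath.path
+    for moduleInfo in iterModules():
+        if moduleInfo.isPackage():
+            print 'package:', moduleInfo.name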
diff --git a/ThirdParty/Twisted/twisted/python/monkey.py b/ThirdParty/Twisted/twisted/python/monkey.py
new file mode 100644
index 0000000..4911f87
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/monkey.py
@@ -0,0 +1,75 @@
+# -*- test-case-name: twisted.test.test_monkey -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from __future__ import division, absolute_import
+
+
+class MonkeyPatcher(object):
+    """
+    Cover up attributes with new objects. Neat for monkey-patching things for
+    unit-testing purposes.
+    """
+
+    def __init__(self, *patches):
+        # List of patches to apply in (obj, name, value).
+        self._patchesToApply = []
+        # List of the original values for things that have been patched.
+        # (obj, name, value) format.
+        self._originals = []
+        for patch in patches:
+            self.addPatch(*patch)
+
+
+    def addPatch(self, obj, name, value):
+        """
+        Add a patch so that the attribute C{name} on C{obj} will be assigned to
+        C{value} when C{patch} is called or during C{runWithPatches}.
+
+        You can restore the original values with a call to restore().
+        """
+        self._patchesToApply.append((obj, name, value))
+
+
+    def _alreadyPatched(self, obj, name):
+        """
+        Has the C{name} attribute of C{obj} already been patched by this
+        patcher?
+        """
+        for o, n, v in self._originals:
+            if (o, n) == (obj, name):
+                return True
+        return False
+
+
+    def patch(self):
+        """
+        Apply all of the patches that have been specified with L{addPatch}.
+        Reverse this operation using L{restore}.
+        """
+        for obj, name, value in self._patchesToApply:
+            if not self._alreadyPatched(obj, name):
+                self._originals.append((obj, name, getattr(obj, name)))
+            setattr(obj, name, value)
+
+
+    def restore(self):
+        """
+        Restore all original values to any patched objects.
+        """
+        while self._originals:
+            obj, name, value = self._originals.pop()
+            setattr(obj, name, value)
+
+
+    def runWithPatches(self, f, *args, **kw):
+        """
+        Apply each patch already specified. Then run the function f with the
+        given args and kwargs. Restore everything when done.
+        """
+        self.patch()
+        try:
+            return f(*args, **kw)
+        finally:
+            self.restore()
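+
+
+# A minimal usage sketch (illustrative only, not part of the upstream module):
+# patch an attribute for the duration of one call, then restore it.  The
+# '_Config' class and 'verbose' attribute below are hypothetical examples.
+if __name__ == '__main__':
+    class _Config(object):
+        verbose = False
+
+    config = _Config()
+
+    def report():
+        return config.verbose
+
+    patcher = MonkeyPatcher((config, 'verbose', True))
+    print 'patched value:', patcher.runWithPatches(report)   # True
+    print 'restored value:', config.verbose                  # False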
diff --git a/ThirdParty/Twisted/twisted/python/procutils.py b/ThirdParty/Twisted/twisted/python/procutils.py
new file mode 100644
index 0000000..26ff95d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/procutils.py
@@ -0,0 +1,45 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Utilities for dealing with processes.
+"""
+
+import os
+
+def which(name, flags=os.X_OK):
+    """Search PATH for executable files with the given name.
+    
+    On newer versions of MS-Windows, the PATHEXT environment variable will be
+    set to the list of file extensions for files considered executable. This
+    will normally include things like ".EXE". This function will also find files
+    with the given name ending with any of these extensions.
+
+    On MS-Windows the only flag that has any meaning is os.F_OK. Any other
+    flags will be ignored.
+    
+    @type name: C{str}
+    @param name: The name for which to search.
+    
+    @type flags: C{int}
+    @param flags: Arguments to L{os.access}.
+    
+    @rtype: C{list}
+    @return: A list of the full paths to files found, in the
+    order in which they were found.
+    """
+    result = []
+    exts = filter(None, os.environ.get('PATHEXT', '').split(os.pathsep))
+    path = os.environ.get('PATH', None)
+    if path is None:
+        return []
+    for p in path.split(os.pathsep):
+        p = os.path.join(p, name)
+        if os.access(p, flags):
+            result.append(p)
+        for e in exts:
+            pext = p + e
+            if os.access(pext, flags):
+                result.append(pext)
+    return result
+
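+# A small illustrative sketch (not part of the upstream module): locate an
+# executable with which().  'python' is only an example name; on Windows the
+# PATHEXT handling above would also match e.g. 'python.exe'.
+if __name__ == '__main__':
+    found = which('python')
+    if found:
+        print 'first python on PATH:', found[0]
+    else:
+        print 'python not found on PATH'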
diff --git a/ThirdParty/Twisted/twisted/python/randbytes.py b/ThirdParty/Twisted/twisted/python/randbytes.py
new file mode 100644
index 0000000..4062ed2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/randbytes.py
@@ -0,0 +1,150 @@
+# -*- test-case-name: twisted.test.test_randbytes -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Cryptographically secure random implementation, with fallback on normal random.
+"""
+
+from __future__ import division, absolute_import
+
+import warnings, os, random, string
+
+from twisted.python.compat import _PY3
+
+getrandbits = getattr(random, 'getrandbits', None)
+
+if _PY3:
+    _fromhex = bytes.fromhex
+else:
+    def _fromhex(hexBytes):
+        return hexBytes.decode('hex')
+
+
+class SecureRandomNotAvailable(RuntimeError):
+    """
+    Exception raised when no secure random algorithm is found.
+    """
+
+
+
+class SourceNotAvailable(RuntimeError):
+    """
+    Internal exception used when a specific random source is not available.
+    """
+
+
+
+class RandomFactory(object):
+    """
+    Factory providing L{secureRandom} and L{insecureRandom} methods.
+
+    You shouldn't have to instantiate this class; use the module-level
+    functions instead.  It is an implementation detail and could be removed or
+    changed arbitrarily.
+    """
+
+    # This variable is no longer used, and will eventually be removed.
+    randomSources = ()
+
+    getrandbits = getrandbits
+
+
+    def _osUrandom(self, nbytes):
+        """
+        Wrapper around C{os.urandom} that cleanly manages its absence.
+        """
+        try:
+            return os.urandom(nbytes)
+        except (AttributeError, NotImplementedError) as e:
+            raise SourceNotAvailable(e)
+
+
+    def secureRandom(self, nbytes, fallback=False):
+        """
+        Return a number of secure random bytes.
+
+        @param nbytes: number of bytes to generate.
+        @type nbytes: C{int}
+        @param fallback: Whether the function should fallback on non-secure
+            random or not.  Defaults to C{False}.
+        @type fallback: C{bool}
+
+        @return: a string of random bytes.
+        @rtype: C{str}
+        """
+        try:
+            return self._osUrandom(nbytes)
+        except SourceNotAvailable:
+            pass
+
+        if fallback:
+            warnings.warn(
+                "urandom unavailable - "
+                "proceeding with non-cryptographically secure random source",
+                category=RuntimeWarning,
+                stacklevel=2)
+            return self.insecureRandom(nbytes)
+        else:
+            raise SecureRandomNotAvailable("No secure random source available")
+
+
+    def _randBits(self, nbytes):
+        """
+        Wrapper around C{os.getrandbits}.
+        """
+        if self.getrandbits is not None:
+            n = self.getrandbits(nbytes * 8)
+            hexBytes = ("%%0%dx" % (nbytes * 2)) % n
+            return _fromhex(hexBytes)
+        raise SourceNotAvailable("random.getrandbits is not available")
+
+
+    if _PY3:
+        _maketrans = bytes.maketrans
+        def _randModule(self, nbytes):
+            """
+            Wrapper around the C{random} module.
+            """
+            return b"".join([
+                    bytes([random.choice(self._BYTES)]) for i in range(nbytes)])
+    else:
+        _maketrans = string.maketrans
+        def _randModule(self, nbytes):
+            """
+            Wrapper around the C{random} module.
+            """
+            return b"".join([
+                    random.choice(self._BYTES) for i in range(nbytes)])
+
+    _BYTES = _maketrans(b'', b'')
+
+
+    def insecureRandom(self, nbytes):
+        """
+        Return a number of non-secure random bytes.
+
+        @param nbytes: number of bytes to generate.
+        @type nbytes: C{int}
+
+        @return: a string of random bytes.
+        @rtype: C{str}
+        """
+        for src in ("_randBits", "_randModule"):
+            try:
+                return getattr(self, src)(nbytes)
+            except SourceNotAvailable:
+                pass
+
+
+
+factory = RandomFactory()
+
+secureRandom = factory.secureRandom
+
+insecureRandom = factory.insecureRandom
+
+del factory
+
+
+__all__ = ["secureRandom", "insecureRandom", "SecureRandomNotAvailable"]
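+
+
+# A minimal usage sketch (illustrative only): prefer secureRandom() and fall
+# back explicitly when no OS-level randomness source is available.  The
+# 16-byte length is an arbitrary example value.
+if __name__ == '__main__':
+    try:
+        token = secureRandom(16)
+    except SecureRandomNotAvailable:
+        # Explicitly opt in to the non-cryptographic fallback.
+        token = secureRandom(16, fallback=True)
+    assert len(token) == 16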
diff --git a/ThirdParty/Twisted/twisted/python/rebuild.py b/ThirdParty/Twisted/twisted/python/rebuild.py
new file mode 100644
index 0000000..28a7675
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/rebuild.py
@@ -0,0 +1,271 @@
+# -*- test-case-name: twisted.test.test_rebuild -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+*Real* reloading support for Python.
+"""
+
+# System Imports
+import sys
+import types
+import time
+import linecache
+
+# Sibling Imports
+from twisted.python import log, reflect
+
+lastRebuild = time.time()
+
+
+class Sensitive:
+    """
+    A utility mixin that's sensitive to rebuilds.
+
+    This is a mixin for classes (usually those which represent collections of
+    callbacks) to make sure that their code is up-to-date before running.
+    """
+
+    lastRebuild = lastRebuild
+
+    def needRebuildUpdate(self):
+        yn = (self.lastRebuild < lastRebuild)
+        return yn
+
+    def rebuildUpToDate(self):
+        self.lastRebuild = time.time()
+
+    def latestVersionOf(self, anObject):
+        """
+        Get the latest version of an object.
+
+        This can handle just about anything callable; instances, functions,
+        methods, and classes.
+        """
+        t = type(anObject)
+        if t == types.FunctionType:
+            return latestFunction(anObject)
+        elif t == types.MethodType:
+            if anObject.im_self is None:
+                return getattr(anObject.im_class, anObject.__name__)
+            else:
+                return getattr(anObject.im_self, anObject.__name__)
+        elif t == types.InstanceType:
+            # Kick it, if it's out of date.
+            getattr(anObject, 'nothing', None)
+            return anObject
+        elif t == types.ClassType:
+            return latestClass(anObject)
+        else:
+            log.msg('warning returning anObject!')
+            return anObject
+
+_modDictIDMap = {}
+
+def latestFunction(oldFunc):
+    """
+    Get the latest version of a function.
+    """
+    # This may be CPython specific, since I believe jython instantiates a new
+    # module upon reload.
+    dictID = id(oldFunc.func_globals)
+    module = _modDictIDMap.get(dictID)
+    if module is None:
+        return oldFunc
+    return getattr(module, oldFunc.__name__)
+
+
+def latestClass(oldClass):
+    """
+    Get the latest version of a class.
+    """
+    module = reflect.namedModule(oldClass.__module__)
+    newClass = getattr(module, oldClass.__name__)
+    newBases = [latestClass(base) for base in newClass.__bases__]
+
+    try:
+        # This makes old-style stuff work
+        newClass.__bases__ = tuple(newBases)
+        return newClass
+    except TypeError:
+        if newClass.__module__ == "__builtin__":
+            # __builtin__ members can't be reloaded sanely
+            return newClass
+        ctor = getattr(newClass, '__metaclass__', type)
+        return ctor(newClass.__name__, tuple(newBases), dict(newClass.__dict__))
+
+
+class RebuildError(Exception):
+    """
+    Exception raised when trying to rebuild a class that cannot be rebuilt.
+    """
+
+
+def updateInstance(self):
+    """
+    Updates an instance to be current.
+    """
+    try:
+        self.__class__ = latestClass(self.__class__)
+    except TypeError:
+        if hasattr(self.__class__, '__slots__'):
+            raise RebuildError("Can't rebuild class with __slots__ on Python < 2.6")
+        else:
+            raise
+
+
+def __getattr__(self, name):
+    """
+    A getattr method to cause a class to be refreshed.
+    """
+    if name == '__del__':
+        raise AttributeError("Without this, Python segfaults.")
+    updateInstance(self)
+    log.msg("(rebuilding stale %s instance (%s))" % (reflect.qual(self.__class__), name))
+    result = getattr(self, name)
+    return result
+
+
+def rebuild(module, doLog=1):
+    """
+    Reload a module and do as much as possible to replace its references.
+    """
+    global lastRebuild
+    lastRebuild = time.time()
+    if hasattr(module, 'ALLOW_TWISTED_REBUILD'):
+        # Is this module allowed to be rebuilt?
+        if not module.ALLOW_TWISTED_REBUILD:
+            raise RuntimeError("I am not allowed to be rebuilt.")
+    if doLog:
+        log.msg('Rebuilding %s...' % str(module.__name__))
+
+    ## Safely handle adapter re-registration
+    from twisted.python import components
+    components.ALLOW_DUPLICATES = True
+
+    d = module.__dict__
+    _modDictIDMap[id(d)] = module
+    newclasses = {}
+    classes = {}
+    functions = {}
+    values = {}
+    if doLog:
+        log.msg('  (scanning %s): ' % str(module.__name__))
+    for k, v in d.items():
+        if type(v) == types.ClassType:
+            # Failure condition -- instances of classes with buggy
+            # __hash__/__cmp__ methods referenced at the module level...
+            if v.__module__ == module.__name__:
+                classes[v] = 1
+                if doLog:
+                    log.logfile.write("c")
+                    log.logfile.flush()
+        elif type(v) == types.FunctionType:
+            if v.func_globals is module.__dict__:
+                functions[v] = 1
+                if doLog:
+                    log.logfile.write("f")
+                    log.logfile.flush()
+        elif isinstance(v, type):
+            if v.__module__ == module.__name__:
+                newclasses[v] = 1
+                if doLog:
+                    log.logfile.write("o")
+                    log.logfile.flush()
+
+    values.update(classes)
+    values.update(functions)
+    fromOldModule = values.__contains__
+    newclasses = newclasses.keys()
+    classes = classes.keys()
+    functions = functions.keys()
+
+    if doLog:
+        log.msg('')
+        log.msg('  (reload   %s)' % str(module.__name__))
+
+    # Boom.
+    reload(module)
+    # Make sure that my traceback printing will at least be recent...
+    linecache.clearcache()
+
+    if doLog:
+        log.msg('  (cleaning %s): ' % str(module.__name__))
+
+    for clazz in classes:
+        if getattr(module, clazz.__name__) is clazz:
+            log.msg("WARNING: class %s not replaced by reload!" % reflect.qual(clazz))
+        else:
+            if doLog:
+                log.logfile.write("x")
+                log.logfile.flush()
+            clazz.__bases__ = ()
+            clazz.__dict__.clear()
+            clazz.__getattr__ = __getattr__
+            clazz.__module__ = module.__name__
+    if newclasses:
+        import gc
+    for nclass in newclasses:
+        ga = getattr(module, nclass.__name__)
+        if ga is nclass:
+            log.msg("WARNING: new-class %s not replaced by reload!" % reflect.qual(nclass))
+        else:
+            for r in gc.get_referrers(nclass):
+                if getattr(r, '__class__', None) is nclass:
+                    r.__class__ = ga
+    if doLog:
+        log.msg('')
+        log.msg('  (fixing   %s): ' % str(module.__name__))
+    modcount = 0
+    for mk, mod in sys.modules.items():
+        modcount = modcount + 1
+        if mod == module or mod is None:
+            continue
+
+        if not hasattr(mod, '__file__'):
+            # It's a builtin module; nothing to replace here.
+            continue
+
+        if hasattr(mod, '__bundle__'):
+            # PyObjC has a few buggy objects which segfault if you hash() them.
+            # It doesn't make sense to try rebuilding extension modules like
+            # this anyway, so don't try.
+            continue
+
+        changed = 0
+
+        for k, v in mod.__dict__.items():
+            try:
+                hash(v)
+            except Exception:
+                continue
+            if fromOldModule(v):
+                if type(v) == types.ClassType:
+                    if doLog:
+                        log.logfile.write("c")
+                        log.logfile.flush()
+                    nv = latestClass(v)
+                else:
+                    if doLog:
+                        log.logfile.write("f")
+                        log.logfile.flush()
+                    nv = latestFunction(v)
+                changed = 1
+                setattr(mod, k, nv)
+            else:
+                # Replace bases of non-module classes just to be sure.
+                if type(v) == types.ClassType:
+                    for base in v.__bases__:
+                        if fromOldModule(base):
+                            latestClass(v)
+        if doLog and not changed and ((modcount % 10) == 0):
+            log.logfile.write(".")
+            log.logfile.flush()
+
+    components.ALLOW_DUPLICATES = False
+    if doLog:
+        log.msg('')
+        log.msg('   Rebuilt %s.' % str(module.__name__))
+    return module
+
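+# A minimal usage sketch (illustrative only, Python 2): reload a module in
+# place and patch up references to its old objects.  'linecache' is an
+# arbitrary, already-imported stdlib module chosen only as an example.
+if __name__ == '__main__':
+    rebuild(linecache, doLog=0)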
diff --git a/ThirdParty/Twisted/twisted/python/reflect.py b/ThirdParty/Twisted/twisted/python/reflect.py
new file mode 100644
index 0000000..541d99f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/reflect.py
@@ -0,0 +1,537 @@
+# -*- test-case-name: twisted.test.test_reflect -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Standardized versions of various cool and/or strange things that you can do
+with Python's reflection capabilities.
+"""
+
+import sys
+import types
+import pickle
+import weakref
+import re
+import warnings
+
+try:
+    from collections import deque
+except ImportError:
+    deque = list
+
+RegexType = type(re.compile(""))
+
+
+try:
+    from cStringIO import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+from twisted.python._utilpy3 import unsignedID
+from twisted.python.deprecate import deprecated, deprecatedModuleAttribute
+from twisted.python.deprecate import _fullyQualifiedName as fullyQualifiedName
+from twisted.python.versions import Version
+
+from twisted.python._reflectpy3 import (
+    prefixedMethods, accumulateMethods, prefixedMethodNames,
+    addMethodNamesToDict)
+from twisted.python._reflectpy3 import namedModule, namedObject, namedClass
+from twisted.python._reflectpy3 import InvalidName, ModuleNotFound
+from twisted.python._reflectpy3 import ObjectNotFound, namedAny
+from twisted.python._reflectpy3 import filenameToModuleName
+from twisted.python._reflectpy3 import qual, safe_str, safe_repr
+
+class Settable:
+    """
+    A mixin class for syntactic sugar.  Lets you assign attributes by
+    calling with keyword arguments; for example, C{x(a=b,c=d,y=z)} is the
+    same as C{x.a=b;x.c=d;x.y=z}.  The most useful place for this is
+    where you don't want to name a variable, but you do want to set
+    some attributes; for example, C{X()(y=z,a=b)}.
+    """
+
+    deprecatedModuleAttribute(
+        Version("Twisted", 12, 1, 0),
+        "Settable is old and untested. Please write your own version of this "
+        "functionality if you need it.", "twisted.python.reflect", "Settable")
+
+    def __init__(self, **kw):
+        self(**kw)
+
+    def __call__(self,**kw):
+        for key,val in kw.items():
+            setattr(self,key,val)
+        return self
+
+
+class AccessorType(type):
+    """
+    Metaclass that generates properties automatically.
+
+    This is for Python 2.2 and up.
+
+    Using this metaclass for your class will give you explicit accessor
+    methods; a method called set_foo, will automatically create a property
+    'foo' that uses set_foo as a setter method. Same for get_foo and del_foo.
+
+    Note that this will only work on methods that are present on class
+    creation. If you add methods after the class is defined they will not
+    automatically become properties. Likewise, class attributes will only
+    be used if they are present upon class creation, and no getter function
+    was set - if a getter is present, the class attribute will be ignored.
+
+    This is a 2.2-only alternative to the Accessor mixin - just set in your
+    class definition::
+
+        __metaclass__ = AccessorType
+
+    """
+
+    deprecatedModuleAttribute(
+        Version("Twisted", 12, 1, 0),
+        "AccessorType is old and untested. Please write your own version of "
+        "this functionality if you need it.", "twisted.python.reflect",
+        "AccessorType")
+
+    def __init__(self, name, bases, d):
+        type.__init__(self, name, bases, d)
+        accessors = {}
+        prefixs = ["get_", "set_", "del_"]
+        for k in d.keys():
+            v = getattr(self, k)
+            for i in range(3):
+                if k.startswith(prefixs[i]):
+                    accessors.setdefault(k[4:], [None, None, None])[i] = v
+        for name, (getter, setter, deler) in accessors.items():
+            # create default behaviours for the property - if we leave
+            # the getter as None we won't be able to getattr, etc..
+            if getter is None:
+                if hasattr(self, name):
+                    value = getattr(self, name)
+                    def getter(this, value=value, name=name):
+                        if name in this.__dict__:
+                            return this.__dict__[name]
+                        else:
+                            return value
+                else:
+                    def getter(this, name=name):
+                        if name in this.__dict__:
+                            return this.__dict__[name]
+                        else:
+                            raise AttributeError("no such attribute %r" % name)
+            if setter is None:
+                def setter(this, value, name=name):
+                    this.__dict__[name] = value
+            if deler is None:
+                def deler(this, name=name):
+                    del this.__dict__[name]
+            setattr(self, name, property(getter, setter, deler, ""))
+
+
+class PropertyAccessor(object):
+    """
+    A mixin class for Python 2.2 that uses AccessorType.
+
+    This provides compatibility with the pre-2.2 Accessor mixin, up
+    to a point.
+
+    Extending this class will give you explicit accessor methods; a
+    method called set_foo, for example, is the same as an if statement
+    in __setattr__ looking for 'foo'.  Same for get_foo and del_foo.
+
+    There are also reallyDel and reallySet methods, so you can
+    override specifics in subclasses without clobbering __setattr__
+    and __getattr__, or using non-2.1 compatible code.
+
+    There are incompatibilities with Accessor - accessor
+    methods added after class creation will *not* be detected. OTOH,
+    this method is probably way faster.
+
+    In addition, class attributes will only be used if no getter
+    was defined, and instance attributes will not override getter methods
+    whereas in original Accessor the class attribute or instance attribute
+    would override the getter method.
+    """
+    # addendum to above:
+    # The behaviour of Accessor is wrong IMHO, and I've found bugs
+    # caused by it.
+    #  -- itamar
+
+    deprecatedModuleAttribute(
+        Version("Twisted", 12, 1, 0),
+        "PropertyAccessor is old and untested. Please write your own version "
+        "of this functionality if you need it.", "twisted.python.reflect",
+        "PropertyAccessor")
+    __metaclass__ = AccessorType
+
+    def reallySet(self, k, v):
+        self.__dict__[k] = v
+
+    def reallyDel(self, k):
+        del self.__dict__[k]
+
+
+class Accessor:
+    """
+    Extending this class will give you explicit accessor methods; a
+    method called C{set_foo}, for example, is the same as an if statement
+    in L{__setattr__} looking for C{'foo'}.  Same for C{get_foo} and
+    C{del_foo}.  There are also L{reallyDel} and L{reallySet} methods,
+    so you can override specifics in subclasses without clobbering
+    L{__setattr__} and L{__getattr__}.
+
+    This implementation is for Python 2.1.
+    """
+
+    deprecatedModuleAttribute(
+        Version("Twisted", 12, 1, 0),
+        "Accessor is an implementation for Python 2.1 which is no longer "
+        "supported by Twisted.", "twisted.python.reflect", "Accessor")
+
+    def __setattr__(self, k,v):
+        kstring='set_%s'%k
+        if hasattr(self.__class__,kstring):
+            return getattr(self,kstring)(v)
+        else:
+            self.reallySet(k,v)
+
+    def __getattr__(self, k):
+        kstring='get_%s'%k
+        if hasattr(self.__class__,kstring):
+            return getattr(self,kstring)()
+        raise AttributeError("%s instance has no accessor for: %s" % (qual(self.__class__),k))
+
+    def __delattr__(self, k):
+        kstring='del_%s'%k
+        if hasattr(self.__class__,kstring):
+            getattr(self,kstring)()
+            return
+        self.reallyDel(k)
+
+    def reallySet(self, k,v):
+        """
+        *actually* set self.k to v without incurring side-effects.
+        This is a hook to be overridden by subclasses.
+        """
+        if k == "__dict__":
+            self.__dict__.clear()
+            self.__dict__.update(v)
+        else:
+            self.__dict__[k]=v
+
+    def reallyDel(self, k):
+        """
+        *actually* del self.k without incurring side-effects.  This is a
+        hook to be overridden by subclasses.
+        """
+        del self.__dict__[k]
+
+# just in case
+OriginalAccessor = Accessor
+deprecatedModuleAttribute(
+    Version("Twisted", 12, 1, 0),
+    "OriginalAccessor is a reference to class twisted.python.reflect.Accessor "
+    "which is deprecated.", "twisted.python.reflect", "OriginalAccessor")
+
+
+class Summer(Accessor):
+    """
+    Extend from this class to get the capability to maintain 'related
+    sums'.  Have a tuple in your class like the following::
+
+        sums=(('amount','credit','credit_total'),
+              ('amount','debit','debit_total'))
+
+    and the 'credit_total' member of the 'credit' member of self will
+    always be incremented when the 'amount' member of self is
+    incremented, and similarly for the debit versions.
+    """
+
+    deprecatedModuleAttribute(
+        Version("Twisted", 12, 1, 0),
+        "Summer is a child class of twisted.python.reflect.Accessor which is " 
+        "deprecated.", "twisted.python.reflect", "Summer")
+
+    def reallySet(self, k,v):
+        "This method does the work."
+        for sum in self.sums:
+            attr=sum[0]
+            obj=sum[1]
+            objattr=sum[2]
+            if k == attr:
+                try:
+                    oldval=getattr(self, attr)
+                except:
+                    oldval=0
+                diff=v-oldval
+                if hasattr(self, obj):
+                    ob=getattr(self,obj)
+                    if ob is not None:
+                        try:oldobjval=getattr(ob, objattr)
+                        except:oldobjval=0.0
+                        setattr(ob,objattr,oldobjval+diff)
+
+            elif k == obj:
+                if hasattr(self, attr):
+                    x=getattr(self,attr)
+                    setattr(self,attr,0)
+                    y=getattr(self,k)
+                    Accessor.reallySet(self,k,v)
+                    setattr(self,attr,x)
+                    Accessor.reallySet(self,y,v)
+        Accessor.reallySet(self,k,v)
+
+
+class QueueMethod:
+    """
+    I represent a method that doesn't exist yet.
+    """
+    def __init__(self, name, calls):
+        self.name = name
+        self.calls = calls
+    def __call__(self, *args):
+        self.calls.append((self.name, args))
+
+
+def funcinfo(function):
+    """
+    this is more documentation for myself than useful code.
+    """
+    warnings.warn(
+        "[v2.5] Use inspect.getargspec instead of twisted.python.reflect.funcinfo",
+        DeprecationWarning,
+        stacklevel=2)
+    code=function.func_code
+    name=function.func_name
+    argc=code.co_argcount
+    argv=code.co_varnames[:argc]
+    defaults=function.func_defaults
+
+    out = []
+
+    out.append('The function %s accepts %s arguments' % (name ,argc))
+    if defaults:
+        required=argc-len(defaults)
+        out.append('It requires %s arguments' % required)
+        out.append('The arguments required are: %s' % argv[:required])
+        out.append('additional arguments are:')
+        for i in range(argc-required):
+            j=i+required
+            out.append('%s which has a default of %s' % (argv[j], defaults[i]))
+    return out
+
+
+ISNT=0
+WAS=1
+IS=2
+
+
+def fullFuncName(func):
+    qualName = (str(pickle.whichmodule(func, func.__name__)) + '.' + func.__name__)
+    if namedObject(qualName) is not func:
+        raise Exception("Couldn't find %s as %s." % (func, qualName))
+    return qualName
+
+
+
+def getcurrent(clazz):
+    assert type(clazz) == types.ClassType, 'must be a class...'
+    module = namedModule(clazz.__module__)
+    currclass = getattr(module, clazz.__name__, None)
+    if currclass is None:
+        return clazz
+    return currclass
+
+
+def getClass(obj):
+    """
+    Return the class or type of object 'obj'.
+    Returns a sensible result for old-style and new-style instances and types.
+    """
+    if hasattr(obj, '__class__'):
+        return obj.__class__
+    else:
+        return type(obj)
+
+# class graph nonsense
+
+# I should really have a better name for this...
+def isinst(inst,clazz):
+    if type(inst) != types.InstanceType or type(clazz)!= types.ClassType:
+        return isinstance(inst,clazz)
+    cl = inst.__class__
+    cl2 = getcurrent(cl)
+    clazz = getcurrent(clazz)
+    if issubclass(cl2,clazz):
+        if cl == cl2:
+            return WAS
+        else:
+            inst.__class__ = cl2
+            return IS
+    else:
+        return ISNT
+
+
+
+## the following were factored out of usage
+
+ at deprecated(Version("Twisted", 11, 0, 0), "inspect.getmro")
+def allYourBase(classObj, baseClass=None):
+    """
+    allYourBase(classObj, baseClass=None) -> list of all base
+    classes that are subclasses of baseClass, unless it is None,
+    in which case all bases will be added.
+    """
+    l = []
+    _accumulateBases(classObj, l, baseClass)
+    return l
+
+
+ at deprecated(Version("Twisted", 11, 0, 0), "inspect.getmro")
+def accumulateBases(classObj, l, baseClass=None):
+    _accumulateBases(classObj, l, baseClass)
+
+
+def _accumulateBases(classObj, l, baseClass=None):
+    for base in classObj.__bases__:
+        if baseClass is None or issubclass(base, baseClass):
+            l.append(base)
+        _accumulateBases(base, l, baseClass)
+
+
+def accumulateClassDict(classObj, attr, adict, baseClass=None):
+    """
+    Accumulate all attributes of a given name in a class hierarchy into a single dictionary.
+
+    Assuming all class attributes of this name are dictionaries.
+    If any of the dictionaries being accumulated have the same key, the
+    one highest in the class hierarchy wins.
+    (XXX: If \"highest\" means \"closest to the starting class\".)
+
+    Ex::
+
+      class Soy:
+        properties = {\"taste\": \"bland\"}
+    
+      class Plant:
+        properties = {\"colour\": \"green\"}
+    
+      class Seaweed(Plant):
+        pass
+    
+      class Lunch(Soy, Seaweed):
+        properties = {\"vegan\": 1 }
+    
+      dct = {}
+    
+      accumulateClassDict(Lunch, \"properties\", dct)
+    
+      print dct
+
+    {\"taste\": \"bland\", \"colour\": \"green\", \"vegan\": 1}
+    """
+    for base in classObj.__bases__:
+        accumulateClassDict(base, attr, adict)
+    if baseClass is None or baseClass in classObj.__bases__:
+        adict.update(classObj.__dict__.get(attr, {}))
+
+
+def accumulateClassList(classObj, attr, listObj, baseClass=None):
+    """
+    Accumulate all attributes of a given name in a class hierarchy into a single list.
+
+    Assuming all class attributes of this name are lists.
+    """
+    for base in classObj.__bases__:
+        accumulateClassList(base, attr, listObj)
+    if baseClass is None or baseClass in classObj.__bases__:
+        listObj.extend(classObj.__dict__.get(attr, []))
+
+
+def isSame(a, b):
+    return (a is b)
+
+
+def isLike(a, b):
+    return (a == b)
+
+
+def modgrep(goal):
+    return objgrep(sys.modules, goal, isLike, 'sys.modules')
+
+
+def isOfType(start, goal):
+    return ((type(start) is goal) or
+            (isinstance(start, types.InstanceType) and
+             start.__class__ is goal))
+
+
+def findInstances(start, t):
+    return objgrep(start, t, isOfType)
+
+
+def objgrep(start, goal, eq=isLike, path='', paths=None, seen=None, showUnknowns=0, maxDepth=None):
+    """
+    An insanely CPU-intensive process for finding stuff.
+    """
+    if paths is None:
+        paths = []
+    if seen is None:
+        seen = {}
+    if eq(start, goal):
+        paths.append(path)
+    if id(start) in seen:
+        if seen[id(start)] is start:
+            return
+    if maxDepth is not None:
+        if maxDepth == 0:
+            return
+        maxDepth -= 1
+    seen[id(start)] = start
+    if isinstance(start, types.DictionaryType):
+        for k, v in start.items():
+            objgrep(k, goal, eq, path+'{'+repr(v)+'}', paths, seen, showUnknowns, maxDepth)
+            objgrep(v, goal, eq, path+'['+repr(k)+']', paths, seen, showUnknowns, maxDepth)
+    elif isinstance(start, (list, tuple, deque)):
+        for idx in xrange(len(start)):
+            objgrep(start[idx], goal, eq, path+'['+str(idx)+']', paths, seen, showUnknowns, maxDepth)
+    elif isinstance(start, types.MethodType):
+        objgrep(start.im_self, goal, eq, path+'.im_self', paths, seen, showUnknowns, maxDepth)
+        objgrep(start.im_func, goal, eq, path+'.im_func', paths, seen, showUnknowns, maxDepth)
+        objgrep(start.im_class, goal, eq, path+'.im_class', paths, seen, showUnknowns, maxDepth)
+    elif hasattr(start, '__dict__'):
+        for k, v in start.__dict__.items():
+            objgrep(v, goal, eq, path+'.'+k, paths, seen, showUnknowns, maxDepth)
+        if isinstance(start, types.InstanceType):
+            objgrep(start.__class__, goal, eq, path+'.__class__', paths, seen, showUnknowns, maxDepth)
+    elif isinstance(start, weakref.ReferenceType):
+        objgrep(start(), goal, eq, path+'()', paths, seen, showUnknowns, maxDepth)
+    elif (isinstance(start, types.StringTypes+
+                    (types.IntType, types.FunctionType,
+                     types.BuiltinMethodType, RegexType, types.FloatType,
+                     types.NoneType, types.FileType)) or
+          type(start).__name__ in ('wrapper_descriptor', 'method_descriptor',
+                                   'member_descriptor', 'getset_descriptor')):
+        pass
+    elif showUnknowns:
+        print 'unknown type', type(start), start
+    return paths
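+
+# Editor's note: an illustrative sketch (not in upstream Twisted) of the search
+# described in the docstring above:
+#
+#     class Holder:
+#         pass
+#     h = Holder()
+#     needle = object()
+#     h.stash = {'key': needle}
+#     objgrep(h, needle, isSame)    # -> [".stash['key']"]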
+
+
+
+__all__ = [
+    'InvalidName', 'ModuleNotFound', 'ObjectNotFound',
+
+    'ISNT', 'WAS', 'IS',
+
+    'Settable', 'AccessorType', 'PropertyAccessor', 'Accessor', 'Summer',
+    'QueueMethod', 'OriginalAccessor',
+
+    'funcinfo', 'fullFuncName', 'qual', 'getcurrent', 'getClass', 'isinst',
+    'namedModule', 'namedObject', 'namedClass', 'namedAny',
+    'safe_repr', 'safe_str', 'allYourBase', 'accumulateBases',
+    'prefixedMethodNames', 'addMethodNamesToDict', 'prefixedMethods',
+    'accumulateMethods',
+    'accumulateClassDict', 'accumulateClassList', 'isSame', 'isLike',
+    'modgrep', 'isOfType', 'findInstances', 'objgrep', 'filenameToModuleName',
+    'fullyQualifiedName']
diff --git a/ThirdParty/Twisted/twisted/python/release.py b/ThirdParty/Twisted/twisted/python/release.py
new file mode 100644
index 0000000..2454792
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/release.py
@@ -0,0 +1,63 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A release-automation toolkit.
+
+Don't use this outside of Twisted.
+
+Maintainer: Christopher Armstrong
+"""
+
+import os
+
+
+# errors
+
+class DirectoryExists(OSError):
+    """
+    Some directory exists when it shouldn't.
+    """
+    pass
+
+
+
+class DirectoryDoesntExist(OSError):
+    """
+    Some directory doesn't exist when it should.
+    """
+    pass
+
+
+
+class CommandFailed(OSError):
+    pass
+
+
+
+# utilities
+
+def sh(command, null=True, prompt=False):
+    """
+    I'll try to execute C{command}, and if C{prompt} is true, I'll
+    ask before running it.  If the command returns something other
+    than 0, I'll raise C{CommandFailed(command)}.
+    """
+    print "--$", command
+
+    if prompt:
+        if raw_input("run ?? ").startswith('n'):
+            return
+    if null:
+        command = "%s > /dev/null" % command
+    if os.system(command) != 0:
+        raise CommandFailed(command)
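+
+# Editor's note: a hedged usage sketch (not part of upstream Twisted):
+#
+#     sh("ls /tmp")                   # stdout discarded via > /dev/null
+#     sh("ls /tmp", null=False)       # keep the command's stdout
+#     sh("rm -rf build", prompt=True) # ask "run ?? " before executing
+#
+# Any non-zero exit status raises CommandFailed(command).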
+
+
+
+def runChdirSafe(f, *args, **kw):
+    origdir = os.path.abspath('.')
+    try:
+        return f(*args, **kw)
+    finally:
+        os.chdir(origdir)
diff --git a/ThirdParty/Twisted/twisted/python/roots.py b/ThirdParty/Twisted/twisted/python/roots.py
new file mode 100644
index 0000000..ee3c8a3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/roots.py
@@ -0,0 +1,248 @@
+# -*- test-case-name: twisted.test.test_roots -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Twisted Python Roots: an abstract hierarchy representation for Twisted.
+
+Maintainer: Glyph Lefkowitz
+"""
+
+# System imports
+import types
+from twisted.python import reflect
+
+class NotSupportedError(NotImplementedError):
+    """
+    An exception meaning that the tree-manipulation operation
+    you're attempting to perform is not supported.
+    """
+
+
+class Request:
+    """I am an abstract representation of a request for an entity.
+
+    I also function as the response.  The request is responded to by calling
+    self.write(data) until there is no data left and then calling
+    self.finish().
+    """
+    # This attribute should be set to the string name of the protocol being
+    # responded to (e.g. HTTP or FTP)
+    wireProtocol = None
+    def write(self, data):
+        """Add some data to the response to this request.
+        """
+        raise NotImplementedError("%s.write" % reflect.qual(self.__class__))
+
+    def finish(self):
+        """The response to this request is finished; flush all data to the network stream.
+        """
+        raise NotImplementedError("%s.finish" % reflect.qual(self.__class__))
+
+
+class Entity:
+    """I am a terminal object in a hierarchy, with no children.
+
+    I represent a null interface; certain non-instance objects (strings and
+    integers, notably) are Entities.
+
+    Implementing the methods on this class is suggested but not required;
+    they will be emulated on a per-protocol basis for types which do not
+    handle them.
+    """
+    def render(self, request):
+        """
+        I produce a stream of bytes for the request, by calling request.write()
+        and request.finish().
+        """
+        raise NotImplementedError("%s.render" % reflect.qual(self.__class__))
+
+
+class Collection:
+    """I represent a static collection of entities.
+
+    I contain methods designed to represent collections that can be dynamically
+    created.
+    """
+
+    def __init__(self, entities=None):
+        """Initialize me.
+        """
+        if entities is not None:
+            self.entities = entities
+        else:
+            self.entities = {}
+
+    def getStaticEntity(self, name):
+        """Get an entity that was added to me using putEntity.
+
+        This method will return 'None' if it fails.
+        """
+        return self.entities.get(name)
+
+    def getDynamicEntity(self, name, request):
+        """Subclass this to generate an entity on demand.
+
+        This method should return 'None' if it fails.
+        """
+
+    def getEntity(self, name, request):
+        """Retrieve an entity from me.
+
+        I will first attempt to retrieve an entity statically; static entities
+        will obscure dynamic ones.  If that fails, I will retrieve the entity
+        dynamically.
+
+        If I cannot retrieve an entity, I will return 'None'.
+        """
+        ent = self.getStaticEntity(name)
+        if ent is not None:
+            return ent
+        ent = self.getDynamicEntity(name, request)
+        if ent is not None:
+            return ent
+        return None
+
+    def putEntity(self, name, entity):
+        """Store a static reference on 'name' for 'entity'.
+
+        Raises a KeyError if the operation fails.
+        """
+        self.entities[name] = entity
+
+    def delEntity(self, name):
+        """Remove a static reference for 'name'.
+
+        Raises a KeyError if the operation fails.
+        """
+        del self.entities[name]
+
+    def storeEntity(self, name, request):
+        """Store an entity for 'name', based on the content of 'request'.
+        """
+        raise NotSupportedError("%s.storeEntity" % reflect.qual(self.__class__))
+
+    def removeEntity(self, name, request):
+        """Remove an entity for 'name', based on the content of 'request'.
+        """
+        raise NotSupportedError("%s.removeEntity" % reflect.qual(self.__class__))
+
+    def listStaticEntities(self):
+        """Retrieve a list of all name, entity pairs that I store references to.
+
+        See getStaticEntity.
+        """
+        return self.entities.items()
+
+    def listDynamicEntities(self, request):
+        """A list of all name, entity that I can generate on demand.
+
+        See getDynamicEntity.
+        """
+        return []
+
+    def listEntities(self, request):
+        """Retrieve a list of all name, entity pairs I contain.
+
+        See getEntity.
+        """
+        return self.listStaticEntities() + self.listDynamicEntities(request)
+
+    def listStaticNames(self):
+        """Retrieve a list of the names of entities that I store references to.
+
+        See getStaticEntity.
+        """
+        return self.entities.keys()
+
+
+    def listDynamicNames(self):
+        """Retrieve a list of the names of entities that I store references to.
+
+        See getDynamicEntity.
+        """
+        return []
+
+
+    def listNames(self, request):
+        """Retrieve a list of all names for entities that I contain.
+
+        See getEntity.
+        """
+        return self.listStaticNames()
+
+
+class ConstraintViolation(Exception):
+    """An exception raised when a constraint is violated.
+    """
+
+
+class Constrained(Collection):
+    """A collection that has constraints on its names and/or entities."""
+
+    def nameConstraint(self, name):
+        """A method that determines whether an entity may be added to me with a given name.
+
+        If the constraint is satisfied, return 1; if the constraint is not
+        satisfied, either return 0 or raise a descriptive ConstraintViolation.
+        """
+        return 1
+
+    def entityConstraint(self, entity):
+        """A method that determines whether an entity may be added to me.
+
+        If the constraint is satisfied, return 1; if the constraint is not
+        satisfied, either return 0 or raise a descriptive ConstraintViolation.
+        """
+        return 1
+
+    def reallyPutEntity(self, name, entity):
+        Collection.putEntity(self, name, entity)
+
+    def putEntity(self, name, entity):
+        """Store an entity if it meets both constraints.
+
+        Otherwise raise a ConstraintViolation.
+        """
+        if self.nameConstraint(name):
+            if self.entityConstraint(entity):
+                self.reallyPutEntity(name, entity)
+            else:
+                raise ConstraintViolation("Entity constraint violated.")
+        else:
+            raise ConstraintViolation("Name constraint violated.")
+
+
+class Locked(Constrained):
+    """A collection that can be locked from adding entities."""
+
+    locked = 0
+
+    def lock(self):
+        self.locked = 1
+
+    def entityConstraint(self, entity):
+        return not self.locked
+
+
+class Homogenous(Constrained):
+    """A homogenous collection of entities.
+
+    I will only contain entities that are an instance of the class or type
+    specified by my 'entityType' attribute.
+    """
+
+    entityType = types.InstanceType
+
+    def entityConstraint(self, entity):
+        if isinstance(entity, self.entityType):
+            return 1
+        else:
+            raise ConstraintViolation("%s of incorrect type (%s)" %
+                                      (entity, self.entityType))
+
+    def getNameType(self):
+        return "Name"
+
+    def getEntityType(self):
+        return self.entityType.__name__
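+
+# Editor's note: an illustrative sketch (not part of upstream Twisted) of the
+# collection API above, using a Homogenous collection constrained to strings:
+#
+#     c = Homogenous()
+#     c.entityType = types.StringType
+#     c.putEntity('greeting', 'hello')       # accepted, entity is a str
+#     c.getEntity('greeting', request=None)  # -> 'hello'
+#     c.putEntity('count', 7)                # raises ConstraintViolation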
diff --git a/ThirdParty/Twisted/twisted/python/runtime.py b/ThirdParty/Twisted/twisted/python/runtime.py
new file mode 100644
index 0000000..fe43ab7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/runtime.py
@@ -0,0 +1,154 @@
+# -*- test-case-name: twisted.python.test.test_runtime -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from __future__ import division, absolute_import
+
+# System imports
+import os
+import sys
+import time
+import imp
+
+from twisted.python import compat
+
+if compat._PY3:
+    _winregModule = "winreg"
+    _threadModule = "_thread"
+else:
+    _winregModule = "_winreg"
+    _threadModule = "thread"
+
+
+
+def shortPythonVersion():
+    """
+    Returns the Python version as a dot-separated string.
+    """
+    return "%s.%s.%s" % sys.version_info[:3]
+
+
+
+knownPlatforms = {
+    'nt': 'win32',
+    'ce': 'win32',
+    'posix': 'posix',
+    'java': 'java',
+    'org.python.modules.os': 'java',
+    }
+
+
+
+_timeFunctions = {
+    #'win32': time.clock,
+    'win32': time.time,
+    }
+
+
+
+class Platform:
+    """Gives us information about the platform we're running on"""
+
+    type = knownPlatforms.get(os.name)
+    seconds = staticmethod(_timeFunctions.get(type, time.time))
+    _platform = sys.platform
+
+    def __init__(self, name=None, platform=None):
+        if name is not None:
+            self.type = knownPlatforms.get(name)
+            self.seconds = _timeFunctions.get(self.type, time.time)
+        if platform is not None:
+            self._platform = platform
+
+
+    def isKnown(self):
+        """Do we know about this platform?"""
+        return self.type is not None
+
+
+    def getType(self):
+        """Return 'posix', 'win32' or 'java'"""
+        return self.type
+
+
+    def isMacOSX(self):
+        """Check if current platform is Mac OS X.
+
+        @return: C{True} if the current platform has been detected as OS X
+        @rtype: C{bool}
+        """
+        return self._platform == "darwin"
+
+
+    def isWinNT(self):
+        """Are we running in Windows NT?"""
+        if self.getType() == 'win32':
+            winreg = __import__(_winregModule)
+            try:
+                k = winreg.OpenKeyEx(
+                        winreg.HKEY_LOCAL_MACHINE,
+                        r'Software\Microsoft\Windows NT\CurrentVersion')
+                winreg.QueryValueEx(k, 'SystemRoot')
+                return 1
+            except WindowsError:
+                return 0
+        # not windows NT
+        return 0
+
+
+    def isWindows(self):
+        return self.getType() == 'win32'
+
+
+    def isVista(self):
+        """
+        Check if current platform is Windows Vista or Windows Server 2008.
+
+        @return: C{True} if the current platform has been detected as Vista
+        @rtype: C{bool}
+        """
+        if getattr(sys, "getwindowsversion", None) is not None:
+            return sys.getwindowsversion()[0] == 6
+        else:
+            return False
+
+
+    def isLinux(self):
+        """
+        Check if current platform is Linux.
+
+        @return: C{True} if the current platform has been detected as Linux.
+        @rtype: C{bool}
+        """
+        return self._platform.startswith("linux")
+
+
+    def supportsThreads(self):
+        """Can threads be created?
+        """
+        try:
+            return imp.find_module(_threadModule)[0] is None
+        except ImportError:
+            return False
+
+
+    def supportsINotify(self):
+        """
+        Return C{True} if we can use the inotify API on this platform.
+
+        @since: 10.1
+        """
+        try:
+            from twisted.python._inotify import INotifyError, init
+        except ImportError:
+            return False
+        try:
+            os.close(init())
+        except INotifyError:
+            return False
+        return True
+
+
+platform = Platform()
+platformType = platform.getType()
+seconds = platform.seconds
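+
+# Editor's note: a minimal usage sketch (not part of upstream Twisted):
+#
+#     from twisted.python.runtime import platform, platformType, seconds
+#     platformType                 # 'posix', 'win32' or 'java'
+#     platform.supportsThreads()   # True when a usable thread module exists
+#     seconds()                    # current time from the per-platform clock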
diff --git a/ThirdParty/Twisted/twisted/python/sendmsg.c b/ThirdParty/Twisted/twisted/python/sendmsg.c
new file mode 100644
index 0000000..9f2fd8c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/sendmsg.c
@@ -0,0 +1,511 @@
+/*
+ * Copyright (c) Twisted Matrix Laboratories.
+ * See LICENSE for details.
+ */
+
+#define PY_SSIZE_T_CLEAN 1
+#include <Python.h>
+
+#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
+/* This may cause some warnings, but if you want to get rid of them, upgrade
+ * your Python version.  */
+typedef int Py_ssize_t;
+#endif
+
+#include <sys/types.h>
+#include <sys/socket.h>
+#include <signal.h>
+
+#include <sys/param.h>
+
+#ifdef BSD
+#include <sys/uio.h>
+#endif
+
+/*
+ * As per
+ * <http://pubs.opengroup.org/onlinepubs/007904875/basedefs/sys/socket.h.html
+ * #tag_13_61_05>:
+ *
+ *     "To forestall portability problems, it is recommended that applications
+ *     not use values larger than (2**31)-1 for the socklen_t type."
+ */
+
+#define SOCKLEN_MAX 0x7FFFFFFF
+
+PyObject *sendmsg_socket_error;
+
+static PyObject *sendmsg_sendmsg(PyObject *self, PyObject *args, PyObject *keywds);
+static PyObject *sendmsg_recvmsg(PyObject *self, PyObject *args, PyObject *keywds);
+static PyObject *sendmsg_getsockfam(PyObject *self, PyObject *args, PyObject *keywds);
+
+static char sendmsg_doc[] = "\
+Bindings for sendmsg(2), recvmsg(2), and a minimal helper for inspecting\n\
+address family of a socket.\n\
+";
+
+static char sendmsg_sendmsg_doc[] = "\
+Wrap the C sendmsg(2) function for sending \"messages\" on a socket.\n\
+\n\
+@param fd: The file descriptor of the socket over which to send a message.\n\
+@type fd: C{int}\n\
+\n\
+@param data: Bytes to write to the socket.\n\
+@type data: C{str}\n\
+\n\
+@param flags: Flags to affect how the message is sent.  See the C{MSG_}\n\
+    constants in the sendmsg(2) manual page.  By default no flags are set.\n\
+@type flags: C{int}\n\
+\n\
+@param ancillary: Extra data to send over the socket outside of the normal\n\
+    datagram or stream mechanism.  By default no ancillary data is sent.\n\
+@type ancillary: C{list} of C{tuple} of C{int}, C{int}, and C{str}.\n\
+\n\
+@raise OverflowError: Raised if too much ancillary data is given.\n\
+@raise socket.error: Raised if the underlying syscall indicates an error.\n\
+\n\
+@return: The return value of the underlying syscall, if it succeeds.\n\
+";
+
+static char sendmsg_recvmsg_doc[] = "\
+Wrap the C recvmsg(2) function for receiving \"messages\" on a socket.\n\
+\n\
+@param fd: The file descriptor of the socket over which to receive a message.\n\
+@type fd: C{int}\n\
+\n\
+@param flags: Flags to affect how the message is received.  See the C{MSG_}\n\
+    constants in the sendmsg(2) manual page.  By default no flags are set.\n\
+@type flags: C{int}\n\
+\n\
+@param maxsize: The maximum number of bytes to receive from the socket\n\
+    using the datagram or stream mechanism.  The default maximum is 8192.\n\
+@type maxsize: C{int}\n\
+\n\
+@param cmsg_size: The maximum number of bytes to receive from the socket\n\
+    outside of the normal datagram or stream mechanism.  The default maximum is 4096.\n\
+\n\
+@raise OverflowError: Raised if too much ancillary data is given.\n\
+@raise socket.error: Raised if the underlying syscall indicates an error.\n\
+\n\
+@return: A C{tuple} of three elements: the bytes received using the\n\
+    datagram/stream mechanism, flags as an C{int} describing the data\n\
+    received, and a C{list} of C{tuples} giving ancillary received data.\n\
+";
+
+static char sendmsg_getsockfam_doc[] = "\
+Retrieve the address family of a given socket.\n\
+\n\
+@param fd: The file descriptor of the socket the address family of which\n\
+    to retrieve.\n\
+@type fd: C{int}\n\
+\n\
+@raise socket.error: Raised if the underlying getsockname call indicates\n\
+    an error.\n\
+\n\
+@return: A C{int} representing the address family of the socket.  For\n\
+    example, L{socket.AF_INET}, L{socket.AF_INET6}, or L{socket.AF_UNIX}.\n\
+";
+
+static PyMethodDef sendmsg_methods[] = {
+    {"send1msg", (PyCFunction) sendmsg_sendmsg, METH_VARARGS | METH_KEYWORDS,
+     sendmsg_sendmsg_doc},
+    {"recv1msg", (PyCFunction) sendmsg_recvmsg, METH_VARARGS | METH_KEYWORDS,
+     sendmsg_recvmsg_doc},
+    {"getsockfam", (PyCFunction) sendmsg_getsockfam,
+     METH_VARARGS | METH_KEYWORDS, sendmsg_getsockfam_doc},
+    {NULL, NULL, 0, NULL}
+};
+
+
+PyMODINIT_FUNC initsendmsg(void) {
+    PyObject *module;
+
+    sendmsg_socket_error = NULL; /* Make sure that this has a known value
+                                    before doing anything that might exit. */
+
+    module = Py_InitModule3("sendmsg", sendmsg_methods, sendmsg_doc);
+
+    if (!module) {
+        return;
+    }
+
+    /*
+      The following is the only value mentioned by POSIX:
+      http://www.opengroup.org/onlinepubs/9699919799/basedefs/sys_socket.h.html
+    */
+
+    if (-1 == PyModule_AddIntConstant(module, "SCM_RIGHTS", SCM_RIGHTS)) {
+        return;
+    }
+
+
+    /* BSD, Darwin, Hurd */
+#if defined(SCM_CREDS)
+    if (-1 == PyModule_AddIntConstant(module, "SCM_CREDS", SCM_CREDS)) {
+        return;
+    }
+#endif
+
+    /* Linux */
+#if defined(SCM_CREDENTIALS)
+    if (-1 == PyModule_AddIntConstant(module, "SCM_CREDENTIALS", SCM_CREDENTIALS)) {
+        return;
+    }
+#endif
+
+    /* Apparently everywhere, but not standardized. */
+#if defined(SCM_TIMESTAMP)
+    if (-1 == PyModule_AddIntConstant(module, "SCM_TIMESTAMP", SCM_TIMESTAMP)) {
+        return;
+    }
+#endif
+
+    module = PyImport_ImportModule("socket");
+    if (!module) {
+        return;
+    }
+
+    sendmsg_socket_error = PyObject_GetAttrString(module, "error");
+    if (!sendmsg_socket_error) {
+        return;
+    }
+}
+
+static PyObject *sendmsg_sendmsg(PyObject *self, PyObject *args, PyObject *keywds) {
+
+    int fd;
+    int flags = 0;
+    Py_ssize_t sendmsg_result, iovec_length;
+    struct msghdr message_header;
+    struct iovec iov[1];
+    PyObject *ancillary = NULL;
+    PyObject *iterator = NULL;
+    PyObject *item = NULL;
+    PyObject *result_object = NULL;
+
+    static char *kwlist[] = {"fd", "data", "flags", "ancillary", NULL};
+
+    if (!PyArg_ParseTupleAndKeywords(
+            args, keywds, "it#|iO:sendmsg", kwlist,
+            &fd,
+            &iov[0].iov_base,
+            &iovec_length,
+            &flags,
+            &ancillary)) {
+        return NULL;
+    }
+
+    iov[0].iov_len = iovec_length;
+
+    message_header.msg_name = NULL;
+    message_header.msg_namelen = 0;
+
+    message_header.msg_iov = iov;
+    message_header.msg_iovlen = 1;
+
+    message_header.msg_control = NULL;
+    message_header.msg_controllen = 0;
+
+    message_header.msg_flags = 0;
+
+    if (ancillary) {
+
+        if (!PyList_Check(ancillary)) {
+            PyErr_Format(PyExc_TypeError,
+                         "send1msg argument 3 expected list, got %s",
+                         ancillary->ob_type->tp_name);
+            goto finished;
+        }
+
+        iterator = PyObject_GetIter(ancillary);
+
+        if (iterator == NULL) {
+            goto finished;
+        }
+
+        size_t all_data_len = 0;
+
+        /* First we need to know how big the buffer needs to be in order to
+           have enough space for all of the messages. */
+        while ( (item = PyIter_Next(iterator)) ) {
+            int type, level;
+            Py_ssize_t data_len;
+            size_t prev_all_data_len;
+            char *data;
+
+            if (!PyTuple_Check(item)) {
+                PyErr_Format(PyExc_TypeError,
+                             "send1msg argument 3 expected list of tuple, "
+                             "got list containing %s",
+                             item->ob_type->tp_name);
+                goto finished;
+            }
+
+            if (!PyArg_ParseTuple(
+                        item, "iit#:sendmsg ancillary data (level, type, data)",
+                        &level, &type, &data, &data_len)) {
+                goto finished;
+            }
+
+            prev_all_data_len = all_data_len;
+            all_data_len += CMSG_SPACE(data_len);
+
+            Py_DECREF(item);
+            item = NULL;
+
+            if (all_data_len < prev_all_data_len) {
+                PyErr_Format(PyExc_OverflowError,
+                             "Too much msg_control to fit in a size_t: %zu",
+                             prev_all_data_len);
+                goto finished;
+            }
+        }
+
+        Py_DECREF(iterator);
+        iterator = NULL;
+
+        /* Allocate the buffer for all of the ancillary elements, if we have
+         * any.  */
+        if (all_data_len) {
+            if (all_data_len > SOCKLEN_MAX) {
+                PyErr_Format(PyExc_OverflowError,
+                             "Too much msg_control to fit in a socklen_t: %zu",
+                             all_data_len);
+                goto finished;
+            }
+            message_header.msg_control = PyMem_Malloc(all_data_len);
+            if (!message_header.msg_control) {
+                PyErr_NoMemory();
+                goto finished;
+            }
+        } else {
+            message_header.msg_control = NULL;
+        }
+        message_header.msg_controllen = (socklen_t) all_data_len;
+
+        iterator = PyObject_GetIter(ancillary); /* again */
+
+        if (!iterator) {
+            goto finished;
+        }
+
+        /* Unpack the tuples into the control message. */
+        struct cmsghdr *control_message = CMSG_FIRSTHDR(&message_header);
+        while ( (item = PyIter_Next(iterator)) ) {
+            int type, level;
+            Py_ssize_t data_len;
+            size_t data_size;
+            unsigned char *data, *cmsg_data;
+
+            /* We explicitly allocated enough space for all ancillary data
+               above; if there isn't enough room, all bets are off. */
+            assert(control_message);
+
+            if (!PyArg_ParseTuple(item,
+                                  "iit#:sendmsg ancillary data (level, type, data)",
+                                  &level,
+                                  &type,
+                                  &data,
+                                  &data_len)) {
+                goto finished;
+            }
+
+            control_message->cmsg_level = level;
+            control_message->cmsg_type = type;
+            data_size = CMSG_LEN(data_len);
+
+            if (data_size > SOCKLEN_MAX) {
+                PyErr_Format(PyExc_OverflowError,
+                             "CMSG_LEN(%zd) > SOCKLEN_MAX", data_len);
+                goto finished;
+            }
+
+            control_message->cmsg_len = (socklen_t) data_size;
+
+            cmsg_data = CMSG_DATA(control_message);
+            memcpy(cmsg_data, data, data_len);
+
+            Py_DECREF(item);
+            item = NULL;
+
+            control_message = CMSG_NXTHDR(&message_header, control_message);
+        }
+        Py_DECREF(iterator);
+        iterator = NULL;
+
+        if (PyErr_Occurred()) {
+            goto finished;
+        }
+    }
+
+    sendmsg_result = sendmsg(fd, &message_header, flags);
+
+    if (sendmsg_result < 0) {
+        PyErr_SetFromErrno(sendmsg_socket_error);
+        goto finished;
+    }
+
+    result_object = Py_BuildValue("n", sendmsg_result);
+
+ finished:
+
+    if (item) {
+        Py_DECREF(item);
+        item = NULL;
+    }
+    if (iterator) {
+        Py_DECREF(iterator);
+        iterator = NULL;
+    }
+    if (message_header.msg_control) {
+        PyMem_Free(message_header.msg_control);
+        message_header.msg_control = NULL;
+    }
+    return result_object;
+}
+
+static PyObject *sendmsg_recvmsg(PyObject *self, PyObject *args, PyObject *keywds) {
+    int fd = -1;
+    int flags = 0;
+    int maxsize = 8192;
+    int cmsg_size = 4096;
+    size_t cmsg_space;
+    size_t cmsg_overhead;
+    Py_ssize_t recvmsg_result;
+
+    struct msghdr message_header;
+    struct cmsghdr *control_message;
+    struct iovec iov[1];
+    char *cmsgbuf;
+    PyObject *ancillary;
+    PyObject *final_result = NULL;
+
+    static char *kwlist[] = {"fd", "flags", "maxsize", "cmsg_size", NULL};
+
+    if (!PyArg_ParseTupleAndKeywords(args, keywds, "i|iii:recvmsg", kwlist,
+                                     &fd, &flags, &maxsize, &cmsg_size)) {
+        return NULL;
+    }
+
+    cmsg_space = CMSG_SPACE(cmsg_size);
+
+    /* overflow check */
+    if (cmsg_space > SOCKLEN_MAX) {
+        PyErr_Format(PyExc_OverflowError,
+                     "CMSG_SPACE(cmsg_size) greater than SOCKLEN_MAX: %d",
+                     cmsg_size);
+        return NULL;
+    }
+
+    message_header.msg_name = NULL;
+    message_header.msg_namelen = 0;
+
+    iov[0].iov_len = maxsize;
+    iov[0].iov_base = PyMem_Malloc(maxsize);
+
+    if (!iov[0].iov_base) {
+        PyErr_NoMemory();
+        return NULL;
+    }
+
+    message_header.msg_iov = iov;
+    message_header.msg_iovlen = 1;
+
+    cmsgbuf = PyMem_Malloc(cmsg_space);
+
+    if (!cmsgbuf) {
+        PyMem_Free(iov[0].iov_base);
+        PyErr_NoMemory();
+        return NULL;
+    }
+
+    memset(cmsgbuf, 0, cmsg_space);
+    message_header.msg_control = cmsgbuf;
+    /* see above for overflow check */
+    message_header.msg_controllen = (socklen_t) cmsg_space;
+
+    recvmsg_result = recvmsg(fd, &message_header, flags);
+    if (recvmsg_result < 0) {
+        PyErr_SetFromErrno(sendmsg_socket_error);
+        goto finished;
+    }
+
+    ancillary = PyList_New(0);
+    if (!ancillary) {
+        goto finished;
+    }
+
+    for (control_message = CMSG_FIRSTHDR(&message_header);
+         control_message;
+         control_message = CMSG_NXTHDR(&message_header,
+                                       control_message)) {
+        PyObject *entry;
+
+        /* Some platforms apparently always fill out the ancillary data
+           structure with a single bogus value if none is provided; ignore it,
+           if that is the case. */
+
+        if ((!(control_message->cmsg_level)) &&
+            (!(control_message->cmsg_type))) {
+            continue;
+        }
+
+        /*
+         * Figure out how much of the cmsg size is cmsg structure overhead - in
+         * other words, how much is not part of the application data.  This lets
+         * us compute the right application data size below.  There should
+         * really be a CMSG_ macro for this.
+         */
+        cmsg_overhead = (char*)CMSG_DATA(control_message) - (char*)control_message;
+
+        entry = Py_BuildValue(
+            "(iis#)",
+            control_message->cmsg_level,
+            control_message->cmsg_type,
+            CMSG_DATA(control_message),
+            (Py_ssize_t) (control_message->cmsg_len - cmsg_overhead));
+
+        if (!entry) {
+            Py_DECREF(ancillary);
+            goto finished;
+        }
+
+        if (PyList_Append(ancillary, entry) < 0) {
+            Py_DECREF(ancillary);
+            Py_DECREF(entry);
+            goto finished;
+        } else {
+            Py_DECREF(entry);
+        }
+    }
+
+    final_result = Py_BuildValue(
+        "s#iO",
+        iov[0].iov_base,
+        recvmsg_result,
+        message_header.msg_flags,
+        ancillary);
+
+    Py_DECREF(ancillary);
+
+  finished:
+    PyMem_Free(iov[0].iov_base);
+    PyMem_Free(cmsgbuf);
+    return final_result;
+}
+
+static PyObject *sendmsg_getsockfam(PyObject *self, PyObject *args,
+                                    PyObject *keywds) {
+    int fd;
+    struct sockaddr sa;
+    static char *kwlist[] = {"fd", NULL};
+    if (!PyArg_ParseTupleAndKeywords(args, keywds, "i", kwlist, &fd)) {
+        return NULL;
+    }
+    socklen_t sz = sizeof(sa);
+    if (getsockname(fd, &sa, &sz)) {
+        PyErr_SetFromErrno(sendmsg_socket_error);
+        return NULL;
+    }
+    return Py_BuildValue("i", sa.sa_family);
+}
diff --git a/ThirdParty/Twisted/twisted/python/shortcut.py b/ThirdParty/Twisted/twisted/python/shortcut.py
new file mode 100644
index 0000000..6d6546b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/shortcut.py
@@ -0,0 +1,76 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Creation of  Windows shortcuts.
+
+Requires win32all.
+"""
+
+from win32com.shell import shell
+import pythoncom
+import os
+
+
+def open(filename):
+    """Open an existing shortcut for reading.
+
+    @return: The shortcut object
+    @rtype: Shortcut
+    """
+    sc=Shortcut()
+    sc.load(filename)
+    return sc
+
+
+class Shortcut:
+    """A shortcut on Win32.
+    >>> sc=Shortcut(path, arguments, description, workingdir, iconpath, iconidx)
+    @param path: Location of the target
+    @param arguments: If path points to an executable, optional arguments to
+                      pass
+    @param description: Human-readable description of target
+    @param workingdir: Directory from which target is launched
+    @param iconpath: Filename that contains an icon for the shortcut
+    @param iconidx: If iconpath is set, optional index of the icon desired
+    """
+
+    def __init__(self, 
+                 path=None,
+                 arguments=None, 
+                 description=None,
+                 workingdir=None,
+                 iconpath=None,
+                 iconidx=0):
+        self._base = pythoncom.CoCreateInstance(
+            shell.CLSID_ShellLink, None,
+            pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink
+        )
+        data = map(None, 
+                   ['"%s"' % os.path.abspath(path), arguments, description,
+                    os.path.abspath(workingdir), os.path.abspath(iconpath)], 
+                   ("SetPath", "SetArguments", "SetDescription",
+                   "SetWorkingDirectory") )
+        for value, function in data:
+            if value and function:
+                # call function on each non-null value
+                getattr(self, function)(value)
+        if iconpath:
+            self.SetIconLocation(iconpath, iconidx)
+
+    def load( self, filename ):
+        """Read a shortcut file from disk."""
+        self._base.QueryInterface(pythoncom.IID_IPersistFile).Load(filename)
+    
+    def save( self, filename ):
+        """Write the shortcut to disk.
+
+        The file should be named something.lnk.
+        """
+        self._base.QueryInterface(pythoncom.IID_IPersistFile).Save(filename, 0)
+    
+    def __getattr__( self, name ):
+        if name != "_base":
+            return getattr(self._base, name)
+        raise AttributeError, "%s instance has no attribute %s" % \
+                (self.__class__.__name__, name)
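+
+# Editor's note: an illustrative sketch (not part of upstream Twisted), mirroring
+# the constructor signature shown in the class docstring; the paths are made up:
+#
+#     sc = Shortcut(r"C:\Python27\python.exe", "-i", "Python interpreter",
+#                   r"C:\Python27", r"C:\Python27\python.exe", 0)
+#     sc.save("python.lnk")
+#     sc2 = open("python.lnk")   # module-level open() above reads it back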
diff --git a/ThirdParty/Twisted/twisted/python/syslog.py b/ThirdParty/Twisted/twisted/python/syslog.py
new file mode 100644
index 0000000..88d8d02
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/syslog.py
@@ -0,0 +1,107 @@
+# -*- test-case-name: twisted.python.test.test_syslog -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Classes and utility functions for integrating Twisted and syslog.
+
+You probably want to call L{startLogging}.
+"""
+
+syslog = __import__('syslog')
+
+from twisted.python import log
+
+# These defaults come from the Python 2.3 syslog docs.
+DEFAULT_OPTIONS = 0
+DEFAULT_FACILITY = syslog.LOG_USER
+
+
+
+class SyslogObserver:
+    """
+    A log observer for logging to syslog.
+
+    See L{twisted.python.log} for context.
+
+    This logObserver will automatically use LOG_ALERT priority for logged
+    failures (such as from C{log.err()}), but you can use any priority and
+    facility by setting the 'C{syslogPriority}' and 'C{syslogFacility}' keys in
+    the event dict.
+    """
+    openlog = syslog.openlog
+    syslog = syslog.syslog
+
+    def __init__(self, prefix, options=DEFAULT_OPTIONS,
+                 facility=DEFAULT_FACILITY):
+        """
+        @type prefix: C{str}
+        @param prefix: The syslog prefix to use.
+
+        @type options: C{int}
+        @param options: A bitvector represented as an integer of the syslog
+            options to use.
+
+        @type facility: C{int}
+        @param facility: An indication to the syslog daemon of what sort of
+            program this is (essentially, an additional arbitrary metadata
+            classification for messages sent to syslog by this observer).
+        """
+        self.openlog(prefix, options, facility)
+
+
+    def emit(self, eventDict):
+        """
+        Send a message event to the I{syslog}.
+
+        @param eventDict: The event to send.  If it has no C{'message'} key, it
+            will be ignored.  Otherwise, if it has C{'syslogPriority'} and/or
+            C{'syslogFacility'} keys, these will be used as the syslog priority
+            and facility.  If it has no C{'syslogPriority'} key but a true
+            value for the C{'isError'} key, the B{LOG_ALERT} priority will be
+            used; if it has a false value for C{'isError'}, B{LOG_INFO} will be
+            used.  If the C{'message'} key is multiline, each line will be sent
+            to the syslog separately.
+        """
+        # Figure out what the message-text is.
+        text = log.textFromEventDict(eventDict)
+        if text is None:
+            return
+
+        # Figure out what syslog parameters we might need to use.
+        priority = syslog.LOG_INFO
+        facility = 0
+        if eventDict['isError']:
+            priority = syslog.LOG_ALERT
+        if 'syslogPriority' in eventDict:
+            priority = int(eventDict['syslogPriority'])
+        if 'syslogFacility' in eventDict:
+            facility = int(eventDict['syslogFacility'])
+
+        # Break the message up into lines and send them.
+        lines = text.split('\n')
+        while lines[-1:] == ['']:
+            lines.pop()
+
+        firstLine = True
+        for line in lines:
+            if firstLine:
+                firstLine = False
+            else:
+                line = '\t' + line
+            self.syslog(priority | facility,
+                        '[%s] %s' % (eventDict['system'], line))
+
+
+
+def startLogging(prefix='Twisted', options=DEFAULT_OPTIONS,
+                 facility=DEFAULT_FACILITY, setStdout=1):
+    """
+    Send all Twisted logging output to syslog from now on.
+
+    The prefix, options and facility arguments are passed to
+    C{syslog.openlog()}, see the Python syslog documentation for details. For
+    other parameters, see L{twisted.python.log.startLoggingWithObserver}.
+    """
+    obs = SyslogObserver(prefix, options, facility)
+    log.startLoggingWithObserver(obs.emit, setStdout=setStdout)
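+
+# Editor's note: a minimal usage sketch (not part of upstream Twisted):
+#
+#     import syslog as stdsyslog
+#     from twisted.python import log
+#     from twisted.python.syslog import startLogging
+#
+#     startLogging(prefix='myapp', facility=stdsyslog.LOG_DAEMON)
+#     log.msg("hello via syslog")            # sent with LOG_INFO
+#     log.err(Exception("boom"))             # sent with LOG_ALERT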
diff --git a/ThirdParty/Twisted/twisted/python/systemd.py b/ThirdParty/Twisted/twisted/python/systemd.py
new file mode 100644
index 0000000..d20fa04
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/systemd.py
@@ -0,0 +1,87 @@
+# -*- test-case-name: twisted.python.test.test_systemd -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Integration with systemd.
+
+Currently only the minimum APIs necessary for using systemd's socket activation
+feature are supported.
+"""
+
+__all__ = ['ListenFDs']
+
+from os import getpid
+
+
+class ListenFDs(object):
+    """
+    L{ListenFDs} provides access to file descriptors inherited from systemd.
+
+    Typically L{ListenFDs.fromEnvironment} should be used to construct a new
+    instance of L{ListenFDs}.
+
+    @cvar _START: File descriptors inherited from systemd are always
+        consecutively numbered, with a fixed lowest "starting" descriptor.  This
+        gives the default starting descriptor.  Since this must agree with the
+        value systemd is using, it typically should not be overridden.
+    @type _START: C{int}
+
+    @ivar _descriptors: A C{list} of C{int} giving the descriptors which were
+        inherited.
+    """
+    _START = 3
+
+    def __init__(self, descriptors):
+        """
+        @param descriptors: The descriptors which will be returned from calls to
+            C{inheritedDescriptors}.
+        """
+        self._descriptors = descriptors
+
+
+    @classmethod
+    def fromEnvironment(cls, environ=None, start=None):
+        """
+        @param environ: A dictionary-like object to inspect to discover
+            inherited descriptors.  By default, C{None}, indicating that the
+            real process environment should be inspected.  The default is
+            suitable for typical usage.
+
+        @param start: An integer giving the lowest value of an inherited
+            descriptor systemd will give us.  By default, C{None}, indicating
+            the known correct (that is, in agreement with systemd) value will be
+            used.  The default is suitable for typical usage.
+
+        @return: A new instance of C{cls} which can be used to look up the
+            descriptors which have been inherited.
+        """
+        if environ is None:
+            from os import environ
+        if start is None:
+            start = cls._START
+
+        descriptors = []
+
+        try:
+            pid = int(environ['LISTEN_PID'])
+        except (KeyError, ValueError):
+            pass
+        else:
+            if pid == getpid():
+                try:
+                    count = int(environ['LISTEN_FDS'])
+                except (KeyError, ValueError):
+                    pass
+                else:
+                    descriptors = range(start, start + count)
+                    del environ['LISTEN_PID'], environ['LISTEN_FDS']
+
+        return cls(descriptors)
+
+
+    def inheritedDescriptors(self):
+        """
+        @return: The configured list of descriptors.
+        """
+        return list(self._descriptors)
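+
+# Editor's note: an illustrative sketch (not part of upstream Twisted).  Under a
+# systemd socket-activated service, LISTEN_PID and LISTEN_FDS describe the
+# inherited descriptors:
+#
+#     fds = ListenFDs.fromEnvironment()
+#     for fd in fds.inheritedDescriptors():
+#         pass  # adopt each descriptor, e.g. with reactor.adoptStreamPort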
diff --git a/ThirdParty/Twisted/twisted/python/text.py b/ThirdParty/Twisted/twisted/python/text.py
new file mode 100644
index 0000000..9887e70
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/text.py
@@ -0,0 +1,208 @@
+# -*- test-case-name: twisted.test.test_text -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Miscellany of text-munging functions.
+"""
+
+
+def stringyString(object, indentation=''):
+    """
+    Expansive string formatting for sequence types.
+
+    C{list.__str__} and C{dict.__str__} use C{repr()} to display their
+    elements.  This function also turns these sequence types
+    into strings, but uses C{str()} on their elements instead.
+
+    Sequence elements are also displayed on separate lines, and nested
+    sequences have nested indentation.
+    """
+    braces = ''
+    sl = []
+
+    if type(object) is dict:
+        braces = '{}'
+        for key, value in object.items():
+            value = stringyString(value, indentation + '   ')
+            if isMultiline(value):
+                if endsInNewline(value):
+                    value = value[:-len('\n')]
+                sl.append("%s %s:\n%s" % (indentation, key, value))
+            else:
+                # Oops.  Will have to move that indentation.
+                sl.append("%s %s: %s" % (indentation, key,
+                                         value[len(indentation) + 3:]))
+
+    elif type(object) is tuple or type(object) is list:
+        if type(object) is tuple:
+            braces = '()'
+        else:
+            braces = '[]'
+
+        for element in object:
+            element = stringyString(element, indentation + ' ')
+            sl.append(element.rstrip() + ',')
+    else:
+        sl[:] = map(lambda s, i=indentation: i + s,
+                   str(object).split('\n'))
+
+    if not sl:
+        sl.append(indentation)
+
+    if braces:
+        sl[0] = indentation + braces[0] + sl[0][len(indentation) + 1:]
+        sl[-1] = sl[-1] + braces[-1]
+
+    s = "\n".join(sl)
+
+    if isMultiline(s) and not endsInNewline(s):
+        s = s + '\n'
+
+    return s
+
+
+def isMultiline(s):
+    """
+    Returns C{True} if this string has a newline in it.
+    """
+    return (s.find('\n') != -1)
+
+
+def endsInNewline(s):
+    """
+    Returns C{True} if this string ends in a newline.
+    """
+    return (s[-len('\n'):] == '\n')
+
+
+def greedyWrap(inString, width=80):
+    """
+    Given a string and a column width, return a list of lines.
+
+    Caveat: I use a stupid greedy word-wrapping
+    algorithm.  I won't put two spaces at the end
+    of a sentence.  I don't do full justification.
+    And no, I've never even *heard* of hyphenation.
+    """
+
+    outLines = []
+
+    #eww, evil hacks to allow paragraphs delimited by two \ns :(
+    if inString.find('\n\n') >= 0:
+        paragraphs = inString.split('\n\n')
+        for para in paragraphs:
+            outLines.extend(greedyWrap(para, width) + [''])
+        return outLines
+    inWords = inString.split()
+
+    column = 0
+    ptr_line = 0
+    while inWords:
+        column = column + len(inWords[ptr_line])
+        ptr_line = ptr_line + 1
+
+        if (column > width):
+            if ptr_line == 1:
+                # This single word is too long, it will be the whole line.
+                pass
+            else:
+                # We've gone too far, stop the line one word back.
+                ptr_line = ptr_line - 1
+            (l, inWords) = (inWords[0:ptr_line], inWords[ptr_line:])
+            outLines.append(' '.join(l))
+
+            ptr_line = 0
+            column = 0
+        elif not (len(inWords) > ptr_line):
+            # Clean up the last bit.
+            outLines.append(' '.join(inWords))
+            del inWords[:]
+        else:
+            # Space
+            column = column + 1
+    # next word
+
+    return outLines
+
+
+wordWrap = greedyWrap
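+
+# Editor's note: a minimal usage sketch (not part of upstream Twisted):
+#
+#     greedyWrap("the quick brown fox jumps over the lazy dog", width=15)
+#     # -> ['the quick brown', 'fox jumps over', 'the lazy dog']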
+
+
+def removeLeadingBlanks(lines):
+    ret = []
+    for line in lines:
+        if ret or line.strip():
+            ret.append(line)
+    return ret
+
+
+def removeLeadingTrailingBlanks(s):
+    lines = removeLeadingBlanks(s.split('\n'))
+    lines.reverse()
+    lines = removeLeadingBlanks(lines)
+    lines.reverse()
+    return '\n'.join(lines)+'\n'
+
+
+def splitQuoted(s):
+    """
+    Like a string split, but don't break substrings inside quotes.
+
+    >>> splitQuoted('the "hairy monkey" likes pie')
+    ['the', 'hairy monkey', 'likes', 'pie']
+
+    Another one of those "someone must have a better solution for
+    this" things.  This implementation is a VERY DUMB hack done too
+    quickly.
+    """
+    out = []
+    quot = None
+    phrase = None
+    for word in s.split():
+        if phrase is None:
+            if word and (word[0] in ("\"", "'")):
+                quot = word[0]
+                word = word[1:]
+                phrase = []
+
+        if phrase is None:
+            out.append(word)
+        else:
+            if word and (word[-1] == quot):
+                word = word[:-1]
+                phrase.append(word)
+                out.append(" ".join(phrase))
+                phrase = None
+            else:
+                phrase.append(word)
+
+    return out
+
+
+def strFile(p, f, caseSensitive=True):
+    """
+    Find whether string C{p} occurs in a read()able object C{f}.
+
+    @rtype: C{bool}
+    """
+    buf = ""
+    buf_len = max(len(p), 2**2**2**2)
+    if not caseSensitive:
+        p = p.lower()
+    while 1:
+        r = f.read(buf_len-len(p))
+        if not caseSensitive:
+            r = r.lower()
+        bytes_read = len(r)
+        if bytes_read == 0:
+            return False
+        l = len(buf)+bytes_read-buf_len
+        if l <= 0:
+            buf = buf + r
+        else:
+            buf = buf[l:] + r
+        if buf.find(p) != -1:
+            return True
+
diff --git a/ThirdParty/Twisted/twisted/python/threadable.py b/ThirdParty/Twisted/twisted/python/threadable.py
new file mode 100644
index 0000000..c75847d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/threadable.py
@@ -0,0 +1,139 @@
+# -*- test-case-name: twisted.python.test_threadable -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A module to provide some very basic threading primitives, such as
+synchronization.
+"""
+
+from __future__ import division, absolute_import
+
+from functools import wraps
+
+class DummyLock(object):
+    """
+    Hack to allow locks to be unpickled on an unthreaded system.
+    """
+
+    def __reduce__(self):
+        return (unpickle_lock, ())
+
+
+
+def unpickle_lock():
+    if threadingmodule is not None:
+        return XLock()
+    else:
+        return DummyLock()
+unpickle_lock.__safe_for_unpickling__ = True
+
+
+
+def _synchPre(self):
+    if '_threadable_lock' not in self.__dict__:
+        _synchLockCreator.acquire()
+        if '_threadable_lock' not in self.__dict__:
+            self.__dict__['_threadable_lock'] = XLock()
+        _synchLockCreator.release()
+    self._threadable_lock.acquire()
+
+
+
+def _synchPost(self):
+    self._threadable_lock.release()
+
+
+
+def _sync(klass, function):
+    @wraps(function)
+    def sync(self, *args, **kwargs):
+        _synchPre(self)
+        try:
+            return function(self, *args, **kwargs)
+        finally:
+            _synchPost(self)
+    return sync
+
+
+
+def synchronize(*klasses):
+    """
+    Make all methods listed in each class' synchronized attribute synchronized.
+
+    The synchronized attribute should be a list of strings, consisting of the
+    names of methods that must be synchronized. If we are running in threaded
+    mode these methods will be wrapped with a lock.
+    """
+    if threadingmodule is not None:
+        for klass in klasses:
+            for methodName in klass.synchronized:
+                sync = _sync(klass, klass.__dict__[methodName])
+                setattr(klass, methodName, sync)
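+
+# Editor's note: an illustrative sketch (not part of upstream Twisted), assuming
+# threads are available:
+#
+#     class Counter:
+#         synchronized = ['increment']
+#         def __init__(self):
+#             self.n = 0
+#         def increment(self):
+#             self.n += 1
+#
+#     synchronize(Counter)   # increment() now runs under a per-instance lock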
+
+
+
+def init(with_threads=1):
+    """Initialize threading.
+
+    Don't bother calling this.  If it needs to happen, it will happen.
+    """
+    global threaded, _synchLockCreator, XLock
+
+    if with_threads:
+        if not threaded:
+            if threadingmodule is not None:
+                threaded = True
+
+                class XLock(threadingmodule._RLock, object):
+                    def __reduce__(self):
+                        return (unpickle_lock, ())
+
+                _synchLockCreator = XLock()
+            else:
+                raise RuntimeError("Cannot initialize threading, platform lacks thread support")
+    else:
+        if threaded:
+            raise RuntimeError("Cannot uninitialize threads")
+        else:
+            pass
+
+
+
+_dummyID = object()
+def getThreadID():
+    if threadingmodule is None:
+        return _dummyID
+    return threadingmodule.currentThread().ident
+
+
+
+def isInIOThread():
+    """Are we in the thread responsable for I/O requests (the event loop)?
+    """
+    return ioThread == getThreadID()
+
+
+
+def registerAsIOThread():
+    """Mark the current thread as responsable for I/O requests.
+    """
+    global ioThread
+    ioThread = getThreadID()
+
+
+ioThread = None
+threaded = False
+
+
+
+try:
+    import threading as threadingmodule
+except ImportError:
+    threadingmodule = None
+else:
+    init(True)
+
+
+
+__all__ = ['isInIOThread', 'registerAsIOThread', 'getThreadID', 'XLock']
diff --git a/ThirdParty/Twisted/twisted/python/threadpool.py b/ThirdParty/Twisted/twisted/python/threadpool.py
new file mode 100644
index 0000000..a98e660
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/threadpool.py
@@ -0,0 +1,245 @@
+# -*- test-case-name: twisted.test.test_threadpool -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+twisted.python.threadpool: a pool of threads to which we dispatch tasks.
+
+In most cases you can just use C{reactor.callInThread} and friends
+instead of creating a thread pool directly.
+"""
+
+from __future__ import division, absolute_import
+
+try:
+    from Queue import Queue
+except ImportError:
+    from queue import Queue
+import threading
+import copy
+
+from twisted.python import log, context, failure
+
+
+WorkerStop = object()
+
+
+class ThreadPool:
+    """
+    This class (hopefully) generalizes the functionality of a pool of
+    threads to which work can be dispatched.
+
+    L{callInThread} and L{stop} should only be called from
+    a single thread, unless you make a subclass where L{stop} and
+    L{_startSomeWorkers} are synchronized.
+    """
+    min = 5
+    max = 20
+    joined = False
+    started = False
+    workers = 0
+    name = None
+
+    threadFactory = threading.Thread
+    currentThread = staticmethod(threading.currentThread)
+
+    def __init__(self, minthreads=5, maxthreads=20, name=None):
+        """
+        Create a new threadpool.
+
+        @param minthreads: minimum number of threads in the pool
+        @param maxthreads: maximum number of threads in the pool
+        """
+        assert minthreads >= 0, 'minimum is negative'
+        assert minthreads <= maxthreads, 'minimum is greater than maximum'
+        self.q = Queue(0)
+        self.min = minthreads
+        self.max = maxthreads
+        self.name = name
+        self.waiters = []
+        self.threads = []
+        self.working = []
+
+
+    def start(self):
+        """
+        Start the threadpool.
+        """
+        self.joined = False
+        self.started = True
+        # Start some threads.
+        self.adjustPoolsize()
+
+
+    def startAWorker(self):
+        self.workers += 1
+        name = "PoolThread-%s-%s" % (self.name or id(self), self.workers)
+        newThread = self.threadFactory(target=self._worker, name=name)
+        self.threads.append(newThread)
+        newThread.start()
+
+
+    def stopAWorker(self):
+        self.q.put(WorkerStop)
+        self.workers -= 1
+
+
+    def __setstate__(self, state):
+        self.__dict__ = state
+        ThreadPool.__init__(self, self.min, self.max)
+
+
+    def __getstate__(self):
+        state = {}
+        state['min'] = self.min
+        state['max'] = self.max
+        return state
+
+
+    def _startSomeWorkers(self):
+        neededSize = self.q.qsize() + len(self.working)
+        # Create enough, but not too many
+        while self.workers < min(self.max, neededSize):
+            self.startAWorker()
+
+
+    def callInThread(self, func, *args, **kw):
+        """
+        Call a callable object in a separate thread.
+
+        @param func: callable object to be called in separate thread
+
+        @param *args: positional arguments to be passed to C{func}
+
+        @param **kw: keyword args to be passed to C{func}
+        """
+        self.callInThreadWithCallback(None, func, *args, **kw)
+
+
+    def callInThreadWithCallback(self, onResult, func, *args, **kw):
+        """
+        Call a callable object in a separate thread and call C{onResult}
+        with the return value, or a L{twisted.python.failure.Failure}
+        if the callable raises an exception.
+
+        The callable is allowed to block, but the C{onResult} function
+        must not block and should perform as little work as possible.
+
+        A typical action for C{onResult} for a threadpool used with a
+        Twisted reactor would be to schedule a
+        L{twisted.internet.defer.Deferred} to fire in the main
+        reactor thread using C{.callFromThread}.  Note that C{onResult}
+        is called inside the separate thread, not inside the reactor thread.
+
+        @param onResult: a callable with the signature C{(success, result)}.
+            If the callable returns normally, C{onResult} is called with
+            C{(True, result)} where C{result} is the return value of the
+            callable. If the callable throws an exception, C{onResult} is
+            called with C{(False, failure)}.
+
+            Optionally, C{onResult} may be C{None}, in which case it is not
+            called at all.
+
+        @param func: callable object to be called in separate thread
+
+        @param *args: positional arguments to be passed to C{func}
+
+        @param **kwargs: keyword arguments to be passed to C{func}
+        """
+        if self.joined:
+            return
+        ctx = context.theContextTracker.currentContext().contexts[-1]
+        o = (ctx, func, args, kw, onResult)
+        self.q.put(o)
+        if self.started:
+            self._startSomeWorkers()
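+
+    # Editor's note: a hedged usage sketch (not part of upstream Twisted):
+    #
+    #     def report(success, result):
+    #         print success, result    # runs in the worker thread
+    #
+    #     pool = ThreadPool(minthreads=1, maxthreads=2, name='demo')
+    #     pool.start()
+    #     pool.callInThreadWithCallback(report, pow, 2, 10)
+    #     pool.stop()                  # joins the worker threads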
+
+
+    def _worker(self):
+        """
+        Method used as target of the created threads: retrieve a task to run
+        from the threadpool, run it, and proceed to the next task until
+        threadpool is stopped.
+        """
+        ct = self.currentThread()
+        o = self.q.get()
+        while o is not WorkerStop:
+            self.working.append(ct)
+            ctx, function, args, kwargs, onResult = o
+            del o
+
+            try:
+                result = context.call(ctx, function, *args, **kwargs)
+                success = True
+            except:
+                success = False
+                if onResult is None:
+                    context.call(ctx, log.err)
+                    result = None
+                else:
+                    result = failure.Failure()
+
+            del function, args, kwargs
+
+            self.working.remove(ct)
+
+            if onResult is not None:
+                try:
+                    context.call(ctx, onResult, success, result)
+                except:
+                    context.call(ctx, log.err)
+
+            del ctx, onResult, result
+
+            self.waiters.append(ct)
+            o = self.q.get()
+            self.waiters.remove(ct)
+
+        self.threads.remove(ct)
+
+
+    def stop(self):
+        """
+        Shutdown the threads in the threadpool.
+        """
+        self.joined = True
+        threads = copy.copy(self.threads)
+        while self.workers:
+            self.q.put(WorkerStop)
+            self.workers -= 1
+
+        # and let's just make sure
+        # FIXME: threads that have died before calling stop() are not joined.
+        for thread in threads:
+            thread.join()
+
+
+    def adjustPoolsize(self, minthreads=None, maxthreads=None):
+        if minthreads is None:
+            minthreads = self.min
+        if maxthreads is None:
+            maxthreads = self.max
+
+        assert minthreads >= 0, 'minimum is negative'
+        assert minthreads <= maxthreads, 'minimum is greater than maximum'
+
+        self.min = minthreads
+        self.max = maxthreads
+        if not self.started:
+            return
+
+        # Kill off some threads if we have too many.
+        while self.workers > self.max:
+            self.stopAWorker()
+        # Start some threads if we have too few.
+        while self.workers < self.min:
+            self.startAWorker()
+        # Start some threads if there is a need.
+        self._startSomeWorkers()
+
+
+    def dumpStats(self):
+        log.msg('queue: %s'   % self.q.queue)
+        log.msg('waiters: %s' % self.waiters)
+        log.msg('workers: %s' % self.working)
+        log.msg('total: %s'   % self.threads)
diff --git a/ThirdParty/Twisted/twisted/python/twisted-completion.zsh b/ThirdParty/Twisted/twisted/python/twisted-completion.zsh
new file mode 100644
index 0000000..70cb89e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/twisted-completion.zsh
@@ -0,0 +1,33 @@
+#compdef twistd trial conch cftp tapconvert ckeygen lore pyhtmlizer tap2deb tkconch manhole tap2rpm
+#
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked all
+# over the user's terminal if completing options for a deprecated command.
+# Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/urlpath.py b/ThirdParty/Twisted/twisted/python/urlpath.py
new file mode 100644
index 0000000..1c15f09
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/urlpath.py
@@ -0,0 +1,122 @@
+# -*- test-case-name: twisted.test.test_paths -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+import urlparse
+import urllib
+
+class URLPath:
+    def __init__(self, scheme='', netloc='localhost', path='',
+                 query='', fragment=''):
+        self.scheme = scheme or 'http'
+        self.netloc = netloc
+        self.path = path or '/'
+        self.query = query
+        self.fragment = fragment
+
+    _qpathlist = None
+    _uqpathlist = None
+    
+    def pathList(self, unquote=0, copy=1):
+        if self._qpathlist is None:
+            self._qpathlist = self.path.split('/')
+            self._uqpathlist = map(urllib.unquote, self._qpathlist)
+        if unquote:
+            result = self._uqpathlist
+        else:
+            result = self._qpathlist
+        if copy:
+            return result[:]
+        else:
+            return result
+
+    def fromString(klass, st):
+        t = urlparse.urlsplit(st)
+        u = klass(*t)
+        return u
+
+    fromString = classmethod(fromString)
+
+    def fromRequest(klass, request):
+        return klass.fromString(request.prePathURL())
+
+    fromRequest = classmethod(fromRequest)
+
+    def _pathMod(self, newpathsegs, keepQuery):
+        if keepQuery:
+            query = self.query
+        else:
+            query = ''
+        return URLPath(self.scheme,
+                        self.netloc,
+                        '/'.join(newpathsegs),
+                        query)
+
+    def sibling(self, path, keepQuery=0):
+        l = self.pathList()
+        l[-1] = path
+        return self._pathMod(l, keepQuery)
+
+    def child(self, path, keepQuery=0):
+        l = self.pathList()
+        if l[-1] == '':
+            l[-1] = path
+        else:
+            l.append(path)
+        return self._pathMod(l, keepQuery)
+
+    def parent(self, keepQuery=0):
+        l = self.pathList()
+        if l[-1] == '':
+            del l[-2]
+        else:
+            # We are a file, such as http://example.com/foo/bar
+            # our parent directory is http://example.com/
+            l.pop()
+            l[-1] = ''
+        return self._pathMod(l, keepQuery)
+
+    def here(self, keepQuery=0):
+        l = self.pathList()
+        if l[-1] != '':
+            l[-1] = ''
+        return self._pathMod(l, keepQuery)
+
+    def click(self, st):
+        """Return a path which is the URL where a browser would presumably take
+        you if you clicked on a link with an HREF as given.
+        """
+        scheme, netloc, path, query, fragment = urlparse.urlsplit(st)
+        if not scheme:
+            scheme = self.scheme
+        if not netloc:
+            netloc = self.netloc
+            if not path:
+                path = self.path
+                if not query:
+                    query = self.query
+            elif path[0] != '/':
+                l = self.pathList()
+                l[-1] = path
+                path = '/'.join(l)
+        
+        return URLPath(scheme,
+                        netloc,
+                        path,
+                        query,
+                        fragment)
+
+
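+    # Illustrative sketch (not upstream code): how the navigation helpers
+    # above combine, as read from the methods in this class.
+    #
+    #   base = URLPath.fromString('http://example.com/a/b')
+    #   str(base.child('c'))    # 'http://example.com/a/b/c'
+    #   str(base.sibling('c'))  # 'http://example.com/a/c'
+    #   str(base.click('c'))    # 'http://example.com/a/c' (relative HREF)
+    #   str(base.parent())      # 'http://example.com/'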
+    
+    def __str__(self):
+        x = urlparse.urlunsplit((
+            self.scheme, self.netloc, self.path,
+            self.query, self.fragment))
+        return x
+
+    def __repr__(self):
+        return ('URLPath(scheme=%r, netloc=%r, path=%r, query=%r, fragment=%r)'
+                % (self.scheme, self.netloc, self.path, self.query, self.fragment))
+
diff --git a/ThirdParty/Twisted/twisted/python/usage.py b/ThirdParty/Twisted/twisted/python/usage.py
new file mode 100644
index 0000000..9280ae2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/usage.py
@@ -0,0 +1,973 @@
+# -*- test-case-name: twisted.test.test_usage -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+twisted.python.usage is a module for parsing/handling the
+command line of your program.
+
+For information on how to use it, see
+U{http://twistedmatrix.com/projects/core/documentation/howto/options.html},
+or doc/core/howto/options.xhtml in your Twisted directory.
+"""
+
+# System Imports
+import os
+import sys
+import getopt
+from os import path
+
+# Sibling Imports
+from twisted.python import reflect, text, util
+
+
+class UsageError(Exception):
+    pass
+
+
+error = UsageError
+
+
+class CoerceParameter(object):
+    """
+    Utility class that can coerce a parameter before storing it.
+    """
+    def __init__(self, options, coerce):
+        """
+        @param options: parent Options object
+        @param coerce: callable used to coerce the value.
+        """
+        self.options = options
+        self.coerce = coerce
+        self.doc = getattr(self.coerce, 'coerceDoc', '')
+
+    def dispatch(self, parameterName, value):
+        """
+        When called during dispatch, coerce C{value} and store the
+        returned value in the parent options.
+        """
+        if value is None:
+            raise UsageError("Parameter '%s' requires an argument."
+                             % (parameterName,))
+        try:
+            value = self.coerce(value)
+        except ValueError, e:
+            raise UsageError("Parameter type enforcement failed: %s" % (e,))
+
+        self.options.opts[parameterName] = value
+
+
+class Options(dict):
+    """
+    An option list parser class
+
+    C{optFlags} and C{optParameters} are lists of available parameters
+    which your program can handle. The difference between the two
+    is that 'flags' have an on (1) or off (0) state (off by default)
+    whereas 'parameters' have an assigned value, with an optional
+    default. (Compare '--verbose' and '--verbosity=2')
+
+    optFlags is assigned a list of lists. Each list represents
+    a flag parameter, as so::
+
+    |    optFlags = [['verbose', 'v', 'Makes it tell you what it is doing.'],
+    |                ['quiet', 'q', 'Be vewy vewy quiet.']]
+
+    As you can see, the first item is the long option name
+    (prefixed with '--' on the command line), followed by the
+    short option name (prefixed with '-'), and the description.
+    The description is used for the built-in handling of the
+    --help switch, which prints a usage summary.
+
+    C{optParameters} is much the same, except the list also contains
+    a default value::
+
+    | optParameters = [['outfile', 'O', 'outfile.log', 'Description...']]
+
+    A coerce function can also be specified as the last element: it will be
+    called with the argument and should return the value that will be stored
+    for the option. This function can have a C{coerceDoc} attribute which
+    will be appended to the documentation of the option.
+
+    subCommands is a list of 4-tuples of (command name, command shortcut,
+    parser class, documentation).  If the first non-option argument found is
+    one of the given command names, an instance of the given parser class is
+    instantiated and given the remainder of the arguments to parse and
+    self.opts[command] is set to the command name.  For example::
+
+    | subCommands = [
+    |      ['inquisition', 'inquest', InquisitionOptions,
+    |           'Perform an inquisition'],
+    |      ['holyquest', 'quest', HolyQuestOptions,
+    |           'Embark upon a holy quest']
+    |  ]
+
+    In this case, C{"<program> holyquest --horseback --for-grail"} will cause
+    C{HolyQuestOptions} to be instantiated and asked to parse
+    C{['--horseback', '--for-grail']}.  Currently, only the first sub-command
+    is parsed, and all options following it are passed to its parser.  If a
+    subcommand is found, the subCommand attribute is set to its name and the
+    subOptions attribute is set to the Options instance that parses the
+    remaining options. If a subcommand is not given to parseOptions,
+    the subCommand attribute will be None. You can also mark one of
+    the subCommands to be the default::
+
+    | defaultSubCommand = 'holyquest'
+
+    In this case, the subCommand attribute will never be None, and
+    the subOptions attribute will always be set.
+
+    If you want to handle your own options, define a method named
+    C{opt_paramname} that takes C{(self, option)} as arguments. C{option}
+    will be whatever immediately follows the parameter on the
+    command line. Options fully supports the mapping interface, so you
+    can do things like C{'self["option"] = val'} in these methods.
+
+    Shell tab-completion is supported by this class, for zsh only at present.
+    Zsh ships with a stub file ("completion function") which, for Twisted
+    commands, performs tab-completion on-the-fly using the support provided
+    by this class. The stub file lives in our tree at
+    C{twisted/python/twisted-completion.zsh}, and in the Zsh tree at
+    C{Completion/Unix/Command/_twisted}.
+
+    Tab-completion is based upon the contents of the optFlags and
+    optParameters lists. Optionally, additional metadata may be provided by
+    assigning a special attribute, C{compData}, which should be an instance
+    of C{Completions}. See that class for details of what can and should be
+    included - and see the howto for additional help using these features -
+    including how third parties may take advantage of tab-completion for
+    their own commands.
+
+    Advanced functionality is covered in the howto documentation,
+    available at
+    U{http://twistedmatrix.com/projects/core/documentation/howto/options.html},
+    or doc/core/howto/options.xhtml in your Twisted directory.
+    """
+
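+    # Illustrative sketch (not upstream code): a minimal Options subclass
+    # combining the pieces described above. All names are hypothetical.
+    #
+    #   class MyOptions(Options):
+    #       optFlags = [['verbose', 'v', 'Be chatty.']]
+    #       optParameters = [['outfile', 'o', 'out.log', 'Log file.']]
+    #
+    #       def opt_prefix(self, value):
+    #           """Set a prefix string."""
+    #           self['prefix'] = value
+    #
+    #   config = MyOptions()
+    #   config.parseOptions(['--verbose', '--outfile=run.log'])
+    #   config['verbose']   # 1
+    #   config['outfile']   # 'run.log'
+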
+    subCommand = None
+    defaultSubCommand = None
+    parent = None
+    completionData = None
+    _shellCompFile = sys.stdout # file to use if shell completion is requested
+    def __init__(self):
+        super(Options, self).__init__()
+
+        self.opts = self
+        self.defaults = {}
+
+        # These are strings/lists we will pass to getopt
+        self.longOpt = []
+        self.shortOpt = ''
+        self.docs = {}
+        self.synonyms = {}
+        self._dispatch = {}
+
+
+        collectors = [
+            self._gather_flags,
+            self._gather_parameters,
+            self._gather_handlers,
+            ]
+
+        for c in collectors:
+            (longOpt, shortOpt, docs, settings, synonyms, dispatch) = c()
+            self.longOpt.extend(longOpt)
+            self.shortOpt = self.shortOpt + shortOpt
+            self.docs.update(docs)
+
+            self.opts.update(settings)
+            self.defaults.update(settings)
+
+            self.synonyms.update(synonyms)
+            self._dispatch.update(dispatch)
+
+    def __hash__(self):
+        """
+        Define a custom hash function so that Options instances can be used
+        as dictionary keys.  This is an internal feature used to implement
+        the parser.  Do not rely on it in application code.
+        """
+        return int(id(self) % sys.maxint)
+
+    def opt_help(self):
+        """
+        Display this help and exit.
+        """
+        print self.__str__()
+        sys.exit(0)
+
+    def opt_version(self):
+        """
+        Display Twisted version and exit.
+        """
+        from twisted import copyright
+        print "Twisted version:", copyright.version
+        sys.exit(0)
+
+    #opt_h = opt_help # this conflicted with existing 'host' options.
+
+    def parseOptions(self, options=None):
+        """
+        The guts of the command-line parser.
+        """
+
+        if options is None:
+            options = sys.argv[1:]
+
+        # we really do need to place the shell completion check here, because
+        # if we used an opt_shell_completion method then it would be possible
+        # for other opt_* methods to be run first, and they could possibly
+        # raise validation errors which would result in error output on the
+        # terminal of the user performing shell completion. Validation errors
+        # would occur quite frequently, in fact, because users often initiate
+        # tab-completion while they are editing an unfinished command-line.
+        if len(options) > 1 and options[-2] == "--_shell-completion":
+            from twisted.python import _shellcomp
+            cmdName = path.basename(sys.argv[0])
+            _shellcomp.shellComplete(self, cmdName, options,
+                                     self._shellCompFile)
+            sys.exit(0)
+
+        try:
+            opts, args = getopt.getopt(options,
+                                       self.shortOpt, self.longOpt)
+        except getopt.error, e:
+            raise UsageError(str(e))
+
+        for opt, arg in opts:
+            if opt[1] == '-':
+                opt = opt[2:]
+            else:
+                opt = opt[1:]
+
+            optMangled = opt
+            if optMangled not in self.synonyms:
+                optMangled = opt.replace("-", "_")
+                if optMangled not in self.synonyms:
+                    raise UsageError("No such option '%s'" % (opt,))
+
+            optMangled = self.synonyms[optMangled]
+            if isinstance(self._dispatch[optMangled], CoerceParameter):
+                self._dispatch[optMangled].dispatch(optMangled, arg)
+            else:
+                self._dispatch[optMangled](optMangled, arg)
+
+        if (getattr(self, 'subCommands', None)
+            and (args or self.defaultSubCommand is not None)):
+            if not args:
+                args = [self.defaultSubCommand]
+            sub, rest = args[0], args[1:]
+            for (cmd, short, parser, doc) in self.subCommands:
+                if sub == cmd or sub == short:
+                    self.subCommand = cmd
+                    self.subOptions = parser()
+                    self.subOptions.parent = self
+                    self.subOptions.parseOptions(rest)
+                    break
+            else:
+                raise UsageError("Unknown command: %s" % sub)
+        else:
+            try:
+                self.parseArgs(*args)
+            except TypeError:
+                raise UsageError("Wrong number of arguments.")
+
+        self.postOptions()
+
+    def postOptions(self):
+        """
+        I am called after the options are parsed.
+
+        Override this method in your subclass to do something after
+        the options have been parsed and assigned, like validate that
+        all options are sane.
+        """
+
+    def parseArgs(self):
+        """
+        I am called with any leftover arguments which were not options.
+
+        Override me to do something with the remaining arguments on
+        the command line, those which were not flags or options, e.g.
+        interpret them as a list of files to operate on.
+
+        Note that if there are more arguments on the command line
+        than this method accepts, parseArgs will blow up with
+        a getopt.error.  This means if you don't override me,
+        parseArgs will blow up if I am passed any arguments at
+        all!
+        """
+
+    def _generic_flag(self, flagName, value=None):
+        if value not in ('', None):
+            raise UsageError("Flag '%s' takes no argument."
+                             " Not even \"%s\"." % (flagName, value))
+
+        self.opts[flagName] = 1
+
+    def _gather_flags(self):
+        """
+        Gather up boolean (flag) options.
+        """
+
+        longOpt, shortOpt = [], ''
+        docs, settings, synonyms, dispatch = {}, {}, {}, {}
+
+        flags = []
+        reflect.accumulateClassList(self.__class__, 'optFlags', flags)
+
+        for flag in flags:
+            long, short, doc = util.padTo(3, flag)
+            if not long:
+                raise ValueError("A flag cannot be without a name.")
+
+            docs[long] = doc
+            settings[long] = 0
+            if short:
+                shortOpt = shortOpt + short
+                synonyms[short] = long
+            longOpt.append(long)
+            synonyms[long] = long
+            dispatch[long] = self._generic_flag
+
+        return longOpt, shortOpt, docs, settings, synonyms, dispatch
+
+    def _gather_parameters(self):
+        """
+        Gather options which take a value.
+        """
+        longOpt, shortOpt = [], ''
+        docs, settings, synonyms, dispatch = {}, {}, {}, {}
+
+        parameters = []
+
+        reflect.accumulateClassList(self.__class__, 'optParameters',
+                                    parameters)
+
+        synonyms = {}
+
+        for parameter in parameters:
+            long, short, default, doc, paramType = util.padTo(5, parameter)
+            if not long:
+                raise ValueError("A parameter cannot be without a name.")
+
+            docs[long] = doc
+            settings[long] = default
+            if short:
+                shortOpt = shortOpt + short + ':'
+                synonyms[short] = long
+            longOpt.append(long + '=')
+            synonyms[long] = long
+            if paramType is not None:
+                dispatch[long] = CoerceParameter(self, paramType)
+            else:
+                dispatch[long] = CoerceParameter(self, str)
+
+        return longOpt, shortOpt, docs, settings, synonyms, dispatch
+
+
+    def _gather_handlers(self):
+        """
+        Gather up options with their own handler methods.
+
+        This returns a tuple of many values.  Amongst those values is a
+        synonyms dictionary, mapping all of the possible aliases (C{str})
+        for an option to the longest spelling of that option's name
+        (C{str}).
+
+        Another element is a dispatch dictionary, mapping each user-facing
+        option name (with - substituted for _) to a callable to handle that
+        option.
+        """
+
+        longOpt, shortOpt = [], ''
+        docs, settings, synonyms, dispatch = {}, {}, {}, {}
+
+        dct = {}
+        reflect.addMethodNamesToDict(self.__class__, dct, "opt_")
+
+        for name in dct.keys():
+            method = getattr(self, 'opt_'+name)
+
+            takesArg = not flagFunction(method, name)
+
+            prettyName = name.replace('_', '-')
+            doc = getattr(method, '__doc__', None)
+            if doc:
+                ## Only use the first line.
+                #docs[name] = doc.split('\n')[0]
+                docs[prettyName] = doc
+            else:
+                docs[prettyName] = self.docs.get(prettyName)
+
+            synonyms[prettyName] = prettyName
+
+            # A little sleight-of-hand here makes dispatching much easier
+            # in parseOptions, as it makes all option-methods have the
+            # same signature.
+            if takesArg:
+                fn = lambda name, value, m=method: m(value)
+            else:
+                # XXX: This won't raise a TypeError if it's called
+                # with a value when it shouldn't be.
+                fn = lambda name, value=None, m=method: m()
+
+            dispatch[prettyName] = fn
+
+            if len(name) == 1:
+                shortOpt = shortOpt + name
+                if takesArg:
+                    shortOpt = shortOpt + ':'
+            else:
+                if takesArg:
+                    prettyName = prettyName + '='
+                longOpt.append(prettyName)
+
+        reverse_dct = {}
+        # Map synonyms
+        for name in dct.keys():
+            method = getattr(self, 'opt_' + name)
+            if method not in reverse_dct:
+                reverse_dct[method] = []
+            reverse_dct[method].append(name.replace('_', '-'))
+
+        cmpLength = lambda a, b: cmp(len(a), len(b))
+
+        for method, names in reverse_dct.items():
+            if len(names) < 2:
+                continue
+            names_ = names[:]
+            names_.sort(cmpLength)
+            longest = names_.pop()
+            for name in names_:
+                synonyms[name] = longest
+
+        return longOpt, shortOpt, docs, settings, synonyms, dispatch
+
+
+    def __str__(self):
+        return self.getSynopsis() + '\n' + self.getUsage(width=None)
+
+    def getSynopsis(self):
+        """
+        Returns a string containing a description of these options and how to
+        pass them to the executed file.
+        """
+
+        default = "%s%s" % (path.basename(sys.argv[0]),
+                            (self.longOpt and " [options]") or '')
+        if self.parent is None:
+            default = "Usage: %s%s" % (path.basename(sys.argv[0]),
+                                       (self.longOpt and " [options]") or '')
+        else:
+            default = '%s' % ((self.longOpt and "[options]") or '')
+        synopsis = getattr(self, "synopsis", default)
+
+        synopsis = synopsis.rstrip()
+
+        if self.parent is not None:
+            synopsis = ' '.join((self.parent.getSynopsis(),
+                                 self.parent.subCommand, synopsis))
+
+        return synopsis
+
+    def getUsage(self, width=None):
+        # If subOptions exists by now, then there was probably an error while
+        # parsing its options.
+        if hasattr(self, 'subOptions'):
+            return self.subOptions.getUsage(width=width)
+
+        if not width:
+            width = int(os.environ.get('COLUMNS', '80'))
+
+        if hasattr(self, 'subCommands'):
+            cmdDicts = []
+            for (cmd, short, parser, desc) in self.subCommands:
+                cmdDicts.append(
+                    {'long': cmd,
+                     'short': short,
+                     'doc': desc,
+                     'optType': 'command',
+                     'default': None
+                    })
+            chunks = docMakeChunks(cmdDicts, width)
+            commands = 'Commands:\n' + ''.join(chunks)
+        else:
+            commands = ''
+
+        longToShort = {}
+        for key, value in self.synonyms.items():
+            longname = value
+            if (key != longname) and (len(key) == 1):
+                longToShort[longname] = key
+            else:
+                if longname not in longToShort:
+                    longToShort[longname] = None
+                else:
+                    pass
+
+        optDicts = []
+        for opt in self.longOpt:
+            if opt[-1] == '=':
+                optType = 'parameter'
+                opt = opt[:-1]
+            else:
+                optType = 'flag'
+
+            optDicts.append(
+                {'long': opt,
+                 'short': longToShort[opt],
+                 'doc': self.docs[opt],
+                 'optType': optType,
+                 'default': self.defaults.get(opt, None),
+                 'dispatch': self._dispatch.get(opt, None)
+                 })
+
+        if not (getattr(self, "longdesc", None) is None):
+            longdesc = self.longdesc
+        else:
+            import __main__
+            if getattr(__main__, '__doc__', None):
+                longdesc = __main__.__doc__
+            else:
+                longdesc = ''
+
+        if longdesc:
+            longdesc = ('\n' +
+                        '\n'.join(text.wordWrap(longdesc, width)).strip()
+                        + '\n')
+
+        if optDicts:
+            chunks = docMakeChunks(optDicts, width)
+            s = "Options:\n%s" % (''.join(chunks))
+        else:
+            s = "Options: None\n"
+
+        return s + longdesc + commands
+
+    #def __repr__(self):
+    #    XXX: It'd be cool if we could return a succinct representation
+    #        of which flags and options are set here.
+
+
+_ZSH = 'zsh'
+_BASH = 'bash'
+
+class Completer(object):
+    """
+    A completion "action" - provides completion possibilities for a particular
+    command-line option. For example, we might provide the user with a fixed
+    list of choices, or files/dirs according to a glob.
+
+    This class produces no completion matches itself - see the various
+    subclasses for specific completion functionality.
+    """
+    _descr = None
+    def __init__(self, descr=None, repeat=False):
+        """
+        @type descr: C{str}
+        @param descr: An optional descriptive string displayed above matches.
+
+        @type repeat: C{bool}
+        @param repeat: A flag, defaulting to False, indicating whether this
+            C{Completer} should repeat - that is, be used to complete more
+            than one command-line word. This may ONLY be set to True for
+            actions in the C{extraActions} keyword argument to C{Completions},
+            and ONLY if it is the LAST (or only) action in the C{extraActions}
+            list.
+        """
+        if descr is not None:
+            self._descr = descr
+        self._repeat = repeat
+
+
+    def _getRepeatFlag(self):
+        if self._repeat:
+            return "*"
+        else:
+            return ""
+    _repeatFlag = property(_getRepeatFlag)
+
+
+    def _description(self, optName):
+        if self._descr is not None:
+            return self._descr
+        else:
+            return optName
+
+
+    def _shellCode(self, optName, shellType):
+        """
+        Fetch a fragment of shell code representing this action which is
+        suitable for use by the completion system in _shellcomp.py
+
+        @type optName: C{str}
+        @param optName: The long name of the option this action is being
+            used for.
+
+        @type shellType: C{str}
+        @param shellType: One of the supported shell constants e.g.
+            C{twisted.python.usage._ZSH}
+        """
+        if shellType == _ZSH:
+            return "%s:%s:" % (self._repeatFlag,
+                               self._description(optName))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteFiles(Completer):
+    """
+    Completes file names based on a glob pattern
+    """
+    def __init__(self, globPattern='*', **kw):
+        Completer.__init__(self, **kw)
+        self._globPattern = globPattern
+
+
+    def _description(self, optName):
+        if self._descr is not None:
+            return "%s (%s)" % (self._descr, self._globPattern)
+        else:
+            return "%s (%s)" % (optName, self._globPattern)
+
+
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            return "%s:%s:_files -g \"%s\"" % (self._repeatFlag,
+                                               self._description(optName),
+                                               self._globPattern,)
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteDirs(Completer):
+    """
+    Completes directory names
+    """
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            return "%s:%s:_directories" % (self._repeatFlag,
+                                           self._description(optName))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteList(Completer):
+    """
+    Completes based on a fixed list of words
+    """
+    def __init__(self, items, **kw):
+        Completer.__init__(self, **kw)
+        self._items = items
+
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            return "%s:%s:(%s)" % (self._repeatFlag,
+                                   self._description(optName),
+                                   " ".join(self._items,))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteMultiList(Completer):
+    """
+    Completes multiple comma-separated items based on a fixed list of words
+    """
+    def __init__(self, items, **kw):
+        Completer.__init__(self, **kw)
+        self._items = items
+
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            return "%s:%s:_values -s , '%s' %s" % (self._repeatFlag,
+                                                   self._description(optName),
+                                                   self._description(optName),
+                                                   " ".join(self._items))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteUsernames(Completer):
+    """
+    Complete usernames
+    """
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            return "%s:%s:_users" % (self._repeatFlag,
+                                     self._description(optName))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteGroups(Completer):
+    """
+    Complete system group names
+    """
+    _descr = 'group'
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            return "%s:%s:_groups" % (self._repeatFlag,
+                                      self._description(optName))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteHostnames(Completer):
+    """
+    Complete hostnames
+    """
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            return "%s:%s:_hosts" % (self._repeatFlag,
+                                     self._description(optName))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteUserAtHost(Completer):
+    """
+    A completion action which produces matches in any of these forms::
+        <username>
+        <hostname>
+        <username>@<hostname>
+    """
+    _descr = 'host | user@host'
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            # Yes this looks insane but it does work. For bonus points
+            # add code to grep 'Hostname' lines from ~/.ssh/config
+            return ('%s:%s:{_ssh;if compset -P "*@"; '
+                    'then _wanted hosts expl "remote host name" _ssh_hosts '
+                    '&& ret=0 elif compset -S "@*"; then _wanted users '
+                    'expl "login name" _ssh_users -S "" && ret=0 '
+                    'else if (( $+opt_args[-l] )); then tmp=() '
+                    'else tmp=( "users:login name:_ssh_users -qS@" ) fi; '
+                    '_alternative "hosts:remote host name:_ssh_hosts" "$tmp[@]"'
+                    ' && ret=0 fi}' % (self._repeatFlag,
+                                       self._description(optName)))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class CompleteNetInterfaces(Completer):
+    """
+    Complete network interface names
+    """
+    def _shellCode(self, optName, shellType):
+        if shellType == _ZSH:
+            return "%s:%s:_net_interfaces" % (self._repeatFlag,
+                                              self._description(optName))
+        raise NotImplementedError("Unknown shellType %r" % (shellType,))
+
+
+
+class Completions(object):
+    """
+    Extra metadata for the shell tab-completion system.
+
+    @type descriptions: C{dict}
+    @ivar descriptions: ex. C{{"foo" : "use this description for foo instead"}}
+        A dict mapping long option names to alternate descriptions.  When this
+        variable is defined, the descriptions contained here will override
+        those descriptions provided in the optFlags and optParameters
+        variables.
+
+    @type multiUse: C{list}
+    @ivar multiUse: ex. C{ ["foo", "bar"] }
+        An iterable containing those long option names which may appear on the
+        command line more than once. By default, options will only be completed
+        one time.
+
+    @type mutuallyExclusive: C{list} of C{tuple}
+    @ivar mutuallyExclusive: ex. C{ [("foo", "bar"), ("bar", "baz")] }
+        A sequence of sequences, with each sub-sequence containing those long
+        option names that are mutually exclusive. That is, those options that
+        cannot appear on the command line together.
+
+    @type optActions: C{dict}
+    @ivar optActions: A dict mapping long option names to shell "actions".
+        These actions define what may be completed as the argument to the
+        given option. By default, all files/dirs will be completed if no
+        action is given. For example::
+
+            {"foo"    : CompleteFiles("*.py", descr="python files"),
+             "bar"    : CompleteList(["one", "two", "three"]),
+             "colors" : CompleteMultiList(["red", "green", "blue"])}
+
+        Callables may instead be given for the values in this dict. The
+        callable should accept no arguments, and return a C{Completer}
+        instance used as the action in the same way as the literal actions in
+        the example above.
+
+        As you can see in the example above, the "foo" option will have files
+        that end in .py completed when the user presses Tab. The "bar"
+        option will have either of the strings "one", "two", or "three"
+        completed when the user presses Tab.
+
+        "colors" will allow multiple arguments to be completed, separated by
+        commas. The possible arguments are red, green, and blue. Examples::
+
+            my_command --foo some-file.foo --colors=red,green
+            my_command --colors=green
+            my_command --colors=green,blue
+
+        Descriptions for the actions may be given with the optional C{descr}
+        keyword argument. This is separate from the description of the option
+        itself.
+
+        Normally Zsh does not show these descriptions unless you have
+        "verbose" completion turned on. Turn on verbosity with this in your
+        ~/.zshrc::
+
+            zstyle ':completion:*' verbose yes
+            zstyle ':completion:*:descriptions' format '%B%d%b'
+
+    @type extraActions: C{list}
+    @ivar extraActions: Extra arguments are those arguments typically
+        appearing at the end of the command-line, which are not associated
+        with any particular named option. That is, the arguments that are
+        given to the parseArgs() method of your usage.Options subclass. For
+        example::
+            [CompleteFiles(descr="file to read from"),
+             Completer(descr="book title")]
+
+        In the example above, the 1st non-option argument will be described as
+        "file to read from" and all file/dir names will be completed (*). The
+        2nd non-option argument will be described as "book title", but no
+        actual completion matches will be produced.
+
+        See the various C{Completer} subclasses for other types of things which
+        may be tab-completed (users, groups, network interfaces, etc).
+
+        Also note the C{repeat=True} flag which may be passed to any of the
+        C{Completer} classes. This is set to allow the C{Completer} instance
+        to be re-used for subsequent command-line words. See the C{Completer}
+        docstring for details.
+    """
+    def __init__(self, descriptions={}, multiUse=[],
+                 mutuallyExclusive=[], optActions={}, extraActions=[]):
+        self.descriptions = descriptions
+        self.multiUse = multiUse
+        self.mutuallyExclusive = mutuallyExclusive
+        self.optActions = optActions
+        self.extraActions = extraActions
+
+
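+# Illustrative sketch (not upstream code): attaching completion metadata to an
+# Options subclass via the C{compData} attribute mentioned in the Options
+# docstring above. The class and option names here are hypothetical.
+#
+#   class LogOptions(Options):
+#       optParameters = [['outfile', 'o', 'out.log', 'Log file.']]
+#       compData = Completions(
+#           optActions={'outfile': CompleteFiles('*.log')},
+#           extraActions=[CompleteFiles(descr='file to process')])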
+
+def docMakeChunks(optList, width=80):
+    """
+    Makes doc chunks for option declarations.
+
+    Takes a list of dictionaries, each of which may have one or more
+    of the keys 'long', 'short', 'doc', 'default', 'optType'.
+
+    Returns a list of strings.
+    The strings may be multiple lines,
+    all of them end with a newline.
+    """
+
+    # XXX: sanity check to make sure we have a sane combination of keys.
+
+    maxOptLen = 0
+    for opt in optList:
+        optLen = len(opt.get('long', ''))
+        if optLen:
+            if opt.get('optType', None) == "parameter":
+                # these take up an extra character
+                optLen = optLen + 1
+            maxOptLen = max(optLen, maxOptLen)
+
+    colWidth1 = maxOptLen + len("  -s, --  ")
+    colWidth2 = width - colWidth1
+    # XXX - impose some sane minimum limit.
+    # Then if we don't have enough room for the option and the doc
+    # to share one line, they can take turns on alternating lines.
+
+    colFiller1 = " " * colWidth1
+
+    optChunks = []
+    seen = {}
+    for opt in optList:
+        if opt.get('short', None) in seen or opt.get('long', None) in seen:
+            continue
+        for x in opt.get('short', None), opt.get('long', None):
+            if x is not None:
+                seen[x] = 1
+
+        optLines = []
+        comma = " "
+        if opt.get('short', None):
+            short = "-%c" % (opt['short'],)
+        else:
+            short = ''
+
+        if opt.get('long', None):
+            long = opt['long']
+            if opt.get("optType", None) == "parameter":
+                long = long + '='
+
+            long = "%-*s" % (maxOptLen, long)
+            if short:
+                comma = ","
+        else:
+            long = " " * (maxOptLen + len('--'))
+
+        if opt.get('optType', None) == 'command':
+            column1 = '    %s      ' % long
+        else:
+            column1 = "  %2s%c --%s  " % (short, comma, long)
+
+        if opt.get('doc', ''):
+            doc = opt['doc'].strip()
+        else:
+            doc = ''
+
+        if (opt.get("optType", None) == "parameter") \
+           and not (opt.get('default', None) is None):
+            doc = "%s [default: %s]" % (doc, opt['default'])
+
+        if (opt.get("optType", None) == "parameter") \
+           and opt.get('dispatch', None) is not None:
+            d = opt['dispatch']
+            if isinstance(d, CoerceParameter) and d.doc:
+                doc = "%s. %s" % (doc, d.doc)
+
+        if doc:
+            column2_l = text.wordWrap(doc, colWidth2)
+        else:
+            column2_l = ['']
+
+        optLines.append("%s%s\n" % (column1, column2_l.pop(0)))
+
+        for line in column2_l:
+            optLines.append("%s%s\n" % (colFiller1, line))
+
+        optChunks.append(''.join(optLines))
+
+    return optChunks
+
+
+def flagFunction(method, name=None):
+    reqArgs = method.im_func.func_code.co_argcount
+    if reqArgs > 2:
+        raise UsageError('Invalid Option function for %s' %
+                         (name or method.func_name))
+    if reqArgs == 2:
+        # argName = method.im_func.func_code.co_varnames[1]
+        return 0
+    return 1
+
+
+def portCoerce(value):
+    """
+    Coerce a string value to an int port number and check its validity.
+    """
+    value = int(value)
+    if value < 0 or value > 65535:
+        raise ValueError("Port number not in range: %s" % (value,))
+    return value
+portCoerce.coerceDoc = "Must be an int between 0 and 65535."
+
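+# Illustrative sketch (not upstream code): using portCoerce as the optional
+# fifth (coerce) element of an optParameters entry, as described in the
+# Options docstring above. The class and option names are hypothetical.
+#
+#   class PortOptions(Options):
+#       optParameters = [['port', 'p', 8080, 'Port to listen on.', portCoerce]]
+#
+#   config = PortOptions()
+#   config.parseOptions(['--port', '9999'])
+#   config['port']   # 9999 (an int, validated to be in 0..65535)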
+
diff --git a/ThirdParty/Twisted/twisted/python/util.py b/ThirdParty/Twisted/twisted/python/util.py
new file mode 100644
index 0000000..807ef45
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/util.py
@@ -0,0 +1,754 @@
+# -*- test-case-name: twisted.python.test.test_util -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import os, sys, errno, inspect, warnings
+import types
+try:
+    import pwd, grp
+except ImportError:
+    pwd = grp = None
+try:
+    from os import setgroups, getgroups
+except ImportError:
+    setgroups = getgroups = None
+from UserDict import UserDict
+
+from twisted.python._utilpy3 import (FancyEqMixin, setIDFunction, unsignedID,
+                                     untilConcludes, runWithWarningsSuppressed,
+                                     FancyStrMixin, nameToLabel, InsensitiveDict)
+
+
+
+class OrderedDict(UserDict):
+    """A UserDict that preserves insert order whenever possible."""
+    def __init__(self, dict=None, **kwargs):
+        self._order = []
+        self.data = {}
+        if dict is not None:
+            if hasattr(dict,'keys'):
+                self.update(dict)
+            else:
+                for k,v in dict: # sequence
+                    self[k] = v
+        if len(kwargs):
+            self.update(kwargs)
+    def __repr__(self):
+        return '{'+', '.join([('%r: %r' % item) for item in self.items()])+'}'
+
+    def __setitem__(self, key, value):
+        if not self.has_key(key):
+            self._order.append(key)
+        UserDict.__setitem__(self, key, value)
+
+    def copy(self):
+        return self.__class__(self)
+
+    def __delitem__(self, key):
+        UserDict.__delitem__(self, key)
+        self._order.remove(key)
+
+    def iteritems(self):
+        for item in self._order:
+            yield (item, self[item])
+
+    def items(self):
+        return list(self.iteritems())
+
+    def itervalues(self):
+        for item in self._order:
+            yield self[item]
+
+    def values(self):
+        return list(self.itervalues())
+
+    def iterkeys(self):
+        return iter(self._order)
+
+    def keys(self):
+        return list(self._order)
+
+    def popitem(self):
+        key = self._order[-1]
+        value = self[key]
+        del self[key]
+        return (key, value)
+
+    def setdefault(self, item, default):
+        if self.has_key(item):
+            return self[item]
+        self[item] = default
+        return default
+
+    def update(self, d):
+        for k, v in d.items():
+            self[k] = v
+
+def uniquify(lst):
+    """Make the elements of a list unique by inserting them into a dictionary.
+    This must not change the order of the input lst.
+    """
+    dct = {}
+    result = []
+    for k in lst:
+        if k not in dct:
+            result.append(k)
+        dct[k] = 1
+    return result
+
+def padTo(n, seq, default=None):
+    """
+    Pads a sequence out to n elements, filling in with a default value
+    if it is not long enough.
+
+    If the input sequence is longer than n, raises ValueError.
+
+    Details, details:
+    This returns a new list; it does not extend the original sequence.
+    The new list contains the values of the original sequence, not copies.
+    """
+
+    if len(seq) > n:
+        raise ValueError, "%d elements is more than %d." % (len(seq), n)
+
+    blank = [default] * n
+
+    blank[:len(seq)] = list(seq)
+
+    return blank
+
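+# Illustrative example (derived from the code above, not upstream text):
+#
+#   padTo(3, ['verbose', 'v'])   # ['verbose', 'v', None]
+#   padTo(2, ['a', 'b', 'c'])    # raises ValueError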
+
+def getPluginDirs():
+    warnings.warn(
+        "twisted.python.util.getPluginDirs is deprecated since Twisted 12.2.",
+        DeprecationWarning, stacklevel=2)
+    import twisted
+    systemPlugins = os.path.join(os.path.dirname(os.path.dirname(
+                            os.path.abspath(twisted.__file__))), 'plugins')
+    userPlugins = os.path.expanduser("~/TwistedPlugins")
+    confPlugins = os.path.expanduser("~/.twisted")
+    allPlugins = filter(os.path.isdir, [systemPlugins, userPlugins, confPlugins])
+    return allPlugins
+
+
+def addPluginDir():
+    warnings.warn(
+        "twisted.python.util.addPluginDir is deprecated since Twisted 12.2.",
+        DeprecationWarning, stacklevel=2)
+    sys.path.extend(getPluginDirs())
+
+
+def sibpath(path, sibling):
+    """
+    Return the path to a sibling of a file in the filesystem.
+
+    This is useful in conjunction with the special C{__file__} attribute
+    that Python provides for modules, so modules can load associated
+    resource files.
+    """
+    return os.path.join(os.path.dirname(os.path.abspath(path)), sibling)
+
+
+def _getpass(prompt):
+    """
+    Helper to turn IOErrors into KeyboardInterrupts.
+    """
+    import getpass
+    try:
+        return getpass.getpass(prompt)
+    except IOError, e:
+        if e.errno == errno.EINTR:
+            raise KeyboardInterrupt
+        raise
+    except EOFError:
+        raise KeyboardInterrupt
+
+def getPassword(prompt = 'Password: ', confirm = 0, forceTTY = 0,
+                confirmPrompt = 'Confirm password: ',
+                mismatchMessage = "Passwords don't match."):
+    """Obtain a password by prompting or from stdin.
+
+    If stdin is a terminal, prompt for a new password, and confirm (if
+    C{confirm} is true) by asking again to make sure the user typed the same
+    thing, as keystrokes will not be echoed.
+
+    If stdin is not a terminal, and C{forceTTY} is not true, read in a line
+    and use it as the password, less the trailing newline, if any.  If
+    C{forceTTY} is true, attempt to open a tty and prompt for the password
+    using it.  Raise a RuntimeError if this is not possible.
+
+    @returns: C{str}
+    """
+    isaTTY = hasattr(sys.stdin, 'isatty') and sys.stdin.isatty()
+
+    old = None
+    try:
+        if not isaTTY:
+            if forceTTY:
+                try:
+                    old = sys.stdin, sys.stdout
+                    sys.stdin = sys.stdout = open('/dev/tty', 'r+')
+                except:
+                    raise RuntimeError("Cannot obtain a TTY")
+            else:
+                password = sys.stdin.readline()
+                if password[-1] == '\n':
+                    password = password[:-1]
+                return password
+
+        while 1:
+            try1 = _getpass(prompt)
+            if not confirm:
+                return try1
+            try2 = _getpass(confirmPrompt)
+            if try1 == try2:
+                return try1
+            else:
+                sys.stderr.write(mismatchMessage + "\n")
+    finally:
+        if old:
+            sys.stdin.close()
+            sys.stdin, sys.stdout = old
+
+
+def println(*a):
+    sys.stdout.write(' '.join(map(str, a))+'\n')
+
+# XXX
+# This does not belong here
+# But where does it belong?
+
+def str_xor(s, b):
+    return ''.join([chr(ord(c) ^ b) for c in s])
+
+
+def makeStatBar(width, maxPosition, doneChar = '=', undoneChar = '-', currentChar = '>'):
+    """
+    Creates a function that will return a string representing a progress bar.
+    """
+    aValue = width / float(maxPosition)
+    def statBar(position, force = 0, last = ['']):
+        assert len(last) == 1, "Don't mess with the last parameter."
+        done = int(aValue * position)
+        toDo = width - done - 2
+        result = "[%s%s%s]" % (doneChar * done, currentChar, undoneChar * toDo)
+        if force:
+            last[0] = result
+            return result
+        if result == last[0]:
+            return ''
+        last[0] = result
+        return result
+
+    statBar.__doc__ = """statBar(position, force = 0) -> '[%s%s%s]'-style progress bar
+
+    returned string is %d characters long, and the range goes from 0..%d.
+    The 'position' argument is where the '%s' will be drawn.  If force is false,
+    '' will be returned instead if the resulting progress bar is identical to the
+    previously returned progress bar.
+""" % (doneChar * 3, currentChar, undoneChar * 3, width, maxPosition, currentChar)
+    return statBar
+
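+# Illustrative example (derived from the code above, not upstream text):
+#
+#   bar = makeStatBar(10, 100)
+#   bar(50)   # '[=====>---]'
+#   bar(50)   # '' (unchanged since the previous call and force is false)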
+
+def spewer(frame, s, ignored):
+    """
+    A trace function for sys.settrace that prints every function or method call.
+    """
+    from twisted.python import reflect
+    if frame.f_locals.has_key('self'):
+        se = frame.f_locals['self']
+        if hasattr(se, '__class__'):
+            k = reflect.qual(se.__class__)
+        else:
+            k = reflect.qual(type(se))
+        print 'method %s of %s at %s' % (
+            frame.f_code.co_name, k, id(se)
+        )
+    else:
+        print 'function %s in %s, line %s' % (
+            frame.f_code.co_name,
+            frame.f_code.co_filename,
+            frame.f_lineno)
+
+
+def searchupwards(start, files=[], dirs=[]):
+    """
+    Walk upwards from start, looking for a directory containing
+    all files and directories given as arguments::
+    >>> searchupwards('.', ['foo.txt'], ['bar', 'bam'])
+
+    If not found, return None
+    """
+    start=os.path.abspath(start)
+    parents=start.split(os.sep)
+    exists=os.path.exists; join=os.sep.join; isdir=os.path.isdir
+    while len(parents):
+        candidate=join(parents)+os.sep
+        allpresent=1
+        for f in files:
+            if not exists("%s%s" % (candidate, f)):
+                allpresent=0
+                break
+        if allpresent:
+            for d in dirs:
+                if not isdir("%s%s" % (candidate, d)):
+                    allpresent=0
+                    break
+        if allpresent: return candidate
+        parents.pop(-1)
+    return None
+
+
+class LineLog:
+    """
+    A limited-size line-based log, useful for logging line-based
+    protocols such as SMTP.
+
+    When the log fills up, old entries drop off the end.
+    """
+    def __init__(self, size=10):
+        """
+        Create a new log, with size lines of storage (default 10).
+        A log size of 0 (or less) means an infinite log.
+        """
+        if size < 0:
+            size = 0
+        self.log = [None]*size
+        self.size = size
+
+    def append(self,line):
+        if self.size:
+            self.log[:-1] = self.log[1:]
+            self.log[-1] = line
+        else:
+            self.log.append(line)
+
+    def str(self):
+        return '\n'.join(filter(None,self.log))
+
+    def __getitem__(self, item):
+        return filter(None,self.log)[item]
+
+    def clear(self):
+        """Empty the log"""
+        self.log = [None]*self.size
+
+
+def raises(exception, f, *args, **kwargs):
+    """
+    Determine whether the given call raises the given exception.
+    """
+    try:
+        f(*args, **kwargs)
+    except exception:
+        return 1
+    return 0
+
+
+class IntervalDifferential:
+    """
+    Given a list of intervals, generate the amount of time to sleep between
+    "instants".
+
+    For example, given 7, 11 and 13, the three (infinite) sequences::
+
+        7 14 21 28 35 ...
+        11 22 33 44 ...
+        13 26 39 52 ...
+
+    will be generated, merged, and used to produce::
+
+        (7, 0) (4, 1) (2, 2) (1, 0) (7, 0) (1, 1) (4, 2) (2, 0) (5, 1) (2, 0)
+
+    New intervals may be added or removed as iteration proceeds using the
+    proper methods.
+    """
+
+    def __init__(self, intervals, default=60):
+        """
+        @type intervals: C{list} of C{int}, C{long}, or C{float}
+        @param intervals: The intervals between instants.
+
+        @type default: C{int}, C{long}, or C{float}
+        @param default: The duration to generate if the intervals list
+        becomes empty.
+        """
+        self.intervals = intervals[:]
+        self.default = default
+
+    def __iter__(self):
+        return _IntervalDifferentialIterator(self.intervals, self.default)
+
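+# Illustrative example (derived from the docstring and code above):
+#
+#   it = iter(IntervalDifferential([7, 11, 13]))
+#   it.next()   # (7, 0)
+#   it.next()   # (4, 1)
+#   it.next()   # (2, 2)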
+
+class _IntervalDifferentialIterator:
+    def __init__(self, i, d):
+
+        self.intervals = [[e, e, n] for (e, n) in zip(i, range(len(i)))]
+        self.default = d
+        self.last = 0
+
+    def next(self):
+        if not self.intervals:
+            return (self.default, None)
+        last, index = self.intervals[0][0], self.intervals[0][2]
+        self.intervals[0][0] += self.intervals[0][1]
+        self.intervals.sort()
+        result = last - self.last
+        self.last = last
+        return result, index
+
+    def addInterval(self, i):
+        if self.intervals:
+            delay = self.intervals[0][0] - self.intervals[0][1]
+            self.intervals.append([delay + i, i, len(self.intervals)])
+            self.intervals.sort()
+        else:
+            self.intervals.append([i, i, 0])
+
+    def removeInterval(self, interval):
+        for i in range(len(self.intervals)):
+            if self.intervals[i][1] == interval:
+                index = self.intervals[i][2]
+                del self.intervals[i]
+                for i in self.intervals:
+                    if i[2] > index:
+                        i[2] -= 1
+                return
+        raise ValueError, "Specified interval not in IntervalDifferential"
+
+
+
+try:
+    # Python 2.7 / Python 3.3
+    from os import initgroups as _c_initgroups
+except ImportError:
+    try:
+        # Python 2.6
+        from twisted.python._initgroups import initgroups as _c_initgroups
+    except ImportError:
+        _c_initgroups = None
+
+
+
+if pwd is None or grp is None or setgroups is None or getgroups is None:
+    def initgroups(uid, primaryGid):
+        """
+        Do nothing.
+
+        The underlying platform support required to manipulate groups
+        is missing.
+        """
+else:
+    # Fallback to the inefficient Python version
+    def _setgroups_until_success(l):
+        while(1):
+            # NASTY NASTY HACK (but glibc does it so it must be okay):
+            # In case sysconfig didn't give the right answer, find the limit
+            # on max groups by just looping, trying to set fewer and fewer
+            # groups each time until it succeeds.
+            try:
+                setgroups(l)
+            except ValueError:
+                # This exception comes from python itself restricting
+                # number of groups allowed.
+                if len(l) > 1:
+                    del l[-1]
+                else:
+                    raise
+            except OSError, e:
+                if e.errno == errno.EINVAL and len(l) > 1:
+                    # This comes from the OS saying too many groups
+                    del l[-1]
+                else:
+                    raise
+            else:
+                # Success, yay!
+                return
+
+    def initgroups(uid, primaryGid):
+        """
+        Initializes the group access list.
+
+        If the C extension is present, it is called, which in turn calls
+        initgroups(3).
+
+        If not, this is done by reading the group database /etc/group and using
+        all groups of which C{uid} is a member.  The additional group
+        C{primaryGid} is also added to the list.
+
+        If the given user is a member of more than C{NGROUPS}, arbitrary
+        groups will be silently discarded to bring the number below that
+        limit.
+
+        @type uid: C{int}
+        @param uid: The UID for which to look up group information.
+
+        @type primaryGid: C{int} or C{NoneType}
+        @param primaryGid: If provided, an additional GID to include when
+            setting the groups.
+        """
+        if _c_initgroups is not None:
+            return _c_initgroups(pwd.getpwuid(uid)[0], primaryGid)
+        try:
+            # Try to get the maximum number of groups
+            max_groups = os.sysconf("SC_NGROUPS_MAX")
+        except:
+            # No predefined limit
+            max_groups = 0
+
+        username = pwd.getpwuid(uid)[0]
+        l = []
+        if primaryGid is not None:
+            l.append(primaryGid)
+        for groupname, password, gid, userlist in grp.getgrall():
+            if username in userlist:
+                l.append(gid)
+                if len(l) == max_groups:
+                    break  # Reached the maximum number of groups; ignore the rest.
+        try:
+            _setgroups_until_success(l)
+        except OSError, e:
+            # We might be able to remove this code now that we
+            # don't try to setgid/setuid even when not asked to.
+            if e.errno == errno.EPERM:
+                for g in getgroups():
+                    if g not in l:
+                        raise
+            else:
+                raise
+
+
+
+def switchUID(uid, gid, euid=False):
+    """
+    Attempts to switch the uid/euid and gid/egid for the current process.
+
+    If C{uid} is the same value as L{os.getuid} (or L{os.geteuid}),
+    this function will issue a L{UserWarning} and not raise an exception.
+
+    @type uid: C{int} or C{NoneType}
+    @param uid: the UID (or EUID) to switch the current process to. This
+                parameter will be ignored if the value is C{None}.
+
+    @type gid: C{int} or C{NoneType}
+    @param gid: the GID (or EGID) to switch the current process to. This
+                parameter will be ignored if the value is C{None}.
+
+    @type euid: C{bool}
+    @param euid: if True, set only effective user-id rather than real user-id.
+                 (This option has no effect unless the process is running
+                 as root, in which case it means not to shed all
+                 privileges, retaining the option to regain privileges
+                 in cases such as spawning processes. Use with caution.)
+    """
+    if euid:
+        setuid = os.seteuid
+        setgid = os.setegid
+        getuid = os.geteuid
+    else:
+        setuid = os.setuid
+        setgid = os.setgid
+        getuid = os.getuid
+    if gid is not None:
+        setgid(gid)
+    if uid is not None:
+        if uid == getuid():
+            uidText = (euid and "euid" or "uid")
+            actionText = "tried to drop privileges and set%s %s" % (uidText, uid)
+            problemText = "%s is already %s" % (uidText, getuid())
+            warnings.warn("%s but %s; should we be root? Continuing."
+                          % (actionText, problemText))
+        else:
+            initgroups(uid, gid)
+            setuid(uid)
+
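+
+# Illustrative sketch, not part of upstream Twisted: a daemon started as root
+# typically drops to an unprivileged account once its privileged setup is done.
+# The "nobody"/"nogroup" names below are assumptions about the local system.
+def _exampleDropPrivileges():
+    targetUid = pwd.getpwnam("nobody")[2]
+    targetGid = grp.getgrnam("nogroup")[2]
+    # switchUID calls initgroups() first, so the supplementary group list is
+    # rebuilt for the target user before the uid is changed.
+    switchUID(targetUid, targetGid)
+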
+
+class SubclassableCStringIO(object):
+    """
+    A wrapper around cStringIO to allow for subclassing.
+    """
+    __csio = None
+
+    def __init__(self, *a, **kw):
+        from cStringIO import StringIO
+        self.__csio = StringIO(*a, **kw)
+
+    def __iter__(self):
+        return self.__csio.__iter__()
+
+    def next(self):
+        return self.__csio.next()
+
+    def close(self):
+        return self.__csio.close()
+
+    def isatty(self):
+        return self.__csio.isatty()
+
+    def seek(self, pos, mode=0):
+        return self.__csio.seek(pos, mode)
+
+    def tell(self):
+        return self.__csio.tell()
+
+    def read(self, n=-1):
+        return self.__csio.read(n)
+
+    def readline(self, length=None):
+        return self.__csio.readline(length)
+
+    def readlines(self, sizehint=0):
+        return self.__csio.readlines(sizehint)
+
+    def truncate(self, size=None):
+        return self.__csio.truncate(size)
+
+    def write(self, s):
+        return self.__csio.write(s)
+
+    def writelines(self, list):
+        return self.__csio.writelines(list)
+
+    def flush(self):
+        return self.__csio.flush()
+
+    def getvalue(self):
+        return self.__csio.getvalue()
+
+
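+# Illustrative sketch, not part of upstream Twisted: cStringIO.StringIO is a C
+# type that cannot be subclassed directly, so subclasses hook this wrapper
+# instead, e.g. to count writes.  The class name is hypothetical.
+class _ExampleCountingStringIO(SubclassableCStringIO):
+    def __init__(self, *a, **kw):
+        SubclassableCStringIO.__init__(self, *a, **kw)
+        self.writeCount = 0
+
+    def write(self, s):
+        self.writeCount += 1
+        return SubclassableCStringIO.write(self, s)
+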
+
+def mergeFunctionMetadata(f, g):
+    """
+    Overwrite C{g}'s name and docstring with values from C{f}.  Update
+    C{g}'s instance dictionary with C{f}'s.
+
+    To use this function safely you must use the return value. In Python 2.3,
+    L{mergeFunctionMetadata} will create a new function. In later versions of
+    Python, C{g} will be mutated and returned.
+
+    @return: A function that has C{g}'s behavior and metadata merged from
+        C{f}.
+    """
+    try:
+        g.__name__ = f.__name__
+    except TypeError:
+        try:
+            merged = types.FunctionType(
+                g.func_code, g.func_globals,
+                f.__name__, inspect.getargspec(g)[-1],
+                g.func_closure)
+        except TypeError:
+            pass
+    else:
+        merged = g
+    try:
+        merged.__doc__ = f.__doc__
+    except (TypeError, AttributeError):
+        pass
+    try:
+        merged.__dict__.update(g.__dict__)
+        merged.__dict__.update(f.__dict__)
+    except (TypeError, AttributeError):
+        pass
+    merged.__module__ = f.__module__
+    return merged
+
+
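+# Illustrative sketch, not part of upstream Twisted: a decorator can use
+# mergeFunctionMetadata so that its wrapper keeps the wrapped function's name,
+# docstring and attributes (similar in spirit to functools.wraps).
+def _exampleDecorator(f):
+    def wrapper(*a, **kw):
+        # A real decorator would do something useful here; this sketch only
+        # delegates to the wrapped function.
+        return f(*a, **kw)
+    return mergeFunctionMetadata(f, wrapper)
+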
+
+def uidFromString(uidString):
+    """
+    Convert a user identifier, as a string, into an integer UID.
+
+    @type uidString: C{str}
+    @param uidString: A string giving the base-ten representation of a UID or
+        the name of a user which can be converted to a UID via L{pwd.getpwnam}.
+
+    @rtype: C{int}
+    @return: The integer UID corresponding to the given string.
+
+    @raise ValueError: If the user name is supplied and L{pwd} is not
+        available.
+    """
+    try:
+        return int(uidString)
+    except ValueError:
+        if pwd is None:
+            raise
+        return pwd.getpwnam(uidString)[2]
+
+
+
+def gidFromString(gidString):
+    """
+    Convert a group identifier, as a string, into an integer GID.
+
+    @type gidString: C{str}
+    @param gidString: A string giving the base-ten representation of a GID or
+        the name of a group which can be converted to a GID via L{grp.getgrnam}.
+
+    @rtype: C{int}
+    @return: The integer GID corresponding to the given string.
+
+    @raise ValueError: If the group name is supplied and L{grp} is not
+        available.
+    """
+    try:
+        return int(gidString)
+    except ValueError:
+        if grp is None:
+            raise
+        return grp.getgrnam(gidString)[2]
+
+
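+# Illustrative sketch, not part of upstream Twisted: these helpers let a
+# command-line option accept either a numeric id or a name, e.g. "1000" or
+# "www-data".  The defaults below are hypothetical.
+def _exampleParseIds(uidString="0", gidString="0"):
+    # uidFromString("0") -> 0; uidFromString("root") -> 0 via pwd.getpwnam.
+    return uidFromString(uidString), gidFromString(gidString)
+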
+
+def runAsEffectiveUser(euid, egid, function, *args, **kwargs):
+    """
+    Run the given function wrapped with seteuid/setegid calls.
+
+    This will try to minimize the number of seteuid/setegid calls, comparing
+    current and wanted permissions.
+
+    @param euid: effective UID used to call the function.
+    @type euid: C{int}
+
+    @param egid: effective GID used to call the function.
+    @type egid: C{int}
+
+    @param function: the function run with the specific permission.
+    @type function: any callable
+
+    @param *args: arguments passed to C{function}
+    @param **kwargs: keyword arguments passed to C{function}
+    """
+    uid, gid = os.geteuid(), os.getegid()
+    if uid == euid and gid == egid:
+        return function(*args, **kwargs)
+    else:
+        if uid != 0 and (uid != euid or gid != egid):
+            os.seteuid(0)
+        if gid != egid:
+            os.setegid(egid)
+        if euid != 0 and (euid != uid or gid != egid):
+            os.seteuid(euid)
+        try:
+            return function(*args, **kwargs)
+        finally:
+            if euid != 0 and (uid != euid or gid != egid):
+                os.seteuid(0)
+            if gid != egid:
+                os.setegid(gid)
+            if uid != 0 and (uid != euid or gid != egid):
+                os.seteuid(uid)
+
+
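+# Illustrative sketch, not part of upstream Twisted: read a file that only a
+# particular user may access, without permanently giving up the current ids.
+# The path argument is hypothetical.
+def _exampleReadAsUser(path, euid, egid):
+    def readIt():
+        f = open(path, 'rb')
+        try:
+            return f.read()
+        finally:
+            f.close()
+    return runAsEffectiveUser(euid, egid, readIt)
+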
+
+__all__ = [
+    "uniquify", "padTo", "getPluginDirs", "addPluginDir", "sibpath",
+    "getPassword", "println", "makeStatBar", "OrderedDict",
+    "InsensitiveDict", "spewer", "searchupwards", "LineLog",
+    "raises", "IntervalDifferential", "FancyStrMixin", "FancyEqMixin",
+    "switchUID", "SubclassableCStringIO", "unsignedID", "mergeFunctionMetadata",
+    "nameToLabel", "uidFromString", "gidFromString", "runAsEffectiveUser",
+    "untilConcludes",
+    "runWithWarningsSuppressed",
+    ]
diff --git a/ThirdParty/Twisted/twisted/python/versions.py b/ThirdParty/Twisted/twisted/python/versions.py
new file mode 100644
index 0000000..91e914f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/versions.py
@@ -0,0 +1,258 @@
+# -*- test-case-name: twisted.python.test.test_versions -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Versions for Python packages.
+
+See L{Version}.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, os
+
+from twisted.python.compat import cmp, comparable, nativeString
+
+ at comparable
+class _inf(object):
+    """
+    An object that is bigger than all other objects.
+    """
+    def __cmp__(self, other):
+        """
+        @param other: Another object.
+        @type other: any
+
+        @return: 0 if other is inf, 1 otherwise.
+        @rtype: C{int}
+        """
+        if other is _inf:
+            return 0
+        return 1
+
+_inf = _inf()
+
+
+
+class IncomparableVersions(TypeError):
+    """
+    Two versions could not be compared.
+    """
+
+
+
+ at comparable
+class Version(object):
+    """
+    An object that represents a three-part version number.
+
+    If running from an svn checkout, include the revision number in
+    the version string.
+    """
+    def __init__(self, package, major, minor, micro, prerelease=None):
+        """
+        @param package: Name of the package that this is a version of.
+        @type package: C{str}
+        @param major: The major version number.
+        @type major: C{int}
+        @param minor: The minor version number.
+        @type minor: C{int}
+        @param micro: The micro version number.
+        @type micro: C{int}
+        @param prerelease: The prerelease number.
+        @type prerelease: C{int}
+        """
+        self.package = package
+        self.major = major
+        self.minor = minor
+        self.micro = micro
+        self.prerelease = prerelease
+
+
+    def short(self):
+        """
+        Return a string in canonical short version format,
+        <major>.<minor>.<micro>[+rSVNVer].
+        """
+        s = self.base()
+        svnver = self._getSVNVersion()
+        if svnver:
+            s += '+r' + nativeString(svnver)
+        return s
+
+
+    def base(self):
+        """
+        Like L{short}, but without the +rSVNVer.
+        """
+        if self.prerelease is None:
+            pre = ""
+        else:
+            pre = "pre%s" % (self.prerelease,)
+        return '%d.%d.%d%s' % (self.major,
+                               self.minor,
+                               self.micro,
+                               pre)
+
+
+    def __repr__(self):
+        svnver = self._formatSVNVersion()
+        if svnver:
+            svnver = '  #' + svnver
+        if self.prerelease is None:
+            prerelease = ""
+        else:
+            prerelease = ", prerelease=%r" % (self.prerelease,)
+        return '%s(%r, %d, %d, %d%s)%s' % (
+            self.__class__.__name__,
+            self.package,
+            self.major,
+            self.minor,
+            self.micro,
+            prerelease,
+            svnver)
+
+
+    def __str__(self):
+        return '[%s, version %s]' % (
+            self.package,
+            self.short())
+
+
+    def __cmp__(self, other):
+        """
+        Compare two versions, considering major versions, minor versions, micro
+        versions, then prereleases.
+
+        A version with a prerelease is always less than a version without a
+        prerelease. If both versions have prereleases, they will be included in
+        the comparison.
+
+        @param other: Another version.
+        @type other: L{Version}
+
+        @return: NotImplemented when the other object is not a Version, or one
+            of -1, 0, or 1.
+
+        @raise IncomparableVersions: when the package names of the versions
+            differ.
+        """
+        if not isinstance(other, self.__class__):
+            return NotImplemented
+        if self.package != other.package:
+            raise IncomparableVersions("%r != %r"
+                                       % (self.package, other.package))
+
+        if self.prerelease is None:
+            prerelease = _inf
+        else:
+            prerelease = self.prerelease
+
+        if other.prerelease is None:
+            otherpre = _inf
+        else:
+            otherpre = other.prerelease
+
+        x = cmp((self.major,
+                    self.minor,
+                    self.micro,
+                    prerelease),
+                   (other.major,
+                    other.minor,
+                    other.micro,
+                    otherpre))
+        return x
+
+
+    def _parseSVNEntries_4(self, entriesFile):
+        """
+        Given a readable file object which represents a .svn/entries file in
+        format version 4, return the revision as a string.  We do this by
+        reading the first XML element in the document that has a 'revision'
+        attribute.
+        """
+        from xml.dom.minidom import parse
+        doc = parse(entriesFile).documentElement
+        for node in doc.childNodes:
+            if hasattr(node, 'getAttribute'):
+                rev = node.getAttribute('revision')
+                if rev is not None:
+                    return rev.encode('ascii')
+
+
+    def _parseSVNEntries_8(self, entriesFile):
+        """
+        Given a readable file object which represents a .svn/entries file in
+        format version 8, return the revision as a string.
+        """
+        entriesFile.readline()
+        entriesFile.readline()
+        entriesFile.readline()
+        return entriesFile.readline().strip()
+
+
+    # Add handlers for version 9 and 10 formats, which are the same as
+    # version 8 as far as revision information is concerned.
+    _parseSVNEntries_9 = _parseSVNEntries_8
+    _parseSVNEntriesTenPlus = _parseSVNEntries_8
+
+
+    def _getSVNVersion(self):
+        """
+        Figure out the SVN revision number based on the existence of
+        <package>/.svn/entries, and its contents. This requires discovering the
+        format version from the 'format' file and parsing the entries file
+        accordingly.
+
+        @return: None or string containing SVN Revision number.
+        """
+        mod = sys.modules.get(self.package)
+        if mod:
+            svn = os.path.join(os.path.dirname(mod.__file__), '.svn')
+            if not os.path.exists(svn):
+                # It's not an svn working copy
+                return None
+
+            formatFile = os.path.join(svn, 'format')
+            if os.path.exists(formatFile):
+                # It looks like a less-than-version-10 working copy.
+                with open(formatFile, 'rb') as fObj:
+                    format = fObj.read().strip()
+                parser = getattr(self, '_parseSVNEntries_' + format.decode('ascii'), None)
+            else:
+                # It looks like a version-10-or-greater working copy, which
+                # has version information in the entries file.
+                parser = self._parseSVNEntriesTenPlus
+
+            if parser is None:
+                return b'Unknown'
+
+            entriesFile = os.path.join(svn, 'entries')
+            entries = open(entriesFile, 'rb')
+            try:
+                try:
+                    return parser(entries)
+                finally:
+                    entries.close()
+            except:
+                return b'Unknown'
+
+
+    def _formatSVNVersion(self):
+        ver = self._getSVNVersion()
+        if ver is None:
+            return ''
+        return ' (SVN r%s)' % (ver,)
+
+
+
+def getVersionString(version):
+    """
+    Get a friendly string for the given version object.
+
+    @param version: A L{Version} object.
+    @return: A string containing the package and short version number.
+    """
+    result = '%s %s' % (version.package, version.short())
+    return result
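+
+
+# Illustrative sketch, not part of upstream Twisted: constructing and comparing
+# version objects for a hypothetical package named "Example".
+def _exampleVersions():
+    release = Version("Example", 8, 2, 0)
+    candidate = Version("Example", 8, 2, 0, prerelease=1)
+    assert candidate.base() == "8.2.0pre1"
+    assert candidate < release          # a prerelease sorts before the release
+    assert getVersionString(release).startswith("Example 8.2.0")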
diff --git a/ThirdParty/Twisted/twisted/python/win32.py b/ThirdParty/Twisted/twisted/python/win32.py
new file mode 100644
index 0000000..1bc5b14
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/win32.py
@@ -0,0 +1,166 @@
+# -*- test-case-name: twisted.python.test.test_win32 -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Win32 utilities.
+
+See also twisted.python.shortcut.
+
+ at var O_BINARY: the 'binary' mode flag on Windows, or 0 on other platforms, so it
+    may safely be OR'ed into a mask for os.open.
+"""
+
+from __future__ import division, absolute_import
+
+import re
+import os
+
+try:
+    import win32api
+    import win32con
+except ImportError:
+    pass
+
+from twisted.python.runtime import platform
+
+# http://msdn.microsoft.com/library/default.asp?url=/library/en-us/debug/base/system_error_codes.asp
+ERROR_FILE_NOT_FOUND = 2
+ERROR_PATH_NOT_FOUND = 3
+ERROR_INVALID_NAME = 123
+ERROR_DIRECTORY = 267
+
+O_BINARY = getattr(os, "O_BINARY", 0)
+
+class FakeWindowsError(OSError):
+    """
+    Stand-in for the sometimes-built-in C{WindowsError} exception, for
+    platforms on which it is missing.
+    """
+
+try:
+    WindowsError = WindowsError
+except NameError:
+    WindowsError = FakeWindowsError
+
+# XXX fix this to use python's builtin _winreg?
+
+def getProgramsMenuPath():
+    """Get the path to the Programs menu.
+
+    Probably will break on non-US Windows.
+
+    @returns: the filesystem location of the common Start Menu->Programs.
+    """
+    if not platform.isWinNT():
+        return "C:\\Windows\\Start Menu\\Programs"
+    keyname = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\Explorer\\Shell Folders'
+    hShellFolders = win32api.RegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE,
+                                          keyname, 0, win32con.KEY_READ)
+    return win32api.RegQueryValueEx(hShellFolders, 'Common Programs')[0]
+
+
+def getProgramFilesPath():
+    """Get the path to the Program Files folder."""
+    keyname = 'SOFTWARE\\Microsoft\\Windows\\CurrentVersion'
+    currentV = win32api.RegOpenKeyEx(win32con.HKEY_LOCAL_MACHINE,
+                                     keyname, 0, win32con.KEY_READ)
+    return win32api.RegQueryValueEx(currentV, 'ProgramFilesDir')[0]
+
+_cmdLineQuoteRe = re.compile(r'(\\*)"')
+_cmdLineQuoteRe2 = re.compile(r'(\\+)\Z')
+def cmdLineQuote(s):
+    """
+    Internal method for quoting a single command-line argument.
+
+    @param s: an unquoted string that you want to quote so that something that
+        does cmd.exe-style unquoting will interpret it as a single argument,
+        even if it contains spaces.
+    @type s: C{str}
+
+    @return: a quoted string.
+    @rtype: C{str}
+    """
+    quote = ((" " in s) or ("\t" in s) or ('"' in s) or s == '') and '"' or ''
+    return quote + _cmdLineQuoteRe2.sub(r"\1\1", _cmdLineQuoteRe.sub(r'\1\1\\"', s)) + quote
+
+def quoteArguments(arguments):
+    """
+    Quote an iterable of command-line arguments for passing to CreateProcess or
+    a similar API.  This allows the list passed to C{reactor.spawnProcess} to
+    match the child process's C{sys.argv} properly.
+
+    @param arguments: an iterable of C{str}, each unquoted.
+
+    @return: a single string, with the given sequence quoted as necessary.
+    """
+    return ' '.join([cmdLineQuote(a) for a in arguments])
+
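+
+# Illustrative sketch, not part of upstream Twisted: quoting arguments for a
+# Windows child process so that embedded spaces survive cmd.exe-style
+# splitting.  The argument values are hypothetical.
+def _exampleQuoting():
+    # cmdLineQuote('a b')   -> '"a b"'   (quoted because of the space)
+    # cmdLineQuote('plain') -> 'plain'   (left as-is)
+    return quoteArguments(["child.exe", "a b", "plain"])
+    # -> 'child.exe "a b" plain'
+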
+
+class _ErrorFormatter(object):
+    """
+    Formatter for Windows error messages.
+
+    @ivar winError: A callable which takes one integer error number argument
+        and returns an L{exceptions.WindowsError} instance for that error (like
+        L{ctypes.WinError}).
+
+    @ivar formatMessage: A callable which takes one integer error number
+        argument and returns a C{str} giving the message for that error (like
+        L{win32api.FormatMessage}).
+
+    @ivar errorTab: A mapping from integer error numbers to C{str} messages
+        which correspond to those errors (like L{socket.errorTab}).
+    """
+    def __init__(self, WinError, FormatMessage, errorTab):
+        self.winError = WinError
+        self.formatMessage = FormatMessage
+        self.errorTab = errorTab
+
+    def fromEnvironment(cls):
+        """
+        Get as many of the platform-specific error translation objects as
+        possible and return an instance of C{cls} created with them.
+        """
+        try:
+            from ctypes import WinError
+        except ImportError:
+            WinError = None
+        try:
+            from win32api import FormatMessage
+        except ImportError:
+            FormatMessage = None
+        try:
+            from socket import errorTab
+        except ImportError:
+            errorTab = None
+        return cls(WinError, FormatMessage, errorTab)
+    fromEnvironment = classmethod(fromEnvironment)
+
+
+    def formatError(self, errorcode):
+        """
+        Returns the string associated with a Windows error message, such as the
+        ones found in socket.error.
+
+        Attempts direct lookup against the win32 API via ctypes (and then
+        pywin32, if available), then in the error table in the socket module,
+        then finally defaulting to C{os.strerror}.
+
+        @param errorcode: the Windows error code
+        @type errorcode: C{int}
+
+        @return: The error message string
+        @rtype: C{str}
+        """
+        if self.winError is not None:
+            return self.winError(errorcode).strerror
+        if self.formatMessage is not None:
+            return self.formatMessage(errorcode)
+        if self.errorTab is not None:
+            result = self.errorTab.get(errorcode)
+            if result is not None:
+                return result
+        return os.strerror(errorcode)
+
+formatError = _ErrorFormatter.fromEnvironment().formatError
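+
+
+# Illustrative sketch, not part of upstream Twisted: translating a Windows
+# error code into a readable message.  Off Windows this falls back to
+# os.strerror(), so the exact wording varies by platform.
+def _exampleFormatError():
+    return formatError(ERROR_FILE_NOT_FOUND)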
diff --git a/ThirdParty/Twisted/twisted/python/zippath.py b/ThirdParty/Twisted/twisted/python/zippath.py
new file mode 100644
index 0000000..a82f253
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zippath.py
@@ -0,0 +1,268 @@
+# -*- test-case-name: twisted.test.test_paths.ZipFilePathTestCase -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module contains implementations of IFilePath for zip files.
+
+See the constructor for ZipArchive for use.
+"""
+
+__metaclass__ = type
+
+import os
+import time
+import errno
+
+
+# Python 2.6 includes support for incremental unzipping of zipfiles, and
+# thus obviates the need for ChunkingZipFile.
+import sys
+if sys.version_info[:2] >= (2, 6):
+    _USE_ZIPFILE = True
+    from zipfile import ZipFile
+else:
+    _USE_ZIPFILE = False
+    from twisted.python.zipstream import ChunkingZipFile
+
+from twisted.python.filepath import IFilePath, FilePath, AbstractFilePath
+
+from zope.interface import implements
+
+# using FilePath here exclusively rather than os to make sure that we don't do
+# anything OS-path-specific here.
+
+ZIP_PATH_SEP = '/'              # In zipfiles, "/" is universally used as the
+                                # path separator, regardless of platform.
+
+
+class ZipPath(AbstractFilePath):
+    """
+    I represent a file or directory contained within a zip file.
+    """
+
+    implements(IFilePath)
+
+    sep = ZIP_PATH_SEP
+
+    def __init__(self, archive, pathInArchive):
+        """
+        Don't construct me directly.  Use ZipArchive.child().
+
+        @param archive: a ZipArchive instance.
+
+        @param pathInArchive: a ZIP_PATH_SEP-separated string.
+        """
+        self.archive = archive
+        self.pathInArchive = pathInArchive
+        # self.path pretends to be os-specific because that's the way the
+        # 'zipimport' module does it.
+        self.path = os.path.join(archive.zipfile.filename,
+                                 *(self.pathInArchive.split(ZIP_PATH_SEP)))
+
+    def __cmp__(self, other):
+        if not isinstance(other, ZipPath):
+            return NotImplemented
+        return cmp((self.archive, self.pathInArchive),
+                   (other.archive, other.pathInArchive))
+
+
+    def __repr__(self):
+        parts = [os.path.abspath(self.archive.path)]
+        parts.extend(self.pathInArchive.split(ZIP_PATH_SEP))
+        path = os.sep.join(parts)
+        return "ZipPath('%s')" % (path.encode('string-escape'),)
+
+
+    def parent(self):
+        splitup = self.pathInArchive.split(ZIP_PATH_SEP)
+        if len(splitup) == 1:
+            return self.archive
+        return ZipPath(self.archive, ZIP_PATH_SEP.join(splitup[:-1]))
+
+
+    def child(self, path):
+        """
+        Return a new ZipPath representing a path in C{self.archive} which is
+        a child of this path.
+
+        @note: Requesting the C{".."} (or other special name) child will not
+            cause L{InsecurePath} to be raised since these names do not have
+            any special meaning inside a zip archive.  Be particularly
+            careful with the C{path} attribute (if you absolutely must use
+            it) as this means it may include special names with special
+            meaning outside of the context of a zip archive.
+        """
+        return ZipPath(self.archive, ZIP_PATH_SEP.join([self.pathInArchive, path]))
+
+
+    def sibling(self, path):
+        return self.parent().child(path)
+
+    # preauthChild = child
+
+    def exists(self):
+        return self.isdir() or self.isfile()
+
+    def isdir(self):
+        return self.pathInArchive in self.archive.childmap
+
+    def isfile(self):
+        return self.pathInArchive in self.archive.zipfile.NameToInfo
+
+    def islink(self):
+        return False
+
+    def listdir(self):
+        if self.exists():
+            if self.isdir():
+                return self.archive.childmap[self.pathInArchive].keys()
+            else:
+                raise OSError(errno.ENOTDIR, "Leaf zip entry listed")
+        else:
+            raise OSError(errno.ENOENT, "Non-existent zip entry listed")
+
+
+    def splitext(self):
+        """
+        Return a value similar to that returned by os.path.splitext.
+        """
+        # This works because the constructor builds our fake 'path' attribute
+        # with OS-specific path separators.
+        return os.path.splitext(self.path)
+
+
+    def basename(self):
+        return self.pathInArchive.split(ZIP_PATH_SEP)[-1]
+
+    def dirname(self):
+        # XXX NOTE: This API isn't a very good idea on filepath, but it's even
+        # less meaningful here.
+        return self.parent().path
+
+    def open(self, mode="r"):
+        if _USE_ZIPFILE:
+            return self.archive.zipfile.open(self.pathInArchive, mode=mode)
+        else:
+            # XXX oh man, is this too much hax?
+            self.archive.zipfile.mode = mode
+            return self.archive.zipfile.readfile(self.pathInArchive)
+
+    def changed(self):
+        pass
+
+    def getsize(self):
+        """
+        Retrieve this file's size.
+
+        @return: file size, in bytes
+        """
+
+        return self.archive.zipfile.NameToInfo[self.pathInArchive].file_size
+
+    def getAccessTime(self):
+        """
+        Retrieve this file's last access-time.  This is the same as the last access
+        time for the archive.
+
+        @return: a number of seconds since the epoch
+        """
+        return self.archive.getAccessTime()
+
+
+    def getModificationTime(self):
+        """
+        Retrieve this file's last modification time.  This is the time of
+        modification recorded in the zipfile.
+
+        @return: a number of seconds since the epoch.
+        """
+        return time.mktime(
+            self.archive.zipfile.NameToInfo[self.pathInArchive].date_time
+            + (0, 0, 0))
+
+
+    def getStatusChangeTime(self):
+        """
+        Retrieve this file's last modification time.  This name is provided for
+        compatibility, and returns the same value as L{getModificationTime}.
+
+        @return: a number of seconds since the epoch.
+        """
+        return self.getModificationTime()
+
+
+
+class ZipArchive(ZipPath):
+    """ I am a FilePath-like object which can wrap a zip archive as if it were a
+    directory.
+    """
+    archive = property(lambda self: self)
+    def __init__(self, archivePathname):
+        """Create a ZipArchive, treating the archive at archivePathname as a zip file.
+
+        @param archivePathname: a str, naming a path in the filesystem.
+        """
+        if _USE_ZIPFILE:
+            self.zipfile = ZipFile(archivePathname)
+        else:
+            self.zipfile = ChunkingZipFile(archivePathname)
+        self.path = archivePathname
+        self.pathInArchive = ''
+        # zipfile is already wasting O(N) memory on cached ZipInfo instances,
+        # so there's no sense in trying to do this lazily or intelligently
+        self.childmap = {}      # map parent: list of children
+
+        for name in self.zipfile.namelist():
+            name = name.split(ZIP_PATH_SEP)
+            for x in range(len(name)):
+                child = name[-x]
+                parent = ZIP_PATH_SEP.join(name[:-x])
+                if parent not in self.childmap:
+                    self.childmap[parent] = {}
+                self.childmap[parent][child] = 1
+            parent = ''
+
+    def child(self, path):
+        """
+        Create a ZipPath pointing at a path within the archive.
+
+        @param path: a str containing no path separators (neither '/' nor the
+            system path separator, if it is different).
+        """
+        return ZipPath(self, path)
+
+    def exists(self):
+        """
+        Returns true if the underlying archive exists.
+        """
+        return FilePath(self.zipfile.filename).exists()
+
+
+    def getAccessTime(self):
+        """
+        Return the archive file's last access time.
+        """
+        return FilePath(self.zipfile.filename).getAccessTime()
+
+
+    def getModificationTime(self):
+        """
+        Return the archive file's modification time.
+        """
+        return FilePath(self.zipfile.filename).getModificationTime()
+
+
+    def getStatusChangeTime(self):
+        """
+        Return the archive file's status change time.
+        """
+        return FilePath(self.zipfile.filename).getStatusChangeTime()
+
+
+    def __repr__(self):
+        return 'ZipArchive(%r)' % (os.path.abspath(self.path),)
+
+
+__all__ = ['ZipArchive', 'ZipPath']
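+
+
+# Illustrative sketch, not part of upstream Twisted: browsing a zip file with
+# the same child()/listdir() API as an ordinary FilePath.  The archive name is
+# hypothetical.
+def _exampleListArchive(pathToZip="example.zip"):
+    archive = ZipArchive(pathToZip)
+    sizes = {}
+    for name in archive.listdir():
+        entry = archive.child(name)
+        if entry.isfile():
+            sizes[name] = entry.getsize()
+    return sizes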
diff --git a/ThirdParty/Twisted/twisted/python/zipstream.py b/ThirdParty/Twisted/twisted/python/zipstream.py
new file mode 100644
index 0000000..7ce8c12
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zipstream.py
@@ -0,0 +1,319 @@
+# -*- test-case-name: twisted.python.test.test_zipstream -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An incremental approach to unzipping files.  This allows you to unzip a little
+bit of a file at a time, which means you can report progress as a file unzips.
+"""
+
+import zipfile
+import os.path
+import zlib
+import struct
+
+
+_fileHeaderSize = struct.calcsize(zipfile.structFileHeader)
+
+class ChunkingZipFile(zipfile.ZipFile):
+    """
+    A C{ZipFile} object which, with L{readfile}, also gives you access to a
+    file-like object for each entry.
+    """
+
+    def readfile(self, name):
+        """
+        Return file-like object for name.
+        """
+        if self.mode not in ("r", "a"):
+            raise RuntimeError('read() requires mode "r" or "a"')
+        if not self.fp:
+            raise RuntimeError(
+                "Attempt to read ZIP archive that was already closed")
+        zinfo = self.getinfo(name)
+
+        self.fp.seek(zinfo.header_offset, 0)
+
+        fheader = self.fp.read(_fileHeaderSize)
+        if fheader[0:4] != zipfile.stringFileHeader:
+            raise zipfile.BadZipfile("Bad magic number for file header")
+
+        fheader = struct.unpack(zipfile.structFileHeader, fheader)
+        fname = self.fp.read(fheader[zipfile._FH_FILENAME_LENGTH])
+
+        if fheader[zipfile._FH_EXTRA_FIELD_LENGTH]:
+            self.fp.read(fheader[zipfile._FH_EXTRA_FIELD_LENGTH])
+
+        if fname != zinfo.orig_filename:
+            raise zipfile.BadZipfile(
+                'File name in directory "%s" and header "%s" differ.' % (
+                    zinfo.orig_filename, fname))
+
+        if zinfo.compress_type == zipfile.ZIP_STORED:
+            return ZipFileEntry(self, zinfo.compress_size)
+        elif zinfo.compress_type == zipfile.ZIP_DEFLATED:
+            return DeflatedZipFileEntry(self, zinfo.compress_size)
+        else:
+            raise zipfile.BadZipfile(
+                "Unsupported compression method %d for file %s" %
+                    (zinfo.compress_type, name))
+
+
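+# Illustrative sketch, not part of upstream Twisted: reading one member of an
+# archive in small pieces instead of loading it all at once.  The file names
+# are hypothetical.
+def _exampleReadMember(archivePath="example.zip", memberName="member.txt"):
+    czf = ChunkingZipFile(archivePath)
+    fp = czf.readfile(memberName)
+    pieces = []
+    while True:
+        chunk = fp.read(4096)
+        if not chunk:
+            break
+        pieces.append(chunk)
+    return "".join(pieces)
+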
+
+class _FileEntry(object):
+    """
+    Abstract superclass of both compressed and uncompressed variants of
+    file-like objects within a zip archive.
+
+    @ivar chunkingZipFile: a chunking zip file.
+    @type chunkingZipFile: L{ChunkingZipFile}
+
+    @ivar length: The number of bytes within the zip file that represent this
+    file.  (This is the size on disk, not the number of decompressed bytes
+    which will result from reading it.)
+
+    @ivar fp: the underlying file object (that contains pkzip data).  Do not
+    touch this, please.  It will quite likely move or go away.
+
+    @ivar closed: File-like 'closed' attribute; True before this file has been
+    closed, False after.
+    @type closed: C{bool}
+
+    @ivar finished: An older, broken synonym for 'closed'.  Do not touch this,
+    please.
+    @type finished: C{int}
+    """
+    def __init__(self, chunkingZipFile, length):
+        """
+        Create a L{_FileEntry} from a L{ChunkingZipFile}.
+        """
+        self.chunkingZipFile = chunkingZipFile
+        self.fp = self.chunkingZipFile.fp
+        self.length = length
+        self.finished = 0
+        self.closed = False
+
+
+    def isatty(self):
+        """
+        Returns false because zip files should not be ttys
+        """
+        return False
+
+
+    def close(self):
+        """
+        Close self (file-like object)
+        """
+        self.closed = True
+        self.finished = 1
+        del self.fp
+
+
+    def readline(self):
+        """
+        Read a line.
+        """
+        bytes = ""
+        for byte in iter(lambda : self.read(1), ""):
+            bytes += byte
+            if byte == "\n":
+                break
+        return bytes
+
+
+    def next(self):
+        """
+        Implement next as file does (like readline, except raises StopIteration
+        at EOF)
+        """
+        nextline = self.readline()
+        if nextline:
+            return nextline
+        raise StopIteration()
+
+
+    def readlines(self):
+        """
+        Returns a list of all the lines
+        """
+        return list(self)
+
+
+    def xreadlines(self):
+        """
+        Returns an iterator (so self)
+        """
+        return self
+
+
+    def __iter__(self):
+        """
+        Returns an iterator (so self)
+        """
+        return self
+
+
+
+class ZipFileEntry(_FileEntry):
+    """
+    File-like object used to read an uncompressed entry in a ZipFile
+    """
+
+    def __init__(self, chunkingZipFile, length):
+        _FileEntry.__init__(self, chunkingZipFile, length)
+        self.readBytes = 0
+
+
+    def tell(self):
+        return self.readBytes
+
+
+    def read(self, n=None):
+        if n is None:
+            n = self.length - self.readBytes
+        if n == 0 or self.finished:
+            return ''
+        data = self.chunkingZipFile.fp.read(
+            min(n, self.length - self.readBytes))
+        self.readBytes += len(data)
+        if self.readBytes == self.length or len(data) <  n:
+            self.finished = 1
+        return data
+
+
+
+class DeflatedZipFileEntry(_FileEntry):
+    """
+    File-like object used to read a deflated entry in a ZipFile
+    """
+
+    def __init__(self, chunkingZipFile, length):
+        _FileEntry.__init__(self, chunkingZipFile, length)
+        self.returnedBytes = 0
+        self.readBytes = 0
+        self.decomp = zlib.decompressobj(-15)
+        self.buffer = ""
+
+
+    def tell(self):
+        return self.returnedBytes
+
+
+    def read(self, n=None):
+        if self.finished:
+            return ""
+        if n is None:
+            result = [self.buffer,]
+            result.append(
+                self.decomp.decompress(
+                    self.chunkingZipFile.fp.read(
+                        self.length - self.readBytes)))
+            result.append(self.decomp.decompress("Z"))
+            result.append(self.decomp.flush())
+            self.buffer = ""
+            self.finished = 1
+            result = "".join(result)
+            self.returnedBytes += len(result)
+            return result
+        else:
+            while len(self.buffer) < n:
+                data = self.chunkingZipFile.fp.read(
+                    min(n, 1024, self.length - self.readBytes))
+                self.readBytes += len(data)
+                if not data:
+                    result = (self.buffer
+                              + self.decomp.decompress("Z")
+                              + self.decomp.flush())
+                    self.finished = 1
+                    self.buffer = ""
+                    self.returnedBytes += len(result)
+                    return result
+                else:
+                    self.buffer += self.decomp.decompress(data)
+            result = self.buffer[:n]
+            self.buffer = self.buffer[n:]
+            self.returnedBytes += len(result)
+            return result
+
+
+
+DIR_BIT = 16
+
+
+def countZipFileChunks(filename, chunksize):
+    """
+    Predict the number of chunks that will be extracted from the entire
+    zipfile, given chunksize blocks.
+    """
+    totalchunks = 0
+    zf = ChunkingZipFile(filename)
+    for info in zf.infolist():
+        totalchunks += countFileChunks(info, chunksize)
+    return totalchunks
+
+
+def countFileChunks(zipinfo, chunksize):
+    """
+    Count the number of chunks that will result from the given C{ZipInfo}.
+
+    @param zipinfo: a C{zipfile.ZipInfo} instance describing an entry in a zip
+    archive to be counted.
+
+    @return: the number of chunks this file will be read in.  (Even an empty
+    file counts as one chunk.)
+    @rtype: C{int}
+    """
+    count, extra = divmod(zipinfo.file_size, chunksize)
+    if extra > 0:
+        count += 1
+    return count or 1
+
+
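+# Illustrative sketch, not part of upstream Twisted: a 10000-byte member read
+# in 4096-byte chunks yields three chunks (divmod(10000, 4096) == (2, 1808)),
+# and an empty member still counts as one chunk.
+def _exampleChunkCount():
+    info = zipfile.ZipInfo("member.txt")
+    info.file_size = 10000
+    assert countFileChunks(info, 4096) == 3
+    info.file_size = 0
+    assert countFileChunks(info, 4096) == 1
+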
+
+def unzipIterChunky(filename, directory='.', overwrite=0,
+                    chunksize=4096):
+    """
+    Return a generator for the zipfile.  This implementation will yield after
+    every chunksize uncompressed bytes, or at the end of a file, whichever
+    comes first.
+
+    The value it yields is the number of chunks left to unzip.
+    """
+    czf = ChunkingZipFile(filename, 'r')
+    if not os.path.exists(directory):
+        os.makedirs(directory)
+    remaining = countZipFileChunks(filename, chunksize)
+    names = czf.namelist()
+    infos = czf.infolist()
+
+    for entry, info in zip(names, infos):
+        isdir = info.external_attr & DIR_BIT
+        f = os.path.join(directory, entry)
+        if isdir:
+            # overwrite flag only applies to files
+            if not os.path.exists(f):
+                os.makedirs(f)
+            remaining -= 1
+            yield remaining
+        else:
+            # create the directory the file will be in first,
+            # since we can't guarantee it exists
+            fdir = os.path.split(f)[0]
+            if not os.path.exists(fdir):
+                os.makedirs(fdir)
+            if overwrite or not os.path.exists(f):
+                outfile = file(f, 'wb')
+                fp = czf.readfile(entry)
+                if info.file_size == 0:
+                    remaining -= 1
+                    yield remaining
+                while fp.tell() < info.file_size:
+                    hunk = fp.read(chunksize)
+                    outfile.write(hunk)
+                    remaining -= 1
+                    yield remaining
+                outfile.close()
+            else:
+                remaining -= countFileChunks(info, chunksize)
+                yield remaining
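+
+
+# Illustrative sketch, not part of upstream Twisted: reporting extraction
+# progress while unzipping.  The archive and destination names are
+# hypothetical.
+def _exampleUnzipWithProgress(archivePath="example.zip", destination="out"):
+    total = countZipFileChunks(archivePath, 4096) or 1
+    for remaining in unzipIterChunky(archivePath, destination, chunksize=4096):
+        percent = 100.0 * (total - remaining) / total
+        print("%.1f%% unzipped" % (percent,))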
diff --git a/ThirdParty/Twisted/twisted/python/zsh/README.txt b/ThirdParty/Twisted/twisted/python/zsh/README.txt
new file mode 100644
index 0000000..2a7b1c2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/README.txt
@@ -0,0 +1,9 @@
+THIS DIRECTORY AND ALL FILES INCLUDED ARE DEPRECATED.
+
+These are the old zsh completion functions for Twisted commands... they used
+to contain full completion functions, but now they've simply been replaced
+by the current "stub" code that delegates completion control to Twisted.
+
+This directory and included files need to remain for several years in order
+to provide backwards-compatibility with an old version of the Twisted
+stub function that was shipped with Zsh.
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_cftp b/ThirdParty/Twisted/twisted/python/zsh/_cftp
new file mode 100644
index 0000000..e89fcdb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_cftp
@@ -0,0 +1,34 @@
+#compdef cftp
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_ckeygen b/ThirdParty/Twisted/twisted/python/zsh/_ckeygen
new file mode 100644
index 0000000..38050a0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_ckeygen
@@ -0,0 +1,34 @@
+#compdef ckeygen
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_conch b/ThirdParty/Twisted/twisted/python/zsh/_conch
new file mode 100644
index 0000000..e3ac3b6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_conch
@@ -0,0 +1,34 @@
+#compdef conch
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_lore b/ThirdParty/Twisted/twisted/python/zsh/_lore
new file mode 100644
index 0000000..8b1c328
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_lore
@@ -0,0 +1,34 @@
+#compdef lore
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_manhole b/ThirdParty/Twisted/twisted/python/zsh/_manhole
new file mode 100644
index 0000000..54ec99f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_manhole
@@ -0,0 +1,34 @@
+#compdef manhole
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_mktap b/ThirdParty/Twisted/twisted/python/zsh/_mktap
new file mode 100644
index 0000000..2a08ea4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_mktap
@@ -0,0 +1,34 @@
+#compdef mktap
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_pyhtmlizer b/ThirdParty/Twisted/twisted/python/zsh/_pyhtmlizer
new file mode 100644
index 0000000..2fd2d6d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_pyhtmlizer
@@ -0,0 +1,34 @@
+#compdef pyhtmlizer
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_tap2deb b/ThirdParty/Twisted/twisted/python/zsh/_tap2deb
new file mode 100644
index 0000000..b4e0836
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_tap2deb
@@ -0,0 +1,34 @@
+#compdef tap2deb
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_tap2rpm b/ThirdParty/Twisted/twisted/python/zsh/_tap2rpm
new file mode 100644
index 0000000..10a083f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_tap2rpm
@@ -0,0 +1,34 @@
+#compdef tap2rpm
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_tapconvert b/ThirdParty/Twisted/twisted/python/zsh/_tapconvert
new file mode 100644
index 0000000..41a0e4d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_tapconvert
@@ -0,0 +1,34 @@
+#compdef tapconvert
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_tkconch b/ThirdParty/Twisted/twisted/python/zsh/_tkconch
new file mode 100644
index 0000000..3af1f12
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_tkconch
@@ -0,0 +1,34 @@
+#compdef tkconch
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_tkmktap b/ThirdParty/Twisted/twisted/python/zsh/_tkmktap
new file mode 100644
index 0000000..0e3bdaa
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_tkmktap
@@ -0,0 +1,34 @@
+#compdef tkmktap
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_trial b/ThirdParty/Twisted/twisted/python/zsh/_trial
new file mode 100644
index 0000000..b692f44
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_trial
@@ -0,0 +1,34 @@
+#compdef trial
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_twistd b/ThirdParty/Twisted/twisted/python/zsh/_twistd
new file mode 100644
index 0000000..171224f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_twistd
@@ -0,0 +1,34 @@
+#compdef twistd
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zsh/_websetroot b/ThirdParty/Twisted/twisted/python/zsh/_websetroot
new file mode 100644
index 0000000..58ae550
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zsh/_websetroot
@@ -0,0 +1,34 @@
+#compdef websetroot
+# This file is deprecated. See README.
+
+# This is the ZSH completion file for Twisted commands. It calls the current
+# command-line with the special "--_shell-completion" option which is handled
+# by twisted.python.usage. t.p.usage then generates zsh code on stdout to
+# handle the completions for this particular command-line.
+#
+# 3rd parties that wish to provide zsh completion for commands that
+# use t.p.usage may copy this file and change the first line to reference
+# the name(s) of their command(s).
+#
+# This file is included in the official Zsh distribution as
+# Completion/Unix/Command/_twisted
+
+# redirect stderr to /dev/null otherwise deprecation warnings may get puked
+# all over the user's terminal if completing options for mktap or other
+# deprecated commands. Redirect stderr to a file to debug errors.
+local cmd output
+cmd=("$words[@]" --_shell-completion zsh:$CURRENT)
+output=$("$cmd[@]" 2>/dev/null)
+
+if [[ $output == "#compdef "* ]]; then
+    # Looks like we got a valid completion function - so eval it to produce
+    # the completion matches.
+    eval $output
+else
+    echo "\nCompletion error running command:" ${(qqq)cmd}
+    echo -n "If output below is unhelpful you may need to edit this file and "
+    echo    "redirect stderr to a file."
+    echo "Expected completion function, but instead got:" 
+    echo $output
+    return 1
+fi
diff --git a/ThirdParty/Twisted/twisted/python/zshcomp.py b/ThirdParty/Twisted/twisted/python/zshcomp.py
new file mode 100644
index 0000000..89389fe
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/python/zshcomp.py
@@ -0,0 +1,824 @@
+# -*- test-case-name: twisted.python.test.test_zshcomp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+"""
+Rebuild the completion functions for the currently active version of Twisted::
+    $ python zshcomp.py -i
+
+This module implements a zsh code generator which generates completion code for
+commands that use twisted.python.usage. This is the stuff that makes pressing
+Tab at the command line work.
+
+Maintainer: Eric Mangold
+
+To build completion functions for your own commands, rather than for Twisted's,
+just do something like this::
+
+    o = mymodule.MyOptions()
+    f = file('_mycommand', 'w')
+    Builder("mycommand", o, f).write()
+
+Then all you have to do is place the generated file somewhere in your
+C{$fpath}, and restart zsh. Note the "site-functions" directory in your
+C{$fpath} where you may install 3rd-party completion functions (like the one
+you're building). Call C{siteFunctionsPath} to locate this directory
+programmatically.
+
+SPECIAL CLASS VARIABLES. You may set these on your usage.Options subclass::
+
+    zsh_altArgDescr
+    zsh_multiUse
+    zsh_mutuallyExclusive
+    zsh_actions
+    zsh_actionDescr
+    zsh_extras
+
+Here is what they mean (with examples)::
+
+    zsh_altArgDescr = {"foo":"use this description for foo instead"}
+        A dict mapping long option names to alternate descriptions.  When this
+        variable is present, the descriptions contained here will override
+        those descriptions provided in the optFlags and optParameters
+        variables.
+
+    zsh_multiUse = ["foo", "bar"]
+        A sequence containing those long option names which may appear on the
+        command line more than once. By default, options will only be completed
+        one time.
+
+    zsh_mutuallyExclusive = [("foo", "bar"), ("bar", "baz")]
+        A sequence of sequences, with each sub-sequence containing those long
+        option names that are mutually exclusive. That is, those options that
+        cannot appear on the command line together.
+
+    zsh_actions = {"foo":'_files -g "*.foo"', "bar":"(one two three)",
+            "colors":"_values -s , 'colors to use' red green blue"}
+        A dict mapping long option names to Zsh "actions". These actions
+        define what will be completed as the argument to the given option.  By
+        default, all files/dirs will be completed if no action is given.
+
+        Callables may instead be given for the values in this dict. The
+        callable should accept no arguments, and return a string that will be
+        used as the zsh "action" in the same way as the literal strings in the
+        examples above.
+
+        As you can see in the example above, the "foo" option will have files
+        that end in .foo completed when the user presses Tab. The "bar"
+        option will have either of the strings "one", "two", or "three"
+        completed when the user presses Tab.
+
+        "colors" will allow multiple arguments to be completed, seperated by
+        commas. The possible arguments are red, green, and blue. Examples::
+
+            my_command --foo some-file.foo --colors=red,green
+            my_command --colors=green
+            my_command --colors=green,blue
+
+        Actions may take many forms, and it is beyond the scope of this
+        document to illustrate them all. Please refer to the documentation for
+        the Zsh _arguments function. zshcomp is basically a front-end to Zsh's
+        _arguments completion function.
+
+        That documentation is available on the zsh web site at this URL:
+        U{http://zsh.sunsite.dk/Doc/Release/zsh_19.html#SEC124}
+
+    zsh_actionDescr = {"logfile":"log file name", "random":"random seed"}
+        A dict mapping long option names to a description for the corresponding
+        zsh "action". These descriptions are show above the generated matches
+        when the user is doing completions for this option.
+
+        Normally Zsh does not show these descriptions unless you have
+        "verbose" completion turned on. Turn on verbosity with this in your
+        ~/.zshrc::
+
+            zstyle ':completion:*' verbose yes
+            zstyle ':completion:*:descriptions' format '%B%d%b'
+
+    zsh_extras = [":file to read from:action", ":file to write to:action"]
+        A sequence of extra arguments that will be passed verbatim to Zsh's
+        _arguments completion function. The _arguments function does all the
+        hard work of doing command line completions. You can see how zshcomp
+        invokes the _arguments call by looking at the generated completion
+        files that this module creates.
+
+   *** NOTE ***
+
+        You will need to use this variable to describe completions for normal
+        command line arguments, that is, for arguments that are not associated
+        with an option and that are instead passed to the parseArgs method of
+        your usage.Options subclass.
+
+        In the example above, the 1st non-option argument will be described as
+        "file to read from" and completion options will be generated in
+        accordance with the "action". (See above about zsh "actions") The
+        2nd non-option argument will be described as "file to write to" and
+        the action will be interpreted likewise.
+
+        Things you can put here are all documented under the _arguments
+        function here: U{http://zsh.sunsite.dk/Doc/Release/zsh_19.html#SEC124}
+
+Zsh Notes:
+
+To enable advanced completion add something like this to your ~/.zshrc::
+
+    autoload -U compinit
+    compinit
+
+For some extra verbosity, and general niceness add these lines too::
+
+    zstyle ':completion:*' verbose yes
+    zstyle ':completion:*:descriptions' format '%B%d%b'
+    zstyle ':completion:*:messages' format '%d'
+    zstyle ':completion:*:warnings' format 'No matches for: %d'
+
+Have fun!
+"""
+import warnings
+warnings.warn(
+    "zshcomp is deprecated as of Twisted 11.1. Shell tab-completion is now "
+    "handled by twisted.python.usage.", DeprecationWarning, stacklevel=2)
+
+import itertools, sys, commands, os.path
+
+from twisted.python import reflect, util, usage
+from twisted.application.service import IServiceMaker
+
+
+
+class MyOptions(usage.Options):
+    """
+    Options for this file
+    """
+    longdesc = ""
+    synopsis = "Usage: python zshcomp.py [--install | -i] | <output directory>"
+    optFlags = [["install", "i",
+                 'Output files to the "installation" directory ' \
+                 '(twisted/python/zsh in the currently active ' \
+                 'Twisted package)']]
+    optParameters = [["directory", "d", None,
+                      "Output files to this directory"]]
+
+
+    def postOptions(self):
+        if self['install'] and self['directory']:
+            raise usage.UsageError, "Can't have --install and " \
+                                    "--directory at the same time"
+        if not self['install'] and not self['directory']:
+            raise usage.UsageError, "Not enough arguments"
+        if self['directory'] and not os.path.isdir(self['directory']):
+            raise usage.UsageError, "%s is not a directory" % self['directory']
+
+
+
+class Builder:
+    def __init__(self, cmd_name, options, file):
+        """
+        @type cmd_name: C{str}
+        @param cmd_name: The name of the command
+
+        @type options: C{twisted.usage.Options}
+        @param options: The C{twisted.usage.Options} instance defined for
+                        this command
+
+        @type file: C{file}
+        @param file: The C{file} to write the completion function to
+        """
+
+        self.cmd_name = cmd_name
+        self.options = options
+        self.file = file
+
+
+    def write(self):
+        """
+        Write the completion function to the file given to __init__
+        @return: C{None}
+        """
+        # by default, we just write out a single call to _arguments
+        self.file.write('#compdef %s\n' % (self.cmd_name,))
+        gen = ArgumentsGenerator(self.cmd_name, self.options, self.file)
+        gen.write()
+
+
+
+class SubcommandBuilder(Builder):
+    """
+    Use this builder for commands that have sub-commands. twisted.python.usage
+    has the notion of sub-commands that are defined using an entirely separate
+    Options class.
+    """
+    interface = None
+    subcmdLabel = None
+
+
+    def write(self):
+        """
+        Write the completion function to the file given to __init__
+        @return: C{None}
+        """
+        self.file.write('#compdef %s\n' % (self.cmd_name,))
+        self.file.write('local _zsh_subcmds_array\n_zsh_subcmds_array=(\n')
+        from twisted import plugin as newplugin
+        plugins = newplugin.getPlugins(self.interface)
+
+        for p in plugins:
+            self.file.write('"%s:%s"\n' % (p.tapname, p.description))
+        self.file.write(")\n\n")
+
+        self.options.__class__.zsh_extras = ['*::subcmd:->subcmd']
+        gen = ArgumentsGenerator(self.cmd_name, self.options, self.file)
+        gen.write()
+
+        self.file.write("""if (( CURRENT == 1 )); then
+  _describe "%s" _zsh_subcmds_array && ret=0
+fi
+(( ret )) || return 0
+
+service="$words[1]"
+
+case $service in\n""" % (self.subcmdLabel,))
+
+        plugins = newplugin.getPlugins(self.interface)
+        for p in plugins:
+            self.file.write(p.tapname + ")\n")
+            gen = ArgumentsGenerator(p.tapname, p.options(), self.file)
+            gen.write()
+            self.file.write(";;\n")
+        self.file.write("*) _message \"don't know how to" \
+                        " complete $service\";;\nesac")
+
+
+
+class MktapBuilder(SubcommandBuilder):
+    """
+    Builder for the mktap command
+    """
+    interface = IServiceMaker
+    subcmdLabel = 'tap to build'
+
+
+
+class TwistdBuilder(SubcommandBuilder):
+    """
+    Builder for the twistd command
+    """
+    interface = IServiceMaker
+    subcmdLabel = 'service to run'
+
+
+
+class ArgumentsGenerator:
+    """
+    Generate a call to the zsh _arguments completion function
+    based on data in a usage.Options subclass
+    """
+    def __init__(self, cmd_name, options, file):
+        """
+        @type cmd_name: C{str}
+        @param cmd_name: The name of the command
+
+        @type options: C{twisted.usage.Options}
+        @param options: The C{twisted.usage.Options} instance defined
+                        for this command
+
+        @type file: C{file}
+        @param file: The C{file} to write the completion function to
+        """
+        self.cmd_name = cmd_name
+        self.options = options
+        self.file = file
+
+        self.altArgDescr = {}
+        self.actionDescr = {}
+        self.multiUse = []
+        self.mutuallyExclusive = []
+        self.actions = {}
+        self.extras = []
+
+        aCL = reflect.accumulateClassList
+        aCD = reflect.accumulateClassDict
+
+        aCD(options.__class__, 'zsh_altArgDescr', self.altArgDescr)
+        aCD(options.__class__, 'zsh_actionDescr', self.actionDescr)
+        aCL(options.__class__, 'zsh_multiUse', self.multiUse)
+        aCL(options.__class__, 'zsh_mutuallyExclusive',
+            self.mutuallyExclusive)
+        aCD(options.__class__, 'zsh_actions', self.actions)
+        aCL(options.__class__, 'zsh_extras', self.extras)
+
+        optFlags = []
+        optParams = []
+
+        aCL(options.__class__, 'optFlags', optFlags)
+        aCL(options.__class__, 'optParameters', optParams)
+
+        for i, optList in enumerate(optFlags):
+            if len(optList) != 3:
+                optFlags[i] = util.padTo(3, optList)
+
+        for i, optList in enumerate(optParams):
+            if len(optList) != 4:
+                optParams[i] = util.padTo(4, optList)
+
+
+        self.optFlags = optFlags
+        self.optParams = optParams
+
+        optParams_d = {}
+        for optList in optParams:
+            optParams_d[optList[0]] = optList[1:]
+        self.optParams_d = optParams_d
+
+        optFlags_d = {}
+        for optList in optFlags:
+            optFlags_d[optList[0]] = optList[1:]
+        self.optFlags_d = optFlags_d
+
+        optAll_d = {}
+        optAll_d.update(optParams_d)
+        optAll_d.update(optFlags_d)
+        self.optAll_d = optAll_d
+
+        self.addAdditionalOptions()
+
+        # makes sure none of the zsh_ data structures reference option
+        # names that don't exist. (great for catching typos)
+        self.verifyZshNames()
+
+        self.excludes = self.makeExcludesDict()
+
+
+    def write(self):
+        """
+        Write the zsh completion code to the file given to __init__
+        @return: C{None}
+        """
+        self.writeHeader()
+        self.writeExtras()
+        self.writeOptions()
+        self.writeFooter()
+
+
+    def writeHeader(self):
+        """
+        This is the start of the code that calls _arguments
+        @return: C{None}
+        """
+        self.file.write('_arguments -s -A "-*" \\\n')
+
+
+    def writeOptions(self):
+        """
+        Write out zsh code for each option in this command
+        @return: C{None}
+        """
+        optNames = self.optAll_d.keys()
+        optNames.sort()
+        for longname in optNames:
+            self.writeOpt(longname)
+
+
+    def writeExtras(self):
+        """
+        Write out the "extras" list. These are just passed verbatim to the
+        _arguments call
+        @return: C{None}
+        """
+        for s in self.extras:
+            self.file.write(escape(s))
+            self.file.write(' \\\n')
+
+
+    def writeFooter(self):
+        """
+        Write the last bit of code that finishes the call to _arguments
+        @return: C{None}
+        """
+        self.file.write('&& return 0\n')
+
+
+    def verifyZshNames(self):
+        """
+        Ensure that none of the names given in zsh_* variables are typoed
+        @return: C{None}
+        @raise ValueError: Raised if unknown option names have been given in
+                           zsh_* variables
+        """
+        def err(name):
+            raise ValueError, "Unknown option name \"%s\" found while\n" \
+                "examining zsh_ attributes for the %s command" % (
+                    name, self.cmd_name)
+
+        for name in itertools.chain(self.altArgDescr, self.actionDescr,
+        self.actions, self.multiUse):
+            if name not in self.optAll_d:
+                err(name)
+
+        for seq in self.mutuallyExclusive:
+            for name in seq:
+                if name not in self.optAll_d:
+                    err(name)
+
+
+    def excludeStr(self, longname, buildShort=False):
+        """
+        Generate an "exclusion string" for the given option
+
+        @type longname: C{str}
+        @param longname: The long name of the option
+                         (i.e. "verbose" instead of "v")
+
+        @type buildShort: C{bool}
+        @param buildShort: May be True to indicate we're building an excludes
+                           string for the short option that corresponds to
+                           the given long opt
+
+        @return: The generated C{str}
+        """
+        if longname in self.excludes:
+            exclusions = self.excludes[longname][:]
+        else:
+            exclusions = []
+
+        # if longname isn't a multiUse option (can't appear on the cmd line more
+        # than once), then we have to exclude the short option if we're
+        # building for the long option, and vice versa.
+        if longname not in self.multiUse:
+            if buildShort is False:
+                short = self.getShortOption(longname)
+                if short is not None:
+                    exclusions.append(short)
+            else:
+                exclusions.append(longname)
+
+        if not exclusions:
+            return ''
+
+        strings = []
+        for optName in exclusions:
+            if len(optName) == 1:
+                # short option
+                strings.append("-" + optName)
+            else:
+                strings.append("--" + optName)
+        return "(%s)" % " ".join(strings)
+
+
+    def makeExcludesDict(self):
+        """
+        @return: A C{dict} that maps each option name appearing in
+        self.mutuallyExclusive to a list of those option names that
+        it is mutually exclusive with (can't appear on the cmd line with)
+        """
+
+        #create a mapping of long option name -> single character name
+        longToShort = {}
+        for optList in itertools.chain(self.optParams, self.optFlags):
+            try:
+                if optList[1] != None:
+                    longToShort[optList[0]] = optList[1]
+            except IndexError:
+                pass
+
+        excludes = {}
+        for lst in self.mutuallyExclusive:
+            for i, longname in enumerate(lst):
+                tmp = []
+                tmp.extend(lst[:i])
+                tmp.extend(lst[i+1:])
+                for name in tmp[:]:
+                    if name in longToShort:
+                        tmp.append(longToShort[name])
+
+                if longname in excludes:
+                    excludes[longname].extend(tmp)
+                else:
+                    excludes[longname] = tmp
+        return excludes
+
+
+    def writeOpt(self, longname):
+        """
+        Write out the zsh code for the given argument. This is just part of the
+        one big call to _arguments
+
+        @type longname: C{str}
+        @param longname: The long name of the option
+            (i.e. "verbose" instead of "v")
+
+        @return: C{None}
+        """
+        if longname in self.optFlags_d:
+            # It's a flag option. Not one that takes a parameter.
+            long_field = "--%s" % longname
+        else:
+            long_field = "--%s=" % longname
+
+        short = self.getShortOption(longname)
+        if short != None:
+            short_field = "-" + short
+        else:
+            short_field = ''
+
+        descr = self.getDescription(longname)
+        descr_field = descr.replace("[", "\[")
+        descr_field = descr_field.replace("]", "\]")
+        descr_field = '[%s]' % descr_field
+
+        if longname in self.actionDescr:
+            actionDescr_field = self.actionDescr[longname]
+        else:
+            actionDescr_field = descr
+
+        action_field = self.getAction(longname)
+        if longname in self.multiUse:
+            multi_field = '*'
+        else:
+            multi_field = ''
+
+        longExclusions_field = self.excludeStr(longname)
+
+        if short:
+            #we have to write an extra line for the short option if we have one
+            shortExclusions_field = self.excludeStr(longname, buildShort=True)
+            self.file.write(escape('%s%s%s%s%s' % (shortExclusions_field,
+                multi_field, short_field, descr_field, action_field)))
+            self.file.write(' \\\n')
+
+        self.file.write(escape('%s%s%s%s%s' % (longExclusions_field,
+            multi_field, long_field, descr_field, action_field)))
+        self.file.write(' \\\n')
+
+
+    def getAction(self, longname):
+        """
+        Return a zsh "action" string for the given argument
+        @return: C{str}
+        """
+        if longname in self.actions:
+            if callable(self.actions[longname]):
+                action = self.actions[longname]()
+            else:
+                action = self.actions[longname]
+            return ":%s:%s" % (self.getActionDescr(longname), action)
+        if longname in self.optParams_d:
+            return ':%s:_files' % self.getActionDescr(longname)
+        return ''
+
+
+    def getActionDescr(self, longname):
+        """
+        Return the description to be used when this argument is completed
+        @return: C{str}
+        """
+        if longname in self.actionDescr:
+            return self.actionDescr[longname]
+        else:
+            return longname
+
+
+    def getDescription(self, longname):
+        """
+        Return the description to be used for this argument
+        @return: C{str}
+        """
+        #check if we have an alternate descr for this arg, and if so use it
+        if longname in self.altArgDescr:
+            return self.altArgDescr[longname]
+
+        #otherwise we have to get it from the optFlags or optParams
+        try:
+            descr = self.optFlags_d[longname][1]
+        except KeyError:
+            try:
+                descr = self.optParams_d[longname][2]
+            except KeyError:
+                descr = None
+
+        if descr is not None:
+            return descr
+
+        # let's try to get it from the opt_foo method docstring if there is one
+        longMangled = longname.replace('-', '_') # this is what t.p.usage does
+        obj = getattr(self.options, 'opt_%s' % longMangled, None)
+        if obj:
+            descr = descrFromDoc(obj)
+            if descr is not None:
+                return descr
+
+        return longname # we really ought to have a good description to use
+
+
+    def getShortOption(self, longname):
+        """
+        Return the short option letter or None
+        @return: C{str} or C{None}
+        """
+        optList = self.optAll_d[longname]
+        try:
+            return optList[0] or None
+        except IndexError:
+            pass
+
+
+    def addAdditionalOptions(self):
+        """
+        Add additional options to the optFlags and optParams lists.
+        These will be defined by 'opt_foo' methods of the Options subclass
+        @return: C{None}
+        """
+        methodsDict = {}
+        reflect.accumulateMethods(self.options, methodsDict, 'opt_')
+        methodToShort = {}
+        for name in methodsDict.copy():
+            if len(name) == 1:
+                methodToShort[methodsDict[name]] = name
+                del methodsDict[name]
+
+        for methodName, methodObj in methodsDict.items():
+            longname = methodName.replace('_', '-') # t.p.usage does this
+            # if this option is already defined by the optFlags or
+            # optParameters then we don't want to override that data
+            if longname in self.optAll_d:
+                continue
+
+            descr = self.getDescription(longname)
+
+            short = None
+            if methodObj in methodToShort:
+                short = methodToShort[methodObj]
+
+            reqArgs = methodObj.im_func.func_code.co_argcount
+            if reqArgs == 2:
+                self.optParams.append([longname, short, None, descr])
+                self.optParams_d[longname] = [short, None, descr]
+                self.optAll_d[longname] = [short, None, descr]
+            elif reqArgs == 1:
+                self.optFlags.append([longname, short, descr])
+                self.optFlags_d[longname] = [short, descr]
+                self.optAll_d[longname] = [short, None, descr]
+            else:
+                raise TypeError, '%r has wrong number ' \
+                                 'of arguments' % (methodObj,)
+
+
+
+def descrFromDoc(obj):
+    """
+    Generate an appropriate description from the docstring of the given object
+    """
+    if obj.__doc__ is None:
+        return None
+
+    lines = obj.__doc__.split("\n")
+    descr = None
+    try:
+        if lines[0] != "" and not lines[0].isspace():
+            descr = lines[0].lstrip()
+        # skip first line if it's blank
+        elif lines[1] != "" and not lines[1].isspace():
+            descr = lines[1].lstrip()
+    except IndexError:
+        pass
+    return descr
+
+
+
+def firstLine(s):
+    """
+    Return the first line of the given string
+    """
+    try:
+        i = s.index('\n')
+        return s[:i]
+    except ValueError:
+        return s
+
+
+
+def escape(str):
+    """
+    Shell escape the given string
+    """
+    return commands.mkarg(str)[1:]
+
+
+
+def siteFunctionsPath():
+    """
+    Return the path to the system-wide site-functions directory or
+    C{None} if it cannot be determined
+    """
+    try:
+        cmd = "zsh -f -c 'echo ${(M)fpath:#/*/site-functions}'"
+        output = commands.getoutput(cmd)
+        if os.path.isdir(output):
+            return output
+    except:
+        pass
+
+
+
+generateFor = [('conch', 'twisted.conch.scripts.conch', 'ClientOptions'),
+               ('mktap', 'twisted.scripts.mktap', 'FirstPassOptions'),
+               ('trial', 'twisted.scripts.trial', 'Options'),
+               ('cftp', 'twisted.conch.scripts.cftp', 'ClientOptions'),
+               ('tapconvert', 'twisted.scripts.tapconvert', 'ConvertOptions'),
+               ('twistd', 'twisted.scripts.twistd', 'ServerOptions'),
+               ('ckeygen', 'twisted.conch.scripts.ckeygen', 'GeneralOptions'),
+               ('lore', 'twisted.lore.scripts.lore', 'Options'),
+               ('pyhtmlizer', 'twisted.scripts.htmlizer', 'Options'),
+               ('tap2deb', 'twisted.scripts.tap2deb', 'MyOptions'),
+               ('tkconch', 'twisted.conch.scripts.tkconch', 'GeneralOptions'),
+               ('manhole', 'twisted.scripts.manhole', 'MyOptions'),
+               ('tap2rpm', 'twisted.scripts.tap2rpm', 'MyOptions'),
+               ]
+
+specialBuilders = {'mktap'  : MktapBuilder,
+                   'twistd' : TwistdBuilder}
+
+
+
+def makeCompFunctionFiles(out_path, generateFor=generateFor,
+                          specialBuilders=specialBuilders):
+    """
+    Generate completion function files in the given directory for all
+    twisted commands
+
+    @type out_path: C{str}
+    @param out_path: The path to the directory to generate completion function
+                     files in
+
+    @param generateFor: Sequence in the form of the 'generateFor' top-level
+                        variable as defined in this module. Indicates what
+                        commands to build completion files for.
+
+    @param specialBuilders: Sequence in the form of the 'specialBuilders'
+                            top-level variable as defined in this module.
+                            Indicates what commands require a special
+                            Builder class.
+
+    @return: C{list} of 2-tuples of the form (cmd_name, error) indicating
+             commands that we skipped building completions for. cmd_name
+             is the name of the skipped command, and error is the Exception
+             that was raised when trying to import the script module.
+             Commands are usually skipped due to a missing dependency,
+             e.g. Tkinter.
+    """
+    skips = []
+    for cmd_name, module_name, class_name in generateFor:
+        if module_name is None:
+            # create empty file
+            f = _openCmdFile(out_path, cmd_name)
+            f.close()
+            continue
+        try:
+            m = __import__('%s' % (module_name,), None, None, (class_name))
+            f = _openCmdFile(out_path, cmd_name)
+            o = getattr(m, class_name)() # instantiate Options class
+
+            if cmd_name in specialBuilders:
+                b = specialBuilders[cmd_name](cmd_name, o, f)
+                b.write()
+            else:
+                b = Builder(cmd_name, o, f)
+                b.write()
+        except Exception, e:
+            skips.append( (cmd_name, e) )
+            continue
+    return skips
+
+
+
+def _openCmdFile(out_path, cmd_name):
+    return file(os.path.join(out_path, '_'+cmd_name), 'w')
+
+
+
+def run():
+    options = MyOptions()
+    try:
+        options.parseOptions(sys.argv[1:])
+    except usage.UsageError, e:
+        print e
+        print options.getUsage()
+        sys.exit(2)
+
+    if options['install']:
+        import twisted
+        dir = os.path.join(os.path.dirname(twisted.__file__), "python", "zsh")
+        skips = makeCompFunctionFiles(dir)
+    else:
+        skips = makeCompFunctionFiles(options['directory'])
+
+    for cmd_name, error in skips:
+        sys.stderr.write("zshcomp: Skipped building for %s. Script module " \
+                         "could not be imported:\n" % (cmd_name,))
+        sys.stderr.write(str(error)+'\n')
+    if skips:
+        sys.exit(3)
+
+
+
+if __name__ == '__main__':
+    run()
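
A minimal sketch (not part of the patch) of how the pieces above fit together
under Python 2: a hypothetical usage.Options subclass carrying the zsh_* class
variables described in the module docstring, a single completion file written
with Builder, and a bulk run through makeCompFunctionFiles. MyCommandOptions,
the "_mycommand" filename and the /tmp/zsh-completions directory are invented
for illustration::

    # Illustrative sketch only; assumes this Twisted tree is importable under
    # Python 2 and that /tmp/zsh-completions already exists.
    from twisted.python import usage, zshcomp

    class MyCommandOptions(usage.Options):
        optFlags = [["verbose", "v", "Print more output"]]
        optParameters = [["logfile", "l", None, "Log file name"]]

        # zsh_* metadata consumed by zshcomp.ArgumentsGenerator
        zsh_actions = {"logfile": '_files -g "*.log"'}
        zsh_actionDescr = {"logfile": "log file name"}
        zsh_multiUse = ["verbose"]

    # Write a completion function for a single command, as in the docstring.
    out = open('_mycommand', 'w')
    zshcomp.Builder("mycommand", MyCommandOptions(), out).write()
    out.close()

    # Regenerate completion files for all Twisted commands; the return value
    # lists (cmd_name, error) pairs for commands that could not be built.
    skips = zshcomp.makeCompFunctionFiles('/tmp/zsh-completions')
    for cmd_name, error in skips:
        print("zshcomp: skipped %s: %s" % (cmd_name, error))
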
diff --git a/ThirdParty/Twisted/twisted/runner/__init__.py b/ThirdParty/Twisted/twisted/runner/__init__.py
new file mode 100644
index 0000000..06b5d4b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/__init__.py
@@ -0,0 +1,15 @@
+"""
+Twisted runner: run and monitor processes
+
+Maintainer: Andrew Bennetts
+
+classic inetd(8) support:
+Future Plans: The basic design should be final.  There are some bugs that need
+fixing regarding UDP and Sun-RPC support.  Perhaps some day xinetd
+compatibility will be added.
+
+procmon: monitor and restart processes
+"""
+
+from twisted.runner._version import version
+__version__ = version.short()
diff --git a/ThirdParty/Twisted/twisted/runner/_version.py b/ThirdParty/Twisted/twisted/runner/_version.py
new file mode 100644
index 0000000..337e08d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.runner', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/runner/inetd.py b/ThirdParty/Twisted/twisted/runner/inetd.py
new file mode 100644
index 0000000..010b89e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/inetd.py
@@ -0,0 +1,70 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+
+"""
+Twisted inetd.
+
+Maintainer: Andrew Bennetts
+
+Future Plans: Bugfixes.  Specifically for UDP and Sun-RPC, which don't work
+correctly yet.
+"""
+
+import os
+
+from twisted.internet import process, reactor, fdesc
+from twisted.internet.protocol import Protocol, ServerFactory
+from twisted.protocols import wire
+
+# A dict of known 'internal' services (i.e. those that don't involve spawning
+# another process).
+internalProtocols = {
+    'echo': wire.Echo,
+    'chargen': wire.Chargen,
+    'discard': wire.Discard,
+    'daytime': wire.Daytime,
+    'time': wire.Time,
+}
+            
+
+class InetdProtocol(Protocol):
+    """Forks a child process on connectionMade, passing the socket as fd 0."""
+    def connectionMade(self):
+        sockFD = self.transport.fileno()
+        childFDs = {0: sockFD, 1: sockFD}
+        if self.factory.stderrFile:
+            childFDs[2] = self.factory.stderrFile.fileno()
+
+        # processes run by inetd expect blocking sockets
+        # FIXME: maybe this should be done in process.py?  are other uses of
+        #        Process possibly affected by this?
+        fdesc.setBlocking(sockFD)
+        if childFDs.has_key(2):
+            fdesc.setBlocking(childFDs[2])
+
+        service = self.factory.service
+        uid = service.user
+        gid = service.group
+
+        # don't tell Process to change our UID/GID if it's what we
+        # already are
+        if uid == os.getuid():
+            uid = None
+        if gid == os.getgid():
+            gid = None
+
+        process.Process(None, service.program, service.programArgs, os.environ,
+                        None, None, uid, gid, childFDs)
+
+        reactor.removeReader(self.transport)
+        reactor.removeWriter(self.transport)
+                        
+
+class InetdFactory(ServerFactory):
+    protocol = InetdProtocol
+    stderrFile = None
+    
+    def __init__(self, service):
+        self.service = service
diff --git a/ThirdParty/Twisted/twisted/runner/inetdconf.py b/ThirdParty/Twisted/twisted/runner/inetdconf.py
new file mode 100644
index 0000000..f06a2ab
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/inetdconf.py
@@ -0,0 +1,194 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+"""
+Parser for inetd.conf files
+
+Maintainer: Andrew Bennetts
+
+Future Plans: xinetd configuration file support?
+"""
+
+# Various exceptions
+class InvalidConfError(Exception):
+    """Invalid configuration file"""
+
+
+class InvalidInetdConfError(InvalidConfError):
+    """Invalid inetd.conf file"""
+
+
+class InvalidServicesConfError(InvalidConfError):
+    """Invalid services file"""
+
+
+class InvalidRPCServicesConfError(InvalidConfError):
+    """Invalid rpc services file"""
+
+
+class UnknownService(Exception):
+    """Unknown service name"""
+
+
+class SimpleConfFile:
+    """Simple configuration file parser superclass.
+
+    Filters out comments and empty lines (which includes lines that only 
+    contain comments).
+
+    To use this class, override parseLine or parseFields.
+    """
+    
+    commentChar = '#'
+    defaultFilename = None
+    
+    def parseFile(self, file=None):
+        """Parse a configuration file
+        
+        If file is None and self.defaultFilename is set, it will open
+        defaultFilename and use it.
+        """
+        if file is None and self.defaultFilename:
+            file = open(self.defaultFilename,'r')
+            
+        for line in file.readlines():
+            # Strip out comments
+            comment = line.find(self.commentChar)
+            if comment != -1:
+                line = line[:comment]
+
+            # Strip whitespace
+            line = line.strip()
+
+            # Skip empty lines (and lines which only contain comments)
+            if not line:
+                continue
+
+            self.parseLine(line)
+
+    def parseLine(self, line):
+        """Override this.
+        
+        By default, this will split the line on whitespace and call
+        self.parseFields (catching any errors).
+        """
+        try:
+            self.parseFields(*line.split())
+        except ValueError:
+            raise InvalidInetdConfError, 'Invalid line: ' + repr(line)
+    
+    def parseFields(self, *fields):
+        """Override this."""
+
+
+class InetdService:
+    """A simple description of an inetd service."""
+    name = None
+    port = None
+    socketType = None
+    protocol = None
+    wait = None
+    user = None
+    group = None
+    program = None
+    programArgs = None
+    
+    def __init__(self, name, port, socketType, protocol, wait, user, group,
+                 program, programArgs):
+        self.name = name
+        self.port = port
+        self.socketType = socketType
+        self.protocol = protocol
+        self.wait = wait
+        self.user = user
+        self.group = group
+        self.program = program
+        self.programArgs = programArgs
+
+
+class InetdConf(SimpleConfFile):
+    """Configuration parser for a traditional UNIX inetd(8)"""
+
+    defaultFilename = '/etc/inetd.conf'
+    
+    def __init__(self, knownServices=None):
+        self.services = []
+        
+        if knownServices is None:
+            knownServices = ServicesConf()
+            knownServices.parseFile()
+        self.knownServices = knownServices
+
+    def parseFields(self, serviceName, socketType, protocol, wait, user,
+                    program, *programArgs):
+        """Parse an inetd.conf file.
+
+        Implemented from the description in the Debian inetd.conf man page.
+        """
+        # Extract user (and optional group)
+        user, group = (user.split('.') + [None])[:2]
+
+        # Find the port for a service
+        port = self.knownServices.services.get((serviceName, protocol), None)
+        if not port and not protocol.startswith('rpc/'):
+            # FIXME: Should this be discarded/ignored, rather than throwing
+            #        an exception?
+            try:
+                port = int(serviceName)
+                serviceName = 'unknown'
+            except:
+                raise UnknownService, "Unknown service: %s (%s)" \
+                      % (serviceName, protocol)
+
+        self.services.append(InetdService(serviceName, port, socketType,
+                                          protocol, wait, user, group, program,
+                                          programArgs))
+            
+            
+class ServicesConf(SimpleConfFile):
+    """/etc/services parser
+    
+    @ivar services: dict mapping service names to (port, protocol) tuples.
+    """
+    
+    defaultFilename = '/etc/services'
+
+    def __init__(self):
+        self.services = {}
+
+    def parseFields(self, name, portAndProtocol, *aliases):
+        try:
+            port, protocol = portAndProtocol.split('/')
+            port = long(port)
+        except:
+            raise InvalidServicesConfError, 'Invalid port/protocol:' + \
+                                            repr(portAndProtocol)
+
+        self.services[(name, protocol)] = port
+        for alias in aliases:
+            self.services[(alias, protocol)] = port
+
+
+class RPCServicesConf(SimpleConfFile):
+    """/etc/rpc parser
+
+    @ivar self.services: dict mapping rpc service names to rpc ports.
+    """
+
+    defaultFilename = '/etc/rpc'
+
+    def __init__(self):
+        self.services = {}
+    
+    def parseFields(self, name, port, *aliases):
+        try:
+            port = long(port)
+        except:
+            raise InvalidRPCServicesConfError, 'Invalid port:' + repr(port)
+                        
+        self.services[name] = port
+        for alias in aliases:
+            self.services[alias] = port
+
+
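
As a quick orientation to the parser classes above, here is a hypothetical
sketch (not part of the patch) that builds the /etc/services lookup table,
parses /etc/inetd.conf with it, and shows the SimpleConfFile subclassing
pattern the docstring describes. The EchoConf class is invented for
illustration, and the sketch assumes the usual /etc/services and
/etc/inetd.conf files exist on the host::

    # Illustrative sketch only; Python 2, with this Twisted tree importable.
    from twisted.runner import inetdconf

    # Port lookup table keyed on (service name, protocol), from /etc/services.
    known = inetdconf.ServicesConf()
    known.parseFile()            # uses defaultFilename = '/etc/services'
    print(known.services.get(('ftp', 'tcp')))    # e.g. 21

    # Parse /etc/inetd.conf; each entry becomes an InetdService instance.
    conf = inetdconf.InetdConf(knownServices=known)
    conf.parseFile()             # uses defaultFilename = '/etc/inetd.conf'
    for svc in conf.services:
        print("%s %s/%s -> %s" % (svc.name, svc.port, svc.protocol, svc.program))

    # The SimpleConfFile pattern: override parseFields, which receives the
    # whitespace-separated fields of each comment-stripped, non-empty line.
    class EchoConf(inetdconf.SimpleConfFile):
        def parseFields(self, *fields):
            print(fields)
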
diff --git a/ThirdParty/Twisted/twisted/runner/inetdtap.py b/ThirdParty/Twisted/twisted/runner/inetdtap.py
new file mode 100644
index 0000000..3e62877
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/inetdtap.py
@@ -0,0 +1,163 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# 
+
+"""
+Twisted inetd TAP support
+
+Maintainer: Andrew Bennetts
+
+Future Plans: more configurability.
+"""
+
+import os, pwd, grp, socket
+
+from twisted.runner import inetd, inetdconf
+from twisted.python import log, usage
+from twisted.internet.protocol import ServerFactory
+from twisted.application import internet, service as appservice
+
+try:
+    import portmap
+    rpcOk = 1
+except ImportError:
+    rpcOk = 0
+
+
+# Protocol map
+protocolDict = {'tcp': socket.IPPROTO_TCP, 'udp': socket.IPPROTO_UDP}
+
+
+class Options(usage.Options):
+
+    optParameters = [
+        ['rpc', 'r', '/etc/rpc', 'RPC procedure table file'],
+        ['file', 'f', '/etc/inetd.conf', 'Service configuration file']
+    ]
+
+    optFlags = [['nointernal', 'i', "Don't run internal services"]]
+
+    compData = usage.Completions(
+        optActions={"file": usage.CompleteFiles('*.conf')}
+        )
+
+class RPCServer(internet.TCPServer):
+
+    def __init__(self, rpcVersions, rpcConf, proto, service):
+        internet.TCPServer.__init__(self, 0, ServerFactory())
+        self.rpcConf = rpcConf
+        self.proto = proto
+        self.service = service
+
+    def startService(self):
+        internet.TCPServer.startService(self)
+        import portmap
+        portNo = self._port.getHost()[2]
+        service = self.service
+        for version in rpcVersions:
+            portmap.set(self.rpcConf.services[name], version, self.proto,
+                        portNo)
+            inetd.forkPassingFD(service.program, service.programArgs,
+                                os.environ, service.user, service.group, p)
+
+def makeService(config):
+    s = appservice.MultiService()
+    conf = inetdconf.InetdConf()
+    conf.parseFile(open(config['file']))
+
+    rpcConf = inetdconf.RPCServicesConf()
+    try:
+        rpcConf.parseFile(open(config['rpc']))
+    except:
+        # We'll survive even if we can't read /etc/rpc
+        log.deferr()
+    
+    for service in conf.services:
+        rpc = service.protocol.startswith('rpc/')
+        protocol = service.protocol
+
+        if rpc and not rpcOk:
+            log.msg('Skipping rpc service due to lack of rpc support')
+            continue
+
+        if rpc:
+            # RPC has extra options, so extract that
+            protocol = protocol[4:]     # trim 'rpc/'
+            if not protocolDict.has_key(protocol):
+                log.msg('Bad protocol: ' + protocol)
+                continue
+            
+            try:
+                name, rpcVersions = service.name.split('/')
+            except ValueError:
+                log.msg('Bad RPC service/version: ' + service.name)
+                continue
+
+            if not rpcConf.services.has_key(name):
+                log.msg('Unknown RPC service: ' + repr(service.name))
+                continue
+
+            try:
+                if '-' in rpcVersions:
+                    start, end = map(int, rpcVersions.split('-'))
+                    rpcVersions = range(start, end+1)
+                else:
+                    rpcVersions = [int(rpcVersions)]
+            except ValueError:
+                log.msg('Bad RPC versions: ' + str(rpcVersions))
+                continue
+            
+        if (protocol, service.socketType) not in [('tcp', 'stream'),
+                                                  ('udp', 'dgram')]:
+            log.msg('Skipping unsupported type/protocol: %s/%s'
+                    % (service.socketType, service.protocol))
+            continue
+
+        # Convert the username into a uid (if necessary)
+        try:
+            service.user = int(service.user)
+        except ValueError:
+            try:
+                service.user = pwd.getpwnam(service.user)[2]
+            except KeyError:
+                log.msg('Unknown user: ' + service.user)
+                continue
+
+        # Convert the group name into a gid (if necessary)
+        if service.group is None:
+            # If no group was specified, use the user's primary group
+            service.group = pwd.getpwuid(service.user)[3]
+        else:
+            try:
+                service.group = int(service.group)
+            except ValueError:
+                try:
+                    service.group = grp.getgrnam(service.group)[2]
+                except KeyError:
+                    log.msg('Unknown group: ' + service.group)
+                    continue
+
+        if service.program == 'internal':
+            if config['nointernal']:
+                continue
+
+            # Internal services can use a standard ServerFactory
+            if not inetd.internalProtocols.has_key(service.name):
+                log.msg('Unknown internal service: ' + service.name)
+                continue
+            factory = ServerFactory()
+            factory.protocol = inetd.internalProtocols[service.name]
+        elif rpc:
+            i = RPCServer(rpcVersions, rpcConf, proto, service)
+            i.setServiceParent(s)
+            continue
+        else:
+            # Non-internal non-rpc services use InetdFactory
+            factory = inetd.InetdFactory(service)
+
+        if protocol == 'tcp':
+            internet.TCPServer(service.port, factory).setServiceParent(s)
+        elif protocol == 'udp':
+            raise RuntimeError("not supporting UDP")
+    return s
diff --git a/ThirdParty/Twisted/twisted/runner/portmap.c b/ThirdParty/Twisted/twisted/runner/portmap.c
new file mode 100644
index 0000000..ca0c1c9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/portmap.c
@@ -0,0 +1,57 @@
+/*
+ * Copyright (c) 2001-2004 Twisted Matrix Laboratories.
+ * See LICENSE for details.
+
+ * 
+ */
+
+/* portmap.c: A simple Python wrapper for pmap_set(3) and pmap_unset(3) */
+
+#include <Python.h>
+#include <rpc/rpc.h>
+#include <rpc/pmap_clnt.h>
+
+static PyObject * portmap_set(PyObject *self, PyObject *args)
+{
+	unsigned long program, version;
+	int protocol;
+	unsigned short port;
+	
+	if (!PyArg_ParseTuple(args, "llih:set", 
+			      &program, &version, &protocol, &port))
+		return NULL;
+
+	pmap_unset(program, version);
+	pmap_set(program, version, protocol, port);
+	
+	Py_INCREF(Py_None);
+	return Py_None;
+}
+
+static PyObject * portmap_unset(PyObject *self, PyObject *args)
+{
+	unsigned long program, version;
+	
+	if (!PyArg_ParseTuple(args, "ll:unset",
+			      &program, &version))
+		return NULL;
+
+	pmap_unset(program, version);
+	
+	Py_INCREF(Py_None);
+	return Py_None;
+}
+
+static PyMethodDef PortmapMethods[] = {
+	{"set", portmap_set, METH_VARARGS, 
+	 "Set an entry in the portmapper."},
+	{"unset", portmap_unset, METH_VARARGS,
+	 "Unset an entry in the portmapper."},
+	{NULL, NULL, 0, NULL}
+};
+
+void initportmap(void)
+{
+	(void) Py_InitModule("portmap", PortmapMethods);
+}
+
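
The C extension above exposes just two functions to Python, set() and unset(),
thin wrappers around pmap_set(3) and pmap_unset(3). A hypothetical sketch of
calling it directly follows; the program number, version, and port are
illustrative only, the extension must have been built, and a system portmapper
must be running for the registration to take effect::

    # Illustrative sketch only; requires the optional portmap extension.
    import socket
    import portmap

    PROGRAM = 100003      # example ONC RPC program number (NFS)
    VERSION = 3

    # Register PROGRAM/VERSION as reachable over TCP on port 2049.  The C
    # source above clears any stale mapping with pmap_unset() before calling
    # pmap_set().
    portmap.set(PROGRAM, VERSION, socket.IPPROTO_TCP, 2049)

    # Withdraw the registration for that program/version pair.
    portmap.unset(PROGRAM, VERSION)
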
diff --git a/ThirdParty/Twisted/twisted/runner/procmon.py b/ThirdParty/Twisted/twisted/runner/procmon.py
new file mode 100644
index 0000000..3515995
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/procmon.py
@@ -0,0 +1,310 @@
+# -*- test-case-name: twisted.runner.test.test_procmon -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Support for starting, monitoring, and restarting child processes.
+"""
+import warnings
+
+from twisted.python import log
+from twisted.internet import error, protocol, reactor as _reactor
+from twisted.application import service
+from twisted.protocols import basic
+
+class DummyTransport:
+
+    disconnecting = 0
+
+transport = DummyTransport()
+
+class LineLogger(basic.LineReceiver):
+
+    tag = None
+    delimiter = '\n'
+
+    def lineReceived(self, line):
+        log.msg('[%s] %s' % (self.tag, line))
+
+
+class LoggingProtocol(protocol.ProcessProtocol):
+
+    service = None
+    name = None
+    empty = 1
+
+    def connectionMade(self):
+        self.output = LineLogger()
+        self.output.tag = self.name
+        self.output.makeConnection(transport)
+
+
+    def outReceived(self, data):
+        self.output.dataReceived(data)
+        self.empty = data[-1] == '\n'
+
+    errReceived = outReceived
+
+
+    def processEnded(self, reason):
+        if not self.empty:
+            self.output.dataReceived('\n')
+        self.service.connectionLost(self.name)
+
+
+class ProcessMonitor(service.Service):
+    """
+    ProcessMonitor runs processes, monitors their progress, and restarts
+    them when they die.
+
+    The ProcessMonitor will not attempt to restart a process that appears to
+    die instantly -- with each "instant" death (less than 1 second, by
+    default), it will delay approximately twice as long before restarting
+    it.  A successful run will reset the counter.
+
+    The primary interface is L{addProcess} and L{removeProcess}. When the
+    service is running (that is, when the application it is attached to is
+    running), adding a process automatically starts it.
+
+    Each process has a name. This name string must uniquely identify the
+    process.  In particular, attempting to add two processes with the same
+    name will result in a C{KeyError}.
+
+    @type threshold: C{float}
+    @ivar threshold: How long a process has to live before the death is
+        considered instant, in seconds.  The default value is 1 second.
+
+    @type killTime: C{float}
+    @ivar killTime: How long a process being killed has to get its affairs
+        in order before it gets killed with an unmaskable signal.  The
+        default value is 5 seconds.
+
+    @type minRestartDelay: C{float}
+    @ivar minRestartDelay: The minimum time (in seconds) to wait before
+        attempting to restart a process.  Default 1s.
+
+    @type maxRestartDelay: C{float}
+    @ivar maxRestartDelay: The maximum time (in seconds) to wait before
+        attempting to restart a process.  Default 3600s (1h).
+
+    @type _reactor: L{IReactorProcess} provider
+    @ivar _reactor: A provider of L{IReactorProcess} and L{IReactorTime}
+        which will be used to spawn processes and register delayed calls.
+
+    """
+    threshold = 1
+    killTime = 5
+    minRestartDelay = 1
+    maxRestartDelay = 3600
+
+
+    def __init__(self, reactor=_reactor):
+        self._reactor = reactor
+
+        self.processes = {}
+        self.protocols = {}
+        self.delay = {}
+        self.timeStarted = {}
+        self.murder = {}
+        self.restart = {}
+
+
+    def __getstate__(self):
+        dct = service.Service.__getstate__(self)
+        del dct['_reactor']
+        dct['protocols'] = {}
+        dct['delay'] = {}
+        dct['timeStarted'] = {}
+        dct['murder'] = {}
+        dct['restart'] = {}
+        return dct
+
+
+    def addProcess(self, name, args, uid=None, gid=None, env={}):
+        """
+        Add a new monitored process and start it immediately if the
+        L{ProcessMonitor} service is running.
+
+        Note that args are passed to the system call, not to the shell. If
+        running the shell is desired, the common idiom is to use
+        C{ProcessMonitor.addProcess("name", ['/bin/sh', '-c', shell_script])}
+
+        @param name: A name for this process.  This value must be
+            unique across all processes added to this monitor.
+        @type name: C{str}
+        @param args: The argv sequence for the process to launch.
+        @param uid: The user ID to use to run the process.  If C{None},
+            the current UID is used.
+        @type uid: C{int}
+        @param gid: The group ID to use to run the process.  If C{None},
+            the current GID is used.
+        @type gid: C{int}
+        @param env: The environment to give to the launched process. See
+            L{IReactorProcess.spawnProcess}'s C{env} parameter.
+        @type env: C{dict}
+        @raises: C{KeyError} if a process with the given name already
+            exists
+        """
+        if name in self.processes:
+            raise KeyError("remove %s first" % (name,))
+        self.processes[name] = args, uid, gid, env
+        self.delay[name] = self.minRestartDelay
+        if self.running:
+            self.startProcess(name)
+
+
+    def removeProcess(self, name):
+        """
+        Stop the named process and remove it from the list of monitored
+        processes.
+
+        @type name: C{str}
+        @param name: A string that uniquely identifies the process.
+        """
+        self.stopProcess(name)
+        del self.processes[name]
+
+
+    def startService(self):
+        """
+        Start all monitored processes.
+        """
+        service.Service.startService(self)
+        for name in self.processes:
+            self.startProcess(name)
+
+
+    def stopService(self):
+        """
+        Stop all monitored processes and cancel all scheduled process restarts.
+        """
+        service.Service.stopService(self)
+
+        # Cancel any outstanding restarts
+        for name, delayedCall in self.restart.items():
+            if delayedCall.active():
+                delayedCall.cancel()
+
+        for name in self.processes:
+            self.stopProcess(name)
+
+
+    def connectionLost(self, name):
+        """
+        Called when a monitored process exits. If
+        L{ProcessMonitor.running} is C{True} (i.e. the service is started), the
+        process will be restarted.
+        If the process had been running for more than
+        L{ProcessMonitor.threshold} seconds it will be restarted immediately.
+        If the process had been running for less than
+        L{ProcessMonitor.threshold} seconds, the restart will be delayed and
+        each time the process dies before the configured threshold, the restart
+        delay will be doubled - up to a maximum delay of maxRestartDelay sec.
+
+        @type name: C{str}
+        @param name: A string that uniquely identifies the process
+            which exited.
+        """
+        # Cancel the scheduled _forceStopProcess function if the process
+        # dies naturally
+        if name in self.murder:
+            if self.murder[name].active():
+                self.murder[name].cancel()
+            del self.murder[name]
+
+        del self.protocols[name]
+
+        if self._reactor.seconds() - self.timeStarted[name] < self.threshold:
+            # The process died too fast - backoff
+            nextDelay = self.delay[name]
+            self.delay[name] = min(self.delay[name] * 2, self.maxRestartDelay)
+
+        else:
+            # The process had been running for a significant amount of time;
+            # restart it immediately.
+            nextDelay = 0
+            self.delay[name] = self.minRestartDelay
+
+        # Schedule a process restart if the service is running
+        if self.running and name in self.processes:
+            self.restart[name] = self._reactor.callLater(nextDelay,
+                                                         self.startProcess,
+                                                         name)
+
+
+    def startProcess(self, name):
+        """
+        @param name: The name of the process to be started
+        """
+        # If a protocol instance already exists, it means the process is
+        # already running
+        if name in self.protocols:
+            return
+
+        args, uid, gid, env = self.processes[name]
+
+        proto = LoggingProtocol()
+        proto.service = self
+        proto.name = name
+        self.protocols[name] = proto
+        self.timeStarted[name] = self._reactor.seconds()
+        self._reactor.spawnProcess(proto, args[0], args, uid=uid,
+                                          gid=gid, env=env)
+
+
+    def _forceStopProcess(self, proc):
+        """
+        @param proc: An L{IProcessTransport} provider
+        """
+        try:
+            proc.signalProcess('KILL')
+        except error.ProcessExitedAlready:
+            pass
+
+
+    def stopProcess(self, name):
+        """
+        @param name: The name of the process to be stopped
+        """
+        if name not in self.processes:
+            raise KeyError('Unrecognized process name: %s' % (name,))
+
+        proto = self.protocols.get(name, None)
+        if proto is not None:
+            proc = proto.transport
+            try:
+                proc.signalProcess('TERM')
+            except error.ProcessExitedAlready:
+                pass
+            else:
+                self.murder[name] = self._reactor.callLater(
+                                            self.killTime,
+                                            self._forceStopProcess, proc)
+
+
+    def restartAll(self):
+        """
+        Restart all processes. This is useful for third party management
+        services to allow a user to restart servers because of an outside change
+        in circumstances -- for example, a new version of a library is
+        installed.
+        """
+        for name in self.processes:
+            self.stopProcess(name)
+
+
+    def __repr__(self):
+        l = []
+        for name, proc in self.processes.items():
+            uidgid = ''
+            if proc[1] is not None:
+                uidgid = str(proc[1])
+            if proc[2] is not None:
+                uidgid += ':'+str(proc[2])
+
+            if uidgid:
+                uidgid = '(' + uidgid + ')'
+            l.append('%r%s: %r' % (name, uidgid, proc[0]))
+        return ('<' + self.__class__.__name__ + ' '
+                + ' '.join(l)
+                + '>')
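
ProcessMonitor above is an ordinary Twisted service, so it is normally attached to an application and configured through addProcess(). A minimal sketch, assuming it runs inside a .tac file or another running application (the names "monitored" and "sleeper" are illustrative):

    from twisted.application import service
    from twisted.runner.procmon import ProcessMonitor

    application = service.Application("monitored")   # illustrative application name
    pm = ProcessMonitor()
    pm.threshold = 1    # a child must survive this long to count as a clean start
    pm.killTime = 5     # grace period before stopProcess() escalates to KILL
    # args go straight to spawnProcess, not to a shell; wrap in /bin/sh -c if
    # shell syntax is needed, as the addProcess docstring suggests.
    pm.addProcess("sleeper", ["/bin/sh", "-c", "sleep 10"])
    pm.setServiceParent(application)   # started and stopped with the application
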
diff --git a/ThirdParty/Twisted/twisted/runner/procmontap.py b/ThirdParty/Twisted/twisted/runner/procmontap.py
new file mode 100644
index 0000000..c0e72a4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/procmontap.py
@@ -0,0 +1,73 @@
+# -*- test-case-name: twisted.runner.test.test_procmontap -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Support for creating a service which runs a process monitor.
+"""
+
+from twisted.python import usage
+from twisted.runner.procmon import ProcessMonitor
+
+
+class Options(usage.Options):
+    """
+    Define the options accepted by the I{twistd procmon} plugin.
+    """
+
+    synopsis = "[procmon options] commandline"
+
+    optParameters = [["threshold", "t", 1, "How long a process has to live "
+                      "before the death is considered instant, in seconds.",
+                      float],
+                     ["killtime", "k", 5, "How long a process being killed "
+                      "has to get its affairs in order before it gets killed "
+                      "with an unmaskable signal.",
+                      float],
+                     ["minrestartdelay", "m", 1, "The minimum time (in "
+                      "seconds) to wait before attempting to restart a "
+                      "process", float],
+                     ["maxrestartdelay", "M", 3600, "The maximum time (in "
+                      "seconds) to wait before attempting to restart a "
+                      "process", float]]
+
+    optFlags = []
+
+
+    longdesc = """\
+procmon runs processes, monitors their progress, and restarts them when they
+die.
+
+procmon will not attempt to restart a process that appears to die instantly;
+with each "instant" death (less than 1 second, by default), it will delay
+approximately twice as long before restarting it. A successful run will reset
+the counter.
+
+E.g. twistd procmon sleep 10"""
+
+    def parseArgs(self, *args):
+        """
+        Grab the command line that is going to be started and monitored
+        """
+        self['args'] = args
+
+
+    def postOptions(self):
+        """
+        Check for dependencies.
+        """
+        if len(self["args"]) < 1:
+            raise usage.UsageError("Please specify a process commandline")
+
+
+
+def makeService(config):
+    s = ProcessMonitor()
+
+    s.threshold = config["threshold"]
+    s.killTime = config["killtime"]
+    s.minRestartDelay = config["minrestartdelay"]
+    s.maxRestartDelay = config["maxrestartdelay"]
+
+    s.addProcess(" ".join(config["args"]), config["args"])
+    return s
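
makeService() above is what the twistd plugin machinery invokes for "twistd procmon ...", and the same path can be exercised programmatically, as the tests later in this commit do. A small sketch (the ping command line is only an example):

    from twisted.runner import procmontap

    options = procmontap.Options()
    options.parseOptions(["--threshold", "2", "ping", "-c", "3", "127.0.0.1"])
    monitor = procmontap.makeService(options)   # a configured ProcessMonitor
    # monitor.startService() would then launch and babysit the ping process.
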
diff --git a/ThirdParty/Twisted/twisted/runner/test/__init__.py b/ThirdParty/Twisted/twisted/runner/test/__init__.py
new file mode 100644
index 0000000..e6c22ba
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test package for Twisted Runner.
+"""
diff --git a/ThirdParty/Twisted/twisted/runner/test/test_procmon.py b/ThirdParty/Twisted/twisted/runner/test/test_procmon.py
new file mode 100644
index 0000000..d5217a0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/test/test_procmon.py
@@ -0,0 +1,477 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.runner.procmon}.
+"""
+
+from twisted.trial import unittest
+from twisted.runner.procmon import LoggingProtocol, ProcessMonitor
+from twisted.internet.error import (ProcessDone, ProcessTerminated,
+                                    ProcessExitedAlready)
+from twisted.internet.task import Clock
+from twisted.python.failure import Failure
+from twisted.test.proto_helpers import MemoryReactor
+
+
+
+class DummyProcess(object):
+    """
+    An incomplete and fake L{IProcessTransport} implementation for testing how
+    L{ProcessMonitor} behaves when its monitored processes exit.
+
+    @ivar _terminationDelay: the delay in seconds after which the DummyProcess
+        will appear to exit when it receives a TERM signal
+    """
+
+    pid = 1
+    proto = None
+
+    _terminationDelay = 1
+
+    def __init__(self, reactor, executable, args, environment, path,
+                 proto, uid=None, gid=None, usePTY=0, childFDs=None):
+
+        self.proto = proto
+
+        self._reactor = reactor
+        self._executable = executable
+        self._args = args
+        self._environment = environment
+        self._path = path
+        self._uid = uid
+        self._gid = gid
+        self._usePTY = usePTY
+        self._childFDs = childFDs
+
+
+    def signalProcess(self, signalID):
+        """
+        A partial implementation of signalProcess which can only handle TERM and
+        KILL signals.
+         - When a TERM signal is given, the dummy process will appear to exit
+           after L{DummyProcess._terminationDelay} seconds with exit code 0
+         - When a KILL signal is given, the dummy process will appear to exit
+           immediately with exit code 1.
+
+        @param signalID: The signal name or number to be issued to the process.
+        @type signalID: C{str}
+        """
+        params = {
+            "TERM": (self._terminationDelay, 0),
+            "KILL": (0, 1)
+        }
+
+        if self.pid is None:
+            raise ProcessExitedAlready()
+
+        if signalID in params:
+            delay, status = params[signalID]
+            self._signalHandler = self._reactor.callLater(
+                delay, self.processEnded, status)
+
+
+    def processEnded(self, status):
+        """
+        Deliver the process ended event to C{self.proto}.
+        """
+        self.pid = None
+        statusMap = {
+            0: ProcessDone,
+            1: ProcessTerminated,
+        }
+        self.proto.processEnded(Failure(statusMap[status](status)))
+
+
+
+class DummyProcessReactor(MemoryReactor, Clock):
+    """
+    @ivar spawnedProcesses: a list that keeps track of the fake process
+        instances built by C{spawnProcess}.
+    @type spawnedProcesses: C{list}
+    """
+    def __init__(self):
+        MemoryReactor.__init__(self)
+        Clock.__init__(self)
+
+        self.spawnedProcesses = []
+
+
+    def spawnProcess(self, processProtocol, executable, args=(), env={},
+                     path=None, uid=None, gid=None, usePTY=0,
+                     childFDs=None):
+        """
+        Fake L{reactor.spawnProcess} that logs all the process
+        arguments and returns a L{DummyProcess}.
+        """
+
+        proc = DummyProcess(self, executable, args, env, path,
+                            processProtocol, uid, gid, usePTY, childFDs)
+        processProtocol.makeConnection(proc)
+        self.spawnedProcesses.append(proc)
+        return proc
+
+
+
+class ProcmonTests(unittest.TestCase):
+    """
+    Tests for L{ProcessMonitor}.
+    """
+
+    def setUp(self):
+        """
+        Create a L{ProcessMonitor} wrapped around a fake reactor.
+        """
+        self.reactor = DummyProcessReactor()
+        self.pm = ProcessMonitor(reactor=self.reactor)
+        self.pm.minRestartDelay = 2
+        self.pm.maxRestartDelay = 10
+        self.pm.threshold = 10
+
+
+    def test_getStateIncludesProcesses(self):
+        """
+        The list of monitored processes must be included in the pickle state.
+        """
+        self.pm.addProcess("foo", ["arg1", "arg2"],
+                           uid=1, gid=2, env={})
+        self.assertEqual(self.pm.__getstate__()['processes'],
+                          {'foo': (['arg1', 'arg2'], 1, 2, {})})
+
+
+    def test_getStateExcludesReactor(self):
+        """
+        The private L{ProcessMonitor._reactor} instance variable should not be
+        included in the pickle state.
+        """
+        self.assertNotIn('_reactor', self.pm.__getstate__())
+
+
+    def test_addProcess(self):
+        """
+        L{ProcessMonitor.addProcess} only starts the named program if
+        L{ProcessMonitor.startService} has been called.
+        """
+        self.pm.addProcess("foo", ["arg1", "arg2"],
+                           uid=1, gid=2, env={})
+        self.assertEqual(self.pm.protocols, {})
+        self.assertEqual(self.pm.processes,
+                          {"foo": (["arg1", "arg2"], 1, 2, {})})
+        self.pm.startService()
+        self.reactor.advance(0)
+        self.assertEqual(self.pm.protocols.keys(), ["foo"])
+
+
+    def test_addProcessDuplicateKeyError(self):
+        """
+        L{ProcessMonitor.addProcess} raises a C{KeyError} if a process with the
+        given name already exists.
+        """
+        self.pm.addProcess("foo", ["arg1", "arg2"],
+                           uid=1, gid=2, env={})
+        self.assertRaises(KeyError, self.pm.addProcess,
+                          "foo", ["arg1", "arg2"], uid=1, gid=2, env={})
+
+
+    def test_addProcessEnv(self):
+        """
+        L{ProcessMonitor.addProcess} takes an C{env} parameter that is passed to
+        L{IReactorProcess.spawnProcess}.
+        """
+        fakeEnv = {"KEY": "value"}
+        self.pm.startService()
+        self.pm.addProcess("foo", ["foo"], uid=1, gid=2, env=fakeEnv)
+        self.reactor.advance(0)
+        self.assertEqual(
+            self.reactor.spawnedProcesses[0]._environment, fakeEnv)
+
+
+    def test_removeProcess(self):
+        """
+        L{ProcessMonitor.removeProcess} removes the process from the public
+        processes list.
+        """
+        self.pm.startService()
+        self.pm.addProcess("foo", ["foo"])
+        self.assertEqual(len(self.pm.processes), 1)
+        self.pm.removeProcess("foo")
+        self.assertEqual(len(self.pm.processes), 0)
+
+
+    def test_removeProcessUnknownKeyError(self):
+        """
+        L{ProcessMonitor.removeProcess} raises a C{KeyError} if the given
+        process name isn't recognised.
+        """
+        self.pm.startService()
+        self.assertRaises(KeyError, self.pm.removeProcess, "foo")
+
+
+    def test_startProcess(self):
+        """
+        When a process has been started, an instance of L{LoggingProtocol} will
+        be added to the L{ProcessMonitor.protocols} dict and the start time of
+        the process will be recorded in the L{ProcessMonitor.timeStarted}
+        dictionary.
+        """
+        self.pm.addProcess("foo", ["foo"])
+        self.pm.startProcess("foo")
+        self.assertIsInstance(self.pm.protocols["foo"], LoggingProtocol)
+        self.assertIn("foo", self.pm.timeStarted.keys())
+
+
+    def test_startProcessAlreadyStarted(self):
+        """
+        L{ProcessMonitor.startProcess} silently returns if the named process is
+        already started.
+        """
+        self.pm.addProcess("foo", ["foo"])
+        self.pm.startProcess("foo")
+        self.assertIdentical(None, self.pm.startProcess("foo"))
+
+
+    def test_startProcessUnknownKeyError(self):
+        """
+        L{ProcessMonitor.startProcess} raises a C{KeyError} if the given
+        process name isn't recognised.
+        """
+        self.assertRaises(KeyError, self.pm.startProcess, "foo")
+
+
+    def test_stopProcessNaturalTermination(self):
+        """
+        L{ProcessMonitor.stopProcess} immediately sends a TERM signal to the
+        named process.
+        """
+        self.pm.startService()
+        self.pm.addProcess("foo", ["foo"])
+        self.assertIn("foo", self.pm.protocols)
+
+        # Configure fake process to die 1 second after receiving term signal
+        timeToDie = self.pm.protocols["foo"].transport._terminationDelay = 1
+
+        # Advance the reactor to just before the short lived process threshold
+        # and leave enough time for the process to die
+        self.reactor.advance(self.pm.threshold)
+        # Then signal the process to stop
+        self.pm.stopProcess("foo")
+
+        # Advance the reactor just enough to give the process time to die and
+        # verify that the process restarts
+        self.reactor.advance(timeToDie)
+
+        # We expect it to be restarted immediately
+        self.assertEqual(self.reactor.seconds(),
+                         self.pm.timeStarted["foo"])
+
+
+    def test_stopProcessForcedKill(self):
+        """
+        L{ProcessMonitor.stopProcess} kills a process which fails to terminate
+        naturally within L{ProcessMonitor.killTime} seconds.
+        """
+        self.pm.startService()
+        self.pm.addProcess("foo", ["foo"])
+        self.assertIn("foo", self.pm.protocols)
+        self.reactor.advance(self.pm.threshold)
+        proc = self.pm.protocols["foo"].transport
+        # Arrange for the fake process to live longer than the killTime
+        proc._terminationDelay = self.pm.killTime + 1
+        self.pm.stopProcess("foo")
+        # If process doesn't die before the killTime, procmon should
+        # terminate it
+        self.reactor.advance(self.pm.killTime - 1)
+        self.assertEqual(0.0, self.pm.timeStarted["foo"])
+
+        self.reactor.advance(1)
+        # We expect it to be immediately restarted
+        self.assertEqual(self.reactor.seconds(), self.pm.timeStarted["foo"])
+
+
+    def test_stopProcessUnknownKeyError(self):
+        """
+        L{ProcessMonitor.stopProcess} raises a C{KeyError} if the given process
+        name isn't recognised.
+        """
+        self.assertRaises(KeyError, self.pm.stopProcess, "foo")
+
+
+    def test_stopProcessAlreadyStopped(self):
+        """
+        L{ProcessMonitor.stopProcess} silently returns if the named process
+        is already stopped. eg Process has crashed and a restart has been
+        is already stopped, e.g. the process has crashed and a restart has been
+        rescheduled, but in the meantime the service is stopped.
+        self.pm.addProcess("foo", ["foo"])
+        self.assertIdentical(None, self.pm.stopProcess("foo"))
+
+
+    def test_connectionLostLongLivedProcess(self):
+        """
+        L{ProcessMonitor.connectionLost} should immediately restart a process
+        if it has been running longer than L{ProcessMonitor.threshold} seconds.
+        """
+        self.pm.addProcess("foo", ["foo"])
+        # Schedule the process to start
+        self.pm.startService()
+        # advance the reactor to start the process
+        self.reactor.advance(0)
+        self.assertIn("foo", self.pm.protocols)
+        # Long time passes
+        self.reactor.advance(self.pm.threshold)
+        # Process dies after threshold
+        self.pm.protocols["foo"].processEnded(Failure(ProcessDone(0)))
+        self.assertNotIn("foo", self.pm.protocols)
+        # Process should be restarted immediately
+        self.reactor.advance(0)
+        self.assertIn("foo", self.pm.protocols)
+
+
+    def test_connectionLostMurderCancel(self):
+        """
+        L{ProcessMonitor.connectionLost} cancels a scheduled process killer and
+        deletes the DelayedCall from the L{ProcessMonitor.murder} list.
+        """
+        self.pm.addProcess("foo", ["foo"])
+        # Schedule the process to start
+        self.pm.startService()
+        # Advance 1s to start the process then ask ProcMon to stop it
+        self.reactor.advance(1)
+        self.pm.stopProcess("foo")
+        # A process killer has been scheduled, delayedCall is active
+        self.assertIn("foo", self.pm.murder)
+        delayedCall = self.pm.murder["foo"]
+        self.assertTrue(delayedCall.active())
+        # Advance to the point at which the dummy process exits
+        self.reactor.advance(
+            self.pm.protocols["foo"].transport._terminationDelay)
+        # Now the delayedCall has been cancelled and deleted
+        self.assertFalse(delayedCall.active())
+        self.assertNotIn("foo", self.pm.murder)
+
+
+    def test_connectionLostProtocolDeletion(self):
+        """
+        L{ProcessMonitor.connectionLost} removes the corresponding
+        ProcessProtocol instance from the L{ProcessMonitor.protocols} list.
+        """
+        self.pm.startService()
+        self.pm.addProcess("foo", ["foo"])
+        self.assertIn("foo", self.pm.protocols)
+        self.pm.protocols["foo"].transport.signalProcess("KILL")
+        self.reactor.advance(
+            self.pm.protocols["foo"].transport._terminationDelay)
+        self.assertNotIn("foo", self.pm.protocols)
+
+
+    def test_connectionLostMinMaxRestartDelay(self):
+        """
+        L{ProcessMonitor.connectionLost} will wait at least minRestartDelay
+        seconds and at most maxRestartDelay seconds before restarting a process.
+        """
+        self.pm.minRestartDelay = 2
+        self.pm.maxRestartDelay = 3
+
+        self.pm.startService()
+        self.pm.addProcess("foo", ["foo"])
+
+        self.assertEqual(self.pm.delay["foo"], self.pm.minRestartDelay)
+        self.reactor.advance(self.pm.threshold - 1)
+        self.pm.protocols["foo"].processEnded(Failure(ProcessDone(0)))
+        self.assertEqual(self.pm.delay["foo"], self.pm.maxRestartDelay)
+
+
+    def test_connectionLostBackoffDelayDoubles(self):
+        """
+        L{ProcessMonitor.connectionLost} doubles the restart delay each time
+        the process dies too quickly.
+        """
+        self.pm.startService()
+        self.pm.addProcess("foo", ["foo"])
+        self.reactor.advance(self.pm.threshold - 1) #9s
+        self.assertIn("foo", self.pm.protocols)
+        self.assertEqual(self.pm.delay["foo"], self.pm.minRestartDelay)
+        # process dies within the threshold and should not restart immediately
+        self.pm.protocols["foo"].processEnded(Failure(ProcessDone(0)))
+        self.assertEqual(self.pm.delay["foo"], self.pm.minRestartDelay * 2)
+
+
+    def test_startService(self):
+        """
+        L{ProcessMonitor.startService} starts all monitored processes.
+        """
+        self.pm.addProcess("foo", ["foo"])
+        # Schedule the process to start
+        self.pm.startService()
+        # advance the reactor to start the process
+        self.reactor.advance(0)
+        self.assertTrue("foo" in self.pm.protocols)
+
+
+    def test_stopService(self):
+        """
+        L{ProcessMonitor.stopService} should stop all monitored processes.
+        """
+        self.pm.addProcess("foo", ["foo"])
+        self.pm.addProcess("bar", ["bar"])
+        # Schedule the process to start
+        self.pm.startService()
+        # advance the reactor to start the processes
+        self.reactor.advance(self.pm.threshold)
+        self.assertIn("foo", self.pm.protocols)
+        self.assertIn("bar", self.pm.protocols)
+
+        self.reactor.advance(1)
+
+        self.pm.stopService()
+        # Advance to beyond the killTime - all monitored processes
+        # should have exited
+        self.reactor.advance(self.pm.killTime + 1)
+        # The processes shouldn't be restarted
+        self.assertEqual({}, self.pm.protocols)
+
+
+    def test_stopServiceCancelRestarts(self):
+        """
+        L{ProcessMonitor.stopService} should cancel any scheduled process
+        restarts.
+        """
+        self.pm.addProcess("foo", ["foo"])
+        # Schedule the process to start
+        self.pm.startService()
+        # advance the reactor to start the processes
+        self.reactor.advance(self.pm.threshold)
+        self.assertIn("foo", self.pm.protocols)
+
+        self.reactor.advance(1)
+        # Kill the process early
+        self.pm.protocols["foo"].processEnded(Failure(ProcessDone(0)))
+        self.assertTrue(self.pm.restart['foo'].active())
+        self.pm.stopService()
+        # Scheduled restart should have been cancelled
+        self.assertFalse(self.pm.restart['foo'].active())
+
+
+    def test_stopServiceCleanupScheduledRestarts(self):
+        """
+        L{ProcessMonitor.stopService} should cancel all scheduled process
+        restarts.
+        """
+        self.pm.threshold = 5
+        self.pm.minRestartDelay = 5
+        # Start service and add a process (started immediately)
+        self.pm.startService()
+        self.pm.addProcess("foo", ["foo"])
+        # Stop the process after 1s
+        self.reactor.advance(1)
+        self.pm.stopProcess("foo")
+        # Wait 1s for it to exit; it will be scheduled to restart 5s later
+        self.reactor.advance(1)
+        # Meanwhile stop the service
+        self.pm.stopService()
+        # Advance to beyond the process restart time
+        self.reactor.advance(6)
+        # The process shouldn't have restarted because stopService has cancelled
+        # all pending process restarts.
+        self.assertEqual(self.pm.protocols, {})
+
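The tests above all follow the same clock-driven pattern: build the monitor around DummyProcessReactor, then advance() the fake clock to fire scheduled starts, restarts, and kills. Condensed to its essentials (a sketch, not an additional test case):

    from twisted.runner.procmon import ProcessMonitor
    from twisted.runner.test.test_procmon import DummyProcessReactor

    reactor = DummyProcessReactor()
    pm = ProcessMonitor(reactor=reactor)
    pm.addProcess("foo", ["foo"])
    pm.startService()
    reactor.advance(0)               # advance the fake clock to run any scheduled calls
    assert "foo" in pm.protocols     # the fake child process is now "running"
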
diff --git a/ThirdParty/Twisted/twisted/runner/test/test_procmontap.py b/ThirdParty/Twisted/twisted/runner/test/test_procmontap.py
new file mode 100644
index 0000000..de394f4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/test/test_procmontap.py
@@ -0,0 +1,87 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.runner.procmontap}.
+"""
+
+from twisted.python.usage import UsageError
+from twisted.trial import unittest
+from twisted.runner.procmon import ProcessMonitor
+from twisted.runner import procmontap as tap
+
+
+class ProcessMonitorTapTest(unittest.TestCase):
+    """
+    Tests for L{twisted.runner.procmontap}'s option parsing and makeService
+    method.
+    """
+
+    def test_commandLineRequired(self):
+        """
+        The command line arguments must be provided.
+        """
+        opt = tap.Options()
+        self.assertRaises(UsageError, opt.parseOptions, [])
+
+
+    def test_threshold(self):
+        """
+        The threshold option is recognised as a parameter and coerced to
+        float.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--threshold', '7.5', 'foo'])
+        self.assertEqual(opt['threshold'], 7.5)
+
+
+    def test_killTime(self):
+        """
+        The killtime option is recognised as a parameter and coerced to float.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--killtime', '7.5', 'foo'])
+        self.assertEqual(opt['killtime'], 7.5)
+
+
+    def test_minRestartDelay(self):
+        """
+        The minrestartdelay option is recognised as a parameter and coerced to
+        float.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--minrestartdelay', '7.5', 'foo'])
+        self.assertEqual(opt['minrestartdelay'], 7.5)
+
+
+    def test_maxRestartDelay(self):
+        """
+        The maxrestartdelay option is recognised as a parameter and coerced to
+        float.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--maxrestartdelay', '7.5', 'foo'])
+        self.assertEqual(opt['maxrestartdelay'], 7.5)
+
+
+    def test_parameterDefaults(self):
+        """
+        The parameters all have default values
+        """
+        opt = tap.Options()
+        opt.parseOptions(['foo'])
+        self.assertEqual(opt['threshold'], 1)
+        self.assertEqual(opt['killtime'], 5)
+        self.assertEqual(opt['minrestartdelay'], 1)
+        self.assertEqual(opt['maxrestartdelay'], 3600)
+
+
+    def test_makeService(self):
+        """
+        The command line gets added as a process to the ProcessMonitor.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['ping', '-c', '3', '8.8.8.8'])
+        s = tap.makeService(opt)
+        self.assertIsInstance(s, ProcessMonitor)
+        self.assertIn('ping -c 3 8.8.8.8', s.processes)
diff --git a/ThirdParty/Twisted/twisted/runner/topfiles/NEWS b/ThirdParty/Twisted/twisted/runner/topfiles/NEWS
new file mode 100644
index 0000000..2123b9d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/topfiles/NEWS
@@ -0,0 +1,113 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted Runner 12.3.0 (2012-12-20)
+==================================
+
+No significant changes have been made for this release.
+
+
+Twisted Runner 12.2.0 (2012-08-26)
+==================================
+
+No significant changes have been made for this release.
+
+
+Twisted Runner 12.1.0 (2012-06-02)
+==================================
+
+Deprecations and Removals
+-------------------------
+ - ProcessMonitor.active, consistencyDelay, and consistency in
+   twisted.runner.procmon, deprecated since 10.1, have been
+   removed. (#5517)
+
+
+Twisted Runner 12.0.0 (2012-02-10)
+==================================
+
+No significant changes have been made for this release.
+
+
+Twisted Runner 11.1.0 (2011-11-15)
+==================================
+
+No significant changes have been made for this release.
+
+
+Twisted Runner 11.0.0 (2011-04-01)
+==================================
+
+No significant changes have been made for this release.
+
+
+Twisted Runner 10.2.0 (2010-11-29)
+==================================
+
+No significant changes have been made for this release.
+
+
+Twisted Runner 10.1.0 (2010-06-27)
+==================================
+
+Features
+--------
+ - twistd now has a procmon subcommand plugin - a convenient way to
+   monitor and automatically restart another process. (#4356)
+
+Deprecations and Removals
+-------------------------
+ - twisted.runner.procmon.ProcessMonitor's active, consistency, and
+   consistencyDelay attributes are now deprecated. (#1763)
+
+Other
+-----
+ - #3775
+
+
+Twisted Runner 10.0.0 (2010-03-01)
+==================================
+
+Other
+-----
+ - #3961
+
+
+Twisted Runner 9.0.0 (2009-11-24)
+=================================
+
+Features
+--------
+ - procmon.ProcessMonitor.addProcess now accepts an 'env' parameter which allows
+   users to specify the environment in which a process will be run (#3691)
+
+Other
+-----
+ - #3540
+
+
+Runner 8.2.0 (2008-12-16)
+=========================
+
+No interesting changes since Twisted 8.0.
+
+8.0.0 (2008-03-17)
+==================
+
+Misc
+----
+ - Remove all "API Stability" markers (#2847)
+
+
+0.2.0 (2006-05-24)
+==================
+
+Fixes
+-----
+ - Fix a bug that broke inetdtap.RPCServer.
+ - Misc: #1142
+
+
+0.1.0
+=====
+ - Pass *blocking* sockets to subprocesses run by inetd
diff --git a/ThirdParty/Twisted/twisted/runner/topfiles/README b/ThirdParty/Twisted/twisted/runner/topfiles/README
new file mode 100644
index 0000000..7dfd23f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/topfiles/README
@@ -0,0 +1,3 @@
+Twisted Runner 12.3.0
+
+Twisted Runner depends on Twisted.
diff --git a/ThirdParty/Twisted/twisted/runner/topfiles/setup.py b/ThirdParty/Twisted/twisted/runner/topfiles/setup.py
new file mode 100644
index 0000000..27f65d3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/runner/topfiles/setup.py
@@ -0,0 +1,35 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+try:
+    from twisted.python.dist import setup, ConditionalExtension as Extension
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+extensions = [
+    Extension("twisted.runner.portmap",
+              ["twisted/runner/portmap.c"],
+              condition=lambda builder: builder._check_header("rpc/rpc.h")),
+]
+
+if __name__ == '__main__':
+    setup(
+        twisted_subproject="runner",
+        # metadata
+        name="Twisted Runner",
+        description="Twisted Runner is a process management library and inetd "
+                    "replacement.",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python at twistedmatrix.com",
+        maintainer="Andrew Bennetts",
+        url="http://twistedmatrix.com/trac/wiki/TwistedRunner",
+        license="MIT",
+        long_description="""\
+Twisted Runner contains code useful for persistent process management
+with Python and Twisted, and has an almost full replacement for inetd.
+""",
+        # build stuff
+        conditionalExtensions=extensions,
+    )
diff --git a/ThirdParty/Twisted/twisted/scripts/__init__.py b/ThirdParty/Twisted/twisted/scripts/__init__.py
new file mode 100644
index 0000000..fcde968
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/__init__.py
@@ -0,0 +1,27 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Subpackage containing the modules that implement the command line tools.
+
+Note that these are imported by top-level scripts which are intended to be
+invoked directly from a shell.
+"""
+
+from twisted.python.versions import Version
+from twisted.python.deprecate import deprecatedModuleAttribute
+
+
+deprecatedModuleAttribute(
+    Version("Twisted", 11, 1, 0),
+    "Seek unzipping software outside of Twisted.",
+    __name__,
+    "tkunzip")
+
+deprecatedModuleAttribute(
+    Version("Twisted", 12, 1, 0),
+    "tapconvert has been deprecated.",
+    __name__,
+    "tapconvert")
+
+del Version, deprecatedModuleAttribute
diff --git a/ThirdParty/Twisted/twisted/scripts/_twistd_unix.py b/ThirdParty/Twisted/twisted/scripts/_twistd_unix.py
new file mode 100644
index 0000000..786249b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/_twistd_unix.py
@@ -0,0 +1,349 @@
+# -*- test-case-name: twisted.test.test_twistd -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import os, errno, sys
+
+from twisted.python import log, syslog, logfile, usage
+from twisted.python.util import switchUID, uidFromString, gidFromString
+from twisted.application import app, service
+from twisted.internet.interfaces import IReactorDaemonize
+from twisted import copyright
+
+
+def _umask(value):
+    return int(value, 8)
+
+
+class ServerOptions(app.ServerOptions):
+    synopsis = "Usage: twistd [options]"
+
+    optFlags = [['nodaemon','n',  "don't daemonize, don't use default umask of 0077"],
+                ['originalname', None, "Don't try to change the process name"],
+                ['syslog', None,   "Log to syslog, not to file"],
+                ['euid', '',
+                 "Set only effective user-id rather than real user-id. "
+                 "(This option has no effect unless the server is running as "
+                 "root, in which case it means not to shed all privileges "
+                 "after binding ports, retaining the option to regain "
+                 "privileges in cases such as spawning processes. "
+                 "Use with caution.)"],
+               ]
+
+    optParameters = [
+                     ['prefix', None,'twisted',
+                      "use the given prefix when syslogging"],
+                     ['pidfile','','twistd.pid',
+                      "Name of the pidfile"],
+                     ['chroot', None, None,
+                      'Chroot to a supplied directory before running'],
+                     ['uid', 'u', None, "The uid to run as.", uidFromString],
+                     ['gid', 'g', None, "The gid to run as.", gidFromString],
+                     ['umask', None, None,
+                      "The (octal) file creation mask to apply.", _umask],
+                    ]
+
+    compData = usage.Completions(
+        optActions={"pidfile": usage.CompleteFiles("*.pid"),
+                    "chroot": usage.CompleteDirs(descr="chroot directory"),
+                    "gid": usage.CompleteGroups(descr="gid to run as"),
+                    "uid": usage.CompleteUsernames(descr="uid to run as"),
+                    "prefix": usage.Completer(descr="syslog prefix"),
+                    },
+        )
+
+    def opt_version(self):
+        """Print version information and exit.
+        """
+        print 'twistd (the Twisted daemon) %s' % copyright.version
+        print copyright.copyright
+        sys.exit()
+
+
+    def postOptions(self):
+        app.ServerOptions.postOptions(self)
+        if self['pidfile']:
+            self['pidfile'] = os.path.abspath(self['pidfile'])
+
+
+def checkPID(pidfile):
+    if not pidfile:
+        return
+    if os.path.exists(pidfile):
+        try:
+            pid = int(open(pidfile).read())
+        except ValueError:
+            sys.exit('Pidfile %s contains non-numeric value' % pidfile)
+        try:
+            os.kill(pid, 0)
+        except OSError, why:
+            if why[0] == errno.ESRCH:
+                # The pid doesn't exist.
+                log.msg('Removing stale pidfile %s' % pidfile, isError=True)
+                os.remove(pidfile)
+            else:
+                sys.exit("Can't check status of PID %s from pidfile %s: %s" %
+                         (pid, pidfile, why[1]))
+        else:
+            sys.exit("""\
+Another twistd server is running, PID %s\n
+This could either be a previously started instance of your application or a
+different application entirely. To start a new one, either run it in some other
+directory, or use the --pidfile and --logfile parameters to avoid clashes.
+""" %  pid)
+
+
+
+class UnixAppLogger(app.AppLogger):
+    """
+    A logger able to log to syslog, to files, and to stdout.
+
+    @ivar _syslog: A flag indicating whether to use syslog instead of file
+        logging.
+    @type _syslog: C{bool}
+
+    @ivar _syslogPrefix: If C{sysLog} is C{True}, the string prefix to use for
+        syslog messages.
+    @type _syslogPrefix: C{str}
+
+    @ivar _nodaemon: A flag indicating that the process will not be daemonized.
+    @type _nodaemon: C{bool}
+    """
+
+    def __init__(self, options):
+        app.AppLogger.__init__(self, options)
+        self._syslog = options.get("syslog", False)
+        self._syslogPrefix = options.get("prefix", "")
+        self._nodaemon = options.get("nodaemon", False)
+
+
+    def _getLogObserver(self):
+        """
+        Create and return a suitable log observer for the given configuration.
+
+        The observer will go to syslog using the prefix C{_syslogPrefix} if
+        C{_syslog} is true.  Otherwise, it will go to the file named
+        C{_logfilename} or, if C{_nodaemon} is true and C{_logfilename} is
+        C{"-"}, to stdout.
+
+        @return: An object suitable to be passed to C{log.addObserver}.
+        """
+        if self._syslog:
+            return syslog.SyslogObserver(self._syslogPrefix).emit
+
+        if self._logfilename == '-':
+            if not self._nodaemon:
+                sys.exit('Daemons cannot log to stdout, exiting!')
+            logFile = sys.stdout
+        elif self._nodaemon and not self._logfilename:
+            logFile = sys.stdout
+        else:
+            if not self._logfilename:
+                self._logfilename = 'twistd.log'
+            logFile = logfile.LogFile.fromFullPath(self._logfilename)
+            try:
+                import signal
+            except ImportError:
+                pass
+            else:
+                # Override if signal is set to None or SIG_DFL (0)
+                if not signal.getsignal(signal.SIGUSR1):
+                    def rotateLog(signal, frame):
+                        from twisted.internet import reactor
+                        reactor.callFromThread(logFile.rotate)
+                    signal.signal(signal.SIGUSR1, rotateLog)
+        return log.FileLogObserver(logFile).emit
+
+
+
+def daemonize(reactor, os):
+    """
+    Daemonizes the application on Unix. This is done by the usual double
+    forking approach.
+
+    @see: U{http://code.activestate.com/recipes/278731/}
+    @see: W. Richard Stevens, "Advanced Programming in the Unix Environment",
+          1992, Addison-Wesley, ISBN 0-201-56317-7
+
+    @param reactor: The reactor in use.  If it provides L{IReactorDaemonize},
+        its daemonization-related callbacks will be invoked.
+
+    @param os: An object like the os module to use to perform the daemonization.
+    """
+
+    ## If the reactor requires hooks to be called for daemonization, call them.
+    ## Currently the only reactor which provides/needs that is KQueueReactor.
+    if IReactorDaemonize.providedBy(reactor):
+        reactor.beforeDaemonize()
+
+    if os.fork():   # launch child and...
+        os._exit(0) # kill off parent
+    os.setsid()
+    if os.fork():   # launch child and...
+        os._exit(0) # kill off parent again.
+    null = os.open('/dev/null', os.O_RDWR)
+    for i in range(3):
+        try:
+            os.dup2(null, i)
+        except OSError, e:
+            if e.errno != errno.EBADF:
+                raise
+    os.close(null)
+
+    if IReactorDaemonize.providedBy(reactor):
+        reactor.afterDaemonize()
+
+
+
+def launchWithName(name):
+    if name and name != sys.argv[0]:
+        exe = os.path.realpath(sys.executable)
+        log.msg('Changing process name to ' + name)
+        os.execv(exe, [name, sys.argv[0], '--originalname'] + sys.argv[1:])
+
+
+
+class UnixApplicationRunner(app.ApplicationRunner):
+    """
+    An ApplicationRunner which does Unix-specific things, like fork,
+    shed privileges, and maintain a PID file.
+    """
+    loggerFactory = UnixAppLogger
+
+    def preApplication(self):
+        """
+        Do pre-application-creation setup.
+        """
+        checkPID(self.config['pidfile'])
+        self.config['nodaemon'] = (self.config['nodaemon']
+                                   or self.config['debug'])
+        self.oldstdout = sys.stdout
+        self.oldstderr = sys.stderr
+
+
+    def postApplication(self):
+        """
+        To be called after the application is created: start the
+        application and run the reactor. After the reactor stops,
+        clean up PID files and such.
+        """
+        self.startApplication(self.application)
+        self.startReactor(None, self.oldstdout, self.oldstderr)
+        self.removePID(self.config['pidfile'])
+
+
+    def removePID(self, pidfile):
+        """
+        Remove the specified PID file, if possible.  Errors are logged, not
+        raised.
+
+        @type pidfile: C{str}
+        @param pidfile: The path to the PID tracking file.
+        """
+        if not pidfile:
+            return
+        try:
+            os.unlink(pidfile)
+        except OSError, e:
+            if e.errno == errno.EACCES or e.errno == errno.EPERM:
+                log.msg("Warning: No permission to delete pid file")
+            else:
+                log.err(e, "Failed to unlink PID file")
+        except:
+            log.err(None, "Failed to unlink PID file")
+
+
+    def setupEnvironment(self, chroot, rundir, nodaemon, umask, pidfile):
+        """
+        Set the filesystem root, the working directory, and daemonize.
+
+        @type chroot: C{str} or L{NoneType}
+        @param chroot: If not None, a path to use as the filesystem root (using
+            L{os.chroot}).
+
+        @type rundir: C{str}
+        @param rundir: The path to set as the working directory.
+
+        @type nodaemon: C{bool}
+        @param nodaemon: A flag which, if set, indicates that daemonization
+            should not be done.
+
+        @type umask: C{int} or L{NoneType}
+        @param umask: The value to which to change the process umask.
+
+        @type pidfile: C{str} or L{NoneType}
+        @param pidfile: If not C{None}, the path to a file into which to put
+            the PID of this process.
+        """
+        daemon = not nodaemon
+
+        if chroot is not None:
+            os.chroot(chroot)
+            if rundir == '.':
+                rundir = '/'
+        os.chdir(rundir)
+        if daemon and umask is None:
+            umask = 077
+        if umask is not None:
+            os.umask(umask)
+        if daemon:
+            from twisted.internet import reactor
+            daemonize(reactor, os)
+        if pidfile:
+            f = open(pidfile,'wb')
+            f.write(str(os.getpid()))
+            f.close()
+
+
+    def shedPrivileges(self, euid, uid, gid):
+        """
+        Change the UID and GID or the EUID and EGID of this process.
+
+        @type euid: C{bool}
+        @param euid: A flag which, if set, indicates that only the I{effective}
+            UID and GID should be set.
+
+        @type uid: C{int} or C{NoneType}
+        @param uid: If not C{None}, the UID to which to switch.
+
+        @type gid: C{int} or C{NoneType}
+        @param gid: If not C{None}, the GID to which to switch.
+        """
+        if uid is not None or gid is not None:
+            extra = euid and 'e' or ''
+            desc = '%suid/%sgid %s/%s' % (extra, extra, uid, gid)
+            try:
+                switchUID(uid, gid, euid)
+            except OSError:
+                log.msg('failed to set %s (are you root?) -- exiting.' % desc)
+                sys.exit(1)
+            else:
+                log.msg('set %s' % desc)
+
+
+    def startApplication(self, application):
+        """
+        Configure global process state based on the given application and run
+        the application.
+
+        @param application: An object which can be adapted to
+            L{service.IProcess} and L{service.IService}.
+        """
+        process = service.IProcess(application)
+        if not self.config['originalname']:
+            launchWithName(process.processName)
+        self.setupEnvironment(
+            self.config['chroot'], self.config['rundir'],
+            self.config['nodaemon'], self.config['umask'],
+            self.config['pidfile'])
+
+        service.IService(application).privilegedStartService()
+
+        uid, gid = self.config['uid'], self.config['gid']
+        if uid is None:
+            uid = process.uid
+        if gid is None:
+            gid = process.gid
+
+        self.shedPrivileges(self.config['euid'], uid, gid)
+        app.startApplication(application, not self.config['no_save'])
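
The daemonize() helper above is the textbook double fork plus setsid(), with the reactor hooks layered on top. Stripped of the Twisted-specific parts, the same idea looks roughly like this (simple_daemonize is an illustrative name; error handling is omitted):

    import os

    def simple_daemonize():
        # First fork: the parent exits, so the child is re-parented to init.
        if os.fork():
            os._exit(0)
        os.setsid()          # become a session leader, detach from the controlling tty
        # Second fork: the session leader exits, so the grandchild can never
        # reacquire a controlling terminal.
        if os.fork():
            os._exit(0)
        # Point stdin, stdout, and stderr at /dev/null.
        null = os.open('/dev/null', os.O_RDWR)
        for fd in range(3):
            os.dup2(null, fd)
        os.close(null)
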
diff --git a/ThirdParty/Twisted/twisted/scripts/_twistw.py b/ThirdParty/Twisted/twisted/scripts/_twistw.py
new file mode 100644
index 0000000..153b58a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/_twistw.py
@@ -0,0 +1,50 @@
+# -*- test-case-name: twisted.test.test_twistd -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.python import log
+from twisted.application import app, service, internet
+from twisted import copyright
+import sys, os
+
+
+
+class ServerOptions(app.ServerOptions):
+    synopsis = "Usage: twistd [options]"
+
+    optFlags = [['nodaemon','n',  "(for backwards compatibility)."],
+                ]
+
+    def opt_version(self):
+        """Print version information and exit.
+        """
+        print 'twistd (the Twisted Windows runner) %s' % copyright.version
+        print copyright.copyright
+        sys.exit()
+
+
+
+class WindowsApplicationRunner(app.ApplicationRunner):
+    """
+    An ApplicationRunner which avoids unix-specific things. No
+    forking, no PID files, no privileges.
+    """
+
+    def preApplication(self):
+        """
+        Do pre-application-creation setup.
+        """
+        self.oldstdout = sys.stdout
+        self.oldstderr = sys.stderr
+        os.chdir(self.config['rundir'])
+
+
+    def postApplication(self):
+        """
+        Start the application and run the reactor.
+        """
+        service.IService(self.application).privilegedStartService()
+        app.startApplication(self.application, not self.config['no_save'])
+        app.startApplication(internet.TimerService(0.1, lambda:None), 0)
+        self.startReactor(None, self.oldstdout, self.oldstderr)
+        log.msg("Server Shut Down.")
diff --git a/ThirdParty/Twisted/twisted/scripts/htmlizer.py b/ThirdParty/Twisted/twisted/scripts/htmlizer.py
new file mode 100644
index 0000000..4357809
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/htmlizer.py
@@ -0,0 +1,69 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+"""HTML pretty-printing for Python source code."""
+
+__version__ = '$Revision: 1.8 $'[11:-2]
+
+from twisted.python import htmlizer, usage
+from twisted import copyright
+
+import os, sys
+
+header = '''<html><head>
+<title>%(title)s</title>
+<meta name=\"Generator\" content="%(generator)s" />
+%(alternate)s
+%(stylesheet)s
+</head>
+<body>
+'''
+footer = """</body>"""
+
+styleLink = '<link rel="stylesheet" href="%s" type="text/css" />'
+alternateLink = '<link rel="alternate" href="%(source)s" type="text/x-python" />'
+
+class Options(usage.Options):
+    synopsis = """%s [options] source.py
+    """ % (
+        os.path.basename(sys.argv[0]),)
+
+    optParameters = [
+        ('stylesheet', 's', None, "URL of stylesheet to link to."),
+        ]
+
+    compData = usage.Completions(
+        extraActions=[usage.CompleteFiles('*.py', descr='source python file')]
+        )
+
+    def parseArgs(self, filename):
+        self['filename'] = filename
+
+def run():
+    options = Options()
+    try:
+        options.parseOptions()
+    except usage.UsageError, e:
+        print str(e)
+        sys.exit(1)
+    filename = options['filename']
+    if options.get('stylesheet') is not None:
+        stylesheet = styleLink % (options['stylesheet'],)
+    else:
+        stylesheet = ''
+
+    output = open(filename + '.html', 'w')
+    try:
+        output.write(header % {
+            'title': filename,
+            'generator': 'htmlizer/%s' % (copyright.longversion,),
+            'alternate': alternateLink % {'source': filename},
+            'stylesheet': stylesheet
+            })
+        htmlizer.filter(open(filename), output,
+                        htmlizer.SmallerHTMLWriter)
+        output.write(footer)
+    finally:
+        output.close()
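
The script above is a thin wrapper around twisted.python.htmlizer; the same rendering can be driven directly from other tools. A small sketch (the file names are illustrative), using the same try/finally idiom as run():

    from twisted.python import htmlizer

    # Render example.py as example.py.html -- the same filter() call run() makes,
    # minus the header/footer boilerplate.
    source = open('example.py')
    output = open('example.py.html', 'w')
    try:
        htmlizer.filter(source, output, htmlizer.SmallerHTMLWriter)
    finally:
        source.close()
        output.close()
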
diff --git a/ThirdParty/Twisted/twisted/scripts/manhole.py b/ThirdParty/Twisted/twisted/scripts/manhole.py
new file mode 100644
index 0000000..06adffb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/manhole.py
@@ -0,0 +1,69 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Start a L{twisted.manhole} client.
+"""
+
+import sys
+
+from twisted.python import usage
+
+def run():
+    config = MyOptions()
+    try:
+        config.parseOptions()
+    except usage.UsageError, e:
+        print str(e)
+        print str(config)
+        sys.exit(1)
+
+    run_gtk2(config)
+
+    from twisted.internet import reactor
+    reactor.run()
+
+
+def run_gtk2(config):
+    # Put these off until after we parse options, so we know what reactor
+    # to load.
+    from twisted.internet import gtk2reactor
+    gtk2reactor.install()
+
+    # Put this off until after we parse options, or else gnome eats them.
+    sys.argv[:] = ['manhole']
+    from twisted.manhole.ui import gtk2manhole
+
+    o = config.opts
+    defaults = {
+        'host': o['host'],
+        'port': o['port'],
+        'identityName': o['user'],
+        'password': o['password'],
+        'serviceName': o['service'],
+        'perspectiveName': o['perspective']
+        }
+    w = gtk2manhole.ManholeWindow()
+    w.setDefaults(defaults)
+    w.login()
+
+
+pbportno = 8787
+
+class MyOptions(usage.Options):
+    optParameters=[("user", "u", "guest", "username"),
+                   ("password", "w", "guest"),
+                   ("service", "s", "twisted.manhole", "PB Service"),
+                   ("host", "h", "localhost"),
+                   ("port", "p", str(pbportno)),
+                   ("perspective", "P", "",
+                    "PB Perspective to ask for "
+                    "(if different than username)")]
+
+    compData = usage.Completions(
+        optActions={"host": usage.CompleteHostnames(),
+                    "user": usage.CompleteUsernames()}
+        )
+
+if __name__ == '__main__':
+    run()
diff --git a/ThirdParty/Twisted/twisted/scripts/tap2deb.py b/ThirdParty/Twisted/twisted/scripts/tap2deb.py
new file mode 100644
index 0000000..0fc883f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/tap2deb.py
@@ -0,0 +1,281 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+
+import sys, os, string, shutil
+
+from twisted.python import usage
+
+class MyOptions(usage.Options):
+    optFlags = [["unsigned", "u"]]
+    optParameters = [["tapfile", "t", "twistd.tap"],
+                  ["maintainer", "m", "", "The maintainer's name and email in a specific format: "
+                   "'John Doe <johndoe at example.com>'"],
+                  ["protocol", "p", ""],
+                  ["description", "e", ""],
+                  ["long_description", "l", ""],
+                  ["set-version", "V", "1.0"],
+                  ["debfile", "d", None],
+                  ["type", "y", "tap", "type of configuration: 'tap', 'xml, 'source' or 'python' for .tac files"]]
+
+    compData = usage.Completions(
+        optActions={
+            "type": usage.CompleteList(["tap", "xml", "source", "python"]),
+            "debfile": usage.CompleteFiles("*.deb")}
+        )
+
+    def postOptions(self):
+        if not self["maintainer"]:
+            raise usage.UsageError, "maintainer must be specified."
+
+
+type_dict = {
+'tap': 'file',
+'python': 'python',
+'source': 'source',
+'xml': 'xml',
+}
+
+def save_to_file(file, text):
+    f = open(file, 'w')
+    f.write(text)
+    f.close()
+
+
+def run():
+
+    try:
+        config = MyOptions()
+        config.parseOptions()
+    except usage.error, ue:
+        sys.exit("%s: %s" % (sys.argv[0], ue))
+
+    tap_file = config['tapfile']
+    base_tap_file = os.path.basename(config['tapfile'])
+    protocol = (config['protocol'] or os.path.splitext(base_tap_file)[0])
+    deb_file = config['debfile'] or 'twisted-'+protocol
+    version = config['set-version']
+    maintainer = config['maintainer']
+    description = config['description'] or ('A Twisted-based server for %(protocol)s' %
+                                            vars())
+    long_description = config['long_description'] or 'Automatically created by tap2deb'
+    twistd_option = type_dict[config['type']]
+    date = string.strip(os.popen('822-date').read())
+    directory = deb_file + '-' + version
+    python_version = '%s.%s' % sys.version_info[:2]
+
+    if os.path.exists(os.path.join('.build', directory)):
+        os.system('rm -rf %s' % os.path.join('.build', directory))
+    os.makedirs(os.path.join('.build', directory, 'debian'))
+
+    shutil.copy(tap_file, os.path.join('.build', directory))
+
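+    # Each save_to_file() call below fills in a debian/ packaging template,
+    # substituting the local variables gathered above via the "% vars()" idiom.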
+    save_to_file(os.path.join('.build', directory, 'debian', 'README.Debian'), 
+    '''This package was auto-generated by tap2deb\n''')
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'conffiles'), 
+    '''\
+/etc/init.d/%(deb_file)s
+/etc/default/%(deb_file)s
+/etc/%(base_tap_file)s
+''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'default'), 
+    '''\
+pidfile=/var/run/%(deb_file)s.pid
+rundir=/var/lib/%(deb_file)s/
+file=/etc/%(tap_file)s
+logfile=/var/log/%(deb_file)s.log
+ ''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'init.d'),
+    '''\
+#!/bin/sh
+
+PATH=/sbin:/bin:/usr/sbin:/usr/bin
+
+pidfile=/var/run/%(deb_file)s.pid \
+rundir=/var/lib/%(deb_file)s/ \
+file=/etc/%(tap_file)s \
+logfile=/var/log/%(deb_file)s.log
+
+[ -r /etc/default/%(deb_file)s ] && . /etc/default/%(deb_file)s
+
+test -x /usr/bin/twistd%(python_version)s || exit 0
+test -r $file || exit 0
+test -r /usr/share/%(deb_file)s/package-installed || exit 0
+
+
+case "$1" in
+    start)
+        echo -n "Starting %(deb_file)s: twistd"
+        start-stop-daemon --start --quiet --exec /usr/bin/twistd%(python_version)s -- \
+                          --pidfile=$pidfile \
+                          --rundir=$rundir \
+                          --%(twistd_option)s=$file \
+                          --logfile=$logfile
+        echo "."
+    ;;
+
+    stop)
+        echo -n "Stopping %(deb_file)s: twistd"
+        start-stop-daemon --stop --quiet  \
+            --pidfile $pidfile
+        echo "."
+    ;;
+
+    restart)
+        $0 stop
+        $0 start
+    ;;
+
+    force-reload)
+        $0 restart
+    ;;
+
+    *)
+        echo "Usage: /etc/init.d/%(deb_file)s {start|stop|restart|force-reload}" >&2
+        exit 1
+    ;;
+esac
+
+exit 0
+''' % vars())
+
+    os.chmod(os.path.join('.build', directory, 'debian', 'init.d'), 0755)
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'postinst'),
+    '''\
+#!/bin/sh
+update-rc.d %(deb_file)s defaults >/dev/null
+invoke-rc.d %(deb_file)s start
+''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'prerm'),
+    '''\
+#!/bin/sh
+invoke-rc.d %(deb_file)s stop
+''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'postrm'),
+    '''\
+#!/bin/sh
+if [ "$1" = purge ]; then
+        update-rc.d %(deb_file)s remove >/dev/null
+fi
+''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'changelog'),
+    '''\
+%(deb_file)s (%(version)s) unstable; urgency=low
+
+  * Created by tap2deb
+
+ -- %(maintainer)s  %(date)s
+
+''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'control'),
+    '''\
+Source: %(deb_file)s
+Section: net
+Priority: extra
+Maintainer: %(maintainer)s
+Build-Depends-Indep: debhelper
+Standards-Version: 3.5.6
+
+Package: %(deb_file)s
+Architecture: all
+Depends: python%(python_version)s-twisted
+Description: %(description)s
+ %(long_description)s
+''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'copyright'),
+    '''\
+This package was auto-debianized by %(maintainer)s on
+%(date)s
+
+It was auto-generated by tap2deb
+
+Upstream Author(s): 
+Moshe Zadka <moshez@twistedmatrix.com> -- tap2deb author
+
+Copyright:
+
+Insert copyright here.
+''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'dirs'),
+    '''\
+etc/init.d
+etc/default
+var/lib/%(deb_file)s
+usr/share/doc/%(deb_file)s
+usr/share/%(deb_file)s
+''' % vars())
+
+    save_to_file(os.path.join('.build', directory, 'debian', 'rules'),
+    '''\
+#!/usr/bin/make -f
+
+export DH_COMPAT=1
+
+build: build-stamp
+build-stamp:
+	dh_testdir
+	touch build-stamp
+
+clean:
+	dh_testdir
+	dh_testroot
+	rm -f build-stamp install-stamp
+	dh_clean
+
+install: install-stamp
+install-stamp: build-stamp
+	dh_testdir
+	dh_testroot
+	dh_clean -k
+	dh_installdirs
+
+	# Add here commands to install the package into debian/tmp.
+	cp %(base_tap_file)s debian/tmp/etc/
+	cp debian/init.d debian/tmp/etc/init.d/%(deb_file)s
+	cp debian/default debian/tmp/etc/default/%(deb_file)s
+	cp debian/copyright debian/tmp/usr/share/doc/%(deb_file)s/
+	cp debian/README.Debian debian/tmp/usr/share/doc/%(deb_file)s/
+	touch debian/tmp/usr/share/%(deb_file)s/package-installed
+	touch install-stamp
+
+binary-arch: build install
+
+binary-indep: build install
+	dh_testdir
+	dh_testroot
+	dh_strip
+	dh_compress
+	dh_installchangelogs
+	dh_fixperms
+	dh_installdeb
+	dh_shlibdeps
+	dh_gencontrol
+	dh_md5sums
+	dh_builddeb
+
+source diff:
+	@echo >&2 'source and diff are obsolete - use dpkg-source -b'; false
+
+binary: binary-indep binary-arch
+.PHONY: build clean binary-indep binary-arch binary install
+''' % vars())
+
+    os.chmod(os.path.join('.build', directory, 'debian', 'rules'), 0755)
+
+    os.chdir('.build/%(directory)s' % vars())
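+    # config['unsigned'] is 0 unless --unsigned was given, so indexing the list
+    # appends " -uc -us" (build without signing) only for unsigned builds.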
+    os.system('dpkg-buildpackage -rfakeroot'+ ['', ' -uc -us'][config['unsigned']])
+
+if __name__ == '__main__':
+    run()
+
diff --git a/ThirdParty/Twisted/twisted/scripts/tap2rpm.py b/ThirdParty/Twisted/twisted/scripts/tap2rpm.py
new file mode 100644
index 0000000..30149b7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/tap2rpm.py
@@ -0,0 +1,331 @@
+# -*- test-case-name: twisted.scripts.test.test_tap2rpm -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys, os, shutil, time, glob
+import subprocess
+import tempfile
+import tarfile
+from StringIO import StringIO
+import warnings
+
+from twisted.python import usage, log, versions, deprecate
+
+
+#################################
+#  data that goes in /etc/inittab
+initFileData = '''\
+#!/bin/sh
+#
+#  Startup script for a Twisted service.
+#
+#  chkconfig: - 85 15
+#  description: Start-up script for the Twisted service "%(tap_file)s".
+
+PATH=/usr/bin:/bin:/usr/sbin:/sbin
+
+pidfile=/var/run/%(rpm_file)s.pid
+rundir=/var/lib/twisted-taps/%(rpm_file)s/
+file=/etc/twisted-taps/%(tap_file)s
+logfile=/var/log/%(rpm_file)s.log
+
+#  load init function library
+. /etc/init.d/functions
+
+[ -r /etc/default/%(rpm_file)s ] && . /etc/default/%(rpm_file)s
+
+#  check for required files
+if [ ! -x /usr/bin/twistd ]
+then
+        echo "$0: Aborting, no /usr/bin/twistd found"
+        exit 0
+fi
+if [ ! -r "$file" ]
+then
+        echo "$0: Aborting, no file $file found."
+        exit 0
+fi
+
+#  set up run directory if necessary
+if [ ! -d "${rundir}" ]
+then
+        mkdir -p "${rundir}"
+fi
+
+
+case "$1" in
+        start)
+                echo -n "Starting %(rpm_file)s: twistd"
+                daemon twistd  \\
+                                --pidfile=$pidfile \\
+                                --rundir=$rundir \\
+                                --%(twistd_option)s=$file \\
+                                --logfile=$logfile
+                status %(rpm_file)s
+                ;;
+
+        stop)
+                echo -n "Stopping %(rpm_file)s: twistd"
+                kill `cat "${pidfile}"`
+                status %(rpm_file)s
+                ;;
+
+        restart)
+                "${0}" stop
+                "${0}" start
+                ;;
+
+    *)
+                echo "Usage: ${0} {start|stop|restart|}" >&2
+                exit 1
+                ;;
+esac
+
+exit 0
+'''
+
+#######################################
+#  the data for creating the spec file
+specFileData = '''\
+Summary:    %(description)s
+Name:       %(rpm_file)s
+Version:    %(version)s
+Release:    1
+License:    Unknown
+Group:      Networking/Daemons
+Source:     %(tarfile_basename)s
+BuildRoot:  %%{_tmppath}/%%{name}-%%{version}-root
+Requires:   /usr/bin/twistd
+BuildArch:  noarch
+
+%%description
+%(long_description)s
+
+%%prep
+%%setup
+%%build
+
+%%install
+[ ! -z "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != '/' ] \
+                && rm -rf "$RPM_BUILD_ROOT"
+mkdir -p "$RPM_BUILD_ROOT"/etc/twisted-taps
+mkdir -p "$RPM_BUILD_ROOT"/etc/init.d
+mkdir -p "$RPM_BUILD_ROOT"/var/lib/twisted-taps
+cp "%(tap_file)s" "$RPM_BUILD_ROOT"/etc/twisted-taps/
+cp "%(rpm_file)s.init" "$RPM_BUILD_ROOT"/etc/init.d/"%(rpm_file)s"
+
+%%clean
+[ ! -z "$RPM_BUILD_ROOT" -a "$RPM_BUILD_ROOT" != '/' ] \
+                && rm -rf "$RPM_BUILD_ROOT"
+
+%%post
+/sbin/chkconfig --add %(rpm_file)s
+/sbin/chkconfig --level 35 %(rpm_file)s
+/etc/init.d/%(rpm_file)s start
+
+%%preun
+/etc/init.d/%(rpm_file)s stop
+/sbin/chkconfig --del %(rpm_file)s
+
+%%files
+%%defattr(-,root,root)
+%%attr(0755,root,root) /etc/init.d/%(rpm_file)s
+%%attr(0660,root,root) /etc/twisted-taps/%(tap_file)s
+
+%%changelog
+* %(date)s %(maintainer)s
+- Created by tap2rpm: %(rpm_file)s (%(version)s)
+'''
+
+###############################
+class MyOptions(usage.Options):
+    optFlags = [['quiet', 'q']]
+    optParameters = [
+                     ["tapfile", "t", "twistd.tap"],
+                     ["maintainer", "m", "tap2rpm"],
+                     ["protocol", "p", None],
+                     ["description", "e", None],
+                     ["long_description", "l",
+                         "Automatically created by tap2rpm"],
+                     ["set-version", "V", "1.0"],
+                     ["rpmfile", "r", None],
+                     ["type", "y", "tap", "type of configuration: 'tap', 'xml', "
+                      "'source' or 'python'"],
+                    ]
+
+    compData = usage.Completions(
+        optActions={"type": usage.CompleteList(["tap", "xml", "source",
+                                                "python"]),
+                    "rpmfile": usage.CompleteFiles("*.rpm")}
+        )
+
+    def postOptions(self):
+        """
+        Calculate the default values for certain command-line options.
+        """
+        # Options whose defaults depend on other parameters.
+        if self['protocol'] is None:
+            base_tapfile = os.path.basename(self['tapfile'])
+            self['protocol'] = os.path.splitext(base_tapfile)[0]
+        if self['description'] is None:
+            self['description'] = "A TCP server for %s" % (self['protocol'],)
+        if self['rpmfile'] is None:
+            self['rpmfile'] = 'twisted-%s' % (self['protocol'],)
+
+        # Values that aren't options, but are calculated from options and are
+        # handy to have around.
+        self['twistd_option'] = type_dict[self['type']]
+        self['release-name'] = '%s-%s' % (self['rpmfile'], self['set-version'])
+
+
+    def opt_unsigned(self):
+        """
+        Generate an unsigned rather than a signed RPM. (DEPRECATED; unsigned
+        is the default)
+        """
+        msg = deprecate.getDeprecationWarningString(
+            self.opt_unsigned, versions.Version("Twisted", 12, 1, 0))
+        warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
+
+    # Maintain the -u short flag
+    opt_u = opt_unsigned
+
+
+type_dict = {
+    'tap': 'file',
+    'python': 'python',
+    'source': 'source',
+    'xml': 'xml',
+}
+
+
+
+##########################
+def makeBuildDir():
+    """
+    Set up the temporary directory for building RPMs.
+
+    Returns: the path to a randomly-named temporary RPM build directory.
+    """
+    tmpDir = tempfile.mkdtemp()
+    #  set up initial directory contents
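+    #  (these subdirectories mirror the standard rpmbuild %_topdir layout)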
+    os.makedirs(os.path.join(tmpDir, 'RPMS', 'noarch'))
+    os.makedirs(os.path.join(tmpDir, 'SPECS'))
+    os.makedirs(os.path.join(tmpDir, 'BUILD'))
+    os.makedirs(os.path.join(tmpDir, 'SOURCES'))
+    os.makedirs(os.path.join(tmpDir, 'SRPMS'))
+
+    log.msg(format="Created RPM build structure in %(path)r",
+            path=tmpDir)
+    return tmpDir
+
+
+
+def setupBuildFiles(buildDir, config):
+    """
+    Create files required to build an RPM in the build directory.
+    """
+    # Create the source tarball in the SOURCES directory.
+    tarballName = "%s.tar" % (config['release-name'],)
+    tarballPath = os.path.join(buildDir, "SOURCES", tarballName)
+    tarballHandle = tarfile.open(tarballPath, "w")
+
+    sourceDirInfo = tarfile.TarInfo(config['release-name'])
+    sourceDirInfo.type = tarfile.DIRTYPE
+    sourceDirInfo.mode = 0755
+    tarballHandle.addfile(sourceDirInfo)
+
+    tapFileBase = os.path.basename(config['tapfile'])
+
+    initFileInfo = tarfile.TarInfo(
+            os.path.join(
+                config['release-name'],
+                '%s.init' % config['rpmfile'],
+            )
+        )
+    initFileInfo.type = tarfile.REGTYPE
+    initFileInfo.mode = 0755
+    initFileRealData = initFileData % {
+            'tap_file': tapFileBase,
+            'rpm_file': config['release-name'],
+            'twistd_option': config['twistd_option'],
+        }
+    initFileInfo.size = len(initFileRealData)
+    tarballHandle.addfile(initFileInfo, StringIO(initFileRealData))
+
+    tapFileHandle = open(config['tapfile'], 'rb')
+    tapFileInfo = tarballHandle.gettarinfo(
+            arcname=os.path.join(config['release-name'], tapFileBase),
+            fileobj=tapFileHandle,
+        )
+    tapFileInfo.mode = 0644
+    tarballHandle.addfile(tapFileInfo, tapFileHandle)
+
+    tarballHandle.close()
+
+    log.msg(format="Created dummy source tarball %(tarballPath)r",
+            tarballPath=tarballPath)
+
+    # Create the spec file in the SPECS directory.
+    specName = "%s.spec" % (config['release-name'],)
+    specPath = os.path.join(buildDir, "SPECS", specName)
+    specHandle = open(specPath, "w")
+    specFileRealData = specFileData % {
+            'description': config['description'],
+            'rpm_file': config['rpmfile'],
+            'version': config['set-version'],
+            'tarfile_basename': tarballName,
+            'tap_file': tapFileBase,
+            'date': time.strftime('%a %b %d %Y', time.localtime(time.time())),
+            'maintainer': config['maintainer'],
+            'long_description': config['long_description'],
+        }
+    specHandle.write(specFileRealData)
+    specHandle.close()
+
+    log.msg(format="Created RPM spec file %(specPath)r",
+            specPath=specPath)
+
+    return specPath
+
+
+
+def run(options=None):
+    #  parse options
+    try:
+        config = MyOptions()
+        config.parseOptions(options)
+    except usage.error, ue:
+        sys.exit("%s: %s" % (sys.argv[0], ue))
+
+    #  create RPM build environment
+    tmpDir = makeBuildDir()
+    specPath = setupBuildFiles(tmpDir, config)
+
+    #  build rpm
+    job = subprocess.Popen([
+            "rpmbuild",
+            "-vv",
+            "--define", "_topdir %s" % (tmpDir,),
+            "-ba", specPath,
+        ], stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+    stdout, _ = job.communicate()
+
+    # If there was a problem, show people what it was.
+    if job.returncode != 0:
+        print stdout
+
+    #  copy the RPMs to the local directory
+    rpmPath = glob.glob(os.path.join(tmpDir, 'RPMS', 'noarch', '*'))[0]
+    srpmPath = glob.glob(os.path.join(tmpDir, 'SRPMS', '*'))[0]
+    if not config['quiet']:
+        print 'Writing "%s"...' % os.path.basename(rpmPath)
+    shutil.copy(rpmPath, '.')
+    if not config['quiet']:
+        print 'Writing "%s"...' % os.path.basename(srpmPath)
+    shutil.copy(srpmPath, '.')
+
+    #  remove the build directory
+    shutil.rmtree(tmpDir)
+
+    return [os.path.basename(rpmPath), os.path.basename(srpmPath)]
diff --git a/ThirdParty/Twisted/twisted/scripts/tapconvert.py b/ThirdParty/Twisted/twisted/scripts/tapconvert.py
new file mode 100644
index 0000000..4c994a0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/tapconvert.py
@@ -0,0 +1,57 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys, getpass
+
+from twisted.python import usage
+from twisted.application import app
+from twisted.persisted import sob
+
+
+class ConvertOptions(usage.Options):
+    synopsis = "Usage: tapconvert [options]"
+    optParameters = [
+        ['in',      'i', None,     "The filename of the tap to read from"],
+        ['out',     'o', None,     "A filename to write the tap to"],
+        ['typein',  'f', 'guess',
+         "The  format to use; this can be 'guess', 'python', "
+         "'pickle', 'xml', or 'source'."],
+        ['typeout', 't', 'source',
+         "The output format to use; this can be 'pickle', 'xml', or 'source'."],
+        ]
+
+    optFlags = [
+        ['decrypt', 'd', "The specified tap/aos/xml file is encrypted."],
+        ['encrypt', 'e', "Encrypt file before writing"]
+        ]
+
+    compData = usage.Completions(
+        optActions={"typein": usage.CompleteList(["guess", "python", "pickle",
+                                                  "xml", "source"]),
+                    "typeout": usage.CompleteList(["pickle", "xml", "source"]),
+                    "in": usage.CompleteFiles(descr="tap file to read from"),
+                    "out": usage.CompleteFiles(descr="tap file to write to"),
+                    }
+        )
+
+    def postOptions(self):
+        if self['in'] is None:
+            raise usage.UsageError("%s\nYou must specify the input filename."
+                                   % self)
+        if self["typein"] == "guess":
+            try:
+                self["typein"] = sob.guessType(self["in"])
+            except KeyError:
+                raise usage.UsageError("Could not guess type for '%s'" %
+                                       self["in"])
+
+def run():
+    options = ConvertOptions()
+    try:
+        options.parseOptions(sys.argv[1:])
+    except usage.UsageError, e:
+        print e
+    else:
+        app.convertStyle(options["in"], options["typein"],
+                     options.opts['decrypt'] or getpass.getpass('Passphrase: '),
+                     options["out"], options['typeout'], options["encrypt"])
diff --git a/ThirdParty/Twisted/twisted/scripts/test/__init__.py b/ThirdParty/Twisted/twisted/scripts/test/__init__.py
new file mode 100644
index 0000000..c04ed1c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test package for L{twisted.scripts}.
+"""
diff --git a/ThirdParty/Twisted/twisted/scripts/test/test_scripts.py b/ThirdParty/Twisted/twisted/scripts/test/test_scripts.py
new file mode 100644
index 0000000..2cbff50
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/test/test_scripts.py
@@ -0,0 +1,201 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the command-line scripts in the top-level I{bin/} directory.
+
+Tests for actual functionality belong elsewhere, written in a way that doesn't
+involve launching child processes.
+"""
+
+from os import devnull, getcwd, chdir
+from sys import executable
+from subprocess import PIPE, Popen
+
+from twisted.trial.unittest import SkipTest, TestCase
+from twisted.python.modules import getModule
+from twisted.python.filepath import FilePath
+from twisted.python.test.test_shellcomp import ZshScriptTestMixin
+
+
+
+def outputFromPythonScript(script, *args):
+    """
+    Synchronously run a Python script, with the same Python interpreter that
+    ran the process calling this function, using L{Popen}, using the given
+    command-line arguments, with standard input and standard error both
+    redirected to L{os.devnull}, and return its output as a string.
+
+    @param script: The path to the script.
+    @type script: L{FilePath}
+
+    @param args: The command-line arguments to follow the script in its
+        invocation (the desired C{sys.argv[1:]}).
+    @type args: L{tuple} of L{str}
+
+    @return: the output passed to the process's C{stdout}, without any messages
+        from C{stderr}.
+    @rtype: L{bytes}
+    """
+    nullInput = file(devnull, "rb")
+    nullError = file(devnull, "wb")
+    stdout = Popen([executable, script.path] + list(args),
+                   stdout=PIPE, stderr=nullError, stdin=nullInput).stdout.read()
+    nullInput.close()
+    nullError.close()
+    return stdout
+
+
+
+class ScriptTestsMixin:
+    """
+    Mixin for L{TestCase} subclasses which defines a helper function for testing
+    a Twisted-using script.
+    """
+    bin = getModule("twisted").pathEntry.filePath.child("bin")
+
+    def scriptTest(self, name):
+        """
+        Verify that the given script runs and uses the version of Twisted
+        currently being tested.
+
+        This only works when running tests against a vcs checkout of Twisted,
+        since it relies on the scripts being in the place they are kept in
+        version control, and exercises their logic for finding the right version
+        of Twisted to use in that situation.
+
+        @param name: A path fragment, relative to the I{bin} directory of a
+            Twisted source checkout, identifying a script to test.
+        @type name: C{str}
+
+        @raise SkipTest: if the script is not where it is expected to be.
+        """
+        script = self.bin.preauthChild(name)
+        if not script.exists():
+            raise SkipTest(
+                "Script tests do not apply to installed configuration.")
+
+        from twisted.copyright import version
+        scriptVersion = outputFromPythonScript(script, '--version')
+
+        self.assertIn(str(version), scriptVersion)
+
+
+
+class ScriptTests(TestCase, ScriptTestsMixin):
+    """
+    Tests for the core scripts.
+    """
+    def test_twistd(self):
+        self.scriptTest("twistd")
+
+
+    def test_twistdPathInsert(self):
+        """
+        The twistd script adds the current working directory to sys.path so
+        that it's able to import modules from it.
+        """
+        script = self.bin.child("twistd")
+        if not script.exists():
+            raise SkipTest(
+                "Script tests do not apply to installed configuration.")
+        cwd = getcwd()
+        self.addCleanup(chdir, cwd)
+        testDir = FilePath(self.mktemp())
+        testDir.makedirs()
+        chdir(testDir.path)
+        testDir.child("bar.tac").setContent(
+            "import sys\n"
+            "print sys.path\n")
+        output = outputFromPythonScript(script, '-ny', 'bar.tac')
+        self.assertIn(repr(testDir.path), output)
+
+
+    def test_manhole(self):
+        self.scriptTest("manhole")
+
+
+    def test_trial(self):
+        self.scriptTest("trial")
+
+
+    def test_trialPathInsert(self):
+        """
+        The trial script adds the current working directory to sys.path so that
+        it's able to import modules from it.
+        """
+        script = self.bin.child("trial")
+        if not script.exists():
+            raise SkipTest(
+                "Script tests do not apply to installed configuration.")
+        cwd = getcwd()
+        self.addCleanup(chdir, cwd)
+        testDir = FilePath(self.mktemp())
+        testDir.makedirs()
+        chdir(testDir.path)
+        testDir.child("foo.py").setContent("")
+        output = outputFromPythonScript(script, 'foo')
+        self.assertIn("PASSED", output)
+
+
+    def test_pyhtmlizer(self):
+        self.scriptTest("pyhtmlizer")
+
+
+    def test_tap2rpm(self):
+        self.scriptTest("tap2rpm")
+
+
+    def test_tap2deb(self):
+        self.scriptTest("tap2deb")
+
+
+    def test_tapconvert(self):
+        self.scriptTest("tapconvert")
+
+
+    def test_deprecatedTkunzip(self):
+        """
+        The entire L{twisted.scripts.tkunzip} module, part of the old Windows
+        installer tool chain, is deprecated.
+        """
+        from twisted.scripts import tkunzip
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_deprecatedTkunzip])
+        self.assertEqual(DeprecationWarning, warnings[0]['category'])
+        self.assertEqual(
+            "twisted.scripts.tkunzip was deprecated in Twisted 11.1.0: "
+            "Seek unzipping software outside of Twisted.",
+            warnings[0]['message'])
+        self.assertEqual(1, len(warnings))
+
+
+    def test_deprecatedTapconvert(self):
+        """
+        The entire L{twisted.scripts.tapconvert} module is deprecated.
+        """
+        from twisted.scripts import tapconvert
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_deprecatedTapconvert])
+        self.assertEqual(DeprecationWarning, warnings[0]['category'])
+        self.assertEqual(
+            "twisted.scripts.tapconvert was deprecated in Twisted 12.1.0: "
+            "tapconvert has been deprecated.",
+            warnings[0]['message'])
+        self.assertEqual(1, len(warnings))
+
+
+
+class ZshIntegrationTestCase(TestCase, ZshScriptTestMixin):
+    """
+    Test that zsh completion functions are generated without error
+    """
+    generateFor = [('twistd', 'twisted.scripts.twistd.ServerOptions'),
+                   ('trial', 'twisted.scripts.trial.Options'),
+                   ('pyhtmlizer', 'twisted.scripts.htmlizer.Options'),
+                   ('tap2rpm', 'twisted.scripts.tap2rpm.MyOptions'),
+                   ('tap2deb', 'twisted.scripts.tap2deb.MyOptions'),
+                   ('tapconvert', 'twisted.scripts.tapconvert.ConvertOptions'),
+                   ('manhole', 'twisted.scripts.manhole.MyOptions')
+                   ]
+
diff --git a/ThirdParty/Twisted/twisted/scripts/test/test_tap2rpm.py b/ThirdParty/Twisted/twisted/scripts/test/test_tap2rpm.py
new file mode 100644
index 0000000..509e69c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/test/test_tap2rpm.py
@@ -0,0 +1,399 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.scripts.tap2rpm}.
+"""
+import os
+
+from twisted.trial.unittest import TestCase, SkipTest
+from twisted.python import procutils
+from twisted.python import versions
+from twisted.python import deprecate
+from twisted.python.failure import Failure
+from twisted.internet import utils
+from twisted.scripts import tap2rpm
+
+# When we query the RPM metadata, we get back a string we'll have to parse, so
+# we'll use suitably rare delimiter characters to split on. Luckily, ASCII
+# defines some for us!
+RECORD_SEPARATOR = "\x1E"
+UNIT_SEPARATOR = "\x1F"
+
+
+
+def _makeRPMs(tapfile=None, maintainer=None, protocol=None, description=None,
+        longDescription=None, setVersion=None, rpmfile=None, type_=None):
+    """
+    Helper function to invoke tap2rpm with the given parameters.
+    """
+    args = []
+
+    if not tapfile:
+        tapfile = "dummy-tap-file"
+        handle = open(tapfile, "w")
+        handle.write("# Dummy TAP file\n")
+        handle.close()
+
+    args.extend(["--quiet", "--tapfile", tapfile])
+
+    if maintainer:
+        args.extend(["--maintainer", maintainer])
+    if protocol:
+        args.extend(["--protocol", protocol])
+    if description:
+        args.extend(["--description", description])
+    if longDescription:
+        args.extend(["--long_description", longDescription])
+    if setVersion:
+        args.extend(["--set-version", setVersion])
+    if rpmfile:
+        args.extend(["--rpmfile", rpmfile])
+    if type_:
+        args.extend(["--type", type_])
+
+    return tap2rpm.run(args)
+
+
+
+def _queryRPMTags(rpmfile, taglist):
+    """
+    Helper function to read the given header tags from the given RPM file.
+
+    Returns a Deferred that fires with dictionary mapping a tag name to a list
+    of the associated values in the RPM header. If a tag has only a single
+    value in the header (like NAME or VERSION), it will be returned as a 1-item
+    list.
+
+    Run "rpm --querytags" to see what tags can be queried.
+    """
+
+    # Build a query format string that will return appropriately delimited
+    # results. Every field is treated as an array field, so single-value tags
+    # like VERSION will be returned as 1-item lists.
+    queryFormat = RECORD_SEPARATOR.join([
+            "[%%{%s}%s]" % (tag, UNIT_SEPARATOR) for tag in taglist
+           ])
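+    # For example, taglist ["NAME", "VERSION"] yields the query format
+    # "[%{NAME}\x1f]\x1e[%{VERSION}\x1f]": each value of a tag is followed by
+    # UNIT_SEPARATOR, and the per-tag groups are joined by RECORD_SEPARATOR.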
+
+    def parseTagValues(output):
+        res = {}
+
+        for tag, values in zip(taglist, output.split(RECORD_SEPARATOR)):
+            values = values.strip(UNIT_SEPARATOR).split(UNIT_SEPARATOR)
+            res[tag] = values
+
+        return res
+
+    def checkErrorResult(failure):
+        # The current rpm packages on Debian and Ubuntu don't properly set up
+        # the RPM database, which causes rpm to print a harmless warning to
+        # stderr. Unfortunately, .getProcessOutput() assumes all warnings are
+        # catastrophic and panics whenever it sees one.
+        #
+        # See also:
+        #   http://twistedmatrix.com/trac/ticket/3292#comment:42
+        #   http://bugs.debian.org/cgi-bin/bugreport.cgi?bug=551669
+        #   http://rpm.org/ticket/106
+
+        failure.trap(IOError)
+
+        # Depending on kernel scheduling, we might read the whole error
+        # message, or only the first few bytes.
+        if str(failure.value).startswith("got stderr: 'error: "):
+            newFailure = Failure(SkipTest("rpm is missing its package "
+                    "database. Run 'sudo rpm -qa > /dev/null' to create one."))
+        else:
+            # Not the exception we were looking for; we should report the
+            # original failure.
+            newFailure = failure
+
+        # We don't want to raise the exception right away; we want to wait for
+        # the process to exit, otherwise we'll get extra useless errors
+        # reported.
+        d = failure.value.processEnded
+        d.addBoth(lambda _: newFailure)
+        return d
+
+    d = utils.getProcessOutput("rpm",
+            ("-q", "--queryformat", queryFormat, "-p", rpmfile))
+    d.addCallbacks(parseTagValues, checkErrorResult)
+    return d
+
+
+
+class TestTap2RPM(TestCase):
+
+
+    def setUp(self):
+        return self._checkForRpmbuild()
+
+
+    def _checkForRpmbuild(self):
+        """
+        tap2rpm requires rpmbuild; skip tests if rpmbuild is not present.
+        """
+        if not procutils.which("rpmbuild"):
+            raise SkipTest("rpmbuild must be present to test tap2rpm")
+
+
+    def _makeTapFile(self, basename="dummy"):
+        """
+        Make a temporary .tap file and return its absolute path.
+        """
+        path = basename + ".tap"
+        handle = open(path, "w")
+        handle.write("# Dummy .tap file")
+        handle.close()
+        return path
+
+
+    def _verifyRPMTags(self, rpmfile, **tags):
+        """
+        Check the given file has the given tags set to the given values.
+        """
+
+        d = _queryRPMTags(rpmfile, tags.keys())
+        d.addCallback(self.assertEqual, tags)
+        return d
+
+
+    def test_optionDefaults(self):
+        """
+        Commandline options should default to sensible values.
+
+        "sensible" here is defined as "the same values that previous versions
+        defaulted to".
+        """
+        config = tap2rpm.MyOptions()
+        config.parseOptions([])
+
+        self.assertEqual(config['tapfile'], 'twistd.tap')
+        self.assertEqual(config['maintainer'], 'tap2rpm')
+        self.assertEqual(config['protocol'], 'twistd')
+        self.assertEqual(config['description'], 'A TCP server for twistd')
+        self.assertEqual(config['long_description'],
+                'Automatically created by tap2rpm')
+        self.assertEqual(config['set-version'], '1.0')
+        self.assertEqual(config['rpmfile'], 'twisted-twistd')
+        self.assertEqual(config['type'], 'tap')
+        self.assertEqual(config['quiet'], False)
+        self.assertEqual(config['twistd_option'], 'file')
+        self.assertEqual(config['release-name'], 'twisted-twistd-1.0')
+
+
+    def test_protocolCalculatedFromTapFile(self):
+        """
+        The protocol name defaults to a value based on the tapfile value.
+        """
+        config = tap2rpm.MyOptions()
+        config.parseOptions(['--tapfile', 'pancakes.tap'])
+
+        self.assertEqual(config['tapfile'], 'pancakes.tap')
+        self.assertEqual(config['protocol'], 'pancakes')
+
+
+    def test_optionsDefaultToProtocolValue(self):
+        """
+        Many options default to a value calculated from the protocol name.
+        """
+        config = tap2rpm.MyOptions()
+        config.parseOptions([
+                '--tapfile', 'sausages.tap',
+                '--protocol', 'eggs',
+            ])
+
+        self.assertEqual(config['tapfile'], 'sausages.tap')
+        self.assertEqual(config['maintainer'], 'tap2rpm')
+        self.assertEqual(config['protocol'], 'eggs')
+        self.assertEqual(config['description'], 'A TCP server for eggs')
+        self.assertEqual(config['long_description'],
+                'Automatically created by tap2rpm')
+        self.assertEqual(config['set-version'], '1.0')
+        self.assertEqual(config['rpmfile'], 'twisted-eggs')
+        self.assertEqual(config['type'], 'tap')
+        self.assertEqual(config['quiet'], False)
+        self.assertEqual(config['twistd_option'], 'file')
+        self.assertEqual(config['release-name'], 'twisted-eggs-1.0')
+
+
+    def test_releaseNameDefaultsToRpmfileValue(self):
+        """
+        The release-name option is calculated from rpmfile and set-version.
+        """
+        config = tap2rpm.MyOptions()
+        config.parseOptions([
+                "--rpmfile", "beans",
+                "--set-version", "1.2.3",
+            ])
+
+        self.assertEqual(config['release-name'], 'beans-1.2.3')
+
+
+    def test_basicOperation(self):
+        """
+        Calling tap2rpm should produce an RPM and SRPM with default metadata.
+        """
+        basename = "frenchtoast"
+
+        # Create RPMs based on a TAP file with this name.
+        rpm, srpm = _makeRPMs(tapfile=self._makeTapFile(basename))
+
+        # Verify the resulting RPMs have the correct tags.
+        d = self._verifyRPMTags(rpm,
+                NAME=["twisted-%s" % (basename,)],
+                VERSION=["1.0"],
+                RELEASE=["1"],
+                SUMMARY=["A TCP server for %s" % (basename,)],
+                DESCRIPTION=["Automatically created by tap2rpm"],
+            )
+        d.addCallback(lambda _: self._verifyRPMTags(srpm,
+                NAME=["twisted-%s" % (basename,)],
+                VERSION=["1.0"],
+                RELEASE=["1"],
+                SUMMARY=["A TCP server for %s" % (basename,)],
+                DESCRIPTION=["Automatically created by tap2rpm"],
+            ))
+
+        return d
+
+
+    def test_protocolOverride(self):
+        """
+        Setting 'protocol' should change the name of the resulting package.
+        """
+        basename = "acorn"
+        protocol = "banana"
+
+        # Create RPMs based on a TAP file with this name.
+        rpm, srpm = _makeRPMs(tapfile=self._makeTapFile(basename),
+                protocol=protocol)
+
+        # Verify the resulting RPMs have the correct tags.
+        d = self._verifyRPMTags(rpm,
+                NAME=["twisted-%s" % (protocol,)],
+                SUMMARY=["A TCP server for %s" % (protocol,)],
+            )
+        d.addCallback(lambda _: self._verifyRPMTags(srpm,
+                NAME=["twisted-%s" % (protocol,)],
+                SUMMARY=["A TCP server for %s" % (protocol,)],
+            ))
+
+        return d
+
+
+    def test_rpmfileOverride(self):
+        """
+        Setting 'rpmfile' should change the name of the resulting package.
+        """
+        basename = "cherry"
+        rpmfile = "donut"
+
+        # Create RPMs based on a TAP file with this name.
+        rpm, srpm = _makeRPMs(tapfile=self._makeTapFile(basename),
+                rpmfile=rpmfile)
+
+        # Verify the resulting RPMs have the correct tags.
+        d = self._verifyRPMTags(rpm,
+                NAME=[rpmfile],
+                SUMMARY=["A TCP server for %s" % (basename,)],
+            )
+        d.addCallback(lambda _: self._verifyRPMTags(srpm,
+                NAME=[rpmfile],
+                SUMMARY=["A TCP server for %s" % (basename,)],
+            ))
+
+        return d
+
+
+    def test_descriptionOverride(self):
+        """
+        Setting 'description' should change the SUMMARY tag.
+        """
+        description = "eggplant"
+
+        # Create RPMs based on a TAP file with this name.
+        rpm, srpm = _makeRPMs(tapfile=self._makeTapFile(),
+                description=description)
+
+        # Verify the resulting RPMs have the correct tags.
+        d = self._verifyRPMTags(rpm,
+                SUMMARY=[description],
+            )
+        d.addCallback(lambda _: self._verifyRPMTags(srpm,
+                SUMMARY=[description],
+            ))
+
+        return d
+
+
+    def test_longDescriptionOverride(self):
+        """
+        Setting 'longDescription' should change the DESCRIPTION tag.
+        """
+        longDescription = "fig"
+
+        # Create RPMs based on a TAP file with this name.
+        rpm, srpm = _makeRPMs(tapfile=self._makeTapFile(),
+                longDescription=longDescription)
+
+        # Verify the resulting RPMs have the correct tags.
+        d = self._verifyRPMTags(rpm,
+                DESCRIPTION=[longDescription],
+            )
+        d.addCallback(lambda _: self._verifyRPMTags(srpm,
+                DESCRIPTION=[longDescription],
+            ))
+
+        return d
+
+
+    def test_setVersionOverride(self):
+        """
+        Setting 'setVersion' should change the RPM's version info.
+        """
+        version = "123.456"
+
+        # Create RPMs based on a TAP file with this name.
+        rpm, srpm = _makeRPMs(tapfile=self._makeTapFile(),
+                setVersion=version)
+
+        # Verify the resulting RPMs have the correct tags.
+        d = self._verifyRPMTags(rpm,
+                VERSION=["123.456"],
+                RELEASE=["1"],
+            )
+        d.addCallback(lambda _: self._verifyRPMTags(srpm,
+                VERSION=["123.456"],
+                RELEASE=["1"],
+            ))
+
+        return d
+
+
+    def test_tapInOtherDirectory(self):
+        """
+        tap2rpm handles tapfiles outside the current directory.
+        """
+        # Make a tapfile outside the current directory.
+        tempdir = self.mktemp()
+        os.mkdir(tempdir)
+        tapfile = self._makeTapFile(os.path.join(tempdir, "bacon"))
+
+        # Try and make an RPM from that tapfile.
+        _makeRPMs(tapfile=tapfile)
+
+
+    def test_unsignedFlagDeprecationWarning(self):
+        """
+        The 'unsigned' flag in tap2rpm is deprecated, and using it emits a
+        DeprecationWarning.
+        """
+        config = tap2rpm.MyOptions()
+        config.parseOptions(['--unsigned'])
+        warnings = self.flushWarnings()
+        self.assertEqual(DeprecationWarning, warnings[0]['category'])
+        self.assertEqual(
+            deprecate.getDeprecationWarningString(
+                config.opt_unsigned, versions.Version("Twisted", 12, 1, 0)),
+            warnings[0]['message'])
+        self.assertEqual(1, len(warnings))
diff --git a/ThirdParty/Twisted/twisted/scripts/tkunzip.py b/ThirdParty/Twisted/twisted/scripts/tkunzip.py
new file mode 100644
index 0000000..e2a4629
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/tkunzip.py
@@ -0,0 +1,290 @@
+# -*- test-case-name: twisted.scripts.test.test_scripts -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Post-install GUI to compile modules to .pyc and unpack the Twisted documentation.
+"""
+
+import sys
+import zipfile
+import py_compile
+
+# we're going to ignore failures to import tkinter and fall back
+# to using the console if the required dll is not found
+
+# Scary kludge to work around tk84.dll bug:
+# https://sourceforge.net/tracker/index.php?func=detail&aid=814654&group_id=5470&atid=105470
+# Without which(): you get a windows missing-dll popup message
+from twisted.python.procutils import which
+tkdll='tk84.dll'
+if which(tkdll) or which('DLLs/%s' % tkdll):
+    try:
+        import Tkinter
+        from Tkinter import *
+        from twisted.internet import tksupport
+    except ImportError:
+        pass
+
+# twisted
+from twisted.internet import reactor, defer
+from twisted.python import failure, log, zipstream, util, usage
+# local
+import os.path
+
+class ProgressBar:
+    def __init__(self, master=None, orientation="horizontal",
+                 min=0, max=100, width=100, height=18,
+                 doLabel=1, appearance="sunken",
+                 fillColor="blue", background="gray",
+                 labelColor="yellow", labelFont="Arial",
+                 labelText="", labelFormat="%d%%",
+                 value=0, bd=2):
+        # preserve various values
+        self.master=master
+        self.orientation=orientation
+        self.min=min
+        self.max=max
+        self.width=width
+        self.height=height
+        self.doLabel=doLabel
+        self.fillColor=fillColor
+        self.labelFont= labelFont
+        self.labelColor=labelColor
+        self.background=background
+        self.labelText=labelText
+        self.labelFormat=labelFormat
+        self.value=value
+        self.frame=Frame(master, relief=appearance, bd=bd)
+        self.canvas=Canvas(self.frame, height=height, width=width, bd=0,
+                           highlightthickness=0, background=background)
+        self.scale=self.canvas.create_rectangle(0, 0, width, height,
+                                                fill=fillColor)
+        self.label=self.canvas.create_text(self.canvas.winfo_reqwidth() / 2,
+                                           height / 2, text=labelText,
+                                           anchor="c", fill=labelColor,
+                                           font=self.labelFont)
+        self.update()
+        self.canvas.pack(side='top', fill='x', expand='no')
+
+    def pack(self, *args, **kwargs):
+        self.frame.pack(*args, **kwargs)
+
+    def updateProgress(self, newValue, newMax=None):
+        if newMax:
+            self.max = newMax
+        self.value = newValue
+        self.update()
+
+    def update(self):
+        # Trim the values to be between min and max
+        value=self.value
+        if value > self.max:
+            value = self.max
+        if value < self.min:
+            value = self.min
+        # Adjust the rectangle
+        if self.orientation == "horizontal":
+            self.canvas.coords(self.scale, 0, 0,
+              float(value) / self.max * self.width, self.height)
+        else:
+            self.canvas.coords(self.scale, 0,
+                               self.height - (float(value) /
+                                              self.max*self.height),
+                               self.width, self.height)
+        # Now update the colors
+        self.canvas.itemconfig(self.scale, fill=self.fillColor)
+        self.canvas.itemconfig(self.label, fill=self.labelColor)
+        # And update the label
+        if self.doLabel:
+            if value:
+                if value >= 0:
+                    pvalue = int((float(value) / float(self.max)) *
+                                   100.0)
+                else:
+                    pvalue = 0
+                self.canvas.itemconfig(self.label, text=self.labelFormat
+                                         % pvalue)
+            else:
+                self.canvas.itemconfig(self.label, text='')
+        else:
+            self.canvas.itemconfig(self.label, text=self.labelFormat %
+                                   self.labelText)
+        self.canvas.update_idletasks()
+
+
+class Progressor:
+    """A base class to make it simple to hook a progress bar up to a process.
+    """
+    def __init__(self, title, *args, **kwargs):
+        self.title=title
+        self.stopping=0
+        self.bar=None
+        self.iterator=None
+        self.remaining=1000
+
+    def setBar(self, bar, max):
+        self.bar=bar
+        bar.updateProgress(0, max)
+        return self
+
+    def setIterator(self, iterator):
+        self.iterator=iterator
+        return self
+
+    def updateBar(self, deferred):
+        b=self.bar
+        try:
+            b.updateProgress(b.max - self.remaining)
+        except TclError:
+            self.stopping=1
+        except:
+            deferred.errback(failure.Failure())
+
+    def processAll(self, root):
+        assert self.bar and self.iterator, "must setBar and setIterator"
+        self.root=root
+        root.title(self.title)
+        d=defer.Deferred()
+        d.addErrback(log.err)
+        reactor.callLater(0.1, self.processOne, d)
+        return d
+
+    def processOne(self, deferred):
+        if self.stopping:
+            deferred.callback(self.root)
+            return
+
+        try:
+            self.remaining=self.iterator.next()
+        except StopIteration:
+            self.stopping=1
+        except:
+            deferred.errback(failure.Failure())
+
+        if self.remaining%10==0:
+            reactor.callLater(0, self.updateBar, deferred)
+        if self.remaining%100==0:
+            log.msg(self.remaining)
+        reactor.callLater(0, self.processOne, deferred)
+
+def compiler(path):
+    """A generator for compiling files to .pyc"""
+    def justlist(arg, directory, names):
+        pynames=[os.path.join(directory, n) for n in names
+                 if n.endswith('.py')]
+        arg.extend(pynames)
+    all=[]
+    os.path.walk(path, justlist, all)
+
+    remaining=len(all)
+    i=zip(all, range(remaining-1, -1, -1))
+    for f, remaining in i:
+        py_compile.compile(f)
+        yield remaining
+
+class TkunzipOptions(usage.Options):
+    optParameters=[["zipfile", "z", "", "a zipfile"],
+                   ["ziptargetdir", "t", ".", "where to extract zipfile"],
+                   ["compiledir", "c", "", "a directory to compile"],
+                   ]
+    optFlags=[["use-console", "C", "show in the console, not graphically"],
+              ["shell-exec", "x", """\
+spawn a new console to show output (implies -C)"""],
+              ]
+
+def countPys(countl, directory, names):
+    sofar=countl[0]
+    sofar=sofar+len([f for f in names if f.endswith('.py')])
+    countl[0]=sofar
+    return sofar
+
+def countPysRecursive(path):
+    countl=[0]
+    os.path.walk(path, countPys, countl)
+    return countl[0]
+
+def run(argv=sys.argv):
+    log.startLogging(file('tkunzip.log', 'w'))
+    opt=TkunzipOptions()
+    try:
+        opt.parseOptions(argv[1:])
+    except usage.UsageError, e:
+        print str(opt)
+        print str(e)
+        sys.exit(1)
+
+    if opt['use-console']:
+        # this should come before shell-exec to prevent infinite loop
+        return doItConsolicious(opt)
+    if opt['shell-exec'] or not 'Tkinter' in sys.modules:
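+        # Tkinter is unavailable or a separate console was requested: re-run
+        # this same script with --use-console in a child Python process.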
+        from distutils import sysconfig
+        from twisted.scripts import tkunzip
+        myfile=tkunzip.__file__
+        exe=os.path.join(sysconfig.get_config_var('prefix'), 'python.exe')
+        return os.system('%s %s --use-console %s' % (exe, myfile,
+                                                     ' '.join(argv[1:])))
+    return doItTkinterly(opt)
+
+def doItConsolicious(opt):
+    # reclaim stdout/stderr from log
+    sys.stdout = sys.__stdout__
+    sys.stderr = sys.__stderr__
+    if opt['zipfile']:
+        print 'Unpacking documentation...'
+        for n in zipstream.unzipIter(opt['zipfile'], opt['ziptargetdir']):
+            if n % 100 == 0:
+                print n,
+            if n % 1000 == 0:
+                print
+        print 'Done unpacking.'
+
+    if opt['compiledir']:
+        print 'Compiling to pyc...'
+        import compileall
+        compileall.compile_dir(opt["compiledir"])
+        print 'Done compiling.'
+
+def doItTkinterly(opt):
+    root=Tkinter.Tk()
+    root.withdraw()
+    root.title('One Moment.')
+    root.protocol('WM_DELETE_WINDOW', reactor.stop)
+    tksupport.install(root)
+
+    prog=ProgressBar(root, value=0, labelColor="black", width=200)
+    prog.pack()
+
+    # callback immediately
+    d=defer.succeed(root).addErrback(log.err)
+
+    def deiconify(root):
+        root.deiconify()
+        return root
+
+    d.addCallback(deiconify)
+
+    if opt['zipfile']:
+        uz=Progressor('Unpacking documentation...')
+        max=zipstream.countZipFileChunks(opt['zipfile'], 4096)
+        uz.setBar(prog, max)
+        uz.setIterator(zipstream.unzipIterChunky(opt['zipfile'],
+                                                 opt['ziptargetdir']))
+        d.addCallback(uz.processAll)
+
+    if opt['compiledir']:
+        comp=Progressor('Compiling to pyc...')
+        comp.setBar(prog, countPysRecursive(opt['compiledir']))
+        comp.setIterator(compiler(opt['compiledir']))
+        d.addCallback(comp.processAll)
+
+    def stop(ignore):
+        reactor.stop()
+        root.destroy()
+    d.addCallback(stop)
+
+    reactor.run()
+
+
+if __name__=='__main__':
+    run()
diff --git a/ThirdParty/Twisted/twisted/scripts/trial.py b/ThirdParty/Twisted/twisted/scripts/trial.py
new file mode 100644
index 0000000..f317841
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/trial.py
@@ -0,0 +1,521 @@
+# -*- test-case-name: twisted.trial.test.test_script -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+import sys, os, random, gc, pdb, time, warnings
+
+from twisted.internet import defer
+from twisted.application import app
+from twisted.python import usage, reflect, failure
+from twisted.python.filepath import FilePath
+from twisted import plugin
+from twisted.python.util import spewer
+from twisted.python.compat import set
+from twisted.trial import runner, itrial, reporter
+
+
+# Yeah, this is stupid.  Leave it for command-line compatibility for a
+# while, though.
+TBFORMAT_MAP = {
+    'plain': 'default',
+    'default': 'default',
+    'emacs': 'brief',
+    'brief': 'brief',
+    'cgitb': 'verbose',
+    'verbose': 'verbose'
+    }
+
+
+def _parseLocalVariables(line):
+    """
+    Accepts a single line in Emacs local variable declaration format and
+    returns a dict of all the variables {name: value}.
+    Raises ValueError if 'line' is in the wrong format.
+
+    See http://www.gnu.org/software/emacs/manual/html_node/File-Variables.html
+    """
+    paren = '-*-'
+    start = line.find(paren) + len(paren)
+    end = line.rfind(paren)
+    if start == -1 or end == -1:
+        raise ValueError("%r not a valid local variable declaration" % (line,))
+    items = line[start:end].split(';')
+    localVars = {}
+    for item in items:
+        if len(item.strip()) == 0:
+            continue
+        split = item.split(':')
+        if len(split) != 2:
+            raise ValueError("%r contains invalid declaration %r"
+                             % (line, item))
+        localVars[split[0].strip()] = split[1].strip()
+    return localVars
+
+
+def loadLocalVariables(filename):
+    """
+    Accepts a filename and attempts to load the Emacs variable declarations
+    from that file, simulating what Emacs does.
+
+    See http://www.gnu.org/software/emacs/manual/html_node/File-Variables.html
+    """
+    f = file(filename, "r")
+    lines = [f.readline(), f.readline()]
+    f.close()
+    for line in lines:
+        try:
+            return _parseLocalVariables(line)
+        except ValueError:
+            pass
+    return {}
+
+
+def getTestModules(filename):
+    testCaseVar = loadLocalVariables(filename).get('test-case-name', None)
+    if testCaseVar is None:
+        return []
+    return testCaseVar.split(',')
+
+
+def isTestFile(filename):
+    """
+    Returns true if 'filename' looks like a file containing unit tests.
+    False otherwise.  Doesn't care whether filename exists.
+    """
+    basename = os.path.basename(filename)
+    return (basename.startswith('test_')
+            and os.path.splitext(basename)[1] == ('.py'))
+
+
+def _reporterAction():
+    return usage.CompleteList([p.longOpt for p in
+                               plugin.getPlugins(itrial.IReporter)])
+
+
+
+class _BasicOptions(object):
+    """
+    Basic options shared between trial and its local workers.
+    """
+    synopsis = """%s [options] [[file|package|module|TestCase|testmethod]...]
+    """ % (os.path.basename(sys.argv[0]),)
+
+    longdesc = ("trial loads and executes a suite of unit tests, obtained "
+                "from modules, packages and files listed on the command line.")
+
+    optFlags = [["help", "h"],
+                ["no-recurse", "N", "Don't recurse into packages"],
+                ['help-reporters', None,
+                 "Help on available output plugins (reporters)"],
+                ["rterrors", "e", "realtime errors, print out tracebacks as "
+                 "soon as they occur"],
+                ["unclean-warnings", None,
+                 "Turn dirty reactor errors into warnings"],
+                ["force-gc", None, "Have Trial run gc.collect() before and "
+                 "after each test case."],
+                ]
+
+    optParameters = [
+        ["random", "z", None,
+         "Run tests in random order using the specified seed"],
+        ['temp-directory', None, '_trial_temp',
+         'Path to use as working directory for tests.'],
+        ['reporter', None, 'verbose',
+         'The reporter to use for this test run.  See --help-reporters for '
+         'more info.']]
+
+    compData = usage.Completions(
+        optActions={"reporter": _reporterAction,
+                    "logfile": usage.CompleteFiles(descr="log file name"),
+                    "random": usage.Completer(descr="random seed")},
+        extraActions=[usage.CompleteFiles(
+                "*.py", descr="file | module | package | TestCase | testMethod",
+                repeat=True)],
+        )
+
+    fallbackReporter = reporter.TreeReporter
+    tracer = None
+
+    def __init__(self):
+        self['tests'] = set()
+        usage.Options.__init__(self)
+
+
+    def coverdir(self):
+        """
+        Return a L{FilePath} representing the directory into which coverage
+        results should be written.
+        """
+        coverdir = 'coverage'
+        result = FilePath(self['temp-directory']).child(coverdir)
+        print "Setting coverage directory to %s." % (result.path,)
+        return result
+
+
+    def opt_coverage(self):
+        """
+        Generate coverage information in the coverage file in the
+        directory specified by the temp-directory option.
+        """
+        import trace
+        self.tracer = trace.Trace(count=1, trace=0)
+        sys.settrace(self.tracer.globaltrace)
+        self['coverage'] = True
+
+
+    def opt_testmodule(self, filename):
+        """
+        Filename to grep for test cases (-*- test-case-name).
+        """
+        # If the filename passed to this parameter looks like a test module
+        # we just add that to the test suite.
+        #
+        # If not, we inspect it for an Emacs buffer local variable called
+        # 'test-case-name'.  If that variable is declared, we try to add its
+        # value to the test suite as a module.
+        #
+        # This parameter allows automated processes (like Buildbot) to pass
+        # a list of files to Trial with the general expectation of "these files,
+        # whatever they are, will get tested".
+        if not os.path.isfile(filename):
+            sys.stderr.write("File %r doesn't exist\n" % (filename,))
+            return
+        filename = os.path.abspath(filename)
+        if isTestFile(filename):
+            self['tests'].add(filename)
+        else:
+            self['tests'].update(getTestModules(filename))
+
+
+    def opt_spew(self):
+        """
+        Print an insanely verbose log of everything that happens.  Useful
+        when debugging freezes or locks in complex code.
+        """
+        sys.settrace(spewer)
+
+
+    def opt_help_reporters(self):
+        synopsis = ("Trial's output can be customized using plugins called "
+                    "Reporters. You can\nselect any of the following "
+                    "reporters using --reporter=<foo>\n")
+        print synopsis
+        for p in plugin.getPlugins(itrial.IReporter):
+            print '   ', p.longOpt, '\t', p.description
+        print
+        sys.exit(0)
+
+
+    def opt_disablegc(self):
+        """
+        Disable the garbage collector
+        """
+        self["disablegc"] = True
+        gc.disable()
+
+
+    def opt_tbformat(self, opt):
+        """
+        Specify the format to display tracebacks with. Valid formats are
+        'plain', 'emacs', and 'cgitb', which uses the nicely verbose stdlib
+        cgitb.text function.
+        """
+        try:
+            self['tbformat'] = TBFORMAT_MAP[opt]
+        except KeyError:
+            raise usage.UsageError(
+                "tbformat must be 'plain', 'emacs', or 'cgitb'.")
+
+
+    def opt_recursionlimit(self, arg):
+        """
+        Set the Python recursion limit; see sys.setrecursionlimit().
+        """
+        try:
+            sys.setrecursionlimit(int(arg))
+        except (TypeError, ValueError):
+            raise usage.UsageError(
+                "argument to recursionlimit must be an integer")
+        else:
+            self["recursionlimit"] = int(arg)
+
+
+    def opt_random(self, option):
+        try:
+            self['random'] = long(option)
+        except ValueError:
+            raise usage.UsageError(
+                "Argument to --random must be a positive integer")
+        else:
+            if self['random'] < 0:
+                raise usage.UsageError(
+                    "Argument to --random must be a positive integer")
+            elif self['random'] == 0:
+                self['random'] = long(time.time() * 100)
+
+
+    def opt_without_module(self, option):
+        """
+        Fake the lack of the specified modules, separated with commas.
+        """
+        self["without-module"] = option
+        for module in option.split(","):
+            if module in sys.modules:
+                warnings.warn("Module '%s' already imported, "
+                              "disabling anyway." % (module,),
+                              category=RuntimeWarning)
+            sys.modules[module] = None
+
+
+    def parseArgs(self, *args):
+        self['tests'].update(args)
+
+
+    def _loadReporterByName(self, name):
+        for p in plugin.getPlugins(itrial.IReporter):
+            qual = "%s.%s" % (p.module, p.klass)
+            if p.longOpt == name:
+                return reflect.namedAny(qual)
+        raise usage.UsageError("Only pass names of Reporter plugins to "
+                               "--reporter. See --help-reporters for "
+                               "more info.")
+
+
+    def postOptions(self):
+        # Only load reporters now, as opposed to any earlier, to avoid letting
+        # application-defined plugins muck up reactor selecting by importing
+        # t.i.reactor and causing the default to be installed.
+        self['reporter'] = self._loadReporterByName(self['reporter'])
+        if 'tbformat' not in self:
+            self['tbformat'] = 'default'
+
+
+
+class Options(_BasicOptions, usage.Options, app.ReactorSelectionMixin):
+    """
+    Options to the trial command line tool.
+
+    @ivar _workerFlags: List of flags which are accepted by trial distributed
+        workers. This is used by C{_getWorkerArguments} to build the command
+        line arguments.
+    @type _workerFlags: C{list}
+
+    @ivar _workerParameters: List of parameters which are accepted by trial
+        distributed workers. This is used by C{_getWorkerArguments} to build
+        the command line arguments.
+    @type _workerParameters: C{list}
+    """
+
+    optFlags = [
+                ["debug", "b", "Run tests in a debugger. If that debugger is "
+                 "pdb, will load '.pdbrc' from current directory if it exists."
+                ],
+                ["debug-stacktraces", "B", "Report Deferred creation and "
+                 "callback stack traces"],
+                ["nopm", None, "don't automatically jump into debugger for "
+                 "postmorteming of exceptions"],
+                ["dry-run", 'n', "do everything but run the tests"],
+                ["profile", None, "Run tests under the Python profiler"],
+                ["until-failure", "u", "Repeat test until it fails"],
+                ]
+
+    optParameters = [
+        ["debugger", None, "pdb", "the fully qualified name of a debugger to "
+         "use if --debug is passed"],
+        ["logfile", "l", "test.log", "log file name"],
+        ["jobs", "j", None, "Number of local workers to run"]
+        ]
+
+    compData = usage.Completions(
+        optActions = {
+            "tbformat": usage.CompleteList(["plain", "emacs", "cgitb"]),
+            "reporter": _reporterAction,
+            },
+        )
+
+    _workerFlags = ["disablegc", "force-gc", "coverage"]
+    _workerParameters = ["recursionlimit", "reactor", "without-module"]
+
+    fallbackReporter = reporter.TreeReporter
+    extra = None
+    tracer = None
+
+
+    def opt_jobs(self, number):
+        """
+        Number of local workers to run, a strictly positive integer.
+        """
+        try:
+            number = int(number)
+        except ValueError:
+            raise usage.UsageError(
+                "Expecting integer argument to jobs, got '%s'" % number)
+        if number <= 0:
+            raise usage.UsageError(
+                "Argument to jobs must be a strictly positive integer")
+        self["jobs"] = number
+
+
+    def _getWorkerArguments(self):
+        """
+        Return a list of options to pass to distributed workers.
+        """
+        args = []
+        for option in self._workerFlags:
+            if self.get(option) is not None:
+                if self[option]:
+                    args.append("--%s" % (option,))
+        for option in self._workerParameters:
+            if self.get(option) is not None:
+                args.extend(["--%s" % (option,), str(self[option])])
+        return args
+
+
+    def postOptions(self):
+        _BasicOptions.postOptions(self)
+        if self['jobs']:
+            for option in ['debug', 'profile', 'debug-stacktraces']:
+                if self[option]:
+                    raise usage.UsageError(
+                        "You can't specify --%s when using --jobs" % option)
+        if self['nopm']:
+            if not self['debug']:
+                raise usage.UsageError("You must specify --debug when using "
+                                       "--nopm ")
+            failure.DO_POST_MORTEM = False
+
+
+
+def _initialDebugSetup(config):
+    # do this part of debug setup first for easy debugging of import failures
+    if config['debug']:
+        failure.startDebugMode()
+    if config['debug'] or config['debug-stacktraces']:
+        defer.setDebugging(True)
+
+
+
+def _getSuite(config):
+    loader = _getLoader(config)
+    recurse = not config['no-recurse']
+    return loader.loadByNames(config['tests'], recurse)
+
+
+
+def _getLoader(config):
+    loader = runner.TestLoader()
+    if config['random']:
+        randomer = random.Random()
+        randomer.seed(config['random'])
+        loader.sorter = lambda x : randomer.random()
+        print 'Running tests shuffled with seed %d\n' % config['random']
+    if not config['until-failure']:
+        loader.suiteFactory = runner.DestructiveTestSuite
+    return loader
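+
+
+def _exampleShuffledLoader():
+    """
+    Illustrative sketch (not part of upstream trial.py): with --random=SEED
+    the loader above sorts tests using a seeded random key, so a given seed
+    reproduces the same shuffle.  The seed and test name are hypothetical.
+    """
+    config = Options()
+    config.parseOptions(["--random", "42", "twisted.test.test_banana"])
+    loader = _getLoader(config)
+    # loader.sorter ignores its argument and draws from the seeded RNG,
+    # which the TestLoader uses as the sort key for the collected tests.
+    return loader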
+
+
+def _wrappedPdb():
+    """
+    Wrap an instance of C{pdb.Pdb} with readline support and load any
+    .pdbrc or pdbrc file found in the current directory.
+    """
+
+    dbg = pdb.Pdb()
+    try:
+        import readline
+    except ImportError:
+        print "readline module not available"
+        sys.exc_clear()
+    for path in ('.pdbrc', 'pdbrc'):
+        if os.path.exists(path):
+            try:
+                rcFile = file(path, 'r')
+            except IOError:
+                sys.exc_clear()
+            else:
+                dbg.rcLines.extend(rcFile.readlines())
+    return dbg
+
+
+class _DebuggerNotFound(Exception):
+    """
+    A debugger import failed.
+
+    Used to allow translating these errors into usage error messages.
+
+    """
+
+
+
+def _makeRunner(config):
+    """
+    Return a trial runner class set up with the parameters extracted from
+    C{config}.
+
+    @return: A trial runner instance.
+    @rtype: L{runner.TrialRunner} or C{DistTrialRunner} depending on the
+        configuration.
+    """
+    cls = runner.TrialRunner
+    args = {'reporterFactory': config['reporter'],
+            'tracebackFormat': config['tbformat'],
+            'realTimeErrors': config['rterrors'],
+            'uncleanWarnings': config['unclean-warnings'],
+            'logfile': config['logfile'],
+            'workingDirectory': config['temp-directory']}
+    if config['dry-run']:
+        args['mode'] = runner.TrialRunner.DRY_RUN
+    elif config['jobs']:
+        from twisted.trial._dist.disttrial import DistTrialRunner
+        cls = DistTrialRunner
+        args['workerNumber'] = config['jobs']
+        args['workerArguments'] = config._getWorkerArguments()
+    else:
+        if config['debug']:
+            args['mode'] = runner.TrialRunner.DEBUG
+            debugger = config['debugger']
+
+            if debugger != 'pdb':
+                try:
+                    args['debugger'] = reflect.namedAny(debugger)
+                except reflect.ModuleNotFound:
+                    raise _DebuggerNotFound(
+                        '%r debugger could not be found.' % (debugger,))
+            else:
+                args['debugger'] = _wrappedPdb()
+
+        args['profile'] = config['profile']
+        args['forceGarbageCollection'] = config['force-gc']
+
+    return cls(**args)
+
+
+
+def run():
+    if len(sys.argv) == 1:
+        sys.argv.append("--help")
+    config = Options()
+    try:
+        config.parseOptions()
+    except usage.error, ue:
+        raise SystemExit, "%s: %s" % (sys.argv[0], ue)
+    _initialDebugSetup(config)
+
+    try:
+        trialRunner = _makeRunner(config)
+    except _DebuggerNotFound as e:
+        raise SystemExit('%s: %s' % (sys.argv[0], str(e)))
+
+    suite = _getSuite(config)
+    if config['until-failure']:
+        test_result = trialRunner.runUntilFailure(suite)
+    else:
+        test_result = trialRunner.run(suite)
+    if config.tracer:
+        sys.settrace(None)
+        results = config.tracer.results()
+        results.write_results(show_missing=1, summary=False,
+                              coverdir=config.coverdir().path)
+    sys.exit(not test_result.wasSuccessful())
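+
+
+def _exampleWorkerArguments():
+    """
+    Illustrative sketch (not part of upstream trial.py): how the
+    flag/parameter split declared on L{Options} is turned into worker argv.
+    The test name and option values below are hypothetical.
+    """
+    config = Options()
+    config.parseOptions(["--force-gc", "--recursionlimit", "2000",
+                         "--jobs", "4", "twisted.test.test_banana"])
+    # Flags listed in _workerFlags are forwarded bare, parameters listed in
+    # _workerParameters carry their value, and --jobs itself stays local.
+    # Expected result: ['--force-gc', '--recursionlimit', '2000']
+    return config._getWorkerArguments()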
diff --git a/ThirdParty/Twisted/twisted/scripts/twistd.py b/ThirdParty/Twisted/twisted/scripts/twistd.py
new file mode 100644
index 0000000..c2b53c7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/scripts/twistd.py
@@ -0,0 +1,30 @@
+# -*- test-case-name: twisted.test.test_twistd -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+The Twisted Daemon: platform-independent interface.
+
+ at author: Christopher Armstrong
+"""
+
+from twisted.application import app
+
+from twisted.python.runtime import platformType
+if platformType == "win32":
+    from twisted.scripts._twistw import ServerOptions, \
+        WindowsApplicationRunner as _SomeApplicationRunner
+else:
+    from twisted.scripts._twistd_unix import ServerOptions, \
+        UnixApplicationRunner as _SomeApplicationRunner
+
+
+def runApp(config):
+    _SomeApplicationRunner(config).run()
+
+
+def run():
+    app.run(runApp, ServerOptions)
+
+
+__all__ = ['run', 'runApp']
diff --git a/ThirdParty/Twisted/twisted/spread/__init__.py b/ThirdParty/Twisted/twisted/spread/__init__.py
new file mode 100644
index 0000000..e38b149
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/__init__.py
@@ -0,0 +1,12 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Twisted Spread: Spreadable (Distributed) Computing.
+
+Future Plans: PB, Jelly and Banana need to be optimized.
+
+ at author: Glyph Lefkowitz
+"""
diff --git a/ThirdParty/Twisted/twisted/spread/banana.py b/ThirdParty/Twisted/twisted/spread/banana.py
new file mode 100644
index 0000000..edae9c6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/banana.py
@@ -0,0 +1,358 @@
+# -*- test-case-name: twisted.test.test_banana -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Banana -- s-exp based protocol.
+
+Future Plans: This module is almost entirely stable.  The same caveat applies
+to it as applies to L{twisted.spread.jelly}, however.  Read its future plans
+for more details.
+
+ at author: Glyph Lefkowitz
+"""
+
+import copy, cStringIO, struct
+
+from twisted.internet import protocol
+from twisted.persisted import styles
+from twisted.python import log
+
+class BananaError(Exception):
+    pass
+
+def int2b128(integer, stream):
+    if integer == 0:
+        stream(chr(0))
+        return
+    assert integer > 0, "can only encode positive integers"
+    while integer:
+        stream(chr(integer & 0x7f))
+        integer = integer >> 7
+
+
+def b1282int(st):
+    """
+    Convert an integer represented as a base 128 string into an C{int} or
+    C{long}.
+
+    @param st: The integer encoded in a string.
+    @type st: C{str}
+
+    @return: The integer value extracted from the string.
+    @rtype: C{int} or C{long}
+    """
+    e = 1
+    i = 0
+    for char in st:
+        n = ord(char)
+        i += (n * e)
+        e <<= 7
+    return i
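+
+
+def _exampleBase128():
+    """
+    Illustrative sketch (not part of upstream banana.py): int2b128 and
+    b1282int are inverses over non-negative integers.
+    """
+    out = []
+    int2b128(300, out.append)             # 300 == 0x2c + 0x02 * 128
+    assert out == [chr(0x2c), chr(0x02)]
+    assert b1282int(''.join(out)) == 300
+    return out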
+
+
+# delimiter characters.
+LIST     = chr(0x80)
+INT      = chr(0x81)
+STRING   = chr(0x82)
+NEG      = chr(0x83)
+FLOAT    = chr(0x84)
+# "optional" -- these might be refused by a low-level implementation.
+LONGINT  = chr(0x85)
+LONGNEG  = chr(0x86)
+# really optional; this is part of the 'pb' vocabulary
+VOCAB    = chr(0x87)
+
+HIGH_BIT_SET = chr(0x80)
+
+def setPrefixLimit(limit):
+    """
+    Set the limit on the prefix length for all Banana connections
+    established after this call.
+
+    The prefix length limit determines how many bytes of prefix a banana
+    decoder will allow before rejecting a potential object as too large.
+
+    @type limit: C{int}
+    @param limit: The number of bytes of prefix for banana to allow when
+    decoding.
+    """
+    global _PREFIX_LIMIT
+    _PREFIX_LIMIT = limit
+setPrefixLimit(64)
+
+SIZE_LIMIT = 640 * 1024   # 640k is all you'll ever need :-)
+
+class Banana(protocol.Protocol, styles.Ephemeral):
+    knownDialects = ["pb", "none"]
+
+    prefixLimit = None
+    sizeLimit = SIZE_LIMIT
+
+    def setPrefixLimit(self, limit):
+        """
+        Set the prefix limit for decoding done by this protocol instance.
+
+        @see: L{setPrefixLimit}
+        """
+        self.prefixLimit = limit
+        self._smallestLongInt = -2 ** (limit * 7) + 1
+        self._smallestInt = -2 ** 31
+        self._largestInt = 2 ** 31 - 1
+        self._largestLongInt = 2 ** (limit * 7) - 1
+
+
+    def connectionReady(self):
+        """Surrogate for connectionMade
+        Called after protocol negotiation.
+        """
+
+    def _selectDialect(self, dialect):
+        self.currentDialect = dialect
+        self.connectionReady()
+
+    def callExpressionReceived(self, obj):
+        if self.currentDialect:
+            self.expressionReceived(obj)
+        else:
+            # this is the first message we've received
+            if self.isClient:
+                # if I'm a client I have to respond
+                for serverVer in obj:
+                    if serverVer in self.knownDialects:
+                        self.sendEncoded(serverVer)
+                        self._selectDialect(serverVer)
+                        break
+                else:
+                    # I can't speak any of those dialects.
+                    log.msg("The client doesn't speak any of the protocols "
+                            "offered by the server: disconnecting.")
+                    self.transport.loseConnection()
+            else:
+                if obj in self.knownDialects:
+                    self._selectDialect(obj)
+                else:
+                    # the client just selected a protocol that I did not suggest.
+                    log.msg("The client selected a protocol the server didn't "
+                            "suggest and doesn't know: disconnecting.")
+                    self.transport.loseConnection()
+
+
+    def connectionMade(self):
+        self.setPrefixLimit(_PREFIX_LIMIT)
+        self.currentDialect = None
+        if not self.isClient:
+            self.sendEncoded(self.knownDialects)
+
+
+    def gotItem(self, item):
+        l = self.listStack
+        if l:
+            l[-1][1].append(item)
+        else:
+            self.callExpressionReceived(item)
+
+    buffer = ''
+
+    def dataReceived(self, chunk):
+        buffer = self.buffer + chunk
+        listStack = self.listStack
+        gotItem = self.gotItem
+        while buffer:
+            assert self.buffer != buffer, "This ain't right: %s %s" % (repr(self.buffer), repr(buffer))
+            self.buffer = buffer
+            pos = 0
+            for ch in buffer:
+                if ch >= HIGH_BIT_SET:
+                    break
+                pos = pos + 1
+            else:
+                if pos > self.prefixLimit:
+                    raise BananaError("Security precaution: more than %d bytes of prefix" % (self.prefixLimit,))
+                return
+            num = buffer[:pos]
+            typebyte = buffer[pos]
+            rest = buffer[pos+1:]
+            if len(num) > self.prefixLimit:
+                raise BananaError("Security precaution: longer than %d bytes worth of prefix" % (self.prefixLimit,))
+            if typebyte == LIST:
+                num = b1282int(num)
+                if num > SIZE_LIMIT:
+                    raise BananaError("Security precaution: List too long.")
+                listStack.append((num, []))
+                buffer = rest
+            elif typebyte == STRING:
+                num = b1282int(num)
+                if num > SIZE_LIMIT:
+                    raise BananaError("Security precaution: String too long.")
+                if len(rest) >= num:
+                    buffer = rest[num:]
+                    gotItem(rest[:num])
+                else:
+                    return
+            elif typebyte == INT:
+                buffer = rest
+                num = b1282int(num)
+                gotItem(num)
+            elif typebyte == LONGINT:
+                buffer = rest
+                num = b1282int(num)
+                gotItem(num)
+            elif typebyte == LONGNEG:
+                buffer = rest
+                num = b1282int(num)
+                gotItem(-num)
+            elif typebyte == NEG:
+                buffer = rest
+                num = -b1282int(num)
+                gotItem(num)
+            elif typebyte == VOCAB:
+                buffer = rest
+                num = b1282int(num)
+                gotItem(self.incomingVocabulary[num])
+            elif typebyte == FLOAT:
+                if len(rest) >= 8:
+                    buffer = rest[8:]
+                    gotItem(struct.unpack("!d", rest[:8])[0])
+                else:
+                    return
+            else:
+                raise NotImplementedError(("Invalid Type Byte %r" % (typebyte,)))
+            while listStack and (len(listStack[-1][1]) == listStack[-1][0]):
+                item = listStack.pop()[1]
+                gotItem(item)
+        self.buffer = ''
+
+
+    def expressionReceived(self, lst):
+        """Called when an expression (list, string, or int) is received.
+        """
+        raise NotImplementedError()
+
+
+    outgoingVocabulary = {
+        # Jelly Data Types
+        'None'           :  1,
+        'class'          :  2,
+        'dereference'    :  3,
+        'reference'      :  4,
+        'dictionary'     :  5,
+        'function'       :  6,
+        'instance'       :  7,
+        'list'           :  8,
+        'module'         :  9,
+        'persistent'     : 10,
+        'tuple'          : 11,
+        'unpersistable'  : 12,
+
+        # PB Data Types
+        'copy'           : 13,
+        'cache'          : 14,
+        'cached'         : 15,
+        'remote'         : 16,
+        'local'          : 17,
+        'lcache'         : 18,
+
+        # PB Protocol Messages
+        'version'        : 19,
+        'login'          : 20,
+        'password'       : 21,
+        'challenge'      : 22,
+        'logged_in'      : 23,
+        'not_logged_in'  : 24,
+        'cachemessage'   : 25,
+        'message'        : 26,
+        'answer'         : 27,
+        'error'          : 28,
+        'decref'         : 29,
+        'decache'        : 30,
+        'uncache'        : 31,
+        }
+
+    incomingVocabulary = {}
+    for k, v in outgoingVocabulary.items():
+        incomingVocabulary[v] = k
+
+    def __init__(self, isClient=1):
+        self.listStack = []
+        self.outgoingSymbols = copy.copy(self.outgoingVocabulary)
+        self.outgoingSymbolCount = 0
+        self.isClient = isClient
+
+    def sendEncoded(self, obj):
+        io = cStringIO.StringIO()
+        self._encode(obj, io.write)
+        value = io.getvalue()
+        self.transport.write(value)
+
+    def _encode(self, obj, write):
+        if isinstance(obj, (list, tuple)):
+            if len(obj) > SIZE_LIMIT:
+                raise BananaError(
+                    "list/tuple is too long to send (%d)" % (len(obj),))
+            int2b128(len(obj), write)
+            write(LIST)
+            for elem in obj:
+                self._encode(elem, write)
+        elif isinstance(obj, (int, long)):
+            if obj < self._smallestLongInt or obj > self._largestLongInt:
+                raise BananaError(
+                    "int/long is too large to send (%d)" % (obj,))
+            if obj < self._smallestInt:
+                int2b128(-obj, write)
+                write(LONGNEG)
+            elif obj < 0:
+                int2b128(-obj, write)
+                write(NEG)
+            elif obj <= self._largestInt:
+                int2b128(obj, write)
+                write(INT)
+            else:
+                int2b128(obj, write)
+                write(LONGINT)
+        elif isinstance(obj, float):
+            write(FLOAT)
+            write(struct.pack("!d", obj))
+        elif isinstance(obj, str):
+            # TODO: an API for extending banana...
+            if self.currentDialect == "pb" and obj in self.outgoingSymbols:
+                symbolID = self.outgoingSymbols[obj]
+                int2b128(symbolID, write)
+                write(VOCAB)
+            else:
+                if len(obj) > SIZE_LIMIT:
+                    raise BananaError(
+                        "string is too long to send (%d)" % (len(obj),))
+                int2b128(len(obj), write)
+                write(STRING)
+                write(obj)
+        else:
+            raise BananaError("could not send object: %r" % (obj,))
+
+
+# For use from the interactive interpreter
+_i = Banana()
+_i.connectionMade()
+_i._selectDialect("none")
+
+
+def encode(lst):
+    """Encode a list s-expression."""
+    io = cStringIO.StringIO()
+    _i.transport = io
+    _i.sendEncoded(lst)
+    return io.getvalue()
+
+
+def decode(st):
+    """
+    Decode a banana-encoded string.
+    """
+    l = []
+    _i.expressionReceived = l.append
+    try:
+        _i.dataReceived(st)
+    finally:
+        _i.buffer = ''
+        del _i.expressionReceived
+    return l[0]
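+
+
+def _exampleRoundTrip():
+    """
+    Illustrative sketch (not part of upstream banana.py): a round trip
+    through the module-level encode()/decode() helpers above.
+    """
+    wire = encode([1, "hello", [2.5, -3]])
+    # Each element is written as a base-128 prefix followed by a type byte
+    # (LIST, INT, STRING, FLOAT, NEG, ...), so decode() recovers the
+    # original s-expression.
+    assert decode(wire) == [1, "hello", [2.5, -3]]
+    return wire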
diff --git a/ThirdParty/Twisted/twisted/spread/flavors.py b/ThirdParty/Twisted/twisted/spread/flavors.py
new file mode 100644
index 0000000..61d6b80
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/flavors.py
@@ -0,0 +1,590 @@
+# -*- test-case-name: twisted.test.test_pb -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module represents flavors of remotely accessible objects.
+
+Currently these are only objects accessible through Perspective Broker, but the
+module will hopefully encompass all forms of remote access which can emulate
+subsets of PB (such as XMLRPC or SOAP).
+
+Future Plans: Optimization.  Exploitation of new-style object model.
+Optimizations to this module should not affect external-use semantics at all,
+but may have a small impact on users who subclass and override methods.
+
+ at author: Glyph Lefkowitz
+"""
+
+# NOTE: this module should NOT import pb; it is supposed to be a module which
+# abstractly defines remotely accessible types.  Many of these types expect to
+# be serialized by Jelly, but they ought to be accessible through other
+# mechanisms (like XMLRPC)
+
+# system imports
+import sys
+from zope.interface import implements, Interface
+
+# twisted imports
+from twisted.python import log, reflect
+
+# sibling imports
+from jelly import setUnjellyableForClass, setUnjellyableForClassTree, setUnjellyableFactoryForClass, unjellyableRegistry
+from jelly import Jellyable, Unjellyable, _newDummyLike
+from jelly import setInstanceState, getInstanceState
+
+# compatibility
+setCopierForClass = setUnjellyableForClass
+setCopierForClassTree = setUnjellyableForClassTree
+setFactoryForClass = setUnjellyableFactoryForClass
+copyTags = unjellyableRegistry
+
+copy_atom = "copy"
+cache_atom = "cache"
+cached_atom = "cached"
+remote_atom = "remote"
+
+
+class NoSuchMethod(AttributeError):
+    """Raised if there is no such remote method"""
+
+
+class IPBRoot(Interface):
+    """Factory for root Referenceable objects for PB servers."""
+
+    def rootObject(broker):
+        """Return root Referenceable for broker."""
+
+
+class Serializable(Jellyable):
+    """An object that can be passed remotely.
+
+    I am a style of object which can be serialized by Perspective
+    Broker.  Objects which wish to be referenceable or copied remotely
+    have to subclass Serializable.  However, clients of Perspective
+    Broker will probably not want to directly subclass Serializable; the
+    Flavors of transferable objects are listed below.
+
+    What it means to be \"Serializable\" is that an object can be
+    passed to or returned from a remote method.  Certain basic types
+    (dictionaries, lists, tuples, numbers, strings) are serializable by
+    default; however, classes need to choose a specific serialization
+    style: L{Referenceable}, L{Viewable}, L{Copyable} or L{Cacheable}.
+
+    You may also pass C{[lists, dictionaries, tuples]} of L{Serializable}
+    instances to or return them from remote methods, as many levels deep
+    as you like.
+    """
+
+    def processUniqueID(self):
+        """Return an ID which uniquely represents this object for this process.
+
+        By default, this uses the 'id' builtin, but can be overridden to
+        indicate that two values are identity-equivalent (such as proxies
+        for the same object).
+        """
+
+        return id(self)
+
+class Referenceable(Serializable):
+    perspective = None
+    """I am an object sent remotely as a direct reference.
+
+    When one of my subclasses is sent as an argument to or returned
+    from a remote method call, I will be serialized by default as a
+    direct reference.
+
+    This means that the peer will be able to call methods on me;
+    a method call xxx() from my peer will be resolved to methods
+    of the name remote_xxx.
+    """
+
+    def remoteMessageReceived(self, broker, message, args, kw):
+        """A remote message has been received.  Dispatch it appropriately.
+
+        The default implementation is to dispatch to a method called
+        'remote_messagename' and call it with the same arguments.
+        """
+        args = broker.unserialize(args)
+        kw = broker.unserialize(kw)
+        method = getattr(self, "remote_%s" % message, None)
+        if method is None:
+            raise NoSuchMethod("No such method: remote_%s" % (message,))
+        try:
+            state = method(*args, **kw)
+        except TypeError:
+            log.msg("%s didn't accept %s and %s" % (method, args, kw))
+            raise
+        return broker.serialize(state, self.perspective)
+
+    def jellyFor(self, jellier):
+        """(internal)
+
+        Return a tuple which will be used as the s-expression to
+        serialize this to a peer.
+        """
+
+        return ["remote", jellier.invoker.registerReference(self)]
+
+
+class Root(Referenceable):
+    """I provide a root object to L{pb.Broker}s for a L{pb.BrokerFactory}.
+
+    When a L{pb.BrokerFactory} produces a L{pb.Broker}, it supplies that
+    L{pb.Broker} with an object named \"root\".  That object is obtained
+    by calling my rootObject method.
+    """
+
+    implements(IPBRoot)
+    
+    def rootObject(self, broker):
+        """A L{pb.BrokerFactory} is requesting to publish me as a root object.
+
+        When a L{pb.BrokerFactory} is sending me as the root object, this
+        method will be invoked to allow per-broker versions of an
+        object.  By default I return myself.
+        """
+        return self
+
+
+class ViewPoint(Referenceable):
+    """
+    I act as an indirect reference to an object accessed through a
+    L{pb.Perspective}.
+
+    Simply put, I combine an object with a perspective so that when a
+    peer calls methods on the object I refer to, the method will be
+    invoked with that perspective as a first argument, so that it can
+    know who is calling it.
+
+    While L{Viewable} objects will be converted to ViewPoints by default
+    when they are returned from or sent as arguments to a remote
+    method, any object may be manually proxied as well. (XXX: Now that
+    this class is no longer named C{Proxy}, this is the only occurrence
+    of the term 'proxied' in this docstring, and may be unclear.)
+
+    This can be useful when dealing with L{pb.Perspective}s, L{Copyable}s,
+    and L{Cacheable}s.  It is legal to implement a method as such on
+    a perspective::
+
+     | def perspective_getViewPointForOther(self, name):
+     |     defr = self.service.getPerspectiveRequest(name)
+     |     defr.addCallbacks(lambda x, self=self: ViewPoint(self, x), log.msg)
+     |     return defr
+
+    This will allow you to have references to Perspective objects in two
+    different ways.  One is through the initial 'attach' call -- each
+    peer will have a L{pb.RemoteReference} to their perspective directly.  The
+    other is through this method; each peer can get a L{pb.RemoteReference} to
+    all other perspectives in the service; but that L{pb.RemoteReference} will
+    be to a L{ViewPoint}, not directly to the object.
+
+    The practical offshoot of this is that you can implement 2 varieties
+    of remotely callable methods on this Perspective; view_xxx and
+    C{perspective_xxx}. C{view_xxx} methods will follow the rules for
+    ViewPoint methods (see ViewPoint.L{remoteMessageReceived}), and
+    C{perspective_xxx} methods will follow the rules for Perspective
+    methods.
+    """
+
+    def __init__(self, perspective, object):
+        """Initialize me with a Perspective and an Object.
+        """
+        self.perspective = perspective
+        self.object = object
+
+    def processUniqueID(self):
+        """Return an ID unique to a proxy for this perspective+object combination.
+        """
+        return (id(self.perspective), id(self.object))
+
+    def remoteMessageReceived(self, broker, message, args, kw):
+        """A remote message has been received.  Dispatch it appropriately.
+
+        The default implementation is to dispatch to a method called
+        'C{view_messagename}' to my Object and call it on my object with
+        the same arguments, modified by inserting my Perspective as
+        the first argument.
+        """
+        args = broker.unserialize(args, self.perspective)
+        kw = broker.unserialize(kw, self.perspective)
+        method = getattr(self.object, "view_%s" % message)
+        try:
+            state = method(*(self.perspective,)+args, **kw)
+        except TypeError:
+            log.msg("%s didn't accept %s and %s" % (method, args, kw))
+            raise
+        rv = broker.serialize(state, self.perspective, method, args, kw)
+        return rv
+
+
+class Viewable(Serializable):
+    """I will be converted to a L{ViewPoint} when passed to or returned from a remote method.
+
+    The beginning of a peer's interaction with a PB Service is always
+    through a perspective.  However, if a C{perspective_xxx} method returns
+    a Viewable, it will be serialized to the peer as a response to that
+    method.
+    """
+
+    def jellyFor(self, jellier):
+        """Serialize a L{ViewPoint} for me and the perspective of the given broker.
+        """
+        return ViewPoint(jellier.invoker.serializingPerspective, self).jellyFor(jellier)
+
+
+
+class Copyable(Serializable):
+    """Subclass me to get copied each time you are returned from or passed to a remote method.
+
+    When I am returned from or passed to a remote method call, I will be
+    converted into data via a set of callbacks (see my methods for more
+    info).  That data will then be serialized using Jelly, and sent to
+    the peer.
+
+    The peer will then look up the type to represent this with; see
+    L{RemoteCopy} for details.
+    """
+
+    def getStateToCopy(self):
+        """Gather state to send when I am serialized for a peer.
+
+        I will default to returning self.__dict__.  Override this to
+        customize this behavior.
+        """
+
+        return self.__dict__
+
+    def getStateToCopyFor(self, perspective):
+        """
+        Gather state to send when I am serialized for a particular
+        perspective.
+
+        I will default to calling L{getStateToCopy}.  Override this to
+        customize this behavior.
+        """
+
+        return self.getStateToCopy()
+
+    def getTypeToCopy(self):
+        """Determine what type tag to send for me.
+
+        By default, send the string representation of my class
+        (package.module.Class); normally this is adequate, but
+        you may override this to change it.
+        """
+
+        return reflect.qual(self.__class__)
+
+    def getTypeToCopyFor(self, perspective):
+        """Determine what type tag to send for me.
+
+        By default, defer to self.L{getTypeToCopy}() normally this is
+        adequate, but you may override this to change it.
+        """
+
+        return self.getTypeToCopy()
+
+    def jellyFor(self, jellier):
+        """Assemble type tag and state to copy for this broker.
+
+        This will call L{getTypeToCopyFor} and L{getStateToCopy}, and
+        return an appropriate s-expression to represent me.
+        """
+
+        if jellier.invoker is None:
+            return getInstanceState(self, jellier)
+        p = jellier.invoker.serializingPerspective
+        t = self.getTypeToCopyFor(p)
+        state = self.getStateToCopyFor(p)
+        sxp = jellier.prepare(self)
+        sxp.extend([t, jellier.jelly(state)])
+        return jellier.preserve(self, sxp)
+
+
+class Cacheable(Copyable):
+    """A cached instance.
+
+    This means that it's copied; but there is some logic to make sure
+    that it's only copied once.  Additionally, when state is retrieved,
+    it is passed a "proto-reference" to the state as it will exist on
+    the client.
+
+    XXX: The documentation for this class needs work, but it's the most
+    complex part of PB and it is inherently difficult to explain.
+    """
+
+    def getStateToCacheAndObserveFor(self, perspective, observer):
+        """
+        Get state to cache on the client and client-cache reference
+        to observe locally.
+
+        This is similar to getStateToCopyFor, but it additionally
+        passes in a reference to the client-side RemoteCache instance
+        that will be created when it is unserialized.  This allows
+        Cacheable instances to keep their RemoteCaches up to date when
+        they change, such that no changes can occur between the point
+        at which the state is initially copied and the client receives
+        it that are not propagated.
+        """
+
+        return self.getStateToCopyFor(perspective)
+
+    def jellyFor(self, jellier):
+        """Return an appropriate tuple to serialize me.
+
+        Depending on whether this broker has cached me or not, this may
+        return either a full state or a reference to an existing cache.
+        """
+        if jellier.invoker is None:
+            return getInstanceState(self, jellier)
+        luid = jellier.invoker.cachedRemotelyAs(self, 1)
+        if luid is None:
+            luid = jellier.invoker.cacheRemotely(self)
+            p = jellier.invoker.serializingPerspective
+            type_ = self.getTypeToCopyFor(p)
+            observer = RemoteCacheObserver(jellier.invoker, self, p)
+            state = self.getStateToCacheAndObserveFor(p, observer)
+            l = jellier.prepare(self)
+            jstate = jellier.jelly(state)
+            l.extend([type_, luid, jstate])
+            return jellier.preserve(self, l)
+        else:
+            return cached_atom, luid
+
+    def stoppedObserving(self, perspective, observer):
+        """This method is called when a client has stopped observing me.
+
+        The 'observer' argument is the same as that passed in to
+        getStateToCacheAndObserveFor.
+        """
+
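+
+class _ExampleCounter(Cacheable):
+    """
+    Illustrative sketch (not part of upstream flavors.py): a Cacheable that
+    keeps the peers' RemoteCache copies current by notifying the observers
+    handed to getStateToCacheAndObserveFor.  Names are hypothetical.
+    """
+
+    def __init__(self):
+        self.count = 0
+        self.observers = []
+
+    def getStateToCacheAndObserveFor(self, perspective, observer):
+        # Remember the observer so later changes can be pushed to the peer.
+        self.observers.append(observer)
+        return {'count': self.count}
+
+    def increment(self):
+        self.count += 1
+        for observer in self.observers:
+            # Delivered as observe_setCount() on the peer's RemoteCache.
+            observer.callRemote('setCount', self.count)
+
+    def stoppedObserving(self, perspective, observer):
+        self.observers.remove(observer)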
+
+
+class RemoteCopy(Unjellyable):
+    """I am a remote copy of a Copyable object.
+
+    When the state from a L{Copyable} object is received, an instance will
+    be created based on the copy tags table (see setUnjellyableForClass) and
+    sent the L{setCopyableState} message.  I provide a reasonable default
+    implementation of that message; subclass me if you wish to serve as
+    a copier for remote data.
+
+    NOTE: copiers are invoked with no arguments.  Do not implement a
+    constructor which requires args in a subclass of L{RemoteCopy}!
+    """
+
+    def setCopyableState(self, state):
+        """I will be invoked with the state to copy locally.
+
+        'state' is the data returned from the remote object's
+        'getStateToCopyFor' method, which will often be the remote
+        object's dictionary (or a filtered approximation of it depending
+        on my peer's perspective).
+        """
+
+        self.__dict__ = state
+
+    def unjellyFor(self, unjellier, jellyList):
+        if unjellier.invoker is None:
+            return setInstanceState(self, unjellier, jellyList)
+        self.setCopyableState(unjellier.unjelly(jellyList[1]))
+        return self
+
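+
+def _exampleCopyablePair():
+    """
+    Illustrative sketch (not part of upstream flavors.py): the canonical
+    Copyable/RemoteCopy pairing.  The class names are hypothetical; in real
+    code the registration runs at module import time on the receiving side.
+    """
+    class FrogPond(Copyable):
+        def __init__(self, numFrogs):
+            self.numFrogs = numFrogs
+
+    class ReceiverPond(RemoteCopy):
+        def countFrogs(self):
+            return self.numFrogs
+
+    # Instances of FrogPond sent over PB arrive as ReceiverPond instances
+    # whose __dict__ was filled in by setCopyableState.
+    setUnjellyableForClass(FrogPond, ReceiverPond)
+    return FrogPond(12)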
+
+
+class RemoteCache(RemoteCopy, Serializable):
+    """A cache is a local representation of a remote L{Cacheable} object.
+
+    This represents the last known state of this object.  It may
+    also have methods invoked on it -- in order to update caches,
+    the cached class generates a L{pb.RemoteReference} to this object as
+    it is originally sent.
+
+    Much like copy, I will be invoked with no arguments.  Do not
+    implement a constructor that requires arguments in one of my
+    subclasses.
+    """
+
+    def remoteMessageReceived(self, broker, message, args, kw):
+        """A remote message has been received.  Dispatch it appropriately.
+
+        The default implementation is to dispatch to a method called
+        'C{observe_messagename}' and call it on me with the same arguments.
+        """
+
+        args = broker.unserialize(args)
+        kw = broker.unserialize(kw)
+        method = getattr(self, "observe_%s" % message)
+        try:
+            state = method(*args, **kw)
+        except TypeError:
+            log.msg("%s didn't accept %s and %s" % (method, args, kw))
+            raise
+        return broker.serialize(state, None, method, args, kw)
+
+    def jellyFor(self, jellier):
+        """serialize me (only for the broker I'm for) as the original cached reference
+        """
+        if jellier.invoker is None:
+            return getInstanceState(self, jellier)
+        assert jellier.invoker is self.broker, "You cannot exchange cached proxies between brokers."
+        return 'lcache', self.luid
+
+
+    def unjellyFor(self, unjellier, jellyList):
+        if unjellier.invoker is None:
+            return setInstanceState(self, unjellier, jellyList)
+        self.broker = unjellier.invoker
+        self.luid = jellyList[1]
+        cProxy = _newDummyLike(self)
+        # XXX questionable whether this was a good design idea...
+        init = getattr(cProxy, "__init__", None)
+        if init:
+            init()
+        unjellier.invoker.cacheLocally(jellyList[1], self)
+        cProxy.setCopyableState(unjellier.unjelly(jellyList[2]))
+        # Might have changed due to setCopyableState method; we'll assume that
+        # it's bad form to do so afterwards.
+        self.__dict__ = cProxy.__dict__
+        # chomp, chomp -- some existing code uses "self.__dict__ =", some uses
+        # "__dict__.update".  This is here in order to handle both cases.
+        self.broker = unjellier.invoker
+        self.luid = jellyList[1]
+        return cProxy
+
+##     def __really_del__(self):
+##         """Final finalization call, made after all remote references have been lost.
+##         """
+
+    def __cmp__(self, other):
+        """Compare me [to another RemoteCache.
+        """
+        if isinstance(other, self.__class__):
+            return cmp(id(self.__dict__), id(other.__dict__))
+        else:
+            return cmp(id(self.__dict__), other)
+
+    def __hash__(self):
+        """Hash me.
+        """
+        return int(id(self.__dict__) % sys.maxint)
+
+    broker = None
+    luid = None
+
+    def __del__(self):
+        """Do distributed reference counting on finalize.
+        """
+        try:
+            # log.msg( ' --- decache: %s %s' % (self, self.luid) )
+            if self.broker:
+                self.broker.decCacheRef(self.luid)
+        except:
+            log.deferr()
+
+def unjellyCached(unjellier, unjellyList):
+    luid = unjellyList[1]
+    cNotProxy = unjellier.invoker.cachedLocallyAs(luid)
+    cProxy = _newDummyLike(cNotProxy)
+    return cProxy
+
+setUnjellyableForClass("cached", unjellyCached)
+
+def unjellyLCache(unjellier, unjellyList):
+    luid = unjellyList[1]
+    obj = unjellier.invoker.remotelyCachedForLUID(luid)
+    return obj
+
+setUnjellyableForClass("lcache", unjellyLCache)
+
+def unjellyLocal(unjellier, unjellyList):
+    obj = unjellier.invoker.localObjectForID(unjellyList[1])
+    return obj
+
+setUnjellyableForClass("local", unjellyLocal)
+
+class RemoteCacheMethod:
+    """A method on a reference to a L{RemoteCache}.
+    """
+
+    def __init__(self, name, broker, cached, perspective):
+        """(internal) initialize.
+        """
+        self.name = name
+        self.broker = broker
+        self.perspective = perspective
+        self.cached = cached
+
+    def __cmp__(self, other):
+        return cmp((self.name, self.broker, self.perspective, self.cached), other)
+
+    def __hash__(self):
+        return hash((self.name, self.broker, self.perspective, self.cached))
+
+    def __call__(self, *args, **kw):
+        """(internal) action method.
+        """
+        cacheID = self.broker.cachedRemotelyAs(self.cached)
+        if cacheID is None:
+            from pb import ProtocolError
+            raise ProtocolError("You can't call a cached method when the object hasn't been given to the peer yet.")
+        return self.broker._sendMessage('cache', self.perspective, cacheID, self.name, args, kw)
+
+class RemoteCacheObserver:
+    """I am a reverse-reference to the peer's L{RemoteCache}.
+
+    I am generated automatically when a cache is serialized.  I
+    represent a reference to the client's L{RemoteCache} object that
+    will represent a particular L{Cacheable}; I am the additional
+    object passed to getStateToCacheAndObserveFor.
+    """
+
+    def __init__(self, broker, cached, perspective):
+        """(internal) Initialize me.
+
+        @param broker: a L{pb.Broker} instance.
+
+        @param cached: a L{Cacheable} instance that this L{RemoteCacheObserver}
+            corresponds to.
+
+        @param perspective: a reference to the perspective who is observing this.
+        """
+
+        self.broker = broker
+        self.cached = cached
+        self.perspective = perspective
+
+    def __repr__(self):
+        return "<RemoteCacheObserver(%s, %s, %s) at %s>" % (
+            self.broker, self.cached, self.perspective, id(self))
+
+    def __hash__(self):
+        """Generate a hash unique to all L{RemoteCacheObserver}s for this broker/perspective/cached triplet
+        """
+
+        return (  (hash(self.broker) % 2**10)
+                + (hash(self.perspective) % 2**10)
+                + (hash(self.cached) % 2**10))
+
+    def __cmp__(self, other):
+        """Compare me to another L{RemoteCacheObserver}.
+        """
+
+        return cmp((self.broker, self.perspective, self.cached), other)
+
+    def callRemote(self, _name, *args, **kw):
+        """(internal) action method.
+        """
+        cacheID = self.broker.cachedRemotelyAs(self.cached)
+        if cacheID is None:
+            from pb import ProtocolError
+            raise ProtocolError("You can't call a cached method when the "
+                                "object hasn't been given to the peer yet.")
+        return self.broker._sendMessage('cache', self.perspective, cacheID,
+                                        _name, args, kw)
+
+    def remoteMethod(self, key):
+        """Get a L{pb.RemoteMethod} for this key.
+        """
+        return RemoteCacheMethod(key, self.broker, self.cached, self.perspective)
diff --git a/ThirdParty/Twisted/twisted/spread/interfaces.py b/ThirdParty/Twisted/twisted/spread/interfaces.py
new file mode 100644
index 0000000..6d48d00
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/interfaces.py
@@ -0,0 +1,28 @@
+"""
+Twisted Spread Interfaces.
+
+This module is unused so far. It's also undecided whether this module
+will remain monolithic.
+"""
+
+from zope.interface import Interface
+
+class IJellyable(Interface):
+    def jellyFor(jellier):
+        """
+        Jelly myself for jellier.
+        """
+
+class IUnjellyable(Interface):
+    def unjellyFor(jellier, jellyList):
+        """
+        Unjelly myself for the jellier.
+
+        @param jellier: A stateful object which exists for the lifetime of a
+        single call to L{unjelly}.
+
+        @param jellyList: The C{list} which represents the jellied state of the
+        object to be unjellied.
+
+        @return: The object which results from unjellying.
+        """
diff --git a/ThirdParty/Twisted/twisted/spread/jelly.py b/ThirdParty/Twisted/twisted/spread/jelly.py
new file mode 100644
index 0000000..1879530
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/jelly.py
@@ -0,0 +1,1151 @@
+# -*- test-case-name: twisted.test.test_jelly -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+S-expression-based persistence of python objects.
+
+It does something very much like L{Pickle<pickle>}; however, pickle's main goal
+seems to be efficiency (both in space and time); jelly's main goals are
+security, human readability, and portability to other environments.
+
+This is how Jelly converts various objects to s-expressions.
+
+Boolean::
+    True --> ['boolean', 'true']
+
+Integer::
+    1 --> 1
+
+List::
+    [1, 2] --> ['list', 1, 2]
+
+String::
+    \"hello\" --> \"hello\"
+
+Float::
+    2.3 --> 2.3
+
+Dictionary::
+    {'a': 1, 'b': 'c'} --> ['dictionary', ['b', 'c'], ['a', 1]]
+
+Module::
+    UserString --> ['module', 'UserString']
+
+Class::
+    UserString.UserString --> ['class', ['module', 'UserString'], 'UserString']
+
+Function::
+    string.join --> ['function', 'join', ['module', 'string']]
+
+Instance: s is an instance of UserString.UserString, with a __dict__
+{'data': 'hello'}::
+    [\"UserString.UserString\", ['dictionary', ['data', 'hello']]]
+
+Class Method: UserString.UserString.center::
+    ['method', 'center', ['None'], ['class', ['module', 'UserString'],
+     'UserString']]
+
+Instance Method: s.center, where s is an instance of UserString.UserString::
+    ['method', 'center', ['instance', ['reference', 1, ['class',
+    ['module', 'UserString'], 'UserString']], ['dictionary', ['data', 'd']]],
+    ['dereference', 1]]
+
+The C{set} builtin and the C{sets.Set} class are serialized to the same
+thing, and unserialized to C{set} if available, else to C{sets.Set}. It means
+that there's a possibility of type switching in the serialization process. The
+solution is to always use C{set} if possible, and only use C{sets.Set} under
+Python 2.3; this can be accomplished by using L{twisted.python.compat.set}.
+
+The same rule applies for C{frozenset} and C{sets.ImmutableSet}.
+
+ at author: Glyph Lefkowitz
+"""
+
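+# Illustrative note (not part of upstream jelly.py): with the module-level
+# jelly()/unjelly() helpers defined further down in this file, the mapping
+# above round-trips; for example:
+#
+#     jelly({'a': 1})          -> ['dictionary', ['a', 1]]
+#     unjelly(['list', 1, 2])  -> [1, 2]
+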
+# System Imports
+import pickle
+import types
+import warnings
+from types import StringType
+from types import UnicodeType
+from types import IntType
+from types import TupleType
+from types import ListType
+from types import LongType
+from types import FloatType
+from types import FunctionType
+from types import MethodType
+from types import ModuleType
+from types import DictionaryType
+from types import InstanceType
+from types import NoneType
+from types import ClassType
+import copy
+
+import datetime
+from types import BooleanType
+
+try:
+    import decimal
+except ImportError:
+    decimal = None
+
+try:
+    _set = set
+except NameError:
+    _set = None
+
+try:
+    # Filter out deprecation warning for Python >= 2.6
+    warnings.filterwarnings("ignore", category=DeprecationWarning,
+        message="the sets module is deprecated", append=True)
+    import sets as _sets
+finally:
+    warnings.filters.pop()
+
+
+from zope.interface import implements
+
+# Twisted Imports
+from twisted.python.reflect import namedObject, qual
+from twisted.persisted.crefutil import NotKnown, _Tuple, _InstanceMethod
+from twisted.persisted.crefutil import _DictKeyAndValue, _Dereference
+from twisted.persisted.crefutil import _Container
+from twisted.python.compat import reduce
+
+from twisted.spread.interfaces import IJellyable, IUnjellyable
+
+DictTypes = (DictionaryType,)
+
+None_atom = "None"                  # N
+# code
+class_atom = "class"                # c
+module_atom = "module"              # m
+function_atom = "function"          # f
+
+# references
+dereference_atom = 'dereference'    # D
+persistent_atom = 'persistent'      # p
+reference_atom = 'reference'        # r
+
+# mutable collections
+dictionary_atom = "dictionary"      # d
+list_atom = 'list'                  # l
+set_atom = 'set'
+
+# immutable collections
+#   (assignment to __dict__ and __class__ still might go away!)
+tuple_atom = "tuple"                # t
+instance_atom = 'instance'          # i
+frozenset_atom = 'frozenset'
+
+
+# errors
+unpersistable_atom = "unpersistable"# u
+unjellyableRegistry = {}
+unjellyableFactoryRegistry = {}
+
+_NO_STATE = object()
+
+def _newInstance(cls, state=_NO_STATE):
+    """
+    Make a new instance of a class without calling its __init__ method.
+    Supports both new- and old-style classes.
+
+    @param state: A C{dict} used to update C{inst.__dict__} or C{_NO_STATE}
+        to skip this part of initialization.
+
+    @return: A new instance of C{cls}.
+    """
+    if not isinstance(cls, types.ClassType):
+        # new-style
+        inst = cls.__new__(cls)
+
+        if state is not _NO_STATE:
+            inst.__dict__.update(state) # Copy 'instance' behaviour
+    else:
+        if state is not _NO_STATE:
+            inst = InstanceType(cls, state)
+        else:
+            inst = InstanceType(cls)
+    return inst
+
+
+
+def _maybeClass(classnamep):
+    try:
+        object
+    except NameError:
+        isObject = 0
+    else:
+        isObject = isinstance(classnamep, type)
+    if isinstance(classnamep, ClassType) or isObject:
+        return qual(classnamep)
+    return classnamep
+
+
+
+def setUnjellyableForClass(classname, unjellyable):
+    """
+    Set which local class will represent a remote type.
+
+    If you have written a Copyable class that you expect your client to be
+    receiving, write a local "copy" class to represent it, then call::
+
+        jellier.setUnjellyableForClass('module.package.Class', MyCopier).
+
+    Call this at the module level immediately after its class
+    definition. MyCopier should be a subclass of RemoteCopy.
+
+    The classname may be a special tag returned by
+    'Copyable.getTypeToCopyFor' rather than an actual classname.
+
+    This call is also for cached classes, since there will be no
+    overlap.  The rules are the same.
+    """
+
+    global unjellyableRegistry
+    classname = _maybeClass(classname)
+    unjellyableRegistry[classname] = unjellyable
+    globalSecurity.allowTypes(classname)
+
+
+
+def setUnjellyableFactoryForClass(classname, copyFactory):
+    """
+    Set the factory to construct a remote instance of a type::
+
+      jellier.setUnjellyableFactoryForClass('module.package.Class', MyFactory)
+
+    Call this at the module level immediately after its class definition.
+    C{copyFactory} should return an instance or subclass of
+    L{RemoteCopy<pb.RemoteCopy>}.
+
+    Similar to L{setUnjellyableForClass} except it uses a factory instead
+    of creating an instance.
+    """
+
+    global unjellyableFactoryRegistry
+    classname = _maybeClass(classname)
+    unjellyableFactoryRegistry[classname] = copyFactory
+    globalSecurity.allowTypes(classname)
+
+
+
+def setUnjellyableForClassTree(module, baseClass, prefix=None):
+    """
+    Set all classes in a module derived from C{baseClass} as copiers for
+    a corresponding remote class.
+
+    When you have a hierarchy of Copyable (or Cacheable) classes on one
+    side, and a mirror structure of Copied (or RemoteCache) classes on the
+    other, use this to setUnjellyableForClass all your Copieds for the
+    Copyables.
+
+    Each copyTag (the \"classname\" argument to getTypeToCopyFor, and
+    what the Copyable's getTypeToCopyFor returns) is formed from
+    adding a prefix to the Copied's class name.  The prefix defaults
+    to module.__name__.  If you wish the copy tag to consist of solely
+    the classname, pass the empty string \'\'.
+
+    @param module: a module object from which to pull the Copied classes.
+        (passing sys.modules[__name__] might be useful)
+
+    @param baseClass: the base class from which all your Copied classes derive.
+
+    @param prefix: the string prefixed to classnames to form the
+        unjellyableRegistry.
+    """
+    if prefix is None:
+        prefix = module.__name__
+
+    if prefix:
+        prefix = "%s." % prefix
+
+    for i in dir(module):
+        i_ = getattr(module, i)
+        if type(i_) == types.ClassType:
+            if issubclass(i_, baseClass):
+                setUnjellyableForClass('%s%s' % (prefix, i), i_)
+
+
+
+def getInstanceState(inst, jellier):
+    """
+    Utility method to default to 'normal' state rules in serialization.
+    """
+    if hasattr(inst, "__getstate__"):
+        state = inst.__getstate__()
+    else:
+        state = inst.__dict__
+    sxp = jellier.prepare(inst)
+    sxp.extend([qual(inst.__class__), jellier.jelly(state)])
+    return jellier.preserve(inst, sxp)
+
+
+
+def setInstanceState(inst, unjellier, jellyList):
+    """
+    Utility method to default to 'normal' state rules in unserialization.
+    """
+    state = unjellier.unjelly(jellyList[1])
+    if hasattr(inst, "__setstate__"):
+        inst.__setstate__(state)
+    else:
+        inst.__dict__ = state
+    return inst
+
+
+
+class Unpersistable:
+    """
+    This is an instance of a class that comes back when something couldn't be
+    unpersisted.
+    """
+
+    def __init__(self, reason):
+        """
+        Initialize an unpersistable object with a descriptive C{reason} string.
+        """
+        self.reason = reason
+
+
+    def __repr__(self):
+        return "Unpersistable(%s)" % repr(self.reason)
+
+
+
+class Jellyable:
+    """
+    Inherit from me to Jelly yourself directly with the `getStateFor'
+    convenience method.
+    """
+    implements(IJellyable)
+
+    def getStateFor(self, jellier):
+        return self.__dict__
+
+
+    def jellyFor(self, jellier):
+        """
+        @see: L{twisted.spread.interfaces.IJellyable.jellyFor}
+        """
+        sxp = jellier.prepare(self)
+        sxp.extend([
+            qual(self.__class__),
+            jellier.jelly(self.getStateFor(jellier))])
+        return jellier.preserve(self, sxp)
+
+
+
+class Unjellyable:
+    """
+    Inherit from me to Unjelly yourself directly with the
+    C{setStateFor} convenience method.
+    """
+    implements(IUnjellyable)
+
+    def setStateFor(self, unjellier, state):
+        self.__dict__ = state
+
+
+    def unjellyFor(self, unjellier, jellyList):
+        """
+        Perform the inverse operation of L{Jellyable.jellyFor}.
+
+        @see: L{twisted.spread.interfaces.IUnjellyable.unjellyFor}
+        """
+        state = unjellier.unjelly(jellyList[1])
+        self.setStateFor(unjellier, state)
+        return self
+
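+# A small illustrative sketch (not part of upstream) of the two hooks above:
+# a class may limit what is serialized by overriding getStateFor, and rebuild
+# derived state on the way back in by overriding setStateFor.  (For
+# unjellying, the class must also be registered via setUnjellyableForClass.)
+#
+#     class Point(Jellyable, Unjellyable):
+#         def __init__(self, x=0, y=0):
+#             self.x, self.y = x, y
+#             self.magnitude = (x * x + y * y) ** 0.5
+#
+#         def getStateFor(self, jellier):
+#             # ship only the coordinates, not the cached magnitude
+#             return {'x': self.x, 'y': self.y}
+#
+#         def setStateFor(self, unjellier, state):
+#             self.__init__(state['x'], state['y'])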
+
+
+class _Jellier:
+    """
+    (Internal) This class manages state for a call to jelly()
+    """
+
+    def __init__(self, taster, persistentStore, invoker):
+        """
+        Initialize.
+        """
+        self.taster = taster
+        # `preserved' is a dict of previously seen instances.
+        self.preserved = {}
+        # `cooked' is a dict of previously backreferenced instances to their
+        # `ref' lists.
+        self.cooked = {}
+        self.cooker = {}
+        self._ref_id = 1
+        self.persistentStore = persistentStore
+        self.invoker = invoker
+
+
+    def _cook(self, object):
+        """
+        (internal) Backreference an object.
+
+        Notes on this method for the hapless future maintainer: If I've already
+        gone through the prepare/preserve cycle on the specified object (it is
+        being referenced after the serializer is \"done with\" it, e.g. this
+        reference is NOT circular), the copy-in-place of aList is relevant,
+        since the list being modified is the actual, pre-existing jelly
+        expression that was returned for that object. If not, it's technically
+        superfluous, since the value in self.preserved didn't need to be set,
+        but the invariant that self.preserved[id(object)] is a list is
+        convenient because that means we don't have to test and create it or
+        not create it here, creating fewer code paths.  That's why
+        self.preserved is always set to a list.
+
+        Sorry that this code is so hard to follow, but Python objects are
+        tricky to persist correctly. -glyph
+        """
+        aList = self.preserved[id(object)]
+        newList = copy.copy(aList)
+        # make a new reference ID
+        refid = self._ref_id
+        self._ref_id = self._ref_id + 1
+        # replace the old list in-place, so that we don't have to track the
+        # previous reference to it.
+        aList[:] = [reference_atom, refid, newList]
+        self.cooked[id(object)] = [dereference_atom, refid]
+        return aList
+
+
+    def prepare(self, object):
+        """
+        (internal) Create a list for persisting an object to.  This allows
+        backreferences to be made internal to the object (i.e. circular
+        references).
+
+        The reason this needs to happen is that we don't generate an ID for
+        every object, so we won't necessarily know which ID the object will
+        have in the future.  When it is 'cooked' (see _cook), it will be
+        assigned an ID, and the temporary placeholder list created here will be
+        modified in-place to create an expression that gives this object an ID:
+        [reference id# [object-jelly]].
+        """
+
+        # create a placeholder list to be preserved
+        self.preserved[id(object)] = []
+        # keep a reference to this object around, so it doesn't disappear!
+        # (This isn't always necessary, but for cases where the objects are
+        # dynamically generated by __getstate__ or getStateToCopyFor calls, it
+        # is; id() will return the same value for a different object if it gets
+        # garbage collected.  This may be optimized later.)
+        self.cooker[id(object)] = object
+        return []
+
+
+    def preserve(self, object, sexp):
+        """
+        (internal) Mark an object's persistent list for later referral.
+        """
+        # if I've been cooked in the meanwhile,
+        if id(object) in self.cooked:
+            # replace the placeholder empty list with the real one
+            self.preserved[id(object)][2] = sexp
+            # but give this one back.
+            sexp = self.preserved[id(object)]
+        else:
+            self.preserved[id(object)] = sexp
+        return sexp
+
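+    # Illustration (comment only; not executed): the prepare/_cook/preserve
+    # sequence turns a self-referencing list into a backreference:
+    #
+    #     l = []
+    #     l.append(l)
+    #     jelly(l)   ==>  ['reference', 1, ['list', ['dereference', 1]]]
+    #
+    # The outer 'reference' wrapper is the placeholder list that prepare()
+    # stored in self.preserved, rewritten in place by _cook() when the nested
+    # occurrence of l is reached and filled in by preserve().
+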
+    constantTypes = {types.StringType : 1, types.IntType : 1,
+                     types.FloatType : 1, types.LongType : 1}
+
+
+    def _checkMutable(self,obj):
+        objId = id(obj)
+        if objId in self.cooked:
+            return self.cooked[objId]
+        if objId in self.preserved:
+            self._cook(obj)
+            return self.cooked[objId]
+
+
+    def jelly(self, obj):
+        if isinstance(obj, Jellyable):
+            preRef = self._checkMutable(obj)
+            if preRef:
+                return preRef
+            return obj.jellyFor(self)
+        objType = type(obj)
+        if self.taster.isTypeAllowed(qual(objType)):
+            # "Immutable" Types
+            if ((objType is StringType) or
+                (objType is IntType) or
+                (objType is LongType) or
+                (objType is FloatType)):
+                return obj
+            elif objType is MethodType:
+                return ["method",
+                        obj.im_func.__name__,
+                        self.jelly(obj.im_self),
+                        self.jelly(obj.im_class)]
+
+            elif UnicodeType and objType is UnicodeType:
+                return ['unicode', obj.encode('UTF-8')]
+            elif objType is NoneType:
+                return ['None']
+            elif objType is FunctionType:
+                name = obj.__name__
+                return ['function', str(pickle.whichmodule(obj, obj.__name__))
+                        + '.' +
+                        name]
+            elif objType is ModuleType:
+                return ['module', obj.__name__]
+            elif objType is BooleanType:
+                return ['boolean', obj and 'true' or 'false']
+            elif objType is datetime.datetime:
+                if obj.tzinfo:
+                    raise NotImplementedError(
+                        "Currently can't jelly datetime objects with tzinfo")
+                return ['datetime', '%s %s %s %s %s %s %s' % (
+                    obj.year, obj.month, obj.day, obj.hour,
+                    obj.minute, obj.second, obj.microsecond)]
+            elif objType is datetime.time:
+                if obj.tzinfo:
+                    raise NotImplementedError(
+                        "Currently can't jelly datetime objects with tzinfo")
+                return ['time', '%s %s %s %s' % (obj.hour, obj.minute,
+                                                 obj.second, obj.microsecond)]
+            elif objType is datetime.date:
+                return ['date', '%s %s %s' % (obj.year, obj.month, obj.day)]
+            elif objType is datetime.timedelta:
+                return ['timedelta', '%s %s %s' % (obj.days, obj.seconds,
+                                                   obj.microseconds)]
+            elif objType is ClassType or issubclass(objType, type):
+                return ['class', qual(obj)]
+            elif decimal is not None and objType is decimal.Decimal:
+                return self.jelly_decimal(obj)
+            else:
+                preRef = self._checkMutable(obj)
+                if preRef:
+                    return preRef
+                # "Mutable" Types
+                sxp = self.prepare(obj)
+                if objType is ListType:
+                    sxp.extend(self._jellyIterable(list_atom, obj))
+                elif objType is TupleType:
+                    sxp.extend(self._jellyIterable(tuple_atom, obj))
+                elif objType in DictTypes:
+                    sxp.append(dictionary_atom)
+                    for key, val in obj.items():
+                        sxp.append([self.jelly(key), self.jelly(val)])
+                elif (_set is not None and objType is set or
+                      objType is _sets.Set):
+                    sxp.extend(self._jellyIterable(set_atom, obj))
+                elif (_set is not None and objType is frozenset or
+                      objType is _sets.ImmutableSet):
+                    sxp.extend(self._jellyIterable(frozenset_atom, obj))
+                else:
+                    className = qual(obj.__class__)
+                    persistent = None
+                    if self.persistentStore:
+                        persistent = self.persistentStore(obj, self)
+                    if persistent is not None:
+                        sxp.append(persistent_atom)
+                        sxp.append(persistent)
+                    elif self.taster.isClassAllowed(obj.__class__):
+                        sxp.append(className)
+                        if hasattr(obj, "__getstate__"):
+                            state = obj.__getstate__()
+                        else:
+                            state = obj.__dict__
+                        sxp.append(self.jelly(state))
+                    else:
+                        self.unpersistable(
+                            "instance of class %s deemed insecure" %
+                            qual(obj.__class__), sxp)
+                return self.preserve(obj, sxp)
+        else:
+            if objType is InstanceType:
+                raise InsecureJelly("Class not allowed for instance: %s %s" %
+                                    (obj.__class__, obj))
+            raise InsecureJelly("Type not allowed for object: %s %s" %
+                                (objType, obj))
+
+
+    def _jellyIterable(self, atom, obj):
+        """
+        Jelly an iterable object.
+
+        @param atom: the identifier atom of the object.
+        @type atom: C{str}
+
+        @param obj: any iterable object.
+        @type obj: C{iterable}
+
+        @return: a generator of jellied data.
+        @rtype: C{generator}
+        """
+        yield atom
+        for item in obj:
+            yield self.jelly(item)
+
+
+    def jelly_decimal(self, d):
+        """
+        Jelly a decimal object.
+
+        @param d: a decimal object to serialize.
+        @type d: C{decimal.Decimal}
+
+        @return: jelly for the decimal object.
+        @rtype: C{list}
+        """
+        sign, guts, exponent = d.as_tuple()
+        value = reduce(lambda left, right: left * 10 + right, guts)
+        if sign:
+            value = -value
+        return ['decimal', value, exponent]
+
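+    # Worked example (comment only): decimal.Decimal('-2.5').as_tuple() is
+    # (1, (2, 5), -1); the digits collapse to 25, the sign negates it, and
+    # the jellied form is ['decimal', -25, -1].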
+
+    def unpersistable(self, reason, sxp=None):
+        """
+        (internal) Returns an sexp: (unpersistable "reason").  Utility method
+        for making note that a particular object could not be serialized.
+        """
+        if sxp is None:
+            sxp = []
+        sxp.append(unpersistable_atom)
+        sxp.append(reason)
+        return sxp
+
+
+
+class _Unjellier:
+
+    def __init__(self, taster, persistentLoad, invoker):
+        self.taster = taster
+        self.persistentLoad = persistentLoad
+        self.references = {}
+        self.postCallbacks = []
+        self.invoker = invoker
+
+
+    def unjellyFull(self, obj):
+        o = self.unjelly(obj)
+        for m in self.postCallbacks:
+            m()
+        return o
+
+
+    def unjelly(self, obj):
+        if type(obj) is not types.ListType:
+            return obj
+        jelType = obj[0]
+        if not self.taster.isTypeAllowed(jelType):
+            raise InsecureJelly(jelType)
+        regClass = unjellyableRegistry.get(jelType)
+        if regClass is not None:
+            if isinstance(regClass, ClassType):
+                inst = _Dummy() # XXX chomp, chomp
+                inst.__class__ = regClass
+                method = inst.unjellyFor
+            elif isinstance(regClass, type):
+                # regClass.__new__ does not call regClass.__init__
+                inst = regClass.__new__(regClass)
+                method = inst.unjellyFor
+            else:
+                method = regClass # this is how it ought to be done
+            val = method(self, obj)
+            if hasattr(val, 'postUnjelly'):
+                self.postCallbacks.append(inst.postUnjelly)
+            return val
+        regFactory = unjellyableFactoryRegistry.get(jelType)
+        if regFactory is not None:
+            state = self.unjelly(obj[1])
+            inst = regFactory(state)
+            if hasattr(inst, 'postUnjelly'):
+                self.postCallbacks.append(inst.postUnjelly)
+            return inst
+        thunk = getattr(self, '_unjelly_%s'%jelType, None)
+        if thunk is not None:
+            ret = thunk(obj[1:])
+        else:
+            nameSplit = jelType.split('.')
+            modName = '.'.join(nameSplit[:-1])
+            if not self.taster.isModuleAllowed(modName):
+                raise InsecureJelly(
+                    "Module %s not allowed (in type %s)." % (modName, jelType))
+            clz = namedObject(jelType)
+            if not self.taster.isClassAllowed(clz):
+                raise InsecureJelly("Class %s not allowed." % jelType)
+            if hasattr(clz, "__setstate__"):
+                ret = _newInstance(clz)
+                state = self.unjelly(obj[1])
+                ret.__setstate__(state)
+            else:
+                state = self.unjelly(obj[1])
+                ret = _newInstance(clz, state)
+            if hasattr(clz, 'postUnjelly'):
+                self.postCallbacks.append(ret.postUnjelly)
+        return ret
+
+
+    def _unjelly_None(self, exp):
+        return None
+
+
+    def _unjelly_unicode(self, exp):
+        if UnicodeType:
+            return unicode(exp[0], "UTF-8")
+        else:
+            return Unpersistable("Could not unpersist unicode: %s" % (exp[0],))
+
+
+    def _unjelly_decimal(self, exp):
+        """
+        Unjelly decimal objects, if decimal is available. If not, return an
+        L{Unpersistable} object instead.
+        """
+        if decimal is None:
+            return Unpersistable(
+                "Could not unpersist decimal: %s" % (exp[0] * (10**exp[1]),))
+        value = exp[0]
+        exponent = exp[1]
+        if value < 0:
+            sign = 1
+        else:
+            sign = 0
+        guts = decimal.Decimal(value).as_tuple()[1]
+        return decimal.Decimal((sign, guts, exponent))
+
+
+    def _unjelly_boolean(self, exp):
+        if BooleanType:
+            assert exp[0] in ('true', 'false')
+            return exp[0] == 'true'
+        else:
+            return Unpersistable("Could not unpersist boolean: %s" % (exp[0],))
+
+
+    def _unjelly_datetime(self, exp):
+        return datetime.datetime(*map(int, exp[0].split()))
+
+
+    def _unjelly_date(self, exp):
+        return datetime.date(*map(int, exp[0].split()))
+
+
+    def _unjelly_time(self, exp):
+        return datetime.time(*map(int, exp[0].split()))
+
+
+    def _unjelly_timedelta(self, exp):
+        days, seconds, microseconds = map(int, exp[0].split())
+        return datetime.timedelta(
+            days=days, seconds=seconds, microseconds=microseconds)
+
+
+    def unjellyInto(self, obj, loc, jel):
+        o = self.unjelly(jel)
+        if isinstance(o, NotKnown):
+            o.addDependant(obj, loc)
+        obj[loc] = o
+        return o
+
+
+    def _unjelly_dereference(self, lst):
+        refid = lst[0]
+        x = self.references.get(refid)
+        if x is not None:
+            return x
+        der = _Dereference(refid)
+        self.references[refid] = der
+        return der
+
+
+    def _unjelly_reference(self, lst):
+        refid = lst[0]
+        exp = lst[1]
+        o = self.unjelly(exp)
+        ref = self.references.get(refid)
+        if (ref is None):
+            self.references[refid] = o
+        elif isinstance(ref, NotKnown):
+            ref.resolveDependants(o)
+            self.references[refid] = o
+        else:
+            assert 0, "Multiple references with same ID!"
+        return o
+
+
+    def _unjelly_tuple(self, lst):
+        l = range(len(lst))
+        finished = 1
+        for elem in l:
+            if isinstance(self.unjellyInto(l, elem, lst[elem]), NotKnown):
+                finished = 0
+        if finished:
+            return tuple(l)
+        else:
+            return _Tuple(l)
+
+
+    def _unjelly_list(self, lst):
+        l = range(len(lst))
+        for elem in l:
+            self.unjellyInto(l, elem, lst[elem])
+        return l
+
+
+    def _unjellySetOrFrozenset(self, lst, containerType):
+        """
+        Helper method to unjelly set or frozenset.
+
+        @param lst: the content of the set.
+        @type lst: C{list}
+
+        @param containerType: the type of C{set} to use.
+        """
+        l = range(len(lst))
+        finished = True
+        for elem in l:
+            data = self.unjellyInto(l, elem, lst[elem])
+            if isinstance(data, NotKnown):
+                finished = False
+        if not finished:
+            return _Container(l, containerType)
+        else:
+            return containerType(l)
+
+
+    def _unjelly_set(self, lst):
+        """
+        Unjelly set using either the C{set} builtin if available, or
+        C{sets.Set} as fallback.
+        """
+        if _set is not None:
+            containerType = set
+        else:
+            containerType = _sets.Set
+        return self._unjellySetOrFrozenset(lst, containerType)
+
+
+    def _unjelly_frozenset(self, lst):
+        """
+        Unjelly frozenset using either the C{frozenset} builtin if available,
+        or C{sets.ImmutableSet} as fallback.
+        """
+        if _set is not None:
+            containerType = frozenset
+        else:
+            containerType = _sets.ImmutableSet
+        return self._unjellySetOrFrozenset(lst, containerType)
+
+
+    def _unjelly_dictionary(self, lst):
+        d = {}
+        for k, v in lst:
+            kvd = _DictKeyAndValue(d)
+            self.unjellyInto(kvd, 0, k)
+            self.unjellyInto(kvd, 1, v)
+        return d
+
+
+    def _unjelly_module(self, rest):
+        moduleName = rest[0]
+        if type(moduleName) != types.StringType:
+            raise InsecureJelly(
+                "Attempted to unjelly a module with a non-string name.")
+        if not self.taster.isModuleAllowed(moduleName):
+            raise InsecureJelly(
+                "Attempted to unjelly module named %r" % (moduleName,))
+        mod = __import__(moduleName, {}, {},"x")
+        return mod
+
+
+    def _unjelly_class(self, rest):
+        clist = rest[0].split('.')
+        modName = '.'.join(clist[:-1])
+        if not self.taster.isModuleAllowed(modName):
+            raise InsecureJelly("module %s not allowed" % modName)
+        klaus = namedObject(rest[0])
+        objType = type(klaus)
+        if objType not in (types.ClassType, types.TypeType):
+            raise InsecureJelly(
+                "class %r unjellied to something that isn't a class: %r" % (
+                    rest[0], klaus))
+        if not self.taster.isClassAllowed(klaus):
+            raise InsecureJelly("class not allowed: %s" % qual(klaus))
+        return klaus
+
+
+    def _unjelly_function(self, rest):
+        modSplit = rest[0].split('.')
+        modName = '.'.join(modSplit[:-1])
+        if not self.taster.isModuleAllowed(modName):
+            raise InsecureJelly("Module not allowed: %s"% modName)
+        # XXX do I need an isFunctionAllowed?
+        function = namedObject(rest[0])
+        return function
+
+
+    def _unjelly_persistent(self, rest):
+        if self.persistentLoad:
+            pload = self.persistentLoad(rest[0], self)
+            return pload
+        else:
+            return Unpersistable("Persistent callback not found")
+
+
+    def _unjelly_instance(self, rest):
+        clz = self.unjelly(rest[0])
+        if type(clz) is not types.ClassType:
+            raise InsecureJelly("Instance found with non-class class.")
+        if hasattr(clz, "__setstate__"):
+            inst = _newInstance(clz, {})
+            state = self.unjelly(rest[1])
+            inst.__setstate__(state)
+        else:
+            state = self.unjelly(rest[1])
+            inst = _newInstance(clz, state)
+        if hasattr(clz, 'postUnjelly'):
+            self.postCallbacks.append(inst.postUnjelly)
+        return inst
+
+
+    def _unjelly_unpersistable(self, rest):
+        return Unpersistable("Unpersistable data: %s" % (rest[0],))
+
+
+    def _unjelly_method(self, rest):
+        """
+        (internal) Unjelly a method.
+        """
+        im_name = rest[0]
+        im_self = self.unjelly(rest[1])
+        im_class = self.unjelly(rest[2])
+        if type(im_class) is not types.ClassType:
+            raise InsecureJelly("Method found with non-class class.")
+        if im_name in im_class.__dict__:
+            if im_self is None:
+                im = getattr(im_class, im_name)
+            elif isinstance(im_self, NotKnown):
+                im = _InstanceMethod(im_name, im_self, im_class)
+            else:
+                im = MethodType(im_class.__dict__[im_name], im_self, im_class)
+        else:
+            raise TypeError('instance method changed')
+        return im
+
+
+
+class _Dummy:
+    """
+    (Internal) Dummy class, used for unserializing instances.
+    """
+
+
+
+class _DummyNewStyle(object):
+    """
+    (Internal) Dummy class, used for unserializing instances of new-style
+    classes.
+    """
+
+
+def _newDummyLike(instance):
+    """
+    Create a new instance like C{instance}.
+
+    The new instance has the same class and instance dictionary as the given
+    instance.
+
+    @return: The new instance.
+    """
+    if isinstance(instance.__class__, type):
+        # New-style class
+        dummy = _DummyNewStyle()
+    else:
+        # Classic class
+        dummy = _Dummy()
+    dummy.__class__ = instance.__class__
+    dummy.__dict__ = instance.__dict__
+    return dummy
+
+
+#### Published Interface.
+
+
+class InsecureJelly(Exception):
+    """
+    This exception will be raised when a jelly is deemed `insecure'; e.g. it
+    contains a type, class, or module disallowed by the specified `taster'.
+    """
+
+
+
+class DummySecurityOptions:
+    """
+    DummySecurityOptions() -> insecure security options
+    Dummy security options -- this class will allow anything.
+    """
+
+    def isModuleAllowed(self, moduleName):
+        """
+        DummySecurityOptions.isModuleAllowed(moduleName) -> boolean
+        returns 1 if a module by that name is allowed, 0 otherwise
+        """
+        return 1
+
+
+    def isClassAllowed(self, klass):
+        """
+        DummySecurityOptions.isClassAllowed(class) -> boolean
+        Assumes the module has already been allowed.  Returns 1 if the given
+        class is allowed, 0 otherwise.
+        """
+        return 1
+
+
+    def isTypeAllowed(self, typeName):
+        """
+        DummySecurityOptions.isTypeAllowed(typeName) -> boolean
+        Returns 1 if the given type is allowed, 0 otherwise.
+        """
+        return 1
+
+
+
+class SecurityOptions:
+    """
+    This will by default disallow everything, except for 'none'.
+    """
+
+    basicTypes = ["dictionary", "list", "tuple",
+                  "reference", "dereference", "unpersistable",
+                  "persistent", "long_int", "long", "dict"]
+
+    def __init__(self):
+        """
+        SecurityOptions() initialize.
+        """
+        # I don't believe any of these types can ever pose a security hazard,
+        # except perhaps "reference"...
+        self.allowedTypes = {"None": 1,
+                             "bool": 1,
+                             "boolean": 1,
+                             "string": 1,
+                             "str": 1,
+                             "int": 1,
+                             "float": 1,
+                             "datetime": 1,
+                             "time": 1,
+                             "date": 1,
+                             "timedelta": 1,
+                             "NoneType": 1}
+        if hasattr(types, 'UnicodeType'):
+            self.allowedTypes['unicode'] = 1
+        if decimal is not None:
+            self.allowedTypes['decimal'] = 1
+        self.allowedTypes['set'] = 1
+        self.allowedTypes['frozenset'] = 1
+        self.allowedModules = {}
+        self.allowedClasses = {}
+
+
+    def allowBasicTypes(self):
+        """
+        Allow all `basic' types.  (Dictionary and list.  Int, string, and float
+        are implicitly allowed.)
+        """
+        self.allowTypes(*self.basicTypes)
+
+
+    def allowTypes(self, *types):
+        """
+        SecurityOptions.allowTypes(typeString): Allow a particular type, by its
+        name.
+        """
+        for typ in types:
+            if not isinstance(typ, str):
+                typ = qual(typ)
+            self.allowedTypes[typ] = 1
+
+
+    def allowInstancesOf(self, *classes):
+        """
+        SecurityOptions.allowInstancesOf(klass, klass, ...): allow instances
+        of the specified classes.
+
+        This will also allow the 'instance', 'class' (renamed 'classobj' in
+        Python 2.3), and 'module' types, as well as basic types.
+        """
+        self.allowBasicTypes()
+        self.allowTypes("instance", "class", "classobj", "module")
+        for klass in classes:
+            self.allowTypes(qual(klass))
+            self.allowModules(klass.__module__)
+            self.allowedClasses[klass] = 1
+
+
+    def allowModules(self, *modules):
+        """
+        SecurityOptions.allowModules(module, module, ...): allow modules by
+        name. This will also allow the 'module' type.
+        """
+        for module in modules:
+            if type(module) == types.ModuleType:
+                module = module.__name__
+            self.allowedModules[module] = 1
+
+
+    def isModuleAllowed(self, moduleName):
+        """
+        SecurityOptions.isModuleAllowed(moduleName) -> boolean
+        returns 1 if a module by that name is allowed, 0 otherwise
+        """
+        return moduleName in self.allowedModules
+
+
+    def isClassAllowed(self, klass):
+        """
+        SecurityOptions.isClassAllowed(class) -> boolean
+        Assumes the module has already been allowed.  Returns 1 if the given
+        class is allowed, 0 otherwise.
+        """
+        return klass in self.allowedClasses
+
+
+    def isTypeAllowed(self, typeName):
+        """
+        SecurityOptions.isTypeAllowed(typeName) -> boolean
+        Returns 1 if the given type is allowed, 0 otherwise.
+        """
+        return (typeName in self.allowedTypes or '.' in typeName)
+
+
+globalSecurity = SecurityOptions()
+globalSecurity.allowBasicTypes()
+
+
+
+def jelly(object, taster=DummySecurityOptions(), persistentStore=None,
+          invoker=None):
+    """
+    Serialize to s-expression.
+
+    Returns a list which is the serialized representation of an object.  An
+    optional 'taster' argument takes a SecurityOptions and will mark any
+    insecure objects as unpersistable rather than serializing them.
+    """
+    return _Jellier(taster, persistentStore, invoker).jelly(object)
+
+
+
+def unjelly(sexp, taster=DummySecurityOptions(), persistentLoad=None,
+            invoker=None):
+    """
+    Unserialize from s-expression.
+
+    Takes a list that was the result of a call to jelly() and unserializes an
+    arbitrary object from it.  The optional 'taster' argument, an instance of
+    SecurityOptions, will cause an InsecureJelly exception to be raised if an
+    attempt is made to unserialize a disallowed type, module, or class.
+    """
+    return _Unjellier(taster, persistentLoad, invoker).unjellyFull(sexp)
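+
+# A brief usage sketch (illustrative only): round-trip a structure through
+# jelly()/unjelly() with an explicit taster instead of the permissive default.
+#
+#     from twisted.spread.jelly import jelly, unjelly, SecurityOptions
+#
+#     taster = SecurityOptions()
+#     taster.allowBasicTypes()
+#     data = {'answer': 42, 'values': [1, 2, 3]}
+#     sexp = jelly(data, taster)
+#     assert unjelly(sexp, taster) == data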
diff --git a/ThirdParty/Twisted/twisted/spread/pb.py b/ThirdParty/Twisted/twisted/spread/pb.py
new file mode 100644
index 0000000..7e9a5b6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/pb.py
@@ -0,0 +1,1434 @@
+# -*- test-case-name: twisted.test.test_pb -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Perspective Broker
+
+\"This isn\'t a professional opinion, but it's probably got enough
+internet to kill you.\" --glyph
+
+Introduction
+============
+
+This is a broker for proxies for and copies of objects.  It provides a
+translucent interface layer to those proxies.
+
+The protocol is not opaque, because it provides objects which represent the
+remote proxies and require no context (server references, IDs) to operate on.
+
+It is not transparent because it does I{not} attempt to make remote objects
+behave identically, or even similarly, to local objects.  Method calls are
+invoked asynchronously, and specific rules are applied when serializing
+arguments.
+
+To get started, begin with L{PBClientFactory} and L{PBServerFactory}.
+
+ at author: Glyph Lefkowitz
+"""
+
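+# A minimal getting-started sketch (illustrative; names such as EchoRoot are
+# not part of this module): publish a Root object with PBServerFactory and
+# call it through PBClientFactory.
+#
+#     from twisted.spread import pb
+#     from twisted.internet import reactor
+#
+#     class EchoRoot(pb.Root):
+#         def remote_echo(self, text):
+#             return text
+#
+#     reactor.listenTCP(8789, pb.PBServerFactory(EchoRoot()))
+#
+#     factory = pb.PBClientFactory()
+#     reactor.connectTCP("localhost", 8789, factory)
+#     d = factory.getRootObject()
+#     d.addCallback(lambda root: root.callRemote("echo", "hello"))
+#     reactor.run()
+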
+import random
+import types
+
+from zope.interface import implements, Interface
+
+# Twisted Imports
+from twisted.python import log, failure, reflect
+from twisted.python.hashlib import md5
+from twisted.internet import defer, protocol
+from twisted.cred.portal import Portal
+from twisted.cred.credentials import IAnonymous, ICredentials
+from twisted.cred.credentials import IUsernameHashedPassword, Anonymous
+from twisted.persisted import styles
+from twisted.python.components import registerAdapter
+
+from twisted.spread.interfaces import IJellyable, IUnjellyable
+from twisted.spread.jelly import jelly, unjelly, globalSecurity
+from twisted.spread import banana
+
+from twisted.spread.flavors import Serializable
+from twisted.spread.flavors import Referenceable, NoSuchMethod
+from twisted.spread.flavors import Root, IPBRoot
+from twisted.spread.flavors import ViewPoint
+from twisted.spread.flavors import Viewable
+from twisted.spread.flavors import Copyable
+from twisted.spread.flavors import Jellyable
+from twisted.spread.flavors import Cacheable
+from twisted.spread.flavors import RemoteCopy
+from twisted.spread.flavors import RemoteCache
+from twisted.spread.flavors import RemoteCacheObserver
+from twisted.spread.flavors import copyTags
+
+from twisted.spread.flavors import setUnjellyableForClass
+from twisted.spread.flavors import setUnjellyableFactoryForClass
+from twisted.spread.flavors import setUnjellyableForClassTree
+# These three are backwards compatibility aliases for the previous three.
+# Ultimately they should be deprecated. -exarkun
+from twisted.spread.flavors import setCopierForClass
+from twisted.spread.flavors import setFactoryForClass
+from twisted.spread.flavors import setCopierForClassTree
+
+
+MAX_BROKER_REFS = 1024
+
+portno = 8787
+
+
+
+class ProtocolError(Exception):
+    """
+    This error is raised when an invalid protocol statement is received.
+    """
+
+
+
+class DeadReferenceError(ProtocolError):
+    """
+    This error is raised when a method is called on a dead reference (one whose
+    broker has been disconnected).
+    """
+
+
+
+class Error(Exception):
+    """
+    This error can be raised to generate known error conditions.
+
+    When a PB callable method (perspective_, remote_, view_) raises
+    this error, it indicates that a traceback should not be printed,
+    but instead, the string representation of the exception should be
+    sent.
+    """
+
+
+
+class RemoteError(Exception):
+    """
+    This class is used to wrap a string-ified exception from the remote side to
+    be able to reraise it. (Raising string exceptions is no longer possible in
+    Python 2.6+)
+
+    The value of this exception will be a str() representation of the remote
+    value.
+
+    @ivar remoteType: The full import path of the exception class which was
+        raised on the remote end.
+    @type remoteType: C{str}
+
+    @ivar remoteTraceback: The remote traceback.
+    @type remoteTraceback: C{str}
+
+    @note: It's not possible to include the remoteTraceback if this exception is
+        thrown into a generator. It must be accessed as an attribute.
+    """
+    def __init__(self, remoteType, value, remoteTraceback):
+        Exception.__init__(self, value)
+        self.remoteType = remoteType
+        self.remoteTraceback = remoteTraceback
+
+
+
+class RemoteMethod:
+    """
+    This is a translucent reference to a remote message.
+    """
+    def __init__(self, obj, name):
+        """
+        Initialize with a L{RemoteReference} and the name of this message.
+        """
+        self.obj = obj
+        self.name = name
+
+
+    def __cmp__(self, other):
+        return cmp((self.obj, self.name), other)
+
+
+    def __hash__(self):
+        return hash((self.obj, self.name))
+
+
+    def __call__(self, *args, **kw):
+        """
+        Asynchronously invoke a remote method.
+        """
+        return self.obj.broker._sendMessage('',self.obj.perspective,
+            self.obj.luid, self.name, args, kw)
+
+
+
+class PBConnectionLost(Exception):
+    pass
+
+
+
+class IPerspective(Interface):
+    """
+    per*spec*tive, n. : The relationship of aspects of a subject to each
+    other and to a whole: 'a perspective of history'; 'a need to view
+    the problem in the proper perspective'.
+
+    This is a Perspective Broker-specific wrapper for an avatar. That
+    is to say, a PB-published view on to the business logic for the
+    system's concept of a 'user'.
+
+    The concept of attached/detached is no longer implemented by the
+    framework. The realm is expected to implement such semantics if
+    needed.
+    """
+
+    def perspectiveMessageReceived(broker, message, args, kwargs):
+        """
+        This method is called when a network message is received.
+
+        @arg broker: The Perspective Broker.
+
+        @type message: str
+        @arg message: The name of the method called by the other end.
+
+        @type args: list in jelly format
+        @arg args: The arguments that were passed by the other end. It
+                   is recommended that you use the `unserialize' method of the
+                   broker to decode this.
+
+        @type kwargs: dict in jelly format
+        @arg kwargs: The keyword arguments that were passed by the
+                     other end.  It is recommended that you use the
+                     `unserialize' method of the broker to decode this.
+
+        @rtype: A jelly list.
+        @return: It is recommended that you use the `serialize' method
+                 of the broker on whatever object you need to return to
+                 generate the return value.
+        """
+
+
+
+class Avatar:
+    """
+    A default IPerspective implementor.
+
+    This class is intended to be subclassed, and a realm should return
+    an instance of such a subclass when IPerspective is requested of
+    it.
+
+    A peer requesting a perspective will receive only a
+    L{RemoteReference} to a pb.Avatar.  When a method is called on
+    that L{RemoteReference}, it will translate to a method on the
+    remote perspective named 'perspective_methodname'.  (For more
+    information on invoking methods on other objects, see
+    L{flavors.ViewPoint}.)
+    """
+
+    implements(IPerspective)
+
+    def perspectiveMessageReceived(self, broker, message, args, kw):
+        """
+        This method is called when a network message is received.
+
+        This will call::
+
+            self.perspective_%(message)s(*broker.unserialize(args),
+                                         **broker.unserialize(kw))
+
+        to handle the method; subclasses of Avatar are expected to
+        implement methods using this naming convention.
+        """
+
+        args = broker.unserialize(args, self)
+        kw = broker.unserialize(kw, self)
+        method = getattr(self, "perspective_%s" % message)
+        try:
+            state = method(*args, **kw)
+        except TypeError:
+            log.msg("%s didn't accept %s and %s" % (method, args, kw))
+            raise
+        return broker.serialize(state, self, method, args, kw)
+
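+# A hypothetical Avatar subclass following the perspective_* convention
+# described above; a realm (not shown) would return instances of it when
+# IPerspective is requested.
+#
+#     class ChatUser(Avatar):
+#         def perspective_say(self, text):
+#             # runs when a client does perspective.callRemote("say", ...)
+#             return "heard: %s" % (text,)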
+
+
+class AsReferenceable(Referenceable):
+    """
+    A reference directed towards another object.
+    """
+
+    def __init__(self, object, messageType="remote"):
+        self.remoteMessageReceived = getattr(
+            object, messageType + "MessageReceived")
+
+
+
+class RemoteReference(Serializable, styles.Ephemeral):
+    """
+    A translucent reference to a remote object.
+
+    I may be a reference to a L{flavors.ViewPoint}, a
+    L{flavors.Referenceable}, or an L{IPerspective} implementor (e.g.,
+    pb.Avatar).  From the client's perspective, it is not possible to
+    tell which except by convention.
+
+    I am a \"translucent\" reference because although no additional
+    bookkeeping overhead is given to the application programmer for
+    manipulating a reference, return values are asynchronous.
+
+    See also L{twisted.internet.defer}.
+
+    @ivar broker: The broker I am obtained through.
+    @type broker: L{Broker}
+    """
+
+    implements(IUnjellyable)
+
+    def __init__(self, perspective, broker, luid, doRefCount):
+        """(internal) Initialize me with a broker and a locally-unique ID.
+
+        The ID is unique only to the particular Perspective Broker
+        instance.
+        """
+        self.luid = luid
+        self.broker = broker
+        self.doRefCount = doRefCount
+        self.perspective = perspective
+        self.disconnectCallbacks = []
+
+    def notifyOnDisconnect(self, callback):
+        """Register a callback to be called if our broker gets disconnected.
+
+        This callback will be called with one argument, this instance.
+        """
+        assert callable(callback)
+        self.disconnectCallbacks.append(callback)
+        if len(self.disconnectCallbacks) == 1:
+            self.broker.notifyOnDisconnect(self._disconnected)
+
+    def dontNotifyOnDisconnect(self, callback):
+        """Remove a callback that was registered with notifyOnDisconnect."""
+        self.disconnectCallbacks.remove(callback)
+        if not self.disconnectCallbacks:
+            self.broker.dontNotifyOnDisconnect(self._disconnected)
+
+    def _disconnected(self):
+        """Called if we are disconnected and have callbacks registered."""
+        for callback in self.disconnectCallbacks:
+            callback(self)
+        self.disconnectCallbacks = None
+
+    def jellyFor(self, jellier):
+        """If I am being sent back to where I came from, serialize as a local backreference.
+        """
+        if jellier.invoker:
+            assert self.broker == jellier.invoker, "Can't send references to brokers other than their own."
+            return "local", self.luid
+        else:
+            return "unpersistable", "References cannot be serialized"
+
+    def unjellyFor(self, unjellier, unjellyList):
+        self.__init__(unjellier.invoker.unserializingPerspective, unjellier.invoker, unjellyList[1], 1)
+        return self
+
+    def callRemote(self, _name, *args, **kw):
+        """Asynchronously invoke a remote method.
+
+        @type _name: C{str}
+        @param _name:  the name of the remote method to invoke
+        @param args: arguments to serialize for the remote function
+        @param kw:  keyword arguments to serialize for the remote function.
+        @rtype:   L{twisted.internet.defer.Deferred}
+        @returns: a Deferred which will be fired when the result of
+                  this remote call is received.
+        """
+        # note that we use '_name' instead of 'name' so the user can call
+        # remote methods with 'name' as a keyword parameter, like this:
+        #  ref.callRemote("getPeopleNamed", count=12, name="Bob")
+
+        return self.broker._sendMessage('',self.perspective, self.luid,
+                                        _name, args, kw)
+
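+    # Typical use (an illustrative sketch, not upstream code): the Deferred
+    # returned by callRemote fires with the remote method's unjellied return
+    # value, or errbacks (e.g. with a CopiedFailure) if the call fails.
+    #
+    #     def gotResult(result):
+    #         return result
+    #
+    #     d = remoteRef.callRemote("getPeopleNamed", count=12, name="Bob")
+    #     d.addCallback(gotResult)
+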
+    def remoteMethod(self, key):
+        """Get a L{RemoteMethod} for this key.
+        """
+        return RemoteMethod(self, key)
+
+    def __cmp__(self,other):
+        """Compare me [to another L{RemoteReference}].
+        """
+        if isinstance(other, RemoteReference):
+            if other.broker == self.broker:
+                return cmp(self.luid, other.luid)
+        return cmp(self.broker, other)
+
+    def __hash__(self):
+        """Hash me.
+        """
+        return self.luid
+
+    def __del__(self):
+        """Do distributed reference counting on finalization.
+        """
+        if self.doRefCount:
+            self.broker.sendDecRef(self.luid)
+
+setUnjellyableForClass("remote", RemoteReference)
+
+class Local:
+    """(internal) A reference to a local object.
+    """
+
+    def __init__(self, object, perspective=None):
+        """Initialize.
+        """
+        self.object = object
+        self.perspective = perspective
+        self.refcount = 1
+
+    def __repr__(self):
+        return "<pb.Local %r ref:%s>" % (self.object, self.refcount)
+
+    def incref(self):
+        """Increment and return my reference count.
+        """
+        self.refcount = self.refcount + 1
+        return self.refcount
+
+    def decref(self):
+        """Decrement and return my reference count.
+        """
+        self.refcount = self.refcount - 1
+        return self.refcount
+
+
+##
+# Failure
+##
+
+class CopyableFailure(failure.Failure, Copyable):
+    """
+    A L{flavors.RemoteCopy} and L{flavors.Copyable} version of
+    L{twisted.python.failure.Failure} for serialization.
+    """
+
+    unsafeTracebacks = 0
+
+    def getStateToCopy(self):
+        """
+        Collect state related to the exception which occurred, discarding
+        state which cannot reasonably be serialized.
+        """
+        state = self.__dict__.copy()
+        state['tb'] = None
+        state['frames'] = []
+        state['stack'] = []
+        state['value'] = str(self.value) # Exception instance
+        if isinstance(self.type, str):
+            state['type'] = self.type
+        else:
+            state['type'] = reflect.qual(self.type) # Exception class
+        if self.unsafeTracebacks:
+            state['traceback'] = self.getTraceback()
+        else:
+            state['traceback'] = 'Traceback unavailable\n'
+        return state
+
+
+
+class CopiedFailure(RemoteCopy, failure.Failure):
+    """
+    A L{CopiedFailure} is a L{pb.RemoteCopy} of a L{failure.Failure}
+    transferred via PB.
+
+    @ivar type: The full import path of the exception class which was raised on
+        the remote end.
+    @type type: C{str}
+
+    @ivar value: A str() representation of the remote value.
+    @type value: L{CopiedFailure} or C{str}
+
+    @ivar traceback: The remote traceback.
+    @type traceback: C{str}
+    """
+
+    def printTraceback(self, file=None, elideFrameworkCode=0, detail='default'):
+        if file is None:
+            file = log.logfile
+        file.write("Traceback from remote host -- ")
+        file.write(self.traceback)
+        file.write(self.type + ": " + self.value)
+        file.write('\n')
+
+
+    def throwExceptionIntoGenerator(self, g):
+        """
+        Throw the original exception into the given generator, preserving
+        traceback information if available. In the case of a L{CopiedFailure}
+        where the exception type is a string, a L{pb.RemoteError} is thrown
+        instead.
+
+        @return: The next value yielded from the generator.
+        @raise StopIteration: If there are no more values in the generator.
+        @raise RemoteError: The wrapped remote exception.
+        """
+        return g.throw(RemoteError(self.type, self.value, self.traceback))
+
+    printBriefTraceback = printTraceback
+    printDetailedTraceback = printTraceback
+
+setUnjellyableForClass(CopyableFailure, CopiedFailure)
+
+
+
+def failure2Copyable(fail, unsafeTracebacks=0):
+    f = types.InstanceType(CopyableFailure, fail.__dict__)
+    f.unsafeTracebacks = unsafeTracebacks
+    return f
+
+
+
+class Broker(banana.Banana):
+    """I am a broker for objects.
+    """
+
+    version = 6
+    username = None
+    factory = None
+
+    def __init__(self, isClient=1, security=globalSecurity):
+        banana.Banana.__init__(self, isClient)
+        self.disconnected = 0
+        self.disconnects = []
+        self.failures = []
+        self.connects = []
+        self.localObjects = {}
+        self.security = security
+        self.pageProducers = []
+        self.currentRequestID = 0
+        self.currentLocalID = 0
+        self.unserializingPerspective = None
+        # Some terms:
+        #  PUID: process unique ID; return value of id() function.  type "int".
+        #  LUID: locally unique ID; an ID unique to an object mapped over this
+        #        connection. type "int"
+        #  GUID: (not used yet) globally unique ID; an ID for an object which
+        #        may be on a redirected or meta server.  Type as yet undecided.
+        # Dictionary mapping LUIDs to local objects.
+        # set above to allow root object to be assigned before connection is made
+        # self.localObjects = {}
+        # Dictionary mapping PUIDs to LUIDs.
+        self.luids = {}
+        # Dictionary mapping LUIDs to local (remotely cached) objects. Remotely
+        # cached means that they're objects which originate here, and were
+        # copied remotely.
+        self.remotelyCachedObjects = {}
+        # Dictionary mapping PUIDs to (cached) LUIDs
+        self.remotelyCachedLUIDs = {}
+        # Dictionary mapping (remote) LUIDs to (locally cached) objects.
+        self.locallyCachedObjects = {}
+        self.waitingForAnswers = {}
+
+        # Mapping from LUIDs to weakref objects with callbacks for performing
+        # any local cleanup which may be necessary for the corresponding
+        # object once it no longer exists.
+        self._localCleanup = {}
+
+
+    def resumeProducing(self):
+        """Called when the consumer attached to me runs out of buffer.
+        """
+        # Go backwards over the list so we can remove indexes from it as we go
+        for pageridx in xrange(len(self.pageProducers)-1, -1, -1):
+            pager = self.pageProducers[pageridx]
+            pager.sendNextPage()
+            if not pager.stillPaging():
+                del self.pageProducers[pageridx]
+        if not self.pageProducers:
+            self.transport.unregisterProducer()
+
+    # Streaming producer methods; not necessary to implement.
+    def pauseProducing(self):
+        pass
+
+    def stopProducing(self):
+        pass
+
+    def registerPageProducer(self, pager):
+        self.pageProducers.append(pager)
+        if len(self.pageProducers) == 1:
+            self.transport.registerProducer(self, 0)
+
+    def expressionReceived(self, sexp):
+        """Evaluate an expression as it's received.
+        """
+        if isinstance(sexp, types.ListType):
+            command = sexp[0]
+            methodName = "proto_%s" % command
+            method = getattr(self, methodName, None)
+            if method:
+                method(*sexp[1:])
+            else:
+                self.sendCall("didNotUnderstand", command)
+        else:
+            raise ProtocolError("Non-list expression received.")
+
+
+    def proto_version(self, vnum):
+        """Protocol message: (version version-number)
+
+        Check to make sure that both ends of the protocol are speaking
+        the same version dialect.
+        """
+
+        if vnum != self.version:
+            raise ProtocolError("Version Incompatibility: %s %s" % (self.version, vnum))
+
+
+    def sendCall(self, *exp):
+        """Utility method to send an expression to the other side of the connection.
+        """
+        self.sendEncoded(exp)
+
+    def proto_didNotUnderstand(self, command):
+        """Respond to stock 'C{didNotUnderstand}' message.
+
+        Log the command that was not understood and continue. (Note:
+        this will probably be changed to close the connection or raise
+        an exception in the future.)
+        """
+        log.msg("Didn't understand command: %r" % command)
+
+    def connectionReady(self):
+        """Initialize. Called after Banana negotiation is done.
+        """
+        self.sendCall("version", self.version)
+        for notifier in self.connects:
+            try:
+                notifier()
+            except:
+                log.deferr()
+        self.connects = None
+        if self.factory: # in tests we won't have factory
+            self.factory.clientConnectionMade(self)
+
+    def connectionFailed(self):
+        # XXX should never get called anymore? check!
+        for notifier in self.failures:
+            try:
+                notifier()
+            except:
+                log.deferr()
+        self.failures = None
+
+    waitingForAnswers = None
+
+    def connectionLost(self, reason):
+        """The connection was lost.
+        """
+        self.disconnected = 1
+        # nuke potential circular references.
+        self.luids = None
+        if self.waitingForAnswers:
+            for d in self.waitingForAnswers.values():
+                try:
+                    d.errback(failure.Failure(PBConnectionLost(reason)))
+                except:
+                    log.deferr()
+        # Ensure all Cacheable.stoppedObserving callbacks are called
+        for lobj in self.remotelyCachedObjects.values():
+            cacheable = lobj.object
+            perspective = lobj.perspective
+            try:
+                cacheable.stoppedObserving(perspective, RemoteCacheObserver(self, cacheable, perspective))
+            except:
+                log.deferr()
+        # Loop over a copy to prevent notifiers from mutating
+        # the list by calling dontNotifyOnDisconnect
+        for notifier in self.disconnects[:]:
+            try:
+                notifier()
+            except:
+                log.deferr()
+        self.disconnects = None
+        self.waitingForAnswers = None
+        self.localSecurity = None
+        self.remoteSecurity = None
+        self.remotelyCachedObjects = None
+        self.remotelyCachedLUIDs = None
+        self.locallyCachedObjects = None
+        self.localObjects = None
+
+    def notifyOnDisconnect(self, notifier):
+        """Call the given callback when the Broker disconnects."""
+        assert callable(notifier)
+        self.disconnects.append(notifier)
+
+    def notifyOnFail(self, notifier):
+        """Call the given callback if the Broker fails to connect."""
+        assert callable(notifier)
+        self.failures.append(notifier)
+
+    def notifyOnConnect(self, notifier):
+        """Call the given callback when the Broker connects."""
+        assert callable(notifier)
+        if self.connects is None:
+            try:
+                notifier()
+            except:
+                log.err()
+        else:
+            self.connects.append(notifier)
+
+    def dontNotifyOnDisconnect(self, notifier):
+        """Remove a callback from list of disconnect callbacks."""
+        try:
+            self.disconnects.remove(notifier)
+        except ValueError:
+            pass
+
+    def localObjectForID(self, luid):
+        """
+        Get a local object for a locally unique ID.
+
+        @return: An object previously stored with L{registerReference} or
+            C{None} if there is no object which corresponds to the given
+            identifier.
+        """
+        lob = self.localObjects.get(luid)
+        if lob is None:
+            return
+        return lob.object
+
+    maxBrokerRefsViolations = 0
+
+    def registerReference(self, object):
+        """Get an ID for a local object.
+
+        Store a persistent reference to a local object and map its id()
+        to a generated, session-unique ID and return that ID.
+        """
+
+        assert object is not None
+        puid = object.processUniqueID()
+        luid = self.luids.get(puid)
+        if luid is None:
+            if len(self.localObjects) > MAX_BROKER_REFS:
+                self.maxBrokerRefsViolations = self.maxBrokerRefsViolations + 1
+                if self.maxBrokerRefsViolations > 3:
+                    self.transport.loseConnection()
+                    raise Error("Maximum PB reference count exceeded.  "
+                                "Goodbye.")
+                raise Error("Maximum PB reference count exceeded.")
+
+            luid = self.newLocalID()
+            self.localObjects[luid] = Local(object)
+            self.luids[puid] = luid
+        else:
+            self.localObjects[luid].incref()
+        return luid
+
+    def setNameForLocal(self, name, object):
+        """Store a special (string) ID for this object.
+
+        This is how you specify a 'base' set of objects that the remote
+        protocol can connect to.
+        """
+        assert object is not None
+        self.localObjects[name] = Local(object)
+
+    def remoteForName(self, name):
+        """Returns an object from the remote name mapping.
+
+        Note that this does not check the validity of the name, only
+        creates a translucent reference for it.
+        """
+        return RemoteReference(None, self, name, 0)
+
+    def cachedRemotelyAs(self, instance, incref=0):
+        """Returns an ID that says what this instance is cached as remotely, or C{None} if it's not.
+        """
+
+        puid = instance.processUniqueID()
+        luid = self.remotelyCachedLUIDs.get(puid)
+        if (luid is not None) and (incref):
+            self.remotelyCachedObjects[luid].incref()
+        return luid
+
+    def remotelyCachedForLUID(self, luid):
+        """Returns an instance which is cached remotely, with this LUID.
+        """
+        return self.remotelyCachedObjects[luid].object
+
+    def cacheRemotely(self, instance):
+        """
+        (internal) Cache a local instance for the remote side: assign it a
+        new LUID, record the instance under that LUID, and return the LUID.
+        """
+        puid = instance.processUniqueID()
+        luid = self.newLocalID()
+        if len(self.remotelyCachedObjects) > MAX_BROKER_REFS:
+            self.maxBrokerRefsViolations = self.maxBrokerRefsViolations + 1
+            if self.maxBrokerRefsViolations > 3:
+                self.transport.loseConnection()
+                raise Error("Maximum PB cache count exceeded.  "
+                            "Goodbye.")
+            raise Error("Maximum PB cache count exceeded.")
+
+        self.remotelyCachedLUIDs[puid] = luid
+        # This table may not be necessary -- for now, it's to make sure that no
+        # monkey business happens with id(instance)
+        self.remotelyCachedObjects[luid] = Local(instance, self.serializingPerspective)
+        return luid
+
+    def cacheLocally(self, cid, instance):
+        """(internal)
+
+        Store a non-filled-out cached instance locally.
+        """
+        self.locallyCachedObjects[cid] = instance
+
+    def cachedLocallyAs(self, cid):
+        instance = self.locallyCachedObjects[cid]
+        return instance
+
+    def serialize(self, object, perspective=None, method=None, args=None, kw=None):
+        """Jelly an object according to the remote security rules for this broker.
+        """
+
+        if isinstance(object, defer.Deferred):
+            object.addCallbacks(self.serialize, lambda x: x,
+                                callbackKeywords={
+                'perspective': perspective,
+                'method': method,
+                'args': args,
+                'kw': kw
+                })
+            return object
+
+        # XXX This call is NOT REENTRANT and testing for reentrancy is just
+        # crazy, so it likely won't be.  Don't ever write methods that call the
+        # broker's serialize() method recursively (e.g. sending a method call
+        # from within a getState (this causes concurrency problems anyway so
+        # you really, really shouldn't do it))
+
+        # self.jellier = _NetJellier(self)
+        self.serializingPerspective = perspective
+        self.jellyMethod = method
+        self.jellyArgs = args
+        self.jellyKw = kw
+        try:
+            return jelly(object, self.security, None, self)
+        finally:
+            self.serializingPerspective = None
+            self.jellyMethod = None
+            self.jellyArgs = None
+            self.jellyKw = None
+
+    def unserialize(self, sexp, perspective = None):
+        """Unjelly an sexp according to the local security rules for this broker.
+        """
+
+        self.unserializingPerspective = perspective
+        try:
+            return unjelly(sexp, self.security, None, self)
+        finally:
+            self.unserializingPerspective = None
+
+    def newLocalID(self):
+        """Generate a new LUID.
+        """
+        self.currentLocalID = self.currentLocalID + 1
+        return self.currentLocalID
+
+    def newRequestID(self):
+        """Generate a new request ID.
+        """
+        self.currentRequestID = self.currentRequestID + 1
+        return self.currentRequestID
+
+    def _sendMessage(self, prefix, perspective, objectID, message, args, kw):
+        pbc = None
+        pbe = None
+        answerRequired = 1
+        if 'pbcallback' in kw:
+            pbc = kw['pbcallback']
+            del kw['pbcallback']
+        if 'pberrback' in kw:
+            pbe = kw['pberrback']
+            del kw['pberrback']
+        if 'pbanswer' in kw:
+            assert (not pbe) and (not pbc), "You can't specify a no-answer requirement."
+            answerRequired = kw['pbanswer']
+            del kw['pbanswer']
+        if self.disconnected:
+            raise DeadReferenceError("Calling Stale Broker")
+        try:
+            netArgs = self.serialize(args, perspective=perspective, method=message)
+            netKw = self.serialize(kw, perspective=perspective, method=message)
+        except:
+            return defer.fail(failure.Failure())
+        requestID = self.newRequestID()
+        if answerRequired:
+            rval = defer.Deferred()
+            self.waitingForAnswers[requestID] = rval
+            if pbc or pbe:
+                log.msg('warning! using deprecated "pbcallback"')
+                rval.addCallbacks(pbc, pbe)
+        else:
+            rval = None
+        self.sendCall(prefix+"message", requestID, objectID, message, answerRequired, netArgs, netKw)
+        return rval
+
+    def proto_message(self, requestID, objectID, message, answerRequired, netArgs, netKw):
+        self._recvMessage(self.localObjectForID, requestID, objectID, message, answerRequired, netArgs, netKw)
+    def proto_cachemessage(self, requestID, objectID, message, answerRequired, netArgs, netKw):
+        self._recvMessage(self.cachedLocallyAs, requestID, objectID, message, answerRequired, netArgs, netKw)
+
+    def _recvMessage(self, findObjMethod, requestID, objectID, message, answerRequired, netArgs, netKw):
+        """Received a message-send.
+
+        Look up the method on the target object, unserialize the arguments,
+        invoke it, and send back an 'answer' or 'error' response.
+        """
+        try:
+            object = findObjMethod(objectID)
+            if object is None:
+                raise Error("Invalid Object ID")
+            netResult = object.remoteMessageReceived(self, message, netArgs, netKw)
+        except Error, e:
+            if answerRequired:
+                # If the error is Jellyable or explicitly allowed via our
+                # security options, send it back and let the code on the
+                # other end deal with unjellying.  If it isn't Jellyable,
+                # wrap it in a CopyableFailure, which ensures it can be
+                # unjellied on the other end.  We have to do this because
+                # all errors must be sent back.
+                if isinstance(e, Jellyable) or self.security.isClassAllowed(e.__class__):
+                    self._sendError(e, requestID)
+                else:
+                    self._sendError(CopyableFailure(e), requestID)
+        except:
+            if answerRequired:
+                log.msg("Peer will receive following PB traceback:", isError=True)
+                f = CopyableFailure()
+                self._sendError(f, requestID)
+            log.err()
+        else:
+            if answerRequired:
+                if isinstance(netResult, defer.Deferred):
+                    args = (requestID,)
+                    netResult.addCallbacks(self._sendAnswer, self._sendFailureOrError,
+                                           callbackArgs=args, errbackArgs=args)
+                    # XXX Should this be done somewhere else?
+                else:
+                    self._sendAnswer(netResult, requestID)
+    ##
+    # success
+    ##
+
+    def _sendAnswer(self, netResult, requestID):
+        """(internal) Send an answer to a previously sent message.
+        """
+        self.sendCall("answer", requestID, netResult)
+
+    def proto_answer(self, requestID, netResult):
+        """(internal) Got an answer to a previously sent message.
+
+        Look up the appropriate callback and call it.
+        """
+        d = self.waitingForAnswers[requestID]
+        del self.waitingForAnswers[requestID]
+        d.callback(self.unserialize(netResult))
+
+    ##
+    # failure
+    ##
+    def _sendFailureOrError(self, fail, requestID):
+        """
+        Call L{_sendError} or L{_sendFailure}, depending on whether C{fail}
+        represents an L{Error} subclass or not.
+        """
+        if fail.check(Error) is None:
+            self._sendFailure(fail, requestID)
+        else:
+            self._sendError(fail, requestID)
+
+
+    def _sendFailure(self, fail, requestID):
+        """Log error and then send it."""
+        log.msg("Peer will receive following PB traceback:")
+        log.err(fail)
+        self._sendError(fail, requestID)
+
+    def _sendError(self, fail, requestID):
+        """(internal) Send an error for a previously sent message.
+        """
+        if isinstance(fail, failure.Failure):
+            # If the failures value is jellyable or allowed through security,
+            # send the value
+            if (isinstance(fail.value, Jellyable) or
+                self.security.isClassAllowed(fail.value.__class__)):
+                fail = fail.value
+            elif not isinstance(fail, CopyableFailure):
+                fail = failure2Copyable(fail, self.factory.unsafeTracebacks)
+        if isinstance(fail, CopyableFailure):
+            fail.unsafeTracebacks = self.factory.unsafeTracebacks
+        self.sendCall("error", requestID, self.serialize(fail))
+
+    def proto_error(self, requestID, fail):
+        """(internal) Deal with an error.
+        """
+        d = self.waitingForAnswers[requestID]
+        del self.waitingForAnswers[requestID]
+        d.errback(self.unserialize(fail))
+
+    ##
+    # refcounts
+    ##
+
+    def sendDecRef(self, objectID):
+        """(internal) Send a DECREF directive.
+        """
+        self.sendCall("decref", objectID)
+
+    def proto_decref(self, objectID):
+        """(internal) Decrement the reference count of an object.
+
+        If the reference count is zero, it will free the reference to this
+        object.
+        """
+        refs = self.localObjects[objectID].decref()
+        if refs == 0:
+            puid = self.localObjects[objectID].object.processUniqueID()
+            del self.luids[puid]
+            del self.localObjects[objectID]
+            self._localCleanup.pop(puid, lambda: None)()
+
+    ##
+    # caching
+    ##
+
+    def decCacheRef(self, objectID):
+        """(internal) Send a DECACHE directive.
+        """
+        self.sendCall("decache", objectID)
+
+    def proto_decache(self, objectID):
+        """(internal) Decrement the reference count of a cached object.
+
+        If the reference count is zero, free the reference, then send an
+        'uncached' directive.
+        """
+        refs = self.remotelyCachedObjects[objectID].decref()
+        # log.msg('decaching: %s #refs: %s' % (objectID, refs))
+        if refs == 0:
+            lobj = self.remotelyCachedObjects[objectID]
+            cacheable = lobj.object
+            perspective = lobj.perspective
+            # TODO: force_decache needs to be able to force-invalidate a
+            # cacheable reference.
+            try:
+                cacheable.stoppedObserving(perspective, RemoteCacheObserver(self, cacheable, perspective))
+            except:
+                log.deferr()
+            puid = cacheable.processUniqueID()
+            del self.remotelyCachedLUIDs[puid]
+            del self.remotelyCachedObjects[objectID]
+            self.sendCall("uncache", objectID)
+
+    def proto_uncache(self, objectID):
+        """(internal) Tell the client it is now OK to uncache an object.
+        """
+        # log.msg("uncaching locally %d" % objectID)
+        obj = self.locallyCachedObjects[objectID]
+        obj.broker = None
+##         def reallyDel(obj=obj):
+##             obj.__really_del__()
+##         obj.__del__ = reallyDel
+        del self.locallyCachedObjects[objectID]
+
+
+
+def respond(challenge, password):
+    """Respond to a challenge.
+
+    This is useful for challenge/response authentication.
+    """
+    m = md5()
+    m.update(password)
+    hashedPassword = m.digest()
+    m = md5()
+    m.update(hashedPassword)
+    m.update(challenge)
+    doubleHashedPassword = m.digest()
+    return doubleHashedPassword
+
+def challenge():
+    """I return some random data."""
+    crap = ''
+    for x in range(random.randrange(15,25)):
+        crap = crap + chr(random.randint(65,90))
+    crap = md5(crap).digest()
+    return crap
+
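+# Illustrative sketch (not part of the upstream module): how challenge() and
+# respond() fit together.  The server hands out a random challenge, the client
+# answers with respond(challenge, password), and the server verifies it by
+# recomputing md5(md5(password) + challenge) -- exactly what
+# _PortalAuthChallenger.checkMD5Password does further down in this file.
+#
+#     c = challenge()                              # server side
+#     answer = respond(c, "secret")                # client side
+#     assert answer == md5(md5("secret").digest() + c).digest()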
+
+class PBClientFactory(protocol.ClientFactory):
+    """
+    Client factory for PB brokers.
+
+    As with all client factories, use with reactor.connectTCP/SSL/etc..
+    getPerspective and getRootObject can be called either before or
+    after the connect.
+    """
+
+    protocol = Broker
+    unsafeTracebacks = False
+
+    def __init__(self, unsafeTracebacks=False, security=globalSecurity):
+        """
+        @param unsafeTracebacks: if set, tracebacks for exceptions will be sent
+            over the wire.
+        @type unsafeTracebacks: C{bool}
+
+        @param security: security options used by the broker, default to
+            C{globalSecurity}.
+        @type security: L{twisted.spread.jelly.SecurityOptions}
+        """
+        self.unsafeTracebacks = unsafeTracebacks
+        self.security = security
+        self._reset()
+
+
+    def buildProtocol(self, addr):
+        """
+        Build the broker instance, passing the security options to it.
+        """
+        p = self.protocol(isClient=True, security=self.security)
+        p.factory = self
+        return p
+
+
+    def _reset(self):
+        self.rootObjectRequests = [] # list of deferred
+        self._broker = None
+        self._root = None
+
+    def _failAll(self, reason):
+        deferreds = self.rootObjectRequests
+        self._reset()
+        for d in deferreds:
+            d.errback(reason)
+
+    def clientConnectionFailed(self, connector, reason):
+        self._failAll(reason)
+
+    def clientConnectionLost(self, connector, reason, reconnecting=0):
+        """Reconnecting subclasses should call with reconnecting=1."""
+        if reconnecting:
+            # any pending requests will go to next connection attempt
+            # so we don't fail them.
+            self._broker = None
+            self._root = None
+        else:
+            self._failAll(reason)
+
+    def clientConnectionMade(self, broker):
+        self._broker = broker
+        self._root = broker.remoteForName("root")
+        ds = self.rootObjectRequests
+        self.rootObjectRequests = []
+        for d in ds:
+            d.callback(self._root)
+
+    def getRootObject(self):
+        """Get root object of remote PB server.
+
+        @return: Deferred of the root object.
+        """
+        if self._broker and not self._broker.disconnected:
+           return defer.succeed(self._root)
+        d = defer.Deferred()
+        self.rootObjectRequests.append(d)
+        return d
+
+    def disconnect(self):
+        """If the factory is connected, close the connection.
+
+        Note that if you set up the factory to reconnect, you will need to
+        implement extra logic to prevent automatic reconnection after this
+        is called.
+        """
+        if self._broker:
+            self._broker.transport.loseConnection()
+
+    def _cbSendUsername(self, root, username, password, client):
+        return root.callRemote("login", username).addCallback(
+            self._cbResponse, password, client)
+
+    def _cbResponse(self, (challenge, challenger), password, client):
+        return challenger.callRemote("respond", respond(challenge, password), client)
+
+
+    def _cbLoginAnonymous(self, root, client):
+        """
+        Attempt an anonymous login on the given remote root object.
+
+        @type root: L{RemoteReference}
+        @param root: The object on which to attempt the login, most likely
+            returned by a call to L{PBClientFactory.getRootObject}.
+
+        @param client: A jellyable object which will be used as the I{mind}
+            parameter for the login attempt.
+
+        @rtype: L{Deferred}
+        @return: A L{Deferred} which will be called back with a
+            L{RemoteReference} to an avatar when anonymous login succeeds, or
+            which will errback if anonymous login fails.
+        """
+        return root.callRemote("loginAnonymous", client)
+
+
+    def login(self, credentials, client=None):
+        """
+        Login and get perspective from remote PB server.
+
+        Currently the following credentials are supported::
+
+            L{twisted.cred.credentials.IUsernamePassword}
+            L{twisted.cred.credentials.IAnonymous}
+
+        @rtype: L{Deferred}
+        @return: A L{Deferred} which will be called back with a
+            L{RemoteReference} for the avatar logged in to, or which will
+            errback if login fails.
+        """
+        d = self.getRootObject()
+
+        if IAnonymous.providedBy(credentials):
+            d.addCallback(self._cbLoginAnonymous, client)
+        else:
+            d.addCallback(
+                self._cbSendUsername, credentials.username,
+                credentials.password, client)
+        return d
+
+
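+# Illustrative sketch (not part of the upstream module): minimal client-side
+# use of PBClientFactory against a cred-based PB server assumed to be
+# listening on localhost:8787 and accepting the "alice"/"secret" credentials;
+# the remote_echo method on the avatar is also an assumption.
+#
+#     from twisted.internet import reactor
+#     from twisted.python import log
+#     from twisted.cred.credentials import UsernamePassword
+#     from twisted.spread import pb
+#
+#     factory = pb.PBClientFactory()
+#     reactor.connectTCP("localhost", 8787, factory)
+#     d = factory.login(UsernamePassword("alice", "secret"))
+#     d.addCallback(lambda perspective: perspective.callRemote("echo", "hi"))
+#     d.addErrback(log.err)
+#     reactor.run()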
+
+class PBServerFactory(protocol.ServerFactory):
+    """
+    Server factory for perspective broker.
+
+    Login is done using a Portal object, whose realm is expected to return
+    avatars implementing IPerspective. The credential checkers in the portal
+    should accept IUsernameHashedPassword or IUsernameMD5Password.
+
+    Alternatively, any object providing or adaptable to L{IPBRoot} can be
+    used instead of a portal to provide the root object of the PB server.
+    """
+
+    unsafeTracebacks = False
+
+    # object broker factory
+    protocol = Broker
+
+    def __init__(self, root, unsafeTracebacks=False, security=globalSecurity):
+        """
+        @param root: factory providing the root Referenceable used by the broker.
+        @type root: object providing or adaptable to L{IPBRoot}.
+
+        @param unsafeTracebacks: if set, tracebacks for exceptions will be sent
+            over the wire.
+        @type unsafeTracebacks: C{bool}
+
+        @param security: security options used by the broker, default to
+            C{globalSecurity}.
+        @type security: L{twisted.spread.jelly.SecurityOptions}
+        """
+        self.root = IPBRoot(root)
+        self.unsafeTracebacks = unsafeTracebacks
+        self.security = security
+
+
+    def buildProtocol(self, addr):
+        """
+        Return a Broker attached to the factory (as the service provider).
+        """
+        proto = self.protocol(isClient=False, security=self.security)
+        proto.factory = self
+        proto.setNameForLocal("root", self.root.rootObject(proto))
+        return proto
+
+    def clientConnectionMade(self, protocol):
+        # XXX does this method make any sense?
+        pass
+
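+# Illustrative sketch (not part of the upstream module): the simplest server,
+# publishing a pb.Root subclass directly instead of going through a Portal.
+# The Echoer class and the port number are made up for the example.
+#
+#     from twisted.internet import reactor
+#     from twisted.spread import pb
+#
+#     class Echoer(pb.Root):
+#         def remote_echo(self, value):
+#             return value
+#
+#     reactor.listenTCP(8789, pb.PBServerFactory(Echoer()))
+#     reactor.run()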
+
+class IUsernameMD5Password(ICredentials):
+    """
+    I encapsulate a username and a hashed password.
+
+    This credential is used for username/password over PB. CredentialCheckers
+    which check this kind of credential must store the passwords in plaintext
+    form or as a MD5 digest.
+
+    @type username: C{str} or C{Deferred}
+    @ivar username: The username associated with these credentials.
+    """
+
+    def checkPassword(password):
+        """
+        Validate these credentials against the correct password.
+
+        @type password: C{str}
+        @param password: The correct, plaintext password against which to
+            check.
+
+        @rtype: C{bool} or L{Deferred}
+        @return: C{True} if the credentials represented by this object match the
+            given password, C{False} if they do not, or a L{Deferred} which will
+            be called back with one of these values.
+        """
+
+    def checkMD5Password(password):
+        """
+        Validate these credentials against the correct MD5 digest of the
+        password.
+
+        @type password: C{str}
+        @param password: The correct MD5 digest of a password against which to
+            check.
+
+        @rtype: C{bool} or L{Deferred}
+        @return: C{True} if the credentials represented by this object match the
+            given digest, C{False} if they do not, or a L{Deferred} which will
+            be called back with one of these values.
+        """
+
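+# Illustrative sketch (not part of the upstream module): a checker for this
+# interface only needs the plaintext password on file.  InMemoryChecker and
+# its passwords mapping are hypothetical names; ICredentialsChecker and
+# UnauthorizedLogin come from twisted.cred.checkers and twisted.cred.error.
+#
+#     class InMemoryChecker:
+#         implements(checkers.ICredentialsChecker)
+#         credentialInterfaces = (IUsernameMD5Password,)
+#
+#         def __init__(self, passwords):
+#             self.passwords = passwords          # username -> plaintext
+#
+#         def requestAvatarId(self, credentials):
+#             correct = md5(self.passwords[credentials.username]).digest()
+#             d = defer.maybeDeferred(credentials.checkMD5Password, correct)
+#             def _checked(matched):
+#                 if matched:
+#                     return credentials.username
+#                 raise UnauthorizedLogin("bad password")
+#             return d.addCallback(_checked)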
+
+class _PortalRoot:
+    """Root object, used to login to portal."""
+
+    implements(IPBRoot)
+
+    def __init__(self, portal):
+        self.portal = portal
+
+    def rootObject(self, broker):
+        return _PortalWrapper(self.portal, broker)
+
+registerAdapter(_PortalRoot, Portal, IPBRoot)
+
+
+
+class _JellyableAvatarMixin:
+    """
+    Helper class for code which deals with avatars which PB must be capable of
+    sending to a peer.
+    """
+    def _cbLogin(self, (interface, avatar, logout)):
+        """
+        Ensure that the avatar to be returned to the client is jellyable and
+        set up disconnection notification to call the realm's logout object.
+        """
+        if not IJellyable.providedBy(avatar):
+            avatar = AsReferenceable(avatar, "perspective")
+
+        puid = avatar.processUniqueID()
+
+        # only call logout once, whether the connection is dropped (disconnect)
+        # or a logout occurs (cleanup), and be careful to drop the reference to
+        # it in either case
+        logout = [ logout ]
+        def maybeLogout():
+            if not logout:
+                return
+            fn = logout[0]
+            del logout[0]
+            fn()
+        self.broker._localCleanup[puid] = maybeLogout
+        self.broker.notifyOnDisconnect(maybeLogout)
+
+        return avatar
+
+
+
+class _PortalWrapper(Referenceable, _JellyableAvatarMixin):
+    """
+    Root Referenceable object, used to login to portal.
+    """
+
+    def __init__(self, portal, broker):
+        self.portal = portal
+        self.broker = broker
+
+
+    def remote_login(self, username):
+        """
+        Start of username/password login.
+        """
+        c = challenge()
+        return c, _PortalAuthChallenger(self.portal, self.broker, username, c)
+
+
+    def remote_loginAnonymous(self, mind):
+        """
+        Attempt an anonymous login.
+
+        @param mind: An object to use as the mind parameter to the portal login
+            call (possibly None).
+
+        @rtype: L{Deferred}
+        @return: A Deferred which will be called back with an avatar when login
+            succeeds or which will be errbacked if login fails somehow.
+        """
+        d = self.portal.login(Anonymous(), mind, IPerspective)
+        d.addCallback(self._cbLogin)
+        return d
+
+
+
+class _PortalAuthChallenger(Referenceable, _JellyableAvatarMixin):
+    """
+    Called with response to password challenge.
+    """
+    implements(IUsernameHashedPassword, IUsernameMD5Password)
+
+    def __init__(self, portal, broker, username, challenge):
+        self.portal = portal
+        self.broker = broker
+        self.username = username
+        self.challenge = challenge
+
+
+    def remote_respond(self, response, mind):
+        self.response = response
+        d = self.portal.login(self, mind, IPerspective)
+        d.addCallback(self._cbLogin)
+        return d
+
+
+    # IUsernameHashedPassword:
+    def checkPassword(self, password):
+        return self.checkMD5Password(md5(password).digest())
+
+
+    # IUsernameMD5Password
+    def checkMD5Password(self, md5Password):
+        md = md5()
+        md.update(md5Password)
+        md.update(self.challenge)
+        correct = md.digest()
+        return self.response == correct
+
+
+__all__ = [
+    # Everything from flavors is exposed publicly here.
+    'IPBRoot', 'Serializable', 'Referenceable', 'NoSuchMethod', 'Root',
+    'ViewPoint', 'Viewable', 'Copyable', 'Jellyable', 'Cacheable',
+    'RemoteCopy', 'RemoteCache', 'RemoteCacheObserver', 'copyTags',
+    'setUnjellyableForClass', 'setUnjellyableFactoryForClass',
+    'setUnjellyableForClassTree',
+    'setCopierForClass', 'setFactoryForClass', 'setCopierForClassTree',
+
+    'MAX_BROKER_REFS', 'portno',
+
+    'ProtocolError', 'DeadReferenceError', 'Error', 'PBConnectionLost',
+    'RemoteMethod', 'IPerspective', 'Avatar', 'AsReferenceable',
+    'RemoteReference', 'CopyableFailure', 'CopiedFailure', 'failure2Copyable',
+    'Broker', 'respond', 'challenge', 'PBClientFactory', 'PBServerFactory',
+    'IUsernameMD5Password',
+    ]
diff --git a/ThirdParty/Twisted/twisted/spread/publish.py b/ThirdParty/Twisted/twisted/spread/publish.py
new file mode 100644
index 0000000..5bc1868
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/publish.py
@@ -0,0 +1,142 @@
+# -*- test-case-name: twisted.test.test_pb -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Persistently cached objects for PB.
+
+Maintainer: Glyph Lefkowitz
+
+Future Plans: None known.
+"""
+
+import time
+
+from twisted.internet import defer
+from twisted.spread import banana, jelly, flavors
+
+
+class Publishable(flavors.Cacheable):
+    """An object whose cached state persists across sessions.
+    """
+    def __init__(self, publishedID):
+        self.republish()
+        self.publishedID = publishedID
+
+    def republish(self):
+        """Set the timestamp to current and (TODO) update all observers.
+        """
+        self.timestamp = time.time()
+
+    def view_getStateToPublish(self, perspective):
+        '(internal)'
+        return self.getStateToPublishFor(perspective)
+    
+    def getStateToPublishFor(self, perspective):
+        """Implement me to special-case your state for a perspective.
+        """
+        return self.getStateToPublish()
+
+    def getStateToPublish(self):
+        """Implement me to return state to copy as part of the publish phase.
+        """
+        raise NotImplementedError("%s.getStateToPublishFor" % self.__class__)
+
+    def getStateToCacheAndObserveFor(self, perspective, observer):
+        """Get all necessary metadata to keep a clientside cache.
+        """
+        if perspective:
+            pname = perspective.perspectiveName
+            sname = perspective.getService().serviceName
+        else:
+            pname = "None"
+            sname = "None"
+
+        return {"remote": flavors.ViewPoint(perspective, self),
+                "publishedID": self.publishedID,
+                "perspective": pname,
+                "service": sname,
+                "timestamp": self.timestamp}
+
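+# Illustrative sketch (not part of the upstream module): a minimal Publishable
+# subclass only has to pick a stable publishedID and say what state to copy;
+# PublishedScore and its scores attribute are made-up names.
+#
+#     class PublishedScore(Publishable):
+#         def __init__(self, publishedID, scores):
+#             Publishable.__init__(self, publishedID)
+#             self.scores = scores
+#
+#         def getStateToPublish(self):
+#             return {"scores": self.scores}
+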
+class RemotePublished(flavors.RemoteCache):
+    """The local representation of remote Publishable object.
+    """
+    isActivated = 0
+    _wasCleanWhenLoaded = 0
+    def getFileName(self, ext='pub'):
+        return ("%s-%s-%s.%s" %
+                (self.service, self.perspective, str(self.publishedID), ext))
+    
+    def setCopyableState(self, state):
+        self.__dict__.update(state)
+        self._activationListeners = []
+        try:
+            dataFile = file(self.getFileName(), "rb")
+            data = dataFile.read()
+            dataFile.close()
+        except IOError:
+            recent = 0
+        else:
+            newself = jelly.unjelly(banana.decode(data))
+            recent = (newself.timestamp == self.timestamp)
+        if recent:
+            self._cbGotUpdate(newself.__dict__)
+            self._wasCleanWhenLoaded = 1
+        else:
+            self.remote.callRemote('getStateToPublish').addCallbacks(self._cbGotUpdate)
+
+    def __getstate__(self):
+        other = self.__dict__.copy()
+        # Remove PB-specific attributes
+        del other['broker']
+        del other['remote']
+        del other['luid']
+        # remove my own runtime-tracking stuff
+        del other['_activationListeners']
+        del other['isActivated']
+        return other
+
+    def _cbGotUpdate(self, newState):
+        self.__dict__.update(newState)
+        self.isActivated = 1
+        # send out notifications
+        for listener in self._activationListeners:
+            listener(self)
+        self._activationListeners = []
+        self.activated()
+        dataFile = file(self.getFileName(), "wb")
+        dataFile.write(banana.encode(jelly.jelly(self)))
+        dataFile.close()
+
+
+    def activated(self):
+        """Implement this method if you want to be notified when your
+        publishable subclass is activated.
+        """
+        
+    def callWhenActivated(self, callback):
+        """Externally register for notification when this publishable has received all relevant data.
+        """
+        if self.isActivated:
+            callback(self)
+        else:
+            self._activationListeners.append(callback)
+
+def whenReady(d):
+    """
+    Wrap a deferred returned from a pb method in another deferred that
+    expects a RemotePublished as a result.  This will allow you to wait until
+    the result is really available.
+
+    Idiomatic usage would look like::
+
+        publish.whenReady(serverObject.getMeAPublishable()).addCallback(lookAtThePublishable)
+    """
+    d2 = defer.Deferred()
+    d.addCallbacks(_pubReady, d2.errback,
+                   callbackArgs=(d2,))
+    return d2
+
+def _pubReady(result, d2):
+    '(internal)'
+    result.callWhenActivated(d2.callback)
diff --git a/ThirdParty/Twisted/twisted/spread/ui/__init__.py b/ThirdParty/Twisted/twisted/spread/ui/__init__.py
new file mode 100644
index 0000000..56bf766
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/ui/__init__.py
@@ -0,0 +1,12 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Twisted Spread UI: UI utilities for various toolkits connecting to PB.
+"""
+
+# Undeprecating this until someone figures out a real plan for alternatives to spread.ui.
+##import warnings
+##warnings.warn("twisted.spread.ui is deprecated. Please do not use.", DeprecationWarning)
diff --git a/ThirdParty/Twisted/twisted/spread/ui/gtk2util.py b/ThirdParty/Twisted/twisted/spread/ui/gtk2util.py
new file mode 100644
index 0000000..a576388
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/ui/gtk2util.py
@@ -0,0 +1,218 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import gtk
+
+from twisted import copyright
+from twisted.internet import defer
+from twisted.python import failure, log, util
+from twisted.spread import pb
+from twisted.cred.credentials import UsernamePassword
+
+from twisted.internet import error as netError
+
+def login(client=None, **defaults):
+    """
+    @param host:
+    @param port:
+    @param identityName:
+    @param password:
+    @param serviceName:
+    @param perspectiveName:
+
+    @returntype: Deferred RemoteReference of Perspective
+    """
+    d = defer.Deferred()
+    LoginDialog(client, d, defaults)
+    return d
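+# Illustrative sketch (not part of the upstream module): typical use under the
+# gtk2 reactor; gotPerspective is a hypothetical callback, and any field not
+# passed as a default is filled in interactively by the dialog.
+#
+#     from twisted.internet import gtk2reactor
+#     gtk2reactor.install()
+#     from twisted.internet import reactor
+#
+#     d = login(host="localhost", port=8787, identityName="alice")
+#     d.addCallback(gotPerspective)
+#     reactor.run()
+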
+
+class GladeKeeper:
+    """
+    @cvar gladefile: The file in which the glade GUI definition is kept.
+    @type gladefile: str
+
+    @cvar _widgets: Widgets that should be attached to me as attributes.
+    @type _widgets: list of strings
+    """
+
+    gladefile = None
+    _widgets = ()
+
+    def __init__(self):
+        from gtk import glade
+        self.glade = glade.XML(self.gladefile)
+
+        # mold can go away when we get a newer pygtk (post 1.99.14)
+        mold = {}
+        for k in dir(self):
+            mold[k] = getattr(self, k)
+        self.glade.signal_autoconnect(mold)
+        self._setWidgets()
+
+    def _setWidgets(self):
+        get_widget = self.glade.get_widget
+        for widgetName in self._widgets:
+            setattr(self, "_" + widgetName, get_widget(widgetName))
+
+
+class LoginDialog(GladeKeeper):
+    # IdentityConnector host port identityName password
+    # requestLogin -> identityWrapper or login failure
+    # requestService serviceName perspectiveName client
+
+    # window killed
+    # cancel button pressed
+    # login button activated
+
+    fields = ['host','port','identityName','password',
+              'perspectiveName']
+
+    _widgets = ("hostEntry", "portEntry", "identityNameEntry", "passwordEntry",
+                "perspectiveNameEntry", "statusBar",
+                "loginDialog")
+
+    _advancedControls = ['perspectiveLabel', 'perspectiveNameEntry',
+                         'protocolLabel', 'versionLabel']
+
+    gladefile = util.sibpath(__file__, "login2.glade")
+
+    _timeoutID = None
+
+    def __init__(self, client, deferred, defaults):
+        self.client = client
+        self.deferredResult = deferred
+
+        GladeKeeper.__init__(self)
+
+        self.setDefaults(defaults)
+        self._loginDialog.show()
+
+
+    def setDefaults(self, defaults):
+        if not defaults.has_key('port'):
+            defaults['port'] = str(pb.portno)
+        elif isinstance(defaults['port'], (int, long)):
+            defaults['port'] = str(defaults['port'])
+
+        for k, v in defaults.iteritems():
+            if k in self.fields:
+                widget = getattr(self, "_%sEntry" % (k,))
+                widget.set_text(v)
+
+    def _setWidgets(self):
+        GladeKeeper._setWidgets(self)
+        self._statusContext = self._statusBar.get_context_id("Login dialog.")
+        get_widget = self.glade.get_widget
+        get_widget("versionLabel").set_text(copyright.longversion)
+        get_widget("protocolLabel").set_text("Protocol PB-%s" %
+                                             (pb.Broker.version,))
+
+    def _on_loginDialog_response(self, widget, response):
+        handlers = {gtk.RESPONSE_NONE: self._windowClosed,
+                   gtk.RESPONSE_DELETE_EVENT: self._windowClosed,
+                   gtk.RESPONSE_OK: self._doLogin,
+                   gtk.RESPONSE_CANCEL: self._cancelled}
+        handler = handlers.get(response)
+        if handler is not None:
+            handler()
+        else:
+            log.msg("Unexpected dialog response %r from %s" % (response,
+                                                               widget))
+
+    def _on_loginDialog_close(self, widget, userdata=None):
+        self._windowClosed()
+
+    def _on_loginDialog_destroy_event(self, widget, userdata=None):
+        self._windowClosed()
+
+    def _cancelled(self):
+        if not self.deferredResult.called:
+            self.deferredResult.errback(netError.UserError("User hit Cancel."))
+        self._loginDialog.destroy()
+
+    def _windowClosed(self, reason=None):
+        if not self.deferredResult.called:
+            self.deferredResult.errback(netError.UserError("Window closed."))
+
+    def _doLogin(self):
+        idParams = {}
+
+        idParams['host'] = self._hostEntry.get_text()
+        idParams['port'] = self._portEntry.get_text()
+        idParams['identityName'] = self._identityNameEntry.get_text()
+        idParams['password'] = self._passwordEntry.get_text()
+
+        try:
+            idParams['port'] = int(idParams['port'])
+        except ValueError:
+            pass
+
+        f = pb.PBClientFactory()
+        from twisted.internet import reactor
+        reactor.connectTCP(idParams['host'], idParams['port'], f)
+        creds = UsernamePassword(idParams['identityName'], idParams['password'])
+        d = f.login(creds, self.client)
+        def _timeoutLogin():
+            self._timeoutID = None
+            d.errback(failure.Failure(defer.TimeoutError("Login timed out.")))
+        self._timeoutID = reactor.callLater(30, _timeoutLogin)
+        d.addCallbacks(self._cbGotPerspective, self._ebFailedLogin)
+        self.statusMsg("Contacting server...")
+
+        # serviceName = self._serviceNameEntry.get_text()
+        # perspectiveName = self._perspectiveNameEntry.get_text()
+        # if not perspectiveName:
+        #     perspectiveName = idParams['identityName']
+
+        # d = _identityConnector.requestService(serviceName, perspectiveName,
+        #                                       self.client)
+        # d.addCallbacks(self._cbGotPerspective, self._ebFailedLogin)
+        # setCursor to waiting
+
+    def _cbGotPerspective(self, perspective):
+        self.statusMsg("Connected to server.")
+        if self._timeoutID is not None:
+            self._timeoutID.cancel()
+            self._timeoutID = None
+        self.deferredResult.callback(perspective)
+        # clear waiting cursor
+        self._loginDialog.destroy()
+
+    def _ebFailedLogin(self, reason):
+        if isinstance(reason, failure.Failure):
+            reason = reason.value
+        self.statusMsg(reason)
+        if isinstance(reason, (unicode, str)):
+            text = reason
+        else:
+            text = unicode(reason)
+        msg = gtk.MessageDialog(self._loginDialog,
+                                gtk.DIALOG_DESTROY_WITH_PARENT,
+                                gtk.MESSAGE_ERROR,
+                                gtk.BUTTONS_CLOSE,
+                                text)
+        msg.show_all()
+        msg.connect("response", lambda *a: msg.destroy())
+
+        # hostname not found
+        # host unreachable
+        # connection refused
+        # authentication failed
+        # no such service
+        # no such perspective
+        # internal server error
+
+    def _on_advancedButton_toggled(self, widget, userdata=None):
+        active = widget.get_active()
+        if active:
+            op = "show"
+        else:
+            op = "hide"
+        for widgetName in self._advancedControls:
+            widget = self.glade.get_widget(widgetName)
+            getattr(widget, op)()
+
+    def statusMsg(self, text):
+        if not isinstance(text, (unicode, str)):
+            text = unicode(text)
+        return self._statusBar.push(self._statusContext, text)
diff --git a/ThirdParty/Twisted/twisted/spread/ui/login2.glade b/ThirdParty/Twisted/twisted/spread/ui/login2.glade
new file mode 100644
index 0000000..046bb8c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/ui/login2.glade
@@ -0,0 +1,461 @@
+<?xml version="1.0" standalone="no"?> <!--*- mode: xml -*-->
+<!DOCTYPE glade-interface SYSTEM "http://glade.gnome.org/glade-2.0.dtd">
+
+<glade-interface>
+
+<widget class="GtkDialog" id="loginDialog">
+  <property name="title" translatable="yes">Login</property>
+  <property name="type">GTK_WINDOW_TOPLEVEL</property>
+  <property name="window_position">GTK_WIN_POS_NONE</property>
+  <property name="modal">False</property>
+  <property name="resizable">True</property>
+  <property name="destroy_with_parent">True</property>
+  <property name="has_separator">True</property>
+  <signal name="response" handler="_on_loginDialog_response" last_modification_time="Sat, 25 Jan 2003 13:52:57 GMT"/>
+  <signal name="close" handler="_on_loginDialog_close" last_modification_time="Sat, 25 Jan 2003 13:53:04 GMT"/>
+
+  <child internal-child="vbox">
+    <widget class="GtkVBox" id="dialog-vbox1">
+      <property name="visible">True</property>
+      <property name="homogeneous">False</property>
+      <property name="spacing">0</property>
+
+      <child internal-child="action_area">
+	<widget class="GtkHButtonBox" id="dialog-action_area1">
+	  <property name="visible">True</property>
+	  <property name="layout_style">GTK_BUTTONBOX_END</property>
+
+	  <child>
+	    <widget class="GtkButton" id="cancelbutton1">
+	      <property name="visible">True</property>
+	      <property name="can_default">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="label">gtk-cancel</property>
+	      <property name="use_stock">True</property>
+	      <property name="relief">GTK_RELIEF_NORMAL</property>
+	      <property name="response_id">-6</property>
+	    </widget>
+	  </child>
+
+	  <child>
+	    <widget class="GtkButton" id="loginButton">
+	      <property name="visible">True</property>
+	      <property name="can_default">True</property>
+	      <property name="has_default">True</property>
+	      <property name="can_focus">True</property>
+	      <property name="relief">GTK_RELIEF_NORMAL</property>
+	      <property name="response_id">-5</property>
+
+	      <child>
+		<widget class="GtkAlignment" id="alignment1">
+		  <property name="visible">True</property>
+		  <property name="xalign">0.5</property>
+		  <property name="yalign">0.5</property>
+		  <property name="xscale">0</property>
+		  <property name="yscale">0</property>
+
+		  <child>
+		    <widget class="GtkHBox" id="hbox2">
+		      <property name="visible">True</property>
+		      <property name="homogeneous">False</property>
+		      <property name="spacing">2</property>
+
+		      <child>
+			<widget class="GtkImage" id="image1">
+			  <property name="visible">True</property>
+			  <property name="stock">gtk-ok</property>
+			  <property name="icon_size">4</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+
+		      <child>
+			<widget class="GtkLabel" id="label9">
+			  <property name="visible">True</property>
+			  <property name="label" translatable="yes">_Login</property>
+			  <property name="use_underline">True</property>
+			  <property name="use_markup">False</property>
+			  <property name="justify">GTK_JUSTIFY_LEFT</property>
+			  <property name="wrap">False</property>
+			  <property name="selectable">False</property>
+			  <property name="xalign">0.5</property>
+			  <property name="yalign">0.5</property>
+			  <property name="xpad">0</property>
+			  <property name="ypad">0</property>
+			</widget>
+			<packing>
+			  <property name="padding">0</property>
+			  <property name="expand">False</property>
+			  <property name="fill">False</property>
+			</packing>
+		      </child>
+		    </widget>
+		  </child>
+		</widget>
+	      </child>
+	    </widget>
+	  </child>
+	</widget>
+	<packing>
+	  <property name="padding">0</property>
+	  <property name="expand">False</property>
+	  <property name="fill">True</property>
+	  <property name="pack_type">GTK_PACK_END</property>
+	</packing>
+      </child>
+
+      <child>
+	<widget class="GtkStatusbar" id="statusBar">
+	  <property name="visible">True</property>
+	  <property name="has_resize_grip">False</property>
+	</widget>
+	<packing>
+	  <property name="padding">0</property>
+	  <property name="expand">False</property>
+	  <property name="fill">False</property>
+	  <property name="pack_type">GTK_PACK_END</property>
+	</packing>
+      </child>
+
+      <child>
+	<widget class="GtkTable" id="table1">
+	  <property name="visible">True</property>
+	  <property name="n_rows">6</property>
+	  <property name="n_columns">2</property>
+	  <property name="homogeneous">False</property>
+	  <property name="row_spacing">2</property>
+	  <property name="column_spacing">0</property>
+
+	  <child>
+	    <widget class="GtkLabel" id="hostLabel">
+	      <property name="visible">True</property>
+	      <property name="label" translatable="yes">_Host:</property>
+	      <property name="use_underline">True</property>
+	      <property name="use_markup">False</property>
+	      <property name="justify">GTK_JUSTIFY_LEFT</property>
+	      <property name="wrap">False</property>
+	      <property name="selectable">False</property>
+	      <property name="xalign">0.9</property>
+	      <property name="yalign">0.5</property>
+	      <property name="xpad">0</property>
+	      <property name="ypad">0</property>
+	      <property name="mnemonic_widget">hostEntry</property>
+	      <accessibility>
+		<atkrelation target="hostEntry" type="label-for"/>
+		<atkrelation target="portEntry" type="label-for"/>
+	      </accessibility>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">0</property>
+	      <property name="right_attach">1</property>
+	      <property name="top_attach">0</property>
+	      <property name="bottom_attach">1</property>
+	      <property name="x_options">fill</property>
+	      <property name="y_options"></property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkHBox" id="hbox1">
+	      <property name="visible">True</property>
+	      <property name="homogeneous">False</property>
+	      <property name="spacing">0</property>
+
+	      <child>
+		<widget class="GtkEntry" id="hostEntry">
+		  <property name="visible">True</property>
+		  <property name="tooltip" translatable="yes">The name of a host to connect to.</property>
+		  <property name="can_focus">True</property>
+		  <property name="has_focus">True</property>
+		  <property name="editable">True</property>
+		  <property name="visibility">True</property>
+		  <property name="max_length">0</property>
+		  <property name="text" translatable="yes">localhost</property>
+		  <property name="has_frame">True</property>
+		  <property name="invisible_char" translatable="yes">*</property>
+		  <property name="activates_default">True</property>
+		  <accessibility>
+		    <atkrelation target="hostLabel" type="labelled-by"/>
+		  </accessibility>
+		</widget>
+		<packing>
+		  <property name="padding">0</property>
+		  <property name="expand">True</property>
+		  <property name="fill">True</property>
+		</packing>
+	      </child>
+
+	      <child>
+		<widget class="GtkEntry" id="portEntry">
+		  <property name="visible">True</property>
+		  <property name="tooltip" translatable="yes">The number of a port to connect on.</property>
+		  <property name="can_focus">True</property>
+		  <property name="editable">True</property>
+		  <property name="visibility">True</property>
+		  <property name="max_length">0</property>
+		  <property name="text" translatable="yes">8787</property>
+		  <property name="has_frame">True</property>
+		  <property name="invisible_char" translatable="yes">*</property>
+		  <property name="activates_default">True</property>
+		  <property name="width_chars">5</property>
+		</widget>
+		<packing>
+		  <property name="padding">0</property>
+		  <property name="expand">False</property>
+		  <property name="fill">True</property>
+		</packing>
+	      </child>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">1</property>
+	      <property name="right_attach">2</property>
+	      <property name="top_attach">0</property>
+	      <property name="bottom_attach">1</property>
+	      <property name="y_options">fill</property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkLabel" id="nameLabel">
+	      <property name="visible">True</property>
+	      <property name="label" translatable="yes">_Name:</property>
+	      <property name="use_underline">True</property>
+	      <property name="use_markup">False</property>
+	      <property name="justify">GTK_JUSTIFY_LEFT</property>
+	      <property name="wrap">False</property>
+	      <property name="selectable">False</property>
+	      <property name="xalign">0.9</property>
+	      <property name="yalign">0.5</property>
+	      <property name="xpad">0</property>
+	      <property name="ypad">0</property>
+	      <property name="mnemonic_widget">identityNameEntry</property>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">0</property>
+	      <property name="right_attach">1</property>
+	      <property name="top_attach">1</property>
+	      <property name="bottom_attach">2</property>
+	      <property name="x_options">fill</property>
+	      <property name="y_options"></property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkEntry" id="identityNameEntry">
+	      <property name="visible">True</property>
+	      <property name="tooltip" translatable="yes">An identity to log in as.</property>
+	      <property name="can_focus">True</property>
+	      <property name="editable">True</property>
+	      <property name="visibility">True</property>
+	      <property name="max_length">0</property>
+	      <property name="text" translatable="yes"></property>
+	      <property name="has_frame">True</property>
+	      <property name="invisible_char" translatable="yes">*</property>
+	      <property name="activates_default">True</property>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">1</property>
+	      <property name="right_attach">2</property>
+	      <property name="top_attach">1</property>
+	      <property name="bottom_attach">2</property>
+	      <property name="y_options"></property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkEntry" id="passwordEntry">
+	      <property name="visible">True</property>
+	      <property name="tooltip" translatable="yes">The Identity's log-in password.</property>
+	      <property name="can_focus">True</property>
+	      <property name="editable">True</property>
+	      <property name="visibility">False</property>
+	      <property name="max_length">0</property>
+	      <property name="text" translatable="yes"></property>
+	      <property name="has_frame">True</property>
+	      <property name="invisible_char" translatable="yes">*</property>
+	      <property name="activates_default">True</property>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">1</property>
+	      <property name="right_attach">2</property>
+	      <property name="top_attach">2</property>
+	      <property name="bottom_attach">3</property>
+	      <property name="y_options"></property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkLabel" id="passwordLabel">
+	      <property name="visible">True</property>
+	      <property name="label" translatable="yes">_Password:</property>
+	      <property name="use_underline">True</property>
+	      <property name="use_markup">False</property>
+	      <property name="justify">GTK_JUSTIFY_LEFT</property>
+	      <property name="wrap">False</property>
+	      <property name="selectable">False</property>
+	      <property name="xalign">0.9</property>
+	      <property name="yalign">0.5</property>
+	      <property name="xpad">0</property>
+	      <property name="ypad">0</property>
+	      <property name="mnemonic_widget">passwordEntry</property>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">0</property>
+	      <property name="right_attach">1</property>
+	      <property name="top_attach">2</property>
+	      <property name="bottom_attach">3</property>
+	      <property name="x_options">fill</property>
+	      <property name="y_options"></property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkLabel" id="perspectiveLabel">
+	      <property name="label" translatable="yes">Perspective:</property>
+	      <property name="use_underline">False</property>
+	      <property name="use_markup">False</property>
+	      <property name="justify">GTK_JUSTIFY_LEFT</property>
+	      <property name="wrap">False</property>
+	      <property name="selectable">False</property>
+	      <property name="xalign">0.9</property>
+	      <property name="yalign">0.5</property>
+	      <property name="xpad">0</property>
+	      <property name="ypad">0</property>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">0</property>
+	      <property name="right_attach">1</property>
+	      <property name="top_attach">5</property>
+	      <property name="bottom_attach">6</property>
+	      <property name="x_options">fill</property>
+	      <property name="y_options"></property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkEntry" id="perspectiveNameEntry">
+	      <property name="tooltip" translatable="yes">The name of a Perspective to request.</property>
+	      <property name="can_focus">True</property>
+	      <property name="editable">True</property>
+	      <property name="visibility">True</property>
+	      <property name="max_length">0</property>
+	      <property name="text" translatable="yes"></property>
+	      <property name="has_frame">True</property>
+	      <property name="invisible_char" translatable="yes">*</property>
+	      <property name="activates_default">False</property>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">1</property>
+	      <property name="right_attach">2</property>
+	      <property name="top_attach">5</property>
+	      <property name="bottom_attach">6</property>
+	      <property name="y_options"></property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkVBox" id="vbox1">
+	      <property name="visible">True</property>
+	      <property name="homogeneous">False</property>
+	      <property name="spacing">0</property>
+
+	      <child>
+		<widget class="GtkLabel" id="protocolLabel">
+		  <property name="label" translatable="yes">Insert Protocol Version Here</property>
+		  <property name="use_underline">False</property>
+		  <property name="use_markup">False</property>
+		  <property name="justify">GTK_JUSTIFY_LEFT</property>
+		  <property name="wrap">False</property>
+		  <property name="selectable">False</property>
+		  <property name="xalign">0.5</property>
+		  <property name="yalign">0.5</property>
+		  <property name="xpad">0</property>
+		  <property name="ypad">0</property>
+		</widget>
+		<packing>
+		  <property name="padding">0</property>
+		  <property name="expand">False</property>
+		  <property name="fill">False</property>
+		</packing>
+	      </child>
+
+	      <child>
+		<widget class="GtkLabel" id="versionLabel">
+		  <property name="label" translatable="yes">Insert Twisted Version Here</property>
+		  <property name="use_underline">False</property>
+		  <property name="use_markup">False</property>
+		  <property name="justify">GTK_JUSTIFY_LEFT</property>
+		  <property name="wrap">False</property>
+		  <property name="selectable">False</property>
+		  <property name="xalign">0.5</property>
+		  <property name="yalign">0.5</property>
+		  <property name="xpad">0</property>
+		  <property name="ypad">0</property>
+		</widget>
+		<packing>
+		  <property name="padding">0</property>
+		  <property name="expand">False</property>
+		  <property name="fill">False</property>
+		</packing>
+	      </child>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">0</property>
+	      <property name="right_attach">2</property>
+	      <property name="top_attach">4</property>
+	      <property name="bottom_attach">5</property>
+	      <property name="x_options">fill</property>
+	      <property name="y_options">fill</property>
+	    </packing>
+	  </child>
+
+	  <child>
+	    <widget class="GtkAlignment" id="alignment2">
+	      <property name="visible">True</property>
+	      <property name="xalign">1</property>
+	      <property name="yalign">0.5</property>
+	      <property name="xscale">0</property>
+	      <property name="yscale">1</property>
+
+	      <child>
+		<widget class="GtkToggleButton" id="advancedButton">
+		  <property name="visible">True</property>
+		  <property name="tooltip" translatable="yes">Advanced options.</property>
+		  <property name="can_focus">True</property>
+		  <property name="label" translatable="yes">Advanced >></property>
+		  <property name="use_underline">True</property>
+		  <property name="relief">GTK_RELIEF_NORMAL</property>
+		  <property name="active">False</property>
+		  <property name="inconsistent">False</property>
+		  <signal name="toggled" handler="_on_advancedButton_toggled" object="Login" last_modification_time="Sat, 25 Jan 2003 13:47:17 GMT"/>
+		</widget>
+	      </child>
+	    </widget>
+	    <packing>
+	      <property name="left_attach">0</property>
+	      <property name="right_attach">2</property>
+	      <property name="top_attach">3</property>
+	      <property name="bottom_attach">4</property>
+	      <property name="y_options"></property>
+	    </packing>
+	  </child>
+	</widget>
+	<packing>
+	  <property name="padding">0</property>
+	  <property name="expand">False</property>
+	  <property name="fill">False</property>
+	</packing>
+      </child>
+    </widget>
+  </child>
+</widget>
+
+</glade-interface>
diff --git a/ThirdParty/Twisted/twisted/spread/ui/tktree.py b/ThirdParty/Twisted/twisted/spread/ui/tktree.py
new file mode 100644
index 0000000..8fbe462
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/ui/tktree.py
@@ -0,0 +1,204 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+What I want it to look like:
+
++- One
+| \- Two
+| |- Three
+| |- Four
+| +- Five
+| | \- Six
+| |- Seven
++- Eight
+| \- Nine
+"""
+
+import os
+from Tkinter import *
+
+class Node:
+    def __init__(self):
+        """
+        Do whatever you want here.
+        """
+        self.item=None
+    def getName(self):
+        """
+        Return the name of this node in the tree.
+        """
+        pass
+    def isExpandable(self):
+        """
+        Return true if this node is expandable.
+        """
+        return len(self.getSubNodes())>0
+    def getSubNodes(self):
+        """
+        Return the sub nodes of this node.
+        """
+        return []
+    def gotDoubleClick(self):
+        """
+        Called when we are double clicked.
+        """
+        pass
+    def updateMe(self):
+        """
+        Call me when something about me changes, so that my representation
+        changes.
+        """
+        if self.item:
+            self.item.update()
+
+class FileNode(Node):
+    def __init__(self,name):
+        Node.__init__(self)
+        self.name=name
+    def getName(self):
+        return os.path.basename(self.name)
+    def isExpandable(self):
+        return os.path.isdir(self.name)
+    def getSubNodes(self):
+        names=map(lambda x,n=self.name:os.path.join(n,x),os.listdir(self.name))
+        return map(FileNode,names)
+
+class TreeItem:
+    def __init__(self,widget,parent,node):
+        self.widget=widget
+        self.node=node
+        node.item=self
+        if self.node.isExpandable():
+            self.expand=0
+        else:
+            self.expand=None
+        self.parent=parent
+        if parent:
+            self.level=self.parent.level+1
+        else:
+            self.level=0
+        self.first=0 # gets set in Tree.expand()
+        self.subitems=[]
+    def __del__(self):
+        del self.node
+        del self.widget
+    def __repr__(self):
+        return "<Item for Node %s at level %s>"%(self.node.getName(),self.level)
+    def render(self):
+        """
+        Override in a subclass.
+        """
+        raise NotImplementedError
+    def update(self):
+        self.widget.update(self)
+
+class ListboxTreeItem(TreeItem):
+    def render(self):
+        start=self.level*"|    "
+        if self.expand==None and not self.first:
+            start=start+"|"
+        elif self.expand==0:
+            start=start+"L"
+        elif self.expand==1:
+            start=start+"+"
+        else:
+            start=start+"\\"
+        r=[start+"- "+self.node.getName()]
+        if self.expand:
+            for i in self.subitems:
+                r.extend(i.render())
+        return r
+
+class ListboxTree:
+    def __init__(self,parent=None,**options):
+        self.box=apply(Listbox,[parent],options)
+        self.box.bind("<Double-1>",self.flip)
+        self.roots=[]
+        self.items=[]
+    def pack(self,*args,**kw):
+        """
+        for packing.
+        """
+        apply(self.box.pack,args,kw)
+    def grid(self,*args,**kw):
+        """
+        for gridding.
+        """
+        apply(self.box.grid,args,kw)
+    def yview(self,*args,**kw):
+        """
+        for scrolling.
+        """
+        apply(self.box.yview,args,kw)
+    def addRoot(self,node):
+        r=ListboxTreeItem(self,None,node)
+        self.roots.append(r)
+        self.items.append(r)
+        self.box.insert(END,r.render()[0])
+        return r
+    def curselection(self):
+        c=self.box.curselection()
+        if not c: return
+        return self.items[int(c[0])]
+    def flip(self,*foo):
+        if not self.box.curselection(): return
+        item=self.items[int(self.box.curselection()[0])]
+        if item.expand==None: return
+        if not item.expand:
+            self.expand(item)
+        else:
+            self.close(item)
+        item.node.gotDoubleClick()
+    def expand(self,item):
+        if item.expand or item.expand==None: return
+        item.expand=1
+        item.subitems=map(lambda x,i=item,s=self:ListboxTreeItem(s,i,x),item.node.getSubNodes())
+        if item.subitems:
+            item.subitems[0].first=1
+        i=self.items.index(item)
+        self.items,after=self.items[:i+1],self.items[i+1:]
+        self.items=self.items+item.subitems+after
+        c=self.items.index(item)
+        self.box.delete(c)
+        r=item.render()
+        for i in r:
+            self.box.insert(c,i)
+            c=c+1
+    def close(self,item):
+        if not item.expand: return
+        item.expand=0
+        length=len(item.subitems)
+        for i in item.subitems:
+            self.close(i)
+        c=self.items.index(item)
+        del self.items[c+1:c+1+length]
+        for i in range(length+1):
+            self.box.delete(c)
+        self.box.insert(c,item.render()[0])
+    def remove(self,item):
+        if item.expand:
+            self.close(item)
+        c=self.items.index(item)
+        del self.items[c]
+        if item.parent:
+            item.parent.subitems.remove(item)
+        self.box.delete(c)
+    def update(self,item):
+        if item.expand==None:
+            c=self.items.index(item)
+            self.box.delete(c)
+            self.box.insert(c,item.render()[0])
+        elif item.expand:
+            self.close(item)
+            self.expand(item)
+
+if __name__=="__main__":
+    tk=Tk()
+    s=Scrollbar()
+    t=ListboxTree(tk,yscrollcommand=s.set)
+    t.pack(side=LEFT,fill=BOTH)
+    s.config(command=t.yview)
+    s.pack(side=RIGHT,fill=Y)
+    t.addRoot(FileNode("C:/"))
+    #mainloop()
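The Node/ListboxTree API added above is subclass-driven: a Node only has to report its name, whether it expands, and its children, and ListboxTree takes care of rendering. A minimal sketch of a custom node, assuming the same Tkinter setup as the __main__ block (the DictNode class is illustrative, not part of the module):

from Tkinter import Tk, LEFT, BOTH
from twisted.spread.ui.tktree import Node, ListboxTree

class DictNode(Node):
    # Illustrative: expose a nested dict as a tree; leaves are empty dicts.
    def __init__(self, name, data):
        Node.__init__(self)
        self.name = name
        self.data = data
    def getName(self):
        return self.name
    def getSubNodes(self):
        return [DictNode(k, v) for k, v in self.data.items()]

tk = Tk()
t = ListboxTree(tk)
t.pack(side=LEFT, fill=BOTH)
t.addRoot(DictNode("root", {"a": {"b": {}}, "c": {}}))
# tk.mainloop()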
diff --git a/ThirdParty/Twisted/twisted/spread/ui/tkutil.py b/ThirdParty/Twisted/twisted/spread/ui/tkutil.py
new file mode 100644
index 0000000..2aee67d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/ui/tkutil.py
@@ -0,0 +1,397 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""Utilities for building L{PB<twisted.spread.pb>} clients with L{Tkinter}.
+"""
+from Tkinter import *
+from tkSimpleDialog import _QueryString
+from tkFileDialog import _Dialog
+from twisted.spread import pb
+from twisted.internet import reactor
+from twisted import copyright
+
+import string
+
+#normalFont = Font("-adobe-courier-medium-r-normal-*-*-120-*-*-m-*-iso8859-1")
+#boldFont = Font("-adobe-courier-bold-r-normal-*-*-120-*-*-m-*-iso8859-1")
+#errorFont = Font("-adobe-courier-medium-o-normal-*-*-120-*-*-m-*-iso8859-1")
+
+class _QueryPassword(_QueryString):
+    def body(self, master):
+
+        w = Label(master, text=self.prompt, justify=LEFT)
+        w.grid(row=0, padx=5, sticky=W)
+
+        self.entry = Entry(master, name="entry",show="*")
+        self.entry.grid(row=1, padx=5, sticky=W+E)
+
+        if self.initialvalue:
+            self.entry.insert(0, self.initialvalue)
+            self.entry.select_range(0, END)
+
+        return self.entry
+
+def askpassword(title, prompt, **kw):
+    '''Get a password from the user.
+
+    @param title: the dialog title
+    @param prompt: the label text
+    @param **kw: see L{SimpleDialog} class
+
+    @returns: a string
+    '''
+    d = apply(_QueryPassword, (title, prompt), kw)
+    return d.result
+
+def grid_setexpand(widget):
+    cols,rows=widget.grid_size()
+    for i in range(cols):
+        widget.columnconfigure(i,weight=1)
+    for i in range(rows):
+        widget.rowconfigure(i,weight=1)
+
+class CList(Frame):
+    def __init__(self,parent,labels,disablesorting=0,**kw):
+        Frame.__init__(self,parent)
+        self.labels=labels
+        self.lists=[]
+        self.disablesorting=disablesorting
+        kw["exportselection"]=0
+        for i in range(len(labels)):
+            b=Button(self,text=labels[i],anchor=W,height=1,pady=0)
+            b.config(command=lambda s=self,i=i:s.setSort(i))
+            b.grid(column=i,row=0,sticky=N+E+W)
+            box=apply(Listbox,(self,),kw)
+            box.grid(column=i,row=1,sticky=N+E+S+W)
+            self.lists.append(box)
+        grid_setexpand(self)
+        self.rowconfigure(0,weight=0)
+        self._callall("bind",'<Button-1>',self.Button1)
+        self._callall("bind",'<B1-Motion>',self.Button1)
+        self.bind('<Up>',self.UpKey)
+        self.bind('<Down>',self.DownKey)
+        self.sort=None
+
+    def _callall(self,funcname,*args,**kw):
+        rets=[]
+        for l in self.lists:
+            func=getattr(l,funcname)
+            ret=apply(func,args,kw)
+            if ret!=None: rets.append(ret)
+        if rets: return rets
+
+    def Button1(self,e):
+        index=self.nearest(e.y)
+        self.select_clear(0,END)
+        self.select_set(index)
+        self.activate(index)
+        return "break"
+
+    def UpKey(self,e):
+        index=self.index(ACTIVE)
+        if index:
+            self.select_clear(0,END)
+            self.select_set(index-1)
+        return "break"
+
+    def DownKey(self,e):
+        index=self.index(ACTIVE)
+        if index!=self.size()-1:
+            self.select_clear(0,END)
+            self.select_set(index+1)
+        return "break"
+
+    def setSort(self,index):
+        if self.sort==None:
+            self.sort=[index,1]
+        elif self.sort[0]==index:
+            self.sort[1]=-self.sort[1]
+        else:
+            self.sort=[index,1]
+        self._sort()
+
+    def _sort(self):
+        if self.disablesorting:
+            return
+        if self.sort==None:
+            return
+        ind,direc=self.sort
+        li=list(self.get(0,END))
+        li.sort(lambda x,y,i=ind,d=direc:d*cmp(x[i],y[i]))
+        self.delete(0,END)
+        for l in li:
+            self._insert(END,l)
+    def activate(self,index):
+        self._callall("activate",index)
+
+   # def bbox(self,index):
+   #     return self._callall("bbox",index)
+
+    def curselection(self):
+        return self.lists[0].curselection()
+
+    def delete(self,*args):
+        apply(self._callall,("delete",)+args)
+
+    def get(self,*args):
+        bad=apply(self._callall,("get",)+args)
+        if len(args)==1:
+            return bad
+        ret=[]
+        for i in range(len(bad[0])):
+            r=[]
+            for j in range(len(bad)):
+                r.append(bad[j][i])
+            ret.append(r)
+        return ret
+
+    def index(self,index):
+        return self.lists[0].index(index)
+
+    def insert(self,index,items):
+        self._insert(index,items)
+        self._sort()
+
+    def _insert(self,index,items):
+        for i in range(len(items)):
+            self.lists[i].insert(index,items[i])
+
+    def nearest(self,y):
+        return self.lists[0].nearest(y)
+
+    def see(self,index):
+        self._callall("see",index)
+
+    def size(self):
+        return self.lists[0].size()
+
+    def selection_anchor(self,index):
+        self._callall("selection_anchor",index)
+
+    select_anchor=selection_anchor
+
+    def selection_clear(self,*args):
+        apply(self._callall,("selection_clear",)+args)
+
+    select_clear=selection_clear
+
+    def selection_includes(self,index):
+        return self.lists[0].select_includes(index)
+
+    select_includes=selection_includes
+
+    def selection_set(self,*args):
+        apply(self._callall,("selection_set",)+args)
+
+    select_set=selection_set
+
+    def xview(self,*args):
+        if not args: return self.lists[0].xview()
+        apply(self._callall,("xview",)+args)
+
+    def yview(self,*args):
+        if not args: return self.lists[0].yview()
+        apply(self._callall,("yview",)+args)
+
+class ProgressBar:
+    def __init__(self, master=None, orientation="horizontal",
+                 min=0, max=100, width=100, height=18,
+                 doLabel=1, appearance="sunken",
+                 fillColor="blue", background="gray",
+                 labelColor="yellow", labelFont="Verdana",
+                 labelText="", labelFormat="%d%%",
+                 value=0, bd=2):
+        # preserve various values
+        self.master=master
+        self.orientation=orientation
+        self.min=min
+        self.max=max
+        self.width=width
+        self.height=height
+        self.doLabel=doLabel
+        self.fillColor=fillColor
+        self.labelFont= labelFont
+        self.labelColor=labelColor
+        self.background=background
+        self.labelText=labelText
+        self.labelFormat=labelFormat
+        self.value=value
+        self.frame=Frame(master, relief=appearance, bd=bd)
+        self.canvas=Canvas(self.frame, height=height, width=width, bd=0,
+                           highlightthickness=0, background=background)
+        self.scale=self.canvas.create_rectangle(0, 0, width, height,
+                                                fill=fillColor)
+        self.label=self.canvas.create_text(self.canvas.winfo_reqwidth() / 2,
+                                           height / 2, text=labelText,
+                                           anchor="c", fill=labelColor,
+                                           font=self.labelFont)
+        self.update()
+        self.canvas.pack(side='top', fill='x', expand='no')
+
+    def updateProgress(self, newValue, newMax=None):
+        if newMax:
+            self.max = newMax
+        self.value = newValue
+        self.update()
+
+    def update(self):
+        # Trim the values to be between min and max
+        value=self.value
+        if value > self.max:
+            value = self.max
+        if value < self.min:
+            value = self.min
+        # Adjust the rectangle
+        if self.orientation == "horizontal":
+            self.canvas.coords(self.scale, 0, 0,
+              float(value) / self.max * self.width, self.height)
+        else:
+            self.canvas.coords(self.scale, 0,
+                               self.height - (float(value) /
+                                              self.max*self.height),
+                               self.width, self.height)
+        # Now update the colors
+        self.canvas.itemconfig(self.scale, fill=self.fillColor)
+        self.canvas.itemconfig(self.label, fill=self.labelColor)
+        # And update the label
+        if self.doLabel:
+            if value:
+                if value >= 0:
+                    pvalue = int((float(value) / float(self.max)) *
+                                   100.0)
+                else:
+                    pvalue = 0
+                self.canvas.itemconfig(self.label, text=self.labelFormat
+                                         % pvalue)
+            else:
+                self.canvas.itemconfig(self.label, text='')
+        else:
+            self.canvas.itemconfig(self.label, text=self.labelFormat %
+                                   self.labelText)
+        self.canvas.update_idletasks()
+
+class DirectoryBrowser(_Dialog):
+    command = "tk_chooseDirectory"
+
+def askdirectory(**options):
+    "Ask for a directory to save to."
+
+    return apply(DirectoryBrowser, (), options).show()
+
+class GenericLogin(Toplevel):
+    def __init__(self,callback,buttons):
+        Toplevel.__init__(self)
+        self.callback=callback
+        Label(self,text="Twisted v%s"%copyright.version).grid(column=0,row=0,columnspan=2)
+        self.entries={}
+        row=1
+        for stuff in buttons:
+            label,value=stuff[:2]
+            if len(stuff)==3:
+                dict=stuff[2]
+            else: dict={}
+            Label(self,text=label+": ").grid(column=0,row=row)
+            e=apply(Entry,(self,),dict)
+            e.grid(column=1,row=row)
+            e.insert(0,value)
+            self.entries[label]=e
+            row=row+1
+        Button(self,text="Login",command=self.doLogin).grid(column=0,row=row)
+        Button(self,text="Cancel",command=self.close).grid(column=1,row=row)
+        self.protocol('WM_DELETE_WINDOW',self.close)
+
+    def close(self):
+        self.tk.quit()
+        self.destroy()
+
+    def doLogin(self):
+        values={}
+        for k in self.entries.keys():
+            values[string.lower(k)]=self.entries[k].get()
+        self.callback(values)
+        self.destroy()
+
+class Login(Toplevel):
+    def __init__(self,
+                 callback,
+                 referenced = None,
+                 initialUser = "guest",
+                 initialPassword = "guest",
+                 initialHostname = "localhost",
+                 initialService  = "",
+                 initialPortno   = pb.portno):
+        Toplevel.__init__(self)
+        version_label = Label(self,text="Twisted v%s" % copyright.version)
+        self.pbReferenceable = referenced
+        self.pbCallback = callback
+        # version_label.show()
+        self.username = Entry(self)
+        self.password = Entry(self,show='*')
+        self.hostname = Entry(self)
+        self.service  = Entry(self)
+        self.port     = Entry(self)
+
+        self.username.insert(0,initialUser)
+        self.password.insert(0,initialPassword)
+        self.service.insert(0,initialService)
+        self.hostname.insert(0,initialHostname)
+        self.port.insert(0,str(initialPortno))
+
+        userlbl=Label(self,text="Username:")
+        passlbl=Label(self,text="Password:")
+        servicelbl=Label(self,text="Service:")
+        hostlbl=Label(self,text="Hostname:")
+        portlbl=Label(self,text="Port #:")
+        self.logvar=StringVar()
+        self.logvar.set("Protocol PB-%s"%pb.Broker.version)
+        self.logstat  = Label(self,textvariable=self.logvar)
+        self.okbutton = Button(self,text="Log In", command=self.login)
+
+        version_label.grid(column=0,row=0,columnspan=2)
+        z=0
+        for i in [[userlbl,self.username],
+                  [passlbl,self.password],
+                  [hostlbl,self.hostname],
+                  [servicelbl,self.service],
+                  [portlbl,self.port]]:
+            i[0].grid(column=0,row=z+1)
+            i[1].grid(column=1,row=z+1)
+            z = z+1
+
+        self.logstat.grid(column=0,row=6,columnspan=2)
+        self.okbutton.grid(column=0,row=7,columnspan=2)
+
+        self.protocol('WM_DELETE_WINDOW',self.tk.quit)
+
+    def loginReset(self):
+        self.logvar.set("Idle.")
+
+    def loginReport(self, txt):
+        self.logvar.set(txt)
+        self.after(30000, self.loginReset)
+
+    def login(self):
+        host = self.hostname.get()
+        port = self.port.get()
+        service = self.service.get()
+        try:
+            port = int(port)
+        except:
+            pass
+        user = self.username.get()
+        pswd = self.password.get()
+        pb.connect(host, port, user, pswd, service,
+                   client=self.pbReferenceable).addCallback(self.pbCallback).addErrback(
+            self.couldNotConnect)
+
+    def couldNotConnect(self,f):
+        self.loginReport("could not connect:"+f.getErrorMessage())
+
+if __name__=="__main__":
+    root=Tk()
+    o=CList(root,["Username","Online","Auto-Logon","Gateway"])
+    o.pack()
+    for i in range(0,16,4):
+        o.insert(END,[i,i+1,i+2,i+3])
+    mainloop()
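ProgressBar above builds a Canvas inside a Frame but never packs the frame itself; the caller places bar.frame and then drives the widget through updateProgress(), which clamps the value to [min, max] and redraws the rectangle and label. A small sketch, assuming a plain Tk root as in the CList demo:

from Tkinter import Tk
from twisted.spread.ui.tkutil import ProgressBar

root = Tk()
bar = ProgressBar(root, value=0, labelFormat="%d%%")
bar.frame.pack(fill='x')        # the Canvas is already packed inside bar.frame
for pct in range(0, 101, 25):
    bar.updateProgress(pct)     # clamp the value, redraw the bar and the label
# root.mainloop()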
diff --git a/ThirdParty/Twisted/twisted/spread/util.py b/ThirdParty/Twisted/twisted/spread/util.py
new file mode 100644
index 0000000..3c529b4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/spread/util.py
@@ -0,0 +1,215 @@
+# -*- test-case-name: twisted.test.test_pb -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Utility classes for spread.
+"""
+
+from twisted.internet import defer
+from twisted.python.failure import Failure
+from twisted.spread import pb
+from twisted.protocols import basic
+from twisted.internet import interfaces
+
+from zope.interface import implements
+
+
+class LocalMethod:
+    def __init__(self, local, name):
+        self.local = local
+        self.name = name
+
+    def __call__(self, *args, **kw):
+        return self.local.callRemote(self.name, *args, **kw)
+
+
+class LocalAsRemote:
+    """
+    A class useful for emulating the effects of remote behavior locally.
+    """
+    reportAllTracebacks = 1
+
+    def callRemote(self, name, *args, **kw):
+        """
+        Call a specially-designated local method.
+
+        self.callRemote('x') will first try to invoke a method named
+        sync_x and return its result (which should probably be a
+        Deferred).  Second, it will look for a method called async_x,
+        which will be called and then have its result (or Failure)
+        automatically wrapped in a Deferred.
+        """
+        if hasattr(self, 'sync_'+name):
+            return getattr(self, 'sync_'+name)(*args, **kw)
+        try:
+            method = getattr(self, "async_" + name)
+            return defer.succeed(method(*args, **kw))
+        except:
+            f = Failure()
+            if self.reportAllTracebacks:
+                f.printTraceback()
+            return defer.fail(f)
+
+    def remoteMethod(self, name):
+        return LocalMethod(self, name)
+
+
+class LocalAsyncForwarder:
+    """
+    A class useful for forwarding a locally-defined interface.
+    """
+
+    def __init__(self, forwarded, interfaceClass, failWhenNotImplemented=0):
+        assert interfaceClass.providedBy(forwarded)
+        self.forwarded = forwarded
+        self.interfaceClass = interfaceClass
+        self.failWhenNotImplemented = failWhenNotImplemented
+
+    def _callMethod(self, method, *args, **kw):
+        return getattr(self.forwarded, method)(*args, **kw)
+
+    def callRemote(self, method, *args, **kw):
+        if self.interfaceClass.queryDescriptionFor(method):
+            result = defer.maybeDeferred(self._callMethod, method, *args, **kw)
+            return result
+        elif self.failWhenNotImplemented:
+            return defer.fail(
+                Failure(NotImplementedError,
+                        "No Such Method in Interface: %s" % method))
+        else:
+            return defer.succeed(None)
+
+
+class Pager:
+    """
+    I am an object which pages out information.
+    """
+    def __init__(self, collector, callback=None, *args, **kw):
+        """
+        Create a pager with a Reference to a remote collector and
+        an optional callable to invoke upon completion.
+        """
+        if callable(callback):
+            self.callback = callback
+            self.callbackArgs = args
+            self.callbackKeyword = kw
+        else:
+            self.callback = None
+        self._stillPaging = 1
+        self.collector = collector
+        collector.broker.registerPageProducer(self)
+
+    def stillPaging(self):
+        """
+        (internal) Method called by Broker.
+        """
+        if not self._stillPaging:
+            self.collector.callRemote("endedPaging")
+            if self.callback is not None:
+                self.callback(*self.callbackArgs, **self.callbackKeyword)
+        return self._stillPaging
+
+    def sendNextPage(self):
+        """
+        (internal) Method called by Broker.
+        """
+        self.collector.callRemote("gotPage", self.nextPage())
+
+    def nextPage(self):
+        """
+        Override this to return an object to be sent to my collector.
+        """
+        raise NotImplementedError()
+
+    def stopPaging(self):
+        """
+        Call this when you're done paging.
+        """
+        self._stillPaging = 0
+
+
+class StringPager(Pager):
+    """
+    A simple pager that splits a string into chunks.
+    """
+    def __init__(self, collector, st, chunkSize=8192, callback=None, *args, **kw):
+        self.string = st
+        self.pointer = 0
+        self.chunkSize = chunkSize
+        Pager.__init__(self, collector, callback, *args, **kw)
+
+    def nextPage(self):
+        val = self.string[self.pointer:self.pointer+self.chunkSize]
+        self.pointer += self.chunkSize
+        if self.pointer >= len(self.string):
+            self.stopPaging()
+        return val
+
+
+class FilePager(Pager):
+    """
+    Reads a file in chunks and sends the chunks as they come.
+    """
+    implements(interfaces.IConsumer)
+
+    def __init__(self, collector, fd, callback=None, *args, **kw):
+        self.chunks = []
+        Pager.__init__(self, collector, callback, *args, **kw)
+        self.startProducing(fd)
+
+    def startProducing(self, fd):
+        self.deferred = basic.FileSender().beginFileTransfer(fd, self)
+        self.deferred.addBoth(lambda x : self.stopPaging())
+
+    def registerProducer(self, producer, streaming):
+        self.producer = producer
+        if not streaming:
+            self.producer.resumeProducing()
+
+    def unregisterProducer(self):
+        self.producer = None
+
+    def write(self, chunk):
+        self.chunks.append(chunk)
+
+    def sendNextPage(self):
+        """
+        Get the first chunk read and send it to the collector.
+        """
+        if not self.chunks:
+            return
+        val = self.chunks.pop(0)
+        self.producer.resumeProducing()
+        self.collector.callRemote("gotPage", val)
+
+
+# Utility paging stuff.
+class CallbackPageCollector(pb.Referenceable):
+    """
+    I receive pages from the peer. You may instantiate a Pager with a
+    remote reference to me. I will call the callback with a list of pages
+    once they are all received.
+    """
+    def __init__(self, callback):
+        self.pages = []
+        self.callback = callback
+
+    def remote_gotPage(self, page):
+        self.pages.append(page)
+
+    def remote_endedPaging(self):
+        self.callback(self.pages)
+
+
+def getAllPages(referenceable, methodName, *args, **kw):
+    """
+    A utility method that will call a remote method which expects a
+    PageCollector as the first argument.
+    """
+    d = defer.Deferred()
+    referenceable.callRemote(methodName, CallbackPageCollector(d.callback), *args, **kw)
+    return d
+
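LocalAsRemote above turns callRemote('x') into a lookup of a local sync_x method (whose result is returned as-is) or, failing that, an async_x method (whose result or Failure is wrapped in a Deferred). A minimal sketch of that convention; the Echo subclass and its method names are illustrative:

from twisted.spread.util import LocalAsRemote

class Echo(LocalAsRemote):
    def sync_upper(self, text):
        # Found first: the result is handed back directly.
        return text.upper()
    def async_reverse(self, text):
        # Fallback: the result is wrapped in an already-fired Deferred.
        return text[::-1]

e = Echo()
e.callRemote('upper', 'hi')        # 'HI'
d = e.callRemote('reverse', 'hi')  # Deferred that fires with 'ih'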
diff --git a/ThirdParty/Twisted/twisted/tap/__init__.py b/ThirdParty/Twisted/twisted/tap/__init__.py
new file mode 100644
index 0000000..3736107
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/tap/__init__.py
@@ -0,0 +1,10 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+
+Twisted TAP: Twisted Application Persistence builders for other Twisted servers.
+
+"""
diff --git a/ThirdParty/Twisted/twisted/tap/ftp.py b/ThirdParty/Twisted/twisted/tap/ftp.py
new file mode 100644
index 0000000..735ab4b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/tap/ftp.py
@@ -0,0 +1,69 @@
+# -*- test-case-name: twisted.test.test_ftp_options -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+I am the support module for making an FTP server with twistd.
+"""
+
+from twisted.application import internet
+from twisted.cred import portal, checkers, strcred
+from twisted.protocols import ftp
+
+from twisted.python import usage, deprecate, versions
+
+import warnings
+
+
+
+class Options(usage.Options, strcred.AuthOptionMixin):
+    synopsis = """[options].
+    WARNING: This FTP server is probably INSECURE; do not use it.
+    """
+    optParameters = [
+        ["port", "p", "2121",              "set the port number"],
+        ["root", "r", "/usr/local/ftp",    "define the root of the ftp-site."],
+        ["userAnonymous", "", "anonymous", "Name of the anonymous user."]
+    ]
+
+    compData = usage.Completions(
+        optActions={"root": usage.CompleteDirs(descr="root of the ftp site")}
+        )
+
+    longdesc = ''
+
+    def __init__(self, *a, **kw):
+        usage.Options.__init__(self, *a, **kw)
+        self.addChecker(checkers.AllowAnonymousAccess())
+
+
+    def opt_password_file(self, filename):
+        """
+        Specify a file containing username:password login info for
+        authenticated connections. (DEPRECATED; see --help-auth instead)
+        """
+        self['password-file'] = filename
+        msg = deprecate.getDeprecationWarningString(
+            self.opt_password_file, versions.Version('Twisted', 11, 1, 0))
+        warnings.warn(msg, category=DeprecationWarning, stacklevel=2)
+        self.addChecker(checkers.FilePasswordDB(filename, cache=True))
+
+
+
+def makeService(config):
+    f = ftp.FTPFactory()
+
+    r = ftp.FTPRealm(config['root'])
+    p = portal.Portal(r, config.get('credCheckers', []))
+
+    f.tld = config['root']
+    f.userAnonymous = config['userAnonymous']
+    f.portal = p
+    f.protocol = ftp.FTP
+
+    try:
+        portno = int(config['port'])
+    except KeyError:
+        portno = 2121
+    return internet.TCPServer(portno, f)
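Each twisted.tap module added here follows the same twistd contract: an Options subclass parsed from the command line and a makeService(config) that builds the service from it. A sketch of exercising the FTP module directly, which is only an illustration since twistd normally does this parsing and service start-up:

from twisted.tap import ftp

config = ftp.Options()
config.parseOptions(["--port", "2121", "--root", "/tmp"])
service = ftp.makeService(config)   # a TCPServer wrapping an FTPFactory
# twistd would now call service.startService() and run the reactor.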
diff --git a/ThirdParty/Twisted/twisted/tap/manhole.py b/ThirdParty/Twisted/twisted/tap/manhole.py
new file mode 100644
index 0000000..8d727fa
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/tap/manhole.py
@@ -0,0 +1,54 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+I am the support module for making a manhole server with twistd.
+"""
+
+from twisted.manhole import service
+from twisted.spread import pb
+from twisted.python import usage, util
+from twisted.cred import portal, checkers
+from twisted.application import strports
+import os, sys
+
+class Options(usage.Options):
+    synopsis = "[options]"
+    optParameters = [
+           ["user", "u", "admin", "Name of user to allow to log in"],
+           ["port", "p", str(pb.portno), "Port to listen on"],
+    ]
+
+    optFlags = [
+        ["tracebacks", "T", "Allow tracebacks to be sent over the network"],
+    ]
+
+    compData = usage.Completions(
+        optActions={"user": usage.CompleteUsernames()}
+        )
+
+    def opt_password(self, password):
+        """Required.  '-' will prompt or read a password from stdin.
+        """
+        # If standard input is a terminal, I prompt for a password and
+        # confirm it.  Otherwise, I use the first line from standard
+        # input, stripping off a trailing newline if there is one.
+        if password in ('', '-'):
+            self['password'] = util.getPassword(confirm=1)
+        else:
+            self['password'] = password
+    opt_w = opt_password
+
+    def postOptions(self):
+        if not self.has_key('password'):
+            self.opt_password('-')
+
+def makeService(config):
+    port, user, password = config['port'], config['user'], config['password']
+    p = portal.Portal(
+        service.Realm(service.Service(config["tracebacks"], config.get('namespace'))),
+        [checkers.InMemoryUsernamePasswordDatabaseDontUse(**{user: password})]
+    )
+    return strports.service(port, pb.PBServerFactory(p, config["tracebacks"]))
diff --git a/ThirdParty/Twisted/twisted/tap/portforward.py b/ThirdParty/Twisted/twisted/tap/portforward.py
new file mode 100644
index 0000000..2ad3f36
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/tap/portforward.py
@@ -0,0 +1,27 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Support module for making a port forwarder with twistd.
+"""
+from twisted.protocols import portforward
+from twisted.python import usage
+from twisted.application import strports
+
+class Options(usage.Options):
+    synopsis = "[options]"
+    longdesc = 'Port Forwarder.'
+    optParameters = [
+          ["port", "p", "6666","Set the port number."],
+          ["host", "h", "localhost","Set the host."],
+          ["dest_port", "d", 6665,"Set the destination port."],
+    ]
+
+    compData = usage.Completions(
+        optActions={"host": usage.CompleteHostnames()}
+        )
+
+def makeService(config):
+    f = portforward.ProxyFactory(config['host'], int(config['dest_port']))
+    return strports.service(config['port'], f)
diff --git a/ThirdParty/Twisted/twisted/tap/socks.py b/ThirdParty/Twisted/twisted/tap/socks.py
new file mode 100644
index 0000000..e0780ad
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/tap/socks.py
@@ -0,0 +1,38 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+I am a support module for making SOCKSv4 servers with twistd.
+"""
+
+from twisted.protocols import socks
+from twisted.python import usage
+from twisted.application import internet
+import sys
+
+class Options(usage.Options):
+    synopsis = "[-i <interface>] [-p <port>] [-l <file>]"
+    optParameters = [["interface", "i", "127.0.0.1", "local interface to which we listen"],
+                  ["port", "p", 1080, "Port on which to listen"],
+                  ["log", "l", None, "file to log connection data to"]]
+
+    compData = usage.Completions(
+        optActions={"log": usage.CompleteFiles("*.log"),
+                    "interface": usage.CompleteNetInterfaces()}
+        )
+
+    longdesc = "Makes a SOCKSv4 server."
+
+def makeService(config):
+    if config["interface"] != "127.0.0.1":
+        print
+        print "WARNING:"
+        print "  You have chosen to listen on a non-local interface."
+        print "  This may allow intruders to access your local network"
+        print "  if you run this on a firewall."
+        print
+    t = socks.SOCKSv4Factory(config['log'])
+    portno = int(config['port'])
+    return internet.TCPServer(portno, t, interface=config['interface'])
diff --git a/ThirdParty/Twisted/twisted/tap/telnet.py b/ThirdParty/Twisted/twisted/tap/telnet.py
new file mode 100644
index 0000000..bc0c802
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/tap/telnet.py
@@ -0,0 +1,32 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Support module for making a telnet server with twistd.
+"""
+
+from twisted.manhole import telnet
+from twisted.python import usage
+from twisted.application import strports
+
+class Options(usage.Options):
+    synopsis = "[options]"
+    longdesc = "Makes a telnet server to a Python shell."
+    optParameters = [
+         ["username", "u", "admin","set the login username"],
+         ["password", "w", "changeme","set the password"],
+         ["port", "p", "4040", "port to listen on"],
+    ]
+
+    compData = usage.Completions(
+        optActions={"username": usage.CompleteUsernames()}
+        )
+
+def makeService(config):
+    t = telnet.ShellFactory()
+    t.username, t.password = config['username'], config['password']
+    s = strports.service(config['port'], t)
+    t.setService(s)
+    return s
diff --git a/ThirdParty/Twisted/twisted/test/__init__.py b/ThirdParty/Twisted/twisted/test/__init__.py
new file mode 100644
index 0000000..ff5a9d5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/__init__.py
@@ -0,0 +1,10 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+
+Twisted Test: Unit Tests for Twisted.
+
+"""
diff --git a/ThirdParty/Twisted/twisted/test/_preamble.py b/ThirdParty/Twisted/twisted/test/_preamble.py
new file mode 100644
index 0000000..e3e794e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/_preamble.py
@@ -0,0 +1,17 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# This makes sure Twisted-using child processes used in the test suite import
+# the correct version of Twisted (ie, the version of Twisted under test).
+
+# This is a copy of the bin/_preamble.py script because it's not clear how to
+# use the functionality for both things without having a copy.
+
+import sys, os
+
+path = os.path.abspath(sys.argv[0])
+while os.path.dirname(path) != path:
+    if os.path.exists(os.path.join(path, 'twisted', '__init__.py')):
+        sys.path.insert(0, path)
+        break
+    path = os.path.dirname(path)
diff --git a/ThirdParty/Twisted/twisted/test/crash_test_dummy.py b/ThirdParty/Twisted/twisted/test/crash_test_dummy.py
new file mode 100644
index 0000000..5a30bd4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/crash_test_dummy.py
@@ -0,0 +1,34 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.python import components
+from zope.interface import implements, Interface
+
+def foo():
+    return 2
+
+class X:
+    def __init__(self, x):
+        self.x = x
+
+    def do(self):
+        #print 'X',self.x,'doing!'
+        pass
+
+
+class XComponent(components.Componentized):
+    pass
+
+class IX(Interface):
+    pass
+
+class XA(components.Adapter):
+    implements(IX)
+
+    def method(self):
+        # Kick start :(
+        pass
+
+components.registerAdapter(XA, X, IX)
diff --git a/ThirdParty/Twisted/twisted/test/iosim.py b/ThirdParty/Twisted/twisted/test/iosim.py
new file mode 100644
index 0000000..afa80f9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/iosim.py
@@ -0,0 +1,270 @@
+# -*- test-case-name: twisted.test.test_amp.TLSTest -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Utilities and helpers for simulating a network.
+"""
+
+import itertools
+
+try:
+    from OpenSSL.SSL import Error as NativeOpenSSLError
+except ImportError:
+    pass
+
+from zope.interface import implements, directlyProvides
+
+from twisted.python.failure import Failure
+from twisted.internet import error
+from twisted.internet import interfaces
+
+class TLSNegotiation:
+    def __init__(self, obj, connectState):
+        self.obj = obj
+        self.connectState = connectState
+        self.sent = False
+        self.readyToSend = connectState
+
+    def __repr__(self):
+        return 'TLSNegotiation(%r)' % (self.obj,)
+
+    def pretendToVerify(self, other, tpt):
+        # Set the transport problems list here?  disconnections?
+        # hmmmmm... need some negative path tests.
+
+        if not self.obj.iosimVerify(other.obj):
+            tpt.disconnectReason = NativeOpenSSLError()
+            tpt.loseConnection()
+
+
+class FakeTransport:
+    """
+    A wrapper around a file-like object to make it behave as a Transport.
+
+    This doesn't actually stream the file to the attached protocol,
+    and is thus useful mainly as a utility for debugging protocols.
+    """
+    implements(interfaces.ITransport,
+               interfaces.ITLSTransport) # ha ha not really
+
+    _nextserial = itertools.count().next
+    closed = 0
+    disconnecting = 0
+    disconnected = 0
+    disconnectReason = error.ConnectionDone("Connection done")
+    producer = None
+    streamingProducer = 0
+    tls = None
+
+    def __init__(self):
+        self.stream = []
+        self.serial = self._nextserial()
+
+    def __repr__(self):
+        return 'FakeTransport<%s,%s,%s>' % (
+            self.isServer and 'S' or 'C', self.serial,
+            self.protocol.__class__.__name__)
+
+    def write(self, data):
+        if self.tls is not None:
+            self.tlsbuf.append(data)
+        else:
+            self.stream.append(data)
+
+    def _checkProducer(self):
+        # Cheating; this is called at "idle" times to allow producers to be
+        # found and dealt with
+        if self.producer:
+            self.producer.resumeProducing()
+
+    def registerProducer(self, producer, streaming):
+        """From abstract.FileDescriptor
+        """
+        self.producer = producer
+        self.streamingProducer = streaming
+        if not streaming:
+            producer.resumeProducing()
+
+    def unregisterProducer(self):
+        self.producer = None
+
+    def stopConsuming(self):
+        self.unregisterProducer()
+        self.loseConnection()
+
+    def writeSequence(self, iovec):
+        self.write("".join(iovec))
+
+    def loseConnection(self):
+        self.disconnecting = True
+
+    def reportDisconnect(self):
+        if self.tls is not None:
+            # We were in the middle of negotiating!  Must have been a TLS problem.
+            err = NativeOpenSSLError()
+        else:
+            err = self.disconnectReason
+        self.protocol.connectionLost(Failure(err))
+
+    def getPeer(self):
+        # XXX: According to ITransport, this should return an IAddress!
+        return 'file', 'file'
+
+    def getHost(self):
+        # XXX: According to ITransport, this should return an IAddress!
+        return 'file'
+
+    def resumeProducing(self):
+        # Never sends data anyways
+        pass
+
+    def pauseProducing(self):
+        # Never sends data anyways
+        pass
+
+    def stopProducing(self):
+        self.loseConnection()
+
+    def startTLS(self, contextFactory, beNormal=True):
+        # Nothing's using this feature yet, but startTLS has an undocumented
+        # second argument which defaults to true; if set to False, servers will
+        # behave like clients and clients will behave like servers.
+        connectState = self.isServer ^ beNormal
+        self.tls = TLSNegotiation(contextFactory, connectState)
+        self.tlsbuf = []
+
+    def getOutBuffer(self):
+        S = self.stream
+        if S:
+            self.stream = []
+            return ''.join(S)
+        elif self.tls is not None:
+            if self.tls.readyToSend:
+                # Only _send_ the TLS negotiation "packet" if I'm ready to.
+                self.tls.sent = True
+                return self.tls
+            else:
+                return None
+        else:
+            return None
+
+    def bufferReceived(self, buf):
+        if isinstance(buf, TLSNegotiation):
+            assert self.tls is not None # By the time you're receiving a
+                                        # negotiation, you have to have called
+                                        # startTLS already.
+            if self.tls.sent:
+                self.tls.pretendToVerify(buf, self)
+                self.tls = None # we're done with the handshake if we've gotten
+                                # this far... although maybe it failed...?
+                # TLS started!  Unbuffer...
+                b, self.tlsbuf = self.tlsbuf, None
+                self.writeSequence(b)
+                directlyProvides(self, interfaces.ISSLTransport)
+            else:
+                # We haven't sent our own TLS negotiation: time to do that!
+                self.tls.readyToSend = True
+        else:
+            self.protocol.dataReceived(buf)
+
+
+
+def makeFakeClient(c):
+    ft = FakeTransport()
+    ft.isServer = False
+    ft.protocol = c
+    return ft
+
+def makeFakeServer(s):
+    ft = FakeTransport()
+    ft.isServer = True
+    ft.protocol = s
+    return ft
+
+class IOPump:
+    """Utility to pump data between clients and servers for protocol testing.
+
+    Perhaps this is a utility worthy of being in protocol.py?
+    """
+    def __init__(self, client, server, clientIO, serverIO, debug):
+        self.client = client
+        self.server = server
+        self.clientIO = clientIO
+        self.serverIO = serverIO
+        self.debug = debug
+
+    def flush(self, debug=False):
+        """Pump until there is no more input or output.
+
+        Returns whether any data was moved.
+        """
+        result = False
+        for x in range(1000):
+            if self.pump(debug):
+                result = True
+            else:
+                break
+        else:
+            assert 0, "Too long"
+        return result
+
+
+    def pump(self, debug=False):
+        """Move data back and forth.
+
+        Returns whether any data was moved.
+        """
+        if self.debug or debug:
+            print '-- GLUG --'
+        sData = self.serverIO.getOutBuffer()
+        cData = self.clientIO.getOutBuffer()
+        self.clientIO._checkProducer()
+        self.serverIO._checkProducer()
+        if self.debug or debug:
+            print '.'
+            # XXX slightly buggy in the face of incremental output
+            if cData:
+                print 'C: '+repr(cData)
+            if sData:
+                print 'S: '+repr(sData)
+        if cData:
+            self.serverIO.bufferReceived(cData)
+        if sData:
+            self.clientIO.bufferReceived(sData)
+        if cData or sData:
+            return True
+        if (self.serverIO.disconnecting and
+            not self.serverIO.disconnected):
+            if self.debug or debug:
+                print '* C'
+            self.serverIO.disconnected = True
+            self.clientIO.disconnecting = True
+            self.clientIO.reportDisconnect()
+            return True
+        if self.clientIO.disconnecting and not self.clientIO.disconnected:
+            if self.debug or debug:
+                print '* S'
+            self.clientIO.disconnected = True
+            self.serverIO.disconnecting = True
+            self.serverIO.reportDisconnect()
+            return True
+        return False
+
+
+def connectedServerAndClient(ServerClass, ClientClass,
+                             clientTransportFactory=makeFakeClient,
+                             serverTransportFactory=makeFakeServer,
+                             debug=False):
+    """Returns a 3-tuple: (client, server, pump)
+    """
+    c = ClientClass()
+    s = ServerClass()
+    cio = clientTransportFactory(c)
+    sio = serverTransportFactory(s)
+    c.makeConnection(cio)
+    s.makeConnection(sio)
+    pump = IOPump(c, s, cio, sio, debug)
+    # kick off server greeting, etc
+    pump.flush()
+    return c, s, pump
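connectedServerAndClient() above wires two freshly constructed protocols to FakeTransports and returns an IOPump; a test then calls pump.flush() after each stimulus to shuttle the buffered bytes both ways without a real reactor. A minimal sketch with a trivial echoing pair (both protocol classes are illustrative):

from twisted.internet import protocol
from twisted.test.iosim import connectedServerAndClient

class EchoServer(protocol.Protocol):
    def dataReceived(self, data):
        self.transport.write(data)

class RecordingClient(protocol.Protocol):
    received = ""
    def dataReceived(self, data):
        self.received += data

client, server, pump = connectedServerAndClient(EchoServer, RecordingClient)
client.transport.write("ping")
pump.flush()        # client -> server, then the echo back to the client
assert client.received == "ping"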
diff --git a/ThirdParty/Twisted/twisted/test/mock_win32process.py b/ThirdParty/Twisted/twisted/test/mock_win32process.py
new file mode 100644
index 0000000..b70bdca
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/mock_win32process.py
@@ -0,0 +1,48 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This is a mock win32process module.
+
+The purpose of this module is to mock process creation for the PID test.
+
+CreateProcess(...) will spawn a process, and always return a PID of 42.
+"""
+
+import win32process
+GetExitCodeProcess = win32process.GetExitCodeProcess
+STARTUPINFO = win32process.STARTUPINFO
+
+STARTF_USESTDHANDLES = win32process.STARTF_USESTDHANDLES
+
+
+def CreateProcess(appName,
+                  cmdline,
+                  procSecurity,
+                  threadSecurity,
+                  inheritHandles,
+                  newEnvironment,
+                  env,
+                  workingDir,
+                  startupInfo):
+    """
+    This function mocks the generated PID aspect of the win32process.CreateProcess
+    function.
+      - the true win32process.CreateProcess is called
+      - its return values are harvested in a tuple
+      - all return values from CreateProcess are passed back to the calling
+        function except for the PID, which is hardcoded to 42
+    """
+
+    hProcess, hThread, dwPid, dwTid = win32process.CreateProcess(
+                      appName,
+                      cmdline,
+                      procSecurity,
+                      threadSecurity,
+                      inheritHandles,
+                      newEnvironment,
+                      env,
+                      workingDir,
+                      startupInfo)
+    dwPid = 42
+    return (hProcess, hThread, dwPid, dwTid)
diff --git a/ThirdParty/Twisted/twisted/test/myrebuilder1.py b/ThirdParty/Twisted/twisted/test/myrebuilder1.py
new file mode 100644
index 0000000..f53e8c7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/myrebuilder1.py
@@ -0,0 +1,15 @@
+
+class A:
+    def a(self):
+        return 'a'
+try:
+    object
+except NameError:
+    pass
+else:
+    class B(object, A):
+        def b(self):
+            return 'b'
+class Inherit(A):
+    def a(self):
+        return 'c'
diff --git a/ThirdParty/Twisted/twisted/test/myrebuilder2.py b/ThirdParty/Twisted/twisted/test/myrebuilder2.py
new file mode 100644
index 0000000..d2a0d10
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/myrebuilder2.py
@@ -0,0 +1,16 @@
+
+class A:
+    def a(self):
+        return 'b'
+try:
+    object
+except NameError:
+    pass
+else:
+    class B(A, object):
+        def b(self):
+            return 'c'
+
+class Inherit(A):
+    def a(self):
+        return 'd'
diff --git a/ThirdParty/Twisted/twisted/test/plugin_basic.py b/ThirdParty/Twisted/twisted/test/plugin_basic.py
new file mode 100644
index 0000000..a4c297b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/plugin_basic.py
@@ -0,0 +1,57 @@
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# Don't change the docstring, it's part of the tests
+"""
+I'm a test drop-in.  The plugin system's unit tests use me.  No one
+else should.
+"""
+
+from zope.interface import classProvides
+
+from twisted.plugin import IPlugin
+from twisted.test.test_plugin import ITestPlugin, ITestPlugin2
+
+
+
+class TestPlugin:
+    """
+    A plugin used solely for testing purposes.
+    """
+
+    classProvides(ITestPlugin,
+                  IPlugin)
+
+    def test1():
+        pass
+    test1 = staticmethod(test1)
+
+
+
+class AnotherTestPlugin:
+    """
+    Another plugin used solely for testing purposes.
+    """
+
+    classProvides(ITestPlugin2,
+                  IPlugin)
+
+    def test():
+        pass
+    test = staticmethod(test)
+
+
+
+class ThirdTestPlugin:
+    """
+    Another plugin used solely for testing purposes.
+    """
+
+    classProvides(ITestPlugin2,
+                  IPlugin)
+
+    def test():
+        pass
+    test = staticmethod(test)
+
diff --git a/ThirdParty/Twisted/twisted/test/plugin_extra1.py b/ThirdParty/Twisted/twisted/test/plugin_extra1.py
new file mode 100644
index 0000000..9e4c8d4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/plugin_extra1.py
@@ -0,0 +1,23 @@
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test plugin used in L{twisted.test.test_plugin}.
+"""
+
+from zope.interface import classProvides
+
+from twisted.plugin import IPlugin
+from twisted.test.test_plugin import ITestPlugin
+
+
+
+class FourthTestPlugin:
+    classProvides(ITestPlugin,
+                  IPlugin)
+
+    def test1():
+        pass
+    test1 = staticmethod(test1)
+
diff --git a/ThirdParty/Twisted/twisted/test/plugin_extra2.py b/ThirdParty/Twisted/twisted/test/plugin_extra2.py
new file mode 100644
index 0000000..a6b3f09
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/plugin_extra2.py
@@ -0,0 +1,35 @@
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test plugin used in L{twisted.test.test_plugin}.
+"""
+
+from zope.interface import classProvides
+
+from twisted.plugin import IPlugin
+from twisted.test.test_plugin import ITestPlugin
+
+
+
+class FourthTestPlugin:
+    classProvides(ITestPlugin,
+                  IPlugin)
+
+    def test1():
+        pass
+    test1 = staticmethod(test1)
+
+
+
+class FifthTestPlugin:
+    """
+    More documentation: I hate you.
+    """
+    classProvides(ITestPlugin,
+                  IPlugin)
+
+    def test1():
+        pass
+    test1 = staticmethod(test1)
diff --git a/ThirdParty/Twisted/twisted/test/process_cmdline.py b/ThirdParty/Twisted/twisted/test/process_cmdline.py
new file mode 100644
index 0000000..bd250de
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_cmdline.py
@@ -0,0 +1,5 @@
+"""Write to stdout the command line args it received, one per line."""
+
+import sys
+for x in sys.argv[1:]:
+    print x
diff --git a/ThirdParty/Twisted/twisted/test/process_echoer.py b/ThirdParty/Twisted/twisted/test/process_echoer.py
new file mode 100644
index 0000000..8a7bf6d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_echoer.py
@@ -0,0 +1,11 @@
+"""Write back all data it receives."""
+
+import sys
+
+data = sys.stdin.read(1)
+while data:
+    sys.stdout.write(data)
+    sys.stdout.flush()
+    data = sys.stdin.read(1)
+sys.stderr.write("byebye")
+sys.stderr.flush()
diff --git a/ThirdParty/Twisted/twisted/test/process_fds.py b/ThirdParty/Twisted/twisted/test/process_fds.py
new file mode 100644
index 0000000..e2273c1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_fds.py
@@ -0,0 +1,40 @@
+
+"""Write to a handful of file descriptors, to test the childFDs= argument of
+reactor.spawnProcess()
+"""
+
+import os, sys
+
+debug = 0
+
+if debug: stderr = os.fdopen(2, "w")
+
+if debug: print >>stderr, "this is stderr"
+
+abcd = os.read(0, 4)
+if debug: print >>stderr, "read(0):", abcd
+if abcd != "abcd":
+    sys.exit(1)
+
+if debug: print >>stderr, "os.write(1, righto)"
+
+os.write(1, "righto")
+
+efgh = os.read(3, 4)
+if debug: print >>stderr, "read(3):", efgh
+if efgh != "efgh":
+    sys.exit(2)
+
+if debug: print >>stderr, "os.close(4)"
+os.close(4)
+
+eof = os.read(5, 4)
+if debug: print >>stderr, "read(5):", eof
+if eof != "":
+    sys.exit(3)
+
+if debug: print >>stderr, "os.write(1, closed)"
+os.write(1, "closed")
+
+if debug: print >>stderr, "sys.exit(0)"
+sys.exit(0)
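process_fds.py reads fds 0, 3 and 5 and reports back on fd 1, so a driver has to hand reactor.spawnProcess() a childFDs mapping that opens pipes for exactly those descriptors. A rough sketch of such a driver; the FDProto class, the relative script path and the exact mapping are assumptions for illustration, not taken from test_process:

import sys
from twisted.internet import reactor, protocol

class FDProto(protocol.ProcessProtocol):
    def connectionMade(self):
        self.transport.writeToChild(0, "abcd")   # the script checks these bytes
        self.transport.writeToChild(3, "efgh")
        self.transport.closeChildFD(5)           # the script expects EOF on fd 5
    def childDataReceived(self, childFD, data):
        print childFD, repr(data)                # "righto" and "closed" arrive on fd 1
    def processEnded(self, reason):
        reactor.stop()

reactor.spawnProcess(FDProto(), sys.executable,
                     [sys.executable, "process_fds.py"],
                     childFDs={0: "w", 1: "r", 3: "w", 4: "w", 5: "w"})
reactor.run()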
diff --git a/ThirdParty/Twisted/twisted/test/process_linger.py b/ThirdParty/Twisted/twisted/test/process_linger.py
new file mode 100644
index 0000000..a95a8d2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_linger.py
@@ -0,0 +1,17 @@
+
+"""Write to a file descriptor and then close it, waiting a few seconds before
+quitting. This serves to make sure SIGCHLD is actually being noticed.
+"""
+
+import os, sys, time
+
+print "here is some text"
+time.sleep(1)
+print "goodbye"
+os.close(1)
+os.close(2)
+
+time.sleep(2)
+
+sys.exit(0)
+
diff --git a/ThirdParty/Twisted/twisted/test/process_reader.py b/ThirdParty/Twisted/twisted/test/process_reader.py
new file mode 100644
index 0000000..be37a7c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_reader.py
@@ -0,0 +1,12 @@
+"""Script used by test_process.TestTwoProcesses"""
+
+# run until stdin is closed, then quit
+
+import sys
+
+while 1:
+    d = sys.stdin.read()
+    if len(d) == 0:
+        sys.exit(0)
+        
+
diff --git a/ThirdParty/Twisted/twisted/test/process_signal.py b/ThirdParty/Twisted/twisted/test/process_signal.py
new file mode 100644
index 0000000..f2ff108
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_signal.py
@@ -0,0 +1,8 @@
+import sys, signal
+
+signal.signal(signal.SIGINT, signal.SIG_DFL)
+if getattr(signal, "SIGHUP", None) is not None:
+    signal.signal(signal.SIGHUP, signal.SIG_DFL)
+print 'ok, signal us'
+sys.stdin.read()
+sys.exit(1)
diff --git a/ThirdParty/Twisted/twisted/test/process_stdinreader.py b/ThirdParty/Twisted/twisted/test/process_stdinreader.py
new file mode 100644
index 0000000..f060db4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_stdinreader.py
@@ -0,0 +1,23 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""Script used by twisted.test.test_process on win32."""
+
+import sys, time, os, msvcrt
+msvcrt.setmode(sys.stdout.fileno(), os.O_BINARY)
+msvcrt.setmode(sys.stderr.fileno(), os.O_BINARY)
+
+
+sys.stdout.write("out\n")
+sys.stdout.flush()
+sys.stderr.write("err\n")
+sys.stderr.flush()
+
+data = sys.stdin.read()
+
+sys.stdout.write(data)
+sys.stdout.write("\nout\n")
+sys.stderr.write("err\n")
+
+sys.stdout.flush()
+sys.stderr.flush()
diff --git a/ThirdParty/Twisted/twisted/test/process_tester.py b/ThirdParty/Twisted/twisted/test/process_tester.py
new file mode 100644
index 0000000..d9779b1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_tester.py
@@ -0,0 +1,37 @@
+"""Test program for processes."""
+
+import sys, os
+
+test_file_match = "process_test.log.*"
+test_file = "process_test.log.%d" % os.getpid()
+
+def main():
+    f = open(test_file, 'wb')
+    
+    # stage 1
+    bytes = sys.stdin.read(4)
+    f.write("one: %r\n" % bytes)
+    # stage 2
+    sys.stdout.write(bytes)
+    sys.stdout.flush()
+    os.close(sys.stdout.fileno())
+    
+    # and a one, and a two, and a...
+    bytes = sys.stdin.read(4)
+    f.write("two: %r\n" % bytes)
+    
+    # stage 3
+    sys.stderr.write(bytes)
+    sys.stderr.flush()
+    os.close(sys.stderr.fileno())
+    
+    # stage 4
+    bytes = sys.stdin.read(4)
+    f.write("three: %r\n" % bytes)
+
+    # exit with status code 23
+    sys.exit(23)
+
+
+if __name__ == '__main__':
+    main()
diff --git a/ThirdParty/Twisted/twisted/test/process_tty.py b/ThirdParty/Twisted/twisted/test/process_tty.py
new file mode 100644
index 0000000..9dab638
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_tty.py
@@ -0,0 +1,6 @@
+"""Test to make sure we can open /dev/tty"""
+
+f = open("/dev/tty", "r+")
+a = f.readline()
+f.write(a)
+f.close()
diff --git a/ThirdParty/Twisted/twisted/test/process_twisted.py b/ThirdParty/Twisted/twisted/test/process_twisted.py
new file mode 100644
index 0000000..2071090
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/process_twisted.py
@@ -0,0 +1,43 @@
+"""A process that reads from stdin and out using Twisted."""
+
+### Twisted Preamble
+# This makes sure that users don't have to set up their environment
+# specially in order to run these programs from bin/.
+import sys, os
+pos = os.path.abspath(sys.argv[0]).find(os.sep+'Twisted')
+if pos != -1:
+    sys.path.insert(0, os.path.abspath(sys.argv[0])[:pos+8])
+sys.path.insert(0, os.curdir)
+### end of preamble
+
+
+from twisted.python import log
+from zope.interface import implements
+from twisted.internet import interfaces
+
+log.startLogging(sys.stderr)
+
+from twisted.internet import protocol, reactor, stdio
+
+
+class Echo(protocol.Protocol):
+    implements(interfaces.IHalfCloseableProtocol)
+    
+    def connectionMade(self):
+        print "connection made"
+    
+    def dataReceived(self, data):
+        self.transport.write(data)
+
+    def readConnectionLost(self):
+        print "readConnectionLost"
+        self.transport.loseConnection()
+    def writeConnectionLost(self):
+        print "writeConnectionLost"
+    
+    def connectionLost(self, reason):
+        print "connectionLost", reason
+        reactor.stop()
+
+stdio.StandardIO(Echo())
+reactor.run()
diff --git a/ThirdParty/Twisted/twisted/test/proto_helpers.py b/ThirdParty/Twisted/twisted/test/proto_helpers.py
new file mode 100644
index 0000000..9529949
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/proto_helpers.py
@@ -0,0 +1,573 @@
+# -*- test-case-name: twisted.test.test_stringtransport -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Assorted functionality which is commonly useful when writing unit tests.
+"""
+
+from __future__ import division, absolute_import
+
+from socket import AF_INET, AF_INET6
+from io import BytesIO
+
+from zope.interface import implementer
+
+from twisted.python.compat import unicode
+from twisted.internet.interfaces import (
+    ITransport, IConsumer, IPushProducer, IConnector)
+from twisted.internet.interfaces import (
+    IReactorTCP, IReactorSSL, IReactorUNIX, IReactorSocket)
+from twisted.internet.interfaces import IListeningPort
+from twisted.internet.abstract import isIPv6Address
+from twisted.internet.error import UnsupportedAddressFamily
+from twisted.protocols import basic
+from twisted.internet import protocol, error, address
+
+from twisted.internet.address import IPv4Address, UNIXAddress, IPv6Address
+
+
+class AccumulatingProtocol(protocol.Protocol):
+    """
+    L{AccumulatingProtocol} is an L{IProtocol} implementation which collects
+    the data delivered to it and can fire a Deferred when it is connected or
+    disconnected.
+
+    @ivar made: A flag indicating whether C{connectionMade} has been called.
+    @ivar data: Bytes giving all the data passed to C{dataReceived}.
+    @ivar closed: A flag indicating whether C{connectionLost} has been called.
+    @ivar closedReason: The value of the I{reason} parameter passed to
+        C{connectionLost}.
+    @ivar closedDeferred: If set to a L{Deferred}, this will be fired when
+        C{connectionLost} is called.
+    """
+    made = closed = 0
+    closedReason = None
+
+    closedDeferred = None
+
+    data = b""
+
+    factory = None
+
+    def connectionMade(self):
+        self.made = 1
+        if (self.factory is not None and
+            self.factory.protocolConnectionMade is not None):
+            d = self.factory.protocolConnectionMade
+            self.factory.protocolConnectionMade = None
+            d.callback(self)
+
+    def dataReceived(self, data):
+        self.data += data
+
+    def connectionLost(self, reason):
+        self.closed = 1
+        self.closedReason = reason
+        if self.closedDeferred is not None:
+            d, self.closedDeferred = self.closedDeferred, None
+            d.callback(None)
+
+
+class LineSendingProtocol(basic.LineReceiver):
+    lostConn = False
+
+    def __init__(self, lines, start = True):
+        self.lines = lines[:]
+        self.response = []
+        self.start = start
+
+    def connectionMade(self):
+        if self.start:
+            for line in self.lines:
+                self.sendLine(line)
+
+    def lineReceived(self, line):
+        if not self.start:
+            for line in self.lines:
+                self.sendLine(line)
+            self.lines = []
+        self.response.append(line)
+
+    def connectionLost(self, reason):
+        self.lostConn = True
+
+
+class FakeDatagramTransport:
+    noAddr = object()
+
+    def __init__(self):
+        self.written = []
+
+    def write(self, packet, addr=noAddr):
+        self.written.append((packet, addr))
+
+
+
+@implementer(ITransport, IConsumer, IPushProducer)
+class StringTransport:
+    """
+    A transport implementation which buffers data in memory and keeps track of
+    its other state without providing any behavior.
+
+    L{StringTransport} has a number of attributes which are not part of any of
+    the interfaces it claims to implement.  These attributes are provided for
+    testing purposes.  Implementation code should not use any of these
+    attributes; they are not provided by other transports.
+
+    @ivar disconnecting: A C{bool} which is C{False} until L{loseConnection} is
+        called, then C{True}.
+
+    @ivar producer: If a producer is currently registered, C{producer} is a
+        reference to it.  Otherwise, C{None}.
+
+    @ivar streaming: If a producer is currently registered, C{streaming} refers
+        to the value of the second parameter passed to C{registerProducer}.
+
+    @ivar hostAddr: C{None} or an object which will be returned as the host
+        address of this transport.  If C{None}, a nasty tuple will be returned
+        instead.
+
+    @ivar peerAddr: C{None} or an object which will be returned as the peer
+        address of this transport.  If C{None}, a nasty tuple will be returned
+        instead.
+
+    @ivar producerState: The state of this L{StringTransport} in its capacity
+        as an L{IPushProducer}.  One of C{'producing'}, C{'paused'}, or
+        C{'stopped'}.
+
+    @ivar io: A L{BytesIO} which holds the data which has been written to this
+        transport since the last call to L{clear}.  Use L{value} instead of
+        accessing this directly.
+    """
+
+    disconnecting = False
+
+    producer = None
+    streaming = None
+
+    hostAddr = None
+    peerAddr = None
+
+    producerState = 'producing'
+
+    def __init__(self, hostAddress=None, peerAddress=None):
+        self.clear()
+        if hostAddress is not None:
+            self.hostAddr = hostAddress
+        if peerAddress is not None:
+            self.peerAddr = peerAddress
+        self.connected = True
+
+    def clear(self):
+        """
+        Discard all data written to this transport so far.
+
+        This is not a transport method.  It is intended for tests.  Do not use
+        it in implementation code.
+        """
+        self.io = BytesIO()
+
+
+    def value(self):
+        """
+        Retrieve all data which has been buffered by this transport.
+
+        This is not a transport method.  It is intended for tests.  Do not use
+        it in implementation code.
+
+        @return: A C{bytes} giving all data written to this transport since the
+            last call to L{clear}.
+        @rtype: C{bytes}
+        """
+        return self.io.getvalue()
+
+
+    # ITransport
+    def write(self, data):
+        if isinstance(data, unicode): # no, really, I mean it
+            raise TypeError("Data must not be unicode")
+        self.io.write(data)
+
+
+    def writeSequence(self, data):
+        self.io.write(b''.join(data))
+
+
+    def loseConnection(self):
+        """
+        Close the connection. Does nothing besides toggle the C{disconnecting}
+        instance variable to C{True}.
+        """
+        self.disconnecting = True
+
+
+    def getPeer(self):
+        if self.peerAddr is None:
+            return address.IPv4Address('TCP', '192.168.1.1', 54321)
+        return self.peerAddr
+
+
+    def getHost(self):
+        if self.hostAddr is None:
+            return address.IPv4Address('TCP', '10.0.0.1', 12345)
+        return self.hostAddr
+
+
+    # IConsumer
+    def registerProducer(self, producer, streaming):
+        if self.producer is not None:
+            raise RuntimeError("Cannot register two producers")
+        self.producer = producer
+        self.streaming = streaming
+
+
+    def unregisterProducer(self):
+        if self.producer is None:
+            raise RuntimeError(
+                "Cannot unregister a producer unless one is registered")
+        self.producer = None
+        self.streaming = None
+
+
+    # IPushProducer
+    def _checkState(self):
+        if self.disconnecting:
+            raise RuntimeError(
+                "Cannot resume producing after loseConnection")
+        if self.producerState == 'stopped':
+            raise RuntimeError("Cannot resume a stopped producer")
+
+
+    def pauseProducing(self):
+        self._checkState()
+        self.producerState = 'paused'
+
+
+    def stopProducing(self):
+        self.producerState = 'stopped'
+
+
+    def resumeProducing(self):
+        self._checkState()
+        self.producerState = 'producing'
+
+
+
+class StringTransportWithDisconnection(StringTransport):
+    def loseConnection(self):
+        if self.connected:
+            self.connected = False
+            self.protocol.connectionLost(error.ConnectionDone("Bye."))
+
+
+
+class StringIOWithoutClosing(BytesIO):
+    """
+    A BytesIO that can't be closed.
+    """
+    def close(self):
+        """
+        Do nothing.
+        """
+
+
+
+@implementer(IListeningPort)
+class _FakePort(object):
+    """
+    A fake L{IListeningPort} to be used in tests.
+
+    @ivar _hostAddress: The L{IAddress} this L{IListeningPort} is pretending
+        to be listening on.
+    """
+
+    def __init__(self, hostAddress):
+        """
+        @param hostAddress: An L{IAddress} this L{IListeningPort} should
+            pretend to be listening on.
+        """
+        self._hostAddress = hostAddress
+
+
+    def startListening(self):
+        """
+        Fake L{IListeningPort.startListening} that doesn't do anything.
+        """
+
+
+    def stopListening(self):
+        """
+        Fake L{IListeningPort.stopListening} that doesn't do anything.
+        """
+
+
+    def getHost(self):
+        """
+        Fake L{IListeningPort.getHost} that returns our L{IAddress}.
+        """
+        return self._hostAddress
+
+
+
+@implementer(IConnector)
+class _FakeConnector(object):
+    """
+    A fake L{IConnector} that allows us to inspect if it has been told to stop
+    connecting.
+
+    @ivar stoppedConnecting: has this connector's
+        L{_FakeConnector.stopConnecting} method been invoked yet?
+
+    @ivar _address: An L{IAddress} provider that represents our destination.
+    """
+
+    stoppedConnecting = False
+
+    def __init__(self, address):
+        """
+        @param address: An L{IAddress} provider that represents this
+            connector's destination.
+        """
+        self._address = address
+
+
+    def stopConnecting(self):
+        """
+        Implement L{IConnector.stopConnecting} and set
+        L{_FakeConnector.stoppedConnecting} to C{True}.
+        """
+        self.stoppedConnecting = True
+
+
+    def disconnect(self):
+        """
+        Implement L{IConnector.disconnect} as a no-op.
+        """
+
+
+    def connect(self):
+        """
+        Implement L{IConnector.connect} as a no-op.
+        """
+
+
+    def getDestination(self):
+        """
+        Implement L{IConnector.getDestination} to return the C{address} passed
+        to C{__init__}.
+        """
+        return self._address
+
+
+
+@implementer(IReactorTCP, IReactorSSL, IReactorUNIX, IReactorSocket)
+class MemoryReactor(object):
+    """
+    A fake reactor to be used in tests.  This reactor doesn't actually do
+    much that's useful yet.  It accepts TCP connection setup attempts, but
+    they will never succeed.
+
+    @ivar tcpClients: a list that keeps track of connection attempts (ie, calls
+        to C{connectTCP}).
+    @type tcpClients: C{list}
+
+    @ivar tcpServers: a list that keeps track of server listen attempts (ie, calls
+        to C{listenTCP}).
+    @type tcpServers: C{list}
+
+    @ivar sslClients: a list that keeps track of connection attempts (ie,
+        calls to C{connectSSL}).
+    @type sslClients: C{list}
+
+    @ivar sslServers: a list that keeps track of server listen attempts (ie,
+        calls to C{listenSSL}).
+    @type sslServers: C{list}
+
+    @ivar unixClients: a list that keeps track of connection attempts (ie,
+        calls to C{connectUNIX}).
+    @type unixClients: C{list}
+
+    @ivar unixServers: a list that keeps track of server listen attempts (ie,
+        calls to C{listenUNIX}).
+    @type unixServers: C{list}
+
+    @ivar adoptedPorts: a list that keeps track of server listen attempts (ie,
+        calls to C{adoptStreamPort}).
+    """
+
+    def __init__(self):
+        """
+        Initialize the tracking lists.
+        """
+        self.tcpClients = []
+        self.tcpServers = []
+        self.sslClients = []
+        self.sslServers = []
+        self.unixClients = []
+        self.unixServers = []
+        self.adoptedPorts = []
+
+
+    def adoptStreamPort(self, fileno, addressFamily, factory):
+        """
+        Fake L{IReactorSocket.adoptStreamPort}, that logs the call and returns
+        an L{IListeningPort}.
+        """
+        if addressFamily == AF_INET:
+            addr = IPv4Address('TCP', '0.0.0.0', 1234)
+        elif addressFamily == AF_INET6:
+            addr = IPv6Address('TCP', '::', 1234)
+        else:
+            raise UnsupportedAddressFamily()
+
+        self.adoptedPorts.append((fileno, addressFamily, factory))
+        return _FakePort(addr)
+
+
+    def listenTCP(self, port, factory, backlog=50, interface=''):
+        """
+        Fake L{reactor.listenTCP}, that logs the call and returns an
+        L{IListeningPort}.
+        """
+        self.tcpServers.append((port, factory, backlog, interface))
+        if isIPv6Address(interface):
+            address = IPv6Address('TCP', interface, port)
+        else:
+            address = IPv4Address('TCP', '0.0.0.0', port)
+        return _FakePort(address)
+
+
+    def connectTCP(self, host, port, factory, timeout=30, bindAddress=None):
+        """
+        Fake L{reactor.connectTCP}, that logs the call and returns an
+        L{IConnector}.
+        """
+        self.tcpClients.append((host, port, factory, timeout, bindAddress))
+        if isIPv6Address(host):
+            conn = _FakeConnector(IPv6Address('TCP', host, port))
+        else:
+            conn = _FakeConnector(IPv4Address('TCP', host, port))
+        factory.startedConnecting(conn)
+        return conn
+
+
+    def listenSSL(self, port, factory, contextFactory,
+                  backlog=50, interface=''):
+        """
+        Fake L{reactor.listenSSL}, that logs the call and returns an
+        L{IListeningPort}.
+        """
+        self.sslServers.append((port, factory, contextFactory,
+                                backlog, interface))
+        return _FakePort(IPv4Address('TCP', '0.0.0.0', port))
+
+
+    def connectSSL(self, host, port, factory, contextFactory,
+                   timeout=30, bindAddress=None):
+        """
+        Fake L{reactor.connectSSL}, that logs the call and returns an
+        L{IConnector}.
+        """
+        self.sslClients.append((host, port, factory, contextFactory,
+                                timeout, bindAddress))
+        conn = _FakeConnector(IPv4Address('TCP', host, port))
+        factory.startedConnecting(conn)
+        return conn
+
+
+    def listenUNIX(self, address, factory,
+                   backlog=50, mode=0o666, wantPID=0):
+        """
+        Fake L{reactor.listenUNIX}, that logs the call and returns an
+        L{IListeningPort}.
+        """
+        self.unixServers.append((address, factory, backlog, mode, wantPID))
+        return _FakePort(UNIXAddress(address))
+
+
+    def connectUNIX(self, address, factory, timeout=30, checkPID=0):
+        """
+        Fake L{reactor.connectUNIX}, that logs the call and returns an
+        L{IConnector}.
+        """
+        self.unixClients.append((address, factory, timeout, checkPID))
+        conn = _FakeConnector(UNIXAddress(address))
+        factory.startedConnecting(conn)
+        return conn
+
+
+
+@implementer(IReactorTCP, IReactorSSL, IReactorUNIX, IReactorSocket)
+class RaisingMemoryReactor(object):
+    """
+    A fake reactor to be used in tests.  It accepts TCP connection setup
+    attempts, but they will fail.
+
+    @ivar _listenException: An instance of an L{Exception}
+    @ivar _connectException: An instance of an L{Exception}
+    """
+
+    def __init__(self, listenException=None, connectException=None):
+        """
+        @param listenException: An instance of an L{Exception} to raise when any
+            C{listen} method is called.
+
+        @param connectException: An instance of an L{Exception} to raise when
+            any C{connect} method is called.
+        """
+        self._listenException = listenException
+        self._connectException = connectException
+
+
+    def adoptStreamPort(self, fileno, addressFamily, factory):
+        """
+        Fake L{IReactorSocket.adoptStreamPort}, that raises
+        L{self._listenException}.
+        """
+        raise self._listenException
+
+
+    def listenTCP(self, port, factory, backlog=50, interface=''):
+        """
+        Fake L{reactor.listenTCP}, that raises L{self._listenException}.
+        """
+        raise self._listenException
+
+
+    def connectTCP(self, host, port, factory, timeout=30, bindAddress=None):
+        """
+        Fake L{reactor.connectTCP}, that raises L{self._connectException}.
+        """
+        raise self._connectException
+
+
+    def listenSSL(self, port, factory, contextFactory,
+                  backlog=50, interface=''):
+        """
+        Fake L{reactor.listenSSL}, that raises L{self._listenException}.
+        """
+        raise self._listenException
+
+
+    def connectSSL(self, host, port, factory, contextFactory,
+                   timeout=30, bindAddress=None):
+        """
+        Fake L{reactor.connectSSL}, that raises L{self._connectException}.
+        """
+        raise self._connectException
+
+
+    def listenUNIX(self, address, factory,
+                   backlog=50, mode=0o666, wantPID=0):
+        """
+        Fake L{reactor.listenUNIX}, that raises L{self._listenException}.
+        """
+        raise self._listenException
+
+
+    def connectUNIX(self, address, factory, timeout=30, checkPID=0):
+        """
+        Fake L{reactor.connectUNIX}, that raises L{self._connectException}.
+        """
+        raise self._connectException
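
MemoryReactor and RaisingMemoryReactor never touch the network: the former records each connect/listen call in its tracking lists and hands back fake ports and connectors, while the latter raises the exceptions it was constructed with whenever a connect or listen method is called. A minimal usage sketch, illustrative only and not part of the imported sources, assuming the module is importable as twisted.test.proto_helpers:

    # Illustrative sketch, not part of the patch.
    from twisted.internet.protocol import ClientFactory, Protocol
    from twisted.test.proto_helpers import MemoryReactor

    class NoOpFactory(ClientFactory):
        protocol = Protocol

    reactor = MemoryReactor()
    reactor.connectTCP("example.com", 80, NoOpFactory())   # recorded, never connects
    reactor.listenTCP(8080, NoOpFactory())                 # recorded, never listens

    # Tests inspect the tracking lists instead of doing real I/O.
    host, port, factory, timeout, bindAddress = reactor.tcpClients[0]
    assert (host, port) == ("example.com", 80)
    assert reactor.tcpServers[0][0] == 8080
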
diff --git a/ThirdParty/Twisted/twisted/test/raiser.c b/ThirdParty/Twisted/twisted/test/raiser.c
new file mode 100644
index 0000000..b9ba176
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/raiser.c
@@ -0,0 +1,1443 @@
+/* Generated by Cython 0.14.1 on Tue Mar  8 19:41:56 2011 */
+
+#define PY_SSIZE_T_CLEAN
+#include "Python.h"
+#ifndef Py_PYTHON_H
+    #error Python headers needed to compile C extensions, please install development version of Python.
+#else
+
+#include <stddef.h> /* For offsetof */
+#ifndef offsetof
+#define offsetof(type, member) ( (size_t) & ((type*)0) -> member )
+#endif
+
+#if !defined(WIN32) && !defined(MS_WINDOWS)
+  #ifndef __stdcall
+    #define __stdcall
+  #endif
+  #ifndef __cdecl
+    #define __cdecl
+  #endif
+  #ifndef __fastcall
+    #define __fastcall
+  #endif
+#endif
+
+#ifndef DL_IMPORT
+  #define DL_IMPORT(t) t
+#endif
+#ifndef DL_EXPORT
+  #define DL_EXPORT(t) t
+#endif
+
+#ifndef PY_LONG_LONG
+  #define PY_LONG_LONG LONG_LONG
+#endif
+
+#if PY_VERSION_HEX < 0x02040000
+  #define METH_COEXIST 0
+  #define PyDict_CheckExact(op) (Py_TYPE(op) == &PyDict_Type)
+  #define PyDict_Contains(d,o)   PySequence_Contains(d,o)
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  typedef int Py_ssize_t;
+  #define PY_SSIZE_T_MAX INT_MAX
+  #define PY_SSIZE_T_MIN INT_MIN
+  #define PY_FORMAT_SIZE_T ""
+  #define PyInt_FromSsize_t(z) PyInt_FromLong(z)
+  #define PyInt_AsSsize_t(o)   PyInt_AsLong(o)
+  #define PyNumber_Index(o)    PyNumber_Int(o)
+  #define PyIndex_Check(o)     PyNumber_Check(o)
+  #define PyErr_WarnEx(category, message, stacklevel) PyErr_Warn(category, message)
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define Py_REFCNT(ob) (((PyObject*)(ob))->ob_refcnt)
+  #define Py_TYPE(ob)   (((PyObject*)(ob))->ob_type)
+  #define Py_SIZE(ob)   (((PyVarObject*)(ob))->ob_size)
+  #define PyVarObject_HEAD_INIT(type, size) \
+          PyObject_HEAD_INIT(type) size,
+  #define PyType_Modified(t)
+
+  typedef struct {
+     void *buf;
+     PyObject *obj;
+     Py_ssize_t len;
+     Py_ssize_t itemsize;
+     int readonly;
+     int ndim;
+     char *format;
+     Py_ssize_t *shape;
+     Py_ssize_t *strides;
+     Py_ssize_t *suboffsets;
+     void *internal;
+  } Py_buffer;
+
+  #define PyBUF_SIMPLE 0
+  #define PyBUF_WRITABLE 0x0001
+  #define PyBUF_FORMAT 0x0004
+  #define PyBUF_ND 0x0008
+  #define PyBUF_STRIDES (0x0010 | PyBUF_ND)
+  #define PyBUF_C_CONTIGUOUS (0x0020 | PyBUF_STRIDES)
+  #define PyBUF_F_CONTIGUOUS (0x0040 | PyBUF_STRIDES)
+  #define PyBUF_ANY_CONTIGUOUS (0x0080 | PyBUF_STRIDES)
+  #define PyBUF_INDIRECT (0x0100 | PyBUF_STRIDES)
+
+#endif
+
+#if PY_MAJOR_VERSION < 3
+  #define __Pyx_BUILTIN_MODULE_NAME "__builtin__"
+#else
+  #define __Pyx_BUILTIN_MODULE_NAME "builtins"
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define Py_TPFLAGS_CHECKTYPES 0
+  #define Py_TPFLAGS_HAVE_INDEX 0
+#endif
+
+#if (PY_VERSION_HEX < 0x02060000) || (PY_MAJOR_VERSION >= 3)
+  #define Py_TPFLAGS_HAVE_NEWBUFFER 0
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyBaseString_Type            PyUnicode_Type
+  #define PyStringObject               PyUnicodeObject
+  #define PyString_Type                PyUnicode_Type
+  #define PyString_Check               PyUnicode_Check
+  #define PyString_CheckExact          PyUnicode_CheckExact
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define PyBytesObject                PyStringObject
+  #define PyBytes_Type                 PyString_Type
+  #define PyBytes_Check                PyString_Check
+  #define PyBytes_CheckExact           PyString_CheckExact
+  #define PyBytes_FromString           PyString_FromString
+  #define PyBytes_FromStringAndSize    PyString_FromStringAndSize
+  #define PyBytes_FromFormat           PyString_FromFormat
+  #define PyBytes_DecodeEscape         PyString_DecodeEscape
+  #define PyBytes_AsString             PyString_AsString
+  #define PyBytes_AsStringAndSize      PyString_AsStringAndSize
+  #define PyBytes_Size                 PyString_Size
+  #define PyBytes_AS_STRING            PyString_AS_STRING
+  #define PyBytes_GET_SIZE             PyString_GET_SIZE
+  #define PyBytes_Repr                 PyString_Repr
+  #define PyBytes_Concat               PyString_Concat
+  #define PyBytes_ConcatAndDel         PyString_ConcatAndDel
+#endif
+
+#if PY_VERSION_HEX < 0x02060000
+  #define PySet_Check(obj)             PyObject_TypeCheck(obj, &PySet_Type)
+  #define PyFrozenSet_Check(obj)       PyObject_TypeCheck(obj, &PyFrozenSet_Type)
+#endif
+#ifndef PySet_CheckExact
+  #define PySet_CheckExact(obj)        (Py_TYPE(obj) == &PySet_Type)
+#endif
+
+#define __Pyx_TypeCheck(obj, type) PyObject_TypeCheck(obj, (PyTypeObject *)type)
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyIntObject                  PyLongObject
+  #define PyInt_Type                   PyLong_Type
+  #define PyInt_Check(op)              PyLong_Check(op)
+  #define PyInt_CheckExact(op)         PyLong_CheckExact(op)
+  #define PyInt_FromString             PyLong_FromString
+  #define PyInt_FromUnicode            PyLong_FromUnicode
+  #define PyInt_FromLong               PyLong_FromLong
+  #define PyInt_FromSize_t             PyLong_FromSize_t
+  #define PyInt_FromSsize_t            PyLong_FromSsize_t
+  #define PyInt_AsLong                 PyLong_AsLong
+  #define PyInt_AS_LONG                PyLong_AS_LONG
+  #define PyInt_AsSsize_t              PyLong_AsSsize_t
+  #define PyInt_AsUnsignedLongMask     PyLong_AsUnsignedLongMask
+  #define PyInt_AsUnsignedLongLongMask PyLong_AsUnsignedLongLongMask
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyBoolObject                 PyLongObject
+#endif
+
+
+#if PY_MAJOR_VERSION >= 3
+  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_TrueDivide(x,y)
+  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceTrueDivide(x,y)
+#else
+  #define __Pyx_PyNumber_Divide(x,y)         PyNumber_Divide(x,y)
+  #define __Pyx_PyNumber_InPlaceDivide(x,y)  PyNumber_InPlaceDivide(x,y)
+#endif
+
+#if (PY_MAJOR_VERSION < 3) || (PY_VERSION_HEX >= 0x03010300)
+  #define __Pyx_PySequence_GetSlice(obj, a, b) PySequence_GetSlice(obj, a, b)
+  #define __Pyx_PySequence_SetSlice(obj, a, b, value) PySequence_SetSlice(obj, a, b, value)
+  #define __Pyx_PySequence_DelSlice(obj, a, b) PySequence_DelSlice(obj, a, b)
+#else
+  #define __Pyx_PySequence_GetSlice(obj, a, b) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), (PyObject*)0) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_GetSlice(obj, a, b)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object is unsliceable", (obj)->ob_type->tp_name), (PyObject*)0)))
+  #define __Pyx_PySequence_SetSlice(obj, a, b, value) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_SetSlice(obj, a, b, value)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice assignment", (obj)->ob_type->tp_name), -1)))
+  #define __Pyx_PySequence_DelSlice(obj, a, b) (unlikely(!(obj)) ? \
+        (PyErr_SetString(PyExc_SystemError, "null argument to internal routine"), -1) : \
+        (likely((obj)->ob_type->tp_as_mapping) ? (PySequence_DelSlice(obj, a, b)) : \
+            (PyErr_Format(PyExc_TypeError, "'%.200s' object doesn't support slice deletion", (obj)->ob_type->tp_name), -1)))
+#endif
+
+#if PY_MAJOR_VERSION >= 3
+  #define PyMethod_New(func, self, klass) ((self) ? PyMethod_New(func, self) : PyInstanceMethod_New(func))
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  #define __Pyx_GetAttrString(o,n)   PyObject_GetAttrString((o),((char *)(n)))
+  #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),((char *)(n)),(a))
+  #define __Pyx_DelAttrString(o,n)   PyObject_DelAttrString((o),((char *)(n)))
+#else
+  #define __Pyx_GetAttrString(o,n)   PyObject_GetAttrString((o),(n))
+  #define __Pyx_SetAttrString(o,n,a) PyObject_SetAttrString((o),(n),(a))
+  #define __Pyx_DelAttrString(o,n)   PyObject_DelAttrString((o),(n))
+#endif
+
+#if PY_VERSION_HEX < 0x02050000
+  #define __Pyx_NAMESTR(n) ((char *)(n))
+  #define __Pyx_DOCSTR(n)  ((char *)(n))
+#else
+  #define __Pyx_NAMESTR(n) (n)
+  #define __Pyx_DOCSTR(n)  (n)
+#endif
+
+#ifdef __cplusplus
+#define __PYX_EXTERN_C extern "C"
+#else
+#define __PYX_EXTERN_C extern
+#endif
+
+#if defined(WIN32) || defined(MS_WINDOWS)
+#define _USE_MATH_DEFINES
+#endif
+#include <math.h>
+#define __PYX_HAVE_API__twisted__test__raiser
+
+#ifdef PYREX_WITHOUT_ASSERTIONS
+#define CYTHON_WITHOUT_ASSERTIONS
+#endif
+
+
+/* inline attribute */
+#ifndef CYTHON_INLINE
+  #if defined(__GNUC__)
+    #define CYTHON_INLINE __inline__
+  #elif defined(_MSC_VER)
+    #define CYTHON_INLINE __inline
+  #elif defined (__STDC_VERSION__) && __STDC_VERSION__ >= 199901L
+    #define CYTHON_INLINE inline
+  #else
+    #define CYTHON_INLINE
+  #endif
+#endif
+
+/* unused attribute */
+#ifndef CYTHON_UNUSED
+# if defined(__GNUC__)
+#   if !(defined(__cplusplus)) || (__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ >= 4))
+#     define CYTHON_UNUSED __attribute__ ((__unused__))
+#   else
+#     define CYTHON_UNUSED
+#   endif
+# elif defined(__ICC) || defined(__INTEL_COMPILER)
+#   define CYTHON_UNUSED __attribute__ ((__unused__))
+# else
+#   define CYTHON_UNUSED
+# endif
+#endif
+
+typedef struct {PyObject **p; char *s; const long n; const char* encoding; const char is_unicode; const char is_str; const char intern; } __Pyx_StringTabEntry; /*proto*/
+
+
+/* Type Conversion Predeclarations */
+
+#define __Pyx_PyBytes_FromUString(s) PyBytes_FromString((char*)s)
+#define __Pyx_PyBytes_AsUString(s)   ((unsigned char*) PyBytes_AsString(s))
+
+#define __Pyx_PyBool_FromLong(b) ((b) ? (Py_INCREF(Py_True), Py_True) : (Py_INCREF(Py_False), Py_False))
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject*);
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x);
+
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject*);
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t);
+static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject*);
+
+#define __pyx_PyFloat_AsDouble(x) (PyFloat_CheckExact(x) ? PyFloat_AS_DOUBLE(x) : PyFloat_AsDouble(x))
+
+
+#ifdef __GNUC__
+/* Test for GCC > 2.95 */
+#if __GNUC__ > 2 || (__GNUC__ == 2 && (__GNUC_MINOR__ > 95))
+#define likely(x)   __builtin_expect(!!(x), 1)
+#define unlikely(x) __builtin_expect(!!(x), 0)
+#else /* __GNUC__ > 2 ... */
+#define likely(x)   (x)
+#define unlikely(x) (x)
+#endif /* __GNUC__ > 2 ... */
+#else /* __GNUC__ */
+#define likely(x)   (x)
+#define unlikely(x) (x)
+#endif /* __GNUC__ */
+    
+static PyObject *__pyx_m;
+static PyObject *__pyx_b;
+static PyObject *__pyx_empty_tuple;
+static PyObject *__pyx_empty_bytes;
+static int __pyx_lineno;
+static int __pyx_clineno = 0;
+static const char * __pyx_cfilenm= __FILE__;
+static const char *__pyx_filename;
+
+
+static const char *__pyx_f[] = {
+  "raiser.pyx",
+};
+
+/* Type declarations */
+
+#ifndef CYTHON_REFNANNY
+  #define CYTHON_REFNANNY 0
+#endif
+
+#if CYTHON_REFNANNY
+  typedef struct {
+    void (*INCREF)(void*, PyObject*, int);
+    void (*DECREF)(void*, PyObject*, int);
+    void (*GOTREF)(void*, PyObject*, int);
+    void (*GIVEREF)(void*, PyObject*, int);
+    void* (*SetupContext)(const char*, int, const char*);
+    void (*FinishContext)(void**);
+  } __Pyx_RefNannyAPIStruct;
+  static __Pyx_RefNannyAPIStruct *__Pyx_RefNanny = NULL;
+  static __Pyx_RefNannyAPIStruct * __Pyx_RefNannyImportAPI(const char *modname) {
+    PyObject *m = NULL, *p = NULL;
+    void *r = NULL;
+    m = PyImport_ImportModule((char *)modname);
+    if (!m) goto end;
+    p = PyObject_GetAttrString(m, (char *)"RefNannyAPI");
+    if (!p) goto end;
+    r = PyLong_AsVoidPtr(p);
+  end:
+    Py_XDECREF(p);
+    Py_XDECREF(m);
+    return (__Pyx_RefNannyAPIStruct *)r;
+  }
+  #define __Pyx_RefNannySetupContext(name)           void *__pyx_refnanny = __Pyx_RefNanny->SetupContext((name), __LINE__, __FILE__)
+  #define __Pyx_RefNannyFinishContext()           __Pyx_RefNanny->FinishContext(&__pyx_refnanny)
+  #define __Pyx_INCREF(r) __Pyx_RefNanny->INCREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_DECREF(r) __Pyx_RefNanny->DECREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_GOTREF(r) __Pyx_RefNanny->GOTREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_GIVEREF(r) __Pyx_RefNanny->GIVEREF(__pyx_refnanny, (PyObject *)(r), __LINE__)
+  #define __Pyx_XDECREF(r) do { if((r) != NULL) {__Pyx_DECREF(r);} } while(0)
+#else
+  #define __Pyx_RefNannySetupContext(name)
+  #define __Pyx_RefNannyFinishContext()
+  #define __Pyx_INCREF(r) Py_INCREF(r)
+  #define __Pyx_DECREF(r) Py_DECREF(r)
+  #define __Pyx_GOTREF(r)
+  #define __Pyx_GIVEREF(r)
+  #define __Pyx_XDECREF(r) Py_XDECREF(r)
+#endif /* CYTHON_REFNANNY */
+#define __Pyx_XGIVEREF(r) do { if((r) != NULL) {__Pyx_GIVEREF(r);} } while(0)
+#define __Pyx_XGOTREF(r) do { if((r) != NULL) {__Pyx_GOTREF(r);} } while(0)
+
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name); /*proto*/
+
+static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb); /*proto*/
+
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb); /*proto*/
+
+static PyObject *__Pyx_FindPy2Metaclass(PyObject *bases); /*proto*/
+
+static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name,
+                                   PyObject *modname); /*proto*/
+
+static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject *);
+
+static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject *);
+
+static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject *);
+
+static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject *);
+
+static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject *);
+
+static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject *);
+
+static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject *);
+
+static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject *);
+
+static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject *);
+
+static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject *);
+
+static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject *);
+
+static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject *);
+
+static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject *);
+
+static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject *);
+
+static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject *);
+
+static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject *);
+
+static void __Pyx_AddTraceback(const char *funcname); /*proto*/
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t); /*proto*/
+/* Module declarations from twisted.test.raiser */
+
+#define __Pyx_MODULE_NAME "twisted.test.raiser"
+static int __pyx_module_is_main_twisted__test__raiser = 0;
+
+/* Implementation of twisted.test.raiser */
+static PyObject *__pyx_builtin_Exception;
+static char __pyx_k_1[] = "This function is intentionally broken";
+static char __pyx_k_3[] = "\nA trivial extension that just raises an exception.\nSee L{twisted.test.test_failure.test_failureConstructionWithMungedStackSucceeds}.\n";
+static char __pyx_k_4[] = "\n    A specific exception only used to be identified in tests.\n    ";
+static char __pyx_k_5[] = "twisted.test.raiser";
+static char __pyx_k____main__[] = "__main__";
+static char __pyx_k____test__[] = "__test__";
+static char __pyx_k__Exception[] = "Exception";
+static char __pyx_k__raiseException[] = "raiseException";
+static char __pyx_k__RaiserException[] = "RaiserException";
+static PyObject *__pyx_kp_s_1;
+static PyObject *__pyx_kp_s_4;
+static PyObject *__pyx_n_s_5;
+static PyObject *__pyx_n_s__Exception;
+static PyObject *__pyx_n_s__RaiserException;
+static PyObject *__pyx_n_s____main__;
+static PyObject *__pyx_n_s____test__;
+static PyObject *__pyx_n_s__raiseException;
+static PyObject *__pyx_k_tuple_2;
+
+/* "twisted/test/raiser.pyx":17
+ * 
+ * 
+ * def raiseException():             # <<<<<<<<<<<<<<
+ *     """
+ *     Raise L{RaiserException}.
+ */
+
+static PyObject *__pyx_pf_7twisted_4test_6raiser_raiseException(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused); /*proto*/
+static char __pyx_doc_7twisted_4test_6raiser_raiseException[] = "\n    Raise L{RaiserException}.\n    ";
+static PyMethodDef __pyx_mdef_7twisted_4test_6raiser_raiseException = {__Pyx_NAMESTR("raiseException"), (PyCFunction)__pyx_pf_7twisted_4test_6raiser_raiseException, METH_NOARGS, __Pyx_DOCSTR(__pyx_doc_7twisted_4test_6raiser_raiseException)};
+static PyObject *__pyx_pf_7twisted_4test_6raiser_raiseException(PyObject *__pyx_self, CYTHON_UNUSED PyObject *unused) {
+  PyObject *__pyx_r = NULL;
+  PyObject *__pyx_t_1 = NULL;
+  PyObject *__pyx_t_2 = NULL;
+  __Pyx_RefNannySetupContext("raiseException");
+  __pyx_self = __pyx_self;
+
+  /* "twisted/test/raiser.pyx":21
+ *     Raise L{RaiserException}.
+ *     """
+ *     raise RaiserException("This function is intentionally broken")             # <<<<<<<<<<<<<<
+ */
+  __pyx_t_1 = __Pyx_GetName(__pyx_m, __pyx_n_s__RaiserException); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  __pyx_t_2 = PyObject_Call(__pyx_t_1, ((PyObject *)__pyx_k_tuple_2), NULL); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_2);
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+  __Pyx_Raise(__pyx_t_2, 0, 0);
+  __Pyx_DECREF(__pyx_t_2); __pyx_t_2 = 0;
+  {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+
+  __pyx_r = Py_None; __Pyx_INCREF(Py_None);
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_AddTraceback("twisted.test.raiser.raiseException");
+  __pyx_r = NULL;
+  __pyx_L0:;
+  __Pyx_XGIVEREF(__pyx_r);
+  __Pyx_RefNannyFinishContext();
+  return __pyx_r;
+}
+
+static PyMethodDef __pyx_methods[] = {
+  {0, 0, 0, 0}
+};
+
+#if PY_MAJOR_VERSION >= 3
+static struct PyModuleDef __pyx_moduledef = {
+    PyModuleDef_HEAD_INIT,
+    __Pyx_NAMESTR("raiser"),
+    __Pyx_DOCSTR(__pyx_k_3), /* m_doc */
+    -1, /* m_size */
+    __pyx_methods /* m_methods */,
+    NULL, /* m_reload */
+    NULL, /* m_traverse */
+    NULL, /* m_clear */
+    NULL /* m_free */
+};
+#endif
+
+static __Pyx_StringTabEntry __pyx_string_tab[] = {
+  {&__pyx_kp_s_1, __pyx_k_1, sizeof(__pyx_k_1), 0, 0, 1, 0},
+  {&__pyx_kp_s_4, __pyx_k_4, sizeof(__pyx_k_4), 0, 0, 1, 0},
+  {&__pyx_n_s_5, __pyx_k_5, sizeof(__pyx_k_5), 0, 0, 1, 1},
+  {&__pyx_n_s__Exception, __pyx_k__Exception, sizeof(__pyx_k__Exception), 0, 0, 1, 1},
+  {&__pyx_n_s__RaiserException, __pyx_k__RaiserException, sizeof(__pyx_k__RaiserException), 0, 0, 1, 1},
+  {&__pyx_n_s____main__, __pyx_k____main__, sizeof(__pyx_k____main__), 0, 0, 1, 1},
+  {&__pyx_n_s____test__, __pyx_k____test__, sizeof(__pyx_k____test__), 0, 0, 1, 1},
+  {&__pyx_n_s__raiseException, __pyx_k__raiseException, sizeof(__pyx_k__raiseException), 0, 0, 1, 1},
+  {0, 0, 0, 0, 0, 0, 0}
+};
+static int __Pyx_InitCachedBuiltins(void) {
+  __pyx_builtin_Exception = __Pyx_GetName(__pyx_b, __pyx_n_s__Exception); if (!__pyx_builtin_Exception) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 11; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  return 0;
+  __pyx_L1_error:;
+  return -1;
+}
+
+static int __Pyx_InitCachedConstants(void) {
+  __Pyx_RefNannySetupContext("__Pyx_InitCachedConstants");
+
+  /* "twisted/test/raiser.pyx":21
+ *     Raise L{RaiserException}.
+ *     """
+ *     raise RaiserException("This function is intentionally broken")             # <<<<<<<<<<<<<<
+ */
+  __pyx_k_tuple_2 = PyTuple_New(1); if (unlikely(!__pyx_k_tuple_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 21; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_k_tuple_2));
+  __Pyx_INCREF(((PyObject *)__pyx_kp_s_1));
+  PyTuple_SET_ITEM(__pyx_k_tuple_2, 0, ((PyObject *)__pyx_kp_s_1));
+  __Pyx_GIVEREF(((PyObject *)__pyx_kp_s_1));
+  __Pyx_GIVEREF(((PyObject *)__pyx_k_tuple_2));
+  __Pyx_RefNannyFinishContext();
+  return 0;
+  __pyx_L1_error:;
+  __Pyx_RefNannyFinishContext();
+  return -1;
+}
+
+static int __Pyx_InitGlobals(void) {
+  if (__Pyx_InitStrings(__pyx_string_tab) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  return 0;
+  __pyx_L1_error:;
+  return -1;
+}
+
+#if PY_MAJOR_VERSION < 3
+PyMODINIT_FUNC initraiser(void); /*proto*/
+PyMODINIT_FUNC initraiser(void)
+#else
+PyMODINIT_FUNC PyInit_raiser(void); /*proto*/
+PyMODINIT_FUNC PyInit_raiser(void)
+#endif
+{
+  PyObject *__pyx_t_1 = NULL;
+  PyObject *__pyx_t_2 = NULL;
+  PyObject *__pyx_t_3 = NULL;
+  #if CYTHON_REFNANNY
+  void* __pyx_refnanny = NULL;
+  __Pyx_RefNanny = __Pyx_RefNannyImportAPI("refnanny");
+  if (!__Pyx_RefNanny) {
+      PyErr_Clear();
+      __Pyx_RefNanny = __Pyx_RefNannyImportAPI("Cython.Runtime.refnanny");
+      if (!__Pyx_RefNanny)
+          Py_FatalError("failed to import 'refnanny' module");
+  }
+  __pyx_refnanny = __Pyx_RefNanny->SetupContext("PyMODINIT_FUNC PyInit_raiser(void)", __LINE__, __FILE__);
+  #endif
+  __pyx_empty_tuple = PyTuple_New(0); if (unlikely(!__pyx_empty_tuple)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_empty_bytes = PyBytes_FromStringAndSize("", 0); if (unlikely(!__pyx_empty_bytes)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  #ifdef __pyx_binding_PyCFunctionType_USED
+  if (__pyx_binding_PyCFunctionType_init() < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  #endif
+  /*--- Library function declarations ---*/
+  /*--- Threads initialization code ---*/
+  #if defined(__PYX_FORCE_INIT_THREADS) && __PYX_FORCE_INIT_THREADS
+  #ifdef WITH_THREAD /* Python build with threading support? */
+  PyEval_InitThreads();
+  #endif
+  #endif
+  /*--- Module creation code ---*/
+  #if PY_MAJOR_VERSION < 3
+  __pyx_m = Py_InitModule4(__Pyx_NAMESTR("raiser"), __pyx_methods, __Pyx_DOCSTR(__pyx_k_3), 0, PYTHON_API_VERSION);
+  #else
+  __pyx_m = PyModule_Create(&__pyx_moduledef);
+  #endif
+  if (!__pyx_m) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  #if PY_MAJOR_VERSION < 3
+  Py_INCREF(__pyx_m);
+  #endif
+  __pyx_b = PyImport_AddModule(__Pyx_NAMESTR(__Pyx_BUILTIN_MODULE_NAME));
+  if (!__pyx_b) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  if (__Pyx_SetAttrString(__pyx_m, "__builtins__", __pyx_b) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  /*--- Initialize various global constants etc. ---*/
+  if (unlikely(__Pyx_InitGlobals() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  if (__pyx_module_is_main_twisted__test__raiser) {
+    if (__Pyx_SetAttrString(__pyx_m, "__name__", __pyx_n_s____main__) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;};
+  }
+  /*--- Builtin init code ---*/
+  if (unlikely(__Pyx_InitCachedBuiltins() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  /*--- Constants init code ---*/
+  if (unlikely(__Pyx_InitCachedConstants() < 0)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  /*--- Global init code ---*/
+  /*--- Function export code ---*/
+  /*--- Type init code ---*/
+  /*--- Type import code ---*/
+  /*--- Function import code ---*/
+  /*--- Execution code ---*/
+
+  /* "twisted/test/raiser.pyx":11
+ * 
+ * 
+ * class RaiserException(Exception):             # <<<<<<<<<<<<<<
+ *     """
+ *     A specific exception only used to be identified in tests.
+ */
+  __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 11; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_1));
+  __pyx_t_2 = PyTuple_New(1); if (unlikely(!__pyx_t_2)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 11; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_2));
+  __Pyx_INCREF(__pyx_builtin_Exception);
+  PyTuple_SET_ITEM(__pyx_t_2, 0, __pyx_builtin_Exception);
+  __Pyx_GIVEREF(__pyx_builtin_Exception);
+  if (PyDict_SetItemString(((PyObject *)__pyx_t_1), "__doc__", ((PyObject *)__pyx_kp_s_4)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 11; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __pyx_t_3 = __Pyx_CreateClass(((PyObject *)__pyx_t_2), ((PyObject *)__pyx_t_1), __pyx_n_s__RaiserException, __pyx_n_s_5); if (unlikely(!__pyx_t_3)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 11; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_3);
+  __Pyx_DECREF(((PyObject *)__pyx_t_2)); __pyx_t_2 = 0;
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__RaiserException, __pyx_t_3) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 11; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_3); __pyx_t_3 = 0;
+  __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0;
+
+  /* "twisted/test/raiser.pyx":17
+ * 
+ * 
+ * def raiseException():             # <<<<<<<<<<<<<<
+ *     """
+ *     Raise L{RaiserException}.
+ */
+  __pyx_t_1 = PyCFunction_NewEx(&__pyx_mdef_7twisted_4test_6raiser_raiseException, NULL, __pyx_n_s_5); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 17; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(__pyx_t_1);
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s__raiseException, __pyx_t_1) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 17; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(__pyx_t_1); __pyx_t_1 = 0;
+
+  /* "twisted/test/raiser.pyx":1
+ * # Copyright (c) Twisted Matrix Laboratories.             # <<<<<<<<<<<<<<
+ * # See LICENSE for details.
+ * 
+ */
+  __pyx_t_1 = PyDict_New(); if (unlikely(!__pyx_t_1)) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_GOTREF(((PyObject *)__pyx_t_1));
+  if (PyObject_SetAttr(__pyx_m, __pyx_n_s____test__, ((PyObject *)__pyx_t_1)) < 0) {__pyx_filename = __pyx_f[0]; __pyx_lineno = 1; __pyx_clineno = __LINE__; goto __pyx_L1_error;}
+  __Pyx_DECREF(((PyObject *)__pyx_t_1)); __pyx_t_1 = 0;
+  goto __pyx_L0;
+  __pyx_L1_error:;
+  __Pyx_XDECREF(__pyx_t_1);
+  __Pyx_XDECREF(__pyx_t_2);
+  __Pyx_XDECREF(__pyx_t_3);
+  if (__pyx_m) {
+    __Pyx_AddTraceback("init twisted.test.raiser");
+    Py_DECREF(__pyx_m); __pyx_m = 0;
+  } else if (!PyErr_Occurred()) {
+    PyErr_SetString(PyExc_ImportError, "init twisted.test.raiser");
+  }
+  __pyx_L0:;
+  __Pyx_RefNannyFinishContext();
+  #if PY_MAJOR_VERSION < 3
+  return;
+  #else
+  return __pyx_m;
+  #endif
+}
+
+/* Runtime support code */
+
+static PyObject *__Pyx_GetName(PyObject *dict, PyObject *name) {
+    PyObject *result;
+    result = PyObject_GetAttr(dict, name);
+    if (!result)
+        PyErr_SetObject(PyExc_NameError, name);
+    return result;
+}
+
+static CYTHON_INLINE void __Pyx_ErrRestore(PyObject *type, PyObject *value, PyObject *tb) {
+    PyObject *tmp_type, *tmp_value, *tmp_tb;
+    PyThreadState *tstate = PyThreadState_GET();
+
+    tmp_type = tstate->curexc_type;
+    tmp_value = tstate->curexc_value;
+    tmp_tb = tstate->curexc_traceback;
+    tstate->curexc_type = type;
+    tstate->curexc_value = value;
+    tstate->curexc_traceback = tb;
+    Py_XDECREF(tmp_type);
+    Py_XDECREF(tmp_value);
+    Py_XDECREF(tmp_tb);
+}
+
+static CYTHON_INLINE void __Pyx_ErrFetch(PyObject **type, PyObject **value, PyObject **tb) {
+    PyThreadState *tstate = PyThreadState_GET();
+    *type = tstate->curexc_type;
+    *value = tstate->curexc_value;
+    *tb = tstate->curexc_traceback;
+
+    tstate->curexc_type = 0;
+    tstate->curexc_value = 0;
+    tstate->curexc_traceback = 0;
+}
+
+
+#if PY_MAJOR_VERSION < 3
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb) {
+    Py_XINCREF(type);
+    Py_XINCREF(value);
+    Py_XINCREF(tb);
+    /* First, check the traceback argument, replacing None with NULL. */
+    if (tb == Py_None) {
+        Py_DECREF(tb);
+        tb = 0;
+    }
+    else if (tb != NULL && !PyTraceBack_Check(tb)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: arg 3 must be a traceback or None");
+        goto raise_error;
+    }
+    /* Next, replace a missing value with None */
+    if (value == NULL) {
+        value = Py_None;
+        Py_INCREF(value);
+    }
+    #if PY_VERSION_HEX < 0x02050000
+    if (!PyClass_Check(type))
+    #else
+    if (!PyType_Check(type))
+    #endif
+    {
+        /* Raising an instance.  The value should be a dummy. */
+        if (value != Py_None) {
+            PyErr_SetString(PyExc_TypeError,
+                "instance exception may not have a separate value");
+            goto raise_error;
+        }
+        /* Normalize to raise <class>, <instance> */
+        Py_DECREF(value);
+        value = type;
+        #if PY_VERSION_HEX < 0x02050000
+            if (PyInstance_Check(type)) {
+                type = (PyObject*) ((PyInstanceObject*)type)->in_class;
+                Py_INCREF(type);
+            }
+            else {
+                type = 0;
+                PyErr_SetString(PyExc_TypeError,
+                    "raise: exception must be an old-style class or instance");
+                goto raise_error;
+            }
+        #else
+            type = (PyObject*) Py_TYPE(type);
+            Py_INCREF(type);
+            if (!PyType_IsSubtype((PyTypeObject *)type, (PyTypeObject *)PyExc_BaseException)) {
+                PyErr_SetString(PyExc_TypeError,
+                    "raise: exception class must be a subclass of BaseException");
+                goto raise_error;
+            }
+        #endif
+    }
+
+    __Pyx_ErrRestore(type, value, tb);
+    return;
+raise_error:
+    Py_XDECREF(value);
+    Py_XDECREF(type);
+    Py_XDECREF(tb);
+    return;
+}
+
+#else /* Python 3+ */
+
+static void __Pyx_Raise(PyObject *type, PyObject *value, PyObject *tb) {
+    if (tb == Py_None) {
+        tb = 0;
+    } else if (tb && !PyTraceBack_Check(tb)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: arg 3 must be a traceback or None");
+        goto bad;
+    }
+    if (value == Py_None)
+        value = 0;
+
+    if (PyExceptionInstance_Check(type)) {
+        if (value) {
+            PyErr_SetString(PyExc_TypeError,
+                "instance exception may not have a separate value");
+            goto bad;
+        }
+        value = type;
+        type = (PyObject*) Py_TYPE(value);
+    } else if (!PyExceptionClass_Check(type)) {
+        PyErr_SetString(PyExc_TypeError,
+            "raise: exception class must be a subclass of BaseException");
+        goto bad;
+    }
+
+    PyErr_SetObject(type, value);
+
+    if (tb) {
+        PyThreadState *tstate = PyThreadState_GET();
+        PyObject* tmp_tb = tstate->curexc_traceback;
+        if (tb != tmp_tb) {
+            Py_INCREF(tb);
+            tstate->curexc_traceback = tb;
+            Py_XDECREF(tmp_tb);
+        }
+    }
+
+bad:
+    return;
+}
+#endif
+
+static PyObject *__Pyx_FindPy2Metaclass(PyObject *bases) {
+    PyObject *metaclass;
+    /* Default metaclass */
+#if PY_MAJOR_VERSION < 3
+    if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
+        PyObject *base = PyTuple_GET_ITEM(bases, 0);
+        metaclass = PyObject_GetAttrString(base, "__class__");
+        if (!metaclass) {
+            PyErr_Clear();
+            metaclass = (PyObject*) Py_TYPE(base);
+        }
+    } else {
+        metaclass = (PyObject *) &PyClass_Type;
+    }
+#else
+    if (PyTuple_Check(bases) && PyTuple_GET_SIZE(bases) > 0) {
+        PyObject *base = PyTuple_GET_ITEM(bases, 0);
+        metaclass = (PyObject*) Py_TYPE(base);
+    } else {
+        metaclass = (PyObject *) &PyType_Type;
+    }
+#endif
+    Py_INCREF(metaclass);
+    return metaclass;
+}
+
+static PyObject *__Pyx_CreateClass(PyObject *bases, PyObject *dict, PyObject *name,
+                                   PyObject *modname) {
+    PyObject *result;
+    PyObject *metaclass;
+
+    if (PyDict_SetItemString(dict, "__module__", modname) < 0)
+        return NULL;
+
+    /* Python2 __metaclass__ */
+    metaclass = PyDict_GetItemString(dict, "__metaclass__");
+    if (metaclass) {
+        Py_INCREF(metaclass);
+    } else {
+        metaclass = __Pyx_FindPy2Metaclass(bases);
+    }
+    result = PyObject_CallFunctionObjArgs(metaclass, name, bases, dict, NULL);
+    Py_DECREF(metaclass);
+    return result;
+}
+
+static CYTHON_INLINE unsigned char __Pyx_PyInt_AsUnsignedChar(PyObject* x) {
+    const unsigned char neg_one = (unsigned char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned char" :
+                    "value too large to convert to unsigned char");
+            }
+            return (unsigned char)-1;
+        }
+        return (unsigned char)val;
+    }
+    return (unsigned char)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
+static CYTHON_INLINE unsigned short __Pyx_PyInt_AsUnsignedShort(PyObject* x) {
+    const unsigned short neg_one = (unsigned short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned short" :
+                    "value too large to convert to unsigned short");
+            }
+            return (unsigned short)-1;
+        }
+        return (unsigned short)val;
+    }
+    return (unsigned short)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
+static CYTHON_INLINE unsigned int __Pyx_PyInt_AsUnsignedInt(PyObject* x) {
+    const unsigned int neg_one = (unsigned int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(unsigned int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(unsigned int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to unsigned int" :
+                    "value too large to convert to unsigned int");
+            }
+            return (unsigned int)-1;
+        }
+        return (unsigned int)val;
+    }
+    return (unsigned int)__Pyx_PyInt_AsUnsignedLong(x);
+}
+
+static CYTHON_INLINE char __Pyx_PyInt_AsChar(PyObject* x) {
+    const char neg_one = (char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to char" :
+                    "value too large to convert to char");
+            }
+            return (char)-1;
+        }
+        return (char)val;
+    }
+    return (char)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE short __Pyx_PyInt_AsShort(PyObject* x) {
+    const short neg_one = (short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to short" :
+                    "value too large to convert to short");
+            }
+            return (short)-1;
+        }
+        return (short)val;
+    }
+    return (short)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE int __Pyx_PyInt_AsInt(PyObject* x) {
+    const int neg_one = (int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to int" :
+                    "value too large to convert to int");
+            }
+            return (int)-1;
+        }
+        return (int)val;
+    }
+    return (int)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE signed char __Pyx_PyInt_AsSignedChar(PyObject* x) {
+    const signed char neg_one = (signed char)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed char) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed char)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed char" :
+                    "value too large to convert to signed char");
+            }
+            return (signed char)-1;
+        }
+        return (signed char)val;
+    }
+    return (signed char)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE signed short __Pyx_PyInt_AsSignedShort(PyObject* x) {
+    const signed short neg_one = (signed short)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed short) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed short)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed short" :
+                    "value too large to convert to signed short");
+            }
+            return (signed short)-1;
+        }
+        return (signed short)val;
+    }
+    return (signed short)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE signed int __Pyx_PyInt_AsSignedInt(PyObject* x) {
+    const signed int neg_one = (signed int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(signed int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(signed int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to signed int" :
+                    "value too large to convert to signed int");
+            }
+            return (signed int)-1;
+        }
+        return (signed int)val;
+    }
+    return (signed int)__Pyx_PyInt_AsSignedLong(x);
+}
+
+static CYTHON_INLINE int __Pyx_PyInt_AsLongDouble(PyObject* x) {
+    const int neg_one = (int)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+    if (sizeof(int) < sizeof(long)) {
+        long val = __Pyx_PyInt_AsLong(x);
+        if (unlikely(val != (long)(int)val)) {
+            if (!unlikely(val == -1 && PyErr_Occurred())) {
+                PyErr_SetString(PyExc_OverflowError,
+                    (is_unsigned && unlikely(val < 0)) ?
+                    "can't convert negative value to int" :
+                    "value too large to convert to int");
+            }
+            return (int)-1;
+        }
+        return (int)val;
+    }
+    return (int)__Pyx_PyInt_AsLong(x);
+}
+
+static CYTHON_INLINE unsigned long __Pyx_PyInt_AsUnsignedLong(PyObject* x) {
+    const unsigned long neg_one = (unsigned long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to unsigned long");
+            return (unsigned long)-1;
+        }
+        return (unsigned long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to unsigned long");
+                return (unsigned long)-1;
+            }
+            return PyLong_AsUnsignedLong(x);
+        } else {
+            return PyLong_AsLong(x);
+        }
+    } else {
+        unsigned long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (unsigned long)-1;
+        val = __Pyx_PyInt_AsUnsignedLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE unsigned PY_LONG_LONG __Pyx_PyInt_AsUnsignedLongLong(PyObject* x) {
+    const unsigned PY_LONG_LONG neg_one = (unsigned PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to unsigned PY_LONG_LONG");
+            return (unsigned PY_LONG_LONG)-1;
+        }
+        return (unsigned PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to unsigned PY_LONG_LONG");
+                return (unsigned PY_LONG_LONG)-1;
+            }
+            return PyLong_AsUnsignedLongLong(x);
+        } else {
+            return PyLong_AsLongLong(x);
+        }
+    } else {
+        unsigned PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (unsigned PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsUnsignedLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE long __Pyx_PyInt_AsLong(PyObject* x) {
+    const long neg_one = (long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to long");
+            return (long)-1;
+        }
+        return (long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to long");
+                return (long)-1;
+            }
+            return PyLong_AsUnsignedLong(x);
+        } else {
+            return PyLong_AsLong(x);
+        }
+    } else {
+        long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (long)-1;
+        val = __Pyx_PyInt_AsLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE PY_LONG_LONG __Pyx_PyInt_AsLongLong(PyObject* x) {
+    const PY_LONG_LONG neg_one = (PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to PY_LONG_LONG");
+            return (PY_LONG_LONG)-1;
+        }
+        return (PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to PY_LONG_LONG");
+                return (PY_LONG_LONG)-1;
+            }
+            return PyLong_AsUnsignedLongLong(x);
+        } else {
+            return PyLong_AsLongLong(x);
+        }
+    } else {
+        PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE signed long __Pyx_PyInt_AsSignedLong(PyObject* x) {
+    const signed long neg_one = (signed long)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to signed long");
+            return (signed long)-1;
+        }
+        return (signed long)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to signed long");
+                return (signed long)-1;
+            }
+            return PyLong_AsUnsignedLong(x);
+        } else {
+            return PyLong_AsLong(x);
+        }
+    } else {
+        signed long val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (signed long)-1;
+        val = __Pyx_PyInt_AsSignedLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+static CYTHON_INLINE signed PY_LONG_LONG __Pyx_PyInt_AsSignedLongLong(PyObject* x) {
+    const signed PY_LONG_LONG neg_one = (signed PY_LONG_LONG)-1, const_zero = 0;
+    const int is_unsigned = neg_one > const_zero;
+#if PY_VERSION_HEX < 0x03000000
+    if (likely(PyInt_Check(x))) {
+        long val = PyInt_AS_LONG(x);
+        if (is_unsigned && unlikely(val < 0)) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "can't convert negative value to signed PY_LONG_LONG");
+            return (signed PY_LONG_LONG)-1;
+        }
+        return (signed PY_LONG_LONG)val;
+    } else
+#endif
+    if (likely(PyLong_Check(x))) {
+        if (is_unsigned) {
+            if (unlikely(Py_SIZE(x) < 0)) {
+                PyErr_SetString(PyExc_OverflowError,
+                                "can't convert negative value to signed PY_LONG_LONG");
+                return (signed PY_LONG_LONG)-1;
+            }
+            return PyLong_AsUnsignedLongLong(x);
+        } else {
+            return PyLong_AsLongLong(x);
+        }
+    } else {
+        signed PY_LONG_LONG val;
+        PyObject *tmp = __Pyx_PyNumber_Int(x);
+        if (!tmp) return (signed PY_LONG_LONG)-1;
+        val = __Pyx_PyInt_AsSignedLongLong(tmp);
+        Py_DECREF(tmp);
+        return val;
+    }
+}
+
+#include "compile.h"
+#include "frameobject.h"
+#include "traceback.h"
+
+static void __Pyx_AddTraceback(const char *funcname) {
+    PyObject *py_srcfile = 0;
+    PyObject *py_funcname = 0;
+    PyObject *py_globals = 0;
+    PyCodeObject *py_code = 0;
+    PyFrameObject *py_frame = 0;
+
+    #if PY_MAJOR_VERSION < 3
+    py_srcfile = PyString_FromString(__pyx_filename);
+    #else
+    py_srcfile = PyUnicode_FromString(__pyx_filename);
+    #endif
+    if (!py_srcfile) goto bad;
+    if (__pyx_clineno) {
+        #if PY_MAJOR_VERSION < 3
+        py_funcname = PyString_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno);
+        #else
+        py_funcname = PyUnicode_FromFormat( "%s (%s:%d)", funcname, __pyx_cfilenm, __pyx_clineno);
+        #endif
+    }
+    else {
+        #if PY_MAJOR_VERSION < 3
+        py_funcname = PyString_FromString(funcname);
+        #else
+        py_funcname = PyUnicode_FromString(funcname);
+        #endif
+    }
+    if (!py_funcname) goto bad;
+    py_globals = PyModule_GetDict(__pyx_m);
+    if (!py_globals) goto bad;
+    py_code = PyCode_New(
+        0,            /*int argcount,*/
+        #if PY_MAJOR_VERSION >= 3
+        0,            /*int kwonlyargcount,*/
+        #endif
+        0,            /*int nlocals,*/
+        0,            /*int stacksize,*/
+        0,            /*int flags,*/
+        __pyx_empty_bytes, /*PyObject *code,*/
+        __pyx_empty_tuple,  /*PyObject *consts,*/
+        __pyx_empty_tuple,  /*PyObject *names,*/
+        __pyx_empty_tuple,  /*PyObject *varnames,*/
+        __pyx_empty_tuple,  /*PyObject *freevars,*/
+        __pyx_empty_tuple,  /*PyObject *cellvars,*/
+        py_srcfile,   /*PyObject *filename,*/
+        py_funcname,  /*PyObject *name,*/
+        __pyx_lineno,   /*int firstlineno,*/
+        __pyx_empty_bytes  /*PyObject *lnotab*/
+    );
+    if (!py_code) goto bad;
+    py_frame = PyFrame_New(
+        PyThreadState_GET(), /*PyThreadState *tstate,*/
+        py_code,             /*PyCodeObject *code,*/
+        py_globals,          /*PyObject *globals,*/
+        0                    /*PyObject *locals*/
+    );
+    if (!py_frame) goto bad;
+    py_frame->f_lineno = __pyx_lineno;
+    PyTraceBack_Here(py_frame);
+bad:
+    Py_XDECREF(py_srcfile);
+    Py_XDECREF(py_funcname);
+    Py_XDECREF(py_code);
+    Py_XDECREF(py_frame);
+}
+
+static int __Pyx_InitStrings(__Pyx_StringTabEntry *t) {
+    while (t->p) {
+        #if PY_MAJOR_VERSION < 3
+        if (t->is_unicode) {
+            *t->p = PyUnicode_DecodeUTF8(t->s, t->n - 1, NULL);
+        } else if (t->intern) {
+            *t->p = PyString_InternFromString(t->s);
+        } else {
+            *t->p = PyString_FromStringAndSize(t->s, t->n - 1);
+        }
+        #else  /* Python 3+ has unicode identifiers */
+        if (t->is_unicode | t->is_str) {
+            if (t->intern) {
+                *t->p = PyUnicode_InternFromString(t->s);
+            } else if (t->encoding) {
+                *t->p = PyUnicode_Decode(t->s, t->n - 1, t->encoding, NULL);
+            } else {
+                *t->p = PyUnicode_FromStringAndSize(t->s, t->n - 1);
+            }
+        } else {
+            *t->p = PyBytes_FromStringAndSize(t->s, t->n - 1);
+        }
+        #endif
+        if (!*t->p)
+            return -1;
+        ++t;
+    }
+    return 0;
+}
+
+/* Type Conversion Functions */
+
+static CYTHON_INLINE int __Pyx_PyObject_IsTrue(PyObject* x) {
+   int is_true = x == Py_True;
+   if (is_true | (x == Py_False) | (x == Py_None)) return is_true;
+   else return PyObject_IsTrue(x);
+}
+
+static CYTHON_INLINE PyObject* __Pyx_PyNumber_Int(PyObject* x) {
+  PyNumberMethods *m;
+  const char *name = NULL;
+  PyObject *res = NULL;
+#if PY_VERSION_HEX < 0x03000000
+  if (PyInt_Check(x) || PyLong_Check(x))
+#else
+  if (PyLong_Check(x))
+#endif
+    return Py_INCREF(x), x;
+  m = Py_TYPE(x)->tp_as_number;
+#if PY_VERSION_HEX < 0x03000000
+  if (m && m->nb_int) {
+    name = "int";
+    res = PyNumber_Int(x);
+  }
+  else if (m && m->nb_long) {
+    name = "long";
+    res = PyNumber_Long(x);
+  }
+#else
+  if (m && m->nb_int) {
+    name = "int";
+    res = PyNumber_Long(x);
+  }
+#endif
+  if (res) {
+#if PY_VERSION_HEX < 0x03000000
+    if (!PyInt_Check(res) && !PyLong_Check(res)) {
+#else
+    if (!PyLong_Check(res)) {
+#endif
+      PyErr_Format(PyExc_TypeError,
+                   "__%s__ returned non-%s (type %.200s)",
+                   name, name, Py_TYPE(res)->tp_name);
+      Py_DECREF(res);
+      return NULL;
+    }
+  }
+  else if (!PyErr_Occurred()) {
+    PyErr_SetString(PyExc_TypeError,
+                    "an integer is required");
+  }
+  return res;
+}
+
+static CYTHON_INLINE Py_ssize_t __Pyx_PyIndex_AsSsize_t(PyObject* b) {
+  Py_ssize_t ival;
+  PyObject* x = PyNumber_Index(b);
+  if (!x) return -1;
+  ival = PyInt_AsSsize_t(x);
+  Py_DECREF(x);
+  return ival;
+}
+
+static CYTHON_INLINE PyObject * __Pyx_PyInt_FromSize_t(size_t ival) {
+#if PY_VERSION_HEX < 0x02050000
+   if (ival <= LONG_MAX)
+       return PyInt_FromLong((long)ival);
+   else {
+       unsigned char *bytes = (unsigned char *) &ival;
+       int one = 1; int little = (int)*(unsigned char*)&one;
+       return _PyLong_FromByteArray(bytes, sizeof(size_t), little, 0);
+   }
+#else
+   return PyInt_FromSize_t(ival);
+#endif
+}
+
+static CYTHON_INLINE size_t __Pyx_PyInt_AsSize_t(PyObject* x) {
+   unsigned PY_LONG_LONG val = __Pyx_PyInt_AsUnsignedLongLong(x);
+   if (unlikely(val == (unsigned PY_LONG_LONG)-1 && PyErr_Occurred())) {
+       return (size_t)-1;
+   } else if (unlikely(val != (unsigned PY_LONG_LONG)(size_t)val)) {
+       PyErr_SetString(PyExc_OverflowError,
+                       "value too large to convert to size_t");
+       return (size_t)-1;
+   }
+   return (size_t)val;
+}
+
+
+#endif /* Py_PYTHON_H */
diff --git a/ThirdParty/Twisted/twisted/test/raiser.pyx b/ThirdParty/Twisted/twisted/test/raiser.pyx
new file mode 100644
index 0000000..820540e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/raiser.pyx
@@ -0,0 +1,21 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A trivial extension that just raises an exception.
+See L{twisted.test.test_failure.test_failureConstructionWithMungedStackSucceeds}.
+"""
+
+
+
+class RaiserException(Exception):
+    """
+    A specific exception used only so it can be identified in tests.
+    """
+
+
+def raiseException():
+    """
+    Raise L{RaiserException}.
+    """
+    raise RaiserException("This function is intentionally broken")
diff --git a/ThirdParty/Twisted/twisted/test/reflect_helper_IE.py b/ThirdParty/Twisted/twisted/test/reflect_helper_IE.py
new file mode 100644
index 0000000..614d948
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/reflect_helper_IE.py
@@ -0,0 +1,4 @@
+
+# Helper for a test_reflect test
+
+import idonotexist
diff --git a/ThirdParty/Twisted/twisted/test/reflect_helper_VE.py b/ThirdParty/Twisted/twisted/test/reflect_helper_VE.py
new file mode 100644
index 0000000..e19507f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/reflect_helper_VE.py
@@ -0,0 +1,4 @@
+
+# Helper for a test_reflect test
+
+raise ValueError("Stuff is broken and things")
diff --git a/ThirdParty/Twisted/twisted/test/reflect_helper_ZDE.py b/ThirdParty/Twisted/twisted/test/reflect_helper_ZDE.py
new file mode 100644
index 0000000..0c53583
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/reflect_helper_ZDE.py
@@ -0,0 +1,4 @@
+
+# Helper module for a test_reflect test
+
+1//0
diff --git a/ThirdParty/Twisted/twisted/test/server.pem b/ThirdParty/Twisted/twisted/test/server.pem
new file mode 100644
index 0000000..80ef9dc
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/server.pem
@@ -0,0 +1,36 @@
+-----BEGIN CERTIFICATE-----
+MIIDBjCCAm+gAwIBAgIBATANBgkqhkiG9w0BAQQFADB7MQswCQYDVQQGEwJTRzER
+MA8GA1UEChMITTJDcnlwdG8xFDASBgNVBAsTC00yQ3J5cHRvIENBMSQwIgYDVQQD
+ExtNMkNyeXB0byBDZXJ0aWZpY2F0ZSBNYXN0ZXIxHTAbBgkqhkiG9w0BCQEWDm5n
+cHNAcG9zdDEuY29tMB4XDTAwMDkxMDA5NTEzMFoXDTAyMDkxMDA5NTEzMFowUzEL
+MAkGA1UEBhMCU0cxETAPBgNVBAoTCE0yQ3J5cHRvMRIwEAYDVQQDEwlsb2NhbGhv
+c3QxHTAbBgkqhkiG9w0BCQEWDm5ncHNAcG9zdDEuY29tMFwwDQYJKoZIhvcNAQEB
+BQADSwAwSAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh
+5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAaOCAQQwggEAMAkGA1UdEwQC
+MAAwLAYJYIZIAYb4QgENBB8WHU9wZW5TU0wgR2VuZXJhdGVkIENlcnRpZmljYXRl
+MB0GA1UdDgQWBBTPhIKSvnsmYsBVNWjj0m3M2z0qVTCBpQYDVR0jBIGdMIGagBT7
+hyNp65w6kxXlxb8pUU/+7Sg4AaF/pH0wezELMAkGA1UEBhMCU0cxETAPBgNVBAoT
+CE0yQ3J5cHRvMRQwEgYDVQQLEwtNMkNyeXB0byBDQTEkMCIGA1UEAxMbTTJDcnlw
+dG8gQ2VydGlmaWNhdGUgTWFzdGVyMR0wGwYJKoZIhvcNAQkBFg5uZ3BzQHBvc3Qx
+LmNvbYIBADANBgkqhkiG9w0BAQQFAAOBgQA7/CqT6PoHycTdhEStWNZde7M/2Yc6
+BoJuVwnW8YxGO8Sn6UJ4FeffZNcYZddSDKosw8LtPOeWoK3JINjAk5jiPQ2cww++
+7QGG/g5NDjxFZNDJP1dGiLAxPW6JXwov4v0FmdzfLOZ01jDcgQQZqEpYlgpuI5JE
+WUQ9Ho4EzbYCOQ==
+-----END CERTIFICATE-----
+-----BEGIN RSA PRIVATE KEY-----
+MIIBPAIBAAJBAKy+e3dulvXzV7zoTZWc5TzgApr8DmeQHTYC8ydfzH7EECe4R1Xh
+5kwIzOuuFfn178FBiS84gngaNcrFi0Z5fAkCAwEAAQJBAIqm/bz4NA1H++Vx5Ewx
+OcKp3w19QSaZAwlGRtsUxrP7436QjnREM3Bm8ygU11BjkPVmtrKm6AayQfCHqJoT
+ZIECIQDW0BoMoL0HOYM/mrTLhaykYAVqgIeJsPjvkEhTFXWBuQIhAM3deFAvWNu4
+nklUQ37XsCT2c9tmNt1LAT+slG2JOTTRAiAuXDtC/m3NYVwyHfFm+zKHRzHkClk2
+HjubeEgjpj32AQIhAJqMGTaZVOwevTXvvHwNEH+vRWsAYU/gbx+OQB+7VOcBAiEA
+oolb6NMg/R3enNPvS1O4UU1H8wpaF77L4yiSWlE0p4w=
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE REQUEST-----
+MIIBDTCBuAIBADBTMQswCQYDVQQGEwJTRzERMA8GA1UEChMITTJDcnlwdG8xEjAQ
+BgNVBAMTCWxvY2FsaG9zdDEdMBsGCSqGSIb3DQEJARYObmdwc0Bwb3N0MS5jb20w
+XDANBgkqhkiG9w0BAQEFAANLADBIAkEArL57d26W9fNXvOhNlZzlPOACmvwOZ5Ad
+NgLzJ1/MfsQQJ7hHVeHmTAjM664V+fXvwUGJLziCeBo1ysWLRnl8CQIDAQABoAAw
+DQYJKoZIhvcNAQEEBQADQQA7uqbrNTjVWpF6By5ZNPvhZ4YdFgkeXFVWi5ao/TaP
+Vq4BG021fJ9nlHRtr4rotpgHDX1rr+iWeHKsx4+5DRSy
+-----END CERTIFICATE REQUEST-----
diff --git a/ThirdParty/Twisted/twisted/test/ssl_helpers.py b/ThirdParty/Twisted/twisted/test/ssl_helpers.py
new file mode 100644
index 0000000..04a55d7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/ssl_helpers.py
@@ -0,0 +1,37 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Helper classes for twisted.test.test_ssl.
+
+They are in a separate module so that test_ssl can still be imported when
+pyOpenSSL is unavailable.
+"""
+from __future__ import division, absolute_import
+
+from twisted.python.compat import nativeString
+from twisted.internet import ssl
+from twisted.python.filepath import FilePath
+
+from OpenSSL import SSL
+
+certPath = nativeString(FilePath(__file__.encode("utf-8")
+                    ).sibling(b"server.pem").path)
+
+
+class ClientTLSContext(ssl.ClientContextFactory):
+    isClient = 1
+    def getContext(self):
+        return SSL.Context(SSL.TLSv1_METHOD)
+
+class ServerTLSContext:
+    isClient = 0
+
+    def __init__(self, filename=certPath):
+        self.filename = filename
+
+    def getContext(self):
+        ctx = SSL.Context(SSL.TLSv1_METHOD)
+        ctx.use_certificate_file(self.filename)
+        ctx.use_privatekey_file(self.filename)
+        return ctx
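
(For orientation: a minimal sketch of how these two context factories are
typically wired into a reactor-based test.  The echo protocol, the factory
names, and the use of an ephemeral port below are illustrative assumptions,
not part of ssl_helpers or test_ssl itself.)

    from twisted.internet import protocol, reactor
    from twisted.test.ssl_helpers import ClientTLSContext, ServerTLSContext

    class Echo(protocol.Protocol):
        # Hypothetical server protocol: echo whatever the client sends.
        def dataReceived(self, data):
            self.transport.write(data)

    serverFactory = protocol.ServerFactory()
    serverFactory.protocol = Echo

    # ServerTLSContext supplies the certificate and key from server.pem
    # via getContext(), which is all listenSSL() requires of it.
    port = reactor.listenSSL(0, serverFactory, ServerTLSContext())

    clientFactory = protocol.ClientFactory()
    clientFactory.protocol = protocol.Protocol

    # ClientTLSContext is a plain ClientContextFactory, as expected by connectSSL().
    reactor.connectSSL('127.0.0.1', port.getHost().port,
                       clientFactory, ClientTLSContext())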
diff --git a/ThirdParty/Twisted/twisted/test/stdio_test_consumer.py b/ThirdParty/Twisted/twisted/test/stdio_test_consumer.py
new file mode 100644
index 0000000..8254387
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/stdio_test_consumer.py
@@ -0,0 +1,39 @@
+# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_consumer -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Main program for the child process run by
+L{twisted.test.test_stdio.StandardInputOutputTestCase.test_consumer} to test
+that process transports implement IConsumer properly.
+"""
+
+import sys, _preamble
+
+from twisted.python import log, reflect
+from twisted.internet import stdio, protocol
+from twisted.protocols import basic
+
+def failed(err):
+    log.startLogging(sys.stderr)
+    log.err(err)
+
+class ConsumerChild(protocol.Protocol):
+    def __init__(self, junkPath):
+        self.junkPath = junkPath
+
+    def connectionMade(self):
+        d = basic.FileSender().beginFileTransfer(file(self.junkPath), self.transport)
+        d.addErrback(failed)
+        d.addCallback(lambda ign: self.transport.loseConnection())
+
+
+    def connectionLost(self, reason):
+        reactor.stop()
+
+
+if __name__ == '__main__':
+    reflect.namedAny(sys.argv[1]).install()
+    from twisted.internet import reactor
+    stdio.StandardIO(ConsumerChild(sys.argv[2]))
+    reactor.run()
diff --git a/ThirdParty/Twisted/twisted/test/stdio_test_halfclose.py b/ThirdParty/Twisted/twisted/test/stdio_test_halfclose.py
new file mode 100644
index 0000000..b80a8f9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/stdio_test_halfclose.py
@@ -0,0 +1,66 @@
+# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_readConnectionLost -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Main program for the child process run by
+L{twisted.test.test_stdio.StandardInputOutputTestCase.test_readConnectionLost}
+to test that IHalfCloseableProtocol.readConnectionLost works for process
+transports.
+"""
+
+import sys, _preamble
+
+from zope.interface import implements
+
+from twisted.internet.interfaces import IHalfCloseableProtocol
+from twisted.internet import stdio, protocol
+from twisted.python import reflect, log
+
+
+class HalfCloseProtocol(protocol.Protocol):
+    """
+    A protocol to hook up to stdio and observe its transport being
+    half-closed.  If all goes as expected, C{exitCode} will be set to C{0};
+    otherwise it will be set to C{1} to indicate failure.
+    """
+    implements(IHalfCloseableProtocol)
+
+    exitCode = None
+
+    def connectionMade(self):
+        """
+        Signal the parent process that we're ready.
+        """
+        self.transport.write("x")
+
+
+    def readConnectionLost(self):
+        """
+        This is the desired event.  Once it has happened, stop the reactor so
+        the process will exit.
+        """
+        self.exitCode = 0
+        reactor.stop()
+
+
+    def connectionLost(self, reason):
+        """
+        This may only be invoked after C{readConnectionLost}.  If it happens
+        otherwise, mark it as an error and shut down.
+        """
+        if self.exitCode is None:
+            self.exitCode = 1
+            log.err(reason, "Unexpected call to connectionLost")
+        reactor.stop()
+
+
+
+if __name__ == '__main__':
+    reflect.namedAny(sys.argv[1]).install()
+    log.startLogging(file(sys.argv[2], 'w'))
+    from twisted.internet import reactor
+    protocol = HalfCloseProtocol()
+    stdio.StandardIO(protocol)
+    reactor.run()
+    sys.exit(protocol.exitCode)
diff --git a/ThirdParty/Twisted/twisted/test/stdio_test_hostpeer.py b/ThirdParty/Twisted/twisted/test/stdio_test_hostpeer.py
new file mode 100644
index 0000000..1e6f014
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/stdio_test_hostpeer.py
@@ -0,0 +1,32 @@
+# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_hostAndPeer -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Main program for the child process run by
+L{twisted.test.test_stdio.StandardInputOutputTestCase.test_hostAndPeer} to test
+that ITransport.getHost() and ITransport.getPeer() work for process transports.
+"""
+
+import sys, _preamble
+
+from twisted.internet import stdio, protocol
+from twisted.python import reflect
+
+class HostPeerChild(protocol.Protocol):
+    def connectionMade(self):
+        self.transport.write('\n'.join([
+            str(self.transport.getHost()),
+            str(self.transport.getPeer())]))
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        reactor.stop()
+
+
+if __name__ == '__main__':
+    reflect.namedAny(sys.argv[1]).install()
+    from twisted.internet import reactor
+    stdio.StandardIO(HostPeerChild())
+    reactor.run()
diff --git a/ThirdParty/Twisted/twisted/test/stdio_test_lastwrite.py b/ThirdParty/Twisted/twisted/test/stdio_test_lastwrite.py
new file mode 100644
index 0000000..2b70514
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/stdio_test_lastwrite.py
@@ -0,0 +1,45 @@
+# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_lastWriteReceived -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Main program for the child process run by
+L{twisted.test.test_stdio.StandardInputOutputTestCase.test_lastWriteReceived}
+to test that L{os.write} can be reliably used after
+L{twisted.internet.stdio.StandardIO} has finished.
+"""
+
+import sys, _preamble
+
+from twisted.internet.protocol import Protocol
+from twisted.internet.stdio import StandardIO
+from twisted.python.reflect import namedAny
+
+
+class LastWriteChild(Protocol):
+    def __init__(self, reactor, magicString):
+        self.reactor = reactor
+        self.magicString = magicString
+
+
+    def connectionMade(self):
+        self.transport.write(self.magicString)
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        self.reactor.stop()
+
+
+
+def main(reactor, magicString):
+    p = LastWriteChild(reactor, magicString)
+    StandardIO(p)
+    reactor.run()
+
+
+
+if __name__ == '__main__':
+    namedAny(sys.argv[1]).install()
+    from twisted.internet import reactor
+    main(reactor, sys.argv[2])
diff --git a/ThirdParty/Twisted/twisted/test/stdio_test_loseconn.py b/ThirdParty/Twisted/twisted/test/stdio_test_loseconn.py
new file mode 100644
index 0000000..7f95a01
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/stdio_test_loseconn.py
@@ -0,0 +1,48 @@
+# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_loseConnection -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Main program for the child process run by
+L{twisted.test.test_stdio.StandardInputOutputTestCase.test_loseConnection} to
+test that ITransport.loseConnection() works for process transports.
+"""
+
+import sys, _preamble
+
+from twisted.internet.error import ConnectionDone
+from twisted.internet import stdio, protocol
+from twisted.python import reflect, log
+
+class LoseConnChild(protocol.Protocol):
+    exitCode = 0
+
+    def connectionMade(self):
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        """
+        Check that C{reason} is a L{Failure} wrapping a L{ConnectionDone}
+        instance and stop the reactor.  If C{reason} is anything else, log the
+        failure and make sure the process exits with a non-zero status.
+        """
+        try:
+            try:
+                reason.trap(ConnectionDone)
+            except:
+                log.err(None, "Problem with reason passed to connectionLost")
+                self.exitCode = 1
+        finally:
+            reactor.stop()
+
+
+if __name__ == '__main__':
+    reflect.namedAny(sys.argv[1]).install()
+    log.startLogging(file(sys.argv[2], 'w'))
+    from twisted.internet import reactor
+    protocol = LoseConnChild()
+    stdio.StandardIO(protocol)
+    reactor.run()
+    sys.exit(protocol.exitCode)
diff --git a/ThirdParty/Twisted/twisted/test/stdio_test_producer.py b/ThirdParty/Twisted/twisted/test/stdio_test_producer.py
new file mode 100644
index 0000000..5c0b501
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/stdio_test_producer.py
@@ -0,0 +1,55 @@
+# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_producer -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Main program for the child process run by
+L{twisted.test.test_stdio.StandardInputOutputTestCase.test_producer} to test
+that process transports implement IProducer properly.
+"""
+
+import sys, _preamble
+
+from twisted.internet import stdio, protocol
+from twisted.python import log, reflect
+
+class ProducerChild(protocol.Protocol):
+    _paused = False
+    buf = ''
+
+    def connectionLost(self, reason):
+        log.msg("*****OVER*****")
+        reactor.callLater(1, reactor.stop)
+        # reactor.stop()
+
+
+    def dataReceived(self, bytes):
+        self.buf += bytes
+        if self._paused:
+            log.startLogging(sys.stderr)
+            log.msg("dataReceived while transport paused!")
+            self.transport.loseConnection()
+        else:
+            self.transport.write(bytes)
+            if self.buf.endswith('\n0\n'):
+                self.transport.loseConnection()
+            else:
+                self.pause()
+
+
+    def pause(self):
+        self._paused = True
+        self.transport.pauseProducing()
+        reactor.callLater(0.01, self.unpause)
+
+
+    def unpause(self):
+        self._paused = False
+        self.transport.resumeProducing()
+
+
+if __name__ == '__main__':
+    reflect.namedAny(sys.argv[1]).install()
+    from twisted.internet import reactor
+    stdio.StandardIO(ProducerChild())
+    reactor.run()
diff --git a/ThirdParty/Twisted/twisted/test/stdio_test_write.py b/ThirdParty/Twisted/twisted/test/stdio_test_write.py
new file mode 100644
index 0000000..9f92c94
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/stdio_test_write.py
@@ -0,0 +1,31 @@
+# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_write -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Main program for the child process run by
+L{twisted.test.test_stdio.StandardInputOutputTestCase.test_write} to test that
+ITransport.write() works for process transports.
+"""
+
+import sys, _preamble
+
+from twisted.internet import stdio, protocol
+from twisted.python import reflect
+
+class WriteChild(protocol.Protocol):
+    def connectionMade(self):
+        for ch in 'ok!':
+            self.transport.write(ch)
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        reactor.stop()
+
+
+if __name__ == '__main__':
+    reflect.namedAny(sys.argv[1]).install()
+    from twisted.internet import reactor
+    stdio.StandardIO(WriteChild())
+    reactor.run()
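
(The stdio_test_* child programs above share one calling convention: sys.argv[1]
names a reactor to install, and any later arguments are per-test parameters.  The
sketch below is a rough, hypothetical parent side; the real driver lives in
twisted.test.test_stdio and differs in detail.  The ChildCollector class and the
selectreactor choice are assumptions made purely for illustration.)

    import sys
    from twisted.internet import protocol, reactor, defer

    class ChildCollector(protocol.ProcessProtocol):
        # Collect the child's stdout and fire a Deferred when it exits.
        def __init__(self):
            self.output = ''
            self.done = defer.Deferred()

        def outReceived(self, data):
            self.output += data

        def processEnded(self, reason):
            self.done.callback(self.output)

    # Spawn the child with the same interpreter; env=None copies our environment.
    proto = ChildCollector()
    reactor.spawnProcess(
        proto, sys.executable,
        [sys.executable, 'stdio_test_write.py', 'twisted.internet.selectreactor'],
        env=None)
    proto.done.addCallback(lambda output: sys.stdout.write(output + '\n'))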
diff --git a/ThirdParty/Twisted/twisted/test/stdio_test_writeseq.py b/ThirdParty/Twisted/twisted/test/stdio_test_writeseq.py
new file mode 100644
index 0000000..aeab716
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/stdio_test_writeseq.py
@@ -0,0 +1,30 @@
+# -*- test-case-name: twisted.test.test_stdio.StandardInputOutputTestCase.test_writeSequence -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Main program for the child process run by
+L{twisted.test.test_stdio.StandardInputOutputTestCase.test_writeSequence} to test that
+ITransport.writeSequence() works for process transports.
+"""
+
+import sys, _preamble
+
+from twisted.internet import stdio, protocol
+from twisted.python import reflect
+
+class WriteSequenceChild(protocol.Protocol):
+    def connectionMade(self):
+        self.transport.writeSequence(list('ok!'))
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        reactor.stop()
+
+
+if __name__ == '__main__':
+    reflect.namedAny(sys.argv[1]).install()
+    from twisted.internet import reactor
+    stdio.StandardIO(WriteSequenceChild())
+    reactor.run()
diff --git a/ThirdParty/Twisted/twisted/test/test_abstract.py b/ThirdParty/Twisted/twisted/test/test_abstract.py
new file mode 100644
index 0000000..eb1d05a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_abstract.py
@@ -0,0 +1,85 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for generic file descriptor based reactor support code.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import TestCase
+
+from twisted.internet.abstract import isIPAddress
+
+
+class AddressTests(TestCase):
+    """
+    Tests for address-related functionality.
+    """
+    def test_decimalDotted(self):
+        """
+        L{isIPAddress} should return C{True} for any decimal dotted
+        representation of an IPv4 address.
+        """
+        self.assertTrue(isIPAddress('0.1.2.3'))
+        self.assertTrue(isIPAddress('252.253.254.255'))
+
+
+    def test_shortDecimalDotted(self):
+        """
+        L{isIPAddress} should return C{False} for a dotted decimal
+        representation with fewer or more than four octets.
+        """
+        self.assertFalse(isIPAddress('0'))
+        self.assertFalse(isIPAddress('0.1'))
+        self.assertFalse(isIPAddress('0.1.2'))
+        self.assertFalse(isIPAddress('0.1.2.3.4'))
+
+
+    def test_invalidLetters(self):
+        """
+        L{isIPAddress} should return C{False} for any non-decimal dotted
+        representation including letters.
+        """
+        self.assertFalse(isIPAddress('a.2.3.4'))
+        self.assertFalse(isIPAddress('1.b.3.4'))
+
+
+    def test_invalidPunctuation(self):
+        """
+        L{isIPAddress} should return C{False} for a string containing
+        strange punctuation.
+        """
+        self.assertFalse(isIPAddress(','))
+        self.assertFalse(isIPAddress('1,2'))
+        self.assertFalse(isIPAddress('1,2,3'))
+        self.assertFalse(isIPAddress('1.,.3,4'))
+
+
+    def test_emptyString(self):
+        """
+        L{isIPAddress} should return C{False} for the empty string.
+        """
+        self.assertFalse(isIPAddress(''))
+
+
+    def test_invalidNegative(self):
+        """
+        L{isIPAddress} should return C{False} for negative decimal values.
+        """
+        self.assertFalse(isIPAddress('-1'))
+        self.assertFalse(isIPAddress('1.-2'))
+        self.assertFalse(isIPAddress('1.2.-3'))
+        self.assertFalse(isIPAddress('1.2.-3.4'))
+
+
+    def test_invalidPositive(self):
+        """
+        L{isIPAddress} should return C{False} for a string containing
+        positive decimal values greater than 255.
+        """
+        self.assertFalse(isIPAddress('256.0.0.0'))
+        self.assertFalse(isIPAddress('0.256.0.0'))
+        self.assertFalse(isIPAddress('0.0.256.0'))
+        self.assertFalse(isIPAddress('0.0.0.256'))
+        self.assertFalse(isIPAddress('256.256.256.256'))
diff --git a/ThirdParty/Twisted/twisted/test/test_adbapi.py b/ThirdParty/Twisted/twisted/test/test_adbapi.py
new file mode 100644
index 0000000..92ff601
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_adbapi.py
@@ -0,0 +1,819 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Tests for twisted.enterprise.adbapi.
+"""
+
+from twisted.trial import unittest
+
+import os, stat
+import types
+
+from twisted.enterprise.adbapi import ConnectionPool, ConnectionLost
+from twisted.enterprise.adbapi import Connection, Transaction
+from twisted.internet import reactor, defer, interfaces
+from twisted.python.failure import Failure
+
+
+simple_table_schema = """
+CREATE TABLE simple (
+  x integer
+)
+"""
+
+
+class ADBAPITestBase:
+    """Test the asynchronous DB-API code."""
+
+    openfun_called = {}
+
+    if interfaces.IReactorThreads(reactor, None) is None:
+        skip = "ADB-API requires threads, no way to test without them"
+
+    def extraSetUp(self):
+        """
+        Set up the database and create a connection pool pointing at it.
+        """
+        self.startDB()
+        self.dbpool = self.makePool(cp_openfun=self.openfun)
+        self.dbpool.start()
+
+
+    def tearDown(self):
+        d =  self.dbpool.runOperation('DROP TABLE simple')
+        d.addCallback(lambda res: self.dbpool.close())
+        d.addCallback(lambda res: self.stopDB())
+        return d
+
+    def openfun(self, conn):
+        self.openfun_called[conn] = True
+
+    def checkOpenfunCalled(self, conn=None):
+        if not conn:
+            self.failUnless(self.openfun_called)
+        else:
+            self.failUnless(self.openfun_called.has_key(conn))
+
+    def testPool(self):
+        d = self.dbpool.runOperation(simple_table_schema)
+        if self.test_failures:
+            d.addCallback(self._testPool_1_1)
+            d.addCallback(self._testPool_1_2)
+            d.addCallback(self._testPool_1_3)
+            d.addCallback(self._testPool_1_4)
+            d.addCallback(lambda res: self.flushLoggedErrors())
+        d.addCallback(self._testPool_2)
+        d.addCallback(self._testPool_3)
+        d.addCallback(self._testPool_4)
+        d.addCallback(self._testPool_5)
+        d.addCallback(self._testPool_6)
+        d.addCallback(self._testPool_7)
+        d.addCallback(self._testPool_8)
+        d.addCallback(self._testPool_9)
+        return d
+
+    def _testPool_1_1(self, res):
+        d = defer.maybeDeferred(self.dbpool.runQuery, "select * from NOTABLE")
+        d.addCallbacks(lambda res: self.fail('no exception'),
+                       lambda f: None)
+        return d
+
+    def _testPool_1_2(self, res):
+        d = defer.maybeDeferred(self.dbpool.runOperation,
+                                "deletexxx from NOTABLE")
+        d.addCallbacks(lambda res: self.fail('no exception'),
+                       lambda f: None)
+        return d
+
+    def _testPool_1_3(self, res):
+        d = defer.maybeDeferred(self.dbpool.runInteraction,
+                                self.bad_interaction)
+        d.addCallbacks(lambda res: self.fail('no exception'),
+                       lambda f: None)
+        return d
+
+    def _testPool_1_4(self, res):
+        d = defer.maybeDeferred(self.dbpool.runWithConnection,
+                                self.bad_withConnection)
+        d.addCallbacks(lambda res: self.fail('no exception'),
+                       lambda f: None)
+        return d
+
+    def _testPool_2(self, res):
+        # verify simple table is empty
+        sql = "select count(1) from simple"
+        d = self.dbpool.runQuery(sql)
+        def _check(row):
+            self.failUnless(int(row[0][0]) == 0, "Interaction not rolled back")
+            self.checkOpenfunCalled()
+        d.addCallback(_check)
+        return d
+
+    def _testPool_3(self, res):
+        sql = "select count(1) from simple"
+        inserts = []
+        # add some rows to simple table (runOperation)
+        for i in range(self.num_iterations):
+            sql = "insert into simple(x) values(%d)" % i
+            inserts.append(self.dbpool.runOperation(sql))
+        d = defer.gatherResults(inserts)
+
+        def _select(res):
+            # make sure they were added (runQuery)
+            sql = "select x from simple order by x";
+            d = self.dbpool.runQuery(sql)
+            return d
+        d.addCallback(_select)
+
+        def _check(rows):
+            self.failUnless(len(rows) == self.num_iterations,
+                            "Wrong number of rows")
+            for i in range(self.num_iterations):
+                self.failUnless(len(rows[i]) == 1, "Wrong size row")
+                self.failUnless(rows[i][0] == i, "Values not returned.")
+        d.addCallback(_check)
+
+        return d
+
+    def _testPool_4(self, res):
+        # runInteraction
+        d = self.dbpool.runInteraction(self.interaction)
+        d.addCallback(lambda res: self.assertEqual(res, "done"))
+        return d
+
+    def _testPool_5(self, res):
+        # withConnection
+        d = self.dbpool.runWithConnection(self.withConnection)
+        d.addCallback(lambda res: self.assertEqual(res, "done"))
+        return d
+
+    def _testPool_6(self, res):
+        # Test a withConnection cannot be closed
+        d = self.dbpool.runWithConnection(self.close_withConnection)
+        return d
+
+    def _testPool_7(self, res):
+        # give the pool a workout
+        ds = []
+        for i in range(self.num_iterations):
+            sql = "select x from simple where x = %d" % i
+            ds.append(self.dbpool.runQuery(sql))
+        dlist = defer.DeferredList(ds, fireOnOneErrback=True)
+        def _check(result):
+            for i in range(self.num_iterations):
+                self.failUnless(result[i][1][0][0] == i, "Value not returned")
+        dlist.addCallback(_check)
+        return dlist
+
+    def _testPool_8(self, res):
+        # now delete everything
+        ds = []
+        for i in range(self.num_iterations):
+            sql = "delete from simple where x = %d" % i
+            ds.append(self.dbpool.runOperation(sql))
+        dlist = defer.DeferredList(ds, fireOnOneErrback=True)
+        return dlist
+
+    def _testPool_9(self, res):
+        # verify simple table is empty
+        sql = "select count(1) from simple"
+        d = self.dbpool.runQuery(sql)
+        def _check(row):
+            self.failUnless(int(row[0][0]) == 0,
+                            "Didn't successfully delete table contents")
+            self.checkConnect()
+        d.addCallback(_check)
+        return d
+
+    def checkConnect(self):
+        """Check the connect/disconnect synchronous calls."""
+        conn = self.dbpool.connect()
+        self.checkOpenfunCalled(conn)
+        curs = conn.cursor()
+        curs.execute("insert into simple(x) values(1)")
+        curs.execute("select x from simple")
+        res = curs.fetchall()
+        self.assertEqual(len(res), 1)
+        self.assertEqual(len(res[0]), 1)
+        self.assertEqual(res[0][0], 1)
+        curs.execute("delete from simple")
+        curs.execute("select x from simple")
+        self.assertEqual(len(curs.fetchall()), 0)
+        curs.close()
+        self.dbpool.disconnect(conn)
+
+    def interaction(self, transaction):
+        transaction.execute("select x from simple order by x")
+        for i in range(self.num_iterations):
+            row = transaction.fetchone()
+            self.failUnless(len(row) == 1, "Wrong size row")
+            self.failUnless(row[0] == i, "Value not returned.")
+        # should test this, but gadfly throws an exception instead
+        #self.failUnless(transaction.fetchone() is None, "Too many rows")
+        return "done"
+
+    def bad_interaction(self, transaction):
+        if self.can_rollback:
+            transaction.execute("insert into simple(x) values(0)")
+
+        transaction.execute("select * from NOTABLE")
+
+    def withConnection(self, conn):
+        curs = conn.cursor()
+        try:
+            curs.execute("select x from simple order by x")
+            for i in range(self.num_iterations):
+                row = curs.fetchone()
+                self.failUnless(len(row) == 1, "Wrong size row")
+                self.failUnless(row[0] == i, "Value not returned.")
+            # should test this, but gadfly throws an exception instead
+            #self.failUnless(transaction.fetchone() is None, "Too many rows")
+        finally:
+            curs.close()
+        return "done"
+
+    def close_withConnection(self, conn):
+        conn.close()
+
+    def bad_withConnection(self, conn):
+        curs = conn.cursor()
+        try:
+            curs.execute("select * from NOTABLE")
+        finally:
+            curs.close()
+
+
+class ReconnectTestBase:
+    """Test the asynchronous DB-API code with reconnect."""
+
+    if interfaces.IReactorThreads(reactor, None) is None:
+        skip = "ADB-API requires threads, no way to test without them"
+
+    def extraSetUp(self):
+        """
+        Skip the test if C{good_sql} is unavailable.  Otherwise, set up the
+        database, create a connection pool pointed at it, and set up a simple
+        schema in it.
+        """
+        if self.good_sql is None:
+            raise unittest.SkipTest('no good sql for reconnect test')
+        self.startDB()
+        self.dbpool = self.makePool(cp_max=1, cp_reconnect=True,
+                                    cp_good_sql=self.good_sql)
+        self.dbpool.start()
+        return self.dbpool.runOperation(simple_table_schema)
+
+
+    def tearDown(self):
+        d = self.dbpool.runOperation('DROP TABLE simple')
+        d.addCallback(lambda res: self.dbpool.close())
+        d.addCallback(lambda res: self.stopDB())
+        return d
+
+    def testPool(self):
+        d = defer.succeed(None)
+        d.addCallback(self._testPool_1)
+        d.addCallback(self._testPool_2)
+        if not self.early_reconnect:
+            d.addCallback(self._testPool_3)
+        d.addCallback(self._testPool_4)
+        d.addCallback(self._testPool_5)
+        return d
+
+    def _testPool_1(self, res):
+        sql = "select count(1) from simple"
+        d = self.dbpool.runQuery(sql)
+        def _check(row):
+            self.failUnless(int(row[0][0]) == 0, "Table not empty")
+        d.addCallback(_check)
+        return d
+
+    def _testPool_2(self, res):
+        # reach in and close the connection manually
+        self.dbpool.connections.values()[0].close()
+
+    def _testPool_3(self, res):
+        sql = "select count(1) from simple"
+        d = defer.maybeDeferred(self.dbpool.runQuery, sql)
+        d.addCallbacks(lambda res: self.fail('no exception'),
+                       lambda f: None)
+        return d
+
+    def _testPool_4(self, res):
+        sql = "select count(1) from simple"
+        d = self.dbpool.runQuery(sql)
+        def _check(row):
+            self.failUnless(int(row[0][0]) == 0, "Table not empty")
+        d.addCallback(_check)
+        return d
+
+    def _testPool_5(self, res):
+        self.flushLoggedErrors()
+        sql = "select * from NOTABLE" # bad sql
+        d = defer.maybeDeferred(self.dbpool.runQuery, sql)
+        d.addCallbacks(lambda res: self.fail('no exception'),
+                       lambda f: self.failIf(f.check(ConnectionLost)))
+        return d
+
+
+class DBTestConnector:
+    """A class which knows how to test for the presence of
+    and establish a connection to a relational database.
+
+    To enable the test cases which use a central, system-wide database,
+    you must create a database named DB_NAME with a user DB_USER
+    and password DB_PASS with full access rights to database DB_NAME.
+    """
+
+    TEST_PREFIX = None # used for creating new test cases
+
+    DB_NAME = "twisted_test"
+    DB_USER = 'twisted_test'
+    DB_PASS = 'twisted_test'
+
+    DB_DIR = None # directory for database storage
+
+    nulls_ok = True # nulls supported
+    trailing_spaces_ok = True # trailing spaces in strings preserved
+    can_rollback = True # rollback supported
+    test_failures = True # test bad sql?
+    escape_slashes = True # escape \ in sql?
+    good_sql = ConnectionPool.good_sql
+    early_reconnect = True # cursor() will fail on closed connection
+    can_clear = True # can try to clear out tables when starting
+
+    num_iterations = 50 # number of iterations for test loops
+                        # (lower this for slow db's)
+
+    def setUp(self):
+        self.DB_DIR = self.mktemp()
+        os.mkdir(self.DB_DIR)
+        if not self.can_connect():
+            raise unittest.SkipTest('%s: Cannot access db' % self.TEST_PREFIX)
+        return self.extraSetUp()
+
+    def can_connect(self):
+        """Return true if this database is present on the system
+        and can be used in a test."""
+        raise NotImplementedError()
+
+    def startDB(self):
+        """Take any steps needed to bring database up."""
+        pass
+
+    def stopDB(self):
+        """Bring database down, if needed."""
+        pass
+
+    def makePool(self, **newkw):
+        """Create a connection pool with additional keyword arguments."""
+        args, kw = self.getPoolArgs()
+        kw = kw.copy()
+        kw.update(newkw)
+        return ConnectionPool(*args, **kw)
+
+    def getPoolArgs(self):
+        """Return a tuple (args, kw) of list and keyword arguments
+        that need to be passed to ConnectionPool to create a connection
+        to this database."""
+        raise NotImplementedError()
+
+class GadflyConnector(DBTestConnector):
+    TEST_PREFIX = 'Gadfly'
+
+    nulls_ok = False
+    can_rollback = False
+    escape_slashes = False
+    good_sql = 'select * from simple where 1=0'
+
+    num_iterations = 1 # slow
+
+    def can_connect(self):
+        try: import gadfly
+        except: return False
+        if not getattr(gadfly, 'connect', None):
+            gadfly.connect = gadfly.gadfly
+        return True
+
+    def startDB(self):
+        import gadfly
+        conn = gadfly.gadfly()
+        conn.startup(self.DB_NAME, self.DB_DIR)
+
+        # gadfly seems to want us to create something to get the db going
+        cursor = conn.cursor()
+        cursor.execute("create table x (x integer)")
+        conn.commit()
+        conn.close()
+
+    def getPoolArgs(self):
+        args = ('gadfly', self.DB_NAME, self.DB_DIR)
+        kw = {'cp_max': 1}
+        return args, kw
+
+class SQLiteConnector(DBTestConnector):
+    TEST_PREFIX = 'SQLite'
+
+    escape_slashes = False
+
+    num_iterations = 1 # slow
+
+    def can_connect(self):
+        try: import sqlite
+        except: return False
+        return True
+
+    def startDB(self):
+        self.database = os.path.join(self.DB_DIR, self.DB_NAME)
+        if os.path.exists(self.database):
+            os.unlink(self.database)
+
+    def getPoolArgs(self):
+        args = ('sqlite',)
+        kw = {'database': self.database, 'cp_max': 1}
+        return args, kw
+
+class PyPgSQLConnector(DBTestConnector):
+    TEST_PREFIX = "PyPgSQL"
+
+    def can_connect(self):
+        try: from pyPgSQL import PgSQL
+        except: return False
+        try:
+            conn = PgSQL.connect(database=self.DB_NAME, user=self.DB_USER,
+                                 password=self.DB_PASS)
+            conn.close()
+            return True
+        except:
+            return False
+
+    def getPoolArgs(self):
+        args = ('pyPgSQL.PgSQL',)
+        kw = {'database': self.DB_NAME, 'user': self.DB_USER,
+              'password': self.DB_PASS, 'cp_min': 0}
+        return args, kw
+
+class PsycopgConnector(DBTestConnector):
+    TEST_PREFIX = 'Psycopg'
+
+    def can_connect(self):
+        try: import psycopg
+        except: return False
+        try:
+            conn = psycopg.connect(database=self.DB_NAME, user=self.DB_USER,
+                                   password=self.DB_PASS)
+            conn.close()
+            return True
+        except:
+            return False
+
+    def getPoolArgs(self):
+        args = ('psycopg',)
+        kw = {'database': self.DB_NAME, 'user': self.DB_USER,
+              'password': self.DB_PASS, 'cp_min': 0}
+        return args, kw
+
+class MySQLConnector(DBTestConnector):
+    TEST_PREFIX = 'MySQL'
+
+    trailing_spaces_ok = False
+    can_rollback = False
+    early_reconnect = False
+
+    def can_connect(self):
+        try: import MySQLdb
+        except: return False
+        try:
+            conn = MySQLdb.connect(db=self.DB_NAME, user=self.DB_USER,
+                                   passwd=self.DB_PASS)
+            conn.close()
+            return True
+        except:
+            return False
+
+    def getPoolArgs(self):
+        args = ('MySQLdb',)
+        kw = {'db': self.DB_NAME, 'user': self.DB_USER, 'passwd': self.DB_PASS}
+        return args, kw
+
+class FirebirdConnector(DBTestConnector):
+    TEST_PREFIX = 'Firebird'
+
+    test_failures = False # failure testing causes problems
+    escape_slashes = False
+    good_sql = None # firebird doesn't handle failed sql well
+    can_clear = False # firebird is not so good
+
+    num_iterations = 5 # slow
+
+    def can_connect(self):
+        try: import kinterbasdb
+        except: return False
+        try:
+            self.startDB()
+            self.stopDB()
+            return True
+        except:
+            return False
+
+
+    def startDB(self):
+        import kinterbasdb
+        self.DB_NAME = os.path.join(self.DB_DIR, DBTestConnector.DB_NAME)
+        os.chmod(self.DB_DIR, stat.S_IRWXU + stat.S_IRWXG + stat.S_IRWXO)
+        sql = 'create database "%s" user "%s" password "%s"'
+        sql %= (self.DB_NAME, self.DB_USER, self.DB_PASS)
+        conn = kinterbasdb.create_database(sql)
+        conn.close()
+
+
+    def getPoolArgs(self):
+        args = ('kinterbasdb',)
+        kw = {'database': self.DB_NAME, 'host': '127.0.0.1',
+              'user': self.DB_USER, 'password': self.DB_PASS}
+        return args, kw
+
+    def stopDB(self):
+        import kinterbasdb
+        conn = kinterbasdb.connect(database=self.DB_NAME,
+                                   host='127.0.0.1', user=self.DB_USER,
+                                   password=self.DB_PASS)
+        conn.drop_database()
+
+def makeSQLTests(base, suffix, globals):
+    """
+    Make a test case for every db connector which can connect.
+
+    @param base: Base class for the test cases.  Additional base classes
+                 will be a DBTestConnector subclass and unittest.TestCase.
+    @param suffix: A suffix used to create test case names.  Prefixes
+                   are defined in the DBTestConnector subclasses.
+    @param globals: The namespace (usually C{globals()}) into which the
+                    generated test case classes are inserted.
+    """
+    connectors = [GadflyConnector, SQLiteConnector, PyPgSQLConnector,
+                  PsycopgConnector, MySQLConnector, FirebirdConnector]
+    for connclass in connectors:
+        name = connclass.TEST_PREFIX + suffix
+        klass = types.ClassType(name, (connclass, base, unittest.TestCase),
+                                base.__dict__)
+        globals[name] = klass
+
+# GadflyADBAPITestCase SQLiteADBAPITestCase PyPgSQLADBAPITestCase
+# PsycopgADBAPITestCase MySQLADBAPITestCase FirebirdADBAPITestCase
+makeSQLTests(ADBAPITestBase, 'ADBAPITestCase', globals())
+
+# GadflyReconnectTestCase SQLiteReconnectTestCase PyPgSQLReconnectTestCase
+# PsycopgReconnectTestCase MySQLReconnectTestCase FirebirdReconnectTestCase
+makeSQLTests(ReconnectTestBase, 'ReconnectTestCase', globals())
+
+
+
+class FakePool(object):
+    """
+    A fake L{ConnectionPool} for tests.
+
+    @ivar connectionFactory: factory for making connections returned by the
+        C{connect} method.
+    @type connectionFactory: any callable
+    """
+    reconnect = True
+    noisy = True
+
+    def __init__(self, connectionFactory):
+        self.connectionFactory = connectionFactory
+
+
+    def connect(self):
+        """
+        Return an instance of C{self.connectionFactory}.
+        """
+        return self.connectionFactory()
+
+
+    def disconnect(self, connection):
+        """
+        Do nothing.
+        """
+
+
+
+class ConnectionTestCase(unittest.TestCase):
+    """
+    Tests for the L{Connection} class.
+    """
+
+    def test_rollbackErrorLogged(self):
+        """
+        If an error happens during rollback, L{ConnectionLost} is raised but
+        the original error is logged.
+        """
+        class ConnectionRollbackRaise(object):
+            def rollback(self):
+                raise RuntimeError("problem!")
+
+        pool = FakePool(ConnectionRollbackRaise)
+        connection = Connection(pool)
+        self.assertRaises(ConnectionLost, connection.rollback)
+        errors = self.flushLoggedErrors(RuntimeError)
+        self.assertEqual(len(errors), 1)
+        self.assertEqual(errors[0].value.args[0], "problem!")
+
+
+
+class TransactionTestCase(unittest.TestCase):
+    """
+    Tests for the L{Transaction} class.
+    """
+
+    def test_reopenLogErrorIfReconnect(self):
+        """
+        If the cursor creation raises an error in L{Transaction.reopen}, it
+        reconnects but logs the error that occurred.
+        """
+        class ConnectionCursorRaise(object):
+            count = 0
+
+            def reconnect(self):
+                pass
+
+            def cursor(self):
+                if self.count == 0:
+                    self.count += 1
+                    raise RuntimeError("problem!")
+
+        pool = FakePool(None)
+        transaction = Transaction(pool, ConnectionCursorRaise())
+        transaction.reopen()
+        errors = self.flushLoggedErrors(RuntimeError)
+        self.assertEqual(len(errors), 1)
+        self.assertEqual(errors[0].value.args[0], "problem!")
+
+
+
+class NonThreadPool(object):
+    def callInThreadWithCallback(self, onResult, f, *a, **kw):
+        success = True
+        try:
+            result = f(*a, **kw)
+        except Exception, e:
+            success = False
+            result = Failure()
+        onResult(success, result)
+
+
+
+class DummyConnectionPool(ConnectionPool):
+    """
+    A testable L{ConnectionPool}.
+    """
+    threadpool = NonThreadPool()
+
+    def __init__(self):
+        """
+        Don't forward init call.
+        """
+        self.reactor = reactor
+
+
+
+class EventReactor(object):
+    """
+    Partial L{IReactorCore} implementation with simple event-related
+    methods.
+
+    @ivar _running: A C{bool} indicating whether the reactor is pretending
+        to have been started already or not.
+
+    @ivar triggers: A C{list} of pending system event triggers.
+    """
+    def __init__(self, running):
+        self._running = running
+        self.triggers = []
+
+
+    def callWhenRunning(self, function):
+        if self._running:
+            function()
+        else:
+            return self.addSystemEventTrigger('after', 'startup', function)
+
+
+    def addSystemEventTrigger(self, phase, event, trigger):
+        handle = (phase, event, trigger)
+        self.triggers.append(handle)
+        return handle
+
+
+    def removeSystemEventTrigger(self, handle):
+        self.triggers.remove(handle)
+
+
+
+class ConnectionPoolTestCase(unittest.TestCase):
+    """
+    Unit tests for L{ConnectionPool}.
+    """
+
+    def test_runWithConnectionRaiseOriginalError(self):
+        """
+        If rollback fails, L{ConnectionPool.runWithConnection} raises the
+        original exception and logs the error from the rollback.
+        """
+        class ConnectionRollbackRaise(object):
+            def __init__(self, pool):
+                pass
+
+            def rollback(self):
+                raise RuntimeError("problem!")
+
+        def raisingFunction(connection):
+            raise ValueError("foo")
+
+        pool = DummyConnectionPool()
+        pool.connectionFactory = ConnectionRollbackRaise
+        d = pool.runWithConnection(raisingFunction)
+        d = self.assertFailure(d, ValueError)
+        def cbFailed(ignored):
+            errors = self.flushLoggedErrors(RuntimeError)
+            self.assertEqual(len(errors), 1)
+            self.assertEqual(errors[0].value.args[0], "problem!")
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_closeLogError(self):
+        """
+        L{ConnectionPool._close} logs exceptions.
+        """
+        class ConnectionCloseRaise(object):
+            def close(self):
+                raise RuntimeError("problem!")
+
+        pool = DummyConnectionPool()
+        pool._close(ConnectionCloseRaise())
+
+        errors = self.flushLoggedErrors(RuntimeError)
+        self.assertEqual(len(errors), 1)
+        self.assertEqual(errors[0].value.args[0], "problem!")
+
+
+    def test_runWithInteractionRaiseOriginalError(self):
+        """
+        If rollback fails, L{ConnectionPool.runInteraction} raises the
+        original exception and logs the error from the rollback.
+        """
+        class ConnectionRollbackRaise(object):
+            def __init__(self, pool):
+                pass
+
+            def rollback(self):
+                raise RuntimeError("problem!")
+
+        class DummyTransaction(object):
+            def __init__(self, pool, connection):
+                pass
+
+        def raisingFunction(transaction):
+            raise ValueError("foo")
+
+        pool = DummyConnectionPool()
+        pool.connectionFactory = ConnectionRollbackRaise
+        pool.transactionFactory = DummyTransaction
+
+        d = pool.runInteraction(raisingFunction)
+        d = self.assertFailure(d, ValueError)
+        def cbFailed(ignored):
+            errors = self.flushLoggedErrors(RuntimeError)
+            self.assertEqual(len(errors), 1)
+            self.assertEqual(errors[0].value.args[0], "problem!")
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_unstartedClose(self):
+        """
+        If L{ConnectionPool.close} is called without L{ConnectionPool.start}
+        having been called, the pool's startup event is cancelled.
+        """
+        reactor = EventReactor(False)
+        pool = ConnectionPool('twisted.test.test_adbapi', cp_reactor=reactor)
+        # There should be a startup trigger waiting.
+        self.assertEqual(reactor.triggers, [('after', 'startup', pool._start)])
+        pool.close()
+        # But not anymore.
+        self.assertFalse(reactor.triggers)
+
+
+    def test_startedClose(self):
+        """
+        If L{ConnectionPool.close} is called after it has been started, but
+        not by its shutdown trigger, the shutdown trigger is cancelled.
+        """
+        reactor = EventReactor(True)
+        pool = ConnectionPool('twisted.test.test_adbapi', cp_reactor=reactor)
+        # There should be a shutdown trigger waiting.
+        self.assertEqual(reactor.triggers, [('during', 'shutdown', pool.finalClose)])
+        pool.close()
+        # But not anymore.
+        self.assertFalse(reactor.triggers)
diff --git a/ThirdParty/Twisted/twisted/test/test_amp.py b/ThirdParty/Twisted/twisted/test/test_amp.py
new file mode 100644
index 0000000..325af40
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_amp.py
@@ -0,0 +1,3178 @@
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.protocols.amp}.
+"""
+
+import datetime
+import decimal
+
+from zope.interface import implements
+from zope.interface.verify import verifyClass, verifyObject
+
+from twisted.python.util import setIDFunction
+from twisted.python import filepath
+from twisted.python.failure import Failure
+from twisted.protocols import amp
+from twisted.trial import unittest
+from twisted.internet import protocol, defer, error, reactor, interfaces
+from twisted.test import iosim
+from twisted.test.proto_helpers import StringTransport
+
+ssl = None
+try:
+    from twisted.internet import ssl
+except ImportError:
+    pass
+
+if ssl and not ssl.supported:
+    ssl = None
+
+if ssl is None:
+    skipSSL = "SSL not available"
+else:
+    skipSSL = None
+
+
+class TestProto(protocol.Protocol):
+    """
+    A trivial protocol for use in testing where a L{Protocol} is expected.
+
+    @ivar instanceId: the id of this instance
+    @ivar onConnLost: deferred that will be fired when the connection is lost
+    @ivar dataToSend: data to send on the protocol
+    """
+
+    instanceCount = 0
+
+    def __init__(self, onConnLost, dataToSend):
+        self.onConnLost = onConnLost
+        self.dataToSend = dataToSend
+        self.instanceId = TestProto.instanceCount
+        TestProto.instanceCount = TestProto.instanceCount + 1
+
+
+    def connectionMade(self):
+        self.data = []
+        self.transport.write(self.dataToSend)
+
+
+    def dataReceived(self, bytes):
+        self.data.append(bytes)
+
+
+    def connectionLost(self, reason):
+        self.onConnLost.callback(self.data)
+
+
+    def __repr__(self):
+        """
+        Custom repr for testing to avoid coupling amp tests with repr from
+        L{Protocol}
+
+        Returns a string which contains a unique identifier that can be looked
+        up using the instanceId property::
+
+            <TestProto #3>
+        """
+        return "<TestProto #%d>" % (self.instanceId,)
+
+
+
+class SimpleSymmetricProtocol(amp.AMP):
+
+    def sendHello(self, text):
+        return self.callRemoteString(
+            "hello",
+            hello=text)
+
+    def amp_HELLO(self, box):
+        return amp.Box(hello=box['hello'])
+
+    def amp_HOWDOYOUDO(self, box):
+        return amp.QuitBox(howdoyoudo='world')
+
+
+
+class UnfriendlyGreeting(Exception):
+    """Greeting was insufficiently kind.
+    """
+
+class DeathThreat(Exception):
+    """Greeting was a death threat.
+    """
+
+class UnknownProtocol(Exception):
+    """Asked to switch to the wrong protocol.
+    """
+
+
+class TransportPeer(amp.Argument):
+    # this serves as some informal documentation for how to get variables from
+    # the protocol or your environment and pass them to methods as arguments.
+    def retrieve(self, d, name, proto):
+        return ''
+
+    def fromStringProto(self, notAString, proto):
+        return proto.transport.getPeer()
+
+    def toBox(self, name, strings, objects, proto):
+        return
+
+
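+# A note on the pattern above (an informal sketch of what the Hello command
+# below relies on): TransportPeer.toBox adds nothing to the outgoing strings,
+# so the 'from' argument never travels over the wire; fromStringProto instead
+# synthesizes the value from proto.transport.getPeer() on the receiving side.
+# The responder sees it as the keyword argument ``From``; wire names such as
+# 'from', 'print', 'dash-arg' and 'underscore_arg' are mapped to usable Python
+# names, as exercised by cmdHello and test_wireFormatRoundTrip further down.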
+
+class Hello(amp.Command):
+
+    commandName = 'hello'
+
+    arguments = [('hello', amp.String()),
+                 ('optional', amp.Boolean(optional=True)),
+                 ('print', amp.Unicode(optional=True)),
+                 ('from', TransportPeer(optional=True)),
+                 ('mixedCase', amp.String(optional=True)),
+                 ('dash-arg', amp.String(optional=True)),
+                 ('underscore_arg', amp.String(optional=True))]
+
+    response = [('hello', amp.String()),
+                ('print', amp.Unicode(optional=True))]
+
+    errors = {UnfriendlyGreeting: 'UNFRIENDLY'}
+
+    fatalErrors = {DeathThreat: 'DEAD'}
+
+class NoAnswerHello(Hello):
+    commandName = Hello.commandName
+    requiresAnswer = False
+
+class FutureHello(amp.Command):
+    commandName = 'hello'
+
+    arguments = [('hello', amp.String()),
+                 ('optional', amp.Boolean(optional=True)),
+                 ('print', amp.Unicode(optional=True)),
+                 ('from', TransportPeer(optional=True)),
+                 ('bonus', amp.String(optional=True)), # addt'l arguments
+                                                       # should generally be
+                                                       # added at the end, and
+                                                       # be optional...
+                 ]
+
+    response = [('hello', amp.String()),
+                ('print', amp.Unicode(optional=True))]
+
+    errors = {UnfriendlyGreeting: 'UNFRIENDLY'}
+
+class WTF(amp.Command):
+    """
+    An example of an invalid command.
+    """
+
+
+class BrokenReturn(amp.Command):
+    """
+    An example of a perfectly good command, but the handler is going to
+    return None...
+    """
+
+    commandName = 'broken_return'
+
+class Goodbye(amp.Command):
+    # commandName left blank on purpose: this tests implicit command names.
+    response = [('goodbye', amp.String())]
+    responseType = amp.QuitBox
+
+class Howdoyoudo(amp.Command):
+    commandName = 'howdoyoudo'
+    # responseType = amp.QuitBox
+
+class WaitForever(amp.Command):
+    commandName = 'wait_forever'
+
+class GetList(amp.Command):
+    commandName = 'getlist'
+    arguments = [('length', amp.Integer())]
+    response = [('body', amp.AmpList([('x', amp.Integer())]))]
+
+class DontRejectMe(amp.Command):
+    commandName = 'dontrejectme'
+    arguments = [
+            ('magicWord', amp.Unicode()),
+            ('list', amp.AmpList([('name', amp.Unicode())], optional=True)),
+            ]
+    response = [('response', amp.Unicode())]
+
+class SecuredPing(amp.Command):
+    # XXX TODO: actually make this refuse to send over an insecure connection
+    response = [('pinged', amp.Boolean())]
+
+class TestSwitchProto(amp.ProtocolSwitchCommand):
+    commandName = 'Switch-Proto'
+
+    arguments = [
+        ('name', amp.String()),
+        ]
+    errors = {UnknownProtocol: 'UNKNOWN'}
+
+class SingleUseFactory(protocol.ClientFactory):
+    def __init__(self, proto):
+        self.proto = proto
+        self.proto.factory = self
+
+    def buildProtocol(self, addr):
+        p, self.proto = self.proto, None
+        return p
+
+    reasonFailed = None
+
+    def clientConnectionFailed(self, connector, reason):
+        self.reasonFailed = reason
+        return
+
+THING_I_DONT_UNDERSTAND = 'gwebol nargo'
+class ThingIDontUnderstandError(Exception):
+    pass
+
+class FactoryNotifier(amp.AMP):
+    factory = None
+    def connectionMade(self):
+        if self.factory is not None:
+            self.factory.theProto = self
+            if hasattr(self.factory, 'onMade'):
+                self.factory.onMade.callback(None)
+
+    def emitpong(self):
+        from twisted.internet.interfaces import ISSLTransport
+        if not ISSLTransport.providedBy(self.transport):
+            raise DeathThreat("only send secure pings over secure channels")
+        return {'pinged': True}
+    SecuredPing.responder(emitpong)
+
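+# Informal note on the idiom used throughout this file:
+# ``SomeCommand.responder(func)`` is called as a plain function right after
+# each responder method definition.  This is equivalent to applying it as a
+# decorator; it marks the method as the responder for that command so that
+# locateResponder() on the enclosing class can find it (see
+# CommandLocatorTests below).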
+
+class SimpleSymmetricCommandProtocol(FactoryNotifier):
+    maybeLater = None
+    def __init__(self, onConnLost=None):
+        amp.AMP.__init__(self)
+        self.onConnLost = onConnLost
+
+    def sendHello(self, text):
+        return self.callRemote(Hello, hello=text)
+
+    def sendUnicodeHello(self, text, translation):
+        return self.callRemote(Hello, hello=text, Print=translation)
+
+    greeted = False
+
+    def cmdHello(self, hello, From, optional=None, Print=None,
+                 mixedCase=None, dash_arg=None, underscore_arg=None):
+        assert From == self.transport.getPeer()
+        if hello == THING_I_DONT_UNDERSTAND:
+            raise ThingIDontUnderstandError()
+        if hello.startswith('fuck'):
+            raise UnfriendlyGreeting("Don't be a dick.")
+        if hello == 'die':
+            raise DeathThreat("aieeeeeeeee")
+        result = dict(hello=hello)
+        if Print is not None:
+            result.update(dict(Print=Print))
+        self.greeted = True
+        return result
+    Hello.responder(cmdHello)
+
+    def cmdGetlist(self, length):
+        return {'body': [dict(x=1)] * length}
+    GetList.responder(cmdGetlist)
+
+    def okiwont(self, magicWord, list=None):
+        if list is None:
+            response = u'list omitted'
+        else:
+            response = u'%s accepted' % (list[0]['name'])
+        return dict(response=response)
+    DontRejectMe.responder(okiwont)
+
+    def waitforit(self):
+        self.waiting = defer.Deferred()
+        return self.waiting
+    WaitForever.responder(waitforit)
+
+    def howdo(self):
+        return dict(howdoyoudo='world')
+    Howdoyoudo.responder(howdo)
+
+    def saybye(self):
+        return dict(goodbye="everyone")
+    Goodbye.responder(saybye)
+
+    def switchToTestProtocol(self, fail=False):
+        if fail:
+            name = 'no-proto'
+        else:
+            name = 'test-proto'
+        p = TestProto(self.onConnLost, SWITCH_CLIENT_DATA)
+        return self.callRemote(
+            TestSwitchProto,
+            SingleUseFactory(p), name=name).addCallback(lambda ign: p)
+
+    def switchit(self, name):
+        if name == 'test-proto':
+            return TestProto(self.onConnLost, SWITCH_SERVER_DATA)
+        raise UnknownProtocol(name)
+    TestSwitchProto.responder(switchit)
+
+    def donothing(self):
+        return None
+    BrokenReturn.responder(donothing)
+
+
+class DeferredSymmetricCommandProtocol(SimpleSymmetricCommandProtocol):
+    def switchit(self, name):
+        if name == 'test-proto':
+            self.maybeLaterProto = TestProto(self.onConnLost, SWITCH_SERVER_DATA)
+            self.maybeLater = defer.Deferred()
+            return self.maybeLater
+        raise UnknownProtocol(name)
+    TestSwitchProto.responder(switchit)
+
+class BadNoAnswerCommandProtocol(SimpleSymmetricCommandProtocol):
+    def badResponder(self, hello, From, optional=None, Print=None,
+                     mixedCase=None, dash_arg=None, underscore_arg=None):
+        """
+        This responder does nothing and forgets to return a dictionary.
+        """
+    NoAnswerHello.responder(badResponder)
+
+class NoAnswerCommandProtocol(SimpleSymmetricCommandProtocol):
+    def goodNoAnswerResponder(self, hello, From, optional=None, Print=None,
+                              mixedCase=None, dash_arg=None, underscore_arg=None):
+        return dict(hello=hello+"-noanswer")
+    NoAnswerHello.responder(goodNoAnswerResponder)
+
+def connectedServerAndClient(ServerClass=SimpleSymmetricProtocol,
+                             ClientClass=SimpleSymmetricProtocol,
+                             *a, **kw):
+    """Returns a 3-tuple: (client, server, pump)
+    """
+    return iosim.connectedServerAndClient(
+        ServerClass, ClientClass,
+        *a, **kw)
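+
+# Typical usage in the tests below (a minimal sketch; compare test_helloWorld):
+#
+#     c, s, p = connectedServerAndClient()
+#     d = c.sendHello('world')
+#     p.flush()    # the pump carries the queued bytes between the two ends
+#     # d has now fired with the parsed response box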
+
+class TotallyDumbProtocol(protocol.Protocol):
+    buf = ''
+    def dataReceived(self, data):
+        self.buf += data
+
+class LiteralAmp(amp.AMP):
+    def __init__(self):
+        self.boxes = []
+
+    def ampBoxReceived(self, box):
+        self.boxes.append(box)
+        return
+
+
+
+class AmpBoxTests(unittest.TestCase):
+    """
+    Test a few essential properties of AMP boxes, mostly with respect to
+    serialization correctness.
+    """
+
+    def test_serializeStr(self):
+        """
+        Make sure that strs serialize to strs.
+        """
+        a = amp.AmpBox(key='value')
+        self.assertEqual(type(a.serialize()), str)
+
+    def test_serializeUnicodeKeyRaises(self):
+        """
+        Verify that TypeError is raised when trying to serialize Unicode keys.
+        """
+        a = amp.AmpBox(**{u'key': 'value'})
+        self.assertRaises(TypeError, a.serialize)
+
+    def test_serializeUnicodeValueRaises(self):
+        """
+        Verify that TypeError is raised when trying to serialize Unicode
+        values.
+        """
+        a = amp.AmpBox(key=u'value')
+        self.assertRaises(TypeError, a.serialize)
+
+
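+# Wire-format reference for the serialization and BinaryBoxProtocol tests
+# below (a descriptive sketch derived from the expected byte strings in those
+# tests): each key and value is prefixed with a 2-byte big-endian length; key
+# lengths must be non-zero and at most 255, value lengths at most 0xffff; a
+# zero-length key ('\x00\x00') terminates the box.  For example,
+# amp.Box({'foo': 'bar'}).serialize() yields '\x00\x03foo\x00\x03bar\x00\x00'.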
+
+class ParsingTest(unittest.TestCase):
+
+    def test_booleanValues(self):
+        """
+        Verify that the Boolean parser parses 'True' and 'False', but nothing
+        else.
+        """
+        b = amp.Boolean()
+        self.assertEqual(b.fromString("True"), True)
+        self.assertEqual(b.fromString("False"), False)
+        self.assertRaises(TypeError, b.fromString, "ninja")
+        self.assertRaises(TypeError, b.fromString, "true")
+        self.assertRaises(TypeError, b.fromString, "TRUE")
+        self.assertEqual(b.toString(True), 'True')
+        self.assertEqual(b.toString(False), 'False')
+
+    def test_pathValueRoundTrip(self):
+        """
+        Verify the 'Path' argument can parse and emit a file path.
+        """
+        fp = filepath.FilePath(self.mktemp())
+        p = amp.Path()
+        s = p.toString(fp)
+        v = p.fromString(s)
+        self.assertNotIdentical(fp, v) # sanity check
+        self.assertEqual(fp, v)
+
+
+    def test_sillyEmptyThing(self):
+        """
+        Test that empty boxes raise an error; they aren't supposed to be sent
+        on purpose.
+        """
+        a = amp.AMP()
+        return self.assertRaises(amp.NoEmptyBoxes, a.ampBoxReceived, amp.Box())
+
+
+    def test_ParsingRoundTrip(self):
+        """
+        Verify that various kinds of data make it through the encode/parse
+        round-trip unharmed.
+        """
+        c, s, p = connectedServerAndClient(ClientClass=LiteralAmp,
+                                           ServerClass=LiteralAmp)
+
+        SIMPLE = ('simple', 'test')
+        CE = ('ceq', ': ')
+        CR = ('crtest', 'test\r')
+        LF = ('lftest', 'hello\n')
+        NEWLINE = ('newline', 'test\r\none\r\ntwo')
+        NEWLINE2 = ('newline2', 'test\r\none\r\n two')
+        BODYTEST = ('body', 'blah\r\n\r\ntesttest')
+
+        testData = [
+            [SIMPLE],
+            [SIMPLE, BODYTEST],
+            [SIMPLE, CE],
+            [SIMPLE, CR],
+            [SIMPLE, CE, CR, LF],
+            [CE, CR, LF],
+            [SIMPLE, NEWLINE, CE, NEWLINE2],
+            [BODYTEST, SIMPLE, NEWLINE]
+            ]
+
+        for test in testData:
+            jb = amp.Box()
+            jb.update(dict(test))
+            jb._sendTo(c)
+            p.flush()
+            self.assertEqual(s.boxes[-1], jb)
+
+
+
+class FakeLocator(object):
+    """
+    This is a fake implementation of the interface implied by
+    L{CommandLocator}.
+    """
+    def __init__(self):
+        """
+        Remember the given keyword arguments as a set of responders.
+        """
+        self.commands = {}
+
+
+    def locateResponder(self, commandName):
+        """
+        Look up and return a function passed as a keyword argument of the given
+        name to the constructor.
+        """
+        return self.commands[commandName]
+
+
+class FakeSender:
+    """
+    This is a fake implementation of the 'box sender' interface implied by
+    L{AMP}.
+    """
+    def __init__(self):
+        """
+        Create a fake sender and initialize the list of received boxes and
+        unhandled errors.
+        """
+        self.sentBoxes = []
+        self.unhandledErrors = []
+        self.expectedErrors = 0
+
+
+    def expectError(self):
+        """
+        Expect one error, so that the test doesn't fail.
+        """
+        self.expectedErrors += 1
+
+
+    def sendBox(self, box):
+        """
+        Accept a box and remember it, but don't deliver it anywhere.
+        """
+        self.sentBoxes.append(box)
+
+
+    def unhandledError(self, failure):
+        """
+        Re-raise unexpected failures instantly for easier debugging; expected
+        failures are recorded in C{unhandledErrors}.
+        """
+        self.expectedErrors -= 1
+        if self.expectedErrors < 0:
+            failure.raiseException()
+        else:
+            self.unhandledErrors.append(failure)
+
+
+
+class CommandDispatchTests(unittest.TestCase):
+    """
+    The AMP CommandDispatcher class converts AMP boxes into commands and
+    responses using the Command.responder decorator.
+
+    Note: Originally, AMP's factoring was such that many tests for this
+    functionality are now implemented as full round-trip tests in L{AMPTest}.
+    Future tests should be written at this level instead, to ensure API
+    compatibility and to provide more granular, readable units of test
+    coverage.
+    """
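+
+    # Box-key conventions exercised by these tests (an informal summary taken
+    # from the assertions below): an outgoing request box carries
+    # _command=<command name> and _ask=<id> along with the serialized
+    # arguments; a success reply carries _answer=<same id> plus the serialized
+    # response; a failure reply carries _error=<same id>, _error_code and
+    # _error_description.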
+
+    def setUp(self):
+        """
+        Create a dispatcher to use.
+        """
+        self.locator = FakeLocator()
+        self.sender = FakeSender()
+        self.dispatcher = amp.BoxDispatcher(self.locator)
+        self.dispatcher.startReceivingBoxes(self.sender)
+
+
+    def test_receivedAsk(self):
+        """
+        L{CommandDispatcher.ampBoxReceived} should locate the appropriate
+        command in its responder lookup, based on the '_ask' key.
+        """
+        received = []
+        def thunk(box):
+            received.append(box)
+            return amp.Box({"hello": "goodbye"})
+        input = amp.Box(_command="hello",
+                        _ask="test-command-id",
+                        hello="world")
+        self.locator.commands['hello'] = thunk
+        self.dispatcher.ampBoxReceived(input)
+        self.assertEqual(received, [input])
+
+
+    def test_sendUnhandledError(self):
+        """
+        L{CommandDispatcher} should relay its unhandled errors in responding to
+        boxes to its boxSender.
+        """
+        err = RuntimeError("something went wrong, oh no")
+        self.sender.expectError()
+        self.dispatcher.unhandledError(Failure(err))
+        self.assertEqual(len(self.sender.unhandledErrors), 1)
+        self.assertEqual(self.sender.unhandledErrors[0].value, err)
+
+
+    def test_unhandledSerializationError(self):
+        """
+        Errors during serialization ought to be relayed to the sender's
+        unhandledError method.
+        """
+        err = RuntimeError("something undefined went wrong")
+        def thunk(result):
+            class BrokenBox(amp.Box):
+                def _sendTo(self, proto):
+                    raise err
+            return BrokenBox()
+        self.locator.commands['hello'] = thunk
+        input = amp.Box(_command="hello",
+                        _ask="test-command-id",
+                        hello="world")
+        self.sender.expectError()
+        self.dispatcher.ampBoxReceived(input)
+        self.assertEqual(len(self.sender.unhandledErrors), 1)
+        self.assertEqual(self.sender.unhandledErrors[0].value, err)
+
+
+    def test_callRemote(self):
+        """
+        L{CommandDispatcher.callRemote} should emit a properly formatted '_ask'
+        box to its boxSender and record an outstanding L{Deferred}.  When a
+        corresponding '_answer' packet is received, the L{Deferred} should be
+        fired, and the results translated via the given L{Command}'s response
+        de-serialization.
+        """
+        D = self.dispatcher.callRemote(Hello, hello='world')
+        self.assertEqual(self.sender.sentBoxes,
+                          [amp.AmpBox(_command="hello",
+                                      _ask="1",
+                                      hello="world")])
+        answers = []
+        D.addCallback(answers.append)
+        self.assertEqual(answers, [])
+        self.dispatcher.ampBoxReceived(amp.AmpBox({'hello': "yay",
+                                                   'print': "ignored",
+                                                   '_answer': "1"}))
+        self.assertEqual(answers, [dict(hello="yay",
+                                         Print=u"ignored")])
+
+
+    def _localCallbackErrorLoggingTest(self, callResult):
+        """
+        Verify that C{callResult} completes with a C{None} result and that an
+        unhandled error has been logged.
+        """
+        finalResult = []
+        callResult.addBoth(finalResult.append)
+
+        self.assertEqual(1, len(self.sender.unhandledErrors))
+        self.assertIsInstance(
+            self.sender.unhandledErrors[0].value, ZeroDivisionError)
+
+        self.assertEqual([None], finalResult)
+
+
+    def test_callRemoteSuccessLocalCallbackErrorLogging(self):
+        """
+        If the last callback on the L{Deferred} returned by C{callRemote} (added
+        by application code calling C{callRemote}) fails, the failure is passed
+        to the sender's C{unhandledError} method.
+        """
+        self.sender.expectError()
+
+        callResult = self.dispatcher.callRemote(Hello, hello='world')
+        callResult.addCallback(lambda result: 1 // 0)
+
+        self.dispatcher.ampBoxReceived(amp.AmpBox({
+                    'hello': "yay", 'print': "ignored", '_answer': "1"}))
+
+        self._localCallbackErrorLoggingTest(callResult)
+
+
+    def test_callRemoteErrorLocalCallbackErrorLogging(self):
+        """
+        Like L{test_callRemoteSuccessLocalCallbackErrorLogging}, but for the
+        case where the L{Deferred} returned by C{callRemote} fails.
+        """
+        self.sender.expectError()
+
+        callResult = self.dispatcher.callRemote(Hello, hello='world')
+        callResult.addErrback(lambda result: 1 // 0)
+
+        self.dispatcher.ampBoxReceived(amp.AmpBox({
+                    '_error': '1', '_error_code': 'bugs',
+                    '_error_description': 'stuff'}))
+
+        self._localCallbackErrorLoggingTest(callResult)
+
+
+
+class SimpleGreeting(amp.Command):
+    """
+    A very simple greeting command that uses a few basic argument types.
+    """
+    commandName = 'simple'
+    arguments = [('greeting', amp.Unicode()),
+                 ('cookie', amp.Integer())]
+    response = [('cookieplus', amp.Integer())]
+
+
+
+class TestLocator(amp.CommandLocator):
+    """
+    A locator which implements a responder to the 'simple' command.
+    """
+    def __init__(self):
+        self.greetings = []
+
+
+    def greetingResponder(self, greeting, cookie):
+        self.greetings.append((greeting, cookie))
+        return dict(cookieplus=cookie + 3)
+    greetingResponder = SimpleGreeting.responder(greetingResponder)
+
+
+
+class OverridingLocator(TestLocator):
+    """
+    A locator which overrides the responder to the 'simple' command.
+    """
+
+    def greetingResponder(self, greeting, cookie):
+        """
+        Return a different cookieplus than L{TestLocator.greetingResponder}.
+        """
+        self.greetings.append((greeting, cookie))
+        return dict(cookieplus=cookie + 4)
+    greetingResponder = SimpleGreeting.responder(greetingResponder)
+
+
+
+class InheritingLocator(OverridingLocator):
+    """
+    This locator should inherit the responder from L{OverridingLocator}.
+    """
+
+
+
+class OverrideLocatorAMP(amp.AMP):
+    def __init__(self):
+        amp.AMP.__init__(self)
+        self.customResponder = object()
+        self.expectations = {"custom": self.customResponder}
+        self.greetings = []
+
+
+    def lookupFunction(self, name):
+        """
+        Override the deprecated lookupFunction function.
+        """
+        if name in self.expectations:
+            result = self.expectations[name]
+            return result
+        else:
+            return super(OverrideLocatorAMP, self).lookupFunction(name)
+
+
+    def greetingResponder(self, greeting, cookie):
+        self.greetings.append((greeting, cookie))
+        return dict(cookieplus=cookie + 3)
+    greetingResponder = SimpleGreeting.responder(greetingResponder)
+
+
+
+
+class CommandLocatorTests(unittest.TestCase):
+    """
+    The CommandLocator should enable users to specify responders to commands as
+    functions that take structured objects, annotated with metadata.
+    """
+
+    def _checkSimpleGreeting(self, locatorClass, expected):
+        """
+        Check that a locator of type C{locatorClass} finds a responder
+        for command named I{simple} and that the found responder answers
+        with the C{expected} result to a C{SimpleGreeting<"ni hao", 5>}
+        command.
+        """
+        locator = locatorClass()
+        responderCallable = locator.locateResponder("simple")
+        result = responderCallable(amp.Box(greeting="ni hao", cookie="5"))
+        def done(values):
+            self.assertEqual(values, amp.AmpBox(cookieplus=str(expected)))
+        return result.addCallback(done)
+
+
+    def test_responderDecorator(self):
+        """
+        A method on a L{CommandLocator} subclass decorated with a L{Command}
+        subclass's L{responder} decorator should be returned from
+        locateResponder, wrapped in logic to serialize and deserialize its
+        arguments.
+        """
+        return self._checkSimpleGreeting(TestLocator, 8)
+
+
+    def test_responderOverriding(self):
+        """
+        L{CommandLocator} subclasses can override a responder inherited from
+        a base class by using the L{Command.responder} decorator to register
+        a new responder method.
+        """
+        return self._checkSimpleGreeting(OverridingLocator, 9)
+
+
+    def test_responderInheritance(self):
+        """
+        Responder lookup follows the same rules as normal method lookup,
+        particularly with respect to inheritance.
+        """
+        return self._checkSimpleGreeting(InheritingLocator, 9)
+
+
+    def test_lookupFunctionDeprecatedOverride(self):
+        """
+        Subclasses which override locateResponder under its old name,
+        lookupFunction, should have the override invoked instead.  (This tests
+        an AMP subclass, because in the version of the code that could invoke
+        this deprecated code path, there was no L{CommandLocator}.)
+        """
+        locator = OverrideLocatorAMP()
+        customResponderObject = self.assertWarns(
+            PendingDeprecationWarning,
+            "Override locateResponder, not lookupFunction.",
+            __file__, lambda : locator.locateResponder("custom"))
+        self.assertEqual(locator.customResponder, customResponderObject)
+        # Make sure upcalling works too
+        normalResponderObject = self.assertWarns(
+            PendingDeprecationWarning,
+            "Override locateResponder, not lookupFunction.",
+            __file__, lambda : locator.locateResponder("simple"))
+        result = normalResponderObject(amp.Box(greeting="ni hao", cookie="5"))
+        def done(values):
+            self.assertEqual(values, amp.AmpBox(cookieplus='8'))
+        return result.addCallback(done)
+
+
+    def test_lookupFunctionDeprecatedInvoke(self):
+        """
+        Invoking locateResponder under its old name, lookupFunction, should
+        emit a deprecation warning, but do the same thing.
+        """
+        locator = TestLocator()
+        responderCallable = self.assertWarns(
+            PendingDeprecationWarning,
+            "Call locateResponder, not lookupFunction.", __file__,
+            lambda : locator.lookupFunction("simple"))
+        result = responderCallable(amp.Box(greeting="ni hao", cookie="5"))
+        def done(values):
+            self.assertEqual(values, amp.AmpBox(cookieplus='8'))
+        return result.addCallback(done)
+
+
+
+SWITCH_CLIENT_DATA = 'Success!'
+SWITCH_SERVER_DATA = 'No, really.  Success.'
+
+
+class BinaryProtocolTests(unittest.TestCase):
+    """
+    Tests for L{amp.BinaryBoxProtocol}.
+
+    @ivar _boxSender: After C{startReceivingBoxes} is called, the L{IBoxSender}
+        which was passed to it.
+    """
+
+    def setUp(self):
+        """
+        Keep track of all boxes received by this test in its capacity as an
+        L{IBoxReceiver} implementor.
+        """
+        self.boxes = []
+        self.data = []
+
+
+    def startReceivingBoxes(self, sender):
+        """
+        Implement L{IBoxReceiver.startReceivingBoxes} to just remember the
+        value passed in.
+        """
+        self._boxSender = sender
+
+
+    def ampBoxReceived(self, box):
+        """
+        A box was received by the protocol.
+        """
+        self.boxes.append(box)
+
+    stopReason = None
+    def stopReceivingBoxes(self, reason):
+        """
+        Record the reason that we stopped receiving boxes.
+        """
+        self.stopReason = reason
+
+
+    # fake ITransport
+    def getPeer(self):
+        return 'no peer'
+
+
+    def getHost(self):
+        return 'no host'
+
+
+    def write(self, data):
+        self.data.append(data)
+
+
+    def test_startReceivingBoxes(self):
+        """
+        When L{amp.BinaryBoxProtocol} is connected to a transport, it calls
+        C{startReceivingBoxes} on its L{IBoxReceiver} with itself as the
+        L{IBoxSender} parameter.
+        """
+        protocol = amp.BinaryBoxProtocol(self)
+        protocol.makeConnection(None)
+        self.assertIdentical(self._boxSender, protocol)
+
+
+    def test_sendBoxInStartReceivingBoxes(self):
+        """
+        The L{IBoxReceiver} which is started when L{amp.BinaryBoxProtocol} is
+        connected to a transport can call C{sendBox} on the L{IBoxSender}
+        passed to it before C{startReceivingBoxes} returns and have that box
+        sent.
+        """
+        class SynchronouslySendingReceiver:
+            def startReceivingBoxes(self, sender):
+                sender.sendBox(amp.Box({'foo': 'bar'}))
+
+        transport = StringTransport()
+        protocol = amp.BinaryBoxProtocol(SynchronouslySendingReceiver())
+        protocol.makeConnection(transport)
+        self.assertEqual(
+            transport.value(),
+            '\x00\x03foo\x00\x03bar\x00\x00')
+
+
+    def test_receiveBoxStateMachine(self):
+        """
+        When a binary box protocol receives:
+            * a key
+            * a value
+            * an empty string
+        it should emit a box and send it to its boxReceiver.
+        """
+        a = amp.BinaryBoxProtocol(self)
+        a.stringReceived("hello")
+        a.stringReceived("world")
+        a.stringReceived("")
+        self.assertEqual(self.boxes, [amp.AmpBox(hello="world")])
+
+
+    def test_firstBoxFirstKeyExcessiveLength(self):
+        """
+        L{amp.BinaryBoxProtocol} drops its connection if the length prefix for
+        the first key it receives is larger than 255.
+        """
+        transport = StringTransport()
+        protocol = amp.BinaryBoxProtocol(self)
+        protocol.makeConnection(transport)
+        protocol.dataReceived('\x01\x00')
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_firstBoxSubsequentKeyExcessiveLength(self):
+        """
+        L{amp.BinaryBoxProtocol} drops its connection if the length prefix for
+        a subsequent key in the first box it receives is larger than 255.
+        """
+        transport = StringTransport()
+        protocol = amp.BinaryBoxProtocol(self)
+        protocol.makeConnection(transport)
+        protocol.dataReceived('\x00\x01k\x00\x01v')
+        self.assertFalse(transport.disconnecting)
+        protocol.dataReceived('\x01\x00')
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_subsequentBoxFirstKeyExcessiveLength(self):
+        """
+        L{amp.BinaryBoxProtocol} drops its connection if the length prefix for
+        the first key in a subsequent box it receives is larger than 255.
+        """
+        transport = StringTransport()
+        protocol = amp.BinaryBoxProtocol(self)
+        protocol.makeConnection(transport)
+        protocol.dataReceived('\x00\x01k\x00\x01v\x00\x00')
+        self.assertFalse(transport.disconnecting)
+        protocol.dataReceived('\x01\x00')
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_excessiveKeyFailure(self):
+        """
+        If L{amp.BinaryBoxProtocol} disconnects because it received a key
+        length prefix which was too large, the L{IBoxReceiver}'s
+        C{stopReceivingBoxes} method is called with a L{TooLong} failure.
+        """
+        protocol = amp.BinaryBoxProtocol(self)
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('\x01\x00')
+        protocol.connectionLost(
+            Failure(error.ConnectionDone("simulated connection done")))
+        self.stopReason.trap(amp.TooLong)
+        self.assertTrue(self.stopReason.value.isKey)
+        self.assertFalse(self.stopReason.value.isLocal)
+        self.assertIdentical(self.stopReason.value.value, None)
+        self.assertIdentical(self.stopReason.value.keyName, None)
+
+
+    def test_unhandledErrorWithTransport(self):
+        """
+        L{amp.BinaryBoxProtocol.unhandledError} logs the failure passed to it
+        and disconnects its transport.
+        """
+        transport = StringTransport()
+        protocol = amp.BinaryBoxProtocol(self)
+        protocol.makeConnection(transport)
+        protocol.unhandledError(Failure(RuntimeError("Fake error")))
+        self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError)))
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_unhandledErrorWithoutTransport(self):
+        """
+        L{amp.BinaryBoxProtocol.unhandledError} completes without error when
+        there is no associated transport.
+        """
+        protocol = amp.BinaryBoxProtocol(self)
+        protocol.makeConnection(StringTransport())
+        protocol.connectionLost(Failure(Exception("Simulated")))
+        protocol.unhandledError(Failure(RuntimeError("Fake error")))
+        self.assertEqual(1, len(self.flushLoggedErrors(RuntimeError)))
+
+
+    def test_receiveBoxData(self):
+        """
+        When a binary box protocol receives the serialized form of an AMP box,
+        it should emit a similar box to its boxReceiver.
+        """
+        a = amp.BinaryBoxProtocol(self)
+        a.dataReceived(amp.Box({"testKey": "valueTest",
+                                "anotherKey": "anotherValue"}).serialize())
+        self.assertEqual(self.boxes,
+                          [amp.Box({"testKey": "valueTest",
+                                    "anotherKey": "anotherValue"})])
+
+
+    def test_receiveLongerBoxData(self):
+        """
+        An L{amp.BinaryBoxProtocol} can receive serialized AMP boxes with
+        values of up to (2 ** 16 - 1) bytes.
+        """
+        length = (2 ** 16 - 1)
+        value = 'x' * length
+        transport = StringTransport()
+        protocol = amp.BinaryBoxProtocol(self)
+        protocol.makeConnection(transport)
+        protocol.dataReceived(amp.Box({'k': value}).serialize())
+        self.assertEqual(self.boxes, [amp.Box({'k': value})])
+        self.assertFalse(transport.disconnecting)
+
+
+    def test_sendBox(self):
+        """
+        When a binary box protocol sends a box, it should emit the serialized
+        bytes of that box to its transport.
+        """
+        a = amp.BinaryBoxProtocol(self)
+        a.makeConnection(self)
+        aBox = amp.Box({"testKey": "valueTest",
+                        "someData": "hello"})
+        a.sendBox(aBox)
+        self.assertEqual(''.join(self.data), aBox.serialize())
+
+
+    def test_connectionLostStopSendingBoxes(self):
+        """
+        When a binary box protocol loses its connection, it should notify its
+        box receiver that it has stopped receiving boxes.
+        """
+        a = amp.BinaryBoxProtocol(self)
+        a.makeConnection(self)
+        connectionFailure = Failure(RuntimeError())
+        a.connectionLost(connectionFailure)
+        self.assertIdentical(self.stopReason, connectionFailure)
+
+
+    def test_protocolSwitch(self):
+        """
+        L{BinaryBoxProtocol} has the capacity to switch to a different protocol
+        on a box boundary.  When a protocol is in the process of switching, it
+        cannot receive traffic.
+        """
+        otherProto = TestProto(None, "outgoing data")
+        test = self
+        class SwitchyReceiver:
+            switched = False
+            def startReceivingBoxes(self, sender):
+                pass
+            def ampBoxReceived(self, box):
+                test.assertFalse(self.switched,
+                                 "Should only receive one box!")
+                self.switched = True
+                a._lockForSwitch()
+                a._switchTo(otherProto)
+        a = amp.BinaryBoxProtocol(SwitchyReceiver())
+        anyOldBox = amp.Box({"include": "lots",
+                             "of": "data"})
+        a.makeConnection(self)
+        # Include a 0-length box at the beginning of the next protocol's data,
+        # to make sure that AMP doesn't eat the data or try to deliver extra
+        # boxes either...
+        moreThanOneBox = anyOldBox.serialize() + "\x00\x00Hello, world!"
+        a.dataReceived(moreThanOneBox)
+        self.assertIdentical(otherProto.transport, self)
+        self.assertEqual("".join(otherProto.data), "\x00\x00Hello, world!")
+        self.assertEqual(self.data, ["outgoing data"])
+        a.dataReceived("more data")
+        self.assertEqual("".join(otherProto.data),
+                          "\x00\x00Hello, world!more data")
+        self.assertRaises(amp.ProtocolSwitched, a.sendBox, anyOldBox)
+
+
+    def test_protocolSwitchEmptyBuffer(self):
+        """
+        After switching to a different protocol, if no extra bytes beyond
+        the switch box were delivered, an empty string is not passed to the
+        switched protocol's C{dataReceived} method.
+        """
+        a = amp.BinaryBoxProtocol(self)
+        a.makeConnection(self)
+        otherProto = TestProto(None, "")
+        a._switchTo(otherProto)
+        self.assertEqual(otherProto.data, [])
+
+
+    def test_protocolSwitchInvalidStates(self):
+        """
+        In order to make sure the protocol never gets any invalid data sent
+        into the middle of a box, it must be locked for switching before it is
+        switched.  It can only be unlocked if the switch failed, and attempting
+        to send a box while it is locked should raise an exception.
+        """
+        a = amp.BinaryBoxProtocol(self)
+        a.makeConnection(self)
+        sampleBox = amp.Box({"some": "data"})
+        a._lockForSwitch()
+        self.assertRaises(amp.ProtocolSwitched, a.sendBox, sampleBox)
+        a._unlockFromSwitch()
+        a.sendBox(sampleBox)
+        self.assertEqual(''.join(self.data), sampleBox.serialize())
+        a._lockForSwitch()
+        otherProto = TestProto(None, "outgoing data")
+        a._switchTo(otherProto)
+        self.assertRaises(amp.ProtocolSwitched, a._unlockFromSwitch)
+
+
+    def test_protocolSwitchLoseConnection(self):
+        """
+        When the protocol is switched, it should notify its nested protocol of
+        disconnection.
+        """
+        class Loser(protocol.Protocol):
+            reason = None
+            def connectionLost(self, reason):
+                self.reason = reason
+        connectionLoser = Loser()
+        a = amp.BinaryBoxProtocol(self)
+        a.makeConnection(self)
+        a._lockForSwitch()
+        a._switchTo(connectionLoser)
+        connectionFailure = Failure(RuntimeError())
+        a.connectionLost(connectionFailure)
+        self.assertEqual(connectionLoser.reason, connectionFailure)
+
+
+    def test_protocolSwitchLoseClientConnection(self):
+        """
+        When the protocol is switched, it should notify its nested client
+        protocol factory of disconnection.
+        """
+        class ClientLoser:
+            reason = None
+            def clientConnectionLost(self, connector, reason):
+                self.reason = reason
+        a = amp.BinaryBoxProtocol(self)
+        connectionLoser = protocol.Protocol()
+        clientLoser = ClientLoser()
+        a.makeConnection(self)
+        a._lockForSwitch()
+        a._switchTo(connectionLoser, clientLoser)
+        connectionFailure = Failure(RuntimeError())
+        a.connectionLost(connectionFailure)
+        self.assertEqual(clientLoser.reason, connectionFailure)
+
+
+
+class AMPTest(unittest.TestCase):
+
+    def test_interfaceDeclarations(self):
+        """
+        The classes in the amp module ought to implement the interfaces that
+        are declared for their benefit.
+        """
+        for interface, implementation in [(amp.IBoxSender, amp.BinaryBoxProtocol),
+                                          (amp.IBoxReceiver, amp.BoxDispatcher),
+                                          (amp.IResponderLocator, amp.CommandLocator),
+                                          (amp.IResponderLocator, amp.SimpleStringLocator),
+                                          (amp.IBoxSender, amp.AMP),
+                                          (amp.IBoxReceiver, amp.AMP),
+                                          (amp.IResponderLocator, amp.AMP)]:
+            self.failUnless(interface.implementedBy(implementation),
+                            "%s does not implements(%s)" % (implementation, interface))
+
+
+    def test_helloWorld(self):
+        """
+        Verify that a simple command can be sent and its response received with
+        the simple low-level string-based API.
+        """
+        c, s, p = connectedServerAndClient()
+        L = []
+        HELLO = 'world'
+        c.sendHello(HELLO).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L[0]['hello'], HELLO)
+
+
+    def test_wireFormatRoundTrip(self):
+        """
+        Verify that mixed-case, underscored and dashed arguments are mapped to
+        their python names properly.
+        """
+        c, s, p = connectedServerAndClient()
+        L = []
+        HELLO = 'world'
+        c.sendHello(HELLO).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L[0]['hello'], HELLO)
+
+
+    def test_helloWorldUnicode(self):
+        """
+        Verify that unicode arguments can be encoded and decoded.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        L = []
+        HELLO = 'world'
+        HELLO_UNICODE = u'wor\u1234ld'
+        c.sendUnicodeHello(HELLO, HELLO_UNICODE).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L[0]['hello'], HELLO)
+        self.assertEqual(L[0]['Print'], HELLO_UNICODE)
+
+
+    def test_callRemoteStringRequiresAnswerFalse(self):
+        """
+        L{BoxDispatcher.callRemoteString} returns C{None} if C{requiresAnswer}
+        is C{False}.
+        """
+        c, s, p = connectedServerAndClient()
+        ret = c.callRemoteString("WTF", requiresAnswer=False)
+        self.assertIdentical(ret, None)
+
+
+    def test_unknownCommandLow(self):
+        """
+        Verify that unknown commands using low-level APIs will be rejected with an
+        error, but will NOT terminate the connection.
+        """
+        c, s, p = connectedServerAndClient()
+        L = []
+        def clearAndAdd(e):
+            """
+            You can't propagate the error...
+            """
+            e.trap(amp.UnhandledCommand)
+            return "OK"
+        c.callRemoteString("WTF").addErrback(clearAndAdd).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L.pop(), "OK")
+        HELLO = 'world'
+        c.sendHello(HELLO).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L[0]['hello'], HELLO)
+
+
+    def test_unknownCommandHigh(self):
+        """
+        Verify that unknown commands using high-level APIs will be rejected with an
+        error, but will NOT terminate the connection.
+        """
+        c, s, p = connectedServerAndClient()
+        L = []
+        def clearAndAdd(e):
+            """
+            You can't propagate the error...
+            """
+            e.trap(amp.UnhandledCommand)
+            return "OK"
+        c.callRemote(WTF).addErrback(clearAndAdd).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L.pop(), "OK")
+        HELLO = 'world'
+        c.sendHello(HELLO).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L[0]['hello'], HELLO)
+
+
+    def test_brokenReturnValue(self):
+        """
+        It can be very confusing if you write some code which responds to a
+        command, but gets the return value wrong.  Most commonly you end up
+        returning None instead of a dictionary.
+
+        Verify that if that happens, the framework logs a useful error.
+        """
+        L = []
+        SimpleSymmetricCommandProtocol().dispatchCommand(
+            amp.AmpBox(_command=BrokenReturn.commandName)).addErrback(L.append)
+        L[0].trap(amp.BadLocalReturn)
+        self.failUnlessIn('None', repr(L[0].value))
+
+
+    def test_unknownArgument(self):
+        """
+        Verify that unknown arguments are ignored, and not passed to a Python
+        function which can't accept them.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        L = []
+        HELLO = 'world'
+        # c.sendHello(HELLO).addCallback(L.append)
+        c.callRemote(FutureHello,
+                     hello=HELLO,
+                     bonus="I'm not in the book!").addCallback(
+            L.append)
+        p.flush()
+        self.assertEqual(L[0]['hello'], HELLO)
+
+
+    def test_simpleReprs(self):
+        """
+        Verify that the various Box objects repr properly, for debugging.
+        """
+        self.assertEqual(type(repr(amp._SwitchBox('a'))), str)
+        self.assertEqual(type(repr(amp.QuitBox())), str)
+        self.assertEqual(type(repr(amp.AmpBox())), str)
+        self.failUnless("AmpBox" in repr(amp.AmpBox()))
+
+
+    def test_innerProtocolInRepr(self):
+        """
+        Verify that L{AMP} objects output their innerProtocol when set.
+        """
+        otherProto = TestProto(None, "outgoing data")
+        a = amp.AMP()
+        a.innerProtocol = otherProto
+        def fakeID(obj):
+            return {a: 0x1234}.get(obj, id(obj))
+        self.addCleanup(setIDFunction, setIDFunction(fakeID))
+
+        self.assertEqual(
+            repr(a), "<AMP inner <TestProto #%d> at 0x1234>" % (
+                otherProto.instanceId,))
+
+
+    def test_innerProtocolNotInRepr(self):
+        """
+        Verify that L{AMP} objects do not output 'inner' when no innerProtocol
+        is set.
+        """
+        a = amp.AMP()
+        def fakeID(obj):
+            return {a: 0x4321}.get(obj, id(obj))
+        self.addCleanup(setIDFunction, setIDFunction(fakeID))
+        self.assertEqual(repr(a), "<AMP at 0x4321>")
+
+
+    def test_simpleSSLRepr(self):
+        """
+        L{amp._TLSBox.__repr__} returns a string.
+        """
+        self.assertEqual(type(repr(amp._TLSBox())), str)
+
+    test_simpleSSLRepr.skip = skipSSL
+
+
+    def test_keyTooLong(self):
+        """
+        Verify that a key that is too long will immediately raise a synchronous
+        exception.
+        """
+        c, s, p = connectedServerAndClient()
+        x = "H" * (0xff+1)
+        tl = self.assertRaises(amp.TooLong,
+                               c.callRemoteString, "Hello",
+                               **{x: "hi"})
+        self.assertTrue(tl.isKey)
+        self.assertTrue(tl.isLocal)
+        self.assertIdentical(tl.keyName, None)
+        self.assertEqual(tl.value, x)
+        self.assertIn(str(len(x)), repr(tl))
+        self.assertIn("key", repr(tl))
+
+
+    def test_valueTooLong(self):
+        """
+        Verify that attempting to send a value longer than 64k will immediately
+        raise an exception.
+        """
+        c, s, p = connectedServerAndClient()
+        x = "H" * (0xffff+1)
+        tl = self.assertRaises(amp.TooLong, c.sendHello, x)
+        p.flush()
+        self.failIf(tl.isKey)
+        self.failUnless(tl.isLocal)
+        self.assertEqual(tl.keyName, 'hello')
+        self.failUnlessIdentical(tl.value, x)
+        self.failUnless(str(len(x)) in repr(tl))
+        self.failUnless("value" in repr(tl))
+        self.failUnless('hello' in repr(tl))
+
+
+    def test_helloWorldCommand(self):
+        """
+        Verify that a simple command can be sent and its response received with
+        the high-level value parsing API.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        L = []
+        HELLO = 'world'
+        c.sendHello(HELLO).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L[0]['hello'], HELLO)
+
+
+    def test_helloErrorHandling(self):
+        """
+        Verify that if a known error type is raised and handled, it will be
+        properly relayed to the other end of the connection and translated into
+        an exception, and no error will be logged.
+        """
+        L=[]
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        HELLO = 'fuck you'
+        c.sendHello(HELLO).addErrback(L.append)
+        p.flush()
+        L[0].trap(UnfriendlyGreeting)
+        self.assertEqual(str(L[0].value), "Don't be a dick.")
+
+
+    def test_helloFatalErrorHandling(self):
+        """
+        Verify that if a known, fatal error type is raised and handled, it will
+        be properly relayed to the other end of the connection and translated
+        into an exception, no error will be logged, and the connection will be
+        terminated.
+        """
+        L=[]
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        HELLO = 'die'
+        c.sendHello(HELLO).addErrback(L.append)
+        p.flush()
+        L.pop().trap(DeathThreat)
+        c.sendHello(HELLO).addErrback(L.append)
+        p.flush()
+        L.pop().trap(error.ConnectionDone)
+
+
+
+    def test_helloNoErrorHandling(self):
+        """
+        Verify that if an unknown error type is raised, it will be relayed to
+        the other end of the connection and translated into an exception, it
+        will be logged, and then the connection will be dropped.
+        """
+        L=[]
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        HELLO = THING_I_DONT_UNDERSTAND
+        c.sendHello(HELLO).addErrback(L.append)
+        p.flush()
+        ure = L.pop()
+        ure.trap(amp.UnknownRemoteError)
+        c.sendHello(HELLO).addErrback(L.append)
+        cl = L.pop()
+        cl.trap(error.ConnectionDone)
+        # The exception should have been logged.
+        self.failUnless(self.flushLoggedErrors(ThingIDontUnderstandError))
+
+
+
+    def test_lateAnswer(self):
+        """
+        Verify that a command that does not get answered until after the
+        connection terminates will not cause any errors.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        L = []
+        c.callRemote(WaitForever).addErrback(L.append)
+        p.flush()
+        self.assertEqual(L, [])
+        s.transport.loseConnection()
+        p.flush()
+        L.pop().trap(error.ConnectionDone)
+        # Just make sure that it doesn't error...
+        s.waiting.callback({})
+        return s.waiting
+
+
+    def test_requiresNoAnswer(self):
+        """
+        Verify that a command that requires no answer is run.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        HELLO = 'world'
+        c.callRemote(NoAnswerHello, hello=HELLO)
+        p.flush()
+        self.failUnless(s.greeted)
+
+
+    def test_requiresNoAnswerFail(self):
+        """
+        Verify that commands sent after a failed no-answer request do not complete.
+        """
+        L=[]
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        HELLO = 'fuck you'
+        c.callRemote(NoAnswerHello, hello=HELLO)
+        p.flush()
+        # This should be logged locally.
+        self.failUnless(self.flushLoggedErrors(amp.RemoteAmpError))
+        HELLO = 'world'
+        c.callRemote(Hello, hello=HELLO).addErrback(L.append)
+        p.flush()
+        L.pop().trap(error.ConnectionDone)
+        self.failIf(s.greeted)
+
+
+    def test_noAnswerResponderBadAnswer(self):
+        """
+        Verify that responders of requiresAnswer=False commands have to return
+        a dictionary anyway.
+
+        (requiresAnswer is a hint from the _client_ - the server may be called
+        upon to answer commands in any case, if the client wants to know when
+        they complete.)
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=BadNoAnswerCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        c.callRemote(NoAnswerHello, hello="hello")
+        p.flush()
+        le = self.flushLoggedErrors(amp.BadLocalReturn)
+        self.assertEqual(len(le), 1)
+
+
+    def test_noAnswerResponderAskedForAnswer(self):
+        """
+        Verify that responders with requiresAnswer=False will actually respond
+        if the client sets requiresAnswer=True.  In other words, verify that
+        requiresAnswer is a hint honored only by the client.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=NoAnswerCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        L = []
+        c.callRemote(Hello, hello="Hello!").addCallback(L.append)
+        p.flush()
+        self.assertEqual(len(L), 1)
+        self.assertEqual(L, [dict(hello="Hello!-noanswer",
+                                   Print=None)])  # Optional response argument
+
+
+    def test_ampListCommand(self):
+        """
+        Test encoding of an argument that uses the AmpList encoding.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        L = []
+        c.callRemote(GetList, length=10).addCallback(L.append)
+        p.flush()
+        values = L.pop().get('body')
+        self.assertEqual(values, [{'x': 1}] * 10)
+
+
+    def test_optionalAmpListOmitted(self):
+        """
+        Sending a command with an omitted AmpList argument that is
+        designated as optional does not raise an InvalidSignature error.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        L = []
+        c.callRemote(DontRejectMe, magicWord=u'please').addCallback(L.append)
+        p.flush()
+        response = L.pop().get('response')
+        self.assertEqual(response, 'list omitted')
+
+
+    def test_optionalAmpListPresent(self):
+        """
+        Sanity check that optional AmpList arguments are processed normally.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+        L = []
+        c.callRemote(DontRejectMe, magicWord=u'please',
+                list=[{'name': 'foo'}]).addCallback(L.append)
+        p.flush()
+        response = L.pop().get('response')
+        self.assertEqual(response, 'foo accepted')
+
+
+    def test_failEarlyOnArgSending(self):
+        """
+        Verify that if we pass an invalid argument list (omitting an argument),
+        an exception will be raised.
+        """
+        self.assertRaises(amp.InvalidSignature, Hello)
+
+
+    def test_doubleProtocolSwitch(self):
+        """
+        As a debugging aid, a protocol system should raise a
+        L{ProtocolSwitched} exception when asked to switch a protocol that is
+        already switched.
+        """
+        serverDeferred = defer.Deferred()
+        serverProto = SimpleSymmetricCommandProtocol(serverDeferred)
+        clientDeferred = defer.Deferred()
+        clientProto = SimpleSymmetricCommandProtocol(clientDeferred)
+        c, s, p = connectedServerAndClient(ServerClass=lambda: serverProto,
+                                           ClientClass=lambda: clientProto)
+        def switched(result):
+            self.assertRaises(amp.ProtocolSwitched, c.switchToTestProtocol)
+            self.testSucceeded = True
+        c.switchToTestProtocol().addCallback(switched)
+        p.flush()
+        self.failUnless(self.testSucceeded)
+
+
+    def test_protocolSwitch(self, switcher=SimpleSymmetricCommandProtocol,
+                            spuriousTraffic=False,
+                            spuriousError=False):
+        """
+        Verify that it is possible to switch to another protocol mid-connection and
+        send data to it successfully.
+        """
+        self.testSucceeded = False
+
+        serverDeferred = defer.Deferred()
+        serverProto = switcher(serverDeferred)
+        clientDeferred = defer.Deferred()
+        clientProto = switcher(clientDeferred)
+        c, s, p = connectedServerAndClient(ServerClass=lambda: serverProto,
+                                           ClientClass=lambda: clientProto)
+
+        if spuriousTraffic:
+            wfdr = []           # remote
+            c.callRemote(WaitForever).addErrback(wfdr.append)
+        switchDeferred = c.switchToTestProtocol()
+        if spuriousTraffic:
+            self.assertRaises(amp.ProtocolSwitched, c.sendHello, 'world')
+
+        def cbConnsLost(((serverSuccess, serverData),
+                         (clientSuccess, clientData))):
+            self.failUnless(serverSuccess)
+            self.failUnless(clientSuccess)
+            self.assertEqual(''.join(serverData), SWITCH_CLIENT_DATA)
+            self.assertEqual(''.join(clientData), SWITCH_SERVER_DATA)
+            self.testSucceeded = True
+
+        def cbSwitch(proto):
+            return defer.DeferredList(
+                [serverDeferred, clientDeferred]).addCallback(cbConnsLost)
+
+        switchDeferred.addCallback(cbSwitch)
+        p.flush()
+        if serverProto.maybeLater is not None:
+            serverProto.maybeLater.callback(serverProto.maybeLaterProto)
+            p.flush()
+        if spuriousTraffic:
+            # The switch is done by now; send this traffic here to make sure
+            # that if we're going to corrupt the connection, we do it before
+            # it's closed.
+            if spuriousError:
+                s.waiting.errback(amp.RemoteAmpError(
+                        "SPURIOUS",
+                        "Here's some traffic in the form of an error."))
+            else:
+                s.waiting.callback({})
+            p.flush()
+        c.transport.loseConnection() # close it
+        p.flush()
+        self.failUnless(self.testSucceeded)
+
+
+    def test_protocolSwitchDeferred(self):
+        """
+        Verify that protocol-switching even works if the value returned from
+        the command that does the switch is deferred.
+        """
+        return self.test_protocolSwitch(switcher=DeferredSymmetricCommandProtocol)
+
+
+    def test_protocolSwitchFail(self, switcher=SimpleSymmetricCommandProtocol):
+        """
+        Verify that if we try to switch protocols and it fails, the connection
+        stays up and we can go back to speaking AMP.
+        """
+        self.testSucceeded = False
+
+        serverDeferred = defer.Deferred()
+        serverProto = switcher(serverDeferred)
+        clientDeferred = defer.Deferred()
+        clientProto = switcher(clientDeferred)
+        c, s, p = connectedServerAndClient(ServerClass=lambda: serverProto,
+                                           ClientClass=lambda: clientProto)
+        L = []
+        c.switchToTestProtocol(fail=True).addErrback(L.append)
+        p.flush()
+        L.pop().trap(UnknownProtocol)
+        self.failIf(self.testSucceeded)
+        # It's a known error, so let's send a "hello" on the same connection;
+        # it should work.
+        c.sendHello('world').addCallback(L.append)
+        p.flush()
+        self.assertEqual(L.pop()['hello'], 'world')
+
+
+    def test_trafficAfterSwitch(self):
+        """
+        Verify that attempts to send traffic after a switch will not corrupt
+        the nested protocol.
+        """
+        return self.test_protocolSwitch(spuriousTraffic=True)
+
+
+    def test_errorAfterSwitch(self):
+        """
+        Returning an error after a protocol switch should record the underlying
+        error.
+        """
+        return self.test_protocolSwitch(spuriousTraffic=True,
+                                        spuriousError=True)
+
+
+    def test_quitBoxQuits(self):
+        """
+        Verify that commands with a responseType of QuitBox will in fact
+        terminate the connection.
+        """
+        c, s, p = connectedServerAndClient(
+            ServerClass=SimpleSymmetricCommandProtocol,
+            ClientClass=SimpleSymmetricCommandProtocol)
+
+        L = []
+        HELLO = 'world'
+        GOODBYE = 'everyone'
+        c.sendHello(HELLO).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L.pop()['hello'], HELLO)
+        c.callRemote(Goodbye).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L.pop()['goodbye'], GOODBYE)
+        c.sendHello(HELLO).addErrback(L.append)
+        L.pop().trap(error.ConnectionDone)
+
+
+    def test_basicLiteralEmit(self):
+        """
+        Verify that the command dictionaries for a callRemoteN look correct
+        after being serialized and parsed.
+        """
+        c, s, p = connectedServerAndClient()
+        L = []
+        s.ampBoxReceived = L.append
+        c.callRemote(Hello, hello='hello test', mixedCase='mixed case arg test',
+                     dash_arg='x', underscore_arg='y')
+        p.flush()
+        self.assertEqual(len(L), 1)
+        for k, v in [('_command', Hello.commandName),
+                     ('hello', 'hello test'),
+                     ('mixedCase', 'mixed case arg test'),
+                     ('dash-arg', 'x'),
+                     ('underscore_arg', 'y')]:
+            self.assertEqual(L[-1].pop(k), v)
+        L[-1].pop('_ask')
+        self.assertEqual(L[-1], {})
+
+
+    def test_basicStructuredEmit(self):
+        """
+        Verify that a call similar to basicLiteralEmit's is handled properly with
+        high-level quoting and passing to Python methods, and that argument
+        names are correctly handled.
+        """
+        L = []
+        class StructuredHello(amp.AMP):
+            def h(self, *a, **k):
+                L.append((a, k))
+                return dict(hello='aaa')
+            Hello.responder(h)
+        c, s, p = connectedServerAndClient(ServerClass=StructuredHello)
+        c.callRemote(Hello, hello='hello test', mixedCase='mixed case arg test',
+                     dash_arg='x', underscore_arg='y').addCallback(L.append)
+        p.flush()
+        self.assertEqual(len(L), 2)
+        self.assertEqual(L[0],
+                          ((), dict(
+                    hello='hello test',
+                    mixedCase='mixed case arg test',
+                    dash_arg='x',
+                    underscore_arg='y',
+
+                    # XXX - should optional arguments just not be passed?
+                    # passing None seems a little odd, looking at the way it
+                    # turns out here... -glyph
+                    From=('file', 'file'),
+                    Print=None,
+                    optional=None,
+                    )))
+        self.assertEqual(L[1], dict(Print=None, hello='aaa'))
+
+class PretendRemoteCertificateAuthority:
+    def checkIsPretendRemote(self):
+        return True
+
+class IOSimCert:
+    verifyCount = 0
+
+    def options(self, *ign):
+        return self
+
+    def iosimVerify(self, otherCert):
+        """
+        This isn't a real certificate, and wouldn't work on a real socket, but
+        iosim specifies a different API so that we don't have to do any crypto
+        math to demonstrate that the right functions get called in the right
+        places.
+        """
+        assert otherCert is self
+        self.verifyCount += 1
+        return True
+
+class OKCert(IOSimCert):
+    def options(self, x):
+        assert x.checkIsPretendRemote()
+        return self
+
+class GrumpyCert(IOSimCert):
+    def iosimVerify(self, otherCert):
+        self.verifyCount += 1
+        return False
+
+class DroppyCert(IOSimCert):
+    def __init__(self, toDrop):
+        self.toDrop = toDrop
+
+    def iosimVerify(self, otherCert):
+        self.verifyCount += 1
+        self.toDrop.loseConnection()
+        return True
+
+class SecurableProto(FactoryNotifier):
+
+    factory = None
+
+    def verifyFactory(self):
+        return [PretendRemoteCertificateAuthority()]
+
+    def getTLSVars(self):
+        cert = self.certFactory()
+        verify = self.verifyFactory()
+        return dict(
+            tls_localCertificate=cert,
+            tls_verifyAuthorities=verify)
+    amp.StartTLS.responder(getTLSVars)
+
+
+
+class TLSTest(unittest.TestCase):
+    def test_startingTLS(self):
+        """
+        Verify that starting TLS and succeeding at handshaking sends all the
+        notifications to all the right places.
+        """
+        cli, svr, p = connectedServerAndClient(
+            ServerClass=SecurableProto,
+            ClientClass=SecurableProto)
+
+        okc = OKCert()
+        svr.certFactory = lambda : okc
+
+        cli.callRemote(
+            amp.StartTLS, tls_localCertificate=okc,
+            tls_verifyAuthorities=[PretendRemoteCertificateAuthority()])
+
+        # let's buffer something to be delivered securely
+        L = []
+        cli.callRemote(SecuredPing).addCallback(L.append)
+        p.flush()
+        # once for client once for server
+        self.assertEqual(okc.verifyCount, 2)
+        L = []
+        cli.callRemote(SecuredPing).addCallback(L.append)
+        p.flush()
+        self.assertEqual(L[0], {'pinged': True})
+
+
+    def test_startTooManyTimes(self):
+        """
+        Verify that the protocol will complain if we attempt to renegotiate TLS,
+        which we don't support.
+        """
+        cli, svr, p = connectedServerAndClient(
+            ServerClass=SecurableProto,
+            ClientClass=SecurableProto)
+
+        okc = OKCert()
+        svr.certFactory = lambda : okc
+
+        cli.callRemote(amp.StartTLS,
+                       tls_localCertificate=okc,
+                       tls_verifyAuthorities=[PretendRemoteCertificateAuthority()])
+        p.flush()
+        cli.noPeerCertificate = True # this is totally fake
+        self.assertRaises(
+            amp.OnlyOneTLS,
+            cli.callRemote,
+            amp.StartTLS,
+            tls_localCertificate=okc,
+            tls_verifyAuthorities=[PretendRemoteCertificateAuthority()])
+
+
+    def test_negotiationFailed(self):
+        """
+        Verify that starting TLS and failing on both sides at handshaking sends
+        notifications to all the right places and terminates the connection.
+        """
+
+        badCert = GrumpyCert()
+
+        cli, svr, p = connectedServerAndClient(
+            ServerClass=SecurableProto,
+            ClientClass=SecurableProto)
+        svr.certFactory = lambda : badCert
+
+        cli.callRemote(amp.StartTLS,
+                       tls_localCertificate=badCert)
+
+        p.flush()
+        # once for client once for server - but both fail
+        self.assertEqual(badCert.verifyCount, 2)
+        d = cli.callRemote(SecuredPing)
+        p.flush()
+        self.assertFailure(d, iosim.NativeOpenSSLError)
+
+
+    def test_negotiationFailedByClosing(self):
+        """
+        Verify that when TLS negotiation fails by way of a lost connection,
+        the failure is reported as a probable SSL problem.
+        """
+
+        cli, svr, p = connectedServerAndClient(
+            ServerClass=SecurableProto,
+            ClientClass=SecurableProto)
+        droppyCert = DroppyCert(svr.transport)
+        svr.certFactory = lambda : droppyCert
+
+        cli.callRemote(amp.StartTLS, tls_localCertificate=droppyCert)
+
+        p.flush()
+
+        self.assertEqual(droppyCert.verifyCount, 2)
+
+        d = cli.callRemote(SecuredPing)
+        p.flush()
+
+        # it might be a good idea to move this exception somewhere more
+        # reasonable.
+        self.assertFailure(d, error.PeerVerifyError)
+
+    skip = skipSSL
+
+
+
+class TLSNotAvailableTest(unittest.TestCase):
+    """
+    Tests what happens when SSL is not available in the current installation.
+    """
+
+    def setUp(self):
+        """
+        Disable ssl in amp.
+        """
+        self.ssl = amp.ssl
+        amp.ssl = None
+
+
+    def tearDown(self):
+        """
+        Restore ssl module.
+        """
+        amp.ssl = self.ssl
+
+
+    def test_callRemoteError(self):
+        """
+        Check that callRemote raises an exception when called with a
+        L{amp.StartTLS}.
+        """
+        cli, svr, p = connectedServerAndClient(
+            ServerClass=SecurableProto,
+            ClientClass=SecurableProto)
+
+        okc = OKCert()
+        svr.certFactory = lambda : okc
+
+        return self.assertFailure(cli.callRemote(
+            amp.StartTLS, tls_localCertificate=okc,
+            tls_verifyAuthorities=[PretendRemoteCertificateAuthority()]),
+            RuntimeError)
+
+
+    def test_messageReceivedError(self):
+        """
+        When a server without SSL support receives a StartTLS command, it
+        should return a meaningful error.
+        """
+        svr = SecurableProto()
+        okc = OKCert()
+        svr.certFactory = lambda : okc
+        box = amp.Box()
+        box['_command'] = 'StartTLS'
+        box['_ask'] = '1'
+        boxes = []
+        svr.sendBox = boxes.append
+        svr.makeConnection(StringTransport())
+        svr.ampBoxReceived(box)
+        self.assertEqual(boxes,
+            [{'_error_code': 'TLS_ERROR',
+              '_error': '1',
+              '_error_description': 'TLS not available'}])
+
+
+
+class InheritedError(Exception):
+    """
+    This error is used to check inheritance.
+    """
+
+
+
+class OtherInheritedError(Exception):
+    """
+    This is a distinct error for checking inheritance.
+    """
+
+
+
+class BaseCommand(amp.Command):
+    """
+    This provides a command that will be subclassed.
+    """
+    errors = {InheritedError: 'INHERITED_ERROR'}
+
+
+
+class InheritedCommand(BaseCommand):
+    """
+    This is a command which subclasses another command but does not override
+    anything.
+    """
+
+
+
+class AddErrorsCommand(BaseCommand):
+    """
+    This is a command which subclasses another command but adds errors to the
+    list.
+    """
+    arguments = [('other', amp.Boolean())]
+    errors = {OtherInheritedError: 'OTHER_INHERITED_ERROR'}
+
+
+
+class NormalCommandProtocol(amp.AMP):
+    """
+    This is a protocol which responds to L{BaseCommand}, and is used to test
+    that inheritance does not interfere with the normal handling of errors.
+    """
+    def resp(self):
+        raise InheritedError()
+    BaseCommand.responder(resp)
+
+
+
+class InheritedCommandProtocol(amp.AMP):
+    """
+    This is a protocol which responds to L{InheritedCommand}, and is used to
+    test that inherited commands inherit their bases' errors if they do not
+    define any of their own.
+    """
+    def resp(self):
+        raise InheritedError()
+    InheritedCommand.responder(resp)
+
+
+
+class AddedCommandProtocol(amp.AMP):
+    """
+    This is a protocol which responds to L{AddErrorsCommand}, and is used to
+    test that inherited commands can add their own new types of errors, but
+    still respond in the same way to their parents' types of errors.
+    """
+    def resp(self, other):
+        if other:
+            raise OtherInheritedError()
+        else:
+            raise InheritedError()
+    AddErrorsCommand.responder(resp)
+
+
+
+class CommandInheritanceTests(unittest.TestCase):
+    """
+    These tests verify that commands inherit error conditions properly.
+    """
+
+    def errorCheck(self, err, proto, cmd, **kw):
+        """
+        Check that the appropriate kind of error is raised when a given command
+        is sent to a given protocol.
+        """
+        c, s, p = connectedServerAndClient(ServerClass=proto,
+                                           ClientClass=proto)
+        d = c.callRemote(cmd, **kw)
+        d2 = self.failUnlessFailure(d, err)
+        p.flush()
+        return d2
+
+
+    def test_basicErrorPropagation(self):
+        """
+        Verify that errors specified in a superclass are respected normally
+        even if it has subclasses.
+        """
+        return self.errorCheck(
+            InheritedError, NormalCommandProtocol, BaseCommand)
+
+
+    def test_inheritedErrorPropagation(self):
+        """
+        Verify that errors specified in a superclass command are propagated to
+        its subclasses.
+        """
+        return self.errorCheck(
+            InheritedError, InheritedCommandProtocol, InheritedCommand)
+
+
+    def test_inheritedErrorAddition(self):
+        """
+        Verify that new errors specified in a subclass of an existing command
+        are honored even if the superclass defines some errors.
+        """
+        return self.errorCheck(
+            OtherInheritedError, AddedCommandProtocol, AddErrorsCommand, other=True)
+
+
+    def test_additionWithOriginalError(self):
+        """
+        Verify that errors specified in a command's superclass are respected
+        even if that command defines new errors itself.
+        """
+        return self.errorCheck(
+            InheritedError, AddedCommandProtocol, AddErrorsCommand, other=False)
+
+
+def _loseAndPass(err, proto):
+    # be specific, pass on the error to the client.
+    err.trap(error.ConnectionLost, error.ConnectionDone)
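+    # Deleting the instance attribute assigned in LiveFireBase.tearDown
+    # exposes the class's own connectionLost again, which is then called
+    # with the failure.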
+    del proto.connectionLost
+    proto.connectionLost(err)
+
+
+class LiveFireBase:
+    """
+    Utility for connected reactor-using tests.
+    """
+
+    def setUp(self):
+        """
+        Create an amp server and connect a client to it.
+        """
+        from twisted.internet import reactor
+        self.serverFactory = protocol.ServerFactory()
+        self.serverFactory.protocol = self.serverProto
+        self.clientFactory = protocol.ClientFactory()
+        self.clientFactory.protocol = self.clientProto
+        self.clientFactory.onMade = defer.Deferred()
+        self.serverFactory.onMade = defer.Deferred()
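+        # Listen on port 0 so the OS picks a free ephemeral port; the client
+        # connects to it below using getHost().port.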
+        self.serverPort = reactor.listenTCP(0, self.serverFactory)
+        self.addCleanup(self.serverPort.stopListening)
+        self.clientConn = reactor.connectTCP(
+            '127.0.0.1', self.serverPort.getHost().port,
+            self.clientFactory)
+        self.addCleanup(self.clientConn.disconnect)
+        def getProtos(rlst):
+            self.cli = self.clientFactory.theProto
+            self.svr = self.serverFactory.theProto
+        dl = defer.DeferredList([self.clientFactory.onMade,
+                                 self.serverFactory.onMade])
+        return dl.addCallback(getProtos)
+
+    def tearDown(self):
+        """
+        Clean up the client and server connections, and check the error
+        received at C{connectionLost}.
+        """
+        L = []
+        for conn in self.cli, self.svr:
+            if conn.transport is not None:
+                # depend on amp's connection-dropping behavior
+                d = defer.Deferred().addErrback(_loseAndPass, conn)
+                conn.connectionLost = d.errback
+                conn.transport.loseConnection()
+                L.append(d)
+        return defer.gatherResults(L
+            ).addErrback(lambda first: first.value.subFailure)
+
+
+def show(x):
+    import sys
+    sys.stdout.write(x+'\n')
+    sys.stdout.flush()
+
+
+def tempSelfSigned():
+    from twisted.internet import ssl
+
+    sharedDN = ssl.DN(CN='shared')
+    key = ssl.KeyPair.generate()
+    cr = key.certificateRequest(sharedDN)
+    sscrd = key.signCertificateRequest(
+        sharedDN, cr, lambda dn: True, 1234567)
+    cert = key.newCertificate(sscrd)
+    return cert
+
+if ssl is not None:
+    tempcert = tempSelfSigned()
+
+
+class LiveFireTLSTestCase(LiveFireBase, unittest.TestCase):
+    clientProto = SecurableProto
+    serverProto = SecurableProto
+    def test_liveFireCustomTLS(self):
+        """
+        Using real, live TLS, actually negotiate a connection.
+
+        This also looks at the 'peerCertificate' attribute's correctness, since
+        that's actually loaded using OpenSSL calls, but the main purpose is to
+        make sure that we didn't miss anything obvious in iosim about TLS
+        negotiations.
+        """
+
+        cert = tempcert
+
+        self.svr.verifyFactory = lambda : [cert]
+        self.svr.certFactory = lambda : cert
+        # only needed on the server, we specify the client below.
+
+        def secured(rslt):
+            x = cert.digest()
+            def pinged(rslt2):
+                # Interesting.  OpenSSL won't even _tell_ us about the peer
+                # cert until we negotiate.  We should be able to do this in
+                # 'secured' instead, but it looks like we can't.  I think this
+                # is a bug somewhere far deeper than here.
+                self.assertEqual(x, self.cli.hostCertificate.digest())
+                self.assertEqual(x, self.cli.peerCertificate.digest())
+                self.assertEqual(x, self.svr.hostCertificate.digest())
+                self.assertEqual(x, self.svr.peerCertificate.digest())
+            return self.cli.callRemote(SecuredPing).addCallback(pinged)
+        return self.cli.callRemote(amp.StartTLS,
+                                   tls_localCertificate=cert,
+                                   tls_verifyAuthorities=[cert]).addCallback(secured)
+
+    skip = skipSSL
+
+
+
+class SlightlySmartTLS(SimpleSymmetricCommandProtocol):
+    """
+    A specific implementation of the server-side protocol with different
+    management of TLS.
+    """
+    def getTLSVars(self):
+        """
+        @return: the global C{tempcert} certificate as the local certificate.
+        """
+        return dict(tls_localCertificate=tempcert)
+    amp.StartTLS.responder(getTLSVars)
+
+
+class PlainVanillaLiveFire(LiveFireBase, unittest.TestCase):
+
+    clientProto = SimpleSymmetricCommandProtocol
+    serverProto = SimpleSymmetricCommandProtocol
+
+    def test_liveFireDefaultTLS(self):
+        """
+        Verify that out of the box, we can start TLS to at least encrypt the
+        connection, even if we don't have any certificates to use.
+        """
+        def secured(result):
+            return self.cli.callRemote(SecuredPing)
+        return self.cli.callRemote(amp.StartTLS).addCallback(secured)
+
+    skip = skipSSL
+
+
+
+class WithServerTLSVerification(LiveFireBase, unittest.TestCase):
+    clientProto = SimpleSymmetricCommandProtocol
+    serverProto = SlightlySmartTLS
+
+    def test_anonymousVerifyingClient(self):
+        """
+        Verify that anonymous clients can verify server certificates.
+        """
+        def secured(result):
+            return self.cli.callRemote(SecuredPing)
+        return self.cli.callRemote(amp.StartTLS,
+                                   tls_verifyAuthorities=[tempcert]
+            ).addCallback(secured)
+
+    skip = skipSSL
+
+
+
+class ProtocolIncludingArgument(amp.Argument):
+    """
+    An L{amp.Argument} which encodes its parser and serializer
+    arguments *including the protocol* into its parsed and serialized
+    forms.
+    """
+
+    def fromStringProto(self, string, protocol):
+        """
+        Don't decode anything; just return all possible information.
+
+        @return: A two-tuple of the input string and the protocol.
+        """
+        return (string, protocol)
+
+    def toStringProto(self, obj, protocol):
+        """
+        Encode identifying information about C{obj} and C{protocol}
+        into a string for later verification.
+
+        @type obj: L{object}
+        @type protocol: L{amp.AMP}
+        """
+        return "%s:%s" % (id(obj), id(protocol))
+
+
+
+class ProtocolIncludingCommand(amp.Command):
+    """
+    A command that has argument and response schemas which use
+    L{ProtocolIncludingArgument}.
+    """
+    arguments = [('weird', ProtocolIncludingArgument())]
+    response = [('weird', ProtocolIncludingArgument())]
+
+
+
+class MagicSchemaCommand(amp.Command):
+    """
+    A command which overrides L{parseResponse}, L{parseArguments}, and
+    L{makeResponse}.
+    """
+    def parseResponse(self, strings, protocol):
+        """
+        Don't do any parsing, just jam the input strings and protocol
+        onto the C{protocol.parseResponseArguments} attribute as a
+        two-tuple. Return the original strings.
+        """
+        protocol.parseResponseArguments = (strings, protocol)
+        return strings
+    parseResponse = classmethod(parseResponse)
+
+
+    def parseArguments(cls, strings, protocol):
+        """
+        Don't do any parsing, just jam the input strings and protocol
+        onto the C{protocol.parseArgumentsArguments} attribute as a
+        two-tuple. Return the original strings.
+        """
+        protocol.parseArgumentsArguments = (strings, protocol)
+        return strings
+    parseArguments = classmethod(parseArguments)
+
+
+    def makeArguments(cls, objects, protocol):
+        """
+        Don't do any serializing, just jam the input objects and protocol
+        onto the C{protocol.makeArgumentsArguments} attribute as a
+        two-tuple. Return the original objects.
+        """
+        protocol.makeArgumentsArguments = (objects, protocol)
+        return objects
+    makeArguments = classmethod(makeArguments)
+
+
+
+class NoNetworkProtocol(amp.AMP):
+    """
+    An L{amp.AMP} subclass which overrides private methods to avoid
+    testing the network. It also provides a responder for
+    L{MagicSchemaCommand} that does nothing, so that tests can test
+    aspects of the interaction of L{amp.Command}s and L{amp.AMP}.
+
+    @ivar parseArgumentsArguments: Arguments that have been passed to any
+    L{MagicSchemaCommand}, if L{MagicSchemaCommand} has been handled by
+    this protocol.
+
+    @ivar parseResponseArguments: Responses that have been returned from a
+    L{MagicSchemaCommand}, if L{MagicSchemaCommand} has been handled by
+    this protocol.
+
+    @ivar makeArgumentsArguments: Arguments that have been serialized by any
+    L{MagicSchemaCommand}, if L{MagicSchemaCommand} has been handled by
+    this protocol.
+    """
+    def _sendBoxCommand(self, commandName, strings, requiresAnswer):
+        """
+        Return a Deferred which fires with the original strings.
+        """
+        return defer.succeed(strings)
+
+    MagicSchemaCommand.responder(lambda s, weird: {})
+
+
+
+class MyBox(dict):
+    """
+    A unique dict subclass.
+    """
+
+
+
+class ProtocolIncludingCommandWithDifferentCommandType(
+    ProtocolIncludingCommand):
+    """
+    A L{ProtocolIncludingCommand} subclass whose commandType is L{MyBox}.
+    """
+    commandType = MyBox
+
+
+
+class CommandTestCase(unittest.TestCase):
+    """
+    Tests for L{amp.Argument} and L{amp.Command}.
+    """
+    def test_argumentInterface(self):
+        """
+        L{Argument} instances provide L{amp.IArgumentType}.
+        """
+        self.assertTrue(verifyObject(amp.IArgumentType, amp.Argument()))
+
+
+    def test_parseResponse(self):
+        """
+        There should be a class method of Command which accepts a
+        mapping of argument names to serialized forms and returns a
+        similar mapping whose values have been parsed via the
+        Command's response schema.
+        """
+        protocol = object()
+        result = 'whatever'
+        strings = {'weird': result}
+        self.assertEqual(
+            ProtocolIncludingCommand.parseResponse(strings, protocol),
+            {'weird': (result, protocol)})
+
+
+    def test_callRemoteCallsParseResponse(self):
+        """
+        Making a remote call on a L{amp.Command} subclass which
+        overrides the C{parseResponse} method should call that
+        C{parseResponse} method to get the response.
+        """
+        client = NoNetworkProtocol()
+        thingy = "weeoo"
+        response = client.callRemote(MagicSchemaCommand, weird=thingy)
+        def gotResponse(ign):
+            self.assertEqual(client.parseResponseArguments,
+                              ({"weird": thingy}, client))
+        response.addCallback(gotResponse)
+        return response
+
+
+    def test_parseArguments(self):
+        """
+        There should be a class method of L{amp.Command} which accepts
+        a mapping of argument names to serialized forms and returns a
+        similar mapping whose values have been parsed via the
+        command's argument schema.
+        """
+        protocol = object()
+        result = 'whatever'
+        strings = {'weird': result}
+        self.assertEqual(
+            ProtocolIncludingCommand.parseArguments(strings, protocol),
+            {'weird': (result, protocol)})
+
+
+    def test_responderCallsParseArguments(self):
+        """
+        Making a remote call on a L{amp.Command} subclass which
+        overrides the C{parseArguments} method should call that
+        C{parseArguments} method to get the arguments.
+        """
+        protocol = NoNetworkProtocol()
+        responder = protocol.locateResponder(MagicSchemaCommand.commandName)
+        argument = object()
+        response = responder(dict(weird=argument))
+        response.addCallback(
+            lambda ign: self.assertEqual(protocol.parseArgumentsArguments,
+                                         ({"weird": argument}, protocol)))
+        return response
+
+
+    def test_makeArguments(self):
+        """
+        There should be a class method of L{amp.Command} which accepts
+        a mapping of argument names to objects and returns a similar
+        mapping whose values have been serialized via the command's
+        argument schema.
+        """
+        protocol = object()
+        argument = object()
+        objects = {'weird': argument}
+        self.assertEqual(
+            ProtocolIncludingCommand.makeArguments(objects, protocol),
+            {'weird': "%d:%d" % (id(argument), id(protocol))})
+
+
+    def test_makeArgumentsUsesCommandType(self):
+        """
+        The type of the box returned by L{amp.Command.makeArguments} should
+        be the command's C{commandType}.
+        """
+        protocol = object()
+        objects = {"weird": "whatever"}
+
+        result = ProtocolIncludingCommandWithDifferentCommandType.makeArguments(
+            objects, protocol)
+        self.assertIdentical(type(result), MyBox)
+
+
+    def test_callRemoteCallsMakeArguments(self):
+        """
+        Making a remote call on a L{amp.Command} subclass which
+        overrides the C{makeArguments} method should call that
+        C{makeArguments} method to get the response.
+        """
+        client = NoNetworkProtocol()
+        argument = object()
+        response = client.callRemote(MagicSchemaCommand, weird=argument)
+        def gotResponse(ign):
+            self.assertEqual(client.makeArgumentsArguments,
+                             ({"weird": argument}, client))
+        response.addCallback(gotResponse)
+        return response
+
+
+    def test_extraArgumentsDisallowed(self):
+        """
+        L{Command.makeArguments} raises L{amp.InvalidSignature} if the objects
+        dictionary passed to it includes a key which does not correspond to the
+        Python identifier for a defined argument.
+        """
+        self.assertRaises(
+            amp.InvalidSignature,
+            Hello.makeArguments,
+            dict(hello="hello", bogusArgument=object()), None)
+
+
+    def test_wireSpellingDisallowed(self):
+        """
+        If a command argument conflicts with a Python keyword, the
+        untransformed argument name is not allowed as a key in the dictionary
+        passed to L{Command.makeArguments}.  If it is supplied,
+        L{amp.InvalidSignature} is raised.
+
+        This may be a pointless implementation restriction which may be lifted.
+        The current behavior is tested to verify that such arguments are not
+        silently dropped on the floor (the previous behavior).
+        """
+        self.assertRaises(
+            amp.InvalidSignature,
+            Hello.makeArguments,
+            dict(hello="required", **{"print": "print value"}),
+            None)
+
+
+class ListOfTestsMixin:
+    """
+    Base class for testing L{ListOf}, a parameterized zero-or-more argument
+    type.
+
+    @ivar elementType: Subclasses should set this to an L{Argument}
+        instance.  The tests will make a L{ListOf} using this.
+
+    @ivar strings: Subclasses should set this to a dictionary mapping some
+        number of keys to the correct serialized form for some example
+        values.  These should agree with what L{elementType}
+        produces/accepts.
+
+    @ivar objects: Subclasses should set this to a dictionary with the same
+        keys as C{strings} and with values which are the lists which should
+        serialize to the values in the C{strings} dictionary.
+    """
+    def test_toBox(self):
+        """
+        L{ListOf.toBox} extracts the list of objects from the C{objects}
+        dictionary passed to it, using the C{name} key also passed to it,
+        serializes each of the elements in that list using the L{Argument}
+        instance previously passed to its initializer, combines the serialized
+        results, and inserts the result into the C{strings} dictionary using
+        the same C{name} key.
+        """
+        stringList = amp.ListOf(self.elementType)
+        strings = amp.AmpBox()
+        for key in self.objects:
+            stringList.toBox(key, strings, self.objects.copy(), None)
+        self.assertEqual(strings, self.strings)
+
+
+    def test_fromBox(self):
+        """
+        L{ListOf.fromBox} reverses the operation performed by L{ListOf.toBox}.
+        """
+        stringList = amp.ListOf(self.elementType)
+        objects = {}
+        for key in self.strings:
+            stringList.fromBox(key, self.strings.copy(), objects, None)
+        self.assertEqual(objects, self.objects)
+
+
+
+class ListOfStringsTests(unittest.TestCase, ListOfTestsMixin):
+    """
+    Tests for L{ListOf} combined with L{amp.String}.
+    """
+    elementType = amp.String()
+
+    strings = {
+        "empty": "",
+        "single": "\x00\x03foo",
+        "multiple": "\x00\x03bar\x00\x03baz\x00\x04quux"}
+
+    objects = {
+        "empty": [],
+        "single": ["foo"],
+        "multiple": ["bar", "baz", "quux"]}
+
+
+class ListOfIntegersTests(unittest.TestCase, ListOfTestsMixin):
+    """
+    Tests for L{ListOf} combined with L{amp.Integer}.
+    """
+    elementType = amp.Integer()
+
+    huge = (
+        9999999999999999999999999999999999999999999999999999999999 *
+        9999999999999999999999999999999999999999999999999999999999)
+
+    strings = {
+        "empty": "",
+        "single": "\x00\x0210",
+        "multiple": "\x00\x011\x00\x0220\x00\x03500",
+        "huge": "\x00\x74%d" % (huge,),
+        "negative": "\x00\x02-1"}
+
+    objects = {
+        "empty": [],
+        "single": [10],
+        "multiple": [1, 20, 500],
+        "huge": [huge],
+        "negative": [-1]}
+
+
+
+class ListOfUnicodeTests(unittest.TestCase, ListOfTestsMixin):
+    """
+    Tests for L{ListOf} combined with L{amp.Unicode}.
+    """
+    elementType = amp.Unicode()
+
+    strings = {
+        "empty": "",
+        "single": "\x00\x03foo",
+        "multiple": "\x00\x03\xe2\x98\x83\x00\x05Hello\x00\x05world"}
+
+    objects = {
+        "empty": [],
+        "single": [u"foo"],
+        "multiple": [u"\N{SNOWMAN}", u"Hello", u"world"]}
+
+
+
+class ListOfDecimalTests(unittest.TestCase, ListOfTestsMixin):
+    """
+    Tests for L{ListOf} combined with L{amp.Decimal}.
+    """
+    elementType = amp.Decimal()
+
+    strings = {
+        "empty": "",
+        "single": "\x00\x031.1",
+        "extreme": "\x00\x08Infinity\x00\x09-Infinity",
+        "scientist": "\x00\x083.141E+5\x00\x0a0.00003141\x00\x083.141E-7"
+                     "\x00\x09-3.141E+5\x00\x0b-0.00003141\x00\x09-3.141E-7",
+        "engineer": "\x00\x04%s\x00\x06%s" % (
+            decimal.Decimal("0e6").to_eng_string(),
+            decimal.Decimal("1.5E-9").to_eng_string()),
+    }
+
+    objects = {
+        "empty": [],
+        "single": [decimal.Decimal("1.1")],
+        "extreme": [
+            decimal.Decimal("Infinity"),
+            decimal.Decimal("-Infinity"),
+        ],
+        # exarkun objected to AMP supporting engineering notation because
+        # it was redundant, until we realised that 1E6 has less precision
+        # than 1000000 and is represented differently.  But they compare
+        # and even hash equally.  There were tears.
+        "scientist": [
+            decimal.Decimal("3.141E5"),
+            decimal.Decimal("3.141e-5"),
+            decimal.Decimal("3.141E-7"),
+            decimal.Decimal("-3.141e5"),
+            decimal.Decimal("-3.141E-5"),
+            decimal.Decimal("-3.141e-7"),
+        ],
+        "engineer": [
+            decimal.Decimal("0e6"),
+            decimal.Decimal("1.5E-9"),
+        ],
+     }
+
+
+
+class ListOfDecimalNanTests(unittest.TestCase, ListOfTestsMixin):
+    """
+    Tests for L{ListOf} combined with L{amp.Decimal} for not-a-number values.
+    """
+    elementType = amp.Decimal()
+
+    strings = {
+        "nan": "\x00\x03NaN\x00\x04-NaN\x00\x04sNaN\x00\x05-sNaN",
+    }
+
+    objects = {
+        "nan": [
+            decimal.Decimal("NaN"),
+            decimal.Decimal("-NaN"),
+            decimal.Decimal("sNaN"),
+            decimal.Decimal("-sNaN"),
+        ]
+    }
+
+    def test_fromBox(self):
+        """
+        L{ListOf.fromBox} reverses the operation performed by L{ListOf.toBox}.
+        """
+        # Helpers.  Decimal.is_{qnan,snan,signed}() are new in 2.6 (or 2.5.2,
+        # but who's counting).
+        def is_qnan(decimal):
+            return 'NaN' in str(decimal) and 'sNaN' not in str(decimal)
+
+        def is_snan(decimal):
+            return 'sNaN' in str(decimal)
+
+        def is_signed(decimal):
+            return '-' in str(decimal)
+
+        # NaN values have unusual equality semantics, so this method is
+        # overridden to compare the resulting objects in a way which works with
+        # NaNs.
+        stringList = amp.ListOf(self.elementType)
+        objects = {}
+        for key in self.strings:
+            stringList.fromBox(key, self.strings.copy(), objects, None)
+        n = objects["nan"]
+        self.assertTrue(is_qnan(n[0]) and not is_signed(n[0]))
+        self.assertTrue(is_qnan(n[1]) and is_signed(n[1]))
+        self.assertTrue(is_snan(n[2]) and not is_signed(n[2]))
+        self.assertTrue(is_snan(n[3]) and is_signed(n[3]))
+
+
+
+class DecimalTests(unittest.TestCase):
+    """
+    Tests for L{amp.Decimal}.
+    """
+    def test_nonDecimal(self):
+        """
+        L{amp.Decimal.toString} raises L{ValueError} if passed an object which
+        is not an instance of C{decimal.Decimal}.
+        """
+        argument = amp.Decimal()
+        self.assertRaises(ValueError, argument.toString, "1.234")
+        self.assertRaises(ValueError, argument.toString, 1.234)
+        self.assertRaises(ValueError, argument.toString, 1234)
+
+
+
+class ListOfDateTimeTests(unittest.TestCase, ListOfTestsMixin):
+    """
+    Tests for L{ListOf} combined with L{amp.DateTime}.
+    """
+    elementType = amp.DateTime()
+
+    strings = {
+        "christmas":
+            "\x00\x202010-12-25T00:00:00.000000-00:00"
+            "\x00\x202010-12-25T00:00:00.000000-00:00",
+        "christmas in eu": "\x00\x202010-12-25T00:00:00.000000+01:00",
+        "christmas in iran": "\x00\x202010-12-25T00:00:00.000000+03:30",
+        "christmas in nyc": "\x00\x202010-12-25T00:00:00.000000-05:00",
+        "previous tests": "\x00\x202010-12-25T00:00:00.000000+03:19"
+                          "\x00\x202010-12-25T00:00:00.000000-06:59",
+    }
+
+    objects = {
+        "christmas": [
+            datetime.datetime(2010, 12, 25, 0, 0, 0, tzinfo=amp.utc),
+            datetime.datetime(2010, 12, 25, 0, 0, 0,
+                tzinfo=amp._FixedOffsetTZInfo('+', 0, 0)),
+        ],
+        "christmas in eu": [
+            datetime.datetime(2010, 12, 25, 0, 0, 0,
+                tzinfo=amp._FixedOffsetTZInfo('+', 1, 0)),
+        ],
+        "christmas in iran": [
+            datetime.datetime(2010, 12, 25, 0, 0, 0,
+                tzinfo=amp._FixedOffsetTZInfo('+', 3, 30)),
+        ],
+        "christmas in nyc": [
+            datetime.datetime(2010, 12, 25, 0, 0, 0,
+                tzinfo=amp._FixedOffsetTZInfo('-', 5, 0)),
+        ],
+        "previous tests": [
+            datetime.datetime(2010, 12, 25, 0, 0, 0,
+                tzinfo=amp._FixedOffsetTZInfo('+', 3, 19)),
+            datetime.datetime(2010, 12, 25, 0, 0, 0,
+                tzinfo=amp._FixedOffsetTZInfo('-', 6, 59)),
+        ],
+    }
+
+
+
+class ListOfOptionalTests(unittest.TestCase):
+    """
+    Tests to ensure L{ListOf} AMP arguments can be omitted from AMP commands
+    via the 'optional' flag.
+    """
+    def test_requiredArgumentWithNoneValueRaisesTypeError(self):
+        """
+        L{ListOf.toBox} raises C{TypeError} when passed a value of C{None}
+        for the argument.
+        """
+        stringList = amp.ListOf(amp.Integer())
+        self.assertRaises(
+            TypeError, stringList.toBox, 'omitted', amp.AmpBox(),
+            {'omitted': None}, None)
+
+
+    def test_optionalArgumentWithNoneValueOmitted(self):
+        """
+        L{ListOf.toBox} silently omits serializing any argument with a
+        value of C{None} that is designated as optional for the protocol.
+        """
+        stringList = amp.ListOf(amp.Integer(), optional=True)
+        strings = amp.AmpBox()
+        stringList.toBox('omitted', strings, {'omitted': None}, None)
+        self.assertEqual(strings, {})
+
+
+    def test_requiredArgumentWithKeyMissingRaisesKeyError(self):
+        """
+        L{ListOf.toBox} raises C{KeyError} if the argument's key is not
+        present in the objects dictionary.
+        """
+        stringList = amp.ListOf(amp.Integer())
+        self.assertRaises(
+            KeyError, stringList.toBox, 'omitted', amp.AmpBox(),
+            {'someOtherKey': 0}, None)
+
+
+    def test_optionalArgumentWithKeyMissingOmitted(self):
+        """
+        L{ListOf.toBox} silently omits serializing any argument designated
+        as optional whose key is not present in the objects dictionary.
+        """
+        stringList = amp.ListOf(amp.Integer(), optional=True)
+        stringList.toBox('omitted', amp.AmpBox(), {'someOtherKey': 0}, None)
+
+
+    def test_omittedOptionalArgumentDeserializesAsNone(self):
+        """
+        L{ListOf.fromBox} correctly reverses the operation performed by
+        L{ListOf.toBox} for optional arguments.
+        """
+        stringList = amp.ListOf(amp.Integer(), optional=True)
+        objects = {}
+        stringList.fromBox('omitted', {}, objects, None)
+        self.assertEqual(objects, {'omitted': None})
+
+
+
+class UNIXStringTransport(object):
+    """
+    An in-memory implementation of L{interfaces.IUNIXTransport} which collects
+    all data given to it for later inspection.
+
+    @ivar _queue: A C{list} of the data which has been given to this transport,
+        e.g. via C{write} or C{sendFileDescriptor}.  Elements are two-tuples of
+        a string (naming the receiving-side event for the data) and the data
+        itself.
+    """
+    implements(interfaces.IUNIXTransport)
+
+    def __init__(self, descriptorFuzz):
+        """
+        @param descriptorFuzz: An offset to apply to descriptors.
+        @type descriptorFuzz: C{int}
+        """
+        self._fuzz = descriptorFuzz
+        self._queue = []
+
+
+    def sendFileDescriptor(self, descriptor):
+        self._queue.append((
+                'fileDescriptorReceived', descriptor + self._fuzz))
+
+
+    def write(self, data):
+        self._queue.append(('dataReceived', data))
+
+
+    def writeSequence(self, seq):
+        for data in seq:
+            self.write(data)
+
+
+    def loseConnection(self):
+        self._queue.append(('connectionLost', Failure(ConnectionLost())))
+
+
+    def getHost(self):
+        return UNIXAddress('/tmp/some-path')
+
+
+    def getPeer(self):
+        return UNIXAddress('/tmp/another-path')
+
+# Minimal evidence that we got the signatures right
+verifyClass(interfaces.ITransport, UNIXStringTransport)
+verifyClass(interfaces.IUNIXTransport, UNIXStringTransport)
+
+
+class DescriptorTests(unittest.TestCase):
+    """
+    Tests for L{amp.Descriptor}, an argument type for passing a file descriptor
+    over an AMP connection over a UNIX domain socket.
+    """
+    def setUp(self):
+        self.fuzz = 3
+        self.transport = UNIXStringTransport(descriptorFuzz=self.fuzz)
+        self.protocol = amp.BinaryBoxProtocol(
+            amp.BoxDispatcher(amp.CommandLocator()))
+        self.protocol.makeConnection(self.transport)
+
+
+    def test_fromStringProto(self):
+        """
+        L{Descriptor.fromStringProto} constructs a file descriptor value by
+        extracting a previously received file descriptor corresponding to the
+        wire value of the argument from the L{_DescriptorExchanger} state of the
+        protocol passed to it.
+
+        This is a whitebox test which involves direct L{_DescriptorExchanger}
+        state inspection.
+        """
+        argument = amp.Descriptor()
+        self.protocol.fileDescriptorReceived(5)
+        self.protocol.fileDescriptorReceived(3)
+        self.protocol.fileDescriptorReceived(1)
+        self.assertEqual(
+            5, argument.fromStringProto("0", self.protocol))
+        self.assertEqual(
+            3, argument.fromStringProto("1", self.protocol))
+        self.assertEqual(
+            1, argument.fromStringProto("2", self.protocol))
+        self.assertEqual({}, self.protocol._descriptors)
+
+
+    def test_toStringProto(self):
+        """
+        To send a file descriptor, L{Descriptor.toStringProto} uses the
+        L{IUNIXTransport.sendFileDescriptor} implementation of the transport of
+        the protocol passed to it to copy the file descriptor.  Each subsequent
+        descriptor sent over a particular AMP connection is assigned the next
+        integer value, starting from 0.  The base ten string representation of
+        this value is the byte encoding of the argument.
+
+        This is a whitebox test which involves direct L{_DescriptorExchanger}
+        state inspection and mutation.
+        """
+        argument = amp.Descriptor()
+        self.assertEqual("0", argument.toStringProto(2, self.protocol))
+        self.assertEqual(
+            ("fileDescriptorReceived", 2 + self.fuzz), self.transport._queue.pop(0))
+        self.assertEqual("1", argument.toStringProto(4, self.protocol))
+        self.assertEqual(
+            ("fileDescriptorReceived", 4 + self.fuzz), self.transport._queue.pop(0))
+        self.assertEqual("2", argument.toStringProto(6, self.protocol))
+        self.assertEqual(
+            ("fileDescriptorReceived", 6 + self.fuzz), self.transport._queue.pop(0))
+        self.assertEqual({}, self.protocol._descriptors)
+
+
+    def test_roundTrip(self):
+        """
+        L{amp.Descriptor.fromBox} can interpret an L{amp.AmpBox} constructed by
+        L{amp.Descriptor.toBox} to reconstruct a file descriptor value.
+        """
+        name = "alpha"
+        strings = {}
+        descriptor = 17
+        sendObjects = {name: descriptor}
+
+        argument = amp.Descriptor()
+        argument.toBox(name, strings, sendObjects.copy(), self.protocol)
+
+        receiver = amp.BinaryBoxProtocol(
+            amp.BoxDispatcher(amp.CommandLocator()))
+        for event in self.transport._queue:
+            getattr(receiver, event[0])(*event[1:])
+
+        receiveObjects = {}
+        argument.fromBox(name, strings.copy(), receiveObjects, receiver)
+
+        # Make sure we got the descriptor.  Adjust by fuzz to be more convincing
+        # of having gone through L{IUNIXTransport.sendFileDescriptor}, not just
+        # converted to a string and then parsed back into an integer.
+        self.assertEqual(descriptor + self.fuzz, receiveObjects[name])
+
+
+
+class DateTimeTests(unittest.TestCase):
+    """
+    Tests for L{amp.DateTime}, L{amp._FixedOffsetTZInfo}, and L{amp.utc}.
+    """
+    string = '9876-01-23T12:34:56.054321-01:23'
+    tzinfo = amp._FixedOffsetTZInfo('-', 1, 23)
+    object = datetime.datetime(9876, 1, 23, 12, 34, 56, 54321, tzinfo)
+
+    def test_invalidString(self):
+        """
+        L{amp.DateTime.fromString} raises L{ValueError} when passed a string
+        which does not represent a timestamp in the proper format.
+        """
+        d = amp.DateTime()
+        self.assertRaises(ValueError, d.fromString, 'abc')
+
+
+    def test_invalidDatetime(self):
+        """
+        L{amp.DateTime.toString} raises L{ValueError} when passed a naive
+        datetime (a datetime with no timezone information).
+        """
+        d = amp.DateTime()
+        self.assertRaises(ValueError, d.toString,
+            datetime.datetime(2010, 12, 25, 0, 0, 0))
+
+
+    def test_fromString(self):
+        """
+        L{amp.DateTime.fromString} returns a C{datetime.datetime} with all of
+        its fields populated from the string passed to it.
+        """
+        argument = amp.DateTime()
+        value = argument.fromString(self.string)
+        self.assertEqual(value, self.object)
+
+
+    def test_toString(self):
+        """
+        L{amp.DateTime.toString} returns a C{str} in the wire format including
+        all of the information from the C{datetime.datetime} passed into it,
+        including the timezone offset.
+        """
+        argument = amp.DateTime()
+        value = argument.toString(self.object)
+        self.assertEqual(value, self.string)
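+
+
+# The wire form exercised above is a fixed-width, 32-character string,
+# "YYYY-MM-DDTHH:MM:SS.ffffff+HH:MM" (or "-HH:MM"), which is why every
+# element in L{ListOfDateTimeTests.strings} carries the length prefix
+# "\x00\x20".  The helper below is only a minimal sketch of producing that
+# form from an aware datetime (the real serialization lives in
+# L{amp.DateTime.toString}); it is not used by the tests.
+def _exampleDateTimeWireForm(aware):
+    """
+    Render C{aware}, a timezone-aware C{datetime.datetime}, in the 32-byte
+    format shown in L{DateTimeTests.string}.
+    """
+    offset = aware.utcoffset()
+    sign = '+'
+    if offset < datetime.timedelta(0):
+        sign = '-'
+        offset = -offset
+    minutes = offset.days * 24 * 60 + offset.seconds // 60
+    return '%04d-%02d-%02dT%02d:%02d:%02d.%06d%s%02d:%02d' % (
+        aware.year, aware.month, aware.day, aware.hour, aware.minute,
+        aware.second, aware.microsecond, sign, minutes // 60, minutes % 60)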
+
+
+
+class FixedOffsetTZInfoTests(unittest.TestCase):
+    """
+    Tests for L{amp._FixedOffsetTZInfo} and L{amp.utc}.
+    """
+
+    def test_tzname(self):
+        """
+        L{amp.utc.tzname} returns C{"+00:00"}.
+        """
+        self.assertEqual(amp.utc.tzname(None), '+00:00')
+
+
+    def test_dst(self):
+        """
+        L{amp.utc.dst} returns a zero timedelta.
+        """
+        self.assertEqual(amp.utc.dst(None), datetime.timedelta(0))
+
+
+    def test_utcoffset(self):
+        """
+        L{amp.utc.utcoffset} returns a zero timedelta.
+        """
+        self.assertEqual(amp.utc.utcoffset(None), datetime.timedelta(0))
+
+
+    def test_badSign(self):
+        """
+        L{amp._FixedOffsetTZInfo} raises L{ValueError} if passed an offset sign
+        other than C{'+'} or C{'-'}.
+        """
+        self.assertRaises(ValueError, amp._FixedOffsetTZInfo, '?', 0, 0)
+
+
+
+if not interfaces.IReactorSSL.providedBy(reactor):
+    skipMsg = 'This test case requires SSL support in the reactor'
+    TLSTest.skip = skipMsg
+    LiveFireTLSTestCase.skip = skipMsg
+    PlainVanillaLiveFire.skip = skipMsg
+    WithServerTLSVerification.skip = skipMsg
diff --git a/ThirdParty/Twisted/twisted/test/test_application.py b/ThirdParty/Twisted/twisted/test/test_application.py
new file mode 100644
index 0000000..a5f9c28
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_application.py
@@ -0,0 +1,841 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.application} and its interaction with
+L{twisted.persisted.sob}.
+"""
+
+import copy, os, pickle
+from StringIO import StringIO
+
+from twisted.trial import unittest, util
+from twisted.application import service, internet, app
+from twisted.persisted import sob
+from twisted.python import usage
+from twisted.internet import interfaces, defer
+from twisted.protocols import wire, basic
+from twisted.internet import protocol, reactor
+from twisted.application import reactors
+from twisted.test.proto_helpers import MemoryReactor
+
+
+class Dummy:
+    processName = None
+
+class TestService(unittest.TestCase):
+
+    def testName(self):
+        s = service.Service()
+        s.setName("hello")
+        self.assertEqual(s.name, "hello")
+
+    def testParent(self):
+        s = service.Service()
+        p = service.MultiService()
+        s.setServiceParent(p)
+        self.assertEqual(list(p), [s])
+        self.assertEqual(s.parent, p)
+
+    def testApplicationAsParent(self):
+        s = service.Service()
+        p = service.Application("")
+        s.setServiceParent(p)
+        self.assertEqual(list(service.IServiceCollection(p)), [s])
+        self.assertEqual(s.parent, service.IServiceCollection(p))
+
+    def testNamedChild(self):
+        s = service.Service()
+        p = service.MultiService()
+        s.setName("hello")
+        s.setServiceParent(p)
+        self.assertEqual(list(p), [s])
+        self.assertEqual(s.parent, p)
+        self.assertEqual(p.getServiceNamed("hello"), s)
+
+    def testDoublyNamedChild(self):
+        s = service.Service()
+        p = service.MultiService()
+        s.setName("hello")
+        s.setServiceParent(p)
+        self.failUnlessRaises(RuntimeError, s.setName, "lala")
+
+    def testDuplicateNamedChild(self):
+        s = service.Service()
+        p = service.MultiService()
+        s.setName("hello")
+        s.setServiceParent(p)
+        s = service.Service()
+        s.setName("hello")
+        self.failUnlessRaises(RuntimeError, s.setServiceParent, p)
+
+    def testDisowning(self):
+        s = service.Service()
+        p = service.MultiService()
+        s.setServiceParent(p)
+        self.assertEqual(list(p), [s])
+        self.assertEqual(s.parent, p)
+        s.disownServiceParent()
+        self.assertEqual(list(p), [])
+        self.assertEqual(s.parent, None)
+
+    def testRunning(self):
+        s = service.Service()
+        self.assert_(not s.running)
+        s.startService()
+        self.assert_(s.running)
+        s.stopService()
+        self.assert_(not s.running)
+
+    def testRunningChildren1(self):
+        s = service.Service()
+        p = service.MultiService()
+        s.setServiceParent(p)
+        self.assert_(not s.running)
+        self.assert_(not p.running)
+        p.startService()
+        self.assert_(s.running)
+        self.assert_(p.running)
+        p.stopService()
+        self.assert_(not s.running)
+        self.assert_(not p.running)
+
+    def testRunningChildren2(self):
+        s = service.Service()
+        def checkRunning():
+            self.assert_(s.running)
+        t = service.Service()
+        t.stopService = checkRunning
+        t.startService = checkRunning
+        p = service.MultiService()
+        s.setServiceParent(p)
+        t.setServiceParent(p)
+        p.startService()
+        p.stopService()
+
+    def testAddingIntoRunning(self):
+        p = service.MultiService()
+        p.startService()
+        s = service.Service()
+        self.assert_(not s.running)
+        s.setServiceParent(p)
+        self.assert_(s.running)
+        s.disownServiceParent()
+        self.assert_(not s.running)
+
+    def testPrivileged(self):
+        s = service.Service()
+        def pss():
+            s.privilegedStarted = 1
+        s.privilegedStartService = pss
+        s1 = service.Service()
+        p = service.MultiService()
+        s.setServiceParent(p)
+        s1.setServiceParent(p)
+        p.privilegedStartService()
+        self.assert_(s.privilegedStarted)
+
+    def testCopying(self):
+        s = service.Service()
+        s.startService()
+        s1 = copy.copy(s)
+        self.assert_(not s1.running)
+        self.assert_(s.running)
+
+
+if hasattr(os, "getuid"):
+    curuid = os.getuid()
+    curgid = os.getgid()
+else:
+    curuid = curgid = 0
+
+
+class TestProcess(unittest.TestCase):
+
+    def testID(self):
+        p = service.Process(5, 6)
+        self.assertEqual(p.uid, 5)
+        self.assertEqual(p.gid, 6)
+
+    def testDefaults(self):
+        p = service.Process(5)
+        self.assertEqual(p.uid, 5)
+        self.assertEqual(p.gid, None)
+        p = service.Process(gid=5)
+        self.assertEqual(p.uid, None)
+        self.assertEqual(p.gid, 5)
+        p = service.Process()
+        self.assertEqual(p.uid, None)
+        self.assertEqual(p.gid, None)
+
+    def testProcessName(self):
+        p = service.Process()
+        self.assertEqual(p.processName, None)
+        p.processName = 'hello'
+        self.assertEqual(p.processName, 'hello')
+
+
+class TestInterfaces(unittest.TestCase):
+
+    def testService(self):
+        self.assert_(service.IService.providedBy(service.Service()))
+
+    def testMultiService(self):
+        self.assert_(service.IService.providedBy(service.MultiService()))
+        self.assert_(service.IServiceCollection.providedBy(service.MultiService()))
+
+    def testProcess(self):
+        self.assert_(service.IProcess.providedBy(service.Process()))
+
+
+class TestApplication(unittest.TestCase):
+
+    def testConstructor(self):
+        service.Application("hello")
+        service.Application("hello", 5)
+        service.Application("hello", 5, 6)
+
+    def testProcessComponent(self):
+        a = service.Application("hello")
+        self.assertEqual(service.IProcess(a).uid, None)
+        self.assertEqual(service.IProcess(a).gid, None)
+        a = service.Application("hello", 5)
+        self.assertEqual(service.IProcess(a).uid, 5)
+        self.assertEqual(service.IProcess(a).gid, None)
+        a = service.Application("hello", 5, 6)
+        self.assertEqual(service.IProcess(a).uid, 5)
+        self.assertEqual(service.IProcess(a).gid, 6)
+
+    def testServiceComponent(self):
+        a = service.Application("hello")
+        self.assert_(service.IService(a) is service.IServiceCollection(a))
+        self.assertEqual(service.IService(a).name, "hello")
+        self.assertEqual(service.IService(a).parent, None)
+
+    def testPersistableComponent(self):
+        a = service.Application("hello")
+        p = sob.IPersistable(a)
+        self.assertEqual(p.style, 'pickle')
+        self.assertEqual(p.name, 'hello')
+        self.assert_(p.original is a)
+
+class TestLoading(unittest.TestCase):
+
+    def test_simpleStoreAndLoad(self):
+        a = service.Application("hello")
+        p = sob.IPersistable(a)
+        for style in 'source pickle'.split():
+            p.setStyle(style)
+            p.save()
+            a1 = service.loadApplication("hello.ta"+style[0], style)
+            self.assertEqual(service.IService(a1).name, "hello")
+        f = open("hello.tac", 'w')
+        f.writelines([
+        "from twisted.application import service\n",
+        "application = service.Application('hello')\n",
+        ])
+        f.close()
+        a1 = service.loadApplication("hello.tac", 'python')
+        self.assertEqual(service.IService(a1).name, "hello")
+
+
+
+class TestAppSupport(unittest.TestCase):
+
+    def testPassphrase(self):
+        self.assertEqual(app.getPassphrase(0), None)
+
+    def testLoadApplication(self):
+        """
+        Test loading an application file in different dump formats.
+        """
+        a = service.Application("hello")
+        baseconfig = {'file': None, 'source': None, 'python': None}
+        for style in 'source pickle'.split():
+            config = baseconfig.copy()
+            config[{'pickle': 'file'}.get(style, style)] = 'helloapplication'
+            sob.IPersistable(a).setStyle(style)
+            sob.IPersistable(a).save(filename='helloapplication')
+            a1 = app.getApplication(config, None)
+            self.assertEqual(service.IService(a1).name, "hello")
+        config = baseconfig.copy()
+        config['python'] = 'helloapplication'
+        f = open("helloapplication", 'w')
+        f.writelines([
+        "from twisted.application import service\n",
+        "application = service.Application('hello')\n",
+        ])
+        f.close()
+        a1 = app.getApplication(config, None)
+        self.assertEqual(service.IService(a1).name, "hello")
+
+    def test_convertStyle(self):
+        appl = service.Application("lala")
+        for instyle in 'source pickle'.split():
+            for outstyle in 'source pickle'.split():
+                sob.IPersistable(appl).setStyle(instyle)
+                sob.IPersistable(appl).save(filename="converttest")
+                app.convertStyle("converttest", instyle, None,
+                                 "converttest.out", outstyle, 0)
+                appl2 = service.loadApplication("converttest.out", outstyle)
+                self.assertEqual(service.IService(appl2).name, "lala")
+
+
+    def test_startApplication(self):
+        appl = service.Application("lala")
+        app.startApplication(appl, 0)
+        self.assert_(service.IService(appl).running)
+
+
+class Foo(basic.LineReceiver):
+    def connectionMade(self):
+        self.transport.write('lalala\r\n')
+    def lineReceived(self, line):
+        self.factory.line = line
+        self.transport.loseConnection()
+    def connectionLost(self, reason):
+        self.factory.d.callback(self.factory.line)
+
+
+class DummyApp:
+    processName = None
+    def addService(self, service):
+        self.services[service.name] = service
+    def removeService(self, service):
+        del self.services[service.name]
+
+
+class TimerTarget:
+    def __init__(self):
+        self.l = []
+    def append(self, what):
+        self.l.append(what)
+
+class TestEcho(wire.Echo):
+    def connectionLost(self, reason):
+        self.d.callback(True)
+
+class TestInternet2(unittest.TestCase):
+
+    def testTCP(self):
+        s = service.MultiService()
+        s.startService()
+        factory = protocol.ServerFactory()
+        factory.protocol = TestEcho
+        TestEcho.d = defer.Deferred()
+        t = internet.TCPServer(0, factory)
+        t.setServiceParent(s)
+        num = t._port.getHost().port
+        factory = protocol.ClientFactory()
+        factory.d = defer.Deferred()
+        factory.protocol = Foo
+        factory.line = None
+        internet.TCPClient('127.0.0.1', num, factory).setServiceParent(s)
+        factory.d.addCallback(self.assertEqual, 'lalala')
+        factory.d.addCallback(lambda x : s.stopService())
+        factory.d.addCallback(lambda x : TestEcho.d)
+        return factory.d
+
+
+    def test_UDP(self):
+        """
+        Test L{internet.UDPServer} with a random port: starting the service
+        should give it a valid port, and stopService should free it so that we
+        can start a server on the same port again.
+        """
+        if not interfaces.IReactorUDP(reactor, None):
+            raise unittest.SkipTest("This reactor does not support UDP sockets")
+        p = protocol.DatagramProtocol()
+        t = internet.UDPServer(0, p)
+        t.startService()
+        num = t._port.getHost().port
+        self.assertNotEquals(num, 0)
+        def onStop(ignored):
+            t = internet.UDPServer(num, p)
+            t.startService()
+            return t.stopService()
+        return defer.maybeDeferred(t.stopService).addCallback(onStop)
+
+
+    def testPrivileged(self):
+        factory = protocol.ServerFactory()
+        factory.protocol = TestEcho
+        TestEcho.d = defer.Deferred()
+        t = internet.TCPServer(0, factory)
+        t.privileged = 1
+        t.privilegedStartService()
+        num = t._port.getHost().port
+        factory = protocol.ClientFactory()
+        factory.d = defer.Deferred()
+        factory.protocol = Foo
+        factory.line = None
+        c = internet.TCPClient('127.0.0.1', num, factory)
+        c.startService()
+        factory.d.addCallback(self.assertEqual, 'lalala')
+        factory.d.addCallback(lambda x : c.stopService())
+        factory.d.addCallback(lambda x : t.stopService())
+        factory.d.addCallback(lambda x : TestEcho.d)
+        return factory.d
+
+    def testConnectionGettingRefused(self):
+        factory = protocol.ServerFactory()
+        factory.protocol = wire.Echo
+        t = internet.TCPServer(0, factory)
+        t.startService()
+        num = t._port.getHost().port
+        t.stopService()
+        d = defer.Deferred()
+        factory = protocol.ClientFactory()
+        factory.clientConnectionFailed = lambda *args: d.callback(None)
+        c = internet.TCPClient('127.0.0.1', num, factory)
+        c.startService()
+        return d
+
+    def testUNIX(self):
+        # FIXME: This test is far too dense.  It needs comments.
+        #  -- spiv, 2004-11-07
+        if not interfaces.IReactorUNIX(reactor, None):
+            raise unittest.SkipTest("This reactor does not support UNIX domain sockets")
+        s = service.MultiService()
+        s.startService()
+        factory = protocol.ServerFactory()
+        factory.protocol = TestEcho
+        TestEcho.d = defer.Deferred()
+        t = internet.UNIXServer('echo.skt', factory)
+        t.setServiceParent(s)
+        factory = protocol.ClientFactory()
+        factory.protocol = Foo
+        factory.d = defer.Deferred()
+        factory.line = None
+        internet.UNIXClient('echo.skt', factory).setServiceParent(s)
+        factory.d.addCallback(self.assertEqual, 'lalala')
+        factory.d.addCallback(lambda x : s.stopService())
+        factory.d.addCallback(lambda x : TestEcho.d)
+        factory.d.addCallback(self._cbTestUnix, factory, s)
+        return factory.d
+
+    def _cbTestUnix(self, ignored, factory, s):
+        TestEcho.d = defer.Deferred()
+        factory.line = None
+        factory.d = defer.Deferred()
+        s.startService()
+        factory.d.addCallback(self.assertEqual, 'lalala')
+        factory.d.addCallback(lambda x : s.stopService())
+        factory.d.addCallback(lambda x : TestEcho.d)
+        return factory.d
+
+    def testVolatile(self):
+        if not interfaces.IReactorUNIX(reactor, None):
+            raise unittest.SkipTest("This reactor does not support UNIX domain sockets")
+        factory = protocol.ServerFactory()
+        factory.protocol = wire.Echo
+        t = internet.UNIXServer('echo.skt', factory)
+        t.startService()
+        self.failIfIdentical(t._port, None)
+        t1 = copy.copy(t)
+        self.assertIdentical(t1._port, None)
+        t.stopService()
+        self.assertIdentical(t._port, None)
+        self.failIf(t.running)
+
+        factory = protocol.ClientFactory()
+        factory.protocol = wire.Echo
+        t = internet.UNIXClient('echo.skt', factory)
+        t.startService()
+        self.failIfIdentical(t._connection, None)
+        t1 = copy.copy(t)
+        self.assertIdentical(t1._connection, None)
+        t.stopService()
+        self.assertIdentical(t._connection, None)
+        self.failIf(t.running)
+
+    def testStoppingServer(self):
+        if not interfaces.IReactorUNIX(reactor, None):
+            raise unittest.SkipTest("This reactor does not support UNIX domain sockets")
+        factory = protocol.ServerFactory()
+        factory.protocol = wire.Echo
+        t = internet.UNIXServer('echo.skt', factory)
+        t.startService()
+        t.stopService()
+        self.failIf(t.running)
+        factory = protocol.ClientFactory()
+        d = defer.Deferred()
+        factory.clientConnectionFailed = lambda *args: d.callback(None)
+        reactor.connectUNIX('echo.skt', factory)
+        return d
+
+    def testPickledTimer(self):
+        target = TimerTarget()
+        t0 = internet.TimerService(1, target.append, "hello")
+        t0.startService()
+        s = pickle.dumps(t0)
+        t0.stopService()
+
+        t = pickle.loads(s)
+        self.failIf(t.running)
+
+    def testBrokenTimer(self):
+        d = defer.Deferred()
+        t = internet.TimerService(1, lambda: 1 // 0)
+        oldFailed = t._failed
+        def _failed(why):
+            oldFailed(why)
+            d.callback(None)
+        t._failed = _failed
+        t.startService()
+        d.addCallback(lambda x : t.stopService())
+        d.addCallback(lambda x : self.assertEqual(
+            [ZeroDivisionError],
+            [o.value.__class__ for o in self.flushLoggedErrors(ZeroDivisionError)]))
+        return d
+
+
+    def test_everythingThere(self):
+        """
+        L{twisted.application.internet} dynamically defines a set of
+        L{service.Service} subclasses that in general have corresponding
+        reactor.listenXXX or reactor.connectXXX calls.
+        """
+        trans = 'TCP UNIX SSL UDP UNIXDatagram Multicast'.split()
+        for tran in trans[:]:
+            if not getattr(interfaces, "IReactor" + tran)(reactor, None):
+                trans.remove(tran)
+        for tran in trans:
+            for side in 'Server Client'.split():
+                if tran == "Multicast" and side == "Client":
+                    continue
+                self.assertTrue(hasattr(internet, tran + side))
+                method = getattr(internet, tran + side).method
+                prefix = {'Server': 'listen', 'Client': 'connect'}[side]
+                self.assertTrue(hasattr(reactor, prefix + method) or
+                        (prefix == "connect" and method == "UDP"))
+                o = getattr(internet, tran + side)()
+                self.assertEqual(service.IService(o), o)
+
+
+    def test_importAll(self):
+        """
+        L{twisted.application.internet} dynamically defines L{service.Service}
+        subclasses. This test ensures that the subclasses exposed by C{__all__}
+        are valid attributes of the module.
+        """
+        for cls in internet.__all__:
+            self.assertTrue(
+                hasattr(internet, cls),
+                '%s not importable from twisted.application.internet' % (cls,))
+
+
+    def test_reactorParametrizationInServer(self):
+        """
+        L{internet._AbstractServer} supports a C{reactor} keyword argument
+        that can be used to parametrize the reactor used to listen for
+        connections.
+        """
+        reactor = MemoryReactor()
+
+        factory = object()
+        t = internet.TCPServer(1234, factory, reactor=reactor)
+        t.startService()
+        self.assertEqual(reactor.tcpServers.pop()[:2], (1234, factory))
+
+
+    def test_reactorParametrizationInClient(self):
+        """
+        L{internet._AbstractClient} supports a C{reactor} keyword argument
+        that can be used to parametrize the reactor used to create new client
+        connections.
+        """
+        reactor = MemoryReactor()
+
+        factory = protocol.ClientFactory()
+        t = internet.TCPClient('127.0.0.1', 1234, factory, reactor=reactor)
+        t.startService()
+        self.assertEqual(
+            reactor.tcpClients.pop()[:3], ('127.0.0.1', 1234, factory))
+
+
+    def test_reactorParametrizationInServerMultipleStart(self):
+        """
+        Like L{test_reactorParametrizationInServer}, but stop and restart the
+        service and check that the given reactor is still used.
+        """
+        reactor = MemoryReactor()
+
+        factory = protocol.Factory()
+        t = internet.TCPServer(1234, factory, reactor=reactor)
+        t.startService()
+        self.assertEqual(reactor.tcpServers.pop()[:2], (1234, factory))
+        t.stopService()
+        t.startService()
+        self.assertEqual(reactor.tcpServers.pop()[:2], (1234, factory))
+
+
+    def test_reactorParametrizationInClientMultipleStart(self):
+        """
+        Like L{test_reactorParametrizationInClient}, but stop and restart the
+        service and check that the given reactor is still used.
+        """
+        reactor = MemoryReactor()
+
+        factory = protocol.ClientFactory()
+        t = internet.TCPClient('127.0.0.1', 1234, factory, reactor=reactor)
+        t.startService()
+        self.assertEqual(
+            reactor.tcpClients.pop()[:3], ('127.0.0.1', 1234, factory))
+        t.stopService()
+        t.startService()
+        self.assertEqual(
+            reactor.tcpClients.pop()[:3], ('127.0.0.1', 1234, factory))
+
+
+
+class TestTimerBasic(unittest.TestCase):
+
+    def testTimerRuns(self):
+        d = defer.Deferred()
+        self.t = internet.TimerService(1, d.callback, 'hello')
+        self.t.startService()
+        d.addCallback(self.assertEqual, 'hello')
+        d.addCallback(lambda x : self.t.stopService())
+        d.addCallback(lambda x : self.failIf(self.t.running))
+        return d
+
+    def tearDown(self):
+        return self.t.stopService()
+
+    def testTimerRestart(self):
+        # restart the same TimerService
+        d1 = defer.Deferred()
+        d2 = defer.Deferred()
+        work = [(d2, "bar"), (d1, "foo")]
+        def trigger():
+            d, arg = work.pop()
+            d.callback(arg)
+        self.t = internet.TimerService(1, trigger)
+        self.t.startService()
+        def onFirstResult(result):
+            self.assertEqual(result, 'foo')
+            return self.t.stopService()
+        def onFirstStop(ignored):
+            self.failIf(self.t.running)
+            self.t.startService()
+            return d2
+        def onSecondResult(result):
+            self.assertEqual(result, 'bar')
+            self.t.stopService()
+        d1.addCallback(onFirstResult)
+        d1.addCallback(onFirstStop)
+        d1.addCallback(onSecondResult)
+        return d1
+
+    def testTimerLoops(self):
+        l = []
+        def trigger(data, number, d):
+            l.append(data)
+            if len(l) == number:
+                d.callback(l)
+        d = defer.Deferred()
+        self.t = internet.TimerService(0.01, trigger, "hello", 10, d)
+        self.t.startService()
+        d.addCallback(self.assertEqual, ['hello'] * 10)
+        d.addCallback(lambda x : self.t.stopService())
+        return d
+
+
+class FakeReactor(reactors.Reactor):
+    """
+    A fake reactor with a hooked install method.
+    """
+
+    def __init__(self, install, *args, **kwargs):
+        """
+        @param install: any callable that will be used as the install method.
+        @type install: C{callable}
+        """
+        reactors.Reactor.__init__(self, *args, **kwargs)
+        self.install = install
+
+
+
+class PluggableReactorTestCase(unittest.TestCase):
+    """
+    Tests for the reactor discovery/inspection APIs.
+    """
+
+    def setUp(self):
+        """
+        Override the L{reactors.getPlugins} function, normally bound to
+        L{twisted.plugin.getPlugins}, in order to control which
+        L{IReactorInstaller} plugins are seen as available.
+
+        C{self.pluginResults} can be customized and will be used as the
+        result of calls to C{reactors.getPlugins}.
+        """
+        self.pluginCalls = []
+        self.pluginResults = []
+        self.originalFunction = reactors.getPlugins
+        reactors.getPlugins = self._getPlugins
+
+
+    def tearDown(self):
+        """
+        Restore the original L{reactors.getPlugins}.
+        """
+        reactors.getPlugins = self.originalFunction
+
+
+    def _getPlugins(self, interface, package=None):
+        """
+        Stand-in for the real getPlugins method which records its arguments
+        and returns a fixed result.
+        """
+        self.pluginCalls.append((interface, package))
+        return list(self.pluginResults)
+
+
+    def test_getPluginReactorTypes(self):
+        """
+        Test that reactor plugins are returned from L{getReactorTypes}.
+        """
+        name = 'fakereactortest'
+        package = __name__ + '.fakereactor'
+        description = 'description'
+        self.pluginResults = [reactors.Reactor(name, package, description)]
+        reactorTypes = reactors.getReactorTypes()
+
+        self.assertEqual(
+            self.pluginCalls,
+            [(reactors.IReactorInstaller, None)])
+
+        for r in reactorTypes:
+            if r.shortName == name:
+                self.assertEqual(r.description, description)
+                break
+        else:
+            self.fail("Reactor plugin not present in getReactorTypes() result")
+
+
+    def test_reactorInstallation(self):
+        """
+        Test that L{reactors.Reactor.install} loads the correct module and
+        calls its install attribute.
+        """
+        installed = []
+        def install():
+            installed.append(True)
+        installer = FakeReactor(install,
+                                'fakereactortest', __name__, 'described')
+        installer.install()
+        self.assertEqual(installed, [True])
+
+
+    def test_installReactor(self):
+        """
+        Test that the L{reactors.installReactor} function correctly installs
+        the specified reactor.
+        """
+        installed = []
+        def install():
+            installed.append(True)
+        name = 'fakereactortest'
+        package = __name__
+        description = 'description'
+        self.pluginResults = [FakeReactor(install, name, package, description)]
+        reactors.installReactor(name)
+        self.assertEqual(installed, [True])
+
+
+    def test_installNonExistentReactor(self):
+        """
+        Test that L{reactors.installReactor} raises L{reactors.NoSuchReactor}
+        when asked to install a reactor which it cannot find.
+        """
+        self.pluginResults = []
+        self.assertRaises(
+            reactors.NoSuchReactor,
+            reactors.installReactor, 'somereactor')
+
+
+    def test_installNotAvailableReactor(self):
+        """
+        Test that L{reactors.installReactor} raises an exception when asked to
+        install a reactor which doesn't work in this environment.
+        """
+        def install():
+            raise ImportError("Missing foo bar")
+        name = 'fakereactortest'
+        package = __name__
+        description = 'description'
+        self.pluginResults = [FakeReactor(install, name, package, description)]
+        self.assertRaises(ImportError, reactors.installReactor, name)
+
+
+    def test_reactorSelectionMixin(self):
+        """
+        Test that the selected reactor is installed as soon as possible,
+        i.e. when the option is parsed.
+        """
+        executed = []
+        INSTALL_EVENT = 'reactor installed'
+        SUBCOMMAND_EVENT = 'subcommands loaded'
+
+        class ReactorSelectionOptions(usage.Options, app.ReactorSelectionMixin):
+            def subCommands(self):
+                executed.append(SUBCOMMAND_EVENT)
+                return [('subcommand', None, lambda: self, 'test subcommand')]
+            subCommands = property(subCommands)
+
+        def install():
+            executed.append(INSTALL_EVENT)
+        self.pluginResults = [
+            FakeReactor(install, 'fakereactortest', __name__, 'described')
+        ]
+
+        options = ReactorSelectionOptions()
+        options.parseOptions(['--reactor', 'fakereactortest', 'subcommand'])
+        self.assertEqual(executed[0], INSTALL_EVENT)
+        self.assertEqual(executed.count(INSTALL_EVENT), 1)
+        self.assertEqual(options["reactor"], "fakereactortest")
+
+
+    def test_reactorSelectionMixinNonExistent(self):
+        """
+        Test that the usage mixin raises an error when trying to use a
+        non-existent reactor (a name not matching any available reactor),
+        giving a helpful error message.
+        """
+        class ReactorSelectionOptions(usage.Options, app.ReactorSelectionMixin):
+            pass
+        self.pluginResults = []
+
+        options = ReactorSelectionOptions()
+        options.messageOutput = StringIO()
+        e = self.assertRaises(usage.UsageError, options.parseOptions,
+                              ['--reactor', 'fakereactortest', 'subcommand'])
+        self.assertIn("fakereactortest", e.args[0])
+        self.assertIn("help-reactors", e.args[0])
+
+
+    def test_reactorSelectionMixinNotAvailable(self):
+        """
+        Test that the usage mixin raises an error when trying to use a reactor
+        which is not available (the reactor raises an error at installation),
+        giving a helpful error message.
+        """
+        class ReactorSelectionOptions(usage.Options, app.ReactorSelectionMixin):
+            pass
+        message = "Missing foo bar"
+        def install():
+            raise ImportError(message)
+
+        name = 'fakereactortest'
+        package = __name__
+        description = 'description'
+        self.pluginResults = [FakeReactor(install, name, package, description)]
+
+        options = ReactorSelectionOptions()
+        options.messageOutput = StringIO()
+        e = self.assertRaises(usage.UsageError, options.parseOptions,
+                              ['--reactor', 'fakereactortest', 'subcommand'])
+        self.assertIn(message, e.args[0])
+        self.assertIn("help-reactors", e.args[0])
diff --git a/ThirdParty/Twisted/twisted/test/test_banana.py b/ThirdParty/Twisted/twisted/test/test_banana.py
new file mode 100644
index 0000000..c4b69de
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_banana.py
@@ -0,0 +1,278 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import StringIO
+import sys
+
+# Twisted Imports
+from twisted.trial import unittest
+from twisted.spread import banana
+from twisted.python import failure
+from twisted.internet import protocol, main
+
+
+class MathTestCase(unittest.TestCase):
+    def testInt2b128(self):
+        funkylist = range(0, 100) + range(1000, 1100) + range(1000000, 1000100) + [1024 ** 10L]
+        for i in funkylist:
+            x = StringIO.StringIO()
+            banana.int2b128(i, x.write)
+            v = x.getvalue()
+            y = banana.b1282int(v)
+            assert y == i, "y = %s; i = %s" % (y,i)
+
+class BananaTestCase(unittest.TestCase):
+
+    encClass = banana.Banana
+
+    def setUp(self):
+        self.io = StringIO.StringIO()
+        self.enc = self.encClass()
+        self.enc.makeConnection(protocol.FileWrapper(self.io))
+        self.enc._selectDialect("none")
+        self.enc.expressionReceived = self.putResult
+
+    def putResult(self, result):
+        self.result = result
+
+    def tearDown(self):
+        self.enc.connectionLost(failure.Failure(main.CONNECTION_DONE))
+        del self.enc
+
+    def testString(self):
+        self.enc.sendEncoded("hello")
+        l = []
+        self.enc.dataReceived(self.io.getvalue())
+        assert self.result == 'hello'
+
+    def test_int(self):
+        """
+        A positive integer less than 2 ** 32 should round-trip through
+        banana without changing value and should come out represented
+        as an C{int} (regardless of the type which was encoded).
+        """
+        for value in (10151, 10151L):
+            self.enc.sendEncoded(value)
+            self.enc.dataReceived(self.io.getvalue())
+            self.assertEqual(self.result, 10151)
+            self.assertIsInstance(self.result, int)
+
+
+    def test_largeLong(self):
+        """
+        Integers greater than 2 ** 32 or less than -2 ** 32 should
+        round-trip through banana without changing value and should
+        come out represented as C{int} instances if the value fits
+        into that type on the receiving platform.
+        """
+        for exp in (32, 64, 128, 256):
+            for add in (0, 1):
+                m = 2 ** exp + add
+                for n in (m, -m-1):
+                    self.io.truncate(0)
+                    self.enc.sendEncoded(n)
+                    self.enc.dataReceived(self.io.getvalue())
+                    self.assertEqual(self.result, n)
+                    if n > sys.maxint or n < -sys.maxint - 1:
+                        self.assertIsInstance(self.result, long)
+                    else:
+                        self.assertIsInstance(self.result, int)
+
+
+    def _getSmallest(self):
+        # How many bytes of prefix our implementation allows
+        bytes = self.enc.prefixLimit
+        # How many useful bits we can extract from that based on Banana's
+        # base-128 representation.
+        bits = bytes * 7
+        # The largest number we _should_ be able to encode
+        largest = 2 ** bits - 1
+        # The smallest number we _shouldn't_ be able to encode
+        smallest = largest + 1
+        return smallest
+
+
+    def test_encodeTooLargeLong(self):
+        """
+        Test that a long above the implementation-specific limit is rejected
+        as too large to be encoded.
+        """
+        smallest = self._getSmallest()
+        self.assertRaises(banana.BananaError, self.enc.sendEncoded, smallest)
+
+
+    def test_decodeTooLargeLong(self):
+        """
+        Test that a long above the implementation-specific limit is rejected
+        as too large to be decoded.
+        """
+        smallest = self._getSmallest()
+        self.enc.setPrefixLimit(self.enc.prefixLimit * 2)
+        self.enc.sendEncoded(smallest)
+        encoded = self.io.getvalue()
+        self.io.truncate(0)
+        self.enc.setPrefixLimit(self.enc.prefixLimit // 2)
+
+        self.assertRaises(banana.BananaError, self.enc.dataReceived, encoded)
+
+
+    def _getLargest(self):
+        return -self._getSmallest()
+
+
+    def test_encodeTooSmallLong(self):
+        """
+        Test that a negative long below the implementation-specific limit is
+        rejected as too small to be encoded.
+        """
+        largest = self._getLargest()
+        self.assertRaises(banana.BananaError, self.enc.sendEncoded, largest)
+
+
+    def test_decodeTooSmallLong(self):
+        """
+        Test that a negative long below the implementation-specific limit is
+        rejected as too small to be decoded.
+        """
+        largest = self._getLargest()
+        self.enc.setPrefixLimit(self.enc.prefixLimit * 2)
+        self.enc.sendEncoded(largest)
+        encoded = self.io.getvalue()
+        self.io.truncate(0)
+        self.enc.setPrefixLimit(self.enc.prefixLimit // 2)
+
+        self.assertRaises(banana.BananaError, self.enc.dataReceived, encoded)
+
+
+    def testNegativeLong(self):
+        self.enc.sendEncoded(-1015l)
+        self.enc.dataReceived(self.io.getvalue())
+        assert self.result == -1015l, "should be -1015l, got %s" % self.result
+
+    def testInteger(self):
+        self.enc.sendEncoded(1015)
+        self.enc.dataReceived(self.io.getvalue())
+        assert self.result == 1015, "should be 1015, got %s" % self.result
+
+    def testNegative(self):
+        self.enc.sendEncoded(-1015)
+        self.enc.dataReceived(self.io.getvalue())
+        assert self.result == -1015, "should be -1015, got %s" % self.result
+
+    def testFloat(self):
+        self.enc.sendEncoded(1015.)
+        self.enc.dataReceived(self.io.getvalue())
+        assert self.result == 1015.
+
+    def testList(self):
+        foo = [1, 2, [3, 4], [30.5, 40.2], 5, ["six", "seven", ["eight", 9]], [10], []]
+        self.enc.sendEncoded(foo)
+        self.enc.dataReceived(self.io.getvalue())
+        assert self.result == foo, "%s!=%s" % (repr(self.result), repr(foo))
+
+    def testPartial(self):
+        foo = [1, 2, [3, 4], [30.5, 40.2], 5,
+               ["six", "seven", ["eight", 9]], [10],
+               # TODO: currently the C implementation's a bit buggy...
+               sys.maxint * 3l, sys.maxint * 2l, sys.maxint * -2l]
+        self.enc.sendEncoded(foo)
+        for byte in self.io.getvalue():
+            self.enc.dataReceived(byte)
+        assert self.result == foo, "%s!=%s" % (repr(self.result), repr(foo))
+
+    def feed(self, data):
+        for byte in data:
+            self.enc.dataReceived(byte)
+    def testOversizedList(self):
+        data = '\x02\x01\x01\x01\x01\x80'
+        # list(size=0x0101010102, about 4.3e9)
+        self.failUnlessRaises(banana.BananaError, self.feed, data)
+    def testOversizedString(self):
+        data = '\x02\x01\x01\x01\x01\x82'
+        # string(size=0x0101010102, about 4.3e9)
+        self.failUnlessRaises(banana.BananaError, self.feed, data)
+
+    def testCrashString(self):
+        crashString = '\x00\x00\x00\x00\x04\x80'
+        # string(size=0x0400000000, about 17.2e9)
+
+        #  cBanana would fold that into a 32-bit 'int', then try to allocate
+        #  a list with PyList_New(). cBanana ignored the NULL return value,
+        #  so it would segfault when trying to free the imaginary list.
+
+        # This variant doesn't segfault straight out in my environment.
+        # Instead, it takes up large amounts of CPU and memory...
+        #crashString = '\x00\x00\x00\x00\x01\x80'
+        # print repr(crashString)
+        #self.failUnlessRaises(Exception, self.enc.dataReceived, crashString)
+        try:
+            # should now raise MemoryError
+            self.enc.dataReceived(crashString)
+        except banana.BananaError:
+            pass
+
+    def testCrashNegativeLong(self):
+        # There was a bug in cBanana which relied on negating a negative integer
+        # always giving a positive result, but for the lowest possible number in
+        # 2s-complement arithmetic, that's not true, i.e.
+        #     long x = -2147483648;
+        #     long y = -x;
+        #     x == y;  /* true! */
+        # (assuming 32-bit longs)
+        self.enc.sendEncoded(-2147483648)
+        self.enc.dataReceived(self.io.getvalue())
+        assert self.result == -2147483648, "should be -2147483648, got %s" % self.result
+
+
+    def test_sizedIntegerTypes(self):
+        """
+        Test that integers below the maximum C{INT} token size cutoff are
+        serialized as C{INT} or C{NEG} and that larger integers are
+        serialized as C{LONGINT} or C{LONGNEG}.
+        """
+        def encoded(n):
+            self.io.seek(0)
+            self.io.truncate()
+            self.enc.sendEncoded(n)
+            return self.io.getvalue()
+
+        baseIntIn = +2147483647
+        baseNegIn = -2147483648
+
+        baseIntOut = '\x7f\x7f\x7f\x07\x81'
+        self.assertEqual(encoded(baseIntIn - 2), '\x7d' + baseIntOut)
+        self.assertEqual(encoded(baseIntIn - 1), '\x7e' + baseIntOut)
+        self.assertEqual(encoded(baseIntIn - 0), '\x7f' + baseIntOut)
+
+        baseLongIntOut = '\x00\x00\x00\x08\x85'
+        self.assertEqual(encoded(baseIntIn + 1), '\x00' + baseLongIntOut)
+        self.assertEqual(encoded(baseIntIn + 2), '\x01' + baseLongIntOut)
+        self.assertEqual(encoded(baseIntIn + 3), '\x02' + baseLongIntOut)
+
+        baseNegOut = '\x7f\x7f\x7f\x07\x83'
+        self.assertEqual(encoded(baseNegIn + 2), '\x7e' + baseNegOut)
+        self.assertEqual(encoded(baseNegIn + 1), '\x7f' + baseNegOut)
+        self.assertEqual(encoded(baseNegIn + 0), '\x00\x00\x00\x00\x08\x83')
+
+        baseLongNegOut = '\x00\x00\x00\x08\x86'
+        self.assertEqual(encoded(baseNegIn - 1), '\x01' + baseLongNegOut)
+        self.assertEqual(encoded(baseNegIn - 2), '\x02' + baseLongNegOut)
+        self.assertEqual(encoded(baseNegIn - 3), '\x03' + baseLongNegOut)
+
+
+
+class GlobalCoderTests(unittest.TestCase):
+    """
+    Tests for the free functions L{banana.encode} and L{banana.decode}.
+    """
+    def test_statelessDecode(self):
+        """
+        Test that state doesn't carry over between calls to L{banana.decode}.
+        """
+        # Banana encoding of 2 ** 449
+        undecodable = '\x7f' * 65 + '\x85'
+        self.assertRaises(banana.BananaError, banana.decode, undecodable)
+
+        # Banana encoding of 1
+        decodable = '\x01\x81'
+        self.assertEqual(banana.decode(decodable), 1)
diff --git a/ThirdParty/Twisted/twisted/test/test_compat.py b/ThirdParty/Twisted/twisted/test/test_compat.py
new file mode 100644
index 0000000..8f87b3e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_compat.py
@@ -0,0 +1,623 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Tests for L{twisted.python.compat}.
+"""
+
+from __future__ import division, absolute_import
+
+import socket, sys, traceback
+
+from twisted.trial import unittest
+
+from twisted.python.compat import set, frozenset, reduce, execfile, _PY3
+from twisted.python.compat import comparable, cmp, nativeString, networkString
+from twisted.python.compat import unicode as unicodeCompat, lazyByteSlice
+from twisted.python.compat import reraise, NativeStringIO, iterbytes, intToBytes
+from twisted.python.filepath import FilePath
+
+
+class CompatTestCase(unittest.SynchronousTestCase):
+    """
+    Various utility functions in C{twisted.python.compat} provide the same
+    functionality as modern Python variants.
+    """
+
+    def test_set(self):
+        """
+        L{set} should behave like the expected set interface.
+        """
+        a = set()
+        a.add('b')
+        a.add('c')
+        a.add('a')
+        b = list(a)
+        b.sort()
+        self.assertEqual(b, ['a', 'b', 'c'])
+        a.remove('b')
+        b = list(a)
+        b.sort()
+        self.assertEqual(b, ['a', 'c'])
+
+        a.discard('d')
+
+        b = set(['r', 's'])
+        d = a.union(b)
+        b = list(d)
+        b.sort()
+        self.assertEqual(b, ['a', 'c', 'r', 's'])
+
+
+    def test_frozenset(self):
+        """
+        L{frozenset} should behave like the expected frozenset interface.
+        """
+        a = frozenset(['a', 'b'])
+        self.assertRaises(AttributeError, getattr, a, "add")
+        self.assertEqual(sorted(a), ['a', 'b'])
+
+        b = frozenset(['r', 's'])
+        d = a.union(b)
+        b = list(d)
+        b.sort()
+        self.assertEqual(b, ['a', 'b', 'r', 's'])
+
+
+    def test_reduce(self):
+        """
+        L{reduce} should behave like the builtin reduce.
+        """
+        self.assertEqual(15, reduce(lambda x, y: x + y, [1, 2, 3, 4, 5]))
+        self.assertEqual(16, reduce(lambda x, y: x + y, [1, 2, 3, 4, 5], 1))
+
+
+
+class IPv6Tests(unittest.SynchronousTestCase):
+    """
+    C{inet_pton} and C{inet_ntop} implementations support IPv6.
+    """
+
+    def testNToP(self):
+        from twisted.python.compat import inet_ntop
+
+        f = lambda a: inet_ntop(socket.AF_INET6, a)
+        g = lambda a: inet_ntop(socket.AF_INET, a)
+
+        self.assertEqual('::', f('\x00' * 16))
+        self.assertEqual('::1', f('\x00' * 15 + '\x01'))
+        self.assertEqual(
+            'aef:b01:506:1001:ffff:9997:55:170',
+            f('\x0a\xef\x0b\x01\x05\x06\x10\x01\xff\xff\x99\x97\x00\x55\x01\x70'))
+
+        self.assertEqual('1.0.1.0', g('\x01\x00\x01\x00'))
+        self.assertEqual('170.85.170.85', g('\xaa\x55\xaa\x55'))
+        self.assertEqual('255.255.255.255', g('\xff\xff\xff\xff'))
+
+        self.assertEqual('100::', f('\x01' + '\x00' * 15))
+        self.assertEqual('100::1', f('\x01' + '\x00' * 14 + '\x01'))
+
+    def testPToN(self):
+        from twisted.python.compat import inet_pton
+
+        f = lambda a: inet_pton(socket.AF_INET6, a)
+        g = lambda a: inet_pton(socket.AF_INET, a)
+
+        self.assertEqual('\x00\x00\x00\x00', g('0.0.0.0'))
+        self.assertEqual('\xff\x00\xff\x00', g('255.0.255.0'))
+        self.assertEqual('\xaa\xaa\xaa\xaa', g('170.170.170.170'))
+
+        self.assertEqual('\x00' * 16, f('::'))
+        self.assertEqual('\x00' * 16, f('0::0'))
+        self.assertEqual('\x00\x01' + '\x00' * 14, f('1::'))
+        self.assertEqual(
+            '\x45\xef\x76\xcb\x00\x1a\x56\xef\xaf\xeb\x0b\xac\x19\x24\xae\xae',
+            f('45ef:76cb:1a:56ef:afeb:bac:1924:aeae'))
+
+        self.assertEqual('\x00' * 14 + '\x00\x01', f('::1'))
+        self.assertEqual('\x00' * 12 + '\x01\x02\x03\x04', f('::1.2.3.4'))
+        self.assertEqual(
+            '\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05\x00\x06\x01\x02\x03\xff',
+            f('1:2:3:4:5:6:1.2.3.255'))
+
+        for badaddr in ['1:2:3:4:5:6:7:8:', ':1:2:3:4:5:6:7:8', '1::2::3',
+                        '1:::3', ':::', '1:2', '::1.2', '1.2.3.4::',
+                        'abcd:1.2.3.4:abcd:abcd:abcd:abcd:abcd',
+                        '1234:1.2.3.4:1234:1234:1234:1234:1234:1234',
+                        '1.2.3.4']:
+            self.assertRaises(ValueError, f, badaddr)
+
+if _PY3:
+    IPv6Tests.skip = "These tests are only relevant to old versions of Python"
+
+
+
+class ExecfileCompatTestCase(unittest.SynchronousTestCase):
+    """
+    Tests for the Python 3-friendly L{execfile} implementation.
+    """
+
+    def writeScript(self, content):
+        """
+        Write C{content} to a new temporary file, returning the L{FilePath}
+        for the new file.
+        """
+        path = self.mktemp()
+        with open(path, "wb") as f:
+            f.write(content.encode("ascii"))
+        return FilePath(path.encode("utf-8"))
+
+
+    def test_execfileGlobals(self):
+        """
+        L{execfile} executes the specified file in the given global namespace.
+        """
+        script = self.writeScript(u"foo += 1\n")
+        globalNamespace = {"foo": 1}
+        execfile(script.path, globalNamespace)
+        self.assertEqual(2, globalNamespace["foo"])
+
+
+    def test_execfileGlobalsAndLocals(self):
+        """
+        L{execfile} executes the specified file in the given global and local
+        namespaces.
+        """
+        script = self.writeScript(u"foo += 1\n")
+        globalNamespace = {"foo": 10}
+        localNamespace = {"foo": 20}
+        execfile(script.path, globalNamespace, localNamespace)
+        self.assertEqual(10, globalNamespace["foo"])
+        self.assertEqual(21, localNamespace["foo"])
+
+
+    def test_execfileUniversalNewlines(self):
+        """
+        L{execfile} reads in the specified file using universal newlines so
+        that scripts written on one platform will work on another.
+        """
+        for lineEnding in u"\n", u"\r", u"\r\n":
+            script = self.writeScript(u"foo = 'okay'" + lineEnding)
+            globalNamespace = {"foo": None}
+            execfile(script.path, globalNamespace)
+            self.assertEqual("okay", globalNamespace["foo"])
+
+
+
+class PY3Tests(unittest.SynchronousTestCase):
+    """
+    Identification of Python 2 vs. Python 3.
+    """
+
+    def test_python2(self):
+        """
+        On Python 2, C{_PY3} is False.
+        """
+        if sys.version.startswith("2."):
+            self.assertFalse(_PY3)
+
+
+    def test_python3(self):
+        """
+        On Python 3, C{_PY3} is True.
+        """
+        if sys.version.startswith("3."):
+            self.assertTrue(_PY3)
+
+
+
+ at comparable
+class Comparable(object):
+    """
+    Objects that can be compared to each other, but not to others.
+    """
+    def __init__(self, value):
+        self.value = value
+
+
+    def __cmp__(self, other):
+        if not isinstance(other, Comparable):
+            return NotImplemented
+        return cmp(self.value, other.value)
+
+
+
+class ComparableTests(unittest.SynchronousTestCase):
+    """
+    L{comparable} decorated classes emulate Python 2's C{__cmp__} semantics.
+    """
+
+    def test_equality(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        equality comparisons.
+        """
+        # Make explicitly sure we're using ==:
+        self.assertTrue(Comparable(1) == Comparable(1))
+        self.assertFalse(Comparable(2) == Comparable(1))
+
+
+    def test_nonEquality(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        inequality comparisons.
+        """
+        # Make explicitly sure we're using !=:
+        self.assertFalse(Comparable(1) != Comparable(1))
+        self.assertTrue(Comparable(2) != Comparable(1))
+
+
+    def test_greaterThan(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        greater-than comparisons.
+        """
+        self.assertTrue(Comparable(2) > Comparable(1))
+        self.assertFalse(Comparable(0) > Comparable(3))
+
+
+    def test_greaterThanOrEqual(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        greater-than-or-equal comparisons.
+        """
+        self.assertTrue(Comparable(1) >= Comparable(1))
+        self.assertTrue(Comparable(2) >= Comparable(1))
+        self.assertFalse(Comparable(0) >= Comparable(3))
+
+
+    def test_lessThan(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        less-than comparisons.
+        """
+        self.assertTrue(Comparable(0) < Comparable(3))
+        self.assertFalse(Comparable(2) < Comparable(0))
+
+
+    def test_lessThanOrEqual(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        less-than-or-equal comparisons.
+        """
+        self.assertTrue(Comparable(3) <= Comparable(3))
+        self.assertTrue(Comparable(0) <= Comparable(3))
+        self.assertFalse(Comparable(2) <= Comparable(0))
+
+
+
+class Python3ComparableTests(unittest.SynchronousTestCase):
+    """
+    Python 3-specific functionality of C{comparable}.
+    """
+
+    def test_notImplementedEquals(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        returning C{NotImplemented} from C{__eq__} if it is returned by the
+        underlying C{__cmp__} call.
+        """
+        self.assertEqual(Comparable(1).__eq__(object()), NotImplemented)
+
+
+    def test_notImplementedNotEquals(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        returning C{NotImplemented} from C{__ne__} if it is returned by the
+        underlying C{__cmp__} call.
+        """
+        self.assertEqual(Comparable(1).__ne__(object()), NotImplemented)
+
+
+    def test_notImplementedGreaterThan(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        returning C{NotImplemented} from C{__gt__} if it is returned by the
+        underlying C{__cmp__} call.
+        """
+        self.assertEqual(Comparable(1).__gt__(object()), NotImplemented)
+
+
+    def test_notImplementedLessThan(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        returning C{NotImplemented} from C{__lt__} if it is returned by the
+        underlying C{__cmp__} call.
+        """
+        self.assertEqual(Comparable(1).__lt__(object()), NotImplemented)
+
+
+    def test_notImplementedGreaterThanEquals(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        returning C{NotImplemented} from C{__ge__} if it is returned by the
+        underlying C{__cmp__} call.
+        """
+        self.assertEqual(Comparable(1).__ge__(object()), NotImplemented)
+
+
+    def test_notImplementedLessThanEquals(self):
+        """
+        Instances of a class that is decorated by C{comparable} support
+        returning C{NotImplemented} from C{__le__} if it is returned by the
+        underlying C{__cmp__} call.
+        """
+        self.assertEqual(Comparable(1).__le__(object()), NotImplemented)
+
+if not _PY3:
+    # On Python 2, we just use __cmp__ directly, so checking detailed
+    # comparison methods doesn't make sense.
+    Python3ComparableTests.skip = "Python 3 only."
+
+
+
+class CmpTests(unittest.SynchronousTestCase):
+    """
+    L{cmp} should behave like the built-in Python 2 C{cmp}.
+    """
+
+    def test_equals(self):
+        """
+        L{cmp} returns 0 for equal objects.
+        """
+        self.assertEqual(cmp(u"a", u"a"), 0)
+        self.assertEqual(cmp(1, 1), 0)
+        self.assertEqual(cmp([1], [1]), 0)
+
+
+    def test_greaterThan(self):
+        """
+        L{cmp} returns 1 if its first argument is bigger than its second.
+        """
+        self.assertEqual(cmp(4, 0), 1)
+        self.assertEqual(cmp(b"z", b"a"), 1)
+
+
+    def test_lessThan(self):
+        """
+        L{cmp} returns -1 if its first argument is smaller than its second.
+        """
+        self.assertEqual(cmp(0.1, 2.3), -1)
+        self.assertEqual(cmp(b"a", b"d"), -1)
+
+
+
+class StringTests(unittest.SynchronousTestCase):
+    """
+    Compatibility functions and types for strings.
+    """
+
+    def assertNativeString(self, original, expected):
+        """
+        Raise an exception indicating a failed test if the output of
+        C{nativeString(original)} is unequal to the expected string, or is not
+        a native string.
+        """
+        self.assertEqual(nativeString(original), expected)
+        self.assertIsInstance(nativeString(original), str)
+
+
+    def test_nonASCIIBytesToString(self):
+        """
+        C{nativeString} raises a C{UnicodeError} if input bytes are not ASCII
+        decodable.
+        """
+        self.assertRaises(UnicodeError, nativeString, b"\xFF")
+
+
+    def test_nonASCIIUnicodeToString(self):
+        """
+        C{nativeString} raises a C{UnicodeError} if input Unicode is not ASCII
+        encodable.
+        """
+        self.assertRaises(UnicodeError, nativeString, u"\u1234")
+
+
+    def test_bytesToString(self):
+        """
+        C{nativeString} converts bytes to the native string format, assuming
+        an ASCII encoding if applicable.
+        """
+        self.assertNativeString(b"hello", "hello")
+
+
+    def test_unicodeToString(self):
+        """
+        C{nativeString} converts unicode to the native string format, assuming
+        an ASCII encoding if applicable.
+        """
+        self.assertNativeString(u"Good day", "Good day")
+
+
+    def test_stringToString(self):
+        """
+        C{nativeString} leaves native strings as native strings.
+        """
+        self.assertNativeString("Hello!", "Hello!")
+
+
+    def test_unexpectedType(self):
+        """
+        C{nativeString} raises a C{TypeError} if given an object that is not a
+        string of some sort.
+        """
+        self.assertRaises(TypeError, nativeString, 1)
+
+
+    def test_unicode(self):
+        """
+        C{compat.unicode} is C{str} on Python 3, C{unicode} on Python 2.
+        """
+        if _PY3:
+            expected = str
+        else:
+            expected = unicode
+        self.assertTrue(unicodeCompat is expected)
+
+
+    def test_nativeStringIO(self):
+        """
+        L{NativeStringIO} is a file-like object that stores native strings in
+        memory.
+        """
+        f = NativeStringIO()
+        f.write("hello")
+        f.write(" there")
+        self.assertEqual(f.getvalue(), "hello there")
+
+
+
+class NetworkStringTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{networkString}.
+    """
+    def test_bytes(self):
+        """
+        L{networkString} returns a C{bytes} object passed to it unmodified.
+        """
+        self.assertEqual(b"foo", networkString(b"foo"))
+
+
+    def test_bytesOutOfRange(self):
+        """
+        L{networkString} raises C{UnicodeError} if passed a C{bytes} instance
+        containing bytes not used by ASCII.
+        """
+        self.assertRaises(
+            UnicodeError, networkString, u"\N{SNOWMAN}".encode('utf-8'))
+    if _PY3:
+        test_bytes.skip = test_bytesOutOfRange.skip = (
+            "Bytes behavior of networkString only provided on Python 2.")
+
+    def test_unicode(self):
+        """
+        L{networkString} returns the C{unicode} string passed to it, encoded
+        into a C{bytes} instance.
+        """
+        self.assertEqual(b"foo", networkString(u"foo"))
+
+
+    def test_unicodeOutOfRange(self):
+        """
+        L{networkString} raises L{UnicodeError} if passed a C{unicode} instance
+        containing characters not encodable in ASCII.
+        """
+        self.assertRaises(
+            UnicodeError, networkString, u"\N{SNOWMAN}")
+    if not _PY3:
+        test_unicode.skip = test_unicodeOutOfRange.skip = (
+            "Unicode behavior of networkString only provided on Python 3.")
+
+
+    def test_nonString(self):
+        """
+        L{networkString} raises L{TypeError} if passed a non-string object or
+        the wrong type of string object.
+        """
+        self.assertRaises(TypeError, networkString, object())
+        if _PY3:
+            self.assertRaises(TypeError, networkString, b"bytes")
+        else:
+            self.assertRaises(TypeError, networkString, u"text")
+
+
+
+class ReraiseTests(unittest.SynchronousTestCase):
+    """
+    L{reraise} re-raises exceptions on both Python 2 and Python 3.
+    """
+
+    def test_reraiseWithNone(self):
+        """
+        Calling L{reraise} with an exception instance and a traceback of
+        C{None} re-raises it with a new traceback.
+        """
+        try:
+            1/0
+        except:
+            typ, value, tb = sys.exc_info()
+        try:
+            reraise(value, None)
+        except:
+            typ2, value2, tb2 = sys.exc_info()
+            self.assertEqual(typ2, ZeroDivisionError)
+            self.assertTrue(value is value2)
+            self.assertNotEqual(traceback.format_tb(tb)[-1],
+                                traceback.format_tb(tb2)[-1])
+        else:
+            self.fail("The exception was not raised.")
+
+
+    def test_reraiseWithTraceback(self):
+        """
+        Calling L{reraise} with an exception instance and a traceback
+        re-raises the exception with the given traceback.
+        """
+        try:
+            1/0
+        except:
+            typ, value, tb = sys.exc_info()
+        try:
+            reraise(value, tb)
+        except:
+            typ2, value2, tb2 = sys.exc_info()
+            self.assertEqual(typ2, ZeroDivisionError)
+            self.assertTrue(value is value2)
+            self.assertEqual(traceback.format_tb(tb)[-1],
+                             traceback.format_tb(tb2)[-1])
+        else:
+            self.fail("The exception was not raised.")
+
+
+
+class Python3BytesTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{iterbytes}, L{intToBytes}, L{lazyByteSlice}.
+    """
+
+    def test_iteration(self):
+        """
+        When L{iterbytes} is called with a bytestring, the returned object
+        can be iterated over, resulting in the individual bytes of the
+        bytestring.
+        """
+        input = b"abcd"
+        result = list(iterbytes(input))
+        self.assertEqual(result, [b'a', b'b', b'c', b'd'])
+
+
+    def test_intToBytes(self):
+        """
+        When L{intToBytes} is called with an integer, the result is an
+        ASCII-encoded string representation of the number.
+        """
+        self.assertEqual(intToBytes(213), b"213")
+
+
+    def test_lazyByteSliceNoOffset(self):
+        """
+        L{lazyByteSlice} called with some bytes returns a semantically equal version
+        of these bytes.
+        """
+        data = b'123XYZ'
+        self.assertEqual(bytes(lazyByteSlice(data)), data)
+
+
+    def test_lazyByteSliceOffset(self):
+        """
+        L{lazyByteSlice} called with some bytes and an offset returns a semantically
+        equal version of these bytes starting at the given offset.
+        """
+        data = b'123XYZ'
+        self.assertEqual(bytes(lazyByteSlice(data, 2)), data[2:])
+
+
+    def test_lazyByteSliceOffsetAndLength(self):
+        """
+        L{lazyByteSlice} called with some bytes, an offset and a length returns a
+        semantically equal version of these bytes starting at the given
+        offset, up to the given length.
+        """
+        data = b'123XYZ'
+        self.assertEqual(bytes(lazyByteSlice(data, 2, 3)), data[2:5])
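+
+
+# A rough equivalent of the lazyByteSlice behaviour exercised above, sketched
+# with memoryview purely as an illustration (not necessarily how the compat
+# module implements it):
+#
+#     data = b'123XYZ'
+#     assert bytes(memoryview(data)[2:2 + 3]) == data[2:5]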
diff --git a/ThirdParty/Twisted/twisted/test/test_context.py b/ThirdParty/Twisted/twisted/test/test_context.py
new file mode 100644
index 0000000..e17fcf7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_context.py
@@ -0,0 +1,51 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.python.context}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import SynchronousTestCase
+
+from twisted.python import context
+
+class ContextTest(SynchronousTestCase):
+    """
+    Tests for the module-scope APIs for L{twisted.python.context}.
+    """
+    def test_notPresentIfNotSet(self):
+        """
+        Arbitrary keys which have not been set in the context have an associated
+        value of C{None}.
+        """
+        self.assertEqual(context.get("x"), None)
+
+
+    def test_setByCall(self):
+        """
+        Values may be associated with keys by passing them in a dictionary as
+        the first argument to L{twisted.python.context.call}.
+        """
+        self.assertEqual(context.call({"x": "y"}, context.get, "x"), "y")
+
+
+    def test_unsetAfterCall(self):
+        """
+        After a L{twisted.python.context.call} completes, keys specified in the
+        call are no longer associated with the values from that call.
+        """
+        context.call({"x": "y"}, lambda: None)
+        self.assertEqual(context.get("x"), None)
+
+
+    def test_setDefault(self):
+        """
+        A default value may be set for a key in the context using
+        L{twisted.python.context.setDefault}.
+        """
+        key = object()
+        self.addCleanup(context.defaultContextDict.pop, key, None)
+        context.setDefault(key, "y")
+        self.assertEqual("y", context.get(key))
diff --git a/ThirdParty/Twisted/twisted/test/test_cooperator.py b/ThirdParty/Twisted/twisted/test/test_cooperator.py
new file mode 100644
index 0000000..4bf41ef
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_cooperator.py
@@ -0,0 +1,671 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module contains tests for L{twisted.internet.task.Cooperator} and
+related functionality.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.internet import reactor, defer, task
+from twisted.trial import unittest
+
+
+
+class FakeDelayedCall(object):
+    """
+    Fake delayed call which lets us simulate the scheduler.
+    """
+    def __init__(self, func):
+        """
+        A function to run, later.
+        """
+        self.func = func
+        self.cancelled = False
+
+
+    def cancel(self):
+        """
+        Don't run my function later.
+        """
+        self.cancelled = True
+
+
+
+class FakeScheduler(object):
+    """
+    A fake scheduler which collects scheduled work so tests can run it manually.
+    """
+    def __init__(self):
+        """
+        Create a fake scheduler with a list of work to do.
+        """
+        self.work = []
+
+
+    def __call__(self, thunk):
+        """
+        Schedule a unit of work to be done later.
+        """
+        unit = FakeDelayedCall(thunk)
+        self.work.append(unit)
+        return unit
+
+
+    def pump(self):
+        """
+        Do all of the work that is currently available to be done.
+        """
+        work, self.work = self.work, []
+        for unit in work:
+            if not unit.cancelled:
+                unit.func()
+
+
+
+class TestCooperator(unittest.TestCase):
+    RESULT = 'done'
+
+    def ebIter(self, err):
+        err.trap(task.SchedulerStopped)
+        return self.RESULT
+
+
+    def cbIter(self, ign):
+        self.fail()
+
+
+    def testStoppedRejectsNewTasks(self):
+        """
+        Test that Cooperators refuse new tasks when they have been stopped.
+        """
+        def testwith(stuff):
+            c = task.Cooperator()
+            c.stop()
+            d = c.coiterate(iter(()), stuff)
+            d.addCallback(self.cbIter)
+            d.addErrback(self.ebIter)
+            return d.addCallback(lambda result:
+                                 self.assertEqual(result, self.RESULT))
+        return testwith(None).addCallback(lambda ign: testwith(defer.Deferred()))
+
+
+    def testStopRunning(self):
+        """
+        Test that a running iterator will not run to completion when the
+        cooperator is stopped.
+        """
+        c = task.Cooperator()
+        def myiter():
+            for myiter.value in range(3):
+                yield myiter.value
+        myiter.value = -1
+        d = c.coiterate(myiter())
+        d.addCallback(self.cbIter)
+        d.addErrback(self.ebIter)
+        c.stop()
+        def doasserts(result):
+            self.assertEqual(result, self.RESULT)
+            self.assertEqual(myiter.value, -1)
+        d.addCallback(doasserts)
+        return d
+
+
+    def testStopOutstanding(self):
+        """
+        An iterator run with L{Cooperator.coiterate} paused on a L{Deferred}
+        yielded by that iterator will fire its own L{Deferred} (the one
+        returned by C{coiterate}) when L{Cooperator.stop} is called.
+        """
+        testControlD = defer.Deferred()
+        outstandingD = defer.Deferred()
+        def myiter():
+            reactor.callLater(0, testControlD.callback, None)
+            yield outstandingD
+            self.fail()
+        c = task.Cooperator()
+        d = c.coiterate(myiter())
+        def stopAndGo(ign):
+            c.stop()
+            outstandingD.callback('arglebargle')
+
+        testControlD.addCallback(stopAndGo)
+        d.addCallback(self.cbIter)
+        d.addErrback(self.ebIter)
+
+        return d.addCallback(
+            lambda result: self.assertEqual(result, self.RESULT))
+
+
+    def testUnexpectedError(self):
+        c = task.Cooperator()
+        def myiter():
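+            # The unreachable "yield" below makes this a generator function;
+            # iterating it raises RuntimeError on the first step.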
+            if 0:
+                yield None
+            else:
+                raise RuntimeError()
+        d = c.coiterate(myiter())
+        return self.assertFailure(d, RuntimeError)
+
+
+    def testUnexpectedErrorActuallyLater(self):
+        def myiter():
+            D = defer.Deferred()
+            reactor.callLater(0, D.errback, RuntimeError())
+            yield D
+
+        c = task.Cooperator()
+        d = c.coiterate(myiter())
+        return self.assertFailure(d, RuntimeError)
+
+
+    def testUnexpectedErrorNotActuallyLater(self):
+        def myiter():
+            yield defer.fail(RuntimeError())
+
+        c = task.Cooperator()
+        d = c.coiterate(myiter())
+        return self.assertFailure(d, RuntimeError)
+
+
+    def testCooperation(self):
+        L = []
+        def myiter(things):
+            for th in things:
+                L.append(th)
+                yield None
+
+        groupsOfThings = ['abc', (1, 2, 3), 'def', (4, 5, 6)]
+
+        c = task.Cooperator()
+        tasks = []
+        for stuff in groupsOfThings:
+            tasks.append(c.coiterate(myiter(stuff)))
+
+        return defer.DeferredList(tasks).addCallback(
+            lambda ign: self.assertEqual(tuple(L), sum(zip(*groupsOfThings), ())))
+
+
+    def testResourceExhaustion(self):
+        output = []
+        def myiter():
+            for i in range(100):
+                output.append(i)
+                if i == 9:
+                    _TPF.stopped = True
+                yield i
+
+        class _TPF:
+            stopped = False
+            def __call__(self):
+                return self.stopped
+
+        c = task.Cooperator(terminationPredicateFactory=_TPF)
+        c.coiterate(myiter()).addErrback(self.ebIter)
+        c._delayedCall.cancel()
+        # testing a private method because only the test case will ever care
+        # about this, so we have to carefully clean up after ourselves.
+        c._tick()
+        c.stop()
+        self.failUnless(_TPF.stopped)
+        self.assertEqual(output, list(range(10)))
+
+
+    def testCallbackReCoiterate(self):
+        """
+        If a callback to a deferred returned by coiterate calls coiterate on
+        the same Cooperator, we should make sure to only do the minimal amount
+        of scheduling work.  (This test was added to demonstrate a specific bug
+        that was found while writing the scheduler.)
+        """
+        calls = []
+
+        class FakeCall:
+            def __init__(self, func):
+                self.func = func
+
+            def __repr__(self):
+                return '<FakeCall %r>' % (self.func,)
+
+        def sched(f):
+            self.failIf(calls, repr(calls))
+            calls.append(FakeCall(f))
+            return calls[-1]
+
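+        # The termination predicate always returns True, so each tick performs
+        # exactly one unit of work before the next one is scheduled.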
+        c = task.Cooperator(scheduler=sched, terminationPredicateFactory=lambda: lambda: True)
+        d = c.coiterate(iter(()))
+
+        done = []
+        def anotherTask(ign):
+            c.coiterate(iter(())).addBoth(done.append)
+
+        d.addCallback(anotherTask)
+
+        work = 0
+        while not done:
+            work += 1
+            while calls:
+                calls.pop(0).func()
+                work += 1
+            if work > 50:
+                self.fail("Cooperator took too long")
+
+
+    def test_removingLastTaskStopsScheduledCall(self):
+        """
+        If the last task in a Cooperator is removed, the scheduled call for
+        the next tick is cancelled, since it is no longer necessary.
+
+        This behavior is useful for tests that want to assert they have left
+        no reactor state behind when they're done.
+        """
+        calls = [None]
+        def sched(f):
+            calls[0] = FakeDelayedCall(f)
+            return calls[0]
+        coop = task.Cooperator(scheduler=sched)
+
+        # Add two tasks; this should schedule the tick:
+        task1 = coop.cooperate(iter([1, 2]))
+        task2 = coop.cooperate(iter([1, 2]))
+        self.assertEqual(calls[0].func, coop._tick)
+
+        # Remove first task; scheduled call should still be going:
+        task1.stop()
+        self.assertEqual(calls[0].cancelled, False)
+        self.assertEqual(coop._delayedCall, calls[0])
+
+        # Remove second task; scheduled call should be cancelled:
+        task2.stop()
+        self.assertEqual(calls[0].cancelled, True)
+        self.assertEqual(coop._delayedCall, None)
+
+        # Add another task; scheduled call will be recreated:
+        task3 = coop.cooperate(iter([1, 2]))
+        self.assertEqual(calls[0].cancelled, False)
+        self.assertEqual(coop._delayedCall, calls[0])
+
+
+
+class UnhandledException(Exception):
+    """
+    An exception that should go unhandled.
+    """
+
+
+
+class AliasTests(unittest.TestCase):
+    """
+    Integration test to verify that the global singleton aliases do what
+    they're supposed to.
+    """
+
+    def test_cooperate(self):
+        """
+        L{twisted.internet.task.cooperate} ought to run the generator it is passed.
+        """
+        d = defer.Deferred()
+        def doit():
+            yield 1
+            yield 2
+            yield 3
+            d.callback("yay")
+        it = doit()
+        theTask = task.cooperate(it)
+        self.assertIn(theTask, task._theCooperator._tasks)
+        return d
+
+
+
+class RunStateTests(unittest.TestCase):
+    """
+    Tests to verify the behavior of L{CooperativeTask.pause},
+    L{CooperativeTask.resume}, L{CooperativeTask.stop}, exhausting the
+    underlying iterator, and their interactions with each other.
+    """
+
+    def setUp(self):
+        """
+        Create a cooperator with a fake scheduler and a termination predicate
+        that ensures only one unit of work will take place per tick.
+        """
+        self._doDeferNext = False
+        self._doStopNext = False
+        self._doDieNext = False
+        self.work = []
+        self.scheduler = FakeScheduler()
+        self.cooperator = task.Cooperator(
+            scheduler=self.scheduler,
+            # Always stop after one iteration of work (return a function which
+            # returns a function which always returns True)
+            terminationPredicateFactory=lambda: lambda: True)
+        self.task = self.cooperator.cooperate(self.worker())
+        self.cooperator.start()
+
+
+    def worker(self):
+        """
+        This is a sample generator which yields Deferreds when we are testing
+        deferral and an ascending integer count otherwise.
+        """
+        i = 0
+        while True:
+            i += 1
+            if self._doDeferNext:
+                self._doDeferNext = False
+                d = defer.Deferred()
+                self.work.append(d)
+                yield d
+            elif self._doStopNext:
+                return
+            elif self._doDieNext:
+                raise UnhandledException()
+            else:
+                self.work.append(i)
+                yield i
+
+
+    def tearDown(self):
+        """
+        Drop references to interesting parts of the fixture to allow Deferred
+        errors to be noticed when things start failing.
+        """
+        del self.task
+        del self.scheduler
+
+
+    def deferNext(self):
+        """
+        Defer the next result from my worker iterator.
+        """
+        self._doDeferNext = True
+
+
+    def stopNext(self):
+        """
+        Make the next result from my worker iterator be completion (raising
+        StopIteration).
+        """
+        self._doStopNext = True
+
+
+    def dieNext(self):
+        """
+        Make the next result from my worker iterator be raising an
+        L{UnhandledException}.
+        """
+        def ignoreUnhandled(failure):
+            failure.trap(UnhandledException)
+            return None
+        self._doDieNext = True
+
+
+    def test_pauseResume(self):
+        """
+        Cooperators should stop running their tasks when they're paused, and
+        start again when they're resumed.
+        """
+        # first, sanity check
+        self.scheduler.pump()
+        self.assertEqual(self.work, [1])
+        self.scheduler.pump()
+        self.assertEqual(self.work, [1, 2])
+
+        # OK, now for real
+        self.task.pause()
+        self.scheduler.pump()
+        self.assertEqual(self.work, [1, 2])
+        self.task.resume()
+        # Resuming itself should not do any work
+        self.assertEqual(self.work, [1, 2])
+        self.scheduler.pump()
+        # But when the scheduler rolls around again...
+        self.assertEqual(self.work, [1, 2, 3])
+
+
+    def test_resumeNotPaused(self):
+        """
+        L{CooperativeTask.resume} should raise a L{NotPaused} exception if
+        it was not paused; i.e. if L{CooperativeTask.pause} was not invoked
+        more times than L{CooperativeTask.resume} on that object.
+        """
+        self.assertRaises(task.NotPaused, self.task.resume)
+        self.task.pause()
+        self.task.resume()
+        self.assertRaises(task.NotPaused, self.task.resume)
+
+
+    def test_pauseTwice(self):
+        """
+        Pauses on tasks should behave like a stack. If a task is paused twice,
+        it needs to be resumed twice.
+        """
+        # pause once
+        self.task.pause()
+        self.scheduler.pump()
+        self.assertEqual(self.work, [])
+        # pause twice
+        self.task.pause()
+        self.scheduler.pump()
+        self.assertEqual(self.work, [])
+        # resume once (it still shouldn't run)
+        self.task.resume()
+        self.scheduler.pump()
+        self.assertEqual(self.work, [])
+        # resume twice (now it should go)
+        self.task.resume()
+        self.scheduler.pump()
+        self.assertEqual(self.work, [1])
+
+
+    def test_pauseWhileDeferred(self):
+        """
+        C{pause()}ing a task while it is waiting on an outstanding
+        L{defer.Deferred} should put the task into a state where the
+        outstanding L{defer.Deferred} must be called back I{and} the task is
+        C{resume}d before it will continue processing.
+        """
+        self.deferNext()
+        self.scheduler.pump()
+        self.assertEqual(len(self.work), 1)
+        self.failUnless(isinstance(self.work[0], defer.Deferred))
+        self.scheduler.pump()
+        self.assertEqual(len(self.work), 1)
+        self.task.pause()
+        self.scheduler.pump()
+        self.assertEqual(len(self.work), 1)
+        self.task.resume()
+        self.scheduler.pump()
+        self.assertEqual(len(self.work), 1)
+        self.work[0].callback("STUFF!")
+        self.scheduler.pump()
+        self.assertEqual(len(self.work), 2)
+        self.assertEqual(self.work[1], 2)
+
+
+    def test_whenDone(self):
+        """
+        L{CooperativeTask.whenDone} returns a Deferred which fires when the
+        Cooperator's iterator is exhausted.  It returns a new Deferred each
+        time it is called; callbacks added to other invocations will not modify
+        the value that subsequent invocations will fire with.
+        """
+
+        deferred1 = self.task.whenDone()
+        deferred2 = self.task.whenDone()
+        results1 = []
+        results2 = []
+        final1 = []
+        final2 = []
+
+        def callbackOne(result):
+            results1.append(result)
+            return 1
+
+        def callbackTwo(result):
+            results2.append(result)
+            return 2
+
+        deferred1.addCallback(callbackOne)
+        deferred2.addCallback(callbackTwo)
+
+        deferred1.addCallback(final1.append)
+        deferred2.addCallback(final2.append)
+
+        # exhaust the task iterator
+        # callbacks fire
+        self.stopNext()
+        self.scheduler.pump()
+
+        self.assertEqual(len(results1), 1)
+        self.assertEqual(len(results2), 1)
+
+        self.assertIdentical(results1[0], self.task._iterator)
+        self.assertIdentical(results2[0], self.task._iterator)
+
+        self.assertEqual(final1, [1])
+        self.assertEqual(final2, [2])
+
+
+    def test_whenDoneError(self):
+        """
+        L{CooperativeTask.whenDone} returns a L{defer.Deferred} that will fail
+        when the iterable's C{next} method raises an exception, with that
+        exception.
+        """
+        deferred1 = self.task.whenDone()
+        results = []
+        deferred1.addErrback(results.append)
+        self.dieNext()
+        self.scheduler.pump()
+        self.assertEqual(len(results), 1)
+        self.assertEqual(results[0].check(UnhandledException), UnhandledException)
+
+
+    def test_whenDoneStop(self):
+        """
+        L{CooperativeTask.whenDone} returns a L{defer.Deferred} that fails with
+        L{TaskStopped} when the C{stop} method is called on that
+        L{CooperativeTask}.
+        """
+        deferred1 = self.task.whenDone()
+        errors = []
+        deferred1.addErrback(errors.append)
+        self.task.stop()
+        self.assertEqual(len(errors), 1)
+        self.assertEqual(errors[0].check(task.TaskStopped), task.TaskStopped)
+
+
+    def test_whenDoneAlreadyDone(self):
+        """
+        L{CooperativeTask.whenDone} will return a L{defer.Deferred} that will
+        succeed immediately if its iterator has already completed.
+        """
+        self.stopNext()
+        self.scheduler.pump()
+        results = []
+        self.task.whenDone().addCallback(results.append)
+        self.assertEqual(results, [self.task._iterator])
+
+
+    def test_stopStops(self):
+        """
+        C{stop()}ping a task should cause it to be removed from the run just as
+        C{pause()}ing, with the distinction that C{resume()} will raise a
+        L{TaskStopped} exception.
+        """
+        self.task.stop()
+        self.scheduler.pump()
+        self.assertEqual(len(self.work), 0)
+        self.assertRaises(task.TaskStopped, self.task.stop)
+        self.assertRaises(task.TaskStopped, self.task.pause)
+        # Sanity check - it's still not scheduled, is it?
+        self.scheduler.pump()
+        self.assertEqual(self.work, [])
+
+
+    def test_pauseStopResume(self):
+        """
+        C{resume()}ing a paused, stopped task should be a no-op; it should not
+        raise an exception, because it's paused, but neither should it actually
+        do more work from the task.
+        """
+        self.task.pause()
+        self.task.stop()
+        self.task.resume()
+        self.scheduler.pump()
+        self.assertEqual(self.work, [])
+
+
+    def test_stopDeferred(self):
+        """
+        As a corollary of the interaction of C{pause()} and C{resume()},
+        C{stop()}ping a task which is waiting on a L{Deferred} should cause the
+        task to gracefully shut down, meaning that it should not be unpaused
+        when the deferred fires.
+        """
+        self.deferNext()
+        self.scheduler.pump()
+        d = self.work.pop()
+        self.assertEqual(self.task._pauseCount, 1)
+        results = []
+        d.addBoth(results.append)
+        self.scheduler.pump()
+        self.task.stop()
+        self.scheduler.pump()
+        d.callback(7)
+        self.scheduler.pump()
+        # Let's make sure that Deferred doesn't come out fried with an
+        # unhandled error that will be logged.  The value is None, rather than
+        # our test value, 7, because this Deferred is returned to and consumed
+        # by the cooperator code.  Its callback therefore has no contract.
+        self.assertEqual(results, [None])
+        # But more importantly, no further work should have happened.
+        self.assertEqual(self.work, [])
+
+
+    def test_stopExhausted(self):
+        """
+        C{stop()}ping a L{CooperativeTask} whose iterator has been exhausted
+        should raise L{TaskDone}.
+        """
+        self.stopNext()
+        self.scheduler.pump()
+        self.assertRaises(task.TaskDone, self.task.stop)
+
+
+    def test_stopErrored(self):
+        """
+        C{stop()}ping a L{CooperativeTask} whose iterator has encountered an
+        error should raise L{TaskFailed}.
+        """
+        self.dieNext()
+        self.scheduler.pump()
+        self.assertRaises(task.TaskFailed, self.task.stop)
+
+
+    def test_stopCooperatorReentrancy(self):
+        """
+        If a callback of a L{Deferred} from L{CooperativeTask.whenDone} calls
+        C{Cooperator.stop} on its L{CooperativeTask._cooperator}, the
+        L{Cooperator} will stop, but the L{CooperativeTask} whose callback is
+        calling C{stop} should already be considered 'stopped' by the time the
+        callback is running, and therefore removed from the
+        L{Cooperator}.
+        """
+        callbackPhases = []
+        def stopit(result):
+            callbackPhases.append(result)
+            self.cooperator.stop()
+            # "done" here is a sanity check to make sure that we get all the
+            # way through the callback; i.e. stop() shouldn't be raising an
+            # exception due to the stopped-ness of our main task.
+            callbackPhases.append("done")
+        self.task.whenDone().addCallback(stopit)
+        self.stopNext()
+        self.scheduler.pump()
+        self.assertEqual(callbackPhases, [self.task._iterator, "done"])
+
+
+
diff --git a/ThirdParty/Twisted/twisted/test/test_defer.py b/ThirdParty/Twisted/twisted/test/test_defer.py
new file mode 100644
index 0000000..c0691cc
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_defer.py
@@ -0,0 +1,2030 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for defer module.
+"""
+
+from __future__ import division, absolute_import
+
+import gc, traceback
+
+from twisted.python.compat import _PY3
+from twisted.trial import unittest
+from twisted.internet import defer
+from twisted.python import failure, log
+from twisted.python._utilpy3 import unsignedID
+
+class GenericError(Exception):
+    pass
+
+
+
+class ImmediateFailureMixin(object):
+    """
+    Add additional assertion methods.
+    """
+
+    def assertImmediateFailure(self, deferred, exception):
+        """
+        Assert that the given Deferred current result is a Failure with the
+        given exception.
+
+        @return: The exception instance in the Deferred.
+        """
+        failures = []
+        deferred.addErrback(failures.append)
+        self.assertEqual(len(failures), 1)
+        self.assertTrue(failures[0].check(exception))
+        return failures[0].value
+
+
+
+class DeferredTestCase(unittest.SynchronousTestCase, ImmediateFailureMixin):
+
+    def setUp(self):
+        self.callbackResults = None
+        self.errbackResults = None
+        self.callback2Results = None
+        # Restore the debug flag to its original state when done.
+        self.addCleanup(defer.setDebugging, defer.getDebugging())
+
+    def _callback(self, *args, **kw):
+        self.callbackResults = args, kw
+        return args[0]
+
+    def _callback2(self, *args, **kw):
+        self.callback2Results = args, kw
+
+    def _errback(self, *args, **kw):
+        self.errbackResults = args, kw
+
+    def testCallbackWithoutArgs(self):
+        deferred = defer.Deferred()
+        deferred.addCallback(self._callback)
+        deferred.callback("hello")
+        self.assertEqual(self.errbackResults, None)
+        self.assertEqual(self.callbackResults, (('hello',), {}))
+
+    def testCallbackWithArgs(self):
+        deferred = defer.Deferred()
+        deferred.addCallback(self._callback, "world")
+        deferred.callback("hello")
+        self.assertEqual(self.errbackResults, None)
+        self.assertEqual(self.callbackResults, (('hello', 'world'), {}))
+
+    def testCallbackWithKwArgs(self):
+        deferred = defer.Deferred()
+        deferred.addCallback(self._callback, world="world")
+        deferred.callback("hello")
+        self.assertEqual(self.errbackResults, None)
+        self.assertEqual(self.callbackResults,
+                             (('hello',), {'world': 'world'}))
+
+    def testTwoCallbacks(self):
+        deferred = defer.Deferred()
+        deferred.addCallback(self._callback)
+        deferred.addCallback(self._callback2)
+        deferred.callback("hello")
+        self.assertEqual(self.errbackResults, None)
+        self.assertEqual(self.callbackResults,
+                             (('hello',), {}))
+        self.assertEqual(self.callback2Results,
+                             (('hello',), {}))
+
+    def testDeferredList(self):
+        defr1 = defer.Deferred()
+        defr2 = defer.Deferred()
+        defr3 = defer.Deferred()
+        dl = defer.DeferredList([defr1, defr2, defr3])
+        result = []
+        def cb(resultList, result=result):
+            result.extend(resultList)
+        def catch(err):
+            return None
+        dl.addCallbacks(cb, cb)
+        defr1.callback("1")
+        defr2.addErrback(catch)
+        # "catch" is added to eat the GenericError that will be passed on by
+        # the DeferredList's callback on defr2. If left unhandled, the
+        # Failure object would cause a log.err() warning about "Unhandled
+        # error in Deferred". Twisted's pyunit watches for log.err calls and
+        # treats them as failures. So "catch" must eat the error to prevent
+        # it from flunking the test.
+        defr2.errback(GenericError("2"))
+        defr3.callback("3")
+        self.assertEqual([result[0],
+                    # result[1][1] is now a Failure instead of an Exception
+                              (result[1][0], str(result[1][1].value)),
+                              result[2]],
+
+                             [(defer.SUCCESS, "1"),
+                              (defer.FAILURE, "2"),
+                              (defer.SUCCESS, "3")])
+
+    def testEmptyDeferredList(self):
+        result = []
+        def cb(resultList, result=result):
+            result.append(resultList)
+
+        dl = defer.DeferredList([])
+        dl.addCallbacks(cb)
+        self.assertEqual(result, [[]])
+
+        result[:] = []
+        dl = defer.DeferredList([], fireOnOneCallback=1)
+        dl.addCallbacks(cb)
+        self.assertEqual(result, [])
+
+    def testDeferredListFireOnOneError(self):
+        defr1 = defer.Deferred()
+        defr2 = defer.Deferred()
+        defr3 = defer.Deferred()
+        dl = defer.DeferredList([defr1, defr2, defr3], fireOnOneErrback=1)
+        result = []
+        dl.addErrback(result.append)
+
+        # consume errors after they pass through the DeferredList (to avoid
+        # 'Unhandled error in Deferred').
+        def catch(err):
+            return None
+        defr2.addErrback(catch)
+
+        # fire one Deferred's callback, no result yet
+        defr1.callback("1")
+        self.assertEqual(result, [])
+
+        # fire one Deferred's errback -- now we have a result
+        defr2.errback(GenericError("from def2"))
+        self.assertEqual(len(result), 1)
+
+        # extract the result from the list
+        aFailure = result[0]
+
+        # the type of the failure is a FirstError
+        self.failUnless(issubclass(aFailure.type, defer.FirstError),
+            'issubclass(aFailure.type, defer.FirstError) failed: '
+            "failure's type is %r" % (aFailure.type,)
+        )
+
+        firstError = aFailure.value
+
+        # check that the GenericError("2") from the deferred at index 1
+        # (defr2) is intact inside failure.value
+        self.assertEqual(firstError.subFailure.type, GenericError)
+        self.assertEqual(firstError.subFailure.value.args, ("from def2",))
+        self.assertEqual(firstError.index, 1)
+
+
+    def testDeferredListDontConsumeErrors(self):
+        d1 = defer.Deferred()
+        dl = defer.DeferredList([d1])
+
+        errorTrap = []
+        d1.addErrback(errorTrap.append)
+
+        result = []
+        dl.addCallback(result.append)
+
+        d1.errback(GenericError('Bang'))
+        self.assertEqual('Bang', errorTrap[0].value.args[0])
+        self.assertEqual(1, len(result))
+        self.assertEqual('Bang', result[0][0][1].value.args[0])
+
+    def testDeferredListConsumeErrors(self):
+        d1 = defer.Deferred()
+        dl = defer.DeferredList([d1], consumeErrors=True)
+
+        errorTrap = []
+        d1.addErrback(errorTrap.append)
+
+        result = []
+        dl.addCallback(result.append)
+
+        d1.errback(GenericError('Bang'))
+        self.assertEqual([], errorTrap)
+        self.assertEqual(1, len(result))
+        self.assertEqual('Bang', result[0][0][1].value.args[0])
+
+    def testDeferredListFireOnOneErrorWithAlreadyFiredDeferreds(self):
+        # Create some deferreds, and errback one
+        d1 = defer.Deferred()
+        d2 = defer.Deferred()
+        d1.errback(GenericError('Bang'))
+
+        # *Then* build the DeferredList, with fireOnOneErrback=True
+        dl = defer.DeferredList([d1, d2], fireOnOneErrback=True)
+        result = []
+        dl.addErrback(result.append)
+        self.assertEqual(1, len(result))
+
+        d1.addErrback(lambda e: None)  # Swallow error
+
+    def testDeferredListWithAlreadyFiredDeferreds(self):
+        # Create some deferreds, and err one, call the other
+        d1 = defer.Deferred()
+        d2 = defer.Deferred()
+        d1.errback(GenericError('Bang'))
+        d2.callback(2)
+
+        # *Then* build the DeferredList
+        dl = defer.DeferredList([d1, d2])
+
+        result = []
+        dl.addCallback(result.append)
+
+        self.assertEqual(1, len(result))
+
+        d1.addErrback(lambda e: None)  # Swallow error
+
+
+    def testImmediateSuccess(self):
+        l = []
+        d = defer.succeed("success")
+        d.addCallback(l.append)
+        self.assertEqual(l, ["success"])
+
+
+    def testImmediateFailure(self):
+        l = []
+        d = defer.fail(GenericError("fail"))
+        d.addErrback(l.append)
+        self.assertEqual(str(l[0].value), "fail")
+
+    def testPausedFailure(self):
+        l = []
+        d = defer.fail(GenericError("fail"))
+        d.pause()
+        d.addErrback(l.append)
+        self.assertEqual(l, [])
+        d.unpause()
+        self.assertEqual(str(l[0].value), "fail")
+
+    def testCallbackErrors(self):
+        l = []
+        d = defer.Deferred().addCallback(lambda _: 1 // 0).addErrback(l.append)
+        d.callback(1)
+        self.assert_(isinstance(l[0].value, ZeroDivisionError))
+        l = []
+        d = defer.Deferred().addCallback(
+            lambda _: failure.Failure(ZeroDivisionError())).addErrback(l.append)
+        d.callback(1)
+        self.assert_(isinstance(l[0].value, ZeroDivisionError))
+
+    def testUnpauseBeforeCallback(self):
+        d = defer.Deferred()
+        d.pause()
+        d.addCallback(self._callback)
+        d.unpause()
+
+    def testReturnDeferred(self):
+        d = defer.Deferred()
+        d2 = defer.Deferred()
+        d2.pause()
+        d.addCallback(lambda r, d2=d2: d2)
+        d.addCallback(self._callback)
+        d.callback(1)
+        assert self.callbackResults is None, "Should not have been called yet."
+        d2.callback(2)
+        assert self.callbackResults is None, "Still should not have been called yet."
+        d2.unpause()
+        assert self.callbackResults[0][0] == 2, "Result should have been from second deferred:%s" % (self.callbackResults,)
+
+
+    def test_chainedPausedDeferredWithResult(self):
+        """
+        When a paused Deferred with a result is returned from a callback on
+        another Deferred, the other Deferred is chained to the first and waits
+        for it to be unpaused.
+        """
+        expected = object()
+        paused = defer.Deferred()
+        paused.callback(expected)
+        paused.pause()
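+        # paused already holds a result, but because it is paused anything
+        # chained to it has to wait until it is unpaused.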
+        chained = defer.Deferred()
+        chained.addCallback(lambda ignored: paused)
+        chained.callback(None)
+
+        result = []
+        chained.addCallback(result.append)
+        self.assertEqual(result, [])
+        paused.unpause()
+        self.assertEqual(result, [expected])
+
+
+    def test_pausedDeferredChained(self):
+        """
+        A paused Deferred encountered while pushing a result forward through a
+        chain does not prevent earlier Deferreds from continuing to execute
+        their callbacks.
+        """
+        first = defer.Deferred()
+        second = defer.Deferred()
+        first.addCallback(lambda ignored: second)
+        first.callback(None)
+        first.pause()
+        second.callback(None)
+        result = []
+        second.addCallback(result.append)
+        self.assertEqual(result, [None])
+
+
+    def test_gatherResults(self):
+        # test successful list of deferreds
+        l = []
+        defer.gatherResults([defer.succeed(1), defer.succeed(2)]).addCallback(l.append)
+        self.assertEqual(l, [[1, 2]])
+        # test failing list of deferreds
+        l = []
+        dl = [defer.succeed(1), defer.fail(ValueError())]
+        defer.gatherResults(dl).addErrback(l.append)
+        self.assertEqual(len(l), 1)
+        self.assert_(isinstance(l[0], failure.Failure))
+        # get rid of error
+        dl[1].addErrback(lambda e: 1)
+
+
+    def test_gatherResultsWithConsumeErrors(self):
+        """
+        If a L{Deferred} in the list passed to L{gatherResults} fires with a
+        failure and C{consumeErrors} is C{True}, the failure is converted to a
+        C{None} result on that L{Deferred}.
+        """
+        # test successful list of deferreds
+        dgood = defer.succeed(1)
+        dbad = defer.fail(RuntimeError("oh noes"))
+        d = defer.gatherResults([dgood, dbad], consumeErrors=True)
+        unconsumedErrors = []
+        dbad.addErrback(unconsumedErrors.append)
+        gatheredErrors = []
+        d.addErrback(gatheredErrors.append)
+
+        self.assertEqual((len(unconsumedErrors), len(gatheredErrors)),
+                         (0, 1))
+        self.assertIsInstance(gatheredErrors[0].value, defer.FirstError)
+        firstError = gatheredErrors[0].value.subFailure
+        self.assertIsInstance(firstError.value, RuntimeError)
+
+
+    def test_maybeDeferredSync(self):
+        """
+        L{defer.maybeDeferred} should retrieve the result of a synchronous
+        function and pass it to its resulting L{defer.Deferred}.
+        """
+        S, E = [], []
+        d = defer.maybeDeferred((lambda x: x + 5), 10)
+        d.addCallbacks(S.append, E.append)
+        self.assertEqual(E, [])
+        self.assertEqual(S, [15])
+
+
+    def test_maybeDeferredSyncError(self):
+        """
+        L{defer.maybeDeferred} should catch exception raised by a synchronous
+        function and errback its resulting L{defer.Deferred} with it.
+        """
+        S, E = [], []
+        try:
+            '10' + 5
+        except TypeError as e:
+            expected = str(e)
+        d = defer.maybeDeferred((lambda x: x + 5), '10')
+        d.addCallbacks(S.append, E.append)
+        self.assertEqual(S, [])
+        self.assertEqual(len(E), 1)
+        self.assertEqual(str(E[0].value), expected)
+
+
+    def test_maybeDeferredAsync(self):
+        """
+        L{defer.maybeDeferred} should let L{defer.Deferred} instance pass by
+        so that original result is the same.
+        """
+        d = defer.Deferred()
+        d2 = defer.maybeDeferred(lambda: d)
+        d.callback('Success')
+        result = []
+        d2.addCallback(result.append)
+        self.assertEqual(result, ['Success'])
+
+
+    def test_maybeDeferredAsyncError(self):
+        """
+        L{defer.maybeDeferred} should let L{defer.Deferred} instance pass by
+        so that L{failure.Failure} returned by the original instance is the
+        same.
+        """
+        d = defer.Deferred()
+        d2 = defer.maybeDeferred(lambda: d)
+        d.errback(failure.Failure(RuntimeError()))
+        self.assertImmediateFailure(d2, RuntimeError)
+
+
+    def test_innerCallbacksPreserved(self):
+        """
+        When a L{Deferred} encounters a result which is another L{Deferred}
+        which is waiting on a third L{Deferred}, the middle L{Deferred}'s
+        callbacks are executed after the third L{Deferred} fires and before the
+        first receives a result.
+        """
+        results = []
+        failures = []
+        inner = defer.Deferred()
+        def cb(result):
+            results.append(('start-of-cb', result))
+            d = defer.succeed('inner')
+            def firstCallback(result):
+                results.append(('firstCallback', 'inner'))
+                return inner
+            def secondCallback(result):
+                results.append(('secondCallback', result))
+                return result * 2
+            d.addCallback(firstCallback).addCallback(secondCallback)
+            d.addErrback(failures.append)
+            return d
+        outer = defer.succeed('outer')
+        outer.addCallback(cb)
+        inner.callback('orange')
+        outer.addCallback(results.append)
+        inner.addErrback(failures.append)
+        outer.addErrback(failures.append)
+        self.assertEqual([], failures)
+        self.assertEqual(
+            results,
+            [('start-of-cb', 'outer'),
+             ('firstCallback', 'inner'),
+             ('secondCallback', 'orange'),
+             'orangeorange'])
+
+
+    def test_continueCallbackNotFirst(self):
+        """
+        The continue callback of a L{Deferred} waiting for another L{Deferred}
+        is not necessarily the first one. This is somewhat a whitebox test
+        checking that we search for that callback among the whole list of
+        callbacks.
+        """
+        results = []
+        failures = []
+        a = defer.Deferred()
+
+        def cb(result):
+            results.append(('cb', result))
+            d = defer.Deferred()
+
+            def firstCallback(ignored):
+                results.append(('firstCallback', ignored))
+                return defer.gatherResults([a])
+
+            def secondCallback(result):
+                results.append(('secondCallback', result))
+
+            d.addCallback(firstCallback)
+            d.addCallback(secondCallback)
+            d.addErrback(failures.append)
+            d.callback(None)
+            return d
+
+        outer = defer.succeed('outer')
+        outer.addCallback(cb)
+        outer.addErrback(failures.append)
+        self.assertEqual([('cb', 'outer'), ('firstCallback', None)], results)
+        a.callback('withers')
+        self.assertEqual([], failures)
+        self.assertEqual(
+            results,
+            [('cb', 'outer'),
+             ('firstCallback', None),
+             ('secondCallback', ['withers'])])
+
+
+    def test_callbackOrderPreserved(self):
+        """
+        A callback added to a L{Deferred} after a previous callback attached
+        another L{Deferred} as a result is run after the callbacks of the other
+        L{Deferred} are run.
+        """
+        results = []
+        failures = []
+        a = defer.Deferred()
+
+        def cb(result):
+            results.append(('cb', result))
+            d = defer.Deferred()
+
+            def firstCallback(ignored):
+                results.append(('firstCallback', ignored))
+                return defer.gatherResults([a])
+
+            def secondCallback(result):
+                results.append(('secondCallback', result))
+
+            d.addCallback(firstCallback)
+            d.addCallback(secondCallback)
+            d.addErrback(failures.append)
+            d.callback(None)
+            return d
+
+        outer = defer.Deferred()
+        outer.addCallback(cb)
+        outer.addCallback(lambda x: results.append('final'))
+        outer.addErrback(failures.append)
+        outer.callback('outer')
+        self.assertEqual([('cb', 'outer'), ('firstCallback', None)], results)
+        a.callback('withers')
+        self.assertEqual([], failures)
+        self.assertEqual(
+            results,
+            [('cb', 'outer'),
+             ('firstCallback', None),
+             ('secondCallback', ['withers']), 'final'])
+
+
+    def test_reentrantRunCallbacks(self):
+        """
+        A callback added to a L{Deferred} by a callback on that L{Deferred}
+        should be added to the end of the callback chain.
+        """
+        deferred = defer.Deferred()
+        called = []
+        def callback3(result):
+            called.append(3)
+        def callback2(result):
+            called.append(2)
+        def callback1(result):
+            called.append(1)
+            deferred.addCallback(callback3)
+        deferred.addCallback(callback1)
+        deferred.addCallback(callback2)
+        deferred.callback(None)
+        self.assertEqual(called, [1, 2, 3])
+
+
+    def test_nonReentrantCallbacks(self):
+        """
+        A callback added to a L{Deferred} by a callback on that L{Deferred}
+        should not be executed until the running callback returns.
+        """
+        deferred = defer.Deferred()
+        called = []
+        def callback2(result):
+            called.append(2)
+        def callback1(result):
+            called.append(1)
+            deferred.addCallback(callback2)
+            self.assertEqual(called, [1])
+        deferred.addCallback(callback1)
+        deferred.callback(None)
+        self.assertEqual(called, [1, 2])
+
+
+    def test_reentrantRunCallbacksWithFailure(self):
+        """
+        After an exception is raised by a callback which was added to a
+        L{Deferred} by a callback on that L{Deferred}, the L{Deferred} should
+        call the first errback with a L{Failure} wrapping that exception.
+        """
+        exceptionMessage = "callback raised exception"
+        deferred = defer.Deferred()
+        def callback2(result):
+            raise Exception(exceptionMessage)
+        def callback1(result):
+            deferred.addCallback(callback2)
+        deferred.addCallback(callback1)
+        deferred.callback(None)
+        exception = self.assertImmediateFailure(deferred, Exception)
+        self.assertEqual(exception.args, (exceptionMessage,))
+
+
+    def test_synchronousImplicitChain(self):
+        """
+        If a first L{Deferred} with a result is returned from a callback on a
+        second L{Deferred}, the result of the second L{Deferred} becomes the
+        result of the first L{Deferred} and the result of the first L{Deferred}
+        becomes C{None}.
+        """
+        result = object()
+        first = defer.succeed(result)
+        second = defer.Deferred()
+        second.addCallback(lambda ign: first)
+        second.callback(None)
+
+        results = []
+        first.addCallback(results.append)
+        self.assertIdentical(results[0], None)
+        second.addCallback(results.append)
+        self.assertIdentical(results[1], result)
+
+
+    def test_asynchronousImplicitChain(self):
+        """
+        If a first L{Deferred} without a result is returned from a callback on
+        a second L{Deferred}, the result of the second L{Deferred} becomes the
+        result of the first L{Deferred} as soon as the first L{Deferred} has
+        one and the result of the first L{Deferred} becomes C{None}.
+        """
+        first = defer.Deferred()
+        second = defer.Deferred()
+        second.addCallback(lambda ign: first)
+        second.callback(None)
+
+        firstResult = []
+        first.addCallback(firstResult.append)
+        secondResult = []
+        second.addCallback(secondResult.append)
+
+        self.assertEqual(firstResult, [])
+        self.assertEqual(secondResult, [])
+
+        result = object()
+        first.callback(result)
+
+        self.assertEqual(firstResult, [None])
+        self.assertEqual(secondResult, [result])
+
+
+    def test_synchronousImplicitErrorChain(self):
+        """
+        If a first L{Deferred} with a L{Failure} result is returned from a
+        callback on a second L{Deferred}, the first L{Deferred}'s result is
+        converted to L{None} and no unhandled error is logged when it is
+        garbage collected.
+        """
+        first = defer.fail(RuntimeError("First Deferred's Failure"))
+        second = defer.Deferred()
+        second.addCallback(lambda ign, first=first: first)
+        second.callback(None)
+        firstResult = []
+        first.addCallback(firstResult.append)
+        self.assertIdentical(firstResult[0], None)
+        self.assertImmediateFailure(second, RuntimeError)
+
+
+    def test_asynchronousImplicitErrorChain(self):
+        """
+        Let C{a} and C{b} be two L{Deferred}s.
+
+        If C{a} has no result and is returned from a callback on C{b} then when
+        C{a} fails, C{b}'s result becomes the L{Failure} that was C{a}'s result,
+        the result of C{a} becomes C{None} so that no unhandled error is logged
+        when it is garbage collected.
+        """
+        first = defer.Deferred()
+        second = defer.Deferred()
+        second.addCallback(lambda ign: first)
+        second.callback(None)
+        secondError = []
+        second.addErrback(secondError.append)
+
+        firstResult = []
+        first.addCallback(firstResult.append)
+        secondResult = []
+        second.addCallback(secondResult.append)
+
+        self.assertEqual(firstResult, [])
+        self.assertEqual(secondResult, [])
+
+        first.errback(RuntimeError("First Deferred's Failure"))
+        self.assertTrue(secondError[0].check(RuntimeError))
+        self.assertEqual(firstResult, [None])
+        self.assertEqual(len(secondResult), 1)
+
+
+    def test_doubleAsynchronousImplicitChaining(self):
+        """
+        L{Deferred} chaining is transitive.
+
+        In other words, let A, B, and C be Deferreds.  If C is returned from a
+        callback on B and B is returned from a callback on A then when C fires,
+        A fires.
+        """
+        first = defer.Deferred()
+        second = defer.Deferred()
+        second.addCallback(lambda ign: first)
+        third = defer.Deferred()
+        third.addCallback(lambda ign: second)
+
+        thirdResult = []
+        third.addCallback(thirdResult.append)
+
+        result = object()
+        # After this, second is waiting for first to tell it to continue.
+        second.callback(None)
+        # And after this, third is waiting for second to tell it to continue.
+        third.callback(None)
+
+        # Still waiting
+        self.assertEqual(thirdResult, [])
+
+        # This will tell second to continue which will tell third to continue.
+        first.callback(result)
+
+        self.assertEqual(thirdResult, [result])
+
+
+    def test_nestedAsynchronousChainedDeferreds(self):
+        """
+        L{Deferred}s can have callbacks that themselves return L{Deferred}s.
+        When these "inner" L{Deferred}s fire (even asynchronously), the
+        callback chain continues.
+        """
+        results = []
+        failures = []
+
+        # A Deferred returned in the inner callback.
+        inner = defer.Deferred()
+
+        def cb(result):
+            results.append(('start-of-cb', result))
+            d = defer.succeed('inner')
+
+            def firstCallback(result):
+                results.append(('firstCallback', 'inner'))
+                # Return a Deferred that definitely has not fired yet, so we
+                # can fire the Deferreds out of order.
+                return inner
+
+            def secondCallback(result):
+                results.append(('secondCallback', result))
+                return result * 2
+
+            d.addCallback(firstCallback).addCallback(secondCallback)
+            d.addErrback(failures.append)
+            return d
+
+        # Create a synchronous Deferred that has a callback 'cb' that returns
+        # a Deferred 'd' that has fired but is now waiting on an unfired
+        # Deferred 'inner'.
+        outer = defer.succeed('outer')
+        outer.addCallback(cb)
+        outer.addCallback(results.append)
+        # At this point, the callback 'cb' has been entered, and the first
+        # callback of 'd' has been called.
+        self.assertEqual(
+            results, [('start-of-cb', 'outer'), ('firstCallback', 'inner')])
+
+        # Once the inner Deferred is fired, processing of the outer Deferred's
+        # callback chain continues.
+        inner.callback('orange')
+
+        # Make sure there are no errors.
+        inner.addErrback(failures.append)
+        outer.addErrback(failures.append)
+        self.assertEqual(
+            [], failures, "Got errbacks but wasn't expecting any.")
+
+        self.assertEqual(
+            results,
+            [('start-of-cb', 'outer'),
+             ('firstCallback', 'inner'),
+             ('secondCallback', 'orange'),
+             'orangeorange'])
+
+
+    def test_nestedAsynchronousChainedDeferredsWithExtraCallbacks(self):
+        """
+        L{Deferred}s can have callbacks that themselves return L{Deferred}s.
+        These L{Deferred}s can have other callbacks added before they are
+        returned, which subtly changes the callback chain. When these "inner"
+        L{Deferred}s fire (even asynchronously), the outer callback chain
+        continues.
+        """
+        results = []
+        failures = []
+
+        # A Deferred returned in the inner callback after a callback is
+        # added explicitly and directly to it.
+        inner = defer.Deferred()
+
+        def cb(result):
+            results.append(('start-of-cb', result))
+            d = defer.succeed('inner')
+
+            def firstCallback(ignored):
+                results.append(('firstCallback', ignored))
+                # Return a Deferred that definitely has not fired yet with a
+                # result-transforming callback so we can fire the Deferreds
+                # out of order and see how the callback affects the ultimate
+                # results.
+                return inner.addCallback(lambda x: [x])
+
+            def secondCallback(result):
+                results.append(('secondCallback', result))
+                return result * 2
+
+            d.addCallback(firstCallback)
+            d.addCallback(secondCallback)
+            d.addErrback(failures.append)
+            return d
+
+        # Create a synchronous Deferred that has a callback 'cb' that returns
+        # a Deferred 'd' that has fired but is now waiting on an unfired
+        # Deferred 'inner'.
+        outer = defer.succeed('outer')
+        outer.addCallback(cb)
+        outer.addCallback(results.append)
+        # At this point, the callback 'cb' has been entered, and the first
+        # callback of 'd' has been called.
+        self.assertEqual(
+            results, [('start-of-cb', 'outer'), ('firstCallback', 'inner')])
+
+        # Once the inner Deferred is fired, processing of the outer Deferred's
+        # callback chain continues.
+        inner.callback('withers')
+
+        # Make sure there are no errors.
+        outer.addErrback(failures.append)
+        inner.addErrback(failures.append)
+        self.assertEqual(
+            [], failures, "Got errbacks but wasn't expecting any.")
+
+        self.assertEqual(
+            results,
+            [('start-of-cb', 'outer'),
+             ('firstCallback', 'inner'),
+             ('secondCallback', ['withers']),
+             ['withers', 'withers']])
+
+
+    def test_chainDeferredRecordsExplicitChain(self):
+        """
+        When we chain a L{Deferred}, that chaining is recorded explicitly.
+        """
+        a = defer.Deferred()
+        b = defer.Deferred()
+        b.chainDeferred(a)
+        self.assertIdentical(a._chainedTo, b)
+
+
+    def test_explicitChainClearedWhenResolved(self):
+        """
+        Any recorded chaining is cleared once the chaining is resolved, since
+        it no longer exists.
+
+        In other words, if one L{Deferred} is recorded as depending on the
+        result of another, and I{that} L{Deferred} has fired, then the
+        dependency is resolved and we no longer benefit from recording it.
+        """
+        a = defer.Deferred()
+        b = defer.Deferred()
+        b.chainDeferred(a)
+        b.callback(None)
+        self.assertIdentical(a._chainedTo, None)
+
+
+    def test_chainDeferredRecordsImplicitChain(self):
+        """
+        We can chain L{Deferred}s implicitly by adding callbacks that return
+        L{Deferred}s. When this chaining happens, we record it explicitly as
+        soon as we can find out about it.
+        """
+        a = defer.Deferred()
+        b = defer.Deferred()
+        a.addCallback(lambda ignored: b)
+        a.callback(None)
+        self.assertIdentical(a._chainedTo, b)
+
+
+    def test_repr(self):
+        """
+        The C{repr()} of a L{Deferred} contains the class name and a
+        representation of the internal Python ID.
+        """
+        d = defer.Deferred()
+        address = hex(unsignedID(d))
+        self.assertEqual(
+            repr(d), '<Deferred at %s>' % (address,))
+
+
+    def test_reprWithResult(self):
+        """
+        If a L{Deferred} has been fired, then its C{repr()} contains its
+        result.
+        """
+        d = defer.Deferred()
+        d.callback('orange')
+        self.assertEqual(
+            repr(d), "<Deferred at %s current result: 'orange'>" % (
+                hex(unsignedID(d))))
+
+
+    def test_reprWithChaining(self):
+        """
+        If a L{Deferred} C{a} has been fired, but is waiting on another
+        L{Deferred} C{b} that appears in its callback chain, then C{repr(a)}
+        says that it is waiting on C{b}.
+        """
+        a = defer.Deferred()
+        b = defer.Deferred()
+        b.chainDeferred(a)
+        self.assertEqual(
+            repr(a), "<Deferred at %s waiting on Deferred at %s>" % (
+                hex(unsignedID(a)), hex(unsignedID(b))))
+
+
+    def test_boundedStackDepth(self):
+        """
+        The depth of the call stack does not grow as more L{Deferred} instances
+        are chained together.
+        """
+        def chainDeferreds(howMany):
+            stack = []
+            def recordStackDepth(ignored):
+                stack.append(len(traceback.extract_stack()))
+
+            top = defer.Deferred()
+            innerDeferreds = [defer.Deferred() for ignored in range(howMany)]
+            originalInners = innerDeferreds[:]
+            last = defer.Deferred()
+
+            inner = innerDeferreds.pop()
+            top.addCallback(lambda ign, inner=inner: inner)
+            top.addCallback(recordStackDepth)
+
+            while innerDeferreds:
+                newInner = innerDeferreds.pop()
+                inner.addCallback(lambda ign, inner=newInner: inner)
+                inner = newInner
+            inner.addCallback(lambda ign: last)
+
+            top.callback(None)
+            for inner in originalInners:
+                inner.callback(None)
+
+            # Sanity check - the record callback is not intended to have
+            # fired yet.
+            self.assertEqual(stack, [])
+
+            # Now fire the last thing and return the stack depth at which the
+            # callback was invoked.
+            last.callback(None)
+            return stack[0]
+
+        # Callbacks should be invoked at the same stack depth regardless of
+        # how many Deferreds are chained.
+        self.assertEqual(chainDeferreds(1), chainDeferreds(2))
+
+
+    def test_resultOfDeferredResultOfDeferredOfFiredDeferredCalled(self):
+        """
+        Given three Deferreds, one chained to the next chained to the next,
+        callbacks on the middle Deferred which are added after the chain is
+        created are called once the last Deferred fires.
+
+        This is more of a regression-style test.  It doesn't exercise any
+        particular code path through the current implementation of Deferred, but
+        it does exercise a broken codepath through one of the variations of the
+        implementation proposed as a resolution to ticket #411.
+        """
+        first = defer.Deferred()
+        second = defer.Deferred()
+        third = defer.Deferred()
+        first.addCallback(lambda ignored: second)
+        second.addCallback(lambda ignored: third)
+        second.callback(None)
+        first.callback(None)
+        third.callback(None)
+        L = []
+        second.addCallback(L.append)
+        self.assertEqual(L, [None])
+
+
+    def test_errbackWithNoArgsNoDebug(self):
+        """
+        C{Deferred.errback()} creates a failure from the current Python
+        exception.  When Deferred.debug is not set no globals or locals are
+        captured in that failure.
+        """
+        defer.setDebugging(False)
+        d = defer.Deferred()
+        l = []
+        exc = GenericError("Bang")
+        try:
+            raise exc
+        except:
+            d.errback()
+        d.addErrback(l.append)
+        fail = l[0]
+        self.assertEqual(fail.value, exc)
+        localz, globalz = fail.frames[0][-2:]
+        self.assertEqual([], localz)
+        self.assertEqual([], globalz)
+
+
+    def test_errbackWithNoArgs(self):
+        """
+        C{Deferred.errback()} creates a failure from the current Python
+        exception.  When Deferred.debug is set globals and locals are captured
+        in that failure.
+        """
+        defer.setDebugging(True)
+        d = defer.Deferred()
+        l = []
+        exc = GenericError("Bang")
+        try:
+            raise exc
+        except:
+            d.errback()
+        d.addErrback(l.append)
+        fail = l[0]
+        self.assertEqual(fail.value, exc)
+        localz, globalz = fail.frames[0][-2:]
+        self.assertNotEqual([], localz)
+        self.assertNotEqual([], globalz)
+
+
+    def test_errorInCallbackDoesNotCaptureVars(self):
+        """
+        An error raised by a callback creates a Failure.  The Failure captures
+        locals and globals if and only if C{Deferred.debug} is set.
+        """
+        d = defer.Deferred()
+        d.callback(None)
+        defer.setDebugging(False)
+        def raiseError(ignored):
+            raise GenericError("Bang")
+        d.addCallback(raiseError)
+        l = []
+        d.addErrback(l.append)
+        fail = l[0]
+        localz, globalz = fail.frames[0][-2:]
+        self.assertEqual([], localz)
+        self.assertEqual([], globalz)
+
+
+    def test_errorInCallbackCapturesVarsWhenDebugging(self):
+        """
+        An error raised by a callback creates a Failure.  The Failure captures
+        locals and globals if and only if C{Deferred.debug} is set.
+        """
+        d = defer.Deferred()
+        d.callback(None)
+        defer.setDebugging(True)
+        def raiseError(ignored):
+            raise GenericError("Bang")
+        d.addCallback(raiseError)
+        l = []
+        d.addErrback(l.append)
+        fail = l[0]
+        localz, globalz = fail.frames[0][-2:]
+        self.assertNotEqual([], localz)
+        self.assertNotEqual([], globalz)
+
+
+
+class FirstErrorTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{FirstError}.
+    """
+    def test_repr(self):
+        """
+        The repr of a L{FirstError} instance includes the repr of the value of
+        the sub-failure and the index which corresponds to the L{FirstError}.
+        """
+        exc = ValueError("some text")
+        try:
+            raise exc
+        except:
+            f = failure.Failure()
+
+        error = defer.FirstError(f, 3)
+        self.assertEqual(
+            repr(error),
+            "FirstError[#3, %s]" % (repr(exc),))
+
+
+    def test_str(self):
+        """
+        The str of a L{FirstError} instance includes the str of the
+        sub-failure and the index which corresponds to the L{FirstError}.
+        """
+        exc = ValueError("some text")
+        try:
+            raise exc
+        except:
+            f = failure.Failure()
+
+        error = defer.FirstError(f, 5)
+        self.assertEqual(
+            str(error),
+            "FirstError[#5, %s]" % (str(f),))
+
+
+    def test_comparison(self):
+        """
+        L{FirstError} instances compare equal to each other if and only if
+        their failure and index compare equal.  L{FirstError} instances do not
+        compare equal to instances of other types.
+        """
+        try:
+            1 // 0
+        except:
+            firstFailure = failure.Failure()
+
+        one = defer.FirstError(firstFailure, 13)
+        anotherOne = defer.FirstError(firstFailure, 13)
+
+        try:
+            raise ValueError("bar")
+        except:
+            secondFailure = failure.Failure()
+
+        another = defer.FirstError(secondFailure, 9)
+
+        self.assertTrue(one == anotherOne)
+        self.assertFalse(one == another)
+        self.assertTrue(one != another)
+        self.assertFalse(one != anotherOne)
+
+        self.assertFalse(one == 10)
+
+
+
+class AlreadyCalledTestCase(unittest.SynchronousTestCase):
+    def setUp(self):
+        self._deferredWasDebugging = defer.getDebugging()
+        defer.setDebugging(True)
+
+    def tearDown(self):
+        defer.setDebugging(self._deferredWasDebugging)
+
+    def _callback(self, *args, **kw):
+        pass
+    def _errback(self, *args, **kw):
+        pass
+
+    def _call_1(self, d):
+        d.callback("hello")
+    def _call_2(self, d):
+        d.callback("twice")
+    def _err_1(self, d):
+        d.errback(failure.Failure(RuntimeError()))
+    def _err_2(self, d):
+        d.errback(failure.Failure(RuntimeError()))
+
+    def testAlreadyCalled_CC(self):
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._call_1(d)
+        self.failUnlessRaises(defer.AlreadyCalledError, self._call_2, d)
+
+    def testAlreadyCalled_CE(self):
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._call_1(d)
+        self.failUnlessRaises(defer.AlreadyCalledError, self._err_2, d)
+
+    def testAlreadyCalled_EE(self):
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._err_1(d)
+        self.failUnlessRaises(defer.AlreadyCalledError, self._err_2, d)
+
+    def testAlreadyCalled_EC(self):
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._err_1(d)
+        self.failUnlessRaises(defer.AlreadyCalledError, self._call_2, d)
+
+
+    def _count(self, linetype, func, lines, expected):
+        count = 0
+        for line in lines:
+            if (line.startswith(' %s:' % linetype) and
+                line.endswith(' %s' % func)):
+                count += 1
+        self.failUnless(count == expected)
+
+    def _check(self, e, caller, invoker1, invoker2):
+        # make sure the debugging information is vaguely correct
+        lines = e.args[0].split("\n")
+        # the creator should list the creator (testAlreadyCalledDebug) but not
+        # _call_1 or _call_2 or other invokers
+        self._count('C', caller, lines, 1)
+        self._count('C', '_call_1', lines, 0)
+        self._count('C', '_call_2', lines, 0)
+        self._count('C', '_err_1', lines, 0)
+        self._count('C', '_err_2', lines, 0)
+        # invoker should list the first invoker but not the second
+        self._count('I', invoker1, lines, 1)
+        self._count('I', invoker2, lines, 0)
+
+    def testAlreadyCalledDebug_CC(self):
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._call_1(d)
+        try:
+            self._call_2(d)
+        except defer.AlreadyCalledError as e:
+            self._check(e, "testAlreadyCalledDebug_CC", "_call_1", "_call_2")
+        else:
+            self.fail("second callback failed to raise AlreadyCalledError")
+
+    def testAlreadyCalledDebug_CE(self):
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._call_1(d)
+        try:
+            self._err_2(d)
+        except defer.AlreadyCalledError as e:
+            self._check(e, "testAlreadyCalledDebug_CE", "_call_1", "_err_2")
+        else:
+            self.fail("second errback failed to raise AlreadyCalledError")
+
+    def testAlreadyCalledDebug_EC(self):
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._err_1(d)
+        try:
+            self._call_2(d)
+        except defer.AlreadyCalledError as e:
+            self._check(e, "testAlreadyCalledDebug_EC", "_err_1", "_call_2")
+        else:
+            self.fail("second callback failed to raise AlreadyCalledError")
+
+    def testAlreadyCalledDebug_EE(self):
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._err_1(d)
+        try:
+            self._err_2(d)
+        except defer.AlreadyCalledError as e:
+            self._check(e, "testAlreadyCalledDebug_EE", "_err_1", "_err_2")
+        else:
+            self.fail("second errback failed to raise AlreadyCalledError")
+
+    def testNoDebugging(self):
+        defer.setDebugging(False)
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        self._call_1(d)
+        try:
+            self._call_2(d)
+        except defer.AlreadyCalledError as e:
+            self.failIf(e.args)
+        else:
+            self.fail("second callback failed to raise AlreadyCalledError")
+
+
+    def testSwitchDebugging(self):
+        # Make sure Deferreds can deal with debug state flipping
+        # around randomly.  This is covering a particular fixed bug.
+        defer.setDebugging(False)
+        d = defer.Deferred()
+        d.addBoth(lambda ign: None)
+        defer.setDebugging(True)
+        d.callback(None)
+
+        defer.setDebugging(False)
+        d = defer.Deferred()
+        d.callback(None)
+        defer.setDebugging(True)
+        d.addBoth(lambda ign: None)
+
+
+
+class DeferredCancellerTest(unittest.SynchronousTestCase):
+    def setUp(self):
+        self.callbackResults = None
+        self.errbackResults = None
+        self.callback2Results = None
+        self.cancellerCallCount = 0
+
+
+    def tearDown(self):
+        # Sanity check that the canceller was called at most once.
+        self.assertTrue(self.cancellerCallCount in (0, 1))
+
+
+    def _callback(self, data):
+        self.callbackResults = data
+        return data
+
+
+    def _callback2(self, data):
+        self.callback2Results = data
+
+
+    def _errback(self, data):
+        self.errbackResults = data
+
+
+    def test_noCanceller(self):
+        """
+        A L{defer.Deferred} without a canceller must errback with a
+        L{defer.CancelledError} and not callback.
+        """
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+        self.assertEqual(self.errbackResults.type, defer.CancelledError)
+        self.assertEqual(self.callbackResults, None)
+
+
+    def test_raisesAfterCancelAndCallback(self):
+        """
+        A L{defer.Deferred} without a canceller, when cancelled, must allow
+        a single extra call to callback, and raise
+        L{defer.AlreadyCalledError} if callbacked or errbacked thereafter.
+        """
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+
+        # A single extra callback should be swallowed.
+        d.callback(None)
+
+        # But a second call to callback or errback is not.
+        self.assertRaises(defer.AlreadyCalledError, d.callback, None)
+        self.assertRaises(defer.AlreadyCalledError, d.errback, Exception())
+
+
+    def test_raisesAfterCancelAndErrback(self):
+        """
+        A L{defer.Deferred} without a canceller, when cancelled, must allow
+        a single extra call to errback, and raise
+        L{defer.AlreadyCalledError} if callbacked or errbacked thereafter.
+        """
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+
+        # A single extra errback should be swallowed.
+        d.errback(Exception())
+
+        # But a second call to callback or errback is not.
+        self.assertRaises(defer.AlreadyCalledError, d.callback, None)
+        self.assertRaises(defer.AlreadyCalledError, d.errback, Exception())
+
+
+    def test_noCancellerMultipleCancelsAfterCancelAndCallback(self):
+        """
+        A L{Deferred} without a canceller, when cancelled and then
+        callbacked, ignores multiple cancels thereafter.
+        """
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+        currentFailure = self.errbackResults
+        # One callback will be ignored
+        d.callback(None)
+        # Cancel should have no effect.
+        d.cancel()
+        self.assertIdentical(currentFailure, self.errbackResults)
+
+
+    def test_noCancellerMultipleCancelsAfterCancelAndErrback(self):
+        """
+        A L{defer.Deferred} without a canceller, when cancelled and then
+        errbacked, ignores multiple cancels thereafter.
+        """
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+        self.assertEqual(self.errbackResults.type, defer.CancelledError)
+        currentFailure = self.errbackResults
+        # One errback will be ignored
+        d.errback(GenericError())
+        # I.e., we should still have a CancelledError.
+        self.assertEqual(self.errbackResults.type, defer.CancelledError)
+        d.cancel()
+        self.assertIdentical(currentFailure, self.errbackResults)
+
+
+    def test_noCancellerMultipleCancel(self):
+        """
+        Calling cancel multiple times on a deferred with no canceller
+        results in a L{defer.CancelledError}. Subsequent calls to cancel
+        do not cause an error.
+        """
+        d = defer.Deferred()
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+        self.assertEqual(self.errbackResults.type, defer.CancelledError)
+        currentFailure = self.errbackResults
+        d.cancel()
+        self.assertIdentical(currentFailure, self.errbackResults)
+
+
+    def test_cancellerMultipleCancel(self):
+        """
+        Calling cancel multiple times on a deferred with a canceller that
+        does not errback results in a L{defer.CancelledError}; subsequent
+        calls to cancel do not cause an error, and the canceller is only
+        called once.
+        """
+        def cancel(d):
+            self.cancellerCallCount += 1
+
+        d = defer.Deferred(canceller=cancel)
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+        self.assertEqual(self.errbackResults.type, defer.CancelledError)
+        currentFailure = self.errbackResults
+        d.cancel()
+        self.assertIdentical(currentFailure, self.errbackResults)
+        self.assertEqual(self.cancellerCallCount, 1)
+
+
+    def test_simpleCanceller(self):
+        """
+        Verify that a L{defer.Deferred} calls its specified canceller when
+        it is cancelled, and that further call/errbacks raise
+        L{defer.AlreadyCalledError}.
+        """
+        def cancel(d):
+            self.cancellerCallCount += 1
+
+        d = defer.Deferred(canceller=cancel)
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+        self.assertEqual(self.cancellerCallCount, 1)
+        self.assertEqual(self.errbackResults.type, defer.CancelledError)
+
+        # Test that further call/errbacks are *not* swallowed
+        self.assertRaises(defer.AlreadyCalledError, d.callback, None)
+        self.assertRaises(defer.AlreadyCalledError, d.errback, Exception())
+
+
+    def test_cancellerArg(self):
+        """
+        Verify that a canceller is given the correct deferred argument.
+        """
+        def cancel(d1):
+            self.assertIdentical(d1, d)
+        d = defer.Deferred(canceller=cancel)
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+
+
+    def test_cancelAfterCallback(self):
+        """
+        Test that cancelling a deferred after it has been callbacked does
+        not cause an error.
+        """
+        def cancel(d):
+            self.cancellerCallCount += 1
+            d.errback(GenericError())
+        d = defer.Deferred(canceller=cancel)
+        d.addCallbacks(self._callback, self._errback)
+        d.callback('biff!')
+        d.cancel()
+        self.assertEqual(self.cancellerCallCount, 0)
+        self.assertEqual(self.errbackResults, None)
+        self.assertEqual(self.callbackResults, 'biff!')
+
+
+    def test_cancelAfterErrback(self):
+        """
+        Test that cancelling a L{Deferred} after it has been errbacked does
+        not result in a L{defer.CancelledError}.
+        """
+        def cancel(d):
+            self.cancellerCallCount += 1
+            d.errback(GenericError())
+        d = defer.Deferred(canceller=cancel)
+        d.addCallbacks(self._callback, self._errback)
+        d.errback(GenericError())
+        d.cancel()
+        self.assertEqual(self.cancellerCallCount, 0)
+        self.assertEqual(self.errbackResults.type, GenericError)
+        self.assertEqual(self.callbackResults, None)
+
+
+    def test_cancellerThatErrbacks(self):
+        """
+        Test a canceller which errbacks its deferred.
+        """
+        def cancel(d):
+            self.cancellerCallCount += 1
+            d.errback(GenericError())
+        d = defer.Deferred(canceller=cancel)
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+        self.assertEqual(self.cancellerCallCount, 1)
+        self.assertEqual(self.errbackResults.type, GenericError)
+
+
+    def test_cancellerThatCallbacks(self):
+        """
+        Test a canceller which calls its deferred.
+        """
+        def cancel(d):
+            self.cancellerCallCount += 1
+            d.callback('hello!')
+        d = defer.Deferred(canceller=cancel)
+        d.addCallbacks(self._callback, self._errback)
+        d.cancel()
+        self.assertEqual(self.cancellerCallCount, 1)
+        self.assertEqual(self.callbackResults, 'hello!')
+        self.assertEqual(self.errbackResults, None)
+
+
+    def test_cancelNestedDeferred(self):
+        """
+        Verify that a Deferred, a, which is waiting on another Deferred, b,
+        returned from one of its callbacks, will propagate
+        L{defer.CancelledError} when a is cancelled.
+        """
+        def innerCancel(d):
+            self.cancellerCallCount += 1
+        def cancel(d):
+            self.fail("The outer canceller should not be called.")
+
+        b = defer.Deferred(canceller=innerCancel)
+        a = defer.Deferred(canceller=cancel)
+        a.callback(None)
+        a.addCallback(lambda data: b)
+        a.cancel()
+        a.addCallbacks(self._callback, self._errback)
+        # The cancel count should be one (the cancellation done by B)
+        self.assertEqual(self.cancellerCallCount, 1)
+        # B's canceller didn't errback, so defer.py will have called errback
+        # with a CancelledError.
+        self.assertEqual(self.errbackResults.type, defer.CancelledError)
+
+
+
+class LogTestCase(unittest.SynchronousTestCase):
+    """
+    Test logging of unhandled errors.
+    """
+
+    def setUp(self):
+        """
+        Add a custom observer to observe logging.
+        """
+        self.c = []
+        log.addObserver(self.c.append)
+
+    def tearDown(self):
+        """
+        Remove the observer.
+        """
+        log.removeObserver(self.c.append)
+
+
+    def _loggedErrors(self):
+        return [e for e in self.c if e["isError"]]
+
+
+    def _check(self):
+        """
+        Check the output of the log observer to see if the error is present.
+        """
+        c2 = self._loggedErrors()
+        self.assertEqual(len(c2), 2)
+        c2[1]["failure"].trap(ZeroDivisionError)
+        self.flushLoggedErrors(ZeroDivisionError)
+
+    def test_errorLog(self):
+        """
+        Verify that when a L{Deferred} with no references to it is fired,
+        and its final result (the one not handled by any callback) is an
+        exception, that exception will be logged immediately.
+        """
+        defer.Deferred().addCallback(lambda x: 1 // 0).callback(1)
+        gc.collect()
+        self._check()
+
+    def test_errorLogWithInnerFrameRef(self):
+        """
+        Same as L{test_errorLog}, but with an inner frame.
+        """
+        def _subErrorLogWithInnerFrameRef():
+            d = defer.Deferred()
+            d.addCallback(lambda x: 1 // 0)
+            d.callback(1)
+
+        _subErrorLogWithInnerFrameRef()
+        gc.collect()
+        self._check()
+
+    def test_errorLogWithInnerFrameCycle(self):
+        """
+        Same as L{test_errorLogWithInnerFrameRef}, plus create a cycle.
+        """
+        def _subErrorLogWithInnerFrameCycle():
+            d = defer.Deferred()
+            d.addCallback(lambda x, d=d: 1 // 0)
+            d._d = d
+            d.callback(1)
+
+        _subErrorLogWithInnerFrameCycle()
+        gc.collect()
+        self._check()
+
+
+    def test_chainedErrorCleanup(self):
+        """
+        If one Deferred with an error result is returned from a callback on
+        another Deferred, when the first Deferred is garbage collected it does
+        not log its error.
+        """
+        d = defer.Deferred()
+        d.addCallback(lambda ign: defer.fail(RuntimeError("zoop")))
+        d.callback(None)
+
+        # Sanity check - this isn't too interesting, but we do want the original
+        # Deferred to have gotten the failure.
+        results = []
+        errors = []
+        d.addCallbacks(results.append, errors.append)
+        self.assertEqual(results, [])
+        self.assertEqual(len(errors), 1)
+        errors[0].trap(Exception)
+
+        # Get rid of any references we might have to the inner Deferred (none of
+        # these should really refer to it, but we're just being safe).
+        del results, errors, d
+        # Force a collection cycle so that there's a chance for an error to be
+        # logged, if it's going to be logged.
+        gc.collect()
+        # And make sure it is not.
+        self.assertEqual(self._loggedErrors(), [])
+
+
+    def test_errorClearedByChaining(self):
+        """
+        If a Deferred with a failure result has an errback which chains it to
+        another Deferred, the initial failure is cleared by the errback so it is
+        not logged.
+        """
+        # Start off with a Deferred with a failure for a result
+        bad = defer.fail(Exception("oh no"))
+        good = defer.Deferred()
+        # Give it a callback that chains it to another Deferred
+        bad.addErrback(lambda ignored: good)
+        # That's all, clean it up.  No Deferred here still has a failure result,
+        # so nothing should be logged.
+        good = bad = None
+        gc.collect()
+        self.assertEqual(self._loggedErrors(), [])
+
+
+
+class DeferredTestCaseII(unittest.SynchronousTestCase):
+    def setUp(self):
+        self.callbackRan = 0
+
+    def testDeferredListEmpty(self):
+        """Testing empty DeferredList."""
+        dl = defer.DeferredList([])
+        dl.addCallback(self.cb_empty)
+
+    def cb_empty(self, res):
+        self.callbackRan = 1
+        self.assertEqual([], res)
+
+    def tearDown(self):
+        self.failUnless(self.callbackRan, "Callback was never run.")
+
+
+
+class OtherPrimitives(unittest.SynchronousTestCase, ImmediateFailureMixin):
+    def _incr(self, result):
+        self.counter += 1
+
+    def setUp(self):
+        self.counter = 0
+
+    def testLock(self):
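+        # Orientation for the assertions below (an informal summary based on
+        # this test, not API documentation): acquire() returns a Deferred
+        # that fires once the lock is free, release() hands the lock to the
+        # next waiter, and run(f, *a, **kw) acquires, calls f, and releases
+        # when f's (possibly Deferred) result fires.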
+        lock = defer.DeferredLock()
+        lock.acquire().addCallback(self._incr)
+        self.failUnless(lock.locked)
+        self.assertEqual(self.counter, 1)
+
+        lock.acquire().addCallback(self._incr)
+        self.failUnless(lock.locked)
+        self.assertEqual(self.counter, 1)
+
+        lock.release()
+        self.failUnless(lock.locked)
+        self.assertEqual(self.counter, 2)
+
+        lock.release()
+        self.failIf(lock.locked)
+        self.assertEqual(self.counter, 2)
+
+        self.assertRaises(TypeError, lock.run)
+
+        firstUnique = object()
+        secondUnique = object()
+
+        controlDeferred = defer.Deferred()
+        def helper(self, b):
+            self.b = b
+            return controlDeferred
+
+        resultDeferred = lock.run(helper, self=self, b=firstUnique)
+        self.failUnless(lock.locked)
+        self.assertEqual(self.b, firstUnique)
+
+        resultDeferred.addCallback(lambda x: setattr(self, 'result', x))
+
+        lock.acquire().addCallback(self._incr)
+        self.failUnless(lock.locked)
+        self.assertEqual(self.counter, 2)
+
+        controlDeferred.callback(secondUnique)
+        self.assertEqual(self.result, secondUnique)
+        self.failUnless(lock.locked)
+        self.assertEqual(self.counter, 3)
+
+        d = lock.acquire().addBoth(lambda x: setattr(self, 'result', x))
+        d.cancel()
+        self.assertEqual(self.result.type, defer.CancelledError)
+
+        lock.release()
+        self.failIf(lock.locked)
+
+
+    def test_cancelLockAfterAcquired(self):
+        """
+        When canceling a L{Deferred} from a L{DeferredLock} that already
+        has the lock, the cancel should have no effect.
+        """
+        def _failOnErrback(_):
+            self.fail("Unexpected errback call!")
+        lock = defer.DeferredLock()
+        d = lock.acquire()
+        d.addErrback(_failOnErrback)
+        d.cancel()
+
+
+    def test_cancelLockBeforeAcquired(self):
+        """
+        When canceling a L{Deferred} from a L{DeferredLock} that does not
+        yet have the lock (i.e., the L{Deferred} has not fired), the cancel
+        should cause a L{defer.CancelledError} failure.
+        """
+        lock = defer.DeferredLock()
+        lock.acquire()
+        d = lock.acquire()
+        d.cancel()
+        self.assertImmediateFailure(d, defer.CancelledError)
+
+
+    def testSemaphore(self):
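+        # Informal summary of what is exercised below: DeferredSemaphore(N)
+        # allows up to N concurrent acquisitions; acquire() fires immediately
+        # while tokens remain and otherwise queues, and each release() wakes
+        # one queued acquirer.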
+        N = 13
+        sem = defer.DeferredSemaphore(N)
+
+        controlDeferred = defer.Deferred()
+        def helper(self, arg):
+            self.arg = arg
+            return controlDeferred
+
+        results = []
+        uniqueObject = object()
+        resultDeferred = sem.run(helper, self=self, arg=uniqueObject)
+        resultDeferred.addCallback(results.append)
+        resultDeferred.addCallback(self._incr)
+        self.assertEqual(results, [])
+        self.assertEqual(self.arg, uniqueObject)
+        controlDeferred.callback(None)
+        self.assertEqual(results.pop(), None)
+        self.assertEqual(self.counter, 1)
+
+        self.counter = 0
+        for i in range(1, 1 + N):
+            sem.acquire().addCallback(self._incr)
+            self.assertEqual(self.counter, i)
+
+
+        success = []
+        def fail(r):
+            success.append(False)
+        def succeed(r):
+            success.append(True)
+        d = sem.acquire().addCallbacks(fail, succeed)
+        d.cancel()
+        self.assertEqual(success, [True])
+
+        sem.acquire().addCallback(self._incr)
+        self.assertEqual(self.counter, N)
+
+        sem.release()
+        self.assertEqual(self.counter, N + 1)
+
+        for i in range(1, 1 + N):
+            sem.release()
+            self.assertEqual(self.counter, N + 1)
+
+
+    def test_semaphoreInvalidTokens(self):
+        """
+        If the token count passed to L{DeferredSemaphore} is less than one
+        then L{ValueError} is raised.
+        """
+        self.assertRaises(ValueError, defer.DeferredSemaphore, 0)
+        self.assertRaises(ValueError, defer.DeferredSemaphore, -1)
+
+
+    def test_cancelSemaphoreAfterAcquired(self):
+        """
+        When canceling a L{Deferred} from a L{DeferredSemaphore} that
+        already has the semaphore, the cancel should have no effect.
+        """
+        def _failOnErrback(_):
+            self.fail("Unexpected errback call!")
+
+        sem = defer.DeferredSemaphore(1)
+        d = sem.acquire()
+        d.addErrback(_failOnErrback)
+        d.cancel()
+
+
+    def test_cancelSemaphoreBeforeAcquired(self):
+        """
+        When canceling a L{Deferred} from a L{DeferredSemaphore} that does
+        not yet have the semaphore (i.e., the L{Deferred} has not fired),
+        the cancel should cause a L{defer.CancelledError} failure.
+        """
+        sem = defer.DeferredSemaphore(1)
+        sem.acquire()
+        d = sem.acquire()
+        d.cancel()
+        self.assertImmediateFailure(d, defer.CancelledError)
+
+
+    def testQueue(self):
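+        # Informal summary of the semantics exercised below:
+        # DeferredQueue(size, backlog) limits pending put()s to 'size'
+        # (QueueOverflow beyond that) and waiting get()s to 'backlog'
+        # (QueueUnderflow beyond that); each put() satisfies the oldest
+        # waiting get() first.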
+        N, M = 2, 2
+        queue = defer.DeferredQueue(N, M)
+
+        gotten = []
+
+        for i in range(M):
+            queue.get().addCallback(gotten.append)
+        self.assertRaises(defer.QueueUnderflow, queue.get)
+
+        for i in range(M):
+            queue.put(i)
+            self.assertEqual(gotten, list(range(i + 1)))
+        for i in range(N):
+            queue.put(N + i)
+            self.assertEqual(gotten, list(range(M)))
+        self.assertRaises(defer.QueueOverflow, queue.put, None)
+
+        gotten = []
+        for i in range(N):
+            queue.get().addCallback(gotten.append)
+            self.assertEqual(gotten, list(range(N, N + i + 1)))
+
+        queue = defer.DeferredQueue()
+        gotten = []
+        for i in range(N):
+            queue.get().addCallback(gotten.append)
+        for i in range(N):
+            queue.put(i)
+        self.assertEqual(gotten, list(range(N)))
+
+        queue = defer.DeferredQueue(size=0)
+        self.assertRaises(defer.QueueOverflow, queue.put, None)
+
+        queue = defer.DeferredQueue(backlog=0)
+        self.assertRaises(defer.QueueUnderflow, queue.get)
+
+
+    def test_cancelQueueAfterSynchronousGet(self):
+        """
+        When canceling a L{Deferred} from a L{DeferredQueue} that already has
+        a result, the cancel should have no effect.
+        """
+        def _failOnErrback(_):
+            self.fail("Unexpected errback call!")
+
+        queue = defer.DeferredQueue()
+        d = queue.get()
+        d.addErrback(_failOnErrback)
+        queue.put(None)
+        d.cancel()
+
+
+    def test_cancelQueueAfterGet(self):
+        """
+        When canceling a L{Deferred} from a L{DeferredQueue} that does not
+        have a result (i.e., the L{Deferred} has not fired), the cancel
+        causes a L{defer.CancelledError} failure. If the queue has a result
+        later on, it doesn't try to fire the deferred.
+        """
+        queue = defer.DeferredQueue()
+        d = queue.get()
+        d.cancel()
+        self.assertImmediateFailure(d, defer.CancelledError)
+        def cb(ignore):
+            # If the deferred is still linked with the deferred queue, it will
+            # fail with an AlreadyCalledError
+            queue.put(None)
+            return queue.get().addCallback(self.assertIdentical, None)
+        d.addCallback(cb)
+        done = []
+        d.addCallback(done.append)
+        self.assertEqual(len(done), 1)
+
+
+# Enable on Python 3 as part of #5960:
+if not _PY3:
+    from twisted.internet import reactor
+    from twisted.internet.task import Clock
+
+    class DeferredFilesystemLockTestCase(unittest.TestCase):
+        """
+        Test the behavior of L{DeferredFilesystemLock}
+        """
+
+        def setUp(self):
+            self.clock = Clock()
+            self.lock = defer.DeferredFilesystemLock(self.mktemp(),
+                                                     scheduler=self.clock)
+
+
+        def test_waitUntilLockedWithNoLock(self):
+            """
+            Test that the lock can be acquired when no lock is held
+            """
+            d = self.lock.deferUntilLocked(timeout=1)
+
+            return d
+
+
+        def test_waitUntilLockedWithTimeoutLocked(self):
+            """
+            Test that the lock can not be acquired when the lock is held
+            for longer than the timeout.
+            """
+            self.failUnless(self.lock.lock())
+
+            d = self.lock.deferUntilLocked(timeout=5.5)
+            self.assertFailure(d, defer.TimeoutError)
+
+            self.clock.pump([1] * 10)
+
+            return d
+
+
+        def test_waitUntilLockedWithTimeoutUnlocked(self):
+            """
+            Test that the lock can be acquired when it is initially held,
+            provided it is unlocked before our timeout expires.
+            """
+            def onTimeout(f):
+                f.trap(defer.TimeoutError)
+                self.fail("Should not have timed out")
+
+            self.failUnless(self.lock.lock())
+
+            self.clock.callLater(1, self.lock.unlock)
+            d = self.lock.deferUntilLocked(timeout=10)
+            d.addErrback(onTimeout)
+
+            self.clock.pump([1] * 10)
+
+            return d
+
+
+        def test_defaultScheduler(self):
+            """
+            Test that the default scheduler is set up properly.
+            """
+            lock = defer.DeferredFilesystemLock(self.mktemp())
+
+            self.assertEqual(lock._scheduler, reactor)
+
+
+        def test_concurrentUsage(self):
+            """
+            Test that an appropriate exception is raised when attempting
+            to use deferUntilLocked concurrently.
+            """
+            self.lock.lock()
+            self.clock.callLater(1, self.lock.unlock)
+
+            d = self.lock.deferUntilLocked()
+            d2 = self.lock.deferUntilLocked()
+
+            self.assertFailure(d2, defer.AlreadyTryingToLockError)
+
+            self.clock.advance(1)
+
+            return d
+
+
+        def test_multipleUsages(self):
+            """
+            Test that a DeferredFilesystemLock can be used multiple times
+            """
+            def lockAcquired(ign):
+                self.lock.unlock()
+                d = self.lock.deferUntilLocked()
+                return d
+
+            self.lock.lock()
+            self.clock.callLater(1, self.lock.unlock)
+
+            d = self.lock.deferUntilLocked()
+            d.addCallback(lockAcquired)
+
+            self.clock.advance(1)
+
+            return d
diff --git a/ThirdParty/Twisted/twisted/test/test_defgen.py b/ThirdParty/Twisted/twisted/test/test_defgen.py
new file mode 100644
index 0000000..0947312
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_defgen.py
@@ -0,0 +1,301 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.defer.deferredGenerator} and related APIs.
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+
+from twisted.internet import reactor
+
+from twisted.trial import unittest
+
+from twisted.internet.defer import waitForDeferred, deferredGenerator, Deferred
+from twisted.internet.defer import inlineCallbacks, returnValue
+from twisted.internet import defer
+
+
+def getThing():
+    d = Deferred()
+    reactor.callLater(0, d.callback, "hi")
+    return d
+
+def getOwie():
+    d = Deferred()
+    def CRAP():
+        d.errback(ZeroDivisionError('OMG'))
+    reactor.callLater(0, CRAP)
+    return d
+
+# NOTE: most of the tests in DeferredGeneratorTests are duplicated
+# with slightly different syntax for the InlineCallbacksTests below.
+
+class TerminalException(Exception):
+    pass
+
+class BaseDefgenTests:
+    """
+    This class sets up a bunch of test cases which will test both
+    deferredGenerator and inlineCallbacks based generators. The subclasses
+    DeferredGeneratorTests and InlineCallbacksTests each provide the actual
+    generator implementations tested.
+    """
+
+    def testBasics(self):
+        """
+        Test that a normal deferredGenerator works.  Tests yielding a
+        deferred which callbacks, as well as a deferred which errbacks.
+        Also ensures returning a final value works.
+        """
+
+        return self._genBasics().addCallback(self.assertEqual, 'WOOSH')
+
+    def testBuggy(self):
+        """
+        Ensure that a buggy generator properly signals a Failure
+        condition on the result deferred.
+        """
+        return self.assertFailure(self._genBuggy(), ZeroDivisionError)
+
+    def testNothing(self):
+        """Test that a generator which never yields results in None."""
+
+        return self._genNothing().addCallback(self.assertEqual, None)
+
+    def testHandledTerminalFailure(self):
+        """
+        Create a Deferred Generator which yields a Deferred which fails and
+        handles the exception which results.  Assert that the Deferred
+        Generator does not errback its Deferred.
+        """
+        return self._genHandledTerminalFailure().addCallback(self.assertEqual, None)
+
+    def testHandledTerminalAsyncFailure(self):
+        """
+        Just like testHandledTerminalFailure, only with a Deferred which fires
+        asynchronously with an error.
+        """
+        d = defer.Deferred()
+        deferredGeneratorResultDeferred = self._genHandledTerminalAsyncFailure(d)
+        d.errback(TerminalException("Handled Terminal Failure"))
+        return deferredGeneratorResultDeferred.addCallback(
+            self.assertEqual, None)
+
+    def testStackUsage(self):
+        """
+        Make sure we don't blow the stack when yielding immediately
+        available deferreds.
+        """
+        return self._genStackUsage().addCallback(self.assertEqual, 0)
+
+    def testStackUsage2(self):
+        """
+        Make sure we don't blow the stack when yielding immediately
+        available values.
+        """
+        return self._genStackUsage2().addCallback(self.assertEqual, 0)
+
+
+
+
+class DeferredGeneratorTests(BaseDefgenTests, unittest.TestCase):
+
+    # First provide all the generator impls necessary for BaseDefgenTests
+    def _genBasics(self):
+
+        x = waitForDeferred(getThing())
+        yield x
+        x = x.getResult()
+
+        self.assertEqual(x, "hi")
+
+        ow = waitForDeferred(getOwie())
+        yield ow
+        try:
+            ow.getResult()
+        except ZeroDivisionError as e:
+            self.assertEqual(str(e), 'OMG')
+        yield "WOOSH"
+        return
+    _genBasics = deferredGenerator(_genBasics)
+
+    def _genBuggy(self):
+        yield waitForDeferred(getThing())
+        1//0
+    _genBuggy = deferredGenerator(_genBuggy)
+
+
+    def _genNothing(self):
+        if 0: yield 1
+    _genNothing = deferredGenerator(_genNothing)
+
+    def _genHandledTerminalFailure(self):
+        x = waitForDeferred(defer.fail(TerminalException("Handled Terminal Failure")))
+        yield x
+        try:
+            x.getResult()
+        except TerminalException:
+            pass
+    _genHandledTerminalFailure = deferredGenerator(_genHandledTerminalFailure)
+
+
+    def _genHandledTerminalAsyncFailure(self, d):
+        x = waitForDeferred(d)
+        yield x
+        try:
+            x.getResult()
+        except TerminalException:
+            pass
+    _genHandledTerminalAsyncFailure = deferredGenerator(_genHandledTerminalAsyncFailure)
+
+
+    def _genStackUsage(self):
+        for x in range(5000):
+            # Test with yielding a deferred
+            x = waitForDeferred(defer.succeed(1))
+            yield x
+            x = x.getResult()
+        yield 0
+    _genStackUsage = deferredGenerator(_genStackUsage)
+
+    def _genStackUsage2(self):
+        for x in range(5000):
+            # Test with yielding a random value
+            yield 1
+        yield 0
+    _genStackUsage2 = deferredGenerator(_genStackUsage2)
+
+    # Tests unique to deferredGenerator
+
+    def testDeferredYielding(self):
+        """
+        Ensure that yielding a Deferred directly is trapped as an
+        error.
+        """
+        # See the comment in _deferGenerator about d.callback(Deferred).
+        def _genDeferred():
+            yield getThing()
+        _genDeferred = deferredGenerator(_genDeferred)
+
+        return self.assertFailure(_genDeferred(), TypeError)
+
+
+
+class InlineCallbacksTests(BaseDefgenTests, unittest.TestCase):
+    # First provide all the generator impls necessary for BaseDefgenTests
+
+    def _genBasics(self):
+
+        x = yield getThing()
+
+        self.assertEqual(x, "hi")
+
+        try:
+            ow = yield getOwie()
+        except ZeroDivisionError as e:
+            self.assertEqual(str(e), 'OMG')
+        returnValue("WOOSH")
+    _genBasics = inlineCallbacks(_genBasics)
+
+    def _genBuggy(self):
+        yield getThing()
+        1/0
+    _genBuggy = inlineCallbacks(_genBuggy)
+
+
+    def _genNothing(self):
+        if 0: yield 1
+    _genNothing = inlineCallbacks(_genNothing)
+
+
+    def _genHandledTerminalFailure(self):
+        try:
+            x = yield defer.fail(TerminalException("Handled Terminal Failure"))
+        except TerminalException:
+            pass
+    _genHandledTerminalFailure = inlineCallbacks(_genHandledTerminalFailure)
+
+
+    def _genHandledTerminalAsyncFailure(self, d):
+        try:
+            x = yield d
+        except TerminalException:
+            pass
+    _genHandledTerminalAsyncFailure = inlineCallbacks(
+        _genHandledTerminalAsyncFailure)
+
+
+    def _genStackUsage(self):
+        for x in range(5000):
+            # Test with yielding a deferred
+            x = yield defer.succeed(1)
+        returnValue(0)
+    _genStackUsage = inlineCallbacks(_genStackUsage)
+
+    def _genStackUsage2(self):
+        for x in range(5000):
+            # Test with yielding a random value
+            yield 1
+        returnValue(0)
+    _genStackUsage2 = inlineCallbacks(_genStackUsage2)
+
+    # Tests unique to inlineCallbacks
+
+    def testYieldNonDeferred(self):
+        """
+        Ensure that yielding a non-deferred passes it back as the
+        result of the yield expression.
+        """
+        def _test():
+            x = yield 5
+            returnValue(x)
+        _test = inlineCallbacks(_test)
+
+        return _test().addCallback(self.assertEqual, 5)
+
+    def testReturnNoValue(self):
+        """Ensure a standard python return results in a None result."""
+        def _noReturn():
+            yield 5
+            return
+        _noReturn = inlineCallbacks(_noReturn)
+
+        return _noReturn().addCallback(self.assertEqual, None)
+
+    def testReturnValue(self):
+        """Ensure that returnValue works."""
+        def _return():
+            yield 5
+            returnValue(6)
+        _return = inlineCallbacks(_return)
+
+        return _return().addCallback(self.assertEqual, 6)
+
+
+    def test_nonGeneratorReturn(self):
+        """
+        Ensure that a C{TypeError} with a message about L{inlineCallbacks} is
+        raised when the decorated callable is not a generator function and
+        simply returns a value rather than a generator.
+        """
+        def _noYield():
+            return 5
+        _noYield = inlineCallbacks(_noYield)
+
+        self.assertIn("inlineCallbacks",
+            str(self.assertRaises(TypeError, _noYield)))
+
+
+    def test_nonGeneratorReturnValue(self):
+        """
+        Ensure that C{TypeError} with a message about L{inlineCallbacks} is
+        raised when a non-generator calls L{returnValue}.
+        """
+        def _noYield():
+            returnValue(5)
+        _noYield = inlineCallbacks(_noYield)
+
+        self.assertIn("inlineCallbacks",
+            str(self.assertRaises(TypeError, _noYield)))
diff --git a/ThirdParty/Twisted/twisted/test/test_dict.py b/ThirdParty/Twisted/twisted/test/test_dict.py
new file mode 100644
index 0000000..3ebb67e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_dict.py
@@ -0,0 +1,22 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.trial import unittest
+from twisted.protocols import dict
+
+paramString = "\"This is a dqstring \\w\\i\\t\\h boring stuff like: \\\"\" and t\\hes\\\"e are a\\to\\ms"
+goodparams = ["This is a dqstring with boring stuff like: \"", "and", "thes\"e", "are", "atoms"]
+
+class ParamTest(unittest.TestCase):
+    def testParseParam(self):
+        """Testing command response handling"""
+        params = []
+        rest = paramString
+        while 1:
+            (param, rest) = dict.parseParam(rest)
+            if param is None:
+                break
+            params.append(param)
+        self.assertEqual(
+            params, goodparams,
+            "DictClient.parseParam returns unexpected results")
diff --git a/ThirdParty/Twisted/twisted/test/test_digestauth.py b/ThirdParty/Twisted/twisted/test/test_digestauth.py
new file mode 100644
index 0000000..41368a0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_digestauth.py
@@ -0,0 +1,671 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.cred._digest} and the associated bits in
+L{twisted.cred.credentials}.
+"""
+
+from zope.interface.verify import verifyObject
+from twisted.trial.unittest import TestCase
+from twisted.python.hashlib import md5, sha1
+from twisted.internet.address import IPv4Address
+from twisted.cred.error import LoginFailed
+from twisted.cred.credentials import calcHA1, calcHA2, IUsernameDigestHash
+from twisted.cred.credentials import calcResponse, DigestCredentialFactory
+
+def b64encode(s):
+    return s.encode('base64').strip()
+
+
+class FakeDigestCredentialFactory(DigestCredentialFactory):
+    """
+    A Fake Digest Credential Factory that generates a predictable
+    nonce and opaque.
+    """
+    def __init__(self, *args, **kwargs):
+        super(FakeDigestCredentialFactory, self).__init__(*args, **kwargs)
+        self.privateKey = "0"
+
+
+    def _generateNonce(self):
+        """
+        Generate a static nonce
+        """
+        return '178288758716122392881254770685'
+
+
+    def _getTime(self):
+        """
+        Return a stable time
+        """
+        return 0
+
+
+
+class DigestAuthTests(TestCase):
+    """
+    L{TestCase} defining a number of tests for L{DigestCredentialFactory}
+    and the digest calculation helpers it uses.
+    """
+    def setUp(self):
+        """
+        Create a DigestCredentialFactory for testing
+        """
+        self.username = "foobar"
+        self.password = "bazquux"
+        self.realm = "test realm"
+        self.algorithm = "md5"
+        self.cnonce = "29fc54aa1641c6fa0e151419361c8f23"
+        self.qop = "auth"
+        self.uri = "/write/"
+        self.clientAddress = IPv4Address('TCP', '10.2.3.4', 43125)
+        self.method = 'GET'
+        self.credentialFactory = DigestCredentialFactory(
+            self.algorithm, self.realm)
+
+
+    def test_MD5HashA1(self, _algorithm='md5', _hash=md5):
+        """
+        L{calcHA1} accepts the C{'md5'} algorithm and returns an MD5 hash of
+        its parameters, excluding the nonce and cnonce.
+        """
+        nonce = 'abc123xyz'
+        hashA1 = calcHA1(_algorithm, self.username, self.realm, self.password,
+                         nonce, self.cnonce)
+        a1 = '%s:%s:%s' % (self.username, self.realm, self.password)
+        expected = _hash(a1).hexdigest()
+        self.assertEqual(hashA1, expected)
+
+
+    def test_MD5SessionHashA1(self):
+        """
+        L{calcHA1} accepts the C{'md5-sess'} algorithm and returns an MD5 hash
+        of its parameters, including the nonce and cnonce.
+        """
+        nonce = 'xyz321abc'
+        hashA1 = calcHA1('md5-sess', self.username, self.realm, self.password,
+                         nonce, self.cnonce)
+        a1 = '%s:%s:%s' % (self.username, self.realm, self.password)
+        ha1 = md5(a1).digest()
+        a1 = '%s:%s:%s' % (ha1, nonce, self.cnonce)
+        expected = md5(a1).hexdigest()
+        self.assertEqual(hashA1, expected)
+
+
+    def test_SHAHashA1(self):
+        """
+        L{calcHA1} accepts the C{'sha'} algorithm and returns a SHA hash of its
+        parameters, excluding the nonce and cnonce.
+        """
+        self.test_MD5HashA1('sha', sha1)
+
+
+    def test_MD5HashA2Auth(self, _algorithm='md5', _hash=md5):
+        """
+        L{calcHA2} accepts the C{'md5'} algorithm and returns an MD5 hash of
+        its arguments, excluding the entity hash for QOP other than
+        C{'auth-int'}.
+        """
+        method = 'GET'
+        hashA2 = calcHA2(_algorithm, method, self.uri, 'auth', None)
+        a2 = '%s:%s' % (method, self.uri)
+        expected = _hash(a2).hexdigest()
+        self.assertEqual(hashA2, expected)
+
+
+    def test_MD5HashA2AuthInt(self, _algorithm='md5', _hash=md5):
+        """
+        L{calcHA2} accepts the C{'md5'} algorithm and returns an MD5 hash of
+        its arguments, including the entity hash for QOP of C{'auth-int'}.
+        """
+        method = 'GET'
+        hentity = 'foobarbaz'
+        hashA2 = calcHA2(_algorithm, method, self.uri, 'auth-int', hentity)
+        a2 = '%s:%s:%s' % (method, self.uri, hentity)
+        expected = _hash(a2).hexdigest()
+        self.assertEqual(hashA2, expected)
+
+
+    def test_MD5SessHashA2Auth(self):
+        """
+        L{calcHA2} accepts the C{'md5-sess'} algorithm and QOP of C{'auth'} and
+        returns the same value as it does for the C{'md5'} algorithm.
+        """
+        self.test_MD5HashA2Auth('md5-sess')
+
+
+    def test_MD5SessHashA2AuthInt(self):
+        """
+        L{calcHA2} accepts the C{'md5-sess'} algorithm and QOP of C{'auth-int'}
+        and returns the same value as it does for the C{'md5'} algorithm.
+        """
+        self.test_MD5HashA2AuthInt('md5-sess')
+
+
+    def test_SHAHashA2Auth(self):
+        """
+        L{calcHA2} accepts the C{'sha'} algorithm and returns a SHA hash of
+        its arguments, excluding the entity hash for QOP other than
+        C{'auth-int'}.
+        """
+        self.test_MD5HashA2Auth('sha', sha1)
+
+
+    def test_SHAHashA2AuthInt(self):
+        """
+        L{calcHA2} accepts the C{'sha'} algorithm and returns a SHA hash of
+        its arguments, including the entity hash for QOP of C{'auth-int'}.
+        """
+        self.test_MD5HashA2AuthInt('sha', sha1)
+
+
+    def test_MD5HashResponse(self, _algorithm='md5', _hash=md5):
+        """
+        L{calcResponse} accepts the C{'md5'} algorithm and returns an MD5 hash
+        of its parameters, excluding the nonce count, client nonce, and QoP
+        value if the nonce count and client nonce are C{None}
+        """
+        hashA1 = 'abc123'
+        hashA2 = '789xyz'
+        nonce = 'lmnopq'
+
+        response = '%s:%s:%s' % (hashA1, nonce, hashA2)
+        expected = _hash(response).hexdigest()
+
+        digest = calcResponse(hashA1, hashA2, _algorithm, nonce, None, None,
+                              None)
+        self.assertEqual(expected, digest)
+
+
+    def test_MD5SessionHashResponse(self):
+        """
+        L{calcResponse} accepts the C{'md5-sess'} algorithm and returns an MD5
+        hash of its parameters, excluding the nonce count, client nonce, and
+        QoP value if the nonce count and client nonce are C{None}
+        """
+        self.test_MD5HashResponse('md5-sess')
+
+
+    def test_SHAHashResponse(self):
+        """
+        L{calcResponse} accepts the C{'sha'} algorithm and returns a SHA hash
+        of its parameters, excluding the nonce count, client nonce, and QoP
+        value if the nonce count and client nonce are C{None}
+        """
+        self.test_MD5HashResponse('sha', sha1)
+
+
+    def test_MD5HashResponseExtra(self, _algorithm='md5', _hash=md5):
+        """
+        L{calcResponse} accepts the C{'md5'} algorithm and returns an MD5 hash
+        of its parameters, including the nonce count, client nonce, and QoP
+        value if they are specified.
+        """
+        hashA1 = 'abc123'
+        hashA2 = '789xyz'
+        nonce = 'lmnopq'
+        nonceCount = '00000004'
+        clientNonce = 'abcxyz123'
+        qop = 'auth'
+
+        response = '%s:%s:%s:%s:%s:%s' % (
+            hashA1, nonce, nonceCount, clientNonce, qop, hashA2)
+        expected = _hash(response).hexdigest()
+
+        digest = calcResponse(
+            hashA1, hashA2, _algorithm, nonce, nonceCount, clientNonce, qop)
+        self.assertEqual(expected, digest)
+
+
+    def test_MD5SessionHashResponseExtra(self):
+        """
+        L{calcResponse} accepts the C{'md5-sess'} algorithm and returns an MD5
+        hash of its parameters, including the nonce count, client nonce, and
+        QoP value if they are specified.
+        """
+        self.test_MD5HashResponseExtra('md5-sess')
+
+
+    def test_SHAHashResponseExtra(self):
+        """
+        L{calcResponse} accepts the C{'sha'} algorithm and returns a SHA hash
+        of its parameters, including the nonce count, client nonce, and QoP
+        value if they are specified.
+        """
+        self.test_MD5HashResponseExtra('sha', sha1)
+
+
+    def formatResponse(self, quotes=True, **kw):
+        """
+        Format all given keyword arguments and their values suitably for use as
+        the value of an HTTP header.
+
+        @type quotes: C{bool}
+        @param quotes: A flag indicating whether to quote the values of each
+            field in the response.
+
+        @param **kw: Keywords and C{str} values which will be treated as field
+            name/value pairs to include in the result.
+
+        @rtype: C{str}
+        @return: The given fields formatted for use as an HTTP header value.
+        """
+        if 'username' not in kw:
+            kw['username'] = self.username
+        if 'realm' not in kw:
+            kw['realm'] = self.realm
+        if 'algorithm' not in kw:
+            kw['algorithm'] = self.algorithm
+        if 'qop' not in kw:
+            kw['qop'] = self.qop
+        if 'cnonce' not in kw:
+            kw['cnonce'] = self.cnonce
+        if 'uri' not in kw:
+            kw['uri'] = self.uri
+        if quotes:
+            quote = '"'
+        else:
+            quote = ''
+        return ', '.join([
+                '%s=%s%s%s' % (k, quote, v, quote)
+                for (k, v)
+                in kw.iteritems()
+                if v is not None])
+
+
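As a rough illustration of the helper above (not part of the test suite), the join produces a comma-separated header value; the field names and values here are invented:

```python
# What formatResponse() builds: key="value" pairs joined with ', '.
fields = {'username': 'user', 'realm': 'test', 'nonce': 'abc123'}
header = ', '.join('%s="%s"' % (k, v) for k, v in fields.items())
# e.g. 'username="user", realm="test", nonce="abc123"' (dict order may vary)
```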
+    def getDigestResponse(self, challenge, ncount):
+        """
+        Calculate the response for the given challenge.
+        """
+        nonce = challenge.get('nonce')
+        algo = challenge.get('algorithm').lower()
+        qop = challenge.get('qop')
+
+        ha1 = calcHA1(
+            algo, self.username, self.realm, self.password, nonce, self.cnonce)
+        ha2 = calcHA2(algo, "GET", self.uri, qop, None)
+        expected = calcResponse(ha1, ha2, algo, nonce, ncount, self.cnonce, qop)
+        return expected
+
+
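getDigestResponse chains calcHA1, calcHA2 and calcResponse. For the plain 'md5' algorithm with qop='auth', that chain reduces to the hashlib arithmetic below; this is a sketch following RFC 2617, and every credential value is an invented placeholder:

```python
# End-to-end digest arithmetic for algorithm 'md5', qop 'auth'.
# All inputs are invented placeholders.
from hashlib import md5

user, realm, password = 'someuser', 'somerealm', 'somepassword'
method, uri = 'GET', '/protected'
nonce, cnonce, nc, qop = 'lmnopq', 'abcxyz123', '00000001', 'auth'

ha1 = md5('%s:%s:%s' % (user, realm, password)).hexdigest()
# For 'md5-sess', RFC 2617 additionally hashes ha1 with nonce and cnonce:
#   ha1 = md5('%s:%s:%s' % (ha1, nonce, cnonce)).hexdigest()
ha2 = md5('%s:%s' % (method, uri)).hexdigest()
response = md5('%s:%s:%s:%s:%s:%s' % (
    ha1, nonce, nc, cnonce, qop, ha2)).hexdigest()
```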
+    def test_response(self, quotes=True):
+        """
+        L{DigestCredentialFactory.decode} accepts a digest challenge response
+        and parses it into an L{IUsernameHashedPassword} provider.
+        """
+        challenge = self.credentialFactory.getChallenge(self.clientAddress.host)
+
+        nc = "00000001"
+        clientResponse = self.formatResponse(
+            quotes=quotes,
+            nonce=challenge['nonce'],
+            response=self.getDigestResponse(challenge, nc),
+            nc=nc,
+            opaque=challenge['opaque'])
+        creds = self.credentialFactory.decode(
+            clientResponse, self.method, self.clientAddress.host)
+        self.assertTrue(creds.checkPassword(self.password))
+        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
+
+
+    def test_responseWithoutQuotes(self):
+        """
+        L{DigestCredentialFactory.decode} accepts a digest challenge response
+        which does not quote the values of its fields and parses it into an
+        L{IUsernameHashedPassword} provider in the same way it would a
+        response which included quoted field values.
+        """
+        self.test_response(False)
+
+
+    def test_caseInsensitiveAlgorithm(self):
+        """
+        The case of the algorithm value in the response is ignored when
+        checking the credentials.
+        """
+        self.algorithm = 'MD5'
+        self.test_response()
+
+
+    def test_md5DefaultAlgorithm(self):
+        """
+        The algorithm defaults to MD5 if it is not supplied in the response.
+        """
+        self.algorithm = None
+        self.test_response()
+
+
+    def test_responseWithoutClientIP(self):
+        """
+        L{DigestCredentialFactory.decode} accepts a digest challenge response
+        even if the client address it is passed is C{None}.
+        """
+        challenge = self.credentialFactory.getChallenge(None)
+
+        nc = "00000001"
+        clientResponse = self.formatResponse(
+            nonce=challenge['nonce'],
+            response=self.getDigestResponse(challenge, nc),
+            nc=nc,
+            opaque=challenge['opaque'])
+        creds = self.credentialFactory.decode(clientResponse, self.method, None)
+        self.assertTrue(creds.checkPassword(self.password))
+        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
+
+
+    def test_multiResponse(self):
+        """
+        L{DigestCredentialFactory.decode} handles multiple responses to a
+        single challenge.
+        """
+        challenge = self.credentialFactory.getChallenge(self.clientAddress.host)
+
+        nc = "00000001"
+        clientResponse = self.formatResponse(
+            nonce=challenge['nonce'],
+            response=self.getDigestResponse(challenge, nc),
+            nc=nc,
+            opaque=challenge['opaque'])
+
+        creds = self.credentialFactory.decode(clientResponse, self.method,
+                                              self.clientAddress.host)
+        self.assertTrue(creds.checkPassword(self.password))
+        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
+
+        nc = "00000002"
+        clientResponse = self.formatResponse(
+            nonce=challenge['nonce'],
+            response=self.getDigestResponse(challenge, nc),
+            nc=nc,
+            opaque=challenge['opaque'])
+
+        creds = self.credentialFactory.decode(clientResponse, self.method,
+                                              self.clientAddress.host)
+        self.assertTrue(creds.checkPassword(self.password))
+        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
+
+
+    def test_failsWithDifferentMethod(self):
+        """
+        L{DigestCredentialFactory.decode} returns an L{IUsernameHashedPassword}
+        provider which rejects a correct password for the given user if the
+        challenge response request is made using a different HTTP method than
+        was used to request the initial challenge.
+        """
+        challenge = self.credentialFactory.getChallenge(self.clientAddress.host)
+
+        nc = "00000001"
+        clientResponse = self.formatResponse(
+            nonce=challenge['nonce'],
+            response=self.getDigestResponse(challenge, nc),
+            nc=nc,
+            opaque=challenge['opaque'])
+        creds = self.credentialFactory.decode(clientResponse, 'POST',
+                                              self.clientAddress.host)
+        self.assertFalse(creds.checkPassword(self.password))
+        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
+
+
+    def test_noUsername(self):
+        """
+        L{DigestCredentialFactory.decode} raises L{LoginFailed} if the response
+        has no username field or if the username field is empty.
+        """
+        # Check for no username
+        e = self.assertRaises(
+            LoginFailed,
+            self.credentialFactory.decode,
+            self.formatResponse(username=None),
+            self.method, self.clientAddress.host)
+        self.assertEqual(str(e), "Invalid response, no username given.")
+
+        # Check for an empty username
+        e = self.assertRaises(
+            LoginFailed,
+            self.credentialFactory.decode,
+            self.formatResponse(username=""),
+            self.method, self.clientAddress.host)
+        self.assertEqual(str(e), "Invalid response, no username given.")
+
+
+    def test_noNonce(self):
+        """
+        L{DigestCredentialFactory.decode} raises L{LoginFailed} if the response
+        has no nonce.
+        """
+        e = self.assertRaises(
+            LoginFailed,
+            self.credentialFactory.decode,
+            self.formatResponse(opaque="abc123"),
+            self.method, self.clientAddress.host)
+        self.assertEqual(str(e), "Invalid response, no nonce given.")
+
+
+    def test_noOpaque(self):
+        """
+        L{DigestCredentialFactory.decode} raises L{LoginFailed} if the response
+        has no opaque.
+        """
+        e = self.assertRaises(
+            LoginFailed,
+            self.credentialFactory.decode,
+            self.formatResponse(),
+            self.method, self.clientAddress.host)
+        self.assertEqual(str(e), "Invalid response, no opaque given.")
+
+
+    def test_checkHash(self):
+        """
+        L{DigestCredentialFactory.decode} returns an L{IUsernameDigestHash}
+        provider which can verify a hash of the form 'username:realm:password'.
+        """
+        challenge = self.credentialFactory.getChallenge(self.clientAddress.host)
+
+        nc = "00000001"
+        clientResponse = self.formatResponse(
+            nonce=challenge['nonce'],
+            response=self.getDigestResponse(challenge, nc),
+            nc=nc,
+            opaque=challenge['opaque'])
+
+        creds = self.credentialFactory.decode(clientResponse, self.method,
+                                              self.clientAddress.host)
+        self.assertTrue(verifyObject(IUsernameDigestHash, creds))
+
+        cleartext = '%s:%s:%s' % (self.username, self.realm, self.password)
+        hash = md5(cleartext)
+        self.assertTrue(creds.checkHash(hash.hexdigest()))
+        hash.update('wrong')
+        self.assertFalse(creds.checkHash(hash.hexdigest()))
+
+
+    def test_invalidOpaque(self):
+        """
+        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the opaque
+        value does not contain all the required parts.
+        """
+        credentialFactory = FakeDigestCredentialFactory(self.algorithm,
+                                                        self.realm)
+        challenge = credentialFactory.getChallenge(self.clientAddress.host)
+
+        exc = self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            'badOpaque',
+            challenge['nonce'],
+            self.clientAddress.host)
+        self.assertEqual(str(exc), 'Invalid response, invalid opaque value')
+
+        badOpaque = 'foo-' + b64encode('nonce,clientip')
+
+        exc = self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            badOpaque,
+            challenge['nonce'],
+            self.clientAddress.host)
+        self.assertEqual(str(exc), 'Invalid response, invalid opaque value')
+
+        exc = self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            '',
+            challenge['nonce'],
+            self.clientAddress.host)
+        self.assertEqual(str(exc), 'Invalid response, invalid opaque value')
+
+        badOpaque = (
+            'foo-' + b64encode('%s,%s,foobar' % (
+                    challenge['nonce'],
+                    self.clientAddress.host)))
+        exc = self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            badOpaque,
+            challenge['nonce'],
+            self.clientAddress.host)
+        self.assertEqual(
+            str(exc), 'Invalid response, invalid opaque/time values')
+
+
+    def test_incompatibleNonce(self):
+        """
+        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the given
+        nonce from the response does not match the nonce encoded in the opaque.
+        """
+        credentialFactory = FakeDigestCredentialFactory(self.algorithm, self.realm)
+        challenge = credentialFactory.getChallenge(self.clientAddress.host)
+
+        badNonceOpaque = credentialFactory._generateOpaque(
+            '1234567890',
+            self.clientAddress.host)
+
+        exc = self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            badNonceOpaque,
+            challenge['nonce'],
+            self.clientAddress.host)
+        self.assertEqual(
+            str(exc),
+            'Invalid response, incompatible opaque/nonce values')
+
+        exc = self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            badNonceOpaque,
+            '',
+            self.clientAddress.host)
+        self.assertEqual(
+            str(exc),
+            'Invalid response, incompatible opaque/nonce values')
+
+
+    def test_incompatibleClientIP(self):
+        """
+        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the
+        request comes from a client IP other than what is encoded in the
+        opaque.
+        """
+        credentialFactory = FakeDigestCredentialFactory(self.algorithm, self.realm)
+        challenge = credentialFactory.getChallenge(self.clientAddress.host)
+
+        badAddress = '10.0.0.1'
+        # Sanity check
+        self.assertNotEqual(self.clientAddress.host, badAddress)
+
+        badNonceOpaque = credentialFactory._generateOpaque(
+            challenge['nonce'], badAddress)
+
+        self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            badNonceOpaque,
+            challenge['nonce'],
+            self.clientAddress.host)
+
+
+    def test_oldNonce(self):
+        """
+        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the given
+        opaque is older than C{DigestCredentialFactory.CHALLENGE_LIFETIME_SECS}.
+        """
+        credentialFactory = FakeDigestCredentialFactory(self.algorithm,
+                                                        self.realm)
+        challenge = credentialFactory.getChallenge(self.clientAddress.host)
+
+        key = '%s,%s,%s' % (challenge['nonce'],
+                            self.clientAddress.host,
+                            '-137876876')
+        digest = md5(key + credentialFactory.privateKey).hexdigest()
+        ekey = b64encode(key)
+
+        oldNonceOpaque = '%s-%s' % (digest, ekey.strip('\n'))
+
+        self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            oldNonceOpaque,
+            challenge['nonce'],
+            self.clientAddress.host)
+
+
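The opaque values forged in these tests follow the layout used by the fake factory above: an md5 checksum of the 'nonce,client-ip,timestamp' key concatenated with the factory's private key, a dash, then the base64 of the key itself. A standalone sketch with invented values:

```python
# Opaque layout exercised by test_oldNonce and friends:
#   md5(key + privateKey) + '-' + b64encode(key), key = 'nonce,clientip,time'
# Every value below is invented.
from hashlib import md5
from base64 import b64encode
import time

nonce, client_ip = 'abc123', '10.2.3.4'
private_key = 'not-the-real-private-key'     # hypothetical factory secret

key = '%s,%s,%s' % (nonce, client_ip, str(int(time.time())))
opaque = '%s-%s' % (md5(key + private_key).hexdigest(), b64encode(key))
```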
+    def test_mismatchedOpaqueChecksum(self):
+        """
+        L{DigestCredentialFactory.decode} raises L{LoginFailed} when the opaque
+        checksum fails verification.
+        """
+        credentialFactory = FakeDigestCredentialFactory(self.algorithm,
+                                                        self.realm)
+        challenge = credentialFactory.getChallenge(self.clientAddress.host)
+
+        key = '%s,%s,%s' % (challenge['nonce'],
+                            self.clientAddress.host,
+                            '0')
+
+        digest = md5(key + 'this is not the right pkey').hexdigest()
+        badChecksum = '%s-%s' % (digest, b64encode(key))
+
+        self.assertRaises(
+            LoginFailed,
+            credentialFactory._verifyOpaque,
+            badChecksum,
+            challenge['nonce'],
+            self.clientAddress.host)
+
+
+    def test_incompatibleCalcHA1Options(self):
+        """
+        L{calcHA1} raises L{TypeError} when any of the pszUsername, pszRealm,
+        or pszPassword arguments is specified with the preHA1 keyword
+        argument.
+        """
+        arguments = (
+            ("user", "realm", "password", "preHA1"),
+            (None, "realm", None, "preHA1"),
+            (None, None, "password", "preHA1"),
+            )
+
+        for pszUsername, pszRealm, pszPassword, preHA1 in arguments:
+            self.assertRaises(
+                TypeError,
+                calcHA1,
+                "md5",
+                pszUsername,
+                pszRealm,
+                pszPassword,
+                "nonce",
+                "cnonce",
+                preHA1=preHA1)
+
+
+    def test_noNewlineOpaque(self):
+        """
+        L{DigestCredentialFactory._generateOpaque} returns a value without
+        newlines, regardless of the length of the nonce.
+        """
+        opaque = self.credentialFactory._generateOpaque(
+            "long nonce " * 10, None)
+        self.assertNotIn('\n', opaque)
diff --git a/ThirdParty/Twisted/twisted/test/test_dirdbm.py b/ThirdParty/Twisted/twisted/test/test_dirdbm.py
new file mode 100644
index 0000000..8bd240f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_dirdbm.py
@@ -0,0 +1,170 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for the dirdbm module.
+"""
+
+import os, shutil, glob
+
+from twisted.trial import unittest
+from twisted.persisted import dirdbm
+
+
+
+class DirDbmTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.path = self.mktemp()
+        self.dbm = dirdbm.open(self.path)
+        self.items = (('abc', 'foo'), ('/lalal', '\000\001'), ('\000\012', 'baz'))
+
+
+    def testAll(self):
+        k = "//==".decode("base64")
+        self.dbm[k] = "a"
+        self.dbm[k] = "a"
+        self.assertEqual(self.dbm[k], "a")
+
+
+    def testRebuildInteraction(self):
+        from twisted.persisted import dirdbm
+        from twisted.python import rebuild
+
+        s = dirdbm.Shelf('dirdbm.rebuild.test')
+        s['key'] = 'value'
+        rebuild.rebuild(dirdbm)
+        # print s['key']
+
+
+    def testDbm(self):
+        d = self.dbm
+
+        # insert keys
+        keys = []
+        values = set()
+        for k, v in self.items:
+            d[k] = v
+            keys.append(k)
+            values.add(v)
+        keys.sort()
+
+        # check they exist
+        for k, v in self.items:
+            assert d.has_key(k), "has_key() failed"
+            assert d[k] == v, "database has wrong value"
+
+        # check non existent key
+        try:
+            d["XXX"]
+        except KeyError:
+            pass
+        else:
+            assert 0, "didn't raise KeyError on non-existent key"
+
+        # check keys(), values() and items()
+        dbkeys = list(d.keys())
+        dbvalues = set(d.values())
+        dbitems = set(d.items())
+        dbkeys.sort()
+        items = set(self.items)
+        assert keys == dbkeys, ".keys() output didn't match: %s != %s" % (repr(keys), repr(dbkeys))
+        assert values == dbvalues, ".values() output didn't match: %s != %s" % (repr(values), repr(dbvalues))
+        assert items == dbitems, "items() didn't match: %s != %s" % (repr(items), repr(dbitems))
+
+        copyPath = self.mktemp()
+        d2 = d.copyTo(copyPath)
+
+        copykeys = list(d.keys())
+        copyvalues = set(d.values())
+        copyitems = set(d.items())
+        copykeys.sort()
+
+        assert dbkeys == copykeys, ".copyTo().keys() didn't match: %s != %s" % (repr(dbkeys), repr(copykeys))
+        assert dbvalues == copyvalues, ".copyTo().values() didn't match: %s != %s" % (repr(dbvalues), repr(copyvalues))
+        assert dbitems == copyitems, ".copyTo().items() didn't match: %s != %s" % (repr(dbkeys), repr(copyitems))
+
+        d2.clear()
+        assert len(d2.keys()) == len(d2.values()) == len(d2.items()) == 0, ".clear() failed"
+        shutil.rmtree(copyPath)
+
+        # delete items
+        for k, v in self.items:
+            del d[k]
+            assert not d.has_key(k), "has_key() returned true even though the key was deleted"
+        assert len(d.keys()) == 0, "database has keys"
+        assert len(d.values()) == 0, "database has values"
+        assert len(d.items()) == 0, "database has items"
+
+
+    def testModificationTime(self):
+        import time
+        # the mtime value for files comes from a different place than the
+        # gettimeofday() system call. On linux, gettimeofday() can be
+        # slightly ahead (due to clock drift which gettimeofday() takes into
+        # account but which open()/write()/close() do not), and if we are
+        # close to the edge of the next second, time.time() can give a value
+        # which is larger than the mtime which results from a subsequent
+        # write(). I consider this a kernel bug, but it is beyond the scope
+        # of this test. Thus we keep the acceptable range at 3 seconds.
+        # -warner
+        self.dbm["k"] = "v"
+        self.assert_(abs(time.time() - self.dbm.getModificationTime("k")) <= 3)
+
+
+    def testRecovery(self):
+        """DirDBM: test recovery from directory after a faked crash"""
+        k = self.dbm._encode("key1")
+        f = open(os.path.join(self.path, k + ".rpl"), "wb")
+        f.write("value")
+        f.close()
+
+        k2 = self.dbm._encode("key2")
+        f = open(os.path.join(self.path, k2), "wb")
+        f.write("correct")
+        f.close()
+        f = open(os.path.join(self.path, k2 + ".rpl"), "wb")
+        f.write("wrong")
+        f.close()
+
+        f = open(os.path.join(self.path, "aa.new"), "wb")
+        f.write("deleted")
+        f.close()
+
+        dbm = dirdbm.DirDBM(self.path)
+        assert dbm["key1"] == "value"
+        assert dbm["key2"] == "correct"
+        assert not glob.glob(os.path.join(self.path, "*.new"))
+        assert not glob.glob(os.path.join(self.path, "*.rpl"))
+
+
+    def test_nonStringKeys(self):
+        """
+        L{dirdbm.DirDBM} operations only support string keys: other types
+        should raise an C{AssertionError}. This really ought to be a
+        C{TypeError}, but it'll stay like this for backward compatibility.
+        """
+        self.assertRaises(AssertionError, self.dbm.__setitem__, 2, "3")
+        try:
+            self.assertRaises(AssertionError, self.dbm.__setitem__, "2", 3)
+        except unittest.FailTest:
+            # dirdbm.Shelf.__setitem__ supports non-string values
+            self.assertIsInstance(self.dbm, dirdbm.Shelf)
+        self.assertRaises(AssertionError, self.dbm.__getitem__, 2)
+        self.assertRaises(AssertionError, self.dbm.__delitem__, 2)
+        self.assertRaises(AssertionError, self.dbm.has_key, 2)
+        self.assertRaises(AssertionError, self.dbm.__contains__, 2)
+        self.assertRaises(AssertionError, self.dbm.getModificationTime, 2)
+
+
+
+class ShelfTestCase(DirDbmTestCase):
+
+    def setUp(self):
+        self.path = self.mktemp()
+        self.dbm = dirdbm.Shelf(self.path)
+        self.items = (('abc', 'foo'), ('/lalal', '\000\001'), ('\000\012', 'baz'),
+                      ('int', 12), ('float', 12.0), ('tuple', (None, 12)))
+
+
+testCases = [DirDbmTestCase, ShelfTestCase]
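Outside the test harness, the dirdbm surface exercised above is a small dict-like API. A minimal usage sketch; the directory names are invented:

```python
# Minimal dirdbm usage mirroring what testDbm exercises; the directory names
# are arbitrary examples.
from twisted.persisted import dirdbm

d = dirdbm.open('demo-db')        # one file per key inside this directory
d['greeting'] = 'hello'           # DirDBM keys and values are plain strings
assert d['greeting'] == 'hello'
assert 'greeting' in d.keys()

copy = d.copyTo('demo-db-copy')   # duplicate the store into another directory
copy.clear()                      # drop every key from the copy

del d['greeting']
```

The Shelf variant used in ShelfTestCase accepts any picklable value rather than only strings.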
diff --git a/ThirdParty/Twisted/twisted/test/test_doc.py b/ThirdParty/Twisted/twisted/test/test_doc.py
new file mode 100644
index 0000000..795fd87
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_doc.py
@@ -0,0 +1,104 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import inspect, glob
+from os import path
+
+from twisted.trial import unittest
+from twisted.python import reflect
+from twisted.python.modules import getModule
+
+
+
+def errorInFile(f, line=17, name=''):
+    """
+    Return a filename formatted so emacs will recognize it as an error point.
+
+    @param line: Line number in file.  Defaults to 17 because that's about how
+        long the copyright headers are.
+    """
+    return '%s:%d:%s' % (f, line, name)
+    # return 'File "%s", line %d, in %s' % (f, line, name)
+
+
+class DocCoverage(unittest.TestCase):
+    """
+    Looking for docstrings in all modules and packages.
+    """
+    def setUp(self):
+        self.packageNames = []
+        for mod in getModule('twisted').walkModules():
+            if mod.isPackage():
+                self.packageNames.append(mod.name)
+
+
+    def testModules(self):
+        """
+        Looking for docstrings in all modules.
+        """
+        docless = []
+        for packageName in self.packageNames:
+            if packageName in ('twisted.test',):
+                # because some stuff in here behaves oddly when imported
+                continue
+            try:
+                package = reflect.namedModule(packageName)
+            except ImportError, e:
+                # This is testing doc coverage, not importability.
+                # (Really, I don't want to deal with the fact that I don't
+                #  have pyserial installed.)
+                # print e
+                pass
+            else:
+                docless.extend(self.modulesInPackage(packageName, package))
+        self.failIf(docless, "No docstrings in module files:\n"
+                    "%s" % ('\n'.join(map(errorInFile, docless)),))
+
+
+    def modulesInPackage(self, packageName, package):
+        docless = []
+        directory = path.dirname(package.__file__)
+        for modfile in glob.glob(path.join(directory, '*.py')):
+            moduleName = inspect.getmodulename(modfile)
+            if moduleName == '__init__':
+                # These are tested by test_packages.
+                continue
+            elif moduleName in ('spelunk_gnome','gtkmanhole'):
+                # argh special case pygtk evil argh.  How does epydoc deal
+                # with this?
+                continue
+            try:
+                module = reflect.namedModule('.'.join([packageName,
+                                                       moduleName]))
+            except Exception, e:
+                # print moduleName, "misbehaved:", e
+                pass
+            else:
+                if not inspect.getdoc(module):
+                    docless.append(modfile)
+        return docless
+
+
+    def testPackages(self):
+        """
+        Looking for docstrings in all packages.
+        """
+        docless = []
+        for packageName in self.packageNames:
+            try:
+                package = reflect.namedModule(packageName)
+            except Exception, e:
+                # This is testing doc coverage, not importability.
+                # (Really, I don't want to deal with the fact that I don't
+                #  have pyserial installed.)
+                # print e
+                pass
+            else:
+                if not inspect.getdoc(package):
+                    docless.append(package.__file__.replace('.pyc','.py'))
+        self.failIf(docless, "No docstrings for package files\n"
+                    "%s" % ('\n'.join(map(errorInFile, docless),)))
+
+
+    # This test takes a while and doesn't come close to passing.  :(
+    testModules.skip = "Activate me when you feel like writing docstrings, and fixing GTK crashing bugs."
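The walk above relies on twisted.python.modules and twisted.python.reflect; the same coverage idea can be sketched with only the standard library (the package name is just an example):

```python
# Stdlib-only analogue of the docstring-coverage walk: import each module in
# a package and report the ones without a module docstring.
import inspect
import pkgutil
import importlib

def modulesWithoutDocstrings(packageName='json'):     # example package
    package = importlib.import_module(packageName)
    docless = []
    for _, name, _ in pkgutil.walk_packages(package.__path__,
                                            prefix=packageName + '.'):
        try:
            module = importlib.import_module(name)
        except Exception:
            continue          # coverage is being tested, not importability
        if not inspect.getdoc(module):
            docless.append(name)
    return docless
```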
diff --git a/ThirdParty/Twisted/twisted/test/test_epoll.py b/ThirdParty/Twisted/twisted/test/test_epoll.py
new file mode 100644
index 0000000..b96e06f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_epoll.py
@@ -0,0 +1,158 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for epoll wrapper.
+"""
+
+import socket, errno, time
+
+from twisted.trial import unittest
+from twisted.python.util import untilConcludes
+
+try:
+    from twisted.python import _epoll
+except ImportError:
+    _epoll = None
+
+
+class EPoll(unittest.TestCase):
+    """
+    Tests for the low-level epoll bindings.
+    """
+    def setUp(self):
+        """
+        Create a listening server port and a list with which to keep track
+        of created sockets.
+        """
+        self.serverSocket = socket.socket()
+        self.serverSocket.bind(('127.0.0.1', 0))
+        self.serverSocket.listen(1)
+        self.connections = [self.serverSocket]
+
+
+    def tearDown(self):
+        """
+        Close any sockets which were opened by the test.
+        """
+        for skt in self.connections:
+            skt.close()
+
+
+    def _connectedPair(self):
+        """
+        Return the two sockets which make up a new TCP connection.
+        """
+        client = socket.socket()
+        client.setblocking(False)
+        try:
+            client.connect(('127.0.0.1', self.serverSocket.getsockname()[1]))
+        except socket.error, e:
+            self.assertEqual(e.args[0], errno.EINPROGRESS)
+        else:
+            raise unittest.FailTest("Connect should have raised EINPROGRESS")
+        server, addr = self.serverSocket.accept()
+
+        self.connections.extend((client, server))
+        return client, server
+
+
+    def test_create(self):
+        """
+        Test the creation of an epoll object.
+        """
+        try:
+            p = _epoll.epoll(16)
+        except OSError, e:
+            raise unittest.FailTest(str(e))
+        else:
+            p.close()
+
+
+    def test_badCreate(self):
+        """
+        Test that attempting to create an epoll object with some random
+        objects raises a TypeError.
+        """
+        self.assertRaises(TypeError, _epoll.epoll, 1, 2, 3)
+        self.assertRaises(TypeError, _epoll.epoll, 'foo')
+        self.assertRaises(TypeError, _epoll.epoll, None)
+        self.assertRaises(TypeError, _epoll.epoll, ())
+        self.assertRaises(TypeError, _epoll.epoll, ['foo'])
+        self.assertRaises(TypeError, _epoll.epoll, {})
+
+
+    def test_add(self):
+        """
+        Test adding a socket to an epoll object.
+        """
+        server, client = self._connectedPair()
+
+        p = _epoll.epoll(2)
+        try:
+            p._control(_epoll.CTL_ADD, server.fileno(), _epoll.IN | _epoll.OUT)
+            p._control(_epoll.CTL_ADD, client.fileno(), _epoll.IN | _epoll.OUT)
+        finally:
+            p.close()
+
+
+    def test_controlAndWait(self):
+        """
+        Test waiting on an epoll object which has had some sockets added to
+        it.
+        """
+        client, server = self._connectedPair()
+
+        p = _epoll.epoll(16)
+        p._control(_epoll.CTL_ADD, client.fileno(), _epoll.IN | _epoll.OUT |
+                   _epoll.ET)
+        p._control(_epoll.CTL_ADD, server.fileno(), _epoll.IN | _epoll.OUT |
+                   _epoll.ET)
+
+        now = time.time()
+        events = untilConcludes(p.wait, 4, 1000)
+        then = time.time()
+        self.failIf(then - now > 0.01)
+
+        events.sort()
+        expected = [(client.fileno(), _epoll.OUT),
+                    (server.fileno(), _epoll.OUT)]
+        expected.sort()
+
+        self.assertEqual(events, expected)
+
+        now = time.time()
+        events = untilConcludes(p.wait, 4, 200)
+        then = time.time()
+        self.failUnless(then - now > 0.1)
+        self.failIf(events)
+
+        client.send("Hello!")
+        server.send("world!!!")
+
+        now = time.time()
+        events = untilConcludes(p.wait, 4, 1000)
+        then = time.time()
+        self.failIf(then - now > 0.01)
+
+        events.sort()
+        expected = [(client.fileno(), _epoll.IN | _epoll.OUT),
+                    (server.fileno(), _epoll.IN | _epoll.OUT)]
+        expected.sort()
+
+        self.assertEqual(events, expected)
+
+if _epoll is None:
+    EPoll.skip = "_epoll module unavailable"
+else:
+    try:
+        e = _epoll.epoll(16)
+    except IOError, exc:
+        if exc.errno == errno.ENOSYS:
+            del exc
+            EPoll.skip = "epoll support missing from platform"
+        else:
+            raise
+    else:
+        e.close()
+        del e
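The private bindings above wrap the same Linux interface that the standard library exposes as select.epoll; with the stdlib API (not twisted.python._epoll), the register-and-wait dance looks roughly like this:

```python
# Linux-only sketch using the standard library's select.epoll, which offers
# the same readiness notification as the private _epoll bindings.
import select
import socket

server = socket.socket()
server.bind(('127.0.0.1', 0))
server.listen(1)

client = socket.socket()
client.setblocking(False)
try:
    client.connect(('127.0.0.1', server.getsockname()[1]))
except socket.error:
    pass                                  # EINPROGRESS is expected here

ep = select.epoll()
ep.register(client.fileno(), select.EPOLLIN | select.EPOLLOUT)
events = ep.poll(1.0)                     # [(fd, eventmask), ...]

ep.unregister(client.fileno())
ep.close()
client.close()
server.close()
```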
diff --git a/ThirdParty/Twisted/twisted/test/test_error.py b/ThirdParty/Twisted/twisted/test/test_error.py
new file mode 100644
index 0000000..917905f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_error.py
@@ -0,0 +1,251 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from __future__ import division, absolute_import
+
+import socket, errno
+from twisted.trial import unittest
+from twisted.internet import error
+from twisted.python.runtime import platformType
+
+
+class TestStringification(unittest.SynchronousTestCase):
+    """Test that the exceptions have useful stringifications.
+    """
+
+    listOfTests = [
+        #(output, exception[, args[, kwargs]]),
+
+        ("An error occurred binding to an interface.",
+         error.BindError),
+
+        ("An error occurred binding to an interface: foo.",
+         error.BindError, ['foo']),
+
+        ("An error occurred binding to an interface: foo bar.",
+         error.BindError, ['foo', 'bar']),
+
+        ("Couldn't listen on eth0:4242: Foo.",
+         error.CannotListenError,
+         ('eth0', 4242, socket.error('Foo'))),
+
+        ("Message is too long to send.",
+         error.MessageLengthError),
+
+        ("Message is too long to send: foo bar.",
+         error.MessageLengthError, ['foo', 'bar']),
+
+        ("DNS lookup failed.",
+         error.DNSLookupError),
+
+        ("DNS lookup failed: foo bar.",
+         error.DNSLookupError, ['foo', 'bar']),
+
+        ("An error occurred while connecting.",
+         error.ConnectError),
+
+        ("An error occurred while connecting: someOsError.",
+         error.ConnectError, ['someOsError']),
+
+        ("An error occurred while connecting: foo.",
+         error.ConnectError, [], {'string': 'foo'}),
+
+        ("An error occurred while connecting: someOsError: foo.",
+         error.ConnectError, ['someOsError', 'foo']),
+
+        ("Couldn't bind.",
+         error.ConnectBindError),
+
+        ("Couldn't bind: someOsError.",
+         error.ConnectBindError, ['someOsError']),
+
+        ("Couldn't bind: someOsError: foo.",
+         error.ConnectBindError, ['someOsError', 'foo']),
+
+        ("Hostname couldn't be looked up.",
+         error.UnknownHostError),
+
+        ("No route to host.",
+         error.NoRouteError),
+
+        ("Connection was refused by other side.",
+         error.ConnectionRefusedError),
+
+        ("TCP connection timed out.",
+         error.TCPTimedOutError),
+
+        ("File used for UNIX socket is no good.",
+         error.BadFileError),
+
+        ("Service name given as port is unknown.",
+         error.ServiceNameUnknownError),
+
+        ("User aborted connection.",
+         error.UserError),
+
+        ("User timeout caused connection failure.",
+         error.TimeoutError),
+
+        ("An SSL error occurred.",
+         error.SSLError),
+
+        ("Connection to the other side was lost in a non-clean fashion.",
+         error.ConnectionLost),
+
+        ("Connection to the other side was lost in a non-clean fashion: foo bar.",
+         error.ConnectionLost, ['foo', 'bar']),
+
+        ("Connection was closed cleanly.",
+         error.ConnectionDone),
+
+        ("Connection was closed cleanly: foo bar.",
+         error.ConnectionDone, ['foo', 'bar']),
+
+        ("Uh.", #TODO nice docstring, you've got there.
+         error.ConnectionFdescWentAway),
+
+        ("Tried to cancel an already-called event.",
+         error.AlreadyCalled),
+
+        ("Tried to cancel an already-called event: foo bar.",
+         error.AlreadyCalled, ['foo', 'bar']),
+
+        ("Tried to cancel an already-cancelled event.",
+         error.AlreadyCancelled),
+
+        ("Tried to cancel an already-cancelled event: x 2.",
+         error.AlreadyCancelled, ["x", "2"]),
+
+        ("A process has ended without apparent errors: process finished with exit code 0.",
+         error.ProcessDone,
+         [None]),
+
+        ("A process has ended with a probable error condition: process ended.",
+         error.ProcessTerminated),
+
+        ("A process has ended with a probable error condition: process ended with exit code 42.",
+         error.ProcessTerminated,
+         [],
+         {'exitCode': 42}),
+
+        ("A process has ended with a probable error condition: process ended by signal SIGBUS.",
+         error.ProcessTerminated,
+         [],
+         {'signal': 'SIGBUS'}),
+
+        ("The Connector was not connecting when it was asked to stop connecting.",
+         error.NotConnectingError),
+
+        ("The Connector was not connecting when it was asked to stop connecting: x 13.",
+         error.NotConnectingError, ["x", "13"]),
+
+        ("The Port was not listening when it was asked to stop listening.",
+         error.NotListeningError),
+
+        ("The Port was not listening when it was asked to stop listening: a 12.",
+         error.NotListeningError, ["a", "12"]),
+        ]
+
+    def testThemAll(self):
+        for entry in self.listOfTests:
+            output = entry[0]
+            exception = entry[1]
+            try:
+                args = entry[2]
+            except IndexError:
+                args = ()
+            try:
+                kwargs = entry[3]
+            except IndexError:
+                kwargs = {}
+
+            self.assertEqual(
+                str(exception(*args, **kwargs)),
+                output)
+
+
+    def test_connectionLostSubclassOfConnectionClosed(self):
+        """
+        L{error.ConnectionClosed} is a superclass of L{error.ConnectionLost}.
+        """
+        self.assertTrue(issubclass(error.ConnectionLost,
+                                   error.ConnectionClosed))
+
+
+    def test_connectionDoneSubclassOfConnectionClosed(self):
+        """
+        L{error.ConnectionClosed} is a superclass of L{error.ConnectionDone}.
+        """
+        self.assertTrue(issubclass(error.ConnectionDone,
+                                   error.ConnectionClosed))
+
+
+    def test_connectingCancelledError(self):
+        """
+        L{error.ConnectingCancelledError} has an C{address} attribute.
+        """
+        address = object()
+        e = error.ConnectingCancelledError(address)
+        self.assertIdentical(e.address, address)
+
+
+
+class GetConnectErrorTests(unittest.SynchronousTestCase):
+    """
+    Given an exception instance thrown by C{socket.connect},
+    L{error.getConnectError} returns the appropriate high-level Twisted
+    exception instance.
+    """
+
+    def assertErrnoException(self, errno, expectedClass):
+        """
+        When called with a tuple with the given errno,
+        L{error.getConnectError} returns an exception which is an instance of
+        the expected class.
+        """
+        e = (errno, "lalala")
+        result = error.getConnectError(e)
+        self.assertCorrectException(errno, "lalala", result, expectedClass)
+
+
+    def assertCorrectException(self, errno, message, result, expectedClass):
+        """
+        The given result of L{error.getConnectError} has the given attributes
+        (C{osError} and C{args}), and is an instance of the given class.
+        """
+
+        # Want exact class match, not inherited classes, so no isinstance():
+        self.assertEqual(result.__class__, expectedClass)
+        self.assertEqual(result.osError, errno)
+        self.assertEqual(result.args, (message,))
+
+
+    def test_errno(self):
+        """
+        L{error.getConnectError} converts based on errno for C{socket.error}.
+        """
+        self.assertErrnoException(errno.ENETUNREACH, error.NoRouteError)
+        self.assertErrnoException(errno.ECONNREFUSED, error.ConnectionRefusedError)
+        self.assertErrnoException(errno.ETIMEDOUT, error.TCPTimedOutError)
+        if platformType == "win32":
+            self.assertErrnoException(errno.WSAECONNREFUSED, error.ConnectionRefusedError)
+            self.assertErrnoException(errno.WSAENETUNREACH, error.NoRouteError)
+
+
+    def test_gaierror(self):
+        """
+        L{error.getConnectError} converts to a L{error.UnknownHostError} given
+        a C{socket.gaierror} instance.
+        """
+        result = error.getConnectError(socket.gaierror(12, "hello"))
+        self.assertCorrectException(12, "hello", result, error.UnknownHostError)
+
+
+    def test_nonTuple(self):
+        """
+        L{error.getConnectError} converts to a L{error.ConnectError} given
+        an argument that cannot be unpacked.
+        """
+        e = Exception()
+        result = error.getConnectError(e)
+        self.assertCorrectException(None, e, result, error.ConnectError)
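In application code, the helper these tests pin down is used right where a connect attempt fails; a small sketch of the mapping asserted above:

```python
# Sketch of the errno -> exception mapping asserted by GetConnectErrorTests.
import errno
import socket

from twisted.internet import error

e = error.getConnectError((errno.ECONNREFUSED, 'connection refused'))
assert isinstance(e, error.ConnectionRefusedError)
assert e.osError == errno.ECONNREFUSED

# Name-resolution failures map to UnknownHostError instead.
g = error.getConnectError(socket.gaierror(12, 'address lookup failed'))
assert isinstance(g, error.UnknownHostError)
```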
diff --git a/ThirdParty/Twisted/twisted/test/test_explorer.py b/ThirdParty/Twisted/twisted/test/test_explorer.py
new file mode 100644
index 0000000..2b8fcf0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_explorer.py
@@ -0,0 +1,236 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Test cases for the explorer module.
+"""
+
+from twisted.trial import unittest
+
+from twisted.manhole import explorer
+
+import types
+
+"""
+# Tests:
+
+ Get an ObjectLink.  Browse ObjectLink.identifier.  Is it the same?
+
+ Watch Object.  Make sure an ObjectLink is received when:
+   Call a method.
+   Set an attribute.
+
+ Have an Object with a setattr class.  Watch it.
+   Do both the native setattr and the watcher get called?
+
+ Sequences with circular references.  Does it blow up?
+"""
+
+class SomeDohickey:
+    def __init__(self, *a):
+        self.__dict__['args'] = a
+
+    def bip(self):
+        return self.args
+
+
+class TestBrowser(unittest.TestCase):
+    def setUp(self):
+        self.pool = explorer.explorerPool
+        self.pool.clear()
+        self.testThing = ["How many stairs must a man climb down?",
+                          SomeDohickey(42)]
+
+    def test_chain(self):
+        "Following a chain of Explorers."
+        xplorer = self.pool.getExplorer(self.testThing, 'testThing')
+        self.assertEqual(xplorer.id, id(self.testThing))
+        self.assertEqual(xplorer.identifier, 'testThing')
+
+        dxplorer = xplorer.get_elements()[1]
+        self.assertEqual(dxplorer.id, id(self.testThing[1]))
+
+class Watcher:
+    zero = 0
+    def __init__(self):
+        self.links = []
+
+    def receiveBrowserObject(self, olink):
+        self.links.append(olink)
+
+    def setZero(self):
+        self.zero = len(self.links)
+
+    def len(self):
+        return len(self.links) - self.zero
+
+
+class SetattrDohickey:
+    def __setattr__(self, k, v):
+        v = list(str(v))
+        v.reverse()
+        self.__dict__[k] = ''.join(v)
+
+class MiddleMan(SomeDohickey, SetattrDohickey):
+    pass
+
+# class TestWatch(unittest.TestCase):
+class FIXME_Watch:
+    def setUp(self):
+        self.globalNS = globals().copy()
+        self.localNS = {}
+        self.browser = explorer.ObjectBrowser(self.globalNS, self.localNS)
+        self.watcher = Watcher()
+
+    def test_setAttrPlain(self):
+        "Triggering a watcher response by setting an attribute."
+
+        testThing = SomeDohickey('pencil')
+        self.browser.watchObject(testThing, 'testThing',
+                                 self.watcher.receiveBrowserObject)
+        self.watcher.setZero()
+
+        testThing.someAttr = 'someValue'
+
+        self.assertEqual(testThing.someAttr, 'someValue')
+        self.failUnless(self.watcher.len())
+        olink = self.watcher.links[-1]
+        self.assertEqual(olink.id, id(testThing))
+
+    def test_setAttrChain(self):
+        "Setting an attribute on a watched object that has __setattr__"
+        testThing = MiddleMan('pencil')
+
+        self.browser.watchObject(testThing, 'testThing',
+                                 self.watcher.receiveBrowserObject)
+        self.watcher.setZero()
+
+        testThing.someAttr = 'ZORT'
+
+        self.assertEqual(testThing.someAttr, 'TROZ')
+        self.failUnless(self.watcher.len())
+        olink = self.watcher.links[-1]
+        self.assertEqual(olink.id, id(testThing))
+
+
+    def test_method(self):
+        "Triggering a watcher response by invoking a method."
+
+        for testThing in (SomeDohickey('pencil'), MiddleMan('pencil')):
+            self.browser.watchObject(testThing, 'testThing',
+                                     self.watcher.receiveBrowserObject)
+            self.watcher.setZero()
+
+            rval = testThing.bip()
+            self.assertEqual(rval, ('pencil',))
+
+            self.failUnless(self.watcher.len())
+            olink = self.watcher.links[-1]
+            self.assertEqual(olink.id, id(testThing))
+
+
+def function_noArgs():
+    "A function which accepts no arguments at all."
+    return
+
+def function_simple(a, b, c):
+    "A function which accepts several arguments."
+    return a, b, c
+
+def function_variable(*a, **kw):
+    "A function which accepts a variable number of args and keywords."
+    return a, kw
+
+def function_crazy((alpha, beta), c, d=range(4), **kw):
+    "A function with a mad crazy signature."
+    return alpha, beta, c, d, kw
+
+class TestBrowseFunction(unittest.TestCase):
+
+    def setUp(self):
+        self.pool = explorer.explorerPool
+        self.pool.clear()
+
+    def test_sanity(self):
+        """Basic checks for browse_function.
+
+        Was the proper type returned?  Does it have the right name and ID?
+        """
+        for f_name in ('function_noArgs', 'function_simple',
+                       'function_variable', 'function_crazy'):
+            f = eval(f_name)
+
+            xplorer = self.pool.getExplorer(f, f_name)
+
+            self.assertEqual(xplorer.id, id(f))
+
+            self.failUnless(isinstance(xplorer, explorer.ExplorerFunction))
+
+            self.assertEqual(xplorer.name, f_name)
+
+    def test_signature_noArgs(self):
+        """Testing zero-argument function signature.
+        """
+
+        xplorer = self.pool.getExplorer(function_noArgs, 'function_noArgs')
+
+        self.assertEqual(len(xplorer.signature), 0)
+
+    def test_signature_simple(self):
+        """Testing simple function signature.
+        """
+
+        xplorer = self.pool.getExplorer(function_simple, 'function_simple')
+
+        expected_signature = ('a','b','c')
+
+        self.assertEqual(xplorer.signature.name, expected_signature)
+
+    def test_signature_variable(self):
+        """Testing variable-argument function signature.
+        """
+
+        xplorer = self.pool.getExplorer(function_variable,
+                                        'function_variable')
+
+        expected_names = ('a','kw')
+        signature = xplorer.signature
+
+        self.assertEqual(signature.name, expected_names)
+        self.failUnless(signature.is_varlist(0))
+        self.failUnless(signature.is_keyword(1))
+
+    def test_signature_crazy(self):
+        """Testing function with crazy signature.
+        """
+        xplorer = self.pool.getExplorer(function_crazy, 'function_crazy')
+
+        signature = xplorer.signature
+
+        expected_signature = [{'name': 'c'},
+                              {'name': 'd',
+                               'default': range(4)},
+                              {'name': 'kw',
+                               'keywords': 1}]
+
+        # The name of the first argument seems to be indecipherable,
+        # but make sure it has one (and no default).
+        self.failUnless(signature.get_name(0))
+        self.failUnless(not signature.get_default(0)[0])
+
+        self.assertEqual(signature.get_name(1), 'c')
+
+        # Get a list of values from a list of ExplorerImmutables.
+        arg_2_default = map(lambda l: l.value,
+                            signature.get_default(2)[1].get_elements())
+
+        self.assertEqual(signature.get_name(2), 'd')
+        self.assertEqual(arg_2_default, range(4))
+
+        self.assertEqual(signature.get_name(3), 'kw')
+        self.failUnless(signature.is_keyword(3))
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/ThirdParty/Twisted/twisted/test/test_factories.py b/ThirdParty/Twisted/twisted/test/test_factories.py
new file mode 100644
index 0000000..dffaf5a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_factories.py
@@ -0,0 +1,145 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test code for basic Factory classes.
+"""
+
+from __future__ import division, absolute_import
+
+import pickle
+
+from twisted.trial.unittest import TestCase
+
+from twisted.internet.task import Clock
+from twisted.internet.protocol import ReconnectingClientFactory, Protocol
+
+
+class FakeConnector(object):
+    """
+    A fake connector class, to be used to mock connections failed or lost.
+    """
+
+    def stopConnecting(self):
+        pass
+
+
+    def connect(self):
+        pass
+
+
+
+class ReconnectingFactoryTestCase(TestCase):
+    """
+    Tests for L{ReconnectingClientFactory}.
+    """
+
+    def test_stopTryingWhenConnected(self):
+        """
+        If a L{ReconnectingClientFactory} has C{stopTrying} called while it is
+        connected, it does not subsequently attempt to reconnect if the
+        connection is later lost.
+        """
+        class NoConnectConnector(object):
+            def stopConnecting(self):
+                raise RuntimeError("Shouldn't be called, we're connected.")
+            def connect(self):
+                raise RuntimeError("Shouldn't be reconnecting.")
+
+        c = ReconnectingClientFactory()
+        c.protocol = Protocol
+        # Let's pretend we've connected:
+        c.buildProtocol(None)
+        # Now we stop trying, then disconnect:
+        c.stopTrying()
+        c.clientConnectionLost(NoConnectConnector(), None)
+        self.assertFalse(c.continueTrying)
+
+
+    def test_stopTryingDoesNotReconnect(self):
+        """
+        Calling stopTrying on a L{ReconnectingClientFactory} doesn't attempt a
+        retry on any active connector.
+        """
+        class FactoryAwareFakeConnector(FakeConnector):
+            attemptedRetry = False
+
+            def stopConnecting(self):
+                """
+                Behave as though an ongoing connection attempt has now
+                failed, and notify the factory of this.
+                """
+                f.clientConnectionFailed(self, None)
+
+            def connect(self):
+                """
+                Record an attempt to reconnect, since this is what we
+                are trying to avoid.
+                """
+                self.attemptedRetry = True
+
+        f = ReconnectingClientFactory()
+        f.clock = Clock()
+
+        # simulate an active connection - stopConnecting on this connector should
+        # be triggered when we call stopTrying
+        f.connector = FactoryAwareFakeConnector()
+        f.stopTrying()
+
+        # make sure we never attempted to retry
+        self.assertFalse(f.connector.attemptedRetry)
+        self.assertFalse(f.clock.getDelayedCalls())
+
+
+    def test_serializeUnused(self):
+        """
+        A L{ReconnectingClientFactory} which hasn't been used for anything
+        can be pickled and unpickled and end up with the same state.
+        """
+        original = ReconnectingClientFactory()
+        reconstituted = pickle.loads(pickle.dumps(original))
+        self.assertEqual(original.__dict__, reconstituted.__dict__)
+
+
+    def test_serializeWithClock(self):
+        """
+        The C{clock} attribute of L{ReconnectingClientFactory} is not
+        serialized; after unpickling it is reset to its default value of
+        C{None}, which means the reactor will be used.
+        """
+        clock = Clock()
+        original = ReconnectingClientFactory()
+        original.clock = clock
+        reconstituted = pickle.loads(pickle.dumps(original))
+        self.assertIdentical(reconstituted.clock, None)
+
+
+    def test_deserializationResetsParameters(self):
+        """
+        A L{ReconnectingClientFactory} which is unpickled does not have an
+        L{IConnector} and has its reconnecting timing parameters reset to their
+        initial values.
+        """
+        factory = ReconnectingClientFactory()
+        factory.clientConnectionFailed(FakeConnector(), None)
+        self.addCleanup(factory.stopTrying)
+
+        serialized = pickle.dumps(factory)
+        unserialized = pickle.loads(serialized)
+        self.assertEqual(unserialized.connector, None)
+        self.assertEqual(unserialized._callID, None)
+        self.assertEqual(unserialized.retries, 0)
+        self.assertEqual(unserialized.delay, factory.initialDelay)
+        self.assertEqual(unserialized.continueTrying, True)
+
+
+    def test_parametrizedClock(self):
+        """
+        The clock used by L{ReconnectingClientFactory} can be parametrized, so
+        that one can cleanly test reconnections.
+        """
+        clock = Clock()
+        factory = ReconnectingClientFactory()
+        factory.clock = clock
+
+        factory.clientConnectionLost(FakeConnector(), None)
+        self.assertEqual(len(clock.calls), 1)
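test_parametrizedClock is the pattern to copy for deterministic reconnection tests: give the factory a task.Clock and advance it by hand instead of waiting on the real reactor. A sketch; NoopConnector is an invented stand-in, like FakeConnector above:

```python
# Deterministic reconnection sketch: swap the factory's clock for task.Clock
# and drive the retry by hand.  NoopConnector is an invented stand-in.
from twisted.internet.task import Clock
from twisted.internet.protocol import ReconnectingClientFactory

class NoopConnector(object):
    def connect(self):
        pass                      # the scheduled retry calls this
    def stopConnecting(self):
        pass

clock = Clock()
factory = ReconnectingClientFactory()
factory.clock = clock

factory.clientConnectionLost(NoopConnector(), None)   # schedules a retry
assert len(clock.calls) == 1                          # one delayed call pending

clock.advance(factory.delay)                          # fire the retry now
factory.stopTrying()                                  # no further attempts
```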
diff --git a/ThirdParty/Twisted/twisted/test/test_failure.py b/ThirdParty/Twisted/twisted/test/test_failure.py
new file mode 100644
index 0000000..368ebc6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_failure.py
@@ -0,0 +1,781 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for failure module.
+"""
+from __future__ import division, absolute_import
+
+import re
+import sys
+import traceback
+import pdb
+
+from twisted.python.compat import NativeStringIO, _PY3
+from twisted.internet import defer
+
+from twisted.trial.unittest import SynchronousTestCase
+
+from twisted.python import failure
+
+try:
+    from twisted.test import raiser
+except ImportError:
+    raiser = None
+
+
+def getDivisionFailure(*args, **kwargs):
+    """
+    Make a C{Failure} of a divide-by-zero error.
+
+    @param args: Any C{*args} are passed to Failure's constructor.
+    @param kwargs: Any C{**kwargs} are passed to Failure's constructor.
+    """
+    try:
+        1/0
+    except:
+        f = failure.Failure(*args, **kwargs)
+    return f
+
+
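The helper above is the pattern most of this file leans on: build a Failure inside an except block, then interrogate it. A standalone sketch using only the attributes these tests exercise (type, value, trap):

```python
# Capture the active exception as a Failure and inspect it.
from twisted.python import failure

try:
    1 / 0
except:
    f = failure.Failure()             # wraps the current exception + traceback

assert f.type is ZeroDivisionError    # the exception class
assert isinstance(f.value, ZeroDivisionError)

matched = f.trap(OverflowError, ZeroDivisionError)
assert matched is ZeroDivisionError   # trap() returns the class that matched
```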
+class FailureTestCase(SynchronousTestCase):
+
+    def testFailAndTrap(self):
+        """Trapping a failure."""
+        try:
+            raise NotImplementedError('test')
+        except:
+            f = failure.Failure()
+        error = f.trap(SystemExit, RuntimeError)
+        self.assertEqual(error, RuntimeError)
+        self.assertEqual(f.type, NotImplementedError)
+
+
+    def test_notTrapped(self):
+        """Making sure trap doesn't trap what it shouldn't."""
+        exception = ValueError()
+        try:
+            raise exception
+        except:
+            f = failure.Failure()
+
+        # On Python 2, the same failure is reraised:
+        if not _PY3:
+            untrapped = self.assertRaises(failure.Failure, f.trap, OverflowError)
+            self.assertIdentical(f, untrapped)
+
+        # On both Python 2 and Python 3, the underlying exception is passed
+        # on:
+        try:
+            f.trap(OverflowError)
+        except:
+            untrapped = failure.Failure()
+            self.assertIdentical(untrapped.value, exception)
+        else:
+            self.fail("Exception was not re-raised.")
+
+
+    def assertStartsWith(self, s, prefix):
+        """
+        Assert that s starts with a particular prefix.
+        """
+        self.assertTrue(s.startswith(prefix),
+                        '%r is not the start of %r' % (prefix, s))
+
+
+    def test_printingSmokeTest(self):
+        """
+        None of the print* methods fail when called.
+        """
+        f = getDivisionFailure()
+        out = NativeStringIO()
+        f.printDetailedTraceback(out)
+        self.assertStartsWith(out.getvalue(), '*--- Failure')
+        out = NativeStringIO()
+        f.printBriefTraceback(out)
+        self.assertStartsWith(out.getvalue(), 'Traceback')
+        out = NativeStringIO()
+        f.printTraceback(out)
+        self.assertStartsWith(out.getvalue(), 'Traceback')
+
+
+    def test_printingCapturedVarsSmokeTest(self):
+        """
+        None of the print* methods fail when called on a L{Failure} constructed
+        with C{captureVars=True}.
+
+        Local variables on the stack can be seen in the detailed traceback.
+        """
+        exampleLocalVar = 'xyzzy'
+        f = getDivisionFailure(captureVars=True)
+        out = NativeStringIO()
+        f.printDetailedTraceback(out)
+        self.assertStartsWith(out.getvalue(), '*--- Failure')
+        self.assertNotEqual(None, re.search('exampleLocalVar.*xyzzy',
+                                            out.getvalue()))
+        out = NativeStringIO()
+        f.printBriefTraceback(out)
+        self.assertStartsWith(out.getvalue(), 'Traceback')
+        out = NativeStringIO()
+        f.printTraceback(out)
+        self.assertStartsWith(out.getvalue(), 'Traceback')
+
+
+    def test_printingCapturedVarsCleanedSmokeTest(self):
+        """
+        C{printDetailedTraceback} includes information about local variables on
+        the stack after C{cleanFailure} has been called.
+        """
+        exampleLocalVar = 'xyzzy'
+        f = getDivisionFailure(captureVars=True)
+        f.cleanFailure()
+        out = NativeStringIO()
+        f.printDetailedTraceback(out)
+        self.assertNotEqual(None, re.search('exampleLocalVar.*xyzzy',
+                                            out.getvalue()))
+
+
+    def test_printingNoVars(self):
+        """
+        Calling C{Failure()} with no arguments does not capture any locals or
+        globals, so L{printDetailedTraceback} cannot show them in its output.
+        """
+        out = NativeStringIO()
+        f = getDivisionFailure()
+        f.printDetailedTraceback(out)
+        # There should be no variables in the detailed output.  Variables are
+        # printed on lines with 2 leading spaces.
+        linesWithVars = [line for line in out.getvalue().splitlines()
+                         if line.startswith('  ')]
+        self.assertEqual([], linesWithVars)
+        self.assertIn(
+            'Capture of Locals and Globals disabled', out.getvalue())
+
+
+    def test_printingCaptureVars(self):
+        """
+        Calling C{Failure(captureVars=True)} captures the locals and globals
+        for its stack frames, so L{printDetailedTraceback} will show them in
+        its output.
+        """
+        out = NativeStringIO()
+        f = getDivisionFailure(captureVars=True)
+        f.printDetailedTraceback(out)
+        # Variables are printed on lines with 2 leading spaces.
+        linesWithVars = [line for line in out.getvalue().splitlines()
+                         if line.startswith('  ')]
+        self.assertNotEqual([], linesWithVars)
+
+
+    def testExplicitPass(self):
+        e = RuntimeError()
+        f = failure.Failure(e)
+        f.trap(RuntimeError)
+        self.assertEqual(f.value, e)
+
+
+    def _getInnermostFrameLine(self, f):
+        try:
+            f.raiseException()
+        except ZeroDivisionError:
+            tb = traceback.extract_tb(sys.exc_info()[2])
+            return tb[-1][-1]
+        else:
+            raise Exception(
+                "f.raiseException() didn't raise ZeroDivisionError!?")
+
+
+    def testRaiseExceptionWithTB(self):
+        f = getDivisionFailure()
+        innerline = self._getInnermostFrameLine(f)
+        self.assertEqual(innerline, '1/0')
+
+
+    def testLackOfTB(self):
+        f = getDivisionFailure()
+        f.cleanFailure()
+        innerline = self._getInnermostFrameLine(f)
+        self.assertEqual(innerline, '1/0')
+
+    testLackOfTB.todo = "the traceback is not preserved, exarkun said he'll try to fix this! god knows how"
+    if _PY3:
+        del testLackOfTB # fix in ticket #6008
+
+
+    def test_stringExceptionConstruction(self):
+        """
+        Constructing a C{Failure} with a string as its exception value raises
+        a C{TypeError}, as this is no longer supported as of Python 2.6.
+        """
+        exc = self.assertRaises(TypeError, failure.Failure, "ono!")
+        self.assertIn("Strings are not supported by Failure", str(exc))
+
+
+    def testConstructionFails(self):
+        """
+        Creating a Failure with no arguments causes it to try to discover the
+        current interpreter exception state.  If no such state exists, creating
+        the Failure should raise a synchronous exception.
+        """
+        self.assertRaises(failure.NoCurrentExceptionError, failure.Failure)
+
+
+    def test_getTracebackObject(self):
+        """
+        If the C{Failure} has not been cleaned, then C{getTracebackObject}
+        returns the traceback object that it captured in its constructor.
+        """
+        f = getDivisionFailure()
+        self.assertEqual(f.getTracebackObject(), f.tb)
+
+
+    def test_getTracebackObjectFromCaptureVars(self):
+        """
+        C{captureVars=True} has no effect on the result of
+        C{getTracebackObject}.
+        """
+        try:
+            1/0
+        except ZeroDivisionError:
+            noVarsFailure = failure.Failure()
+            varsFailure = failure.Failure(captureVars=True)
+        self.assertEqual(noVarsFailure.getTracebackObject(), varsFailure.tb)
+
+
+    def test_getTracebackObjectFromClean(self):
+        """
+        If the Failure has been cleaned, then C{getTracebackObject} returns an
+        object that looks the same to L{traceback.extract_tb}.
+        """
+        f = getDivisionFailure()
+        expected = traceback.extract_tb(f.getTracebackObject())
+        f.cleanFailure()
+        observed = traceback.extract_tb(f.getTracebackObject())
+        self.assertNotEqual(None, expected)
+        self.assertEqual(expected, observed)
+
+
+    def test_getTracebackObjectFromCaptureVarsAndClean(self):
+        """
+        If the Failure was created with captureVars, then C{getTracebackObject}
+        returns an object that looks the same to L{traceback.extract_tb}.
+        """
+        f = getDivisionFailure(captureVars=True)
+        expected = traceback.extract_tb(f.getTracebackObject())
+        f.cleanFailure()
+        observed = traceback.extract_tb(f.getTracebackObject())
+        self.assertEqual(expected, observed)
+
+
+    def test_getTracebackObjectWithoutTraceback(self):
+        """
+        L{failure.Failure}s need not be constructed with traceback objects. If
+        a C{Failure} has no traceback information at all, C{getTracebackObject}
+        just returns None.
+
+        None is a good value, because traceback.extract_tb(None) -> [].
+        """
+        f = failure.Failure(Exception("some error"))
+        self.assertEqual(f.getTracebackObject(), None)
+
+
+    def test_tracebackFromExceptionInPython3(self):
+        """
+        If a L{failure.Failure} is constructed with an exception but no
+        traceback in Python 3, the traceback will be extracted from the
+        exception's C{__traceback__} attribute.
+        """
+        try:
+            1/0
+        except:
+            klass, exception, tb = sys.exc_info()
+        f = failure.Failure(exception)
+        self.assertIdentical(f.tb, tb)
+
+
+    def test_cleanFailureRemovesTracebackInPython3(self):
+        """
+        L{failure.Failure.cleanFailure} sets the C{__traceback__} attribute of
+        the exception to C{None} in Python 3.
+        """
+        f = getDivisionFailure()
+        self.assertNotEqual(f.tb, None)
+        self.assertIdentical(f.value.__traceback__, f.tb)
+        f.cleanFailure()
+        self.assertIdentical(f.value.__traceback__, None)
+
+    if not _PY3:
+        test_tracebackFromExceptionInPython3.skip = "Python 3 only."
+        test_cleanFailureRemovesTracebackInPython3.skip = "Python 3 only."
+
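
For reference, a minimal sketch of the core L{failure.Failure} API that the tests
above exercise: construct inside an except block, trap a type, inspect the
traceback, and clean it up. This is an illustrative example only, not part of
the imported file; the variable names are arbitrary.

import traceback
from twisted.python import failure

try:
    1 / 0
except ZeroDivisionError:
    f = failure.Failure()          # captures the active exception and traceback

f.trap(ZeroDivisionError)          # matches, so it returns instead of re-raising
print(f.getErrorMessage())         # str() of the wrapped exception
print(traceback.extract_tb(f.getTracebackObject())[-1])
f.cleanFailure()                   # drops frame references; the traceback is
                                   # replaced by a fake that extract_tb still accepts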
+
+
+class BrokenStr(Exception):
+    """
+    An exception class whose instances cannot be presented as strings via
+    C{str}.
+    """
+    def __str__(self):
+        # Could raise something else, but there's no point as yet.
+        raise self
+
+
+
+class BrokenExceptionMetaclass(type):
+    """
+    A metaclass for an exception type which cannot be presented as a string via
+    C{str}.
+    """
+    def __str__(self):
+        raise ValueError("You cannot make a string out of me.")
+
+
+
+class BrokenExceptionType(Exception, object):
+    """
+    The aforementioned exception type which cannot be presented as a string via
+    C{str}.
+    """
+    __metaclass__ = BrokenExceptionMetaclass
+
+
+
+class GetTracebackTests(SynchronousTestCase):
+    """
+    Tests for L{Failure.getTraceback}.
+    """
+    def _brokenValueTest(self, detail):
+        """
+        Construct a L{Failure} with an exception that raises an exception from
+        its C{__str__} method and then call C{getTraceback} with the specified
+        detail and verify that it returns a string.
+        """
+        x = BrokenStr()
+        f = failure.Failure(x)
+        traceback = f.getTraceback(detail=detail)
+        self.assertIsInstance(traceback, str)
+
+
+    def test_brokenValueBriefDetail(self):
+        """
+        A L{Failure} might wrap an exception with a C{__str__} method which
+        raises an exception.  In this case, calling C{getTraceback} on the
+        failure with the C{"brief"} detail does not raise an exception.
+        """
+        self._brokenValueTest("brief")
+
+
+    def test_brokenValueDefaultDetail(self):
+        """
+        Like test_brokenValueBriefDetail, but for the C{"default"} detail case.
+        """
+        self._brokenValueTest("default")
+
+
+    def test_brokenValueVerboseDetail(self):
+        """
+        Like test_brokenValueBriefDetail, but for the C{"verbose"} detail case.
+        """
+        self._brokenValueTest("verbose")
+
+
+    def _brokenTypeTest(self, detail):
+        """
+        Construct a L{Failure} with an exception type that raises an exception
+        from its C{__str__} method and then call C{getTraceback} with the
+        specified detail and verify that it returns a string.
+        """
+        f = failure.Failure(BrokenExceptionType())
+        traceback = f.getTraceback(detail=detail)
+        self.assertIsInstance(traceback, str)
+
+
+    def test_brokenTypeBriefDetail(self):
+        """
+        A L{Failure} might wrap an exception the type object of which has a
+        C{__str__} method which raises an exception.  In this case, calling
+        C{getTraceback} on the failure with the C{"brief"} detail does not raise
+        an exception.
+        """
+        self._brokenTypeTest("brief")
+
+
+    def test_brokenTypeDefaultDetail(self):
+        """
+        Like test_brokenTypeBriefDetail, but for the C{"default"} detail case.
+        """
+        self._brokenTypeTest("default")
+
+
+    def test_brokenTypeVerboseDetail(self):
+        """
+        Like test_brokenTypeBriefDetail, but for the C{"verbose"} detail case.
+        """
+        self._brokenTypeTest("verbose")
+
+
+
+class FindFailureTests(SynchronousTestCase):
+    """
+    Tests for functionality related to L{Failure._findFailure}.
+    """
+
+    def test_findNoFailureInExceptionHandler(self):
+        """
+        Within an exception handler, _findFailure should return
+        C{None} in case no Failure is associated with the current
+        exception.
+        """
+        try:
+            1/0
+        except:
+            self.assertEqual(failure.Failure._findFailure(), None)
+        else:
+            self.fail("No exception raised from 1/0!?")
+
+
+    def test_findNoFailure(self):
+        """
+        Outside of an exception handler, _findFailure should return None.
+        """
+        self.assertEqual(sys.exc_info()[-1], None) #environment sanity check
+        self.assertEqual(failure.Failure._findFailure(), None)
+
+
+    def test_findFailure(self):
+        """
+        Within an exception handler, it should be possible to find the
+        original Failure that caused the current exception (if it was
+        caused by raiseException).
+        """
+        f = getDivisionFailure()
+        f.cleanFailure()
+        try:
+            f.raiseException()
+        except:
+            self.assertEqual(failure.Failure._findFailure(), f)
+        else:
+            self.fail("No exception raised from raiseException!?")
+
+
+    def test_failureConstructionFindsOriginalFailure(self):
+        """
+        When a Failure is constructed in the context of an exception
+        handler that is handling an exception raised by
+        raiseException, the new Failure should be chained to that
+        original Failure.
+        """
+        f = getDivisionFailure()
+        f.cleanFailure()
+        try:
+            f.raiseException()
+        except:
+            newF = failure.Failure()
+            self.assertEqual(f.getTraceback(), newF.getTraceback())
+        else:
+            self.fail("No exception raised from raiseException!?")
+
+
+    def test_failureConstructionWithMungedStackSucceeds(self):
+        """
+        Pyrex and Cython are known to insert fake stack frames so as to give
+        more Python-like tracebacks. These stack frames with empty code objects
+        should not break extraction of the exception.
+        """
+        try:
+            raiser.raiseException()
+        except raiser.RaiserException:
+            f = failure.Failure()
+            self.assertTrue(f.check(raiser.RaiserException))
+        else:
+            self.fail("No exception raised from extension?!")
+
+
+    if raiser is None:
+        skipMsg = "raiser extension not available"
+        test_failureConstructionWithMungedStackSucceeds.skip = skipMsg
+
+
+
+class TestFormattableTraceback(SynchronousTestCase):
+    """
+    Whitebox tests that show that L{failure._Traceback} constructs objects that
+    can be used by L{traceback.extract_tb}.
+
+    If the objects can be used by L{traceback.extract_tb}, then they can be
+    formatted using L{traceback.format_tb} and friends.
+    """
+
+    def test_singleFrame(self):
+        """
+        A C{_Traceback} object constructed with a single frame should be able
+        to be passed to L{traceback.extract_tb}, and we should get a singleton
+        list containing a (filename, lineno, methodname, line) tuple.
+        """
+        tb = failure._Traceback([['method', 'filename.py', 123, {}, {}]])
+        # Note that we don't need to test that extract_tb correctly extracts
+        # the line's contents. In this case, since filename.py doesn't exist,
+        # it will just use None.
+        self.assertEqual(traceback.extract_tb(tb),
+                         [('filename.py', 123, 'method', None)])
+
+
+    def test_manyFrames(self):
+        """
+        A C{_Traceback} object constructed with multiple frames should be able
+        to be passed to L{traceback.extract_tb}, and we should get a list
+        containing a tuple for each frame.
+        """
+        tb = failure._Traceback([
+            ['method1', 'filename.py', 123, {}, {}],
+            ['method2', 'filename.py', 235, {}, {}]])
+        self.assertEqual(traceback.extract_tb(tb),
+                         [('filename.py', 123, 'method1', None),
+                          ('filename.py', 235, 'method2', None)])
+
+
+
+class TestFrameAttributes(SynchronousTestCase):
+    """
+    _Frame objects should possess some basic attributes that qualify them as
+    fake python Frame objects.
+    """
+
+    def test_fakeFrameAttributes(self):
+        """
+        L{_Frame} instances have the C{f_globals} and C{f_locals} attributes
+        bound to C{dict} instances.  They also have the C{f_code} attribute
+        bound to something like a code object.
+        """
+        frame = failure._Frame("dummyname", "dummyfilename")
+        self.assertIsInstance(frame.f_globals, dict)
+        self.assertIsInstance(frame.f_locals, dict)
+        self.assertIsInstance(frame.f_code, failure._Code)
+
+
+
+class TestDebugMode(SynchronousTestCase):
+    """
+    Failure's debug mode should allow jumping into the debugger.
+    """
+
+    def setUp(self):
+        """
+        Override pdb.post_mortem so we can make sure it's called.
+        """
+        # Make sure any changes we make are reversed:
+        post_mortem = pdb.post_mortem
+        if _PY3:
+            origInit = failure.Failure.__init__
+        else:
+            origInit = failure.Failure.__dict__['__init__']
+        def restore():
+            pdb.post_mortem = post_mortem
+            if _PY3:
+                failure.Failure.__init__ = origInit
+            else:
+                failure.Failure.__dict__['__init__'] = origInit
+        self.addCleanup(restore)
+
+        self.result = []
+        pdb.post_mortem = self.result.append
+        failure.startDebugMode()
+
+
+    def test_regularFailure(self):
+        """
+        If startDebugMode() is called, calling Failure() will first call
+        pdb.post_mortem with the traceback.
+        """
+        try:
+            1/0
+        except:
+            typ, exc, tb = sys.exc_info()
+            f = failure.Failure()
+        self.assertEqual(self.result, [tb])
+        self.assertEqual(f.captureVars, False)
+
+
+    def test_captureVars(self):
+        """
+        If startDebugMode() is called, passing captureVars to Failure() will
+        not blow up.
+        """
+        try:
+            1/0
+        except:
+            typ, exc, tb = sys.exc_info()
+            f = failure.Failure(captureVars=True)
+        self.assertEqual(self.result, [tb])
+        self.assertEqual(f.captureVars, True)
+
+
+
+class ExtendedGeneratorTests(SynchronousTestCase):
+    """
+    Tests C{failure.Failure} support for generator features added in Python 2.5
+    """
+
+    def test_inlineCallbacksTracebacks(self):
+        """
+        inlineCallbacks that re-raise tracebacks into their deferred
+        should not lose their tracebacks.
+        """
+        f = getDivisionFailure()
+        d = defer.Deferred()
+        try:
+            f.raiseException()
+        except:
+            d.errback()
+
+        failures = []
+        def collect_error(result):
+            failures.append(result)
+
+        def ic(d):
+            yield d
+        ic = defer.inlineCallbacks(ic)
+        ic(d).addErrback(collect_error)
+
+        newFailure, = failures
+        self.assertEqual(
+            traceback.extract_tb(newFailure.getTracebackObject())[-1][-1],
+            "1/0"
+        )
+
+
+    def _throwIntoGenerator(self, f, g):
+        try:
+            f.throwExceptionIntoGenerator(g)
+        except StopIteration:
+            pass
+        else:
+            self.fail("throwExceptionIntoGenerator should have raised "
+                      "StopIteration")
+
+    def test_throwExceptionIntoGenerator(self):
+        """
+        It should be possible to throw the exception that a Failure
+        represents into a generator.
+        """
+        stuff = []
+        def generator():
+            try:
+                yield
+            except:
+                stuff.append(sys.exc_info())
+            else:
+                self.fail("Yield should have yielded exception.")
+        g = generator()
+        f = getDivisionFailure()
+        next(g)
+        self._throwIntoGenerator(f, g)
+
+        self.assertEqual(stuff[0][0], ZeroDivisionError)
+        self.assertTrue(isinstance(stuff[0][1], ZeroDivisionError))
+
+        self.assertEqual(traceback.extract_tb(stuff[0][2])[-1][-1], "1/0")
+
+
+    def test_findFailureInGenerator(self):
+        """
+        Within an exception handler, it should be possible to find the
+        original Failure that caused the current exception (if it was
+        caused by throwExceptionIntoGenerator).
+        """
+        f = getDivisionFailure()
+        f.cleanFailure()
+
+        foundFailures = []
+        def generator():
+            try:
+                yield
+            except:
+                foundFailures.append(failure.Failure._findFailure())
+            else:
+                self.fail("No exception sent to generator")
+
+        g = generator()
+        next(g)
+        self._throwIntoGenerator(f, g)
+
+        self.assertEqual(foundFailures, [f])
+
+
+    def test_failureConstructionFindsOriginalFailure(self):
+        """
+        When a Failure is constructed in the context of an exception
+        handler that is handling an exception raised by
+        throwExceptionIntoGenerator, the new Failure should be chained to that
+        original Failure.
+        """
+        f = getDivisionFailure()
+        f.cleanFailure()
+
+        newFailures = []
+
+        def generator():
+            try:
+                yield
+            except:
+                newFailures.append(failure.Failure())
+            else:
+                self.fail("No exception sent to generator")
+        g = generator()
+        next(g)
+        self._throwIntoGenerator(f, g)
+
+        self.assertEqual(len(newFailures), 1)
+        self.assertEqual(newFailures[0].getTraceback(), f.getTraceback())
+
+    if _PY3:
+        test_inlineCallbacksTracebacks.todo = (
+            "Python 3 support to be fixed in #5949")
+        test_findFailureInGenerator.todo = (
+            "Python 3 support to be fixed in #5949")
+        test_failureConstructionFindsOriginalFailure.todo = (
+            "Python 3 support to be fixed in #5949")
+        # Remove these three lines in #6008:
+        del test_findFailureInGenerator
+        del test_failureConstructionFindsOriginalFailure
+        del test_inlineCallbacksTracebacks
+
+
+    def test_ambiguousFailureInGenerator(self):
+        """
+        When a generator reraises a different exception,
+        L{Failure._findFailure} inside the generator should find the reraised
+        exception rather than the original one.
+        """
+        def generator():
+            try:
+                try:
+                    yield
+                except:
+                    [][1]
+            except:
+                self.assertIsInstance(failure.Failure().value, IndexError)
+        g = generator()
+        next(g)
+        f = getDivisionFailure()
+        self._throwIntoGenerator(f, g)
+
+
+    def test_ambiguousFailureFromGenerator(self):
+        """
+        When a generator reraises a different exception,
+        L{Failure._findFailure} above the generator should find the reraised
+        exception rather than the original one.
+        """
+        def generator():
+            try:
+                yield
+            except:
+                [][1]
+        g = generator()
+        next(g)
+        f = getDivisionFailure()
+        try:
+            self._throwIntoGenerator(f, g)
+        except:
+            self.assertIsInstance(failure.Failure().value, IndexError)
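As a quick illustration of the generator support tested above, here is a
minimal sketch (not part of the upstream file) of throwing a wrapped exception
into a generator with C{throwExceptionIntoGenerator}; the C{consumer}
generator is a made-up example.

from twisted.python import failure

def consumer():
    try:
        yield
    except ZeroDivisionError:
        print("handled inside the generator")

try:
    1 / 0
except ZeroDivisionError:
    f = failure.Failure()

g = consumer()
next(g)                                # run up to the yield
try:
    f.throwExceptionIntoGenerator(g)   # raises the wrapped exception at the yield
except StopIteration:
    pass                               # the generator returned after handling it
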
diff --git a/ThirdParty/Twisted/twisted/test/test_fdesc.py b/ThirdParty/Twisted/twisted/test/test_fdesc.py
new file mode 100644
index 0000000..0198e8d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_fdesc.py
@@ -0,0 +1,266 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.fdesc}.
+"""
+
+import os, sys
+import errno
+
+try:
+    import fcntl
+except ImportError:
+    skip = "not supported on this platform"
+else:
+    from twisted.internet import fdesc
+
+from twisted.python._utilpy3 import untilConcludes
+from twisted.trial import unittest
+
+
+
+class NonBlockingTestCase(unittest.SynchronousTestCase):
+    """
+    Tests for L{fdesc.setNonBlocking} and L{fdesc.setBlocking}.
+    """
+
+    def test_setNonBlocking(self):
+        """
+        L{fdesc.setNonBlocking} sets a file descriptor to non-blocking.
+        """
+        r, w = os.pipe()
+        self.addCleanup(os.close, r)
+        self.addCleanup(os.close, w)
+        self.assertFalse(fcntl.fcntl(r, fcntl.F_GETFL) & os.O_NONBLOCK)
+        fdesc.setNonBlocking(r)
+        self.assertTrue(fcntl.fcntl(r, fcntl.F_GETFL) & os.O_NONBLOCK)
+
+
+    def test_setBlocking(self):
+        """
+        L{fdesc.setBlocking} sets a file descriptor to blocking.
+        """
+        r, w = os.pipe()
+        self.addCleanup(os.close, r)
+        self.addCleanup(os.close, w)
+        fdesc.setNonBlocking(r)
+        fdesc.setBlocking(r)
+        self.assertFalse(fcntl.fcntl(r, fcntl.F_GETFL) & os.O_NONBLOCK)
+
+
+
+class ReadWriteTestCase(unittest.SynchronousTestCase):
+    """
+    Tests for L{fdesc.readFromFD}, L{fdesc.writeToFD}.
+    """
+
+    def setUp(self):
+        """
+        Create a non-blocking pipe that can be used in tests.
+        """
+        self.r, self.w = os.pipe()
+        fdesc.setNonBlocking(self.r)
+        fdesc.setNonBlocking(self.w)
+
+
+    def tearDown(self):
+        """
+        Close pipes.
+        """
+        try:
+            os.close(self.w)
+        except OSError:
+            pass
+        try:
+            os.close(self.r)
+        except OSError:
+            pass
+
+
+    def write(self, d):
+        """
+        Write data to the pipe.
+        """
+        return fdesc.writeToFD(self.w, d)
+
+
+    def read(self):
+        """
+        Read data from the pipe.
+        """
+        l = []
+        res = fdesc.readFromFD(self.r, l.append)
+        if res is None:
+            if l:
+                return l[0]
+            else:
+                return b""
+        else:
+            return res
+
+
+    def test_writeAndRead(self):
+        """
+        Test that the number of bytes L{fdesc.writeToFD} reports as written
+        with its return value is seen by L{fdesc.readFromFD}.
+        """
+        n = self.write(b"hello")
+        self.failUnless(n > 0)
+        s = self.read()
+        self.assertEqual(len(s), n)
+        self.assertEqual(b"hello"[:n], s)
+
+
+    def test_writeAndReadLarge(self):
+        """
+        Similar to L{test_writeAndRead}, but use a much larger string to verify
+        the behavior for that case.
+        """
+        orig = b"0123456879" * 10000
+        written = self.write(orig)
+        self.failUnless(written > 0)
+        result = []
+        resultlength = 0
+        i = 0
+        while resultlength < written or i < 50:
+            result.append(self.read())
+            resultlength += len(result[-1])
+            # Increment a counter to be sure we'll exit at some point
+            i += 1
+        result = b"".join(result)
+        self.assertEqual(len(result), written)
+        self.assertEqual(orig[:written], result)
+
+
+    def test_readFromEmpty(self):
+        """
+        Verify that reading from a file descriptor with no data does not raise
+        an exception and does not result in the callback function being called.
+        """
+        l = []
+        result = fdesc.readFromFD(self.r, l.append)
+        self.assertEqual(l, [])
+        self.assertEqual(result, None)
+
+
+    def test_readFromCleanClose(self):
+        """
+        Test that using L{fdesc.readFromFD} on a cleanly closed file descriptor
+        returns a connection done indicator.
+        """
+        os.close(self.w)
+        self.assertEqual(self.read(), fdesc.CONNECTION_DONE)
+
+
+    def test_writeToClosed(self):
+        """
+        Verify that writing with L{fdesc.writeToFD} when the read end is closed
+        results in a connection lost indicator.
+        """
+        os.close(self.r)
+        self.assertEqual(self.write(b"s"), fdesc.CONNECTION_LOST)
+
+
+    def test_readFromInvalid(self):
+        """
+        Verify that reading with L{fdesc.readFromFD} when the read end is
+        closed results in a connection lost indicator.
+        """
+        os.close(self.r)
+        self.assertEqual(self.read(), fdesc.CONNECTION_LOST)
+
+
+    def test_writeToInvalid(self):
+        """
+        Verify that writing with L{fdesc.writeToFD} when the write end is
+        closed results in a connection lost indicator.
+        """
+        os.close(self.w)
+        self.assertEqual(self.write(b"s"), fdesc.CONNECTION_LOST)
+
+
+    def test_writeErrors(self):
+        """
+        Test the error paths for L{fdesc.writeToFD}.
+        """
+        oldOsWrite = os.write
+        def eagainWrite(fd, data):
+            err = OSError()
+            err.errno = errno.EAGAIN
+            raise err
+        os.write = eagainWrite
+        try:
+            self.assertEqual(self.write(b"s"), 0)
+        finally:
+            os.write = oldOsWrite
+
+        def eintrWrite(fd, data):
+            err = OSError()
+            err.errno = errno.EINTR
+            raise err
+        os.write = eintrWrite
+        try:
+            self.assertEqual(self.write(b"s"), 0)
+        finally:
+            os.write = oldOsWrite
+
+
+
+class CloseOnExecTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{fdesc._setCloseOnExec} and L{fdesc._unsetCloseOnExec}.
+    """
+    program = '''
+import os, errno
+try:
+    os.write(%d, b'lul')
+except OSError as e:
+    if e.errno == errno.EBADF:
+        os._exit(0)
+    os._exit(5)
+except:
+    os._exit(10)
+else:
+    os._exit(20)
+'''
+
+    def _execWithFileDescriptor(self, fObj):
+        pid = os.fork()
+        if pid == 0:
+            try:
+                os.execv(sys.executable, [sys.executable, '-c', self.program % (fObj.fileno(),)])
+            except:
+                import traceback
+                traceback.print_exc()
+                os._exit(30)
+        else:
+            # On Linux, wait(2) doesn't seem able to fail with EINTR, but
+            # POSIX allows it and on OS X it happens quite a lot.
+            return untilConcludes(os.waitpid, pid, 0)[1]
+
+
+    def test_setCloseOnExec(self):
+        """
+        A file descriptor passed to L{fdesc._setCloseOnExec} is not inherited
+        by a new process image created with one of the exec family of
+        functions.
+        """
+        with open(self.mktemp(), 'wb') as fObj:
+            fdesc._setCloseOnExec(fObj.fileno())
+            status = self._execWithFileDescriptor(fObj)
+            self.assertTrue(os.WIFEXITED(status))
+            self.assertEqual(os.WEXITSTATUS(status), 0)
+
+
+    def test_unsetCloseOnExec(self):
+        """
+        A file descriptor passed to L{fdesc._unsetCloseOnExec} is inherited by
+        a new process image created with one of the exec family of functions.
+        """
+        with open(self.mktemp(), 'wb') as fObj:
+            fdesc._setCloseOnExec(fObj.fileno())
+            fdesc._unsetCloseOnExec(fObj.fileno())
+            status = self._execWithFileDescriptor(fObj)
+            self.assertTrue(os.WIFEXITED(status))
+            self.assertEqual(os.WEXITSTATUS(status), 20)
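The helpers covered by these tests are easy to exercise by hand. Below is a
minimal, POSIX-only sketch (illustrative, not part of the upstream file) of
fdesc.setNonBlocking, fdesc.writeToFD and fdesc.readFromFD on an os.pipe().

import os
from twisted.internet import fdesc

r, w = os.pipe()
fdesc.setNonBlocking(r)
fdesc.setNonBlocking(w)

n = fdesc.writeToFD(w, b"hello")       # returns the number of bytes written
chunks = []
fdesc.readFromFD(r, chunks.append)     # hands any available data to the callback
assert b"".join(chunks) == b"hello"[:n]

os.close(w)
# With the write end closed, a read reports a clean close.
assert fdesc.readFromFD(r, chunks.append) == fdesc.CONNECTION_DONE
os.close(r)
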
diff --git a/ThirdParty/Twisted/twisted/test/test_finger.py b/ThirdParty/Twisted/twisted/test/test_finger.py
new file mode 100644
index 0000000..c0c2e09
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_finger.py
@@ -0,0 +1,67 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.protocols.finger}.
+"""
+
+from twisted.trial import unittest
+from twisted.protocols import finger
+from twisted.test.proto_helpers import StringTransport
+
+
+class FingerTestCase(unittest.TestCase):
+    """
+    Tests for L{finger.Finger}.
+    """
+    def setUp(self):
+        """
+        Create and connect a L{finger.Finger} instance.
+        """
+        self.transport = StringTransport()
+        self.protocol = finger.Finger()
+        self.protocol.makeConnection(self.transport)
+
+
+    def test_simple(self):
+        """
+        When L{finger.Finger} receives a CR LF terminated line, it responds
+        with the default user status message - that no such user exists.
+        """
+        self.protocol.dataReceived("moshez\r\n")
+        self.assertEqual(
+            self.transport.value(),
+            "Login: moshez\nNo such user\n")
+
+
+    def test_simpleW(self):
+        """
+        The behavior for a query which begins with C{"/w"} is the same as the
+        behavior for one which does not.  The user is reported as not existing.
+        """
+        self.protocol.dataReceived("/w moshez\r\n")
+        self.assertEqual(
+            self.transport.value(),
+            "Login: moshez\nNo such user\n")
+
+
+    def test_forwarding(self):
+        """
+        When L{finger.Finger} receives a request for a remote user, it responds
+        with a message rejecting the request.
+        """
+        self.protocol.dataReceived("moshez at example.com\r\n")
+        self.assertEqual(
+            self.transport.value(),
+            "Finger forwarding service denied\n")
+
+
+    def test_list(self):
+        """
+        When L{finger.Finger} receives a blank line, it responds with a message
+        rejecting the request for all online users.
+        """
+        self.protocol.dataReceived("\r\n")
+        self.assertEqual(
+            self.transport.value(),
+            "Finger online list denied\n")
diff --git a/ThirdParty/Twisted/twisted/test/test_formmethod.py b/ThirdParty/Twisted/twisted/test/test_formmethod.py
new file mode 100644
index 0000000..845c4e2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_formmethod.py
@@ -0,0 +1,98 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Test cases for formmethod module.
+"""
+
+from twisted.trial import unittest
+
+from twisted.python import formmethod
+
+
+class ArgumentTestCase(unittest.TestCase):
+
+    def argTest(self, argKlass, testPairs, badValues, *args, **kwargs):
+        arg = argKlass("name", *args, **kwargs)
+        for val, result in testPairs:
+            self.assertEqual(arg.coerce(val), result)
+        for val in badValues:
+            self.assertRaises(formmethod.InputError, arg.coerce, val)
+
+
+    def test_argument(self):
+        """
+        Test that coerce correctly raises NotImplementedError.
+        """
+        arg = formmethod.Argument("name")
+        self.assertRaises(NotImplementedError, arg.coerce, "")
+
+
+    def testString(self):
+        self.argTest(formmethod.String, [("a", "a"), (1, "1"), ("", "")], ())
+        self.argTest(formmethod.String, [("ab", "ab"), ("abc", "abc")], ("2", ""), min=2)
+        self.argTest(formmethod.String, [("ab", "ab"), ("a", "a")], ("223213", "345x"), max=3)
+        self.argTest(formmethod.String, [("ab", "ab"), ("add", "add")], ("223213", "x"), min=2, max=3)
+    
+    def testInt(self):
+        self.argTest(formmethod.Integer, [("3", 3), ("-2", -2), ("", None)], ("q", "2.3"))
+        self.argTest(formmethod.Integer, [("3", 3), ("-2", -2)], ("q", "2.3", ""), allowNone=0)
+
+    def testFloat(self):
+        self.argTest(formmethod.Float, [("3", 3.0), ("-2.3", -2.3), ("", None)], ("q", "2.3z"))
+        self.argTest(formmethod.Float, [("3", 3.0), ("-2.3", -2.3)], ("q", "2.3z", ""),
+                     allowNone=0)
+
+    def testChoice(self):
+        choices = [("a", "apple", "an apple"),
+                   ("b", "banana", "ook")]
+        self.argTest(formmethod.Choice, [("a", "apple"), ("b", "banana")],
+                     ("c", 1), choices=choices)
+
+    def testFlags(self):
+        flags =  [("a", "apple", "an apple"),
+                  ("b", "banana", "ook")]
+        self.argTest(formmethod.Flags,
+                     [(["a"], ["apple"]), (["b", "a"], ["banana", "apple"])],
+                     (["a", "c"], ["fdfs"]),
+                     flags=flags)
+
+    def testBoolean(self):
+        tests =  [("yes", 1), ("", 0), ("False", 0), ("no", 0)]
+        self.argTest(formmethod.Boolean, tests, ())
+
+
+    def test_file(self):
+        """
+        Test the correctness of the coerce function.
+        """
+        arg = formmethod.File("name", allowNone=0)
+        self.assertEqual(arg.coerce("something"), "something")
+        self.assertRaises(formmethod.InputError, arg.coerce, None)
+        arg2 = formmethod.File("name")
+        self.assertEqual(arg2.coerce(None), None)
+
+
+    def testDate(self):
+        goodTests = { 
+            ("2002", "12", "21"): (2002, 12, 21),
+            ("1996", "2", "29"): (1996, 2, 29),
+            ("", "", ""): None,
+            }.items()
+        badTests = [("2002", "2", "29"), ("xx", "2", "3"),
+                    ("2002", "13", "1"), ("1999", "12","32"),
+                    ("2002", "1"), ("2002", "2", "3", "4")]
+        self.argTest(formmethod.Date, goodTests, badTests)
+
+    def testRangedInteger(self):
+        goodTests = {"0": 0, "12": 12, "3": 3}.items()
+        badTests = ["-1", "x", "13", "-2000", "3.4"]
+        self.argTest(formmethod.IntegerRange, goodTests, badTests, 0, 12)
+
+    def testVerifiedPassword(self):
+        goodTests = {("foo", "foo"): "foo", ("ab", "ab"): "ab"}.items()
+        badTests = [("ab", "a"), ("12345", "12345"), ("", ""), ("a", "a"), ("a",), ("a", "a", "a")]
+        self.argTest(formmethod.VerifiedPassword, goodTests, badTests, min=2, max=4)
+
+        
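For context, a minimal sketch (illustrative only, not part of the upstream
file) of the coercion behaviour that ArgumentTestCase checks: coerce()
converts valid input and raises InputError on invalid input. The "age" field
name is arbitrary.

from twisted.python import formmethod

age = formmethod.Integer("age")
assert age.coerce("3") == 3
assert age.coerce("") is None          # empty input maps to None by default
try:
    age.coerce("2.3")
except formmethod.InputError:
    print("rejected: not an integer")
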
diff --git a/ThirdParty/Twisted/twisted/test/test_ftp.py b/ThirdParty/Twisted/twisted/test/test_ftp.py
new file mode 100644
index 0000000..03208bb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_ftp.py
@@ -0,0 +1,3202 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+FTP tests.
+"""
+
+import os
+import errno
+from StringIO import StringIO
+import getpass
+
+from zope.interface import implements
+from zope.interface.verify import verifyClass
+
+from twisted.trial import unittest, util
+from twisted.python.randbytes import insecureRandom
+from twisted.cred.portal import IRealm
+from twisted.protocols import basic
+from twisted.internet import reactor, task, protocol, defer, error
+from twisted.internet.interfaces import IConsumer
+from twisted.cred.error import UnauthorizedLogin
+from twisted.cred import portal, checkers, credentials
+from twisted.python import failure, filepath, runtime
+from twisted.test import proto_helpers
+
+from twisted.protocols import ftp, loopback
+
+
+_changeDirectorySuppression = util.suppress(
+    category=DeprecationWarning,
+    message=(
+        r"FTPClient\.changeDirectory is deprecated in Twisted 8\.2 and "
+        r"newer\.  Use FTPClient\.cwd instead\."))
+
+if runtime.platform.isWindows():
+    nonPOSIXSkip = "Cannot run on Windows"
+else:
+    nonPOSIXSkip = None
+
+
+class Dummy(basic.LineReceiver):
+    logname = None
+    def __init__(self):
+        self.lines = []
+        self.rawData = []
+    def connectionMade(self):
+        self.f = self.factory   # to save typing in pdb :-)
+    def lineReceived(self,line):
+        self.lines.append(line)
+    def rawDataReceived(self, data):
+        self.rawData.append(data)
+    def lineLengthExceeded(self, line):
+        pass
+
+
+class _BufferingProtocol(protocol.Protocol):
+    def connectionMade(self):
+        self.buffer = ''
+        self.d = defer.Deferred()
+    def dataReceived(self, data):
+        self.buffer += data
+    def connectionLost(self, reason):
+        self.d.callback(self)
+
+
+
+class FTPServerTestCase(unittest.TestCase):
+    """
+    Simple tests for an FTP server with the default settings.
+
+    @ivar clientFactory: class used as ftp client.
+    """
+    clientFactory = ftp.FTPClientBasic
+    userAnonymous = "anonymous"
+
+    def setUp(self):
+        # Create a directory
+        self.directory = self.mktemp()
+        os.mkdir(self.directory)
+        self.dirPath = filepath.FilePath(self.directory)
+
+        # Start the server
+        p = portal.Portal(ftp.FTPRealm(
+            anonymousRoot=self.directory,
+            userHome=self.directory,
+            ))
+        p.registerChecker(checkers.AllowAnonymousAccess(),
+                          credentials.IAnonymous)
+
+        users_checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        self.username = "test-user"
+        self.password = "test-password"
+        users_checker.addUser(self.username, self.password)
+        p.registerChecker(users_checker, credentials.IUsernamePassword)
+
+        self.factory = ftp.FTPFactory(portal=p,
+                                      userAnonymous=self.userAnonymous)
+        port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
+        self.addCleanup(port.stopListening)
+
+        # Hook the server's buildProtocol to make the protocol instance
+        # accessible to tests.
+        buildProtocol = self.factory.buildProtocol
+        d1 = defer.Deferred()
+        def _rememberProtocolInstance(addr):
+            # Done hooking this.
+            del self.factory.buildProtocol
+
+            protocol = buildProtocol(addr)
+            self.serverProtocol = protocol.wrappedProtocol
+            def cleanupServer():
+                if self.serverProtocol.transport is not None:
+                    self.serverProtocol.transport.loseConnection()
+            self.addCleanup(cleanupServer)
+            d1.callback(None)
+            return protocol
+        self.factory.buildProtocol = _rememberProtocolInstance
+
+        # Connect a client to it
+        portNum = port.getHost().port
+        clientCreator = protocol.ClientCreator(reactor, self.clientFactory)
+        d2 = clientCreator.connectTCP("127.0.0.1", portNum)
+        def gotClient(client):
+            self.client = client
+            self.addCleanup(self.client.transport.loseConnection)
+        d2.addCallback(gotClient)
+        return defer.gatherResults([d1, d2])
+
+    def assertCommandResponse(self, command, expectedResponseLines,
+                              chainDeferred=None):
+        """Asserts that a sending an FTP command receives the expected
+        response.
+
+        Returns a Deferred.  Optionally accepts a deferred to chain its actions
+        to.
+        """
+        if chainDeferred is None:
+            chainDeferred = defer.succeed(None)
+
+        def queueCommand(ignored):
+            d = self.client.queueStringCommand(command)
+            def gotResponse(responseLines):
+                self.assertEqual(expectedResponseLines, responseLines)
+            return d.addCallback(gotResponse)
+        return chainDeferred.addCallback(queueCommand)
+
+    def assertCommandFailed(self, command, expectedResponse=None,
+                            chainDeferred=None):
+        if chainDeferred is None:
+            chainDeferred = defer.succeed(None)
+
+        def queueCommand(ignored):
+            return self.client.queueStringCommand(command)
+        chainDeferred.addCallback(queueCommand)
+        self.assertFailure(chainDeferred, ftp.CommandFailed)
+        def failed(exception):
+            if expectedResponse is not None:
+                self.assertEqual(
+                    expectedResponse, exception.args[0])
+        return chainDeferred.addCallback(failed)
+
+    def _anonymousLogin(self):
+        d = self.assertCommandResponse(
+            'USER anonymous',
+            ['331 Guest login ok, type your email address as password.'])
+        return self.assertCommandResponse(
+            'PASS test at twistedmatrix.com',
+            ['230 Anonymous login ok, access restrictions apply.'],
+            chainDeferred=d)
+
+    def _userLogin(self):
+        """Authenticates the FTP client using the test account."""
+        d = self.assertCommandResponse(
+            'USER %s' % (self.username),
+            ['331 Password required for %s.' % (self.username)])
+        return self.assertCommandResponse(
+            'PASS %s' % (self.password),
+            ['230 User logged in, proceed'],
+            chainDeferred=d)
+
+
+class FTPAnonymousTestCase(FTPServerTestCase):
+    """
+    Simple tests for an FTP server with a different anonymous username.
+    The anonymous username used in this test case is "guest".
+    """
+    userAnonymous = "guest"
+
+    def test_anonymousLogin(self):
+        """
+        Tests that changing the anonymous username works.
+        The FTP server should not require a password for the username
+        'guest', letting it log in as anonymous with just an email address
+        as the password.
+        """
+        d = self.assertCommandResponse(
+            'USER guest',
+            ['331 Guest login ok, type your email address as password.'])
+        return self.assertCommandResponse(
+            'PASS test at twistedmatrix.com',
+            ['230 Anonymous login ok, access restrictions apply.'],
+            chainDeferred=d)
+
+
+class BasicFTPServerTestCase(FTPServerTestCase):
+    def testNotLoggedInReply(self):
+        """
+        When not logged in, most commands other than USER and PASS should
+        get NOT_LOGGED_IN errors, but some can be called before USER and PASS.
+        """
+        loginRequiredCommandList = ['CDUP', 'CWD', 'LIST', 'MODE', 'PASV',
+            'PWD', 'RETR', 'STRU', 'SYST', 'TYPE']
+        loginNotRequiredCommandList = ['FEAT']
+
+        # Issue commands, check responses
+        def checkFailResponse(exception, command):
+            failureResponseLines = exception.args[0]
+            self.failUnless(failureResponseLines[-1].startswith("530"),
+                            "%s - Response didn't start with 530: %r"
+                            % (command, failureResponseLines[-1],))
+
+        def checkPassResponse(result, command):
+            result = result[0]
+            self.failIf(result.startswith("530"),
+                            "%s - Response starts with 530: %r"
+                            % (command, result,))
+
+        deferreds = []
+        for command in loginRequiredCommandList:
+            deferred = self.client.queueStringCommand(command)
+            self.assertFailure(deferred, ftp.CommandFailed)
+            deferred.addCallback(checkFailResponse, command)
+            deferreds.append(deferred)
+
+        for command in loginNotRequiredCommandList:
+            deferred = self.client.queueStringCommand(command)
+            deferred.addCallback(checkPassResponse, command)
+            deferreds.append(deferred)
+
+        return defer.DeferredList(deferreds, fireOnOneErrback=True)
+
+    def testPASSBeforeUSER(self):
+        """
+        Issuing PASS before USER should give an error.
+        """
+        return self.assertCommandFailed(
+            'PASS foo',
+            ["503 Incorrect sequence of commands: "
+             "USER required before PASS"])
+
+    def testNoParamsForUSER(self):
+        """
+        Issuing USER without a username is a syntax error.
+        """
+        return self.assertCommandFailed(
+            'USER',
+            ['500 Syntax error: USER requires an argument.'])
+
+    def testNoParamsForPASS(self):
+        """
+        Issuing PASS without a password is a syntax error.
+        """
+        d = self.client.queueStringCommand('USER foo')
+        return self.assertCommandFailed(
+            'PASS',
+            ['500 Syntax error: PASS requires an argument.'],
+            chainDeferred=d)
+
+    def testAnonymousLogin(self):
+        return self._anonymousLogin()
+
+    def testQuit(self):
+        """
+        Issuing QUIT should return a 221 message.
+        """
+        d = self._anonymousLogin()
+        return self.assertCommandResponse(
+            'QUIT',
+            ['221 Goodbye.'],
+            chainDeferred=d)
+
+    def testAnonymousLoginDenied(self):
+        # Reconfigure the server to disallow anonymous access, and to have an
+        # IUsernamePassword checker that always rejects.
+        self.factory.allowAnonymous = False
+        denyAlwaysChecker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        self.factory.portal.registerChecker(denyAlwaysChecker,
+                                            credentials.IUsernamePassword)
+
+        # Same response code as allowAnonymous=True, but different text.
+        d = self.assertCommandResponse(
+            'USER anonymous',
+            ['331 Password required for anonymous.'])
+
+        # It will be denied.  No-one can login.
+        d = self.assertCommandFailed(
+            'PASS test at twistedmatrix.com',
+            ['530 Sorry, Authentication failed.'],
+            chainDeferred=d)
+
+        # It's not just saying that.  You aren't logged in.
+        d = self.assertCommandFailed(
+            'PWD',
+            ['530 Please login with USER and PASS.'],
+            chainDeferred=d)
+        return d
+
+
+    def test_anonymousWriteDenied(self):
+        """
+        When an anonymous user attempts to edit the server-side filesystem, they
+        will receive a 550 error with a descriptive message.
+        """
+        d = self._anonymousLogin()
+        return self.assertCommandFailed(
+              'MKD newdir',
+              ['550 Anonymous users are forbidden to change the filesystem'],
+              chainDeferred=d)
+
+
+    def testUnknownCommand(self):
+        d = self._anonymousLogin()
+        return self.assertCommandFailed(
+            'GIBBERISH',
+            ["502 Command 'GIBBERISH' not implemented"],
+            chainDeferred=d)
+
+    def testRETRBeforePORT(self):
+        d = self._anonymousLogin()
+        return self.assertCommandFailed(
+            'RETR foo',
+            ["503 Incorrect sequence of commands: "
+             "PORT or PASV required before RETR"],
+            chainDeferred=d)
+
+    def testSTORBeforePORT(self):
+        d = self._anonymousLogin()
+        return self.assertCommandFailed(
+            'STOR foo',
+            ["503 Incorrect sequence of commands: "
+             "PORT or PASV required before STOR"],
+            chainDeferred=d)
+
+    def testBadCommandArgs(self):
+        d = self._anonymousLogin()
+        self.assertCommandFailed(
+            'MODE z',
+            ["504 Not implemented for parameter 'z'."],
+            chainDeferred=d)
+        self.assertCommandFailed(
+            'STRU I',
+            ["504 Not implemented for parameter 'I'."],
+            chainDeferred=d)
+        return d
+
+    def testDecodeHostPort(self):
+        self.assertEqual(ftp.decodeHostPort('25,234,129,22,100,23'),
+                ('25.234.129.22', 25623))
+        nums = range(6)
+        for i in range(6):
+            badValue = list(nums)
+            badValue[i] = 256
+            s = ','.join(map(str, badValue))
+            self.assertRaises(ValueError, ftp.decodeHostPort, s)
+
+    def testPASV(self):
+        # Login
+        wfd = defer.waitForDeferred(self._anonymousLogin())
+        yield wfd
+        wfd.getResult()
+
+        # Issue a PASV command, and extract the host and port from the response
+        pasvCmd = defer.waitForDeferred(self.client.queueStringCommand('PASV'))
+        yield pasvCmd
+        responseLines = pasvCmd.getResult()
+        host, port = ftp.decodeHostPort(responseLines[-1][4:])
+
+        # Make sure the server is listening on the port it claims to be
+        self.assertEqual(port, self.serverProtocol.dtpPort.getHost().port)
+
+        # Semi-reasonable way to force cleanup
+        self.serverProtocol.transport.loseConnection()
+    testPASV = defer.deferredGenerator(testPASV)
+
+    def test_SYST(self):
+        """SYST command will always return UNIX Type: L8"""
+        d = self._anonymousLogin()
+        self.assertCommandResponse('SYST', ["215 UNIX Type: L8"],
+                                   chainDeferred=d)
+        return d
+
+    def test_RNFRandRNTO(self):
+        """
+        Sending the RNFR command followed by RNTO, with valid filenames, will
+        perform a successful rename operation.
+        """
+        # Create user home folder with a 'foo' file.
+        self.dirPath.child(self.username).createDirectory()
+        self.dirPath.child(self.username).child('foo').touch()
+
+        d = self._userLogin()
+        self.assertCommandResponse(
+            'RNFR foo',
+            ["350 Requested file action pending further information."],
+            chainDeferred=d)
+        self.assertCommandResponse(
+            'RNTO bar',
+            ["250 Requested File Action Completed OK"],
+            chainDeferred=d)
+
+        def check_rename(result):
+            self.assertTrue(
+                self.dirPath.child(self.username).child('bar').exists())
+            return result
+
+        d.addCallback(check_rename)
+        return d
+
+    def test_RNFRwithoutRNTO(self):
+        """
+        Sending the RNFR command followed by any command other than RNTO
+        should return an error informing users that RNFR should be followed
+        by RNTO.
+        """
+        d = self._anonymousLogin()
+        self.assertCommandResponse(
+            'RNFR foo',
+            ["350 Requested file action pending further information."],
+            chainDeferred=d)
+        self.assertCommandFailed(
+            'OTHER don-tcare',
+            ["503 Incorrect sequence of commands: RNTO required after RNFR"],
+            chainDeferred=d)
+        return d
+
+    def test_portRangeForwardError(self):
+        """
+        Exceptions other than L{error.CannotListenError} which are raised by
+        C{listenFactory} should be raised to the caller of L{FTP.getDTPPort}.
+        """
+        def listenFactory(portNumber, factory):
+            raise RuntimeError()
+        self.serverProtocol.listenFactory = listenFactory
+
+        self.assertRaises(RuntimeError, self.serverProtocol.getDTPPort,
+                          protocol.Factory())
+
+
+    def test_portRange(self):
+        """
+        L{FTP.passivePortRange} should determine the ports which
+        L{FTP.getDTPPort} attempts to bind. If no port from that iterator can
+        be bound, L{error.CannotListenError} should be raised, otherwise the
+        first successful result from L{FTP.listenFactory} should be returned.
+        """
+        def listenFactory(portNumber, factory):
+            if portNumber in (22032, 22033, 22034):
+                raise error.CannotListenError('localhost', portNumber, 'error')
+            return portNumber
+        self.serverProtocol.listenFactory = listenFactory
+
+        port = self.serverProtocol.getDTPPort(protocol.Factory())
+        self.assertEqual(port, 0)
+
+        self.serverProtocol.passivePortRange = xrange(22032, 65536)
+        port = self.serverProtocol.getDTPPort(protocol.Factory())
+        self.assertEqual(port, 22035)
+
+        self.serverProtocol.passivePortRange = xrange(22032, 22035)
+        self.assertRaises(error.CannotListenError,
+                          self.serverProtocol.getDTPPort,
+                          protocol.Factory())
+
+
+    def test_portRangeInheritedFromFactory(self):
+        """
+        The L{FTP} instances created by L{ftp.FTPFactory.buildProtocol} have
+        their C{passivePortRange} attribute set to the same object the
+        factory's C{passivePortRange} attribute is set to.
+        """
+        portRange = xrange(2017, 2031)
+        self.factory.passivePortRange = portRange
+        protocol = self.factory.buildProtocol(None)
+        self.assertEqual(portRange, protocol.wrappedProtocol.passivePortRange)
+
+    def testFEAT(self):
+        """
+        When the server receives 'FEAT', it should report the list of supported
+        features. (Additionally, ensure that the server reports various
+        particular features that are supported by all Twisted FTP servers.)
+        """
+        d = self.client.queueStringCommand('FEAT')
+        def gotResponse(responseLines):
+            self.assertEqual('211-Features:', responseLines[0])
+            self.assertTrue(' MDTM' in responseLines)
+            self.assertTrue(' PASV' in responseLines)
+            self.assertTrue(' TYPE A;I' in responseLines)
+            self.assertTrue(' SIZE' in responseLines)
+            self.assertEqual('211 End', responseLines[-1])
+        return d.addCallback(gotResponse)
+
+    def testOPTS(self):
+        """
+        When the server receives 'OPTS something', it should report
+        that the FTP server does not support the option called 'something'.
+        """
+        d = self._anonymousLogin()
+        self.assertCommandFailed(
+            'OPTS something',
+            ["502 Option 'something' not implemented."],
+            chainDeferred=d,
+            )
+        return d
+
+class FTPServerTestCaseAdvancedClient(FTPServerTestCase):
+    """
+    Test FTP server with the L{ftp.FTPClient} class.
+    """
+    clientFactory = ftp.FTPClient
+
+    def test_anonymousSTOR(self):
+        """
+        Try to issue a STOR as anonymous, and check that we get a permission
+        denied error.
+        """
+        def eb(res):
+            res.trap(ftp.CommandFailed)
+            self.assertEqual(res.value.args[0][0],
+                '550 foo: Permission denied.')
+        d1, d2 = self.client.storeFile('foo')
+        d2.addErrback(eb)
+        return defer.gatherResults([d1, d2])
+
+
+    def test_STORwriteError(self):
+        """
+        Any errors during writing a file inside a STOR should be returned to
+        the client.
+        """
+        # Make a failing file writer.
+        class FailingFileWriter(ftp._FileWriter):
+            def receive(self):
+                return defer.fail(ftp.IsNotADirectoryError("blah"))
+
+        def failingSTOR(a, b):
+            return defer.succeed(FailingFileWriter(None))
+
+        # Monkey patch the shell so it returns a file writer that will
+        # fail.
+        self.patch(ftp.FTPAnonymousShell, 'openForWriting', failingSTOR)
+
+        def eb(res):
+            self.flushLoggedErrors()
+            res.trap(ftp.CommandFailed)
+            self.assertEqual(
+                res.value.args[0][0],
+                "550 Cannot rmd, blah is not a directory")
+        d1, d2 = self.client.storeFile('failing_file')
+        d2.addErrback(eb)
+        return defer.gatherResults([d1, d2])
+
+    def test_RETRreadError(self):
+        """
+        Any errors during reading a file inside a RETR should be returned to
+        the client.
+        """
+        # Make a failing file reader.
+        class FailingFileReader(ftp._FileReader):
+            def send(self, consumer):
+                return defer.fail(ftp.IsADirectoryError("blah"))
+
+        def failingRETR(a, b):
+            return defer.succeed(FailingFileReader(None))
+
+        # Monkey patch the shell so it returns a file reader that will
+        # fail.
+        self.patch(ftp.FTPAnonymousShell, 'openForReading', failingRETR)
+
+        def check_response(failure):
+            self.flushLoggedErrors()
+            failure.trap(ftp.CommandFailed)
+            self.assertEqual(
+                failure.value.args[0][0],
+                "125 Data connection already open, starting transfer")
+            self.assertEqual(
+                failure.value.args[0][1],
+                "550 blah: is a directory")
+
+        proto = _BufferingProtocol()
+        d = self.client.retrieveFile('failing_file', proto)
+        d.addErrback(check_response)
+        return d
+
+
+class FTPServerPasvDataConnectionTestCase(FTPServerTestCase):
+    def _makeDataConnection(self, ignored=None):
+        # Establish a passive data connection (i.e. client connecting to
+        # server).
+        d = self.client.queueStringCommand('PASV')
+        def gotPASV(responseLines):
+            host, port = ftp.decodeHostPort(responseLines[-1][4:])
+            cc = protocol.ClientCreator(reactor, _BufferingProtocol)
+            return cc.connectTCP('127.0.0.1', port)
+        return d.addCallback(gotPASV)
+
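+    # A worked example of the PASV decoding used in gotPASV above.  Per
+    # RFC 959 the six numbers in a 227 reply encode host and port as
+    # h1,h2,h3,h4,p1,p2 with port = p1 * 256 + p2, so (illustratively):
+    #
+    #     '227 Entering Passive Mode (127,0,0,1,4,38).'
+    #         -> host '127.0.0.1', port 4 * 256 + 38 == 1062
+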
+    def _download(self, command, chainDeferred=None):
+        if chainDeferred is None:
+            chainDeferred = defer.succeed(None)
+
+        chainDeferred.addCallback(self._makeDataConnection)
+        def queueCommand(downloader):
+            # wait for the command to return, and the download connection to be
+            # closed.
+            d1 = self.client.queueStringCommand(command)
+            d2 = downloader.d
+            return defer.gatherResults([d1, d2])
+        chainDeferred.addCallback(queueCommand)
+
+        def downloadDone((ignored, downloader)):
+            return downloader.buffer
+        return chainDeferred.addCallback(downloadDone)
+
+    def testEmptyLIST(self):
+        # Login
+        d = self._anonymousLogin()
+
+        # No files, so the file listing should be empty
+        self._download('LIST', chainDeferred=d)
+        def checkEmpty(result):
+            self.assertEqual('', result)
+        return d.addCallback(checkEmpty)
+
+    def testTwoDirLIST(self):
+        # Make some directories
+        os.mkdir(os.path.join(self.directory, 'foo'))
+        os.mkdir(os.path.join(self.directory, 'bar'))
+
+        # Login
+        d = self._anonymousLogin()
+
+        # We expect 2 lines because there are two directories.
+        self._download('LIST', chainDeferred=d)
+        def checkDownload(download):
+            self.assertEqual(2, len(download[:-2].split('\r\n')))
+        d.addCallback(checkDownload)
+
+        # Download a names-only listing.
+        self._download('NLST ', chainDeferred=d)
+        def checkDownload(download):
+            filenames = download[:-2].split('\r\n')
+            filenames.sort()
+            self.assertEqual(['bar', 'foo'], filenames)
+        d.addCallback(checkDownload)
+
+        # Download a listing of the 'foo' subdirectory.  'foo' has no files, so
+        # the file listing should be empty.
+        self._download('LIST foo', chainDeferred=d)
+        def checkDownload(download):
+            self.assertEqual('', download)
+        d.addCallback(checkDownload)
+
+        # Change the current working directory to 'foo'.
+        def chdir(ignored):
+            return self.client.queueStringCommand('CWD foo')
+        d.addCallback(chdir)
+
+        # Download a listing from within 'foo', and again it should be empty,
+        # because LIST uses the working directory by default.
+        self._download('LIST', chainDeferred=d)
+        def checkDownload(download):
+            self.assertEqual('', download)
+        return d.addCallback(checkDownload)
+
+    def testManyLargeDownloads(self):
+        # Login
+        d = self._anonymousLogin()
+
+        # Download a range of different size files
+        for size in range(100000, 110000, 500):
+            fObj = file(os.path.join(self.directory, '%d.txt' % (size,)), 'wb')
+            fObj.write('x' * size)
+            fObj.close()
+
+            self._download('RETR %d.txt' % (size,), chainDeferred=d)
+            def checkDownload(download, size=size):
+                self.assertEqual(size, len(download))
+            d.addCallback(checkDownload)
+        return d
+
+
+    def test_downloadFolder(self):
+        """
+        When RETR is called on a folder, it fails, complaining that the
+        path is a folder.
+        """
+        # Make a directory in the current working directory
+        self.dirPath.child('foo').createDirectory()
+        # Login
+        d = self._anonymousLogin()
+        d.addCallback(self._makeDataConnection)
+
+        def retrFolder(downloader):
+            downloader.transport.loseConnection()
+            deferred = self.client.queueStringCommand('RETR foo')
+            return deferred
+        d.addCallback(retrFolder)
+
+        def failOnSuccess(result):
+            raise AssertionError('Downloading a folder should not succeed.')
+        d.addCallback(failOnSuccess)
+
+        def checkError(failure):
+            failure.trap(ftp.CommandFailed)
+            self.assertEqual(
+                ['550 foo: is a directory'], failure.value.message)
+            current_errors = self.flushLoggedErrors()
+            self.assertEqual(
+                0, len(current_errors),
+                'No errors should be logged while downloading a folder.')
+        d.addErrback(checkError)
+        return d
+
+
+    def test_NLSTEmpty(self):
+        """
+        NLST with no argument returns the directory listing for the current
+        working directory.
+        """
+        # Login
+        d = self._anonymousLogin()
+
+        # Touch a file in the current working directory
+        self.dirPath.child('test.txt').touch()
+        # Make a directory in the current working directory
+        self.dirPath.child('foo').createDirectory()
+
+        self._download('NLST ', chainDeferred=d)
+        def checkDownload(download):
+            filenames = download[:-2].split('\r\n')
+            filenames.sort()
+            self.assertEqual(['foo', 'test.txt'], filenames)
+        return d.addCallback(checkDownload)
+
+
+    def test_NLSTNonexistent(self):
+        """
+        NLST on a non-existent file/directory returns nothing.
+        """
+        # Login
+        d = self._anonymousLogin()
+
+        self._download('NLST nonexistent.txt', chainDeferred=d)
+        def checkDownload(download):
+            self.assertEqual('', download)
+        return d.addCallback(checkDownload)
+
+
+    def test_NLSTOnPathToFile(self):
+        """
+        NLST on an existing file returns only the path to that file.
+        """
+        # Login
+        d = self._anonymousLogin()
+
+        # Touch a file in the current working directory
+        self.dirPath.child('test.txt').touch()
+
+        self._download('NLST test.txt', chainDeferred=d)
+        def checkDownload(download):
+            filenames = download[:-2].split('\r\n')
+            self.assertEqual(['test.txt'], filenames)
+        return d.addCallback(checkDownload)
+
+
+
+class FTPServerPortDataConnectionTestCase(FTPServerPasvDataConnectionTestCase):
+    def setUp(self):
+        self.dataPorts = []
+        return FTPServerPasvDataConnectionTestCase.setUp(self)
+
+    def _makeDataConnection(self, ignored=None):
+        # Establish an active data connection (i.e. server connecting to
+        # client).
+        deferred = defer.Deferred()
+        class DataFactory(protocol.ServerFactory):
+            protocol = _BufferingProtocol
+            def buildProtocol(self, addr):
+                p = protocol.ServerFactory.buildProtocol(self, addr)
+                reactor.callLater(0, deferred.callback, p)
+                return p
+        dataPort = reactor.listenTCP(0, DataFactory(), interface='127.0.0.1')
+        self.dataPorts.append(dataPort)
+        cmd = 'PORT ' + ftp.encodeHostPort('127.0.0.1', dataPort.getHost().port)
+        self.client.queueStringCommand(cmd)
+        return deferred
+
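+    # encodeHostPort is the inverse of the PASV decoding shown earlier: it
+    # renders a host/port pair as the six comma-separated numbers the PORT
+    # command expects.  Illustratively (port 1062 == 4 * 256 + 38):
+    #
+    #     'PORT ' + ftp.encodeHostPort('127.0.0.1', 1062)
+    #         -> 'PORT 127,0,0,1,4,38'
+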
+    def tearDown(self):
+        l = [defer.maybeDeferred(port.stopListening) for port in self.dataPorts]
+        d = defer.maybeDeferred(
+            FTPServerPasvDataConnectionTestCase.tearDown, self)
+        l.append(d)
+        return defer.DeferredList(l, fireOnOneErrback=True)
+
+    def testPORTCannotConnect(self):
+        # Login
+        d = self._anonymousLogin()
+
+        # Listen on a port, and immediately stop listening as a way to find a
+        # port number that is definitely closed.
+        def loggedIn(ignored):
+            port = reactor.listenTCP(0, protocol.Factory(),
+                                     interface='127.0.0.1')
+            portNum = port.getHost().port
+            d = port.stopListening()
+            d.addCallback(lambda _: portNum)
+            return d
+        d.addCallback(loggedIn)
+
+        # Tell the server to connect to that port with a PORT command, and
+        # verify that it fails with the right error.
+        def gotPortNum(portNum):
+            return self.assertCommandFailed(
+                'PORT ' + ftp.encodeHostPort('127.0.0.1', portNum),
+                ["425 Can't open data connection."])
+        return d.addCallback(gotPortNum)
+
+
+
+class DTPFactoryTests(unittest.TestCase):
+    """
+    Tests for L{ftp.DTPFactory}.
+    """
+    def setUp(self):
+        """
+        Create a fake protocol interpreter and a L{ftp.DTPFactory} instance to
+        test.
+        """
+        self.reactor = task.Clock()
+
+        class ProtocolInterpreter(object):
+            dtpInstance = None
+
+        self.protocolInterpreter = ProtocolInterpreter()
+        self.factory = ftp.DTPFactory(
+            self.protocolInterpreter, None, self.reactor)
+
+
+    def test_setTimeout(self):
+        """
+        L{ftp.DTPFactory.setTimeout} uses the reactor passed to its initializer
+        to set up a timed event to time out the DTP setup after the specified
+        number of seconds.
+        """
+        # Make sure the factory's deferred fails with the right exception, and
+        # make it so we can tell exactly when it fires.
+        finished = []
+        d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
+        d.addCallback(finished.append)
+
+        self.factory.setTimeout(6)
+
+        # Advance the clock almost to the timeout
+        self.reactor.advance(5)
+
+        # Nothing should have happened yet.
+        self.assertFalse(finished)
+
+        # Advance it to the configured timeout.
+        self.reactor.advance(1)
+
+        # Now the Deferred should have failed with PortConnectionError.
+        self.assertTrue(finished)
+
+        # There should also be no calls left in the reactor.
+        self.assertFalse(self.reactor.calls)
+
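+    # A minimal sketch of the task.Clock pattern driving these tests (nothing
+    # here is specific to ftp; only Clock.callLater and Clock.advance from
+    # twisted.internet.task are assumed, and 'fire' is any callable):
+    #
+    #     clock = task.Clock()
+    #     clock.callLater(6, fire)   # schedule work as on a real reactor
+    #     clock.advance(5)           # nothing fires: only 5 simulated seconds
+    #     clock.advance(1)           # now 'fire' runs, deterministically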
+
+    def test_buildProtocolOnce(self):
+        """
+        A L{ftp.DTPFactory} instance's C{buildProtocol} method can be used once
+        to create a L{ftp.DTP} instance.
+        """
+        protocol = self.factory.buildProtocol(None)
+        self.assertIsInstance(protocol, ftp.DTP)
+
+        # A subsequent call returns None.
+        self.assertIdentical(self.factory.buildProtocol(None), None)
+
+
+    def test_timeoutAfterConnection(self):
+        """
+        If a timeout has been set up using L{ftp.DTPFactory.setTimeout}, it is
+        cancelled by L{ftp.DTPFactory.buildProtocol}.
+        """
+        self.factory.setTimeout(10)
+        protocol = self.factory.buildProtocol(None)
+        # Make sure the call is no longer active.
+        self.assertFalse(self.reactor.calls)
+
+
+    def test_connectionAfterTimeout(self):
+        """
+        If L{ftp.DTPFactory.buildProtocol} is called after the timeout
+        specified by L{ftp.DTPFactory.setTimeout} has elapsed, C{None} is
+        returned.
+        """
+        # Handle the error so it doesn't get logged.
+        d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
+
+        # Set up the timeout and then cause it to elapse so the Deferred does
+        # fail.
+        self.factory.setTimeout(10)
+        self.reactor.advance(10)
+
+        # Try to get a protocol - we should not be able to.
+        self.assertIdentical(self.factory.buildProtocol(None), None)
+
+        # Make sure the Deferred is doing the right thing.
+        return d
+
+
+    def test_timeoutAfterConnectionFailed(self):
+        """
+        L{ftp.DTPFactory.deferred} fails with L{PortConnectionError} when
+        L{ftp.DTPFactory.clientConnectionFailed} is called.  If the timeout
+        specified with L{ftp.DTPFactory.setTimeout} expires after that, nothing
+        additional happens.
+        """
+        finished = []
+        d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
+        d.addCallback(finished.append)
+
+        self.factory.setTimeout(10)
+        self.assertFalse(finished)
+        self.factory.clientConnectionFailed(None, None)
+        self.assertTrue(finished)
+        self.reactor.advance(10)
+        return d
+
+
+    def test_connectionFailedAfterTimeout(self):
+        """
+        If L{ftp.DTPFactory.clientConnectionFailed} is called after the timeout
+        specified by L{ftp.DTPFactory.setTimeout} has elapsed, nothing beyond
+        the normal timeout behavior happens.
+        """
+        # Handle the error so it doesn't get logged.
+        d = self.assertFailure(self.factory.deferred, ftp.PortConnectionError)
+
+        # Set up the timeout and then cause it to elapse so the Deferred does
+        # fail.
+        self.factory.setTimeout(10)
+        self.reactor.advance(10)
+
+        # Now fail the connection attempt.  This should do nothing.  In
+        # particular, it should not raise an exception.
+        self.factory.clientConnectionFailed(None, defer.TimeoutError("foo"))
+
+        # Give the Deferred to trial so it can make sure it did what we
+        # expected.
+        return d
+
+
+
+# -- Client Tests -----------------------------------------------------------
+
+class PrintLines(protocol.Protocol):
+    """Helper class used by FTPFileListingTests."""
+
+    def __init__(self, lines):
+        self._lines = lines
+
+    def connectionMade(self):
+        for line in self._lines:
+            self.transport.write(line + "\r\n")
+        self.transport.loseConnection()
+
+
+class MyFTPFileListProtocol(ftp.FTPFileListProtocol):
+    def __init__(self):
+        self.other = []
+        ftp.FTPFileListProtocol.__init__(self)
+
+    def unknownLine(self, line):
+        self.other.append(line)
+
+
+class FTPFileListingTests(unittest.TestCase):
+    def getFilesForLines(self, lines):
+        fileList = MyFTPFileListProtocol()
+        d = loopback.loopbackAsync(PrintLines(lines), fileList)
+        d.addCallback(lambda _: (fileList.files, fileList.other))
+        return d
+
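+    # Each parsed entry in fileList.files is a dict whose keys are exercised
+    # by the assertions below: filetype, perms, nlinks, owner, group, size,
+    # date, filename and linktarget.
+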
+    def testOneLine(self):
+        # This example line taken from the docstring for FTPFileListProtocol
+        line = '-rw-r--r--   1 root     other        531 Jan 29 03:26 README'
+        def check(((file,), other)):
+            self.failIf(other, 'unexpected unparsable lines: %s' % repr(other))
+            self.failUnless(file['filetype'] == '-', 'misparsed fileitem')
+            self.failUnless(file['perms'] == 'rw-r--r--', 'misparsed perms')
+            self.failUnless(file['owner'] == 'root', 'misparsed fileitem')
+            self.failUnless(file['group'] == 'other', 'misparsed fileitem')
+            self.failUnless(file['size'] == 531, 'misparsed fileitem')
+            self.failUnless(file['date'] == 'Jan 29 03:26', 'misparsed fileitem')
+            self.failUnless(file['filename'] == 'README', 'misparsed fileitem')
+            self.failUnless(file['nlinks'] == 1, 'misparsed nlinks')
+            self.failIf(file['linktarget'], 'misparsed linktarget')
+        return self.getFilesForLines([line]).addCallback(check)
+
+    def testVariantLines(self):
+        line1 = 'drw-r--r--   2 root     other        531 Jan  9  2003 A'
+        line2 = 'lrw-r--r--   1 root     other          1 Jan 29 03:26 B -> A'
+        line3 = 'woohoo! '
+        def check(((file1, file2), (other,))):
+            self.failUnless(other == 'woohoo! \r', 'incorrect other line')
+            # file 1
+            self.failUnless(file1['filetype'] == 'd', 'misparsed fileitem')
+            self.failUnless(file1['perms'] == 'rw-r--r--', 'misparsed perms')
+            self.failUnless(file1['owner'] == 'root', 'misparsed owner')
+            self.failUnless(file1['group'] == 'other', 'misparsed group')
+            self.failUnless(file1['size'] == 531, 'misparsed size')
+            self.failUnless(file1['date'] == 'Jan  9  2003', 'misparsed date')
+            self.failUnless(file1['filename'] == 'A', 'misparsed filename')
+            self.failUnless(file1['nlinks'] == 2, 'misparsed nlinks')
+            self.failIf(file1['linktarget'], 'misparsed linktarget')
+            # file 2
+            self.failUnless(file2['filetype'] == 'l', 'misparsed fileitem')
+            self.failUnless(file2['perms'] == 'rw-r--r--', 'misparsed perms')
+            self.failUnless(file2['owner'] == 'root', 'misparsed owner')
+            self.failUnless(file2['group'] == 'other', 'misparsed group')
+            self.failUnless(file2['size'] == 1, 'misparsed size')
+            self.failUnless(file2['date'] == 'Jan 29 03:26', 'misparsed date')
+            self.failUnless(file2['filename'] == 'B', 'misparsed filename')
+            self.failUnless(file2['nlinks'] == 1, 'misparsed nlinks')
+            self.failUnless(file2['linktarget'] == 'A', 'misparsed linktarget')
+        return self.getFilesForLines([line1, line2, line3]).addCallback(check)
+
+    def testUnknownLine(self):
+        def check((files, others)):
+            self.failIf(files, 'unexpected file entries')
+            self.failUnless(others == ['ABC\r', 'not a file\r'],
+                            'incorrect unparsable lines: %s' % repr(others))
+        return self.getFilesForLines(['ABC', 'not a file']).addCallback(check)
+
+    def test_filenameWithUnescapedSpace(self):
+        '''
+        Will parse filenames and linktargets containing unescaped
+        space characters.
+        '''
+        line1 = 'drw-r--r--   2 root     other        531 Jan  9  2003 A B'
+        line2 = (
+            'lrw-r--r--   1 root     other          1 Jan 29 03:26 '
+            'B A -> D C/A B'
+            )
+
+        def check((files, others)):
+            self.assertEqual([], others, 'unexpected others entries')
+            self.assertEqual(
+                'A B', files[0]['filename'], 'misparsed filename')
+            self.assertEqual(
+                'B A', files[1]['filename'], 'misparsed filename')
+            self.assertEqual(
+                'D C/A B', files[1]['linktarget'], 'misparsed linktarget')
+        return self.getFilesForLines([line1, line2]).addCallback(check)
+
+    def test_filenameWithEscapedSpace(self):
+        '''
+        Will parse filenames and linktargets containing escaped
+        space characters.
+        '''
+        line1 = 'drw-r--r--   2 root     other        531 Jan  9  2003 A\ B'
+        line2 = (
+            'lrw-r--r--   1 root     other          1 Jan 29 03:26 '
+            'B A -> D\ C/A B'
+            )
+
+        def check((files, others)):
+            self.assertEqual([], others, 'unexpected others entries')
+            self.assertEqual(
+                'A B', files[0]['filename'], 'misparsed filename')
+            self.assertEqual(
+                'B A', files[1]['filename'], 'misparsed filename')
+            self.assertEqual(
+                'D C/A B', files[1]['linktarget'], 'misparsed linktarget')
+        return self.getFilesForLines([line1, line2]).addCallback(check)
+
+    def testYear(self):
+        # This example derived from bug description in issue 514.
+        fileList = ftp.FTPFileListProtocol()
+        exampleLine = (
+            '-rw-r--r--   1 root     other        531 Jan 29 2003 README\n')
+        class PrintLine(protocol.Protocol):
+            def connectionMade(self):
+                self.transport.write(exampleLine)
+                self.transport.loseConnection()
+
+        def check(ignored):
+            file = fileList.files[0]
+            self.failUnless(file['size'] == 531, 'misparsed fileitem')
+            self.failUnless(file['date'] == 'Jan 29 2003', 'misparsed fileitem')
+            self.failUnless(file['filename'] == 'README', 'misparsed fileitem')
+
+        d = loopback.loopbackAsync(PrintLine(), fileList)
+        return d.addCallback(check)
+
+
+class FTPClientTests(unittest.TestCase):
+
+    def testFailedRETR(self):
+        f = protocol.Factory()
+        f.noisy = 0
+        port = reactor.listenTCP(0, f, interface="127.0.0.1")
+        self.addCleanup(port.stopListening)
+        portNum = port.getHost().port
+        # This test data derived from a bug report by ranty on #twisted
+        responses = ['220 ready, dude (vsFTPd 1.0.0: beat me, break me)',
+                     # USER anonymous
+                     '331 Please specify the password.',
+                     # PASS twisted at twistedmatrix.com
+                     '230 Login successful. Have fun.',
+                     # TYPE I
+                     '200 Binary it is, then.',
+                     # PASV
+                     '227 Entering Passive Mode (127,0,0,1,%d,%d)' %
+                     (portNum >> 8, portNum & 0xff),
+                     # RETR /file/that/doesnt/exist
+                     '550 Failed to open file.']
+        f.buildProtocol = lambda addr: PrintLines(responses)
+
+        cc = protocol.ClientCreator(reactor, ftp.FTPClient, passive=1)
+        d = cc.connectTCP('127.0.0.1', portNum)
+        def gotClient(client):
+            p = protocol.Protocol()
+            return client.retrieveFile('/file/that/doesnt/exist', p)
+        d.addCallback(gotClient)
+        return self.assertFailure(d, ftp.CommandFailed)
+
+    def test_errbacksUponDisconnect(self):
+        """
+        Test that the FTP command errbacks when the connection is lost during
+        the operation.
+        """
+        ftpClient = ftp.FTPClient()
+        tr = proto_helpers.StringTransportWithDisconnection()
+        ftpClient.makeConnection(tr)
+        tr.protocol = ftpClient
+        d = ftpClient.list('some path', Dummy())
+        m = []
+        def _eb(failure):
+            m.append(failure)
+            return None
+        d.addErrback(_eb)
+        from twisted.internet.main import CONNECTION_LOST
+        ftpClient.connectionLost(failure.Failure(CONNECTION_LOST))
+        self.failUnless(m, m)
+        return d
+
+
+
+class FTPClientTestCase(unittest.TestCase):
+    """
+    Test advanced FTP client commands.
+    """
+    def setUp(self):
+        """
+        Create an FTP client and connect it to a fake transport.
+        """
+        self.client = ftp.FTPClient()
+        self.transport = proto_helpers.StringTransportWithDisconnection()
+        self.client.makeConnection(self.transport)
+        self.transport.protocol = self.client
+
+
+    def tearDown(self):
+        """
+        Deliver disconnection notification to the client so that it can
+        perform any cleanup which may be required.
+        """
+        self.client.connectionLost(error.ConnectionLost())
+
+
+    def _testLogin(self):
+        """
+        Perform and verify the anonymous login exchange used by the other
+        tests.
+        """
+        self.assertEqual(self.transport.value(), '')
+        self.client.lineReceived(
+            '331 Guest login ok, type your email address as password.')
+        self.assertEqual(self.transport.value(), 'USER anonymous\r\n')
+        self.transport.clear()
+        self.client.lineReceived(
+            '230 Anonymous login ok, access restrictions apply.')
+        self.assertEqual(self.transport.value(), 'TYPE I\r\n')
+        self.transport.clear()
+        self.client.lineReceived('200 Type set to I.')
+
+
+    def test_CDUP(self):
+        """
+        Test the CDUP command.
+
+        L{ftp.FTPClient.cdup} should return a Deferred which fires with a
+        sequence of one element which is the string the server sent
+        indicating that the command was executed successfully.
+
+        (XXX - This is a bad API)
+        """
+        def cbCdup(res):
+            self.assertEqual(res[0], '250 Requested File Action Completed OK')
+
+        self._testLogin()
+        d = self.client.cdup().addCallback(cbCdup)
+        self.assertEqual(self.transport.value(), 'CDUP\r\n')
+        self.transport.clear()
+        self.client.lineReceived('250 Requested File Action Completed OK')
+        return d
+
+
+    def test_failedCDUP(self):
+        """
+        Test L{ftp.FTPClient.cdup}'s handling of a failed CDUP command.
+
+        When the CDUP command fails, the returned Deferred should errback
+        with L{ftp.CommandFailed}.
+        """
+        self._testLogin()
+        d = self.client.cdup()
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'CDUP\r\n')
+        self.transport.clear()
+        self.client.lineReceived('550 ..: No such file or directory')
+        return d
+
+
+    def test_PWD(self):
+        """
+        Test the PWD command.
+
+        L{ftp.FTPClient.pwd} should return a Deferred which fires with a
+        sequence of one element which is a string representing the current
+        working directory on the server.
+
+        (XXX - This is a bad API)
+        """
+        def cbPwd(res):
+            self.assertEqual(ftp.parsePWDResponse(res[0]), "/bar/baz")
+
+        self._testLogin()
+        d = self.client.pwd().addCallback(cbPwd)
+        self.assertEqual(self.transport.value(), 'PWD\r\n')
+        self.client.lineReceived('257 "/bar/baz"')
+        return d
+
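+    # i.e. the helper exercised by cbPwd above turns the quoted 257 reply into
+    # a plain path:
+    #
+    #     ftp.parsePWDResponse('257 "/bar/baz"')  ->  '/bar/baz'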
+
+    def test_failedPWD(self):
+        """
+        Test a failure in the PWD command.
+
+        When the PWD command fails, the returned Deferred should errback
+        with L{ftp.CommandFailed}.
+        """
+        self._testLogin()
+        d = self.client.pwd()
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'PWD\r\n')
+        self.client.lineReceived('550 /bar/baz: No such file or directory')
+        return d
+
+
+    def test_CWD(self):
+        """
+        Test the CWD command.
+
+        L{ftp.FTPClient.cwd} should return a Deferred which fires with a
+        sequence of one element which is the string the server sent
+        indicating that the command was executed successfully.
+
+        (XXX - This is a bad API)
+        """
+        def cbCwd(res):
+            self.assertEqual(res[0], '250 Requested File Action Completed OK')
+
+        self._testLogin()
+        d = self.client.cwd("bar/foo").addCallback(cbCwd)
+        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
+        self.client.lineReceived('250 Requested File Action Completed OK')
+        return d
+
+
+    def test_failedCWD(self):
+        """
+        Test a failure in the CWD command.
+
+        When the CWD command fails, the returned Deferred should errback
+        with L{ftp.CommandFailed}.
+        """
+        self._testLogin()
+        d = self.client.cwd("bar/foo")
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
+        self.client.lineReceived('550 bar/foo: No such file or directory')
+        return d
+
+
+    def test_passiveRETR(self):
+        """
+        Test the RETR command in passive mode: get a file and verify its
+        content.
+
+        L{ftp.FTPClient.retrieveFile} should return a Deferred which fires
+        with the protocol instance passed to it after the download has
+        completed.
+
+        (XXX - This API should be based on producers and consumers)
+        """
+        def cbRetr(res, proto):
+            self.assertEqual(proto.buffer, 'x' * 1000)
+
+        def cbConnect(host, port, factory):
+            self.assertEqual(host, '127.0.0.1')
+            self.assertEqual(port, 12345)
+            proto = factory.buildProtocol((host, port))
+            proto.makeConnection(proto_helpers.StringTransport())
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            proto.dataReceived("x" * 1000)
+            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
+
+        self.client.connectFactory = cbConnect
+        self._testLogin()
+        proto = _BufferingProtocol()
+        d = self.client.retrieveFile("spam", proto)
+        d.addCallback(cbRetr, proto)
+        self.assertEqual(self.transport.value(), 'PASV\r\n')
+        self.transport.clear()
+        self.client.lineReceived('227 Entering Passive Mode (%s).' %
+            (ftp.encodeHostPort('127.0.0.1', 12345),))
+        self.assertEqual(self.transport.value(), 'RETR spam\r\n')
+        self.transport.clear()
+        self.client.lineReceived('226 Transfer Complete.')
+        return d
+
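+    # A hedged usage sketch of retrieveFile outside this fake-transport
+    # harness; ClientCreator and the passive=1 flag appear elsewhere in this
+    # file, but the host name and the consumer protocol are illustrative:
+    #
+    #     creator = protocol.ClientCreator(reactor, ftp.FTPClient, passive=1)
+    #     d = creator.connectTCP('ftp.example.com', 21)
+    #     d.addCallback(
+    #         lambda client: client.retrieveFile('spam', SomeConsumer()))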
+
+    def test_RETR(self):
+        """
+        Test the RETR command in non-passive mode.
+
+        Like L{test_passiveRETR} but in the configuration where the server
+        establishes the data connection to the client, rather than the other
+        way around.
+        """
+        self.client.passive = False
+
+        def generatePort(portCmd):
+            portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
+            portCmd.protocol.makeConnection(proto_helpers.StringTransport())
+            portCmd.protocol.dataReceived("x" * 1000)
+            portCmd.protocol.connectionLost(
+                failure.Failure(error.ConnectionDone("")))
+
+        def cbRetr(res, proto):
+            self.assertEqual(proto.buffer, 'x' * 1000)
+
+        self.client.generatePortCommand = generatePort
+        self._testLogin()
+        proto = _BufferingProtocol()
+        d = self.client.retrieveFile("spam", proto)
+        d.addCallback(cbRetr, proto)
+        self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
+            (ftp.encodeHostPort('127.0.0.1', 9876),))
+        self.transport.clear()
+        self.client.lineReceived('200 PORT OK')
+        self.assertEqual(self.transport.value(), 'RETR spam\r\n')
+        self.transport.clear()
+        self.client.lineReceived('226 Transfer Complete.')
+        return d
+
+
+    def test_failedRETR(self):
+        """
+        Try to RETR a nonexistent file.
+
+        L{ftp.FTPClient.retrieveFile} should return a Deferred which
+        errbacks with L{ftp.CommandFailed} if the server indicates the file
+        cannot be transferred for some reason.
+        """
+        def cbConnect(host, port, factory):
+            self.assertEqual(host, '127.0.0.1')
+            self.assertEqual(port, 12345)
+            proto = factory.buildProtocol((host, port))
+            proto.makeConnection(proto_helpers.StringTransport())
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
+
+        self.client.connectFactory = cbConnect
+        self._testLogin()
+        proto = _BufferingProtocol()
+        d = self.client.retrieveFile("spam", proto)
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'PASV\r\n')
+        self.transport.clear()
+        self.client.lineReceived('227 Entering Passive Mode (%s).' %
+            (ftp.encodeHostPort('127.0.0.1', 12345),))
+        self.assertEqual(self.transport.value(), 'RETR spam\r\n')
+        self.transport.clear()
+        self.client.lineReceived('550 spam: No such file or directory')
+        return d
+
+
+    def test_lostRETR(self):
+        """
+        Try a RETR, but disconnect during the transfer.
+
+        L{ftp.FTPClient.retrieveFile} should return a Deferred which
+        errbacks with L{ftp.ConnectionLost}.
+        """
+        self.client.passive = False
+
+        l = []
+        def generatePort(portCmd):
+            portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
+            tr = proto_helpers.StringTransportWithDisconnection()
+            portCmd.protocol.makeConnection(tr)
+            tr.protocol = portCmd.protocol
+            portCmd.protocol.dataReceived("x" * 500)
+            l.append(tr)
+
+        self.client.generatePortCommand = generatePort
+        self._testLogin()
+        proto = _BufferingProtocol()
+        d = self.client.retrieveFile("spam", proto)
+        self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
+            (ftp.encodeHostPort('127.0.0.1', 9876),))
+        self.transport.clear()
+        self.client.lineReceived('200 PORT OK')
+        self.assertEqual(self.transport.value(), 'RETR spam\r\n')
+
+        self.assert_(l)
+        l[0].loseConnection()
+        self.transport.loseConnection()
+        self.assertFailure(d, ftp.ConnectionLost)
+        return d
+
+
+    def test_passiveSTOR(self):
+        """
+        Test the STOR command: send a file and verify its content.
+
+        L{ftp.FTPClient.storeFile} should return a two-tuple of Deferreds.
+        The first should fire with a protocol instance when the data
+        connection has been established; that protocol is responsible for
+        sending the contents of the file.  The second should fire when the
+        upload has completed, the data connection has been closed, and the
+        server has acknowledged receipt of the file.
+
+        (XXX - storeFile should take a producer as an argument, instead, and
+        only return a Deferred which fires when the upload has succeeded or
+        failed).
+        """
+        tr = proto_helpers.StringTransport()
+        def cbStore(sender):
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            sender.transport.write("x" * 1000)
+            sender.finish()
+            sender.connectionLost(failure.Failure(error.ConnectionDone("")))
+
+        def cbFinish(ign):
+            self.assertEqual(tr.value(), "x" * 1000)
+
+        def cbConnect(host, port, factory):
+            self.assertEqual(host, '127.0.0.1')
+            self.assertEqual(port, 12345)
+            proto = factory.buildProtocol((host, port))
+            proto.makeConnection(tr)
+
+        self.client.connectFactory = cbConnect
+        self._testLogin()
+        d1, d2 = self.client.storeFile("spam")
+        d1.addCallback(cbStore)
+        d2.addCallback(cbFinish)
+        self.assertEqual(self.transport.value(), 'PASV\r\n')
+        self.transport.clear()
+        self.client.lineReceived('227 Entering Passive Mode (%s).' %
+            (ftp.encodeHostPort('127.0.0.1', 12345),))
+        self.assertEqual(self.transport.value(), 'STOR spam\r\n')
+        self.transport.clear()
+        self.client.lineReceived('226 Transfer Complete.')
+        return defer.gatherResults([d1, d2])
+
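+    # The two-Deferred storeFile contract in miniature (a sketch; 'data' and
+    # the callback names are illustrative, sender.finish() is used above):
+    #
+    #     dConnected, dFinished = client.storeFile('spam')
+    #     def sendIt(sender):
+    #         sender.transport.write(data)
+    #         sender.finish()
+    #     dConnected.addCallback(sendIt)
+    #     dFinished.addCallback(lambda _: None)   # server acknowledged receipt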
+
+    def test_failedSTOR(self):
+        """
+        Test a failure in the STOR command.
+
+        If the server does not acknowledge successful receipt of the
+        uploaded file, the second Deferred returned by
+        L{ftp.FTPClient.storeFile} should errback with L{ftp.CommandFailed}.
+        """
+        tr = proto_helpers.StringTransport()
+        def cbStore(sender):
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            sender.transport.write("x" * 1000)
+            sender.finish()
+            sender.connectionLost(failure.Failure(error.ConnectionDone("")))
+
+        def cbConnect(host, port, factory):
+            self.assertEqual(host, '127.0.0.1')
+            self.assertEqual(port, 12345)
+            proto = factory.buildProtocol((host, port))
+            proto.makeConnection(tr)
+
+        self.client.connectFactory = cbConnect
+        self._testLogin()
+        d1, d2 = self.client.storeFile("spam")
+        d1.addCallback(cbStore)
+        self.assertFailure(d2, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'PASV\r\n')
+        self.transport.clear()
+        self.client.lineReceived('227 Entering Passive Mode (%s).' %
+            (ftp.encodeHostPort('127.0.0.1', 12345),))
+        self.assertEqual(self.transport.value(), 'STOR spam\r\n')
+        self.transport.clear()
+        self.client.lineReceived(
+            '426 Transfer aborted.  Data connection closed.')
+        return defer.gatherResults([d1, d2])
+
+
+    def test_STOR(self):
+        """
+        Test the STOR command in non-passive mode.
+
+        Like L{test_passiveSTOR} but in the configuration where the server
+        establishes the data connection to the client, rather than the other
+        way around.
+        """
+        tr = proto_helpers.StringTransport()
+        self.client.passive = False
+        def generatePort(portCmd):
+            portCmd.text = 'PORT %s' % ftp.encodeHostPort('127.0.0.1', 9876)
+            portCmd.protocol.makeConnection(tr)
+
+        def cbStore(sender):
+            self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
+                (ftp.encodeHostPort('127.0.0.1', 9876),))
+            self.transport.clear()
+            self.client.lineReceived('200 PORT OK')
+            self.assertEqual(self.transport.value(), 'STOR spam\r\n')
+            self.transport.clear()
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            sender.transport.write("x" * 1000)
+            sender.finish()
+            sender.connectionLost(failure.Failure(error.ConnectionDone("")))
+            self.client.lineReceived('226 Transfer Complete.')
+
+        def cbFinish(ign):
+            self.assertEqual(tr.value(), "x" * 1000)
+
+        self.client.generatePortCommand = generatePort
+        self._testLogin()
+        d1, d2 = self.client.storeFile("spam")
+        d1.addCallback(cbStore)
+        d2.addCallback(cbFinish)
+        return defer.gatherResults([d1, d2])
+
+
+    def test_passiveLIST(self):
+        """
+        Test the LIST command in passive mode.
+
+        L{ftp.FTPClient.list} should return a Deferred which fires with the
+        protocol instance which was passed to it, after the command has
+        succeeded.
+
+        (XXX - This is a very unfortunate API; if my understanding is
+        correct, the results are always at least line-oriented, so allowing
+        a per-line parser function to be specified would make this simpler,
+        but a default implementation should really be provided which knows
+        how to deal with all the formats used in real servers, so
+        application developers never have to care about this insanity.  It
+        would also be nice to either get back a Deferred of a list of
+        filenames or to be able to consume the files as they are received
+        (which the current API does allow, but in a somewhat inconvenient
+        fashion) -exarkun)
+        """
+        def cbList(res, fileList):
+            fls = [f["filename"] for f in fileList.files]
+            expected = ["foo", "bar", "baz"]
+            expected.sort()
+            fls.sort()
+            self.assertEqual(fls, expected)
+
+        def cbConnect(host, port, factory):
+            self.assertEqual(host, '127.0.0.1')
+            self.assertEqual(port, 12345)
+            proto = factory.buildProtocol((host, port))
+            proto.makeConnection(proto_helpers.StringTransport())
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            sending = [
+                '-rw-r--r--    0 spam      egg      100 Oct 10 2006 foo\r\n',
+                '-rw-r--r--    3 spam      egg      100 Oct 10 2006 bar\r\n',
+                '-rw-r--r--    4 spam      egg      100 Oct 10 2006 baz\r\n',
+            ]
+            for i in sending:
+                proto.dataReceived(i)
+            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
+
+        self.client.connectFactory = cbConnect
+        self._testLogin()
+        fileList = ftp.FTPFileListProtocol()
+        d = self.client.list('foo/bar', fileList).addCallback(cbList, fileList)
+        self.assertEqual(self.transport.value(), 'PASV\r\n')
+        self.transport.clear()
+        self.client.lineReceived('227 Entering Passive Mode (%s).' %
+            (ftp.encodeHostPort('127.0.0.1', 12345),))
+        self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
+        self.client.lineReceived('226 Transfer Complete.')
+        return d
+
+
+    def test_LIST(self):
+        """
+        Test the LIST command in non-passive mode.
+
+        Like L{test_passiveLIST} but in the configuration where the server
+        establishes the data connection to the client, rather than the other
+        way around.
+        """
+        self.client.passive = False
+        def generatePort(portCmd):
+            portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
+            portCmd.protocol.makeConnection(proto_helpers.StringTransport())
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            sending = [
+                '-rw-r--r--    0 spam      egg      100 Oct 10 2006 foo\r\n',
+                '-rw-r--r--    3 spam      egg      100 Oct 10 2006 bar\r\n',
+                '-rw-r--r--    4 spam      egg      100 Oct 10 2006 baz\r\n',
+            ]
+            for i in sending:
+                portCmd.protocol.dataReceived(i)
+            portCmd.protocol.connectionLost(
+                failure.Failure(error.ConnectionDone("")))
+
+        def cbList(res, fileList):
+            fls = [f["filename"] for f in fileList.files]
+            expected = ["foo", "bar", "baz"]
+            expected.sort()
+            fls.sort()
+            self.assertEqual(fls, expected)
+
+        self.client.generatePortCommand = generatePort
+        self._testLogin()
+        fileList = ftp.FTPFileListProtocol()
+        d = self.client.list('foo/bar', fileList).addCallback(cbList, fileList)
+        self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
+            (ftp.encodeHostPort('127.0.0.1', 9876),))
+        self.transport.clear()
+        self.client.lineReceived('200 PORT OK')
+        self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
+        self.transport.clear()
+        self.client.lineReceived('226 Transfer Complete.')
+        return d
+
+
+    def test_failedLIST(self):
+        """
+        Test a failure in the LIST command.
+
+        L{ftp.FTPClient.list} should return a Deferred which fails with
+        L{ftp.CommandFailed} if the server reports that the given path is
+        invalid for some reason.
+        """
+        def cbConnect(host, port, factory):
+            self.assertEqual(host, '127.0.0.1')
+            self.assertEqual(port, 12345)
+            proto = factory.buildProtocol((host, port))
+            proto.makeConnection(proto_helpers.StringTransport())
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
+
+        self.client.connectFactory = cbConnect
+        self._testLogin()
+        fileList = ftp.FTPFileListProtocol()
+        d = self.client.list('foo/bar', fileList)
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'PASV\r\n')
+        self.transport.clear()
+        self.client.lineReceived('227 Entering Passive Mode (%s).' %
+            (ftp.encodeHostPort('127.0.0.1', 12345),))
+        self.assertEqual(self.transport.value(), 'LIST foo/bar\r\n')
+        self.client.lineReceived('550 foo/bar: No such file or directory')
+        return d
+
+
+    def test_NLST(self):
+        """
+        Test the NLST command in non-passive mode.
+
+        L{ftp.FTPClient.nlst} should return a Deferred which fires once the
+        listing of filenames has been delivered to the protocol passed to it.
+        """
+        self.client.passive = False
+        def generatePort(portCmd):
+            portCmd.text = 'PORT %s' % (ftp.encodeHostPort('127.0.0.1', 9876),)
+            portCmd.protocol.makeConnection(proto_helpers.StringTransport())
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            portCmd.protocol.dataReceived('foo\r\n')
+            portCmd.protocol.dataReceived('bar\r\n')
+            portCmd.protocol.dataReceived('baz\r\n')
+            portCmd.protocol.connectionLost(
+                failure.Failure(error.ConnectionDone("")))
+
+        def cbList(res, proto):
+            fls = proto.buffer.splitlines()
+            expected = ["foo", "bar", "baz"]
+            expected.sort()
+            fls.sort()
+            self.assertEqual(fls, expected)
+
+        self.client.generatePortCommand = generatePort
+        self._testLogin()
+        lstproto = _BufferingProtocol()
+        d = self.client.nlst('foo/bar', lstproto).addCallback(cbList, lstproto)
+        self.assertEqual(self.transport.value(), 'PORT %s\r\n' %
+            (ftp.encodeHostPort('127.0.0.1', 9876),))
+        self.transport.clear()
+        self.client.lineReceived('200 PORT OK')
+        self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
+        self.client.lineReceived('226 Transfer Complete.')
+        return d
+
+
+    def test_passiveNLST(self):
+        """
+        Test the NLST command in passive mode.
+
+        Like L{test_NLST} but in the configuration where the client
+        establishes the data connection to the server, rather than the other
+        way around.
+        """
+        def cbList(res, proto):
+            fls = proto.buffer.splitlines()
+            expected = ["foo", "bar", "baz"]
+            expected.sort()
+            fls.sort()
+            self.assertEqual(fls, expected)
+
+        def cbConnect(host, port, factory):
+            self.assertEqual(host, '127.0.0.1')
+            self.assertEqual(port, 12345)
+            proto = factory.buildProtocol((host, port))
+            proto.makeConnection(proto_helpers.StringTransport())
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            proto.dataReceived('foo\r\n')
+            proto.dataReceived('bar\r\n')
+            proto.dataReceived('baz\r\n')
+            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
+
+        self.client.connectFactory = cbConnect
+        self._testLogin()
+        lstproto = _BufferingProtocol()
+        d = self.client.nlst('foo/bar', lstproto).addCallback(cbList, lstproto)
+        self.assertEqual(self.transport.value(), 'PASV\r\n')
+        self.transport.clear()
+        self.client.lineReceived('227 Entering Passive Mode (%s).' %
+            (ftp.encodeHostPort('127.0.0.1', 12345),))
+        self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
+        self.client.lineReceived('226 Transfer Complete.')
+        return d
+
+
+    def test_failedNLST(self):
+        """
+        Test a failure in the NLST command.
+
+        L{ftp.FTPClient.nlst} should return a Deferred which fails with
+        L{ftp.CommandFailed} if the server reports that the given path is
+        invalid for some reason.
+        """
+        tr = proto_helpers.StringTransport()
+        def cbConnect(host, port, factory):
+            self.assertEqual(host, '127.0.0.1')
+            self.assertEqual(port, 12345)
+            proto = factory.buildProtocol((host, port))
+            proto.makeConnection(tr)
+            self.client.lineReceived(
+                '150 File status okay; about to open data connection.')
+            proto.connectionLost(failure.Failure(error.ConnectionDone("")))
+
+        self.client.connectFactory = cbConnect
+        self._testLogin()
+        lstproto = _BufferingProtocol()
+        d = self.client.nlst('foo/bar', lstproto)
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'PASV\r\n')
+        self.transport.clear()
+        self.client.lineReceived('227 Entering Passive Mode (%s).' %
+            (ftp.encodeHostPort('127.0.0.1', 12345),))
+        self.assertEqual(self.transport.value(), 'NLST foo/bar\r\n')
+        self.client.lineReceived('550 foo/bar: No such file or directory')
+        return d
+
+
+    def test_changeDirectoryDeprecated(self):
+        """
+        L{ftp.FTPClient.changeDirectory} is deprecated and the direct caller of
+        it is warned of this.
+        """
+        self._testLogin()
+        d = self.assertWarns(
+            DeprecationWarning,
+            "FTPClient.changeDirectory is deprecated in Twisted 8.2 and "
+            "newer.  Use FTPClient.cwd instead.",
+            __file__,
+            lambda: self.client.changeDirectory('.'))
+        # This is necessary to make the Deferred fire.  The Deferred needs
+        # to fire so that tearDown doesn't cause it to errback and fail this
+        # or (more likely) a later test.
+        self.client.lineReceived('250 success')
+        return d
+
+
+    def test_changeDirectory(self):
+        """
+        Test the changeDirectory method.
+
+        L{ftp.FTPClient.changeDirectory} should return a Deferred which fires
+        with C{True} if the directory change succeeded.
+        """
+        def cbCd(res):
+            self.assertEqual(res, True)
+
+        self._testLogin()
+        d = self.client.changeDirectory("bar/foo").addCallback(cbCd)
+        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
+        self.client.lineReceived('250 Requested File Action Completed OK')
+        return d
+    test_changeDirectory.suppress = [_changeDirectorySuppression]
+
+
+    def test_failedChangeDirectory(self):
+        """
+        Test a failure in the changeDirectory method.
+
+        The behaviour here is the same as a failed CWD.
+        """
+        self._testLogin()
+        d = self.client.changeDirectory("bar/foo")
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
+        self.client.lineReceived('550 bar/foo: No such file or directory')
+        return d
+    test_failedChangeDirectory.suppress = [_changeDirectorySuppression]
+
+
+    def test_strangeFailedChangeDirectory(self):
+        """
+        Test a strange failure in the changeDirectory method.
+
+        L{ftp.FTPClient.changeDirectory} is stricter than CWD as it checks
+        code 250 for success.
+        """
+        self._testLogin()
+        d = self.client.changeDirectory("bar/foo")
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'CWD bar/foo\r\n')
+        self.client.lineReceived('252 I do what I want !')
+        return d
+    test_strangeFailedChangeDirectory.suppress = [_changeDirectorySuppression]
+
+
+    def test_renameFromTo(self):
+        """
+        L{ftp.FTPClient.rename} issues I{RNFR} and I{RNTO} commands and returns
+        a L{Deferred} which fires when a file has successfully been renamed.
+        """
+        self._testLogin()
+
+        d = self.client.rename("/spam", "/ham")
+        self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
+        self.transport.clear()
+
+        fromResponse = (
+            '350 Requested file action pending further information.\r\n')
+        self.client.lineReceived(fromResponse)
+        self.assertEqual(self.transport.value(), 'RNTO /ham\r\n')
+        toResponse = (
+            '250 Requested File Action Completed OK')
+        self.client.lineReceived(toResponse)
+
+        d.addCallback(self.assertEqual, ([fromResponse], [toResponse]))
+        return d
+
+
+    def test_renameFromToEscapesPaths(self):
+        """
+        L{ftp.FTPClient.rename} issues I{RNFR} and I{RNTO} commands with paths
+        escaped according to U{http://cr.yp.to/ftp/filesystem.html}.
+        """
+        self._testLogin()
+
+        fromFile = "/foo/ba\nr/baz"
+        toFile = "/qu\nux"
+        self.client.rename(fromFile, toFile)
+        self.client.lineReceived("350 ")
+        self.client.lineReceived("250 ")
+        self.assertEqual(
+            self.transport.value(),
+            "RNFR /foo/ba\x00r/baz\r\n"
+            "RNTO /qu\x00ux\r\n")
+
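+    # Per the escaping convention referenced above
+    # (http://cr.yp.to/ftp/filesystem.html), a newline in a path is sent as a
+    # NUL byte on the wire, e.g. '/qu\nux' goes out as 'RNTO /qu\x00ux\r\n',
+    # exactly as asserted in test_renameFromToEscapesPaths.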
+
+    def test_renameFromToFailingOnFirstError(self):
+        """
+        The L{Deferred} returned by L{ftp.FTPClient.rename} is errbacked with
+        L{CommandFailed} if the I{RNFR} command receives an error response code
+        (for example, because the file does not exist).
+        """
+        self._testLogin()
+
+        d = self.client.rename("/spam", "/ham")
+        self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
+        self.transport.clear()
+
+        self.client.lineReceived('550 Requested file unavailable.\r\n')
+        # The RNTO should not execute since the RNFR failed.
+        self.assertEqual(self.transport.value(), '')
+
+        return self.assertFailure(d, ftp.CommandFailed)
+
+
+    def test_renameFromToFailingOnRenameTo(self):
+        """
+        The L{Deferred} returned by L{ftp.FTPClient.rename} is errbacked with
+        L{CommandFailed} if the I{RNTO} command receives an error response code
+        (for example, because the destination directory does not exist).
+        """
+        self._testLogin()
+
+        d = self.client.rename("/spam", "/ham")
+        self.assertEqual(self.transport.value(), 'RNFR /spam\r\n')
+        self.transport.clear()
+
+        self.client.lineReceived('350 Requested file action pending further information.\r\n')
+        self.assertEqual(self.transport.value(), 'RNTO /ham\r\n')
+        self.client.lineReceived('550 Requested file unavailable.\r\n')
+        return self.assertFailure(d, ftp.CommandFailed)
+
+
+    def test_makeDirectory(self):
+        """
+        L{ftp.FTPClient.makeDirectory} issues a I{MKD} command and returns a
+        L{Deferred} which is called back with the server's response if the
+        directory is created.
+        """
+        self._testLogin()
+
+        d = self.client.makeDirectory("/spam")
+        self.assertEqual(self.transport.value(), 'MKD /spam\r\n')
+        self.client.lineReceived('257 "/spam" created.')
+        return d.addCallback(self.assertEqual, ['257 "/spam" created.'])
+
+
+    def test_makeDirectoryPathEscape(self):
+        """
+        L{ftp.FTPClient.makeDirectory} escapes the path name it sends according
+        to U{http://cr.yp.to/ftp/filesystem.html}.
+        """
+        self._testLogin()
+        d = self.client.makeDirectory("/sp\nam")
+        self.assertEqual(self.transport.value(), 'MKD /sp\x00am\r\n')
+        # This is necessary to make the Deferred fire.  The Deferred needs
+        # to fire so that tearDown doesn't cause it to errback and fail this
+        # or (more likely) a later test.
+        self.client.lineReceived('257 win')
+        return d
+
+
+    def test_failedMakeDirectory(self):
+        """
+        L{ftp.FTPClient.makeDirectory} returns a L{Deferred} which is errbacked
+        with L{CommandFailed} if the server returns an error response code.
+        """
+        self._testLogin()
+
+        d = self.client.makeDirectory("/spam")
+        self.assertEqual(self.transport.value(), 'MKD /spam\r\n')
+        self.client.lineReceived('550 PERMISSION DENIED')
+        return self.assertFailure(d, ftp.CommandFailed)
+
+
+    def test_getDirectory(self):
+        """
+        Test the getDirectory method.
+
+        L{ftp.FTPClient.getDirectory} should return a Deferred which fires with
+        the current directory on the server. It wraps the PWD command.
+        """
+        def cbGet(res):
+            self.assertEqual(res, "/bar/baz")
+
+        self._testLogin()
+        d = self.client.getDirectory().addCallback(cbGet)
+        self.assertEqual(self.transport.value(), 'PWD\r\n')
+        self.client.lineReceived('257 "/bar/baz"')
+        return d
+
+
+    def test_failedGetDirectory(self):
+        """
+        Test a failure in the getDirectory method.
+
+        The behaviour should be the same as a failed PWD.
+        """
+        self._testLogin()
+        d = self.client.getDirectory()
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'PWD\r\n')
+        self.client.lineReceived('550 /bar/baz: No such file or directory')
+        return d
+
+
+    def test_anotherFailedGetDirectory(self):
+        """
+        Test a different failure in the getDirectory method.
+
+        The working directory in the response must be quoted for the response
+        to be parsed, so an unquoted response results in an error.
+        """
+        self._testLogin()
+        d = self.client.getDirectory()
+        self.assertFailure(d, ftp.CommandFailed)
+        self.assertEqual(self.transport.value(), 'PWD\r\n')
+        self.client.lineReceived('257 /bar/baz')
+        return d
+
+
+    def test_removeFile(self):
+        """
+        L{ftp.FTPClient.removeFile} sends a I{DELE} command to the server for
+        the indicated file and returns a Deferred which fires after the server
+        sends a 250 response code.
+        """
+        self._testLogin()
+        d = self.client.removeFile("/tmp/test")
+        self.assertEqual(self.transport.value(), 'DELE /tmp/test\r\n')
+        response = '250 Requested file action okay, completed.'
+        self.client.lineReceived(response)
+        return d.addCallback(self.assertEqual, [response])
+
+
+    def test_failedRemoveFile(self):
+        """
+        If the server returns a response code other than 250 in response to a
+        I{DELE} sent by L{ftp.FTPClient.removeFile}, the L{Deferred} returned
+        by C{removeFile} is errbacked with a L{Failure} wrapping a
+        L{CommandFailed}.
+        """
+        self._testLogin()
+        d = self.client.removeFile("/tmp/test")
+        self.assertEqual(self.transport.value(), 'DELE /tmp/test\r\n')
+        response = '501 Syntax error in parameters or arguments.'
+        self.client.lineReceived(response)
+        d = self.assertFailure(d, ftp.CommandFailed)
+        d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
+        return d
+
+
+    def test_unparsableRemoveFileResponse(self):
+        """
+        If the server returns a response line which cannot be parsed, the
+        L{Deferred} returned by L{ftp.FTPClient.removeFile} is errbacked with a
+        L{BadResponse} containing the response.
+        """
+        self._testLogin()
+        d = self.client.removeFile("/tmp/test")
+        response = '765 blah blah blah'
+        self.client.lineReceived(response)
+        d = self.assertFailure(d, ftp.BadResponse)
+        d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
+        return d
+
+
+    def test_multilineRemoveFileResponse(self):
+        """
+        If the server returns multiple response lines, the L{Deferred} returned
+        by L{ftp.FTPClient.removeFile} is still fired with a true value if the
+        ultimate response code is 250.
+        """
+        self._testLogin()
+        d = self.client.removeFile("/tmp/test")
+        response = ['250-perhaps a progress report',
+                    '250 okay']
+        map(self.client.lineReceived, response)
+        return d.addCallback(self.assertTrue)
+
+
+    def test_removeDirectory(self):
+        """
+        L{ftp.FTPClient.removeDirectory} sends a I{RMD} command to the server
+        for the indicated directory and returns a Deferred which fires after
+        the server sends a 250 response code.
+        """
+        self._testLogin()
+        d = self.client.removeDirectory('/tmp/test')
+        self.assertEqual(self.transport.value(), 'RMD /tmp/test\r\n')
+        response = '250 Requested file action okay, completed.'
+        self.client.lineReceived(response)
+        return d.addCallback(self.assertEqual, [response])
+
+
+    def test_failedRemoveDirectory(self):
+        """
+        If the server returns a response code other than 250 in response to a
+        I{RMD} sent by L{ftp.FTPClient.removeDirectory}, the L{Deferred}
+        returned by C{removeDirectory} is errbacked with a L{Failure} wrapping
+        a L{CommandFailed}.
+        """
+        self._testLogin()
+        d = self.client.removeDirectory("/tmp/test")
+        self.assertEqual(self.transport.value(), 'RMD /tmp/test\r\n')
+        response = '501 Syntax error in parameters or arguments.'
+        self.client.lineReceived(response)
+        d = self.assertFailure(d, ftp.CommandFailed)
+        d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
+        return d
+
+
+    def test_unparsableRemoveDirectoryResponse(self):
+        """
+        If the server returns a response line which cannot be parsed, the
+        L{Deferred} returned by L{ftp.FTPClient.removeDirectory} is errbacked
+        with a L{BadResponse} containing the response.
+        """
+        self._testLogin()
+        d = self.client.removeDirectory("/tmp/test")
+        response = '765 blah blah blah'
+        self.client.lineReceived(response)
+        d = self.assertFailure(d, ftp.BadResponse)
+        d.addCallback(lambda exc: self.assertEqual(exc.args, ([response],)))
+        return d
+
+
+    def test_multilineRemoveDirectoryResponse(self):
+        """
+        If the server returns multiple response lines, the L{Deferred} returned
+        by L{ftp.FTPClient.removeDirectory} is still fired with a true value
+        if the ultimate response code is 250.
+        """
+        self._testLogin()
+        d = self.client.removeDirectory("/tmp/test")
+        response = ['250-perhaps a progress report',
+                    '250 okay']
+        map(self.client.lineReceived, response)
+        return d.addCallback(self.assertTrue)
+
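The directory- and file-management tests above all follow one pattern: each FTPClient method writes a single control-channel command (MKD, RNFR/RNTO, DELE, RMD, PWD) and returns a Deferred that fires with the parsed reply or errbacks with CommandFailed/BadResponse. As a rough sketch only -- the host, credentials, connection setup via ClientCreator and the final quit() call are illustrative assumptions, not part of this patch -- application code might chain the same methods like this:

    from twisted.internet import reactor
    from twisted.internet.protocol import ClientCreator
    from twisted.protocols import ftp

    def connected(client):
        # each call sends one command and returns a Deferred for its reply
        d = client.makeDirectory("/spam")                              # MKD /spam
        d.addCallback(lambda ignored: client.rename("/spam", "/ham"))  # RNFR/RNTO
        d.addCallback(lambda ignored: client.removeDirectory("/ham"))  # RMD /ham
        d.addCallback(lambda ignored: client.quit())
        d.addBoth(lambda ignored: reactor.stop())
        return d

    creator = ClientCreator(reactor, ftp.FTPClient, "bob", "secret")
    creator.connectTCP("ftp.example.com", 21).addCallback(connected)
    reactor.run()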
+
+
+class FTPClientBasicTests(unittest.TestCase):
+
+    def testGreeting(self):
+        # The first response is captured as a greeting.
+        ftpClient = ftp.FTPClientBasic()
+        ftpClient.lineReceived('220 Imaginary FTP.')
+        self.assertEqual(['220 Imaginary FTP.'], ftpClient.greeting)
+
+    def testResponseWithNoMessage(self):
+        # Responses with no message are still valid, i.e. three digits followed
+        # by a space is a complete response.
+        ftpClient = ftp.FTPClientBasic()
+        ftpClient.lineReceived('220 ')
+        self.assertEqual(['220 '], ftpClient.greeting)
+
+    def testMultilineResponse(self):
+        ftpClient = ftp.FTPClientBasic()
+        ftpClient.transport = proto_helpers.StringTransport()
+        ftpClient.lineReceived('220 Imaginary FTP.')
+
+        # Queue (and send) a dummy command, and set up a callback to capture the
+        # result
+        deferred = ftpClient.queueStringCommand('BLAH')
+        result = []
+        deferred.addCallback(result.append)
+        deferred.addErrback(self.fail)
+
+        # Send the first line of a multiline response.
+        ftpClient.lineReceived('210-First line.')
+        self.assertEqual([], result)
+
+        # Send a second line, again prefixed with "nnn-".
+        ftpClient.lineReceived('123-Second line.')
+        self.assertEqual([], result)
+
+        # Send a plain line of text, no prefix.
+        ftpClient.lineReceived('Just some text.')
+        self.assertEqual([], result)
+
+        # Now send a short (less than 4 chars) line.
+        ftpClient.lineReceived('Hi')
+        self.assertEqual([], result)
+
+        # Now send an empty line.
+        ftpClient.lineReceived('')
+        self.assertEqual([], result)
+
+        # And a line with 3 digits in it, and nothing else.
+        ftpClient.lineReceived('321')
+        self.assertEqual([], result)
+
+        # Now finish it.
+        ftpClient.lineReceived('210 Done.')
+        self.assertEqual(
+            ['210-First line.',
+             '123-Second line.',
+             'Just some text.',
+             'Hi',
+             '',
+             '321',
+             '210 Done.'], result[0])
+
+
+    def test_noPasswordGiven(self):
+        """
+        Passing None as the password avoids sending the PASS command.
+        """
+        # Create a client, and give it a greeting.
+        ftpClient = ftp.FTPClientBasic()
+        ftpClient.transport = proto_helpers.StringTransport()
+        ftpClient.lineReceived('220 Welcome to Imaginary FTP.')
+
+        # Queue a login with no password
+        ftpClient.queueLogin('bob', None)
+        self.assertEqual('USER bob\r\n', ftpClient.transport.value())
+
+        # Clear the test buffer, acknowledge the USER command.
+        ftpClient.transport.clear()
+        ftpClient.lineReceived('200 Hello bob.')
+
+        # The client shouldn't have sent anything more (i.e. it shouldn't have
+        # sent a PASS command).
+        self.assertEqual('', ftpClient.transport.value())
+
+
+    def test_noPasswordNeeded(self):
+        """
+        Receiving a 230 response to USER prevents PASS from being sent.
+        """
+        # Create a client, and give it a greeting.
+        ftpClient = ftp.FTPClientBasic()
+        ftpClient.transport = proto_helpers.StringTransport()
+        ftpClient.lineReceived('220 Welcome to Imaginary FTP.')
+
+        # Queue a login with no password
+        ftpClient.queueLogin('bob', 'secret')
+        self.assertEqual('USER bob\r\n', ftpClient.transport.value())
+
+        # Clear the test buffer, acknowledge the USER command with a 230
+        # response code.
+        ftpClient.transport.clear()
+        ftpClient.lineReceived('230 Hello bob.  No password needed.')
+
+        # The client shouldn't have sent anything more (i.e. it shouldn't have
+        # sent a PASS command).
+        self.assertEqual('', ftpClient.transport.value())
+
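A compact restatement of what the FTPClientBasic tests above establish: the first complete reply is stored as the greeting, and a queued command's Deferred fires only when a terminating "NNN " line arrives, with every buffered line handed to the callback. The FEAT command below is just an arbitrary example; only the behaviour the tests demonstrate is assumed.

    from twisted.protocols import ftp
    from twisted.test import proto_helpers

    client = ftp.FTPClientBasic()
    client.transport = proto_helpers.StringTransport()
    client.lineReceived('220 Imaginary FTP.')   # captured as the greeting

    result = []
    d = client.queueStringCommand('FEAT')       # command written to the transport
    d.addCallback(result.append)

    client.lineReceived('211-Features:')        # intermediate lines are buffered
    client.lineReceived(' SIZE')
    client.lineReceived('211 End')              # the 'NNN ' line completes the reply
    # result[0] now holds all three buffered reply lines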
+
+
+class PathHandling(unittest.TestCase):
+    def testNormalizer(self):
+        for inp, outp in [('a', ['a']),
+                          ('/a', ['a']),
+                          ('/', []),
+                          ('a/b/c', ['a', 'b', 'c']),
+                          ('/a/b/c', ['a', 'b', 'c']),
+                          ('/a/', ['a']),
+                          ('a/', ['a'])]:
+            self.assertEqual(ftp.toSegments([], inp), outp)
+
+        for inp, outp in [('b', ['a', 'b']),
+                          ('b/', ['a', 'b']),
+                          ('/b', ['b']),
+                          ('/b/', ['b']),
+                          ('b/c', ['a', 'b', 'c']),
+                          ('b/c/', ['a', 'b', 'c']),
+                          ('/b/c', ['b', 'c']),
+                          ('/b/c/', ['b', 'c'])]:
+            self.assertEqual(ftp.toSegments(['a'], inp), outp)
+
+        for inp, outp in [('//', []),
+                          ('//a', ['a']),
+                          ('a//', ['a']),
+                          ('a//b', ['a', 'b'])]:
+            self.assertEqual(ftp.toSegments([], inp), outp)
+
+        for inp, outp in [('//', []),
+                          ('//b', ['b']),
+                          ('b//c', ['a', 'b', 'c'])]:
+            self.assertEqual(ftp.toSegments(['a'], inp), outp)
+
+        for inp, outp in [('..', []),
+                          ('../', []),
+                          ('a/..', ['x']),
+                          ('/a/..', []),
+                          ('/a/b/..', ['a']),
+                          ('/a/b/../', ['a']),
+                          ('/a/b/../c', ['a', 'c']),
+                          ('/a/b/../c/', ['a', 'c']),
+                          ('/a/b/../../c', ['c']),
+                          ('/a/b/../../c/', ['c']),
+                          ('/a/b/../../c/..', []),
+                          ('/a/b/../../c/../', [])]:
+            self.assertEqual(ftp.toSegments(['x'], inp), outp)
+
+        for inp in ['..', '../', 'a/../..', 'a/../../',
+                    '/..', '/../', '/a/../..', '/a/../../',
+                    '/a/b/../../..']:
+            self.assertRaises(ftp.InvalidPath, ftp.toSegments, [], inp)
+
+        for inp in ['../..', '../../', '../a/../..']:
+            self.assertRaises(ftp.InvalidPath, ftp.toSegments, ['x'], inp)
+
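The normalization rules exercised above, restated with values taken directly from the assertions: the first argument to ftp.toSegments is the current working directory as a list of segments, the second is the client-supplied path, and paths that would climb past the root raise InvalidPath.

    from twisted.protocols import ftp

    assert ftp.toSegments([], '/a/b/c') == ['a', 'b', 'c']
    assert ftp.toSegments(['a'], 'b/c') == ['a', 'b', 'c']   # relative to cwd ['a']
    assert ftp.toSegments(['x'], 'a/..') == ['x']            # '..' collapses one level
    try:
        ftp.toSegments([], '..')                             # cannot escape the root
    except ftp.InvalidPath:
        pass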
+
+class BaseFTPRealmTests(unittest.TestCase):
+    """
+    Tests for L{ftp.BaseFTPRealm}, a base class to help define L{IFTPShell}
+    realms with different user home directory policies.
+    """
+    def test_interface(self):
+        """
+        L{ftp.BaseFTPRealm} implements L{IRealm}.
+        """
+        self.assertTrue(verifyClass(IRealm, ftp.BaseFTPRealm))
+
+
+    def test_getHomeDirectory(self):
+        """
+        L{ftp.BaseFTPRealm} calls its C{getHomeDirectory} method with the
+        avatarId being requested to determine the home directory for that
+        avatar.
+        """
+        result = filepath.FilePath(self.mktemp())
+        avatars = []
+        class TestRealm(ftp.BaseFTPRealm):
+            def getHomeDirectory(self, avatarId):
+                avatars.append(avatarId)
+                return result
+
+        realm = TestRealm(self.mktemp())
+        iface, avatar, logout = realm.requestAvatar(
+            "alice@example.com", None, ftp.IFTPShell)
+        self.assertIsInstance(avatar, ftp.FTPShell)
+        self.assertEqual(avatar.filesystemRoot, result)
+
+
+    def test_anonymous(self):
+        """
+        L{ftp.BaseFTPRealm} returns an L{ftp.FTPAnonymousShell} instance for
+        anonymous avatar requests.
+        """
+        anonymous = self.mktemp()
+        realm = ftp.BaseFTPRealm(anonymous)
+        iface, avatar, logout = realm.requestAvatar(
+            checkers.ANONYMOUS, None, ftp.IFTPShell)
+        self.assertIsInstance(avatar, ftp.FTPAnonymousShell)
+        self.assertEqual(avatar.filesystemRoot, filepath.FilePath(anonymous))
+
+
+    def test_notImplemented(self):
+        """
+        L{ftp.BaseFTPRealm.getHomeDirectory} should be overridden by a subclass;
+        the base implementation raises L{NotImplementedError}.
+        """
+        realm = ftp.BaseFTPRealm(self.mktemp())
+        self.assertRaises(NotImplementedError, realm.getHomeDirectory, object())
+
+
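A sketch of the subclassing pattern these tests describe, with an illustrative on-disk layout (the /srv/ftp paths are assumptions): BaseFTPRealm takes the anonymous root in its initializer and asks getHomeDirectory() for each authenticated avatar, wrapping the result in an FTPShell.

    from twisted.protocols import ftp
    from twisted.python import filepath

    class FixedHomeRealm(ftp.BaseFTPRealm):
        def getHomeDirectory(self, avatarId):
            # every authenticated user gets a child of /srv/ftp
            return filepath.FilePath('/srv/ftp').child(avatarId)

    realm = FixedHomeRealm('/srv/ftp/anonymous')
    iface, shell, logout = realm.requestAvatar('alice', None, ftp.IFTPShell)
    # shell is an ftp.FTPShell rooted at /srv/ftp/alice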
+
+class FTPRealmTestCase(unittest.TestCase):
+    """
+    Tests for L{ftp.FTPRealm}.
+    """
+    def test_getHomeDirectory(self):
+        """
+        L{ftp.FTPRealm} accepts an extra directory to its initializer and treats
+        the avatarId passed to L{ftp.FTPRealm.getHomeDirectory} as a single path
+        segment to construct a child of that directory.
+        """
+        base = '/path/to/home'
+        realm = ftp.FTPRealm(self.mktemp(), base)
+        home = realm.getHomeDirectory('alice@example.com')
+        self.assertEqual(
+            filepath.FilePath(base).child('alice@example.com'), home)
+
+
+    def test_defaultHomeDirectory(self):
+        """
+        If no extra directory is passed to L{ftp.FTPRealm}, it uses C{"/home"}
+        as the base directory containing all user home directories.
+        """
+        realm = ftp.FTPRealm(self.mktemp())
+        home = realm.getHomeDirectory('alice@example.com')
+        self.assertEqual(filepath.FilePath('/home/alice@example.com'), home)
+
+
+
+class SystemFTPRealmTests(unittest.TestCase):
+    """
+    Tests for L{ftp.SystemFTPRealm}.
+    """
+    skip = nonPOSIXSkip
+
+    def test_getHomeDirectory(self):
+        """
+        L{ftp.SystemFTPRealm.getHomeDirectory} treats the avatarId passed to it
+        as a username in the underlying platform and returns that account's home
+        directory.
+        """
+        # Try to pick a username that will have a home directory.
+        user = getpass.getuser()
+
+        # Try to find their home directory in a different way than used by the
+        # implementation.  Maybe this is silly and can only introduce spurious
+        # failures due to system-specific configurations.
+        import pwd
+        expected = pwd.getpwnam(user).pw_dir
+
+        realm = ftp.SystemFTPRealm(self.mktemp())
+        home = realm.getHomeDirectory(user)
+        self.assertEqual(home, filepath.FilePath(expected))
+
+
+    def test_noSuchUser(self):
+        """
+        L{ftp.SystemFTPRealm.getHomeDirectory} raises L{UnauthorizedLogin} when
+        passed a username which has no corresponding home directory in the
+        system's accounts database.
+        """
+        user = insecureRandom(4).encode('hex')
+        realm = ftp.SystemFTPRealm(self.mktemp())
+        self.assertRaises(UnauthorizedLogin, realm.getHomeDirectory, user)
+
+
+
+class ErrnoToFailureTestCase(unittest.TestCase):
+    """
+    Tests for L{ftp.errnoToFailure} errno checking.
+    """
+
+    def test_notFound(self):
+        """
+        C{errno.ENOENT} should be translated to L{ftp.FileNotFoundError}.
+        """
+        d = ftp.errnoToFailure(errno.ENOENT, "foo")
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+    def test_permissionDenied(self):
+        """
+        C{errno.EPERM} should be translated to L{ftp.PermissionDeniedError}.
+        """
+        d = ftp.errnoToFailure(errno.EPERM, "foo")
+        return self.assertFailure(d, ftp.PermissionDeniedError)
+
+
+    def test_accessDenied(self):
+        """
+        C{errno.EACCES} should be translated to L{ftp.PermissionDeniedError}.
+        """
+        d = ftp.errnoToFailure(errno.EACCES, "foo")
+        return self.assertFailure(d, ftp.PermissionDeniedError)
+
+
+    def test_notDirectory(self):
+        """
+        C{errno.ENOTDIR} should be translated to L{ftp.IsNotADirectoryError}.
+        """
+        d = ftp.errnoToFailure(errno.ENOTDIR, "foo")
+        return self.assertFailure(d, ftp.IsNotADirectoryError)
+
+
+    def test_fileExists(self):
+        """
+        C{errno.EEXIST} should be translated to L{ftp.FileExistsError}.
+        """
+        d = ftp.errnoToFailure(errno.EEXIST, "foo")
+        return self.assertFailure(d, ftp.FileExistsError)
+
+
+    def test_isDirectory(self):
+        """
+        C{errno.EISDIR} should be translated to L{ftp.IsADirectoryError}.
+        """
+        d = ftp.errnoToFailure(errno.EISDIR, "foo")
+        return self.assertFailure(d, ftp.IsADirectoryError)
+
+
+    def test_passThrough(self):
+        """
+        If an unknown errno is passed to L{ftp.errnoToFailure}, it should let
+        the originating exception pass through.
+        """
+        try:
+            raise RuntimeError("bar")
+        except:
+            d = ftp.errnoToFailure(-1, "foo")
+            return self.assertFailure(d, RuntimeError)
+
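The call pattern the errno tests pin down, as a minimal sketch (the removePath helper is hypothetical, not part of this patch): inside an except block, errnoToFailure() turns a recognised errno into an already-failed Deferred carrying the matching FTP error, and re-raises anything it does not recognise.

    import os
    from twisted.internet import defer
    from twisted.protocols import ftp

    def removePath(path):
        # illustrative shell-style helper
        try:
            os.remove(path)
        except (IOError, OSError) as e:
            # ENOENT -> FileNotFoundError, EPERM/EACCES -> PermissionDeniedError, ...
            return ftp.errnoToFailure(e.errno, path)
        return defer.succeed(None)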
+
+
+class AnonymousFTPShellTestCase(unittest.TestCase):
+    """
+    Test anonymous shell properties.
+    """
+
+    def test_anonymousWrite(self):
+        """
+        Check that L{ftp.FTPAnonymousShell} returns an error when trying to
+        open it in write mode.
+        """
+        shell = ftp.FTPAnonymousShell('')
+        d = shell.openForWriting(('foo',))
+        self.assertFailure(d, ftp.PermissionDeniedError)
+        return d
+
+
+
+class IFTPShellTestsMixin:
+    """
+    Generic tests for the C{IFTPShell} interface.
+    """
+
+    def directoryExists(self, path):
+        """
+        Test if the directory exists at C{path}.
+
+        @param path: the relative path to check.
+        @type path: C{str}.
+
+        @return: C{True} if C{path} exists and is a directory, C{False} if
+            it's not the case
+        @rtype: C{bool}
+        """
+        raise NotImplementedError()
+
+
+    def createDirectory(self, path):
+        """
+        Create a directory in C{path}.
+
+        @param path: the relative path of the directory to create, with one
+            segment.
+        @type path: C{str}
+        """
+        raise NotImplementedError()
+
+
+    def fileExists(self, path):
+        """
+        Test if the file exists at C{path}.
+
+        @param path: the relative path to check.
+        @type path: C{str}.
+
+        @return: C{True} if C{path} exists and is a file, C{False} if it's not
+            the case.
+        @rtype: C{bool}
+        """
+        raise NotImplementedError()
+
+
+    def createFile(self, path, fileContent=''):
+        """
+        Create a file named C{path} with some content.
+
+        @param path: the relative path of the file to create, without
+            directory.
+        @type path: C{str}
+
+        @param fileContent: the content of the file.
+        @type fileContent: C{str}
+        """
+        raise NotImplementedError()
+
+
+    def test_createDirectory(self):
+        """
+        C{directoryExists} should report correctly about directory existence,
+        and C{createDirectory} should create a directory detectable by
+        C{directoryExists}.
+        """
+        self.assertFalse(self.directoryExists('bar'))
+        self.createDirectory('bar')
+        self.assertTrue(self.directoryExists('bar'))
+
+
+    def test_createFile(self):
+        """
+        C{fileExists} should report correctly about file existence, and
+        C{createFile} should create a file detectable by C{fileExists}.
+        """
+        self.assertFalse(self.fileExists('file.txt'))
+        self.createFile('file.txt')
+        self.assertTrue(self.fileExists('file.txt'))
+
+
+    def test_makeDirectory(self):
+        """
+        Create a directory and check that it ends up in the filesystem.
+        """
+        d = self.shell.makeDirectory(('foo',))
+        def cb(result):
+            self.assertTrue(self.directoryExists('foo'))
+        return d.addCallback(cb)
+
+
+    def test_makeDirectoryError(self):
+        """
+        Creating a directory that already exists should fail with a
+        C{ftp.FileExistsError}.
+        """
+        self.createDirectory('foo')
+        d = self.shell.makeDirectory(('foo',))
+        return self.assertFailure(d, ftp.FileExistsError)
+
+
+    def test_removeDirectory(self):
+        """
+        Try to remove a directory and check it's removed from the filesystem.
+        """
+        self.createDirectory('bar')
+        d = self.shell.removeDirectory(('bar',))
+        def cb(result):
+            self.assertFalse(self.directoryExists('bar'))
+        return d.addCallback(cb)
+
+
+    def test_removeDirectoryOnFile(self):
+        """
+        removeDirectory should not work on a file and should fail with a
+        C{ftp.IsNotADirectoryError}.
+        """
+        self.createFile('file.txt')
+        d = self.shell.removeDirectory(('file.txt',))
+        return self.assertFailure(d, ftp.IsNotADirectoryError)
+
+
+    def test_removeNotExistingDirectory(self):
+        """
+        Removing a directory that doesn't exist should fail with a
+        C{ftp.FileNotFoundError}.
+        """
+        d = self.shell.removeDirectory(('bar',))
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+    def test_removeFile(self):
+        """
+        Try to remove a file and check it's removed from the filesystem.
+        """
+        self.createFile('file.txt')
+        d = self.shell.removeFile(('file.txt',))
+        def cb(res):
+            self.assertFalse(self.fileExists('file.txt'))
+        d.addCallback(cb)
+        return d
+
+
+    def test_removeFileOnDirectory(self):
+        """
+        removeFile should not work on a directory.
+        """
+        self.createDirectory('ned')
+        d = self.shell.removeFile(('ned',))
+        return self.assertFailure(d, ftp.IsADirectoryError)
+
+
+    def test_removeNotExistingFile(self):
+        """
+        Try to remove a nonexistent file, and check that it raises a
+        L{ftp.FileNotFoundError}.
+        """
+        d = self.shell.removeFile(('foo',))
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+    def test_list(self):
+        """
+        Check the output of the list method.
+        """
+        self.createDirectory('ned')
+        self.createFile('file.txt')
+        d = self.shell.list(('.',))
+        def cb(l):
+            l.sort()
+            self.assertEqual(l,
+                [('file.txt', []), ('ned', [])])
+        return d.addCallback(cb)
+
+
+    def test_listWithStat(self):
+        """
+        Check the output of list with the requested stats.
+        """
+        self.createDirectory('ned')
+        self.createFile('file.txt')
+        d = self.shell.list(('.',), ('size', 'permissions',))
+        def cb(l):
+            l.sort()
+            self.assertEqual(len(l), 2)
+            self.assertEqual(l[0][0], 'file.txt')
+            self.assertEqual(l[1][0], 'ned')
+            # Size and permissions are reported differently between platforms
+            # so just check they are present
+            self.assertEqual(len(l[0][1]), 2)
+            self.assertEqual(len(l[1][1]), 2)
+        return d.addCallback(cb)
+
+
+    def test_listWithInvalidStat(self):
+        """
+        Querying an invalid stat should result in an C{AttributeError}.
+        """
+        self.createDirectory('ned')
+        d = self.shell.list(('.',), ('size', 'whateverstat',))
+        return self.assertFailure(d, AttributeError)
+
+
+    def test_listFile(self):
+        """
+        Check the output of the list method on a file.
+        """
+        self.createFile('file.txt')
+        d = self.shell.list(('file.txt',))
+        def cb(l):
+            l.sort()
+            self.assertEqual(l,
+                [('file.txt', [])])
+        return d.addCallback(cb)
+
+
+    def test_listNotExistingDirectory(self):
+        """
+        list on a directory that doesn't exist should fail with a
+        L{ftp.FileNotFoundError}.
+        """
+        d = self.shell.list(('foo',))
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+    def test_access(self):
+        """
+        Try to access a resource.
+        """
+        self.createDirectory('ned')
+        d = self.shell.access(('ned',))
+        return d
+
+
+    def test_accessNotFound(self):
+        """
+        access should fail on a resource that doesn't exist.
+        """
+        d = self.shell.access(('foo',))
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+    def test_openForReading(self):
+        """
+        Check that openForReading returns an object providing C{ftp.IReadFile}.
+        """
+        self.createFile('file.txt')
+        d = self.shell.openForReading(('file.txt',))
+        def cb(res):
+            self.assertTrue(ftp.IReadFile.providedBy(res))
+        d.addCallback(cb)
+        return d
+
+
+    def test_openForReadingNotFound(self):
+        """
+        openForReading should fail with a C{ftp.FileNotFoundError} on a file
+        that doesn't exist.
+        """
+        d = self.shell.openForReading(('ned',))
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+    def test_openForReadingOnDirectory(self):
+        """
+        openForReading should not work on a directory.
+        """
+        self.createDirectory('ned')
+        d = self.shell.openForReading(('ned',))
+        return self.assertFailure(d, ftp.IsADirectoryError)
+
+
+    def test_openForWriting(self):
+        """
+        Check that openForWriting returns an object providing C{ftp.IWriteFile}.
+        """
+        d = self.shell.openForWriting(('foo',))
+        def cb1(res):
+            self.assertTrue(ftp.IWriteFile.providedBy(res))
+            return res.receive().addCallback(cb2)
+        def cb2(res):
+            self.assertTrue(IConsumer.providedBy(res))
+        d.addCallback(cb1)
+        return d
+
+
+    def test_openForWritingExistingDirectory(self):
+        """
+        openForWriting should not be able to open a directory that already
+        exists.
+        """
+        self.createDirectory('ned')
+        d = self.shell.openForWriting(('ned',))
+        return self.assertFailure(d, ftp.IsADirectoryError)
+
+
+    def test_openForWritingInNotExistingDirectory(self):
+        """
+        openForWriting should fail with a L{ftp.FileNotFoundError} if you specify
+        a file in a directory that doesn't exist.
+        """
+        self.createDirectory('ned')
+        d = self.shell.openForWriting(('ned', 'idonotexist', 'foo'))
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+    def test_statFile(self):
+        """
+        Check the output of the stat method on a file.
+        """
+        fileContent = 'wobble\n'
+        self.createFile('file.txt', fileContent)
+        d = self.shell.stat(('file.txt',), ('size', 'directory'))
+        def cb(res):
+            self.assertEqual(res[0], len(fileContent))
+            self.assertFalse(res[1])
+        d.addCallback(cb)
+        return d
+
+
+    def test_statDirectory(self):
+        """
+        Check the output of the stat method on a directory.
+        """
+        self.createDirectory('ned')
+        d = self.shell.stat(('ned',), ('size', 'directory'))
+        def cb(res):
+            self.assertTrue(res[1])
+        d.addCallback(cb)
+        return d
+
+
+    def test_statOwnerGroup(self):
+        """
+        Check the owner and group stats.
+        """
+        self.createDirectory('ned')
+        d = self.shell.stat(('ned',), ('owner', 'group'))
+        def cb(res):
+            self.assertEqual(len(res), 2)
+        d.addCallback(cb)
+        return d
+
+
+    def test_statNotExisting(self):
+        """
+        stat should fail with L{ftp.FileNotFoundError} on a file that doesn't
+        exist.
+        """
+        d = self.shell.stat(('foo',), ('size', 'directory'))
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+    def test_invalidStat(self):
+        """
+        Querying an invalid stat should result in an C{AttributeError}.
+        """
+        self.createDirectory('ned')
+        d = self.shell.stat(('ned',), ('size', 'whateverstat'))
+        return self.assertFailure(d, AttributeError)
+
+
+    def test_rename(self):
+        """
+        Try to rename a directory.
+        """
+        self.createDirectory('ned')
+        d = self.shell.rename(('ned',), ('foo',))
+        def cb(res):
+            self.assertTrue(self.directoryExists('foo'))
+            self.assertFalse(self.directoryExists('ned'))
+        return d.addCallback(cb)
+
+
+    def test_renameNotExisting(self):
+        """
+        Renaming a directory that doesn't exist should fail with
+        L{ftp.FileNotFoundError}.
+        """
+        d = self.shell.rename(('foo',), ('bar',))
+        return self.assertFailure(d, ftp.FileNotFoundError)
+
+
+
+class FTPShellTestCase(unittest.TestCase, IFTPShellTestsMixin):
+    """
+    Tests for the C{ftp.FTPShell} object.
+    """
+
+    def setUp(self):
+        """
+        Create a root directory and instantiate a shell.
+        """
+        self.root = filepath.FilePath(self.mktemp())
+        self.root.createDirectory()
+        self.shell = ftp.FTPShell(self.root)
+
+
+    def directoryExists(self, path):
+        """
+        Test if the directory exists at C{path}.
+        """
+        return self.root.child(path).isdir()
+
+
+    def createDirectory(self, path):
+        """
+        Create a directory in C{path}.
+        """
+        return self.root.child(path).createDirectory()
+
+
+    def fileExists(self, path):
+        """
+        Test if the file exists at C{path}.
+        """
+        return self.root.child(path).isfile()
+
+
+    def createFile(self, path, fileContent=''):
+        """
+        Create a file named C{path} with some content.
+        """
+        return self.root.child(path).setContent(fileContent)
+
+
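FTPShell is the concrete implementation the mixin above is run against; used directly it looks like this rough sketch (the temporary directory stands in for whatever root a server would be configured with, and paths are always tuples of segments):

    import tempfile
    from twisted.python import filepath
    from twisted.protocols import ftp

    root = filepath.FilePath(tempfile.mkdtemp())
    shell = ftp.FTPShell(root)

    d = shell.makeDirectory(('incoming',))
    d.addCallback(lambda ignored: shell.list(('.',)))
    d.addCallback(lambda entries: [name for (name, stats) in entries])
    # eventually fires with ['incoming']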
+
+class TestConsumer(object):
+    """
+    A simple consumer for tests. It only works with non-streaming producers.
+
+    @ivar producer: an object providing
+        L{twisted.internet.interfaces.IPullProducer}.
+    """
+
+    implements(IConsumer)
+    producer = None
+
+    def registerProducer(self, producer, streaming):
+        """
+        Register the producer, checking that no producer has been registered
+        before.
+        """
+        assert self.producer is None
+        self.buffer = []
+        self.producer = producer
+        self.producer.resumeProducing()
+
+
+    def unregisterProducer(self):
+        """
+        Unregister the producer; this should only be done after a register.
+        """
+        assert self.producer is not None
+        self.producer = None
+
+
+    def write(self, data):
+        """
+        Save the data received.
+        """
+        self.buffer.append(data)
+        self.producer.resumeProducing()
+
+
+
+class TestProducer(object):
+    """
+    A dumb producer.
+    """
+
+    def __init__(self, toProduce, consumer):
+        """
+        @param toProduce: data to write
+        @type toProduce: C{str}
+        @param consumer: the consumer of data.
+        @type consumer: C{IConsumer}
+        """
+        self.toProduce = toProduce
+        self.consumer = consumer
+
+
+    def start(self):
+        """
+        Send the data to consume.
+        """
+        self.consumer.write(self.toProduce)
+
+
+
+class IReadWriteTestsMixin:
+    """
+    Generic tests for the C{IReadFile} and C{IWriteFile} interfaces.
+    """
+
+    def getFileReader(self, content):
+        """
+        Return an object providing C{IReadFile}, ready to send data C{content}.
+        """
+        raise NotImplementedError()
+
+
+    def getFileWriter(self):
+        """
+        Return an object providing C{IWriteFile}, ready to receive data.
+        """
+        raise NotImplementedError()
+
+
+    def getFileContent(self):
+        """
+        Return the content of the file used.
+        """
+        raise NotImplementedError()
+
+
+    def test_read(self):
+        """
+        Test L{ftp.IReadFile}: the implementation should have a send method
+        returning a C{Deferred} which fires when all the data has been sent
+        to the consumer, and the data should be correctly sent to the consumer.
+        """
+        content = 'wobble\n'
+        consumer = TestConsumer()
+        def cbGet(reader):
+            return reader.send(consumer).addCallback(cbSend)
+        def cbSend(res):
+            self.assertEqual("".join(consumer.buffer), content)
+        return self.getFileReader(content).addCallback(cbGet)
+
+
+    def test_write(self):
+        """
+        Test L{ftp.IWriteFile}: the implementation should have a receive
+        method returning a C{Deferred} which fires with a consumer ready to
+        receive data to be written. It should also have a close() method that
+        returns a Deferred.
+        """
+        content = 'elbbow\n'
+        def cbGet(writer):
+            return writer.receive().addCallback(cbReceive, writer)
+        def cbReceive(consumer, writer):
+            producer = TestProducer(content, consumer)
+            consumer.registerProducer(None, True)
+            producer.start()
+            consumer.unregisterProducer()
+            return writer.close().addCallback(cbClose)
+        def cbClose(ignored):
+            self.assertEqual(self.getFileContent(), content)
+        return self.getFileWriter().addCallback(cbGet)
+
+
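The write path spelled out by test_write, condensed into one hypothetical helper: receive() hands back an IConsumer, data is pushed through it, and close() must be waited on before the upload counts as complete.

    def upload(shell, segments, data):
        # sketch only; mirrors the sequence used in test_write above
        def gotWriter(writer):
            def gotConsumer(consumer):
                consumer.registerProducer(None, True)
                consumer.write(data)
                consumer.unregisterProducer()
                return writer.close()
            return writer.receive().addCallback(gotConsumer)
        return shell.openForWriting(segments).addCallback(gotWriter)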
+
+class FTPReadWriteTestCase(unittest.TestCase, IReadWriteTestsMixin):
+    """
+    Tests for C{ftp._FileReader} and C{ftp._FileWriter}, the objects returned
+    by the shell in C{openForReading}/C{openForWriting}.
+    """
+
+    def setUp(self):
+        """
+        Create a temporary file used later.
+        """
+        self.root = filepath.FilePath(self.mktemp())
+        self.root.createDirectory()
+        self.shell = ftp.FTPShell(self.root)
+        self.filename = "file.txt"
+
+
+    def getFileReader(self, content):
+        """
+        Return a C{ftp._FileReader} instance with a file opened for reading.
+        """
+        self.root.child(self.filename).setContent(content)
+        return self.shell.openForReading((self.filename,))
+
+
+    def getFileWriter(self):
+        """
+        Return a C{ftp._FileWriter} instance with a file opened for writing.
+        """
+        return self.shell.openForWriting((self.filename,))
+
+
+    def getFileContent(self):
+        """
+        Return the content of the temporary file.
+        """
+        return self.root.child(self.filename).getContent()
+
+
+class CloseTestWriter:
+    implements(ftp.IWriteFile)
+    closeStarted = False
+    def receive(self):
+        self.s = StringIO()
+        fc = ftp.FileConsumer(self.s)
+        return defer.succeed(fc)
+    def close(self):
+        self.closeStarted = True
+        return self.d
+
+class CloseTestShell:
+    def openForWriting(self, segs):
+        return defer.succeed(self.writer)
+
+class FTPCloseTest(unittest.TestCase):
+    """Tests that the server invokes IWriteFile.close"""
+
+    def test_write(self):
+        """Confirm that FTP uploads (i.e. ftp_STOR) correctly call and wait
+        upon the IWriteFile object's close() method"""
+        f = ftp.FTP()
+        f.workingDirectory = ["root"]
+        f.shell = CloseTestShell()
+        f.shell.writer = CloseTestWriter()
+        f.shell.writer.d = defer.Deferred()
+        f.factory = ftp.FTPFactory()
+        f.factory.timeOut = None
+        f.makeConnection(StringIO())
+
+        di = ftp.DTP()
+        di.factory = ftp.DTPFactory(f)
+        f.dtpInstance = di
+        di.makeConnection(None)
+
+        stor_done = []
+        d = f.ftp_STOR("path")
+        d.addCallback(stor_done.append)
+        # the writer is still receiving data
+        self.assertFalse(f.shell.writer.closeStarted, "close() called early")
+        di.dataReceived("some data here")
+        self.assertFalse(f.shell.writer.closeStarted, "close() called early")
+        di.connectionLost("reason is ignored")
+        # now we should be waiting in close()
+        self.assertTrue(f.shell.writer.closeStarted, "close() not called")
+        self.assertFalse(stor_done)
+        f.shell.writer.d.callback("allow close() to finish")
+        self.assertTrue(stor_done)
+
+        return d # just in case an errback occurred
+
+
+
+class FTPResponseCodeTests(unittest.TestCase):
+    """
+    Tests relating directly to response codes.
+    """
+    def test_unique(self):
+        """
+        All of the response code globals (for example C{RESTART_MARKER_REPLY} or
+        C{USR_NAME_OK_NEED_PASS}) have unique values and are present in the
+        C{RESPONSE} dictionary.
+        """
+        allValues = set(ftp.RESPONSE)
+        seenValues = set()
+
+        for key, value in vars(ftp).items():
+            if isinstance(value, str) and key.isupper():
+                self.assertIn(
+                    value, allValues,
+                    "Code %r with value %r missing from RESPONSE dict" % (
+                        key, value))
+                self.assertNotIn(
+                    value, seenValues,
+                    "Duplicate code %r with value %r" % (key, value))
+                seenValues.add(value)
+
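The invariant test_unique checks can be read as a usage note: every upper-case response-code constant in the module (RESTART_MARKER_REPLY, USR_NAME_OK_NEED_PASS, and so on) is a key of ftp.RESPONSE, whose value is the reply text the server formats and sends.

    from twisted.protocols import ftp

    # Look up the canned reply for one of the constants named in the docstring.
    template = ftp.RESPONSE[ftp.USR_NAME_OK_NEED_PASS]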
diff --git a/ThirdParty/Twisted/twisted/test/test_ftp_options.py b/ThirdParty/Twisted/twisted/test/test_ftp_options.py
new file mode 100644
index 0000000..e668502
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_ftp_options.py
@@ -0,0 +1,80 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.tap.ftp}.
+"""
+
+from twisted.trial.unittest import TestCase
+
+from twisted.cred import credentials, error
+from twisted.tap.ftp import Options
+from twisted.python import versions
+from twisted.python.filepath import FilePath
+
+
+
+class FTPOptionsTestCase(TestCase):
+    """
+    Tests for the command line option parser used for C{twistd ftp}.
+    """
+
+    usernamePassword = ('iamuser', 'thisispassword')
+
+    def setUp(self):
+        """
+        Create a password file containing a single user.
+        """
+        self.filename = self.mktemp()
+        f = FilePath(self.filename)
+        f.setContent(':'.join(self.usernamePassword))
+        self.options = Options()
+
+
+    def test_passwordfileDeprecation(self):
+        """
+        The C{--password-file} option will emit a warning stating that
+        said option is deprecated.
+        """
+        self.callDeprecated(
+            versions.Version("Twisted", 11, 1, 0),
+            self.options.opt_password_file, self.filename)
+
+
+    def test_authAdded(self):
+        """
+        The C{--auth} command-line option will add a checker to the list of
+        checkers.
+        """
+        numCheckers = len(self.options['credCheckers'])
+        self.options.parseOptions(['--auth', 'file:' + self.filename])
+        self.assertEqual(len(self.options['credCheckers']), numCheckers + 1)
+
+
+    def test_authFailure(self):
+        """
+        The checker created by the C{--auth} command-line option returns a
+        L{Deferred} that fails with L{UnauthorizedLogin} when
+        presented with credentials that are unknown to that checker.
+        """
+        self.options.parseOptions(['--auth', 'file:' + self.filename])
+        checker = self.options['credCheckers'][-1]
+        invalid = credentials.UsernamePassword(self.usernamePassword[0], 'fake')
+        return (checker.requestAvatarId(invalid)
+            .addCallbacks(
+                lambda ignore: self.fail("Wrong password should raise error"),
+                lambda err: err.trap(error.UnauthorizedLogin)))
+
+
+    def test_authSuccess(self):
+        """
+        The checker created by the C{--auth} command-line option returns a
+        L{Deferred} that returns the avatar id when presented with credentials
+        that are known to that checker.
+        """
+        self.options.parseOptions(['--auth', 'file:' + self.filename])
+        checker = self.options['credCheckers'][-1]
+        correct = credentials.UsernamePassword(*self.usernamePassword)
+        return checker.requestAvatarId(correct).addCallback(
+            lambda username: self.assertEqual(username, correct.username)
+        )
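The flow the two --auth tests describe, condensed into a sketch; the password-file path and its contents are hypothetical, and only the Options/checker behaviour shown in the tests is assumed.

    from twisted.cred import credentials
    from twisted.tap.ftp import Options

    options = Options()
    options.parseOptions(['--auth', 'file:/etc/ftp.passwd'])
    checker = options['credCheckers'][-1]

    d = checker.requestAvatarId(
        credentials.UsernamePassword('iamuser', 'thisispassword'))
    # fires with 'iamuser' if the file lists that user, errbacks with
    # UnauthorizedLogin otherwise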
diff --git a/ThirdParty/Twisted/twisted/test/test_hook.py b/ThirdParty/Twisted/twisted/test/test_hook.py
new file mode 100644
index 0000000..7d17f76
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_hook.py
@@ -0,0 +1,150 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Test cases for twisted.hook module.
+"""
+
+from twisted.python import hook
+from twisted.trial import unittest
+
+class BaseClass:
+    """
+    dummy class to help in testing.
+    """
+    def __init__(self):
+        """
+        dummy initializer
+        """
+        self.calledBasePre = 0
+        self.calledBasePost = 0
+        self.calledBase = 0
+
+    def func(self, a, b):
+        """
+        dummy method
+        """
+        assert a == 1
+        assert b == 2
+        self.calledBase = self.calledBase + 1
+
+
+class SubClass(BaseClass):
+    """
+    another dummy class
+    """
+    def __init__(self):
+        """
+        another dummy initializer
+        """
+        BaseClass.__init__(self)
+        self.calledSubPre = 0
+        self.calledSubPost = 0
+        self.calledSub = 0
+
+    def func(self, a, b):
+        """
+        another dummy function
+        """
+        assert a == 1
+        assert b == 2
+        BaseClass.func(self, a, b)
+        self.calledSub = self.calledSub + 1
+
+_clean_BaseClass = BaseClass.__dict__.copy()
+_clean_SubClass = SubClass.__dict__.copy()
+
+def basePre(base, a, b):
+    """
+    a pre-hook for the base class
+    """
+    base.calledBasePre = base.calledBasePre + 1
+
+def basePost(base, a, b):
+    """
+    a post-hook for the base class
+    """
+    base.calledBasePost = base.calledBasePost + 1
+
+def subPre(sub, a, b):
+    """
+    a pre-hook for the subclass
+    """
+    sub.calledSubPre = sub.calledSubPre + 1
+
+def subPost(sub, a, b):
+    """
+    a post-hook for the subclass
+    """
+    sub.calledSubPost = sub.calledSubPost + 1
+
+class HookTestCase(unittest.TestCase):
+    """
+    test case to make sure hooks are called
+    """
+    def setUp(self):
+        """Make sure we have clean versions of our classes."""
+        BaseClass.__dict__.clear()
+        BaseClass.__dict__.update(_clean_BaseClass)
+        SubClass.__dict__.clear()
+        SubClass.__dict__.update(_clean_SubClass)
+
+    def testBaseHook(self):
+        """make sure that the base class's hook is called reliably
+        """
+        base = BaseClass()
+        self.assertEqual(base.calledBase, 0)
+        self.assertEqual(base.calledBasePre, 0)
+        base.func(1,2)
+        self.assertEqual(base.calledBase, 1)
+        self.assertEqual(base.calledBasePre, 0)
+        hook.addPre(BaseClass, "func", basePre)
+        base.func(1, b=2)
+        self.assertEqual(base.calledBase, 2)
+        self.assertEqual(base.calledBasePre, 1)
+        hook.addPost(BaseClass, "func", basePost)
+        base.func(1, b=2)
+        self.assertEqual(base.calledBasePost, 1)
+        self.assertEqual(base.calledBase, 3)
+        self.assertEqual(base.calledBasePre, 2)
+        hook.removePre(BaseClass, "func", basePre)
+        hook.removePost(BaseClass, "func", basePost)
+        base.func(1, b=2)
+        self.assertEqual(base.calledBasePost, 1)
+        self.assertEqual(base.calledBase, 4)
+        self.assertEqual(base.calledBasePre, 2)
+
+    def testSubHook(self):
+        """test interactions between base-class hooks and subclass hooks
+        """
+        sub = SubClass()
+        self.assertEqual(sub.calledSub, 0)
+        self.assertEqual(sub.calledBase, 0)
+        sub.func(1, b=2)
+        self.assertEqual(sub.calledSub, 1)
+        self.assertEqual(sub.calledBase, 1)
+        hook.addPre(SubClass, 'func', subPre)
+        self.assertEqual(sub.calledSub, 1)
+        self.assertEqual(sub.calledBase, 1)
+        self.assertEqual(sub.calledSubPre, 0)
+        self.assertEqual(sub.calledBasePre, 0)
+        sub.func(1, b=2)
+        self.assertEqual(sub.calledSub, 2)
+        self.assertEqual(sub.calledBase, 2)
+        self.assertEqual(sub.calledSubPre, 1)
+        self.assertEqual(sub.calledBasePre, 0)
+        # let the pain begin
+        hook.addPre(BaseClass, 'func', basePre)
+        BaseClass.func(sub, 1, b=2)
+        # sub.func(1, b=2)
+        self.assertEqual(sub.calledBase, 3)
+        self.assertEqual(sub.calledBasePre, 1, str(sub.calledBasePre))
+        sub.func(1, b=2)
+        self.assertEqual(sub.calledBasePre, 2)
+        self.assertEqual(sub.calledBase, 4)
+        self.assertEqual(sub.calledSubPre, 2)
+        self.assertEqual(sub.calledSub, 3)
+
+testCases = [HookTestCase]
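The hook API these tests exercise, reduced to one self-contained sketch (the Greeter class is made up): pre- and post-hooks wrap an existing method on a class and are called with the same instance and arguments as the original call.

    from twisted.python import hook

    calls = []

    class Greeter:
        def greet(self, name):
            calls.append(('greet', name))

    def logCall(instance, name):
        calls.append(('pre', name))

    hook.addPre(Greeter, 'greet', logCall)
    Greeter().greet('world')
    assert calls == [('pre', 'world'), ('greet', 'world')]
    hook.removePre(Greeter, 'greet', logCall)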
diff --git a/ThirdParty/Twisted/twisted/test/test_htb.py b/ThirdParty/Twisted/twisted/test/test_htb.py
new file mode 100644
index 0000000..ee4cc27
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_htb.py
@@ -0,0 +1,109 @@
+# -*- Python -*-
+
+__version__ = '$Revision: 1.3 $'[11:-2]
+
+from twisted.trial import unittest
+from twisted.protocols import htb
+
+class DummyClock:
+    time = 0
+    def set(self, when):
+        self.time = when
+
+    def __call__(self):
+        return self.time
+
+class SomeBucket(htb.Bucket):
+    maxburst = 100
+    rate = 2
+
+class TestBucketBase(unittest.TestCase):
+    def setUp(self):
+        self._realTimeFunc = htb.time
+        self.clock = DummyClock()
+        htb.time = self.clock
+
+    def tearDown(self):
+        htb.time = self._realTimeFunc
+
+class TestBucket(TestBucketBase):
+    def testBucketSize(self):
+        """Testing the size of the bucket."""
+        b = SomeBucket()
+        fit = b.add(1000)
+        self.assertEqual(100, fit)
+
+    def testBucketDrain(self):
+        """Testing the bucket's drain rate."""
+        b = SomeBucket()
+        fit = b.add(1000)
+        self.clock.set(10)
+        fit = b.add(1000)
+        self.assertEqual(20, fit)
+
+    def test_bucketEmpty(self):
+        """
+        L{htb.Bucket.drip} returns C{True} if the bucket is empty after that drip.
+        """
+        b = SomeBucket()
+        b.add(20)
+        self.clock.set(9)
+        empty = b.drip()
+        self.assertFalse(empty)
+        self.clock.set(10)
+        empty = b.drip()
+        self.assertTrue(empty)
+
+class TestBucketNesting(TestBucketBase):
+    def setUp(self):
+        TestBucketBase.setUp(self)
+        self.parent = SomeBucket()
+        self.child1 = SomeBucket(self.parent)
+        self.child2 = SomeBucket(self.parent)
+
+    def testBucketParentSize(self):
+        # Use up most of the parent bucket.
+        self.child1.add(90)
+        fit = self.child2.add(90)
+        self.assertEqual(10, fit)
+
+    def testBucketParentRate(self):
+        # Make the parent bucket drain slower.
+        self.parent.rate = 1
+        # Fill both child1 and parent.
+        self.child1.add(100)
+        self.clock.set(10)
+        fit = self.child1.add(100)
+        # How much room was there?  The child bucket would have had 20,
+        # but the parent bucket only had ten (so it wouldn't make much sense
+        # to have a child bucket draining faster than its parent in a real
+        # application).
+        self.assertEqual(10, fit)
+
+
+# TODO: Test the Transport stuff?
+
+from test_pcp import DummyConsumer
+
+class ConsumerShaperTest(TestBucketBase):
+    def setUp(self):
+        TestBucketBase.setUp(self)
+        self.underlying = DummyConsumer()
+        self.bucket = SomeBucket()
+        self.shaped = htb.ShapedConsumer(self.underlying, self.bucket)
+
+    def testRate(self):
+        # Start off with a full bucket, so the burst size doesn't factor
+        # into the calculations.
+        delta_t = 10
+        self.bucket.add(100)
+        self.shaped.write("x" * 100)
+        self.clock.set(delta_t)
+        self.shaped.resumeProducing()
+        self.assertEqual(len(self.underlying.getvalue()),
+                             delta_t * self.bucket.rate)
+
+    def testBucketRefs(self):
+        self.assertEqual(self.bucket._refcount, 1)
+        self.shaped.stopProducing()
+        self.assertEqual(self.bucket._refcount, 0)
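The arithmetic behind the bucket assertions above, spelled out: a bucket holds at most maxburst bytes and drains rate bytes per time unit, so the first add() fills it to capacity and a later add() only fits what has drained since.

    from twisted.protocols import htb

    class SomeBucket(htb.Bucket):
        maxburst = 100   # capacity of the bucket
        rate = 2         # bytes drained per time unit

    b = SomeBucket()
    b.add(1000)          # returns 100: only maxburst fits into an empty bucket
    # ten time units later rate * 10 == 20 bytes have drained, so another
    # add(1000) reports 20 bytes of room, which is what testBucketDrain asserts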
diff --git a/ThirdParty/Twisted/twisted/test/test_ident.py b/ThirdParty/Twisted/twisted/test/test_ident.py
new file mode 100644
index 0000000..9f69322
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_ident.py
@@ -0,0 +1,194 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Test cases for twisted.protocols.ident module.
+"""
+
+import struct
+
+from twisted.protocols import ident
+from twisted.python import failure
+from twisted.internet import error
+from twisted.internet import defer
+
+from twisted.trial import unittest
+from twisted.test.proto_helpers import StringTransport
+
+
+
+class ClassParserTestCase(unittest.TestCase):
+    """
+    Test parsing of ident responses.
+    """
+
+    def setUp(self):
+        """
+        Create an ident client to use in tests.
+        """
+        self.client = ident.IdentClient()
+
+
+    def test_identError(self):
+        """
+        'UNKNOWN-ERROR' error should map to the L{ident.IdentError} exception.
+        """
+        d = defer.Deferred()
+        self.client.queries.append((d, 123, 456))
+        self.client.lineReceived('123, 456 : ERROR : UNKNOWN-ERROR')
+        return self.assertFailure(d, ident.IdentError)
+
+
+    def test_noUserError(self):
+        """
+        'NO-USER' error should map to the L{ident.NoUser} exception.
+        """
+        d = defer.Deferred()
+        self.client.queries.append((d, 234, 456))
+        self.client.lineReceived('234, 456 : ERROR : NO-USER')
+        return self.assertFailure(d, ident.NoUser)
+
+
+    def test_invalidPortError(self):
+        """
+        'INVALID-PORT' error should map to the L{ident.InvalidPort} exception.
+        """
+        d = defer.Deferred()
+        self.client.queries.append((d, 345, 567))
+        self.client.lineReceived('345, 567 :  ERROR : INVALID-PORT')
+        return self.assertFailure(d, ident.InvalidPort)
+
+
+    def test_hiddenUserError(self):
+        """
+        'HIDDEN-USER' error should map to the L{ident.HiddenUser} exception.
+        """
+        d = defer.Deferred()
+        self.client.queries.append((d, 567, 789))
+        self.client.lineReceived('567, 789 : ERROR : HIDDEN-USER')
+        return self.assertFailure(d, ident.HiddenUser)
+
+
+    def test_lostConnection(self):
+        """
+        A pending query which failed because of a ConnectionLost should
+        receive an L{ident.IdentError}.
+        """
+        d = defer.Deferred()
+        self.client.queries.append((d, 765, 432))
+        self.client.connectionLost(failure.Failure(error.ConnectionLost()))
+        return self.assertFailure(d, ident.IdentError)
+
+
+
+class TestIdentServer(ident.IdentServer):
+    def lookup(self, serverAddress, clientAddress):
+        return self.resultValue
+
+
+class TestErrorIdentServer(ident.IdentServer):
+    def lookup(self, serverAddress, clientAddress):
+        raise self.exceptionType()
+
+
+class NewException(RuntimeError):
+    pass
+
+
+class ServerParserTestCase(unittest.TestCase):
+    def testErrors(self):
+        p = TestErrorIdentServer()
+        p.makeConnection(StringTransport())
+        L = []
+        p.sendLine = L.append
+
+        p.exceptionType = ident.IdentError
+        p.lineReceived('123, 345')
+        self.assertEqual(L[0], '123, 345 : ERROR : UNKNOWN-ERROR')
+
+        p.exceptionType = ident.NoUser
+        p.lineReceived('432, 210')
+        self.assertEqual(L[1], '432, 210 : ERROR : NO-USER')
+
+        p.exceptionType = ident.InvalidPort
+        p.lineReceived('987, 654')
+        self.assertEqual(L[2], '987, 654 : ERROR : INVALID-PORT')
+
+        p.exceptionType = ident.HiddenUser
+        p.lineReceived('756, 827')
+        self.assertEqual(L[3], '756, 827 : ERROR : HIDDEN-USER')
+
+        p.exceptionType = NewException
+        p.lineReceived('987, 789')
+        self.assertEqual(L[4], '987, 789 : ERROR : UNKNOWN-ERROR')
+        errs = self.flushLoggedErrors(NewException)
+        self.assertEqual(len(errs), 1)
+
+        for port in -1, 0, 65536, 65537:
+            del L[:]
+            p.lineReceived('%d, 5' % (port,))
+            p.lineReceived('5, %d' % (port,))
+            self.assertEqual(
+                L, ['%d, 5 : ERROR : INVALID-PORT' % (port,),
+                    '5, %d : ERROR : INVALID-PORT' % (port,)])
+
+    def testSuccess(self):
+        p = TestIdentServer()
+        p.makeConnection(StringTransport())
+        L = []
+        p.sendLine = L.append
+
+        p.resultValue = ('SYS', 'USER')
+        p.lineReceived('123, 456')
+        self.assertEqual(L[0], '123, 456 : USERID : SYS : USER')
+
+
+if struct.pack('=L', 1)[0] == '\x01':
+    _addr1 = '0100007F'
+    _addr2 = '04030201'
+else:
+    _addr1 = '7F000001'
+    _addr2 = '01020304'
+
+
+class ProcMixinTestCase(unittest.TestCase):
+    line = ('4: %s:0019 %s:02FA 0A 00000000:00000000 '
+            '00:00000000 00000000     0        0 10927 1 f72a5b80 '
+            '3000 0 0 2 -1') % (_addr1, _addr2)
+
+    def testDottedQuadFromHexString(self):
+        p = ident.ProcServerMixin()
+        self.assertEqual(p.dottedQuadFromHexString(_addr1), '127.0.0.1')
+
+    def testUnpackAddress(self):
+        p = ident.ProcServerMixin()
+        self.assertEqual(p.unpackAddress(_addr1 + ':0277'),
+                          ('127.0.0.1', 631))
+
+    def testLineParser(self):
+        p = ident.ProcServerMixin()
+        self.assertEqual(
+            p.parseLine(self.line),
+            (('127.0.0.1', 25), ('1.2.3.4', 762), 0))
+
+    def testExistingAddress(self):
+        username = []
+        p = ident.ProcServerMixin()
+        p.entries = lambda: iter([self.line])
+        p.getUsername = lambda uid: (username.append(uid), 'root')[1]
+        self.assertEqual(
+            p.lookup(('127.0.0.1', 25), ('1.2.3.4', 762)),
+            (p.SYSTEM_NAME, 'root'))
+        self.assertEqual(username, [0])
+
+    def testNonExistingAddress(self):
+        p = ident.ProcServerMixin()
+        p.entries = lambda: iter([self.line])
+        self.assertRaises(ident.NoUser, p.lookup, ('127.0.0.1', 26),
+                                                  ('1.2.3.4', 762))
+        self.assertRaises(ident.NoUser, p.lookup, ('127.0.0.1', 25),
+                                                  ('1.2.3.5', 762))
+        self.assertRaises(ident.NoUser, p.lookup, ('127.0.0.1', 25),
+                                                  ('1.2.3.4', 763))
+
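How the /proc-parsing helpers exercised above fit together, assuming a little-endian host (which is why the tests pick _addr1 by byte order): the hex address from /proc/net/tcp is in host byte order and the port is plain hexadecimal.

    from twisted.protocols import ident

    p = ident.ProcServerMixin()
    p.dottedQuadFromHexString('0100007F')   # '127.0.0.1' on a little-endian host
    p.unpackAddress('0100007F:0277')        # ('127.0.0.1', 631)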
diff --git a/ThirdParty/Twisted/twisted/test/test_import.py b/ThirdParty/Twisted/twisted/test/test_import.py
new file mode 100644
index 0000000..821b9bf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_import.py
@@ -0,0 +1,75 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.trial import unittest
+from twisted.python.runtime import platformType
+
+
+class AtLeastImportTestCase(unittest.TestCase):
+    """
+    I test that there are no syntax errors which would prevent importing.
+    """
+
+    failureException = ImportError
+
+    def test_misc(self):
+        """
+        Test importing other miscellaneous modules.
+        """
+        from twisted import copyright
+
+    def test_persisted(self):
+        """
+        Test importing persisted.
+        """
+        from twisted.persisted import dirdbm
+        from twisted.persisted import styles
+
+    def test_internet(self):
+        """
+        Test importing internet.
+        """
+        from twisted.internet import tcp
+        from twisted.internet import main
+        from twisted.internet import abstract
+        from twisted.internet import udp
+        from twisted.internet import protocol
+        from twisted.internet import defer
+
+    def test_unix(self):
+        """
+        Test internet modules for unix.
+        """
+        from twisted.internet import stdio
+        from twisted.internet import process
+        from twisted.internet import unix
+
+    if platformType != "posix":
+        test_unix.skip = "UNIX-only modules"
+
+    def test_spread(self):
+        """
+        Test importing spreadables.
+        """
+        from twisted.spread import pb
+        from twisted.spread import jelly
+        from twisted.spread import banana
+        from twisted.spread import flavors
+
+    def test_twistedPython(self):
+        """
+        Test importing C{twisted.python}.
+        """
+        from twisted.python import hook
+        from twisted.python import log
+        from twisted.python import reflect
+        from twisted.python import usage
+
+    def test_protocols(self):
+        """
+        Test importing protocols.
+        """
+        from twisted.protocols import basic
+        from twisted.protocols import ftp
+        from twisted.protocols import telnet
+        from twisted.protocols import policies
diff --git a/ThirdParty/Twisted/twisted/test/test_internet.py b/ThirdParty/Twisted/twisted/test/test_internet.py
new file mode 100644
index 0000000..516258f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_internet.py
@@ -0,0 +1,1419 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for lots of functionality provided by L{twisted.internet}.
+"""
+
+from __future__ import division, absolute_import
+
+import os
+import sys
+import time
+
+from twisted.python.compat import _PY3
+from twisted.trial import unittest
+from twisted.internet import reactor, protocol, error, abstract, defer
+from twisted.internet import interfaces, base
+
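+# twisted.internet.ssl needs pyOpenSSL; treat SSL as unavailable if the import
+# fails or it reports itself unsupported, so the SSL-dependent test below is
+# only defined when it can actually run.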
+try:
+    from twisted.internet import ssl
+except ImportError:
+    ssl = None
+if ssl and not ssl.supported:
+    ssl = None
+
+from twisted.internet.defer import Deferred, maybeDeferred
+from twisted.python import runtime
+if not _PY3:
+    from twisted.python import util
+
+
+class ThreePhaseEventTests(unittest.TestCase):
+    """
+    Tests for the private implementation helpers for system event triggers.
+    """
+    def setUp(self):
+        """
+        Create a trigger, an argument, and an event to be used by tests.
+        """
+        self.trigger = lambda x: None
+        self.arg = object()
+        self.event = base._ThreePhaseEvent()
+
+
+    def test_addInvalidPhase(self):
+        """
+        L{_ThreePhaseEvent.addTrigger} should raise L{KeyError} when called
+        with an invalid phase.
+        """
+        self.assertRaises(
+            KeyError,
+            self.event.addTrigger, 'xxx', self.trigger, self.arg)
+
+
+    def test_addBeforeTrigger(self):
+        """
+        L{_ThreePhaseEvent.addTrigger} should accept C{'before'} as a phase, a
+        callable, and some arguments and add the callable with the arguments to
+        the before list.
+        """
+        self.event.addTrigger('before', self.trigger, self.arg)
+        self.assertEqual(
+            self.event.before,
+            [(self.trigger, (self.arg,), {})])
+
+
+    def test_addDuringTrigger(self):
+        """
+        L{_ThreePhaseEvent.addTrigger} should accept C{'during'} as a phase, a
+        callable, and some arguments and add the callable with the arguments to
+        the during list.
+        """
+        self.event.addTrigger('during', self.trigger, self.arg)
+        self.assertEqual(
+            self.event.during,
+            [(self.trigger, (self.arg,), {})])
+
+
+    def test_addAfterTrigger(self):
+        """
+        L{_ThreePhaseEvent.addTrigger} should accept C{'after'} as a phase, a
+        callable, and some arguments and add the callable with the arguments to
+        the after list.
+        """
+        self.event.addTrigger('after', self.trigger, self.arg)
+        self.assertEqual(
+            self.event.after,
+            [(self.trigger, (self.arg,), {})])
+
+
+    def test_removeTrigger(self):
+        """
+        L{_ThreePhaseEvent.removeTrigger} should accept an opaque object
+        previously returned by L{_ThreePhaseEvent.addTrigger} and remove the
+        associated trigger.
+        """
+        handle = self.event.addTrigger('before', self.trigger, self.arg)
+        self.event.removeTrigger(handle)
+        self.assertEqual(self.event.before, [])
+
+
+    def test_removeNonexistentTrigger(self):
+        """
+        L{_ThreePhaseEvent.removeTrigger} should raise L{ValueError} when given
+        an object not previously returned by L{_ThreePhaseEvent.addTrigger}.
+        """
+        self.assertRaises(ValueError, self.event.removeTrigger, object())
+
+
+    def test_removeRemovedTrigger(self):
+        """
+        L{_ThreePhaseEvent.removeTrigger} should raise L{ValueError} the second
+        time it is called with an object returned by
+        L{_ThreePhaseEvent.addTrigger}.
+        """
+        handle = self.event.addTrigger('before', self.trigger, self.arg)
+        self.event.removeTrigger(handle)
+        self.assertRaises(ValueError, self.event.removeTrigger, handle)
+
+
+    def test_removeAlmostValidTrigger(self):
+        """
+        L{_ThreePhaseEvent.removeTrigger} should raise L{KeyError} if it is
+        given a trigger handle which resembles a valid trigger handle aside
+        from its phase being incorrect.
+        """
+        self.assertRaises(
+            KeyError,
+            self.event.removeTrigger, ('xxx', self.trigger, (self.arg,), {}))
+
+
+    def test_fireEvent(self):
+        """
+        L{_ThreePhaseEvent.fireEvent} should call I{before}, I{during}, and
+        I{after} phase triggers in that order.
+        """
+        events = []
+        self.event.addTrigger('after', events.append, ('first', 'after'))
+        self.event.addTrigger('during', events.append, ('first', 'during'))
+        self.event.addTrigger('before', events.append, ('first', 'before'))
+        self.event.addTrigger('before', events.append, ('second', 'before'))
+        self.event.addTrigger('during', events.append, ('second', 'during'))
+        self.event.addTrigger('after', events.append, ('second', 'after'))
+
+        self.assertEqual(events, [])
+        self.event.fireEvent()
+        self.assertEqual(events,
+                         [('first', 'before'), ('second', 'before'),
+                          ('first', 'during'), ('second', 'during'),
+                          ('first', 'after'), ('second', 'after')])
+
+
+    def test_asynchronousBefore(self):
+        """
+        L{_ThreePhaseEvent.fireEvent} should wait for any L{Deferred} returned
+        by a I{before} phase trigger before proceeding to I{during} events.
+        """
+        events = []
+        beforeResult = Deferred()
+        self.event.addTrigger('before', lambda: beforeResult)
+        self.event.addTrigger('during', events.append, 'during')
+        self.event.addTrigger('after', events.append, 'after')
+
+        self.assertEqual(events, [])
+        self.event.fireEvent()
+        self.assertEqual(events, [])
+        beforeResult.callback(None)
+        self.assertEqual(events, ['during', 'after'])
+
+
+    def test_beforeTriggerException(self):
+        """
+        If a before-phase trigger raises a synchronous exception, it should be
+        logged and the remaining triggers should be run.
+        """
+        events = []
+
+        class DummyException(Exception):
+            pass
+
+        def raisingTrigger():
+            raise DummyException()
+
+        self.event.addTrigger('before', raisingTrigger)
+        self.event.addTrigger('before', events.append, 'before')
+        self.event.addTrigger('during', events.append, 'during')
+        self.event.fireEvent()
+        self.assertEqual(events, ['before', 'during'])
+        errors = self.flushLoggedErrors(DummyException)
+        self.assertEqual(len(errors), 1)
+
+
+    def test_duringTriggerException(self):
+        """
+        If a during-phase trigger raises a synchronous exception, it should be
+        logged and the remaining triggers should be run.
+        """
+        events = []
+
+        class DummyException(Exception):
+            pass
+
+        def raisingTrigger():
+            raise DummyException()
+
+        self.event.addTrigger('during', raisingTrigger)
+        self.event.addTrigger('during', events.append, 'during')
+        self.event.addTrigger('after', events.append, 'after')
+        self.event.fireEvent()
+        self.assertEqual(events, ['during', 'after'])
+        errors = self.flushLoggedErrors(DummyException)
+        self.assertEqual(len(errors), 1)
+
+
+    def test_synchronousRemoveAlreadyExecutedBefore(self):
+        """
+        If a before-phase trigger tries to remove another before-phase trigger
+        which has already run, a warning should be emitted.
+        """
+        events = []
+
+        def removeTrigger():
+            self.event.removeTrigger(beforeHandle)
+
+        beforeHandle = self.event.addTrigger('before', events.append, ('first', 'before'))
+        self.event.addTrigger('before', removeTrigger)
+        self.event.addTrigger('before', events.append, ('second', 'before'))
+        self.assertWarns(
+            DeprecationWarning,
+            "Removing already-fired system event triggers will raise an "
+            "exception in a future version of Twisted.",
+            __file__,
+            self.event.fireEvent)
+        self.assertEqual(events, [('first', 'before'), ('second', 'before')])
+
+
+    def test_synchronousRemovePendingBefore(self):
+        """
+        If a before-phase trigger removes another before-phase trigger which
+        has not yet run, the removed trigger should not be run.
+        """
+        events = []
+        self.event.addTrigger(
+            'before', lambda: self.event.removeTrigger(beforeHandle))
+        beforeHandle = self.event.addTrigger(
+            'before', events.append, ('first', 'before'))
+        self.event.addTrigger('before', events.append, ('second', 'before'))
+        self.event.fireEvent()
+        self.assertEqual(events, [('second', 'before')])
+
+
+    def test_synchronousBeforeRemovesDuring(self):
+        """
+        If a before-phase trigger removes a during-phase trigger, the
+        during-phase trigger should not be run.
+        """
+        events = []
+        self.event.addTrigger(
+            'before', lambda: self.event.removeTrigger(duringHandle))
+        duringHandle = self.event.addTrigger('during', events.append, 'during')
+        self.event.addTrigger('after', events.append, 'after')
+        self.event.fireEvent()
+        self.assertEqual(events, ['after'])
+
+
+    def test_asynchronousBeforeRemovesDuring(self):
+        """
+        If a before-phase trigger returns a L{Deferred} and later removes a
+        during-phase trigger before the L{Deferred} fires, the during-phase
+        trigger should not be run.
+        """
+        events = []
+        beforeResult = Deferred()
+        self.event.addTrigger('before', lambda: beforeResult)
+        duringHandle = self.event.addTrigger('during', events.append, 'during')
+        self.event.addTrigger('after', events.append, 'after')
+        self.event.fireEvent()
+        self.event.removeTrigger(duringHandle)
+        beforeResult.callback(None)
+        self.assertEqual(events, ['after'])
+
+
+    def test_synchronousBeforeRemovesConspicuouslySimilarDuring(self):
+        """
+        If a before-phase trigger removes a during-phase trigger which is
+        identical to an already-executed before-phase trigger aside from their
+        phases, no warning should be emitted and the during-phase trigger
+        should not be run.
+        """
+        events = []
+        def trigger():
+            events.append('trigger')
+        self.event.addTrigger('before', trigger)
+        self.event.addTrigger(
+            'before', lambda: self.event.removeTrigger(duringTrigger))
+        duringTrigger = self.event.addTrigger('during', trigger)
+        self.event.fireEvent()
+        self.assertEqual(events, ['trigger'])
+
+
+    def test_synchronousRemovePendingDuring(self):
+        """
+        If a during-phase trigger removes another during-phase trigger which
+        has not yet run, the removed trigger should not be run.
+        """
+        events = []
+        self.event.addTrigger(
+            'during', lambda: self.event.removeTrigger(duringHandle))
+        duringHandle = self.event.addTrigger(
+            'during', events.append, ('first', 'during'))
+        self.event.addTrigger(
+            'during', events.append, ('second', 'during'))
+        self.event.fireEvent()
+        self.assertEqual(events, [('second', 'during')])
+
+
+    def test_triggersRunOnce(self):
+        """
+        A trigger should only be called on the first call to
+        L{_ThreePhaseEvent.fireEvent}.
+        """
+        events = []
+        self.event.addTrigger('before', events.append, 'before')
+        self.event.addTrigger('during', events.append, 'during')
+        self.event.addTrigger('after', events.append, 'after')
+        self.event.fireEvent()
+        self.event.fireEvent()
+        self.assertEqual(events, ['before', 'during', 'after'])
+
+
+    def test_finishedBeforeTriggersCleared(self):
+        """
+        The temporary list L{_ThreePhaseEvent.finishedBefore} should be emptied
+        and the state reset to C{'BASE'} before the first during-phase trigger
+        executes.
+        """
+        events = []
+        def duringTrigger():
+            events.append('during')
+            self.assertEqual(self.event.finishedBefore, [])
+            self.assertEqual(self.event.state, 'BASE')
+        self.event.addTrigger('before', events.append, 'before')
+        self.event.addTrigger('during', duringTrigger)
+        self.event.fireEvent()
+        self.assertEqual(events, ['before', 'during'])
+
+
+
+class SystemEventTestCase(unittest.TestCase):
+    """
+    Tests for the reactor's implementation of the C{fireSystemEvent},
+    C{addSystemEventTrigger}, and C{removeSystemEventTrigger} methods of the
+    L{IReactorCore} interface.
+
+    @ivar triggers: A list of the handles to triggers which have been added to
+        the reactor.
+    """
+    def setUp(self):
+        """
+        Create an empty list in which to store trigger handles.
+        """
+        self.triggers = []
+
+
+    def tearDown(self):
+        """
+        Remove all remaining triggers from the reactor.
+        """
+        while self.triggers:
+            trigger = self.triggers.pop()
+            try:
+                reactor.removeSystemEventTrigger(trigger)
+            except (ValueError, KeyError):
+                pass
+
+
+    def addTrigger(self, phase, eventType, func):
+        """
+        Add a trigger to the reactor and remember it in C{self.triggers}.
+        """
+        t = reactor.addSystemEventTrigger(phase, eventType, func)
+        self.triggers.append(t)
+        return t
+
+
+    def removeTrigger(self, trigger):
+        """
+        Remove a trigger by its handle from the reactor and from
+        C{self.triggers}.
+        """
+        reactor.removeSystemEventTrigger(trigger)
+        self.triggers.remove(trigger)
+
+
+    def _addSystemEventTriggerTest(self, phase):
+        eventType = 'test'
+        events = []
+        def trigger():
+            events.append(None)
+        self.addTrigger(phase, eventType, trigger)
+        self.assertEqual(events, [])
+        reactor.fireSystemEvent(eventType)
+        self.assertEqual(events, [None])
+
+
+    def test_beforePhase(self):
+        """
+        L{IReactorCore.addSystemEventTrigger} should accept the C{'before'}
+        phase and not call the given object until the right event is fired.
+        """
+        self._addSystemEventTriggerTest('before')
+
+
+    def test_duringPhase(self):
+        """
+        L{IReactorCore.addSystemEventTrigger} should accept the C{'during'}
+        phase and not call the given object until the right event is fired.
+        """
+        self._addSystemEventTriggerTest('during')
+
+
+    def test_afterPhase(self):
+        """
+        L{IReactorCore.addSystemEventTrigger} should accept the C{'after'}
+        phase and not call the given object until the right event is fired.
+        """
+        self._addSystemEventTriggerTest('after')
+
+
+    def test_unknownPhase(self):
+        """
+        L{IReactorCore.addSystemEventTrigger} should reject phases other than
+        C{'before'}, C{'during'}, or C{'after'}.
+        """
+        eventType = 'test'
+        self.assertRaises(
+            KeyError, self.addTrigger, 'xxx', eventType, lambda: None)
+
+
+    def test_beforePreceedsDuring(self):
+        """
+        L{IReactorCore.addSystemEventTrigger} should call triggers added to the
+        C{'before'} phase before it calls triggers added to the C{'during'}
+        phase.
+        """
+        eventType = 'test'
+        events = []
+        def beforeTrigger():
+            events.append('before')
+        def duringTrigger():
+            events.append('during')
+        self.addTrigger('before', eventType, beforeTrigger)
+        self.addTrigger('during', eventType, duringTrigger)
+        self.assertEqual(events, [])
+        reactor.fireSystemEvent(eventType)
+        self.assertEqual(events, ['before', 'during'])
+
+
+    def test_duringPreceedsAfter(self):
+        """
+        L{IReactorCore.addSystemEventTrigger} should call triggers added to the
+        C{'during'} phase before it calls triggers added to the C{'after'}
+        phase.
+        """
+        eventType = 'test'
+        events = []
+        def duringTrigger():
+            events.append('during')
+        def afterTrigger():
+            events.append('after')
+        self.addTrigger('during', eventType, duringTrigger)
+        self.addTrigger('after', eventType, afterTrigger)
+        self.assertEqual(events, [])
+        reactor.fireSystemEvent(eventType)
+        self.assertEqual(events, ['during', 'after'])
+
+
+    def test_beforeReturnsDeferred(self):
+        """
+        If a trigger added to the C{'before'} phase of an event returns a
+        L{Deferred}, the C{'during'} phase should be delayed until it is called
+        back.
+        """
+        triggerDeferred = Deferred()
+        eventType = 'test'
+        events = []
+        def beforeTrigger():
+            return triggerDeferred
+        def duringTrigger():
+            events.append('during')
+        self.addTrigger('before', eventType, beforeTrigger)
+        self.addTrigger('during', eventType, duringTrigger)
+        self.assertEqual(events, [])
+        reactor.fireSystemEvent(eventType)
+        self.assertEqual(events, [])
+        triggerDeferred.callback(None)
+        self.assertEqual(events, ['during'])
+
+
+    def test_multipleBeforeReturnDeferred(self):
+        """
+        If more than one trigger added to the C{'before'} phase of an event
+        return L{Deferred}s, the C{'during'} phase should be delayed until they
+        are all called back.
+        """
+        firstDeferred = Deferred()
+        secondDeferred = Deferred()
+        eventType = 'test'
+        events = []
+        def firstBeforeTrigger():
+            return firstDeferred
+        def secondBeforeTrigger():
+            return secondDeferred
+        def duringTrigger():
+            events.append('during')
+        self.addTrigger('before', eventType, firstBeforeTrigger)
+        self.addTrigger('before', eventType, secondBeforeTrigger)
+        self.addTrigger('during', eventType, duringTrigger)
+        self.assertEqual(events, [])
+        reactor.fireSystemEvent(eventType)
+        self.assertEqual(events, [])
+        firstDeferred.callback(None)
+        self.assertEqual(events, [])
+        secondDeferred.callback(None)
+        self.assertEqual(events, ['during'])
+
+
+    def test_subsequentBeforeTriggerFiresPriorBeforeDeferred(self):
+        """
+        If a trigger added to the C{'before'} phase of an event calls back a
+        L{Deferred} returned by an earlier trigger in the C{'before'} phase of
+        the same event, the remaining C{'before'} triggers for that event
+        should be run and any further L{Deferred}s waited on before proceeding
+        to the C{'during'} events.
+        """
+        eventType = 'test'
+        events = []
+        firstDeferred = Deferred()
+        secondDeferred = Deferred()
+        def firstBeforeTrigger():
+            return firstDeferred
+        def secondBeforeTrigger():
+            firstDeferred.callback(None)
+        def thirdBeforeTrigger():
+            events.append('before')
+            return secondDeferred
+        def duringTrigger():
+            events.append('during')
+        self.addTrigger('before', eventType, firstBeforeTrigger)
+        self.addTrigger('before', eventType, secondBeforeTrigger)
+        self.addTrigger('before', eventType, thirdBeforeTrigger)
+        self.addTrigger('during', eventType, duringTrigger)
+        self.assertEqual(events, [])
+        reactor.fireSystemEvent(eventType)
+        self.assertEqual(events, ['before'])
+        secondDeferred.callback(None)
+        self.assertEqual(events, ['before', 'during'])
+
+
+    def test_removeSystemEventTrigger(self):
+        """
+        A trigger removed with L{IReactorCore.removeSystemEventTrigger} should
+        not be called when the event fires.
+        """
+        eventType = 'test'
+        events = []
+        def firstBeforeTrigger():
+            events.append('first')
+        def secondBeforeTrigger():
+            events.append('second')
+        self.addTrigger('before', eventType, firstBeforeTrigger)
+        self.removeTrigger(
+            self.addTrigger('before', eventType, secondBeforeTrigger))
+        self.assertEqual(events, [])
+        reactor.fireSystemEvent(eventType)
+        self.assertEqual(events, ['first'])
+
+
+    def test_removeNonExistentSystemEventTrigger(self):
+        """
+        Passing an object to L{IReactorCore.removeSystemEventTrigger} which was
+        not returned by a previous call to
+        L{IReactorCore.addSystemEventTrigger} or which has already been passed
+        to C{removeSystemEventTrigger} should result in L{TypeError},
+        L{KeyError}, or L{ValueError} being raised.
+        """
+        b = self.addTrigger('during', 'test', lambda: None)
+        self.removeTrigger(b)
+        self.assertRaises(
+            TypeError, reactor.removeSystemEventTrigger, None)
+        self.assertRaises(
+            ValueError, reactor.removeSystemEventTrigger, b)
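+        # Rebuild the handle with a bogus phase substituted for the real one;
+        # removal should then fail with KeyError.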
+        self.assertRaises(
+            KeyError,
+            reactor.removeSystemEventTrigger,
+            (b[0], ('xxx',) + b[1][1:]))
+
+
+    def test_interactionBetweenDifferentEvents(self):
+        """
+        L{IReactorCore.fireSystemEvent} should behave the same way for a
+        particular system event regardless of whether Deferreds are being
+        waited on for a different system event.
+        """
+        events = []
+
+        firstEvent = 'first-event'
+        firstDeferred = Deferred()
+        def beforeFirstEvent():
+            events.append(('before', 'first'))
+            return firstDeferred
+        def afterFirstEvent():
+            events.append(('after', 'first'))
+
+        secondEvent = 'second-event'
+        secondDeferred = Deferred()
+        def beforeSecondEvent():
+            events.append(('before', 'second'))
+            return secondDeferred
+        def afterSecondEvent():
+            events.append(('after', 'second'))
+
+        self.addTrigger('before', firstEvent, beforeFirstEvent)
+        self.addTrigger('after', firstEvent, afterFirstEvent)
+        self.addTrigger('before', secondEvent, beforeSecondEvent)
+        self.addTrigger('after', secondEvent, afterSecondEvent)
+
+        self.assertEqual(events, [])
+
+        # After this, firstEvent should be stuck before 'during' waiting for
+        # firstDeferred.
+        reactor.fireSystemEvent(firstEvent)
+        self.assertEqual(events, [('before', 'first')])
+
+        # After this, secondEvent should be stuck before 'during' waiting for
+        # secondDeferred.
+        reactor.fireSystemEvent(secondEvent)
+        self.assertEqual(events, [('before', 'first'), ('before', 'second')])
+
+        # After this, firstEvent should have finished completely, but
+        # secondEvent should be at the same place.
+        firstDeferred.callback(None)
+        self.assertEqual(events, [('before', 'first'), ('before', 'second'),
+                                  ('after', 'first')])
+
+        # After this, secondEvent should have finished completely.
+        secondDeferred.callback(None)
+        self.assertEqual(events, [('before', 'first'), ('before', 'second'),
+                                  ('after', 'first'), ('after', 'second')])
+
+
+
+class TimeTestCase(unittest.TestCase):
+    """
+    Tests for the IReactorTime part of the reactor.
+    """
+
+
+    def test_seconds(self):
+        """
+        L{twisted.internet.reactor.seconds} should return something
+        like a number.
+
+        1. This test specifically does not assert any relation to the
+           "system time" as returned by L{time.time} or
+           L{twisted.python.runtime.seconds}, because at some point we
+           may find a better option for scheduling calls than
+           wallclock-time.
+        2. This test *also* does not assert anything about the type of
+           the result, because operations may not return ints or
+           floats: For example, datetime-datetime == timedelta(0).
+        """
+        now = reactor.seconds()
+        self.assertEqual(now-now+now, now)
+
+
+    def test_callLaterUsesReactorSecondsInDelayedCall(self):
+        """
+        L{reactor.callLater} should use the reactor's seconds factory
+        to produce the time at which the DelayedCall will be called.
+        """
+        oseconds = reactor.seconds
+        reactor.seconds = lambda: 100
+        try:
+            call = reactor.callLater(5, lambda: None)
+            self.assertEqual(call.getTime(), 105)
+        finally:
+            reactor.seconds = oseconds
+
+
+    def test_callLaterUsesReactorSecondsAsDelayedCallSecondsFactory(self):
+        """
+        L{reactor.callLater} should propagate its own seconds factory
+        to the DelayedCall to use as its own seconds factory.
+        """
+        oseconds = reactor.seconds
+        reactor.seconds = lambda: 100
+        try:
+            call = reactor.callLater(5, lambda: None)
+            self.assertEqual(call.seconds(), 100)
+        finally:
+            reactor.seconds = oseconds
+
+
+    def test_callLater(self):
+        """
+        Test that a DelayedCall really calls the function it is
+        supposed to call.
+        """
+        d = Deferred()
+        reactor.callLater(0, d.callback, None)
+        d.addCallback(self.assertEqual, None)
+        return d
+
+
+    def test_cancelDelayedCall(self):
+        """
+        Test that when a DelayedCall is cancelled it does not run.
+        """
+        called = []
+        def function():
+            called.append(None)
+        call = reactor.callLater(0, function)
+        call.cancel()
+
+        # Schedule a call two reactor "iterations" later to verify that the
+        # cancelled call above never ran.
+        d = Deferred()
+        def check():
+            try:
+                self.assertEqual(called, [])
+            except:
+                d.errback()
+            else:
+                d.callback(None)
+        reactor.callLater(0, reactor.callLater, 0, check)
+        return d
+
+
+    def test_cancelCancelledDelayedCall(self):
+        """
+        Test that cancelling a DelayedCall which has already been cancelled
+        raises the appropriate exception.
+        """
+        call = reactor.callLater(0, lambda: None)
+        call.cancel()
+        self.assertRaises(error.AlreadyCancelled, call.cancel)
+
+
+    def test_cancelCalledDelayedCallSynchronous(self):
+        """
+        Test that cancelling a DelayedCall in the DelayedCall's function as
+        that function is being invoked by the DelayedCall raises the
+        appropriate exception.
+        """
+        d = Deferred()
+        def later():
+            try:
+                self.assertRaises(error.AlreadyCalled, call.cancel)
+            except:
+                d.errback()
+            else:
+                d.callback(None)
+        call = reactor.callLater(0, later)
+        return d
+
+
+    def test_cancelCalledDelayedCallAsynchronous(self):
+        """
+        Test that cancelling a DelayedCall after it has run its function
+        raises the appropriate exception.
+        """
+        d = Deferred()
+        def check():
+            try:
+                self.assertRaises(error.AlreadyCalled, call.cancel)
+            except:
+                d.errback()
+            else:
+                d.callback(None)
+        def later():
+            reactor.callLater(0, check)
+        call = reactor.callLater(0, later)
+        return d
+
+
+    def testCallLaterTime(self):
+        d = reactor.callLater(10, lambda: None)
+        try:
+            self.failUnless(d.getTime() - (time.time() + 10) < 1)
+        finally:
+            d.cancel()
+
+    def testCallLaterOrder(self):
+        l = []
+        l2 = []
+        def f(x):
+            l.append(x)
+        def f2(x):
+            l2.append(x)
+        def done():
+            self.assertEqual(l, range(20))
+        def done2():
+            self.assertEqual(l2, range(10))
+
+        for n in range(10):
+            reactor.callLater(0, f, n)
+        for n in range(10):
+            reactor.callLater(0, f, n+10)
+            reactor.callLater(0.1, f2, n)
+
+        reactor.callLater(0, done)
+        reactor.callLater(0.1, done2)
+        d = Deferred()
+        reactor.callLater(0.2, d.callback, None)
+        return d
+
+    testCallLaterOrder.todo = "See bug 1396"
+    testCallLaterOrder.skip = "Trial bug, todo doesn't work! See bug 1397"
+    def testCallLaterOrder2(self):
+        # This time destroy the clock resolution so that it fails reliably
+        # even on systems that don't have a crappy clock resolution.
+
+        def seconds():
+            return int(time.time())
+
+        base_original = base.seconds
+        runtime_original = runtime.seconds
+        base.seconds = seconds
+        runtime.seconds = seconds
+
+        def cleanup(x):
+            runtime.seconds = runtime_original
+            base.seconds = base_original
+            return x
+        return maybeDeferred(self.testCallLaterOrder).addBoth(cleanup)
+
+    testCallLaterOrder2.todo = "See bug 1396"
+    testCallLaterOrder2.skip = "Trial bug, todo doesn't work! See bug 1397"
+
+    def testDelayedCallStringification(self):
+        # Mostly just make sure str() isn't going to raise anything for
+        # DelayedCalls within reason.
+        dc = reactor.callLater(0, lambda x, y: None, 'x', y=10)
+        str(dc)
+        dc.reset(5)
+        str(dc)
+        dc.cancel()
+        str(dc)
+
+        dc = reactor.callLater(0, lambda: None, x=[({'hello': u'world'}, 10j), reactor], *range(10))
+        str(dc)
+        dc.cancel()
+        str(dc)
+
+        def calledBack(ignored):
+            str(dc)
+        d = Deferred().addCallback(calledBack)
+        dc = reactor.callLater(0, d.callback, None)
+        str(dc)
+        return d
+
+
+    def testDelayedCallSecondsOverride(self):
+        """
+        Test that the C{seconds} argument to DelayedCall gets used instead of
+        the default timing function, if it is not None.
+        """
+        def seconds():
+            return 10
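+        # The trailing argument is the injected timing function; reset(3)
+        # therefore reschedules relative to the fake clock (10 + 3 = 13).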
+        dc = base.DelayedCall(5, lambda: None, (), {}, lambda dc: None,
+                              lambda dc: None, seconds)
+        self.assertEqual(dc.getTime(), 5)
+        dc.reset(3)
+        self.assertEqual(dc.getTime(), 13)
+
+
+class CallFromThreadTests(unittest.TestCase):
+    def testWakeUp(self):
+        # Make sure other threads can wake up the reactor
+        d = Deferred()
+        def wake():
+            time.sleep(0.1)
+            # callFromThread will call wakeUp for us
+            reactor.callFromThread(d.callback, None)
+        reactor.callInThread(wake)
+        return d
+
+    if interfaces.IReactorThreads(reactor, None) is None:
+        testWakeUp.skip = "Nothing to wake up for without thread support"
+
+    def _stopCallFromThreadCallback(self):
+        self.stopped = True
+
+    def _callFromThreadCallback(self, d):
+        reactor.callFromThread(self._callFromThreadCallback2, d)
+        reactor.callLater(0, self._stopCallFromThreadCallback)
+
+    def _callFromThreadCallback2(self, d):
+        try:
+            self.assert_(self.stopped)
+        except:
+            # Send the error to the deferred
+            d.errback()
+        else:
+            d.callback(None)
+
+    def testCallFromThreadStops(self):
+        """
+        Ensure that calling callFromThread from inside a callFromThread
+        callback does not spin in an infinite loop and still lets other
+        scheduled work run.
+        """
+        self.stopped = False
+        d = defer.Deferred()
+        reactor.callFromThread(self._callFromThreadCallback, d)
+        return d
+
+
+class DelayedTestCase(unittest.TestCase):
+    def setUp(self):
+        self.finished = 0
+        self.counter = 0
+        self.timers = {}
+        self.deferred = defer.Deferred()
+
+    def tearDown(self):
+        for t in self.timers.values():
+            t.cancel()
+
+    def checkTimers(self):
+        l1 = self.timers.values()
+        l2 = list(reactor.getDelayedCalls())
+
+        # There should be at least the calls we put in.  There may be other
+        # calls that are none of our business and that we should ignore,
+        # though.
+
+        missing = []
+        for dc in l1:
+            if dc not in l2:
+                missing.append(dc)
+        if missing:
+            self.finished = 1
+        self.failIf(missing, "No calls should have been missing, but these were: " + repr(missing))
+
+    def callback(self, tag):
+        del self.timers[tag]
+        self.checkTimers()
+
+    def addCallback(self, tag):
+        self.callback(tag)
+        self.addTimer(15, self.callback)
+
+    def done(self, tag):
+        self.finished = 1
+        self.callback(tag)
+        self.deferred.callback(None)
+
+    def addTimer(self, when, callback):
+        self.timers[self.counter] = reactor.callLater(when * 0.01, callback,
+                                                      self.counter)
+        self.counter += 1
+        self.checkTimers()
+
+    def testGetDelayedCalls(self):
+        if not hasattr(reactor, "getDelayedCalls"):
+            return
+        # This is not a race because we don't do anything which might call
+        # the reactor until we have all the timers set up. If we did, this
+        # test might fail on slow systems.
+        self.checkTimers()
+        self.addTimer(35, self.done)
+        self.addTimer(20, self.callback)
+        self.addTimer(30, self.callback)
+        which = self.counter
+        self.addTimer(29, self.callback)
+        self.addTimer(25, self.addCallback)
+        self.addTimer(26, self.callback)
+
+        self.timers[which].cancel()
+        del self.timers[which]
+        self.checkTimers()
+
+        self.deferred.addCallback(lambda x : self.checkTimers())
+        return self.deferred
+
+
+    def test_active(self):
+        """
+        L{IDelayedCall.active} returns False once the call has run.
+        """
+        dcall = reactor.callLater(0.01, self.deferred.callback, True)
+        self.assertEqual(dcall.active(), True)
+
+        def checkDeferredCall(success):
+            self.assertEqual(dcall.active(), False)
+            return success
+
+        self.deferred.addCallback(checkDeferredCall)
+
+        return self.deferred
+
+
+
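+# Script template for a child process (Python 2 syntax): %(reactor)s is
+# substituted with the parent's reactor module so the child installs the same
+# reactor, resolves 'localhost', prints the result, and stops.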
+resolve_helper = """
+import %(reactor)s
+%(reactor)s.install()
+from twisted.internet import reactor
+
+class Foo:
+    def __init__(self):
+        reactor.callWhenRunning(self.start)
+        self.timer = reactor.callLater(3, self.failed)
+    def start(self):
+        reactor.resolve('localhost').addBoth(self.done)
+    def done(self, res):
+        print 'done', res
+        reactor.stop()
+    def failed(self):
+        print 'failed'
+        self.timer = None
+        reactor.stop()
+f = Foo()
+reactor.run()
+"""
+
+class ChildResolveProtocol(protocol.ProcessProtocol):
+    def __init__(self, onCompletion):
+        self.onCompletion = onCompletion
+
+    def connectionMade(self):
+        self.output = []
+        self.error = []
+
+    def outReceived(self, out):
+        self.output.append(out)
+
+    def errReceived(self, err):
+        self.error.append(err)
+
+    def processEnded(self, reason):
+        self.onCompletion.callback((reason, self.output, self.error))
+        self.onCompletion = None
+
+
+class Resolve(unittest.TestCase):
+    def testChildResolve(self):
+        # I've seen problems with reactor.run under gtk2reactor. Spawn a
+        # child which just does reactor.resolve after the reactor has
+        # started, fail if it does not complete in a timely fashion.
+        helperPath = os.path.abspath(self.mktemp())
+        helperFile = open(helperPath, 'w')
+
+        # Eeueuuggg
+        reactorName = reactor.__module__
+
+        helperFile.write(resolve_helper % {'reactor': reactorName})
+        helperFile.close()
+
+        env = os.environ.copy()
+        env['PYTHONPATH'] = os.pathsep.join(sys.path)
+
+        helperDeferred = Deferred()
+        helperProto = ChildResolveProtocol(helperDeferred)
+
+        reactor.spawnProcess(helperProto, sys.executable, ("python", "-u", helperPath), env)
+
+        def cbFinished(result):
+            (reason, output, error) = result
+            # If the output is "done 127.0.0.1\n" we don't really care what
+            # else happened.
+            output = ''.join(output)
+            if output != 'done 127.0.0.1\n':
+                self.fail((
+                    "The child process failed to produce the desired results:\n"
+                    "   Reason for termination was: %r\n"
+                    "   Output stream was: %r\n"
+                    "   Error stream was: %r\n") % (reason.getErrorMessage(), output, ''.join(error)))
+
+        helperDeferred.addCallback(cbFinished)
+        return helperDeferred
+
+if not interfaces.IReactorProcess(reactor, None):
+    Resolve.skip = "cannot run test: reactor doesn't support IReactorProcess"
+
+
+
+class CallFromThreadTestCase(unittest.TestCase):
+    """
+    Task scheduling from threads tests.
+    """
+    if interfaces.IReactorThreads(reactor, None) is None:
+        skip = "Nothing to test without thread support"
+
+    def setUp(self):
+        self.counter = 0
+        self.deferred = Deferred()
+
+
+    def schedule(self, *args, **kwargs):
+        """
+        Override in subclasses.
+        """
+        reactor.callFromThread(*args, **kwargs)
+
+
+    def test_lotsOfThreadsAreScheduledCorrectly(self):
+        """
+        L{IReactorThreads.callFromThread} can be used to schedule a large
+        number of calls in the reactor thread.
+        """
+        def addAndMaybeFinish():
+            self.counter += 1
+            if self.counter == 100:
+                self.deferred.callback(True)
+
+        for i in range(100):
+            self.schedule(addAndMaybeFinish)
+
+        return self.deferred
+
+
+    def test_threadsAreRunInScheduledOrder(self):
+        """
+        Callbacks should be invoked in the order they were scheduled.
+        """
+        order = []
+
+        def check(_):
+            self.assertEqual(order, [1, 2, 3])
+
+        self.deferred.addCallback(check)
+        self.schedule(order.append, 1)
+        self.schedule(order.append, 2)
+        self.schedule(order.append, 3)
+        self.schedule(reactor.callFromThread, self.deferred.callback, None)
+
+        return self.deferred
+
+
+    def test_scheduledThreadsNotRunUntilReactorRuns(self):
+        """
+        Scheduled tasks should not be run until the reactor starts running.
+        """
+        def incAndFinish():
+            self.counter = 1
+            self.deferred.callback(True)
+        self.schedule(incAndFinish)
+
+        # Callback shouldn't have fired yet.
+        self.assertEqual(self.counter, 0)
+
+        return self.deferred
+
+
+
+class MyProtocol(protocol.Protocol):
+    """
+    Sample protocol.
+    """
+
+class MyFactory(protocol.Factory):
+    """
+    Sample factory.
+    """
+
+    protocol = MyProtocol
+
+
+class ProtocolTestCase(unittest.TestCase):
+
+    def testFactory(self):
+        factory = MyFactory()
+        protocol = factory.buildProtocol(None)
+        self.assertEqual(protocol.factory, factory)
+        self.assert_(isinstance(protocol, factory.protocol))
+
+
+class DummyProducer(object):
+    """
+    Very uninteresting producer implementation used by tests to ensure the
+    right methods are called by the consumer with which it is registered.
+
+    @type events: C{list} of C{str}
+    @ivar events: The producer/consumer related events which have happened to
+    this producer.  Strings in this list may be C{'resume'}, C{'stop'}, or
+    C{'pause'}.  Elements are added as they occur.
+    """
+
+    def __init__(self):
+        self.events = []
+
+
+    def resumeProducing(self):
+        self.events.append('resume')
+
+
+    def stopProducing(self):
+        self.events.append('stop')
+
+
+    def pauseProducing(self):
+        self.events.append('pause')
+
+
+
+class SillyDescriptor(abstract.FileDescriptor):
+    """
+    A descriptor whose data buffer gets filled very fast.
+
+    Useful for testing FileDescriptor's IConsumer interface, since
+    the data buffer fills as soon as at least four characters are
+    written to it, and gets emptied in a single doWrite() cycle.
+    """
+    bufferSize = 3
+    connected = True
+
+    def writeSomeData(self, data):
+        """
+        Always write all data.
+        """
+        return len(data)
+
+
+    def startWriting(self):
+        """
+        Do nothing: bypass the reactor.
+        """
+    stopWriting = startWriting
+
+
+
+class ReentrantProducer(DummyProducer):
+    """
+    Similar to L{DummyProducer}, but with a resumeProducing method which calls
+    back into an L{IConsumer} method of the consumer against which it is
+    registered.
+
+    @ivar consumer: The consumer with which this producer has been or will
+    be registered.
+
+    @ivar methodName: The name of the method to call on the consumer inside
+    C{resumeProducing}.
+
+    @ivar methodArgs: The arguments to pass to the consumer method invoked in
+    C{resumeProducing}.
+    """
+    def __init__(self, consumer, methodName, *methodArgs):
+        super(ReentrantProducer, self).__init__()
+        self.consumer = consumer
+        self.methodName = methodName
+        self.methodArgs = methodArgs
+
+
+    def resumeProducing(self):
+        super(ReentrantProducer, self).resumeProducing()
+        getattr(self.consumer, self.methodName)(*self.methodArgs)
+
+
+
+class TestProducer(unittest.TestCase):
+    """
+    Test abstract.FileDescriptor's consumer interface.
+    """
+    def test_doubleProducer(self):
+        """
+        Verify that registering a non-streaming producer invokes its
+        resumeProducing() method and that you can only register one producer
+        at a time.
+        """
+        fd = abstract.FileDescriptor()
+        fd.connected = 1
+        dp = DummyProducer()
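+        # The second argument to registerProducer is the streaming flag; 0
+        # registers dp as a non-streaming (pull) producer.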
+        fd.registerProducer(dp, 0)
+        self.assertEqual(dp.events, ['resume'])
+        self.assertRaises(RuntimeError, fd.registerProducer, DummyProducer(), 0)
+
+
+    def test_unconnectedFileDescriptor(self):
+        """
+        Verify that registering a producer when the connection has already
+        been closed invokes its stopProducing() method.
+        """
+        fd = abstract.FileDescriptor()
+        fd.disconnected = 1
+        dp = DummyProducer()
+        fd.registerProducer(dp, 0)
+        self.assertEqual(dp.events, ['stop'])
+
+
+    def _dontPausePullConsumerTest(self, methodName):
+        """
+        Pull consumers don't get their C{pauseProducing} method called if the
+        descriptor buffer fills up.
+
+        @param methodName: Either C{'write'} or C{'writeSequence'}, indicating
+            which transport method to write data to.
+        """
+        descriptor = SillyDescriptor()
+        producer = DummyProducer()
+        descriptor.registerProducer(producer, streaming=False)
+        self.assertEqual(producer.events, ['resume'])
+        del producer.events[:]
+
+        # Fill up the descriptor's write buffer so we can observe whether or
+        # not it pauses its producer in that case.
+        if methodName == "writeSequence":
+            descriptor.writeSequence([b'1', b'2', b'3', b'4'])
+        else:
+            descriptor.write(b'1234')
+
+        self.assertEqual(producer.events, [])
+
+
+    def test_dontPausePullConsumerOnWrite(self):
+        """
+        Verify that FileDescriptor does not call producer.pauseProducing() on a
+        non-streaming pull producer in response to a L{IConsumer.write} call
+        which results in a full write buffer. Issue #2286.
+        """
+        return self._dontPausePullConsumerTest('write')
+
+
+    def test_dontPausePullConsumerOnWriteSequence(self):
+        """
+        Like L{test_dontPausePullConsumerOnWrite}, but for a call to
+        C{writeSequence} rather than L{IConsumer.write}.
+
+        C{writeSequence} is not part of L{IConsumer}, but
+        L{abstract.FileDescriptor} has supported consumery behavior in response
+        to calls to L{writeSequence} forever.
+        """
+        return self._dontPausePullConsumerTest('writeSequence')
+
+
+    def _reentrantStreamingProducerTest(self, methodName):
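+        """
+        Shared helper: register a L{ReentrantProducer} with a
+        L{SillyDescriptor}, fill the write buffer via C{methodName}, and check
+        the pause/resume bookkeeping across repeated C{doWrite} calls.
+        """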
+        descriptor = SillyDescriptor()
+        if methodName == "writeSequence":
+            data = [b's', b'p', b'am']
+        else:
+            data = b"spam"
+        producer = ReentrantProducer(descriptor, methodName, data)
+        descriptor.registerProducer(producer, streaming=True)
+
+        # Start things off by filling up the descriptor's buffer so it will
+        # pause its producer.
+        getattr(descriptor, methodName)(data)
+
+        # Sanity check - make sure that worked.
+        self.assertEqual(producer.events, ['pause'])
+        del producer.events[:]
+
+        # After one call to doWrite, the buffer has been emptied so the
+        # FileDescriptor should resume its producer.  That will result in an
+        # immediate call to FileDescriptor.write which will again fill the
+        # buffer and result in the producer being paused.
+        descriptor.doWrite()
+        self.assertEqual(producer.events, ['resume', 'pause'])
+        del producer.events[:]
+
+        # After a second call to doWrite, the exact same thing should have
+        # happened.  Prior to the bugfix for which this test was written,
+        # FileDescriptor would have incorrectly believed its producer was
+        # already resumed (it was paused) and so not resume it again.
+        descriptor.doWrite()
+        self.assertEqual(producer.events, ['resume', 'pause'])
+
+
+    def test_reentrantStreamingProducerUsingWrite(self):
+        """
+        Verify that FileDescriptor tracks producer's paused state correctly.
+        Issue #811, fixed in revision r12857.
+        """
+        return self._reentrantStreamingProducerTest('write')
+
+
+    def test_reentrantStreamingProducerUsingWriteSequence(self):
+        """
+        Like L{test_reentrantStreamingProducerUsingWrite}, but for calls to
+        C{writeSequence}.
+
+        C{writeSequence} is B{not} part of L{IConsumer}, however
+        C{abstract.FileDescriptor} has supported consumery behavior in response
+        to calls to C{writeSequence} forever.
+        """
+        return self._reentrantStreamingProducerTest('writeSequence')
+
+
+
+class PortStringification(unittest.TestCase):
+    if interfaces.IReactorTCP(reactor, None) is not None:
+        def testTCP(self):
+            p = reactor.listenTCP(0, protocol.ServerFactory())
+            portNo = p.getHost().port
+            self.assertNotEqual(str(p).find(str(portNo)), -1,
+                                "%d not found in %s" % (portNo, p))
+            return p.stopListening()
+
+    if interfaces.IReactorUDP(reactor, None) is not None:
+        def testUDP(self):
+            p = reactor.listenUDP(0, protocol.DatagramProtocol())
+            portNo = p.getHost().port
+            self.assertNotEqual(str(p).find(str(portNo)), -1,
+                                "%d not found in %s" % (portNo, p))
+            return p.stopListening()
+
+    if interfaces.IReactorSSL(reactor, None) is not None and ssl:
+        def testSSL(self, ssl=ssl):
+            pem = util.sibpath(__file__, 'server.pem')
+            p = reactor.listenSSL(0, protocol.ServerFactory(), ssl.DefaultOpenSSLContextFactory(pem, pem))
+            portNo = p.getHost().port
+            self.assertNotEqual(str(p).find(str(portNo)), -1,
+                                "%d not found in %s" % (portNo, p))
+            return p.stopListening()
+
+        if _PY3:
+            testSSL.skip = ("Re-enable once the Python 3 SSL port is done.")
+
diff --git a/ThirdParty/Twisted/twisted/test/test_iutils.py b/ThirdParty/Twisted/twisted/test/test_iutils.py
new file mode 100644
index 0000000..22fd26b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_iutils.py
@@ -0,0 +1,259 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test running processes with the APIs in L{twisted.internet.utils}.
+"""
+
+import warnings, os, stat, sys, signal
+
+from twisted.python.runtime import platform
+from twisted.trial import unittest
+from twisted.internet import error, reactor, utils, interfaces
+
+
+class ProcessUtilsTests(unittest.TestCase):
+    """
+    Test running a process using L{getProcessOutput}, L{getProcessValue}, and
+    L{getProcessOutputAndValue}.
+    """
+
+    if interfaces.IReactorProcess(reactor, None) is None:
+        skip = "reactor doesn't implement IReactorProcess"
+
+    output = None
+    value = None
+    exe = sys.executable
+
+    def makeSourceFile(self, sourceLines):
+        """
+        Write the given list of lines to a text file and return the absolute
+        path to it.
+        """
+        script = self.mktemp()
+        scriptFile = file(script, 'wt')
+        scriptFile.write(os.linesep.join(sourceLines) + os.linesep)
+        scriptFile.close()
+        return os.path.abspath(script)
+
+
+    def test_output(self):
+        """
+        L{getProcessOutput} returns a L{Deferred} which fires with the complete
+        output of the process it runs after that process exits.
+        """
+        scriptFile = self.makeSourceFile([
+                "import sys",
+                "for s in 'hello world\\n':",
+                "    sys.stdout.write(s)",
+                "    sys.stdout.flush()"])
+        d = utils.getProcessOutput(self.exe, ['-u', scriptFile])
+        return d.addCallback(self.assertEqual, "hello world\n")
+
+
+    def test_outputWithErrorIgnored(self):
+        """
+        The L{Deferred} returned by L{getProcessOutput} is fired with an
+        L{IOError} L{Failure} if the child process writes to stderr.
+        """
+        # make sure stderr raises an error normally
+        scriptFile = self.makeSourceFile([
+            'import sys',
+            'sys.stderr.write("hello world\\n")'
+            ])
+
+        d = utils.getProcessOutput(self.exe, ['-u', scriptFile])
+        d = self.assertFailure(d, IOError)
+        def cbFailed(err):
+            return self.assertFailure(err.processEnded, error.ProcessDone)
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_outputWithErrorCollected(self):
+        """
+        If a C{True} value is supplied for the C{errortoo} parameter to
+        L{getProcessOutput}, the returned L{Deferred} fires with the child's
+        stderr output as well as its stdout output.
+        """
+        scriptFile = self.makeSourceFile([
+            'import sys',
+            # Write the same value to both because ordering isn't guaranteed so
+            # this simplifies the test.
+            'sys.stdout.write("foo")',
+            'sys.stdout.flush()',
+            'sys.stderr.write("foo")',
+            'sys.stderr.flush()'])
+
+        d = utils.getProcessOutput(self.exe, ['-u', scriptFile], errortoo=True)
+        return d.addCallback(self.assertEqual, "foofoo")
+
+
+    def test_value(self):
+        """
+        The L{Deferred} returned by L{getProcessValue} is fired with the exit
+        status of the child process.
+        """
+        scriptFile = self.makeSourceFile(["raise SystemExit(1)"])
+
+        d = utils.getProcessValue(self.exe, ['-u', scriptFile])
+        return d.addCallback(self.assertEqual, 1)
+
+
+    def test_outputAndValue(self):
+        """
+        The L{Deferred} returned by L{getProcessOutputAndValue} fires with a
+        three-tuple, the elements of which give the data written to the child's
+        stdout, the data written to the child's stderr, and the exit status of
+        the child.
+        """
+        exe = sys.executable
+        scriptFile = self.makeSourceFile([
+            "import sys",
+            "sys.stdout.write('hello world!\\n')",
+            "sys.stderr.write('goodbye world!\\n')",
+            "sys.exit(1)"
+            ])
+
+        def gotOutputAndValue((out, err, code)):
+            self.assertEqual(out, "hello world!\n")
+            self.assertEqual(err, "goodbye world!" + os.linesep)
+            self.assertEqual(code, 1)
+        d = utils.getProcessOutputAndValue(self.exe, ["-u", scriptFile])
+        return d.addCallback(gotOutputAndValue)
+
+
+    def test_outputSignal(self):
+        """
+        If the child process exits because of a signal, the L{Deferred}
+        returned by L{getProcessOutputAndValue} fires a L{Failure} of a tuple
+        containing the child's stdout, stderr, and the signal which caused
+        it to exit.
+        """
+        # Use SIGKILL here because it's guaranteed to be delivered. Using
+        # SIGHUP might not work in, e.g., a buildbot slave run under the
+        # 'nohup' command.
+        scriptFile = self.makeSourceFile([
+            "import sys, os, signal",
+            "sys.stdout.write('stdout bytes\\n')",
+            "sys.stderr.write('stderr bytes\\n')",
+            "sys.stdout.flush()",
+            "sys.stderr.flush()",
+            "os.kill(os.getpid(), signal.SIGKILL)"])
+
+        def gotOutputAndValue((out, err, sig)):
+            self.assertEqual(out, "stdout bytes\n")
+            self.assertEqual(err, "stderr bytes\n")
+            self.assertEqual(sig, signal.SIGKILL)
+
+        d = utils.getProcessOutputAndValue(self.exe, ['-u', scriptFile])
+        d = self.assertFailure(d, tuple)
+        return d.addCallback(gotOutputAndValue)
+
+    if platform.isWindows():
+        test_outputSignal.skip = "Windows doesn't have real signals."
+
+
+    def _pathTest(self, utilFunc, check):
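+        """
+        Shared helper: run a child via C{utilFunc} with C{path} pointing at a
+        fresh temporary directory and let C{check} compare the child's
+        reported working directory against it.
+        """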
+        dir = os.path.abspath(self.mktemp())
+        os.makedirs(dir)
+        scriptFile = self.makeSourceFile([
+                "import os, sys",
+                "sys.stdout.write(os.getcwd())"])
+        d = utilFunc(self.exe, ['-u', scriptFile], path=dir)
+        d.addCallback(check, dir)
+        return d
+
+
+    def test_getProcessOutputPath(self):
+        """
+        L{getProcessOutput} runs the given command with the working directory
+        given by the C{path} parameter.
+        """
+        return self._pathTest(utils.getProcessOutput, self.assertEqual)
+
+
+    def test_getProcessValuePath(self):
+        """
+        L{getProcessValue} runs the given command with the working directory
+        given by the C{path} parameter.
+        """
+        def check(result, ignored):
+            self.assertEqual(result, 0)
+        return self._pathTest(utils.getProcessValue, check)
+
+
+    def test_getProcessOutputAndValuePath(self):
+        """
+        L{getProcessOutputAndValue} runs the given command with the working
+        directory given by the C{path} parameter.
+        """
+        def check((out, err, status), dir):
+            self.assertEqual(out, dir)
+            self.assertEqual(status, 0)
+        return self._pathTest(utils.getProcessOutputAndValue, check)
+
+
+    def _defaultPathTest(self, utilFunc, check):
+        # Make another directory to mess around with.
+        dir = os.path.abspath(self.mktemp())
+        os.makedirs(dir)
+
+        scriptFile = self.makeSourceFile([
+                "import os, sys, stat",
+                # Fix the permissions so we can report the working directory.
+                # On OS X (and maybe elsewhere), os.getcwd() fails with EACCES
+                # if +x is missing from the working directory.
+                "os.chmod(%r, stat.S_IXUSR)" % (dir,),
+                "sys.stdout.write(os.getcwd())"])
+
+        # Switch to it, but make sure we switch back
+        self.addCleanup(os.chdir, os.getcwd())
+        os.chdir(dir)
+
+        # Get rid of all its permissions, but make sure they get cleaned up
+        # later, because otherwise it might be hard to delete the trial
+        # temporary directory.
+        self.addCleanup(
+            os.chmod, dir, stat.S_IMODE(os.stat('.').st_mode))
+        os.chmod(dir, 0)
+
+        d = utilFunc(self.exe, ['-u', scriptFile])
+        d.addCallback(check, dir)
+        return d
+
+
+    def test_getProcessOutputDefaultPath(self):
+        """
+        If no value is supplied for the C{path} parameter, L{getProcessOutput}
+        runs the given command in the same working directory as the parent
+        process and succeeds even if the current working directory is not
+        accessible.
+        """
+        return self._defaultPathTest(utils.getProcessOutput, self.assertEqual)
+
+
+    def test_getProcessValueDefaultPath(self):
+        """
+        If no value is supplied for the C{path} parameter, L{getProcessValue}
+        runs the given command in the same working directory as the parent
+        process and succeeds even if the current working directory is not
+        accessible.
+        """
+        def check(result, ignored):
+            self.assertEqual(result, 0)
+        return self._defaultPathTest(utils.getProcessValue, check)
+
+
+    def test_getProcessOutputAndValueDefaultPath(self):
+        """
+        If no value is supplied for the C{path} parameter,
+        L{getProcessOutputAndValue} runs the given command in the same working
+        directory as the parent process and succeeds even if the current
+        working directory is not accessible.
+        """
+        def check((out, err, status), dir):
+            self.assertEqual(out, dir)
+            self.assertEqual(status, 0)
+        return self._defaultPathTest(
+            utils.getProcessOutputAndValue, check)
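The path tests above exercise the C{path} parameter; a rough usage sketch (the target directory is hypothetical) looks like this:

    import sys
    from twisted.internet import reactor, utils

    # Run the child in an explicit working directory; omitting path leaves it
    # in the parent's current directory instead.
    d = utils.getProcessOutput(sys.executable,
                               ['-c', 'import os; print os.getcwd()'],
                               path='/tmp')
    d.addCallback(sys.stdout.write)
    d.addErrback(lambda reason: sys.stderr.write(str(reason)))
    d.addBoth(lambda ignored: reactor.stop())
    reactor.run()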
diff --git a/ThirdParty/Twisted/twisted/test/test_jelly.py b/ThirdParty/Twisted/twisted/test/test_jelly.py
new file mode 100644
index 0000000..132e05f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_jelly.py
@@ -0,0 +1,671 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for L{jelly} object serialization.
+"""
+
+import datetime
+
+try:
+    import decimal
+except ImportError:
+    decimal = None
+
+from twisted.spread import jelly, pb
+from twisted.python.compat import set, frozenset
+from twisted.trial import unittest
+from twisted.test.proto_helpers import StringTransport
+
+
+class TestNode(object, jelly.Jellyable):
+    """
+    An object to test jellying of new style class instances.
+    """
+    classAttr = 4
+
+    def __init__(self, parent=None):
+        if parent:
+            self.id = parent.id + 1
+            parent.children.append(self)
+        else:
+            self.id = 1
+        self.parent = parent
+        self.children = []
+
+
+
+class A:
+    """
+    Dummy class.
+    """
+
+    def amethod(self):
+        """
+        Method to be used in serialization tests.
+        """
+
+
+
+def afunc(self):
+    """
+    A dummy function to test function serialization.
+    """
+
+
+
+class B:
+    """
+    Dummy class.
+    """
+
+    def bmethod(self):
+        """
+        Method to be used in serialization tests.
+        """
+
+
+
+class C:
+    """
+    Dummy class.
+    """
+
+    def cmethod(self):
+        """
+        Method to be used in serialization tests.
+        """
+
+
+
+class D(object):
+    """
+    Dummy new-style class.
+    """
+
+
+
+class E(object):
+    """
+    Dummy new-style class with slots.
+    """
+
+    __slots__ = ("x", "y")
+
+    def __init__(self, x=None, y=None):
+        self.x = x
+        self.y = y
+
+
+    def __getstate__(self):
+        return {"x" : self.x, "y" : self.y}
+
+
+    def __setstate__(self, state):
+        self.x = state["x"]
+        self.y = state["y"]
+
+
+
+class SimpleJellyTest:
+    def __init__(self, x, y):
+        self.x = x
+        self.y = y
+
+    def isTheSameAs(self, other):
+        return self.__dict__ == other.__dict__
+
+
+
+class JellyTestCase(unittest.TestCase):
+    """
+    Testcases for L{jelly} module serialization.
+
+    @cvar decimalData: serialized version of decimal data, to be used in tests.
+    @type decimalData: C{list}
+    """
+
+    def _testSecurity(self, inputList, atom):
+        """
+        Helper test method to test security options for a type.
+
+        @param inputList: a sample input for the type.
+        @type inputList: C{list}
+
+        @param atom: atom identifier for the type.
+        @type atom: C{str}
+        """
+        c = jelly.jelly(inputList)
+        taster = jelly.SecurityOptions()
+        taster.allowBasicTypes()
+        # By default, it should succeed
+        jelly.unjelly(c, taster)
+        taster.allowedTypes.pop(atom)
+        # But it should raise an exception when disallowed
+        self.assertRaises(jelly.InsecureJelly, jelly.unjelly, c, taster)
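A minimal sketch of the taster pattern this helper exercises, with a hypothetical Point class standing in for untrusted instance data:

    from twisted.spread import jelly

    class Point:
        def __init__(self, x, y):
            self.x, self.y = x, y

    taster = jelly.SecurityOptions()
    taster.allowBasicTypes()        # ints, strings, lists, dicts, ...

    # Basic types pass the taster.
    print jelly.unjelly(jelly.jelly([1, 2, 'three']), taster)

    # Instances are rejected unless explicitly permitted.
    try:
        jelly.unjelly(jelly.jelly(Point(1, 2)), taster)
    except jelly.InsecureJelly:
        print "instance type not allowed by this taster"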
+
+
+    def test_methodSelfIdentity(self):
+        a = A()
+        b = B()
+        a.bmethod = b.bmethod
+        b.a = a
+        im_ = jelly.unjelly(jelly.jelly(b)).a.bmethod
+        self.assertEqual(im_.im_class, im_.im_self.__class__)
+
+
+    def test_methodsNotSelfIdentity(self):
+        """
+        If a class changes after an instance has been created, L{jelly.unjelly}
+        should raise a C{TypeError} when trying to unjelly the instance.
+        """
+        a = A()
+        b = B()
+        c = C()
+        a.bmethod = c.cmethod
+        b.a = a
+        savecmethod = C.cmethod
+        del C.cmethod
+        try:
+            self.assertRaises(TypeError, jelly.unjelly, jelly.jelly(b))
+        finally:
+            C.cmethod = savecmethod
+
+
+    def test_newStyle(self):
+        n = D()
+        n.x = 1
+        n2 = D()
+        n.n2 = n2
+        n.n3 = n2
+        c = jelly.jelly(n)
+        m = jelly.unjelly(c)
+        self.assertIsInstance(m, D)
+        self.assertIdentical(m.n2, m.n3)
+
+
+    def test_newStyleWithSlots(self):
+        """
+        A class defined with I{slots} can be jellied and unjellied with the
+        values for its attributes preserved.
+        """
+        n = E()
+        n.x = 1
+        c = jelly.jelly(n)
+        m = jelly.unjelly(c)
+        self.assertIsInstance(m, E)
+        self.assertEqual(m.x, 1)
+
+
+    def test_typeOldStyle(self):
+        """
+        Test that an old style class type can be jellied and unjellied
+        to the original type.
+        """
+        t = [C]
+        r = jelly.unjelly(jelly.jelly(t))
+        self.assertEqual(t, r)
+
+
+    def test_typeNewStyle(self):
+        """
+        Test that a new style class type can be jellied and unjellied
+        to the original type.
+        """
+        t = [D]
+        r = jelly.unjelly(jelly.jelly(t))
+        self.assertEqual(t, r)
+
+
+    def test_typeBuiltin(self):
+        """
+        Test that a builtin type can be jellied and unjellied to the original
+        type.
+        """
+        t = [str]
+        r = jelly.unjelly(jelly.jelly(t))
+        self.assertEqual(t, r)
+
+
+    def test_dateTime(self):
+        dtn = datetime.datetime.now()
+        dtd = datetime.datetime.now() - dtn
+        input = [dtn, dtd]
+        c = jelly.jelly(input)
+        output = jelly.unjelly(c)
+        self.assertEqual(input, output)
+        self.assertNotIdentical(input, output)
+
+
+    def test_decimal(self):
+        """
+        Jellying L{decimal.Decimal} instances and then unjellying the result
+        should produce objects which represent the values of the original
+        inputs.
+        """
+        inputList = [decimal.Decimal('9.95'),
+                     decimal.Decimal(0),
+                     decimal.Decimal(123456),
+                     decimal.Decimal('-78.901')]
+        c = jelly.jelly(inputList)
+        output = jelly.unjelly(c)
+        self.assertEqual(inputList, output)
+        self.assertNotIdentical(inputList, output)
+
+
+    decimalData = ['list', ['decimal', 995, -2], ['decimal', 0, 0],
+                           ['decimal', 123456, 0], ['decimal', -78901, -3]]
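The C{decimalData} expectations above read as unscaled-integer/exponent pairs, i.e. each value is C{unscaled * 10 ** exponent}; a small stand-alone sanity check of that reading (not the library's own code):

    import decimal

    d = decimal.Decimal('9.95')
    sign, digits, exponent = d.as_tuple()
    unscaled = int(''.join(map(str, digits)))
    if sign:
        unscaled = -unscaled
    print ['decimal', unscaled, exponent]       # ['decimal', 995, -2]
    assert decimal.Decimal(unscaled) * decimal.Decimal(10) ** exponent == d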
+
+
+    def test_decimalUnjelly(self):
+        """
+        Unjellying the s-expressions produced by jelly for L{decimal.Decimal}
+        instances should result in L{decimal.Decimal} instances with the values
+        represented by the s-expressions.
+
+        This test also verifies that C{self.decimalData} contains valid jellied
+        data.  This is important since L{test_decimalMissing} re-uses
+        C{self.decimalData} and is expected to be unable to produce
+        L{decimal.Decimal} instances even though the s-expression correctly
+        represents a list of them.
+        """
+        expected = [decimal.Decimal('9.95'),
+                    decimal.Decimal(0),
+                    decimal.Decimal(123456),
+                    decimal.Decimal('-78.901')]
+        output = jelly.unjelly(self.decimalData)
+        self.assertEqual(output, expected)
+
+
+    def test_decimalMissing(self):
+        """
+        If decimal is unavailable on the unjelly side, L{jelly.unjelly} should
+        gracefully return L{jelly.Unpersistable} objects.
+        """
+        self.patch(jelly, 'decimal', None)
+        output = jelly.unjelly(self.decimalData)
+        self.assertEqual(len(output), 4)
+        for i in range(4):
+            self.assertIsInstance(output[i], jelly.Unpersistable)
+        self.assertEqual(output[0].reason,
+            "Could not unpersist decimal: 9.95")
+        self.assertEqual(output[1].reason,
+            "Could not unpersist decimal: 0")
+        self.assertEqual(output[2].reason,
+            "Could not unpersist decimal: 123456")
+        self.assertEqual(output[3].reason,
+            "Could not unpersist decimal: -78.901")
+
+
+    def test_decimalSecurity(self):
+        """
+        By default, C{decimal} objects should be allowed by
+        L{jelly.SecurityOptions}. If not allowed, L{jelly.unjelly} should raise
+        L{jelly.InsecureJelly} when trying to unjelly it.
+        """
+        inputList = [decimal.Decimal('9.95')]
+        self._testSecurity(inputList, "decimal")
+
+    if decimal is None:
+        skipReason = "decimal not available"
+        test_decimal.skip = skipReason
+        test_decimalUnjelly.skip = skipReason
+        test_decimalSecurity.skip = skipReason
+
+
+    def test_set(self):
+        """
+        Jellying C{set} instances and then unjellying the result
+        should produce objects which represent the values of the original
+        inputs.
+        """
+        inputList = [set([1, 2, 3])]
+        output = jelly.unjelly(jelly.jelly(inputList))
+        self.assertEqual(inputList, output)
+        self.assertNotIdentical(inputList, output)
+
+
+    def test_frozenset(self):
+        """
+        Jellying C{frozenset} instances and then unjellying the result
+        should produce objects which represent the values of the original
+        inputs.
+        """
+        inputList = [frozenset([1, 2, 3])]
+        output = jelly.unjelly(jelly.jelly(inputList))
+        self.assertEqual(inputList, output)
+        self.assertNotIdentical(inputList, output)
+
+
+    def test_setSecurity(self):
+        """
+        By default, C{set} objects should be allowed by
+        L{jelly.SecurityOptions}. If not allowed, L{jelly.unjelly} should raise
+        L{jelly.InsecureJelly} when trying to unjelly it.
+        """
+        inputList = [set([1, 2, 3])]
+        self._testSecurity(inputList, "set")
+
+
+    def test_frozensetSecurity(self):
+        """
+        By default, C{frozenset} objects should be allowed by
+        L{jelly.SecurityOptions}. If not allowed, L{jelly.unjelly} should raise
+        L{jelly.InsecureJelly} when trying to unjelly it.
+        """
+        inputList = [frozenset([1, 2, 3])]
+        self._testSecurity(inputList, "frozenset")
+
+
+    def test_oldSets(self):
+        """
+        Test jellying C{sets.Set}: it should serialize to the same thing as
+        C{set} jelly, and be unjellied as C{set} if available.
+        """
+        inputList = [jelly._sets.Set([1, 2, 3])]
+        inputJelly = jelly.jelly(inputList)
+        self.assertEqual(inputJelly, jelly.jelly([set([1, 2, 3])]))
+        output = jelly.unjelly(inputJelly)
+        # Even if the class is different, it should coerce to the same list
+        self.assertEqual(list(inputList[0]), list(output[0]))
+        if set is jelly._sets.Set:
+            self.assertIsInstance(output[0], jelly._sets.Set)
+        else:
+            self.assertIsInstance(output[0], set)
+
+
+    def test_oldImmutableSets(self):
+        """
+        Test jellying C{sets.ImmutableSet}: it should serialize to the same
+        thing as C{frozenset} jelly, and be unjellied as C{frozenset} if
+        available.
+        """
+        inputList = [jelly._sets.ImmutableSet([1, 2, 3])]
+        inputJelly = jelly.jelly(inputList)
+        self.assertEqual(inputJelly, jelly.jelly([frozenset([1, 2, 3])]))
+        output = jelly.unjelly(inputJelly)
+        # Even if the class is different, it should coerce to the same list
+        self.assertEqual(list(inputList[0]), list(output[0]))
+        if frozenset is jelly._sets.ImmutableSet:
+            self.assertIsInstance(output[0], jelly._sets.ImmutableSet)
+        else:
+            self.assertIsInstance(output[0], frozenset)
+
+
+    def test_simple(self):
+        """
+        Simplest test case.
+        """
+        self.failUnless(SimpleJellyTest('a', 'b').isTheSameAs(
+                        SimpleJellyTest('a', 'b')))
+        a = SimpleJellyTest(1, 2)
+        cereal = jelly.jelly(a)
+        b = jelly.unjelly(cereal)
+        self.failUnless(a.isTheSameAs(b))
+
+
+    def test_identity(self):
+        """
+        Test to make sure that objects retain identity properly.
+        """
+        x = []
+        y = x
+        x.append(y)
+        x.append(y)
+        self.assertIdentical(x[0], x[1])
+        self.assertIdentical(x[0][0], x)
+        s = jelly.jelly(x)
+        z = jelly.unjelly(s)
+        self.assertIdentical(z[0], z[1])
+        self.assertIdentical(z[0][0], z)
+
+
+    def test_unicode(self):
+        x = unicode('blah')
+        y = jelly.unjelly(jelly.jelly(x))
+        self.assertEqual(x, y)
+        self.assertEqual(type(x), type(y))
+
+
+    def test_stressReferences(self):
+        reref = []
+        toplevelTuple = ({'list': reref}, reref)
+        reref.append(toplevelTuple)
+        s = jelly.jelly(toplevelTuple)
+        z = jelly.unjelly(s)
+        self.assertIdentical(z[0]['list'], z[1])
+        self.assertIdentical(z[0]['list'][0], z)
+
+
+    def test_moreReferences(self):
+        a = []
+        t = (a,)
+        a.append((t,))
+        s = jelly.jelly(t)
+        z = jelly.unjelly(s)
+        self.assertIdentical(z[0][0][0], z)
+
+
+    def test_typeSecurity(self):
+        """
+        Test for type-level security of serialization.
+        """
+        taster = jelly.SecurityOptions()
+        dct = jelly.jelly({})
+        self.assertRaises(jelly.InsecureJelly, jelly.unjelly, dct, taster)
+
+
+    def test_newStyleClasses(self):
+        j = jelly.jelly(D)
+        uj = jelly.unjelly(j)
+        self.assertIdentical(D, uj)
+
+
+    def test_lotsaTypes(self):
+        """
+        Test for all types currently supported in jelly
+        """
+        a = A()
+        jelly.unjelly(jelly.jelly(a))
+        jelly.unjelly(jelly.jelly(a.amethod))
+        items = [afunc, [1, 2, 3], not bool(1), bool(1), 'test', 20.3,
+                 (1, 2, 3), None, A, unittest, {'a': 1}, A.amethod]
+        for i in items:
+            self.assertEqual(i, jelly.unjelly(jelly.jelly(i)))
+
+
+    def test_setState(self):
+        global TupleState
+        class TupleState:
+            def __init__(self, other):
+                self.other = other
+            def __getstate__(self):
+                return (self.other,)
+            def __setstate__(self, state):
+                self.other = state[0]
+            def __hash__(self):
+                return hash(self.other)
+        a = A()
+        t1 = TupleState(a)
+        t2 = TupleState(a)
+        t3 = TupleState((t1, t2))
+        d = {t1: t1, t2: t2, t3: t3, "t3": t3}
+        t3prime = jelly.unjelly(jelly.jelly(d))["t3"]
+        self.assertIdentical(t3prime.other[0].other, t3prime.other[1].other)
+
+
+    def test_classSecurity(self):
+        """
+        Test for class-level security of serialization.
+        """
+        taster = jelly.SecurityOptions()
+        taster.allowInstancesOf(A, B)
+        a = A()
+        b = B()
+        c = C()
+        # add a little complexity to the data
+        a.b = b
+        a.c = c
+        # and a backreference
+        a.x = b
+        b.c = c
+        # first, a friendly insecure serialization
+        friendly = jelly.jelly(a, taster)
+        x = jelly.unjelly(friendly, taster)
+        self.assertIsInstance(x.c, jelly.Unpersistable)
+        # now, a malicious one
+        mean = jelly.jelly(a)
+        self.assertRaises(jelly.InsecureJelly, jelly.unjelly, mean, taster)
+        self.assertIdentical(x.x, x.b, "Identity mismatch")
+        # test class serialization
+        friendly = jelly.jelly(A, taster)
+        x = jelly.unjelly(friendly, taster)
+        self.assertIdentical(x, A, "A came back: %s" % x)
+
+
+    def test_unjellyable(self):
+        """
+        Test that if Unjellyable is used to deserialize a jellied object,
+        state comes out right.
+        """
+        class JellyableTestClass(jelly.Jellyable):
+            pass
+        jelly.setUnjellyableForClass(JellyableTestClass, jelly.Unjellyable)
+        input = JellyableTestClass()
+        input.attribute = 'value'
+        output = jelly.unjelly(jelly.jelly(input))
+        self.assertEqual(output.attribute, 'value')
+        self.assertIsInstance(output, jelly.Unjellyable)
+
+
+    def test_persistentStorage(self):
+        perst = [{}, 1]
+        def persistentStore(obj, jel, perst = perst):
+            perst[1] = perst[1] + 1
+            perst[0][perst[1]] = obj
+            return str(perst[1])
+
+        def persistentLoad(pidstr, unj, perst = perst):
+            pid = int(pidstr)
+            return perst[0][pid]
+
+        a = SimpleJellyTest(1, 2)
+        b = SimpleJellyTest(3, 4)
+        c = SimpleJellyTest(5, 6)
+
+        a.b = b
+        a.c = c
+        c.b = b
+
+        jel = jelly.jelly(a, persistentStore = persistentStore)
+        x = jelly.unjelly(jel, persistentLoad = persistentLoad)
+
+        self.assertIdentical(x.b, x.c.b)
+        self.failUnless(perst[0], "persistentStore was not called.")
+        self.assertIdentical(x.b, a.b, "Persistent storage identity failure.")
+
+
+    def test_newStyleClassesAttributes(self):
+        n = TestNode()
+        n1 = TestNode(n)
+        n11 = TestNode(n1)
+        n2 = TestNode(n)
+        # Jelly it
+        jel = jelly.jelly(n)
+        m = jelly.unjelly(jel)
+        # Check that it has been restored ok
+        self._check_newstyle(n, m)
+
+
+    def _check_newstyle(self, a, b):
+        self.assertEqual(a.id, b.id)
+        self.assertEqual(a.classAttr, 4)
+        self.assertEqual(b.classAttr, 4)
+        self.assertEqual(len(a.children), len(b.children))
+        for x, y in zip(a.children, b.children):
+            self._check_newstyle(x, y)
+
+
+    def test_referenceable(self):
+        """
+        A L{pb.Referenceable} instance jellies to a structure which unjellies to
+        a L{pb.RemoteReference}.  The C{RemoteReference} has a I{luid} that
+        matches up with the local object key in the L{pb.Broker} which sent the
+        L{Referenceable}.
+        """
+        ref = pb.Referenceable()
+        jellyBroker = pb.Broker()
+        jellyBroker.makeConnection(StringTransport())
+        j = jelly.jelly(ref, invoker=jellyBroker)
+
+        unjellyBroker = pb.Broker()
+        unjellyBroker.makeConnection(StringTransport())
+
+        uj = jelly.unjelly(j, invoker=unjellyBroker)
+        self.assertIn(uj.luid, jellyBroker.localObjects)
+
+
+
+class ClassA(pb.Copyable, pb.RemoteCopy):
+    def __init__(self):
+        self.ref = ClassB(self)
+
+
+
+class ClassB(pb.Copyable, pb.RemoteCopy):
+    def __init__(self, ref):
+        self.ref = ref
+
+
+
+class CircularReferenceTestCase(unittest.TestCase):
+    """
+    Tests for circular references handling in the jelly/unjelly process.
+    """
+
+    def test_simpleCircle(self):
+        jelly.setUnjellyableForClass(ClassA, ClassA)
+        jelly.setUnjellyableForClass(ClassB, ClassB)
+        a = jelly.unjelly(jelly.jelly(ClassA()))
+        self.assertIdentical(a.ref.ref, a,
+            "Identity not preserved in circular reference")
+
+
+    def test_circleWithInvoker(self):
+        class DummyInvokerClass:
+            pass
+        dummyInvoker = DummyInvokerClass()
+        dummyInvoker.serializingPerspective = None
+        a0 = ClassA()
+        jelly.setUnjellyableForClass(ClassA, ClassA)
+        jelly.setUnjellyableForClass(ClassB, ClassB)
+        j = jelly.jelly(a0, invoker=dummyInvoker)
+        a1 = jelly.unjelly(j)
+        self.failUnlessIdentical(a1.ref.ref, a1,
+            "Identity not preserved in circular reference")
+
+
+    def test_set(self):
+        """
+        Check that a C{set} can contain a circular reference and be serialized
+        and unserialized without losing the reference.
+        """
+        s = set()
+        a = SimpleJellyTest(s, None)
+        s.add(a)
+        res = jelly.unjelly(jelly.jelly(a))
+        self.assertIsInstance(res.x, set)
+        self.assertEqual(list(res.x), [res])
+
+
+    def test_frozenset(self):
+        """
+        Check that a C{frozenset} can contain a circular reference and be
+        serialized and unserialized without losing the reference.
+        """
+        a = SimpleJellyTest(None, None)
+        s = frozenset([a])
+        a.x = s
+        res = jelly.unjelly(jelly.jelly(a))
+        self.assertIsInstance(res.x, frozenset)
+        self.assertEqual(list(res.x), [res])
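A short sketch of the registration step these tests rely on; the class and attribute names are illustrative. A class is mapped through setUnjellyableForClass so that unjelly knows how to rebuild its instances (here as plain Unjellyable state holders, mirroring test_unjellyable above):

    from twisted.spread import jelly

    class Note(jelly.Jellyable):
        pass

    # Map jellied Note instances onto Unjellyable on the receiving side.
    jelly.setUnjellyableForClass(Note, jelly.Unjellyable)

    original = Note()
    original.text = 'remember the milk'
    copy = jelly.unjelly(jelly.jelly(original))
    print copy.text                 # instance state survives the round trip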
diff --git a/ThirdParty/Twisted/twisted/test/test_lockfile.py b/ThirdParty/Twisted/twisted/test/test_lockfile.py
new file mode 100644
index 0000000..41cfb65
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_lockfile.py
@@ -0,0 +1,445 @@
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.python.lockfile}.
+"""
+
+import os, errno
+
+from twisted.trial import unittest
+from twisted.python import lockfile
+from twisted.python.runtime import platform
+
+skipKill = None
+if platform.isWindows():
+    try:
+        from win32api import OpenProcess
+        import pywintypes
+    except ImportError:
+        skipKill = ("On windows, lockfile.kill is not implemented in the "
+                    "absence of win32api and/or pywintypes.")
+
+class UtilTests(unittest.TestCase):
+    """
+    Tests for the helper functions used to implement L{FilesystemLock}.
+    """
+    def test_symlinkEEXIST(self):
+        """
+        L{lockfile.symlink} raises L{OSError} with C{errno} set to L{EEXIST}
+        when an attempt is made to create a symlink which already exists.
+        """
+        name = self.mktemp()
+        lockfile.symlink('foo', name)
+        exc = self.assertRaises(OSError, lockfile.symlink, 'foo', name)
+        self.assertEqual(exc.errno, errno.EEXIST)
+
+
+    def test_symlinkEIOWindows(self):
+        """
+        L{lockfile.symlink} raises L{OSError} with C{errno} set to L{EIO} when
+        the underlying L{rename} call fails with L{EIO}.
+
+        Renaming a file on Windows may fail if the target of the rename is in
+        the process of being deleted (directory deletion appears not to be
+        atomic).
+        """
+        name = self.mktemp()
+        def fakeRename(src, dst):
+            raise IOError(errno.EIO, None)
+        self.patch(lockfile, 'rename', fakeRename)
+        exc = self.assertRaises(IOError, lockfile.symlink, name, "foo")
+        self.assertEqual(exc.errno, errno.EIO)
+    if not platform.isWindows():
+        test_symlinkEIOWindows.skip = (
+            "special rename EIO handling only necessary and correct on "
+            "Windows.")
+
+
+    def test_readlinkENOENT(self):
+        """
+        L{lockfile.readlink} raises L{OSError} with C{errno} set to L{ENOENT}
+        when an attempt is made to read a symlink which does not exist.
+        """
+        name = self.mktemp()
+        exc = self.assertRaises(OSError, lockfile.readlink, name)
+        self.assertEqual(exc.errno, errno.ENOENT)
+
+
+    def test_readlinkEACCESWindows(self):
+        """
+        L{lockfile.readlink} raises L{OSError} with C{errno} set to L{EACCES}
+        on Windows when the underlying file open attempt fails with C{EACCES}.
+
+        Opening a file on Windows may fail if the path is inside a directory
+        which is in the process of being deleted (directory deletion appears
+        not to be atomic).
+        """
+        name = self.mktemp()
+        def fakeOpen(path, mode):
+            raise IOError(errno.EACCES, None)
+        self.patch(lockfile, '_open', fakeOpen)
+        exc = self.assertRaises(IOError, lockfile.readlink, name)
+        self.assertEqual(exc.errno, errno.EACCES)
+    if not platform.isWindows():
+        test_readlinkEACCESWindows.skip = (
+            "special readlink EACCES handling only necessary and correct on "
+            "Windows.")
+
+
+    def test_kill(self):
+        """
+        L{lockfile.kill} returns without error if passed the PID of a
+        process which exists and signal C{0}.
+        """
+        lockfile.kill(os.getpid(), 0)
+    test_kill.skip = skipKill
+
+
+    def test_killESRCH(self):
+        """
+        L{lockfile.kill} raises L{OSError} with errno of L{ESRCH} if
+        passed a PID which does not correspond to any process.
+        """
+        # Hopefully there is no process with PID 2 ** 31 - 1
+        exc = self.assertRaises(OSError, lockfile.kill, 2 ** 31 - 1, 0)
+        self.assertEqual(exc.errno, errno.ESRCH)
+    test_killESRCH.skip = skipKill
+
+
+    def test_noKillCall(self):
+        """
+        Verify that when L{lockfile.kill} does end up as None (e.g. on Windows
+        without pywin32), it doesn't end up being called and raising a
+        L{TypeError}.
+        """
+        self.patch(lockfile, "kill", None)
+        fl = lockfile.FilesystemLock(self.mktemp())
+        fl.lock()
+        self.assertFalse(fl.lock())
+
+
+
+class LockingTestCase(unittest.TestCase):
+    def _symlinkErrorTest(self, errno):
+        def fakeSymlink(source, dest):
+            raise OSError(errno, None)
+        self.patch(lockfile, 'symlink', fakeSymlink)
+
+        lockf = self.mktemp()
+        lock = lockfile.FilesystemLock(lockf)
+        exc = self.assertRaises(OSError, lock.lock)
+        self.assertEqual(exc.errno, errno)
+
+
+    def test_symlinkError(self):
+        """
+        An exception raised by C{symlink} other than C{EEXIST} is passed up to
+        the caller of L{FilesystemLock.lock}.
+        """
+        self._symlinkErrorTest(errno.ENOSYS)
+
+
+    def test_symlinkErrorPOSIX(self):
+        """
+        An L{OSError} raised by C{symlink} on a POSIX platform with an errno of
+        C{EACCES} or C{EIO} is passed to the caller of L{FilesystemLock.lock}.
+
+        On POSIX, unlike on Windows, these are unexpected errors which cannot
+        be handled by L{FilesystemLock}.
+        """
+        self._symlinkErrorTest(errno.EACCES)
+        self._symlinkErrorTest(errno.EIO)
+    if platform.isWindows():
+        test_symlinkErrorPOSIX.skip = (
+            "POSIX-specific error propagation not expected on Windows.")
+
+
+    def test_cleanlyAcquire(self):
+        """
+        If the lock has never been held, it can be acquired and the C{clean}
+        and C{locked} attributes are set to C{True}.
+        """
+        lockf = self.mktemp()
+        lock = lockfile.FilesystemLock(lockf)
+        self.assertTrue(lock.lock())
+        self.assertTrue(lock.clean)
+        self.assertTrue(lock.locked)
+
+
+    def test_cleanlyRelease(self):
+        """
+        If a lock is released cleanly, it can be re-acquired and the C{clean}
+        and C{locked} attributes are set to C{True}.
+        """
+        lockf = self.mktemp()
+        lock = lockfile.FilesystemLock(lockf)
+        self.assertTrue(lock.lock())
+        lock.unlock()
+        self.assertFalse(lock.locked)
+
+        lock = lockfile.FilesystemLock(lockf)
+        self.assertTrue(lock.lock())
+        self.assertTrue(lock.clean)
+        self.assertTrue(lock.locked)
+
+
+    def test_cannotLockLocked(self):
+        """
+        If a lock is currently locked, it cannot be locked again.
+        """
+        lockf = self.mktemp()
+        firstLock = lockfile.FilesystemLock(lockf)
+        self.assertTrue(firstLock.lock())
+
+        secondLock = lockfile.FilesystemLock(lockf)
+        self.assertFalse(secondLock.lock())
+        self.assertFalse(secondLock.locked)
+
+
+    def test_uncleanlyAcquire(self):
+        """
+        If a lock was held by a process which no longer exists, it can be
+        acquired, the C{clean} attribute is set to C{False}, and the
+        C{locked} attribute is set to C{True}.
+        """
+        owner = 12345
+
+        def fakeKill(pid, signal):
+            if signal != 0:
+                raise OSError(errno.EPERM, None)
+            if pid == owner:
+                raise OSError(errno.ESRCH, None)
+
+        lockf = self.mktemp()
+        self.patch(lockfile, 'kill', fakeKill)
+        lockfile.symlink(str(owner), lockf)
+
+        lock = lockfile.FilesystemLock(lockf)
+        self.assertTrue(lock.lock())
+        self.assertFalse(lock.clean)
+        self.assertTrue(lock.locked)
+
+        self.assertEqual(lockfile.readlink(lockf), str(os.getpid()))
+
+
+    def test_lockReleasedBeforeCheck(self):
+        """
+        If the lock is initially held but then released before it can be
+        examined to determine if the process which held it still exists, it is
+        acquired and the C{clean} and C{locked} attributes are set to C{True}.
+        """
+        def fakeReadlink(name):
+            # Pretend to be another process releasing the lock.
+            lockfile.rmlink(lockf)
+            # Fall back to the real implementation of readlink.
+            readlinkPatch.restore()
+            return lockfile.readlink(name)
+        readlinkPatch = self.patch(lockfile, 'readlink', fakeReadlink)
+
+        def fakeKill(pid, signal):
+            if signal != 0:
+                raise OSError(errno.EPERM, None)
+            if pid == 43125:
+                raise OSError(errno.ESRCH, None)
+        self.patch(lockfile, 'kill', fakeKill)
+
+        lockf = self.mktemp()
+        lock = lockfile.FilesystemLock(lockf)
+        lockfile.symlink(str(43125), lockf)
+        self.assertTrue(lock.lock())
+        self.assertTrue(lock.clean)
+        self.assertTrue(lock.locked)
+
+
+    def test_lockReleasedDuringAcquireSymlink(self):
+        """
+        If the lock is released while an attempt is made to acquire
+        it, the lock attempt fails and C{FilesystemLock.lock} returns
+        C{False}.  This can happen on Windows when L{lockfile.symlink}
+        fails with L{IOError} of C{EIO} because another process is in
+        the middle of a call to L{os.rmdir} (implemented in terms of
+        RemoveDirectory) which is not atomic.
+        """
+        def fakeSymlink(src, dst):
+            # While another process is doing os.rmdir which the Windows
+            # implementation of rmlink does, a rename call will fail with EIO.
+            raise OSError(errno.EIO, None)
+
+        self.patch(lockfile, 'symlink', fakeSymlink)
+
+        lockf = self.mktemp()
+        lock = lockfile.FilesystemLock(lockf)
+        self.assertFalse(lock.lock())
+        self.assertFalse(lock.locked)
+    if not platform.isWindows():
+        test_lockReleasedDuringAcquireSymlink.skip = (
+            "special rename EIO handling only necessary and correct on "
+            "Windows.")
+
+
+    def test_lockReleasedDuringAcquireReadlink(self):
+        """
+        If the lock is initially held but is released while an attempt
+        is made to acquire it, the lock attempt fails and
+        L{FilesystemLock.lock} returns C{False}.
+        """
+        def fakeReadlink(name):
+            # While another process is doing os.rmdir which the
+            # Windows implementation of rmlink does, a readlink call
+            # will fail with EACCES.
+            raise IOError(errno.EACCES, None)
+        readlinkPatch = self.patch(lockfile, 'readlink', fakeReadlink)
+
+        lockf = self.mktemp()
+        lock = lockfile.FilesystemLock(lockf)
+        lockfile.symlink(str(43125), lockf)
+        self.assertFalse(lock.lock())
+        self.assertFalse(lock.locked)
+    if not platform.isWindows():
+        test_lockReleasedDuringAcquireReadlink.skip = (
+            "special readlink EACCES handling only necessary and correct on "
+            "Windows.")
+
+
+    def _readlinkErrorTest(self, exceptionType, errno):
+        def fakeReadlink(name):
+            raise exceptionType(errno, None)
+        self.patch(lockfile, 'readlink', fakeReadlink)
+
+        lockf = self.mktemp()
+
+        # Make it appear locked so it has to use readlink
+        lockfile.symlink(str(43125), lockf)
+
+        lock = lockfile.FilesystemLock(lockf)
+        exc = self.assertRaises(exceptionType, lock.lock)
+        self.assertEqual(exc.errno, errno)
+        self.assertFalse(lock.locked)
+
+
+    def test_readlinkError(self):
+        """
+        An exception raised by C{readlink} other than C{ENOENT} is passed up to
+        the caller of L{FilesystemLock.lock}.
+        """
+        self._readlinkErrorTest(OSError, errno.ENOSYS)
+        self._readlinkErrorTest(IOError, errno.ENOSYS)
+
+
+    def test_readlinkErrorPOSIX(self):
+        """
+        Any L{IOError} raised by C{readlink} on a POSIX platform is passed to
+        caller of L{FilesystemLock.lock}.
+
+        On POSIX, unlike on Windows, these are unexpected errors which cannot
+        be handled by L{FilesystemLock}.
+        """
+        self._readlinkErrorTest(IOError, errno.ENOSYS)
+        self._readlinkErrorTest(IOError, errno.EACCES)
+    if platform.isWindows():
+        test_readlinkErrorPOSIX.skip = (
+            "POSIX-specific error propagation not expected on Windows.")
+
+
+    def test_lockCleanedUpConcurrently(self):
+        """
+        If a second process cleans up the lock after a first one checks the
+        lock and finds that no process is holding it, the first process does
+        not fail when it tries to clean up the lock.
+        """
+        def fakeRmlink(name):
+            rmlinkPatch.restore()
+            # Pretend to be another process cleaning up the lock.
+            lockfile.rmlink(lockf)
+            # Fall back to the real implementation of rmlink.
+            return lockfile.rmlink(name)
+        rmlinkPatch = self.patch(lockfile, 'rmlink', fakeRmlink)
+
+        def fakeKill(pid, signal):
+            if signal != 0:
+                raise OSError(errno.EPERM, None)
+            if pid == 43125:
+                raise OSError(errno.ESRCH, None)
+        self.patch(lockfile, 'kill', fakeKill)
+
+        lockf = self.mktemp()
+        lock = lockfile.FilesystemLock(lockf)
+        lockfile.symlink(str(43125), lockf)
+        self.assertTrue(lock.lock())
+        self.assertTrue(lock.clean)
+        self.assertTrue(lock.locked)
+
+
+    def test_rmlinkError(self):
+        """
+        An exception raised by L{rmlink} other than C{ENOENT} is passed up
+        to the caller of L{FilesystemLock.lock}.
+        """
+        def fakeRmlink(name):
+            raise OSError(errno.ENOSYS, None)
+        self.patch(lockfile, 'rmlink', fakeRmlink)
+
+        def fakeKill(pid, signal):
+            if signal != 0:
+                raise OSError(errno.EPERM, None)
+            if pid == 43125:
+                raise OSError(errno.ESRCH, None)
+        self.patch(lockfile, 'kill', fakeKill)
+
+        lockf = self.mktemp()
+
+        # Make it appear locked so it has to use readlink
+        lockfile.symlink(str(43125), lockf)
+
+        lock = lockfile.FilesystemLock(lockf)
+        exc = self.assertRaises(OSError, lock.lock)
+        self.assertEqual(exc.errno, errno.ENOSYS)
+        self.assertFalse(lock.locked)
+
+
+    def test_killError(self):
+        """
+        If L{kill} raises an exception other than L{OSError} with errno set to
+        C{ESRCH}, the exception is passed up to the caller of
+        L{FilesystemLock.lock}.
+        """
+        def fakeKill(pid, signal):
+            raise OSError(errno.EPERM, None)
+        self.patch(lockfile, 'kill', fakeKill)
+
+        lockf = self.mktemp()
+
+        # Make it appear locked so it has to use readlink
+        lockfile.symlink(str(43125), lockf)
+
+        lock = lockfile.FilesystemLock(lockf)
+        exc = self.assertRaises(OSError, lock.lock)
+        self.assertEqual(exc.errno, errno.EPERM)
+        self.assertFalse(lock.locked)
+
+
+    def test_unlockOther(self):
+        """
+        L{FilesystemLock.unlock} raises L{ValueError} if called for a lock
+        which is held by a different process.
+        """
+        lockf = self.mktemp()
+        lockfile.symlink(str(os.getpid() + 1), lockf)
+        lock = lockfile.FilesystemLock(lockf)
+        self.assertRaises(ValueError, lock.unlock)
+
+
+    def test_isLocked(self):
+        """
+        L{isLocked} returns C{True} if the named lock is currently locked,
+        C{False} otherwise.
+        """
+        lockf = self.mktemp()
+        self.assertFalse(lockfile.isLocked(lockf))
+        lock = lockfile.FilesystemLock(lockf)
+        self.assertTrue(lock.lock())
+        self.assertTrue(lockfile.isLocked(lockf))
+        lock.unlock()
+        self.assertFalse(lockfile.isLocked(lockf))
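A minimal usage sketch of the lock protocol these tests pin down (the lock name is illustrative): lock() returns False instead of blocking when another live process holds the lock, and clean is False when a stale lock left by a dead process had to be reclaimed.

    from twisted.python import lockfile

    lock = lockfile.FilesystemLock('myservice.lock')
    if lock.lock():
        try:
            if not lock.clean:
                print "reclaimed a stale lock left by a dead process"
            # ... critical section: work that must not run concurrently ...
        finally:
            lock.unlock()
    else:
        print "another live process holds the lock"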
diff --git a/ThirdParty/Twisted/twisted/test/test_log.py b/ThirdParty/Twisted/twisted/test/test_log.py
new file mode 100644
index 0000000..75aa08b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_log.py
@@ -0,0 +1,842 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.python.log}.
+"""
+
+from __future__ import division, absolute_import, print_function
+
+from twisted.python.compat import _PY3, NativeStringIO as StringIO
+
+import os, sys, time, logging, warnings, calendar
+
+
+from twisted.trial import unittest
+
+from twisted.python import log, failure
+
+
+class FakeWarning(Warning):
+    """
+    A unique L{Warning} subclass used by tests for interactions of
+    L{twisted.python.log} with the L{warnings} module.
+    """
+
+
+
+class LogTest(unittest.SynchronousTestCase):
+
+    def setUp(self):
+        self.catcher = []
+        self.observer = self.catcher.append
+        log.addObserver(self.observer)
+        self.addCleanup(log.removeObserver, self.observer)
+
+
+    def testObservation(self):
+        catcher = self.catcher
+        log.msg("test", testShouldCatch=True)
+        i = catcher.pop()
+        self.assertEqual(i["message"][0], "test")
+        self.assertEqual(i["testShouldCatch"], True)
+        self.assertIn("time", i)
+        self.assertEqual(len(catcher), 0)
+
+
+    def testContext(self):
+        catcher = self.catcher
+        log.callWithContext({"subsystem": "not the default",
+                             "subsubsystem": "a",
+                             "other": "c"},
+                            log.callWithContext,
+                            {"subsubsystem": "b"}, log.msg, "foo", other="d")
+        i = catcher.pop()
+        self.assertEqual(i['subsubsystem'], 'b')
+        self.assertEqual(i['subsystem'], 'not the default')
+        self.assertEqual(i['other'], 'd')
+        self.assertEqual(i['message'][0], 'foo')
+
+    def testErrors(self):
+        for e, ig in [("hello world","hello world"),
+                      (KeyError(), KeyError),
+                      (failure.Failure(RuntimeError()), RuntimeError)]:
+            log.err(e)
+            i = self.catcher.pop()
+            self.assertEqual(i['isError'], 1)
+            self.flushLoggedErrors(ig)
+
+    def testErrorsWithWhy(self):
+        for e, ig in [("hello world","hello world"),
+                      (KeyError(), KeyError),
+                      (failure.Failure(RuntimeError()), RuntimeError)]:
+            log.err(e, 'foobar')
+            i = self.catcher.pop()
+            self.assertEqual(i['isError'], 1)
+            self.assertEqual(i['why'], 'foobar')
+            self.flushLoggedErrors(ig)
+
+
+    def test_erroneousErrors(self):
+        """
+        Exceptions raised by log observers are logged but the observer which
+        raised the exception remains registered with the publisher.  These
+        exceptions do not prevent the event from being sent to other observers
+        registered with the publisher.
+        """
+        L1 = []
+        L2 = []
+        def broken(events):
+            1 // 0
+
+        for observer in [L1.append, broken, L2.append]:
+            log.addObserver(observer)
+            self.addCleanup(log.removeObserver, observer)
+
+        for i in range(3):
+            # Reset the lists for simpler comparison.
+            L1[:] = []
+            L2[:] = []
+
+            # Send out the event which will break one of the observers.
+            log.msg("Howdy, y'all.")
+
+            # The broken observer should have caused this to be logged.
+            excs = self.flushLoggedErrors(ZeroDivisionError)
+            del self.catcher[:]
+            self.assertEqual(len(excs), 1)
+
+            # Both other observers should have seen the message.
+            self.assertEqual(len(L1), 2)
+            self.assertEqual(len(L2), 2)
+
+            # The order is slightly wrong here.  The first event should be
+            # delivered to all observers; then, errors should be delivered.
+            self.assertEqual(L1[1]['message'], ("Howdy, y'all.",))
+            self.assertEqual(L2[0]['message'], ("Howdy, y'all.",))
+
+
+    def test_doubleErrorDoesNotRemoveObserver(self):
+        """
+        If logging causes an error, make sure that if logging the fact that
+        logging failed also causes an error, the log observer is not removed.
+        """
+        events = []
+        errors = []
+        publisher = log.LogPublisher()
+
+        class FailingObserver(object):
+            calls = 0
+            def log(self, msg, **kwargs):
+                # First call raises RuntimeError:
+                self.calls += 1
+                if self.calls < 2:
+                    raise RuntimeError("Failure #%s" % (self.calls,))
+                else:
+                    events.append(msg)
+
+        observer = FailingObserver()
+        publisher.addObserver(observer.log)
+        self.assertEqual(publisher.observers, [observer.log])
+
+        try:
+            # When observer throws, the publisher attempts to log the fact by
+            # calling self._err()... which also fails with recursion error:
+            oldError = publisher._err
+
+            def failingErr(failure, why, **kwargs):
+                errors.append(failure.value)
+                raise RuntimeError("Fake recursion error")
+
+            publisher._err = failingErr
+            publisher.msg("error in first observer")
+        finally:
+            publisher._err = oldError
+            # Observer should still exist; we do this in finally since before
+            # bug was fixed the test would fail due to uncaught exception, so
+            # we want failing assert too in that case:
+            self.assertEqual(publisher.observers, [observer.log])
+
+        # The next message should succeed:
+        publisher.msg("but this should succeed")
+
+        self.assertEqual(observer.calls, 2)
+        self.assertEqual(len(events), 1)
+        self.assertEqual(events[0]['message'], ("but this should succeed",))
+        self.assertEqual(len(errors), 1)
+        self.assertIsInstance(errors[0], RuntimeError)
+
+
+    def test_showwarning(self):
+        """
+        L{twisted.python.log.showwarning} emits the warning as a message
+        to the Twisted logging system.
+        """
+        publisher = log.LogPublisher()
+        publisher.addObserver(self.observer)
+
+        publisher.showwarning(
+            FakeWarning("unique warning message"), FakeWarning,
+            "warning-filename.py", 27)
+        event = self.catcher.pop()
+        self.assertEqual(
+            event['format'] % event,
+            'warning-filename.py:27: twisted.test.test_log.FakeWarning: '
+            'unique warning message')
+        self.assertEqual(self.catcher, [])
+
+        # Python 2.6 requires that any function used to override the
+        # warnings.showwarning API accept a "line" parameter or a
+        # deprecation warning is emitted.
+        publisher.showwarning(
+            FakeWarning("unique warning message"), FakeWarning,
+            "warning-filename.py", 27, line=object())
+        event = self.catcher.pop()
+        self.assertEqual(
+            event['format'] % event,
+            'warning-filename.py:27: twisted.test.test_log.FakeWarning: '
+            'unique warning message')
+        self.assertEqual(self.catcher, [])
+
+
+    def test_warningToFile(self):
+        """
+        L{twisted.python.log.showwarning} passes warnings with an explicit file
+        target on to the underlying Python warning system.
+        """
+        # log.showwarning depends on _oldshowwarning being set, which only
+        # happens in startLogging(), which doesn't happen if you're not
+        # running under trial. So this test only passes by accident of runner
+        # environment.
+        if log._oldshowwarning is None:
+            raise unittest.SkipTest("Currently this test only runs under trial.")
+        message = "another unique message"
+        category = FakeWarning
+        filename = "warning-filename.py"
+        lineno = 31
+
+        output = StringIO()
+        log.showwarning(message, category, filename, lineno, file=output)
+
+        self.assertEqual(
+            output.getvalue(),
+            warnings.formatwarning(message, category, filename, lineno))
+
+        # In Python 2.6, warnings.showwarning accepts a "line" argument which
+        # gives the source line the warning message is to include.
+        if sys.version_info >= (2, 6):
+            line = "hello world"
+            output = StringIO()
+            log.showwarning(message, category, filename, lineno, file=output,
+                            line=line)
+
+            self.assertEqual(
+                output.getvalue(),
+                warnings.formatwarning(message, category, filename, lineno,
+                                       line))
+
+
+    def test_publisherReportsBrokenObserversPrivately(self):
+        """
+        Log publisher does not use the global L{log.err} when reporting broken
+        observers.
+        """
+        errors = []
+        def logError(eventDict):
+            if eventDict.get("isError"):
+                errors.append(eventDict["failure"].value)
+
+        def fail(eventDict):
+            raise RuntimeError("test_publisherLocalyReportsBrokenObservers")
+
+        publisher = log.LogPublisher()
+        publisher.addObserver(logError)
+        publisher.addObserver(fail)
+
+        publisher.msg("Hello!")
+        self.assertEqual(publisher.observers, [logError, fail])
+        self.assertEqual(len(errors), 1)
+        self.assertIsInstance(errors[0], RuntimeError)
+
+
+
+class FakeFile(list):
+    def write(self, bytes):
+        self.append(bytes)
+
+    def flush(self):
+        pass
+
+class EvilStr:
+    def __str__(self):
+        1//0
+
+class EvilRepr:
+    def __str__(self):
+        return "Happy Evil Repr"
+    def __repr__(self):
+        1//0
+
+class EvilReprStr(EvilStr, EvilRepr):
+    pass
+
+class LogPublisherTestCaseMixin:
+    def setUp(self):
+        """
+        Add a log observer which records log events in C{self.out}.  Also,
+        make sure the default string encoding is ASCII so that
+        L{test_singleUnicode} can test the behavior of logging unencodable
+        unicode messages.
+        """
+        self.out = FakeFile()
+        self.lp = log.LogPublisher()
+        self.flo = log.FileLogObserver(self.out)
+        self.lp.addObserver(self.flo.emit)
+
+        try:
+            str(u'\N{VULGAR FRACTION ONE HALF}')
+        except UnicodeEncodeError:
+            # This is the behavior we want - don't change anything.
+            self._origEncoding = None
+        else:
+            if _PY3:
+                self._origEncoding = None
+                return
+            reload(sys)
+            self._origEncoding = sys.getdefaultencoding()
+            sys.setdefaultencoding('ascii')
+
+
+    def tearDown(self):
+        """
+        Verify that everything written to the fake file C{self.out} was a
+        C{str}.  Also, restore the default string encoding to its previous
+        setting, if it was modified by L{setUp}.
+        """
+        for chunk in self.out:
+            self.failUnless(isinstance(chunk, str), "%r was not a string" % (chunk,))
+
+        if self._origEncoding is not None:
+            sys.setdefaultencoding(self._origEncoding)
+            del sys.setdefaultencoding
+
+
+
+class LogPublisherTestCase(LogPublisherTestCaseMixin, unittest.SynchronousTestCase):
+    def testSingleString(self):
+        self.lp.msg("Hello, world.")
+        self.assertEqual(len(self.out), 1)
+
+
+    def testMultipleString(self):
+        # Test some stupid behavior that will be deprecated real soon.
+        # If you are reading this and trying to learn how the logging
+        # system works, *do not use this feature*.
+        self.lp.msg("Hello, ", "world.")
+        self.assertEqual(len(self.out), 1)
+
+
+    def test_singleUnicode(self):
+        """
+        L{log.LogPublisher.msg} does not accept non-ASCII Unicode on Python 2,
+        logging an error instead.
+
+        On Python 3, where Unicode is default message type, the message is
+        logged normally.
+        """
+        message = u"Hello, \N{VULGAR FRACTION ONE HALF} world."
+        self.lp.msg(message)
+        self.assertEqual(len(self.out), 1)
+        if _PY3:
+            self.assertIn(message, self.out[0])
+        else:
+            self.assertIn('with str error', self.out[0])
+            self.assertIn('UnicodeEncodeError', self.out[0])
+
+
+
+class FileObserverTestCase(LogPublisherTestCaseMixin, unittest.SynchronousTestCase):
+    """
+    Tests for L{log.FileLogObserver}.
+    """
+
+    def test_getTimezoneOffset(self):
+        """
+        Attempt to verify that L{FileLogObserver.getTimezoneOffset} returns
+        correct values for the current C{TZ} environment setting.  Do this
+        by setting C{TZ} to various well-known values and asserting that the
+        reported offset is correct.
+        """
+        localDaylightTuple = (2006, 6, 30, 0, 0, 0, 4, 181, 1)
+        utcDaylightTimestamp = time.mktime(localDaylightTuple)
+        localStandardTuple = (2007, 1, 31, 0, 0, 0, 2, 31, 0)
+        utcStandardTimestamp = time.mktime(localStandardTuple)
+
+        originalTimezone = os.environ.get('TZ', None)
+        try:
+            # Test something west of UTC
+            os.environ['TZ'] = 'America/New_York'
+            time.tzset()
+            self.assertEqual(
+                self.flo.getTimezoneOffset(utcDaylightTimestamp),
+                14400)
+            self.assertEqual(
+                self.flo.getTimezoneOffset(utcStandardTimestamp),
+                18000)
+
+            # Test something east of UTC
+            os.environ['TZ'] = 'Europe/Berlin'
+            time.tzset()
+            self.assertEqual(
+                self.flo.getTimezoneOffset(utcDaylightTimestamp),
+                -7200)
+            self.assertEqual(
+                self.flo.getTimezoneOffset(utcStandardTimestamp),
+                -3600)
+
+            # Test a timezone that doesn't have DST
+            os.environ['TZ'] = 'Africa/Johannesburg'
+            time.tzset()
+            self.assertEqual(
+                self.flo.getTimezoneOffset(utcDaylightTimestamp),
+                -7200)
+            self.assertEqual(
+                self.flo.getTimezoneOffset(utcStandardTimestamp),
+                -7200)
+        finally:
+            if originalTimezone is None:
+                del os.environ['TZ']
+            else:
+                os.environ['TZ'] = originalTimezone
+            time.tzset()
+    if getattr(time, 'tzset', None) is None:
+        test_getTimezoneOffset.skip = (
+            "Platform cannot change timezone, cannot verify correct offsets "
+            "in well-known timezones.")
+
+
+    def test_timeFormatting(self):
+        """
+        Test the method of L{FileLogObserver} which turns a timestamp into a
+        human-readable string.
+        """
+        when = calendar.timegm((2001, 2, 3, 4, 5, 6, 7, 8, 0))
+
+        # Pretend to be in US/Eastern for a moment
+        self.flo.getTimezoneOffset = lambda when: 18000
+        self.assertEqual(self.flo.formatTime(when), '2001-02-02 23:05:06-0500')
+
+        # Okay now we're in Eastern Europe somewhere
+        self.flo.getTimezoneOffset = lambda when: -3600
+        self.assertEqual(self.flo.formatTime(when), '2001-02-03 05:05:06+0100')
+
+        # And off in the Pacific or someplace like that
+        self.flo.getTimezoneOffset = lambda when: -39600
+        self.assertEqual(self.flo.formatTime(when), '2001-02-03 15:05:06+1100')
+
+        # One of those weird places with a half-hour offset timezone
+        self.flo.getTimezoneOffset = lambda when: 5400
+        self.assertEqual(self.flo.formatTime(when), '2001-02-03 02:35:06-0130')
+
+        # Half-hour offset in the other direction
+        self.flo.getTimezoneOffset = lambda when: -5400
+        self.assertEqual(self.flo.formatTime(when), '2001-02-03 05:35:06+0130')
+
+        # Test an offset which is between 0 and 60 minutes to make sure the
+        # sign comes out properly in that case.
+        self.flo.getTimezoneOffset = lambda when: 1800
+        self.assertEqual(self.flo.formatTime(when), '2001-02-03 03:35:06-0030')
+
+        # Test an offset between 0 and 60 minutes in the other direction.
+        self.flo.getTimezoneOffset = lambda when: -1800
+        self.assertEqual(self.flo.formatTime(when), '2001-02-03 04:35:06+0030')
+
+        # If a strftime-format string is present on the logger, it should
+        # use that instead.  Note we don't assert anything about day, hour
+        # or minute because we cannot easily control what time.strftime()
+        # thinks the local timezone is.
+        self.flo.timeFormat = '%Y %m'
+        self.assertEqual(self.flo.formatTime(when), '2001 02')
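The assertions above imply that getTimezoneOffset returns seconds west of UTC and that formatTime prints the negated offset as a +HHMM/-HHMM suffix. A small stand-alone check of that convention (an illustration of the expected values, not the library's own code):

    def utcOffsetSuffix(secondsWestOfUTC):
        secondsEast = -secondsWestOfUTC
        sign = '+' if secondsEast >= 0 else '-'
        hours, remainder = divmod(abs(secondsEast), 3600)
        return '%s%02d%02d' % (sign, hours, remainder // 60)

    assert utcOffsetSuffix(18000) == '-0500'   # US/Eastern standard time
    assert utcOffsetSuffix(-3600) == '+0100'   # Central European standard time
    assert utcOffsetSuffix(5400) == '-0130'    # half-hour offset, west of UTC
    assert utcOffsetSuffix(1800) == '-0030'    # small offsets keep their sign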
+
+
+    def test_loggingAnObjectWithBroken__str__(self):
+        #HELLO, MCFLY
+        self.lp.msg(EvilStr())
+        self.assertEqual(len(self.out), 1)
+        # Logging system shouldn't need to crap itself for this trivial case
+        self.assertNotIn('UNFORMATTABLE', self.out[0])
+
+
+    def test_formattingAnObjectWithBroken__str__(self):
+        self.lp.msg(format='%(blat)s', blat=EvilStr())
+        self.assertEqual(len(self.out), 1)
+        self.assertIn('Invalid format string or unformattable object', self.out[0])
+
+
+    def test_brokenSystem__str__(self):
+        self.lp.msg('huh', system=EvilStr())
+        self.assertEqual(len(self.out), 1)
+        self.assertIn('Invalid format string or unformattable object', self.out[0])
+
+
+    def test_formattingAnObjectWithBroken__repr__Indirect(self):
+        self.lp.msg(format='%(blat)s', blat=[EvilRepr()])
+        self.assertEqual(len(self.out), 1)
+        self.assertIn('UNFORMATTABLE OBJECT', self.out[0])
+
+
+    def test_systemWithBroken__repr__Indirect(self):
+        self.lp.msg('huh', system=[EvilRepr()])
+        self.assertEqual(len(self.out), 1)
+        self.assertIn('UNFORMATTABLE OBJECT', self.out[0])
+
+
+    def test_simpleBrokenFormat(self):
+        self.lp.msg(format='hooj %s %s', blat=1)
+        self.assertEqual(len(self.out), 1)
+        self.assertIn('Invalid format string or unformattable object', self.out[0])
+
+
+    def test_ridiculousFormat(self):
+        self.lp.msg(format=42, blat=1)
+        self.assertEqual(len(self.out), 1)
+        self.assertIn('Invalid format string or unformattable object', self.out[0])
+
+
+    def test_evilFormat__repr__And__str__(self):
+        self.lp.msg(format=EvilReprStr(), blat=1)
+        self.assertEqual(len(self.out), 1)
+        self.assertIn('PATHOLOGICAL', self.out[0])
+
+
+    def test_strangeEventDict(self):
+        """
+        An event dictionary with an empty message and C{isError} false used
+        to be dropped silently; verify that it still produces no output.
+        """
+        self.lp.msg(message='', isError=False)
+        self.assertEqual(len(self.out), 0)
+
+
+    def _startLoggingCleanup(self):
+        """
+        Clean up after a startLogging() call that mutates a great deal of
+        global state.
+        """
+        origShowwarnings = log._oldshowwarning
+        self.addCleanup(setattr, log, "_oldshowwarning", origShowwarnings)
+        self.addCleanup(setattr, sys, 'stdout', sys.stdout)
+        self.addCleanup(setattr, sys, 'stderr', sys.stderr)
+
+    def test_startLogging(self):
+        """
+        startLogging() installs FileLogObserver and overrides sys.stdout and
+        sys.stderr.
+        """
+        origStdout, origStderr = sys.stdout, sys.stderr
+        self._startLoggingCleanup()
+        # When done with test, reset stdout and stderr to current values:
+        fakeFile = StringIO()
+        observer = log.startLogging(fakeFile)
+        self.addCleanup(observer.stop)
+        log.msg("Hello!")
+        self.assertIn("Hello!", fakeFile.getvalue())
+        self.assertIsInstance(sys.stdout, log.StdioOnnaStick)
+        self.assertEqual(sys.stdout.isError, False)
+        encoding = getattr(origStdout, "encoding", None)
+        if not encoding:
+            encoding = sys.getdefaultencoding()
+        self.assertEqual(sys.stdout.encoding, encoding)
+        self.assertIsInstance(sys.stderr, log.StdioOnnaStick)
+        self.assertEqual(sys.stderr.isError, True)
+        encoding = getattr(origStderr, "encoding", None)
+        if not encoding:
+            encoding = sys.getdefaultencoding()
+        self.assertEqual(sys.stderr.encoding, encoding)
+
+
+    def test_startLoggingTwice(self):
+        """
+        There are some obscure error conditions that can occur when logging is
+        started twice. See http://twistedmatrix.com/trac/ticket/3289 for more
+        information.
+        """
+        self._startLoggingCleanup()
+        # The bug is particular to the way the t.p.log 'global' functions
+        # handle stdout. If we use our own stream, the error doesn't occur;
+        # likewise if we use our own LogPublisher.
+        sys.stdout = StringIO()
+
+        def showError(eventDict):
+            if eventDict['isError']:
+                sys.__stdout__.write(eventDict['failure'].getTraceback())
+
+        log.addObserver(showError)
+        self.addCleanup(log.removeObserver, showError)
+        observer = log.startLogging(sys.stdout)
+        self.addCleanup(observer.stop)
+        # At this point, we expect that sys.stdout is a StdioOnnaStick object.
+        self.assertIsInstance(sys.stdout, log.StdioOnnaStick)
+        fakeStdout = sys.stdout
+        observer = log.startLogging(sys.stdout)
+        self.assertIdentical(sys.stdout, fakeStdout)
+
+
+    def test_startLoggingOverridesWarning(self):
+        """
+        startLogging() overrides global C{warnings.showwarning} such that
+        warnings go to Twisted log observers.
+        """
+        self._startLoggingCleanup()
+        # Ugggh, pretend we're starting from newly imported module:
+        log._oldshowwarning = None
+        fakeFile = StringIO()
+        observer = log.startLogging(fakeFile)
+        self.addCleanup(observer.stop)
+        warnings.warn("hello!")
+        output = fakeFile.getvalue()
+        self.assertIn("UserWarning: hello!", output)
+
+
+
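The formatTime assertions above all rely on one sign convention: getTimezoneOffset returns seconds west of UTC, so a positive offset (such as 18000 for US/Eastern) renders as a "-HHMM" suffix while a negative offset renders as "+HHMM". The minimal standalone sketch below only restates that convention; the helper name offsetSuffix is invented for illustration and is not part of the file above.

def offsetSuffix(secondsWestOfUTC):
    # Positive offsets lie west of UTC, so they render with a minus sign.
    sign = "-" if secondsWestOfUTC > 0 else "+"
    minutes = abs(secondsWestOfUTC) // 60
    return "%s%02d%02d" % (sign, minutes // 60, minutes % 60)

assert offsetSuffix(18000) == "-0500"   # US/Eastern
assert offsetSuffix(-3600) == "+0100"   # Central Europe
assert offsetSuffix(5400) == "-0130"    # a half-hour offset
assert offsetSuffix(-1800) == "+0030"   # an offset under one hour

The same convention is behind the skipped timezone test above: time.timezone and time.altzone also count seconds west of UTC, which is why Europe/Berlin shows up as -3600 and Africa/Johannesburg as -7200.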
+class PythonLoggingObserverTestCase(unittest.SynchronousTestCase):
+    """
+    Test the bridge with python logging module.
+    """
+    def setUp(self):
+        self.out = StringIO()
+
+        rootLogger = logging.getLogger("")
+        self.originalLevel = rootLogger.getEffectiveLevel()
+        rootLogger.setLevel(logging.DEBUG)
+        self.hdlr = logging.StreamHandler(self.out)
+        fmt = logging.Formatter(logging.BASIC_FORMAT)
+        self.hdlr.setFormatter(fmt)
+        rootLogger.addHandler(self.hdlr)
+
+        self.lp = log.LogPublisher()
+        self.obs = log.PythonLoggingObserver()
+        self.lp.addObserver(self.obs.emit)
+
+    def tearDown(self):
+        rootLogger = logging.getLogger("")
+        rootLogger.removeHandler(self.hdlr)
+        rootLogger.setLevel(self.originalLevel)
+        logging.shutdown()
+
+    def test_singleString(self):
+        """
+        Test simple output, and default log level.
+        """
+        self.lp.msg("Hello, world.")
+        self.assertIn("Hello, world.", self.out.getvalue())
+        self.assertIn("INFO", self.out.getvalue())
+
+    def test_errorString(self):
+        """
+        Test error output.
+        """
+        self.lp.msg(failure=failure.Failure(ValueError("That is bad.")), isError=True)
+        self.assertIn("ERROR", self.out.getvalue())
+
+    def test_formatString(self):
+        """
+        Test logging with a format.
+        """
+        self.lp.msg(format="%(bar)s oo %(foo)s", bar="Hello", foo="world")
+        self.assertIn("Hello oo world", self.out.getvalue())
+
+    def test_customLevel(self):
+        """
+        Test the logLevel keyword for customizing level used.
+        """
+        self.lp.msg("Spam egg.", logLevel=logging.DEBUG)
+        self.assertIn("Spam egg.", self.out.getvalue())
+        self.assertIn("DEBUG", self.out.getvalue())
+        self.out.seek(0, 0)
+        self.out.truncate()
+        self.lp.msg("Foo bar.", logLevel=logging.WARNING)
+        self.assertIn("Foo bar.", self.out.getvalue())
+        self.assertIn("WARNING", self.out.getvalue())
+
+    def test_strangeEventDict(self):
+        """
+        Verify that an event dictionary which is not an error and has an empty
+        message isn't recorded.
+        """
+        self.lp.msg(message='', isError=False)
+        self.assertEqual(self.out.getvalue(), '')
+
+
+class PythonLoggingIntegrationTestCase(unittest.SynchronousTestCase):
+    """
+    Test integration of python logging bridge.
+    """
+    def test_startStopObserver(self):
+        """
+        Test that the start and stop methods of the observer actually
+        register and unregister it with the log system.
+        """
+        oldAddObserver = log.addObserver
+        oldRemoveObserver = log.removeObserver
+        l = []
+        try:
+            log.addObserver = l.append
+            log.removeObserver = l.remove
+            obs = log.PythonLoggingObserver()
+            obs.start()
+            self.assertEqual(l[0], obs.emit)
+            obs.stop()
+            self.assertEqual(len(l), 0)
+        finally:
+            log.addObserver = oldAddObserver
+            log.removeObserver = oldRemoveObserver
+
+    def test_inheritance(self):
+        """
+        Test that we can inherit L{log.PythonLoggingObserver} and use super:
+        that's basically a validation that L{log.PythonLoggingObserver} is a
+        new-style class.
+        """
+        class MyObserver(log.PythonLoggingObserver):
+            def emit(self, eventDict):
+                super(MyObserver, self).emit(eventDict)
+        obs = MyObserver()
+        l = []
+        oldEmit = log.PythonLoggingObserver.emit
+        try:
+            log.PythonLoggingObserver.emit = l.append
+            obs.emit('foo')
+            self.assertEqual(len(l), 1)
+        finally:
+            log.PythonLoggingObserver.emit = oldEmit
+
+
+
+class DefaultObserverTestCase(unittest.SynchronousTestCase):
+    """
+    Test the default observer.
+    """
+
+    def test_failureLogger(self):
+        """
+        The reason argument passed to log.err() appears in the report
+        generated by DefaultObserver.
+        """
+        self.catcher = []
+        self.observer = self.catcher.append
+        log.addObserver(self.observer)
+        self.addCleanup(log.removeObserver, self.observer)
+
+        obs = log.DefaultObserver()
+        obs.stderr = StringIO()
+        obs.start()
+        self.addCleanup(obs.stop)
+
+        reason = "The reason."
+        log.err(Exception(), reason)
+        errors = self.flushLoggedErrors()
+
+        self.assertIn(reason, obs.stderr.getvalue())
+        self.assertEqual(len(errors), 1)
+
+
+
+class StdioOnnaStickTestCase(unittest.SynchronousTestCase):
+    """
+    StdioOnnaStick should act like the normal sys.stdout object.
+    """
+
+    def setUp(self):
+        self.resultLogs = []
+        log.addObserver(self.resultLogs.append)
+
+
+    def tearDown(self):
+        log.removeObserver(self.resultLogs.append)
+
+
+    def getLogMessages(self):
+        return ["".join(d['message']) for d in self.resultLogs]
+
+
+    def test_write(self):
+        """
+        Writing to a StdioOnnaStick instance results in Twisted log messages.
+
+        Log messages are generated every time a '\n' is encountered.
+        """
+        stdio = log.StdioOnnaStick()
+        stdio.write("Hello there\nThis is a test")
+        self.assertEqual(self.getLogMessages(), ["Hello there"])
+        stdio.write("!\n")
+        self.assertEqual(self.getLogMessages(), ["Hello there", "This is a test!"])
+
+
+    def test_metadata(self):
+        """
+        The log messages written by StdioOnnaStick carry a true C{printed}
+        key and, by default, are not errors.
+        """
+        stdio = log.StdioOnnaStick()
+        stdio.write("hello\n")
+        self.assertEqual(self.resultLogs[0]['isError'], False)
+        self.assertEqual(self.resultLogs[0]['printed'], True)
+
+
+    def test_writeLines(self):
+        """
+        Writing lines to a StdioOnnaStick results in Twisted log messages.
+        """
+        stdio = log.StdioOnnaStick()
+        stdio.writelines(["log 1", "log 2"])
+        self.assertEqual(self.getLogMessages(), ["log 1", "log 2"])
+
+
+    def test_print(self):
+        """
+        When StdioOnnaStick is set as sys.stdout, prints become log messages.
+        """
+        oldStdout = sys.stdout
+        sys.stdout = log.StdioOnnaStick()
+        self.addCleanup(setattr, sys, "stdout", oldStdout)
+        print("This", end=" ")
+        print("is a test")
+        self.assertEqual(self.getLogMessages(), ["This is a test"])
+
+
+    def test_error(self):
+        """
+        StdioOnnaStick created with C{isError=True} logs messages as errors.
+        """
+        stdio = log.StdioOnnaStick(isError=True)
+        stdio.write("log 1\n")
+        self.assertEqual(self.resultLogs[0]['isError'], True)
+
+
+    def test_unicode(self):
+        """
+        StdioOnnaStick converts unicode prints to byte strings on Python 2, in
+        order to be compatible with the normal stdout/stderr objects.
+
+        On Python 3, the prints are left unmodified.
+        """
+        unicodeString = u"Hello, \N{VULGAR FRACTION ONE HALF} world."
+        stdio = log.StdioOnnaStick(encoding="utf-8")
+        self.assertEqual(stdio.encoding, "utf-8")
+        stdio.write(unicodeString + u"\n")
+        stdio.writelines([u"Also, " + unicodeString])
+        oldStdout = sys.stdout
+        sys.stdout = stdio
+        self.addCleanup(setattr, sys, "stdout", oldStdout)
+        # This should go to the log, utf-8 encoded too:
+        print(unicodeString)
+        if _PY3:
+            self.assertEqual(self.getLogMessages(),
+                             [unicodeString,
+                              u"Also, " + unicodeString,
+                              unicodeString])
+        else:
+            self.assertEqual(self.getLogMessages(),
+                             [unicodeString.encode("utf-8"),
+                              (u"Also, " + unicodeString).encode("utf-8"),
+                              unicodeString.encode("utf-8")])
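The StdioOnnaStick write tests above pin down a simple line-buffering contract: data written to the fake stdout is buffered, split on newlines, each complete line becomes one Twisted log message, and a trailing partial line is held until the next write. The sketch below only illustrates that contract; the class name LineBuffer is invented here and is not Twisted code.

class LineBuffer(object):
    """Emit one callback per complete line written; keep partial lines."""
    def __init__(self, emit):
        self.emit = emit
        self.buf = ""

    def write(self, data):
        self.buf += data
        while "\n" in self.buf:
            line, self.buf = self.buf.split("\n", 1)
            self.emit(line)

messages = []
out = LineBuffer(messages.append)
out.write("Hello there\nThis is a test")
out.write("!\n")
assert messages == ["Hello there", "This is a test!"]

test_writeLines above shows the related writelines behaviour, where every element passed in is treated as a complete line.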
diff --git a/ThirdParty/Twisted/twisted/test/test_logfile.py b/ThirdParty/Twisted/twisted/test/test_logfile.py
new file mode 100644
index 0000000..e7db238
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_logfile.py
@@ -0,0 +1,320 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import os, time, stat, errno
+
+from twisted.trial import unittest
+from twisted.python import logfile, runtime
+
+
+class LogFileTestCase(unittest.TestCase):
+    """
+    Test the rotating log file.
+    """
+
+    def setUp(self):
+        self.dir = self.mktemp()
+        os.makedirs(self.dir)
+        self.name = "test.log"
+        self.path = os.path.join(self.dir, self.name)
+
+
+    def tearDown(self):
+        """
+        Restore write permissions on the created paths: if a test modified
+        them, this allows the paths to be removed easily afterwards.
+        """
+        os.chmod(self.dir, 0777)
+        if os.path.exists(self.path):
+            os.chmod(self.path, 0777)
+
+
+    def testWriting(self):
+        log = logfile.LogFile(self.name, self.dir)
+        log.write("123")
+        log.write("456")
+        log.flush()
+        log.write("7890")
+        log.close()
+
+        f = open(self.path, "r")
+        self.assertEqual(f.read(), "1234567890")
+        f.close()
+
+    def testRotation(self):
+        # this logfile should rotate every 10 bytes
+        log = logfile.LogFile(self.name, self.dir, rotateLength=10)
+
+        # test automatic rotation
+        log.write("123")
+        log.write("4567890")
+        log.write("1" * 11)
+        self.assert_(os.path.exists("%s.1" % self.path))
+        self.assert_(not os.path.exists("%s.2" % self.path))
+        log.write('')
+        self.assert_(os.path.exists("%s.1" % self.path))
+        self.assert_(os.path.exists("%s.2" % self.path))
+        self.assert_(not os.path.exists("%s.3" % self.path))
+        log.write("3")
+        self.assert_(not os.path.exists("%s.3" % self.path))
+
+        # test manual rotation
+        log.rotate()
+        self.assert_(os.path.exists("%s.3" % self.path))
+        self.assert_(not os.path.exists("%s.4" % self.path))
+        log.close()
+
+        self.assertEqual(log.listLogs(), [1, 2, 3])
+
+    def testAppend(self):
+        log = logfile.LogFile(self.name, self.dir)
+        log.write("0123456789")
+        log.close()
+
+        log = logfile.LogFile(self.name, self.dir)
+        self.assertEqual(log.size, 10)
+        self.assertEqual(log._file.tell(), log.size)
+        log.write("abc")
+        self.assertEqual(log.size, 13)
+        self.assertEqual(log._file.tell(), log.size)
+        f = log._file
+        f.seek(0, 0)
+        self.assertEqual(f.read(), "0123456789abc")
+        log.close()
+
+    def testLogReader(self):
+        log = logfile.LogFile(self.name, self.dir)
+        log.write("abc\n")
+        log.write("def\n")
+        log.rotate()
+        log.write("ghi\n")
+        log.flush()
+
+        # check reading logs
+        self.assertEqual(log.listLogs(), [1])
+        reader = log.getCurrentLog()
+        reader._file.seek(0)
+        self.assertEqual(reader.readLines(), ["ghi\n"])
+        self.assertEqual(reader.readLines(), [])
+        reader.close()
+        reader = log.getLog(1)
+        self.assertEqual(reader.readLines(), ["abc\n", "def\n"])
+        self.assertEqual(reader.readLines(), [])
+        reader.close()
+
+        # check getting illegal log readers
+        self.assertRaises(ValueError, log.getLog, 2)
+        self.assertRaises(TypeError, log.getLog, "1")
+
+        # check that log numbers are higher for older logs
+        log.rotate()
+        self.assertEqual(log.listLogs(), [1, 2])
+        reader = log.getLog(1)
+        reader._file.seek(0)
+        self.assertEqual(reader.readLines(), ["ghi\n"])
+        self.assertEqual(reader.readLines(), [])
+        reader.close()
+        reader = log.getLog(2)
+        self.assertEqual(reader.readLines(), ["abc\n", "def\n"])
+        self.assertEqual(reader.readLines(), [])
+        reader.close()
+
+    def testModePreservation(self):
+        """
+        Check rotated files have same permissions as original.
+        """
+        open(self.path, "w").close()
+        os.chmod(self.path, 0707)
+        mode = os.stat(self.path)[stat.ST_MODE]
+        log = logfile.LogFile(self.name, self.dir)
+        log.write("abc")
+        log.rotate()
+        self.assertEqual(mode, os.stat(self.path)[stat.ST_MODE])
+
+
+    def test_noPermission(self):
+        """
+        Check that logging keeps working when permissions on the directory
+        change.
+        """
+        log = logfile.LogFile(self.name, self.dir)
+        log.write("abc")
+
+        # change permissions so rotation would fail
+        os.chmod(self.dir, 0555)
+
+        # if this succeeds, chmod doesn't restrict us, so we can't
+        # do the test
+        try:
+            f = open(os.path.join(self.dir,"xxx"), "w")
+        except (OSError, IOError):
+            pass
+        else:
+            f.close()
+            return
+
+        log.rotate() # this should not fail
+
+        log.write("def")
+        log.flush()
+
+        f = log._file
+        self.assertEqual(f.tell(), 6)
+        f.seek(0, 0)
+        self.assertEqual(f.read(), "abcdef")
+        log.close()
+
+
+    def test_maxNumberOfLog(self):
+        """
+        Test that the limit on the number of rotated files is respected when
+        maxRotatedFiles is not None.
+        """
+        log = logfile.LogFile(self.name, self.dir, rotateLength=10,
+                              maxRotatedFiles=3)
+        log.write("1" * 11)
+        log.write("2" * 11)
+        self.failUnless(os.path.exists("%s.1" % self.path))
+
+        log.write("3" * 11)
+        self.failUnless(os.path.exists("%s.2" % self.path))
+
+        log.write("4" * 11)
+        self.failUnless(os.path.exists("%s.3" % self.path))
+        self.assertEqual(file("%s.3" % self.path).read(), "1" * 11)
+
+        log.write("5" * 11)
+        self.assertEqual(file("%s.3" % self.path).read(), "2" * 11)
+        self.failUnless(not os.path.exists("%s.4" % self.path))
+
+    def test_fromFullPath(self):
+        """
+        Test the fromFullPath method.
+        """
+        log1 = logfile.LogFile(self.name, self.dir, 10, defaultMode=0777)
+        log2 = logfile.LogFile.fromFullPath(self.path, 10, defaultMode=0777)
+        self.assertEqual(log1.name, log2.name)
+        self.assertEqual(os.path.abspath(log1.path), log2.path)
+        self.assertEqual(log1.rotateLength, log2.rotateLength)
+        self.assertEqual(log1.defaultMode, log2.defaultMode)
+
+    def test_defaultPermissions(self):
+        """
+        Test the default permissions of the log file: if the file already
+        exists, it should keep its permissions.
+        """
+        f = file(self.path, "w")
+        os.chmod(self.path, 0707)
+        currentMode = stat.S_IMODE(os.stat(self.path)[stat.ST_MODE])
+        f.close()
+        log1 = logfile.LogFile(self.name, self.dir)
+        self.assertEqual(stat.S_IMODE(os.stat(self.path)[stat.ST_MODE]),
+                          currentMode)
+
+
+    def test_specifiedPermissions(self):
+        """
+        Test specifying the permissions used on the log file.
+        """
+        log1 = logfile.LogFile(self.name, self.dir, defaultMode=0066)
+        mode = stat.S_IMODE(os.stat(self.path)[stat.ST_MODE])
+        if runtime.platform.isWindows():
+            # The only thing we can get here is global read-only
+            self.assertEqual(mode, 0444)
+        else:
+            self.assertEqual(mode, 0066)
+
+
+    def test_reopen(self):
+        """
+        L{logfile.LogFile.reopen} allows renaming the currently used file and
+        makes L{logfile.LogFile} create a new one.
+        """
+        log1 = logfile.LogFile(self.name, self.dir)
+        log1.write("hello1")
+        savePath = os.path.join(self.dir, "save.log")
+        os.rename(self.path, savePath)
+        log1.reopen()
+        log1.write("hello2")
+        log1.close()
+
+        f = open(self.path, "r")
+        self.assertEqual(f.read(), "hello2")
+        f.close()
+        f = open(savePath, "r")
+        self.assertEqual(f.read(), "hello1")
+        f.close()
+
+    if runtime.platform.isWindows():
+        test_reopen.skip = "Can't test reopen on Windows"
+
+
+    def test_nonExistentDir(self):
+        """
+        Specifying an invalid directory to L{LogFile} raises C{IOError}.
+        """
+        e = self.assertRaises(
+            IOError, logfile.LogFile, self.name, 'this_dir_does_not_exist')
+        self.assertEqual(e.errno, errno.ENOENT)
+
+
+
+class RiggedDailyLogFile(logfile.DailyLogFile):
+    _clock = 0.0
+
+    def _openFile(self):
+        logfile.DailyLogFile._openFile(self)
+        # rig the date to match _clock, not mtime
+        self.lastDate = self.toDate()
+
+    def toDate(self, *args):
+        if args:
+            return time.gmtime(*args)[:3]
+        return time.gmtime(self._clock)[:3]
+
+class DailyLogFileTestCase(unittest.TestCase):
+    """
+    Test the daily rotating log file.
+    """
+
+    def setUp(self):
+        self.dir = self.mktemp()
+        os.makedirs(self.dir)
+        self.name = "testdaily.log"
+        self.path = os.path.join(self.dir, self.name)
+
+
+    def testWriting(self):
+        log = RiggedDailyLogFile(self.name, self.dir)
+        log.write("123")
+        log.write("456")
+        log.flush()
+        log.write("7890")
+        log.close()
+
+        f = open(self.path, "r")
+        self.assertEqual(f.read(), "1234567890")
+        f.close()
+
+    def testRotation(self):
+        # this logfile should rotate daily, driven by the rigged clock
+        log = RiggedDailyLogFile(self.name, self.dir)
+        days = [(self.path + '.' + log.suffix(day * 86400)) for day in range(3)]
+
+        # test automatic rotation
+        log._clock = 0.0    # 1970/01/01 00:00.00
+        log.write("123")
+        log._clock = 43200  # 1970/01/01 12:00.00
+        log.write("4567890")
+        log._clock = 86400  # 1970/01/02 00:00.00
+        log.write("1" * 11)
+        self.assert_(os.path.exists(days[0]))
+        self.assert_(not os.path.exists(days[1]))
+        log._clock = 172800 # 1970/01/03 00:00.00
+        log.write('')
+        self.assert_(os.path.exists(days[0]))
+        self.assert_(os.path.exists(days[1]))
+        self.assert_(not os.path.exists(days[2]))
+        log._clock = 259199 # 1970/01/03 23:59.59
+        log.write("3")
+        self.assert_(not os.path.exists(days[2]))
+
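The rotation tests above pin down LogFile's naming scheme: on rotation the current file becomes "<name>.1", previously rotated files shift to higher suffixes (so larger numbers are older), and maxRotatedFiles discards the oldest file once the limit is reached. The sketch below only illustrates that rename cascade under those assumptions; the function name rotate and its exact structure are not taken from twisted.python.logfile.

import os

def rotate(path, maxRotatedFiles=None):
    # Find the first unused suffix: path.1, path.2, ... (higher means older).
    n = 1
    while os.path.exists("%s.%d" % (path, n)):
        n += 1
    if maxRotatedFiles is not None and n > maxRotatedFiles:
        # Drop the oldest rotated file so the cascade stays within the limit.
        os.remove("%s.%d" % (path, maxRotatedFiles))
        n = maxRotatedFiles
    # Shift path.(n-1) -> path.n, ..., path.1 -> path.2, then path -> path.1.
    for i in range(n, 1, -1):
        os.rename("%s.%d" % (path, i - 1), "%s.%d" % (path, i))
    os.rename(path, "%s.1" % (path,))

This matches test_maxNumberOfLog above, where with maxRotatedFiles=3 the contents of "<name>.3" always end up being those of the oldest surviving rotation.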
diff --git a/ThirdParty/Twisted/twisted/test/test_loopback.py b/ThirdParty/Twisted/twisted/test/test_loopback.py
new file mode 100644
index 0000000..21a93b5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_loopback.py
@@ -0,0 +1,431 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test case for L{twisted.protocols.loopback}.
+"""
+
+from __future__ import division, absolute_import
+
+from zope.interface import implementer
+
+from twisted.python.compat import _PY3, intToBytes
+from twisted.trial import unittest
+from twisted.trial.util import suppress as SUPPRESS
+from twisted.protocols import basic, loopback
+from twisted.internet import defer
+from twisted.internet.protocol import Protocol
+from twisted.internet.defer import Deferred
+from twisted.internet.interfaces import IAddress, IPushProducer, IPullProducer
+from twisted.internet import reactor, interfaces
+
+
+class SimpleProtocol(basic.LineReceiver):
+    def __init__(self):
+        self.conn = defer.Deferred()
+        self.lines = []
+        self.connLost = []
+
+    def connectionMade(self):
+        self.conn.callback(None)
+
+    def lineReceived(self, line):
+        self.lines.append(line)
+
+    def connectionLost(self, reason):
+        self.connLost.append(reason)
+
+
+class DoomProtocol(SimpleProtocol):
+    i = 0
+    def lineReceived(self, line):
+        self.i += 1
+        if self.i < 4:
+            # By this point the connection should be closed, but just in
+            # case it isn't, never send 'Hello 4'.
+            self.sendLine(b"Hello " + intToBytes(self.i))
+        SimpleProtocol.lineReceived(self, line)
+        if self.lines[-1] == b"Hello 3":
+            self.transport.loseConnection()
+
+
+class LoopbackTestCaseMixin:
+    def testRegularFunction(self):
+        s = SimpleProtocol()
+        c = SimpleProtocol()
+
+        def sendALine(result):
+            s.sendLine(b"THIS IS LINE ONE!")
+            s.transport.loseConnection()
+        s.conn.addCallback(sendALine)
+
+        def check(ignored):
+            self.assertEqual(c.lines, [b"THIS IS LINE ONE!"])
+            self.assertEqual(len(s.connLost), 1)
+            self.assertEqual(len(c.connLost), 1)
+        d = defer.maybeDeferred(self.loopbackFunc, s, c)
+        d.addCallback(check)
+        return d
+
+    def testSneakyHiddenDoom(self):
+        s = DoomProtocol()
+        c = DoomProtocol()
+
+        def sendALine(result):
+            s.sendLine(b"DOOM LINE")
+        s.conn.addCallback(sendALine)
+
+        def check(ignored):
+            self.assertEqual(s.lines, [b'Hello 1', b'Hello 2', b'Hello 3'])
+            self.assertEqual(
+                c.lines, [b'DOOM LINE', b'Hello 1', b'Hello 2', b'Hello 3'])
+            self.assertEqual(len(s.connLost), 1)
+            self.assertEqual(len(c.connLost), 1)
+        d = defer.maybeDeferred(self.loopbackFunc, s, c)
+        d.addCallback(check)
+        return d
+
+
+
+class LoopbackAsyncTestCase(LoopbackTestCaseMixin, unittest.TestCase):
+    loopbackFunc = staticmethod(loopback.loopbackAsync)
+
+
+    def test_makeConnection(self):
+        """
+        Test that the client and server protocol both have makeConnection
+        invoked on them by loopbackAsync.
+        """
+        class TestProtocol(Protocol):
+            transport = None
+            def makeConnection(self, transport):
+                self.transport = transport
+
+        server = TestProtocol()
+        client = TestProtocol()
+        loopback.loopbackAsync(server, client)
+        self.failIfEqual(client.transport, None)
+        self.failIfEqual(server.transport, None)
+
+
+    def _hostpeertest(self, get, testServer):
+        """
+        Test one of the permutations of client/server host/peer.
+        """
+        class TestProtocol(Protocol):
+            def makeConnection(self, transport):
+                Protocol.makeConnection(self, transport)
+                self.onConnection.callback(transport)
+
+        if testServer:
+            server = TestProtocol()
+            d = server.onConnection = Deferred()
+            client = Protocol()
+        else:
+            server = Protocol()
+            client = TestProtocol()
+            d = client.onConnection = Deferred()
+
+        loopback.loopbackAsync(server, client)
+
+        def connected(transport):
+            host = getattr(transport, get)()
+            self.failUnless(IAddress.providedBy(host))
+
+        return d.addCallback(connected)
+
+
+    def test_serverHost(self):
+        """
+        Test that the server gets a transport with a properly functioning
+        implementation of L{ITransport.getHost}.
+        """
+        return self._hostpeertest("getHost", True)
+
+
+    def test_serverPeer(self):
+        """
+        Like C{test_serverHost} but for L{ITransport.getPeer}
+        """
+        return self._hostpeertest("getPeer", True)
+
+
+    def test_clientHost(self, get="getHost"):
+        """
+        Test that the client gets a transport with a properly functioning
+        implementation of L{ITransport.getHost}.
+        """
+        return self._hostpeertest("getHost", False)
+
+
+    def test_clientPeer(self):
+        """
+        Like C{test_clientHost} but for L{ITransport.getPeer}.
+        """
+        return self._hostpeertest("getPeer", False)
+
+
+    def _greetingtest(self, write, testServer):
+        """
+        Test one of the permutations of write/writeSequence client/server.
+
+        @param write: The name of the method to test, C{"write"} or
+            C{"writeSequence"}.
+        """
+        class GreeteeProtocol(Protocol):
+            bytes = b""
+            def dataReceived(self, bytes):
+                self.bytes += bytes
+                if self.bytes == b"bytes":
+                    self.received.callback(None)
+
+        class GreeterProtocol(Protocol):
+            def connectionMade(self):
+                if write == "write":
+                    self.transport.write(b"bytes")
+                else:
+                    self.transport.writeSequence([b"byt", b"es"])
+
+        if testServer:
+            server = GreeterProtocol()
+            client = GreeteeProtocol()
+            d = client.received = Deferred()
+        else:
+            server = GreeteeProtocol()
+            d = server.received = Deferred()
+            client = GreeterProtocol()
+
+        loopback.loopbackAsync(server, client)
+        return d
+
+
+    def test_clientGreeting(self):
+        """
+        Test that on a connection where the client speaks first, the server
+        receives the bytes sent by the client.
+        """
+        return self._greetingtest("write", False)
+
+
+    def test_clientGreetingSequence(self):
+        """
+        Like C{test_clientGreeting}, but use C{writeSequence} instead of
+        C{write} to issue the greeting.
+        """
+        return self._greetingtest("writeSequence", False)
+
+
+    def test_serverGreeting(self, write="write"):
+        """
+        Test that on a connection where the server speaks first, the client
+        receives the bytes sent by the server.
+        """
+        return self._greetingtest("write", True)
+
+
+    def test_serverGreetingSequence(self):
+        """
+        Like C{test_serverGreeting}, but use C{writeSequence} instead of
+        C{write} to issue the greeting.
+        """
+        return self._greetingtest("writeSequence", True)
+
+
+    def _producertest(self, producerClass):
+        toProduce = list(map(intToBytes, range(0, 10)))
+
+        class ProducingProtocol(Protocol):
+            def connectionMade(self):
+                self.producer = producerClass(list(toProduce))
+                self.producer.start(self.transport)
+
+        class ReceivingProtocol(Protocol):
+            bytes = b""
+            def dataReceived(self, data):
+                self.bytes += data
+                if self.bytes == b''.join(toProduce):
+                    self.received.callback((client, server))
+
+        server = ProducingProtocol()
+        client = ReceivingProtocol()
+        client.received = Deferred()
+
+        loopback.loopbackAsync(server, client)
+        return client.received
+
+
+    def test_pushProducer(self):
+        """
+        Test a push producer registered against a loopback transport.
+        """
+        @implementer(IPushProducer)
+        class PushProducer(object):
+            resumed = False
+
+            def __init__(self, toProduce):
+                self.toProduce = toProduce
+
+            def resumeProducing(self):
+                self.resumed = True
+
+            def start(self, consumer):
+                self.consumer = consumer
+                consumer.registerProducer(self, True)
+                self._produceAndSchedule()
+
+            def _produceAndSchedule(self):
+                if self.toProduce:
+                    self.consumer.write(self.toProduce.pop(0))
+                    reactor.callLater(0, self._produceAndSchedule)
+                else:
+                    self.consumer.unregisterProducer()
+        d = self._producertest(PushProducer)
+
+        def finished(results):
+            (client, server) = results
+            self.assertFalse(
+                server.producer.resumed,
+                "Streaming producer should not have been resumed.")
+        d.addCallback(finished)
+        return d
+
+
+    def test_pullProducer(self):
+        """
+        Test a pull producer registered against a loopback transport.
+        """
+        @implementer(IPullProducer)
+        class PullProducer(object):
+            def __init__(self, toProduce):
+                self.toProduce = toProduce
+
+            def start(self, consumer):
+                self.consumer = consumer
+                self.consumer.registerProducer(self, False)
+
+            def resumeProducing(self):
+                self.consumer.write(self.toProduce.pop(0))
+                if not self.toProduce:
+                    self.consumer.unregisterProducer()
+        return self._producertest(PullProducer)
+
+
+    def test_writeNotReentrant(self):
+        """
+        L{loopback.loopbackAsync} does not call a protocol's C{dataReceived}
+        method while that protocol's transport's C{write} method is higher up
+        on the stack.
+        """
+        class Server(Protocol):
+            def dataReceived(self, bytes):
+                self.transport.write(b"bytes")
+
+        class Client(Protocol):
+            ready = False
+
+            def connectionMade(self):
+                reactor.callLater(0, self.go)
+
+            def go(self):
+                self.transport.write(b"foo")
+                self.ready = True
+
+            def dataReceived(self, bytes):
+                self.wasReady = self.ready
+                self.transport.loseConnection()
+
+
+        server = Server()
+        client = Client()
+        d = loopback.loopbackAsync(client, server)
+        def cbFinished(ignored):
+            self.assertTrue(client.wasReady)
+        d.addCallback(cbFinished)
+        return d
+
+
+    def test_pumpPolicy(self):
+        """
+        The callable passed as the value for the C{pumpPolicy} parameter to
+        L{loopbackAsync} is called with a L{_LoopbackQueue} of pending bytes
+        and a protocol to which they should be delivered.
+        """
+        pumpCalls = []
+        def dummyPolicy(queue, target):
+            bytes = []
+            while queue:
+                bytes.append(queue.get())
+            pumpCalls.append((target, bytes))
+
+        client = Protocol()
+        server = Protocol()
+
+        finished = loopback.loopbackAsync(server, client, dummyPolicy)
+        self.assertEqual(pumpCalls, [])
+
+        client.transport.write(b"foo")
+        client.transport.write(b"bar")
+        server.transport.write(b"baz")
+        server.transport.write(b"quux")
+        server.transport.loseConnection()
+
+        def cbComplete(ignored):
+            self.assertEqual(
+                pumpCalls,
+                # The order here is somewhat arbitrary.  The implementation
+                # happens to always deliver data to the client first.
+                [(client, [b"baz", b"quux", None]),
+                 (server, [b"foo", b"bar"])])
+        finished.addCallback(cbComplete)
+        return finished
+
+
+    def test_identityPumpPolicy(self):
+        """
+        L{identityPumpPolicy} is a pump policy which calls the target's
+        C{dataReceived} method once for each string in the queue passed to it.
+        """
+        bytes = []
+        client = Protocol()
+        client.dataReceived = bytes.append
+        queue = loopback._LoopbackQueue()
+        queue.put(b"foo")
+        queue.put(b"bar")
+        queue.put(None)
+
+        loopback.identityPumpPolicy(queue, client)
+
+        self.assertEqual(bytes, [b"foo", b"bar"])
+
+
+    def test_collapsingPumpPolicy(self):
+        """
+        L{collapsingPumpPolicy} is a pump policy which calls the target's
+        C{dataReceived} only once with all of the strings in the queue passed
+        to it joined together.
+        """
+        bytes = []
+        client = Protocol()
+        client.dataReceived = bytes.append
+        queue = loopback._LoopbackQueue()
+        queue.put(b"foo")
+        queue.put(b"bar")
+        queue.put(None)
+
+        loopback.collapsingPumpPolicy(queue, client)
+
+        self.assertEqual(bytes, [b"foobar"])
+
+
+
+class LoopbackTCPTestCase(LoopbackTestCaseMixin, unittest.TestCase):
+    loopbackFunc = staticmethod(loopback.loopbackTCP)
+
+
+class LoopbackUNIXTestCase(LoopbackTestCaseMixin, unittest.TestCase):
+    loopbackFunc = staticmethod(loopback.loopbackUNIX)
+
+    if interfaces.IReactorUNIX(reactor, None) is None:
+        skip = "Current reactor does not support UNIX sockets"
+    elif _PY3:
+        skip = "UNIX sockets not supported on Python 3.  See #6136"
diff --git a/ThirdParty/Twisted/twisted/test/test_manhole.py b/ThirdParty/Twisted/twisted/test/test_manhole.py
new file mode 100644
index 0000000..fa7d0c7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_manhole.py
@@ -0,0 +1,75 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.trial import unittest
+from twisted.manhole import service
+from twisted.spread.util import LocalAsRemote
+
+class Dummy:
+    pass
+
+class DummyTransport:
+    def getHost(self):
+        return 'INET', '127.0.0.1', 0
+
+class DummyManholeClient(LocalAsRemote):
+    zero = 0
+    broker = Dummy()
+    broker.transport = DummyTransport()
+
+    def __init__(self):
+        self.messages = []
+
+    def console(self, messages):
+        self.messages.extend(messages)
+
+    def receiveExplorer(self, xplorer):
+        pass
+
+    def setZero(self):
+        self.zero = len(self.messages)
+
+    def getMessages(self):
+        return self.messages[self.zero:]
+
+    # local interface
+    sync_console = console
+    sync_receiveExplorer = receiveExplorer
+    sync_setZero = setZero
+    sync_getMessages = getMessages
+
+class ManholeTest(unittest.TestCase):
+    """Various tests for the manhole service.
+
+    Both the importIdentity and importMain tests are known to fail
+    when the __name__ in the manhole namespace is set to certain
+    values.
+    """
+    def setUp(self):
+        self.service = service.Service()
+        self.p = service.Perspective(self.service)
+        self.client = DummyManholeClient()
+        self.p.attached(self.client, None)
+
+    def test_importIdentity(self):
+        """Making sure imported module is the same as one previously loaded.
+        """
+        self.p.perspective_do("from twisted.manhole import service")
+        self.client.setZero()
+        self.p.perspective_do("int(service is sys.modules['twisted.manhole.service'])")
+        msg = self.client.getMessages()[0]
+        self.assertEqual(msg, ('result',"1\n"))
+
+    def test_importMain(self):
+        """Trying to import __main__"""
+        self.client.setZero()
+        self.p.perspective_do("import __main__")
+        if self.client.getMessages():
+            msg = self.client.getMessages()[0]
+            if msg[0] in ("exception","stderr"):
+                self.fail(msg[1])
+
+#if __name__=='__main__':
+#    unittest.main()
diff --git a/ThirdParty/Twisted/twisted/test/test_memcache.py b/ThirdParty/Twisted/twisted/test/test_memcache.py
new file mode 100644
index 0000000..7c25e98
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_memcache.py
@@ -0,0 +1,663 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test the memcache client protocol.
+"""
+
+from twisted.internet.error import ConnectionDone
+
+from twisted.protocols.memcache import MemCacheProtocol, NoSuchCommand
+from twisted.protocols.memcache import ClientError, ServerError
+
+from twisted.trial.unittest import TestCase
+from twisted.test.proto_helpers import StringTransportWithDisconnection
+from twisted.internet.task import Clock
+from twisted.internet.defer import Deferred, gatherResults, TimeoutError
+from twisted.internet.defer import DeferredList
+
+
+
+class CommandMixin:
+    """
+    Setup and tests for basic invocation of L{MemCacheProtocol} commands.
+    """
+
+    def _test(self, d, send, recv, result):
+        """
+        Helper test method to test the resulting C{Deferred} of a
+        L{MemCacheProtocol} command.
+        """
+        raise NotImplementedError()
+
+
+    def test_get(self):
+        """
+        L{MemCacheProtocol.get} returns a L{Deferred} which is called back with
+        the value and the flag associated with the given key if the server
+        returns a successful result.
+        """
+        return self._test(self.proto.get("foo"), "get foo\r\n",
+            "VALUE foo 0 3\r\nbar\r\nEND\r\n", (0, "bar"))
+
+
+    def test_emptyGet(self):
+        """
+        Test getting an unavailable key: it succeeds but returns C{None} as
+        the value and C{0} as the flag.
+        """
+        return self._test(self.proto.get("foo"), "get foo\r\n",
+            "END\r\n", (0, None))
+
+
+    def test_getMultiple(self):
+        """
+        L{MemCacheProtocol.getMultiple} returns a L{Deferred} which is called
+        back with a dictionary of flag, value for each given key.
+        """
+        return self._test(self.proto.getMultiple(['foo', 'cow']),
+            "get foo cow\r\n",
+            "VALUE foo 0 3\r\nbar\r\nVALUE cow 0 7\r\nchicken\r\nEND\r\n",
+            {'cow': (0, 'chicken'), 'foo': (0, 'bar')})
+
+
+    def test_getMultipleWithEmpty(self):
+        """
+        When L{MemCacheProtocol.getMultiple} is called with non-available keys,
+        the corresponding tuples are (0, None).
+        """
+        return self._test(self.proto.getMultiple(['foo', 'cow']),
+            "get foo cow\r\n",
+            "VALUE cow 1 3\r\nbar\r\nEND\r\n",
+            {'cow': (1, 'bar'), 'foo': (0, None)})
+
+
+    def test_set(self):
+        """
+        L{MemCacheProtocol.set} returns a L{Deferred} which is called back with
+        C{True} when the operation succeeds.
+        """
+        return self._test(self.proto.set("foo", "bar"),
+            "set foo 0 0 3\r\nbar\r\n", "STORED\r\n", True)
+
+
+    def test_add(self):
+        """
+        L{MemCacheProtocol.add} returns a L{Deferred} which is called back with
+        C{True} when the operation succeeds.
+        """
+        return self._test(self.proto.add("foo", "bar"),
+            "add foo 0 0 3\r\nbar\r\n", "STORED\r\n", True)
+
+
+    def test_replace(self):
+        """
+        L{MemCacheProtocol.replace} returns a L{Deferred} which is called back
+        with C{True} when the operation succeeds.
+        """
+        return self._test(self.proto.replace("foo", "bar"),
+            "replace foo 0 0 3\r\nbar\r\n", "STORED\r\n", True)
+
+
+    def test_errorAdd(self):
+        """
+        Test an erroneous add: if a L{MemCacheProtocol.add} is called but the
+        key already exists on the server, it returns a B{NOT STORED} answer,
+        which calls back the resulting L{Deferred} with C{False}.
+        """
+        return self._test(self.proto.add("foo", "bar"),
+            "add foo 0 0 3\r\nbar\r\n", "NOT STORED\r\n", False)
+
+
+    def test_errorReplace(self):
+        """
+        Test an erroneous replace: if a L{MemCacheProtocol.replace} is called
+        but the key doesn't exist on the server, it returns a B{NOT STORED}
+        answer, which calls back the resulting L{Deferred} with C{False}.
+        """
+        return self._test(self.proto.replace("foo", "bar"),
+            "replace foo 0 0 3\r\nbar\r\n", "NOT STORED\r\n", False)
+
+
+    def test_delete(self):
+        """
+        L{MemCacheProtocol.delete} returns a L{Deferred} which is called back
+        with C{True} when the server notifies a success.
+        """
+        return self._test(self.proto.delete("bar"), "delete bar\r\n",
+            "DELETED\r\n", True)
+
+
+    def test_errorDelete(self):
+        """
+        Test an error during a delete: if the key doesn't exist on the server,
+        it returns a B{NOT FOUND} answer which calls back the resulting
+        L{Deferred} with C{False}.
+        """
+        return self._test(self.proto.delete("bar"), "delete bar\r\n",
+            "NOT FOUND\r\n", False)
+
+
+    def test_increment(self):
+        """
+        Test incrementing a variable: L{MemCacheProtocol.increment} returns a
+        L{Deferred} which is called back with the incremented value of the
+        given key.
+        """
+        return self._test(self.proto.increment("foo"), "incr foo 1\r\n",
+            "4\r\n", 4)
+
+
+    def test_decrement(self):
+        """
+        Test decrementing a variable: L{MemCacheProtocol.decrement} returns a
+        L{Deferred} which is called back with the decremented value of the
+        given key.
+        """
+        return self._test(
+            self.proto.decrement("foo"), "decr foo 1\r\n", "5\r\n", 5)
+
+
+    def test_incrementVal(self):
+        """
+        L{MemCacheProtocol.increment} takes an optional argument C{value} which
+        replaces the default value of 1 when specified.
+        """
+        return self._test(self.proto.increment("foo", 8), "incr foo 8\r\n",
+            "4\r\n", 4)
+
+
+    def test_decrementVal(self):
+        """
+        L{MemCacheProtocol.decrement} takes an optional argument C{value} which
+        replaces the default value of 1 when specified.
+        """
+        return self._test(self.proto.decrement("foo", 3), "decr foo 3\r\n",
+            "5\r\n", 5)
+
+
+    def test_stats(self):
+        """
+        Test retrieving server statistics via the L{MemCacheProtocol.stats}
+        command: it parses the data sent by the server and calls back the
+        resulting L{Deferred} with a dictionary of the received statistics.
+        """
+        return self._test(self.proto.stats(), "stats\r\n",
+            "STAT foo bar\r\nSTAT egg spam\r\nEND\r\n",
+            {"foo": "bar", "egg": "spam"})
+
+
+    def test_statsWithArgument(self):
+        """
+        L{MemCacheProtocol.stats} takes an optional C{str} argument which,
+        if specified, is sent along with the I{STAT} command.  The I{STAT}
+        responses from the server are parsed as key/value pairs and returned
+        as a C{dict} (as in the case where the argument is not specified).
+        """
+        return self._test(self.proto.stats("blah"), "stats blah\r\n",
+            "STAT foo bar\r\nSTAT egg spam\r\nEND\r\n",
+            {"foo": "bar", "egg": "spam"})
+
+
+    def test_version(self):
+        """
+        Test version retrieval via the L{MemCacheProtocol.version} command: it
+        returns a L{Deferred} which is called back with the version sent by the
+        server.
+        """
+        return self._test(self.proto.version(), "version\r\n",
+            "VERSION 1.1\r\n", "1.1")
+
+
+    def test_flushAll(self):
+        """
+        L{MemCacheProtocol.flushAll} returns a L{Deferred} which is called back
+        with C{True} if the server acknowledges success.
+        """
+        return self._test(self.proto.flushAll(), "flush_all\r\n",
+            "OK\r\n", True)
+
+
+
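The expectations wired through _test in the mixin above also spell out the wire format for retrievals: "get <key>\r\n" is answered by zero or more "VALUE <key> <flags> <length>\r\n<data>\r\n" blocks followed by "END\r\n". The parser sketch below handles exactly those example responses; it is an illustration only (the protocol under test reads the declared number of bytes in raw mode, so real values may contain CRLF), and the name parseGetResponse is invented here.

def parseGetResponse(payload):
    # Map each returned key to a (flags, data) tuple; "END" alone means empty.
    results = {}
    lines = payload.split("\r\n")
    i = 0
    while lines[i] != "END":
        _, key, flags, length = lines[i].split(" ")
        results[key] = (int(flags), lines[i + 1][:int(length)])
        i += 2
    return results

assert parseGetResponse("VALUE foo 0 3\r\nbar\r\nEND\r\n") == {"foo": (0, "bar")}
multi = parseGetResponse(
    "VALUE foo 0 3\r\nbar\r\nVALUE cow 0 7\r\nchicken\r\nEND\r\n")
assert multi == {"foo": (0, "bar"), "cow": (0, "chicken")}
assert parseGetResponse("END\r\n") == {}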
+class MemCacheTestCase(CommandMixin, TestCase):
+    """
+    Test client protocol class L{MemCacheProtocol}.
+    """
+
+    def setUp(self):
+        """
+        Create a memcache client, connect it to a string protocol, and make it
+        use a deterministic clock.
+        """
+        self.proto = MemCacheProtocol()
+        self.clock = Clock()
+        self.proto.callLater = self.clock.callLater
+        self.transport = StringTransportWithDisconnection()
+        self.transport.protocol = self.proto
+        self.proto.makeConnection(self.transport)
+
+
+    def _test(self, d, send, recv, result):
+        """
+        Implementation of C{_test} which checks that the command sends C{send}
+        data, and that upon reception of C{recv} the result is C{result}.
+
+        @param d: the resulting deferred from the memcache command.
+        @type d: C{Deferred}
+
+        @param send: the expected data to be sent.
+        @type send: C{str}
+
+        @param recv: the data to simulate as reception.
+        @type recv: C{str}
+
+        @param result: the expected result.
+        @type result: C{any}
+        """
+        def cb(res):
+            self.assertEqual(res, result)
+        self.assertEqual(self.transport.value(), send)
+        d.addCallback(cb)
+        self.proto.dataReceived(recv)
+        return d
+
+
+    def test_invalidGetResponse(self):
+        """
+        If the value returned doesn't match the expected key of the current
+        C{get} command, an error is raised in L{MemCacheProtocol.dataReceived}.
+        """
+        self.proto.get("foo")
+        s = "spamegg"
+        self.assertRaises(RuntimeError,
+            self.proto.dataReceived,
+            "VALUE bar 0 %s\r\n%s\r\nEND\r\n" % (len(s), s))
+
+
+    def test_invalidMultipleGetResponse(self):
+        """
+        If the value returned doesn't match one of the expected keys of the
+        current multiple C{get} command, an error is raised in
+        L{MemCacheProtocol.dataReceived}.
+        """
+        self.proto.getMultiple(["foo", "bar"])
+        s = "spamegg"
+        self.assertRaises(RuntimeError,
+            self.proto.dataReceived,
+            "VALUE egg 0 %s\r\n%s\r\nEND\r\n" % (len(s), s))
+
+
+    def test_timeOut(self):
+        """
+        Test the timeout on outgoing requests: when timeout is detected, all
+        current commands fail with a L{TimeoutError}, and the connection is
+        closed.
+        """
+        d1 = self.proto.get("foo")
+        d2 = self.proto.get("bar")
+        d3 = Deferred()
+        self.proto.connectionLost = d3.callback
+
+        self.clock.advance(self.proto.persistentTimeOut)
+        self.assertFailure(d1, TimeoutError)
+        self.assertFailure(d2, TimeoutError)
+        def checkMessage(error):
+            self.assertEqual(str(error), "Connection timeout")
+        d1.addCallback(checkMessage)
+        return gatherResults([d1, d2, d3])
+
+
+    def test_timeoutRemoved(self):
+        """
+        When a request gets a response, no pending timeout call remains around.
+        """
+        d = self.proto.get("foo")
+
+        self.clock.advance(self.proto.persistentTimeOut - 1)
+        self.proto.dataReceived("VALUE foo 0 3\r\nbar\r\nEND\r\n")
+
+        def check(result):
+            self.assertEqual(result, (0, "bar"))
+            self.assertEqual(len(self.clock.calls), 0)
+        d.addCallback(check)
+        return d
+
+
+    def test_timeOutRaw(self):
+        """
+        Test the timeout when raw mode was started: the timeout is not reset
+        until all the data has been received, so we can have a L{TimeoutError}
+        when waiting for raw data.
+        """
+        d1 = self.proto.get("foo")
+        d2 = Deferred()
+        self.proto.connectionLost = d2.callback
+
+        self.proto.dataReceived("VALUE foo 0 10\r\n12345")
+        self.clock.advance(self.proto.persistentTimeOut)
+        self.assertFailure(d1, TimeoutError)
+        return gatherResults([d1, d2])
+
+
+    def test_timeOutStat(self):
+        """
+        Test the timeout when stat command has started: the timeout is not
+        reset until the final B{END} is received.
+        """
+        d1 = self.proto.stats()
+        d2 = Deferred()
+        self.proto.connectionLost = d2.callback
+
+        self.proto.dataReceived("STAT foo bar\r\n")
+        self.clock.advance(self.proto.persistentTimeOut)
+        self.assertFailure(d1, TimeoutError)
+        return gatherResults([d1, d2])
+
+
+    def test_timeoutPipelining(self):
+        """
+        When two requests are sent, a timeout call remains around for the
+        second request, and its timeout time is correct.
+        """
+        d1 = self.proto.get("foo")
+        d2 = self.proto.get("bar")
+        d3 = Deferred()
+        self.proto.connectionLost = d3.callback
+
+        self.clock.advance(self.proto.persistentTimeOut - 1)
+        self.proto.dataReceived("VALUE foo 0 3\r\nbar\r\nEND\r\n")
+
+        def check(result):
+            self.assertEqual(result, (0, "bar"))
+            self.assertEqual(len(self.clock.calls), 1)
+            for i in range(self.proto.persistentTimeOut):
+                self.clock.advance(1)
+            return self.assertFailure(d2, TimeoutError).addCallback(checkTime)
+        def checkTime(ignored):
+            # Check that the timeout happened C{self.proto.persistentTimeOut}
+            # after the last response
+            self.assertEqual(
+                self.clock.seconds(), 2 * self.proto.persistentTimeOut - 1)
+        d1.addCallback(check)
+        return d1
+
+
+    def test_timeoutNotReset(self):
+        """
+        Check that the timeout is not reset for every command, but is kept
+        from the first command without a response.
+        """
+        d1 = self.proto.get("foo")
+        d3 = Deferred()
+        self.proto.connectionLost = d3.callback
+
+        self.clock.advance(self.proto.persistentTimeOut - 1)
+        d2 = self.proto.get("bar")
+        self.clock.advance(1)
+        self.assertFailure(d1, TimeoutError)
+        self.assertFailure(d2, TimeoutError)
+        return gatherResults([d1, d2, d3])
+
+
+    def test_timeoutCleanDeferreds(self):
+        """
+        C{timeoutConnection} cleans the list of commands that it fires with
+        C{TimeoutError}: C{connectionLost} doesn't try to fire them again, but
+        sets the disconnected state so that future commands fail with a
+        C{RuntimeError}.
+        """
+        d1 = self.proto.get("foo")
+        self.clock.advance(self.proto.persistentTimeOut)
+        self.assertFailure(d1, TimeoutError)
+        d2 = self.proto.get("bar")
+        self.assertFailure(d2, RuntimeError)
+        return gatherResults([d1, d2])
+
+
+    def test_connectionLost(self):
+        """
+        When disconnection occurs while commands are still outstanding, the
+        commands fail.
+        """
+        d1 = self.proto.get("foo")
+        d2 = self.proto.get("bar")
+        self.transport.loseConnection()
+        done = DeferredList([d1, d2], consumeErrors=True)
+        def checkFailures(results):
+            for success, result in results:
+                self.assertFalse(success)
+                result.trap(ConnectionDone)
+        return done.addCallback(checkFailures)
+
+
+    def test_tooLongKey(self):
+        """
+        An error is raised when trying to use a too long key: the called
+        command returns a L{Deferred} which fails with a L{ClientError}.
+        """
+        d1 = self.assertFailure(self.proto.set("a" * 500, "bar"), ClientError)
+        d2 = self.assertFailure(self.proto.increment("a" * 500), ClientError)
+        d3 = self.assertFailure(self.proto.get("a" * 500), ClientError)
+        d4 = self.assertFailure(
+            self.proto.append("a" * 500, "bar"), ClientError)
+        d5 = self.assertFailure(
+            self.proto.prepend("a" * 500, "bar"), ClientError)
+        d6 = self.assertFailure(
+            self.proto.getMultiple(["foo", "a" * 500]), ClientError)
+        return gatherResults([d1, d2, d3, d4, d5, d6])
+
+
+    def test_invalidCommand(self):
+        """
+        When an unknown command is sent directly (not through public API), the
+        server answers with an B{ERROR} token, and the command fails with
+        L{NoSuchCommand}.
+        """
+        d = self.proto._set("egg", "foo", "bar", 0, 0, "")
+        self.assertEqual(self.transport.value(), "egg foo 0 0 3\r\nbar\r\n")
+        self.assertFailure(d, NoSuchCommand)
+        self.proto.dataReceived("ERROR\r\n")
+        return d
+
+
+    def test_clientError(self):
+        """
+        Test the L{ClientError} error: when the server sends a B{CLIENT_ERROR}
+        token, the originating command fails with L{ClientError}, and the error
+        contains the text sent by the server.
+        """
+        a = "eggspamm"
+        d = self.proto.set("foo", a)
+        self.assertEqual(self.transport.value(),
+                          "set foo 0 0 8\r\neggspamm\r\n")
+        self.assertFailure(d, ClientError)
+        def check(err):
+            self.assertEqual(str(err), "We don't like egg and spam")
+        d.addCallback(check)
+        self.proto.dataReceived("CLIENT_ERROR We don't like egg and spam\r\n")
+        return d
+
+
+    def test_serverError(self):
+        """
+        Test the L{ServerError} error: when the server sends a B{SERVER_ERROR}
+        token, the originating command fails with L{ServerError}, and the error
+        contains the text sent by the server.
+        """
+        a = "eggspamm"
+        d = self.proto.set("foo", a)
+        self.assertEqual(self.transport.value(),
+                          "set foo 0 0 8\r\neggspamm\r\n")
+        self.assertFailure(d, ServerError)
+        def check(err):
+            self.assertEqual(str(err), "zomg")
+        d.addCallback(check)
+        self.proto.dataReceived("SERVER_ERROR zomg\r\n")
+        return d
+
+
+    def test_unicodeKey(self):
+        """
+        Using a non-string key as an argument to commands raises an error.
+        """
+        d1 = self.assertFailure(self.proto.set(u"foo", "bar"), ClientError)
+        d2 = self.assertFailure(self.proto.increment(u"egg"), ClientError)
+        d3 = self.assertFailure(self.proto.get(1), ClientError)
+        d4 = self.assertFailure(self.proto.delete(u"bar"), ClientError)
+        d5 = self.assertFailure(self.proto.append(u"foo", "bar"), ClientError)
+        d6 = self.assertFailure(self.proto.prepend(u"foo", "bar"), ClientError)
+        d7 = self.assertFailure(
+            self.proto.getMultiple(["egg", 1]), ClientError)
+        return gatherResults([d1, d2, d3, d4, d5, d6, d7])
+
+
+    def test_unicodeValue(self):
+        """
+        Using a non-string value raises an error.
+        """
+        return self.assertFailure(self.proto.set("foo", u"bar"), ClientError)
+
+
+    def test_pipelining(self):
+        """
+        Multiple requests can be sent to the server one after another, and
+        the protocol orders the responses correctly, dispatching each one to
+        the corresponding client command.
+        """
+        d1 = self.proto.get("foo")
+        d1.addCallback(self.assertEqual, (0, "bar"))
+        d2 = self.proto.set("bar", "spamspamspam")
+        d2.addCallback(self.assertEqual, True)
+        d3 = self.proto.get("egg")
+        d3.addCallback(self.assertEqual, (0, "spam"))
+        self.assertEqual(self.transport.value(),
+            "get foo\r\nset bar 0 0 12\r\nspamspamspam\r\nget egg\r\n")
+        self.proto.dataReceived("VALUE foo 0 3\r\nbar\r\nEND\r\n"
+                                "STORED\r\n"
+                                "VALUE egg 0 4\r\nspam\r\nEND\r\n")
+        return gatherResults([d1, d2, d3])
+
+
+    def test_getInChunks(self):
+        """
+        If the value retrieved by a C{get} arrives in chunks, the protocol
+        is able to reconstruct it and produce the correct value.
+        """
+        d = self.proto.get("foo")
+        d.addCallback(self.assertEqual, (0, "0123456789"))
+        self.assertEqual(self.transport.value(), "get foo\r\n")
+        self.proto.dataReceived("VALUE foo 0 10\r\n0123456")
+        self.proto.dataReceived("789")
+        self.proto.dataReceived("\r\nEND")
+        self.proto.dataReceived("\r\n")
+        return d
+
+
+    def test_append(self):
+        """
+        L{MemCacheProtocol.append} behaves like the L{MemCacheProtocol.set}
+        method: it returns a L{Deferred} which is called back with C{True} when
+        the operation succeeds.
+        """
+        return self._test(self.proto.append("foo", "bar"),
+            "append foo 0 0 3\r\nbar\r\n", "STORED\r\n", True)
+
+
+    def test_prepend(self):
+        """
+        L{MemCacheProtocol.prepend} behaves like the L{MemCacheProtocol.set}
+        method: it returns a L{Deferred} which is called back with C{True} when
+        the operation succeeds.
+        """
+        return self._test(self.proto.prepend("foo", "bar"),
+            "prepend foo 0 0 3\r\nbar\r\n", "STORED\r\n", True)
+
+
+    def test_gets(self):
+        """
+        L{MemCacheProtocol.get} handles an additional cas result when
+        C{withIdentifier} is C{True} and forwards it in the resulting
+        L{Deferred}.
+        """
+        return self._test(self.proto.get("foo", True), "gets foo\r\n",
+            "VALUE foo 0 3 1234\r\nbar\r\nEND\r\n", (0, "1234", "bar"))
+
+
+    def test_emptyGets(self):
+        """
+        Test getting a non-available key with gets: it succeeds but returns
+        C{None} as the value, C{0} as the flag and an empty cas value.
+        """
+        return self._test(self.proto.get("foo", True), "gets foo\r\n",
+            "END\r\n", (0, "", None))
+
+
+    def test_getsMultiple(self):
+        """
+        L{MemCacheProtocol.getMultiple} handles an additional cas field in the
+        returned tuples if C{withIdentifier} is C{True}.
+        """
+        return self._test(self.proto.getMultiple(["foo", "bar"], True),
+            "gets foo bar\r\n",
+            "VALUE foo 0 3 1234\r\negg\r\nVALUE bar 0 4 2345\r\nspam\r\nEND\r\n",
+            {'bar': (0, '2345', 'spam'), 'foo': (0, '1234', 'egg')})
+
+
+    def test_getsMultipleWithEmpty(self):
+        """
+        When getting a non-available key with L{MemCacheProtocol.getMultiple}
+        and C{withIdentifier} is C{True}, the other keys are retrieved
+        correctly, and the non-available key gets a tuple of C{0} as flag,
+        C{None} as value, and an empty cas value.
+        """
+        return self._test(self.proto.getMultiple(["foo", "bar"], True),
+            "gets foo bar\r\n",
+            "VALUE foo 0 3 1234\r\negg\r\nEND\r\n",
+            {'bar': (0, '', None), 'foo': (0, '1234', 'egg')})
+
+
+    def test_checkAndSet(self):
+        """
+        L{MemCacheProtocol.checkAndSet} passes an additional cas identifier
+        that the server uses to check whether the data should be updated.
+        """
+        return self._test(self.proto.checkAndSet("foo", "bar", cas="1234"),
+            "cas foo 0 0 3 1234\r\nbar\r\n", "STORED\r\n", True)
+
+
+    def test_casUnknowKey(self):
+        """
+        When the response to L{MemCacheProtocol.checkAndSet} is C{EXISTS}, the
+        resulting L{Deferred} fires with C{False}.
+        """
+        return self._test(self.proto.checkAndSet("foo", "bar", cas="1234"),
+            "cas foo 0 0 3 1234\r\nbar\r\n", "EXISTS\r\n", False)
+
+
+
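+# Editor's note: the sketch below is not part of the upstream Twisted test
+# suite.  It condenses the pattern the tests above all follow -- issue a
+# command, inspect the bytes written to a fake transport, then feed the
+# canned server response back through dataReceived -- into one standalone
+# helper.  The local imports duplicate names this module already uses.
+def _exampleGetRoundTrip():
+    """
+    Drive a single C{get} by hand and return the value its L{Deferred}
+    fires with (illustrative only).
+    """
+    from twisted.internet.task import Clock
+    from twisted.protocols.memcache import MemCacheProtocol
+    from twisted.test.proto_helpers import StringTransportWithDisconnection
+
+    proto = MemCacheProtocol()
+    clock = Clock()
+    proto.callLater = clock.callLater
+    transport = StringTransportWithDisconnection()
+    transport.protocol = proto
+    proto.makeConnection(transport)
+
+    results = []
+    proto.get("foo").addCallback(results.append)
+    assert transport.value() == "get foo\r\n"
+    # Simulate the server answering with flags=0 and a 3 byte value; the
+    # protocol parses it and fires the Deferred synchronously.
+    proto.dataReceived("VALUE foo 0 3\r\nbar\r\nEND\r\n")
+    return results[0]          # (0, "bar")
+
+
+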
+class CommandFailureTests(CommandMixin, TestCase):
+    """
+    Tests for correct failure of commands on a disconnected
+    L{MemCacheProtocol}.
+    """
+
+    def setUp(self):
+        """
+        Create a disconnected memcache client, using a deterministic clock.
+        """
+        self.proto = MemCacheProtocol()
+        self.clock = Clock()
+        self.proto.callLater = self.clock.callLater
+        self.transport = StringTransportWithDisconnection()
+        self.transport.protocol = self.proto
+        self.proto.makeConnection(self.transport)
+        self.transport.loseConnection()
+
+
+    def _test(self, d, send, recv, result):
+        """
+        Implementation of C{_test} which checks that the command fails with
+        C{RuntimeError} because the transport is disconnected. All the
+        parameters except C{d} are ignored.
+        """
+        return self.assertFailure(d, RuntimeError)
diff --git a/ThirdParty/Twisted/twisted/test/test_modules.py b/ThirdParty/Twisted/twisted/test/test_modules.py
new file mode 100644
index 0000000..ad75bc6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_modules.py
@@ -0,0 +1,494 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for twisted.python.modules, abstract access to imported or importable
+objects.
+"""
+
+import sys
+import itertools
+import zipfile
+import compileall
+
+import twisted
+from twisted.trial.unittest import TestCase
+
+from twisted.python import modules
+from twisted.python.filepath import FilePath
+from twisted.python.reflect import namedAny
+
+from twisted.python.test.modules_helpers import TwistedModulesMixin
+from twisted.python.test.test_zippath import zipit
+
+
+class TwistedModulesTestCase(TwistedModulesMixin, TestCase):
+    """
+    Base class for L{modules} test cases.
+    """
+    def findByIteration(self, modname, where=modules, importPackages=False):
+        """
+        You don't ever actually want to do this, so it's not in the public
+        API, but sometimes we want to compare the result of an iterative call
+        with a lookup call and make sure they're the same for test purposes.
+        """
+        for modinfo in where.walkModules(importPackages=importPackages):
+            if modinfo.name == modname:
+                return modinfo
+        self.fail("Unable to find module %r through iteration." % (modname,))
+
+
+
+class BasicTests(TwistedModulesTestCase):
+
+    def test_namespacedPackages(self):
+        """
+        Duplicate packages are not yielded when iterating over namespace
+        packages.
+        """
+        # Force pkgutil to be loaded already, since the probe package being
+        # created depends on it, and the replaceSysPath call below will make
+        # pretty much everything unimportable.
+        __import__('pkgutil')
+
+        namespaceBoilerplate = (
+            'import pkgutil; '
+            '__path__ = pkgutil.extend_path(__path__, __name__)')
+
+        # Create two temporary directories with packages:
+        #
+        #   entry:
+        #       test_package/
+        #           __init__.py
+        #           nested_package/
+        #               __init__.py
+        #               module.py
+        #
+        #   anotherEntry:
+        #       test_package/
+        #           __init__.py
+        #           nested_package/
+        #               __init__.py
+        #               module2.py
+        #
+        # test_package and test_package.nested_package are namespace packages,
+        # and when both of these are in sys.path, test_package.nested_package
+        # should become a virtual package containing both "module" and
+        # "module2"
+
+        entry = self.pathEntryWithOnePackage()
+        testPackagePath = entry.child('test_package')
+        testPackagePath.child('__init__.py').setContent(namespaceBoilerplate)
+
+        nestedEntry = testPackagePath.child('nested_package')
+        nestedEntry.makedirs()
+        nestedEntry.child('__init__.py').setContent(namespaceBoilerplate)
+        nestedEntry.child('module.py').setContent('')
+
+        anotherEntry = self.pathEntryWithOnePackage()
+        anotherPackagePath = anotherEntry.child('test_package')
+        anotherPackagePath.child('__init__.py').setContent(namespaceBoilerplate)
+
+        anotherNestedEntry = anotherPackagePath.child('nested_package')
+        anotherNestedEntry.makedirs()
+        anotherNestedEntry.child('__init__.py').setContent(namespaceBoilerplate)
+        anotherNestedEntry.child('module2.py').setContent('')
+
+        self.replaceSysPath([entry.path, anotherEntry.path])
+
+        module = modules.getModule('test_package')
+
+        # We have to use importPackages=True in order to resolve the namespace
+        # packages, so we remove the imported packages from sys.modules after
+        # walking
+        try:
+            walkedNames = [
+                mod.name for mod in module.walkModules(importPackages=True)]
+        finally:
+            for module in sys.modules.keys():
+                if module.startswith('test_package'):
+                    del sys.modules[module]
+
+        expected = [
+            'test_package',
+            'test_package.nested_package',
+            'test_package.nested_package.module',
+            'test_package.nested_package.module2',
+            ]
+
+        self.assertEqual(walkedNames, expected)
+
+
+    def test_unimportablePackageGetItem(self):
+        """
+        If a package has been explicitly forbidden from importing by setting a
+        C{None} key in sys.modules under its name,
+        L{modules.PythonPath.__getitem__} should still be able to retrieve an
+        unloaded L{modules.PythonModule} for that package.
+        """
+        shouldNotLoad = []
+        path = modules.PythonPath(sysPath=[self.pathEntryWithOnePackage().path],
+                                  moduleLoader=shouldNotLoad.append,
+                                  importerCache={},
+                                  sysPathHooks={},
+                                  moduleDict={'test_package': None})
+        self.assertEqual(shouldNotLoad, [])
+        self.assertEqual(path['test_package'].isLoaded(), False)
+
+
+    def test_unimportablePackageWalkModules(self):
+        """
+        If a package has been explicitly forbidden from importing by setting a
+        C{None} key in sys.modules under its name, L{modules.walkModules} should
+        still be able to retrieve an unloaded L{modules.PythonModule} for that
+        package.
+        """
+        existentPath = self.pathEntryWithOnePackage()
+        self.replaceSysPath([existentPath.path])
+        self.replaceSysModules({"test_package": None})
+
+        walked = list(modules.walkModules())
+        self.assertEqual([m.name for m in walked],
+                          ["test_package"])
+        self.assertEqual(walked[0].isLoaded(), False)
+
+
+    def test_nonexistentPaths(self):
+        """
+        Verify that L{modules.walkModules} ignores entries in sys.path which
+        do not exist in the filesystem.
+        """
+        existentPath = self.pathEntryWithOnePackage()
+
+        nonexistentPath = FilePath(self.mktemp())
+        self.failIf(nonexistentPath.exists())
+
+        self.replaceSysPath([existentPath.path])
+
+        expected = [modules.getModule("test_package")]
+
+        beforeModules = list(modules.walkModules())
+        sys.path.append(nonexistentPath.path)
+        afterModules = list(modules.walkModules())
+
+        self.assertEqual(beforeModules, expected)
+        self.assertEqual(afterModules, expected)
+
+
+    def test_nonDirectoryPaths(self):
+        """
+        Verify that L{modules.walkModules} ignores entries in sys.path which
+        refer to regular files in the filesystem.
+        """
+        existentPath = self.pathEntryWithOnePackage()
+
+        nonDirectoryPath = FilePath(self.mktemp())
+        self.failIf(nonDirectoryPath.exists())
+        nonDirectoryPath.setContent("zip file or whatever\n")
+
+        self.replaceSysPath([existentPath.path])
+
+        beforeModules = list(modules.walkModules())
+        sys.path.append(nonDirectoryPath.path)
+        afterModules = list(modules.walkModules())
+
+        self.assertEqual(beforeModules, afterModules)
+
+
+    def test_twistedShowsUp(self):
+        """
+        Scrounge around in the top-level module namespace and make sure that
+        Twisted shows up, and that the module thusly obtained is the same as
+        the module that we find when we look for it explicitly by name.
+        """
+        self.assertEqual(modules.getModule('twisted'),
+                          self.findByIteration("twisted"))
+
+
+    def test_dottedNames(self):
+        """
+        Verify that the walkModules APIs will give us back subpackages, not
+        just top-level packages.
+        """
+        self.assertEqual(
+            modules.getModule('twisted.python'),
+            self.findByIteration("twisted.python",
+                                 where=modules.getModule('twisted')))
+
+
+    def test_onlyTopModules(self):
+        """
+        Verify that the iterModules API will only return top-level modules and
+        packages, not submodules or subpackages.
+        """
+        for module in modules.iterModules():
+            self.failIf(
+                '.' in module.name,
+                "no nested modules should be returned from iterModules: %r"
+                % (module.filePath))
+
+
+    def test_loadPackagesAndModules(self):
+        """
+        Verify that we can locate and load packages, modules, submodules, and
+        subpackages.
+        """
+        for n in ['os',
+                  'twisted',
+                  'twisted.python',
+                  'twisted.python.reflect']:
+            m = namedAny(n)
+            self.failUnlessIdentical(
+                modules.getModule(n).load(),
+                m)
+            self.failUnlessIdentical(
+                self.findByIteration(n).load(),
+                m)
+
+
+    def test_pathEntriesOnPath(self):
+        """
+        Verify that path entries discovered via module loading are, in fact, on
+        sys.path somewhere.
+        """
+        for n in ['os',
+                  'twisted',
+                  'twisted.python',
+                  'twisted.python.reflect']:
+            self.failUnlessIn(
+                modules.getModule(n).pathEntry.filePath.path,
+                sys.path)
+
+
+    def test_alwaysPreferPy(self):
+        """
+        Verify that .py files will always be preferred to .pyc files, regardless of
+        directory listing order.
+        """
+        mypath = FilePath(self.mktemp())
+        mypath.createDirectory()
+        pp = modules.PythonPath(sysPath=[mypath.path])
+        originalSmartPath = pp._smartPath
+        def _evilSmartPath(pathName):
+            o = originalSmartPath(pathName)
+            originalChildren = o.children
+            def evilChildren():
+                # normally this order is random; let's make sure it always
+                # comes up .pyc-first.
+                x = originalChildren()
+                x.sort()
+                x.reverse()
+                return x
+            o.children = evilChildren
+            return o
+        mypath.child("abcd.py").setContent('\n')
+        compileall.compile_dir(mypath.path, quiet=True)
+        # sanity check
+        self.assertEqual(len(mypath.children()), 2)
+        pp._smartPath = _evilSmartPath
+        self.assertEqual(pp['abcd'].filePath,
+                          mypath.child('abcd.py'))
+
+
+    def test_packageMissingPath(self):
+        """
+        A package can delete its __path__ for some reason;
+        C{modules.PythonPath} should be able to deal with it.
+        """
+        mypath = FilePath(self.mktemp())
+        mypath.createDirectory()
+        pp = modules.PythonPath(sysPath=[mypath.path])
+        subpath = mypath.child("abcd")
+        subpath.createDirectory()
+        subpath.child("__init__.py").setContent('del __path__\n')
+        sys.path.append(mypath.path)
+        __import__("abcd")
+        try:
+            l = list(pp.walkModules())
+            self.assertEqual(len(l), 1)
+            self.assertEqual(l[0].name, 'abcd')
+        finally:
+            del sys.modules['abcd']
+            sys.path.remove(mypath.path)
+
+
+
+class PathModificationTest(TwistedModulesTestCase):
+    """
+    These tests share setup/cleanup behavior of creating a dummy package and
+    stuffing some code in it.
+    """
+
+    _serialnum = itertools.count().next # used to generate serial numbers for
+                                        # package names.
+
+    def setUp(self):
+        self.pathExtensionName = self.mktemp()
+        self.pathExtension = FilePath(self.pathExtensionName)
+        self.pathExtension.createDirectory()
+        self.packageName = "pyspacetests%d" % (self._serialnum(),)
+        self.packagePath = self.pathExtension.child(self.packageName)
+        self.packagePath.createDirectory()
+        self.packagePath.child("__init__.py").setContent("")
+        self.packagePath.child("a.py").setContent("")
+        self.packagePath.child("b.py").setContent("")
+        self.packagePath.child("c__init__.py").setContent("")
+        self.pathSetUp = False
+
+
+    def _setupSysPath(self):
+        assert not self.pathSetUp
+        self.pathSetUp = True
+        sys.path.append(self.pathExtensionName)
+
+
+    def _underUnderPathTest(self, doImport=True):
+        moddir2 = self.mktemp()
+        fpmd = FilePath(moddir2)
+        fpmd.createDirectory()
+        fpmd.child("foozle.py").setContent("x = 123\n")
+        self.packagePath.child("__init__.py").setContent(
+            "__path__.append(%r)\n" % (moddir2,))
+        # Cut here
+        self._setupSysPath()
+        modinfo = modules.getModule(self.packageName)
+        self.assertEqual(
+            self.findByIteration(self.packageName+".foozle", modinfo,
+                                 importPackages=doImport),
+            modinfo['foozle'])
+        self.assertEqual(modinfo['foozle'].load().x, 123)
+
+
+    def test_underUnderPathAlreadyImported(self):
+        """
+        Verify that iterModules will honor the __path__ of already-loaded packages.
+        """
+        self._underUnderPathTest()
+
+
+    def test_underUnderPathNotAlreadyImported(self):
+        """
+        Verify that iterModules will honor the __path__ of packages that
+        have not yet been imported.
+        """
+        self._underUnderPathTest(False)
+
+
+    test_underUnderPathNotAlreadyImported.todo = (
+        "This may be impossible but it sure would be nice.")
+
+
+    def _listModules(self):
+        pkginfo = modules.getModule(self.packageName)
+        nfni = [modinfo.name.split(".")[-1] for modinfo in
+                pkginfo.iterModules()]
+        nfni.sort()
+        self.assertEqual(nfni, ['a', 'b', 'c__init__'])
+
+
+    def test_listingModules(self):
+        """
+        Make sure the module list comes back as we expect from iterModules on a
+        package, whether zipped or not.
+        """
+        self._setupSysPath()
+        self._listModules()
+
+
+    def test_listingModulesAlreadyImported(self):
+        """
+        Make sure the module list comes back as we expect from iterModules on a
+        package, whether zipped or not, even if the package has already been
+        imported.
+        """
+        self._setupSysPath()
+        namedAny(self.packageName)
+        self._listModules()
+
+
+    def tearDown(self):
+        # Intentionally using 'assert' here, this is not a test assertion, this
+        # is just an "oh fuck what is going ON" assertion. -glyph
+        if self.pathSetUp:
+            HORK = "path cleanup failed: don't be surprised if other tests break"
+            assert sys.path.pop() is self.pathExtensionName, HORK+", 1"
+            assert self.pathExtensionName not in sys.path, HORK+", 2"
+
+
+
+class RebindingTest(PathModificationTest):
+    """
+    These tests verify that the default path interrogation API works properly
+    even when sys.path has been rebound to a different object.
+    """
+    def _setupSysPath(self):
+        assert not self.pathSetUp
+        self.pathSetUp = True
+        self.savedSysPath = sys.path
+        sys.path = sys.path[:]
+        sys.path.append(self.pathExtensionName)
+
+
+    def tearDown(self):
+        """
+        Clean up sys.path by re-binding our original object.
+        """
+        if self.pathSetUp:
+            sys.path = self.savedSysPath
+
+
+
+class ZipPathModificationTest(PathModificationTest):
+    def _setupSysPath(self):
+        assert not self.pathSetUp
+        zipit(self.pathExtensionName, self.pathExtensionName+'.zip')
+        self.pathExtensionName += '.zip'
+        assert zipfile.is_zipfile(self.pathExtensionName)
+        PathModificationTest._setupSysPath(self)
+
+
+class PythonPathTestCase(TestCase):
+    """
+    Tests for the class which provides the implementation for all of the
+    public API of L{twisted.python.modules}, L{PythonPath}.
+    """
+    def test_unhandledImporter(self):
+        """
+        Make sure that encountering an unknown importer type does not cause
+        a catastrophic failure.
+        """
+        class SecretImporter(object):
+            pass
+
+        def hook(name):
+            return SecretImporter()
+
+        syspath = ['example/path']
+        sysmodules = {}
+        syshooks = [hook]
+        syscache = {}
+        def sysloader(name):
+            return None
+        space = modules.PythonPath(
+            syspath, sysmodules, syshooks, syscache, sysloader)
+        entries = list(space.iterEntries())
+        self.assertEqual(len(entries), 1)
+        self.assertRaises(KeyError, lambda: entries[0]['module'])
+
+
+    def test_inconsistentImporterCache(self):
+        """
+        If the path of a module loaded with L{PythonPath.__getitem__} is not
+        present in the path importer cache, a warning is emitted, but the
+        L{PythonModule} is returned as usual.
+        """
+        space = modules.PythonPath([], sys.modules, [], {})
+        thisModule = space[__name__]
+        warnings = self.flushWarnings([self.test_inconsistentImporterCache])
+        self.assertEqual(warnings[0]['category'], UserWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            FilePath(twisted.__file__).parent().dirname() +
+            " (for module " + __name__ + ") not in path importer cache "
+            "(PEP 302 violation - check your local configuration).")
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(thisModule.name, __name__)
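+
+
+
+# Editor's note: the sketch below is not part of the upstream Twisted test
+# suite.  It shows, under the same assumptions the tests above make, how a
+# PythonPath can be built explicitly from interpreter state and used to
+# inspect a module without importing it; passing namedAny as the module
+# loader is an assumption of this example.
+def _examplePythonPathLookup():
+    """
+    Resolve the 'os' module through an explicitly constructed
+    L{modules.PythonPath} and report whether it is currently loaded
+    (illustrative only).
+    """
+    path = modules.PythonPath(
+        sysPath=sys.path,
+        moduleDict=sys.modules,
+        sysPathHooks=sys.path_hooks,
+        importerCache=sys.path_importer_cache,
+        moduleLoader=namedAny)
+    modinfo = path['os']
+    return modinfo.name, modinfo.isLoaded()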
diff --git a/ThirdParty/Twisted/twisted/test/test_monkey.py b/ThirdParty/Twisted/twisted/test/test_monkey.py
new file mode 100644
index 0000000..d289df4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_monkey.py
@@ -0,0 +1,164 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.python.monkey}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial import unittest
+from twisted.python.monkey import MonkeyPatcher
+
+
+class TestObj:
+    def __init__(self):
+        self.foo = 'foo value'
+        self.bar = 'bar value'
+        self.baz = 'baz value'
+
+
+
+class MonkeyPatcherTest(unittest.SynchronousTestCase):
+    """
+    Tests for L{MonkeyPatcher} monkey-patching class.
+    """
+
+    def setUp(self):
+        self.testObject = TestObj()
+        self.originalObject = TestObj()
+        self.monkeyPatcher = MonkeyPatcher()
+
+
+    def test_empty(self):
+        """
+        A monkey patcher without patches shouldn't change a thing.
+        """
+        self.monkeyPatcher.patch()
+
+        # We can't assert that all state is unchanged, but at least we can
+        # check our test object.
+        self.assertEqual(self.originalObject.foo, self.testObject.foo)
+        self.assertEqual(self.originalObject.bar, self.testObject.bar)
+        self.assertEqual(self.originalObject.baz, self.testObject.baz)
+
+
+    def test_constructWithPatches(self):
+        """
+        Constructing a L{MonkeyPatcher} with patches should add all of the
+        given patches to the patch list.
+        """
+        patcher = MonkeyPatcher((self.testObject, 'foo', 'haha'),
+                                (self.testObject, 'bar', 'hehe'))
+        patcher.patch()
+        self.assertEqual('haha', self.testObject.foo)
+        self.assertEqual('hehe', self.testObject.bar)
+        self.assertEqual(self.originalObject.baz, self.testObject.baz)
+
+
+    def test_patchExisting(self):
+        """
+        Patching an attribute that exists sets it to the value defined in the
+        patch.
+        """
+        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
+        self.monkeyPatcher.patch()
+        self.assertEqual(self.testObject.foo, 'haha')
+
+
+    def test_patchNonExisting(self):
+        """
+        Patching a non-existing attribute fails with an C{AttributeError}.
+        """
+        self.monkeyPatcher.addPatch(self.testObject, 'nowhere',
+                                    'blow up please')
+        self.assertRaises(AttributeError, self.monkeyPatcher.patch)
+
+
+    def test_patchAlreadyPatched(self):
+        """
+        Adding a patch for an object and attribute that already have a patch
+        overrides the existing patch.
+        """
+        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'blah')
+        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'BLAH')
+        self.monkeyPatcher.patch()
+        self.assertEqual(self.testObject.foo, 'BLAH')
+        self.monkeyPatcher.restore()
+        self.assertEqual(self.testObject.foo, self.originalObject.foo)
+
+
+    def test_restoreTwiceIsANoOp(self):
+        """
+        Restoring an already-restored monkey patch is a no-op.
+        """
+        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'blah')
+        self.monkeyPatcher.patch()
+        self.monkeyPatcher.restore()
+        self.assertEqual(self.testObject.foo, self.originalObject.foo)
+        self.monkeyPatcher.restore()
+        self.assertEqual(self.testObject.foo, self.originalObject.foo)
+
+
+    def test_runWithPatchesDecoration(self):
+        """
+        runWithPatches should run the given callable, passing in all arguments
+        and keyword arguments, and return the return value of the callable.
+        """
+        log = []
+
+        def f(a, b, c=None):
+            log.append((a, b, c))
+            return 'foo'
+
+        result = self.monkeyPatcher.runWithPatches(f, 1, 2, c=10)
+        self.assertEqual('foo', result)
+        self.assertEqual([(1, 2, 10)], log)
+
+
+    def test_repeatedRunWithPatches(self):
+        """
+        We should be able to call the same function with runWithPatches more
+        than once. All patches should apply for each call.
+        """
+        def f():
+            return (self.testObject.foo, self.testObject.bar,
+                    self.testObject.baz)
+
+        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
+        result = self.monkeyPatcher.runWithPatches(f)
+        self.assertEqual(
+            ('haha', self.originalObject.bar, self.originalObject.baz), result)
+        result = self.monkeyPatcher.runWithPatches(f)
+        self.assertEqual(
+            ('haha', self.originalObject.bar, self.originalObject.baz),
+            result)
+
+
+    def test_runWithPatchesRestores(self):
+        """
+        C{runWithPatches} should restore the original values after the function
+        has executed.
+        """
+        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
+        self.assertEqual(self.originalObject.foo, self.testObject.foo)
+        self.monkeyPatcher.runWithPatches(lambda: None)
+        self.assertEqual(self.originalObject.foo, self.testObject.foo)
+
+
+    def test_runWithPatchesRestoresOnException(self):
+        """
+        Test that runWithPatches restores the original values even when the
+        function raises an exception.
+        """
+        def _():
+            self.assertEqual(self.testObject.foo, 'haha')
+            self.assertEqual(self.testObject.bar, 'blahblah')
+            raise RuntimeError("Something went wrong!")
+
+        self.monkeyPatcher.addPatch(self.testObject, 'foo', 'haha')
+        self.monkeyPatcher.addPatch(self.testObject, 'bar', 'blahblah')
+
+        self.assertRaises(RuntimeError, self.monkeyPatcher.runWithPatches, _)
+        self.assertEqual(self.testObject.foo, self.originalObject.foo)
+        self.assertEqual(self.testObject.bar, self.originalObject.bar)
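+
+
+
+# Editor's note: the sketch below is not part of the upstream Twisted test
+# suite.  It shows the MonkeyPatcher usage pattern the tests above exercise,
+# using only names defined or imported in this module.
+def _exampleMonkeyPatcherUsage():
+    """
+    Temporarily replace an attribute around a call and return both the value
+    seen while patched and the restored value (illustrative only).
+    """
+    target = TestObj()
+    patcher = MonkeyPatcher((target, 'foo', 'patched value'))
+    # runWithPatches applies the patch, calls the function, and restores the
+    # original attribute afterwards, even if the call raises.
+    seen = patcher.runWithPatches(lambda: target.foo)
+    return seen, target.foo    # ('patched value', 'foo value')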
diff --git a/ThirdParty/Twisted/twisted/test/test_newcred.py b/ThirdParty/Twisted/twisted/test/test_newcred.py
new file mode 100644
index 0000000..01660cd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_newcred.py
@@ -0,0 +1,445 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.cred}, now with 30% more starch.
+"""
+
+
+import hmac
+from zope.interface import implements, Interface
+
+from twisted.trial import unittest
+from twisted.cred import portal, checkers, credentials, error
+from twisted.python import components
+from twisted.internet import defer
+from twisted.internet.defer import deferredGenerator as dG, waitForDeferred as wFD
+
+try:
+    from crypt import crypt
+except ImportError:
+    crypt = None
+
+try:
+    from twisted.cred.pamauth import callIntoPAM
+except ImportError:
+    pamauth = None
+else:
+    from twisted.cred import pamauth
+
+
+class ITestable(Interface):
+    pass
+
+class TestAvatar:
+    def __init__(self, name):
+        self.name = name
+        self.loggedIn = False
+        self.loggedOut = False
+
+    def login(self):
+        assert not self.loggedIn
+        self.loggedIn = True
+
+    def logout(self):
+        self.loggedOut = True
+
+class Testable(components.Adapter):
+    implements(ITestable)
+
+# components.Interface(TestAvatar).adaptWith(Testable, ITestable)
+
+components.registerAdapter(Testable, TestAvatar, ITestable)
+
+class IDerivedCredentials(credentials.IUsernamePassword):
+    pass
+
+class DerivedCredentials(object):
+    implements(IDerivedCredentials, ITestable)
+
+    def __init__(self, username, password):
+        self.username = username
+        self.password = password
+
+    def checkPassword(self, password):
+        return password == self.password
+
+
+class TestRealm:
+    implements(portal.IRealm)
+    def __init__(self):
+        self.avatars = {}
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        if avatarId in self.avatars:
+            avatar = self.avatars[avatarId]
+        else:
+            avatar = TestAvatar(avatarId)
+            self.avatars[avatarId] = avatar
+        avatar.login()
+        return (interfaces[0], interfaces[0](avatar),
+                avatar.logout)
+
+class NewCredTest(unittest.TestCase):
+    def setUp(self):
+        r = self.realm = TestRealm()
+        p = self.portal = portal.Portal(r)
+        up = self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        up.addUser("bob", "hello")
+        p.registerChecker(up)
+
+    def testListCheckers(self):
+        expected = [credentials.IUsernamePassword, credentials.IUsernameHashedPassword]
+        got = self.portal.listCredentialsInterfaces()
+        expected.sort()
+        got.sort()
+        self.assertEqual(got, expected)
+
+    def testBasicLogin(self):
+        l = []; f = []
+        self.portal.login(credentials.UsernamePassword("bob", "hello"),
+                          self, ITestable).addCallback(
+            l.append).addErrback(f.append)
+        if f:
+            raise f[0]
+        # print l[0].getBriefTraceback()
+        iface, impl, logout = l[0]
+        # whitebox
+        self.assertEqual(iface, ITestable)
+        self.failUnless(iface.providedBy(impl),
+                        "%s does not implement %s" % (impl, iface))
+        # greybox
+        self.failUnless(impl.original.loggedIn)
+        self.failUnless(not impl.original.loggedOut)
+        logout()
+        self.failUnless(impl.original.loggedOut)
+
+    def test_derivedInterface(self):
+        """
+        Login with credentials implementing an interface inheriting from an
+        interface registered with a checker (but not itself registered).
+        """
+        l = []
+        f = []
+        self.portal.login(DerivedCredentials("bob", "hello"), self, ITestable
+            ).addCallback(l.append
+            ).addErrback(f.append)
+        if f:
+            raise f[0]
+        iface, impl, logout = l[0]
+        # whitebox
+        self.assertEqual(iface, ITestable)
+        self.failUnless(iface.providedBy(impl),
+                        "%s does not implement %s" % (impl, iface))
+        # greybox
+        self.failUnless(impl.original.loggedIn)
+        self.failUnless(not impl.original.loggedOut)
+        logout()
+        self.failUnless(impl.original.loggedOut)
+
+    def testFailedLogin(self):
+        l = []
+        self.portal.login(credentials.UsernamePassword("bob", "h3llo"),
+                          self, ITestable).addErrback(
+            lambda x: x.trap(error.UnauthorizedLogin)).addCallback(l.append)
+        self.failUnless(l)
+        self.assertEqual(error.UnauthorizedLogin, l[0])
+
+    def testFailedLoginName(self):
+        l = []
+        self.portal.login(credentials.UsernamePassword("jay", "hello"),
+                          self, ITestable).addErrback(
+            lambda x: x.trap(error.UnauthorizedLogin)).addCallback(l.append)
+        self.failUnless(l)
+        self.assertEqual(error.UnauthorizedLogin, l[0])
+
+
+class CramMD5CredentialsTestCase(unittest.TestCase):
+    def testIdempotentChallenge(self):
+        c = credentials.CramMD5Credentials()
+        chal = c.getChallenge()
+        self.assertEqual(chal, c.getChallenge())
+
+    def testCheckPassword(self):
+        c = credentials.CramMD5Credentials()
+        chal = c.getChallenge()
+        c.response = hmac.HMAC('secret', chal).hexdigest()
+        self.failUnless(c.checkPassword('secret'))
+
+    def testWrongPassword(self):
+        c = credentials.CramMD5Credentials()
+        self.failIf(c.checkPassword('secret'))
+
+class OnDiskDatabaseTestCase(unittest.TestCase):
+    users = [
+        ('user1', 'pass1'),
+        ('user2', 'pass2'),
+        ('user3', 'pass3'),
+    ]
+
+
+    def testUserLookup(self):
+        dbfile = self.mktemp()
+        db = checkers.FilePasswordDB(dbfile)
+        f = file(dbfile, 'w')
+        for (u, p) in self.users:
+            f.write('%s:%s\n' % (u, p))
+        f.close()
+
+        for (u, p) in self.users:
+            self.failUnlessRaises(KeyError, db.getUser, u.upper())
+            self.assertEqual(db.getUser(u), (u, p))
+
+    def testCaseInSensitivity(self):
+        dbfile = self.mktemp()
+        db = checkers.FilePasswordDB(dbfile, caseSensitive=0)
+        f = file(dbfile, 'w')
+        for (u, p) in self.users:
+            f.write('%s:%s\n' % (u, p))
+        f.close()
+
+        for (u, p) in self.users:
+            self.assertEqual(db.getUser(u.upper()), (u, p))
+
+    def testRequestAvatarId(self):
+        dbfile = self.mktemp()
+        db = checkers.FilePasswordDB(dbfile, caseSensitive=0)
+        f = file(dbfile, 'w')
+        for (u, p) in self.users:
+            f.write('%s:%s\n' % (u, p))
+        f.close()
+        creds = [credentials.UsernamePassword(u, p) for u, p in self.users]
+        d = defer.gatherResults(
+            [defer.maybeDeferred(db.requestAvatarId, c) for c in creds])
+        d.addCallback(self.assertEqual, [u for u, p in self.users])
+        return d
+
+    def testRequestAvatarId_hashed(self):
+        dbfile = self.mktemp()
+        db = checkers.FilePasswordDB(dbfile, caseSensitive=0)
+        f = file(dbfile, 'w')
+        for (u, p) in self.users:
+            f.write('%s:%s\n' % (u, p))
+        f.close()
+        creds = [credentials.UsernameHashedPassword(u, p) for u, p in self.users]
+        d = defer.gatherResults(
+            [defer.maybeDeferred(db.requestAvatarId, c) for c in creds])
+        d.addCallback(self.assertEqual, [u for u, p in self.users])
+        return d
+
+
+
+class HashedPasswordOnDiskDatabaseTestCase(unittest.TestCase):
+    users = [
+        ('user1', 'pass1'),
+        ('user2', 'pass2'),
+        ('user3', 'pass3'),
+    ]
+
+
+    def hash(self, u, p, s):
+        return crypt(p, s)
+
+    def setUp(self):
+        dbfile = self.mktemp()
+        self.db = checkers.FilePasswordDB(dbfile, hash=self.hash)
+        f = file(dbfile, 'w')
+        for (u, p) in self.users:
+            f.write('%s:%s\n' % (u, crypt(p, u[:2])))
+        f.close()
+        r = TestRealm()
+        self.port = portal.Portal(r)
+        self.port.registerChecker(self.db)
+
+    def testGoodCredentials(self):
+        goodCreds = [credentials.UsernamePassword(u, p) for u, p in self.users]
+        d = defer.gatherResults([self.db.requestAvatarId(c) for c in goodCreds])
+        d.addCallback(self.assertEqual, [u for u, p in self.users])
+        return d
+
+    def testGoodCredentials_login(self):
+        goodCreds = [credentials.UsernamePassword(u, p) for u, p in self.users]
+        d = defer.gatherResults([self.port.login(c, None, ITestable)
+                                 for c in goodCreds])
+        d.addCallback(lambda x: [a.original.name for i, a, l in x])
+        d.addCallback(self.assertEqual, [u for u, p in self.users])
+        return d
+
+    def testBadCredentials(self):
+        badCreds = [credentials.UsernamePassword(u, 'wrong password')
+                    for u, p in self.users]
+        d = defer.DeferredList([self.port.login(c, None, ITestable)
+                                for c in badCreds], consumeErrors=True)
+        d.addCallback(self._assertFailures, error.UnauthorizedLogin)
+        return d
+
+    def testHashedCredentials(self):
+        hashedCreds = [credentials.UsernameHashedPassword(u, crypt(p, u[:2]))
+                       for u, p in self.users]
+        d = defer.DeferredList([self.port.login(c, None, ITestable)
+                                for c in hashedCreds], consumeErrors=True)
+        d.addCallback(self._assertFailures, error.UnhandledCredentials)
+        return d
+
+    def _assertFailures(self, failures, *expectedFailures):
+        for flag, failure in failures:
+            self.assertEqual(flag, defer.FAILURE)
+            failure.trap(*expectedFailures)
+        return None
+
+    if crypt is None:
+        skip = "crypt module not available"
+
+class PluggableAuthenticationModulesTest(unittest.TestCase):
+
+    def setUp(self):
+        """
+        Replace L{pamauth.callIntoPAM} with a dummy implementation with
+        easily-controlled behavior.
+        """
+        self._oldCallIntoPAM = pamauth.callIntoPAM
+        pamauth.callIntoPAM = self.callIntoPAM
+
+
+    def tearDown(self):
+        """
+        Restore the original value of L{pamauth.callIntoPAM}.
+        """
+        pamauth.callIntoPAM = self._oldCallIntoPAM
+
+
+    def callIntoPAM(self, service, user, conv):
+        if service != 'Twisted':
+            raise error.UnauthorizedLogin('bad service: %s' % service)
+        if user != 'testuser':
+            raise error.UnauthorizedLogin('bad username: %s' % user)
+        questions = [
+                (1, "Password"),
+                (2, "Message w/ Input"),
+                (3, "Message w/o Input"),
+                ]
+        replies = conv(questions)
+        if replies != [
+            ("password", 0),
+            ("entry", 0),
+            ("", 0)
+            ]:
+                raise error.UnauthorizedLogin('bad conversion: %s' % repr(replies))
+        return 1
+
+    def _makeConv(self, d):
+        def conv(questions):
+            return defer.succeed([(d[t], 0) for t, q in questions])
+        return conv
+
+    def testRequestAvatarId(self):
+        db = checkers.PluggableAuthenticationModulesChecker()
+        conv = self._makeConv({1:'password', 2:'entry', 3:''})
+        creds = credentials.PluggableAuthenticationModules('testuser',
+                conv)
+        d = db.requestAvatarId(creds)
+        d.addCallback(self.assertEqual, 'testuser')
+        return d
+
+    def testBadCredentials(self):
+        db = checkers.PluggableAuthenticationModulesChecker()
+        conv = self._makeConv({1:'', 2:'', 3:''})
+        creds = credentials.PluggableAuthenticationModules('testuser',
+                conv)
+        d = db.requestAvatarId(creds)
+        self.assertFailure(d, error.UnauthorizedLogin)
+        return d
+
+    def testBadUsername(self):
+        db = checkers.PluggableAuthenticationModulesChecker()
+        conv = self._makeConv({1:'password', 2:'entry', 3:''})
+        creds = credentials.PluggableAuthenticationModules('baduser',
+                conv)
+        d = db.requestAvatarId(creds)
+        self.assertFailure(d, error.UnauthorizedLogin)
+        return d
+
+    if not pamauth:
+        skip = "Can't run without PyPAM"
+
+class CheckersMixin:
+    def testPositive(self):
+        for chk in self.getCheckers():
+            for (cred, avatarId) in self.getGoodCredentials():
+                r = wFD(chk.requestAvatarId(cred))
+                yield r
+                self.assertEqual(r.getResult(), avatarId)
+    testPositive = dG(testPositive)
+
+    def testNegative(self):
+        for chk in self.getCheckers():
+            for cred in self.getBadCredentials():
+                r = wFD(chk.requestAvatarId(cred))
+                yield r
+                self.assertRaises(error.UnauthorizedLogin, r.getResult)
+    testNegative = dG(testNegative)
+
+class HashlessFilePasswordDBMixin:
+    credClass = credentials.UsernamePassword
+    diskHash = None
+    networkHash = staticmethod(lambda x: x)
+
+    _validCredentials = [
+        ('user1', 'password1'),
+        ('user2', 'password2'),
+        ('user3', 'password3')]
+
+    def getGoodCredentials(self):
+        for u, p in self._validCredentials:
+            yield self.credClass(u, self.networkHash(p)), u
+
+    def getBadCredentials(self):
+        for u, p in [('user1', 'password3'),
+                     ('user2', 'password1'),
+                     ('bloof', 'blarf')]:
+            yield self.credClass(u, self.networkHash(p))
+
+    def getCheckers(self):
+        diskHash = self.diskHash or (lambda x: x)
+        hashCheck = self.diskHash and (lambda username, password, stored: self.diskHash(password))
+
+        for cache in True, False:
+            fn = self.mktemp()
+            fObj = file(fn, 'w')
+            for u, p in self._validCredentials:
+                fObj.write('%s:%s\n' % (u, diskHash(p)))
+            fObj.close()
+            yield checkers.FilePasswordDB(fn, cache=cache, hash=hashCheck)
+
+            fn = self.mktemp()
+            fObj = file(fn, 'w')
+            for u, p in self._validCredentials:
+                fObj.write('%s dingle dongle %s\n' % (diskHash(p), u))
+            fObj.close()
+            yield checkers.FilePasswordDB(fn, ' ', 3, 0, cache=cache, hash=hashCheck)
+
+            fn = self.mktemp()
+            fObj = file(fn, 'w')
+            for u, p in self._validCredentials:
+                fObj.write('zip,zap,%s,zup,%s\n' % (u.title(), diskHash(p)))
+            fObj.close()
+            yield checkers.FilePasswordDB(fn, ',', 2, 4, False, cache=cache, hash=hashCheck)
+
+class LocallyHashedFilePasswordDBMixin(HashlessFilePasswordDBMixin):
+    diskHash = staticmethod(lambda x: x.encode('hex'))
+
+class NetworkHashedFilePasswordDBMixin(HashlessFilePasswordDBMixin):
+    networkHash = staticmethod(lambda x: x.encode('hex'))
+    class credClass(credentials.UsernameHashedPassword):
+        def checkPassword(self, password):
+            return self.hashed.decode('hex') == password
+
+class HashlessFilePasswordDBCheckerTestCase(HashlessFilePasswordDBMixin, CheckersMixin, unittest.TestCase):
+    pass
+
+class LocallyHashedFilePasswordDBCheckerTestCase(LocallyHashedFilePasswordDBMixin, CheckersMixin, unittest.TestCase):
+    pass
+
+class NetworkHashedFilePasswordDBCheckerTestCase(NetworkHashedFilePasswordDBMixin, CheckersMixin, unittest.TestCase):
+    pass
+
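+
+# Editor's note: the sketch below is not part of the upstream Twisted test
+# suite.  It condenses the cred flow exercised throughout this module: a
+# realm hands out avatars, a checker validates credentials, and the portal
+# ties the two together.  Only names already defined or imported in this
+# module are used.
+def _exampleCredLogin():
+    """
+    Log 'bob' in through a freshly built portal and return the name of the
+    avatar wrapped by the returned adapter (illustrative only).
+    """
+    realm = TestRealm()
+    p = portal.Portal(realm)
+    db = checkers.InMemoryUsernamePasswordDatabaseDontUse()
+    db.addUser("bob", "hello")
+    p.registerChecker(db)
+
+    collected = []
+    d = p.login(credentials.UsernamePassword("bob", "hello"), None, ITestable)
+    # The in-memory checker answers synchronously, so the (interface, avatar,
+    # logout) tuple is available as soon as the callback is added.
+    d.addCallback(lambda result: collected.append(result[1]))
+    return collected[0].original.name    # 'bob'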
diff --git a/ThirdParty/Twisted/twisted/test/test_nmea.py b/ThirdParty/Twisted/twisted/test/test_nmea.py
new file mode 100644
index 0000000..9c4afbc
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_nmea.py
@@ -0,0 +1,115 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+ 
+
+"""Test cases for the NMEA GPS protocol"""
+
+import StringIO
+
+from twisted.trial import unittest
+from twisted.internet import reactor, protocol
+from twisted.python import reflect
+
+from twisted.protocols.gps import nmea
+
+class StringIOWithNoClose(StringIO.StringIO):
+    def close(self):
+        pass
+
+class ResultHarvester:
+    def __init__(self):
+        self.results = []
+
+    def __call__(self, *args):
+        self.results.append(args)
+
+    def performTest(self, function, *args, **kwargs):
+        l = len(self.results)
+        try:
+            function(*args, **kwargs)
+        except Exception, e:
+            self.results.append(e)
+        if l == len(self.results):
+            self.results.append(NotImplementedError())
+
+class NMEATester(nmea.NMEAReceiver):
+    ignore_invalid_sentence = 0
+    ignore_checksum_mismatch = 0
+    ignore_unknown_sentencetypes = 0
+    convert_dates_before_y2k = 1
+
+    def connectionMade(self):
+        self.resultHarvester = ResultHarvester()
+        for fn in reflect.prefixedMethodNames(self.__class__, 'decode_'):
+            setattr(self, 'handle_' + fn, self.resultHarvester)
+        
+class NMEAReceiverTestCase(unittest.TestCase):
+    messages = (
+        # fix - signal acquired
+        "$GPGGA,231713.0,3910.413,N,07641.994,W,1,05,1.35,00044,M,-033,M,,*69",
+        # fix - signal not acquired
+        "$GPGGA,235947.000,0000.0000,N,00000.0000,E,0,00,0.0,0.0,M,,,,0000*00",
+        # junk
+        "lkjasdfkl!@#(*$!@(*#(ASDkfjasdfLMASDCVKAW!@#($)!(@#)(*",
+        # fix - signal acquired (invalid checksum)
+        "$GPGGA,231713.0,3910.413,N,07641.994,W,1,05,1.35,00044,M,-033,M,,*68",
+        # invalid sentence
+        "$GPGGX,231713.0,3910.413,N,07641.994,W,1,05,1.35,00044,M,-033,M,,*68",
+        # position acquired
+        "$GPGLL,4250.5589,S,14718.5084,E,092204.999,A*2D",
+        # position not acquired
+        "$GPGLL,0000.0000,N,00000.0000,E,235947.000,V*2D",
+        # active satellites (no fix)
+        "$GPGSA,A,1,,,,,,,,,,,,,0.0,0.0,0.0*30",
+        # active satellites
+        "$GPGSA,A,3,01,20,19,13,,,,,,,,,40.4,24.4,32.2*0A",
+        # positiontime (no fix)
+        "$GPRMC,235947.000,V,0000.0000,N,00000.0000,E,,,041299,,*1D",
+        # positiontime
+        "$GPRMC,092204.999,A,4250.5589,S,14718.5084,E,0.00,89.68,211200,,*25",
+        # course over ground (no fix - not implemented)
+        "$GPVTG,,T,,M,,N,,K*4E",
+        # course over ground (not implemented)
+        "$GPVTG,89.68,T,,M,0.00,N,0.0,K*5F",
+    )
+    results = (
+        (83833.0, 39.17355, -76.6999, nmea.POSFIX_SPS, 5, 1.35, (44.0, 'M'), (-33.0, 'M'), None),
+        (86387.0, 0.0, 0.0, 0, 0, 0.0, (0.0, 'M'), None, None),
+        nmea.InvalidSentence(),
+        nmea.InvalidChecksum(),
+        nmea.InvalidSentence(),
+        (-42.842648333333337, 147.30847333333332, 33724.999000000003, 1),
+        (0.0, 0.0, 86387.0, 0),
+        ((None, None, None, None, None, None, None, None, None, None, None, None), (nmea.MODE_AUTO, nmea.MODE_NOFIX), 0.0, 0.0, 0.0),
+        ((1, 20, 19, 13, None, None, None, None, None, None, None, None), (nmea.MODE_AUTO, nmea.MODE_3D), 40.4, 24.4, 32.2),
+        (0.0, 0.0, None, None, 86387.0, (1999, 12, 4), None),
+        (-42.842648333333337, 147.30847333333332, 0.0, 89.68, 33724.999, (2000, 12, 21), None),
+        NotImplementedError(),
+        NotImplementedError(),
+    )
+    def testGPSMessages(self):
+        dummy = NMEATester()
+        dummy.makeConnection(protocol.FileWrapper(StringIOWithNoClose()))
+        for line in self.messages:
+            dummy.resultHarvester.performTest(dummy.lineReceived, line) 
+        def munge(myTuple):
+            if type(myTuple) != type(()):
+                return
+            newTuple = []
+            for v in myTuple:
+                if type(v) == type(1.1):
+                    v = float(int(v * 10000.0)) * 0.0001
+                newTuple.append(v)
+            return tuple(newTuple)
+        for (message, expectedResult, actualResult) in zip(self.messages, self.results, dummy.resultHarvester.results):
+            expectedResult = munge(expectedResult)
+            actualResult = munge(actualResult)
+            if isinstance(expectedResult, Exception):
+                if isinstance(actualResult, Exception):
+                    self.assertEqual(expectedResult.__class__, actualResult.__class__, "\nInput:\n%s\nExpected:\n%s.%s\nResults:\n%s.%s\n" % (message, expectedResult.__class__.__module__, expectedResult.__class__.__name__, actualResult.__class__.__module__, actualResult.__class__.__name__))
+                else:
+                    self.assertEqual(1, 0, "\nInput:\n%s\nExpected:\n%s.%s\nResults:\n%r\n" % (message, expectedResult.__class__.__module__, expectedResult.__class__.__name__, actualResult))
+            else:
+              self.assertEqual(expectedResult, actualResult, "\nInput:\n%s\nExpected: %r\nResults: %r\n" % (message, expectedResult, actualResult))
+
+testCases = [NMEAReceiverTestCase]
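+
+
+# Editor's note: the sketch below is not part of the upstream Twisted test
+# suite.  It feeds one sentence through the receiver outside the table-driven
+# test above; NMEATester routes every decoded sentence into its
+# resultHarvester, so the parsed fields can simply be read back out.
+def _exampleSingleSentence():
+    """
+    Parse one GPGGA fix and return the harvested tuple of decoded fields
+    (illustrative only).
+    """
+    receiver = NMEATester()
+    receiver.makeConnection(protocol.FileWrapper(StringIOWithNoClose()))
+    receiver.lineReceived(
+        "$GPGGA,231713.0,3910.413,N,07641.994,W,1,05,1.35,00044,M,-033,M,,*69")
+    return receiver.resultHarvester.results[0]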
diff --git a/ThirdParty/Twisted/twisted/test/test_paths.py b/ThirdParty/Twisted/twisted/test/test_paths.py
new file mode 100644
index 0000000..a657f75
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_paths.py
@@ -0,0 +1,1510 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases covering L{twisted.python.filepath}.
+"""
+
+from __future__ import division, absolute_import
+
+import os, time, pickle, errno, stat
+import contextlib
+from pprint import pformat
+
+from twisted.python.compat import set, _PY3
+from twisted.python.win32 import WindowsError, ERROR_DIRECTORY
+from twisted.python import filepath
+from twisted.python.runtime import platform
+
+from twisted.trial.unittest import SkipTest, SynchronousTestCase as TestCase
+
+from zope.interface.verify import verifyObject
+
+
+class BytesTestCase(TestCase):
+    """
+    Override default method implementations to support byte paths.
+    """
+    def mktemp(self):
+        """
+        Return a temporary path, encoded as bytes.
+        """
+        return TestCase.mktemp(self).encode("utf-8")
+
+
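+# Editor's note: the sketch below is not part of the upstream Twisted test
+# suite.  It shows the basic FilePath operations the cases below rely on --
+# child(), setContent(), getContent() and exists() -- in isolation.
+def _exampleFilePathUsage(base):
+    """
+    Write and read back a child file of C{base}, a L{filepath.FilePath}
+    pointing at an existing directory (illustrative only).
+    """
+    child = base.child(b"example.txt")
+    child.setContent(b"some bytes")
+    # child() rejects segments that would escape the parent directory, which
+    # is why the tests always build paths through it.
+    return child.exists(), child.getContent()
+
+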
+
+class AbstractFilePathTestCase(BytesTestCase):
+
+    f1content = b"file 1"
+    f2content = b"file 2"
+
+
+    def _mkpath(self, *p):
+        x = os.path.abspath(os.path.join(self.cmn, *p))
+        self.all.append(x)
+        return x
+
+
+    def subdir(self, *dirname):
+        os.mkdir(self._mkpath(*dirname))
+
+
+    def subfile(self, *dirname):
+        return open(self._mkpath(*dirname), "wb")
+
+
+    def setUp(self):
+        self.now = time.time()
+        cmn = self.cmn = os.path.abspath(self.mktemp())
+        self.all = [cmn]
+        os.mkdir(cmn)
+        self.subdir(b"sub1")
+        f = self.subfile(b"file1")
+        f.write(self.f1content)
+        f.close()
+        f = self.subfile(b"sub1", b"file2")
+        f.write(self.f2content)
+        f.close()
+        self.subdir(b'sub3')
+        f = self.subfile(b"sub3", b"file3.ext1")
+        f.close()
+        f = self.subfile(b"sub3", b"file3.ext2")
+        f.close()
+        f = self.subfile(b"sub3", b"file3.ext3")
+        f.close()
+        self.path = filepath.FilePath(cmn)
+        self.root = filepath.FilePath(b"/")
+
+
+    def test_segmentsFromPositive(self):
+        """
+        Verify that the segments between two paths are correctly identified.
+        """
+        self.assertEqual(
+            self.path.child(b"a").child(b"b").child(b"c").segmentsFrom(self.path),
+            [b"a", b"b", b"c"])
+
+    def test_segmentsFromNegative(self):
+        """
+        Verify that segmentsFrom notices when the ancestor isn't an ancestor.
+        """
+        self.assertRaises(
+            ValueError,
+            self.path.child(b"a").child(b"b").child(b"c").segmentsFrom,
+                self.path.child(b"d").child(b"c").child(b"e"))
+
+
+    def test_walk(self):
+        """
+        Verify that walking the path gives the same result as the known file
+        hierarchy.
+        """
+        x = [foo.path for foo in self.path.walk()]
+        self.assertEqual(set(x), set(self.all))
+
+
+    def test_parents(self):
+        """
+        L{FilePath.parents()} should return an iterator of every ancestor of
+        the L{FilePath} in question.
+        """
+        L = []
+        pathobj = self.path.child(b"a").child(b"b").child(b"c")
+        fullpath = pathobj.path
+        lastpath = fullpath
+        thispath = os.path.dirname(fullpath)
+        while lastpath != self.root.path:
+            L.append(thispath)
+            lastpath = thispath
+            thispath = os.path.dirname(thispath)
+        self.assertEqual([x.path for x in pathobj.parents()], L)
+
+
+    def test_validSubdir(self):
+        """
+        Verify that a valid subdirectory will show up as a directory, but not
+        as a file or a symlink, and that it is listable.
+        """
+        sub1 = self.path.child(b'sub1')
+        self.failUnless(sub1.exists(),
+                        "This directory does exist.")
+        self.failUnless(sub1.isdir(),
+                        "It's a directory.")
+        self.failUnless(not sub1.isfile(),
+                        "It's a directory.")
+        self.failUnless(not sub1.islink(),
+                        "It's a directory.")
+        self.assertEqual(sub1.listdir(),
+                             [b'file2'])
+
+
+    def test_invalidSubdir(self):
+        """
+        Verify that a subdirectory that doesn't exist is reported as such.
+        """
+        sub2 = self.path.child(b'sub2')
+        self.failIf(sub2.exists(),
+                    "This directory does not exist.")
+
+    def test_validFiles(self):
+        """
+        Make sure that we can read existent non-empty files.
+        """
+        f1 = self.path.child(b'file1')
+        with contextlib.closing(f1.open()) as f:
+            self.assertEqual(f.read(), self.f1content)
+        f2 = self.path.child(b'sub1').child(b'file2')
+        with contextlib.closing(f2.open()) as f:
+            self.assertEqual(f.read(), self.f2content)
+
+
+    def test_multipleChildSegments(self):
+        """
+        C{fp.descendant([a, b, c])} returns the same L{FilePath} as is returned
+        by C{fp.child(a).child(b).child(c)}.
+        """
+        multiple = self.path.descendant([b'a', b'b', b'c'])
+        single = self.path.child(b'a').child(b'b').child(b'c')
+        self.assertEqual(multiple, single)
+
+
+    def test_dictionaryKeys(self):
+        """
+        Verify that path instances are usable as dictionary keys.
+        """
+        f1 = self.path.child(b'file1')
+        f1prime = self.path.child(b'file1')
+        f2 = self.path.child(b'file2')
+        dictoid = {}
+        dictoid[f1] = 3
+        dictoid[f1prime] = 4
+        self.assertEqual(dictoid[f1], 4)
+        self.assertEqual(list(dictoid.keys()), [f1])
+        self.assertTrue(list(dictoid.keys())[0] is f1)
+        self.assertFalse(list(dictoid.keys())[0] is f1prime) # sanity check
+        dictoid[f2] = 5
+        self.assertEqual(dictoid[f2], 5)
+        self.assertEqual(len(dictoid), 2)
+
+
+    def test_dictionaryKeyWithString(self):
+        """
+        Verify that path instances are usable as dictionary keys which do not clash
+        with their string counterparts.
+        """
+        f1 = self.path.child(b'file1')
+        dictoid = {f1: 'hello'}
+        dictoid[f1.path] = 'goodbye'
+        self.assertEqual(len(dictoid), 2)
+
+
+    def test_childrenNonexistentError(self):
+        """
+        Verify that children raises the appropriate exception for non-existent
+        directories.
+        """
+        self.assertRaises(filepath.UnlistableError,
+                          self.path.child(b'not real').children)
+
+    def test_childrenNotDirectoryError(self):
+        """
+        Verify that children raises the appropriate exception when asked to
+        list a file rather than a directory.
+        """
+        self.assertRaises(filepath.UnlistableError,
+                          self.path.child(b'file1').children)
+
+
+    def test_newTimesAreFloats(self):
+        """
+        Verify that all times returned from the various new time functions are
+        floats (and hopefully therefore 'high precision').
+        """
+        for p in self.path, self.path.child(b'file1'):
+            self.assertEqual(type(p.getAccessTime()), float)
+            self.assertEqual(type(p.getModificationTime()), float)
+            self.assertEqual(type(p.getStatusChangeTime()), float)
+
+
+    def test_oldTimesAreInts(self):
+        """
+        Verify that all times returned from the various time functions are
+        integers, for compatibility.
+        """
+        for p in self.path, self.path.child(b'file1'):
+            self.assertEqual(type(p.getatime()), int)
+            self.assertEqual(type(p.getmtime()), int)
+            self.assertEqual(type(p.getctime()), int)
+
+
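+# A minimal illustrative sketch (not one of the tests above): the child /
+# descendant / segmentsFrom operations checked by AbstractFilePathTestCase are
+# purely lexical, so they can be combined without touching the filesystem.
+# The b"/tmp/base" path below is an arbitrary example value.
+def _exampleChildDescendantSegments():
+    from twisted.python import filepath
+    base = filepath.FilePath(b"/tmp/base")      # arbitrary example path
+    deep = base.descendant([b"a", b"b", b"c"])  # same as chained child() calls
+    assert deep == base.child(b"a").child(b"b").child(b"c")
+    assert deep.segmentsFrom(base) == [b"a", b"b", b"c"]
+    return deep
+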
+
+class FakeWindowsPath(filepath.FilePath):
+    """
+    A test version of FilePath which overrides listdir to raise L{WindowsError}.
+    """
+
+    def listdir(self):
+        """
+        @raise WindowsError: always.
+        """
+        raise WindowsError(
+            ERROR_DIRECTORY,
+            "A directory's validness was called into question")
+
+
+
+class ListingCompatibilityTests(BytesTestCase):
+    """
+    These tests verify compatibility with legacy behavior of directory listing.
+    """
+
+    def test_windowsErrorExcept(self):
+        """
+        Verify that when a WindowsError is raised from listdir, catching
+        WindowsError works.
+        """
+        fwp = FakeWindowsPath(self.mktemp())
+        self.assertRaises(filepath.UnlistableError, fwp.children)
+        self.assertRaises(WindowsError, fwp.children)
+
+
+    def test_alwaysCatchOSError(self):
+        """
+        Verify that in the normal case where a directory does not exist, we will
+        get an OSError.
+        """
+        fp = filepath.FilePath(self.mktemp())
+        self.assertRaises(OSError, fp.children)
+
+
+    def test_keepOriginalAttributes(self):
+        """
+        Verify that the Unlistable exception raised will preserve the attributes of
+        the previously-raised exception.
+        """
+        fp = filepath.FilePath(self.mktemp())
+        ose = self.assertRaises(OSError, fp.children)
+        d1 = list(ose.__dict__.keys())
+        d1.remove('originalException')
+        d2 = list(ose.originalException.__dict__.keys())
+        d1.sort()
+        d2.sort()
+        self.assertEqual(d1, d2)
+
+
+
+class ExplodingFile:
+    """
+    A C{file}-alike which raises exceptions from its I/O methods and keeps track
+    of whether it has been closed.
+
+    @ivar closed: A C{bool} which is C{False} until C{close} is called, then it
+        is C{True}.
+    """
+    closed = False
+
+    def read(self, n=0):
+        """
+        @raise IOError: Always raised.
+        """
+        raise IOError()
+
+
+    def write(self, what):
+        """
+        @raise IOError: Always raised.
+        """
+        raise IOError()
+
+
+    def close(self):
+        """
+        Mark the file as having been closed.
+        """
+        self.closed = True
+
+
+
+class TrackingFilePath(filepath.FilePath):
+    """
+    A subclass of L{filepath.FilePath} which maintains a list of all other paths
+    created by clonePath.
+
+    @ivar trackingList: A list of all paths created by this path via
+        C{clonePath}, which also includes paths created by methods like
+        C{parent}, C{sibling}, and C{child}, as well as all paths subsequently
+        created by those paths, and so on.
+
+    @type trackingList: C{list} of L{TrackingFilePath}
+
+    @ivar openedFiles: A list of all file objects opened by this
+        L{TrackingFilePath} or any other L{TrackingFilePath} in C{trackingList}.
+
+    @type openedFiles: C{list} of C{file}
+    """
+
+    def __init__(self, path, alwaysCreate=False, trackingList=None):
+        filepath.FilePath.__init__(self, path, alwaysCreate)
+        if trackingList is None:
+            trackingList = []
+        self.trackingList = trackingList
+        self.openedFiles = []
+
+
+    def open(self, *a, **k):
+        """
+        Override 'open' to track all files opened by this path.
+        """
+        f = filepath.FilePath.open(self, *a, **k)
+        self.openedFiles.append(f)
+        return f
+
+
+    def openedPaths(self):
+        """
+        Return a list of all L{TrackingFilePath}s associated with this
+        L{TrackingFilePath} that have had their C{open()} method called.
+        """
+        return [path for path in self.trackingList if path.openedFiles]
+
+
+    def clonePath(self, name):
+        """
+        Override L{filepath.FilePath.clonePath} to give the new path a reference
+        to the same tracking list.
+        """
+        clone = TrackingFilePath(name, trackingList=self.trackingList)
+        self.trackingList.append(clone)
+        return clone
+
+
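+# A minimal illustrative sketch (not one of the tests below) of how
+# TrackingFilePath is meant to be driven: setContent() writes to a freshly
+# named ".new" sibling and renames it over the target, so exactly one opened
+# sibling path is recorded per call.  The temporary directory is created here
+# only for the example.
+def _exampleTrackingFilePathUsage():
+    import os, tempfile
+    target = TrackingFilePath(
+        os.path.join(tempfile.mkdtemp(), "tracked").encode("utf-8"))
+    target.setContent(b"payload")
+    assert target.getContent() == b"payload"
+    opened = target.openedPaths()
+    assert len(opened) == 1
+    assert opened[0].basename().endswith(b".new")
+    return opened
+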
+
+class ExplodingFilePath(filepath.FilePath):
+    """
+    A specialized L{FilePath} which always returns an instance of
+    L{ExplodingFile} from its C{open} method.
+
+    @ivar fp: The L{ExplodingFile} instance most recently returned from the
+        C{open} method.
+    """
+
+    def __init__(self, pathName, originalExploder=None):
+        """
+        Initialize an L{ExplodingFilePath} with a name and a reference to the
+        L{ExplodingFilePath} whose C{fp} attribute should record the files
+        opened by this path.
+
+        @param pathName: The path name as passed to L{filepath.FilePath}.
+        @type pathName: C{str}
+
+        @param originalExploder: The L{ExplodingFilePath} to associate opened
+        files with.
+        @type originalExploder: L{ExplodingFilePath}
+        """
+        filepath.FilePath.__init__(self, pathName)
+        if originalExploder is None:
+            originalExploder = self
+        self._originalExploder = originalExploder
+
+
+    def open(self, mode=None):
+        """
+        Create, save, and return a new C{ExplodingFile}.
+
+        @param mode: Present for signature compatibility.  Ignored.
+
+        @return: A new C{ExplodingFile}.
+        """
+        f = self._originalExploder.fp = ExplodingFile()
+        return f
+
+
+    def clonePath(self, name):
+        return ExplodingFilePath(name, self._originalExploder)
+
+
+
+class PermissionsTestCase(BytesTestCase):
+    """
+    Test Permissions and RWX classes
+    """
+
+    def assertNotUnequal(self, first, second, msg=None):
+        """
+        Tests that C{first} != C{second} is false.  This method tests the
+        __ne__ method, as opposed to L{assertEqual} (C{first} == C{second}),
+        which tests the __eq__ method.
+
+        Note: this should really be part of trial
+        """
+        if first != second:
+            if msg is None:
+                msg = ''
+            if len(msg) > 0:
+                msg += '\n'
+            raise self.failureException(
+                '%snot not unequal (__ne__ not implemented correctly):'
+                '\na = %s\nb = %s\n'
+                % (msg, pformat(first), pformat(second)))
+        return first
+
+
+    def test_rwxFromBools(self):
+        """
+        L{RWX}'s constructor takes three booleans: read, write, and execute.
+        """
+        for r in (True, False):
+            for w in (True, False):
+                for x in (True, False):
+                    rwx = filepath.RWX(r, w, x)
+                    self.assertEqual(rwx.read, r)
+                    self.assertEqual(rwx.write, w)
+                    self.assertEqual(rwx.execute, x)
+        rwx = filepath.RWX(True, True, True)
+        self.assertTrue(rwx.read and rwx.write and rwx.execute)
+
+
+    def test_rwxEqNe(self):
+        """
+        L{RWX} instances created with the same booleans are equivalent.  If
+        the booleans differ, they are not equal.
+        """
+        for r in (True, False):
+            for w in (True, False):
+                for x in (True, False):
+                    self.assertEqual(filepath.RWX(r, w, x),
+                                      filepath.RWX(r, w, x))
+                    self.assertNotUnequal(filepath.RWX(r, w, x),
+                                          filepath.RWX(r, w, x))
+        self.assertNotEqual(filepath.RWX(True, True, True),
+                            filepath.RWX(True, True, False))
+        self.assertNotEqual(3, filepath.RWX(True, True, True))
+
+
+    def test_rwxShorthand(self):
+        """
+        L{RWX}'s shorthand string should be 'rwx' if read, write, and execute
+        permission bits are true.  If any of those permissions bits are false,
+        the character is replaced by a '-'.
+        """
+
+        def getChar(val, letter):
+            if val:
+                return letter
+            return '-'
+
+        for r in (True, False):
+            for w in (True, False):
+                for x in (True, False):
+                    rwx = filepath.RWX(r, w, x)
+                    self.assertEqual(rwx.shorthand(),
+                                      getChar(r, 'r') +
+                                      getChar(w, 'w') +
+                                      getChar(x, 'x'))
+        self.assertEqual(filepath.RWX(True, False, True).shorthand(), "r-x")
+
+
+    def test_permissionsFromStat(self):
+        """
+        L{Permissions}'s constructor takes a valid permissions bitmask and
+        parses it to produce the correct set of boolean permissions.
+        """
+        def _rwxFromStat(statModeInt, who):
+            def getPermissionBit(what, who):
+                return (statModeInt &
+                        getattr(stat, "S_I%s%s" % (what, who))) > 0
+            return filepath.RWX(*[getPermissionBit(what, who) for what in
+                         ('R', 'W', 'X')])
+
+        for u in range(0, 8):
+            for g in range(0, 8):
+                for o in range(0, 8):
+                    chmodString = "%d%d%d" % (u, g, o)
+                    chmodVal = int(chmodString, 8)
+                    perm = filepath.Permissions(chmodVal)
+                    self.assertEqual(perm.user,
+                                      _rwxFromStat(chmodVal, "USR"),
+                                      "%s: got user: %s" %
+                                      (chmodString, perm.user))
+                    self.assertEqual(perm.group,
+                                      _rwxFromStat(chmodVal, "GRP"),
+                                      "%s: got group: %s" %
+                                      (chmodString, perm.group))
+                    self.assertEqual(perm.other,
+                                      _rwxFromStat(chmodVal, "OTH"),
+                                      "%s: got other: %s" %
+                                      (chmodString, perm.other))
+        perm = filepath.Permissions(0o777)
+        for who in ("user", "group", "other"):
+            for what in ("read", "write", "execute"):
+                self.assertTrue(getattr(getattr(perm, who), what))
+
+
+    def test_permissionsEq(self):
+        """
+        Two L{Permissions} instances created with the same bitmask are
+        equivalent.
+        """
+        self.assertEqual(filepath.Permissions(0o777),
+                          filepath.Permissions(0o777))
+        self.assertNotUnequal(filepath.Permissions(0o777),
+                              filepath.Permissions(0o777))
+        self.assertNotEqual(filepath.Permissions(0o777),
+                            filepath.Permissions(0o700))
+        self.assertNotEqual(3, filepath.Permissions(0o777))
+
+
+    def test_permissionsShorthand(self):
+        """
+        L{Permissions}'s shorthand string is the RWX shorthand string for its
+        user permission bits, group permission bits, and other permission bits
+        concatenated together, without a space.
+        """
+        for u in range(0, 8):
+            for g in range(0, 8):
+                for o in range(0, 8):
+                    perm = filepath.Permissions(int("0o%d%d%d" % (u, g, o), 8))
+                    self.assertEqual(perm.shorthand(),
+                                      ''.join(x.shorthand() for x in (
+                                          perm.user, perm.group, perm.other)))
+        self.assertEqual(filepath.Permissions(0o770).shorthand(), "rwxrwx---")
+
+
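+# A minimal illustrative sketch (not one of the tests): reading a mode back as
+# the Permissions / RWX objects exercised by PermissionsTestCase.  It assumes
+# a POSIX platform, where chmod honours all nine permission bits.
+def _examplePermissionsUsage():
+    import tempfile
+    from twisted.python import filepath
+    p = filepath.FilePath(tempfile.mkdtemp()).child("example")
+    p.touch()
+    p.chmod(0o640)
+    perm = p.getPermissions()
+    # 0o640 decomposes into user=rw-, group=r--, other=---.
+    assert perm == filepath.Permissions(0o640)
+    assert perm.user.read and perm.user.write and not perm.user.execute
+    assert perm.shorthand() == "rw-r-----"
+    return perm
+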
+
+class FilePathTestCase(AbstractFilePathTestCase):
+    """
+    Test various L{FilePath} path manipulations.
+    """
+
+
+    def test_verifyObject(self):
+        """
+        FilePaths implement IFilePath.
+        """
+
+        self.assertTrue(verifyObject(filepath.IFilePath, self.path))
+
+
+    def test_chmod(self):
+        """
+        L{FilePath.chmod} modifies the permissions of
+        the passed file as expected (using C{os.stat} to check). We use some
+        basic modes that should work everywhere (even on Windows).
+        """
+        for mode in (0o555, 0o777):
+            self.path.child(b"sub1").chmod(mode)
+            self.assertEqual(
+                stat.S_IMODE(os.stat(self.path.child(b"sub1").path).st_mode),
+                mode)
+
+
+    def symlink(self, target, name):
+        """
+        Create a symbolic link named C{name} pointing at C{target}.
+
+        @type target: C{str}
+        @type name: C{str}
+        @raise SkipTest: raised if symbolic links are not supported on the
+            host platform.
+        """
+        if getattr(os, 'symlink', None) is None:
+            raise SkipTest(
+                "Platform does not support symbolic links.")
+        os.symlink(target, name)
+
+
+    def createLinks(self):
+        """
+        Create several symbolic links to files and directories.
+        """
+        subdir = self.path.child(b"sub1")
+        self.symlink(subdir.path, self._mkpath(b"sub1.link"))
+        self.symlink(subdir.child(b"file2").path, self._mkpath(b"file2.link"))
+        self.symlink(subdir.child(b"file2").path,
+                     self._mkpath(b"sub1", b"sub1.file2.link"))
+
+
+    def test_realpathSymlink(self):
+        """
+        L{FilePath.realpath} returns the path of the ultimate target of a
+        symlink.
+        """
+        self.createLinks()
+        self.symlink(self.path.child(b"file2.link").path,
+                     self.path.child(b"link.link").path)
+        self.assertEqual(self.path.child(b"link.link").realpath(),
+                          self.path.child(b"sub1").child(b"file2"))
+
+
+    def test_realpathCyclicalSymlink(self):
+        """
+        L{FilePath.realpath} raises L{filepath.LinkError} if the path is a
+        symbolic link which is part of a cycle.
+        """
+        self.symlink(self.path.child(b"link1").path, self.path.child(b"link2").path)
+        self.symlink(self.path.child(b"link2").path, self.path.child(b"link1").path)
+        self.assertRaises(filepath.LinkError,
+                          self.path.child(b"link2").realpath)
+
+
+    def test_realpathNoSymlink(self):
+        """
+        L{FilePath.realpath} returns the path itself if the path is not a
+        symbolic link.
+        """
+        self.assertEqual(self.path.child(b"sub1").realpath(),
+                          self.path.child(b"sub1"))
+
+
+    def test_walkCyclicalSymlink(self):
+        """
+        Verify that walking a path with a cyclical symlink raises
+        L{filepath.LinkError}.
+        """
+        self.createLinks()
+        self.symlink(self.path.child(b"sub1").path,
+                     self.path.child(b"sub1").child(b"sub1.loopylink").path)
+        def iterateOverPath():
+            return [foo.path for foo in self.path.walk()]
+        self.assertRaises(filepath.LinkError, iterateOverPath)
+
+
+    def test_walkObeysDescendWithCyclicalSymlinks(self):
+        """
+        Verify that, after making a path with cyclical symlinks, when the
+        supplied C{descend} predicate returns C{False}, the target is not
+        traversed, as if it was a simple symlink.
+        """
+        self.createLinks()
+        # we create cyclical symlinks
+        self.symlink(self.path.child(b"sub1").path,
+                     self.path.child(b"sub1").child(b"sub1.loopylink").path)
+        def noSymLinks(path):
+            return not path.islink()
+        def iterateOverPath():
+            return [foo.path for foo in self.path.walk(descend=noSymLinks)]
+        self.assertTrue(iterateOverPath())
+
+
+    def test_walkObeysDescend(self):
+        """
+        Verify that when the supplied C{descend} predicate returns C{False},
+        the target is not traversed.
+        """
+        self.createLinks()
+        def noSymLinks(path):
+            return not path.islink()
+        x = [foo.path for foo in self.path.walk(descend=noSymLinks)]
+        self.assertEqual(set(x), set(self.all))
+
+
+    def test_getAndSet(self):
+        content = b'newcontent'
+        self.path.child(b'new').setContent(content)
+        newcontent = self.path.child(b'new').getContent()
+        self.assertEqual(content, newcontent)
+        content = b'content'
+        self.path.child(b'new').setContent(content, b'.tmp')
+        newcontent = self.path.child(b'new').getContent()
+        self.assertEqual(content, newcontent)
+
+
+    def test_getContentFileClosing(self):
+        """
+        If reading from the underlying file raises an exception,
+        L{FilePath.getContent} raises that exception after closing the file.
+        """
+        fp = ExplodingFilePath(b"")
+        self.assertRaises(IOError, fp.getContent)
+        self.assertTrue(fp.fp.closed)
+
+
+    def test_setContentFileClosing(self):
+        """
+        If writing to the underlying file raises an exception,
+        L{FilePath.setContent} raises that exception after closing the file.
+        """
+        fp = ExplodingFilePath(b"")
+        self.assertRaises(IOError, fp.setContent, b"blah")
+        self.assertTrue(fp.fp.closed)
+
+
+    def test_setContentNameCollision(self):
+        """
+        L{FilePath.setContent} will use a different temporary filename on each
+        invocation, so that multiple processes, threads, or reentrant
+        invocations will not collide with each other.
+        """
+        fp = TrackingFilePath(self.mktemp())
+        fp.setContent(b"alpha")
+        fp.setContent(b"beta")
+
+        # Sanity check: setContent should only open one derivative path each
+        # time to store the temporary file.
+        openedSiblings = fp.openedPaths()
+        self.assertEqual(len(openedSiblings), 2)
+        self.assertNotEqual(openedSiblings[0], openedSiblings[1])
+
+
+    def test_setContentExtension(self):
+        """
+        L{FilePath.setContent} creates temporary files with a user-supplied
+        extension, so that if it is somehow interrupted while writing them, the
+        file that it leaves behind will be identifiable.
+        """
+        fp = TrackingFilePath(self.mktemp())
+        fp.setContent(b"hello")
+        opened = fp.openedPaths()
+        self.assertEqual(len(opened), 1)
+        self.assertTrue(opened[0].basename().endswith(b".new"),
+                        "%s does not end with default '.new' extension" % (
+                            opened[0].basename()))
+        fp.setContent(b"goodbye", b"-something-else")
+        opened = fp.openedPaths()
+        self.assertEqual(len(opened), 2)
+        self.assertTrue(opened[1].basename().endswith(b"-something-else"),
+                        "%s does not end with -something-else extension" % (
+                            opened[1].basename()))
+
+
+    def test_symbolicLink(self):
+        """
+        Verify the behavior of the C{islink} method against links and
+        non-links. Also check that the symbolic link shares the directory
+        property with its target.
+        """
+        s4 = self.path.child(b"sub4")
+        s3 = self.path.child(b"sub3")
+        self.symlink(s3.path, s4.path)
+        self.assertTrue(s4.islink())
+        self.assertFalse(s3.islink())
+        self.assertTrue(s4.isdir())
+        self.assertTrue(s3.isdir())
+
+
+    def test_linkTo(self):
+        """
+        Verify that symlink creates a valid symlink that is both a link and a
+        file if its target is a file, or a directory if its target is a
+        directory.
+        """
+        targetLinks = [
+            (self.path.child(b"sub2"), self.path.child(b"sub2.link")),
+            (self.path.child(b"sub2").child(b"file3.ext1"),
+             self.path.child(b"file3.ext1.link"))
+            ]
+        for target, link in targetLinks:
+            target.linkTo(link)
+            self.assertTrue(link.islink(), "This is a link")
+            self.assertEqual(target.isdir(), link.isdir())
+            self.assertEqual(target.isfile(), link.isfile())
+
+
+    def test_linkToErrors(self):
+        """
+        Verify C{linkTo} fails in the following cases:
+            - the target is in a directory that doesn't exist
+            - the target already exists
+        """
+        self.assertRaises(OSError, self.path.child(b"file1").linkTo,
+                          self.path.child(b'nosub').child(b'file1'))
+        self.assertRaises(OSError, self.path.child(b"file1").linkTo,
+                          self.path.child(b'sub1').child(b'file2'))
+
+
+    if not getattr(os, "symlink", None):
+        skipMsg = "Your platform does not support symbolic links."
+        test_symbolicLink.skip = skipMsg
+        test_linkTo.skip = skipMsg
+        test_linkToErrors.skip = skipMsg
+
+
+    def testMultiExt(self):
+        f3 = self.path.child(b'sub3').child(b'file3')
+        exts = b'.foo', b'.bar', b'ext1', b'ext2', b'ext3'
+        self.failIf(f3.siblingExtensionSearch(*exts))
+        f3e = f3.siblingExtension(b".foo")
+        f3e.touch()
+        self.failIf(not f3.siblingExtensionSearch(*exts).exists())
+        self.failIf(not f3.siblingExtensionSearch(b'*').exists())
+        f3e.remove()
+        self.failIf(f3.siblingExtensionSearch(*exts))
+
+    def testPreauthChild(self):
+        fp = filepath.FilePath(b'.')
+        fp.preauthChild(b'foo/bar')
+        self.assertRaises(filepath.InsecurePath, fp.child, b'/foo')
+
+    def testStatCache(self):
+        p = self.path.child(b'stattest')
+        p.touch()
+        self.assertEqual(p.getsize(), 0)
+        self.assertEqual(abs(p.getmtime() - time.time()) // 20, 0)
+        self.assertEqual(abs(p.getctime() - time.time()) // 20, 0)
+        self.assertEqual(abs(p.getatime() - time.time()) // 20, 0)
+        self.assertEqual(p.exists(), True)
+        self.assertEqual(p.exists(), True)
+        # OOB removal: FilePath.remove() will automatically restat
+        os.remove(p.path)
+        # test caching
+        self.assertEqual(p.exists(), True)
+        p.restat(reraise=False)
+        self.assertEqual(p.exists(), False)
+        self.assertEqual(p.islink(), False)
+        self.assertEqual(p.isdir(), False)
+        self.assertEqual(p.isfile(), False)
+
+    def testPersist(self):
+        newpath = pickle.loads(pickle.dumps(self.path))
+        self.assertEqual(self.path.__class__, newpath.__class__)
+        self.assertEqual(self.path.path, newpath.path)
+
+    def testInsecureUNIX(self):
+        self.assertRaises(filepath.InsecurePath, self.path.child, b"..")
+        self.assertRaises(filepath.InsecurePath, self.path.child, b"/etc")
+        self.assertRaises(filepath.InsecurePath, self.path.child, b"../..")
+
+    def testInsecureWin32(self):
+        self.assertRaises(filepath.InsecurePath, self.path.child, b"..\\..")
+        self.assertRaises(filepath.InsecurePath, self.path.child, b"C:randomfile")
+
+    if platform.getType() != 'win32':
+        testInsecureWin32.skip = "Test will run only on Windows."
+
+
+    def testInsecureWin32Whacky(self):
+        """
+        Windows has 'special' filenames like NUL and CON and COM1 and LPR
+        and PRN and ... god knows what else.  They can be located anywhere in
+        the filesystem.  For obvious reasons, we do not wish to normally permit
+        access to these.
+        """
+        self.assertRaises(filepath.InsecurePath, self.path.child, b"CON")
+        self.assertRaises(filepath.InsecurePath, self.path.child, b"C:CON")
+        self.assertRaises(filepath.InsecurePath, self.path.child, r"C:\CON")
+
+    if platform.getType() != 'win32':
+        testInsecureWin32Whacky.skip = "Test will run only on Windows."
+
+
+    def testComparison(self):
+        self.assertEqual(filepath.FilePath(b'a'),
+                          filepath.FilePath(b'a'))
+        self.failUnless(filepath.FilePath(b'z') >
+                        filepath.FilePath(b'a'))
+        self.failUnless(filepath.FilePath(b'z') >=
+                        filepath.FilePath(b'a'))
+        self.failUnless(filepath.FilePath(b'a') >=
+                        filepath.FilePath(b'a'))
+        self.failUnless(filepath.FilePath(b'a') <=
+                        filepath.FilePath(b'a'))
+        self.failUnless(filepath.FilePath(b'a') <
+                        filepath.FilePath(b'z'))
+        self.failUnless(filepath.FilePath(b'a') <=
+                        filepath.FilePath(b'z'))
+        self.failUnless(filepath.FilePath(b'a') !=
+                        filepath.FilePath(b'z'))
+        self.failUnless(filepath.FilePath(b'z') !=
+                        filepath.FilePath(b'a'))
+
+        self.failIf(filepath.FilePath(b'z') !=
+                    filepath.FilePath(b'z'))
+
+
+    def test_descendantOnly(self):
+        """
+        If C{".."} is in the sequence passed to L{FilePath.descendant},
+        L{InsecurePath} is raised.
+        """
+        self.assertRaises(
+            filepath.InsecurePath, self.path.descendant, [b'a', b'..'])
+
+
+    def testSibling(self):
+        p = self.path.child(b'sibling_start')
+        ts = p.sibling(b'sibling_test')
+        self.assertEqual(ts.dirname(), p.dirname())
+        self.assertEqual(ts.basename(), b'sibling_test')
+        ts.createDirectory()
+        self.assertIn(ts, self.path.children())
+
+    def testTemporarySibling(self):
+        ts = self.path.temporarySibling()
+        self.assertEqual(ts.dirname(), self.path.dirname())
+        self.assertNotIn(ts.basename(), self.path.listdir())
+        ts.createDirectory()
+        self.assertIn(ts, self.path.parent().children())
+
+
+    def test_temporarySiblingExtension(self):
+        """
+        If L{FilePath.temporarySibling} is given an extension argument, it will
+        produce path objects with that extension appended to their names.
+        """
+        testExtension = b".test-extension"
+        ts = self.path.temporarySibling(testExtension)
+        self.assertTrue(ts.basename().endswith(testExtension),
+                        "%s does not end with %s" % (
+                            ts.basename(), testExtension))
+
+
+    def test_removeDirectory(self):
+        """
+        L{FilePath.remove} on a L{FilePath} that refers to a directory will
+        recursively delete its contents.
+        """
+        self.path.remove()
+        self.failIf(self.path.exists())
+
+
+    def test_removeWithSymlink(self):
+        """
+        For a path which is a symbolic link, L{FilePath.remove} just deletes
+        the link, not the target.
+        """
+        link = self.path.child(b"sub1.link")
+        # setUp creates the sub1 child
+        self.symlink(self.path.child(b"sub1").path, link.path)
+        link.remove()
+        self.assertFalse(link.exists())
+        self.assertTrue(self.path.child(b"sub1").exists())
+
+
+    def test_copyToDirectory(self):
+        """
+        L{FilePath.copyTo} makes a copy of all the contents of the directory
+        named by that L{FilePath} if it is able to do so.
+        """
+        oldPaths = list(self.path.walk()) # Record initial state
+        fp = filepath.FilePath(self.mktemp())
+        self.path.copyTo(fp)
+        self.path.remove()
+        fp.copyTo(self.path)
+        newPaths = list(self.path.walk()) # Record double-copy state
+        newPaths.sort()
+        oldPaths.sort()
+        self.assertEqual(newPaths, oldPaths)
+
+
+    def test_copyToMissingDestFileClosing(self):
+        """
+        If an exception is raised while L{FilePath.copyTo} is trying to open
+        the source file to read from, the destination file is closed and the
+        exception is raised to the caller of L{FilePath.copyTo}.
+        """
+        nosuch = self.path.child(b"nothere")
+        # Make it look like something to copy, even though it doesn't exist.
+        # This could happen if the file is deleted between the isfile check and
+        # the file actually being opened.
+        nosuch.isfile = lambda: True
+
+        # We won't get as far as writing to this file, but it's still useful for
+        # tracking whether we closed it.
+        destination = ExplodingFilePath(self.mktemp())
+
+        self.assertRaises(IOError, nosuch.copyTo, destination)
+        self.assertTrue(destination.fp.closed)
+
+
+    def test_copyToFileClosing(self):
+        """
+        If an exception is raised while L{FilePath.copyTo} is copying bytes
+        between two regular files, the source and destination files are closed
+        and the exception propagates to the caller of L{FilePath.copyTo}.
+        """
+        destination = ExplodingFilePath(self.mktemp())
+        source = ExplodingFilePath(__file__)
+        self.assertRaises(IOError, source.copyTo, destination)
+        self.assertTrue(source.fp.closed)
+        self.assertTrue(destination.fp.closed)
+
+
+    def test_copyToDirectoryItself(self):
+        """
+        L{FilePath.copyTo} fails with an OSError or IOError (depending on
+        platform, as it propagates errors from open() and write()) when
+        attempting to copy a directory to a child of itself.
+        """
+        self.assertRaises((OSError, IOError),
+                          self.path.copyTo, self.path.child(b'file1'))
+
+
+    def test_copyToWithSymlink(self):
+        """
+        Verify that copying with C{followLinks=True} copies symlink targets
+        instead of the symlinks themselves.
+        """
+        self.symlink(self.path.child(b"sub1").path,
+                     self.path.child(b"link1").path)
+        fp = filepath.FilePath(self.mktemp())
+        self.path.copyTo(fp)
+        self.assertFalse(fp.child(b"link1").islink())
+        self.assertEqual([x.basename() for x in fp.child(b"sub1").children()],
+                          [x.basename() for x in fp.child(b"link1").children()])
+
+
+    def test_copyToWithoutSymlink(self):
+        """
+        Verify that copying with C{followLinks=False} copies symlinks as
+        symlinks.
+        """
+        self.symlink(b"sub1", self.path.child(b"link1").path)
+        fp = filepath.FilePath(self.mktemp())
+        self.path.copyTo(fp, followLinks=False)
+        self.assertTrue(fp.child(b"link1").islink())
+        self.assertEqual(os.readlink(self.path.child(b"link1").path),
+                          os.readlink(fp.child(b"link1").path))
+
+
+    def test_copyToMissingSource(self):
+        """
+        If the source path is missing, L{FilePath.copyTo} raises L{OSError}.
+        """
+        path = filepath.FilePath(self.mktemp())
+        exc = self.assertRaises(OSError, path.copyTo, b'some other path')
+        self.assertEqual(exc.errno, errno.ENOENT)
+
+
+    def test_moveTo(self):
+        """
+        Verify that moving an entire directory results in another directory
+        with the same content.
+        """
+        oldPaths = list(self.path.walk()) # Record initial state
+        fp = filepath.FilePath(self.mktemp())
+        self.path.moveTo(fp)
+        fp.moveTo(self.path)
+        newPaths = list(self.path.walk()) # Record double-move state
+        newPaths.sort()
+        oldPaths.sort()
+        self.assertEqual(newPaths, oldPaths)
+
+
+    def test_moveToExistsCache(self):
+        """
+        A L{FilePath} that has been moved aside with L{FilePath.moveTo} no
+        longer registers as existing.  Its previously non-existent target
+        exists, though, as it was created by the call to C{moveTo}.
+        """
+        fp = filepath.FilePath(self.mktemp())
+        fp2 = filepath.FilePath(self.mktemp())
+        fp.touch()
+
+        # Both a sanity check (make sure the file status looks right) and an
+        # enticement for stat-caching logic to kick in and remember that these
+        # exist / don't exist.
+        self.assertEqual(fp.exists(), True)
+        self.assertEqual(fp2.exists(), False)
+
+        fp.moveTo(fp2)
+        self.assertEqual(fp.exists(), False)
+        self.assertEqual(fp2.exists(), True)
+
+
+    def test_moveToExistsCacheCrossMount(self):
+        """
+        The assertion of test_moveToExistsCache should hold in the case of a
+        cross-mount move.
+        """
+        self.setUpFaultyRename()
+        self.test_moveToExistsCache()
+
+
+    def test_moveToSizeCache(self, hook=lambda : None):
+        """
+        L{FilePath.moveTo} clears its destination's status cache, such that
+        calls to L{FilePath.getsize} after the call to C{moveTo} will report the
+        new size, not the old one.
+
+        This is a separate test from C{test_moveToExistsCache} because it is
+        intended to cover the fact that the destination's cache is dropped;
+        test_moveToExistsCache doesn't cover this case because (currently) a
+        file that doesn't exist yet does not cache the fact of its non-
+        existence.
+        """
+        fp = filepath.FilePath(self.mktemp())
+        fp2 = filepath.FilePath(self.mktemp())
+        fp.setContent(b"1234")
+        fp2.setContent(b"1234567890")
+        hook()
+
+        # Sanity check / kick off caching.
+        self.assertEqual(fp.getsize(), 4)
+        self.assertEqual(fp2.getsize(), 10)
+        # Actually attempting to replace a file on Windows would fail with
+        # ERROR_ALREADY_EXISTS, but we don't need to test that, just the cached
+        # metadata, so, delete the file ...
+        os.remove(fp2.path)
+        # ... but don't clear the status cache, as fp2.remove() would.
+        self.assertEqual(fp2.getsize(), 10)
+
+        fp.moveTo(fp2)
+        self.assertEqual(fp2.getsize(), 4)
+
+
+    def test_moveToSizeCacheCrossMount(self):
+        """
+        The assertion of test_moveToSizeCache should hold in the case of a
+        cross-mount move.
+        """
+        self.test_moveToSizeCache(hook=self.setUpFaultyRename)
+
+
+    def test_moveToError(self):
+        """
+        Verify the error behavior of moveTo: it should raise OSError or
+        IOError if asked to move a path into one of its children.  It is
+        simply the error raised by the underlying rename system call.
+        """
+        self.assertRaises((OSError, IOError), self.path.moveTo, self.path.child(b'file1'))
+
+
+    def setUpFaultyRename(self):
+        """
+        Set up C{os.rename} so that it fails with C{errno.EXDEV} on the first
+        call.  This is used to simulate a cross-device rename failure.
+
+        @return: a list of (src, dest) pairs of the calls made to C{os.rename}
+        @rtype: C{list} of C{tuple}
+        """
+        invokedWith = []
+        def faultyRename(src, dest):
+            invokedWith.append((src, dest))
+            if len(invokedWith) == 1:
+                raise OSError(errno.EXDEV, 'Test-induced failure simulating '
+                                           'cross-device rename failure')
+            return originalRename(src, dest)
+
+        originalRename = os.rename
+        self.patch(os, "rename", faultyRename)
+        return invokedWith
+
+
+    def test_crossMountMoveTo(self):
+        """
+        C{moveTo} should be able to handle the C{EXDEV} error raised by
+        C{os.rename} when trying to move a file to a different mounted
+        filesystem.
+        """
+        invokedWith = self.setUpFaultyRename()
+        # Bit of a whitebox test - force os.rename, which moveTo tries
+        # before falling back to a slower method, to fail, forcing moveTo to
+        # use the slower behavior.
+        self.test_moveTo()
+        # A bit of a sanity check for this whitebox test - if our rename
+        # was never invoked, the test has probably fallen into disrepair!
+        self.assertTrue(invokedWith)
+
+
+    def test_crossMountMoveToWithSymlink(self):
+        """
+        By default, when moving a symlink, it should follow the link and
+        actually copy the content of the linked node.
+        """
+        invokedWith = self.setUpFaultyRename()
+        f2 = self.path.child(b'file2')
+        f3 = self.path.child(b'file3')
+        self.symlink(self.path.child(b'file1').path, f2.path)
+        f2.moveTo(f3)
+        self.assertFalse(f3.islink())
+        self.assertEqual(f3.getContent(), b'file 1')
+        self.assertTrue(invokedWith)
+
+
+    def test_crossMountMoveToWithoutSymlink(self):
+        """
+        Verify that moveTo called with C{followLinks=False} actually creates
+        another symlink.
+        """
+        invokedWith = self.setUpFaultyRename()
+        f2 = self.path.child(b'file2')
+        f3 = self.path.child(b'file3')
+        self.symlink(self.path.child(b'file1').path, f2.path)
+        f2.moveTo(f3, followLinks=False)
+        self.assertTrue(f3.islink())
+        self.assertEqual(f3.getContent(), b'file 1')
+        self.assertTrue(invokedWith)
+
+
+    def test_createBinaryMode(self):
+        """
+        L{FilePath.create} should always open (and write to) files in binary
+        mode; line-feed octets should be unmodified.
+
+        (While this test should pass on all platforms, it is only really
+        interesting on platforms which have the concept of binary mode, i.e.
+        Windows platforms.)
+        """
+        path = filepath.FilePath(self.mktemp())
+        f = path.create()
+        self.failUnless("b" in f.mode)
+        f.write(b"\n")
+        f.close()
+        read = open(path.path, "rb").read()
+        self.assertEqual(read, b"\n")
+
+
+    def testOpen(self):
+        # Opening a file for reading when it does not already exist is an error
+        nonexistent = self.path.child(b'nonexistent')
+        e = self.assertRaises(IOError, nonexistent.open)
+        self.assertEqual(e.errno, errno.ENOENT)
+
+        # Opening a file for writing when it does not exist is okay
+        writer = self.path.child(b'writer')
+        f = writer.open('w')
+        f.write(b'abc\ndef')
+        f.close()
+
+        # Make sure those bytes ended up there - and test opening a file for
+        # reading when it does exist at the same time
+        f = writer.open()
+        self.assertEqual(f.read(), b'abc\ndef')
+        f.close()
+
+        # Re-opening that file in write mode should erase whatever was there.
+        f = writer.open('w')
+        f.close()
+        f = writer.open()
+        self.assertEqual(f.read(), b'')
+        f.close()
+
+        # Put some bytes in a file so we can test that appending does not
+        # destroy them.
+        appender = self.path.child(b'appender')
+        f = appender.open('w')
+        f.write(b'abc')
+        f.close()
+
+        f = appender.open('a')
+        f.write(b'def')
+        f.close()
+
+        f = appender.open('r')
+        self.assertEqual(f.read(), b'abcdef')
+        f.close()
+
+        # read/write should let us do both without erasing those bytes
+        f = appender.open('r+')
+        self.assertEqual(f.read(), b'abcdef')
+        # ANSI C *requires* an fseek or an fgetpos between an fread and an
+        # fwrite or an fwrite and a fread.  We can't reliably get Python to
+        # invoke fgetpos, so we seek to a 0 byte offset from the current
+        # position instead.  Also, Python sucks for making this seek
+        # relative to 1 instead of a symbolic constant representing the
+        # current file position.
+        f.seek(0, 1)
+        # Put in some new bytes for us to test for later.
+        f.write(b'ghi')
+        f.close()
+
+        # Make sure those new bytes really showed up
+        f = appender.open('r')
+        self.assertEqual(f.read(), b'abcdefghi')
+        f.close()
+
+        # write/read should let us do both, but erase anything that's there
+        # already.
+        f = appender.open('w+')
+        self.assertEqual(f.read(), b'')
+        f.seek(0, 1) # Don't forget this!
+        f.write(b'123')
+        f.close()
+
+        # super append mode should let us read and write and also position the
+        # cursor at the end of the file, without erasing everything.
+        f = appender.open('a+')
+
+        # The order of these lines may seem surprising, but it is necessary.
+        # The cursor is not at the end of the file until after the first write.
+        f.write(b'456')
+        f.seek(0, 1) # Asinine.
+        self.assertEqual(f.read(), b'')
+
+        f.seek(0, 0)
+        self.assertEqual(f.read(), b'123456')
+        f.close()
+
+        # Opening a file exclusively must fail if that file exists already.
+        nonexistent.requireCreate(True)
+        nonexistent.open('w').close()
+        existent = nonexistent
+        del nonexistent
+        self.assertRaises((OSError, IOError), existent.open)
+
+
+    def test_openWithExplicitBinaryMode(self):
+        """
+        Due to a bug in Python 2.7 on Windows, including multiple 'b'
+        characters in the mode passed to the built-in open() will cause an
+        error.  FilePath.open() ensures that only a single 'b' character is
+        included in the mode passed to the built-in open().
+
+        See http://bugs.python.org/issue7686 for details about the bug.
+        """
+        writer = self.path.child(b'explicit-binary')
+        file = writer.open('wb')
+        file.write(b'abc\ndef')
+        file.close()
+        self.assertTrue(writer.exists())
+
+
+    def test_openWithRedundantExplicitBinaryModes(self):
+        """
+        Due to a bug in Python 2.7 on Windows, including multiple 'b'
+        characters in the mode passed to the built-in open() will cause an
+        error.  No matter how many 'b' modes are specified, FilePath.open()
+        ensures that only a single 'b' character is included in the mode
+        passed to the built-in open().
+
+        See http://bugs.python.org/issue7686 for details about the bug.
+        """
+        writer = self.path.child(b'multiple-binary')
+        file = writer.open('wbb')
+        file.write(b'abc\ndef')
+        file.close()
+        self.assertTrue(writer.exists())
+
+
+    def test_existsCache(self):
+        """
+        Check that C{filepath.FilePath.exists} correctly restats the object if
+        an operation has occurred in the meantime.
+        """
+        fp = filepath.FilePath(self.mktemp())
+        self.assertEqual(fp.exists(), False)
+
+        fp.makedirs()
+        self.assertEqual(fp.exists(), True)
+
+
+    def test_changed(self):
+        """
+        L{FilePath.changed} indicates that the L{FilePath} has changed, but does
+        not re-read the status information from the filesystem until it is
+        queried again via another method, such as C{getsize}.
+        """
+        fp = filepath.FilePath(self.mktemp())
+        fp.setContent(b"12345")
+        self.assertEqual(fp.getsize(), 5)
+
+        # Someone else comes along and changes the file.
+        fObj = open(fp.path, 'wb')
+        fObj.write(b"12345678")
+        fObj.close()
+
+        # Sanity check for caching: size should still be 5.
+        self.assertEqual(fp.getsize(), 5)
+        fp.changed()
+
+        # This path should look like we don't know what status it's in, not that
+        # we know that it didn't exist when last we checked.
+        self.assertEqual(fp.statinfo, None)
+        self.assertEqual(fp.getsize(), 8)
+
+
+    def test_getPermissions_POSIX(self):
+        """
+        Getting permissions for a file returns a L{Permissions} object on
+        POSIX platforms (which support separate user, group, and other
+        permission bits).
+        """
+        for mode in (0o777, 0o700):
+            self.path.child(b"sub1").chmod(mode)
+            self.assertEqual(self.path.child(b"sub1").getPermissions(),
+                              filepath.Permissions(mode))
+        self.path.child(b"sub1").chmod(0o764) #sanity check
+        self.assertEqual(self.path.child(b"sub1").getPermissions().shorthand(),
+                          "rwxrw-r--")
+
+
+    def test_getPermissions_Windows(self):
+        """
+        Getting permissions for a file returns a L{Permissions} object on
+        Windows.  Windows requires a different test, because user permissions
+        = group permissions = other permissions.  Also, chmod may not be able
+        to set the execute bit, so we are skipping tests that set the execute
+        bit.
+        """
+        for mode in (0o777, 0o555):
+            self.path.child(b"sub1").chmod(mode)
+            self.assertEqual(self.path.child(b"sub1").getPermissions(),
+                              filepath.Permissions(mode))
+        self.path.child(b"sub1").chmod(0o511) #sanity check to make sure that
+        # user=group=other permissions
+        self.assertEqual(self.path.child(b"sub1").getPermissions().shorthand(),
+                          "r-xr-xr-x")
+
+
+    def test_whetherBlockOrSocket(self):
+        """
+        Ensure that the path is reported as neither a block device nor a
+        socket.
+        """
+        self.assertFalse(self.path.isBlockDevice())
+        self.assertFalse(self.path.isSocket())
+
+
+    def test_statinfoBitsNotImplementedInWindows(self):
+        """
+        Verify that certain file stats are not available on Windows
+        """
+        self.assertRaises(NotImplementedError, self.path.getInodeNumber)
+        self.assertRaises(NotImplementedError, self.path.getDevice)
+        self.assertRaises(NotImplementedError, self.path.getNumberOfHardLinks)
+        self.assertRaises(NotImplementedError, self.path.getUserID)
+        self.assertRaises(NotImplementedError, self.path.getGroupID)
+
+
+    def test_statinfoBitsAreNumbers(self):
+        """
+        Verify that file inode/device/nlinks/uid/gid stats are numbers in
+        a POSIX environment
+        """
+        if _PY3:
+            numbers = int
+        else:
+            numbers = (int, long)
+        c = self.path.child(b'file1')
+        for p in self.path, c:
+            self.assertIsInstance(p.getInodeNumber(), numbers)
+            self.assertIsInstance(p.getDevice(), numbers)
+            self.assertIsInstance(p.getNumberOfHardLinks(), numbers)
+            self.assertIsInstance(p.getUserID(), numbers)
+            self.assertIsInstance(p.getGroupID(), numbers)
+        self.assertEqual(self.path.getUserID(), c.getUserID())
+        self.assertEqual(self.path.getGroupID(), c.getGroupID())
+
+
+    def test_statinfoNumbersAreValid(self):
+        """
+        Verify that the right numbers come back from the right accessor methods
+        for file inode/device/nlinks/uid/gid (in a POSIX environment)
+        """
+        # specify fake statinfo information
+        class FakeStat:
+            st_ino = 200
+            st_dev = 300
+            st_nlink = 400
+            st_uid = 500
+            st_gid = 600
+
+        # monkey patch in a fake restat method for self.path
+        fake = FakeStat()
+        def fakeRestat(*args, **kwargs):
+            self.path.statinfo = fake
+        self.path.restat = fakeRestat
+
+        # ensure that restat will need to be called to get values
+        self.path.statinfo = None
+
+        self.assertEqual(self.path.getInodeNumber(), fake.st_ino)
+        self.assertEqual(self.path.getDevice(), fake.st_dev)
+        self.assertEqual(self.path.getNumberOfHardLinks(), fake.st_nlink)
+        self.assertEqual(self.path.getUserID(), fake.st_uid)
+        self.assertEqual(self.path.getGroupID(), fake.st_gid)
+
+
+    if platform.isWindows():
+        test_statinfoBitsAreNumbers.skip = True
+        test_statinfoNumbersAreValid.skip = True
+        test_getPermissions_POSIX.skip = True
+    else:
+        test_statinfoBitsNotImplementedInWindows.skip = "Test will run only on Windows."
+        test_getPermissions_Windows.skip = "Test will run only on Windows."
+
diff --git a/ThirdParty/Twisted/twisted/test/test_pb.py b/ThirdParty/Twisted/twisted/test/test_pb.py
new file mode 100644
index 0000000..4616708
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_pb.py
@@ -0,0 +1,1846 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for Perspective Broker module.
+
+TODO: update protocol level tests to use new connection API, leaving
+only specific tests for old API.
+"""
+
+# issue1195 TODOs: replace pump.pump() with something involving Deferreds.
+# Clean up warning suppression.
+
+import sys, os, time, gc, weakref
+
+from cStringIO import StringIO
+from zope.interface import implements, Interface
+
+from twisted.trial import unittest
+from twisted.spread import pb, util, publish, jelly
+from twisted.internet import protocol, main, reactor
+from twisted.internet.error import ConnectionRefusedError
+from twisted.internet.defer import Deferred, gatherResults, succeed
+from twisted.protocols.policies import WrappingFactory
+from twisted.python import failure, log
+from twisted.cred.error import UnauthorizedLogin, UnhandledCredentials
+from twisted.cred import portal, checkers, credentials
+
+
+class Dummy(pb.Viewable):
+    def view_doNothing(self, user):
+        if isinstance(user, DummyPerspective):
+            return 'hello world!'
+        else:
+            return 'goodbye, cruel world!'
+
+
+class DummyPerspective(pb.Avatar):
+    """
+    An L{IPerspective} avatar which will be used in some tests.
+    """
+    def perspective_getDummyViewPoint(self):
+        return Dummy()
+
+
+
+class DummyRealm(object):
+    implements(portal.IRealm)
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        for iface in interfaces:
+            if iface is pb.IPerspective:
+                return iface, DummyPerspective(avatarId), lambda: None
+
+
+class IOPump:
+    """
+    Utility to pump data between clients and servers for protocol testing.
+
+    Perhaps this is a utility worthy of being in protocol.py?
+    """
+    def __init__(self, client, server, clientIO, serverIO):
+        self.client = client
+        self.server = server
+        self.clientIO = clientIO
+        self.serverIO = serverIO
+
+
+    def flush(self):
+        """
+        Pump until there is no more input or output or until L{stop} is called.
+        This does not run any timers, so don't use it with any code that calls
+        reactor.callLater.
+        """
+        # failsafe timeout
+        self._stop = False
+        timeout = time.time() + 5
+        while not self._stop and self.pump():
+            if time.time() > timeout:
+                return
+
+
+    def stop(self):
+        """
+        Stop a running L{flush} operation, even if data remains to be
+        transferred.
+        """
+        self._stop = True
+
+
+    def pump(self):
+        """
+        Move data back and forth.
+
+        Returns whether any data was moved.
+        """
+        self.clientIO.seek(0)
+        self.serverIO.seek(0)
+        cData = self.clientIO.read()
+        sData = self.serverIO.read()
+        self.clientIO.seek(0)
+        self.serverIO.seek(0)
+        self.clientIO.truncate()
+        self.serverIO.truncate()
+        self.client.transport._checkProducer()
+        self.server.transport._checkProducer()
+        for byte in cData:
+            self.server.dataReceived(byte)
+        for byte in sData:
+            self.client.dataReceived(byte)
+        if cData or sData:
+            return 1
+        else:
+            return 0
+
+
+
+def connectedServerAndClient(realm=None):
+    """
+    Connect a client and server L{Broker} together with an L{IOPump}
+
+    @param realm: realm to use, defaulting to a L{DummyRealm}
+
+    @returns: a 3-tuple (client, server, pump).
+    """
+    realm = realm or DummyRealm()
+    clientBroker = pb.Broker()
+    checker = checkers.InMemoryUsernamePasswordDatabaseDontUse(guest='guest')
+    factory = pb.PBServerFactory(portal.Portal(realm, [checker]))
+    serverBroker = factory.buildProtocol(('127.0.0.1',))
+
+    clientTransport = StringIO()
+    serverTransport = StringIO()
+    clientBroker.makeConnection(protocol.FileWrapper(clientTransport))
+    serverBroker.makeConnection(protocol.FileWrapper(serverTransport))
+    pump = IOPump(clientBroker, serverBroker, clientTransport, serverTransport)
+    # Challenge-response authentication:
+    pump.flush()
+    return clientBroker, serverBroker, pump
+
+
+class SimpleRemote(pb.Referenceable):
+    def remote_thunk(self, arg):
+        self.arg = arg
+        return arg + 1
+
+    def remote_knuth(self, arg):
+        raise Exception()
+
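+# A minimal illustrative sketch (not one of the tests): the usual way the
+# connectedServerAndClient() / IOPump helpers above are driven.  It assumes
+# the legacy named-object Broker API (setNameForLocal / remoteForName) used by
+# the older protocol-level tests in this module.
+def _exampleSimpleRemoteCall():
+    client, server, pump = connectedServerAndClient()
+    server.setNameForLocal("simple", SimpleRemote())
+    answers = []
+    ref = client.remoteForName("simple")
+    ref.callRemote("thunk", 5).addCallback(answers.append)
+    # Exchange the request and the response over the in-memory transports.
+    pump.flush()
+    assert answers == [6]
+    return answers
+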
+
+class NestedRemote(pb.Referenceable):
+    def remote_getSimple(self):
+        return SimpleRemote()
+
+
+class SimpleCopy(pb.Copyable):
+    def __init__(self):
+        self.x = 1
+        self.y = {"Hello":"World"}
+        self.z = ['test']
+
+
+class SimpleLocalCopy(pb.RemoteCopy):
+    pass
+
+pb.setUnjellyableForClass(SimpleCopy, SimpleLocalCopy)
+
+
+class SimpleFactoryCopy(pb.Copyable):
+    """
+    @cvar allIDs: holds every created instance of this class.
+    @type allIDs: C{dict}
+    """
+    allIDs = {}
+    def __init__(self, id):
+        self.id = id
+        SimpleFactoryCopy.allIDs[id] = self
+
+
+def createFactoryCopy(state):
+    """
+    Factory for L{SimpleFactoryCopy}: return the previously created instance
+    identified by the C{id} found in C{state}.
+    """
+    stateId = state.get("id", None)
+    if stateId is None:
+        raise RuntimeError("factory copy state has no 'id' member %s" %
+                           (repr(state),))
+    if stateId not in SimpleFactoryCopy.allIDs:
+        raise RuntimeError("factory class has no ID: %s" %
+                           (SimpleFactoryCopy.allIDs,))
+    inst = SimpleFactoryCopy.allIDs[stateId]
+    if not inst:
+        raise RuntimeError("factory method found no object with id")
+    return inst
+
+pb.setUnjellyableFactoryForClass(SimpleFactoryCopy, createFactoryCopy)
+
+
+class NestedCopy(pb.Referenceable):
+    def remote_getCopy(self):
+        return SimpleCopy()
+
+    def remote_getFactory(self, value):
+        return SimpleFactoryCopy(value)
+
+
+
+class SimpleCache(pb.Cacheable):
+    def __init__(self):
+        self.x = 1
+        self.y = {"Hello":"World"}
+        self.z = ['test']
+
+
+class NestedComplicatedCache(pb.Referenceable):
+    def __init__(self):
+        self.c = VeryVeryComplicatedCacheable()
+
+    def remote_getCache(self):
+        return self.c
+
+
+class VeryVeryComplicatedCacheable(pb.Cacheable):
+    def __init__(self):
+        self.x = 1
+        self.y = 2
+        self.foo = 3
+
+    def setFoo4(self):
+        self.foo = 4
+        self.observer.callRemote('foo',4)
+
+    def getStateToCacheAndObserveFor(self, perspective, observer):
+        self.observer = observer
+        return {"x": self.x,
+                "y": self.y,
+                "foo": self.foo}
+
+    def stoppedObserving(self, perspective, observer):
+        log.msg("stopped observing")
+        observer.callRemote("end")
+        if observer == self.observer:
+            self.observer = None
+
+
+class RatherBaroqueCache(pb.RemoteCache):
+    def observe_foo(self, newFoo):
+        self.foo = newFoo
+
+    def observe_end(self):
+        log.msg("the end of things")
+
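+# The Cacheable/RemoteCache pair above illustrates the observer flow: when
+# the cacheable is first sent, getStateToCacheAndObserveFor supplies the
+# initial state and keeps the observer reference; later calls such as
+# observer.callRemote('foo', 4) are delivered to observe_foo on the client's
+# RatherBaroqueCache, keeping the cached copy up to date (see test_cache).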
+pb.setUnjellyableForClass(VeryVeryComplicatedCacheable, RatherBaroqueCache)
+
+
+class SimpleLocalCache(pb.RemoteCache):
+    def setCopyableState(self, state):
+        self.__dict__.update(state)
+
+    def checkMethod(self):
+        return self.check
+
+    def checkSelf(self):
+        return self
+
+    def check(self):
+        return 1
+
+pb.setUnjellyableForClass(SimpleCache, SimpleLocalCache)
+
+
+class NestedCache(pb.Referenceable):
+    def __init__(self):
+        self.x = SimpleCache()
+
+    def remote_getCache(self):
+        return [self.x,self.x]
+
+    def remote_putCache(self, cache):
+        return (self.x is cache)
+
+
+class Observable(pb.Referenceable):
+    def __init__(self):
+        self.observers = []
+
+    def remote_observe(self, obs):
+        self.observers.append(obs)
+
+    def remote_unobserve(self, obs):
+        self.observers.remove(obs)
+
+    def notify(self, obj):
+        for observer in self.observers:
+            observer.callRemote('notify', self, obj)
+
+
+class DeferredRemote(pb.Referenceable):
+    def __init__(self):
+        self.run = 0
+
+    def runMe(self, arg):
+        self.run = arg
+        return arg + 1
+
+    def dontRunMe(self, arg):
+        assert 0, "shouldn't have been run!"
+
+    def remote_doItLater(self):
+        """
+        Return a L{Deferred} which is fired later by the test (via C{self.d});
+        when it fires, C{self.runMe} is called with its result, and that
+        result is sent back to the caller.
+        """
+        d = Deferred()
+        d.addCallbacks(self.runMe, self.dontRunMe)
+        self.d = d
+        return d
+
+
+class Observer(pb.Referenceable):
+    notified = 0
+    obj = None
+    def remote_notify(self, other, obj):
+        self.obj = obj
+        self.notified = self.notified + 1
+        other.callRemote('unobserve',self)
+
+
+class NewStyleCopy(pb.Copyable, pb.RemoteCopy, object):
+    def __init__(self, s):
+        self.s = s
+pb.setUnjellyableForClass(NewStyleCopy, NewStyleCopy)
+
+
+class NewStyleCopy2(pb.Copyable, pb.RemoteCopy, object):
+    allocated = 0
+    initialized = 0
+    value = 1
+
+    def __new__(cls):
+        NewStyleCopy2.allocated += 1
+        inst = object.__new__(cls)
+        inst.value = 2
+        return inst
+
+    def __init__(self):
+        NewStyleCopy2.initialized += 1
+
+pb.setUnjellyableForClass(NewStyleCopy2, NewStyleCopy2)
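+# Note (illustrative): a new-style class registered as its own RemoteCopy
+# arrives as the same class on the receiving side.  Unjellying allocates the
+# instance without running __init__, which is exactly what test_alloc below
+# demonstrates: 'allocated' grows with each copy while 'initialized' stays 1.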
+
+
+class NewStyleCacheCopy(pb.Cacheable, pb.RemoteCache, object):
+    def getStateToCacheAndObserveFor(self, perspective, observer):
+        return self.__dict__
+
+pb.setUnjellyableForClass(NewStyleCacheCopy, NewStyleCacheCopy)
+
+
+class Echoer(pb.Root):
+    def remote_echo(self, st):
+        return st
+
+
+class CachedReturner(pb.Root):
+    def __init__(self, cache):
+        self.cache = cache
+    def remote_giveMeCache(self, st):
+        return self.cache
+
+
+class NewStyleTestCase(unittest.TestCase):
+    def setUp(self):
+        """
+        Create a pb server using L{Echoer} protocol and connect a client to it.
+        """
+        self.serverFactory = pb.PBServerFactory(Echoer())
+        self.wrapper = WrappingFactory(self.serverFactory)
+        self.server = reactor.listenTCP(0, self.wrapper)
+        clientFactory = pb.PBClientFactory()
+        reactor.connectTCP("localhost", self.server.getHost().port,
+                           clientFactory)
+        def gotRoot(ref):
+            self.ref = ref
+        return clientFactory.getRootObject().addCallback(gotRoot)
+
+
+    def tearDown(self):
+        """
+        Close client and server connections, reset values of L{NewStyleCopy2}
+        class variables.
+        """
+        NewStyleCopy2.allocated = 0
+        NewStyleCopy2.initialized = 0
+        NewStyleCopy2.value = 1
+        self.ref.broker.transport.loseConnection()
+        # Disconnect any server-side connections too.
+        for proto in self.wrapper.protocols:
+            proto.transport.loseConnection()
+        return self.server.stopListening()
+
+    def test_newStyle(self):
+        """
+        Create a new style object, send it over the wire, and check the result.
+        """
+        orig = NewStyleCopy("value")
+        d = self.ref.callRemote("echo", orig)
+        def cb(res):
+            self.failUnless(isinstance(res, NewStyleCopy))
+            self.assertEqual(res.s, "value")
+            self.failIf(res is orig) # no cheating :)
+        d.addCallback(cb)
+        return d
+
+    def test_alloc(self):
+        """
+        Send a new style object and check the number of allocations.
+        """
+        orig = NewStyleCopy2()
+        self.assertEqual(NewStyleCopy2.allocated, 1)
+        self.assertEqual(NewStyleCopy2.initialized, 1)
+        d = self.ref.callRemote("echo", orig)
+        def cb(res):
+            # receiving the response creates a third one on the way back
+            self.failUnless(isinstance(res, NewStyleCopy2))
+            self.assertEqual(res.value, 2)
+            self.assertEqual(NewStyleCopy2.allocated, 3)
+            self.assertEqual(NewStyleCopy2.initialized, 1)
+            self.failIf(res is orig) # no cheating :)
+        # sending the object creates a second one on the far side
+        d.addCallback(cb)
+        return d
+
+
+
+class ConnectionNotifyServerFactory(pb.PBServerFactory):
+    """
+    A server factory which stores the last connection and fires a
+    L{Deferred} on connection made. This factory can handle only one
+    client connection.
+
+    @ivar protocolInstance: the last protocol instance.
+    @type protocolInstance: C{pb.Broker}
+
+    @ivar connectionMade: the deferred fired upon connection.
+    @type connectionMade: C{Deferred}
+    """
+    protocolInstance = None
+
+    def __init__(self, root):
+        """
+        Initialize the factory.
+        """
+        pb.PBServerFactory.__init__(self, root)
+        self.connectionMade = Deferred()
+
+
+    def clientConnectionMade(self, protocol):
+        """
+        Store the protocol and fire the connection deferred.
+        """
+        self.protocolInstance = protocol
+        d, self.connectionMade = self.connectionMade, None
+        if d is not None:
+            d.callback(None)
+
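+# Typical use of the factory above (see NewStyleCachedTestCase.setUp below):
+# gather the client's getRootObject() Deferred together with
+# factory.connectionMade so the test only proceeds once the server side has
+# seen the connection and protocolInstance has been set.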
+
+
+class NewStyleCachedTestCase(unittest.TestCase):
+    def setUp(self):
+        """
+        Create a pb server using L{CachedReturner} protocol and connect a
+        client to it.
+        """
+        self.orig = NewStyleCacheCopy()
+        self.orig.s = "value"
+        self.server = reactor.listenTCP(0,
+            ConnectionNotifyServerFactory(CachedReturner(self.orig)))
+        clientFactory = pb.PBClientFactory()
+        reactor.connectTCP("localhost", self.server.getHost().port,
+                           clientFactory)
+        def gotRoot(ref):
+            self.ref = ref
+        d1 = clientFactory.getRootObject().addCallback(gotRoot)
+        d2 = self.server.factory.connectionMade
+        return gatherResults([d1, d2])
+
+
+    def tearDown(self):
+        """
+        Close client and server connections.
+        """
+        self.server.factory.protocolInstance.transport.loseConnection()
+        self.ref.broker.transport.loseConnection()
+        return self.server.stopListening()
+
+
+    def test_newStyleCache(self):
+        """
+        A new-style cacheable object can be retrieved and re-retrieved over a
+        single connection.  The value of an attribute of the cacheable can be
+        accessed on the receiving side.
+        """
+        d = self.ref.callRemote("giveMeCache", self.orig)
+        def cb(res, again):
+            self.assertIsInstance(res, NewStyleCacheCopy)
+            self.assertEqual("value", res.s)
+            # no cheating :)
+            self.assertNotIdentical(self.orig, res)
+
+            if again:
+                # Save a reference so it stays alive for the rest of this test
+                self.res = res
+                # And ask for it again to exercise the special re-jelly logic in
+                # Cacheable.
+                return self.ref.callRemote("giveMeCache", self.orig)
+        d.addCallback(cb, True)
+        d.addCallback(cb, False)
+        return d
+
+
+
+class BrokerTestCase(unittest.TestCase):
+    thunkResult = None
+
+    def tearDown(self):
+        try:
+            # from RemotePublished.getFileName
+            os.unlink('None-None-TESTING.pub')
+        except OSError:
+            pass
+
+    def thunkErrorBad(self, error):
+        self.fail("This should cause a return value, not %s" % (error,))
+
+    def thunkResultGood(self, result):
+        self.thunkResult = result
+
+    def thunkErrorGood(self, tb):
+        pass
+
+    def thunkResultBad(self, result):
+        self.fail("This should cause an error, not %s" % (result,))
+
+    def test_reference(self):
+        c, s, pump = connectedServerAndClient()
+
+        class X(pb.Referenceable):
+            def remote_catch(self,arg):
+                self.caught = arg
+
+        class Y(pb.Referenceable):
+            def remote_throw(self, a, b):
+                a.callRemote('catch', b)
+
+        s.setNameForLocal("y", Y())
+        y = c.remoteForName("y")
+        x = X()
+        z = X()
+        y.callRemote('throw', x, z)
+        pump.pump()
+        pump.pump()
+        pump.pump()
+        self.assertIdentical(x.caught, z, "X should have caught Z")
+
+        # make sure references to remote methods are equal
+        self.assertEqual(y.remoteMethod('throw'), y.remoteMethod('throw'))
+
+    def test_result(self):
+        c, s, pump = connectedServerAndClient()
+        for x, y in (c, s), (s, c):
+            # test reflexivity
+            foo = SimpleRemote()
+            x.setNameForLocal("foo", foo)
+            bar = y.remoteForName("foo")
+            self.expectedThunkResult = 8
+            bar.callRemote('thunk',self.expectedThunkResult - 1
+                ).addCallbacks(self.thunkResultGood, self.thunkErrorBad)
+            # Send question.
+            pump.pump()
+            # Send response.
+            pump.pump()
+            # Shouldn't require any more pumping than that...
+            self.assertEqual(self.thunkResult, self.expectedThunkResult,
+                              "result wasn't received.")
+
+    def refcountResult(self, result):
+        self.nestedRemote = result
+
+    def test_tooManyRefs(self):
+        l = []
+        e = []
+        c, s, pump = connectedServerAndClient()
+        foo = NestedRemote()
+        s.setNameForLocal("foo", foo)
+        x = c.remoteForName("foo")
+        for igno in xrange(pb.MAX_BROKER_REFS + 10):
+            if s.transport.closed or c.transport.closed:
+                break
+            x.callRemote("getSimple").addCallbacks(l.append, e.append)
+            pump.pump()
+        expected = (pb.MAX_BROKER_REFS - 1)
+        self.assertTrue(s.transport.closed, "transport was not closed")
+        self.assertEqual(len(l), expected,
+                          "expected %s got %s" % (expected, len(l)))
+
+    def test_copy(self):
+        c, s, pump = connectedServerAndClient()
+        foo = NestedCopy()
+        s.setNameForLocal("foo", foo)
+        x = c.remoteForName("foo")
+        x.callRemote('getCopy'
+            ).addCallbacks(self.thunkResultGood, self.thunkErrorBad)
+        pump.pump()
+        pump.pump()
+        self.assertEqual(self.thunkResult.x, 1)
+        self.assertEqual(self.thunkResult.y['Hello'], 'World')
+        self.assertEqual(self.thunkResult.z[0], 'test')
+
+    def test_observe(self):
+        c, s, pump = connectedServerAndClient()
+
+        # this is really testing the comparison between remote objects, to make
+        # sure that you can *UN*observe when you have an observer architecture.
+        a = Observable()
+        b = Observer()
+        s.setNameForLocal("a", a)
+        ra = c.remoteForName("a")
+        ra.callRemote('observe',b)
+        pump.pump()
+        a.notify(1)
+        pump.pump()
+        pump.pump()
+        a.notify(10)
+        pump.pump()
+        pump.pump()
+        self.assertNotIdentical(b.obj, None, "didn't notify")
+        self.assertEqual(b.obj, 1, 'notified too much')
+
+    def test_defer(self):
+        c, s, pump = connectedServerAndClient()
+        d = DeferredRemote()
+        s.setNameForLocal("d", d)
+        e = c.remoteForName("d")
+        pump.pump(); pump.pump()
+        results = []
+        e.callRemote('doItLater').addCallback(results.append)
+        pump.pump(); pump.pump()
+        self.assertFalse(d.run, "Deferred method run too early.")
+        d.d.callback(5)
+        self.assertEqual(d.run, 5, "Deferred method run too late.")
+        pump.pump(); pump.pump()
+        self.assertEqual(results[0], 6, "Incorrect result.")
+
+
+    def test_refcount(self):
+        c, s, pump = connectedServerAndClient()
+        foo = NestedRemote()
+        s.setNameForLocal("foo", foo)
+        bar = c.remoteForName("foo")
+        bar.callRemote('getSimple'
+            ).addCallbacks(self.refcountResult, self.thunkErrorBad)
+
+        # send question
+        pump.pump()
+        # send response
+        pump.pump()
+
+        # delving into internal structures here, because GC is sort of
+        # inherently internal.
+        rluid = self.nestedRemote.luid
+        self.assertIn(rluid, s.localObjects)
+        del self.nestedRemote
+        # nudge the gc
+        if sys.hexversion >= 0x2000000:
+            gc.collect()
+        # try to nudge the GC even if we can't really
+        pump.pump()
+        pump.pump()
+        pump.pump()
+        self.assertNotIn(rluid, s.localObjects)
+
+    def test_cache(self):
+        c, s, pump = connectedServerAndClient()
+        obj = NestedCache()
+        obj2 = NestedComplicatedCache()
+        vcc = obj2.c
+        s.setNameForLocal("obj", obj)
+        s.setNameForLocal("xxx", obj2)
+        o2 = c.remoteForName("obj")
+        o3 = c.remoteForName("xxx")
+        coll = []
+        o2.callRemote("getCache"
+            ).addCallback(coll.append).addErrback(coll.append)
+        o2.callRemote("getCache"
+            ).addCallback(coll.append).addErrback(coll.append)
+        complex = []
+        o3.callRemote("getCache").addCallback(complex.append)
+        o3.callRemote("getCache").addCallback(complex.append)
+        pump.flush()
+        # "worst things first"
+        self.assertEqual(complex[0].x, 1)
+        self.assertEqual(complex[0].y, 2)
+        self.assertEqual(complex[0].foo, 3)
+
+        vcc.setFoo4()
+        pump.flush()
+        self.assertEqual(complex[0].foo, 4)
+        self.assertEqual(len(coll), 2)
+        cp = coll[0][0]
+        self.assertIdentical(cp.checkMethod().im_self, cp,
+                             "potential refcounting issue")
+        self.assertIdentical(cp.checkSelf(), cp,
+                             "other potential refcounting issue")
+        col2 = []
+        o2.callRemote('putCache',cp).addCallback(col2.append)
+        pump.flush()
+        # The objects were the same (testing lcache identity)
+        self.assertTrue(col2[0])
+        # test equality of references to methods
+        self.assertEqual(o2.remoteMethod("getCache"),
+                          o2.remoteMethod("getCache"))
+
+        # now, refcounting (similar to test_refcount)
+        luid = cp.luid
+        baroqueLuid = complex[0].luid
+        self.assertIn(luid, s.remotelyCachedObjects,
+                      "remote cache doesn't have it")
+        del coll
+        del cp
+        pump.flush()
+        del complex
+        del col2
+        # extra nudge...
+        pump.flush()
+        # del vcc.observer
+        # nudge the gc
+        if sys.hexversion >= 0x2000000:
+            gc.collect()
+        # try to nudge the GC even if we can't really
+        pump.flush()
+        # The GC is done with it.
+        self.assertNotIn(luid, s.remotelyCachedObjects,
+                         "Server still had it after GC")
+        self.assertNotIn(luid, c.locallyCachedObjects,
+                         "Client still had it after GC")
+        self.assertNotIn(baroqueLuid, s.remotelyCachedObjects,
+                         "Server still had complex after GC")
+        self.assertNotIn(baroqueLuid, c.locallyCachedObjects,
+                         "Client still had complex after GC")
+        self.assertIdentical(vcc.observer, None, "observer was not removed")
+
+    def test_publishable(self):
+        try:
+            os.unlink('None-None-TESTING.pub') # from RemotePublished.getFileName
+        except OSError:
+            pass # Sometimes it's not there.
+        c, s, pump = connectedServerAndClient()
+        foo = GetPublisher()
+        # foo.pub.timestamp = 1.0
+        s.setNameForLocal("foo", foo)
+        bar = c.remoteForName("foo")
+        accum = []
+        bar.callRemote('getPub').addCallbacks(accum.append, self.thunkErrorBad)
+        pump.flush()
+        obj = accum.pop()
+        self.assertEqual(obj.activateCalled, 1)
+        self.assertEqual(obj.isActivated, 1)
+        self.assertEqual(obj.yayIGotPublished, 1)
+        # timestamp's dirty, we don't have a cache file
+        self.assertEqual(obj._wasCleanWhenLoaded, 0)
+        c, s, pump = connectedServerAndClient()
+        s.setNameForLocal("foo", foo)
+        bar = c.remoteForName("foo")
+        bar.callRemote('getPub').addCallbacks(accum.append, self.thunkErrorBad)
+        pump.flush()
+        obj = accum.pop()
+        # timestamp's clean, our cache file is up-to-date
+        self.assertEqual(obj._wasCleanWhenLoaded, 1)
+
+    def gotCopy(self, val):
+        self.thunkResult = val.id
+
+
+    def test_factoryCopy(self):
+        c, s, pump = connectedServerAndClient()
+        ID = 99
+        obj = NestedCopy()
+        s.setNameForLocal("foo", obj)
+        x = c.remoteForName("foo")
+        x.callRemote('getFactory', ID
+            ).addCallbacks(self.gotCopy, self.thunkResultBad)
+        pump.pump()
+        pump.pump()
+        pump.pump()
+        self.assertEqual(self.thunkResult, ID,
+            "ID not correct on factory object %s" % (self.thunkResult,))
+
+
+bigString = "helloworld" * 50
+
+callbackArgs = None
+callbackKeyword = None
+
+def finishedCallback(*args, **kw):
+    global callbackArgs, callbackKeyword
+    callbackArgs = args
+    callbackKeyword = kw
+
+
+class Pagerizer(pb.Referenceable):
+    def __init__(self, callback, *args, **kw):
+        self.callback, self.args, self.kw = callback, args, kw
+
+    def remote_getPages(self, collector):
+        util.StringPager(collector, bigString, 100,
+                         self.callback, *self.args, **self.kw)
+        self.args = self.kw = None
+
+
+class FilePagerizer(pb.Referenceable):
+    pager = None
+
+    def __init__(self, filename, callback, *args, **kw):
+        self.filename = filename
+        self.callback, self.args, self.kw = callback, args, kw
+
+    def remote_getPages(self, collector):
+        self.pager = util.FilePager(collector, file(self.filename),
+                                    self.callback, *self.args, **self.kw)
+        self.args = self.kw = None
+
+
+
+class PagingTestCase(unittest.TestCase):
+    """
+    Test pb objects sending data by pages.
+    """
+
+    def setUp(self):
+        """
+        Create a file used to test L{util.FilePager}.
+        """
+        self.filename = self.mktemp()
+        fd = file(self.filename, 'w')
+        fd.write(bigString)
+        fd.close()
+
+
+    def test_pagingWithCallback(self):
+        """
+        Test L{util.StringPager}, passing a callback to fire when all pages
+        are sent.
+        """
+        c, s, pump = connectedServerAndClient()
+        s.setNameForLocal("foo", Pagerizer(finishedCallback, 'hello', value=10))
+        x = c.remoteForName("foo")
+        l = []
+        util.getAllPages(x, "getPages").addCallback(l.append)
+        while not l:
+            pump.pump()
+        self.assertEqual(''.join(l[0]), bigString,
+                          "Pages received not equal to pages sent!")
+        self.assertEqual(callbackArgs, ('hello',),
+                          "Completed callback not invoked")
+        self.assertEqual(callbackKeyword, {'value': 10},
+                          "Completed callback not invoked")
+
+
+    def test_pagingWithoutCallback(self):
+        """
+        Test L{util.StringPager} without a callback.
+        """
+        c, s, pump = connectedServerAndClient()
+        s.setNameForLocal("foo", Pagerizer(None))
+        x = c.remoteForName("foo")
+        l = []
+        util.getAllPages(x, "getPages").addCallback(l.append)
+        while not l:
+            pump.pump()
+        self.assertEqual(''.join(l[0]), bigString,
+                          "Pages received not equal to pages sent!")
+
+
+    def test_emptyFilePaging(self):
+        """
+        Test L{util.FilePager}, sending an empty file.
+        """
+        filenameEmpty = self.mktemp()
+        fd = file(filenameEmpty, 'w')
+        fd.close()
+        c, s, pump = connectedServerAndClient()
+        pagerizer = FilePagerizer(filenameEmpty, None)
+        s.setNameForLocal("bar", pagerizer)
+        x = c.remoteForName("bar")
+        l = []
+        util.getAllPages(x, "getPages").addCallback(l.append)
+        ttl = 10
+        while not l and ttl > 0:
+            pump.pump()
+            ttl -= 1
+        if not ttl:
+            self.fail('getAllPages timed out')
+        self.assertEqual(''.join(l[0]), '',
+                          "Pages received not equal to pages sent!")
+
+
+    def test_filePagingWithCallback(self):
+        """
+        Test L{util.FilePager}, passing a callback to fire when all pages
+        are sent, and verify that the pager doesn't keep chunks in memory.
+        """
+        c, s, pump = connectedServerAndClient()
+        pagerizer = FilePagerizer(self.filename, finishedCallback,
+                                  'frodo', value = 9)
+        s.setNameForLocal("bar", pagerizer)
+        x = c.remoteForName("bar")
+        l = []
+        util.getAllPages(x, "getPages").addCallback(l.append)
+        while not l:
+            pump.pump()
+        self.assertEqual(''.join(l[0]), bigString,
+                          "Pages received not equal to pages sent!")
+        self.assertEqual(callbackArgs, ('frodo',),
+                          "Completed callback not invoked")
+        self.assertEqual(callbackKeyword, {'value': 9},
+                          "Completed callback not invoked")
+        self.assertEqual(pagerizer.pager.chunks, [])
+
+
+    def test_filePagingWithoutCallback(self):
+        """
+        Test L{util.FilePager} without a callback.
+        """
+        c, s, pump = connectedServerAndClient()
+        pagerizer = FilePagerizer(self.filename, None)
+        s.setNameForLocal("bar", pagerizer)
+        x = c.remoteForName("bar")
+        l = []
+        util.getAllPages(x, "getPages").addCallback(l.append)
+        while not l:
+            pump.pump()
+        self.assertEqual(''.join(l[0]), bigString,
+                          "Pages received not equal to pages sent!")
+        self.assertEqual(pagerizer.pager.chunks, [])
+
+
+
+class DumbPublishable(publish.Publishable):
+    def getStateToPublish(self):
+        return {"yayIGotPublished": 1}
+
+
+class DumbPub(publish.RemotePublished):
+    def activated(self):
+        self.activateCalled = 1
+
+
+class GetPublisher(pb.Referenceable):
+    def __init__(self):
+        self.pub = DumbPublishable("TESTING")
+
+    def remote_getPub(self):
+        return self.pub
+
+
+pb.setUnjellyableForClass(DumbPublishable, DumbPub)
+
+class DisconnectionTestCase(unittest.TestCase):
+    """
+    Test disconnection callbacks.
+    """
+
+    def error(self, *args):
+        raise RuntimeError("I shouldn't have been called: %s" % (args,))
+
+
+    def gotDisconnected(self):
+        """
+        Called on broker disconnect.
+        """
+        self.gotCallback = 1
+
+    def objectDisconnected(self, o):
+        """
+        Called on RemoteReference disconnect.
+        """
+        self.assertEqual(o, self.remoteObject)
+        self.objectCallback = 1
+
+    def test_badSerialization(self):
+        c, s, pump = connectedServerAndClient()
+        pump.pump()
+        s.setNameForLocal("o", BadCopySet())
+        g = c.remoteForName("o")
+        l = []
+        g.callRemote("setBadCopy", BadCopyable()).addErrback(l.append)
+        pump.flush()
+        self.assertEqual(len(l), 1)
+
+    def test_disconnection(self):
+        c, s, pump = connectedServerAndClient()
+        pump.pump()
+        s.setNameForLocal("o", SimpleRemote())
+
+        # get a client reference to server object
+        r = c.remoteForName("o")
+        pump.pump()
+        pump.pump()
+        pump.pump()
+
+        # register and then unregister disconnect callbacks
+        # making sure they get unregistered
+        c.notifyOnDisconnect(self.error)
+        self.assertIn(self.error, c.disconnects)
+        c.dontNotifyOnDisconnect(self.error)
+        self.assertNotIn(self.error, c.disconnects)
+
+        r.notifyOnDisconnect(self.error)
+        self.assertIn(r._disconnected, c.disconnects)
+        self.assertIn(self.error, r.disconnectCallbacks)
+        r.dontNotifyOnDisconnect(self.error)
+        self.assertNotIn(r._disconnected, c.disconnects)
+        self.assertNotIn(self.error, r.disconnectCallbacks)
+
+        # register disconnect callbacks
+        c.notifyOnDisconnect(self.gotDisconnected)
+        r.notifyOnDisconnect(self.objectDisconnected)
+        self.remoteObject = r
+
+        # disconnect
+        c.connectionLost(failure.Failure(main.CONNECTION_DONE))
+        self.assertTrue(self.gotCallback)
+        self.assertTrue(self.objectCallback)
+
+
+class FreakOut(Exception):
+    pass
+
+
+class BadCopyable(pb.Copyable):
+    def getStateToCopyFor(self, p):
+        raise FreakOut()
+
+
+class BadCopySet(pb.Referenceable):
+    def remote_setBadCopy(self, bc):
+        return None
+
+
+class LocalRemoteTest(util.LocalAsRemote):
+    reportAllTracebacks = 0
+
+    def sync_add1(self, x):
+        return x + 1
+
+    def async_add(self, x=0, y=1):
+        return x + y
+
+    def async_fail(self):
+        raise RuntimeError()
+
+
+
+class MyPerspective(pb.Avatar):
+    """
+    @ivar loggedIn: set to C{True} when the avatar is logged in.
+    @type loggedIn: C{bool}
+
+    @ivar loggedOut: set to C{True} when the avatar is logged out.
+    @type loggedOut: C{bool}
+    """
+    implements(pb.IPerspective)
+
+    loggedIn = loggedOut = False
+
+    def __init__(self, avatarId):
+        self.avatarId = avatarId
+
+
+    def perspective_getAvatarId(self):
+        """
+        Return the avatar identifier which was used to access this avatar.
+        """
+        return self.avatarId
+
+
+    def perspective_getViewPoint(self):
+        return MyView()
+
+
+    def perspective_add(self, a, b):
+        """
+        Add the given objects and return the result.  This is a method
+        unavailable on L{Echoer}, so it can only be invoked by authenticated
+        users who received their avatar from L{TestRealm}.
+        """
+        return a + b
+
+
+    def logout(self):
+        self.loggedOut = True
+
+
+
+class TestRealm(object):
+    """
+    A realm which repeatedly gives out a single instance of L{MyPerspective}
+    for non-anonymous logins and which gives out a new instance of L{Echoer}
+    for each anonymous login.
+
+    @ivar lastPerspective: The L{MyPerspective} most recently created and
+        returned from C{requestAvatar}.
+
+    @ivar perspectiveFactory: A one-argument callable which will be used to
+        create avatars to be returned from C{requestAvatar}.
+    """
+    perspectiveFactory = MyPerspective
+
+    lastPerspective = None
+
+    def requestAvatar(self, avatarId, mind, interface):
+        """
+        Verify that the mind and interface supplied have the expected values
+        (this should really be done somewhere else, like inside a test method)
+        and return an avatar appropriate for the given identifier.
+        """
+        assert interface == pb.IPerspective
+        assert mind == "BRAINS!"
+        if avatarId is checkers.ANONYMOUS:
+            return pb.IPerspective, Echoer(), lambda: None
+        else:
+            self.lastPerspective = self.perspectiveFactory(avatarId)
+            self.lastPerspective.loggedIn = True
+            return (
+                pb.IPerspective, self.lastPerspective,
+                self.lastPerspective.logout)
+
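+# requestAvatar returns the usual cred triple (interface, avatar, logout):
+# PB hands the avatar back to the client as a RemoteReference and invokes the
+# logout callable once the connection (or the last remote reference to the
+# avatar) goes away, which is what test_loginLogout and test_logoutAfterDecref
+# below rely on.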
+
+
+class MyView(pb.Viewable):
+
+    def view_check(self, user):
+        return isinstance(user, MyPerspective)
+
+
+
+class LeakyRealm(TestRealm):
+    """
+    A realm which hangs onto a reference to the mind object in its logout
+    function.
+    """
+    def __init__(self, mindEater):
+        """
+        Create a L{LeakyRealm}.
+
+        @param mindEater: a callable that will be called with the C{mind}
+        object when it is available
+        """
+        self._mindEater = mindEater
+
+
+    def requestAvatar(self, avatarId, mind, interface):
+        self._mindEater(mind)
+        persp = self.perspectiveFactory(avatarId)
+        return (pb.IPerspective, persp, lambda : (mind, persp.logout()))
+
+
+
+class NewCredLeakTests(unittest.TestCase):
+    """
+    Tests to try to trigger memory leaks.
+    """
+    def test_logoutLeak(self):
+        """
+        The server does not leak a reference when the client disconnects
+        suddenly, even if the cred logout function forms a reference cycle with
+        the perspective.
+        """
+        # keep a weak reference to the mind object, which we can verify later
+        # evaluates to None, thereby ensuring the reference leak is fixed.
+        self.mindRef = None
+        def setMindRef(mind):
+            self.mindRef = weakref.ref(mind)
+
+        clientBroker, serverBroker, pump = connectedServerAndClient(
+                LeakyRealm(setMindRef))
+
+        # log in from the client
+        connectionBroken = []
+        root = clientBroker.remoteForName("root")
+        d = root.callRemote("login", 'guest')
+        def cbResponse((challenge, challenger)):
+            mind = SimpleRemote()
+            return challenger.callRemote("respond",
+                    pb.respond(challenge, 'guest'), mind)
+        d.addCallback(cbResponse)
+        def connectionLost(_):
+            pump.stop() # don't try to pump data anymore - it won't work
+            connectionBroken.append(1)
+            serverBroker.connectionLost(failure.Failure(RuntimeError("boom")))
+        d.addCallback(connectionLost)
+
+        # flush out the response and connectionLost
+        pump.flush()
+        self.assertEqual(connectionBroken, [1])
+
+        # and check for lingering references - requestAvatar sets mindRef
+        # to a weakref to the mind; this object should be gc'd, and thus
+        # the ref should return None
+        gc.collect()
+        self.assertEqual(self.mindRef(), None)
+
+
+
+class NewCredTestCase(unittest.TestCase):
+    """
+    Tests related to the L{twisted.cred} support in PB.
+    """
+    def setUp(self):
+        """
+        Create a portal with no checkers and wrap it around a simple test
+        realm.  Set up a PB server on a TCP port which serves perspectives
+        using that portal.
+        """
+        self.realm = TestRealm()
+        self.portal = portal.Portal(self.realm)
+        self.factory = ConnectionNotifyServerFactory(self.portal)
+        self.port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
+        self.portno = self.port.getHost().port
+
+
+    def tearDown(self):
+        """
+        Shut down the TCP port created by L{setUp}.
+        """
+        return self.port.stopListening()
+
+
+    def getFactoryAndRootObject(self, clientFactory=pb.PBClientFactory):
+        """
+        Create a connection to the test server.
+
+        @param clientFactory: the factory class used to create the connection.
+
+        @return: a tuple (C{factory}, C{deferred}), where factory is an
+            instance of C{clientFactory} and C{deferred} the L{Deferred} firing
+            with the PB root object.
+        """
+        factory = clientFactory()
+        rootObjDeferred = factory.getRootObject()
+        connector = reactor.connectTCP('127.0.0.1', self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return factory, rootObjDeferred
+
+
+    def test_getRootObject(self):
+        """
+        Assert only that L{PBClientFactory.getRootObject}'s Deferred fires with
+        a L{RemoteReference}.
+        """
+        factory, rootObjDeferred = self.getFactoryAndRootObject()
+
+        def gotRootObject(rootObj):
+            self.assertIsInstance(rootObj, pb.RemoteReference)
+            disconnectedDeferred = Deferred()
+            rootObj.notifyOnDisconnect(disconnectedDeferred.callback)
+            factory.disconnect()
+            return disconnectedDeferred
+
+        return rootObjDeferred.addCallback(gotRootObject)
+
+
+    def test_deadReferenceError(self):
+        """
+        Test that when a connection is lost, calling a method on a
+        RemoteReference obtained from it raises DeadReferenceError.
+        """
+        factory, rootObjDeferred = self.getFactoryAndRootObject()
+
+        def gotRootObject(rootObj):
+            disconnectedDeferred = Deferred()
+            rootObj.notifyOnDisconnect(disconnectedDeferred.callback)
+
+            def lostConnection(ign):
+                self.assertRaises(
+                    pb.DeadReferenceError,
+                    rootObj.callRemote, 'method')
+
+            disconnectedDeferred.addCallback(lostConnection)
+            factory.disconnect()
+            return disconnectedDeferred
+
+        return rootObjDeferred.addCallback(gotRootObject)
+
+
+    def test_clientConnectionLost(self):
+        """
+        Test that if the C{reconnecting} flag is passed with a True value then
+        a remote call made from a disconnection notification callback gets a
+        result successfully.
+        """
+        class ReconnectOnce(pb.PBClientFactory):
+            reconnectedAlready = False
+            def clientConnectionLost(self, connector, reason):
+                reconnecting = not self.reconnectedAlready
+                self.reconnectedAlready = True
+                if reconnecting:
+                    connector.connect()
+                return pb.PBClientFactory.clientConnectionLost(
+                    self, connector, reason, reconnecting)
+
+        factory, rootObjDeferred = self.getFactoryAndRootObject(ReconnectOnce)
+
+        def gotRootObject(rootObj):
+            self.assertIsInstance(rootObj, pb.RemoteReference)
+
+            d = Deferred()
+            rootObj.notifyOnDisconnect(d.callback)
+            factory.disconnect()
+
+            def disconnected(ign):
+                d = factory.getRootObject()
+
+                def gotAnotherRootObject(anotherRootObj):
+                    self.assertIsInstance(anotherRootObj, pb.RemoteReference)
+
+                    d = Deferred()
+                    anotherRootObj.notifyOnDisconnect(d.callback)
+                    factory.disconnect()
+                    return d
+                return d.addCallback(gotAnotherRootObject)
+            return d.addCallback(disconnected)
+        return rootObjDeferred.addCallback(gotRootObject)
+
+
+    def test_immediateClose(self):
+        """
+        Test that if a Broker loses its connection without receiving any bytes,
+        it doesn't raise any exceptions or log any errors.
+        """
+        serverProto = self.factory.buildProtocol(('127.0.0.1', 12345))
+        serverProto.makeConnection(protocol.FileWrapper(StringIO()))
+        serverProto.connectionLost(failure.Failure(main.CONNECTION_DONE))
+
+
+    def test_loginConnectionRefused(self):
+        """
+        L{PBClientFactory.login} returns a L{Deferred} which is errbacked
+        with the L{ConnectionRefusedError} if the underlying connection is
+        refused.
+        """
+        clientFactory = pb.PBClientFactory()
+        loginDeferred = clientFactory.login(
+            credentials.UsernamePassword("foo", "bar"))
+        clientFactory.clientConnectionFailed(
+            None,
+            failure.Failure(
+                ConnectionRefusedError("Test simulated refused connection")))
+        return self.assertFailure(loginDeferred, ConnectionRefusedError)
+
+
+    def _disconnect(self, ignore, factory):
+        """
+        Helper method disconnecting the given client factory and returning a
+        C{Deferred} that will fire when the server connection has noticed the
+        disconnection.
+        """
+        disconnectedDeferred = Deferred()
+        self.factory.protocolInstance.notifyOnDisconnect(
+            lambda: disconnectedDeferred.callback(None))
+        factory.disconnect()
+        return disconnectedDeferred
+
+
+    def test_loginLogout(self):
+        """
+        Test that login can be performed with IUsernamePassword credentials and
+        that when the connection is dropped the avatar is logged out.
+        """
+        self.portal.registerChecker(
+            checkers.InMemoryUsernamePasswordDatabaseDontUse(user='pass'))
+        factory = pb.PBClientFactory()
+        creds = credentials.UsernamePassword("user", "pass")
+
+        # NOTE: real code probably won't need anything where we have the
+        # "BRAINS!" argument; passing None is fine.  We just do it here to
+        # test that it is being passed.  It is used to give additional
+        # information to the realm to aid perspective creation; if you don't
+        # need that, ignore it.
+        mind = "BRAINS!"
+
+        d = factory.login(creds, mind)
+        def cbLogin(perspective):
+            self.assertTrue(self.realm.lastPerspective.loggedIn)
+            self.assertIsInstance(perspective, pb.RemoteReference)
+            return self._disconnect(None, factory)
+        d.addCallback(cbLogin)
+
+        def cbLogout(ignored):
+            self.assertTrue(self.realm.lastPerspective.loggedOut)
+        d.addCallback(cbLogout)
+
+        connector = reactor.connectTCP("127.0.0.1", self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+    def test_logoutAfterDecref(self):
+        """
+        If a L{RemoteReference} to an L{IPerspective} avatar is decrefed and
+        there remain no other references to the avatar on the server, the
+        avatar is garbage collected and the logout method called.
+        """
+        loggedOut = Deferred()
+
+        class EventPerspective(pb.Avatar):
+            """
+            An avatar which fires a Deferred when it is logged out.
+            """
+            def __init__(self, avatarId):
+                pass
+
+            def logout(self):
+                loggedOut.callback(None)
+
+        self.realm.perspectiveFactory = EventPerspective
+
+        self.portal.registerChecker(
+            checkers.InMemoryUsernamePasswordDatabaseDontUse(foo='bar'))
+        factory = pb.PBClientFactory()
+        d = factory.login(
+            credentials.UsernamePassword('foo', 'bar'), "BRAINS!")
+        def cbLoggedIn(avatar):
+            # Just wait for the logout to happen, as it should since the
+            # reference to the avatar will shortly no longer exist.
+            return loggedOut
+        d.addCallback(cbLoggedIn)
+        def cbLoggedOut(ignored):
+            # Verify that the server broker's _localCleanup dict isn't growing
+            # without bound.
+            self.assertEqual(self.factory.protocolInstance._localCleanup, {})
+        d.addCallback(cbLoggedOut)
+        d.addCallback(self._disconnect, factory)
+        connector = reactor.connectTCP("127.0.0.1", self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+    def test_concurrentLogin(self):
+        """
+        Two different correct login attempts can be made on the same root
+        object at the same time and produce two different resulting avatars.
+        """
+        self.portal.registerChecker(
+            checkers.InMemoryUsernamePasswordDatabaseDontUse(
+                foo='bar', baz='quux'))
+        factory = pb.PBClientFactory()
+
+        firstLogin = factory.login(
+            credentials.UsernamePassword('foo', 'bar'), "BRAINS!")
+        secondLogin = factory.login(
+            credentials.UsernamePassword('baz', 'quux'), "BRAINS!")
+        d = gatherResults([firstLogin, secondLogin])
+        def cbLoggedIn((first, second)):
+            return gatherResults([
+                    first.callRemote('getAvatarId'),
+                    second.callRemote('getAvatarId')])
+        d.addCallback(cbLoggedIn)
+        def cbAvatarIds((first, second)):
+            self.assertEqual(first, 'foo')
+            self.assertEqual(second, 'baz')
+        d.addCallback(cbAvatarIds)
+        d.addCallback(self._disconnect, factory)
+
+        connector = reactor.connectTCP('127.0.0.1', self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+    def test_badUsernamePasswordLogin(self):
+        """
+        Test that a login attempt with an invalid user or invalid password
+        fails in the appropriate way.
+        """
+        self.portal.registerChecker(
+            checkers.InMemoryUsernamePasswordDatabaseDontUse(user='pass'))
+        factory = pb.PBClientFactory()
+
+        firstLogin = factory.login(
+            credentials.UsernamePassword('nosuchuser', 'pass'))
+        secondLogin = factory.login(
+            credentials.UsernamePassword('user', 'wrongpass'))
+
+        self.assertFailure(firstLogin, UnauthorizedLogin)
+        self.assertFailure(secondLogin, UnauthorizedLogin)
+        d = gatherResults([firstLogin, secondLogin])
+
+        def cleanup(ignore):
+            errors = self.flushLoggedErrors(UnauthorizedLogin)
+            self.assertEqual(len(errors), 2)
+            return self._disconnect(None, factory)
+        d.addCallback(cleanup)
+
+        connector = reactor.connectTCP("127.0.0.1", self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+    def test_anonymousLogin(self):
+        """
+        Verify that a PB server using a portal configured with a checker which
+        allows IAnonymous credentials can be logged into using IAnonymous
+        credentials.
+        """
+        self.portal.registerChecker(checkers.AllowAnonymousAccess())
+        factory = pb.PBClientFactory()
+        d = factory.login(credentials.Anonymous(), "BRAINS!")
+
+        def cbLoggedIn(perspective):
+            return perspective.callRemote('echo', 123)
+        d.addCallback(cbLoggedIn)
+
+        d.addCallback(self.assertEqual, 123)
+
+        d.addCallback(self._disconnect, factory)
+
+        connector = reactor.connectTCP("127.0.0.1", self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+    def test_anonymousLoginNotPermitted(self):
+        """
+        Verify that without an anonymous checker set up, anonymous login is
+        rejected.
+        """
+        self.portal.registerChecker(
+            checkers.InMemoryUsernamePasswordDatabaseDontUse(user='pass'))
+        factory = pb.PBClientFactory()
+        d = factory.login(credentials.Anonymous(), "BRAINS!")
+        self.assertFailure(d, UnhandledCredentials)
+
+        def cleanup(ignore):
+            errors = self.flushLoggedErrors(UnhandledCredentials)
+            self.assertEqual(len(errors), 1)
+            return self._disconnect(None, factory)
+        d.addCallback(cleanup)
+
+        connector = reactor.connectTCP('127.0.0.1', self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+    def test_anonymousLoginWithMultipleCheckers(self):
+        """
+        Like L{test_anonymousLogin} but against a portal with a checker for
+        both IAnonymous and IUsernamePassword.
+        """
+        self.portal.registerChecker(checkers.AllowAnonymousAccess())
+        self.portal.registerChecker(
+            checkers.InMemoryUsernamePasswordDatabaseDontUse(user='pass'))
+        factory = pb.PBClientFactory()
+        d = factory.login(credentials.Anonymous(), "BRAINS!")
+
+        def cbLogin(perspective):
+            return perspective.callRemote('echo', 123)
+        d.addCallback(cbLogin)
+
+        d.addCallback(self.assertEqual, 123)
+
+        d.addCallback(self._disconnect, factory)
+
+        connector = reactor.connectTCP('127.0.0.1', self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+    def test_authenticatedLoginWithMultipleCheckers(self):
+        """
+        Like L{test_anonymousLoginWithMultipleCheckers} but check that
+        username/password authentication works.
+        """
+        self.portal.registerChecker(checkers.AllowAnonymousAccess())
+        self.portal.registerChecker(
+            checkers.InMemoryUsernamePasswordDatabaseDontUse(user='pass'))
+        factory = pb.PBClientFactory()
+        d = factory.login(
+            credentials.UsernamePassword('user', 'pass'), "BRAINS!")
+
+        def cbLogin(perspective):
+            return perspective.callRemote('add', 100, 23)
+        d.addCallback(cbLogin)
+
+        d.addCallback(self.assertEqual, 123)
+
+        d.addCallback(self._disconnect, factory)
+
+        connector = reactor.connectTCP('127.0.0.1', self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+    def test_view(self):
+        """
+        Verify that a viewpoint can be retrieved after authenticating with
+        cred.
+        """
+        self.portal.registerChecker(
+            checkers.InMemoryUsernamePasswordDatabaseDontUse(user='pass'))
+        factory = pb.PBClientFactory()
+        d = factory.login(
+            credentials.UsernamePassword("user", "pass"), "BRAINS!")
+
+        def cbLogin(perspective):
+            return perspective.callRemote("getViewPoint")
+        d.addCallback(cbLogin)
+
+        def cbView(viewpoint):
+            return viewpoint.callRemote("check")
+        d.addCallback(cbView)
+
+        d.addCallback(self.assertTrue)
+
+        d.addCallback(self._disconnect, factory)
+
+        connector = reactor.connectTCP("127.0.0.1", self.portno, factory)
+        self.addCleanup(connector.disconnect)
+        return d
+
+
+
+class NonSubclassingPerspective:
+    implements(pb.IPerspective)
+
+    def __init__(self, avatarId):
+        pass
+
+    # IPerspective implementation
+    def perspectiveMessageReceived(self, broker, message, args, kwargs):
+        args = broker.unserialize(args, self)
+        kwargs = broker.unserialize(kwargs, self)
+        return broker.serialize((message, args, kwargs))
+
+    # Methods required by TestRealm
+    def logout(self):
+        self.loggedOut = True
+
+
+
+class NSPTestCase(unittest.TestCase):
+    """
+    Tests for authentication against a realm where the L{IPerspective}
+    implementation is not a subclass of L{Avatar}.
+    """
+    def setUp(self):
+        self.realm = TestRealm()
+        self.realm.perspectiveFactory = NonSubclassingPerspective
+        self.portal = portal.Portal(self.realm)
+        self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        self.checker.addUser("user", "pass")
+        self.portal.registerChecker(self.checker)
+        self.factory = WrappingFactory(pb.PBServerFactory(self.portal))
+        self.port = reactor.listenTCP(0, self.factory, interface="127.0.0.1")
+        self.addCleanup(self.port.stopListening)
+        self.portno = self.port.getHost().port
+
+
+    def test_NSP(self):
+        """
+        An L{IPerspective} implementation which does not subclass
+        L{Avatar} can expose remote methods for the client to call.
+        """
+        factory = pb.PBClientFactory()
+        d = factory.login(credentials.UsernamePassword('user', 'pass'),
+                          "BRAINS!")
+        reactor.connectTCP('127.0.0.1', self.portno, factory)
+        d.addCallback(lambda p: p.callRemote('ANYTHING', 'here', bar='baz'))
+        d.addCallback(self.assertEqual,
+                      ('ANYTHING', ('here',), {'bar': 'baz'}))
+        def cleanup(ignored):
+            factory.disconnect()
+            for p in self.factory.protocols:
+                p.transport.loseConnection()
+        d.addCallback(cleanup)
+        return d
+
+
+
+class IForwarded(Interface):
+    """
+    Interface used for testing L{util.LocalAsyncForwarder}.
+    """
+
+    def forwardMe():
+        """
+        Simple synchronous method.
+        """
+
+    def forwardDeferred():
+        """
+        Simple asynchronous method.
+        """
+
+
+class Forwarded:
+    """
+    Test implementation of L{IForwarded}.
+
+    @ivar forwarded: set if C{forwardMe} is called.
+    @type forwarded: C{bool}
+    @ivar unforwarded: set if C{dontForwardMe} is called.
+    @type unforwarded: C{bool}
+    """
+    implements(IForwarded)
+    forwarded = False
+    unforwarded = False
+
+    def forwardMe(self):
+        """
+        Set a local flag to test afterwards.
+        """
+        self.forwarded = True
+
+    def dontForwardMe(self):
+        """
+        Set a local flag to test afterwards. This should not be called as it's
+        not in the interface.
+        """
+        self.unforwarded = True
+
+    def forwardDeferred(self):
+        """
+        Asynchronously return C{True}.
+        """
+        return succeed(True)
+
+
+class SpreadUtilTestCase(unittest.TestCase):
+    """
+    Tests for L{twisted.spread.util}.
+    """
+
+    def test_sync(self):
+        """
+        Call a synchronous method of a L{util.LocalAsRemote} object and check
+        the result.
+        """
+        o = LocalRemoteTest()
+        self.assertEqual(o.callRemote("add1", 2), 3)
+
+    def test_async(self):
+        """
+        Call an asynchronous method of a L{util.LocalAsRemote} object and check
+        the result.
+        """
+        o = LocalRemoteTest()
+        d = o.callRemote("add", 2, y=4)
+        self.assertIsInstance(d, Deferred)
+        d.addCallback(self.assertEqual, 6)
+        return d
+
+    def test_asyncFail(self):
+        """
+        Test an asynchronous failure on a remote method call.
+        """
+        o = LocalRemoteTest()
+        d = o.callRemote("fail")
+        def eb(f):
+            self.assertTrue(isinstance(f, failure.Failure))
+            f.trap(RuntimeError)
+        d.addCallbacks(lambda res: self.fail("supposed to fail"), eb)
+        return d
+
+    def test_remoteMethod(self):
+        """
+        Test the C{remoteMethod} facility of L{util.LocalAsRemote}.
+        """
+        o = LocalRemoteTest()
+        m = o.remoteMethod("add1")
+        self.assertEqual(m(3), 4)
+
+    def test_localAsyncForwarder(self):
+        """
+        Test a call to L{util.LocalAsyncForwarder} using L{Forwarded} local
+        object.
+        """
+        f = Forwarded()
+        lf = util.LocalAsyncForwarder(f, IForwarded)
+        lf.callRemote("forwardMe")
+        self.assertTrue(f.forwarded)
+        lf.callRemote("dontForwardMe")
+        self.assertFalse(f.unforwarded)
+        rr = lf.callRemote("forwardDeferred")
+        l = []
+        rr.addCallback(l.append)
+        self.assertEqual(l[0], 1)
+
+
+
+class PBWithSecurityOptionsTest(unittest.TestCase):
+    """
+    Test security customization.
+    """
+
+    def test_clientDefaultSecurityOptions(self):
+        """
+        By default, client broker should use C{jelly.globalSecurity} as
+        security settings.
+        """
+        factory = pb.PBClientFactory()
+        broker = factory.buildProtocol(None)
+        self.assertIdentical(broker.security, jelly.globalSecurity)
+
+
+    def test_serverDefaultSecurityOptions(self):
+        """
+        By default, server broker should use C{jelly.globalSecurity} as
+        security settings.
+        """
+        factory = pb.PBServerFactory(Echoer())
+        broker = factory.buildProtocol(None)
+        self.assertIdentical(broker.security, jelly.globalSecurity)
+
+
+    def test_clientSecurityCustomization(self):
+        """
+        Check that the security settings are passed from the client factory to
+        the broker object.
+        """
+        security = jelly.SecurityOptions()
+        factory = pb.PBClientFactory(security=security)
+        broker = factory.buildProtocol(None)
+        self.assertIdentical(broker.security, security)
+
+
+    def test_serverSecurityCustomization(self):
+        """
+        Check that the security settings are passed from the server factory to
+        the broker object.
+        """
+        security = jelly.SecurityOptions()
+        factory = pb.PBServerFactory(Echoer(), security=security)
+        broker = factory.buildProtocol(None)
+        self.assertIdentical(broker.security, security)
diff --git a/ThirdParty/Twisted/twisted/test/test_pbfailure.py b/ThirdParty/Twisted/twisted/test/test_pbfailure.py
new file mode 100644
index 0000000..91cd6ba
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_pbfailure.py
@@ -0,0 +1,475 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for error handling in PB.
+"""
+
+import sys
+from StringIO import StringIO
+
+from twisted.trial import unittest
+
+from twisted.spread import pb, flavors, jelly
+from twisted.internet import reactor, defer
+from twisted.python import log
+
+##
+# test exceptions
+##
+class AsynchronousException(Exception):
+    """
+    Helper used to test remote methods which return Deferreds which fail with
+    exceptions which are not L{pb.Error} subclasses.
+    """
+
+
+class SynchronousException(Exception):
+    """
+    Helper used to test remote methods which raise exceptions which are not
+    L{pb.Error} subclasses.
+    """
+
+
+class AsynchronousError(pb.Error):
+    """
+    Helper used to test remote methods which return Deferreds which fail with
+    exceptions which are L{pb.Error} subclasses.
+    """
+
+
+class SynchronousError(pb.Error):
+    """
+    Helper used to test remote methods which raise exceptions which are
+    L{pb.Error} subclasses.
+    """
+
+
+#class JellyError(flavors.Jellyable, pb.Error): pass
+class JellyError(flavors.Jellyable, pb.Error, pb.RemoteCopy):
+    pass
+
+
+class SecurityError(pb.Error, pb.RemoteCopy):
+    pass
+
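+# The registrations below let these exception types cross the wire intact:
+# each class is its own RemoteCopy, and allowInstancesOf additionally tells
+# the default (restrictive) security options to accept SecurityError
+# instances when unjellying.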
+pb.setUnjellyableForClass(JellyError, JellyError)
+pb.setUnjellyableForClass(SecurityError, SecurityError)
+pb.globalSecurity.allowInstancesOf(SecurityError)
+
+
+####
+# server-side
+####
+class SimpleRoot(pb.Root):
+    def remote_asynchronousException(self):
+        """
+        Fail asynchronously with a non-pb.Error exception.
+        """
+        return defer.fail(AsynchronousException("remote asynchronous exception"))
+
+    def remote_synchronousException(self):
+        """
+        Fail synchronously with a non-pb.Error exception.
+        """
+        raise SynchronousException("remote synchronous exception")
+
+    def remote_asynchronousError(self):
+        """
+        Fail asynchronously with a pb.Error exception.
+        """
+        return defer.fail(AsynchronousError("remote asynchronous error"))
+
+    def remote_synchronousError(self):
+        """
+        Fail synchronously with a pb.Error exception.
+        """
+        raise SynchronousError("remote synchronous error")
+
+    def remote_unknownError(self):
+        """
+        Fail with an error that is not known to the client.
+        """
+        class UnknownError(pb.Error):
+            pass
+        raise UnknownError("I'm not known to client!")
+
+    def remote_jelly(self):
+        self.raiseJelly()
+
+    def remote_security(self):
+        self.raiseSecurity()
+
+    def remote_deferredJelly(self):
+        d = defer.Deferred()
+        d.addCallback(self.raiseJelly)
+        d.callback(None)
+        return d
+
+    def remote_deferredSecurity(self):
+        d = defer.Deferred()
+        d.addCallback(self.raiseSecurity)
+        d.callback(None)
+        return d
+
+    def raiseJelly(self, results=None):
+        raise JellyError("I'm jellyable!")
+
+    def raiseSecurity(self, results=None):
+        raise SecurityError("I'm secure!")
+
+
+
+class SaveProtocolServerFactory(pb.PBServerFactory):
+    """
+    A L{pb.PBServerFactory} that saves the latest connected client in
+    C{protocolInstance}.
+    """
+    protocolInstance = None
+
+    def clientConnectionMade(self, protocol):
+        """
+        Keep track of the given protocol.
+        """
+        self.protocolInstance = protocol
+
+
+
+class PBConnTestCase(unittest.TestCase):
+    unsafeTracebacks = 0
+
+    def setUp(self):
+        self._setUpServer()
+        self._setUpClient()
+
+    def _setUpServer(self):
+        self.serverFactory = SaveProtocolServerFactory(SimpleRoot())
+        self.serverFactory.unsafeTracebacks = self.unsafeTracebacks
+        self.serverPort = reactor.listenTCP(0, self.serverFactory, interface="127.0.0.1")
+
+    def _setUpClient(self):
+        portNo = self.serverPort.getHost().port
+        self.clientFactory = pb.PBClientFactory()
+        self.clientConnector = reactor.connectTCP("127.0.0.1", portNo, self.clientFactory)
+
+    def tearDown(self):
+        if self.serverFactory.protocolInstance is not None:
+            self.serverFactory.protocolInstance.transport.loseConnection()
+        return defer.gatherResults([
+            self._tearDownServer(),
+            self._tearDownClient()])
+
+    def _tearDownServer(self):
+        return defer.maybeDeferred(self.serverPort.stopListening)
+
+    def _tearDownClient(self):
+        self.clientConnector.disconnect()
+        return defer.succeed(None)
+
+
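+# Illustrative sketch, not part of the upstream test suite: the minimal round
+# trip that the PBFailureTest methods below perform, using only APIs already
+# exercised in this module (getRootObject, callRemote, Failure.trap).  The
+# helper name _exampleRoundTrip is made up for this example.
+def _exampleRoundTrip(clientFactory):
+    d = clientFactory.getRootObject()
+    def gotRoot(root):
+        # Call one of SimpleRoot's remote_* methods; the exception it raises
+        # arrives here as the errback of the returned Deferred.
+        return root.callRemote('synchronousError')
+    d.addCallback(gotRoot)
+    # trap() re-raises anything other than the expected error type.
+    d.addErrback(lambda err: err.trap(SynchronousError))
+    return d
+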
+
+class PBFailureTest(PBConnTestCase):
+    compare = unittest.TestCase.assertEqual
+
+
+    def _exceptionTest(self, method, exceptionType, flush):
+        def eb(err):
+            err.trap(exceptionType)
+            self.compare(err.traceback, "Traceback unavailable\n")
+            if flush:
+                errs = self.flushLoggedErrors(exceptionType)
+                self.assertEqual(len(errs), 1)
+            return (err.type, err.value, err.traceback)
+        d = self.clientFactory.getRootObject()
+        def gotRootObject(root):
+            d = root.callRemote(method)
+            d.addErrback(eb)
+            return d
+        d.addCallback(gotRootObject)
+        return d
+
+
+    def test_asynchronousException(self):
+        """
+        Test that a Deferred returned by a remote method which already has a
+        Failure correctly has that error passed back to the calling side.
+        """
+        return self._exceptionTest(
+            'asynchronousException', AsynchronousException, True)
+
+
+    def test_synchronousException(self):
+        """
+        Like L{test_asynchronousException}, but for a method which raises an
+        exception synchronously.
+        """
+        return self._exceptionTest(
+            'synchronousException', SynchronousException, True)
+
+
+    def test_asynchronousError(self):
+        """
+        Like L{test_asynchronousException}, but for a method which returns a
+        Deferred failing with an L{pb.Error} subclass.
+        """
+        return self._exceptionTest(
+            'asynchronousError', AsynchronousError, False)
+
+
+    def test_synchronousError(self):
+        """
+        Like L{test_asynchronousError}, but for a method which synchronously
+        raises a L{pb.Error} subclass.
+        """
+        return self._exceptionTest(
+            'synchronousError', SynchronousError, False)
+
+
+    def _success(self, result, expectedResult):
+        self.assertEqual(result, expectedResult)
+        return result
+
+
+    def _addFailingCallbacks(self, remoteCall, expectedResult, eb):
+        remoteCall.addCallbacks(self._success, eb,
+                                callbackArgs=(expectedResult,))
+        return remoteCall
+
+
+    def _testImpl(self, method, expected, eb, exc=None):
+        """
+        Call the given remote method and attach the given errback to the
+        resulting Deferred.  If C{exc} is not None, also assert that one
+        exception of that type was logged.
+        """
+        rootDeferred = self.clientFactory.getRootObject()
+        def gotRootObj(obj):
+            failureDeferred = self._addFailingCallbacks(obj.callRemote(method), expected, eb)
+            if exc is not None:
+                def gotFailure(err):
+                    self.assertEqual(len(self.flushLoggedErrors(exc)), 1)
+                    return err
+                failureDeferred.addBoth(gotFailure)
+            return failureDeferred
+        rootDeferred.addCallback(gotRootObj)
+        return rootDeferred
+
+
+    def test_jellyFailure(self):
+        """
+        Test that an exception which is a subclass of L{pb.Error} has more
+        information passed across the network to the calling side.
+        """
+        def failureJelly(fail):
+            fail.trap(JellyError)
+            self.failIf(isinstance(fail.type, str))
+            self.failUnless(isinstance(fail.value, fail.type))
+            return 43
+        return self._testImpl('jelly', 43, failureJelly)
+
+
+    def test_deferredJellyFailure(self):
+        """
+        Test that a Deferred which fails with a L{pb.Error} is treated in
+        the same way as a synchronously raised L{pb.Error}.
+        """
+        def failureDeferredJelly(fail):
+            fail.trap(JellyError)
+            self.failIf(isinstance(fail.type, str))
+            self.failUnless(isinstance(fail.value, fail.type))
+            return 430
+        return self._testImpl('deferredJelly', 430, failureDeferredJelly)
+
+
+    def test_unjellyableFailure(self):
+        """
+        A non-jellyable L{pb.Error} subclass raised by a remote method is
+        turned into a Failure with a type set to the FQPN of the exception
+        type.
+        """
+        def failureUnjellyable(fail):
+            self.assertEqual(
+                fail.type, 'twisted.test.test_pbfailure.SynchronousError')
+            return 431
+        return self._testImpl('synchronousError', 431, failureUnjellyable)
+
+
+    def test_unknownFailure(self):
+        """
+        Test that an exception which is a subclass of L{pb.Error} but not
+        known on the client side has its type set properly.
+        """
+        def failureUnknown(fail):
+            self.assertEqual(
+                fail.type, 'twisted.test.test_pbfailure.UnknownError')
+            return 4310
+        return self._testImpl('unknownError', 4310, failureUnknown)
+
+
+    def test_securityFailure(self):
+        """
+        Test that even if an exception is not explicitly jellyable (i.e. not
+        a L{pb.Jellyable} subclass), as long as it is an L{pb.Error}
+        subclass it receives the same special treatment.
+        """
+        def failureSecurity(fail):
+            fail.trap(SecurityError)
+            self.failIf(isinstance(fail.type, str))
+            self.failUnless(isinstance(fail.value, fail.type))
+            return 4300
+        return self._testImpl('security', 4300, failureSecurity)
+
+
+    def test_deferredSecurity(self):
+        """
+        Test that a Deferred which fails with a L{pb.Error} which is not
+        also a L{pb.Jellyable} is treated in the same way as a synchronously
+        raised exception of the same type.
+        """
+        def failureDeferredSecurity(fail):
+            fail.trap(SecurityError)
+            self.failIf(isinstance(fail.type, str))
+            self.failUnless(isinstance(fail.value, fail.type))
+            return 43000
+        return self._testImpl('deferredSecurity', 43000, failureDeferredSecurity)
+
+
+    def test_noSuchMethodFailure(self):
+        """
+        Test that attempting to call a method which is not defined results in
+        an AttributeError on the calling side, as it should.
+        """
+        def failureNoSuch(fail):
+            fail.trap(pb.NoSuchMethod)
+            self.compare(fail.traceback, "Traceback unavailable\n")
+            return 42000
+        return self._testImpl('nosuch', 42000, failureNoSuch, AttributeError)
+
+
+    def test_copiedFailureLogging(self):
+        """
+        Test that a copied failure received from a PB call can be logged
+        locally.
+
+        Note: this test needs some serious help: all it really tests is that
+        log.err(copiedFailure) doesn't raise an exception.
+        """
+        d = self.clientFactory.getRootObject()
+
+        def connected(rootObj):
+            return rootObj.callRemote('synchronousException')
+        d.addCallback(connected)
+
+        def exception(failure):
+            log.err(failure)
+            errs = self.flushLoggedErrors(SynchronousException)
+            self.assertEqual(len(errs), 2)
+        d.addErrback(exception)
+
+        return d
+
+
+    def test_throwExceptionIntoGenerator(self):
+        """
+        L{pb.CopiedFailure.throwExceptionIntoGenerator} will throw a
+        L{RemoteError} into the given paused generator at the point where it
+        last yielded.
+        """
+        original = pb.CopyableFailure(AttributeError("foo"))
+        copy = jelly.unjelly(jelly.jelly(original, invoker=DummyInvoker()))
+        exception = []
+        def generatorFunc():
+            try:
+                yield None
+            except pb.RemoteError, exc:
+                exception.append(exc)
+            else:
+                self.fail("RemoteError not raised")
+        gen = generatorFunc()
+        gen.send(None)
+        self.assertRaises(StopIteration, copy.throwExceptionIntoGenerator, gen)
+        self.assertEqual(len(exception), 1)
+        exc = exception[0]
+        self.assertEqual(exc.remoteType, "exceptions.AttributeError")
+        self.assertEqual(exc.args, ("foo",))
+        self.assertEqual(exc.remoteTraceback, 'Traceback unavailable\n')
+
+    if sys.version_info[:2] < (2, 5):
+        test_throwExceptionIntoGenerator.skip = (
+            "throwExceptionIntoGenerator is not supported in Python < 2.5")
+
+
+
+class PBFailureTestUnsafe(PBFailureTest):
+    compare = unittest.TestCase.failIfEquals
+    unsafeTracebacks = 1
+
+
+
+class DummyInvoker(object):
+    """
+    A behaviorless object to be used as the invoker parameter to
+    L{jelly.jelly}.
+    """
+    serializingPerspective = None
+
+
+
+class FailureJellyingTests(unittest.TestCase):
+    """
+    Tests for the interaction of jelly and failures.
+    """
+    def test_unjelliedFailureCheck(self):
+        """
+        An unjellied L{CopyableFailure} has a check method which behaves the
+        same way as the original L{CopyableFailure}'s check method.
+        """
+        original = pb.CopyableFailure(ZeroDivisionError())
+        self.assertIdentical(
+            original.check(ZeroDivisionError), ZeroDivisionError)
+        self.assertIdentical(original.check(ArithmeticError), ArithmeticError)
+        copied = jelly.unjelly(jelly.jelly(original, invoker=DummyInvoker()))
+        self.assertIdentical(
+            copied.check(ZeroDivisionError), ZeroDivisionError)
+        self.assertIdentical(copied.check(ArithmeticError), ArithmeticError)
+
+
+    def test_twiceUnjelliedFailureCheck(self):
+        """
+        The object which results from jellying a L{CopyableFailure}, unjellying
+        the result, creating a new L{CopyableFailure} from the result of that,
+        jellying it, and finally unjellying the result of that has a check
+        method which behaves the same way as the original L{CopyableFailure}'s
+        check method.
+        """
+        original = pb.CopyableFailure(ZeroDivisionError())
+        self.assertIdentical(
+            original.check(ZeroDivisionError), ZeroDivisionError)
+        self.assertIdentical(original.check(ArithmeticError), ArithmeticError)
+        copiedOnce = jelly.unjelly(
+            jelly.jelly(original, invoker=DummyInvoker()))
+        derivative = pb.CopyableFailure(copiedOnce)
+        copiedTwice = jelly.unjelly(
+            jelly.jelly(derivative, invoker=DummyInvoker()))
+        self.assertIdentical(
+            copiedTwice.check(ZeroDivisionError), ZeroDivisionError)
+        self.assertIdentical(
+            copiedTwice.check(ArithmeticError), ArithmeticError)
+
+
+    def test_printTracebackIncludesValue(self):
+        """
+        When L{CopiedFailure.printTraceback} is used to print a copied failure
+        which was unjellied from a L{CopyableFailure} with C{unsafeTracebacks}
+        set to C{False}, the string representation of the exception value is
+        included in the output.
+        """
+        original = pb.CopyableFailure(Exception("some reason"))
+        copied = jelly.unjelly(jelly.jelly(original, invoker=DummyInvoker()))
+        output = StringIO()
+        copied.printTraceback(output)
+        self.assertEqual(
+            "Traceback from remote host -- Traceback unavailable\n"
+            "exceptions.Exception: some reason\n",
+            output.getvalue())
+
diff --git a/ThirdParty/Twisted/twisted/test/test_pcp.py b/ThirdParty/Twisted/twisted/test/test_pcp.py
new file mode 100644
index 0000000..71de8bb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_pcp.py
@@ -0,0 +1,368 @@
+# -*- Python -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+__version__ = '$Revision: 1.5 $'[11:-2]
+
+from StringIO import StringIO
+from twisted.trial import unittest
+from twisted.protocols import pcp
+
+# Goal:
+
+# Take a Protocol instance.  Own all outgoing data - anything that
+# would go to p.transport.write.  Own all incoming data - anything
+# that comes to p.dataReceived.
+
+# I need:
+# Something with the AbstractFileDescriptor interface.
+# That is:
+#  - acts as a Transport
+#    - has a method write()
+#    - which buffers
+#  - acts as a Consumer
+#    - has a registerProducer, unRegisterProducer
+#    - tells the Producer to back off (pauseProducing) when its buffer is full.
+#    - tells the Producer to resumeProducing when its buffer is not so full.
+#  - acts as a Producer
+#    - calls registerProducer
+#    - calls write() on consumers
+#    - honors requests to pause/resume producing
+#    - honors stopProducing, and passes it along to upstream Producers
+
+
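+# Illustrative sketch, not part of the upstream test suite: wiring a producer
+# and a consumer through pcp.BasicProducerConsumerProxy, using only operations
+# exercised by the tests below.  The Dummy* helpers are defined just below, so
+# the names only need to resolve when the function is actually called.  The
+# name _exampleProxyWiring is made up for this example.
+def _exampleProxyWiring():
+    consumer = DummyConsumer()                        # downstream transport/consumer
+    proxy = pcp.BasicProducerConsumerProxy(consumer)  # registers itself with consumer
+    producer = DummyProducer(proxy)                   # upstream data source
+    proxy.registerProducer(producer, True)            # proxy consumes from the producer
+    proxy.write("some bytes")                         # passes through while unpaused
+    proxy.pauseProducing()                            # downstream applies back-pressure
+    proxy.resumeProducing()                           # ...and later releases it
+    return consumer.getvalue()                        # should be "some bytes"
+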
+class DummyTransport:
+    """A dumb transport to wrap around."""
+
+    def __init__(self):
+        self._writes = []
+
+    def write(self, data):
+        self._writes.append(data)
+
+    def getvalue(self):
+        return ''.join(self._writes)
+
+class DummyProducer:
+    resumed = False
+    stopped = False
+    paused = False
+
+    def __init__(self, consumer):
+        self.consumer = consumer
+
+    def resumeProducing(self):
+        self.resumed = True
+        self.paused = False
+
+    def pauseProducing(self):
+        self.paused = True
+
+    def stopProducing(self):
+        self.stopped = True
+
+
+class DummyConsumer(DummyTransport):
+    producer = None
+    finished = False
+    unregistered = True
+
+    def registerProducer(self, producer, streaming):
+        self.producer = (producer, streaming)
+
+    def unregisterProducer(self):
+        self.unregistered = True
+
+    def finish(self):
+        self.finished = True
+
+class TransportInterfaceTest(unittest.TestCase):
+    proxyClass = pcp.BasicProducerConsumerProxy
+
+    def setUp(self):
+        self.underlying = DummyConsumer()
+        self.transport = self.proxyClass(self.underlying)
+
+    def testWrite(self):
+        self.transport.write("some bytes")
+
+class ConsumerInterfaceTest:
+    """Test ProducerConsumerProxy as a Consumer.
+
+    Normally we have ProducingServer -> ConsumingTransport.
+
+    If I am to go between (Server -> Shaper -> Transport), I have to
+    play the role of Consumer convincingly for the ProducingServer.
+    """
+
+    def setUp(self):
+        self.underlying = DummyConsumer()
+        self.consumer = self.proxyClass(self.underlying)
+        self.producer = DummyProducer(self.consumer)
+
+    def testRegisterPush(self):
+        self.consumer.registerProducer(self.producer, True)
+        ## Consumer should NOT have called PushProducer.resumeProducing
+        self.failIf(self.producer.resumed)
+
+    ## If I'm just a proxy, should I only do resumeProducing when
+    ## I get poked myself?
+    #def testRegisterPull(self):
+    #    self.consumer.registerProducer(self.producer, False)
+    #    ## Consumer SHOULD have called PushProducer.resumeProducing
+    #    self.failUnless(self.producer.resumed)
+
+    def testUnregister(self):
+        self.consumer.registerProducer(self.producer, False)
+        self.consumer.unregisterProducer()
+        # Now when the consumer would ordinarily want more data, it
+        # shouldn't ask producer for it.
+        # The most succinct way to trigger "want more data" is to proxy for
+        # a PullProducer and have someone ask me for data.
+        self.producer.resumed = False
+        self.consumer.resumeProducing()
+        self.failIf(self.producer.resumed)
+
+    def testFinish(self):
+        self.consumer.registerProducer(self.producer, False)
+        self.consumer.finish()
+        # I guess finish should behave like unregister?
+        self.producer.resumed = False
+        self.consumer.resumeProducing()
+        self.failIf(self.producer.resumed)
+
+
+class ProducerInterfaceTest:
+    """Test ProducerConsumerProxy as a Producer.
+
+    Normally we have ProducingServer -> ConsumingTransport.
+
+    If I am to go between (Server -> Shaper -> Transport), I have to
+    play the role of Producer convincingly for the ConsumingTransport.
+    """
+
+    def setUp(self):
+        self.consumer = DummyConsumer()
+        self.producer = self.proxyClass(self.consumer)
+
+    def testRegistersProducer(self):
+        self.assertEqual(self.consumer.producer[0], self.producer)
+
+    def testPause(self):
+        self.producer.pauseProducing()
+        self.producer.write("yakkity yak")
+        self.failIf(self.consumer.getvalue(),
+                    "Paused producer should not have sent data.")
+
+    def testResume(self):
+        self.producer.pauseProducing()
+        self.producer.resumeProducing()
+        self.producer.write("yakkity yak")
+        self.assertEqual(self.consumer.getvalue(), "yakkity yak")
+
+    def testResumeNoEmptyWrite(self):
+        self.producer.pauseProducing()
+        self.producer.resumeProducing()
+        self.assertEqual(len(self.consumer._writes), 0,
+                             "Resume triggered an empty write.")
+
+    def testResumeBuffer(self):
+        self.producer.pauseProducing()
+        self.producer.write("buffer this")
+        self.producer.resumeProducing()
+        self.assertEqual(self.consumer.getvalue(), "buffer this")
+
+    def testStop(self):
+        self.producer.stopProducing()
+        self.producer.write("yakkity yak")
+        self.failIf(self.consumer.getvalue(),
+                    "Stopped producer should not have sent data.")
+
+
+class PCP_ConsumerInterfaceTest(ConsumerInterfaceTest, unittest.TestCase):
+    proxyClass = pcp.BasicProducerConsumerProxy
+
+class PCPII_ConsumerInterfaceTest(ConsumerInterfaceTest, unittest.TestCase):
+    proxyClass = pcp.ProducerConsumerProxy
+
+class PCP_ProducerInterfaceTest(ProducerInterfaceTest, unittest.TestCase):
+    proxyClass = pcp.BasicProducerConsumerProxy
+
+class PCPII_ProducerInterfaceTest(ProducerInterfaceTest, unittest.TestCase):
+    proxyClass = pcp.ProducerConsumerProxy
+
+class ProducerProxyTest(unittest.TestCase):
+    """Producer methods on me should be relayed to the Producer I proxy.
+    """
+    proxyClass = pcp.BasicProducerConsumerProxy
+
+    def setUp(self):
+        self.proxy = self.proxyClass(None)
+        self.parentProducer = DummyProducer(self.proxy)
+        self.proxy.registerProducer(self.parentProducer, True)
+
+    def testStop(self):
+        self.proxy.stopProducing()
+        self.failUnless(self.parentProducer.stopped)
+
+
+class ConsumerProxyTest(unittest.TestCase):
+    """Consumer methods on me should be relayed to the Consumer I proxy.
+    """
+    proxyClass = pcp.BasicProducerConsumerProxy
+
+    def setUp(self):
+        self.underlying = DummyConsumer()
+        self.consumer = self.proxyClass(self.underlying)
+
+    def testWrite(self):
+        # NOTE: This test is only valid for streaming (Push) systems.
+        self.consumer.write("some bytes")
+        self.assertEqual(self.underlying.getvalue(), "some bytes")
+
+    def testFinish(self):
+        self.consumer.finish()
+        self.failUnless(self.underlying.finished)
+
+    def testUnregister(self):
+        self.consumer.unregisterProducer()
+        self.failUnless(self.underlying.unregistered)
+
+
+class PullProducerTest:
+    def setUp(self):
+        self.underlying = DummyConsumer()
+        self.proxy = self.proxyClass(self.underlying)
+        self.parentProducer = DummyProducer(self.proxy)
+        self.proxy.registerProducer(self.parentProducer, True)
+
+    def testHoldWrites(self):
+        self.proxy.write("hello")
+        # Consumer should get no data before it says resumeProducing.
+        self.failIf(self.underlying.getvalue(),
+                    "Pulling Consumer got data before it pulled.")
+
+    def testPull(self):
+        self.proxy.write("hello")
+        self.proxy.resumeProducing()
+        self.assertEqual(self.underlying.getvalue(), "hello")
+
+    def testMergeWrites(self):
+        self.proxy.write("hello ")
+        self.proxy.write("sunshine")
+        self.proxy.resumeProducing()
+        nwrites = len(self.underlying._writes)
+        self.assertEqual(nwrites, 1, "Pull resulted in %d writes instead "
+                             "of 1." % (nwrites,))
+        self.assertEqual(self.underlying.getvalue(), "hello sunshine")
+
+
+    def testLateWrite(self):
+        # consumer sends its initial pull before we have data
+        self.proxy.resumeProducing()
+        self.proxy.write("data")
+        # This data should answer that pull request.
+        self.assertEqual(self.underlying.getvalue(), "data")
+
+class PCP_PullProducerTest(PullProducerTest, unittest.TestCase):
+    class proxyClass(pcp.BasicProducerConsumerProxy):
+        iAmStreaming = False
+
+class PCPII_PullProducerTest(PullProducerTest, unittest.TestCase):
+    class proxyClass(pcp.ProducerConsumerProxy):
+        iAmStreaming = False
+
+# Buffering!
+
+class BufferedConsumerTest(unittest.TestCase):
+    """As a consumer, ask the producer to pause after too much data."""
+
+    proxyClass = pcp.ProducerConsumerProxy
+
+    def setUp(self):
+        self.underlying = DummyConsumer()
+        self.proxy = self.proxyClass(self.underlying)
+        self.proxy.bufferSize = 100
+
+        self.parentProducer = DummyProducer(self.proxy)
+        self.proxy.registerProducer(self.parentProducer, True)
+
+    def testRegisterPull(self):
+        self.proxy.registerProducer(self.parentProducer, False)
+        ## Consumer SHOULD have called PushProducer.resumeProducing
+        self.failUnless(self.parentProducer.resumed)
+
+    def testPauseIntercept(self):
+        self.proxy.pauseProducing()
+        self.failIf(self.parentProducer.paused)
+
+    def testResumeIntercept(self):
+        self.proxy.pauseProducing()
+        self.proxy.resumeProducing()
+        # With a streaming producer, just because the proxy was resumed is
+        # not necessarily a reason to resume the parent producer.  The state
+        # of the buffer should decide that.
+        self.failIf(self.parentProducer.resumed)
+
+    def testTriggerPause(self):
+        """Make sure I say \"when.\""""
+
+        # Pause the proxy so data sent to it builds up in its buffer.
+        self.proxy.pauseProducing()
+        self.failIf(self.parentProducer.paused, "don't pause yet")
+        self.proxy.write("x" * 51)
+        self.failIf(self.parentProducer.paused, "don't pause yet")
+        self.proxy.write("x" * 51)
+        self.failUnless(self.parentProducer.paused)
+
+    def testTriggerResume(self):
+        """Make sure I resumeProducing when my buffer empties."""
+        self.proxy.pauseProducing()
+        self.proxy.write("x" * 102)
+        self.failUnless(self.parentProducer.paused, "should be paused")
+        self.proxy.resumeProducing()
+        # Resuming should have emptied my buffer, so I should tell my
+        # parent to resume too.
+        self.failIf(self.parentProducer.paused,
+                    "Producer should have resumed.")
+        self.failIf(self.proxy.producerPaused)
+
+class BufferedPullTests(unittest.TestCase):
+    class proxyClass(pcp.ProducerConsumerProxy):
+        iAmStreaming = False
+
+        def _writeSomeData(self, data):
+            pcp.ProducerConsumerProxy._writeSomeData(self, data[:100])
+            return min(len(data), 100)
+
+    def setUp(self):
+        self.underlying = DummyConsumer()
+        self.proxy = self.proxyClass(self.underlying)
+        self.proxy.bufferSize = 100
+
+        self.parentProducer = DummyProducer(self.proxy)
+        self.proxy.registerProducer(self.parentProducer, False)
+
+    def testResumePull(self):
+        # If proxy has no data to send on resumeProducing, it had better pull
+        # some from its PullProducer.
+        self.parentProducer.resumed = False
+        self.proxy.resumeProducing()
+        self.failUnless(self.parentProducer.resumed)
+
+    def testLateWriteBuffering(self):
+        # consumer sends its initial pull before we have data
+        self.proxy.resumeProducing()
+        self.proxy.write("datum" * 21)
+        # This data should answer that pull request.
+        self.assertEqual(self.underlying.getvalue(), "datum" * 20)
+        # but there should be some left over
+        self.assertEqual(self.proxy._buffer, ["datum"])
+
+
+# TODO:
+#  test the web-request-finishing bug (when we weren't proxying
+#    unregisterProducer but were proxying finish, web file transfers
+#    would hang on the last block).
+#  test what happens if writeSomeBytes decides to write zero bytes.
diff --git a/ThirdParty/Twisted/twisted/test/test_persisted.py b/ThirdParty/Twisted/twisted/test/test_persisted.py
new file mode 100644
index 0000000..4a80791
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_persisted.py
@@ -0,0 +1,377 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+# System Imports
+import sys
+
+from twisted.trial import unittest
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+
+# Twisted Imports
+from twisted.persisted import styles, aot, crefutil
+
+
+class VersionTestCase(unittest.TestCase):
+    def testNullVersionUpgrade(self):
+        global NullVersioned
+        class NullVersioned:
+            ok = 0
+        pkcl = pickle.dumps(NullVersioned())
+        class NullVersioned(styles.Versioned):
+            persistenceVersion = 1
+            def upgradeToVersion1(self):
+                self.ok = 1
+        mnv = pickle.loads(pkcl)
+        styles.doUpgrade()
+        assert mnv.ok, "initial upgrade not run!"
+
+    def testVersionUpgrade(self):
+        global MyVersioned
+        class MyVersioned(styles.Versioned):
+            persistenceVersion = 2
+            persistenceForgets = ['garbagedata']
+            v3 = 0
+            v4 = 0
+
+            def __init__(self):
+                self.somedata = 'xxx'
+                self.garbagedata = lambda q: 'cant persist'
+
+            def upgradeToVersion3(self):
+                self.v3 += 1
+
+            def upgradeToVersion4(self):
+                self.v4 += 1
+        mv = MyVersioned()
+        assert not (mv.v3 or mv.v4), "hasn't been upgraded yet"
+        pickl = pickle.dumps(mv)
+        MyVersioned.persistenceVersion = 4
+        obj = pickle.loads(pickl)
+        styles.doUpgrade()
+        assert obj.v3, "didn't do version 3 upgrade"
+        assert obj.v4, "didn't do version 4 upgrade"
+        pickl = pickle.dumps(obj)
+        obj = pickle.loads(pickl)
+        styles.doUpgrade()
+        assert obj.v3 == 1, "upgraded unnecessarily"
+        assert obj.v4 == 1, "upgraded unnecessarily"
+    
+    def testNonIdentityHash(self):
+        global ClassWithCustomHash
+        class ClassWithCustomHash(styles.Versioned):
+            def __init__(self, unique, hash):
+                self.unique = unique
+                self.hash = hash
+            def __hash__(self):
+                return self.hash
+        
+        v1 = ClassWithCustomHash('v1', 0)
+        v2 = ClassWithCustomHash('v2', 0)
+
+        pkl = pickle.dumps((v1, v2))
+        del v1, v2
+        ClassWithCustomHash.persistenceVersion = 1
+        ClassWithCustomHash.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
+        v1, v2 = pickle.loads(pkl)
+        styles.doUpgrade()
+        self.assertEqual(v1.unique, 'v1')
+        self.assertEqual(v2.unique, 'v2')
+        self.failUnless(v1.upgraded)
+        self.failUnless(v2.upgraded)
+    
+    def testUpgradeDeserializesObjectsRequiringUpgrade(self):
+        global ToyClassA, ToyClassB
+        class ToyClassA(styles.Versioned):
+            pass
+        class ToyClassB(styles.Versioned):
+            pass
+        x = ToyClassA()
+        y = ToyClassB()
+        pklA, pklB = pickle.dumps(x), pickle.dumps(y)
+        del x, y
+        ToyClassA.persistenceVersion = 1
+        def upgradeToVersion1(self):
+            self.y = pickle.loads(pklB)
+            styles.doUpgrade()
+        ToyClassA.upgradeToVersion1 = upgradeToVersion1
+        ToyClassB.persistenceVersion = 1
+        ToyClassB.upgradeToVersion1 = lambda self: setattr(self, 'upgraded', True)
+
+        x = pickle.loads(pklA)
+        styles.doUpgrade()
+        self.failUnless(x.y.upgraded)
+
+
+
+class VersionedSubClass(styles.Versioned):
+    pass
+
+
+
+class SecondVersionedSubClass(styles.Versioned):
+    pass
+
+
+
+class VersionedSubSubClass(VersionedSubClass):
+    pass
+
+
+
+class VersionedDiamondSubClass(VersionedSubSubClass, SecondVersionedSubClass):
+    pass
+
+
+
+class AybabtuTests(unittest.TestCase):
+    """
+    L{styles._aybabtu} gets all of the classes in the inheritance hierarchy of its
+    argument that are strictly between L{Versioned} and the class itself.
+    """
+
+    def test_aybabtuStrictEmpty(self):
+        """
+        L{styles._aybabtu} of L{Versioned} itself is an empty list.
+        """
+        self.assertEqual(styles._aybabtu(styles.Versioned), [])
+
+
+    def test_aybabtuStrictSubclass(self):
+        """
+        There are no classes I{between} L{VersionedSubClass} and L{Versioned},
+        so L{styles._aybabtu} returns an empty list.
+        """
+        self.assertEqual(styles._aybabtu(VersionedSubClass), [])
+
+
+    def test_aybabtuSubsubclass(self):
+        """
+        With a sub-sub-class of L{Versioned}, L{styles._aybabtu} returns a list
+        containing the intervening subclass.
+        """
+        self.assertEqual(styles._aybabtu(VersionedSubSubClass),
+                         [VersionedSubClass])
+
+
+    def test_aybabtuStrict(self):
+        """
+        For a diamond-shaped inheritance graph, L{styles._aybabtu} returns a
+        list containing I{both} intermediate subclasses.
+        """
+        self.assertEqual(
+            styles._aybabtu(VersionedDiamondSubClass),
+            [VersionedSubSubClass, VersionedSubClass, SecondVersionedSubClass])
+
+
+
+class MyEphemeral(styles.Ephemeral):
+
+    def __init__(self, x):
+        self.x = x
+
+
+class EphemeralTestCase(unittest.TestCase):
+
+    def testEphemeral(self):
+        o = MyEphemeral(3)
+        self.assertEqual(o.__class__, MyEphemeral)
+        self.assertEqual(o.x, 3)
+        
+        pickl = pickle.dumps(o)
+        o = pickle.loads(pickl)
+        
+        self.assertEqual(o.__class__, styles.Ephemeral)
+        self.assert_(not hasattr(o, 'x'))
+
+
+class Pickleable:
+
+    def __init__(self, x):
+        self.x = x
+    
+    def getX(self):
+        return self.x
+
+class A:
+    """
+    dummy class
+    """
+    def amethod(self):
+        pass
+
+class B:
+    """
+    dummy class
+    """
+    def bmethod(self):
+        pass
+
+def funktion():
+    pass
+
+class PicklingTestCase(unittest.TestCase):
+    """Test pickling of extra object types."""
+    
+    def testModule(self):
+        pickl = pickle.dumps(styles)
+        o = pickle.loads(pickl)
+        self.assertEqual(o, styles)
+    
+    def testClassMethod(self):
+        pickl = pickle.dumps(Pickleable.getX)
+        o = pickle.loads(pickl)
+        self.assertEqual(o, Pickleable.getX)
+    
+    def testInstanceMethod(self):
+        obj = Pickleable(4)
+        pickl = pickle.dumps(obj.getX)
+        o = pickle.loads(pickl)
+        self.assertEqual(o(), 4)
+        self.assertEqual(type(o), type(obj.getX))
+    
+    def testStringIO(self):
+        f = StringIO.StringIO()
+        f.write("abc")
+        pickl = pickle.dumps(f)
+        o = pickle.loads(pickl)
+        self.assertEqual(type(o), type(f))
+        self.assertEqual(f.getvalue(), "abc")
+
+
+class EvilSourceror:
+    def __init__(self, x):
+        self.a = self
+        self.a.b = self
+        self.a.b.c = x
+
+class NonDictState:
+    def __getstate__(self):
+        return self.state
+    def __setstate__(self, state):
+        self.state = state
+
+class AOTTestCase(unittest.TestCase):
+    def testSimpleTypes(self):
+        obj = (1, 2.0, 3j, True, slice(1, 2, 3), 'hello', u'world', sys.maxint + 1, None, Ellipsis)
+        rtObj = aot.unjellyFromSource(aot.jellyToSource(obj))
+        self.assertEqual(obj, rtObj)
+
+    def testMethodSelfIdentity(self):
+        a = A()
+        b = B()
+        a.bmethod = b.bmethod
+        b.a = a
+        im_ = aot.unjellyFromSource(aot.jellyToSource(b)).a.bmethod
+        self.assertEqual(im_.im_class, im_.im_self.__class__)
+
+
+    def test_methodNotSelfIdentity(self):
+        """
+        If a class changes after an instance has been created,
+        L{aot.unjellyFromSource} should raise a C{TypeError} when trying to
+        unjelly the instance.
+        """
+        a = A()
+        b = B()
+        a.bmethod = b.bmethod
+        b.a = a
+        savedbmethod = B.bmethod
+        del B.bmethod
+        try:
+            self.assertRaises(TypeError, aot.unjellyFromSource,
+                              aot.jellyToSource(b))
+        finally:
+            B.bmethod = savedbmethod
+
+
+    def test_unsupportedType(self):
+        """
+        L{aot.jellyToSource} should raise a C{TypeError} when trying to jelly
+        an unknown type.
+        """
+        try:
+            set
+        except NameError:
+            from sets import Set as set
+        self.assertRaises(TypeError, aot.jellyToSource, set())
+
+
+    def testBasicIdentity(self):
+        # Anyone wanting to make this datastructure more complex, and thus this
+        # test more comprehensive, is welcome to do so.
+        aj = aot.AOTJellier().jellyToAO
+        d = {'hello': 'world', "method": aj}
+        l = [1, 2, 3,
+             "he\tllo\n\n\"x world!",
+             u"goodbye \n\t\u1010 world!",
+             1, 1.0, 100 ** 100l, unittest, aot.AOTJellier, d,
+             funktion
+             ]
+        t = tuple(l)
+        l.append(l)
+        l.append(t)
+        l.append(t)
+        uj = aot.unjellyFromSource(aot.jellyToSource([l, l]))
+        assert uj[0] is uj[1]
+        assert uj[1][0:5] == l[0:5]
+
+
+    def testNonDictState(self):
+        a = NonDictState()
+        a.state = "meringue!"
+        assert aot.unjellyFromSource(aot.jellyToSource(a)).state == a.state
+
+    def testCopyReg(self):
+        s = "foo_bar"
+        sio = StringIO.StringIO()
+        sio.write(s)
+        uj = aot.unjellyFromSource(aot.jellyToSource(sio))
+        # print repr(uj.__dict__)
+        assert uj.getvalue() == s
+
+    def testFunkyReferences(self):
+        o = EvilSourceror(EvilSourceror([]))
+        j1 = aot.jellyToAOT(o)
+        oj = aot.unjellyFromAOT(j1)
+
+        assert oj.a is oj
+        assert oj.a.b is oj.b
+        assert oj.c is not oj.c.c
+
+
+class CrefUtilTestCase(unittest.TestCase):
+    """
+    Tests for L{crefutil}.
+    """
+
+    def test_dictUnknownKey(self):
+        """
+        L{crefutil._DictKeyAndValue} only supports keys C{0} and C{1}.
+        """
+        d = crefutil._DictKeyAndValue({})
+        self.assertRaises(RuntimeError, d.__setitem__, 2, 3)
+
+
+    def test_deferSetMultipleTimes(self):
+        """
+        L{crefutil._Defer} can be assigned a key only one time.
+        """
+        d = crefutil._Defer()
+        d[0] = 1
+        self.assertRaises(RuntimeError, d.__setitem__, 0, 1)
+
+
+
+testCases = [VersionTestCase, EphemeralTestCase, PicklingTestCase]
+
diff --git a/ThirdParty/Twisted/twisted/test/test_plugin.py b/ThirdParty/Twisted/twisted/test/test_plugin.py
new file mode 100644
index 0000000..c33ecf1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_plugin.py
@@ -0,0 +1,719 @@
+# Copyright (c) 2005 Divmod, Inc.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for Twisted plugin system.
+"""
+
+import sys, errno, os, time
+import compileall
+
+from zope.interface import Interface
+
+from twisted.trial import unittest
+from twisted.python.log import textFromEventDict, addObserver, removeObserver
+from twisted.python.filepath import FilePath
+from twisted.python.util import mergeFunctionMetadata
+
+from twisted import plugin
+
+
+
+class ITestPlugin(Interface):
+    """
+    A plugin for use by the plugin system's unit tests.
+
+    Do not use this.
+    """
+
+
+
+class ITestPlugin2(Interface):
+    """
+    See L{ITestPlugin}.
+    """
+
+
+
+class PluginTestCase(unittest.TestCase):
+    """
+    Tests which verify the behavior of the current, active Twisted plugins
+    directory.
+    """
+
+    def setUp(self):
+        """
+        Save C{sys.path} and C{sys.modules}, and create a package for tests.
+        """
+        self.originalPath = sys.path[:]
+        self.savedModules = sys.modules.copy()
+
+        self.root = FilePath(self.mktemp())
+        self.root.createDirectory()
+        self.package = self.root.child('mypackage')
+        self.package.createDirectory()
+        self.package.child('__init__.py').setContent("")
+
+        FilePath(__file__).sibling('plugin_basic.py'
+            ).copyTo(self.package.child('testplugin.py'))
+
+        self.originalPlugin = "testplugin"
+
+        sys.path.insert(0, self.root.path)
+        import mypackage
+        self.module = mypackage
+
+
+    def tearDown(self):
+        """
+        Restore C{sys.path} and C{sys.modules} to their original values.
+        """
+        sys.path[:] = self.originalPath
+        sys.modules.clear()
+        sys.modules.update(self.savedModules)
+
+
+    def _unimportPythonModule(self, module, deleteSource=False):
+        modulePath = module.__name__.split('.')
+        packageName = '.'.join(modulePath[:-1])
+        moduleName = modulePath[-1]
+
+        delattr(sys.modules[packageName], moduleName)
+        del sys.modules[module.__name__]
+        for ext in ['c', 'o'] + (deleteSource and [''] or []):
+            try:
+                os.remove(module.__file__ + ext)
+            except OSError, ose:
+                if ose.errno != errno.ENOENT:
+                    raise
+
+
+    def _clearCache(self):
+        """
+        Remove the plugin package's B{dropin.cache} file.
+        """
+        self.package.child('dropin.cache').remove()
+
+
+    def _withCacheness(meth):
+        """
+        This is a paranoid test wrapper that calls C{meth} twice, clears the
+        cache, and then calls it twice more.  It is supposed to ensure that the
+        plugin system behaves correctly no matter what the state of the cache
+        is.
+        """
+        def wrapped(self):
+            meth(self)
+            meth(self)
+            self._clearCache()
+            meth(self)
+            meth(self)
+        return mergeFunctionMetadata(meth, wrapped)
+
+
+    def test_cache(self):
+        """
+        Check that the cache returned by L{plugin.getCache} holds the plugin
+        B{testplugin}, and that this plugin has the properties we expect: it
+        provides L{TestPlugin}, has the expected name and description, and can
+        be loaded successfully.
+        """
+        cache = plugin.getCache(self.module)
+
+        dropin = cache[self.originalPlugin]
+        self.assertEqual(dropin.moduleName,
+                          'mypackage.%s' % (self.originalPlugin,))
+        self.assertIn("I'm a test drop-in.", dropin.description)
+
+        # Note, not the preferred way to get a plugin by its interface.
+        p1 = [p for p in dropin.plugins if ITestPlugin in p.provided][0]
+        self.assertIdentical(p1.dropin, dropin)
+        self.assertEqual(p1.name, "TestPlugin")
+
+        # Check that the content of the description comes from the plugin
+        # module docstring
+        self.assertEqual(
+            p1.description.strip(),
+            "A plugin used solely for testing purposes.")
+        self.assertEqual(p1.provided, [ITestPlugin, plugin.IPlugin])
+        realPlugin = p1.load()
+        # The plugin should match the class present in sys.modules
+        self.assertIdentical(
+            realPlugin,
+            sys.modules['mypackage.%s' % (self.originalPlugin,)].TestPlugin)
+
+        # And it should also match if we import it the classic way
+        import mypackage.testplugin as tp
+        self.assertIdentical(realPlugin, tp.TestPlugin)
+
+    test_cache = _withCacheness(test_cache)
+
+
+    def test_plugins(self):
+        """
+        L{plugin.getPlugins} should return the list of plugins matching the
+        specified interface (here, L{ITestPlugin2}), and these plugins
+        should be instances of classes with a C{test} method, to be sure
+        L{plugin.getPlugins} loads classes correctly.
+        """
+        plugins = list(plugin.getPlugins(ITestPlugin2, self.module))
+
+        self.assertEqual(len(plugins), 2)
+
+        names = ['AnotherTestPlugin', 'ThirdTestPlugin']
+        for p in plugins:
+            names.remove(p.__name__)
+            p.test()
+
+    test_plugins = _withCacheness(test_plugins)
+
+
+    def test_detectNewFiles(self):
+        """
+        Check that L{plugin.getPlugins} is able to detect plugins added at
+        runtime.
+        """
+        FilePath(__file__).sibling('plugin_extra1.py'
+            ).copyTo(self.package.child('pluginextra.py'))
+        try:
+            # Check that the current situation is clean
+            self.failIfIn('mypackage.pluginextra', sys.modules)
+            self.failIf(hasattr(sys.modules['mypackage'], 'pluginextra'),
+                        "mypackage still has pluginextra module")
+
+            plgs = list(plugin.getPlugins(ITestPlugin, self.module))
+
+            # We should find 2 plugins: the one in testplugin, and the one in
+            # pluginextra
+            self.assertEqual(len(plgs), 2)
+
+            names = ['TestPlugin', 'FourthTestPlugin']
+            for p in plgs:
+                names.remove(p.__name__)
+                p.test1()
+        finally:
+            self._unimportPythonModule(
+                sys.modules['mypackage.pluginextra'],
+                True)
+
+    test_detectNewFiles = _withCacheness(test_detectNewFiles)
+
+
+    def test_detectFilesChanged(self):
+        """
+        Check that if the content of a plugin changes, L{plugin.getPlugins} is
+        able to detect the new plugins added.
+        """
+        FilePath(__file__).sibling('plugin_extra1.py'
+            ).copyTo(self.package.child('pluginextra.py'))
+        try:
+            plgs = list(plugin.getPlugins(ITestPlugin, self.module))
+            # Sanity check
+            self.assertEqual(len(plgs), 2)
+
+            FilePath(__file__).sibling('plugin_extra2.py'
+                ).copyTo(self.package.child('pluginextra.py'))
+
+            # Fake out Python.
+            self._unimportPythonModule(sys.modules['mypackage.pluginextra'])
+
+            # Make sure additions are noticed
+            plgs = list(plugin.getPlugins(ITestPlugin, self.module))
+
+            self.assertEqual(len(plgs), 3)
+
+            names = ['TestPlugin', 'FourthTestPlugin', 'FifthTestPlugin']
+            for p in plgs:
+                names.remove(p.__name__)
+                p.test1()
+        finally:
+            self._unimportPythonModule(
+                sys.modules['mypackage.pluginextra'],
+                True)
+
+    test_detectFilesChanged = _withCacheness(test_detectFilesChanged)
+
+
+    def test_detectFilesRemoved(self):
+        """
+        Check that when a dropin file is removed, L{plugin.getPlugins} doesn't
+        return it anymore.
+        """
+        FilePath(__file__).sibling('plugin_extra1.py'
+            ).copyTo(self.package.child('pluginextra.py'))
+        try:
+            # Generate a cache with pluginextra in it.
+            list(plugin.getPlugins(ITestPlugin, self.module))
+
+        finally:
+            self._unimportPythonModule(
+                sys.modules['mypackage.pluginextra'],
+                True)
+        plgs = list(plugin.getPlugins(ITestPlugin, self.module))
+        self.assertEqual(1, len(plgs))
+
+    test_detectFilesRemoved = _withCacheness(test_detectFilesRemoved)
+
+
+    def test_nonexistentPathEntry(self):
+        """
+        Test that getCache skips over any entries in a plugin package's
+        C{__path__} which do not exist.
+        """
+        path = self.mktemp()
+        self.failIf(os.path.exists(path))
+        # Add the test directory to the plugins path
+        self.module.__path__.append(path)
+        try:
+            plgs = list(plugin.getPlugins(ITestPlugin, self.module))
+            self.assertEqual(len(plgs), 1)
+        finally:
+            self.module.__path__.remove(path)
+
+    test_nonexistentPathEntry = _withCacheness(test_nonexistentPathEntry)
+
+
+    def test_nonDirectoryChildEntry(self):
+        """
+        Test that getCache skips over any entries in a plugin package's
+        C{__path__} which refer to children of paths which are not directories.
+        """
+        path = FilePath(self.mktemp())
+        self.failIf(path.exists())
+        path.touch()
+        child = path.child("test_package").path
+        self.module.__path__.append(child)
+        try:
+            plgs = list(plugin.getPlugins(ITestPlugin, self.module))
+            self.assertEqual(len(plgs), 1)
+        finally:
+            self.module.__path__.remove(child)
+
+    test_nonDirectoryChildEntry = _withCacheness(test_nonDirectoryChildEntry)
+
+
+    def test_deployedMode(self):
+        """
+        The C{dropin.cache} file may not be writable: the cache should still be
+        attainable, but an error should be logged to show that the cache
+        couldn't be updated.
+        """
+        # Generate the cache
+        plugin.getCache(self.module)
+
+        cachepath = self.package.child('dropin.cache')
+
+        # Add a new plugin
+        FilePath(__file__).sibling('plugin_extra1.py'
+            ).copyTo(self.package.child('pluginextra.py'))
+
+        os.chmod(self.package.path, 0500)
+        # Change the permissions of dropin.cache too, for Windows
+        os.chmod(cachepath.path, 0400)
+        self.addCleanup(os.chmod, self.package.path, 0700)
+        self.addCleanup(os.chmod, cachepath.path, 0700)
+
+        # Start observing log events to see the warning
+        events = []
+        addObserver(events.append)
+        self.addCleanup(removeObserver, events.append)
+
+        cache = plugin.getCache(self.module)
+        # The new plugin should be reported
+        self.assertIn('pluginextra', cache)
+        self.assertIn(self.originalPlugin, cache)
+
+        # Make sure something was logged about the cache.
+        expected = "Unable to write to plugin cache %s: error number %d" % (
+            cachepath.path, errno.EPERM)
+        for event in events:
+            if expected in textFromEventDict(event):
+                break
+        else:
+            self.fail(
+                "Did not observe unwriteable cache warning in log "
+                "events: %r" % (events,))
+
+
+
+# This is something like the Twisted plugins file.
+pluginInitFile = """
+from twisted.plugin import pluginPackagePaths
+__path__.extend(pluginPackagePaths(__name__))
+__all__ = []
+"""
+
+def pluginFileContents(name):
+    return (
+        "from zope.interface import classProvides\n"
+        "from twisted.plugin import IPlugin\n"
+        "from twisted.test.test_plugin import ITestPlugin\n"
+        "\n"
+        "class %s(object):\n"
+        "    classProvides(IPlugin, ITestPlugin)\n") % (name,)
+
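+# Illustrative only, not upstream code: rendering the template above shows the
+# shape of a generated plugin module.  The name _examplePluginSource is made up
+# for this example.
+_examplePluginSource = pluginFileContents('ExamplePlugin')
+# _examplePluginSource now contains, roughly:
+#     from zope.interface import classProvides
+#     from twisted.plugin import IPlugin
+#     from twisted.test.test_plugin import ITestPlugin
+#
+#     class ExamplePlugin(object):
+#         classProvides(IPlugin, ITestPlugin)
+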
+
+def _createPluginDummy(entrypath, pluginContent, real, pluginModule):
+    """
+    Create a plugindummy package.
+    """
+    entrypath.createDirectory()
+    pkg = entrypath.child('plugindummy')
+    pkg.createDirectory()
+    if real:
+        pkg.child('__init__.py').setContent('')
+    plugs = pkg.child('plugins')
+    plugs.createDirectory()
+    if real:
+        plugs.child('__init__.py').setContent(pluginInitFile)
+    plugs.child(pluginModule + '.py').setContent(pluginContent)
+    return plugs
+
+
+
+class DeveloperSetupTests(unittest.TestCase):
+    """
+    These tests verify things about the plugin system without actually
+    interacting with the deployed 'twisted.plugins' package, instead creating a
+    temporary package.
+    """
+
+    def setUp(self):
+        """
+        Create a complex environment with multiple entries on sys.path, akin to
+        the environment of a developer who has a development (trunk) checkout
+        of Twisted, a system-installed version of Twisted (for their operating
+        system's tools), and a project which provides Twisted plugins.
+        """
+        self.savedPath = sys.path[:]
+        self.savedModules = sys.modules.copy()
+        self.fakeRoot = FilePath(self.mktemp())
+        self.fakeRoot.createDirectory()
+        self.systemPath = self.fakeRoot.child('system_path')
+        self.devPath = self.fakeRoot.child('development_path')
+        self.appPath = self.fakeRoot.child('application_path')
+        self.systemPackage = _createPluginDummy(
+            self.systemPath, pluginFileContents('system'),
+            True, 'plugindummy_builtin')
+        self.devPackage = _createPluginDummy(
+            self.devPath, pluginFileContents('dev'),
+            True, 'plugindummy_builtin')
+        self.appPackage = _createPluginDummy(
+            self.appPath, pluginFileContents('app'),
+            False, 'plugindummy_app')
+
+        # Now we're going to do the system installation.
+        sys.path.extend([x.path for x in [self.systemPath,
+                                          self.appPath]])
+        # Run all the way through the plugins list to cause the
+        # L{plugin.getPlugins} generator to write cache files for the system
+        # installation.
+        self.getAllPlugins()
+        self.sysplug = self.systemPath.child('plugindummy').child('plugins')
+        self.syscache = self.sysplug.child('dropin.cache')
+        # Make sure there's a nice big difference in modification times so that
+        # we won't re-build the system cache.
+        now = time.time()
+        os.utime(
+            self.sysplug.child('plugindummy_builtin.py').path,
+            (now - 5000,) * 2)
+        os.utime(self.syscache.path, (now - 2000,) * 2)
+        # For extra realism, let's make sure that the system path is no longer
+        # writable.
+        self.lockSystem()
+        self.resetEnvironment()
+
+
+    def lockSystem(self):
+        """
+        Lock the system directories, as if they were unwritable by this user.
+        """
+        os.chmod(self.sysplug.path, 0555)
+        os.chmod(self.syscache.path, 0555)
+
+
+    def unlockSystem(self):
+        """
+        Unlock the system directories, as if they were writable by this user.
+        """
+        os.chmod(self.sysplug.path, 0777)
+        os.chmod(self.syscache.path, 0777)
+
+
+    def getAllPlugins(self):
+        """
+        Get all the plugins loadable from our dummy package, and return their
+        short names.
+        """
+        # Import the module we just added to our path.  (Local scope because
+        # this package doesn't exist outside of this test.)
+        import plugindummy.plugins
+        x = list(plugin.getPlugins(ITestPlugin, plugindummy.plugins))
+        return [plug.__name__ for plug in x]
+
+
+    def resetEnvironment(self):
+        """
+        Change the environment to what it should be just as the test is
+        starting.
+        """
+        self.unsetEnvironment()
+        sys.path.extend([x.path for x in [self.devPath,
+                                          self.systemPath,
+                                          self.appPath]])
+
+    def unsetEnvironment(self):
+        """
+        Change the Python environment back to what it was before the test was
+        started.
+        """
+        sys.modules.clear()
+        sys.modules.update(self.savedModules)
+        sys.path[:] = self.savedPath
+
+
+    def tearDown(self):
+        """
+        Reset the Python environment to what it was before this test ran, and
+        restore permissions on files which were marked read-only so that the
+        directory may be cleanly cleaned up.
+        """
+        self.unsetEnvironment()
+        # Normally we wouldn't "clean up" the filesystem like this (leaving
+        # things for post-test inspection), but if we left the permissions the
+        # way they were, we'd be leaving files around that the buildbots
+        # couldn't delete, and that would be bad.
+        self.unlockSystem()
+
+
+    def test_developmentPluginAvailability(self):
+        """
+        Plugins added in the development path should be loadable, even when
+        the (now non-importable) system path contains its own idea of the
+        list of plugins for a package.  Conversely, plugins added in the
+        system path should not be available.
+        """
+        # Run 3 times: uncached, cached, and then cached again to make sure we
+        # didn't overwrite / corrupt the cache on the cached try.
+        for x in range(3):
+            names = self.getAllPlugins()
+            names.sort()
+            self.assertEqual(names, ['app', 'dev'])
+
+
+    def test_freshPyReplacesStalePyc(self):
+        """
+        Verify that if a stale .pyc file on the PYTHONPATH is replaced by a
+        fresh .py file, the plugins in the new .py are picked up rather than
+        the stale .pyc, even if the .pyc is still around.
+        """
+        mypath = self.appPackage.child("stale.py")
+        mypath.setContent(pluginFileContents('one'))
+        # Make it super stale
+        x = time.time() - 1000
+        os.utime(mypath.path, (x, x))
+        pyc = mypath.sibling('stale.pyc')
+        # compile it
+        compileall.compile_dir(self.appPackage.path, quiet=1)
+        os.utime(pyc.path, (x, x))
+        # Eliminate the other option.
+        mypath.remove()
+        # Make sure it's the .pyc path getting cached.
+        self.resetEnvironment()
+        # Sanity check.
+        self.assertIn('one', self.getAllPlugins())
+        self.failIfIn('two', self.getAllPlugins())
+        self.resetEnvironment()
+        mypath.setContent(pluginFileContents('two'))
+        self.failIfIn('one', self.getAllPlugins())
+        self.assertIn('two', self.getAllPlugins())
+
+
+    def test_newPluginsOnReadOnlyPath(self):
+        """
+        Verify that a failure to write the dropin.cache file on a read-only
+        path will not affect the list of plugins returned.
+
+        Note: this test should pass on both Linux and Windows, but may not
+        provide useful coverage on Windows due to the different meaning of
+        "read-only directory".
+        """
+        self.unlockSystem()
+        self.sysplug.child('newstuff.py').setContent(pluginFileContents('one'))
+        self.lockSystem()
+
+        # Take the developer path out, so that the system plugins are actually
+        # examined.
+        sys.path.remove(self.devPath.path)
+
+        # Start observing log events to see the warning
+        events = []
+        addObserver(events.append)
+        self.addCleanup(removeObserver, events.append)
+
+        self.assertIn('one', self.getAllPlugins())
+
+        # Make sure something was logged about the cache.
+        expected = "Unable to write to plugin cache %s: error number %d" % (
+            self.syscache.path, errno.EPERM)
+        for event in events:
+            if expected in textFromEventDict(event):
+                break
+        else:
+            self.fail(
+                "Did not observe unwriteable cache warning in log "
+                "events: %r" % (events,))
+
+
+
+class AdjacentPackageTests(unittest.TestCase):
+    """
+    Tests for the behavior of the plugin system when there are multiple
+    installed copies of the package containing the plugins being loaded.
+    """
+
+    def setUp(self):
+        """
+        Save the elements of C{sys.path} and the items of C{sys.modules}.
+        """
+        self.originalPath = sys.path[:]
+        self.savedModules = sys.modules.copy()
+
+
+    def tearDown(self):
+        """
+        Restore C{sys.path} and C{sys.modules} to their original values.
+        """
+        sys.path[:] = self.originalPath
+        sys.modules.clear()
+        sys.modules.update(self.savedModules)
+
+
+    def createDummyPackage(self, root, name, pluginName):
+        """
+        Create a directory containing a Python package named I{dummy} with a
+        I{plugins} subpackage.
+
+        @type root: L{FilePath}
+        @param root: The directory in which to create the hierarchy.
+
+        @type name: C{str}
+        @param name: The name of the directory to create which will contain
+            the package.
+
+        @type pluginName: C{str}
+        @param pluginName: The name of a module to create in the
+            I{dummy.plugins} package.
+
+        @rtype: L{FilePath}
+        @return: The directory which was created to contain the I{dummy}
+            package.
+        """
+        directory = root.child(name)
+        package = directory.child('dummy')
+        package.makedirs()
+        package.child('__init__.py').setContent('')
+        plugins = package.child('plugins')
+        plugins.makedirs()
+        plugins.child('__init__.py').setContent(pluginInitFile)
+        pluginModule = plugins.child(pluginName + '.py')
+        pluginModule.setContent(pluginFileContents(name))
+        return directory
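+
+    # For reference, createDummyPackage(root, 'first', 'someplugin') produces
+    # roughly this layout (a sketch; only the created files are shown):
+    #
+    #   <root>/first/
+    #       dummy/
+    #           __init__.py
+    #           plugins/
+    #               __init__.py      (pluginInitFile)
+    #               someplugin.py    (pluginFileContents('first'))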
+
+
+    def test_hiddenPackageSamePluginModuleNameObscured(self):
+        """
+        Only plugins from the first package in sys.path should be returned by
+        getPlugins in the case where there are two Python packages by the same
+        name installed, each with a plugin module of the same name.
+        """
+        root = FilePath(self.mktemp())
+        root.makedirs()
+
+        firstDirectory = self.createDummyPackage(root, 'first', 'someplugin')
+        secondDirectory = self.createDummyPackage(root, 'second', 'someplugin')
+
+        sys.path.append(firstDirectory.path)
+        sys.path.append(secondDirectory.path)
+
+        import dummy.plugins
+
+        plugins = list(plugin.getPlugins(ITestPlugin, dummy.plugins))
+        self.assertEqual(['first'], [p.__name__ for p in plugins])
+
+
+    def test_hiddenPackageDifferentPluginModuleNameObscured(self):
+        """
+        Plugins from the first package in sys.path should be returned by
+        getPlugins in the case where there are two Python packages by the same
+        name installed, each with a plugin module of a different name.
+        """
+        root = FilePath(self.mktemp())
+        root.makedirs()
+
+        firstDirectory = self.createDummyPackage(root, 'first', 'thisplugin')
+        secondDirectory = self.createDummyPackage(root, 'second', 'thatplugin')
+
+        sys.path.append(firstDirectory.path)
+        sys.path.append(secondDirectory.path)
+
+        import dummy.plugins
+
+        plugins = list(plugin.getPlugins(ITestPlugin, dummy.plugins))
+        self.assertEqual(['first'], [p.__name__ for p in plugins])
+
+
+
+class PackagePathTests(unittest.TestCase):
+    """
+    Tests for L{plugin.pluginPackagePaths} which constructs search paths for
+    plugin packages.
+    """
+
+    def setUp(self):
+        """
+        Save the elements of C{sys.path}.
+        """
+        self.originalPath = sys.path[:]
+
+
+    def tearDown(self):
+        """
+        Restore C{sys.path} to its original value.
+        """
+        sys.path[:] = self.originalPath
+
+
+    def test_pluginDirectories(self):
+        """
+        L{plugin.pluginPackagePaths} should return a list containing each
+        directory in C{sys.path} with a suffix based on the supplied package
+        name.
+        """
+        foo = FilePath('foo')
+        bar = FilePath('bar')
+        sys.path = [foo.path, bar.path]
+        self.assertEqual(
+            plugin.pluginPackagePaths('dummy.plugins'),
+            [foo.child('dummy').child('plugins').path,
+             bar.child('dummy').child('plugins').path])
+
+
+    def test_pluginPackagesExcluded(self):
+        """
+        L{plugin.pluginPackagePaths} should exclude directories which are
+        Python packages.  The only allowed plugin package (the only one
+        associated with a I{dummy} package which Python will allow to be
+        imported) will already be known to the caller of
+        L{plugin.pluginPackagePaths} and will most commonly already be in
+        the C{__path__} they are about to mutate.
+        """
+        root = FilePath(self.mktemp())
+        foo = root.child('foo').child('dummy').child('plugins')
+        foo.makedirs()
+        foo.child('__init__.py').setContent('')
+        sys.path = [root.child('foo').path, root.child('bar').path]
+        self.assertEqual(
+            plugin.pluginPackagePaths('dummy.plugins'),
+            [root.child('bar').child('dummy').child('plugins').path])
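+
+
+def _examplePluginDiscovery():
+    """
+    A minimal sketch, not exercised by the tests above, of the discovery
+    pattern these tests cover: extend a plugin package's C{__path__} with
+    the extra directories computed by L{plugin.pluginPackagePaths}, then
+    enumerate the plugins with L{plugin.getPlugins}.  The helper name is
+    illustrative only.
+    """
+    # Local import: the dummy package only exists inside the test fixtures.
+    import plugindummy.plugins
+    # Add any non-package plugin directories found alongside sys.path entries.
+    plugindummy.plugins.__path__.extend(
+        plugin.pluginPackagePaths('plugindummy.plugins'))
+    # Enumerate every ITestPlugin provider discoverable in the package.
+    return list(plugin.getPlugins(ITestPlugin, plugindummy.plugins))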
diff --git a/ThirdParty/Twisted/twisted/test/test_policies.py b/ThirdParty/Twisted/twisted/test/test_policies.py
new file mode 100644
index 0000000..956cf19
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_policies.py
@@ -0,0 +1,854 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test code for policies.
+"""
+from __future__ import division, absolute_import
+
+from zope.interface import Interface, implementer, implementedBy
+
+from twisted.python.compat import NativeStringIO, _PY3
+from twisted.trial import unittest
+from twisted.test.proto_helpers import StringTransport
+from twisted.test.proto_helpers import StringTransportWithDisconnection
+
+from twisted.internet import protocol, reactor, address, defer, task
+from twisted.protocols import policies
+
+
+
+class SimpleProtocol(protocol.Protocol):
+
+    connected = disconnected = 0
+    buffer = b""
+
+    def __init__(self):
+        self.dConnected = defer.Deferred()
+        self.dDisconnected = defer.Deferred()
+
+    def connectionMade(self):
+        self.connected = 1
+        self.dConnected.callback('')
+
+    def connectionLost(self, reason):
+        self.disconnected = 1
+        self.dDisconnected.callback('')
+
+    def dataReceived(self, data):
+        self.buffer += data
+
+
+
+class SillyFactory(protocol.ClientFactory):
+
+    def __init__(self, p):
+        self.p = p
+
+    def buildProtocol(self, addr):
+        return self.p
+
+
+class EchoProtocol(protocol.Protocol):
+    paused = False
+
+    def pauseProducing(self):
+        self.paused = True
+
+    def resumeProducing(self):
+        self.paused = False
+
+    def stopProducing(self):
+        pass
+
+    def dataReceived(self, data):
+        self.transport.write(data)
+
+
+
+class Server(protocol.ServerFactory):
+    """
+    A simple server factory using L{EchoProtocol}.
+    """
+    protocol = EchoProtocol
+
+
+
+class TestableThrottlingFactory(policies.ThrottlingFactory):
+    """
+    L{policies.ThrottlingFactory} using a L{task.Clock} for tests.
+    """
+
+    def __init__(self, clock, *args, **kwargs):
+        """
+        @param clock: object providing a callLater method that can be used
+            for tests.
+        @type clock: C{task.Clock} or similar.
+        """
+        policies.ThrottlingFactory.__init__(self, *args, **kwargs)
+        self.clock = clock
+
+
+    def callLater(self, period, func):
+        """
+        Forward to the testable clock.
+        """
+        return self.clock.callLater(period, func)
+
+
+
+class TestableTimeoutFactory(policies.TimeoutFactory):
+    """
+    L{policies.TimeoutFactory} using a L{task.Clock} for tests.
+    """
+
+    def __init__(self, clock, *args, **kwargs):
+        """
+        @param clock: object providing a callLater method that can be used
+            for tests.
+        @type clock: C{task.Clock} or similar.
+        """
+        policies.TimeoutFactory.__init__(self, *args, **kwargs)
+        self.clock = clock
+
+
+    def callLater(self, period, func):
+        """
+        Forward to the testable clock.
+        """
+        return self.clock.callLater(period, func)
+
+
+
+class WrapperTestCase(unittest.TestCase):
+    """
+    Tests for L{WrappingFactory} and L{ProtocolWrapper}.
+    """
+    def test_protocolFactoryAttribute(self):
+        """
+        Make sure protocol.factory is the wrapped factory, not the wrapping
+        factory.
+        """
+        f = Server()
+        wf = policies.WrappingFactory(f)
+        p = wf.buildProtocol(address.IPv4Address('TCP', '127.0.0.1', 35))
+        self.assertIdentical(p.wrappedProtocol.factory, f)
+
+
+    def test_transportInterfaces(self):
+        """
+        The transport wrapper passed to the wrapped protocol's
+        C{makeConnection} provides the same interfaces as are provided by the
+        original transport.
+        """
+        class IStubTransport(Interface):
+            pass
+
+        @implementer(IStubTransport)
+        class StubTransport:
+            pass
+
+        # Looking up what ProtocolWrapper implements also mutates the class.
+        # It adds __implemented__ and __providedBy__ attributes to it.  These
+        # prevent __getattr__ from causing the IStubTransport.providedBy call
+        # below to return True.  If, by accident, nothing else causes
+        # these attributes to be added to ProtocolWrapper, the test will pass,
+        # but the interface will only be provided until something does trigger
+        # their addition.  So we just trigger it right now to be sure.
+        implementedBy(policies.ProtocolWrapper)
+
+        proto = protocol.Protocol()
+        wrapper = policies.ProtocolWrapper(policies.WrappingFactory(None), proto)
+
+        wrapper.makeConnection(StubTransport())
+        self.assertTrue(IStubTransport.providedBy(proto.transport))
+
+
+    def test_factoryLogPrefix(self):
+        """
+        L{WrappingFactory.logPrefix} is customized to mention both the original
+        factory and the wrapping factory.
+        """
+        server = Server()
+        factory = policies.WrappingFactory(server)
+        self.assertEqual("Server (WrappingFactory)", factory.logPrefix())
+
+
+    def test_factoryLogPrefixFallback(self):
+        """
+        If the wrapped factory doesn't have a L{logPrefix} method,
+        L{WrappingFactory.logPrefix} falls back to the factory class name.
+        """
+        class NoFactory(object):
+            pass
+
+        server = NoFactory()
+        factory = policies.WrappingFactory(server)
+        self.assertEqual("NoFactory (WrappingFactory)", factory.logPrefix())
+
+
+    def test_protocolLogPrefix(self):
+        """
+        L{ProtocolWrapper.logPrefix} is customized to mention both the original
+        protocol and the wrapper.
+        """
+        server = Server()
+        factory = policies.WrappingFactory(server)
+        protocol = factory.buildProtocol(
+            address.IPv4Address('TCP', '127.0.0.1', 35))
+        self.assertEqual("EchoProtocol (ProtocolWrapper)",
+                         protocol.logPrefix())
+
+
+    def test_protocolLogPrefixFallback(self):
+        """
+        If the wrapped protocol doesn't have a L{logPrefix} method,
+        L{ProtocolWrapper.logPrefix} falls back to the protocol class name.
+        """
+        class NoProtocol(object):
+            pass
+
+        server = Server()
+        server.protocol = NoProtocol
+        factory = policies.WrappingFactory(server)
+        protocol = factory.buildProtocol(
+            address.IPv4Address('TCP', '127.0.0.1', 35))
+        self.assertEqual("NoProtocol (ProtocolWrapper)",
+                         protocol.logPrefix())
+
+
+    def _getWrapper(self):
+        """
+        Return a L{policies.ProtocolWrapper} that has been connected to a
+        L{StringTransport}.
+        """
+        wrapper = policies.ProtocolWrapper(policies.WrappingFactory(Server()),
+                                           protocol.Protocol())
+        transport = StringTransport()
+        wrapper.makeConnection(transport)
+        return wrapper
+
+
+    def test_getHost(self):
+        """
+        L{policies.ProtocolWrapper.getHost} calls C{getHost} on the underlying
+        transport.
+        """
+        wrapper = self._getWrapper()
+        self.assertEqual(wrapper.getHost(), wrapper.transport.getHost())
+
+
+    def test_getPeer(self):
+        """
+        L{policies.ProtocolWrapper.getPeer} calls C{getPeer} on the underlying
+        transport.
+        """
+        wrapper = self._getWrapper()
+        self.assertEqual(wrapper.getPeer(), wrapper.transport.getPeer())
+
+
+    def test_registerProducer(self):
+        """
+        L{policies.ProtocolWrapper.registerProducer} calls C{registerProducer}
+        on the underlying transport.
+        """
+        wrapper = self._getWrapper()
+        producer = object()
+        wrapper.registerProducer(producer, True)
+        self.assertIdentical(wrapper.transport.producer, producer)
+        self.assertTrue(wrapper.transport.streaming)
+
+
+    def test_unregisterProducer(self):
+        """
+        L{policies.ProtocolWrapper.unregisterProducer} calls
+        C{unregisterProducer} on the underlying transport.
+        """
+        wrapper = self._getWrapper()
+        producer = object()
+        wrapper.registerProducer(producer, True)
+        wrapper.unregisterProducer()
+        self.assertIdentical(wrapper.transport.producer, None)
+        self.assertIdentical(wrapper.transport.streaming, None)
+
+
+    def test_stopConsuming(self):
+        """
+        L{policies.ProtocolWrapper.stopConsuming} calls C{stopConsuming} on
+        the underlying transport.
+        """
+        wrapper = self._getWrapper()
+        result = []
+        wrapper.transport.stopConsuming = lambda: result.append(True)
+        wrapper.stopConsuming()
+        self.assertEqual(result, [True])
+
+
+    def test_startedConnecting(self):
+        """
+        L{policies.WrappingFactory.startedConnecting} calls
+        C{startedConnecting} on the underlying factory.
+        """
+        result = []
+        class Factory(object):
+            def startedConnecting(self, connector):
+                result.append(connector)
+
+        wrapper = policies.WrappingFactory(Factory())
+        connector = object()
+        wrapper.startedConnecting(connector)
+        self.assertEqual(result, [connector])
+
+
+    def test_clientConnectionLost(self):
+        """
+        L{policies.WrappingFactory.clientConnectionLost} calls
+        C{clientConnectionLost} on the underlying factory.
+        """
+        result = []
+        class Factory(object):
+            def clientConnectionLost(self, connector, reason):
+                result.append((connector, reason))
+
+        wrapper = policies.WrappingFactory(Factory())
+        connector = object()
+        reason = object()
+        wrapper.clientConnectionLost(connector, reason)
+        self.assertEqual(result, [(connector, reason)])
+
+
+    def test_clientConnectionFailed(self):
+        """
+        L{policies.WrappingFactory.clientConnectionFailed} calls
+        C{clientConnectionFailed} on the underlying factory.
+        """
+        result = []
+        class Factory(object):
+            def clientConnectionFailed(self, connector, reason):
+                result.append((connector, reason))
+
+        wrapper = policies.WrappingFactory(Factory())
+        connector = object()
+        reason = object()
+        wrapper.clientConnectionFailed(connector, reason)
+        self.assertEqual(result, [(connector, reason)])
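+
+
+class _UppercasingProtocol(policies.ProtocolWrapper):
+    """
+    A minimal sketch, not used by the tests above, of the wrapping pattern
+    covered by L{WrapperTestCase}: a L{policies.ProtocolWrapper} subclass can
+    intercept traffic in both directions before handing it on.  A matching
+    factory would be a L{policies.WrappingFactory} subclass with its
+    C{protocol} attribute set to this class.  The class name is illustrative
+    only.
+    """
+    def dataReceived(self, data):
+        # Transform bytes arriving from the transport, then forward them to
+        # the wrapped protocol exactly as the base class would.
+        policies.ProtocolWrapper.dataReceived(self, data.upper())
+
+    def write(self, data):
+        # Transform bytes written by the wrapped protocol before they reach
+        # the real transport.
+        policies.ProtocolWrapper.write(self, data.upper())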
+
+
+
+class WrappingFactory(policies.WrappingFactory):
+    protocol = lambda s, f, p: p
+
+    def startFactory(self):
+        policies.WrappingFactory.startFactory(self)
+        self.deferred.callback(None)
+
+
+
+class ThrottlingTestCase(unittest.TestCase):
+    """
+    Tests for L{policies.ThrottlingFactory}.
+    """
+
+    def test_limit(self):
+        """
+        Full test using a custom server limiting the number of connections.
+        """
+        server = Server()
+        c1, c2, c3, c4 = [SimpleProtocol() for i in range(4)]
+        tServer = policies.ThrottlingFactory(server, 2)
+        wrapTServer = WrappingFactory(tServer)
+        wrapTServer.deferred = defer.Deferred()
+
+        # Start listening
+        p = reactor.listenTCP(0, wrapTServer, interface="127.0.0.1")
+        n = p.getHost().port
+
+        def _connect123(results):
+            reactor.connectTCP("127.0.0.1", n, SillyFactory(c1))
+            c1.dConnected.addCallback(
+                lambda r: reactor.connectTCP("127.0.0.1", n, SillyFactory(c2)))
+            c2.dConnected.addCallback(
+                lambda r: reactor.connectTCP("127.0.0.1", n, SillyFactory(c3)))
+            return c3.dDisconnected
+
+        def _check123(results):
+            self.assertEqual([c.connected for c in (c1, c2, c3)], [1, 1, 1])
+            self.assertEqual([c.disconnected for c in (c1, c2, c3)], [0, 0, 1])
+            self.assertEqual(len(tServer.protocols.keys()), 2)
+            return results
+
+        def _lose1(results):
+            # disconnect one protocol and now another should be able to connect
+            c1.transport.loseConnection()
+            return c1.dDisconnected
+
+        def _connect4(results):
+            reactor.connectTCP("127.0.0.1", n, SillyFactory(c4))
+            return c4.dConnected
+
+        def _check4(results):
+            self.assertEqual(c4.connected, 1)
+            self.assertEqual(c4.disconnected, 0)
+            return results
+
+        def _cleanup(results):
+            for c in c2, c4:
+                c.transport.loseConnection()
+            return defer.DeferredList([
+                defer.maybeDeferred(p.stopListening),
+                c2.dDisconnected,
+                c4.dDisconnected])
+
+        wrapTServer.deferred.addCallback(_connect123)
+        wrapTServer.deferred.addCallback(_check123)
+        wrapTServer.deferred.addCallback(_lose1)
+        wrapTServer.deferred.addCallback(_connect4)
+        wrapTServer.deferred.addCallback(_check4)
+        wrapTServer.deferred.addCallback(_cleanup)
+        return wrapTServer.deferred
+
+
+    def test_writeLimit(self):
+        """
+        Check the writeLimit parameter: write data, and check for the pause
+        status.
+        """
+        server = Server()
+        tServer = TestableThrottlingFactory(task.Clock(), server, writeLimit=10)
+        port = tServer.buildProtocol(address.IPv4Address('TCP', '127.0.0.1', 0))
+        tr = StringTransportWithDisconnection()
+        tr.protocol = port
+        port.makeConnection(tr)
+        port.producer = port.wrappedProtocol
+
+        port.dataReceived(b"0123456789")
+        port.dataReceived(b"abcdefghij")
+        self.assertEqual(tr.value(), b"0123456789abcdefghij")
+        self.assertEqual(tServer.writtenThisSecond, 20)
+        self.assertFalse(port.wrappedProtocol.paused)
+
+        # At this point the server should have written 20 bytes, 10 bytes
+        # above the limit, so writing should be paused around 1 second
+        # from 'now' and resumed a second after that.
+        tServer.clock.advance(1.05)
+        self.assertEqual(tServer.writtenThisSecond, 0)
+        self.assertTrue(port.wrappedProtocol.paused)
+
+        tServer.clock.advance(1.05)
+        self.assertEqual(tServer.writtenThisSecond, 0)
+        self.assertFalse(port.wrappedProtocol.paused)
+
+
+    def test_readLimit(self):
+        """
+        Check the readLimit parameter: read data and check for the pause
+        status.
+        """
+        server = Server()
+        tServer = TestableThrottlingFactory(task.Clock(), server, readLimit=10)
+        port = tServer.buildProtocol(address.IPv4Address('TCP', '127.0.0.1', 0))
+        tr = StringTransportWithDisconnection()
+        tr.protocol = port
+        port.makeConnection(tr)
+
+        port.dataReceived(b"0123456789")
+        port.dataReceived(b"abcdefghij")
+        self.assertEqual(tr.value(), b"0123456789abcdefghij")
+        self.assertEqual(tServer.readThisSecond, 20)
+
+        tServer.clock.advance(1.05)
+        self.assertEqual(tServer.readThisSecond, 0)
+        self.assertEqual(tr.producerState, 'paused')
+
+        tServer.clock.advance(1.05)
+        self.assertEqual(tServer.readThisSecond, 0)
+        self.assertEqual(tr.producerState, 'producing')
+
+        tr.clear()
+        port.dataReceived(b"0123456789")
+        port.dataReceived(b"abcdefghij")
+        self.assertEqual(tr.value(), b"0123456789abcdefghij")
+        self.assertEqual(tServer.readThisSecond, 20)
+
+        tServer.clock.advance(1.05)
+        self.assertEqual(tServer.readThisSecond, 0)
+        self.assertEqual(tr.producerState, 'paused')
+
+        tServer.clock.advance(1.05)
+        self.assertEqual(tServer.readThisSecond, 0)
+        self.assertEqual(tr.producerState, 'producing')
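+
+
+def _exampleThrottledListen(serverFactory):
+    """
+    A minimal sketch, not part of the tests above: wrap an ordinary server
+    factory in a L{policies.ThrottlingFactory} before listening, so that at
+    most two clients are served at once and reads and writes are throttled
+    at roughly 10 bytes per second each.  The helper name is illustrative
+    only.
+    """
+    throttled = policies.ThrottlingFactory(
+        serverFactory, 2, readLimit=10, writeLimit=10)
+    # Excess connections are dropped by the factory, as test_limit shows.
+    return reactor.listenTCP(0, throttled, interface="127.0.0.1")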
+
+
+
+class TimeoutTestCase(unittest.TestCase):
+    """
+    Tests for L{policies.TimeoutFactory}.
+    """
+
+    def setUp(self):
+        """
+        Create a testable, deterministic clock, and a set of
+        server factory/protocol/transport.
+        """
+        self.clock = task.Clock()
+        wrappedFactory = protocol.ServerFactory()
+        wrappedFactory.protocol = SimpleProtocol
+        self.factory = TestableTimeoutFactory(self.clock, wrappedFactory, 3)
+        self.proto = self.factory.buildProtocol(
+            address.IPv4Address('TCP', '127.0.0.1', 12345))
+        self.transport = StringTransportWithDisconnection()
+        self.transport.protocol = self.proto
+        self.proto.makeConnection(self.transport)
+
+
+    def test_timeout(self):
+        """
+        Make sure that when a TimeoutFactory accepts a connection, it will
+        time out that connection if no data is read or written within the
+        timeout period.
+        """
+        # Let almost 3 time units pass
+        self.clock.pump([0.0, 0.5, 1.0, 1.0, 0.4])
+        self.failIf(self.proto.wrappedProtocol.disconnected)
+
+        # Now let the timer elapse
+        self.clock.pump([0.0, 0.2])
+        self.failUnless(self.proto.wrappedProtocol.disconnected)
+
+
+    def test_sendAvoidsTimeout(self):
+        """
+        Make sure that writing data to a transport from a protocol
+        constructed by a TimeoutFactory resets the timeout countdown.
+        """
+        # Let half the countdown period elapse
+        self.clock.pump([0.0, 0.5, 1.0])
+        self.failIf(self.proto.wrappedProtocol.disconnected)
+
+        # Send some data (self.proto is the /real/ proto's transport, so this
+        # is the write that gets called)
+        self.proto.write(b'bytes bytes bytes')
+
+        # More time passes, putting us past the original timeout
+        self.clock.pump([0.0, 1.0, 1.0])
+        self.failIf(self.proto.wrappedProtocol.disconnected)
+
+        # Make sure writeSequence delays timeout as well
+        self.proto.writeSequence([b'bytes'] * 3)
+
+        # Tick tock
+        self.clock.pump([0.0, 1.0, 1.0])
+        self.failIf(self.proto.wrappedProtocol.disconnected)
+
+        # Don't write anything more, just let the timeout expire
+        self.clock.pump([0.0, 2.0])
+        self.failUnless(self.proto.wrappedProtocol.disconnected)
+
+
+    def test_receiveAvoidsTimeout(self):
+        """
+        Make sure that receiving data also resets the timeout countdown.
+        """
+        # Let half the countdown period elapse
+        self.clock.pump([0.0, 1.0, 0.5])
+        self.failIf(self.proto.wrappedProtocol.disconnected)
+
+        # Some bytes arrive, they should reset the counter
+        self.proto.dataReceived(b'bytes bytes bytes')
+
+        # We pass the original timeout
+        self.clock.pump([0.0, 1.0, 1.0])
+        self.failIf(self.proto.wrappedProtocol.disconnected)
+
+        # Nothing more arrives though, the new timeout deadline is passed,
+        # the connection should be dropped.
+        self.clock.pump([0.0, 1.0, 1.0])
+        self.failUnless(self.proto.wrappedProtocol.disconnected)
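+
+
+def _exampleTimeoutWrap(serverFactory):
+    """
+    A minimal sketch, not part of the tests above: wrap a server factory in
+    a L{policies.TimeoutFactory} so that connections which neither read nor
+    write anything for 30 seconds are dropped.  The helper name is
+    illustrative only.
+    """
+    return policies.TimeoutFactory(serverFactory, 30)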
+
+
+
+class TimeoutTester(protocol.Protocol, policies.TimeoutMixin):
+    """
+    A testable protocol with timeout facility.
+
+    @ivar timedOut: set to C{True} if a timeout has been detected.
+    @type timedOut: C{bool}
+    """
+    timeOut  = 3
+    timedOut = False
+
+    def __init__(self, clock):
+        """
+        Initialize the protocol with a C{task.Clock} object.
+        """
+        self.clock = clock
+
+
+    def connectionMade(self):
+        """
+        Upon connection, set the timeout.
+        """
+        self.setTimeout(self.timeOut)
+
+
+    def dataReceived(self, data):
+        """
+        Reset the timeout on data.
+        """
+        self.resetTimeout()
+        protocol.Protocol.dataReceived(self, data)
+
+
+    def connectionLost(self, reason=None):
+        """
+        On connection lost, cancel all timeout operations.
+        """
+        self.setTimeout(None)
+
+
+    def timeoutConnection(self):
+        """
+        Set the timedOut flag to indicate that the connection has timed out.
+        """
+        self.timedOut = True
+
+
+    def callLater(self, timeout, func, *args, **kwargs):
+        """
+        Override callLater to use the deterministic clock.
+        """
+        return self.clock.callLater(timeout, func, *args, **kwargs)
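+
+
+class _ExampleIdleProtocol(protocol.Protocol, policies.TimeoutMixin):
+    """
+    A minimal sketch, not used by the tests above, of the L{TimeoutMixin}
+    pattern exercised by L{TimeoutTester}: arm the timeout when the
+    connection is made, push it back whenever data arrives, and drop the
+    connection when it fires.  The class name and C{idlePeriod} value are
+    illustrative only.
+    """
+    idlePeriod = 60
+
+    def connectionMade(self):
+        self.setTimeout(self.idlePeriod)
+
+    def dataReceived(self, data):
+        self.resetTimeout()
+
+    def connectionLost(self, reason):
+        # Cancel the pending timeout so no DelayedCall is left behind.
+        self.setTimeout(None)
+
+    def timeoutConnection(self):
+        # Dropping the connection is also what the default implementation
+        # does; it is spelled out here for clarity.
+        self.transport.loseConnection()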
+
+
+
+class TestTimeout(unittest.TestCase):
+    """
+    Tests for L{policies.TimeoutMixin}.
+    """
+
+    def setUp(self):
+        """
+        Create a testable, deterministic clock and a C{TimeoutTester} instance.
+        """
+        self.clock = task.Clock()
+        self.proto = TimeoutTester(self.clock)
+
+
+    def test_overriddenCallLater(self):
+        """
+        Test that the callLater of the clock is used instead of
+        C{reactor.callLater}.
+        """
+        self.proto.setTimeout(10)
+        self.assertEqual(len(self.clock.calls), 1)
+
+
+    def test_timeout(self):
+        """
+        Check that the protocol does timeout at the time specified by its
+        C{timeOut} attribute.
+        """
+        self.proto.makeConnection(StringTransport())
+
+        # timeOut value is 3
+        self.clock.pump([0, 0.5, 1.0, 1.0])
+        self.failIf(self.proto.timedOut)
+        self.clock.pump([0, 1.0])
+        self.failUnless(self.proto.timedOut)
+
+
+    def test_noTimeout(self):
+        """
+        Check that receiving data delays the timeout of the connection.
+        """
+        self.proto.makeConnection(StringTransport())
+
+        self.clock.pump([0, 0.5, 1.0, 1.0])
+        self.failIf(self.proto.timedOut)
+        self.proto.dataReceived(b'hello there')
+        self.clock.pump([0, 1.0, 1.0, 0.5])
+        self.failIf(self.proto.timedOut)
+        self.clock.pump([0, 1.0])
+        self.failUnless(self.proto.timedOut)
+
+
+    def test_resetTimeout(self):
+        """
+        Check that setting a new value for the timeout cancels the previous
+        one and installs a new timeout.
+        """
+        self.proto.timeOut = None
+        self.proto.makeConnection(StringTransport())
+
+        self.proto.setTimeout(1)
+        self.assertEqual(self.proto.timeOut, 1)
+
+        self.clock.pump([0, 0.9])
+        self.failIf(self.proto.timedOut)
+        self.clock.pump([0, 0.2])
+        self.failUnless(self.proto.timedOut)
+
+
+    def test_cancelTimeout(self):
+        """
+        Setting the timeout to C{None} cancels any pending timeout operations.
+        """
+        self.proto.timeOut = 5
+        self.proto.makeConnection(StringTransport())
+
+        self.proto.setTimeout(None)
+        self.assertEqual(self.proto.timeOut, None)
+
+        self.clock.pump([0, 5, 5, 5])
+        self.failIf(self.proto.timedOut)
+
+
+    def test_return(self):
+        """
+        setTimeout should return the value of the previous timeout.
+        """
+        self.proto.timeOut = 5
+
+        self.assertEqual(self.proto.setTimeout(10), 5)
+        self.assertEqual(self.proto.setTimeout(None), 10)
+        self.assertEqual(self.proto.setTimeout(1), None)
+        self.assertEqual(self.proto.timeOut, 1)
+
+        # Clean up the DelayedCall
+        self.proto.setTimeout(None)
+
+
+
+class LimitTotalConnectionsFactoryTestCase(unittest.TestCase):
+    """Tests for policies.LimitTotalConnectionsFactory"""
+    def testConnectionCounting(self):
+        # Make a basic factory
+        factory = policies.LimitTotalConnectionsFactory()
+        factory.protocol = protocol.Protocol
+
+        # connectionCount starts at zero
+        self.assertEqual(0, factory.connectionCount)
+
+        # connectionCount increments as connections are made
+        p1 = factory.buildProtocol(None)
+        self.assertEqual(1, factory.connectionCount)
+        p2 = factory.buildProtocol(None)
+        self.assertEqual(2, factory.connectionCount)
+
+        # and decrements as they are lost
+        p1.connectionLost(None)
+        self.assertEqual(1, factory.connectionCount)
+        p2.connectionLost(None)
+        self.assertEqual(0, factory.connectionCount)
+
+    def testConnectionLimiting(self):
+        # Make a basic factory with a connection limit of 1
+        factory = policies.LimitTotalConnectionsFactory()
+        factory.protocol = protocol.Protocol
+        factory.connectionLimit = 1
+
+        # Make a connection
+        p = factory.buildProtocol(None)
+        self.assertNotEqual(None, p)
+        self.assertEqual(1, factory.connectionCount)
+
+        # Try to make a second connection, which will exceed the connection
+        # limit.  This should return None, because overflowProtocol is None.
+        self.assertEqual(None, factory.buildProtocol(None))
+        self.assertEqual(1, factory.connectionCount)
+
+        # Define an overflow protocol
+        class OverflowProtocol(protocol.Protocol):
+            def connectionMade(self):
+                factory.overflowed = True
+        factory.overflowProtocol = OverflowProtocol
+        factory.overflowed = False
+
+        # Try to make a second connection again, now that we have an overflow
+        # protocol.  Note that overflow connections count towards the connection
+        # count.
+        op = factory.buildProtocol(None)
+        op.makeConnection(None) # to trigger connectionMade
+        self.assertEqual(True, factory.overflowed)
+        self.assertEqual(2, factory.connectionCount)
+
+        # Close the connections.
+        p.connectionLost(None)
+        self.assertEqual(1, factory.connectionCount)
+        op.connectionLost(None)
+        self.assertEqual(0, factory.connectionCount)
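+
+
+def _exampleLimitedFactory():
+    """
+    A minimal sketch, not used by the tests above, of the configuration
+    exercised by L{LimitTotalConnectionsFactoryTestCase}: serve at most 100
+    clients and hand any further connections to an overflow protocol (which
+    might, for instance, send a "server full" notice).  The helper name and
+    the limit of 100 are illustrative only.
+    """
+    factory = policies.LimitTotalConnectionsFactory()
+    factory.protocol = protocol.Protocol
+    factory.connectionLimit = 100
+    factory.overflowProtocol = protocol.Protocol
+    return factory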
+
+
+class WriteSequenceEchoProtocol(EchoProtocol):
+    def dataReceived(self, bytes):
+        if bytes.find(b'vector!') != -1:
+            self.transport.writeSequence([bytes])
+        else:
+            EchoProtocol.dataReceived(self, bytes)
+
+class TestLoggingFactory(policies.TrafficLoggingFactory):
+    openFile = None
+    def open(self, name):
+        assert self.openFile is None, "open() called too many times"
+        self.openFile = NativeStringIO()
+        return self.openFile
+
+
+
+class LoggingFactoryTestCase(unittest.TestCase):
+    """
+    Tests for L{policies.TrafficLoggingFactory}.
+    """
+
+    def test_thingsGetLogged(self):
+        """
+        Check the output produced by L{policies.TrafficLoggingFactory}.
+        """
+        wrappedFactory = Server()
+        wrappedFactory.protocol = WriteSequenceEchoProtocol
+        t = StringTransportWithDisconnection()
+        f = TestLoggingFactory(wrappedFactory, 'test')
+        p = f.buildProtocol(('1.2.3.4', 5678))
+        t.protocol = p
+        p.makeConnection(t)
+
+        v = f.openFile.getvalue()
+        self.assertIn('*', v)
+        self.failIf(t.value())
+
+        p.dataReceived(b'here are some bytes')
+
+        v = f.openFile.getvalue()
+        self.assertIn("C 1: %r" % (b'here are some bytes',), v)
+        self.assertIn("S 1: %r" % (b'here are some bytes',), v)
+        self.assertEqual(t.value(), b'here are some bytes')
+
+        t.clear()
+        p.dataReceived(b'prepare for vector! to the extreme')
+        v = f.openFile.getvalue()
+        self.assertIn("SV 1: %r" % ([b'prepare for vector! to the extreme'],), v)
+        self.assertEqual(t.value(), b'prepare for vector! to the extreme')
+
+        p.loseConnection()
+
+        v = f.openFile.getvalue()
+        self.assertIn('ConnectionDone', v)
+
+
+    def test_counter(self):
+        """
+        Test counter management with the resetCounter method.
+        """
+        wrappedFactory = Server()
+        f = TestLoggingFactory(wrappedFactory, 'test')
+        self.assertEqual(f._counter, 0)
+        f.buildProtocol(('1.2.3.4', 5678))
+        self.assertEqual(f._counter, 1)
+        # Reset log file
+        f.openFile = None
+        f.buildProtocol(('1.2.3.4', 5679))
+        self.assertEqual(f._counter, 2)
+
+        f.resetCounter()
+        self.assertEqual(f._counter, 0)
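+
+
+def _exampleLoggedFactory(serverFactory):
+    """
+    A minimal sketch, not part of the tests above: wrap a server factory in
+    a L{policies.TrafficLoggingFactory} so that the traffic of each
+    connection is logged to a file whose name starts with the given prefix
+    (assumed here; the prefix 'traffic' and the helper name are illustrative
+    only).
+    """
+    return policies.TrafficLoggingFactory(serverFactory, 'traffic')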
+
diff --git a/ThirdParty/Twisted/twisted/test/test_postfix.py b/ThirdParty/Twisted/twisted/test/test_postfix.py
new file mode 100644
index 0000000..0f80a46
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_postfix.py
@@ -0,0 +1,108 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for twisted.protocols.postfix module.
+"""
+
+from twisted.trial import unittest
+from twisted.protocols import postfix
+from twisted.test.proto_helpers import StringTransport
+
+
+class PostfixTCPMapQuoteTestCase(unittest.TestCase):
+    data = [
+        # (raw, quoted, [aliasQuotedForms]),
+        ('foo', 'foo'),
+        ('foo bar', 'foo%20bar'),
+        ('foo\tbar', 'foo%09bar'),
+        ('foo\nbar', 'foo%0Abar', 'foo%0abar'),
+        ('foo\r\nbar', 'foo%0D%0Abar', 'foo%0D%0abar', 'foo%0d%0Abar', 'foo%0d%0abar'),
+        ('foo ', 'foo%20'),
+        (' foo', '%20foo'),
+        ]
+
+    def testData(self):
+        for entry in self.data:
+            raw = entry[0]
+            quoted = entry[1:]
+
+            self.assertEqual(postfix.quote(raw), quoted[0])
+            for q in quoted:
+                self.assertEqual(postfix.unquote(q), raw)
+
+class PostfixTCPMapServerTestCase:
+    data = {
+        # 'key': 'value',
+        }
+
+    chat = [
+        # (input, expected_output),
+        ]
+
+    def test_chat(self):
+        """
+        Test that I{get} and I{put} commands are responded to correctly by
+        L{postfix.PostfixTCPMapServer} when its factory is an instance of
+        L{postfix.PostfixTCPMapDictServerFactory}.
+        """
+        factory = postfix.PostfixTCPMapDictServerFactory(self.data)
+        transport = StringTransport()
+
+        protocol = postfix.PostfixTCPMapServer()
+        protocol.service = factory
+        protocol.factory = factory
+        protocol.makeConnection(transport)
+
+        for input, expected_output in self.chat:
+            protocol.lineReceived(input)
+            self.assertEqual(
+                transport.value(), expected_output,
+                'For %r, expected %r but got %r' % (
+                    input, expected_output, transport.value()))
+            transport.clear()
+        protocol.setTimeout(None)
+
+
+    def test_deferredChat(self):
+        """
+        Test that I{get} and I{put} commands are responded to correctly by
+        L{postfix.PostfixTCPMapServer} when its factory is an instance of
+        L{postfix.PostfixTCPMapDeferringDictServerFactory}.
+        """
+        factory = postfix.PostfixTCPMapDeferringDictServerFactory(self.data)
+        transport = StringTransport()
+
+        protocol = postfix.PostfixTCPMapServer()
+        protocol.service = factory
+        protocol.factory = factory
+        protocol.makeConnection(transport)
+
+        for input, expected_output in self.chat:
+            protocol.lineReceived(input)
+            self.assertEqual(
+                transport.value(), expected_output,
+                'For %r, expected %r but got %r' % (
+                    input, expected_output, transport.value()))
+            transport.clear()
+        protocol.setTimeout(None)
+
+
+
+class Valid(PostfixTCPMapServerTestCase, unittest.TestCase):
+    data = {
+        'foo': 'ThisIs Foo',
+        'bar': ' bar really is found\r\n',
+        }
+    chat = [
+        ('get', "400 Command 'get' takes 1 parameters.\n"),
+        ('get foo bar', "500 \n"),
+        ('put', "400 Command 'put' takes 2 parameters.\n"),
+        ('put foo', "400 Command 'put' takes 2 parameters.\n"),
+        ('put foo bar baz', "500 put is not implemented yet.\n"),
+        ('put foo bar', '500 put is not implemented yet.\n'),
+        ('get foo', '200 ThisIs%20Foo\n'),
+        ('get bar', '200 %20bar%20really%20is%20found%0D%0A\n'),
+        ('get baz', '500 \n'),
+        ('foo', '400 unknown command\n'),
+        ]
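+
+
+def _examplePostfixMapFactory():
+    """
+    A minimal sketch, not part of the tests above, of the server these test
+    cases drive: serve a plain dictionary over the Postfix TCP map protocol,
+    so that 'get KEY' requests are answered with the quoted value.  The
+    helper name and mapping are illustrative only.
+    """
+    mapping = {'foo': 'ThisIs Foo'}
+    # Values are percent-quoted on the wire, e.g.
+    # postfix.quote('foo bar') == 'foo%20bar'.
+    return postfix.PostfixTCPMapDictServerFactory(mapping)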
diff --git a/ThirdParty/Twisted/twisted/test/test_process.py b/ThirdParty/Twisted/twisted/test/test_process.py
new file mode 100644
index 0000000..9d766cb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_process.py
@@ -0,0 +1,2561 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test running processes.
+"""
+
+import gzip
+import os
+import sys
+import signal
+import StringIO
+import errno
+import gc
+import stat
+import operator
+try:
+    import fcntl
+except ImportError:
+    fcntl = process = None
+else:
+    from twisted.internet import process
+
+
+from zope.interface.verify import verifyObject
+
+from twisted.python.log import msg
+from twisted.internet import reactor, protocol, error, interfaces, defer
+from twisted.trial import unittest
+from twisted.python import util, runtime, procutils
+from twisted.python.compat import set
+
+
+
+class StubProcessProtocol(protocol.ProcessProtocol):
+    """
+    ProcessProtocol stub implementation: all methods on this class raise an
+    exception, so instances of this may be used to verify that only certain
+    methods are called.
+    """
+    def outReceived(self, data):
+        raise NotImplementedError()
+
+    def errReceived(self, data):
+        raise NotImplementedError()
+
+    def inConnectionLost(self):
+        raise NotImplementedError()
+
+    def outConnectionLost(self):
+        raise NotImplementedError()
+
+    def errConnectionLost(self):
+        raise NotImplementedError()
+
+
+
+class ProcessProtocolTests(unittest.TestCase):
+    """
+    Tests for behavior provided by the process protocol base class,
+    L{protocol.ProcessProtocol}.
+    """
+    def test_interface(self):
+        """
+        L{ProcessProtocol} implements L{IProcessProtocol}.
+        """
+        verifyObject(interfaces.IProcessProtocol, protocol.ProcessProtocol())
+
+
+    def test_outReceived(self):
+        """
+        Verify that when stdout is delivered to
+        L{ProcessProtocol.childDataReceived}, it is forwarded to
+        L{ProcessProtocol.outReceived}.
+        """
+        received = []
+        class OutProtocol(StubProcessProtocol):
+            def outReceived(self, data):
+                received.append(data)
+
+        bytes = "bytes"
+        p = OutProtocol()
+        p.childDataReceived(1, bytes)
+        self.assertEqual(received, [bytes])
+
+
+    def test_errReceived(self):
+        """
+        Similar to L{test_outReceived}, but for stderr.
+        """
+        received = []
+        class ErrProtocol(StubProcessProtocol):
+            def errReceived(self, data):
+                received.append(data)
+
+        bytes = "bytes"
+        p = ErrProtocol()
+        p.childDataReceived(2, bytes)
+        self.assertEqual(received, [bytes])
+
+
+    def test_inConnectionLost(self):
+        """
+        Verify that when stdin close notification is delivered to
+        L{ProcessProtocol.childConnectionLost}, it is forwarded to
+        L{ProcessProtocol.inConnectionLost}.
+        """
+        lost = []
+        class InLostProtocol(StubProcessProtocol):
+            def inConnectionLost(self):
+                lost.append(None)
+
+        p = InLostProtocol()
+        p.childConnectionLost(0)
+        self.assertEqual(lost, [None])
+
+
+    def test_outConnectionLost(self):
+        """
+        Similar to L{test_inConnectionLost}, but for stdout.
+        """
+        lost = []
+        class OutLostProtocol(StubProcessProtocol):
+            def outConnectionLost(self):
+                lost.append(None)
+
+        p = OutLostProtocol()
+        p.childConnectionLost(1)
+        self.assertEqual(lost, [None])
+
+
+    def test_errConnectionLost(self):
+        """
+        Similar to L{test_inConnectionLost}, but for stderr.
+        """
+        lost = []
+        class ErrLostProtocol(StubProcessProtocol):
+            def errConnectionLost(self):
+                lost.append(None)
+
+        p = ErrLostProtocol()
+        p.childConnectionLost(2)
+        self.assertEqual(lost, [None])
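+
+
+class _ExampleCollectingProtocol(protocol.ProcessProtocol):
+    """
+    A minimal sketch, not used by the tests above, of the dispatch verified
+    by L{ProcessProtocolTests}: the base class routes
+    C{childDataReceived(1, data)} to C{outReceived} and
+    C{childDataReceived(2, data)} to C{errReceived}, so subclasses normally
+    override only the latter two.  The class name is illustrative only.
+    """
+    def __init__(self):
+        self.out = []
+        self.err = []
+
+    def outReceived(self, data):
+        # Data the child wrote to its stdout (file descriptor 1).
+        self.out.append(data)
+
+    def errReceived(self, data):
+        # Data the child wrote to its stderr (file descriptor 2).
+        self.err.append(data)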
+
+
+
+class TrivialProcessProtocol(protocol.ProcessProtocol):
+    """
+    Simple process protocol for test purposes.
+
+    @ivar outData: data received from stdout
+    @ivar errData: data received from stderr
+    """
+
+    def __init__(self, d):
+        """
+        Create the deferred that will be fired at the end, and initialize
+        data structures.
+        """
+        self.deferred = d
+        self.outData = []
+        self.errData = []
+
+    def processEnded(self, reason):
+        self.reason = reason
+        self.deferred.callback(None)
+
+    def outReceived(self, data):
+        self.outData.append(data)
+
+    def errReceived(self, data):
+        self.errData.append(data)
+
+
+class TestProcessProtocol(protocol.ProcessProtocol):
+
+    def connectionMade(self):
+        self.stages = [1]
+        self.data = ''
+        self.err = ''
+        self.transport.write("abcd")
+
+    def childDataReceived(self, childFD, data):
+        """
+        Override and disable the dispatch provided by the base class to ensure
+        that it is really this method which is being called, and the transport
+        is not going directly to L{outReceived} or L{errReceived}.
+        """
+        if childFD == 1:
+            self.data += data
+        elif childFD == 2:
+            self.err += data
+
+
+    def childConnectionLost(self, childFD):
+        """
+        Similarly to L{childDataReceived}, disable the automatic dispatch
+        provided by the base implementation to verify that the transport is
+        calling this method directly.
+        """
+        if childFD == 1:
+            self.stages.append(2)
+            if self.data != "abcd":
+                raise RuntimeError(
+                    "Data was %r instead of 'abcd'" % (self.data,))
+            self.transport.write("1234")
+        elif childFD == 2:
+            self.stages.append(3)
+            if self.err != "1234":
+                raise RuntimeError(
+                    "Err was %r instead of '1234'" % (self.err,))
+            self.transport.write("abcd")
+            self.stages.append(4)
+        elif childFD == 0:
+            self.stages.append(5)
+
+    def processEnded(self, reason):
+        self.reason = reason
+        self.deferred.callback(None)
+
+
+class EchoProtocol(protocol.ProcessProtocol):
+
+    s = "1234567" * 1001
+    n = 10
+    finished = 0
+
+    failure = None
+
+    def __init__(self, onEnded):
+        self.onEnded = onEnded
+        self.count = 0
+
+    def connectionMade(self):
+        assert self.n > 2
+        for i in range(self.n - 2):
+            self.transport.write(self.s)
+        # test writeSequence
+        self.transport.writeSequence([self.s, self.s])
+        self.buffer = self.s * self.n
+
+    def outReceived(self, data):
+        if buffer(self.buffer, self.count, len(data)) != buffer(data):
+            self.failure = ("wrong bytes received", data, self.count)
+            self.transport.closeStdin()
+        else:
+            self.count += len(data)
+            if self.count == len(self.buffer):
+                self.transport.closeStdin()
+
+    def processEnded(self, reason):
+        self.finished = 1
+        if not reason.check(error.ProcessDone):
+            self.failure = "process didn't terminate normally: " + str(reason)
+        self.onEnded.callback(self)
+
+
+
+class SignalProtocol(protocol.ProcessProtocol):
+    """
+    A process protocol that sends a signal when data is first received.
+
+    @ivar deferred: deferred firing on C{processEnded}.
+    @type deferred: L{defer.Deferred}
+
+    @ivar signal: the signal to send to the process.
+    @type signal: C{str}
+
+    @ivar signaled: A flag tracking whether the signal has been sent to the
+        child or not yet.  C{False} until it is sent, then C{True}.
+    @type signaled: C{bool}
+    """
+
+    def __init__(self, deferred, sig):
+        self.deferred = deferred
+        self.signal = sig
+        self.signaled = False
+
+
+    def outReceived(self, data):
+        """
+        Handle the first output from the child process (which indicates it
+        is set up and ready to receive the signal) by sending the signal to
+        it.  Also log all output to help with debugging.
+        """
+        msg("Received %r from child stdout" % (data,))
+        if not self.signaled:
+            self.signaled = True
+            self.transport.signalProcess(self.signal)
+
+
+    def errReceived(self, data):
+        """
+        Log all data received from the child's stderr to help with
+        debugging.
+        """
+        msg("Received %r from child stderr" % (data,))
+
+
+    def processEnded(self, reason):
+        """
+        Callback C{self.deferred} with C{None} if C{reason} is a
+        L{error.ProcessTerminated} failure with C{exitCode} set to C{None},
+        C{signal} set to C{self.signal}, and C{status} holding the status code
+        of the exited process. Otherwise, errback with a C{ValueError}
+        describing the problem.
+        """
+        msg("Child exited: %r" % (reason.getTraceback(),))
+        if not reason.check(error.ProcessTerminated):
+            return self.deferred.errback(
+                ValueError("wrong termination: %s" % (reason,)))
+        v = reason.value
+        if isinstance(self.signal, str):
+            signalValue = getattr(signal, 'SIG' + self.signal)
+        else:
+            signalValue = self.signal
+        if v.exitCode is not None:
+            return self.deferred.errback(
+                ValueError("SIG%s: exitCode is %s, not None" %
+                           (self.signal, v.exitCode)))
+        if v.signal != signalValue:
+            return self.deferred.errback(
+                ValueError("SIG%s: .signal was %s, wanted %s" %
+                           (self.signal, v.signal, signalValue)))
+        if os.WTERMSIG(v.status) != signalValue:
+            return self.deferred.errback(
+                ValueError('SIG%s: %s' % (self.signal, os.WTERMSIG(v.status))))
+        self.deferred.callback(None)
+
+
+
+class TestManyProcessProtocol(TestProcessProtocol):
+    def __init__(self):
+        self.deferred = defer.Deferred()
+
+    def processEnded(self, reason):
+        self.reason = reason
+        if reason.check(error.ProcessDone):
+            self.deferred.callback(None)
+        else:
+            self.deferred.errback(reason)
+
+
+
+class UtilityProcessProtocol(protocol.ProcessProtocol):
+    """
+    Helper class for launching a Python process and getting a result from it.
+
+    @ivar program: A string giving a Python program for the child process to
+    run.
+    """
+    program = None
+
+    def run(cls, reactor, argv, env):
+        """
+        Run a Python process connected to a new instance of this protocol
+        class.  Return the protocol instance.
+
+        The Python process is given C{self.program} on the command line to
+        execute, in addition to anything specified by C{argv}.  C{env} is
+        the complete environment.
+        """
+        exe = sys.executable
+        self = cls()
+        reactor.spawnProcess(
+            self, exe, [exe, "-c", self.program] + argv, env=env)
+        return self
+    run = classmethod(run)
+
+
+    def __init__(self):
+        self.bytes = []
+        self.requests = []
+
+
+    def parseChunks(self, bytes):
+        """
+        Called with all bytes received on stdout when the process exits.
+        """
+        raise NotImplementedError()
+
+
+    def getResult(self):
+        """
+        Return a Deferred which will fire with the result of L{parseChunks}
+        when the child process exits.
+        """
+        d = defer.Deferred()
+        self.requests.append(d)
+        return d
+
+
+    def _fireResultDeferreds(self, result):
+        """
+        Callback all Deferreds returned up until now by L{getResult}
+        with the given result object.
+        """
+        requests = self.requests
+        self.requests = None
+        for d in requests:
+            d.callback(result)
+
+
+    def outReceived(self, bytes):
+        """
+        Accumulate output from the child process in a list.
+        """
+        self.bytes.append(bytes)
+
+
+    def processEnded(self, reason):
+        """
+        Handle process termination by parsing all received output and firing
+        any waiting Deferreds.
+        """
+        self._fireResultDeferreds(self.parseChunks(self.bytes))
+
+
+
+
+class GetArgumentVector(UtilityProcessProtocol):
+    """
+    Protocol which will read a serialized argv from a process and
+    expose it to interested parties.
+    """
+    program = (
+        "from sys import stdout, argv\n"
+        "stdout.write(chr(0).join(argv))\n"
+        "stdout.flush()\n")
+
+    def parseChunks(self, chunks):
+        """
+        Parse the output from the process to which this protocol was
+        connected, which is a single unterminated line of \\0-separated
+        strings giving the argv of that process.  Return this as a list of
+        str objects.
+        """
+        return ''.join(chunks).split('\0')
+
+
+
+class GetEnvironmentDictionary(UtilityProcessProtocol):
+    """
+    Protocol which will read a serialized environment dict from a process
+    and expose it to interested parties.
+    """
+    program = (
+        "from sys import stdout\n"
+        "from os import environ\n"
+        "items = environ.iteritems()\n"
+        "stdout.write(chr(0).join([k + chr(0) + v for k, v in items]))\n"
+        "stdout.flush()\n")
+
+    def parseChunks(self, chunks):
+        """
+        Parse the output from the process to which this protocol was
+        connected, which is a single unterminated line of \\0-separated
+        strings giving key value pairs of the environment from that process.
+        Return this as a dictionary.
+        """
+        environString = ''.join(chunks)
+        if not environString:
+            return {}
+        environ = iter(environString.split('\0'))
+        d = {}
+        while 1:
+            try:
+                k = environ.next()
+            except StopIteration:
+                break
+            else:
+                v = environ.next()
+                d[k] = v
+        return d
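+
+
+def _exampleGetChildArgv():
+    """
+    A minimal sketch, not used by the tests above, of how the utility
+    protocols are driven: spawn the child with the C{run} classmethod and
+    wait for the parsed result via C{getResult}.  The helper name and the
+    extra arguments are illustrative only.
+    """
+    proto = GetArgumentVector.run(reactor, ['extra', 'arguments'], os.environ)
+    # The Deferred fires with the child's argv as a list of strings once the
+    # child exits.
+    return proto.getResult()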
+
+
+
+class ProcessTestCase(unittest.TestCase):
+    """Test running a process."""
+
+    usePTY = False
+
+    def testStdio(self):
+        """twisted.internet.stdio test."""
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_twisted.py")
+        p = Accumulator()
+        d = p.endedDeferred = defer.Deferred()
+        env = {"PYTHONPATH": os.pathsep.join(sys.path)}
+        reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=env,
+                             path=None, usePTY=self.usePTY)
+        p.transport.write("hello, world")
+        p.transport.write("abc")
+        p.transport.write("123")
+        p.transport.closeStdin()
+
+        def processEnded(ign):
+            self.assertEqual(p.outF.getvalue(), "hello, worldabc123",
+                              "Output follows:\n"
+                              "%s\n"
+                              "Error message from process_twisted follows:\n"
+                              "%s\n" % (p.outF.getvalue(), p.errF.getvalue()))
+        return d.addCallback(processEnded)
+
+
+    def test_unsetPid(self):
+        """
+        Test if pid is None/non-None before/after process termination.  This
+        reuses process_echoer.py to get a process that blocks on stdin.
+        """
+        finished = defer.Deferred()
+        p = TrivialProcessProtocol(finished)
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_echoer.py")
+        procTrans = reactor.spawnProcess(p, exe,
+                                    [exe, scriptPath], env=None)
+        self.failUnless(procTrans.pid)
+
+        def afterProcessEnd(ignored):
+            self.assertEqual(procTrans.pid, None)
+
+        p.transport.closeStdin()
+        return finished.addCallback(afterProcessEnd)
+
+
+    def test_process(self):
+        """
+        Test running a process: check its output, its exitCode, and some
+        properties of signalProcess.
+        """
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_tester.py")
+        d = defer.Deferred()
+        p = TestProcessProtocol()
+        p.deferred = d
+        reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None)
+        def check(ignored):
+            self.assertEqual(p.stages, [1, 2, 3, 4, 5])
+            f = p.reason
+            f.trap(error.ProcessTerminated)
+            self.assertEqual(f.value.exitCode, 23)
+            # would .signal be available on non-posix?
+            # self.assertEqual(f.value.signal, None)
+            self.assertRaises(
+                error.ProcessExitedAlready, p.transport.signalProcess, 'INT')
+            try:
+                import process_tester, glob
+                for f in glob.glob(process_tester.test_file_match):
+                    os.remove(f)
+            except:
+                pass
+        d.addCallback(check)
+        return d
+
+    def testManyProcesses(self):
+
+        def _check(results, protocols):
+            for p in protocols:
+                self.assertEqual(p.stages, [1, 2, 3, 4, 5], "[%d] stages = %s" % (id(p.transport), str(p.stages)))
+                # test status code
+                f = p.reason
+                f.trap(error.ProcessTerminated)
+                self.assertEqual(f.value.exitCode, 23)
+
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_tester.py")
+        args = [exe, "-u", scriptPath]
+        protocols = []
+        deferreds = []
+
+        for i in xrange(50):
+            p = TestManyProcessProtocol()
+            protocols.append(p)
+            reactor.spawnProcess(p, exe, args, env=None)
+            deferreds.append(p.deferred)
+
+        deferredList = defer.DeferredList(deferreds, consumeErrors=True)
+        deferredList.addCallback(_check, protocols)
+        return deferredList
+
+
+    def test_echo(self):
+        """
+        Spawning a subprocess which echoes its stdin to its stdout via
+        C{reactor.spawnProcess} results in that echoed output being
+        delivered to C{outReceived}.
+        """
+        finished = defer.Deferred()
+        p = EchoProtocol(finished)
+
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_echoer.py")
+        reactor.spawnProcess(p, exe, [exe, scriptPath], env=None)
+
+        def asserts(ignored):
+            self.failIf(p.failure, p.failure)
+            self.failUnless(hasattr(p, 'buffer'))
+            self.assertEqual(len(''.join(p.buffer)), len(p.s * p.n))
+
+        def takedownProcess(err):
+            p.transport.closeStdin()
+            return err
+
+        return finished.addCallback(asserts).addErrback(takedownProcess)
+
+
+    def testCommandLine(self):
+        args = [r'a\"b ', r'a\b ', r' a\\"b', r' a\\b', r'"foo bar" "', '\tab', '"\\', 'a"b', "a'b"]
+        pyExe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_cmdline.py")
+        p = Accumulator()
+        d = p.endedDeferred = defer.Deferred()
+        reactor.spawnProcess(p, pyExe, [pyExe, "-u", scriptPath]+args, env=None,
+                             path=None)
+
+        def processEnded(ign):
+            self.assertEqual(p.errF.getvalue(), "")
+            recvdArgs = p.outF.getvalue().splitlines()
+            self.assertEqual(recvdArgs, args)
+        return d.addCallback(processEnded)
+
+
+    def test_wrongArguments(self):
+        """
+        Test invalid arguments to spawnProcess: arguments and environment
+        must only contain strings or unicode, and must not contain null bytes.
+        """
+        exe = sys.executable
+        p = protocol.ProcessProtocol()
+
+        badEnvs = [
+            {"foo": 2},
+            {"foo": "egg\0a"},
+            {3: "bar"},
+            {"bar\0foo": "bar"}]
+
+        badArgs = [
+            [exe, 2],
+            "spam",
+            [exe, "foo\0bar"]]
+
+        # Sanity check - this will fail for people who have mucked with
+        # their site configuration in a stupid way, but there's nothing we
+        # can do about that.
+        badUnicode = u'\N{SNOWMAN}'
+        try:
+            badUnicode.encode(sys.getdefaultencoding())
+        except UnicodeEncodeError:
+            # Okay, that unicode doesn't encode, put it in as a bad environment
+            # key.
+            badEnvs.append({badUnicode: 'value for bad unicode key'})
+            badEnvs.append({'key for bad unicode value': badUnicode})
+            badArgs.append([exe, badUnicode])
+        else:
+            # It _did_ encode.  Most likely, Gtk2 is being used and the
+            # default system encoding is UTF-8, which can encode anything.
+            # In any case, if implicit unicode -> str conversion works for
+            # that string, we can't test that TypeError gets raised instead,
+            # so just leave it off.
+            pass
+
+        for env in badEnvs:
+            self.assertRaises(
+                TypeError,
+                reactor.spawnProcess, p, exe, [exe, "-c", ""], env=env)
+
+        for args in badArgs:
+            self.assertRaises(
+                TypeError,
+                reactor.spawnProcess, p, exe, args, env=None)
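+
+        # For contrast, a minimal sketch of a well-formed call (not executed
+        # here): every argument and environment key and value is a plain
+        # byte string containing no null bytes.
+        #
+        #     reactor.spawnProcess(p, exe, [exe, "-c", "pass"],
+        #                          env={"SPAM": "eggs"})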
+
+
+    # Use upper-case so that the environment key test uses an upper case
+    # name: some versions of Windows only support upper case environment
+    # variable names, and I think Python (as of 2.5) doesn't use the right
+    # syscall for lowercase or mixed case names to work anyway.
+    okayUnicode = u"UNICODE"
+    encodedValue = "UNICODE"
+
+    def _deprecatedUnicodeSupportTest(self, processProtocolClass, argv=[], env={}):
+        """
+        Check that a deprecation warning is emitted when passing unicode to
+        spawnProcess for an argv value or an environment key or value.
+        Check that the warning is of the right type, has the right message,
+        and refers to the correct file.  Unfortunately, the line number is
+        not checked, because that is too hard to figure out.
+
+        @param processProtocolClass: A L{UtilityProcessProtocol} subclass
+        which will be instantiated to communicate with the child process.
+
+        @param argv: The argv argument to spawnProcess.
+
+        @param env: The env argument to spawnProcess.
+
+        @return: A Deferred which fires when the test is complete.
+        """
+        # Sanity to check to make sure we can actually encode this unicode
+        # with the default system encoding.  This may be excessively
+        # paranoid. -exarkun
+        self.assertEqual(
+            self.okayUnicode.encode(sys.getdefaultencoding()),
+            self.encodedValue)
+
+        p = self.assertWarns(DeprecationWarning,
+            "Argument strings and environment keys/values passed to "
+            "reactor.spawnProcess should be str, not unicode.", __file__,
+            processProtocolClass.run, reactor, argv, env)
+        return p.getResult()
+
+
+    def test_deprecatedUnicodeArgvSupport(self):
+        """
+        Test that a unicode string passed for an argument value is allowed
+        if it can be encoded with the default system encoding, but that a
+        deprecation warning is emitted.
+        """
+        d = self._deprecatedUnicodeSupportTest(GetArgumentVector, argv=[self.okayUnicode])
+        def gotArgVector(argv):
+            self.assertEqual(argv, ['-c', self.encodedValue])
+        d.addCallback(gotArgVector)
+        return d
+
+
+    def test_deprecatedUnicodeEnvKeySupport(self):
+        """
+        Test that a unicode string passed for the key of the environment
+        dictionary is allowed if it can be encoded with the default system
+        encoding, but that a deprecation warning is emitted.
+        """
+        d = self._deprecatedUnicodeSupportTest(
+            GetEnvironmentDictionary, env={self.okayUnicode: self.encodedValue})
+        def gotEnvironment(environ):
+            self.assertEqual(environ[self.encodedValue], self.encodedValue)
+        d.addCallback(gotEnvironment)
+        return d
+
+
+    def test_deprecatedUnicodeEnvValueSupport(self):
+        """
+        Test that a unicode string passed for the value of the environment
+        dictionary is allowed if it can be encoded with the default system
+        encoding, but that a deprecation warning is emitted.
+        """
+        d = self._deprecatedUnicodeSupportTest(
+            GetEnvironmentDictionary, env={self.encodedValue: self.okayUnicode})
+        def gotEnvironment(environ):
+            # On Windows, the environment contains more things than we
+            # specified, so only make sure that at least the key we wanted
+            # is there, rather than testing the dictionary for exact
+            # equality.
+            self.assertEqual(environ[self.encodedValue], self.encodedValue)
+        d.addCallback(gotEnvironment)
+        return d
+
+
+
+class TwoProcessProtocol(protocol.ProcessProtocol):
+    num = -1
+    finished = 0
+    def __init__(self):
+        self.deferred = defer.Deferred()
+    def outReceived(self, data):
+        pass
+    def processEnded(self, reason):
+        self.finished = 1
+        self.deferred.callback(None)
+
+class TestTwoProcessesBase:
+    def setUp(self):
+        self.processes = [None, None]
+        self.pp = [None, None]
+        self.done = 0
+        self.verbose = 0
+
+    def createProcesses(self, usePTY=0):
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_reader.py")
+        for num in (0,1):
+            self.pp[num] = TwoProcessProtocol()
+            self.pp[num].num = num
+            p = reactor.spawnProcess(self.pp[num],
+                                     exe, [exe, "-u", scriptPath], env=None,
+                                     usePTY=usePTY)
+            self.processes[num] = p
+
+    def close(self, num):
+        if self.verbose: print "closing stdin [%d]" % num
+        p = self.processes[num]
+        pp = self.pp[num]
+        self.failIf(pp.finished, "Process finished too early")
+        p.loseConnection()
+        if self.verbose: print self.pp[0].finished, self.pp[1].finished
+
+    def _onClose(self):
+        return defer.gatherResults([ p.deferred for p in self.pp ])
+
+    def testClose(self):
+        if self.verbose: print "starting processes"
+        self.createProcesses()
+        reactor.callLater(1, self.close, 0)
+        reactor.callLater(2, self.close, 1)
+        return self._onClose()
+
+class TestTwoProcessesNonPosix(TestTwoProcessesBase, unittest.TestCase):
+    pass
+
+class TestTwoProcessesPosix(TestTwoProcessesBase, unittest.TestCase):
+    def tearDown(self):
+        for pp, pr in zip(self.pp, self.processes):
+            if not pp.finished:
+                try:
+                    os.kill(pr.pid, signal.SIGTERM)
+                except OSError:
+                    # If the test failed the process may already be dead
+                    # The error here is only noise
+                    pass
+        return self._onClose()
+
+    def kill(self, num):
+        if self.verbose: print "kill [%d] with SIGTERM" % num
+        p = self.processes[num]
+        pp = self.pp[num]
+        self.failIf(pp.finished, "Process finished too early")
+        os.kill(p.pid, signal.SIGTERM)
+        if self.verbose: print self.pp[0].finished, self.pp[1].finished
+
+    def testKill(self):
+        if self.verbose: print "starting processes"
+        self.createProcesses(usePTY=0)
+        reactor.callLater(1, self.kill, 0)
+        reactor.callLater(2, self.kill, 1)
+        return self._onClose()
+
+    def testClosePty(self):
+        if self.verbose: print "starting processes"
+        self.createProcesses(usePTY=1)
+        reactor.callLater(1, self.close, 0)
+        reactor.callLater(2, self.close, 1)
+        return self._onClose()
+
+    def testKillPty(self):
+        if self.verbose: print "starting processes"
+        self.createProcesses(usePTY=1)
+        reactor.callLater(1, self.kill, 0)
+        reactor.callLater(2, self.kill, 1)
+        return self._onClose()
+
+class FDChecker(protocol.ProcessProtocol):
+    state = 0
+    data = ""
+    failed = None
+
+    def __init__(self, d):
+        self.deferred = d
+
+    def fail(self, why):
+        self.failed = why
+        self.deferred.callback(None)
+
+    def connectionMade(self):
+        self.transport.writeToChild(0, "abcd")
+        self.state = 1
+
+    def childDataReceived(self, childFD, data):
+        if self.state == 1:
+            if childFD != 1:
+                self.fail("read '%s' on fd %d (not 1) during state 1" \
+                          % (childFD, data))
+                return
+            self.data += data
+            #print "len", len(self.data)
+            if len(self.data) == 6:
+                if self.data != "righto":
+                    self.fail("got '%s' on fd1, expected 'righto'" \
+                              % self.data)
+                    return
+                self.data = ""
+                self.state = 2
+                #print "state2", self.state
+                self.transport.writeToChild(3, "efgh")
+                return
+        if self.state == 2:
+            self.fail("read '%s' on fd %s during state 2" % (childFD, data))
+            return
+        if self.state == 3:
+            if childFD != 1:
+                self.fail("read '%s' on fd %s (not 1) during state 3" \
+                          % (childFD, data))
+                return
+            self.data += data
+            if len(self.data) == 6:
+                if self.data != "closed":
+                    self.fail("got '%s' on fd1, expected 'closed'" \
+                              % self.data)
+                    return
+                self.state = 4
+            return
+        if self.state == 4:
+            self.fail("read '%s' on fd %s during state 4" % (childFD, data))
+            return
+
+    def childConnectionLost(self, childFD):
+        if self.state == 1:
+            self.fail("got connectionLost(%d) during state 1" % childFD)
+            return
+        if self.state == 2:
+            if childFD != 4:
+                self.fail("got connectionLost(%d) (not 4) during state 2" \
+                          % childFD)
+                return
+            self.state = 3
+            self.transport.closeChildFD(5)
+            return
+
+    def processEnded(self, status):
+        rc = status.value.exitCode
+        if self.state != 4:
+            self.fail("processEnded early, rc %d" % rc)
+            return
+        if status.value.signal != None:
+            self.fail("processEnded with signal %s" % status.value.signal)
+            return
+        if rc != 0:
+            self.fail("processEnded with rc %d" % rc)
+            return
+        self.deferred.callback(None)
+
+
+class FDTest(unittest.TestCase):
+
+    def testFD(self):
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_fds.py")
+        d = defer.Deferred()
+        p = FDChecker(d)
+        reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None,
+                             path=None,
+                             childFDs={0:"w", 1:"r", 2:2,
+                                       3:"w", 4:"r", 5:"w"})
+        d.addCallback(lambda x : self.failIf(p.failed, p.failed))
+        return d
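+
+        # A note on the childFDs mapping used above (a sketch of the
+        # spawnProcess convention exercised by process_fds.py): "w" means the
+        # parent holds the write end of a pipe wired to that child fd, "r"
+        # means the parent holds the read end, and a plain integer means the
+        # child fd is a duplicate of that parent fd.  Here the parent writes
+        # to child fds 0, 3 and 5, reads from child fds 1 and 4, and the
+        # child's fd 2 is the parent's own fd 2.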
+
+    def testLinger(self):
+        # See what happens when all the pipes close before the process
+        # actually stops. This test *requires* SIGCHLD catching to work,
+        # as there is no other way to find out the process is done.
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_linger.py")
+        p = Accumulator()
+        d = p.endedDeferred = defer.Deferred()
+        reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None,
+                             path=None,
+                             childFDs={1:"r", 2:2},
+                             )
+        def processEnded(ign):
+            self.assertEqual(p.outF.getvalue(),
+                                 "here is some text\ngoodbye\n")
+        return d.addCallback(processEnded)
+
+
+
+class Accumulator(protocol.ProcessProtocol):
+    """Accumulate data from a process."""
+
+    closed = 0
+    endedDeferred = None
+
+    def connectionMade(self):
+        self.outF = StringIO.StringIO()
+        self.errF = StringIO.StringIO()
+
+    def outReceived(self, d):
+        self.outF.write(d)
+
+    def errReceived(self, d):
+        self.errF.write(d)
+
+    def outConnectionLost(self):
+        pass
+
+    def errConnectionLost(self):
+        pass
+
+    def processEnded(self, reason):
+        self.closed = 1
+        if self.endedDeferred is not None:
+            d, self.endedDeferred = self.endedDeferred, None
+            d.callback(None)
+
+
+class PosixProcessBase:
+    """
+    Test running processes.
+    """
+    usePTY = False
+
+    def getCommand(self, commandName):
+        """
+        Return the path of the shell command named C{commandName}, looking at
+        common locations.
+        """
+        if os.path.exists('/bin/%s' % (commandName,)):
+            cmd = '/bin/%s' % (commandName,)
+        elif os.path.exists('/usr/bin/%s' % (commandName,)):
+            cmd = '/usr/bin/%s' % (commandName,)
+        else:
+            raise RuntimeError(
+                "%s not found in /bin or /usr/bin" % (commandName,))
+        return cmd
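+
+        # A minimal alternative sketch (not used by these tests): look the
+        # command up on PATH with twisted.python.procutils.which, which
+        # UtilTestCase below exercises directly.
+        #
+        #     from twisted.python.procutils import which
+        #     paths = which(commandName)
+        #     if paths:
+        #         return paths[0]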
+
+    def testNormalTermination(self):
+        cmd = self.getCommand('true')
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        reactor.spawnProcess(p, cmd, ['true'], env=None,
+                             usePTY=self.usePTY)
+        def check(ignored):
+            p.reason.trap(error.ProcessDone)
+            self.assertEqual(p.reason.value.exitCode, 0)
+            self.assertEqual(p.reason.value.signal, None)
+        d.addCallback(check)
+        return d
+
+
+    def test_abnormalTermination(self):
+        """
+        When a process terminates with a system exit code set to 1,
+        C{processEnded} is called with a L{error.ProcessTerminated} error,
+        the C{exitCode} attribute reflecting the system exit code.
+        """
+        exe = sys.executable
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        reactor.spawnProcess(p, exe, [exe, '-c', 'import sys; sys.exit(1)'],
+                             env=None, usePTY=self.usePTY)
+
+        def check(ignored):
+            p.reason.trap(error.ProcessTerminated)
+            self.assertEqual(p.reason.value.exitCode, 1)
+            self.assertEqual(p.reason.value.signal, None)
+        d.addCallback(check)
+        return d
+
+
+    def _testSignal(self, sig):
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_signal.py")
+        d = defer.Deferred()
+        p = SignalProtocol(d, sig)
+        reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None,
+                             usePTY=self.usePTY)
+        return d
+
+
+    def test_signalHUP(self):
+        """
+        Sending the SIGHUP signal to a running process interrupts it, and
+        C{processEnded} is called with a L{error.ProcessTerminated} instance
+        with the C{exitCode} set to C{None} and the C{signal} attribute set to
+        C{signal.SIGHUP}. C{os.WTERMSIG} can also be used on the C{status}
+        attribute to extract the signal value.
+        """
+        return self._testSignal('HUP')
+
+
+    def test_signalINT(self):
+        """
+        Sending the SIGINT signal to a running process interrupts it, and
+        C{processEnded} is called with a L{error.ProcessTerminated} instance
+        with the C{exitCode} set to C{None} and the C{signal} attribute set to
+        C{signal.SIGINT}. C{os.WTERMSIG} can also be used on the C{status}
+        attribute to extract the signal value.
+        """
+        return self._testSignal('INT')
+
+
+    def test_signalKILL(self):
+        """
+        Sending the SIGKILL signal to a running process interrupts it, and
+        C{processEnded} is called with a L{error.ProcessTerminated} instance
+        with the C{exitCode} set to C{None} and the C{signal} attribute set to
+        C{signal.SIGKILL}. C{os.WTERMSIG} can also be used on the C{status}
+        attribute to extract the signal value.
+        """
+        return self._testSignal('KILL')
+
+
+    def test_signalTERM(self):
+        """
+        Sending the SIGTERM signal to a running process interrupts it, and
+        C{processEnded} is called with a L{error.ProcessTerminated} instance
+        with the C{exitCode} set to C{None} and the C{signal} attribute set to
+        C{signal.SIGTERM}. C{os.WTERMSIG} can also be used on the C{status}
+        attribute to extract the signal value.
+        """
+        return self._testSignal('TERM')
+
+
+    def test_childSignalHandling(self):
+        """
+        The disposition of signals which are ignored in the parent
+        process is reset to the default behavior for the child
+        process.
+        """
+        # Somewhat arbitrarily select SIGUSR1 here.  It satisfies our
+        # requirements that:
+        #    - The interpreter not fiddle around with the handler
+        #      behind our backs at startup time (this disqualifies
+        #      signals like SIGINT and SIGPIPE).
+        #    - The default behavior is to exit.
+        #
+        # This lets us send the signal to the child and then verify
+        # that it exits with a status code indicating that it was
+        # indeed the signal which caused it to exit.
+        which = signal.SIGUSR1
+
+        # Ignore the signal in the parent (and make sure we clean it
+        # up).
+        handler = signal.signal(which, signal.SIG_IGN)
+        self.addCleanup(signal.signal, signal.SIGUSR1, handler)
+
+        # Now do the test.
+        return self._testSignal(signal.SIGUSR1)
+
+
+    def test_executionError(self):
+        """
+        Raise an error during execvpe to check error management.
+        """
+        cmd = self.getCommand('false')
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        def buggyexecvpe(command, args, environment):
+            raise RuntimeError("Ouch")
+        oldexecvpe = os.execvpe
+        os.execvpe = buggyexecvpe
+        try:
+            reactor.spawnProcess(p, cmd, ['false'], env=None,
+                                 usePTY=self.usePTY)
+
+            def check(ignored):
+                errData = "".join(p.errData + p.outData)
+                self.assertIn("Upon execvpe", errData)
+                self.assertIn("Ouch", errData)
+            d.addCallback(check)
+        finally:
+            os.execvpe = oldexecvpe
+        return d
+
+
+    def test_errorInProcessEnded(self):
+        """
+        The handler which reaps a process is removed when the process is
+        reaped, even if the protocol's C{processEnded} method raises an
+        exception.
+        """
+        connected = defer.Deferred()
+        ended = defer.Deferred()
+
+        # This script runs until we disconnect its transport.
+        pythonExecutable = sys.executable
+        scriptPath = util.sibpath(__file__, "process_echoer.py")
+
+        class ErrorInProcessEnded(protocol.ProcessProtocol):
+            """
+            A protocol that raises an error in C{processEnded}.
+            """
+            def makeConnection(self, transport):
+                connected.callback(transport)
+
+            def processEnded(self, reason):
+                reactor.callLater(0, ended.callback, None)
+                raise RuntimeError("Deliberate error")
+
+        # Launch the process.
+        reactor.spawnProcess(
+            ErrorInProcessEnded(), pythonExecutable,
+            [pythonExecutable, scriptPath],
+            env=None, path=None)
+
+        pid = []
+        def cbConnected(transport):
+            pid.append(transport.pid)
+            # There's now a reap process handler registered.
+            self.assertIn(transport.pid, process.reapProcessHandlers)
+
+            # Kill the process cleanly, triggering an error in the protocol.
+            transport.loseConnection()
+        connected.addCallback(cbConnected)
+
+        def checkTerminated(ignored):
+            # The exception was logged.
+            excs = self.flushLoggedErrors(RuntimeError)
+            self.assertEqual(len(excs), 1)
+            # The process is no longer scheduled for reaping.
+            self.assertNotIn(pid[0], process.reapProcessHandlers)
+        ended.addCallback(checkTerminated)
+
+        return ended
+
+
+
+class MockSignal(object):
+    """
+    Neuter L{signal.signal}, but pass other attributes unscathed.
+    """
+    def signal(self, sig, action):
+        return signal.getsignal(sig)
+
+    def __getattr__(self, attr):
+        return getattr(signal, attr)
+
+
+class MockOS(object):
+    """
+    The mock OS: overwrite L{os}, L{fcntl} and L{sys} functions with fake ones.
+
+    @ivar exited: set to True when C{_exit} is called.
+    @type exited: C{bool}
+
+    @ivar O_RDWR: dumb value faking C{os.O_RDWR}.
+    @type O_RDWR: C{int}
+
+    @ivar O_NOCTTY: dumb value faking C{os.O_NOCTTY}.
+    @type O_NOCTTY: C{int}
+
+    @ivar WNOHANG: dumb value faking C{os.WNOHANG}.
+    @type WNOHANG: C{int}
+
+    @ivar raiseFork: if not C{None}, subsequent calls to fork will raise this
+        object.
+    @type raiseFork: C{NoneType} or C{Exception}
+
+    @ivar raiseExec: if set, subsequent calls to execvpe will raise an error.
+    @type raiseExec: C{bool}
+
+    @ivar fdio: fake file object returned by calls to fdopen.
+    @type fdio: C{StringIO.StringIO}
+
+    @ivar actions: holds the names of some actions executed by the object, in
+        order of execution.
+
+    @type actions: C{list} of C{str}
+
+    @ivar closed: keep track of the file descriptors closed.
+    @type closed: C{list} of C{int}
+
+    @ivar child: whether fork returns as the child or the parent.
+    @type child: C{bool}
+
+    @ivar pipeCount: count the number of times that C{os.pipe} has been called.
+    @type pipeCount: C{int}
+
+    @ivar raiseWaitPid: if set, subsequent calls to waitpid will raise
+        the error specified.
+    @type raiseWaitPid: C{None} or a class
+
+    @ivar waitChild: if set, subsequent calls to waitpid will return it.
+    @type waitChild: C{None} or a tuple
+
+    @ivar euid: the uid returned by the fake C{os.geteuid}
+    @type euid: C{int}
+
+    @ivar egid: the gid returned by the fake C{os.getegid}
+    @type egid: C{int}
+
+    @ivar seteuidCalls: stored results of C{os.seteuid} calls.
+    @type seteuidCalls: C{list}
+
+    @ivar setegidCalls: stored results of C{os.setegid} calls.
+    @type setegidCalls: C{list}
+
+    @ivar path: the path returned by C{os.path.expanduser}.
+    @type path: C{str}
+
+    @ivar raiseKill: if set, subsequent calls to kill will raise the error
+        specified.
+    @type raiseKill: C{None} or an exception instance.
+    """
+    exited = False
+    raiseExec = False
+    fdio = None
+    child = True
+    raiseWaitPid = None
+    raiseFork = None
+    waitChild = None
+    euid = 0
+    egid = 0
+    path = None
+    raiseKill = None
+
+    def __init__(self):
+        """
+        Initialize data structures.
+        """
+        self.actions = []
+        self.closed = []
+        self.pipeCount = 0
+        self.O_RDWR = -1
+        self.O_NOCTTY = -2
+        self.WNOHANG = -4
+        self.WEXITSTATUS = lambda x: 0
+        self.WIFEXITED = lambda x: 1
+        self.seteuidCalls = []
+        self.setegidCalls = []
+
+
+    def open(self, dev, flags):
+        """
+        Fake C{os.open}. Return a non-fd number to be sure it's not used
+        elsewhere.
+        """
+        return -3
+
+
+    def fstat(self, fd):
+        """
+        Fake C{os.fstat}.  Return a C{os.stat_result} filled with garbage.
+        """
+        return os.stat_result((0,) * 10)
+
+
+    def fdopen(self, fd, flag):
+        """
+        Fake C{os.fdopen}. Return a StringIO object whose content can be tested
+        later via C{self.fdio}.
+        """
+        self.fdio = StringIO.StringIO()
+        return self.fdio
+
+
+    def setsid(self):
+        """
+        Fake C{os.setsid}. Do nothing.
+        """
+
+
+    def fork(self):
+        """
+        Fake C{os.fork}. Save the action in C{self.actions}, and return 0 if
+        C{self.child} is set, or a dumb number.
+        """
+        self.actions.append(('fork', gc.isenabled()))
+        if self.raiseFork is not None:
+            raise self.raiseFork
+        elif self.child:
+            # Child result is 0
+            return 0
+        else:
+            return 21
+
+
+    def close(self, fd):
+        """
+        Fake C{os.close}, saving the closed fd in C{self.closed}.
+        """
+        self.closed.append(fd)
+
+
+    def dup2(self, fd1, fd2):
+        """
+        Fake C{os.dup2}. Do nothing.
+        """
+
+
+    def write(self, fd, data):
+        """
+        Fake C{os.write}. Do nothing.
+        """
+
+
+    def execvpe(self, command, args, env):
+        """
+        Fake C{os.execvpe}. Save the action, and raise an error if
+        C{self.raiseExec} is set.
+        """
+        self.actions.append('exec')
+        if self.raiseExec:
+            raise RuntimeError("Bar")
+
+
+    def pipe(self):
+        """
+        Fake C{os.pipe}. Return non-fd numbers to be sure they're not used
+        elsewhere, and increment C{self.pipeCount}, which is used to uniquify
+        the result.
+        """
+        self.pipeCount += 1
+        return -2 * self.pipeCount + 1, -2 * self.pipeCount
+
+
+    def ttyname(self, fd):
+        """
+        Fake C{os.ttyname}. Return a dumb string.
+        """
+        return "foo"
+
+
+    def _exit(self, code):
+        """
+        Fake C{os._exit}. Save the action, set the C{self.exited} flag, and
+        raise C{SystemError}.
+        """
+        self.actions.append('exit')
+        self.exited = True
+        # Don't forget to raise an error, or you'll end up in parent
+        # code path.
+        raise SystemError()
+
+
+    def ioctl(self, fd, flags, arg):
+        """
+        Override C{fcntl.ioctl}. Do nothing.
+        """
+
+
+    def setNonBlocking(self, fd):
+        """
+        Override C{fdesc.setNonBlocking}. Do nothing.
+        """
+
+
+    def waitpid(self, pid, options):
+        """
+        Override C{os.waitpid}. Save the executed action and return values
+        meaning that the child process has exited.
+        """
+        self.actions.append('waitpid')
+        if self.raiseWaitPid is not None:
+            raise self.raiseWaitPid
+        if self.waitChild is not None:
+            return self.waitChild
+        return 1, 0
+
+
+    def settrace(self, arg):
+        """
+        Override C{sys.settrace} to keep coverage working.
+        """
+
+
+    def getgid(self):
+        """
+        Override C{os.getgid}. Return a dumb number.
+        """
+        return 1235
+
+
+    def getuid(self):
+        """
+        Override C{os.getuid}. Return a dumb number.
+        """
+        return 1237
+
+
+    def setuid(self, val):
+        """
+        Override C{os.setuid}. Do nothing.
+        """
+        self.actions.append(('setuid', val))
+
+
+    def setgid(self, val):
+        """
+        Override C{os.setgid}. Do nothing.
+        """
+        self.actions.append(('setgid', val))
+
+
+    def setregid(self, val1, val2):
+        """
+        Override C{os.setregid}. Do nothing.
+        """
+        self.actions.append(('setregid', val1, val2))
+
+
+    def setreuid(self, val1, val2):
+        """
+        Override C{os.setreuid}.  Save the action.
+        """
+        self.actions.append(('setreuid', val1, val2))
+
+
+    def switchUID(self, uid, gid):
+        """
+        Override C{util.switchuid}. Save the action.
+        """
+        self.actions.append(('switchuid', uid, gid))
+
+
+    def openpty(self):
+        """
+        Override C{pty.openpty}, returning fake file descriptors.
+        """
+        return -12, -13
+
+
+    def geteuid(self):
+        """
+        Mock C{os.geteuid}, returning C{self.euid} instead.
+        """
+        return self.euid
+
+
+    def getegid(self):
+        """
+        Mock C{os.getegid}, returning C{self.egid} instead.
+        """
+        return self.egid
+
+
+    def seteuid(self, egid):
+        """
+        Mock C{os.seteuid}, store result.
+        """
+        self.seteuidCalls.append(egid)
+
+
+    def setegid(self, egid):
+        """
+        Mock C{os.setegid}, store result.
+        """
+        self.setegidCalls.append(egid)
+
+
+    def expanduser(self, path):
+        """
+        Mock C{os.path.expanduser}.
+        """
+        return self.path
+
+
+    def getpwnam(self, user):
+        """
+        Mock C{pwd.getpwnam}.
+        """
+        return 0, 0, 1, 2
+
+
+    def listdir(self, path):
+        """
+        Override C{os.listdir}, returning fake contents of '/dev/fd'
+        """
+        return "-1", "-2"
+
+
+    def kill(self, pid, signalID):
+        """
+        Override C{os.kill}: save the action and raise C{self.raiseKill} if
+        specified.
+        """
+        self.actions.append(('kill', pid, signalID))
+        if self.raiseKill is not None:
+            raise self.raiseKill
+
+
+
+if process is not None:
+    class DumbProcessWriter(process.ProcessWriter):
+        """
+        A fake L{process.ProcessWriter} used for tests.
+        """
+
+        def startReading(self):
+            """
+            Here's the faking: don't do anything here.
+            """
+
+
+
+    class DumbProcessReader(process.ProcessReader):
+        """
+        A fake L{process.ProcessReader} used for tests.
+        """
+
+        def startReading(self):
+            """
+            Here's the faking: don't do anything here.
+            """
+
+
+
+    class DumbPTYProcess(process.PTYProcess):
+        """
+        A fake L{process.PTYProcess} used for tests.
+        """
+
+        def startReading(self):
+            """
+            Here's the faking: don't do anything here.
+            """
+
+
+
+class MockProcessTestCase(unittest.TestCase):
+    """
+    Mock a process runner to test forked child code path.
+    """
+    if process is None:
+        skip = "twisted.internet.process is never used on Windows"
+
+    def setUp(self):
+        """
+        Replace the os, fcntl, sys, switchUID, fdesc and pty modules used by
+        L{process} with the mock class L{MockOS}.
+        """
+        if gc.isenabled():
+            self.addCleanup(gc.enable)
+        else:
+            self.addCleanup(gc.disable)
+        self.mockos = MockOS()
+        self.mockos.euid = 1236
+        self.mockos.egid = 1234
+        self.patch(process, "os", self.mockos)
+        self.patch(process, "fcntl", self.mockos)
+        self.patch(process, "sys", self.mockos)
+        self.patch(process, "switchUID", self.mockos.switchUID)
+        self.patch(process, "fdesc", self.mockos)
+        self.patch(process.Process, "processReaderFactory", DumbProcessReader)
+        self.patch(process.Process, "processWriterFactory", DumbProcessWriter)
+        self.patch(process, "pty", self.mockos)
+
+        self.mocksig = MockSignal()
+        self.patch(process, "signal", self.mocksig)
+
+
+    def tearDown(self):
+        """
+        Reset processes registered for reap.
+        """
+        process.reapProcessHandlers = {}
+
+
+    def test_mockFork(self):
+        """
+        Test a classic spawnProcess. Check the path of the child code:
+        fork, exec, exit.
+        """
+        gc.enable()
+
+        cmd = '/mock/ouch'
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        try:
+            reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                                 usePTY=False)
+        except SystemError:
+            self.assert_(self.mockos.exited)
+            self.assertEqual(
+                self.mockos.actions, [("fork", False), "exec", "exit"])
+        else:
+            self.fail("Should not be here")
+
+        # It should leave the garbage collector disabled.
+        self.assertFalse(gc.isenabled())
+
+
+    def _mockForkInParentTest(self):
+        """
+        Assert that in the main process, spawnProcess disables the garbage
+        collector, calls fork, closes the pipe file descriptors it created for
+        the child process, and calls waitpid.
+        """
+        self.mockos.child = False
+        cmd = '/mock/ouch'
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                             usePTY=False)
+        # It should close the first read pipe, and the last two write pipes
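+        # (MockOS.pipe hands out (-1, -2), (-3, -4) and (-5, -6) for the
+        # three pipes, so the closed fds -1, -4 and -6 are the read end of
+        # the first pipe and the write ends of the last two.)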
+        self.assertEqual(set(self.mockos.closed), set([-1, -4, -6]))
+        self.assertEqual(self.mockos.actions, [("fork", False), "waitpid"])
+
+
+    def test_mockForkInParentGarbageCollectorEnabled(self):
+        """
+        The garbage collector should be enabled when L{reactor.spawnProcess}
+        returns if it was initially enabled.
+
+        @see L{_mockForkInParentTest}
+        """
+        gc.enable()
+        self._mockForkInParentTest()
+        self.assertTrue(gc.isenabled())
+
+
+    def test_mockForkInParentGarbageCollectorDisabled(self):
+        """
+        The garbage collector should be disabled when L{reactor.spawnProcess}
+        returns if it was initially disabled.
+
+        @see L{_mockForkInParentTest}
+        """
+        gc.disable()
+        self._mockForkInParentTest()
+        self.assertFalse(gc.isenabled())
+
+
+    def test_mockForkTTY(self):
+        """
+        Test a TTY spawnProcess: check the path of the child code:
+        fork, exec, exit.
+        """
+        cmd = '/mock/ouch'
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        try:
+            reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                                 usePTY=True)
+        except SystemError:
+            self.assert_(self.mockos.exited)
+            self.assertEqual(
+                self.mockos.actions, [("fork", False), "exec", "exit"])
+        else:
+            self.fail("Should not be here")
+
+
+    def _mockWithForkError(self):
+        """
+        Assert that if the fork call fails, no other process setup calls are
+        made and that spawnProcess raises the exception fork raised.
+        """
+        self.mockos.raiseFork = OSError(errno.EAGAIN, None)
+        protocol = TrivialProcessProtocol(None)
+        self.assertRaises(OSError, reactor.spawnProcess, protocol, None)
+        self.assertEqual(self.mockos.actions, [("fork", False)])
+
+
+    def test_mockWithForkErrorGarbageCollectorEnabled(self):
+        """
+        The garbage collector should be enabled when L{reactor.spawnProcess}
+        raises because L{os.fork} raised, if it was initially enabled.
+        """
+        gc.enable()
+        self._mockWithForkError()
+        self.assertTrue(gc.isenabled())
+
+
+    def test_mockWithForkErrorGarbageCollectorDisabled(self):
+        """
+        The garbage collector should be disabled when
+        L{reactor.spawnProcess} raises because L{os.fork} raised, if it was
+        initially disabled.
+        """
+        gc.disable()
+        self._mockWithForkError()
+        self.assertFalse(gc.isenabled())
+
+
+    def test_mockForkErrorCloseFDs(self):
+        """
+        When C{os.fork} raises an exception, the file descriptors created
+        before are closed and don't leak.
+        """
+        self._mockWithForkError()
+        self.assertEqual(set(self.mockos.closed), set([-1, -4, -6, -2, -3, -5]))
+
+
+    def test_mockForkErrorGivenFDs(self):
+        """
+        When C{os.fork} raises an exception and file descriptors have
+        been specified with the C{childFDs} argument of
+        L{reactor.spawnProcess}, they are not closed.
+        """
+        self.mockos.raiseFork = OSError(errno.EAGAIN, None)
+        protocol = TrivialProcessProtocol(None)
+        self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
+            childFDs={0: -10, 1: -11, 2: -13})
+        self.assertEqual(self.mockos.actions, [("fork", False)])
+        self.assertEqual(self.mockos.closed, [])
+
+        # We can also put "r" or "w" to let twisted create the pipes
+        self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
+            childFDs={0: "r", 1: -11, 2: -13})
+        self.assertEqual(set(self.mockos.closed), set([-1, -2]))
+
+
+    def test_mockForkErrorClosePTY(self):
+        """
+        When C{os.fork} raises an exception, the file descriptors created by
+        C{pty.openpty} are closed and don't leak, when C{usePTY} is set to
+        C{True}.
+        """
+        self.mockos.raiseFork = OSError(errno.EAGAIN, None)
+        protocol = TrivialProcessProtocol(None)
+        self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
+                          usePTY=True)
+        self.assertEqual(self.mockos.actions, [("fork", False)])
+        self.assertEqual(set(self.mockos.closed), set([-12, -13]))
+
+
+    def test_mockForkErrorPTYGivenFDs(self):
+        """
+        If a tuple is passed to C{usePTY} to specify slave and master file
+        descriptors and C{os.fork} raises an exception, these file
+        descriptors aren't closed.
+        """
+        self.mockos.raiseFork = OSError(errno.EAGAIN, None)
+        protocol = TrivialProcessProtocol(None)
+        self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
+                          usePTY=(-20, -21, 'foo'))
+        self.assertEqual(self.mockos.actions, [("fork", False)])
+        self.assertEqual(self.mockos.closed, [])
+
+
+    def test_mockWithExecError(self):
+        """
+        Spawn a process but simulate an error during execution in the child
+        path: C{os.execvpe} raises an error. It should close all the standard
+        fds, try to print the error encountered, and exit cleanly.
+        """
+        cmd = '/mock/ouch'
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        self.mockos.raiseExec = True
+        try:
+            reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                                 usePTY=False)
+        except SystemError:
+            self.assert_(self.mockos.exited)
+            self.assertEqual(
+                self.mockos.actions, [("fork", False), "exec", "exit"])
+            # Check that fd have been closed
+            self.assertIn(0, self.mockos.closed)
+            self.assertIn(1, self.mockos.closed)
+            self.assertIn(2, self.mockos.closed)
+            # Check content of traceback
+            self.assertIn("RuntimeError: Bar", self.mockos.fdio.getvalue())
+        else:
+            self.fail("Should not be here")
+
+
+    def test_mockSetUid(self):
+        """
+        Try creating a process with setting its uid: it's almost the same path
+        as the standard path, but with a C{switchUID} call before the exec.
+        """
+        cmd = '/mock/ouch'
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        try:
+            reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                                 usePTY=False, uid=8080)
+        except SystemError:
+            self.assert_(self.mockos.exited)
+            self.assertEqual(self.mockos.actions,
+                [('setuid', 0), ('setgid', 0), ('fork', False),
+                  ('switchuid', 8080, 1234), 'exec', 'exit'])
+        else:
+            self.fail("Should not be here")
+
+
+    def test_mockSetUidInParent(self):
+        """
+        Try creating a process with setting its uid, in the parent path: it
+        should switch to root before fork, then restore initial uid/gids.
+        """
+        self.mockos.child = False
+        cmd = '/mock/ouch'
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                             usePTY=False, uid=8080)
+        self.assertEqual(self.mockos.actions,
+            [('setuid', 0), ('setgid', 0), ('fork', False),
+             ('setregid', 1235, 1234), ('setreuid', 1237, 1236), 'waitpid'])
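+        # (1235/1237 come from MockOS.getgid/getuid and 1234/1236 from the
+        # egid/euid set in setUp; the parent path restores the original real
+        # and effective ids after switching to root for the fork.)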
+
+
+    def test_mockPTYSetUid(self):
+        """
+        Try creating a PTY process with setting its uid: it's almost the same
+        path as the standard path, but with a C{switchUID} call before the
+        exec.
+        """
+        cmd = '/mock/ouch'
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        try:
+            reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                                 usePTY=True, uid=8081)
+        except SystemError:
+            self.assert_(self.mockos.exited)
+            self.assertEqual(self.mockos.actions,
+                [('setuid', 0), ('setgid', 0), ('fork', False),
+                  ('switchuid', 8081, 1234), 'exec', 'exit'])
+        else:
+            self.fail("Should not be here")
+
+
+    def test_mockPTYSetUidInParent(self):
+        """
+        Try creating a PTY process with setting its uid, in the parent path: it
+        should switch to root before fork, then restore initial uid/gids.
+        """
+        self.mockos.child = False
+        cmd = '/mock/ouch'
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        oldPTYProcess = process.PTYProcess
+        try:
+            process.PTYProcess = DumbPTYProcess
+            reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                                 usePTY=True, uid=8080)
+        finally:
+            process.PTYProcess = oldPTYProcess
+        self.assertEqual(self.mockos.actions,
+            [('setuid', 0), ('setgid', 0), ('fork', False),
+             ('setregid', 1235, 1234), ('setreuid', 1237, 1236), 'waitpid'])
+
+
+    def test_mockWithWaitError(self):
+        """
+        Test that reapProcess logs errors raised.
+        """
+        self.mockos.child = False
+        cmd = '/mock/ouch'
+        self.mockos.waitChild = (0, 0)
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        proc = reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                             usePTY=False)
+        self.assertEqual(self.mockos.actions, [("fork", False), "waitpid"])
+
+        self.mockos.raiseWaitPid = OSError()
+        proc.reapProcess()
+        errors = self.flushLoggedErrors()
+        self.assertEqual(len(errors), 1)
+        errors[0].trap(OSError)
+
+
+    def test_mockErrorECHILDInReapProcess(self):
+        """
+        Test that reapProcess doesn't log anything when waitpid raises an
+        C{OSError} with errno C{ECHILD}.
+        """
+        self.mockos.child = False
+        cmd = '/mock/ouch'
+        self.mockos.waitChild = (0, 0)
+
+        d = defer.Deferred()
+        p = TrivialProcessProtocol(d)
+        proc = reactor.spawnProcess(p, cmd, ['ouch'], env=None,
+                                    usePTY=False)
+        self.assertEqual(self.mockos.actions, [("fork", False), "waitpid"])
+
+        self.mockos.raiseWaitPid = OSError()
+        self.mockos.raiseWaitPid.errno = errno.ECHILD
+        # This should not produce any errors
+        proc.reapProcess()
+
+
+    def test_mockErrorInPipe(self):
+        """
+        If C{os.pipe} raises an exception after some pipes were created, the
+        created pipes are closed and don't leak.
+        """
+        pipes = [-1, -2, -3, -4]
+        def pipe():
+            try:
+                return pipes.pop(0), pipes.pop(0)
+            except IndexError:
+                raise OSError()
+        self.mockos.pipe = pipe
+        protocol = TrivialProcessProtocol(None)
+        self.assertRaises(OSError, reactor.spawnProcess, protocol, None)
+        self.assertEqual(self.mockos.actions, [])
+        self.assertEqual(set(self.mockos.closed), set([-4, -3, -2, -1]))
+
+
+    def test_mockErrorInForkRestoreUID(self):
+        """
+        If C{os.fork} raises an exception and a UID change has been made, the
+        previous UID and GID are restored.
+        """
+        self.mockos.raiseFork = OSError(errno.EAGAIN, None)
+        protocol = TrivialProcessProtocol(None)
+        self.assertRaises(OSError, reactor.spawnProcess, protocol, None,
+                          uid=8080)
+        self.assertEqual(self.mockos.actions,
+            [('setuid', 0), ('setgid', 0), ("fork", False),
+             ('setregid', 1235, 1234), ('setreuid', 1237, 1236)])
+
+
+    def test_kill(self):
+        """
+        L{process.Process.signalProcess} calls C{os.kill} with the process's
+        PID and the signal number matching the given signal name.
+        """
+        self.mockos.child = False
+        self.mockos.waitChild = (0, 0)
+        cmd = '/mock/ouch'
+        p = TrivialProcessProtocol(None)
+        proc = reactor.spawnProcess(p, cmd, ['ouch'], env=None, usePTY=False)
+        proc.signalProcess("KILL")
+        self.assertEqual(self.mockos.actions,
+            [('fork', False), 'waitpid', ('kill', 21, signal.SIGKILL)])
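+        # (21 is the dumb child pid returned by MockOS.fork on the parent
+        # side; the "KILL" name is translated to signal.SIGKILL before
+        # os.kill is called.)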
+
+
+    def test_killExited(self):
+        """
+        L{process.Process.signalProcess} raises L{error.ProcessExitedAlready}
+        if the process has exited.
+        """
+        self.mockos.child = False
+        cmd = '/mock/ouch'
+        p = TrivialProcessProtocol(None)
+        proc = reactor.spawnProcess(p, cmd, ['ouch'], env=None, usePTY=False)
+        # We didn't specify a waitpid value, so the waitpid call in
+        # registerReapProcessHandler has already reaped the process
+        self.assertRaises(error.ProcessExitedAlready,
+                          proc.signalProcess, "KILL")
+
+
+    def test_killExitedButNotDetected(self):
+        """
+        L{process.Process.signalProcess} raises L{error.ProcessExitedAlready}
+        if the process has exited but twisted hasn't seen it (for example,
+        if the process has been waited on outside of twisted): C{os.kill} then
+        raises C{OSError} with C{errno.ESRCH} as errno.
+        """
+        self.mockos.child = False
+        self.mockos.waitChild = (0, 0)
+        cmd = '/mock/ouch'
+        p = TrivialProcessProtocol(None)
+        proc = reactor.spawnProcess(p, cmd, ['ouch'], env=None, usePTY=False)
+        self.mockos.raiseKill = OSError(errno.ESRCH, "Not found")
+        self.assertRaises(error.ProcessExitedAlready,
+                          proc.signalProcess, "KILL")
+
+
+    def test_killErrorInKill(self):
+        """
+        L{process.Process.signalProcess} doesn't mask C{OSError} exceptions if
+        the errno is different from C{errno.ESRCH}.
+        """
+        self.mockos.child = False
+        self.mockos.waitChild = (0, 0)
+        cmd = '/mock/ouch'
+        p = TrivialProcessProtocol(None)
+        proc = reactor.spawnProcess(p, cmd, ['ouch'], env=None, usePTY=False)
+        self.mockos.raiseKill = OSError(errno.EINVAL, "Invalid signal")
+        err = self.assertRaises(OSError,
+                                proc.signalProcess, "KILL")
+        self.assertEqual(err.errno, errno.EINVAL)
+
+
+
+class PosixProcessTestCase(unittest.TestCase, PosixProcessBase):
+    # add two non-pty test cases
+
+    def test_stderr(self):
+        """
+        Bytes written to stderr by the spawned process are passed to the
+        C{errReceived} callback on the C{ProcessProtocol} passed to
+        C{spawnProcess}.
+        """
+        cmd = sys.executable
+
+        value = "42"
+
+        p = Accumulator()
+        d = p.endedDeferred = defer.Deferred()
+        reactor.spawnProcess(p, cmd,
+                             [cmd, "-c",
+                              "import sys; sys.stderr.write('%s')" % (value,)],
+                             env=None, path="/tmp",
+                             usePTY=self.usePTY)
+
+        def processEnded(ign):
+            self.assertEqual(value, p.errF.getvalue())
+        return d.addCallback(processEnded)
+
+
+    def testProcess(self):
+        cmd = self.getCommand('gzip')
+        s = "there's no place like home!\n" * 3
+        p = Accumulator()
+        d = p.endedDeferred = defer.Deferred()
+        reactor.spawnProcess(p, cmd, [cmd, "-c"], env=None, path="/tmp",
+                             usePTY=self.usePTY)
+        p.transport.write(s)
+        p.transport.closeStdin()
+
+        def processEnded(ign):
+            f = p.outF
+            f.seek(0, 0)
+            gf = gzip.GzipFile(fileobj=f)
+            self.assertEqual(gf.read(), s)
+        return d.addCallback(processEnded)
+
+
+
+class PosixProcessTestCasePTY(unittest.TestCase, PosixProcessBase):
+    """
+    Just like PosixProcessTestCase, but use ptys instead of pipes.
+    """
+    usePTY = True
+    # PTYs only offer one input and one output. What still makes sense?
+    # testNormalTermination
+    # test_abnormalTermination
+    # testSignal
+    # testProcess, but not without p.transport.closeStdin
+    #  might be solvable: TODO: add test if so
+
+    def testOpeningTTY(self):
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_tty.py")
+        p = Accumulator()
+        d = p.endedDeferred = defer.Deferred()
+        reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None,
+                            path=None, usePTY=self.usePTY)
+        p.transport.write("hello world!\n")
+
+        def processEnded(ign):
+            self.assertRaises(
+                error.ProcessExitedAlready, p.transport.signalProcess, 'HUP')
+            self.assertEqual(
+                p.outF.getvalue(),
+                "hello world!\r\nhello world!\r\n",
+                "Error message from process_tty follows:\n\n%s\n\n" % p.outF.getvalue())
+        return d.addCallback(processEnded)
+
+
+    def testBadArgs(self):
+        pyExe = sys.executable
+        pyArgs = [pyExe, "-u", "-c", "print 'hello'"]
+        p = Accumulator()
+        self.assertRaises(ValueError, reactor.spawnProcess, p, pyExe, pyArgs,
+            usePTY=1, childFDs={1:'r'})
+
+
+
+class Win32SignalProtocol(SignalProtocol):
+    """
+    A win32-specific process protocol that handles C{processEnded}
+    differently: processes should exit with exit code 1.
+    """
+
+    def processEnded(self, reason):
+        """
+        Callback C{self.deferred} with C{None} if C{reason} is a
+        L{error.ProcessTerminated} failure with C{exitCode} set to 1.
+        Otherwise, errback with a C{ValueError} describing the problem.
+        """
+        if not reason.check(error.ProcessTerminated):
+            return self.deferred.errback(
+                ValueError("wrong termination: %s" % (reason,)))
+        v = reason.value
+        if v.exitCode != 1:
+            return self.deferred.errback(
+                ValueError("Wrong exit code: %s" % (reason.exitCode,)))
+        self.deferred.callback(None)
+
+
+
+class Win32ProcessTestCase(unittest.TestCase):
+    """
+    Test process programs that are packaged with twisted.
+    """
+
+    def testStdinReader(self):
+        pyExe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_stdinreader.py")
+        p = Accumulator()
+        d = p.endedDeferred = defer.Deferred()
+        reactor.spawnProcess(p, pyExe, [pyExe, "-u", scriptPath], env=None,
+                             path=None)
+        p.transport.write("hello, world")
+        p.transport.closeStdin()
+
+        def processEnded(ign):
+            self.assertEqual(p.errF.getvalue(), "err\nerr\n")
+            self.assertEqual(p.outF.getvalue(), "out\nhello, world\nout\n")
+        return d.addCallback(processEnded)
+
+
+    def testBadArgs(self):
+        pyExe = sys.executable
+        pyArgs = [pyExe, "-u", "-c", "print 'hello'"]
+        p = Accumulator()
+        self.assertRaises(ValueError,
+            reactor.spawnProcess, p, pyExe, pyArgs, uid=1)
+        self.assertRaises(ValueError,
+            reactor.spawnProcess, p, pyExe, pyArgs, gid=1)
+        self.assertRaises(ValueError,
+            reactor.spawnProcess, p, pyExe, pyArgs, usePTY=1)
+        self.assertRaises(ValueError,
+            reactor.spawnProcess, p, pyExe, pyArgs, childFDs={1:'r'})
+
+
+    def _testSignal(self, sig):
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_signal.py")
+        d = defer.Deferred()
+        p = Win32SignalProtocol(d, sig)
+        reactor.spawnProcess(p, exe, [exe, "-u", scriptPath], env=None)
+        return d
+
+
+    def test_signalTERM(self):
+        """
+        Sending the SIGTERM signal terminates a created process, and
+        C{processEnded} is called with a L{error.ProcessTerminated} instance
+        with the C{exitCode} attribute set to 1.
+        """
+        return self._testSignal('TERM')
+
+
+    def test_signalINT(self):
+        """
+        Sending the SIGINT signal terminates a created process, and
+        C{processEnded} is called with a L{error.ProcessTerminated} instance
+        with the C{exitCode} attribute set to 1.
+        """
+        return self._testSignal('INT')
+
+
+    def test_signalKILL(self):
+        """
+        Sending the SIGKILL signal terminates a created process, and
+        C{processEnded} is called with a L{error.ProcessTerminated} instance
+        with the C{exitCode} attribute set to 1.
+        """
+        return self._testSignal('KILL')
+
+
+    def test_closeHandles(self):
+        """
+        The win32 handles should be properly closed when the process exits.
+        """
+        import win32api
+
+        connected = defer.Deferred()
+        ended = defer.Deferred()
+
+        class SimpleProtocol(protocol.ProcessProtocol):
+            """
+            A protocol that fires deferreds when connected and disconnected.
+            """
+            def makeConnection(self, transport):
+                connected.callback(transport)
+
+            def processEnded(self, reason):
+                ended.callback(None)
+
+        p = SimpleProtocol()
+
+        pyExe = sys.executable
+        pyArgs = [pyExe, "-u", "-c", "print 'hello'"]
+        proc = reactor.spawnProcess(p, pyExe, pyArgs)
+
+        def cbConnected(transport):
+            self.assertIdentical(transport, proc)
+            # perform a basic validity test on the handles
+            win32api.GetHandleInformation(proc.hProcess)
+            win32api.GetHandleInformation(proc.hThread)
+            # And save their values for later
+            self.hProcess = proc.hProcess
+            self.hThread = proc.hThread
+        connected.addCallback(cbConnected)
+
+        def checkTerminated(ignored):
+            # The attributes on the process object must be reset...
+            self.assertIdentical(proc.pid, None)
+            self.assertIdentical(proc.hProcess, None)
+            self.assertIdentical(proc.hThread, None)
+            # ...and the handles must be closed.
+            self.assertRaises(win32api.error,
+                              win32api.GetHandleInformation, self.hProcess)
+            self.assertRaises(win32api.error,
+                              win32api.GetHandleInformation, self.hThread)
+        ended.addCallback(checkTerminated)
+
+        return defer.gatherResults([connected, ended])
+
+
+
+class Win32UnicodeEnvironmentTest(unittest.TestCase):
+    """
+    Tests for Unicode environment on Windows
+    """
+    goodKey = u'UNICODE'
+    goodValue = u'UNICODE'
+
+    def test_encodableUnicodeEnvironment(self):
+        """
+        An ascii-encodable Unicode value stored in C{os.environ} (which every
+        subprocess on Windows inherits) reaches the child process intact.
+        This is different from passing a Unicode environment explicitly to
+        spawnProcess (which is not supported).
+        """
+        os.environ[self.goodKey] = self.goodValue
+        self.addCleanup(operator.delitem, os.environ, self.goodKey)
+
+        p = GetEnvironmentDictionary.run(reactor, [], {})
+        def gotEnvironment(environ):
+            self.assertEqual(
+                environ[self.goodKey.encode('ascii')],
+                self.goodValue.encode('ascii'))
+        return p.getResult().addCallback(gotEnvironment)
+
+
+
+class Dumbwin32procPidTest(unittest.TestCase):
+    """
+    Simple test for the pid attribute of Process on win32.
+    """
+
+    def test_pid(self):
+        """
+        Launch a process using the mock win32process module. The only mocked
+        aspect of that module is that the pid of the created process is
+        always 42.
+        """
+        from twisted.internet import _dumbwin32proc
+        from twisted.test import mock_win32process
+        self.patch(_dumbwin32proc, "win32process", mock_win32process)
+        exe = sys.executable
+        scriptPath = util.sibpath(__file__, "process_cmdline.py")
+
+        d = defer.Deferred()
+        processProto = TrivialProcessProtocol(d)
+        comspec = str(os.environ["COMSPEC"])
+        cmd = [comspec, "/c", exe, scriptPath]
+
+        p = _dumbwin32proc.Process(reactor,
+                                  processProto,
+                                  None,
+                                  cmd,
+                                  {},
+                                  None)
+        self.assertEqual(42, p.pid)
+        self.assertEqual("<Process pid=42>", repr(p))
+
+        def pidCompleteCb(result):
+            self.assertEqual(None, p.pid)
+        return d.addCallback(pidCompleteCb)
+
+
+
+class UtilTestCase(unittest.TestCase):
+    """
+    Tests for process-related helper functions (currently only
+    L{procutils.which}).
+    """
+    def setUp(self):
+        """
+        Create several directories and files, some of which are executable
+        and some of which are not.  Save the current PATH setting.
+        """
+        j = os.path.join
+
+        base = self.mktemp()
+
+        self.foo = j(base, "foo")
+        self.baz = j(base, "baz")
+        self.foobar = j(self.foo, "bar")
+        self.foobaz = j(self.foo, "baz")
+        self.bazfoo = j(self.baz, "foo")
+        self.bazbar = j(self.baz, "bar")
+
+        for d in self.foobar, self.foobaz, self.bazfoo, self.bazbar:
+            os.makedirs(d)
+
+        for name, mode in [(j(self.foobaz, "executable"), 0700),
+                           (j(self.foo, "executable"), 0700),
+                           (j(self.bazfoo, "executable"), 0700),
+                           (j(self.bazfoo, "executable.bin"), 0700),
+                           (j(self.bazbar, "executable"), 0)]:
+            f = file(name, "w")
+            f.close()
+            os.chmod(name, mode)
+
+        self.oldPath = os.environ.get('PATH', None)
+        os.environ['PATH'] = os.pathsep.join((
+            self.foobar, self.foobaz, self.bazfoo, self.bazbar))
+
+
+    def tearDown(self):
+        """
+        Restore the saved PATH setting, and make the unwritable created file
+        writable again so that it can be deleted easily.
+        """
+        os.chmod(os.path.join(self.bazbar, "executable"), stat.S_IWUSR)
+        if self.oldPath is None:
+            try:
+                del os.environ['PATH']
+            except KeyError:
+                pass
+        else:
+            os.environ['PATH'] = self.oldPath
+
+
+    def test_whichWithoutPATH(self):
+        """
+        Test that if C{os.environ} does not have a C{'PATH'} key,
+        L{procutils.which} returns an empty list.
+        """
+        del os.environ['PATH']
+        self.assertEqual(procutils.which("executable"), [])
+
+
+    def testWhich(self):
+        j = os.path.join
+        paths = procutils.which("executable")
+        expectedPaths = [j(self.foobaz, "executable"),
+                         j(self.bazfoo, "executable")]
+        if runtime.platform.isWindows():
+            expectedPaths.append(j(self.bazbar, "executable"))
+        self.assertEqual(paths, expectedPaths)
+
+
+    def testWhichPathExt(self):
+        j = os.path.join
+        old = os.environ.get('PATHEXT', None)
+        os.environ['PATHEXT'] = os.pathsep.join(('.bin', '.exe', '.sh'))
+        try:
+            paths = procutils.which("executable")
+        finally:
+            if old is None:
+                del os.environ['PATHEXT']
+            else:
+                os.environ['PATHEXT'] = old
+        expectedPaths = [j(self.foobaz, "executable"),
+                         j(self.bazfoo, "executable"),
+                         j(self.bazfoo, "executable.bin")]
+        if runtime.platform.isWindows():
+            expectedPaths.append(j(self.bazbar, "executable"))
+        self.assertEqual(paths, expectedPaths)
+
+
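+# The UtilTestCase above exercises L{twisted.python.procutils.which}.  A
+# minimal usage sketch (illustration only; "python" is just an assumed
+# program name): which() scans os.environ['PATH'] for executables with the
+# given name, also trying any suffixes listed in os.environ['PATHEXT'], and
+# returns every match as an absolute path in PATH order.
+def _exampleWhich(name="python"):
+    from twisted.python import procutils
+    candidates = procutils.which(name)
+    # which() returns a (possibly empty) list; take the first hit, if any.
+    if candidates:
+        return candidates[0]
+    return None
+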
+
+class ClosingPipesProcessProtocol(protocol.ProcessProtocol):
+    output = ''
+    errput = ''
+
+    def __init__(self, outOrErr):
+        self.deferred = defer.Deferred()
+        self.outOrErr = outOrErr
+
+    def processEnded(self, reason):
+        self.deferred.callback(reason)
+
+    def outReceived(self, data):
+        self.output += data
+
+    def errReceived(self, data):
+        self.errput += data
+
+
+
+class ClosingPipes(unittest.TestCase):
+
+    def doit(self, fd):
+        """
+        Create a child process and close one of its output descriptors using
+        L{IProcessTransport.closeStdout} or L{IProcessTransport.closeStderr}.
+        Return a L{Deferred} which fires after verifying that the descriptor was
+        really closed.
+        """
+        p = ClosingPipesProcessProtocol(True)
+        self.assertFailure(p.deferred, error.ProcessTerminated)
+        p.deferred.addCallback(self._endProcess, p)
+        reactor.spawnProcess(
+            p, sys.executable, [
+                sys.executable, '-u', '-c',
+                'raw_input()\n'
+                'import sys, os, time\n'
+                # Give the system a bit of time to notice the closed
+                # descriptor.  Another option would be to poll() for HUP
+                # instead of relying on an os.write to fail with SIGPIPE.
+                # However, that wouldn't work on OS X (or Windows?).
+                'for i in range(1000):\n'
+                '    os.write(%d, "foo\\n")\n'
+                '    time.sleep(0.01)\n'
+                'sys.exit(42)\n' % (fd,)
+                ],
+            env=None)
+
+        if fd == 1:
+            p.transport.closeStdout()
+        elif fd == 2:
+            p.transport.closeStderr()
+        else:
+            raise RuntimeError
+
+        # Give the close time to propagate
+        p.transport.write('go\n')
+
+        # make the buggy case not hang
+        p.transport.closeStdin()
+        return p.deferred
+
+
+    def _endProcess(self, reason, p):
+        """
+        Check that a failed write prevented the process from getting to its
+        custom exit code.
+        """
+        # child must not get past that write without raising
+        self.assertNotEquals(
+            reason.exitCode, 42, 'process reason was %r' % reason)
+        self.assertEqual(p.output, '')
+        return p.errput
+
+
+    def test_stdout(self):
+        """
+        ProcessProtocol.transport.closeStdout actually closes the pipe.
+        """
+        d = self.doit(1)
+        def _check(errput):
+            self.assertIn('OSError', errput)
+            if runtime.platform.getType() != 'win32':
+                self.assertIn('Broken pipe', errput)
+        d.addCallback(_check)
+        return d
+
+
+    def test_stderr(self):
+        """
+        ProcessProtocol.transport.closeStderr actually closes the pipe.
+        """
+        d = self.doit(2)
+        def _check(errput):
+            # there should be no stderr open, so nothing for it to
+            # write the error to.
+            self.assertEqual(errput, '')
+        d.addCallback(_check)
+        return d
+
+
+skipMessage = "wrong platform or reactor doesn't support IReactorProcess"
+if (runtime.platform.getType() != 'posix') or (not interfaces.IReactorProcess(reactor, None)):
+    PosixProcessTestCase.skip = skipMessage
+    PosixProcessTestCasePTY.skip = skipMessage
+    TestTwoProcessesPosix.skip = skipMessage
+    FDTest.skip = skipMessage
+
+if (runtime.platform.getType() != 'win32') or (not interfaces.IReactorProcess(reactor, None)):
+    Win32ProcessTestCase.skip = skipMessage
+    TestTwoProcessesNonPosix.skip = skipMessage
+    Dumbwin32procPidTest.skip = skipMessage
+    Win32UnicodeEnvironmentTest.skip = skipMessage
+
+if not interfaces.IReactorProcess(reactor, None):
+    ProcessTestCase.skip = skipMessage
+    ClosingPipes.skip = skipMessage
+
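+
+# The skip logic above uses the zope.interface adaptation idiom: calling an
+# interface with (obj, default) returns obj when it provides the interface
+# and the default otherwise.  A minimal sketch of the same capability check
+# (illustration only):
+def _exampleReactorSupportsProcesses():
+    from twisted.internet import interfaces, reactor
+    return interfaces.IReactorProcess(reactor, None) is not None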
diff --git a/ThirdParty/Twisted/twisted/test/test_protocols.py b/ThirdParty/Twisted/twisted/test/test_protocols.py
new file mode 100644
index 0000000..e2915d9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_protocols.py
@@ -0,0 +1,236 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for twisted.protocols package.
+"""
+
+from twisted.trial import unittest
+from twisted.protocols import wire, portforward
+from twisted.internet import reactor, defer, address, protocol
+from twisted.test import proto_helpers
+
+
+class WireTestCase(unittest.TestCase):
+    """
+    Test wire protocols.
+    """
+
+    def test_echo(self):
+        """
+        Test the wire.Echo protocol: send some data and check that it is sent back.
+        """
+        t = proto_helpers.StringTransport()
+        a = wire.Echo()
+        a.makeConnection(t)
+        a.dataReceived("hello")
+        a.dataReceived("world")
+        a.dataReceived("how")
+        a.dataReceived("are")
+        a.dataReceived("you")
+        self.assertEqual(t.value(), "helloworldhowareyou")
+
+
+    def test_who(self):
+        """
+        Test wire.Who protocol.
+        """
+        t = proto_helpers.StringTransport()
+        a = wire.Who()
+        a.makeConnection(t)
+        self.assertEqual(t.value(), "root\r\n")
+
+
+    def test_QOTD(self):
+        """
+        Test wire.QOTD protocol.
+        """
+        t = proto_helpers.StringTransport()
+        a = wire.QOTD()
+        a.makeConnection(t)
+        self.assertEqual(t.value(),
+                          "An apple a day keeps the doctor away.\r\n")
+
+
+    def test_discard(self):
+        """
+        Test wire.Discard protocol.
+        """
+        t = proto_helpers.StringTransport()
+        a = wire.Discard()
+        a.makeConnection(t)
+        a.dataReceived("hello")
+        a.dataReceived("world")
+        a.dataReceived("how")
+        a.dataReceived("are")
+        a.dataReceived("you")
+        self.assertEqual(t.value(), "")
+
+
+
+class TestableProxyClientFactory(portforward.ProxyClientFactory):
+    """
+    Test proxy client factory that keeps the last created protocol instance.
+
+    @ivar protoInstance: the last instance of the protocol.
+    @type protoInstance: L{portforward.ProxyClient}
+    """
+
+    def buildProtocol(self, addr):
+        """
+        Create the protocol instance and keep track of it.
+        """
+        proto = portforward.ProxyClientFactory.buildProtocol(self, addr)
+        self.protoInstance = proto
+        return proto
+
+
+
+class TestableProxyFactory(portforward.ProxyFactory):
+    """
+    Test proxy factory that keeps the last created protocol instance.
+
+    @ivar protoInstance: the last instance of the protocol.
+    @type protoInstance: L{portforward.ProxyServer}
+
+    @ivar clientFactoryInstance: client factory used by C{protoInstance} to
+        create forward connections.
+    @type clientFactoryInstance: L{TestableProxyClientFactory}
+    """
+
+    def buildProtocol(self, addr):
+        """
+        Create the protocol instance, keep track of it, and make it use
+        C{clientFactoryInstance} as its client factory.
+        """
+        proto = portforward.ProxyFactory.buildProtocol(self, addr)
+        self.clientFactoryInstance = TestableProxyClientFactory()
+        # Force the use of this specific instance
+        proto.clientProtocolFactory = lambda: self.clientFactoryInstance
+        self.protoInstance = proto
+        return proto
+
+
+
+class Portforwarding(unittest.TestCase):
+    """
+    Test port forwarding.
+    """
+
+    def setUp(self):
+        self.serverProtocol = wire.Echo()
+        self.clientProtocol = protocol.Protocol()
+        self.openPorts = []
+
+
+    def tearDown(self):
+        try:
+            self.proxyServerFactory.protoInstance.transport.loseConnection()
+        except AttributeError:
+            pass
+        try:
+            pi = self.proxyServerFactory.clientFactoryInstance.protoInstance
+            pi.transport.loseConnection()
+        except AttributeError:
+            pass
+        try:
+            self.clientProtocol.transport.loseConnection()
+        except AttributeError:
+            pass
+        try:
+            self.serverProtocol.transport.loseConnection()
+        except AttributeError:
+            pass
+        return defer.gatherResults(
+            [defer.maybeDeferred(p.stopListening) for p in self.openPorts])
+
+
+    def test_portforward(self):
+        """
+        Test port forwarding through Echo protocol.
+        """
+        realServerFactory = protocol.ServerFactory()
+        realServerFactory.protocol = lambda: self.serverProtocol
+        realServerPort = reactor.listenTCP(0, realServerFactory,
+                                           interface='127.0.0.1')
+        self.openPorts.append(realServerPort)
+        self.proxyServerFactory = TestableProxyFactory('127.0.0.1',
+                                realServerPort.getHost().port)
+        proxyServerPort = reactor.listenTCP(0, self.proxyServerFactory,
+                                            interface='127.0.0.1')
+        self.openPorts.append(proxyServerPort)
+
+        nBytes = 1000
+        received = []
+        d = defer.Deferred()
+
+        def testDataReceived(data):
+            received.extend(data)
+            if len(received) >= nBytes:
+                self.assertEqual(''.join(received), 'x' * nBytes)
+                d.callback(None)
+
+        self.clientProtocol.dataReceived = testDataReceived
+
+        def testConnectionMade():
+            self.clientProtocol.transport.write('x' * nBytes)
+
+        self.clientProtocol.connectionMade = testConnectionMade
+
+        clientFactory = protocol.ClientFactory()
+        clientFactory.protocol = lambda: self.clientProtocol
+
+        reactor.connectTCP(
+            '127.0.0.1', proxyServerPort.getHost().port, clientFactory)
+
+        return d
+
+
+    def test_registerProducers(self):
+        """
+        The proxy client registers itself as a producer of the proxy server and
+        vice versa.
+        """
+        # create a ProxyServer instance
+        addr = address.IPv4Address('TCP', '127.0.0.1', 0)
+        server = portforward.ProxyFactory('127.0.0.1', 0).buildProtocol(addr)
+
+        # set the reactor for this test
+        reactor = proto_helpers.MemoryReactor()
+        server.reactor = reactor
+
+        # make the connection
+        serverTransport = proto_helpers.StringTransport()
+        server.makeConnection(serverTransport)
+
+        # check that the ProxyClientFactory is connecting to the backend
+        self.assertEqual(len(reactor.tcpClients), 1)
+        # get the factory instance and check it's the one we expect
+        host, port, clientFactory, timeout, _ = reactor.tcpClients[0]
+        self.assertIsInstance(clientFactory, portforward.ProxyClientFactory)
+
+        # Connect it
+        client = clientFactory.buildProtocol(addr)
+        clientTransport = proto_helpers.StringTransport()
+        client.makeConnection(clientTransport)
+
+        # check that the producers are registered
+        self.assertIdentical(clientTransport.producer, serverTransport)
+        self.assertIdentical(serverTransport.producer, clientTransport)
+        # check the streaming attribute in both transports
+        self.assertTrue(clientTransport.streaming)
+        self.assertTrue(serverTransport.streaming)
+
+
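+# The Portforwarding tests above drive L{portforward.ProxyFactory} through a
+# real reactor.  A minimal sketch of using the same factory outside of trial
+# (illustration only; the port numbers are assumptions):
+def _exampleForwardPort():
+    from twisted.internet import reactor
+    from twisted.protocols import portforward
+    # Forward connections made to local port 8025 on to 127.0.0.1:25.
+    reactor.listenTCP(8025, portforward.ProxyFactory('127.0.0.1', 25))
+    reactor.run()
+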
+
+class StringTransportTestCase(unittest.TestCase):
+    """
+    Test L{proto_helpers.StringTransport} helper behaviour.
+    """
+
+    def test_noUnicode(self):
+        """
+        Test that L{proto_helpers.StringTransport} doesn't accept unicode data.
+        """
+        s = proto_helpers.StringTransport()
+        self.assertRaises(TypeError, s.write, u'foo')
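+
+
+# The pattern used throughout this module -- connect a protocol to a
+# L{proto_helpers.StringTransport} and inspect what it wrote -- is the usual
+# way to unit-test Twisted protocols without a running reactor.  A minimal
+# sketch (illustration only):
+def _exampleEchoRoundTrip():
+    transport = proto_helpers.StringTransport()
+    echo = wire.Echo()
+    echo.makeConnection(transport)
+    echo.dataReceived("ping")
+    # wire.Echo writes the bytes it receives straight back to its transport.
+    return transport.value() == "ping"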
diff --git a/ThirdParty/Twisted/twisted/test/test_randbytes.py b/ThirdParty/Twisted/twisted/test/test_randbytes.py
new file mode 100644
index 0000000..ff3ae00
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_randbytes.py
@@ -0,0 +1,121 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for L{twisted.python.randbytes}.
+"""
+
+from __future__ import division, absolute_import
+
+import os
+
+from twisted.trial import unittest
+from twisted.python import randbytes
+
+
+
+class SecureRandomTestCaseBase(object):
+    """
+    Base class for secureRandom test cases.
+    """
+
+    def _check(self, source):
+        """
+        The given random bytes source should return the number of bytes
+        requested each time it is called and should probably not return the
+        same bytes on two consecutive calls (although this is a perfectly
+        legitimate occurrence and rejecting it may generate a spurious failure
+        -- maybe we'll get lucky and the heat death will come first).
+        """
+        for nbytes in range(17, 25):
+            s = source(nbytes)
+            self.assertEqual(len(s), nbytes)
+            s2 = source(nbytes)
+            self.assertEqual(len(s2), nbytes)
+            # This is crude but hey
+            self.assertNotEquals(s2, s)
+
+
+
+class SecureRandomTestCase(SecureRandomTestCaseBase, unittest.TestCase):
+    """
+    Test secureRandom under normal conditions.
+    """
+
+    def test_normal(self):
+        """
+        L{randbytes.secureRandom} should return a string of the requested
+        length and make some effort to make its result otherwise unpredictable.
+        """
+        self._check(randbytes.secureRandom)
+
+
+
+class ConditionalSecureRandomTestCase(SecureRandomTestCaseBase,
+                                      unittest.SynchronousTestCase):
+    """
+    Test each secure random source individually, then remove it to exercise
+    the failure and fallback behaviour.
+    """
+
+    def setUp(self):
+        """
+        Create a L{randbytes.RandomFactory} to use in the tests.
+        """
+        self.factory = randbytes.RandomFactory()
+
+
+    def errorFactory(self, nbytes):
+        """
+        A factory raising an error when a source is not available.
+        """
+        raise randbytes.SourceNotAvailable()
+
+
+    def test_osUrandom(self):
+        """
+        L{RandomFactory._osUrandom} should work as a random source whenever
+        L{os.urandom} is available.
+        """
+        self._check(self.factory._osUrandom)
+
+
+    def test_withoutAnything(self):
+        """
+        Remove all secure sources and assert that C{secureRandom} raises
+        L{randbytes.SecureRandomNotAvailable}. Then check that C{fallback=True}
+        still returns a result while emitting a warning.
+        """
+        self.factory._osUrandom = self.errorFactory
+        self.assertRaises(randbytes.SecureRandomNotAvailable,
+                          self.factory.secureRandom, 18)
+        def wrapper():
+            return self.factory.secureRandom(18, fallback=True)
+        s = self.assertWarns(
+            RuntimeWarning,
+            "urandom unavailable - "
+            "proceeding with non-cryptographically secure random source",
+            __file__,
+            wrapper)
+        self.assertEqual(len(s), 18)
+
+
+
+class RandomTestCaseBase(SecureRandomTestCaseBase, unittest.SynchronousTestCase):
+    """
+    'Normal' random test cases.
+    """
+
+    def test_normal(self):
+        """
+        Test basic case.
+        """
+        self._check(randbytes.insecureRandom)
+
+
+    def test_withoutGetrandbits(self):
+        """
+        Test C{insecureRandom} without C{random.getrandbits}.
+        """
+        factory = randbytes.RandomFactory()
+        factory.getrandbits = None
+        self._check(factory.insecureRandom)
+
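+
+# A minimal usage sketch of the two entry points exercised above
+# (illustration only): secureRandom() raises SecureRandomNotAvailable when no
+# secure source such as os.urandom exists (unless fallback=True is passed),
+# while insecureRandom() never fails but offers no cryptographic guarantee.
+def _exampleRandomBytes():
+    token = randbytes.secureRandom(16)    # 16 cryptographically strong bytes
+    noise = randbytes.insecureRandom(16)  # 16 bytes, no security guarantee
+    return len(token) == 16 and len(noise) == 16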
diff --git a/ThirdParty/Twisted/twisted/test/test_rebuild.py b/ThirdParty/Twisted/twisted/test/test_rebuild.py
new file mode 100644
index 0000000..dfeca9d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_rebuild.py
@@ -0,0 +1,252 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+import sys, os
+import types
+
+from twisted.trial import unittest
+from twisted.python import rebuild
+
+import crash_test_dummy
+f = crash_test_dummy.foo
+
+class Foo: pass
+class Bar(Foo): pass
+class Baz(object): pass
+class Buz(Bar, Baz): pass
+
+class HashRaisesRuntimeError:
+    """
+    Things that don't hash (raise an Exception) should be ignored by the
+    rebuilder.
+
+    @ivar hashCalled: C{bool} set to True when __hash__ is called.
+    """
+    def __init__(self):
+        self.hashCalled = False
+
+    def __hash__(self):
+        self.hashCalled = True
+        raise RuntimeError('not a TypeError!')
+
+
+
+unhashableObject = None # set in test_hashException
+
+
+
+class RebuildTestCase(unittest.TestCase):
+    """
+    Simple testcase for rebuilding, to at least exercise the code.
+    """
+    def setUp(self):
+        self.libPath = self.mktemp()
+        os.mkdir(self.libPath)
+        self.fakelibPath = os.path.join(self.libPath, 'twisted_rebuild_fakelib')
+        os.mkdir(self.fakelibPath)
+        file(os.path.join(self.fakelibPath, '__init__.py'), 'w').close()
+        sys.path.insert(0, self.libPath)
+
+    def tearDown(self):
+        sys.path.remove(self.libPath)
+
+    def testFileRebuild(self):
+        from twisted.python.util import sibpath
+        import shutil, time
+        shutil.copyfile(sibpath(__file__, "myrebuilder1.py"),
+                        os.path.join(self.fakelibPath, "myrebuilder.py"))
+        from twisted_rebuild_fakelib import myrebuilder
+        a = myrebuilder.A()
+        try:
+            object
+        except NameError:
+            pass
+        else:
+            from twisted.test import test_rebuild
+            b = myrebuilder.B()
+            class C(myrebuilder.B):
+                pass
+            test_rebuild.C = C
+            c = C()
+        i = myrebuilder.Inherit()
+        self.assertEqual(a.a(), 'a')
+        # necessary because the file has not "changed" if a second has not gone
+        # by in unix.  This sucks, but it's not often that you'll be doing more
+        # than one reload per second.
+        time.sleep(1.1)
+        shutil.copyfile(sibpath(__file__, "myrebuilder2.py"),
+                        os.path.join(self.fakelibPath, "myrebuilder.py"))
+        rebuild.rebuild(myrebuilder)
+        try:
+            object
+        except NameError:
+            pass
+        else:
+            b2 = myrebuilder.B()
+            self.assertEqual(b2.b(), 'c')
+            self.assertEqual(b.b(), 'c')
+        self.assertEqual(i.a(), 'd')
+        self.assertEqual(a.a(), 'b')
+        # more work to be done on new-style classes
+        # self.assertEqual(c.b(), 'c')
+
+    def testRebuild(self):
+        """
+        Rebuilding an unchanged module.
+        """
+        # This test would actually pass if rebuild was a no-op, but it
+        # ensures rebuild doesn't break stuff while being a less
+        # complex test than testFileRebuild.
+
+        x = crash_test_dummy.X('a')
+
+        rebuild.rebuild(crash_test_dummy, doLog=False)
+        # Instance rebuilding is triggered by attribute access.
+        x.do()
+        self.failUnlessIdentical(x.__class__, crash_test_dummy.X)
+
+        self.failUnlessIdentical(f, crash_test_dummy.foo)
+
+    def testComponentInteraction(self):
+        x = crash_test_dummy.XComponent()
+        x.setAdapter(crash_test_dummy.IX, crash_test_dummy.XA)
+        oldComponent = x.getComponent(crash_test_dummy.IX)
+        rebuild.rebuild(crash_test_dummy, 0)
+        newComponent = x.getComponent(crash_test_dummy.IX)
+
+        newComponent.method()
+
+        self.assertEqual(newComponent.__class__, crash_test_dummy.XA)
+
+        # Test that a duplicate registerAdapter is not allowed
+        from twisted.python import components
+        self.failUnlessRaises(ValueError, components.registerAdapter,
+                              crash_test_dummy.XA, crash_test_dummy.X,
+                              crash_test_dummy.IX)
+
+    def testUpdateInstance(self):
+        global Foo, Buz
+
+        b = Buz()
+
+        class Foo:
+            def foo(self):
+                pass
+        class Buz(Bar, Baz):
+            x = 10
+
+        rebuild.updateInstance(b)
+        assert hasattr(b, 'foo'), "Missing method on rebuilt instance"
+        assert hasattr(b, 'x'), "Missing class attribute on rebuilt instance"
+
+    def testBananaInteraction(self):
+        from twisted.python import rebuild
+        from twisted.spread import banana
+        rebuild.latestClass(banana.Banana)
+
+
+    def test_hashException(self):
+        """
+        Rebuilding something that has a __hash__ that raises a non-TypeError
+        shouldn't cause rebuild to die.
+        """
+        global unhashableObject
+        unhashableObject = HashRaisesRuntimeError()
+        def _cleanup():
+            global unhashableObject
+            unhashableObject = None
+        self.addCleanup(_cleanup)
+        rebuild.rebuild(rebuild)
+        self.assertEqual(unhashableObject.hashCalled, True)
+
+
+
+class NewStyleTestCase(unittest.TestCase):
+    """
+    Tests for rebuilding new-style classes of various sorts.
+    """
+    def setUp(self):
+        self.m = types.ModuleType('whipping')
+        sys.modules['whipping'] = self.m
+
+
+    def tearDown(self):
+        del sys.modules['whipping']
+        del self.m
+
+
+    def test_slots(self):
+        """
+        Try to rebuild a new style class with slots defined.
+        """
+        classDefinition = (
+            "class SlottedClass(object):\n"
+            "    __slots__ = ['a']\n")
+
+        exec classDefinition in self.m.__dict__
+        inst = self.m.SlottedClass()
+        inst.a = 7
+        exec classDefinition in self.m.__dict__
+        rebuild.updateInstance(inst)
+        self.assertEqual(inst.a, 7)
+        self.assertIdentical(type(inst), self.m.SlottedClass)
+
+    if sys.version_info < (2, 6):
+        test_slots.skip = "__class__ assignment for class with slots is only available starting Python 2.6"
+
+
+    def test_errorSlots(self):
+        """
+        Try to rebuild a new style class with slots defined: this should fail.
+        """
+        classDefinition = (
+            "class SlottedClass(object):\n"
+            "    __slots__ = ['a']\n")
+
+        exec classDefinition in self.m.__dict__
+        inst = self.m.SlottedClass()
+        inst.a = 7
+        exec classDefinition in self.m.__dict__
+        self.assertRaises(rebuild.RebuildError, rebuild.updateInstance, inst)
+
+    if sys.version_info >= (2, 6):
+        test_errorSlots.skip = "__class__ assignment for class with slots should work starting Python 2.6"
+
+
+    def test_typeSubclass(self):
+        """
+        Try to rebuild a base type subclass.
+        """
+        classDefinition = (
+            "class ListSubclass(list):\n"
+            "    pass\n")
+
+        exec classDefinition in self.m.__dict__
+        inst = self.m.ListSubclass()
+        inst.append(2)
+        exec classDefinition in self.m.__dict__
+        rebuild.updateInstance(inst)
+        self.assertEqual(inst[0], 2)
+        self.assertIdentical(type(inst), self.m.ListSubclass)
+
+
+    def test_instanceSlots(self):
+        """
+        Test that rebuilding an instance with a __slots__ attribute fails
+        with a meaningful C{TypeError} instead of a L{rebuild.RebuildError}.
+        """
+        classDefinition = (
+            "class NotSlottedClass(object):\n"
+            "    pass\n")
+
+        exec classDefinition in self.m.__dict__
+        inst = self.m.NotSlottedClass()
+        inst.__slots__ = ['a']
+        classDefinition = (
+            "class NotSlottedClass:\n"
+            "    pass\n")
+        exec classDefinition in self.m.__dict__
+        # Moving from new-style class to old-style should fail.
+        self.assertRaises(TypeError, rebuild.updateInstance, inst)
+
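+
+# A minimal usage sketch of the API exercised above (illustration only;
+# "somemodule" stands for any already-imported, already-edited module):
+# rebuild.rebuild() re-imports the module in place and patches its classes,
+# and existing instances see the new definitions on their next attribute
+# access; updateInstance() re-points a single instance at its latest class.
+def _exampleRebuild(somemodule):
+    return rebuild.rebuild(somemodule)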
diff --git a/ThirdParty/Twisted/twisted/test/test_reflect.py b/ThirdParty/Twisted/twisted/test/test_reflect.py
new file mode 100644
index 0000000..0964bc6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_reflect.py
@@ -0,0 +1,419 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for twisted.reflect module.
+"""
+
+import weakref, os
+try:
+    from ihooks import ModuleImporter
+except ImportError:
+    ModuleImporter = None
+
+try:
+    from collections import deque
+except ImportError:
+    deque = None
+
+from twisted.trial import unittest
+from twisted.python import reflect, util
+from twisted.python.versions import Version
+from twisted.python.test.test_reflectpy3 import LookupsTestCase
+
+
+class SettableTest(unittest.TestCase):
+    def setUp(self):
+        self.setter = reflect.Settable()
+
+    def tearDown(self):
+        del self.setter
+
+    def testSet(self):
+        self.setter(a=1, b=2)
+        self.assertEqual(self.setter.a, 1)
+        self.assertEqual(self.setter.b, 2)
+
+
+
+class AccessorTester(reflect.Accessor):
+
+    def set_x(self, x):
+        self.y = x
+        self.reallySet('x', x)
+
+
+    def get_z(self):
+        self.q = 1
+        return 1
+
+
+    def del_z(self):
+        self.reallyDel("q")
+
+
+
+class PropertyAccessorTester(reflect.PropertyAccessor):
+    """
+    Test class to check L{reflect.PropertyAccessor} functionalities.
+    """
+    r = 0
+
+    def set_r(self, r):
+        self.s = r
+
+
+    def set_x(self, x):
+        self.y = x
+        self.reallySet('x', x)
+
+
+    def get_z(self):
+        self.q = 1
+        return 1
+
+
+    def del_z(self):
+        self.reallyDel("q")
+
+
+
+class AccessorTest(unittest.TestCase):
+    def setUp(self):
+        self.tester = AccessorTester()
+
+    def testSet(self):
+        self.tester.x = 1
+        self.assertEqual(self.tester.x, 1)
+        self.assertEqual(self.tester.y, 1)
+
+    def testGet(self):
+        self.assertEqual(self.tester.z, 1)
+        self.assertEqual(self.tester.q, 1)
+
+    def testDel(self):
+        self.tester.z
+        self.assertEqual(self.tester.q, 1)
+        del self.tester.z
+        self.assertEqual(hasattr(self.tester, "q"), 0)
+        self.tester.x = 1
+        del self.tester.x
+        self.assertEqual(hasattr(self.tester, "x"), 0)
+
+
+
+class PropertyAccessorTest(AccessorTest):
+    """
+    Tests for L{reflect.PropertyAccessor}, using L{PropertyAccessorTester}.
+    """
+
+    def setUp(self):
+        self.tester = PropertyAccessorTester()
+
+
+    def test_setWithDefaultValue(self):
+        """
+        If an attribute is present in the class, it can be retrieved by
+        default.
+        """
+        self.assertEqual(self.tester.r, 0)
+        self.tester.r = 1
+        self.assertEqual(self.tester.r, 0)
+        self.assertEqual(self.tester.s, 1)
+
+
+    def test_getValueInDict(self):
+        """
+        The attribute value can be overridden by directly modifying the value in
+        C{__dict__}.
+        """
+        self.tester.__dict__["r"] = 10
+        self.assertEqual(self.tester.r, 10)
+
+
+    def test_notYetInDict(self):
+        """
+        If a getter is defined on an attribute but without any default value,
+        it raises C{AttributeError} when trying to access it.
+        """
+        self.assertRaises(AttributeError, getattr, self.tester, "x")
+
+
+
+class ImportHooksLookupTests(unittest.TestCase, LookupsTestCase):
+    """
+    Tests for lookup methods in the presence of L{ihooks}-style import hooks.
+    Runs all of the tests from L{LookupsTestCase} after installing a custom
+    import hook.
+    """
+    skip = ("ihooks support is broken, and has probably been broken since "
+            "Python 2.6. On the other hand, no one should use ihooks.")
+
+
+    def setUp(self):
+        """
+        Perturb the normal import behavior subtly by installing an import
+        hook.  No custom behavior is provided, but this adds some extra
+        frames to the call stack, which L{namedAny} must be able to account
+        for.
+        """
+        self.importer = ModuleImporter()
+        self.importer.install()
+
+
+    def tearDown(self):
+        """
+        Uninstall the custom import hook.
+        """
+        self.importer.uninstall()
+
+# Prevent trial from re-running these unnecessarily:
+del LookupsTestCase
+
+
+
+class ObjectGrep(unittest.TestCase):
+    def test_dictionary(self):
+        """
+        Test references search through a dictionary, as a key or as a value.
+        """
+        o = object()
+        d1 = {None: o}
+        d2 = {o: None}
+
+        self.assertIn("[None]", reflect.objgrep(d1, o, reflect.isSame))
+        self.assertIn("{None}", reflect.objgrep(d2, o, reflect.isSame))
+
+    def test_list(self):
+        """
+        Test references search through a list.
+        """
+        o = object()
+        L = [None, o]
+
+        self.assertIn("[1]", reflect.objgrep(L, o, reflect.isSame))
+
+    def test_tuple(self):
+        """
+        Test references search through a tuple.
+        """
+        o = object()
+        T = (o, None)
+
+        self.assertIn("[0]", reflect.objgrep(T, o, reflect.isSame))
+
+    def test_instance(self):
+        """
+        Test references search through an object attribute.
+        """
+        class Dummy:
+            pass
+        o = object()
+        d = Dummy()
+        d.o = o
+
+        self.assertIn(".o", reflect.objgrep(d, o, reflect.isSame))
+
+    def test_weakref(self):
+        """
+        Test references search through a weakref object.
+        """
+        class Dummy:
+            pass
+        o = Dummy()
+        w1 = weakref.ref(o)
+
+        self.assertIn("()", reflect.objgrep(w1, o, reflect.isSame))
+
+    def test_boundMethod(self):
+        """
+        Test references search through method special attributes.
+        """
+        class Dummy:
+            def dummy(self):
+                pass
+        o = Dummy()
+        m = o.dummy
+
+        self.assertIn(".im_self", reflect.objgrep(m, m.im_self, reflect.isSame))
+        self.assertIn(".im_class", reflect.objgrep(m, m.im_class, reflect.isSame))
+        self.assertIn(".im_func", reflect.objgrep(m, m.im_func, reflect.isSame))
+
+    def test_everything(self):
+        """
+        Test references search using a complex set of objects.
+        """
+        class Dummy:
+            def method(self):
+                pass
+
+        o = Dummy()
+        D1 = {(): "baz", None: "Quux", o: "Foosh"}
+        L = [None, (), D1, 3]
+        T = (L, {}, Dummy())
+        D2 = {0: "foo", 1: "bar", 2: T}
+        i = Dummy()
+        i.attr = D2
+        m = i.method
+        w = weakref.ref(m)
+
+        self.assertIn("().im_self.attr[2][0][2]{'Foosh'}", reflect.objgrep(w, o, reflect.isSame))
+
+    def test_depthLimit(self):
+        """
+        Test the depth of references search.
+        """
+        a = []
+        b = [a]
+        c = [a, b]
+        d = [a, c]
+
+        self.assertEqual(['[0]'], reflect.objgrep(d, a, reflect.isSame, maxDepth=1))
+        self.assertEqual(['[0]', '[1][0]'], reflect.objgrep(d, a, reflect.isSame, maxDepth=2))
+        self.assertEqual(['[0]', '[1][0]', '[1][1][0]'], reflect.objgrep(d, a, reflect.isSame, maxDepth=3))
+
+    def test_deque(self):
+        """
+        Test references search through a deque object. Only for Python > 2.3.
+        """
+        o = object()
+        D = deque()
+        D.append(None)
+        D.append(o)
+
+        self.assertIn("[1]", reflect.objgrep(D, o, reflect.isSame))
+
+    if deque is None:
+        test_deque.skip = "Deque not available"
+
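+
+# A minimal sketch of the call pattern the ObjectGrep tests exercise
+# (illustration only): objgrep() walks a container graph looking for the
+# target object and describes each reference found as a path string, e.g.
+# "[1]" for a list index, "{None}" for a dictionary key and ".attr" for an
+# instance attribute.
+def _exampleObjgrep():
+    needle = object()
+    haystack = {'stuff': [1, 2, needle]}
+    # Expected to yield a path such as "['stuff'][2]".
+    return reflect.objgrep(haystack, needle, reflect.isSame)
+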
+
+class GetClass(unittest.TestCase):
+    def testOld(self):
+        class OldClass:
+            pass
+        old = OldClass()
+        self.assertIn(reflect.getClass(OldClass).__name__, ('class', 'classobj'))
+        self.assertEqual(reflect.getClass(old).__name__, 'OldClass')
+
+    def testNew(self):
+        class NewClass(object):
+            pass
+        new = NewClass()
+        self.assertEqual(reflect.getClass(NewClass).__name__, 'type')
+        self.assertEqual(reflect.getClass(new).__name__, 'NewClass')
+
+
+
+class DeprecationTestCase(unittest.TestCase):
+    """
+    Test deprecations in twisted.python.reflect
+    """
+
+    def test_allYourBase(self):
+        """
+        Test deprecation of L{reflect.allYourBase}. See #5481 for removal.
+        """
+        self.callDeprecated(
+            (Version("Twisted", 11, 0, 0), "inspect.getmro"),
+            reflect.allYourBase, DeprecationTestCase)
+
+
+    def test_accumulateBases(self):
+        """
+        Test deprecation of L{reflect.accumulateBases}. See #5481 for removal.
+        """
+        l = []
+        self.callDeprecated(
+            (Version("Twisted", 11, 0, 0), "inspect.getmro"),
+            reflect.accumulateBases, DeprecationTestCase, l, None)
+
+
+    def lookForDeprecationWarning(self, testMethod, attributeName, warningMsg):
+        """
+        Check the deprecation of the C{reflect} attribute named
+        C{attributeName}: the call made by C{testMethod} must have emitted a
+        single deprecation warning whose message contains C{warningMsg}.
+
+        @param testMethod: Name of the offending function to be used with
+            flushWarnings
+        @type testMethod: C{str}
+
+        @param attributeName: Name of attribute to be checked for deprecation
+        @type attributeName: C{str}
+
+        @param warningMsg: Deprecation warning message
+        @type warningMsg: C{str}
+        """
+        warningsShown = self.flushWarnings([testMethod])
+        self.assertEqual(len(warningsShown), 1)
+        self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warningsShown[0]['message'],
+            "twisted.python.reflect." + attributeName + " "
+            "was deprecated in Twisted 12.1.0: " + warningMsg + ".")
+
+
+    def test_settable(self):
+        """
+        Test deprecation of L{reflect.Settable}.
+        """
+        reflect.Settable()
+        self.lookForDeprecationWarning(
+            self.test_settable, "Settable",
+            "Settable is old and untested. Please write your own version of this "
+            "functionality if you need it")
+
+
+    def test_accessorType(self):
+        """
+        Test deprecation of L{reflect.AccessorType}.
+        """
+        reflect.AccessorType(' ', ( ), { })
+        self.lookForDeprecationWarning(
+            self.test_accessorType, "AccessorType",
+            "AccessorType is old and untested. Please write your own version of "
+            "this functionality if you need it")
+
+
+    def test_propertyAccessor(self):
+        """
+        Test deprecation of L{reflect.PropertyAccessor}.
+        """
+        reflect.PropertyAccessor()
+        self.lookForDeprecationWarning(
+            self.test_propertyAccessor, "PropertyAccessor",
+            "PropertyAccessor is old and untested. Please write your own "
+            "version of this functionality if you need it")
+
+
+    def test_accessor(self):
+        """
+        Test deprecation of L{reflect.Accessor}.
+        """
+        reflect.Accessor()
+        self.lookForDeprecationWarning(
+            self.test_accessor, "Accessor",
+            "Accessor is an implementation for Python 2.1 which is no longer "
+            "supported by Twisted")
+
+
+    def test_originalAccessor(self):
+        """
+        Test deprecation of L{reflect.OriginalAccessor}.
+        """
+        reflect.OriginalAccessor()
+        self.lookForDeprecationWarning(
+            self.test_originalAccessor, "OriginalAccessor",
+            "OriginalAccessor is a reference to class "
+            "twisted.python.reflect.Accessor which is deprecated")
+
+
+    def test_summer(self):
+        """
+        Test deprecation of L{reflect.Summer}.
+        """
+        reflect.Summer()
+        self.lookForDeprecationWarning(
+            self.test_summer, "Summer",
+            "Summer is a child class of twisted.python.reflect.Accessor which "
+            "is deprecated")
diff --git a/ThirdParty/Twisted/twisted/test/test_roots.py b/ThirdParty/Twisted/twisted/test/test_roots.py
new file mode 100644
index 0000000..c9fd39e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_roots.py
@@ -0,0 +1,63 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.trial import unittest
+from twisted.python import roots
+import types
+
+class RootsTest(unittest.TestCase):
+
+    def testExceptions(self):
+        request = roots.Request()
+        try:
+            request.write("blah")
+        except NotImplementedError:
+            pass
+        else:
+            self.fail()
+        try:
+            request.finish()
+        except NotImplementedError:
+            pass
+        else:
+            self.fail()
+
+    def testCollection(self):
+        collection = roots.Collection()
+        collection.putEntity("x", 'test')
+        self.assertEqual(collection.getStaticEntity("x"),
+                             'test')
+        collection.delEntity("x")
+        self.assertEqual(collection.getStaticEntity('x'),
+                             None)
+        try:
+            collection.storeEntity("x", None)
+        except NotImplementedError:
+            pass
+        else:
+            self.fail()
+        try:
+            collection.removeEntity("x", None)
+        except NotImplementedError:
+            pass
+        else:
+            self.fail()
+
+    def testConstrained(self):
+        class const(roots.Constrained):
+            def nameConstraint(self, name):
+                return (name == 'x')
+        c = const()
+        self.assertEqual(c.putEntity('x', 'test'), None)
+        self.failUnlessRaises(roots.ConstraintViolation,
+                              c.putEntity, 'y', 'test')
+
+
+    def testHomogenous(self):
+        h = roots.Homogenous()
+        h.entityType = types.IntType
+        h.putEntity('a', 1)
+        self.assertEqual(h.getStaticEntity('a'),1 )
+        self.failUnlessRaises(roots.ConstraintViolation,
+                              h.putEntity, 'x', 'y')
+
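+
+# A minimal sketch of the collection classes exercised above (illustration
+# only): Homogenous constrains the type of stored entities, Constrained
+# subclasses constrain entity names via nameConstraint(), and both raise
+# roots.ConstraintViolation when the constraint is not met.
+def _exampleHomogenous():
+    h = roots.Homogenous()
+    h.entityType = int
+    h.putEntity('answer', 42)
+    return h.getStaticEntity('answer')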
diff --git a/ThirdParty/Twisted/twisted/test/test_setup.py b/ThirdParty/Twisted/twisted/test/test_setup.py
new file mode 100644
index 0000000..da21fef
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_setup.py
@@ -0,0 +1,61 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for C{setup.py}, Twisted's distutils integration file.
+"""
+
+from __future__ import division, absolute_import
+
+import os, sys
+
+import twisted
+from twisted.trial.unittest import SynchronousTestCase
+from twisted.python.filepath import FilePath
+from twisted.python.dist import getExtensions
+
+# Get rid of the UTF-8 encoding and bytes topfiles segment when FilePath
+# supports unicode.  #2366, #4736, #5203.  Also #4743, which requires checking
+# setup.py, not just the topfiles directory.
+if not FilePath(twisted.__file__.encode('utf-8')).sibling(b'topfiles').child(b'setup.py').exists():
+    sourceSkip = "Only applies to source checkout of Twisted"
+else:
+    sourceSkip = None
+
+
+class TwistedExtensionsTests(SynchronousTestCase):
+    if sourceSkip is not None:
+        skip = sourceSkip
+
+    def setUp(self):
+        """
+        Change the working directory to the parent of the C{twisted} package so
+        that L{twisted.python.dist.getExtensions} finds Twisted's own extension
+        definitions.
+        """
+        self.addCleanup(os.chdir, os.getcwd())
+        os.chdir(FilePath(twisted.__file__).parent().parent().path)
+
+
+    def test_initgroups(self):
+        """
+        If C{os.initgroups} is present (Python 2.7 and Python 3.3 and newer),
+        L{twisted.python._initgroups} is not returned as an extension to build
+        from L{getExtensions}.
+        """
+        extensions = getExtensions()
+        found = None
+        for extension in extensions:
+            if extension.name == "twisted.python._initgroups":
+                found = extension
+
+        if sys.version_info[:2] >= (2, 7):
+            self.assertIdentical(
+                None, found,
+                "Should not have found twisted.python._initgroups extension "
+                "definition.")
+        else:
+            self.assertNotIdentical(
+                None, found,
+                "Should have found twisted.python._initgroups extension "
+                "definition.")
diff --git a/ThirdParty/Twisted/twisted/test/test_shortcut.py b/ThirdParty/Twisted/twisted/test/test_shortcut.py
new file mode 100644
index 0000000..fdcb775
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_shortcut.py
@@ -0,0 +1,26 @@
+"""Test win32 shortcut script
+"""
+
+from twisted.trial import unittest
+
+import os
+if os.name == 'nt':
+
+    skipWindowsNopywin32 = None
+    try:
+        from twisted.python import shortcut
+    except ImportError:
+        skipWindowsNopywin32 = ("On windows, twisted.python.shortcut is not "
+                                "available in the absence of win32com.")
+    import os.path
+    import sys
+
+    class ShortcutTest(unittest.TestCase):
+        def testCreate(self):
+            s1=shortcut.Shortcut("test_shortcut.py")
+            tempname=self.mktemp() + '.lnk'
+            s1.save(tempname)
+            self.assert_(os.path.exists(tempname))
+            sc=shortcut.open(tempname)
+            self.assert_(sc.GetPath(0)[0].endswith('test_shortcut.py'))
+    ShortcutTest.skip = skipWindowsNopywin32
diff --git a/ThirdParty/Twisted/twisted/test/test_sip.py b/ThirdParty/Twisted/twisted/test/test_sip.py
new file mode 100644
index 0000000..73f09d4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_sip.py
@@ -0,0 +1,984 @@
+# -*- test-case-name: twisted.test.test_sip -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Session Initialization Protocol tests."""
+
+from twisted.trial import unittest, util
+from twisted.protocols import sip
+from twisted.internet import defer, reactor, utils
+from twisted.python.versions import Version
+
+from twisted.test import proto_helpers
+
+from twisted import cred
+import twisted.cred.portal
+import twisted.cred.checkers
+
+from zope.interface import implements
+
+
+# request, prefixed by random CRLFs
+request1 = "\n\r\n\n\r" + """\
+INVITE sip:foo SIP/2.0
+From: mo
+To: joe
+Content-Length: 4
+
+abcd""".replace("\n", "\r\n")
+
+# request, no content-length
+request2 = """INVITE sip:foo SIP/2.0
+From: mo
+To: joe
+
+1234""".replace("\n", "\r\n")
+
+# request, with garbage after
+request3 = """INVITE sip:foo SIP/2.0
+From: mo
+To: joe
+Content-Length: 4
+
+1234
+
+lalalal""".replace("\n", "\r\n")
+
+# three requests
+request4 = """INVITE sip:foo SIP/2.0
+From: mo
+To: joe
+Content-Length: 0
+
+INVITE sip:loop SIP/2.0
+From: foo
+To: bar
+Content-Length: 4
+
+abcdINVITE sip:loop SIP/2.0
+From: foo
+To: bar
+Content-Length: 4
+
+1234""".replace("\n", "\r\n")
+
+# response, no content
+response1 = """SIP/2.0 200 OK
+From:  foo
+To:bar
+Content-Length: 0
+
+""".replace("\n", "\r\n")
+
+# short header version
+request_short = """\
+INVITE sip:foo SIP/2.0
+f: mo
+t: joe
+l: 4
+
+abcd""".replace("\n", "\r\n")
+
+request_natted = """\
+INVITE sip:foo SIP/2.0
+Via: SIP/2.0/UDP 10.0.0.1:5060;rport
+
+""".replace("\n", "\r\n")
+
+# multiline headers (example from RFC 3261).
+response_multiline = """\
+SIP/2.0 200 OK
+Via: SIP/2.0/UDP server10.biloxi.com
+    ;branch=z9hG4bKnashds8;received=192.0.2.3
+Via: SIP/2.0/UDP bigbox3.site3.atlanta.com
+    ;branch=z9hG4bK77ef4c2312983.1;received=192.0.2.2
+Via: SIP/2.0/UDP pc33.atlanta.com
+    ;branch=z9hG4bK776asdhds ;received=192.0.2.1
+To: Bob <sip:bob at biloxi.com>;tag=a6c85cf
+From: Alice <sip:alice at atlanta.com>;tag=1928301774
+Call-ID: a84b4c76e66710 at pc33.atlanta.com
+CSeq: 314159 INVITE
+Contact: <sip:bob at 192.0.2.4>
+Content-Type: application/sdp
+Content-Length: 0
+\n""".replace("\n", "\r\n")
+
+class TestRealm:
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        return sip.IContact, None, lambda: None
+
+class MessageParsingTestCase(unittest.TestCase):
+    def setUp(self):
+        self.l = []
+        self.parser = sip.MessagesParser(self.l.append)
+
+    def feedMessage(self, message):
+        self.parser.dataReceived(message)
+        self.parser.dataDone()
+
+    def validateMessage(self, m, method, uri, headers, body):
+        """Validate Requests."""
+        self.assertEqual(m.method, method)
+        self.assertEqual(m.uri.toString(), uri)
+        self.assertEqual(m.headers, headers)
+        self.assertEqual(m.body, body)
+        self.assertEqual(m.finished, 1)
+
+    def testSimple(self):
+        l = self.l
+        self.feedMessage(request1)
+        self.assertEqual(len(l), 1)
+        self.validateMessage(
+            l[0], "INVITE", "sip:foo",
+            {"from": ["mo"], "to": ["joe"], "content-length": ["4"]},
+            "abcd")
+
+    def testTwoMessages(self):
+        l = self.l
+        self.feedMessage(request1)
+        self.feedMessage(request2)
+        self.assertEqual(len(l), 2)
+        self.validateMessage(
+            l[0], "INVITE", "sip:foo",
+            {"from": ["mo"], "to": ["joe"], "content-length": ["4"]},
+            "abcd")
+        self.validateMessage(l[1], "INVITE", "sip:foo",
+                             {"from": ["mo"], "to": ["joe"]},
+                             "1234")
+
+    def testGarbage(self):
+        l = self.l
+        self.feedMessage(request3)
+        self.assertEqual(len(l), 1)
+        self.validateMessage(
+            l[0], "INVITE", "sip:foo",
+            {"from": ["mo"], "to": ["joe"], "content-length": ["4"]},
+            "1234")
+
+    def testThreeInOne(self):
+        l = self.l
+        self.feedMessage(request4)
+        self.assertEqual(len(l), 3)
+        self.validateMessage(
+            l[0], "INVITE", "sip:foo",
+            {"from": ["mo"], "to": ["joe"], "content-length": ["0"]},
+            "")
+        self.validateMessage(
+            l[1], "INVITE", "sip:loop",
+            {"from": ["foo"], "to": ["bar"], "content-length": ["4"]},
+            "abcd")
+        self.validateMessage(
+            l[2], "INVITE", "sip:loop",
+            {"from": ["foo"], "to": ["bar"], "content-length": ["4"]},
+            "1234")
+
+    def testShort(self):
+        l = self.l
+        self.feedMessage(request_short)
+        self.assertEqual(len(l), 1)
+        self.validateMessage(
+            l[0], "INVITE", "sip:foo",
+            {"from": ["mo"], "to": ["joe"], "content-length": ["4"]},
+            "abcd")
+
+    def testSimpleResponse(self):
+        l = self.l
+        self.feedMessage(response1)
+        self.assertEqual(len(l), 1)
+        m = l[0]
+        self.assertEqual(m.code, 200)
+        self.assertEqual(m.phrase, "OK")
+        self.assertEqual(
+            m.headers,
+            {"from": ["foo"], "to": ["bar"], "content-length": ["0"]})
+        self.assertEqual(m.body, "")
+        self.assertEqual(m.finished, 1)
+
+
+    def test_multiLine(self):
+        """
+        A header may be split across multiple lines.  Subsequent lines begin
+        with C{" "} or C{"\\t"}.
+        """
+        l = self.l
+        self.feedMessage(response_multiline)
+        self.assertEquals(len(l), 1)
+        m = l[0]
+        self.assertEquals(
+            m.headers['via'][0],
+            "SIP/2.0/UDP server10.biloxi.com;"
+            "branch=z9hG4bKnashds8;received=192.0.2.3")
+        self.assertEquals(
+            m.headers['via'][1],
+            "SIP/2.0/UDP bigbox3.site3.atlanta.com;"
+            "branch=z9hG4bK77ef4c2312983.1;received=192.0.2.2")
+        self.assertEquals(
+            m.headers['via'][2],
+            "SIP/2.0/UDP pc33.atlanta.com;"
+            "branch=z9hG4bK776asdhds ;received=192.0.2.1")
+
+
+
+class MessageParsingTestCase2(MessageParsingTestCase):
+    """Same as base class, but feed data char by char."""
+
+    def feedMessage(self, message):
+        for c in message:
+            self.parser.dataReceived(c)
+        self.parser.dataDone()
+
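+
+# A minimal sketch of the parser interface driven above (illustration only):
+# MessagesParser takes a callback that receives each complete message, is fed
+# raw bytes through dataReceived(), and dataDone() flushes a trailing message
+# that carried no Content-Length header.
+def _exampleParse(rawBytes=request1):
+    messages = []
+    parser = sip.MessagesParser(messages.append)
+    parser.dataReceived(rawBytes)
+    parser.dataDone()
+    return messages
+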
+
+class MakeMessageTestCase(unittest.TestCase):
+
+    def testRequest(self):
+        r = sip.Request("INVITE", "sip:foo")
+        r.addHeader("foo", "bar")
+        self.assertEqual(
+            r.toString(),
+            "INVITE sip:foo SIP/2.0\r\nFoo: bar\r\n\r\n")
+
+    def testResponse(self):
+        r = sip.Response(200, "OK")
+        r.addHeader("foo", "bar")
+        r.addHeader("Content-Length", "4")
+        r.bodyDataReceived("1234")
+        self.assertEqual(
+            r.toString(),
+            "SIP/2.0 200 OK\r\nFoo: bar\r\nContent-Length: 4\r\n\r\n1234")
+
+    def testStatusCode(self):
+        r = sip.Response(200)
+        self.assertEqual(r.toString(), "SIP/2.0 200 OK\r\n\r\n")
+
+
+class ViaTestCase(unittest.TestCase):
+
+    def checkRoundtrip(self, v):
+        s = v.toString()
+        self.assertEqual(s, sip.parseViaHeader(s).toString())
+
+    def testExtraWhitespace(self):
+        v1 = sip.parseViaHeader('SIP/2.0/UDP 192.168.1.1:5060')
+        v2 = sip.parseViaHeader('SIP/2.0/UDP     192.168.1.1:5060')
+        self.assertEqual(v1.transport, v2.transport)
+        self.assertEqual(v1.host, v2.host)
+        self.assertEqual(v1.port, v2.port)
+
+    def test_complex(self):
+        """
+        Test parsing a Via header with one of everything.
+        """
+        s = ("SIP/2.0/UDP first.example.com:4000;ttl=16;maddr=224.2.0.1"
+             " ;branch=a7c6a8dlze (Example)")
+        v = sip.parseViaHeader(s)
+        self.assertEqual(v.transport, "UDP")
+        self.assertEqual(v.host, "first.example.com")
+        self.assertEqual(v.port, 4000)
+        self.assertEqual(v.rport, None)
+        self.assertEqual(v.rportValue, None)
+        self.assertEqual(v.rportRequested, False)
+        self.assertEqual(v.ttl, 16)
+        self.assertEqual(v.maddr, "224.2.0.1")
+        self.assertEqual(v.branch, "a7c6a8dlze")
+        self.assertEqual(v.hidden, 0)
+        self.assertEqual(v.toString(),
+                          "SIP/2.0/UDP first.example.com:4000"
+                          ";ttl=16;branch=a7c6a8dlze;maddr=224.2.0.1")
+        self.checkRoundtrip(v)
+
+    def test_simple(self):
+        """
+        Test parsing a simple Via header.
+        """
+        s = "SIP/2.0/UDP example.com;hidden"
+        v = sip.parseViaHeader(s)
+        self.assertEqual(v.transport, "UDP")
+        self.assertEqual(v.host, "example.com")
+        self.assertEqual(v.port, 5060)
+        self.assertEqual(v.rport, None)
+        self.assertEqual(v.rportValue, None)
+        self.assertEqual(v.rportRequested, False)
+        self.assertEqual(v.ttl, None)
+        self.assertEqual(v.maddr, None)
+        self.assertEqual(v.branch, None)
+        self.assertEqual(v.hidden, True)
+        self.assertEqual(v.toString(),
+                          "SIP/2.0/UDP example.com:5060;hidden")
+        self.checkRoundtrip(v)
+
+    def testSimpler(self):
+        v = sip.Via("example.com")
+        self.checkRoundtrip(v)
+
+
+    def test_deprecatedRPort(self):
+        """
+        Setting rport to True is deprecated, but still produces a Via header
+        with the expected properties.
+        """
+        v = sip.Via("foo.bar", rport=True)
+
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_deprecatedRPort])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(
+            warnings[0]['message'],
+            'rport=True is deprecated since Twisted 9.0.')
+        self.assertEqual(
+            warnings[0]['category'],
+            DeprecationWarning)
+
+        self.assertEqual(v.toString(), "SIP/2.0/UDP foo.bar:5060;rport")
+        self.assertEqual(v.rport, True)
+        self.assertEqual(v.rportRequested, True)
+        self.assertEqual(v.rportValue, None)
+
+
+    def test_rport(self):
+        """
+        An rport setting of None should insert the parameter with no value.
+        """
+        v = sip.Via("foo.bar", rport=None)
+        self.assertEqual(v.toString(), "SIP/2.0/UDP foo.bar:5060;rport")
+        self.assertEqual(v.rportRequested, True)
+        self.assertEqual(v.rportValue, None)
+
+
+    def test_rportValue(self):
+        """
+        A numeric rport setting should insert the parameter with the given
+        value.
+        """
+        v = sip.Via("foo.bar", rport=1)
+        self.assertEqual(v.toString(), "SIP/2.0/UDP foo.bar:5060;rport=1")
+        self.assertEqual(v.rportRequested, False)
+        self.assertEqual(v.rportValue, 1)
+        self.assertEqual(v.rport, 1)
+
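+    # Editorial summary (derived from the rport tests above), a minimal sketch
+    # of how the Via rport modes serialize:
+    #
+    #   sip.Via("foo.bar", rport=True).toString()  -> "...;rport"    (deprecated spelling)
+    #   sip.Via("foo.bar", rport=None).toString()  -> "...;rport"    (value requested)
+    #   sip.Via("foo.bar", rport=1).toString()     -> "...;rport=1"  (value supplied)
+    #
+    # i.e. both True and None request an rport from the peer (rportRequested is
+    # True, rportValue is None), while an integer carries a concrete value
+    # (rportRequested is False, rportValue is set).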
+
+    def testNAT(self):
+        s = "SIP/2.0/UDP 10.0.0.1:5060;received=22.13.1.5;rport=12345"
+        v = sip.parseViaHeader(s)
+        self.assertEqual(v.transport, "UDP")
+        self.assertEqual(v.host, "10.0.0.1")
+        self.assertEqual(v.port, 5060)
+        self.assertEqual(v.received, "22.13.1.5")
+        self.assertEqual(v.rport, 12345)
+
+        self.assertNotEqual(v.toString().find("rport=12345"), -1)
+
+
+    def test_unknownParams(self):
+       """
+       Parsing and serializing Via headers with unknown parameters should work.
+       """
+       s = "SIP/2.0/UDP example.com:5060;branch=a12345b;bogus;pie=delicious"
+       v = sip.parseViaHeader(s)
+       self.assertEqual(v.toString(), s)
+
+
+
+class URLTestCase(unittest.TestCase):
+
+    def testRoundtrip(self):
+        for url in [
+            "sip:j.doe at big.com",
+            "sip:j.doe:secret at big.com;transport=tcp",
+            "sip:j.doe at big.com?subject=project",
+            "sip:example.com",
+            ]:
+            self.assertEqual(sip.parseURL(url).toString(), url)
+
+    def testComplex(self):
+        s = ("sip:user:pass at hosta:123;transport=udp;user=phone;method=foo;"
+             "ttl=12;maddr=1.2.3.4;blah;goo=bar?a=b&c=d")
+        url = sip.parseURL(s)
+        for k, v in [("username", "user"), ("password", "pass"),
+                     ("host", "hosta"), ("port", 123),
+                     ("transport", "udp"), ("usertype", "phone"),
+                     ("method", "foo"), ("ttl", 12),
+                     ("maddr", "1.2.3.4"), ("other", ["blah", "goo=bar"]),
+                     ("headers", {"a": "b", "c": "d"})]:
+            self.assertEqual(getattr(url, k), v)
+
+
+class ParseTestCase(unittest.TestCase):
+
+    def testParseAddress(self):
+        for address, name, urls, params in [
+            ('"A. G. Bell" <sip:foo at example.com>',
+             "A. G. Bell", "sip:foo at example.com", {}),
+            ("Anon <sip:foo at example.com>", "Anon", "sip:foo at example.com", {}),
+            ("sip:foo at example.com", "", "sip:foo at example.com", {}),
+            ("<sip:foo at example.com>", "", "sip:foo at example.com", {}),
+            ("foo <sip:foo at example.com>;tag=bar;foo=baz", "foo",
+             "sip:foo at example.com", {"tag": "bar", "foo": "baz"}),
+            ]:
+            gname, gurl, gparams = sip.parseAddress(address)
+            self.assertEqual(name, gname)
+            self.assertEqual(gurl.toString(), urls)
+            self.assertEqual(gparams, params)
+
+
+class DummyLocator:
+    implements(sip.ILocator)
+    def getAddress(self, logicalURL):
+        return defer.succeed(sip.URL("server.com", port=5060))
+
+class FailingLocator:
+    implements(sip.ILocator)
+    def getAddress(self, logicalURL):
+        return defer.fail(LookupError())
+
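+# Editorial note: both fakes provide sip.ILocator, whose getAddress(logicalURL)
+# returns a Deferred that fires with a sip.URL giving the physical contact
+# address, or fails with LookupError when no registration is known (compare
+# testWrongDomainLookup / testNoContactLookup in RegistrationTestCase below).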
+
+class ProxyTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.proxy = sip.Proxy("127.0.0.1")
+        self.proxy.locator = DummyLocator()
+        self.sent = []
+        self.proxy.sendMessage = lambda dest, msg: self.sent.append((dest, msg))
+
+    def testRequestForward(self):
+        r = sip.Request("INVITE", "sip:foo")
+        r.addHeader("via", sip.Via("1.2.3.4").toString())
+        r.addHeader("via", sip.Via("1.2.3.5").toString())
+        r.addHeader("foo", "bar")
+        r.addHeader("to", "<sip:joe at server.com>")
+        r.addHeader("contact", "<sip:joe at 1.2.3.5>")
+        self.proxy.datagramReceived(r.toString(), ("1.2.3.4", 5060))
+        self.assertEqual(len(self.sent), 1)
+        dest, m = self.sent[0]
+        self.assertEqual(dest.port, 5060)
+        self.assertEqual(dest.host, "server.com")
+        self.assertEqual(m.uri.toString(), "sip:foo")
+        self.assertEqual(m.method, "INVITE")
+        self.assertEqual(m.headers["via"],
+                          ["SIP/2.0/UDP 127.0.0.1:5060",
+                           "SIP/2.0/UDP 1.2.3.4:5060",
+                           "SIP/2.0/UDP 1.2.3.5:5060"])
+
+
+    def testReceivedRequestForward(self):
+        r = sip.Request("INVITE", "sip:foo")
+        r.addHeader("via", sip.Via("1.2.3.4").toString())
+        r.addHeader("foo", "bar")
+        r.addHeader("to", "<sip:joe at server.com>")
+        r.addHeader("contact", "<sip:joe at 1.2.3.4>")
+        self.proxy.datagramReceived(r.toString(), ("1.1.1.1", 5060))
+        dest, m = self.sent[0]
+        self.assertEqual(m.headers["via"],
+                          ["SIP/2.0/UDP 127.0.0.1:5060",
+                           "SIP/2.0/UDP 1.2.3.4:5060;received=1.1.1.1"])
+
+
+    def testResponseWrongVia(self):
+        # first via must match proxy's address
+        r = sip.Response(200)
+        r.addHeader("via", sip.Via("foo.com").toString())
+        self.proxy.datagramReceived(r.toString(), ("1.1.1.1", 5060))
+        self.assertEqual(len(self.sent), 0)
+
+    def testResponseForward(self):
+        r = sip.Response(200)
+        r.addHeader("via", sip.Via("127.0.0.1").toString())
+        r.addHeader("via", sip.Via("client.com", port=1234).toString())
+        self.proxy.datagramReceived(r.toString(), ("1.1.1.1", 5060))
+        self.assertEqual(len(self.sent), 1)
+        dest, m = self.sent[0]
+        self.assertEqual((dest.host, dest.port), ("client.com", 1234))
+        self.assertEqual(m.code, 200)
+        self.assertEqual(m.headers["via"], ["SIP/2.0/UDP client.com:1234"])
+
+    def testReceivedResponseForward(self):
+        r = sip.Response(200)
+        r.addHeader("via", sip.Via("127.0.0.1").toString())
+        r.addHeader(
+            "via",
+            sip.Via("10.0.0.1", received="client.com").toString())
+        self.proxy.datagramReceived(r.toString(), ("1.1.1.1", 5060))
+        self.assertEqual(len(self.sent), 1)
+        dest, m = self.sent[0]
+        self.assertEqual((dest.host, dest.port), ("client.com", 5060))
+
+    def testResponseToUs(self):
+        r = sip.Response(200)
+        r.addHeader("via", sip.Via("127.0.0.1").toString())
+        l = []
+        self.proxy.gotResponse = lambda *a: l.append(a)
+        self.proxy.datagramReceived(r.toString(), ("1.1.1.1", 5060))
+        self.assertEqual(len(l), 1)
+        m, addr = l[0]
+        self.assertEqual(len(m.headers.get("via", [])), 0)
+        self.assertEqual(m.code, 200)
+
+    def testLoop(self):
+        r = sip.Request("INVITE", "sip:foo")
+        r.addHeader("via", sip.Via("1.2.3.4").toString())
+        r.addHeader("via", sip.Via("127.0.0.1").toString())
+        self.proxy.datagramReceived(r.toString(), ("client.com", 5060))
+        self.assertEqual(self.sent, [])
+
+    def testCantForwardRequest(self):
+        r = sip.Request("INVITE", "sip:foo")
+        r.addHeader("via", sip.Via("1.2.3.4").toString())
+        r.addHeader("to", "<sip:joe at server.com>")
+        self.proxy.locator = FailingLocator()
+        self.proxy.datagramReceived(r.toString(), ("1.2.3.4", 5060))
+        self.assertEqual(len(self.sent), 1)
+        dest, m = self.sent[0]
+        self.assertEqual((dest.host, dest.port), ("1.2.3.4", 5060))
+        self.assertEqual(m.code, 404)
+        self.assertEqual(m.headers["via"], ["SIP/2.0/UDP 1.2.3.4:5060"])
+
+    def testCantForwardResponse(self):
+        pass
+
+    #testCantForwardResponse.skip = "not implemented yet"
+
+
+class RegistrationTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.proxy = sip.RegisterProxy(host="127.0.0.1")
+        self.registry = sip.InMemoryRegistry("bell.example.com")
+        self.proxy.registry = self.proxy.locator = self.registry
+        self.sent = []
+        self.proxy.sendMessage = lambda dest, msg: self.sent.append((dest, msg))
+    setUp = utils.suppressWarnings(setUp,
+        util.suppress(category=DeprecationWarning,
+            message=r'twisted.protocols.sip.DigestAuthorizer was deprecated'))
+
+    def tearDown(self):
+        for d, uri in self.registry.users.values():
+            d.cancel()
+        del self.proxy
+
+    def register(self):
+        r = sip.Request("REGISTER", "sip:bell.example.com")
+        r.addHeader("to", "sip:joe at bell.example.com")
+        r.addHeader("contact", "sip:joe at client.com:1234")
+        r.addHeader("via", sip.Via("client.com").toString())
+        self.proxy.datagramReceived(r.toString(), ("client.com", 5060))
+
+    def unregister(self):
+        r = sip.Request("REGISTER", "sip:bell.example.com")
+        r.addHeader("to", "sip:joe at bell.example.com")
+        r.addHeader("contact", "*")
+        r.addHeader("via", sip.Via("client.com").toString())
+        r.addHeader("expires", "0")
+        self.proxy.datagramReceived(r.toString(), ("client.com", 5060))
+
+    def testRegister(self):
+        self.register()
+        dest, m = self.sent[0]
+        self.assertEqual((dest.host, dest.port), ("client.com", 5060))
+        self.assertEqual(m.code, 200)
+        self.assertEqual(m.headers["via"], ["SIP/2.0/UDP client.com:5060"])
+        self.assertEqual(m.headers["to"], ["sip:joe at bell.example.com"])
+        self.assertEqual(m.headers["contact"], ["sip:joe at client.com:5060"])
+        self.failUnless(
+            int(m.headers["expires"][0]) in (3600, 3601, 3599, 3598))
+        self.assertEqual(len(self.registry.users), 1)
+        dc, uri = self.registry.users["joe"]
+        self.assertEqual(uri.toString(), "sip:joe@client.com:5060")
+        d = self.proxy.locator.getAddress(sip.URL(username="joe",
+                                                  host="bell.example.com"))
+        d.addCallback(lambda desturl : (desturl.host, desturl.port))
+        d.addCallback(self.assertEqual, ('client.com', 5060))
+        return d
+
+    def testUnregister(self):
+        self.register()
+        self.unregister()
+        dest, m = self.sent[1]
+        self.assertEqual((dest.host, dest.port), ("client.com", 5060))
+        self.assertEqual(m.code, 200)
+        self.assertEqual(m.headers["via"], ["SIP/2.0/UDP client.com:5060"])
+        self.assertEqual(m.headers["to"], ["sip:joe at bell.example.com"])
+        self.assertEqual(m.headers["contact"], ["sip:joe at client.com:5060"])
+        self.assertEqual(m.headers["expires"], ["0"])
+        self.assertEqual(self.registry.users, {})
+
+
+    def addPortal(self):
+        r = TestRealm()
+        p = cred.portal.Portal(r)
+        c = cred.checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        c.addUser('userXname@127.0.0.1', 'passXword')
+        p.registerChecker(c)
+        self.proxy.portal = p
+
+    def testFailedAuthentication(self):
+        self.addPortal()
+        self.register()
+
+        self.assertEqual(len(self.registry.users), 0)
+        self.assertEqual(len(self.sent), 1)
+        dest, m = self.sent[0]
+        self.assertEqual(m.code, 401)
+
+
+    def test_basicAuthentication(self):
+        """
+        Test that registration with basic authentication succeeds.
+        """
+        self.addPortal()
+        self.proxy.authorizers = self.proxy.authorizers.copy()
+        self.proxy.authorizers['basic'] = sip.BasicAuthorizer()
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_basicAuthentication])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(
+            warnings[0]['message'],
+            "twisted.protocols.sip.BasicAuthorizer was deprecated in "
+            "Twisted 9.0.0")
+        self.assertEqual(
+            warnings[0]['category'],
+            DeprecationWarning)
+        r = sip.Request("REGISTER", "sip:bell.example.com")
+        r.addHeader("to", "sip:joe at bell.example.com")
+        r.addHeader("contact", "sip:joe at client.com:1234")
+        r.addHeader("via", sip.Via("client.com").toString())
+        r.addHeader("authorization",
+                    "Basic " + "userXname:passXword".encode('base64'))
+        self.proxy.datagramReceived(r.toString(), ("client.com", 5060))
+
+        self.assertEqual(len(self.registry.users), 1)
+        self.assertEqual(len(self.sent), 1)
+        dest, m = self.sent[0]
+        self.assertEqual(m.code, 200)
+
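+    # Editorial note: the Authorization value used above is the standard Basic
+    # scheme, i.e. "Basic " followed by base64("username:password"); Python 2's
+    # str.encode('base64') produces that encoding (with a trailing newline).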
+
+    def test_failedBasicAuthentication(self):
+        """
+        Failed registration with basic authentication results in an
+        unauthorized error response.
+        """
+        self.addPortal()
+        self.proxy.authorizers = self.proxy.authorizers.copy()
+        self.proxy.authorizers['basic'] = sip.BasicAuthorizer()
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_failedBasicAuthentication])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(
+            warnings[0]['message'],
+            "twisted.protocols.sip.BasicAuthorizer was deprecated in "
+            "Twisted 9.0.0")
+        self.assertEqual(
+            warnings[0]['category'],
+            DeprecationWarning)
+        r = sip.Request("REGISTER", "sip:bell.example.com")
+        r.addHeader("to", "sip:joe at bell.example.com")
+        r.addHeader("contact", "sip:joe at client.com:1234")
+        r.addHeader("via", sip.Via("client.com").toString())
+        r.addHeader(
+            "authorization", "Basic " + "userXname:password".encode('base64'))
+        self.proxy.datagramReceived(r.toString(), ("client.com", 5060))
+
+        self.assertEqual(len(self.registry.users), 0)
+        self.assertEqual(len(self.sent), 1)
+        dest, m = self.sent[0]
+        self.assertEqual(m.code, 401)
+
+
+    def testWrongDomainRegister(self):
+        r = sip.Request("REGISTER", "sip:wrong.com")
+        r.addHeader("to", "sip:joe at bell.example.com")
+        r.addHeader("contact", "sip:joe at client.com:1234")
+        r.addHeader("via", sip.Via("client.com").toString())
+        self.proxy.datagramReceived(r.toString(), ("client.com", 5060))
+        self.assertEqual(len(self.sent), 0)
+
+    def testWrongToDomainRegister(self):
+        r = sip.Request("REGISTER", "sip:bell.example.com")
+        r.addHeader("to", "sip:joe at foo.com")
+        r.addHeader("contact", "sip:joe at client.com:1234")
+        r.addHeader("via", sip.Via("client.com").toString())
+        self.proxy.datagramReceived(r.toString(), ("client.com", 5060))
+        self.assertEqual(len(self.sent), 0)
+
+    def testWrongDomainLookup(self):
+        self.register()
+        url = sip.URL(username="joe", host="foo.com")
+        d = self.proxy.locator.getAddress(url)
+        self.assertFailure(d, LookupError)
+        return d
+
+    def testNoContactLookup(self):
+        self.register()
+        url = sip.URL(username="jane", host="bell.example.com")
+        d = self.proxy.locator.getAddress(url)
+        self.assertFailure(d, LookupError)
+        return d
+
+
+class Client(sip.Base):
+
+    def __init__(self):
+        sip.Base.__init__(self)
+        self.received = []
+        self.deferred = defer.Deferred()
+
+    def handle_response(self, response, addr):
+        self.received.append(response)
+        self.deferred.callback(self.received)
+
+
+class LiveTest(unittest.TestCase):
+
+    def setUp(self):
+        self.proxy = sip.RegisterProxy(host="127.0.0.1")
+        self.registry = sip.InMemoryRegistry("bell.example.com")
+        self.proxy.registry = self.proxy.locator = self.registry
+        self.serverPort = reactor.listenUDP(
+            0, self.proxy, interface="127.0.0.1")
+        self.client = Client()
+        self.clientPort = reactor.listenUDP(
+            0, self.client, interface="127.0.0.1")
+        self.serverAddress = (self.serverPort.getHost().host,
+                              self.serverPort.getHost().port)
+    setUp = utils.suppressWarnings(setUp,
+        util.suppress(category=DeprecationWarning,
+            message=r'twisted.protocols.sip.DigestAuthorizer was deprecated'))
+
+    def tearDown(self):
+        for d, uri in self.registry.users.values():
+            d.cancel()
+        d1 = defer.maybeDeferred(self.clientPort.stopListening)
+        d2 = defer.maybeDeferred(self.serverPort.stopListening)
+        return defer.gatherResults([d1, d2])
+
+    def testRegister(self):
+        p = self.clientPort.getHost().port
+        r = sip.Request("REGISTER", "sip:bell.example.com")
+        r.addHeader("to", "sip:joe at bell.example.com")
+        r.addHeader("contact", "sip:joe at 127.0.0.1:%d" % p)
+        r.addHeader("via", sip.Via("127.0.0.1", port=p).toString())
+        self.client.sendMessage(
+            sip.URL(host="127.0.0.1", port=self.serverAddress[1]), r)
+        d = self.client.deferred
+        def check(received):
+            self.assertEqual(len(received), 1)
+            r = received[0]
+            self.assertEqual(r.code, 200)
+        d.addCallback(check)
+        return d
+
+    def test_amoralRPort(self):
+        """
+        rport is allowed without a value, apparently because server
+        implementors might be too stupid to check the received port
+        against 5060 and see if they're equal, and because client
+        implementors might be too stupid to bind to port 5060, or set a
+        value on the rport parameter they send if they bind to another
+        port.
+        """
+        p = self.clientPort.getHost().port
+        r = sip.Request("REGISTER", "sip:bell.example.com")
+        r.addHeader("to", "sip:joe at bell.example.com")
+        r.addHeader("contact", "sip:joe at 127.0.0.1:%d" % p)
+        r.addHeader("via", sip.Via("127.0.0.1", port=p, rport=True).toString())
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_amoralRPort])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(
+            warnings[0]['message'],
+            'rport=True is deprecated since Twisted 9.0.')
+        self.assertEqual(
+            warnings[0]['category'],
+            DeprecationWarning)
+        self.client.sendMessage(sip.URL(host="127.0.0.1",
+                                        port=self.serverAddress[1]),
+                                r)
+        d = self.client.deferred
+        def check(received):
+            self.assertEqual(len(received), 1)
+            r = received[0]
+            self.assertEqual(r.code, 200)
+        d.addCallback(check)
+        return d
+
+
+
+registerRequest = """
+REGISTER sip:intarweb.us SIP/2.0\r
+Via: SIP/2.0/UDP 192.168.1.100:50609\r
+From: <sip:exarkun at intarweb.us:50609>\r
+To: <sip:exarkun at intarweb.us:50609>\r
+Contact: "exarkun" <sip:exarkun at 192.168.1.100:50609>\r
+Call-ID: 94E7E5DAF39111D791C6000393764646 at intarweb.us\r
+CSeq: 9898 REGISTER\r
+Expires: 500\r
+User-Agent: X-Lite build 1061\r
+Content-Length: 0\r
+\r
+"""
+
+challengeResponse = """\
+SIP/2.0 401 Unauthorized\r
+Via: SIP/2.0/UDP 192.168.1.100:50609;received=127.0.0.1;rport=5632\r
+To: <sip:exarkun at intarweb.us:50609>\r
+From: <sip:exarkun at intarweb.us:50609>\r
+Call-ID: 94E7E5DAF39111D791C6000393764646 at intarweb.us\r
+CSeq: 9898 REGISTER\r
+WWW-Authenticate: Digest nonce="92956076410767313901322208775",opaque="1674186428",qop-options="auth",algorithm="MD5",realm="intarweb.us"\r
+\r
+"""
+
+authRequest = """\
+REGISTER sip:intarweb.us SIP/2.0\r
+Via: SIP/2.0/UDP 192.168.1.100:50609\r
+From: <sip:exarkun at intarweb.us:50609>\r
+To: <sip:exarkun at intarweb.us:50609>\r
+Contact: "exarkun" <sip:exarkun at 192.168.1.100:50609>\r
+Call-ID: 94E7E5DAF39111D791C6000393764646 at intarweb.us\r
+CSeq: 9899 REGISTER\r
+Expires: 500\r
+Authorization: Digest username="exarkun",realm="intarweb.us",nonce="92956076410767313901322208775",response="4a47980eea31694f997369214292374b",uri="sip:intarweb.us",algorithm=MD5,opaque="1674186428"\r
+User-Agent: X-Lite build 1061\r
+Content-Length: 0\r
+\r
+"""
+
+okResponse = """\
+SIP/2.0 200 OK\r
+Via: SIP/2.0/UDP 192.168.1.100:50609;received=127.0.0.1;rport=5632\r
+To: <sip:exarkun at intarweb.us:50609>\r
+From: <sip:exarkun at intarweb.us:50609>\r
+Call-ID: 94E7E5DAF39111D791C6000393764646 at intarweb.us\r
+CSeq: 9899 REGISTER\r
+Contact: sip:exarkun at 127.0.0.1:5632\r
+Expires: 3600\r
+Content-Length: 0\r
+\r
+"""
+
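+# Editorial sketch (not part of the upstream tests): how a Digest "response"
+# value such as the one in authRequest above is computed per RFC 2617 when no
+# qop/cnonce is used.  digestResponse is a hypothetical helper added purely for
+# illustration; the real implementation lives in twisted.protocols.sip
+# (DigestCalcHA1 / DigestCalcResponse).
+import hashlib
+
+def digestResponse(username, realm, password, method, uri, nonce):
+    # HA1 = MD5(username:realm:password), HA2 = MD5(method:uri)
+    ha1 = hashlib.md5("%s:%s:%s" % (username, realm, password)).hexdigest()
+    ha2 = hashlib.md5("%s:%s" % (method, uri)).hexdigest()
+    # Without qop, response = MD5(HA1:nonce:HA2)
+    return hashlib.md5("%s:%s:%s" % (ha1, nonce, ha2)).hexdigest()
+
+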
+class FakeDigestAuthorizer(sip.DigestAuthorizer):
+    def generateNonce(self):
+        return '92956076410767313901322208775'
+    def generateOpaque(self):
+        return '1674186428'
+
+
+class FakeRegistry(sip.InMemoryRegistry):
+    """Make sure expiration is always seen to be 3600.
+
+    Otherwise slow reactors fail tests incorrectly.
+    """
+
+    def _cbReg(self, reg):
+        if 3600 < reg.secondsToExpiry or reg.secondsToExpiry < 3598:
+            raise RuntimeError(
+                "bad seconds to expire: %s" % reg.secondsToExpiry)
+        reg.secondsToExpiry = 3600
+        return reg
+
+    def getRegistrationInfo(self, uri):
+        d = sip.InMemoryRegistry.getRegistrationInfo(self, uri)
+        return d.addCallback(self._cbReg)
+
+    def registerAddress(self, domainURL, logicalURL, physicalURL):
+        d = sip.InMemoryRegistry.registerAddress(
+            self, domainURL, logicalURL, physicalURL)
+        return d.addCallback(self._cbReg)
+
+class AuthorizationTestCase(unittest.TestCase):
+    def setUp(self):
+        self.proxy = sip.RegisterProxy(host="intarweb.us")
+        self.proxy.authorizers = self.proxy.authorizers.copy()
+        self.proxy.authorizers['digest'] = FakeDigestAuthorizer()
+
+        self.registry = FakeRegistry("intarweb.us")
+        self.proxy.registry = self.proxy.locator = self.registry
+        self.transport = proto_helpers.FakeDatagramTransport()
+        self.proxy.transport = self.transport
+
+        r = TestRealm()
+        p = cred.portal.Portal(r)
+        c = cred.checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        c.addUser('exarkun@intarweb.us', 'password')
+        p.registerChecker(c)
+        self.proxy.portal = p
+    setUp = utils.suppressWarnings(setUp,
+        util.suppress(category=DeprecationWarning,
+            message=r'twisted.protocols.sip.DigestAuthorizer was deprecated'))
+
+    def tearDown(self):
+        for d, uri in self.registry.users.values():
+            d.cancel()
+        del self.proxy
+
+    def testChallenge(self):
+        self.proxy.datagramReceived(registerRequest, ("127.0.0.1", 5632))
+
+        self.assertEqual(
+            self.transport.written[-1],
+            ((challengeResponse, ("127.0.0.1", 5632)))
+        )
+        self.transport.written = []
+
+        self.proxy.datagramReceived(authRequest, ("127.0.0.1", 5632))
+
+        self.assertEqual(
+            self.transport.written[-1],
+            ((okResponse, ("127.0.0.1", 5632)))
+        )
+    testChallenge.suppress = [
+        util.suppress(
+            category=DeprecationWarning,
+            message=r'twisted.protocols.sip.DigestAuthorizer was deprecated'),
+        util.suppress(
+            category=DeprecationWarning,
+            message=r'twisted.protocols.sip.DigestedCredentials was deprecated'),
+        util.suppress(
+            category=DeprecationWarning,
+            message=r'twisted.protocols.sip.DigestCalcHA1 was deprecated'),
+        util.suppress(
+            category=DeprecationWarning,
+            message=r'twisted.protocols.sip.DigestCalcResponse was deprecated')]
+
+
+
+class DeprecationTests(unittest.TestCase):
+    """
+    Tests for deprecation of obsolete components of L{twisted.protocols.sip}.
+    """
+
+    def test_deprecatedDigestCalcHA1(self):
+        """
+        L{sip.DigestCalcHA1} is deprecated.
+        """
+        self.callDeprecated(Version("Twisted", 9, 0, 0),
+                            sip.DigestCalcHA1, '', '', '', '', '', '')
+
+
+    def test_deprecatedDigestCalcResponse(self):
+        """
+        L{sip.DigestCalcResponse} is deprecated.
+        """
+        self.callDeprecated(Version("Twisted", 9, 0, 0),
+                            sip.DigestCalcResponse, '', '', '', '', '', '', '',
+                            '')
+
+    def test_deprecatedBasicAuthorizer(self):
+        """
+        L{sip.BasicAuthorizer} is deprecated.
+        """
+        self.callDeprecated(Version("Twisted", 9, 0, 0), sip.BasicAuthorizer)
+
+
+    def test_deprecatedDigestAuthorizer(self):
+        """
+        L{sip.DigestAuthorizer} is deprecated.
+        """
+        self.callDeprecated(Version("Twisted", 9, 0, 0), sip.DigestAuthorizer)
+
+
+    def test_deprecatedDigestedCredentials(self):
+        """
+        L{sip.DigestedCredentials} is deprecated.
+        """
+        self.callDeprecated(Version("Twisted", 9, 0, 0),
+                            sip.DigestedCredentials, '', {}, {})
diff --git a/ThirdParty/Twisted/twisted/test/test_sob.py b/ThirdParty/Twisted/twisted/test/test_sob.py
new file mode 100644
index 0000000..76c33a8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_sob.py
@@ -0,0 +1,172 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+import sys, os
+
+try:
+    import Crypto.Cipher.AES
+except ImportError:
+    Crypto = None
+
+from twisted.trial import unittest
+from twisted.persisted import sob
+from twisted.python import components
+
+class Dummy(components.Componentized):
+    pass
+
+objects = [
+1,
+"hello",
+(1, "hello"),
+[1, "hello"],
+{1:"hello"},
+]
+
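+# Editorial sketch (illustration only, mirroring what PersistTestCase exercises
+# below): a Persistent wrapper can save any of the objects above in either
+# 'source' or 'pickle' style and load it back, e.g.
+#
+#     p = sob.Persistent({1: "hello"}, 'example')
+#     p.setStyle('pickle')
+#     p.save(filename='example.tap')
+#     restored = sob.load('example.tap', 'pickle')
+#
+# 'example' and 'example.tap' are hypothetical names used only for this note.
+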
+class FakeModule(object):
+    pass
+
+class PersistTestCase(unittest.TestCase):
+    def testStyles(self):
+        for o in objects:
+            p = sob.Persistent(o, '')
+            for style in 'source pickle'.split():
+                p.setStyle(style)
+                p.save(filename='persisttest.'+style)
+                o1 = sob.load('persisttest.'+style, style)
+                self.assertEqual(o, o1)
+
+    def testStylesBeingSet(self):
+        o = Dummy()
+        o.foo = 5
+        o.setComponent(sob.IPersistable, sob.Persistent(o, 'lala'))
+        for style in 'source pickle'.split():
+            sob.IPersistable(o).setStyle(style)
+            sob.IPersistable(o).save(filename='lala.'+style)
+            o1 = sob.load('lala.'+style, style)
+            self.assertEqual(o.foo, o1.foo)
+            self.assertEqual(sob.IPersistable(o1).style, style)
+
+
+    def testNames(self):
+        o = [1,2,3]
+        p = sob.Persistent(o, 'object')
+        for style in 'source pickle'.split():
+            p.setStyle(style)
+            p.save()
+            o1 = sob.load('object.ta'+style[0], style)
+            self.assertEqual(o, o1)
+            for tag in 'lala lolo'.split():
+                p.save(tag)
+                o1 = sob.load('object-'+tag+'.ta'+style[0], style)
+                self.assertEqual(o, o1)
+
+    def testEncryptedStyles(self):
+        for o in objects:
+            phrase='once I was the king of spain'
+            p = sob.Persistent(o, '')
+            for style in 'source pickle'.split():
+                p.setStyle(style)
+                p.save(filename='epersisttest.'+style, passphrase=phrase)
+                o1 = sob.load('epersisttest.'+style, style, phrase)
+                self.assertEqual(o, o1)
+    if Crypto is None:
+        testEncryptedStyles.skip = "PyCrypto required for encrypted config"
+
+    def testPython(self):
+        f = open("persisttest.python", 'w')
+        f.write('foo=[1,2,3] ')
+        f.close()
+        o = sob.loadValueFromFile('persisttest.python', 'foo')
+        self.assertEqual(o, [1,2,3])
+
+    def testEncryptedPython(self):
+        phrase='once I was the king of spain'
+        f = open("epersisttest.python", 'w')
+        f.write(
+            sob._encrypt(phrase, 'foo=[1,2,3]'))
+        f.close()
+        o = sob.loadValueFromFile('epersisttest.python', 'foo', phrase)
+        self.assertEqual(o, [1,2,3])
+    if Crypto is None:
+        testEncryptedPython.skip = "PyCrypto required for encrypted config"
+
+    def testTypeGuesser(self):
+        self.assertRaises(KeyError, sob.guessType, "file.blah")
+        self.assertEqual('python', sob.guessType("file.py"))
+        self.assertEqual('python', sob.guessType("file.tac"))
+        self.assertEqual('python', sob.guessType("file.etac"))
+        self.assertEqual('pickle', sob.guessType("file.tap"))
+        self.assertEqual('pickle', sob.guessType("file.etap"))
+        self.assertEqual('source', sob.guessType("file.tas"))
+        self.assertEqual('source', sob.guessType("file.etas"))
+
+    def testEverythingEphemeralGetattr(self):
+        """
+        Verify that _EverythingEphemeral.__getattr__ works.
+        """
+        self.fakeMain.testMainModGetattr = 1
+
+        dirname = self.mktemp()
+        os.mkdir(dirname)
+
+        filename = os.path.join(dirname, 'persisttest.ee_getattr')
+
+        f = file(filename, 'w')
+        f.write('import __main__\n')
+        f.write('if __main__.testMainModGetattr != 1: raise AssertionError\n')
+        f.write('app = None\n')
+        f.close()
+
+        sob.load(filename, 'source')
+
+    def testEverythingEphemeralSetattr(self):
+        """
+        Verify that _EverythingEphemeral.__setattr__ won't affect __main__.
+        """
+        self.fakeMain.testMainModSetattr = 1
+
+        dirname = self.mktemp()
+        os.mkdir(dirname)
+
+        filename = os.path.join(dirname, 'persisttest.ee_setattr')
+        f = file(filename, 'w')
+        f.write('import __main__\n')
+        f.write('__main__.testMainModSetattr = 2\n')
+        f.write('app = None\n')
+        f.close()
+
+        sob.load(filename, 'source')
+
+        self.assertEqual(self.fakeMain.testMainModSetattr, 1)
+
+    def testEverythingEphemeralException(self):
+        """
+        Test that an exception during load() won't cause _EverythingEphemeral
+        to mask __main__.
+        """
+        dirname = self.mktemp()
+        os.mkdir(dirname)
+        filename = os.path.join(dirname, 'persisttest.ee_exception')
+
+        f = file(filename, 'w')
+        f.write('raise ValueError\n')
+        f.close()
+
+        self.assertRaises(ValueError, sob.load, filename, 'source')
+        self.assertEqual(type(sys.modules['__main__']), FakeModule)
+
+    def setUp(self):
+        """
+        Replace the __main__ module with a fake one, so that it can be mutated
+        in tests
+        """
+        self.realMain = sys.modules['__main__']
+        self.fakeMain = sys.modules['__main__'] = FakeModule()
+
+    def tearDown(self):
+        """
+        Restore __main__ to its original value
+        """
+        sys.modules['__main__'] = self.realMain
+
diff --git a/ThirdParty/Twisted/twisted/test/test_socks.py b/ThirdParty/Twisted/twisted/test/test_socks.py
new file mode 100644
index 0000000..ebcb843
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_socks.py
@@ -0,0 +1,498 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.protocols.socks}, an implementation of the SOCKSv4 and
+SOCKSv4a protocols.
+"""
+
+import struct, socket
+
+from twisted.trial import unittest
+from twisted.test import proto_helpers
+from twisted.internet import defer, address, reactor
+from twisted.internet.error import DNSLookupError
+from twisted.protocols import socks
+
+
+class StringTCPTransport(proto_helpers.StringTransport):
+    stringTCPTransport_closing = False
+    peer = None
+
+    def getPeer(self):
+        return self.peer
+
+    def getHost(self):
+        return address.IPv4Address('TCP', '2.3.4.5', 42)
+
+    def loseConnection(self):
+        self.stringTCPTransport_closing = True
+
+
+
+class FakeResolverReactor:
+    """
+    Bare-bones reactor with deterministic behavior for the resolve method.
+    """
+    def __init__(self, names):
+        """
+        @type names: C{dict} containing C{str} keys and C{str} values.
+        @param names: A hostname to IP address mapping. The IP addresses are
+            stringified dotted quads.
+        """
+        self.names = names
+
+
+    def resolve(self, hostname):
+        """
+        Resolve a hostname by looking it up in the C{names} dictionary.
+        """
+        try:
+            return defer.succeed(self.names[hostname])
+        except KeyError:
+            return defer.fail(
+                DNSLookupError("FakeResolverReactor couldn't find " + hostname))
+
+
+
+class SOCKSv4Driver(socks.SOCKSv4):
+    # last SOCKSv4Outgoing instantiated
+    driver_outgoing = None
+
+    # last SOCKSv4IncomingFactory instantiated
+    driver_listen = None
+
+    def connectClass(self, host, port, klass, *args):
+        # fake it
+        proto = klass(*args)
+        proto.transport = StringTCPTransport()
+        proto.transport.peer = address.IPv4Address('TCP', host, port)
+        proto.connectionMade()
+        self.driver_outgoing = proto
+        return defer.succeed(proto)
+
+    def listenClass(self, port, klass, *args):
+        # fake it
+        factory = klass(*args)
+        self.driver_listen = factory
+        if port == 0:
+            port = 1234
+        return defer.succeed(('6.7.8.9', port))
+
+
+
+class Connect(unittest.TestCase):
+    """
+    Tests for SOCKSv4 and SOCKSv4a connect requests using the L{SOCKSv4} protocol.
+    """
+    def setUp(self):
+        self.sock = SOCKSv4Driver()
+        self.sock.transport = StringTCPTransport()
+        self.sock.connectionMade()
+        self.sock.reactor = FakeResolverReactor({"localhost":"127.0.0.1"})
+
+
+    def tearDown(self):
+        outgoing = self.sock.driver_outgoing
+        if outgoing is not None:
+            self.assert_(outgoing.transport.stringTCPTransport_closing,
+                         "Outgoing SOCKS connections need to be closed.")
+
+
+    def test_simple(self):
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 1, 34)
+            + socket.inet_aton('1.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+        self.assertEqual(sent,
+                         struct.pack('!BBH', 0, 90, 34)
+                         + socket.inet_aton('1.2.3.4'))
+        self.assert_(not self.sock.transport.stringTCPTransport_closing)
+        self.assert_(self.sock.driver_outgoing is not None)
+
+        # pass some data through
+        self.sock.dataReceived('hello, world')
+        self.assertEqual(self.sock.driver_outgoing.transport.value(),
+                         'hello, world')
+
+        # the other way around
+        self.sock.driver_outgoing.dataReceived('hi there')
+        self.assertEqual(self.sock.transport.value(), 'hi there')
+
+        self.sock.connectionLost('fake reason')
+
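+    # Editorial note: the request built in test_simple with
+    #   struct.pack('!BBH', 4, 1, 34) + socket.inet_aton('1.2.3.4') + 'fooBAR' + '\0'
+    # is a SOCKSv4 CONNECT packet: version (4), command (1 = CONNECT, 2 = BIND),
+    # destination port (34), destination IPv4 address, then a NUL-terminated
+    # user ID.  The expected reply struct.pack('!BBH', 0, 90, 34) is: version
+    # byte 0, status 90 (request granted; 91 means rejected), followed by the
+    # port and address fields echoed back.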
+
+    def test_socks4aSuccessfulResolution(self):
+        """
+        If the destination IP address has zeros for the first three octets and
+        non-zero for the fourth octet, the client is attempting a v4a
+        connection.  A hostname is specified after the user ID string and the
+        server connects to the address that hostname resolves to.
+
+        @see: U{http://en.wikipedia.org/wiki/SOCKS#SOCKS_4a_protocol}
+        """
+        # send the domain name "localhost" to be resolved
+        clientRequest = (
+            struct.pack('!BBH', 4, 1, 34)
+            + socket.inet_aton('0.0.0.1')
+            + 'fooBAZ\0'
+            + 'localhost\0')
+
+        # Deliver the bytes one by one to exercise the protocol's buffering
+        # logic. FakeResolverReactor's resolve method is invoked to "resolve"
+        # the hostname.
+        for byte in clientRequest:
+            self.sock.dataReceived(byte)
+
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+
+        # Verify that the server responded with the address which will be
+        # connected to.
+        self.assertEqual(
+            sent,
+            struct.pack('!BBH', 0, 90, 34) + socket.inet_aton('127.0.0.1'))
+        self.assertFalse(self.sock.transport.stringTCPTransport_closing)
+        self.assertNotIdentical(self.sock.driver_outgoing, None)
+
+        # Pass some data through and verify it is forwarded to the outgoing
+        # connection.
+        self.sock.dataReceived('hello, world')
+        self.assertEqual(
+            self.sock.driver_outgoing.transport.value(), 'hello, world')
+
+        # Deliver some data from the output connection and verify it is
+        # passed along to the incoming side.
+        self.sock.driver_outgoing.dataReceived('hi there')
+        self.assertEqual(self.sock.transport.value(), 'hi there')
+
+        self.sock.connectionLost('fake reason')
+
+
+    def test_socks4aFailedResolution(self):
+        """
+        Failed hostname resolution on a SOCKSv4a packet results in a 91 error
+        response and the connection getting closed.
+        """
+        # send the domain name "failinghost" to be resolved
+        clientRequest = (
+            struct.pack('!BBH', 4, 1, 34)
+            + socket.inet_aton('0.0.0.1')
+            + 'fooBAZ\0'
+            + 'failinghost\0')
+
+        # Deliver the bytes one by one to exercise the protocol's buffering
+        # logic. FakeResolverReactor's resolve method is invoked to "resolve"
+        # the hostname.
+        for byte in clientRequest:
+            self.sock.dataReceived(byte)
+
+        # Verify that the server responds with a 91 error.
+        sent = self.sock.transport.value()
+        self.assertEqual(
+            sent,
+            struct.pack('!BBH', 0, 91, 0) + socket.inet_aton('0.0.0.0'))
+
+        # A failed resolution causes the transport to drop the connection.
+        self.assertTrue(self.sock.transport.stringTCPTransport_closing)
+        self.assertIdentical(self.sock.driver_outgoing, None)
+
+
+    def test_accessDenied(self):
+        self.sock.authorize = lambda code, server, port, user: 0
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 1, 4242)
+            + socket.inet_aton('10.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        self.assertEqual(self.sock.transport.value(),
+                         struct.pack('!BBH', 0, 91, 0)
+                         + socket.inet_aton('0.0.0.0'))
+        self.assert_(self.sock.transport.stringTCPTransport_closing)
+        self.assertIdentical(self.sock.driver_outgoing, None)
+
+
+    def test_eofRemote(self):
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 1, 34)
+            + socket.inet_aton('1.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+
+        # pass some data through
+        self.sock.dataReceived('hello, world')
+        self.assertEqual(self.sock.driver_outgoing.transport.value(),
+                         'hello, world')
+
+        # now close it from the server side
+        self.sock.driver_outgoing.transport.loseConnection()
+        self.sock.driver_outgoing.connectionLost('fake reason')
+
+
+    def test_eofLocal(self):
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 1, 34)
+            + socket.inet_aton('1.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+
+        # pass some data through
+        self.sock.dataReceived('hello, world')
+        self.assertEqual(self.sock.driver_outgoing.transport.value(),
+                         'hello, world')
+
+        # now close it from the client side
+        self.sock.connectionLost('fake reason')
+
+
+
+class Bind(unittest.TestCase):
+    """
+    Tests for SOCKSv4 and SOCKSv4a bind requests using the L{SOCKSv4} protocol.
+    """
+    def setUp(self):
+        self.sock = SOCKSv4Driver()
+        self.sock.transport = StringTCPTransport()
+        self.sock.connectionMade()
+        self.sock.reactor = FakeResolverReactor({"localhost":"127.0.0.1"})
+
+##     def tearDown(self):
+##         # TODO ensure the listen port is closed
+##         listen = self.sock.driver_listen
+##         if listen is not None:
+##             self.assert_(incoming.transport.stringTCPTransport_closing,
+##                     "Incoming SOCKS connections need to be closed.")
+
+    def test_simple(self):
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 2, 34)
+            + socket.inet_aton('1.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+        self.assertEqual(sent,
+                         struct.pack('!BBH', 0, 90, 1234)
+                         + socket.inet_aton('6.7.8.9'))
+        self.assert_(not self.sock.transport.stringTCPTransport_closing)
+        self.assert_(self.sock.driver_listen is not None)
+
+        # connect
+        incoming = self.sock.driver_listen.buildProtocol(('1.2.3.4', 5345))
+        self.assertNotIdentical(incoming, None)
+        incoming.transport = StringTCPTransport()
+        incoming.connectionMade()
+
+        # now we should have the second reply packet
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+        self.assertEqual(sent,
+                         struct.pack('!BBH', 0, 90, 0)
+                         + socket.inet_aton('0.0.0.0'))
+        self.assert_(not self.sock.transport.stringTCPTransport_closing)
+
+        # pass some data through
+        self.sock.dataReceived('hello, world')
+        self.assertEqual(incoming.transport.value(),
+                         'hello, world')
+
+        # the other way around
+        incoming.dataReceived('hi there')
+        self.assertEqual(self.sock.transport.value(), 'hi there')
+
+        self.sock.connectionLost('fake reason')
+
+
+    def test_socks4a(self):
+        """
+        If the destination IP address has zeros for the first three octets and
+        non-zero for the fourth octet, the client is attempting a v4a
+        connection.  A hostname is specified after the user ID string and the
+        server connects to the address that hostname resolves to.
+
+        @see: U{http://en.wikipedia.org/wiki/SOCKS#SOCKS_4a_protocol}
+        """
+        # send the domain name "localhost" to be resolved
+        clientRequest = (
+            struct.pack('!BBH', 4, 2, 34)
+            + socket.inet_aton('0.0.0.1')
+            + 'fooBAZ\0'
+            + 'localhost\0')
+
+        # Deliver the bytes one by one to exercise the protocol's buffering
+        # logic. FakeResolverReactor's resolve method is invoked to "resolve"
+        # the hostname.
+        for byte in clientRequest:
+            self.sock.dataReceived(byte)
+
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+
+        # Verify that the server responded with the address which will be
+        # connected to.
+        self.assertEqual(
+            sent,
+            struct.pack('!BBH', 0, 90, 1234) + socket.inet_aton('6.7.8.9'))
+        self.assertFalse(self.sock.transport.stringTCPTransport_closing)
+        self.assertNotIdentical(self.sock.driver_listen, None)
+
+        # connect
+        incoming = self.sock.driver_listen.buildProtocol(('127.0.0.1', 5345))
+        self.assertNotIdentical(incoming, None)
+        incoming.transport = StringTCPTransport()
+        incoming.connectionMade()
+
+        # now we should have the second reply packet
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+        self.assertEqual(sent,
+                         struct.pack('!BBH', 0, 90, 0)
+                         + socket.inet_aton('0.0.0.0'))
+        self.assertNotIdentical(
+            self.sock.transport.stringTCPTransport_closing, None)
+
+        # Deliver some data from the output connection and verify it is
+        # passed along to the incoming side.
+        self.sock.dataReceived('hi there')
+        self.assertEqual(incoming.transport.value(), 'hi there')
+
+        # the other way around
+        incoming.dataReceived('hi there')
+        self.assertEqual(self.sock.transport.value(), 'hi there')
+
+        self.sock.connectionLost('fake reason')
+
+
+    def test_socks4aFailedResolution(self):
+        """
+        Failed hostname resolution on a SOCKSv4a packet results in a 91 error
+        response and the connection getting closed.
+        """
+        # send the domain name "failinghost" to be resolved
+        clientRequest = (
+            struct.pack('!BBH', 4, 2, 34)
+            + socket.inet_aton('0.0.0.1')
+            + 'fooBAZ\0'
+            + 'failinghost\0')
+
+        # Deliver the bytes one by one to exercise the protocol's buffering
+        # logic. FakeResolverReactor's resolve method is invoked to "resolve"
+        # the hostname.
+        for byte in clientRequest:
+            self.sock.dataReceived(byte)
+
+        # Verify that the server responds with a 91 error.
+        sent = self.sock.transport.value()
+        self.assertEqual(
+            sent,
+            struct.pack('!BBH', 0, 91, 0) + socket.inet_aton('0.0.0.0'))
+
+        # A failed resolution causes the transport to drop the connection.
+        self.assertTrue(self.sock.transport.stringTCPTransport_closing)
+        self.assertIdentical(self.sock.driver_outgoing, None)
+
+
+    def test_accessDenied(self):
+        self.sock.authorize = lambda code, server, port, user: 0
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 2, 4242)
+            + socket.inet_aton('10.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        self.assertEqual(self.sock.transport.value(),
+                         struct.pack('!BBH', 0, 91, 0)
+                         + socket.inet_aton('0.0.0.0'))
+        self.assert_(self.sock.transport.stringTCPTransport_closing)
+        self.assertIdentical(self.sock.driver_listen, None)
+
+    def test_eofRemote(self):
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 2, 34)
+            + socket.inet_aton('1.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+
+        # connect
+        incoming = self.sock.driver_listen.buildProtocol(('1.2.3.4', 5345))
+        self.assertNotIdentical(incoming, None)
+        incoming.transport = StringTCPTransport()
+        incoming.connectionMade()
+
+        # now we should have the second reply packet
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+        self.assertEqual(sent,
+                         struct.pack('!BBH', 0, 90, 0)
+                         + socket.inet_aton('0.0.0.0'))
+        self.assert_(not self.sock.transport.stringTCPTransport_closing)
+
+        # pass some data through
+        self.sock.dataReceived('hello, world')
+        self.assertEqual(incoming.transport.value(),
+                         'hello, world')
+
+        # now close it from the server side
+        incoming.transport.loseConnection()
+        incoming.connectionLost('fake reason')
+
+    def test_eofLocal(self):
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 2, 34)
+            + socket.inet_aton('1.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+
+        # connect
+        incoming = self.sock.driver_listen.buildProtocol(('1.2.3.4', 5345))
+        self.assertNotIdentical(incoming, None)
+        incoming.transport = StringTCPTransport()
+        incoming.connectionMade()
+
+        # now we should have the second reply packet
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+        self.assertEqual(sent,
+                         struct.pack('!BBH', 0, 90, 0)
+                         + socket.inet_aton('0.0.0.0'))
+        self.assert_(not self.sock.transport.stringTCPTransport_closing)
+
+        # pass some data through
+        self.sock.dataReceived('hello, world')
+        self.assertEqual(incoming.transport.value(),
+                         'hello, world')
+
+        # now close it from the client side
+        self.sock.connectionLost('fake reason')
+
+    def test_badSource(self):
+        self.sock.dataReceived(
+            struct.pack('!BBH', 4, 2, 34)
+            + socket.inet_aton('1.2.3.4')
+            + 'fooBAR'
+            + '\0')
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+
+        # connect from WRONG address
+        incoming = self.sock.driver_listen.buildProtocol(('1.6.6.6', 666))
+        self.assertIdentical(incoming, None)
+
+        # Now we should have the second reply packet and it should
+        # be a failure. The connection should be closing.
+        sent = self.sock.transport.value()
+        self.sock.transport.clear()
+        self.assertEqual(sent,
+                         struct.pack('!BBH', 0, 91, 0)
+                         + socket.inet_aton('0.0.0.0'))
+        self.assert_(self.sock.transport.stringTCPTransport_closing)
diff --git a/ThirdParty/Twisted/twisted/test/test_ssl.py b/ThirdParty/Twisted/twisted/test/test_ssl.py
new file mode 100644
index 0000000..136bc2d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_ssl.py
@@ -0,0 +1,727 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for twisted SSL support.
+"""
+from __future__ import division, absolute_import
+
+from twisted.python.filepath import FilePath
+from twisted.trial import unittest
+from twisted.internet import protocol, reactor, interfaces, defer
+from twisted.internet.error import ConnectionDone
+from twisted.protocols import basic
+from twisted.python.runtime import platform
+from twisted.test.test_tcp import ProperlyCloseFilesMixin
+
+import os, errno
+
+try:
+    from OpenSSL import SSL, crypto
+    from twisted.internet import ssl
+    from twisted.test.ssl_helpers import ClientTLSContext, certPath
+except ImportError:
+    def _noSSL():
+        # ugh, make pyflakes happy.
+        global SSL
+        global ssl
+        SSL = ssl = None
+    _noSSL()
+
+try:
+    from twisted.protocols import tls as newTLS
+except ImportError:
+    # Assuming SSL exists, we're using the old (non-protocol) TLS support in the reactor
+    newTLS = None
+
+
+class UnintelligentProtocol(basic.LineReceiver):
+    """
+    @ivar deferred: a deferred that will fire at connection lost.
+    @type deferred: L{defer.Deferred}
+
+    @cvar pretext: text sent before TLS is set up.
+    @type pretext: C{bytes}
+
+    @cvar posttext: text sent after TLS is set up.
+    @type posttext: C{bytes}
+    """
+    pretext = [
+        b"first line",
+        b"last thing before tls starts",
+        b"STARTTLS"]
+
+    posttext = [
+        b"first thing after tls started",
+        b"last thing ever"]
+
+    def __init__(self):
+        self.deferred = defer.Deferred()
+
+
+    def connectionMade(self):
+        for l in self.pretext:
+            self.sendLine(l)
+
+
+    def lineReceived(self, line):
+        if line == b"READY":
+            self.transport.startTLS(ClientTLSContext(), self.factory.client)
+            for l in self.posttext:
+                self.sendLine(l)
+            self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        self.deferred.callback(None)
+
+
+
+class LineCollector(basic.LineReceiver):
+    """
+    @ivar deferred: a deferred that will fire at connection lost.
+    @type deferred: L{defer.Deferred}
+
+    @ivar doTLS: whether the protocol should initiate TLS or not.
+    @type doTLS: C{bool}
+
+    @ivar fillBuffer: if set to True, it will send lots of data once
+        C{STARTTLS} is received.
+    @type fillBuffer: C{bool}
+    """
+
+    def __init__(self, doTLS, fillBuffer=False):
+        self.doTLS = doTLS
+        self.fillBuffer = fillBuffer
+        self.deferred = defer.Deferred()
+
+
+    def connectionMade(self):
+        self.factory.rawdata = b''
+        self.factory.lines = []
+
+
+    def lineReceived(self, line):
+        self.factory.lines.append(line)
+        if line == b'STARTTLS':
+            if self.fillBuffer:
+                for x in range(500):
+                    self.sendLine(b'X' * 1000)
+            self.sendLine(b'READY')
+            if self.doTLS:
+                ctx = ServerTLSContext(
+                    privateKeyFileName=certPath,
+                    certificateFileName=certPath,
+                )
+                self.transport.startTLS(ctx, self.factory.server)
+            else:
+                self.setRawMode()
+
+
+    def rawDataReceived(self, data):
+        self.factory.rawdata += data
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        self.deferred.callback(None)
+
+
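+# Editorial note: UnintelligentProtocol and LineCollector together exercise an
+# opportunistic STARTTLS handshake: the client sends its pretext lines ending
+# with "STARTTLS", the server answers "READY" and (when doTLS is set) switches
+# to TLS via transport.startTLS() with a ServerTLSContext, and the client then
+# starts TLS with ClientTLSContext before sending its posttext lines.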
+
+class SingleLineServerProtocol(protocol.Protocol):
+    """
+    A protocol that sends a single line of data at C{connectionMade}.
+    """
+
+    def connectionMade(self):
+        self.transport.write(b"+OK <some crap>\r\n")
+        self.transport.getPeerCertificate()
+
+
+
+class RecordingClientProtocol(protocol.Protocol):
+    """
+    @ivar deferred: a deferred that will fire with first received content.
+    @type deferred: L{defer.Deferred}
+    """
+
+    def __init__(self):
+        self.deferred = defer.Deferred()
+
+
+    def connectionMade(self):
+        self.transport.getPeerCertificate()
+
+
+    def dataReceived(self, data):
+        self.deferred.callback(data)
+
+
+
+class ImmediatelyDisconnectingProtocol(protocol.Protocol):
+    """
+    A protocol that disconnects immediately on connection. It fires the
+    C{connectionDisconnected} deferred of its factory on connection lost.
+    """
+
+    def connectionMade(self):
+        self.transport.loseConnection()
+
+
+    def connectionLost(self, reason):
+        self.factory.connectionDisconnected.callback(None)
+
+
+
+def generateCertificateObjects(organization, organizationalUnit):
+    """
+    Create a certificate for given C{organization} and C{organizationalUnit}.
+
+    @return: a tuple of (key, request, certificate) objects.
+    """
+    pkey = crypto.PKey()
+    pkey.generate_key(crypto.TYPE_RSA, 512)
+    req = crypto.X509Req()
+    subject = req.get_subject()
+    subject.O = organization
+    subject.OU = organizationalUnit
+    req.set_pubkey(pkey)
+    req.sign(pkey, "md5")
+
+    # Here comes the actual certificate
+    cert = crypto.X509()
+    cert.set_serial_number(1)
+    cert.gmtime_adj_notBefore(0)
+    cert.gmtime_adj_notAfter(60) # Testing certificates need not be long lived
+    cert.set_issuer(req.get_subject())
+    cert.set_subject(req.get_subject())
+    cert.set_pubkey(req.get_pubkey())
+    cert.sign(pkey, "md5")
+
+    return pkey, req, cert
+
+
+
+def generateCertificateFiles(basename, organization, organizationalUnit):
+    """
+    Create key, req and cert certificate files prefixed by C{basename} for
+    the given C{organization} and C{organizationalUnit}.
+    """
+    pkey, req, cert = generateCertificateObjects(organization, organizationalUnit)
+
+    for ext, obj, dumpFunc in [
+        ('key', pkey, crypto.dump_privatekey),
+        ('req', req, crypto.dump_certificate_request),
+        ('cert', cert, crypto.dump_certificate)]:
+        fName = os.extsep.join((basename, ext)).encode("utf-8")
+        FilePath(fName).setContent(dumpFunc(crypto.FILETYPE_PEM, obj))
+
+
+
+class ContextGeneratingMixin:
+    """
+    Offer methods to create L{ssl.DefaultOpenSSLContextFactory} for both client
+    and server.
+
+    @ivar clientBase: prefix of client certificate files.
+    @type clientBase: C{str}
+
+    @ivar serverBase: prefix of server certificate files.
+    @type serverBase: C{str}
+
+    @ivar clientCtxFactory: a generated context factory to be used in
+        C{reactor.connectSSL}.
+    @type clientCtxFactory: L{ssl.DefaultOpenSSLContextFactory}
+
+    @ivar serverCtxFactory: a generated context factory to be used in
+        C{reactor.listenSSL}.
+    @type serverCtxFactory: L{ssl.DefaultOpenSSLContextFactory}
+    """
+
+    def makeContextFactory(self, org, orgUnit, *args, **kwArgs):
+        base = self.mktemp()
+        generateCertificateFiles(base, org, orgUnit)
+        serverCtxFactory = ssl.DefaultOpenSSLContextFactory(
+            os.extsep.join((base, 'key')),
+            os.extsep.join((base, 'cert')),
+            *args, **kwArgs)
+
+        return base, serverCtxFactory
+
+
+    def setupServerAndClient(self, clientArgs, clientKwArgs, serverArgs,
+                             serverKwArgs):
+        self.clientBase, self.clientCtxFactory = self.makeContextFactory(
+            *clientArgs, **clientKwArgs)
+        self.serverBase, self.serverCtxFactory = self.makeContextFactory(
+            *serverArgs, **serverKwArgs)
+
+
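+# Editorial note (hedged, not upstream code): a test case that mixes in
+# ContextGeneratingMixin typically calls setupServerAndClient() first and then
+# hands the generated factories to the reactor, roughly as ConnectionLostTestCase
+# does further below:
+#
+#     self.setupServerAndClient(
+#         ("Example Org", "client"), {},
+#         ("Example Org", "server"), {})
+#     port = reactor.listenSSL(0, serverFactory, self.serverCtxFactory)
+#     reactor.connectSSL('127.0.0.1', port.getHost().port,
+#                        clientFactory, self.clientCtxFactory)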
+
+if SSL is not None:
+    class ServerTLSContext(ssl.DefaultOpenSSLContextFactory):
+        """
+        A context factory with a default method set to L{SSL.TLSv1_METHOD}.
+        """
+        isClient = False
+
+        def __init__(self, *args, **kw):
+            kw['sslmethod'] = SSL.TLSv1_METHOD
+            ssl.DefaultOpenSSLContextFactory.__init__(self, *args, **kw)
+
+
+
+class StolenTCPTestCase(ProperlyCloseFilesMixin, unittest.TestCase):
+    """
+    For SSL transports, test many of the same things which are tested for
+    TCP transports.
+    """
+
+    def createServer(self, address, portNumber, factory):
+        """
+        Create an SSL server with a certificate using L{IReactorSSL.listenSSL}.
+        """
+        cert = ssl.PrivateCertificate.loadPEM(FilePath(certPath).getContent())
+        contextFactory = cert.options()
+        return reactor.listenSSL(
+            portNumber, factory, contextFactory, interface=address)
+
+
+    def connectClient(self, address, portNumber, clientCreator):
+        """
+        Create an SSL client using L{IReactorSSL.connectSSL}.
+        """
+        contextFactory = ssl.CertificateOptions()
+        return clientCreator.connectSSL(address, portNumber, contextFactory)
+
+
+    def getHandleExceptionType(self):
+        """
+        Return L{SSL.Error} as the expected error type which will be raised by
+        a write to the L{OpenSSL.SSL.Connection} object after it has been
+        closed.
+        """
+        return SSL.Error
+
+
+    def getHandleErrorCode(self):
+        """
+        Return the argument L{SSL.Error} will be constructed with for this
+        case.  This is basically just a random OpenSSL implementation detail.
+        It would be better if this test worked in a way which did not require
+        this.
+        """
+        # Windows 2000 SP 4 and Windows XP SP 2 give back WSAENOTSOCK for
+        # SSL.Connection.write for some reason.  The twisted.protocols.tls
+        # implementation of IReactorSSL doesn't suffer from this imprecation,
+        # though, since it is isolated from the Windows I/O layer (I suppose?).
+
+        # If test_properlyCloseFiles waited for the SSL handshake to complete
+        # and performed an orderly shutdown, then this would probably be a
+        # little less weird: writing to a shutdown SSL connection has a more
+        # well-defined failure mode (or at least it should).
+
+        # So figure out if twisted.protocols.tls is in use.  If it can be
+        # imported, it should be.
+        try:
+            import twisted.protocols.tls
+        except ImportError:
+            # It isn't available, so we expect WSAENOTSOCK if we're on Windows.
+            if platform.getType() == 'win32':
+                return errno.WSAENOTSOCK
+
+        # Otherwise, we expect an error about how we tried to write to a
+        # shutdown connection.  This is terribly implementation-specific.
+        return [('SSL routines', 'SSL_write', 'protocol is shutdown')]
+
+
+
+class TLSTestCase(unittest.TestCase):
+    """
+    Tests for startTLS support.
+
+    @ivar fillBuffer: forwarded to L{LineCollector.fillBuffer}
+    @type fillBuffer: C{bool}
+    """
+    fillBuffer = False
+
+    clientProto = None
+    serverProto = None
+
+
+    def tearDown(self):
+        if self.clientProto.transport is not None:
+            self.clientProto.transport.loseConnection()
+        if self.serverProto.transport is not None:
+            self.serverProto.transport.loseConnection()
+
+
+    def _runTest(self, clientProto, serverProto, clientIsServer=False):
+        """
+        Helper method to run TLS tests.
+
+        @param clientProto: protocol instance attached to the client
+            connection.
+        @param serverProto: protocol instance attached to the server
+            connection.
+        @param clientIsServer: flag indicating whether the client should
+            initiate startTLS instead of the server.
+
+        @return: a L{defer.Deferred} that will fire when both connections are
+            lost.
+        """
+        self.clientProto = clientProto
+        cf = self.clientFactory = protocol.ClientFactory()
+        cf.protocol = lambda: clientProto
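+        # These ad-hoc factory flags are read by the test protocols to decide
+        # which side drives the handshake; LineCollector, for instance, passes
+        # self.factory.server as the second argument to startTLS.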
+        if clientIsServer:
+            cf.server = False
+        else:
+            cf.client = True
+
+        self.serverProto = serverProto
+        sf = self.serverFactory = protocol.ServerFactory()
+        sf.protocol = lambda: serverProto
+        if clientIsServer:
+            sf.client = False
+        else:
+            sf.server = True
+
+        port = reactor.listenTCP(0, sf, interface="127.0.0.1")
+        self.addCleanup(port.stopListening)
+
+        reactor.connectTCP('127.0.0.1', port.getHost().port, cf)
+
+        return defer.gatherResults([clientProto.deferred, serverProto.deferred])
+
+
+    def test_TLS(self):
+        """
+        Test for server and client startTLS: the data sent by the client
+        should be received both before and after the startTLS.
+        """
+        def check(ignore):
+            self.assertEqual(
+                self.serverFactory.lines,
+                UnintelligentProtocol.pretext + UnintelligentProtocol.posttext
+            )
+        d = self._runTest(UnintelligentProtocol(),
+                          LineCollector(True, self.fillBuffer))
+        return d.addCallback(check)
+
+
+    def test_unTLS(self):
+        """
+        Test a startTLS on one side that is not followed by a startTLS on the
+        other: the data received after the startTLS should arrive as raw
+        (encrypted) bytes.
+        """
+        def check(ignored):
+            self.assertEqual(
+                self.serverFactory.lines,
+                UnintelligentProtocol.pretext
+            )
+            self.failUnless(self.serverFactory.rawdata,
+                            "No encrypted bytes received")
+        d = self._runTest(UnintelligentProtocol(),
+                          LineCollector(False, self.fillBuffer))
+        return d.addCallback(check)
+
+
+    def test_backwardsTLS(self):
+        """
+        Test startTLS first initiated by client.
+        """
+        def check(ignored):
+            self.assertEqual(
+                self.clientFactory.lines,
+                UnintelligentProtocol.pretext + UnintelligentProtocol.posttext
+            )
+        d = self._runTest(LineCollector(True, self.fillBuffer),
+                          UnintelligentProtocol(), True)
+        return d.addCallback(check)
+
+
+
+class SpammyTLSTestCase(TLSTestCase):
+    """
+    Test TLS features with bytes sitting in the out buffer.
+    """
+    fillBuffer = True
+
+
+
+class BufferingTestCase(unittest.TestCase):
+    serverProto = None
+    clientProto = None
+
+
+    def tearDown(self):
+        if self.serverProto.transport is not None:
+            self.serverProto.transport.loseConnection()
+        if self.clientProto.transport is not None:
+            self.clientProto.transport.loseConnection()
+
+
+    def test_openSSLBuffering(self):
+        serverProto = self.serverProto = SingleLineServerProtocol()
+        clientProto = self.clientProto = RecordingClientProtocol()
+
+        server = protocol.ServerFactory()
+        client = self.client = protocol.ClientFactory()
+
+        server.protocol = lambda: serverProto
+        client.protocol = lambda: clientProto
+
+        sCTX = ssl.DefaultOpenSSLContextFactory(certPath, certPath)
+        cCTX = ssl.ClientContextFactory()
+
+        port = reactor.listenSSL(0, server, sCTX, interface='127.0.0.1')
+        self.addCleanup(port.stopListening)
+
+        reactor.connectSSL('127.0.0.1', port.getHost().port, client, cCTX)
+
+        return clientProto.deferred.addCallback(
+            self.assertEqual, b"+OK <some crap>\r\n")
+
+
+
+class ConnectionLostTestCase(unittest.TestCase, ContextGeneratingMixin):
+    """
+    SSL connection closing tests.
+    """
+
+    def testImmediateDisconnect(self):
+        org = "twisted.test.test_ssl"
+        self.setupServerAndClient(
+            (org, org + ", client"), {},
+            (org, org + ", server"), {})
+
+        # Set up a server, connect to it with a client (which should work
+        # since our verifiers allow anything), then disconnect.
+        serverProtocolFactory = protocol.ServerFactory()
+        serverProtocolFactory.protocol = protocol.Protocol
+        self.serverPort = serverPort = reactor.listenSSL(0,
+            serverProtocolFactory, self.serverCtxFactory)
+
+        clientProtocolFactory = protocol.ClientFactory()
+        clientProtocolFactory.protocol = ImmediatelyDisconnectingProtocol
+        clientProtocolFactory.connectionDisconnected = defer.Deferred()
+        clientConnector = reactor.connectSSL('127.0.0.1',
+            serverPort.getHost().port, clientProtocolFactory, self.clientCtxFactory)
+
+        return clientProtocolFactory.connectionDisconnected.addCallback(
+            lambda ignoredResult: self.serverPort.stopListening())
+
+
+    def test_bothSidesLoseConnection(self):
+        """
+        Both sides of the SSL connection close the connection; the
+        connections should close cleanly, and only after the underlying TCP
+        connection has disconnected.
+        """
+        class CloseAfterHandshake(protocol.Protocol):
+            gotData = False
+
+            def __init__(self):
+                self.done = defer.Deferred()
+
+            def connectionMade(self):
+                self.transport.write(b"a")
+
+            def dataReceived(self, data):
+                # If we got data, handshake is over:
+                self.gotData = True
+                self.transport.loseConnection()
+
+            def connectionLost(self, reason):
+                if not self.gotData:
+                    reason = RuntimeError("We never received the data!")
+                self.done.errback(reason)
+                del self.done
+
+        org = "twisted.test.test_ssl"
+        self.setupServerAndClient(
+            (org, org + ", client"), {},
+            (org, org + ", server"), {})
+
+        serverProtocol = CloseAfterHandshake()
+        serverProtocolFactory = protocol.ServerFactory()
+        serverProtocolFactory.protocol = lambda: serverProtocol
+        serverPort = reactor.listenSSL(0,
+            serverProtocolFactory, self.serverCtxFactory)
+        self.addCleanup(serverPort.stopListening)
+
+        clientProtocol = CloseAfterHandshake()
+        clientProtocolFactory = protocol.ClientFactory()
+        clientProtocolFactory.protocol = lambda: clientProtocol
+        clientConnector = reactor.connectSSL('127.0.0.1',
+            serverPort.getHost().port, clientProtocolFactory, self.clientCtxFactory)
+
+        def checkResult(failure):
+            failure.trap(ConnectionDone)
+        return defer.gatherResults(
+            [clientProtocol.done.addErrback(checkResult),
+             serverProtocol.done.addErrback(checkResult)])
+
+    if newTLS is None:
+        test_bothSidesLoseConnection.skip = "Old SSL code doesn't always close cleanly."
+
+
+    def testFailedVerify(self):
+        org = "twisted.test.test_ssl"
+        self.setupServerAndClient(
+            (org, org + ", client"), {},
+            (org, org + ", server"), {})
+
+        def verify(*a):
+            return False
+        self.clientCtxFactory.getContext().set_verify(SSL.VERIFY_PEER, verify)
+
+        serverConnLost = defer.Deferred()
+        serverProtocol = protocol.Protocol()
+        serverProtocol.connectionLost = serverConnLost.callback
+        serverProtocolFactory = protocol.ServerFactory()
+        serverProtocolFactory.protocol = lambda: serverProtocol
+        self.serverPort = serverPort = reactor.listenSSL(0,
+            serverProtocolFactory, self.serverCtxFactory)
+
+        clientConnLost = defer.Deferred()
+        clientProtocol = protocol.Protocol()
+        clientProtocol.connectionLost = clientConnLost.callback
+        clientProtocolFactory = protocol.ClientFactory()
+        clientProtocolFactory.protocol = lambda: clientProtocol
+        clientConnector = reactor.connectSSL('127.0.0.1',
+            serverPort.getHost().port, clientProtocolFactory, self.clientCtxFactory)
+
+        dl = defer.DeferredList([serverConnLost, clientConnLost], consumeErrors=True)
+        return dl.addCallback(self._cbLostConns)
+
+
+    def _cbLostConns(self, results):
+        (sSuccess, sResult), (cSuccess, cResult) = results
+
+        self.failIf(sSuccess)
+        self.failIf(cSuccess)
+
+        acceptableErrors = [SSL.Error]
+
+        # Rather than getting a verification failure on Windows, we are getting
+        # a connection failure.  Without something like sslverify proxying
+        # in-between we can't fix up the platform's errors, so let's just
+        # specifically say it is only OK in this one case to keep the tests
+        # passing.  Normally we'd like to be as strict as possible here, so
+        # we're not going to allow this to report errors incorrectly on any
+        # other platforms.
+
+        if platform.isWindows():
+            from twisted.internet.error import ConnectionLost
+            acceptableErrors.append(ConnectionLost)
+
+        sResult.trap(*acceptableErrors)
+        cResult.trap(*acceptableErrors)
+
+        return self.serverPort.stopListening()
+
+
+
+class FakeContext:
+    """
+    A test double for L{OpenSSL.SSL.Context} which can be inspected more
+    easily.
+    """
+    def __init__(self, method):
+        self._method = method
+        self._options = 0
+
+
+    def set_options(self, options):
+        self._options |= options
+
+
+    def use_certificate_file(self, fileName):
+        pass
+
+
+    def use_privatekey_file(self, fileName):
+        pass
+
+
+
+class DefaultOpenSSLContextFactoryTests(unittest.TestCase):
+    """
+    Tests for L{ssl.DefaultOpenSSLContextFactory}.
+    """
+    def setUp(self):
+        # pyOpenSSL Context objects aren't introspectable enough.  Pass in
+        # an alternate context factory so we can inspect what is done to it.
+        self.contextFactory = ssl.DefaultOpenSSLContextFactory(
+            certPath, certPath, _contextFactory=FakeContext)
+        self.context = self.contextFactory.getContext()
+
+
+    def test_method(self):
+        """
+        L{ssl.DefaultOpenSSLContextFactory.getContext} returns an SSL context
+        which can use SSLv3 or TLSv1 but not SSLv2.
+        """
+        # SSLv23_METHOD allows SSLv2, SSLv3, or TLSv1
+        self.assertEqual(self.context._method, SSL.SSLv23_METHOD)
+
+        # And OP_NO_SSLv2 disables the SSLv2 support.
+        self.assertTrue(self.context._options & SSL.OP_NO_SSLv2)
+
+        # Make sure SSLv3 and TLSv1 aren't disabled though.
+        self.assertFalse(self.context._options & SSL.OP_NO_SSLv3)
+        self.assertFalse(self.context._options & SSL.OP_NO_TLSv1)
+
+
+    def test_missingCertificateFile(self):
+        """
+        Instantiating L{ssl.DefaultOpenSSLContextFactory} with a certificate
+        filename which does not identify an existing file results in the
+        initializer raising L{OpenSSL.SSL.Error}.
+        """
+        self.assertRaises(
+            SSL.Error,
+            ssl.DefaultOpenSSLContextFactory, certPath, self.mktemp())
+
+
+    def test_missingPrivateKeyFile(self):
+        """
+        Instantiating L{ssl.DefaultOpenSSLContextFactory} with a private key
+        filename which does not identify an existing file results in the
+        initializer raising L{OpenSSL.SSL.Error}.
+        """
+        self.assertRaises(
+            SSL.Error,
+            ssl.DefaultOpenSSLContextFactory, self.mktemp(), certPath)
+
+
+
+class ClientContextFactoryTests(unittest.TestCase):
+    """
+    Tests for L{ssl.ClientContextFactory}.
+    """
+    def setUp(self):
+        self.contextFactory = ssl.ClientContextFactory()
+        self.contextFactory._contextFactory = FakeContext
+        self.context = self.contextFactory.getContext()
+
+
+    def test_method(self):
+        """
+        L{ssl.ClientContextFactory.getContext} returns a context which can use
+        SSLv3 or TLSv1 but not SSLv2.
+        """
+        self.assertEqual(self.context._method, SSL.SSLv23_METHOD)
+        self.assertTrue(self.context._options & SSL.OP_NO_SSLv2)
+        self.assertFalse(self.context._options & SSL.OP_NO_SSLv3)
+        self.assertFalse(self.context._options & SSL.OP_NO_TLSv1)
+
+
+
+if interfaces.IReactorSSL(reactor, None) is None:
+    for tCase in [StolenTCPTestCase, TLSTestCase, SpammyTLSTestCase,
+                  BufferingTestCase, ConnectionLostTestCase,
+                  DefaultOpenSSLContextFactoryTests,
+                  ClientContextFactoryTests]:
+        tCase.skip = "Reactor does not support SSL, cannot run SSL tests"
+
diff --git a/ThirdParty/Twisted/twisted/test/test_sslverify.py b/ThirdParty/Twisted/twisted/test/test_sslverify.py
new file mode 100644
index 0000000..65bef5c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_sslverify.py
@@ -0,0 +1,566 @@
+# Copyright 2005 Divmod, Inc.  See LICENSE file for details
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet._sslverify}.
+"""
+
+from __future__ import division, absolute_import
+
+import itertools
+
+try:
+    from OpenSSL import SSL
+    from OpenSSL.crypto import PKey, X509, X509Req
+    from OpenSSL.crypto import TYPE_RSA
+    from twisted.internet import _sslverify as sslverify
+except ImportError:
+    pass
+
+from twisted.python.compat import nativeString
+from twisted.trial import unittest
+from twisted.internet import protocol, defer, reactor
+
+from twisted.internet.error import CertificateError, ConnectionLost
+from twisted.internet import interfaces
+
+
+# A couple of static PEM-format certificates to be used by various tests.
+A_HOST_CERTIFICATE_PEM = """
+-----BEGIN CERTIFICATE-----
+        MIIC2jCCAkMCAjA5MA0GCSqGSIb3DQEBBAUAMIG0MQswCQYDVQQGEwJVUzEiMCAG
+        A1UEAxMZZXhhbXBsZS50d2lzdGVkbWF0cml4LmNvbTEPMA0GA1UEBxMGQm9zdG9u
+        MRwwGgYDVQQKExNUd2lzdGVkIE1hdHJpeCBMYWJzMRYwFAYDVQQIEw1NYXNzYWNo
+        dXNldHRzMScwJQYJKoZIhvcNAQkBFhhub2JvZHlAdHdpc3RlZG1hdHJpeC5jb20x
+        ETAPBgNVBAsTCFNlY3VyaXR5MB4XDTA2MDgxNjAxMDEwOFoXDTA3MDgxNjAxMDEw
+        OFowgbQxCzAJBgNVBAYTAlVTMSIwIAYDVQQDExlleGFtcGxlLnR3aXN0ZWRtYXRy
+        aXguY29tMQ8wDQYDVQQHEwZCb3N0b24xHDAaBgNVBAoTE1R3aXN0ZWQgTWF0cml4
+        IExhYnMxFjAUBgNVBAgTDU1hc3NhY2h1c2V0dHMxJzAlBgkqhkiG9w0BCQEWGG5v
+        Ym9keUB0d2lzdGVkbWF0cml4LmNvbTERMA8GA1UECxMIU2VjdXJpdHkwgZ8wDQYJ
+        KoZIhvcNAQEBBQADgY0AMIGJAoGBAMzH8CDF/U91y/bdbdbJKnLgnyvQ9Ig9ZNZp
+        8hpsu4huil60zF03+Lexg2l1FIfURScjBuaJMR6HiMYTMjhzLuByRZ17KW4wYkGi
+        KXstz03VIKy4Tjc+v4aXFI4XdRw10gGMGQlGGscXF/RSoN84VoDKBfOMWdXeConJ
+        VyC4w3iJAgMBAAEwDQYJKoZIhvcNAQEEBQADgYEAviMT4lBoxOgQy32LIgZ4lVCj
+        JNOiZYg8GMQ6y0ugp86X80UjOvkGtNf/R7YgED/giKRN/q/XJiLJDEhzknkocwmO
+        S+4b2XpiaZYxRyKWwL221O7CGmtWYyZl2+92YYmmCiNzWQPfP6BOMlfax0AGLHls
+        fXzCWdG0O/3Lk2SRM0I=
+-----END CERTIFICATE-----
+"""
+
+A_PEER_CERTIFICATE_PEM = """
+-----BEGIN CERTIFICATE-----
+        MIIC3jCCAkcCAjA6MA0GCSqGSIb3DQEBBAUAMIG2MQswCQYDVQQGEwJVUzEiMCAG
+        A1UEAxMZZXhhbXBsZS50d2lzdGVkbWF0cml4LmNvbTEPMA0GA1UEBxMGQm9zdG9u
+        MRwwGgYDVQQKExNUd2lzdGVkIE1hdHJpeCBMYWJzMRYwFAYDVQQIEw1NYXNzYWNo
+        dXNldHRzMSkwJwYJKoZIhvcNAQkBFhpzb21lYm9keUB0d2lzdGVkbWF0cml4LmNv
+        bTERMA8GA1UECxMIU2VjdXJpdHkwHhcNMDYwODE2MDEwMTU2WhcNMDcwODE2MDEw
+        MTU2WjCBtjELMAkGA1UEBhMCVVMxIjAgBgNVBAMTGWV4YW1wbGUudHdpc3RlZG1h
+        dHJpeC5jb20xDzANBgNVBAcTBkJvc3RvbjEcMBoGA1UEChMTVHdpc3RlZCBNYXRy
+        aXggTGFiczEWMBQGA1UECBMNTWFzc2FjaHVzZXR0czEpMCcGCSqGSIb3DQEJARYa
+        c29tZWJvZHlAdHdpc3RlZG1hdHJpeC5jb20xETAPBgNVBAsTCFNlY3VyaXR5MIGf
+        MA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCnm+WBlgFNbMlHehib9ePGGDXF+Nz4
+        CjGuUmVBaXCRCiVjg3kSDecwqfb0fqTksBZ+oQ1UBjMcSh7OcvFXJZnUesBikGWE
+        JE4V8Bjh+RmbJ1ZAlUPZ40bAkww0OpyIRAGMvKG+4yLFTO4WDxKmfDcrOb6ID8WJ
+        e1u+i3XGkIf/5QIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAD4Oukm3YYkhedUepBEA
+        vvXIQhVDqL7mk6OqYdXmNj6R7ZMC8WWvGZxrzDI1bZuB+4aIxxd1FXC3UOHiR/xg
+        i9cDl1y8P/qRp4aEBNF6rI0D4AxTbfnHQx4ERDAOShJdYZs/2zifPJ6va6YvrEyr
+        yqDtGhklsWW3ZwBzEh5VEOUp
+-----END CERTIFICATE-----
+"""
+
+
+
+def counter(counter=itertools.count()):
+    """
+    Each time we're called, return the next integer in the natural numbers.
+    """
+    return next(counter)
+
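+# Editorial note: the mutable default argument above is deliberate; it shares
+# a single itertools.count() across every call, so successive calls return
+# 0, 1, 2, ... and the certificate serial numbers issued below never repeat
+# within one test run.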
+
+
+def makeCertificate(**kw):
+    keypair = PKey()
+    keypair.generate_key(TYPE_RSA, 512)
+
+    certificate = X509()
+    certificate.gmtime_adj_notBefore(0)
+    certificate.gmtime_adj_notAfter(60 * 60 * 24 * 365) # One year
+    for xname in certificate.get_issuer(), certificate.get_subject():
+        for (k, v) in kw.items():
+            setattr(xname, k, nativeString(v))
+
+    certificate.set_serial_number(counter())
+    certificate.set_pubkey(keypair)
+    certificate.sign(keypair, "md5")
+
+    return keypair, certificate
+
+
+
+class DataCallbackProtocol(protocol.Protocol):
+    def dataReceived(self, data):
+        d, self.factory.onData = self.factory.onData, None
+        if d is not None:
+            d.callback(data)
+
+    def connectionLost(self, reason):
+        d, self.factory.onLost = self.factory.onLost, None
+        if d is not None:
+            d.errback(reason)
+
+class WritingProtocol(protocol.Protocol):
+    byte = b'x'
+    def connectionMade(self):
+        self.transport.write(self.byte)
+
+    def connectionLost(self, reason):
+        self.factory.onLost.errback(reason)
+
+
+class OpenSSLOptions(unittest.TestCase):
+    serverPort = clientConn = None
+    onServerLost = onClientLost = None
+
+    sKey = None
+    sCert = None
+    cKey = None
+    cCert = None
+
+    def setUp(self):
+        """
+        Create the client and server certificates used by the tests.
+        """
+        self.sKey, self.sCert = makeCertificate(
+            O=b"Server Test Certificate",
+            CN=b"server")
+        self.cKey, self.cCert = makeCertificate(
+            O=b"Client Test Certificate",
+            CN=b"client")
+
+    def tearDown(self):
+        if self.serverPort is not None:
+            self.serverPort.stopListening()
+        if self.clientConn is not None:
+            self.clientConn.disconnect()
+
+        L = []
+        if self.onServerLost is not None:
+            L.append(self.onServerLost)
+        if self.onClientLost is not None:
+            L.append(self.onClientLost)
+
+        return defer.DeferredList(L, consumeErrors=True)
+
+    def loopback(self, serverCertOpts, clientCertOpts,
+                 onServerLost=None, onClientLost=None, onData=None):
+        if onServerLost is None:
+            self.onServerLost = onServerLost = defer.Deferred()
+        if onClientLost is None:
+            self.onClientLost = onClientLost = defer.Deferred()
+        if onData is None:
+            onData = defer.Deferred()
+
+        serverFactory = protocol.ServerFactory()
+        serverFactory.protocol = DataCallbackProtocol
+        serverFactory.onLost = onServerLost
+        serverFactory.onData = onData
+
+        clientFactory = protocol.ClientFactory()
+        clientFactory.protocol = WritingProtocol
+        clientFactory.onLost = onClientLost
+
+        self.serverPort = reactor.listenSSL(0, serverFactory, serverCertOpts)
+        self.clientConn = reactor.connectSSL('127.0.0.1',
+                self.serverPort.getHost().port, clientFactory, clientCertOpts)
+
+    def test_abbreviatingDistinguishedNames(self):
+        """
+        Check that abbreviations used in certificates correctly map to
+        complete names.
+        """
+        self.assertEqual(
+                sslverify.DN(CN=b'a', OU=b'hello'),
+                sslverify.DistinguishedName(commonName=b'a',
+                                            organizationalUnitName=b'hello'))
+        self.assertNotEquals(
+                sslverify.DN(CN=b'a', OU=b'hello'),
+                sslverify.DN(CN=b'a', OU=b'hello', emailAddress=b'xxx'))
+        dn = sslverify.DN(CN=b'abcdefg')
+        self.assertRaises(AttributeError, setattr, dn, 'Cn', b'x')
+        self.assertEqual(dn.CN, dn.commonName)
+        dn.CN = b'bcdefga'
+        self.assertEqual(dn.CN, dn.commonName)
+
+
+    def testInspectDistinguishedName(self):
+        n = sslverify.DN(commonName=b'common name',
+                         organizationName=b'organization name',
+                         organizationalUnitName=b'organizational unit name',
+                         localityName=b'locality name',
+                         stateOrProvinceName=b'state or province name',
+                         countryName=b'country name',
+                         emailAddress=b'email address')
+        s = n.inspect()
+        for k in [
+            'common name',
+            'organization name',
+            'organizational unit name',
+            'locality name',
+            'state or province name',
+            'country name',
+            'email address']:
+            self.assertIn(k, s, "%r was not in inspect output." % (k,))
+            self.assertIn(k.title(), s, "%r was not in inspect output." % (k,))
+
+
+    def testInspectDistinguishedNameWithoutAllFields(self):
+        n = sslverify.DN(localityName=b'locality name')
+        s = n.inspect()
+        for k in [
+            'common name',
+            'organization name',
+            'organizational unit name',
+            'state or province name',
+            'country name',
+            'email address']:
+            self.assertNotIn(k, s, "%r was in inspect output." % (k,))
+            self.assertNotIn(k.title(), s, "%r was in inspect output." % (k,))
+        self.assertIn('locality name', s)
+        self.assertIn('Locality Name', s)
+
+
+    def test_inspectCertificate(self):
+        """
+        Test that the C{inspect} method of L{sslverify.Certificate} returns
+        a human-readable string containing some basic information about the
+        certificate.
+        """
+        c = sslverify.Certificate.loadPEM(A_HOST_CERTIFICATE_PEM)
+        self.assertEqual(
+            c.inspect().split('\n'),
+            ["Certificate For Subject:",
+             "               Common Name: example.twistedmatrix.com",
+             "              Country Name: US",
+             "             Email Address: nobody at twistedmatrix.com",
+             "             Locality Name: Boston",
+             "         Organization Name: Twisted Matrix Labs",
+             "  Organizational Unit Name: Security",
+             "    State Or Province Name: Massachusetts",
+             "",
+             "Issuer:",
+             "               Common Name: example.twistedmatrix.com",
+             "              Country Name: US",
+             "             Email Address: nobody at twistedmatrix.com",
+             "             Locality Name: Boston",
+             "         Organization Name: Twisted Matrix Labs",
+             "  Organizational Unit Name: Security",
+             "    State Or Province Name: Massachusetts",
+             "",
+             "Serial Number: 12345",
+             "Digest: C4:96:11:00:30:C3:EC:EE:A3:55:AA:ED:8C:84:85:18",
+             "Public Key with Hash: ff33994c80812aa95a79cdb85362d054"])
+
+
+    def test_certificateOptionsSerialization(self):
+        """
+        Test that __setstate__(__getstate__()) round-trips properly.
+        """
+        firstOpts = sslverify.OpenSSLCertificateOptions(
+            privateKey=self.sKey,
+            certificate=self.sCert,
+            method=SSL.SSLv3_METHOD,
+            verify=True,
+            caCerts=[self.sCert],
+            verifyDepth=2,
+            requireCertificate=False,
+            verifyOnce=False,
+            enableSingleUseKeys=False,
+            enableSessions=False,
+            fixBrokenPeers=True,
+            enableSessionTickets=True)
+        context = firstOpts.getContext()
+        self.assertIdentical(context, firstOpts._context)
+        self.assertNotIdentical(context, None)
+        state = firstOpts.__getstate__()
+        self.assertNotIn("_context", state)
+
+        opts = sslverify.OpenSSLCertificateOptions()
+        opts.__setstate__(state)
+        self.assertEqual(opts.privateKey, self.sKey)
+        self.assertEqual(opts.certificate, self.sCert)
+        self.assertEqual(opts.method, SSL.SSLv3_METHOD)
+        self.assertEqual(opts.verify, True)
+        self.assertEqual(opts.caCerts, [self.sCert])
+        self.assertEqual(opts.verifyDepth, 2)
+        self.assertEqual(opts.requireCertificate, False)
+        self.assertEqual(opts.verifyOnce, False)
+        self.assertEqual(opts.enableSingleUseKeys, False)
+        self.assertEqual(opts.enableSessions, False)
+        self.assertEqual(opts.fixBrokenPeers, True)
+        self.assertEqual(opts.enableSessionTickets, True)
+
+
+    def test_certificateOptionsSessionTickets(self):
+        """
+        Enabling session tickets should not set the OP_NO_TICKET option.
+        """
+        opts = sslverify.OpenSSLCertificateOptions(enableSessionTickets=True)
+        ctx = opts.getContext()
+        self.assertEqual(0, ctx.set_options(0) & 0x00004000)
+
+
+    def test_certificateOptionsSessionTicketsDisabled(self):
+        """
+        Disabling session tickets should set the OP_NO_TICKET option.
+        """
+        opts = sslverify.OpenSSLCertificateOptions(enableSessionTickets=False)
+        ctx = opts.getContext()
+        self.assertEqual(0x00004000, ctx.set_options(0) & 0x00004000)
+
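+    # Editorial note (hedged): the magic number 0x00004000 used in the two
+    # tests above is OpenSSL's SSL_OP_NO_TICKET flag; it is spelled out
+    # numerically, presumably so the tests also work with older pyOpenSSL
+    # releases that do not expose SSL.OP_NO_TICKET by name.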
+
+    def test_allowedAnonymousClientConnection(self):
+        """
+        Check that anonymous connections are allowed when certificates aren't
+        required on the server.
+        """
+        onData = defer.Deferred()
+        self.loopback(sslverify.OpenSSLCertificateOptions(privateKey=self.sKey,
+                            certificate=self.sCert, requireCertificate=False),
+                      sslverify.OpenSSLCertificateOptions(
+                          requireCertificate=False),
+                      onData=onData)
+
+        return onData.addCallback(
+            lambda result: self.assertEqual(result, WritingProtocol.byte))
+
+
+    def test_refusedAnonymousClientConnection(self):
+        """
+        Check that anonymous connections are refused when certificates are
+        required on the server.
+        """
+        onServerLost = defer.Deferred()
+        onClientLost = defer.Deferred()
+        self.loopback(sslverify.OpenSSLCertificateOptions(privateKey=self.sKey,
+                            certificate=self.sCert, verify=True,
+                            caCerts=[self.sCert], requireCertificate=True),
+                      sslverify.OpenSSLCertificateOptions(
+                          requireCertificate=False),
+                      onServerLost=onServerLost,
+                      onClientLost=onClientLost)
+
+        d = defer.DeferredList([onClientLost, onServerLost],
+                               consumeErrors=True)
+
+
+        def afterLost(result):
+            ((cSuccess, cResult), (sSuccess, sResult)) = result
+            self.failIf(cSuccess)
+            self.failIf(sSuccess)
+            # Win32 fails to report the SSL error and reports a connection
+            # lost instead: there is a race condition, so that's not totally
+            # surprising (see ticket #2877 in the tracker).
+            self.assertIsInstance(cResult.value, (SSL.Error, ConnectionLost))
+            self.assertIsInstance(sResult.value, SSL.Error)
+
+        return d.addCallback(afterLost)
+
+    def test_failedCertificateVerification(self):
+        """
+        Check that connecting with a certificate not accepted by the server CA
+        fails.
+        """
+        onServerLost = defer.Deferred()
+        onClientLost = defer.Deferred()
+        self.loopback(sslverify.OpenSSLCertificateOptions(privateKey=self.sKey,
+                            certificate=self.sCert, verify=False,
+                            requireCertificate=False),
+                      sslverify.OpenSSLCertificateOptions(verify=True,
+                            requireCertificate=False, caCerts=[self.cCert]),
+                      onServerLost=onServerLost,
+                      onClientLost=onClientLost)
+
+        d = defer.DeferredList([onClientLost, onServerLost],
+                               consumeErrors=True)
+        def afterLost(result):
+            ((cSuccess, cResult), (sSuccess, sResult)) = result
+            self.failIf(cSuccess)
+            self.failIf(sSuccess)
+
+        return d.addCallback(afterLost)
+
+    def test_successfulCertificateVerification(self):
+        """
+        Test a successful connection with client certificate validation on
+        server side.
+        """
+        onData = defer.Deferred()
+        self.loopback(sslverify.OpenSSLCertificateOptions(privateKey=self.sKey,
+                            certificate=self.sCert, verify=False,
+                            requireCertificate=False),
+                      sslverify.OpenSSLCertificateOptions(verify=True,
+                            requireCertificate=True, caCerts=[self.sCert]),
+                      onData=onData)
+
+        return onData.addCallback(
+                lambda result: self.assertEqual(result, WritingProtocol.byte))
+
+    def test_successfulSymmetricSelfSignedCertificateVerification(self):
+        """
+        Test a successful connection with validation on both server and client
+        sides.
+        """
+        onData = defer.Deferred()
+        self.loopback(sslverify.OpenSSLCertificateOptions(privateKey=self.sKey,
+                            certificate=self.sCert, verify=True,
+                            requireCertificate=True, caCerts=[self.cCert]),
+                      sslverify.OpenSSLCertificateOptions(privateKey=self.cKey,
+                            certificate=self.cCert, verify=True,
+                            requireCertificate=True, caCerts=[self.sCert]),
+                      onData=onData)
+
+        return onData.addCallback(
+                lambda result: self.assertEqual(result, WritingProtocol.byte))
+
+    def test_verification(self):
+        """
+        Check certificate verification using custom-built certificate data.
+        """
+        clientDN = sslverify.DistinguishedName(commonName='client')
+        clientKey = sslverify.KeyPair.generate()
+        clientCertReq = clientKey.certificateRequest(clientDN)
+
+        serverDN = sslverify.DistinguishedName(commonName='server')
+        serverKey = sslverify.KeyPair.generate()
+        serverCertReq = serverKey.certificateRequest(serverDN)
+
+        clientSelfCertReq = clientKey.certificateRequest(clientDN)
+        clientSelfCertData = clientKey.signCertificateRequest(
+                clientDN, clientSelfCertReq, lambda dn: True, 132)
+        clientSelfCert = clientKey.newCertificate(clientSelfCertData)
+
+        serverSelfCertReq = serverKey.certificateRequest(serverDN)
+        serverSelfCertData = serverKey.signCertificateRequest(
+                serverDN, serverSelfCertReq, lambda dn: True, 516)
+        serverSelfCert = serverKey.newCertificate(serverSelfCertData)
+
+        clientCertData = serverKey.signCertificateRequest(
+                serverDN, clientCertReq, lambda dn: True, 7)
+        clientCert = clientKey.newCertificate(clientCertData)
+
+        serverCertData = clientKey.signCertificateRequest(
+                clientDN, serverCertReq, lambda dn: True, 42)
+        serverCert = serverKey.newCertificate(serverCertData)
+
+        onData = defer.Deferred()
+
+        serverOpts = serverCert.options(serverSelfCert)
+        clientOpts = clientCert.options(clientSelfCert)
+
+        self.loopback(serverOpts,
+                      clientOpts,
+                      onData=onData)
+
+        return onData.addCallback(
+                lambda result: self.assertEqual(result, WritingProtocol.byte))
+
+
+
+if interfaces.IReactorSSL(reactor, None) is None:
+    OpenSSLOptions.skip = "Reactor does not support SSL, cannot run SSL tests"
+
+
+
+class _NotSSLTransport:
+    def getHandle(self):
+        return self
+
+class _MaybeSSLTransport:
+    def getHandle(self):
+        return self
+
+    def get_peer_certificate(self):
+        return None
+
+    def get_host_certificate(self):
+        return None
+
+
+class _ActualSSLTransport:
+    def getHandle(self):
+        return self
+
+    def get_host_certificate(self):
+        return sslverify.Certificate.loadPEM(A_HOST_CERTIFICATE_PEM).original
+
+    def get_peer_certificate(self):
+        return sslverify.Certificate.loadPEM(A_PEER_CERTIFICATE_PEM).original
+
+
+class Constructors(unittest.TestCase):
+    def test_peerFromNonSSLTransport(self):
+        """
+        Verify that peerFromTransport raises an exception if the transport
+        passed is not actually an SSL transport.
+        """
+        x = self.assertRaises(CertificateError,
+                              sslverify.Certificate.peerFromTransport,
+                              _NotSSLTransport())
+        self.failUnless(str(x).startswith("non-TLS"))
+
+    def test_peerFromBlankSSLTransport(self):
+        """
+        Verify that peerFromTransport raises an exception if the transport
+        passed is an SSL transport, but doesn't have a peer certificate.
+        """
+        x = self.assertRaises(CertificateError,
+                              sslverify.Certificate.peerFromTransport,
+                              _MaybeSSLTransport())
+        self.failUnless(str(x).startswith("TLS"))
+
+    def test_hostFromNonSSLTransport(self):
+        """
+        Verify that hostFromTransport raises an exception if the transport
+        passed is not actually an SSL transport.
+        """
+        x = self.assertRaises(CertificateError,
+                              sslverify.Certificate.hostFromTransport,
+                              _NotSSLTransport())
+        self.failUnless(str(x).startswith("non-TLS"))
+
+    def test_hostFromBlankSSLTransport(self):
+        """
+        Verify that hostFromTransport raises an exception if the transport
+        passed is an SSL transport, but doesn't have a host certificate.
+        """
+        x = self.assertRaises(CertificateError,
+                              sslverify.Certificate.hostFromTransport,
+                              _MaybeSSLTransport())
+        self.failUnless(str(x).startswith("TLS"))
+
+
+    def test_hostFromSSLTransport(self):
+        """
+        Verify that hostFromTransport successfully creates the correct
+        certificate if passed a valid SSL transport.
+        """
+        self.assertEqual(
+            sslverify.Certificate.hostFromTransport(
+                _ActualSSLTransport()).serialNumber(),
+            12345)
+
+    def test_peerFromSSLTransport(self):
+        """
+        Verify that peerFromTransport successfully creates the correct
+        certificate if passed a valid SSL transport.
+        """
+        self.assertEqual(
+            sslverify.Certificate.peerFromTransport(
+                _ActualSSLTransport()).serialNumber(),
+            12346)
+
+
+
+if interfaces.IReactorSSL(reactor, None) is None:
+    Constructors.skip = "Reactor does not support SSL, cannot run SSL tests"
diff --git a/ThirdParty/Twisted/twisted/test/test_stateful.py b/ThirdParty/Twisted/twisted/test/test_stateful.py
new file mode 100644
index 0000000..ce72474
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_stateful.py
@@ -0,0 +1,81 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Test cases for twisted.protocols.stateful
+"""
+
+from twisted.trial.unittest import TestCase
+from twisted.protocols.test import test_basic
+from twisted.protocols.stateful import StatefulProtocol
+
+from struct import pack, unpack, calcsize
+
+
+class MyInt32StringReceiver(StatefulProtocol):
+    """
+    A stateful Int32StringReceiver.
+    """
+    MAX_LENGTH = 99999
+    structFormat = "!I"
+    prefixLength = calcsize(structFormat)
+
+    def getInitialState(self):
+        return self._getHeader, 4
+
+    def lengthLimitExceeded(self, length):
+        self.transport.loseConnection()
+
+    def _getHeader(self, msg):
+        length, = unpack("!i", msg)
+        if length > self.MAX_LENGTH:
+            self.lengthLimitExceeded(length)
+            return
+        return self._getString, length
+
+    def _getString(self, msg):
+        self.stringReceived(msg)
+        return self._getHeader, 4
+
+    def stringReceived(self, msg):
+        """
+        Override this.
+        """
+        raise NotImplementedError
+
+    def sendString(self, data):
+        """
+        Send an int32-prefixed string to the other end of the connection.
+        """
+        self.transport.write(pack(self.structFormat, len(data)) + data)
+
+
+class TestInt32(MyInt32StringReceiver):
+    def connectionMade(self):
+        self.received = []
+
+    def stringReceived(self, s):
+        self.received.append(s)
+
+    MAX_LENGTH = 50
+    closed = 0
+
+    def connectionLost(self, reason):
+        self.closed = 1
+
+
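+# Editorial usage sketch (not part of upstream Twisted): TestInt32 can be
+# driven by hand with a StringTransport to watch the (_getHeader, 4) ->
+# (_getString, length) state transitions of the stateful receiver.
+def _exampleDriveTestInt32():
+    from twisted.test import proto_helpers
+    p = TestInt32()
+    p.makeConnection(proto_helpers.StringTransport())
+    # One complete frame: a 4-byte big-endian length prefix plus the payload.
+    p.dataReceived(pack("!i", 5) + "hello")
+    assert p.received == ["hello"]
+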
+class Int32TestCase(TestCase, test_basic.IntNTestCaseMixin):
+    protocol = TestInt32
+    strings = ["a", "b" * 16]
+    illegalStrings = ["\x10\x00\x00\x00aaaaaa"]
+    partialStrings = ["\x00\x00\x00", "hello there", ""]
+
+    def test_bigReceive(self):
+        r = self.getProtocol()
+        big = ""
+        for s in self.strings * 4:
+            big += pack("!i", len(s)) + s
+        r.dataReceived(big)
+        self.assertEqual(r.received, self.strings * 4)
+
diff --git a/ThirdParty/Twisted/twisted/test/test_stdio.py b/ThirdParty/Twisted/twisted/test/test_stdio.py
new file mode 100644
index 0000000..3da754c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_stdio.py
@@ -0,0 +1,371 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.stdio}.
+"""
+
+import os, sys, itertools
+
+from twisted.trial import unittest
+from twisted.python import filepath, log
+from twisted.python.runtime import platform
+from twisted.internet import error, defer, protocol, stdio, reactor
+from twisted.test.test_tcp import ConnectionLostNotifyingProtocol
+
+
+# A short string which is intended to appear here and nowhere else,
+# particularly not in any random garbage output CPython unavoidably
+# generates (such as in warning text and so forth).  This is searched
+# for in the output from stdio_test_lastwrite.py and if it is found at
+# the end, the functionality works.
+UNIQUE_LAST_WRITE_STRING = 'xyz123abc Twisted is great!'
+
+skipWindowsNopywin32 = None
+if platform.isWindows():
+    try:
+        import win32process
+    except ImportError:
+        skipWindowsNopywin32 = ("On windows, spawnProcess is not available "
+                                "in the absence of win32process.")
+
+
+class StandardIOTestProcessProtocol(protocol.ProcessProtocol):
+    """
+    Test helper for collecting output from a child process and notifying
+    something when it exits.
+
+    @ivar onConnection: A L{defer.Deferred} which will be called back with
+    C{None} when the connection to the child process is established.
+
+    @ivar onCompletion: A L{defer.Deferred} which will be errbacked with the
+    failure associated with the child process exiting when it exits.
+
+    @ivar onDataReceived: A L{defer.Deferred} which will be called back with
+    this instance whenever C{childDataReceived} is called, or C{None} to
+    suppress these callbacks.
+
+    @ivar data: A C{dict} mapping file descriptors to strings containing all
+    bytes received from the child process on each file descriptor.
+    """
+    onDataReceived = None
+
+    def __init__(self):
+        self.onConnection = defer.Deferred()
+        self.onCompletion = defer.Deferred()
+        self.data = {}
+
+
+    def connectionMade(self):
+        self.onConnection.callback(None)
+
+
+    def childDataReceived(self, name, bytes):
+        """
+        Record all bytes received from the child process in the C{data}
+        dictionary.  Fire C{onDataReceived} if it is not C{None}.
+        """
+        self.data[name] = self.data.get(name, '') + bytes
+        if self.onDataReceived is not None:
+            d, self.onDataReceived = self.onDataReceived, None
+            d.callback(self)
+
+
+    def processEnded(self, reason):
+        self.onCompletion.callback(reason)
+
+
+
+class StandardInputOutputTestCase(unittest.TestCase):
+
+    skip = skipWindowsNopywin32
+
+    def _spawnProcess(self, proto, sibling, *args, **kw):
+        """
+        Launch a child Python process and communicate with it using the
+        given ProcessProtocol.
+
+        @param proto: A L{ProcessProtocol} instance which will be connected
+        to the child process.
+
+        @param sibling: The basename of a file containing the Python program
+        to run in the child process.
+
+        @param *args: strings which will be passed to the child process on
+        the command line as C{argv[2:]}.
+
+        @param **kw: additional arguments to pass to L{reactor.spawnProcess}.
+
+        @return: The L{IProcessTransport} provider for the spawned process.
+        """
+        import twisted
+        subenv = dict(os.environ)
+        subenv['PYTHONPATH'] = os.pathsep.join(
+            [os.path.abspath(
+                    os.path.dirname(os.path.dirname(twisted.__file__))),
+             subenv.get('PYTHONPATH', '')
+             ])
+        args = [sys.executable,
+             filepath.FilePath(__file__).sibling(sibling).path,
+             reactor.__class__.__module__] + list(args)
+        return reactor.spawnProcess(
+            proto,
+            sys.executable,
+            args,
+            env=subenv,
+            **kw)
+
+
+    def _requireFailure(self, d, callback):
+        def cb(result):
+            self.fail("Process terminated with non-Failure: %r" % (result,))
+        def eb(err):
+            return callback(err)
+        return d.addCallbacks(cb, eb)
+
+
+    def test_loseConnection(self):
+        """
+        Verify that a protocol connected to L{StandardIO} can disconnect
+        itself using C{transport.loseConnection}.
+        """
+        errorLogFile = self.mktemp()
+        log.msg("Child process logging to " + errorLogFile)
+        p = StandardIOTestProcessProtocol()
+        d = p.onCompletion
+        self._spawnProcess(p, 'stdio_test_loseconn.py', errorLogFile)
+
+        def processEnded(reason):
+            # Copy the child's log to ours so it's more visible.
+            for line in file(errorLogFile):
+                log.msg("Child logged: " + line.rstrip())
+
+            self.failIfIn(1, p.data)
+            reason.trap(error.ProcessDone)
+        return self._requireFailure(d, processEnded)
+
+
+    def test_readConnectionLost(self):
+        """
+        When stdin is closed and the protocol connected to it implements
+        L{IHalfCloseableProtocol}, the protocol's C{readConnectionLost} method
+        is called.
+        """
+        errorLogFile = self.mktemp()
+        log.msg("Child process logging to " + errorLogFile)
+        p = StandardIOTestProcessProtocol()
+        p.onDataReceived = defer.Deferred()
+
+        def cbBytes(ignored):
+            d = p.onCompletion
+            p.transport.closeStdin()
+            return d
+        p.onDataReceived.addCallback(cbBytes)
+
+        def processEnded(reason):
+            reason.trap(error.ProcessDone)
+        d = self._requireFailure(p.onDataReceived, processEnded)
+
+        self._spawnProcess(
+            p, 'stdio_test_halfclose.py', errorLogFile)
+        return d
+
+
+    def test_lastWriteReceived(self):
+        """
+        Verify that a write made directly to stdout using L{os.write}
+        after StandardIO has finished is reliably received by the
+        process reading that stdout.
+        """
+        p = StandardIOTestProcessProtocol()
+
+        # Note: the OS X bug which prompted the addition of this test
+        # is an apparent race condition involving non-blocking PTYs.
+        # Delaying the parent process significantly increases the
+        # likelihood of the race going the wrong way.  If you need to
+        # fiddle with this code at all, uncommenting the next line
+        # will likely make your life much easier.  It is commented out
+        # because it makes the test quite slow.
+
+        # p.onConnection.addCallback(lambda ign: __import__('time').sleep(5))
+
+        try:
+            self._spawnProcess(
+                p, 'stdio_test_lastwrite.py', UNIQUE_LAST_WRITE_STRING,
+                usePTY=True)
+        except ValueError, e:
+            # Some platforms don't work with usePTY=True
+            raise unittest.SkipTest(str(e))
+
+        def processEnded(reason):
+            """
+            Asserts that the parent received the bytes written by the child
+            immediately after the child starts.
+            """
+            self.assertTrue(
+                p.data[1].endswith(UNIQUE_LAST_WRITE_STRING),
+                "Received %r from child, did not find expected bytes." % (
+                    p.data,))
+            reason.trap(error.ProcessDone)
+        return self._requireFailure(p.onCompletion, processEnded)
+
+
+    def test_hostAndPeer(self):
+        """
+        Verify that the transport of a protocol connected to L{StandardIO}
+        has C{getHost} and C{getPeer} methods.
+        """
+        p = StandardIOTestProcessProtocol()
+        d = p.onCompletion
+        self._spawnProcess(p, 'stdio_test_hostpeer.py')
+
+        def processEnded(reason):
+            host, peer = p.data[1].splitlines()
+            self.failUnless(host)
+            self.failUnless(peer)
+            reason.trap(error.ProcessDone)
+        return self._requireFailure(d, processEnded)
+
+
+    def test_write(self):
+        """
+        Verify that the C{write} method of the transport of a protocol
+        connected to L{StandardIO} sends bytes to standard out.
+        """
+        p = StandardIOTestProcessProtocol()
+        d = p.onCompletion
+
+        self._spawnProcess(p, 'stdio_test_write.py')
+
+        def processEnded(reason):
+            self.assertEqual(p.data[1], 'ok!')
+            reason.trap(error.ProcessDone)
+        return self._requireFailure(d, processEnded)
+
+
+    def test_writeSequence(self):
+        """
+        Verify that the C{writeSequence} method of the transport of a
+        protocol connected to L{StandardIO} sends bytes to standard out.
+        """
+        p = StandardIOTestProcessProtocol()
+        d = p.onCompletion
+
+        self._spawnProcess(p, 'stdio_test_writeseq.py')
+
+        def processEnded(reason):
+            self.assertEqual(p.data[1], 'ok!')
+            reason.trap(error.ProcessDone)
+        return self._requireFailure(d, processEnded)
+
+
+    def _junkPath(self):
+        junkPath = self.mktemp()
+        junkFile = file(junkPath, 'w')
+        for i in xrange(1024):
+            junkFile.write(str(i) + '\n')
+        junkFile.close()
+        return junkPath
+
+
+    def test_producer(self):
+        """
+        Verify that the transport of a protocol connected to L{StandardIO}
+        is a working L{IProducer} provider.
+        """
+        p = StandardIOTestProcessProtocol()
+        d = p.onCompletion
+
+        written = []
+        toWrite = range(100)
+
+        def connectionMade(ign):
+            if toWrite:
+                written.append(str(toWrite.pop()) + "\n")
+                proc.write(written[-1])
+                reactor.callLater(0.01, connectionMade, None)
+
+        proc = self._spawnProcess(p, 'stdio_test_producer.py')
+
+        p.onConnection.addCallback(connectionMade)
+
+        def processEnded(reason):
+            self.assertEqual(p.data[1], ''.join(written))
+            self.failIf(toWrite,
+                        "Connection lost with %d writes left to go."
+                        % (len(toWrite),))
+            reason.trap(error.ProcessDone)
+        return self._requireFailure(d, processEnded)
+
+
+    def test_consumer(self):
+        """
+        Verify that the transport of a protocol connected to L{StandardIO}
+        is a working L{IConsumer} provider.
+        """
+        p = StandardIOTestProcessProtocol()
+        d = p.onCompletion
+
+        junkPath = self._junkPath()
+
+        self._spawnProcess(p, 'stdio_test_consumer.py', junkPath)
+
+        def processEnded(reason):
+            self.assertEqual(p.data[1], file(junkPath).read())
+            reason.trap(error.ProcessDone)
+        return self._requireFailure(d, processEnded)
+
+
+    def test_normalFileStandardOut(self):
+        """
+        If L{StandardIO} is created with a file descriptor which refers to a
+        normal file (ie, a file from the filesystem), L{StandardIO.write}
+        writes bytes to that file.  In particular, it does not immediately
+        consider the file closed or call its protocol's C{connectionLost}
+        method.
+        """
+        onConnLost = defer.Deferred()
+        proto = ConnectionLostNotifyingProtocol(onConnLost)
+        path = filepath.FilePath(self.mktemp())
+        self.normal = normal = path.open('w')
+        self.addCleanup(normal.close)
+
+        kwargs = dict(stdout=normal.fileno())
+        if not platform.isWindows():
+            # Make a fake stdin so that StandardIO doesn't mess with the *real*
+            # stdin.
+            r, w = os.pipe()
+            self.addCleanup(os.close, r)
+            self.addCleanup(os.close, w)
+            kwargs['stdin'] = r
+        connection = stdio.StandardIO(proto, **kwargs)
+
+        # The reactor needs to spin a bit before it might have incorrectly
+        # decided stdout is closed.  Use this counter to keep track of how
+        # much we've let it spin.  If it closes before we expected, this
+        # counter will have a value that's too small and we'll know.
+        howMany = 5
+        count = itertools.count()
+
+        def spin():
+            for value in count:
+                if value == howMany:
+                    connection.loseConnection()
+                    return
+                connection.write(str(value))
+                break
+            reactor.callLater(0, spin)
+        reactor.callLater(0, spin)
+
+        # Once the connection is lost, make sure the counter is at the
+        # appropriate value.
+        def cbLost(reason):
+            self.assertEqual(count.next(), howMany + 1)
+            self.assertEqual(
+                path.getContent(),
+                ''.join(map(str, range(howMany))))
+        onConnLost.addCallback(cbLost)
+        return onConnLost
+
+    if platform.isWindows():
+        test_normalFileStandardOut.skip = (
+            "StandardIO does not accept stdout as an argument to Windows.  "
+            "Testing redirection to a file is therefore harder.")
diff --git a/ThirdParty/Twisted/twisted/test/test_strcred.py b/ThirdParty/Twisted/twisted/test/test_strcred.py
new file mode 100644
index 0000000..7233a58
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_strcred.py
@@ -0,0 +1,657 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.cred.strcred}.
+"""
+
+import os
+import StringIO
+
+from twisted import plugin
+from twisted.trial import unittest
+from twisted.cred import credentials, checkers, error, strcred
+from twisted.plugins import cred_file, cred_anonymous
+from twisted.python import usage
+from twisted.python.filepath import FilePath
+from twisted.python.fakepwd import UserDatabase
+
+try:
+    import crypt
+except ImportError:
+    crypt = None
+
+try:
+    import pwd
+except ImportError:
+    pwd = None
+
+try:
+    import spwd
+except ImportError:
+    spwd = None
+
+
+
+def getInvalidAuthType():
+    """
+    Helper method to produce an auth type that doesn't exist.
+    """
+    invalidAuthType = 'ThisPluginDoesNotExist'
+    while (invalidAuthType in
+           [factory.authType for factory in strcred.findCheckerFactories()]):
+        invalidAuthType += '_'
+    return invalidAuthType
+
+
+
+class TestPublicAPI(unittest.TestCase):
+
+    def test_emptyDescription(self):
+        """
+        Test that the description string cannot be empty.
+        """
+        iat = getInvalidAuthType()
+        self.assertRaises(strcred.InvalidAuthType, strcred.makeChecker, iat)
+        self.assertRaises(
+            strcred.InvalidAuthType, strcred.findCheckerFactory, iat)
+
+
+    def test_invalidAuthType(self):
+        """
+        Test that an unrecognized auth type raises an exception.
+        """
+        iat = getInvalidAuthType()
+        self.assertRaises(strcred.InvalidAuthType, strcred.makeChecker, iat)
+        self.assertRaises(
+            strcred.InvalidAuthType, strcred.findCheckerFactory, iat)
+
+
+
+class TestStrcredFunctions(unittest.TestCase):
+
+    def test_findCheckerFactories(self):
+        """
+        Test that findCheckerFactories returns all available plugins.
+        """
+        availablePlugins = list(strcred.findCheckerFactories())
+        for plg in plugin.getPlugins(strcred.ICheckerFactory):
+            self.assertIn(plg, availablePlugins)
+
+
+    def test_findCheckerFactory(self):
+        """
+        Test that findCheckerFactory returns the first plugin
+        available for a given authentication type.
+        """
+        self.assertIdentical(strcred.findCheckerFactory('file'),
+                             cred_file.theFileCheckerFactory)
+
+
+
+class TestMemoryChecker(unittest.TestCase):
+
+    def setUp(self):
+        self.admin = credentials.UsernamePassword('admin', 'asdf')
+        self.alice = credentials.UsernamePassword('alice', 'foo')
+        self.badPass = credentials.UsernamePassword('alice', 'foobar')
+        self.badUser = credentials.UsernamePassword('x', 'yz')
+        self.checker = strcred.makeChecker('memory:admin:asdf:alice:foo')
+
+
+    def test_isChecker(self):
+        """
+        Verifies that strcred.makeChecker('memory') returns an object
+        that implements the L{ICredentialsChecker} interface.
+        """
+        self.assertTrue(checkers.ICredentialsChecker.providedBy(self.checker))
+        self.assertIn(credentials.IUsernamePassword,
+                      self.checker.credentialInterfaces)
+
+
+    def test_badFormatArgString(self):
+        """
+        Test that an argument string which does not contain user:pass
+        pairs (i.e., an odd number of ':' characters) raises an exception.
+        """
+        self.assertRaises(strcred.InvalidAuthArgumentString,
+                          strcred.makeChecker, 'memory:a:b:c')
+
+
+    def test_memoryCheckerSucceeds(self):
+        """
+        Test that the checker works with valid credentials.
+        """
+        def _gotAvatar(username):
+            self.assertEqual(username, self.admin.username)
+        return (self.checker
+                .requestAvatarId(self.admin)
+                .addCallback(_gotAvatar))
+
+
+    def test_memoryCheckerFailsUsername(self):
+        """
+        Test that the checker fails with an invalid username.
+        """
+        return self.assertFailure(self.checker.requestAvatarId(self.badUser),
+                                  error.UnauthorizedLogin)
+
+
+    def test_memoryCheckerFailsPassword(self):
+        """
+        Test that the checker fails with an invalid password.
+        """
+        return self.assertFailure(self.checker.requestAvatarId(self.badPass),
+                                  error.UnauthorizedLogin)
+
+
+
+class TestAnonymousChecker(unittest.TestCase):
+
+    def test_isChecker(self):
+        """
+        Verifies that strcred.makeChecker('anonymous') returns an object
+        that implements the L{ICredentialsChecker} interface.
+        """
+        checker = strcred.makeChecker('anonymous')
+        self.assertTrue(checkers.ICredentialsChecker.providedBy(checker))
+        self.assertIn(credentials.IAnonymous, checker.credentialInterfaces)
+
+
+    def testAnonymousAccessSucceeds(self):
+        """
+        Test that we can log in anonymously using this checker.
+        """
+        checker = strcred.makeChecker('anonymous')
+        request = checker.requestAvatarId(credentials.Anonymous())
+        def _gotAvatar(avatar):
+            self.assertIdentical(checkers.ANONYMOUS, avatar)
+        return request.addCallback(_gotAvatar)
+
+
+
+class TestUnixChecker(unittest.TestCase):
+    users = {
+        'admin': 'asdf',
+        'alice': 'foo',
+        }
+
+
+    def _spwd(self, username):
+        return (username, crypt.crypt(self.users[username], 'F/'),
+                0, 0, 99999, 7, -1, -1, -1)
+
+
+    def setUp(self):
+        self.admin = credentials.UsernamePassword('admin', 'asdf')
+        self.alice = credentials.UsernamePassword('alice', 'foo')
+        self.badPass = credentials.UsernamePassword('alice', 'foobar')
+        self.badUser = credentials.UsernamePassword('x', 'yz')
+        self.checker = strcred.makeChecker('unix')
+
+        # Hack around the pwd and spwd modules, since we can't really
+        # go about reading your /etc/passwd or /etc/shadow files
+        if pwd:
+            database = UserDatabase()
+            for username, password in self.users.items():
+                database.addUser(
+                    username, crypt.crypt(password, 'F/'),
+                    1000, 1000, username, '/home/' + username, '/bin/sh')
+            self.patch(pwd, 'getpwnam', database.getpwnam)
+        if spwd:
+            self._spwd_getspnam = spwd.getspnam
+            spwd.getspnam = self._spwd
+
+
+    def tearDown(self):
+        if spwd:
+            spwd.getspnam = self._spwd_getspnam
+
+
+    def test_isChecker(self):
+        """
+        Verifies that strcred.makeChecker('unix') returns an object
+        that implements the L{ICredentialsChecker} interface.
+        """
+        self.assertTrue(checkers.ICredentialsChecker.providedBy(self.checker))
+        self.assertIn(credentials.IUsernamePassword,
+                      self.checker.credentialInterfaces)
+
+
+    def test_unixCheckerSucceeds(self):
+        """
+        Test that the checker works with valid credentials.
+        """
+        def _gotAvatar(username):
+            self.assertEqual(username, self.admin.username)
+        return (self.checker
+                .requestAvatarId(self.admin)
+                .addCallback(_gotAvatar))
+
+
+    def test_unixCheckerFailsUsername(self):
+        """
+        Test that the checker fails with an invalid username.
+        """
+        return self.assertFailure(self.checker.requestAvatarId(self.badUser),
+                                  error.UnauthorizedLogin)
+
+
+    def test_unixCheckerFailsPassword(self):
+        """
+        Test that the checker fails with an invalid password.
+        """
+        return self.assertFailure(self.checker.requestAvatarId(self.badPass),
+                                  error.UnauthorizedLogin)
+
+
+    if None in (pwd, spwd, crypt):
+        availability = []
+        for module, name in ((pwd, "pwd"), (spwd, "swpd"), (crypt, "crypt")):
+            if module is None:
+                availability += [name]
+        for method in (test_unixCheckerSucceeds,
+                       test_unixCheckerFailsUsername,
+                       test_unixCheckerFailsPassword):
+            method.skip = ("Required module(s) are unavailable: " +
+                           ", ".join(availability))
+
+
+
+class TestFileDBChecker(unittest.TestCase):
+    """
+    Test for the --auth=file:... file checker.
+    """
+
+    def setUp(self):
+        self.admin = credentials.UsernamePassword('admin', 'asdf')
+        self.alice = credentials.UsernamePassword('alice', 'foo')
+        self.badPass = credentials.UsernamePassword('alice', 'foobar')
+        self.badUser = credentials.UsernamePassword('x', 'yz')
+        self.filename = self.mktemp()
+        FilePath(self.filename).setContent('admin:asdf\nalice:foo\n')
+        self.checker = strcred.makeChecker('file:' + self.filename)
+
+
+    def _fakeFilename(self):
+        filename = '/DoesNotExist'
+        while os.path.exists(filename):
+            filename += '_'
+        return filename
+
+
+    def test_isChecker(self):
+        """
+        Verifies that strcred.makeChecker('file:...') returns an object
+        that implements the L{ICredentialsChecker} interface.
+        """
+        self.assertTrue(checkers.ICredentialsChecker.providedBy(self.checker))
+        self.assertIn(credentials.IUsernamePassword,
+                      self.checker.credentialInterfaces)
+
+
+    def test_fileCheckerSucceeds(self):
+        """
+        Test that the checker works with valid credentials.
+        """
+        def _gotAvatar(username):
+            self.assertEqual(username, self.admin.username)
+        return (self.checker
+                .requestAvatarId(self.admin)
+                .addCallback(_gotAvatar))
+
+
+    def test_fileCheckerFailsUsername(self):
+        """
+        Test that the checker fails with an invalid username.
+        """
+        return self.assertFailure(self.checker.requestAvatarId(self.badUser),
+                                  error.UnauthorizedLogin)
+
+
+    def test_fileCheckerFailsPassword(self):
+        """
+        Test that the checker fails with an invalid password.
+        """
+        return self.assertFailure(self.checker.requestAvatarId(self.badPass),
+                                  error.UnauthorizedLogin)
+
+
+    def test_failsWithEmptyFilename(self):
+        """
+        Test that an empty filename raises an error.
+        """
+        self.assertRaises(ValueError, strcred.makeChecker, 'file')
+        self.assertRaises(ValueError, strcred.makeChecker, 'file:')
+
+
+    def test_warnWithBadFilename(self):
+        """
+        When the file auth plugin is given a file that doesn't exist, it
+        should produce a warning.
+        """
+        oldOutput = cred_file.theFileCheckerFactory.errorOutput
+        newOutput = StringIO.StringIO()
+        cred_file.theFileCheckerFactory.errorOutput = newOutput
+        checker = strcred.makeChecker('file:' + self._fakeFilename())
+        cred_file.theFileCheckerFactory.errorOutput = oldOutput
+        self.assertIn(cred_file.invalidFileWarning, newOutput.getvalue())
+
+
+
+class TestSSHChecker(unittest.TestCase):
+    """
+    Tests for the --auth=sshkey:... checker.  The majority of the tests for the
+    ssh public key database checker are in
+    L{twisted.conch.test.test_checkers.SSHPublicKeyDatabaseTestCase}.
+    """
+
+    try:
+        import Crypto
+        import pyasn1
+    except ImportError:
+        skip = "PyCrypto is not available"
+
+
+    def test_isChecker(self):
+        """
+        Verifies that strcred.makeChecker('sshkey') returns an object
+        that implements the L{ICredentialsChecker} interface.
+        """
+        sshChecker = strcred.makeChecker('sshkey')
+        self.assertTrue(checkers.ICredentialsChecker.providedBy(sshChecker))
+        self.assertIn(
+            credentials.ISSHPrivateKey, sshChecker.credentialInterfaces)
+
+
+
+class DummyOptions(usage.Options, strcred.AuthOptionMixin):
+    """
+    Simple options for testing L{strcred.AuthOptionMixin}.
+    """
+
+
+
+class TestCheckerOptions(unittest.TestCase):
+
+    def test_createsList(self):
+        """
+        Test that the --auth command line creates a list in the
+        Options instance and appends values to it.
+        """
+        options = DummyOptions()
+        options.parseOptions(['--auth', 'memory'])
+        self.assertEqual(len(options['credCheckers']), 1)
+        options = DummyOptions()
+        options.parseOptions(['--auth', 'memory', '--auth', 'memory'])
+        self.assertEqual(len(options['credCheckers']), 2)
+
+
+    def test_invalidAuthError(self):
+        """
+        Test that the --auth command line raises an exception when it
+        gets a parameter it doesn't understand.
+        """
+        options = DummyOptions()
+        # If someone adds a 'ThisPluginDoesNotExist' then this unit
+        # test should still run.
+        invalidParameter = getInvalidAuthType()
+        self.assertRaises(
+            usage.UsageError,
+            options.parseOptions, ['--auth', invalidParameter])
+        self.assertRaises(
+            usage.UsageError,
+            options.parseOptions, ['--help-auth-type', invalidParameter])
+
+
+    def test_createsDictionary(self):
+        """
+        Test that the --auth command line creates a dictionary
+        mapping supported interfaces to the list of credentials
+        checkers that support it.
+        """
+        options = DummyOptions()
+        options.parseOptions(['--auth', 'memory', '--auth', 'anonymous'])
+        chd = options['credInterfaces']
+        self.assertEqual(len(chd[credentials.IAnonymous]), 1)
+        self.assertEqual(len(chd[credentials.IUsernamePassword]), 1)
+        chdAnonymous = chd[credentials.IAnonymous][0]
+        chdUserPass = chd[credentials.IUsernamePassword][0]
+        self.assertTrue(checkers.ICredentialsChecker.providedBy(chdAnonymous))
+        self.assertTrue(checkers.ICredentialsChecker.providedBy(chdUserPass))
+        self.assertIn(credentials.IAnonymous,
+                      chdAnonymous.credentialInterfaces)
+        self.assertIn(credentials.IUsernamePassword,
+                      chdUserPass.credentialInterfaces)
+
+
+    def test_credInterfacesProvidesLists(self):
+        """
+        Test that when two --auth arguments are passed along which
+        support the same interface, a list with both is created.
+        """
+        options = DummyOptions()
+        options.parseOptions(['--auth', 'memory', '--auth', 'unix'])
+        self.assertEqual(
+            options['credCheckers'],
+            options['credInterfaces'][credentials.IUsernamePassword])
+
+
+    def test_listDoesNotDisplayDuplicates(self):
+        """
+        Test that the list for --help-auth does not duplicate items.
+        """
+        authTypes = []
+        options = DummyOptions()
+        for cf in options._checkerFactoriesForOptHelpAuth():
+            self.assertNotIn(cf.authType, authTypes)
+            authTypes.append(cf.authType)
+
+
+    def test_displaysListCorrectly(self):
+        """
+        Test that the --help-auth argument correctly displays all
+        available authentication plugins, then exits.
+        """
+        newStdout = StringIO.StringIO()
+        options = DummyOptions()
+        options.authOutput = newStdout
+        self.assertRaises(SystemExit, options.parseOptions, ['--help-auth'])
+        for checkerFactory in strcred.findCheckerFactories():
+            self.assertIn(checkerFactory.authType, newStdout.getvalue())
+
+
+    def test_displaysHelpCorrectly(self):
+        """
+        Test that the --help-auth-type argument will correctly display
+        the help file for a particular authentication plugin.
+        """
+        newStdout = StringIO.StringIO()
+        options = DummyOptions()
+        options.authOutput = newStdout
+        self.assertRaises(
+            SystemExit, options.parseOptions, ['--help-auth-type', 'file'])
+        for line in cred_file.theFileCheckerFactory.authHelp:
+            if line.strip():
+                self.assertIn(line.strip(), newStdout.getvalue())
+
+
+    def test_unexpectedException(self):
+        """
+        When the checker specified by --auth raises an unexpected error, it
+        should be caught and re-raised within a L{usage.UsageError}.
+        """
+        options = DummyOptions()
+        err = self.assertRaises(usage.UsageError, options.parseOptions,
+                                ['--auth', 'file'])
+        self.assertEqual(str(err),
+                          "Unexpected error: 'file' requires a filename")
+
+
+
+class OptionsForUsernamePassword(usage.Options, strcred.AuthOptionMixin):
+    supportedInterfaces = (credentials.IUsernamePassword,)
+
+
+
+class OptionsForUsernameHashedPassword(usage.Options, strcred.AuthOptionMixin):
+    supportedInterfaces = (credentials.IUsernameHashedPassword,)
+
+
+
+class OptionsSupportsAllInterfaces(usage.Options, strcred.AuthOptionMixin):
+    supportedInterfaces = None
+
+
+
+class OptionsSupportsNoInterfaces(usage.Options, strcred.AuthOptionMixin):
+    supportedInterfaces = []
+
+
+
+class TestLimitingInterfaces(unittest.TestCase):
+    """
+    Tests functionality that allows an application to limit the
+    credential interfaces it can support. For the purposes of this
+    test, we use IUsernameHashedPassword, although this will never
+    really be used by the command line.
+
+    (I have, to date, not thought of a half-decent way for a user to
+    specify a hash algorithm via the command-line. Nor do I think it's
+    very useful.)
+
+    I should note that, at first, this test is counter-intuitive,
+    because we're using the checker with a pre-defined hash function
+    as the 'bad' checker. See the documentation for
+    L{twisted.cred.checkers.FilePasswordDB.hash} for more details.
+    """
+
+    def setUp(self):
+        self.filename = self.mktemp()
+        file(self.filename, 'w').write('admin:asdf\nalice:foo\n')
+        self.goodChecker = checkers.FilePasswordDB(self.filename)
+        self.badChecker = checkers.FilePasswordDB(
+            self.filename, hash=self._hash)
+        self.anonChecker = checkers.AllowAnonymousAccess()
+
+
+    def _hash(self, networkUsername, networkPassword, storedPassword):
+        """
+        A dumb hash that doesn't really do anything.
+        """
+        return networkPassword
+
+
+    def test_supportsInterface(self):
+        """
+        Test that the supportsInterface method behaves appropriately.
+        """
+        options = OptionsForUsernamePassword()
+        self.assertTrue(
+            options.supportsInterface(credentials.IUsernamePassword))
+        self.assertFalse(
+            options.supportsInterface(credentials.IAnonymous))
+        self.assertRaises(
+            strcred.UnsupportedInterfaces, options.addChecker,
+            self.anonChecker)
+
+
+    def test_supportsAllInterfaces(self):
+        """
+        Test that the supportsInterface method behaves appropriately
+        when the supportedInterfaces attribute is None.
+        """
+        options = OptionsSupportsAllInterfaces()
+        self.assertTrue(
+            options.supportsInterface(credentials.IUsernamePassword))
+        self.assertTrue(
+            options.supportsInterface(credentials.IAnonymous))
+
+
+    def test_supportsCheckerFactory(self):
+        """
+        Test that the supportsCheckerFactory method behaves appropriately.
+        """
+        options = OptionsForUsernamePassword()
+        fileCF = cred_file.theFileCheckerFactory
+        anonCF = cred_anonymous.theAnonymousCheckerFactory
+        self.assertTrue(options.supportsCheckerFactory(fileCF))
+        self.assertFalse(options.supportsCheckerFactory(anonCF))
+
+
+    def test_canAddSupportedChecker(self):
+        """
+        Test that when addChecker is called with a checker that
+        implements at least one of the interfaces our application
+        supports, it is successful.
+        """
+        options = OptionsForUsernamePassword()
+        options.addChecker(self.goodChecker)
+        iface = options.supportedInterfaces[0]
+        # Test that we did get IUsernamePassword
+        self.assertIdentical(
+            options['credInterfaces'][iface][0], self.goodChecker)
+        self.assertIdentical(options['credCheckers'][0], self.goodChecker)
+        # Test that we didn't get IUsernameHashedPassword
+        self.assertEqual(len(options['credInterfaces'][iface]), 1)
+        self.assertEqual(len(options['credCheckers']), 1)
+
+
+    def test_failOnAddingUnsupportedChecker(self):
+        """
+        Test that when addChecker is called with a checker that does
+        not implement any supported interfaces, it fails.
+        """
+        options = OptionsForUsernameHashedPassword()
+        self.assertRaises(strcred.UnsupportedInterfaces,
+                          options.addChecker, self.badChecker)
+
+
+    def test_unsupportedInterfaceError(self):
+        """
+        Test that the --auth command line raises an exception when it
+        gets a checker we don't support.
+        """
+        options = OptionsSupportsNoInterfaces()
+        authType = cred_anonymous.theAnonymousCheckerFactory.authType
+        self.assertRaises(
+            usage.UsageError,
+            options.parseOptions, ['--auth', authType])
+
+
+    def test_helpAuthLimitsOutput(self):
+        """
+        Test that --help-auth will only list checkers that purport to
+        supply at least one of the credential interfaces our
+        application can use.
+        """
+        options = OptionsForUsernamePassword()
+        for factory in options._checkerFactoriesForOptHelpAuth():
+            invalid = True
+            for interface in factory.credentialInterfaces:
+                if options.supportsInterface(interface):
+                    invalid = False
+            if invalid:
+                raise strcred.UnsupportedInterfaces()
+
+
+    def test_helpAuthTypeLimitsOutput(self):
+        """
+        Test that --help-auth-type will display a warning if you get
+        help for an authType that does not supply at least one of the
+        credential interfaces our application can use.
+        """
+        options = OptionsForUsernamePassword()
+        # Find an interface that we can use for our test
+        invalidFactory = None
+        for factory in strcred.findCheckerFactories():
+            if not options.supportsCheckerFactory(factory):
+                invalidFactory = factory
+                break
+        self.assertNotIdentical(invalidFactory, None)
+        # Capture output and make sure the warning is there
+        newStdout = StringIO.StringIO()
+        options.authOutput = newStdout
+        self.assertRaises(SystemExit, options.parseOptions,
+                          ['--help-auth-type', 'anonymous'])
+        self.assertIn(strcred.notSupportedWarning, newStdout.getvalue())
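The recurring pattern in these tests is twisted.cred.strcred.makeChecker, which resolves the auth-type prefix of a description string to a plugin and returns an ICredentialsChecker. A rough usage sketch, mirroring the memory-checker tests above:

    # Build a checker from a description string and authenticate (sketch).
    from twisted.cred import credentials, strcred

    checker = strcred.makeChecker('memory:admin:asdf')
    d = checker.requestAvatarId(credentials.UsernamePassword('admin', 'asdf'))

    def loggedIn(avatarId):
        # avatarId is the username ('admin') for the memory checker; a wrong
        # password errbacks with cred's UnauthorizedLogin instead.
        return avatarId

    d.addCallback(loggedIn)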
diff --git a/ThirdParty/Twisted/twisted/test/test_strerror.py b/ThirdParty/Twisted/twisted/test/test_strerror.py
new file mode 100644
index 0000000..ce14051
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_strerror.py
@@ -0,0 +1,151 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test strerror
+"""
+
+import socket
+import os
+
+from twisted.trial.unittest import TestCase
+from twisted.internet.tcp import ECONNABORTED
+from twisted.python.win32 import _ErrorFormatter, formatError
+from twisted.python.runtime import platform
+
+
+class _MyWindowsException(OSError):
+    """
+    An exception type like L{ctypes.WinError}, but available on all platforms.
+    """
+
+
+
+class ErrorFormatingTestCase(TestCase):
+    """
+    Tests for C{_ErrorFormatter.formatError}.
+    """
+    probeErrorCode = ECONNABORTED
+    probeMessage = "correct message value"
+
+    def test_strerrorFormatting(self):
+        """
+        L{_ErrorFormatter.formatError} should use L{os.strerror} to format
+        error messages if it is constructed without any better mechanism.
+        """
+        formatter = _ErrorFormatter(None, None, None)
+        message = formatter.formatError(self.probeErrorCode)
+        self.assertEqual(message, os.strerror(self.probeErrorCode))
+
+
+    def test_emptyErrorTab(self):
+        """
+        L{_ErrorFormatter.formatError} should use L{os.strerror} to format
+        error messages if it is constructed with only an error tab which does
+        not contain the error code it is called with.
+        """
+        error = 1
+        # Sanity check
+        self.assertNotEqual(self.probeErrorCode, error)
+        formatter = _ErrorFormatter(None, None, {error: 'wrong message'})
+        message = formatter.formatError(self.probeErrorCode)
+        self.assertEqual(message, os.strerror(self.probeErrorCode))
+
+
+    def test_errorTab(self):
+        """
+        L{_ErrorFormatter.formatError} should use C{errorTab} if it is supplied
+        and contains the requested error code.
+        """
+        formatter = _ErrorFormatter(
+            None, None, {self.probeErrorCode: self.probeMessage})
+        message = formatter.formatError(self.probeErrorCode)
+        self.assertEqual(message, self.probeMessage)
+
+
+    def test_formatMessage(self):
+        """
+        L{_ErrorFormatter.formatError} should return the return value of
+        C{formatMessage} if it is supplied.
+        """
+        formatCalls = []
+        def formatMessage(errorCode):
+            formatCalls.append(errorCode)
+            return self.probeMessage
+        formatter = _ErrorFormatter(
+            None, formatMessage, {self.probeErrorCode: 'wrong message'})
+        message = formatter.formatError(self.probeErrorCode)
+        self.assertEqual(message, self.probeMessage)
+        self.assertEqual(formatCalls, [self.probeErrorCode])
+
+
+    def test_winError(self):
+        """
+        L{_ErrorFormatter.formatError} should return the message argument from
+        the exception L{winError} returns, if L{winError} is supplied.
+        """
+        winCalls = []
+        def winError(errorCode):
+            winCalls.append(errorCode)
+            return _MyWindowsException(errorCode, self.probeMessage)
+        formatter = _ErrorFormatter(
+            winError,
+            lambda error: 'formatMessage: wrong message',
+            {self.probeErrorCode: 'errorTab: wrong message'})
+        message = formatter.formatError(self.probeErrorCode)
+        self.assertEqual(message, self.probeMessage)
+
+
+    def test_fromEnvironment(self):
+        """
+        L{_ErrorFormatter.fromEnvironment} should create an L{_ErrorFormatter}
+        instance with attributes populated from available modules.
+        """
+        formatter = _ErrorFormatter.fromEnvironment()
+
+        if formatter.winError is not None:
+            from ctypes import WinError
+            self.assertEqual(
+                formatter.formatError(self.probeErrorCode),
+                WinError(self.probeErrorCode).strerror)
+            formatter.winError = None
+
+        if formatter.formatMessage is not None:
+            from win32api import FormatMessage
+            self.assertEqual(
+                formatter.formatError(self.probeErrorCode),
+                FormatMessage(self.probeErrorCode))
+            formatter.formatMessage = None
+
+        if formatter.errorTab is not None:
+            from socket import errorTab
+            self.assertEqual(
+                formatter.formatError(self.probeErrorCode),
+                errorTab[self.probeErrorCode])
+
+    if platform.getType() != "win32":
+        test_fromEnvironment.skip = "Test will run only on Windows."
+
+
+    def test_correctLookups(self):
+        """
+        Given a known-good errno, make sure that formatError gives results
+        matching either C{socket.errorTab}, C{ctypes.WinError}, or
+        C{win32api.FormatMessage}.
+        """
+        acceptable = [socket.errorTab[ECONNABORTED]]
+        try:
+            from ctypes import WinError
+            acceptable.append(WinError(ECONNABORTED).strerror)
+        except ImportError:
+            pass
+        try:
+            from win32api import FormatMessage
+            acceptable.append(FormatMessage(ECONNABORTED))
+        except ImportError:
+            pass
+
+        self.assertIn(formatError(ECONNABORTED), acceptable)
+
+    if platform.getType() != "win32":
+        test_correctLookups.skip = "Test will run only on Windows."
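Per the fixtures above, _ErrorFormatter.formatError consults a WinError-style callable first, then a FormatMessage-style callable, then an error table, and finally falls back to os.strerror. A small sketch of that chain, reusing the same imports as the test module:

    # Exercise the lookup order of _ErrorFormatter directly (sketch).
    from twisted.internet.tcp import ECONNABORTED
    from twisted.python.win32 import _ErrorFormatter, formatError

    # Only an error table is supplied, so its entry wins.
    formatter = _ErrorFormatter(None, None, {ECONNABORTED: 'connection aborted'})
    assert formatter.formatError(ECONNABORTED) == 'connection aborted'

    # With no sources at all, formatError falls back to os.strerror.
    bare = _ErrorFormatter(None, None, None)
    assert bare.formatError(ECONNABORTED)  # platform-dependent message text

    # formatError() is the module-level formatter built fromEnvironment().
    print(formatError(ECONNABORTED))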
diff --git a/ThirdParty/Twisted/twisted/test/test_stringtransport.py b/ThirdParty/Twisted/twisted/test/test_stringtransport.py
new file mode 100644
index 0000000..ca12098
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_stringtransport.py
@@ -0,0 +1,279 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.test.proto_helpers}.
+"""
+
+from zope.interface.verify import verifyObject
+
+from twisted.internet.interfaces import (ITransport, IPushProducer, IConsumer,
+    IReactorTCP, IReactorSSL, IReactorUNIX, IAddress, IListeningPort,
+    IConnector)
+from twisted.internet.address import IPv4Address
+from twisted.trial.unittest import TestCase
+from twisted.test.proto_helpers import (StringTransport, MemoryReactor,
+    RaisingMemoryReactor)
+from twisted.internet.protocol import ClientFactory, Factory
+
+
+class StringTransportTests(TestCase):
+    """
+    Tests for L{twisted.test.proto_helpers.StringTransport}.
+    """
+    def setUp(self):
+        self.transport = StringTransport()
+
+
+    def test_interfaces(self):
+        """
+        L{StringTransport} instances provide L{ITransport}, L{IPushProducer},
+        and L{IConsumer}.
+        """
+        self.assertTrue(verifyObject(ITransport, self.transport))
+        self.assertTrue(verifyObject(IPushProducer, self.transport))
+        self.assertTrue(verifyObject(IConsumer, self.transport))
+
+
+    def test_registerProducer(self):
+        """
+        L{StringTransport.registerProducer} records the arguments supplied to
+        it as instance attributes.
+        """
+        producer = object()
+        streaming = object()
+        self.transport.registerProducer(producer, streaming)
+        self.assertIdentical(self.transport.producer, producer)
+        self.assertIdentical(self.transport.streaming, streaming)
+
+
+    def test_disallowedRegisterProducer(self):
+        """
+        L{StringTransport.registerProducer} raises L{RuntimeError} if a
+        producer is already registered.
+        """
+        producer = object()
+        self.transport.registerProducer(producer, True)
+        self.assertRaises(
+            RuntimeError, self.transport.registerProducer, object(), False)
+        self.assertIdentical(self.transport.producer, producer)
+        self.assertTrue(self.transport.streaming)
+
+
+    def test_unregisterProducer(self):
+        """
+        L{StringTransport.unregisterProducer} causes the transport to forget
+        about the registered producer and makes it possible to register a new
+        one.
+        """
+        oldProducer = object()
+        newProducer = object()
+        self.transport.registerProducer(oldProducer, False)
+        self.transport.unregisterProducer()
+        self.assertIdentical(self.transport.producer, None)
+        self.transport.registerProducer(newProducer, True)
+        self.assertIdentical(self.transport.producer, newProducer)
+        self.assertTrue(self.transport.streaming)
+
+
+    def test_invalidUnregisterProducer(self):
+        """
+        L{StringTransport.unregisterProducer} raises L{RuntimeError} if called
+        when no producer is registered.
+        """
+        self.assertRaises(RuntimeError, self.transport.unregisterProducer)
+
+
+    def test_initialProducerState(self):
+        """
+        L{StringTransport.producerState} is initially C{'producing'}.
+        """
+        self.assertEqual(self.transport.producerState, 'producing')
+
+
+    def test_pauseProducing(self):
+        """
+        L{StringTransport.pauseProducing} changes the C{producerState} of the
+        transport to C{'paused'}.
+        """
+        self.transport.pauseProducing()
+        self.assertEqual(self.transport.producerState, 'paused')
+
+
+    def test_resumeProducing(self):
+        """
+        L{StringTransport.resumeProducing} changes the C{producerState} of the
+        transport to C{'producing'}.
+        """
+        self.transport.pauseProducing()
+        self.transport.resumeProducing()
+        self.assertEqual(self.transport.producerState, 'producing')
+
+
+    def test_stopProducing(self):
+        """
+        L{StringTransport.stopProducing} changes the C{'producerState'} of the
+        transport to C{'stopped'}.
+        """
+        self.transport.stopProducing()
+        self.assertEqual(self.transport.producerState, 'stopped')
+
+
+    def test_stoppedTransportCannotPause(self):
+        """
+        L{StringTransport.pauseProducing} raises L{RuntimeError} if the
+        transport has been stopped.
+        """
+        self.transport.stopProducing()
+        self.assertRaises(RuntimeError, self.transport.pauseProducing)
+
+
+    def test_stoppedTransportCannotResume(self):
+        """
+        L{StringTransport.resumeProducing} raises L{RuntimeError} if the
+        transport has been stopped.
+        """
+        self.transport.stopProducing()
+        self.assertRaises(RuntimeError, self.transport.resumeProducing)
+
+
+    def test_disconnectingTransportCannotPause(self):
+        """
+        L{StringTransport.pauseProducing} raises L{RuntimeError} if the
+        transport is being disconnected.
+        """
+        self.transport.loseConnection()
+        self.assertRaises(RuntimeError, self.transport.pauseProducing)
+
+
+    def test_disconnectingTransportCannotResume(self):
+        """
+        L{StringTransport.resumeProducing} raises L{RuntimeError} if the
+        transport is being disconnected.
+        """
+        self.transport.loseConnection()
+        self.assertRaises(RuntimeError, self.transport.resumeProducing)
+
+
+    def test_loseConnectionSetsDisconnecting(self):
+        """
+        L{StringTransport.loseConnection} toggles the C{disconnecting} instance
+        variable to C{True}.
+        """
+        self.assertFalse(self.transport.disconnecting)
+        self.transport.loseConnection()
+        self.assertTrue(self.transport.disconnecting)
+
+
+    def test_specifiedHostAddress(self):
+        """
+        If a host address is passed to L{StringTransport.__init__}, that
+        value is returned from L{StringTransport.getHost}.
+        """
+        address = object()
+        self.assertIdentical(StringTransport(address).getHost(), address)
+
+
+    def test_specifiedPeerAddress(self):
+        """
+        If a peer address is passed to L{StringTransport.__init__}, that
+        value is returned from L{StringTransport.getPeer}.
+        """        
+        address = object()
+        self.assertIdentical(
+            StringTransport(peerAddress=address).getPeer(), address)
+
+
+    def test_defaultHostAddress(self):
+        """
+        If no host address is passed to L{StringTransport.__init__}, an
+        L{IPv4Address} is returned from L{StringTransport.getHost}.
+        """
+        address = StringTransport().getHost()
+        self.assertIsInstance(address, IPv4Address)
+
+
+    def test_defaultPeerAddress(self):
+        """
+        If no peer address is passed to L{StringTransport.__init__}, an
+        L{IPv4Address} is returned from L{StringTransport.getPeer}.
+        """
+        address = StringTransport().getPeer()
+        self.assertIsInstance(address, IPv4Address)
+
+
+
+class ReactorTests(TestCase):
+    """
+    Tests for L{MemoryReactor} and L{RaisingMemoryReactor}.
+    """
+
+    def test_memoryReactorProvides(self):
+        """
+        L{MemoryReactor} provides all of the attributes described by the
+        interfaces it advertises.
+        """
+        memoryReactor = MemoryReactor()
+        verifyObject(IReactorTCP, memoryReactor)
+        verifyObject(IReactorSSL, memoryReactor)
+        verifyObject(IReactorUNIX, memoryReactor)
+
+
+    def test_raisingReactorProvides(self):
+        """
+        L{RaisingMemoryReactor} provides all of the attributes described by the
+        interfaces it advertises.
+        """
+        raisingReactor = RaisingMemoryReactor()
+        verifyObject(IReactorTCP, raisingReactor)
+        verifyObject(IReactorSSL, raisingReactor)
+        verifyObject(IReactorUNIX, raisingReactor)
+
+
+    def test_connectDestination(self):
+        """
+        L{MemoryReactor.connectTCP}, L{MemoryReactor.connectSSL}, and
+        L{MemoryReactor.connectUNIX} will return an L{IConnector} whose
+        C{getDestination} method returns an L{IAddress} with attributes which
+        reflect the values passed.
+        """
+        memoryReactor = MemoryReactor()
+        for connector in [memoryReactor.connectTCP(
+                              "test.example.com", 8321, ClientFactory()),
+                          memoryReactor.connectSSL(
+                              "test.example.com", 8321, ClientFactory(),
+                              None)]:
+            verifyObject(IConnector, connector)
+            address = connector.getDestination()
+            verifyObject(IAddress, address)
+            self.assertEqual(address.host, "test.example.com")
+            self.assertEqual(address.port, 8321)
+        connector = memoryReactor.connectUNIX("/fake/path", ClientFactory())
+        verifyObject(IConnector, connector)
+        address = connector.getDestination()
+        verifyObject(IAddress, address)
+        self.assertEqual(address.name, "/fake/path")
+
+
+    def test_listenDefaultHost(self):
+        """
+        L{MemoryReactor.listenTCP}, L{MemoryReactor.listenSSL} and
+        L{MemoryReactor.listenUNIX} will return an L{IListeningPort} whose
+        C{getHost} method returns an L{IAddress}; C{listenTCP} and C{listenSSL}
+        will have a default host of C{'0.0.0.0'}, and a port that reflects the
+        value passed, and C{listenUNIX} will have a name that reflects the path
+        passed.
+        """
+        memoryReactor = MemoryReactor()
+        for port in [memoryReactor.listenTCP(8242, Factory()),
+                     memoryReactor.listenSSL(8242, Factory(), None)]:
+            verifyObject(IListeningPort, port)
+            address = port.getHost()
+            verifyObject(IAddress, address)
+            self.assertEqual(address.host, '0.0.0.0')
+            self.assertEqual(address.port, 8242)
+        port = memoryReactor.listenUNIX("/path/to/socket", Factory())
+        verifyObject(IListeningPort, port)
+        address = port.getHost()
+        verifyObject(IAddress, address)
+        self.assertEqual(address.name, "/path/to/socket")
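StringTransport and MemoryReactor are the in-memory doubles this file verifies: the former records whatever a protocol writes, the latter records connection and listen attempts without touching the network. A short sketch of the usual test pattern (StringTransport.value(), which returns the accumulated bytes, is assumed from the proto_helpers API rather than shown in this diff):

    # Drive a protocol against in-memory test doubles (sketch).
    from twisted.internet.protocol import ClientFactory, Protocol
    from twisted.test.proto_helpers import MemoryReactor, StringTransport

    class Greeter(Protocol):
        def connectionMade(self):
            self.transport.write('hello\r\n')

    transport = StringTransport()
    proto = Greeter()
    proto.makeConnection(transport)          # fires connectionMade
    assert transport.value() == 'hello\r\n'  # bytes the protocol wrote

    # MemoryReactor records the attempt instead of opening a socket.
    memoryReactor = MemoryReactor()
    connector = memoryReactor.connectTCP('example.com', 80, ClientFactory())
    destination = connector.getDestination()  # .host == 'example.com', .port == 80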
diff --git a/ThirdParty/Twisted/twisted/test/test_strports.py b/ThirdParty/Twisted/twisted/test/test_strports.py
new file mode 100644
index 0000000..fd081ec
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_strports.py
@@ -0,0 +1,133 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.application.strports}.
+"""
+
+from twisted.trial.unittest import TestCase
+from twisted.application import strports
+from twisted.application import internet
+from twisted.internet.test.test_endpoints import ParserTestCase
+from twisted.internet.protocol import Factory
+from twisted.internet.endpoints import TCP4ServerEndpoint, UNIXServerEndpoint
+
+
+
+class DeprecatedParseTestCase(ParserTestCase):
+    """
+    L{strports.parse} is deprecated.  It's an alias for a method that is now
+    private in L{twisted.internet.endpoints}.
+    """
+
+    def parse(self, *a, **kw):
+        result = strports.parse(*a, **kw)
+        warnings = self.flushWarnings([self.parse])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(
+            warnings[0]['message'],
+            "twisted.application.strports.parse was deprecated "
+            "in Twisted 10.2.0: in favor of twisted.internet.endpoints.serverFromString")
+        return result
+
+
+    def test_simpleNumeric(self):
+        """
+        Base numeric ports should be parsed as TCP.
+        """
+        self.assertEqual(self.parse('80', self.f),
+                         ('TCP', (80, self.f), {'interface':'', 'backlog':50}))
+
+
+    def test_allKeywords(self):
+        """
+        A collection of keyword arguments with no prefixed type, like 'port=80',
+        will be parsed as keyword arguments to 'tcp'.
+        """
+        self.assertEqual(self.parse('port=80', self.f),
+                         ('TCP', (80, self.f), {'interface':'', 'backlog':50}))
+
+
+
+class ServiceTestCase(TestCase):
+    """
+    Tests for L{strports.service}.
+    """
+
+    def test_service(self):
+        """
+        L{strports.service} returns a L{StreamServerEndpointService}
+        constructed with an endpoint produced from
+        L{endpoint.serverFromString}, using the same syntax.
+        """
+        reactor = object() # the cake is a lie
+        aFactory = Factory()
+        aGoodPort = 1337
+        svc = strports.service(
+            'tcp:'+str(aGoodPort), aFactory, reactor=reactor)
+        self.assertIsInstance(svc, internet.StreamServerEndpointService)
+
+        # See twisted.application.test.test_internet.TestEndpointService.
+        # test_synchronousRaiseRaisesSynchronously
+        self.assertEqual(svc._raiseSynchronously, True)
+        self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
+        # Maybe we should implement equality for endpoints.
+        self.assertEqual(svc.endpoint._port, aGoodPort)
+        self.assertIdentical(svc.factory, aFactory)
+        self.assertIdentical(svc.endpoint._reactor, reactor)
+
+
+    def test_serviceDefaultReactor(self):
+        """
+        L{strports.service} will use the default reactor when none is provided
+        as an argument.
+        """
+        from twisted.internet import reactor as globalReactor
+        aService = strports.service("tcp:80", None)
+        self.assertIdentical(aService.endpoint._reactor, globalReactor)
+
+
+    def test_serviceDeprecatedDefault(self):
+        """
+        L{strports.service} still accepts a 'default' argument, which will
+        affect the parsing of 'default' (i.e. 'not containing a colon')
+        endpoint descriptions, but this behavior is deprecated.
+        """
+        svc = strports.service("8080", None, "unix")
+        self.assertIsInstance(svc.endpoint, UNIXServerEndpoint)
+        warnings = self.flushWarnings([self.test_serviceDeprecatedDefault])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "The 'default' parameter was deprecated in Twisted 10.2.0.  "
+            "Use qualified endpoint descriptions; for example, 'tcp:8080'.")
+        self.assertEqual(len(warnings), 1)
+
+        # Almost the same case, but slightly tricky - explicitly passing the old
+        # default value, None, also must trigger a deprecation warning.
+        svc = strports.service("tcp:8080", None, None)
+        self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
+        warnings = self.flushWarnings([self.test_serviceDeprecatedDefault])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "The 'default' parameter was deprecated in Twisted 10.2.0.")
+        self.assertEqual(len(warnings), 1)
+
+
+    def test_serviceDeprecatedUnqualified(self):
+        """
+        Unqualified strport descriptions, i.e. "8080", are deprecated.
+        """
+        svc = strports.service("8080", None)
+        self.assertIsInstance(svc.endpoint, TCP4ServerEndpoint)
+        warnings = self.flushWarnings(
+            [self.test_serviceDeprecatedUnqualified])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "Unqualified strport description passed to 'service'."
+            "Use qualified endpoint descriptions; for example, 'tcp:8080'.")
+        self.assertEqual(len(warnings), 1)
+
+
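strports.service is the one-call bridge from an endpoint description string to a service object. A hedged sketch of the API these tests pin down:

    # Turn a description string into a ready-to-start service (sketch).
    from twisted.application import strports
    from twisted.internet.protocol import Factory

    svc = strports.service('tcp:8080', Factory())
    # svc is a StreamServerEndpointService wrapping a TCP4ServerEndpoint;
    # hooking it into an Application (or calling svc.startService()) makes
    # it begin listening on the described port.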
diff --git a/ThirdParty/Twisted/twisted/test/test_task.py b/ThirdParty/Twisted/twisted/test/test_task.py
new file mode 100644
index 0000000..d7edf40
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_task.py
@@ -0,0 +1,1029 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.internet.task}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.python.compat import set
+
+from twisted.trial import unittest
+
+from twisted.internet import interfaces, task, reactor, defer, error
+from twisted.internet.main import installReactor
+from twisted.internet.test.modulehelpers import NoReactor
+
+# Be compatible with any jerks who used our private stuff
+Clock = task.Clock
+
+from twisted.python import failure
+
+
+class TestableLoopingCall(task.LoopingCall):
+    def __init__(self, clock, *a, **kw):
+        super(TestableLoopingCall, self).__init__(*a, **kw)
+        self.clock = clock
+
+
+
+class TestException(Exception):
+    pass
+
+
+
+class ClockTestCase(unittest.TestCase):
+    """
+    Test the non-wallclock based clock implementation.
+    """
+    def testSeconds(self):
+        """
+        Test that the C{seconds} method of the fake clock returns fake time.
+        """
+        c = task.Clock()
+        self.assertEqual(c.seconds(), 0)
+
+
+    def testCallLater(self):
+        """
+        Test that calls can be scheduled for later with the fake clock and
+        that an L{IDelayedCall} is handed back.
+        """
+        c = task.Clock()
+        call = c.callLater(1, lambda a, b: None, 1, b=2)
+        self.failUnless(interfaces.IDelayedCall.providedBy(call))
+        self.assertEqual(call.getTime(), 1)
+        self.failUnless(call.active())
+
+
+    def testCallLaterCancelled(self):
+        """
+        Test that calls can be cancelled.
+        """
+        c = task.Clock()
+        call = c.callLater(1, lambda a, b: None, 1, b=2)
+        call.cancel()
+        self.failIf(call.active())
+
+
+    def test_callLaterOrdering(self):
+        """
+        Test that the DelayedCall returned is not one previously
+        created.
+        """
+        c = task.Clock()
+        call1 = c.callLater(10, lambda a, b: None, 1, b=2)
+        call2 = c.callLater(1, lambda a, b: None, 3, b=4)
+        self.failIf(call1 is call2)
+
+
+    def testAdvance(self):
+        """
+        Test that advancing the clock will fire some calls.
+        """
+        events = []
+        c = task.Clock()
+        call = c.callLater(2, lambda: events.append(None))
+        c.advance(1)
+        self.assertEqual(events, [])
+        c.advance(1)
+        self.assertEqual(events, [None])
+        self.failIf(call.active())
+
+
+    def testAdvanceCancel(self):
+        """
+        Test attempting to cancel the call in a callback.
+
+        AlreadyCalled should be raised, not for example a ValueError from
+        removing the call from Clock.calls. This requires call.called to be
+        set before the callback is called.
+        """
+        c = task.Clock()
+        def cb():
+            self.assertRaises(error.AlreadyCalled, call.cancel)
+        call = c.callLater(1, cb)
+        c.advance(1)
+
+
+    def testCallLaterDelayed(self):
+        """
+        Test that calls can be delayed.
+        """
+        events = []
+        c = task.Clock()
+        call = c.callLater(1, lambda a, b: events.append((a, b)), 1, b=2)
+        call.delay(1)
+        self.assertEqual(call.getTime(), 2)
+        c.advance(1.5)
+        self.assertEqual(events, [])
+        c.advance(1.0)
+        self.assertEqual(events, [(1, 2)])
+
+
+    def testCallLaterResetLater(self):
+        """
+        Test that calls can have their time reset to a later time.
+        """
+        events = []
+        c = task.Clock()
+        call = c.callLater(2, lambda a, b: events.append((a, b)), 1, b=2)
+        c.advance(1)
+        call.reset(3)
+        self.assertEqual(call.getTime(), 4)
+        c.advance(2)
+        self.assertEqual(events, [])
+        c.advance(1)
+        self.assertEqual(events, [(1, 2)])
+
+
+    def testCallLaterResetSooner(self):
+        """
+        Test that calls can have their time reset to an earlier time.
+        """
+        events = []
+        c = task.Clock()
+        call = c.callLater(4, lambda a, b: events.append((a, b)), 1, b=2)
+        call.reset(3)
+        self.assertEqual(call.getTime(), 3)
+        c.advance(3)
+        self.assertEqual(events, [(1, 2)])
+
+
+    def test_getDelayedCalls(self):
+        """
+        Test that we can get a list of all delayed calls
+        """
+        c = task.Clock()
+        call = c.callLater(1, lambda x: None)
+        call2 = c.callLater(2, lambda x: None)
+
+        calls = c.getDelayedCalls()
+
+        self.assertEqual(set([call, call2]), set(calls))
+
+
+    def test_getDelayedCallsEmpty(self):
+        """
+        Test that we get an empty list from getDelayedCalls on a newly
+        constructed Clock.
+        """
+        c = task.Clock()
+        self.assertEqual(c.getDelayedCalls(), [])
+
+
+    def test_providesIReactorTime(self):
+        c = task.Clock()
+        self.failUnless(interfaces.IReactorTime.providedBy(c),
+                        "Clock does not provide IReactorTime")
+
+
+    def test_callLaterKeepsCallsOrdered(self):
+        """
+        The order of calls scheduled by L{task.Clock.callLater} is honored when
+        adding a new call via calling L{task.Clock.callLater} again.
+
+        For example, if L{task.Clock.callLater} is invoked with a callable "A"
+        and a time t0, and then the L{IDelayedCall} which results from that is
+        C{reset} to a later time t2 which is greater than t0, and I{then}
+        L{task.Clock.callLater} is invoked again with a callable "B", and time
+        t1 which is less than t2 but greater than t0, "B" will be invoked before
+        "A".
+        """
+        result = []
+        expected = [('b', 2.0), ('a', 3.0)]
+        clock = task.Clock()
+        logtime = lambda n: result.append((n, clock.seconds()))
+
+        call_a = clock.callLater(1.0, logtime, "a")
+        call_a.reset(3.0)
+        clock.callLater(2.0, logtime, "b")
+
+        clock.pump([1]*3)
+        self.assertEqual(result, expected)
+
+
+    def test_callLaterResetKeepsCallsOrdered(self):
+        """
+        The order of calls scheduled by L{task.Clock.callLater} is honored when
+        re-scheduling an existing call via L{IDelayedCall.reset} on the result
+        of a previous call to C{callLater}.
+
+        For example, if L{task.Clock.callLater} is invoked with a callable "A"
+        and a time t0, and then L{task.Clock.callLater} is invoked again with a
+        callable "B", and time t1 greater than t0, and finally the
+        L{IDelayedCall} for "A" is C{reset} to a later time, t2, which is
+        greater than t1, "B" will be invoked before "A".
+        """
+        result = []
+        expected = [('b', 2.0), ('a', 3.0)]
+        clock = task.Clock()
+        logtime = lambda n: result.append((n, clock.seconds()))
+
+        call_a = clock.callLater(1.0, logtime, "a")
+        clock.callLater(2.0, logtime, "b")
+        call_a.reset(3.0)
+
+        clock.pump([1]*3)
+        self.assertEqual(result, expected)
+
+
+    def test_callLaterResetInsideCallKeepsCallsOrdered(self):
+        """
+        The order of calls scheduled by L{task.Clock.callLater} is honored when
+        re-scheduling an existing call via L{IDelayedCall.reset} on the result
+        of a previous call to C{callLater}, even when that call to C{reset}
+        occurs within the callable scheduled by C{callLater} itself.
+        """
+        result = []
+        expected = [('c', 3.0), ('b', 4.0)]
+        clock = task.Clock()
+        logtime = lambda n: result.append((n, clock.seconds()))
+
+        call_b = clock.callLater(2.0, logtime, "b")
+        def a():
+            call_b.reset(3.0)
+
+        clock.callLater(1.0, a)
+        clock.callLater(3.0, logtime, "c")
+
+        clock.pump([0.5] * 10)
+        self.assertEqual(result, expected)
+
+
+
+class LoopTestCase(unittest.TestCase):
+    """
+    Tests for L{task.LoopingCall} based on a fake L{IReactorTime}
+    implementation.
+    """
+    def test_defaultClock(self):
+        """
+        L{LoopingCall}'s default clock should be the reactor.
+        """
+        call = task.LoopingCall(lambda: None)
+        self.assertEqual(call.clock, reactor)
+
+
+    def test_callbackTimeSkips(self):
+        """
+        When more time than the defined interval passes during the execution
+        of a callback, L{LoopingCall} should schedule the next call for the
+        next interval which is still in the future.
+        """
+        times = []
+        callDuration = None
+        clock = task.Clock()
+        def aCallback():
+            times.append(clock.seconds())
+            clock.advance(callDuration)
+        call = task.LoopingCall(aCallback)
+        call.clock = clock
+
+        # Start a LoopingCall with a 0.5 second increment, and immediately call
+        # the callable.
+        callDuration = 2
+        call.start(0.5)
+
+        # Verify that the callable was called, and since it was immediate, with
+        # no skips.
+        self.assertEqual(times, [0])
+
+        # The callback should have advanced the clock by the callDuration.
+        self.assertEqual(clock.seconds(), callDuration)
+
+        # An iteration should have occurred at 2, but since 2 is the present
+        # and not the future, it is skipped.
+
+        clock.advance(0)
+        self.assertEqual(times, [0])
+
+        # 2.5 is in the future, and is not skipped.
+        callDuration = 1
+        clock.advance(0.5)
+        self.assertEqual(times, [0, 2.5])
+        self.assertEqual(clock.seconds(), 3.5)
+
+        # Another iteration should have occurred, but it is again the
+        # present and not the future, so it is skipped as well.
+        clock.advance(0)
+        self.assertEqual(times, [0, 2.5])
+
+        # 4 is in the future, and is not skipped.
+        callDuration = 0
+        clock.advance(0.5)
+        self.assertEqual(times, [0, 2.5, 4])
+        self.assertEqual(clock.seconds(), 4)
+
+
+    def test_reactorTimeSkips(self):
+        """
+        When more time than the defined interval passes between when
+        L{LoopingCall} schedules itself to run again and when it actually
+        runs again, it should schedule the next call for the next interval
+        which is still in the future.
+        """
+        times = []
+        clock = task.Clock()
+        def aCallback():
+            times.append(clock.seconds())
+
+        # Start a LoopingCall that tracks the time passed, with a 0.5 second
+        # increment.
+        call = task.LoopingCall(aCallback)
+        call.clock = clock
+        call.start(0.5)
+
+        # Initially, no time should have passed!
+        self.assertEqual(times, [0])
+
+        # Advance the clock by 2 seconds (2 seconds should have passed)
+        clock.advance(2)
+        self.assertEqual(times, [0, 2])
+
+        # Advance the clock by 1 second (3 total should have passed)
+        clock.advance(1)
+        self.assertEqual(times, [0, 2, 3])
+
+        # Advance the clock by 0 seconds (this should have no effect!)
+        clock.advance(0)
+        self.assertEqual(times, [0, 2, 3])
+
+
+    def test_reactorTimeCountSkips(self):
+        """
+        When L{LoopingCall} schedules itself to run again, if more than the
+        specified interval has passed, it should schedule the next call for the
+        next interval which is still in the future. If it was created
+        using L{LoopingCall.withCount}, a positional argument will be
+        inserted at the beginning of the argument list, indicating the number
+        of calls that should have been made.
+        """
+        times = []
+        clock = task.Clock()
+        def aCallback(numCalls):
+            times.append((clock.seconds(), numCalls))
+
+        # Start a LoopingCall that tracks the time passed, and the number of
+        # skips, with a 0.5 second increment.
+        call = task.LoopingCall.withCount(aCallback)
+        call.clock = clock
+        INTERVAL = 0.5
+        REALISTIC_DELAY = 0.01
+        call.start(INTERVAL)
+
+        # Initially, no seconds should have passed, and one call should have
+        # been made.
+        self.assertEqual(times, [(0, 1)])
+
+        # After the interval (plus a small delay, to account for the time that
+        # the reactor takes to wake up and process the LoopingCall), we should
+        # still have only made one call.
+        clock.advance(INTERVAL + REALISTIC_DELAY)
+        self.assertEqual(times, [(0, 1), (INTERVAL + REALISTIC_DELAY, 1)])
+
+        # After advancing the clock by three intervals (plus a small delay to
+        # account for the reactor), we should have skipped two calls; one less
+        # than the number of intervals which have completely elapsed. Along
+        # with the call we did actually make, the final number of calls is 3.
+        clock.advance((3 * INTERVAL) + REALISTIC_DELAY)
+        self.assertEqual(times,
+                         [(0, 1), (INTERVAL + REALISTIC_DELAY, 1),
+                          ((4 * INTERVAL) + (2 * REALISTIC_DELAY), 3)])
+
+        # Advancing the clock by 0 seconds should not cause any changes!
+        clock.advance(0)
+        self.assertEqual(times,
+                         [(0, 1), (INTERVAL + REALISTIC_DELAY, 1),
+                          ((4 * INTERVAL) + (2 * REALISTIC_DELAY), 3)])
+
+
+    def test_countLengthyIntervalCounts(self):
+        """
+        L{LoopingCall.withCount} counts only calls that were expected to be
+        made.  So, if more than one, but less than two intervals pass between
+        invocations, it won't increase the count above 1.  For example, a
+        L{LoopingCall} with interval T expects to be invoked at T, 2T, 3T, etc.
+        However, the reactor takes some time to get around to calling it, so in
+        practice it will be called at T+something, 2T+something, 3T+something;
+        and due to other things going on in the reactor, "something" is
+        variable.  It won't increase the count unless "something" is greater
+        than T.  So if the L{LoopingCall} is invoked at T, 2.75T, and 3T,
+        the count has not increased, even though the distance between
+        invocation 1 and invocation 2 is 1.75T.
+        """
+        times = []
+        clock = task.Clock()
+        def aCallback(count):
+            times.append((clock.seconds(), count))
+
+        # Start a LoopingCall that tracks the time passed, and the number of
+        # calls, with a 0.5 second increment.
+        call = task.LoopingCall.withCount(aCallback)
+        call.clock = clock
+        INTERVAL = 0.5
+        REALISTIC_DELAY = 0.01
+        call.start(INTERVAL)
+        self.assertEqual(times.pop(), (0, 1))
+
+        # About one interval... So far, so good
+        clock.advance(INTERVAL + REALISTIC_DELAY)
+        self.assertEqual(times.pop(), (INTERVAL + REALISTIC_DELAY, 1))
+
+        # Oh no, something delayed us for a while.
+        clock.advance(INTERVAL * 1.75)
+        self.assertEqual(times.pop(), ((2.75 * INTERVAL) + REALISTIC_DELAY, 1))
+
+        # Back on track!  We got invoked when we expected this time.
+        clock.advance(INTERVAL * 0.25)
+        self.assertEqual(times.pop(), ((3.0 * INTERVAL) + REALISTIC_DELAY, 1))
+
+
+    def testBasicFunction(self):
+        # Arrange to have time advanced enough so that our function is
+        # called a few times.
+        # Only need to advance to 0.25 to get 3 calls, since the first call
+        # happens before any time has elapsed.
+        timings = [0.05, 0.1, 0.1]
+
+        clock = task.Clock()
+
+        L = []
+        def foo(a, b, c=None, d=None):
+            L.append((a, b, c, d))
+
+        lc = TestableLoopingCall(clock, foo, "a", "b", d="d")
+        D = lc.start(0.1)
+
+        theResult = []
+        def saveResult(result):
+            theResult.append(result)
+        D.addCallback(saveResult)
+
+        clock.pump(timings)
+
+        self.assertEqual(len(L), 3,
+                          "got %d iterations, not 3" % (len(L),))
+
+        for (a, b, c, d) in L:
+            self.assertEqual(a, "a")
+            self.assertEqual(b, "b")
+            self.assertEqual(c, None)
+            self.assertEqual(d, "d")
+
+        lc.stop()
+        self.assertIdentical(theResult[0], lc)
+
+        # Make sure it isn't planning to do anything further.
+        self.failIf(clock.calls)
+
+
+    def testDelayedStart(self):
+        timings = [0.05, 0.1, 0.1]
+
+        clock = task.Clock()
+
+        L = []
+        lc = TestableLoopingCall(clock, L.append, None)
+        d = lc.start(0.1, now=False)
+
+        theResult = []
+        def saveResult(result):
+            theResult.append(result)
+        d.addCallback(saveResult)
+
+        clock.pump(timings)
+
+        self.assertEqual(len(L), 2,
+                          "got %d iterations, not 2" % (len(L),))
+        lc.stop()
+        self.assertIdentical(theResult[0], lc)
+
+        self.failIf(clock.calls)
+
+
+    def testBadDelay(self):
+        lc = task.LoopingCall(lambda: None)
+        self.assertRaises(ValueError, lc.start, -1)
+
+
+    # Make sure that LoopingCall.stop() prevents any subsequent calls.
+    def _stoppingTest(self, delay):
+        ran = []
+        def foo():
+            ran.append(None)
+
+        clock = task.Clock()
+        lc = TestableLoopingCall(clock, foo)
+        lc.start(delay, now=False)
+        lc.stop()
+        self.failIf(ran)
+        self.failIf(clock.calls)
+
+
+    def testStopAtOnce(self):
+        return self._stoppingTest(0)
+
+
+    def testStoppingBeforeDelayedStart(self):
+        return self._stoppingTest(10)
+
+
+    def test_reset(self):
+        """
+        Test that L{LoopingCall} can be reset.
+        """
+        ran = []
+        def foo():
+            ran.append(None)
+
+        c = task.Clock()
+        lc = TestableLoopingCall(c, foo)
+        lc.start(2, now=False)
+        c.advance(1)
+        lc.reset()
+        c.advance(1)
+        self.assertEqual(ran, [])
+        c.advance(1)
+        self.assertEqual(ran, [None])
+
+
+    def test_reprFunction(self):
+        """
+        L{LoopingCall.__repr__} includes the wrapped function's name.
+        """
+        self.assertEqual(repr(task.LoopingCall(installReactor, 1, key=2)),
+                         "LoopingCall<None>(installReactor, *(1,), **{'key': 2})")
+
+
+    def test_reprMethod(self):
+        """
+        L{LoopingCall.__repr__} includes the wrapped method's full name.
+        """
+        self.assertEqual(
+            repr(task.LoopingCall(TestableLoopingCall.__init__)),
+            "LoopingCall<None>(TestableLoopingCall.__init__, *(), **{})")
+
+
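+# An illustrative sketch (hypothetical, not part of this test suite): the
+# pattern the tests above rely on -- driving a LoopingCall from a
+# deterministic task.Clock instead of the real reactor.  The function name
+# is made up and nothing calls it.
+def _loopingCallClockSketch():
+    from twisted.internet import task
+
+    clock = task.Clock()
+    seen = []
+    call = task.LoopingCall(lambda: seen.append(clock.seconds()))
+    call.clock = clock     # substitute the fake clock for the reactor
+    call.start(0.5)        # the first call happens immediately: seen == [0]
+    clock.advance(2)       # the pending call fires once at t=2; the loop then
+                           # reschedules itself for the next future interval
+    call.stop()            # fires the deferred returned by start()
+    return seen            # [0, 2], as test_reactorTimeSkips asserts
+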
+
+class ReactorLoopTestCase(unittest.TestCase):
+    # Slightly inferior tests which exercise interactions with an actual
+    # reactor.
+    def testFailure(self):
+        def foo(x):
+            raise TestException(x)
+
+        lc = task.LoopingCall(foo, "bar")
+        return self.assertFailure(lc.start(0.1), TestException)
+
+
+    def testFailAndStop(self):
+        def foo(x):
+            lc.stop()
+            raise TestException(x)
+
+        lc = task.LoopingCall(foo, "bar")
+        return self.assertFailure(lc.start(0.1), TestException)
+
+
+    def testEveryIteration(self):
+        ran = []
+
+        def foo():
+            ran.append(None)
+            if len(ran) > 5:
+                lc.stop()
+
+        lc = task.LoopingCall(foo)
+        d = lc.start(0)
+        def stopped(ign):
+            self.assertEqual(len(ran), 6)
+        return d.addCallback(stopped)
+
+
+    def testStopAtOnceLater(self):
+        # Ensure that even when LoopingCall.stop() is called from a
+        # reactor callback, it still prevents any subsequent calls.
+        d = defer.Deferred()
+        def foo():
+            d.errback(failure.DefaultException(
+                "This task also should never get called."))
+        self._lc = task.LoopingCall(foo)
+        self._lc.start(1, now=False)
+        reactor.callLater(0, self._callback_for_testStopAtOnceLater, d)
+        return d
+
+
+    def _callback_for_testStopAtOnceLater(self, d):
+        self._lc.stop()
+        reactor.callLater(0, d.callback, "success")
+
+    def testWaitDeferred(self):
+        # Test that the callable isn't scheduled again before the deferred
+        # it returned has fired.
+        timings = [0.2, 0.8]
+        clock = task.Clock()
+
+        def foo():
+            d = defer.Deferred()
+            d.addCallback(lambda _: lc.stop())
+            clock.callLater(1, d.callback, None)
+            return d
+
+        lc = TestableLoopingCall(clock, foo)
+        lc.start(0.2)
+        clock.pump(timings)
+        self.failIf(clock.calls)
+
+    def testFailurePropagation(self):
+        # Test that a failure in the deferred returned by the callable is
+        # propagated to the errback of the deferred returned by lc.start().
+        #
+        # To make sure this test does not hang trial when LoopingCall does not
+        # wait for the callable's deferred, it also checks there are no
+        # calls in the clock's callLater queue.
+        timings = [0.3]
+        clock = task.Clock()
+
+        def foo():
+            d = defer.Deferred()
+            clock.callLater(0.3, d.errback, TestException())
+            return d
+
+        lc = TestableLoopingCall(clock, foo)
+        d = lc.start(1)
+        self.assertFailure(d, TestException)
+
+        clock.pump(timings)
+        self.failIf(clock.calls)
+        return d
+
+
+    def test_deferredWithCount(self):
+        """
+        If the function passed to L{LoopingCall.withCount} returns a deferred
+        which does not fire before the next interval elapses, the function
+        should not be run again until that deferred fires.  When calls are
+        skipped in this fashion, the appropriate count should be provided on
+        the next invocation.
+        """
+        testClock = task.Clock()
+        d = defer.Deferred()
+        deferredCounts = []
+
+        def countTracker(possibleCount):
+            # Keep a list of call counts
+            deferredCounts.append(possibleCount)
+            # Return a deferred, but only on the first request
+            if len(deferredCounts) == 1:
+                return d
+            else:
+                return None
+
+        # Start a looping call for our countTracker function
+        # Set the increment to 0.2, and do not call the function on startup.
+        lc = task.LoopingCall.withCount(countTracker)
+        lc.clock = testClock
+        d = lc.start(0.2, now=False)
+
+        # Confirm that nothing has happened yet.
+        self.assertEqual(deferredCounts, [])
+
+        # Advance the clock by 0.2 and then 0.4;
+        testClock.pump([0.2, 0.4])
+        # We should now have exactly one count (of 1 call)
+        self.assertEqual(len(deferredCounts), 1)
+
+        # Fire the deferred, and advance the clock by another 0.2
+        d.callback(None)
+        testClock.pump([0.2])
+        # We should now have exactly 2 counts...
+        self.assertEqual(len(deferredCounts), 2)
+        # The first count should be 1 (one call)
+        # The second count should be 3 (calls were missed at about 0.6 and 0.8)
+        self.assertEqual(deferredCounts, [1, 3])
+
+
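+# An illustrative sketch (hypothetical, not part of this test suite) of
+# LoopingCall.withCount, mirroring the values asserted in
+# LoopTestCase.test_reactorTimeCountSkips above.  The function name is made
+# up and nothing calls it.
+def _withCountSketch():
+    from twisted.internet import task
+
+    clock = task.Clock()
+    counts = []
+    call = task.LoopingCall.withCount(counts.append)
+    call.clock = clock
+    call.start(0.5)        # immediate call -> counts == [1]
+    clock.advance(0.51)    # one interval elapsed -> called with count 1
+    clock.advance(1.51)    # three scheduled slots elapsed -> called with 3
+    call.stop()
+    return counts          # [1, 1, 3]
+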
+
+class DeferLaterTests(unittest.TestCase):
+    """
+    Tests for L{task.deferLater}.
+    """
+    def test_callback(self):
+        """
+        The L{Deferred} returned by L{task.deferLater} is called back after
+        the specified delay with the result of the function passed in.
+        """
+        results = []
+        flag = object()
+        def callable(foo, bar):
+            results.append((foo, bar))
+            return flag
+
+        clock = task.Clock()
+        d = task.deferLater(clock, 3, callable, 'foo', bar='bar')
+        d.addCallback(self.assertIdentical, flag)
+        clock.advance(2)
+        self.assertEqual(results, [])
+        clock.advance(1)
+        self.assertEqual(results, [('foo', 'bar')])
+        return d
+
+
+    def test_errback(self):
+        """
+        The L{Deferred} returned by L{task.deferLater} is errbacked if the
+        supplied function raises an exception.
+        """
+        def callable():
+            raise TestException()
+
+        clock = task.Clock()
+        d = task.deferLater(clock, 1, callable)
+        clock.advance(1)
+        return self.assertFailure(d, TestException)
+
+
+    def test_cancel(self):
+        """
+        The L{Deferred} returned by L{task.deferLater} can be
+        cancelled to prevent the call from actually being performed.
+        """
+        called = []
+        clock = task.Clock()
+        d = task.deferLater(clock, 1, called.append, None)
+        d.cancel()
+        def cbCancelled(ignored):
+            # Make sure there are no calls outstanding.
+            self.assertEqual([], clock.getDelayedCalls())
+            # And make sure the call didn't somehow happen already.
+            self.assertFalse(called)
+        self.assertFailure(d, defer.CancelledError)
+        d.addCallback(cbCancelled)
+        return d
+
+
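+# A minimal sketch (hypothetical, not part of this test suite) of
+# task.deferLater as exercised by DeferLaterTests above.  The function name
+# is made up and nothing calls it.
+def _deferLaterSketch():
+    from twisted.internet import task
+
+    clock = task.Clock()
+    results = []
+    d = task.deferLater(clock, 3, lambda x: x * 2, 21)
+    d.addCallback(results.append)
+    clock.advance(3)       # the deferred now fires with the callable's result
+    return results         # [42]
+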
+
+class _FakeReactor(object):
+
+    def __init__(self):
+        self._running = False
+        self._clock = task.Clock()
+        self.callLater = self._clock.callLater
+        self.seconds = self._clock.seconds
+        self.getDelayedCalls = self._clock.getDelayedCalls
+        self._whenRunning = []
+        self._shutdownTriggers = {'before': [], 'during': []}
+
+
+    def callWhenRunning(self, callable, *args, **kwargs):
+        if self._whenRunning is None:
+            callable(*args, **kwargs)
+        else:
+            self._whenRunning.append((callable, args, kwargs))
+
+
+    def addSystemEventTrigger(self, phase, event, callable, *args):
+        assert phase in ('before', 'during')
+        assert event == 'shutdown'
+        self._shutdownTriggers[phase].append((callable, args))
+
+
+    def run(self):
+        """
+        Call timed events until there are no more or the reactor is stopped.
+
+        @raise RuntimeError: When no timed events are left and the reactor is
+            still running.
+        """
+        self._running = True
+        whenRunning = self._whenRunning
+        self._whenRunning = None
+        for callable, args, kwargs in whenRunning:
+            callable(*args, **kwargs)
+        while self._running:
+            calls = self.getDelayedCalls()
+            if not calls:
+                raise RuntimeError("No DelayedCalls left")
+            self._clock.advance(calls[0].getTime() - self.seconds())
+        shutdownTriggers = self._shutdownTriggers
+        self._shutdownTriggers = None
+        for (trigger, args) in shutdownTriggers['before'] + shutdownTriggers['during']:
+            trigger(*args)
+
+
+    def stop(self):
+        """
+        Stop the reactor.
+        """
+        if not self._running:
+            raise error.ReactorNotRunning()
+        self._running = False
+
+
+
+class ReactTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{twisted.internet.task.react}.
+    """
+
+    def test_runsUntilAsyncCallback(self):
+        """
+        L{task.react} runs the reactor until the L{Deferred} returned by the
+        function it is passed is called back, then stops it.
+        """
+        timePassed = []
+        def main(reactor):
+            finished = defer.Deferred()
+            reactor.callLater(1, timePassed.append, True)
+            reactor.callLater(2, finished.callback, None)
+            return finished
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+        self.assertEqual(0, exitError.code)
+        self.assertEqual(timePassed, [True])
+        self.assertEqual(r.seconds(), 2)
+
+
+    def test_runsUntilSyncCallback(self):
+        """
+        L{task.react} returns quickly if the L{Deferred} returned by the
+        function it is passed has already been called back at the time it is
+        returned.
+        """
+        def main(reactor):
+            return defer.succeed(None)
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+        self.assertEqual(0, exitError.code)
+        self.assertEqual(r.seconds(), 0)
+
+
+    def test_runsUntilAsyncErrback(self):
+        """
+        L{task.react} runs the reactor until the L{defer.Deferred} returned by
+        the function it is passed is errbacked, then it stops the reactor and
+        reports the error.
+        """
+        class ExpectedException(Exception):
+            pass
+
+        def main(reactor):
+            finished = defer.Deferred()
+            reactor.callLater(1, finished.errback, ExpectedException())
+            return finished
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+
+        self.assertEqual(1, exitError.code)
+
+        errors = self.flushLoggedErrors(ExpectedException)
+        self.assertEqual(len(errors), 1)
+
+
+    def test_runsUntilSyncErrback(self):
+        """
+        L{task.react} returns quickly if the L{defer.Deferred} returned by the
+        function it is passed has already been errbacked at the time it is
+        returned.
+        """
+        class ExpectedException(Exception):
+            pass
+
+        def main(reactor):
+            return defer.fail(ExpectedException())
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+        self.assertEqual(1, exitError.code)
+        self.assertEqual(r.seconds(), 0)
+        errors = self.flushLoggedErrors(ExpectedException)
+        self.assertEqual(len(errors), 1)
+
+
+    def test_singleStopCallback(self):
+        """
+        L{task.react} doesn't try to stop the reactor if the L{defer.Deferred}
+        returned by the function it is passed is called back after the reactor
+        has already been stopped.
+        """
+        def main(reactor):
+            reactor.callLater(1, reactor.stop)
+            finished = defer.Deferred()
+            reactor.addSystemEventTrigger(
+                'during', 'shutdown', finished.callback, None)
+            return finished
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+        self.assertEqual(r.seconds(), 1)
+
+        self.assertEqual(0, exitError.code)
+
+
+    def test_singleStopErrback(self):
+        """
+        L{task.react} doesn't try to stop the reactor if the L{defer.Deferred}
+        returned by the function it is passed is errbacked after the reactor
+        has already been stopped.
+        """
+        class ExpectedException(Exception):
+            pass
+
+        def main(reactor):
+            reactor.callLater(1, reactor.stop)
+            finished = defer.Deferred()
+            reactor.addSystemEventTrigger(
+                'during', 'shutdown', finished.errback, ExpectedException())
+            return finished
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+
+        self.assertEqual(1, exitError.code)
+
+        self.assertEqual(r.seconds(), 1)
+        errors = self.flushLoggedErrors(ExpectedException)
+        self.assertEqual(len(errors), 1)
+
+
+    def test_arguments(self):
+        """
+        L{task.react} passes the elements of the list it is passed as
+        positional arguments to the function it is passed.
+        """
+        args = []
+        def main(reactor, x, y, z):
+            args.extend((x, y, z))
+            return defer.succeed(None)
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [1, 2, 3], _reactor=r)
+        self.assertEqual(0, exitError.code)
+        self.assertEqual(args, [1, 2, 3])
+
+
+    def test_defaultReactor(self):
+        """
+        L{twisted.internet.reactor} is used if no reactor argument is passed to
+        L{task.react}.
+        """
+        def main(reactor):
+            self.passedReactor = reactor
+            return defer.succeed(None)
+
+        reactor = _FakeReactor()
+        with NoReactor():
+            installReactor(reactor)
+            exitError = self.assertRaises(SystemExit, task.react, main, [])
+            self.assertEqual(0, exitError.code)
+        self.assertIdentical(reactor, self.passedReactor)
+
+
+    def test_exitWithDefinedCode(self):
+        """
+        L{task.react} forwards the exit code specified by the C{SystemExit}
+        error returned by the passed function, if any.
+        """
+        def main(reactor):
+            return defer.fail(SystemExit(23))
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+        self.assertEqual(23, exitError.code)
+
+
+    def test_synchronousStop(self):
+        """
+        L{task.react} handles when the reactor is stopped just before the
+        returned L{Deferred} fires.
+        """
+        def main(reactor):
+            d = defer.Deferred()
+            def stop():
+                reactor.stop()
+                d.callback(None)
+            reactor.callWhenRunning(stop)
+            return d
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+        self.assertEqual(0, exitError.code)
+
+
+    def test_asynchronousStop(self):
+        """
+        L{task.react} handles when the reactor is stopped and the
+        returned L{Deferred} doesn't fire.
+        """
+        def main(reactor):
+            reactor.callLater(1, reactor.stop)
+            return defer.Deferred()
+        r = _FakeReactor()
+        exitError = self.assertRaises(
+            SystemExit, task.react, main, [], _reactor=r)
+        self.assertEqual(0, exitError.code)
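+
+
+# An illustrative sketch (hypothetical, not part of this test suite) of the
+# task.react entry point that the ReactTests class above exercises.  It is
+# only defined, never called: calling it would run a real reactor and then
+# raise SystemExit with the exit code.
+def _reactSketch():
+    from twisted.internet import task, defer
+
+    def main(reactor, greeting):
+        # react() runs the reactor until this deferred fires, stops it, and
+        # exits with code 0 on success or 1 on an unhandled failure.
+        return defer.succeed(greeting)
+
+    task.react(main, ["hello"])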
diff --git a/ThirdParty/Twisted/twisted/test/test_tcp.py b/ThirdParty/Twisted/twisted/test/test_tcp.py
new file mode 100644
index 0000000..23d6ce9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_tcp.py
@@ -0,0 +1,1829 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorTCP}.
+"""
+
+from __future__ import division, absolute_import
+
+import socket, random, errno
+from functools import wraps
+
+from zope.interface import implementer
+
+from twisted.trial import unittest
+
+from twisted.python.log import msg
+from twisted.internet import protocol, reactor, defer, interfaces
+from twisted.internet import error
+from twisted.internet.address import IPv4Address
+from twisted.internet.interfaces import IHalfCloseableProtocol, IPullProducer
+from twisted.protocols import policies
+from twisted.test.proto_helpers import AccumulatingProtocol
+
+
+def loopUntil(predicate, interval=0):
+    """
+    Poor excuse for an event notification helper.  This polls a condition and
+    calls back a Deferred when it is seen to be true.
+
+    Do not use this function.
+    """
+    from twisted.internet import task
+    d = defer.Deferred()
+    def check():
+        res = predicate()
+        if res:
+            d.callback(res)
+    call = task.LoopingCall(check)
+    def stop(result):
+        call.stop()
+        return result
+    d.addCallback(stop)
+    d2 = call.start(interval)
+    d2.addErrback(d.errback)
+    return d
+
+
+
+class ClosingProtocol(protocol.Protocol):
+
+    def connectionMade(self):
+        msg("ClosingProtocol.connectionMade")
+        self.transport.loseConnection()
+
+    def connectionLost(self, reason):
+        msg("ClosingProtocol.connectionLost")
+        reason.trap(error.ConnectionDone)
+
+
+
+class ClosingFactory(protocol.ServerFactory):
+    """
+    Factory that closes port immediately.
+    """
+
+    _cleanerUpper = None
+
+    def buildProtocol(self, conn):
+        self._cleanerUpper = self.port.stopListening()
+        return ClosingProtocol()
+
+
+    def cleanUp(self):
+        """
+        Clean-up for tests to wait for the port to stop listening.
+        """
+        if self._cleanerUpper is None:
+            return self.port.stopListening()
+        return self._cleanerUpper
+
+
+
+class MyProtocolFactoryMixin(object):
+    """
+    Mixin for factories which create L{AccumulatingProtocol} instances.
+
+    @type protocolFactory: no-argument callable
+    @ivar protocolFactory: Factory for protocols - takes the place of the
+        typical C{protocol} attribute of factories (but that name is used by
+        this class for something else).
+
+    @type protocolConnectionMade: L{NoneType} or L{defer.Deferred}
+    @ivar protocolConnectionMade: When an instance of L{AccumulatingProtocol}
+        is connected, if this is not C{None}, the L{Deferred} will be called
+        back with the protocol instance and the attribute set to C{None}.
+
+    @type protocolConnectionLost: L{NoneType} or L{defer.Deferred}
+    @ivar protocolConnectionLost: When an instance of L{AccumulatingProtocol}
+        is created, this will be set as its C{closedDeferred} attribute and
+        then this attribute will be set to C{None} so the L{defer.Deferred} is
+        not used by more than one protocol.
+
+    @ivar protocol: The most recently created L{AccumulatingProtocol} instance
+        which was returned from C{buildProtocol}.
+
+    @type called: C{int}
+    @ivar called: A counter which is incremented each time C{buildProtocol}
+        is called.
+
+    @ivar peerAddresses: A C{list} of the addresses passed to C{buildProtocol}.
+    """
+    protocolFactory = AccumulatingProtocol
+
+    protocolConnectionMade = None
+    protocolConnectionLost = None
+    protocol = None
+    called = 0
+
+    def __init__(self):
+        self.peerAddresses = []
+
+
+    def buildProtocol(self, addr):
+        """
+        Create a L{AccumulatingProtocol} and set it up to be able to perform
+        callbacks.
+        """
+        self.peerAddresses.append(addr)
+        self.called += 1
+        p = self.protocolFactory()
+        p.factory = self
+        p.closedDeferred = self.protocolConnectionLost
+        self.protocolConnectionLost = None
+        self.protocol = p
+        return p
+
+
+
+class MyServerFactory(MyProtocolFactoryMixin, protocol.ServerFactory):
+    """
+    Server factory which creates L{AccumulatingProtocol} instances.
+    """
+
+
+
+class MyClientFactory(MyProtocolFactoryMixin, protocol.ClientFactory):
+    """
+    Client factory which creates L{AccumulatingProtocol} instances.
+    """
+    failed = 0
+    stopped = 0
+
+    def __init__(self):
+        MyProtocolFactoryMixin.__init__(self)
+        self.deferred = defer.Deferred()
+        self.failDeferred = defer.Deferred()
+
+    def clientConnectionFailed(self, connector, reason):
+        self.failed = 1
+        self.reason = reason
+        self.failDeferred.callback(None)
+
+    def clientConnectionLost(self, connector, reason):
+        self.lostReason = reason
+        self.deferred.callback(None)
+
+    def stopFactory(self):
+        self.stopped = 1
+
+
+
+class ListeningTestCase(unittest.TestCase):
+
+    def test_listen(self):
+        """
+        L{IReactorTCP.listenTCP} returns an object which provides
+        L{IListeningPort}.
+        """
+        f = MyServerFactory()
+        p1 = reactor.listenTCP(0, f, interface="127.0.0.1")
+        self.addCleanup(p1.stopListening)
+        self.failUnless(interfaces.IListeningPort.providedBy(p1))
+
+
+    def testStopListening(self):
+        """
+        The L{IListeningPort} returned by L{IReactorTCP.listenTCP} can be
+        stopped with its C{stopListening} method.  After the L{Deferred} it
+        (optionally) returns has been called back, the port number can be bound
+        to a new server.
+        """
+        f = MyServerFactory()
+        port = reactor.listenTCP(0, f, interface="127.0.0.1")
+        n = port.getHost().port
+
+        def cbStopListening(ignored):
+            # Make sure we can rebind the port right away
+            port = reactor.listenTCP(n, f, interface="127.0.0.1")
+            return port.stopListening()
+
+        d = defer.maybeDeferred(port.stopListening)
+        d.addCallback(cbStopListening)
+        return d
+
+
+    def testNumberedInterface(self):
+        f = MyServerFactory()
+        # listen only on the loopback interface
+        p1 = reactor.listenTCP(0, f, interface='127.0.0.1')
+        return p1.stopListening()
+
+    def testPortRepr(self):
+        f = MyServerFactory()
+        p = reactor.listenTCP(0, f)
+        portNo = str(p.getHost().port)
+        self.failIf(repr(p).find(portNo) == -1)
+        def stoppedListening(ign):
+            self.failIf(repr(p).find(portNo) != -1)
+        d = defer.maybeDeferred(p.stopListening)
+        return d.addCallback(stoppedListening)
+
+
+    def test_serverRepr(self):
+        """
+        Check that the repr string of the server transport contains the
+        actual port number when the server listens on port 0.
+        """
+        server = MyServerFactory()
+        serverConnMade = server.protocolConnectionMade = defer.Deferred()
+        port = reactor.listenTCP(0, server)
+        self.addCleanup(port.stopListening)
+
+        client = MyClientFactory()
+        clientConnMade = client.protocolConnectionMade = defer.Deferred()
+        connector = reactor.connectTCP("127.0.0.1",
+                                       port.getHost().port, client)
+        self.addCleanup(connector.disconnect)
+        def check(result):
+            serverProto, clientProto = result
+            portNumber = port.getHost().port
+            self.assertEqual(
+                repr(serverProto.transport),
+                "<AccumulatingProtocol #0 on %s>" % (portNumber,))
+            serverProto.transport.loseConnection()
+            clientProto.transport.loseConnection()
+        return defer.gatherResults([serverConnMade, clientConnMade]
+            ).addCallback(check)
+
+
+    def test_restartListening(self):
+        """
+        Stop and then try to restart a L{tcp.Port}: after a restart, the
+        server should be able to handle client connections.
+        """
+        serverFactory = MyServerFactory()
+        port = reactor.listenTCP(0, serverFactory, interface="127.0.0.1")
+        self.addCleanup(port.stopListening)
+
+        def cbStopListening(ignored):
+            port.startListening()
+
+            client = MyClientFactory()
+            serverFactory.protocolConnectionMade = defer.Deferred()
+            client.protocolConnectionMade = defer.Deferred()
+            connector = reactor.connectTCP("127.0.0.1",
+                                           port.getHost().port, client)
+            self.addCleanup(connector.disconnect)
+            return defer.gatherResults([serverFactory.protocolConnectionMade,
+                                        client.protocolConnectionMade]
+                ).addCallback(close)
+
+        def close(result):
+            serverProto, clientProto = result
+            clientProto.transport.loseConnection()
+            serverProto.transport.loseConnection()
+
+        d = defer.maybeDeferred(port.stopListening)
+        d.addCallback(cbStopListening)
+        return d
+
+
+    def test_exceptInStop(self):
+        """
+        If the server factory raises an exception in C{stopFactory}, the
+        deferred returned by L{tcp.Port.stopListening} should fail with the
+        corresponding error.
+        """
+        serverFactory = MyServerFactory()
+        def raiseException():
+            raise RuntimeError("An error")
+        serverFactory.stopFactory = raiseException
+        port = reactor.listenTCP(0, serverFactory, interface="127.0.0.1")
+
+        return self.assertFailure(port.stopListening(), RuntimeError)
+
+
+    def test_restartAfterExcept(self):
+        """
+        Even if the server factory raises an exception in C{stopFactory}, the
+        corresponding C{tcp.Port} instance should remain in a sane state and
+        be restartable.
+        """
+        serverFactory = MyServerFactory()
+        def raiseException():
+            raise RuntimeError("An error")
+        serverFactory.stopFactory = raiseException
+        port = reactor.listenTCP(0, serverFactory, interface="127.0.0.1")
+        self.addCleanup(port.stopListening)
+
+        def cbStopListening(ignored):
+            del serverFactory.stopFactory
+            port.startListening()
+
+            client = MyClientFactory()
+            serverFactory.protocolConnectionMade = defer.Deferred()
+            client.protocolConnectionMade = defer.Deferred()
+            connector = reactor.connectTCP("127.0.0.1",
+                                           port.getHost().port, client)
+            self.addCleanup(connector.disconnect)
+            return defer.gatherResults([serverFactory.protocolConnectionMade,
+                                        client.protocolConnectionMade]
+                ).addCallback(close)
+
+        def close(result):
+            serverProto, clientProto = result
+            clientProto.transport.loseConnection()
+            serverProto.transport.loseConnection()
+
+        return self.assertFailure(port.stopListening(), RuntimeError
+            ).addCallback(cbStopListening)
+
+
+    def test_directConnectionLostCall(self):
+        """
+        If C{connectionLost} is called directly on a port object, it succeeds
+        (and doesn't expect the presence of a C{deferred} attribute).
+
+        C{connectionLost} is called by L{reactor.disconnectAll} at shutdown.
+        """
+        serverFactory = MyServerFactory()
+        port = reactor.listenTCP(0, serverFactory, interface="127.0.0.1")
+        portNumber = port.getHost().port
+        port.connectionLost(None)
+
+        client = MyClientFactory()
+        serverFactory.protocolConnectionMade = defer.Deferred()
+        client.protocolConnectionMade = defer.Deferred()
+        reactor.connectTCP("127.0.0.1", portNumber, client)
+        def check(ign):
+            client.reason.trap(error.ConnectionRefusedError)
+        return client.failDeferred.addCallback(check)
+
+
+    def test_exceptInConnectionLostCall(self):
+        """
+        If C{connectionLost} is called directly on a port object and the
+        server factory raises an exception in C{stopFactory}, the exception is
+        passed through to the caller.
+
+        C{connectionLost} is called by L{reactor.disconnectAll} at shutdown.
+        """
+        serverFactory = MyServerFactory()
+        def raiseException():
+            raise RuntimeError("An error")
+        serverFactory.stopFactory = raiseException
+        port = reactor.listenTCP(0, serverFactory, interface="127.0.0.1")
+        self.assertRaises(RuntimeError, port.connectionLost, None)
+
+
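+# A minimal sketch (hypothetical, not part of this test suite) of the
+# listenTCP lifecycle that ListeningTestCase above exercises: bind an
+# ephemeral port, read back the real port number, then stop listening.
+# The function name is made up and nothing calls it.
+def _listenSketch():
+    from twisted.internet import protocol, reactor
+
+    factory = protocol.ServerFactory()
+    factory.protocol = protocol.Protocol
+    # Port 0 asks the OS for any free port; getHost() reveals which one.
+    port = reactor.listenTCP(0, factory, interface="127.0.0.1")
+    boundPort = port.getHost().port
+    # stopListening() may return a Deferred that fires once the port is
+    # really closed and can be rebound.
+    d = port.stopListening()
+    return boundPort, d
+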
+
+def callWithSpew(f):
+    from twisted.python.util import spewerWithLinenums as spewer
+    import sys
+    sys.settrace(spewer)
+    try:
+        f()
+    finally:
+        sys.settrace(None)
+
+class LoopbackTestCase(unittest.TestCase):
+    """
+    Test loopback connections.
+    """
+    def test_closePortInProtocolFactory(self):
+        """
+        A port created with L{IReactorTCP.listenTCP} can be connected to with
+        L{IReactorTCP.connectTCP}.
+        """
+        f = ClosingFactory()
+        port = reactor.listenTCP(0, f, interface="127.0.0.1")
+        f.port = port
+        self.addCleanup(f.cleanUp)
+        portNumber = port.getHost().port
+        clientF = MyClientFactory()
+        reactor.connectTCP("127.0.0.1", portNumber, clientF)
+        def check(x):
+            self.assertTrue(clientF.protocol.made)
+            self.assertTrue(port.disconnected)
+            clientF.lostReason.trap(error.ConnectionDone)
+        return clientF.deferred.addCallback(check)
+
+    def _trapCnxDone(self, obj):
+        getattr(obj, 'trap', lambda x: None)(error.ConnectionDone)
+
+
+    def _connectedClientAndServerTest(self, callback):
+        """
+        Invoke the given callback with a client protocol and a server protocol
+        which have been connected to each other.
+        """
+        serverFactory = MyServerFactory()
+        serverConnMade = defer.Deferred()
+        serverFactory.protocolConnectionMade = serverConnMade
+        port = reactor.listenTCP(0, serverFactory, interface="127.0.0.1")
+        self.addCleanup(port.stopListening)
+
+        portNumber = port.getHost().port
+        clientF = MyClientFactory()
+        clientConnMade = defer.Deferred()
+        clientF.protocolConnectionMade = clientConnMade
+        reactor.connectTCP("127.0.0.1", portNumber, clientF)
+
+        connsMade = defer.gatherResults([serverConnMade, clientConnMade])
+        def connected(result):
+            serverProtocol, clientProtocol = result
+            callback(serverProtocol, clientProtocol)
+            serverProtocol.transport.loseConnection()
+            clientProtocol.transport.loseConnection()
+        connsMade.addCallback(connected)
+        return connsMade
+
+
+    def test_tcpNoDelay(self):
+        """
+        The transport of a protocol connected with L{IReactorTCP.connectTCP} or
+        L{IReactorTCP.listenTCP} can have its I{TCP_NODELAY} state inspected
+        and manipulated with L{ITCPTransport.getTcpNoDelay} and
+        L{ITCPTransport.setTcpNoDelay}.
+        """
+        def check(serverProtocol, clientProtocol):
+            for p in [serverProtocol, clientProtocol]:
+                transport = p.transport
+                self.assertEqual(transport.getTcpNoDelay(), 0)
+                transport.setTcpNoDelay(1)
+                self.assertEqual(transport.getTcpNoDelay(), 1)
+                transport.setTcpNoDelay(0)
+                self.assertEqual(transport.getTcpNoDelay(), 0)
+        return self._connectedClientAndServerTest(check)
+
+
+    def test_tcpKeepAlive(self):
+        """
+        The transport of a protocol connected with L{IReactorTCP.connectTCP} or
+        L{IReactorTCP.listenTCP} can have its I{SO_KEEPALIVE} state inspected
+        and manipulated with L{ITCPTransport.getTcpKeepAlive} and
+        L{ITCPTransport.setTcpKeepAlive}.
+        """
+        def check(serverProtocol, clientProtocol):
+            for p in [serverProtocol, clientProtocol]:
+                transport = p.transport
+                self.assertEqual(transport.getTcpKeepAlive(), 0)
+                transport.setTcpKeepAlive(1)
+                self.assertEqual(transport.getTcpKeepAlive(), 1)
+                transport.setTcpKeepAlive(0)
+                self.assertEqual(transport.getTcpKeepAlive(), 0)
+        return self._connectedClientAndServerTest(check)
+
+
+    def testFailing(self):
+        clientF = MyClientFactory()
+        # XXX we assume no one is listening on TCP port 69
+        reactor.connectTCP("127.0.0.1", 69, clientF, timeout=5)
+        def check(ignored):
+            clientF.reason.trap(error.ConnectionRefusedError)
+        return clientF.failDeferred.addCallback(check)
+
+
+    def test_connectionRefusedErrorNumber(self):
+        """
+        Assert that the error number of the ConnectionRefusedError is
+        ECONNREFUSED, and not some other socket related error.
+        """
+
+        # Bind a number of ports in the operating system.  We will attempt
+        # to connect to these in turn immediately after closing them, in the
+        # hopes that no one else has bound them in the mean time.  Any
+        # connection which succeeds is ignored and causes us to move on to
+        # the next port.  As soon as a connection attempt fails, we move on
+        # to making an assertion about how it failed.  If they all succeed,
+        # the test will fail.
+
+        # It would be nice to have a simpler, reliable way to cause a
+        # connection failure from the platform.
+        #
+        # On Linux (2.6.15), connecting to port 0 always fails.  FreeBSD
+        # (5.4) rejects the connection attempt with EADDRNOTAVAIL.
+        #
+        # On FreeBSD (5.4), listening on a port and then repeatedly
+        # connecting to it without ever accepting any connections eventually
+        # leads to an ECONNREFUSED.  On Linux (2.6.15), a seemingly
+        # unbounded number of connections succeed.
+
+        serverSockets = []
+        for i in range(10):
+            serverSocket = socket.socket()
+            serverSocket.bind(('127.0.0.1', 0))
+            serverSocket.listen(1)
+            serverSockets.append(serverSocket)
+        random.shuffle(serverSockets)
+
+        clientCreator = protocol.ClientCreator(reactor, protocol.Protocol)
+
+        def tryConnectFailure():
+            def connected(proto):
+                """
+                Darn.  Kill it and try again, if there are any tries left.
+                """
+                proto.transport.loseConnection()
+                if serverSockets:
+                    return tryConnectFailure()
+                self.fail("Could not fail to connect - could not test errno for that case.")
+
+            serverSocket = serverSockets.pop()
+            serverHost, serverPort = serverSocket.getsockname()
+            serverSocket.close()
+
+            connectDeferred = clientCreator.connectTCP(serverHost, serverPort)
+            connectDeferred.addCallback(connected)
+            return connectDeferred
+
+        refusedDeferred = tryConnectFailure()
+        self.assertFailure(refusedDeferred, error.ConnectionRefusedError)
+        def connRefused(exc):
+            self.assertEqual(exc.osError, errno.ECONNREFUSED)
+        refusedDeferred.addCallback(connRefused)
+        def cleanup(passthrough):
+            while serverSockets:
+                serverSockets.pop().close()
+            return passthrough
+        refusedDeferred.addBoth(cleanup)
+        return refusedDeferred
+
+
+    def test_connectByServiceFail(self):
+        """
+        Connecting to a named service which does not exist raises
+        L{error.ServiceNameUnknownError}.
+        """
+        self.assertRaises(
+            error.ServiceNameUnknownError,
+            reactor.connectTCP,
+            "127.0.0.1", "thisbetternotexist", MyClientFactory())
+
+
+    def test_connectByService(self):
+        """
+        L{IReactorTCP.connectTCP} accepts the name of a service instead of a
+        port number and connects to the port number associated with that
+        service, as defined by L{socket.getservbyname}.
+        """
+        serverFactory = MyServerFactory()
+        serverConnMade = defer.Deferred()
+        serverFactory.protocolConnectionMade = serverConnMade
+        port = reactor.listenTCP(0, serverFactory, interface="127.0.0.1")
+        self.addCleanup(port.stopListening)
+        portNumber = port.getHost().port
+        clientFactory = MyClientFactory()
+        clientConnMade = defer.Deferred()
+        clientFactory.protocolConnectionMade = clientConnMade
+
+        def fakeGetServicePortByName(serviceName, protocolName):
+            if serviceName == 'http' and protocolName == 'tcp':
+                return portNumber
+            return 10
+        self.patch(socket, 'getservbyname', fakeGetServicePortByName)
+
+        reactor.connectTCP('127.0.0.1', 'http', clientFactory)
+
+        connMade = defer.gatherResults([serverConnMade, clientConnMade])
+        def connected(result):
+            serverProtocol, clientProtocol = result
+            self.assertTrue(
+                serverFactory.called,
+                "Server factory was not called upon to build a protocol.")
+            serverProtocol.transport.loseConnection()
+            clientProtocol.transport.loseConnection()
+        connMade.addCallback(connected)
+        return connMade
+
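+
+# An illustrative sketch (hypothetical, not part of this test suite) of the
+# loopback pairing used throughout LoopbackTestCase above: listen on an
+# ephemeral port, then connect to it with ClientCreator, whose connectTCP
+# returns a Deferred that fires with the connected protocol once the
+# reactor runs.  The function name is made up and nothing calls it.
+def _loopbackSketch():
+    from twisted.internet import protocol, reactor
+
+    server = protocol.ServerFactory()
+    server.protocol = protocol.Protocol
+    port = reactor.listenTCP(0, server, interface="127.0.0.1")
+
+    creator = protocol.ClientCreator(reactor, protocol.Protocol)
+    d = creator.connectTCP("127.0.0.1", port.getHost().port)
+    d.addCallback(lambda proto: proto.transport.loseConnection())
+    return port, d
+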
+
+class StartStopFactory(protocol.Factory):
+
+    started = 0
+    stopped = 0
+
+    def startFactory(self):
+        if self.started or self.stopped:
+            raise RuntimeError
+        self.started = 1
+
+    def stopFactory(self):
+        if not self.started or self.stopped:
+            raise RuntimeError
+        self.stopped = 1
+
+
+class ClientStartStopFactory(MyClientFactory):
+
+    started = 0
+    stopped = 0
+
+    def __init__(self, *a, **kw):
+        MyClientFactory.__init__(self, *a, **kw)
+        self.whenStopped = defer.Deferred()
+
+    def startFactory(self):
+        if self.started or self.stopped:
+            raise RuntimeError
+        self.started = 1
+
+    def stopFactory(self):
+        if not self.started or self.stopped:
+            raise RuntimeError
+        self.stopped = 1
+        self.whenStopped.callback(True)
+
+
+class FactoryTestCase(unittest.TestCase):
+    """Tests for factories."""
+
+    def test_serverStartStop(self):
+        """
+        The factory passed to L{IReactorTCP.listenTCP} should be started only
+        when it transitions from being used on no ports to being used on one
+        port and should be stopped only when it transitions from being used on
+        one port to being used on no ports.
+        """
+        # Note - this test doesn't need to use listenTCP.  It is exercising
+        # logic implemented in Factory.doStart and Factory.doStop, so it could
+        # just call that directly.  Some other test can make sure that
+        # listenTCP and stopListening correctly call doStart and
+        # doStop. -exarkun
+
+        f = StartStopFactory()
+
+        # listen on port
+        p1 = reactor.listenTCP(0, f, interface='127.0.0.1')
+        self.addCleanup(p1.stopListening)
+
+        self.assertEqual((f.started, f.stopped), (1, 0))
+
+        # listen on two more ports
+        p2 = reactor.listenTCP(0, f, interface='127.0.0.1')
+        p3 = reactor.listenTCP(0, f, interface='127.0.0.1')
+
+        self.assertEqual((f.started, f.stopped), (1, 0))
+
+        # close two ports
+        d1 = defer.maybeDeferred(p1.stopListening)
+        d2 = defer.maybeDeferred(p2.stopListening)
+        closedDeferred = defer.gatherResults([d1, d2])
+        def cbClosed(ignored):
+            self.assertEqual((f.started, f.stopped), (1, 0))
+            # Close the last port
+            return p3.stopListening()
+        closedDeferred.addCallback(cbClosed)
+
+        def cbClosedAll(ignored):
+            self.assertEqual((f.started, f.stopped), (1, 1))
+        closedDeferred.addCallback(cbClosedAll)
+        return closedDeferred
+
+
+    def test_clientStartStop(self):
+        """
+        The factory passed to L{IReactorTCP.connectTCP} should be started when
+        the connection attempt starts and stopped when it is over.
+        """
+        f = ClosingFactory()
+        p = reactor.listenTCP(0, f, interface="127.0.0.1")
+        f.port = p
+        self.addCleanup(f.cleanUp)
+        portNumber = p.getHost().port
+
+        factory = ClientStartStopFactory()
+        reactor.connectTCP("127.0.0.1", portNumber, factory)
+        self.assertTrue(factory.started)
+        return loopUntil(lambda: factory.stopped)
+
+
+
+class CannotBindTestCase(unittest.TestCase):
+    """
+    Tests for correct behavior when a reactor cannot bind to the required TCP
+    port.
+    """
+
+    def test_cannotBind(self):
+        """
+        L{IReactorTCP.listenTCP} raises L{error.CannotListenError} if the
+        address to listen on is already in use.
+        """
+        f = MyServerFactory()
+
+        p1 = reactor.listenTCP(0, f, interface='127.0.0.1')
+        self.addCleanup(p1.stopListening)
+        n = p1.getHost().port
+        dest = p1.getHost()
+        self.assertEqual(dest.type, "TCP")
+        self.assertEqual(dest.host, "127.0.0.1")
+        self.assertEqual(dest.port, n)
+
+        # make sure new listen raises error
+        self.assertRaises(error.CannotListenError,
+                          reactor.listenTCP, n, f, interface='127.0.0.1')
+
+
+
+    def _fireWhenDoneFunc(self, d, f):
+        """Returns closure that when called calls f and then callbacks d.
+        """
+        @wraps(f)
+        def newf(*args, **kw):
+            rtn = f(*args, **kw)
+            d.callback('')
+            return rtn
+        return newf
+
+
+    def test_clientBind(self):
+        """
+        L{IReactorTCP.connectTCP} calls C{Factory.clientConnectionFailed} with
+        L{error.ConnectBindError} if the bind address specified is already in
+        use.
+        """
+        theDeferred = defer.Deferred()
+        sf = MyServerFactory()
+        sf.startFactory = self._fireWhenDoneFunc(theDeferred, sf.startFactory)
+        p = reactor.listenTCP(0, sf, interface="127.0.0.1")
+        self.addCleanup(p.stopListening)
+
+        def _connect1(results):
+            d = defer.Deferred()
+            cf1 = MyClientFactory()
+            cf1.buildProtocol = self._fireWhenDoneFunc(d, cf1.buildProtocol)
+            reactor.connectTCP("127.0.0.1", p.getHost().port, cf1,
+                               bindAddress=("127.0.0.1", 0))
+            d.addCallback(_conmade, cf1)
+            return d
+
+        def _conmade(results, cf1):
+            d = defer.Deferred()
+            cf1.protocol.connectionMade = self._fireWhenDoneFunc(
+                d, cf1.protocol.connectionMade)
+            d.addCallback(_check1connect2, cf1)
+            return d
+
+        def _check1connect2(results, cf1):
+            self.assertEqual(cf1.protocol.made, 1)
+
+            d1 = defer.Deferred()
+            d2 = defer.Deferred()
+            port = cf1.protocol.transport.getHost().port
+            cf2 = MyClientFactory()
+            cf2.clientConnectionFailed = self._fireWhenDoneFunc(
+                d1, cf2.clientConnectionFailed)
+            cf2.stopFactory = self._fireWhenDoneFunc(d2, cf2.stopFactory)
+            reactor.connectTCP("127.0.0.1", p.getHost().port, cf2,
+                               bindAddress=("127.0.0.1", port))
+            d1.addCallback(_check2failed, cf1, cf2)
+            d2.addCallback(_check2stopped, cf1, cf2)
+            dl = defer.DeferredList([d1, d2])
+            dl.addCallback(_stop, cf1, cf2)
+            return dl
+
+        def _check2failed(results, cf1, cf2):
+            self.assertEqual(cf2.failed, 1)
+            cf2.reason.trap(error.ConnectBindError)
+            self.assertTrue(cf2.reason.check(error.ConnectBindError))
+            return results
+
+        def _check2stopped(results, cf1, cf2):
+            self.assertEqual(cf2.stopped, 1)
+            return results
+
+        def _stop(results, cf1, cf2):
+            d = defer.Deferred()
+            d.addCallback(_check1cleanup, cf1)
+            cf1.stopFactory = self._fireWhenDoneFunc(d, cf1.stopFactory)
+            cf1.protocol.transport.loseConnection()
+            return d
+
+        def _check1cleanup(results, cf1):
+            self.assertEqual(cf1.stopped, 1)
+
+        theDeferred.addCallback(_connect1)
+        return theDeferred
+
+
+
+class MyOtherClientFactory(protocol.ClientFactory):
+    def buildProtocol(self, address):
+        self.address = address
+        self.protocol = AccumulatingProtocol()
+        return self.protocol
+
+
+
+class LocalRemoteAddressTestCase(unittest.TestCase):
+    """
+    Tests for correct getHost/getPeer values and that the correct address is
+    passed to buildProtocol.
+    """
+    def test_hostAddress(self):
+        """
+        L{IListeningPort.getHost} returns the same address as a client
+        connection's L{ITCPTransport.getPeer}.
+        """
+        serverFactory = MyServerFactory()
+        serverFactory.protocolConnectionLost = defer.Deferred()
+        serverConnectionLost = serverFactory.protocolConnectionLost
+        port = reactor.listenTCP(0, serverFactory, interface='127.0.0.1')
+        self.addCleanup(port.stopListening)
+        n = port.getHost().port
+
+        clientFactory = MyClientFactory()
+        onConnection = clientFactory.protocolConnectionMade = defer.Deferred()
+        connector = reactor.connectTCP('127.0.0.1', n, clientFactory)
+
+        def check(ignored):
+            self.assertEqual([port.getHost()], clientFactory.peerAddresses)
+            self.assertEqual(
+                port.getHost(), clientFactory.protocol.transport.getPeer())
+        onConnection.addCallback(check)
+
+        def cleanup(ignored):
+            # Clean up the client explicitly here so that tear down of
+            # the server side of the connection begins, then wait for
+            # the server side to actually disconnect.
+            connector.disconnect()
+            return serverConnectionLost
+        onConnection.addCallback(cleanup)
+
+        return onConnection
+
+
+
+class WriterProtocol(protocol.Protocol):
+    def connectionMade(self):
+        # use everything ITransport claims to provide. If something here
+        # fails, the exception will be written to the log, but it will not
+        # directly flunk the test. The test will fail when the maximum number
+        # of iterations has passed and the writer's factory.done has not yet
+        # been set.
+        self.transport.write(b"Hello Cleveland!\n")
+        seq = [b"Goodbye", b" cruel", b" world", b"\n"]
+        self.transport.writeSequence(seq)
+        peer = self.transport.getPeer()
+        if peer.type != "TCP":
+            msg("getPeer returned non-TCP socket: %s" % (peer,))
+            self.factory.problem = 1
+        us = self.transport.getHost()
+        if us.type != "TCP":
+            msg("getHost returned non-TCP socket: %s" % (us,))
+            self.factory.problem = 1
+        self.factory.done = 1
+
+        self.transport.loseConnection()
+
+class ReaderProtocol(protocol.Protocol):
+    def dataReceived(self, data):
+        self.factory.data += data
+    def connectionLost(self, reason):
+        self.factory.done = 1
+
+class WriterClientFactory(protocol.ClientFactory):
+    def __init__(self):
+        self.done = 0
+        self.data = b""
+    def buildProtocol(self, addr):
+        p = ReaderProtocol()
+        p.factory = self
+        self.protocol = p
+        return p
+
+class WriteDataTestCase(unittest.TestCase):
+    """
+    Test that connected TCP sockets can actually write data. Try to exercise
+    the entire ITransport interface.
+    """
+
+    def test_writer(self):
+        """
+        L{ITCPTransport.write} and L{ITCPTransport.writeSequence} send bytes to
+        the other end of the connection.
+        """
+        f = protocol.Factory()
+        f.protocol = WriterProtocol
+        f.done = 0
+        f.problem = 0
+        wrappedF = WiredFactory(f)
+        p = reactor.listenTCP(0, wrappedF, interface="127.0.0.1")
+        self.addCleanup(p.stopListening)
+        n = p.getHost().port
+        clientF = WriterClientFactory()
+        wrappedClientF = WiredFactory(clientF)
+        reactor.connectTCP("127.0.0.1", n, wrappedClientF)
+
+        def check(ignored):
+            self.failUnless(f.done, "writer didn't finish, it probably died")
+            self.failUnless(f.problem == 0, "writer indicated an error")
+            self.failUnless(clientF.done,
+                            "client didn't see connection dropped")
+            expected = b"".join([b"Hello Cleveland!\n",
+                                b"Goodbye", b" cruel", b" world", b"\n"])
+            self.failUnless(clientF.data == expected,
+                            "client didn't receive all the data it expected")
+        d = defer.gatherResults([wrappedF.onDisconnect,
+                                 wrappedClientF.onDisconnect])
+        return d.addCallback(check)
+
+
+    def test_writeAfterShutdownWithoutReading(self):
+        """
+        A TCP transport which is written to after the connection has been shut
+        down should notify its protocol that the connection has been lost, even
+        if the TCP transport is not actively being monitored for read events
+        (ie, pauseProducing was called on it).
+        """
+        # This is an unpleasant thing.  Generally tests shouldn't skip or
+        # run based on the name of the reactor being used (most tests
+        # shouldn't care _at all_ what reactor is being used, in fact).  The
+        # Gtk reactor cannot pass this test, though, because it fails to
+        # implement IReactorTCP entirely correctly.  Gtk is quite old at
+        # this point, so it's more likely that gtkreactor will be deprecated
+        # and removed rather than fixed to handle this case correctly.
+        # Since this is a pre-existing (and very long-standing) issue with
+        # the Gtk reactor, there's no reason for it to prevent this test
+        # being added to exercise the other reactors, for which the behavior
+        # was also untested but at least works correctly (now).  See #2833
+        # for information on the status of gtkreactor.
+        if reactor.__class__.__name__ == 'IOCPReactor':
+            raise unittest.SkipTest(
+                "iocpreactor does not, in fact, stop reading immediately after "
+                "pauseProducing is called. This results in a bonus disconnection "
+                "notification. Under some circumstances, it might be possible to "
+                "not receive this notifications (specifically, pauseProducing, "
+                "deliver some data, proceed with this test).")
+        if reactor.__class__.__name__ == 'GtkReactor':
+            raise unittest.SkipTest(
+                "gtkreactor does not implement unclean disconnection "
+                "notification correctly.  This might more properly be "
+                "a todo, but due to technical limitations it cannot be.")
+
+        # Called back after the protocol for the client side of the connection
+        # has paused its transport, preventing it from reading, therefore
+        # preventing it from noticing the disconnection before the rest of the
+        # actions which are necessary to trigger the case this test is for have
+        # been taken.
+        clientPaused = defer.Deferred()
+
+        # Called back when the protocol for the server side of the connection
+        # has received connection lost notification.
+        serverLost = defer.Deferred()
+
+        class Disconnecter(protocol.Protocol):
+            """
+            Protocol for the server side of the connection which disconnects
+            itself in a callback on clientPaused and publishes notification
+            when its connection is actually lost.
+            """
+            def connectionMade(self):
+                """
+                Set up a callback on clientPaused to lose the connection.
+                """
+                msg('Disconnecter.connectionMade')
+                def disconnect(ignored):
+                    msg('Disconnecter.connectionMade disconnect')
+                    self.transport.loseConnection()
+                    msg('loseConnection called')
+                clientPaused.addCallback(disconnect)
+
+            def connectionLost(self, reason):
+                """
+                Notify observers that the server side of the connection has
+                ended.
+                """
+                msg('Disconnecter.connectionLost')
+                serverLost.callback(None)
+                msg('serverLost called back')
+
+        # Create the server port to which a connection will be made.
+        server = protocol.ServerFactory()
+        server.protocol = Disconnecter
+        port = reactor.listenTCP(0, server, interface='127.0.0.1')
+        self.addCleanup(port.stopListening)
+        addr = port.getHost()
+
+        @implementer(IPullProducer)
+        class Infinite(object):
+            """
+            A pull producer which writes a byte to its consumer each time
+            resumeProducing is called.
+
+            @ivar consumer: The L{IConsumer} which will be written to.
+            """
+
+            def __init__(self, consumer):
+                self.consumer = consumer
+
+            def resumeProducing(self):
+                msg('Infinite.resumeProducing')
+                self.consumer.write(b'x')
+                msg('Infinite.resumeProducing wrote to consumer')
+
+            def stopProducing(self):
+                msg('Infinite.stopProducing')
+
+
+        class UnreadingWriter(protocol.Protocol):
+            """
+            Trivial protocol which pauses its transport immediately and then
+            writes some bytes to it.
+            """
+            def connectionMade(self):
+                msg('UnreadingWriter.connectionMade')
+                self.transport.pauseProducing()
+                clientPaused.callback(None)
+                msg('clientPaused called back')
+                def write(ignored):
+                    msg('UnreadingWriter.connectionMade write')
+                    # This needs to be enough bytes to spill over into the
+                    # userspace Twisted send buffer - if it all fits into
+                    # the kernel, Twisted won't even poll for OUT events,
+                    # which means it won't poll for any events at all, so
+                    # the disconnection is never noticed.  This is due to
+                    # #1662.  When #1662 is fixed, this test will likely
+                    # need to be adjusted, otherwise connection lost
+                    # notification will happen too soon and the test will
+                    # probably begin to fail with ConnectionDone instead of
+                    # ConnectionLost (in any case, it will no longer be
+                    # entirely correct).
+                    producer = Infinite(self.transport)
+                    msg('UnreadingWriter.connectionMade write created producer')
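+                    # The second argument, False, registers Infinite as a
+                    # non-streaming (pull) producer, so the transport will
+                    # call its resumeProducing() whenever the send buffer
+                    # drains.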
+                    self.transport.registerProducer(producer, False)
+                    msg('UnreadingWriter.connectionMade write registered producer')
+                serverLost.addCallback(write)
+
+        # Create the client and initiate the connection
+        client = MyClientFactory()
+        client.protocolFactory = UnreadingWriter
+        clientConnectionLost = client.deferred
+        def cbClientLost(ignored):
+            msg('cbClientLost')
+            return client.lostReason
+        clientConnectionLost.addCallback(cbClientLost)
+        msg('Connecting to %s:%s' % (addr.host, addr.port))
+        reactor.connectTCP(addr.host, addr.port, client)
+
+        # By the end of the test, the client should have received notification
+        # of unclean disconnection.
+        msg('Returning Deferred')
+        return self.assertFailure(clientConnectionLost, error.ConnectionLost)
+
+
+
+class ConnectionLosingProtocol(protocol.Protocol):
+    def connectionMade(self):
+        self.transport.write(b"1")
+        self.transport.loseConnection()
+        self.master._connectionMade()
+        self.master.ports.append(self.transport)
+
+
+
+class NoopProtocol(protocol.Protocol):
+    def connectionMade(self):
+        self.d = defer.Deferred()
+        self.master.serverConns.append(self.d)
+
+    def connectionLost(self, reason):
+        self.d.callback(True)
+
+
+
+class ConnectionLostNotifyingProtocol(protocol.Protocol):
+    """
+    Protocol which fires a Deferred which was previously passed to
+    its initializer when the connection is lost.
+
+    @ivar onConnectionLost: The L{Deferred} which will be fired in
+        C{connectionLost}.
+
+    @ivar lostConnectionReason: C{None} until the connection is lost, then a
+        reference to the reason passed to C{connectionLost}.
+    """
+    def __init__(self, onConnectionLost):
+        self.lostConnectionReason = None
+        self.onConnectionLost = onConnectionLost
+
+
+    def connectionLost(self, reason):
+        self.lostConnectionReason = reason
+        self.onConnectionLost.callback(self)
+
+
+
+class HandleSavingProtocol(ConnectionLostNotifyingProtocol):
+    """
+    Protocol which grabs the platform-specific socket handle and
+    saves it as an attribute on itself when the connection is
+    established.
+    """
+    def makeConnection(self, transport):
+        """
+        Save the platform-specific socket handle for future
+        introspection.
+        """
+        self.handle = transport.getHandle()
+        return protocol.Protocol.makeConnection(self, transport)
+
+
+
+class ProperlyCloseFilesMixin:
+    """
+    Tests for platform resources properly being cleaned up.
+    """
+    def createServer(self, address, portNumber, factory):
+        """
+        Bind a server port to which connections will be made.  The server
+        should use the given protocol factory.
+
+        @return: The L{IListeningPort} for the server created.
+        """
+        raise NotImplementedError()
+
+
+    def connectClient(self, address, portNumber, clientCreator):
+        """
+        Establish a connection to the given address using the given
+        L{ClientCreator} instance.
+
+        @return: A Deferred which will fire with the connected protocol instance.
+        """
+        raise NotImplementedError()
+
+
+    def getHandleExceptionType(self):
+        """
+        Return the exception class which will be raised when an operation is
+        attempted on a closed platform handle.
+        """
+        raise NotImplementedError()
+
+
+    def getHandleErrorCode(self):
+        """
+        Return the errno expected to result from writing to a closed
+        platform socket handle.
+        """
+        # These platforms have been seen to give EBADF:
+        #
+        #  Linux 2.4.26, Linux 2.6.15, OS X 10.4, FreeBSD 5.4
+        #  Windows 2000 SP 4, Windows XP SP 2
+        return errno.EBADF
+
+
+    def test_properlyCloseFiles(self):
+        """
+        Test that lost connections properly have their underlying socket
+        resources cleaned up.
+        """
+        onServerConnectionLost = defer.Deferred()
+        serverFactory = protocol.ServerFactory()
+        serverFactory.protocol = lambda: ConnectionLostNotifyingProtocol(
+            onServerConnectionLost)
+        serverPort = self.createServer('127.0.0.1', 0, serverFactory)
+
+        onClientConnectionLost = defer.Deferred()
+        serverAddr = serverPort.getHost()
+        clientCreator = protocol.ClientCreator(
+            reactor, lambda: HandleSavingProtocol(onClientConnectionLost))
+        clientDeferred = self.connectClient(
+            serverAddr.host, serverAddr.port, clientCreator)
+
+        def clientConnected(client):
+            """
+            Disconnect the client.  Return a Deferred which fires when both
+            the client and the server have received disconnect notification.
+            """
+            client.transport.write(
+                b'some bytes to make sure the connection is set up')
+            client.transport.loseConnection()
+            return defer.gatherResults([
+                onClientConnectionLost, onServerConnectionLost])
+        clientDeferred.addCallback(clientConnected)
+
+        def clientDisconnected(result):
+            """
+            Verify that the underlying platform socket handle has been
+            cleaned up.
+            """
+            client, server = result
+            client.lostConnectionReason.trap(error.ConnectionClosed)
+            server.lostConnectionReason.trap(error.ConnectionClosed)
+            expectedErrorCode = self.getHandleErrorCode()
+            err = self.assertRaises(
+                self.getHandleExceptionType(), client.handle.send, b'bytes')
+            self.assertEqual(err.args[0], expectedErrorCode)
+        clientDeferred.addCallback(clientDisconnected)
+
+        def cleanup(passthrough):
+            """
+            Shut down the server port.  Return a Deferred which fires when
+            this has completed.
+            """
+            result = defer.maybeDeferred(serverPort.stopListening)
+            result.addCallback(lambda ign: passthrough)
+            return result
+        clientDeferred.addBoth(cleanup)
+
+        return clientDeferred
+
+
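+# A minimal standalone sketch (not used by the tests) of the platform
+# behaviour ProperlyCloseFilesMixin above relies on: once a socket handle has
+# been closed, send() fails with EBADF.  The helper name is illustrative only
+# and the function is never called.
+def _closedHandleSketch():
+    import errno, socket
+    s = socket.socket()
+    s.close()
+    try:
+        s.send(b'x')
+    except socket.error as e:
+        assert e.args[0] == errno.EBADF
+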
+
+class ProperlyCloseFilesTestCase(unittest.TestCase, ProperlyCloseFilesMixin):
+    """
+    Test that the sockets created by L{IReactorTCP.connectTCP} are cleaned up
+    when the connection they are associated with is closed.
+    """
+    def createServer(self, address, portNumber, factory):
+        """
+        Create a TCP server using L{IReactorTCP.listenTCP}.
+        """
+        return reactor.listenTCP(portNumber, factory, interface=address)
+
+
+    def connectClient(self, address, portNumber, clientCreator):
+        """
+        Create a TCP client using L{IReactorTCP.connectTCP}.
+        """
+        return clientCreator.connectTCP(address, portNumber)
+
+
+    def getHandleExceptionType(self):
+        """
+        Return L{socket.error} as the expected error type which will be
+        raised by a write to the low-level socket object after it has been
+        closed.
+        """
+        return socket.error
+
+
+
+class WiredForDeferreds(policies.ProtocolWrapper):
+    def __init__(self, factory, wrappedProtocol):
+        policies.ProtocolWrapper.__init__(self, factory, wrappedProtocol)
+
+    def connectionMade(self):
+        policies.ProtocolWrapper.connectionMade(self)
+        self.factory.onConnect.callback(None)
+
+    def connectionLost(self, reason):
+        policies.ProtocolWrapper.connectionLost(self, reason)
+        self.factory.onDisconnect.callback(None)
+
+
+
+class WiredFactory(policies.WrappingFactory):
+    protocol = WiredForDeferreds
+
+    def __init__(self, wrappedFactory):
+        policies.WrappingFactory.__init__(self, wrappedFactory)
+        self.onConnect = defer.Deferred()
+        self.onDisconnect = defer.Deferred()
+
+
+
+class AddressTestCase(unittest.TestCase):
+    """
+    Tests for address-related interactions with client and server protocols.
+    """
+    def setUp(self):
+        """
+        Create a port and connected client/server pair which can be used
+        to test factory behavior related to addresses.
+
+        @return: A L{defer.Deferred} which will be called back when both the
+            client and server protocols have received their connection made
+            callback.
+        """
+        class RememberingWrapper(protocol.ClientFactory):
+            """
+            Simple wrapper factory which records the addresses which are
+            passed to its L{buildProtocol} method and delegates actual
+            protocol creation to another factory.
+
+            @ivar addresses: A list of the objects passed to buildProtocol.
+            @ivar factory: The wrapped factory to which protocol creation is
+                delegated.
+            """
+            def __init__(self, factory):
+                self.addresses = []
+                self.factory = factory
+
+            # Only bother to pass on buildProtocol calls to the wrapped
+            # factory - doStart, doStop, etc. aren't necessary for this test
+            # to pass.
+            def buildProtocol(self, addr):
+                """
+                Append the given address to C{self.addresses} and forward
+                the call to C{self.factory}.
+                """
+                self.addresses.append(addr)
+                return self.factory.buildProtocol(addr)
+
+        # Make a server which we can receive connection and disconnection
+        # notification for, and which will record the address passed to its
+        # buildProtocol.
+        self.server = MyServerFactory()
+        self.serverConnMade = self.server.protocolConnectionMade = defer.Deferred()
+        self.serverConnLost = self.server.protocolConnectionLost = defer.Deferred()
+        # RememberingWrapper is a ClientFactory, but ClientFactory is-a
+        # ServerFactory, so this is okay.
+        self.serverWrapper = RememberingWrapper(self.server)
+
+        # Do something similar for a client.
+        self.client = MyClientFactory()
+        self.clientConnMade = self.client.protocolConnectionMade = defer.Deferred()
+        self.clientConnLost = self.client.protocolConnectionLost = defer.Deferred()
+        self.clientWrapper = RememberingWrapper(self.client)
+
+        self.port = reactor.listenTCP(0, self.serverWrapper, interface='127.0.0.1')
+        self.connector = reactor.connectTCP(
+            self.port.getHost().host, self.port.getHost().port, self.clientWrapper)
+
+        return defer.gatherResults([self.serverConnMade, self.clientConnMade])
+
+
+    def tearDown(self):
+        """
+        Disconnect the client/server pair and shutdown the port created in
+        L{setUp}.
+        """
+        self.connector.disconnect()
+        return defer.gatherResults([
+            self.serverConnLost, self.clientConnLost,
+            defer.maybeDeferred(self.port.stopListening)])
+
+
+    def test_buildProtocolClient(self):
+        """
+        L{ClientFactory.buildProtocol} should be invoked with the address of
+        the server to which a connection has been established, which should
+        be the same as the address reported by the C{getHost} method of the
+        transport of the server protocol and as the C{getPeer} method of the
+        transport of the client protocol.
+        """
+        serverHost = self.server.protocol.transport.getHost()
+        clientPeer = self.client.protocol.transport.getPeer()
+
+        self.assertEqual(
+            self.clientWrapper.addresses,
+            [IPv4Address('TCP', serverHost.host, serverHost.port)])
+        self.assertEqual(
+            self.clientWrapper.addresses,
+            [IPv4Address('TCP', clientPeer.host, clientPeer.port)])
+
+
+
+class LargeBufferWriterProtocol(protocol.Protocol):
+
+    # Win32 sockets cannot handle single huge chunks of bytes.  Write one
+    # massive string to make sure Twisted deals with this fact.
+
+    def connectionMade(self):
+        # write 60MB
+        self.transport.write(b'X'*self.factory.len)
+        self.factory.done = 1
+        self.transport.loseConnection()
+
+class LargeBufferReaderProtocol(protocol.Protocol):
+    def dataReceived(self, data):
+        self.factory.len += len(data)
+    def connectionLost(self, reason):
+        self.factory.done = 1
+
+class LargeBufferReaderClientFactory(protocol.ClientFactory):
+    def __init__(self):
+        self.done = 0
+        self.len = 0
+    def buildProtocol(self, addr):
+        p = LargeBufferReaderProtocol()
+        p.factory = self
+        self.protocol = p
+        return p
+
+
+class FireOnClose(policies.ProtocolWrapper):
+    """A wrapper around a protocol that makes it fire a deferred when
+    connectionLost is called.
+    """
+    def connectionLost(self, reason):
+        policies.ProtocolWrapper.connectionLost(self, reason)
+        self.factory.deferred.callback(None)
+
+
+class FireOnCloseFactory(policies.WrappingFactory):
+    protocol = FireOnClose
+
+    def __init__(self, wrappedFactory):
+        policies.WrappingFactory.__init__(self, wrappedFactory)
+        self.deferred = defer.Deferred()
+
+
+class LargeBufferTestCase(unittest.TestCase):
+    """Test that buffering large amounts of data works.
+    """
+
+    datalen = 60*1024*1024
+    def testWriter(self):
+        f = protocol.Factory()
+        f.protocol = LargeBufferWriterProtocol
+        f.done = 0
+        f.problem = 0
+        f.len = self.datalen
+        wrappedF = FireOnCloseFactory(f)
+        p = reactor.listenTCP(0, wrappedF, interface="127.0.0.1")
+        self.addCleanup(p.stopListening)
+        n = p.getHost().port
+        clientF = LargeBufferReaderClientFactory()
+        wrappedClientF = FireOnCloseFactory(clientF)
+        reactor.connectTCP("127.0.0.1", n, wrappedClientF)
+
+        d = defer.gatherResults([wrappedF.deferred, wrappedClientF.deferred])
+        def check(ignored):
+            self.failUnless(f.done, "writer didn't finish, it probably died")
+            self.failUnless(clientF.len == self.datalen,
+                            "client didn't receive all the data it expected "
+                            "(%d != %d)" % (clientF.len, self.datalen))
+            self.failUnless(clientF.done,
+                            "client didn't see connection dropped")
+        return d.addCallback(check)
+
+
+@implementer(IHalfCloseableProtocol)
+class MyHCProtocol(AccumulatingProtocol):
+
+
+    readHalfClosed = False
+    writeHalfClosed = False
+
+    def readConnectionLost(self):
+        self.readHalfClosed = True
+        # Invoke notification logic from the base class to simplify testing.
+        if self.writeHalfClosed:
+            self.connectionLost(None)
+
+    def writeConnectionLost(self):
+        self.writeHalfClosed = True
+        # Invoke notification logic from the base class to simplify testing.
+        if self.readHalfClosed:
+            self.connectionLost(None)
+
+
+class MyHCFactory(protocol.ServerFactory):
+
+    called = 0
+    protocolConnectionMade = None
+
+    def buildProtocol(self, addr):
+        self.called += 1
+        p = MyHCProtocol()
+        p.factory = self
+        self.protocol = p
+        return p
+
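+# A rough standalone illustration (POSIX only, never called by the tests) of
+# the half-close behaviour exercised by the tests below: after
+# shutdown(SHUT_WR) the peer sees EOF, but data can still flow in the other
+# direction.
+def _halfCloseSketch():
+    import socket
+    a, b = socket.socketpair()
+    a.sendall(b'hello')
+    a.shutdown(socket.SHUT_WR)       # close only the write side of 'a'
+    assert b.recv(1024) == b'hello'  # data sent before the half-close
+    assert b.recv(1024) == b''       # then EOF from the shut-down direction
+    b.sendall(b'world')              # the reverse direction still works
+    assert a.recv(1024) == b'world'
+    a.close()
+    b.close()
+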
+
+class HalfCloseTestCase(unittest.TestCase):
+    """Test half-closing connections."""
+
+    def setUp(self):
+        self.f = f = MyHCFactory()
+        self.p = p = reactor.listenTCP(0, f, interface="127.0.0.1")
+        self.addCleanup(p.stopListening)
+        d = loopUntil(lambda :p.connected)
+
+        self.cf = protocol.ClientCreator(reactor, MyHCProtocol)
+
+        d.addCallback(lambda _: self.cf.connectTCP(p.getHost().host,
+                                                   p.getHost().port))
+        d.addCallback(self._setUp)
+        return d
+
+    def _setUp(self, client):
+        self.client = client
+        self.clientProtoConnectionLost = self.client.closedDeferred = defer.Deferred()
+        self.assertEqual(self.client.transport.connected, 1)
+        # Wait for the server to notice there is a connection, too.
+        return loopUntil(lambda: getattr(self.f, 'protocol', None) is not None)
+
+    def tearDown(self):
+        self.assertEqual(self.client.closed, 0)
+        self.client.transport.loseConnection()
+        d = defer.maybeDeferred(self.p.stopListening)
+        d.addCallback(lambda ign: self.clientProtoConnectionLost)
+        d.addCallback(self._tearDown)
+        return d
+
+    def _tearDown(self, ignored):
+        self.assertEqual(self.client.closed, 1)
+        # Because we did a half-close, the server also needs to be
+        # closed explicitly.
+        self.assertEqual(self.f.protocol.closed, 0)
+        d = defer.Deferred()
+        def _connectionLost(reason):
+            self.f.protocol.closed = 1
+            d.callback(None)
+        self.f.protocol.connectionLost = _connectionLost
+        self.f.protocol.transport.loseConnection()
+        d.addCallback(lambda x:self.assertEqual(self.f.protocol.closed, 1))
+        return d
+
+    def testCloseWriteCloser(self):
+        client = self.client
+        f = self.f
+        t = client.transport
+
+        t.write(b"hello")
+        d = loopUntil(lambda :len(t._tempDataBuffer) == 0)
+        def loseWrite(ignored):
+            t.loseWriteConnection()
+            return loopUntil(lambda :t._writeDisconnected)
+        def check(ignored):
+            self.assertEqual(client.closed, False)
+            self.assertEqual(client.writeHalfClosed, True)
+            self.assertEqual(client.readHalfClosed, False)
+            return loopUntil(lambda :f.protocol.readHalfClosed)
+        def write(ignored):
+            w = client.transport.write
+            w(b" world")
+            w(b"lalala fooled you")
+            self.assertEqual(0, len(client.transport._tempDataBuffer))
+            self.assertEqual(f.protocol.data, b"hello")
+            self.assertEqual(f.protocol.closed, False)
+            self.assertEqual(f.protocol.readHalfClosed, True)
+        return d.addCallback(loseWrite).addCallback(check).addCallback(write)
+
+    def testWriteCloseNotification(self):
+        f = self.f
+        f.protocol.transport.loseWriteConnection()
+
+        d = defer.gatherResults([
+            loopUntil(lambda :f.protocol.writeHalfClosed),
+            loopUntil(lambda :self.client.readHalfClosed)])
+        d.addCallback(lambda _: self.assertEqual(
+            f.protocol.readHalfClosed, False))
+        return d
+
+
+class HalfClose2TestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.f = f = MyServerFactory()
+        self.f.protocolConnectionMade = defer.Deferred()
+        self.p = p = reactor.listenTCP(0, f, interface="127.0.0.1")
+
+        # XXX we don't test the server side yet since we don't implement it yet
+        d = protocol.ClientCreator(reactor, AccumulatingProtocol).connectTCP(
+            p.getHost().host, p.getHost().port)
+        d.addCallback(self._gotClient)
+        return d
+
+    def _gotClient(self, client):
+        self.client = client
+        # Now wait for the server to catch up - it doesn't matter if this
+        # Deferred has already fired and gone away, in that case we'll
+        # return None and not wait at all, which is precisely correct.
+        return self.f.protocolConnectionMade
+
+    def tearDown(self):
+        self.client.transport.loseConnection()
+        return self.p.stopListening()
+
+    def testNoNotification(self):
+        """
+        TCP transports support half-closing connections, but not all
+        protocols support being notified of write closes.  In this case,
+        test that half-closing the connection causes the peer's connection
+        to be closed.
+        """
+        self.client.transport.write(b"hello")
+        self.client.transport.loseWriteConnection()
+        self.f.protocol.closedDeferred = d = defer.Deferred()
+        self.client.closedDeferred = d2 = defer.Deferred()
+        d.addCallback(lambda x:
+                      self.assertEqual(self.f.protocol.data, b'hello'))
+        d.addCallback(lambda x: self.assertEqual(self.f.protocol.closed, True))
+        return defer.gatherResults([d, d2])
+
+    def testShutdownException(self):
+        """
+        If the other side has already closed its connection,
+        loseWriteConnection should pass silently.
+        """
+        self.f.protocol.transport.loseConnection()
+        self.client.transport.write(b"X")
+        self.client.transport.loseWriteConnection()
+        self.f.protocol.closedDeferred = d = defer.Deferred()
+        self.client.closedDeferred = d2 = defer.Deferred()
+        d.addCallback(lambda x:
+                      self.assertEqual(self.f.protocol.closed, True))
+        return defer.gatherResults([d, d2])
+
+
+class HalfCloseBuggyApplicationTests(unittest.TestCase):
+    """
+    Test half-closing connections where notification code has bugs.
+    """
+
+    def setUp(self):
+        """
+        Set up a server and connect a client to it.  Return a Deferred which
+        only fires once this is done.
+        """
+        self.serverFactory = MyHCFactory()
+        self.serverFactory.protocolConnectionMade = defer.Deferred()
+        self.port = reactor.listenTCP(
+            0, self.serverFactory, interface="127.0.0.1")
+        self.addCleanup(self.port.stopListening)
+        addr = self.port.getHost()
+        creator = protocol.ClientCreator(reactor, MyHCProtocol)
+        clientDeferred = creator.connectTCP(addr.host, addr.port)
+        def setClient(clientProtocol):
+            self.clientProtocol = clientProtocol
+        clientDeferred.addCallback(setClient)
+        return defer.gatherResults([
+            self.serverFactory.protocolConnectionMade,
+            clientDeferred])
+
+
+    def aBug(self, *args):
+        """
+        Fake implementation of a callback which illegally raises an
+        exception.
+        """
+        raise RuntimeError("ONO I AM BUGGY CODE")
+
+
+    def _notificationRaisesTest(self):
+        """
+        Helper for testing that an exception is logged by the time the
+        client protocol loses its connection.
+        """
+        closed = self.clientProtocol.closedDeferred = defer.Deferred()
+        self.clientProtocol.transport.loseWriteConnection()
+        def check(ignored):
+            errors = self.flushLoggedErrors(RuntimeError)
+            self.assertEqual(len(errors), 1)
+        closed.addCallback(check)
+        return closed
+
+
+    def test_readNotificationRaises(self):
+        """
+        If C{readConnectionLost} raises an exception when the transport
+        calls it to notify the protocol of that event, the exception should
+        be logged and the protocol should be disconnected completely.
+        """
+        self.serverFactory.protocol.readConnectionLost = self.aBug
+        return self._notificationRaisesTest()
+
+
+    def test_writeNotificationRaises(self):
+        """
+        If C{writeConnectionLost} raises an exception when the transport
+        calls it to notify the protocol of that event, the exception should
+        be logged and the protocol should be disconnected completely.
+        """
+        self.clientProtocol.writeConnectionLost = self.aBug
+        return self._notificationRaisesTest()
+
+
+
+class LogTestCase(unittest.TestCase):
+    """
+    Test logging facility of TCP base classes.
+    """
+
+    def test_logstrClientSetup(self):
+        """
+        Check that the log customization of the client transport happens
+        once the client is connected.
+        """
+        server = MyServerFactory()
+
+        client = MyClientFactory()
+        client.protocolConnectionMade = defer.Deferred()
+
+        port = reactor.listenTCP(0, server, interface='127.0.0.1')
+        self.addCleanup(port.stopListening)
+
+        connector = reactor.connectTCP(
+            port.getHost().host, port.getHost().port, client)
+        self.addCleanup(connector.disconnect)
+
+        # It should still have the default value
+        self.assertEqual(connector.transport.logstr,
+                          "Uninitialized")
+
+        def cb(ign):
+            self.assertEqual(connector.transport.logstr,
+                              "AccumulatingProtocol,client")
+        client.protocolConnectionMade.addCallback(cb)
+        return client.protocolConnectionMade
+
+
+
+class PauseProducingTestCase(unittest.TestCase):
+    """
+    Test some behaviors of pausing the production of a transport.
+    """
+
+    def test_pauseProducingInConnectionMade(self):
+        """
+        In C{connectionMade} of a client protocol, C{pauseProducing} used to
+        be ignored; this test ensures that it no longer is.
+        """
+        server = MyServerFactory()
+
+        client = MyClientFactory()
+        client.protocolConnectionMade = defer.Deferred()
+
+        port = reactor.listenTCP(0, server, interface='127.0.0.1')
+        self.addCleanup(port.stopListening)
+
+        connector = reactor.connectTCP(
+            port.getHost().host, port.getHost().port, client)
+        self.addCleanup(connector.disconnect)
+
+        def checkInConnectionMade(proto):
+            tr = proto.transport
+            # The transport should already be monitored
+            self.assertIn(tr, reactor.getReaders() +
+                              reactor.getWriters())
+            proto.transport.pauseProducing()
+            self.assertNotIn(tr, reactor.getReaders() +
+                                 reactor.getWriters())
+            d = defer.Deferred()
+            d.addCallback(checkAfterConnectionMade)
+            reactor.callLater(0, d.callback, proto)
+            return d
+        def checkAfterConnectionMade(proto):
+            tr = proto.transport
+            # The transport should still not be monitored
+            self.assertNotIn(tr, reactor.getReaders() +
+                                 reactor.getWriters())
+        client.protocolConnectionMade.addCallback(checkInConnectionMade)
+        return client.protocolConnectionMade
+
+    if not interfaces.IReactorFDSet.providedBy(reactor):
+        test_pauseProducingInConnectionMade.skip = "Reactor not providing IReactorFDSet"
+
+
+
+class CallBackOrderTestCase(unittest.TestCase):
+    """
+    Test the order of reactor callbacks
+    """
+
+    def test_loseOrder(self):
+        """
+        Check that Protocol.connectionLost is called before the factory's
+        clientConnectionLost.
+        """
+        server = MyServerFactory()
+        server.protocolConnectionMade = (defer.Deferred()
+                .addCallback(lambda proto: self.addCleanup(
+                             proto.transport.loseConnection)))
+
+        client = MyClientFactory()
+        client.protocolConnectionLost = defer.Deferred()
+        client.protocolConnectionMade = defer.Deferred()
+
+        def _cbCM(res):
+            """
+            protocol.connectionMade callback
+            """
+            reactor.callLater(0, client.protocol.transport.loseConnection)
+
+        client.protocolConnectionMade.addCallback(_cbCM)
+
+        port = reactor.listenTCP(0, server, interface='127.0.0.1')
+        self.addCleanup(port.stopListening)
+
+        connector = reactor.connectTCP(
+            port.getHost().host, port.getHost().port, client)
+        self.addCleanup(connector.disconnect)
+
+        def _cbCCL(res):
+            """
+            factory.clientConnectionLost callback
+            """
+            return 'CCL'
+
+        def _cbCL(res):
+            """
+            protocol.connectionLost callback
+            """
+            return 'CL'
+
+        def _cbGather(res):
+            self.assertEqual(res, ['CL', 'CCL'])
+
+        d = defer.gatherResults([
+                client.protocolConnectionLost.addCallback(_cbCL),
+                client.deferred.addCallback(_cbCCL)])
+        return d.addCallback(_cbGather)
+
+
+
+try:
+    import resource
+except ImportError:
+    pass
+else:
+    numRounds = resource.getrlimit(resource.RLIMIT_NOFILE)[0] + 10
+    ProperlyCloseFilesTestCase.numberRounds = numRounds
diff --git a/ThirdParty/Twisted/twisted/test/test_tcp_internals.py b/ThirdParty/Twisted/twisted/test/test_tcp_internals.py
new file mode 100644
index 0000000..be4a77f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_tcp_internals.py
@@ -0,0 +1,255 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Whitebox tests for TCP APIs.
+"""
+
+from __future__ import division, absolute_import
+
+import errno, socket, os
+
+try:
+    import resource
+except ImportError:
+    resource = None
+
+from twisted.python.compat import _PY3
+from twisted.trial.unittest import TestCase
+
+from twisted.python import log
+from twisted.internet.tcp import ECONNABORTED, ENOMEM, ENFILE, EMFILE, ENOBUFS, EINPROGRESS, Port
+from twisted.internet.protocol import ServerFactory
+from twisted.python.runtime import platform
+from twisted.internet.defer import maybeDeferred, gatherResults
+from twisted.internet import reactor, interfaces
+
+
+class PlatformAssumptionsTestCase(TestCase):
+    """
+    Test assumptions about platform behaviors.
+    """
+    if _PY3:
+        skip = "Port when Python 3 supports twisted.internet.process (#5987)"
+
+    socketLimit = 8192
+
+    def setUp(self):
+        self.openSockets = []
+        if resource is not None:
+            # On some buggy platforms we might leak FDs, and the test will
+            # fail creating the initial two sockets we *do* want to
+            # succeed. So, we make the soft limit the current number of fds
+            # plus two more (for the two sockets we want to succeed). If we've
+            # leaked too many fds for that to work, there's nothing we can
+            # do.
+            from twisted.internet.process import _listOpenFDs
+            newLimit = len(_listOpenFDs()) + 2
+            self.originalFileLimit = resource.getrlimit(resource.RLIMIT_NOFILE)
+            resource.setrlimit(resource.RLIMIT_NOFILE, (newLimit, self.originalFileLimit[1]))
+            self.socketLimit = newLimit + 100
+
+
+    def tearDown(self):
+        while self.openSockets:
+            self.openSockets.pop().close()
+        if resource is not None:
+            # OS X implicitly lowers the hard limit in the setrlimit call
+            # above.  Retrieve the new hard limit to pass in to this
+            # setrlimit call, so that it doesn't give us a permission denied
+            # error.
+            currentHardLimit = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
+            newSoftLimit = min(self.originalFileLimit[0], currentHardLimit)
+            resource.setrlimit(resource.RLIMIT_NOFILE, (newSoftLimit, currentHardLimit))
+
+
+    def socket(self):
+        """
+        Create and return a new socket object, also tracking it so it can be
+        closed in the test tear down.
+        """
+        s = socket.socket()
+        self.openSockets.append(s)
+        return s
+
+
+    def test_acceptOutOfFiles(self):
+        """
+        Test that the platform accept(2) call fails with either L{EMFILE} or
+        L{ENOBUFS} when there are too many file descriptors open.
+        """
+        # Make a server to which to connect
+        port = self.socket()
+        port.bind(('127.0.0.1', 0))
+        serverPortNumber = port.getsockname()[1]
+        port.listen(5)
+
+        # Make a client to use to connect to the server
+        client = self.socket()
+        client.setblocking(False)
+
+        # Use up all the rest of the file descriptors.
+        for i in xrange(self.socketLimit):
+            try:
+                self.socket()
+            except socket.error as e:
+                if e.args[0] in (EMFILE, ENOBUFS):
+                    # The desired state has been achieved.
+                    break
+                else:
+                    # Some unexpected error occurred.
+                    raise
+        else:
+            self.fail("Could provoke neither EMFILE nor ENOBUFS from platform.")
+
+        # Non-blocking connect is supposed to fail, but this is not true
+        # everywhere (e.g. freeBSD)
+        self.assertIn(client.connect_ex(('127.0.0.1', serverPortNumber)),
+                      (0, EINPROGRESS))
+
+        # Make sure that the accept call fails in the way we expect.
+        exc = self.assertRaises(socket.error, port.accept)
+        self.assertIn(exc.args[0], (EMFILE, ENOBUFS))
+    if platform.getType() == "win32":
+        test_acceptOutOfFiles.skip = (
+            "Windows requires an unacceptably large amount of resources to "
+            "provoke this behavior in the naive manner.")
+
+
+
+class SelectReactorTestCase(TestCase):
+    """
+    Tests for select-specific failure conditions.
+    """
+
+    def setUp(self):
+        self.ports = []
+        self.messages = []
+        log.addObserver(self.messages.append)
+
+
+    def tearDown(self):
+        log.removeObserver(self.messages.append)
+        return gatherResults([
+            maybeDeferred(p.stopListening)
+            for p in self.ports])
+
+
+    def port(self, portNumber, factory, interface):
+        """
+        Create, start, and return a new L{Port}, also tracking it so it can
+        be stopped in the test tear down.
+        """
+        p = Port(portNumber, factory, interface=interface)
+        p.startListening()
+        self.ports.append(p)
+        return p
+
+
+    def _acceptFailureTest(self, socketErrorNumber):
+        """
+        Test behavior in the face of an exception from C{accept(2)}.
+
+        On any exception which indicates the platform is unable or unwilling
+        to allocate further resources to us, the existing port should remain
+        listening, a message should be logged, and the exception should not
+        propagate outward from doRead.
+
+        @param socketErrorNumber: The errno to simulate from accept.
+        """
+        class FakeSocket(object):
+            """
+            Pretend to be a socket in an overloaded system.
+            """
+            def accept(self):
+                raise socket.error(
+                    socketErrorNumber, os.strerror(socketErrorNumber))
+
+        factory = ServerFactory()
+        port = self.port(0, factory, interface='127.0.0.1')
+        originalSocket = port.socket
+        try:
+            port.socket = FakeSocket()
+
+            port.doRead()
+
+            expectedFormat = "Could not accept new connection (%s)"
+            expectedErrorCode = errno.errorcode[socketErrorNumber]
+            expectedMessage = expectedFormat % (expectedErrorCode,)
+            for msg in self.messages:
+                if msg.get('message') == (expectedMessage,):
+                    break
+            else:
+                self.fail("Log event for failed accept not found in "
+                          "%r" % (self.messages,))
+        finally:
+            port.socket = originalSocket
+
+
+    def test_tooManyFilesFromAccept(self):
+        """
+        C{accept(2)} can fail with C{EMFILE} when there are too many open file
+        descriptors in the process.  Test that this doesn't negatively impact
+        any other existing connections.
+
+        C{EMFILE} mainly occurs on Linux when the open file rlimit is
+        reached.
+        """
+        return self._acceptFailureTest(EMFILE)
+
+
+    def test_noBufferSpaceFromAccept(self):
+        """
+        Similar to L{test_tooManyFilesFromAccept}, but test the case where
+        C{accept(2)} fails with C{ENOBUFS}.
+
+        This mainly occurs on Windows and FreeBSD, but may be possible on
+        Linux and other platforms as well.
+        """
+        return self._acceptFailureTest(ENOBUFS)
+
+
+    def test_connectionAbortedFromAccept(self):
+        """
+        Similar to L{test_tooManyFilesFromAccept}, but test the case where
+        C{accept(2)} fails with C{ECONNABORTED}.
+
+        It is not clear whether this is actually possible for TCP
+        connections on modern versions of Linux.
+        """
+        return self._acceptFailureTest(ECONNABORTED)
+
+
+    def test_noFilesFromAccept(self):
+        """
+        Similar to L{test_tooManyFilesFromAccept}, but test the case where
+        C{accept(2)} fails with C{ENFILE}.
+
+        This can occur on Linux when the system-wide limit on the number of
+        open files has been exhausted.
+        """
+        return self._acceptFailureTest(ENFILE)
+    if platform.getType() == 'win32':
+        test_noFilesFromAccept.skip = "Windows accept(2) cannot generate ENFILE"
+
+
+    def test_noMemoryFromAccept(self):
+        """
+        Similar to L{test_tooManyFilesFromAccept}, but test the case where
+        C{accept(2)} fails with C{ENOMEM}.
+
+        On Linux at least, this can sensibly occur, even in a Python program
+        (which eats memory like no one's business), when memory has become
+        fragmented or low memory has been filled (d_alloc calls
+        kmem_cache_alloc calls kmalloc - kmalloc only allocates out of low
+        memory).
+        """
+        return self._acceptFailureTest(ENOMEM)
+    if platform.getType() == 'win32':
+        test_noMemoryFromAccept.skip = "Windows accept(2) cannot generate ENOMEM"
+
+if not interfaces.IReactorFDSet.providedBy(reactor):
+    skipMsg = 'This test only applies to reactors that implement IReactorFDSet'
+    PlatformAssumptionsTestCase.skip = skipMsg
+    SelectReactorTestCase.skip = skipMsg
+
diff --git a/ThirdParty/Twisted/twisted/test/test_text.py b/ThirdParty/Twisted/twisted/test/test_text.py
new file mode 100644
index 0000000..4b2d38c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_text.py
@@ -0,0 +1,242 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.python.text}.
+"""
+
+from cStringIO import StringIO
+
+from twisted.trial import unittest
+from twisted.python import text
+
+
+sampleText = \
+"""Every attempt to employ mathematical methods in the study of chemical
+questions must be considered profoundly irrational and contrary to the
+spirit of chemistry ...  If mathematical analysis should ever hold a
+prominent place in chemistry - an aberration which is happily almost
+impossible - it would occasion a rapid and widespread degeneration of that
+science.
+
+           --  Auguste Comte, Philosophie Positive, Paris, 1838
+"""
+
+
+class WrapTest(unittest.TestCase):
+    """
+    Tests for L{text.greedyWrap}.
+    """
+    def setUp(self):
+        self.lineWidth = 72
+        self.sampleSplitText = sampleText.split()
+        self.output = text.wordWrap(sampleText, self.lineWidth)
+
+
+    def test_wordCount(self):
+        """
+        Compare the number of words.
+        """
+        words = []
+        for line in self.output:
+            words.extend(line.split())
+        wordCount = len(words)
+        sampleTextWordCount = len(self.sampleSplitText)
+
+        self.assertEqual(wordCount, sampleTextWordCount)
+
+
+    def test_wordMatch(self):
+        """
+        Compare the lists of words.
+        """
+        words = []
+        for line in self.output:
+            words.extend(line.split())
+
+        # assertEqual is avoided here because a failure would print out
+        # some rather long lists.
+        self.failUnless(self.sampleSplitText == words)
+
+
+    def test_lineLength(self):
+        """
+        Check the length of the lines.
+        """
+        failures = []
+        for line in self.output:
+            if not len(line) <= self.lineWidth:
+                failures.append(len(line))
+
+        if failures:
+            self.fail("%d of %d lines were too long.\n"
+                      "%d < %s" % (len(failures), len(self.output),
+                                   self.lineWidth, failures))
+
+    def test_doubleNewline(self):
+        """
+        Allow paragraphs delimited by two newlines.
+        """
+        sampleText = "et\n\nphone\nhome."
+        result = text.wordWrap(sampleText, self.lineWidth)
+        self.assertEqual(result, ["et", "", "phone home.", ""]) 
+
+
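+# Usage sketch (illustrative, never called): wordWrap() returns a list of
+# lines no longer than the requested width, with empty strings marking
+# paragraph breaks (and a trailing empty string at the end), mirroring the
+# assertion in test_doubleNewline above.
+def _wordWrapSketch():
+    lines = text.wordWrap("et\n\nphone\nhome.", 72)
+    assert lines == ["et", "", "phone home.", ""]
+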
+
+class LineTest(unittest.TestCase):
+    """
+    Tests for L{isMultiline} and L{endsInNewline}.
+    """
+    def test_isMultiline(self):
+        """
+        L{text.isMultiline} returns C{True} if the string has a newline in it.
+        """
+        s = 'This code\n "breaks."'
+        m = text.isMultiline(s)
+        self.assertTrue(m)
+
+        s = 'This code does not "break."'
+        m = text.isMultiline(s)
+        self.assertFalse(m)
+
+
+    def test_endsInNewline(self):
+        """
+        L{text.endsInNewline} returns C{True} if the string ends in a newline.
+        """
+        s = 'newline\n'
+        m = text.endsInNewline(s)
+        self.assertTrue(m)
+
+        s = 'oldline'
+        m = text.endsInNewline(s)
+        self.assertFalse(m)
+
+
+
+class StringyStringTest(unittest.TestCase):
+    """
+    Tests for L{text.stringyString}.
+    """
+    def test_tuple(self):
+        """
+        Tuple elements are displayed on separate lines.
+        """
+        s = ('a', 'b')
+        m = text.stringyString(s)
+        self.assertEqual(m, '(a,\n b,)\n')
+
+
+    def test_dict(self):
+        """
+        Dict elements are displayed using C{str()}.
+        """
+        s = {'a': 0}
+        m = text.stringyString(s)
+        self.assertEqual(m, '{a: 0}')
+
+
+    def test_list(self):
+        """
+        List elements are displayed on separate lines using C{str()}.
+        """
+        s = ['a', 'b']
+        m = text.stringyString(s)
+        self.assertEqual(m, '[a,\n b,]\n')
+
+
+
+class SplitTest(unittest.TestCase):
+    """
+    Tests for L{text.splitQuoted}.
+    """
+    def test_oneWord(self):
+        """
+        Splitting strings with one-word phrases.
+        """
+        s = 'This code "works."'
+        r = text.splitQuoted(s)
+        self.assertEqual(['This', 'code', 'works.'], r)
+
+
+    def test_multiWord(self):
+        s = 'The "hairy monkey" likes pie.'
+        r = text.splitQuoted(s)
+        self.assertEqual(['The', 'hairy monkey', 'likes', 'pie.'], r)
+
+    # Some of the many tests that would fail:
+
+    #def test_preserveWhitespace(self):
+    #    phrase = '"MANY     SPACES"'
+    #    s = 'With %s between.' % (phrase,)
+    #    r = text.splitQuoted(s)
+    #    self.assertEqual(['With', phrase, 'between.'], r)
+
+    #def test_escapedSpace(self):
+    #    s = r"One\ Phrase"
+    #    r = text.splitQuoted(s)
+    #    self.assertEqual(["One Phrase"], r)
+
+
+
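+# L{text.strFile} reports whether a string occurs in a file-like object; the
+# cases below probe matches of various lengths and positions in a StringIO,
+# plus a case-insensitive match via the third argument.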
+class StrFileTest(unittest.TestCase):
+    def setUp(self):
+        self.io = StringIO("this is a test string")
+
+    def tearDown(self):
+        pass
+
+    def test_1_f(self):
+        self.assertEqual(False, text.strFile("x", self.io))
+
+    def test_1_1(self):
+        self.assertEqual(True, text.strFile("t", self.io))
+
+    def test_1_2(self):
+        self.assertEqual(True, text.strFile("h", self.io))
+
+    def test_1_3(self):
+        self.assertEqual(True, text.strFile("i", self.io))
+
+    def test_1_4(self):
+        self.assertEqual(True, text.strFile("s", self.io))
+
+    def test_1_5(self):
+        self.assertEqual(True, text.strFile("n", self.io))
+
+    def test_1_6(self):
+        self.assertEqual(True, text.strFile("g", self.io))
+
+    def test_3_1(self):
+        self.assertEqual(True, text.strFile("thi", self.io))
+
+    def test_3_2(self):
+        self.assertEqual(True, text.strFile("his", self.io))
+
+    def test_3_3(self):
+        self.assertEqual(True, text.strFile("is ", self.io))
+
+    def test_3_4(self):
+        self.assertEqual(True, text.strFile("ing", self.io))
+
+    def test_3_f(self):
+        self.assertEqual(False, text.strFile("bla", self.io))
+
+    def test_large_1(self):
+        self.assertEqual(True, text.strFile("this is a test", self.io))
+
+    def test_large_2(self):
+        self.assertEqual(True, text.strFile("is a test string", self.io))
+
+    def test_large_f(self):
+        self.assertEqual(False, text.strFile("ds jhfsa k fdas", self.io))
+
+    def test_overlarge_f(self):
+        self.assertEqual(False, text.strFile("djhsakj dhsa fkhsa s,mdbnfsauiw bndasdf hreew", self.io))
+
+    def test_self(self):
+        self.assertEqual(True, text.strFile("this is a test string", self.io))
+
+    def test_insensitive(self):
+        self.assertEqual(True, text.strFile("ThIs is A test STRING", self.io, False))
+
diff --git a/ThirdParty/Twisted/twisted/test/test_threadable.py b/ThirdParty/Twisted/twisted/test/test_threadable.py
new file mode 100644
index 0000000..1227bc7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_threadable.py
@@ -0,0 +1,132 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.python.threadable}.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, pickle
+
+try:
+    import threading
+except ImportError:
+    threadingSkip = "Platform lacks thread support"
+else:
+    threadingSkip = None
+
+from twisted.python.compat import _PY3
+from twisted.trial import unittest
+
+from twisted.python import threadable
+
+class TestObject:
+    synchronized = ['aMethod']
+
+    x = -1
+    y = 1
+
+    def aMethod(self):
+        for i in range(10):
+            self.x, self.y = self.y, self.x
+            self.z = self.x + self.y
+            assert self.z == 0, "z == %d, not 0 as expected" % (self.z,)
+
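+# synchronize() rewraps every method named in the ``synchronized`` class
+# attribute so that calls are serialized through a per-instance lock; without
+# that lock the swap-and-sum assertions in aMethod() could fail when several
+# threads interleave.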
+threadable.synchronize(TestObject)
+
+class SynchronizationTestCase(unittest.SynchronousTestCase):
+    def setUp(self):
+        """
+        Reduce the CPython check interval so that thread switches happen much
+        more often, hopefully exercising more possible race conditions.
+        """
+        if _PY3:
+            if getattr(sys, 'getswitchinterval', None) is not None:
+                self.addCleanup(sys.setswitchinterval, sys.getswitchinterval())
+                sys.setswitchinterval(0.0000001)
+        else:
+            if getattr(sys, 'getcheckinterval', None) is not None:
+                self.addCleanup(sys.setcheckinterval, sys.getcheckinterval())
+                sys.setcheckinterval(7)
+
+
+    def test_synchronizedName(self):
+        """
+        The name of a synchronized method is unaffected by the synchronization
+        decorator.
+        """
+        self.assertEqual("aMethod", TestObject.aMethod.__name__)
+
+
+    def test_isInIOThread(self):
+        """
+        L{threadable.isInIOThread} returns C{True} if and only if it is called
+        in the same thread as L{threadable.registerAsIOThread}.
+        """
+        threadable.registerAsIOThread()
+        foreignResult = []
+        t = threading.Thread(
+            target=lambda: foreignResult.append(threadable.isInIOThread()))
+        t.start()
+        t.join()
+        self.assertFalse(
+            foreignResult[0], "Non-IO thread reported as IO thread")
+        self.assertTrue(
+            threadable.isInIOThread(), "IO thread reported as not IO thread")
+
+
+    def testThreadedSynchronization(self):
+        o = TestObject()
+
+        errors = []
+
+        def callMethodLots():
+            try:
+                for i in range(1000):
+                    o.aMethod()
+            except AssertionError as e:
+                errors.append(str(e))
+
+        threads = []
+        for x in range(5):
+            t = threading.Thread(target=callMethodLots)
+            threads.append(t)
+            t.start()
+
+        for t in threads:
+            t.join()
+
+        if errors:
+            raise unittest.FailTest(errors)
+
+    if threadingSkip is not None:
+        testThreadedSynchronization.skip = threadingSkip
+        test_isInIOThread.skip = threadingSkip
+
+
+    def testUnthreadedSynchronization(self):
+        o = TestObject()
+        for i in range(1000):
+            o.aMethod()
+
+
+
+class SerializationTestCase(unittest.SynchronousTestCase):
+    def testPickling(self):
+        lock = threadable.XLock()
+        lockType = type(lock)
+        lockPickle = pickle.dumps(lock)
+        newLock = pickle.loads(lockPickle)
+        self.assertTrue(isinstance(newLock, lockType))
+
+    if threadingSkip is not None:
+        testPickling.skip = threadingSkip
+
+
+    def testUnpickling(self):
+        lockPickle = b'ctwisted.python.threadable\nunpickle_lock\np0\n(tp1\nRp2\n.'
+        lock = pickle.loads(lockPickle)
+        newPickle = pickle.dumps(lock, 2)
+        newLock = pickle.loads(newPickle)
diff --git a/ThirdParty/Twisted/twisted/test/test_threadpool.py b/ThirdParty/Twisted/twisted/test/test_threadpool.py
new file mode 100644
index 0000000..bc02049
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_threadpool.py
@@ -0,0 +1,542 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.python.threadpool}
+"""
+
+from __future__ import division, absolute_import
+
+import pickle, time, weakref, gc, threading
+
+from twisted.python.compat import _PY3
+from twisted.trial import unittest
+from twisted.python import threadpool, threadable, failure, context
+from twisted.internet.defer import Deferred
+
+#
+# See the end of this module for the remainder of the imports.
+#
+
+class Synchronization(object):
+    failures = 0
+
+    def __init__(self, N, waiting):
+        self.N = N
+        self.waiting = waiting
+        self.lock = threading.Lock()
+        self.runs = []
+
+    def run(self):
+        # This is the testy part: this is supposed to be invoked
+        # serially from multiple threads.  If that is actually the
+        # case, we will never fail to acquire this lock.  If it is
+        # *not* the case, we might get here while someone else is
+        # holding the lock.
+        if self.lock.acquire(False):
+            if not len(self.runs) % 5:
+                time.sleep(0.0002) # Constant selected based on
+                                   # empirical data to maximize the
+                                   # chance of a quick failure if this
+                                   # code is broken.
+            self.lock.release()
+        else:
+            self.failures += 1
+
+        # This is simply the only way I can think of to wake up the test
+        # method.  It doesn't actually have anything to do with the
+        # test.
+        self.lock.acquire()
+        self.runs.append(None)
+        if len(self.runs) == self.N:
+            self.waiting.release()
+        self.lock.release()
+
+    synchronized = ["run"]
+threadable.synchronize(Synchronization)
+
+
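+# A minimal usage sketch (illustrative only, never called by the tests):
+# queue one callable in a pool and wait for it with an Event, the same
+# pattern several tests below use to detect completion.
+def _threadPoolSketch():
+    done = threading.Event()
+    tp = threadpool.ThreadPool(0, 1)
+    tp.start()
+    tp.callInThread(done.set)
+    done.wait(5)
+    tp.stop()
+    assert done.is_set()
+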
+
+class ThreadPoolTestCase(unittest.SynchronousTestCase):
+    """
+    Test threadpools.
+    """
+
+    def getTimeout(self):
+        """
+        Return number of seconds to wait before giving up.
+        """
+        return 5 # Really should be an order of magnitude less
+
+
+    def _waitForLock(self, lock):
+        # We could just use range(), but then we use an extra 30MB of memory
+        # on Python 2:
+        if _PY3:
+            items = range(1000000)
+        else:
+            items = xrange(1000000)
+        for i in items:
+            if lock.acquire(False):
+                break
+            time.sleep(1e-5)
+        else:
+            self.fail("A long time passed without succeeding")
+
+
+    def test_attributes(self):
+        """
+        L{ThreadPool.min} and L{ThreadPool.max} are set to the values passed to
+        L{ThreadPool.__init__}.
+        """
+        pool = threadpool.ThreadPool(12, 22)
+        self.assertEqual(pool.min, 12)
+        self.assertEqual(pool.max, 22)
+
+
+    def test_start(self):
+        """
+        L{ThreadPool.start} creates the minimum number of threads specified.
+        """
+        pool = threadpool.ThreadPool(0, 5)
+        pool.start()
+        self.addCleanup(pool.stop)
+        self.assertEqual(len(pool.threads), 0)
+
+        pool = threadpool.ThreadPool(3, 10)
+        self.assertEqual(len(pool.threads), 0)
+        pool.start()
+        self.addCleanup(pool.stop)
+        self.assertEqual(len(pool.threads), 3)
+
+
+    def test_threadCreationArguments(self):
+        """
+        Test that creating threads in the threadpool with application-level
+        objects as arguments doesn't result in those objects never being
+        freed, with the thread maintaining a reference to them as long as it
+        exists.
+        """
+        tp = threadpool.ThreadPool(0, 1)
+        tp.start()
+        self.addCleanup(tp.stop)
+
+        # Sanity check - no threads should have been started yet.
+        self.assertEqual(tp.threads, [])
+
+        # Here's our function
+        def worker(arg):
+            pass
+        # weakref needs an object subclass
+        class Dumb(object):
+            pass
+        # And here's the unique object
+        unique = Dumb()
+
+        workerRef = weakref.ref(worker)
+        uniqueRef = weakref.ref(unique)
+
+        # Put some work in
+        tp.callInThread(worker, unique)
+
+        # Add an event to wait for completion
+        event = threading.Event()
+        tp.callInThread(event.set)
+        event.wait(self.getTimeout())
+
+        del worker
+        del unique
+        gc.collect()
+        self.assertEqual(uniqueRef(), None)
+        self.assertEqual(workerRef(), None)
+
+
+    def test_threadCreationArgumentsCallInThreadWithCallback(self):
+        """
+        As C{test_threadCreationArguments} above, but for
+        C{callInThreadWithCallback}.
+        """
+
+        tp = threadpool.ThreadPool(0, 1)
+        tp.start()
+        self.addCleanup(tp.stop)
+
+        # Sanity check - no threads should have been started yet.
+        self.assertEqual(tp.threads, [])
+
+        # this holds references obtained in onResult
+        refdict = {} # name -> ref value
+
+        onResultWait = threading.Event()
+        onResultDone = threading.Event()
+
+        resultRef = []
+
+        # result callback
+        def onResult(success, result):
+            onResultWait.wait(self.getTimeout())
+            refdict['workerRef'] = workerRef()
+            refdict['uniqueRef'] = uniqueRef()
+            onResultDone.set()
+            resultRef.append(weakref.ref(result))
+
+        # Here's our function
+        def worker(arg, test):
+            return Dumb()
+
+        # weakref needs an object subclass
+        class Dumb(object):
+            pass
+
+        # And here's the unique object
+        unique = Dumb()
+
+        onResultRef = weakref.ref(onResult)
+        workerRef = weakref.ref(worker)
+        uniqueRef = weakref.ref(unique)
+
+        # Put some work in
+        tp.callInThreadWithCallback(onResult, worker, unique, test=unique)
+
+        del worker
+        del unique
+        gc.collect()
+
+        # let onResult collect the refs
+        onResultWait.set()
+        # wait for onResult
+        onResultDone.wait(self.getTimeout())
+
+        self.assertEqual(uniqueRef(), None)
+        self.assertEqual(workerRef(), None)
+
+        # XXX There's a race right here - has onResult in the worker thread
+        # returned and the locals in _worker holding it and the result been
+        # deleted yet?
+
+        del onResult
+        gc.collect()
+        self.assertEqual(onResultRef(), None)
+        self.assertEqual(resultRef[0](), None)
+
+
+    def test_persistence(self):
+        """
+        Threadpools can be pickled and unpickled, which should preserve the
+        number of threads and other parameters.
+        """
+        pool = threadpool.ThreadPool(7, 20)
+
+        self.assertEqual(pool.min, 7)
+        self.assertEqual(pool.max, 20)
+
+        # check that unpickled threadpool has same number of threads
+        copy = pickle.loads(pickle.dumps(pool))
+
+        self.assertEqual(copy.min, 7)
+        self.assertEqual(copy.max, 20)
+
+
+    def _threadpoolTest(self, method):
+        """
+        Test synchronization of calls made with C{method}, which should be
+        one of the mechanisms of the threadpool to execute work in threads.
+        """
+        # This test conflates two things: it tries to test both the
+        # callInThread()/dispatch() behavior of the ThreadPool and the
+        # serialization behavior of threadable.synchronize().  It would
+        # probably make more sense as two much simpler tests.
+        N = 10
+
+        tp = threadpool.ThreadPool()
+        tp.start()
+        self.addCleanup(tp.stop)
+
+        waiting = threading.Lock()
+        waiting.acquire()
+        actor = Synchronization(N, waiting)
+
+        for i in range(N):
+            method(tp, actor)
+
+        self._waitForLock(waiting)
+
+        self.failIf(actor.failures, "run() re-entered %d times" %
+                                    (actor.failures,))
+
+
+    def test_callInThread(self):
+        """
+        Call C{_threadpoolTest} with C{callInThread}.
+        """
+        return self._threadpoolTest(
+            lambda tp, actor: tp.callInThread(actor.run))
+
+
+    def test_callInThreadException(self):
+        """
+        L{ThreadPool.callInThread} logs exceptions raised by the callable it
+        is passed.
+        """
+        class NewError(Exception):
+            pass
+
+        def raiseError():
+            raise NewError()
+
+        tp = threadpool.ThreadPool(0, 1)
+        tp.callInThread(raiseError)
+        tp.start()
+        tp.stop()
+
+        errors = self.flushLoggedErrors(NewError)
+        self.assertEqual(len(errors), 1)
+
+
+    def test_callInThreadWithCallback(self):
+        """
+        L{ThreadPool.callInThreadWithCallback} calls C{onResult} with
+        C{True} and the value returned by the callable supplied.
+        """
+        waiter = threading.Lock()
+        waiter.acquire()
+
+        results = []
+
+        def onResult(success, result):
+            waiter.release()
+            results.append(success)
+            results.append(result)
+
+        tp = threadpool.ThreadPool(0, 1)
+        tp.callInThreadWithCallback(onResult, lambda : "test")
+        tp.start()
+
+        try:
+            self._waitForLock(waiter)
+        finally:
+            tp.stop()
+
+        self.assertTrue(results[0])
+        self.assertEqual(results[1], "test")
+
+
+    def test_callInThreadWithCallbackExceptionInCallback(self):
+        """
+        L{ThreadPool.callInThreadWithCallback} calls C{onResult} with
+        C{False} and a L{failure.Failure} wrapping the exception raised by
+        the callable supplied.
+        """
+        class NewError(Exception):
+            pass
+
+        def raiseError():
+            raise NewError()
+
+        waiter = threading.Lock()
+        waiter.acquire()
+
+        results = []
+
+        def onResult(success, result):
+            waiter.release()
+            results.append(success)
+            results.append(result)
+
+        tp = threadpool.ThreadPool(0, 1)
+        tp.callInThreadWithCallback(onResult, raiseError)
+        tp.start()
+
+        try:
+            self._waitForLock(waiter)
+        finally:
+            tp.stop()
+
+        self.assertFalse(results[0])
+        self.assertTrue(isinstance(results[1], failure.Failure))
+        self.assertTrue(issubclass(results[1].type, NewError))
+
+
+    def test_callInThreadWithCallbackExceptionInOnResult(self):
+        """
+        L{ThreadPool.callInThreadWithCallback} logs the exception raised by
+        C{onResult}.
+        """
+        class NewError(Exception):
+            pass
+
+        waiter = threading.Lock()
+        waiter.acquire()
+
+        results = []
+
+        def onResult(success, result):
+            results.append(success)
+            results.append(result)
+            raise NewError()
+
+        tp = threadpool.ThreadPool(0, 1)
+        tp.callInThreadWithCallback(onResult, lambda : None)
+        tp.callInThread(waiter.release)
+        tp.start()
+
+        try:
+            self._waitForLock(waiter)
+        finally:
+            tp.stop()
+
+        errors = self.flushLoggedErrors(NewError)
+        self.assertEqual(len(errors), 1)
+
+        self.assertTrue(results[0])
+        self.assertEqual(results[1], None)
+
+
+    def test_callbackThread(self):
+        """
+        L{ThreadPool.callInThreadWithCallback} calls the function it is
+        given and the C{onResult} callback in the same thread.
+        """
+        threadIds = []
+
+        event = threading.Event()
+
+        def onResult(success, result):
+            threadIds.append(threading.currentThread().ident)
+            event.set()
+
+        def func():
+            threadIds.append(threading.currentThread().ident)
+
+        tp = threadpool.ThreadPool(0, 1)
+        tp.callInThreadWithCallback(onResult, func)
+        tp.start()
+        self.addCleanup(tp.stop)
+
+        event.wait(self.getTimeout())
+        self.assertEqual(len(threadIds), 2)
+        self.assertEqual(threadIds[0], threadIds[1])
+
+
+    def test_callbackContext(self):
+        """
+        The context in which L{ThreadPool.callInThreadWithCallback} is invoked
+        is shared with the contexts in which the callable and the C{onResult}
+        callback are invoked.
+        """
+        myctx = context.theContextTracker.currentContext().contexts[-1]
+        myctx['testing'] = 'this must be present'
+
+        contexts = []
+
+        event = threading.Event()
+
+        def onResult(success, result):
+            ctx = context.theContextTracker.currentContext().contexts[-1]
+            contexts.append(ctx)
+            event.set()
+
+        def func():
+            ctx = context.theContextTracker.currentContext().contexts[-1]
+            contexts.append(ctx)
+
+        tp = threadpool.ThreadPool(0, 1)
+        tp.callInThreadWithCallback(onResult, func)
+        tp.start()
+        self.addCleanup(tp.stop)
+
+        event.wait(self.getTimeout())
+
+        self.assertEqual(len(contexts), 2)
+        self.assertEqual(myctx, contexts[0])
+        self.assertEqual(myctx, contexts[1])
+
+
+    def test_existingWork(self):
+        """
+        Work added to the threadpool before it is started should be executed
+        once the threadpool starts: this is verified by having the queued work
+        release a previously acquired lock.
+        """
+        waiter = threading.Lock()
+        waiter.acquire()
+
+        tp = threadpool.ThreadPool(0, 1)
+        tp.callInThread(waiter.release) # before start()
+        tp.start()
+
+        try:
+            self._waitForLock(waiter)
+        finally:
+            tp.stop()
+
+
+
+class RaceConditionTestCase(unittest.SynchronousTestCase):
+
+    def getTimeout(self):
+        """
+        Return number of seconds to wait before giving up.
+        """
+        return 5 # Really should be an order of magnitude less
+
+
+    def setUp(self):
+        self.event = threading.Event()
+        self.threadpool = threadpool.ThreadPool(0, 10)
+        self.threadpool.start()
+
+
+    def tearDown(self):
+        del self.event
+        self.threadpool.stop()
+        del self.threadpool
+
+
+    def test_synchronization(self):
+        """
+        Test a race condition: ensure that actions run in the pool synchronize
+        with actions run in the main thread.
+        """
+        timeout = self.getTimeout()
+        self.threadpool.callInThread(self.event.set)
+        self.event.wait(timeout)
+        self.event.clear()
+        for i in range(3):
+            self.threadpool.callInThread(self.event.wait)
+        self.threadpool.callInThread(self.event.set)
+        self.event.wait(timeout)
+        if not self.event.isSet():
+            self.event.set()
+            self.fail("Actions not synchronized")
+
+
+    def test_singleThread(self):
+        """
+        The submission of a new job to a thread pool in response to the
+        C{onResult} callback does not cause a new thread to be added to the
+        thread pool.
+
+        This requires the thread which calls C{onResult} to have first
+        marked itself as available so that when the new job is queued, that
+        thread may be considered to run it.  This is desirable so that when
+        only N jobs are ever being executed in the thread pool at once, only
+        N threads will ever be created.
+        """
+        # Ensure no threads running
+        self.assertEqual(self.threadpool.workers, 0)
+
+        event = threading.Event()
+        event.clear()
+
+        def onResult(success, counter):
+            event.set()
+
+        for i in range(10):
+            self.threadpool.callInThreadWithCallback(
+                onResult, lambda: None)
+            event.wait()
+            event.clear()
+
+        self.assertEqual(self.threadpool.workers, 1)
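
Taken together, these cases pin down the ThreadPool contract: work queued
before start() runs once the pool starts, exceptions from callInThread() are
logged, and callInThreadWithCallback() invokes onResult(success, result) in
the worker thread. A minimal usage sketch based only on the API exercised
above (names such as work/onResult are illustrative):

    from twisted.python import threadpool

    results = []

    def work(x, y=1):
        return x + y

    def onResult(success, result):
        # success is True and result is the return value, or success is
        # False and result is a Failure wrapping the raised exception.
        results.append((success, result))

    tp = threadpool.ThreadPool(minthreads=0, maxthreads=2)
    tp.callInThread(work, 1)                 # fire and forget; errors are logged
    tp.callInThreadWithCallback(onResult, work, 2, y=3)
    tp.start()                               # work queued before start() runs now
    # ... once the queued work has completed ...
    tp.stop()
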
diff --git a/ThirdParty/Twisted/twisted/test/test_threads.py b/ThirdParty/Twisted/twisted/test/test_threads.py
new file mode 100644
index 0000000..69ee19d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_threads.py
@@ -0,0 +1,421 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Test methods in twisted.internet.threads and reactor thread APIs.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, os, time
+
+from twisted.trial import unittest
+
+from twisted.python.compat import _PY3
+from twisted.internet import reactor, defer, interfaces, threads, protocol, error
+from twisted.python import failure, threadable, log, threadpool
+
+if _PY3:
+    xrange = range
+
+
+class ReactorThreadsTestCase(unittest.TestCase):
+    """
+    Tests for the reactor threading API.
+    """
+
+    def test_suggestThreadPoolSize(self):
+        """
+        Try to change maximum number of threads.
+        """
+        reactor.suggestThreadPoolSize(34)
+        self.assertEqual(reactor.threadpool.max, 34)
+        reactor.suggestThreadPoolSize(4)
+        self.assertEqual(reactor.threadpool.max, 4)
+
+
+    def _waitForThread(self):
+        """
+        The reactor's threadpool is only available when the reactor is running,
+        so to get sane behavior during the tests we make a dummy
+        L{threads.deferToThread} call.
+        """
+        return threads.deferToThread(time.sleep, 0)
+
+
+    def test_callInThread(self):
+        """
+        Test callInThread functionality: set a C{threading.Event} from a
+        threaded function and check that the function ran outside the I/O
+        thread.
+        """
+        def cb(ign):
+            waiter = threading.Event()
+            result = []
+            def threadedFunc():
+                result.append(threadable.isInIOThread())
+                waiter.set()
+
+            reactor.callInThread(threadedFunc)
+            waiter.wait(120)
+            if not waiter.isSet():
+                self.fail("Timed out waiting for event.")
+            else:
+                self.assertEqual(result, [False])
+        return self._waitForThread().addCallback(cb)
+
+
+    def test_callFromThread(self):
+        """
+        Test callFromThread functionality: from the main thread, and from
+        another thread.
+        """
+        def cb(ign):
+            firedByReactorThread = defer.Deferred()
+            firedByOtherThread = defer.Deferred()
+
+            def threadedFunc():
+                reactor.callFromThread(firedByOtherThread.callback, None)
+
+            reactor.callInThread(threadedFunc)
+            reactor.callFromThread(firedByReactorThread.callback, None)
+
+            return defer.DeferredList(
+                [firedByReactorThread, firedByOtherThread],
+                fireOnOneErrback=True)
+        return self._waitForThread().addCallback(cb)
+
+
+    def test_wakerOverflow(self):
+        """
+        Try to make an overflow on the reactor waker using callFromThread.
+        """
+        def cb(ign):
+            self.failure = None
+            waiter = threading.Event()
+            def threadedFunction():
+                # Hopefully a hundred thousand queued calls is enough to
+                # trigger the error condition
+                for i in xrange(100000):
+                    try:
+                        reactor.callFromThread(lambda: None)
+                    except:
+                        self.failure = failure.Failure()
+                        break
+                waiter.set()
+            reactor.callInThread(threadedFunction)
+            waiter.wait(120)
+            if not waiter.isSet():
+                self.fail("Timed out waiting for event")
+            if self.failure is not None:
+                return defer.fail(self.failure)
+        return self._waitForThread().addCallback(cb)
+
+    def _testBlockingCallFromThread(self, reactorFunc):
+        """
+        Utility method to test L{threads.blockingCallFromThread}.
+        """
+        waiter = threading.Event()
+        results = []
+        errors = []
+        def cb1(ign):
+            def threadedFunc():
+                try:
+                    r = threads.blockingCallFromThread(reactor, reactorFunc)
+                except Exception as e:
+                    errors.append(e)
+                else:
+                    results.append(r)
+                waiter.set()
+
+            reactor.callInThread(threadedFunc)
+            return threads.deferToThread(waiter.wait, self.getTimeout())
+
+        def cb2(ign):
+            if not waiter.isSet():
+                self.fail("Timed out waiting for event")
+            return results, errors
+
+        return self._waitForThread().addCallback(cb1).addBoth(cb2)
+
+    def test_blockingCallFromThread(self):
+        """
+        Test blockingCallFromThread facility: create a thread, call a function
+        in the reactor using L{threads.blockingCallFromThread}, and verify the
+        result returned.
+        """
+        def reactorFunc():
+            return defer.succeed("foo")
+        def cb(res):
+            self.assertEqual(res[0][0], "foo")
+
+        return self._testBlockingCallFromThread(reactorFunc).addCallback(cb)
+
+    def test_asyncBlockingCallFromThread(self):
+        """
+        Test blockingCallFromThread as above, but be sure the resulting
+        Deferred is not already fired.
+        """
+        def reactorFunc():
+            d = defer.Deferred()
+            reactor.callLater(0.1, d.callback, "egg")
+            return d
+        def cb(res):
+            self.assertEqual(res[0][0], "egg")
+
+        return self._testBlockingCallFromThread(reactorFunc).addCallback(cb)
+
+    def test_errorBlockingCallFromThread(self):
+        """
+        Test error report for blockingCallFromThread.
+        """
+        def reactorFunc():
+            return defer.fail(RuntimeError("bar"))
+        def cb(res):
+            self.assert_(isinstance(res[1][0], RuntimeError))
+            self.assertEqual(res[1][0].args[0], "bar")
+
+        return self._testBlockingCallFromThread(reactorFunc).addCallback(cb)
+
+    def test_asyncErrorBlockingCallFromThread(self):
+        """
+        Test error report for blockingCallFromThread as above, but be sure the
+        resulting Deferred is not already fired.
+        """
+        def reactorFunc():
+            d = defer.Deferred()
+            reactor.callLater(0.1, d.errback, RuntimeError("spam"))
+            return d
+        def cb(res):
+            self.assert_(isinstance(res[1][0], RuntimeError))
+            self.assertEqual(res[1][0].args[0], "spam")
+
+        return self._testBlockingCallFromThread(reactorFunc).addCallback(cb)
+
+
+class Counter:
+    index = 0
+    problem = 0
+
+    def add(self):
+        """A non thread-safe method."""
+        next = self.index + 1
+        # another thread could jump in here and increment self.index on us
+        if next != self.index + 1:
+            self.problem = 1
+            raise ValueError
+        # or here, same issue but we wouldn't catch it. We'd overwrite
+        # their results, and the index will have lost a count. If
+        # several threads get in here, we will actually make the count
+        # go backwards when we overwrite it.
+        self.index = next
+
+
+
+class DeferredResultTestCase(unittest.TestCase):
+    """
+    Test twisted.internet.threads.
+    """
+
+    def setUp(self):
+        reactor.suggestThreadPoolSize(8)
+
+
+    def tearDown(self):
+        reactor.suggestThreadPoolSize(0)
+
+
+    def test_callMultiple(self):
+        """
+        L{threads.callMultipleInThread} calls multiple functions in a thread.
+        """
+        L = []
+        N = 10
+        d = defer.Deferred()
+
+        def finished():
+            self.assertEqual(L, list(range(N)))
+            d.callback(None)
+
+        threads.callMultipleInThread([
+            (L.append, (i,), {}) for i in xrange(N)
+            ] + [(reactor.callFromThread, (finished,), {})])
+        return d
+
+
+    def test_deferredResult(self):
+        """
+        L{threads.deferToThread} executes the function passed, and correctly
+        handles the positional and keyword arguments given.
+        """
+        d = threads.deferToThread(lambda x, y=5: x + y, 3, y=4)
+        d.addCallback(self.assertEqual, 7)
+        return d
+
+
+    def test_deferredFailure(self):
+        """
+        Check that L{threads.deferToThread} returns a failure object
+        with an appropriate exception instance when the called
+        function raises an exception.
+        """
+        class NewError(Exception):
+            pass
+        def raiseError():
+            raise NewError()
+        d = threads.deferToThread(raiseError)
+        return self.assertFailure(d, NewError)
+
+
+    def test_deferredFailureAfterSuccess(self):
+        """
+        Check that a successful L{threads.deferToThread} followed by one
+        that raises an exception correctly results in a failure.
+        """
+        # set up a condition that causes cReactor to hang. These conditions
+        # can also be set by other tests when the full test suite is run in
+        # alphabetical order (test_flow.FlowTest.testThreaded followed by
+        # test_internet.ReactorCoreTestCase.testStop, to be precise). By
+        # setting them up explicitly here, we can reproduce the hang in a
+        # single precise test case instead of depending upon side effects of
+        # other tests.
+        #
+        # alas, this test appears to flunk the default reactor too
+
+        d = threads.deferToThread(lambda: None)
+        d.addCallback(lambda ign: threads.deferToThread(lambda: 1//0))
+        return self.assertFailure(d, ZeroDivisionError)
+
+
+
+class DeferToThreadPoolTestCase(unittest.TestCase):
+    """
+    Test L{twisted.internet.threads.deferToThreadPool}.
+    """
+
+    def setUp(self):
+        self.tp = threadpool.ThreadPool(0, 8)
+        self.tp.start()
+
+
+    def tearDown(self):
+        self.tp.stop()
+
+
+    def test_deferredResult(self):
+        """
+        L{threads.deferToThreadPool} executes the function passed, and
+        correctly handles the positional and keyword arguments given.
+        """
+        d = threads.deferToThreadPool(reactor, self.tp,
+                                      lambda x, y=5: x + y, 3, y=4)
+        d.addCallback(self.assertEqual, 7)
+        return d
+
+
+    def test_deferredFailure(self):
+        """
+        Check that L{threads.deferToThreadPool} returns a failure object with an
+        appropriate exception instance when the called function raises an
+        exception.
+        """
+        class NewError(Exception):
+            pass
+        def raiseError():
+            raise NewError()
+        d = threads.deferToThreadPool(reactor, self.tp, raiseError)
+        return self.assertFailure(d, NewError)
+
+
+
+_callBeforeStartupProgram = """
+import time
+import %(reactor)s
+%(reactor)s.install()
+
+from twisted.internet import reactor
+
+def threadedCall():
+    print('threaded call')
+
+reactor.callInThread(threadedCall)
+
+# Spin very briefly to try to give the thread a chance to run, if it
+# is going to.  Is there a better way to achieve this behavior?
+for i in range(100):
+    time.sleep(0.0)
+"""
+
+
+class ThreadStartupProcessProtocol(protocol.ProcessProtocol):
+    def __init__(self, finished):
+        self.finished = finished
+        self.out = []
+        self.err = []
+
+    def outReceived(self, out):
+        self.out.append(out)
+
+    def errReceived(self, err):
+        self.err.append(err)
+
+    def processEnded(self, reason):
+        self.finished.callback((self.out, self.err, reason))
+
+
+
+class StartupBehaviorTestCase(unittest.TestCase):
+    """
+    Test cases for the behavior of the reactor threadpool near startup
+    boundary conditions.
+
+    In particular, this asserts that no threaded calls are attempted
+    until the reactor starts up, that calls attempted before it starts
+    are in fact executed once it has started, and that in both cases,
+    the reactor properly cleans itself up (which is tested for
+    somewhat implicitly, by requiring a child process be able to exit,
+    something it cannot do unless the threadpool has been properly
+    torn down).
+    """
+
+
+    def testCallBeforeStartupUnexecuted(self):
+        progname = self.mktemp()
+        progfile = file(progname, 'w')
+        progfile.write(_callBeforeStartupProgram % {'reactor': reactor.__module__})
+        progfile.close()
+
+        def programFinished(result):
+            (out, err, reason) = result
+            if reason.check(error.ProcessTerminated):
+                self.fail("Process did not exit cleanly (out: %s err: %s)" % (out, err))
+
+            if err:
+                log.msg("Unexpected output on standard error: %s" % (err,))
+            self.failIf(out, "Expected no output, instead received:\n%s" % (out,))
+
+        def programTimeout(err):
+            err.trap(error.TimeoutError)
+            proto.signalProcess('KILL')
+            return err
+
+        env = os.environ.copy()
+        env['PYTHONPATH'] = os.pathsep.join(sys.path)
+        d = defer.Deferred().addCallbacks(programFinished, programTimeout)
+        proto = ThreadStartupProcessProtocol(d)
+        reactor.spawnProcess(proto, sys.executable, ('python', progname), env)
+        return d
+
+
+
+if interfaces.IReactorThreads(reactor, None) is None:
+    for cls in (ReactorThreadsTestCase,
+                DeferredResultTestCase,
+                StartupBehaviorTestCase):
+        cls.skip = "No thread support, nothing to test here."
+else:
+    import threading
+
+if interfaces.IReactorProcess(reactor, None) is None:
+    for cls in (StartupBehaviorTestCase,):
+        cls.skip = "No process support, cannot run subprocess thread tests."
diff --git a/ThirdParty/Twisted/twisted/test/test_tpfile.py b/ThirdParty/Twisted/twisted/test/test_tpfile.py
new file mode 100644
index 0000000..655a166
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_tpfile.py
@@ -0,0 +1,52 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.trial import unittest
+from twisted.protocols import loopback
+from twisted.protocols import basic
+from twisted.internet import protocol, abstract
+
+import StringIO
+
+class BufferingServer(protocol.Protocol):
+    buffer = ''
+    def dataReceived(self, data):
+        self.buffer += data
+
+class FileSendingClient(protocol.Protocol):
+    def __init__(self, f):
+        self.f = f
+    
+    def connectionMade(self):
+        s = basic.FileSender()
+        d = s.beginFileTransfer(self.f, self.transport, lambda x: x)
+        d.addCallback(lambda r: self.transport.loseConnection())
+
+class FileSenderTestCase(unittest.TestCase):
+    def testSendingFile(self):
+        testStr = 'xyz' * 100 + 'abc' * 100 + '123' * 100
+        s = BufferingServer()
+        c = FileSendingClient(StringIO.StringIO(testStr))
+        
+        d = loopback.loopbackTCP(s, c)
+        d.addCallback(lambda x : self.assertEqual(s.buffer, testStr))
+        return d
+
+    def testSendingEmptyFile(self):
+        fileSender = basic.FileSender()
+        consumer = abstract.FileDescriptor()
+        consumer.connected = 1
+        emptyFile = StringIO.StringIO('')
+
+        d = fileSender.beginFileTransfer(emptyFile, consumer, lambda x: x)
+
+        # The producer will be immediately exhausted, and so immediately
+        # unregistered
+        self.assertEqual(consumer.producer, None)
+
+        # Which means the Deferred from FileSender should have been called
+        self.failUnless(d.called,
+                        'producer unregistered but the Deferred was not called')
+
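
A minimal sketch of the FileSender usage exercised above, assuming a connected
protocol whose transport acts as the consumer (sendFile is an illustrative
helper, not part of the module):

    from twisted.protocols import basic

    def sendFile(transport, fileObject):
        sender = basic.FileSender()
        d = sender.beginFileTransfer(fileObject, transport, lambda data: data)
        # The Deferred fires with the last byte sent once the file is
        # exhausted and the producer has been unregistered.
        d.addCallback(lambda lastByte: transport.loseConnection())
        return d
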
diff --git a/ThirdParty/Twisted/twisted/test/test_twistd.py b/ThirdParty/Twisted/twisted/test/test_twistd.py
new file mode 100644
index 0000000..d8ae688
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_twistd.py
@@ -0,0 +1,1549 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.application.app} and L{twisted.scripts.twistd}.
+"""
+
+import signal, inspect, errno
+
+import os, sys, StringIO
+
+try:
+    import pwd, grp
+except ImportError:
+    pwd = grp = None
+
+try:
+    import cPickle as pickle
+except ImportError:
+    import pickle
+
+from zope.interface import implements
+from zope.interface.verify import verifyObject
+
+from twisted.trial import unittest
+from twisted.test.test_process import MockOS
+
+from twisted import plugin
+from twisted.application.service import IServiceMaker
+from twisted.application import service, app, reactors
+from twisted.scripts import twistd
+from twisted.python import log
+from twisted.python.usage import UsageError
+from twisted.python.log import ILogObserver
+from twisted.python.versions import Version
+from twisted.python.components import Componentized
+from twisted.internet.defer import Deferred
+from twisted.internet.interfaces import IReactorDaemonize
+from twisted.python.fakepwd import UserDatabase
+
+try:
+    from twisted.python import syslog
+except ImportError:
+    syslog = None
+
+try:
+    from twisted.scripts import _twistd_unix
+except ImportError:
+    _twistd_unix = None
+else:
+    from twisted.scripts._twistd_unix import UnixApplicationRunner
+    from twisted.scripts._twistd_unix import UnixAppLogger
+
+try:
+    import profile
+except ImportError:
+    profile = None
+
+try:
+    import hotshot
+    import hotshot.stats
+except (ImportError, SystemExit):
+    # For some reason, hotshot.stats seems to raise SystemExit on some
+    # distributions, probably when considered non-free.  See the import of
+    # this module in twisted.application.app for more details.
+    hotshot = None
+
+try:
+    import pstats
+    import cProfile
+except ImportError:
+    cProfile = None
+
+if getattr(os, 'setuid', None) is None:
+    setuidSkip = "Platform does not support --uid/--gid twistd options."
+else:
+    setuidSkip = None
+
+
+def patchUserDatabase(patch, user, uid, group, gid):
+    """
+    Patch L{pwd.getpwnam} so that it behaves as though only one user exists
+    and patch L{grp.getgrnam} so that it behaves as though only one group
+    exists.
+
+    @param patch: A function like L{TestCase.patch} which will be used to
+        install the fake implementations.
+
+    @type user: C{str}
+    @param user: The name of the single user which will exist.
+
+    @type uid: C{int}
+    @param uid: The UID of the single user which will exist.
+
+    @type group: C{str}
+    @param group: The name of the single group which will exist.
+
+    @type gid: C{int}
+    @param gid: The GID of the single group which will exist.
+    """
+    # Try not to be an unverified fake, but try not to depend on quirks of
+    # the system either (eg, run as a process with a uid and gid which
+    # equal each other, and so doesn't reliably test that uid is used where
+    # uid should be used and gid is used where gid should be used). -exarkun
+    pwent = pwd.getpwuid(os.getuid())
+    grent = grp.getgrgid(os.getgid())
+
+    database = UserDatabase()
+    database.addUser(
+        user, pwent.pw_passwd, uid, pwent.pw_gid,
+        pwent.pw_gecos, pwent.pw_dir, pwent.pw_shell)
+
+    def getgrnam(name):
+        result = list(grent)
+        result[result.index(grent.gr_name)] = group
+        result[result.index(grent.gr_gid)] = gid
+        result = tuple(result)
+        return {group: result}[name]
+
+    patch(pwd, "getpwnam", database.getpwnam)
+    patch(grp, "getgrnam", getgrnam)
+
+
+
+class MockServiceMaker(object):
+    """
+    A non-implementation of L{twisted.application.service.IServiceMaker}.
+    """
+    tapname = 'ueoa'
+
+    def makeService(self, options):
+        """
+        Take a L{usage.Options} instance and return a
+        L{service.IService} provider.
+        """
+        self.options = options
+        self.service = service.Service()
+        return self.service
+
+
+
+class CrippledAppLogger(app.AppLogger):
+    """
+    @see: CrippledApplicationRunner.
+    """
+
+    def start(self, application):
+        pass
+
+
+
+class CrippledApplicationRunner(twistd._SomeApplicationRunner):
+    """
+    An application runner that disables the platform-specific runner and
+    other side-effect-heavy code so that it can be used without actually
+    running any environment-affecting code.
+    """
+    loggerFactory = CrippledAppLogger
+
+    def preApplication(self):
+        pass
+
+
+    def postApplication(self):
+        pass
+
+
+
+class ServerOptionsTest(unittest.TestCase):
+    """
+    Non-platform-specific tests for the platform-specific ServerOptions class.
+    """
+    def test_subCommands(self):
+        """
+        subCommands is built from IServiceMaker plugins, and is sorted
+        alphabetically.
+        """
+        class FakePlugin(object):
+            def __init__(self, name):
+                self.tapname = name
+                self._options = 'options for ' + name
+                self.description = 'description of ' + name
+
+            def options(self):
+                return self._options
+
+        apple = FakePlugin('apple')
+        banana = FakePlugin('banana')
+        coconut = FakePlugin('coconut')
+        donut = FakePlugin('donut')
+
+        def getPlugins(interface):
+            self.assertEqual(interface, IServiceMaker)
+            yield coconut
+            yield banana
+            yield donut
+            yield apple
+
+        config = twistd.ServerOptions()
+        self.assertEqual(config._getPlugins, plugin.getPlugins)
+        config._getPlugins = getPlugins
+
+        # "subCommands is a list of 4-tuples of (command name, command
+        # shortcut, parser class, documentation)."
+        subCommands = config.subCommands
+        expectedOrder = [apple, banana, coconut, donut]
+
+        for subCommand, expectedCommand in zip(subCommands, expectedOrder):
+            name, shortcut, parserClass, documentation = subCommand
+            self.assertEqual(name, expectedCommand.tapname)
+            self.assertEqual(shortcut, None)
+            self.assertEqual(parserClass(), expectedCommand._options)
+            self.assertEqual(documentation, expectedCommand.description)
+
+
+    def test_sortedReactorHelp(self):
+        """
+        Reactor names are listed alphabetically by I{--help-reactors}.
+        """
+        class FakeReactorInstaller(object):
+            def __init__(self, name):
+                self.shortName = 'name of ' + name
+                self.description = 'description of ' + name
+
+        apple = FakeReactorInstaller('apple')
+        banana = FakeReactorInstaller('banana')
+        coconut = FakeReactorInstaller('coconut')
+        donut = FakeReactorInstaller('donut')
+
+        def getReactorTypes():
+            yield coconut
+            yield banana
+            yield donut
+            yield apple
+
+        config = twistd.ServerOptions()
+        self.assertEqual(config._getReactorTypes, reactors.getReactorTypes)
+        config._getReactorTypes = getReactorTypes
+        config.messageOutput = StringIO.StringIO()
+
+        self.assertRaises(SystemExit, config.parseOptions, ['--help-reactors'])
+        helpOutput = config.messageOutput.getvalue()
+        indexes = []
+        for reactor in apple, banana, coconut, donut:
+            def getIndex(s):
+                self.assertIn(s, helpOutput)
+                indexes.append(helpOutput.index(s))
+
+            getIndex(reactor.shortName)
+            getIndex(reactor.description)
+
+        self.assertEqual(
+            indexes, sorted(indexes),
+            'reactor descriptions were not in alphabetical order: %r' % (
+                helpOutput,))
+
+
+    def test_postOptionsSubCommandCausesNoSave(self):
+        """
+        postOptions should set no_save to True when a subcommand is used.
+        """
+        config = twistd.ServerOptions()
+        config.subCommand = 'ueoa'
+        config.postOptions()
+        self.assertEqual(config['no_save'], True)
+
+
+    def test_postOptionsNoSubCommandSavesAsUsual(self):
+        """
+        If no sub command is used, postOptions should not touch no_save.
+        """
+        config = twistd.ServerOptions()
+        config.postOptions()
+        self.assertEqual(config['no_save'], False)
+
+
+    def test_listAllProfilers(self):
+        """
+        All the profilers that can be used in L{app.AppProfiler} are listed in
+        the help output.
+        """
+        config = twistd.ServerOptions()
+        helpOutput = str(config)
+        for profiler in app.AppProfiler.profilers:
+            self.assertIn(profiler, helpOutput)
+
+
+    def test_defaultUmask(self):
+        """
+        The default value for the C{umask} option is C{None}.
+        """
+        config = twistd.ServerOptions()
+        self.assertEqual(config['umask'], None)
+
+
+    def test_umask(self):
+        """
+        The value given for the C{umask} option is parsed as an octal integer
+        literal.
+        """
+        config = twistd.ServerOptions()
+        config.parseOptions(['--umask', '123'])
+        self.assertEqual(config['umask'], 83)
+        config.parseOptions(['--umask', '0123'])
+        self.assertEqual(config['umask'], 83)
+
+
+    def test_invalidUmask(self):
+        """
+        If a value is given for the C{umask} option which cannot be parsed as
+        an integer, L{UsageError} is raised by L{ServerOptions.parseOptions}.
+        """
+        config = twistd.ServerOptions()
+        self.assertRaises(UsageError, config.parseOptions, ['--umask', 'abcdef'])
+
+    if _twistd_unix is None:
+        msg = "twistd unix not available"
+        test_defaultUmask.skip = test_umask.skip = test_invalidUmask.skip = msg
+
+
+    def test_unimportableConfiguredLogObserver(self):
+        """
+        C{--logger} with an unimportable module raises a L{UsageError}.
+        """
+        config = twistd.ServerOptions()
+        e = self.assertRaises(UsageError, config.parseOptions,
+                          ['--logger', 'no.such.module.I.hope'])
+        self.assertTrue(e.args[0].startswith(
+                "Logger 'no.such.module.I.hope' could not be imported: "
+                "'no.such.module.I.hope' does not name an object"))
+        self.assertNotIn('\n', e.args[0])
+
+
+    def test_badAttributeWithConfiguredLogObserver(self):
+        """
+        C{--logger} with a non-existent object raises a L{UsageError}.
+        """
+        config = twistd.ServerOptions()
+        e = self.assertRaises(UsageError, config.parseOptions,
+                              ["--logger", "twisted.test.test_twistd.FOOBAR"])
+        self.assertTrue(e.args[0].startswith(
+                "Logger 'twisted.test.test_twistd.FOOBAR' could not be "
+                "imported: 'module' object has no attribute 'FOOBAR'"))
+        self.assertNotIn('\n', e.args[0])
+
+
+
+class TapFileTest(unittest.TestCase):
+    """
+    Test twistd-related functionality that requires a tap file on disk.
+    """
+
+    def setUp(self):
+        """
+        Create a trivial Application and put it in a tap file on disk.
+        """
+        self.tapfile = self.mktemp()
+        f = file(self.tapfile, 'wb')
+        pickle.dump(service.Application("Hi!"), f)
+        f.close()
+
+
+    def test_createOrGetApplicationWithTapFile(self):
+        """
+        Ensure that the createOrGetApplication call that 'twistd -f foo.tap'
+        makes will load the Application out of foo.tap.
+        """
+        config = twistd.ServerOptions()
+        config.parseOptions(['-f', self.tapfile])
+        application = CrippledApplicationRunner(config).createOrGetApplication()
+        self.assertEqual(service.IService(application).name, 'Hi!')
+
+
+
+class TestLoggerFactory(object):
+    """
+    A logger factory for L{TestApplicationRunner}.
+    """
+
+    def __init__(self, runner):
+        self.runner = runner
+
+
+    def start(self, application):
+        """
+        Save the logging start on the C{runner} instance.
+        """
+        self.runner.order.append("log")
+        self.runner.hadApplicationLogObserver = hasattr(self.runner,
+                                                        'application')
+
+
+    def stop(self):
+        """
+        Don't log anything.
+        """
+
+
+
+class TestApplicationRunner(app.ApplicationRunner):
+    """
+    An ApplicationRunner which tracks the environment in which its methods are
+    called.
+    """
+
+    def __init__(self, options):
+        app.ApplicationRunner.__init__(self, options)
+        self.order = []
+        self.logger = TestLoggerFactory(self)
+
+
+    def preApplication(self):
+        self.order.append("pre")
+        self.hadApplicationPreApplication = hasattr(self, 'application')
+
+
+    def postApplication(self):
+        self.order.append("post")
+        self.hadApplicationPostApplication = hasattr(self, 'application')
+
+
+
+class ApplicationRunnerTest(unittest.TestCase):
+    """
+    Non-platform-specific tests for the platform-specific ApplicationRunner.
+    """
+    def setUp(self):
+        config = twistd.ServerOptions()
+        self.serviceMaker = MockServiceMaker()
+        # Set up a config object like it's been parsed with a subcommand
+        config.loadedPlugins = {'test_command': self.serviceMaker}
+        config.subOptions = object()
+        config.subCommand = 'test_command'
+        self.config = config
+
+
+    def test_applicationRunnerGetsCorrectApplication(self):
+        """
+        Ensure that a twistd plugin gets used in appropriate ways: it
+        is passed its Options instance, and the service it returns is
+        added to the application.
+        """
+        arunner = CrippledApplicationRunner(self.config)
+        arunner.run()
+
+        self.assertIdentical(
+            self.serviceMaker.options, self.config.subOptions,
+            "ServiceMaker.makeService needs to be passed the correct "
+            "sub Command object.")
+        self.assertIdentical(
+            self.serviceMaker.service,
+            service.IService(arunner.application).services[0],
+            "ServiceMaker.makeService's result needs to be set as a child "
+            "of the Application.")
+
+
+    def test_preAndPostApplication(self):
+        """
+        Test that the preApplication and postApplication methods are
+        called by ApplicationRunner.run() when appropriate.
+        """
+        s = TestApplicationRunner(self.config)
+        s.run()
+        self.assertFalse(s.hadApplicationPreApplication)
+        self.assertTrue(s.hadApplicationPostApplication)
+        self.assertTrue(s.hadApplicationLogObserver)
+        self.assertEqual(s.order, ["pre", "log", "post"])
+
+
+    def _applicationStartsWithConfiguredID(self, argv, uid, gid):
+        """
+        Assert that given a particular command line, an application is started
+        as a particular UID/GID.
+
+        @param argv: A list of strings giving the options to parse.
+        @param uid: An integer giving the expected UID.
+        @param gid: An integer giving the expected GID.
+        """
+        self.config.parseOptions(argv)
+
+        events = []
+        class FakeUnixApplicationRunner(twistd._SomeApplicationRunner):
+            def setupEnvironment(self, chroot, rundir, nodaemon, umask,
+                                 pidfile):
+                events.append('environment')
+
+            def shedPrivileges(self, euid, uid, gid):
+                events.append(('privileges', euid, uid, gid))
+
+            def startReactor(self, reactor, oldstdout, oldstderr):
+                events.append('reactor')
+
+            def removePID(self, pidfile):
+                pass
+
+
+        class FakeService(object):
+            implements(service.IService, service.IProcess)
+
+            processName = None
+            uid = None
+            gid = None
+
+            def setName(self, name):
+                pass
+
+            def setServiceParent(self, parent):
+                pass
+
+            def disownServiceParent(self):
+                pass
+
+            def privilegedStartService(self):
+                events.append('privilegedStartService')
+
+            def startService(self):
+                events.append('startService')
+
+            def stopService(self):
+                pass
+
+        application = FakeService()
+        verifyObject(service.IService, application)
+        verifyObject(service.IProcess, application)
+
+        runner = FakeUnixApplicationRunner(self.config)
+        runner.preApplication()
+        runner.application = application
+        runner.postApplication()
+
+        self.assertEqual(
+            events,
+            ['environment', 'privilegedStartService',
+             ('privileges', False, uid, gid), 'startService', 'reactor'])
+
+
+    def test_applicationStartsWithConfiguredNumericIDs(self):
+        """
+        L{postApplication} should change the UID and GID to the values
+        specified as numeric strings by the configuration after running
+        L{service.IService.privilegedStartService} and before running
+        L{service.IService.startService}.
+        """
+        uid = 1234
+        gid = 4321
+        self._applicationStartsWithConfiguredID(
+            ["--uid", str(uid), "--gid", str(gid)], uid, gid)
+    test_applicationStartsWithConfiguredNumericIDs.skip = setuidSkip
+
+
+    def test_applicationStartsWithConfiguredNameIDs(self):
+        """
+        L{postApplication} should change the UID and GID to the values
+        specified as user and group names by the configuration after running
+        L{service.IService.privilegedStartService} and before running
+        L{service.IService.startService}.
+        """
+        user = "foo"
+        uid = 1234
+        group = "bar"
+        gid = 4321
+        patchUserDatabase(self.patch, user, uid, group, gid)
+        self._applicationStartsWithConfiguredID(
+            ["--uid", user, "--gid", group], uid, gid)
+    test_applicationStartsWithConfiguredNameIDs.skip = setuidSkip
+
+
+    def test_startReactorRunsTheReactor(self):
+        """
+        L{startReactor} calls L{reactor.run}.
+        """
+        reactor = DummyReactor()
+        runner = app.ApplicationRunner({
+                "profile": False,
+                "profiler": "profile",
+                "debug": False})
+        runner.startReactor(reactor, None, None)
+        self.assertTrue(
+            reactor.called, "startReactor did not call reactor.run()")
+
+
+
+class UnixApplicationRunnerSetupEnvironmentTests(unittest.TestCase):
+    """
+    Tests for L{UnixApplicationRunner.setupEnvironment}.
+
+    @ivar root: The root of the filesystem, or C{unset} if none has been
+        specified with a call to L{os.chroot} (patched for this TestCase with
+        L{UnixApplicationRunnerSetupEnvironmentTests.chroot}).
+
+    @ivar cwd: The current working directory of the process, or C{unset} if
+        none has been specified with a call to L{os.chdir} (patched for this
+        TestCase with L{UnixApplicationRunnerSetupEnvironmentTests.chdir}).
+
+    @ivar mask: The current file creation mask of the process, or C{unset} if
+        none has been specified with a call to L{os.umask} (patched for this
+        TestCase with L{UnixApplicationRunnerSetupEnvironmentTests.umask}).
+
+    @ivar daemon: A boolean indicating whether daemonization has been performed
+        by a call to L{_twistd_unix.daemonize} (patched for this TestCase with
+        L{UnixApplicationRunnerSetupEnvironmentTests.daemonize}).
+    """
+    if _twistd_unix is None:
+        skip = "twistd unix not available"
+
+    unset = object()
+
+    def setUp(self):
+        self.root = self.unset
+        self.cwd = self.unset
+        self.mask = self.unset
+        self.daemon = False
+        self.pid = os.getpid()
+        self.patch(os, 'chroot', lambda path: setattr(self, 'root', path))
+        self.patch(os, 'chdir', lambda path: setattr(self, 'cwd', path))
+        self.patch(os, 'umask', lambda mask: setattr(self, 'mask', mask))
+        self.patch(_twistd_unix, "daemonize", self.daemonize)
+        self.runner = UnixApplicationRunner({})
+
+
+    def daemonize(self, reactor, os):
+        """
+        Indicate that daemonization has happened and change the PID so that the
+        value written to the pidfile can be tested in the daemonization case.
+        """
+        self.daemon = True
+        self.patch(os, 'getpid', lambda: self.pid + 1)
+
+
+    def test_chroot(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} changes the root of the
+        filesystem if passed a non-C{None} value for the C{chroot} parameter.
+        """
+        self.runner.setupEnvironment("/foo/bar", ".", True, None, None)
+        self.assertEqual(self.root, "/foo/bar")
+
+
+    def test_noChroot(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} does not change the root of
+        the filesystem if passed C{None} for the C{chroot} parameter.
+        """
+        self.runner.setupEnvironment(None, ".", True, None, None)
+        self.assertIdentical(self.root, self.unset)
+
+
+    def test_changeWorkingDirectory(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} changes the working directory
+        of the process to the path given for the C{rundir} parameter.
+        """
+        self.runner.setupEnvironment(None, "/foo/bar", True, None, None)
+        self.assertEqual(self.cwd, "/foo/bar")
+
+
+    def test_daemonize(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} daemonizes the process if
+        C{False} is passed for the C{nodaemon} parameter.
+        """
+        self.runner.setupEnvironment(None, ".", False, None, None)
+        self.assertTrue(self.daemon)
+
+
+    def test_noDaemonize(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} does not daemonize the
+        process if C{True} is passed for the C{nodaemon} parameter.
+        """
+        self.runner.setupEnvironment(None, ".", True, None, None)
+        self.assertFalse(self.daemon)
+
+
+    def test_nonDaemonPIDFile(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} writes the process's PID to
+        the file specified by the C{pidfile} parameter.
+        """
+        pidfile = self.mktemp()
+        self.runner.setupEnvironment(None, ".", True, None, pidfile)
+        fObj = file(pidfile)
+        pid = int(fObj.read())
+        fObj.close()
+        self.assertEqual(pid, self.pid)
+
+
+    def test_daemonPIDFile(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} writes the daemonized
+        process's PID to the file specified by the C{pidfile} parameter if
+        C{nodaemon} is C{False}.
+        """
+        pidfile = self.mktemp()
+        self.runner.setupEnvironment(None, ".", False, None, pidfile)
+        fObj = file(pidfile)
+        pid = int(fObj.read())
+        fObj.close()
+        self.assertEqual(pid, self.pid + 1)
+
+
+    def test_umask(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} changes the process umask to
+        the value specified by the C{umask} parameter.
+        """
+        self.runner.setupEnvironment(None, ".", False, 123, None)
+        self.assertEqual(self.mask, 123)
+
+
+    def test_noDaemonizeNoUmask(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} doesn't change the process
+        umask if C{None} is passed for the C{umask} parameter and C{True} is
+        passed for the C{nodaemon} parameter.
+        """
+        self.runner.setupEnvironment(None, ".", True, None, None)
+        self.assertIdentical(self.mask, self.unset)
+
+
+    def test_daemonizedNoUmask(self):
+        """
+        L{UnixApplicationRunner.setupEnvironment} changes the process umask to
+        C{0077} if C{None} is passed for the C{umask} parameter and C{False} is
+        passed for the C{nodaemon} parameter.
+        """
+        self.runner.setupEnvironment(None, ".", False, None, None)
+        self.assertEqual(self.mask, 0077)
+
+
+
+class UnixApplicationRunnerStartApplicationTests(unittest.TestCase):
+    """
+    Tests for L{UnixApplicationRunner.startApplication}.
+    """
+    if _twistd_unix is None:
+        skip = "twistd unix not available"
+
+    def test_setupEnvironment(self):
+        """
+        L{UnixApplicationRunner.startApplication} calls
+        L{UnixApplicationRunner.setupEnvironment} with the chroot, rundir,
+        nodaemon, umask, and pidfile parameters from the configuration it is
+        constructed with.
+        """
+        options = twistd.ServerOptions()
+        options.parseOptions([
+                '--nodaemon',
+                '--umask', '0070',
+                '--chroot', '/foo/chroot',
+                '--rundir', '/foo/rundir',
+                '--pidfile', '/foo/pidfile'])
+        application = service.Application("test_setupEnvironment")
+        self.runner = UnixApplicationRunner(options)
+
+        args = []
+        def fakeSetupEnvironment(self, chroot, rundir, nodaemon, umask, pidfile):
+            args.extend((chroot, rundir, nodaemon, umask, pidfile))
+
+        # Sanity check
+        self.assertEqual(
+            inspect.getargspec(self.runner.setupEnvironment),
+            inspect.getargspec(fakeSetupEnvironment))
+
+        self.patch(UnixApplicationRunner, 'setupEnvironment', fakeSetupEnvironment)
+        self.patch(UnixApplicationRunner, 'shedPrivileges', lambda *a, **kw: None)
+        self.patch(app, 'startApplication', lambda *a, **kw: None)
+        self.runner.startApplication(application)
+
+        self.assertEqual(
+            args,
+            ['/foo/chroot', '/foo/rundir', True, 56, '/foo/pidfile'])
+
+
+
+class UnixApplicationRunnerRemovePID(unittest.TestCase):
+    """
+    Tests for L{UnixApplicationRunner.removePID}.
+    """
+    if _twistd_unix is None:
+        skip = "twistd unix not available"
+
+
+    def test_removePID(self):
+        """
+        L{UnixApplicationRunner.removePID} deletes the file whose name is
+        passed to it.
+        """
+        runner = UnixApplicationRunner({})
+        path = self.mktemp()
+        os.makedirs(path)
+        pidfile = os.path.join(path, "foo.pid")
+        file(pidfile, "w").close()
+        runner.removePID(pidfile)
+        self.assertFalse(os.path.exists(pidfile))
+
+
+    def test_removePIDErrors(self):
+        """
+        Calling L{UnixApplicationRunner.removePID} with a non-existent filename logs
+        an OSError.
+        """
+        runner = UnixApplicationRunner({})
+        runner.removePID("fakepid")
+        errors = self.flushLoggedErrors(OSError)
+        self.assertEqual(len(errors), 1)
+        self.assertEqual(errors[0].value.errno, errno.ENOENT)
+
+
+
+class FakeNonDaemonizingReactor(object):
+    """
+    A dummy reactor providing C{beforeDaemonize} and C{afterDaemonize} methods,
+    but without declaring that it provides them, and recording whether the
+    methods have been called.
+
+    @ivar _beforeDaemonizeCalled: whether C{beforeDaemonize} has been called.
+    @type _beforeDaemonizeCalled: C{bool}
+    @ivar _afterDaemonizeCalled: whether C{afterDaemonize} has been called.
+    @type _afterDaemonizeCalled: C{bool}
+    """
+
+    def __init__(self):
+        self._beforeDaemonizeCalled = False
+        self._afterDaemonizeCalled = False
+
+    def beforeDaemonize(self):
+        self._beforeDaemonizeCalled = True
+
+    def afterDaemonize(self):
+        self._afterDaemonizeCalled = True
+
+
+
+class FakeDaemonizingReactor(FakeNonDaemonizingReactor):
+    """
+    A dummy reactor providing C{beforeDaemonize} and C{afterDaemonize} methods,
+    declaring that it provides them, and recording whether the methods have
+    been called.
+    """
+
+    implements(IReactorDaemonize)
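+    # Unlike FakeNonDaemonizingReactor, this class declares the interface via
+    # zope.interface's classic implements() class advice, so the daemonize()
+    # hooks exercised by the tests below are invoked for it.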
+
+
+
+class ReactorDaemonizationTests(unittest.TestCase):
+    """
+    Tests for L{_twistd_unix.daemonize} and L{IReactorDaemonize}.
+    """
+    if _twistd_unix is None:
+        skip = "twistd unix not available"
+
+
+    def test_daemonizationHooksCalled(self):
+        """
+        L{_twistd_unix.daemonize} indeed calls
+        L{IReactorDaemonize.beforeDaemonize} and
+        L{IReactorDaemonize.afterDaemonize} if the reactor implements
+        L{IReactorDaemonize}.
+        """
+        reactor = FakeDaemonizingReactor()
+        os = MockOS()
+        _twistd_unix.daemonize(reactor, os)
+        self.assertTrue(reactor._beforeDaemonizeCalled)
+        self.assertTrue(reactor._afterDaemonizeCalled)
+
+
+    def test_daemonizationHooksNotCalled(self):
+        """
+        L{_twistd_unix.daemonize} does NOT call
+        L{IReactorDaemonize.beforeDaemonize} or
+        L{IReactorDaemonize.afterDaemonize} if the reactor does NOT
+        implement L{IReactorDaemonize}.
+        """
+        reactor = FakeNonDaemonizingReactor()
+        os = MockOS()
+        _twistd_unix.daemonize(reactor, os)
+        self.assertFalse(reactor._beforeDaemonizeCalled)
+        self.assertFalse(reactor._afterDaemonizeCalled)
+
+
+
+class DummyReactor(object):
+    """
+    A dummy reactor, only providing a C{run} method and checking that it
+    has been called.
+
+    @ivar called: whether C{run} has been called.
+    @type called: C{bool}
+    """
+    called = False
+
+    def run(self):
+        """
+        A fake run method which checks that it has been called exactly once.
+        """
+        if self.called:
+            raise RuntimeError("Already called")
+        self.called = True
+
+
+
+class AppProfilingTestCase(unittest.TestCase):
+    """
+    Tests for L{app.AppProfiler}.
+    """
+
+    def test_profile(self):
+        """
+        L{app.ProfileRunner.run} should call the C{run} method of the reactor
+        and save profile data in the specified file.
+        """
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "profile"
+        profiler = app.AppProfiler(config)
+        reactor = DummyReactor()
+
+        profiler.run(reactor)
+
+        self.assertTrue(reactor.called)
+        data = file(config["profile"]).read()
+        self.assertIn("DummyReactor.run", data)
+        self.assertIn("function calls", data)
+
+    if profile is None:
+        test_profile.skip = "profile module not available"
+
+
+    def _testStats(self, statsClass, profile):
+        out = StringIO.StringIO()
+
+        # Patch before creating the pstats, because pstats binds self.stream to
+        # sys.stdout early in 2.5 and newer.
+        stdout = self.patch(sys, 'stdout', out)
+
+        # If pstats.Stats can load the data and then reformat it, then the
+        # right thing probably happened.
+        stats = statsClass(profile)
+        stats.print_stats()
+        stdout.restore()
+
+        data = out.getvalue()
+        self.assertIn("function calls", data)
+        self.assertIn("(run)", data)
+
+
+    def test_profileSaveStats(self):
+        """
+        With the C{savestats} option specified, L{app.ProfileRunner.run}
+        should save the raw stats object instead of a summary output.
+        """
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "profile"
+        config["savestats"] = True
+        profiler = app.AppProfiler(config)
+        reactor = DummyReactor()
+
+        profiler.run(reactor)
+
+        self.assertTrue(reactor.called)
+        self._testStats(pstats.Stats, config['profile'])
+
+    if profile is None:
+        test_profileSaveStats.skip = "profile module not available"
+
+
+    def test_withoutProfile(self):
+        """
+        When the C{profile} module is not present, L{app.ProfileRunner.run}
+        should raise a C{SystemExit} exception.
+        """
+        savedModules = sys.modules.copy()
+
+        config = twistd.ServerOptions()
+        config["profiler"] = "profile"
+        profiler = app.AppProfiler(config)
+
+        sys.modules["profile"] = None
+        try:
+            self.assertRaises(SystemExit, profiler.run, None)
+        finally:
+            sys.modules.clear()
+            sys.modules.update(savedModules)
+
+
+    def test_profilePrintStatsError(self):
+        """
+        When an error happens while printing the stats, C{sys.stdout}
+        should be restored to its initial value.
+        """
+        class ErroneousProfile(profile.Profile):
+            def print_stats(self):
+                raise RuntimeError("Boom")
+        self.patch(profile, "Profile", ErroneousProfile)
+
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "profile"
+        profiler = app.AppProfiler(config)
+        reactor = DummyReactor()
+
+        oldStdout = sys.stdout
+        self.assertRaises(RuntimeError, profiler.run, reactor)
+        self.assertIdentical(sys.stdout, oldStdout)
+
+    if profile is None:
+        test_profilePrintStatsError.skip = "profile module not available"
+
+
+    def test_hotshot(self):
+        """
+        L{app.HotshotRunner.run} should call the C{run} method of the reactor
+        and save profile data in the specified file.
+        """
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "hotshot"
+        profiler = app.AppProfiler(config)
+        reactor = DummyReactor()
+
+        profiler.run(reactor)
+
+        self.assertTrue(reactor.called)
+        data = file(config["profile"]).read()
+        self.assertIn("run", data)
+        self.assertIn("function calls", data)
+
+    if hotshot is None:
+        test_hotshot.skip = "hotshot module not available"
+
+
+    def test_hotshotSaveStats(self):
+        """
+        With the C{savestats} option specified, L{app.HotshotRunner.run} should
+        save the raw stats object instead of a summary output.
+        """
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "hotshot"
+        config["savestats"] = True
+        profiler = app.AppProfiler(config)
+        reactor = DummyReactor()
+
+        profiler.run(reactor)
+
+        self.assertTrue(reactor.called)
+        self._testStats(hotshot.stats.load, config['profile'])
+
+    if hotshot is None:
+        test_hotshotSaveStats.skip = "hotshot module not available"
+
+
+    def test_withoutHotshot(self):
+        """
+        When the C{hotshot} module is not present, L{app.HotshotRunner.run}
+        should raise a C{SystemExit} exception and log the C{ImportError}.
+        """
+        savedModules = sys.modules.copy()
+        sys.modules["hotshot"] = None
+
+        config = twistd.ServerOptions()
+        config["profiler"] = "hotshot"
+        profiler = app.AppProfiler(config)
+        try:
+            self.assertRaises(SystemExit, profiler.run, None)
+        finally:
+            sys.modules.clear()
+            sys.modules.update(savedModules)
+
+
+    def test_hotshotPrintStatsError(self):
+        """
+        When an error happens while printing the stats, C{sys.stdout}
+        should be restored to its initial value.
+        """
+        class ErroneousStats(pstats.Stats):
+            def print_stats(self):
+                raise RuntimeError("Boom")
+        self.patch(pstats, "Stats", ErroneousStats)
+
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "hotshot"
+        profiler = app.AppProfiler(config)
+        reactor = DummyReactor()
+
+        oldStdout = sys.stdout
+        self.assertRaises(RuntimeError, profiler.run, reactor)
+        self.assertIdentical(sys.stdout, oldStdout)
+
+    if hotshot is None:
+        test_hotshotPrintStatsError.skip = "hotshot module not available"
+
+
+    def test_cProfile(self):
+        """
+        L{app.CProfileRunner.run} should call the C{run} method of the
+        reactor and save profile data in the specified file.
+        """
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "cProfile"
+        profiler = app.AppProfiler(config)
+        reactor = DummyReactor()
+
+        profiler.run(reactor)
+
+        self.assertTrue(reactor.called)
+        data = file(config["profile"]).read()
+        self.assertIn("run", data)
+        self.assertIn("function calls", data)
+
+    if cProfile is None:
+        test_cProfile.skip = "cProfile module not available"
+
+
+    def test_cProfileSaveStats(self):
+        """
+        With the C{savestats} option specified,
+        L{app.CProfileRunner.run} should save the raw stats object
+        instead of a summary output.
+        """
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "cProfile"
+        config["savestats"] = True
+        profiler = app.AppProfiler(config)
+        reactor = DummyReactor()
+
+        profiler.run(reactor)
+
+        self.assertTrue(reactor.called)
+        self._testStats(pstats.Stats, config['profile'])
+
+    if cProfile is None:
+        test_cProfileSaveStats.skip = "cProfile module not available"
+
+
+    def test_withoutCProfile(self):
+        """
+        When the C{cProfile} module is not present,
+        L{app.CProfileRunner.run} should raise a C{SystemExit}
+        exception and log the C{ImportError}.
+        """
+        savedModules = sys.modules.copy()
+        sys.modules["cProfile"] = None
+
+        config = twistd.ServerOptions()
+        config["profiler"] = "cProfile"
+        profiler = app.AppProfiler(config)
+        try:
+            self.assertRaises(SystemExit, profiler.run, None)
+        finally:
+            sys.modules.clear()
+            sys.modules.update(savedModules)
+
+
+    def test_unknownProfiler(self):
+        """
+        Check that L{app.AppProfiler} raises L{SystemExit} when given an
+        unknown profiler name.
+        """
+        config = twistd.ServerOptions()
+        config["profile"] = self.mktemp()
+        config["profiler"] = "foobar"
+
+        error = self.assertRaises(SystemExit, app.AppProfiler, config)
+        self.assertEqual(str(error), "Unsupported profiler name: foobar")
+
+
+    def test_defaultProfiler(self):
+        """
+        L{app.AppProfiler} defaults to the hotshot profiler if no profiler
+        name is specified.
+        """
+        profiler = app.AppProfiler({})
+        self.assertEqual(profiler.profiler, "hotshot")
+
+
+    def test_profilerNameCaseInsentive(self):
+        """
+        The case of the profiler name passed to L{app.AppProfiler} is not
+        relevant.
+        """
+        profiler = app.AppProfiler({"profiler": "HotShot"})
+        self.assertEqual(profiler.profiler, "hotshot")
+
+
+
+def _patchFileLogObserver(patch):
+    """
+    Patch L{log.FileLogObserver} to record every call and keep a reference to
+    the passed log file for tests.
+
+    @param patch: a callback for patching (usually L{unittest.TestCase.patch}).
+
+    @return: the list that keeps track of the log files.
+    @rtype: C{list}
+    """
+    logFiles = []
+    oldFileLogObserver = log.FileLogObserver
+    def FileLogObserver(logFile):
+        logFiles.append(logFile)
+        return oldFileLogObserver(logFile)
+    patch(log, 'FileLogObserver', FileLogObserver)
+    return logFiles
+
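+# A minimal usage sketch (illustrative only; it mirrors the
+# test_getLogObserverStdout tests further down):
+#
+#     logFiles = _patchFileLogObserver(self.patch)
+#     observer = logger._getLogObserver()
+#     self.assertIdentical(logFiles[0], sys.stdout)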
+
+
+def _setupSyslog(testCase):
+    """
+    Install a fake L{syslog.SyslogObserver}, and return the list to which the
+    prefix and then any log messages will be appended if it is used.
+    """
+    logMessages = []
+    class fakesyslogobserver(object):
+        def __init__(self, prefix):
+            logMessages.append(prefix)
+        def emit(self, eventDict):
+            logMessages.append(eventDict)
+    testCase.patch(syslog, "SyslogObserver", fakesyslogobserver)
+    return logMessages
+
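+# A minimal usage sketch (illustrative only; it matches
+# test_getLogObserverSyslog below). The returned list first receives the
+# prefix, then every emitted event dictionary:
+#
+#     logs = _setupSyslog(self)
+#     logger = UnixAppLogger({"syslog": True, "prefix": "test-prefix"})
+#     observer = logger._getLogObserver()
+#     self.assertEqual(logs, ["test-prefix"])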
+
+
+class AppLoggerTestCase(unittest.TestCase):
+    """
+    Tests for L{app.AppLogger}.
+
+    @ivar observers: list of observers installed during the tests.
+    @type observers: C{list}
+    """
+
+    def setUp(self):
+        """
+        Override L{log.startLoggingWithObserver} so that we can trace the
+        observers installed in C{self.observers}.
+        """
+        self.observers = []
+        def startLoggingWithObserver(observer):
+            self.observers.append(observer)
+            log.addObserver(observer)
+        self.patch(log, 'startLoggingWithObserver', startLoggingWithObserver)
+
+
+    def tearDown(self):
+        """
+        Remove all installed observers.
+        """
+        for observer in self.observers:
+            log.removeObserver(observer)
+
+
+    def _checkObserver(self, logs):
+        """
+        Ensure that initial C{twistd} logs are written to the given list.
+
+        @type logs: C{list}
+        @param logs: The list whose C{append} method was specified as the
+            initial log observer.
+        """
+        self.assertEqual(self.observers, [logs.append])
+        self.assertIn("starting up", logs[0]["message"][0])
+        self.assertIn("reactor class", logs[1]["message"][0])
+
+
+    def test_start(self):
+        """
+        L{app.AppLogger.start} calls L{log.addObserver}, and then writes some
+        messages about twistd and the reactor.
+        """
+        logger = app.AppLogger({})
+        observer = []
+        logger._getLogObserver = lambda: observer.append
+        logger.start(Componentized())
+        self._checkObserver(observer)
+
+
+    def test_startUsesApplicationLogObserver(self):
+        """
+        When the L{ILogObserver} component is available on the application,
+        that object will be used as the log observer instead of constructing a
+        new one.
+        """
+        application = Componentized()
+        logs = []
+        application.setComponent(ILogObserver, logs.append)
+        logger = app.AppLogger({})
+        logger.start(application)
+        self._checkObserver(logs)
+
+
+    def _setupConfiguredLogger(self, application, extraLogArgs={},
+                               appLogger=app.AppLogger):
+        """
+        Set up an AppLogger which exercises the C{logger} configuration option.
+
+        @type application: L{Componentized}
+        @param application: The L{Application} object to pass to
+            L{app.AppLogger.start}.
+        @type extraLogArgs: C{dict}
+        @param extraLogArgs: extra values to pass to AppLogger.
+        @type appLogger: L{AppLogger} class, or a subclass
+        @param appLogger: factory for L{AppLogger} instances.
+
+        @rtype: C{list}
+        @return: The logs accumulated by the log observer.
+        """
+        logs = []
+        logArgs = {"logger": lambda: logs.append}
+        logArgs.update(extraLogArgs)
+        logger = appLogger(logArgs)
+        logger.start(application)
+        return logs
+
+
+    def test_startUsesConfiguredLogObserver(self):
+        """
+        When the C{logger} key is specified in the configuration dictionary
+        (i.e., when C{--logger} is passed to twistd), the initial log observer
+        will be the log observer returned from the callable which the value
+        refers to in FQPN form.
+        """
+        application = Componentized()
+        self._checkObserver(self._setupConfiguredLogger(application))
+
+
+    def test_configuredLogObserverBeatsComponent(self):
+        """
+        C{--logger} takes precedence over an L{ILogObserver} component set on
+        the Application.
+        """
+        nonlogs = []
+        application = Componentized()
+        application.setComponent(ILogObserver, nonlogs.append)
+        self._checkObserver(self._setupConfiguredLogger(application))
+        self.assertEqual(nonlogs, [])
+
+
+    def test_configuredLogObserverBeatsSyslog(self):
+        """
+        C{--logger} takes precedence over a C{--syslog} command line
+        argument.
+        """
+        logs = _setupSyslog(self)
+        application = Componentized()
+        self._checkObserver(self._setupConfiguredLogger(application,
+                                                        {"syslog": True},
+                                                        UnixAppLogger))
+        self.assertEqual(logs, [])
+
+    if _twistd_unix is None or syslog is None:
+        test_configuredLogObserverBeatsSyslog.skip = "Not on POSIX, or syslog not available."
+
+
+    def test_configuredLogObserverBeatsLogfile(self):
+        """
+        C{--logger} takes precedence over a C{--logfile} command line
+        argument.
+        """
+        application = Componentized()
+        path = self.mktemp()
+        self._checkObserver(self._setupConfiguredLogger(application,
+                                                        {"logfile": "path"}))
+        self.assertFalse(os.path.exists(path))
+
+
+    def test_getLogObserverStdout(self):
+        """
+        When logfile is empty or set to C{-}, L{app.AppLogger._getLogObserver}
+        returns a log observer pointing at C{sys.stdout}.
+        """
+        logger = app.AppLogger({"logfile": "-"})
+        logFiles = _patchFileLogObserver(self.patch)
+
+        observer = logger._getLogObserver()
+
+        self.assertEqual(len(logFiles), 1)
+        self.assertIdentical(logFiles[0], sys.stdout)
+
+        logger = app.AppLogger({"logfile": ""})
+        observer = logger._getLogObserver()
+
+        self.assertEqual(len(logFiles), 2)
+        self.assertIdentical(logFiles[1], sys.stdout)
+
+
+    def test_getLogObserverFile(self):
+        """
+        When passing the C{logfile} option, L{app.AppLogger._getLogObserver}
+        returns a log observer pointing at the specified path.
+        """
+        logFiles = _patchFileLogObserver(self.patch)
+        filename = self.mktemp()
+        logger = app.AppLogger({"logfile": filename})
+
+        observer = logger._getLogObserver()
+
+        self.assertEqual(len(logFiles), 1)
+        self.assertEqual(logFiles[0].path,
+                          os.path.abspath(filename))
+
+
+    def test_stop(self):
+        """
+        L{app.AppLogger.stop} removes the observer created in C{start} and
+        resets its C{_observer} attribute, so that calling C{stop} several
+        times doesn't break.
+        """
+        removed = []
+        observer = object()
+        def remove(observer):
+            removed.append(observer)
+        self.patch(log, 'removeObserver', remove)
+        logger = app.AppLogger({})
+        logger._observer = observer
+        logger.stop()
+        self.assertEqual(removed, [observer])
+        logger.stop()
+        self.assertEqual(removed, [observer])
+        self.assertIdentical(logger._observer, None)
+
+
+
+class UnixAppLoggerTestCase(unittest.TestCase):
+    """
+    Tests for L{UnixAppLogger}.
+
+    @ivar signals: list of signal handlers installed.
+    @type signals: C{list}
+    """
+    if _twistd_unix is None:
+        skip = "twistd unix not available"
+
+    def setUp(self):
+        """
+        Fake C{signal.signal} so that handlers are not actually installed but
+        saved in C{self.signals} instead.
+        """
+        self.signals = []
+        def fakeSignal(sig, f):
+            self.signals.append((sig, f))
+        self.patch(signal, "signal", fakeSignal)
+
+
+    def test_getLogObserverStdout(self):
+        """
+        When non-daemonized and C{logfile} is empty or set to C{-},
+        L{UnixAppLogger._getLogObserver} returns a log observer pointing at
+        C{sys.stdout}.
+        """
+        logFiles = _patchFileLogObserver(self.patch)
+
+        logger = UnixAppLogger({"logfile": "-", "nodaemon": True})
+        observer = logger._getLogObserver()
+        self.assertEqual(len(logFiles), 1)
+        self.assertIdentical(logFiles[0], sys.stdout)
+
+        logger = UnixAppLogger({"logfile": "", "nodaemon": True})
+        observer = logger._getLogObserver()
+        self.assertEqual(len(logFiles), 2)
+        self.assertIdentical(logFiles[1], sys.stdout)
+
+
+    def test_getLogObserverStdoutDaemon(self):
+        """
+        When daemonized and C{logfile} is set to C{-},
+        L{UnixAppLogger._getLogObserver} raises C{SystemExit}.
+        """
+        logger = UnixAppLogger({"logfile": "-", "nodaemon": False})
+        error = self.assertRaises(SystemExit, logger._getLogObserver)
+        self.assertEqual(str(error), "Daemons cannot log to stdout, exiting!")
+
+
+    def test_getLogObserverFile(self):
+        """
+        When C{logfile} contains a file name, L{UnixAppLogger._getLogObserver}
+        returns a log observer pointing at the specified path, and a signal
+        handler rotating the log is installed.
+        """
+        logFiles = _patchFileLogObserver(self.patch)
+        filename = self.mktemp()
+        logger = UnixAppLogger({"logfile": filename})
+        observer = logger._getLogObserver()
+
+        self.assertEqual(len(logFiles), 1)
+        self.assertEqual(logFiles[0].path,
+                          os.path.abspath(filename))
+
+        self.assertEqual(len(self.signals), 1)
+        self.assertEqual(self.signals[0][0], signal.SIGUSR1)
+
+        d = Deferred()
+        def rotate():
+            d.callback(None)
+        logFiles[0].rotate = rotate
+
+        rotateLog = self.signals[0][1]
+        rotateLog(None, None)
+        return d
+
+
+    def test_getLogObserverDontOverrideSignalHandler(self):
+        """
+        If a signal handler is already installed,
+        L{UnixAppLogger._getLogObserver} doesn't override it.
+        """
+        def fakeGetSignal(sig):
+            self.assertEqual(sig, signal.SIGUSR1)
+            return object()
+        self.patch(signal, "getsignal", fakeGetSignal)
+        filename = self.mktemp()
+        logger = UnixAppLogger({"logfile": filename})
+        observer = logger._getLogObserver()
+
+        self.assertEqual(self.signals, [])
+
+
+    def test_getLogObserverDefaultFile(self):
+        """
+        When daemonized and C{logfile} is empty, the observer returned by
+        L{UnixAppLogger._getLogObserver} points at C{twistd.log} in the current
+        directory.
+        """
+        logFiles = _patchFileLogObserver(self.patch)
+        logger = UnixAppLogger({"logfile": "", "nodaemon": False})
+        observer = logger._getLogObserver()
+
+        self.assertEqual(len(logFiles), 1)
+        self.assertEqual(logFiles[0].path,
+                          os.path.abspath("twistd.log"))
+
+
+    def test_getLogObserverSyslog(self):
+        """
+        If C{syslog} is set to C{True}, L{UnixAppLogger._getLogObserver} starts
+        a L{syslog.SyslogObserver} with given C{prefix}.
+        """
+        logs = _setupSyslog(self)
+        logger = UnixAppLogger({"syslog": True, "prefix": "test-prefix"})
+        observer = logger._getLogObserver()
+        self.assertEqual(logs, ["test-prefix"])
+        observer({"a": "b"})
+        self.assertEqual(logs, ["test-prefix", {"a": "b"}])
+
+    if syslog is None:
+        test_getLogObserverSyslog.skip = "Syslog not available"
+
+
+
diff --git a/ThirdParty/Twisted/twisted/test/test_twisted.py b/ThirdParty/Twisted/twisted/test/test_twisted.py
new file mode 100644
index 0000000..6ba6c46
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_twisted.py
@@ -0,0 +1,678 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for miscellaneous behaviors of the top-level L{twisted} package (i.e.,
+for the code in C{twisted/__init__.py}).
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+from types import ModuleType, FunctionType
+
+from unittest import TestCase
+
+from twisted import _checkRequirements
+from twisted.python.compat import _PY3
+
+
+# This is somewhat generally useful and should probably be part of a public API
+# somewhere.  See #5977.
+class SetAsideModule(object):
+    """
+    L{SetAsideModule} is a context manager for temporarily removing a module
+    from C{sys.modules}.
+
+    @ivar name: The name of the module to remove.
+    """
+    def __init__(self, name):
+        self.name = name
+
+
+    def _unimport(self, name):
+        """
+        Find the given module and all of its hierarchically inferior modules in
+        C{sys.modules}, remove them from it, and return whatever was found.
+        """
+        modules = dict([
+                (name, module)
+                for (name, module)
+                in list(sys.modules.items())
+                if name == self.name or name.startswith(self.name + ".")])
+        for name in modules:
+            del sys.modules[name]
+        return modules
+
+
+    def __enter__(self):
+        self.modules = self._unimport(self.name)
+
+
+    def __exit__(self, excType, excValue, traceback):
+        self._unimport(self.name)
+        sys.modules.update(self.modules)
+
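+# A minimal usage sketch (illustrative only; the RequirementsTests below use
+# exactly this pattern). Inside the block the module is removed from
+# sys.modules and may be replaced; the original entries are restored on exit:
+#
+#     with SetAsideModule("zope"):
+#         sys.modules["zope"] = None
+#         self.assertRaises(ImportError, _checkRequirements)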
+
+
+# Copied from 2.7 stdlib.  Delete after Python 2.6 is no longer a
+# requirement.  See #5976.
+class _AssertRaisesContext(object):
+    """A context manager used to implement TestCase.assertRaises* methods."""
+
+    def __init__(self, expected, test_case, expected_regexp=None):
+        self.expected = expected
+        self.failureException = test_case.failureException
+        self.expected_regexp = expected_regexp
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_value, tb):
+        if exc_type is None:
+            try:
+                exc_name = self.expected.__name__
+            except AttributeError:
+                exc_name = str(self.expected)
+            raise self.failureException(
+                "{0} not raised".format(exc_name))
+        if not issubclass(exc_type, self.expected):
+            # let unexpected exceptions pass through
+            return False
+        self.exception = exc_value # store for later retrieval
+        if self.expected_regexp is None:
+            return True
+
+        expected_regexp = self.expected_regexp
+        if isinstance(expected_regexp, basestring):
+            expected_regexp = re.compile(expected_regexp)
+        if not expected_regexp.search(str(exc_value)):
+            raise self.failureException('"%s" does not match "%s"' %
+                     (expected_regexp.pattern, str(exc_value)))
+        return True
+
+
+
+def _install(modules):
+    """
+    Take a mapping defining a package and turn it into real C{ModuleType}
+    instances in C{sys.modules}.
+
+    Consider these examples::
+
+        a = {"foo": "bar"}
+        b = {"twisted": {"__version__": "42.6"}}
+        c = {"twisted": {"plugin": {"getPlugins": stub}}}
+
+    C{_install(a)} will place an item into C{sys.modules} with C{"foo"} as the
+    key and C{"bar" as the value.
+
+    C{_install(b)} will place an item into C{sys.modules} with C{"twisted"} as
+    the key.  The value will be a new module object.  The module will have a
+    C{"__version__"} attribute with C{"42.6"} as the value.
+
+    C{_install(c)} will place an item into C{sys.modules} with C{"twisted"} as
+    the key.  The value will be a new module object with a C{"plugin"}
+    attribute.  An item will also be placed into C{sys.modules} with the key
+    C{"twisted.plugin"} which refers to that module object.  That module will
+    have an attribute C{"getPlugins"} with a value of C{stub}.
+
+    @param modules: A mapping from names to definitions of modules.  The names
+        are native strings like C{"twisted"} or C{"unittest"}.  Values may be
+        arbitrary objects.  Any value which is not a dictionary will be added to
+        C{sys.modules} unmodified.  Any dictionary value indicates the value is
+        a new module and its items define the attributes of that module.  The
+        definition of this structure is recursive, so a value in the dictionary
+        may be a dictionary to trigger another level of processing.
+
+    @return: C{None}
+    """
+    result = {}
+    _makePackages(None, modules, result)
+    sys.modules.update(result)
+
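+# A minimal usage sketch (illustrative only; it follows example C{b} from the
+# docstring above). Because the created module object is placed directly into
+# sys.modules, a subsequent import simply returns it:
+#
+#     _install({"twisted": {"__version__": "42.6"}})
+#     import twisted
+#     assert twisted.__version__ == "42.6"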
+
+
+def _makePackages(parent, attributes, result):
+    """
+    Construct module objects (for either modules or packages).
+
+    @param parent: C{None} or a module object which is the Python package
+        containing all of the modules being created by this function call.  Its
+        name will be prepended to the name of all created modules.
+
+    @param attributes: A mapping giving the attributes of the particular module
+        object this call is creating.
+
+    @param result: A mapping which is populated with all created module names.
+        This is suitable for use in updating C{sys.modules}.
+
+    @return: A mapping of all of the attributes created by this call.  This is
+        suitable for populating the dictionary of C{parent}.
+
+    @see: L{_install}.
+    """
+    attrs = {}
+    for (name, value) in list(attributes.items()):
+        if parent is None:
+            if isinstance(value, dict):
+                module = ModuleType(name)
+                module.__dict__.update(_makePackages(module, value, result))
+                result[name] = module
+            else:
+                result[name] = value
+        else:
+            if isinstance(value, dict):
+                module = ModuleType(parent.__name__ + '.' + name)
+                module.__dict__.update(_makePackages(module, value, result))
+                result[parent.__name__ + '.' + name] = module
+                attrs[name] = module
+            else:
+                attrs[name] = value
+    return attrs
+
+
+
+class RequirementsTests(TestCase):
+    """
+    Tests for the import-time requirements checking.
+
+    @ivar unsupportedPythonVersion: The newest version of Python 2.x which is
+        not supported by Twisted.
+    @type unsupportedPythonVersion: C{tuple}
+
+    @ivar supportedPythonVersion: The oldest version of Python 2.x which is
+        supported by Twisted.
+    @type supportedPythonVersion: C{tuple}
+
+    @ivar supportedZopeInterfaceVersion: The oldest version of C{zope.interface}
+        which is supported by Twisted.
+    @type supportedZopeInterfaceVersion: C{tuple}
+    """
+    unsupportedPythonVersion = (2, 5)
+    supportedPythonVersion = (2, 6)
+
+    if _PY3:
+        supportedZopeInterfaceVersion = (4, 0, 0)
+    else:
+        supportedZopeInterfaceVersion = (3, 6, 0)
+
+    # Copied from 2.7 stdlib.  Delete after Python 2.6 is no longer a
+    # requirement.  See #5976.
+    def assertRaises(self, excClass, callableObj=None, *args, **kwargs):
+        """Fail unless an exception of class excClass is thrown
+           by callableObj when invoked with arguments args and keyword
+           arguments kwargs. If a different type of exception is
+           thrown, it will not be caught, and the test case will be
+           deemed to have suffered an error, exactly as for an
+           unexpected exception.
+
+           If called with callableObj omitted or None, will return a
+           context object used like this::
+
+                with self.assertRaises(SomeException):
+                    do_something()
+
+           The context manager keeps a reference to the exception as
+           the 'exception' attribute. This allows you to inspect the
+           exception after the assertion::
+
+               with self.assertRaises(SomeException) as cm:
+                   do_something()
+               the_exception = cm.exception
+               self.assertEqual(the_exception.error_code, 3)
+        """
+        context = _AssertRaisesContext(excClass, self)
+        if callableObj is None:
+            return context
+        with context:
+            callableObj(*args, **kwargs)
+
+
+    def setUp(self):
+        """
+        Save the original value of C{sys.version_info} so it can be restored
+        after the tests mess with it.
+        """
+        self.version = sys.version_info
+
+
+    def tearDown(self):
+        """
+        Restore the original values saved in L{setUp}.
+        """
+        sys.version_info = self.version
+
+
+    def test_oldPython(self):
+        """
+        L{_checkRequirements} raises L{ImportError} when run on a version of
+        Python that is too old.
+        """
+        sys.version_info = self.unsupportedPythonVersion
+        with self.assertRaises(ImportError) as raised:
+            _checkRequirements()
+        self.assertEqual(
+            "Twisted requires Python %d.%d or later." % self.supportedPythonVersion,
+            str(raised.exception))
+
+
+    def test_newPython(self):
+        """
+        L{_checkRequirements} returns C{None} when run on a version of Python
+        that is sufficiently new.
+        """
+        sys.version_info = self.supportedPythonVersion
+        self.assertEqual(None, _checkRequirements())
+
+
+    def test_missingZopeNamespace(self):
+        """
+        L{_checkRequirements} raises L{ImportError} when the C{zope} namespace
+        package is not installed.
+        """
+        with SetAsideModule("zope"):
+            # After an import for a module fails, it gets a None value in
+            # sys.modules as a cache of that negative result.  Future import
+            # attempts see it and fail fast without checking the system again.
+            sys.modules["zope"] = None
+            with self.assertRaises(ImportError) as raised:
+                _checkRequirements()
+            self.assertEqual(
+                "Twisted requires zope.interface %d.%d.%d or later: no module "
+                "named zope.interface." % self.supportedZopeInterfaceVersion,
+                str(raised.exception))
+
+
+    def test_missingZopeInterface(self):
+        """
+        L{_checkRequirements} raises L{ImportError} when the C{zope.interface}
+        package is not installed.
+        """
+        with SetAsideModule("zope"):
+            # Create a minimal module to represent the zope namespace package,
+            # but don't give it an "interface" attribute.
+            sys.modules["zope"] = ModuleType("zope")
+            with self.assertRaises(ImportError) as raised:
+                _checkRequirements()
+            self.assertEqual(
+                "Twisted requires zope.interface %d.%d.%d or later: no module "
+                "named zope.interface." % self.supportedZopeInterfaceVersion,
+                str(raised.exception))
+
+
+    def test_setupNoCheckRequirements(self):
+        """
+        L{_checkRequirements} doesn't check for C{zope.interface} compliance
+        when C{setuptools._TWISTED_NO_CHECK_REQUIREMENTS} is set.
+        """
+        with SetAsideModule("setuptools"):
+            setuptools = ModuleType("setuptools")
+            setuptools._TWISTED_NO_CHECK_REQUIREMENTS = True
+            sys.modules["setuptools"] = setuptools
+            with SetAsideModule("zope"):
+                sys.modules["zope"] = None
+                _checkRequirements()
+
+
+    def test_setupCheckRequirements(self):
+        """
+        L{_checkRequirements} checks for C{zope.interface} compliance when
+        C{setuptools} is imported but C{_TWISTED_NO_CHECK_REQUIREMENTS} is
+        not set.
+        """
+        with SetAsideModule("setuptools"):
+            sys.modules["setuptools"] = ModuleType("setuptools")
+            with SetAsideModule("zope"):
+                sys.modules["zope"] = None
+                self.assertRaises(ImportError, _checkRequirements)
+
+
+    def test_noSetupCheckRequirements(self):
+        """
+        L{_checkRequirements} checks for C{zope.interface} compliance when
+        C{setuptools} is not imported.
+        """
+        with SetAsideModule("setuptools"):
+            sys.modules["setuptools"] = None
+            with SetAsideModule("zope"):
+                sys.modules["zope"] = None
+                self.assertRaises(ImportError, _checkRequirements)
+
+
+    if _PY3:
+        # Python 3 requires a version that isn't tripped up by the __qualname__
+        # special attribute.
+
+        def test_oldZopeInterface(self):
+            """
+            If the installed version of C{zope.interface} does not support the
+            C{implementer} class decorator, L{_checkRequirements} raises
+            L{ImportError} with a message explaining a newer version is
+            required.
+            """
+            with SetAsideModule("zope"):
+                _install(_zope38)
+                with self.assertRaises(ImportError) as raised:
+                    _checkRequirements()
+            self.assertEqual(
+                "Twisted requires zope.interface 4.0.0 or later.",
+                str(raised.exception))
+
+
+        def test_newZopeInterface(self):
+            """
+            If the installed version of C{zope.interface} does support the
+            C{implementer} class decorator, L{_checkRequirements} returns
+            C{None}.
+            """
+            with SetAsideModule("zope"):
+                _install(_zope40)
+                self.assertEqual(None, _checkRequirements())
+
+    else:
+        # Python 2 only requires a version that supports the class decorator
+        # version of declarations.
+
+        def test_oldZopeInterface(self):
+            """
+            L{_checkRequirements} raises L{ImportError} when the C{zope.interface}
+            package installed is old enough that C{implementer_only} is not included
+            (added in zope.interface 3.6).
+            """
+            with SetAsideModule("zope"):
+                _install(_zope35)
+                with self.assertRaises(ImportError) as raised:
+                    _checkRequirements()
+                self.assertEqual(
+                    "Twisted requires zope.interface 3.6.0 or later.",
+                    str(raised.exception))
+
+
+        def test_newZopeInterface(self):
+            """
+            L{_checkRequirements} returns C{None} when C{zope.interface} is
+            installed and new enough.
+            """
+            with SetAsideModule("zope"):
+                _install(_zope36)
+                self.assertEqual(None, _checkRequirements())
+
+
+
+class MakePackagesTests(TestCase):
+    """
+    Tests for L{_makePackages}, a helper for populating C{sys.modules} with
+    fictional modules.
+    """
+    def test_nonModule(self):
+        """
+        A non-C{dict} value in the attributes dictionary passed to
+        L{_makePackages} is preserved unchanged in the result mapping it
+        populates.
+        """
+        modules = {}
+        _makePackages(None, dict(reactor='reactor'), modules)
+        self.assertEqual(modules, dict(reactor='reactor'))
+
+
+    def test_moduleWithAttribute(self):
+        """
+        A C{dict} value in the attributes dictionary passed to L{_makePackages}
+        is turned into a L{ModuleType} instance with attributes populated from
+        the items of that C{dict} value.
+        """
+        modules = {}
+        _makePackages(None, dict(twisted=dict(version='123')), modules)
+        self.assertTrue(isinstance(modules, dict))
+        self.assertTrue(isinstance(modules['twisted'], ModuleType))
+        self.assertEqual('twisted', modules['twisted'].__name__)
+        self.assertEqual('123', modules['twisted'].version)
+
+
+    def test_packageWithModule(self):
+        """
+        Processing of the attributes dictionary is recursive, so a C{dict} value
+        it contains may itself contain a C{dict} value to the same effect.
+        """
+        modules = {}
+        _makePackages(None, dict(twisted=dict(web=dict(version='321'))), modules)
+        self.assertTrue(isinstance(modules, dict))
+        self.assertTrue(isinstance(modules['twisted'], ModuleType))
+        self.assertEqual('twisted', modules['twisted'].__name__)
+        self.assertTrue(isinstance(modules['twisted'].web, ModuleType))
+        self.assertEqual('twisted.web', modules['twisted'].web.__name__)
+        self.assertEqual('321', modules['twisted'].web.version)
+
+
+
+def _functionOnlyImplementer(*interfaces):
+    """
+    A fake implementation of L{zope.interface.implementer} which always behaves
+    like the version of that function provided by zope.interface 3.5 and older.
+    """
+    def check(obj):
+        """
+        If the decorated object is not a function, raise an exception.
+        """
+        if not isinstance(obj, FunctionType):
+            raise TypeError(
+                "Can't use implementer with classes.  "
+                "Use one of the class-declaration functions instead.")
+    return check
+
+
+
+def _classSupportingImplementer(*interfaces):
+    """
+    A fake implementation of L{zope.interface.implementer} which always
+    succeeds.  For the use it is put to, this is like the version of that
+    function provided by zope.interface 3.6 and newer.
+    """
+    def check(obj):
+        """
+        Do nothing at all.
+        """
+    return check
+
+
+
+class _SuccessInterface(object):
+    """
+    A fake implementation of L{zope.interface.Interface} with no behavior.  For
+    the use it is put to, this is equivalent to the behavior of the C{Interface}
+    provided by all versions of zope.interface.
+    """
+
+
+# Definition of a module somewhat like zope.interface 3.5.
+_zope35 = {
+    'zope': {
+        'interface': {
+            'Interface': _SuccessInterface,
+            'implementer': _functionOnlyImplementer,
+            },
+        },
+    }
+
+
+# Definition of a module somewhat like zope.interface 3.6.
+_zope36 = {
+    'zope': {
+        'interface': {
+            'Interface': _SuccessInterface,
+            'implementer': _classSupportingImplementer,
+            },
+        },
+    }
+
+
+class _Zope38OnPython3Module(object):
+    """
+    A pseudo-module which raises an exception when its C{interface} attribute is
+    accessed.  This is like the behavior of zope.interface 3.8 and earlier when
+    used with Python 3.3.
+    """
+    __path__ = []
+    __name__ = 'zope'
+
+    @property
+    def interface(self):
+        raise Exception(
+            "zope.interface.exceptions.InvalidInterface: "
+            "Concrete attribute, __qualname__")
+
+# Definition of a module somewhat like zope.interface 3.8 when it is used on Python 3.
+_zope38 = {
+    'zope': _Zope38OnPython3Module(),
+    }
+
+# Definition of a module somewhat like zope.interface 4.0.
+_zope40 = {
+    'zope': {
+        'interface': {
+            'Interface': _SuccessInterface,
+            'implementer': _classSupportingImplementer,
+            },
+        },
+    }
+
+
+class ZopeInterfaceTestsMixin(object):
+    """
+    Verify the C{zope.interface} fakes, only possible when a specific version of
+    the real C{zope.interface} package is installed on the system.
+
+    Subclass this and override C{install} to properly install and then remove
+    the given version of C{zope.interface}.
+    """
+    def test_zope35(self):
+        """
+        Version 3.5 of L{zope.interface} has an C{implementer} function which
+        cannot be used as a class decorator.
+        """
+        with SetAsideModule("zope"):
+            self.install((3, 5))
+            from zope.interface import Interface, implementer
+            class IDummy(Interface):
+                pass
+            try:
+                @implementer(IDummy)
+                class Dummy(object):
+                    pass
+            except TypeError as exc:
+                self.assertEqual(
+                    "Can't use implementer with classes.  "
+                    "Use one of the class-declaration functions instead.",
+                    str(exc))
+
+
+    def test_zope36(self):
+        """
+        Version 3.6 of L{zope.interface} has an C{implementer} function which
+        can be used as a class decorator.
+        """
+        with SetAsideModule("zope"):
+            self.install((3, 6))
+            from zope.interface import Interface, implementer
+            class IDummy(Interface):
+                pass
+            @implementer(IDummy)
+            class Dummy(object):
+                pass
+
+    if _PY3:
+        def test_zope38(self):
+            """
+            Version 3.8 of L{zope.interface} cannot even be imported on Python 3.
+            """
+            with SetAsideModule("zope"):
+                self.install((3, 8))
+                try:
+                    from zope import interface
+                except Exception as exc:
+                    self.assertEqual(
+                        "zope.interface.exceptions.InvalidInterface: "
+                        "Concrete attribute, __qualname__",
+                        str(exc))
+                else:
+                    self.fail(
+                        "InvalidInterface was not raised by zope.interface import")
+
+
+        def test_zope40(self):
+            """
+            Version 4.0 of L{zope.interface} can be imported on Python 3 and
+            provides an C{Interface} class which can be subclassed there.
+            """
+            with SetAsideModule("zope"):
+                self.install((4, 0))
+                from zope.interface import Interface
+                class IDummy(Interface):
+                    pass
+
+
+class FakeZopeInterfaceTests(TestCase, ZopeInterfaceTestsMixin):
+    """
+    Apply the zope.interface tests to the fakes implemented in this module.
+    """
+    versions = {
+        (3, 5): _zope35,
+        (3, 6): _zope36,
+        (3, 8): _zope38,
+        (4, 0): _zope40,
+        }
+
+    def install(self, version):
+        """
+        Grab one of the fake module implementations and install it into
+        C{sys.modules} for use by the test.
+        """
+        _install(self.versions[version])
+
+
+# Python 2.6 stdlib unittest does not support skipping.  Use trial's
+# SynchronousTestCase instead.  When Python 2.6 support is dropped, this can
+# switch back to using the stdlib TestCase with its skip support.
+if sys.version_info[:2] == (2, 6):
+    from twisted.trial.unittest import SkipTest, SynchronousTestCase as TestCase
+else:
+    from unittest import SkipTest
+
+class RealZopeInterfaceTests(TestCase, ZopeInterfaceTestsMixin):
+    """
+    Apply whichever tests from L{ZopeInterfaceTestsMixin} are applicable to the
+    system-installed version of zope.interface.
+    """
+    def install(self, version):
+        """
+        Check to see if the system-installed version of zope.interface matches
+        the version requested.  If so, do nothing.  If not, skip the test (if
+        the desired version is not installed, there is no way to test its
+        behavior).  If the version of zope.interface cannot be determined
+        (because pkg_resources is not installed), skip the test.
+        """
+        # Use an unrelated, but unreliable, route to try to determine what
+        # version of zope.interface is installed on the system.  It's sort of
+        # okay to use this unreliable scheme here, since if it fails it only
+        # means we won't be able to run the tests.  Hopefully someone else
+        # managed to run the tests somewhere else.
+        try:
+            import pkg_resources
+        except ImportError as e:
+            raise SkipTest(
+                "Cannot determine system version of zope.interface: %s" % (e,))
+        else:
+            try:
+                pkg = pkg_resources.get_distribution("zope.interface")
+            except pkg_resources.DistributionNotFound as e:
+                raise SkipTest(
+                    "Cannot determine system version of zope.interface: %s" % (
+                        e,))
+            installed = pkg.version
+            versionTuple = tuple(
+                int(part) for part in installed.split('.')[:len(version)])
+            if versionTuple == version:
+                pass
+            else:
+                raise SkipTest("Mismatched system version of zope.interface")
+
diff --git a/ThirdParty/Twisted/twisted/test/test_udp.py b/ThirdParty/Twisted/twisted/test/test_udp.py
new file mode 100644
index 0000000..21d145c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_udp.py
@@ -0,0 +1,708 @@
+# -*- test-case-name: twisted.test.test_udp -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorUDP} and L{IReactorMulticast}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial import unittest
+
+from twisted.python.compat import intToBytes
+from twisted.internet.defer import Deferred, gatherResults, maybeDeferred
+from twisted.internet import protocol, reactor, error, defer, interfaces, udp
+from twisted.python import runtime
+
+
+class Mixin:
+
+    started = 0
+    stopped = 0
+
+    startedDeferred = None
+
+    def __init__(self):
+        self.packets = []
+
+    def startProtocol(self):
+        self.started = 1
+        if self.startedDeferred is not None:
+            d, self.startedDeferred = self.startedDeferred, None
+            d.callback(None)
+
+    def stopProtocol(self):
+        self.stopped = 1
+
+
+class Server(Mixin, protocol.DatagramProtocol):
+    packetReceived = None
+    refused = 0
+
+
+    def datagramReceived(self, data, addr):
+        self.packets.append((data, addr))
+        if self.packetReceived is not None:
+            d, self.packetReceived = self.packetReceived, None
+            d.callback(None)
+
+
+
+class Client(Mixin, protocol.ConnectedDatagramProtocol):
+
+    packetReceived = None
+    refused = 0
+
+    def datagramReceived(self, data):
+        self.packets.append(data)
+        if self.packetReceived is not None:
+            d, self.packetReceived = self.packetReceived, None
+            d.callback(None)
+
+    def connectionFailed(self, failure):
+        if self.startedDeferred is not None:
+            d, self.startedDeferred = self.startedDeferred, None
+            d.errback(failure)
+        self.failure = failure
+
+    def connectionRefused(self):
+        if self.startedDeferred is not None:
+            d, self.startedDeferred = self.startedDeferred, None
+            d.errback(error.ConnectionRefusedError("yup"))
+        self.refused = 1
+
+
+class GoodClient(Server):
+
+    def connectionRefused(self):
+        if self.startedDeferred is not None:
+            d, self.startedDeferred = self.startedDeferred, None
+            d.errback(error.ConnectionRefusedError("yup"))
+        self.refused = 1
+
+
+
+class BadClientError(Exception):
+    """
+    Raised by BadClient at the end of every datagramReceived call to try to
+    screw stuff up.
+    """
+
+
+
+class BadClient(protocol.DatagramProtocol):
+    """
+    A DatagramProtocol which always raises an exception from datagramReceived.
+    Used to test error handling behavior in the reactor for that method.
+    """
+    d = None
+
+    def setDeferred(self, d):
+        """
+        Set the Deferred which will be called back when datagramReceived is
+        called.
+        """
+        self.d = d
+
+
+    def datagramReceived(self, bytes, addr):
+        if self.d is not None:
+            d, self.d = self.d, None
+            d.callback(bytes)
+        raise BadClientError("Application code is very buggy!")
+
+
+
+class UDPTestCase(unittest.TestCase):
+
+    def test_oldAddress(self):
+        """
+        The C{type} of the host address of a listening L{DatagramProtocol}'s
+        transport is C{"UDP"}.
+        """
+        server = Server()
+        d = server.startedDeferred = defer.Deferred()
+        p = reactor.listenUDP(0, server, interface="127.0.0.1")
+        def cbStarted(ignored):
+            addr = p.getHost()
+            self.assertEqual(addr.type, 'UDP')
+            return p.stopListening()
+        return d.addCallback(cbStarted)
+
+
+    def test_startStop(self):
+        """
+        The L{DatagramProtocol}'s C{startProtocol} and C{stopProtocol}
+        methods are called when its transport starts and stops listening,
+        respectively.
+        """
+        server = Server()
+        d = server.startedDeferred = defer.Deferred()
+        port1 = reactor.listenUDP(0, server, interface="127.0.0.1")
+        def cbStarted(ignored):
+            self.assertEqual(server.started, 1)
+            self.assertEqual(server.stopped, 0)
+            return port1.stopListening()
+        def cbStopped(ignored):
+            self.assertEqual(server.stopped, 1)
+        return d.addCallback(cbStarted).addCallback(cbStopped)
+
+
+    def test_rebind(self):
+        """
+        Re-listening with the same L{DatagramProtocol} re-invokes the
+        C{startProtocol} callback.
+        """
+        server = Server()
+        d = server.startedDeferred = defer.Deferred()
+        p = reactor.listenUDP(0, server, interface="127.0.0.1")
+
+        def cbStarted(ignored, port):
+            return port.stopListening()
+
+        def cbStopped(ignored):
+            d = server.startedDeferred = defer.Deferred()
+            p = reactor.listenUDP(0, server, interface="127.0.0.1")
+            return d.addCallback(cbStarted, p)
+
+        return d.addCallback(cbStarted, p)
+
+
+    def test_bindError(self):
+        """
+        A L{CannotListenError} exception is raised when attempting to bind a
+        second protocol instance to an already bound port.
+        """
+        server = Server()
+        d = server.startedDeferred = defer.Deferred()
+        port = reactor.listenUDP(0, server, interface='127.0.0.1')
+
+        def cbStarted(ignored):
+            self.assertEqual(port.getHost(), server.transport.getHost())
+            server2 = Server()
+            self.assertRaises(
+                error.CannotListenError,
+                reactor.listenUDP, port.getHost().port, server2,
+                interface='127.0.0.1')
+        d.addCallback(cbStarted)
+
+        def cbFinished(ignored):
+            return port.stopListening()
+        d.addCallback(cbFinished)
+        return d
+
+
+    def test_sendPackets(self):
+        """
+        Datagrams can be sent with the transport's C{write} method and
+        received via the C{datagramReceived} callback method.
+        """
+        server = Server()
+        serverStarted = server.startedDeferred = defer.Deferred()
+        port1 = reactor.listenUDP(0, server, interface="127.0.0.1")
+
+        client = GoodClient()
+        clientStarted = client.startedDeferred = defer.Deferred()
+
+        def cbServerStarted(ignored):
+            self.port2 = reactor.listenUDP(0, client, interface="127.0.0.1")
+            return clientStarted
+
+        d = serverStarted.addCallback(cbServerStarted)
+
+        def cbClientStarted(ignored):
+            client.transport.connect("127.0.0.1",
+                                     server.transport.getHost().port)
+            cAddr = client.transport.getHost()
+            sAddr = server.transport.getHost()
+
+            serverSend = client.packetReceived = defer.Deferred()
+            server.transport.write(b"hello", (cAddr.host, cAddr.port))
+
+            clientWrites = [
+                (b"a",),
+                (b"b", None),
+                (b"c", (sAddr.host, sAddr.port))]
+
+            def cbClientSend(ignored):
+                if clientWrites:
+                    nextClientWrite = server.packetReceived = defer.Deferred()
+                    nextClientWrite.addCallback(cbClientSend)
+                    client.transport.write(*clientWrites.pop(0))
+                    return nextClientWrite
+
+            # No one will ever call .errback on either of these Deferreds,
+            # but there is a non-trivial amount of test code which might
+            # cause them to fail somehow.  So fireOnOneErrback=True.
+            return defer.DeferredList([
+                cbClientSend(None),
+                serverSend],
+                fireOnOneErrback=True)
+
+        d.addCallback(cbClientStarted)
+
+        def cbSendsFinished(ignored):
+            cAddr = client.transport.getHost()
+            sAddr = server.transport.getHost()
+            self.assertEqual(
+                client.packets,
+                [(b"hello", (sAddr.host, sAddr.port))])
+            clientAddr = (cAddr.host, cAddr.port)
+            self.assertEqual(
+                server.packets,
+                [(b"a", clientAddr),
+                 (b"b", clientAddr),
+                 (b"c", clientAddr)])
+
+        d.addCallback(cbSendsFinished)
+
+        def cbFinished(ignored):
+            return defer.DeferredList([
+                defer.maybeDeferred(port1.stopListening),
+                defer.maybeDeferred(self.port2.stopListening)],
+                fireOnOneErrback=True)
+
+        d.addCallback(cbFinished)
+        return d
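
The test above exercises the core datagram API: the transport handed to a L{DatagramProtocol} in C{startProtocol} is written to with C{write}, and inbound packets arrive through C{datagramReceived}. A minimal sketch of the same public calls outside the test harness; the EchoUDP name and the loopback interface are illustrative rather than taken from this file.

    from twisted.internet import reactor
    from twisted.internet.protocol import DatagramProtocol

    class EchoUDP(DatagramProtocol):
        def datagramReceived(self, data, addr):
            # Send each datagram straight back to whoever sent it.
            self.transport.write(data, addr)

    port = reactor.listenUDP(0, EchoUDP(), interface="127.0.0.1")
    print port.getHost()   # reports the OS-chosen port
    reactor.run()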
+
+
+    def test_connectionRefused(self):
+        """
+        A L{ConnectionRefusedError} exception is raised when a connection
+        attempt is actively refused by the other end.
+
+        Note: This test assumes no one is listening on port 80 UDP.
+        """
+        client = GoodClient()
+        clientStarted = client.startedDeferred = defer.Deferred()
+        port = reactor.listenUDP(0, client, interface="127.0.0.1")
+
+        server = Server()
+        serverStarted = server.startedDeferred = defer.Deferred()
+        port2 = reactor.listenUDP(0, server, interface="127.0.0.1")
+
+        d = defer.DeferredList(
+            [clientStarted, serverStarted],
+            fireOnOneErrback=True)
+
+        def cbStarted(ignored):
+            connectionRefused = client.startedDeferred = defer.Deferred()
+            client.transport.connect("127.0.0.1", 80)
+
+            for i in range(10):
+                client.transport.write(intToBytes(i))
+                server.transport.write(intToBytes(i), ("127.0.0.1", 80))
+
+            return self.assertFailure(
+                connectionRefused,
+                error.ConnectionRefusedError)
+
+        d.addCallback(cbStarted)
+
+        def cbFinished(ignored):
+            return defer.DeferredList([
+                defer.maybeDeferred(port.stopListening),
+                defer.maybeDeferred(port2.stopListening)],
+                fireOnOneErrback=True)
+
+        d.addCallback(cbFinished)
+        return d
+
+
+    def test_badConnect(self):
+        """
+        A call to the transport's connect method fails with a L{ValueError}
+        when a non-IP address is passed as the host value.
+
+        A call to a transport's connect method fails with a L{RuntimeError}
+        when the transport is already connected.
+        """
+        client = GoodClient()
+        port = reactor.listenUDP(0, client, interface="127.0.0.1")
+        self.assertRaises(ValueError, client.transport.connect,
+                          "localhost", 80)
+        client.transport.connect("127.0.0.1", 80)
+        self.assertRaises(RuntimeError, client.transport.connect,
+                          "127.0.0.1", 80)
+        return port.stopListening()
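
The docstring above describes connected UDP: once C{connect} has been called, C{write} takes no address, a second C{connect} is an error, and delivery failures surface through C{connectionRefused}. A short sketch of that mode, assuming a running reactor; the peer address and port are illustrative.

    from twisted.internet import reactor
    from twisted.internet.protocol import DatagramProtocol

    class ConnectedClient(DatagramProtocol):
        def startProtocol(self):
            # Illustrative peer; after connect() no address is passed to write().
            self.transport.connect("127.0.0.1", 8005)
            self.transport.write("hello")

        def connectionRefused(self):
            # Called when a write in connected mode is refused by the peer.
            print "nobody is listening on the peer port"

    reactor.listenUDP(0, ConnectedClient())
    reactor.run()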
+
+
+
+    def test_datagramReceivedError(self):
+        """
+        When datagramReceived raises an exception it is logged but the port
+        is not disconnected.
+        """
+        finalDeferred = defer.Deferred()
+
+        def cbCompleted(ign):
+            """
+            Flush the exceptions which the reactor should have logged and make
+            sure they're actually there.
+            """
+            errs = self.flushLoggedErrors(BadClientError)
+            self.assertEqual(len(errs), 2, "Incorrectly found %d errors, expected 2" % (len(errs),))
+        finalDeferred.addCallback(cbCompleted)
+
+        client = BadClient()
+        port = reactor.listenUDP(0, client, interface='127.0.0.1')
+
+        def cbCleanup(result):
+            """
+            Disconnect the port we started and pass on whatever was given to us
+            in case it was a Failure.
+            """
+            return defer.maybeDeferred(port.stopListening).addBoth(lambda ign: result)
+        finalDeferred.addBoth(cbCleanup)
+
+        addr = port.getHost()
+
+        # UDP is not reliable.  Try to send as many as 60 packets before giving
+        # up.  Conceivably, all sixty could be lost, but they probably won't be
+        # unless all UDP traffic is being dropped, and then the rest of these
+        # UDP tests will likely fail as well.  Ideally, this test (and probably
+        # others) wouldn't even use actual UDP traffic: instead, they would
+        # stub out the socket with a fake one which could be made to behave in
+        # whatever way the test desires.  Unfortunately, this is hard because
+        # of differences in various reactor implementations.
+        attempts = list(range(60))
+        succeededAttempts = []
+
+        def makeAttempt():
+            """
+            Send one packet to the listening BadClient.  Set up a 0.1 second
+            timeout to do re-transmits in case the packet is dropped.  When two
+            packets have been received by the BadClient, stop sending and let
+            the finalDeferred's callbacks do some assertions.
+            """
+            if not attempts:
+                try:
+                    self.fail("Not enough packets received")
+                except:
+                    finalDeferred.errback()
+
+            self.failIfIdentical(client.transport, None, "UDP Protocol lost its transport")
+
+            packet = intToBytes(attempts.pop(0))
+            packetDeferred = defer.Deferred()
+            client.setDeferred(packetDeferred)
+            client.transport.write(packet, (addr.host, addr.port))
+
+            def cbPacketReceived(packet):
+                """
+                A packet arrived.  Cancel the timeout for it, record it, and
+                maybe finish the test.
+                """
+                timeoutCall.cancel()
+                succeededAttempts.append(packet)
+                if len(succeededAttempts) == 2:
+                    # The second error has not yet been logged, since the
+                    # exception which causes it hasn't even been raised yet.
+                    # Give the datagramReceived call a chance to finish, then
+                    # let the test finish asserting things.
+                    reactor.callLater(0, finalDeferred.callback, None)
+                else:
+                    makeAttempt()
+
+            def ebPacketTimeout(err):
+                """
+                The packet wasn't received quickly enough.  Try sending another
+                one.  It doesn't matter if the packet for which this was the
+                timeout eventually arrives: makeAttempt discards the
+                Deferred on which this function is the errback, so when
+                datagramReceived fires, its result goes to a new Deferred
+                rather than this one and no AlreadyCalledError is raised.
+                """
+                makeAttempt()
+
+            packetDeferred.addCallbacks(cbPacketReceived, ebPacketTimeout)
+            packetDeferred.addErrback(finalDeferred.errback)
+
+            timeoutCall = reactor.callLater(
+                0.1, packetDeferred.errback,
+                error.TimeoutError(
+                    "Timed out in testDatagramReceivedError"))
+
+        makeAttempt()
+        return finalDeferred
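
The loop above follows a simple pattern for unreliable transports: send, arm a C{reactor.callLater} timeout that errbacks the per-packet Deferred, and cancel the timeout when the reply lands. A sketch of the same idea as a standalone helper; C{sendOnce} and the 0.1-second timeout are assumptions of this sketch, not names defined in the test.

    from twisted.internet import defer, error, reactor

    def sendWithRetry(sendOnce, attempts=60, timeout=0.1):
        # sendOnce() must return a Deferred that fires with the reply.
        result = defer.Deferred()

        def attempt(remaining):
            if not remaining:
                result.errback(error.TimeoutError("no reply after all attempts"))
                return
            d = sendOnce()
            timeoutCall = reactor.callLater(
                timeout, d.errback, error.TimeoutError("attempt timed out"))

            def gotReply(reply):
                if timeoutCall.active():
                    timeoutCall.cancel()
                result.callback(reply)

            def retry(failure):
                failure.trap(error.TimeoutError)
                attempt(remaining - 1)

            d.addCallbacks(gotReply, retry)

        attempt(attempts)
        return result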
+
+
+    def test_NoWarningOnBroadcast(self):
+        """
+        C{'<broadcast>'} is an alternative way to say C{'255.255.255.255'}
+        (C{socket.gethostbyname("<broadcast>")} returns C{'255.255.255.255'});
+        because it resolves to a valid IP address, C{write()} does not need
+        to emit the deprecation warning about passing hostnames to
+        L{twisted.internet.udp.Port.write} in this case.
+        """
+        class fakeSocket:
+            def sendto(self, foo, bar):
+                pass
+
+        p = udp.Port(0, Server())
+        p.socket = fakeSocket()
+        p.write(b"test", ("<broadcast>", 1234))
+
+        warnings = self.flushWarnings([self.test_NoWarningOnBroadcast])
+        self.assertEqual(len(warnings), 0)
+
+
+
+class ReactorShutdownInteraction(unittest.TestCase):
+    """Test reactor shutdown interaction"""
+
+    def setUp(self):
+        """Start a UDP port"""
+        self.server = Server()
+        self.port = reactor.listenUDP(0, self.server, interface='127.0.0.1')
+
+    def tearDown(self):
+        """Stop the UDP port"""
+        return self.port.stopListening()
+
+    def testShutdownFromDatagramReceived(self):
+        """Test reactor shutdown while in a recvfrom() loop"""
+
+        # udp.Port's doRead calls recvfrom() in a loop, as an optimization.
+        # It is important this loop terminate under various conditions.
+        # Previously, if datagramReceived synchronously invoked
+        # reactor.stop(), under certain reactors, the Port's socket would
+        # synchronously disappear, causing an AttributeError inside that
+        # loop.  This was mishandled, causing the loop to spin forever.
+        # This test is primarily to ensure that the loop never spins
+        # forever.
+
+        finished = defer.Deferred()
+        pr = self.server.packetReceived = defer.Deferred()
+
+        def pktRece(ignored):
+            # Simulate reactor.stop() behavior :(
+            self.server.transport.connectionLost()
+            # Then delay this Deferred chain until the protocol has been
+            # disconnected, as the reactor should do in an error condition
+            # such as we are inducing.  This is very much a whitebox test.
+            reactor.callLater(0, finished.callback, None)
+        pr.addCallback(pktRece)
+
+        def flushErrors(ignored):
+            # We are breaking abstraction and calling private APIs, any
+            # number of horrible errors might occur.  As long as the reactor
+            # doesn't hang, this test is satisfied.  (There may be room for
+            # another, stricter test.)
+            self.flushLoggedErrors()
+        finished.addCallback(flushErrors)
+        self.server.transport.write(b'\0' * 64, ('127.0.0.1',
+                                    self.server.transport.getHost().port))
+        return finished
+
+
+
+class MulticastTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.server = Server()
+        self.client = Client()
+        # multicast won't work if we listen over loopback, apparently
+        self.port1 = reactor.listenMulticast(0, self.server)
+        self.port2 = reactor.listenMulticast(0, self.client)
+        self.client.transport.connect(
+            "127.0.0.1", self.server.transport.getHost().port)
+
+
+    def tearDown(self):
+        return gatherResults([
+            maybeDeferred(self.port1.stopListening),
+            maybeDeferred(self.port2.stopListening)])
+
+
+    def testTTL(self):
+        for o in self.client, self.server:
+            self.assertEqual(o.transport.getTTL(), 1)
+            o.transport.setTTL(2)
+            self.assertEqual(o.transport.getTTL(), 2)
+
+
+    def test_loopback(self):
+        """
+        Test that after loopback mode has been set, multicast packets are
+        delivered to their sender.
+        """
+        self.assertEqual(self.server.transport.getLoopbackMode(), 1)
+        addr = self.server.transport.getHost()
+        joined = self.server.transport.joinGroup("225.0.0.250")
+
+        def cbJoined(ignored):
+            d = self.server.packetReceived = Deferred()
+            self.server.transport.write(b"hello", ("225.0.0.250", addr.port))
+            return d
+        joined.addCallback(cbJoined)
+
+        def cbPacket(ignored):
+            self.assertEqual(len(self.server.packets), 1)
+            self.server.transport.setLoopbackMode(0)
+            self.assertEqual(self.server.transport.getLoopbackMode(), 0)
+            self.server.transport.write(b"hello", ("225.0.0.250", addr.port))
+
+            # This is fairly lame.
+            d = Deferred()
+            reactor.callLater(0, d.callback, None)
+            return d
+        joined.addCallback(cbPacket)
+
+        def cbNoPacket(ignored):
+            self.assertEqual(len(self.server.packets), 1)
+        joined.addCallback(cbNoPacket)
+
+        return joined
+
+
+    def test_interface(self):
+        """
+        Test C{getOutgoingInterface} and C{setOutgoingInterface}.
+        """
+        self.assertEqual(
+            self.client.transport.getOutgoingInterface(), "0.0.0.0")
+        self.assertEqual(
+            self.server.transport.getOutgoingInterface(), "0.0.0.0")
+
+        d1 = self.client.transport.setOutgoingInterface("127.0.0.1")
+        d2 = self.server.transport.setOutgoingInterface("127.0.0.1")
+        result = gatherResults([d1, d2])
+
+        def cbInterfaces(ignored):
+            self.assertEqual(
+                self.client.transport.getOutgoingInterface(), "127.0.0.1")
+            self.assertEqual(
+                self.server.transport.getOutgoingInterface(), "127.0.0.1")
+        result.addCallback(cbInterfaces)
+        return result
+
+
+    def test_joinLeave(self):
+        """
+        Test that a multicast group can be joined and left.
+        """
+        d = self.client.transport.joinGroup("225.0.0.250")
+
+        def clientJoined(ignored):
+            return self.client.transport.leaveGroup("225.0.0.250")
+        d.addCallback(clientJoined)
+
+        def clientLeft(ignored):
+            return self.server.transport.joinGroup("225.0.0.250")
+        d.addCallback(clientLeft)
+
+        def serverJoined(ignored):
+            return self.server.transport.leaveGroup("225.0.0.250")
+        d.addCallback(serverJoined)
+
+        return d
+
+
+    def test_joinFailure(self):
+        """
+        Test that an attempt to join an address which is not a multicast
+        address fails with L{error.MulticastJoinError}.
+        """
+        # 127.0.0.1 is not a multicast address, so joining it should fail.
+        return self.assertFailure(
+            self.client.transport.joinGroup("127.0.0.1"),
+            error.MulticastJoinError)
+    if runtime.platform.isWindows() and not runtime.platform.isVista():
+        test_joinFailure.todo = "Windows' multicast is wonky"
+
+
+    def test_multicast(self):
+        """
+        Test that a multicast group can be joined and messages sent to and
+        received from it.
+        """
+        c = Server()
+        p = reactor.listenMulticast(0, c)
+        addr = self.server.transport.getHost()
+
+        joined = self.server.transport.joinGroup("225.0.0.250")
+
+        def cbJoined(ignored):
+            d = self.server.packetReceived = Deferred()
+            c.transport.write(b"hello world", ("225.0.0.250", addr.port))
+            return d
+        joined.addCallback(cbJoined)
+
+        def cbPacket(ignored):
+            self.assertEqual(self.server.packets[0][0], b"hello world")
+        joined.addCallback(cbPacket)
+
+        def cleanup(passthrough):
+            result = maybeDeferred(p.stopListening)
+            result.addCallback(lambda ign: passthrough)
+            return result
+        joined.addCallback(cleanup)
+
+        return joined
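
The sequence used by these multicast tests, listen with C{listenMulticast}, join the group with C{transport.joinGroup}, then C{write} to the group address and port, is the same sequence an application would use. A sketch assuming the reactor provides L{IReactorMulticast}; the group, port number and message are illustrative.

    from twisted.internet import reactor
    from twisted.internet.protocol import DatagramProtocol

    class GroupListener(DatagramProtocol):
        group = "225.0.0.250"

        def startProtocol(self):
            # Join the group once the port is listening, then greet it.
            self.transport.joinGroup(self.group)
            self.transport.write(
                "hello group", (self.group, self.transport.getHost().port))

        def datagramReceived(self, data, addr):
            print "from %s: %r" % (addr, data)

    reactor.listenMulticast(8005, GroupListener(), listenMultiple=True)
    reactor.run()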
+
+
+    def test_multiListen(self):
+        """
+        Test that multiple sockets can listen on the same multicast port and
+        that they both receive multicast messages directed to that address.
+        """
+        firstClient = Server()
+        firstPort = reactor.listenMulticast(
+            0, firstClient, listenMultiple=True)
+
+        portno = firstPort.getHost().port
+
+        secondClient = Server()
+        secondPort = reactor.listenMulticast(
+            portno, secondClient, listenMultiple=True)
+
+        theGroup = "225.0.0.250"
+        joined = gatherResults([self.server.transport.joinGroup(theGroup),
+                                firstPort.joinGroup(theGroup),
+                                secondPort.joinGroup(theGroup)])
+
+
+        def serverJoined(ignored):
+            d1 = firstClient.packetReceived = Deferred()
+            d2 = secondClient.packetReceived = Deferred()
+            firstClient.transport.write(b"hello world", (theGroup, portno))
+            return gatherResults([d1, d2])
+        joined.addCallback(serverJoined)
+
+        def gotPackets(ignored):
+            self.assertEqual(firstClient.packets[0][0], b"hello world")
+            self.assertEqual(secondClient.packets[0][0], b"hello world")
+        joined.addCallback(gotPackets)
+
+        def cleanup(passthrough):
+            result = gatherResults([
+                maybeDeferred(firstPort.stopListening),
+                maybeDeferred(secondPort.stopListening)])
+            result.addCallback(lambda ign: passthrough)
+            return result
+        joined.addBoth(cleanup)
+        return joined
+    if runtime.platform.isWindows():
+        test_multiListen.skip = ("on non-linux platforms it appears multiple "
+                                 "processes can listen, but not multiple sockets "
+                                 "in the same process?")
+
+
+if not interfaces.IReactorUDP(reactor, None):
+    UDPTestCase.skip = "This reactor does not support UDP"
+    ReactorShutdownInteraction.skip = "This reactor does not support UDP"
+if not interfaces.IReactorMulticast(reactor, None):
+    MulticastTestCase.skip = "This reactor does not support multicast"
+
+def checkForLinux22():
+    import os
+    if os.path.exists("/proc/version"):
+        s = open("/proc/version").read()
+        if s.startswith("Linux version"):
+            s = s.split()[2]
+            if s.split(".")[:2] == ["2", "2"]:
+                f = MulticastTestCase.test_interface.im_func
+                f.todo = "figure out why this fails in linux 2.2"
+checkForLinux22()
diff --git a/ThirdParty/Twisted/twisted/test/test_unix.py b/ThirdParty/Twisted/twisted/test/test_unix.py
new file mode 100644
index 0000000..863f665
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_unix.py
@@ -0,0 +1,405 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for implementations of L{IReactorUNIX} and L{IReactorUNIXDatagram}.
+"""
+
+import stat, os, sys, types
+import socket
+
+from twisted.internet import interfaces, reactor, protocol, error, address, defer, utils
+from twisted.python import lockfile
+from twisted.trial import unittest
+
+from twisted.test.test_tcp import MyServerFactory, MyClientFactory
+
+
+class FailedConnectionClientFactory(protocol.ClientFactory):
+    def __init__(self, onFail):
+        self.onFail = onFail
+
+    def clientConnectionFailed(self, connector, reason):
+        self.onFail.errback(reason)
+
+
+
+class UnixSocketTestCase(unittest.TestCase):
+    """
+    Test unix sockets.
+    """
+    def test_peerBind(self):
+        """
+        The address passed to the server factory's C{buildProtocol} method and
+        the address returned by the connected protocol's transport's C{getPeer}
+        method match the address the client socket is bound to.
+        """
+        filename = self.mktemp()
+        peername = self.mktemp()
+        serverFactory = MyServerFactory()
+        connMade = serverFactory.protocolConnectionMade = defer.Deferred()
+        unixPort = reactor.listenUNIX(filename, serverFactory)
+        self.addCleanup(unixPort.stopListening)
+        unixSocket = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        self.addCleanup(unixSocket.close)
+        unixSocket.bind(peername)
+        unixSocket.connect(filename)
+        def cbConnMade(proto):
+            expected = address.UNIXAddress(peername)
+            self.assertEqual(serverFactory.peerAddresses, [expected])
+            self.assertEqual(proto.transport.getPeer(), expected)
+        connMade.addCallback(cbConnMade)
+        return connMade
+
+
+    def test_dumber(self):
+        """
+        L{IReactorUNIX.connectUNIX} can be used to connect a client to a server
+        started with L{IReactorUNIX.listenUNIX}.
+        """
+        filename = self.mktemp()
+        serverFactory = MyServerFactory()
+        serverConnMade = defer.Deferred()
+        serverFactory.protocolConnectionMade = serverConnMade
+        unixPort = reactor.listenUNIX(filename, serverFactory)
+        self.addCleanup(unixPort.stopListening)
+        clientFactory = MyClientFactory()
+        clientConnMade = defer.Deferred()
+        clientFactory.protocolConnectionMade = clientConnMade
+        c = reactor.connectUNIX(filename, clientFactory)
+        d = defer.gatherResults([serverConnMade, clientConnMade])
+        def allConnected((serverProtocol, clientProtocol)):
+
+            # Incidental assertion which may or may not be redundant with some
+            # other test.  This probably deserves its own test method.
+            self.assertEqual(clientFactory.peerAddresses,
+                             [address.UNIXAddress(filename)])
+
+            clientProtocol.transport.loseConnection()
+            serverProtocol.transport.loseConnection()
+        d.addCallback(allConnected)
+        return d
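
C{listenUNIX} and C{connectUNIX} behave like their TCP counterparts with a filesystem path in place of a port number. A minimal sketch of the pair outside the test suite; the socket path and the Echo protocol are illustrative.

    from twisted.internet import protocol, reactor

    class Echo(protocol.Protocol):
        def dataReceived(self, data):
            self.transport.write(data)

    factory = protocol.ServerFactory()
    factory.protocol = Echo
    reactor.listenUNIX("/tmp/echo.sock", factory)

    creator = protocol.ClientCreator(reactor, protocol.Protocol)
    d = creator.connectUNIX("/tmp/echo.sock")
    d.addCallback(lambda proto: proto.transport.write("hi"))
    reactor.run()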
+
+
+    def test_pidFile(self):
+        """
+        A lockfile is created and locked when L{IReactorUNIX.listenUNIX} is
+        called and released when the Deferred returned by the L{IListeningPort}
+        provider's C{stopListening} method is called back.
+        """
+        filename = self.mktemp()
+        serverFactory = MyServerFactory()
+        serverConnMade = defer.Deferred()
+        serverFactory.protocolConnectionMade = serverConnMade
+        unixPort = reactor.listenUNIX(filename, serverFactory, wantPID=True)
+        self.assertTrue(lockfile.isLocked(filename + ".lock"))
+
+        # XXX This part would test something about the checkPID parameter, but
+        # it doesn't actually.  It should be rewritten to test the several
+        # different possible behaviors.  -exarkun
+        clientFactory = MyClientFactory()
+        clientConnMade = defer.Deferred()
+        clientFactory.protocolConnectionMade = clientConnMade
+        c = reactor.connectUNIX(filename, clientFactory, checkPID=1)
+
+        d = defer.gatherResults([serverConnMade, clientConnMade])
+        def _portStuff((serverProtocol, clientProto)):
+
+            # Incidental assertion which may or may not be redundant with some
+            # other test.  This probably deserves its own test method.
+            self.assertEqual(clientFactory.peerAddresses,
+                             [address.UNIXAddress(filename)])
+
+            clientProto.transport.loseConnection()
+            serverProtocol.transport.loseConnection()
+            return unixPort.stopListening()
+        d.addCallback(_portStuff)
+
+        def _check(ignored):
+            self.failIf(lockfile.isLocked(filename + ".lock"), 'locked')
+        d.addCallback(_check)
+        return d
+
+
+    def test_socketLocking(self):
+        """
+        L{IReactorUNIX.listenUNIX} raises L{error.CannotListenError} if passed
+        the name of a file on which a server is already listening.
+        """
+        filename = self.mktemp()
+        serverFactory = MyServerFactory()
+        unixPort = reactor.listenUNIX(filename, serverFactory, wantPID=True)
+
+        self.assertRaises(
+            error.CannotListenError,
+            reactor.listenUNIX, filename, serverFactory, wantPID=True)
+
+        def stoppedListening(ign):
+            unixPort = reactor.listenUNIX(filename, serverFactory, wantPID=True)
+            return unixPort.stopListening()
+
+        return unixPort.stopListening().addCallback(stoppedListening)
+
+
+    def _uncleanSocketTest(self, callback):
+        self.filename = self.mktemp()
+        source = ("from twisted.internet import protocol, reactor\n"
+                  "reactor.listenUNIX(%r, protocol.ServerFactory(), wantPID=True)\n") % (self.filename,)
+        env = {'PYTHONPATH': os.pathsep.join(sys.path)}
+
+        d = utils.getProcessValue(sys.executable, ("-u", "-c", source), env=env)
+        d.addCallback(callback)
+        return d
+
+
+    def test_uncleanServerSocketLocking(self):
+        """
+        If passed C{True} for the C{wantPID} parameter, a server can be started
+        listening with L{IReactorUNIX.listenUNIX} when passed the name of a
+        file on which a previous server, which did not exit cleanly, had
+        been listening using the C{wantPID} option.
+        """
+        def ranStupidChild(ign):
+            # If this next call succeeds, our lock handling is correct.
+            p = reactor.listenUNIX(self.filename, MyServerFactory(), wantPID=True)
+            return p.stopListening()
+        return self._uncleanSocketTest(ranStupidChild)
+
+
+    def test_connectToUncleanServer(self):
+        """
+        If passed C{True} for the C{checkPID} parameter, a client connection
+        attempt made with L{IReactorUNIX.connectUNIX} fails with
+        L{error.BadFileError}.
+        """
+        def ranStupidChild(ign):
+            d = defer.Deferred()
+            f = FailedConnectionClientFactory(d)
+            c = reactor.connectUNIX(self.filename, f, checkPID=True)
+            return self.assertFailure(d, error.BadFileError)
+        return self._uncleanSocketTest(ranStupidChild)
+
+
+    def _reprTest(self, serverFactory, factoryName):
+        """
+        Test the C{__str__} and C{__repr__} implementations of a UNIX port when
+        used with the given factory.
+        """
+        filename = self.mktemp()
+        unixPort = reactor.listenUNIX(filename, serverFactory)
+
+        connectedString = "<%s on %r>" % (factoryName, filename)
+        self.assertEqual(repr(unixPort), connectedString)
+        self.assertEqual(str(unixPort), connectedString)
+
+        d = defer.maybeDeferred(unixPort.stopListening)
+        def stoppedListening(ign):
+            unconnectedString = "<%s (not listening)>" % (factoryName,)
+            self.assertEqual(repr(unixPort), unconnectedString)
+            self.assertEqual(str(unixPort), unconnectedString)
+        d.addCallback(stoppedListening)
+        return d
+
+
+    def test_reprWithClassicFactory(self):
+        """
+        The two string representations of the L{IListeningPort} returned by
+        L{IReactorUNIX.listenUNIX} contain the name of the classic factory
+        class being used and the filename on which the port is listening or
+        indicate that the port is not listening.
+        """
+        class ClassicFactory:
+            def doStart(self):
+                pass
+
+            def doStop(self):
+                pass
+
+        # Sanity check
+        self.assertIsInstance(ClassicFactory, types.ClassType)
+
+        return self._reprTest(
+            ClassicFactory(), "twisted.test.test_unix.ClassicFactory")
+
+
+    def test_reprWithNewStyleFactory(self):
+        """
+        The two string representations of the L{IListeningPort} returned by
+        L{IReactorUNIX.listenUNIX} contain the name of the new-style factory
+        class being used and the filename on which the port is listening or
+        indicate that the port is not listening.
+        """
+        class NewStyleFactory(object):
+            def doStart(self):
+                pass
+
+            def doStop(self):
+                pass
+
+        # Sanity check
+        self.assertIsInstance(NewStyleFactory, type)
+
+        return self._reprTest(
+            NewStyleFactory(), "twisted.test.test_unix.NewStyleFactory")
+
+
+
+class ClientProto(protocol.ConnectedDatagramProtocol):
+    started = stopped = False
+    gotback = None
+
+    def __init__(self):
+        self.deferredStarted = defer.Deferred()
+        self.deferredGotBack = defer.Deferred()
+
+    def stopProtocol(self):
+        self.stopped = True
+
+    def startProtocol(self):
+        self.started = True
+        self.deferredStarted.callback(None)
+
+    def datagramReceived(self, data):
+        self.gotback = data
+        self.deferredGotBack.callback(None)
+
+class ServerProto(protocol.DatagramProtocol):
+    started = stopped = False
+    gotwhat = gotfrom = None
+
+    def __init__(self):
+        self.deferredStarted = defer.Deferred()
+        self.deferredGotWhat = defer.Deferred()
+
+    def stopProtocol(self):
+        self.stopped = True
+
+    def startProtocol(self):
+        self.started = True
+        self.deferredStarted.callback(None)
+
+    def datagramReceived(self, data, addr):
+        self.gotfrom = addr
+        self.transport.write("hi back", addr)
+        self.gotwhat = data
+        self.deferredGotWhat.callback(None)
+
+
+
+class DatagramUnixSocketTestCase(unittest.TestCase):
+    """
+    Test datagram UNIX sockets.
+    """
+    def test_exchange(self):
+        """
+        Test that a datagram can be sent to and received by a server and vice
+        versa.
+        """
+        clientaddr = self.mktemp()
+        serveraddr = self.mktemp()
+        sp = ServerProto()
+        cp = ClientProto()
+        s = reactor.listenUNIXDatagram(serveraddr, sp)
+        self.addCleanup(s.stopListening)
+        c = reactor.connectUNIXDatagram(serveraddr, cp, bindAddress=clientaddr)
+        self.addCleanup(c.stopListening)
+
+        d = defer.gatherResults([sp.deferredStarted, cp.deferredStarted])
+        def write(ignored):
+            cp.transport.write("hi")
+            return defer.gatherResults([sp.deferredGotWhat,
+                                        cp.deferredGotBack])
+
+        def _cbTestExchange(ignored):
+            self.assertEqual("hi", sp.gotwhat)
+            self.assertEqual(clientaddr, sp.gotfrom)
+            self.assertEqual("hi back", cp.gotback)
+
+        d.addCallback(write)
+        d.addCallback(_cbTestExchange)
+        return d
+
+
+    def test_cannotListen(self):
+        """
+        L{IReactorUNIXDatagram.listenUNIXDatagram} raises
+        L{error.CannotListenError} if the unix socket specified is already in
+        use.
+        """
+        addr = self.mktemp()
+        p = ServerProto()
+        s = reactor.listenUNIXDatagram(addr, p)
+        self.failUnlessRaises(error.CannotListenError, reactor.listenUNIXDatagram, addr, p)
+        s.stopListening()
+        os.unlink(addr)
+
+    # test connecting to bound and connected (somewhere else) address
+
+    def _reprTest(self, serverProto, protocolName):
+        """
+        Test the C{__str__} and C{__repr__} implementations of a UNIX datagram
+        port when used with the given protocol.
+        """
+        filename = self.mktemp()
+        unixPort = reactor.listenUNIXDatagram(filename, serverProto)
+
+        connectedString = "<%s on %r>" % (protocolName, filename)
+        self.assertEqual(repr(unixPort), connectedString)
+        self.assertEqual(str(unixPort), connectedString)
+
+        stopDeferred = defer.maybeDeferred(unixPort.stopListening)
+        def stoppedListening(ign):
+            unconnectedString = "<%s (not listening)>" % (protocolName,)
+            self.assertEqual(repr(unixPort), unconnectedString)
+            self.assertEqual(str(unixPort), unconnectedString)
+        stopDeferred.addCallback(stoppedListening)
+        return stopDeferred
+
+
+    def test_reprWithClassicProtocol(self):
+        """
+        The two string representations of the L{IListeningPort} returned by
+        L{IReactorUNIXDatagram.listenUNIXDatagram} contain the name of the
+        classic protocol class being used and the filename on which the port is
+        listening or indicate that the port is not listening.
+        """
+        class ClassicProtocol:
+            def makeConnection(self, transport):
+                pass
+
+            def doStop(self):
+                pass
+
+        # Sanity check
+        self.assertIsInstance(ClassicProtocol, types.ClassType)
+
+        return self._reprTest(
+            ClassicProtocol(), "twisted.test.test_unix.ClassicProtocol")
+
+
+    def test_reprWithNewStyleProtocol(self):
+        """
+        The two string representations of the L{IListeningPort} returned by
+        L{IReactorUNIXDatagram.listenUNIXDatagram} contain the name of the
+        new-style protocol class being used and the filename on which the port
+        is listening or indicate that the port is not listening.
+        """
+        class NewStyleProtocol(object):
+            def makeConnection(self, transport):
+                pass
+
+            def doStop(self):
+                pass
+
+        # Sanity check
+        self.assertIsInstance(NewStyleProtocol, type)
+
+        return self._reprTest(
+            NewStyleProtocol(), "twisted.test.test_unix.NewStyleProtocol")
+
+
+
+if not interfaces.IReactorUNIX(reactor, None):
+    UnixSocketTestCase.skip = "This reactor does not support UNIX domain sockets"
+if not interfaces.IReactorUNIXDatagram(reactor, None):
+    DatagramUnixSocketTestCase.skip = "This reactor does not support UNIX datagram sockets"
diff --git a/ThirdParty/Twisted/twisted/test/test_usage.py b/ThirdParty/Twisted/twisted/test/test_usage.py
new file mode 100644
index 0000000..5a20f01
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/test_usage.py
@@ -0,0 +1,584 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.python.usage}, a command line option parsing library.
+"""
+
+from twisted.trial import unittest
+from twisted.python import usage
+
+
+class WellBehaved(usage.Options):
+    optParameters = [['long', 'w', 'default', 'and a docstring'],
+                     ['another', 'n', 'no docstring'],
+                     ['longonly', None, 'noshort'],
+                     ['shortless', None, 'except',
+                      'this one got docstring'],
+                  ]
+    optFlags = [['aflag', 'f',
+                 """
+
+                 flagallicious docstringness for this here
+
+                 """],
+                ['flout', 'o'],
+                ]
+
+    def opt_myflag(self):
+        self.opts['myflag'] = "PONY!"
+
+
+    def opt_myparam(self, value):
+        self.opts['myparam'] = "%s WITH A PONY!" % (value,)
+
+
+
+class ParseCorrectnessTest(unittest.TestCase):
+    """
+    Test Options.parseArgs for correct values under good conditions.
+    """
+    def setUp(self):
+        """
+        Instantiate and parseOptions a well-behaved Options class.
+        """
+
+        self.niceArgV = ("--long Alpha -n Beta "
+                         "--shortless Gamma -f --myflag "
+                         "--myparam Tofu").split()
+
+        self.nice = WellBehaved()
+
+        self.nice.parseOptions(self.niceArgV)
+
+    def test_checkParameters(self):
+        """
+        Checking that parameters have correct values.
+        """
+        self.assertEqual(self.nice.opts['long'], "Alpha")
+        self.assertEqual(self.nice.opts['another'], "Beta")
+        self.assertEqual(self.nice.opts['longonly'], "noshort")
+        self.assertEqual(self.nice.opts['shortless'], "Gamma")
+
+    def test_checkFlags(self):
+        """
+        Checking that flags have correct values.
+        """
+        self.assertEqual(self.nice.opts['aflag'], 1)
+        self.assertEqual(self.nice.opts['flout'], 0)
+
+    def test_checkCustoms(self):
+        """
+        Checking that custom flags and parameters have correct values.
+        """
+        self.assertEqual(self.nice.opts['myflag'], "PONY!")
+        self.assertEqual(self.nice.opts['myparam'], "Tofu WITH A PONY!")
+
+
+
+class TypedOptions(usage.Options):
+    optParameters = [
+        ['fooint', None, 392, 'Foo int', int],
+        ['foofloat', None, 4.23, 'Foo float', float],
+        ['eggint', None, None, 'Egg int without default', int],
+        ['eggfloat', None, None, 'Egg float without default', float],
+    ]
+
+    def opt_under_score(self, value):
+        """
+        This option has an underscore in its name to exercise the _ to -
+        translation.
+        """
+        self.underscoreValue = value
+    opt_u = opt_under_score
+
+
+
+class TypedTestCase(unittest.TestCase):
+    """
+    Test Options.parseArgs for options with forced types.
+    """
+    def setUp(self):
+        self.usage = TypedOptions()
+
+    def test_defaultValues(self):
+        """
+        Test parsing of default values.
+        """
+        argV = []
+        self.usage.parseOptions(argV)
+        self.assertEqual(self.usage.opts['fooint'], 392)
+        self.assert_(isinstance(self.usage.opts['fooint'], int))
+        self.assertEqual(self.usage.opts['foofloat'], 4.23)
+        self.assert_(isinstance(self.usage.opts['foofloat'], float))
+        self.assertEqual(self.usage.opts['eggint'], None)
+        self.assertEqual(self.usage.opts['eggfloat'], None)
+
+
+    def test_parsingValues(self):
+        """
+        Test basic parsing of int and float values.
+        """
+        argV = ("--fooint 912 --foofloat -823.1 "
+                "--eggint 32 --eggfloat 21").split()
+        self.usage.parseOptions(argV)
+        self.assertEqual(self.usage.opts['fooint'], 912)
+        self.assert_(isinstance(self.usage.opts['fooint'], int))
+        self.assertEqual(self.usage.opts['foofloat'], -823.1)
+        self.assert_(isinstance(self.usage.opts['foofloat'], float))
+        self.assertEqual(self.usage.opts['eggint'], 32)
+        self.assert_(isinstance(self.usage.opts['eggint'], int))
+        self.assertEqual(self.usage.opts['eggfloat'], 21.)
+        self.assert_(isinstance(self.usage.opts['eggfloat'], float))
+
+
+    def test_underscoreOption(self):
+        """
+        A dash in an option name is translated to an underscore before being
+        dispatched to a handler.
+        """
+        self.usage.parseOptions(['--under-score', 'foo'])
+        self.assertEqual(self.usage.underscoreValue, 'foo')
+
+
+    def test_underscoreOptionAlias(self):
+        """
+        An option name with a dash in it can have an alias.
+        """
+        self.usage.parseOptions(['-u', 'bar'])
+        self.assertEqual(self.usage.underscoreValue, 'bar')
+
+
+    def test_invalidValues(self):
+        """
+        Check that passing wrong values raises an error.
+        """
+        argV = "--fooint egg".split()
+        self.assertRaises(usage.UsageError, self.usage.parseOptions, argV)
+
+
+
+class WrongTypedOptions(usage.Options):
+    optParameters = [
+        ['barwrong', None, None, 'Bar with wrong coerce', 'he']
+    ]
+
+
+class WeirdCallableOptions(usage.Options):
+    def _bar(value):
+        raise RuntimeError("Ouch")
+    def _foo(value):
+        raise ValueError("Yay")
+    optParameters = [
+        ['barwrong', None, None, 'Bar with strange callable', _bar],
+        ['foowrong', None, None, 'Foo with strange callable', _foo]
+    ]
+
+
+class WrongTypedTestCase(unittest.TestCase):
+    """
+    Test Options.parseArgs for wrong coerce options.
+    """
+    def test_nonCallable(self):
+        """
+        Check that using a non callable type fails.
+        """
+        us =  WrongTypedOptions()
+        argV = "--barwrong egg".split()
+        self.assertRaises(TypeError, us.parseOptions, argV)
+
+    def test_notCalledInDefault(self):
+        """
+        Test that the coerce functions are not called if no values are
+        provided.
+        """
+        us = WeirdCallableOptions()
+        argV = []
+        us.parseOptions(argV)
+
+    def test_weirdCallable(self):
+        """
+        Test what happens when coerce functions raise errors.
+        """
+        us = WeirdCallableOptions()
+        argV = "--foowrong blah".split()
+        # ValueError is swallowed as UsageError
+        e = self.assertRaises(usage.UsageError, us.parseOptions, argV)
+        self.assertEqual(str(e), "Parameter type enforcement failed: Yay")
+
+        us = WeirdCallableOptions()
+        argV = "--barwrong blah".split()
+        # RuntimeError is not swallowed
+        self.assertRaises(RuntimeError, us.parseOptions, argV)
+
+
+class OutputTest(unittest.TestCase):
+    def test_uppercasing(self):
+        """
+        Error output case adjustment does not mangle options
+        """
+        opt = WellBehaved()
+        e = self.assertRaises(usage.UsageError,
+                              opt.parseOptions, ['-Z'])
+        self.assertEqual(str(e), 'option -Z not recognized')
+
+
+class InquisitionOptions(usage.Options):
+    optFlags = [
+        ('expect', 'e'),
+        ]
+    optParameters = [
+        ('torture-device', 't',
+         'comfy-chair',
+         'set preferred torture device'),
+        ]
+
+
+class HolyQuestOptions(usage.Options):
+    optFlags = [('horseback', 'h',
+                 'use a horse'),
+                ('for-grail', 'g'),
+                ]
+
+
+class SubCommandOptions(usage.Options):
+    optFlags = [('europian-swallow', None,
+                 'set default swallow type to Europian'),
+                ]
+    subCommands = [
+        ('inquisition', 'inquest', InquisitionOptions,
+            'Perform an inquisition'),
+        ('holyquest', 'quest', HolyQuestOptions,
+            'Embark upon a holy quest'),
+        ]
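
The C{subCommands} list above maps a command name and an alias to a nested L{usage.Options} class; after parsing, the selected name is available as C{subCommand} and the parsed nested options as C{subOptions}, as the tests below check. A sketch of how a program typically drives such a class; the error handling shown is an assumption of this sketch.

    import sys
    from twisted.python import usage

    def main(argv=None):
        config = SubCommandOptions()
        try:
            config.parseOptions(argv)      # parses sys.argv[1:] when None
        except usage.UsageError, errortext:
            print '%s: %s' % (sys.argv[0], errortext)
            sys.exit(1)
        if config.subCommand == 'inquisition':
            print 'device:', config.subOptions['torture-device']
        elif config.subCommand == 'holyquest':
            print 'on horseback:', config.subOptions['horseback']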
+
+
+class SubCommandTest(unittest.TestCase):
+
+    def test_simpleSubcommand(self):
+        o = SubCommandOptions()
+        o.parseOptions(['--europian-swallow', 'inquisition'])
+        self.assertEqual(o['europian-swallow'], True)
+        self.assertEqual(o.subCommand, 'inquisition')
+        self.failUnless(isinstance(o.subOptions, InquisitionOptions))
+        self.assertEqual(o.subOptions['expect'], False)
+        self.assertEqual(o.subOptions['torture-device'], 'comfy-chair')
+
+    def test_subcommandWithFlagsAndOptions(self):
+        o = SubCommandOptions()
+        o.parseOptions(['inquisition', '--expect', '--torture-device=feather'])
+        self.assertEqual(o['europian-swallow'], False)
+        self.assertEqual(o.subCommand, 'inquisition')
+        self.failUnless(isinstance(o.subOptions, InquisitionOptions))
+        self.assertEqual(o.subOptions['expect'], True)
+        self.assertEqual(o.subOptions['torture-device'], 'feather')
+
+    def test_subcommandAliasWithFlagsAndOptions(self):
+        o = SubCommandOptions()
+        o.parseOptions(['inquest', '--expect', '--torture-device=feather'])
+        self.assertEqual(o['europian-swallow'], False)
+        self.assertEqual(o.subCommand, 'inquisition')
+        self.failUnless(isinstance(o.subOptions, InquisitionOptions))
+        self.assertEqual(o.subOptions['expect'], True)
+        self.assertEqual(o.subOptions['torture-device'], 'feather')
+
+    def test_anotherSubcommandWithFlagsAndOptions(self):
+        o = SubCommandOptions()
+        o.parseOptions(['holyquest', '--for-grail'])
+        self.assertEqual(o['europian-swallow'], False)
+        self.assertEqual(o.subCommand, 'holyquest')
+        self.failUnless(isinstance(o.subOptions, HolyQuestOptions))
+        self.assertEqual(o.subOptions['horseback'], False)
+        self.assertEqual(o.subOptions['for-grail'], True)
+
+    def test_noSubcommand(self):
+        o = SubCommandOptions()
+        o.parseOptions(['--europian-swallow'])
+        self.assertEqual(o['europian-swallow'], True)
+        self.assertEqual(o.subCommand, None)
+        self.failIf(hasattr(o, 'subOptions'))
+
+    def test_defaultSubcommand(self):
+        o = SubCommandOptions()
+        o.defaultSubCommand = 'inquest'
+        o.parseOptions(['--europian-swallow'])
+        self.assertEqual(o['europian-swallow'], True)
+        self.assertEqual(o.subCommand, 'inquisition')
+        self.failUnless(isinstance(o.subOptions, InquisitionOptions))
+        self.assertEqual(o.subOptions['expect'], False)
+        self.assertEqual(o.subOptions['torture-device'], 'comfy-chair')
+
+    def test_subCommandParseOptionsHasParent(self):
+        class SubOpt(usage.Options):
+            def parseOptions(self, *a, **kw):
+                self.sawParent = self.parent
+                usage.Options.parseOptions(self, *a, **kw)
+        class Opt(usage.Options):
+            subCommands = [
+                ('foo', 'f', SubOpt, 'bar'),
+                ]
+        o = Opt()
+        o.parseOptions(['foo'])
+        self.failUnless(hasattr(o.subOptions, 'sawParent'))
+        self.assertEqual(o.subOptions.sawParent , o)
+
+    def test_subCommandInTwoPlaces(self):
+        """
+        The .parent pointer is correct even when the same Options class is
+        used twice.
+        """
+        class SubOpt(usage.Options):
+            pass
+        class OptFoo(usage.Options):
+            subCommands = [
+                ('foo', 'f', SubOpt, 'quux'),
+                ]
+        class OptBar(usage.Options):
+            subCommands = [
+                ('bar', 'b', SubOpt, 'quux'),
+                ]
+        oFoo = OptFoo()
+        oFoo.parseOptions(['foo'])
+        oBar=OptBar()
+        oBar.parseOptions(['bar'])
+        self.failUnless(hasattr(oFoo.subOptions, 'parent'))
+        self.failUnless(hasattr(oBar.subOptions, 'parent'))
+        self.failUnlessIdentical(oFoo.subOptions.parent, oFoo)
+        self.failUnlessIdentical(oBar.subOptions.parent, oBar)
+
+
+class HelpStringTest(unittest.TestCase):
+    def setUp(self):
+        """
+        Instantiate a well-behaved Options class.
+        """
+
+        self.niceArgV = ("--long Alpha -n Beta "
+                         "--shortless Gamma -f --myflag "
+                         "--myparam Tofu").split()
+
+        self.nice = WellBehaved()
+
+    def test_noGoBoom(self):
+        """
+        __str__ shouldn't go boom.
+        """
+        try:
+            self.nice.__str__()
+        except Exception, e:
+            self.fail(e)
+
+    def test_whitespaceStripFlagsAndParameters(self):
+        """
+        Extra whitespace in flag and parameters docs is stripped.
+        """
+        # We test this by making sure aflag and its help string are on the
+        # same line.
+        lines = [s for s in str(self.nice).splitlines() if s.find("aflag")>=0]
+        self.failUnless(len(lines) > 0)
+        self.failUnless(lines[0].find("flagallicious") >= 0)
+
+
+class PortCoerceTestCase(unittest.TestCase):
+    """
+    Test the behavior of L{usage.portCoerce}.
+    """
+    def test_validCoerce(self):
+        """
+        Test the answers with valid input.
+        """
+        self.assertEqual(0, usage.portCoerce("0"))
+        self.assertEqual(3210, usage.portCoerce("3210"))
+        self.assertEqual(65535, usage.portCoerce("65535"))
+
+    def test_errorCoerce(self):
+        """
+        Test error path.
+        """
+        self.assertRaises(ValueError, usage.portCoerce, "")
+        self.assertRaises(ValueError, usage.portCoerce, "-21")
+        self.assertRaises(ValueError, usage.portCoerce, "212189")
+        self.assertRaises(ValueError, usage.portCoerce, "foo")
+
+
+
+class ZshCompleterTestCase(unittest.TestCase):
+    """
+    Test the behavior of the various L{twisted.python.usage.Completer}
+    classes for producing output usable by the zsh tab-completion system.
+    """
+    def test_completer(self):
+        """
+        Completer produces zsh shell-code that produces no completion matches.
+        """
+        c = usage.Completer()
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, ':some-option:')
+
+        c = usage.Completer(descr='some action', repeat=True)
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, '*:some action:')
+
+
+    def test_files(self):
+        """
+        CompleteFiles produces zsh shell-code that completes file names
+        according to a glob.
+        """
+        c = usage.CompleteFiles()
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, ':some-option (*):_files -g "*"')
+
+        c = usage.CompleteFiles('*.py')
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, ':some-option (*.py):_files -g "*.py"')
+
+        c = usage.CompleteFiles('*.py', descr="some action", repeat=True)
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, '*:some action (*.py):_files -g "*.py"')
+
+
+    def test_dirs(self):
+        """
+        CompleteDirs produces zsh shell-code that completes directory names.
+        """
+        c = usage.CompleteDirs()
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, ':some-option:_directories')
+
+        c = usage.CompleteDirs(descr="some action", repeat=True)
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, '*:some action:_directories')
+
+
+    def test_list(self):
+        """
+        CompleteList produces zsh shell-code that completes words from a fixed
+        list of possibilities.
+        """
+        c = usage.CompleteList('ABC')
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, ':some-option:(A B C)')
+
+        c = usage.CompleteList(['1', '2', '3'])
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, ':some-option:(1 2 3)')
+
+        c = usage.CompleteList(['1', '2', '3'], descr='some action',
+                               repeat=True)
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, '*:some action:(1 2 3)')
+
+
+    def test_multiList(self):
+        """
+        CompleteMultiList produces zsh shell-code that completes multiple
+        comma-separated words from a fixed list of possibilities.
+        """
+        c = usage.CompleteMultiList('ABC')
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, ':some-option:_values -s , \'some-option\' A B C')
+
+        c = usage.CompleteMultiList(['1','2','3'])
+        got = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(got, ':some-option:_values -s , \'some-option\' 1 2 3')
+
+        c = usage.CompleteMultiList(['1','2','3'], descr='some action',
+                                    repeat=True)
+        got = c._shellCode('some-option', usage._ZSH)
+        expected = '*:some action:_values -s , \'some action\' 1 2 3'
+        self.assertEqual(got, expected)
+
+
+    def test_usernames(self):
+        """
+        CompleteUsernames produces zsh shell-code that completes system
+        usernames.
+        """
+        c = usage.CompleteUsernames()
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(out, ':some-option:_users')
+
+        c = usage.CompleteUsernames(descr='some action', repeat=True)
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(out, '*:some action:_users')
+
+
+    def test_groups(self):
+        """
+        CompleteGroups produces zsh shell-code that completes system group
+        names.
+        """
+        c = usage.CompleteGroups()
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(out, ':group:_groups')
+
+        c = usage.CompleteGroups(descr='some action', repeat=True)
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(out, '*:some action:_groups')
+
+
+    def test_hostnames(self):
+        """
+        CompleteHostnames produces zsh shell-code that completes hostnames.
+        """
+        c = usage.CompleteHostnames()
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(out, ':some-option:_hosts')
+
+        c = usage.CompleteHostnames(descr='some action', repeat=True)
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(out, '*:some action:_hosts')
+
+
+    def test_userAtHost(self):
+        """
+        CompleteUserAtHost produces zsh shell-code that completes hostnames or
+        a word of the form <username>@<hostname>.
+        """
+        c = usage.CompleteUserAtHost()
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertTrue(out.startswith(':host | user@host:'))
+
+        c = usage.CompleteUserAtHost(descr='some action', repeat=True)
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertTrue(out.startswith('*:some action:'))
+
+
+    def test_netInterfaces(self):
+        """
+        CompleteNetInterfaces produces zsh shell-code that completes system
+        network interface names.
+        """
+        c = usage.CompleteNetInterfaces()
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(out, ':some-option:_net_interfaces')
+
+        c = usage.CompleteNetInterfaces(descr='some action', repeat=True)
+        out = c._shellCode('some-option', usage._ZSH)
+        self.assertEqual(out, '*:some action:_net_interfaces')
+
+
+
+class CompleterNotImplementedTestCase(unittest.TestCase):
+    """
+    Using an unknown shell constant with the various Completer() classes
+    should raise NotImplementedError
+    """
+    def test_unknownShell(self):
+        """
+        Using an unknown shellType should raise NotImplementedError
+        """
+        classes = [usage.Completer, usage.CompleteFiles,
+                   usage.CompleteDirs, usage.CompleteList,
+                   usage.CompleteMultiList, usage.CompleteUsernames,
+                   usage.CompleteGroups, usage.CompleteHostnames,
+                   usage.CompleteUserAtHost, usage.CompleteNetInterfaces]
+
+        for cls in classes:
+            try:
+                action = cls()
+            except:
+                action = cls(None)
+            self.assertRaises(NotImplementedError, action._shellCode,
+                              None, "bad_shell_type")
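
The completer objects exercised above are normally attached to an L{usage.Options} subclass through its C{compData} attribute, which is what the zsh completion generator reads. A sketch of that wiring under the assumption that C{usage.Completions} with an C{optActions} mapping is the attachment point; the option names are illustrative.

    from twisted.python import usage

    class ServeOptions(usage.Options):
        optParameters = [
            ['file', 'f', None, 'file to serve'],
            ['user', 'u', None, 'user to run as'],
        ]
        # Completion metadata consumed by the zsh code generator.
        compData = usage.Completions(
            optActions={'file': usage.CompleteFiles('*.py', descr='python file'),
                        'user': usage.CompleteUsernames()})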
diff --git a/ThirdParty/Twisted/twisted/test/testutils.py b/ThirdParty/Twisted/twisted/test/testutils.py
new file mode 100644
index 0000000..a310ea2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/test/testutils.py
@@ -0,0 +1,55 @@
+from cStringIO import StringIO
+from twisted.internet.protocol import FileWrapper
+
+class IOPump:
+    """Utility to pump data between clients and servers for protocol testing.
+
+    Perhaps this is a utility worthy of being in protocol.py?
+    """
+    def __init__(self, client, server, clientIO, serverIO):
+        self.client = client
+        self.server = server
+        self.clientIO = clientIO
+        self.serverIO = serverIO
+
+    def flush(self):
+        "Pump until there is no more input or output."
+        while self.pump():
+            pass
+
+    def pump(self):
+        """Move data back and forth.
+
+        Returns whether any data was moved.
+        """
+        self.clientIO.seek(0)
+        self.serverIO.seek(0)
+        cData = self.clientIO.read()
+        sData = self.serverIO.read()
+        self.clientIO.seek(0)
+        self.serverIO.seek(0)
+        self.clientIO.truncate()
+        self.serverIO.truncate()
+        for byte in cData:
+            self.server.dataReceived(byte)
+        for byte in sData:
+            self.client.dataReceived(byte)
+        if cData or sData:
+            return 1
+        else:
+            return 0
+
+
+def returnConnected(server, client):
+    """Take two Protocol instances and connect them.
+    """
+    cio = StringIO()
+    sio = StringIO()
+    client.makeConnection(FileWrapper(cio))
+    server.makeConnection(FileWrapper(sio))
+    pump = IOPump(client, server, cio, sio)
+    # Challenge-response authentication:
+    pump.flush()
+    # Uh...
+    pump.flush()
+    return pump
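
C{returnConnected} and L{IOPump} let two protocol instances talk entirely in memory, which keeps protocol tests free of real sockets. A sketch of typical use; the Ping and Pong protocols are illustrative stand-ins.

    from twisted.internet.protocol import Protocol

    class Ping(Protocol):
        def connectionMade(self):
            self.transport.write("ping")

    class Pong(Protocol):
        received = ""
        def dataReceived(self, data):
            self.received = self.received + data
            self.transport.write("pong")

    server = Pong()
    client = Ping()
    pump = returnConnected(server, client)   # connects both and flushes
    assert server.received == "ping"
    client.transport.write("ping again")
    pump.flush()                             # exchange newly queued bytes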
diff --git a/ThirdParty/Twisted/twisted/topfiles/CREDITS b/ThirdParty/Twisted/twisted/topfiles/CREDITS
new file mode 100644
index 0000000..a4eeece
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/topfiles/CREDITS
@@ -0,0 +1,60 @@
+The Matrix
+
+- Glyph "Glyph" Lefkowitz <glyph at twistedmatrix.com>
+  electric violin
+- Sean "Riley" Riley <sean at twistedmatrix.com>
+  grand piano
+- Allen "Dash" Short <washort at twistedmatrix.com>
+  vocals and guitar
+- Christopher "Radix" Armstrong <radix at twistedmatrix.com>
+  percussion
+- Paul "z3p" Swartz <z3p at twistedmatrix.com>
+  oboe
+- Jürgen "snibril" Hermann <jh at twistedmatrix.com>
+  synthesizer
+- Moshe "vertical" Zadka <moshez at twistedmatrix.com>
+  accordion
+- Benjamin Bruheim <grolgh at online.no>
+  kazoo
+- Travis B. "Nafai" Hartwell <nafai at twistedmatrix.com>
+  keyboards
+- Itamar "itamar" Shtull-Trauring <twisted at itamarst.org>
+  alto recorder
+- Andrew "spiv" Bennetts <andrew at puzzling.org>
+  glockenspiel
+- Kevin "Acapnotic" Turner <acapnotic at twistedmatrix.com>
+  trombone
+- Donovan "fzZzy" Preston <dp at twistedmatrix.com>
+  bass and harmonium
+- Jp "exarkun" Calderone <exarkun at twistedmatrix.com>
+  geopolitical sociographic dissonance engine
+- Gavin "skreech" Cooper <coop at coopweb.org> 
+  torque wrench
+- Jonathan "jml" Lange <jml at twistedmatrix.com>
+  pipe organ
+- Bob "etrepum" Ippolito <bob at redivi.com>
+  low frequency oscillator
+- Pavel "PenguinOfDoom" Pergamenshchik <ppergame at gmail.com>
+  electronic balalaika
+- Jonathan D. "slyphon" Simms <slyphon at twistedmatrix.com>
+  theremin and drums
+- Brian "warner" Warner <warner at twistedmatrix.com>
+  hertzian field renderer
+- Mary Gardiner <mary-twisted at puzzling.org>
+  krummhorn
+- Eric "teratorn" Mangold <teratorn at twistedmatrix.com>
+  serpentine bassoon
+- Tommi "Tv" Virtanen <tv at twistedmatrix.com>
+  didgeridoo
+- Justin "justinj" Johnson <justinj at twistedmatrix.com>
+  bass mandolin
+- Ralph "ralphm" Meijer <twisted at ralphm.ik.nu>
+  vocals and timbales
+- David "dreid" Reid <dreid at dreid.org>
+  banjo
+
+Extras
+
+- Jerry Hebert <jerry at cynics.org>
+- Nick Moffit <nick at zork.org>
+- Jeremy Fincher
diff --git a/ThirdParty/Twisted/twisted/topfiles/ChangeLog.Old b/ThirdParty/Twisted/twisted/topfiles/ChangeLog.Old
new file mode 100644
index 0000000..30594b2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/topfiles/ChangeLog.Old
@@ -0,0 +1,3888 @@
+2005-03-12  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/scripts/mktap.py, twisted/scripts/twistd.py,
+	twisted/application/app.py: Changed UID and GID defaults for Process
+	to None.  Changed mktap behavior to not specify UID and GID if they
+	are not given on the command line.  Changed application startup to
+	not change UID or GID if they are not given.  Changed twistd to add
+	UID and GID setting command line arguments.
+
+2005-02-10  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/defer.py: DeferredLock, DeferredSemaphore, and
+	DeferredQueue added.
+
+	* twisted/test/test_defer.py: Tests for above mentioned three new
+	classes.
+
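A hedged sketch of the primitives described in the entry above; fetch() is a hypothetical function, and only DeferredSemaphore, DeferredQueue and defer.succeed are Twisted names:

    from twisted.internet import defer

    sem = defer.DeferredSemaphore(tokens=2)      # at most two calls in flight

    def fetch(url):
        # placeholder for some Deferred-returning operation
        return defer.succeed("contents of %s" % url)

    def limitedFetch(url):
        # run() acquires a token, calls fetch(url), and releases the token
        # once the returned Deferred fires
        return sem.run(fetch, url)

    queue = defer.DeferredQueue()
    queue.put("item")
    queue.get().addCallback(lambda item: item)   # fires at once with "item"
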
+2004-11-27  Brian Warner  <warner at lothar.com>
+
+	* util.py (SignalStateManager.save): don't save signal handlers
+	for SIGKILL and SIGSTOP, since we can't set them anyway.
+	Python2.4c1 raises an error when you try.
+
+2004-11-07  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_internet.py: correctly check for SSL support.
+	Improve timeout for testCallLater and testGetDelayedCalls to avoid
+	spurious failures on slow test systems. Close sockets in
+	PortStringification to fix trial warnings.
+
+	* twisted/internet/ssl.py: add a comment describing the correct
+	way to import twisted.internet.ssl (since it might partially fail
+	if OpenSSL is not available)
+
+2004-11-06  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/trial/assertions.py: assertRaises/failUnlessRaises now 
+	returns the caught exception to allow tests to inspect the contents.
+
+2004-11-02  Brian Warner  <warner at lothar.com>
+
+	* loopback.py (loopbackTCP): use trial's spinWhile and spinUntil
+	primitives instead of doing reactor.iterate() ourselves. Make sure
+	to wait for everything before finishing.
+
+2004-10-26  Cory Dodt <corydodt at twistedmatrix.com>
+
+	* twisted/python/{which,process}.py,
+	  twisted/test/{test_wprocess,wprocess_for_testing}.py,
+	  twisted/internet/{default,error,wprocess,process}.py:  back out
+	  wprocess due to test failures in wprocess and new trial.  Resolves 
+	  issue 760.
+
+2004-10-24  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* TCP: Half-close of write and read for TCP connections, including
+	protocol notification for protocols that implement
+	IHalfCloseableProtocol.
+
+2004-10-07  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* Transports: Add a maximum to the number of bytes that will be
+	held in the write buffer even after they have been sent.  This
+	puts a maximum on the cost of writing faster than the network
+	can accommodate.
+
+2004-10-06  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* Transports: New TCP/SSL/etc. buffering algorithm. All writes are
+	now stored until next iteration before being written, and many
+	small writes are not expensive.
+
+2004-09-30  Brian Warner  <warner at lothar.com>
+
+	* glib2reactor.py: new reactor that uses just glib2, not gtk2.
+	This one doesn't require a DISPLAY, and cannot be used for GUI
+	apps.
+
+	* gtk2reactor.py: import gobject *after* pygtk.require, to make
+	sure we get the same versions of both
+
+2004-09-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/internet/defer.py: Add deferredGenerator and
+	waitForDeferred. This lets you write kinda-sorta
+	synchronous-looking code that uses Deferreds. See the
+	waitForDeferred docstring.
+
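A rough illustration of the API named in the entry above, modeled on the waitForDeferred docstring it points to; getThing() stands in for any Deferred-returning call:

    from twisted.internet import defer

    def getThing():
        return defer.succeed("thing")        # stand-in for real async work

    def doStuff():
        d = defer.waitForDeferred(getThing())
        yield d                              # suspend until d fires
        thing = d.getResult()                # re-raises here on failure
        yield "got: %s" % thing              # becomes the final result
    doStuff = defer.deferredGenerator(doStuff)

    # doStuff() now returns a Deferred that fires with "got: thing".
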
+2004-09-11  Cory Dodt  <corydodt at twistedmatrix.com>
+
+	* twisted/python/{which,process}.py,
+	  twisted/test/{test_wprocess,wprocess_for_testing}.py,
+	  twisted/internet/{default,error,wprocess,process}.py: merge the
+	  "wprocess" branch which uses Trent Mick's process.py to enable
+	  spawnProcess in the default reactor on Windows
+
+2004-08-24  Brian Warner  <warner at lothar.com>
+
+	* twisted/application/internet.py (TimerService): make it possible
+	to restart a stopped TimerService. Threw out a lot of (apparently)
+	unnecessary code in the process. Make sure it gets pickled in a
+	not-running state too.
+	* twisted/test/test_application.py (TestInternet2.testTimer): test
+	the changes, and update the way the test peeks inside TimerService 
+
+2004-07-18  Paul Swartz <z3p at twistedmatrix.com>
+
+	* twisted/internet/utils.py: By passing errortoo=1, you can get 
+	stderr from getProcessOutput
+
+2004-07-18  Paul Swartz <z3p at twistedmatrix.com>
+
+	* twisted/conch/unix.py: if the utmp module is available, record
+	user logins/logouts into utmp/wtmp.
+
+2004-06-25  Paul Swartz  <z3p at twistedmatrix.com>
+	* twisted/conch/checkers.py: Use functionality of crypt module instead
+	of an external module.
+
+2004-06-25  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/spread/banana.py: Disabled automatic import and use of
+	cBanana.  PB will now use the pure-Python version of banana unless
+	cBanana is manually installed by the application.
+
+2004-06-12  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/client: added -r flag to reconnect to the server if
+	the connection is lost (closes 623).
+
+2004-06-06  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_enterprise.py: test open callback and
+	connect/disconnect.
+
+	* twisted/enterprise/adbapi.py: add open callback support
+	and disconnect() method. Issue 480.
+
+2004-06-05  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/enterprise/adbapi.py: Don't log sql exceptions (issue 631).
+	Remove deprecated api.
+
+	* twisted/news/database.py: do not use adbapi.Augmentation
+
+2004-06-03  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/gtk2reactor.py: The choice between glib event
+	loop and gtk+ event loop is determined by argument at reactor
+	install time.
+
+2004-05-31  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/enterprise/sqlreflector.py: don't use Augmentation
+
+	* twisted/enterprise/populate.sql: remove
+
+	* twisted/enterprise/schema.sql: remove
+
+	* twisted/enterprise/row.py: remove deprecated classes
+
+	* twisted/enterprise/dbgadgets.py: remove
+
+	* twisted/enterprise/dbcred.py: remove
+
+	* twisted/test/test_enterprise.py: Fix Firebird test case.
+
+2004-05-21  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/gtk2reactor.py: use glib event loop directly
+	instead of gtk2's event loop if possible.
+
+2004-05-04  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted.news, twisted.protocols.nntp: Moved back into trunk
+	pending an alternate split-up strategy.
+
+2004-05-04  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted.internet.reactor.listenUDP: transport.write() on UDP
+	ports no longer supports unresolved hostnames (though deprecated
+	support still exists).
+
+2004-04-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/lore/nevowlore.py, twisted/plugins.tml: Added Nevow
+	support for lore. See docstring of twisted.lore.nevowlore.
+	
+2004-04-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted.news, twisted.protocols.nntp: Moved into a third party
+	package. Deprecated backwards-compatibility exists by importing
+	from the third-party package if available.
+
+2004-04-11  Paul Swartz <z3p at twistedmatrix.com>
+
+	* twisted.conch: refactored the Conch client to separate connecting
+	to a server from user authentication from client-specific actions.
+
+2004-03-23  Andrew Bennetts <spiv at twistedmatrix.com>
+
+	* twisted.protocols.http: Small optimisation to HTTP implementation.
+	This changes return value of toChunk to a tuple of strings, rather
+	than one string.
+
+2004-04-03  Paul Swartz <z3p at twistedmatrix.com>
+
+	* twisted.python.lockfile: added lockfile support, based on
+	liblockfile.
+	* twisted.internet.unix.Port: added a wantPID kwarg.  If True, it
+	checks for and gets a lockfile for the UNIX socket.
+	* twisted.internet.unix.Connector: added a checkPID kwarg.  If True,
+	it checks that the lockfile for the socket is current.
+
+2004-03-23  Pavel Pergamenshchik  <pp64 at cornell.edu>
+
+	* twisted.internet.iocp: Support for Windows IO Completion Ports.
+	Use with "--reactor=iocp" parameter to twistd or trial.
+
+2004-03-20  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted.internet: getHost(), getPeer(), buildProtocol() etc.
+	all use address objects from twisted.internet.address.
+
+	* twisted/internet/udp.py: Connected UDP support is now part of
+	the standard listenUDP-resulting UDP transport using a connect()
+	method.
+
+2004-03-18  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/application/internet.py: Changed TimerService to
+	log errors from the function it calls.
+
+	* twisted/application/test_application.py: Added test case
+	for logging of exceptions from functions TimerService calls.
+
+2004-03-07  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.2.1alpha1.
+
+2004-03-03  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/web/server.py: Fix UnsupportedMethod so that users'
+	allowedMethods are actually honored.
+
+	* twisted/web/resource.py: (Resource.render) If the resource has
+	an 'allowedMethods' attribute, pass it to UnsupportedMethod.
+
+2004-02-27  Andrew Bennetts  <spiv at twistedmatrix.com>
+
+	* twisted/internet/defer.py: Add consumeErrors flag to DeferredList.
+	This takes care of the most common use-case for the recently
+	deprecated addDeferred method.
+
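A short sketch of the consumeErrors flag added in the entry above; the values are arbitrary:

    from twisted.internet import defer

    d1 = defer.succeed(1)
    d2 = defer.fail(RuntimeError("boom"))

    # consumeErrors=True reports the failure as a (False, Failure) pair in
    # the result list instead of leaving it to be logged as unhandled.
    dl = defer.DeferredList([d1, d2], consumeErrors=True)
    dl.addCallback(lambda results: results)  # [(True, 1), (False, <Failure>)]
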
+2004-02-28  Dave Peticolas <dave at krondo.com>
+
+	* setup.py: install tap2rpm as a bin script
+
+	* twisted/test/test_enterprise.py: Test Firebird db. Fix typos.
+
+2004-02-27  Andrew Bennetts  <spiv at twistedmatrix.com>
+
+	* twisted/internet/defer.py: Deprecated DeferredList.addDeferred.  It
+	isn't as useful as it looks, and can have surprising behaviour.
+
+2004-02-25  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/protocols/dns.py: Fixed a bug in TCP support: It
+	wouldn't process any messages after the first, causing AXFR
+	queries to be totally broken (in addition to other problems in the
+	implementation of AXFR).
+
+	* twisted/names/client.py: Fixed the AXFR client (lookupZone),
+	thanks to DJB's wonderful documentation of the horribleness of
+	DNS.
+
+2004-02-25  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.2.0 final! Same as rc3.
+
+2004-02-24  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.2.0rc3 (same as rc2, with cBanana bug
+	fixed).
+
+2004-02-19  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/application/service.py (IService.disownServiceParent)
+	(IServiceCollection.removeService): These may return Deferred if they
+	have asynchronous side effects.
+
+2004-02-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.2.0rc2. Brown-paper bag release bug.
+
+2004-02-17  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.2.0rc1.
+
+2004-02-13  Brian Warner  <warner at lothar.com>
+
+	* doc/howto/faq.xhtml: add entry on transport.getPeer()
+
+2004-01-31  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.2alpha2 (problem with Debian packaging).
+
+2004-01-30  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.2alpha1.
+
+2004-01-23  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/scripts/trial.py: trial now supports a --coverage
+	option, requiring Python 2.3.3. Give it a directory name (relative
+	to _trial_temp) to put code-coverage info in. It uses the stdlib
+	'trace' module.
+
+2004-01-21  Pavel Pergamenshchik  <pp64 at cornell.edu>
+
+	* twisted/protocols/stateful.py: A new way to write protocols!
+	Current state is encoded as a pair (func, len). As soon as len
+	of data arrives, func is called with that amount of data.  New
+	state is returned from func.
+	* twisted/test/test_stateful.py: Tests and an example, an
+	Int32StringReceiver implementation.
+
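A minimal sketch of the (func, len) convention described in the entry above, in the spirit of the Int32StringReceiver example it mentions; stringReceived() is the application hook here:

    import struct
    from twisted.protocols.stateful import StatefulProtocol

    class Int32Strings(StatefulProtocol):
        def getInitialState(self):
            # wait for a 4-byte big-endian length prefix
            return self._getHeader, 4

        def _getHeader(self, data):
            (length,) = struct.unpack("!i", data)
            # next state: wait for `length' bytes of payload
            return self._getBody, length

        def _getBody(self, data):
            self.stringReceived(data)
            return self._getHeader, 4

        def stringReceived(self, s):
            print "received:", s
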
+2004-01-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/web/resource.py: The default render method of Resource
+	now supports delegating to methods of the form "render_*" where
+	"*" is the HTTP method that was used to make the
+	request. Examples: render_GET, render_HEAD, render_CONNECT, and
+	so on. This won't break any existing code - when people want to
+	use the better API, they can stop overriding 'render' and instead
+	override individual render_* methods.
+
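A tiny sketch of the render_* delegation described in the entry above; Greeter is a hypothetical resource:

    from twisted.web.resource import Resource

    class Greeter(Resource):
        isLeaf = True

        def render_GET(self, request):
            # dispatched by the default render() based on the request method
            return "<html><body>hello</body></html>"
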
+2004-01-13  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/web/soap.py: Beginning of client SOAP support.
+
+2004-01-10  Andrew Bennetts <spiv at twistedmatrix.com>
+
+	* twisted/protocols/ftp.py: Added support for partial downloads
+	and uploads to FTPClient (see the offset parameter of retrieveFile).
+
+2004-01-09  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/imap4.py: Add IMessageCopier interface to allow
+	for optimized implementations of message copying.
+
+2004-01-06  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/default.py (PosixReactorBase.spawnProcess): add
+	a 'childFDs' argument which allows the child's file descriptors to
+	be arbitrarily mapped to parent FDs or pipes. This allows you to
+	set up additional pipes into the child (say for a GPG passphrase
+	or separate status information).
+
+	* twisted/internet/process.py (Process): add childFDs, split out
+	ProcessReader and ProcessWriter (so that Process itself is no
+	longer also reading stdout).
+
+	* twisted/internet/protocol.py (ProcessProtocol): add new
+	childDataReceived and childConnectionLost methods, which default
+	to invoking the old methods for backwards compatibility
+
+	* twisted/test/test_process.py (FDTest): add test for childFDs
+	mapping. Also add timeouts to most tests, and make all
+	reactor.iterate() loops wait 10ms between iterations to avoid
+	spamming the CPU quite so badly. Closes issue435.
+	* twisted/test/process_fds.py: new child process for FDTest
+
+	* doc/howto/process.xhtml: document childFDs argument, add example
+
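A hedged sketch of the childFDs mapping described in the entry above. The tool path is hypothetical, and writeToChild()/closeChildFD() follow the process-transport API as documented in later releases, so the exact names may differ slightly in this snapshot:

    from twisted.internet import reactor, protocol

    class PassphraseProtocol(protocol.ProcessProtocol):
        def connectionMade(self):
            # fd 3 is an extra pipe the child can read (e.g. a passphrase)
            self.transport.writeToChild(3, "secret passphrase\n")
            self.transport.closeChildFD(3)

        def childDataReceived(self, childFD, data):
            print "fd %d said: %r" % (childFD, data)

    reactor.spawnProcess(PassphraseProtocol(), "/usr/bin/some-tool",
                         ["some-tool"],
                         childFDs={0: "w", 1: "r", 2: "r", 3: "w"})
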
+2004-01-04  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/gladereactor.py: logs all network traffic for
+	TCP/SSL/Unix sockets, allowing traffic to be displayed.
+
+2004-01-04  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_enterprise.py: test deleting rows not in cache
+
+	* twisted/enterprise/reflector.py: deleted rows don't have to be
+	in cache
+
+	* doc/examples/row_example.py: use KeyFactory from row_util
+
+	* doc/examples/row_util.py: add KeyFactory
+
+2003-12-31  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/defer.py (Deferred.setTimeout): if the Deferred
+	has already been called, don't bother with the timeout. This
+	happens when trial.util.deferredResult is used with a timeout
+	argument and the Deferred was created by defer.succeed().
+	* twisted/test/test_defer.py
+	(DeferredTestCase.testImmediateSuccess2): test for same
+
+2003-12-31  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/ident.py: Client and server ident implementation
+	* twisted/test/test_ident.py: Test cases for ident protocol
+
+2003-12-29  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/spread/pb.py: Changed PBServerFactory to use "protocol"
+	instance attribute for Broker creation.
+
+2003-12-26  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/web/server.py: display of tracebacks on web pages can
+	now be disabled by setting displayTracebacks to False on the Site
+	or by using the applicable tap option. Woven does not yet use
+	this attribute.
+
+2003-12-23  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/web/client.py: if Host header is passed, use that
+	instead of extracting from request URL.
+
+2003-12-14  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_enterprise.py: Frederico Di Gregorio's patch
+	adding a psycopg test case.
+
+2003-12-09  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.1, based on rc4.
+
+2003-12-06  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/wxreactor.py: Added experimental wxPython reactor,
+	which seems to work better than the twisted.internet.wxsupport.
+
+2003-12-05  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/ssh/filetransfer.py, session.py: added SFTPv3 support
+	to the Conch server.
+
+2003-12-04  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.1rc4, based on rc2. rc3 never happened!
+
+2003-12-04  Brian Warner  <warner at lothar.com>
+
+	* twisted/persisted/sob.py (Persistent): fix misspelled class name,
+	add compatibility binding to "Persistant" (sic).
+
+	* twisted/test/test_sob.py: use Persistent
+	* twisted/application/service.py (Application): use Persistent
+
+2003-12-03  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/imap4.py: Added support for the
+	IDLE command (RFC 2177).
+
+2003-12-03  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/python/log.py: Added exception handling to
+	log publishing code.  Observers which raise exceptions
+	will now be removed from the observer list.
+
+2003-12-02  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.1rc3.
+
+2003-12-01  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.1rc2 (from CVS HEAD).
+
+2003-12-01  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/python/runtime.py: Added seconds method to Platform
+	class.
+
+	* twisted/internet/base.py, twisted/internet/task.py: Changed
+	use of time.time() to use Platform.seconds() instead.
+
+2003-11-24  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/abstract.py: Changed FileDescriptor's
+	registerProducer method to immediately call the given producer's
+	stopProducing method if the FileDescriptor is in the process of
+	or has finished disconnecting.
+
+2003-11-24  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/imap4.py: Fix incorrect behavior of closing the
+	mailbox in response to an EXPUNGE command.
+
+2003-11-21  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/trial/runner.py: Added missing calls to setUpClass and
+	tearDownClass in SingletonRunner.
+
+2003-11-21  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.1rc1.
+
+2003-11-20  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/imap4.py: Fixed incorrect generation of
+	INTERNALDATE information.
+
+2003-11-20  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/abstract.py: Added an assert to
+	FileDescriptor.resumeProducing to prevent it from being
+	called when the transport is no longer connected.
+
+2003-11-20  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/tasks.py: LoopingCall added.
+
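A small sketch of LoopingCall; the import path below follows the later twisted.internet.task module name rather than the tasks.py spelling in the entry above, and poll() is a placeholder:

    from twisted.internet import reactor
    from twisted.internet.task import LoopingCall

    def poll():
        print "polling..."

    lc = LoopingCall(poll)
    lc.start(5.0)                   # calls poll() now, then every 5 seconds
    reactor.callLater(30, lc.stop)  # stop the loop after 30 seconds
    reactor.run()
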
+2003-10-14  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/tasks.py: Deprecated scheduling API removed.
+
+2003-11-18  Jonathan Simms  <jonathan at embassynetworks.com>
+
+	* twisted/protocols/ftp.py: refactored to add cred support,
+	pipelining, security.
+	* twisted/test/test_ftp.py: tests for the new ftp
+
+2003-11-18  Sam Jordan  <sam at twistedmatrix.com>
+
+	* twisted/protocols/msn.py: support for MSNP8
+	* doc/examples/msn_example.py: small msn example
+
+2003-11-13  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/ssh/agent.py: support for the OpenSSH agent protocol
+	* twisted/conch/ssh/connection.py: fix broken channel retrieval code
+	* twisted/conch/ssh/userauth.py: refactoring to allow use of the agent
+	* twisted/conch/ssh/transport.py: fix intermittent test failure
+	* twisted/internet/protocol.py: add UNIX socket support to
+	ClientCreator
+	* twisted/scripts/conch.py: use the key agent if available, also
+	agent forwarding
+
+2003-11-07  Brian Warner  <warner at lothar.com>
+
+	* twisted/application/app.py (getApplication): provide a more
+	constructive error message when a .tac file doesn't define
+	'application'. Closes issue387.
+
+2003-11-01  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/ssh/common.py: use GMPy for faster math if it's
+	available
+
+2003-10-24  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.0 final. Same codebase as rc2.
+
+2003-10-24  Brian Warner  <warner at lothar.com>
+
+	* doc/howto/test-standard.xhtml: Add section on how to clean up.
+
+	* twisted/test/test_conch.py: improve post-test cleanup. Addresses
+	problems seen in issue343.
+
+	* twisted/internet/base.py (ReactorBase.callLater): prefix
+	"internal" parameter names with an underscore, to avoid colliding
+	with named parameters in the user's callback invocation. Closes
+	issue347.
+	(ReactorBase.addSystemEventTrigger)
+	(ReactorBase.callWhenRunning)
+	(ReactorBase.callInThread): same
+	* doc/howto/coding-standard.xhtml (Callback Arguments): explain why
+
+2003-10-22  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.0rc2.
+
+2003-10-21  Andrew Bennetts  <spiv at twistedmatrix.com>
+
+	* twisted/lore/tree.py, twisted/lore/lint.py,
+	doc/howto/stylesheet.css: add a plain 'listing' class, for file
+	listings that aren't python source or HTML.  This has slightly changed
+	the classes in the generated HTML, so custom stylesheets may need
+	updating.
+
+2003-10-16  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.0alpha3.
+
+2003-10-16  Brian Warner  <warner at lothar.com>
+
+	* doc/howto/pb-cred.xhtml: update for newcred. Closes issue172.
+
+2003-10-15  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/base.py: add optional debug code, enabled with
+	base.DelayedCall.debug=True . If active, the call stack which
+	invoked reactor.callLater will be recorded in each DelayedCall. If
+	an exception happens when the timer function is run, the creator
+	stack will be logged in addition to the usual log.deferr().
+
+	* twisted/internet/defer.py: add some optional debug code, enabled
+	with defer.Deferred.debug=True . If active, it will record a stack
+	trace when the Deferred is created, and another when it is first
+	invoked. AlreadyCalledErrors will be given these two stack traces,
+	making it slightly easier to find the source of the problem.
+
+2003-10-15  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.0alpha2 (alpha1 was dead in the water).
+
+2003-10-15  Brian Warner  <warner at lothar.com>
+
+	* setup.py: remove cReactor/ to the sandbox. Closes issue318.
+
+2003-10-14  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/web/static.py: registry no longer has support for
+	getting services based on their interfaces.
+
+2003-10-14  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.1.0alpha1.
+
+2003-10-13  Bob Ippolito  <bob at redivi.com>
+
+	* doc/howto/choosing-reactor.xhtml:
+	Added cfreactor/Cocoa information.
+
+	* doc/examples/cocoaDemo:
+	Removed, replaced by doc/examples/Cocoa cfreactor demos.
+
+	* doc/examples/Cocoa:
+	Moved from sandbox/etrepum/examples/PyObjC, cleaned up.
+
+	* twisted/internet/cfsupport, twisted/internet/cfreactor.py:
+	Moved from sandbox/etrepum, cleaned up.
+
+	* twisted/application/app.py:
+	Added 'cf' -> twisted.internet.cfreactor to reactorTypes
+
+	* setup.py:
+	sys.platform=='darwin' - build cfsupport, do not build cReactor.
+
+	* INSTALL:
+	Changed URL of pimp repository to shorter version.
+
+2003-10-12  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* bin/tktwistd, twisted/scripts/tktwistd.py, doc/man/tktwistd.1:
+	Removed.
+
+2003-10-12  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/spread/pb.py: Perspective Broker no longer sends
+	detailed tracebacks over the wire unless the "unsafeTracebacks"
+	attribute is set on the factory.
+
+2003-10-02  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* setup.py, twisted/test/test_dir.py, twisted/python/_c_dir.c:
+	Removed _c_dir extension module for portability and maintenance
+	reasons.
+
+2003-10-03  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/spread/util.py twisted/test/test_spread.py: Fix issue
+	286
+
+2003-10-01  Brian Warner  <warner at lothar.com>
+
+	* twisted/web/client.py (HTTPDownloader): accept either a filename
+	or a file-like object (it must respond to .write and .close, and
+	partial requests will not be used with file-like objects). errback
+	the deferred if an IOError occurs in .open, .write, or .close,
+	usually something like "permission denied" or "file system full".
+	Closes issue234.
+	* twisted/test/test_webclient.py (WebClientTestCase.write): verify
+	that the errback gets called
+
+	* twisted/scripts/trial.py (run): add --until-failure option to
+	re-run the test until something fails. Closes issue87.
+
+2003-09-30  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_conch.py (testOurServerOpenSSHClient): replace
+	reactor.run() with .iterate calls: when using .run, exceptions in
+	the server cause a hang.
+
+2003-09-29  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/tap/procmon.py twisted/plugins.tml: remove procmon
+	tap. It was crufty and hard to port properly to new application.
+
+2003-09-29  Brian Warner  <warner at lothar.com>
+
+	* twisted/scripts/trial.py (Options.opt_reactor): make trial
+	accept the same reactor-name abbreviations as twistd does. Closes
+	issue69.
+	(top): add test-case-name tag
+
+	* doc/man/trial.1: document the change
+
+2003-09-28  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.8alpha3.
+
+2003-09-27  Cory Dodt <corydodt at yahoo.com>
+
+	* win32/main.aap win32/pyx.x-foo.iss.template win32/README.win32:
+	Be nice to people who don't install Python for "All Users" on win32.
+
+2003-09-18  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/application/strports.py twisted/test/test_strports.py:
+	New API/mini-language for defining ports
+
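An illustrative sketch of the strports mini-language from the entry above; the port number and document root are arbitrary examples:

    from twisted.application import strports
    from twisted.web import server, static

    site = server.Site(static.File("/var/www"))

    # "tcp:8080" is a strports description; "unix:/var/run/web.sock" is
    # another example, for a UNIX-domain socket.
    svc = strports.service("tcp:8080", site)
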
+2003-09-18  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/spider.py: removed, it was unmaintained.
+
+2003-09-19  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/names/authority.py twisted/test/test_names.py
+	twisted/protocols/dns.py: Client and server support for TTLs on
+	all records. All Record_* types now take a ttl= keyword
+	argument. You can pass the ttl= argument to all the record classes
+	in your pyzones, too.
+
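A brief sketch of the ttl= keyword from the entry above; the address and hostname are made up:

    from twisted.protocols import dns

    a  = dns.Record_A("192.0.2.10", ttl=3600)         # one-hour TTL
    ns = dns.Record_NS("ns1.example.com", ttl=86400)  # one-day TTL
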
+2003-09-19  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/application/__init__.py twisted/application/app.py
+	twisted/application/compat.py twisted/application/internet.py
+	twisted/application/service.py twisted/scripts/twistd.py
+	twisted/scripts/twistw.py twisted/scripts/mktap.py
+	twisted/scripts/tapconvert.py bin/twistw: Update to new-style
+	applications.
+
+2003-09-19  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/names/client.py: Instantiation of theResolver global made
+	lazy.  As a result importing it directly will now fail if it has not
+	yet been created.  It should not be used directly anymore; instead,
+	use the module-scope lookup methods, or instantiate your own
+	resolver.
+
+	* twisted/mail/relaymanager.py: Instantiation of MXCalculator made
+	lazy.
+
+2003-09-18  Stephen Thorne  <stephen at thorne.id.au>
+
+	* twisted/web/distrib.py: Removed dependency on twisted.web.widgets;
+	it now uses Woven instead.
+
+2003-09-18  Stephen Thorne  <stephen at thorne.id.au>
+
+	* doc/howto/woven-reference.html: Added this new documentation file.
+	* doc/howto/index.html: Added woven-reference to index
+	* admin/: Added woven-reference.tex to book.tex
+
+2003-09-18  Stephen Thorne  <stephen at thorne.id.au>
+
+	* twisted/web/woven/widgets.py: Stop the 'Option' widget from having a
+	name="" attribute. Closes issue255.
+
+2003-09-16  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.8alpha1.
+
+	* .: Releasing Twisted 1.0.8alpha2 (Fixed Debian packages).
+
+2003-09-13  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.7 (no code changes since 1.0.7rc1).
+
+	* twisted/web/vhost.py: Un-gobble the path segment that a vhost eats
+	when the resource we're wrapping isLeaf. Potentially closes issue125.
+
+2003-09-12  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/web/microdom.py: lenient mode correctly handles <script>
+	tags with CDATA or comments protecting the code (closes issue #231).
+
+2003-09-10  Tommi Virtanen  <tv at twistedmatrix.com>
+
+	* HTTPS support for XML-RPC and web clients (closes issue #236).
+
+2003-08-29  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.7rc1.
+
+2003-09-12  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/spread/pb.py: new cred support for Perspective Broker.
+
+2003-08-26  Dave Peticolas  <dave at krondo.com>
+
+	* doc/howto/xmlrpc.html: document sub-handler and introspection
+
+	* twisted/test/test_xmlrpc.py: test introspection support
+
+	* twisted/web/xmlrpc.py: implement sub-handlers and introspection
+	support
+
+2003-08-23  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/gtk2reactor.py: force timeout values to be
+	integers, because recent pygtk's complain when they get floats
+
+2003-08-19  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.7alpha5.
+
+2003-08-18  Jp Calderone <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/imap4.py: Remove support code for old versions
+	of IMailbox.fetch(); also change the interface once again (no
+	backwards compat this time) to require sequence numbers to be
+	returned, not just whatever the MessageSet spit out.
+
+2003-08-16  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_import.py: update for enterprise
+
+	* twisted/enterprise/sqlreflector.py: use dbpool directly
+
+	* twisted/enterprise/row.py: deprecate KeyFactory and StatementBatch
+
+	* twisted/enterprise/dbpassport.py: remove
+
+	* twisted/enterprise/dbgadgets.py: deprecate all
+
+	* twisted/enterprise/dbcred.py: deprecate all
+
+	* twisted/enterprise/adbapi.py: deprecate Augmentation. deprecate
+	crufty bits of ConnectionPool API.
+
+2003-08-11  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/enterprise/sqlreflector.py: fix docs
+
+2003-08-08  Donovan Preston  <dp at twistedmatrix.com>
+
+	* Added getAllPatterns API to Widget, which returns all nodes
+	which have the given pattern name.
+
+	* Refactored List widget to use getAllPatterns, so you can have
+	more than one listHeader, listFooter, and emptyList node.
+
+2003-08-08  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/internet/base.py: remove unused internal function.
+
+	* twisted/internet/gladereactor.py: remove unused internal function.
+	clean up imports.
+
+2003-08-07  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.7alpha4.
+
+2003-08-06  Donovan Preston <dp at twistedmatrix.com>
+
+	* Major woven optimizations.
+
+	* Removal of inspect-based hacks allowing backwards compatibility
+	with the old IModel interface. All your IModel methods should take
+	the request as the first argument now.
+
+	* Default to non-case-preserving when importing Woven templates,
+	and case-insensitive microdom. If you are using getPattern or
+	getAttribute in any of your woven code, you will have to make sure
+	to pass all lowercase strings.
+
+	* Removal of __eq__ magic methods in microdom. This was just
+	slowing woven down far too much, since without it python can
+	use identity when looking for a node in replaceChild. This means
+	you will have to explicitly use the isEqualToDocument or
+	isEqualToNode call if you are testing for the equality of microdom
+	nodes.
+
+	* Removal of usage of hasAttribute, getAttribute, removeAttribute
+	from woven for a speed gain at the expense of tying woven slightly
+	closer to microdom. Nobody will notice.
+
+	* Improved getPattern semantics thanks to a patch by Rich
+	Cavenaugh. getPattern will now not look for a pattern below any
+	nodes which have model= or view= directives on them.
+
+2003-08-04  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/python/usage.py: use parameter docs if handler
+	method has none. fixes bug displaying trial help.
+
+2003-07-31  Brian Warner  <warner at lothar.com>
+
+	* twisted/python/filepath.py (FilePath.__getstate__): allow
+	FilePath objects to survive unpersisting.
+
+2003-07-30  Brian Warner  <warner at lothar.com>
+
+	* doc/howto/faq.html: mention spawnProcess vs. os.environ
+
+	* doc/howto/test-standard.html: document usage of .todo and .skip
+
+2003-07-28  Brian Warner  <warner at lothar.com>
+
+	* twisted/python/_c_dir.c: hush compiler warning
+
+	* setup.py: add twisted.xish
+
+2003-07-28  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/spread/pb.py (PBClientFactory): a new, superior API for
+	starting PB connections. Create a factory, do a
+	reactor.connectTCP/SSL() etc., then factory.getPerspective().
+
+2003-07-27  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_enterprise.py: enable tests that depend on
+	cp_min and cp_max
+
+	* twisted/enterprise/adbapi.py: use threadpool to handle cp_min and
+	cp_max arguments
+
+	* twisted/test/test_threadpool.py: test existing work
+
+	* twisted/python/threadpool.py: check for existing work in start()
+
+2003-07-25  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/imap4.py: The fetch method of the IMailbox
+	interface has been changed to accept only a MessageSet and a uid
+	argument and to return an IMessage implementor.
+
+2003-07-24  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/cReactor/cDelayedCall.c: implement .active and
+	.getTime methods
+
+	* twisted/test/test_internet.py (InterfaceTestCase.wake): remove
+	reactor.initThreads() call. This is a private method which is
+	triggered internally by the current reactor when threadable.init
+	is called. It does not need to be called independently, and not
+	all reactors implement this particular method.
+
+	* twisted/test/test_threads.py: shuffle test cases, add timeouts
+	to avoid hanging tests. Added (disabled) test to trigger cReactor
+	hang (but unfortunately it fails under the default reactor)
+
+2003-07-23  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/internet/threads.py: avoid top-level reactor import
+
+2003-07-23  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/imap4.py: The fetch method of the IMailbox
+	interface has been changed to accept a list of (non-string)
+	objects representing the requested message parts.  Less knowledge
+	of the IMAP4 protocol should be required to properly implement
+	the interface.
+
+2003-07-23  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_enterprise.py: more tests
+
+2003-07-21  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/internet/base.py: implement callWhenRunning
+
+	* twisted/internet/interfaces.py: add callWhenRunning API
+
+	* twisted/test/test_pop3.py: string in string only works in 2.3
+
+2003-07-19  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.7alpha3 (for form and twisted.names
+	updates mentioned below).
+
+2003-07-19  Ying Li <cyli at ai.mit.edu>
+
+	* twisted/web/woven/form.py:  Changed form widgets so that if the
+	template already has the widget coded, merges the template widget
+	with the model widget (sets default values, etc.).
+
+	* twisted/web/woven/form.py, twisted/python/formmethod.py:  Can
+	format layout of checkgroups and radiogroups into tables, rows, or
+	columns.
+
+	* twisted/web/woven/form.py, twisted/python/formmethod.py: Added
+	file input widget (unable to retrieve filename or file type - have
+	to ask for that separately).
+
+2003-07-19  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/protocols/dns.py, twisted/names: Twisted Names can now
+	return the `authoritative' bit. All of the resolvers in
+	twisted/names/authority.py now set it.
+
+2003-07-17  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.7alpha2 (Debian packages should be
+	correct	now)
+
+2003-07-17  Dave Peticolas  <dave at krondo.com>
+
+	* doc/howto/components.html: methods in interfaces do have self
+	parameters
+
+2003-07-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/web/client.py: Added a `timeout' keyword argument to
+	getPage; If the web page takes longer than `timeout' to fetch,
+	defer.TimeoutError is errbacked.
+
+	* twisted/web/server.py, twisted/protocols/http.py: add `timeout'
+	argument to HTTPFactory and Site to specify how long to allow
+	connections to sit without communication before disconnecting
+	them.
+
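A short sketch of the two timeout arguments described in the entry above; the URL and durations are arbitrary:

    from twisted.internet import defer
    from twisted.web import server
    from twisted.web.client import getPage
    from twisted.web.resource import Resource

    # Client side: errback with defer.TimeoutError after 30 seconds.
    d = getPage("http://example.com/", timeout=30)
    d.addErrback(lambda f: f.trap(defer.TimeoutError))

    # Server side: drop connections that stay idle for ten minutes.
    site = server.Site(Resource(), timeout=60 * 10)
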
+2003-07-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.7alpha1.
+
+2003-07-17  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/smtp.py: Address class changed to provide a
+	default domain for addresses missing a domain part.
+
+2003-07-16  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/protocols/sux.py: In beExtremelyLenient mode, all data
+	in script elements is considered plain text and will not be parsed
+	for tags or entity references.
+
+2003-07-15  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/persisted/styles.py: better debugging output
+	for Ephemeral
+
+2003-07-14  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/cred/checkers.py, twisted/cred/credentials.py:
+	CramMD5Credentials and OnDiskUsernamePasswordDatabase added;
+	IUsernameHashedPassword also created for use by protocols that
+	do not receive plaintext passwords over the network.
+
+	* twisted/mail/, twisted/protocols/smtp.py: Addition of alias
+	support and authenticated ESMTP connections.  Several interfaces
+	changed, but deprecation warnings and backwards compatibility code
+	has been put in place to ease the change.
+
+2003-07-12  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/web/util.py: Add a new ChildRedirector that, when placed
+	at /foo to redirect to /bar, will also redirect /foo/abc to
+	/bar/abc.
+
+	* twisted/web/scripts.py: Fixed ResourceScriptWrapper so that you
+	can now .putChild on the resource you create in an .rpy file that
+	is wrapped with this class.
+
+2003-07-06  Paul Swartz  <z3p at twistedmatrix.com>
+	* twisted/conch/[checkers,credentials,pamauth].py,
+	  twisted/conch/ssh/userauth.py, twisted/tap/conch.py: made PAM
+	  work again as an authentication.
+
+2003-07-05  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_enterprise.py: more tests. Add mysql test.
+
+2003-07-05  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/web/soap.py: Now requires SOAPpy v0.10.1, allow subclasses
+	to determine method publishing strategy.
+
+2003-07-05  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* bin/mailmail, doc/man/mailmail.1, twisted/scripts/mailmail.py:
+	sendmail replacement
+
+2003-07-04  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_enterprise.py: add sqlite. more tests.
+	Add Postgres test.
+
+	* twisted/enterprise/util.py: fix bug in getKeyColumn
+
+	* twisted/enterprise/sqlreflector.py: clean up imports
+
+	* twisted/enterprise/row.py: clean up imports
+
+	* twisted/enterprise/reflector.py: clean up imports
+
+2003-07-04  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/python/dir.c: Wrapper around opendir(3), readdir(3),
+	and scandir(3) for use by twisted.python.plugins.
+
+2003-07-03  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/news/database.py: NewsShelf.articleRequest() and
+	NewsShelf.bodyRequest() now expected to return a file-like object
+	in the last position of its returned three-tuple.  The old API
+	is still supported, but deprecated.
+
+2003-07-03  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_enterprise.py: add gadfly test
+
+	* twisted/web/woven/input.py: remove excess newline.
+
+	* twisted/trial/unittest.py: take out unused methodPrefix var
+
+	* twisted/enterprise/adbapi.py: accept 'noisy' kw arg. persist
+	noisy, min, and max args. just warn about non-dbapi db libs.
+
+	* twisted/enterprise/reflector.py: fix spelling
+
+	* twisted/enterprise/sqlreflector.py 80 columns, don't addToCache
+	in insertRow
+
+	* twisted/enterprise/xmlreflector.py: 80 columns
+
+2003-07-01  Brian Warner  <warner at lothar.com>
+
+	* sandbox/warner/fusd_twisted.py: experimental glue code for FUSD,
+	a system for implementing Linux device drivers in userspace
+
+2003-06-27  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.6rc3. Fixed a security bug in
+	twisted.web.
+
+	* .: Releasing Twisted 1.0.6rc4. One more twisted.web bug.
+
+	* .: Releasing Twisted 1.0.6.
+
+2003-06-26  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.6rc1.
+
+	* .: Releasing Twisted 1.0.6rc2. Pop3 had failing tests.
+
+2003-06-26  Clark C. Evans  <cce at twistedmatrix.com>
+
+	* twisted/flow/*.py: Moved Flow from the sandbox to
+	twisted.flow. The callback is dead. Long live the callback!
+
+2003-06-26  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/pop3.py: POP3.authenticateUserXYZ no longer
+	returns a Mailbox object. It now returns a 3-tuple. See
+	twisted.cred.portal.Portal.login for more details about the return
+	value.
+
+2003-06-24  Brian Warner  <warner at lothar.com>
+
+	* doc/howto/upgrading.html: Explain Versioned and rebuild()
+
+2003-06-23  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/scripts/trial.py twisted/trial/reporter.py
+	doc/man/trial.1:
+
+	Added a --tbformat={plain,emacs} option to trial. Now the default
+	is to show the regular python traceback; if you want tracebacks
+	that look like compiler output for emacs, use --tbformat=emacs.
+
+2003-06-23  Cory Dodt <corydodt at yahoo.com>
+
+	* twisted/python/util.py twisted/web/microdom.py
+	twisted/test/test_{util,xml}.py: preserveCase and caseInsensitive
+	work on attribute names as well as element names.
+
+2003-06-22  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/defer.py: Changed maybeDeferred API from
+	maybeDeferred(deferred, f, *args, **kw) to maybeDeferred(f, *args,
+	**kw).
+
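A minimal sketch of the new calling convention; maybeAsync() is a hypothetical function that may return either a plain value or a Deferred:

    from twisted.internet import defer

    def maybeAsync(x):
        if x > 0:
            return x * 2                  # plain value
        return defer.succeed(0)           # or a Deferred

    # No leading Deferred argument any more; maybeDeferred wraps plain
    # return values, Deferreds and raised exceptions uniformly.
    d = defer.maybeDeferred(maybeAsync, 21)
    d.addCallback(lambda result: result)  # fires with 42
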
+2003-06-19  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/{checkers,credentials,realm}.py,
+	twisted/conch/ssh/userauth.py: Moved the Conch user authentication
+	code to use the new version of Cred.
+
+2003-06-19  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.6alpha3. There was a problem in
+	twisted.python.compat that was breaking the documentation
+	building. It is now fixed.
+
+2003-06-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.6alpha2.
+
+2003-06-16  Donovan Preston  <dp at twistedmatrix.com>
+
+	* twisted/web/woven/{controller,view,widgets}.py: Cleaned up the
+	output of Woven so it never leaves any woven-specific attributes
+	on the output HTML. Also, id attributes are not set on every
+	node with a View unless you are using LivePage.
+
+2003-06-11  Brian Warner  <warner at lothar.com>
+
+	* doc/howto/cvs-dev.html: add "Working from CVS" hints
+
+2003-06-10  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/protocol.py: connection refused errors for
+	connected datagram protocols (connectUDP) are indicated using
+	callback, ConnectedDatagramProtocol.connectionRefused, rather
+	than an exception as before.
+
+2003-06-09  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/trial/{unittest,runner}.py: Added setUpClass and
+	tearDownClass methods and invocations to twisted.trial. Implement
+	those methods in your TestCases if you want to manage resources on
+	a per-class level.
+
+2003-06-09  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/mail/relay.py: Default relaying rule change from all
+	local and all non-INET connections to all local and all UNIX
+	connections.
+
+2003-06-08  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/interfaces.py: Added ITLSTransport interface,
+	subclassing ITCPTransport and adding one method - startTLS()
+
+	* twisted/internet/tcp.py: Connector class made to implement
+	ITLSTransport if TLS is available.
+
+2003-06-05  Brian Warner  <warner at lothar.com>
+
+	* twisted/conch/ssh/transport.py (ssh_KEX_DH_GEX_INIT): don't use
+	small values for DH parameter 'y'. openssh rejects these because they
+	make it trivial to reconstruct the shared secret. This caused a test
+	failure about 1024 times out of every 65536.
+
+	* twisted/test/test_dirdbm.py (DirDbmTestCase.testModificationTime):
+	dodge a kernel bug that lets mtime get skewed from time(), causing
+	an occasional test failure
+
+2003-06-03  Jp Calderone    <exarkun at twistedmatrix.com>
+
+	* twisted/__init__.py twisted/internet/app.py
+	* twisted/internet/unix.py twisted/internet/tcp.py
+	* twisted/manhole/ui/gtk2manhole.py twisted/protocols/dns.py
+	* twisted/protocols/smtp.py twisted/protocols/sux.py
+	* twisted/protocols/imap4.py twisted/protocols/sip.py
+	* twisted/protocols/htb.py twisted/protocols/pcp.py
+	* twisted/python/formmethod.py twisted/python/reflect.py
+	* twisted/python/util.py twisted/python/components.py
+	* twisted/spread/jelly.py twisted/spread/newjelly.py
+	* twisted/test/test_components.py twisted/test/test_rebuild.py
+	* twisted/test/test_trial.py twisted/test/test_world.py
+	* twisted/test/test_setup.py twisted/test/test_newjelly.py
+	* twisted/test/test_compat.py twisted/test/test_pcp.py
+	* twisted/test/test_log.py twisted/web/microdom.py
+	* twisted/web/woven/page.py twisted/popsicle/mailsicle.py
+	* twisted/trial/remote.py twisted/trial/unittest.py
+	* twisted/world/allocator.py twisted/world/compound.py
+	* twisted/world/database.py twisted/world/storable.py
+	* twisted/world/structfile.py twisted/world/typemap.py:
+
+	Remove direct usage of twisted.python.compat; Modify __builtin__
+	module to include forward-compatibility hacks.
+
+2003-05-30  Brian Warner  <warner at lothar.com>
+
+	* twisted/conch/ssh/keys.py (signData_dsa): Force DSS signature
+	blobs to be 20 bytes long. About 1% of the time, the sig numbers
+	would come out small and fit into 19 bytes, which would result in
+	an invalid signature.
+	* twisted/test/test_conch.py: remove special hacked test case used
+	to find that invalid-signature problem.
+
+2003-05-29  Brian Warner  <warner at lothar.com>
+
+	* twisted/python/formmethod.py: this module needs False from compat
+
+	* twisted/internet/process.py (ProcessWriter.writeSomeData):
+	Accommodate Mac OS X, which sometimes raises OSError(EAGAIN)
+	instead of IOError(EAGAIN) when the pipe is full.
+
+2003-05-27  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_process.py (EchoProtocol): try to close
+	occasional test failure. Do transport.closeStdin() instead of
+	loseConnection() because the child still has data to write (to
+	stderr). Closing all three streams takes away its voice, forces it
+	to exit with an error, and is probably causing problems.
+
+	* twisted/test/test_factories.py (testStopTrying): stop test after
+	5 seconds rather than 2000 iterations. Some reactors iterate at
+	different rates.
+
+2003-05-24  Brian Warner  <warner at lothar.com>
+
+	* twisted/scripts/trial.py (Options.opt_testmodule): ignore
+	deleted files, recognize twisted/test/* files as test cases
+
+2003-05-22  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_newjelly.py (JellyTestCase.testUnicode): make
+	sure unicode strings don't mutate into plain ones
+
+2003-05-21  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/tcp.py (Connection.getTcpKeepAlive): Add
+	functions to control SO_KEEPALIVE bit on TCP sockets.
+	* twisted/internet/interfaces.py (ITCPTransport): ditto
+	* twisted/test/test_tcp.py (LoopbackTestCase.testTcpKeepAlive):
+	test it
+
+	* doc/howto/test-standard.html: document test-case-name format
+
+	* doc/howto/coding-standard.html: encourage test-case-name tags
+
+	* twisted/protocols/htb.py, twisted/protocols/irc.py,
+	twisted/protocols/pcp.py, twisted/python/text.py,
+	twisted/spread/pb.py, twisted/trial/remote.py: clean up
+	test-case-name tags
+
+	* twisted/scripts/trial.py (Options.opt_testmodule): try to handle
+	test-case-name tags the same way emacs does
+
+2003-05-21  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* bin/coil, doc/man/coil.1, doc/man/index.html: removed. Coil
+	isn't being maintained, pending a total rewrite.
+
+2003-05-20  Brian Warner  <warner at lothar.com>
+
+	* twisted/python/reflect.py (namedAny): re-raise ImportErrors that
+	happen inside the module being imported, instead of assuming that
+	it means the module doesn't exist.
+
+2003-05-19  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/web/server.py: Added two new methods to Request objects:
+	rememberRootURL and getRootURL. Calling rememberRootURL will store
+	the already-processed part of the URL on the request, and calling
+	getRootURL will return it. This is so you can more easily link to
+	disparate parts of your web application.
+
+	* twisted/web/woven/{page,widgets}.py: Updated Woven to take
+	advantage of previously-mentioned Request changes. You can now say
+	`appRoot = True' in the Page subclass that is instantiated by your
+	.rpy (for example), and then use a RootRelativeLink widget
+	(exactly the same way you use a Link widget) to get a link
+	relative to your root .rpy.
+
+2003-05-16  Brian Warner  <warner at lothar.com>
+
+	* twisted/scripts/trial.py: catch failures during import of test
+	modules named on the command line too.
+
+	* twisted/trial/unittest.py (TestSuite.addModule): catch all failures
+	during import so that syntax errors in test files don't prevent
+	other tests from being run.
+
+	* twisted/trial/reporter.py (TextReporter): handle both Failures
+	and exception tuples in import errors. Emit the messages before the
+	last summary line so that test-result parsers can still find the
+	pass/fail counts.
+
+	* doc/howto/faq.html: Add note about Ephemeral in the
+	import-from-self twistd entry.
+
+2003-05-13  Brian Warner  <warner at lothar.com>
+
+	* twisted/trial/runner.py: sort tests by name within a TestCase
+
+2003-05-13  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/internet/{default,internet}.py: Add an `active' method to
+	DelayedCall, which returns True if it hasn't been called or
+	cancelled.
+
+2003-05-13 Jonathan Lange <jml at twistedmatrix.com>
+
+	* twisted/trial/unittest.py twisted/scripts/trial.py
+	doc/man/trial.1: Add --recurse option to make trial search within
+	sub-packages for test modules.
+
+2003-05-12  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/lore/default.py twisted/lore/latex.py
+	  twisted/lore/lint.py twisted/lore/math.py twisted/lore/tree.py
+	  twisted/lore/lmath.py twisted/lore/slides.py:
+	  Added indexing support to LaTeX and lint, and made sure the
+	  config dictionary is passed to the tree processors [this is an
+	  API change which might have effect on Lore extensions!]. Rename
+	  math to lmath, to avoid some corner-case bugs where it gets mixed
+	  with the Python standard module "math".
+
+2003-05-11  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.6alpha1. There was a problem
+	with file descriptors in 1.0.5; some debugging information
+	has been added to this release. The problem should be fixed
+	by alpha2.
+
+2003-05-08  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.5 (same code-base as rc2).
+
+2003-05-08  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/world: Added an object database to Twisted.  This is
+	still highly experimental!
+
+2003-05-06  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/trial/reporter.py twisted/scripts/trial.py: Add --timing
+	option to make the reporter output wall-clock time.
+
+2003-05-05  Brian Warner  <warner at lothar.com>
+
+	* setup.py (setup_args): s/licence/license/, preferred in python-2.3
+
+2003-05-05  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 1.0.5rc1.
+
+	* .: Releasing Twisted 1.0.5rc2 (only a Debian build problem fixed).
+
+2003-05-05  Brian Warner  <warner at lothar.com>
+
+	* twisted/trial/reporter.py: remove ResultTypes, it doesn't really
+	accomplish its goal
+
+	* twisted/trial/unittest.py: move log.startKeepingErrors() from
+	top-level to TestSuite.run(). This fixes the problem of errors
+	being eaten by code which imports unittest for other reasons (like
+	to use trial.remote reporting)
+
+2003-05-04  Brian Warner  <warner at lothar.com>
+
+	* twisted/trial/reporter.py (ResultTypes): export legal values for
+	Reporter.reportResults() so remote reporters know what to expect
+
+2003-05-03  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/tcp.py, twisted/internet/ssl.py: TLS support
+	added to TCP connections; startTLS() method added to transport
+	objects to switch from unencrypted to encrypted mode.
+
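A hedged sketch of switching an established connection to TLS as described in the entry above; it assumes OpenSSL is available and uses ssl.ClientContextFactory purely as an example context factory (a real protocol would normally negotiate a STARTTLS-style exchange with the peer first):

    from twisted.internet import ssl
    from twisted.internet.protocol import Protocol

    class StartTLSClient(Protocol):
        def connectionMade(self):
            # upgrade the already-connected TCP transport to TLS
            self.transport.startTLS(ssl.ClientContextFactory())
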
+2003-05-02  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/protocol.py: Added continueTrying attribute to
+	ReconnectingClientFactory, and increased the number of states where
+	stopTrying() will actually stop further connection attempts.
+
+2003-05-01  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_trial.py: handle new trial layout
+	* twisted/trial/runner.py (runTest): utility function to help
+	test_trial
+	* twisted/trial/util.py (extract_tb): handle new trial layout,
+	ignore the right framework functions.
+
+2003-05-01  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/python/context.py: call-stack context tree.
+
+	* twisted/python/components.py: support interface-to-interface
+	adaptation, IFoo(o) syntax for adaptation, context-based
+	registries and more.
+
+	* twisted/python/log.py: Totally rewritten logging system.
+
+2003-05-01  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/gtk2reactor.py (Gtk2Reactor._doReadOrWrite):
+	add Anthony's cached-Failure speedup to gtk2 too.
+
+2003-05-01  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/tcp.py, twisted/internet/default.py: cache
+	Failures whose contents are always identical. Speeds up lost
+	connections considerably.
+
+	* twisted/python/failure.py: If you pass only an exception object
+	to Failure(), a stack will not be constructed. Speeds up Failure
+	creation in certain common cases where traceback printing isn't
+	required.
+
+2003-04-29  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_process.py: make all child processes inherit
+	their parent's environment
+
+	* twisted/web/resource.py, twisted/python/roots.py: add
+	test-case-name tag
+
+	* twisted/web/resource.py (IResource)
+	  twisted/spread/refpath.py (PathReferenceAcquisitionContext.getIndex)
+	  twisted/python/roots.py (Collection.getEntity): appease pychecker
+
+2003-04-27  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* doc/examples/bananabench.py, twisted/internet/utils.py,
+	twisted/mail/bounce.py, twisted/persisted/styles.py,
+	twisted/python/log.py, twisted/python/reflect.py,
+	twisted/spread/pb.py, twisted/test/test_banana.py,
+	twisted/test/test_iutils.py, twisted/test/test_persisted.py,
+	twisted/test/test_process.py, twisted/web/domhelpers.py,
+	twisted/web/script.py, twisted/web/server.py, twisted/web/test.py:
+	Change the usage of cStringIO to fallback to StringIO if the former
+	is not available.
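+
+	The fallback idiom referred to here (and in the cPickle change below)
+	is the usual one:
+
+	    try:
+	        from cStringIO import StringIO
+	    except ImportError:
+	        from StringIO import StringIO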
+
+	* twisted/im/gtkaccount.py, twisted/internet/app.py,
+	twisted/mail/relay.py, twisted/mail/relaymanager.py,
+	twisted/persisted/journal/base.py, twisted/persisted/dirdbm.py,
+	twisted/scripts/conch.py, twisted/scripts/tapconvert.py,
+	twisted/scripts/twistd.py, twisted/scripts/websetroot.py,
+	twisted/test/test_mvc.py, twisted/test/test_persisted.py,
+	twisted/web/woven/template.py, twisted/web/woven/view.py,
+	twisted/popsicle/picklesicle.py: Change the usage of cPickle to
+	fallback to pickle if the former is not available.
+
+	* doc/howto/coding-standard.html: Document the way to use extension
+	versions of modules for which there is a pure-python equivalent.
+
+2003-04-26  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/enterprise/adbapi.py: commit successful _runQuery calls
+	instead of rolling back
+
+2003-04-23  Brian Warner  <warner at lothar.com>
+
+	* doc/howto/telnet.html: Update example from twisted-0.15.5(!) to
+	1.0.4
+
+	* twisted/protocols/loopback.py: use reactor.iterate(0.01) so the
+	tests hammer the CPU slightly less
+
+	* twisted/test/test_trial.py (LoopbackTests.testError): .type is a
+	string
+	* twisted/trial/remote.py (JellyReporter.reportResults): stringify
+	.type and .value from Failures before jellying them.
+
+	* twisted/internet/base.py (ReactorBase.suggestThreadPoolSize):
+	don't let suggestThreadPoolSize(0) be the only reason threads are
+	initialized.
+
+	* twisted/python/log.py (err): always log Failures to the logfile. If
+	we're doing _keepErrors, then also add them to _keptErrors.
+
+	* twisted/trial/unittest.py (TestSuite.runOneTest): only do
+	reportResults once per test. Handle reactor.threadpool being None.
+
+2003-04-22  Bob Ippolito <bob at redivi.com>
+
+	* twisted/python/compat.py: Complete iter implementation with
+	__getitem__ hack for 2.1.  dict now supports the full 2.3 featureset.
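+
+	Roughly, the __getitem__ hack walks a sequence by indexing it until
+	IndexError; an illustrative sketch (not the actual compat.py code):
+
+	    def iterateByIndex(seq):
+	        # emulate iteration on Pythons without the iterator protocol
+	        items = []
+	        i = 0
+	        while 1:
+	            try:
+	                items.append(seq[i])
+	            except IndexError:
+	                return items
+	            i = i + 1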
+
+	* twisted/test/test_compat.py: Tests for compat module, so we know if
+	it works or not now ;)
+
+2003-04-22  Andrew Bennetts  <spiv at twistedmatrix.com>
+
+	* twisted/lore/latex.py: Handle cross-references and labels slightly
+	better, so that e.g. man/lore.html and howto/lore.html don't generate
+	conflicting labels.  Also, emit \loreref{...} instead of \pageref{...}
+	-- this isn't a standard LaTeX command, see admin/book.tex for an
+	example definition.  In HTML generation, all relative hrefs in <a>
+	tags are now munged from .html to .xhtml, unless class="absolute".
+
+2003-04-21  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/interfaces.py: Added getServiceNamed, addService,
+	and removeService to IServiceCollection.
+
+2003-04-21  Brian Warner  <warner at lothar.com>
+
+	* twisted/web/woven/*.py: add test-case-name tags
+
+2003-04-21  Bob Ippolito <bob at redivi.com>
+
+	* twisted/web/static.py (File, DirectoryListing): DirectoryListing
+	now gets the directory listing from File.listNames instead of calling
+	os.listdir directly, falling back to os.listdir only when no listing
+	is supplied to the DirectoryListing constructor.
+
+2003-04-19  Brian Warner  <warner at lothar.com>
+
+	* twisted/trial/remote.py (JellyReporter.cleanResults): handle
+	strings as testClass/method to unbreak tests
+
+	* twisted/trial/remote.py (JellyReporter.reportResults): send only
+	name of testClass/method to remote reporter, not whole class and
+	method. Also add .taster hook to DecodeReport to let users specify
+	their own security options.
+
+2003-04-17  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* .: Release 1.0.4 Final.
+
+2003-04-16  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* .: Release 1.0.4rc1.
+
+2003-04-15  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* admin/accepttests, admin/accepttests.py: Acceptance tests
+	turned into a Python module with no unguarded top-level code,
+	to make running acceptance tests selectively possible.
+
+2003-04-14  Brian Warner  <warner at lothar.com>
+
+	* twisted/python/threadable.py (init):
+	* twisted/spread/newjelly.py (SecurityOptions.allowBasicTypes):
+	* twisted/spread/jelly.py (SecurityOptions.allowBasicTypes):
+	Remove old apply() calls.
+
+	* twisted/spread/flavors.py (Copyable.jellyFor): Use proper
+	jellier .prepare/.preserve dance when .invoker is non-None. This
+	fixes jellying of circular references when passed through PB
+	connections.
+
+	* twisted/test/test_newjelly.py: add test case that sets .invoker
+	to verify that code path too
+
+2003-04-14  Jonathan Lange  <jml at ids.org.au>
+
+	* twisted/web/woven/controller.py (Controller): now, if getChild
+	cannot find the requested child, it will ask getDynamicChild -- a
+	method like getChild, but designed to be overridden by users.
+
+2003-04-13  Bob Ippolito  <bob at redivi.com>
+
+	* twisted/internet/app.py (DependentMultiService): a MultiService
+	to start services in insert order and stop them in reverse.  Uses
+	chained deferreds to ensure that if a startService or stopService
+	returns a deferred, then the next service in the queue will wait
+	until its dependency has finished.
+
+2003-04-12  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_process.py (PosixProcessTestCasePTY): skip
+	testStdio, testStderr, and testProcess. PTYs do not have separate
+	stdout/stderr, so the tests just aren't relevant. testProcess
+	might be, but it requires support for closing the write side
+	separately from the read side, and I don't think our processPTY
+	can do that quite yet.
+
+	* twisted/test/test_tcp.py (LocalRemoteAddressTestCase): iterate
+	harder. some systems might not connect to localhost before
+	iterate() is called, flunking the test
+
+	* twisted/test/test_process.py: only install SIGCHLD handler if the
+	reactor offers a hook for it.
+
+	* twisted/test/test_policies.py (ThrottlingTestCase.doIterations):
+	add more iterations to accommodate reactors that do less IO per pass
+
+	* twisted/test/process_signal.py: reset SIGHUP to default handler,
+	fixes test failures in a 'nohup' environment
+
+	* twisted/test/test_process.py (PosixProcessTestCasePTY): remove
+	testClosePty.todo now that it works
+	(SignalProtocol.processEnded): Improve testSignal error messages
+
+	* twisted/internet/process.py (PTYProcess.connectionLost): Treat
+	PTYs more like sockets: loseConnection sets .disconnecting and
+	lets the write pipe drain, then the PTY is closed in
+	connectionLost.
+
+2003-04-12  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/plugins.tml, twisted/tap/ssh.py, twisted/tap/conch.py: moved
+	the conch server from 'mktap ssh' to 'mktap conch'.
+
+2003-04-12  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/gtk2reactor.py (Gtk2Reactor.doIteration): don't
+	process *all* events before exiting: lots of IO (like test cases which
+	do connect()s from inside connectionMade) will keep us from surfacing
+	from reactor.iterate(), causing a lockup.
+	* twisted/internet/gtkreactor.py (GtkReactor.doIteration): same. Use
+	the same code as gtk2reactor with minor gtk1-vs-gtk2 variations.
+
+2003-04-11  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/gtk2reactor.py (Gtk2Reactor.doIteration): use
+	timers to match the behavior of select()-based reactors.
+	reactor.iterate(delay) is thus defined to return after 'delay'
+	seconds, or earlier if something woke it up (like IO, or timers
+	expiring).
+
+2003-04-11  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/defer.py: Added new, experimental function,
+	"maybeDeferred".  API is subject to change.
+
+2003-04-11  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/scripts/mktap.py: Sped up --debug and --progress by
+	introducing a two-pass option parser.
+
+2003-04-11  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/gtk2reactor.py: major fixes. Use different
+	POLLIN/OUT flags to robustly work around pygtk bug, change
+	callback() to behave more like pollreactor (since gtk uses poll
+	internally). doIteration now calls gtk.main_iteration in a
+	non-blocking way. Attempt to emulate doIteration(delay!=0) by
+	using time.sleep().
+
+	* twisted/internet/gtkreactor.py: same fixes as for gtk2reactor.
+	Instead of a pygtk bug we've got the limited gtk_input_add API,
+	which hides POLLHUP/POLLERR, so detecting closed fds might not be
+	as reliable.
+
+2003-04-11  Andrew Bennetts  <spiv at twistedmatrix.com>
+
+	* twisted/lore:
+	Added a "lore-slides" plugin, with HTML, Magicpoint and Prosper output
+	targets.  It's still a bit rough, but functional.
+
+2003-04-10  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* .: Release 1.0.4alpha2.
+
+2003-04-09  Brian Warner  <warner at lothar.com>
+
+	* twisted/scripts/trial.py (Options.opt_reactor): install reactor
+	before parseArgs() does an import and installs the default one
+
+	* twisted/internet/process.py: fix typo,
+	s/registerReapProccessHandler/registerReapProcessHandler/
+
+2003-04-09  Jp Calderone <exarkun at twistedmatrix.com>
+
+	* twisted/internet/base.py: Change the sort order of DelayedCalls
+	and remove them from the end of the list instead of the beginning.
+	This changes O(n) complexity to O(1) complexity.
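+
+	The idea in miniature (illustrative only, not the reactor code):
+
+	    pending = [30.0, 12.5, 0.1]   # kept sorted soonest-last
+	    nextTime = pending.pop()      # removing from the end is O(1);
+	                                  # the old 'del pending[0]' shifted
+	                                  # every remaining element, O(n).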
+
+2003-04-09  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_jelly.py, test_newjelly: Test cleanup.
+	Parameterize the jelly module used by the tests, make test_jelly a
+	subclass of test_newjelly using a different jelly module: tests
+	should now be unified. Also change tests to use proper trial
+	self.failUnless() methods instead of bare assert().
+
+2003-04-09  Bob Ippolito  <bob at redivi.com>
+
+	* twisted/python/util.py (OrderedDict): added a UserDict subclass
+	that preserves insert order (for __repr__, items, values, keys).
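+
+	A minimal sketch of the idea (hypothetical class, not the actual
+	twisted.python.util.OrderedDict code):
+
+	    from UserDict import UserDict
+
+	    class InsertOrderDict(UserDict):
+	        def __init__(self):
+	            UserDict.__init__(self)
+	            self._order = []
+	        def __setitem__(self, key, value):
+	            if not self.data.has_key(key):
+	                self._order.append(key)
+	            self.data[key] = value
+	        def keys(self):
+	            # report keys in first-insertion order
+	            return self._order[:]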
+
+	* twisted/internet/app.py (Application, _AbstractServiceCollection):
+	Preserve service order, start services in order, stop them in reverse.
+
+2003-04-09  Andrew Bennetts  <spiv at twistedmatrix.com>
+
+	* twisted/protocols/ftp.py (FTPClient):
+	Added STOR support to FTPClient, as well as support for using
+	Producers or Consumers instead of Protocols for uploading/downloading.
+	* twisted/protocols/policies.py (TimeoutWrapper):
+	Added a timeout policy that can be used to automatically disconnect
+	inactive connections.
+
+2003-04-07  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_banana.py (BananaTestCase): add Acapnotic's
+	crash-cBanana test case, and some others.
+
+	* twisted/spread/banana.py (Pynana.dataReceived): add 640k limit on
+	lists/tuples, parameterize the limit into banana.SIZE_LIMIT, define
+	and use BananaError on all problems. Impose 640k limit on outbound
+	lists/tuples/strings to catch problems on transmit side too.
+
+	* twisted/spread/cBanana.c (cBanana_dataReceived): check malloc()
+	return values to avoid segfault from oversized lists. Impose 640k
+	limit on length of incoming lists. Raise BananaError on these
+	checks instead of the previously-unreachable
+	cBanana's 'cBanana.error' exception.
+
+	* twisted/test/test_process.py (TwoProcessProtocol): add test to make
+	sure killing one process doesn't take out a second one
+	(PosixProcessTestCasePTY): add variant that sets usePTY=1
+
+2003-04-06  Brian Warner  <warner at lothar.com>
+
+	* twisted/trial/{unittest.py,remote.py}, twisted/test/test_trial.py:
+	Collapse most reportFoo methods into a single reportResults() that
+	takes a resultType parameter. This anticipates the addition of .todo
+	test-case flags that will add two more resultTypes.
+	* twisted/trial/unittest.py: Add .todo flags: creates EXPECTED_FAILURE
+	and UNEXPECTED_SUCCESS resultTypes. Like .skip, the .todo can be
+	added either to the TestCase object or as a method attribute.
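+
+	For example (hypothetical test case), marking a known failure via
+	the method-attribute form:
+
+	    from twisted.trial import unittest
+
+	    class ExampleTests(unittest.TestCase):
+	        def testKnownBroken(self):
+	            self.fail("still broken")
+	        testKnownBroken.todo = "expected to fail until this is fixed"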
+
+2003-04-04  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/scripts/trial.py: Now takes whatever you throw at it on
+	the command line, be it a filename, or a dotted python name for a
+	package, module, TestCase, or test method; you no longer need to
+	use the -pmcfM switches (unless you really want to).
+
+	* twisted/protocols/htb.py: Egress traffic shaping for Consumers
+	and Transports, using Hierarchical Token Buckets, patterned after
+	Martin Devera's Hierarchical Token Bucket traffic shaper for the
+	Linux kernel.
+
+	* doc/examples/shaper.py: Demonstration of shaping traffic on a
+	web server.
+
+	* twisted/protocols/pcp.py: Producer/Consumer proxy, for when you
+	wish to install yourself between a Producer and a Consumer and
+	subvert the flow of data.
+
+2003-04-04  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/web/microdom.py: parseXML and parseXMLString functions
+	that are setup to use the correct settings for strict XML parsing
+	and manipulation.
+
+2003-03-31  Brian Warner  <warner at lothar.com>
+
+	* twisted/trial/unittest.py: use SkipTest's argument as a reason
+	and display it in the test results instead of the traceback. Allow
+	test methods and TestCase classes to define a .skip attribute
+	instead of raising SkipTest.
+
+2003-03-31  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/trial/remote.py: machine-readable trial output to allow
+	for the test runner and the results Reporter to be in separate
+	processes.
+
+2003-03-15  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/app.py: Renamed "factory" argument to
+	Application.listenUDP() to "proto"
+
+2003-03-13  Tommi Virtanen  <tv at twistedmatrix.com>
+
+	* twisted/tap/procmon.py, twisted/plugins.tml: support for mktapping
+	ProcessMonitors.
+
+2003-03-11  Bob Ippolito <bob at redivi.com>
+
+	* twisted/internet/: Replaced apply() in non-deprecated
+	twisted.internet modules with direct function calls, per the
+	recommendation in PEP 290.
+
+	* twisted/web/client.py: HTTPPageGetter will now write
+	self.factory.postdata to the transport after the headers if the
+	attribute is present and is not None.  The factories, getPage and
+	downloadPage now accept keyword arguments for method, postdata,
+	and headers.  A Content-Length header will be automatically provided
+	for the given postdata if one isn't already present.  Note that
+	postdata is passed through raw; it is the user's responsibility to
+	provide a Content-Type header and preformatted postdata.  This change
+	should be backwards compatible.
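+
+	A hypothetical usage sketch of the new keyword arguments (URL, body
+	and callback are made up for illustration):
+
+	    from twisted.web.client import getPage
+
+	    def handleBody(body):
+	        print body
+
+	    d = getPage("http://example.com/form",
+	                method="POST",
+	                postdata="a=1&b=2",
+	                headers={"Content-Type":
+	                         "application/x-www-form-urlencoded"})
+	    d.addCallback(handleBody)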
+
+2003-03-05  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/: reactor.run() now accepts a keyword
+	argument, installSignalHandlers, indicating if signal handlers
+	should be installed.
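+
+	Typical use is embedding Twisted in a host program that manages its
+	own signal handlers, e.g.:
+
+	    from twisted.internet import reactor
+
+	    reactor.run(installSignalHandlers=0)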
+
+2003-03-04  Tommi Virtanen  <tv at twistedmatrix.com>
+
+	* twisted/scripts/mktap.py, twisted/internet/app.py: mktap now
+	accepts --uid=0 and --gid=0 to really mean root, has command line
+	help for --uid=/--gid=, and understands user and group names in
+	addition to numbers.
+
+2003-03-04  Tommi Virtanen  <tv at twistedmatrix.com>
+
+	* twisted/scripts/tap2deb.py, doc/man/tap2deb.1: Option --version=
+	collided with global options, renamed to --set-version=.
+
+2003-03-01  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/scripts/twistd.py: Added --report-profile flag to twistd
+	daemon.
+
+2003-02-24  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/tcp.py, base.py: set FD_CLOEXEC on all new
+	sockets (if available), so they will be closed when spawnProcess
+	does its fork-and-exec.
+
+2003-02-23  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/scripts/manhole.py: 1.4 manhole now defaults to using a
+	GTK2 client where available.  Start manhole with the "--toolkit gtk1"
+	parameter if you want the old one back.
+
+2003-02-19  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/monitor.py: Monitor web sites.
+
+2003-02-20  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/internet/{app,default,interface,unix}.py: Add 'mode' argument
+	to the listenUNIX interface, which sets the filesystem mode for the
+	socket.
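+
+	A hypothetical usage sketch (path and factory are placeholders):
+
+	    from twisted.internet import reactor
+	    from twisted.internet.protocol import Factory
+
+	    # restrict the socket file to its owning user
+	    reactor.listenUNIX("/tmp/example.sock", Factory(), mode=0600)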
+
+2003-02-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Release 1.0.4alpha1.
+
+2003-02-18  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/server.py twisted/protocols/http.py: Add a way for
+	resources (and other interested parties) to know when a request has
+	finished, for normal or abnormal reasons.
+
+2003-02-17  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/scripts/conch.py: Added experimental support for connection
+	caching, where if a connection is already available to a server, the
+	client will multiplex another session over the existing connection,
+	rather than creating a new one.
+
+2003-02-16  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* doc/examples/echoserv.py: Rewrote main code to not create a .tap
+	file (examples should be simple, and demonstrate as few things as
+	possible each).
+
+	* doc/examples/echoclient.py: Added UDP echo protocol
+	implementation; it is unused by default, but easily enabled.
+
+2003-02-16  Cory Dodt  <corydodt at yahoo.com>
+
+	* twisted/lore/{latex,default}.py: provide a --config book option
+	to Lore, for producing book-level documents from an index page.
+
+2003-02-15  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/scripts/mktap.py, twisted/scripts/twistd.py: Added the
+	--appname and --originalname parameters, respectively.
+
+	* twisted/doc/man/mktap.py, twisted/doc/man/twistd.py: Documented
+	the above two new parameters.
+
+2003-02-12  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/python/text.py (docstringLStrip): 1.6 This will be going
+	away in favor of inspect.getdoc.
+
+2003-02-11  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/im/interfaces.py (IAccount): 1.4 New instance attribute:
+	"client".  Also, added methods getGroup and getPerson.
+
+	* twisted/im/basechat.py (ChatUI.getPerson, .getGroup): 1.7 No
+	longer accept a Class parameter.  The class of the person/group is
+	determined by the account they are obtained through.
+
+	* twisted/im/basesupport.py (AbstractPerson, AbstractGroup): 1.15
+	Hold a reference to account, not client.  Also, lose the "chatui"
+	parameter -- this may require follow-up.
+	(AbstractAccount.__setstate__): 1.15 remove this method.  (Why
+	was self.port = int(self.port) in __setstate__?)
+	(AbstractAccount): 1.15 implement getGroup and getPerson here,
+	using _groupFactory and _personFactory factory attributes.
+
+	* twisted/im/gtkchat.py (GtkChatClientUI.getPerson, .getGroup): 1.15
+	follow ChatUI interface changes.
+
+2003-02-09  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/error.py (ProcessDone, ProcessTerminated):
+	* twisted/internet/process.py (Process.maybeCallProcessEnded):
+	* twisted/internet/process.py (PTYProcess.maybeCallProcessEnded):
+	record the signal that killed the process in .signal, set .signal
+	to None if the process died of natural causes, set .exitCode to None
+	if the process died of a signal.
+	* twisted/test/test_process.py: verify .signal, .exitCode are set
+	to None when they ought to be, verify signal-death is reported with
+	ProcessTerminated and not ProcessDone
+
+	* ChangeLog: Set add-log-time-format to iso8601.
+
+2003-02-09  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing 1.0.3rc1.
+
+2003-02-08  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/tap/mail.py twisted/mail/tap.py twisted/plugins.tml:
+	Moved from tap to mail, trying to thin down twisted.tap a little.
+
+2003-02-07  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/lore/default.py twisted/lore/tree.py twisted/lore/latex.py
+	twisted/lore/man2lore.py twisted/lore/math.py
+	twisted/scripts/html2latex.py twisted/scripts/generatelore.py
+	twisted/scripts/hlint.py twisted/scripts/lore.py bin/lore
+	bin/generatelore bin/hlint bin/html2latex twisted/plugins.tml:
+	refactor lore to be cleaner, more usable and more extendible.
+	Removed old scripts, and combined them into one plugin-based script
+	which supports Lore, Math-Lore and Man pages and converts to
+	LaTeX, HTML and (man pages) to Lore.
+
+2003-02-06  Bob Ippolito  <bob at redivi.com>
+
+	* twisted/protocols/smtp.py: sendEmail supports multipartboundary
+	keyword argument, which is useful for doing HTML emails if passed
+	"alternative" as opposed to the default "mixed".  Uses 7bit
+	encoding for mime types that start with 'text', base64 otherwise.
+
+2003-02-04  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/app.py: listenUNIX and unlistenUNIX methods added
+	to Application class.  These should be used in place of listenTCP
+	and unlistenTCP when UNIX sockets are desired.  The old,
+	undocumented behavior no longer works!  Also added connectUDP and
+	unlistenUDP to Application.
+
+2003-01-31  Cory Dodt  <corydodt at yahoo.com>
+
+	* twisted/lore/latex.py:  Don't treat comments like text nodes, just
+	  drop them.
+
+2003-01-30  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/internet/default.py
+	  twisted/internet/base.py
+	  twisted/internet/tcp.py
+	  twisted/internet/ssl.py
+	  twisted/internet/udp.py
+	  twisted/internet/unix.py
+
+	  Refactor of many internal classes, including Clients and
+	Connectors.  UNIX socket functionality moved out of the TCP classes
+	and into a new module, unix.py, and implementation of IReactorUNIX
+	by PosixReactorBase made conditional on platform UNIX socket
+	support.  Redundant inheritance cruft removed from various classes.
+
+	* twisted/internet/app.py: listenWith, unlistenWith, and connectWith
+	methods added to Application.
+
+	* twisted/internet/interfaces.py: IReactorArbitrary added.
+
+2003-01-30  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/manhole/service.py (IManholeClient.console): 1.35
+	exception messages now use a Failure.
+	(IManholeClient.listCapabilities): 1.35 Method to describe what
+	capabilities a client has, i.e. "I can receive Failures for
+	exceptions."
+
+2003-01-29  Donovan Preston  <dp at twistedmatrix.com>
+
+	* twisted/web/woven/controller.py
+	  twisted/web/woven/template.py
+	  twisted/web/woven/view.py
+	  twisted/web/woven/widgets.py: Major woven codepath cleanup
+
+	* Uses a flat list of outstanding DOM nodes instead of
+	recursion to keep track of where Woven is in the page
+	rendering process
+
+	* Removes View's dependency on DOMTemplate as a base
+	class, in preparation for deprecation of DOMTemplate
+	(all of the same semantics are now directly implemented
+	in View). As a result, View has no base classes, making
+	the inheritance chain cleaner.
+
+	* Stores the namespace stacks (model, view, and controller
+	name lookup chain) in the View directly, and each widget
+	gets an immutable reference to its position in the lookup
+	chain when it is created, making re-rendering Widgets more
+	reliable
+
+	* Represents the namespace stacks as a cons-like tuple
+	structure instead of mutable python lists, reducing
+	confusion and list-copying; instead of copying the current
+	stack lists each time a Widget is created, it just gets a
+	reference to the current tuples for each of the stacks
+
+2003-01-29  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing 1.0.2 Final.
+
+	* .: Releasing 1.0.3alpha1. Release Often :-D
+
+2003-01-29  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/internet/abstract.py (FileDescriptor.__init__): 1.36
+	Ephemeral.
+
+	* twisted/internet/tcp.py (Port.__getstate__): 1.100 As an
+	Ephemeral, this needs no __getstate__.
+
+2003-01-27  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/spread/ui/gtk2util.py (login): Perspective Broker login
+	dialog for GTK+ version 2.
+
+2003-01-26  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing 1.0.2rc1.
+
+	* .: Releasing 1.0.2rc2 (rc1 was dead in the water; hlint bug now
+	fixed).
+
+	* .: Releasing 1.0.2rc3 (rc2 was dead in the water;
+	twisted.lore.latex bug now fixed).
+
+2003-01-26  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/im/interfaces.py (IClient.__init__): 1.3 Accept a
+	logonDeferred parameter.  The client should call this back when
+	it is successfully logged in.
+
+	* twisted/im/basesupport.py
+	(AbstractClientMixin.registerAsAccountClient): 1.13 Gone.
+	chatui.registerAccountClient is called in AbstractAccount.logOn
+	instead.
+
+2003-01-22  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/web/xmlrpc.py: add docstring for Proxy. handle
+	serialization errors. check for empty deferred on connectionLost.
+
+	* twisted/test/test_internet.py: make sure wakeUp actually works
+
+2003-01-21  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/internet/defer.py: added utility method for
+	getting result of list of Deferreds as simple list.
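+
+	A hypothetical sketch of that kind of helper, built on DeferredList
+	(the actual utility's name and error handling may differ):
+
+	    from twisted.internet import defer
+
+	    def resultsAsList(deferreds):
+	        dl = defer.DeferredList(deferreds)
+	        # DeferredList fires with (success, result) pairs; keep only
+	        # the results.
+	        dl.addCallback(lambda pairs: [r for (ok, r) in pairs])
+	        return dl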
+
+2003-01-20  Jp Calderone <exarkun at twistedmatrix.com>
+
+	* twisted/internet/interfaces.py: type argument removed from
+	IReactorCore.resolve method.  IReactorPluggableResolver interface
+	added.
+
+	* twisted/internet/base.py: IReactorPluggable added to
+	ReactorBase.__implements__ and ReactorBase.installResolver added.
+
+2003-01-18  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/trial/unittest.py twisted/scripts/trial.py: adding --summary
+
+2003-01-15  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing 1.0.2alpha3.
+
+2003-01-13  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing 1.0.2alpha2.
+
+2003-01-11  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/protocols/shoutcast.py: add client support for
+	Shoutcast MP3 streaming protocol.
+
+2003-01-10  Itamar Shtull-Trauring  <itamar at itamarst.org>
+
+	* twisted/scripts/twistd.py: in debug mode, jump into debugger for any
+	logged exception.
+
+2003-01-10  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/trial/unittest.py: enable test cruft checking
+
+	* twisted/test/test_policies.py: cleanup timers
+
+	* twisted/protocols/policies.py: start/stop bandwidth timers as needed
+
+	* twisted/test/test_internet.py: cleanup timers
+
+	* twisted/test/test_woven.py: expire sessions to clean up timers
+
+	* twisted/web/woven/guard.py: stop timer when session expires
+
+2003-01-09  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/google.py: Search google for best matches
+
+2003-01-09  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/protocols/http.py: start/stop log timer as needed
+
+2003-01-08  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_smtp.py: cleanup timers after test
+
+	* twisted/trial/unittest.py: keep errors that are logged and
+	submit them as test failures when tests are finished.
+
+	* twisted/python/log.py: if errors are being kept, don't print
+	them
+
+2003-01-08  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* doc/man/trial.1 twisted/scripts/trial.py: Add -l/--logfile argument
+	to allow giving a log file.
+
+	* twisted/trial/unittest.py: add SkipTest exception, which tests can
+	raise in their various test* methods to skip a test which is not
+	expected to pass.
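+
+	For example (hypothetical test case):
+
+	    from twisted.trial import unittest
+
+	    class ExampleTests(unittest.TestCase):
+	        def testNeedsMissingFeature(self):
+	            raise unittest.SkipTest("feature not available here")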
+
+2003-01-08  Jonathan M. Lange  <jml at mumak.net>
+
+	* twisted/trial/*, bin/trial, twisted/scripts/trial.py,
+	doc/man/trial.1: Added 'trial', a new unit testing framework for
+	Twisted.
+
+	* twisted/test/test_*, admin/runtests: Moved existing tests over to
+	trial.
+
+2003-01-06  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/python/microdom.py: Added beExtremelyLenient mode (for
+	parsing "tag soup").  While this isn't quite as lenient as Mozilla
+	or IE's code (it will, for example, translate
+	<div><i><b>foo</i>bar</b></div> to <div><i><b>foo</b></i>bar</div>
+	) I am still rather proud of the wide range of complete garbage
+	that it will mangle into at least reasonably similar XHTML-esque
+	documents.
+
+2003-01-05  Brian Warner  <warner at lothar.com>
+
+	* twisted/internet/cReactor/*, setup.py: Implement getDelayedCalls for
+	cReactor. Create cDelayedCall class, implement .cancel(), .reset(),
+	and .delay() for them.
+
+2003-01-03  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/python/components.py: Fix bug due to interaction between
+	Componentized subclasses and twisted.python.rebuild.rebuild()
+
+	* twisted/python/reflect.py: Removed backwards compatibility hack
+	for deprecated name twisted.protocols.telnet.ShellFactory and empty
+	oldModules dictionary.
+
+2003-01-02  Brian Warner  <warner at lothar.com>
+
+	* twisted/test/test_internet.py (DelayedTestCase): add test
+	coverage for IReactorTime.getDelayedCalls
+
+2002-12-30  Brian Warner  <warner at lothar.com>
+
+	* pyunit/unittest.py (TestCase.__call__): clean the reactor between
+	tests: cancel any leftover reactor.callLater() timers. This helps
+	to keep deferred failures isolated to the test that caused them.
+
+2002-12-30  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/*: added docstrings to most conch classes and functions
+
+2002-12-30  Brian Warner  <warner at lothar.com>
+
+	* twisted/spread/pb.py (Broker.connectionLost): clear localObjects
+	too, to break a circular reference involving AuthServs that could
+	keep the Broker (and any outstanding pb.Referenceables) alive
+	forever.
+
+2002-12-29  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/python/compat.py: Single module where all compatibility
+	code for supporting old Python versions should be placed.
+
+2002-12-28  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/web/woven/guard.py: Newer, better wrappers for
+	authentication and session management.  In particular a nice
+	feature of this new code is automatic negotiation with browsers on
+	whether cookies are enabled or not.
+
+2002-12-27  Paul Swartz <z3p at twistedmatrix.com>
+
+	* bin/tkconch: initial commit of tkconch, an SSH client using Tkinter
+	as a terminal emulator.  Puts up a configuration menu when run without
+	arguments.
+
+	* twisted/conch/ui: moved ansi.py and tkvt100.py to t.c.ui so they are
+	away from the purely conch stuff.
+
+2002-12-25  Christmas Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing 1.0.2alpha1 - Merry Christmas!
+
+2002-12-25  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/dict.py: dict client protocol implementation
+	from Pavel "Pahan" Pergamenshchik (<pp64 at cornell.edu>)
+
+2002-12-23  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* doc/examples/testdns.py and doc/examples/dns-service.py added as
+	simple example of how to use new DNS client API.
+
+2002-12-23  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/xmlrpc.py:  added XML RPC client support
+
+2002-12-22  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/ssh/keys.py, twisted/conch/ssh/asn1.py: support for
+	writing	public and private keys.
+
+	* bin/ckeygen: new script to create public/private key pairs
+
+2002-12-22  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/protocols/dns.py: Support for AFSDB, RP, and SRV RRs
+	added.
+
+2002-12-18  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted/persisted/dirdbm.py: copyTo and clear methods added
+	to DirDBM class
+
+2002-12-18  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/ssh/connection.py, twisted/test/test_conch: fixes to
+	work on Python 2.1.
+
+	* twisted/internet/process.py: usePTY now can be an optional tuple of
+	(masterfd, slavefd, ttyname).
+
+2002-12-18  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/rewrite.py: it works now, even when used as a rootish
+	resource. Also, the request.path is massaged.
+
+2002-12-13  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/enterprise/util.py: support numeric type
+
+2002-12-13  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/client.py: add 301/302 support
+
+2002-12-13  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_ftp.py: give client time to start up (fixes
+	one test for gtk/gtk2 reactors)
+
+	* twisted/protocols/ftp.py: ftp client in passive mode should not
+	close data until both command and protocol are finished. (fixes
+	one test in gtk/gtk2 reactors)
+
+	* twisted/internet/gtkreactor.py: remove redundant code
+
+	* twisted/internet/gtk2reactor.py: remove redundant code
+
+	* twisted/internet/abstract.py: fix spelling in documentation
+
+2002-12-12  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_jelly.py: test class serialization
+
+	* twisted/spread/jelly.py: join module names with '.' in
+	_unjelly_class
+
+2002-12-12  Paul Swartz <z3p at twistedmatrix.com>
+
+	* twisted/conch/pamauth.py: added, gives support for authentication
+	using PAM.
+
+	* twisted/conch/*: support for the keyboard-interactive authentication
+	method which uses PAM.
+
+2002-12-12  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/python/log.py: add setStdout, set logfile to NullFile by
+	default.
+
+2002-12-11  Donovan Preston <dp at twistedmatrix.com>
+
+	* Added new woven example, Hello World.
+
+	* Updated woven howto to talk about Hello World. TODO: Finish refactoring
+	woven quotes example, then write more advanced woven howtos on writing
+	Widgets and InputHandlers.
+
+2002-12-11  Paul Swartz <z3p at twistedmatix.com>
+
+	* twisted/conch/*: enabled 'exec' on the server, disabled core dumps,
+	and some fixes
+
+2002-12-10  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/*: many fixes to conch server, now works and can run
+	as root.
+
+	* twisted/conch/ssh/session.py: fix root exploit where a python shell was
+	left accessible to anyone.
+
+2002-12-10  Cory Dodt  <corydodt at yahoo.com>
+
+	* t/scripts/postinstall.py: new.  Create shortcut icons on win32.
+
+	* twisted-post-install.py: new.  Runs t/scripts/postinstall.py
+
+	* setup.py: copy twisted-post-install.py during install_scripts
+
+2002-12-09  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/internet/app.py: actually set the euid/egid if users ask
+
+2002-12-09  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_conch.py: wait for ssh process to finish
+
+	* twisted/scripts/postinstall.py: fix indentation
+
+	* twisted/conch/identity.py: fix indentation
+
+2002-12-09  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/conch/ssh/transport.py: don't accept host keys by default
+	because it's a huge security hole.
+
+2002-12-09  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/enterprise/util.py: handle None as null
+
+	* twisted/internet/interfaces.py: add missing 'self' argument
+
+2002-12-08  Dave Peticolas  <dave at krondo.com>
+
+	* pyunit/unittest.py: add missing 'self.' prefix to data member
+	reference
+
+	* twisted/enterprise/util.py: make sure quoted values are strings
+	(fixes bug storing boolean types)
+
+2002-12-06  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_internet.py: flush error to prevent failure
+	with non-destructive DeferredLists.
+
+	* twisted/test/test_ftp.py: flush FTPErrors to prevent failures
+	with non-destructive DeferredLists.
+
+	* twisted/test/test_defer.py: catch the errors to prevent failure
+	with non-destructive DeferredLists
+
+	* twisted/enterprise/util.py: add some postgres types. boolean
+	types need to be quoted. remove unused selectSQL variable.
+
+2002-12-05  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/enterprise/sqlreflector.py: fix some sql escaping
+	bugs. allow subclasses to override escaping semantics.
+
+	* twisted/enterprise/util.py: allow quote function's string escape
+	routine to be overridden with a keyword argument.
+
+2002-12-05  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/python/plugin.py: fixed a bug that got the wrong plugins.tml
+	if the package was installed in two different places
+
+	* twisted/inetd/*, twisted/runner/*: moved inetd to runner, to live in
+	harmony with procmon
+
+2002-12-04  Dave Peticolas  <dave at krondo.com>
+
+	* twisted/test/test_policies.py: Take the start time timestamp
+	immediately before creating the ThrottlingFactory, since the
+	factory starts timing when it is created.
+
+	* admin/runtests: Add a 'gtk2' test type to use the gtk2reactor
+	for the test suite.
+
+2002-12-02  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/client.py: web client
+
+2002-11-30  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* Summary of Conch changes:  An actual client (bin/conch) which is
+	mostly compatible with the OpenSSH client.  An optional C module to
+	speed up some of the math operations.  A bunch of other stuff has
+	changed too, but it's hard to summarize a month of work.
+
+2002-11-24  Donovan Preston  <dp at twistedmatrix.com>
+
+	* twisted/web/woven/*: Added the beginnings of a general framework for
+	asynchronously updating portions of woven pages that have already been
+	sent to the browser. Added controller.LiveController, page.LivePage,
+	and utils.ILivePage to contain code for dealing with keeping Views alive
+	for as long as the user is still looking at a page and has a live
+	Session object on the server; code for responding to model changed
+	notifications, rerendering Views that depend on those models that have
+	changed; code for sending these rerendered views as html fragments to
+	the browser; and javascript code to mutate the DOM of the live page
+	with the updated HTML.  Mozilla only for the moment; IE to come soon.
+
+	* twisted/web/woven/widgets.py: Added API for attaching Python functions
+	to widgets that fire when a given javascript event occurs in the
+	browser.
+	Widget.addEventHandler(self, eventName, handler, *args) and
+	Widget.onEvent(self, request, eventName, *args). The default onEvent
+	will dispatch to event handlers registered with addEventHandler.
+
+2002-11-24  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing 1.0.1.
+
+2002-11-23  Jp Calderone <exarkun at twistedmatrix.com>
+
+	* twisted/names/client.py, twisted/names/server.py: Client and
+	  server domain name APIs
+
+	* twisted/tap/dns.py: 'mktap dns'
+
+2002-11-23  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/scripts/twistd.py twisted/python/syslog.py: Add syslog support
+
+2002-11-23  Kevin Turner  <acapnotic at twistedmatrix.com>, Sam Jordan  <sam at twistedmatrix.com>
+
+	* twisted/protocols/irc.py (IRCClient.dccResume, dccAcceptResume):
+	Methods for mIRC-style resumed file transfers.
+	(IRCClient.dccDoSend, IRCClient.dccDoResume)
+	(IRCClient.dccDoAcceptResume, IRCClient.dccDoChat): These are for
+	clients to override to make DCC things happen.
+	(IRCClient.dcc_SEND, dcc_ACCEPT, dcc_RESUME, dcc_CHAT)
+	(IRCClient.ctcpQuery_DCC): Refactored to dispatch to dcc_* methods.
+	(DccFileReceiveBasic.__init__): takes a resumeOffset
+
+2002-11-20  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing 1.0.1rc1
+
+2002-11-16  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* Multicast UDP socket support in most reactors.
+
+2002-11-11  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* .: Releasing 1.0.1alpha4
+
+	* .: Releasing 1.0.1alpha3
+
+2002-11-10  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* .: Releasing 1.0.1alpha2
+
+	* twisted/web/static.py, twisted/tap/web.py: Changed 'mktap web'
+	to use --ignore-ext .ext so that you can assign order to the
+	extensions you want to ignore, and not accidentally catch bad
+	extensions.
+
+2002-11-04  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* twisted/internet/tksupport.py: new, better Tkinter integration.
+	Unlike before, run the reactor as usual, do *not* call Tkinter's
+	mainloop() yourself.
+
+2002-10-25  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/domhelpers.py twisted/python/domhelpers.py
+	twisted/lore/tree.py twisted/web/woven/widgets.py: Moved domhelpers
+	to twisted.web, and add to it all the generic dom-query functions
+	from twisted.lore.tree
+
+	* twisted/scripts/generatelore.py twisted/scripts/html2latex.py
+	bin/html2latex bin/generatelore twisted/lore/__init__.py
+	twisted/lore/latex.py twisted/lore/tree.py: Add the document generation
+	Twisted uses internally to the public interface.
+
+	* twisted/python/htmlizer.py: a Python->HTML colouriser
+
+2002-10-23  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* twisted/web/soap.py: experimental SOAP support, using SOAPpy.
+	See doc/examples/soap.py for sample usage.
+
+2002-10-22  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/python/log.py: Two new features.
+	1) a stupid `debug' method that simply prefixes a message with "debug"
+	and indents it so it's easier to distinguish from normal messages.
+	This can eventually log to some magic "debug channel", once we have
+	that implemented.
+
+	2) implemented a custom warning handler; now warnings look sexy.
+	(the hackish overriding of warnings.showwarning is the recommended way
+	to do so, according to the library reference.)
+
+2002-10-22  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* setup.py: conditionalize cReactor on threads support too. This
+	is somewhat of a hack as it it done currently, but it's only necessary
+	on weird OSes like NetBSD. I assume any UNIX with thread support has
+	pthreads.
+
+	* twisted/internet/tksupport.py: tunable reactor iterate delay
+	parameter [by Jp Calderone]
+
+2002-10-17  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* bin/websetroot twisted/scripts/websetroot.py: Added a program to set
+	the root of a web server after the tap exists
+
+2002-10-14  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/vhost.py: add a virtual host monster to support twisted
+	sites behind a reverse proxy
+
+	* twisted/tap/web.py twisted/web/script.py
+	doc/man/mktap.1: adding an option to have a resource script as the root
+
+2002-10-13  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/internet/utils.py twisted/internet/process.py
+	twisted/internet/interfaces.py twisted/internet/default.py: Moved
+	utility functions into twisted.internet.utils
+
+2002-10-12  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/internet/process.py twisted/internet/interfaces.py
+	twisted/internet/default.py: Add utility method to get output of
+	programs.
+
+2002-10-11  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/internet/wxsupport.py: improved responsiveness of wxPython
+	GUI (50 FPS instead of 10 FPS).
+
+2002-10-08  Brian Warner  <warner at twistedmatrix.com>
+
+	* doc/howto: Added PB/cred and Application docs, updated Manhole
+	and Process docs. Moved Manhole from "Administrators" section to
+	"Developers" section.
+
+2002-10-10  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* .: Releasing 0.99.4
+
+2002-10-07  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* .: Release 0.99.4rc1
+
+	* twisted/protocols/http.py: backed out changes to HTTP that
+	broke 0.99.3 twisted.web.distrib.
+
+2002-10-07  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/web/script.py: Add ResourceTemplate which uses PTL for
+	creation of resources.
+
+2002-10-07  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/tap/web.py: It is now possible to add processors via
+	the command line
+
+
+2002-10-04  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twistd: when running in debug mode (-b), sending a SIGINT signal
+	to the process will drop into the debugger prompt.
+
+2002-10-05  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* .: Releasing 0.99.3
+
+2002-10-01  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/protocols/http.py: Fixed many bugs in protocol parsing,
+	found by new unit tests.
+
+2002-09-30  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/protocols/sux.py twisted/web/microdom.py: Made it possible
+	to sanely handle parse errors
+
+2002-09-26  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/internet/app.py (_AbstractServiceCollection.removeService):
+	(MultiService.removeService): inverse of addService
+	(ApplicationService.disownServiceParent): inverse of setServiceParent
+
+2002-09-27  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* .: Releasing 0.99.2
+
+2002-09-26  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/web/microdom.py: Better string formatting of XML
+	elements is now available, to aid with debugging of web.woven
+	(among other applications).
+
+2002-09-25  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/tap/manhole.py: mktap manhole will now prompt for a
+	password or accept one from stdin if one is not provided on the
+	command line.
+
+2002-09-25  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* bin/tapconvert: made sure tapconvert program gets installed.
+
+2002-09-24  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/web/resource.py (Resource.wasModifiedSince): revoked,
+	not adding this after all.  Instead,
+
+	* twisted/protocols/http.py (Request.setLastModified)
+	(Request.setETag): these methods to set cache validation headers
+	for the request will return http.CACHED if the request is
+	conditional and this setting causes the condition to fail.
+
+2002-09-24  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* .: Releasing 0.99.2rc2
+
+2002-09-23  Donovan Preston <dp at twistedmatrix.com>
+
+	* Renaming domtemplate/domwidgets/dominput/wmvc to Woven
+	Woven - The Web Object Visualization Environment
+
+	* Created package twisted/web/woven
+
+	* Renamed domtemplate to template, domwidgets to widgets,
+	and dominput to input
+
+	* Refactored wmvc into three modules, model, view, and controller
+
+2002-09-23  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/spread/pb.py: add getObjectAtSSL, refactored into
+	getObjectRetreiver so more transports can be easily supported
+
+2002-09-21  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/protocols/http.py (Request.setLastModified): Use
+	setLastModified to set a timestamp on a http.Request object, and
+	it will add a Last-Modified header to the outgoing reply.
+
+	* twisted/web/resource.py (Resource.wasModifiedSince): companion
+	method, override this to get sensible handling of
+	If-Modified-Since conditional requests.
+
+2002-09-21  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/web/static.py, twisted/web/script.py: Previously, it was
+	  not possible to use the same xmlmvc application (directory full
+	  of files and all) to interface to separate instances in the same
+	  server, without a considerable amount of hassle.  We have
+	  introduced a new "Registry" object which is passed to all .rpy
+	  and .epy scripts as "registry" in the namespace.  This is a
+	  Componentized, so it can be used to associate different
+	  components for the same interface for different File instances
+	  which represent the same underlying directory.
+
+2002-09-20  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/web/microdom.py: You can now specify tags that the
+	  parser will automatically close if they are not closed
+	  immediately.  This is to support output from HTML editors which
+	  will not output XML, but still have a predictable
+	  almost-but-not-quite XML structure.  Specifically it has been
+	  tested with Mozilla Composer.
+
+2002-09-20  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* Documenting for others
+
+	* setup.py: now setup.py can function as a module
+
+	* twisted/enterprise/xmlreflector.py: deprintified
+
+	* twisted/internet/abstract.py, twisted/internet/fdesc.py,
+	  twisted/internet/app.py, twisted/internet/gtkreactor.py,
+	  twisted/internet/main.py, twisted/internet/protocol.py,
+	  twisted/internet/ssl.py, twisted/internet/tksupport.py,
+	  twisted/internet/pollreactor.py, twisted/internet/defer.py:
+	  added and modified __all__
+
+	* twisted/internet/base.py: changed ReactorBase's __name__, added
+	__all__
+
+	* twisted/internet/default.py, twisted/internet/error.py,
+	  twisted/internet/process.py,
+	  twisted/internet/win32eventreactor.py: reaping all processes on
+	  SIGCHLD, changes in process's API
+
+	* twisted/python/components.py: added Adapter and setComponent
+
+	* twisted/python/log.py: logging several strings works
+
+	* twisted/python/reflect.py: fixed namedModule() to handle packages
+
+	* twisted/web/dom*.py: added submodels, moved to microdom, removed
+	unsafe code
+
+	* twisted/python/mvc.py: changed submodel support, added ListModel,
+	Wrapper
+
+	* twisted/web/microdom.py: minidom compat fixes
+
+2002-09-20  Jp Calderone <exarkun at twistedmatrix.com>
+
+	* twisted/internet/error.py twisted/internet/process.py:
+	ProcessEnded -> ProcessTerminated/ProcessDone. Now it is possible
+	to read off the error code.
+
+2002-09-19  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/scripts/twistd.py: Added ability to chroot. Moved directory
+	change to after loading of application.
+
+2002-09-19  Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted/*: changed print to log.msg
+
+	* bin/* twisted/scripts/*.py: move code from bin/ to modules
+
+	* twisted/inetd/*.py: inetd server in twisted
+
+	* twisted/protocols/sux.py twisted/web/microdom.py: XML parsing
+
+	* twisted/conch/*.py: better logging and protocol support
+
+	* twisted/cred/*.py: deprecation fixes
+
+	* twisted/internet/app.py: add encryption
+
+	* twisted/internet/base.py: fix deprecation, add DelayedCall,
+	move to connect*  from client*
+
+	* twisted/internet/error.py: errno mapping works on more platforms,
+	AlreadyCalled, AlreadyCancelled errors
+
+	* twisted/internet/gtkreactor.py: try requiring gtk1.2, timeout->idle
+
+	* twisted/internet/interfaces.py: added IDelayedCall IProcessTransports
+
+	* twisted/internet/javareactor.py: using failure, better dealing with
+	connection losing, new connect* API
+
+	* twisted/internet/process.py: dealing better with ending
+
+	* twisted/internet/protocol.py: factories have a "noisy" attribute,
+	added ReconnectingClientFactory BaseProtocol
+
+	* twisted/internet/ptypro.py: fixed traceback
+
+	* twisted/internet/reactor.py: better guessing of default
+
+	* twisted/internet/tcp.py: failure
+
+	* twisted/internet/win32eventreactor.py: update to new API, support GUI
+
+	* twisted/manhole/service.py: fix deprecation
+
+	* twisted/news/database.py: fix to be 2.1 compat., generating
+	message-id, bytes, lines, date headers, improved storage
+
+	* twisted/news/news.py: UsenetClientFactory, UsenetServerFactory
+
+	* twisted/persisted/marmalade.py: use twisted.web.microdom
+
+	* twisted/protocols/ftp.py: ditto, data port uses new client API
+
+	* twisted/protocols/http.py: StringTransport instead of StringIO
+
+	* twisted/protocols/irc.py: stricter parsing, avoid flooding
+
+	* twisted/protocols/loopback.py: new reactor API, loopback over UNIX
+	sockets
+
+	* twisted/protocols/nntp.py: more lenient parsing, more protocol support
+
+	* twisted/protocols/oscar.py: new reactor API
+
+	* twisted/python/components.py: fix setAdapter add removeComponent
+
+	* twisted/python/failure.py: cleanFailure
+
+	* twisted/python/log.py: can now log multiple strings in one go
+
+	* twisted/python/logfile.py: fixed rotation
+
+	* twisted/python/rebuild.py: better 2.2 support
+
+	* twisted/python/util.py: getPassword
+
+	* twisted/scripts/mktap.py: better --help, --type, encryption
+
+	* twisted/spread/*.py: removed deprecation warnings
+
+	* twisted/spread/util.py: improved Pager
+
+	* twisted/tap/news.py: works saner now
+
+	* twisted/tap/ssh.py: can specify authorizer
+
+	* twisted/tap/words.py: can bind services to specific interfaces
+
+	* twisted/web/distrib.py: now works on java too
+
+	* twisted/web/domtemplate.py: improved cache
+
+	* twisted/web/error.py: ForbiddenResource
+
+	* twisted/web/html.py: lower-case tags
+
+	* twisted/web/server.py: use components
+
+	* twisted/web/static.py: added .flac, .ogg, properly 404/403,
+	lower-case tags
+
+	* twisted/web/twcgi.py: fixed for new process API
+
+	* twisted/web/widgets.py: lower-case tags
+
+	* twisted/web/xmlrpc.py: new abstraction for long running xml-rpc
+	commands, add __all__
+
+	* twisted/words/ircservice.py: new connectionLost API
+
+	* twisted/words/service.py: refactoring and error handling
+
+	* twisted/words/tendril.py: lots of fixes, it works now
+
+2002-09-17  Donovan Preston  <dp at twistedmatrix.com>
+
+	* Added better error reporting to WebMVC. To do this, I had to
+	remove the use of "class" and "id" attributes on nodes as
+	synonyms for "model", "view", and "controller". Overloading
+	these attributes for three purposes, not to mention their
+	usage by JavaScript and CSS, was just far too error-prone.
+
+2002-09-09  Andrew Bennetts  <spiv at twistedmatrix.com>
+
+	* twisted.inetd: An inetd(8) replacement.  TCP support should be
+	complete, but UDP and Sun-RPC support is still buggy.  This was
+	mainly written as a proof-of-concept for how to do a forking
+	super-server with Twisted, but is already usable.
+
+2002-08-30  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.1rc4. There was a bug in the acquisition
+	code, as well as a typo in TwistedQuotes.
+
+2002-08-29  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.1rc3. A bug in the release script
+	left .pyc files in the tarball.
+
+2002-08-29  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.1rc2. There was a bug with circular
+	imports between modules in twisted.python.
+
+2002-08-28  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.1rc1.
+
+2002-08-27  Donovan Preston  <dp at twistedmatrix.com>
+
+	* twisted.web.domtemplate: Look up templates in the directory of
+	the module containing the DOMTemplate doing the lookup before
+	going along with regular acquisition.
+
+2002-08-27  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.*: Lots of minor fixes to make JavaReactor work again.
+
+2002-08-26  Andrew Bennetts  <andrew-twisted at puzzling.org>
+
+	* twisted.python.logfile: Added the ability to disable log
+	rotation if logRotation is None.
+
+2002-08-22  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted.news: Added a decent RDBM storage backend.
+
+2002-08-21  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* doc/howto/process.html: Process documentation, too!
+
+2002-08-20  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* doc/howto/clients.html: Client-writing documentation.
+
+2002-08-20  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted.protocols.nntp: More protocol implemented: SLAVE, XPATH,
+	XINDEX, XROVER, TAKETHIS, and CHECK.
+
+2002-08-19  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* bin, twisted.scripts.*: Migrated all bin/* scripts'
+	implementations to twisted/scripts. This means win32 users will
+	finally have access to all of the twisted scripts through .bat
+	files!
+
+2002-08-19  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted.news, twisted.protocols.nntp: Additional RFC977 support:
+	HELP and IHAVE implemented.
+
+2002-08-19  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted.internet.{process,win32eventreactor,etc}: New and
+	hopefully final Process API, and improved Win32 GUI support.
+
+2002-08-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Everything: Got rid of almost all usage of the `print' statement
+	as well as any usage of stdout. This will make it easier to
+	redirect boring log output and still write to stdout in your
+	scripts.
+
+2002-08-18  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.0 final. No changes since rc9.
+
+2002-08-17  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.0rc8, with a fix to tap2deb and
+	slightly updated options documentation.
+
+	* Releasing Twisted 0.99.0rc9 with fixes to release-twisted
+	and doc/howto/options.html.
+
+2002-08-16  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.0rc6, with some fixes to setup.py
+	* Releasing Twisted 0.99.0rc7, __init__.py fixes.
+
+2002-08-15  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.0rc5, with one severe bug-fix and
+	a few smaller ones.
+
+2002-08-14  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.99.0rc1! ON THE WAY TO 1.0, BABY!
+	* Releasing Twisted 0.99.0rc2! Sorry, typoed the version number in
+	copyright.py
+	* Releasing Twisted 0.99.0rc3! I HATE TAGGING!
+	* Releasing Twisted 0.99.0rc4, some very minor errors fixed.
+
+2002-08-14  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.internet, twisted.cred: Applications and Authorizers are
+	now completely decoupled, save for a small amount of backwards
+	compatibility.
+
+2002-08-10  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted.internet.defer, twisted.python.failure: Changes to
+	Deferred and Failure to make errbacks more consistent. Error
+	callbacks are now *guaranteed* to be passed a Failure instance,
+	no matter what was passed to Deferred.errback().
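+
+	A minimal sketch of what the guarantee looks like from user code
+	(illustrative only, using the module paths named above):
+
+	  from twisted.internet import defer
+	  from twisted.python import failure
+
+	  def report(f):
+	      # f is always a twisted.python.failure.Failure, never a bare
+	      # exception or string, regardless of how the errback was fired.
+	      assert isinstance(f, failure.Failure)
+
+	  d = defer.Deferred()
+	  d.addCallback(lambda _: 1 / 0)   # raises ZeroDivisionError
+	  d.addErrback(report)
+	  d.callback(None)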
+
+2002-08-07  Jp Calderone  <exarkun at twistedmatrix.com>
+
+	* twisted.python.usage: New "subcommands" feature for
+	usage.Options: Now, you can have nested commands
+	(`cvs commit'-style) for your usage.Options programs.
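+
+	A rough sketch of the nested-command style (attribute names follow
+	the later usage.Options API; the commands themselves are made up):
+
+	  from twisted.python import usage
+
+	  class CommitOptions(usage.Options):
+	      optParameters = [["message", "m", None, "Log message"]]
+
+	  class Options(usage.Options):
+	      subCommands = [
+	          # made-up example command: name, shortcut, Options subclass, description
+	          ["commit", None, CommitOptions, "Record changes"],
+	      ]
+
+	  config = Options()
+	  config.parseOptions(["commit", "-m", "fix typo"])
+	  config.subCommand               # 'commit'
+	  config.subOptions["message"]    # 'fix typo'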
+
+2002-08-04  Bruce Mitchener  <bruce at twistedmatrix.com>
+
+	* twisted.internet: New `writeSequence' method on transport
+	objects: This can increase efficiency as compared to `write`ing
+	concatenated strings, by copying less data in memory.
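+
+	For example (a sketch; `header' and `body' are made-up byte strings
+	already in hand):
+
+	  # Instead of building one big string first:
+	  #   transport.write(header + body)
+	  # hand the pieces to the transport and let it avoid the copy:
+	  transport.writeSequence([header, body])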
+
+2002-08-02  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.cred.service, twisted.internet.app: Application/Service
+	refactor: These two things should be less dependent on each other,
+	now.
+
+2002-07-31  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.issues: After weeks of hacking in the secret (Austin,
+	TX) hideout with Allen Short, twisted.issues, the successor to
+	Twisted Bugs, is born. Featuring a paranoia-inducing chat-bot
+	interface!
+
+2002-07-30  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted.internet.kqueue: Thanks to Matt Campbell, we now have a
+	new FreeBSD KQueue Reactor.
+
+2002-07-27  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* doc/fun/Twisted.Quotes: Added our seekrut Twisted.Quotes file to
+	Twisted proper.
+
+2002-07-26  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.spread: "Paging" for PB: this is an abstraction for
+	sending big streams of data across a PB connection.
+
+
+2002-07-23  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted.internet: Rewrite of client APIs. `reactor.clientXXX'
+	methods are now deprecated. See new reactor.connect*
+	documentation. Also Application-level client methods have been
+	reworked, see the Application documentation.
+
+2002-07-23  Bryce Wilcox-O'Hearn  <zooko at twistedmatrix.com>
+
+	* twisted.zoot: Application-level implementation of Gnutella.
+
+2002-07-21  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.im, bin/im: GUI improvements to t-im, and renamed
+	bin/t-im to bin/im (and get rid of old twisted.words client).
+
+2002-07-15  Bryce Wilcox-O'Hearn  <zooko at twistedmatrix.com>
+
+	* twisted.protocols.gnutella: Twisted now has an implementation of
+	the Gnutella protocol.
+
+2002-07-15  Sean Riley  <sean at twistedmatrix.com>
+
+	* twisted.sister: Now featuring distributed login.
+
+2002-07-15  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted.conch: A new implementation of ssh2, bringing Twisted
+	one step closer to being a complete replacement of all unix
+	services ;-)
+
+2002-07-14  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.19.0! It's exactly the same as rc4.
+
+2002-07-13  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.19.0rc4. All Known Issues in the README have
+	been fixed. This will hopefully be the last release candidate for
+	0.19.0.
+
+2002-07-07  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.19.0rc3.
+
+2002-07-07  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.19.0rc2.
+
+2002-07-07  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* Releasing Twisted 0.19.0rc1.
+
+2002-07-07  Keith Zaback  <krz at twistedmatrix.com>
+
+	* twisted.internet.cReactor: A new poll-based reactor written in
+	C. This is still very experimental and incomplete.
+
+2002-07-07  Donovan Preston  <dp at twistedmatrix.com>
+
+	* twisted.web.dom*: Better support in domtemplate/domwidgets etc
+	for Deferreds and Widgets. Also deprecated getTemplateMethods
+	method in favor of automatically looking up methods on the class
+	based on the attributes found in the template. There are some
+	minimal docs already, and better ones coming soon.
+
+2002-06-26  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.internet.process,interfaces,default: Process now
+	supports SetUID: there are new UID/GID arguments to the process
+	spawning methods/constructors.
+
+2002-06-22  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted.protocols.oscar: totally rewrote OSCAR protocol
+	implementation.
+
+2002-06-18  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.internet.defer: Deprecated the arm method of Deferred
+	objects: the replacement is a pair of methods, pause and
+	unpause. After the pause method is called, it is guaranteed that
+	no call/errbacks will be called (at least) until unpause is
+	called.
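+
+	Roughly (an illustrative sketch):
+
+	  from twisted.internet import defer
+
+	  d = defer.Deferred()
+	  d.pause()                # hold back callbacks from here on
+	  d.addCallback(print)
+	  d.callback("ready")      # result is queued, nothing fires yet
+	  d.unpause()              # the queued callback now runs with "ready"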
+
+2002-06-10  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/persisted/aot.py, bin/twistd,mktap, twisted/internet/app.py:
+
+	AOT (Abstract Object Tree) experimental source-persistence
+	mechanism. This is a more-concise, easier-to-edit alternative to
+	Twisted's XML persistence, for people who know how to edit Python
+	code. Also added appropriate options to mktap and twistd to
+	load/save .tas (Twisted Application Source) files.
+
+	I will be working on making the formatting better, soon, but it's
+	workable for now.
+
+2002-06-08  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted.internet, twisted.tap.web: Add a --https and related
+	options to 'mktap web'; web is now much more SSL-friendly.
+
+
+2002-06-02  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* twisted.internet: changed protocol factory interface - it now has
+	doStop and doStart which are called in reactors, not app.Application.
+	This turns start/stopFactory into an implementation-specific feature,
+	and also ensures they are only called once.
+
+2002-06-01  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.18.0
+
+2002-05-31  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/coil/plugins/portforward.py, twisted/tap/portforward.py:
+	  Forgot to add these before rc1 :-) You can use the portforwarder
+	  with Coil and mktap again (previously "stupidproxy")
+
+	* twisted/web/static.py: Fixed a bunch of bugs related to redirection
+	for directories.
+
+	* .: Releasing Twisted 0.18.0rc2
+
+2002-05-30  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* Twisted no longer barfs when the Python XML packages aren't available.
+
+2002-05-29  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.18.0rc1
+
+2002-05-25  Christopher Armstrong  <radix at twistedmatrix.com>
+
+	* twisted/spread/pb.py, twisted/internet/defer.py,
+	  twisted/python/failure.py, etc:
+
+	  Perspective broker now supports Failures! This should make writing
+	  robust PB clients *much* easier. What this means is that errbacks will
+	  receive instances of t.python.failure.Failure instead of just strings
+	  containing the traceback -- so you can easily .trap() particular
+	  errors and handle them appropriately.
+
+2002-05-24  Itamar Shtull-Trauring, Moshe Zadka <moshez at twistedmatrix.com>
+
+	* twisted.mail cleanups:
+
+		* basic bounce support.
+
+		* removed telnet from mail tap
+
+		* mail domains now receive service in __init__
+
+		* split file system stuff into Queue (renamed from
+		  MessageCollection)
+
+		* Put a Queue in service
+
+	* twisted/protocol/smtp.py: changed SMTPClient API so that it returns
+	a file for the message content, instead of a string.
+
+2002-05-23  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* Twisted applications can now be persisted to XML files (.tax) with
+	the --xml option -- this is pretty verbose and needs some optimizations.
+
+2002-05-22  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/persisted/marmalade.py: Marmalade: Jelly, with just a hint
+	of bitterness. An XML object serialization module designed so
+	people can hand-edit persisted objects (like Twisted Applications).
+
+2002-05-21  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* twisted/internet/gtkreactor.py: GTK+ support for win32; input_add
+	is not supported in win32 and had to be worked around.
+
+2002-05-20  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* twisted/python/defer.py, twisted/protocols/protocol.py,
+	  twisted/internet/defer.py, twisted/internet/protocol.py:
+
+	    Moved defer and protocol to twisted.internet to straighten
+	    out dependencies.
+
+2002-05-18  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/metrics, twisted/forum: Metrics and Forum are no longer
+	a part of Twisted proper; they are now in different CVS modules, and
+	will be released separately.
+
+2002-05-15  Andrew Bennetts <andrew-twisted at puzzling.org>
+
+	* twisted/protocols/ftp.py: Small fixes to FTPClient that have
+	changed the interface slightly -- return values from callbacks
+	are now consistent for active and passive FTP.  Have a look at
+	doc/examples/ftpclient.py for details.
+
+2002-05-12  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* doc/specifications/banana.html: Documentation of the Banana protocol.
+
+2002-05-06  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/im/gtkchat.py: Some more UI improvements to InstanceMessenger:
+	Nicks are now colorful (each nick is hashed to get a color) and
+	messages now have timestamps.
+
+2002-05-04  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* Reactor Refactor! Pretty much all of the twisted.internet.* classes
+	are being deprecated in favor of a single, central class called the
+	"reactor". Interfaces are defined in twisted.internet.interfaces.
+	For a much more descriptive comment about this change, see
+	http://twistedmatrix.com/pipermail/twisted-commits/2002-May/002104.html.
+
+2002-05-04  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/spread/pb.py: There is now some resource limiting in PB.
+	Clients can now have the number of references to an object limited.
+
+2002-04-29  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/im/*: Refactored Twisted InstanceMessenger to separate GUI
+	and logic. Also improved the UI a bit.
+
+2002-04-28  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* twisted/protocols/http.py: log hits using extended log format
+	and make the web tap's logfile configurable.
+
+2002-04-26  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* twisted/lumberjack/logfile.py: reversed order of rotated
+	logs - higher numbers are now older.
+
+2002-04-24  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* doc/examples/ircLogBot.py: We now have a sample IRC bot that logs
+	all messages to a file.
+
+2002-04-24  Itamar Shtull-Trauring  <twisted at itamarst.org>
+
+	* twisted/python/components.py: Twisted's interfaces are now
+	more like Zope's - __implements__ is an Interface subclass
+	or a tuple (or tuple of tuples). Additionally, an instance can
+	implement an interface even if its class doesn't have an
+	__implements__.
+
+2002-04-22  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/python/usage.py: Minor niceties for usage.Options:
+	You can now look up the options of an Options object with
+	optObj['optName'], and if you define opt_* methods with
+	underscores in them, using dashes on the command line will work.
+
+2002-04-21  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/scripts/mktap.py: No more --manhole* options, use
+	'--append=my.tap manhole' now.
+
+2002-04-20  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.17.4.
+
+	* twisted/internet/tcp.py: Make unix domain sockets *really*
+	world-accessible, rather than just accessible by "other".
+
+2002-04-19  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/web/{server,twcgi}.py: Fixed POST bug in distributed
+	web servers.
+
+2002-04-19  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.17.3.
+
+2002-04-19  Glyph Lefkowitz  <carmstro at twistedmatrix.com>
+
+	* twisted/web/distrib.py: Fix a bug where static.File transfers
+	over a distributed-web connection would not finish up properly.
+
+2002-04-18  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.17.2.
+
+2002-04-18  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/news: A news server and NNTP protocol support courtesy of
+	exarkun. Another step towards Twisted implementations of EVERYTHING
+	IN THE WORLD!
+
+2002-04-17  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/spread/pb.py: Errors during jelly serialization used to
+	just blow up; now they more properly return a Deferred Failure. This
+	will make hangs in PB apps (most notably distributed web) less common.
+
+2002-04-17  Donovan Preston <dp at twistedmatrix.com>
+
+	* Major changes to the capabilities of the static web server, in an
+	attempt to be able to use Twisted instead of Zope at work; my plan is to
+	capture many of the conveniences of Zope without the implicitness and
+	complexity that comes with working around implicit behavior when it fails.
+
+	1) .trp and .rpy support in the static web server:
+	Very simple handlers to allow you to easily add Resource objects
+		dynamically to a running server, by merely changing files on the
+		filesystem.
+	An .rpy file will be executed, and if a "resource" variable exists upon the
+		execution's completion, it will be returned.
+	A .trp file (twisted resource pickle) will be unpickled and returned. An
+		object unpickled from a .trp should either implement IResource itself,
+		or have a registered adapter in twisted.python.components.
+
+	2) Acquisition:
+	As resources are being looked up by repeated calls to getChild, this
+		change creates instances of
+		twisted.spread.refpath.PathReferenceAcquisitionContext and puts
+		them in the request as "request.pathRef"
+	Any method that has an instance of the request can then climb up
+		the parent tree using "request.pathRef['parentRef']['parentRef']"
+	PathReferenceAcquisitionContext instances can be dereferenced to the
+		actual object using getObject
+	Convenience method: "locate"   returns a PathReference to the first place
+		in the parent hierarchy a name is seen
+	Convenience method: "acquire"  somewhat like Zope acquisition;
+		mostly untested, may need fixes
+
+	3) DOM-based templating system:
+	A new templating system that allows python scripts to use the DOM
+		to manipulate the HTML node tree. Loosely based on Enhydra.
+	Subclasses of twisted.web.domtemplate.DOMTemplate can override
+		the templateFile attribute and the getTemplateMethods method;
+		ultimately, while templateFile is being parsed, the methods
+		specified will be called with instances of xml.dom.minidom.Node
+		as the first parameter, allowing the python code to manipulate
+		them (see twisted.web.blog for an example)
+
+2002-04-17  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/web/static.py, twisted/tap/web.py: Added a new feature
+	that allows requests for /foo to return /foo.extension, which is
+	disabled by default. If you want a --static webserver that
+	uses this feature, use 'mktap web --static <dir> --allow_ignore_ext'.
+
+	* twisted/tap/web.py: Also switched --static to --path; it doesn't
+	make sense to call something that automatically executes cgis, epys,
+	rpys, php, etc., "static". :-)
+
+2002-04-14  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* HTTP 1.1 now supports persistent and pipelined connections.
+
+	User-visible API changes:
+	- Request.content is now a file-like object, instead of a string.
+	- Functions that incorrectly used Request.received instead of
+	  Request.getAllHeaders() will break.
+	- sendHeader, finishHeaders, sendStatus are all hidden now.
+
+2002-04-12  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/coil/plugins/tendril.py (TendrilConfigurator): New coil
+	configurator for words.tendril.
+
+2002-04-10  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.17.0
+
+2002-04-10  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/bugs: Gone. Separate plugin package.
+	* twisted/eco: Gone. The king is dead. Long live the king!
+	(eco is no longer going to be developed, Pyrex has obviated it.)
+
+2002-04-10  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/protocols/irc.py: Some fix-ups to IRCClient and
+	DccFileReceive, from Joe Jordan (psy).
+
+2002-04-10  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/reality: Gone. This is now in a completely separate plugin
+	package.
+
+2002-04-09  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* win32 process support seems to *finally* be working correctly. Many
+	thanks to Drew Whitehouse for help with testing and debugging.
+
+2002-04-08  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* coil refactored yet again, this time to use components and adapters.
+	The design is now much cleaner.
+
+2002-04-08  Glyph Lefkowitz  <glyph at zelda.twistedmatrix.com>
+
+	* twisted/spread/jelly.py: Refactored jelly to provide (a) more
+	sane, language-portable API for efficient extensibility and (b)
+	final version of "wire" protocol.  This should be very close to
+	the last wire-protocol-breaking change to PB before
+	standardization happens.
+
+2002-04-04  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* Removed __getattr__ backwards compatibility in PB
+
+2002-04-03  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/python/usage.py, twisted/test/test_usage.py, bin/mktap, twisted/tap/*.py:
+	Made the usage.Options interface better -- options are now stored in the
+	'opts' dict. This is backwards compatible, and I added a deprecation warning.
+
+2002-04-01  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.16.0.
+
+2002-03-29  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* Added Qt event loop support, written by Sirtaj Singh Kang and
+	Aleksandar Erkalovic.
+
+2002-03-29  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* Added a 'coil' command for configuring TAP files
+
+2002-03-15  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* XML-RPC published methods can now return Deferreds, and Twisted
+	will Do The Right Thing.
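+
+	A sketch of what that allows, assuming the twisted.web.xmlrpc
+	resource class (the Echo example itself is made up):
+
+	  from twisted.web import xmlrpc
+	  from twisted.internet import defer
+
+	  class Echo(xmlrpc.XMLRPC):   # made-up example resource
+	      def xmlrpc_echo(self, value):
+	          # Returning a Deferred is fine; the XML-RPC response is
+	          # sent to the client once it fires.
+	          return defer.succeed(value)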
+
+2002-03-13  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* Refactored coil, the configuration mechanism for Twisted.
+	See twisted.coil and twisted.coil.plugins for examples of how
+	to use the new interface. Chris Armstrong did some UI improvements
+	for coil as well.
+
+	* Checked in win32 Process support, and fixed win32 event loop.
+
+2002-03-11  Glyph Lefkowitz  <glyph at janus.twistedmatrix.com>
+
+	* More robust shutdown sequence for default mainloop (other
+	mainloops should follow suit, but they didn't implement shutdown
+	callbacks properly before anyway...).  This allows for shutdown
+	callbacks to continue using the main loop.
+
+2002-03-09  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* Automatic log rotation for twistd. In addition, sending SIGUSR1
+	to twistd will rotate the log.
+
+2002-03-07  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.15.5.
+
+2002-03-06  Glyph Lefkowitz  <glyph at zelda.twistedmatrix.com>
+
+	* twisted/web/html.py: Got rid of html.Interface. This was a really
+	old, really deprecated API.
+
+2002-03-06  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/web/widgets.py: Deprecated usage of Gadget.addFile(path)
+	and replaced it with Gadget.putPath(path, pathname). This is
+	a lot more flexible.
+
+2002-03-05  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/internet/win32.py: New win32 event loop, written by
+	Andrew Bennetts.
+
+	* twisted/tap/*: Changed the interface for creating tap modules - use
+	a method called updateApplication instead of getPorts. This
+	is a much more generic and useful mechanism.
+
+	* twisted/internet/task.py: Fixed a bug where the scheduler wasn't
+	installed in some cases.
+
+2002-03-04  Moshe Zadka  <moshez at twistedmatrix.com>
+
+	* twisted/web/server.py: authorizer.Unauthorized->util.Unauthorized
+	(leftovers from removing .passport references.)
+
+	* twisted/names/dns.py: Added support for TTL.
+
+2002-03-02  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.15.4.
+
+2002-03-02  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/words/ircservice.py: Send End-Of-MOTD message --
+	some clients rely on this for automatic joining of channels
+	and whatnot.
+
+2002-03-02  Moshe Zadka  <moshez at twistedmatrix.com>
+
+	* twisted/names/dns.py: Fixed bugs in DNS client
+
+2002-03-01  Moshe Zadka  <moshez at twistedmatrix.com>
+
+	* twisted/protocols/dns.py: Can now correctly serialize answers
+
+	* twisted/names/dns.py: Can now do simple serving of domains
+
+	* twisted/internet/stupid.py: Removed spurious debugging print
+
+2002-02-28  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing 0.15.3.
+
+2002-02-27  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/mail/*, twisted/plugins.tml: The Mail server is now
+	COILable.
+
+	* bin/twistd: security fix: use a secure umask (077, rather than 0)
+	for twistd.pid.
+
+2002-02-26  Allen Short  <washort at twistedmatrix.com>
+
+	* twisted/eco/eco.py, twisted/eco/sexpy.py: ECO now supports
+	backquoting and macros.
+
+2002-02-26  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/protocols/ftp.py, twisted/plugins.tml: Made the FTP
+	server COILable!
+
+2002-02-26  Benjamin Bruheim  <phed at twistedmatrix.com>
+
+	* twisted/web/distrib.py: Fixed a win32-compatibility bug.
+
+2002-02-24  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/protocols/socks.py: Made SOCKSv4 coilable, and fixed a
+	bug so it'd work with Mozilla.
+
+2002-02-24  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* .: Releasing Twisted 0.15.2.
+
+2002-02-24  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* setup.py: Added plugins.tml and instancemessenger.glade installs
+	so mktap and t-im work in a 'setup.py install' install.
+
+	* debian/rules: Install plugins.tml so mktap works in debian installs.
+
+	* doc/man/mktap.1, twistd.1: Updated the man pages to be more accurate.
+
+2002-02-24  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* bin/mktap: Better error reporting when we don't find
+	the plugins files.
+
+	* bin/twistd: Print out the *real* usage description rather than
+	barfing when we get bad command line arguments.
+
+2002-02-24  Moshe Zadka  <moshez at twistedmatrix.com>
+
+	* debian/rules: Install the instancemessenger.glade file, so IM
+	will work in debian installs.
+
+2002-02-24  Paul Swartz  <z3p at twistedmatrix.com>
+
+	* twisted/protocols/oscar.py, socks.py, toc.py: Fixed a security
+	hole in TOC where clients could call any method on the server.
+
+2002-02-23  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/tap/coil.py: There is now a tap-creator for COIL.
+
+	* twisted/internet/stupidproxy.py: Now with COILability!
+
+2002-02-23  Glyph Lefkowitz  <glyph at zelda.twistedmatrix.com>
+
+	* bin/mktap: mktap now uses Plugins instead of searching through
+	twisted.tap. Yay for unified configuration systems!
+
+
+2002-02-22  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/im, twisted/words: t-im can now do topic setting (words
+	only), fixed the Group Metadata-setting interface in the service.
+
+2002-02-22  Glyph Lefkowitz  <glyph at zelda.twistedmatrix.com>
+
+	* twisted/manhole: COIL can now load Plugins.
+
+2002-02-21  Glyph Lefkowitz  <glyph at zelda.twistedmatrix.com>
+
+	* twisted.spread.pb: Changed remote method invocations to be
+	called through .callRemote rather than implicitly by getattr, and
+	added LocalAsRemote utility class for emulating remote behavior.
+
+2002-02-21  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted.protocols.ftp: Fixed a lot of serious bugs.
+
+2002-02-20  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted.protocols.telnet: the python shell now supports
+	multi-line commands and can be configured using coil.
+
+2002-02-13  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted.lumberjack: a log rotation and viewing service.
+	Currently only log rotation is supported.
+
+2002-02-12  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/words/ircservice.py (IRCChatter.irc_AWAY): Fix bug
+	where you can never come back from being away (at least using
+	epic4).  Closes: #%d
+
+2002-02-11  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/web/widgets.py: Changed Gadget.page to Gadget.pageFactory
+	for clarity (this is backwards-compatible).
+
+2002-02-10  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/spread/jelly.py:
+	* twisted/spread/banana.py:
+	* twisted/spread/pb.py: fixed bugs found by pychecker, got rid
+	of __ping__ method support, and added 'local_' methods to
+	RemoteReference
+
+	* twisted/persisted/styles.py: pychecker bug fixes
+
+2002-02-09  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* bin/eco: Created a command-line interpreter for ECO.
+
+	* doc/man/eco.1: man page for bin/eco
+
+2002-02-09  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/eco/eco.py: Reverted evaluator state back to functional-ness
+	:) And added functions (anonymous and global), and broke various
+	interfaces
+
+2002-02-09  Allen Short  <washort at twistedmatrix.com>
+
+	* twisted/eco/eco.py: Refactored evaluator into a class, improved
+	python-function argument signatures, and added and/or/not functions.
+
+2002-02-08  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/words/service.py, ircservice.py: Fixed annoying PING
+	bug, and added /topic support.
+
+2002-02-08  Glyph Lefkowitz  <glyph at twistedmatrix.com>
+
+	* twisted/eco: Initial prototype of ECO, the Elegant C Overlay
+	macro engine.
+
+2002-02-02  Paul Swartz <z3p at twistedmatrix.com>
+
+	* twisted/im/ircsupport.py:  Added support for the IRC protocol
+	to IM.
+
+2002-02-02  Chris Armstrong <carmstro at twistedmatrix.com>
+
+	* twisted/python/deferred.py: added Deferred.addErrback, so now
+	it's easy to attach errbacks to deferreds when you don't care
+	about plain results.
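+
+	For instance (a sketch, written against the module's later home in
+	twisted.internet.defer; the failing operation is made up):
+
+	  from twisted.internet import defer
+
+	  def logFailure(f):
+	      # made-up error handler; f is a Failure
+	      print("operation failed:", f.getErrorMessage())
+
+	  d = defer.Deferred()
+	  d.addErrback(logFailure)     # only interested in the error case
+	  d.errback(RuntimeError("boom"))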
+
+	* twisted/im/chat.py, twisted/im/pbsupport.py: added support for
+	displaying topics.
+
+2002-02-02  Paul Swartz <z3p at twistedmatrix.com>
+
+	* SOCKSv4 support: there is now a protocols.socks, which contains
+	support for SOCKSv4, a TCP proxying protocol.  mktap also has
+	support for the new protocol.
+
+2002-02-02  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/words/ircservice.py (IRCChatter.receiveDirectMessage),
+	(IRCChatter.receiveGroupMessage),
+	(IRCChatter.irc_PRIVMSG): Added CTCP ACTION <-> emote translation
+
+2002-02-01  Paul Swartz <z3p at twistedmatrix.com>
+
+	* twisted/im/tocsupport.py: Added support for most of the TOC
+	protocol to IM.
+
+
+2002-02-01  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/im/*.py: added metadata/emote support to IM. "/me foo"
+	now triggers a backwards-compatible emote.
+
+
+2002-01-30  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* twisted/internet/tcp.py: Fixed the bug where startFactory() would
+	get called twice.
+
+2002-01-30  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/im: a new client for twisted.words (and eventually
+	much more) based on GTK+ and Glade. This is mainly glyph's
+	code, but I organized it for him to check in.
+
+	* twisted/words/service.py: metadata support for words messages
+	(only {'style': 'emote'} is standardized as of yet)
+
+2002-01-29  Itamar Shtull-Trauring <itamarst at twistedmatrix.com>
+
+	* Added hook to tcp.Port and ssl.Port for limiting acceptable
+	  connections - approveConnection(socket, addr).
+
+2002-01-27  Chris Armstrong  <carmstro at twistedmatrix.com>
+
+	* twisted/words/ircservice.py: You can now change the topic
+	of a channel with '/msg channelName topic <topic>' - note that
+	'channelName' does *not* include the '#'.
+
+2002-01-23  Glyph Lefkowitz  <glyph at zelda.twistedmatrix.com>
+
+	* Incompatible change to PB: all remote methods now return
+	Deferreds.  This doesn't break code in as many places as possible,
+	but the connection methods now work differently and have different
+	signatures.
+
+	* Incompatible change to Banana: Banana now really supports floats
+	and long integers.  This involved removing some nasty hackery that
+	was previously part of the protocol spec, so you'll need to
+	upgrade.
+
+	* Added a feature to Jelly: Jelly now supports unicode strings.
+
+	* Improved Twisted.Forums considerably: still needs work, but it's
+	growing into an example of what you can do with a Twisted.Web
+	application.
+
+	* Added Twisted.Web.Webpassport -- generic mechanism for web-based
+	login to arbitrary services.  This works in conjunction with some code
+	in Forum that uses it.
+
+	* Incompatible change in Enterprise: all query methods now return
+	Deferreds, as well as take arguments in an order which makes it
+	possible to pass arbitrary argument lists for using the database's
+	formatting characters rather than python's.
+
+2002-01-15  Glyph Lefkowitz  <glyph at zelda.twistedmatrix.com>
+
+	* twisted/internet/passport.py: (and friends) Retrieval of
+	perspectives is now asynchronous, hooray (this took way too long)!
+	Perspectives may now be stored in external data sources.  Lurching
+	slowly towards a stable API for the Passport system, along with
+	Sean's recent commits of tools to manipulate it.
+
+2002-01-14  Kevin Turner  <acapnotic at twistedmatrix.com>
+
+	* twisted/python/explorer.py: reimplemented.  So it's better.
+	And yes, I broke the API.
+
+	* twisted/manhole/ui/spelunk_gnome.py: Less duplication of visages,
+	and they're draggable now too.
+
+2002-01-13  Itamar Shtull-Trauring  <itamarst at twistedmatrix.com>
+
+	* Changed twisted.enterprise.adbapi so operations can accept lists
+	  of arguments. This allows us to use the database adaptor's native
+	  SQL quoting ability instead of either doing it ourselves, or the
+	  *current* way twisted does it (not doing it at all, AFAICT!).
+
+	  cursor.execute("INSERT INTO foo VALUES (%s, %d)", "it's magic", 12)
+
+	  Problem is that different adaptors may have different codes for
+	  quoting.
+
+	* First go at database for twisted.bugs. I hate RDBMS. I hate web.
+
+--- 0.13.0 Release ---
+
+# Local Variables:
+# add-log-time-format: add-log-iso8601-time-string
+# End:
diff --git a/ThirdParty/Twisted/twisted/topfiles/NEWS b/ThirdParty/Twisted/twisted/topfiles/NEWS
new file mode 100644
index 0000000..0326237
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/topfiles/NEWS
@@ -0,0 +1,1912 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted Core 12.3.0 (2012-12-20)
+================================
+
+Features
+--------
+ - The new -j flag to trial provides a trial runner supporting
+   multiple worker processes on the local machine, for parallel
+   testing. (#1784)
+ - twisted.internet.task.react, a new function, provides a simple API
+   for running the reactor until a single asynchronous function
+   completes. (#3270)
+ - twisted.protocols.ftp.FTP now handles FEAT and OPTS commands.
+   (#4515)
+ - trial now supports specifying a debugger other than pdb with the
+   --debugger command line flag. (#5794)
+ - twisted.python.util.runWithWarningsSuppressed has been added; it
+   runs a function with specified warning filters. (#5950)
+ - trial's skipping feature is now implemented in a way compatible with the
+   standard library unittest's runner. (#6006)
+ - The setup3.py script is now provided to provisionally support
+   building and installing an experimental, incomplete version of
+   Twisted in a Python 3 environment. (#6040)
+ - twisted.python.util.FancyStrMixin now supports arbitrary callables
+   to format attribute values. (#6063)
+ - Several new methods of twisted.trial.unittest.SynchronousTestCase
+   - `successResultOf`, `failureResultOf`, and `assertNoResult` -
+   have been added to make testing `Deferred`-using code easier.
+   (#6105)
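+
+   For instance, a sketch of those helpers in a test case (the test
+   methods themselves are made-up examples):
+
+     from twisted.trial import unittest
+     from twisted.internet import defer
+
+     class DeferredResultTests(unittest.SynchronousTestCase):
+         def test_success(self):
+             self.assertEqual(self.successResultOf(defer.succeed(42)), 42)
+
+         def test_failure(self):
+             self.failureResultOf(defer.fail(RuntimeError("boom")), RuntimeError)
+
+         def test_pending(self):
+             self.assertNoResult(defer.Deferred())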
+
+Bugfixes
+--------
+ - twisted.protocols.basic.LineReceiver now does not hit the maximum
+   stack recursion depth when the line and data mode is switched many
+   times. (#3050)
+ - twisted.protocols.ftp.FTPFileListProtocol fixed to support files
+   with space characters in their name. (#4986)
+ - gireactor and gtk3reactor no longer prevent gi.pygtkcompat from
+   working, and likewise can load if gi.pygtkcompat has previously
+   been enabled. (#5676)
+ - gtk2reactor now works again on FreeBSD, and perhaps other platforms
+   that were broken by gi interactions. (#5737)
+ - gireactor now works with certain older versions of gi that are
+   missing the threads_init() function. (#5790)
+ - Fixed a bug where twisted.python.sendmsg would sometimes fail with
+   obscure errors including "Message too long" or "Invalid argument"
+   on some 64-bit platforms. (#5867)
+ - twisted.internet.endpoints.TCP6ClientEndpoint now provides
+   twisted.internet.interfaces.IStreamClientEndpoint (#5876)
+ - twisted.internet.endpoints.AdoptedStreamServerEndpoint now provides
+   twisted.internet.interfaces.IStreamServerEndpoint. (#5878)
+ - Spawning subprocesses with PTYs now works on OS X 10.8. (#5880)
+ - twisted.internet.test.test_sigchld no longer incorrectly fails when
+   run after certain other tests. (#6161)
+ - twisted.internet.test.test_gireactor no longer fails when using
+   pygobject 3.4 and gtk 3.6 when X11 is unavailable. (#6170)
+ - twisted/python/sendmsg.c no longer fails to build on OpenBSD.
+   (#5907)
+
+Improved Documentation
+----------------------
+ - The endpoint howto now lists TCP IPv6 server endpoint in the list
+   of endpoints included with Twisted. (#5741)
+
+Deprecations and Removals
+-------------------------
+ - The minimum required version of zope.interface is now 3.6.0.
+   (#5683)
+ - twisted.internet.interfaces.IReactorArbitrary and
+   twisted.application.internet.GenericServer and GenericClient,
+   deprecated since Twisted 10.1, have been removed. (#5943)
+ - twisted.internet.interfaces.IFinishableConsumer, deprecated since
+   Twisted 11.1, has been removed. (#5944)
+ - twisted.python.failure has removed all support for string
+   exceptions. (#5948)
+ - assertTrue, assertEqual, and the other free-functions in
+   twisted.trial.unittest for writing assertions, deprecated since
+   prior to Twisted 2.3, have been removed. (#5963)
+ - Ports, connectors, wakers and other reactor-related types no longer
+   log a nice warning when they are erroneously pickled. Pickling of
+   such objects continues to be unsupported. (#5979)
+ - twisted.python.components.Componentized no longer inherits from
+   Versioned. (#5983)
+ - twisted.protocols.basic.NetstringReceiver.sendString no longer
+   accepts objects other than bytes; the removed behavior was
+   deprecated in Twisted 10.0. (#6025)
+ - The lookupRecord method of twisted.internet.interfaces.IResolver,
+   never implemented or called by Twisted, has been removed. (#6091)
+
+Other
+-----
+ - #4286, #4920, #5627, #5785, #5860, #5865, #5873, #5874, #5877,
+   #5879, #5884, #5885, #5886, #5891, #5896, #5897, #5899, #5900,
+   #5901, #5903, #5906, #5908, #5912, #5913, #5914, #5916, #5917,
+   #5931, #5932, #5933, #5934, #5935, #5939, #5942, #5947, #5956,
+   #5959, #5967, #5969, #5970, #5972, #5973, #5974, #5975, #5980,
+   #5985, #5986, #5990, #5995, #6002, #6003, #6005, #6007, #6009,
+   #6010, #6018, #6019, #6022, #6023, #6033, #6036, #6039, #6041,
+   #6043, #6052, #6053, #6054, #6055, #6060, #6061, #6065, #6067,
+   #6068, #6069, #6084, #6087, #6088, #6097, #6099, #6100, #6103,
+   #6109, #6114, #6139, #6140, #6141, #6142, #6157, #6158, #6159,
+   #6163, #6172, #6182, #6190, #6194, #6204, #6209
+
+
+Twisted Core 12.2.0 (2012-08-26)
+================================
+
+Features
+--------
+ - twisted.protocols.sip.MessageParser now handles multiline headers.
+   (#2198)
+ - twisted.internet.endpoints now provides StandardIOEndpoint, a
+   Standard I/O endpoint. (#4697)
+ - If an FTPCmdError occurs during twisted.protocols.ftp.FTP.ftp_RETR
+   sending the file (i.e. it is raised by the IReadFile.send method it
+   invokes), then it will use that to return an error to the client
+   rather than necessarily sending a 426 CNX_CLOSED_TXFR_ABORTED
+   error. (#4913)
+ - twisted.internet.interfaces.IReactorSocket.adoptStreamConnection is
+   implemented by some reactors as a way to add an existing
+   established connection to them. (#5570)
+ - twisted.internet.endpoints now provides TCP6ServerEndpoint, an IPv6
+   TCP server endpoint. (#5694)
+ - twisted.internet.endpoints now provides TCP6ClientEndpoint, an IPv6
+   TCP client endpoint. (#5695)
+ - twisted.internet.endpoints.serverFromString, the endpoint string
+   description feature, can now be used to create IPv6 TCP servers.
+   (#5699)
+ - twisted.internet.endpoints.serverFromString, the endpoint string
+   description feature, can now be used to create servers that run on
+   Standard I/O. (#5729)
+ - twisted.trial.unittest now offers SynchronousTestCase, a test case
+   base class that provides usability improvements but not reactor-
+   based testing features. (#5853)
+
+Bugfixes
+--------
+ - twisted.internet.Process.signalProcess now catches ESRCH raised by
+   os.kill call and raises ProcessExitedAlready instead. (#2420)
+ - TLSMemoryBIOProtocol (and therefore all SSL transports if pyOpenSSL
+   >= 0.10) now provides the interfaces already provided by the
+   underlying transport. (#5182)
+
+Deprecations and Removals
+-------------------------
+ - Python 2.5 is no longer supported. (#5553)
+ - The --extra option of trial, deprecated since 11.0, is removed now.
+   (#3374)
+ - addPluginDir and getPluginDirs in twisted.python.util are
+   deprecated now. (#4533)
+ - twisted.trial.runner.DocTestCase, deprecated in Twisted 8.0, has
+   been removed. (#5554)
+ - startKeepingErrors, flushErrors, ignoreErrors, and clearIgnores in
+   twisted.python.log (deprecated since Twisted 2.5) are removed now.
+   (#5765)
+ - unzip, unzipIter, and countZipFileEntries in
+   twisted.python.zipstream (deprecated in Twisted 11.0) are removed
+   now. (#5766)
+ - twisted.test.time_helpers, deprecated since Twisted 10.0, has been
+   removed. (#5820)
+
+Other
+-----
+ - #4244, #4532, #4930, #4999, #5129, #5138, #5385, #5521, #5655,
+   #5674, #5679, #5687, #5688, #5689, #5692, #5707, #5734, #5736,
+   #5745, #5746, #5747, #5749, #5784, #5816, #5817, #5818, #5819,
+   #5830, #5857, #5858, #5859, #5869, #5632
+
+
+Twisted Core 12.1.0 (2012-06-02)
+================================
+
+Features
+--------
+ - The kqueue reactor has been revived. (#1918)
+ - twisted.python.filepath now provides IFilePath, an interface for
+   file path objects. (#2176)
+ - New gtk3 and gobject-introspection reactors have been added.
+   (#4558)
+ - gtk and glib reactors now run I/O and scheduled events with lower
+   priority, to ensure the UI stays responsive. (#5067)
+ - IReactorTCP.connectTCP() can now accept IPv6 address literals
+   (although not hostnames) in order to support connecting to IPv6
+   hosts. (#5085)
+ - twisted.internet.interfaces.IReactorSocket, a new interface, is now
+   supported by some reactors to listen on sockets set up by external
+   software (eg systemd or launchd). (#5248)
+ - twisted.internet.endpoints.clientFromString now also supports
+   strings in the form of tcp:example.com:80 and ssl:example.com:4321
+   (#5358)
+ - twisted.python.constants.Flags now provides a way to define
+   collections of flags for bitvector-type uses. (#5384)
+ - The epoll(7)-based reactor is now the default reactor on Linux.
+   (#5478)
+ - twisted.python.runtime.platform.isLinux can be used to check if
+   Twisted is running on Linux. (#5491)
+ - twisted.internet.endpoints.serverFromString now recognizes a
+   "systemd" endpoint type, for listening on a server port inherited
+   from systemd. (#5575)
+ - Connections created using twisted.internet.interfaces.IReactorUNIX
+   now support sending and receiving file descriptors between
+   different processes. (#5615)
+ - twisted.internet.endpoints.clientFromString now supports UNIX
+   client endpoint strings with the path argument specified like
+   "unix:/foo/bar" in addition to the old style, "unix:path=/foo/bar".
+   (#5640)
+ - twisted.protocols.amp.Descriptor is a new AMP argument type which
+   supports passing file descriptors as AMP command arguments over
+   UNIX connections. (#5650)
+
+Bugfixes
+--------
+ - twisted.internet.abstract.FileDescriptor implements
+   twisted.internet.interfaces.IPushProducer instead of
+   twisted.internet.interfaces.IProducer.
+   twisted.internet.iocpreactor.abstract.FileHandle implements
+   twisted.internet.interfaces.IPushProducer instead of
+   twisted.internet.interfaces.IProducer. (#4386)
+ - The epoll reactor now supports reading/writing to regular files on
+   stdin/stdout. (#4429)
+ - Calling .cancel() on any Twisted-provided client endpoint
+   (TCP4ClientEndpoint, UNIXClientEndpoint, SSL4ClientEndpoint) now
+   works as documented, rather than logging an AlreadyCalledError.
+   (#4710)
+ - A leak of OVERLAPPED structures in some IOCP error cases has been
+   fixed. (#5372)
+ - twisted.internet._pollingfile._PollableWritePipe now checks for
+   outgoing unicode data in write() and writeSequence() instead of
+   checkWork(). (#5412)
+
+Improved Documentation
+----------------------
+ - "Working from Twisted's Subversion repository" links to UQDS and
+   Combinator are now updated. (#5545)
+ - Added tkinterdemo.py, an example of Tkinter integration. (#5631)
+
+Deprecations and Removals
+-------------------------
+ - The 'unsigned' flag to twisted.scripts.tap2rpm.MyOptions is now
+   deprecated.  (#4086)
+ - Removed the unreachable _fileUrandom method from
+   twisted.python.randbytes.RandomFactory. (#4530)
+ - twisted.persisted.journal is removed, deprecated since Twisted
+   11.0. (#4805)
+ - Support for pyOpenSSL 0.9 and older is now deprecated.  pyOpenSSL
+   0.10 or newer will soon be required in order to use Twisted's SSL
+   features. (#4974)
+ - backwardsCompatImplements and fixClassImplements are removed from
+   twisted.python.components, deprecated in 2006. (#5034)
+ - twisted.python.reflect.macro was removed, deprecated since Twisted
+   8.2. (#5035)
+ - twisted.python.text.docstringLStrip, deprecated since Twisted
+   10.2.0, has been removed (#5036)
+ - Removed the deprecated dispatch and dispatchWithCallback methods
+   from twisted.python.threadpool.ThreadPool (deprecated since 8.0)
+   (#5037)
+ - twisted.scripts.tapconvert is now deprecated. (#5038)
+ - twisted.python.reflect's Settable, AccessorType, PropertyAccessor,
+   Accessor, OriginalAccessor and Summer are now deprecated.  (#5451)
+ - twisted.python.threadpool.ThreadSafeList (deprecated in 10.1) is
+   removed. (#5473)
+ - twisted.application.app.initialLog, deprecated since Twisted 8.2.0,
+   has been removed. (#5480)
+ - twisted.spread.refpath was deleted, deprecated since Twisted 9.0.
+   (#5482)
+ - twisted.python.otp, deprecated since 9.0, is removed. (#5493)
+ - Removed `dsu`, `moduleMovedForSplit`, and `dict` from
+   twisted.python.util (deprecated since 10.2) (#5516)
+
+Other
+-----
+ - #2723, #3114, #3398, #4388, #4489, #5055, #5116, #5242, #5380,
+   #5392, #5447, #5457, #5484, #5489, #5492, #5494, #5512, #5523,
+   #5558, #5572, #5583, #5593, #5620, #5621, #5623, #5625, #5637,
+   #5652, #5653, #5656, #5657, #5660, #5673
+
+
+Twisted Core 12.0.0 (2012-02-10)
+================================
+
+Features
+--------
+ - The interface argument to IReactorTCP.listenTCP may now be an IPv6
+   address literal, allowing the creation of IPv6 TCP servers. (#5084)
+ - twisted.python.constants.Names now provides a way to define
+   collections of named constants, similar to the "enum type" feature
+   of C or Java. (#5382)
+ - twisted.python.constants.Values now provides a way to define
+   collections of named constants with arbitrary values. (#5383)
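+
+   A sketch of the two (the constant collections here are made-up
+   examples):
+
+     from twisted.python.constants import (
+         NamedConstant, Names, ValueConstant, Values)
+
+     class Direction(Names):            # made-up Names collection
+         NORTH = NamedConstant()
+         SOUTH = NamedConstant()
+
+     class HTTPStatus(Values):          # made-up Values collection
+         OK = ValueConstant(200)
+         NOT_FOUND = ValueConstant(404)
+
+     Direction.NORTH.name            # 'NORTH'
+     HTTPStatus.lookupByValue(404)   # HTTPStatus.NOT_FOUND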
+
+Bugfixes
+--------
+ - Fixed an obscure case where connectionLost wasn't called on the
+   protocol when using half-close. (#3037)
+ - UDP ports handle socket errors better on Windows. (#3396)
+ - When idle, the gtk2 and glib2 reactors no longer wake up 10 times a
+   second. (#4376)
+ - Prevent a rare situation involving TLS transports, where a producer
+   may be erroneously left unpaused. (#5347)
+ - twisted.internet.iocpreactor.iocpsupport now has fewer 64-bit
+   compile warnings. (#5373)
+ - The GTK2 reactor is now more responsive on Windows. (#5396)
+ - TLS transports now correctly handle producer registration after the
+   connection has been lost. (#5439)
+ - twisted.protocols.htb.Bucket now empties properly with a non-zero
+   drip rate. (#5448)
+ - IReactorSSL and ITCPTransport.startTLS now synchronously propagate
+   errors from the getContext method of context factories, instead of
+   capturing them and logging them as unhandled. (#5449)
+
+Improved Documentation
+----------------------
+ - The multicast documentation has been expanded. (#4262)
+ - twisted.internet.defer.Deferred now documents more return values.
+   (#5399)
+ - Show a better starting page at
+   http://twistedmatrix.com/documents/current (#5429)
+
+Deprecations and Removals
+-------------------------
+ - Remove the deprecated module twisted.enterprise.reflector. (#4108)
+ - Removed the deprecated module twisted.enterprise.row. (#4109)
+ - Remove the deprecated module twisted.enterprise.sqlreflector.
+   (#4110)
+ - Removed the deprecated module twisted.enterprise.util, as well as
+   twisted.enterprise.adbapi.safe. (#4111)
+ - Python 2.4 is no longer supported on any platform. (#5060)
+ - Removed printTraceback and noOperation from twisted.spread.pb,
+   deprecated since Twisted 8.2. (#5370)
+
+Other
+-----
+ - #1712, #2725, #5284, #5325, #5331, #5362, #5364, #5371, #5407,
+   #5427, #5430, #5431, #5440, #5441
+
+
+Twisted Core 11.1.0 (2011-11-15)
+================================
+
+Features
+--------
+ - TCP and TLS transports now support abortConnection() which, unlike
+   loseConnection(), always closes the connection immediately. (#78)
+ - Failures received over PB when tracebacks are disabled now display
+   the wrapped exception value when they are printed. (#581)
+ - twistd now has a --logger option, allowing the use of custom log
+   observers. (#638)
+ - The default reactor is now poll(2) on platforms that support it.
+   (#2234)
+ - twisted.internet.defer.inlineCallbacks(f) now raises TypeError when
+   f returns something other than a generator or uses returnValue as a
+   non-generator. (#2501)
+ - twisted.python.usage.Options now supports performing Zsh tab-
+   completion on demand. Tab-completion for Twisted commands is
+   supported out-of-the-box on any recent zsh release. Third-party
+   commands may take advantage of zsh completion by copying the
+   provided stub file. (#3078)
+ - twisted.protocols.portforward now uses flow control between its
+   client and server connections to avoid having to buffer an
+   unbounded amount of data when one connection is slower than the
+   other. (#3350)
+ - On Windows, the select, IOCP, and Gtk2 reactors now implement
+   IReactorWin32Events (most notably adding support for serial ports
+   to these reactors). (#4862)
+ - twisted.python.failure.Failure no longer captures the state of
+   locals and globals of all stack frames by default, because it is
+   expensive to do and rarely used.  You can pass captureVars=True to
+   Failure's constructor if you want to capture this data. (#5011)
+ - twisted.web.client now supports automatic content-decoding via
+   twisted.web.client.ContentDecoderAgent, gzip being supported for
+   now. (#5053)
+ - Protocols may now implement ILoggingContext to customize their
+   logging prefix.  twisted.protocols.policies.ProtocolWrapper and the
+   endpoints wrapper now take advantage of this feature to ensure the
+   application protocol is still reflected in logs. (#5062)
+ - AMP's raw message-parsing performance was increased by
+   approximately 12%. (#5075)
+ - Twisted is now installable on PyPy, because some incompatible C
+   extensions are no longer built. (#5158)
+ - twisted.internet.defer.gatherResults now accepts a consumeErrors
+   parameter, with the same meaning as the corresponding argument for
+   DeferredList. (#5159)
+ - Added RMD (remove directory) support to the FTP client. (#5259)
+ - Server factories may now implement ILoggingContext to customize the
+   name that is logged when the reactor uses one to start listening on
+   a port. (#5292)
+ - The implementations of ITransport.writeSequence will now raise
+   TypeError if passed unicode strings. (#3896)
+ - iocp reactor now operates correctly on 64 bit Python runtimes.
+   (#4669)
+ - twistd ftp now supports the cred plugin. (#4752)
+ - twisted.python.filepath.FilePath now has an API to retrieve the
+   permissions of the underlying file, and two methods to determine
+   whether it is a block device or a socket.  (#4813)
+ - twisted.trial.unittest.TestCase is now compatible with Python 2.7's
+   assertDictEqual method. (#5291)
+
+Bugfixes
+--------
+ - The IOCP reactor now does not try to erroneously pause non-
+   streaming producers. (#745)
+ - Unicode print statements no longer blow up when using Twisted's
+   logging system. (#1990)
+ - Process transports on Windows now support the `writeToChild` method
+   (but only for stdin). (#2838)
+ - Zsh tab-completion of Twisted commands no longer relies on
+   statically generated files, but instead generates results on-the-
+   fly - ensuring accurate tab-completion for the version of Twisted
+   actually in use. (#3078)
+ - LogPublishers don't use the global log publisher for reporting
+   broken observers anymore. (#3307)
+ - trial and twistd now add the current directory to sys.path even
+   when running as root or on Windows. mktap, tapconvert, and
+   pyhtmlizer no longer add the current directory to sys.path. (#3526)
+ - twisted.internet.win32eventreactor now stops immediately if
+   reactor.stop() is called from an IWriteDescriptor.doWrite
+   implementation instead of delaying shutdown for an arbitrary period
+   of time. (#3824)
+ - twisted.python.log now handles RuntimeErrors more gracefully, and
+   always restores log observers after an exception is raised. (#4379)
+ - twisted.spread now supports updating new-style RemoteCache
+   instances. (#4447)
+ - twisted.spread.pb.CopiedFailure will no longer be thrown into a
+   generator as a (deprecated) string exception but as a
+   twisted.spread.pb.RemoteException. (#4520)
+ - trial now gracefully handles the presence of objects in sys.modules
+   which respond to attributes being set on them by modifying
+   sys.modules. (#4748)
+ - twisted.python.deprecate.deprecatedModuleAttribute no longer
+   spuriously warns twice when used to deprecate a module within a
+   package.  This should make it easier to write unit tests for
+   deprecated modules. (#4806)
+ - When pyOpenSSL 0.10 or newer is available, SSL support now uses
+   Twisted for all I/O and only relies on OpenSSL for cryptography,
+   avoiding a number of tricky, potentially broken edge cases. (#4854)
+ - IStreamClientEndpointStringParser.parseStreamClient now correctly
+   describes how it will be called by clientFromString (#4956)
+ - twisted.internet.defer.Deferreds are 10 times faster at handling
+   exceptions raised from callbacks, except when setDebugging(True)
+   has been called. (#5011)
+ - twisted.python.filepath.FilePath.copyTo now raises OSError(ENOENT)
+   if the source path being copied does not exist. (#5017)
+ - twisted.python.modules now supports iterating over namespace
+   packages without yielding duplicates. (#5030)
+ - reactor.spawnProcess now uses the resource module to guess the
+   maximum possible open file descriptor when /dev/fd exists but gives
+   incorrect results. (#5052)
+ - The memory BIO TLS/SSL implementation now supports producers
+   correctly. (#5063)
+ - twisted.spread.pb.Broker no longer creates an uncollectable
+   reference cycle when the logout callback holds a reference to the
+   client mind object. (#5079)
+ - twisted.protocols.tls, and SSL/TLS support in general, now do clean
+   TLS close alerts when disconnecting. (#5118)
+ - twisted.persisted.styles no longer uses the deprecated allYourBase
+   function (#5193)
+ - Stream client endpoints now start (doStart) and stop (doStop) the
+   factory passed to the connect method, instead of a different
+   implementation-detail factory. (#5278)
+ - SSL ports now consistently report themselves as SSL rather than TCP
+   when logging their close message. (#5292)
+ - Serial ports now deliver connectionLost to the protocol when
+   closed. (#3690)
+ - win32eventreactor now behaves better in certain rare cases in which
+   it previously would have failed to deliver connection lost
+   notification to a protocol. (#5233)
+
+Improved Documentation
+----------------------
+ - Test driven development with Twisted and Trial is now documented in
+   a how-to. (#2443)
+ - A new howto-style document covering twisted.protocols.amp has been
+   added. (#3476)
+ - Added sample implementation of a Twisted push producer/consumer
+   system. (#3835)
+ - The "Deferred in Depth" tutorial now includes accurate output for
+   the deferred_ex2.py example. (#3941)
+ - The server howto now covers the Factory.buildProtocol method.
+   (#4761)
+ - The testing standard and the trial tutorial now recommend the
+   `assertEqual` form of assertions rather than the `assertEquals` to
+   coincide with the standard library unittest's preference. (#4989)
+ - twisted.python.filepath.FilePath's methods now have more complete
+   API documentation (docstrings). (#5027)
+ - The Clients howto now uses buildProtocol more explicitly, hopefully
+   making it easier to understand where Protocol instances come from.
+   (#5044)
+
+Deprecations and Removals
+-------------------------
+ - twisted.internet.interfaces.IFinishableConsumer is now deprecated.
+   (#2661)
+ - twisted.python.zshcomp is now deprecated in favor of the tab-
+   completion system in twisted.python.usage (#3078)
+ - The unzip and unzipIter functions in twisted.python.zipstream are
+   now deprecated. (#3666)
+ - Options.optStrings, deprecated for 7 years, has been removed.  Use
+   Options.optParameters instead. (#4552)
+ - Removed the deprecated twisted.python.dispatch module. (#5023)
+ - Removed the twisted.runner.procutils module that was deprecated in
+   Twisted 2.3. (#5049)
+ - Removed twisted.trial.runner.DocTestSuite, deprecated in Twisted
+   8.0. (#5111)
+ - twisted.scripts.tkunzip is now deprecated. (#5140)
+ - Deprecated option --password-file in twistd ftp (#4752)
+ - mktap, deprecated since Twisted 8.0, has been removed. (#5293)
+
+Other
+-----
+ - #1946, #2562, #2674, #3074, #3077, #3776, #4227, #4539, #4587,
+   #4619, #4624, #4629, #4683, #4690, #4702, #4778, #4944, #4945,
+   #4949, #4952, #4957, #4979, #4980, #4987, #4990, #4994, #4995,
+   #4997, #5003, #5008, #5009, #5012, #5019, #5042, #5046, #5051,
+   #5065, #5083, #5088, #5089, #5090, #5101, #5108, #5109, #5112,
+   #5114, #5125, #5128, #5131, #5136, #5139, #5144, #5146, #5147,
+   #5156, #5160, #5165, #5191, #5205, #5215, #5217, #5218, #5223,
+   #5243, #5244, #5250, #5254, #5261, #5266, #5273, #5299, #5301,
+   #5302, #5304, #5308, #5311, #5321, #5322, #5327, #5328, #5332,
+   #5336
+
+
+Twisted Core 11.0.0 (2011-04-01)
+================================
+
+Features
+--------
+ - The reactor is not restartable, but it would previously fail to
+   complain. Now, when you restart an unrestartable reactor, you get
+   an exception. (#2066)
+ - twisted.plugin now only emits a short log message, rather than a
+   full traceback, if there is a problem writing out the dropin cache
+   file. (#2409)
+ - Added a 'replacement' parameter to the
+   'twisted.python.deprecate.deprecated' decorator.  This allows
+   deprecations to unambiguously specify what they have been
+   deprecated in favor of. (#3047)
+ - Added access methods to FilePath for FilePath.statinfo's st_ino,
+   st_dev, st_nlink, st_uid, and st_gid fields.  This is in
+   preparation for the deprecation of FilePath.statinfo. (#4712)
+ - IPv4Address and UNIXAddress now have a __hash__ method. (#4783)
+ - twisted.protocols.ftp.FTP.ftp_STOR now catches `FTPCmdError`s
+   raised by the file writer, and returns the error back to the
+   client. (#4909)
+
+Bugfixes
+--------
+ - twistd will no longer fail if a non-root user passes --uid 'myuid'
+   as a command-line argument. Instead, it will emit an error message.
+   (#3172)
+ - IOCPReactor now sends immediate completions to the main loop
+   (#3233)
+ - trial can now load test methods from multiple classes, even if the
+   methods all happen to be inherited from the same base class.
+   (#3383)
+ - twisted.web.server will now produce a correct Allow header when a
+   particular render_FOO method is missing. (#3678)
+ - HEAD requests made to resources whose HEAD handling defaults to
+   calling render_GET now always receive a response with no body.
+   (#3684)
+ - trial now loads decorated test methods whether or not the decorator
+   preserves the original method name. (#3909)
+ - t.p.amp.AmpBox.serialize will now consistently complain when fed
+   Unicode. (#3931)
+ - twisted.internet.wxreactor now supports stopping more reliably.
+   (#3948)
+ - reactor.spawnProcess on Windows can now handle ASCII-encodable
+   Unicode strings in the system environment (#3964)
+ - When Twisted's C extensions are not compiled, on Python 2.4, a test
+   in twisted.internet.test.test_process that may hang due to a
+   SIGCHLD-related problem is now skipped. Running 'python setup.py
+   build_ext --inplace' will compile the extension and cause the test
+   to both run and pass. (#4331)
+ - twisted.python.logfile.LogFile now raises a descriptive exception
+   when passed a log directory which does not exist. (#4701)
+ - Fixed a bug where INotify would fail to add a filepath to its watch
+   list after it had previously been added and then ignored. (#4708)
+ - The comparison operators of IPv4Address and UNIXAddress objects have
+   been fixed. (#4817)
+ - twisted.internet.task.Clock now sorts the list of pending calls
+   before and after processing each call (#4823)
+ - ConnectionLost is now in twisted.internet.error.__all__ instead of
+   twisted.words.protocols.jabber.xmlstream.__all__. (#4856)
+ - twisted.internet.process now detects the most appropriate mechanism
+   to use for detecting the open file descriptors on a system, getting
+   Twisted working on FreeBSD even when fdescfs is not mounted.
+   (#4881)
+ - twisted.words.services referenced nonexistent
+   twisted.words.protocols.irc.IRC_NOSUCHCHANNEL. This has been fixed.
+   Related code has also received test cases. (#4915)
+
+Improved Documentation
+----------------------
+ - The INSTALL file now lists all of Twisted's dependencies. (#967)
+ - Added the stopService and startService methods to all finger
+   example files. (#3375)
+ - Missing reactor.run() calls were added in the UDP and client howto
+   documents. (#3834)
+ - The maxRetries attribute of
+   twisted.internet.protocols.RetryingClientFactory now has API
+   documentation. (#4618)
+ - Lore docs pointed to a template that no longer existed; this has
+   been fixed. (#4682)
+ - The `servers` argument to `twisted.names.client.createResolver` now
+   has more complete API documentation. (#4713)
+ - Linked to the Twisted endpoints tutorial from the Twisted core
+   howto list.  (#4773)
+ - The Endpoints howto now links to the API documentation. (#4774)
+ - The Quotes howto is now more clear in its PYTHONPATH setup
+   instructions. (#4785)
+ - The API documentation for DeferredList's fireOnOneCallback
+   parameter now gives the correct order of the elements of the result
+   tuple. (#4882)
+
+Deprecations and Removals
+-------------------------
+ - Returning a value other than None from IProtocol.dataReceived was
+   deprecated. (#2491)
+ - Deprecated the --extra option in trial.  (#3372)
+ - twisted.protocols._c_urlarg has been removed. (#4162)
+ - Remove the --report-profile option for twistd, deprecated since
+   2007. (#4236)
+ - Deprecated twisted.persisted.journal.  This library is no longer
+   maintained.  (#4298)
+ - Removed twisted.protocols.loopback.loopback, which has been
+   deprecated since Twisted 2.5. (#4547)
+ - __getitem__, __getslice__, and __eq__ (tuple comparison and
+   indexing) have been removed from the
+   twisted.internet.address.IPv4Address and
+   twisted.internet.address.UNIXAddress classes. The _bwHack properties
+   of UNIXAddress and IPv4Address in twisted.internet.address are now
+   deprecated. (#4817)
+ - twisted.python.reflect.allYourBase is no longer used; it has been
+   replaced with inspect.getmro. (#4928)
+ - allYourBase and accumulateBases are now deprecated in favor of
+   inspect.getmro. (#4946)
+
+Other
+-----
+ - #555, #1982, #2618, #2665, #2666, #4035, #4247, #4567, #4636,
+   #4717, #4733, #4750, #4821, #4842, #4846, #4853, #4857, #4858,
+   #4863, #4864, #4865, #4866, #4867, #4868, #4869, #4870, #4871,
+   #4872, #4873, #4874, #4875, #4876, #4877, #4878, #4879, #4905,
+   #4906, #4908, #4934, #4955, #4960
+
+
+Twisted Core 10.2.0 (2010-11-29)
+================================
+
+Features
+--------
+ - twisted.internet.cfreactor has been significantly improved.  It now
+   runs, and passes, the test suite.  Many, many bugs in it have been
+   fixed, including several segfaults, as it now uses PyObjC and no
+   longer requires C code in Twisted. (#1833)
+ - twisted.protocols.ftp.FTPRealm now accepts a parameter to override
+   "/home" as the container for user directories.  The new
+   BaseFTPRealm class in the same module also allows easy
+   implementation of custom user directory schemes. (#2179)
+ - twisted.python.filepath.FilePath and twisted.python.zippath.ZipPath
+   now have a descendant method to simplify code which calls the child
+   method repeatedly. (#3169)
+ - twisted.python.failure._Frame objects now support a fake f_locals
+   attribute. (#4045)
+ - twisted.internet.endpoints now has 'serverFromString' and
+   'clientFromString' APIs for constructing endpoints from descriptive
+   strings; see the sketch after this list. (#4473)
+ - The default trial reporter now combines reporting of tests with the
+   same result to shorten its summary output. (#4487)
+ - The new class twisted.protocols.ftp.SystemFTPRealm implements an
+   FTP realm which uses system accounts to select home directories.
+   (#4494)
+ - twisted.internet.reactor.spawnProcess now wastes less time trying
+   to close non-existent file descriptors on POSIX platforms. (#4522)
+ - twisted.internet.win32eventreactor now declares that it implements
+   a new twisted.internet.interfaces.IReactorWin32Events interface.
+   (#4523)
+ - twisted.application.service.IProcess now documents its attributes
+   using zope.interface.Attribute. (#4534)
+ - twisted.application.app.ReactorSelectionMixin now saves the value
+   of the --reactor option in the "reactor" key of the options object.
+   (#4563)
+ - twisted.internet.endpoints.serverFromString and clientFromString,
+   and therefore also twisted.application.strports.service, now
+   support plugins, so third parties may implement their own endpoint
+   types. (#4695)
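+
+A minimal sketch of the string-described endpoints mentioned above; the
+Echo protocol and the port number are hypothetical:
+
+    from twisted.internet import endpoints, protocol, reactor
+
+    class Echo(protocol.Protocol):
+        def dataReceived(self, data):
+            self.transport.write(data)
+
+    factory = protocol.Factory()
+    factory.protocol = Echo
+
+    # "tcp:8080" describes a TCP server endpoint on port 8080.
+    endpoints.serverFromString(reactor, "tcp:8080").listen(factory)
+    reactor.run()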
+
+Bugfixes
+--------
+ - twisted.internet.defer.Deferred now handles chains iteratively
+   instead of recursively, preventing RuntimeError due to excessive
+   recursion when handling long Deferred chains; see the sketch after
+   this list. (#411)
+ - twisted.internet.cfreactor now works with trial. (#2556)
+ - twisted.enterprise.adbapi.ConnectionPool.close may now be called
+   even if the connection pool has not yet been started.  This will
+   prevent the pool from ever starting. (#2680)
+ - twisted.protocols.basic.NetstringReceiver now raises
+   NetstringParseError for invalid netstrings. It handles empty
+   netstrings ("0:,") correctly, and the performance of receiving
+   netstrings has been improved. (#4378)
+ - reactor.listenUDP now returns an object which declares that it
+   implements IListeningPort. (#4462)
+ - twisted.python.randbytes no longer uses PyCrypto as a secure random
+   number source (since it is not one). (#4468)
+ - twisted.internet.main.installReactor now blocks installation of
+   another reactor when using python -O (#4476)
+ - twisted.python.deprecate.deprecatedModuleAttribute now emits only
+   one warning when used to deprecate a package attribute which is a
+   module. (#4492)
+ - The "brief" mode of twisted.python.failure.Failure.getTraceback now
+   handles exceptions raised by the underlying exception's __str__
+   method. (#4501)
+ - twisted.words.xish.domish now correctly parses XML with namespaces
+   which include whitespace. (#4503)
+ - twisted.names.authority.FileAuthority now generates correct
+   negative caching hints, marks its referral NS RRs as non-
+   authoritative, and correctly generates referrals for ALL_RECORDS
+   requests. (#4513)
+ - twisted.internet.test.reactormixins.ReactorBuilder's attribute
+   `requiredInterface` (which should be an interface) is now
+   `requiredInterfaces` (a list of interfaces), as originally described
+   in the documentation. (#4527)
+ - twisted.python.zippath.ZipPath.__repr__ now correctly formats paths
+   with ".." in them (by including it). (#4535)
+ - twisted.names.hosts.searchFileFor has been fixed so that it no
+   longer relies on reference counting. (#4540)
+ - The POSIX process transports now declare that they implement
+   IProcessTransport. (#4585)
+ - Twisted can now be built with the LLVM clang compiler, with
+   'CC=clang python setup.py build'.  C code that caused errors with
+   this compiler has been removed. (#4652)
+ - trial now puts coverage data in the path specified by --temp-
+   directory, even if that option comes after --coverage on the
+   command line. (#4657)
+ - The unregisterProducer method of connection-oriented transports
+   will now cause the connection to be closed if there was a prior
+   call to loseConnection. (#4719)
+ - Fixed an issue where the new StreamServerEndpointService didn't log
+   listen errors.  (This was a bug not present in any previous
+   releases, as this class is new.) (#4731)
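+
+A rough sketch of the sort of long Deferred chain mentioned above that
+previously could exhaust the recursion limit; the chain length is
+arbitrary:
+
+    from twisted.internet.defer import Deferred
+
+    first = Deferred()
+    d = first
+    for _ in range(10000):
+        nxt = Deferred()
+        d.chainDeferred(nxt)   # pass each result along to the next Deferred
+        d = nxt
+
+    results = []
+    d.addCallback(results.append)
+    first.callback("done")     # the result now propagates iteratively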
+
+Improved Documentation
+----------------------
+ - The trial man page now documents the meaning of the final line of
+   output of the default reporter. (#1384)
+ - The API documentation for twisted.internet.defer.DeferredList now
+   goes into more depth about the effects of each of the __init__ flags
+   that class accepts. (#3595)
+ - There is now narrative documentation for the endpoints APIs, in the
+   'endpoints' core howto, as well as modifications to the 'writing
+   clients' and 'writing servers' core howto documents to indicate
+   that endpoints are now the preferred style of listening and
+   connecting. (#4478)
+ - trial's man page now documents the --disablegc option in more
+   detail. (#4511)
+ - trial's coverage output format is now documented in the trial man
+   page. (#4512)
+ - Broken links and spelling errors in the finger tutorial are now
+   fixed. (#4516)
+ - twisted.internet.threads.blockingCallFromThread's docstring is now
+   explicit about Deferred support. (#4517)
+ - twisted.python.zippath.ZipPath.child now documents its handling of
+   ".." (which is not special, making it different from
+   FilePath.child). (#4535)
+ - The API docs for twisted.internet.defer.Deferred now cover several
+   more of its (less interesting) attributes. (#4538)
+ - LineReceiver, NetstringReceiver, and IntNStringReceiver from
+   twisted.protocols.basic now have improved API documentation for
+   read callbacks and write methods. (#4542)
+ - Tidied up the Twisted Conch documentation for easier conversion.
+   (#4566)
+ - The Deferred docstring now gives the correct Twisted version in
+   which cancellation was introduced. (#4614)
+ - The logging howto is now more clear about how the standard library
+   logging module and twisted.python.log can be integrated. (#4642)
+ - The finger tutorial still had references to .tap files. These
+   references have now been removed. The documentation clarifies that
+   "finger.tap" is a module and not a filename. (#4679)
+ - The finger tutorial had a broken link to the
+   twisted.application.service.Service class, which is now fixed.
+   Additionally, a minor typo ('verison') was fixed.  (#4681)
+ - twisted.protocols.policies.TimeoutMixin now has clearer API
+   documentation. (#4684)
+
+Deprecations and Removals
+-------------------------
+ - twisted.internet.defer.Deferred.setTimeout has been removed, after
+   being deprecated since Twisted 2.0. (#1702)
+ - twisted.internet.interfaces.IReactorTime.cancelCallLater
+   (deprecated since 2007) and
+   twisted.internet.interfaces.base.ReactorBase.cancelCallLater
+   (deprecated since 2002) have been removed. (#4076)
+ - Removed twisted.cred.util.py, which has been deprecated since
+   Twisted 8.3. (#4107)
+ - twisted.python.text.docstringLStrip was deprecated. (#4328)
+ - The module attributes `LENGTH`, `DATA`, `COMMA`, and `NUMBER` of
+   twisted.protocols.basic (previously used by `NetstringReceiver`)
+   are now deprecated. (#4541)
+ - twisted.protocols.basic.SafeNetstringReceiver, deprecated since
+   2001 (before Twisted 2.0), was removed. (#4546)
+ - twisted.python.threadable.whenThreaded, deprecated since Twisted
+   2.2.0, has been removed. (#4550)
+ - twisted.python.timeoutqueue, deprecated since Twisted 8.0, has been
+   removed. (#4551)
+ - iocpreactor transports can no longer be pickled. (#4617)
+
+Other
+-----
+ - #4300, #4475, #4477, #4504, #4556, #4562, #4564, #4569, #4608,
+   #4616, #4617, #4626, #4630, #4650, #4705
+
+
+Twisted Core 10.1.0 (2010-06-27)
+================================
+
+Features
+--------
+ - Add linux inotify support, allowing monitoring of file system
+   events. (#972)
+ - Deferreds now support cancellation; a minimal sketch appears after
+   this list. (#990)
+ - Added new "endpoint" interfaces in twisted.internet.interfaces,
+   which abstractly describe stream transport endpoints which can be
+   listened on or connected to.  Implementations for TCP and SSL
+   clients and servers are present in twisted.internet.endpoints.
+   Notably, client endpoints' connect() methods return cancellable
+   Deferreds, so code written to use them can bypass the awkward
+   "ClientFactory.clientConnectionFailed" and
+   "Connector.stopConnecting" methods, and handle errbacks from or
+   cancel the returned deferred, respectively. (#1442)
+ - twisted.protocols.amp.Integer's documentation now clarifies that
+   integers of arbitrary size are supported and that the wire format
+   is a base-10 representation. (#2650)
+ - twisted.protocols.amp now includes support for transferring
+   timestamps (amp.DateTime) and decimal values (amp.Decimal). (#2651)
+ - twisted.protocols.ftp.IWriteFile now has a close() method, which can
+   return a Deferred. Previously a STOR command would finish
+   immediately upon the receipt of the last byte of the uploaded file.
+   With close(), the backend can delay the finish until it has
+   performed some other slow action (like storing the data to a
+   virtual filesystem). (#3462)
+ - FilePath now calls os.stat() only when new status information is
+   required, rather than immediately when anything changes.  For some
+   applications this may result in fewer stat() calls.  Additionally,
+   FilePath has a new method, 'changed', which applications may use to
+   indicate that the FilePath may have been changed on disk and
+   therefore the next status information request must fetch a new
+   stat result.  This is useful if external systems, such as C
+   libraries, may have changed files that Twisted applications are
+   referencing via a FilePath. (#4130)
+ - Documentation improvements are now summarized in the NEWS file.
+   (#4224)
+ - twisted.internet.task.deferLater now returns a cancellable
+   Deferred. (#4318)
+ - The connect methods of twisted.internet.protocol.ClientCreator now
+   return cancellable Deferreds. (#4329)
+ - twisted.spread.pb now has documentation covering some of its
+   limitations. (#4402)
+ - twisted.spread.jelly now supports jellying and unjellying classes
+   defined with slots if they also implement __getstate__ and
+   __setstate__. (#4430)
+ - twisted.protocols.amp.ListOf arguments can now be specified as
+   optional. (#4474)
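+
+A minimal sketch of Deferred cancellation as described above;
+cancelFetch is a hypothetical canceller function:
+
+    from twisted.internet.defer import CancelledError, Deferred
+
+    def cancelFetch(d):
+        # Hypothetical: abort whatever underlying operation would
+        # eventually have fired this Deferred.
+        pass
+
+    d = Deferred(cancelFetch)
+    d.addErrback(lambda failure: failure.trap(CancelledError))
+    d.cancel()   # invokes cancelFetch and errbacks with CancelledError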
+
+Bugfixes
+--------
+ - On POSIX platforms, reactors now support child processes in a way
+   which doesn't cause other syscalls to sometimes fail with EINTR (if
+   running on Python 2.6 or if Twisted's extension modules have been
+   built). (#733)
+ - Substrings are escaped before being passed to a regular expression
+   for searching to ensure that they don't get interpreted as part of
+   the expression. (#1893)
+ - twisted.internet.stdio now supports stdout being redirected to a
+   normal file (except when using epollreactor). (#2259)
+ -  (#2367)
+ - The tap2rpm script now works with modern versions of RPM. (#3292)
+ - twisted.python.modules.walkModules will now handle packages
+   explicitly precluded from importing by a None placed in
+   sys.modules. (#3419)
+ - ConnectedDatagramPort now uses stopListening when a connection
+   fails instead of the deprecated loseConnection. (#3425)
+ - twisted.python.filepath.FilePath.setContent is now safe for
+   multiple processes to use concurrently. (#3694)
+ - The mode argument to the methods of
+   twisted.internet.interfaces.IReactorUNIX is no longer deprecated.
+   (#4078)
+ - Do not include blacklisted projects when generating NEWS. (#4190)
+ - When generating NEWS for a project that had no significant changes,
+   include a section for that project and say that there were no
+   interesting changes. (#4191)
+ - A redundant 'b' mode is no longer passed in calls to FilePath.open,
+   and FilePath.open itself now corrects the mode when multiple 'b'
+   characters are present, ensuring only one instance of 'b' is
+   provided, as a workaround for http://bugs.python.org/issue7686.
+   (#4207)
+ - HTML tags inside <pre> tags in the code snippets are now escaped.
+   (#4336)
+ - twisted.protocols.amp.CommandLocator now allows subclasses to
+   override responders inherited from base classes. (#4343)
+ - Fixed a number of small but important defects in the INSTALL, README
+   and so forth. (#4346)
+ - The poll, epoll, glib2, and gtk2 reactors now all support half-
+   close in the twisted.internet.stdio.StandardIO transport. (#4352)
+ - twisted.application.internet no longer generates an extra and
+   invalid entry in its __all__ list for the nonexistent
+   MulticastClient. (#4373)
+ - The "Choosing a Reactor" documentation now says that only the
+   select-based reactor is a truly cross-platform reactor. (#4384)
+ - twisted.python.filepath.FilePath now no longer leaves files open,
+   to be closed by the garbage collector, when an exception is raised
+   in the implementation of setContent, getContent, or copyTo. (#4400)
+ - twisted.test.proto_helpers.StringTransport's getHost and getPeer
+   methods now return IPv4Address instances by default. (#4401)
+ - twisted.protocols.amp.BinaryBoxProtocol will no longer deliver an
+   empty string to a switched-to protocol's dataReceived method when
+   the BinaryBoxProtocol's buffer happened to be empty at the time of
+   the protocol switch. (#4405)
+ - IReactorUNIX.listenUNIX implementations now support abstract
+   namespace sockets on Linux. (#4421)
+ - Files opened with FilePath.create() (and therefore also files
+   opened via FilePath.open() on a path with alwaysCreate=True) will
+   now be opened in binary mode as advertised, so that they will
+   behave portably across platforms. (#4453)
+ - The subunit reporter now correctly reports import errors as errors,
+   rather than by crashing with an unrelated error. (#4496)
+
+Improved Documentation
+----------------------
+ - The finger tutorial example which introduces services now avoids
+   double-starting the loop to re-read its users file. (#4420)
+ - twisted.internet.defer.Deferred.callback's docstring now mentions
+   the implicit chaining feature. (#4439)
+ - doc/core/howto/listing/pb/chatclient.py can now actually send a
+   group message. (#4459)
+
+Deprecations and Removals
+-------------------------
+ - twisted.internet.interfaces.IReactorArbitrary,
+   twisted.application.internet.GenericServer, and
+   twisted.application.internet.GenericClient are now deprecated.
+   (#367)
+ - twisted.internet.gtkreactor is now deprecated. (#2833)
+ - twisted.trial.util.findObject has been deprecated. (#3108)
+ - twisted.python.threadpool.ThreadSafeList is deprecated, and Jython
+   platform detection has been removed from Twisted core. (#3725)
+ - twisted.internet.interfaces.IUDPConnectedTransport has been removed
+   (deprecated since Twisted 9.0). (#4077)
+ - Removed twisted.application.app.runWithProfiler, which has been
+   deprecated since Twisted 8.0. (#4090)
+ - Removed twisted.application.app.runWithHotshot, which has been
+   deprecated since Twisted 8.0. (#4091)
+ - Removed twisted.application.app.ApplicationRunner.startLogging,
+   which had been deprecated (no specific release is recorded), as well
+   as support for the legacy
+   twisted.application.app.ApplicationRunner.getLogObserver method.
+   (#4092)
+ - twisted.application.app.reportProfile has been removed. (#4093)
+ - twisted.application.app.getLogFile has been removed. (#4094)
+ - Removed twisted.cred.util.py, which has been deprecated since
+   Twisted 8.3. (#4107)
+ - twisted.python.util.dsu is now deprecated. (#4339)
+ - In twisted.trial.util: FailureError, DirtyReactorWarning,
+   DirtyReactorError, and PendingTimedCallsError, which have all been
+   deprecated since Twisted 8.0, have been removed. (#4505)
+
+Other
+-----
+ - #1363, #1742, #3170, #3359, #3431, #3738, #4088, #4206, #4221,
+   #4239, #4257, #4272, #4274, #4287, #4291, #4293, #4309, #4316,
+   #4319, #4324, #4332, #4335, #4348, #4358, #4394, #4399, #4409,
+   #4418, #4443, #4449, #4479, #4485, #4486, #4497
+
+
+Twisted Core 10.0.0 (2010-03-01)
+================================
+
+Features
+--------
+ - The twistd man page now has a SIGNALS section. (#689)
+
+ - reactor.spawnProcess now will not emit a PotentialZombieWarning
+   when called before reactor.run, and there will be no potential for
+   zombie processes in this case. (#2078)
+
+ - High-throughput applications based on Perspective Broker should now
+   run noticeably faster thanks to the use of a more efficient decoding
+   function in Twisted Spread. (#2310)
+
+ - Documentation was added for the trac-post-commit-hook functionality
+   in the svn-dev policy. (#3867)
+
+ - twisted.protocols.socks.SOCKSv4 now supports the SOCKSv4a protocol.
+   (#3886)
+
+ - Trial can now output test results according to the subunit
+   protocol, as long as Subunit is installed (see
+   https://launchpad.net/subunit). (#4004)
+
+ - twisted.protocols.amp now provides a ListOf argument type which can
+   be composed with some other argument types to create a zero or more
+   element sequence of that type. (#4116)
+
+ - If returnValue is invoked outside of a function decorated with
+   @inlineCallbacks, but causes a function thusly decorated to exit, a
+   DeprecationWarning will be emitted explaining this potentially
+   confusing behavior (sketched after this list).  In a future release,
+   this will cause an exception. (#4157)
+
+ - twisted.python.logfile.BaseLogFile now has a reopen method allowing
+   you to use an external logrotate mechanism. (#4255)
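+
+A sketch of the confusing pattern described above; helper() and
+caller() are hypothetical:
+
+    from twisted.internet.defer import inlineCallbacks, returnValue
+
+    def helper():
+        # Not decorated with @inlineCallbacks: calling returnValue here
+        # makes the nearest decorated caller exit with this value, the
+        # confusing case that now emits a DeprecationWarning.
+        returnValue(42)
+
+    @inlineCallbacks
+    def caller():
+        helper()       # caller()'s Deferred fires with 42 right here
+        yield None     # never reached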
+
+Bugfixes
+--------
+ - FTP.ftp_NLST now handles requests on invalid paths in a way
+   consistent with RFC 959. (#1342)
+
+ - twisted.python.util.initgroups now calls the low-level C initgroups
+   by default if available: the Python version can generate a lot of
+   I/O with certain authentication setups while retrieving all the
+   necessary information. (#3226)
+
+ - startLogging now does nothing on subsequent invocations, thus
+   fixing a terrible infinite recursion bug that occurred only in an
+   edge case. (#3289)
+
+ - Non-string data passed to NetstringReceiver.sendString is now
+   stringified before the length is calculated, so that the calculated
+   length equals the actual length of the transported data. (#3299)
+
+ - twisted.python.win32.cmdLineQuote now correctly quotes empty
+   string arguments. (#3876)
+
+ - The Gtk2Reactor now registers only one source watch for each file
+   descriptor, instead of one for reading and one for writing. In
+   particular, this fixes a bug with GLib under Windows where
+   notification of a newly connected client could be missed. (#3925)
+
+ - Twisted Trial no longer crashes if it can't remove an old
+   _trial_temp directory.  (#4020)
+
+ - The optional _c_urlarg extension now handles unquote("") correctly
+   on platforms where malloc(0) returns NULL, such as AIX.  It also
+   compiles with fewer warnings. (#4142)
+
+ - On POSIX, child processes created with reactor.spawnProcess will no
+   longer automatically ignore the signals which the parent process
+   has set to be ignored. (#4199)
+
+ - All SOCKSv4a tests now use a dummy reactor with a deterministic
+   resolve method. (#4275)
+
+ - Prevent extraneous server, date and content-type headers in proxy
+   responses. (#4277)
+
+Deprecations and Removals
+-------------------------
+ - twisted.internet.error.PotentialZombieWarning is now deprecated.
+   (#2078)
+
+ - twisted.test.time_helpers is now deprecated. (#3719)
+
+ - The deprecated connectUDP method of IReactorUDP has now been
+   removed. (#4075)
+
+ - twisted.trial.unittest.TestCase now ignores the previously
+   deprecated setUpClass and tearDownClass methods. (#4175)
+
+Other
+-----
+ - #917, #2406, #2481, #2608, #2689, #2884, #3056, #3082, #3199,
+   #3480, #3592, #3718, #3935, #4066, #4083, #4154, #4166, #4169,
+   #4176, #4183, #4186, #4188, #4189, #4194, #4201, #4204, #4209,
+   #4222, #4234, #4235, #4238, #4240, #4245, #4251, #4264, #4268,
+   #4269, #4282
+
+
+Twisted Core 9.0.0 (2009-11-24)
+===============================
+
+Features
+--------
+ - LineReceiver.clearLineBuffer now returns the bytes that it cleared (#3573)
+ - twisted.protocols.amp now raises InvalidSignature when bad arguments are
+   passed to Command.makeArguments (#2808)
+ - IArgumentType was added to represent an existing but previously unspecified
+   interface in amp (#3468)
+ - Obscure python tricks have been removed from the finger tutorials (#2110)
+ - The digest auth implementations in twisted.web and twisted.protocols.sip
+   have been merged together in twisted.cred (#3575)
+ - FilePath and ZipPath now have a parents() method which iterates over all
+   of their parents (#3588)
+ - reactors which support threads now have a getThreadPool method (#3591)
+ - The MemCache client implementation now allows arguments to the "stats"
+   command (#3661)
+ - The MemCache client now has a getMultiple method which allows fetching of
+   multiple values (#3171)
+ - twisted.spread.jelly can now unserialize some new-style classes (#2950)
+ - twisted.protocols.loopback.loopbackAsync now accepts a parameter to control
+   the data passed between client and server (#3820)
+ - The IOCP reactor now supports SSL (#593)
+ - Tasks in a twisted.internet.task.Cooperator can now be paused, resumed, and
+   cancelled, as sketched after this list (#2712)
+ - AmpList arguments can now be made optional (#3891)
+ - The syslog output observer now supports log levels (#3300)
+ - LoopingCall now supports reporting the number of intervals missed if it
+   isn't able to schedule calls fast enough (#3671)
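+
+A minimal sketch of the pause/resume support noted above; produce() is a
+hypothetical iterator:
+
+    from twisted.internet import task
+
+    def produce():
+        for i in range(100):
+            yield None      # each yield lets the Cooperator reschedule
+
+    coop = task.Cooperator()
+    t = coop.cooperate(produce())   # a task that can be controlled
+    t.pause()                       # temporarily stop iterating
+    t.resume()                      # pick up where it left off
+    d = t.whenDone()                # Deferred fired when iteration finishes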
+
+Fixes
+-----
+ - The deprecated md5 and sha modules are no longer used if the stdlib hashlib
+   module is available (#2763)
+ - An obscure deadlock involving waking up the reactor within signal handlers
+   in particular threads was fixed (#1997)
+ - The passivePortRange attribute of FTPFactory is now honored (#3593)
+ - TestCase.flushWarnings now flushes warnings even if they were produced by a
+   file that was renamed since it was byte compiled (#3598)
+ - Some internal file descriptors are now marked as close-on-exec, so these will
+   no longer be leaked to child processes (#3576)
+ - twisted.python.zipstream now correctly extracts the first file in a directory
+   as a file, and not an empty directory (#3625)
+ - proxyForInterface now returns classes which correctly *implement* interfaces
+   rather than *providing* them (#3646)
+ - SIP Via header parameters should now be correctly generated (#2194)
+ - The Deferred returned by stopListening would sometimes previously never fire
+   if an exception was raised by the underlying file descriptor's connectionLost
+   method. Now the Deferred will fire with a failure (#3654)
+ - The command-line tool "manhole" should now work with newer versions of pygtk
+   (#2464)
+ - When a DefaultOpenSSLContextFactory is instantiated with invalid parameters,
+   it will now raise an exception immediately instead of waiting for the first
+   connection (#3700)
+ - Twisted command line scripts should now work when installed in a virtualenv
+   (#3750)
+ - Trial will no longer delete temp directories which it did not create (#3481)
+ - Processes started on Windows should now be cleaned up properly in more cases
+   (#3893)
+ - Certain misbehaving importers will no longer cause twisted.python.modules
+   (and thus trial) to raise an exception, but rather issue a warning (#3913)
+ - MemCache client protocol methods will now fail when the transport has been
+   disconnected (#3643)
+ - In the AMP method callRemoteString, the requiresAnswer parameter is now
+   honored (#3999)
+ - Spawning a "script" (a file which starts with a #! line) on Windows running
+   Python 2.6 will now work instead of raising an exception about file mode
+   "ru" (#3567)
+ - FilePath's walk method now calls its "descend" parameter even on the first
+   level of children, instead of only on grandchildren. This allows for better
+   symlink cycle detection (#3911)
+ - Attempting to write unicode data to process pipes on Windows will no longer
+   result in arbitrarily encoded messages being written to the pipe, but instead
+   will immediately raise an error (#3930)
+ - The various twisted command line utilities will no longer print
+   ModuleType.__doc__ when Twisted was installed with setuptools (#4030)
+ - A Failure object will now be passed to connectionLost on stdio connections
+   on Windows, instead of an Exception object (#3922)
+
+Deprecations and Removals
+-------------------------
+ - twisted.persisted.marmalade was deleted after a long period of deprecation
+   (#876)
+ - Some remaining references to the long-gone plugins.tml system were removed
+   (#3246)
+ - SSLv2 is now disabled by default, but it can be re-enabled explicitly
+   (#3330)
+ - twisted.python.plugin has been removed (#1911)
+ - reactor.run will now raise a ReactorAlreadyRunning exception when it is
+   called reentrantly instead of warning a DeprecationWarning (#1785)
+ - twisted.spread.refpath is now deprecated because it is unmaintained,
+   untested, and has dubious value (#3723)
+ - The unused --quiet flag has been removed from the twistd command (#3003)
+
+Other
+-----
+ - #3545, #3490, #3544, #3537, #3455, #3315, #2281, #3564, #3570, #3571, #3486,
+   #3241, #3599, #3220, #1522, #3611, #3596, #3606, #3609, #3602, #3637, #3647,
+   #3632, #3675, #3673, #3686, #2217, #3685, #3688, #2456, #506, #3635, #2153,
+   #3581, #3708, #3714, #3717, #3698, #3747, #3704, #3707, #3713, #3720, #3692,
+   #3376, #3652, #3695, #3735, #3786, #3783, #3699, #3340, #3810, #3822, #3817,
+   #3791, #3859, #2459, #3677, #3883, #3894, #3861, #3822, #3852, #3875, #2722,
+   #3768, #3914, #3885, #2719, #3905, #3942, #2820, #3990, #3954, #1627, #2326,
+   #2972, #3253, #3937, #4058, #1200, #3639, #4079, #4063, #4050
+
+
+Core 8.2.0 (2008-12-16)
+=======================
+
+Features
+--------
+ - Reactors are slowly but surely becoming more isolated, thus improving
+   testability (#3198)
+ - FilePath has gained a realpath method, and FilePath.walk no longer infinitely
+   recurses in the case of a symlink causing a self-recursing filesystem tree
+   (#3098)
+ - FilePath's moveTo and copyTo methods now have an option to disable following
+   of symlinks (#3105)
+ - Private APIs are now included in the API documentation (#3268)
+ - hotshot is now the default profiler for the twistd --profile parameter and
+   using cProfile is now documented (#3355, #3356)
+ - Process protocols can now implement a processExited method, which is
+   distinct from processEnded in that it is called immediately when the child
+   has died, instead of waiting for all the file descriptors to be closed;
+   see the sketch after this list (#1291)
+ - twistd now has a --umask option (#966, #3024)
+ - A new deferToThreadPool function exists in twisted.internet.threads (#2845)
+ - There is now an example of writing an FTP server in examples/ftpserver.py
+   (#1579)
+ - A new runAsEffectiveUser function has been added to twisted.python.util
+   (#2607)
+ - twisted.internet.utils.getProcessOutput now offers a mechanism for
+   waiting for the process to actually end, in the event of data received on
+   stderr (#3239)
+ - A fullyQualifiedName function has been added to twisted.python.reflect
+   (#3254)
+ - strports now defaults to managing access to a UNIX socket with a lock;
+   lockfile=0 can be included in the strports specifier to disable this
+   behavior (#2295)
+ - FTPClient now has a 'rename' method (#3335)
+ - FTPClient now has a 'makeDirectory' method (#3500)
+ - FTPClient now has a 'removeFile' method (#3491)
+ - flushWarnings, a new Trial method for testing warnings, has been added
+   (#3487, #3427, #3506)
+ - The log observer can now be configured in .tac files (#3534)
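+
+A minimal sketch of the processExited/processEnded distinction noted
+above; it assumes a POSIX system with /bin/sleep available:
+
+    from twisted.internet import protocol, reactor
+
+    class ShortLived(protocol.ProcessProtocol):
+        def processExited(self, reason):
+            # Called as soon as the child process itself has exited.
+            pass
+
+        def processEnded(self, reason):
+            # Called only after all pipes to the child are closed too.
+            reactor.stop()
+
+    reactor.spawnProcess(ShortLived(), "/bin/sleep", ["sleep", "1"])
+    reactor.run()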
+
+Fixes
+-----
+ - TLS Session Tickets are now disabled by default, allowing connections to
+   certain servers which hang when an empty session ticket is received (like
+   GTalk) (#3463)
+ - twisted.enterprise.adbapi.ConnectionPool's noisy attribute now defaults to
+   False, as documented (#1806)
+ - Error handling and logging in adbapi is now much improved (#3244)
+ - TCP listeners can now be restarted (#2913)
+ - Doctests can now be rerun with trial's --until-failure option (#2713)
+ - Some memory leaks have been fixed in trial's --until-failure
+   implementation (#3119, #3269)
+ - Trial's summary reporter now prints correct runtime information and handles
+   the case of 0 tests (#3184)
+ - Trial and any other user of the 'namedAny' function now have better error
+   reporting in the case of invalid module names (#3259)
+ - Multiple instances of trial can now run in parallel in the same directory
+   by creating _trial_temp directories with an incremental suffix (#2338)
+ - Trial's failUnlessWarns method now works on Python 2.6 (#3223)
+ - twisted.python.log now hooks into the warnings system in a way compatible
+   with Python 2.6 (#3211)
+ - The GTK2 reactor is now better supported on Windows, but still not passing
+   the entire test suite (#3203)
+ - low-level failure handling in spawnProcess has been improved and no longer
+   leaks file descriptors (#2305, #1410)
+ - Perspective Broker avatars now have their logout functions called in more
+   cases (#392)
+ - Log observers which raise exceptions are no longer removed (#1069)
+ - transport.getPeer now always includes an IP address in the Address returned
+   instead of a hostname (#3059)
+ - Functions in twisted.internet.utils which spawn processes now avoid calling
+   chdir in the case where no working directory is passed, to avoid some
+   obscure permission errors (#3159)
+ - twisted.spread.publish.Publishable no longer corrupts line endings on
+   Windows (#2327)
+ - SelectReactor now properly detects when a TLS/TCP connection has been
+   disconnected (#3218)
+ - twisted.python.lockfile no longer raises an EEXIST OSError and is much
+   better supported on Windows (#3367)
+ - When ITLSTransport.startTLS is called while there is data in the write
+   buffer, TLS negotiation will now be delayed instead of the method raising
+   an exception (#686)
+ - The userAnonymous argument to FTPFactory is now honored (#3390)
+ - twisted.python.modules no longer tries to "fix" sys.modules after an import
+   error, which was just causing problems (#3388)
+ - setup.py no longer attempts to build extension modules when run with Jython
+   (#3410)
+ - AMP boxes can now be sent in IBoxReceiver.startReceivingBoxes (#3477)
+ - AMP connections are closed as soon as a key length larger than 255 is
+   received (#3478)
+ - Log events with timezone offsets between -1 and -59 minutes are now
+   correctly reported as negative (#3515)
+
+Deprecations and Removals
+-------------------------
+ - Trial's setUpClass and tearDownClass methods are now deprecated (#2903)
+ - problemsFromTransport has been removed in favor of the argument passed to
+   connectionLost (#2874)
+ - The mode parameter to methods of IReactorUNIX and IReactorUNIXDatagram is
+   deprecated in favor of applications taking other security precautions, since
+   the mode of a Unix socket is often not respected (#1068)
+ - Index access on instances of twisted.internet.defer.FirstError has been
+   removed in favor of the subFailure attribute (#3298)
+ - The 'changeDirectory' method of FTPClient has been deprecated in favor of
+   the 'cwd' method (#3491)
+
+Other
+-----
+
+ - #3202, #2869, #3225, #2955, #3237, #3196, #2355, #2881, #3054, #2374, #2918,
+   #3210, #3052, #3267, #3288, #2985, #3295, #3297, #2512, #3302, #1222, #2631,
+   #3306, #3116, #3215, #1489, #3319, #3320, #3321, #1255, #2169, #3182, #3323,
+   #3301, #3318, #3029, #3338, #3346, #1144, #3173, #3165, #685, #3357, #2582,
+   #3370, #2438, #1253, #637, #1971, #2208, #979, #1790, #1888, #1882, #1793,
+   #754, #1890, #1931, #1246, #1025, #3177, #2496, #2567, #3400, #2213, #2027,
+   #3415, #1262, #3422, #2500, #3414, #3045, #3111, #2974, #2947, #3222, #2878,
+   #3402, #2909, #3423, #1328, #1852, #3382, #3393, #2029, #3489, #1853, #2026,
+   #2375, #3502, #3482, #3504, #3505, #3507, #2605, #3519, #3520, #3121, #3484,
+   #3439, #3216, #3511, #3524, #3521, #3197, #2486, #2449, #2748, #3381, #3236,
+   #671
+
+
+8.1.0 (2008-05-18)
+==================
+
+Features
+--------
+
+ - twisted.internet.error.ConnectionClosed is a new exception which is the
+   superclass of ConnectionLost and ConnectionDone (#3137)
+ - Trial's CPU and memory performance should be better now (#3034)
+ - twisted.python.filepath.FilePath now has a chmod method (#3124)
+
+Fixes
+-----
+
+ - Some reactor re-entrancy regressions were fixed (#3146, #3168)
+ - A regression was fixed whereby constructing a Failure for an exception and
+   traceback raised out of a Pyrex extension would fail (#3132)
+ - CopyableFailures in PB can again be created from CopiedFailures (#3174)
+ - FilePath.remove, when called on a FilePath representing a symlink to a
+   directory, no longer removes the contents of the targeted directory, and
+   instead removes the symlink (#3097)
+ - FilePath now has a linkTo method for creating new symlinks (#3122)
+ - The docstring for Trial's addCleanup method now correctly specifies when
+   cleanup functions are run (#3131)
+ - assertWarns now deals better with multiple identical warnings (#2904)
+ - Various windows installer bugs were fixed (#3115, #3144, #3150, #3151, #3164)
+ - API links in the howto documentation have been corrected (#3130)
+ - The Win32 Process transport object now has a pid attribute (#1836)
+ - A doc bug in the twistd plugin howto which would inevitably lead to
+   confusion was fixed (#3183)
+ - A regression breaking IOCP introduced after the last release was fixed
+   (#3200)
+
+
+Deprecations and Removals
+-------------------------
+
+ - mktap is now fully deprecated, and will emit DeprecationWarnings when used
+   (#3127)
+
+Other
+-----
+ - #3079, #3118, #3120, #3145, #3069, #3149, #3186, #3208, #2762
+
+
+8.0.1 (2008-03-26)
+==================
+
+Fixes
+-----
+ - README no longer refers to an obsolete trial command line option
+ - twistd no longer causes a bizarre DeprecationWarning about mktap
+
+
+8.0.0 (2008-03-17)
+==================
+
+Features
+--------
+
+ - The IOCP reactor has had many changes and is now greatly improved
+   (#1760, #3055)
+ - The main Twisted distribution is now easy_installable (#1286, #3110)
+ - twistd can now profile with cProfile (#2469)
+ - twisted.internet.defer contains a DeferredFilesystemLock which gives a
+   Deferred interface to lock file acquisition (#2180)
+ - twisted.python.modules is a new system for representing and manipulating
+   module paths (i.e. sys.path) (#1951)
+ - twisted.internet.fdesc now contains a writeToFD function, along with other
+   minor fixes (#2419)
+ - twisted.python.usage now allows optional type enforcement (#739)
+ - The reactor now has a blockingCallFromThread method for non-reactor threads
+   to use to wait for a reactor-scheduled call to return a result (#1042, #3030)
+ - Exceptions raised inside of inlineCallbacks-using functions now have a
+   better chance of coming with a meaningful traceback (#2639, #2803)
+ - twisted.python.randbytes now contains code for generating secure random
+   bytes (#2685)
+ - The classes in twisted.application.internet now accept a reactor parameter
+   for specifying the reactor to use for underlying calls to allow for better
+   testability (#2937)
+ - LoopingCall now allows you to specify the reactor to use to schedule new
+   calls, allowing much better testing techniques (#2633, #2634)
+ - twisted.internet.task.deferLater is a new API for scheduling calls and
+   getting deferreds which are fired with their results (#1875)
+ - objgrep now knows how to search through deque objects (#2323)
+ - twisted.python.log now contains a Twisted log observer which can forward
+   messages to the Python logging system, as sketched after this list (#1351)
+ - Log files now include seconds in the timestamps (#867)
+ - It is now possible to limit the number of log files to create during log
+   rotation (#1095)
+ - The interface required by the log context system is now documented as
+   ILoggingContext, and abstract.FileDescriptor now declares that it implements
+   it (#1272)
+ - There is now an example cred checker that uses a database via adbapi (#460)
+ - The epoll reactor is now documented in the choosing-reactors howto (#2539)
+ - There were improvements to the client howto (#222)
+ - Int8Receiver was added (#2315)
+ - Various refactorings to AMP introduced better testability and public
+   interfaces (#2657, #2667, #2656, #2664, #2810)
+ - twisted.protocol.policies.TrafficLoggingFactory now has a resetCounter
+   method (#2757)
+ - The FTP client can be told which port range within which to bind passive
+   transfer ports (#1904)
+ - twisted.protocols.memcache contains a new asynchronous memcache client
+   (#2506, #2957)
+ - PB now supports anonymous login (#439, #2312)
+ - twisted.spread.jelly now supports decimal objects (#2920)
+ - twisted.spread.jelly now supports all forms of sets (#2958)
+ - There is now an interface describing the API that process protocols must
+   provide (#3020)
+ - Trial reporting to core unittest TestResult objects has been improved (#2495)
+ - Trial's TestCase now has an addCleanup method which allows easy setup of
+   tear-down code (#2610, #2899)
+ - Trial's TestCase now has an assertIsInstance method (#2749)
+ - Trial's memory footprint and speed are greatly improved (#2275)
+ - At the end of trial runs, "PASSED" and "FAILED" messages are now colorized
+   (#2856)
+ - Tests which leave global state around in the reactor will now fail in
+   trial. A new option, --unclean-warnings, will convert these errors back into
+   warnings (#2091)
+ - Trial now has a --without-module command line for testing code in an
+   environment that lacks a particular Python module (#1795)
+ - Error reporting of failed assertEquals assertions now has much nicer
+   formatting (#2893)
+ - Trial now has methods for monkey-patching (#2598)
+ - Trial now has an ITestCase (#2898, #1950)
+ - The trial reporter API now has a 'done' method which is called at the end of
+   a test run (#2883)
+ - TestCase now has an assertWarns method which allows testing that functions
+   emit warnings (#2626, #2703)
+ - There are now no string exceptions in the entire Twisted code base (#2063)
+ - There is now a system for specifying credentials checkers with a string
+   (#2570)
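+
+A minimal sketch of the observer mentioned above that forwards Twisted
+log messages to the stdlib logging module; the logger name shown is
+assumed to be the default:
+
+    import logging
+
+    from twisted.python import log
+
+    logging.basicConfig(level=logging.INFO)
+    observer = log.PythonLoggingObserver(loggerName="twisted")
+    observer.start()
+
+    log.msg("this message is forwarded to the stdlib logging module")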
+
+Fixes
+-----
+
+ - Some tests which were asserting the value of stderr have been changed
+   because Python uncontrollably writes bytes to stderr (#2405)
+ - Log files handle time zones with DST better (#2404)
+ - Subprocesses using PTYs on OS X that are handled by Twisted will now be able
+   to more reliably write the final bytes before they exit, allowing Twisted
+   code to more reliably receive them (#2371, #2858)
+ - Trial unit test reporting has been improved (#1901)
+ - The kqueue reactor handles connection failures better (#2172)
+ - It's now possible to run "trial foo/bar/" without an exception: trailing
+   slashes no longer cause problems (#2005)
+ - cred portals now better deal with implementations of inherited interfaces
+   (#2523)
+ - FTP error handling has been improved (#1160, #1107)
+ - Trial behaves better with respect to file locking on Windows (#2482)
+ - The FTP server now gives a better error when STOR is attempted during an
+   anonymous session (#1575)
+ - Trial now behaves better with tests that use the reactor's threadpool (#1832)
+ - twisted.python.reload now behaves better with new-style objects (#2297)
+ - LogFile's defaultMode parameter is now better implemented, preventing
+   potential security exploits (#2586)
+ - A minor obscure leak in thread pools was corrected (#1134)
+ - twisted.internet.task.Clock now returns the correct DelayedCall from
+   callLater, instead of returning the one scheduled for the furthest in the
+   future (#2691)
+ - twisted.spread.util.FilePager no longer unnecessarily buffers data in
+   memory (#1843, #2321)
+ - Asking for twistd or trial to use an unavailable reactor no longer prints a
+   traceback (#2457)
+ - System event triggers have fewer obscure bugs (#2509)
+ - Plugin discovery code is much better behaved, allowing multiple
+   installations of a package with plugins (#2339, #2769)
+ - Process and PTYProcess have been merged and some minor bugs have been fixed
+   (#2341)
+ - The reactor has less global state (#2545)
+ - Failure can now correctly represent and format errors caused by string
+   exceptions (#2830)
+ - The epoll reactor now has better error handling which now avoids the bug
+   causing 100% CPU usage in some cases (#2809)
+ - Errors raised during trial setUp or tearDown methods are now handled better
+   (#2837)
+ - A problem when deferred callbacks add new callbacks to the deferred that
+   they are a callback of was fixed (#2849)
+ - Log messages that are emitted during connectionMade now have the protocol
+   prefix correctly set (#2813)
+ - The string representation of a TCP Server connection now contains the actual
+   port that it's bound to when it was configured to listen on port 0 (#2826)
+ - There is better reporting of error codes for TCP failures on Windows (#2425)
+ - Process spawning has been made slightly more robust by disabling garbage
+   collection temporarily immediately after forking so that finalizers cannot
+   be executed in an unexpected environment (#2483)
+ - namedAny now detects import errors better (#698)
+ - Many fixes and improvements to the twisted.python.zipstream module have
+   been made (#2996)
+ - FilePager no longer blows up on empty files (#3023)
+ - twisted.python.util.FancyEqMixin has been improved to cooperate with objects
+   of other types (#2944)
+ - twisted.python.FilePath.exists now restats to prevent incorrect result
+   (#2896)
+ - twisted.python.util.mergeFunctionMetadata now also merges the __module__
+   attribute (#3049)
+ - It is now possible to call transport.pauseProducing within connectionMade on
+   TCP transports without it being ignored (#1780)
+ - twisted.python.versions now understands new SVN metadata format for fetching
+   the SVN revision number (#3058)
+ - It's now possible to use reactor.callWhenRunning(reactor.stop) on gtk2 and
+   glib2 reactors (#3011)
+
+Deprecations and Removals
+-------------------------
+ - twisted.python.timeoutqueue is now deprecated (#2536)
+ - twisted.enterprise.row and twisted.enterprise.reflector are now deprecated
+   (#2387)
+ - twisted.enterprise.util is now deprecated (#3022)
+ - The dispatch and dispatchWithCallback methods of ThreadPool are now
+   deprecated (#2684)
+ - Starting the same reactor multiple times is now deprecated (#1785)
+ - The visit method of various test classes in trial has been deprecated (#2897)
+ - The --report-profile option to twistd and twisted.python.dxprofile are
+   deprecated (#2908)
+ - The upDownError method of Trial reporters is deprecated (#2883)
+
+Other
+-----
+
+ - #2396, #2211, #1921, #2378, #2247, #1603, #2463, #2530, #2426, #2356, #2574,
+   #1844, #2575, #2655, #2640, #2670, #2688, #2543, #2743, #2744, #2745, #2746,
+   #2742, #2741, #1730, #2831, #2216, #1192, #2848, #2767, #1220, #2727, #2643,
+   #2669, #2866, #2867, #1879, #2766, #2855, #2547, #2857, #2862, #1264, #2735,
+   #942, #2885, #2739, #2901, #2928, #2954, #2906, #2925, #2942, #2894, #2793,
+   #2761, #2977, #2968, #2895, #3000, #2990, #2919, #2969, #2921, #3005, #421,
+   #3031, #2940, #1181, #2783, #1049, #3053, #2847, #2941, #2876, #2886, #3086,
+   #3095, #3109
+
+
+2.5.0 (2006-12-29)
+==================
+
+Twisted 2.5.0 is a major feature release, with several interesting new
+developments and a great number of bug fixes. Some of the highlights
+follow.
+
+ * AMP, the Asynchronous Messaging Protocol, was introduced.  AMP is
+   a protocol which provides request/response semantics over a
+   persistent connection in a very simple and extensible manner.
+
+ * An Epoll-based reactor was added, which can be used with twistd or
+   trial by passing "-r epoll" on the command line. This may improve
+   performance of certain high-traffic network applications.
+
+ * The 'twistd' command can now accept sub-commands which name an
+   application to run. For example, 'twistd web --path .' will start a
+   web server serving files out of the current directory. This
+   functionality is meant to replace the old way of doing things with
+   'mktap' and 'twistd -f'.
+
+ * Python 2.5 is now supported. Previous releases of Twisted were
+   broken by changes in the release of Python 2.5.
+
+ * 'inlineCallbacks' was added, which allows taking advantage of the
+   new 'yield' expression syntax in Python 2.5 to avoid writing
+   callbacks for Deferreds; see the sketch after this list.
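+
+A minimal sketch of the inlineCallbacks style mentioned above; sleep()
+and example() are hypothetical:
+
+    from twisted.internet import defer, reactor
+
+    def sleep(seconds):
+        d = defer.Deferred()
+        reactor.callLater(seconds, d.callback, None)
+        return d
+
+    @defer.inlineCallbacks
+    def example():
+        yield sleep(1.0)            # waits without an explicit callback
+        defer.returnValue("done")   # result of example()'s Deferred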
+
+In addition to these changes, there are many other minor features and
+a large number of bug fixes.
+
+Features
+--------
+ - log.err can now take a second argument for specifying information
+   about an error (#1399)
+ - A time-simulating test helper class, twisted.internet.task.Clock,
+   was added (#1757)
+ - Trial docstring improvements were made (#1604, #2133)
+ - New SSL features were added to twisted.internet.ssl, such as client
+   validation (#302)
+ - Python 2.5 is now supported (#1867)
+ - Trial's assertFailure now provides more information on failure (#1869)
+ - Trial can now be run on tests within a zipfile (#1940)
+ - AMP, a new simple protocol for asynchronous messaging, was added (#1715)
+ - Trial's colorful reporter now works on win32 (#1646)
+ - Trial test modules may now dynamically construct TestSuites (#1638, #2165)
+ - twistd can now make use of plugins to run applications (#1922, #2013)
+ - Twisted now works with the latest (unreleased) zope.interface (#2160)
+ - An epoll-based reactor, epollreactor, was added. It is selectable
+   with the -r options to twistd and trial (#1953)
+ - twistd and trial now use the plugin system to find reactors which
+   can be selected (#719)
+ - twisted.internet.defer.inlineCallbacks was added. It takes
+   advantage of Python 2.5's generators to offer a way to deal with
+   Deferreds without callbacks (#2100)
+
+Fixes
+-----
+ - Traceback formatting in Trial was improved (#1454, #1610)
+ - twisted.python.filepath.FilePath.islink now actually returns True when
+   appropriate (#1773)
+ - twisted.plugin now no longer raises spurious errors (#926)
+ - twisted.pb Cacheables may now be new-style classes (#1324)
+ - FileDescriptor now deals with producers in a more
+   interface-compliant and robust manner (#2286, #811)
+ - "setup.py build" and other setup.py commands which don't actually
+   install the software now work (#1835)
+ - wxreactor has had various fixes (#1235, #1574, #1688)
+
+Deprecations and Removals
+-------------------------
+ - The old twisted.cred API (Perspectives, Identities and such) was
+   removed (#1440)
+ - twisted.spread.newjelly was removed (#1831)
+ - Various deprecated things in twisted.python.components were
+   removed: Interface, MetaInterface, getAdapterClass, and
+   getAdapterClassWithInheritance (#1636)
+ - twisted.enterprise.xmlreflector was removed (#661)
+ - mktap is slowly on its way out, now that twistd supports plugins. It
+   is not yet officially deprecated (#2013)
+ - tkmktap was removed, because it wasn't working anyway (#2020)
+ - reactor.iterate calls made inside of a Trial test case are
+   deprecated (#2090)
+ - twisted.internet.qtreactor was removed: It has been moved to a
+   separate project. See http://twistedmatrix.com/trac/wiki/QTReactor
+   (#2130, #2137)
+ - threadedselectreactor is now not a directly usable reactor; it is
+   only meant to help in writing other reactors (#2126)
+ - twisted.python.reflect.funcinfo is deprecated (#2079)
+ - twisted.spread.sturdy, which was already completely broken, was
+   removed (#2299)
+
+
+Other
+-----
+The following changes are minor or closely related to other changes.
+
+ - #1783, #1786, #1788, #1648, #1734, #1609, #1800, #1818,
+   #1629, #1829, #491, #1816, #1824, #1855, #1797, #1637, #1371,
+   #1892, #1887, #1897, #1563, #1741, #1943, #1952, #1276,
+   #1837, #1726, #1963, #1965, #1973, #1976, #1991, #1936, #1113,
+   #630, #2002, #2040, #2044, #1617, #2045, #2055, #2056, #2022,
+   #2052, #1552, #1999, #1507, #2054, #1970, #1968, #662, #1910,
+   #1694, #1999, #1409, #2150, #2127, #2155, #1983, #2014, #2222,
+   #1067, #2136, #2065, #1430, #2173, #2212, #1871, #2147, #1199,
+   #2273, #428, #992, #815, #2024, #2292, #2125, #2139, #2291, #2174,
+   #2306, #2228, #2309, #2319, #2317, #2313, #2154, #1985, #1201
+
+
+2.4.0 (2006-05-21)
+==================
+
+Features
+--------
+ - twisted.internet.task.Cooperator (Added along with #1701).
+
+Fixes
+-----
+ - Errors in UDP protocols no longer unbind the UDP port (#1695).
+ - Misc: #1717, #1705, #1563, #1719, #1721, #1722, #1728.
+
+
+2.3.0 (2006-05-14)
+==================
+
+Features
+--------
+ - twisted-dev-mode's F9 now uses trial's --testmodule feature, rather than
+   trying to guess what tests to run.  This will break files using the "-x"
+   test-case-name hack (just use a comma separated list instead).
+ - API Documentation improvements.
+ - A new Producer/Consumer guide (#53)
+ - Better-defined error behavior in IReactorMulticast (#1578)
+ - IOCP Multicast support (#1500)
+ - Improved STDIO support on Windows. (#1553)
+ - LoopingCall supports Deferreds such that it will wait until a
+   Deferred has fired before rescheduling the next call; see the sketch
+   after this list (#1487)
+ - Added twisted.python.versions.Version, a structured representation
+   of Version information, including support for SVN revision numbers
+   (#1663)
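+
+A minimal sketch of the Deferred-aware LoopingCall behavior noted
+above; poll() is a hypothetical slow operation:
+
+    from twisted.internet import defer, reactor, task
+
+    def poll():
+        # Simulate slow work with a Deferred that fires two seconds later.
+        d = defer.Deferred()
+        reactor.callLater(2.0, d.callback, None)
+        return d
+
+    loop = task.LoopingCall(poll)
+    # Although the interval is one second, the next call is not
+    # scheduled until the Deferred returned by poll() has fired.
+    loop.start(1.0)
+    reactor.run()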
+
+Fixes
+-----
+
+ - Many trial fixes, as usual
+ - All API documentation is now correctly formatted as epytext (#1545)
+ - twisted.python.filepath.FilePath.__repr__ is safer.
+ - Fix trial's "until-failure" mode. (#1453)
+ - deferredGenerator no longer causes handled exceptions (or
+   results) to propagate to the resulting Deferred (#1709).
+ - Misc: #1483, #1495, #1503, #1532, #1539, #1559, #1509, #1538,
+   #1571, #1331, #1561, #737, #1562, #1573, #1594, #1607, #1407, #1615,
+   #1645, #1634, #1620, #1664, #1666, #1650, #1670, #1675, #1692, #1710,
+   #1668.
+
+Deprecations
+------------
+
+ - Removal of already-deprecated trial APIs: the assertions module,
+   util.deferredResult, util.deferredError, util.fireWhenDoneFunc,
+   util.spinUntil, util.spinWhile, util.extract_tb,
+   util.format_exception, util.suppress_warnings, unittest.wait,
+   util.wait
+ - The backwards compatibility layer of twisted.python.components
+   (e.g., backwardsCompatImplements, fixClassImplements, etc) has been
+   disabled. The functions still exist, but do nothing as to not break
+   user code outright (#1511)
+ - Deprecate the usage of the 'default' argument as a keyword argument
+   in Interface.__call__. Passing a second positional argument to
+   specify the default return value of the adaptation is still
+   supported.
+
+
+2.2.0 (2006-02-12)
+==================
+
+Features
+--------
+ - Twisted no longer works with Python 2.2
+ - FTP server supports more clients
+ - Process support on Windows
+ - twisted.internet.stdio improved (including Windows support!)
+ - Trial:
+   - Continued Trial refactoring
+   - Default trial reporter is verbose black&white when color isn't supported
+   - Deferreds returned in trial tests that don't fire before the
+     unittest timeout now have their errback fired with a TimeoutError
+   - raising SkipTest in setUp and setUpClass skips tests
+   - Test suites are failed if there are import errors
+
+Fixes
+-----
+ - iocpreactor fixes
+ - Threadpool fixes
+ - Fixed infinite loops in datagramReceived edge cases
+ - Issues resolved: 654, 773, 998, 1005, 1008, 1116, 1123, 1198, 1221,
+   1232, 1233, 1236, 1240, 1244, 1258, 1263, 1265, 1266, 1271, 1275,
+   1293, 1294, 1298, 1308, 1316, 1317, 1321, 1341, 1344, 1353, 1359,
+   1372, 1374, 1377, 1379, 1380, 1385, 1388, 1389, 1413, 1422, 1426,
+   1434, 1435, 1448, 1449, 1456
+
+Deprecations
+------------
+ - Trial:
+   - spinWhile and spinUntil
+   - util.wait
+   - extract_tb and format_exception
+   - util.suppressWarnings
+   - runReactor is gone
+
+
+2.1.0 (2005-11-06)
+==================
+
+Features
+--------
+ - threadedselectreactor, a reactor which potentially makes
+   integration with foreign event loops much simpler.
+ - major improvements to twisted.conch.insults, including many new widgets.
+ - adbapi ConnectionPools now have 'runWithConnection' which is
+   similar to runInteraction but gives you a connection object instead of
+   a transaction. [975]
+ - __file__ is now usable in tac files
+ - twisted.cred.pamauth now contains a PAM checker (moved from twisted.conch)
+ - twisted.protocols.policies.LimitTotalConnectionsFactory now exists,
+   which does as the name suggests
+ - twisted.protocols.ident now uses /proc/net/tcp on Linux [233]
+ - trial now recurses packages by default (a la the old -R parameter)
+ - (PB) Calling a remote method that doesn't exist now raises
+   NoSuchMethod instead of AttributeError.
+
+Fixes
+-----
+ - FTP client and server improvements
+ - Trial improvements: The code is now much simpler, and more stable.
+ - twisted.protocols.basic.FileSender now works with empty files
+ - Twisted should now be much more usable on Pythons without thread support.
+ - minor improvements to process code in win32eventreactor
+ - twistd -y (--python) now implies -o (--nosave). [539]
+ - improved lockfile handling especially with respect to unix sockets.
+ - deferredGenerator now no longer overuses the stack, which sometimes
+   caused stack overflows.
+ - Failure.raiseException now at least always raises the correct Exception.
+ - minor improvements to serialport code
+
+Deprecations
+------------
+ - twisted.python.components.getAdapter. Use IFoo(o) instead.
+ - Adapter persistence (IFoo(x, persist=True)). Just don't use it.
+ - log.debug. It was equivalent to log.msg(), just use that.
+ - twisted.protocols.telnet. twisted.conch.telnet replaces it.
+ - Setting a trial reporter using a flag to 'trial'. Instead of 'trial
+   --bwverbose', for example, use 'trial --reporter=bwverbose'.
+ - trial --coverage will become a flag in Twisted 2.2.
+ - passing a fully-qualified python name to --reporter is
+   deprecated. Pass only names of Reporter plugins.
+ - trial --psyco.
+ - trial -R (--recurse) is now the default, so passing it is deprecated.
+ - trial --reporter-args. Use the plugin system to do this sort of thing.
+ - trial.assertions.assertionMethod and trial.unittest.assertionMethod
+   are both deprecated. Use instance methods on TestCases instead.
+ - trial's deferredResult, deferredError, and wait functions. Return
+   Deferreds from your test methods instead of using them.
+ - Raising unittest.SkipTest with no arguments. Give a reason for your skip.
+ - The Failure returned from a gatherResults and DeferredList is now
+   of type FirstError instead of a tuple of (Exception, index). It
+   supports a firstError[idx] syntax but that is deprecated. Use
+   firstError.subFailure and firstError.index instead.
+ - whenThreaded now simply calls the passed function synchronously.
+
+2.0.1 (2005-05-09)
+===================
+Minor bug fix release.
+
+SVN rev (file) - [bug number] description
+-----------------------------------------
+13307 (twisted/topfiles/README) - Mention support for python 2.4, too
+13324 (twisted/internet/defer.py) - [947] Fix DeferredQueue backlog/size limit.
+13354 (twisted/plugins/__init__.py) - Correct maintainer address.
+13355 (twisted/test/test_defer.py) - improvements to DeferredQueue test case
+13387 (setup.py) - add news to list of subprojects to install
+13332 (twisted/internet/posixbase.py) - Fix spelling error
+13366 (twisted/internet/qtreactor.py) - [957] [954] reactor.iterate fixes
+13368 (twisted/test/test_internet.py) - Fix DelayedCall test case
+13422 (twisted/internet/posixbase.py) - Remove log from _Win32Waker creation.
+13437 (twisted/plugin.py) - [958] Only write cache if there were changes.
+13666 (twisted/internet/gtkreactor.py,gtk2reactor.py) - Don't run callbacks
+          until the reactor is actually up and running
+13748 (twisted/internet/gtk2reactor.py) - [552] [994] Initialize threading properly.
+
+
+2.0.0 (2005-03-25)
+==================
+
+Major new features
+------------------
+ - Replaced home-grown components system with zope.interface.
+ - Split Twisted into multiple pieces.
+ - Relicensed: Now under the MIT license, rather than LGPL.
+ - Python 2.4 compatibility fixes
+ - Major efficiency improvements in TCP buffering algorithm.
+ - Major efficiency improvements in reactor.callLater/DelayedCall.
+ - Half-close support for TCP/SSL. (loseWriteConnection).
+
+Miscellaneous features/fixes
+----------------------------
+ - New plugin system: twisted.plugin
+ - Better debugging support. Control-C will break you into PDB.
+ - The twistd command has --uid --gid command line arguments.
+ - *Incompatibility: mktap defaults to not change UID/GID, instead of saving
+   the invoking user's UID/GID.
+ - Removed some functions that were deprecated since Twisted 1.0.
+ - ZSH tab-completion for twisted commands.
+
+ - More correct daemonization in twistd.
+ - twisted.python.log: do not close the log because of invalid format string.
+ - Disabled automatic import of cBanana.
+ - Boolean support for twisted.persisted.marmalade.
+ - Refactor of plugin and application HOWTO documentation
+ - Async HOWTO expanded greatly.
+ - twisted.python.usage outputs the actual defaults, not passed in values.
+
+twisted.trial
+-------------
+ - Rewritten, a bunch of bugs fixed, a few more added.
+
+twisted.internet
+----------------
+ - Multi-listen UDP multicast support
+ - protocol.ClientCreator has a connectSSL.
+ - defer.deferredGenerator: allows you to write Deferred code w/o callbacks.
+ - Deferred.setTimeout is now deprecated.
+ - New defer.DeferredLock/DeferredSemaphore/DeferredQueue.
+ - Add utils.getProcessOutputAndValue to get stdout/err/value.
+
+ - Default DNS resolver is now non-blocking.
+ - Increased default TCP accept backlog from 5 to 50.
+ - Make buffering large amounts of TCP data work on Windows.
+ - Fixed SSL disconnect to not wait for remote host. Fixes issue with firefox.
+ - Separate state for Deferred finalization so that GC-loops preventing
+   finalization don't occur.
+ - Many Process bugfixes
+ - Processes spawned on Windows can successfully use sockets
+ - gtk2reactor can optionally use glib event loop instead of gtk
+ - gtk2reactor notifies gobject to initialize thread support
+ - Fix registering a streaming producer on a transport.
+ - Close client sockets explicitly after failed connections.
+ - ReconnectingClientFactory now continues attempting to reconnect after all
+   errors, not just those which are not UserErrors.
+
+twisted.protocols
+-----------------
+ - Portforward doesn't start reading from a client until a connection is made.
+ - Bugfixes in twisted.protocols.loopback
+ - Improve speed of twisted.protocols.LineReceiver.
+ - LineReceiver implements IProducer. (stop/pause/resumeProducing)
+ - SOCKSv4 properly closes connections
+
+twisted.enterprise
+------------------
+ - Add "new connection" callback to adbapi.ConnectionPool to allow for
+   custom db connection setup (cp_openfun)
+ - adbapi.ConnectionPool automatic reconnection support
+ - Don't log exceptions extraneously
+
+
+1.3.0 (2004-05-14)
+==================
+
+- Address objects for IPv4 and Unix addresses throughout twisted.internet.
+- Improved connected UDP APIs.
+- Refactored SSH client support.
+- Initial implementation of Windows I/O Completion Ports event loop.
+- Bug fixes and feature enhancements.
+- Nevow support for Lore (so your Lore documents can use Nevow directives).
+- This is the last release before Twisted begins splitting up.
diff --git a/ThirdParty/Twisted/twisted/topfiles/README b/ThirdParty/Twisted/twisted/topfiles/README
new file mode 100644
index 0000000..8a8c101
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/topfiles/README
@@ -0,0 +1,14 @@
+Twisted Core 12.3.0
+===================
+
+Twisted Core makes up the core parts of Twisted, including:
+
+ * Networking support (twisted.internet)
+ * Trial, the unit testing framework (twisted.trial)
+ * AMP, the Asynchronous Messaging Protocol (twisted.protocols.amp)
+ * Twisted Spread, a remote object system (twisted.spread)
+ * Utility code (twisted.python)
+ * Basic abstractions that multiple subprojects use
+   (twisted.cred, twisted.application, twisted.plugin)
+ * Database connectivity support (twisted.enterprise)
+ * A few basic protocols and protocol abstractions (twisted.protocols)
diff --git a/ThirdParty/Twisted/twisted/topfiles/setup.py b/ThirdParty/Twisted/twisted/topfiles/setup.py
new file mode 100755
index 0000000..2cf3cdd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/topfiles/setup.py
@@ -0,0 +1,96 @@
+#!/usr/bin/env python
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Distutils installer for Twisted.
+"""
+
+import os
+import sys
+
+if sys.version_info < (2, 6):
+    print >>sys.stderr, "You must use at least Python 2.6 for Twisted"
+    sys.exit(3)
+
+if os.path.exists('twisted'):
+    sys.path.insert(0, '.') # eek! need this to import twisted. sorry.
+from twisted import copyright
+from twisted.python.dist import setup, ConditionalExtension as Extension
+from twisted.python.dist import getPackages, getDataFiles, getScripts
+from twisted.python.dist import twisted_subprojects, _isCPython, _hasEpoll
+
+
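+# Editor's note: each ConditionalExtension below carries a 'condition'
+# callable (given the build command) that decides whether the extension is
+# compiled at all; for example, the iocpsupport extension is built only for
+# CPython on Windows.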
+extensions = [
+    Extension("twisted.test.raiser",
+              ["twisted/test/raiser.c"],
+              condition=lambda _: _isCPython),
+
+    Extension("twisted.python._epoll",
+              ["twisted/python/_epoll.c"],
+              condition=lambda builder: (_isCPython and _hasEpoll(builder) and
+                                         sys.version_info[:2] < (2, 6))),
+
+    Extension("twisted.internet.iocpreactor.iocpsupport",
+              ["twisted/internet/iocpreactor/iocpsupport/iocpsupport.c",
+               "twisted/internet/iocpreactor/iocpsupport/winsock_pointers.c"],
+              libraries=["ws2_32"],
+              condition=lambda _: _isCPython and sys.platform == "win32"),
+
+    Extension("twisted.python.sendmsg",
+              sources=["twisted/python/sendmsg.c"],
+              condition=lambda _: sys.platform != "win32"),
+]
+
+if sys.version_info[:2] <= (2, 6):
+    extensions.append(
+        Extension(
+            "twisted.python._initgroups",
+            ["twisted/python/_initgroups.c"]))
+
+
+# Figure out which plugins to include: all plugins except subproject ones
+subProjectsPlugins = ['twisted_%s.py' % subProject
+                      for subProject in twisted_subprojects]
+plugins = os.listdir(os.path.join(
+    os.path.dirname(os.path.abspath(copyright.__file__)), 'plugins'))
+plugins = [plugin[:-3] for plugin in plugins if plugin.endswith('.py') and
+           plugin not in subProjectsPlugins]
+
+
+
+setup_args = dict(
+    # metadata
+    name="Twisted Core",
+    version=copyright.version,
+    description="The core parts of the Twisted networking framework",
+    author="Twisted Matrix Laboratories",
+    author_email="twisted-python at twistedmatrix.com",
+    maintainer="Glyph Lefkowitz",
+    url="http://twistedmatrix.com/",
+    license="MIT",
+    long_description="""\
+This is the core of Twisted, including:
+ * Networking support (twisted.internet)
+ * Trial, the unit testing framework (twisted.trial)
+ * AMP, the Asynchronous Messaging Protocol (twisted.protocols.amp)
+ * Twisted Spread, a remote object system (twisted.spread)
+ * Utility code (twisted.python)
+ * Basic abstractions that multiple subprojects use
+   (twisted.cred, twisted.application, twisted.plugin)
+ * Database connectivity support (twisted.enterprise)
+ * A few basic protocols and protocol abstractions (twisted.protocols)
+""",
+
+    # build stuff
+    packages=getPackages('twisted',
+                         ignore=twisted_subprojects + ['plugins']),
+    plugins=plugins,
+    data_files=getDataFiles('twisted', ignore=twisted_subprojects),
+    conditionalExtensions=extensions,
+    scripts = getScripts(""),
+)
+
+
+if __name__ == '__main__':
+    setup(**setup_args)
diff --git a/ThirdParty/Twisted/twisted/trial/__init__.py b/ThirdParty/Twisted/twisted/trial/__init__.py
new file mode 100644
index 0000000..ad9423c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/__init__.py
@@ -0,0 +1,52 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+# Maintainer: Jonathan Lange
+
+"""
+Asynchronous unit testing framework.
+
+Trial extends Python's builtin C{unittest} to provide support for asynchronous
+tests.
+
+Maintainer: Jonathan Lange
+
+Trial strives to be compatible with other Python xUnit testing frameworks.
+"Compatibility" is a difficult things to define. In practice, it means that:
+
+ - L{twisted.trial.unittest.TestCase} objects should be able to be used by
+   other test runners without those runners requiring special support for
+   Trial tests.
+
+ - Tests that subclass the standard library C{TestCase} and don't do anything
+   "too weird" should be able to be discoverable and runnable by the Trial
+   test runner without the authors of those tests having to jump through
+   hoops.
+
+ - Tests that implement the interface provided by the standard library
+   C{TestCase} should be runnable by the Trial runner.
+
+ - The Trial test runner and Trial L{unittest.TestCase} objects ought to be
+   able to use standard library C{TestResult} objects, and third party
+   C{TestResult} objects based on the standard library.
+
+This list is not necessarily exhaustive -- compatibility is hard to define.
+Contributors who discover more helpful ways of defining compatibility are
+encouraged to update this document.
+
+
+Examples:
+
+B{Timeouts} for tests should be implemented in the runner. If this is done,
+then timeouts could work for third-party TestCase objects as well as for
+L{twisted.trial.unittest.TestCase} objects. Further, Twisted C{TestCase}
+objects will run in other runners without timing out.
+See U{http://twistedmatrix.com/trac/ticket/2675}.
+
+Running tests in a temporary directory should be a feature of the test case,
+because often tests themselves rely on this behaviour. If the feature is
+implemented in the runner, then tests will change behaviour (possibly
+breaking) when run in a different test runner. Further, many tests don't even
+care about the filesystem.
+See U{http://twistedmatrix.com/trac/ticket/2916}.
+"""
diff --git a/ThirdParty/Twisted/twisted/trial/_asyncrunner.py b/ThirdParty/Twisted/twisted/trial/_asyncrunner.py
new file mode 100644
index 0000000..d1d8708
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_asyncrunner.py
@@ -0,0 +1,235 @@
+# -*- test-case-name: twisted.trial.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Infrastructure for test running and suites.
+"""
+
+import doctest
+import warnings, gc
+
+from twisted.python import components
+
+from twisted.trial import itrial, reporter
+from twisted.trial._synctest import _logObserver
+
+pyunit = __import__('unittest')
+
+from zope.interface import implements
+
+def suiteVisit(suite, visitor):
+    """
+    Visit each test in C{suite} with C{visitor}.
+
+    Deprecated in Twisted 8.0.
+
+    @param visitor: A callable which takes a single argument, the L{TestCase}
+    instance to visit.
+    @return: None
+    """
+    warnings.warn("Test visitors deprecated in Twisted 8.0",
+                  category=DeprecationWarning)
+    for case in suite._tests:
+        visit = getattr(case, 'visit', None)
+        if visit is not None:
+            visit(visitor)
+        elif isinstance(case, pyunit.TestCase):
+            case = itrial.ITestCase(case)
+            case.visit(visitor)
+        elif isinstance(case, pyunit.TestSuite):
+            suiteVisit(case, visitor)
+        else:
+            case.visit(visitor)
+
+
+
+class TestSuite(pyunit.TestSuite):
+    """
+    Extend the standard library's C{TestSuite} with support for the visitor
+    pattern and a consistently overrideable C{run} method.
+    """
+
+    visit = suiteVisit
+
+    def __call__(self, result):
+        return self.run(result)
+
+
+    def run(self, result):
+        """
+        Call C{run} on every member of the suite.
+        """
+        # we implement this because Python 2.3 unittest defines this code
+        # in __call__, whereas 2.4 defines the code in run.
+        for test in self._tests:
+            if result.shouldStop:
+                break
+            test(result)
+        return result
+
+
+
+class TestDecorator(components.proxyForInterface(itrial.ITestCase,
+                                                 "_originalTest")):
+    """
+    Decorator for test cases.
+
+    @param _originalTest: The wrapped test instance.
+    @type _originalTest: A provider of L{itrial.ITestCase}
+    """
+
+    implements(itrial.ITestCase)
+
+
+    def __call__(self, result):
+        """
+        Run the unit test.
+
+        @param result: A TestResult object.
+        """
+        return self.run(result)
+
+
+    def run(self, result):
+        """
+        Run the unit test.
+
+        @param result: A TestResult object.
+        """
+        return self._originalTest.run(
+            reporter._AdaptedReporter(result, self.__class__))
+
+
+
+def _clearSuite(suite):
+    """
+    Clear all tests from C{suite}.
+
+    This messes with the internals of C{suite}. In particular, it assumes that
+    the suite keeps all of its tests in a list in an instance variable called
+    C{_tests}.
+    """
+    suite._tests = []
+
+
+def decorate(test, decorator):
+    """
+    Decorate all test cases in C{test} with C{decorator}.
+
+    C{test} can be a test case or a test suite. If it is a test suite, then the
+    structure of the suite is preserved.
+
+    L{decorate} tries to preserve the class of the test suites it finds, but
+    assumes the presence of the C{_tests} attribute on the suite.
+
+    @param test: The C{TestCase} or C{TestSuite} to decorate.
+
+    @param decorator: A unary callable used to decorate C{TestCase}s.
+
+    @return: A decorated C{TestCase} or a C{TestSuite} containing decorated
+        C{TestCase}s.
+    """
+
+    try:
+        tests = iter(test)
+    except TypeError:
+        return decorator(test)
+
+    # At this point, we know that 'test' is a test suite.
+    _clearSuite(test)
+
+    for case in tests:
+        test.addTest(decorate(case, decorator))
+    return test
+
+
+
+class _PyUnitTestCaseAdapter(TestDecorator):
+    """
+    Adapt from pyunit.TestCase to ITestCase.
+    """
+
+
+    def visit(self, visitor):
+        """
+        Deprecated in Twisted 8.0.
+        """
+        warnings.warn("Test visitors deprecated in Twisted 8.0",
+                      category=DeprecationWarning)
+        visitor(self)
+
+
+
+class _BrokenIDTestCaseAdapter(_PyUnitTestCaseAdapter):
+    """
+    Adapter for pyunit-style C{TestCase} subclasses that have undesirable id()
+    methods. That is, C{unittest.FunctionTestCase} and C{unittest.DocTestCase}.
+    """
+
+    def id(self):
+        """
+        Return the fully-qualified Python name of the doctest.
+        """
+        testID = self._originalTest.shortDescription()
+        if testID is not None:
+            return testID
+        return self._originalTest.id()
+
+
+
+class _ForceGarbageCollectionDecorator(TestDecorator):
+    """
+    Forces garbage collection to be run before and after the test. Any errors
+    logged during the post-test collection are added to the test result as
+    errors.
+    """
+
+    def run(self, result):
+        gc.collect()
+        TestDecorator.run(self, result)
+        _logObserver._add()
+        gc.collect()
+        for error in _logObserver.getErrors():
+            result.addError(self, error)
+        _logObserver.flushErrors()
+        _logObserver._remove()
+
+
+components.registerAdapter(
+    _PyUnitTestCaseAdapter, pyunit.TestCase, itrial.ITestCase)
+
+
+components.registerAdapter(
+    _BrokenIDTestCaseAdapter, pyunit.FunctionTestCase, itrial.ITestCase)
+
+
+_docTestCase = getattr(doctest, 'DocTestCase', None)
+if _docTestCase:
+    components.registerAdapter(
+        _BrokenIDTestCaseAdapter, _docTestCase, itrial.ITestCase)
+
+
+def _iterateTests(testSuiteOrCase):
+    """
+    Iterate through all of the test cases in C{testSuiteOrCase}.
+    """
+    try:
+        suite = iter(testSuiteOrCase)
+    except TypeError:
+        yield testSuiteOrCase
+    else:
+        for test in suite:
+            for subtest in _iterateTests(test):
+                yield subtest
+
+
+
+# Support for Python 2.3
+try:
+    iter(pyunit.TestSuite())
+except TypeError:
+    # Python 2.3's TestSuite doesn't support iteration. Let's monkey patch it!
+    def __iter__(self):
+        return iter(self._tests)
+    pyunit.TestSuite.__iter__ = __iter__
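+
+
+# Editor's note: an illustrative sketch, not part of upstream Twisted, showing
+# how decorate() combines with the garbage-collection decorator defined above.
+# The function name _exampleForceGarbageCollection is hypothetical.
+def _exampleForceGarbageCollection(suiteOrCase):
+    # Wrap every TestCase found in suiteOrCase so that gc.collect() runs
+    # before and after it; the structure of nested suites is preserved.
+    return decorate(suiteOrCase, _ForceGarbageCollectionDecorator)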
diff --git a/ThirdParty/Twisted/twisted/trial/_asynctest.py b/ThirdParty/Twisted/twisted/trial/_asynctest.py
new file mode 100644
index 0000000..a59b305
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_asynctest.py
@@ -0,0 +1,415 @@
+# -*- test-case-name: twisted.trial.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Things likely to be used by writers of unit tests.
+
+Maintainer: Jonathan Lange
+"""
+
+from __future__ import division, absolute_import
+
+import warnings
+
+from zope.interface import implementer
+
+# We can't import reactor at module-level because this code runs before trial
+# installs a user-specified reactor, installing the default reactor and
+# breaking reactor installation. See also #6047.
+from twisted.internet import defer, _utilspy3 as utils
+from twisted.python import failure
+
+from twisted.trial import itrial, util
+from twisted.trial._synctest import (
+    FailTest, SkipTest, SynchronousTestCase)
+
+_wait_is_running = []
+
+ at implementer(itrial.ITestCase)
+class TestCase(SynchronousTestCase):
+    """
+    A unit test. The atom of the unit testing universe.
+
+    This class extends L{SynchronousTestCase} which extends C{unittest.TestCase}
+    from the standard library. The main feature is the ability to return
+    C{Deferred}s from tests and fixture methods and to have the suite wait for
+    those C{Deferred}s to fire.  Also provides new assertions such as
+    L{assertFailure}.
+
+    @ivar timeout: A real number of seconds. If set, the test will
+    raise an error if it takes longer than C{timeout} seconds.
+    If not set, util.DEFAULT_TIMEOUT_DURATION is used.
+    """
+
+    def __init__(self, methodName='runTest'):
+        """
+        Construct an asynchronous test case for C{methodName}.
+
+        @param methodName: The name of a method on C{self}. This method should
+        be a unit test. That is, it should be a short method that calls some of
+        the assert* methods. If C{methodName} is unspecified,
+        L{SynchronousTestCase.runTest} will be used as the test method. This is
+        mostly useful for testing Trial.
+        """
+        super(TestCase, self).__init__(methodName)
+
+
+    def assertFailure(self, deferred, *expectedFailures):
+        """
+        Fail if C{deferred} does not errback with one of C{expectedFailures}.
+        Returns the original Deferred with callbacks added. You will need
+        to return this Deferred from your test case.
+        """
+        def _cb(ignore):
+            raise self.failureException(
+                "did not catch an error, instead got %r" % (ignore,))
+
+        def _eb(failure):
+            if failure.check(*expectedFailures):
+                return failure.value
+            else:
+                output = ('\nExpected: %r\nGot:\n%s'
+                          % (expectedFailures, str(failure)))
+                raise self.failureException(output)
+        return deferred.addCallbacks(_cb, _eb)
+    failUnlessFailure = assertFailure
+
+
+    def _run(self, methodName, result):
+        from twisted.internet import reactor
+        timeout = self.getTimeout()
+        def onTimeout(d):
+            e = defer.TimeoutError("%r (%s) still running at %s secs"
+                % (self, methodName, timeout))
+            f = failure.Failure(e)
+            # try to errback the deferred that the test returns (for no gorram
+            # reason) (see issue1005 and test_errorPropagation in
+            # test_deferred)
+            try:
+                d.errback(f)
+            except defer.AlreadyCalledError:
+                # if the deferred has been called already but the *back chain
+                # is still unfinished, crash the reactor and report timeout
+                # error ourself.
+                reactor.crash()
+                self._timedOut = True # see self._wait
+                todo = self.getTodo()
+                if todo is not None and todo.expected(f):
+                    result.addExpectedFailure(self, f, todo)
+                else:
+                    result.addError(self, f)
+        onTimeout = utils.suppressWarnings(
+            onTimeout, util.suppress(category=DeprecationWarning))
+        method = getattr(self, methodName)
+        d = defer.maybeDeferred(
+            utils.runWithWarningsSuppressed, self._getSuppress(), method)
+        call = reactor.callLater(timeout, onTimeout, d)
+        d.addBoth(lambda x : call.active() and call.cancel() or x)
+        return d
+
+
+    def __call__(self, *args, **kwargs):
+        return self.run(*args, **kwargs)
+
+
+    def deferSetUp(self, ignored, result):
+        d = self._run('setUp', result)
+        d.addCallbacks(self.deferTestMethod, self._ebDeferSetUp,
+                       callbackArgs=(result,),
+                       errbackArgs=(result,))
+        return d
+
+
+    def _ebDeferSetUp(self, failure, result):
+        if failure.check(SkipTest):
+            result.addSkip(self, self._getSkipReason(self.setUp, failure.value))
+        else:
+            result.addError(self, failure)
+            if failure.check(KeyboardInterrupt):
+                result.stop()
+        return self.deferRunCleanups(None, result)
+
+
+    def deferTestMethod(self, ignored, result):
+        d = self._run(self._testMethodName, result)
+        d.addCallbacks(self._cbDeferTestMethod, self._ebDeferTestMethod,
+                       callbackArgs=(result,),
+                       errbackArgs=(result,))
+        d.addBoth(self.deferRunCleanups, result)
+        d.addBoth(self.deferTearDown, result)
+        return d
+
+
+    def _cbDeferTestMethod(self, ignored, result):
+        if self.getTodo() is not None:
+            result.addUnexpectedSuccess(self, self.getTodo())
+        else:
+            self._passed = True
+        return ignored
+
+
+    def _ebDeferTestMethod(self, f, result):
+        todo = self.getTodo()
+        if todo is not None and todo.expected(f):
+            result.addExpectedFailure(self, f, todo)
+        elif f.check(self.failureException, FailTest):
+            result.addFailure(self, f)
+        elif f.check(KeyboardInterrupt):
+            result.addError(self, f)
+            result.stop()
+        elif f.check(SkipTest):
+            result.addSkip(
+                self,
+                self._getSkipReason(getattr(self, self._testMethodName), f.value))
+        else:
+            result.addError(self, f)
+
+
+    def deferTearDown(self, ignored, result):
+        d = self._run('tearDown', result)
+        d.addErrback(self._ebDeferTearDown, result)
+        return d
+
+
+    def _ebDeferTearDown(self, failure, result):
+        result.addError(self, failure)
+        if failure.check(KeyboardInterrupt):
+            result.stop()
+        self._passed = False
+
+
+    def deferRunCleanups(self, ignored, result):
+        """
+        Run any scheduled cleanups and report errors (if any) to the result
+        object.
+        """
+        d = self._runCleanups()
+        d.addCallback(self._cbDeferRunCleanups, result)
+        return d
+
+
+    def _cbDeferRunCleanups(self, cleanupResults, result):
+        for flag, failure in cleanupResults:
+            if flag == defer.FAILURE:
+                result.addError(self, failure)
+                if failure.check(KeyboardInterrupt):
+                    result.stop()
+                self._passed = False
+
+
+    def _cleanUp(self, result):
+        try:
+            clean = util._Janitor(self, result).postCaseCleanup()
+            if not clean:
+                self._passed = False
+        except:
+            result.addError(self, failure.Failure())
+            self._passed = False
+        for error in self._observer.getErrors():
+            result.addError(self, error)
+            self._passed = False
+        self.flushLoggedErrors()
+        self._removeObserver()
+        if self._passed:
+            result.addSuccess(self)
+
+
+    def _classCleanUp(self, result):
+        try:
+            util._Janitor(self, result).postClassCleanup()
+        except:
+            result.addError(self, failure.Failure())
+
+
+    def _makeReactorMethod(self, name):
+        """
+        Create a method which wraps the reactor method C{name}. The new
+        method issues a deprecation warning and calls the original.
+        """
+        def _(*a, **kw):
+            warnings.warn("reactor.%s cannot be used inside unit tests. "
+                          "In the future, using %s will fail the test and may "
+                          "crash or hang the test run."
+                          % (name, name),
+                          stacklevel=2, category=DeprecationWarning)
+            return self._reactorMethods[name](*a, **kw)
+        return _
+
+
+    def _deprecateReactor(self, reactor):
+        """
+        Deprecate C{iterate}, C{crash} and C{stop} on C{reactor}. That is,
+        each method is wrapped in a function that issues a deprecation
+        warning, then calls the original.
+
+        @param reactor: The Twisted reactor.
+        """
+        self._reactorMethods = {}
+        for name in ['crash', 'iterate', 'stop']:
+            self._reactorMethods[name] = getattr(reactor, name)
+            setattr(reactor, name, self._makeReactorMethod(name))
+
+
+    def _undeprecateReactor(self, reactor):
+        """
+        Restore the deprecated reactor methods. Undoes what
+        L{_deprecateReactor} did.
+
+        @param reactor: The Twisted reactor.
+        """
+        for name, method in self._reactorMethods.items():
+            setattr(reactor, name, method)
+        self._reactorMethods = {}
+
+
+    def _runCleanups(self):
+        """
+        Run the cleanups added with L{addCleanup} in order.
+
+        @return: A C{Deferred} that fires when all cleanups are run.
+        """
+        def _makeFunction(f, args, kwargs):
+            return lambda: f(*args, **kwargs)
+        callables = []
+        while len(self._cleanups) > 0:
+            f, args, kwargs = self._cleanups.pop()
+            callables.append(_makeFunction(f, args, kwargs))
+        return util._runSequentially(callables)
+
+
+    def _runFixturesAndTest(self, result):
+        """
+        Really run C{setUp}, the test method, and C{tearDown}.  Any of these may
+        return L{defer.Deferred}s. After they complete, do some reactor cleanup.
+
+        @param result: A L{TestResult} object.
+        """
+        from twisted.internet import reactor
+        self._deprecateReactor(reactor)
+        self._timedOut = False
+        try:
+            d = self.deferSetUp(None, result)
+            try:
+                self._wait(d)
+            finally:
+                self._cleanUp(result)
+                self._classCleanUp(result)
+        finally:
+            self._undeprecateReactor(reactor)
+
+
+    def addCleanup(self, f, *args, **kwargs):
+        """
+        Extend the base cleanup feature with support for cleanup functions which
+        return Deferreds.
+
+        If the function C{f} returns a Deferred, C{TestCase} will wait until the
+        Deferred has fired before proceeding to the next function.
+        """
+        return super(TestCase, self).addCleanup(f, *args, **kwargs)
+
+
+    def getSuppress(self):
+        return self._getSuppress()
+
+
+    def getTimeout(self):
+        """
+        Returns the timeout value set on this test. Checks on the instance
+        first, then the class, then the module, then packages. As soon as it
+        finds something with a C{timeout} attribute, returns that. Returns
+        L{util.DEFAULT_TIMEOUT_DURATION} if it cannot find anything. See
+        L{TestCase} docstring for more details.
+        """
+        timeout =  util.acquireAttribute(self._parents, 'timeout',
+                                         util.DEFAULT_TIMEOUT_DURATION)
+        try:
+            return float(timeout)
+        except (ValueError, TypeError):
+            # XXX -- this is here because sometimes people will have methods
+            # called 'timeout', or set timeout to 'orange', or something
+            # Particularly, test_news.NewsTestCase and ReactorCoreTestCase
+            # both do this.
+            warnings.warn("'timeout' attribute needs to be a number.",
+                          category=DeprecationWarning)
+            return util.DEFAULT_TIMEOUT_DURATION
+
+
+    def visit(self, visitor):
+        """
+        Visit this test case. Call C{visitor} with C{self} as a parameter.
+
+        Deprecated in Twisted 8.0.
+
+        @param visitor: A callable which expects a single parameter: a test
+        case.
+
+        @return: None
+        """
+        warnings.warn("Test visitors deprecated in Twisted 8.0",
+                      category=DeprecationWarning)
+        visitor(self)
+
+
+    def _wait(self, d, running=_wait_is_running):
+        """Take a Deferred that only ever callbacks. Block until it happens.
+        """
+        if running:
+            raise RuntimeError("_wait is not reentrant")
+
+        from twisted.internet import reactor
+        results = []
+        def append(any):
+            if results is not None:
+                results.append(any)
+        def crash(ign):
+            if results is not None:
+                reactor.crash()
+        crash = utils.suppressWarnings(
+            crash, util.suppress(message=r'reactor\.crash cannot be used.*',
+                                 category=DeprecationWarning))
+        def stop():
+            reactor.crash()
+        stop = utils.suppressWarnings(
+            stop, util.suppress(message=r'reactor\.crash cannot be used.*',
+                                category=DeprecationWarning))
+
+        running.append(None)
+        try:
+            d.addBoth(append)
+            if results:
+                # d might have already been fired, in which case append is
+                # called synchronously. Avoid any reactor stuff.
+                return
+            d.addBoth(crash)
+            reactor.stop = stop
+            try:
+                reactor.run()
+            finally:
+                del reactor.stop
+
+            # If the reactor was crashed elsewhere due to a timeout, hopefully
+            # that crasher also reported an error. Just return.
+            # _timedOut is most likely to be set when d has fired but hasn't
+            # completed its callback chain (see self._run)
+            if results or self._timedOut: #defined in run() and _run()
+                return
+
+            # If the timeout didn't happen, and we didn't get a result or
+            # a failure, then the user probably aborted the test, so let's
+            # just raise KeyboardInterrupt.
+
+            # FIXME: imagine this:
+            # web/test/test_webclient.py:
+            # exc = self.assertRaises(error.Error, wait, method(url))
+            #
+            # wait() will raise KeyboardInterrupt, and assertRaises will
+            # swallow it. Therefore, wait() raising KeyboardInterrupt is
+            # insufficient to stop trial. A suggested solution is to have
+            # this code set a "stop trial" flag, or otherwise notify trial
+            # that it should really try to stop as soon as possible.
+            raise KeyboardInterrupt()
+        finally:
+            results = None
+            running.pop()
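+
+
+# Editor's note: an illustrative usage sketch, not part of upstream Twisted,
+# for the assertFailure helper documented above.  The class and method names
+# are hypothetical.
+def _exampleAssertFailureUsage():
+    from twisted.internet import defer
+
+    class DivisionTests(TestCase):
+        def test_divideByZero(self):
+            d = defer.maybeDeferred(lambda: 1 / 0)
+            # assertFailure returns the Deferred; the test must return it so
+            # that Trial waits for the expected ZeroDivisionError.
+            return self.assertFailure(d, ZeroDivisionError)
+
+    return DivisionTests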
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/__init__.py b/ThirdParty/Twisted/twisted/trial/_dist/__init__.py
new file mode 100644
index 0000000..502e840
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/__init__.py
@@ -0,0 +1,47 @@
+# -*- test-case-name: twisted.trial._dist.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This package implements the distributed Trial test runner:
+
+  - The L{twisted.trial._dist.disttrial} module implements a test runner which
+    runs in a manager process and can launch additional worker processes in
+    which to run tests and gather up results from all of them.
+
+  - The L{twisted.trial._dist.options} module defines command line options used
+    to configure the distributed test runner.
+
+  - The L{twisted.trial._dist.managercommands} module defines AMP commands
+    which are sent from worker processes back to the manager process to report
+    the results of tests.
+
+  - The L{twisted.trial._dist.workercommands} module defines AMP commands which
+    are sent from the manager process to the worker processes to control the
+    execution of tests there.
+
+  - The L{twisted.trial._dist.distreporter} module defines a proxy for
+    L{twisted.trial.itrial.IReporter} which enforces the typical requirement
+    that results be passed to a reporter for only one test at a time, allowing
+    any reporter to be used despite disttrial's simultaneously running
+    tests.
+
+  - The L{twisted.trial._dist.workerreporter} module implements a
+    L{twisted.trial.itrial.IReporter} which is used by worker processes and
+    reports results back to the manager process using AMP commands.
+
+  - The L{twisted.trial._dist.workertrial} module is a runnable script which is
+    the main point for worker processes.
+
+  - The L{twisted.trial._dist.worker} module defines the manager's AMP
+    protocol for accepting results from worker processes and a process protocol
+    for use running workers as local child processes (as opposed to
+    distributing them to another host).
+
+ at since: 12.3
+"""
+
+# File descriptor numbers used to set up pipes with the worker.
+_WORKER_AMP_STDIN = 3
+
+_WORKER_AMP_STDOUT = 4
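+
+# Editor's note: as used in L{twisted.trial._dist.disttrial}, the manager
+# process wires these descriptors into each spawned worker: it writes AMP
+# requests to the worker's fd 3 (_WORKER_AMP_STDIN) and reads AMP responses
+# from the worker's fd 4 (_WORKER_AMP_STDOUT), leaving fds 0-2 for ordinary
+# standard input, output and error.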
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/_preamble.py b/ThirdParty/Twisted/twisted/trial/_dist/_preamble.py
new file mode 100644
index 0000000..aa41bc5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/_preamble.py
@@ -0,0 +1,23 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# This makes sure that users don't have to set up their environment
+# specially in order to run trial -j properly.
+
+# This is a copy of the bin/_preamble.py script because it's not clear how to
+# use the functionality for both things without having a copy.
+
+import sys, os
+
+path = os.path.abspath(sys.argv[0])
+while os.path.dirname(path) != path:
+    if os.path.exists(os.path.join(path, 'twisted', '__init__.py')):
+        sys.path.insert(0, path)
+        break
+    path = os.path.dirname(path)
+
+# begin chdir armor
+sys.path[:] = map(os.path.abspath, sys.path)
+# end chdir armor
+
+sys.path.insert(0, os.path.abspath(os.getcwd()))
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/distreporter.py b/ThirdParty/Twisted/twisted/trial/_dist/distreporter.py
new file mode 100644
index 0000000..bcbc43d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/distreporter.py
@@ -0,0 +1,94 @@
+# -*- test-case-name: twisted.trial._dist.test.test_distreporter -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Reporters are not designed to support concurrent test running, so this module
+buffers each test's results and only sends them to the wrapped reporter once
+that test is over.
+
+ at since: 12.3
+"""
+
+from zope.interface import implements
+from twisted.trial.itrial import IReporter
+from twisted.python.components import proxyForInterface
+
+
+
+class DistReporter(proxyForInterface(IReporter)):
+    """
+    See module docstring.
+    """
+
+    implements(IReporter)
+
+    def __init__(self, original):
+        super(DistReporter, self).__init__(original)
+        self.running = {}
+
+
+    def startTest(self, test):
+        """
+        Queue test starting.
+        """
+        self.running[test.id()] = []
+        self.running[test.id()].append((self.original.startTest, test))
+
+
+    def addFailure(self, test, fail):
+        """
+        Queue adding a failure.
+        """
+        self.running[test.id()].append((self.original.addFailure,
+                                        test, fail))
+
+
+    def addError(self, test, error):
+        """
+        Queue error adding.
+        """
+        self.running[test.id()].append((self.original.addError,
+                                        test, error))
+
+
+    def addSkip(self, test, reason):
+        """
+        Queue adding a skip.
+        """
+        self.running[test.id()].append((self.original.addSkip,
+                                        test, reason))
+
+
+    def addUnexpectedSuccess(self, test, todo):
+        """
+        Queue adding an unexpected success.
+        """
+        self.running[test.id()].append((self.original.addUnexpectedSuccess,
+                                        test, todo))
+
+
+    def addExpectedFailure(self, test, error, todo):
+        """
+        Queue adding an expected failure.
+        """
+        self.running[test.id()].append((self.original.addExpectedFailure,
+                                        test, error, todo))
+
+
+    def addSuccess(self, test):
+        """
+        Queue adding a success.
+        """
+        self.running[test.id()].append((self.original.addSuccess, test))
+
+
+    def stopTest(self, test):
+        """
+        Queue stopping the test, then unroll the queue.
+        """
+        self.running[test.id()].append((self.original.stopTest, test))
+        for step in self.running[test.id()]:
+            apply(step[0], step[1:])
+        del self.running[test.id()]
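+
+
+# Editor's note: a small illustrative sketch, not part of upstream Twisted,
+# demonstrating the buffering behaviour described in the module docstring:
+# nothing reaches the wrapped reporter until stopTest() unrolls the queue.
+# The function name _exampleBuffering is hypothetical.
+def _exampleBuffering():
+    from cStringIO import StringIO
+    from twisted.trial.reporter import TreeReporter
+    from twisted.trial.unittest import TestCase
+
+    stream = StringIO()
+    dist = DistReporter(TreeReporter(stream))
+    test = TestCase()
+    dist.startTest(test)
+    dist.addSuccess(test)
+    assert stream.getvalue() == ""   # results are still queued
+    dist.stopTest(test)
+    assert stream.getvalue() != ""   # the queue has been replayed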
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/disttrial.py b/ThirdParty/Twisted/twisted/trial/_dist/disttrial.py
new file mode 100644
index 0000000..2a76aee
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/disttrial.py
@@ -0,0 +1,252 @@
+# -*- test-case-name: twisted.trial._dist.test.test_disttrial -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module contains the trial distributed runner, the management class
+responsible for coordinating all of trial's behavior at the highest level.
+
+ at since: 12.3
+"""
+
+import sys, os
+
+from twisted.python.filepath import FilePath
+from twisted.python.modules import theSystemPath
+from twisted.internet.defer import DeferredList
+from twisted.internet.task import cooperate
+
+from twisted.trial.util import _unusedTestDirectory
+from twisted.trial.unittest import _iterateTests
+from twisted.trial._dist.worker import LocalWorker, LocalWorkerAMP
+from twisted.trial._dist.distreporter import DistReporter
+from twisted.trial.reporter import UncleanWarningsReporterWrapper
+from twisted.trial._dist import _WORKER_AMP_STDIN, _WORKER_AMP_STDOUT
+
+
+
+class DistTrialRunner(object):
+    """
+    A specialized runner for distributed trial. The runner launches a number of
+    local worker processes which will run tests.
+
+    @ivar _workerNumber: the number of workers to be spawned.
+    @type _workerNumber: C{int}
+
+    @ivar _stream: stream which the reporter will use.
+
+    @ivar _reporterFactory: the reporter class to be used.
+    """
+    _distReporterFactory = DistReporter
+
+    def _makeResult(self):
+        """
+        Create a reporter with the reporter factory and wrap it in a
+        L{DistReporter}.
+        """
+        reporter = self._reporterFactory(self._stream, self._tbformat,
+                                         realtime=self._rterrors)
+        if self._uncleanWarnings:
+            reporter = UncleanWarningsReporterWrapper(reporter)
+        return self._distReporterFactory(reporter)
+
+
+    def __init__(self, reporterFactory, workerNumber, workerArguments,
+                 stream=None,
+                 tracebackFormat='default',
+                 realTimeErrors=False,
+                 uncleanWarnings=False,
+                 logfile='test.log',
+                 workingDirectory='_trial_temp'):
+        self._workerNumber = workerNumber
+        self._workerArguments = workerArguments
+        self._reporterFactory = reporterFactory
+        if stream is None:
+            stream = sys.stdout
+        self._stream = stream
+        self._tbformat = tracebackFormat
+        self._rterrors = realTimeErrors
+        self._uncleanWarnings = uncleanWarnings
+        self._result = None
+        self._workingDirectory = workingDirectory
+        self._logFile = logfile
+        self._logFileObserver = None
+        self._logFileObject = None
+        self._logWarnings = False
+
+
+    def writeResults(self, result):
+        """
+        Write the final outcome of the test run to C{result}.
+
+        @param result: A C{TestResult} which will print errors and the summary.
+        """
+        result.done()
+
+
+    def createLocalWorkers(self, protocols, workingDirectory):
+        """
+        Create local worker protocol instances and return them.
+
+        @param protocols: An iterable of L{LocalWorkerAMP} instances.
+
+        @param workingDirectory: The base path in which we should run the
+            workers.
+        @type workingDirectory: C{str}
+
+        @return: A list of C{LocalWorker} instances, one for each protocol.
+        """
+        return [LocalWorker(protocol,
+                            os.path.join(workingDirectory, str(x)),
+                            self._logFile)
+                for x, protocol in enumerate(protocols)]
+
+
+    def launchWorkerProcesses(self, spawner, protocols, arguments):
+        """
+        Spawn processes from a list of process protocols.
+
+        @param spawner: A C{IReactorProcess.spawnProcess} implementation.
+
+        @param protocols: An iterable of C{ProcessProtocol} instances.
+
+        @param arguments: Extra arguments passed to the processes.
+        """
+        workertrialPath = theSystemPath[
+            'twisted.trial._dist.workertrial'].filePath.path
+        childFDs = {0: 'w', 1: 'r', 2: 'r', _WORKER_AMP_STDIN: 'w',
+                    _WORKER_AMP_STDOUT: 'r'}
+        for worker in protocols:
+            args = [sys.executable, workertrialPath]
+            args.extend(arguments)
+            spawner(worker, sys.executable, args=args, childFDs=childFDs,
+                    env=os.environ)
+
+
+    def _driveWorker(self, worker, result, testCases, cooperate):
+        """
+        Drive a L{LocalWorkerAMP} instance, iterating the tests and calling
+        C{run} for every one of them.
+
+        @param worker: The L{LocalWorkerAMP} to drive.
+
+        @param result: The global L{DistReporter} instance.
+
+        @param testCases: The global list of tests to iterate.
+
+        @param cooperate: The cooperate function to use, to be customized in
+            tests.
+        @type cooperate: C{function}
+
+        @return: A C{Deferred} firing when all the tests are finished.
+        """
+
+        def resultErrback(error, case):
+            result.original.addFailure(case, error)
+            return error
+
+        def task(case):
+            d = worker.run(case, result)
+            d.addErrback(resultErrback, case)
+            return d
+
+        return cooperate(task(case) for case in testCases).whenDone()
+
+
+    def run(self, suite, reactor=None, cooperate=cooperate,
+            untilFailure=False):
+        """
+        Spawn local worker processes and load tests. After that, run them.
+
+        @param suite: A test suite to be run.
+
+        @param reactor: The reactor to use, to be customized in tests.
+        @type reactor: A provider of
+            L{twisted.internet.interfaces.IReactorProcess}
+
+        @param cooperate: The cooperate function to use, to be customized in
+            tests.
+        @type cooperate: C{function}
+
+        @param untilFailure: If C{True}, continue to run the tests until they
+            fail.
+        @type untilFailure: C{bool}.
+
+        @return: The test result.
+        @rtype: L{DistReporter}
+        """
+        if reactor is None:
+            from twisted.internet import reactor
+        result = self._makeResult()
+        count = suite.countTestCases()
+        self._stream.write("Running %d tests.\n" % (count,))
+
+        if not count:
+            # Take a shortcut if there are no tests
+            suite.run(result.original)
+            self.writeResults(result)
+            return result
+
+        testDir, testDirLock = _unusedTestDirectory(
+            FilePath(self._workingDirectory))
+        workerNumber = min(count, self._workerNumber)
+        ampWorkers = [LocalWorkerAMP() for x in xrange(workerNumber)]
+        workers = self.createLocalWorkers(ampWorkers, testDir.path)
+        processEndDeferreds = [worker.endDeferred for worker in workers]
+        self.launchWorkerProcesses(reactor.spawnProcess, workers,
+                                   self._workerArguments)
+
+        def runTests():
+            testCases = iter(list(_iterateTests(suite)))
+
+            workerDeferreds = []
+            for worker in ampWorkers:
+                workerDeferreds.append(
+                    self._driveWorker(worker, result, testCases,
+                                      cooperate=cooperate))
+            return DeferredList(workerDeferreds, consumeErrors=True,
+                                fireOnOneErrback=True)
+
+        stopping = []
+
+        def nextRun(ign):
+            self.writeResults(result)
+            if not untilFailure:
+                return
+            if not result.wasSuccessful():
+                return
+            d = runTests()
+            return d.addCallback(nextRun)
+
+        def stop(ign):
+            testDirLock.unlock()
+            if not stopping:
+                stopping.append(None)
+                reactor.stop()
+
+        def beforeShutDown():
+            if not stopping:
+                stopping.append(None)
+                d = DeferredList(processEndDeferreds, consumeErrors=True)
+                return d.addCallback(continueShutdown)
+
+        def continueShutdown(ign):
+            self.writeResults(result)
+            return ign
+
+        d = runTests()
+        d.addCallback(nextRun)
+        d.addBoth(stop)
+
+        reactor.addSystemEventTrigger('before', 'shutdown', beforeShutDown)
+        reactor.run()
+
+        return result
+
+
+    def runUntilFailure(self, suite):
+        """
+        Run the tests with local worker processes until they fail.
+
+        @param suite: A test suite to be run.
+        """
+        return self.run(suite, untilFailure=True)
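+
+
+# Editor's note: an illustrative sketch, not part of upstream Twisted, of how
+# a distributed run could be kicked off; the argument values (TreeReporter,
+# four workers, no extra worker arguments) are hypothetical.
+def _exampleDistributedRun(suite):
+    from twisted.trial.reporter import TreeReporter
+    runner = DistTrialRunner(TreeReporter, 4, [])
+    # run() spawns the local workers, runs the reactor until the suite is
+    # finished and returns the DistReporter holding the results.
+    return runner.run(suite)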
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/managercommands.py b/ThirdParty/Twisted/twisted/trial/_dist/managercommands.py
new file mode 100644
index 0000000..35411a6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/managercommands.py
@@ -0,0 +1,76 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Commands for reporting test success or failure to the manager.
+
+ at since: 12.3
+"""
+
+from twisted.protocols.amp import Command, String, Boolean, ListOf
+
+
+
+class AddSuccess(Command):
+    """
+    Add a success.
+    """
+    arguments = [('testName', String())]
+    response = [('success', Boolean())]
+
+
+
+class AddError(Command):
+    """
+    Add an error.
+    """
+    arguments = [('testName', String()), ('error', String()),
+                 ('errorClass', String()), ('frames', ListOf(String()))]
+    response = [('success', Boolean())]
+
+
+
+class AddFailure(Command):
+    """
+    Add a failure.
+    """
+    arguments = [('testName', String()), ('fail', String()),
+                 ('failClass', String()), ('frames', ListOf(String()))]
+    response = [('success', Boolean())]
+
+
+
+class AddSkip(Command):
+    """
+    Add a skip.
+    """
+    arguments = [('testName', String()), ('reason', String())]
+    response = [('success', Boolean())]
+
+
+
+class AddExpectedFailure(Command):
+    """
+    Add an expected failure.
+    """
+    arguments = [('testName', String()), ('error', String()),
+                 ('todo', String())]
+    response = [('success', Boolean())]
+
+
+
+class AddUnexpectedSuccess(Command):
+    """
+    Add an unexpected success.
+    """
+    arguments = [('testName', String()), ('todo', String())]
+    response = [('success', Boolean())]
+
+
+
+class TestWrite(Command):
+    """
+    Write test log.
+    """
+    arguments = [('out', String())]
+    response = [('success', Boolean())]
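+
+
+# Editor's note: an illustrative sketch, not part of upstream Twisted, of how
+# a worker-side AMP connection might report a passing test to the manager
+# with the AddSuccess command defined above; 'ampProtocol' stands for a
+# hypothetical connected AMP instance.
+def _exampleReportSuccess(ampProtocol, test):
+    # Returns a Deferred that fires with {'success': True} once the manager
+    # has recorded the result.
+    return ampProtocol.callRemote(AddSuccess, testName=test.id())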
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/options.py b/ThirdParty/Twisted/twisted/trial/_dist/options.py
new file mode 100644
index 0000000..ee5ccd8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/options.py
@@ -0,0 +1,30 @@
+# -*- test-case-name: twisted.trial._dist.test.test_options -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Options handling specific to trial's workers.
+
+ at since: 12.3
+"""
+
+from twisted.python.filepath import FilePath
+from twisted.python.usage import Options
+from twisted.scripts.trial import _BasicOptions
+from twisted.application.app import ReactorSelectionMixin
+
+
+
+class WorkerOptions(_BasicOptions, Options, ReactorSelectionMixin):
+    """
+    Options forwarded to the trial distributed worker.
+    """
+
+
+    def coverdir(self):
+        """
+        Return a L{FilePath} representing the directory into which coverage
+        results should be written.
+        """
+        return FilePath('coverage')
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/test/__init__.py b/ThirdParty/Twisted/twisted/trial/_dist/test/__init__.py
new file mode 100644
index 0000000..36de39d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Distributed trial test runner tests.
+"""
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/test/test_distreporter.py b/ThirdParty/Twisted/twisted/trial/_dist/test/test_distreporter.py
new file mode 100644
index 0000000..fd6737f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/test/test_distreporter.py
@@ -0,0 +1,62 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.trial._dist.distreporter}.
+"""
+
+from cStringIO import StringIO
+
+from twisted.trial._dist.distreporter import DistReporter
+from twisted.trial.unittest import TestCase
+from twisted.trial.reporter import TreeReporter
+
+
+
+class DistReporterTestCase(TestCase):
+    """
+    Tests for L{DistReporter}.
+    """
+
+    def setUp(self):
+        self.stream = StringIO()
+        self.distReporter = DistReporter(TreeReporter(self.stream))
+        self.test = TestCase()
+
+
+    def test_startSuccessStop(self):
+        """
+        Success output only gets sent to the stream after the test has stopped.
+        """
+        self.distReporter.startTest(self.test)
+        self.assertEqual(self.stream.getvalue(), "")
+        self.distReporter.addSuccess(self.test)
+        self.assertEqual(self.stream.getvalue(), "")
+        self.distReporter.stopTest(self.test)
+        self.assertNotEqual(self.stream.getvalue(), "")
+
+
+    def test_startErrorStop(self):
+        """
+        Error output only gets sent to the stream after the test has stopped.
+        """
+        self.distReporter.startTest(self.test)
+        self.assertEqual(self.stream.getvalue(), "")
+        self.distReporter.addError(self.test, "error")
+        self.assertEqual(self.stream.getvalue(), "")
+        self.distReporter.stopTest(self.test)
+        self.assertNotEqual(self.stream.getvalue(), "")
+
+
+    def test_forwardedMethods(self):
+        """
+        Calling methods of L{DistReporter} adds calls to the running queue of
+        the test.
+        """
+        self.distReporter.startTest(self.test)
+        self.distReporter.addFailure(self.test, "foo")
+        self.distReporter.addError(self.test, "bar")
+        self.distReporter.addSkip(self.test, "egg")
+        self.distReporter.addUnexpectedSuccess(self.test, "spam")
+        self.distReporter.addExpectedFailure(self.test, "err", "foo")
+        self.assertEqual(len(self.distReporter.running[self.test.id()]), 6)
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/test/test_disttrial.py b/ThirdParty/Twisted/twisted/trial/_dist/test/test_disttrial.py
new file mode 100644
index 0000000..024c037
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/test/test_disttrial.py
@@ -0,0 +1,372 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.trial._dist.disttrial}.
+"""
+
+import os
+from cStringIO import StringIO
+
+from twisted.internet.protocol import ProcessProtocol
+from twisted.internet.defer import fail, succeed
+from twisted.internet.task import Cooperator, deferLater
+from twisted.internet.main import CONNECTION_DONE
+from twisted.internet import reactor
+from twisted.python.failure import Failure
+from twisted.python.lockfile import FilesystemLock
+
+from twisted.test.test_cooperator import FakeScheduler
+
+from twisted.trial.unittest import TestCase
+from twisted.trial.reporter import Reporter, TreeReporter
+from twisted.trial.reporter import UncleanWarningsReporterWrapper
+from twisted.trial.runner import TrialSuite, ErrorHolder
+
+from twisted.trial._dist.disttrial import DistTrialRunner
+from twisted.trial._dist.distreporter import DistReporter
+from twisted.trial._dist.worker import LocalWorker
+
+
+
+class FakeTransport(object):
+    """
+    A simple fake process transport.
+    """
+
+    def writeToChild(self, fd, data):
+        """
+        Ignore write calls.
+        """
+
+
+
+class FakeReactor(object):
+    """
+    A simple fake reactor for testing purposes.
+    """
+    spawnCount = 0
+    stopCount = 0
+    runCount = 0
+
+    def spawnProcess(self, worker, *args, **kwargs):
+        worker.makeConnection(FakeTransport())
+        self.spawnCount += 1
+
+
+    def stop(self):
+        self.stopCount += 1
+
+
+    def run(self):
+        self.runCount += 1
+
+
+    def addSystemEventTrigger(self, *args, **kw):
+        pass
+
+
+
+class DistTrialRunnerTestCase(TestCase):
+    """
+    Tests for L{DistTrialRunner}.
+    """
+
+    def setUp(self):
+        """
+        Create a runner for testing.
+        """
+        self.runner = DistTrialRunner(TreeReporter, 4, [],
+                                      workingDirectory=self.mktemp())
+        self.runner._stream = StringIO()
+
+
+    def test_writeResults(self):
+        """
+        L{DistTrialRunner.writeResults} writes to the stream specified at
+        initialization.
+        """
+        stringIO = StringIO()
+        result = DistReporter(Reporter(stringIO))
+        self.runner.writeResults(result)
+        self.assertTrue(stringIO.tell() > 0)
+
+
+    def test_createLocalWorkers(self):
+        """
+        C{createLocalWorkers} iterates the list of protocols and creates one
+        L{LocalWorker} for each.
+        """
+        protocols = [object() for x in xrange(4)]
+        workers = self.runner.createLocalWorkers(protocols, "path")
+        for s in workers:
+            self.assertIsInstance(s, LocalWorker)
+        self.assertEqual(4, len(workers))
+
+
+    def test_launchWorkerProcesses(self):
+        """
+        Given a C{spawnProcess} function, C{launchWorkerProcesses} launches a
+        Python process with an existing path as its argument.
+        """
+        protocols = [ProcessProtocol() for i in range(4)]
+        arguments = []
+
+        def fakeSpawnProcess(processProtocol, executable, args=(), env={},
+                             path=None, uid=None, gid=None, usePTY=0,
+                             childFDs=None):
+            arguments.append(executable)
+            arguments.append(args[0])
+            arguments.append(args[1])
+            arguments.append(args[2])
+
+        self.runner.launchWorkerProcesses(
+            fakeSpawnProcess, protocols, ["foo"])
+        self.assertEqual(arguments[0], arguments[1])
+        self.assertTrue(os.path.exists(arguments[2]))
+        self.assertEqual("foo", arguments[3])
+
+
+    def test_run(self):
+        """
+        C{run} starts the reactor exactly once and spawns each of the workers
+        exactly once.
+        """
+        fakeReactor = FakeReactor()
+        suite = TrialSuite()
+        for i in xrange(10):
+            suite.addTest(TestCase())
+        self.runner.run(suite, fakeReactor)
+        self.assertEqual(fakeReactor.runCount, 1)
+        self.assertEqual(fakeReactor.spawnCount, self.runner._workerNumber)
+
+
+    def test_runUsedDirectory(self):
+        """
+        L{DistTrialRunner} checks whether the test directory is already locked
+        and, if it is, generates a new name based on it.
+        """
+
+        class FakeReactorWithLock(FakeReactor):
+
+            def spawnProcess(oself, worker, *args, **kwargs):
+                self.assertEqual(os.path.abspath(worker._logDirectory),
+                                 os.path.abspath(
+                                     os.path.join(workingDirectory + "-1",
+                                                  str(oself.spawnCount))))
+                localLock = FilesystemLock(workingDirectory + "-1.lock")
+                self.assertFalse(localLock.lock())
+                oself.spawnCount += 1
+                worker.makeConnection(FakeTransport())
+                worker._ampProtocol.run = lambda *args: succeed(None)
+
+        newDirectory = self.mktemp()
+        os.mkdir(newDirectory)
+        workingDirectory = os.path.join(newDirectory, "_trial_temp")
+        lock = FilesystemLock(workingDirectory + ".lock")
+        lock.lock()
+        self.addCleanup(lock.unlock)
+        self.runner._workingDirectory = workingDirectory
+
+        fakeReactor = FakeReactorWithLock()
+        suite = TrialSuite()
+        for i in xrange(10):
+            suite.addTest(TestCase())
+        self.runner.run(suite, fakeReactor)
+
+
+    def test_minimalWorker(self):
+        """
+        L{DistTrialRunner} doesn't try to start more workers than the number of
+        tests.
+        """
+        fakeReactor = FakeReactor()
+        self.runner.run(TestCase(), fakeReactor)
+        self.assertEqual(fakeReactor.runCount, 1)
+        self.assertEqual(fakeReactor.spawnCount, 1)
+
+
+    def test_runUncleanWarnings(self):
+        """
+        Running with the C{unclean-warnings} option makes L{DistTrialRunner}
+        use the L{UncleanWarningsReporterWrapper}.
+        """
+        fakeReactor = FakeReactor()
+        self.runner._uncleanWarnings = True
+        result = self.runner.run(TestCase(), fakeReactor)
+        self.assertIsInstance(result, DistReporter)
+        self.assertIsInstance(result.original,
+                              UncleanWarningsReporterWrapper)
+
+
+    def test_runWithoutTest(self):
+        """
+        When the suite contains no test, L{DistTrialRunner} takes a shortcut
+        path without launching any process or starting the reactor.
+        """
+        fakeReactor = object()
+        suite = TrialSuite()
+        result = self.runner.run(suite, fakeReactor)
+        self.assertIsInstance(result, DistReporter)
+        output = self.runner._stream.getvalue()
+        self.assertIn("Running 0 test", output)
+        self.assertIn("PASSED", output)
+
+
+    def test_runWithoutTestButWithAnError(self):
+        """
+        Even if there is no test, the suite can contain an error (most likely,
+        an import error): this should make the run fail, and the error should
+        be printed.
+        """
+        fakeReactor = object()
+        error = ErrorHolder("an error", Failure(RuntimeError("foo bar")))
+        result = self.runner.run(error, fakeReactor)
+        self.assertIsInstance(result, DistReporter)
+        output = self.runner._stream.getvalue()
+        self.assertIn("Running 0 test", output)
+        self.assertIn("foo bar", output)
+        self.assertIn("an error", output)
+        self.assertIn("errors=1", output)
+        self.assertIn("FAILED", output)
+
+
+    def test_runUnexpectedError(self):
+        """
+        If for some reason we can't connect to the worker process, the test
+        suite catches the error and fails.
+        """
+
+        class FakeReactorWithFail(FakeReactor):
+
+            def spawnProcess(self, worker, *args, **kwargs):
+                worker.makeConnection(FakeTransport())
+                self.spawnCount += 1
+                worker._ampProtocol.run = self.failingRun
+
+            def failingRun(self, case, result):
+                return fail(RuntimeError("oops"))
+
+        scheduler = FakeScheduler()
+        cooperator = Cooperator(scheduler=scheduler)
+
+        fakeReactor = FakeReactorWithFail()
+        result = self.runner.run(TestCase(), fakeReactor,
+                                  cooperator.cooperate)
+        self.assertEqual(fakeReactor.runCount, 1)
+        self.assertEqual(fakeReactor.spawnCount, 1)
+        scheduler.pump()
+        self.assertEqual(1, len(result.original.failures))
+
+
+    def test_runStopAfterTests(self):
+        """
+        L{DistTrialRunner} calls C{reactor.stop} and unlocks the test directory
+        once the tests have run.
+        """
+        functions = []
+
+        class FakeReactorWithSuccess(FakeReactor):
+
+            def spawnProcess(self, worker, *args, **kwargs):
+                worker.makeConnection(FakeTransport())
+                self.spawnCount += 1
+                worker._ampProtocol.run = self.succeedingRun
+
+            def succeedingRun(self, case, result):
+                return succeed(None)
+
+            def addSystemEventTrigger(oself, phase, event, function):
+                self.assertEqual('before', phase)
+                self.assertEqual('shutdown', event)
+                functions.append(function)
+
+        workingDirectory = self.runner._workingDirectory
+
+        fakeReactor = FakeReactorWithSuccess()
+        self.runner.run(TestCase(), fakeReactor)
+
+        def check():
+            localLock = FilesystemLock(workingDirectory + ".lock")
+            self.assertTrue(localLock.lock())
+            self.assertEqual(1, fakeReactor.stopCount)
+            # We don't wait for the process deferreds here, so nothing is
+            # returned by the function before shutdown.
+            self.assertIdentical(None, functions[0]())
+
+        return deferLater(reactor, 0, check)
+
+
+    def test_runWaitForProcessesDeferreds(self):
+        """
+        L{DistTrialRunner} waits for the worker processes to stop when the
+        reactor is stopping, and then unlocks the test directory, not trying to
+        stop the reactor again.
+        """
+        functions = []
+        workers = []
+
+        class FakeReactorWithEvent(FakeReactor):
+
+            def spawnProcess(self, worker, *args, **kwargs):
+                worker.makeConnection(FakeTransport())
+                workers.append(worker)
+
+            def addSystemEventTrigger(oself, phase, event, function):
+                self.assertEqual('before', phase)
+                self.assertEqual('shutdown', event)
+                functions.append(function)
+
+        workingDirectory = self.runner._workingDirectory
+
+        fakeReactor = FakeReactorWithEvent()
+        self.runner.run(TestCase(), fakeReactor)
+
+        def check(ign):
+            # Let the AMP deferreds fire
+            return deferLater(reactor, 0, realCheck)
+
+        def realCheck():
+            localLock = FilesystemLock(workingDirectory + ".lock")
+            self.assertTrue(localLock.lock())
+            # Stop is not called, as it ought to have been called before
+            self.assertEqual(0, fakeReactor.stopCount)
+
+        workers[0].processEnded(Failure(CONNECTION_DONE))
+        return functions[0]().addCallback(check)
+
+
+    def test_runUntilFailure(self):
+        """
+        L{DistTrialRunner} can run in C{untilFailure} mode where it will run
+        the given tests until they fail.
+        """
+        called = []
+
+        class FakeReactorWithSuccess(FakeReactor):
+
+            def spawnProcess(self, worker, *args, **kwargs):
+                worker.makeConnection(FakeTransport())
+                self.spawnCount += 1
+                worker._ampProtocol.run = self.succeedingRun
+
+            def succeedingRun(self, case, result):
+                called.append(None)
+                if len(called) == 5:
+                    return fail(RuntimeError("oops"))
+                return succeed(None)
+
+        fakeReactor = FakeReactorWithSuccess()
+
+        scheduler = FakeScheduler()
+        cooperator = Cooperator(scheduler=scheduler)
+
+        result = self.runner.run(
+            TestCase(), fakeReactor, cooperate=cooperator.cooperate,
+            untilFailure=True)
+        scheduler.pump()
+        self.assertEqual(5, len(called))
+        self.assertFalse(result.wasSuccessful())
+        output = self.runner._stream.getvalue()
+        self.assertIn("PASSED", output)
+        self.assertIn("FAIL", output)
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/test/test_options.py b/ThirdParty/Twisted/twisted/trial/_dist/test/test_options.py
new file mode 100644
index 0000000..3adb25b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/test/test_options.py
@@ -0,0 +1,48 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for distributed trial's options management.
+"""
+
+import os, sys, gc
+
+from twisted.trial.unittest import TestCase
+from twisted.trial._dist.options import WorkerOptions
+
+
+
+class WorkerOptionsTestCase(TestCase):
+    """
+    Tests for L{WorkerOptions}.
+    """
+
+    def setUp(self):
+        """
+        Build a L{WorkerOptions} object to be used in the tests.
+        """
+        self.options = WorkerOptions()
+
+
+    def test_standardOptions(self):
+        """
+        L{WorkerOptions} supports a subset of the standard options accepted
+        by trial.
+        """
+        self.addCleanup(sys.setrecursionlimit, sys.getrecursionlimit())
+        if gc.isenabled():
+            self.addCleanup(gc.enable)
+        gc.enable()
+        self.options.parseOptions(["--recursionlimit", "2000", "--disablegc"])
+        self.assertEqual(2000, sys.getrecursionlimit())
+        self.assertFalse(gc.isenabled())
+
+
+    def test_coverage(self):
+        """
+        L{WorkerOptions.coverdir} returns the C{coverage} child directory of
+        the current directory to be used for storing coverage data.
+        """
+        self.assertEqual(
+            os.path.realpath(os.path.join(os.getcwd(), "coverage")),
+            self.options.coverdir().path)
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/test/test_worker.py b/ThirdParty/Twisted/twisted/trial/_dist/test/test_worker.py
new file mode 100644
index 0000000..4e173b4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/test/test_worker.py
@@ -0,0 +1,473 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the distributed trial worker side.
+"""
+
+import os
+from cStringIO import StringIO
+
+from zope.interface.verify import verifyObject
+
+from twisted.trial.reporter import TestResult
+from twisted.trial.unittest import TestCase
+from twisted.trial._dist.worker import (
+    LocalWorker, LocalWorkerAMP, LocalWorkerTransport, WorkerProtocol)
+from twisted.trial._dist import managercommands, workercommands
+
+from twisted.scripts import trial
+from twisted.test.proto_helpers import StringTransport
+
+from twisted.internet.interfaces import ITransport, IAddress
+from twisted.internet.defer import fail, succeed
+from twisted.internet.main import CONNECTION_DONE
+from twisted.internet.error import ConnectionDone
+from twisted.python.failure import Failure
+from twisted.protocols.amp import AMP
+
+
+
+class FakeAMP(AMP):
+    """
+    A fake amp protocol.
+    """
+
+
+
+class WorkerProtocolTestCase(TestCase):
+    """
+    Tests for L{WorkerProtocol}.
+    """
+
+    def setUp(self):
+        """
+        Set up server and client protocols, each connected to an in-memory
+        string transport.
+        """
+        self.serverTransport = StringTransport()
+        self.clientTransport = StringTransport()
+        self.server = WorkerProtocol()
+        self.server.makeConnection(self.serverTransport)
+        self.client = FakeAMP()
+        self.client.makeConnection(self.clientTransport)
+
+
+    def test_run(self):
+        """
+        Calling the L{workercommands.Run} command on the client returns a
+        response with C{success} set to C{True}.
+        """
+        d = self.client.callRemote(workercommands.Run, testCase="doesntexist")
+
+        def check(result):
+            self.assertTrue(result['success'])
+
+        d.addCallback(check)
+        self.server.dataReceived(self.clientTransport.value())
+        self.clientTransport.clear()
+        self.client.dataReceived(self.serverTransport.value())
+        self.serverTransport.clear()
+        return d
+
+
+    def test_start(self):
+        """
+        The C{start} command changes the current path.
+        """
+        curdir = os.path.realpath(os.path.curdir)
+        self.addCleanup(os.chdir, curdir)
+        self.server.start('..')
+        self.assertNotEqual(os.path.realpath(os.path.curdir), curdir)
+
+
+
+class LocalWorkerAMPTestCase(TestCase):
+    """
+    Test case for distributed trial's manager-side local worker AMP protocol
+    """
+
+    def setUp(self):
+        self.managerTransport = StringTransport()
+        self.managerAMP = LocalWorkerAMP()
+        self.managerAMP.makeConnection(self.managerTransport)
+        self.result = TestResult()
+        self.workerTransport = StringTransport()
+        self.worker = AMP()
+        self.worker.makeConnection(self.workerTransport)
+
+        config = trial.Options()
+        self.testName = "twisted.doesnexist"
+        config['tests'].add(self.testName)
+        self.testCase = trial._getSuite(config)._tests.pop()
+
+        self.managerAMP.run(self.testCase, self.result)
+        self.managerTransport.clear()
+
+
+    def pumpTransports(self):
+        """
+        Sends data from C{self.workerTransport} to C{self.managerAMP}, and then
+        data from C{self.managerTransport} back to C{self.worker}.
+        """
+        self.managerAMP.dataReceived(self.workerTransport.value())
+        self.workerTransport.clear()
+        self.worker.dataReceived(self.managerTransport.value())
+
+
+    def test_runSuccess(self):
+        """
+        Run a test, and succeed.
+        """
+        results = []
+
+        d = self.worker.callRemote(managercommands.AddSuccess,
+                                   testName=self.testName)
+        d.addCallback(lambda result: results.append(result['success']))
+        self.pumpTransports()
+
+        self.assertTrue(results)
+
+
+    def test_runExpectedFailure(self):
+        """
+        Run a test, and fail expectedly.
+        """
+        results = []
+
+        d = self.worker.callRemote(managercommands.AddExpectedFailure,
+                                   testName=self.testName, error='error',
+                                   todo='todoReason')
+        d.addCallback(lambda result: results.append(result['success']))
+        self.pumpTransports()
+
+        self.assertEqual(self.testCase, self.result.expectedFailures[0][0])
+        self.assertTrue(results)
+
+
+    def test_runError(self):
+        """
+        Run a test, and encounter an error.
+        """
+        results = []
+
+        d = self.worker.callRemote(managercommands.AddError,
+                                   testName=self.testName, error='error',
+                                   errorClass='exceptions.ValueError',
+                                   frames=[])
+        d.addCallback(lambda result: results.append(result['success']))
+        self.pumpTransports()
+
+        self.assertEqual(self.testCase, self.result.errors[0][0])
+        self.assertTrue(results)
+
+
+    def test_runErrorWithFrames(self):
+        """
+        L{LocalWorkerAMP._buildFailure} recreates the C{Failure.frames} from
+        the C{frames} argument passed to C{AddError}.
+        """
+        results = []
+
+        d = self.worker.callRemote(managercommands.AddError,
+                                   testName=self.testName, error='error',
+                                   errorClass='exceptions.ValueError',
+                                   frames=["file.py", "invalid code", "3"])
+        d.addCallback(lambda result: results.append(result['success']))
+        self.pumpTransports()
+
+        self.assertEqual(self.testCase, self.result.errors[0][0])
+        self.assertEqual(
+            [('file.py', 'invalid code', 3, [], [])],
+            self.result.errors[0][1].frames)
+        self.assertTrue(results)
+
+
+    def test_runFailure(self):
+        """
+        Run a test, and fail.
+        """
+        results = []
+
+        d = self.worker.callRemote(managercommands.AddFailure,
+                                   testName=self.testName, fail='fail',
+                                   failClass='exceptions.RuntimeError',
+                                   frames=[])
+        d.addCallback(lambda result: results.append(result['success']))
+        self.pumpTransports()
+
+        self.assertEqual(self.testCase, self.result.failures[0][0])
+        self.assertTrue(results)
+
+
+    def test_runSkip(self):
+        """
+        Run a test, but skip it.
+        """
+        results = []
+
+        d = self.worker.callRemote(managercommands.AddSkip,
+                                   testName=self.testName, reason='reason')
+        d.addCallback(lambda result: results.append(result['success']))
+        self.pumpTransports()
+
+        self.assertEqual(self.testCase, self.result.skips[0][0])
+        self.assertTrue(results)
+
+
+    def test_runUnexpectedSuccesses(self):
+        """
+        Run a test, and succeed unexpectedly.
+        """
+        results = []
+
+        d = self.worker.callRemote(managercommands.AddUnexpectedSuccess,
+                                   testName=self.testName,
+                                   todo='todo')
+        d.addCallback(lambda result: results.append(result['success']))
+        self.pumpTransports()
+
+        self.assertEqual(self.testCase, self.result.unexpectedSuccesses[0][0])
+        self.assertTrue(results)
+
+
+    def test_testWrite(self):
+        """
+        L{LocalWorkerAMP.testWrite} writes the data received to its test
+        stream.
+        """
+        results = []
+        stream = StringIO()
+        self.managerAMP.setTestStream(stream)
+
+
+        d = self.worker.callRemote(managercommands.TestWrite,
+                                   out="Some output")
+        d.addCallback(lambda result: results.append(result['success']))
+        self.pumpTransports()
+
+        self.assertEqual("Some output\n", stream.getvalue())
+        self.assertTrue(results)
+
+
+    def test_stopAfterRun(self):
+        """
+        L{LocalWorkerAMP.run} calls C{stopTest} on its test result once the
+        C{Run} command has succeeded.
+        """
+        result = object()
+        stopped = []
+
+        def fakeCallRemote(command, testCase):
+            return succeed(result)
+
+        self.managerAMP.callRemote = fakeCallRemote
+
+        class StopTestResult(TestResult):
+
+            def stopTest(self, test):
+                stopped.append(test)
+
+
+        d = self.managerAMP.run(self.testCase, StopTestResult())
+        self.assertEqual([self.testCase], stopped)
+        return d.addCallback(self.assertIdentical, result)
+
+
+
+class FakeAMProtocol(AMP):
+    """
+    A fake implementation of L{AMP} for testing.
+    """
+    id = 0
+    dataString = ""
+
+    def dataReceived(self, data):
+        self.dataString += data
+
+
+    def setTestStream(self, stream):
+        self.testStream = stream
+
+
+
+class FakeTransport(object):
+    """
+    A fake process transport implementation for testing.
+    """
+    dataString = ""
+    calls = 0
+
+    def writeToChild(self, fd, data):
+        self.dataString += data
+
+
+    def loseConnection(self):
+        self.calls += 1
+
+
+
+class LocalWorkerTestCase(TestCase):
+    """
+    Tests for L{LocalWorker} and L{LocalWorkerTransport}.
+    """
+
+    def test_childDataReceived(self):
+        """
+        L{LocalWorker.childDataReceived} forwards data received on the AMP
+        file descriptor to the linked L{AMP} protocol, and forwards everything
+        else to C{ProcessProtocol.childDataReceived}.
+        """
+        fakeTransport = FakeTransport()
+        localWorker = LocalWorker(FakeAMProtocol(), '.', 'test.log')
+        localWorker.makeConnection(fakeTransport)
+        localWorker._outLog = StringIO()
+        localWorker.childDataReceived(4, "foo")
+        localWorker.childDataReceived(1, "bar")
+        self.assertEqual("foo", localWorker._ampProtocol.dataString)
+        self.assertEqual("bar", localWorker._outLog.getvalue())
+
+
+    def test_outReceived(self):
+        """
+        L{LocalWorker.outReceived} logs the output into its C{_outLog} log
+        file.
+        """
+        fakeTransport = FakeTransport()
+        localWorker = LocalWorker(FakeAMProtocol(), '.', 'test.log')
+        localWorker.makeConnection(fakeTransport)
+        localWorker._outLog = StringIO()
+        data = "The quick brown fox jumps over the lazy dog"
+        localWorker.outReceived(data)
+        self.assertEqual(data, localWorker._outLog.getvalue())
+
+
+    def test_errReceived(self):
+        """
+        L{LocalWorker.errReceived} logs the errors into its C{_errLog} log
+        file.
+        """
+        fakeTransport = FakeTransport()
+        localWorker = LocalWorker(FakeAMProtocol(), '.', 'test.log')
+        localWorker.makeConnection(fakeTransport)
+        localWorker._errLog = StringIO()
+        data = "The quick brown fox jumps over the lazy dog"
+        localWorker.errReceived(data)
+        self.assertEqual(data, localWorker._errLog.getvalue())
+
+
+    def test_write(self):
+        """
+        L{LocalWorkerTransport.write} forwards the written data to the given
+        transport.
+        """
+        transport = FakeTransport()
+        localTransport = LocalWorkerTransport(transport)
+        data = "The quick brown fox jumps over the lazy dog"
+        localTransport.write(data)
+        self.assertEqual(data, transport.dataString)
+
+
+    def test_writeSequence(self):
+        """
+        L{LocalWorkerTransport.writeSequence} forwards the written data to the
+        given transport.
+        """
+        transport = FakeTransport()
+        localTransport = LocalWorkerTransport(transport)
+        data = ("The quick ", "brown fox jumps ", "over the lazy dog")
+        localTransport.writeSequence(data)
+        self.assertEqual("".join(data), transport.dataString)
+
+
+    def test_loseConnection(self):
+        """
+        L{LocalWorkerTransport.loseConnection} forwards the call to the given
+        transport.
+        """
+        transport = FakeTransport()
+        localTransport = LocalWorkerTransport(transport)
+        localTransport.loseConnection()
+
+        self.assertEqual(transport.calls, 1)
+
+
+    def test_connectionLost(self):
+        """
+        L{LocalWorker.connectionLost} closes the log streams.
+        """
+
+        class FakeStream(object):
+            callNumber = 0
+
+            def close(self):
+                self.callNumber += 1
+
+
+        transport = FakeTransport()
+        localWorker = LocalWorker(FakeAMProtocol(), '.', 'test.log')
+        localWorker.makeConnection(transport)
+        localWorker._outLog = FakeStream()
+        localWorker._errLog = FakeStream()
+        localWorker.connectionLost(None)
+        self.assertEqual(localWorker._outLog.callNumber, 1)
+        self.assertEqual(localWorker._errLog.callNumber, 1)
+
+
+    def test_processEnded(self):
+        """
+        L{LocalWorker.processEnded} calls C{connectionLost} on itself and on
+        the L{AMP} protocol.
+        """
+
+        class FakeStream(object):
+            callNumber = 0
+
+            def close(self):
+                self.callNumber += 1
+
+
+        transport = FakeTransport()
+        protocol = FakeAMProtocol()
+        localWorker = LocalWorker(protocol, '.', 'test.log')
+        localWorker.makeConnection(transport)
+        localWorker._outLog = FakeStream()
+        localWorker.processEnded(Failure(CONNECTION_DONE))
+        self.assertEqual(localWorker._outLog.callNumber, 1)
+        self.assertIdentical(None, protocol.transport)
+        return self.assertFailure(localWorker.endDeferred, ConnectionDone)
+
+    def test_addresses(self):
+        """
+        L{LocalWorkerTransport.getPeer} and L{LocalWorkerTransport.getHost}
+        return L{IAddress} objects.
+        """
+        localTransport = LocalWorkerTransport(None)
+        self.assertTrue(verifyObject(IAddress, localTransport.getPeer()))
+        self.assertTrue(verifyObject(IAddress, localTransport.getHost()))
+
+
+    def test_transport(self):
+        """
+        L{LocalWorkerTransport} implements L{ITransport} to be able to be used
+        by L{AMP}.
+        """
+        localTransport = LocalWorkerTransport(None)
+        self.assertTrue(verifyObject(ITransport, localTransport))
+
+
+    def test_startError(self):
+        """
+        L{LocalWorker} swallows the exceptions returned by the L{AMP} protocol
+        start method, as they would only generate unnecessary errors.
+        """
+
+        def failCallRemote(command, directory):
+            return fail(RuntimeError("oops"))
+
+        transport = FakeTransport()
+        protocol = FakeAMProtocol()
+        protocol.callRemote = failCallRemote
+        localWorker = LocalWorker(protocol, '.', 'test.log')
+        localWorker.makeConnection(transport)
+
+        self.assertEqual([], self.flushLoggedErrors(RuntimeError))
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/test/test_workerreporter.py b/ThirdParty/Twisted/twisted/trial/_dist/test/test_workerreporter.py
new file mode 100644
index 0000000..293c403
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/test/test_workerreporter.py
@@ -0,0 +1,119 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.trial._dist.workerreporter}.
+"""
+
+from twisted.python.failure import Failure
+from twisted.trial.unittest import TestCase, Todo
+from twisted.trial._dist.workerreporter import WorkerReporter
+from twisted.trial._dist import managercommands
+
+
+class FakeAMProtocol(object):
+    """
+    A fake C{AMP} implementation to track C{callRemote} calls.
+    """
+    id = 0
+    lastCall = None
+
+    def callRemote(self, command, **kwargs):
+        self.lastCall = command
+
+
+
+class WorkerReporterTestCase(TestCase):
+    """
+    Tests for L{WorkerReporter}.
+    """
+
+    def setUp(self):
+        self.fakeAMProtocol = FakeAMProtocol()
+        self.workerReporter = WorkerReporter(self.fakeAMProtocol)
+        self.test = TestCase()
+
+
+    def test_addSuccess(self):
+        """
+        L{WorkerReporter.addSuccess} sends a L{managercommands.AddSuccess}
+        command.
+        """
+        self.workerReporter.addSuccess(self.test)
+        self.assertEqual(self.fakeAMProtocol.lastCall,
+                         managercommands.AddSuccess)
+
+
+    def test_addError(self):
+        """
+        L{WorkerReporter.addError} sends a L{managercommands.AddError} command.
+        """
+        self.workerReporter.addError(self.test, Failure(RuntimeError('error')))
+        self.assertEqual(self.fakeAMProtocol.lastCall,
+                         managercommands.AddError)
+
+
+    def test_addErrorTuple(self):
+        """
+        Adding an error as a C{sys.exc_info}-style tuple via
+        L{WorkerReporter.addError} sends an L{managercommands.AddError}
+        command.
+        """
+        self.workerReporter.addError(
+            self.test, (RuntimeError, RuntimeError('error'), None))
+        self.assertEqual(self.fakeAMProtocol.lastCall,
+                         managercommands.AddError)
+
+
+    def test_addFailure(self):
+        """
+        L{WorkerReporter.addFailure} sends a L{managercommands.AddFailure}
+        command.
+        """
+        self.workerReporter.addFailure(self.test,
+                                       Failure(RuntimeError('fail')))
+        self.assertEqual(self.fakeAMProtocol.lastCall,
+                         managercommands.AddFailure)
+
+
+    def test_addFailureTuple(self):
+        """
+        Adding a failure as a C{sys.exc_info}-style tuple via
+        L{WorkerReporter.addFailure} sends an L{managercommands.AddFailure}
+        command.
+        """
+        self.workerReporter.addFailure(
+            self.test, (RuntimeError, RuntimeError('fail'), None))
+        self.assertEqual(self.fakeAMProtocol.lastCall,
+                         managercommands.AddFailure)
+
+
+    def test_addSkip(self):
+        """
+        L{WorkerReporter.addSkip} sends a L{managercommands.AddSkip} command.
+        """
+        self.workerReporter.addSkip(self.test, 'reason')
+        self.assertEqual(self.fakeAMProtocol.lastCall,
+                         managercommands.AddSkip)
+
+
+    def test_addExpectedFailure(self):
+        """
+        L{WorkerReporter.addExpectedFailure} sends a
+        L{managercommands.AddExpectedFailure} command to the protocol.
+        """
+        self.workerReporter.addExpectedFailure(
+            self.test, Failure(RuntimeError('error')), Todo('todo'))
+        self.assertEqual(self.fakeAMProtocol.lastCall,
+                         managercommands.AddExpectedFailure)
+
+
+    def test_addUnexpectedSuccess(self):
+        """
+        L{WorkerReporter.addUnexpectedSuccess} sends a
+        L{managercommands.AddUnexpectedSuccess} command.
+        """
+        self.workerReporter.addUnexpectedSuccess(self.test, Todo('todo'))
+        self.assertEqual(self.fakeAMProtocol.lastCall,
+                         managercommands.AddUnexpectedSuccess)
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/test/test_workertrial.py b/ThirdParty/Twisted/twisted/trial/_dist/test/test_workertrial.py
new file mode 100644
index 0000000..b119321
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/test/test_workertrial.py
@@ -0,0 +1,149 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.trial._dist.workertrial}.
+"""
+
+import errno
+import os
+import sys
+from cStringIO import StringIO
+
+from twisted.protocols.amp import AMP
+from twisted.test.proto_helpers import StringTransport
+from twisted.trial.unittest import TestCase
+from twisted.trial._dist.workertrial import WorkerLogObserver, main
+from twisted.trial._dist import workertrial
+from twisted.trial._dist import workercommands, managercommands
+
+
+
+class FakeAMP(AMP):
+    """
+    A fake amp protocol.
+    """
+
+
+
+class WorkerLogObserverTestCase(TestCase):
+    """
+    Tests for L{WorkerLogObserver}.
+    """
+
+    def test_emit(self):
+        """
+        L{WorkerLogObserver} forwards log text to the manager via the
+        L{managercommands.TestWrite} command.
+        """
+        calls = []
+
+        class FakeClient(object):
+
+            def callRemote(self, method, **kwargs):
+                calls.append((method, kwargs))
+
+        observer = WorkerLogObserver(FakeClient())
+        observer.emit({'message': ['Some log']})
+        self.assertEqual(calls,
+            [(managercommands.TestWrite, {'out': 'Some log'})])
+
+
+
+class MainTestCase(TestCase):
+    """
+    Tests for L{main}.
+    """
+
+    def setUp(self):
+        self.readStream = StringIO()
+        self.writeStream = StringIO()
+        self.patch(os, 'fdopen', self.fdopen)
+        self.patch(workertrial, 'startLoggingWithObserver',
+                   self.startLoggingWithObserver)
+        self.addCleanup(setattr, sys, "argv", sys.argv)
+        sys.argv = ["trial"]
+
+
+    def fdopen(self, fd, mode=None):
+        """
+        Fake C{os.fdopen} implementation which returns C{self.readStream} for
+        the stdin fd and C{self.writeStream} for the stdout fd.
+        """
+        if fd == 3:
+            self.assertIdentical(None, mode)
+            return self.readStream
+        elif fd == 4:
+            self.assertEqual('w', mode)
+            return self.writeStream
+
+
+    def startLoggingWithObserver(self, emit, setStdout):
+        """
+        Override C{startLoggingWithObserver} so that logging is not actually
+        started.
+        """
+        self.assertFalse(setStdout)
+
+
+    def test_empty(self):
+        """
+        If no data is ever written, L{main} exits without writing data out.
+        """
+        main()
+        self.assertEqual('', self.writeStream.getvalue())
+
+
+    def test_forwardCommand(self):
+        """
+        L{main} forwards data from its input stream to a L{WorkerProtocol}
+        instance which writes data to the output stream.
+        """
+        client = FakeAMP()
+        clientTransport = StringTransport()
+        client.makeConnection(clientTransport)
+        client.callRemote(workercommands.Run, testCase="doesntexist")
+        self.readStream = clientTransport.io
+        self.readStream.seek(0, 0)
+        main()
+        self.assertIn(
+            "No module named 'doesntexist'", self.writeStream.getvalue())
+
+
+    def test_readInterrupted(self):
+        """
+        If reading the input stream fails with an C{IOError} whose errno is
+        C{EINTR}, L{main} ignores it and continues reading.
+        """
+
+        class FakeStream(object):
+            count = 0
+
+            def read(oself, size):
+                oself.count += 1
+                if oself.count == 1:
+                    raise IOError(errno.EINTR)
+                else:
+                    self.assertEqual((None, None, None), sys.exc_info())
+                return ''
+
+        self.readStream = FakeStream()
+        main()
+        self.assertEqual('', self.writeStream.getvalue())
+
+
+    def test_otherReadError(self):
+        """
+        L{main} only ignores C{IOError}s with an C{EINTR} errno; any other
+        error propagates.
+        """
+
+        class FakeStream(object):
+            count = 0
+
+            def read(oself, size):
+                oself.count += 1
+                if oself.count == 1:
+                    raise IOError("Something else")
+                return ''
+
+        self.readStream = FakeStream()
+        self.assertRaises(IOError, main)
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/worker.py b/ThirdParty/Twisted/twisted/trial/_dist/worker.py
new file mode 100644
index 0000000..e9e128c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/worker.py
@@ -0,0 +1,328 @@
+# -*- test-case-name: twisted.trial._dist.test.test_worker -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This module implements the worker classes.
+
+ at since: 12.3
+"""
+
+import os
+
+from zope.interface import implements
+
+from twisted.internet.protocol import ProcessProtocol
+from twisted.internet.interfaces import ITransport, IAddress
+from twisted.internet.defer import Deferred
+from twisted.protocols.amp import AMP
+from twisted.python.failure import Failure
+from twisted.python.reflect import namedObject
+from twisted.trial.unittest import Todo
+from twisted.trial.runner import TrialSuite, TestLoader
+from twisted.trial._dist import workercommands, managercommands
+from twisted.trial._dist import _WORKER_AMP_STDIN, _WORKER_AMP_STDOUT
+from twisted.trial._dist.workerreporter import WorkerReporter
+
+
+
+class WorkerProtocol(AMP):
+    """
+    The worker-side trial distributed protocol.
+    """
+
+    def __init__(self, forceGarbageCollection=False):
+        self._loader = TestLoader()
+        self._result = WorkerReporter(self)
+        self._forceGarbageCollection = forceGarbageCollection
+
+
+    def run(self, testCase):
+        """
+        Run a test case by name.
+        """
+        case = self._loader.loadByName(testCase)
+        suite = TrialSuite([case], self._forceGarbageCollection)
+        suite.run(self._result)
+        return {'success': True}
+
+    workercommands.Run.responder(run)
+
+
+    def start(self, directory):
+        """
+        Set up the worker, moving into the given directory so that tests run
+        in it.
+        """
+        os.chdir(directory)
+        return {'success': True}
+
+    workercommands.Start.responder(start)
+
+
+
+class LocalWorkerAMP(AMP):
+    """
+    Local implementation of the manager commands.
+    """
+
+    def addSuccess(self, testName):
+        """
+        Add a success to the reporter.
+        """
+        self._result.addSuccess(self._testCase)
+        return {'success': True}
+
+    managercommands.AddSuccess.responder(addSuccess)
+
+
+    def _buildFailure(self, error, errorClass, frames):
+        """
+        Helper to build a C{Failure} with some traceback.
+
+        @param error: An C{Exception} instance.
+
+        @param errorClass: The class name of the C{error} class.
+
+        @param frames: A flat list of strings representing the information
+            needed to approximately rebuild C{Failure} frames.
+
+        @return: A L{Failure} instance with enough information about a test
+           error.
+        """
+        errorType = namedObject(errorClass)
+        failure = Failure(error, errorType)
+        for i in range(0, len(frames), 3):
+            failure.frames.append(
+                (frames[i], frames[i + 1], int(frames[i + 2]), [], []))
+        return failure
+
+
+    def addError(self, testName, error, errorClass, frames):
+        """
+        Add an error to the reporter.
+        """
+        failure = self._buildFailure(error, errorClass, frames)
+        self._result.addError(self._testCase, failure)
+        return {'success': True}
+
+    managercommands.AddError.responder(addError)
+
+
+    def addFailure(self, testName, fail, failClass, frames):
+        """
+        Add a failure to the reporter.
+        """
+        failure = self._buildFailure(fail, failClass, frames)
+        self._result.addFailure(self._testCase, failure)
+        return {'success': True}
+
+    managercommands.AddFailure.responder(addFailure)
+
+
+    def addSkip(self, testName, reason):
+        """
+        Add a skip to the reporter.
+        """
+        self._result.addSkip(self._testCase, reason)
+        return {'success': True}
+
+    managercommands.AddSkip.responder(addSkip)
+
+
+    def addExpectedFailure(self, testName, error, todo):
+        """
+        Add an expected failure to the reporter.
+        """
+        _todo = Todo(todo)
+        self._result.addExpectedFailure(self._testCase, error, _todo)
+        return {'success': True}
+
+    managercommands.AddExpectedFailure.responder(addExpectedFailure)
+
+
+    def addUnexpectedSuccess(self, testName, todo):
+        """
+        Add an unexpected success to the reporter.
+        """
+        self._result.addUnexpectedSuccess(self._testCase, todo)
+        return {'success': True}
+
+    managercommands.AddUnexpectedSuccess.responder(addUnexpectedSuccess)
+
+
+    def testWrite(self, out):
+        """
+        Print test output from the worker.
+        """
+        self._testStream.write(out + '\n')
+        self._testStream.flush()
+        return {'success': True}
+
+    managercommands.TestWrite.responder(testWrite)
+
+
+    def _stopTest(self, result):
+        """
+        Stop the current running test case, forwarding the result.
+        """
+        self._result.stopTest(self._testCase)
+        return result
+
+
+    def run(self, testCase, result):
+        """
+        Run a test.
+        """
+        self._testCase = testCase
+        self._result = result
+        self._result.startTest(testCase)
+        d = self.callRemote(workercommands.Run, testCase=testCase.id())
+        return d.addCallback(self._stopTest)
+
+
+    def setTestStream(self, stream):
+        """
+        Set the stream used to log output from tests.
+        """
+        self._testStream = stream
+
+
+
+class LocalWorkerAddress(object):
+    """
+    A L{IAddress} implementation meant to provide stub addresses for
+    L{ITransport.getPeer} and L{ITransport.getHost}.
+    """
+    implements(IAddress)
+
+
+
+class LocalWorkerTransport(object):
+    """
+    A stub transport implementation used to support L{AMP} over a
+    L{ProcessProtocol} transport.
+    """
+    implements(ITransport)
+
+    def __init__(self, transport):
+        self._transport = transport
+
+
+    def write(self, data):
+        """
+        Forward data to transport.
+        """
+        self._transport.writeToChild(_WORKER_AMP_STDIN, data)
+
+
+    def writeSequence(self, sequence):
+        """
+        Emulate C{writeSequence} by iterating over the data in C{sequence}.
+        """
+        for data in sequence:
+            self._transport.writeToChild(_WORKER_AMP_STDIN, data)
+
+
+    def loseConnection(self):
+        """
+        Closes the transport.
+        """
+        self._transport.loseConnection()
+
+
+    def getHost(self):
+        """
+        Return a L{LocalWorkerAddress} instance.
+        """
+        return LocalWorkerAddress()
+
+
+    def getPeer(self):
+        """
+        Return a L{LocalWorkerAddress} instance.
+        """
+        return LocalWorkerAddress()
+
+
+
+class LocalWorker(ProcessProtocol):
+    """
+    Local process worker protocol. This worker runs as a local process and
+    communicates via stdin/out.
+
+    @ivar _ampProtocol: The L{AMP} protocol instance used to communicate with
+        the worker.
+
+    @ivar _logDirectory: The directory where logs will reside.
+
+    @ivar _logFile: The name of the main log file for tests output.
+    """
+
+    def __init__(self, ampProtocol, logDirectory, logFile):
+        self._ampProtocol = ampProtocol
+        self._logDirectory = logDirectory
+        self._logFile = logFile
+        self.endDeferred = Deferred()
+
+
+    def connectionMade(self):
+        """
+        When connection is made, create the AMP protocol instance.
+        """
+        self._ampProtocol.makeConnection(LocalWorkerTransport(self.transport))
+        if not os.path.exists(self._logDirectory):
+            os.makedirs(self._logDirectory)
+        self._outLog = file(os.path.join(self._logDirectory, 'out.log'), 'w')
+        self._errLog = file(os.path.join(self._logDirectory, 'err.log'), 'w')
+        testLog = file(os.path.join(self._logDirectory, self._logFile), 'w')
+        self._ampProtocol.setTestStream(testLog)
+        d = self._ampProtocol.callRemote(workercommands.Start,
+                                         directory=self._logDirectory)
+        # Ignore potential errors; the test suite will fail properly on its
+        # own, and reporting them here would just print garbage.
+        d.addErrback(lambda x: None)
+
+
+    def connectionLost(self, reason):
+        """
+        On connection lost, close the log files that we're managing for the
+        worker's stdout and stderr.
+        """
+        self._outLog.close()
+        self._errLog.close()
+
+
+    def processEnded(self, reason):
+        """
+        When the process closes, call C{connectionLost} for cleanup purposes
+        and forward the information to the C{_ampProtocol}.
+        """
+        self.connectionLost(reason)
+        self._ampProtocol.connectionLost(reason)
+        self.endDeferred.callback(reason)
+
+
+    def outReceived(self, data):
+        """
+        Send data received from stdout to log.
+        """
+        self._outLog.write(data)
+
+
+    def errReceived(self, data):
+        """
+        Write error data to log.
+        """
+        self._errLog.write(data)
+
+
+    def childDataReceived(self, childFD, data):
+        """
+        Handle data received on the specific pipe for the C{_ampProtocol}.
+        """
+        if childFD == _WORKER_AMP_STDOUT:
+            self._ampProtocol.dataReceived(data)
+        else:
+            ProcessProtocol.childDataReceived(self, childFD, data)
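To illustrate how L{LocalWorkerTransport} adapts a process transport for use by AMP, here is a minimal sketch in the spirit of the fakes used in the tests above (the C{RecordingTransport} name is ours):

    from twisted.trial._dist.worker import LocalWorkerTransport

    class RecordingTransport(object):
        # Minimal stand-in for a process transport.
        def __init__(self):
            self.written = []

        def writeToChild(self, fd, data):
            self.written.append((fd, data))

    recorder = RecordingTransport()
    transport = LocalWorkerTransport(recorder)
    transport.write("spam")
    # recorder.written now holds [(_WORKER_AMP_STDIN, "spam")]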
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/workercommands.py b/ThirdParty/Twisted/twisted/trial/_dist/workercommands.py
new file mode 100644
index 0000000..a786fb4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/workercommands.py
@@ -0,0 +1,28 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Commands for telling a worker to load tests or run tests.
+
+ at since: 12.3
+"""
+
+from twisted.protocols.amp import Command, String, Boolean
+
+
+
+class Run(Command):
+    """
+    Run a test.
+    """
+    arguments = [('testCase', String())]
+    response = [('success', Boolean())]
+
+
+
+class Start(Command):
+    """
+    Set up the worker process, giving the running directory.
+    """
+    arguments = [('directory', String())]
+    response = [('success', Boolean())]
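These two commands are everything the manager ever asks of a worker. A hedged sketch of the calling side, essentially what L{LocalWorker.connectionMade} and L{LocalWorkerAMP.run} do elsewhere in this patch (the helper name is ours):

    from twisted.trial._dist import workercommands

    def startAndRun(amp, directory, caseId):
        # Point the worker at its working directory, then ask it to run one
        # test case identified by its fully qualified name.
        d = amp.callRemote(workercommands.Start, directory=directory)
        d.addCallback(lambda ignored: amp.callRemote(workercommands.Run,
                                                     testCase=caseId))
        return d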
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/workerreporter.py b/ThirdParty/Twisted/twisted/trial/_dist/workerreporter.py
new file mode 100644
index 0000000..ab6e633
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/workerreporter.py
@@ -0,0 +1,123 @@
+# -*- test-case-name: twisted.trial._dist.test.test_workerreporter -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test reporter forwarding test results over trial distributed AMP commands.
+
+ at since: 12.3
+"""
+
+from twisted.python.failure import Failure
+from twisted.python.reflect import qual
+from twisted.trial.reporter import TestResult
+from twisted.trial._dist import managercommands
+
+
+
+class WorkerReporter(TestResult):
+    """
+    Reporter for trial's distributed workers. We send results not through a
+    stream, but through an C{AMP} protocol's C{callRemote} method.
+    """
+
+    def __init__(self, ampProtocol):
+        """
+        @param ampProtocol: The communication channel with the trial
+            distributed manager which collects all test results.
+        @type ampProtocol: C{AMP}
+        """
+        super(WorkerReporter, self).__init__()
+        self.ampProtocol = ampProtocol
+
+
+    def _getFailure(self, error):
+        """
+        Convert a C{sys.exc_info()}-style tuple to a L{Failure}, if necessary.
+        """
+        if isinstance(error, tuple):
+            return Failure(error[1], error[0], error[2])
+        return error
+
+
+    def _getFrames(self, failure):
+        """
+        Extract frames from a C{Failure} instance.
+        """
+        frames = []
+        for frame in failure.frames:
+            frames.extend([frame[0], frame[1], str(frame[2])])
+        return frames
+
+
+    def addSuccess(self, test):
+        """
+        Send a success over.
+        """
+        super(WorkerReporter, self).addSuccess(test)
+        self.ampProtocol.callRemote(managercommands.AddSuccess,
+                                    testName=test.id())
+
+
+    def addError(self, test, error):
+        """
+        Send an error over.
+        """
+        super(WorkerReporter, self).addError(test, error)
+        failure = self._getFailure(error)
+        frames = self._getFrames(failure)
+        self.ampProtocol.callRemote(managercommands.AddError,
+                                    testName=test.id(),
+                                    error=failure.getErrorMessage(),
+                                    errorClass=qual(failure.type),
+                                    frames=frames)
+
+
+    def addFailure(self, test, fail):
+        """
+        Send a Failure over.
+        """
+        super(WorkerReporter, self).addFailure(test, fail)
+        failure = self._getFailure(fail)
+        frames = self._getFrames(failure)
+        self.ampProtocol.callRemote(managercommands.AddFailure,
+                                    testName=test.id(),
+                                    fail=failure.getErrorMessage(),
+                                    failClass=qual(failure.type),
+                                    frames=frames)
+
+
+    def addSkip(self, test, reason):
+        """
+        Send a skip over.
+        """
+        super(WorkerReporter, self).addSkip(test, reason)
+        self.ampProtocol.callRemote(managercommands.AddSkip,
+                                    testName=test.id(), reason=str(reason))
+
+
+    def addExpectedFailure(self, test, error, todo):
+        """
+        Send an expected failure over.
+        """
+        super(WorkerReporter, self).addExpectedFailure(test, error, todo)
+        self.ampProtocol.callRemote(managercommands.AddExpectedFailure,
+                                    testName=test.id(),
+                                    error=error.getErrorMessage(),
+                                    todo=todo.reason)
+
+
+    def addUnexpectedSuccess(self, test, todo):
+        """
+        Send an unexpected success over.
+        """
+        super(WorkerReporter, self).addUnexpectedSuccess(test, todo)
+        self.ampProtocol.callRemote(managercommands.AddUnexpectedSuccess,
+                                    testName=test.id(), todo=todo.reason)
+
+
+    def printSummary(self):
+        """
+        I{Don't} print a summary
+        """
diff --git a/ThirdParty/Twisted/twisted/trial/_dist/workertrial.py b/ThirdParty/Twisted/twisted/trial/_dist/workertrial.py
new file mode 100644
index 0000000..a5598b8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_dist/workertrial.py
@@ -0,0 +1,94 @@
+# -*- test-case-name: twisted.trial._dist.test.test_workertrial -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation of C{AMP} worker commands, and main executable entry point for
+the workers.
+
+ at since: 12.3
+"""
+
+import _preamble
+
+import sys
+import os
+import errno
+
+from twisted.internet.protocol import FileWrapper
+from twisted.python.log import startLoggingWithObserver, textFromEventDict
+from twisted.trial._dist.options import WorkerOptions
+from twisted.trial._dist import _WORKER_AMP_STDIN, _WORKER_AMP_STDOUT
+
+
+
+class WorkerLogObserver(object):
+    """
+    A log observer that forwards its output to an C{AMP} protocol.
+    """
+
+    def __init__(self, protocol):
+        """
+        @param protocol: a connected C{AMP} protocol instance.
+        @type protocol: C{AMP}
+        """
+        self.protocol = protocol
+
+
+    def emit(self, eventDict):
+        """
+        Produce a log output.
+        """
+        from twisted.trial._dist import managercommands
+        text = textFromEventDict(eventDict)
+        if text is None:
+            return
+        self.protocol.callRemote(managercommands.TestWrite, out=text)
+
+
+
+def main():
+    """
+    Main function to be run if __name__ == "__main__".
+    """
+    config = WorkerOptions()
+    config.parseOptions()
+
+    from twisted.trial._dist.worker import WorkerProtocol
+    workerProtocol = WorkerProtocol(config['force-gc'])
+
+    protocolIn = os.fdopen(_WORKER_AMP_STDIN)
+    protocolOut = os.fdopen(_WORKER_AMP_STDOUT, 'w')
+    workerProtocol.makeConnection(FileWrapper(protocolOut))
+
+    observer = WorkerLogObserver(workerProtocol)
+    startLoggingWithObserver(observer.emit, False)
+
+    while True:
+        try:
+            r = protocolIn.read(1)
+        except IOError, e:
+            if e.args[0] == errno.EINTR:
+                sys.exc_clear()
+                continue
+            else:
+                raise
+        if r == '':
+            break
+        else:
+            workerProtocol.dataReceived(r)
+            protocolOut.flush()
+            sys.stdout.flush()
+            sys.stderr.flush()
+
+    if config.tracer:
+        sys.settrace(None)
+        results = config.tracer.results()
+        results.write_results(show_missing=True, summary=False,
+                              coverdir=config.coverdir().path)
+
+
+
+if __name__ == '__main__':
+    main()
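
The read loop in C{main} above pumps the AMP stream one byte at a time, retrying reads that are interrupted by a signal (C{EINTR}) and stopping on end-of-file. Below is a minimal standalone sketch of that retry pattern, using only the standard library; the C{stream} and C{handle_byte} names are stand-ins for this example, not Twisted APIs.

    import errno

    def pump(stream, handle_byte):
        """Feed a stream to handle_byte one byte at a time, retrying on EINTR."""
        while True:
            try:
                byte = stream.read(1)
            except IOError as e:
                if e.errno == errno.EINTR:
                    continue   # read interrupted by a signal: just try again
                raise
            if byte == '':
                break          # EOF: the writing side closed the pipe
            handle_byte(byte)
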
diff --git a/ThirdParty/Twisted/twisted/trial/_synctest.py b/ThirdParty/Twisted/twisted/trial/_synctest.py
new file mode 100644
index 0000000..e3d7956
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/_synctest.py
@@ -0,0 +1,1252 @@
+# -*- test-case-name: twisted.trial.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Things likely to be used by writers of unit tests.
+
+Maintainer: Jonathan Lange
+"""
+
+from __future__ import division, absolute_import
+
+import inspect
+import os, warnings, sys, tempfile, types
+from pprint import pformat
+from dis import findlinestarts as _findlinestarts
+
+from twisted.python import failure, log, monkey
+from twisted.python._utilpy3 import runWithWarningsSuppressed
+from twisted.python.deprecate import (
+    getDeprecationWarningString, warnAboutFunction)
+
+from twisted.trial import itrial, util
+
+import unittest as pyunit
+
+# Python > 2.6 has skip support built-in:
+if getattr(pyunit, "SkipTest", None):
+    SkipTest = pyunit.SkipTest
+else:
+    class SkipTest(Exception):
+        """
+        Raise this (with a reason) to skip the current test. You may also set
+        method.skip to a reason string to skip it, or set class.skip to skip the
+        entire TestCase.
+        """
+
+
+
+class FailTest(AssertionError):
+    """Raised to indicate the current test has failed to pass."""
+
+
+
+class Todo(object):
+    """
+    Internal object used to mark a L{TestCase} as 'todo'. Tests marked 'todo'
+    are reported differently in Trial L{TestResult}s. If todo'd tests fail,
+    they do not fail the suite and the errors are reported in a separate
+    category. If todo'd tests succeed, Trial L{TestResult}s will report an
+    unexpected success.
+    """
+
+    def __init__(self, reason, errors=None):
+        """
+        @param reason: A string explaining why the test is marked 'todo'
+
+        @param errors: An iterable of exception types that the test is
+        expected to raise. If one of these errors is raised by the test, it
+        will be trapped. Raising any other kind of error will fail the test.
+        If C{None} is passed, then all errors will be trapped.
+        """
+        self.reason = reason
+        self.errors = errors
+
+    def __repr__(self):
+        return "<Todo reason=%r errors=%r>" % (self.reason, self.errors)
+
+    def expected(self, failure):
+        """
+        @param failure: A L{twisted.python.failure.Failure}.
+
+        @return: C{True} if C{failure} is expected, C{False} otherwise.
+        """
+        if self.errors is None:
+            return True
+        for error in self.errors:
+            if failure.check(error):
+                return True
+        return False
+
+
+def makeTodo(value):
+    """
+    Return a L{Todo} object built from C{value}.
+
+    If C{value} is a string, return a L{Todo} with C{value} as its reason that
+    expects any exception. If C{value} is a tuple, the second element is used
+    as the reason and the first element as the expected error(s).
+
+    @param value: A string or a tuple of C{(errors, reason)}, where C{errors}
+    is either a single exception class or an iterable of exception classes.
+
+    @return: A L{Todo} object.
+    """
+    if isinstance(value, str):
+        return Todo(reason=value)
+    if isinstance(value, tuple):
+        errors, reason = value
+        try:
+            errors = list(errors)
+        except TypeError:
+            errors = [errors]
+        return Todo(reason=reason, errors=errors)
+
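
As an illustration of the two forms C{makeTodo} accepts, here is a hedged sketch; the C{KnownBug} exception and the reason strings are invented for the example.

    # A bare string: any exception raised by the test is expected.
    anyError = makeTodo("waiting on an upstream fix")
    assert anyError.errors is None

    # An (errors, reason) tuple: only the listed exception types are expected.
    class KnownBug(Exception):
        pass

    specific = makeTodo((KnownBug, "tracked as a known bug"))
    assert specific.errors == [KnownBug]
    assert specific.reason == "tracked as a known bug"
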
+
+
+class _Warning(object):
+    """
+    A L{_Warning} instance represents one warning emitted through the Python
+    warning system (L{warnings}).  This is used to insulate callers of
+    L{_collectWarnings} from changes to the Python warnings system which might
+    otherwise require changes to the warning objects that function passes to
+    the observer object it accepts.
+
+    @ivar message: The string which was passed as the message parameter to
+        L{warnings.warn}.
+
+    @ivar category: The L{Warning} subclass which was passed as the category
+        parameter to L{warnings.warn}.
+
+    @ivar filename: The name of the file containing the definition of the code
+        object which was C{stacklevel} frames above the call to
+        L{warnings.warn}, where C{stacklevel} is the value of the C{stacklevel}
+        parameter passed to L{warnings.warn}.
+
+    @ivar lineno: The source line associated with the active instruction of the
+        code object which was C{stacklevel} frames above the call to
+        L{warnings.warn}, where C{stacklevel} is the value of the C{stacklevel}
+        parameter passed to L{warnings.warn}.
+    """
+    def __init__(self, message, category, filename, lineno):
+        self.message = message
+        self.category = category
+        self.filename = filename
+        self.lineno = lineno
+
+
+def _setWarningRegistryToNone(modules):
+    """
+    Disable the per-module cache for every module found in C{modules}, typically
+    C{sys.modules}.
+
+    @param modules: Dictionary of modules, typically the C{sys.modules} dict.
+    """
+    for v in list(modules.values()):
+        if v is not None:
+            try:
+                v.__warningregistry__ = None
+            except:
+                # Don't specify a particular exception type to handle in case
+                # some wacky object raises some wacky exception in response to
+                # the setattr attempt.
+                pass
+
+
+def _collectWarnings(observeWarning, f, *args, **kwargs):
+    """
+    Call C{f} with C{args} positional arguments and C{kwargs} keyword arguments
+    and collect all warnings which are emitted as a result in a list.
+
+    @param observeWarning: A callable which will be invoked with a L{_Warning}
+        instance each time a warning is emitted.
+
+    @return: The return value of C{f(*args, **kwargs)}.
+    """
+    def showWarning(message, category, filename, lineno, file=None, line=None):
+        assert isinstance(message, Warning)
+        observeWarning(_Warning(
+                message.args[0], category, filename, lineno))
+
+    # Disable the per-module cache for every module otherwise if the warning
+    # which the caller is expecting us to collect was already emitted it won't
+    # be re-emitted by the call to f which happens below.
+    _setWarningRegistryToNone(sys.modules)
+
+    origFilters = warnings.filters[:]
+    origShow = warnings.showwarning
+    warnings.simplefilter('always')
+    try:
+        warnings.showwarning = showWarning
+        result = f(*args, **kwargs)
+    finally:
+        warnings.filters[:] = origFilters
+        warnings.showwarning = origShow
+    return result
+
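
A hedged usage sketch of C{_collectWarnings}; the C{noisy} function and its warning text are invented for the example.

    import warnings

    def noisy():
        warnings.warn("this API is stale", DeprecationWarning)
        return 42

    collected = []
    result = _collectWarnings(collected.append, noisy)

    assert result == 42                                  # f's return value is passed through
    assert collected[0].message == "this API is stale"   # captured as a _Warning
    assert collected[0].category is DeprecationWarning
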
+
+
+class UnsupportedTrialFeature(Exception):
+    """A feature of twisted.trial was used that pyunit cannot support."""
+
+
+
+class PyUnitResultAdapter(object):
+    """
+    Wrap a C{TestResult} from the standard library's C{unittest} so that it
+    supports the extended result types from Trial, and also supports
+    L{twisted.python.failure.Failure}s being passed to L{addError} and
+    L{addFailure}.
+    """
+
+    def __init__(self, original):
+        """
+        @param original: A C{TestResult} instance from C{unittest}.
+        """
+        self.original = original
+
+
+    def _exc_info(self, err):
+        return util.excInfoOrFailureToExcInfo(err)
+
+
+    def startTest(self, method):
+        self.original.startTest(method)
+
+
+    def stopTest(self, method):
+        self.original.stopTest(method)
+
+
+    def addFailure(self, test, fail):
+        self.original.addFailure(test, self._exc_info(fail))
+
+
+    def addError(self, test, error):
+        self.original.addError(test, self._exc_info(error))
+
+
+    def _unsupported(self, test, feature, info):
+        self.original.addFailure(
+            test,
+            (UnsupportedTrialFeature,
+             UnsupportedTrialFeature(feature, info),
+             None))
+
+
+    def addSkip(self, test, reason):
+        """
+        Report the skip as a failure.
+        """
+        # pyunit in Python 2.6 doesn't support skipping information:
+        if sys.version_info[:2] > (2, 6):
+            self.original.addSkip(test, reason)
+        else:
+            self._unsupported(test, 'skip', reason)
+
+
+    def addUnexpectedSuccess(self, test, todo):
+        """
+        Report the unexpected success as a failure.
+        """
+        self._unsupported(test, 'unexpected success', todo)
+
+
+    def addExpectedFailure(self, test, error):
+        """
+        Report the expected failure (i.e. todo) as a failure.
+        """
+        self._unsupported(test, 'expected failure', error)
+
+
+    def addSuccess(self, test):
+        self.original.addSuccess(test)
+
+
+    def upDownError(self, method, error, warn, printStatus):
+        pass
+
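
A hedged sketch of the adapter feeding a Trial-style L{Failure} into a plain standard-library result object; the failing test body is invented for the example.

    import unittest as pyunit
    from twisted.python import failure

    class Example(pyunit.TestCase):
        def test_broken(self):
            raise ValueError("boom")

    pyResult = pyunit.TestResult()
    adapted = PyUnitResultAdapter(pyResult)
    test = Example('test_broken')

    adapted.startTest(test)
    try:
        test.test_broken()
    except ValueError:
        # Failures are converted back to exc_info tuples for pyunit.
        adapted.addError(test, failure.Failure())
    adapted.stopTest(test)

    assert len(pyResult.errors) == 1
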
+
+
+class _Assertions(pyunit.TestCase, object):
+    """
+    Replaces many of the built-in TestCase assertions. In general, these
+    assertions provide better error messages and are easier to use in
+    callbacks.
+    """
+
+    def fail(self, msg=None):
+        """
+        Absolutely fail the test.  Do not pass go, do not collect $200.
+
+        @param msg: the message that will be displayed as the reason for the
+        failure
+        """
+        raise self.failureException(msg)
+
+
+    def assertFalse(self, condition, msg=None):
+        """
+        Fail the test if C{condition} evaluates to True.
+
+        @param condition: any object that defines __nonzero__
+        """
+        if condition:
+            raise self.failureException(msg)
+        return condition
+    assertNot = failUnlessFalse = failIf = assertFalse
+
+
+    def assertTrue(self, condition, msg=None):
+        """
+        Fail the test if C{condition} evaluates to False.
+
+        @param condition: any object that defines __nonzero__
+        """
+        if not condition:
+            raise self.failureException(msg)
+        return condition
+    assert_ = failUnlessTrue = failUnless = assertTrue
+
+
+    def assertRaises(self, exception, f, *args, **kwargs):
+        """
+        Fail the test unless calling the function C{f} with the given
+        C{args} and C{kwargs} raises C{exception}. The failure will report
+        the traceback and call stack of the unexpected exception.
+
+        @param exception: exception type that is to be expected
+        @param f: the function to call
+
+        @return: The raised exception instance, if it is of the given type.
+        @raise self.failureException: Raised if the function call does
+            not raise an exception or if it raises an exception of a
+            different type.
+        """
+        try:
+            result = f(*args, **kwargs)
+        except exception as inst:
+            return inst
+        except:
+            raise self.failureException('%s raised instead of %s:\n %s'
+                                        % (sys.exc_info()[0],
+                                           exception.__name__,
+                                           failure.Failure().getTraceback()))
+        else:
+            raise self.failureException('%s not raised (%r returned)'
+                                        % (exception.__name__, result))
+    failUnlessRaises = assertRaises
+
+
+    def assertEqual(self, first, second, msg=''):
+        """
+        Fail the test if C{first} and C{second} are not equal.
+
+        @param msg: A string describing the failure that's included in the
+            exception.
+        """
+        if not first == second:
+            if msg is None:
+                msg = ''
+            if len(msg) > 0:
+                msg += '\n'
+            raise self.failureException(
+                '%snot equal:\na = %s\nb = %s\n'
+                % (msg, pformat(first), pformat(second)))
+        return first
+    failUnlessEqual = failUnlessEquals = assertEquals = assertEqual
+
+
+    def assertIdentical(self, first, second, msg=None):
+        """
+        Fail the test if C{first} is not C{second}.  This is an
+        object-identity-equality test, not an object equality
+        (i.e. C{__eq__}) test.
+
+        @param msg: if msg is None, then the failure message will be
+        '%r is not %r' % (first, second)
+        """
+        if first is not second:
+            raise self.failureException(msg or '%r is not %r' % (first, second))
+        return first
+    failUnlessIdentical = assertIdentical
+
+
+    def assertNotIdentical(self, first, second, msg=None):
+        """
+        Fail the test if C{first} is C{second}.  This is an
+        object-identity-equality test, not an object equality
+        (i.e. C{__eq__}) test.
+
+        @param msg: if msg is None, then the failure message will be
+        '%r is %r' % (first, second)
+        """
+        if first is second:
+            raise self.failureException(msg or '%r is %r' % (first, second))
+        return first
+    failIfIdentical = assertNotIdentical
+
+
+    def assertNotEqual(self, first, second, msg=None):
+        """
+        Fail the test if C{first} == C{second}.
+
+        @param msg: if msg is None, then the failure message will be
+        '%r == %r' % (first, second)
+        """
+        if not first != second:
+            raise self.failureException(msg or '%r == %r' % (first, second))
+        return first
+    assertNotEquals = failIfEquals = failIfEqual = assertNotEqual
+
+
+    def assertIn(self, containee, container, msg=None):
+        """
+        Fail the test if C{containee} is not found in C{container}.
+
+        @param containee: the value that should be in C{container}
+        @param container: a sequence type, or in the case of a mapping type,
+                          will follow semantics of 'if key in dict.keys()'
+        @param msg: if msg is None, then the failure message will be
+                    '%r not in %r' % (containee, container)
+        """
+        if containee not in container:
+            raise self.failureException(msg or "%r not in %r"
+                                        % (containee, container))
+        return containee
+    failUnlessIn = assertIn
+
+
+    def assertNotIn(self, containee, container, msg=None):
+        """
+        Fail the test if C{containee} is found in C{container}.
+
+        @param containee: the value that should not be in C{container}
+        @param container: a sequence type, or in the case of a mapping type,
+                          will follow semantics of 'if key in dict.keys()'
+        @param msg: if msg is None, then the failure message will be
+                    '%r in %r' % (containee, container)
+        """
+        if containee in container:
+            raise self.failureException(msg or "%r in %r"
+                                        % (containee, container))
+        return containee
+    failIfIn = assertNotIn
+
+
+    def assertNotAlmostEqual(self, first, second, places=7, msg=None):
+        """
+        Fail if the two objects are equal as determined by their
+        difference rounded to the given number of decimal places
+        (default 7) and comparing to zero.
+
+        @note: decimal places (from zero) is usually not the same
+               as significant digits (measured from the most
+               significant digit).
+
+        @note: included for compatibility with PyUnit test cases
+        """
+        if round(second-first, places) == 0:
+            raise self.failureException(msg or '%r == %r within %r places'
+                                        % (first, second, places))
+        return first
+    assertNotAlmostEquals = failIfAlmostEqual = assertNotAlmostEqual
+    failIfAlmostEquals = assertNotAlmostEqual
+
+
+    def assertAlmostEqual(self, first, second, places=7, msg=None):
+        """
+        Fail if the two objects are unequal as determined by their
+        difference rounded to the given number of decimal places
+        (default 7) and comparing to zero.
+
+        @note: decimal places (from zero) is usually not the same
+               as significant digits (measured from the most
+               significant digit).
+
+        @note: included for compatibility with PyUnit test cases
+        """
+        if round(second-first, places) != 0:
+            raise self.failureException(msg or '%r != %r within %r places'
+                                        % (first, second, places))
+        return first
+    assertAlmostEquals = failUnlessAlmostEqual = assertAlmostEqual
+    failUnlessAlmostEquals = assertAlmostEqual
+
+
+    def assertApproximates(self, first, second, tolerance, msg=None):
+        """
+        Fail if abs(C{first} - C{second}) > C{tolerance}.
+
+        @param msg: if msg is None, then the failure message will be
+                    '%r ~== %r' % (first, second)
+        """
+        if abs(first - second) > tolerance:
+            raise self.failureException(msg or "%s ~== %s" % (first, second))
+        return first
+    failUnlessApproximates = assertApproximates
+
+
+    def assertSubstring(self, substring, astring, msg=None):
+        """
+        Fail if C{substring} does not exist within C{astring}.
+        """
+        return self.failUnlessIn(substring, astring, msg)
+    failUnlessSubstring = assertSubstring
+
+
+    def assertNotSubstring(self, substring, astring, msg=None):
+        """
+        Fail if C{astring} contains C{substring}.
+        """
+        return self.failIfIn(substring, astring, msg)
+    failIfSubstring = assertNotSubstring
+
+
+    def assertWarns(self, category, message, filename, f,
+                    *args, **kwargs):
+        """
+        Fail if the given function doesn't generate the specified warning when
+        called. It calls the function, checks the warning, and forwards the
+        result of the function if everything is fine.
+
+        @param category: the category of the warning to check.
+        @param message: the output message of the warning to check.
+        @param filename: the filename where the warning should come from.
+        @param f: the function which is supposed to generate the warning.
+        @type f: any callable.
+        @param args: the arguments to C{f}.
+        @param kwargs: the keywords arguments to C{f}.
+
+        @return: the result of the original function C{f}.
+        """
+        warningsShown = []
+        result = _collectWarnings(warningsShown.append, f, *args, **kwargs)
+
+        if not warningsShown:
+            self.fail("No warnings emitted")
+        first = warningsShown[0]
+        for other in warningsShown[1:]:
+            if ((other.message, other.category)
+                != (first.message, first.category)):
+                self.fail("Can't handle different warnings")
+        self.assertEqual(first.message, message)
+        self.assertIdentical(first.category, category)
+
+        # Use startswith() because of .pyc/.pyo issues.
+        self.failUnless(
+            filename.startswith(first.filename),
+            'Warning in %r, expected %r' % (first.filename, filename))
+
+        # It would be nice to be able to check the line number as well, but
+        # different configurations actually end up reporting different line
+        # numbers (generally the variation is only 1 line, but that's enough
+        # to fail the test erroneously...).
+        # self.assertEqual(lineno, xxx)
+
+        return result
+    failUnlessWarns = assertWarns
+
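
A hedged sketch of C{assertWarns} inside a test method; the C{legacy} function and its message are invented, and the example subclasses the C{SynchronousTestCase} defined further down in this module.

    import warnings

    def legacy():
        warnings.warn("legacy() is deprecated", DeprecationWarning)
        return "ok"

    class WarnsExample(SynchronousTestCase):
        def test_legacyWarns(self):
            result = self.assertWarns(
                DeprecationWarning, "legacy() is deprecated", __file__, legacy)
            self.assertEqual(result, "ok")
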
+
+    def assertIsInstance(self, instance, classOrTuple, message=None):
+        """
+        Fail if C{instance} is not an instance of the given class or of
+        one of the given classes.
+
+        @param instance: the object to test the type (first argument of the
+            C{isinstance} call).
+        @type instance: any.
+        @param classOrTuple: the class or classes to test against (second
+            argument of the C{isinstance} call).
+        @type classOrTuple: class, type, or tuple.
+
+        @param message: Custom text to include in the exception text if the
+            assertion fails.
+        """
+        if not isinstance(instance, classOrTuple):
+            if message is None:
+                suffix = ""
+            else:
+                suffix = ": " + message
+            self.fail("%r is not an instance of %s%s" % (
+                    instance, classOrTuple, suffix))
+    failUnlessIsInstance = assertIsInstance
+
+
+    def assertNotIsInstance(self, instance, classOrTuple):
+        """
+        Fail if C{instance} is an instance of the given class or of one of the
+        given classes.
+
+        @param instance: the object to test the type (first argument of the
+            C{isinstance} call).
+        @type instance: any.
+        @param classOrTuple: the class or classes to test against (second
+            argument of the C{isinstance} call).
+        @type classOrTuple: class, type, or tuple.
+        """
+        if isinstance(instance, classOrTuple):
+            self.fail("%r is an instance of %s" % (instance, classOrTuple))
+    failIfIsInstance = assertNotIsInstance
+
+
+    def successResultOf(self, deferred):
+        """
+        Return the current success result of C{deferred} or raise
+        C{self.failureException}.
+
+        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>} which
+            has a success result.  This means
+            L{Deferred.callback<twisted.internet.defer.Deferred.callback>} or
+            L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has
+            been called on it and it has reached the end of its callback chain
+            and the last callback or errback returned a non-L{failure.Failure}.
+        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}
+
+        @raise SynchronousTestCase.failureException: If the
+            L{Deferred<twisted.internet.defer.Deferred>} has no result or has a
+            failure result.
+
+        @return: The result of C{deferred}.
+        """
+        result = []
+        deferred.addBoth(result.append)
+        if not result:
+            self.fail(
+                "Success result expected on %r, found no result instead" % (
+                    deferred,))
+        elif isinstance(result[0], failure.Failure):
+            self.fail(
+                "Success result expected on %r, "
+                "found failure result (%r) instead" % (deferred, result[0]))
+        else:
+            return result[0]
+
+
+    def failureResultOf(self, deferred):
+        """
+        Return the current failure result of C{deferred} or raise
+        C{self.failureException}.
+
+        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>} which
+            has a failure result.  This means
+            L{Deferred.callback<twisted.internet.defer.Deferred.callback>} or
+            L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has
+            been called on it and it has reached the end of its callback chain
+            and the last callback or errback raised an exception or returned a
+            L{failure.Failure}.
+        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}
+
+        @raise SynchronousTestCase.failureException: If the
+            L{Deferred<twisted.internet.defer.Deferred>} has no result or has a
+            success result.
+
+        @return: The failure result of C{deferred}.
+        @rtype: L{failure.Failure}
+        """
+        result = []
+        deferred.addBoth(result.append)
+        if not result:
+            self.fail(
+                "Failure result expected on %r, found no result instead" % (
+                    deferred,))
+        elif not isinstance(result[0], failure.Failure):
+            self.fail(
+                "Failure result expected on %r, "
+                "found success result (%r) instead" % (deferred, result[0]))
+        else:
+            return result[0]
+
+
+
+    def assertNoResult(self, deferred):
+        """
+        Assert that C{deferred} does not have a result at this point.
+
+        @param deferred: A L{Deferred<twisted.internet.defer.Deferred>} without
+            a result.  This means that neither
+            L{Deferred.callback<twisted.internet.defer.Deferred.callback>} nor
+            L{Deferred.errback<twisted.internet.defer.Deferred.errback>} has
+            been called, or that the
+            L{Deferred<twisted.internet.defer.Deferred>} is waiting on another
+            L{Deferred<twisted.internet.defer.Deferred>} for a result.
+        @type deferred: L{Deferred<twisted.internet.defer.Deferred>}
+
+        @raise SynchronousTestCase.failureException: If the
+            L{Deferred<twisted.internet.defer.Deferred>} has a result.
+        """
+        result = []
+        deferred.addBoth(result.append)
+        if result:
+            self.fail(
+                "No result expected on %r, found %r instead" % (
+                    deferred, result[0]))
+
+
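
A hedged sketch exercising the three Deferred helpers together, using L{twisted.internet.defer}; the example subclasses the C{SynchronousTestCase} defined further down in this module.

    from twisted.internet import defer

    class DeferredHelpersExample(SynchronousTestCase):
        def test_helpers(self):
            # A fired Deferred with a plain result.
            self.assertEqual(self.successResultOf(defer.succeed(7)), 7)

            # A fired Deferred with a failure result; trap() checks the type.
            f = self.failureResultOf(defer.fail(ValueError("nope")))
            f.trap(ValueError)

            # A Deferred that has not fired yet.
            self.assertNoResult(defer.Deferred())
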
+
+class _LogObserver(object):
+    """
+    Observes the Twisted logs and catches any errors.
+
+    @ivar _errors: A C{list} of L{Failure} instances which were received as
+        error events from the Twisted logging system.
+
+    @ivar _added: An C{int} giving the number of times C{_add} has been called
+        less the number of times C{_remove} has been called; used to add this
+        observer to the Twisted logging system only once, regardless of the
+        number of calls to the add method.
+
+    @ivar _ignored: A C{list} of exception types which will not be recorded.
+    """
+
+    def __init__(self):
+        self._errors = []
+        self._added = 0
+        self._ignored = []
+
+
+    def _add(self):
+        if self._added == 0:
+            log.addObserver(self.gotEvent)
+        self._added += 1
+
+
+    def _remove(self):
+        self._added -= 1
+        if self._added == 0:
+            log.removeObserver(self.gotEvent)
+
+
+    def _ignoreErrors(self, *errorTypes):
+        """
+        Do not store any errors with any of the given types.
+        """
+        self._ignored.extend(errorTypes)
+
+
+    def _clearIgnores(self):
+        """
+        Stop ignoring any errors we might currently be ignoring.
+        """
+        self._ignored = []
+
+
+    def flushErrors(self, *errorTypes):
+        """
+        Flush errors from the list of caught errors. If no arguments are
+        specified, remove all errors. If arguments are specified, only remove
+        errors of those types from the stored list.
+        """
+        if errorTypes:
+            flushed = []
+            remainder = []
+            for f in self._errors:
+                if f.check(*errorTypes):
+                    flushed.append(f)
+                else:
+                    remainder.append(f)
+            self._errors = remainder
+        else:
+            flushed = self._errors
+            self._errors = []
+        return flushed
+
+
+    def getErrors(self):
+        """
+        Return a list of errors caught by this observer.
+        """
+        return self._errors
+
+
+    def gotEvent(self, event):
+        """
+        The actual observer method. Called whenever a message is logged.
+
+        @param event: A dictionary containing the log message. Actual
+        structure undocumented (see source for L{twisted.python.log}).
+        """
+        if event.get('isError', False) and 'failure' in event:
+            f = event['failure']
+            if len(self._ignored) == 0 or not f.check(*self._ignored):
+                self._errors.append(f)
+
+
+
+_logObserver = _LogObserver()
+
+
+class SynchronousTestCase(_Assertions):
+    """
+    A unit test. The atom of the unit testing universe.
+
+    This class extends C{unittest.TestCase} from the standard library.  A number
+    of convenient testing helpers are added, including logging and warning
+    integration, monkey-patching support, and more.
+
+    To write a unit test, subclass C{SynchronousTestCase} and define a method
+    (say, 'test_foo') on the subclass. To run the test, instantiate your
+    subclass with the name of the method, and call L{run} on the instance,
+    passing a L{TestResult} object.
+
+    The C{trial} script will automatically find any C{SynchronousTestCase}
+    subclasses defined in modules beginning with 'test_' and construct test
+    cases for all methods beginning with 'test'.
+
+    If an error is logged during the test run, the test will fail with an
+    error. See L{log.err}.
+
+    @ivar failureException: An exception class, defaulting to C{FailTest}. If
+    the test method raises this exception, it will be reported as a failure,
+    rather than an exception. All of the assertion methods raise this if the
+    assertion fails.
+
+    @ivar skip: C{None} or a string explaining why this test is to be
+    skipped. If defined, the test will not be run. Instead, it will be
+    reported to the result object as 'skipped' (if the C{TestResult} supports
+    skipping).
+
+    @ivar todo: C{None}, a string or a tuple of C{(errors, reason)} where
+    C{errors} is either an exception class or an iterable of exception
+    classes, and C{reason} is a string. See L{Todo} or L{makeTodo} for more
+    information.
+
+    @ivar suppress: C{None} or a list of tuples of C{(args, kwargs)} to be
+    passed to C{warnings.filterwarnings}. Use these to suppress warnings
+    raised in a test. Useful for testing deprecated code. See also
+    L{util.suppress}.
+    """
+    failureException = FailTest
+
+    def __init__(self, methodName='runTest'):
+        super(SynchronousTestCase, self).__init__(methodName)
+        self._passed = False
+        self._cleanups = []
+        self._testMethodName = methodName
+        testMethod = getattr(self, methodName)
+        self._parents = [
+            testMethod, self, sys.modules.get(self.__class__.__module__)]
+
+
+    # Override the comparison defined by the base TestCase which considers
+    # instances of the same class with the same _testMethodName to be
+    # equal.  Since trial puts TestCase instances into a set, that
+    # definition of comparison makes it impossible to run the same test
+    # method twice.  Most likely, trial should stop using a set to hold
+    # tests, but until it does, this is necessary on Python 2.6. -exarkun
+    def __eq__(self, other):
+        return self is other
+
+    def __ne__(self, other):
+        return self is not other
+
+    def __hash__(self):
+        return hash((self.__class__, self._testMethodName))
+
+
+    def shortDescription(self):
+        desc = super(SynchronousTestCase, self).shortDescription()
+        if desc is None:
+            return self._testMethodName
+        return desc
+
+
+    def getSkip(self):
+        """
+        Return the skip reason set on this test, if any is set. Checks on the
+        instance first, then the class, then the module, then packages. As
+        soon as it finds something with a C{skip} attribute, returns that.
+        Returns C{None} if it cannot find anything. See L{TestCase} docstring
+        for more details.
+        """
+        return util.acquireAttribute(self._parents, 'skip', None)
+
+
+    def getTodo(self):
+        """
+        Return a L{Todo} object if the test is marked todo. Checks on the
+        instance first, then the class, then the module, then packages. As
+        soon as it finds something with a C{todo} attribute, returns that.
+        Returns C{None} if it cannot find anything. See L{TestCase} docstring
+        for more details.
+        """
+        todo = util.acquireAttribute(self._parents, 'todo', None)
+        if todo is None:
+            return None
+        return makeTodo(todo)
+
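
A hedged sketch of the C{skip} and C{todo} attributes that C{getSkip} and C{getTodo} look up on the test method, the case, or the module; the test bodies and reasons are invented, and the example subclasses the C{SynchronousTestCase} being defined here.

    class AttributeExample(SynchronousTestCase):
        def test_elsewhereOnly(self):
            pass
        test_elsewhereOnly.skip = "only meaningful on another platform"

        def test_knownBug(self):
            raise RuntimeError("still broken")
        test_knownBug.todo = (RuntimeError, "tracked as a known bug")
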
+
+    def runTest(self):
+        """
+        If no C{methodName} argument is passed to the constructor, L{run} will
+        treat this method as the thing with the actual test inside.
+        """
+
+
+    def run(self, result):
+        """
+        Run the test case, storing the results in C{result}.
+
+        First runs C{setUp} on self, then runs the test method (defined in the
+        constructor), then runs C{tearDown}.  As with the standard library
+        L{unittest.TestCase}, the return value of these methods is disregarded.
+        In particular, returning a L{Deferred<twisted.internet.defer.Deferred>}
+        has no special additional consequences.
+
+        @param result: A L{TestResult} object.
+        """
+        log.msg("--> %s <--" % (self.id()))
+        new_result = itrial.IReporter(result, None)
+        if new_result is None:
+            result = PyUnitResultAdapter(result)
+        else:
+            result = new_result
+        result.startTest(self)
+        if self.getSkip(): # don't run test methods that are marked as .skip
+            result.addSkip(self, self.getSkip())
+            result.stopTest(self)
+            return
+
+        self._passed = False
+        self._warnings = []
+
+        self._installObserver()
+        # All the code inside _runFixturesAndTest will be run such that warnings
+        # emitted by it will be collected and retrievable by flushWarnings.
+        _collectWarnings(self._warnings.append, self._runFixturesAndTest, result)
+
+        # Any collected warnings which the test method didn't flush get
+        # re-emitted so they'll be logged or show up on stdout or whatever.
+        for w in self.flushWarnings():
+            try:
+                warnings.warn_explicit(**w)
+            except:
+                result.addError(self, failure.Failure())
+
+        result.stopTest(self)
+
+
+    def addCleanup(self, f, *args, **kwargs):
+        """
+        Add the given function to a list of functions to be called after the
+        test has run, but before C{tearDown}.
+
+        Functions will be run in reverse order of being added. This helps
+        ensure that tear down complements set up.
+
+        As with all aspects of L{SynchronousTestCase}, Deferreds are not
+        supported in cleanup functions.
+        """
+        self._cleanups.append((f, args, kwargs))
+
+
+    def patch(self, obj, attribute, value):
+        """
+        Monkey patch an object for the duration of the test.
+
+        The monkey patch will be reverted at the end of the test using the
+        L{addCleanup} mechanism.
+
+        The L{monkey.MonkeyPatcher} is returned so that users can restore and
+        re-apply the monkey patch within their tests.
+
+        @param obj: The object to monkey patch.
+        @param attribute: The name of the attribute to change.
+        @param value: The value to set the attribute to.
+        @return: A L{monkey.MonkeyPatcher} object.
+        """
+        monkeyPatch = monkey.MonkeyPatcher((obj, attribute, value))
+        monkeyPatch.patch()
+        self.addCleanup(monkeyPatch.restore)
+        return monkeyPatch
+
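
A hedged sketch of C{patch} in a test, including an early restore and re-apply through the returned L{monkey.MonkeyPatcher}; the C{Config} object is invented for the example.

    class Config(object):
        timeout = 30

    config = Config()

    class PatchExample(SynchronousTestCase):
        def test_shortTimeout(self):
            patcher = self.patch(config, "timeout", 1)
            self.assertEqual(config.timeout, 1)

            patcher.restore()                 # can be undone early...
            self.assertEqual(config.timeout, 30)

            patcher.patch()                   # ...and re-applied for the rest of the test
            self.assertEqual(config.timeout, 1)
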
+
+    def flushLoggedErrors(self, *errorTypes):
+        """
+        Remove stored errors received from the log.
+
+        C{TestCase} stores each error logged during the run of the test and
+        reports them as errors during the cleanup phase (after C{tearDown}).
+
+        @param *errorTypes: If unspecified, flush all errors. Otherwise, only
+        flush errors that match the given types.
+
+        @return: A list of failures that have been removed.
+        """
+        return self._observer.flushErrors(*errorTypes)
+
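
A hedged sketch of C{flushLoggedErrors}: an error logged with L{log.err} during the test is flushed so it is not re-reported as a test error during cleanup; the C{ValueError} is invented for the example.

    class FlushLoggedErrorsExample(SynchronousTestCase):
        def test_expectedLoggedError(self):
            log.err(ValueError("logged but expected"))

            flushed = self.flushLoggedErrors(ValueError)
            self.assertEqual(len(flushed), 1)
            flushed[0].trap(ValueError)
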
+
+    def flushWarnings(self, offendingFunctions=None):
+        """
+        Remove stored warnings from the list of captured warnings and return
+        them.
+
+        @param offendingFunctions: If C{None}, all warnings issued during the
+            currently running test will be flushed.  Otherwise, only warnings
+            which I{point} to a function included in this list will be flushed.
+            All warnings include a filename and source line number; if these
+            parts of a warning point to a source line which is part of a
+            function, then the warning I{points} to that function.
+        @type offendingFunctions: C{NoneType} or L{list} of functions or methods.
+
+        @raise ValueError: If C{offendingFunctions} is not C{None} and includes
+            an object which is not a L{types.FunctionType} or
+            L{types.MethodType} instance.
+
+        @return: A C{list}, each element of which is a C{dict} giving
+            information about one warning which was flushed by this call.  The
+            keys of each C{dict} are:
+
+                - C{'message'}: The string which was passed as the I{message}
+                  parameter to L{warnings.warn}.
+
+                - C{'category'}: The warning subclass which was passed as the
+                  I{category} parameter to L{warnings.warn}.
+
+                - C{'filename'}: The name of the file containing the definition
+                  of the code object which was C{stacklevel} frames above the
+                  call to L{warnings.warn}, where C{stacklevel} is the value of
+                  the C{stacklevel} parameter passed to L{warnings.warn}.
+
+                - C{'lineno'}: The source line associated with the active
+                  instruction of the code object which was C{stacklevel}
+                  frames above the call to L{warnings.warn}, where
+                  C{stacklevel} is the value of the C{stacklevel} parameter
+                  passed to L{warnings.warn}.
+        """
+        if offendingFunctions is None:
+            toFlush = self._warnings[:]
+            self._warnings[:] = []
+        else:
+            toFlush = []
+            for aWarning in self._warnings:
+                for aFunction in offendingFunctions:
+                    if not isinstance(aFunction, (
+                            types.FunctionType, types.MethodType)):
+                        raise ValueError("%r is not a function or method" % (
+                                aFunction,))
+
+                    # inspect.getabsfile(aFunction) sometimes returns a
+                    # filename which disagrees with the filename the warning
+                    # system generates.  This seems to be because a
+                    # function's code object doesn't deal with source files
+                    # being renamed.  inspect.getabsfile(module) seems
+                    # better (or at least agrees with the warning system
+                    # more often), and does some normalization for us which
+                    # is desirable.  inspect.getmodule() is attractive, but
+                    # somewhat broken in Python < 2.6.  See Python bug 4845.
+                    aModule = sys.modules[aFunction.__module__]
+                    filename = inspect.getabsfile(aModule)
+
+                    if filename != os.path.normcase(aWarning.filename):
+                        continue
+                    lineStarts = list(_findlinestarts(aFunction.__code__))
+                    first = lineStarts[0][1]
+                    last = lineStarts[-1][1]
+                    if not (first <= aWarning.lineno <= last):
+                        continue
+                    # The warning points to this function, flush it and move on
+                    # to the next warning.
+                    toFlush.append(aWarning)
+                    break
+            # Remove everything which is being flushed.
+            list(map(self._warnings.remove, toFlush))
+
+        return [
+            {'message': w.message, 'category': w.category,
+             'filename': w.filename, 'lineno': w.lineno}
+            for w in toFlush]
+
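
A hedged sketch of C{flushWarnings}, flushing only the warnings that point at a particular function; the C{whine} helper and its message are invented for the example.

    import warnings

    class FlushWarningsExample(SynchronousTestCase):
        def test_flushOnlyWhine(self):
            def whine():
                warnings.warn("stale setting", UserWarning)

            whine()
            flushed = self.flushWarnings(offendingFunctions=[whine])
            self.assertEqual(len(flushed), 1)
            self.assertEqual(flushed[0]['message'], "stale setting")
            self.assertEqual(flushed[0]['category'], UserWarning)
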
+
+    def callDeprecated(self, version, f, *args, **kwargs):
+        """
+        Call a function that should have been deprecated at a specific version
+        and in favor of a specific alternative, and assert that it was thusly
+        deprecated.
+
+        @param version: A 2-sequence of (since, replacement), where C{since} is
+            the first L{version<twisted.python.versions.Version>} that C{f}
+            should have been deprecated since, and C{replacement} is a suggested
+            replacement for the deprecated functionality, as described by
+            L{twisted.python.deprecate.deprecated}.  If there is no suggested
+            replacement, this parameter may also be simply a
+            L{version<twisted.python.versions.Version>} by itself.
+
+        @param f: The deprecated function to call.
+
+        @param args: The arguments to pass to C{f}.
+
+        @param kwargs: The keyword arguments to pass to C{f}.
+
+        @return: Whatever C{f} returns.
+
+        @raise: Whatever C{f} raises.  If any exception is
+            raised by C{f}, though, no assertions will be made about emitted
+            deprecations.
+
+        @raise FailTest: if no warnings were emitted by C{f}, or if the
+            L{DeprecationWarning} emitted did not produce the canonical
+            please-use-something-else message that is standard for Twisted
+            deprecations according to the given version and replacement.
+        """
+        result = f(*args, **kwargs)
+        warningsShown = self.flushWarnings([self.callDeprecated])
+        try:
+            info = list(version)
+        except TypeError:
+            since = version
+            replacement = None
+        else:
+            [since, replacement] = info
+
+        if len(warningsShown) == 0:
+            self.fail('%r is not deprecated.' % (f,))
+
+        observedWarning = warningsShown[0]['message']
+        expectedWarning = getDeprecationWarningString(
+            f, since, replacement=replacement)
+        self.assertEqual(expectedWarning, observedWarning)
+
+        return result
+
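
A hedged sketch of C{callDeprecated} with a function deprecated via L{twisted.python.deprecate.deprecated}; the C{oldAdd} function and the "Example" package version are invented for the example.

    from twisted.python.deprecate import deprecated
    from twisted.python.versions import Version

    @deprecated(Version("Example", 1, 2, 0))
    def oldAdd(a, b):
        return a + b

    class CallDeprecatedExample(SynchronousTestCase):
        def test_oldAdd(self):
            total = self.callDeprecated(Version("Example", 1, 2, 0), oldAdd, 2, 3)
            self.assertEqual(total, 5)
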
+
+    def mktemp(self):
+        """
+        Returns a unique name that may be used as either a temporary directory
+        or filename.
+
+        @note: you must call os.mkdir on the value returned from this method if
+            you wish to use it as a directory!
+
+        @return: C{str}
+        """
+        MAX_FILENAME = 32 # some platforms limit lengths of filenames
+        base = os.path.join(self.__class__.__module__[:MAX_FILENAME],
+                            self.__class__.__name__[:MAX_FILENAME],
+                            self._testMethodName[:MAX_FILENAME])
+        if not os.path.exists(base):
+            os.makedirs(base)
+        dirname = tempfile.mkdtemp('', '', base)
+        return os.path.join(dirname, 'temp')
+
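
A hedged sketch of C{mktemp} used for both a scratch file and a scratch directory.

    import os

    class MktempExample(SynchronousTestCase):
        def test_scratchSpace(self):
            path = self.mktemp()          # a unique name; nothing is created yet
            with open(path, "w") as f:
                f.write("some data")

            directory = self.mktemp()
            os.mkdir(directory)           # must mkdir before using it as a directory
            self.assertTrue(os.path.isdir(directory))
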
+
+    def _getSuppress(self):
+        """
+        Returns any warning suppressions set for this test. Checks on the
+        instance first, then the class, then the module, then packages. As
+        soon as it finds something with a C{suppress} attribute, returns that.
+        Returns an empty list (i.e. suppress no warnings) if it cannot find
+        anything. See L{TestCase} docstring for more details.
+        """
+        return util.acquireAttribute(self._parents, 'suppress', [])
+
+
+    def _getSkipReason(self, method, skip):
+        """
+        Return the reason to use for skipping a test method.
+
+        @param method: The method which produced the skip.
+        @param skip: A L{SkipTest} instance raised by C{method}.
+        """
+        if len(skip.args) > 0:
+            return skip.args[0]
+
+        warnAboutFunction(
+            method,
+            "Do not raise unittest.SkipTest with no arguments! Give a reason "
+            "for skipping tests!")
+        return skip
+
+
+    def _run(self, suppress, todo, method, result):
+        """
+        Run a single method, either a test method or fixture.
+
+        @param suppress: Any warnings to suppress, as defined by the C{suppress}
+            attribute on this method, test case, or the module it is defined in.
+
+        @param todo: Any expected failure or failures, as defined by the C{todo}
+            attribute on this method, test case, or the module it is defined in.
+
+        @param method: The method to run.
+
+        @param result: The TestResult instance to which to report results.
+
+        @return: C{True} if the method fails and no further method/fixture calls
+            should be made, C{False} otherwise.
+        """
+        try:
+            runWithWarningsSuppressed(suppress, method)
+        except SkipTest as e:
+            result.addSkip(self, self._getSkipReason(method, e))
+        except:
+            reason = failure.Failure()
+            if todo is None or not todo.expected(reason):
+                if reason.check(self.failureException):
+                    addResult = result.addFailure
+                else:
+                    addResult = result.addError
+                addResult(self, reason)
+            else:
+                result.addExpectedFailure(self, reason, todo)
+        else:
+            return False
+        return True
+
+
+    def _runFixturesAndTest(self, result):
+        """
+        Run C{setUp}, a test method, test cleanups, and C{tearDown}.
+
+        @param result: The TestResult instance to which to report results.
+        """
+        suppress = self._getSuppress()
+        try:
+            if self._run(suppress, None, self.setUp, result):
+                return
+
+            todo = self.getTodo()
+            method = getattr(self, self._testMethodName)
+            if self._run(suppress, todo, method, result):
+                return
+        finally:
+            self._runCleanups(result)
+
+        if todo:
+            result.addUnexpectedSuccess(self, todo)
+
+        if self._run(suppress, None, self.tearDown, result):
+            return
+
+        passed = True
+        for error in self._observer.getErrors():
+            result.addError(self, error)
+            passed = False
+        self._observer.flushErrors()
+        self._removeObserver()
+
+        if passed and not todo:
+            result.addSuccess(self)
+
+
+    def _runCleanups(self, result):
+        """
+        Synchronously run any cleanups which have been added.
+        """
+        while len(self._cleanups) > 0:
+            f, args, kwargs = self._cleanups.pop()
+            try:
+                f(*args, **kwargs)
+            except:
+                f = failure.Failure()
+                result.addError(self, f)
+
+
+    def _installObserver(self):
+        self._observer = _logObserver
+        self._observer._add()
+
+
+    def _removeObserver(self):
+        self._observer._remove()
diff --git a/ThirdParty/Twisted/twisted/trial/itrial.py b/ThirdParty/Twisted/twisted/trial/itrial.py
new file mode 100644
index 0000000..1f81ea7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/itrial.py
@@ -0,0 +1,253 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Interfaces for Trial.
+
+Maintainer: Jonathan Lange
+"""
+
+from __future__ import division, absolute_import
+
+import zope.interface as zi
+from zope.interface import Attribute
+
+
+class ITestCase(zi.Interface):
+    """
+    The interface that a test case must implement in order to be used in Trial.
+    """
+
+    failureException = zi.Attribute(
+        "The exception class that is raised by failed assertions")
+
+
+    def __call__(result):
+        """
+        Run the test. Should always do exactly the same thing as run().
+        """
+
+
+    def countTestCases():
+        """
+        Return the number of tests in this test case. Usually 1.
+        """
+
+
+    def id():
+        """
+        Return a unique identifier for the test, usually the fully-qualified
+        Python name.
+        """
+
+
+    def run(result):
+        """
+        Run the test, storing the results in C{result}.
+
+        @param result: A L{TestResult}.
+        """
+
+
+    def shortDescription():
+        """
+        Return a short description of the test.
+        """
+
+
+
+class IReporter(zi.Interface):
+    """
+    I report results from a run of a test suite.
+    """
+
+    stream = zi.Attribute(
+        "Deprecated in Twisted 8.0. "
+        "The io-stream that this reporter will write to")
+    tbformat = zi.Attribute("Either 'default', 'brief', or 'verbose'")
+    args = zi.Attribute(
+        "Additional string argument passed from the command line")
+    shouldStop = zi.Attribute(
+        """
+        A boolean indicating that this reporter would like the test run to stop.
+        """)
+    separator = Attribute(
+        "Deprecated in Twisted 8.0. "
+        "A value which will occasionally be passed to the L{write} method.")
+    testsRun = Attribute(
+        """
+        The number of tests that seem to have been run according to this
+        reporter.
+        """)
+
+
+    def startTest(method):
+        """
+        Report the beginning of a run of a single test method.
+
+        @param method: an object that is adaptable to ITestMethod
+        """
+
+
+    def stopTest(method):
+        """
+        Report the status of a single test method
+
+        @param method: an object that is adaptable to ITestMethod
+        """
+
+
+    def startSuite(name):
+        """
+        Deprecated in Twisted 8.0.
+
+        Suites which wish to appear in reporter output should call this
+        before running their tests.
+        """
+
+
+    def endSuite(name):
+        """
+        Deprecated in Twisted 8.0.
+
+        Called at the end of a suite, if and only if that suite has called
+        C{startSuite}.
+        """
+
+
+    def cleanupErrors(errs):
+        """
+        Deprecated in Twisted 8.0.
+
+        Called when the reactor has been left in a 'dirty' state
+
+        @param errs: a list of L{twisted.python.failure.Failure}s
+        """
+
+
+    def upDownError(userMeth, warn=True, printStatus=True):
+        """
+        Deprecated in Twisted 8.0.
+
+        Called when an error occurs in a setUp* or tearDown* method
+
+        @param warn: indicates whether or not the reporter should emit a
+                     warning about the error
+        @type warn: Boolean
+        @param printStatus: indicates whether or not the reporter should
+                            print the name of the method and the status
+                            message appropriate for the type of error
+        @type printStatus: Boolean
+        """
+
+
+    def addSuccess(test):
+        """
+        Record that test passed.
+        """
+
+
+    def addError(test, error):
+        """
+        Record that a test has raised an unexpected exception.
+
+        @param test: The test that has raised an error.
+        @param error: The error that the test raised. It will either be a
+            three-tuple in the style of C{sys.exc_info()} or a
+            L{Failure<twisted.python.failure.Failure>} object.
+        """
+
+
+    def addFailure(test, failure):
+        """
+        Record that a test has failed with the given failure.
+
+        @param test: The test that has failed.
+        @param failure: The failure that the test failed with. It will
+            either be a three-tuple in the style of C{sys.exc_info()}
+            or a L{Failure<twisted.python.failure.Failure>} object.
+        """
+
+
+    def addExpectedFailure(test, failure, todo):
+        """
+        Record that the given test failed, and was expected to do so.
+
+        @type test: L{pyunit.TestCase}
+        @param test: The test which this is about.
+        @type failure: L{failure.Failure}
+        @param failure: The failure which this test failed with.
+        @type todo: L{unittest.Todo}
+        @param todo: The reason for the test's TODO status.
+        """
+
+
+    def addUnexpectedSuccess(test, todo):
+        """
+        Record that the given test succeeded, even though it was expected to fail.
+
+        @type test: L{pyunit.TestCase}
+        @param test: The test which this is about.
+        @type todo: L{unittest.Todo}
+        @param todo: The reason for the test's TODO status.
+        """
+
+
+    def addSkip(test, reason):
+        """
+        Record that a test has been skipped for the given reason.
+
+        @param test: The test that has been skipped.
+        @param reason: An object that the test case has specified as the reason
+            for skipping the test.
+        """
+
+
+    def printSummary():
+        """
+        Deprecated in Twisted 8.0, use L{done} instead.
+
+        Present a summary of the test results.
+        """
+
+
+    def printErrors():
+        """
+        Deprecated in Twisted 8.0, use L{done} instead.
+
+        Present the errors that have occurred during the test run. This method
+        will be called after all tests have been run.
+        """
+
+
+    def write(string):
+        """
+        Deprecated in Twisted 8.0, use L{done} instead.
+
+        Display a string to the user, without appending a new line.
+        """
+
+
+    def writeln(string):
+        """
+        Deprecated in Twisted 8.0, use L{done} instead.
+
+        Display a string to the user, appending a new line.
+        """
+
+    def wasSuccessful():
+        """
+        Return a boolean indicating whether all test results that were reported
+        to this reporter were successful or not.
+        """
+
+
+    def done():
+        """
+        Called when the test run is complete.
+
+        This gives the result object an opportunity to display a summary of
+        information to the user. Once you have called C{done} on an
+        L{IReporter} object, you should assume that the L{IReporter} object is
+        no longer usable.
+        """
diff --git a/ThirdParty/Twisted/twisted/trial/reporter.py b/ThirdParty/Twisted/twisted/trial/reporter.py
new file mode 100644
index 0000000..95bcab8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/reporter.py
@@ -0,0 +1,1249 @@
+# -*- test-case-name: twisted.trial.test.test_reporter -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+# Maintainer: Jonathan Lange
+
+"""
+Defines classes that handle the results of tests.
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+import os
+import time
+import warnings
+import unittest as pyunit
+
+from zope.interface import implementer
+
+from twisted.python.compat import set
+from twisted.python import _reflectpy3 as reflect, log
+from twisted.python.components import proxyForInterface
+from twisted.python.failure import Failure
+from twisted.python._utilpy3 import untilConcludes
+try:
+    from collections import OrderedDict
+except ImportError:
+    from twisted.python.util import OrderedDict
+from twisted.trial import itrial, util
+
+try:
+    from subunit import TestProtocolClient
+except ImportError:
+    TestProtocolClient = None
+
+
+class BrokenTestCaseWarning(Warning):
+    """
+    Emitted as a warning when an exception occurs in one of setUp or tearDown.
+    """
+
+
+class SafeStream(object):
+    """
+    Wraps a stream object so that all C{write} calls are wrapped in
+    L{untilConcludes<twisted.python.util.untilConcludes>}.
+    """
+
+    def __init__(self, original):
+        self.original = original
+
+    def __getattr__(self, name):
+        return getattr(self.original, name)
+
+    def write(self, *a, **kw):
+        return untilConcludes(self.original.write, *a, **kw)
+
+
+
+@implementer(itrial.IReporter)
+class TestResult(pyunit.TestResult, object):
+    """
+    Accumulates the results of several L{twisted.trial.unittest.TestCase}s.
+
+    @ivar successes: count the number of successes achieved by the test run.
+    @type successes: C{int}
+    """
+
+    def __init__(self):
+        super(TestResult, self).__init__()
+        self.skips = []
+        self.expectedFailures = []
+        self.unexpectedSuccesses = []
+        self.successes = 0
+        self._timings = []
+
+    def __repr__(self):
+        return ('<%s run=%d errors=%d failures=%d todos=%d skips=%d unexpectedSuccesses=%d>'
+                % (reflect.qual(self.__class__), self.testsRun,
+                   len(self.errors), len(self.failures),
+                   len(self.expectedFailures), len(self.skips),
+                   len(self.unexpectedSuccesses)))
+
+    def _getTime(self):
+        return time.time()
+
+    def _getFailure(self, error):
+        """
+        Convert a C{sys.exc_info()}-style tuple to a L{Failure}, if necessary.
+        """
+        if isinstance(error, tuple):
+            return Failure(error[1], error[0], error[2])
+        return error
+
+    def startTest(self, test):
+        """
+        This must be called before the given test is commenced.
+
+        @type test: L{pyunit.TestCase}
+        """
+        super(TestResult, self).startTest(test)
+        self._testStarted = self._getTime()
+
+    def stopTest(self, test):
+        """
+        This must be called after the given test is completed.
+
+        @type test: L{pyunit.TestCase}
+        """
+        super(TestResult, self).stopTest(test)
+        self._lastTime = self._getTime() - self._testStarted
+
+    def addFailure(self, test, fail):
+        """
+        Report a failed assertion for the given test.
+
+        @type test: L{pyunit.TestCase}
+        @type fail: L{Failure} or L{tuple}
+        """
+        self.failures.append((test, self._getFailure(fail)))
+
+    def addError(self, test, error):
+        """
+        Report an error that occurred while running the given test.
+
+        @type test: L{pyunit.TestCase}
+        @type error: L{Failure} or L{tuple}
+        """
+        self.errors.append((test, self._getFailure(error)))
+
+    def addSkip(self, test, reason):
+        """
+        Report that the given test was skipped.
+
+        In Trial, tests can be 'skipped'. Tests are skipped mostly because there
+        is some platform or configuration issue that prevents them from being
+        run correctly.
+
+        @type test: L{pyunit.TestCase}
+        @type reason: L{str}
+        """
+        self.skips.append((test, reason))
+
+    def addUnexpectedSuccess(self, test, todo):
+        """Report that the given test succeeded against expectations.
+
+        In Trial, tests can be marked 'todo'. That is, they are expected to fail.
+        When a test that is expected to fail instead succeeds, it should call
+        this method to report the unexpected success.
+
+        @type test: L{pyunit.TestCase}
+        @type todo: L{unittest.Todo}
+        """
+        # XXX - 'todo' should just be a string
+        self.unexpectedSuccesses.append((test, todo))
+
+    def addExpectedFailure(self, test, error, todo):
+        """Report that the given test failed, and was expected to do so.
+
+        In Trial, tests can be marked 'todo'. That is, they are expected to fail.
+
+        @type test: L{pyunit.TestCase}
+        @type error: L{Failure}
+        @type todo: L{unittest.Todo}
+        """
+        # XXX - 'todo' should just be a string
+        self.expectedFailures.append((test, error, todo))
+
+    def addSuccess(self, test):
+        """Report that the given test succeeded.
+
+        @type test: L{pyunit.TestCase}
+        """
+        self.successes += 1
+
+    def upDownError(self, method, error, warn, printStatus):
+        warnings.warn("upDownError is deprecated in Twisted 8.0.",
+                      category=DeprecationWarning, stacklevel=3)
+
+    def cleanupErrors(self, errs):
+        """Report an error that occurred during the cleanup between tests.
+        """
+        warnings.warn("Cleanup errors are actual errors. Use addError. "
+                      "Deprecated in Twisted 8.0",
+                      category=DeprecationWarning, stacklevel=2)
+
+    def startSuite(self, name):
+        warnings.warn("startSuite deprecated in Twisted 8.0",
+                      category=DeprecationWarning, stacklevel=2)
+
+    def endSuite(self, name):
+        warnings.warn("endSuite deprecated in Twisted 8.0",
+                      category=DeprecationWarning, stacklevel=2)
+
+
+    def done(self):
+        """
+        The test suite has finished running.
+        """
+
+
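+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper and the nested test case are hypothetical names. It shows a plain
+# pyunit test being driven against a TestResult by hand.
+def _exampleTestResultUsage():
+    """
+    Run one trivial test and inspect the accumulated counts.
+    """
+    class _Sample(pyunit.TestCase):
+        def test_ok(self):
+            pass
+    result = TestResult()
+    _Sample('test_ok').run(result)
+    return result.successes, result.wasSuccessful()   # (1, True)
+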
+
+@implementer(itrial.IReporter)
+class TestResultDecorator(proxyForInterface(itrial.IReporter,
+                                            "_originalReporter")):
+    """
+    Base class for TestResult decorators.
+
+    @ivar _originalReporter: The wrapped instance of reporter.
+    @type _originalReporter: A provider of L{itrial.IReporter}
+    """
+
+
+
+@implementer(itrial.IReporter)
+class UncleanWarningsReporterWrapper(TestResultDecorator):
+    """
+    A wrapper for a reporter that converts L{util.DirtyReactorAggregateError}s
+    to warnings.
+    """
+
+    def addError(self, test, error):
+        """
+        If the error is a L{util.DirtyReactorAggregateError}, instead of
+        reporting it as a normal error, throw a warning.
+        """
+
+        if (isinstance(error, Failure)
+            and error.check(util.DirtyReactorAggregateError)):
+            warnings.warn(error.getErrorMessage())
+        else:
+            self._originalReporter.addError(test, error)
+
+
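+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper name is hypothetical. Decorators built on TestResultDecorator proxy
+# every IReporter method to the wrapped reporter, so only addError needs to
+# be special-cased above.
+def _exampleDecoratorProxy():
+    """
+    Wrap a TestResult and show that addSuccess passes straight through.
+    """
+    wrapped = UncleanWarningsReporterWrapper(TestResult())
+    wrapped.addSuccess(pyunit.FunctionTestCase(lambda: None))
+    return wrapped._originalReporter.successes   # 1
+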
+
+class _AdaptedReporter(TestResultDecorator):
+    """
+    TestResult decorator that makes sure the tests it passes on to the wrapped
+    reporter have been adapted with a particular test adapter.
+    """
+
+    def __init__(self, original, testAdapter):
+        """
+        Construct an L{_AdaptedReporter}.
+
+        @param original: An L{itrial.IReporter}.
+        @param testAdapter: A callable that returns an L{itrial.ITestCase}.
+        """
+        TestResultDecorator.__init__(self, original)
+        self.testAdapter = testAdapter
+
+
+    def addError(self, test, error):
+        """
+        See L{itrial.IReporter}.
+        """
+        test = self.testAdapter(test)
+        return self._originalReporter.addError(test, error)
+
+
+    def addExpectedFailure(self, test, failure, todo):
+        """
+        See L{itrial.IReporter}.
+        """
+        return self._originalReporter.addExpectedFailure(
+            self.testAdapter(test), failure, todo)
+
+
+    def addFailure(self, test, failure):
+        """
+        See L{itrial.IReporter}.
+        """
+        test = self.testAdapter(test)
+        return self._originalReporter.addFailure(test, failure)
+
+
+    def addSkip(self, test, skip):
+        """
+        See L{itrial.IReporter}.
+        """
+        test = self.testAdapter(test)
+        return self._originalReporter.addSkip(test, skip)
+
+
+    def addUnexpectedSuccess(self, test, todo):
+        """
+        See L{itrial.IReporter}.
+        """
+        test = self.testAdapter(test)
+        return self._originalReporter.addUnexpectedSuccess(test, todo)
+
+
+    def startTest(self, test):
+        """
+        See L{itrial.IReporter}.
+        """
+        return self._originalReporter.startTest(self.testAdapter(test))
+
+
+    def stopTest(self, test):
+        """
+        See L{itrial.IReporter}.
+        """
+        return self._originalReporter.stopTest(self.testAdapter(test))
+
+
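+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper name is hypothetical and the identity function stands in for a real
+# adapter such as ITestCase.
+def _exampleAdaptedReporter():
+    """
+    Every test handed to the wrapped reporter goes through the adapter first.
+    """
+    reporter = _AdaptedReporter(TestResult(), lambda test: test)
+    case = pyunit.FunctionTestCase(lambda: None)
+    reporter.startTest(case)
+    reporter.addSuccess(case)
+    reporter.stopTest(case)
+    return reporter._originalReporter.successes   # 1
+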
+
+@implementer(itrial.IReporter)
+class Reporter(TestResult):
+    """
+    A basic L{TestResult} with support for writing to a stream.
+
+    @ivar _startTime: The time when the first test was started. It defaults to
+        C{None}, which means that no test was actually launched.
+    @type _startTime: C{float} or C{NoneType}
+
+    @ivar _warningCache: A C{set} of tuples of warning message (file, line,
+        text, category) which have already been written to the output stream
+        during the currently executing test.  This is used to avoid writing
+        duplicates of the same warning to the output stream.
+    @type _warningCache: C{set}
+
+    @ivar _publisher: The log publisher which will be observed for warning
+        events.
+    @type _publisher: L{LogPublisher} (or another type sufficiently similar)
+    """
+
+    _separator = '-' * 79
+    _doubleSeparator = '=' * 79
+
+    def __init__(self, stream=sys.stdout, tbformat='default', realtime=False,
+                 publisher=None):
+        super(Reporter, self).__init__()
+        self._stream = SafeStream(stream)
+        self.tbformat = tbformat
+        self.realtime = realtime
+        self._startTime = None
+        self._warningCache = set()
+
+        # Start observing log events so as to be able to report warnings.
+        self._publisher = publisher
+        if publisher is not None:
+            publisher.addObserver(self._observeWarnings)
+
+
+    def _observeWarnings(self, event):
+        """
+        Observe warning events and write them to C{self._stream}.
+
+        This method is a log observer which will be registered with
+        C{self._publisher.addObserver}.
+
+        @param event: A C{dict} from the logging system.  If it has a
+            C{'warning'} key, a logged warning will be extracted from it and
+            possibly written to C{self.stream}.
+        """
+        if 'warning' in event:
+            key = (event['filename'], event['lineno'],
+                   event['category'].split('.')[-1],
+                   str(event['warning']))
+            if key not in self._warningCache:
+                self._warningCache.add(key)
+                self._stream.write('%s:%s: %s: %s\n' % key)
+
+
+    def stream(self):
+        warnings.warn("stream is deprecated in Twisted 8.0.",
+                      category=DeprecationWarning, stacklevel=2)
+        return self._stream
+    stream = property(stream)
+
+
+    def separator(self):
+        warnings.warn("separator is deprecated in Twisted 8.0.",
+                      category=DeprecationWarning, stacklevel=2)
+        return self._separator
+    separator = property(separator)
+
+
+    def startTest(self, test):
+        """
+        Called when a test begins to run. Records the time when it was first
+        called and resets the warning cache.
+
+        @param test: L{ITestCase}
+        """
+        super(Reporter, self).startTest(test)
+        if self._startTime is None:
+            self._startTime = self._getTime()
+        self._warningCache = set()
+
+
+    def addFailure(self, test, fail):
+        """
+        Called when a test fails. If C{realtime} is set, then it prints the
+        error to the stream.
+
+        @param test: L{ITestCase} that failed.
+        @param fail: L{failure.Failure} containing the error.
+        """
+        super(Reporter, self).addFailure(test, fail)
+        if self.realtime:
+            fail = self.failures[-1][1] # guarantee it's a Failure
+            self._write(self._formatFailureTraceback(fail))
+
+
+    def addError(self, test, error):
+        """
+        Called when a test raises an error. If C{realtime} is set, then it
+        prints the error to the stream.
+
+        @param test: L{ITestCase} that raised the error.
+        @param error: L{failure.Failure} containing the error.
+        """
+        error = self._getFailure(error)
+        super(Reporter, self).addError(test, error)
+        if self.realtime:
+            error = self.errors[-1][1] # guarantee it's a Failure
+            self._write(self._formatFailureTraceback(error))
+
+
+    def write(self, format, *args):
+        warnings.warn("write is deprecated in Twisted 8.0.",
+                      category=DeprecationWarning, stacklevel=2)
+        self._write(format, *args)
+
+
+    def _write(self, format, *args):
+        """
+        Safely write to the reporter's stream.
+
+        @param format: A format string to write.
+        @param *args: The arguments for the format string.
+        """
+        s = str(format)
+        assert isinstance(s, type(''))
+        if args:
+            self._stream.write(s % args)
+        else:
+            self._stream.write(s)
+        untilConcludes(self._stream.flush)
+
+
+    def writeln(self, format, *args):
+        warnings.warn("writeln is deprecated in Twisted 8.0.",
+                      category=DeprecationWarning, stacklevel=2)
+        self._writeln(format, *args)
+
+
+    def _writeln(self, format, *args):
+        """
+        Safely write a line to the reporter's stream. Newline is appended to
+        the format string.
+
+        @param format: A format string to write.
+        @param *args: The arguments for the format string.
+        """
+        self._write(format, *args)
+        self._write('\n')
+
+
+    def upDownError(self, method, error, warn, printStatus):
+        super(Reporter, self).upDownError(method, error, warn, printStatus)
+        if warn:
+            tbStr = self._formatFailureTraceback(error)
+            log.msg(tbStr)
+            msg = ("caught exception in %s, your TestCase is broken\n\n%s"
+                   % (method, tbStr))
+            warnings.warn(msg, BrokenTestCaseWarning, stacklevel=2)
+
+
+    def cleanupErrors(self, errs):
+        super(Reporter, self).cleanupErrors(errs)
+        warnings.warn("%s\n%s" % ("REACTOR UNCLEAN! traceback(s) follow: ",
+                                  self._formatFailureTraceback(errs)),
+                      BrokenTestCaseWarning)
+
+
+    def _trimFrames(self, frames):
+        # when a SynchronousTestCase method fails synchronously, the stack looks
+        # like this:
+        # [0]: SynchronousTestCase._run
+        # [1]:  _utilpy3.runWithWarningsSuppressed
+        # [2:-2]: code in the test method which failed
+        # [-1]: _synctest.fail
+
+        # when a TestCase method fails synchronously, the stack looks like this:
+        #  [0]: defer.maybeDeferred()
+        #  [1]: utils.runWithWarningsSuppressed()
+        #  [2:-2]: code in the test method which failed
+        #  [-1]: _synctest.fail
+
+        # when a method fails inside a Deferred (i.e., when the test method
+        # returns a Deferred, and that Deferred's errback fires), the stack
+        # captured inside the resulting Failure looks like this:
+        #  [0]: defer.Deferred._runCallbacks
+        #  [1:-2]: code in the testmethod which failed
+        #  [-1]: _synctest.fail
+
+        # as a result, we want to trim either [maybeDeferred,runWWS] or
+        # [Deferred._runCallbacks] or [SynchronousTestCase._run,runWWS] from the
+        # front, and trim the [unittest.fail] from the end.
+
+        # There is also another case, when the test method is badly defined and
+        # contains extra arguments.
+
+        newFrames = list(frames)
+
+        if len(frames) < 2:
+            return newFrames
+
+        firstMethod = newFrames[0][0]
+        firstFile = os.path.splitext(os.path.basename(newFrames[0][1]))[0]
+
+        secondMethod = newFrames[1][0]
+        secondFile = os.path.splitext(os.path.basename(newFrames[1][1]))[0]
+
+        syncCase = (("_run", "_synctest"), ("runWithWarningsSuppressed", "_utilpy3"))
+        asyncCase = (("maybeDeferred", "defer"), ("runWithWarningsSuppressed", "utils"))
+
+        twoFrames = ((firstMethod, firstFile), (secondMethod, secondFile))
+        if twoFrames in [syncCase, asyncCase]:
+            newFrames = newFrames[2:]
+        elif (firstMethod, firstFile) == ("_runCallbacks", "defer"):
+            newFrames = newFrames[1:]
+
+        if not newFrames:
+            # The method fails before getting called, probably an argument problem
+            return newFrames
+
+        last = newFrames[-1]
+        if (last[0].startswith('fail')
+            and os.path.splitext(os.path.basename(last[1]))[0] == '_synctest'):
+            newFrames = newFrames[:-1]
+
+        return newFrames
+
+
+    def _formatFailureTraceback(self, fail):
+        if isinstance(fail, str):
+            return fail.rstrip() + '\n'
+        fail.frames, frames = self._trimFrames(fail.frames), fail.frames
+        result = fail.getTraceback(detail=self.tbformat, elideFrameworkCode=True)
+        fail.frames = frames
+        return result
+
+
+    def _groupResults(self, results, formatter):
+        """
+        Group tests together based on their results.
+
+        @param results: An iterable of tuples of two or more elements.  The
+            first element of each tuple is a test case.  The remaining
+            elements describe the outcome of that test case.
+
+        @param formatter: A callable which turns a test case result into a
+            string.  The elements after the first of the tuples in
+            C{results} will be passed as positional arguments to
+            C{formatter}.
+
+        @return: A C{list} of two-tuples.  The first element of each tuple
+            is a unique string describing one result from at least one of
+            the test cases in C{results}.  The second element is a list of
+            the test cases which had that result.
+        """
+        groups = OrderedDict()
+        for content in results:
+            case = content[0]
+            outcome = content[1:]
+            key = formatter(*outcome)
+            groups.setdefault(key, []).append(case)
+        return groups.items()
+
+
+    def _printResults(self, flavor, errors, formatter):
+        """
+        Print a group of errors to the stream.
+
+        @param flavor: A string indicating the kind of error (e.g. 'TODO').
+        @param errors: A list of errors, often L{failure.Failure}s, but
+            sometimes 'todo' errors.
+        @param formatter: A callable that knows how to format the errors.
+        """
+        for reason, cases in self._groupResults(errors, formatter):
+            self._writeln(self._doubleSeparator)
+            self._writeln(flavor)
+            self._write(reason)
+            self._writeln('')
+            for case in cases:
+                self._writeln(case.id())
+
+
+    def _printExpectedFailure(self, error, todo):
+        return 'Reason: %r\n%s' % (todo.reason,
+                                   self._formatFailureTraceback(error))
+
+
+    def _printUnexpectedSuccess(self, todo):
+        ret = 'Reason: %r\n' % (todo.reason,)
+        if todo.errors:
+            ret += 'Expected errors: %s\n' % (', '.join(todo.errors),)
+        return ret
+
+
+    def printErrors(self):
+        """
+        Print all of the non-success results in full to the stream.
+        """
+        warnings.warn("printErrors is deprecated in Twisted 8.0.",
+                      category=DeprecationWarning, stacklevel=2)
+        self._printErrors()
+
+
+    def _printErrors(self):
+        """
+        Print all of the non-success results to the stream in full.
+        """
+        self._write('\n')
+        self._printResults('[SKIPPED]', self.skips, lambda x : '%s\n' % x)
+        self._printResults('[TODO]', self.expectedFailures,
+                           self._printExpectedFailure)
+        self._printResults('[FAIL]', self.failures,
+                           self._formatFailureTraceback)
+        self._printResults('[ERROR]', self.errors,
+                           self._formatFailureTraceback)
+        self._printResults('[SUCCESS!?!]', self.unexpectedSuccesses,
+                           self._printUnexpectedSuccess)
+
+
+    def _getSummary(self):
+        """
+        Return a formatted count of tests status results.
+        """
+        summaries = []
+        for stat in ("skips", "expectedFailures", "failures", "errors",
+                     "unexpectedSuccesses"):
+            num = len(getattr(self, stat))
+            if num:
+                summaries.append('%s=%d' % (stat, num))
+        if self.successes:
+            summaries.append('successes=%d' % (self.successes,))
+        summary = (summaries and ' ('+', '.join(summaries)+')') or ''
+        return summary
+
+
+    def printSummary(self):
+        """
+        Print a line summarising the test results to the stream.
+        """
+        warnings.warn("printSummary is deprecated in Twisted 8.0.",
+                      category=DeprecationWarning, stacklevel=2)
+        self._printSummary()
+
+
+    def _printSummary(self):
+        """
+        Print a line summarising the test results to the stream.
+        """
+        summary = self._getSummary()
+        if self.wasSuccessful():
+            status = "PASSED"
+        else:
+            status = "FAILED"
+        self._write("%s%s\n", status, summary)
+
+
+    def done(self):
+        """
+        Summarize the result of the test run.
+
+        The summary includes a report of all of the errors, todos, skips and
+        so forth that occurred during the run. It also includes the number of
+        tests that were run and how long it took to run them (not including
+        load time).
+
+        Expects that C{_printErrors}, C{_writeln}, C{_write}, C{_printSummary}
+        and C{_separator} are all implemented.
+        """
+        if self._publisher is not None:
+            self._publisher.removeObserver(self._observeWarnings)
+        self._printErrors()
+        self._writeln(self._separator)
+        if self._startTime is not None:
+            self._writeln('Ran %d tests in %.3fs', self.testsRun,
+                          time.time() - self._startTime)
+        self._write('\n')
+        self._printSummary()
+
+
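+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper and nested test case names are hypothetical. It drives a Reporter
+# against an in-memory stream and returns the text that done() produced.
+def _exampleReporterOutput():
+    """
+    Run one trivial test through a Reporter and capture its summary.
+    """
+    try:
+        from StringIO import StringIO   # Python 2
+    except ImportError:
+        from io import StringIO         # Python 3
+    out = StringIO()
+    reporter = Reporter(stream=out)
+    class _Sample(pyunit.TestCase):
+        def test_ok(self):
+            pass
+    _Sample('test_ok').run(reporter)
+    reporter.done()
+    return out.getvalue()   # ends with "PASSED (successes=1)\n"
+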
+
+class MinimalReporter(Reporter):
+    """
+    A minimalist reporter that prints only a summary of the test result, in
+    the form of (timeTaken, #tests, #tests, #errors, #failures, #skips).
+    """
+
+    def _printErrors(self):
+        """
+        Don't print a detailed summary of errors. We only care about the
+        counts.
+        """
+
+
+    def _printSummary(self):
+        """
+        Print out a one-line summary of the form:
+        '%(runtime) %(number_of_tests) %(number_of_tests) %(num_errors)
+        %(num_failures) %(num_skips)'
+        """
+        numTests = self.testsRun
+        if self._startTime is not None:
+            timing = self._getTime() - self._startTime
+        else:
+            timing = 0
+        t = (timing, numTests, numTests,
+             len(self.errors), len(self.failures), len(self.skips))
+        self._writeln(' '.join(map(str, t)))
+
+
+
+class TextReporter(Reporter):
+    """
+    Simple reporter that prints a single character for each test as it runs,
+    along with the standard Trial summary text.
+    """
+
+    def addSuccess(self, test):
+        super(TextReporter, self).addSuccess(test)
+        self._write('.')
+
+
+    def addError(self, *args):
+        super(TextReporter, self).addError(*args)
+        self._write('E')
+
+
+    def addFailure(self, *args):
+        super(TextReporter, self).addFailure(*args)
+        self._write('F')
+
+
+    def addSkip(self, *args):
+        super(TextReporter, self).addSkip(*args)
+        self._write('S')
+
+
+    def addExpectedFailure(self, *args):
+        super(TextReporter, self).addExpectedFailure(*args)
+        self._write('T')
+
+
+    def addUnexpectedSuccess(self, *args):
+        super(TextReporter, self).addUnexpectedSuccess(*args)
+        self._write('!')
+
+
+
+class VerboseTextReporter(Reporter):
+    """
+    A verbose reporter that prints the name of each test as it is running.
+
+    Each line is printed with the name of the test, followed by the result of
+    that test.
+    """
+
+    # This is actually the bwverbose option
+
+    def startTest(self, tm):
+        self._write('%s ... ', tm.id())
+        super(VerboseTextReporter, self).startTest(tm)
+
+
+    def addSuccess(self, test):
+        super(VerboseTextReporter, self).addSuccess(test)
+        self._write('[OK]')
+
+
+    def addError(self, *args):
+        super(VerboseTextReporter, self).addError(*args)
+        self._write('[ERROR]')
+
+
+    def addFailure(self, *args):
+        super(VerboseTextReporter, self).addFailure(*args)
+        self._write('[FAILURE]')
+
+
+    def addSkip(self, *args):
+        super(VerboseTextReporter, self).addSkip(*args)
+        self._write('[SKIPPED]')
+
+
+    def addExpectedFailure(self, *args):
+        super(VerboseTextReporter, self).addExpectedFailure(*args)
+        self._write('[TODO]')
+
+
+    def addUnexpectedSuccess(self, *args):
+        super(VerboseTextReporter, self).addUnexpectedSuccess(*args)
+        self._write('[SUCCESS!?!]')
+
+
+    def stopTest(self, test):
+        super(VerboseTextReporter, self).stopTest(test)
+        self._write('\n')
+
+
+
+class TimingTextReporter(VerboseTextReporter):
+    """
+    Prints out each test as it is running, followed by the time taken for each
+    test to run.
+    """
+
+    def stopTest(self, method):
+        """
+        Mark the test as stopped, and write the time it took to run the test
+        to the stream.
+        """
+        super(TimingTextReporter, self).stopTest(method)
+        self._write("(%.03f secs)\n" % self._lastTime)
+
+
+
+class _AnsiColorizer(object):
+    """
+    A colorizer is an object that loosely wraps around a stream, allowing
+    callers to write text to the stream in a particular color.
+
+    Colorizer classes must implement C{supported()} and C{write(text, color)}.
+    """
+    _colors = dict(black=30, red=31, green=32, yellow=33,
+                   blue=34, magenta=35, cyan=36, white=37)
+
+    def __init__(self, stream):
+        self.stream = stream
+
+    def supported(cls, stream=sys.stdout):
+        """
+        A class method that returns True if the current platform supports
+        coloring terminal output using this method. Returns False otherwise.
+        """
+        if not stream.isatty():
+            return False # auto color only on TTYs
+        try:
+            import curses
+        except ImportError:
+            return False
+        else:
+            try:
+                try:
+                    return curses.tigetnum("colors") > 2
+                except curses.error:
+                    curses.setupterm()
+                    return curses.tigetnum("colors") > 2
+            except:
+                # guess false in case of error
+                return False
+    supported = classmethod(supported)
+
+    def write(self, text, color):
+        """
+        Write the given text to the stream in the given color.
+
+        @param text: Text to be written to the stream.
+
+        @param color: A string label for a color. e.g. 'red', 'white'.
+        """
+        color = self._colors[color]
+        self.stream.write('\x1b[%s;1m%s\x1b[0m' % (color, text))
+
+
+class _Win32Colorizer(object):
+    """
+    See _AnsiColorizer docstring.
+    """
+    def __init__(self, stream):
+        from win32console import GetStdHandle, STD_OUTPUT_HANDLE, \
+             FOREGROUND_RED, FOREGROUND_BLUE, FOREGROUND_GREEN, \
+             FOREGROUND_INTENSITY
+        red, green, blue, bold = (FOREGROUND_RED, FOREGROUND_GREEN,
+                                  FOREGROUND_BLUE, FOREGROUND_INTENSITY)
+        self.stream = stream
+        self.screenBuffer = GetStdHandle(STD_OUTPUT_HANDLE)
+        self._colors = {
+            'normal': red | green | blue,
+            'red': red | bold,
+            'green': green | bold,
+            'blue': blue | bold,
+            'yellow': red | green | bold,
+            'magenta': red | blue | bold,
+            'cyan': green | blue | bold,
+            'white': red | green | blue | bold
+            }
+
+    def supported(cls, stream=sys.stdout):
+        try:
+            import win32console
+            screenBuffer = win32console.GetStdHandle(
+                win32console.STD_OUTPUT_HANDLE)
+        except ImportError:
+            return False
+        import pywintypes
+        try:
+            screenBuffer.SetConsoleTextAttribute(
+                win32console.FOREGROUND_RED |
+                win32console.FOREGROUND_GREEN |
+                win32console.FOREGROUND_BLUE)
+        except pywintypes.error:
+            return False
+        else:
+            return True
+    supported = classmethod(supported)
+
+    def write(self, text, color):
+        color = self._colors[color]
+        self.screenBuffer.SetConsoleTextAttribute(color)
+        self.stream.write(text)
+        self.screenBuffer.SetConsoleTextAttribute(self._colors['normal'])
+
+
+class _NullColorizer(object):
+    """
+    See _AnsiColorizer docstring.
+    """
+    def __init__(self, stream):
+        self.stream = stream
+
+    def supported(cls, stream=sys.stdout):
+        return True
+    supported = classmethod(supported)
+
+    def write(self, text, color):
+        self.stream.write(text)
+
+
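+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper name is hypothetical. It mirrors the selection logic used by
+# TreeReporter.__init__ below: try each colorizer and fall back to the
+# colorless one.
+def _exampleColorizerSelection(stream=sys.stdout):
+    """
+    Return the first colorizer whose supported() check passes for C{stream}.
+    """
+    for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]:
+        if colorizer.supported(stream):
+            return colorizer(stream)
+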
+
+@implementer(itrial.IReporter)
+class SubunitReporter(object):
+    """
+    Reports test output via Subunit.
+
+    @ivar _subunit: The subunit protocol client that we are wrapping.
+
+    @ivar _successful: An internal variable, used to track whether we have
+        received only successful results.
+
+    @since: 10.0
+    """
+
+    def __init__(self, stream=sys.stdout, tbformat='default',
+                 realtime=False, publisher=None):
+        """
+        Construct a L{SubunitReporter}.
+
+        @param stream: A file-like object representing the stream to print
+            output to. Defaults to stdout.
+        @param tbformat: The format for tracebacks. Ignored, since subunit
+            always uses Python's standard format.
+        @param realtime: Whether or not to print exceptions in the middle
+            of the test results. Ignored, since subunit always does this.
+        @param publisher: The log publisher which will be preserved for
+            reporting events. Ignored, as it's not relevant to subunit.
+        """
+        if TestProtocolClient is None:
+            raise Exception("Subunit not available")
+        self._subunit = TestProtocolClient(stream)
+        self._successful = True
+
+
+    def done(self):
+        """
+        Record that the entire test suite run is finished.
+
+        We do nothing, since a summary clause is irrelevant to the subunit
+        protocol.
+        """
+        pass
+
+
+    def shouldStop(self):
+        """
+        Whether or not the test runner should stop running tests.
+        """
+        return self._subunit.shouldStop
+    shouldStop = property(shouldStop)
+
+
+    def stop(self):
+        """
+        Signal that the test runner should stop running tests.
+        """
+        return self._subunit.stop()
+
+
+    def wasSuccessful(self):
+        """
+        Has the test run been successful so far?
+
+        @return: C{True} if we have received no reports of errors or failures,
+            C{False} otherwise.
+        """
+        # Subunit has a bug in its implementation of wasSuccessful, see
+        # https://bugs.edge.launchpad.net/subunit/+bug/491090, so we can't
+        # simply forward it on.
+        return self._successful
+
+
+    def startTest(self, test):
+        """
+        Record that C{test} has started.
+        """
+        return self._subunit.startTest(test)
+
+
+    def stopTest(self, test):
+        """
+        Record that C{test} has completed.
+        """
+        return self._subunit.stopTest(test)
+
+
+    def addSuccess(self, test):
+        """
+        Record that C{test} was successful.
+        """
+        return self._subunit.addSuccess(test)
+
+
+    def addSkip(self, test, reason):
+        """
+        Record that C{test} was skipped for C{reason}.
+
+        Some versions of subunit don't have support for addSkip. In those
+        cases, the skip is reported as a success.
+
+        @param test: A unittest-compatible C{TestCase}.
+        @param reason: The reason for it being skipped. The C{str()} of this
+            object will be included in the subunit output stream.
+        """
+        addSkip = getattr(self._subunit, 'addSkip', None)
+        if addSkip is None:
+            self.addSuccess(test)
+        else:
+            self._subunit.addSkip(test, reason)
+
+
+    def addError(self, test, err):
+        """
+        Record that C{test} failed with an unexpected error C{err}.
+
+        Also marks the run as being unsuccessful, causing
+        L{SubunitReporter.wasSuccessful} to return C{False}.
+        """
+        self._successful = False
+        return self._subunit.addError(
+            test, util.excInfoOrFailureToExcInfo(err))
+
+
+    def addFailure(self, test, err):
+        """
+        Record that C{test} failed an assertion with the error C{err}.
+
+        Also marks the run as being unsuccessful, causing
+        L{SubunitReporter.wasSuccessful} to return C{False}.
+        """
+        self._successful = False
+        return self._subunit.addFailure(
+            test, util.excInfoOrFailureToExcInfo(err))
+
+
+    def addExpectedFailure(self, test, failure, todo):
+        """
+        Record an expected failure from a test.
+
+        Some versions of subunit do not implement this. For those versions, we
+        record a success.
+        """
+        failure = util.excInfoOrFailureToExcInfo(failure)
+        addExpectedFailure = getattr(self._subunit, 'addExpectedFailure', None)
+        if addExpectedFailure is None:
+            self.addSuccess(test)
+        else:
+            addExpectedFailure(test, failure)
+
+
+    def addUnexpectedSuccess(self, test, todo):
+        """
+        Record an unexpected success.
+
+        Since subunit has no way of expressing this concept, we record a
+        success on the subunit stream.
+        """
+        # Not represented in pyunit/subunit.
+        self.addSuccess(test)
+
+
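+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper name is hypothetical. SubunitReporter raises when the optional
+# subunit dependency is missing, so a caller can fall back to Reporter.
+def _exampleSubunitReporterFallback(stream=sys.stdout):
+    """
+    Prefer subunit output when available, otherwise use the plain Reporter.
+    """
+    if TestProtocolClient is None:
+        return Reporter(stream)
+    return SubunitReporter(stream)
+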
+
+class TreeReporter(Reporter):
+    """
+    Print out the tests in the form of a tree.
+
+    Tests are indented according to the module and class to which they belong.
+    Results are printed in ANSI color.
+    """
+
+    currentLine = ''
+    indent = '  '
+    columns = 79
+
+    FAILURE = 'red'
+    ERROR = 'red'
+    TODO = 'blue'
+    SKIP = 'blue'
+    TODONE = 'red'
+    SUCCESS = 'green'
+
+    def __init__(self, stream=sys.stdout, *args, **kwargs):
+        super(TreeReporter, self).__init__(stream, *args, **kwargs)
+        self._lastTest = []
+        for colorizer in [_Win32Colorizer, _AnsiColorizer, _NullColorizer]:
+            if colorizer.supported(stream):
+                self._colorizer = colorizer(stream)
+                break
+
+    def getDescription(self, test):
+        """
+        Return the name of the method which 'test' represents.  This is
+        what gets displayed in the leaves of the tree.
+
+        e.g. getDescription(TestCase('test_foo')) ==> test_foo
+        """
+        return test.id().split('.')[-1]
+
+    def addSuccess(self, test):
+        super(TreeReporter, self).addSuccess(test)
+        self.endLine('[OK]', self.SUCCESS)
+
+    def addError(self, *args):
+        super(TreeReporter, self).addError(*args)
+        self.endLine('[ERROR]', self.ERROR)
+
+    def addFailure(self, *args):
+        super(TreeReporter, self).addFailure(*args)
+        self.endLine('[FAIL]', self.FAILURE)
+
+    def addSkip(self, *args):
+        super(TreeReporter, self).addSkip(*args)
+        self.endLine('[SKIPPED]', self.SKIP)
+
+    def addExpectedFailure(self, *args):
+        super(TreeReporter, self).addExpectedFailure(*args)
+        self.endLine('[TODO]', self.TODO)
+
+    def addUnexpectedSuccess(self, *args):
+        super(TreeReporter, self).addUnexpectedSuccess(*args)
+        self.endLine('[SUCCESS!?!]', self.TODONE)
+
+    def _write(self, format, *args):
+        if args:
+            format = format % args
+        self.currentLine = format
+        super(TreeReporter, self)._write(self.currentLine)
+
+
+    def _getPreludeSegments(self, testID):
+        """
+        Return a list of all non-leaf segments to display in the tree.
+
+        Normally this is the module and class name.
+        """
+        segments = testID.split('.')[:-1]
+        if len(segments) == 0:
+            return segments
+        segments = [
+            seg for seg in ('.'.join(segments[:-1]), segments[-1])
+            if len(seg) > 0]
+        return segments
+
+
+    def _testPrelude(self, testID):
+        """
+        Write the name of the test to the stream, indenting it appropriately.
+
+        If the test is the first test in a new 'branch' of the tree, also
+        write all of the parents in that branch.
+        """
+        segments = self._getPreludeSegments(testID)
+        indentLevel = 0
+        for seg in segments:
+            if indentLevel < len(self._lastTest):
+                if seg != self._lastTest[indentLevel]:
+                    self._write('%s%s\n' % (self.indent * indentLevel, seg))
+            else:
+                self._write('%s%s\n' % (self.indent * indentLevel, seg))
+            indentLevel += 1
+        self._lastTest = segments
+
+
+    def cleanupErrors(self, errs):
+        self._colorizer.write('    cleanup errors', self.ERROR)
+        self.endLine('[ERROR]', self.ERROR)
+        super(TreeReporter, self).cleanupErrors(errs)
+
+    def upDownError(self, method, error, warn, printStatus):
+        self._colorizer.write("  %s" % method, self.ERROR)
+        if printStatus:
+            self.endLine('[ERROR]', self.ERROR)
+        super(TreeReporter, self).upDownError(method, error, warn, printStatus)
+
+    def startTest(self, test):
+        """
+        Called when C{test} starts. Writes the test's name to the stream using
+        a tree format.
+        """
+        self._testPrelude(test.id())
+        self._write('%s%s ... ' % (self.indent * (len(self._lastTest)),
+                                   self.getDescription(test)))
+        super(TreeReporter, self).startTest(test)
+
+
+    def endLine(self, message, color):
+        """
+        Print 'message' in the given color.
+
+        @param message: A string message, usually '[OK]' or something similar.
+        @param color: A string color, 'red', 'green' and so forth.
+        """
+        spaces = ' ' * (self.columns - len(self.currentLine) - len(message))
+        super(TreeReporter, self)._write(spaces)
+        self._colorizer.write(message, color)
+        super(TreeReporter, self)._write("\n")
+
+
+    def _printSummary(self):
+        """
+        Print a line summarising the test results to the stream, and color the
+        status result.
+        """
+        summary = self._getSummary()
+        if self.wasSuccessful():
+            status = "PASSED"
+            color = self.SUCCESS
+        else:
+            status = "FAILED"
+            color = self.FAILURE
+        self._colorizer.write(status, color)
+        self._write("%s\n", summary)
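+
+
+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper name and the dotted test id are hypothetical. It shows how a test
+# id is split into the non-leaf segments that become the branches of the
+# tree: the module path and the class name.
+def _exampleTreePreludeSegments():
+    """
+    Returns ['twisted.trial.test.test_reporter', 'SomeTests'].
+    """
+    reporter = TreeReporter(stream=sys.stdout)
+    testID = "twisted.trial.test.test_reporter.SomeTests.test_foo"
+    return reporter._getPreludeSegments(testID)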
diff --git a/ThirdParty/Twisted/twisted/trial/runner.py b/ThirdParty/Twisted/twisted/trial/runner.py
new file mode 100644
index 0000000..0b691b0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/runner.py
@@ -0,0 +1,846 @@
+# -*- test-case-name: twisted.trial.test.test_runner -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A miscellany of code used to run Trial tests.
+
+Maintainer: Jonathan Lange
+"""
+
+__all__ = [
+    'suiteVisit', 'TestSuite',
+
+    'DestructiveTestSuite', 'DryRunVisitor',
+    'ErrorHolder', 'LoggedSuite', 'PyUnitTestCase',
+    'TestHolder', 'TestLoader', 'TrialRunner', 'TrialSuite',
+
+    'filenameToModule', 'isPackage', 'isPackageDirectory', 'isTestCase',
+    'name', 'samefile', 'NOT_IN_TEST',
+    ]
+
+import os, types, warnings, sys, inspect, imp
+import doctest, time
+
+from twisted.python import reflect, log, failure, modules, filepath
+from twisted.python.compat import set
+
+from twisted.internet import defer
+from twisted.trial import util, unittest
+from twisted.trial.itrial import ITestCase
+from twisted.trial.reporter import UncleanWarningsReporterWrapper
+
+# These are imported so that they remain in the public API for t.trial.runner
+from twisted.trial.unittest import suiteVisit, TestSuite
+
+from zope.interface import implements
+
+pyunit = __import__('unittest')
+
+
+
+def isPackage(module):
+    """Given an object return True if the object looks like a package"""
+    if not isinstance(module, types.ModuleType):
+        return False
+    basename = os.path.splitext(os.path.basename(module.__file__))[0]
+    return basename == '__init__'
+
+
+def isPackageDirectory(dirname):
+    """Is the directory at path 'dirname' a Python package directory?
+    Returns the name of the __init__ file (it may have a weird extension)
+    if dirname is a package directory.  Otherwise, returns False"""
+    for ext in zip(*imp.get_suffixes())[0]:
+        initFile = '__init__' + ext
+        if os.path.exists(os.path.join(dirname, initFile)):
+            return initFile
+    return False
+
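+
+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper name is hypothetical. A package's module object comes from an
+# __init__ file, which is exactly what isPackage checks for.
+def _exampleIsPackage():
+    """
+    C{twisted.trial} is a package; this runner module is not.
+    """
+    import twisted.trial
+    me = sys.modules[__name__]
+    return isPackage(twisted.trial), isPackage(me)   # (True, False)
+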
+
+def samefile(filename1, filename2):
+    """
+    A hacky implementation of C{os.path.samefile}. Used by L{filenameToModule}
+    when the platform doesn't provide C{os.path.samefile}. Do not use this.
+    """
+    return os.path.abspath(filename1) == os.path.abspath(filename2)
+
+
+def filenameToModule(fn):
+    """
+    Given a filename, do whatever possible to return a module object matching
+    that file.
+
+    If the file in question is a module in Python path, properly import and
+    return that module. Otherwise, load the source manually.
+
+    @param fn: A filename.
+    @return: A module object.
+    @raise ValueError: If C{fn} does not exist.
+    """
+    if not os.path.exists(fn):
+        raise ValueError("%r doesn't exist" % (fn,))
+    try:
+        ret = reflect.namedAny(reflect.filenameToModuleName(fn))
+    except (ValueError, AttributeError):
+        # Couldn't find module.  The file 'fn' is not in PYTHONPATH
+        return _importFromFile(fn)
+    # ensure that the loaded module matches the file
+    retFile = os.path.splitext(ret.__file__)[0] + '.py'
+    # not all platforms (e.g. win32) have os.path.samefile
+    same = getattr(os.path, 'samefile', samefile)
+    if os.path.isfile(fn) and not same(fn, retFile):
+        del sys.modules[ret.__name__]
+        ret = _importFromFile(fn)
+    return ret
+
+
+def _importFromFile(fn, moduleName=None):
+    fn = _resolveDirectory(fn)
+    if not moduleName:
+        moduleName = os.path.splitext(os.path.split(fn)[-1])[0]
+    if moduleName in sys.modules:
+        return sys.modules[moduleName]
+    fd = open(fn, 'r')
+    try:
+        module = imp.load_source(moduleName, fn, fd)
+    finally:
+        fd.close()
+    return module
+
+
+def _resolveDirectory(fn):
+    if os.path.isdir(fn):
+        initFile = isPackageDirectory(fn)
+        if initFile:
+            fn = os.path.join(fn, initFile)
+        else:
+            raise ValueError('%r is not a package directory' % (fn,))
+    return fn
+
+
+def _getMethodNameInClass(method):
+    """
+    Find the attribute name on the method's class which refers to the method.
+
+    For some methods, notably decorators which have not had __name__ set correctly:
+
+    getattr(method.im_class, method.__name__) != method
+    """
+    if getattr(method.im_class, method.__name__, object()) != method:
+        for alias in dir(method.im_class):
+            if getattr(method.im_class, alias, object()) == method:
+                return alias
+    return method.__name__
+
+
+class DestructiveTestSuite(TestSuite):
+    """
+    A test suite which removes the tests once run, to minimize memory usage.
+    """
+
+    def run(self, result):
+        """
+        Almost the same as L{TestSuite.run}, but with C{self._tests} being
+        empty at the end.
+        """
+        while self._tests:
+            if result.shouldStop:
+                break
+            test = self._tests.pop(0)
+            test(result)
+        return result
+
+
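+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper and nested test case names are hypothetical. Running the suite pops
+# each test as it goes, so the suite is empty afterwards while the result
+# still saw both tests.
+def _exampleDestructiveSuite():
+    """
+    Returns (2, 0): two tests run, none left in the suite.
+    """
+    class _Sample(pyunit.TestCase):
+        def test_a(self):
+            pass
+        def test_b(self):
+            pass
+    suite = DestructiveTestSuite([_Sample('test_a'), _Sample('test_b')])
+    result = pyunit.TestResult()
+    suite.run(result)
+    return result.testsRun, suite.countTestCases()
+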
+
+# When an error occurs outside of any test, the user will see this string
+# in place of a test's name.
+NOT_IN_TEST = "<not in test>"
+
+
+
+class LoggedSuite(TestSuite):
+    """
+    Any errors logged in this suite will be reported to the L{TestResult}
+    object.
+    """
+
+    def run(self, result):
+        """
+        Run the suite, storing all errors in C{result}. If an error is logged
+        while no tests are running, then it will be added as an error to
+        C{result}.
+
+        @param result: A L{TestResult} object.
+        """
+        observer = unittest._logObserver
+        observer._add()
+        super(LoggedSuite, self).run(result)
+        observer._remove()
+        for error in observer.getErrors():
+            result.addError(TestHolder(NOT_IN_TEST), error)
+        observer.flushErrors()
+
+
+
+class PyUnitTestCase(object):
+    """
+    DEPRECATED in Twisted 8.0.
+
+    This class decorates the pyunit.TestCase class, mainly to work around the
+    differences between unittest in Python 2.3, 2.4, and 2.5. These
+    differences are::
+
+        - The way doctest unittests describe themselves
+        - Where the implementation of TestCase.run is (used to be in __call__)
+        - Where the test method name is kept (mangled-private or non-mangled
+          private variable)
+
+    It also implements visit, which we like.
+    """
+
+    def __init__(self, test):
+        warnings.warn("Deprecated in Twisted 8.0.",
+                      category=DeprecationWarning)
+        self._test = test
+        test.id = self.id
+
+    def id(self):
+        cls = self._test.__class__
+        tmn = getattr(self._test, '_TestCase__testMethodName', None)
+        if tmn is None:
+            # python2.5's 'unittest' module is more sensible; but different.
+            tmn = self._test._testMethodName
+        return (cls.__module__ + '.' + cls.__name__ + '.' +
+                tmn)
+
+    def __repr__(self):
+        return 'PyUnitTestCase<%r>'%(self.id(),)
+
+    def __call__(self, results):
+        return self._test(results)
+
+
+    def visit(self, visitor):
+        """
+        Call the given visitor with the original, standard library, test case
+        that C{self} wraps. See L{unittest.TestCase.visit}.
+
+        Deprecated in Twisted 8.0.
+        """
+        warnings.warn("Test visitors deprecated in Twisted 8.0",
+                      category=DeprecationWarning)
+        visitor(self._test)
+
+
+    def __getattr__(self, name):
+        return getattr(self._test, name)
+
+
+
+class TrialSuite(TestSuite):
+    """
+    Suite to wrap around every single test in a C{trial} run. Used internally
+    by Trial to set up things necessary for Trial tests to work, regardless of
+    what context they are run in.
+    """
+
+    def __init__(self, tests=(), forceGarbageCollection=False):
+        if forceGarbageCollection:
+            newTests = []
+            for test in tests:
+                test = unittest.decorate(
+                    test, unittest._ForceGarbageCollectionDecorator)
+                newTests.append(test)
+            tests = newTests
+        suite = LoggedSuite(tests)
+        super(TrialSuite, self).__init__([suite])
+
+
+    def _bail(self):
+        from twisted.internet import reactor
+        d = defer.Deferred()
+        reactor.addSystemEventTrigger('after', 'shutdown',
+                                      lambda: d.callback(None))
+        reactor.fireSystemEvent('shutdown') # radix's suggestion
+        # As long as TestCase does crap stuff with the reactor we need to
+        # manually shutdown the reactor here, and that requires util.wait
+        # :(
+        # so that the shutdown event completes
+        unittest.TestCase('mktemp')._wait(d)
+
+    def run(self, result):
+        try:
+            TestSuite.run(self, result)
+        finally:
+            self._bail()
+
+
+def name(thing):
+    """
+    @param thing: an object from modules (instance of PythonModule,
+        PythonAttribute), a TestCase subclass, or an instance of a TestCase.
+    """
+    if isTestCase(thing):
+        # TestCase subclass
+        theName = reflect.qual(thing)
+    else:
+        # thing from trial, or thing from modules.
+        # this monstrosity exists so that modules' objects do not have to
+        # implement id(). -jml
+        try:
+            theName = thing.id()
+        except AttributeError:
+            theName = thing.name
+    return theName
+
+
+def isTestCase(obj):
+    """
+    @return: C{True} if C{obj} is a class that contains test cases, C{False}
+        otherwise. Used to find all the tests in a module.
+    """
+    try:
+        return issubclass(obj, pyunit.TestCase)
+    except TypeError:
+        return False
+
+
+
+class TestHolder(object):
+    """
+    Placeholder for a L{TestCase} inside a reporter. As far as a L{TestResult}
+    is concerned, this looks exactly like a unit test.
+    """
+
+    implements(ITestCase)
+
+    failureException = None
+
+    def __init__(self, description):
+        """
+        @param description: A string to be displayed by the L{TestResult}.
+        """
+        self.description = description
+
+
+    def __call__(self, result):
+        return self.run(result)
+
+
+    def id(self):
+        return self.description
+
+
+    def countTestCases(self):
+        return 0
+
+
+    def run(self, result):
+        """
+        This test is just a placeholder. Run the test successfully.
+
+        @param result: The C{TestResult} to store the results in.
+        @type result: L{twisted.trial.itrial.IReporter}.
+        """
+        result.startTest(self)
+        result.addSuccess(self)
+        result.stopTest(self)
+
+
+    def shortDescription(self):
+        return self.description
+
+
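+# Editor's note: an illustrative sketch, not part of upstream Twisted; the
+# helper name and description string are hypothetical. A TestHolder behaves
+# like a single test that always reports success under its description.
+def _exampleTestHolder():
+    """
+    Returns (1, True): one 'test' run, and the result was successful.
+    """
+    holder = TestHolder("<placeholder used only in this sketch>")
+    result = pyunit.TestResult()
+    holder.run(result)
+    return result.testsRun, result.wasSuccessful()
+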
+
+class ErrorHolder(TestHolder):
+    """
+    Used to insert arbitrary errors into a test suite run. Provides enough
+    methods to look like a C{TestCase}; however, when it is run, it simply adds
+    an error to the C{TestResult}. The most common use-case is for when a
+    module fails to import.
+    """
+
+    def __init__(self, description, error):
+        """
+        @param description: A string used by C{TestResult}s to identify this
+        error. Generally, this is the name of a module that failed to import.
+
+        @param error: The error to be added to the result. Can be an `exc_info`
+        tuple or a L{twisted.python.failure.Failure}.
+        """
+        super(ErrorHolder, self).__init__(description)
+        self.error = util.excInfoOrFailureToExcInfo(error)
+
+
+    def __repr__(self):
+        return "<ErrorHolder description=%r error=%s%s>" % (
+            # Format the exception type and arguments explicitly, as exception
+            # objects do not have nice looking string formats on Python 2.4.
+            self.description, self.error[0].__name__, self.error[1].args)
+
+
+    def run(self, result):
+        """
+        Run the test, reporting the error.
+
+        @param result: The C{TestResult} to store the results in.
+        @type result: L{twisted.trial.itrial.IReporter}.
+        """
+        result.startTest(self)
+        result.addError(self, self.error)
+        result.stopTest(self)
+
+
+    def visit(self, visitor):
+        """
+        See L{unittest.TestCase.visit}.
+        """
+        visitor(self)
+
+
+
+class TestLoader(object):
+    """
+    I find tests inside functions, modules, files -- whatever -- then return
+    them wrapped inside a Test (either a L{TestSuite} or a L{TestCase}).
+
+    @ivar methodPrefix: A string prefix. C{TestLoader} will assume that all the
+    methods in a class that begin with C{methodPrefix} are test cases.
+
+    @ivar modulePrefix: A string prefix. Every module in a package that begins
+    with C{modulePrefix} is considered a module full of tests.
+
+    @ivar forceGarbageCollection: A flag applied to each C{TestCase} loaded.
+    See L{unittest.TestCase} for more information.
+
+    @ivar sorter: A key function used to sort C{TestCase}s, test classes,
+    modules and packages.
+
+    @ivar suiteFactory: A callable which is passed a list of tests (which
+    themselves may be suites of tests). Must return a test suite.
+    """
+
+    methodPrefix = 'test'
+    modulePrefix = 'test_'
+
+    def __init__(self):
+        self.suiteFactory = TestSuite
+        self.sorter = name
+        self._importErrors = []
+
+    def sort(self, xs):
+        """
+        Sort the given things using L{sorter}.
+
+        @param xs: A list of test cases, class or modules.
+        """
+        return sorted(xs, key=self.sorter)
+
+    def findTestClasses(self, module):
+        """Given a module, return all Trial test classes"""
+        classes = []
+        for name, val in inspect.getmembers(module):
+            if isTestCase(val):
+                classes.append(val)
+        return self.sort(classes)
+
+    def findByName(self, name):
+        """
+        Return a Python object given a string describing it.
+
+        @param name: a string which may be either a filename or a
+        fully-qualified Python name.
+
+        @return: If C{name} is a filename, return the module. If C{name} is a
+        fully-qualified Python name, return the object it refers to.
+        """
+        if os.path.exists(name):
+            return filenameToModule(name)
+        return reflect.namedAny(name)
+
+    def loadModule(self, module):
+        """
+        Return a test suite with all the tests from a module.
+
+        Included are TestCase subclasses and doctests listed in the module's
+        __doctests__ attribute. If that's not good for you, put a function named
+        either C{testSuite} or C{test_suite} in your module that returns a
+        TestSuite, and I'll use the results of that instead.
+
+        If C{testSuite} and C{test_suite} are both present, then I'll use
+        C{testSuite}.
+        """
+        ## XXX - should I add an optional parameter to disable the check for
+        ## a custom suite.
+        ## OR, should I add another method
+        if not isinstance(module, types.ModuleType):
+            raise TypeError("%r is not a module" % (module,))
+        if hasattr(module, 'testSuite'):
+            return module.testSuite()
+        elif hasattr(module, 'test_suite'):
+            return module.test_suite()
+        suite = self.suiteFactory()
+        for testClass in self.findTestClasses(module):
+            suite.addTest(self.loadClass(testClass))
+        if not hasattr(module, '__doctests__'):
+            return suite
+        docSuite = self.suiteFactory()
+        for doctest in module.__doctests__:
+            docSuite.addTest(self.loadDoctests(doctest))
+        return self.suiteFactory([suite, docSuite])
+    loadTestsFromModule = loadModule
+
+    def loadClass(self, klass):
+        """
+        Given a class which contains test cases, return a sorted list of
+        C{TestCase} instances.
+        """
+        if not (isinstance(klass, type) or isinstance(klass, types.ClassType)):
+            raise TypeError("%r is not a class" % (klass,))
+        if not isTestCase(klass):
+            raise ValueError("%r is not a test case" % (klass,))
+        names = self.getTestCaseNames(klass)
+        tests = self.sort([self._makeCase(klass, self.methodPrefix+name)
+                           for name in names])
+        return self.suiteFactory(tests)
+    loadTestsFromTestCase = loadClass
+
+    def getTestCaseNames(self, klass):
+        """
+        Given a class that contains C{TestCase}s, return a list of names of
+        methods that probably contain tests.
+        """
+        return reflect.prefixedMethodNames(klass, self.methodPrefix)
+
+    def loadMethod(self, method):
+        """
+        Given a method of a C{TestCase} that represents a test, return a
+        C{TestCase} instance for that test.
+        """
+        if not isinstance(method, types.MethodType):
+            raise TypeError("%r not a method" % (method,))
+        return self._makeCase(method.im_class, _getMethodNameInClass(method))
+
+    def _makeCase(self, klass, methodName):
+        return klass(methodName)
+
+    def loadPackage(self, package, recurse=False):
+        """
+        Load tests from a module object representing a package, and return a
+        TestSuite containing those tests.
+
+        Tests are only loaded from modules whose name begins with 'test_'
+        (or whatever C{modulePrefix} is set to).
+
+        @param package: a types.ModuleType object (or reasonable facsimile
+        obtained by importing) which may contain tests.
+
+        @param recurse: A boolean.  If True, inspect modules within packages
+        within the given package (and so on), otherwise, only inspect modules
+        in the package itself.
+
+        @raise: TypeError if 'package' is not a package.
+
+        @return: a TestSuite created with my suiteFactory, containing all the
+        tests.
+        """
+        if not isPackage(package):
+            raise TypeError("%r is not a package" % (package,))
+        pkgobj = modules.getModule(package.__name__)
+        if recurse:
+            discovery = pkgobj.walkModules()
+        else:
+            discovery = pkgobj.iterModules()
+        discovered = []
+        for disco in discovery:
+            if disco.name.split(".")[-1].startswith(self.modulePrefix):
+                discovered.append(disco)
+        suite = self.suiteFactory()
+        for modinfo in self.sort(discovered):
+            try:
+                module = modinfo.load()
+            except:
+                thingToAdd = ErrorHolder(modinfo.name, failure.Failure())
+            else:
+                thingToAdd = self.loadModule(module)
+            suite.addTest(thingToAdd)
+        return suite
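
Editorial aside, not part of the imported diff: a minimal sketch of driving
loadPackage() above, assuming the twisted.trial.test package is importable.
Only modules whose names start with the loader's modulePrefix ('test_' by
default) are collected, and recurse=True walks subpackages as well.

    from twisted.trial import runner
    import twisted.trial.test as testpkg

    loader = runner.TestLoader()
    # recurse=True also inspects subpackages; modules that fail to import
    # show up as ErrorHolder entries instead of silently disappearing.
    suite = loader.loadPackage(testpkg, recurse=True)
    print(suite.countTestCases())
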
+
+    def loadDoctests(self, module):
+        """
+        Return a suite of tests for all the doctests defined in C{module}.
+
+        @param module: A module object or a module name.
+        """
+        if isinstance(module, str):
+            try:
+                module = reflect.namedAny(module)
+            except:
+                return ErrorHolder(module, failure.Failure())
+        if not inspect.ismodule(module):
+            warnings.warn("trial only supports doctesting modules")
+            return
+        extraArgs = {}
+        if sys.version_info > (2, 4):
+            # Work around Python issue2604: DocTestCase.tearDown clobbers globs
+            def saveGlobals(test):
+                """
+                Save C{test.globs} and replace it with a copy so that if
+                necessary, the original will be available for the next test
+                run.
+                """
+                test._savedGlobals = getattr(test, '_savedGlobals', test.globs)
+                test.globs = test._savedGlobals.copy()
+            extraArgs['setUp'] = saveGlobals
+        return doctest.DocTestSuite(module, **extraArgs)
+
+    def loadAnything(self, thing, recurse=False):
+        """
+        Given a Python object, return whatever tests are in it, for a
+        suitably loose sense of 'in'.
+
+        @param thing: A Python object. A module, method, class or package.
+        @param recurse: Whether or not to look in subpackages of packages.
+        Defaults to False.
+
+        @return: A C{TestCase} or C{TestSuite}.
+        """
+        if isinstance(thing, types.ModuleType):
+            if isPackage(thing):
+                return self.loadPackage(thing, recurse)
+            return self.loadModule(thing)
+        elif isinstance(thing, types.ClassType):
+            return self.loadClass(thing)
+        elif isinstance(thing, type):
+            return self.loadClass(thing)
+        elif isinstance(thing, types.MethodType):
+            return self.loadMethod(thing)
+        raise TypeError("No loader for %r. Unrecognized type" % (thing,))
+
+    def loadByName(self, name, recurse=False):
+        """
+        Given a string representing a Python object, return whatever tests
+        are in that object.
+
+        If C{name} is somehow inaccessible (e.g. the module can't be imported,
+        there is no Python object with that name, etc.) then return an
+        L{ErrorHolder}.
+
+        @param name: The fully-qualified name of a Python object.
+        """
+        try:
+            thing = self.findByName(name)
+        except:
+            return ErrorHolder(name, failure.Failure())
+        return self.loadAnything(thing, recurse)
+    loadTestsFromName = loadByName
+
+    def loadByNames(self, names, recurse=False):
+        """
+        Construct a TestSuite containing all the tests found in 'names', where
+        names is a list of fully qualified Python names and/or filenames. The
+        suite returned will have no duplicate tests, even if the same object
+        is named twice.
+        """
+        things = []
+        errors = []
+        for name in names:
+            try:
+                things.append(self.findByName(name))
+            except:
+                errors.append(ErrorHolder(name, failure.Failure()))
+        suites = [self.loadAnything(thing, recurse)
+                  for thing in self._uniqueTests(things)]
+        suites.extend(errors)
+        return self.suiteFactory(suites)
+
+
+    def _uniqueTests(self, things):
+        """
+        Gather unique suite objects from loaded things. This guarantees
+        uniqueness of inherited methods on TestCases, which would otherwise
+        hash to the same value and unexpectedly collapse to one test if a
+        simpler approach (e.g. a plain set()) were used.
+        """
+        entries = []
+        for thing in things:
+            if isinstance(thing, types.MethodType):
+                entries.append((thing, thing.im_class))
+            else:
+                entries.append((thing,))
+        return [entry[0] for entry in set(entries)]
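
Editorial aside, not part of the imported diff: a sketch of the
de-duplication promised by loadByNames() above. Naming the same object twice
yields one copy of its tests, because _uniqueTests() collapses duplicates
before the suites are built.

    from twisted.trial import runner

    loader = runner.TestLoader()
    # The module is named twice, but its tests appear only once in the suite.
    suite = loader.loadByNames([
        "twisted.trial.test.sample",
        "twisted.trial.test.sample",
    ])
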
+
+
+
+class DryRunVisitor(object):
+    """
+    A visitor that makes a reporter think that every test visited has run
+    successfully.
+    """
+
+    def __init__(self, reporter):
+        """
+        @param reporter: A C{TestResult} object.
+        """
+        self.reporter = reporter
+
+
+    def markSuccessful(self, testCase):
+        """
+        Convince the reporter that this test has been run successfully.
+        """
+        self.reporter.startTest(testCase)
+        self.reporter.addSuccess(testCase)
+        self.reporter.stopTest(testCase)
+
+
+
+class TrialRunner(object):
+    """
+    A specialised runner that the trial front end uses.
+    """
+
+    DEBUG = 'debug'
+    DRY_RUN = 'dry-run'
+
+    def _setUpTestdir(self):
+        self._tearDownLogFile()
+        currentDir = os.getcwd()
+        base = filepath.FilePath(self.workingDirectory)
+        testdir, self._testDirLock = util._unusedTestDirectory(base)
+        os.chdir(testdir.path)
+        return currentDir
+
+
+    def _tearDownTestdir(self, oldDir):
+        os.chdir(oldDir)
+        self._testDirLock.unlock()
+
+
+    _log = log
+    def _makeResult(self):
+        reporter = self.reporterFactory(self.stream, self.tbformat,
+                                        self.rterrors, self._log)
+        if self.uncleanWarnings:
+            reporter = UncleanWarningsReporterWrapper(reporter)
+        return reporter
+
+    def __init__(self, reporterFactory,
+                 mode=None,
+                 logfile='test.log',
+                 stream=sys.stdout,
+                 profile=False,
+                 tracebackFormat='default',
+                 realTimeErrors=False,
+                 uncleanWarnings=False,
+                 workingDirectory=None,
+                 forceGarbageCollection=False,
+                 debugger=None):
+        self.reporterFactory = reporterFactory
+        self.logfile = logfile
+        self.mode = mode
+        self.stream = stream
+        self.tbformat = tracebackFormat
+        self.rterrors = realTimeErrors
+        self.uncleanWarnings = uncleanWarnings
+        self._result = None
+        self.workingDirectory = workingDirectory or '_trial_temp'
+        self._logFileObserver = None
+        self._logFileObject = None
+        self._forceGarbageCollection = forceGarbageCollection
+        self.debugger = debugger
+        if profile:
+            self.run = util.profiled(self.run, 'profile.data')
+
+    def _tearDownLogFile(self):
+        if self._logFileObserver is not None:
+            log.removeObserver(self._logFileObserver.emit)
+            self._logFileObserver = None
+        if self._logFileObject is not None:
+            self._logFileObject.close()
+            self._logFileObject = None
+
+    def _setUpLogFile(self):
+        self._tearDownLogFile()
+        if self.logfile == '-':
+            logFile = sys.stdout
+        else:
+            logFile = file(self.logfile, 'a')
+        self._logFileObject = logFile
+        self._logFileObserver = log.FileLogObserver(logFile)
+        log.startLoggingWithObserver(self._logFileObserver.emit, 0)
+
+
+    def run(self, test):
+        """
+        Run the test or suite and return a result object.
+        """
+        test = unittest.decorate(test, ITestCase)
+        return self._runWithoutDecoration(test, self._forceGarbageCollection)
+
+
+    def _runWithoutDecoration(self, test, forceGarbageCollection=False):
+        """
+        Private helper that runs the given test but doesn't decorate it.
+        """
+        result = self._makeResult()
+        # Decorate the suite with reactor cleanup and log-starting behaviour.
+        # This should move out of the runner and be presumed to be present.
+        suite = TrialSuite([test], forceGarbageCollection)
+        startTime = time.time()
+        if self.mode == self.DRY_RUN:
+            for single in unittest._iterateTests(suite):
+                result.startTest(single)
+                result.addSuccess(single)
+                result.stopTest(single)
+        else:
+            if self.mode == self.DEBUG:
+                run = lambda: self.debugger.runcall(suite.run, result)
+            else:
+                run = lambda: suite.run(result)
+
+            oldDir = self._setUpTestdir()
+            try:
+                self._setUpLogFile()
+                run()
+            finally:
+                self._tearDownLogFile()
+                self._tearDownTestdir(oldDir)
+
+        endTime = time.time()
+        done = getattr(result, 'done', None)
+        if done is None:
+            warnings.warn(
+                "%s should implement done() but doesn't. Falling back to "
+                "printErrors() and friends." % reflect.qual(result.__class__),
+                category=DeprecationWarning, stacklevel=3)
+            result.printErrors()
+            result.writeln(result.separator)
+            result.writeln('Ran %d tests in %.3fs', result.testsRun,
+                           endTime - startTime)
+            result.write('\n')
+            result.printSummary()
+        else:
+            result.done()
+        return result
+
+
+    def runUntilFailure(self, test):
+        """
+        Repeatedly run C{test} until it fails.
+        """
+        count = 0
+        while True:
+            count += 1
+            self.stream.write("Test Pass %d\n" % (count,))
+            if count == 1:
+                result = self.run(test)
+            else:
+                result = self._runWithoutDecoration(test)
+            if result.testsRun == 0:
+                break
+            if not result.wasSuccessful():
+                break
+        return result
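
Editorial aside, not part of the imported diff: a sketch of how TestLoader
and TrialRunner above fit together, assuming twisted.trial.reporter's
TreeReporter as the reporter factory (any factory taking the same
(stream, tbformat, realtime, log) arguments would do).

    from twisted.trial import reporter, runner

    loader = runner.TestLoader()
    suite = loader.loadByName("twisted.trial.test.sample")
    # run() decorates the suite, switches into a scratch _trial_temp working
    # directory, writes test.log there, and returns the populated result.
    trialRunner = runner.TrialRunner(reporter.TreeReporter)
    result = trialRunner.run(suite)
    print(result.wasSuccessful())
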
diff --git a/ThirdParty/Twisted/twisted/trial/test/__init__.py b/ThirdParty/Twisted/twisted/trial/test/__init__.py
new file mode 100644
index 0000000..8553df7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Unit tests for the Trial unit-testing framework.
+"""
diff --git a/ThirdParty/Twisted/twisted/trial/test/detests.py b/ThirdParty/Twisted/twisted/trial/test/detests.py
new file mode 100644
index 0000000..237a989
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/detests.py
@@ -0,0 +1,203 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for Deferred handling by L{twisted.trial.unittest.TestCase}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial import unittest
+from twisted.internet import defer, threads, reactor
+
+
+class DeferredSetUpOK(unittest.TestCase):
+    def setUp(self):
+        d = defer.succeed('value')
+        d.addCallback(self._cb_setUpCalled)
+        return d
+
+    def _cb_setUpCalled(self, ignored):
+        self._setUpCalled = True
+
+    def test_ok(self):
+        self.failUnless(self._setUpCalled)
+
+
+class DeferredSetUpFail(unittest.TestCase):
+    testCalled = False
+
+    def setUp(self):
+        return defer.fail(unittest.FailTest('i fail'))
+
+    def test_ok(self):
+        DeferredSetUpFail.testCalled = True
+        self.fail("I should not get called")
+
+
+class DeferredSetUpCallbackFail(unittest.TestCase):
+    testCalled = False
+
+    def setUp(self):
+        d = defer.succeed('value')
+        d.addCallback(self._cb_setUpCalled)
+        return d
+
+    def _cb_setUpCalled(self, ignored):
+        self.fail('deliberate failure')
+
+    def test_ok(self):
+        DeferredSetUpCallbackFail.testCalled = True
+
+
+class DeferredSetUpError(unittest.TestCase):
+    testCalled = False
+
+    def setUp(self):
+        return defer.fail(RuntimeError('deliberate error'))
+
+    def test_ok(self):
+        DeferredSetUpError.testCalled = True
+
+
+class DeferredSetUpNeverFire(unittest.TestCase):
+    testCalled = False
+
+    def setUp(self):
+        return defer.Deferred()
+
+    def test_ok(self):
+        DeferredSetUpNeverFire.testCalled = True
+
+
+class DeferredSetUpSkip(unittest.TestCase):
+    testCalled = False
+
+    def setUp(self):
+        d = defer.succeed('value')
+        d.addCallback(self._cb1)
+        return d
+
+    def _cb1(self, ignored):
+        raise unittest.SkipTest("skip me")
+
+    def test_ok(self):
+        DeferredSetUpSkip.testCalled = True
+
+
+class DeferredTests(unittest.TestCase):
+    touched = False
+
+    def _cb_fail(self, reason):
+        self.fail(reason)
+
+    def _cb_error(self, reason):
+        raise RuntimeError(reason)
+
+    def _cb_skip(self, reason):
+        raise unittest.SkipTest(reason)
+
+    def _touchClass(self, ignored):
+        self.__class__.touched = True
+
+    def setUp(self):
+        self.__class__.touched = False
+
+    def test_pass(self):
+        return defer.succeed('success')
+
+    def test_passGenerated(self):
+        self._touchClass(None)
+        yield None
+    test_passGenerated = defer.deferredGenerator(test_passGenerated)
+
+    def test_fail(self):
+        return defer.fail(self.failureException('I fail'))
+
+    def test_failureInCallback(self):
+        d = defer.succeed('fail')
+        d.addCallback(self._cb_fail)
+        return d
+
+    def test_errorInCallback(self):
+        d = defer.succeed('error')
+        d.addCallback(self._cb_error)
+        return d
+
+    def test_skip(self):
+        d = defer.succeed('skip')
+        d.addCallback(self._cb_skip)
+        d.addCallback(self._touchClass)
+        return d
+
+    def test_thread(self):
+        return threads.deferToThread(lambda : None)
+
+    def test_expectedFailure(self):
+        d = defer.succeed('todo')
+        d.addCallback(self._cb_error)
+        return d
+    test_expectedFailure.todo = "Expected failure"
+
+
+class TimeoutTests(unittest.TestCase):
+    timedOut = None
+
+    def test_pass(self):
+        d = defer.Deferred()
+        reactor.callLater(0, d.callback, 'hoorj!')
+        return d
+    test_pass.timeout = 2
+
+    def test_passDefault(self):
+        # test default timeout
+        d = defer.Deferred()
+        reactor.callLater(0, d.callback, 'hoorj!')
+        return d
+
+    def test_timeout(self):
+        return defer.Deferred()
+    test_timeout.timeout = 0.1
+
+    def test_timeoutZero(self):
+        return defer.Deferred()
+    test_timeoutZero.timeout = 0
+
+    def test_expectedFailure(self):
+        return defer.Deferred()
+    test_expectedFailure.timeout = 0.1
+    test_expectedFailure.todo = "i will get it right, eventually"
+
+    def test_skip(self):
+        return defer.Deferred()
+    test_skip.timeout = 0.1
+    test_skip.skip = "i will get it right, eventually"
+
+    def test_errorPropagation(self):
+        def timedOut(err):
+            self.__class__.timedOut = err
+            return err
+        d = defer.Deferred()
+        d.addErrback(timedOut)
+        return d
+    test_errorPropagation.timeout = 0.1
+
+    def test_calledButNeverCallback(self):
+        d = defer.Deferred()
+        def neverFire(r):
+            return defer.Deferred()
+        d.addCallback(neverFire)
+        d.callback(1)
+        return d
+    test_calledButNeverCallback.timeout = 0.1
+
+
+class TestClassTimeoutAttribute(unittest.TestCase):
+    timeout = 0.2
+
+    def setUp(self):
+        self.d = defer.Deferred()
+
+    def testMethod(self):
+        self.methodCalled = True
+        return self.d
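
Editorial aside, not part of the imported diff: these fixture classes
exercise trial's handling of Deferreds returned from setUp and from test
methods. A hedged sketch of the same pattern in ordinary test code, with a
per-method timeout like the ones above (class and method names are
illustrative only):

    from twisted.internet import defer, reactor
    from twisted.trial import unittest

    class DeferredExample(unittest.TestCase):
        def test_firesLater(self):
            # trial waits for the returned Deferred to fire, but no longer
            # than the `timeout` attribute (in seconds) set below.
            d = defer.Deferred()
            reactor.callLater(0, d.callback, 'done')
            return d
        test_firesLater.timeout = 2
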
diff --git a/ThirdParty/Twisted/twisted/trial/test/erroneous.py b/ThirdParty/Twisted/twisted/trial/test/erroneous.py
new file mode 100644
index 0000000..b7df3f7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/erroneous.py
@@ -0,0 +1,167 @@
+# -*- test-case-name: twisted.trial.test.test_tests -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Definitions of test cases with various interesting error-related behaviors, to
+be used by test modules to exercise different features of trial's test runner.
+
+See the L{twisted.trial.test.test_tests} module docstring for details about how
+this code is arranged.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial import unittest, util
+from twisted.internet import reactor, protocol, defer
+
+
+class FoolishError(Exception):
+    pass
+
+
+
+class FailureInSetUpMixin(object):
+    def setUp(self):
+        raise FoolishError("I am a broken setUp method")
+
+    def test_noop(self):
+        pass
+
+
+
+class SynchronousTestFailureInSetUp(
+    FailureInSetUpMixin, unittest.SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousTestFailureInSetUp(
+    FailureInSetUpMixin, unittest.TestCase):
+    pass
+
+
+
+class FailureInTearDownMixin(object):
+    def tearDown(self):
+        raise FoolishError("I am a broken tearDown method")
+
+    def test_noop(self):
+        pass
+
+
+
+class SynchronousTestFailureInTearDown(
+    FailureInTearDownMixin, unittest.SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousTestFailureInTearDown(
+    FailureInTearDownMixin, unittest.TestCase):
+    pass
+
+
+
+class TestRegularFail(unittest.SynchronousTestCase):
+    def test_fail(self):
+        self.fail("I fail")
+
+    def test_subfail(self):
+        self.subroutine()
+
+    def subroutine(self):
+        self.fail("I fail inside")
+
+class TestFailureInDeferredChain(unittest.TestCase):
+    def test_fail(self):
+        d = defer.Deferred()
+        d.addCallback(self._later)
+        reactor.callLater(0, d.callback, None)
+        return d
+    def _later(self, res):
+        self.fail("I fail later")
+
+
+
+class ErrorTest(unittest.SynchronousTestCase):
+    """
+    A test case which has a L{test_foo} which will raise an error.
+
+    @ivar ran: boolean indicating whether L{test_foo} has been run.
+    """
+    ran = False
+
+    def test_foo(self):
+        """
+        Set C{self.ran} to True and raise a C{ZeroDivisionError}
+        """
+        self.ran = True
+        1/0
+
+
+
+class TestSkipTestCase(unittest.SynchronousTestCase):
+    pass
+
+TestSkipTestCase.skip = "skipping this test"
+
+
+class DelayedCall(unittest.TestCase):
+    hiddenExceptionMsg = "something blew up"
+
+    def go(self):
+        raise RuntimeError(self.hiddenExceptionMsg)
+
+    def testHiddenException(self):
+        """
+        What happens if an error is raised in a DelayedCall and an error is
+        also raised in the test?
+
+        L{test_reporter.TestErrorReporting.testHiddenException} checks that
+        both errors get reported.
+
+        Note that this behaviour is deprecated. A B{real} test would return a
+        Deferred that got triggered by the callLater. This would guarantee the
+        delayed call error gets reported.
+        """
+        reactor.callLater(0, self.go)
+        reactor.iterate(0.01)
+        self.fail("Deliberate failure to mask the hidden exception")
+    testHiddenException.suppress = [util.suppress(
+        message=r'reactor\.iterate cannot be used.*',
+        category=DeprecationWarning)]
+
+
+class ReactorCleanupTests(unittest.TestCase):
+    def test_leftoverPendingCalls(self):
+        def _():
+            print('foo!')
+        reactor.callLater(10000.0, _)
+
+class SocketOpenTest(unittest.TestCase):
+    def test_socketsLeftOpen(self):
+        f = protocol.Factory()
+        f.protocol = protocol.Protocol
+        reactor.listenTCP(0, f)
+
+class TimingOutDeferred(unittest.TestCase):
+    def test_alpha(self):
+        pass
+
+    def test_deferredThatNeverFires(self):
+        self.methodCalled = True
+        d = defer.Deferred()
+        return d
+
+    def test_omega(self):
+        pass
+
+
+def unexpectedException(self):
+    """i will raise an unexpected exception...
+    ... *CAUSE THAT'S THE KINDA GUY I AM*
+
+    >>> 1/0
+    """
+
diff --git a/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite.py b/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite.py
new file mode 100644
index 0000000..4959ef3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2006 Twisted Matrix Laboratories. See LICENSE for details
+
+"""
+Mock test module that contains a C{test_suite} method. L{runner.TestLoader}
+should load the tests from the C{test_suite}, not from the C{Foo} C{TestCase}.
+
+See L{twisted.trial.test.test_loader.LoaderTest.test_loadModuleWith_test_suite}.
+"""
+
+
+from twisted.trial import unittest, runner
+
+class Foo(unittest.SynchronousTestCase):
+    def test_foo(self):
+        pass
+
+
+def test_suite():
+    ts = runner.TestSuite()
+    ts.name = "MyCustomSuite"
+    return ts
diff --git a/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite2.py b/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite2.py
new file mode 100644
index 0000000..d2a85c4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite2.py
@@ -0,0 +1,21 @@
+# Copyright (c) 2006 Twisted Matrix Laboratories. See LICENSE for details
+
+"""
+Mock test module that contains a C{testSuite} method. L{runner.TestLoader}
+should load the tests from the C{testSuite}, not from the C{Foo} C{TestCase}.
+
+See L{twisted.trial.test.test_loader.LoaderTest.test_loadModuleWith_testSuite}.
+"""
+
+
+from twisted.trial import unittest, runner
+
+class Foo(unittest.SynchronousTestCase):
+    def test_foo(self):
+        pass
+
+
+def testSuite():
+    ts = runner.TestSuite()
+    ts.name = "MyCustomSuite"
+    return ts
diff --git a/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite3.py b/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite3.py
new file mode 100644
index 0000000..16fddcf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/mockcustomsuite3.py
@@ -0,0 +1,28 @@
+# Copyright (c) 2006 Twisted Matrix Laboratories. See LICENSE for details
+
+"""
+Mock test module that contains both a C{test_suite} and a C{testSuite} method.
+L{runner.TestLoader} should load the tests from the C{testSuite}, not from the
+C{Foo} C{TestCase} nor from the C{test_suite} method.
+
+See L{twisted.trial.test.test_loader.LoaderTest.test_loadModuleWithBothCustom}.
+"""
+
+
+from twisted.trial import unittest, runner
+
+class Foo(unittest.SynchronousTestCase):
+    def test_foo(self):
+        pass
+
+
+def test_suite():
+    ts = runner.TestSuite()
+    ts.name = "test_suite"
+    return ts
+
+
+def testSuite():
+    ts = runner.TestSuite()
+    ts.name = "testSuite"
+    return ts
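
Editorial aside, not part of the imported diff: a sketch of the precedence
these three mock modules exercise. loadModule() (earlier in this diff) checks
for a testSuite() hook before test_suite(), so for mockcustomsuite3 the suite
named "testSuite" wins and Foo.test_foo is never collected.

    from twisted.trial import runner
    from twisted.trial.test import mockcustomsuite3

    loader = runner.TestLoader()
    suite = loader.loadModule(mockcustomsuite3)
    print(suite.name)  # "testSuite", not "test_suite"
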
diff --git a/ThirdParty/Twisted/twisted/trial/test/mockdoctest.py b/ThirdParty/Twisted/twisted/trial/test/mockdoctest.py
new file mode 100644
index 0000000..7b038ac
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/mockdoctest.py
@@ -0,0 +1,104 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# this module is a trivial class with doctests and a __test__ attribute
+# to test trial's doctest support with python2.4
+from __future__ import division, absolute_import
+
+class Counter(object):
+    """a simple counter object for testing trial's doctest support
+
+         >>> c = Counter()
+         >>> c.value()
+         0
+         >>> c += 3
+         >>> c.value()
+         3
+         >>> c.incr()
+         >>> c.value() == 4
+         True
+         >>> c == 4
+         True
+         >>> c != 9
+         True
+
+    """
+    _count = 0
+
+    def __init__(self, initialValue=0, maxval=None):
+        self._count = initialValue
+        self.maxval = maxval
+
+    def __iadd__(self, other):
+        """add other to my value and return self
+
+             >>> c = Counter(100)
+             >>> c += 333
+             >>> c == 433
+             True
+        """
+        if self.maxval is not None and ((self._count + other) > self.maxval):
+            raise ValueError, "sorry, counter got too big"
+        else:
+            self._count += other
+        return self
+
+    def __eq__(self, other):
+        """equality operator, compare other to my value()
+           
+           >>> c = Counter()
+           >>> c == 0
+           True
+           >>> c += 10
+           >>> c.incr()
+           >>> c == 10   # fail this test on purpose
+           True
+
+        """
+        return self._count == other
+
+    def __ne__(self, other):
+        """inequality operator
+
+             >>> c = Counter()
+             >>> c != 10
+             True
+        """
+        return not self.__eq__(other)
+
+    def incr(self):
+        """increment my value by 1
+
+             >>> from twisted.trial.test.mockdoctest import Counter
+             >>> c = Counter(10, 11)
+             >>> c.incr()
+             >>> c.value() == 11
+             True
+             >>> c.incr()
+             Traceback (most recent call last):
+               File "<stdin>", line 1, in ?
+               File "twisted/trial/test/mockdoctest.py", line 51, in incr
+                 self.__iadd__(1)
+               File "twisted/trial/test/mockdoctest.py", line 39, in __iadd__
+                 raise ValueError, "sorry, counter got too big"
+             ValueError: sorry, counter got too big
+        """
+        self.__iadd__(1)
+
+    def value(self):
+        """return this counter's value
+
+             >>> c = Counter(555)
+             >>> c.value() == 555
+             True
+        """
+        return self._count
+
+    def unexpectedException(self):
+        """i will raise an unexpected exception...
+        ... *CAUSE THAT'S THE KINDA GUY I AM*
+            
+              >>> 1/0
+        """
+
+
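
Editorial aside, not part of the imported diff: mockdoctest.Counter exists so
that trial's doctest support can be tested. A sketch of the __doctests__ hook
that loadModule() honours; the module path in the comment is hypothetical.

    # Contents of some test module, e.g. myproject/test/test_docs.py
    # (hypothetical): listing modules here makes loadModule() wrap their
    # doctests in a doctest.DocTestSuite via loadDoctests().
    from twisted.trial.test import mockdoctest

    __doctests__ = [mockdoctest]
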
diff --git a/ThirdParty/Twisted/twisted/trial/test/moduleself.py b/ThirdParty/Twisted/twisted/trial/test/moduleself.py
new file mode 100644
index 0000000..2196611
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/moduleself.py
@@ -0,0 +1,7 @@
+# -*- test-case-name: twisted.trial.test.moduleself -*-
+from twisted.trial import unittest
+
+class Foo(unittest.SynchronousTestCase):
+
+    def testFoo(self):
+        pass
diff --git a/ThirdParty/Twisted/twisted/trial/test/moduletest.py b/ThirdParty/Twisted/twisted/trial/test/moduletest.py
new file mode 100644
index 0000000..c5e1d70
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/moduletest.py
@@ -0,0 +1,11 @@
+# -*- test-case-name: twisted.trial.test.test_test_visitor -*-
+
+# fodder for test_script, which parses files for emacs local variable
+# declarations.  This one is supposed to have:
+#    test-case-name: twisted.trial.test.test_test_visitor.
+# in the first line
+# The class declaration is irrelevant
+
+class Foo(object):
+    pass
+
diff --git a/ThirdParty/Twisted/twisted/trial/test/notpython b/ThirdParty/Twisted/twisted/trial/test/notpython
new file mode 100644
index 0000000..311485c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/notpython
@@ -0,0 +1,2 @@
+
+this isn't python
diff --git a/ThirdParty/Twisted/twisted/trial/test/novars.py b/ThirdParty/Twisted/twisted/trial/test/novars.py
new file mode 100644
index 0000000..93bc03d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/novars.py
@@ -0,0 +1,6 @@
+# fodder for test_script, which parses files for emacs local variable
+# declarations.  This one is supposed to have none.
+# The class declaration is irrelevant
+
+class Bar(object):
+    pass
diff --git a/ThirdParty/Twisted/twisted/trial/test/packages.py b/ThirdParty/Twisted/twisted/trial/test/packages.py
new file mode 100644
index 0000000..1019855
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/packages.py
@@ -0,0 +1,180 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+
+"""
+Classes and functions used by L{twisted.trial.test.test_util}.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, os
+from twisted.trial import unittest
+
+testModule = """
+from twisted.trial import unittest
+
+class FooTest(unittest.SynchronousTestCase):
+    def testFoo(self):
+        pass
+"""
+
+dosModule = testModule.replace('\n', '\r\n')
+
+
+testSample = """
+'''This module is used by test_loader to test the Trial test loading
+functionality. Do NOT change the number of tests in this module.
+Do NOT change the names the tests in this module.
+'''
+
+import unittest as pyunit
+from twisted.trial import unittest
+
+class FooTest(unittest.SynchronousTestCase):
+    def test_foo(self):
+        pass
+
+    def test_bar(self):
+        pass
+
+
+class PyunitTest(pyunit.TestCase):
+    def test_foo(self):
+        pass
+
+    def test_bar(self):
+        pass
+
+
+class NotATest(object):
+    def test_foo(self):
+        pass
+
+
+class AlphabetTest(unittest.SynchronousTestCase):
+    def test_a(self):
+        pass
+
+    def test_b(self):
+        pass
+
+    def test_c(self):
+        pass
+"""
+
+testInheritanceSample = """
+'''This module is used by test_loader to test the Trial test loading
+functionality. Do NOT change the number of tests in this module.
+Do NOT change the names of the tests in this module.
+'''
+
+from twisted.trial import unittest
+
+class X(object):
+
+    def test_foo(self):
+        pass
+
+class A(unittest.SynchronousTestCase, X):
+    pass
+
+class B(unittest.SynchronousTestCase, X):
+    pass
+
+"""
+
+class PackageTest(unittest.SynchronousTestCase):
+    files = [
+        ('badpackage/__init__.py', 'frotz\n'),
+        ('badpackage/test_module.py', ''),
+        ('package2/__init__.py', ''),
+        ('package2/test_module.py', 'import frotz\n'),
+        ('package/__init__.py', ''),
+        ('package/frotz.py', 'frotz\n'),
+        ('package/test_bad_module.py',
+         'raise ZeroDivisionError("fake error")'),
+        ('package/test_dos_module.py', dosModule),
+        ('package/test_import_module.py', 'import frotz'),
+        ('package/test_module.py', testModule),
+        ('goodpackage/__init__.py', ''),
+        ('goodpackage/test_sample.py', testSample),
+        ('goodpackage/sub/__init__.py', ''),
+        ('goodpackage/sub/test_sample.py', testSample),
+        ('inheritancepackage/__init__.py', ''),
+        ('inheritancepackage/test_x.py', testInheritanceSample),
+        ]
+
+
+    def _toModuleName(self, filename):
+        name = os.path.splitext(filename)[0]
+        segs = name.split('/')
+        if segs[-1] == '__init__':
+            segs = segs[:-1]
+        return '.'.join(segs)
+
+
+    def getModules(self):
+        """
+        Return matching module names for files listed in C{self.files}.
+        """
+        return [self._toModuleName(filename) for (filename, code) in self.files]
+
+
+    def cleanUpModules(self):
+        modules = self.getModules()
+        modules.sort()
+        modules.reverse()
+        for module in modules:
+            try:
+                del sys.modules[module]
+            except KeyError:
+                pass
+
+
+    def createFiles(self, files, parentDir='.'):
+        for filename, contents in self.files:
+            filename = os.path.join(parentDir, filename)
+            self._createDirectory(filename)
+            fd = open(filename, 'w')
+            fd.write(contents)
+            fd.close()
+
+
+    def _createDirectory(self, filename):
+        directory = os.path.dirname(filename)
+        if not os.path.exists(directory):
+            os.makedirs(directory)
+
+
+    def setUp(self, parentDir=None):
+        if parentDir is None:
+            parentDir = self.mktemp()
+        self.parent = parentDir
+        self.createFiles(self.files, parentDir)
+
+
+    def tearDown(self):
+        self.cleanUpModules()
+
+
+
+class SysPathManglingTest(PackageTest):
+    def setUp(self, parent=None):
+        self.oldPath = sys.path[:]
+        self.newPath = sys.path[:]
+        if parent is None:
+            parent = self.mktemp()
+        PackageTest.setUp(self, parent)
+        self.newPath.append(self.parent)
+        self.mangleSysPath(self.newPath)
+
+
+    def tearDown(self):
+        PackageTest.tearDown(self)
+        self.mangleSysPath(self.oldPath)
+
+
+    def mangleSysPath(self, pathVar):
+        sys.path[:] = pathVar
+
diff --git a/ThirdParty/Twisted/twisted/trial/test/sample.py b/ThirdParty/Twisted/twisted/trial/test/sample.py
new file mode 100644
index 0000000..623efd0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/sample.py
@@ -0,0 +1,108 @@
+"""This module is used by test_loader to test the Trial test loading
+functionality. Do NOT change the number of tests in this module.  Do NOT change
+the names of the tests in this module.
+"""
+
+import unittest as pyunit
+from twisted.trial import unittest
+from twisted.python.util import mergeFunctionMetadata
+
+
+
+class FooTest(unittest.SynchronousTestCase):
+
+
+    def test_foo(self):
+        pass
+
+
+    def test_bar(self):
+        pass
+
+
+
+def badDecorator(fn):
+    """
+    Decorate a function without preserving the name of the original function.
+    Always return a function with the same name.
+    """
+    def nameCollision(*args, **kwargs):
+        return fn(*args, **kwargs)
+    return nameCollision
+
+
+
+def goodDecorator(fn):
+    """
+    Decorate a function and preserve the original name.
+    """
+    def nameCollision(*args, **kwargs):
+        return fn(*args, **kwargs)
+    return mergeFunctionMetadata(fn, nameCollision)
+
+
+
+class DecorationTest(unittest.SynchronousTestCase):
+    def test_badDecorator(self):
+        """
+        This test method is decorated in a way that gives it a confusing name
+        that collides with another method.
+        """
+    test_badDecorator = badDecorator(test_badDecorator)
+
+
+    def test_goodDecorator(self):
+        """
+        This test method is decorated in a way that preserves its name.
+        """
+    test_goodDecorator = goodDecorator(test_goodDecorator)
+
+
+    def renamedDecorator(self):
+        """
+        This is secretly a test method and will be decorated and then renamed so
+        test discovery can find it.
+        """
+    test_renamedDecorator = goodDecorator(renamedDecorator)
+
+
+    def nameCollision(self):
+        """
+        This isn't a test, it's just here to collide with tests.
+        """
+
+
+
+class PyunitTest(pyunit.TestCase):
+
+
+    def test_foo(self):
+        pass
+
+
+    def test_bar(self):
+        pass
+
+
+
+class NotATest(object):
+
+
+    def test_foo(self):
+        pass
+
+
+
+class AlphabetTest(unittest.SynchronousTestCase):
+
+
+    def test_a(self):
+        pass
+
+
+    def test_b(self):
+        pass
+
+
+    def test_c(self):
+        pass
diff --git a/ThirdParty/Twisted/twisted/trial/test/scripttest.py b/ThirdParty/Twisted/twisted/trial/test/scripttest.py
new file mode 100755
index 0000000..267c189
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/scripttest.py
@@ -0,0 +1,14 @@
+#!/usr/bin/env python
+# -*- test-case-name: twisted.trial.test.test_test_visitor,twisted.trial.test.test_class -*-
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# fodder for test_script, which parses files for emacs local variable
+# declarations.  This one is supposed to have:
+#    test-case-name: twisted.trial.test.test_test_visitor
+# in the second line
+# The class declaration is irrelevant
+
+class Foo(object):
+    pass
diff --git a/ThirdParty/Twisted/twisted/trial/test/skipping.py b/ThirdParty/Twisted/twisted/trial/test/skipping.py
new file mode 100644
index 0000000..5c040b3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/skipping.py
@@ -0,0 +1,270 @@
+# -*- test-case-name: twisted.trial.test.test_tests -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Definitions of test cases with various interesting behaviors, to be used by
+L{twisted.trial.test.test_tests} and other test modules to exercise different
+features of trial's test runner.
+
+See the L{twisted.trial.test.test_tests} module docstring for details about how
+this code is arranged.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import (
+    SynchronousTestCase, TestCase, SkipTest, FailTest)
+
+
+class SkippingMixin(object):
+    def test_skip1(self):
+        raise SkipTest('skip1')
+
+    def test_skip2(self):
+        raise RuntimeError("I should not get raised")
+    test_skip2.skip = 'skip2'
+
+    def test_skip3(self):
+        self.fail('I should not fail')
+    test_skip3.skip = 'skip3'
+
+
+
+class SynchronousSkipping(SkippingMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousSkipping(SkippingMixin, TestCase):
+    pass
+
+
+
+class SkippingSetUpMixin(object):
+    def setUp(self):
+        raise SkipTest('skipSetUp')
+
+    def test_1(self):
+        pass
+
+    def test_2(self):
+        pass
+
+
+class SynchronousSkippingSetUp(SkippingSetUpMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousSkippingSetUp(SkippingSetUpMixin, TestCase):
+    pass
+
+
+
+class DeprecatedReasonlessSkipMixin(object):
+    def test_1(self):
+        raise SkipTest()
+
+
+
+class SynchronousDeprecatedReasonlessSkip(
+    DeprecatedReasonlessSkipMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousDeprecatedReasonlessSkip(
+    DeprecatedReasonlessSkipMixin, TestCase):
+    pass
+
+
+
+class SkippedClassMixin(object):
+    skip = 'class'
+    def setUp(self):
+        self.__class__._setUpRan = True
+    def test_skip1(self):
+        raise SkipTest('skip1')
+    def test_skip2(self):
+        raise RuntimeError("Ought to skip me")
+    test_skip2.skip = 'skip2'
+    def test_skip3(self):
+        pass
+    def test_skip4(self):
+        raise RuntimeError("Skip me too")
+
+
+
+class SynchronousSkippedClass(SkippedClassMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousSkippedClass(SkippedClassMixin, TestCase):
+    pass
+
+
+
+class TodoMixin(object):
+    def test_todo1(self):
+        self.fail("deliberate failure")
+    test_todo1.todo = "todo1"
+
+    def test_todo2(self):
+        raise RuntimeError("deliberate error")
+    test_todo2.todo = "todo2"
+
+    def test_todo3(self):
+        """unexpected success"""
+    test_todo3.todo = 'todo3'
+
+
+
+
+class SynchronousTodo(TodoMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousTodo(TodoMixin, TestCase):
+    pass
+
+
+
+class SetUpTodoMixin(object):
+    def setUp(self):
+        raise RuntimeError("deliberate error")
+
+    def test_todo1(self):
+        pass
+    test_todo1.todo = "setUp todo1"
+
+
+
+class SynchronousSetUpTodo(SetUpTodoMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousSetUpTodo(SetUpTodoMixin, TestCase):
+    pass
+
+
+
+class TearDownTodoMixin(object):
+    def tearDown(self):
+        raise RuntimeError("deliberate error")
+
+    def test_todo1(self):
+        pass
+    test_todo1.todo = "tearDown todo1"
+
+
+
+class SynchronousTearDownTodo(TearDownTodoMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousTearDownTodo(TearDownTodoMixin, TestCase):
+    pass
+
+
+
+class TodoClassMixin(object):
+    todo = "class"
+    def test_todo1(self):
+        pass
+    test_todo1.todo = "method"
+    def test_todo2(self):
+        pass
+    def test_todo3(self):
+        self.fail("Deliberate Failure")
+    test_todo3.todo = "method"
+    def test_todo4(self):
+        self.fail("Deliberate Failure")
+
+
+
+class SynchronousTodoClass(TodoClassMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousTodoClass(TodoClassMixin, TestCase):
+    pass
+
+
+
+class StrictTodoMixin(object):
+    def test_todo1(self):
+        raise RuntimeError("expected failure")
+    test_todo1.todo = (RuntimeError, "todo1")
+
+    def test_todo2(self):
+        raise RuntimeError("expected failure")
+    test_todo2.todo = ((RuntimeError, OSError), "todo2")
+
+    def test_todo3(self):
+        raise RuntimeError("we had no idea!")
+    test_todo3.todo = (OSError, "todo3")
+
+    def test_todo4(self):
+        raise RuntimeError("we had no idea!")
+    test_todo4.todo = ((OSError, SyntaxError), "todo4")
+
+    def test_todo5(self):
+        self.fail("deliberate failure")
+    test_todo5.todo = (FailTest, "todo5")
+
+    def test_todo6(self):
+        self.fail("deliberate failure")
+    test_todo6.todo = (RuntimeError, "todo6")
+
+    def test_todo7(self):
+        pass
+    test_todo7.todo = (RuntimeError, "todo7")
+
+
+
+class SynchronousStrictTodo(StrictTodoMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousStrictTodo(StrictTodoMixin, TestCase):
+    pass
+
+
+
+class AddCleanupMixin(object):
+    def setUp(self):
+        self.log = ['setUp']
+
+    def brokenSetUp(self):
+        self.log = ['setUp']
+        raise RuntimeError("Deliberate failure")
+
+    def skippingSetUp(self):
+        self.log = ['setUp']
+        raise SkipTest("Don't do this")
+
+    def append(self, thing):
+        self.log.append(thing)
+
+    def tearDown(self):
+        self.log.append('tearDown')
+
+    def runTest(self):
+        self.log.append('runTest')
+
+
+
+class SynchronousAddCleanup(AddCleanupMixin, SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousAddCleanup(AddCleanupMixin, TestCase):
+    pass
diff --git a/ThirdParty/Twisted/twisted/trial/test/suppression.py b/ThirdParty/Twisted/twisted/trial/test/suppression.py
new file mode 100644
index 0000000..de9287c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/suppression.py
@@ -0,0 +1,115 @@
+# -*- test-case-name: twisted.trial.test.test_tests -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases used to make sure that warning suppression works at the module,
+method, and class levels.
+
+See the L{twisted.trial.test.test_tests} module docstring for details about how
+this code is arranged.
+"""
+
+from __future__ import division, absolute_import
+
+import warnings
+
+from twisted.python.compat import _PY3
+from twisted.trial import unittest, util
+
+
+
+METHOD_WARNING_MSG = "method warning message"
+CLASS_WARNING_MSG = "class warning message"
+MODULE_WARNING_MSG = "module warning message"
+
+class MethodWarning(Warning):
+    pass
+
+class ClassWarning(Warning):
+    pass
+
+class ModuleWarning(Warning):
+    pass
+
+class EmitMixin:
+    def _emit(self):
+        warnings.warn(METHOD_WARNING_MSG, MethodWarning)
+        warnings.warn(CLASS_WARNING_MSG, ClassWarning)
+        warnings.warn(MODULE_WARNING_MSG, ModuleWarning)
+
+
+class SuppressionMixin(EmitMixin):
+    suppress = [util.suppress(message=CLASS_WARNING_MSG)]
+
+    def testSuppressMethod(self):
+        self._emit()
+    testSuppressMethod.suppress = [util.suppress(message=METHOD_WARNING_MSG)]
+
+    def testSuppressClass(self):
+        self._emit()
+
+    def testOverrideSuppressClass(self):
+        self._emit()
+    testOverrideSuppressClass.suppress = []
+
+
+
+class SetUpSuppressionMixin(object):
+    def setUp(self):
+        self._emit()
+
+
+
+class TearDownSuppressionMixin(object):
+    def tearDown(self):
+        self._emit()
+
+
+
+class TestSuppression2Mixin(EmitMixin):
+    def testSuppressModule(self):
+        self._emit()
+
+
+
+suppress = [util.suppress(message=MODULE_WARNING_MSG)]
+
+
+class SynchronousTestSuppression(SuppressionMixin, unittest.SynchronousTestCase):
+    pass
+
+
+
+class SynchronousTestSetUpSuppression(SetUpSuppressionMixin, SynchronousTestSuppression):
+    pass
+
+
+
+class SynchronousTestTearDownSuppression(TearDownSuppressionMixin, SynchronousTestSuppression):
+    pass
+
+
+
+class SynchronousTestSuppression2(TestSuppression2Mixin, unittest.SynchronousTestCase):
+    pass
+
+
+
+class AsynchronousTestSuppression(SuppressionMixin, unittest.TestCase):
+    pass
+
+
+
+class AsynchronousTestSetUpSuppression(SetUpSuppressionMixin, AsynchronousTestSuppression):
+    pass
+
+
+
+class AsynchronousTestTearDownSuppression(TearDownSuppressionMixin, AsynchronousTestSuppression):
+    pass
+
+
+
+class AsynchronousTestSuppression2(TestSuppression2Mixin, unittest.TestCase):
+    pass
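
Editorial aside, not part of the imported diff: a sketch of using the same
suppression mechanism in ordinary test code, via util.suppress as exercised
by the fixtures above. The class name is made up for illustration.

    from twisted.trial import unittest, util

    class QuietTests(unittest.SynchronousTestCase):
        # Silence every DeprecationWarning raised while these tests run; the
        # same kind of list works on a single method or at module level.
        suppress = [util.suppress(category=DeprecationWarning)]

        def test_usesDeprecatedAPI(self):
            import warnings
            warnings.warn("old API", DeprecationWarning)
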
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_assertions.py b/ThirdParty/Twisted/twisted/trial/test/test_assertions.py
new file mode 100644
index 0000000..b66bab9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_assertions.py
@@ -0,0 +1,1076 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for assertions provided by C{SynchronousTestCase} and C{TestCase},
+provided by L{twisted.trial.unittest}.
+
+L{TestFailureTests} demonstrates that L{SynchronousTestCase.fail} works, so that
+is the only method on C{twisted.trial.unittest.SynchronousTestCase} that is
+initially assumed to work.  The test classes are arranged so that the methods
+demonstrated to work earlier in the file are used by those later in the file
+(even though the runner will probably not run the tests in this order).
+"""
+
+from __future__ import division, absolute_import
+
+import warnings
+from pprint import pformat
+import unittest as pyunit
+
+from twisted.python._utilpy3 import FancyEqMixin
+from twisted.python._reflectpy3 import prefixedMethods, accumulateMethods
+from twisted.python.deprecate import deprecated
+from twisted.python.versions import Version, getVersionString
+from twisted.python.failure import Failure
+from twisted.trial import unittest
+from twisted.internet.defer import Deferred, fail, succeed
+
+class MockEquality(FancyEqMixin, object):
+    compareAttributes = ("name",)
+
+    def __init__(self, name):
+        self.name = name
+
+
+    def __repr__(self):
+        return "MockEquality(%s)" % (self.name,)
+
+
+class ComparisonError(object):
+    """
+    An object which raises exceptions from its comparison methods.
+    """
+    def _error(self, other):
+        raise ValueError("Comparison is broken")
+
+    __eq__ = __ne__ = _error
+
+
+
+class TestFailureTests(pyunit.TestCase):
+    """
+    Tests for the most basic functionality of L{SynchronousTestCase}, for
+    failing tests.
+
+    This class contains tests to demonstrate that L{SynchronousTestCase.fail}
+    can be used to fail a test, and that that failure is reflected in the test
+    result object.  This should be sufficient functionality so that further
+    tests can be built on L{SynchronousTestCase} instead of
+    L{unittest.TestCase}.  This depends on L{unittest.TestCase} working.
+    """
+    class FailingTest(unittest.SynchronousTestCase):
+        def test_fails(self):
+            self.fail("This test fails.")
+
+
+    def setUp(self):
+        """
+        Load a suite of one test which can be used to exercise the failure
+        handling behavior.
+        """
+        components = [
+            __name__, self.__class__.__name__, self.FailingTest.__name__]
+        self.loader = pyunit.TestLoader()
+        self.suite = self.loader.loadTestsFromName(".".join(components))
+        self.test = list(self.suite)[0]
+
+
+    def test_fail(self):
+        """
+        L{SynchronousTestCase.fail} raises
+        L{SynchronousTestCase.failureException} with the given argument.
+        """
+        try:
+            self.test.fail("failed")
+        except self.test.failureException as result:
+            self.assertEqual("failed", str(result))
+        else:
+            self.fail(
+                "SynchronousTestCase.fail method did not raise "
+                "SynchronousTestCase.failureException")
+
+
+    def test_failingExceptionFails(self):
+        """
+        When a test method raises L{SynchronousTestCase.failureException}, the test is
+        marked as having failed on the L{TestResult}.
+        """
+        result = pyunit.TestResult()
+        self.suite.run(result)
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.errors, [])
+        self.assertEqual(len(result.failures), 1)
+        self.assertEqual(result.failures[0][0], self.test)
+
+
+
+class AssertFalseTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{SynchronousTestCase}'s C{assertFalse} and C{failIf} assertion
+    methods.
+
+    This is pretty paranoid.  Still, a certain paranoia is healthy if you
+    are testing a unit testing framework.
+
+    @note: As of 11.2, C{assertFalse} is preferred over C{failIf}.
+    """
+    def _assertFalseFalse(self, method):
+        """
+        Perform the positive case test for C{failIf} or C{assertFalse}.
+
+        @param method: The test method to test.
+        """
+        for notTrue in [0, 0.0, False, None, (), []]:
+            result = method(notTrue, "failed on %r" % (notTrue,))
+            if result != notTrue:
+                self.fail("Did not return argument %r" % (notTrue,))
+
+
+    def _assertFalseTrue(self, method):
+        """
+        Perform the negative case test for C{failIf} or C{assertFalse}.
+
+        @param method: The test method to test.
+        """
+        for true in [1, True, 'cat', [1,2], (3,4)]:
+            try:
+                method(true, "failed on %r" % (true,))
+            except self.failureException as e:
+                if str(e) != "failed on %r" % (true,):
+                    self.fail("Raised incorrect exception on %r: %r" % (true, e))
+            else:
+                self.fail("Call to failIf(%r) didn't fail" % (true,))
+
+
+    def test_failIfFalse(self):
+        """
+        L{SynchronousTestCase.failIf} returns its argument if its argument is
+        not considered true.
+        """
+        self._assertFalseFalse(self.failIf)
+
+
+    def test_assertFalseFalse(self):
+        """
+        L{SynchronousTestCase.assertFalse} returns its argument if its argument
+        is not considered true.
+        """
+        self._assertFalseFalse(self.assertFalse)
+
+
+    def test_failIfTrue(self):
+        """
+        L{SynchronousTestCase.failIf} raises
+        L{SynchronousTestCase.failureException} if its argument is considered
+        true.
+        """
+        self._assertFalseTrue(self.failIf)
+
+
+    def test_assertFalseTrue(self):
+        """
+        L{SynchronousTestCase.assertFalse} raises
+        L{SynchronousTestCase.failureException} if its argument is considered
+        true.
+        """
+        self._assertFalseTrue(self.assertFalse)
+
+
+
+class AssertTrueTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{SynchronousTestCase}'s C{assertTrue} and C{failUnless} assertion
+    methods.
+
+    This is pretty paranoid.  Still, a certain paranoia is healthy if you
+    are testing a unit testing framework.
+
+    @note: As of 11.2, C{assertTrue} is preferred over C{failUnless}.
+    """
+    def _assertTrueFalse(self, method):
+        """
+        Perform the negative case test for C{assertTrue} and C{failUnless}.
+
+        @param method: The test method to test.
+        """
+        for notTrue in [0, 0.0, False, None, (), []]:
+            try:
+                method(notTrue, "failed on %r" % (notTrue,))
+            except self.failureException as e:
+                if str(e) != "failed on %r" % (notTrue,):
+                    self.fail(
+                        "Raised incorrect exception on %r: %r" % (notTrue, e))
+            else:
+                self.fail(
+                    "Call to %s(%r) didn't fail" % (method.__name__, notTrue,))
+
+
+    def _assertTrueTrue(self, method):
+        """
+        Perform the positive case test for C{assertTrue} and C{failUnless}.
+
+        @param method: The test method to test.
+        """
+        for true in [1, True, 'cat', [1,2], (3,4)]:
+            result = method(true, "failed on %r" % (true,))
+            if result != true:
+                self.fail("Did not return argument %r" % (true,))
+
+
+    def test_assertTrueFalse(self):
+        """
+        L{SynchronousTestCase.assertTrue} raises
+        L{SynchronousTestCase.failureException} if its argument is not
+        considered true.
+        """
+        self._assertTrueFalse(self.assertTrue)
+
+
+    def test_failUnlessFalse(self):
+        """
+        L{SynchronousTestCase.failUnless} raises
+        L{SynchronousTestCase.failureException} if its argument is not
+        considered true.
+        """
+        self._assertTrueFalse(self.failUnless)
+
+
+    def test_assertTrueTrue(self):
+        """
+        L{SynchronousTestCase.assertTrue} returns its argument if its argument
+        is considered true.
+        """
+        self._assertTrueTrue(self.assertTrue)
+
+
+    def test_failUnlessTrue(self):
+        """
+        L{SynchronousTestCase.failUnless} returns its argument if its argument
+        is considered true.
+        """
+        self._assertTrueTrue(self.failUnless)
+
+
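
Editorial aside, not part of the imported diff: the two test classes above
verify that assertTrue/failUnless and assertFalse/failIf return their first
argument, and assertEqual (tested just below) does the same. A sketch of why
that is convenient in ordinary tests (class name is illustrative only):

    from twisted.trial import unittest

    class ReturnValueExample(unittest.SynchronousTestCase):
        def test_chaining(self):
            # assertTrue and assertEqual hand back the checked value, so it
            # can be used directly in the next step.
            value = self.assertTrue([1, 2, 3])
            first = self.assertEqual(value[0], 1)
            self.assertEqual(first, 1)
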
+
+class TestSynchronousAssertions(unittest.SynchronousTestCase):
+    """
+    Tests for L{SynchronousTestCase}'s assertion methods.  That is, failUnless*,
+    failIf*, assert* (not covered by other more specific test classes).
+
+    Note: As of 11.2, assertEqual is preferred over the failUnlessEqual(s)
+    variants.  Tests have been modified to reflect this preference.
+
+    This is pretty paranoid.  Still, a certain paranoia is healthy if you are
+    testing a unit testing framework.
+    """
+    def _testEqualPair(self, first, second):
+        x = self.assertEqual(first, second)
+        if x != first:
+            self.fail("assertEqual should return first parameter")
+
+
+    def _testUnequalPair(self, first, second):
+        try:
+            self.assertEqual(first, second)
+        except self.failureException as e:
+            expected = 'not equal:\na = %s\nb = %s\n' % (
+                pformat(first), pformat(second))
+            if str(e) != expected:
+                self.fail("Expected: %r; Got: %s" % (expected, str(e)))
+        else:
+            self.fail("Call to assertEqual(%r, %r) didn't fail"
+                      % (first, second))
+
+
+    def test_assertEqual_basic(self):
+        self._testEqualPair('cat', 'cat')
+        self._testUnequalPair('cat', 'dog')
+        self._testEqualPair([1], [1])
+        self._testUnequalPair([1], 'orange')
+
+
+    def test_assertEqual_custom(self):
+        x = MockEquality('first')
+        y = MockEquality('second')
+        z = MockEquality('first')
+        self._testEqualPair(x, x)
+        self._testEqualPair(x, z)
+        self._testUnequalPair(x, y)
+        self._testUnequalPair(y, z)
+
+
+    def test_assertEqualMessage(self):
+        """
+        When a message is passed to L{assertEqual}, it is included in the
+        error message.
+        """
+        exception = self.assertRaises(
+            self.failureException, self.assertEqual,
+            'foo', 'bar', 'message')
+        self.assertEqual(
+            str(exception),
+            "message\nnot equal:\na = 'foo'\nb = 'bar'\n")
+
+
+    def test_assertEqualNoneMessage(self):
+        """
+        If a message is specified as C{None}, it is not included in the error
+        message of L{assertEqual}.
+        """
+        exception = self.assertRaises(
+            self.failureException, self.assertEqual, 'foo', 'bar', None)
+        self.assertEqual(str(exception), "not equal:\na = 'foo'\nb = 'bar'\n")
+
+
+    def test_assertEqual_incomparable(self):
+        apple = ComparisonError()
+        orange = ["orange"]
+        try:
+            self.assertEqual(apple, orange)
+        except self.failureException:
+            self.fail("Fail raised when ValueError ought to have been raised.")
+        except ValueError:
+            # good. error not swallowed
+            pass
+        else:
+            self.fail("Comparing %r and %r should have raised an exception"
+                      % (apple, orange))
+
+
+    def _raiseError(self, error):
+        raise error
+
+
+    def test_failUnlessRaises_expected(self):
+        x = self.failUnlessRaises(ValueError, self._raiseError, ValueError)
+        self.failUnless(isinstance(x, ValueError),
+                        "Expect failUnlessRaises to return instance of raised "
+                        "exception.")
+
+    def test_failUnlessRaises_unexpected(self):
+        try:
+            self.failUnlessRaises(ValueError, self._raiseError, TypeError)
+        except TypeError:
+            self.fail("failUnlessRaises shouldn't re-raise unexpected "
+                      "exceptions")
+        except self.failureException:
+            # what we expect
+            pass
+        else:
+            self.fail("Expected exception wasn't raised. Should have failed")
+
+
+    def test_failUnlessRaises_noException(self):
+        try:
+            self.failUnlessRaises(ValueError, lambda : None)
+        except self.failureException as e:
+            self.assertEqual(str(e),
+                                 'ValueError not raised (None returned)')
+        else:
+            self.fail("Exception not raised. Should have failed")
+
+
+    def test_failUnlessRaises_failureException(self):
+        x = self.failUnlessRaises(self.failureException, self._raiseError,
+                                  self.failureException)
+        self.failUnless(isinstance(x, self.failureException),
+                        "Expected %r instance to be returned"
+                        % (self.failureException,))
+        try:
+            x = self.failUnlessRaises(self.failureException, self._raiseError,
+                                      ValueError)
+        except self.failureException:
+            # what we expect
+            pass
+        else:
+            self.fail("Should have raised exception")
+
+
+    def test_failIfEqual_basic(self):
+        x, y, z = [1], [2], [1]
+        ret = self.failIfEqual(x, y)
+        self.assertEqual(ret, x,
+                             "failIfEqual should return first parameter")
+        self.failUnlessRaises(self.failureException,
+                              self.failIfEqual, x, x)
+        self.failUnlessRaises(self.failureException,
+                              self.failIfEqual, x, z)
+
+
+    def test_failIfEqual_customEq(self):
+        x = MockEquality('first')
+        y = MockEquality('second')
+        z = MockEquality('fecund')
+        ret = self.failIfEqual(x, y)
+        self.assertEqual(ret, x,
+                             "failIfEqual should return first parameter")
+        self.failUnlessRaises(self.failureException,
+                              self.failIfEqual, x, x)
+        self.failIfEqual(x, z, "__ne__ should make these not equal")
+
+
+    def test_failIfIdenticalPositive(self):
+        """
+        C{failIfIdentical} returns its first argument if its first and second
+        arguments are not the same object.
+        """
+        x = object()
+        y = object()
+        result = self.failIfIdentical(x, y)
+        self.assertEqual(x, result)
+
+
+    def test_failIfIdenticalNegative(self):
+        """
+        C{failIfIdentical} raises C{failureException} if its first and second
+        arguments are the same object.
+        """
+        x = object()
+        self.failUnlessRaises(self.failureException,
+                              self.failIfIdentical, x, x)
+
+
+    def test_failUnlessIdentical(self):
+        x, y, z = [1], [1], [2]
+        ret = self.failUnlessIdentical(x, x)
+        self.assertEqual(ret, x,
+                             'failUnlessIdentical should return first '
+                             'parameter')
+        self.failUnlessRaises(self.failureException,
+                              self.failUnlessIdentical, x, y)
+        self.failUnlessRaises(self.failureException,
+                              self.failUnlessIdentical, x, z)
+
+    def test_failUnlessApproximates(self):
+        x, y, z = 1.0, 1.1, 1.2
+        self.failUnlessApproximates(x, x, 0.2)
+        ret = self.failUnlessApproximates(x, y, 0.2)
+        self.assertEqual(ret, x, "failUnlessApproximates should return "
+                             "first parameter")
+        self.failUnlessRaises(self.failureException,
+                              self.failUnlessApproximates, x, z, 0.1)
+        self.failUnlessRaises(self.failureException,
+                              self.failUnlessApproximates, x, y, 0.1)
+
+
+    def test_failUnlessAlmostEqual(self):
+        precision = 5
+        x = 8.000001
+        y = 8.00001
+        z = 8.000002
+        self.failUnlessAlmostEqual(x, x, precision)
+        ret = self.failUnlessAlmostEqual(x, z, precision)
+        self.assertEqual(ret, x, "failUnlessAlmostEqual should return "
+                             "first parameter (%r, %r)" % (ret, x))
+        self.failUnlessRaises(self.failureException,
+                              self.failUnlessAlmostEqual, x, y, precision)
+
+
+    def test_failIfAlmostEqual(self):
+        precision = 5
+        x = 8.000001
+        y = 8.00001
+        z = 8.000002
+        ret = self.failIfAlmostEqual(x, y, precision)
+        self.assertEqual(ret, x, "failIfAlmostEqual should return "
+                             "first parameter (%r, %r)" % (ret, x))
+        self.failUnlessRaises(self.failureException,
+                              self.failIfAlmostEqual, x, x, precision)
+        self.failUnlessRaises(self.failureException,
+                              self.failIfAlmostEqual, x, z, precision)
+
+
+    def test_failUnlessSubstring(self):
+        x = "cat"
+        y = "the dog sat"
+        z = "the cat sat"
+        self.failUnlessSubstring(x, x)
+        ret = self.failUnlessSubstring(x, z)
+        self.assertEqual(ret, x, 'should return first parameter')
+        self.failUnlessRaises(self.failureException,
+                              self.failUnlessSubstring, x, y)
+        self.failUnlessRaises(self.failureException,
+                              self.failUnlessSubstring, z, x)
+
+
+    def test_failIfSubstring(self):
+        x = "cat"
+        y = "the dog sat"
+        z = "the cat sat"
+        self.failIfSubstring(z, x)
+        ret = self.failIfSubstring(x, y)
+        self.assertEqual(ret, x, 'should return first parameter')
+        self.failUnlessRaises(self.failureException,
+                              self.failIfSubstring, x, x)
+        self.failUnlessRaises(self.failureException,
+                              self.failIfSubstring, x, z)
+
+
+    def test_assertIsInstance(self):
+        """
+        Test a true condition of assertIsInstance.
+        """
+        A = type('A', (object,), {})
+        a = A()
+        self.assertIsInstance(a, A)
+
+
+    def test_assertIsInstanceMultipleClasses(self):
+        """
+        Test a true condition of assertIsInstance with multiple classes.
+        """
+        A = type('A', (object,), {})
+        B = type('B', (object,), {})
+        a = A()
+        self.assertIsInstance(a, (A, B))
+
+
+    def test_assertIsInstanceError(self):
+        """
+        Test an error with assertIsInstance.
+        """
+        A = type('A', (object,), {})
+        B = type('B', (object,), {})
+        a = A()
+        self.assertRaises(self.failureException, self.assertIsInstance, a, B)
+
+
+    def test_assertIsInstanceErrorMultipleClasses(self):
+        """
+        Test an error with assertIsInstance and multiple classes.
+        """
+        A = type('A', (object,), {})
+        B = type('B', (object,), {})
+        C = type('C', (object,), {})
+        a = A()
+        self.assertRaises(self.failureException, self.assertIsInstance, a, (B, C))
+
+
+    def test_assertIsInstanceCustomMessage(self):
+        """
+        If L{TestCase.assertIsInstance} is passed a custom message as its 3rd
+        argument, the message is included in the failure exception raised when
+        the assertion fails.
+        """
+        exc = self.assertRaises(
+            self.failureException,
+            self.assertIsInstance, 3, str, "Silly assertion")
+        self.assertIn("Silly assertion", str(exc))
+
+
+    def test_assertNotIsInstance(self):
+        """
+        Test a true condition of assertNotIsInstance.
+        """
+        A = type('A', (object,), {})
+        B = type('B', (object,), {})
+        a = A()
+        self.assertNotIsInstance(a, B)
+
+
+    def test_assertNotIsInstanceMultipleClasses(self):
+        """
+        Test a true condition of assertNotIsInstance and multiple classes.
+        """
+        A = type('A', (object,), {})
+        B = type('B', (object,), {})
+        C = type('C', (object,), {})
+        a = A()
+        self.assertNotIsInstance(a, (B, C))
+
+
+    def test_assertNotIsInstanceError(self):
+        """
+        Test an error with assertNotIsInstance.
+        """
+        A = type('A', (object,), {})
+        a = A()
+        error = self.assertRaises(self.failureException,
+                                  self.assertNotIsInstance, a, A)
+        self.assertEqual(str(error), "%r is an instance of %s" % (a, A))
+
+
+    def test_assertNotIsInstanceErrorMultipleClasses(self):
+        """
+        Test an error with assertNotIsInstance and multiple classes.
+        """
+        A = type('A', (object,), {})
+        B = type('B', (object,), {})
+        a = A()
+        self.assertRaises(self.failureException, self.assertNotIsInstance, a, (A, B))
+
+
+    def test_assertDictEqual(self):
+        """
+        L{twisted.trial.unittest.TestCase} supports the C{assertDictEqual}
+        method inherited from the standard library in Python 2.7.
+        """
+        self.assertDictEqual({'a': 1}, {'a': 1})
+    if getattr(unittest.SynchronousTestCase, 'assertDictEqual', None) is None:
+        test_assertDictEqual.skip = (
+            "assertDictEqual is not available on this version of Python")
+
+
+
+class WarningAssertionTests(unittest.SynchronousTestCase):
+    def test_assertWarns(self):
+        """
+        Test basic assertWarns report.
+        """
+        def deprecated(a):
+            warnings.warn("Woo deprecated", category=DeprecationWarning)
+            return a
+        r = self.assertWarns(DeprecationWarning, "Woo deprecated", __file__,
+            deprecated, 123)
+        self.assertEqual(r, 123)
+
+
+    def test_assertWarnsRegistryClean(self):
+        """
+        Test that assertWarns cleans the warning registry, so the warning is
+        not swallowed the second time.
+        """
+        def deprecated(a):
+            warnings.warn("Woo deprecated", category=DeprecationWarning)
+            return a
+        r1 = self.assertWarns(DeprecationWarning, "Woo deprecated", __file__,
+            deprecated, 123)
+        self.assertEqual(r1, 123)
+        # The warning should be raised again
+        r2 = self.assertWarns(DeprecationWarning, "Woo deprecated", __file__,
+            deprecated, 321)
+        self.assertEqual(r2, 321)
+
+
+    def test_assertWarnsError(self):
+        """
+        Test assertWarns failure when no warning is generated.
+        """
+        def normal(a):
+            return a
+        self.assertRaises(self.failureException,
+            self.assertWarns, DeprecationWarning, "Woo deprecated", __file__,
+            normal, 123)
+
+
+    def test_assertWarnsWrongCategory(self):
+        """
+        Test assertWarns failure when the category is wrong.
+        """
+        def deprecated(a):
+            warnings.warn("Foo deprecated", category=DeprecationWarning)
+            return a
+        self.assertRaises(self.failureException,
+            self.assertWarns, UserWarning, "Foo deprecated", __file__,
+            deprecated, 123)
+
+
+    def test_assertWarnsWrongMessage(self):
+        """
+        Test assertWarns failure when the message is wrong.
+        """
+        def deprecated(a):
+            warnings.warn("Foo deprecated", category=DeprecationWarning)
+            return a
+        self.assertRaises(self.failureException,
+            self.assertWarns, DeprecationWarning, "Bar deprecated", __file__,
+            deprecated, 123)
+
+
+    def test_assertWarnsWrongFile(self):
+        """
+        If the warning emitted by a function refers to a different file than is
+        passed to C{assertWarns}, C{failureException} is raised.
+        """
+        def deprecated(a):
+            # stacklevel=2 points at the direct caller of the function.  The
+            # way assertRaises is invoked below, the direct caller will be
+            # something somewhere in trial, not something in this file.  In
+            # Python 2.5 and earlier, stacklevel of 0 resulted in a warning
+            # pointing to the warnings module itself.  Starting in Python 2.6,
+            # stacklevel of 0 and 1 both result in a warning pointing to *this*
+            # file, presumably due to the fact that the warn function is
+            # implemented in C and has no convenient Python
+            # filename/linenumber.
+            warnings.warn(
+                "Foo deprecated", category=DeprecationWarning, stacklevel=2)
+        self.assertRaises(
+            self.failureException,
+            # Since the direct caller isn't in this file, try to assert that
+            # the warning *does* point to this file, so that assertWarns raises
+            # an exception.
+            self.assertWarns, DeprecationWarning, "Foo deprecated", __file__,
+            deprecated, 123)
+
+    def test_assertWarnsOnClass(self):
+        """
+        Test assertWarns works when creating a class instance.
+        """
+        class Warn:
+            def __init__(self):
+                warnings.warn("Do not call me", category=RuntimeWarning)
+        r = self.assertWarns(RuntimeWarning, "Do not call me", __file__,
+            Warn)
+        self.assertTrue(isinstance(r, Warn))
+        r = self.assertWarns(RuntimeWarning, "Do not call me", __file__,
+            Warn)
+        self.assertTrue(isinstance(r, Warn))
+
+
+    def test_assertWarnsOnMethod(self):
+        """
+        Test assertWarns works when used on an instance method.
+        """
+        class Warn:
+            def deprecated(self, a):
+                warnings.warn("Bar deprecated", category=DeprecationWarning)
+                return a
+        w = Warn()
+        r = self.assertWarns(DeprecationWarning, "Bar deprecated", __file__,
+            w.deprecated, 321)
+        self.assertEqual(r, 321)
+        r = self.assertWarns(DeprecationWarning, "Bar deprecated", __file__,
+            w.deprecated, 321)
+        self.assertEqual(r, 321)
+
+
+    def test_assertWarnsOnCall(self):
+        """
+        Test assertWarns works on an instance with a C{__call__} method.
+        """
+        class Warn:
+            def __call__(self, a):
+                warnings.warn("Egg deprecated", category=DeprecationWarning)
+                return a
+        w = Warn()
+        r = self.assertWarns(DeprecationWarning, "Egg deprecated", __file__,
+            w, 321)
+        self.assertEqual(r, 321)
+        r = self.assertWarns(DeprecationWarning, "Egg deprecated", __file__,
+            w, 321)
+        self.assertEqual(r, 321)
+
+
+    def test_assertWarnsFilter(self):
+        """
+        Test assertWarns on a warning that is filtered by default.
+        """
+        def deprecated(a):
+            warnings.warn("Woo deprecated", category=PendingDeprecationWarning)
+            return a
+        r = self.assertWarns(PendingDeprecationWarning, "Woo deprecated",
+            __file__, deprecated, 123)
+        self.assertEqual(r, 123)
+
+
+    def test_assertWarnsMultipleWarnings(self):
+        """
+        C{assertWarns} does not raise an exception if the function it is passed
+        triggers the same warning more than once.
+        """
+        def deprecated():
+            warnings.warn("Woo deprecated", category=PendingDeprecationWarning)
+        def f():
+            deprecated()
+            deprecated()
+        self.assertWarns(
+            PendingDeprecationWarning, "Woo deprecated", __file__, f)
+
+
+    def test_assertWarnsDifferentWarnings(self):
+        """
+        For now, assertWarns is unable to handle multiple different warnings,
+        so it should raise an exception in that case.
+        """
+        def deprecated(a):
+            warnings.warn("Woo deprecated", category=DeprecationWarning)
+            warnings.warn("Another one", category=PendingDeprecationWarning)
+        e = self.assertRaises(self.failureException,
+                self.assertWarns, DeprecationWarning, "Woo deprecated",
+                __file__, deprecated, 123)
+        self.assertEqual(str(e), "Can't handle different warnings")
+
+
+    def test_assertWarnsAfterUnassertedWarning(self):
+        """
+        Warnings emitted before L{TestCase.assertWarns} is called do not get
+        flushed and do not alter the behavior of L{TestCase.assertWarns}.
+        """
+        class TheWarning(Warning):
+            pass
+
+        def f(message):
+            warnings.warn(message, category=TheWarning)
+        f("foo")
+        self.assertWarns(TheWarning, "bar", __file__, f, "bar")
+        [warning] = self.flushWarnings([f])
+        self.assertEqual(warning['message'], "foo")
+
+
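Every test in WarningAssertionTests drives assertWarns through the same
calling convention: the expected warning category, the expected message, the
filename the warning should point at, and then the callable plus its
arguments; assertWarns returns whatever the callable returned. A minimal
sketch of that pattern, where deprecatedThing and the test class are made-up
names:

    import warnings
    from twisted.trial import unittest

    def deprecatedThing(x):
        # Hypothetical helper: emits the warning the test below asserts on.
        warnings.warn("deprecatedThing is old", category=DeprecationWarning)
        return x * 2

    class ExampleWarningTests(unittest.SynchronousTestCase):
        def test_deprecatedThing(self):
            # assertWarns(category, message, filename, callable, *args) calls
            # the callable, checks the single warning it emits, and returns
            # the callable's result.
            result = self.assertWarns(
                DeprecationWarning, "deprecatedThing is old", __file__,
                deprecatedThing, 21)
            self.assertEqual(result, 42)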
+
+class TestResultOfAssertions(unittest.SynchronousTestCase):
+    """
+    Tests for L{SynchronousTestCase.successResultOf},
+    L{SynchronousTestCase.failureResultOf}, and
+    L{SynchronousTestCase.assertNoResult}.
+    """
+    result = object()
+    failure = Failure(Exception("Bad times"))
+
+    def test_withoutSuccessResult(self):
+        """
+        L{SynchronousTestCase.successResultOf} raises
+        L{SynchronousTestCase.failureException} when called with a L{Deferred}
+        with no current result.
+        """
+        self.assertRaises(
+            self.failureException, self.successResultOf, Deferred())
+
+
+    def test_successResultOfWithFailure(self):
+        """
+        L{SynchronousTestCase.successResultOf} raises
+        L{SynchronousTestCase.failureException} when called with a L{Deferred}
+        with a failure result.
+        """
+        self.assertRaises(
+            self.failureException, self.successResultOf, fail(self.failure))
+
+
+    def test_withoutFailureResult(self):
+        """
+        L{SynchronousTestCase.failureResultOf} raises
+        L{SynchronousTestCase.failureException} when called with a L{Deferred}
+        with no current result.
+        """
+        self.assertRaises(
+            self.failureException, self.failureResultOf, Deferred())
+
+
+    def test_failureResultOfWithSuccess(self):
+        """
+        L{SynchronousTestCase.failureResultOf} raises
+        L{SynchronousTestCase.failureException} when called with a L{Deferred}
+        with a success result.
+        """
+        self.assertRaises(
+            self.failureException, self.failureResultOf, succeed(self.result))
+
+
+    def test_withSuccessResult(self):
+        """
+        When passed a L{Deferred} which currently has a result (ie,
+        L{Deferred.addCallback} would cause the added callback to be called
+        before C{addCallback} returns), L{SynchronousTestCase.successResultOf}
+        returns that result.
+        """
+        self.assertIdentical(
+            self.result, self.successResultOf(succeed(self.result)))
+
+
+    def test_withFailureResult(self):
+        """
+        When passed a L{Deferred} which currently has a L{Failure} result (ie,
+        L{Deferred.addErrback} would cause the added errback to be called before
+        C{addErrback} returns), L{SynchronousTestCase.failureResultOf} returns
+        that L{Failure}.
+        """
+        self.assertIdentical(
+            self.failure, self.failureResultOf(fail(self.failure)))
+
+
+    def test_assertNoResultSuccess(self):
+        """
+        When passed a L{Deferred} which currently has a success result (see
+        L{test_withSuccessResult}), L{SynchronousTestCase.assertNoResult} raises
+        L{SynchronousTestCase.failureException}.
+        """
+        self.assertRaises(
+            self.failureException, self.assertNoResult, succeed(self.result))
+
+
+    def test_assertNoResultFailure(self):
+        """
+        When passed a L{Deferred} which currently has a failure result (see
+        L{test_withFailureResult}), L{SynchronousTestCase.assertNoResult} raises
+        L{SynchronousTestCase.failureException}.
+        """
+        self.assertRaises(
+            self.failureException, self.assertNoResult, fail(self.failure))
+
+
+    def test_assertNoResult(self):
+        """
+        When passed a L{Deferred} with no current result,
+        L{SynchronousTestCase.assertNoResult} raises no exception.
+        """
+        self.assertNoResult(Deferred())
+
+
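The three helpers covered by TestResultOfAssertions are normally used
directly against the Deferred under test: successResultOf extracts a success
result, failureResultOf extracts a Failure, and assertNoResult insists the
Deferred has not fired yet. A small sketch along those lines, with an
illustrative class name and values:

    from twisted.internet.defer import Deferred, fail, succeed
    from twisted.trial import unittest

    class ExampleResultTests(unittest.SynchronousTestCase):
        def test_helpers(self):
            # successResultOf returns the current success result.
            self.assertEqual(7, self.successResultOf(succeed(7)))
            # failureResultOf returns the current Failure; trap() then
            # confirms the wrapped exception type.
            failure = self.failureResultOf(fail(RuntimeError("boom")))
            failure.trap(RuntimeError)
            # assertNoResult passes only while the Deferred has no result.
            self.assertNoResult(Deferred())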
+
+class TestAssertionNames(unittest.SynchronousTestCase):
+    """
+    Tests for consistency of naming within TestCase assertion methods
+    """
+    def _getAsserts(self):
+        dct = {}
+        accumulateMethods(self, dct, 'assert')
+        return [ dct[k] for k in dct if not k.startswith('Not') and k != '_' ]
+
+    def _name(self, x):
+        return x.__name__
+
+
+    def test_failUnlessMatchesAssert(self):
+        """
+        The C{failUnless*} test methods are a subset of the C{assert*} test
+        methods.  This is intended to ensure that methods using the
+        I{failUnless} naming scheme are not added without corresponding methods
+        using the I{assert} naming scheme.  The I{assert} naming scheme is
+        preferred, and new I{assert}-prefixed methods may be added without
+        corresponding I{failUnless}-prefixed methods.
+        """
+        asserts = set(self._getAsserts())
+        failUnlesses = set(prefixedMethods(self, 'failUnless'))
+        self.assertEqual(
+            failUnlesses, asserts.intersection(failUnlesses))
+
+
+    def test_failIf_matches_assertNot(self):
+        asserts = prefixedMethods(unittest.SynchronousTestCase, 'assertNot')
+        failIfs = prefixedMethods(unittest.SynchronousTestCase, 'failIf')
+        self.assertEqual(sorted(asserts, key=self._name),
+                             sorted(failIfs, key=self._name))
+
+    def test_equalSpelling(self):
+        for name, value in vars(self).items():
+            if not callable(value):
+                continue
+            if name.endswith('Equal'):
+                self.failUnless(hasattr(self, name+'s'),
+                                "%s but no %ss" % (name, name))
+                self.assertEqual(value, getattr(self, name+'s'))
+            if name.endswith('Equals'):
+                self.failUnless(hasattr(self, name[:-1]),
+                                "%s but no %s" % (name, name[:-1]))
+                self.assertEqual(value, getattr(self, name[:-1]))
+
+
+class TestCallDeprecated(unittest.SynchronousTestCase):
+    """
+    Test use of the L{SynchronousTestCase.callDeprecated} method with version objects.
+    """
+
+    version = Version('Twisted', 8, 0, 0)
+
+    def test_callDeprecatedSuppressesWarning(self):
+        """
+        callDeprecated calls a deprecated callable, suppressing the
+        deprecation warning.
+        """
+        self.callDeprecated(self.version, oldMethod, 'foo')
+        self.assertEqual(
+            self.flushWarnings(), [], "No warnings should be shown")
+
+
+    def test_callDeprecatedCallsFunction(self):
+        """
+        L{callDeprecated} actually calls the callable passed to it, and
+        forwards the result.
+        """
+        result = self.callDeprecated(self.version, oldMethod, 'foo')
+        self.assertEqual('foo', result)
+
+
+    def test_failsWithoutDeprecation(self):
+        """
+        L{callDeprecated} raises a test failure if the callable is not
+        deprecated.
+        """
+        def notDeprecated():
+            pass
+        exception = self.assertRaises(
+            self.failureException,
+            self.callDeprecated, self.version, notDeprecated)
+        self.assertEqual(
+            "%r is not deprecated." % notDeprecated, str(exception))
+
+
+    def test_failsWithIncorrectDeprecation(self):
+        """
+        callDeprecated raises a test failure if the callable was deprecated
+        at a different version to the one expected.
+        """
+        differentVersion = Version('Foo', 1, 2, 3)
+        exception = self.assertRaises(
+            self.failureException,
+            self.callDeprecated,
+            differentVersion, oldMethod, 'foo')
+        self.assertIn(getVersionString(self.version), str(exception))
+        self.assertIn(getVersionString(differentVersion), str(exception))
+
+
+    def test_nestedDeprecation(self):
+        """
+        L{callDeprecated} ignores all deprecations apart from the first.
+
+        Multiple warnings are generated when a deprecated function calls
+        another deprecated function. The first warning is the one generated by
+        the explicitly called function. That's the warning that we care about.
+        """
+        differentVersion = Version('Foo', 1, 2, 3)
+
+        def nestedDeprecation(*args):
+            return oldMethod(*args)
+        nestedDeprecation = deprecated(differentVersion)(nestedDeprecation)
+
+        self.callDeprecated(differentVersion, nestedDeprecation, 24)
+
+        # The oldMethod deprecation should have been emitted too, not captured
+        # by callDeprecated.  Flush it now to make sure it did happen and to
+        # prevent it from showing up on stdout.
+        warningsShown = self.flushWarnings()
+        self.assertEqual(len(warningsShown), 1)
+
+
+    def test_callDeprecationWithMessage(self):
+        """
+        L{callDeprecated} can take a message argument used to check the warning
+        emitted.
+        """
+        self.callDeprecated((self.version, "newMethod"),
+                            oldMethodReplaced, 1)
+
+
+    def test_callDeprecationWithWrongMessage(self):
+        """
+        If the message passed to L{callDeprecated} doesn't match,
+        L{callDeprecated} raises a test failure.
+        """
+        exception = self.assertRaises(
+            self.failureException,
+            self.callDeprecated,
+            (self.version, "something.wrong"),
+            oldMethodReplaced, 1)
+        self.assertIn(getVersionString(self.version), str(exception))
+        self.assertIn("please use newMethod instead", str(exception))
+
+
+
+
+@deprecated(TestCallDeprecated.version)
+def oldMethod(x):
+    """
+    Deprecated method for testing.
+    """
+    return x
+
+
+@deprecated(TestCallDeprecated.version, replacement="newMethod")
+def oldMethodReplaced(x):
+    """
+    Another deprecated method, which has been deprecated in favor of the
+    mythical 'newMethod'.
+    """
+    return 2 * x
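
The two module-level functions above are the other half of the
TestCallDeprecated tests: a callable is wrapped with the deprecated decorator
at a known Version, and callDeprecated is then invoked with that version (or
a (version, replacement) pair) to call it while flushing the expected
warning. A short sketch of the same pairing with invented names, assuming the
usual twisted.python.deprecate and twisted.python.versions imports:

    from twisted.python.deprecate import deprecated
    from twisted.python.versions import Version
    from twisted.trial import unittest

    _version = Version('ExampleProject', 1, 0, 0)

    @deprecated(_version, replacement="newHelper")
    def oldHelper(x):
        # Hypothetical deprecated function; calling it emits a
        # DeprecationWarning that mentions the replacement.
        return x + 1

    class ExampleDeprecationTests(unittest.SynchronousTestCase):
        def test_oldHelper(self):
            # callDeprecated swallows the expected warning and forwards the
            # callable's return value.
            result = self.callDeprecated((_version, "newHelper"), oldHelper, 2)
            self.assertEqual(result, 3)
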
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_asyncassertions.py b/ThirdParty/Twisted/twisted/trial/test/test_asyncassertions.py
new file mode 100644
index 0000000..e9af9c7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_asyncassertions.py
@@ -0,0 +1,83 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for async assertions provided by C{twisted.trial.unittest.TestCase}.
+"""
+
+from __future__ import division, absolute_import
+
+import unittest as pyunit
+
+from twisted.python import failure
+from twisted.internet import defer
+from twisted.trial import unittest
+
+
+class TestAsynchronousAssertions(unittest.TestCase):
+    """
+    Tests for L{TestCase}'s asynchronous extensions to L{SynchronousTestCase}.
+    That is, assertFailure.
+    """
+    def test_assertFailure(self):
+        d = defer.maybeDeferred(lambda: 1/0)
+        return self.assertFailure(d, ZeroDivisionError)
+
+
+    def test_assertFailure_wrongException(self):
+        d = defer.maybeDeferred(lambda: 1/0)
+        self.assertFailure(d, OverflowError)
+        d.addCallbacks(lambda x: self.fail('Should have failed'),
+                       lambda x: x.trap(self.failureException))
+        return d
+
+
+    def test_assertFailure_noException(self):
+        d = defer.succeed(None)
+        self.assertFailure(d, ZeroDivisionError)
+        d.addCallbacks(lambda x: self.fail('Should have failed'),
+                       lambda x: x.trap(self.failureException))
+        return d
+
+
+    def test_assertFailure_moreInfo(self):
+        """
+        When assertFailure itself fails, check that the resulting error
+        includes detailed information about the exception that was raised.
+        """
+        try:
+            1/0
+        except ZeroDivisionError:
+            f = failure.Failure()
+            d = defer.fail(f)
+        d = self.assertFailure(d, RuntimeError)
+        d.addErrback(self._checkInfo, f)
+        return d
+
+
+    def _checkInfo(self, assertionFailure, f):
+        assert assertionFailure.check(self.failureException)
+        output = assertionFailure.getErrorMessage()
+        self.assertIn(f.getErrorMessage(), output)
+        self.assertIn(f.getBriefTraceback(), output)
+
+
+    def test_assertFailure_masked(self):
+        """
+        A single wrong assertFailure should fail the whole test.
+        """
+        class ExampleFailure(Exception):
+            pass
+
+        class TC(unittest.TestCase):
+            failureException = ExampleFailure
+            def test_assertFailure(self):
+                d = defer.maybeDeferred(lambda: 1/0)
+                self.assertFailure(d, OverflowError)
+                self.assertFailure(d, ZeroDivisionError)
+                return d
+
+        test = TC('test_assertFailure')
+        result = pyunit.TestResult()
+        test.run(result)
+        self.assertEqual(1, len(result.failures))
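
assertFailure, exercised throughout the file above, is the asynchronous
counterpart of assertRaises: it chains callbacks onto the Deferred so that
only the expected exception counts as success, and the test returns the
Deferred for trial to wait on. A minimal sketch of the usual shape, where
brokenDivide is invented for illustration:

    from twisted.internet import defer
    from twisted.trial import unittest

    def brokenDivide(numerator, denominator):
        # Hypothetical API under test; the Deferred fires with the quotient
        # or fails with ZeroDivisionError.
        return defer.maybeDeferred(lambda: numerator / denominator)

    class ExampleAsyncTests(unittest.TestCase):
        def test_divideByZero(self):
            d = brokenDivide(1, 0)
            # Returning the Deferred from assertFailure makes trial wait for
            # it; the test passes only if ZeroDivisionError was raised.
            return self.assertFailure(d, ZeroDivisionError)
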
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_deferred.py b/ThirdParty/Twisted/twisted/trial/test/test_deferred.py
new file mode 100644
index 0000000..1e7ce31
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_deferred.py
@@ -0,0 +1,236 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for returning Deferreds from a TestCase.
+"""
+
+from __future__ import division, absolute_import
+
+import unittest as pyunit
+
+from twisted.internet import defer
+from twisted.trial import unittest, reporter
+from twisted.trial import util
+from twisted.trial.test import detests
+
+
+class TestSetUp(unittest.TestCase):
+    def _loadSuite(self, klass):
+        loader = pyunit.TestLoader()
+        r = reporter.TestResult()
+        s = loader.loadTestsFromTestCase(klass)
+        return r, s
+
+    def test_success(self):
+        result, suite = self._loadSuite(detests.DeferredSetUpOK)
+        suite(result)
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+
+    def test_fail(self):
+        self.failIf(detests.DeferredSetUpFail.testCalled)
+        result, suite = self._loadSuite(detests.DeferredSetUpFail)
+        suite(result)
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.failures), 0)
+        self.assertEqual(len(result.errors), 1)
+        self.failIf(detests.DeferredSetUpFail.testCalled)
+
+    def test_callbackFail(self):
+        self.failIf(detests.DeferredSetUpCallbackFail.testCalled)
+        result, suite = self._loadSuite(detests.DeferredSetUpCallbackFail)
+        suite(result)
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.failures), 0)
+        self.assertEqual(len(result.errors), 1)
+        self.failIf(detests.DeferredSetUpCallbackFail.testCalled)
+
+    def test_error(self):
+        self.failIf(detests.DeferredSetUpError.testCalled)
+        result, suite = self._loadSuite(detests.DeferredSetUpError)
+        suite(result)
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.failures), 0)
+        self.assertEqual(len(result.errors), 1)
+        self.failIf(detests.DeferredSetUpError.testCalled)
+
+    def test_skip(self):
+        self.failIf(detests.DeferredSetUpSkip.testCalled)
+        result, suite = self._loadSuite(detests.DeferredSetUpSkip)
+        suite(result)
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.failures), 0)
+        self.assertEqual(len(result.errors), 0)
+        self.assertEqual(len(result.skips), 1)
+        self.failIf(detests.DeferredSetUpSkip.testCalled)
+
+
+class TestNeverFire(unittest.TestCase):
+    def setUp(self):
+        self._oldTimeout = util.DEFAULT_TIMEOUT_DURATION
+        util.DEFAULT_TIMEOUT_DURATION = 0.1
+
+    def tearDown(self):
+        util.DEFAULT_TIMEOUT_DURATION = self._oldTimeout
+
+    def _loadSuite(self, klass):
+        loader = pyunit.TestLoader()
+        r = reporter.TestResult()
+        s = loader.loadTestsFromTestCase(klass)
+        return r, s
+
+    def test_setUp(self):
+        self.failIf(detests.DeferredSetUpNeverFire.testCalled)
+        result, suite = self._loadSuite(detests.DeferredSetUpNeverFire)
+        suite(result)
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.failures), 0)
+        self.assertEqual(len(result.errors), 1)
+        self.failIf(detests.DeferredSetUpNeverFire.testCalled)
+        self.failUnless(result.errors[0][1].check(defer.TimeoutError))
+
+
+class TestTester(unittest.TestCase):
+    def getTest(self, name):
+        raise NotImplementedError("must override me")
+
+    def runTest(self, name):
+        result = reporter.TestResult()
+        self.getTest(name).run(result)
+        return result
+
+
+class TestDeferred(TestTester):
+    def getTest(self, name):
+        return detests.DeferredTests(name)
+
+    def test_pass(self):
+        result = self.runTest('test_pass')
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+
+    def test_passGenerated(self):
+        result = self.runTest('test_passGenerated')
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.failUnless(detests.DeferredTests.touched)
+
+    def test_fail(self):
+        result = self.runTest('test_fail')
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.failures), 1)
+
+    def test_failureInCallback(self):
+        result = self.runTest('test_failureInCallback')
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.failures), 1)
+
+    def test_errorInCallback(self):
+        result = self.runTest('test_errorInCallback')
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.errors), 1)
+
+    def test_skip(self):
+        result = self.runTest('test_skip')
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.skips), 1)
+        self.failIf(detests.DeferredTests.touched)
+
+    def test_todo(self):
+        result = self.runTest('test_expectedFailure')
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.errors), 0)
+        self.assertEqual(len(result.failures), 0)
+        self.assertEqual(len(result.expectedFailures), 1)
+
+    def test_thread(self):
+        result = self.runTest('test_thread')
+        self.assertEqual(result.testsRun, 1)
+        self.failUnless(result.wasSuccessful(), result.errors)
+
+
+
+class TestTimeout(TestTester):
+    def getTest(self, name):
+        return detests.TimeoutTests(name)
+
+    def _wasTimeout(self, error):
+        self.assertEqual(error.check(defer.TimeoutError),
+                             defer.TimeoutError)
+
+    def test_pass(self):
+        result = self.runTest('test_pass')
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+
+    def test_passDefault(self):
+        result = self.runTest('test_passDefault')
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+
+    def test_timeout(self):
+        result = self.runTest('test_timeout')
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.errors), 1)
+        self._wasTimeout(result.errors[0][1])
+
+    def test_timeoutZero(self):
+        result = self.runTest('test_timeoutZero')
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.errors), 1)
+        self._wasTimeout(result.errors[0][1])
+
+    def test_skip(self):
+        result = self.runTest('test_skip')
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.skips), 1)
+
+    def test_todo(self):
+        result = self.runTest('test_expectedFailure')
+        self.failUnless(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.expectedFailures), 1)
+        self._wasTimeout(result.expectedFailures[0][1])
+
+    def test_errorPropagation(self):
+        result = self.runTest('test_errorPropagation')
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(result.testsRun, 1)
+        self._wasTimeout(detests.TimeoutTests.timedOut)
+
+    def test_classTimeout(self):
+        loader = pyunit.TestLoader()
+        suite = loader.loadTestsFromTestCase(detests.TestClassTimeoutAttribute)
+        result = reporter.TestResult()
+        suite.run(result)
+        self.assertEqual(len(result.errors), 1)
+        self._wasTimeout(result.errors[0][1])
+
+    def test_callbackReturnsNonCallingDeferred(self):
+        # Hacky timeout.
+        # This raises KeyboardInterrupt because Trial handles it poorly.
+        from twisted.internet import reactor
+        call = reactor.callLater(2, reactor.crash)
+        result = self.runTest('test_calledButNeverCallback')
+        if call.active():
+            call.cancel()
+        self.failIf(result.wasSuccessful())
+        self._wasTimeout(result.errors[0][1])
+
+
+# The test loader erroneously attempts to run this:
+del TestTester
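
The closing del TestTester works around the loader behaviour noted in the
comment above: trial would otherwise attempt to run the abstract helper base
as well, so it is removed from the module namespace (its subclasses keep
their own references to it). A sketch of the same pattern in a generic test
module, with illustrative class names:

    from twisted.trial import unittest

    class _AbstractBase(unittest.TestCase):
        # Shared scaffolding; getThing is supplied by each concrete subclass.
        def getThing(self):
            raise NotImplementedError("must override me")

        def test_thingIsTruthy(self):
            self.assertTrue(self.getThing())

    class ListTests(_AbstractBase):
        def getThing(self):
            return [1, 2, 3]

    class DictTests(_AbstractBase):
        def getThing(self):
            return {'key': 'value'}

    # Keep the loader from running the abstract base directly.
    del _AbstractBase
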
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_doctest.py b/ThirdParty/Twisted/twisted/trial/test/test_doctest.py
new file mode 100644
index 0000000..314f84f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_doctest.py
@@ -0,0 +1,64 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test Twisted's doctest support.
+"""
+
+from twisted.trial import itrial, runner, unittest, reporter
+from twisted.trial.test import mockdoctest
+
+
+class TestRunners(unittest.SynchronousTestCase):
+    """
+    Tests for Twisted's doctest support.
+    """
+
+    def test_id(self):
+        """
+        Check that the id() of the doctests' case object contains the FQPN of
+        the actual tests. We need this because id() has weird behaviour w/
+        doctest in Python 2.3.
+        """
+        loader = runner.TestLoader()
+        suite = loader.loadDoctests(mockdoctest)
+        idPrefix = 'twisted.trial.test.mockdoctest.Counter'
+        for test in suite._tests:
+            self.assertIn(idPrefix, itrial.ITestCase(test).id())
+
+
+    def test_basicTrialIntegration(self):
+        """
+        L{loadDoctests} loads all of the doctests in the given module.
+        """
+        loader = runner.TestLoader()
+        suite = loader.loadDoctests(mockdoctest)
+        self.assertEqual(7, suite.countTestCases())
+
+
+    def _testRun(self, suite):
+        """
+        Run C{suite} and check the result.
+        """
+        result = reporter.TestResult()
+        suite.run(result)
+        self.assertEqual(5, result.successes)
+        # doctest reports failures as errors in 2.3
+        self.assertEqual(2, len(result.errors) + len(result.failures))
+
+
+    def test_expectedResults(self, count=1):
+        """
+        Trial can correctly run doctests with its xUnit test APIs.
+        """
+        suite = runner.TestLoader().loadDoctests(mockdoctest)
+        self._testRun(suite)
+
+
+    def test_repeatable(self):
+        """
+        Doctests should be runnable repeatably.
+        """
+        suite = runner.TestLoader().loadDoctests(mockdoctest)
+        self._testRun(suite)
+        self._testRun(suite)
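
loadDoctests, as used above, wraps each doctest in the target module in a
trial-compatible case, so the resulting suite supports the ordinary counting
and running APIs. A brief sketch of that flow against a hypothetical module
named mymodule:

    from twisted.trial import reporter, runner, unittest

    import mymodule  # stand-in for any module that contains doctests

    class ExampleDoctestRun(unittest.SynchronousTestCase):
        def test_runDoctests(self):
            suite = runner.TestLoader().loadDoctests(mymodule)
            # Each doctest in mymodule becomes one trial-compatible case.
            self.assertTrue(suite.countTestCases() > 0)
            result = reporter.TestResult()
            suite.run(result)
            # Passing doctests are counted in result.successes; failing
            # examples show up in result.failures or result.errors.
            self.assertEqual(0, len(result.failures) + len(result.errors))
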
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_keyboard.py b/ThirdParty/Twisted/twisted/trial/test/test_keyboard.py
new file mode 100644
index 0000000..ac428d7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_keyboard.py
@@ -0,0 +1,119 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for interrupting tests with Control-C.
+"""
+
+import StringIO
+
+from twisted.trial import unittest
+from twisted.trial import reporter, runner
+
+
+class TrialTest(unittest.SynchronousTestCase):
+    def setUp(self):
+        self.output = StringIO.StringIO()
+        self.reporter = reporter.TestResult()
+        self.loader = runner.TestLoader()
+
+
+class TestInterruptInTest(TrialTest):
+    class InterruptedTest(unittest.TestCase):
+        def test_02_raiseInterrupt(self):
+            raise KeyboardInterrupt
+
+        def test_01_doNothing(self):
+            pass
+
+        def test_03_doNothing(self):
+            TestInterruptInTest.test_03_doNothing_run = True
+
+    def setUp(self):
+        super(TestInterruptInTest, self).setUp()
+        self.suite = self.loader.loadClass(TestInterruptInTest.InterruptedTest)
+        TestInterruptInTest.test_03_doNothing_run = None
+
+    def test_setUpOK(self):
+        self.assertEqual(3, self.suite.countTestCases())
+        self.assertEqual(0, self.reporter.testsRun)
+        self.failIf(self.reporter.shouldStop)
+
+    def test_interruptInTest(self):
+        runner.TrialSuite([self.suite]).run(self.reporter)
+        self.failUnless(self.reporter.shouldStop)
+        self.assertEqual(2, self.reporter.testsRun)
+        self.failIf(TestInterruptInTest.test_03_doNothing_run,
+                    "test_03_doNothing ran.")
+
+
+class TestInterruptInSetUp(TrialTest):
+    testsRun = 0
+
+    class InterruptedTest(unittest.TestCase):
+        def setUp(self):
+            if TestInterruptInSetUp.testsRun > 0:
+                raise KeyboardInterrupt
+
+        def test_01(self):
+            TestInterruptInSetUp.testsRun += 1
+
+        def test_02(self):
+            TestInterruptInSetUp.testsRun += 1
+            TestInterruptInSetUp.test_02_run = True
+
+    def setUp(self):
+        super(TestInterruptInSetUp, self).setUp()
+        self.suite = self.loader.loadClass(
+            TestInterruptInSetUp.InterruptedTest)
+        TestInterruptInSetUp.test_02_run = False
+        TestInterruptInSetUp.testsRun = 0
+
+    def test_setUpOK(self):
+        self.assertEqual(0, TestInterruptInSetUp.testsRun)
+        self.assertEqual(2, self.suite.countTestCases())
+        self.assertEqual(0, self.reporter.testsRun)
+        self.failIf(self.reporter.shouldStop)
+
+    def test_interruptInSetUp(self):
+        runner.TrialSuite([self.suite]).run(self.reporter)
+        self.failUnless(self.reporter.shouldStop)
+        self.assertEqual(2, self.reporter.testsRun)
+        self.failIf(TestInterruptInSetUp.test_02_run,
+                    "test_02 ran")
+
+
+class TestInterruptInTearDown(TrialTest):
+    testsRun = 0
+
+    class InterruptedTest(unittest.TestCase):
+        def tearDown(self):
+            if TestInterruptInTearDown.testsRun > 0:
+                raise KeyboardInterrupt
+
+        def test_01(self):
+            TestInterruptInTearDown.testsRun += 1
+
+        def test_02(self):
+            TestInterruptInTearDown.testsRun += 1
+            TestInterruptInTearDown.test_02_run = True
+
+    def setUp(self):
+        super(TestInterruptInTearDown, self).setUp()
+        self.suite = self.loader.loadClass(
+            TestInterruptInTearDown.InterruptedTest)
+        TestInterruptInTearDown.testsRun = 0
+        TestInterruptInTearDown.test_02_run = False
+
+    def test_setUpOK(self):
+        self.assertEqual(0, TestInterruptInTearDown.testsRun)
+        self.assertEqual(2, self.suite.countTestCases())
+        self.assertEqual(0, self.reporter.testsRun)
+        self.failIf(self.reporter.shouldStop)
+
+    def test_interruptInTearDown(self):
+        runner.TrialSuite([self.suite]).run(self.reporter)
+        self.assertEqual(1, self.reporter.testsRun)
+        self.failUnless(self.reporter.shouldStop)
+        self.failIf(TestInterruptInTearDown.test_02_run,
+                    "test_02 ran")
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_loader.py b/ThirdParty/Twisted/twisted/trial/test/test_loader.py
new file mode 100644
index 0000000..727c511
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_loader.py
@@ -0,0 +1,613 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for loading tests by name.
+"""
+
+import os
+import shutil
+import sys
+
+import unittest as pyunit
+
+from twisted.python import util
+from twisted.python.hashlib import md5
+from twisted.trial.test import packages
+from twisted.trial import runner, reporter, unittest
+from twisted.trial.itrial import ITestCase
+
+from twisted.python.modules import getModule
+
+
+
+def testNames(tests):
+    """
+    Return the id of each test within the given test suite or case.
+    """
+    names = []
+    for test in unittest._iterateTests(tests):
+        names.append(test.id())
+    return names
+
+
+
+class FinderTest(packages.PackageTest):
+    def setUp(self):
+        packages.PackageTest.setUp(self)
+        self.loader = runner.TestLoader()
+
+    def tearDown(self):
+        packages.PackageTest.tearDown(self)
+
+    def test_findPackage(self):
+        sample1 = self.loader.findByName('twisted')
+        import twisted as sample2
+        self.assertEqual(sample1, sample2)
+
+    def test_findModule(self):
+        sample1 = self.loader.findByName('twisted.trial.test.sample')
+        import sample as sample2
+        self.assertEqual(sample1, sample2)
+
+    def test_findFile(self):
+        path = util.sibpath(__file__, 'sample.py')
+        sample1 = self.loader.findByName(path)
+        import sample as sample2
+        self.assertEqual(sample1, sample2)
+
+    def test_findObject(self):
+        sample1 = self.loader.findByName('twisted.trial.test.sample.FooTest')
+        import sample
+        self.assertEqual(sample.FooTest, sample1)
+
+    def test_findNonModule(self):
+        self.failUnlessRaises(AttributeError,
+                              self.loader.findByName,
+                              'twisted.trial.test.nonexistent')
+
+    def test_findNonPackage(self):
+        self.failUnlessRaises(ValueError,
+                              self.loader.findByName,
+                              'nonextant')
+
+    def test_findNonFile(self):
+        path = util.sibpath(__file__, 'nonexistent.py')
+        self.failUnlessRaises(ValueError, self.loader.findByName, path)
+
+
+
+class FileTest(packages.SysPathManglingTest):
+    """
+    Tests for L{runner.filenameToModule}.
+    """
+    def test_notFile(self):
+        self.failUnlessRaises(ValueError,
+                              runner.filenameToModule, 'doesntexist')
+
+    def test_moduleInPath(self):
+        sample1 = runner.filenameToModule(util.sibpath(__file__, 'sample.py'))
+        import sample as sample2
+        self.assertEqual(sample2, sample1)
+
+
+    def test_moduleNotInPath(self):
+        """
+        If passed the path to a file containing the implementation of a
+        module within a package which is not on the import path,
+        L{runner.filenameToModule} returns a module object loosely
+        resembling the module defined by that file anyway.
+        """
+        # "test_sample" isn't actually the name of this module.  However,
+        # filenameToModule can't seem to figure that out.  So clean up this
+        # mis-named module.  It would be better if this weren't necessary
+        # and filenameToModule either didn't exist or added a correctly
+        # named module to sys.modules.
+        self.addCleanup(sys.modules.pop, 'test_sample', None)
+
+        self.mangleSysPath(self.oldPath)
+        sample1 = runner.filenameToModule(
+            os.path.join(self.parent, 'goodpackage', 'test_sample.py'))
+        self.mangleSysPath(self.newPath)
+        from goodpackage import test_sample as sample2
+        self.assertEqual(os.path.splitext(sample2.__file__)[0],
+                             os.path.splitext(sample1.__file__)[0])
+
+
+    def test_packageInPath(self):
+        package1 = runner.filenameToModule(os.path.join(self.parent,
+                                                        'goodpackage'))
+        import goodpackage
+        self.assertEqual(goodpackage, package1)
+
+
+    def test_packageNotInPath(self):
+        """
+        If passed the path to a directory which represents a package which
+        is not on the import path, L{runner.filenameToModule} returns a
+        module object loosely resembling the package defined by that
+        directory anyway.
+        """
+        # "__init__" isn't actually the name of the package!  However,
+        # filenameToModule is pretty stupid and decides that is its name
+        # after all.  Make sure it gets cleaned up.  See the comment in
+        # test_moduleNotInPath for possible courses of action related to
+        # this.
+        self.addCleanup(sys.modules.pop, "__init__")
+
+        self.mangleSysPath(self.oldPath)
+        package1 = runner.filenameToModule(
+            os.path.join(self.parent, 'goodpackage'))
+        self.mangleSysPath(self.newPath)
+        import goodpackage
+        self.assertEqual(os.path.splitext(goodpackage.__file__)[0],
+                             os.path.splitext(package1.__file__)[0])
+
+
+    def test_directoryNotPackage(self):
+        self.failUnlessRaises(ValueError, runner.filenameToModule,
+                              util.sibpath(__file__, 'directory'))
+
+    def test_filenameNotPython(self):
+        self.failUnlessRaises(ValueError, runner.filenameToModule,
+                              util.sibpath(__file__, 'notpython.py'))
+
+    def test_filenameMatchesPackage(self):
+        filename = os.path.join(self.parent, 'goodpackage.py')
+        fd = open(filename, 'w')
+        fd.write(packages.testModule)
+        fd.close()
+        try:
+            module = runner.filenameToModule(filename)
+            self.assertEqual(filename, module.__file__)
+        finally:
+            os.remove(filename)
+
+    def test_directory(self):
+        """
+        Test loader against a filesystem directory. It should handle
+        'path' and 'path/' the same way.
+        """
+        path  = util.sibpath(__file__, 'goodDirectory')
+        os.mkdir(path)
+        f = file(os.path.join(path, '__init__.py'), "w")
+        f.close()
+        try:
+            module = runner.filenameToModule(path)
+            self.assert_(module.__name__.endswith('goodDirectory'))
+            module = runner.filenameToModule(path + os.path.sep)
+            self.assert_(module.__name__.endswith('goodDirectory'))
+        finally:
+            shutil.rmtree(path)
+
+
+
+class LoaderTest(packages.SysPathManglingTest):
+    """
+    Tests for L{trial.TestLoader}.
+    """
+
+    def setUp(self):
+        self.loader = runner.TestLoader()
+        packages.SysPathManglingTest.setUp(self)
+
+
+    def test_sortCases(self):
+        import sample
+        suite = self.loader.loadClass(sample.AlphabetTest)
+        self.assertEqual(['test_a', 'test_b', 'test_c'],
+                             [test._testMethodName for test in suite._tests])
+        newOrder = ['test_b', 'test_c', 'test_a']
+        sortDict = dict(zip(newOrder, range(3)))
+        self.loader.sorter = lambda x : sortDict.get(x.shortDescription(), -1)
+        suite = self.loader.loadClass(sample.AlphabetTest)
+        self.assertEqual(newOrder,
+                             [test._testMethodName for test in suite._tests])
+
+
+    def test_loadMethod(self):
+        import sample
+        suite = self.loader.loadMethod(sample.FooTest.test_foo)
+        self.assertEqual(1, suite.countTestCases())
+        self.assertEqual('test_foo', suite._testMethodName)
+
+
+    def test_loadFailingMethod(self):
+        # test added for issue1353
+        import erroneous
+        suite = self.loader.loadMethod(erroneous.TestRegularFail.test_fail)
+        result = reporter.TestResult()
+        suite.run(result)
+        self.assertEqual(result.testsRun, 1)
+        self.assertEqual(len(result.failures), 1)
+
+
+    def test_loadNonMethod(self):
+        import sample
+        self.failUnlessRaises(TypeError, self.loader.loadMethod, sample)
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadMethod, sample.FooTest)
+        self.failUnlessRaises(TypeError, self.loader.loadMethod, "string")
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadMethod, ('foo', 'bar'))
+
+
+    def test_loadBadDecorator(self):
+        """
+        A decorated test method for which the decorator has failed to set the
+        method's __name__ correctly is loaded and its name in the class scope
+        discovered.
+        """
+        import sample
+        suite = self.loader.loadMethod(sample.DecorationTest.test_badDecorator)
+        self.assertEqual(1, suite.countTestCases())
+        self.assertEqual('test_badDecorator', suite._testMethodName)
+
+
+    def test_loadGoodDecorator(self):
+        """
+        A decorated test method for which the decorator has set the method's
+        __name__ correctly is loaded and the only name by which it goes is used.
+        """
+        import sample
+        suite = self.loader.loadMethod(
+            sample.DecorationTest.test_goodDecorator)
+        self.assertEqual(1, suite.countTestCases())
+        self.assertEqual('test_goodDecorator', suite._testMethodName)
+
+
+    def test_loadRenamedDecorator(self):
+        """
+        Load a decorated method which has been copied to a new name inside the
+        class.  Thus its __name__ and its key in the class's __dict__ no
+        longer match.
+        """
+        import sample
+        suite = self.loader.loadMethod(
+            sample.DecorationTest.test_renamedDecorator)
+        self.assertEqual(1, suite.countTestCases())
+        self.assertEqual('test_renamedDecorator', suite._testMethodName)
+
+
+    def test_loadClass(self):
+        import sample
+        suite = self.loader.loadClass(sample.FooTest)
+        self.assertEqual(2, suite.countTestCases())
+        self.assertEqual(['test_bar', 'test_foo'],
+                             [test._testMethodName for test in suite._tests])
+
+
+    def test_loadNonClass(self):
+        import sample
+        self.failUnlessRaises(TypeError, self.loader.loadClass, sample)
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadClass, sample.FooTest.test_foo)
+        self.failUnlessRaises(TypeError, self.loader.loadClass, "string")
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadClass, ('foo', 'bar'))
+
+
+    def test_loadNonTestCase(self):
+        import sample
+        self.failUnlessRaises(ValueError, self.loader.loadClass,
+                              sample.NotATest)
+
+
+    def test_loadModule(self):
+        import sample
+        suite = self.loader.loadModule(sample)
+        self.assertEqual(10, suite.countTestCases())
+
+
+    def test_loadNonModule(self):
+        import sample
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadModule, sample.FooTest)
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadModule, sample.FooTest.test_foo)
+        self.failUnlessRaises(TypeError, self.loader.loadModule, "string")
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadModule, ('foo', 'bar'))
+
+
+    def test_loadPackage(self):
+        import goodpackage
+        suite = self.loader.loadPackage(goodpackage)
+        self.assertEqual(7, suite.countTestCases())
+
+
+    def test_loadNonPackage(self):
+        import sample
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadPackage, sample.FooTest)
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadPackage, sample.FooTest.test_foo)
+        self.failUnlessRaises(TypeError, self.loader.loadPackage, "string")
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadPackage, ('foo', 'bar'))
+
+
+    def test_loadModuleAsPackage(self):
+        import sample
+        ## XXX -- should this instead raise a ValueError? -- jml
+        self.failUnlessRaises(TypeError, self.loader.loadPackage, sample)
+
+
+    def test_loadPackageRecursive(self):
+        import goodpackage
+        suite = self.loader.loadPackage(goodpackage, recurse=True)
+        self.assertEqual(14, suite.countTestCases())
+
+
+    def test_loadAnythingOnModule(self):
+        import sample
+        suite = self.loader.loadAnything(sample)
+        self.assertEqual(sample.__name__,
+                             suite._tests[0]._tests[0].__class__.__module__)
+
+
+    def test_loadAnythingOnClass(self):
+        import sample
+        suite = self.loader.loadAnything(sample.FooTest)
+        self.assertEqual(2, suite.countTestCases())
+
+
+    def test_loadAnythingOnMethod(self):
+        import sample
+        suite = self.loader.loadAnything(sample.FooTest.test_foo)
+        self.assertEqual(1, suite.countTestCases())
+
+
+    def test_loadAnythingOnPackage(self):
+        import goodpackage
+        suite = self.loader.loadAnything(goodpackage)
+        self.failUnless(isinstance(suite, self.loader.suiteFactory))
+        self.assertEqual(7, suite.countTestCases())
+
+
+    def test_loadAnythingOnPackageRecursive(self):
+        import goodpackage
+        suite = self.loader.loadAnything(goodpackage, recurse=True)
+        self.failUnless(isinstance(suite, self.loader.suiteFactory))
+        self.assertEqual(14, suite.countTestCases())
+
+
+    def test_loadAnythingOnString(self):
+        # the important thing about this test is not the string-iness
+        # but the non-handledness.
+        self.failUnlessRaises(TypeError,
+                              self.loader.loadAnything, "goodpackage")
+
+
+    def test_importErrors(self):
+        import package
+        suite = self.loader.loadPackage(package, recurse=True)
+        result = reporter.Reporter()
+        suite.run(result)
+        self.assertEqual(False, result.wasSuccessful())
+        self.assertEqual(2, len(result.errors))
+        errors = [test.id() for test, error in result.errors]
+        errors.sort()
+        self.assertEqual(errors, ['package.test_bad_module',
+                                      'package.test_import_module'])
+
+
+    def test_differentInstances(self):
+        """
+        L{TestLoader.loadClass} returns a suite with each test method
+        represented by a different instance of the L{TestCase} it is
+        defined on.
+        """
+        class DistinctInstances(pyunit.TestCase):
+            def test_1(self):
+                self.first = 'test1Run'
+
+            def test_2(self):
+                self.assertFalse(hasattr(self, 'first'))
+
+        suite = self.loader.loadClass(DistinctInstances)
+        result = reporter.Reporter()
+        suite.run(result)
+        self.assertTrue(result.wasSuccessful())
+
+
+    def test_loadModuleWith_test_suite(self):
+        """
+        Check that C{test_suite} is used when present and other L{TestCase}s are
+        not included.
+        """
+        from twisted.trial.test import mockcustomsuite
+        suite = self.loader.loadModule(mockcustomsuite)
+        self.assertEqual(0, suite.countTestCases())
+        self.assertEqual("MyCustomSuite", getattr(suite, 'name', None))
+
+
+    def test_loadModuleWith_testSuite(self):
+        """
+        Check that C{testSuite} is used when present and other L{TestCase}s are
+        not included.
+        """
+        from twisted.trial.test import mockcustomsuite2
+        suite = self.loader.loadModule(mockcustomsuite2)
+        self.assertEqual(0, suite.countTestCases())
+        self.assertEqual("MyCustomSuite", getattr(suite, 'name', None))
+
+
+    def test_loadModuleWithBothCustom(self):
+        """
+        Check that if C{testSuite} and C{test_suite} are both present in a
+        module then C{testSuite} gets priority.
+        """
+        from twisted.trial.test import mockcustomsuite3
+        suite = self.loader.loadModule(mockcustomsuite3)
+        self.assertEqual('testSuite', getattr(suite, 'name', None))
+
+
+    def test_customLoadRaisesAttributeError(self):
+        """
+        Make sure that any C{AttributeError}s raised by C{testSuite} are not
+        swallowed by L{TestLoader}.
+        """
+        def testSuite():
+            raise AttributeError('should be reraised')
+        from twisted.trial.test import mockcustomsuite2
+        mockcustomsuite2.testSuite, original = (testSuite,
+                                                mockcustomsuite2.testSuite)
+        try:
+            self.assertRaises(AttributeError, self.loader.loadModule,
+                              mockcustomsuite2)
+        finally:
+            mockcustomsuite2.testSuite = original
+
+
+    # XXX - duplicated and modified from test_script
+    def assertSuitesEqual(self, test1, test2):
+        names1 = testNames(test1)
+        names2 = testNames(test2)
+        names1.sort()
+        names2.sort()
+        self.assertEqual(names1, names2)
+
+
+    def test_loadByNamesDuplicate(self):
+        """
+        Check that loadByNames ignores duplicate names
+        """
+        module = 'twisted.trial.test.test_test_visitor'
+        suite1 = self.loader.loadByNames([module, module], True)
+        suite2 = self.loader.loadByName(module, True)
+        self.assertSuitesEqual(suite1, suite2)
+
+
+    def test_loadDifferentNames(self):
+        """
+        Check that loadByNames loads all the names that it is given
+        """
+        modules = ['goodpackage', 'package.test_module']
+        suite1 = self.loader.loadByNames(modules)
+        suite2 = runner.TestSuite(map(self.loader.loadByName, modules))
+        self.assertSuitesEqual(suite1, suite2)
+
+    def test_loadInheritedMethods(self):
+        """
+        Check that test methods inherited from a base class are all
+        loaded rather than just one.
+        """
+        methods = ['inheritancepackage.test_x.A.test_foo',
+                   'inheritancepackage.test_x.B.test_foo']
+        suite1 = self.loader.loadByNames(methods)
+        suite2 = runner.TestSuite(map(self.loader.loadByName, methods))
+        self.assertSuitesEqual(suite1, suite2)
+
+
+
+class ZipLoadingTest(LoaderTest):
+    def setUp(self):
+        from twisted.python.test.test_zippath import zipit
+        LoaderTest.setUp(self)
+        zipit(self.parent, self.parent+'.zip')
+        self.parent += '.zip'
+        self.mangleSysPath(self.oldPath+[self.parent])
+
+
+
+class PackageOrderingTest(packages.SysPathManglingTest):
+    if sys.version_info < (2, 4):
+        skip = (
+            "Python 2.3 import semantics make this behavior incorrect on that "
+            "version of Python as well as difficult to test.  The second "
+            "import of a package which raised an exception the first time it "
+            "was imported will succeed on Python 2.3, whereas it will fail on "
+            "later versions of Python.  Trial does not account for this, so "
+            "this test fails with inconsistencies between the expected and "
+            "the received loader errors.")
+
+    def setUp(self):
+        self.loader = runner.TestLoader()
+        self.topDir = self.mktemp()
+        parent = os.path.join(self.topDir, "uberpackage")
+        os.makedirs(parent)
+        file(os.path.join(parent, "__init__.py"), "wb").close()
+        packages.SysPathManglingTest.setUp(self, parent)
+        self.mangleSysPath(self.oldPath + [self.topDir])
+
+    def _trialSortAlgorithm(self, sorter):
+        """
+        Right now, halfway by accident, trial sorts like this:
+
+            1. all modules are grouped together in one list and sorted.
+
+            2. within each module, the classes are grouped together in one list
+               and sorted.
+
+            3. finally within each class, each test method is grouped together
+               in a list and sorted.
+
+        This attempts to return a sorted list of testable thingies following
+        those rules, so that we can compare the behavior of loadPackage.
+
+        The things that show as 'cases' are errors from modules which failed to
+        import, and test methods.  Let's gather all those together.
+        """
+        pkg = getModule('uberpackage')
+        testModules = []
+        for testModule in pkg.walkModules():
+            if testModule.name.split(".")[-1].startswith("test_"):
+                testModules.append(testModule)
+        sortedModules = sorted(testModules, key=sorter) # ONE
+        for modinfo in sortedModules:
+            # Now let's find all the classes.
+            module = modinfo.load(None)
+            if module is None:
+                yield modinfo
+            else:
+                testClasses = []
+                for attrib in modinfo.iterAttributes():
+                    if runner.isTestCase(attrib.load()):
+                        testClasses.append(attrib)
+                sortedClasses = sorted(testClasses, key=sorter) # TWO
+                for clsinfo in sortedClasses:
+                    testMethods = []
+                    for attr in clsinfo.iterAttributes():
+                        if attr.name.split(".")[-1].startswith('test'):
+                            testMethods.append(attr)
+                    sortedMethods = sorted(testMethods, key=sorter) # THREE
+                    for methinfo in sortedMethods:
+                        yield methinfo
+
+
+    def loadSortedPackages(self, sorter=runner.name):
+        """
+        Verify that packages are loaded in the correct order.
+        """
+        import uberpackage
+        self.loader.sorter = sorter
+        suite = self.loader.loadPackage(uberpackage, recurse=True)
+        # XXX: Work around strange, unexplained Zope crap.
+        # jml, 2007-11-15.
+        suite = unittest.decorate(suite, ITestCase)
+        resultingTests = list(unittest._iterateTests(suite))
+        manifest = list(self._trialSortAlgorithm(sorter))
+        for number, (manifestTest, actualTest) in enumerate(
+            zip(manifest, resultingTests)):
+            self.assertEqual(
+                 manifestTest.name, actualTest.id(),
+                 "#%d: %s != %s" %
+                 (number, manifestTest.name, actualTest.id()))
+        self.assertEqual(len(manifest), len(resultingTests))
+
+
+    def test_sortPackagesDefaultOrder(self):
+        self.loadSortedPackages()
+
+
+    def test_sortPackagesSillyOrder(self):
+        def sillySorter(s):
+            # This has to work on fully-qualified class names and class
+            # objects, which is silly, but it's the "spec", such as it is.
+#             if isinstance(s, type) or isinstance(s, types.ClassType):
+#                 return s.__module__+'.'+s.__name__
+            n = runner.name(s)
+            d = md5(n).hexdigest()
+            return d
+        self.loadSortedPackages(sillySorter)
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_log.py b/ThirdParty/Twisted/twisted/trial/test/test_log.py
new file mode 100644
index 0000000..a286550
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_log.py
@@ -0,0 +1,235 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test the interaction between trial and errors logged during a test run.
+"""
+from __future__ import division
+
+import time
+
+from twisted.internet import reactor, task
+from twisted.python import failure, log
+from twisted.trial import unittest, reporter
+
+
+def makeFailure():
+    """
+    Return a new, realistic failure.
+    """
+    try:
+        1/0
+    except ZeroDivisionError:
+        f = failure.Failure()
+    return f
+
+
+
+class Mask(object):
+    """
+    Hide C{MockTest}s from Trial's automatic test finder.
+    """
+    class FailureLoggingMixin(object):
+        def test_silent(self):
+            """
+            Don't log any errors.
+            """
+
+        def test_single(self):
+            """
+            Log a single error.
+            """
+            log.err(makeFailure())
+
+        def test_double(self):
+            """
+            Log two errors.
+            """
+            log.err(makeFailure())
+            log.err(makeFailure())
+
+
+    class SynchronousFailureLogging(FailureLoggingMixin, unittest.SynchronousTestCase):
+        pass
+
+
+    class AsynchronousFailureLogging(FailureLoggingMixin, unittest.TestCase):
+        def test_inCallback(self):
+            """
+            Log an error in an asynchronous callback.
+            """
+            return task.deferLater(reactor, 0, lambda: log.err(makeFailure()))
+
+
+
+class TestObserver(unittest.SynchronousTestCase):
+    """
+    Tests for L{unittest._LogObserver}, a helper for the implementation of
+    L{SynchronousTestCase.flushLoggedErrors}.
+    """
+    def setUp(self):
+        self.result = reporter.TestResult()
+        self.observer = unittest._LogObserver()
+
+
+    def test_msg(self):
+        """
+        Test that a standard log message doesn't go anywhere near the result.
+        """
+        self.observer.gotEvent({'message': ('some message',),
+                                'time': time.time(), 'isError': 0,
+                                'system': '-'})
+        self.assertEqual(self.observer.getErrors(), [])
+
+
+    def test_error(self):
+        """
+        Test that an observed error gets added to the result
+        """
+        f = makeFailure()
+        self.observer.gotEvent({'message': (),
+                                'time': time.time(), 'isError': 1,
+                                'system': '-', 'failure': f,
+                                'why': None})
+        self.assertEqual(self.observer.getErrors(), [f])
+
+
+    def test_flush(self):
+        """
+        Check that flushing the observer with no args removes all errors.
+        """
+        self.test_error()
+        flushed = self.observer.flushErrors()
+        self.assertEqual(self.observer.getErrors(), [])
+        self.assertEqual(len(flushed), 1)
+        self.assertTrue(flushed[0].check(ZeroDivisionError))
+
+
+    def _makeRuntimeFailure(self):
+        return failure.Failure(RuntimeError('test error'))
+
+
+    def test_flushByType(self):
+        """
+        Check that flushing the observer removes all failures of the given type.
+        """
+        self.test_error() # log a ZeroDivisionError to the observer
+        f = self._makeRuntimeFailure()
+        self.observer.gotEvent(dict(message=(), time=time.time(), isError=1,
+                                    system='-', failure=f, why=None))
+        flushed = self.observer.flushErrors(ZeroDivisionError)
+        self.assertEqual(self.observer.getErrors(), [f])
+        self.assertEqual(len(flushed), 1)
+        self.assertTrue(flushed[0].check(ZeroDivisionError))
+
+
+    def test_ignoreErrors(self):
+        """
+        Check that C{_ignoreErrors} actually causes errors to be ignored.
+        """
+        self.observer._ignoreErrors(ZeroDivisionError)
+        f = makeFailure()
+        self.observer.gotEvent({'message': (),
+                                'time': time.time(), 'isError': 1,
+                                'system': '-', 'failure': f,
+                                'why': None})
+        self.assertEqual(self.observer.getErrors(), [])
+
+
+    def test_clearIgnores(self):
+        """
+        Check that C{_clearIgnores} ensures that previously ignored errors
+        get captured.
+        """
+        self.observer._ignoreErrors(ZeroDivisionError)
+        self.observer._clearIgnores()
+        f = makeFailure()
+        self.observer.gotEvent({'message': (),
+                                'time': time.time(), 'isError': 1,
+                                'system': '-', 'failure': f,
+                                'why': None})
+        self.assertEqual(self.observer.getErrors(), [f])
+
+
+
+class LogErrorsMixin(object):
+    """
+    High-level tests demonstrating the expected behaviour of logged errors
+    during tests.
+    """
+
+    def setUp(self):
+        self.result = reporter.TestResult()
+
+    def tearDown(self):
+        self.flushLoggedErrors(ZeroDivisionError)
+
+
+    def test_singleError(self):
+        """
+        Test that a logged error gets reported as a test error.
+        """
+        test = self.MockTest('test_single')
+        test(self.result)
+        self.assertEqual(len(self.result.errors), 1)
+        self.assertTrue(self.result.errors[0][1].check(ZeroDivisionError),
+                        self.result.errors[0][1])
+        self.assertEqual(0, self.result.successes)
+
+
+    def test_twoErrors(self):
+        """
+        Test that when two errors get logged, they both get reported as test
+        errors.
+        """
+        test = self.MockTest('test_double')
+        test(self.result)
+        self.assertEqual(len(self.result.errors), 2)
+        self.assertEqual(0, self.result.successes)
+
+
+    def test_errorsIsolated(self):
+        """
+        Check that an error logged in one test doesn't fail the next test.
+        """
+        t1 = self.MockTest('test_single')
+        t2 = self.MockTest('test_silent')
+        t1(self.result)
+        t2(self.result)
+        self.assertEqual(len(self.result.errors), 1)
+        self.assertEqual(self.result.errors[0][0], t1)
+        self.assertEqual(1, self.result.successes)
+
+
+    def test_boundedObservers(self):
+        """
+        There are no extra log observers after a test runs.
+        """
+        # XXX trial is *all about* global log state.  It should really be fixed.
+        observer = unittest._LogObserver()
+        self.patch(unittest, '_logObserver', observer)
+        observers = log.theLogPublisher.observers[:]
+        test = self.MockTest()
+        test(self.result)
+        self.assertEqual(observers, log.theLogPublisher.observers)
+
+
+
+class SynchronousLogErrorsTests(LogErrorsMixin, unittest.SynchronousTestCase):
+    MockTest = Mask.SynchronousFailureLogging
+
+
+
+class AsynchronousLogErrorsTests(LogErrorsMixin, unittest.TestCase):
+    MockTest = Mask.AsynchronousFailureLogging
+
+    def test_inCallback(self):
+        """
+        Test that errors logged in callbacks get reported as test errors.
+        """
+        test = self.MockTest('test_inCallback')
+        test(self.result)
+        self.assertEqual(len(self.result.errors), 1)
+        self.assertTrue(self.result.errors[0][1].check(ZeroDivisionError),
+                        self.result.errors[0][1])
+
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_output.py b/ThirdParty/Twisted/twisted/trial/test/test_output.py
new file mode 100644
index 0000000..5f8c7f9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_output.py
@@ -0,0 +1,179 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the output generated by trial.
+"""
+
+import os, StringIO
+
+from twisted.scripts import trial
+from twisted.trial import runner
+from twisted.trial.test import packages
+
+
+def runTrial(*args):
+    from twisted.trial import reporter
+    config = trial.Options()
+    config.parseOptions(args)
+    output = StringIO.StringIO()
+    myRunner = runner.TrialRunner(
+        reporter.VerboseTextReporter,
+        stream=output,
+        workingDirectory=config['temp-directory'])
+    suite = trial._getSuite(config)
+    result = myRunner.run(suite)
+    return output.getvalue()
+
+
+class TestImportErrors(packages.SysPathManglingTest):
+    """Actually run trial as if on the command line and check that the output
+    is what we expect.
+    """
+
+    debug = False
+    parent = "_testImportErrors"
+    def runTrial(self, *args):
+        return runTrial('--temp-directory', self.mktemp(), *args)
+
+
+    def _print(self, stuff):
+        print stuff
+        return stuff
+
+
+    def assertIn(self, container, containee, *args, **kwargs):
+        # redefined to be useful in callbacks
+        super(TestImportErrors, self).assertIn(
+            containee, container, *args, **kwargs)
+        return container
+
+
+    def assertNotIn(self, container, containee, *args, **kwargs):
+        # redefined to be useful in callbacks
+        super(TestImportErrors, self).assertNotIn(
+            containee, container, *args, **kwargs)
+        return container
+
+
+    def test_trialRun(self):
+        self.runTrial()
+
+
+    def test_nonexistentModule(self):
+        d = self.runTrial('twisted.doesntexist')
+        self.assertIn(d, '[ERROR]')
+        self.assertIn(d, 'twisted.doesntexist')
+        return d
+
+
+    def test_nonexistentPackage(self):
+        d = self.runTrial('doesntexist')
+        self.assertIn(d, 'doesntexist')
+        self.assertIn(d, 'ModuleNotFound')
+        self.assertIn(d, '[ERROR]')
+        return d
+
+
+    def test_nonexistentPackageWithModule(self):
+        d = self.runTrial('doesntexist.barney')
+        self.assertIn(d, 'doesntexist.barney')
+        self.assertIn(d, 'ObjectNotFound')
+        self.assertIn(d, '[ERROR]')
+        return d
+
+
+    def test_badpackage(self):
+        d = self.runTrial('badpackage')
+        self.assertIn(d, '[ERROR]')
+        self.assertIn(d, 'badpackage')
+        self.assertNotIn(d, 'IOError')
+        return d
+
+
+    def test_moduleInBadpackage(self):
+        d = self.runTrial('badpackage.test_module')
+        self.assertIn(d, "[ERROR]")
+        self.assertIn(d, "badpackage.test_module")
+        self.assertNotIn(d, 'IOError')
+        return d
+
+
+    def test_badmodule(self):
+        d = self.runTrial('package.test_bad_module')
+        self.assertIn(d, '[ERROR]')
+        self.assertIn(d, 'package.test_bad_module')
+        self.assertNotIn(d, 'IOError')
+        self.assertNotIn(d, '<module ')
+        return d
+
+
+    def test_badimport(self):
+        d = self.runTrial('package.test_import_module')
+        self.assertIn(d, '[ERROR]')
+        self.assertIn(d, 'package.test_import_module')
+        self.assertNotIn(d, 'IOError')
+        self.assertNotIn(d, '<module ')
+        return d
+
+
+    def test_recurseImport(self):
+        d = self.runTrial('package')
+        self.assertIn(d, '[ERROR]')
+        self.assertIn(d, 'test_bad_module')
+        self.assertIn(d, 'test_import_module')
+        self.assertNotIn(d, '<module ')
+        self.assertNotIn(d, 'IOError')
+        return d
+
+
+    def test_recurseImportErrors(self):
+        d = self.runTrial('package2')
+        self.assertIn(d, '[ERROR]')
+        self.assertIn(d, 'package2')
+        self.assertIn(d, 'test_module')
+        self.assertIn(d, "No module named frotz")
+        self.assertNotIn(d, '<module ')
+        self.assertNotIn(d, 'IOError')
+        return d
+
+
+    def test_nonRecurseImportErrors(self):
+        d = self.runTrial('-N', 'package2')
+        self.assertIn(d, '[ERROR]')
+        self.assertIn(d, "No module named frotz")
+        self.assertNotIn(d, '<module ')
+        return d
+
+
+    def test_regularRun(self):
+        d = self.runTrial('package.test_module')
+        self.assertNotIn(d, '[ERROR]')
+        self.assertNotIn(d, 'IOError')
+        self.assertIn(d, 'OK')
+        self.assertIn(d, 'PASSED (successes=1)')
+        return d
+
+
+    def test_filename(self):
+        self.mangleSysPath(self.oldPath)
+        d = self.runTrial(
+            os.path.join(self.parent, 'package', 'test_module.py'))
+        self.assertNotIn(d, '[ERROR]')
+        self.assertNotIn(d, 'IOError')
+        self.assertIn(d, 'OK')
+        self.assertIn(d, 'PASSED (successes=1)')
+        return d
+
+
+    def test_dosFile(self):
+        ## XXX -- not really an output test, more of a script test
+        self.mangleSysPath(self.oldPath)
+        d = self.runTrial(
+            os.path.join(self.parent,
+                         'package', 'test_dos_module.py'))
+        self.assertNotIn(d, '[ERROR]')
+        self.assertNotIn(d, 'IOError')
+        self.assertIn(d, 'OK')
+        self.assertIn(d, 'PASSED (successes=1)')
+        return d
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_plugins.py b/ThirdParty/Twisted/twisted/trial/test/test_plugins.py
new file mode 100644
index 0000000..518df5f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_plugins.py
@@ -0,0 +1,46 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+# Maintainer: Jonathan Lange
+
+"""
+Tests for L{twisted.plugins.twisted_trial}.
+"""
+
+from twisted.plugin import getPlugins
+from twisted.trial import unittest
+from twisted.trial.itrial import IReporter
+
+
+class TestPlugins(unittest.SynchronousTestCase):
+    """
+    Tests for Trial's reporter plugins.
+    """
+
+    def getPluginsByLongOption(self, longOption):
+        """
+        Return the Trial reporter plugin with the given long option.
+
+        If more than one is found, raise ValueError. If none are found, raise
+        IndexError.
+        """
+        plugins = [
+            plugin for plugin in getPlugins(IReporter)
+            if plugin.longOpt == longOption]
+        if len(plugins) > 1:
+            raise ValueError(
+                "More than one plugin found with long option %r: %r"
+                % (longOption, plugins))
+        return plugins[0]
+
+
+    def test_subunitPlugin(self):
+        """
+        One of the reporter plugins is the subunit reporter plugin.
+        """
+        subunitPlugin = self.getPluginsByLongOption('subunit')
+        self.assertEqual('Subunit Reporter', subunitPlugin.name)
+        self.assertEqual('twisted.trial.reporter', subunitPlugin.module)
+        self.assertEqual('subunit', subunitPlugin.longOpt)
+        self.assertIdentical(None, subunitPlugin.shortOpt)
+        self.assertEqual('SubunitReporter', subunitPlugin.klass)
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_pyunitcompat.py b/ThirdParty/Twisted/twisted/trial/test/test_pyunitcompat.py
new file mode 100644
index 0000000..c40907f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_pyunitcompat.py
@@ -0,0 +1,302 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from __future__ import division, absolute_import
+
+import sys
+import traceback
+
+from zope.interface import implementer
+
+from twisted.python.compat import _PY3
+from twisted.python.failure import Failure
+from twisted.trial import util
+from twisted.trial.unittest import SynchronousTestCase, PyUnitResultAdapter
+from twisted.trial.itrial import IReporter, ITestCase
+
+import unittest as pyunit
+
+
+class TestPyUnitTestCase(SynchronousTestCase):
+
+    class PyUnitTest(pyunit.TestCase):
+
+        def test_pass(self):
+            pass
+
+
+    def setUp(self):
+        self.original = self.PyUnitTest('test_pass')
+        self.test = ITestCase(self.original)
+
+
+    def test_visit(self):
+        """
+        Trial assumes that test cases implement visit().
+        """
+        log = []
+        def visitor(test):
+            log.append(test)
+        self.test.visit(visitor)
+        self.assertEqual(log, [self.test])
+    test_visit.suppress = [
+        util.suppress(category=DeprecationWarning,
+                      message="Test visitors deprecated in Twisted 8.0")]
+
+
+    def test_callable(self):
+        """
+        Tests must be callable in order to be used with Python's unittest.py.
+        """
+        self.assertTrue(callable(self.test),
+                        "%r is not callable." % (self.test,))
+
+# Remove this when we port twisted.trial._synctest to Python 3:
+if _PY3:
+    del TestPyUnitTestCase
+
+
+
+class TestPyUnitResult(SynchronousTestCase):
+    """
+    Tests to show that PyUnitResultAdapter wraps TestResult objects from the
+    standard library 'unittest' module in such a way as to make them usable and
+    useful from Trial.
+    """
+
+    # Once erroneous is ported to Python 3 this can be replaced with
+    # erroneous.ErrorTest:
+    class ErrorTest(SynchronousTestCase):
+        """
+        A test case which has a L{test_foo} which will raise an error.
+
+        @ivar ran: boolean indicating whether L{test_foo} has been run.
+        """
+        ran = False
+
+        def test_foo(self):
+            """
+            Set C{self.ran} to True and raise a C{ZeroDivisionError}
+            """
+            self.ran = True
+            1/0
+
+
+    def test_dontUseAdapterWhenReporterProvidesIReporter(self):
+        """
+        The L{PyUnitResultAdapter} is only used when the result passed to
+        C{run} does *not* provide L{IReporter}.
+        """
+        @implementer(IReporter)
+        class StubReporter(object):
+            """
+            A reporter which records data about calls made to it.
+
+            @ivar errors: Errors passed to L{addError}.
+            @ivar failures: Failures passed to L{addFailure}.
+            """
+
+            def __init__(self):
+                self.errors = []
+                self.failures = []
+
+            def startTest(self, test):
+                """
+                Do nothing.
+                """
+
+            def stopTest(self, test):
+                """
+                Do nothing.
+                """
+
+            def addError(self, test, error):
+                """
+                Record the error.
+                """
+                self.errors.append(error)
+
+        test = self.ErrorTest("test_foo")
+        result = StubReporter()
+        test.run(result)
+        self.assertIsInstance(result.errors[0], Failure)
+
+
+    def test_success(self):
+        class SuccessTest(SynchronousTestCase):
+            ran = False
+            def test_foo(s):
+                s.ran = True
+        test = SuccessTest('test_foo')
+        result = pyunit.TestResult()
+        test.run(result)
+
+        self.failUnless(test.ran)
+        self.assertEqual(1, result.testsRun)
+        self.failUnless(result.wasSuccessful())
+
+    def test_failure(self):
+        class FailureTest(SynchronousTestCase):
+            ran = False
+            def test_foo(s):
+                s.ran = True
+                s.fail('boom!')
+        test = FailureTest('test_foo')
+        result = pyunit.TestResult()
+        test.run(result)
+
+        self.failUnless(test.ran)
+        self.assertEqual(1, result.testsRun)
+        self.assertEqual(1, len(result.failures))
+        self.failIf(result.wasSuccessful())
+
+    def test_error(self):
+        test = self.ErrorTest('test_foo')
+        result = pyunit.TestResult()
+        test.run(result)
+
+        self.failUnless(test.ran)
+        self.assertEqual(1, result.testsRun)
+        self.assertEqual(1, len(result.errors))
+        self.failIf(result.wasSuccessful())
+
+    def test_setUpError(self):
+        class ErrorTest(SynchronousTestCase):
+            ran = False
+            def setUp(self):
+                1/0
+            def test_foo(s):
+                s.ran = True
+        test = ErrorTest('test_foo')
+        result = pyunit.TestResult()
+        test.run(result)
+
+        self.failIf(test.ran)
+        self.assertEqual(1, result.testsRun)
+        self.assertEqual(1, len(result.errors))
+        self.failIf(result.wasSuccessful())
+
+    def test_tracebackFromFailure(self):
+        """
+        Errors added through the L{PyUnitResultAdapter} have the same traceback
+        information as if there were no adapter at all.
+        """
+        try:
+            1/0
+        except ZeroDivisionError:
+            exc_info = sys.exc_info()
+            f = Failure()
+        pyresult = pyunit.TestResult()
+        result = PyUnitResultAdapter(pyresult)
+        result.addError(self, f)
+        self.assertEqual(pyresult.errors[0][1],
+                         ''.join(traceback.format_exception(*exc_info)))
+
+
+    def test_traceback(self):
+        """
+        As test_tracebackFromFailure, but covering more code.
+        """
+        class ErrorTest(SynchronousTestCase):
+            exc_info = None
+            def test_foo(self):
+                try:
+                    1/0
+                except ZeroDivisionError:
+                    self.exc_info = sys.exc_info()
+                    raise
+        test = ErrorTest('test_foo')
+        result = pyunit.TestResult()
+        test.run(result)
+
+        # We can't test that the tracebacks are equal, because Trial's
+        # machinery inserts a few extra frames on the top and we don't really
+        # want to trim them off without an extremely good reason.
+        #
+        # So, we just test that the result's stack ends with the
+        # exception's stack.
+
+        expected_stack = ''.join(traceback.format_tb(test.exc_info[2]))
+        observed_stack = '\n'.join(result.errors[0][1].splitlines()[:-1])
+
+        self.assertEqual(expected_stack.strip(),
+                         observed_stack[-len(expected_stack):].strip())
+
+
+    def test_tracebackFromCleanFailure(self):
+        """
+        Errors added through the L{PyUnitResultAdapter} have the same
+        traceback information as if there were no adapter at all, even
+        if the Failure that held the information has been cleaned.
+        """
+        try:
+            1/0
+        except ZeroDivisionError:
+            exc_info = sys.exc_info()
+            f = Failure()
+        f.cleanFailure()
+        pyresult = pyunit.TestResult()
+        result = PyUnitResultAdapter(pyresult)
+        result.addError(self, f)
+        self.assertEqual(pyresult.errors[0][1],
+                         ''.join(traceback.format_exception(*exc_info)))
+
+
+    def test_trialSkip(self):
+        """
+        Skips using trial's skipping functionality are reported as skips in
+        the L{pyunit.TestResult}.
+        """
+        class SkipTest(SynchronousTestCase):
+            def test_skip(self):
+                1/0
+            test_skip.skip = "Let's skip!"
+
+        test = SkipTest('test_skip')
+        result = pyunit.TestResult()
+        test.run(result)
+        self.assertEqual(result.skipped, [(test, "Let's skip!")])
+
+
+    def test_pyunitSkip(self):
+        """
+        Skips using pyunit's skipping functionality are reported as skips in
+        the L{pyunit.TestResult}.
+        """
+        class SkipTest(SynchronousTestCase):
+            @pyunit.skip("skippy")
+            def test_skip(self):
+                1/0
+
+        test = SkipTest('test_skip')
+        result = pyunit.TestResult()
+        test.run(result)
+        self.assertEqual(result.skipped, [(test, "skippy")])
+
+
+    def test_skip26(self):
+        """
+        On Python 2.6, pyunit doesn't support skipping, so it gets added as a
+        failure to the L{pyunit.TestResult}.
+        """
+        class SkipTest(SynchronousTestCase):
+            def test_skip(self):
+                1/0
+            test_skip.skip = "Let's skip!"
+
+        test = SkipTest('test_skip')
+        result = pyunit.TestResult()
+        test.run(result)
+        self.assertEqual(len(result.failures), 1)
+        test2, reason = result.failures[0]
+        self.assertIdentical(test, test2)
+        self.assertIn("UnsupportedTrialFeature", reason)
+
+    if sys.version_info[:2] < (2, 7):
+        message = "pyunit doesn't support skipping in Python 2.6"
+        test_trialSkip.skip = message
+        test_pyunitSkip.skip = message
+        del message
+    else:
+        test_skip26.skip = "This test is only relevant to Python 2.6"
+
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_reporter.py b/ThirdParty/Twisted/twisted/trial/test/test_reporter.py
new file mode 100644
index 0000000..34d44ca
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_reporter.py
@@ -0,0 +1,1657 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+# Maintainer: Jonathan Lange
+
+"""
+Tests for L{twisted.trial.reporter}.
+"""
+from __future__ import division
+
+import errno, sys, os, re, StringIO
+from inspect import getmro
+
+from twisted.internet.utils import suppressWarnings
+from twisted.python import log
+from twisted.python.failure import Failure
+from twisted.trial import itrial, unittest, runner, reporter, util
+from twisted.trial.reporter import UncleanWarningsReporterWrapper
+from twisted.trial.test import erroneous
+from twisted.trial.unittest import makeTodo, SkipTest, Todo
+from twisted.trial.test import sample
+
+
+class BrokenStream(object):
+    """
+    Stream-ish object that raises a signal interrupt error. We use this to make
+    sure that Trial still manages to write what it needs to write.
+    """
+    written = False
+    flushed = False
+
+    def __init__(self, fObj):
+        self.fObj = fObj
+
+    def write(self, s):
+        if self.written:
+            return self.fObj.write(s)
+        self.written = True
+        raise IOError(errno.EINTR, "Interrupted write")
+
+    def flush(self):
+        if self.flushed:
+            return self.fObj.flush()
+        self.flushed = True
+        raise IOError(errno.EINTR, "Interrupted flush")
+
+
+class StringTest(unittest.SynchronousTestCase):
+    def stringComparison(self, expect, output):
+        output = filter(None, output)
+        self.failUnless(len(expect) <= len(output),
+                        "Must have more observed than expected"
+                        "lines %d < %d" % (len(output), len(expect)))
+        REGEX_PATTERN_TYPE = type(re.compile(''))
+        for line_number, (exp, out) in enumerate(zip(expect, output)):
+            if exp is None:
+                continue
+            elif isinstance(exp, str):
+                self.assertSubstring(exp, out, "Line %d: %r not in %r"
+                                     % (line_number, exp, out))
+            elif isinstance(exp, REGEX_PATTERN_TYPE):
+                self.failUnless(exp.match(out),
+                                "Line %d: %r did not match string %r"
+                                % (line_number, exp.pattern, out))
+            else:
+                raise TypeError("don't know what to do with object %r"
+                                % (exp,))
+
+
+class TestTestResult(unittest.SynchronousTestCase):
+    def setUp(self):
+        self.result = reporter.TestResult()
+
+    def test_pyunitAddError(self):
+        # pyunit passes an exc_info tuple directly to addError
+        try:
+            raise RuntimeError('foo')
+        except RuntimeError, excValue:
+            self.result.addError(self, sys.exc_info())
+        failure = self.result.errors[0][1]
+        self.assertEqual(excValue, failure.value)
+        self.assertEqual(RuntimeError, failure.type)
+
+    def test_pyunitAddFailure(self):
+        # pyunit passes an exc_info tuple directly to addFailure
+        try:
+            raise self.failureException('foo')
+        except self.failureException, excValue:
+            self.result.addFailure(self, sys.exc_info())
+        failure = self.result.failures[0][1]
+        self.assertEqual(excValue, failure.value)
+        self.assertEqual(self.failureException, failure.type)
+
+
+class TestReporterRealtime(TestTestResult):
+    def setUp(self):
+        output = StringIO.StringIO()
+        self.result = reporter.Reporter(output, realtime=True)
+
+
+class TestErrorReporting(StringTest):
+    doubleSeparator = re.compile(r'^=+$')
+
+    def setUp(self):
+        self.loader = runner.TestLoader()
+        self.output = StringIO.StringIO()
+        self.result = reporter.Reporter(self.output)
+
+    def getOutput(self, suite):
+        result = self.getResult(suite)
+        result.done()
+        return self.output.getvalue()
+
+    def getResult(self, suite):
+        suite.run(self.result)
+        return self.result
+
+    def test_formatErroredMethod(self):
+        """
+        A test method which runs and has an error recorded against it is
+        reported in the output stream with the I{ERROR} tag along with a summary
+        of what error was reported and the ID of the test.
+        """
+        cls = erroneous.SynchronousTestFailureInSetUp
+        suite = self.loader.loadClass(cls)
+        output = self.getOutput(suite).splitlines()
+        match = [
+            self.doubleSeparator,
+            '[ERROR]',
+            'Traceback (most recent call last):',
+            re.compile(r'^\s+File .*erroneous\.py., line \d+, in setUp$'),
+            re.compile(r'^\s+raise FoolishError.'
+                       r'.I am a broken setUp method..$'),
+            ('twisted.trial.test.erroneous.FoolishError: '
+             'I am a broken setUp method'),
+            '%s.%s.test_noop' % (cls.__module__, cls.__name__)]
+        self.stringComparison(match, output)
+
+
+    def test_formatFailedMethod(self):
+        """
+        A test method which runs and has a failure recorded against it is
+        reported in the output stream with the I{FAIL} tag along with a summary
+        of what failure was reported and the ID of the test.
+        """
+        suite = self.loader.loadMethod(erroneous.TestRegularFail.test_fail)
+        output = self.getOutput(suite).splitlines()
+        match = [
+            self.doubleSeparator,
+            '[FAIL]',
+            'Traceback (most recent call last):',
+            re.compile(r'^\s+File .*erroneous\.py., line \d+, in test_fail$'),
+            re.compile(r'^\s+self\.fail\("I fail"\)$'),
+            'twisted.trial.unittest.FailTest: I fail',
+            'twisted.trial.test.erroneous.TestRegularFail.test_fail',
+            ]
+        self.stringComparison(match, output)
+
+
+    def test_doctestError(self):
+        """
+        A problem encountered while running a doctest is reported in the output
+        stream with a I{FAIL} or I{ERROR} tag along with a summary of what
+        problem was encountered and the ID of the test.
+        """
+        from twisted.trial.test import erroneous
+        suite = unittest.decorate(
+            self.loader.loadDoctests(erroneous), itrial.ITestCase)
+        output = self.getOutput(suite)
+        path = 'twisted.trial.test.erroneous.unexpectedException'
+        for substring in ['1/0', 'ZeroDivisionError',
+                          'Exception raised:', path]:
+            self.assertSubstring(substring, output)
+        self.failUnless(re.search('Fail(ed|ure in) example:', output),
+                        "Couldn't match 'Failure in example: ' "
+                        "or 'Failed example: '")
+        expect = [self.doubleSeparator,
+                  re.compile(r'\[(ERROR|FAIL)\]')]
+        self.stringComparison(expect, output.splitlines())
+
+
+    def test_hiddenException(self):
+        """
+        Check that errors in C{DelayedCall}s get reported, even if the
+        test already has a failure.
+
+        Only really necessary for testing the deprecated style of tests that
+        use iterate() directly. See
+        L{erroneous.DelayedCall.testHiddenException} for more details.
+        """
+        test = erroneous.DelayedCall('testHiddenException')
+        output = self.getOutput(test).splitlines()
+        match = [
+            self.doubleSeparator,
+            '[FAIL]',
+            'Traceback (most recent call last):',
+            # Some irrelevant trial implementation details leak into the traceback:
+            re.compile(r'^\s+File .*$'),
+            re.compile(r'^\s+.*$'),
+            re.compile(r'^\s+File .*$'),
+            re.compile(r'^\s+.*$'),
+            re.compile(r'^\s+File .*$'),
+            re.compile(r'^\s+.*$'),
+            re.compile(r'^\s+File .*erroneous\.py., line \d+, in '
+                       'testHiddenException$'),
+            re.compile(r'^\s+self\.fail\("Deliberate failure to mask the '
+                       'hidden exception"\)$'),
+            'twisted.trial.unittest.FailTest: '
+            'Deliberate failure to mask the hidden exception',
+            'twisted.trial.test.erroneous.DelayedCall.testHiddenException',
+            self.doubleSeparator,
+            '[ERROR]',
+            'Traceback (most recent call last):',
+            re.compile(r'^\s+File .* in runUntilCurrent'),
+            re.compile(r'^\s+.*'),
+            re.compile('^\s+File .*erroneous\.py", line \d+, in go'),
+            re.compile('^\s+raise RuntimeError\(self.hiddenExceptionMsg\)'),
+            'exceptions.RuntimeError: something blew up',
+            'twisted.trial.test.erroneous.DelayedCall.testHiddenException',
+            ]
+        self.stringComparison(match, output)
+
+
+
+class TestUncleanWarningWrapperErrorReporting(TestErrorReporting):
+    """
+    Tests that the L{UncleanWarningsReporterWrapper} can sufficiently proxy
+    IReporter failure and error reporting methods to a L{reporter.Reporter}.
+    """
+    def setUp(self):
+        self.loader = runner.TestLoader()
+        self.output = StringIO.StringIO()
+        self.result = UncleanWarningsReporterWrapper(
+            reporter.Reporter(self.output))
+
+
+
+class TracebackHandling(unittest.SynchronousTestCase):
+    def getErrorFrames(self, test):
+        stream = StringIO.StringIO()
+        result = reporter.Reporter(stream)
+        test.run(result)
+        bads = result.failures + result.errors
+        assert len(bads) == 1
+        assert bads[0][0] == test
+        return result._trimFrames(bads[0][1].frames)
+
+    def checkFrames(self, observedFrames, expectedFrames):
+        for observed, expected in zip(observedFrames, expectedFrames):
+            self.assertEqual(observed[0], expected[0])
+            observedSegs = os.path.splitext(observed[1])[0].split(os.sep)
+            expectedSegs = expected[1].split('/')
+            self.assertEqual(observedSegs[-len(expectedSegs):],
+                             expectedSegs)
+        self.assertEqual(len(observedFrames), len(expectedFrames))
+
+    def test_basic(self):
+        test = erroneous.TestRegularFail('test_fail')
+        frames = self.getErrorFrames(test)
+        self.checkFrames(frames,
+                         [('test_fail', 'twisted/trial/test/erroneous')])
+
+    def test_subroutine(self):
+        test = erroneous.TestRegularFail('test_subfail')
+        frames = self.getErrorFrames(test)
+        self.checkFrames(frames,
+                         [('test_subfail', 'twisted/trial/test/erroneous'),
+                          ('subroutine', 'twisted/trial/test/erroneous')])
+
+    def test_deferred(self):
+        test = erroneous.TestFailureInDeferredChain('test_fail')
+        frames = self.getErrorFrames(test)
+        self.checkFrames(frames,
+                         [('_later', 'twisted/trial/test/erroneous')])
+
+    def test_noFrames(self):
+        result = reporter.Reporter(None)
+        self.assertEqual([], result._trimFrames([]))
+
+    def test_oneFrame(self):
+        result = reporter.Reporter(None)
+        self.assertEqual(['fake frame'], result._trimFrames(['fake frame']))
+
+
+class FormatFailures(StringTest):
+    def setUp(self):
+        try:
+            raise RuntimeError('foo')
+        except RuntimeError:
+            self.f = Failure()
+        self.f.frames = [
+            ['foo', 'foo/bar.py', 5, [('x', 5)], [('y', 'orange')]],
+            ['qux', 'foo/bar.py', 10, [('a', 'two')], [('b', 'MCMXCIX')]]
+            ]
+        self.stream = StringIO.StringIO()
+        self.result = reporter.Reporter(self.stream)
+
+    def test_formatDefault(self):
+        tb = self.result._formatFailureTraceback(self.f)
+        self.stringComparison([
+            'Traceback (most recent call last):',
+            '  File "foo/bar.py", line 5, in foo',
+            re.compile(r'^\s*$'),
+            '  File "foo/bar.py", line 10, in qux',
+            re.compile(r'^\s*$'),
+            'RuntimeError: foo'], tb.splitlines())
+
+    def test_formatString(self):
+        tb = '''
+  File "twisted/trial/unittest.py", line 256, in failUnlessSubstring
+    return self.failUnlessIn(substring, astring, msg)
+exceptions.TypeError: iterable argument required
+
+'''
+        expected = '''
+  File "twisted/trial/unittest.py", line 256, in failUnlessSubstring
+    return self.failUnlessIn(substring, astring, msg)
+exceptions.TypeError: iterable argument required
+'''
+        formatted = self.result._formatFailureTraceback(tb)
+        self.assertEqual(expected, formatted)
+
+    def test_mutation(self):
+        frames = self.f.frames[:]
+        # The call shouldn't mutate the frames.
+        self.result._formatFailureTraceback(self.f)
+        self.assertEqual(self.f.frames, frames)
+
+
+class PyunitTestNames(unittest.SynchronousTestCase):
+    def setUp(self):
+        self.stream = StringIO.StringIO()
+        self.test = sample.PyunitTest('test_foo')
+
+    def test_verboseReporter(self):
+        result = reporter.VerboseTextReporter(self.stream)
+        result.startTest(self.test)
+        output = self.stream.getvalue()
+        self.assertEqual(
+            output, 'twisted.trial.test.sample.PyunitTest.test_foo ... ')
+
+    def test_treeReporter(self):
+        result = reporter.TreeReporter(self.stream)
+        result.startTest(self.test)
+        output = self.stream.getvalue()
+        output = output.splitlines()[-1].strip()
+        self.assertEqual(output, result.getDescription(self.test) + ' ...')
+
+    def test_getDescription(self):
+        result = reporter.TreeReporter(self.stream)
+        output = result.getDescription(self.test)
+        self.assertEqual(output, 'test_foo')
+
+
+    def test_minimalReporter(self):
+        """
+        The summary of L{reporter.MinimalReporter} is a simple list of numbers,
+        indicating how many tests ran, how many failed, etc.
+
+        The numbers represent:
+         * the run time of the tests
+         * the number of tests run, printed 2 times for legacy reasons
+         * the number of errors
+         * the number of failures
+         * the number of skips
+        """
+        result = reporter.MinimalReporter(self.stream)
+        self.test.run(result)
+        result._printSummary()
+        output = self.stream.getvalue().strip().split(' ')
+        self.assertEqual(output[1:], ['1', '1', '0', '0', '0'])
+
+
+    def test_minimalReporterTime(self):
+        """
+        L{reporter.MinimalReporter} reports the time taken to run the tests as
+        the first value in its output.
+        """
+        times = [1.0, 1.2, 1.5, 1.9]
+        result = reporter.MinimalReporter(self.stream)
+        result._getTime = lambda: times.pop(0)
+        self.test.run(result)
+        result._printSummary()
+        output = self.stream.getvalue().strip().split(' ')
+        timer = output[0]
+        self.assertEqual(timer, "0.7")
+
+
+    def test_emptyMinimalReporter(self):
+        """
+        The summary of L{reporter.MinimalReporter} is a list of zeroes when no
+        test is actually run.
+        """
+        result = reporter.MinimalReporter(self.stream)
+        result._printSummary()
+        output = self.stream.getvalue().strip().split(' ')
+        self.assertEqual(output, ['0', '0', '0', '0', '0', '0'])
+
+
+
+class TestDirtyReactor(unittest.SynchronousTestCase):
+    """
+    The trial script has an option to treat L{DirtyReactorAggregateError}s as
+    warnings, as a migration tool for test authors. It causes a wrapper to be
+    placed around reporters that replaces L{DirtyReactorAggregateError}s with
+    warnings.
+    """
+
+    def setUp(self):
+        self.dirtyError = Failure(
+            util.DirtyReactorAggregateError(['foo'], ['bar']))
+        self.output = StringIO.StringIO()
+        self.test = TestDirtyReactor('test_errorByDefault')
+
+
+    def test_errorByDefault(self):
+        """
+        L{DirtyReactorAggregateError}s are reported as errors with the default
+        Reporter.
+        """
+        result = reporter.Reporter(stream=self.output)
+        result.addError(self.test, self.dirtyError)
+        self.assertEqual(len(result.errors), 1)
+        self.assertEqual(result.errors[0][1], self.dirtyError)
+
+
+    def test_warningsEnabled(self):
+        """
+        L{DirtyReactorAggregateError}s are reported as warnings when using
+        the L{UncleanWarningsReporterWrapper}.
+        """
+        result = UncleanWarningsReporterWrapper(
+            reporter.Reporter(stream=self.output))
+        self.assertWarns(UserWarning, self.dirtyError.getErrorMessage(),
+                         reporter.__file__,
+                         result.addError, self.test, self.dirtyError)
+
+
+    def test_warningsMaskErrors(self):
+        """
+        L{DirtyReactorAggregateError}s are I{not} reported as errors if the
+        L{UncleanWarningsReporterWrapper} is used.
+        """
+        result = UncleanWarningsReporterWrapper(
+            reporter.Reporter(stream=self.output))
+        self.assertWarns(UserWarning, self.dirtyError.getErrorMessage(),
+                         reporter.__file__,
+                         result.addError, self.test, self.dirtyError)
+        self.assertEqual(result._originalReporter.errors, [])
+
+
+    def test_dealsWithThreeTuples(self):
+        """
+        Some annoying stuff can pass three-tuples to addError instead of
+        Failures (like PyUnit). The wrapper, of course, handles this case,
+        since it is a part of L{twisted.trial.itrial.IReporter}! But it does
+        not convert L{DirtyReactorAggregateError} to warnings in this case,
+        because nobody should be passing those in the form of three-tuples.
+        """
+        result = UncleanWarningsReporterWrapper(
+            reporter.Reporter(stream=self.output))
+        result.addError(self.test,
+                        (self.dirtyError.type, self.dirtyError.value, None))
+        self.assertEqual(len(result._originalReporter.errors), 1)
+        self.assertEqual(result._originalReporter.errors[0][1].type,
+                          self.dirtyError.type)
+        self.assertEqual(result._originalReporter.errors[0][1].value,
+                          self.dirtyError.value)
+
+
+
+class TrialTestNames(unittest.SynchronousTestCase):
+
+    def setUp(self):
+        self.stream = StringIO.StringIO()
+        self.test = sample.FooTest('test_foo')
+
+    def test_verboseReporter(self):
+        result = reporter.VerboseTextReporter(self.stream)
+        result.startTest(self.test)
+        output = self.stream.getvalue()
+        self.assertEqual(output, self.test.id() + ' ... ')
+
+    def test_treeReporter(self):
+        result = reporter.TreeReporter(self.stream)
+        result.startTest(self.test)
+        output = self.stream.getvalue()
+        output = output.splitlines()[-1].strip()
+        self.assertEqual(output, result.getDescription(self.test) + ' ...')
+
+    def test_treeReporterWithDocstrings(self):
+        """A docstring"""
+        result = reporter.TreeReporter(self.stream)
+        self.assertEqual(result.getDescription(self),
+                         'test_treeReporterWithDocstrings')
+
+    def test_getDescription(self):
+        result = reporter.TreeReporter(self.stream)
+        output = result.getDescription(self.test)
+        self.assertEqual(output, "test_foo")
+
+
+class TestSkip(unittest.SynchronousTestCase):
+    """
+    Tests for L{reporter.Reporter}'s handling of skips.
+    """
+    def setUp(self):
+        self.stream = StringIO.StringIO()
+        self.result = reporter.Reporter(self.stream)
+        self.test = sample.FooTest('test_foo')
+
+    def _getSkips(self, result):
+        """
+        Get the number of skips that happened to a reporter.
+        """
+        return len(result.skips)
+
+    def test_accumulation(self):
+        self.result.addSkip(self.test, 'some reason')
+        self.assertEqual(self._getSkips(self.result), 1)
+
+    def test_success(self):
+        self.result.addSkip(self.test, 'some reason')
+        self.assertEqual(True, self.result.wasSuccessful())
+
+
+    def test_summary(self):
+        """
+        The summary of a successful run with skips indicates that the test
+        suite passed and includes the number of skips.
+        """
+        self.result.addSkip(self.test, 'some reason')
+        self.result.done()
+        output = self.stream.getvalue().splitlines()[-1]
+        prefix = 'PASSED '
+        self.failUnless(output.startswith(prefix))
+        self.assertEqual(output[len(prefix):].strip(), '(skips=1)')
+
+
+    def test_basicErrors(self):
+        """
+        The output at the end of a test run with skips includes the reasons
+        for skipping those tests.
+        """
+        self.result.addSkip(self.test, 'some reason')
+        self.result.done()
+        output = self.stream.getvalue().splitlines()[3]
+        self.assertEqual(output.strip(), 'some reason')
+
+
+    def test_booleanSkip(self):
+        """
+        Tests can be skipped without specifying a reason by setting the 'skip'
+        attribute to True. When this happens, the test output includes 'True'
+        as the reason.
+        """
+        self.result.addSkip(self.test, True)
+        self.result.done()
+        output = self.stream.getvalue().splitlines()[3]
+        self.assertEqual(output, 'True')
+
+
+    def test_exceptionSkip(self):
+        """
+        Skips can be raised as errors. When this happens, the error is
+        included in the summary at the end of the test suite.
+        """
+        try:
+            1/0
+        except Exception, e:
+            error = e
+        self.result.addSkip(self.test, error)
+        self.result.done()
+        output = '\n'.join(self.stream.getvalue().splitlines()[3:5]).strip()
+        self.assertEqual(output, str(e))
+
+
+class UncleanWarningSkipTest(TestSkip):
+    """
+    Tests for skips on a L{reporter.Reporter} wrapped by an
+    L{UncleanWarningsReporterWrapper}.
+    """
+    def setUp(self):
+        TestSkip.setUp(self)
+        self.result = UncleanWarningsReporterWrapper(self.result)
+
+    def _getSkips(self, result):
+        """
+        Get the number of skips that happened to a reporter inside of an
+        unclean warnings reporter wrapper.
+        """
+        return len(result._originalReporter.skips)
+
+
+
+class TodoTest(unittest.SynchronousTestCase):
+    """
+    Tests for L{reporter.Reporter}'s handling of todos.
+    """
+
+    def setUp(self):
+        self.stream = StringIO.StringIO()
+        self.result = reporter.Reporter(self.stream)
+        self.test = sample.FooTest('test_foo')
+
+
+    def _getTodos(self, result):
+        """
+        Get the number of todos that happened to a reporter.
+        """
+        return len(result.expectedFailures)
+
+
+    def _getUnexpectedSuccesses(self, result):
+        """
+        Get the number of unexpected successes that happened to a reporter.
+        """
+        return len(result.unexpectedSuccesses)
+
+
+    def test_accumulation(self):
+        """
+        L{reporter.Reporter} accumulates the expected failures that it
+        is notified of.
+        """
+        self.result.addExpectedFailure(self.test, Failure(Exception()),
+                                       makeTodo('todo!'))
+        self.assertEqual(self._getTodos(self.result), 1)
+
+
+    def test_success(self):
+        """
+        A test run is still successful even if there are expected failures.
+        """
+        self.result.addExpectedFailure(self.test, Failure(Exception()),
+                                       makeTodo('todo!'))
+        self.assertEqual(True, self.result.wasSuccessful())
+
+
+    def test_unexpectedSuccess(self):
+        """
+        A test which is marked as todo but succeeds will have an unexpected
+        success reported to its result. A test run is still successful even
+        when this happens.
+        """
+        self.result.addUnexpectedSuccess(self.test, makeTodo("Heya!"))
+        self.assertEqual(True, self.result.wasSuccessful())
+        self.assertEqual(self._getUnexpectedSuccesses(self.result), 1)
+
+
+    def test_summary(self):
+        """
+        The reporter's C{printSummary} method should print the number of
+        expected failures that occurred.
+        """
+        self.result.addExpectedFailure(self.test, Failure(Exception()),
+                                       makeTodo('some reason'))
+        self.result.done()
+        output = self.stream.getvalue().splitlines()[-1]
+        prefix = 'PASSED '
+        self.failUnless(output.startswith(prefix))
+        self.assertEqual(output[len(prefix):].strip(),
+                         '(expectedFailures=1)')
+
+
+    def test_basicErrors(self):
+        """
+        The reporter's L{printErrors} method should include the value of the
+        Todo.
+        """
+        self.result.addExpectedFailure(self.test, Failure(Exception()),
+                                       makeTodo('some reason'))
+        self.result.done()
+        output = self.stream.getvalue().splitlines()[3].strip()
+        self.assertEqual(output, "Reason: 'some reason'")
+
+
+    def test_booleanTodo(self):
+        """
+        Booleans CAN'T be used as the value of a todo. Maybe this sucks. This
+        is a test for current behavior, not a requirement.
+        """
+        self.result.addExpectedFailure(self.test, Failure(Exception()),
+                                       makeTodo(True))
+        self.assertRaises(Exception, self.result.done)
+
+
+    def test_exceptionTodo(self):
+        """
+        The exception for expected failures should be shown in the
+        C{printErrors} output.
+        """
+        try:
+            1/0
+        except Exception, e:
+            error = e
+        self.result.addExpectedFailure(self.test, Failure(error),
+                                       makeTodo("todo!"))
+        self.result.done()
+        output = '\n'.join(self.stream.getvalue().splitlines()[3:]).strip()
+        self.assertTrue(str(e) in output)
+
+
+
+class UncleanWarningTodoTest(TodoTest):
+    """
+    Tests for L{UncleanWarningsReporterWrapper}'s handling of todos.
+    """
+
+    def setUp(self):
+        TodoTest.setUp(self)
+        self.result = UncleanWarningsReporterWrapper(self.result)
+
+
+    def _getTodos(self, result):
+        """
+        Get the number of todos that happened to a reporter inside of an
+        unclean warnings reporter wrapper.
+        """
+        return len(result._originalReporter.expectedFailures)
+
+
+    def _getUnexpectedSuccesses(self, result):
+        """
+        Get the number of unexpected successes that happened to a reporter
+        inside of an unclean warnings reporter wrapper.
+        """
+        return len(result._originalReporter.unexpectedSuccesses)
+
+
+
+class MockColorizer:
+    """
+    Used by TestTreeReporter to make sure that output is colored correctly.
+    """
+
+    def __init__(self, stream):
+        self.log = []
+
+
+    def write(self, text, color):
+        self.log.append((color, text))
+
+
+
+class TestTreeReporter(unittest.SynchronousTestCase):
+    def setUp(self):
+        self.test = sample.FooTest('test_foo')
+        self.stream = StringIO.StringIO()
+        self.result = reporter.TreeReporter(self.stream)
+        self.result._colorizer = MockColorizer(self.stream)
+        self.log = self.result._colorizer.log
+
+    def makeError(self):
+        try:
+            1/0
+        except ZeroDivisionError:
+            f = Failure()
+        return f
+
+    def test_cleanupError(self):
+        """
+        Run cleanupErrors and check that the output is correct, and colored
+        correctly.
+        """
+        f = self.makeError()
+        self.result.cleanupErrors(f)
+        color, text = self.log[0]
+        self.assertEqual(color.strip(), self.result.ERROR)
+        self.assertEqual(text.strip(), 'cleanup errors')
+        color, text = self.log[1]
+        self.assertEqual(color.strip(), self.result.ERROR)
+        self.assertEqual(text.strip(), '[ERROR]')
+    test_cleanupError = suppressWarnings(
+        test_cleanupError,
+        util.suppress(category=reporter.BrokenTestCaseWarning),
+        util.suppress(category=DeprecationWarning))
+
+
+    def test_upDownError(self):
+        """
+        Run upDownError and check that the output is correct and colored
+        correctly.
+        """
+        self.result.upDownError("method", None, None, False)
+        color, text = self.log[0]
+        self.assertEqual(color.strip(), self.result.ERROR)
+        self.assertEqual(text.strip(), 'method')
+    test_upDownError = suppressWarnings(
+        test_upDownError,
+        util.suppress(category=DeprecationWarning,
+                      message="upDownError is deprecated in Twisted 8.0."))
+
+
+    def test_summaryColoredSuccess(self):
+        """
+        The summary in case of success should report the correct count of successes
+        and be colored properly.
+        """
+        self.result.addSuccess(self.test)
+        self.result.done()
+        self.assertEqual(self.log[1], (self.result.SUCCESS, 'PASSED'))
+        self.assertEqual(
+            self.stream.getvalue().splitlines()[-1].strip(), "(successes=1)")
+
+
+    def test_summaryColoredFailure(self):
+        """
+        The summary in case of failure should report the correct count of errors
+        and be colored properly.
+        """
+        try:
+            raise RuntimeError('foo')
+        except RuntimeError:
+            self.result.addError(self, sys.exc_info())
+        self.result.done()
+        self.assertEqual(self.log[1], (self.result.FAILURE, 'FAILED'))
+        self.assertEqual(
+            self.stream.getvalue().splitlines()[-1].strip(), "(errors=1)")
+
+
+    def test_getPrelude(self):
+        """
+        The tree needs to get the segments of the test ID that correspond
+        to the module and class that it belongs to.
+        """
+        self.assertEqual(
+            ['foo.bar', 'baz'],
+            self.result._getPreludeSegments('foo.bar.baz.qux'))
+        self.assertEqual(
+            ['foo', 'bar'],
+            self.result._getPreludeSegments('foo.bar.baz'))
+        self.assertEqual(
+            ['foo'],
+            self.result._getPreludeSegments('foo.bar'))
+        self.assertEqual([], self.result._getPreludeSegments('foo'))
+
+
+    def test_groupResults(self):
+        """
+        If two different tests have the same error, L{Reporter._groupResults}
+        includes them together in one of the tuples in the list it returns.
+        """
+        try:
+            raise RuntimeError('foo')
+        except RuntimeError:
+            self.result.addError(self, sys.exc_info())
+            self.result.addError(self.test, sys.exc_info())
+        try:
+            raise RuntimeError('bar')
+        except RuntimeError:
+            extra = sample.FooTest('test_bar')
+            self.result.addError(extra, sys.exc_info())
+        self.result.done()
+        grouped = self.result._groupResults(
+            self.result.errors, self.result._formatFailureTraceback)
+        self.assertEqual(grouped[0][1], [self, self.test])
+        self.assertEqual(grouped[1][1], [extra])
+
+
+    def test_printResults(self):
+        """
+        L{Reporter._printResults} uses the results list and formatter callable
+        passed to it to produce groups of results to write to its output stream.
+        """
+        def formatter(n):
+            return str(n) + '\n'
+        first = sample.FooTest('test_foo')
+        second = sample.FooTest('test_bar')
+        third = sample.PyunitTest('test_foo')
+        self.result._printResults(
+            'FOO', [(first, 1), (second, 1), (third, 2)], formatter)
+        self.assertEqual(
+            self.stream.getvalue(),
+            "%(double separator)s\n"
+            "FOO\n"
+            "1\n"
+            "\n"
+            "%(first)s\n"
+            "%(second)s\n"
+            "%(double separator)s\n"
+            "FOO\n"
+            "2\n"
+            "\n"
+            "%(third)s\n" % {
+                'double separator': self.result._doubleSeparator,
+                'first': first.id(),
+                'second': second.id(),
+                'third': third.id(),
+                })
+
+
+
+class TestReporterInterface(unittest.SynchronousTestCase):
+    """
+    Tests for the bare interface of a trial reporter.
+
+    Subclass this test case and provide a different 'resultFactory' to test
+    that a particular reporter implementation will work with the rest of
+    Trial.
+
+    @cvar resultFactory: A callable that returns a reporter to be tested. The
+        callable must take the same parameters as L{reporter.Reporter}.
+    """
+
+    resultFactory = reporter.Reporter
+
+    def setUp(self):
+        self.test = sample.FooTest('test_foo')
+        self.stream = StringIO.StringIO()
+        self.publisher = log.LogPublisher()
+        self.result = self.resultFactory(self.stream, publisher=self.publisher)
+
+
+    def test_shouldStopInitiallyFalse(self):
+        """
+        shouldStop is False to begin with.
+        """
+        self.assertEqual(False, self.result.shouldStop)
+
+
+    def test_shouldStopTrueAfterStop(self):
+        """
+        shouldStop becomes True as soon as someone calls stop().
+        """
+        self.result.stop()
+        self.assertEqual(True, self.result.shouldStop)
+
+
+    def test_wasSuccessfulInitiallyTrue(self):
+        """
+        wasSuccessful() is True when there have been no results reported.
+        """
+        self.assertEqual(True, self.result.wasSuccessful())
+
+
+    def test_wasSuccessfulTrueAfterSuccesses(self):
+        """
+        wasSuccessful() is True when there have been only successes, False
+        otherwise.
+        """
+        self.result.addSuccess(self.test)
+        self.assertEqual(True, self.result.wasSuccessful())
+
+
+    def test_wasSuccessfulFalseAfterErrors(self):
+        """
+        wasSuccessful() becomes False after errors have been reported.
+        """
+        try:
+            1 / 0
+        except ZeroDivisionError:
+            self.result.addError(self.test, sys.exc_info())
+        self.assertEqual(False, self.result.wasSuccessful())
+
+
+    def test_wasSuccessfulFalseAfterFailures(self):
+        """
+        wasSuccessful() becomes False after failures have been reported.
+        """
+        try:
+            self.fail("foo")
+        except self.failureException:
+            self.result.addFailure(self.test, sys.exc_info())
+        self.assertEqual(False, self.result.wasSuccessful())
+
+
+
+class TestReporter(TestReporterInterface):
+    """
+    Tests for the base L{reporter.Reporter} class.
+    """
+
+    def setUp(self):
+        TestReporterInterface.setUp(self)
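+        # Replace the reporter's clock with a deterministic counter so that
+        # timing-related assertions (such as _lastTime in test_startStop) do
+        # not depend on the real clock.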
+        self._timer = 0
+        self.result._getTime = self._getTime
+
+
+    def _getTime(self):
+        self._timer += 1
+        return self._timer
+
+
+    def test_startStop(self):
+        self.result.startTest(self.test)
+        self.result.stopTest(self.test)
+        self.assertTrue(self.result._lastTime > 0)
+        self.assertEqual(self.result.testsRun, 1)
+        self.assertEqual(self.result.wasSuccessful(), True)
+
+
+    def test_brokenStream(self):
+        """
+        Test that the reporter safely writes to its stream.
+        """
+        result = self.resultFactory(stream=BrokenStream(self.stream))
+        result._writeln("Hello")
+        self.assertEqual(self.stream.getvalue(), 'Hello\n')
+        self.stream.truncate(0)
+        result._writeln("Hello %s!", 'World')
+        self.assertEqual(self.stream.getvalue(), 'Hello World!\n')
+
+
+    def test_printErrorsDeprecated(self):
+        """
+        L{IReporter.printErrors} was deprecated in Twisted 8.0.
+        """
+        def f():
+            self.result.printErrors()
+        self.assertWarns(
+            DeprecationWarning, "printErrors is deprecated in Twisted 8.0.",
+            __file__, f)
+
+
+    def test_printSummaryDeprecated(self):
+        """
+        L{IReporter.printSummary} was deprecated in Twisted 8.0.
+        """
+        def f():
+            self.result.printSummary()
+        self.assertWarns(
+            DeprecationWarning, "printSummary is deprecated in Twisted 8.0.",
+            __file__, f)
+
+
+    def test_writeDeprecated(self):
+        """
+        L{IReporter.write} was deprecated in Twisted 8.0.
+        """
+        def f():
+            self.result.write("")
+        self.assertWarns(
+            DeprecationWarning, "write is deprecated in Twisted 8.0.",
+            __file__, f)
+
+
+    def test_writelnDeprecated(self):
+        """
+        L{IReporter.writeln} was deprecated in Twisted 8.0.
+        """
+        def f():
+            self.result.writeln("")
+        self.assertWarns(
+            DeprecationWarning, "writeln is deprecated in Twisted 8.0.",
+            __file__, f)
+
+
+    def test_separatorDeprecated(self):
+        """
+        L{IReporter.separator} was deprecated in Twisted 8.0.
+        """
+        def f():
+            return self.result.separator
+        self.assertWarns(
+            DeprecationWarning, "separator is deprecated in Twisted 8.0.",
+            __file__, f)
+
+
+    def test_streamDeprecated(self):
+        """
+        L{IReporter.stream} was deprecated in Twisted 8.0.
+        """
+        def f():
+            return self.result.stream
+        self.assertWarns(
+            DeprecationWarning, "stream is deprecated in Twisted 8.0.",
+            __file__, f)
+
+
+    def test_upDownErrorDeprecated(self):
+        """
+        L{IReporter.upDownError} was deprecated in Twisted 8.0.
+        """
+        def f():
+            self.result.upDownError(None, None, None, None)
+        self.assertWarns(
+            DeprecationWarning, "upDownError is deprecated in Twisted 8.0.",
+            __file__, f)
+
+
+    def test_warning(self):
+        """
+        L{reporter.Reporter} observes warnings emitted by the Twisted log
+        system and writes them to its output stream.
+        """
+        message = RuntimeWarning("some warning text")
+        category = 'exceptions.RuntimeWarning'
+        filename = "path/to/some/file.py"
+        lineno = 71
+        self.publisher.msg(
+            warning=message, category=category,
+            filename=filename, lineno=lineno)
+        self.assertEqual(
+            self.stream.getvalue(),
+            "%s:%d: %s: %s\n" % (
+                filename, lineno, category.split('.')[-1], message))
+
+
+    def test_duplicateWarningSuppressed(self):
+        """
+        A warning emitted twice within a single test is only written to the
+        stream once.
+        """
+        # Emit the warning and assert that it shows up
+        self.test_warning()
+        # Emit the warning again and assert that the stream still only has one
+        # warning on it.
+        self.test_warning()
+
+
+    def test_warningEmittedForNewTest(self):
+        """
+        A warning emitted again after a new test has started is written to the
+        stream again.
+        """
+        test = self.__class__('test_warningEmittedForNewTest')
+        self.result.startTest(test)
+
+        # Clear whatever startTest wrote to the stream
+        self.stream.seek(0)
+        self.stream.truncate()
+
+        # Emit a warning (and incidentally, assert that it was emitted)
+        self.test_warning()
+
+        # Clean up from the first warning to simplify the rest of the
+        # assertions.
+        self.stream.seek(0)
+        self.stream.truncate()
+
+        # Stop the first test and start another one (it just happens to be the
+        # same one, but that doesn't matter)
+        self.result.stopTest(test)
+        self.result.startTest(test)
+
+        # Clean up the stopTest/startTest output
+        self.stream.seek(0)
+        self.stream.truncate()
+
+        # Emit the warning again and make sure it shows up
+        self.test_warning()
+
+
+    def test_stopObserving(self):
+        """
+        L{reporter.Reporter} stops observing log events when its C{done} method
+        is called.
+        """
+        self.result.done()
+        self.stream.seek(0)
+        self.stream.truncate()
+        self.publisher.msg(
+            warning=RuntimeWarning("some message"),
+            category='exceptions.RuntimeWarning',
+            filename="file/name.py", lineno=17)
+        self.assertEqual(self.stream.getvalue(), "")
+
+
+
+class TestSafeStream(unittest.SynchronousTestCase):
+    def test_safe(self):
+        """
+        Test that L{reporter.SafeStream} successfully writes to its original
+        stream even if an interrupt happens during the write.
+        """
+        stream = StringIO.StringIO()
+        broken = BrokenStream(stream)
+        safe = reporter.SafeStream(broken)
+        safe.write("Hello")
+        self.assertEqual(stream.getvalue(), "Hello")
+
+
+
+class TestSubunitReporter(TestReporterInterface):
+    """
+    Tests for the subunit reporter.
+
+    This just tests that the subunit reporter implements the basic interface.
+    """
+
+    resultFactory = reporter.SubunitReporter
+
+
+    def setUp(self):
+        if reporter.TestProtocolClient is None:
+            raise SkipTest(
+                "Subunit not installed, cannot test SubunitReporter")
+        TestReporterInterface.setUp(self)
+
+
+    def assertForwardsToSubunit(self, methodName, *args, **kwargs):
+        """
+        Assert that 'methodName' on L{SubunitReporter} forwards to the
+        equivalent method on subunit.
+
+        Checks that the return value from subunit is returned from the
+        L{SubunitReporter} and that the reporter writes the same data to its
+        stream as subunit does to its own.
+
+        Assumes that the method on subunit has the same name as the method on
+        L{SubunitReporter}.
+        """
+        stream = StringIO.StringIO()
+        subunitClient = reporter.TestProtocolClient(stream)
+        subunitReturn = getattr(subunitClient, methodName)(*args, **kwargs)
+        subunitOutput = stream.getvalue()
+        reporterReturn = getattr(self.result, methodName)(*args, **kwargs)
+        self.assertEqual(subunitReturn, reporterReturn)
+        self.assertEqual(subunitOutput, self.stream.getvalue())
+
+
+    def removeMethod(self, klass, methodName):
+        """
+        Remove 'methodName' from 'klass'.
+
+        If 'klass' does not have a method named 'methodName', then
+        'removeMethod' succeeds silently.
+
+        If 'klass' does have a method named 'methodName', then it is removed
+        using delattr. Also, methods of the same name are removed from all
+        base classes of 'klass', thus removing the method entirely.
+
+        @param klass: The class to remove the method from.
+        @param methodName: The name of the method to remove.
+        """
+        method = getattr(klass, methodName, None)
+        if method is None:
+            return
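+        # Walk the MRO, deleting the attribute from every class that defines
+        # it directly and scheduling its restoration at cleanup; stop at the
+        # first class where delattr fails.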
+        for base in getmro(klass):
+            try:
+                delattr(base, methodName)
+            except (AttributeError, TypeError):
+                break
+            else:
+                self.addCleanup(setattr, base, methodName, method)
+
+
+    def test_subunitWithoutAddExpectedFailureInstalled(self):
+        """
+        Some versions of subunit don't have "addExpectedFailure". For these
+        versions, we report expected failures as successes.
+        """
+        self.removeMethod(reporter.TestProtocolClient, 'addExpectedFailure')
+        try:
+            1 / 0
+        except ZeroDivisionError:
+            self.result.addExpectedFailure(self.test, sys.exc_info(), "todo")
+        expectedFailureOutput = self.stream.getvalue()
+        self.stream.truncate(0)
+        self.result.addSuccess(self.test)
+        successOutput = self.stream.getvalue()
+        self.assertEqual(successOutput, expectedFailureOutput)
+
+
+    def test_subunitWithoutAddSkipInstalled(self):
+        """
+        Some versions of subunit don't have "addSkip". For these versions, we
+        report skips as successes.
+        """
+        self.removeMethod(reporter.TestProtocolClient, 'addSkip')
+        self.result.addSkip(self.test, "reason")
+        skipOutput = self.stream.getvalue()
+        self.stream.truncate(0)
+        self.result.addSuccess(self.test)
+        successOutput = self.stream.getvalue()
+        self.assertEqual(successOutput, skipOutput)
+
+
+    def test_addExpectedFailurePassedThrough(self):
+        """
+        Some versions of subunit have "addExpectedFailure". For these
+        versions, when we call 'addExpectedFailure' on the test result, we
+        pass the error and test through to the subunit client.
+        """
+        addExpectedFailureCalls = []
+        def addExpectedFailure(test, error):
+            addExpectedFailureCalls.append((test, error))
+
+        # Provide our own addExpectedFailure, whether or not the locally
+        # installed subunit has addExpectedFailure.
+        self.result._subunit.addExpectedFailure = addExpectedFailure
+        try:
+            1 / 0
+        except ZeroDivisionError:
+            exc_info = sys.exc_info()
+            self.result.addExpectedFailure(self.test, exc_info, 'todo')
+        self.assertEqual(addExpectedFailureCalls, [(self.test, exc_info)])
+
+
+    def test_addSkipSendsSubunitAddSkip(self):
+        """
+        Some versions of subunit have "addSkip". For these versions, when we
+        call 'addSkip' on the test result, we pass the test and reason through
+        to the subunit client.
+        """
+        addSkipCalls = []
+        def addSkip(test, reason):
+            addSkipCalls.append((test, reason))
+
+        # Provide our own addSkip, whether or not the locally-installed
+        # subunit has addSkip.
+        self.result._subunit.addSkip = addSkip
+        self.result.addSkip(self.test, 'reason')
+        self.assertEqual(addSkipCalls, [(self.test, 'reason')])
+
+
+    def test_doneDoesNothing(self):
+        """
+        The subunit reporter doesn't need to print out a summary -- the stream
+        of results is everything. Thus, done() does nothing.
+        """
+        self.result.done()
+        self.assertEqual('', self.stream.getvalue())
+
+
+    def test_startTestSendsSubunitStartTest(self):
+        """
+        SubunitReporter.startTest() sends the subunit 'startTest' message.
+        """
+        self.assertForwardsToSubunit('startTest', self.test)
+
+
+    def test_stopTestSendsSubunitStopTest(self):
+        """
+        SubunitReporter.stopTest() sends the subunit 'stopTest' message.
+        """
+        self.assertForwardsToSubunit('stopTest', self.test)
+
+
+    def test_addSuccessSendsSubunitAddSuccess(self):
+        """
+        SubunitReporter.addSuccess() sends the subunit 'addSuccess' message.
+        """
+        self.assertForwardsToSubunit('addSuccess', self.test)
+
+
+    def test_addErrorSendsSubunitAddError(self):
+        """
+        SubunitReporter.addError() sends the subunit 'addError' message.
+        """
+        try:
+            1 / 0
+        except ZeroDivisionError:
+            error = sys.exc_info()
+        self.assertForwardsToSubunit('addError', self.test, error)
+
+
+    def test_addFailureSendsSubunitAddFailure(self):
+        """
+        SubunitReporter.addFailure() sends the subunit 'addFailure' message.
+        """
+        try:
+            self.fail('hello')
+        except self.failureException:
+            failure = sys.exc_info()
+        self.assertForwardsToSubunit('addFailure', self.test, failure)
+
+
+    def test_addUnexpectedSuccessSendsSubunitAddSuccess(self):
+        """
+        SubunitReporter.addUnexpectedSuccess() sends the subunit 'addSuccess'
+        message, since subunit doesn't model unexpected success.
+        """
+        stream = StringIO.StringIO()
+        subunitClient = reporter.TestProtocolClient(stream)
+        subunitClient.addSuccess(self.test)
+        subunitOutput = stream.getvalue()
+        self.result.addUnexpectedSuccess(self.test, 'todo')
+        self.assertEqual(subunitOutput, self.stream.getvalue())
+
+
+    def test_loadTimeErrors(self):
+        """
+        Load-time errors are reported like normal errors.
+        """
+        test = runner.TestLoader().loadByName('doesntexist')
+        test.run(self.result)
+        output = self.stream.getvalue()
+        # Just check that 'doesntexist' is in the output, rather than
+        # assembling the expected stack trace.
+        self.assertIn('doesntexist', output)
+
+
+
+class TestSubunitReporterNotInstalled(unittest.SynchronousTestCase):
+    """
+    Test behaviour when the subunit reporter is not installed.
+    """
+
+    def test_subunitNotInstalled(self):
+        """
+        If subunit is not installed, TestProtocolClient will be None, and
+        SubunitReporter will raise an error when you try to construct it.
+        """
+        stream = StringIO.StringIO()
+        self.patch(reporter, 'TestProtocolClient', None)
+        e = self.assertRaises(Exception, reporter.SubunitReporter, stream)
+        self.assertEqual("Subunit not available", str(e))
+
+
+
+class TestTimingReporter(TestReporter):
+    resultFactory = reporter.TimingTextReporter
+
+
+
+class LoggingReporter(reporter.Reporter):
+    """
+    Simple reporter that stores the last test that was passed to it.
+    """
+
+    def __init__(self, *args, **kwargs):
+        reporter.Reporter.__init__(self, *args, **kwargs)
+        self.test = None
+
+    def addError(self, test, error):
+        self.test = test
+
+    def addExpectedFailure(self, test, failure, todo):
+        self.test = test
+
+    def addFailure(self, test, failure):
+        self.test = test
+
+    def addSkip(self, test, skip):
+        self.test = test
+
+    def addUnexpectedSuccess(self, test, todo):
+        self.test = test
+
+    def startTest(self, test):
+        self.test = test
+
+    def stopTest(self, test):
+        self.test = test
+
+
+
+class TestAdaptedReporter(unittest.SynchronousTestCase):
+    """
+    L{reporter._AdaptedReporter} is a reporter wrapper that wraps all of the
+    tests it receives before passing them on to the original reporter.
+    """
+
+    def setUp(self):
+        self.wrappedResult = self.getWrappedResult()
+
+
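+    # The adapter used throughout these tests simply maps a test to its id();
+    # assertWrapped then checks that the wrapped reporter received that
+    # adapted value rather than the original test object.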
+    def _testAdapter(self, test):
+        return test.id()
+
+
+    def assertWrapped(self, wrappedResult, test):
+        self.assertEqual(wrappedResult._originalReporter.test, self._testAdapter(test))
+
+
+    def getFailure(self, exceptionInstance):
+        """
+        Return a L{Failure} from raising the given exception.
+
+        @param exceptionInstance: The exception to raise.
+        @return: L{Failure}
+        """
+        try:
+            raise exceptionInstance
+        except:
+            return Failure()
+
+
+    def getWrappedResult(self):
+        result = LoggingReporter()
+        return reporter._AdaptedReporter(result, self._testAdapter)
+
+
+    def test_addError(self):
+        """
+        C{addError} wraps its test with the provided adapter.
+        """
+        self.wrappedResult.addError(self, self.getFailure(RuntimeError()))
+        self.assertWrapped(self.wrappedResult, self)
+
+
+    def test_addFailure(self):
+        """
+        C{addFailure} wraps its test with the provided adapter.
+        """
+        self.wrappedResult.addFailure(self, self.getFailure(AssertionError()))
+        self.assertWrapped(self.wrappedResult, self)
+
+
+    def test_addSkip(self):
+        """
+        C{addSkip} wraps its test with the provided adapter.
+        """
+        self.wrappedResult.addSkip(self, self.getFailure(SkipTest('no reason')))
+        self.assertWrapped(self.wrappedResult, self)
+
+
+    def test_startTest(self):
+        """
+        C{startTest} wraps its test with the provided adapter.
+        """
+        self.wrappedResult.startTest(self)
+        self.assertWrapped(self.wrappedResult, self)
+
+
+    def test_stopTest(self):
+        """
+        C{stopTest} wraps its test with the provided adapter.
+        """
+        self.wrappedResult.stopTest(self)
+        self.assertWrapped(self.wrappedResult, self)
+
+
+    def test_addExpectedFailure(self):
+        """
+        C{addExpectedFailure} wraps its test with the provided adapter.
+        """
+        self.wrappedResult.addExpectedFailure(
+            self, self.getFailure(RuntimeError()), Todo("no reason"))
+        self.assertWrapped(self.wrappedResult, self)
+
+
+    def test_addUnexpectedSuccess(self):
+        """
+        C{addUnexpectedSuccess} wraps its test with the provided adapter.
+        """
+        self.wrappedResult.addUnexpectedSuccess(self, Todo("no reason"))
+        self.assertWrapped(self.wrappedResult, self)
+
+
+
+class FakeStream(object):
+    """
+    A fake stream whose C{isatty} method returns a predictable value.
+
+    @ivar tty: returned value of C{isatty}.
+    @type tty: C{bool}
+    """
+
+    def __init__(self, tty=True):
+        self.tty = tty
+
+
+    def isatty(self):
+        return self.tty
+
+
+
+class AnsiColorizerTests(unittest.SynchronousTestCase):
+    """
+    Tests for L{reporter._AnsiColorizer}.
+    """
+
+    def setUp(self):
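+        # Snapshot sys.modules so individual tests can install a fake
+        # 'curses' module; tearDown restores the originals.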
+        self.savedModules = sys.modules.copy()
+
+
+    def tearDown(self):
+        sys.modules.clear()
+        sys.modules.update(self.savedModules)
+
+
+    def test_supportedStdOutTTY(self):
+        """
+        L{reporter._AnsiColorizer.supported} returns C{False} if the given
+        stream is not a TTY.
+        """
+        self.assertFalse(reporter._AnsiColorizer.supported(FakeStream(False)))
+
+
+    def test_supportedNoCurses(self):
+        """
+        L{reporter._AnsiColorizer.supported} returns C{False} if the curses
+        module can't be imported.
+        """
+        sys.modules['curses'] = None
+        self.assertFalse(reporter._AnsiColorizer.supported(FakeStream()))
+
+
+    def test_supportedSetupTerm(self):
+        """
+        L{reporter._AnsiColorizer.supported} returns C{True} if
+        C{curses.tigetnum} returns more than 2 supported colors. It only tries
+        to call C{curses.setupterm} if C{curses.tigetnum} previously failed
+        with a C{curses.error}.
+        """
+        class fakecurses(object):
+            error = RuntimeError
+            setUp = 0
+
+            def setupterm(self):
+                self.setUp += 1
+
+            def tigetnum(self, value):
+                if self.setUp:
+                    return 3
+                else:
+                    raise self.error()
+
+        sys.modules['curses'] = fakecurses()
+        self.assertTrue(reporter._AnsiColorizer.supported(FakeStream()))
+        self.assertTrue(reporter._AnsiColorizer.supported(FakeStream()))
+
+        self.assertEqual(sys.modules['curses'].setUp, 1)
+
+
+    def test_supportedTigetNumWrongError(self):
+        """
+        L{reporter._AnsiColorizer.supported} returns C{False} and doesn't try
+        to call C{curses.setupterm} if C{curses.tigetnum} raises an exception
+        other than C{curses.error}.
+        """
+        class fakecurses(object):
+            error = RuntimeError
+
+            def tigetnum(self, value):
+                raise ValueError()
+
+        sys.modules['curses'] = fakecurses()
+        self.assertFalse(reporter._AnsiColorizer.supported(FakeStream()))
+
+
+    def test_supportedTigetNumNotEnoughColor(self):
+        """
+        L{reporter._AnsiColorizer.supported} returns C{False} if
+        C{curses.tigetnum} returns fewer than 2 supported colors.
+        """
+        class fakecurses(object):
+            error = RuntimeError
+
+            def tigetnum(self, value):
+                return 1
+
+        sys.modules['curses'] = fakecurses()
+        self.assertFalse(reporter._AnsiColorizer.supported(FakeStream()))
+
+
+    def test_supportedTigetNumErrors(self):
+        """
+        L{reporter._AnsiColorizer.supported} returns C{False} if
+        C{curses.tigetnum} raises an error, and calls C{curses.setupterm} once.
+        """
+        class fakecurses(object):
+            error = RuntimeError
+            setUp = 0
+
+            def setupterm(self):
+                self.setUp += 1
+
+            def tigetnum(self, value):
+                raise self.error()
+
+        sys.modules['curses'] = fakecurses()
+        self.assertFalse(reporter._AnsiColorizer.supported(FakeStream()))
+        self.assertEqual(sys.modules['curses'].setUp, 1)
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_runner.py b/ThirdParty/Twisted/twisted/trial/test/test_runner.py
new file mode 100644
index 0000000..f14a5f2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_runner.py
@@ -0,0 +1,1022 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+# Maintainer: Jonathan Lange
+# Author: Robert Collins
+
+
+import StringIO, os, pdb, sys
+from zope.interface import implements
+from zope.interface.verify import verifyObject
+
+from twisted.trial.itrial import IReporter, ITestCase
+from twisted.trial import unittest, runner, reporter, util
+from twisted.python import failure, log, reflect, filepath
+from twisted.python.filepath import FilePath
+from twisted.scripts import trial
+from twisted.plugins import twisted_trial
+from twisted import plugin
+from twisted.internet import defer
+
+
+pyunit = __import__('unittest')
+
+
+class CapturingDebugger(object):
+
+    def __init__(self):
+        self._calls = []
+
+    def runcall(self, *args, **kwargs):
+        self._calls.append('runcall')
+        args[0](*args[1:], **kwargs)
+
+
+
+class CapturingReporter(object):
+    """
+    Reporter that keeps a log of all actions performed on it.
+    """
+
+    implements(IReporter)
+
+    stream = None
+    tbformat = None
+    args = None
+    separator = None
+    testsRun = None
+
+    def __init__(self, stream=None, tbformat=None, rterrors=None,
+                 publisher=None):
+        """
+        Create a capturing reporter.
+        """
+        self._calls = []
+        self.shouldStop = False
+        self._stream = stream
+        self._tbformat = tbformat
+        self._rterrors = rterrors
+        self._publisher = publisher
+
+
+    def startTest(self, method):
+        """
+        Report the beginning of a run of a single test method
+        @param method: an object that is adaptable to ITestMethod
+        """
+        self._calls.append('startTest')
+
+
+    def stopTest(self, method):
+        """
+        Report the status of a single test method
+        @param method: an object that is adaptable to ITestMethod
+        """
+        self._calls.append('stopTest')
+
+
+    def cleanupErrors(self, errs):
+        """called when the reactor has been left in a 'dirty' state
+        @param errs: a list of L{twisted.python.failure.Failure}s
+        """
+        self._calls.append('cleanupError')
+
+
+    def addSuccess(self, test):
+        self._calls.append('addSuccess')
+
+
+    def done(self):
+        """
+        Do nothing. These tests don't care about done.
+        """
+
+
+
+class TrialRunnerTestsMixin:
+    """
+    Mixin defining tests for L{runner.TrialRunner}.
+    """
+    def tearDown(self):
+        self.runner._tearDownLogFile()
+
+
+    def test_empty(self):
+        """
+        Empty test method, used by the other tests.
+        """
+
+
+    def _getObservers(self):
+        return log.theLogPublisher.observers
+
+
+    def test_addObservers(self):
+        """
+        Any log system observers L{TrialRunner.run} adds are removed by the
+        time it returns.
+        """
+        originalCount = len(self._getObservers())
+        self.runner.run(self.test)
+        newCount = len(self._getObservers())
+        self.assertEqual(newCount, originalCount)
+
+
+    def test_logFileAlwaysActive(self):
+        """
+        Test that a new file is opened on each run.
+        """
+        oldSetUpLogFile = self.runner._setUpLogFile
+        l = []
+        def setUpLogFile():
+            oldSetUpLogFile()
+            l.append(self.runner._logFileObserver)
+        self.runner._setUpLogFile = setUpLogFile
+        self.runner.run(self.test)
+        self.runner.run(self.test)
+        self.assertEqual(len(l), 2)
+        self.failIf(l[0] is l[1], "Should have created a new file observer")
+
+
+    def test_logFileGetsClosed(self):
+        """
+        Test that the log file created is closed during the run.
+        """
+        oldSetUpLogFile = self.runner._setUpLogFile
+        l = []
+        def setUpLogFile():
+            oldSetUpLogFile()
+            l.append(self.runner._logFileObject)
+        self.runner._setUpLogFile = setUpLogFile
+        self.runner.run(self.test)
+        self.assertEqual(len(l), 1)
+        self.failUnless(l[0].closed)
+
+
+
+class TestTrialRunner(TrialRunnerTestsMixin, unittest.SynchronousTestCase):
+    """
+    Tests for L{runner.TrialRunner} with the feature to turn unclean errors
+    into warnings disabled.
+    """
+    def setUp(self):
+        self.stream = StringIO.StringIO()
+        self.runner = runner.TrialRunner(CapturingReporter, stream=self.stream)
+        self.test = TestTrialRunner('test_empty')
+
+
+    def test_publisher(self):
+        """
+        The reporter constructed by L{runner.TrialRunner} is passed
+        L{twisted.python.log} as the value for the C{publisher} parameter.
+        """
+        result = self.runner._makeResult()
+        self.assertIdentical(result._publisher, log)
+
+
+
+class TrialRunnerWithUncleanWarningsReporter(TrialRunnerTestsMixin,
+                                             unittest.SynchronousTestCase):
+    """
+    Tests for the TrialRunner's interaction with an unclean-error suppressing
+    reporter.
+    """
+
+    def setUp(self):
+        self.stream = StringIO.StringIO()
+        self.runner = runner.TrialRunner(CapturingReporter, stream=self.stream,
+                                         uncleanWarnings=True)
+        self.test = TestTrialRunner('test_empty')
+
+
+
+class DryRunMixin(object):
+
+    suppress = [util.suppress(
+        category=DeprecationWarning,
+        message="Test visitors deprecated in Twisted 8.0")]
+
+
+    def setUp(self):
+        self.log = []
+        self.stream = StringIO.StringIO()
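+        # DRY_RUN mode reports each test as started, passed and stopped
+        # without executing its body (exercised by the tests below).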
+        self.runner = runner.TrialRunner(CapturingReporter,
+                                         runner.TrialRunner.DRY_RUN,
+                                         stream=self.stream)
+        self.makeTestFixtures()
+
+
+    def makeTestFixtures(self):
+        """
+        Set C{self.test} and C{self.suite}, where C{self.suite} is an empty
+        TestSuite.
+        """
+
+
+    def test_empty(self):
+        """
+        If there are no tests, the reporter should not receive any events to
+        report.
+        """
+        result = self.runner.run(runner.TestSuite())
+        self.assertEqual(result._calls, [])
+
+
+    def test_singleCaseReporting(self):
+        """
+        If we are running a single test, check the reporter starts, passes and
+        then stops the test during a dry run.
+        """
+        result = self.runner.run(self.test)
+        self.assertEqual(result._calls, ['startTest', 'addSuccess', 'stopTest'])
+
+
+    def test_testsNotRun(self):
+        """
+        When we are doing a dry run, the tests should not actually be run.
+        """
+        self.runner.run(self.test)
+        self.assertEqual(self.log, [])
+
+
+
+class SynchronousDryRunTest(DryRunMixin, unittest.SynchronousTestCase):
+    """
+    Check that 'dry run' mode works well with trial's L{SynchronousTestCase}.
+    """
+    def makeTestFixtures(self):
+        class PyunitCase(unittest.SynchronousTestCase):
+            def test_foo(self):
+                pass
+        self.test = PyunitCase('test_foo')
+        self.suite = pyunit.TestSuite()
+
+
+
+class DryRunTest(DryRunMixin, unittest.SynchronousTestCase):
+    """
+    Check that 'dry run' mode works well with Trial tests.
+    """
+    def makeTestFixtures(self):
+        class MockTest(unittest.TestCase):
+            def test_foo(test):
+                self.log.append('test_foo')
+        self.test = MockTest('test_foo')
+        self.suite = runner.TestSuite()
+
+
+
+class PyUnitDryRunTest(DryRunMixin, unittest.SynchronousTestCase):
+    """
+    Check that 'dry run' mode works well with stdlib unittest tests.
+    """
+    def makeTestFixtures(self):
+        class PyunitCase(pyunit.TestCase):
+            def test_foo(self):
+                pass
+        self.test = PyunitCase('test_foo')
+        self.suite = pyunit.TestSuite()
+
+
+
+class TestRunner(unittest.SynchronousTestCase):
+    def setUp(self):
+        self.config = trial.Options()
+        # whitebox hack a reporter in, because plugins are CACHED and will
+        # only reload if the FILE gets changed.
+
+        parts = reflect.qual(CapturingReporter).split('.')
+        package = '.'.join(parts[:-1])
+        klass = parts[-1]
+        plugins = [twisted_trial._Reporter(
+            "Test Helper Reporter",
+            package,
+            description="Utility for unit testing.",
+            longOpt="capturing",
+            shortOpt=None,
+            klass=klass)]
+
+
+        # XXX There should really be a general way to hook the plugin system
+        # for tests.
+        def getPlugins(iface, *a, **kw):
+            self.assertEqual(iface, IReporter)
+            return plugins + list(self.original(iface, *a, **kw))
+
+        self.original = plugin.getPlugins
+        plugin.getPlugins = getPlugins
+
+        self.standardReport = ['startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest',
+                               'startTest', 'addSuccess', 'stopTest']
+
+
+    def tearDown(self):
+        plugin.getPlugins = self.original
+
+
+    def parseOptions(self, args):
+        self.config.parseOptions(args)
+
+
+    def getRunner(self):
+        r = trial._makeRunner(self.config)
+        r.stream = StringIO.StringIO()
+        # XXX The runner should always take care of cleaning this up itself.
+        # It's not clear why this is necessary.  The runner always tears down
+        # its log file.
+        self.addCleanup(r._tearDownLogFile)
+        # XXX The runner should always take care of cleaning this up itself as
+        # well.  It's necessary because TrialRunner._setUpTestdir might raise
+        # an exception preventing Reporter.done from being run, leaving the
+        # observer added by Reporter.__init__ still present in the system.
+        # Something better needs to happen inside
+        # TrialRunner._runWithoutDecoration to remove the need for this kludge.
+        r._log = log.LogPublisher()
+        return r
+
+
+    def test_runner_can_get_reporter(self):
+        self.parseOptions([])
+        result = self.config['reporter']
+        runner = self.getRunner()
+        self.assertEqual(result, runner._makeResult().__class__)
+
+
+    def test_runner_get_result(self):
+        self.parseOptions([])
+        runner = self.getRunner()
+        result = runner._makeResult()
+        self.assertEqual(result.__class__, self.config['reporter'])
+
+
+    def test_uncleanWarningsOffByDefault(self):
+        """
+        By default Trial sets the 'uncleanWarnings' option on the runner to
+        False. This means that dirty reactor errors will be reported as
+        errors. See L{test_reporter.TestDirtyReactor}.
+        """
+        self.parseOptions([])
+        runner = self.getRunner()
+        self.assertNotIsInstance(runner._makeResult(),
+                                 reporter.UncleanWarningsReporterWrapper)
+
+
+    def test_getsUncleanWarnings(self):
+        """
+        Specifying '--unclean-warnings' on the trial command line will cause
+        reporters to be wrapped in a device which converts unclean errors to
+        warnings.  See L{test_reporter.TestDirtyReactor} for implications.
+        """
+        self.parseOptions(['--unclean-warnings'])
+        runner = self.getRunner()
+        self.assertIsInstance(runner._makeResult(),
+                              reporter.UncleanWarningsReporterWrapper)
+
+
+    def test_runner_working_directory(self):
+        self.parseOptions(['--temp-directory', 'some_path'])
+        runner = self.getRunner()
+        self.assertEqual(runner.workingDirectory, 'some_path')
+
+
+    def test_concurrentImplicitWorkingDirectory(self):
+        """
+        If no working directory is explicitly specified and the default
+        working directory is in use by another runner, L{TrialRunner.run}
+        selects a different default working directory to use.
+        """
+        self.parseOptions([])
+
+        # Make sure we end up with the same working directory after this test
+        # as we had before it.
+        self.addCleanup(os.chdir, os.getcwd())
+
+        # Make a new directory and change into it.  This isolates us from state
+        # that other tests might have dumped into this process's temp
+        # directory.
+        runDirectory = FilePath(self.mktemp())
+        runDirectory.makedirs()
+        os.chdir(runDirectory.path)
+
+        firstRunner = self.getRunner()
+        secondRunner = self.getRunner()
+
+        where = {}
+
+        class ConcurrentCase(unittest.SynchronousTestCase):
+            def test_first(self):
+                """
+                Start a second test run which will have a default working
+                directory which is the same as the working directory of the
+                test run already in progress.
+                """
+                # Change the working directory to the value it had before this
+                # test suite was started.
+                where['concurrent'] = subsequentDirectory = os.getcwd()
+                os.chdir(runDirectory.path)
+                self.addCleanup(os.chdir, subsequentDirectory)
+
+                secondRunner.run(ConcurrentCase('test_second'))
+
+            def test_second(self):
+                """
+                Record the working directory for later analysis.
+                """
+                where['record'] = os.getcwd()
+
+        result = firstRunner.run(ConcurrentCase('test_first'))
+        bad = result.errors + result.failures
+        if bad:
+            self.fail(bad[0][1])
+        self.assertEqual(
+            where, {
+                'concurrent': runDirectory.child('_trial_temp').path,
+                'record': runDirectory.child('_trial_temp-1').path})
+
+
+    def test_concurrentExplicitWorkingDirectory(self):
+        """
+        If a working directory which is already in use is explicitly specified,
+        L{TrialRunner.run} raises L{_WorkingDirectoryBusy}.
+        """
+        self.parseOptions(['--temp-directory', os.path.abspath(self.mktemp())])
+
+        initialDirectory = os.getcwd()
+        self.addCleanup(os.chdir, initialDirectory)
+
+        firstRunner = self.getRunner()
+        secondRunner = self.getRunner()
+
+        class ConcurrentCase(unittest.SynchronousTestCase):
+            def test_concurrent(self):
+                """
+                Try to start another runner in the same working directory and
+                assert that it raises L{_WorkingDirectoryBusy}.
+                """
+                self.assertRaises(
+                    util._WorkingDirectoryBusy,
+                    secondRunner.run, ConcurrentCase('test_failure'))
+
+            def test_failure(self):
+                """
+                Should not be called, always fails.
+                """
+                self.fail("test_failure should never be called.")
+
+        result = firstRunner.run(ConcurrentCase('test_concurrent'))
+        bad = result.errors + result.failures
+        if bad:
+            self.fail(bad[0][1])
+
+
+    def test_runner_normal(self):
+        self.parseOptions(['--temp-directory', self.mktemp(),
+                           '--reporter', 'capturing',
+                           'twisted.trial.test.sample'])
+        my_runner = self.getRunner()
+        loader = runner.TestLoader()
+        suite = loader.loadByName('twisted.trial.test.sample', True)
+        result = my_runner.run(suite)
+        self.assertEqual(self.standardReport, result._calls)
+
+
+    def runSampleSuite(self, my_runner):
+        loader = runner.TestLoader()
+        suite = loader.loadByName('twisted.trial.test.sample', True)
+        return my_runner.run(suite)
+
+
+    def test_runnerDebug(self):
+        """
+        Trial uses its debugger if the `--debug` option is passed.
+        """
+        self.parseOptions(['--reporter', 'capturing',
+                           '--debug', 'twisted.trial.test.sample'])
+        my_runner = self.getRunner()
+        debugger = my_runner.debugger = CapturingDebugger()
+        result = self.runSampleSuite(my_runner)
+        self.assertEqual(self.standardReport, result._calls)
+        self.assertEqual(['runcall'], debugger._calls)
+
+
+    def test_runnerDebuggerDefaultsToPdb(self):
+        """
+        Trial uses pdb if no debugger is specified by `--debugger`.
+        """
+        self.parseOptions(['--debug', 'twisted.trial.test.sample'])
+
+        self.runcall_called = False
+        def runcall(pdb, suite, result):
+            self.runcall_called = True
+        self.patch(pdb.Pdb, "runcall", runcall)
+
+        self.runSampleSuite(self.getRunner())
+
+        self.assertTrue(self.runcall_called)
+
+
+    def test_runnerDebuggerWithExplicitlyPassedPdb(self):
+        """
+        Trial uses pdb if pdb is passed explicitly to the `--debugger` argument.
+        """
+        self.parseOptions([
+            '--reporter', 'capturing',
+            '--debugger', 'pdb',
+            '--debug', 'twisted.trial.test.sample',
+        ])
+
+        self.runcall_called = False
+        def runcall(pdb, suite, result):
+            self.runcall_called = True
+        self.patch(pdb.Pdb, "runcall", runcall)
+
+        self.runSampleSuite(self.getRunner())
+
+        self.assertTrue(self.runcall_called)
+
+
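+    # Class-level debugger instance, looked up by its fully qualified name
+    # via the --debugger option in test_runnerDebugger below.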
+    cdebugger = CapturingDebugger()
+
+
+    def test_runnerDebugger(self):
+        """
+        Trial uses the specified debugger if it is available.
+        """
+        self.parseOptions([
+            '--reporter', 'capturing',
+            '--debugger',
+            'twisted.trial.test.test_runner.TestRunner.cdebugger',
+            '--debug',
+            'twisted.trial.test.sample',
+        ])
+        my_runner = self.getRunner()
+        result = self.runSampleSuite(my_runner)
+        self.assertEqual(self.standardReport, result._calls)
+        self.assertEqual(['runcall'], my_runner.debugger._calls)
+
+
+
+class TestTrialSuite(unittest.SynchronousTestCase):
+
+    def test_imports(self):
+        # FIXME, HTF do you test the reactor can be cleaned up ?!!!
+        from twisted.trial.runner import TrialSuite
+
+
+
+
+class TestUntilFailure(unittest.SynchronousTestCase):
+    class FailAfter(pyunit.TestCase):
+        """
+        A test case that fails when run 3 times in a row.
+        """
+        count = []
+        def test_foo(self):
+            self.count.append(None)
+            if len(self.count) == 3:
+                self.fail('Count reached 3')
+
+
+    def setUp(self):
+        TestUntilFailure.FailAfter.count = []
+        self.test = TestUntilFailure.FailAfter('test_foo')
+        self.stream = StringIO.StringIO()
+        self.runner = runner.TrialRunner(reporter.Reporter, stream=self.stream)
+
+
+    def test_runUntilFailure(self):
+        """
+        Test that the runUntilFailure method of the runner actually fails after
+        a few runs.
+        """
+        result = self.runner.runUntilFailure(self.test)
+        self.assertEqual(result.testsRun, 1)
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(self._getFailures(result), 1)
+
+
+    def _getFailures(self, result):
+        """
+        Get the number of failures that were reported to a result.
+        """
+        return len(result.failures)
+
+
+    def test_runUntilFailureDecorate(self):
+        """
+        C{runUntilFailure} doesn't decorate the tests redundantly: it decorates
+        them once when the run starts, not on every iteration.
+        """
+        decorated = []
+        def decorate(test, interface):
+            decorated.append((test, interface))
+            return test
+        self.patch(unittest, "decorate", decorate)
+        result = self.runner.runUntilFailure(self.test)
+        self.assertEqual(result.testsRun, 1)
+
+        self.assertEqual(len(decorated), 1)
+        self.assertEqual(decorated, [(self.test, ITestCase)])
+
+
+    def test_runUntilFailureForceGCDecorate(self):
+        """
+        C{runUntilFailure} applies the force-gc decoration after the standard
+        L{ITestCase} decoration, but only one time.
+        """
+        decorated = []
+        def decorate(test, interface):
+            decorated.append((test, interface))
+            return test
+        self.patch(unittest, "decorate", decorate)
+        self.runner._forceGarbageCollection = True
+        result = self.runner.runUntilFailure(self.test)
+        self.assertEqual(result.testsRun, 1)
+
+        self.assertEqual(len(decorated), 2)
+        self.assertEqual(decorated,
+            [(self.test, ITestCase),
+             (self.test, unittest._ForceGarbageCollectionDecorator)])
+
+
+
+class UncleanUntilFailureTests(TestUntilFailure):
+    """
+    Test that the run-until-failure feature works correctly with the unclean
+    error suppressor.
+    """
+
+    def setUp(self):
+        TestUntilFailure.setUp(self)
+        self.runner = runner.TrialRunner(reporter.Reporter, stream=self.stream,
+                                         uncleanWarnings=True)
+
+    def _getFailures(self, result):
+        """
+        Get the number of failures that were reported to a result that
+        is wrapped in an UncleanFailureWrapper.
+        """
+        return len(result._originalReporter.failures)
+
+
+
+class BreakingSuite(runner.TestSuite):
+    """
+    A L{TestSuite} that logs an error when it is run.
+    """
+
+    def run(self, result):
+        try:
+            raise RuntimeError("error that occurs outside of a test")
+        except RuntimeError:
+            log.err(failure.Failure())
+
+
+
+class TestLoggedErrors(unittest.SynchronousTestCase):
+    """
+    It is possible for an error generated by a test to be logged I{outside} of
+    any test. The log observers constructed by L{TestCase} won't catch these
+    errors. Here we try to generate such errors and ensure they are reported to
+    a L{TestResult} object.
+    """
+
+    def tearDown(self):
+        self.flushLoggedErrors(RuntimeError)
+
+
+    def test_construct(self):
+        """
+        Check that we can construct a L{runner.LoggedSuite} and that it
+        starts empty.
+        """
+        suite = runner.LoggedSuite()
+        self.assertEqual(suite.countTestCases(), 0)
+
+
+    def test_capturesError(self):
+        """
+        Check that a L{LoggedSuite} reports any logged errors to its result.
+        """
+        result = reporter.TestResult()
+        suite = runner.LoggedSuite([BreakingSuite()])
+        suite.run(result)
+        self.assertEqual(len(result.errors), 1)
+        self.assertEqual(result.errors[0][0].id(), runner.NOT_IN_TEST)
+        self.failUnless(result.errors[0][1].check(RuntimeError))
+
+
+
+class TestTestHolder(unittest.SynchronousTestCase):
+
+    def setUp(self):
+        self.description = "description"
+        self.holder = runner.TestHolder(self.description)
+
+
+    def test_holder(self):
+        """
+        Check that L{runner.TestHolder} takes a description as a parameter
+        and that this description is returned by the C{id} and
+        C{shortDescription} methods.
+        """
+        self.assertEqual(self.holder.id(), self.description)
+        self.assertEqual(self.holder.shortDescription(), self.description)
+
+
+    def test_holderImplementsITestCase(self):
+        """
+        L{runner.TestHolder} implements L{ITestCase}.
+        """
+        self.assertIdentical(self.holder, ITestCase(self.holder))
+        self.assertTrue(
+            verifyObject(ITestCase, self.holder),
+            "%r claims to provide %r but does not do so correctly."
+            % (self.holder, ITestCase))
+
+
+    def test_runsWithStandardResult(self):
+        """
+        A L{runner.TestHolder} can run against the standard Python
+        C{TestResult}.
+        """
+        result = pyunit.TestResult()
+        self.holder.run(result)
+        self.assertTrue(result.wasSuccessful())
+        self.assertEqual(1, result.testsRun)
+
+
+
+class ErrorHolderTestsMixin(object):
+    """
+    This mixin defines test methods which can be applied to a
+    L{runner.ErrorHolder} constructed with either a L{Failure} or an
+    C{exc_info}-style tuple.
+
+    Subclass this and implement C{setUp} to create C{self.holder} referring to a
+    L{runner.ErrorHolder} instance and C{self.error} referring to a L{Failure}
+    which the holder holds.
+    """
+    exceptionForTests = ZeroDivisionError('integer division or modulo by zero')
+
+    class TestResultStub(object):
+        """
+        Stub for L{TestResult}.
+        """
+        def __init__(self):
+            self.errors = []
+
+        def startTest(self, test):
+            pass
+
+        def stopTest(self, test):
+            pass
+
+        def addError(self, test, error):
+            self.errors.append((test, error))
+
+
+    def test_runsWithStandardResult(self):
+        """
+        A L{runner.ErrorHolder} can run against the standard Python
+        C{TestResult}.
+        """
+        result = pyunit.TestResult()
+        self.holder.run(result)
+        self.assertFalse(result.wasSuccessful())
+        self.assertEqual(1, result.testsRun)
+
+
+    def test_run(self):
+        """
+        L{runner.ErrorHolder} adds an error to the result when run.
+        """
+        self.holder.run(self.result)
+        self.assertEqual(
+            self.result.errors,
+            [(self.holder, (self.error.type, self.error.value, self.error.tb))])
+
+
+    def test_call(self):
+        """
+        L{runner.ErrorHolder} adds an error to the result when called.
+        """
+        self.holder(self.result)
+        self.assertEqual(
+            self.result.errors,
+            [(self.holder, (self.error.type, self.error.value, self.error.tb))])
+
+
+    def test_countTestCases(self):
+        """
+        L{runner.ErrorHolder.countTestCases} always returns 0.
+        """
+        self.assertEqual(self.holder.countTestCases(), 0)
+
+
+    def test_repr(self):
+        """
+        L{runner.ErrorHolder.__repr__} returns a string describing the error it
+        holds.
+        """
+        self.assertEqual(repr(self.holder),
+            "<ErrorHolder description='description' "
+            "error=ZeroDivisionError('integer division or modulo by zero',)>")
+
+
+
+class FailureHoldingErrorHolderTests(ErrorHolderTestsMixin, TestTestHolder):
+    """
+    Tests for L{runner.ErrorHolder} behaving similarly to L{runner.TestHolder}
+    when constructed with a L{Failure} representing its error.
+    """
+    def setUp(self):
+        self.description = "description"
+        # make a real Failure so we can construct ErrorHolder()
+        try:
+            raise self.exceptionForTests
+        except ZeroDivisionError:
+            self.error = failure.Failure()
+        self.holder = runner.ErrorHolder(self.description, self.error)
+        self.result = self.TestResultStub()
+
+
+
+class ExcInfoHoldingErrorHolderTests(ErrorHolderTestsMixin, TestTestHolder):
+    """
+    Tests for L{runner.ErrorHolder} behaving similarly to L{runner.TestHolder}
+    when constructed with an C{exc_info}-style tuple representing its error.
+    """
+    def setUp(self):
+        self.description = "description"
+        # make a real Failure so we can construct ErrorHolder()
+        try:
+            raise self.exceptionForTests
+        except ZeroDivisionError:
+            exceptionInfo = sys.exc_info()
+            self.error = failure.Failure()
+        self.holder = runner.ErrorHolder(self.description, exceptionInfo)
+        self.result = self.TestResultStub()
+
+
+
+class TestMalformedMethod(unittest.SynchronousTestCase):
+    """
+    Test that trial handles test methods that don't have correct signatures.
+    """
+    class ContainMalformed(pyunit.TestCase):
+        """
+        This TestCase holds malformed test methods that trial should handle.
+        """
+        def test_foo(self, blah):
+            pass
+        def test_bar():
+            pass
+        test_spam = defer.deferredGenerator(test_bar)
+
+    def _test(self, method):
+        """
+        Wrapper for running one of the test methods of L{ContainMalformed}.
+        """
+        stream = StringIO.StringIO()
+        trialRunner = runner.TrialRunner(reporter.Reporter, stream=stream)
+        test = TestMalformedMethod.ContainMalformed(method)
+        result = trialRunner.run(test)
+        self.assertEqual(result.testsRun, 1)
+        self.failIf(result.wasSuccessful())
+        self.assertEqual(len(result.errors), 1)
+
+    def test_extraArg(self):
+        """
+        Test the case where the method has extra (useless) arguments.
+        """
+        self._test('test_foo')
+
+    def test_noArg(self):
+        """
+        Test the case where the method doesn't even have C{self} as an argument.
+        """
+        self._test('test_bar')
+
+    def test_decorated(self):
+        """
+        Test that a decorated method also fails.
+        """
+        self._test('test_spam')
+
+
+
+class DestructiveTestSuiteTestCase(unittest.SynchronousTestCase):
+    """
+    Test for L{runner.DestructiveTestSuite}.
+    """
+
+    def test_basic(self):
+        """
+        The destructive test suite should run the tests normally.
+        """
+        called = []
+        class MockTest(pyunit.TestCase):
+            def test_foo(test):
+                called.append(True)
+        test = MockTest('test_foo')
+        result = reporter.TestResult()
+        suite = runner.DestructiveTestSuite([test])
+        self.assertEqual(called, [])
+        suite.run(result)
+        self.assertEqual(called, [True])
+        self.assertEqual(suite.countTestCases(), 0)
+
+
+    def test_shouldStop(self):
+        """
+        Test C{shouldStop} handling: raising a C{KeyboardInterrupt} must
+        interrupt the suite.
+        """
+        called = []
+        class MockTest(unittest.TestCase):
+            def test_foo1(test):
+                called.append(1)
+            def test_foo2(test):
+                raise KeyboardInterrupt()
+            def test_foo3(test):
+                called.append(2)
+        result = reporter.TestResult()
+        loader = runner.TestLoader()
+        loader.suiteFactory = runner.DestructiveTestSuite
+        suite = loader.loadClass(MockTest)
+        self.assertEqual(called, [])
+        suite.run(result)
+        self.assertEqual(called, [1])
+        # The last test shouldn't have been run
+        self.assertEqual(suite.countTestCases(), 1)
+
+
+    def test_cleanup(self):
+        """
+        Check that the test suite cleans up its tests during the run, so that
+        it ends empty.
+        """
+        class MockTest(pyunit.TestCase):
+            def test_foo(test):
+                pass
+        test = MockTest('test_foo')
+        result = reporter.TestResult()
+        suite = runner.DestructiveTestSuite([test])
+        self.assertEqual(suite.countTestCases(), 1)
+        suite.run(result)
+        self.assertEqual(suite.countTestCases(), 0)
+
+
+
+class TestRunnerDeprecation(unittest.SynchronousTestCase):
+
+    class FakeReporter(reporter.Reporter):
+        """
+        Fake reporter that does *not* implement done() but *does* implement
+        printErrors, separator, printSummary, stream, write and writeln
+        without deprecations.
+        """
+
+        done = None
+        separator = None
+        stream = None
+
+        def printErrors(self, *args):
+            pass
+
+        def printSummary(self, *args):
+            pass
+
+        def write(self, *args):
+            pass
+
+        def writeln(self, *args):
+            pass
+
+
+    def test_reporterDeprecations(self):
+        """
+        The runner emits a warning if it is using a result that doesn't
+        implement 'done'.
+        """
+        trialRunner = runner.TrialRunner(None)
+        result = self.FakeReporter()
+        trialRunner._makeResult = lambda: result
+        def f():
+            # We have to use a pyunit test, otherwise we'll get deprecation
+            # warnings about using iterate() in a test.
+            trialRunner.run(pyunit.TestCase('id'))
+        self.assertWarns(
+            DeprecationWarning,
+            "%s should implement done() but doesn't. Falling back to "
+            "printErrors() and friends." % reflect.qual(result.__class__),
+            __file__, f)
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_script.py b/ThirdParty/Twisted/twisted/trial/test/test_script.py
new file mode 100644
index 0000000..d92d9f8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_script.py
@@ -0,0 +1,596 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import gc
+import StringIO, sys, types
+
+from twisted.trial import unittest
+from twisted.trial.runner import (
+    TrialRunner, TestSuite, DestructiveTestSuite, TestLoader)
+from twisted.trial._dist.disttrial import DistTrialRunner
+from twisted.scripts import trial
+from twisted.python import util
+from twisted.python.compat import set
+from twisted.python.usage import UsageError
+from twisted.python.filepath import FilePath
+
+from twisted.trial.test.test_loader import testNames
+
+pyunit = __import__('unittest')
+
+
+def sibpath(filename):
+    """
+    For finding files in twisted/trial/test.
+    """
+    return util.sibpath(__file__, filename)
+
+
+
+class ForceGarbageCollection(unittest.SynchronousTestCase):
+    """
+    Tests for the --force-gc option.
+    """
+
+    def setUp(self):
+        self.config = trial.Options()
+        self.log = []
+        self.patch(gc, 'collect', self.collect)
+        test = pyunit.FunctionTestCase(self.simpleTest)
+        self.test = TestSuite([test, test])
+
+
+    def simpleTest(self):
+        """
+        A simple test method that records that it was run.
+        """
+        self.log.append('test')
+
+
+    def collect(self):
+        """
+        A replacement for gc.collect that logs calls to itself.
+        """
+        self.log.append('collect')
+
+
+    def makeRunner(self):
+        """
+        Return a L{TrialRunner} object that is safe to use in tests.
+        """
+        runner = trial._makeRunner(self.config)
+        runner.stream = StringIO.StringIO()
+        return runner
+
+
+    def test_forceGc(self):
+        """
+        Passing the --force-gc option to the trial script forces the garbage
+        collector to run before and after each test.
+        """
+        self.config['force-gc'] = True
+        self.config.postOptions()
+        runner = self.makeRunner()
+        runner.run(self.test)
+        self.assertEqual(self.log, ['collect', 'test', 'collect',
+                                    'collect', 'test', 'collect'])
+
+
+    def test_unforceGc(self):
+        """
+        By default, no garbage collection is forced.
+        """
+        self.config.postOptions()
+        runner = self.makeRunner()
+        runner.run(self.test)
+        self.assertEqual(self.log, ['test', 'test'])
+
+
+
+class TestSuiteUsed(unittest.SynchronousTestCase):
+    """
+    Check the category of tests suite used by the loader.
+    """
+
+    def setUp(self):
+        """
+        Create a trial configuration object.
+        """
+        self.config = trial.Options()
+
+
+    def test_defaultSuite(self):
+        """
+        By default, the loader should use L{DestructiveTestSuite}.
+        """
+        loader = trial._getLoader(self.config)
+        self.assertEqual(loader.suiteFactory, DestructiveTestSuite)
+
+
+    def test_untilFailureSuite(self):
+        """
+        The C{until-failure} configuration uses the L{TestSuite} to keep
+        instances alive across runs.
+        """
+        self.config['until-failure'] = True
+        loader = trial._getLoader(self.config)
+        self.assertEqual(loader.suiteFactory, TestSuite)
+
+
+
+class TestModuleTest(unittest.SynchronousTestCase):
+    def setUp(self):
+        self.config = trial.Options()
+
+    def tearDown(self):
+        self.config = None
+
+    def test_testNames(self):
+        """
+        Check that the testNames helper method accurately collects the
+        names of tests in a suite.
+        """
+        self.assertEqual(testNames(self), [self.id()])
+
+    def assertSuitesEqual(self, test1, names):
+        loader = TestLoader()
+        names1 = testNames(test1)
+        names2 = testNames(TestSuite(map(loader.loadByName, names)))
+        names1.sort()
+        names2.sort()
+        self.assertEqual(names1, names2)
+
+    def test_baseState(self):
+        self.assertEqual(0, len(self.config['tests']))
+
+    def test_testmoduleOnModule(self):
+        """
+        Check that --testmodule loads a suite which contains the tests
+        referred to by the test-case-name variable in the given file.
+        """
+        self.config.opt_testmodule(sibpath('moduletest.py'))
+        self.assertSuitesEqual(trial._getSuite(self.config),
+                               ['twisted.trial.test.test_test_visitor'])
+
+    def test_testmoduleTwice(self):
+        """
+        When the same module is specified with two --testmodule flags, it
+        should only appear once in the suite.
+        """
+        self.config.opt_testmodule(sibpath('moduletest.py'))
+        self.config.opt_testmodule(sibpath('moduletest.py'))
+        self.assertSuitesEqual(trial._getSuite(self.config),
+                               ['twisted.trial.test.test_test_visitor'])
+
+    def test_testmoduleOnSourceAndTarget(self):
+        """
+        If --testmodule is specified twice, once for module A and once for
+        a module which refers to module A, then make sure module A is only
+        added once.
+        """
+        self.config.opt_testmodule(sibpath('moduletest.py'))
+        self.config.opt_testmodule(sibpath('test_test_visitor.py'))
+        self.assertSuitesEqual(trial._getSuite(self.config),
+                               ['twisted.trial.test.test_test_visitor'])
+
+    def test_testmoduleOnSelfModule(self):
+        """
+        When given a module that refers to *itself* in the test-case-name
+        variable, check that --testmodule only adds the tests once.
+        """
+        self.config.opt_testmodule(sibpath('moduleself.py'))
+        self.assertSuitesEqual(trial._getSuite(self.config),
+                               ['twisted.trial.test.moduleself'])
+
+    def test_testmoduleOnScript(self):
+        """
+        Check that --testmodule loads tests referred to in test-case-name
+        buffer variables.
+        """
+        self.config.opt_testmodule(sibpath('scripttest.py'))
+        self.assertSuitesEqual(trial._getSuite(self.config),
+                               ['twisted.trial.test.test_test_visitor',
+                                'twisted.trial.test.test_class'])
+
+    def test_testmoduleOnNonexistentFile(self):
+        """
+        Check that --testmodule displays a meaningful error message when
+        passed a non-existent filename.
+        """
+        buffy = StringIO.StringIO()
+        stderr, sys.stderr = sys.stderr, buffy
+        filename = 'test_thisbetternoteverexist.py'
+        try:
+            self.config.opt_testmodule(filename)
+            self.assertEqual(0, len(self.config['tests']))
+            self.assertEqual("File %r doesn't exist\n" % (filename,),
+                                 buffy.getvalue())
+        finally:
+            sys.stderr = stderr
+
+    def test_testmoduleOnEmptyVars(self):
+        """
+        Check that --testmodule adds no tests to the suite for modules
+        which lack test-case-name buffer variables.
+        """
+        self.config.opt_testmodule(sibpath('novars.py'))
+        self.assertEqual(0, len(self.config['tests']))
+
+    def test_testmoduleOnModuleName(self):
+        """
+        Check that --testmodule does *not* support module names as arguments
+        and that it displays a meaningful error message.
+        """
+        buffy = StringIO.StringIO()
+        stderr, sys.stderr = sys.stderr, buffy
+        moduleName = 'twisted.trial.test.test_script'
+        try:
+            self.config.opt_testmodule(moduleName)
+            self.assertEqual(0, len(self.config['tests']))
+            self.assertEqual("File %r doesn't exist\n" % (moduleName,),
+                                 buffy.getvalue())
+        finally:
+            sys.stderr = stderr
+
+    def test_parseLocalVariable(self):
+        declaration = '-*- test-case-name: twisted.trial.test.test_tests -*-'
+        localVars = trial._parseLocalVariables(declaration)
+        self.assertEqual({'test-case-name':
+                              'twisted.trial.test.test_tests'},
+                             localVars)
+
+    def test_trailingSemicolon(self):
+        declaration = '-*- test-case-name: twisted.trial.test.test_tests; -*-'
+        localVars = trial._parseLocalVariables(declaration)
+        self.assertEqual({'test-case-name':
+                              'twisted.trial.test.test_tests'},
+                             localVars)
+
+    def test_parseLocalVariables(self):
+        declaration = ('-*- test-case-name: twisted.trial.test.test_tests; '
+                       'foo: bar -*-')
+        localVars = trial._parseLocalVariables(declaration)
+        self.assertEqual({'test-case-name':
+                              'twisted.trial.test.test_tests',
+                              'foo': 'bar'},
+                             localVars)
+
+    def test_surroundingGuff(self):
+        declaration = ('## -*- test-case-name: '
+                       'twisted.trial.test.test_tests -*- #')
+        localVars = trial._parseLocalVariables(declaration)
+        self.assertEqual({'test-case-name':
+                              'twisted.trial.test.test_tests'},
+                             localVars)
+
+    def test_invalidLine(self):
+        self.failUnlessRaises(ValueError, trial._parseLocalVariables,
+                              'foo')
+
+    def test_invalidDeclaration(self):
+        self.failUnlessRaises(ValueError, trial._parseLocalVariables,
+                              '-*- foo -*-')
+        self.failUnlessRaises(ValueError, trial._parseLocalVariables,
+                              '-*- foo: bar; qux -*-')
+        self.failUnlessRaises(ValueError, trial._parseLocalVariables,
+                              '-*- foo: bar: baz; qux: qax -*-')
+
+    def test_variablesFromFile(self):
+        localVars = trial.loadLocalVariables(sibpath('moduletest.py'))
+        self.assertEqual({'test-case-name':
+                              'twisted.trial.test.test_test_visitor'},
+                             localVars)
+
+    def test_noVariablesInFile(self):
+        localVars = trial.loadLocalVariables(sibpath('novars.py'))
+        self.assertEqual({}, localVars)
+
+    def test_variablesFromScript(self):
+        localVars = trial.loadLocalVariables(sibpath('scripttest.py'))
+        self.assertEqual(
+            {'test-case-name': ('twisted.trial.test.test_test_visitor,'
+                                'twisted.trial.test.test_class')},
+            localVars)
+
+    def test_getTestModules(self):
+        modules = trial.getTestModules(sibpath('moduletest.py'))
+        self.assertEqual(modules, ['twisted.trial.test.test_test_visitor'])
+
+    def test_getTestModules_noVars(self):
+        modules = trial.getTestModules(sibpath('novars.py'))
+        self.assertEqual(len(modules), 0)
+
+    def test_getTestModules_multiple(self):
+        modules = trial.getTestModules(sibpath('scripttest.py'))
+        self.assertEqual(set(modules),
+                             set(['twisted.trial.test.test_test_visitor',
+                                  'twisted.trial.test.test_class']))
+
+    def test_looksLikeTestModule(self):
+        for filename in ['test_script.py', 'twisted/trial/test/test_script.py']:
+            self.failUnless(trial.isTestFile(filename),
+                            "%r should be a test file" % (filename,))
+        for filename in ['twisted/trial/test/moduletest.py',
+                         sibpath('scripttest.py'), sibpath('test_foo.bat')]:
+            self.failIf(trial.isTestFile(filename),
+                        "%r should *not* be a test file" % (filename,))
+
+
+class WithoutModuleTests(unittest.SynchronousTestCase):
+    """
+    Test the C{without-module} flag.
+    """
+
+    def setUp(self):
+        """
+        Create a L{trial.Options} object to be used in the tests, and save
+        C{sys.modules}.
+        """
+        self.config = trial.Options()
+        self.savedModules = dict(sys.modules)
+
+
+    def tearDown(self):
+        """
+        Restore C{sys.modules}.
+        """
+        for module in ('imaplib', 'smtplib'):
+            if module in self.savedModules:
+                sys.modules[module] = self.savedModules[module]
+            else:
+                sys.modules.pop(module, None)
+
+
+    def _checkSMTP(self):
+        """
+        Try to import the C{smtplib} module, and return it.
+        """
+        import smtplib
+        return smtplib
+
+
+    def _checkIMAP(self):
+        """
+        Try to import the C{imaplib} module, and return it.
+        """
+        import imaplib
+        return imaplib
+
+
+    def test_disableOneModule(self):
+        """
+        Check that after disabling a module, it can't be imported anymore.
+        """
+        self.config.parseOptions(["--without-module", "smtplib"])
+        self.assertRaises(ImportError, self._checkSMTP)
+        # Restore sys.modules
+        del sys.modules["smtplib"]
+        # Then the function should succeed
+        self.assertIsInstance(self._checkSMTP(), types.ModuleType)
+
+
+    def test_disableMultipleModules(self):
+        """
+        Check that several modules can be disabled at once.
+        """
+        self.config.parseOptions(["--without-module", "smtplib,imaplib"])
+        self.assertRaises(ImportError, self._checkSMTP)
+        self.assertRaises(ImportError, self._checkIMAP)
+        # Restore sys.modules
+        del sys.modules["smtplib"]
+        del sys.modules["imaplib"]
+        # Then the functions should succeed
+        self.assertIsInstance(self._checkSMTP(), types.ModuleType)
+        self.assertIsInstance(self._checkIMAP(), types.ModuleType)
+
+
+    def test_disableAlreadyImportedModule(self):
+        """
+        Disabling an already imported module should produce a warning.
+        """
+        self.assertIsInstance(self._checkSMTP(), types.ModuleType)
+        self.assertWarns(RuntimeWarning,
+                "Module 'smtplib' already imported, disabling anyway.",
+                trial.__file__,
+                self.config.parseOptions, ["--without-module", "smtplib"])
+        self.assertRaises(ImportError, self._checkSMTP)
+
+
+
+class CoverageTests(unittest.SynchronousTestCase):
+    """
+    Tests for the I{coverage} option.
+    """
+    if getattr(sys, 'gettrace', None) is None:
+        skip = (
+            "Cannot test trace hook installation without inspection API.")
+
+    def setUp(self):
+        """
+        Arrange for the current trace hook to be restored when the
+        test is complete.
+        """
+        self.addCleanup(sys.settrace, sys.gettrace())
+
+
+    def test_tracerInstalled(self):
+        """
+        L{trial.Options} handles C{"--coverage"} by installing a trace
+        hook to record coverage information.
+        """
+        options = trial.Options()
+        options.parseOptions(["--coverage"])
+        self.assertEqual(sys.gettrace(), options.tracer.globaltrace)
+
+
+    def test_coverdirDefault(self):
+        """
+        L{trial.Options.coverdir} returns a L{FilePath} based on the default
+        for the I{temp-directory} option if that option is not specified.
+        """
+        options = trial.Options()
+        self.assertEqual(
+            options.coverdir(),
+            FilePath(".").descendant([options["temp-directory"], "coverage"]))
+
+
+    def test_coverdirOverridden(self):
+        """
+        If a value is specified for the I{temp-directory} option,
+        L{trial.Options.coverdir} returns a child of that path.
+        """
+        path = self.mktemp()
+        options = trial.Options()
+        options.parseOptions(["--temp-directory", path])
+        self.assertEqual(
+            options.coverdir(), FilePath(path).child("coverage"))
+
+
+
+class OptionsTestCase(unittest.TestCase):
+    """
+    Tests for L{trial.Options}.
+    """
+
+    def setUp(self):
+        """
+        Build an L{Options} object to be used in the tests.
+        """
+        self.options = trial.Options()
+
+
+    def test_getWorkerArguments(self):
+        """
+        C{_getWorkerArguments} discards options like C{random} as they only
+        matter in the manager, and forwards options like C{recursionlimit} or
+        C{disablegc}.
+        """
+        self.addCleanup(sys.setrecursionlimit, sys.getrecursionlimit())
+        if gc.isenabled():
+            self.addCleanup(gc.enable)
+
+        self.options.parseOptions(["--recursionlimit", "2000", "--random",
+                                   "4", "--disablegc"])
+        args = self.options._getWorkerArguments()
+        self.assertIn("--disablegc", args)
+        args.remove("--disablegc")
+        self.assertEqual(["--recursionlimit", "2000"], args)
+
+
+    def test_jobsConflictWithDebug(self):
+        """
+        C{parseOptions} raises a C{UsageError} when C{--debug} is passed along
+        with C{--jobs}, as it's not supported yet.
+
+        @see: U{http://twistedmatrix.com/trac/ticket/5825}
+        """
+        error = self.assertRaises(
+            UsageError, self.options.parseOptions, ["--jobs", "4", "--debug"])
+        self.assertEqual("You can't specify --debug when using --jobs",
+                         str(error))
+
+
+    def test_jobsConflictWithProfile(self):
+        """
+        C{parseOptions} raises a C{UsageError} when C{--profile} is passed
+        along with C{--jobs}, as it's not supported yet.
+
+        @see: U{http://twistedmatrix.com/trac/ticket/5827}
+        """
+        error = self.assertRaises(
+            UsageError, self.options.parseOptions,
+            ["--jobs", "4", "--profile"])
+        self.assertEqual("You can't specify --profile when using --jobs",
+                         str(error))
+
+
+    def test_jobsConflictWithDebugStackTraces(self):
+        """
+        C{parseOptions} raises a C{UsageError} when C{--debug-stacktraces} is
+        passed along with C{--jobs}, as it's not supported yet.
+
+        @see: U{http://twistedmatrix.com/trac/ticket/5826}
+        """
+        error = self.assertRaises(
+            UsageError, self.options.parseOptions,
+            ["--jobs", "4", "--debug-stacktraces"])
+        self.assertEqual(
+            "You can't specify --debug-stacktraces when using --jobs",
+            str(error))
+
+
+
+class MakeRunnerTestCase(unittest.TestCase):
+    """
+    Tests for the L{_makeRunner} helper.
+    """
+
+    def test_jobs(self):
+        """
+        L{_makeRunner} returns a L{DistTrialRunner} instance when the C{--jobs}
+        option is passed, and passes the C{workerNumber} and C{workerArguments}
+        parameters to it.
+        """
+        options = trial.Options()
+        options.parseOptions(["--jobs", "4", "--force-gc"])
+        runner = trial._makeRunner(options)
+        self.assertIsInstance(runner, DistTrialRunner)
+        self.assertEqual(4, runner._workerNumber)
+        self.assertEqual(["--force-gc"], runner._workerArguments)
+
+
+    def test_dryRunWithJobs(self):
+        """
+        L{_makeRunner} returns a L{TrialRunner} instance in C{DRY_RUN} mode
+        when the C{--dry-run} option is passed, even if C{--jobs} is set.
+        """
+        options = trial.Options()
+        options.parseOptions(["--jobs", "4", "--dry-run"])
+        runner = trial._makeRunner(options)
+        self.assertIsInstance(runner, TrialRunner)
+        self.assertEqual(TrialRunner.DRY_RUN, runner.mode)
+
+
+    def test_DebuggerNotFound(self):
+        namedAny = trial.reflect.namedAny
+
+        def namedAnyExceptdoNotFind(fqn):
+            if fqn == "doNotFind":
+                raise trial.reflect.ModuleNotFound(fqn)
+            return namedAny(fqn)
+
+        self.patch(trial.reflect, "namedAny", namedAnyExceptdoNotFind)
+
+        options = trial.Options()
+        options.parseOptions(["--debug", "--debugger", "doNotFind"])
+
+        self.assertRaises(trial._DebuggerNotFound, trial._makeRunner, options)
+
+
+class TestRun(unittest.TestCase):
+    """
+    Tests for the L{run} function.
+    """
+
+    def setUp(self):
+        # don't re-parse cmdline options, because if --reactor was passed to
+        # the test run, trial will try to restart the (already running) reactor
+        self.patch(trial.Options, "parseOptions", lambda self: None)
+
+
+    def test_debuggerNotFound(self):
+        """
+        When a debugger is not found, an error message is printed to the user.
+        """
+
+        def _makeRunner(*args, **kwargs):
+            raise trial._DebuggerNotFound('foo')
+        self.patch(trial, "_makeRunner", _makeRunner)
+
+        try:
+            trial.run()
+        except SystemExit as e:
+            self.assertIn("foo", str(e))
+        else:
+            self.fail("Should have exited due to non-existent debugger!")
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_suppression.py b/ThirdParty/Twisted/twisted/trial/test/test_suppression.py
new file mode 100644
index 0000000..8dbf046
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_suppression.py
@@ -0,0 +1,162 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for warning suppression features of Trial.
+"""
+
+from __future__ import division, absolute_import
+
+import unittest as pyunit
+
+from twisted.trial import unittest
+from twisted.trial.test import suppression
+
+
+class SuppressionMixin(object):
+    """
+    Tests for the warning suppression features of
+    L{twisted.trial.unittest.SynchronousTestCase}.
+    """
+    def runTests(self, suite):
+        suite.run(pyunit.TestResult())
+
+
+    def _load(self, cls, methodName):
+        """
+        Return a new L{unittest.TestSuite} with a single test method in it.
+
+        @param cls: A L{TestCase} subclass defining a test method.
+
+        @param methodName: The name of the test method from C{cls}.
+        """
+        return pyunit.TestSuite([cls(methodName)])
+
+
+    def _assertWarnings(self, warnings, which):
+        """
+        Assert that a certain number of warnings with certain messages were
+        emitted in a certain order.
+
+        @param warnings: A list of emitted warnings, as returned by
+            C{flushWarnings}.
+
+        @param which: A list of strings giving warning messages that should
+            appear in C{warnings}.
+
+        @raise self.failureException: If the warning messages given by C{which}
+            do not match the messages in the warning information in C{warnings},
+            or if they do not appear in the same order.
+        """
+        self.assertEqual(
+            [warning['message'] for warning in warnings],
+            which)
+
+
+    def test_setUpSuppression(self):
+        """
+        Suppressions defined by the test method being run are applied to any
+        warnings emitted while running the C{setUp} fixture.
+        """
+        self.runTests(
+            self._load(self.TestSetUpSuppression, "testSuppressMethod"))
+        warningsShown = self.flushWarnings([
+                self.TestSetUpSuppression._emit])
+        self._assertWarnings(
+            warningsShown,
+            [suppression.CLASS_WARNING_MSG, suppression.MODULE_WARNING_MSG,
+             suppression.CLASS_WARNING_MSG, suppression.MODULE_WARNING_MSG])
+
+
+    def test_tearDownSuppression(self):
+        """
+        Suppressions defined by the test method being run are applied to any
+        warnings emitted while running the C{tearDown} fixture.
+        """
+        self.runTests(
+            self._load(self.TestTearDownSuppression, "testSuppressMethod"))
+        warningsShown = self.flushWarnings([
+                self.TestTearDownSuppression._emit])
+        self._assertWarnings(
+            warningsShown,
+            [suppression.CLASS_WARNING_MSG, suppression.MODULE_WARNING_MSG,
+             suppression.CLASS_WARNING_MSG, suppression.MODULE_WARNING_MSG])
+
+
+    def test_suppressMethod(self):
+        """
+        A suppression set on a test method prevents warnings emitted by that
+        test method which the suppression matches from being emitted.
+        """
+        self.runTests(
+            self._load(self.TestSuppression, "testSuppressMethod"))
+        warningsShown = self.flushWarnings([
+                self.TestSuppression._emit])
+        self._assertWarnings(
+            warningsShown,
+            [suppression.CLASS_WARNING_MSG, suppression.MODULE_WARNING_MSG])
+
+
+    def test_suppressClass(self):
+        """
+        A suppression set on a L{SynchronousTestCase} subclass prevents warnings
+        emitted by any test methods defined on that class which match the
+        suppression from being emitted.
+        """
+        self.runTests(
+            self._load(self.TestSuppression, "testSuppressClass"))
+        warningsShown = self.flushWarnings([
+                self.TestSuppression._emit])
+        self.assertEqual(
+            warningsShown[0]['message'], suppression.METHOD_WARNING_MSG)
+        self.assertEqual(
+            warningsShown[1]['message'], suppression.MODULE_WARNING_MSG)
+        self.assertEqual(len(warningsShown), 2)
+
+
+    def test_suppressModule(self):
+        """
+        A suppression set on a module prevents warnings emitted by any test
+        methods defined in that module which match the suppression from being
+        emitted.
+        """
+        self.runTests(
+            self._load(self.TestSuppression2, "testSuppressModule"))
+        warningsShown = self.flushWarnings([
+                self.TestSuppression._emit])
+        self.assertEqual(
+            warningsShown[0]['message'], suppression.METHOD_WARNING_MSG)
+        self.assertEqual(
+            warningsShown[1]['message'], suppression.CLASS_WARNING_MSG)
+        self.assertEqual(len(warningsShown), 2)
+
+
+    def test_overrideSuppressClass(self):
+        """
+        The suppression set on a test method completely overrides a suppression
+        with wider scope; if it does not match a warning emitted by that test
+        method, the warning is emitted, even if a wider suppression matches.
+        """
+        self.runTests(
+            self._load(self.TestSuppression, "testOverrideSuppressClass"))
+        warningsShown = self.flushWarnings([
+                self.TestSuppression._emit])
+        self.assertEqual(
+            warningsShown[0]['message'], suppression.METHOD_WARNING_MSG)
+        self.assertEqual(
+            warningsShown[1]['message'], suppression.CLASS_WARNING_MSG)
+        self.assertEqual(
+            warningsShown[2]['message'], suppression.MODULE_WARNING_MSG)
+        self.assertEqual(len(warningsShown), 3)
+
+
+
+class SynchronousSuppressionTest(SuppressionMixin, unittest.SynchronousTestCase):
+    """
+    @see: L{twisted.trial.test.test_tests}
+    """
+    from twisted.trial.test.suppression import (
+        SynchronousTestSetUpSuppression as TestSetUpSuppression,
+        SynchronousTestTearDownSuppression as TestTearDownSuppression,
+        SynchronousTestSuppression as TestSuppression,
+        SynchronousTestSuppression2 as TestSuppression2)
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_test_visitor.py b/ThirdParty/Twisted/twisted/trial/test/test_test_visitor.py
new file mode 100644
index 0000000..b5c3484
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_test_visitor.py
@@ -0,0 +1,82 @@
+from twisted.trial import unittest
+from twisted.trial.runner import TestSuite, suiteVisit
+
+pyunit = __import__('unittest')
+
+
+
+class MockVisitor(object):
+    def __init__(self):
+        self.calls = []
+
+
+    def __call__(self, testCase):
+        self.calls.append(testCase)
+
+
+
+class TestTestVisitor(unittest.TestCase):
+    def setUp(self):
+        self.visitor = MockVisitor()
+
+
+    def test_visitCase(self):
+        """
+        Test that C{visit} works for a single test case.
+        """
+        testCase = TestTestVisitor('test_visitCase')
+        testCase.visit(self.visitor)
+        self.assertEqual(self.visitor.calls, [testCase])
+
+
+    def test_visitSuite(self):
+        """
+        Test that C{visit} hits all tests in a suite.
+        """
+        tests = [TestTestVisitor('test_visitCase'),
+                 TestTestVisitor('test_visitSuite')]
+        testSuite = TestSuite(tests)
+        testSuite.visit(self.visitor)
+        self.assertEqual(self.visitor.calls, tests)
+
+
+    def test_visitEmptySuite(self):
+        """
+        Test that C{visit} on an empty suite hits nothing.
+        """
+        TestSuite().visit(self.visitor)
+        self.assertEqual(self.visitor.calls, [])
+
+
+    def test_visitNestedSuite(self):
+        """
+        Test that C{visit} recurses through suites.
+        """
+        tests = [TestTestVisitor('test_visitCase'),
+                 TestTestVisitor('test_visitSuite')]
+        testSuite = TestSuite([TestSuite([test]) for test in tests])
+        testSuite.visit(self.visitor)
+        self.assertEqual(self.visitor.calls, tests)
+
+
+    def test_visitPyunitSuite(self):
+        """
+        Test that C{suiteVisit} visits stdlib unittest suites.
+        """
+        test = TestTestVisitor('test_visitPyunitSuite')
+        suite = pyunit.TestSuite([test])
+        suiteVisit(suite, self.visitor)
+        self.assertEqual(self.visitor.calls, [test])
+
+
+    def test_visitPyunitCase(self):
+        """
+        Test that a stdlib test case in a suite gets visited.
+        """
+        class PyunitCase(pyunit.TestCase):
+            def test_foo(self):
+                pass
+        test = PyunitCase('test_foo')
+        TestSuite([test]).visit(self.visitor)
+        self.assertEqual(
+            [call.id() for call in self.visitor.calls], [test.id()])
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_testcase.py b/ThirdParty/Twisted/twisted/trial/test/test_testcase.py
new file mode 100644
index 0000000..1b1d630
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_testcase.py
@@ -0,0 +1,70 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Direct unit tests for L{twisted.trial.unittest.SynchronousTestCase} and
+L{twisted.trial.unittest.TestCase}.
+"""
+
+from __future__ import division, absolute_import
+
+from twisted.trial.unittest import SynchronousTestCase, TestCase
+
+
+class TestCaseMixin(object):
+    """
+    L{TestCase} tests.
+    """
+    def setUp(self):
+        """
+        Create a couple of instances of C{MyTestCase}, each for the same test
+        method, to be used in the test methods of this class.
+        """
+        self.first = self.MyTestCase('test_1')
+        self.second = self.MyTestCase('test_1')
+
+
+    def test_equality(self):
+        """
+        In order for one test method to be runnable twice, two TestCase
+        instances with the same test method name must not compare as equal.
+        """
+        self.assertTrue(self.first == self.first)
+        self.assertTrue(self.first != self.second)
+        self.assertFalse(self.first == self.second)
+
+
+    def test_hashability(self):
+        """
+        In order for one test method to be runnable twice, two TestCase
+        instances with the same test method name should not have the same
+        hash value.
+        """
+        container = {}
+        container[self.first] = None
+        container[self.second] = None
+        self.assertEqual(len(container), 2)
+
+
+
+class SynchronousTestCaseTests(TestCaseMixin, SynchronousTestCase):
+    class MyTestCase(SynchronousTestCase):
+        """
+        Some test methods which can be used to test behaviors of
+        L{SynchronousTestCase}.
+        """
+        def test_1(self):
+            pass
+
+
+
+# Yes, subclass SynchronousTestCase again.  There are no interesting behaviors
+# of self being tested below, only of self.MyTestCase.
+class AsynchronousTestCaseTests(TestCaseMixin, SynchronousTestCase):
+    class MyTestCase(TestCase):
+        """
+        Some test methods which can be used to test behaviors of
+        L{TestCase}.
+        """
+        def test_1(self):
+            pass
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_tests.py b/ThirdParty/Twisted/twisted/trial/test/test_tests.py
new file mode 100644
index 0000000..c5d1135
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_tests.py
@@ -0,0 +1,1267 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the behaviour of unit tests.
+
+Many tests in this module follow a simple pattern.  A mixin is defined which
+includes test methods for a certain feature.  The mixin is inherited from twice,
+once by a class also inheriting from SynchronousTestCase and once from a class
+inheriting from TestCase.  These two subclasses are named like
+I{SynchronousFooTests} and I{AsynchronousFooTests}, where I{Foo} is related to
+the name of the mixin.  Sometimes the mixin is defined in another module, along
+with the synchronous subclass.  The mixin is imported into this module to define
+the asynchronous subclass.
+
+This pattern allows the same tests to be applied to the two base test case
+classes trial provides, ensuring their behavior is the same.
+
+Most new tests should be added in this pattern.  Tests for functionality which
+is intentionally only provided by TestCase, not SynchronousTestCase, are
+excepted, of course.
+"""
+
+from __future__ import division, absolute_import
+
+import gc, sys, weakref
+import unittest as pyunit
+
+from twisted.python.compat import _PY3, NativeStringIO
+from twisted.internet import defer, reactor
+from twisted.trial import unittest, reporter, util
+if not _PY3:
+    from twisted.trial import runner
+from twisted.trial.test import erroneous
+from twisted.trial.test.test_suppression import SuppressionMixin
+
+
+# Skip messages that are used in multiple places:
+_PY3PORTNEEDED = "Requires runner and/or reporter to be ported (#5964, #5965)"
+
+
+class ResultsTestMixin(object):
+    """
+    Provide useful APIs for test cases that are about test cases.
+    """
+    def loadSuite(self, suite):
+        """
+        Load tests from the given test case class and create a new reporter to
+        use for running it.
+        """
+        self.loader = pyunit.TestLoader()
+        self.suite = self.loader.loadTestsFromTestCase(suite)
+        self.reporter = reporter.TestResult()
+
+
+    def test_setUp(self):
+        self.failUnless(self.reporter.wasSuccessful())
+        self.assertEqual(self.reporter.errors, [])
+        self.assertEqual(self.reporter.failures, [])
+        self.assertEqual(self.reporter.skips, [])
+
+
+    def assertCount(self, numTests):
+        self.assertEqual(self.suite.countTestCases(), numTests)
+        self.suite(self.reporter)
+        self.assertEqual(self.reporter.testsRun, numTests)
+
+
+
+class SuccessMixin(object):
+    """
+    Tests for the reporting of successful tests.
+    """
+    def setUp(self):
+        self.result = reporter.TestResult()
+
+
+    def test_successful(self):
+        """
+        A successful test, used by other tests.
+        """
+
+
+    def assertSuccessful(self, test, result):
+        self.assertEqual(result.successes, 1)
+        self.assertEqual(result.failures, [])
+        self.assertEqual(result.errors, [])
+        self.assertEqual(result.expectedFailures, [])
+        self.assertEqual(result.unexpectedSuccesses, [])
+        self.assertEqual(result.skips, [])
+
+
+    def test_successfulIsReported(self):
+        """
+        Test that when a successful test is run, it is reported as a success,
+        and not as any other kind of result.
+        """
+        test = self.__class__('test_successful')
+        test.run(self.result)
+        self.assertSuccessful(test, self.result)
+
+
+    def test_defaultIsSuccessful(self):
+        """
+        The test case type can be instantiated with no arguments, run, and
+        reported as being successful.
+        """
+        test = self.__class__()
+        test.run(self.result)
+        self.assertSuccessful(test, self.result)
+
+
+    def test_noReference(self):
+        """
+        Test that no reference is kept on a successful test.
+        """
+        test = self.__class__('test_successful')
+        ref = weakref.ref(test)
+        test.run(self.result)
+        self.assertSuccessful(test, self.result)
+        del test
+        gc.collect()
+        self.assertIdentical(ref(), None)
+
+
+
+class SynchronousSuccessTests(SuccessMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+
+
+
+class AsynchronousSuccessTests(SuccessMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+
+
+
+class SkipMethodsMixin(ResultsTestMixin):
+    """
+    Tests for the reporting of skipping tests.
+    """
+    def setUp(self):
+        self.loadSuite(self.Skipping)
+
+
+    def test_counting(self):
+        self.assertCount(3)
+
+
+    def test_results(self):
+        """
+        Running a suite in which all methods are individually set to skip
+        produces a successful result with no recorded errors or failures, all
+        the skipped methods recorded as skips, and no methods recorded as
+        successes.
+        """
+        self.suite(self.reporter)
+        self.assertTrue(self.reporter.wasSuccessful())
+        self.assertEqual(self.reporter.errors, [])
+        self.assertEqual(self.reporter.failures, [])
+        self.assertEqual(len(self.reporter.skips), 3)
+        self.assertEqual(self.reporter.successes, 0)
+
+
+    def test_setUp(self):
+        """
+        Running a suite in which all methods are skipped by C{setUp} raising
+        L{SkipTest} produces a successful result with no recorded errors or
+        failures, all skipped methods recorded as skips, and no methods recorded
+        as successes.
+        """
+        self.loadSuite(self.SkippingSetUp)
+        self.suite(self.reporter)
+        self.assertTrue(self.reporter.wasSuccessful())
+        self.assertEqual(self.reporter.errors, [])
+        self.assertEqual(self.reporter.failures, [])
+        self.assertEqual(len(self.reporter.skips), 2)
+        self.assertEqual(self.reporter.successes, 0)
+
+
+    def test_reasons(self):
+        self.suite(self.reporter)
+        prefix = 'test_'
+        # whiteboxing reporter
+        for test, reason in self.reporter.skips:
+            self.assertEqual(test.shortDescription()[len(prefix):],
+                                 str(reason))
+
+
+    def test_deprecatedSkipWithoutReason(self):
+        """
+        If a test method raises L{SkipTest} with no reason, a deprecation
+        warning is emitted.
+        """
+        self.loadSuite(self.DeprecatedReasonlessSkip)
+        self.suite(self.reporter)
+        warnings = self.flushWarnings([
+                self.DeprecatedReasonlessSkip.test_1])
+        self.assertEqual(1, len(warnings))
+        self.assertEqual(DeprecationWarning, warnings[0]['category'])
+        self.assertEqual(
+            "Do not raise unittest.SkipTest with no arguments! Give a reason "
+            "for skipping tests!",
+            warnings[0]['message'])
+
+
+
+class SynchronousSkipMethodTests(SkipMethodsMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        SynchronousSkipping as Skipping,
+        SynchronousSkippingSetUp as SkippingSetUp,
+        SynchronousDeprecatedReasonlessSkip as DeprecatedReasonlessSkip)
+
+
+
+class AsynchronousSkipMethodTests(SkipMethodsMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        AsynchronousSkipping as Skipping,
+        AsynchronousSkippingSetUp as SkippingSetUp,
+        AsynchronousDeprecatedReasonlessSkip as DeprecatedReasonlessSkip)
+
+
+
+
+class SkipClassesMixin(ResultsTestMixin):
+    """
+    Test the class skipping features of L{twisted.trial.unittest.TestCase}.
+    """
+    def setUp(self):
+        self.loadSuite(self.SkippedClass)
+        self.SkippedClass._setUpRan = False
+
+
+    def test_counting(self):
+        """
+        Skipped test methods still contribute to the total test count.
+        """
+        self.assertCount(4)
+
+
+    def test_setUpRan(self):
+        """
+        The C{setUp} method is not called if the class is set to skip.
+        """
+        self.suite(self.reporter)
+        self.assertFalse(self.SkippedClass._setUpRan)
+
+
+    def test_results(self):
+        """
+        Skipped test methods don't cause C{wasSuccessful} to return C{False},
+        nor do they contribute to the C{errors} or C{failures} of the reporter,
+        or to the count of successes.  They do, however, add elements to the
+        reporter's C{skips} list.
+        """
+        self.suite(self.reporter)
+        self.assertTrue(self.reporter.wasSuccessful())
+        self.assertEqual(self.reporter.errors, [])
+        self.assertEqual(self.reporter.failures, [])
+        self.assertEqual(len(self.reporter.skips), 4)
+        self.assertEqual(self.reporter.successes, 0)
+
+
+    def test_reasons(self):
+        """
+        Test methods which raise L{unittest.SkipTest} or have their C{skip}
+        attribute set to something are skipped.
+        """
+        self.suite(self.reporter)
+        expectedReasons = ['class', 'skip2', 'class', 'class']
+        # whitebox reporter
+        reasonsGiven = [reason for test, reason in self.reporter.skips]
+        self.assertEqual(expectedReasons, reasonsGiven)
+
+
+
+class SynchronousSkipClassTests(SkipClassesMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        SynchronousSkippedClass as SkippedClass)
+
+
+
+class AsynchronousSkipClassTests(SkipClassesMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        AsynchronousSkippedClass as SkippedClass)
+
+
+
+class TodoMixin(ResultsTestMixin):
+    """
+    Tests for the individual test method I{expected failure} features of
+    L{twisted.trial.unittest.TestCase}.
+    """
+    def setUp(self):
+        self.loadSuite(self.Todo)
+
+
+    def test_counting(self):
+        self.assertCount(3)
+
+
+    def test_results(self):
+        """
+        Running a suite in which all methods are individually marked as expected
+        to fail produces a successful result with no recorded errors, failures,
+        or skips, all methods which fail and were expected to fail recorded as
+        C{expectedFailures}, and all methods which pass but which were expected
+        to fail recorded as C{unexpectedSuccesses}.  Additionally, no tests are
+        recorded as successes.
+        """
+        self.suite(self.reporter)
+        self.assertTrue(self.reporter.wasSuccessful())
+        self.assertEqual(self.reporter.errors, [])
+        self.assertEqual(self.reporter.failures, [])
+        self.assertEqual(self.reporter.skips, [])
+        self.assertEqual(len(self.reporter.expectedFailures), 2)
+        self.assertEqual(len(self.reporter.unexpectedSuccesses), 1)
+        self.assertEqual(self.reporter.successes, 0)
+
+
+    def test_expectedFailures(self):
+        self.suite(self.reporter)
+        expectedReasons = ['todo1', 'todo2']
+        reasonsGiven = [ r.reason
+                         for t, e, r in self.reporter.expectedFailures ]
+        self.assertEqual(expectedReasons, reasonsGiven)
+
+
+    def test_unexpectedSuccesses(self):
+        self.suite(self.reporter)
+        expectedReasons = ['todo3']
+        reasonsGiven = [ r.reason
+                         for t, r in self.reporter.unexpectedSuccesses ]
+        self.assertEqual(expectedReasons, reasonsGiven)
+
+
+    def test_expectedSetUpFailure(self):
+        """
+        C{setUp} is excluded from the failure expectation defined by a C{todo}
+        attribute on a test method.
+        """
+        self.loadSuite(self.SetUpTodo)
+        self.suite(self.reporter)
+        self.assertFalse(self.reporter.wasSuccessful())
+        self.assertEqual(len(self.reporter.errors), 1)
+        self.assertEqual(self.reporter.failures, [])
+        self.assertEqual(self.reporter.skips, [])
+        self.assertEqual(len(self.reporter.expectedFailures), 0)
+        self.assertEqual(len(self.reporter.unexpectedSuccesses), 0)
+        self.assertEqual(self.reporter.successes, 0)
+
+
+    def test_expectedTearDownFailure(self):
+        """
+        C{tearDown} is excluded from the failure expectation defined by a C{todo}
+        attribute on a test method.
+        """
+        self.loadSuite(self.TearDownTodo)
+        self.suite(self.reporter)
+        self.assertFalse(self.reporter.wasSuccessful())
+        self.assertEqual(len(self.reporter.errors), 1)
+        self.assertEqual(self.reporter.failures, [])
+        self.assertEqual(self.reporter.skips, [])
+        self.assertEqual(len(self.reporter.expectedFailures), 0)
+        # This seems strange, since tearDown raised an exception.  However, the
+        # test method did complete without error.  The tearDown error is
+        # reflected in the errors list, checked above.
+        self.assertEqual(len(self.reporter.unexpectedSuccesses), 1)
+        self.assertEqual(self.reporter.successes, 0)
+
+
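+# --- Editorial sketch, not part of upstream Twisted: a minimal, hypothetical
+# use of the C{todo} (expected failure) attribute exercised above.  Trial also
+# supports naming the expected exception types, the "strict" form tested
+# further down.
+from twisted.trial import unittest as _todo_sketch_unittest
+
+
+class _TodoSketch(_todo_sketch_unittest.SynchronousTestCase):
+    """
+    Illustrative only: the error below is reported as an expected failure.
+    """
+    def test_knownBreakage(self):
+        raise RuntimeError("known breakage")
+    test_knownBreakage.todo = "todo1"
+
+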
+
+class SynchronousTodoTests(TodoMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        SynchronousTodo as Todo,
+        SynchronousSetUpTodo as SetUpTodo,
+        SynchronousTearDownTodo as TearDownTodo)
+
+
+
+class AsynchronousTodoTests(TodoMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        AsynchronousTodo as Todo,
+        AsynchronousSetUpTodo as SetUpTodo,
+        AsynchronousTearDownTodo as TearDownTodo)
+
+
+
+class ClassTodoMixin(ResultsTestMixin):
+    """
+    Tests for the class-wide I{expected failure} features of
+    L{twisted.trial.unittest.TestCase}.
+    """
+    def setUp(self):
+        self.loadSuite(self.TodoClass)
+
+    def test_counting(self):
+        self.assertCount(4)
+
+
+    def test_results(self):
+        """
+        Running a suite in which an entire class is marked as expected to fail
+        produces a successful result with no recorded errors, failures, or
+        skips, all methods which fail and were expected to fail recorded as
+        C{expectedFailures}, and all methods which pass but which were expected
+        to fail recorded as C{unexpectedSuccesses}.  Additionally, no tests are
+        recorded as successes.
+        """
+        self.suite(self.reporter)
+        self.assertTrue(self.reporter.wasSuccessful())
+        self.assertEqual(self.reporter.errors, [])
+        self.assertEqual(self.reporter.failures, [])
+        self.assertEqual(self.reporter.skips, [])
+        self.assertEqual(len(self.reporter.expectedFailures), 2)
+        self.assertEqual(len(self.reporter.unexpectedSuccesses), 2)
+        self.assertEqual(self.reporter.successes, 0)
+
+
+    def test_expectedFailures(self):
+        self.suite(self.reporter)
+        expectedReasons = ['method', 'class']
+        reasonsGiven = [ r.reason
+                         for t, e, r in self.reporter.expectedFailures ]
+        self.assertEqual(expectedReasons, reasonsGiven)
+
+    def test_unexpectedSuccesses(self):
+        self.suite(self.reporter)
+        expectedReasons = ['method', 'class']
+        reasonsGiven = [ r.reason
+                         for t, r in self.reporter.unexpectedSuccesses ]
+        self.assertEqual(expectedReasons, reasonsGiven)
+
+
+
+class SynchronousClassTodoTests(ClassTodoMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        SynchronousTodoClass as TodoClass)
+
+
+
+class AsynchronousClassTodoTests(ClassTodoMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        AsynchronousTodoClass as TodoClass)
+
+
+
+class StrictTodoMixin(ResultsTestMixin):
+    """
+    Tests for the I{expected failure} features of
+    L{twisted.trial.unittest.TestCase} in which the exact failure which is
+    expected is indicated.
+    """
+    def setUp(self):
+        self.loadSuite(self.StrictTodo)
+
+    def test_counting(self):
+        self.assertCount(7)
+
+
+    def test_results(self):
+        """
+        A test method which is marked as expected to fail with a particular
+        exception is only counted as an expected failure if it does fail with
+        that exception, not if it fails with some other exception.
+        """
+        self.suite(self.reporter)
+        self.assertFalse(self.reporter.wasSuccessful())
+        self.assertEqual(len(self.reporter.errors), 2)
+        self.assertEqual(len(self.reporter.failures), 1)
+        self.assertEqual(len(self.reporter.expectedFailures), 3)
+        self.assertEqual(len(self.reporter.unexpectedSuccesses), 1)
+        self.assertEqual(self.reporter.successes, 0)
+        self.assertEqual(self.reporter.skips, [])
+
+
+    def test_expectedFailures(self):
+        self.suite(self.reporter)
+        expectedReasons = ['todo1', 'todo2', 'todo5']
+        reasonsGotten = [ r.reason
+                          for t, e, r in self.reporter.expectedFailures ]
+        self.assertEqual(expectedReasons, reasonsGotten)
+
+
+    def test_unexpectedSuccesses(self):
+        self.suite(self.reporter)
+        expectedReasons = [([RuntimeError], 'todo7')]
+        reasonsGotten = [ (r.errors, r.reason)
+                          for t, r in self.reporter.unexpectedSuccesses ]
+        self.assertEqual(expectedReasons, reasonsGotten)
+
+
+
+class SynchronousStrictTodoTests(StrictTodoMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        SynchronousStrictTodo as StrictTodo)
+
+
+
+class AsynchronousStrictTodoTests(StrictTodoMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import (
+        AsynchronousStrictTodo as StrictTodo)
+
+
+
+class TestReactorCleanup(unittest.SynchronousTestCase):
+    """
+    Tests for cleanup and reporting of reactor event sources left behind by test
+    methods.
+    """
+
+    if _PY3:
+        skip = _PY3PORTNEEDED
+
+    def setUp(self):
+        self.result = reporter.Reporter(NativeStringIO())
+        self.loader = runner.TestLoader()
+
+
+    def testLeftoverSockets(self):
+        """
+        Trial reports a L{util.DirtyReactorAggregateError} if a test leaves
+        sockets behind.
+        """
+        suite = self.loader.loadMethod(
+            erroneous.SocketOpenTest.test_socketsLeftOpen)
+        suite.run(self.result)
+        self.failIf(self.result.wasSuccessful())
+        # socket cleanup happens at end of class's tests.
+        # all the tests in the class are successful, even if the suite
+        # fails
+        self.assertEqual(self.result.successes, 1)
+        failure = self.result.errors[0][1]
+        self.failUnless(failure.check(util.DirtyReactorAggregateError))
+
+
+    def testLeftoverPendingCalls(self):
+        """
+        Trial reports a L{util.DirtyReactorAggregateError} and fails the test
+        if a test leaves a L{DelayedCall} hanging.
+        """
+        suite = erroneous.ReactorCleanupTests('test_leftoverPendingCalls')
+        suite.run(self.result)
+        self.failIf(self.result.wasSuccessful())
+        failure = self.result.errors[0][1]
+        self.assertEqual(self.result.successes, 0)
+        self.failUnless(failure.check(util.DirtyReactorAggregateError))
+
+
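+# --- Editorial sketch, not part of upstream Twisted: the kind of test the
+# cases above guard against, i.e. one that leaves a DelayedCall pending.  It
+# is skipped so that it never actually dirties the reactor.
+from twisted.trial import unittest as _dirty_sketch_unittest
+
+
+class _DirtyReactorSketch(_dirty_sketch_unittest.TestCase):
+    """
+    Illustrative only: without the skip, trial would add a
+    L{util.DirtyReactorAggregateError} for the leftover delayed call.
+    """
+    skip = "illustrative sketch only"
+
+    def test_leavesDelayedCall(self):
+        from twisted.internet import reactor
+        # Never cancelled, so the janitor would flag it after the test.
+        reactor.callLater(10, lambda: None)
+
+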
+
+class FixtureMixin(object):
+    """
+    Tests for broken fixture helper methods (e.g. setUp, tearDown).
+    """
+
+    def setUp(self):
+        self.reporter = reporter.Reporter()
+        self.loader = pyunit.TestLoader()
+
+
+    def test_brokenSetUp(self):
+        """
+        When setUp fails, the error is recorded in the result object.
+        """
+        suite = self.loader.loadTestsFromTestCase(self.TestFailureInSetUp)
+        suite.run(self.reporter)
+        self.assertTrue(len(self.reporter.errors) > 0)
+        self.assertIsInstance(
+            self.reporter.errors[0][1].value, erroneous.FoolishError)
+        self.assertEqual(0, self.reporter.successes)
+
+
+    def test_brokenTearDown(self):
+        """
+        When tearDown fails, the error is recorded in the result object.
+        """
+        suite = self.loader.loadTestsFromTestCase(self.TestFailureInTearDown)
+        suite.run(self.reporter)
+        errors = self.reporter.errors
+        self.assertTrue(len(errors) > 0)
+        self.assertIsInstance(errors[0][1].value, erroneous.FoolishError)
+        self.assertEqual(0, self.reporter.successes)
+
+
+
+class SynchronousFixtureTest(FixtureMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.erroneous import (
+        SynchronousTestFailureInSetUp as TestFailureInSetUp,
+        SynchronousTestFailureInTearDown as TestFailureInTearDown)
+
+
+
+class AsynchronousFixtureTest(FixtureMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.erroneous import (
+        AsynchronousTestFailureInSetUp as TestFailureInSetUp,
+        AsynchronousTestFailureInTearDown as TestFailureInTearDown)
+
+
+
+class AsynchronousSuppressionTest(SuppressionMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.suppression import (
+        AsynchronousTestSetUpSuppression as TestSetUpSuppression,
+        AsynchronousTestTearDownSuppression as TestTearDownSuppression,
+        AsynchronousTestSuppression as TestSuppression,
+        AsynchronousTestSuppression2 as TestSuppression2)
+
+
+
+class GCMixin:
+    """
+    I provide a few mock tests that log setUp, tearDown, test execution and
+    garbage collection. I'm used to test whether gc.collect gets called.
+    """
+
+    if _PY3:
+        skip = _PY3PORTNEEDED
+
+    class BasicTest(unittest.SynchronousTestCase):
+        def setUp(self):
+            self._log('setUp')
+        def test_foo(self):
+            self._log('test')
+        def tearDown(self):
+            self._log('tearDown')
+
+    class ClassTest(unittest.SynchronousTestCase):
+        def test_1(self):
+            self._log('test1')
+        def test_2(self):
+            self._log('test2')
+
+    def _log(self, msg):
+        self._collectCalled.append(msg)
+
+    def collect(self):
+        """Fake gc.collect"""
+        self._log('collect')
+
+    def setUp(self):
+        self._collectCalled = []
+        self.BasicTest._log = self.ClassTest._log = self._log
+        self._oldCollect = gc.collect
+        gc.collect = self.collect
+
+    def tearDown(self):
+        gc.collect = self._oldCollect
+
+
+
+class TestGarbageCollectionDefault(GCMixin, unittest.SynchronousTestCase):
+
+    def test_collectNotDefault(self):
+        """
+        By default, tests should not force garbage collection.
+        """
+        test = self.BasicTest('test_foo')
+        result = reporter.TestResult()
+        test.run(result)
+        self.assertEqual(self._collectCalled, ['setUp', 'test', 'tearDown'])
+
+
+
+class TestGarbageCollection(GCMixin, unittest.SynchronousTestCase):
+
+    def test_collectCalled(self):
+        """
+        Test that gc.collect is called before and after each test.
+        """
+        test = TestGarbageCollection.BasicTest('test_foo')
+        test = unittest._ForceGarbageCollectionDecorator(test)
+        result = reporter.TestResult()
+        test.run(result)
+        self.assertEqual(
+            self._collectCalled,
+            ['collect', 'setUp', 'test', 'tearDown', 'collect'])
+
+
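+# --- Editorial sketch, not part of upstream Twisted: how the (private)
+# decorator tested above is applied to a single test.  Trial's --force-gc
+# command line option arranges this for every test (an assumption based on the
+# decorator's use here, not on this diff).
+def _forceGCSketch():
+    """
+    Illustrative only: run one quiet test with gc.collect before and after.
+    """
+    from twisted.trial import unittest as trial_unittest
+    from twisted.trial import reporter as trial_reporter
+
+    class Quiet(trial_unittest.SynchronousTestCase):
+        def test_nothing(self):
+            pass
+
+    decorated = trial_unittest._ForceGarbageCollectionDecorator(
+        Quiet('test_nothing'))
+    decorated.run(trial_reporter.TestResult())
+
+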
+
+class TestUnhandledDeferred(unittest.SynchronousTestCase):
+
+    if _PY3:
+        skip = _PY3PORTNEEDED
+
+    def setUp(self):
+        from twisted.trial.test import weird
+        # test_unhandledDeferred creates a cycle; we need explicit control of gc.
+        gc.disable()
+        self.test1 = unittest._ForceGarbageCollectionDecorator(
+            weird.TestBleeding('test_unhandledDeferred'))
+
+    def test_isReported(self):
+        """
+        Forcing garbage collection should cause unhandled Deferreds to be
+        reported as errors.
+        """
+        result = reporter.TestResult()
+        self.test1(result)
+        self.assertEqual(len(result.errors), 1,
+                         'Unhandled deferred passed without notice')
+
+    def test_doesntBleed(self):
+        """
+        Forcing garbage collection in the test should mean that there are
+        no unreachable cycles immediately after the test completes.
+        """
+        result = reporter.TestResult()
+        self.test1(result)
+        self.flushLoggedErrors() # test1 logs errors that get caught by us.
+        # test1 created an unreachable cycle.  It and all others should have
+        # been collected by now.
+        n = gc.collect()
+        self.assertEqual(n, 0, 'unreachable cycle still existed')
+        # check that last gc.collect didn't log more errors
+        x = self.flushLoggedErrors()
+        self.assertEqual(len(x), 0, 'Errors logged after gc.collect')
+
+    def tearDown(self):
+        gc.collect()
+        gc.enable()
+        self.flushLoggedErrors()
+
+
+
+class AddCleanupMixin(object):
+    """
+    Test the addCleanup method of TestCase.
+    """
+    def setUp(self):
+        super(AddCleanupMixin, self).setUp()
+        self.result = reporter.TestResult()
+        self.test = self.AddCleanup()
+
+
+    def test_addCleanupCalledIfSetUpFails(self):
+        """
+        Callables added with C{addCleanup} are run even if setUp fails.
+        """
+        self.test.setUp = self.test.brokenSetUp
+        self.test.addCleanup(self.test.append, 'foo')
+        self.test.run(self.result)
+        self.assertEqual(['setUp', 'foo'], self.test.log)
+
+
+    def test_addCleanupCalledIfSetUpSkips(self):
+        """
+        Callables added with C{addCleanup} are run even if setUp raises
+        L{SkipTest}. This allows test authors to reliably provide clean up
+        code using C{addCleanup}.
+        """
+        self.test.setUp = self.test.skippingSetUp
+        self.test.addCleanup(self.test.append, 'foo')
+        self.test.run(self.result)
+        self.assertEqual(['setUp', 'foo'], self.test.log)
+
+
+    def test_addCleanupCalledInReverseOrder(self):
+        """
+        Callables added with C{addCleanup} should be called before C{tearDown}
+        in reverse order of addition.
+        """
+        self.test.addCleanup(self.test.append, "foo")
+        self.test.addCleanup(self.test.append, 'bar')
+        self.test.run(self.result)
+        self.assertEqual(['setUp', 'runTest', 'bar', 'foo', 'tearDown'],
+                         self.test.log)
+
+
+    def test_errorInCleanupIsCaptured(self):
+        """
+        Errors raised in cleanup functions should be treated like errors in
+        C{tearDown}. They should be added as errors and fail the test. Skips,
+        todos and failures are all treated as errors.
+        """
+        self.test.addCleanup(self.test.fail, 'foo')
+        self.test.run(self.result)
+        self.failIf(self.result.wasSuccessful())
+        self.assertEqual(1, len(self.result.errors))
+        [(test, error)] = self.result.errors
+        self.assertEqual(test, self.test)
+        self.assertEqual(error.getErrorMessage(), 'foo')
+
+
+    def test_cleanupsContinueRunningAfterError(self):
+        """
+        If a cleanup raises an error then that does not stop the other
+        cleanups from being run.
+        """
+        self.test.addCleanup(self.test.append, 'foo')
+        self.test.addCleanup(self.test.fail, 'bar')
+        self.test.run(self.result)
+        self.assertEqual(['setUp', 'runTest', 'foo', 'tearDown'],
+                         self.test.log)
+        self.assertEqual(1, len(self.result.errors))
+        [(test, error)] = self.result.errors
+        self.assertEqual(test, self.test)
+        self.assertEqual(error.getErrorMessage(), 'bar')
+
+
+    def test_multipleErrorsReported(self):
+        """
+        If more than one cleanup fails, then the test should fail with more
+        than one error.
+        """
+        self.test.addCleanup(self.test.fail, 'foo')
+        self.test.addCleanup(self.test.fail, 'bar')
+        self.test.run(self.result)
+        self.assertEqual(['setUp', 'runTest', 'tearDown'],
+                         self.test.log)
+        self.assertEqual(2, len(self.result.errors))
+        [(test1, error1), (test2, error2)] = self.result.errors
+        self.assertEqual(test1, self.test)
+        self.assertEqual(test2, self.test)
+        self.assertEqual(error1.getErrorMessage(), 'bar')
+        self.assertEqual(error2.getErrorMessage(), 'foo')
+
+
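+# --- Editorial sketch, not part of upstream Twisted: typical addCleanup use,
+# mirroring the ordering asserted above (cleanups run LIFO, before tearDown).
+from twisted.trial import unittest as _cleanup_sketch_unittest
+
+
+class _AddCleanupSketch(_cleanup_sketch_unittest.SynchronousTestCase):
+    """
+    Illustrative only.
+    """
+    def test_cleanupOrder(self):
+        events = []
+        self.addCleanup(events.append, 'added first, runs last')
+        self.addCleanup(events.append, 'added last, runs first')
+        # Both cleanups fire after this method returns, most recent first.
+
+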
+
+class SynchronousAddCleanupTests(AddCleanupMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import SynchronousAddCleanup as AddCleanup
+
+
+
+class AsynchronousAddCleanupTests(AddCleanupMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    from twisted.trial.test.skipping import AsynchronousAddCleanup as AddCleanup
+
+    def test_addCleanupWaitsForDeferreds(self):
+        """
+        If an added callable returns a L{Deferred}, then the test should wait
+        until that L{Deferred} has fired before running the next cleanup
+        method.
+        """
+        def cleanup(message):
+            d = defer.Deferred()
+            reactor.callLater(0, d.callback, message)
+            return d.addCallback(self.test.append)
+        self.test.addCleanup(self.test.append, 'foo')
+        self.test.addCleanup(cleanup, 'bar')
+        self.test.run(self.result)
+        self.assertEqual(['setUp', 'runTest', 'bar', 'foo', 'tearDown'],
+                         self.test.log)
+
+
+
+class SuiteClearingMixin(object):
+    """
+    Tests for our extension that allows us to clear out a L{TestSuite}.
+    """
+    if _PY3:
+        skip = _PY3PORTNEEDED
+
+    def test_clearSuite(self):
+        """
+        Calling L{unittest._clearSuite} on a populated L{TestSuite} removes
+        all tests.
+        """
+        suite = unittest.TestSuite()
+        suite.addTest(self.TestCase())
+        # Double check that the test suite actually has something in it.
+        self.assertEqual(1, suite.countTestCases())
+        unittest._clearSuite(suite)
+        self.assertEqual(0, suite.countTestCases())
+
+
+    def test_clearPyunitSuite(self):
+        """
+        Calling L{unittest._clearSuite} on a populated standard library
+        L{TestSuite} removes all tests.
+
+        This test is important since C{_clearSuite} operates by mutating
+        internal variables.
+        """
+        pyunit = __import__('unittest')
+        suite = pyunit.TestSuite()
+        suite.addTest(self.TestCase())
+        # Double check that the test suite actually has something in it.
+        self.assertEqual(1, suite.countTestCases())
+        unittest._clearSuite(suite)
+        self.assertEqual(0, suite.countTestCases())
+
+
+
+class SynchronousSuiteClearingTests(SuiteClearingMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    TestCase = unittest.SynchronousTestCase
+
+
+
+class AsynchronousSuiteClearingTests(SuiteClearingMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    TestCase = unittest.TestCase
+
+
+
+class TestDecoratorMixin(object):
+    """
+    Tests for our test decoration features.
+    """
+    if _PY3:
+        skip = _PY3PORTNEEDED
+
+    def assertTestsEqual(self, observed, expected):
+        """
+        Assert that the given decorated tests are equal.
+        """
+        self.assertEqual(observed.__class__, expected.__class__,
+                         "Different class")
+        observedOriginal = getattr(observed, '_originalTest', None)
+        expectedOriginal = getattr(expected, '_originalTest', None)
+        self.assertIdentical(observedOriginal, expectedOriginal)
+        if observedOriginal is expectedOriginal is None:
+            self.assertIdentical(observed, expected)
+
+
+    def assertSuitesEqual(self, observed, expected):
+        """
+        Assert that the given test suites with decorated tests are equal.
+        """
+        self.assertEqual(observed.__class__, expected.__class__,
+                         "Different class")
+        self.assertEqual(len(observed._tests), len(expected._tests),
+                         "Different number of tests.")
+        for observedTest, expectedTest in zip(observed._tests,
+                                              expected._tests):
+            if getattr(observedTest, '_tests', None) is not None:
+                self.assertSuitesEqual(observedTest, expectedTest)
+            else:
+                self.assertTestsEqual(observedTest, expectedTest)
+
+
+    def test_usesAdaptedReporterWithRun(self):
+        """
+        For decorated tests, C{run} uses a result adapter that preserves the
+        test decoration for calls to C{addError}, C{startTest} and the like.
+
+        See L{reporter._AdaptedReporter}.
+        """
+        test = self.TestCase()
+        decoratedTest = unittest.TestDecorator(test)
+        # Move to top in ticket #5964:
+        from twisted.trial.test.test_reporter import LoggingReporter
+        result = LoggingReporter()
+        decoratedTest.run(result)
+        self.assertTestsEqual(result.test, decoratedTest)
+
+
+    def test_usesAdaptedReporterWithCall(self):
+        """
+        For decorated tests, C{__call__} uses a result adapter that preserves
+        the test decoration for calls to C{addError}, C{startTest} and the
+        like.
+
+        See L{reporter._AdaptedReporter}.
+        """
+        test = self.TestCase()
+        decoratedTest = unittest.TestDecorator(test)
+        # Move to top in ticket #5964:
+        from twisted.trial.test.test_reporter import LoggingReporter
+        result = LoggingReporter()
+        decoratedTest(result)
+        self.assertTestsEqual(result.test, decoratedTest)
+
+
+    def test_decorateSingleTest(self):
+        """
+        Calling L{decorate} on a single test case returns the test case
+        decorated with the provided decorator.
+        """
+        test = self.TestCase()
+        decoratedTest = unittest.decorate(test, unittest.TestDecorator)
+        self.assertTestsEqual(unittest.TestDecorator(test), decoratedTest)
+
+
+    def test_decorateTestSuite(self):
+        """
+        Calling L{decorate} on a test suite will return a test suite with
+        each test decorated with the provided decorator.
+        """
+        test = self.TestCase()
+        suite = unittest.TestSuite([test])
+        decoratedTest = unittest.decorate(suite, unittest.TestDecorator)
+        self.assertSuitesEqual(
+            decoratedTest, unittest.TestSuite([unittest.TestDecorator(test)]))
+
+
+    def test_decorateInPlaceMutatesOriginal(self):
+        """
+        Calling L{decorate} on a test suite will mutate the original suite.
+        """
+        test = self.TestCase()
+        suite = unittest.TestSuite([test])
+        decoratedTest = unittest.decorate(
+            suite, unittest.TestDecorator)
+        self.assertSuitesEqual(
+            decoratedTest, unittest.TestSuite([unittest.TestDecorator(test)]))
+        self.assertSuitesEqual(
+            suite, unittest.TestSuite([unittest.TestDecorator(test)]))
+
+
+    def test_decorateTestSuiteReferences(self):
+        """
+        When decorating a test suite in-place, the number of references to the
+        test objects in that test suite should stay the same.
+
+        Previously, L{unittest.decorate} recreated a test suite, so the
+        original suite kept references to the test objects. This test is here
+        to ensure the problem doesn't reappear.
+        """
+        getrefcount = getattr(sys, 'getrefcount', None)
+        if getrefcount is None:
+            raise unittest.SkipTest(
+                "getrefcount not supported on this platform")
+        test = self.TestCase()
+        suite = unittest.TestSuite([test])
+        count1 = getrefcount(test)
+        decoratedTest = unittest.decorate(suite, unittest.TestDecorator)
+        count2 = getrefcount(test)
+        self.assertEqual(count1, count2)
+
+
+    def test_decorateNestedTestSuite(self):
+        """
+        Calling L{decorate} on a test suite with nested suites will return a
+        test suite that maintains the same structure, but with all tests
+        decorated.
+        """
+        test = self.TestCase()
+        suite = unittest.TestSuite([unittest.TestSuite([test])])
+        decoratedTest = unittest.decorate(suite, unittest.TestDecorator)
+        expected = unittest.TestSuite(
+            [unittest.TestSuite([unittest.TestDecorator(test)])])
+        self.assertSuitesEqual(decoratedTest, expected)
+
+
+    def test_decorateDecoratedSuite(self):
+        """
+        Calling L{decorate} on a test suite with already-decorated tests
+        decorates all of the tests in the suite again.
+        """
+        test = self.TestCase()
+        decoratedTest = unittest.decorate(test, unittest.TestDecorator)
+        redecoratedTest = unittest.decorate(decoratedTest,
+                                            unittest.TestDecorator)
+        self.assertTestsEqual(redecoratedTest,
+                              unittest.TestDecorator(decoratedTest))
+
+
+    def test_decoratePreservesSuite(self):
+        """
+        Tests can be in non-standard suites. L{decorate} preserves the
+        non-standard suites when it decorates the tests.
+        """
+        test = self.TestCase()
+        suite = runner.DestructiveTestSuite([test])
+        decorated = unittest.decorate(suite, unittest.TestDecorator)
+        self.assertSuitesEqual(
+            decorated,
+            runner.DestructiveTestSuite([unittest.TestDecorator(test)]))
+
+
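+# --- Editorial sketch, not part of upstream Twisted: the decoration API the
+# mixin above exercises, applied to a hand-built nested suite.
+def _decorateSketch():
+    """
+    Illustrative only: decorate every test in a (possibly nested) suite.
+    """
+    from twisted.trial import unittest as trial_unittest
+
+    class Quiet(trial_unittest.SynchronousTestCase):
+        def test_nothing(self):
+            pass
+
+    suite = trial_unittest.TestSuite(
+        [trial_unittest.TestSuite([Quiet('test_nothing')])])
+    # Returns a suite with the same shape, each test wrapped in TestDecorator.
+    return trial_unittest.decorate(suite, trial_unittest.TestDecorator)
+
+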
+
+class SynchronousTestDecoratorTests(TestDecoratorMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    TestCase = unittest.SynchronousTestCase
+
+
+
+class AsynchronousTestDecoratorTests(TestDecoratorMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    TestCase = unittest.TestCase
+
+
+
+class MonkeyPatchMixin(object):
+    """
+    Tests for the patch() helper method in L{unittest.TestCase}.
+    """
+    def setUp(self):
+        self.originalValue = 'original'
+        self.patchedValue = 'patched'
+        self.objectToPatch = self.originalValue
+        self.test = self.TestCase()
+
+
+    def test_patch(self):
+        """
+        Calling C{patch()} on a test monkey patches the specified object and
+        attribute.
+        """
+        self.test.patch(self, 'objectToPatch', self.patchedValue)
+        self.assertEqual(self.objectToPatch, self.patchedValue)
+
+
+    def test_patchRestoredAfterRun(self):
+        """
+        Any monkey patches introduced by a test using C{patch()} are reverted
+        after the test has run.
+        """
+        self.test.patch(self, 'objectToPatch', self.patchedValue)
+        self.test.run(reporter.Reporter())
+        self.assertEqual(self.objectToPatch, self.originalValue)
+
+
+    def test_revertDuringTest(self):
+        """
+        C{patch()} returns a L{monkey.MonkeyPatcher} object that can be used to
+        restore the original values before the end of the test.
+        """
+        patch = self.test.patch(self, 'objectToPatch', self.patchedValue)
+        patch.restore()
+        self.assertEqual(self.objectToPatch, self.originalValue)
+
+
+    def test_revertAndRepatch(self):
+        """
+        The returned L{monkey.MonkeyPatcher} object can re-apply the patch
+        during the test run.
+        """
+        patch = self.test.patch(self, 'objectToPatch', self.patchedValue)
+        patch.restore()
+        patch.patch()
+        self.assertEqual(self.objectToPatch, self.patchedValue)
+
+
+    def test_successivePatches(self):
+        """
+        Successive patches are applied and reverted just like a single patch.
+        """
+        self.test.patch(self, 'objectToPatch', self.patchedValue)
+        self.assertEqual(self.objectToPatch, self.patchedValue)
+        self.test.patch(self, 'objectToPatch', 'second value')
+        self.assertEqual(self.objectToPatch, 'second value')
+        self.test.run(reporter.Reporter())
+        self.assertEqual(self.objectToPatch, self.originalValue)
+
+
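+# --- Editorial sketch, not part of upstream Twisted: typical use of the
+# patch() helper tested above.  The patch is undone after the test, or earlier
+# via the returned MonkeyPatcher's restore().
+from twisted.trial import unittest as _patch_sketch_unittest
+
+
+class _PatchSketch(_patch_sketch_unittest.SynchronousTestCase):
+    """
+    Illustrative only.
+    """
+    def test_patchAttribute(self):
+        self.value = 'original'
+        self.patch(self, 'value', 'patched')
+        self.assertEqual(self.value, 'patched')
+
+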
+
+class SynchronousMonkeyPatchTests(MonkeyPatchMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    TestCase = unittest.SynchronousTestCase
+
+
+
+class AsynchronousMonkeyPatchTests(MonkeyPatchMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    TestCase = unittest.TestCase
+
+
+
+class IterateTestsMixin(object):
+    """
+    L{_iterateTests} returns a list of all test cases in a test suite or test
+    case.
+    """
+    if _PY3:
+        skip = _PY3PORTNEEDED
+
+    def test_iterateTestCase(self):
+        """
+        L{_iterateTests} on a single test case returns a list containing that
+        test case.
+        """
+        test = self.TestCase()
+        self.assertEqual([test], list(unittest._iterateTests(test)))
+
+
+    def test_iterateSingletonTestSuite(self):
+        """
+        L{_iterateTests} on a test suite that contains a single test case
+        returns a list containing that test case.
+        """
+        test = self.TestCase()
+        suite = runner.TestSuite([test])
+        self.assertEqual([test], list(unittest._iterateTests(suite)))
+
+
+    def test_iterateNestedTestSuite(self):
+        """
+        L{_iterateTests} returns tests that are in nested test suites.
+        """
+        test = self.TestCase()
+        suite = runner.TestSuite([runner.TestSuite([test])])
+        self.assertEqual([test], list(unittest._iterateTests(suite)))
+
+
+    def test_iterateIsLeftToRightDepthFirst(self):
+        """
+        L{_iterateTests} returns tests in left-to-right, depth-first order.
+        """
+        test = self.TestCase()
+        suite = runner.TestSuite([runner.TestSuite([test]), self])
+        self.assertEqual([test, self], list(unittest._iterateTests(suite)))
+
+
+
+class SynchronousIterateTestsTests(IterateTestsMixin, unittest.SynchronousTestCase):
+    """
+    See module docstring.
+    """
+    TestCase = unittest.SynchronousTestCase
+
+
+
+class AsynchronousIterateTestsTests(IterateTestsMixin, unittest.TestCase):
+    """
+    See module docstring.
+    """
+    TestCase = unittest.TestCase
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_util.py b/ThirdParty/Twisted/twisted/trial/test/test_util.py
new file mode 100644
index 0000000..0a529b8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_util.py
@@ -0,0 +1,739 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+
+"""
+Tests for L{twisted.trial.util}
+"""
+
+from __future__ import division, absolute_import
+
+import os, sys
+
+from zope.interface import implementer
+
+from twisted.python.compat import _PY3, NativeStringIO
+from twisted.python import filepath
+from twisted.internet.interfaces import IProcessTransport
+from twisted.internet import defer
+from twisted.internet.base import DelayedCall
+from twisted.python.failure import Failure
+
+from twisted.trial.unittest import SynchronousTestCase
+from twisted.trial import util
+from twisted.trial.util import (
+    DirtyReactorAggregateError, _Janitor, excInfoOrFailureToExcInfo,
+    acquireAttribute)
+from twisted.trial.test import packages, suppression
+
+
+
+class TestMktemp(SynchronousTestCase):
+    """
+    Tests for L{TestCase.mktemp}, a helper function for creating temporary file
+    or directory names.
+    """
+    def test_name(self):
+        """
+        The path name returned by C{mktemp} is directly beneath a directory
+        which identifies the test method which created the name.
+        """
+        name = self.mktemp()
+        dirs = os.path.dirname(name).split(os.sep)[:-1]
+        self.assertEqual(
+            dirs, ['twisted.trial.test.test_util', 'TestMktemp', 'test_name'])
+
+
+    def test_unique(self):
+        """
+        Repeated calls to C{mktemp} return different values.
+        """
+        name = self.mktemp()
+        self.assertNotEqual(name, self.mktemp())
+
+
+    def test_created(self):
+        """
+        The directory part of the path name returned by C{mktemp} exists.
+        """
+        name = self.mktemp()
+        dirname = os.path.dirname(name)
+        self.assertTrue(os.path.exists(dirname))
+        self.assertFalse(os.path.exists(name))
+
+
+    def test_location(self):
+        """
+        The path returned by C{mktemp} is beneath the current working directory.
+        """
+        path = os.path.abspath(self.mktemp())
+        self.assertTrue(path.startswith(os.getcwd()))
+
+
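+# --- Editorial sketch, not part of upstream Twisted: mktemp only hands back a
+# name beneath the working directory; the caller creates the file or directory
+# itself.
+class _MktempSketch(SynchronousTestCase):
+    """
+    Illustrative only.
+    """
+    def test_makeScratchDirectory(self):
+        path = self.mktemp()
+        os.mkdir(path)
+        self.assertTrue(os.path.isdir(path))
+
+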
+
+class TestIntrospection(SynchronousTestCase):
+    def test_containers(self):
+        """
+        When passed a test case, L{util.getPythonContainers} returns a list
+        including the test case and the module the test case is defined in.
+        """
+        parents = util.getPythonContainers(
+            suppression.SynchronousTestSuppression2.testSuppressModule)
+        expected = [suppression.SynchronousTestSuppression2, suppression]
+        for a, b in zip(parents, expected):
+            self.assertEqual(a, b)
+        # Also, the function is deprecated.
+        warnings = self.flushWarnings([self.test_containers])
+        self.assertEqual(DeprecationWarning, warnings[0]['category'])
+        self.assertEqual(
+            "twisted.trial.util.getPythonContainers was deprecated in "
+            "Twisted 12.3.0: This function never worked correctly.  "
+            "Implement lookup on your own.",
+            warnings[0]['message'])
+        self.assertEqual(1, len(warnings))
+    if _PY3:
+        test_containers.skip = "getPythonContainers is unsupported on Python 3."
+
+
+
+class TestFindObject(packages.SysPathManglingTest):
+    """
+    Tests for L{twisted.trial.util.findObject}
+    """
+
+    def test_deprecation(self):
+        """
+        Calling L{findObject} results in a deprecation warning
+        """
+        util.findObject('')
+        warningsShown = self.flushWarnings()
+        self.assertEqual(len(warningsShown), 1)
+        self.assertIdentical(warningsShown[0]['category'], DeprecationWarning)
+        self.assertEqual(warningsShown[0]['message'],
+                          "twisted.trial.util.findObject was deprecated "
+                          "in Twisted 10.1.0: Please use "
+                          "twisted.python.reflect.namedAny instead.")
+
+
+    def test_importPackage(self):
+        package1 = util.findObject('package')
+        import package as package2
+        self.assertEqual(package1, (True, package2))
+
+    def test_importModule(self):
+        test_sample2 = util.findObject('goodpackage.test_sample')
+        from goodpackage import test_sample
+        self.assertEqual((True, test_sample), test_sample2)
+
+    def test_importError(self):
+        self.failUnlessRaises(ZeroDivisionError,
+                              util.findObject, 'package.test_bad_module')
+
+    def test_sophisticatedImportError(self):
+        self.failUnlessRaises(ImportError,
+                              util.findObject, 'package2.test_module')
+
+    def test_importNonexistentPackage(self):
+        self.assertEqual(util.findObject('doesntexist')[0], False)
+
+    def test_findNonexistentModule(self):
+        self.assertEqual(util.findObject('package.doesntexist')[0], False)
+
+    def test_findNonexistentObject(self):
+        self.assertEqual(util.findObject(
+            'goodpackage.test_sample.doesnt')[0], False)
+        self.assertEqual(util.findObject(
+            'goodpackage.test_sample.AlphabetTest.doesntexist')[0], False)
+
+    def test_findObjectExist(self):
+        alpha1 = util.findObject('goodpackage.test_sample.AlphabetTest')
+        from goodpackage import test_sample
+        self.assertEqual(alpha1, (True, test_sample.AlphabetTest))
+
+
+
+class TestRunSequentially(SynchronousTestCase):
+    """
+    Sometimes it is useful to be able to run an arbitrary list of callables,
+    one after the other.
+
+    When some of those callables can return Deferreds, things become complex.
+    """
+
+    def assertDeferredResult(self, deferred, assertFunction, *args, **kwargs):
+        """
+        Call the given assertion function against the current result of a
+        Deferred.
+        """
+        result = []
+        deferred.addCallback(result.append)
+        assertFunction(result[0], *args, **kwargs)
+
+    def test_emptyList(self):
+        """
+        When asked to run an empty list of callables, runSequentially returns a
+        successful Deferred that fires with an empty list.
+        """
+        d = util._runSequentially([])
+        self.assertDeferredResult(d, self.assertEqual, [])
+
+
+    def test_singleSynchronousSuccess(self):
+        """
+        When given a callable that succeeds without returning a Deferred,
+        include the return value in the results list, tagged with a SUCCESS
+        flag.
+        """
+        d = util._runSequentially([lambda: None])
+        self.assertDeferredResult(d, self.assertEqual, [(defer.SUCCESS, None)])
+
+
+    def test_singleSynchronousFailure(self):
+        """
+        When given a callable that raises an exception, include a Failure for
+        that exception in the results list, tagged with a FAILURE flag.
+        """
+        d = util._runSequentially([lambda: self.fail('foo')])
+        def check(results):
+            [(flag, fail)] = results
+            fail.trap(self.failureException)
+            self.assertEqual(fail.getErrorMessage(), 'foo')
+            self.assertEqual(flag, defer.FAILURE)
+        self.assertDeferredResult(d, check)
+
+
+    def test_singleAsynchronousSuccess(self):
+        """
+        When given a callable that returns a successful Deferred, include the
+        result of the Deferred in the results list, tagged with a SUCCESS flag.
+        """
+        d = util._runSequentially([lambda: defer.succeed(None)])
+        self.assertDeferredResult(d, self.assertEqual, [(defer.SUCCESS, None)])
+
+
+    def test_singleAsynchronousFailure(self):
+        """
+        When given a callable that returns a failing Deferred, include the
+        failure in the results list, tagged with a FAILURE flag.
+        """
+        d = util._runSequentially([lambda: defer.fail(ValueError('foo'))])
+        def check(results):
+            [(flag, fail)] = results
+            fail.trap(ValueError)
+            self.assertEqual(fail.getErrorMessage(), 'foo')
+            self.assertEqual(flag, defer.FAILURE)
+        self.assertDeferredResult(d, check)
+
+
+    def test_callablesCalledInOrder(self):
+        """
+        Check that the callables are called in the given order, one after the
+        other.
+        """
+        log = []
+        deferreds = []
+
+        def append(value):
+            d = defer.Deferred()
+            log.append(value)
+            deferreds.append(d)
+            return d
+
+        util._runSequentially([lambda: append('foo'),
+                               lambda: append('bar')])
+
+        # runSequentially should wait until the Deferred has fired before
+        # running the second callable.
+        self.assertEqual(log, ['foo'])
+        deferreds[-1].callback(None)
+        self.assertEqual(log, ['foo', 'bar'])
+
+
+    def test_continuesAfterError(self):
+        """
+        If one of the callables raises an error, then runSequentially continues
+        to run the remaining callables.
+        """
+        d = util._runSequentially([lambda: self.fail('foo'), lambda: 'bar'])
+        def check(results):
+            [(flag1, fail), (flag2, result)] = results
+            fail.trap(self.failureException)
+            self.assertEqual(flag1, defer.FAILURE)
+            self.assertEqual(fail.getErrorMessage(), 'foo')
+            self.assertEqual(flag2, defer.SUCCESS)
+            self.assertEqual(result, 'bar')
+        self.assertDeferredResult(d, check)
+
+
+    def test_stopOnFirstError(self):
+        """
+        If the C{stopOnFirstError} option is passed to C{runSequentially}, then
+        no further callables are called after the first exception is raised.
+        """
+        d = util._runSequentially([lambda: self.fail('foo'), lambda: 'bar'],
+                                  stopOnFirstError=True)
+        def check(results):
+            [(flag1, fail)] = results
+            fail.trap(self.failureException)
+            self.assertEqual(flag1, defer.FAILURE)
+            self.assertEqual(fail.getErrorMessage(), 'foo')
+        self.assertDeferredResult(d, check)
+
+
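+# --- Editorial sketch, not part of upstream Twisted: the (private) helper
+# under test above, driven with purely synchronous callables.
+def _runSequentiallySketch():
+    """
+    Illustrative only: collect the (flag, value) pairs the helper fires with.
+    """
+    results = []
+    d = util._runSequentially([lambda: 'first', lambda: 'second'])
+    d.addCallback(results.append)
+    # With synchronous callables the Deferred has already fired, so results is
+    # [[(defer.SUCCESS, 'first'), (defer.SUCCESS, 'second')]].
+    return results
+
+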
+
+class DirtyReactorAggregateErrorTest(SynchronousTestCase):
+    """
+    Tests for the L{DirtyReactorAggregateError}.
+    """
+
+    def test_formatDelayedCall(self):
+        """
+        Delayed calls are formatted nicely.
+        """
+        error = DirtyReactorAggregateError(["Foo", "bar"])
+        self.assertEqual(str(error),
+                          """\
+Reactor was unclean.
+DelayedCalls: (set twisted.internet.base.DelayedCall.debug = True to debug)
+Foo
+bar""")
+
+
+    def test_formatSelectables(self):
+        """
+        Selectables are formatted nicely.
+        """
+        error = DirtyReactorAggregateError([], ["selectable 1", "selectable 2"])
+        self.assertEqual(str(error),
+                          """\
+Reactor was unclean.
+Selectables:
+selectable 1
+selectable 2""")
+
+
+    def test_formatDelayedCallsAndSelectables(self):
+        """
+        Both delayed calls and selectables can appear in the same error.
+        """
+        error = DirtyReactorAggregateError(["bleck", "Boozo"],
+                                           ["Sel1", "Sel2"])
+        self.assertEqual(str(error),
+                          """\
+Reactor was unclean.
+DelayedCalls: (set twisted.internet.base.DelayedCall.debug = True to debug)
+bleck
+Boozo
+Selectables:
+Sel1
+Sel2""")
+
+
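+# --- Editorial sketch, not part of upstream Twisted: building the aggregate
+# error by hand, as the formatting tests above do.  The argument strings are
+# invented placeholders.
+def _dirtyReactorErrorSketch():
+    """
+    Illustrative only: the string lists delayed calls, then selectables.
+    """
+    error = DirtyReactorAggregateError(
+        ["<DelayedCall (illustrative)>"], ["<Socket (illustrative)>"])
+    return str(error)
+
+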
+
+class StubReactor(object):
+    """
+    A reactor stub which contains enough functionality to be used with the
+    L{_Janitor}.
+
+    @ivar iterations: A list of the arguments passed to L{iterate}.
+    @ivar removeAllCalled: Number of times that L{removeAll} was called.
+    @ivar selectables: The value that will be returned from L{removeAll}.
+    @ivar delayedCalls: The value to return from L{getDelayedCalls}.
+    """
+
+    def __init__(self, delayedCalls, selectables=None):
+        """
+        @param delayedCalls: See L{StubReactor.delayedCalls}.
+        @param selectables: See L{StubReactor.selectables}.
+        """
+        self.delayedCalls = delayedCalls
+        self.iterations = []
+        self.removeAllCalled = 0
+        if not selectables:
+            selectables = []
+        self.selectables = selectables
+
+
+    def iterate(self, timeout=None):
+        """
+        Increment C{self.iterations}.
+        """
+        self.iterations.append(timeout)
+
+
+    def getDelayedCalls(self):
+        """
+        Return C{self.delayedCalls}.
+        """
+        return self.delayedCalls
+
+
+    def removeAll(self):
+        """
+        Increment C{self.removeAllCalled} and return C{self.selectables}.
+        """
+        self.removeAllCalled += 1
+        return self.selectables
+
+
+
+class StubErrorReporter(object):
+    """
+    A subset of L{twisted.trial.itrial.IReporter} which records L{addError}
+    calls.
+
+    @ivar errors: List of two-tuples of (test, error) which were passed to
+        L{addError}.
+    """
+
+    def __init__(self):
+        self.errors = []
+
+
+    def addError(self, test, error):
+        """
+        Record parameters in C{self.errors}.
+        """
+        self.errors.append((test, error))
+
+
+
+class JanitorTests(SynchronousTestCase):
+    """
+    Tests for L{_Janitor}!
+    """
+
+    def test_cleanPendingSpinsReactor(self):
+        """
+        During pending-call cleanup, the reactor will be spun twice with an
+        instant timeout. This is not a requirement; it is only a test for
+        current behavior. Hopefully Trial will eventually not do this kind of
+        reactor stuff.
+        """
+        reactor = StubReactor([])
+        jan = _Janitor(None, None, reactor=reactor)
+        jan._cleanPending()
+        self.assertEqual(reactor.iterations, [0, 0])
+
+
+    def test_cleanPendingCancelsCalls(self):
+        """
+        During pending-call cleanup, the janitor cancels pending timed calls.
+        """
+        def func():
+            return "Lulz"
+        cancelled = []
+        delayedCall = DelayedCall(300, func, (), {},
+                                  cancelled.append, lambda x: None)
+        reactor = StubReactor([delayedCall])
+        jan = _Janitor(None, None, reactor=reactor)
+        jan._cleanPending()
+        self.assertEqual(cancelled, [delayedCall])
+
+
+    def test_cleanPendingReturnsDelayedCallStrings(self):
+        """
+        The Janitor produces string representations of delayed calls from the
+        delayed call cleanup method. It gets the string representations
+        *before* cancelling the calls; this is important because cancelling the
+        call removes critical debugging information from the string
+        representation.
+        """
+        delayedCall = DelayedCall(300, lambda: None, (), {},
+                                  lambda x: None, lambda x: None,
+                                  seconds=lambda: 0)
+        delayedCallString = str(delayedCall)
+        reactor = StubReactor([delayedCall])
+        jan = _Janitor(None, None, reactor=reactor)
+        strings = jan._cleanPending()
+        self.assertEqual(strings, [delayedCallString])
+
+
+    def test_cleanReactorRemovesSelectables(self):
+        """
+        The Janitor will remove selectables during reactor cleanup.
+        """
+        reactor = StubReactor([])
+        jan = _Janitor(None, None, reactor=reactor)
+        jan._cleanReactor()
+        self.assertEqual(reactor.removeAllCalled, 1)
+
+
+    def test_cleanReactorKillsProcesses(self):
+        """
+        The Janitor will kill processes during reactor cleanup.
+        """
+        @implementer(IProcessTransport)
+        class StubProcessTransport(object):
+            """
+            A stub L{IProcessTransport} provider which records signals.
+            @ivar signals: The signals passed to L{signalProcess}.
+            """
+
+            def __init__(self):
+                self.signals = []
+
+            def signalProcess(self, signal):
+                """
+                Append C{signal} to C{self.signals}.
+                """
+                self.signals.append(signal)
+
+        pt = StubProcessTransport()
+        reactor = StubReactor([], [pt])
+        jan = _Janitor(None, None, reactor=reactor)
+        jan._cleanReactor()
+        self.assertEqual(pt.signals, ["KILL"])
+
+
+    def test_cleanReactorReturnsSelectableStrings(self):
+        """
+        The Janitor returns string representations of the selectables that it
+        cleaned up from the reactor cleanup method.
+        """
+        class Selectable(object):
+            """
+            A stub Selectable which only has an interesting string
+            representation.
+            """
+            def __repr__(self):
+                return "(SELECTABLE!)"
+
+        reactor = StubReactor([], [Selectable()])
+        jan = _Janitor(None, None, reactor=reactor)
+        self.assertEqual(jan._cleanReactor(), ["(SELECTABLE!)"])
+
+
+    def test_postCaseCleanupNoErrors(self):
+        """
+        The post-case cleanup method will return True and not call C{addError}
+        on the result if there are no pending calls.
+        """
+        reactor = StubReactor([])
+        test = object()
+        reporter = StubErrorReporter()
+        jan = _Janitor(test, reporter, reactor=reactor)
+        self.assertTrue(jan.postCaseCleanup())
+        self.assertEqual(reporter.errors, [])
+
+
+    def test_postCaseCleanupWithErrors(self):
+        """
+        The post-case cleanup method will return False and call C{addError} on
+        the result with a L{DirtyReactorAggregateError} Failure if there are
+        pending calls.
+        """
+        delayedCall = DelayedCall(300, lambda: None, (), {},
+                                  lambda x: None, lambda x: None,
+                                  seconds=lambda: 0)
+        delayedCallString = str(delayedCall)
+        reactor = StubReactor([delayedCall], [])
+        test = object()
+        reporter = StubErrorReporter()
+        jan = _Janitor(test, reporter, reactor=reactor)
+        self.assertFalse(jan.postCaseCleanup())
+        self.assertEqual(len(reporter.errors), 1)
+        self.assertEqual(reporter.errors[0][1].value.delayedCalls,
+                          [delayedCallString])
+
+
+    def test_postClassCleanupNoErrors(self):
+        """
+        The post-class cleanup method will not call C{addError} on the result
+        if there are no pending calls or selectables.
+        """
+        reactor = StubReactor([])
+        test = object()
+        reporter = StubErrorReporter()
+        jan = _Janitor(test, reporter, reactor=reactor)
+        jan.postClassCleanup()
+        self.assertEqual(reporter.errors, [])
+
+
+    def test_postClassCleanupWithPendingCallErrors(self):
+        """
+        The post-class cleanup method calls C{addError} on the result with a
+        L{DirtyReactorAggregateError} Failure if there are pending calls.
+        """
+        delayedCall = DelayedCall(300, lambda: None, (), {},
+                                  lambda x: None, lambda x: None,
+                                  seconds=lambda: 0)
+        delayedCallString = str(delayedCall)
+        reactor = StubReactor([delayedCall], [])
+        test = object()
+        reporter = StubErrorReporter()
+        jan = _Janitor(test, reporter, reactor=reactor)
+        jan.postClassCleanup()
+        self.assertEqual(len(reporter.errors), 1)
+        self.assertEqual(reporter.errors[0][1].value.delayedCalls,
+                          [delayedCallString])
+
+
+    def test_postClassCleanupWithSelectableErrors(self):
+        """
+        The post-class cleanup method calls C{addError} on the result with a
+        L{DirtyReactorAggregateError} Failure if there are selectables.
+        """
+        selectable = "SELECTABLE HERE"
+        reactor = StubReactor([], [selectable])
+        test = object()
+        reporter = StubErrorReporter()
+        jan = _Janitor(test, reporter, reactor=reactor)
+        jan.postClassCleanup()
+        self.assertEqual(len(reporter.errors), 1)
+        self.assertEqual(reporter.errors[0][1].value.selectables,
+                          [repr(selectable)])
+
+
+
+class RemoveSafelyTests(SynchronousTestCase):
+    """
+    Tests for L{util._removeSafely}.
+    """
+    def test_removeSafelyNoTrialMarker(self):
+        """
+        If a path doesn't contain a node named C{"_trial_marker"}, that path is
+        not removed by L{util._removeSafely} and a L{util._NoTrialMarker}
+        exception is raised instead.
+        """
+        directory = self.mktemp().encode("utf-8")
+        os.mkdir(directory)
+        dirPath = filepath.FilePath(directory)
+        self.assertRaises(util._NoTrialMarker, util._removeSafely, dirPath)
+
+
+    def test_removeSafelyRemoveFailsMoveSucceeds(self):
+        """
+        If an L{OSError} is raised while removing a path in
+        L{util._removeSafely}, an attempt is made to move the path to a new
+        name.
+        """
+        def dummyRemove():
+            """
+            Raise an C{OSError} to emulate the branch of L{util._removeSafely}
+            in which path removal fails.
+            """
+            raise OSError()
+
+        # Patch stdout so we can check the print statements in _removeSafely
+        out = NativeStringIO()
+        self.patch(sys, 'stdout', out)
+
+        # Set up a trial directory with a _trial_marker
+        directory = self.mktemp().encode("utf-8")
+        os.mkdir(directory)
+        dirPath = filepath.FilePath(directory)
+        dirPath.child(b'_trial_marker').touch()
+        # Ensure that path.remove() raises an OSError
+        dirPath.remove = dummyRemove
+
+        util._removeSafely(dirPath)
+        self.assertIn("could not remove FilePath", out.getvalue())
+
+
+    def test_removeSafelyRemoveFailsMoveFails(self):
+        """
+        If an L{OSError} is raised while removing a path in
+        L{util._removeSafely}, an attempt is made to move the path to a new
+        name. If that attempt fails, the L{OSError} is re-raised.
+        """
+        def dummyRemove():
+            """
+            Raise an C{OSError} to emulate the branch of L{util._removeSafely}
+            in which path removal fails.
+            """
+            raise OSError("path removal failed")
+
+        def dummyMoveTo(path):
+            """
+            Raise an C{OSError} to emulate the branch of L{util._removeSafely}
+            in which path movement fails.
+            """
+            raise OSError("path movement failed")
+
+        # Patch stdout so we can check the print statements in _removeSafely
+        out = NativeStringIO()
+        self.patch(sys, 'stdout', out)
+
+        # Set up a trial directory with a _trial_marker
+        directory = self.mktemp().encode("utf-8")
+        os.mkdir(directory)
+        dirPath = filepath.FilePath(directory)
+        dirPath.child(b'_trial_marker').touch()
+
+        # Ensure that path.remove() and path.moveTo() both raise OSErrors
+        dirPath.remove = dummyRemove
+        dirPath.moveTo = dummyMoveTo
+
+        error = self.assertRaises(OSError, util._removeSafely, dirPath)
+        self.assertEqual(str(error), "path movement failed")
+        self.assertIn("could not remove FilePath", out.getvalue())
+
+
+
+class ExcInfoTests(SynchronousTestCase):
+    """
+    Tests for L{excInfoOrFailureToExcInfo}.
+    """
+    def test_excInfo(self):
+        """
+        L{excInfoOrFailureToExcInfo} returns exactly what it is passed, if it is
+        passed a tuple like the one returned by L{sys.exc_info}.
+        """
+        info = (ValueError, ValueError("foo"), None)
+        self.assertTrue(info is excInfoOrFailureToExcInfo(info))
+
+
+    def test_failure(self):
+        """
+        When called with a L{Failure} instance, L{excInfoOrFailureToExcInfo}
+        returns a tuple like the one returned by L{sys.exc_info}, with the
+        elements taken from the type, value, and traceback of the failure.
+        """
+        try:
+            1 / 0
+        except:
+            f = Failure()
+        self.assertEqual((f.type, f.value, f.tb), excInfoOrFailureToExcInfo(f))
+
+
+
+class AcquireAttributeTests(SynchronousTestCase):
+    """
+    Tests for L{acquireAttribute}.
+    """
+    def test_foundOnEarlierObject(self):
+        """
+        The value returned by L{acquireAttribute} is the value of the requested
+        attribute on the first object in the list passed in which has that
+        attribute.
+        """
+        self.value = value = object()
+        self.assertTrue(value is acquireAttribute([self, object()], "value"))
+
+
+    def test_foundOnLaterObject(self):
+        """
+        The same as L{test_foundOnEarlierObject}, but for the case where the 2nd
+        element in the object list has the attribute and the first does not.
+        """
+        self.value = value = object()
+        self.assertTrue(value is acquireAttribute([object(), self], "value"))
+
+
+    def test_notFoundException(self):
+        """
+        If none of the objects passed in the list to L{acquireAttribute} have
+        the requested attribute, L{AttributeError} is raised.
+        """
+        self.assertRaises(AttributeError, acquireAttribute, [object()], "foo")
+
+
+    def test_notFoundDefault(self):
+        """
+        If none of the objects passed in the list to L{acquireAttribute} have
+        the requested attribute and a default value is given, the default value
+        is returned.
+        """
+        default = object()
+        self.assertTrue(default is acquireAttribute([object()], "foo", default))
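+
+
+
+# --- Editorial sketch, not part of upstream Twisted: acquireAttribute as a
+# first-found-wins lookup across several objects.
+def _acquireAttributeSketch():
+    """
+    Illustrative only.
+    """
+    class First(object):
+        colour = "red"
+
+    class Second(object):
+        colour = "blue"
+
+    # "red": the first object in the list that has the attribute wins; the
+    # default is only used if none of them do.
+    return acquireAttribute([First(), Second()], "colour", "default")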
diff --git a/ThirdParty/Twisted/twisted/trial/test/test_warning.py b/ThirdParty/Twisted/twisted/trial/test/test_warning.py
new file mode 100644
index 0000000..9192648
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/test_warning.py
@@ -0,0 +1,491 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for Trial's interaction with the Python warning system.
+"""
+
+from __future__ import division, absolute_import
+
+import sys, warnings
+
+from unittest import TestResult
+
+from twisted.python.compat import NativeStringIO as StringIO
+from twisted.python.filepath import FilePath
+from twisted.trial.unittest import (
+    SynchronousTestCase, _collectWarnings, _setWarningRegistryToNone)
+
+class Mask(object):
+    """
+    Hide a test case definition from trial's automatic discovery mechanism.
+    """
+    class MockTests(SynchronousTestCase):
+        """
+        A test case which is used by L{FlushWarningsTests} to verify behavior
+        which cannot be verified by code inside a single test method.
+        """
+        message = "some warning text"
+        category = UserWarning
+
+        def test_unflushed(self):
+            """
+            Generate a warning and don't flush it.
+            """
+            warnings.warn(self.message, self.category)
+
+
+        def test_flushed(self):
+            """
+            Generate a warning and flush it.
+            """
+            warnings.warn(self.message, self.category)
+            self.assertEqual(len(self.flushWarnings()), 1)
+
+
+
+class FlushWarningsTests(SynchronousTestCase):
+    """
+    Tests for C{flushWarnings}, an API for examining the warnings
+    emitted so far in a test.
+    """
+
+    def assertDictSubset(self, set, subset):
+        """
+        Assert that all the keys present in C{subset} are also present in
+        C{set} and that the corresponding values are equal.
+        """
+        for k, v in subset.items():
+            self.assertEqual(set[k], v)
+
+
+    def assertDictSubsets(self, sets, subsets):
+        """
+        For each pair of corresponding elements in C{sets} and C{subsets},
+        assert that the element from C{subsets} is a subset of the element from
+        C{sets}.
+        """
+        self.assertEqual(len(sets), len(subsets))
+        for a, b in zip(sets, subsets):
+            self.assertDictSubset(a, b)
+
+
+    def test_none(self):
+        """
+        If no warnings are emitted by a test, C{flushWarnings} returns an empty
+        list.
+        """
+        self.assertEqual(self.flushWarnings(), [])
+
+
+    def test_several(self):
+        """
+        If several warnings are emitted by a test, C{flushWarnings} returns a
+        list containing all of them.
+        """
+        firstMessage = "first warning message"
+        firstCategory = UserWarning
+        warnings.warn(message=firstMessage, category=firstCategory)
+
+        secondMessage = "second warning message"
+        secondCategory = RuntimeWarning
+        warnings.warn(message=secondMessage, category=secondCategory)
+
+        self.assertDictSubsets(
+            self.flushWarnings(),
+            [{'category': firstCategory, 'message': firstMessage},
+             {'category': secondCategory, 'message': secondMessage}])
+
+
+    def test_repeated(self):
+        """
+        The same warning triggered twice from the same place is included twice
+        in the list returned by C{flushWarnings}.
+        """
+        message = "the message"
+        category = RuntimeWarning
+        for i in range(2):
+            warnings.warn(message=message, category=category)
+
+        self.assertDictSubsets(
+            self.flushWarnings(),
+            [{'category': category, 'message': message}] * 2)
+
+
+    def test_cleared(self):
+        """
+        After a particular warning event has been returned by C{flushWarnings},
+        it is not returned by subsequent calls.
+        """
+        message = "the message"
+        category = RuntimeWarning
+        warnings.warn(message=message, category=category)
+        self.assertDictSubsets(
+            self.flushWarnings(),
+            [{'category': category, 'message': message}])
+        self.assertEqual(self.flushWarnings(), [])
+
+
+    def test_unflushed(self):
+        """
+        Any warnings emitted by a test which are not flushed are emitted to the
+        Python warning system.
+        """
+        result = TestResult()
+        case = Mask.MockTests('test_unflushed')
+        case.run(result)
+        warningsShown = self.flushWarnings([Mask.MockTests.test_unflushed])
+        self.assertEqual(warningsShown[0]['message'], 'some warning text')
+        self.assertIdentical(warningsShown[0]['category'], UserWarning)
+
+        where = type(case).test_unflushed.__code__
+        filename = where.co_filename
+        # If someone edits MockTests.test_unflushed, the value added to
+        # firstlineno might need to change.
+        lineno = where.co_firstlineno + 4
+
+        self.assertEqual(warningsShown[0]['filename'], filename)
+        self.assertEqual(warningsShown[0]['lineno'], lineno)
+
+        self.assertEqual(len(warningsShown), 1)
+
+
+    def test_flushed(self):
+        """
+        Any warnings emitted by a test which are flushed are not emitted to the
+        Python warning system.
+        """
+        result = TestResult()
+        case = Mask.MockTests('test_flushed')
+        output = StringIO()
+        monkey = self.patch(sys, 'stdout', output)
+        case.run(result)
+        monkey.restore()
+        self.assertEqual(output.getvalue(), "")
+
+
+    def test_warningsConfiguredAsErrors(self):
+        """
+        If a warnings filter has been installed which turns warnings into
+        exceptions, tests have an error added to the reporter for them for each
+        unflushed warning.
+        """
+        class CustomWarning(Warning):
+            pass
+
+        result = TestResult()
+        case = Mask.MockTests('test_unflushed')
+        case.category = CustomWarning
+
+        originalWarnings = warnings.filters[:]
+        try:
+            warnings.simplefilter('error')
+            case.run(result)
+            self.assertEqual(len(result.errors), 1)
+            self.assertIdentical(result.errors[0][0], case)
+            self.assertTrue(
+                # Different python versions differ in whether they report the
+                # fully qualified class name or just the class name.
+                result.errors[0][1].splitlines()[-1].endswith(
+                    "CustomWarning: some warning text"))
+        finally:
+            warnings.filters[:] = originalWarnings
+
+
+    def test_flushedWarningsConfiguredAsErrors(self):
+        """
+        If a warnings filter has been installed which turns warnings into
+        exceptions, tests which emit those warnings but flush them do not have
+        an error added to the reporter.
+        """
+        class CustomWarning(Warning):
+            pass
+
+        result = TestResult()
+        case = Mask.MockTests('test_flushed')
+        case.category = CustomWarning
+
+        originalWarnings = warnings.filters[:]
+        try:
+            warnings.simplefilter('error')
+            case.run(result)
+            self.assertEqual(result.errors, [])
+        finally:
+            warnings.filters[:] = originalWarnings
+
+
+    def test_multipleFlushes(self):
+        """
+        Any warnings emitted after a call to C{flushWarnings} can be flushed by
+        another call to C{flushWarnings}.
+        """
+        warnings.warn("first message")
+        self.assertEqual(len(self.flushWarnings()), 1)
+        warnings.warn("second message")
+        self.assertEqual(len(self.flushWarnings()), 1)
+
+
+    def test_filterOnOffendingFunction(self):
+        """
+        The list returned by C{flushWarnings} includes only those
+        warnings which refer to the source of the function passed as the value
+        for C{offendingFunction}, if a value is passed for that parameter.
+        """
+        firstMessage = "first warning text"
+        firstCategory = UserWarning
+        def one():
+            warnings.warn(firstMessage, firstCategory, stacklevel=1)
+
+        secondMessage = "some text"
+        secondCategory = RuntimeWarning
+        def two():
+            warnings.warn(secondMessage, secondCategory, stacklevel=1)
+
+        one()
+        two()
+
+        self.assertDictSubsets(
+            self.flushWarnings(offendingFunctions=[one]),
+            [{'category': firstCategory, 'message': firstMessage}])
+        self.assertDictSubsets(
+            self.flushWarnings(offendingFunctions=[two]),
+            [{'category': secondCategory, 'message': secondMessage}])
+
+
+    def test_functionBoundaries(self):
+        """
+        Verify that warnings emitted at the very edges of a function are still
+        determined to be emitted from that function.
+        """
+        def warner():
+            warnings.warn("first line warning")
+            warnings.warn("internal line warning")
+            warnings.warn("last line warning")
+
+        warner()
+        self.assertEqual(
+            len(self.flushWarnings(offendingFunctions=[warner])), 3)
+
+
+    def test_invalidFilter(self):
+        """
+        If an object which is neither a function nor a method is included in the
+        C{offendingFunctions} list, C{flushWarnings} raises L{ValueError}.  Such
+        a call flushes no warnings.
+        """
+        warnings.warn("oh no")
+        self.assertRaises(ValueError, self.flushWarnings, [None])
+        self.assertEqual(len(self.flushWarnings()), 1)
+
+
+    def test_missingSource(self):
+        """
+        Warnings emitted by a function the source code of which is not
+        available can still be flushed.
+        """
+        package = FilePath(self.mktemp().encode('utf-8')).child(b'twisted_private_helper')
+        package.makedirs()
+        package.child(b'__init__.py').setContent(b'')
+        package.child(b'missingsourcefile.py').setContent(b'''
+import warnings
+def foo():
+    warnings.warn("oh no")
+''')
+        pathEntry = package.parent().path.decode('utf-8')
+        sys.path.insert(0, pathEntry)
+        self.addCleanup(sys.path.remove, pathEntry)
+        from twisted_private_helper import missingsourcefile
+        self.addCleanup(sys.modules.pop, 'twisted_private_helper')
+        self.addCleanup(sys.modules.pop, missingsourcefile.__name__)
+        package.child(b'missingsourcefile.py').remove()
+
+        missingsourcefile.foo()
+        self.assertEqual(len(self.flushWarnings([missingsourcefile.foo])), 1)
+
+
+    def test_renamedSource(self):
+        """
+        Warnings emitted by a function defined in a file which has been renamed
+        since it was initially compiled can still be flushed.
+
+        This is testing the code which specifically supports working around the
+        unfortunate behavior of CPython to write a .py source file name into
+        the .pyc files it generates and then trust that it is correct in
+        various places.  If source files are renamed, .pyc files may not be
+        regenerated, but they will contain incorrect filenames.
+        """
+        package = FilePath(self.mktemp().encode('utf-8')).child(b'twisted_private_helper')
+        package.makedirs()
+        package.child(b'__init__.py').setContent(b'')
+        package.child(b'module.py').setContent(b'''
+import warnings
+def foo():
+    warnings.warn("oh no")
+''')
+        pathEntry = package.parent().path.decode('utf-8')
+        sys.path.insert(0, pathEntry)
+        self.addCleanup(sys.path.remove, pathEntry)
+
+        # Import it to cause pycs to be generated
+        from twisted_private_helper import module
+
+        # Clean up the state resulting from that import; we're not going to use
+        # this module, so it should go away.
+        del sys.modules['twisted_private_helper']
+        del sys.modules[module.__name__]
+
+        # Some Python versions have extra state related to the just
+        # imported/renamed package.  Clean it up too.  See also
+        # http://bugs.python.org/issue15912
+        try:
+            from importlib import invalidate_caches
+        except ImportError:
+            pass
+        else:
+            invalidate_caches()
+
+        # Rename the source directory
+        package.moveTo(package.sibling(b'twisted_renamed_helper'))
+
+        # Import the newly renamed version
+        from twisted_renamed_helper import module
+        self.addCleanup(sys.modules.pop, 'twisted_renamed_helper')
+        self.addCleanup(sys.modules.pop, module.__name__)
+
+        # Generate the warning
+        module.foo()
+
+        # Flush it
+        self.assertEqual(len(self.flushWarnings([module.foo])), 1)
+
+
+
+class FakeWarning(Warning):
+    pass
+
+
+
+class CollectWarningsTests(SynchronousTestCase):
+    """
+    Tests for L{_collectWarnings}.
+    """
+    def test_callsObserver(self):
+        """
+        L{_collectWarnings} calls the observer with each emitted warning.
+        """
+        firstMessage = "dummy calls observer warning"
+        secondMessage = firstMessage[::-1]
+        events = []
+        def f():
+            events.append('call')
+            warnings.warn(firstMessage)
+            warnings.warn(secondMessage)
+            events.append('returning')
+
+        _collectWarnings(events.append, f)
+
+        self.assertEqual(events[0], 'call')
+        self.assertEqual(events[1].message, firstMessage)
+        self.assertEqual(events[2].message, secondMessage)
+        self.assertEqual(events[3], 'returning')
+        self.assertEqual(len(events), 4)
+
+
+    def test_suppresses(self):
+        """
+        Any warnings emitted by a call to a function passed to
+        L{_collectWarnings} are not actually emitted to the warning system.
+        """
+        output = StringIO()
+        self.patch(sys, 'stdout', output)
+        _collectWarnings(lambda x: None, warnings.warn, "text")
+        self.assertEqual(output.getvalue(), "")
+
+
+    def test_callsFunction(self):
+        """
+        L{_collectWarnings} returns the result of calling the callable passed to
+        it with the parameters given.
+        """
+        arguments = []
+        value = object()
+
+        def f(*args, **kwargs):
+            arguments.append((args, kwargs))
+            return value
+
+        result = _collectWarnings(lambda x: None, f, 1, 'a', b=2, c='d')
+        self.assertEqual(arguments, [((1, 'a'), {'b': 2, 'c': 'd'})])
+        self.assertIdentical(result, value)
+
+
+    def test_duplicateWarningCollected(self):
+        """
+        Subsequent emissions of a warning from a particular source site can be
+        collected by L{_collectWarnings}.  In particular, the per-module
+        emitted-warning cache should be bypassed (I{__warningregistry__}).
+        """
+        # Make sure the worst case is tested: if __warningregistry__ isn't in a
+        # module's globals, then the warning system will add it and start using
+        # it to avoid emitting duplicate warnings.  Delete __warningregistry__
+        # to ensure that even modules which are first imported while a test is
+        # running still interact properly with the warning system.
+        global __warningregistry__
+        del __warningregistry__
+
+        def f():
+            warnings.warn("foo")
+        warnings.simplefilter('default')
+        f()
+        events = []
+        _collectWarnings(events.append, f)
+        self.assertEqual(len(events), 1)
+        self.assertEqual(events[0].message, "foo")
+        self.assertEqual(len(self.flushWarnings()), 1)
+
+
+    def test_immutableObject(self):
+        """
+        L{_collectWarnings}'s behavior is not altered by the presence of an
+        object which cannot have attributes set on it as a value in
+        C{sys.modules}.
+        """
+        key = object()
+        sys.modules[key] = key
+        self.addCleanup(sys.modules.pop, key)
+        self.test_duplicateWarningCollected()
+
+
+    def test_setWarningRegistryChangeWhileIterating(self):
+        """
+        If the dictionary passed to L{_setWarningRegistryToNone} changes size
+        partway through the process, C{_setWarningRegistryToNone} continues to
+        set C{__warningregistry__} to C{None} on the rest of the values anyway.
+
+        This might be caused by C{sys.modules} containing something that's not
+        really a module and imports things on setattr.  py.test does this, as
+        does L{twisted.python.deprecate.deprecatedModuleAttribute}.
+        """
+        d = {}
+
+        class A(object):
+            def __init__(self, key):
+                self.__dict__['_key'] = key
+
+            def __setattr__(self, name, value):
+                d[self._key] = None
+
+        key1 = object()
+        key2 = object()
+        d[key1] = A(key2)
+
+        key3 = object()
+        key4 = object()
+        d[key3] = A(key4)
+
+        _setWarningRegistryToNone(d)
+
+        # If both key2 and key4 were added, then both A instances were
+        # processed.
+        self.assertEqual(set([key1, key2, key3, key4]), set(d.keys()))
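The behaviour covered by FlushWarningsTests above boils down to the user-facing pattern sketched here (the test class and message are made up; the dictionary keys match those asserted in the tests):

    import warnings
    from twisted.trial.unittest import SynchronousTestCase

    class FlushExampleTests(SynchronousTestCase):
        def test_deprecatedCall(self):
            # Emit a warning, then claim it so trial does not re-report it.
            warnings.warn("do not use this", DeprecationWarning)
            [flushed] = self.flushWarnings()
            self.assertEqual(flushed['message'], "do not use this")
            self.assertEqual(flushed['category'], DeprecationWarning)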
diff --git a/ThirdParty/Twisted/twisted/trial/test/weird.py b/ThirdParty/Twisted/twisted/trial/test/weird.py
new file mode 100644
index 0000000..d7350e9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/test/weird.py
@@ -0,0 +1,23 @@
+from __future__ import division, absolute_import
+
+import unittest
+
+from twisted.internet import defer
+
+# Used in test_tests.TestUnhandledDeferred
+
+class TestBleeding(unittest.TestCase):
+    """This test creates an unhandled Deferred and leaves it in a cycle.
+
+    The Deferred is left in a cycle so that the garbage collector won't pick it
+    up immediately.  We were having some problems where unhandled Deferreds in
+    one test were failing random other tests. (See #1507, #1213)
+    """
+    def test_unhandledDeferred(self):
+        try:
+            1/0
+        except ZeroDivisionError:
+            f = defer.fail()
+        # these two lines create the cycle. don't remove them
+        l = [f]
+        l.append(l)
diff --git a/ThirdParty/Twisted/twisted/trial/unittest.py b/ThirdParty/Twisted/twisted/trial/unittest.py
new file mode 100644
index 0000000..7a9cf31
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/unittest.py
@@ -0,0 +1,42 @@
+# -*- test-case-name: twisted.trial.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Things likely to be used by writers of unit tests.
+"""
+
+from __future__ import division, absolute_import
+
+# Define the public API from the two implementation modules
+from twisted.trial._synctest import (
+    FailTest, SkipTest, SynchronousTestCase, PyUnitResultAdapter, Todo,
+    makeTodo)
+from twisted.trial._asynctest import TestCase
+
+from twisted.python.compat import _PY3
+
+if not _PY3:
+    from twisted.trial._asyncrunner import (
+        TestSuite, TestDecorator, suiteVisit,
+        decorate)
+    from twisted.trial._asyncrunner import (
+        _ForceGarbageCollectionDecorator, _iterateTests, _clearSuite)
+
+# Further obscure the origins of these objects, to reduce surprise (and this is
+# what the values were before code got shuffled around between files, but was
+# otherwise unchanged).
+FailTest.__module__ = SkipTest.__module__ = __name__
+
+
+# Grab some implementation details so tests can continue to import them from
+# here, rather than being concerned with which implementation module they come
+# from (is this a good idea?)
+from twisted.trial._synctest import (
+    _LogObserver, _logObserver, _collectWarnings, _setWarningRegistryToNone)
+
+
+__all__ = [
+    'FailTest', 'SkipTest', 'SynchronousTestCase', 'Todo', 'makeTodo',
+
+    'TestCase', 'TestSuite', 'suiteVisit', 'decorate']
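Since this module only re-exports trial's public API, a minimal example of what a consumer imports from it (the test class is illustrative; such a module is normally run with the trial command-line tool):

    from twisted.trial import unittest

    class ArithmeticTests(unittest.SynchronousTestCase):
        """
        Runs synchronously; use unittest.TestCase for Deferred-returning tests.
        """
        def test_addition(self):
            self.assertEqual(2 + 2, 4)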
diff --git a/ThirdParty/Twisted/twisted/trial/util.py b/ThirdParty/Twisted/twisted/trial/util.py
new file mode 100644
index 0000000..c642ee5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/trial/util.py
@@ -0,0 +1,458 @@
+# -*- test-case-name: twisted.trial.test.test_util -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+
+"""
+A collection of utility functions and classes, used internally by Trial.
+
+This code is for Trial's internal use.  Do NOT use this code if you are writing
+tests.  It is subject to change at the Trial maintainer's whim.  There is
+nothing here in this module for you to use unless you are maintaining Trial.
+
+Any non-Trial Twisted code that uses this module will be shot.
+
+Maintainer: Jonathan Lange
+
+@var DEFAULT_TIMEOUT_DURATION: The default timeout which will be applied to
+    asynchronous (i.e., Deferred-returning) test methods, in seconds.
+"""
+
+from __future__ import division, absolute_import, print_function
+
+import traceback, sys
+from random import randrange
+
+from twisted.python.compat import _PY3, reraise
+from twisted.internet import defer, _utilspy3 as utils, interfaces
+from twisted.python.failure import Failure
+from twisted.python import deprecate, versions
+from twisted.python.filepath import FilePath
+
+__all__ = [
+    'DEFAULT_TIMEOUT_DURATION',
+
+    'excInfoOrFailureToExcInfo', 'suppress', 'acquireAttribute']
+
+DEFAULT_TIMEOUT = object()
+DEFAULT_TIMEOUT_DURATION = 120.0
+
+
+
+class DirtyReactorAggregateError(Exception):
+    """
+    Passed to L{twisted.trial.itrial.IReporter.addError} when the reactor is
+    left in an unclean state after a test.
+
+    @ivar delayedCalls: The L{DelayedCall<twisted.internet.base.DelayedCall>}
+        objects which weren't cleaned up.
+    @ivar selectables: The selectables which weren't cleaned up.
+    """
+
+    def __init__(self, delayedCalls, selectables=None):
+        self.delayedCalls = delayedCalls
+        self.selectables = selectables
+
+    def __str__(self):
+        """
+        Return a multi-line message describing all of the unclean state.
+        """
+        msg = "Reactor was unclean."
+        if self.delayedCalls:
+            msg += ("\nDelayedCalls: (set "
+                    "twisted.internet.base.DelayedCall.debug = True to "
+                    "debug)\n")
+            msg += "\n".join(map(str, self.delayedCalls))
+        if self.selectables:
+            msg += "\nSelectables:\n"
+            msg += "\n".join(map(str, self.selectables))
+        return msg
+
+
+
+class _Janitor(object):
+    """
+    The guy that cleans up after you.
+
+    @ivar test: The L{TestCase} to report errors about.
+    @ivar result: The L{IReporter} to report errors to.
+    @ivar reactor: The reactor to use. If None, the global reactor
+        will be used.
+    """
+    def __init__(self, test, result, reactor=None):
+        """
+        @param test: See L{_Janitor.test}.
+        @param result: See L{_Janitor.result}.
+        @param reactor: See L{_Janitor.reactor}.
+        """
+        self.test = test
+        self.result = result
+        self.reactor = reactor
+
+
+    def postCaseCleanup(self):
+        """
+        Called by L{unittest.TestCase} after a test to catch any logged errors
+        or pending L{DelayedCall<twisted.internet.base.DelayedCall>}s.
+        """
+        calls = self._cleanPending()
+        if calls:
+            aggregate = DirtyReactorAggregateError(calls)
+            self.result.addError(self.test, Failure(aggregate))
+            return False
+        return True
+
+
+    def postClassCleanup(self):
+        """
+        Called by L{unittest.TestCase} after the last test in a C{TestCase}
+        subclass. Ensures the reactor is clean by murdering the threadpool,
+        catching any pending
+        L{DelayedCall<twisted.internet.base.DelayedCall>}s, open sockets etc.
+        """
+        selectables = self._cleanReactor()
+        calls = self._cleanPending()
+        if selectables or calls:
+            aggregate = DirtyReactorAggregateError(calls, selectables)
+            self.result.addError(self.test, Failure(aggregate))
+        self._cleanThreads()
+
+
+    def _getReactor(self):
+        """
+        Get either the passed-in reactor or the global reactor.
+        """
+        if self.reactor is not None:
+            reactor = self.reactor
+        else:
+            from twisted.internet import reactor
+        return reactor
+
+
+    def _cleanPending(self):
+        """
+        Cancel all pending calls and return their string representations.
+        """
+        reactor = self._getReactor()
+
+        # flush short-range timers
+        reactor.iterate(0)
+        reactor.iterate(0)
+
+        delayedCallStrings = []
+        for p in reactor.getDelayedCalls():
+            # Stringify before cancelling so delayedString is always bound,
+            # even in the (unexpected) inactive branch below.
+            delayedString = str(p)
+            if p.active():
+                p.cancel()
+            else:
+                print("WEIRDNESS! pending timed call not active!")
+            delayedCallStrings.append(delayedString)
+        return delayedCallStrings
+    _cleanPending = utils.suppressWarnings(
+        _cleanPending, (('ignore',), {'category': DeprecationWarning,
+                                      'message':
+                                      r'reactor\.iterate cannot be used.*'}))
+
+    def _cleanThreads(self):
+        reactor = self._getReactor()
+        if interfaces.IReactorThreads.providedBy(reactor):
+            if reactor.threadpool is not None:
+                # Stop the threadpool now so that a new one is created. 
+                # This improves test isolation somewhat (although this is a
+                # post class cleanup hook, so it's only isolating classes
+                # from each other, not methods from each other).
+                reactor._stopThreadPool()
+
+    def _cleanReactor(self):
+        """
+        Remove all selectables from the reactor, kill any of them that were
+        processes, and return their string representation.
+        """
+        reactor = self._getReactor()
+        selectableStrings = []
+        for sel in reactor.removeAll():
+            if interfaces.IProcessTransport.providedBy(sel):
+                sel.signalProcess('KILL')
+            selectableStrings.append(repr(sel))
+        return selectableStrings
+
+
+
+_DEFAULT = object()
+def acquireAttribute(objects, attr, default=_DEFAULT):
+    """
+    Go through the list 'objects' sequentially until we find one which has
+    attribute 'attr', then return the value of that attribute.  If not found,
+    return 'default' if set, otherwise, raise AttributeError.
+    """
+    for obj in objects:
+        if hasattr(obj, attr):
+            return getattr(obj, attr)
+    if default is not _DEFAULT:
+        return default
+    raise AttributeError('attribute %r not found in %r' % (attr, objects))
+
+
+
+def excInfoOrFailureToExcInfo(err):
+    """
+    Coerce a Failure to an _exc_info, if err is a Failure.
+
+    @param err: Either a tuple such as returned by L{sys.exc_info} or a
+        L{Failure} object.
+    @return: A tuple like the one returned by L{sys.exc_info}. e.g.
+        C{exception_type, exception_object, traceback_object}.
+    """
+    if isinstance(err, Failure):
+        # Unwrap the Failure into a exc_info tuple.
+        err = (err.type, err.value, err.getTracebackObject())
+    return err
+
+
+
+def suppress(action='ignore', **kwarg):
+    """
+    Sets up the .suppress tuple properly; pass options to this method as you
+    would to the stdlib warnings.filterwarnings().
+
+    So, to use this with a .suppress magic attribute you would do the
+    following:
+
+      >>> from twisted.trial import unittest, util
+      >>> import warnings
+      >>>
+      >>> class TestFoo(unittest.TestCase):
+      ...     def testFooBar(self):
+      ...         warnings.warn("i am deprecated", DeprecationWarning)
+      ...     testFooBar.suppress = [util.suppress(message='i am deprecated')]
+      ...
+      >>>
+
+    Note that as with the todo and timeout attributes: the module level
+    attribute acts as a default for the class attribute which acts as a default
+    for the method attribute. The suppress attribute can be overridden at any
+    level by specifying C{.suppress = []}
+    """
+    return ((action,), kwarg)
+
+
+
+# This should be deleted, and replaced with twisted.application's code; see
+# #6016:
+def profiled(f, outputFile):
+    def _(*args, **kwargs):
+        if sys.version_info[0:2] != (2, 4):
+            import profile
+            prof = profile.Profile()
+            try:
+                result = prof.runcall(f, *args, **kwargs)
+                prof.dump_stats(outputFile)
+            except SystemExit:
+                pass
+            prof.print_stats()
+            return result
+        else: # use hotshot, profile is broken in 2.4
+            import hotshot.stats
+            prof = hotshot.Profile(outputFile)
+            try:
+                return prof.runcall(f, *args, **kwargs)
+            finally:
+                stats = hotshot.stats.load(outputFile)
+                stats.strip_dirs()
+                stats.sort_stats('cum')   # 'time'
+                stats.print_stats(100)
+    return _
+
+
+def getPythonContainers(meth):
+    """Walk up the Python tree from method 'meth', finding its class, its module
+    and all containing packages."""
+    containers = []
+    containers.append(meth.im_class)
+    moduleName = meth.im_class.__module__
+    while moduleName is not None:
+        module = sys.modules.get(moduleName, None)
+        if module is None:
+            module = __import__(moduleName)
+        containers.append(module)
+        moduleName = getattr(module, '__module__', None)
+    return containers
+
+deprecate.deprecatedModuleAttribute(
+    versions.Version("Twisted", 12, 3, 0),
+    "This function never worked correctly.  Implement lookup on your own.",
+    __name__, "getPythonContainers")
+
+
+deprecate.deprecatedModuleAttribute(
+    versions.Version("Twisted", 10, 1, 0),
+    "Please use twisted.python.reflect.namedAny instead.",
+    __name__, "findObject")
+
+
+
+def findObject(name):
+    """Get a fully-named package, module, module-global object or attribute.
+    Forked from twisted.python.reflect.namedAny.
+
+    Returns a tuple of (bool, obj).  If bool is True, the named object exists
+    and is returned as obj.  If bool is False, the named object does not exist
+    and the value of obj is unspecified.
+    """
+    names = name.split('.')
+    topLevelPackage = None
+    moduleNames = names[:]
+    while not topLevelPackage:
+        trialname = '.'.join(moduleNames)
+        if len(trialname) == 0:
+            return (False, None)
+        try:
+            topLevelPackage = __import__(trialname)
+        except ImportError:
+            # if the ImportError happened in the module being imported,
+            # this is a failure that should be handed to our caller.
+            # count stack frames to tell the difference.
+            exc_info = sys.exc_info()
+            if len(traceback.extract_tb(exc_info[2])) > 1:
+                try:
+                    # Clean up garbage left in sys.modules.
+                    del sys.modules[trialname]
+                except KeyError:
+                    # Python 2.4 has fixed this.  Yay!
+                    pass
+                reraise(exc_info[1], exc_info[2])
+            moduleNames.pop()
+    obj = topLevelPackage
+    for n in names[1:]:
+        try:
+            obj = getattr(obj, n)
+        except AttributeError:
+            return (False, obj)
+    return (True, obj)
+
+
+
+def _runSequentially(callables, stopOnFirstError=False):
+    """
+    Run the given callables one after the other. If a callable returns a
+    Deferred, wait until it has finished before running the next callable.
+
+    @param callables: An iterable of callables that take no parameters.
+
+    @param stopOnFirstError: If True, then stop running callables as soon as
+        one raises an exception or fires an errback. False by default.
+
+    @return: A L{Deferred} that fires a list of C{(flag, value)} tuples. Each
+        tuple will be either C{(SUCCESS, <return value>)} or C{(FAILURE,
+        <Failure>)}.
+    """
+    results = []
+    for f in callables:
+        d = defer.maybeDeferred(f)
+        thing = defer.waitForDeferred(d)
+        yield thing
+        try:
+            results.append((defer.SUCCESS, thing.getResult()))
+        except:
+            results.append((defer.FAILURE, Failure()))
+            if stopOnFirstError:
+                break
+    yield results
+_runSequentially = defer.deferredGenerator(_runSequentially)
+
+
+
+class _NoTrialMarker(Exception):
+    """
+    No trial marker file could be found.
+
+    Raised when trial attempts to remove a trial temporary working directory
+    that does not contain a marker file.
+    """
+
+
+
+def _removeSafely(path):
+    """
+    Safely remove a path, recursively.
+
+    If C{path} does not contain a node named C{_trial_marker}, a
+    L{_NoTrialMarker} exception is raised and the path is not removed.
+    """
+    if not path.child(b'_trial_marker').exists():
+        raise _NoTrialMarker(
+            '%r is not a trial temporary path, refusing to remove it'
+            % (path,))
+    try:
+        path.remove()
+    except OSError as e:
+        print ("could not remove %r, caught OSError [Errno %s]: %s"
+               % (path, e.errno, e.strerror))
+        try:
+            newPath = FilePath(b'_trial_temp_old' +
+                               str(randrange(10000000)).encode("utf-8"))
+            path.moveTo(newPath)
+        except OSError as e:
+            print ("could not rename path, caught OSError [Errno %s]: %s"
+                   % (e.errno, e.strerror))
+            raise
+
+
+
+class _WorkingDirectoryBusy(Exception):
+    """
+    A working directory was specified to the runner, but another test run is
+    currently using that directory.
+    """
+
+
+
+def _unusedTestDirectory(base):
+    """
+    Find an unused directory named similarly to C{base}.
+
+    Once a directory is found, it will be locked and a marker dropped into it to
+    identify it as a trial temporary directory.
+
+    @param base: A template path for the discovery process.  If this exact
+        path cannot be used, a path which varies only in a suffix of the
+        basename will be used instead.
+    @type base: L{FilePath}
+
+    @return: A two-tuple.  The first element is a L{FilePath} representing the
+        directory which was found and created.  The second element is a locked
+        L{FilesystemLock<twisted.python.lockfile.FilesystemLock>}.  Another
+        call to C{_unusedTestDirectory} will not be able to reuse the
+        same name until the lock is released, either explicitly or by this
+        process exiting.
+    """
+    from twisted.python.lockfile import FilesystemLock
+    counter = 0
+    while True:
+        if counter:
+            testdir = base.sibling('%s-%d' % (base.basename(), counter))
+        else:
+            testdir = base
+
+        testDirLock = FilesystemLock(testdir.path + '.lock')
+        if testDirLock.lock():
+            # It is not in use
+            if testdir.exists():
+                # It exists though - delete it
+                _removeSafely(testdir)
+
+            # Create it anew and mark it as ours so the next _removeSafely on it
+            # succeeds.
+            testdir.makedirs()
+            testdir.child('_trial_marker').setContent('')
+            return testdir, testDirLock
+        else:
+            # It is in use
+            if base.basename() == '_trial_temp':
+                counter += 1
+            else:
+                raise _WorkingDirectoryBusy()
+
+# Remove this, and move lockfile import, after ticket #5960 is resolved:
+if _PY3:
+    del _unusedTestDirectory
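As a rough illustration of the _runSequentially contract described in its docstring (it is a private helper, so this sketch assumes nothing beyond the signature above; the callables are made up):

    from twisted.internet import defer
    from twisted.trial.util import _runSequentially

    def ok():
        return "fine"

    def boom():
        raise RuntimeError("boom")

    d = _runSequentially([ok, boom])
    # Fires with [(defer.SUCCESS, 'fine'), (defer.FAILURE, <Failure ...>)];
    # with stopOnFirstError=True it would stop after the first failure.
    d.addCallback(lambda results: [flag for flag, _ in results])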
diff --git a/ThirdParty/Twisted/twisted/web/__init__.py b/ThirdParty/Twisted/twisted/web/__init__.py
new file mode 100644
index 0000000..3de93da
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/__init__.py
@@ -0,0 +1,15 @@
+# -*- test-case-name: twisted.web.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Twisted Web: a L{web server<twisted.web.server>} (including an
+L{HTTP implementation<twisted.web.http>} and a
+L{resource model<twisted.web.resource>}) and
+a L{web client<twisted.web.client>}.
+"""
+
+from twisted.web._version import version
+
+__version__ = version.short()
+
diff --git a/ThirdParty/Twisted/twisted/web/_auth/__init__.py b/ThirdParty/Twisted/twisted/web/_auth/__init__.py
new file mode 100644
index 0000000..6a58870
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_auth/__init__.py
@@ -0,0 +1,7 @@
+# -*- test-case-name: twisted.web.test.test_httpauth -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+HTTP header-based authentication migrated from web2
+"""
diff --git a/ThirdParty/Twisted/twisted/web/_auth/basic.py b/ThirdParty/Twisted/twisted/web/_auth/basic.py
new file mode 100644
index 0000000..8b588fb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_auth/basic.py
@@ -0,0 +1,59 @@
+# -*- test-case-name: twisted.web.test.test_httpauth -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+HTTP BASIC authentication.
+
+@see: U{http://tools.ietf.org/html/rfc1945}
+@see: U{http://tools.ietf.org/html/rfc2616}
+@see: U{http://tools.ietf.org/html/rfc2617}
+"""
+
+import binascii
+
+from zope.interface import implements
+
+from twisted.cred import credentials, error
+from twisted.web.iweb import ICredentialFactory
+
+
+class BasicCredentialFactory(object):
+    """
+    Credential Factory for HTTP Basic Authentication
+
+    @type authenticationRealm: C{str}
+    @ivar authenticationRealm: The HTTP authentication realm which will be issued in
+        challenges.
+    """
+    implements(ICredentialFactory)
+
+    scheme = 'basic'
+
+    def __init__(self, authenticationRealm):
+        self.authenticationRealm = authenticationRealm
+
+
+    def getChallenge(self, request):
+        """
+        Return a challenge including the HTTP authentication realm with which
+        this factory was created.
+        """
+        return {'realm': self.authenticationRealm}
+
+
+    def decode(self, response, request):
+        """
+        Parse the base64-encoded, colon-separated username and password into a
+        L{credentials.UsernamePassword} instance.
+        """
+        try:
+            creds = binascii.a2b_base64(response + '===')
+        except binascii.Error:
+            raise error.LoginFailed('Invalid credentials')
+
+        creds = creds.split(':', 1)
+        if len(creds) == 2:
+            return credentials.UsernamePassword(*creds)
+        else:
+            raise error.LoginFailed('Invalid credentials')
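To make the decode() contract concrete, a small sketch (Python 2, matching the module; the request argument is unused by decode(), so None is passed for brevity):

    import base64
    from twisted.web._auth.basic import BasicCredentialFactory

    factory = BasicCredentialFactory('example.org')
    header_value = base64.b64encode('alice:secret')  # what follows "Basic " on the wire
    creds = factory.decode(header_value, None)
    assert (creds.username, creds.password) == ('alice', 'secret')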
diff --git a/ThirdParty/Twisted/twisted/web/_auth/digest.py b/ThirdParty/Twisted/twisted/web/_auth/digest.py
new file mode 100644
index 0000000..90ebf20
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_auth/digest.py
@@ -0,0 +1,54 @@
+# -*- test-case-name: twisted.web.test.test_httpauth -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation of RFC2617: HTTP Digest Authentication
+
+@see: U{http://www.faqs.org/rfcs/rfc2617.html}
+"""
+
+from zope.interface import implements
+from twisted.cred import credentials
+from twisted.web.iweb import ICredentialFactory
+
+class DigestCredentialFactory(object):
+    """
+    Wrapper for L{digest.DigestCredentialFactory} that implements the
+    L{ICredentialFactory} interface.
+    """
+    implements(ICredentialFactory)
+
+    scheme = 'digest'
+
+    def __init__(self, algorithm, authenticationRealm):
+        """
+        Create the digest credential factory that this object wraps.
+        """
+        self.digest = credentials.DigestCredentialFactory(algorithm,
+                                                          authenticationRealm)
+
+
+    def getChallenge(self, request):
+        """
+        Generate the challenge for use in the WWW-Authenticate header
+
+        @param request: The L{IRequest} to which access was denied and for the
+            response to which this challenge is being generated.
+
+        @return: The C{dict} that can be used to generate a WWW-Authenticate
+            header.
+        """
+        return self.digest.getChallenge(request.getClientIP())
+
+
+    def decode(self, response, request):
+        """
+        Create a L{twisted.cred.digest.DigestedCredentials} object from the
+        given response and request.
+
+        @see: L{ICredentialFactory.decode}
+        """
+        return self.digest.decode(response,
+                                  request.method,
+                                  request.getClientIP())
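A hedged sketch of generating a challenge with this wrapper (FakeRequest is a stand-in providing only what getChallenge() reads; the exact keys of the challenge come from twisted.cred and typically include 'realm' and 'nonce'):

    from twisted.web._auth.digest import DigestCredentialFactory

    class FakeRequest(object):
        method = 'GET'
        def getClientIP(self):
            return '127.0.0.1'

    factory = DigestCredentialFactory('md5', 'example.org')
    challenge = factory.getChallenge(FakeRequest())
    # challenge is the dict used to build the WWW-Authenticate header,
    # e.g. {'realm': 'example.org', 'nonce': ..., ...}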
diff --git a/ThirdParty/Twisted/twisted/web/_auth/wrapper.py b/ThirdParty/Twisted/twisted/web/_auth/wrapper.py
new file mode 100644
index 0000000..29f479e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_auth/wrapper.py
@@ -0,0 +1,225 @@
+# -*- test-case-name: twisted.web.test.test_httpauth -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A guard implementation which supports HTTP header-based authentication
+schemes.
+
+If no I{Authorization} header is supplied, an anonymous login will be
+attempted by using a L{Anonymous} credentials object.  If such a header is
+supplied and does not contain allowed credentials, or if anonymous login is
+denied, a 401 will be sent in the response along with I{WWW-Authenticate}
+headers for each of the allowed authentication schemes.
+"""
+
+from zope.interface import implements
+
+from twisted.python import log
+from twisted.python.components import proxyForInterface
+from twisted.web.resource import IResource, ErrorPage
+from twisted.web import util
+from twisted.cred import error
+from twisted.cred.credentials import Anonymous
+
+
+class UnauthorizedResource(object):
+    """
+    Simple IResource to escape Resource dispatch
+    """
+    implements(IResource)
+    isLeaf = True
+
+
+    def __init__(self, factories):
+        self._credentialFactories = factories
+
+
+    def render(self, request):
+        """
+        Send www-authenticate headers to the client
+        """
+        def generateWWWAuthenticate(scheme, challenge):
+            l = []
+            for k,v in challenge.iteritems():
+                l.append("%s=%s" % (k, quoteString(v)))
+            return "%s %s" % (scheme, ", ".join(l))
+
+        def quoteString(s):
+            return '"%s"' % (s.replace('\\', '\\\\').replace('"', '\\"'),)
+
+        request.setResponseCode(401)
+        for fact in self._credentialFactories:
+            challenge = fact.getChallenge(request)
+            request.responseHeaders.addRawHeader(
+                'www-authenticate',
+                generateWWWAuthenticate(fact.scheme, challenge))
+        if request.method == 'HEAD':
+            return ''
+        return 'Unauthorized'
+
+
+    def getChildWithDefault(self, path, request):
+        """
+        Disable resource dispatch
+        """
+        return self
+
+
+
+class HTTPAuthSessionWrapper(object):
+    """
+    Wrap a portal, enforcing supported header-based authentication schemes.
+
+    @ivar _portal: The L{Portal} which will be used to retrieve L{IResource}
+        avatars.
+
+    @ivar _credentialFactories: A list of L{ICredentialFactory} providers which
+        will be used to decode I{Authorization} headers into L{ICredentials}
+        providers.
+    """
+    implements(IResource)
+    isLeaf = False
+
+    def __init__(self, portal, credentialFactories):
+        """
+        Initialize a session wrapper
+
+        @type portal: C{Portal}
+        @param portal: The portal that will authenticate the remote client
+
+        @type credentialFactories: C{Iterable}
+        @param credentialFactories: The L{ICredentialFactory} providers that
+            will be used to decode I{Authorization} headers submitted by the
+            remote client
+        """
+        self._portal = portal
+        self._credentialFactories = credentialFactories
+
+
+    def _authorizedResource(self, request):
+        """
+        Get the L{IResource} which the given request is authorized to receive.
+        If the proper authorization headers are present, the resource will be
+        requested from the portal.  If not, an anonymous login attempt will be
+        made.
+        """
+        authheader = request.getHeader('authorization')
+        if not authheader:
+            return util.DeferredResource(self._login(Anonymous()))
+
+        factory, respString = self._selectParseHeader(authheader)
+        if factory is None:
+            return UnauthorizedResource(self._credentialFactories)
+        try:
+            credentials = factory.decode(respString, request)
+        except error.LoginFailed:
+            return UnauthorizedResource(self._credentialFactories)
+        except:
+            log.err(None, "Unexpected failure from credentials factory")
+            return ErrorPage(500, None, None)
+        else:
+            return util.DeferredResource(self._login(credentials))
+
+
+    def render(self, request):
+        """
+        Find the L{IResource} avatar suitable for the given request, if
+        possible, and render it.  Otherwise, perhaps render an error page
+        requiring authorization or describing an internal server failure.
+        """
+        return self._authorizedResource(request).render(request)
+
+
+    def getChildWithDefault(self, path, request):
+        """
+        Inspect the Authorization HTTP header, and return a deferred which,
+        when fired after successful authentication, will return an authorized
+        C{Avatar}. On authentication failure, an C{UnauthorizedResource} will
+        be returned, essentially halting further dispatch on the wrapped
+        resource and all of its children.
+        """
+        # Don't consume any segments of the request - this class should be
+        # transparent!
+        request.postpath.insert(0, request.prepath.pop())
+        return self._authorizedResource(request)
+
+
+    def _login(self, credentials):
+        """
+        Get the L{IResource} avatar for the given credentials.
+
+        @return: A L{Deferred} which will be called back with an L{IResource}
+            avatar or which will errback if authentication fails.
+        """
+        d = self._portal.login(credentials, None, IResource)
+        d.addCallbacks(self._loginSucceeded, self._loginFailed)
+        return d
+
+
+    def _loginSucceeded(self, (interface, avatar, logout)):
+        """
+        Handle login success by wrapping the resulting L{IResource} avatar
+        so that the C{logout} callback will be invoked when rendering is
+        complete.
+        """
+        class ResourceWrapper(proxyForInterface(IResource, 'resource')):
+            """
+            Wrap an L{IResource} so that whenever it or a child of it
+            completes rendering, the cred logout hook will be invoked.
+
+            An assumption is made here that exactly one L{IResource} from
+            among C{avatar} and all of its children will be rendered.  If
+            more than one is rendered, C{logout} will be invoked multiple
+            times and probably earlier than desired.
+            """
+            def getChildWithDefault(self, name, request):
+                """
+                Pass through the lookup to the wrapped resource, wrapping
+                the result in L{ResourceWrapper} to ensure C{logout} is
+                called when rendering of the child is complete.
+                """
+                return ResourceWrapper(self.resource.getChildWithDefault(name, request))
+
+            def render(self, request):
+                """
+                Hook into response generation so that when rendering has
+                finished completely (with or without error), C{logout} is
+                called.
+                """
+                request.notifyFinish().addBoth(lambda ign: logout())
+                return super(ResourceWrapper, self).render(request)
+
+        return ResourceWrapper(avatar)
+
+
+    def _loginFailed(self, result):
+        """
+        Handle login failure by presenting either another challenge (for
+        expected authentication/authorization-related failures) or a server
+        error page (for anything else).
+        """
+        if result.check(error.Unauthorized, error.LoginFailed):
+            return UnauthorizedResource(self._credentialFactories)
+        else:
+            log.err(
+                result,
+                "HTTPAuthSessionWrapper.getChildWithDefault encountered "
+                "unexpected error")
+            return ErrorPage(500, None, None)
+
+
+    def _selectParseHeader(self, header):
+        """
+        Choose an C{ICredentialFactory} from C{_credentialFactories}
+        suitable to use to decode the given I{Authenticate} header.
+
+        @return: A two-tuple of a factory and the remaining portion of the
+            header value to be decoded or a two-tuple of C{None} if no
+            factory can decode the header value.
+        """
+        elements = header.split(' ')
+        scheme = elements[0].lower()
+        for fact in self._credentialFactories:
+            if fact.scheme == scheme:
+                return (fact, ' '.join(elements[1:]))
+        return (None, None)
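Putting the wrapper together with a portal, roughly as the class docstring describes (PublicHTMLRealm and the in-memory checker are illustrative; released Twisted normally exposes these classes through twisted.web.guard):

    from zope.interface import implements

    from twisted.cred.portal import IRealm, Portal
    from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
    from twisted.web.resource import IResource, Resource
    from twisted.web._auth.wrapper import HTTPAuthSessionWrapper
    from twisted.web._auth.basic import BasicCredentialFactory

    class PublicHTMLRealm(object):
        implements(IRealm)

        def requestAvatar(self, avatarId, mind, *interfaces):
            if IResource in interfaces:
                # (interface, avatar, logout) -- the tuple _loginSucceeded expects.
                return (IResource, Resource(), lambda: None)
            raise NotImplementedError()

    portal = Portal(PublicHTMLRealm(),
                    [InMemoryUsernamePasswordDatabaseDontUse(alice='secret')])
    protected = HTTPAuthSessionWrapper(
        portal, [BasicCredentialFactory('example.org')])
    # 'protected' can now be placed in a resource tree like any other IResource.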
diff --git a/ThirdParty/Twisted/twisted/web/_element.py b/ThirdParty/Twisted/twisted/web/_element.py
new file mode 100644
index 0000000..3c15b3b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_element.py
@@ -0,0 +1,185 @@
+# -*- test-case-name: twisted.web.test.test_template -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from zope.interface import implements
+
+from twisted.web.iweb import IRenderable
+
+from twisted.web.error import MissingRenderMethod, UnexposedMethodError
+from twisted.web.error import MissingTemplateLoader
+
+
+class Expose(object):
+    """
+    Helper for exposing methods for various uses using a simple decorator-style
+    callable.
+
+    Instances of this class can be called with one or more functions as
+    positional arguments.  The names of these functions will be added to a list
+    on the class object of which they are methods.
+
+    @ivar attributeName: The attribute with which exposed methods will be
+    tracked.
+    """
+    def __init__(self, doc=None):
+        self.doc = doc
+
+
+    def __call__(self, *funcObjs):
+        """
+        Add one or more functions to the set of exposed functions.
+
+        This is a way to declare something about a class definition, similar to
+        L{zope.interface.implements}.  Use it like this::
+
+            magic = Expose('perform extra magic')
+            class Foo(Bar):
+                def twiddle(self, x, y):
+                    ...
+                def frob(self, a, b):
+                    ...
+                magic(twiddle, frob)
+
+        Later you can query the object::
+
+            aFoo = Foo()
+            magic.get(aFoo, 'twiddle')(x=1, y=2)
+
+        The call to C{get} will fail if the name it is given has not been
+        exposed using C{magic}.
+
+        @param funcObjs: One or more function objects which will be exposed to
+        the client.
+
+        @return: The first of C{funcObjs}.
+        """
+        if not funcObjs:
+            raise TypeError("expose() takes at least 1 argument (0 given)")
+        for fObj in funcObjs:
+            fObj.exposedThrough = getattr(fObj, 'exposedThrough', [])
+            fObj.exposedThrough.append(self)
+        return funcObjs[0]
+
+
+    _nodefault = object()
+    def get(self, instance, methodName, default=_nodefault):
+        """
+        Retrieve an exposed method with the given name from the given instance.
+
+        @raise UnexposedMethodError: Raised if C{default} is not specified and
+        there is no exposed method with the given name.
+
+        @return: A callable object for the named method assigned to the given
+        instance.
+        """
+        method = getattr(instance, methodName, None)
+        exposedThrough = getattr(method, 'exposedThrough', [])
+        if self not in exposedThrough:
+            if default is self._nodefault:
+                raise UnexposedMethodError(self, methodName)
+            return default
+        return method
+
+
+    @classmethod
+    def _withDocumentation(cls, thunk):
+        """
+        Slight hack to make users of this class appear to have a docstring to
+        documentation generators, by defining them with a decorator.  (This hack
+        should be removed when epydoc can be convinced to use some other method
+        for documenting.)
+        """
+        return cls(thunk.__doc__)
+
+
+# Avoid exposing the ugly, private classmethod name in the docs.  Luckily this
+# namespace is private already so this doesn't leak further.
+exposer = Expose._withDocumentation
+
+@exposer
+def renderer():
+    """
+    Decorate with L{renderer} to use methods as template render directives.
+
+    For example::
+
+        class Foo(Element):
+            @renderer
+            def twiddle(self, request, tag):
+                return tag('Hello, world.')
+
+        <div xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
+            <span t:render="twiddle" />
+        </div>
+
+    Will result in this final output::
+
+        <div>
+            <span>Hello, world.</span>
+        </div>
+    """
+
+
+
+class Element(object):
+    """
+    Base for classes which can render part of a page.
+
+    An Element is a renderer that can be embedded in a stan document and can
+    hook its template (from the loader) up to render methods.
+
+    An Element might be used to encapsulate the rendering of a complex piece of
+    data which is to be displayed in multiple different contexts.  The Element
+    allows the rendering logic to be easily re-used in different ways.
+
+    Element returns render methods which are registered using
+    L{twisted.web.element.renderer}.  For example::
+
+        class Menu(Element):
+            @renderer
+            def items(self, request, tag):
+                ....
+
+    Render methods are invoked with two arguments: first, the
+    L{twisted.web.http.Request} being served and second, the tag object which
+    "invoked" the render method.
+
+    @type loader: L{ITemplateLoader} provider
+    @ivar loader: The factory which will be used to load documents to
+        return from C{render}.
+    """
+    implements(IRenderable)
+    loader = None
+
+    def __init__(self, loader=None):
+        if loader is not None:
+            self.loader = loader
+
+
+    def lookupRenderMethod(self, name):
+        """
+        Look up and return the named render method.
+        """
+        method = renderer.get(self, name, None)
+        if method is None:
+            raise MissingRenderMethod(self, name)
+        return method
+
+
+    def render(self, request):
+        """
+        Implement L{IRenderable} to allow one L{Element} to be embedded in
+        another's template or rendering output.
+
+        (This will simply load the template from the C{loader}; when used in a
+        template, the flattening engine will keep track of this object
+        separately as the object to lookup renderers on and call
+        L{Element.renderer} to look them up.  The resulting object from this
+        method is not directly associated with this L{Element}.)
+        """
+        loader = self.loader
+        if loader is None:
+            raise MissingTemplateLoader(self)
+        return loader.load()
+
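Element and renderer are normally reached through the public twisted.web.template module; a minimal sketch of the pattern the docstrings above describe (Greeting and its template are made up):

    from twisted.web.template import Element, XMLString, flattenString, renderer

    class Greeting(Element):
        loader = XMLString(
            '<div xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
            'Hello, <span t:render="name" /></div>')

        @renderer
        def name(self, request, tag):
            return tag('world')

    rendered = []
    flattenString(None, Greeting()).addCallback(rendered.append)
    # rendered[0] is roughly '<div>Hello, <span>world</span></div>'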
diff --git a/ThirdParty/Twisted/twisted/web/_flatten.py b/ThirdParty/Twisted/twisted/web/_flatten.py
new file mode 100644
index 0000000..bfdc776
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_flatten.py
@@ -0,0 +1,314 @@
+# -*- test-case-name: twisted.web.test.test_flatten -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Context-free flattener/serializer for rendering Python objects, possibly
+complex or arbitrarily nested, as strings.
+
+"""
+
+from cStringIO import StringIO
+from sys import exc_info
+from types import GeneratorType
+from traceback import extract_tb
+from twisted.internet.defer import Deferred
+from twisted.web.error import UnfilledSlot, UnsupportedType, FlattenerError
+
+from twisted.web.iweb import IRenderable
+from twisted.web._stan import (
+    Tag, slot, voidElements, Comment, CDATA, CharRef)
+
+
+
+def escapedData(data, inAttribute):
+    """
+    Escape a string for inclusion in a document.
+
+    @type data: C{str} or C{unicode}
+    @param data: The string to escape.
+
+    @type inAttribute: C{bool}
+    @param inAttribute: A flag which, if set, indicates that the string should
+        be quoted for use as the value of an XML tag value.
+
+    @rtype: C{str}
+    @return: The quoted form of C{data}. If C{data} is unicode, return a utf-8
+        encoded string.
+    """
+    if isinstance(data, unicode):
+        data = data.encode('utf-8')
+    data = data.replace('&', '&amp;'
+        ).replace('<', '&lt;'
+        ).replace('>', '&gt;')
+    if inAttribute:
+        data = data.replace('"', '&quot;')
+    return data
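+    # Editor's note: e.g. escapedData('a < b & c', False) returns
+    # 'a &lt; b &amp; c', and escapedData('say "hi"', True) returns
+    # 'say &quot;hi&quot;'.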
+
+
+def escapedCDATA(data):
+    """
+    Escape CDATA for inclusion in a document.
+
+    @type data: C{str} or C{unicode}
+    @param data: The string to escape.
+
+    @rtype: C{str}
+    @return: The quoted form of C{data}. If C{data} is unicode, return a utf-8
+        encoded string.
+    """
+    if isinstance(data, unicode):
+        data = data.encode('utf-8')
+    return data.replace(']]>', ']]]]><![CDATA[>')
+
+
+def escapedComment(data):
+    """
+    Escape a comment for inclusion in a document.
+
+    @type data: C{str} or C{unicode}
+    @param data: The string to escape.
+
+    @rtype: C{str}
+    @return: The quoted form of C{data}. If C{data} is unicode, return a utf-8
+        encoded string.
+    """
+    if isinstance(data, unicode):
+        data = data.encode('utf-8')
+    data = data.replace('--', '- - ').replace('>', '&gt;')
+    if data and data[-1] == '-':
+        data += ' '
+    return data
+
+
+def _getSlotValue(name, slotData, default=None):
+    """
+    Find the value of the named slot in the given stack of slot data.
+    """
+    for slotFrame in slotData[::-1]:
+        if slotFrame is not None and name in slotFrame:
+            return slotFrame[name]
+    else:
+        if default is not None:
+            return default
+        raise UnfilledSlot(name)
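+    # Editor's note: e.g. _getSlotValue('x', [{'x': 1}, {'x': 2}]) returns 2;
+    # the most recently pushed slot frame wins, and an unknown name raises
+    # UnfilledSlot unless a default is supplied.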
+
+
+def _flattenElement(request, root, slotData, renderFactory, inAttribute):
+    """
+    Make C{root} slightly more flat by yielding all its immediate contents 
+    as strings, deferreds or generators that are recursive calls to itself.
+
+    @param request: A request object which will be passed to
+        L{IRenderable.render}.
+
+    @param root: An object to be made flatter.  This may be of type C{unicode},
+        C{str}, L{slot}, L{Tag}, L{URL}, L{tuple}, L{list}, L{GeneratorType},
+        L{Deferred}, or an object that implements L{IRenderable}.
+
+    @param slotData: A C{list} of C{dict} mapping C{str} slot names to data
+        with which those slots will be replaced.
+
+    @param renderFactory: If not C{None}, an object that provides
+        L{IRenderable}.
+
+    @param inAttribute: A flag which, if set, indicates that C{str} and
+        C{unicode} instances encountered must be quoted as for XML tag
+        attribute values.
+
+    @return: An iterator which yields C{str}, L{Deferred}, and more iterators
+        of the same type.
+    """
+
+    if isinstance(root, (str, unicode)):
+        yield escapedData(root, inAttribute)
+    elif isinstance(root, slot):
+        slotValue = _getSlotValue(root.name, slotData, root.default)
+        yield _flattenElement(request, slotValue, slotData, renderFactory,
+                inAttribute)
+    elif isinstance(root, CDATA):
+        yield '<![CDATA['
+        yield escapedCDATA(root.data)
+        yield ']]>'
+    elif isinstance(root, Comment):
+        yield '<!--'
+        yield escapedComment(root.data)
+        yield '-->'
+    elif isinstance(root, Tag):
+        slotData.append(root.slotData)
+        if root.render is not None:
+            rendererName = root.render
+            rootClone = root.clone(False)
+            rootClone.render = None
+            renderMethod = renderFactory.lookupRenderMethod(rendererName)
+            result = renderMethod(request, rootClone)
+            yield _flattenElement(request, result, slotData, renderFactory,
+                    False)
+            slotData.pop()
+            return
+
+        if not root.tagName:
+            yield _flattenElement(request, root.children, slotData, renderFactory, False)
+            return
+
+        yield '<'
+        if isinstance(root.tagName, unicode):
+            tagName = root.tagName.encode('ascii')
+        else:
+            tagName = str(root.tagName)
+        yield tagName
+        for k, v in root.attributes.iteritems():
+            if isinstance(k, unicode):
+                k = k.encode('ascii')
+            yield ' ' + k + '="'
+            yield _flattenElement(request, v, slotData, renderFactory, True)
+            yield '"'
+        if root.children or tagName not in voidElements:
+            yield '>'
+            yield _flattenElement(request, root.children, slotData, renderFactory, False)
+            yield '</' + tagName + '>'
+        else:
+            yield ' />'
+
+    elif isinstance(root, (tuple, list, GeneratorType)):
+        for element in root:
+            yield _flattenElement(request, element, slotData, renderFactory,
+                    inAttribute)
+    elif isinstance(root, CharRef):
+        yield '&#%d;' % (root.ordinal,)
+    elif isinstance(root, Deferred):
+        yield root.addCallback(
+            lambda result: (result, _flattenElement(request, result, slotData,
+                                             renderFactory, inAttribute)))
+    elif IRenderable.providedBy(root):
+        result = root.render(request)
+        yield _flattenElement(request, result, slotData, root, inAttribute)
+    else:
+        raise UnsupportedType(root)
+
+
+def _flattenTree(request, root):
+    """
+    Make C{root} into an iterable of C{str} and L{Deferred} by doing a
+    depth first traversal of the tree.
+
+    @param request: A request object which will be passed to
+        L{IRenderable.render}.
+
+    @param root: An object to be made flatter.  This may be of type C{unicode},
+        C{str}, L{slot}, L{Tag}, L{tuple}, L{list}, L{GeneratorType},
+        L{Deferred}, or something providing L{IRenderable}.
+
+    @return: An iterator which yields objects of type C{str} and L{Deferred}.
+        A L{Deferred} is only yielded when one is encountered in the process of
+        flattening C{root}.  The returned iterator must not be iterated again
+        until the L{Deferred} is called back.
+    """
+    stack = [_flattenElement(request, root, [], None, False)]
+    while stack:
+        try:
+            # In Python 2.5, after an exception, a generator's gi_frame is
+            # None.
+            frame = stack[-1].gi_frame
+            element = stack[-1].next()
+        except StopIteration:
+            stack.pop()
+        except Exception, e:
+            stack.pop()
+            roots = []
+            for generator in stack:
+                roots.append(generator.gi_frame.f_locals['root'])
+            roots.append(frame.f_locals['root'])
+            raise FlattenerError(e, roots, extract_tb(exc_info()[2]))
+        else:
+            if type(element) is str:
+                yield element
+            elif isinstance(element, Deferred):
+                def cbx((original, toFlatten)):
+                    stack.append(toFlatten)
+                    return original
+                yield element.addCallback(cbx)
+            else:
+                stack.append(element)
+
+
+def _writeFlattenedData(state, write, result):
+    """
+    Take strings from an iterator and pass them to a writer function.
+
+    @param state: An iterator of C{str} and L{Deferred}.  C{str} instances will
+        be passed to C{write}.  L{Deferred} instances will be waited on before
+        resuming iteration of C{state}.
+
+    @param write: A callable which will be invoked with each C{str}
+        produced by iterating C{state}.
+
+    @param result: A L{Deferred} which will be called back when C{state} has
+        been completely flattened into C{write}, or which will be errbacked if
+        a generator in C{state} raises an exception or a L{Deferred} from
+        C{state} fails.
+
+    @return: C{None}
+    """
+    while True:
+        try:
+            element = state.next()
+        except StopIteration:
+            result.callback(None)
+        except:
+            result.errback()
+        else:
+            if type(element) is str:
+                write(element)
+                continue
+            else:
+                def cby(original):
+                    _writeFlattenedData(state, write, result)
+                    return original
+                element.addCallbacks(cby, result.errback)
+        break
+
+
+def flatten(request, root, write):
+    """
+    Incrementally write out a string representation of C{root} using C{write}.
+
+    In order to create a string representation, C{root} will be decomposed into
+    simpler objects which will themselves be decomposed and so on until strings
+    or objects which can easily be converted to strings are encountered.
+
+    @param request: A request object which will be passed to the C{render}
+        method of any L{IRenderable} provider which is encountered.
+
+    @param root: An object to be made flatter.  This may be of type C{unicode},
+        C{str}, L{slot}, L{Tag}, L{tuple}, L{list}, L{GeneratorType},
+        L{Deferred}, or something that provides L{IRenderable}.
+
+    @param write: A callable which will be invoked with each C{str}
+        produced by flattening C{root}.
+
+    @return: A L{Deferred} which will be called back when C{root} has
+        been completely flattened into C{write} or which will be errbacked if
+        an unexpected exception occurs.
+    """
+    result = Deferred()
+    state = _flattenTree(request, root)
+    _writeFlattenedData(state, write, result)
+    return result
+
+
+def flattenString(request, root):
+    """
+    Collate a string representation of C{root} into a single string.
+
+    This is basically gluing L{flatten} to a C{StringIO} and returning the
+    results. See L{flatten} for the exact meanings of C{request} and
+    C{root}.
+
+    @return: A L{Deferred} which will be called back with a single string as
+        its result when C{root} has been completely flattened into C{write} or
+        which will be errbacked if an unexpected exception occurs.
+    """
+    io = StringIO()
+    d = flatten(request, root, io.write)
+    d.addCallback(lambda _: io.getvalue())
+    return d
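+
+
+# Editor's note: the sketch below was added for this review and is not part
+# of the upstream file.  It assumes that tags, slot and flattenString, as
+# re-exported by twisted.web.template, wrap the machinery defined above.
+def _exampleFlatten():
+    from twisted.web.template import tags, slot, flattenString
+
+    # A <p> tag containing some text and a slot, filled before flattening.
+    root = tags.p('total: ', slot('total'))
+    root.fillSlots(total='42')
+    # Returns a Deferred which fires with '<p>total: 42</p>'.
+    return flattenString(None, root)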
diff --git a/ThirdParty/Twisted/twisted/web/_newclient.py b/ThirdParty/Twisted/twisted/web/_newclient.py
new file mode 100644
index 0000000..431e029
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_newclient.py
@@ -0,0 +1,1502 @@
+# -*- test-case-name: twisted.web.test.test_newclient -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An U{HTTP 1.1<http://www.w3.org/Protocols/rfc2616/rfc2616.html>} client.
+
+The way to use the functionality provided by this module is to:
+
+  - Connect a L{HTTP11ClientProtocol} to an HTTP server
+  - Create a L{Request} with the appropriate data
+  - Pass the request to L{HTTP11ClientProtocol.request}
+  - The returned Deferred will fire with a L{Response} object
+  - Create a L{IProtocol} provider which can handle the response body
+  - Connect it to the response with L{Response.deliverBody}
+  - When the protocol's C{connectionLost} method is called, the response is
+    complete.  See L{Response.deliverBody} for details.
+
+Various other classes in this module support this usage:
+
+  - HTTPParser is the basic HTTP parser.  It can handle the parts of HTTP which
+    are symmetric between requests and responses.
+
+  - HTTPClientParser extends HTTPParser to handle response-specific parts of
+    HTTP.  One instance is created for each request to parse the corresponding
+    response.
+"""
+
+__metaclass__ = type
+
+from zope.interface import implements
+
+from twisted.python import log
+from twisted.python.reflect import fullyQualifiedName
+from twisted.python.failure import Failure
+from twisted.python.compat import set
+from twisted.internet.interfaces import IConsumer, IPushProducer
+from twisted.internet.error import ConnectionDone
+from twisted.internet.defer import Deferred, succeed, fail, maybeDeferred
+from twisted.internet.protocol import Protocol
+from twisted.protocols.basic import LineReceiver
+from twisted.web.iweb import UNKNOWN_LENGTH, IResponse
+from twisted.web.http_headers import Headers
+from twisted.web.http import NO_CONTENT, NOT_MODIFIED
+from twisted.web.http import _DataLoss, PotentialDataLoss
+from twisted.web.http import _IdentityTransferDecoder, _ChunkedTransferDecoder
+
+# States HTTPParser can be in
+STATUS = 'STATUS'
+HEADER = 'HEADER'
+BODY = 'BODY'
+DONE = 'DONE'
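+
+
+# Editor's note: the sketch below was added for this review and is not part
+# of the upstream file.  It walks through the flow described in the module
+# docstring; the use of ClientCreator and the host/port values are
+# assumptions made for the example only.
+def _exampleGet(reactor, host='example.com', port=80):
+    from twisted.internet.protocol import ClientCreator
+
+    class BodyPrinter(Protocol):
+        # Collects the response body and prints it once delivery finishes
+        # (connectionLost is called with ResponseDone at that point).
+        def __init__(self):
+            self.buffer = []
+
+        def dataReceived(self, data):
+            self.buffer.append(data)
+
+        def connectionLost(self, reason):
+            print ''.join(self.buffer)
+
+    def cbConnected(proto):
+        # Exactly one Host header is required by Request._writeHeaders.
+        request = Request('GET', '/', Headers({'host': [host]}), None)
+        return proto.request(request)
+
+    d = ClientCreator(reactor, HTTP11ClientProtocol).connectTCP(host, port)
+    d.addCallback(cbConnected)
+    d.addCallback(lambda response: response.deliverBody(BodyPrinter()))
+    return d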
+
+
+class BadHeaders(Exception):
+    """
+    Headers passed to L{Request} were in some way invalid.
+    """
+
+
+
+class ExcessWrite(Exception):
+    """
+    The body L{IBodyProducer} for a request tried to write data after
+    indicating it had finished writing data.
+    """
+
+
+class ParseError(Exception):
+    """
+    Some received data could not be parsed.
+
+    @ivar data: The string which could not be parsed.
+    """
+    def __init__(self, reason, data):
+        Exception.__init__(self, reason, data)
+        self.data = data
+
+
+
+class BadResponseVersion(ParseError):
+    """
+    The version string in a status line was unparsable.
+    """
+
+
+
+class _WrapperException(Exception):
+    """
+    L{_WrapperException} is the base exception type for exceptions which
+    include one or more other exceptions as the low-level causes.
+
+    @ivar reasons: A list of exceptions.  See subclass documentation for more
+        details.
+    """
+    def __init__(self, reasons):
+        Exception.__init__(self, reasons)
+        self.reasons = reasons
+
+
+
+class RequestGenerationFailed(_WrapperException):
+    """
+    There was an error while creating the bytes which make up a request.
+
+    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
+        reasons the request generation was considered to have failed.
+    """
+
+
+
+class RequestTransmissionFailed(_WrapperException):
+    """
+    There was an error while sending the bytes which make up a request.
+
+    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
+        reasons the request transmission was considered to have failed.
+    """
+
+
+
+class ConnectionAborted(Exception):
+    """
+    The connection was explicitly aborted by application code.
+    """
+
+
+
+class WrongBodyLength(Exception):
+    """
+    An L{IBodyProducer} declared the number of bytes it was going to
+    produce (via its C{length} attribute) and then produced a different number
+    of bytes.
+    """
+
+
+
+class ResponseDone(Exception):
+    """
+    L{ResponseDone} may be passed to L{IProtocol.connectionLost} on the
+    protocol passed to L{Response.deliverBody} and indicates that the entire
+    response has been delivered.
+    """
+
+
+
+class ResponseFailed(_WrapperException):
+    """
+    L{ResponseFailed} indicates that all of the response to a request was not
+    received for some reason.
+
+    @ivar reasons: A C{list} of one or more L{Failure} instances giving the
+        reasons the response was considered to have failed.
+
+    @ivar response: If specified, the L{Response} received from the server (and
+        in particular the status code and the headers).
+    """
+
+    def __init__(self, reasons, response=None):
+        _WrapperException.__init__(self, reasons)
+        self.response = response
+
+
+
+class ResponseNeverReceived(ResponseFailed):
+    """
+    A L{ResponseFailed} that knows no response bytes at all have been received.
+    """
+
+
+
+class RequestNotSent(Exception):
+    """
+    L{RequestNotSent} indicates that an attempt was made to issue a request but
+    for reasons unrelated to the details of the request itself, the request
+    could not be sent.  For example, this may indicate that an attempt was made
+    to send a request using a protocol which is no longer connected to a
+    server.
+    """
+
+
+
+def _callAppFunction(function):
+    """
+    Call C{function}.  If it raises an exception, log it with a minimal
+    description of the source.
+
+    @return: C{None}
+    """
+    try:
+        function()
+    except:
+        log.err(None, "Unexpected exception from %s" % (
+                fullyQualifiedName(function),))
+
+
+
+class HTTPParser(LineReceiver):
+    """
+    L{HTTPParser} handles the parsing side of HTTP processing. With a suitable
+    subclass, it can parse either the client side or the server side of the
+    connection.
+
+    @ivar headers: All of the non-connection control message headers yet
+        received.
+
+    @ivar state: State indicator for the response parsing state machine.  One
+        of C{STATUS}, C{HEADER}, C{BODY}, C{DONE}.
+
+    @ivar _partialHeader: C{None} or a C{list} of the lines of a multiline
+        header while that header is being received.
+    """
+
+    # NOTE: According to HTTP spec, we're supposed to eat the
+    # 'Proxy-Authenticate' and 'Proxy-Authorization' headers also, but that
+    # doesn't sound like a good idea to me, because it makes it impossible to
+    # have a non-authenticating transparent proxy in front of an authenticating
+    # proxy. An authenticating proxy can eat them itself. -jknight
+    #
+    # Further, quoting
+    # http://homepages.tesco.net/J.deBoynePollard/FGA/web-proxy-connection-header.html
+    # regarding the 'Proxy-Connection' header:
+    #
+    #    The Proxy-Connection: header is a mistake in how some web browsers
+    #    use HTTP. Its name is the result of a false analogy. It is not a
+    #    standard part of the protocol. There is a different standard
+    #    protocol mechanism for doing what it does. And its existence
+    #    imposes a requirement upon HTTP servers such that no proxy HTTP
+    #    server can be standards-conforming in practice.
+    #
+    # -exarkun
+
+    # Some servers (like http://news.ycombinator.com/) return status lines and
+    # HTTP headers delimited by \n instead of \r\n.
+    delimiter = '\n'
+
+    CONNECTION_CONTROL_HEADERS = set([
+            'content-length', 'connection', 'keep-alive', 'te', 'trailers',
+            'transfer-encoding', 'upgrade', 'proxy-connection'])
+
+    def connectionMade(self):
+        self.headers = Headers()
+        self.connHeaders = Headers()
+        self.state = STATUS
+        self._partialHeader = None
+
+
+    def switchToBodyMode(self, decoder):
+        """
+        Switch to body parsing mode - interpret any more bytes delivered as
+        part of the message body and deliver them to the given decoder.
+        """
+        if self.state == BODY:
+            raise RuntimeError("already in body mode")
+
+        self.bodyDecoder = decoder
+        self.state = BODY
+        self.setRawMode()
+
+
+    def lineReceived(self, line):
+        """
+        Handle one line from a response.
+        """
+        # Handle the normal CR LF case.
+        if line[-1:] == '\r':
+            line = line[:-1]
+
+        if self.state == STATUS:
+            self.statusReceived(line)
+            self.state = HEADER
+        elif self.state == HEADER:
+            if not line or line[0] not in ' \t':
+                if self._partialHeader is not None:
+                    header = ''.join(self._partialHeader)
+                    name, value = header.split(':', 1)
+                    value = value.strip()
+                    self.headerReceived(name, value)
+                if not line:
+                    # Empty line means the header section is over.
+                    self.allHeadersReceived()
+                else:
+                    # Line not beginning with LWS is another header.
+                    self._partialHeader = [line]
+            else:
+                # A line beginning with LWS is a continuation of a header
+                # begun on a previous line.
+                self._partialHeader.append(line)
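+        # Editor's note: e.g. the folded header lines 'X-Foo: bar' and
+        # ' baz' are joined and delivered as headerReceived('X-Foo',
+        # 'bar baz') once the next non-continuation line arrives.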
+
+
+    def rawDataReceived(self, data):
+        """
+        Pass data from the message body to the body decoder object.
+        """
+        self.bodyDecoder.dataReceived(data)
+
+
+    def isConnectionControlHeader(self, name):
+        """
+        Return C{True} if the given lower-cased name is the name of a
+        connection control header (rather than an entity header).
+
+        According to RFC 2616, section 14.10, the tokens in the Connection
+        header are probably relevant here.  However, I am not sure what the
+        practical consequences of either implementing or ignoring that are.
+        So I leave it unimplemented for the time being.
+        """
+        return name in self.CONNECTION_CONTROL_HEADERS
+
+
+    def statusReceived(self, status):
+        """
+        Callback invoked whenever the first line of a new message is received.
+        Override this.
+
+        @param status: The first line of an HTTP request or response message
+            without trailing I{CR LF}.
+        @type status: C{str}
+        """
+
+
+    def headerReceived(self, name, value):
+        """
+        Store the given header in C{self.headers}.
+        """
+        name = name.lower()
+        if self.isConnectionControlHeader(name):
+            headers = self.connHeaders
+        else:
+            headers = self.headers
+        headers.addRawHeader(name, value)
+
+
+    def allHeadersReceived(self):
+        """
+        Callback invoked after the last header is passed to C{headerReceived}.
+        Override this to change to the C{BODY} or C{DONE} state.
+        """
+        self.switchToBodyMode(None)
+
+
+
+class HTTPClientParser(HTTPParser):
+    """
+    An HTTP parser which only handles HTTP responses.
+
+    @ivar request: The request with which the expected response is associated.
+    @type request: L{Request}
+
+    @ivar NO_BODY_CODES: A C{set} of response codes which B{MUST NOT} have a
+        body.
+
+    @ivar finisher: A callable to invoke when this response is fully parsed.
+
+    @ivar _responseDeferred: A L{Deferred} which will be called back with the
+        response when all headers in the response have been received.
+        Thereafter, C{None}.
+
+    @ivar _everReceivedData: C{True} if any bytes have been received.
+    """
+    NO_BODY_CODES = set([NO_CONTENT, NOT_MODIFIED])
+
+    _transferDecoders = {
+        'chunked': _ChunkedTransferDecoder,
+        }
+
+    bodyDecoder = None
+
+    def __init__(self, request, finisher):
+        self.request = request
+        self.finisher = finisher
+        self._responseDeferred = Deferred()
+        self._everReceivedData = False
+
+
+    def dataReceived(self, data):
+        """
+        Override so that we know if any response has been received.
+        """
+        self._everReceivedData = True
+        HTTPParser.dataReceived(self, data)
+
+
+    def parseVersion(self, strversion):
+        """
+        Parse version strings of the form Protocol '/' Major '.' Minor. E.g.
+        'HTTP/1.1'.  Returns (protocol, major, minor).  Will raise ValueError
+        on bad syntax.
+        """
+        try:
+            proto, strnumber = strversion.split('/')
+            major, minor = strnumber.split('.')
+            major, minor = int(major), int(minor)
+        except ValueError, e:
+            raise BadResponseVersion(str(e), strversion)
+        if major < 0 or minor < 0:
+            raise BadResponseVersion("version may not be negative", strversion)
+        return (proto, major, minor)
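+        # Editor's note: e.g. parseVersion('HTTP/1.1') returns
+        # ('HTTP', 1, 1), while a malformed string such as 'bogus'
+        # raises BadResponseVersion.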
+
+
+    def statusReceived(self, status):
+        """
+        Parse the status line into its components and create a response object
+        to keep track of this response's state.
+        """
+        parts = status.split(' ', 2)
+        if len(parts) != 3:
+            raise ParseError("wrong number of parts", status)
+
+        try:
+            statusCode = int(parts[1])
+        except ValueError:
+            raise ParseError("non-integer status code", status)
+
+        self.response = Response(
+            self.parseVersion(parts[0]),
+            statusCode,
+            parts[2],
+            self.headers,
+            self.transport)
+
+
+    def _finished(self, rest):
+        """
+        Called to indicate that an entire response has been received.  No more
+        bytes will be interpreted by this L{HTTPClientParser}.  Extra bytes are
+        passed up and the state of this L{HTTPClientParser} is set to I{DONE}.
+
+        @param rest: A C{str} giving any extra bytes delivered to this
+            L{HTTPClientParser} which are not part of the response being
+            parsed.
+        """
+        self.state = DONE
+        self.finisher(rest)
+
+
+    def isConnectionControlHeader(self, name):
+        """
+        Content-Length in the response to a HEAD request is an entity header,
+        not a connection control header.
+        """
+        if self.request.method == 'HEAD' and name == 'content-length':
+            return False
+        return HTTPParser.isConnectionControlHeader(self, name)
+
+
+    def allHeadersReceived(self):
+        """
+        Figure out how long the response body is going to be by examining the
+        I{Content-Length} and I{Transfer-Encoding} headers.
+        """
+        if (self.response.code in self.NO_BODY_CODES
+            or self.request.method == 'HEAD'):
+            self.response.length = 0
+            self._finished(self.clearLineBuffer())
+        else:
+            transferEncodingHeaders = self.connHeaders.getRawHeaders(
+                'transfer-encoding')
+            if transferEncodingHeaders:
+
+                # This could be a KeyError.  However, that would mean we do not
+                # know how to decode the response body, so failing the request
+                # is as good a behavior as any.  Perhaps someday we will want
+                # to normalize/document/test this specifically, but failing
+                # seems fine to me for now.
+                transferDecoder = self._transferDecoders[transferEncodingHeaders[0].lower()]
+
+                # If anyone ever invents a transfer encoding other than
+                # chunked (yea right), and that transfer encoding can predict
+                # the length of the response body, it might be sensible to
+                # allow the transfer decoder to set the response object's
+                # length attribute.
+            else:
+                contentLengthHeaders = self.connHeaders.getRawHeaders('content-length')
+                if contentLengthHeaders is None:
+                    contentLength = None
+                elif len(contentLengthHeaders) == 1:
+                    contentLength = int(contentLengthHeaders[0])
+                    self.response.length = contentLength
+                else:
+                    # "HTTP Message Splitting" or "HTTP Response Smuggling"
+                    # potentially happening.  Or it's just a buggy server.
+                    raise ValueError(
+                        "Too many Content-Length headers; response is invalid")
+
+                if contentLength == 0:
+                    self._finished(self.clearLineBuffer())
+                    transferDecoder = None
+                else:
+                    transferDecoder = lambda x, y: _IdentityTransferDecoder(
+                        contentLength, x, y)
+
+            if transferDecoder is None:
+                self.response._bodyDataFinished()
+            else:
+                # Make sure as little data as possible from the response body
+                # gets delivered to the response object until the response
+                # object actually indicates it is ready to handle bytes
+                # (probably because an application gave it a way to interpret
+                # them).
+                self.transport.pauseProducing()
+                self.switchToBodyMode(transferDecoder(
+                        self.response._bodyDataReceived,
+                        self._finished))
+
+        # This must be last.  If it were first, then application code might
+        # change some state (for example, registering a protocol to receive the
+        # response body).  Then the pauseProducing above would be wrong since
+        # the response is ready for bytes and nothing else would ever resume
+        # the transport.
+        self._responseDeferred.callback(self.response)
+        del self._responseDeferred
+
+
+    def connectionLost(self, reason):
+        if self.bodyDecoder is not None:
+            try:
+                try:
+                    self.bodyDecoder.noMoreData()
+                except PotentialDataLoss:
+                    self.response._bodyDataFinished(Failure())
+                except _DataLoss:
+                    self.response._bodyDataFinished(
+                        Failure(ResponseFailed([reason, Failure()],
+                                               self.response)))
+                else:
+                    self.response._bodyDataFinished()
+            except:
+                # Handle exceptions from both the except suites and the else
+                # suite.  Those functions really shouldn't raise exceptions,
+                # but maybe there's some buggy application code somewhere
+                # making things difficult.
+                log.err()
+        elif self.state != DONE:
+            if self._everReceivedData:
+                exceptionClass = ResponseFailed
+            else:
+                exceptionClass = ResponseNeverReceived
+            self._responseDeferred.errback(Failure(exceptionClass([reason])))
+            del self._responseDeferred
+
+
+
+class Request:
+    """
+    A L{Request} instance describes an HTTP request to be sent to an HTTP
+    server.
+
+    @ivar method: The HTTP method for this request, e.g. 'GET', 'HEAD',
+        'POST', etc.
+    @type method: C{str}
+
+    @ivar uri: The relative URI of the resource to request.  For example,
+        C{'/foo/bar?baz=quux'}.
+    @type uri: C{str}
+
+    @ivar headers: Headers to be sent to the server.  It is important to
+        note that this object does not create any implicit headers.  So it
+        is up to the HTTP Client to add required headers such as 'Host'.
+    @type headers: L{twisted.web.http_headers.Headers}
+
+    @ivar bodyProducer: C{None} or an L{IBodyProducer} provider which
+        produces the content body to send to the remote HTTP server.
+
+    @ivar persistent: Set to C{True} when using an HTTP persistent connection.
+    @type persistent: C{bool}
+    """
+    def __init__(self, method, uri, headers, bodyProducer, persistent=False):
+        self.method = method
+        self.uri = uri
+        self.headers = headers
+        self.bodyProducer = bodyProducer
+        self.persistent = persistent
+
+
+    def _writeHeaders(self, transport, TEorCL):
+        hosts = self.headers.getRawHeaders('host', ())
+        if len(hosts) != 1:
+            raise BadHeaders("Exactly one Host header required")
+
+        # In the future, having the protocol version be a parameter to this
+        # method would probably be good.  It would be nice if this method
+        # weren't limited to issuing HTTP/1.1 requests.
+        requestLines = []
+        requestLines.append(
+            '%s %s HTTP/1.1\r\n' % (self.method, self.uri))
+        if not self.persistent:
+            requestLines.append('Connection: close\r\n')
+        if TEorCL is not None:
+            requestLines.append(TEorCL)
+        for name, values in self.headers.getAllRawHeaders():
+            requestLines.extend(['%s: %s\r\n' % (name, v) for v in values])
+        requestLines.append('\r\n')
+        transport.writeSequence(requestLines)
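+        # Editor's note: for a non-persistent GET of '/' with a single Host
+        # header, the sequence written is, roughly:
+        #   'GET / HTTP/1.1\r\n', 'Connection: close\r\n',
+        #   'Host: example.com\r\n', '\r\n'
+        # (header-name capitalization comes from Headers.getAllRawHeaders).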
+
+
+    def _writeToChunked(self, transport):
+        """
+        Write this request to the given transport using chunked
+        transfer-encoding to frame the body.
+        """
+        self._writeHeaders(transport, 'Transfer-Encoding: chunked\r\n')
+        encoder = ChunkedEncoder(transport)
+        encoder.registerProducer(self.bodyProducer, True)
+        d = self.bodyProducer.startProducing(encoder)
+
+        def cbProduced(ignored):
+            encoder.unregisterProducer()
+        def ebProduced(err):
+            encoder._allowNoMoreWrites()
+            # Don't call the encoder's unregisterProducer because it will write
+            # a zero-length chunk.  This would indicate to the server that the
+            # request body is complete.  There was an error, though, so we
+            # don't want to do that.
+            transport.unregisterProducer()
+            return err
+        d.addCallbacks(cbProduced, ebProduced)
+        return d
+
+
+    def _writeToContentLength(self, transport):
+        """
+        Write this request to the given transport using content-length to frame
+        the body.
+        """
+        self._writeHeaders(
+            transport,
+            'Content-Length: %d\r\n' % (self.bodyProducer.length,))
+
+        # This Deferred is used to signal an error in the data written to the
+        # encoder below.  It can only errback and it will only do so before too
+        # many bytes have been written to the encoder and before the producer
+        # Deferred fires.
+        finishedConsuming = Deferred()
+
+        # This makes sure the producer writes the correct number of bytes for
+        # the request body.
+        encoder = LengthEnforcingConsumer(
+            self.bodyProducer, transport, finishedConsuming)
+
+        transport.registerProducer(self.bodyProducer, True)
+
+        finishedProducing = self.bodyProducer.startProducing(encoder)
+
+        def combine(consuming, producing):
+            # This Deferred is returned and will be fired when the first of
+            # consuming or producing fires.
+            ultimate = Deferred()
+
+            # Keep track of what has happened so far.  This initially
+            # contains None, then an integer uniquely identifying what
+            # sequence of events happened.  See the callbacks and errbacks
+            # defined below for the meaning of each value.
+            state = [None]
+
+            def ebConsuming(err):
+                if state == [None]:
+                    # The consuming Deferred failed first.  This means the
+                    # overall writeTo Deferred is going to errback now.  The
+                    # producing Deferred should not fire later (because the
+                    # consumer should have called stopProducing on the
+                    # producer), but if it does, a callback will be ignored
+                    # and an errback will be logged.
+                    state[0] = 1
+                    ultimate.errback(err)
+                else:
+                    # The consuming Deferred errbacked after the producing
+                    # Deferred fired.  This really shouldn't ever happen.
+                    # If it does, I goofed.  Log the error anyway, just so
+                    # there's a chance someone might notice and complain.
+                    log.err(
+                        err,
+                        "Buggy state machine in %r/[%d]: "
+                        "ebConsuming called" % (self, state[0]))
+
+            def cbProducing(result):
+                if state == [None]:
+                    # The producing Deferred succeeded first.  Nothing will
+                    # ever happen to the consuming Deferred.  Tell the
+                    # encoder we're done so it can check what the producer
+                    # wrote and make sure it was right.
+                    state[0] = 2
+                    try:
+                        encoder._noMoreWritesExpected()
+                    except:
+                        # Fail the overall writeTo Deferred - something the
+                        # producer did was wrong.
+                        ultimate.errback()
+                    else:
+                        # Success - succeed the overall writeTo Deferred.
+                        ultimate.callback(None)
+                # Otherwise, the consuming Deferred already errbacked.  The
+                # producing Deferred wasn't supposed to fire, but it did
+                # anyway.  It's buggy, but there's not really anything to be
+                # done about it.  Just ignore this result.
+
+            def ebProducing(err):
+                if state == [None]:
+                    # The producing Deferred failed first.  This means the
+                    # overall writeTo Deferred is going to errback now.
+                    # Tell the encoder that we're done so it knows to reject
+                    # further writes from the producer (which should not
+                    # happen, but the producer may be buggy).
+                    state[0] = 3
+                    encoder._allowNoMoreWrites()
+                    ultimate.errback(err)
+                else:
+                    # The producing Deferred failed after the consuming
+                    # Deferred failed.  It shouldn't have, so it's buggy.
+                    # Log the exception in case anyone who can fix the code
+                    # is watching.
+                    log.err(err, "Producer is buggy")
+
+            consuming.addErrback(ebConsuming)
+            producing.addCallbacks(cbProducing, ebProducing)
+
+            return ultimate
+
+        d = combine(finishedConsuming, finishedProducing)
+        def f(passthrough):
+            # Regardless of what happens with the overall Deferred, once it
+            # fires, the producer registered way up above the definition of
+            # combine should be unregistered.
+            transport.unregisterProducer()
+            return passthrough
+        d.addBoth(f)
+        return d
+
+
+    def writeTo(self, transport):
+        """
+        Format this L{Request} as an HTTP/1.1 request and write it to the given
+        transport.  If bodyProducer is not None, it will be associated with an
+        L{IConsumer}.
+
+        @return: A L{Deferred} which fires with C{None} when the request has
+            been completely written to the transport or with a L{Failure} if
+            there is any problem generating the request bytes.
+        """
+        if self.bodyProducer is not None:
+            if self.bodyProducer.length is UNKNOWN_LENGTH:
+                return self._writeToChunked(transport)
+            else:
+                return self._writeToContentLength(transport)
+        else:
+            self._writeHeaders(transport, None)
+            return succeed(None)
+
+
+    def stopWriting(self):
+        """
+        Stop writing this request to the transport.  This can only be called
+        after C{writeTo} and before the L{Deferred} returned by C{writeTo}
+        fires.  It should cancel any asynchronous task started by C{writeTo}.
+        The L{Deferred} returned by C{writeTo} need not be fired if this method
+        is called.
+        """
+        # If bodyProducer is None, then the Deferred returned by writeTo has
+        # fired already and this method cannot be called.
+        _callAppFunction(self.bodyProducer.stopProducing)
+
+
+
+class LengthEnforcingConsumer:
+    """
+    An L{IConsumer} proxy which enforces an exact length requirement on the
+    total data written to it.
+
+    @ivar _length: The number of bytes remaining to be written.
+
+    @ivar _producer: The L{IBodyProducer} which is writing to this
+        consumer.
+
+    @ivar _consumer: The consumer to which at most C{_length} bytes will be
+        forwarded.
+
+    @ivar _finished: A L{Deferred} which will be fired with a L{Failure} if too
+        many bytes are written to this consumer.
+    """
+    def __init__(self, producer, consumer, finished):
+        self._length = producer.length
+        self._producer = producer
+        self._consumer = consumer
+        self._finished = finished
+
+
+    def _allowNoMoreWrites(self):
+        """
+        Indicate that no additional writes are allowed.  Attempts to write
+        after calling this method will be met with an exception.
+        """
+        self._finished = None
+
+
+    def write(self, bytes):
+        """
+        Write C{bytes} to the underlying consumer unless
+        C{_noMoreWritesExpected} has been called or too many bytes have
+        already been written.
+        """
+        if self._finished is None:
+            # No writes are supposed to happen any more.  Try to convince the
+            # calling code to stop calling this method by calling its
+            # stopProducing method and then throwing an exception at it.  This
+            # exception isn't documented as part of the API because you're
+            # never supposed to expect it: only buggy code will ever receive
+            # it.
+            self._producer.stopProducing()
+            raise ExcessWrite()
+
+        if len(bytes) <= self._length:
+            self._length -= len(bytes)
+            self._consumer.write(bytes)
+        else:
+            # No synchronous exception is raised in *this* error path because
+            # we still have _finished which we can use to report the error to a
+            # better place than the direct caller of this method (some
+            # arbitrary application code).
+            _callAppFunction(self._producer.stopProducing)
+            self._finished.errback(WrongBodyLength("too many bytes written"))
+            self._allowNoMoreWrites()
+
+
+    def _noMoreWritesExpected(self):
+        """
+        Called to indicate no more bytes will be written to this consumer.
+        Check to see that the correct number have been written.
+
+        @raise WrongBodyLength: If not enough bytes have been written.
+        """
+        if self._finished is not None:
+            self._allowNoMoreWrites()
+            if self._length:
+                raise WrongBodyLength("too few bytes written")
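+    # Editor's note: with a producer whose length is 5, write('abc') followed
+    # by write('de') is accepted and _noMoreWritesExpected passes; writing a
+    # sixth byte instead errbacks _finished with WrongBodyLength and stops
+    # the producer.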
+
+
+
+def makeStatefulDispatcher(name, template):
+    """
+    Given a I{dispatch} name and a function, return a function which can be
+    used as a method and which, when called, will call another method defined
+    on the instance and return the result.  The other method which is called is
+    determined by the value of the C{_state} attribute of the instance.
+
+    @param name: A string which is used to construct the name of the subsidiary
+        method to invoke.  The subsidiary method is named like C{'_%s_%s' %
+        (name, _state)}.
+
+    @param template: A function object which is used to give the returned
+        function a docstring.
+
+    @return: The dispatcher function.
+    """
+    def dispatcher(self, *args, **kwargs):
+        func = getattr(self, '_' + name + '_' + self._state, None)
+        if func is None:
+            raise RuntimeError(
+                "%r has no %s method in state %s" % (self, name, self._state))
+        return func(*args, **kwargs)
+    dispatcher.__doc__ = template.__doc__
+    return dispatcher
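+
+
+# Editor's note: the minimal class below was added for this review and is not
+# part of the upstream file.  It illustrates the '_<name>_<state>' naming
+# convention the dispatcher relies on.
+class _ExampleLight:
+    _state = 'RED'
+
+    def advance(self):
+        """
+        Switch to the other light state.
+        """
+    advance = makeStatefulDispatcher('advance', advance)
+
+    def _advance_RED(self):
+        self._state = 'GREEN'
+
+    def _advance_GREEN(self):
+        self._state = 'RED'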
+
+
+
+class Response:
+    """
+    A L{Response} instance describes an HTTP response received from an HTTP
+    server.
+
+    L{Response} should not be subclassed or instantiated.
+
+    @ivar _transport: The transport which is delivering this response.
+
+    @ivar _bodyProtocol: The L{IProtocol} provider to which the body is
+        delivered.  C{None} before one has been registered with
+        C{deliverBody}.
+
+    @ivar _bodyBuffer: A C{list} of the strings passed to C{bodyDataReceived}
+        before C{deliverBody} is called.  C{None} afterwards.
+
+    @ivar _state: Indicates what state this L{Response} instance is in,
+        particularly with respect to delivering bytes from the response body
+        to an application-supplied protocol object.  This may be one of
+        C{'INITIAL'}, C{'CONNECTED'}, C{'DEFERRED_CLOSE'}, or C{'FINISHED'},
+        with the following meanings:
+
+          - INITIAL: This is the state L{Response} objects start in.  No
+            protocol has yet been provided and the underlying transport may
+            still have bytes to deliver to it.
+
+          - DEFERRED_CLOSE: If the underlying transport indicates all bytes
+            have been delivered but no application-provided protocol is yet
+            available, the L{Response} moves to this state.  Data is
+            buffered and waiting for a protocol to be delivered to.
+
+          - CONNECTED: If a protocol is provided when the state is INITIAL,
+            the L{Response} moves to this state.  Any buffered data is
+            delivered and any data which arrives from the transport
+            subsequently is given directly to the protocol.
+
+          - FINISHED: If a protocol is provided in the DEFERRED_CLOSE state,
+            the L{Response} moves to this state after delivering all
+            buffered data to the protocol.  Otherwise, if the L{Response} is
+            in the CONNECTED state, if the transport indicates there is no
+            more data, the L{Response} moves to this state.  Nothing else
+            can happen once the L{Response} is in this state.
+    """
+    implements(IResponse)
+
+    length = UNKNOWN_LENGTH
+
+    _bodyProtocol = None
+    _bodyFinished = False
+
+    def __init__(self, version, code, phrase, headers, _transport):
+        self.version = version
+        self.code = code
+        self.phrase = phrase
+        self.headers = headers
+        self._transport = _transport
+        self._bodyBuffer = []
+        self._state = 'INITIAL'
+
+
+    def deliverBody(self, protocol):
+        """
+        Dispatch the given L{IProtocol} depending on the current state of the
+        response.
+        """
+    deliverBody = makeStatefulDispatcher('deliverBody', deliverBody)
+
+
+    def _deliverBody_INITIAL(self, protocol):
+        """
+        Deliver any buffered data to C{protocol} and prepare to deliver any
+        future data to it.  Move to the C{'CONNECTED'} state.
+        """
+        # Now that there's a protocol to consume the body, resume the
+        # transport.  It was previously paused by HTTPClientParser to avoid
+        # reading too much data before it could be handled.
+        self._transport.resumeProducing()
+
+        protocol.makeConnection(self._transport)
+        self._bodyProtocol = protocol
+        for data in self._bodyBuffer:
+            self._bodyProtocol.dataReceived(data)
+        self._bodyBuffer = None
+        self._state = 'CONNECTED'
+
+
+    def _deliverBody_CONNECTED(self, protocol):
+        """
+        It is invalid to attempt to deliver data to a protocol when it is
+        already being delivered to another protocol.
+        """
+        raise RuntimeError(
+            "Response already has protocol %r, cannot deliverBody "
+            "again" % (self._bodyProtocol,))
+
+
+    def _deliverBody_DEFERRED_CLOSE(self, protocol):
+        """
+        Deliver any buffered data to C{protocol} and then disconnect the
+        protocol.  Move to the C{'FINISHED'} state.
+        """
+        # Unlike _deliverBody_INITIAL, there is no need to resume the
+        # transport here because all of the response data has been received
+        # already.  Some higher level code may want to resume the transport if
+        # that code expects further data to be received over it.
+
+        protocol.makeConnection(self._transport)
+
+        for data in self._bodyBuffer:
+            protocol.dataReceived(data)
+        self._bodyBuffer = None
+        protocol.connectionLost(self._reason)
+        self._state = 'FINISHED'
+
+
+    def _deliverBody_FINISHED(self, protocol):
+        """
+        It is invalid to attempt to deliver data to a protocol after the
+        response body has been delivered to another protocol.
+        """
+        raise RuntimeError(
+            "Response already finished, cannot deliverBody now.")
+
+
+    def _bodyDataReceived(self, data):
+        """
+        Called by HTTPClientParser with chunks of data from the response body.
+        They will be buffered or delivered to the protocol passed to
+        deliverBody.
+        """
+    _bodyDataReceived = makeStatefulDispatcher('bodyDataReceived',
+                                               _bodyDataReceived)
+
+
+    def _bodyDataReceived_INITIAL(self, data):
+        """
+        Buffer any data received for later delivery to a protocol passed to
+        C{deliverBody}.
+
+        Little or no data should be buffered by this method, since the
+        transport has been paused and will not be resumed until a protocol
+        is supplied.
+        """
+        self._bodyBuffer.append(data)
+
+
+    def _bodyDataReceived_CONNECTED(self, data):
+        """
+        Deliver any data received to the protocol to which this L{Response}
+        is connected.
+        """
+        self._bodyProtocol.dataReceived(data)
+
+
+    def _bodyDataReceived_DEFERRED_CLOSE(self, data):
+        """
+        It is invalid for data to be delivered after it has been indicated
+        that the response body has been completely delivered.
+        """
+        raise RuntimeError("Cannot receive body data after _bodyDataFinished")
+
+
+    def _bodyDataReceived_FINISHED(self, data):
+        """
+        It is invalid for data to be delivered after the response body has
+        been delivered to a protocol.
+        """
+        raise RuntimeError("Cannot receive body data after protocol disconnected")
+
+
+    def _bodyDataFinished(self, reason=None):
+        """
+        Called by HTTPClientParser when no more body data is available.  If the
+        optional reason is supplied, this indicates a problem or potential
+        problem receiving all of the response body.
+        """
+    _bodyDataFinished = makeStatefulDispatcher('bodyDataFinished',
+                                               _bodyDataFinished)
+
+
+    def _bodyDataFinished_INITIAL(self, reason=None):
+        """
+        Move to the C{'DEFERRED_CLOSE'} state to wait for a protocol to
+        which to deliver the response body.
+        """
+        self._state = 'DEFERRED_CLOSE'
+        if reason is None:
+            reason = Failure(ResponseDone("Response body fully received"))
+        self._reason = reason
+
+
+    def _bodyDataFinished_CONNECTED(self, reason=None):
+        """
+        Disconnect the protocol and move to the C{'FINISHED'} state.
+        """
+        if reason is None:
+            reason = Failure(ResponseDone("Response body fully received"))
+        self._bodyProtocol.connectionLost(reason)
+        self._bodyProtocol = None
+        self._state = 'FINISHED'
+
+
+    def _bodyDataFinished_DEFERRED_CLOSE(self):
+        """
+        It is invalid to attempt to notify the L{Response} of the end of the
+        response body data more than once.
+        """
+        raise RuntimeError("Cannot finish body data more than once")
+
+
+    def _bodyDataFinished_FINISHED(self):
+        """
+        It is invalid to attempt to notify the L{Response} of the end of the
+        response body data more than once.
+        """
+        raise RuntimeError("Cannot finish body data after protocol disconnected")
+
+
+
+class ChunkedEncoder:
+    """
+    Helper object which exposes L{IConsumer} on top of L{HTTP11ClientProtocol}
+    for streaming request bodies to the server.
+    """
+    implements(IConsumer)
+
+    def __init__(self, transport):
+        self.transport = transport
+
+
+    def _allowNoMoreWrites(self):
+        """
+        Indicate that no additional writes are allowed.  Attempts to write
+        after calling this method will be met with an exception.
+        """
+        self.transport = None
+
+
+    def registerProducer(self, producer, streaming):
+        """
+        Register the given producer with C{self.transport}.
+        """
+        self.transport.registerProducer(producer, streaming)
+
+
+    def write(self, data):
+        """
+        Write the given request body bytes to the transport using chunked
+        encoding.
+
+        @type data: C{str}
+        """
+        if self.transport is None:
+            raise ExcessWrite()
+        self.transport.writeSequence(("%x\r\n" % len(data), data, "\r\n"))
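+        # Editor's note: e.g. write('hello') emits '5\r\n', 'hello', '\r\n';
+        # the empty write('') from unregisterProducer emits the terminating
+        # '0\r\n', '', '\r\n' chunk.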
+
+
+    def unregisterProducer(self):
+        """
+        Indicate that the request body is complete and finish the request.
+        """
+        self.write('')
+        self.transport.unregisterProducer()
+        self._allowNoMoreWrites()
+
+
+
+class TransportProxyProducer:
+    """
+    An L{IPushProducer} implementation which wraps another such thing and
+    proxies calls to it until it is told to stop.
+
+    @ivar _producer: The wrapped L{IPushProducer} provider or C{None} after
+        this proxy has been stopped.
+    """
+    implements(IPushProducer)
+
+    # LineReceiver uses this undocumented attribute of transports to decide
+    # when to stop calling lineReceived or rawDataReceived (if it finds it to
+    # be true, it doesn't bother to deliver any more data).  Set disconnecting
+    # to False here and never change it to true so that all data is always
+    # delivered to us and so that LineReceiver doesn't fail with an
+    # AttributeError.
+    disconnecting = False
+
+    def __init__(self, producer):
+        self._producer = producer
+
+
+    def _stopProxying(self):
+        """
+        Stop forwarding calls of L{IPushProducer} methods to the underlying
+        L{IPushProvider} provider.
+        """
+        self._producer = None
+
+
+    def stopProducing(self):
+        """
+        Proxy the stoppage to the underlying producer, unless this proxy has
+        been stopped.
+        """
+        if self._producer is not None:
+            self._producer.stopProducing()
+
+
+    def resumeProducing(self):
+        """
+        Proxy the resumption to the underlying producer, unless this proxy has
+        been stopped.
+        """
+        if self._producer is not None:
+            self._producer.resumeProducing()
+
+
+    def pauseProducing(self):
+        """
+        Proxy the pause to the underlying producer, unless this proxy has been
+        stopped.
+        """
+        if self._producer is not None:
+            self._producer.pauseProducing()
+
+
+
+class HTTP11ClientProtocol(Protocol):
+    """
+    L{HTTP11ClientProtocol} is an implementation of the HTTP 1.1 client
+    protocol.  It supports as few features as possible.
+
+    @ivar _parser: After a request is issued, the L{HTTPClientParser} to
+        which received data making up the response to that request is
+        delivered.
+
+    @ivar _finishedRequest: After a request is issued, the L{Deferred} which
+        will fire when a L{Response} object corresponding to that request is
+        available.  This allows L{HTTP11ClientProtocol} to fail the request
+        if there is a connection or parsing problem.
+
+    @ivar _currentRequest: After a request is issued, the L{Request}
+        instance used to make that request.  This allows
+        L{HTTP11ClientProtocol} to stop request generation if necessary (for
+        example, if the connection is lost).
+
+    @ivar _transportProxy: After a request is issued, the
+        L{TransportProxyProducer} to which C{_parser} is connected.  This
+        allows C{_parser} to pause and resume the transport in a way which
+        L{HTTP11ClientProtocol} can exert some control over.
+
+    @ivar _responseDeferred: After a request is issued, the L{Deferred} from
+        C{_parser} which will fire with a L{Response} when one has been
+        received.  This is eventually chained with C{_finishedRequest}, but
+        only in certain cases to avoid double firing that Deferred.
+
+    @ivar _state: Indicates what state this L{HTTP11ClientProtocol} instance
+        is in with respect to transmission of a request and reception of a
+        response.  This may be one of the following strings:
+
+          - QUIESCENT: This is the state L{HTTP11ClientProtocol} instances
+            start in.  Nothing is happening: no request is being sent and no
+            response is being received or expected.
+
+          - TRANSMITTING: When a request is made (via L{request}), the
+            instance moves to this state.  L{Request.writeTo} has been used
+            to start to send a request but it has not yet finished.
+
+          - TRANSMITTING_AFTER_RECEIVING_RESPONSE: The server has returned a
+            complete response but the request has not yet been fully sent.
+            The instance will remain in this state until the request
+            is fully sent.
+
+          - GENERATION_FAILED: There was an error while generating the
+            request.  The request was not fully sent to the network.
+
+          - WAITING: The request was fully sent to the network.  The
+            instance is now waiting for the response to be fully received.
+
+          - ABORTING: Application code has requested that the HTTP connection
+            be aborted.
+
+          - CONNECTION_LOST: The connection has been lost.
+
+    @ivar _abortDeferreds: A list of C{Deferred} instances that will fire when
+        the connection is lost.
+    """
+    _state = 'QUIESCENT'
+    _parser = None
+    _finishedRequest = None
+    _currentRequest = None
+    _transportProxy = None
+    _responseDeferred = None
+
+
+    def __init__(self, quiescentCallback=lambda c: None):
+        self._quiescentCallback = quiescentCallback
+        self._abortDeferreds = []
+
+
+    @property
+    def state(self):
+        return self._state
+
+
+    def request(self, request):
+        """
+        Issue C{request} over C{self.transport} and return a L{Deferred} which
+        will fire with a L{Response} instance or an error.
+
+        @param request: The object defining the parameters of the request to
+           issue.
+        @type request: L{Request}
+
+        @rtype: L{Deferred}
+        @return: The deferred may errback with L{RequestGenerationFailed} if
+            the request was not fully written to the transport due to a local
+            error.  It may errback with L{RequestTransmissionFailed} if it was
+            not fully written to the transport due to a network error.  It may
+            errback with L{ResponseFailed} if the request was sent (not
+            necessarily received) but some or all of the response was lost.  It
+            may errback with L{RequestNotSent} if it is not possible to send
+            any more requests using this L{HTTP11ClientProtocol}.
+        """
+        if self._state != 'QUIESCENT':
+            return fail(RequestNotSent())
+
+        self._state = 'TRANSMITTING'
+        _requestDeferred = maybeDeferred(request.writeTo, self.transport)
+        self._finishedRequest = Deferred()
+
+        # Keep track of the Request object in case we need to call stopWriting
+        # on it.
+        self._currentRequest = request
+
+        self._transportProxy = TransportProxyProducer(self.transport)
+        self._parser = HTTPClientParser(request, self._finishResponse)
+        self._parser.makeConnection(self._transportProxy)
+        self._responseDeferred = self._parser._responseDeferred
+
+        def cbRequestWritten(ignored):
+            if self._state == 'TRANSMITTING':
+                self._state = 'WAITING'
+                self._responseDeferred.chainDeferred(self._finishedRequest)
+
+        def ebRequestWriting(err):
+            if self._state == 'TRANSMITTING':
+                self._state = 'GENERATION_FAILED'
+                self.transport.loseConnection()
+                self._finishedRequest.errback(
+                    Failure(RequestGenerationFailed([err])))
+            else:
+                log.err(err, 'Error writing request, but not in valid state '
+                             'to finalize request: %s' % self._state)
+
+        _requestDeferred.addCallbacks(cbRequestWritten, ebRequestWriting)
+
+        return self._finishedRequest
+
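+    # A minimal usage sketch, assuming ``proto`` is an instance of this
+    # protocol already connected to a transport and ``someRequest`` is a
+    # Request built elsewhere (both names are placeholders):
+    #
+    #   d = proto.request(someRequest)
+    #   d.addCallback(lambda response: response.code)
+    #   d.addErrback(log.err)
+    #
+    # A second call to request() before the protocol returns to QUIESCENT
+    # simply errbacks with RequestNotSent, per the state machine above.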
+
+    def _finishResponse(self, rest):
+        """
+        Called by an L{HTTPClientParser} to indicate that it has parsed a
+        complete response.
+
+        @param rest: A C{str} giving any trailing bytes delivered to the
+            L{HTTPClientParser} which were not part of the response it was
+            parsing.
+        """
+    _finishResponse = makeStatefulDispatcher('finishResponse', _finishResponse)
+
+
+    def _finishResponse_WAITING(self, rest):
+        # Currently the rest parameter is ignored. Don't forget to use it if
+        # we ever add support for pipelining. And maybe check what trailers
+        # mean.
+        if self._state == 'WAITING':
+            self._state = 'QUIESCENT'
+        else:
+            # The server sent the entire response before we could send the
+            # whole request.  That sucks.  Oh well.  Fire the request()
+            # Deferred with the response.  But first, make sure that if the
+            # request does ever finish being written that it won't try to fire
+            # that Deferred.
+            self._state = 'TRANSMITTING_AFTER_RECEIVING_RESPONSE'
+            self._responseDeferred.chainDeferred(self._finishedRequest)
+
+        # This will happen if we're being called due to connection being lost;
+        # if so, no need to disconnect parser again, or to call
+        # _quiescentCallback.
+        if self._parser is None:
+            return
+
+        reason = ConnectionDone("synthetic!")
+        connHeaders = self._parser.connHeaders.getRawHeaders('connection', ())
+        if (('close' in connHeaders) or self._state != "QUIESCENT" or
+            not self._currentRequest.persistent):
+            self._giveUp(Failure(reason))
+        else:
+            # We call the quiescent callback first, to ensure the connection
+            # gets added back to the connection pool before we finish the
+            # request.
+            try:
+                self._quiescentCallback(self)
+            except:
+                # If the callback throws an exception, just log it and
+                # disconnect; keeping persistent connections around is an
+                # optimisation:
+                log.err()
+                self.transport.loseConnection()
+            self._disconnectParser(reason)
+
+
+    _finishResponse_TRANSMITTING = _finishResponse_WAITING
+
+
+    def _disconnectParser(self, reason):
+        """
+        If there is still a parser, call its C{connectionLost} method with the
+        given reason.  If there is not, do nothing.
+
+        @type reason: L{Failure}
+        """
+        if self._parser is not None:
+            parser = self._parser
+            self._parser = None
+            self._currentRequest = None
+            self._finishedRequest = None
+            self._responseDeferred = None
+
+            # The parser is no longer allowed to do anything to the real
+            # transport.  Stop proxying from the parser's transport to the real
+            # transport before telling the parser it's done so that it can't do
+            # anything.
+            self._transportProxy._stopProxying()
+            self._transportProxy = None
+            parser.connectionLost(reason)
+
+
+    def _giveUp(self, reason):
+        """
+        Lose the underlying connection and disconnect the parser with the given
+        L{Failure}.
+
+        Use this method instead of calling the transport's C{loseConnection}
+        method directly; it also disconnects the parser, which keeps this
+        protocol's internal state consistent.
+        """
+        self.transport.loseConnection()
+        self._disconnectParser(reason)
+
+
+    def dataReceived(self, bytes):
+        """
+        Deliver bytes received from the transport to the response parser.  If
+        the parser raises an exception, give up on the request and close the
+        connection.
+        """
+        try:
+            self._parser.dataReceived(bytes)
+        except:
+            self._giveUp(Failure())
+
+
+    def connectionLost(self, reason):
+        """
+        The underlying transport went away.  If appropriate, notify the parser
+        object.
+        """
+    connectionLost = makeStatefulDispatcher('connectionLost', connectionLost)
+
+
+    def _connectionLost_QUIESCENT(self, reason):
+        """
+        Nothing is currently happening.  Move to the C{'CONNECTION_LOST'}
+        state but otherwise do nothing.
+        """
+        self._state = 'CONNECTION_LOST'
+
+
+    def _connectionLost_GENERATION_FAILED(self, reason):
+        """
+        The connection was in an inconsistent state.  Move to the
+        C{'CONNECTION_LOST'} state but otherwise do nothing.
+        """
+        self._state = 'CONNECTION_LOST'
+
+
+    def _connectionLost_TRANSMITTING(self, reason):
+        """
+        Fail the L{Deferred} for the current request, notify the request
+        object that it does not need to continue transmitting itself, and
+        move to the C{'CONNECTION_LOST'} state.
+        """
+        self._state = 'CONNECTION_LOST'
+        self._finishedRequest.errback(
+            Failure(RequestTransmissionFailed([reason])))
+        del self._finishedRequest
+
+        # Tell the request that it should stop bothering now.
+        self._currentRequest.stopWriting()
+
+
+    def _connectionLost_TRANSMITTING_AFTER_RECEIVING_RESPONSE(self, reason):
+        """
+        Move to the C{'CONNECTION_LOST'} state.
+        """
+        self._state = 'CONNECTION_LOST'
+
+
+    def _connectionLost_WAITING(self, reason):
+        """
+        Disconnect the response parser so that it can propagate the event as
+        necessary (for example, to call an application protocol's
+        C{connectionLost} method, or to fail a request L{Deferred}) and move
+        to the C{'CONNECTION_LOST'} state.
+        """
+        self._disconnectParser(reason)
+        self._state = 'CONNECTION_LOST'
+
+
+    def _connectionLost_ABORTING(self, reason):
+        """
+        Disconnect the response parser with a L{ConnectionAborted} failure, and
+        move to the C{'CONNECTION_LOST'} state.
+        """
+        self._disconnectParser(Failure(ConnectionAborted()))
+        self._state = 'CONNECTION_LOST'
+        for d in self._abortDeferreds:
+            d.callback(None)
+        self._abortDeferreds = []
+
+
+    def abort(self):
+        """
+        Close the connection and cause all outstanding L{request} L{Deferred}s
+        to fire with an error.
+        """
+        if self._state == "CONNECTION_LOST":
+            return succeed(None)
+        self.transport.loseConnection()
+        self._state = 'ABORTING'
+        d = Deferred()
+        self._abortDeferreds.append(d)
+        return d
diff --git a/ThirdParty/Twisted/twisted/web/_responses.py b/ThirdParty/Twisted/twisted/web/_responses.py
new file mode 100644
index 0000000..4766b40
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_responses.py
@@ -0,0 +1,114 @@
+# -*- test-case-name: twisted.web.test.test_http -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+HTTP response code definitions.
+"""
+
+from __future__ import division, absolute_import
+
+_CONTINUE = 100
+SWITCHING = 101
+
+OK                              = 200
+CREATED                         = 201
+ACCEPTED                        = 202
+NON_AUTHORITATIVE_INFORMATION   = 203
+NO_CONTENT                      = 204
+RESET_CONTENT                   = 205
+PARTIAL_CONTENT                 = 206
+MULTI_STATUS                    = 207
+
+MULTIPLE_CHOICE                 = 300
+MOVED_PERMANENTLY               = 301
+FOUND                           = 302
+SEE_OTHER                       = 303
+NOT_MODIFIED                    = 304
+USE_PROXY                       = 305
+TEMPORARY_REDIRECT              = 307
+
+BAD_REQUEST                     = 400
+UNAUTHORIZED                    = 401
+PAYMENT_REQUIRED                = 402
+FORBIDDEN                       = 403
+NOT_FOUND                       = 404
+NOT_ALLOWED                     = 405
+NOT_ACCEPTABLE                  = 406
+PROXY_AUTH_REQUIRED             = 407
+REQUEST_TIMEOUT                 = 408
+CONFLICT                        = 409
+GONE                            = 410
+LENGTH_REQUIRED                 = 411
+PRECONDITION_FAILED             = 412
+REQUEST_ENTITY_TOO_LARGE        = 413
+REQUEST_URI_TOO_LONG            = 414
+UNSUPPORTED_MEDIA_TYPE          = 415
+REQUESTED_RANGE_NOT_SATISFIABLE = 416
+EXPECTATION_FAILED              = 417
+
+INTERNAL_SERVER_ERROR           = 500
+NOT_IMPLEMENTED                 = 501
+BAD_GATEWAY                     = 502
+SERVICE_UNAVAILABLE             = 503
+GATEWAY_TIMEOUT                 = 504
+HTTP_VERSION_NOT_SUPPORTED      = 505
+INSUFFICIENT_STORAGE_SPACE      = 507
+NOT_EXTENDED                    = 510
+
+RESPONSES = {
+    # 100
+    _CONTINUE: "Continue",
+    SWITCHING: "Switching Protocols",
+
+    # 200
+    OK: "OK",
+    CREATED: "Created",
+    ACCEPTED: "Accepted",
+    NON_AUTHORITATIVE_INFORMATION: "Non-Authoritative Information",
+    NO_CONTENT: "No Content",
+    RESET_CONTENT: "Reset Content",
+    PARTIAL_CONTENT: "Partial Content",
+    MULTI_STATUS: "Multi-Status",
+
+    # 300
+    MULTIPLE_CHOICE: "Multiple Choices",
+    MOVED_PERMANENTLY: "Moved Permanently",
+    FOUND: "Found",
+    SEE_OTHER: "See Other",
+    NOT_MODIFIED: "Not Modified",
+    USE_PROXY: "Use Proxy",
+    # 306 not defined??
+    TEMPORARY_REDIRECT: "Temporary Redirect",
+
+    # 400
+    BAD_REQUEST: "Bad Request",
+    UNAUTHORIZED: "Unauthorized",
+    PAYMENT_REQUIRED: "Payment Required",
+    FORBIDDEN: "Forbidden",
+    NOT_FOUND: "Not Found",
+    NOT_ALLOWED: "Method Not Allowed",
+    NOT_ACCEPTABLE: "Not Acceptable",
+    PROXY_AUTH_REQUIRED: "Proxy Authentication Required",
+    REQUEST_TIMEOUT: "Request Time-out",
+    CONFLICT: "Conflict",
+    GONE: "Gone",
+    LENGTH_REQUIRED: "Length Required",
+    PRECONDITION_FAILED: "Precondition Failed",
+    REQUEST_ENTITY_TOO_LARGE: "Request Entity Too Large",
+    REQUEST_URI_TOO_LONG: "Request-URI Too Long",
+    UNSUPPORTED_MEDIA_TYPE: "Unsupported Media Type",
+    REQUESTED_RANGE_NOT_SATISFIABLE: "Requested Range not satisfiable",
+    EXPECTATION_FAILED: "Expectation Failed",
+
+    # 500
+    INTERNAL_SERVER_ERROR: "Internal Server Error",
+    NOT_IMPLEMENTED: "Not Implemented",
+    BAD_GATEWAY: "Bad Gateway",
+    SERVICE_UNAVAILABLE: "Service Unavailable",
+    GATEWAY_TIMEOUT: "Gateway Time-out",
+    HTTP_VERSION_NOT_SUPPORTED: "HTTP Version not supported",
+    INSUFFICIENT_STORAGE_SPACE: "Insufficient Storage Space",
+    NOT_EXTENDED: "Not Extended"
+    }
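+# A minimal usage sketch: the mapping above is normally consulted to turn a
+# numeric code into its reason phrase, with a fallback for unknown codes.
+#
+#   phrase = RESPONSES.get(NOT_FOUND, "Unknown Status")   # "Not Found"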
+
diff --git a/ThirdParty/Twisted/twisted/web/_stan.py b/ThirdParty/Twisted/twisted/web/_stan.py
new file mode 100644
index 0000000..004761f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_stan.py
@@ -0,0 +1,325 @@
+# -*- test-case-name: twisted.web.test.test_stan -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An s-expression-like syntax for expressing XML in pure Python.
+
+Stan tags allow you to build XML documents using Python.
+
+Stan is a DOM, or Document Object Model, implemented using basic Python types
+and functions called "flatteners". A flattener is a function that knows how to
+turn an object of a specific type into something that is closer to an HTML
+string. Stan differs from the W3C DOM by being less cumbersome and
+heavyweight: since the object model is built from simple Python types such as
+lists, strings, and dictionaries, the API is smaller and a DOM is easier to
+construct.
+
+@var voidElements: the names of HTML 'U{void
+    elements<http://www.whatwg.org/specs/web-apps/current-work/multipage/syntax.html#void-elements>}';
+    those which can't have contents and can therefore be self-closing in the
+    output.
+"""
+
+
+class slot(object):
+    """
+    Marker for markup insertion in a template.
+
+    @type name: C{str}
+    @ivar name: The name of this slot.  The key which must be used in
+        L{Tag.fillSlots} to fill it.
+
+    @type children: C{list}
+    @ivar children: The L{Tag} objects included in this L{slot}'s template.
+
+    @type default: anything flattenable, or C{NoneType}
+    @ivar default: The default contents of this slot, if it is left unfilled.
+        If this is C{None}, an L{UnfilledSlot} will be raised, rather than
+        C{None} actually being used.
+
+    @type filename: C{str} or C{NoneType}
+    @ivar filename: The name of the XML file from which this tag was parsed.
+        If it was not parsed from an XML file, C{None}.
+
+    @type lineNumber: C{int} or C{NoneType}
+    @ivar lineNumber: The line number on which this tag was encountered in the
+        XML file from which it was parsed.  If it was not parsed from an XML
+        file, C{None}.
+
+    @type columnNumber: C{int} or C{NoneType}
+    @ivar columnNumber: The column number at which this tag was encountered in
+        the XML file from which it was parsed.  If it was not parsed from an
+        XML file, C{None}.
+    """
+
+    def __init__(self, name, default=None, filename=None, lineNumber=None,
+                 columnNumber=None):
+        self.name = name
+        self.children = []
+        self.default = default
+        self.filename = filename
+        self.lineNumber = lineNumber
+        self.columnNumber = columnNumber
+
+
+    def __repr__(self):
+        return "slot(%r)" % (self.name,)
+
+
+
+class Tag(object):
+    """
+    A L{Tag} represents an XML tag with a tag name, attributes, and children.
+    A L{Tag} can be constructed using the special L{twisted.web.template.tags}
+    object, or it may be constructed directly with a tag name. L{Tag}s have a
+    special method, C{__call__}, which makes representing trees of XML natural
+    using pure python syntax.
+
+    @ivar tagName: The name of the represented element.  For a tag like
+        C{<div></div>}, this would be C{"div"}.
+    @type tagName: C{str}
+
+    @ivar attributes: The attributes of the element.
+    @type attributes: C{dict} mapping C{str} to renderable objects.
+
+    @ivar children: The child L{Tag}s of this C{Tag}.
+    @type children: C{list} of renderable objects.
+
+    @ivar render: The name of the render method to use for this L{Tag}.  This
+        name will be looked up at render time by the
+        L{twisted.web.template.Element} doing the rendering, via
+        L{twisted.web.template.Element.lookupRenderMethod}, to determine which
+        method to call.
+    @type render: C{str}
+
+    @type filename: C{str} or C{NoneType}
+    @ivar filename: The name of the XML file from which this tag was parsed.
+        If it was not parsed from an XML file, C{None}.
+
+    @type lineNumber: C{int} or C{NoneType}
+    @ivar lineNumber: The line number on which this tag was encountered in the
+        XML file from which it was parsed.  If it was not parsed from an XML
+        file, C{None}.
+
+    @type columnNumber: C{int} or C{NoneType}
+    @ivar columnNumber: The column number at which this tag was encountered in
+        the XML file from which it was parsed.  If it was not parsed from an
+        XML file, C{None}.
+
+    @type slotData: C{dict} or C{NoneType}
+    @ivar slotData: The data which can fill slots.  If present, a dictionary
+        mapping slot names to renderable values.  The values in this dict might
+        be anything that can be present as the child of a L{Tag}; strings,
+        lists, L{Tag}s, generators, etc.
+    """
+
+    slotData = None
+    filename = None
+    lineNumber = None
+    columnNumber = None
+
+    def __init__(self, tagName, attributes=None, children=None, render=None,
+                 filename=None, lineNumber=None, columnNumber=None):
+        self.tagName = tagName
+        self.render = render
+        if attributes is None:
+            self.attributes = {}
+        else:
+            self.attributes = attributes
+        if children is None:
+            self.children = []
+        else:
+            self.children = children
+        if filename is not None:
+            self.filename = filename
+        if lineNumber is not None:
+            self.lineNumber = lineNumber
+        if columnNumber is not None:
+            self.columnNumber = columnNumber
+
+
+    def fillSlots(self, **slots):
+        """
+        Remember the slots provided at this position in the DOM.
+
+        During the rendering of children of this node, slots with names in
+        C{slots} will be rendered as their corresponding values.
+
+        @return: C{self}. This enables the idiom C{return tag.fillSlots(...)} in
+            renderers.
+        """
+        if self.slotData is None:
+            self.slotData = {}
+        self.slotData.update(slots)
+        return self
+
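+    # A minimal sketch of slot filling, assuming flattening via
+    # twisted.web.template: a slot() child acts as a placeholder that
+    # fillSlots() later resolves by name.
+    #
+    #   greeting = Tag('div')(slot('name'))
+    #   greeting.fillSlots(name='world')   # flattens to <div>world</div>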
+
+    def __call__(self, *children, **kw):
+        """
+        Add children and change attributes on this tag.
+
+        This is implemented using __call__ because it then allows the natural
+        syntax::
+
+          table(tr1, tr2, width="100%", height="50%", border="1")
+
+        Children may be other tag instances, strings, functions, or any other
+        object which has a registered flattener.
+
+        Attributes may be 'transparent' tag instances (so that
+        C{a(href=transparent(data="foo", render=myhrefrenderer))} works),
+        strings, functions, or any other object which has a registered
+        flattener.
+
+        If the attribute is a python keyword, such as 'class', you can add an
+        underscore to the name, like 'class_'.
+
+        There is one special keyword argument, 'render', which will be used as
+        the name of the renderer and saved as the 'render' attribute of this
+        instance, rather than the DOM 'render' attribute in the attributes
+        dictionary.
+        """
+        self.children.extend(children)
+
+        for k, v in kw.iteritems():
+            if k[-1] == '_':
+                k = k[:-1]
+
+            if k == 'render':
+                self.render = v
+            else:
+                self.attributes[k] = v
+        return self
+
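+    # A minimal sketch of the call syntax described above; the render method
+    # name 'rowStyler' is a placeholder.
+    #
+    #   row = Tag('tr')(Tag('td')('cell'), class_='odd', render='rowStyler')
+    #   # row.attributes == {'class': 'odd'} and row.render == 'rowStyler'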
+
+    def _clone(self, obj, deep):
+        """
+        Clone an arbitrary object; used by L{Tag.clone}.
+
+        @param obj: an object with a clone method, a list or tuple, or something
+            which should be immutable.
+
+        @param deep: whether to continue cloning child objects; i.e. the
+            contents of lists, the sub-tags within a tag.
+
+        @return: a clone of C{obj}.
+        """
+        if hasattr(obj, 'clone'):
+            return obj.clone(deep)
+        elif isinstance(obj, (list, tuple)):
+            return [self._clone(x, deep) for x in obj]
+        else:
+            return obj
+
+
+    def clone(self, deep=True):
+        """
+        Return a clone of this tag. If deep is True, clone all of this tag's
+        children. Otherwise, just shallow copy the children list without copying
+        the children themselves.
+        """
+        if deep:
+            newchildren = [self._clone(x, True) for x in self.children]
+        else:
+            newchildren = self.children[:]
+        newattrs = self.attributes.copy()
+        for key in newattrs:
+            newattrs[key] = self._clone(newattrs[key], True)
+
+        newslotdata = None
+        if self.slotData:
+            newslotdata = self.slotData.copy()
+            for key in newslotdata:
+                newslotdata[key] = self._clone(newslotdata[key], True)
+
+        newtag = Tag(
+            self.tagName,
+            attributes=newattrs,
+            children=newchildren,
+            render=self.render,
+            filename=self.filename,
+            lineNumber=self.lineNumber,
+            columnNumber=self.columnNumber)
+        newtag.slotData = newslotdata
+
+        return newtag
+
+
+    def clear(self):
+        """
+        Clear any existing children from this tag.
+        """
+        self.children = []
+        return self
+
+
+    def __repr__(self):
+        rstr = ''
+        if self.attributes:
+            rstr += ', attributes=%r' % self.attributes
+        if self.children:
+            rstr += ', children=%r' % self.children
+        return "Tag(%r%s)" % (self.tagName, rstr)
+
+
+
+voidElements = ('img', 'br', 'hr', 'base', 'meta', 'link', 'param', 'area',
+                'input', 'col', 'basefont', 'isindex', 'frame', 'command',
+                'embed', 'keygen', 'source', 'track', 'wbr')
+
+
+class CDATA(object):
+    """
+    A C{<![CDATA[]]>} block from a template.  Given a separate representation
+    in the DOM so that it may be round-tripped through rendering without
+    losing information.
+
+    @ivar data: The data between "C{<![CDATA[}" and "C{]]>}".
+    @type data: C{unicode}
+    """
+    def __init__(self, data):
+        self.data = data
+
+
+    def __repr__(self):
+        return 'CDATA(%r)' % (self.data,)
+
+
+
+class Comment(object):
+    """
+    A C{<!-- -->} comment from a template.  Given a separate representation
+    in the DOM so that it may be round-tripped through rendering without
+    losing information.
+
+    @ivar data: The data between "C{<!--}" and "C{-->}".
+    @type data: C{unicode}
+    """
+
+    def __init__(self, data):
+        self.data = data
+
+
+    def __repr__(self):
+        return 'Comment(%r)' % (self.data,)
+
+
+
+class CharRef(object):
+    """
+    A numeric character reference.  Given a separate representation in the DOM
+    so that non-ASCII characters may be output as pure ASCII.
+
+    @ivar ordinal: The ordinal value of the unicode character to which this
+        object refers.
+    @type ordinal: C{int}
+
+    @since: 12.0
+    """
+    def __init__(self, ordinal):
+        self.ordinal = ordinal
+
+
+    def __repr__(self):
+        return "CharRef(%d)" % (self.ordinal,)
diff --git a/ThirdParty/Twisted/twisted/web/_version.py b/ThirdParty/Twisted/twisted/web/_version.py
new file mode 100644
index 0000000..6fddd8c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.web', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/web/client.py b/ThirdParty/Twisted/twisted/web/client.py
new file mode 100644
index 0000000..45a4b37
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/client.py
@@ -0,0 +1,1616 @@
+# -*- test-case-name: twisted.web.test.test_webclient,twisted.web.test.test_agent -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+HTTP client.
+"""
+
+from __future__ import division, absolute_import
+
+import os, types
+try:
+    from urlparse import urlunparse
+    from urllib import splithost, splittype
+except ImportError:
+    from urllib.parse import splithost, splittype
+    from urllib.parse import urlunparse as _urlunparse
+
+    def urlunparse(parts):
+        result = _urlunparse(tuple([p.decode("charmap") for p in parts]))
+        return result.encode("charmap")
+import zlib
+
+from zope.interface import implementer
+
+from twisted.python.compat import _PY3, nativeString, intToBytes
+from twisted.python import log
+from twisted.python.failure import Failure
+from twisted.web import http
+from twisted.internet import defer, protocol, task, reactor
+from twisted.internet.interfaces import IProtocol
+from twisted.internet._endpointspy3 import TCP4ClientEndpoint, SSL4ClientEndpoint
+from twisted.python import failure
+from twisted.python._utilpy3 import InsensitiveDict
+from twisted.python.components import proxyForInterface
+from twisted.web import error
+from twisted.web.iweb import UNKNOWN_LENGTH, IBodyProducer, IResponse
+from twisted.web.http_headers import Headers
+from twisted.python.compat import set
+
+
+class PartialDownloadError(error.Error):
+    """
+    Page was only partially downloaded; the connection was lost partway
+    through the download.
+
+    @ivar response: All of the response body which was downloaded.
+    """
+
+
+class HTTPPageGetter(http.HTTPClient):
+    """
+    Gets a resource via HTTP, then quits.
+
+    Typically used with L{HTTPClientFactory}.  Note that this class does not, by
+    itself, do anything with the response.  If you want to download a resource
+    into a file, use L{HTTPPageDownloader} instead.
+
+    @ivar _completelyDone: A boolean indicating whether this request is the
+        last one needed to provide a result to C{self.factory.deferred}.  If
+        it is C{False}, then a redirect is going to be followed.  Otherwise,
+        this protocol's connection is the last one before firing the result
+        Deferred.  This is used to make sure the result Deferred is only
+        fired after the connection is cleaned up.
+    """
+
+    quietLoss = 0
+    followRedirect = True
+    failed = 0
+
+    _completelyDone = True
+
+    _specialHeaders = set((b'host', b'user-agent', b'cookie', b'content-length'))
+
+    def connectionMade(self):
+        method = getattr(self.factory, 'method', b'GET')
+        self.sendCommand(method, self.factory.path)
+        if self.factory.scheme == b'http' and self.factory.port != 80:
+            host = self.factory.host + b':' + intToBytes(self.factory.port)
+        elif self.factory.scheme == b'https' and self.factory.port != 443:
+            host = self.factory.host + b':' + intToBytes(self.factory.port)
+        else:
+            host = self.factory.host
+        self.sendHeader(b'Host', self.factory.headers.get(b"host", host))
+        self.sendHeader(b'User-Agent', self.factory.agent)
+        data = getattr(self.factory, 'postdata', None)
+        if data is not None:
+            self.sendHeader(b"Content-Length", intToBytes(len(data)))
+
+        cookieData = []
+        for (key, value) in self.factory.headers.items():
+            if key.lower() not in self._specialHeaders:
+                # we calculated it on our own
+                self.sendHeader(key, value)
+            if key.lower() == b'cookie':
+                cookieData.append(value)
+        for cookie, cookval in self.factory.cookies.items():
+            cookieData.append(cookie + b'=' + cookval)
+        if cookieData:
+            self.sendHeader(b'Cookie', b'; '.join(cookieData))
+        self.endHeaders()
+        self.headers = {}
+
+        if data is not None:
+            self.transport.write(data)
+
+    def handleHeader(self, key, value):
+        """
+        Called every time a header is received. Stores the header information
+        as key-value pairs in the C{headers} attribute.
+
+        @type key: C{str}
+        @param key: An HTTP header field name.
+
+        @type value: C{str}
+        @param value: An HTTP header field value.
+        """
+        key = key.lower()
+        l = self.headers.setdefault(key, [])
+        l.append(value)
+
+    def handleStatus(self, version, status, message):
+        self.version, self.status, self.message = version, status, message
+        self.factory.gotStatus(version, status, message)
+
+    def handleEndHeaders(self):
+        self.factory.gotHeaders(self.headers)
+        m = getattr(self, 'handleStatus_' + nativeString(self.status),
+                    self.handleStatusDefault)
+        m()
+
+    def handleStatus_200(self):
+        pass
+
+    handleStatus_201 = lambda self: self.handleStatus_200()
+    handleStatus_202 = lambda self: self.handleStatus_200()
+
+    def handleStatusDefault(self):
+        self.failed = 1
+
+    def handleStatus_301(self):
+        l = self.headers.get(b'location')
+        if not l:
+            self.handleStatusDefault()
+            return
+        url = l[0]
+        if self.followRedirect:
+            scheme, host, port, path = \
+                _parse(url, defaultPort=self.transport.getPeer().port)
+
+            self.factory._redirectCount += 1
+            if self.factory._redirectCount >= self.factory.redirectLimit:
+                err = error.InfiniteRedirection(
+                    self.status,
+                    'Infinite redirection detected',
+                    location=url)
+                self.factory.noPage(failure.Failure(err))
+                self.quietLoss = True
+                self.transport.loseConnection()
+                return
+
+            self._completelyDone = False
+            self.factory.setURL(url)
+
+            if self.factory.scheme == b'https':
+                from twisted.internet import ssl
+                contextFactory = ssl.ClientContextFactory()
+                reactor.connectSSL(nativeString(self.factory.host),
+                                   self.factory.port,
+                                   self.factory, contextFactory)
+            else:
+                reactor.connectTCP(nativeString(self.factory.host),
+                                   self.factory.port,
+                                   self.factory)
+        else:
+            self.handleStatusDefault()
+            self.factory.noPage(
+                failure.Failure(
+                    error.PageRedirect(
+                        self.status, self.message, location = url)))
+        self.quietLoss = True
+        self.transport.loseConnection()
+
+    def handleStatus_302(self):
+        if self.afterFoundGet:
+            self.handleStatus_303()
+        else:
+            self.handleStatus_301()
+
+
+    def handleStatus_303(self):
+        self.factory.method = b'GET'
+        self.handleStatus_301()
+
+
+    def connectionLost(self, reason):
+        """
+        When the connection used to issue the HTTP request is closed, notify the
+        factory if we have not already, so it can produce a result.
+        """
+        if not self.quietLoss:
+            http.HTTPClient.connectionLost(self, reason)
+            self.factory.noPage(reason)
+        if self._completelyDone:
+            # Only if we think we're completely done do we tell the factory that
+            # we're "disconnected".  This way when we're following redirects,
+            # only the last protocol used will fire the _disconnectedDeferred.
+            self.factory._disconnectedDeferred.callback(None)
+
+
+    def handleResponse(self, response):
+        if self.quietLoss:
+            return
+        if self.failed:
+            self.factory.noPage(
+                failure.Failure(
+                    error.Error(
+                        self.status, self.message, response)))
+        if self.factory.method == b'HEAD':
+            # Callback with empty string, since there is never a response
+            # body for HEAD requests.
+            self.factory.page(b'')
+        elif self.length != None and self.length != 0:
+            self.factory.noPage(failure.Failure(
+                PartialDownloadError(self.status, self.message, response)))
+        else:
+            self.factory.page(response)
+        # server might be stupid and not close connection. admittedly
+        # the fact we do only one request per connection is also
+        # stupid...
+        self.transport.loseConnection()
+
+    def timeout(self):
+        self.quietLoss = True
+        self.transport.loseConnection()
+        self.factory.noPage(defer.TimeoutError("Getting %s took longer than %s seconds." % (self.factory.url, self.factory.timeout)))
+
+
+class HTTPPageDownloader(HTTPPageGetter):
+
+    transmittingPage = 0
+
+    def handleStatus_200(self, partialContent=0):
+        HTTPPageGetter.handleStatus_200(self)
+        self.transmittingPage = 1
+        self.factory.pageStart(partialContent)
+
+    def handleStatus_206(self):
+        self.handleStatus_200(partialContent=1)
+
+    def handleResponsePart(self, data):
+        if self.transmittingPage:
+            self.factory.pagePart(data)
+
+    def handleResponseEnd(self):
+        if self.length:
+            self.transmittingPage = 0
+            self.factory.noPage(
+                failure.Failure(
+                    PartialDownloadError(self.status)))
+        if self.transmittingPage:
+            self.factory.pageEnd()
+            self.transmittingPage = 0
+        if self.failed:
+            self.factory.noPage(
+                failure.Failure(
+                    error.Error(
+                        self.status, self.message, None)))
+            self.transport.loseConnection()
+
+
+class HTTPClientFactory(protocol.ClientFactory):
+    """Download a given URL.
+
+    @type deferred: Deferred
+    @ivar deferred: A Deferred that will fire when the content has
+          been retrieved. Once this is fired, the ivars `status', `version',
+          and `message' will be set.
+
+    @type status: bytes
+    @ivar status: The status of the response.
+
+    @type version: bytes
+    @ivar version: The version of the response.
+
+    @type message: bytes
+    @ivar message: The text message returned with the status.
+
+    @type response_headers: dict
+    @ivar response_headers: The headers that were specified in the
+          response from the server.
+
+    @type method: bytes
+    @ivar method: The HTTP method to use in the request.  This should be one of
+        OPTIONS, GET, HEAD, POST, PUT, DELETE, TRACE, or CONNECT (case
+        matters).  Other values may be specified if the server being contacted
+        supports them.
+
+    @type redirectLimit: int
+    @ivar redirectLimit: The maximum number of HTTP redirects that can occur
+          before it is assumed that the redirection is endless.
+
+    @type afterFoundGet: C{bool}
+    @ivar afterFoundGet: Deviate from the HTTP 1.1 RFC by handling redirects
+        the same way as most web browsers; if the request method is POST and a
+        302 status is encountered, the redirect is followed with a GET method
+
+    @type _redirectCount: int
+    @ivar _redirectCount: The current number of HTTP redirects encountered.
+
+    @ivar _disconnectedDeferred: A L{Deferred} which only fires after the last
+        connection associated with the request (redirects may cause multiple
+        connections to be required) has closed.  The result Deferred will only
+        fire after this Deferred, so that callers can be assured that there are
+        no more event sources in the reactor once they get the result.
+    """
+
+    protocol = HTTPPageGetter
+
+    url = None
+    scheme = None
+    host = b''
+    port = None
+    path = None
+
+    def __init__(self, url, method=b'GET', postdata=None, headers=None,
+                 agent=b"Twisted PageGetter", timeout=0, cookies=None,
+                 followRedirect=True, redirectLimit=20,
+                 afterFoundGet=False):
+        self.followRedirect = followRedirect
+        self.redirectLimit = redirectLimit
+        self._redirectCount = 0
+        self.timeout = timeout
+        self.agent = agent
+        self.afterFoundGet = afterFoundGet
+        if cookies is None:
+            cookies = {}
+        self.cookies = cookies
+        if headers is not None:
+            self.headers = InsensitiveDict(headers)
+        else:
+            self.headers = InsensitiveDict()
+        if postdata is not None:
+            self.headers.setdefault(b'Content-Length',
+                                    intToBytes(len(postdata)))
+            # just in case a broken http/1.1 decides to keep connection alive
+            self.headers.setdefault(b"connection", b"close")
+        self.postdata = postdata
+        self.method = method
+
+        self.setURL(url)
+
+        self.waiting = 1
+        self._disconnectedDeferred = defer.Deferred()
+        self.deferred = defer.Deferred()
+        # Make sure the first callback on the result Deferred pauses the
+        # callback chain until the request connection is closed.
+        self.deferred.addBoth(self._waitForDisconnect)
+        self.response_headers = None
+
+
+    def _waitForDisconnect(self, passthrough):
+        """
+        Chain onto the _disconnectedDeferred, preserving C{passthrough}, so that
+        the result is only available after the associated connection has been
+        closed.
+        """
+        self._disconnectedDeferred.addCallback(lambda ignored: passthrough)
+        return self._disconnectedDeferred
+
+
+    def __repr__(self):
+        return "<%s: %s>" % (self.__class__.__name__, self.url)
+
+    def setURL(self, url):
+        self.url = url
+        scheme, host, port, path = _parse(url)
+        if scheme and host:
+            self.scheme = scheme
+            self.host = host
+            self.port = port
+        self.path = path
+
+    def buildProtocol(self, addr):
+        p = protocol.ClientFactory.buildProtocol(self, addr)
+        p.followRedirect = self.followRedirect
+        p.afterFoundGet = self.afterFoundGet
+        if self.timeout:
+            timeoutCall = reactor.callLater(self.timeout, p.timeout)
+            self.deferred.addBoth(self._cancelTimeout, timeoutCall)
+        return p
+
+    def _cancelTimeout(self, result, timeoutCall):
+        if timeoutCall.active():
+            timeoutCall.cancel()
+        return result
+
+    def gotHeaders(self, headers):
+        self.response_headers = headers
+        if b'set-cookie' in headers:
+            for cookie in headers[b'set-cookie']:
+                cookparts = cookie.split(b';')
+                cook = cookparts[0]
+                cook.lstrip()
+                k, v = cook.split(b'=', 1)
+                self.cookies[k.lstrip()] = v.lstrip()
+
+    def gotStatus(self, version, status, message):
+        self.version, self.status, self.message = version, status, message
+
+    def page(self, page):
+        if self.waiting:
+            self.waiting = 0
+            self.deferred.callback(page)
+
+    def noPage(self, reason):
+        if self.waiting:
+            self.waiting = 0
+            self.deferred.errback(reason)
+
+    def clientConnectionFailed(self, _, reason):
+        """
+        When a connection attempt fails, the request cannot be issued.  If no
+        result has yet been provided to the result Deferred, provide the
+        connection failure reason as an error result.
+        """
+        if self.waiting:
+            self.waiting = 0
+            # If the connection attempt failed, there is nothing more to
+            # disconnect, so just fire that Deferred now.
+            self._disconnectedDeferred.callback(None)
+            self.deferred.errback(reason)
+
+
+
+class HTTPDownloader(HTTPClientFactory):
+    """Download to a file."""
+
+    protocol = HTTPPageDownloader
+    value = None
+
+    def __init__(self, url, fileOrName,
+                 method='GET', postdata=None, headers=None,
+                 agent="Twisted client", supportPartial=0,
+                 timeout=0, cookies=None, followRedirect=1,
+                 redirectLimit=20, afterFoundGet=False):
+        self.requestedPartial = 0
+        if isinstance(fileOrName, types.StringTypes):
+            self.fileName = fileOrName
+            self.file = None
+            if supportPartial and os.path.exists(self.fileName):
+                fileLength = os.path.getsize(self.fileName)
+                if fileLength:
+                    self.requestedPartial = fileLength
+                    if headers == None:
+                        headers = {}
+                    headers["range"] = "bytes=%d-" % fileLength
+        else:
+            self.file = fileOrName
+        HTTPClientFactory.__init__(
+            self, url, method=method, postdata=postdata, headers=headers,
+            agent=agent, timeout=timeout, cookies=cookies,
+            followRedirect=followRedirect, redirectLimit=redirectLimit,
+            afterFoundGet=afterFoundGet)
+
+
+    def gotHeaders(self, headers):
+        HTTPClientFactory.gotHeaders(self, headers)
+        if self.requestedPartial:
+            contentRange = headers.get("content-range", None)
+            if not contentRange:
+                # server doesn't support partial requests, oh well
+                self.requestedPartial = 0
+                return
+            start, end, realLength = http.parseContentRange(contentRange[0])
+            if start != self.requestedPartial:
+                # server is acting weirdly
+                self.requestedPartial = 0
+
+
+    def openFile(self, partialContent):
+        if partialContent:
+            file = open(self.fileName, 'rb+')
+            file.seek(0, 2)
+        else:
+            file = open(self.fileName, 'wb')
+        return file
+
+    def pageStart(self, partialContent):
+        """Called on page download start.
+
+        @param partialContent: tells us whether this is the partial download
+            we requested.
+        """
+        if partialContent and not self.requestedPartial:
+            raise ValueError("we shouldn't get partial content response if we didn't want it!")
+        if self.waiting:
+            try:
+                if not self.file:
+                    self.file = self.openFile(partialContent)
+            except IOError:
+                #raise
+                self.deferred.errback(failure.Failure())
+
+    def pagePart(self, data):
+        if not self.file:
+            return
+        try:
+            self.file.write(data)
+        except IOError:
+            #raise
+            self.file = None
+            self.deferred.errback(failure.Failure())
+
+
+    def noPage(self, reason):
+        """
+        Close the storage file and errback the waiting L{Deferred} with the
+        given reason.
+        """
+        if self.waiting:
+            self.waiting = 0
+            if self.file:
+                try:
+                    self.file.close()
+                except:
+                    log.err(None, "Error closing HTTPDownloader file")
+            self.deferred.errback(reason)
+
+
+    def pageEnd(self):
+        self.waiting = 0
+        if not self.file:
+            return
+        try:
+            self.file.close()
+        except IOError:
+            self.deferred.errback(failure.Failure())
+            return
+        self.deferred.callback(self.value)
+
+
+
+class _URL(tuple):
+    """
+    A parsed URL.
+
+    At some point this should be replaced with a better URL implementation.
+    """
+    def __new__(self, scheme, host, port, path):
+        return tuple.__new__(_URL, (scheme, host, port, path))
+
+
+    def __init__(self, scheme, host, port, path):
+        self.scheme = scheme
+        self.host = host
+        self.port = port
+        self.path = path
+
+
+def _parse(url, defaultPort=None):
+    """
+    Split the given URL into the scheme, host, port, and path.
+
+    @type url: C{bytes}
+    @param url: An URL to parse.
+
+    @type defaultPort: C{int} or C{None}
+    @param defaultPort: An alternate value to use as the port if the URL does
+    not include one.
+
+    @return: A four-tuple of the scheme, host, port, and path of the URL.  All
+    of these are C{bytes} instances except for port, which is an C{int}.
+    """
+    url = url.strip()
+    parsed = http.urlparse(url)
+    scheme = parsed[0]
+    path = urlunparse((b'', b'') + parsed[2:])
+
+    if defaultPort is None:
+        if scheme == b'https':
+            defaultPort = 443
+        else:
+            defaultPort = 80
+
+    host, port = parsed[1], defaultPort
+    if b':' in host:
+        host, port = host.split(b':')
+        try:
+            port = int(port)
+        except ValueError:
+            port = defaultPort
+
+    if path == b'':
+        path = b'/'
+
+    return _URL(scheme, host, port, path)
+
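+# A minimal sketch of what _parse returns for a typical URL (example.com is a
+# placeholder host):
+#
+#   scheme, host, port, path = _parse(b'http://example.com/foo?bar=baz')
+#   # scheme == b'http', host == b'example.com', port == 80,
+#   # path == b'/foo?bar=baz'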
+
+def _makeGetterFactory(url, factoryFactory, contextFactory=None,
+                       *args, **kwargs):
+    """
+    Create and connect an HTTP page getting factory.
+
+    Any additional positional or keyword arguments are used when calling
+    C{factoryFactory}.
+
+    @param factoryFactory: Factory factory that is called with C{url}, C{args}
+        and C{kwargs} to produce the getter
+
+    @param contextFactory: Context factory to use when creating a secure
+        connection, defaulting to C{None}
+
+    @return: The factory created by C{factoryFactory}
+    """
+    scheme, host, port, path = _parse(url)
+    factory = factoryFactory(url, *args, **kwargs)
+    if scheme == b'https':
+        from twisted.internet import ssl
+        if contextFactory is None:
+            contextFactory = ssl.ClientContextFactory()
+        reactor.connectSSL(nativeString(host), port, factory, contextFactory)
+    else:
+        reactor.connectTCP(nativeString(host), port, factory)
+    return factory
+
+
+def getPage(url, contextFactory=None, *args, **kwargs):
+    """
+    Download a web page as a string.
+
+    Download a page. Return a deferred, which will callback with a
+    page (as a string) or errback with a description of the error.
+
+    See L{HTTPClientFactory} to see what extra arguments can be passed.
+    """
+    return _makeGetterFactory(
+        url,
+        HTTPClientFactory,
+        contextFactory=contextFactory,
+        *args, **kwargs).deferred
+
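+# A minimal usage sketch for getPage, to be run under a running reactor; the
+# URL is a placeholder:
+#
+#   d = getPage(b'http://example.com/')
+#   d.addCallback(lambda body: log.msg('got %d bytes' % (len(body),)))
+#   d.addErrback(log.err)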
+
+def downloadPage(url, file, contextFactory=None, *args, **kwargs):
+    """
+    Download a web page to a file.
+
+    @param file: path to file on filesystem, or file-like object.
+
+    See HTTPDownloader to see what extra args can be passed.
+    """
+    factoryFactory = lambda url, *a, **kw: HTTPDownloader(url, file, *a, **kw)
+    return _makeGetterFactory(
+        url,
+        factoryFactory,
+        contextFactory=contextFactory,
+        *args, **kwargs).deferred
+
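+# A minimal usage sketch for downloadPage; the URL and file path are
+# placeholders:
+#
+#   d = downloadPage(b'http://example.com/big.iso', '/tmp/big.iso')
+#   d.addCallbacks(lambda ignored: log.msg('saved'), log.err)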
+
+# The code which follows is based on the new HTTP client implementation.  It
+# should be significantly better than anything above, though it is not yet
+# feature equivalent.
+
+from twisted.web.error import SchemeNotSupported
+if not _PY3:
+    from twisted.web._newclient import Request, Response, HTTP11ClientProtocol
+    from twisted.web._newclient import ResponseDone, ResponseFailed
+    from twisted.web._newclient import RequestNotSent, RequestTransmissionFailed
+    from twisted.web._newclient import ResponseNeverReceived
+
+try:
+    from twisted.internet.ssl import ClientContextFactory
+except ImportError:
+    class WebClientContextFactory(object):
+        """
+        A web context factory which doesn't work because the necessary SSL
+        support is missing.
+        """
+        def getContext(self, hostname, port):
+            raise NotImplementedError("SSL support unavailable")
+else:
+    class WebClientContextFactory(ClientContextFactory):
+        """
+        A web context factory which ignores the hostname and port and does no
+        certificate verification.
+        """
+        def getContext(self, hostname, port):
+            return ClientContextFactory.getContext(self)
+
+
+
+class _WebToNormalContextFactory(object):
+    """
+    Adapt a web context factory to a normal context factory.
+
+    @ivar _webContext: A web context factory which accepts a hostname and port
+        number to its C{getContext} method.
+
+    @ivar _hostname: The hostname which will be passed to
+        C{_webContext.getContext}.
+
+    @ivar _port: The port number which will be passed to
+        C{_webContext.getContext}.
+    """
+    def __init__(self, webContext, hostname, port):
+        self._webContext = webContext
+        self._hostname = hostname
+        self._port = port
+
+
+    def getContext(self):
+        """
+        Call the wrapped web context factory's C{getContext} method with a
+        hostname and port number and return the resulting context object.
+        """
+        return self._webContext.getContext(self._hostname, self._port)
+
+
+
+@implementer(IBodyProducer)
+class FileBodyProducer(object):
+    """
+    L{FileBodyProducer} produces bytes from an input file object incrementally
+    and writes them to a consumer.
+
+    Since file-like objects cannot be read from in an event-driven manner,
+    L{FileBodyProducer} uses a L{Cooperator} instance to schedule reads from
+    the file.  This process is also paused and resumed based on notifications
+    from the L{IConsumer} provider being written to.
+
+    The file is closed after it has been read, or if the producer is stopped
+    early.
+
+    @ivar _inputFile: Any file-like object, bytes read from which will be
+        written to a consumer.
+
+    @ivar _cooperate: A method like L{Cooperator.cooperate} which is used to
+        schedule all reads.
+
+    @ivar _readSize: The number of bytes to read from C{_inputFile} at a time.
+    """
+
+    # Python 2.4 doesn't have these symbolic constants
+    _SEEK_SET = getattr(os, 'SEEK_SET', 0)
+    _SEEK_END = getattr(os, 'SEEK_END', 2)
+
+    def __init__(self, inputFile, cooperator=task, readSize=2 ** 16):
+        self._inputFile = inputFile
+        self._cooperate = cooperator.cooperate
+        self._readSize = readSize
+        self.length = self._determineLength(inputFile)
+
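+    # A minimal usage sketch, assuming the Agent class defined later in this
+    # module; io.BytesIO stands in for a real file object and the URL is a
+    # placeholder:
+    #
+    #   from io import BytesIO
+    #   body = FileBodyProducer(BytesIO(b'some payload'))
+    #   d = Agent(reactor).request('PUT', 'http://example.com/upload',
+    #                              Headers({}), body)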
+
+    def _determineLength(self, fObj):
+        """
+        Determine how many bytes can be read out of C{fObj} (assuming it is not
+        modified from this point on).  If the determination cannot be made,
+        return C{UNKNOWN_LENGTH}.
+        """
+        try:
+            seek = fObj.seek
+            tell = fObj.tell
+        except AttributeError:
+            return UNKNOWN_LENGTH
+        originalPosition = tell()
+        seek(0, self._SEEK_END)
+        end = tell()
+        seek(originalPosition, self._SEEK_SET)
+        return end - originalPosition
+
+
+    def stopProducing(self):
+        """
+        Permanently stop writing bytes from the file to the consumer by
+        stopping the underlying L{CooperativeTask}.
+        """
+        self._inputFile.close()
+        self._task.stop()
+
+
+    def startProducing(self, consumer):
+        """
+        Start a cooperative task which will read bytes from the input file and
+        write them to C{consumer}.  Return a L{Deferred} which fires after all
+        bytes have been written.
+
+        @param consumer: Any L{IConsumer} provider
+        """
+        self._task = self._cooperate(self._writeloop(consumer))
+        d = self._task.whenDone()
+        def maybeStopped(reason):
+            # IBodyProducer.startProducing's Deferred isn't supposed to fire if
+            # stopProducing is called.
+            reason.trap(task.TaskStopped)
+            return defer.Deferred()
+        d.addCallbacks(lambda ignored: None, maybeStopped)
+        return d
+
+
+    def _writeloop(self, consumer):
+        """
+        Return an iterator which reads one chunk of bytes from the input file
+        and writes them to the consumer for each time it is iterated.
+        """
+        while True:
+            bytes = self._inputFile.read(self._readSize)
+            if not bytes:
+                self._inputFile.close()
+                break
+            consumer.write(bytes)
+            yield None
+
+
+    def pauseProducing(self):
+        """
+        Temporarily suspend copying bytes from the input file to the consumer
+        by pausing the L{CooperativeTask} which drives that activity.
+        """
+        self._task.pause()
+
+
+    def resumeProducing(self):
+        """
+        Undo the effects of a previous C{pauseProducing} and resume copying
+        bytes to the consumer by resuming the L{CooperativeTask} which drives
+        the write activity.
+        """
+        self._task.resume()
+
+
+
+class _HTTP11ClientFactory(protocol.Factory):
+    """
+    A factory for L{HTTP11ClientProtocol}, used by L{HTTPConnectionPool}.
+
+    @ivar _quiescentCallback: The quiescent callback to be passed to protocol
+        instances, used to return them to the connection pool.
+
+    @since: 11.1
+    """
+    def __init__(self, quiescentCallback):
+        self._quiescentCallback = quiescentCallback
+
+
+    def buildProtocol(self, addr):
+        return HTTP11ClientProtocol(self._quiescentCallback)
+
+
+
+class _RetryingHTTP11ClientProtocol(object):
+    """
+    A wrapper for L{HTTP11ClientProtocol} that automatically retries requests.
+
+    @ivar _clientProtocol: The underlying L{HTTP11ClientProtocol}.
+
+    @ivar _newConnection: A callable that creates a new connection for a
+        retry.
+    """
+
+    def __init__(self, clientProtocol, newConnection):
+        self._clientProtocol = clientProtocol
+        self._newConnection = newConnection
+
+
+    def _shouldRetry(self, method, exception, bodyProducer):
+        """
+        Indicate whether request should be retried.
+
+        Only returns C{True} if method is idempotent, no response was
+        received, and no body was sent. The latter requirement may be relaxed
+        in the future, and PUT added to the approved method list.
+        """
+        if method not in ("GET", "HEAD", "OPTIONS", "DELETE", "TRACE"):
+            return False
+        if not isinstance(exception, (RequestNotSent, RequestTransmissionFailed,
+                                      ResponseNeverReceived)):
+            return False
+        if bodyProducer is not None:
+            return False
+        return True
+
+
+    def request(self, request):
+        """
+        Do a request, and retry once (with a new connection) if it fails in
+        a retryable manner.
+
+        @param request: A L{Request} instance that will be requested using the
+            wrapped protocol.
+        """
+        d = self._clientProtocol.request(request)
+
+        def failed(reason):
+            if self._shouldRetry(request.method, reason.value,
+                                 request.bodyProducer):
+                return self._newConnection().addCallback(
+                    lambda connection: connection.request(request))
+            else:
+                return reason
+        d.addErrback(failed)
+        return d
+
+
+
+class HTTPConnectionPool(object):
+    """
+    A pool of persistent HTTP connections.
+
+    Features:
+     - Cached connections will eventually time out.
+     - Limits on maximum number of persistent connections.
+
+    Connections are stored using keys, which should be chosen such that any
+    connections stored under a given key can be used interchangeably.
+
+    Requests that fail on a previously cached connection will be retried once
+    if they use an idempotent method (e.g. GET), in case the HTTP server had
+    already timed the connection out.
+
+    @ivar persistent: Boolean indicating whether connections should be
+        persistent. Connections are persistent by default.
+
+    @ivar maxPersistentPerHost: The maximum number of cached persistent
+        connections for a C{host:port} destination.
+    @type maxPersistentPerHost: C{int}
+
+    @ivar cachedConnectionTimeout: Number of seconds a cached persistent
+        connection will stay open before disconnecting.
+
+    @ivar retryAutomatically: C{boolean} indicating whether idempotent
+        requests should be retried once if no response was received.
+
+    @ivar _factory: The factory used to build L{HTTP11ClientProtocol}
+        instances for new connections.
+
+    @ivar _connections: Map (scheme, host, port) to lists of
+        L{HTTP11ClientProtocol} instances.
+
+    @ivar _timeouts: Map L{HTTP11ClientProtocol} instances to a
+        C{IDelayedCall} instance of their timeout.
+
+    @since: 12.1
+    """
+
+    _factory = _HTTP11ClientFactory
+    maxPersistentPerHost = 2
+    cachedConnectionTimeout = 240
+    retryAutomatically = True
+
+    def __init__(self, reactor, persistent=True):
+        self._reactor = reactor
+        self.persistent = persistent
+        self._connections = {}
+        self._timeouts = {}
+
+
+    def getConnection(self, key, endpoint):
+        """
+        Retrieve a connection, either new or cached, to be used for an HTTP
+        request.
+
+        If a cached connection is returned, it will not be used for other
+        requests until it is put back (which will happen automatically), since
+        we do not support pipelined requests. If no cached connection is
+        available, the passed in endpoint is used to create the connection.
+
+        If the connection doesn't disconnect at the end of its request, it
+        will be returned to this pool automatically. As such, only a single
+        request should be sent using the returned connection.
+
+        @param key: A unique key identifying connections that can be used
+            interchangeably.
+
+        @param endpoint: An endpoint that can be used to open a new connection
+            if no cached connection is available.
+
+        @return: A C{Deferred} that will fire with a L{HTTP11ClientProtocol}
+           (or a wrapper) that can be used to send a single HTTP request.
+        """
+        # Try to get cached version:
+        connections = self._connections.get(key)
+        while connections:
+            connection = connections.pop(0)
+            # Cancel timeout:
+            self._timeouts[connection].cancel()
+            del self._timeouts[connection]
+            if connection.state == "QUIESCENT":
+                if self.retryAutomatically:
+                    newConnection = lambda: self._newConnection(key, endpoint)
+                    connection = _RetryingHTTP11ClientProtocol(
+                        connection, newConnection)
+                return defer.succeed(connection)
+
+        return self._newConnection(key, endpoint)
+
+
+    def _newConnection(self, key, endpoint):
+        """
+        Create a new connection.
+
+        This implements the new connection code path for L{getConnection}.
+        """
+        def quiescentCallback(protocol):
+            self._putConnection(key, protocol)
+        factory = self._factory(quiescentCallback)
+        return endpoint.connect(factory)
+
+
+    def _removeConnection(self, key, connection):
+        """
+        Remove a connection from the cache and disconnect it.
+        """
+        connection.transport.loseConnection()
+        self._connections[key].remove(connection)
+        del self._timeouts[connection]
+
+
+    def _putConnection(self, key, connection):
+        """
+        Return a persistent connection to the pool. This will be called by
+        L{HTTP11ClientProtocol} when the connection becomes quiescent.
+        """
+        if connection.state != "QUIESCENT":
+            # Log with traceback for debugging purposes:
+            try:
+                raise RuntimeError(
+                    "BUG: Non-quiescent protocol added to connection pool.")
+            except:
+                log.err()
+            return
+        connections = self._connections.setdefault(key, [])
+        if len(connections) == self.maxPersistentPerHost:
+            dropped = connections.pop(0)
+            dropped.transport.loseConnection()
+            self._timeouts[dropped].cancel()
+            del self._timeouts[dropped]
+        connections.append(connection)
+        cid = self._reactor.callLater(self.cachedConnectionTimeout,
+                                      self._removeConnection,
+                                      key, connection)
+        self._timeouts[connection] = cid
+
+
+    def closeCachedConnections(self):
+        """
+        Close all persistent connections and remove them from the pool.
+
+        @return: L{defer.Deferred} that fires when all connections have been
+            closed.
+        """
+        results = []
+        for protocols in self._connections.itervalues():
+            for p in protocols:
+                results.append(p.abort())
+        self._connections = {}
+        for dc in self._timeouts.values():
+            dc.cancel()
+        self._timeouts = {}
+        return defer.gatherResults(results).addCallback(lambda ign: None)
+
+
+
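+# A minimal usage sketch (illustrative only; the URL below is a placeholder
+# and a running reactor is assumed): a shared HTTPConnectionPool lets an
+# Agent reuse persistent connections to the same host:port across requests.
+def _examplePooledAgent(reactor):
+    pool = HTTPConnectionPool(reactor, persistent=True)
+    pool.maxPersistentPerHost = 4
+    agent = Agent(reactor, pool=pool)
+    d = agent.request('GET', 'http://example.com/')
+    # The connection goes back to the pool once the protocol becomes
+    # quiescent, ready to be reused by the next request for that host.
+    d.addCallback(lambda response: response.code)
+    return d
+
+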
+class _AgentBase(object):
+    """
+    Base class offering common facilities for L{Agent}-type classes.
+
+    @ivar _reactor: The C{IReactorTime} implementation which will be used by
+        the pool, and perhaps by subclasses as well.
+
+    @ivar _pool: The L{HTTPConnectionPool} used to manage HTTP connections.
+    """
+
+    def __init__(self, reactor, pool):
+        if pool is None:
+            pool = HTTPConnectionPool(reactor, False)
+        self._reactor = reactor
+        self._pool = pool
+
+
+    def _computeHostValue(self, scheme, host, port):
+        """
+        Compute the string to use for the value of the I{Host} header, based on
+        the given scheme, host name, and port number.
+        """
+        if (scheme, port) in (('http', 80), ('https', 443)):
+            return host
+        return '%s:%d' % (host, port)
+
+
+    def _requestWithEndpoint(self, key, endpoint, method, parsedURI,
+                             headers, bodyProducer, requestPath):
+        """
+        Issue a new request, given the endpoint and the path sent as part of
+        the request.
+        """
+        # Create minimal headers, if necessary:
+        if headers is None:
+            headers = Headers()
+        if not headers.hasHeader('host'):
+            headers = headers.copy()
+            headers.addRawHeader(
+                'host', self._computeHostValue(parsedURI.scheme, parsedURI.host,
+                                               parsedURI.port))
+
+        d = self._pool.getConnection(key, endpoint)
+        def cbConnected(proto):
+            return proto.request(
+                Request(method, requestPath, headers, bodyProducer,
+                        persistent=self._pool.persistent))
+        d.addCallback(cbConnected)
+        return d
+
+
+
+class Agent(_AgentBase):
+    """
+    L{Agent} is a very basic HTTP client.  It supports I{HTTP} and I{HTTPS}
+    scheme URIs (but performs no certificate checking by default).
+
+    @param pool: A L{HTTPConnectionPool} instance, or C{None}, in which case a
+        non-persistent L{HTTPConnectionPool} instance will be created.
+
+    @ivar _contextFactory: A web context factory which will be used to create
+        SSL context objects for any SSL connections the agent needs to make.
+
+    @ivar _connectTimeout: If not C{None}, the timeout passed to C{connectTCP}
+        or C{connectSSL} for specifying the connection timeout.
+
+    @ivar _bindAddress: If not C{None}, the address passed to C{connectTCP} or
+        C{connectSSL} for specifying the local address to bind to.
+
+    @since: 9.0
+    """
+
+    def __init__(self, reactor, contextFactory=WebClientContextFactory(),
+                 connectTimeout=None, bindAddress=None,
+                 pool=None):
+        _AgentBase.__init__(self, reactor, pool)
+        self._contextFactory = contextFactory
+        self._connectTimeout = connectTimeout
+        self._bindAddress = bindAddress
+
+
+    def _wrapContextFactory(self, host, port):
+        """
+        Create and return a normal context factory wrapped around
+        C{self._contextFactory} in such a way that C{self._contextFactory} will
+        have the host and port information passed to it.
+
+        @param host: A C{str} giving the hostname which will be connected to in
+            order to issue a request.
+
+        @param port: An C{int} giving the port number the connection will be
+            on.
+
+        @return: A context factory suitable to be passed to
+            C{reactor.connectSSL}.
+        """
+        return _WebToNormalContextFactory(self._contextFactory, host, port)
+
+
+    def _getEndpoint(self, scheme, host, port):
+        """
+        Get an endpoint for the given host and port, using a transport
+        selected based on scheme.
+
+        @param scheme: A string like C{'http'} or C{'https'} (the only two
+            supported values) to use to determine how to establish the
+            connection.
+
+        @param host: A C{str} giving the hostname which will be connected to in
+            order to issue a request.
+
+        @param port: An C{int} giving the port number the connection will be
+            on.
+
+        @return: An endpoint which can be used to connect to the given address.
+        """
+        kwargs = {}
+        if self._connectTimeout is not None:
+            kwargs['timeout'] = self._connectTimeout
+        kwargs['bindAddress'] = self._bindAddress
+        if scheme == 'http':
+            return TCP4ClientEndpoint(self._reactor, host, port, **kwargs)
+        elif scheme == 'https':
+            return SSL4ClientEndpoint(self._reactor, host, port,
+                                      self._wrapContextFactory(host, port),
+                                      **kwargs)
+        else:
+            raise SchemeNotSupported("Unsupported scheme: %r" % (scheme,))
+
+
+    def request(self, method, uri, headers=None, bodyProducer=None):
+        """
+        Issue a new request.
+
+        @param method: The request method to send.
+        @type method: C{str}
+
+        @param uri: The request URI to send.
+        @type uri: C{str}
+
+        @param headers: The request headers to send.  If no I{Host} header is
+            included, one will be added based on the request URI.
+        @type headers: L{Headers}
+
+        @param bodyProducer: An object which will produce the request body or,
+            if the request body is to be empty, L{None}.
+        @type bodyProducer: L{IBodyProducer} provider
+
+        @return: A L{Deferred} which fires with the result of the request (a
+            L{twisted.web.iweb.IResponse} provider), or fails if there is a
+            problem setting up a connection over which to issue the request.
+            It may also fail with L{SchemeNotSupported} if the scheme of the
+            given URI is not supported.
+        @rtype: L{Deferred}
+        """
+        parsedURI = _parse(uri)
+        try:
+            endpoint = self._getEndpoint(parsedURI.scheme, parsedURI.host,
+                                         parsedURI.port)
+        except SchemeNotSupported:
+            return defer.fail(Failure())
+        key = (parsedURI.scheme, parsedURI.host, parsedURI.port)
+        return self._requestWithEndpoint(key, endpoint, method, parsedURI,
+                                         headers, bodyProducer, parsedURI.path)
+
+
+
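+# A minimal request sketch (illustrative only; the URL and header value are
+# placeholders, and a running reactor is assumed).
+def _exampleAgentRequest(reactor):
+    agent = Agent(reactor)
+    d = agent.request(
+        'GET', 'http://example.com/',
+        Headers({'user-agent': ['Twisted web client example']}),
+        None)
+    # The callback receives an IResponse provider: response.code and
+    # response.headers are available at once, while the body is delivered
+    # separately through response.deliverBody(protocol).
+    d.addCallback(lambda response: response.code)
+    return d
+
+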
+class ProxyAgent(_AgentBase):
+    """
+    An HTTP agent able to cross HTTP proxies.
+
+    @ivar _proxyEndpoint: The endpoint used to connect to the proxy.
+
+    @since: 11.1
+    """
+
+    def __init__(self, endpoint, reactor=None, pool=None):
+        if reactor is None:
+            from twisted.internet import reactor
+        _AgentBase.__init__(self, reactor, pool)
+        self._proxyEndpoint = endpoint
+
+
+    def request(self, method, uri, headers=None, bodyProducer=None):
+        """
+        Issue a new request via the configured proxy.
+        """
+        # Cache *all* connections under the same key, since we are only
+        # connecting to a single destination, the proxy:
+        key = ("http-proxy", self._proxyEndpoint)
+
+        # To support proxying HTTPS via CONNECT, we will use key
+        # ("http-proxy-CONNECT", scheme, host, port), and an endpoint that
+        # wraps _proxyEndpoint with an additional callback to do the CONNECT.
+        return self._requestWithEndpoint(key, self._proxyEndpoint, method,
+                                         _parse(uri), headers, bodyProducer,
+                                         uri)
+
+
+
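+# A minimal sketch of going through an HTTP proxy (illustrative only; the
+# proxy address and URL are placeholders, and a running reactor is assumed).
+# Note that the full URI is sent on the request line, as proxies expect.
+def _exampleProxyRequest(reactor):
+    proxyEndpoint = TCP4ClientEndpoint(reactor, 'proxy.example.com', 8080)
+    agent = ProxyAgent(proxyEndpoint)
+    return agent.request('GET', 'http://example.com/')
+
+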
+class _FakeUrllib2Request(object):
+    """
+    A fake C{urllib2.Request} object for C{cookielib} to work with.
+
+    @see: U{http://docs.python.org/library/urllib2.html#request-objects}
+
+    @type uri: C{str}
+    @ivar uri: Request URI.
+
+    @type headers: L{twisted.web.http_headers.Headers}
+    @ivar headers: Request headers.
+
+    @type type: C{str}
+    @ivar type: The scheme of the URI.
+
+    @type host: C{str}
+    @ivar host: The host[:port] of the URI.
+
+    @since: 11.1
+    """
+    def __init__(self, uri):
+        self.uri = uri
+        self.headers = Headers()
+        self.type, rest = splittype(self.uri)
+        self.host, rest = splithost(rest)
+
+
+    def has_header(self, header):
+        return self.headers.hasHeader(header)
+
+
+    def add_unredirected_header(self, name, value):
+        self.headers.addRawHeader(name, value)
+
+
+    def get_full_url(self):
+        return self.uri
+
+
+    def get_header(self, name, default=None):
+        headers = self.headers.getRawHeaders(name, default)
+        if headers is not None:
+            return headers[0]
+        return None
+
+
+    def get_host(self):
+        return self.host
+
+
+    def get_type(self):
+        return self.type
+
+
+    def is_unverifiable(self):
+        # In theory this shouldn't be hardcoded.
+        return False
+
+
+
+class _FakeUrllib2Response(object):
+    """
+    A fake C{urllib2.Response} object for C{cookielib} to work with.
+
+    @type response: C{twisted.web.iweb.IResponse}
+    @ivar response: Underlying Twisted Web response.
+
+    @since: 11.1
+    """
+    def __init__(self, response):
+        self.response = response
+
+
+    def info(self):
+        class _Meta(object):
+            def getheaders(zelf, name):
+                return self.response.headers.getRawHeaders(name, [])
+        return _Meta()
+
+
+
+class CookieAgent(object):
+    """
+    L{CookieAgent} extends the basic L{Agent} to add RFC-compliant
+    handling of HTTP cookies.  Cookies are written to and extracted
+    from a C{cookielib.CookieJar} instance.
+
+    The same cookie jar instance will be used for any requests through this
+    agent, mutating it whenever a I{Set-Cookie} header appears in a response.
+
+    @type _agent: L{twisted.web.client.Agent}
+    @ivar _agent: Underlying Twisted Web agent to issue requests through.
+
+    @type cookieJar: C{cookielib.CookieJar}
+    @ivar cookieJar: Initialized cookie jar to read cookies from and store
+        cookies to.
+
+    @since: 11.1
+    """
+    def __init__(self, agent, cookieJar):
+        self._agent = agent
+        self.cookieJar = cookieJar
+
+
+    def request(self, method, uri, headers=None, bodyProducer=None):
+        """
+        Issue a new request to the wrapped L{Agent}.
+
+        Send a I{Cookie} header if a cookie for C{uri} is stored in
+        L{CookieAgent.cookieJar}. Cookies are automatically extracted and
+        stored from requests.
+
+        If a C{'cookie'} header appears in C{headers} it will override the
+        automatic cookie header obtained from the cookie jar.
+
+        @see: L{Agent.request}
+        """
+        if headers is None:
+            headers = Headers()
+        lastRequest = _FakeUrllib2Request(uri)
+        # Setting a cookie header explicitly will disable automatic request
+        # cookies.
+        if not headers.hasHeader('cookie'):
+            self.cookieJar.add_cookie_header(lastRequest)
+            cookieHeader = lastRequest.get_header('Cookie', None)
+            if cookieHeader is not None:
+                headers = headers.copy()
+                headers.addRawHeader('cookie', cookieHeader)
+
+        d = self._agent.request(method, uri, headers, bodyProducer)
+        d.addCallback(self._extractCookies, lastRequest)
+        return d
+
+
+    def _extractCookies(self, response, request):
+        """
+        Extract response cookies and store them in the cookie jar.
+
+        @type response: L{twisted.web.iweb.IResponse}
+        @param response: Twisted Web response.
+
+        @param request: A urllib2 compatible request object.
+        """
+        resp = _FakeUrllib2Response(response)
+        self.cookieJar.extract_cookies(resp, request)
+        return response
+
+
+
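+# A minimal sketch of cookie handling (illustrative only; the URL is a
+# placeholder and a running reactor is assumed): the same cookielib.CookieJar
+# supplies outgoing Cookie headers and is updated from Set-Cookie headers on
+# responses.
+def _exampleCookieAgent(reactor):
+    import cookielib
+    jar = cookielib.CookieJar()
+    agent = CookieAgent(Agent(reactor), jar)
+    d = agent.request('GET', 'http://example.com/')
+    # Once the response arrives, any cookies it set are available in jar.
+    return d
+
+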
+class GzipDecoder(proxyForInterface(IResponse)):
+    """
+    A wrapper for a L{Response} instance which handles gzip'ed body.
+
+    @ivar original: The original L{Response} object.
+
+    @since: 11.1
+    """
+
+    def __init__(self, response):
+        self.original = response
+        self.length = UNKNOWN_LENGTH
+
+
+    def deliverBody(self, protocol):
+        """
+        Override C{deliverBody} to wrap the given C{protocol} with
+        L{_GzipProtocol}.
+        """
+        self.original.deliverBody(_GzipProtocol(protocol, self.original))
+
+
+
+class _GzipProtocol(proxyForInterface(IProtocol)):
+    """
+    A L{Protocol} implementation which wraps another one, transparently
+    decompressing received data.
+
+    @ivar _zlibDecompress: A zlib decompress object used to decompress the data
+        stream.
+
+    @ivar _response: A reference to the original response, in case of errors.
+
+    @since: 11.1
+    """
+
+    def __init__(self, protocol, response):
+        self.original = protocol
+        self._response = response
+        self._zlibDecompress = zlib.decompressobj(16 + zlib.MAX_WBITS)
+
+
+    def dataReceived(self, data):
+        """
+        Decompress C{data} with the zlib decompressor, forwarding the raw data
+        to the original protocol.
+        """
+        try:
+            rawData = self._zlibDecompress.decompress(data)
+        except zlib.error:
+            raise ResponseFailed([failure.Failure()], self._response)
+        if rawData:
+            self.original.dataReceived(rawData)
+
+
+    def connectionLost(self, reason):
+        """
+        Forward the connection lost event, flushing remaining data from the
+        decompressor if any.
+        """
+        try:
+            rawData = self._zlibDecompress.flush()
+        except zlib.error:
+            raise ResponseFailed([reason, failure.Failure()], self._response)
+        if rawData:
+            self.original.dataReceived(rawData)
+        self.original.connectionLost(reason)
+
+
+
+class ContentDecoderAgent(object):
+    """
+    An L{Agent} wrapper to handle encoded content.
+
+    It takes care of declaring support for the configured encodings in the
+    I{Accept-Encoding} header, and automatically decompresses the received
+    data if the response actually used compression.
+
+    @param decoders: A list or tuple of (name, decoder) pairs. The name
+        declares which encoding the decoder supports, and the decoder must
+        return a response object when called/instantiated. For example,
+        C{(('gzip', GzipDecoder),)}. The order determines how the decoders are
+        advertised to the server.
+
+    @since: 11.1
+    """
+
+    def __init__(self, agent, decoders):
+        self._agent = agent
+        self._decoders = dict(decoders)
+        self._supported = ','.join([decoder[0] for decoder in decoders])
+
+
+    def request(self, method, uri, headers=None, bodyProducer=None):
+        """
+        Send a client request which declares supporting compressed content.
+
+        @see: L{Agent.request}.
+        """
+        if headers is None:
+            headers = Headers()
+        else:
+            headers = headers.copy()
+        headers.addRawHeader('accept-encoding', self._supported)
+        deferred = self._agent.request(method, uri, headers, bodyProducer)
+        return deferred.addCallback(self._handleResponse)
+
+
+    def _handleResponse(self, response):
+        """
+        Check if the response is encoded, and wrap it to handle decompression.
+        """
+        contentEncodingHeaders = response.headers.getRawHeaders(
+            'content-encoding', [])
+        contentEncodingHeaders = ','.join(contentEncodingHeaders).split(',')
+        while contentEncodingHeaders:
+            name = contentEncodingHeaders.pop().strip()
+            decoder = self._decoders.get(name)
+            if decoder is not None:
+                response = decoder(response)
+            else:
+                # Add it back
+                contentEncodingHeaders.append(name)
+                break
+        if contentEncodingHeaders:
+            response.headers.setRawHeaders(
+                'content-encoding', [','.join(contentEncodingHeaders)])
+        else:
+            response.headers.removeHeader('content-encoding')
+        return response
+
+
+
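+# A minimal sketch of transparent gzip support (illustrative only; the URL is
+# a placeholder and a running reactor is assumed): the agent advertises gzip
+# in Accept-Encoding and, if the response is gzip-encoded, wraps it in
+# GzipDecoder so the delivered body is already decompressed.
+def _exampleGzipAgent(reactor):
+    agent = ContentDecoderAgent(Agent(reactor), [('gzip', GzipDecoder)])
+    return agent.request('GET', 'http://example.com/')
+
+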
+class RedirectAgent(object):
+    """
+    An L{Agent} wrapper which handles HTTP redirects.
+
+    The implementation is rather strict: 301 and 302 behave like 307, meaning
+    the agent will not redirect automatically on methods other than C{GET}
+    and C{HEAD}.
+
+    @param redirectLimit: The maximum number of times the agent is allowed to
+        follow redirects before failing with a L{error.InfiniteRedirection}.
+
+    @since: 11.1
+    """
+
+    def __init__(self, agent, redirectLimit=20):
+        self._agent = agent
+        self._redirectLimit = redirectLimit
+
+
+    def request(self, method, uri, headers=None, bodyProducer=None):
+        """
+        Send a client request following HTTP redirects.
+
+        @see: L{Agent.request}.
+        """
+        deferred = self._agent.request(method, uri, headers, bodyProducer)
+        return deferred.addCallback(
+            self._handleResponse, method, uri, headers, 0)
+
+
+    def _handleRedirect(self, response, method, uri, headers, redirectCount):
+        """
+        Handle a redirect response, checking the number of redirects already
+        followed, and extracting the location header fields.
+        """
+        if redirectCount >= self._redirectLimit:
+            err = error.InfiniteRedirection(
+                response.code,
+                'Infinite redirection detected',
+                location=uri)
+            raise ResponseFailed([failure.Failure(err)], response)
+        locationHeaders = response.headers.getRawHeaders('location', [])
+        if not locationHeaders:
+            err = error.RedirectWithNoLocation(
+                response.code, 'No location header field', uri)
+            raise ResponseFailed([failure.Failure(err)], response)
+        location = locationHeaders[0]
+        deferred = self._agent.request(method, location, headers)
+        return deferred.addCallback(
+            self._handleResponse, method, uri, headers, redirectCount + 1)
+
+
+    def _handleResponse(self, response, method, uri, headers, redirectCount):
+        """
+        Handle the response, making another request if it indicates a redirect.
+        """
+        if response.code in (http.MOVED_PERMANENTLY, http.FOUND,
+                             http.TEMPORARY_REDIRECT):
+            if method not in ('GET', 'HEAD'):
+                err = error.PageRedirect(response.code, location=uri)
+                raise ResponseFailed([failure.Failure(err)], response)
+            return self._handleRedirect(response, method, uri, headers,
+                                        redirectCount)
+        elif response.code == http.SEE_OTHER:
+            return self._handleRedirect(response, 'GET', uri, headers,
+                                        redirectCount)
+        return response
+
+
+
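+# A minimal sketch of redirect handling (illustrative only; the URL is a
+# placeholder and a running reactor is assumed): up to redirectLimit
+# redirects are followed, after which the request fails with
+# error.InfiniteRedirection wrapped in ResponseFailed.
+def _exampleRedirectAgent(reactor):
+    agent = RedirectAgent(Agent(reactor), redirectLimit=5)
+    return agent.request('GET', 'http://example.com/old-location')
+
+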
+__all__ = [
+    'PartialDownloadError', 'HTTPPageGetter', 'HTTPPageDownloader',
+    'HTTPClientFactory', 'HTTPDownloader', 'getPage', 'downloadPage',
+    'ResponseDone', 'Response', 'ResponseFailed', 'Agent', 'CookieAgent',
+    'ProxyAgent', 'ContentDecoderAgent', 'GzipDecoder', 'RedirectAgent',
+    'HTTPConnectionPool']
diff --git a/ThirdParty/Twisted/twisted/web/demo.py b/ThirdParty/Twisted/twisted/web/demo.py
new file mode 100644
index 0000000..b8475f0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/demo.py
@@ -0,0 +1,24 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+I am a simple test resource.
+"""
+
+from twisted.web import static
+
+
+class Test(static.Data):
+    isLeaf = True
+    def __init__(self):
+        static.Data.__init__(
+            self,
+            """
+            <html>
+            <head><title>Twisted Web Demo</title></head>
+            <body>
+            Hello! This is a Twisted Web test page.
+            </body>
+            </html>
+            """,
+            "text/html")
diff --git a/ThirdParty/Twisted/twisted/web/distrib.py b/ThirdParty/Twisted/twisted/web/distrib.py
new file mode 100644
index 0000000..830675b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/distrib.py
@@ -0,0 +1,373 @@
+# -*- test-case-name: twisted.web.test.test_distrib -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Distributed web servers.
+
+This is going to have to be refactored so that argument parsing is done
+by each subprocess and not by the main web server (i.e. GET, POST etc.).
+"""
+
+# System Imports
+import types, os, copy, cStringIO
+try:
+    import pwd
+except ImportError:
+    pwd = None
+
+from xml.dom.minidom import Element, Text
+
+# Twisted Imports
+from twisted.spread import pb
+from twisted.spread.banana import SIZE_LIMIT
+from twisted.web import http, resource, server, html, static
+from twisted.web.http_headers import Headers
+from twisted.python import log
+from twisted.persisted import styles
+from twisted.internet import address, reactor
+
+
+class _ReferenceableProducerWrapper(pb.Referenceable):
+    def __init__(self, producer):
+        self.producer = producer
+
+    def remote_resumeProducing(self):
+        self.producer.resumeProducing()
+
+    def remote_pauseProducing(self):
+        self.producer.pauseProducing()
+
+    def remote_stopProducing(self):
+        self.producer.stopProducing()
+
+
+class Request(pb.RemoteCopy, server.Request):
+    """
+    A request which was received by a L{ResourceSubscription} and sent via
+    PB to a distributed node.
+    """
+    def setCopyableState(self, state):
+        """
+        Initialize this L{twisted.web.distrib.Request} based on the copied
+        state so that it closely resembles a L{twisted.web.server.Request}.
+        """
+        for k in 'host', 'client':
+            tup = state[k]
+            addrdesc = {'INET': 'TCP', 'UNIX': 'UNIX'}[tup[0]]
+            addr = {'TCP': lambda: address.IPv4Address(addrdesc,
+                                                       tup[1], tup[2]),
+                    'UNIX': lambda: address.UNIXAddress(tup[1])}[addrdesc]()
+            state[k] = addr
+        state['requestHeaders'] = Headers(dict(state['requestHeaders']))
+        pb.RemoteCopy.setCopyableState(self, state)
+        # Emulate the local request interface --
+        self.content = cStringIO.StringIO(self.content_data)
+        self.finish           = self.remote.remoteMethod('finish')
+        self.setHeader        = self.remote.remoteMethod('setHeader')
+        self.addCookie        = self.remote.remoteMethod('addCookie')
+        self.setETag          = self.remote.remoteMethod('setETag')
+        self.setResponseCode  = self.remote.remoteMethod('setResponseCode')
+        self.setLastModified  = self.remote.remoteMethod('setLastModified')
+
+        # To avoid failing if a resource tries to write a very long string
+        # all at once, this one will be handled slightly differently.
+        self._write = self.remote.remoteMethod('write')
+
+
+    def write(self, bytes):
+        """
+        Write the given bytes to the response body.
+
+        @param bytes: The bytes to write.  If this is longer than 640k, it
+            will be split up into smaller pieces.
+        """
+        start = 0
+        end = SIZE_LIMIT
+        while True:
+            self._write(bytes[start:end])
+            start += SIZE_LIMIT
+            end += SIZE_LIMIT
+            if start >= len(bytes):
+                break
+
+
+    def registerProducer(self, producer, streaming):
+        self.remote.callRemote("registerProducer",
+                               _ReferenceableProducerWrapper(producer),
+                               streaming).addErrback(self.fail)
+
+    def unregisterProducer(self):
+        self.remote.callRemote("unregisterProducer").addErrback(self.fail)
+
+    def fail(self, failure):
+        log.err(failure)
+
+
+pb.setUnjellyableForClass(server.Request, Request)
+
+class Issue:
+    def __init__(self, request):
+        self.request = request
+
+    def finished(self, result):
+        if result != server.NOT_DONE_YET:
+            assert isinstance(result, types.StringType),\
+                   "return value not a string"
+            self.request.write(result)
+            self.request.finish()
+
+    def failed(self, failure):
+        #XXX: Argh. FIXME.
+        failure = str(failure)
+        self.request.write(
+            resource.ErrorPage(http.INTERNAL_SERVER_ERROR,
+                               "Server Connection Lost",
+                               "Connection to distributed server lost:" +
+                               html.PRE(failure)).
+            render(self.request))
+        self.request.finish()
+        log.msg(failure)
+
+
+class ResourceSubscription(resource.Resource):
+    isLeaf = 1
+    waiting = 0
+    def __init__(self, host, port):
+        resource.Resource.__init__(self)
+        self.host = host
+        self.port = port
+        self.pending = []
+        self.publisher = None
+
+    def __getstate__(self):
+        """Get persistent state for this ResourceSubscription.
+        """
+        # When I unserialize,
+        state = copy.copy(self.__dict__)
+        # Publisher won't be connected...
+        state['publisher'] = None
+        # I won't be making a connection
+        state['waiting'] = 0
+        # There will be no pending requests.
+        state['pending'] = []
+        return state
+
+    def connected(self, publisher):
+        """I've connected to a publisher; I'll now send all my requests.
+        """
+        log.msg('connected to publisher')
+        publisher.broker.notifyOnDisconnect(self.booted)
+        self.publisher = publisher
+        self.waiting = 0
+        for request in self.pending:
+            self.render(request)
+        self.pending = []
+
+    def notConnected(self, msg):
+        """I can't connect to a publisher; I'll now reply to all pending
+        requests.
+        """
+        log.msg("could not connect to distributed web service: %s" % msg)
+        self.waiting = 0
+        self.publisher = None
+        for request in self.pending:
+            request.write("Unable to connect to distributed server.")
+            request.finish()
+        self.pending = []
+
+    def booted(self):
+        self.notConnected("connection dropped")
+
+    def render(self, request):
+        """Render this request, from my server.
+
+        This will always be asynchronous, and therefore return NOT_DONE_YET.
+        It spins off a request to the pb client, and either adds it to the list
+        of pending issues or requests it immediately, depending on whether the
+        client is already connected.
+        """
+        if not self.publisher:
+            self.pending.append(request)
+            if not self.waiting:
+                self.waiting = 1
+                bf = pb.PBClientFactory()
+                timeout = 10
+                if self.host == "unix":
+                    reactor.connectUNIX(self.port, bf, timeout)
+                else:
+                    reactor.connectTCP(self.host, self.port, bf, timeout)
+                d = bf.getRootObject()
+                d.addCallbacks(self.connected, self.notConnected)
+
+        else:
+            i = Issue(request)
+            self.publisher.callRemote('request', request).addCallbacks(i.finished, i.failed)
+        return server.NOT_DONE_YET
+
+
+
+class ResourcePublisher(pb.Root, styles.Versioned):
+    """
+    L{ResourcePublisher} exposes a remote API which can be used to respond
+    to requests.
+
+    @ivar site: The site which will be used for resource lookup.
+    @type site: L{twisted.web.server.Site}
+    """
+    def __init__(self, site):
+        self.site = site
+
+    persistenceVersion = 2
+
+    def upgradeToVersion2(self):
+        self.application.authorizer.removeIdentity("web")
+        del self.application.services[self.serviceName]
+        del self.serviceName
+        del self.application
+        del self.perspectiveName
+
+    def getPerspectiveNamed(self, name):
+        return self
+
+
+    def remote_request(self, request):
+        """
+        Look up the resource for the given request and render it.
+        """
+        res = self.site.getResourceFor(request)
+        log.msg( request )
+        result = res.render(request)
+        if result is not server.NOT_DONE_YET:
+            request.write(result)
+            request.finish()
+        return server.NOT_DONE_YET
+
+
+
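+# A minimal sketch of the two halves of a distributed site (illustrative
+# only; the socket path is a placeholder and a running reactor is assumed).
+def _examplePublishSite(site, socketPath):
+    # In the user's process: expose a twisted.web.server.Site over PB on a
+    # UNIX socket so a front-end server can forward requests to it.
+    factory = pb.PBServerFactory(ResourcePublisher(site))
+    return reactor.listenUNIX(socketPath, factory)
+
+
+def _exampleSubscribe(socketPath):
+    # In the front-end server: requests rendered by the returned resource are
+    # forwarded over PB to the publisher listening on socketPath.
+    return ResourceSubscription('unix', socketPath)
+
+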
+class UserDirectory(resource.Resource):
+    """
+    A resource which lists available user resources and serves them as
+    children.
+
+    @ivar _pwd: An object like L{pwd} which is used to enumerate users and
+        their home directories.
+    """
+
+    userDirName = 'public_html'
+    userSocketName = '.twistd-web-pb'
+
+    template = """
+<html>
+    <head>
+    <title>twisted.web.distrib.UserDirectory</title>
+    <style>
+
+    a
+    {
+        font-family: Lucida, Verdana, Helvetica, Arial, sans-serif;
+        color: #369;
+        text-decoration: none;
+    }
+
+    th
+    {
+        font-family: Lucida, Verdana, Helvetica, Arial, sans-serif;
+        font-weight: bold;
+        text-decoration: none;
+        text-align: left;
+    }
+
+    pre, code
+    {
+        font-family: "Courier New", Courier, monospace;
+    }
+
+    p, body, td, ol, ul, menu, blockquote, div
+    {
+        font-family: Lucida, Verdana, Helvetica, Arial, sans-serif;
+        color: #000;
+    }
+    </style>
+    </head>
+
+    <body>
+    <h1>twisted.web.distrib.UserDirectory</h1>
+
+    %(users)s
+</body>
+</html>
+"""
+
+    def __init__(self, userDatabase=None):
+        resource.Resource.__init__(self)
+        if userDatabase is None:
+            userDatabase = pwd
+        self._pwd = userDatabase
+
+
+    def _users(self):
+        """
+        Return a list of two-tuples giving links to user resources and text to
+        associate with those links.
+        """
+        users = []
+        for user in self._pwd.getpwall():
+            name, passwd, uid, gid, gecos, dir, shell = user
+            realname = gecos.split(',')[0]
+            if not realname:
+                realname = name
+            if os.path.exists(os.path.join(dir, self.userDirName)):
+                users.append((name, realname + ' (file)'))
+            twistdsock = os.path.join(dir, self.userSocketName)
+            if os.path.exists(twistdsock):
+                linkName = name + '.twistd'
+                users.append((linkName, realname + ' (twistd)'))
+        return users
+
+
+    def render_GET(self, request):
+        """
+        Render as HTML a listing of all known users with links to their
+        personal resources.
+        """
+        listing = Element('ul')
+        for link, text in self._users():
+            linkElement = Element('a')
+            linkElement.setAttribute('href', link + '/')
+            textNode = Text()
+            textNode.data = text
+            linkElement.appendChild(textNode)
+            item = Element('li')
+            item.appendChild(linkElement)
+            listing.appendChild(item)
+        return self.template % {'users': listing.toxml()}
+
+
+    def getChild(self, name, request):
+        if name == '':
+            return self
+
+        td = '.twistd'
+
+        if name[-len(td):] == td:
+            username = name[:-len(td)]
+            sub = 1
+        else:
+            username = name
+            sub = 0
+        try:
+            pw_name, pw_passwd, pw_uid, pw_gid, pw_gecos, pw_dir, pw_shell \
+                     = self._pwd.getpwnam(username)
+        except KeyError:
+            return resource.NoResource()
+        if sub:
+            twistdsock = os.path.join(pw_dir, self.userSocketName)
+            rs = ResourceSubscription('unix', twistdsock)
+            self.putChild(name, rs)
+            return rs
+        else:
+            path = os.path.join(pw_dir, self.userDirName)
+            if not os.path.exists(path):
+                return resource.NoResource()
+            return static.File(path)
diff --git a/ThirdParty/Twisted/twisted/web/domhelpers.py b/ThirdParty/Twisted/twisted/web/domhelpers.py
new file mode 100644
index 0000000..e6f1b51
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/domhelpers.py
@@ -0,0 +1,268 @@
+# -*- test-case-name: twisted.web.test.test_domhelpers -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A library for performing interesting tasks with DOM objects.
+"""
+
+import StringIO
+
+from twisted.web import microdom
+from twisted.web.microdom import getElementsByTagName, escape, unescape
+
+
+class NodeLookupError(Exception):
+    pass
+
+
+def substitute(request, node, subs):
+    """
+    Look through the given node's children for strings, and
+    attempt to do string substitution with the given parameter.
+    """
+    for child in node.childNodes:
+        if hasattr(child, 'nodeValue') and child.nodeValue:
+            child.replaceData(0, len(child.nodeValue), child.nodeValue % subs)
+        substitute(request, child, subs)
+
+def _get(node, nodeId, nodeAttrs=('id','class','model','pattern')):
+    """
+    (internal) Get a node with the specified C{nodeId} as any of the C{id},
+    C{class}, C{model} or C{pattern} attributes.
+    """
+
+    if hasattr(node, 'hasAttributes') and node.hasAttributes():
+        for nodeAttr in nodeAttrs:
+            if str(node.getAttribute(nodeAttr)) == nodeId:
+                return node
+    if node.hasChildNodes():
+        if hasattr(node.childNodes, 'length'):
+            length = node.childNodes.length
+        else:
+            length = len(node.childNodes)
+        for childNum in range(length):
+            result = _get(node.childNodes[childNum], nodeId)
+            if result: return result
+
+def get(node, nodeId):
+    """
+    Get a node with the specified C{nodeId} as any of the C{class},
+    C{id} or C{pattern} attributes. If there is no such node, raise
+    L{NodeLookupError}.
+    """
+    result = _get(node, nodeId)
+    if result: return result
+    raise NodeLookupError(nodeId)
+
+def getIfExists(node, nodeId):
+    """
+    Get a node with the specified C{nodeId} as any of the C{class},
+    C{id} or C{pattern} attributes.  If there is no such node, return
+    C{None}.
+    """
+    return _get(node, nodeId)
+
+def getAndClear(node, nodeId):
+    """Get a node with the specified C{nodeId} as any of the C{class},
+    C{id} or C{pattern} attributes. If there is no such node, raise
+    L{NodeLookupError}. Remove all child nodes before returning.
+    """
+    result = get(node, nodeId)
+    if result:
+        clearNode(result)
+    return result
+
+def clearNode(node):
+    """
+    Remove all children from the given node.
+    """
+    node.childNodes[:] = []
+
+def locateNodes(nodeList, key, value, noNesting=1):
+    """
+    Find subnodes in the given node where the given attribute
+    has the given value.
+    """
+    returnList = []
+    if not isinstance(nodeList, type([])):
+        return locateNodes(nodeList.childNodes, key, value, noNesting)
+    for childNode in nodeList:
+        if not hasattr(childNode, 'getAttribute'):
+            continue
+        if str(childNode.getAttribute(key)) == value:
+            returnList.append(childNode)
+            if noNesting:
+                continue
+        returnList.extend(locateNodes(childNode, key, value, noNesting))
+    return returnList
+
+def superSetAttribute(node, key, value):
+    if not hasattr(node, 'setAttribute'): return
+    node.setAttribute(key, value)
+    if node.hasChildNodes():
+        for child in node.childNodes:
+            superSetAttribute(child, key, value)
+
+def superPrependAttribute(node, key, value):
+    if not hasattr(node, 'setAttribute'): return
+    old = node.getAttribute(key)
+    if old:
+        node.setAttribute(key, value+'/'+old)
+    else:
+        node.setAttribute(key, value)
+    if node.hasChildNodes():
+        for child in node.childNodes:
+            superPrependAttribute(child, key, value)
+
+def superAppendAttribute(node, key, value):
+    if not hasattr(node, 'setAttribute'): return
+    old = node.getAttribute(key)
+    if old:
+        node.setAttribute(key, old + '/' + value)
+    else:
+        node.setAttribute(key, value)
+    if node.hasChildNodes():
+        for child in node.childNodes:
+            superAppendAttribute(child, key, value)
+
+def gatherTextNodes(iNode, dounescape=0, joinWith=""):
+    """Visit each child node and collect its text data, if any, into a string.
+For example::
+    >>> doc=microdom.parseString('<a>1<b>2<c>3</c>4</b></a>')
+    >>> gatherTextNodes(doc.documentElement)
+    '1234'
+With dounescape=1, also convert entities back into normal characters.
+@return: the gathered nodes as a single string
+@rtype: str
+"""
+    gathered=[]
+    gathered_append=gathered.append
+    slice=[iNode]
+    while len(slice)>0:
+        c=slice.pop(0)
+        if hasattr(c, 'nodeValue') and c.nodeValue is not None:
+            if dounescape:
+                val=unescape(c.nodeValue)
+            else:
+                val=c.nodeValue
+            gathered_append(val)
+        slice[:0]=c.childNodes
+    return joinWith.join(gathered)
+
+class RawText(microdom.Text):
+    """This is an evil and horrible speed hack. Basically, if you have a big
+    chunk of XML that you want to insert into the DOM, but you don't want to
+    incur the cost of parsing it, you can construct one of these and insert it
+    into the DOM. This will most certainly only work with microdom as the API
+    for converting nodes to xml is different in every DOM implementation.
+
+    This could be improved by making this class a Lazy parser, so if you
+    inserted this into the DOM and then later actually tried to mutate this
+    node, it would be parsed then.
+    """
+
+    def writexml(self, writer, indent="", addindent="", newl="", strip=0, nsprefixes=None, namespace=None):
+        writer.write("%s%s%s" % (indent, self.data, newl))
+
+def findNodes(parent, matcher, accum=None):
+    if accum is None:
+        accum = []
+    if not parent.hasChildNodes():
+        return accum
+    for child in parent.childNodes:
+        # print child, child.nodeType, child.nodeName
+        if matcher(child):
+            accum.append(child)
+        findNodes(child, matcher, accum)
+    return accum
+
+
+def findNodesShallowOnMatch(parent, matcher, recurseMatcher, accum=None):
+    if accum is None:
+        accum = []
+    if not parent.hasChildNodes():
+        return accum
+    for child in parent.childNodes:
+        # print child, child.nodeType, child.nodeName
+        if matcher(child):
+            accum.append(child)
+        if recurseMatcher(child):
+            findNodesShallowOnMatch(child, matcher, recurseMatcher, accum)
+    return accum
+
+def findNodesShallow(parent, matcher, accum=None):
+    if accum is None:
+        accum = []
+    if not parent.hasChildNodes():
+        return accum
+    for child in parent.childNodes:
+        if matcher(child):
+            accum.append(child)
+        else:
+            findNodes(child, matcher, accum)
+    return accum
+
+
+def findElementsWithAttributeShallow(parent, attribute):
+    """
+    Return an iterable of the elements which are direct children of C{parent}
+    and which have the C{attribute} attribute.
+    """
+    return findNodesShallow(parent,
+        lambda n: getattr(n, 'tagName', None) is not None and
+            n.hasAttribute(attribute))
+
+
+def findElements(parent, matcher):
+    """
+    Return an iterable of the elements which are children of C{parent} for
+    which the predicate C{matcher} returns true.
+    """
+    return findNodes(
+        parent,
+        lambda n, matcher=matcher: getattr(n, 'tagName', None) is not None and
+                                   matcher(n))
+
+def findElementsWithAttribute(parent, attribute, value=None):
+    if value:
+        return findElements(
+            parent,
+            lambda n, attribute=attribute, value=value:
+              n.hasAttribute(attribute) and n.getAttribute(attribute) == value)
+    else:
+        return findElements(
+            parent,
+            lambda n, attribute=attribute: n.hasAttribute(attribute))
+
+
+def findNodesNamed(parent, name):
+    return findNodes(parent, lambda n, name=name: n.nodeName == name)
+
+
+def writeNodeData(node, oldio):
+    for subnode in node.childNodes:
+        if hasattr(subnode, 'data'):
+            oldio.write(subnode.data)
+        else:
+            writeNodeData(subnode, oldio)
+
+
+def getNodeText(node):
+    oldio = StringIO.StringIO()
+    writeNodeData(node, oldio)
+    return oldio.getvalue()
+
+
+def getParents(node):
+    l = []
+    while node:
+        l.append(node)
+        node = node.parentNode
+    return l
+
+def namedChildren(parent, nodeName):
+    """namedChildren(parent, nodeName) -> children (not descendants) of parent
+    that have tagName == nodeName
+    """
+    return [n for n in parent.childNodes if getattr(n, 'tagName', '')==nodeName]
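+
+
+# A minimal sketch of typical domhelpers usage with microdom (the markup is a
+# placeholder): locate nodes by attribute value and collect their text.
+def _exampleLocateAndGather():
+    doc = microdom.parseString('<ul><li class="x">one</li><li>two</li></ul>')
+    matches = locateNodes(doc.documentElement, 'class', 'x')
+    return [getNodeText(n) for n in matches]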
diff --git a/ThirdParty/Twisted/twisted/web/error.py b/ThirdParty/Twisted/twisted/web/error.py
new file mode 100644
index 0000000..dfdca9b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/error.py
@@ -0,0 +1,380 @@
+# -*- test-case-name: twisted.web.test.test_error -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Exception definitions for L{twisted.web}.
+"""
+
+from __future__ import division, absolute_import
+
+__all__ = [
+    'Error', 'PageRedirect', 'InfiniteRedirection', 'RenderError',
+    'MissingRenderMethod', 'MissingTemplateLoader', 'UnexposedMethodError',
+    'UnfilledSlot', 'UnsupportedType', 'FlattenerError',
+    'RedirectWithNoLocation',
+    ]
+
+from collections import Sequence
+
+from twisted.web._responses import RESPONSES
+
+
+class Error(Exception):
+    """
+    A basic HTTP error.
+
+    @type status: C{str}
+    @ivar status: Refers to an HTTP status code, for example C{http.NOT_FOUND}.
+
+    @type message: C{str}
+    @ivar message: A short error message, for example "NOT FOUND".
+
+    @type response: C{bytes}
+    @ivar response: A complete HTML document for an error page.
+    """
+    def __init__(self, code, message=None, response=None):
+        """
+        Initializes a basic exception.
+
+        @type code: C{str}
+        @param code: Refers to an HTTP status code, for example
+            C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to a
+            descriptive bytestring that is used instead.
+
+        @type message: C{str}
+        @param message: A short error message, for example "NOT FOUND".
+
+        @type response: C{bytes}
+        @param response: A complete HTML document for an error page.
+        """
+        if not message:
+            try:
+                message = RESPONSES.get(int(code))
+            except ValueError:
+                # If code wasn't a stringified int, can't map the
+                # status code to a descriptive string so keep message
+                # unchanged.
+                pass
+
+        Exception.__init__(self, code, message, response)
+        self.status = code
+        self.message = message
+        self.response = response
+
+
+    def __str__(self):
+        return '%s %s' % (self.status, self.message)
+
+
+
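+# A minimal sketch of the code-to-message mapping (illustrative only): when
+# no message is supplied, the status code is looked up in RESPONSES.
+def _exampleError():
+    # str(Error(404)) evaluates to "404 Not Found".
+    return str(Error(404))
+
+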
+class PageRedirect(Error):
+    """
+    A request resulted in an HTTP redirect.
+
+    @type location: C{str}
+    @ivar location: The location of the redirect which was not followed.
+    """
+    def __init__(self, code, message=None, response=None, location=None):
+        """
+        Initializes a page redirect exception.
+
+        @type code: C{str}
+        @param code: Refers to an HTTP status code, for example
+            C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to a
+            descriptive string that is used instead.
+
+        @type message: C{str}
+        @param message: A short error message, for example "NOT FOUND".
+
+        @type response: C{str}
+        @param response: A complete HTML document for an error page.
+
+        @type location: C{str}
+        @param location: The location response-header field value. It is an
+            absolute URI used to redirect the receiver to a location other than
+            the Request-URI so the request can be completed.
+        """
+        if not message:
+            try:
+                message = RESPONSES.get(int(code))
+            except ValueError:
+                # If code wasn't a stringified int, can't map the
+                # status code to a descriptive string so keep message
+                # unchanged.
+                pass
+
+        if location and message:
+            message = "%s to %s" % (message, location)
+
+        Error.__init__(self, code, message, response)
+        self.location = location
+
+
+
+class InfiniteRedirection(Error):
+    """
+    HTTP redirection is occurring endlessly.
+
+    @type location: C{str}
+    @ivar location: The first URL in the series of redirections which was
+        not followed.
+    """
+    def __init__(self, code, message=None, response=None, location=None):
+        """
+        Initializes an infinite redirection exception.
+
+        @type code: C{str}
+        @param code: Refers to an HTTP status code, for example
+            C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to a
+            descriptive string that is used instead.
+
+        @type message: C{str}
+        @param message: A short error message, for example "NOT FOUND".
+
+        @type response: C{str}
+        @param response: A complete HTML document for an error page.
+
+        @type location: C{str}
+        @param location: The location response-header field value. It is an
+            absolute URI used to redirect the receiver to a location other than
+            the Request-URI so the request can be completed.
+        """
+        if not message:
+            try:
+                message = RESPONSES.get(int(code))
+            except ValueError:
+                # If code wasn't a stringified int, can't map the
+                # status code to a descriptive string so keep message
+                # unchanged.
+                pass
+
+        if location and message:
+            message = "%s to %s" % (message, location)
+
+        Error.__init__(self, code, message, response)
+        self.location = location
+
+
+
+class RedirectWithNoLocation(Error):
+    """
+    Exception passed to L{ResponseFailed} if we got a redirect without a
+    C{Location} header field.
+
+    @since: 11.1
+    """
+
+    def __init__(self, code, message, uri):
+        """
+        Initializes a page redirect exception when no location is given.
+
+        @type code: C{str}
+        @param code: Refers to an HTTP status code, for example
+            C{http.NOT_FOUND}. If no C{message} is given, C{code} is mapped to
+            a descriptive string that is used instead.
+
+        @type message: C{str}
+        @param message: A short error message.
+
+        @type uri: C{str}
+        @param uri: The URI which failed to give a proper location header
+            field.
+        """
+        message = "%s to %s" % (message, uri)
+
+        Error.__init__(self, code, message)
+        self.uri = uri
+
+
+
+class UnsupportedMethod(Exception):
+    """
+    Raised by a resource when faced with a strange request method.
+
+    RFC 2616 (HTTP 1.1) gives us two choices when faced with this situation:
+    If the type of request is known to us, but not allowed for the requested
+    resource, respond with NOT_ALLOWED.  Otherwise, if the request is something
+    we don't know how to deal with in any case, respond with NOT_IMPLEMENTED.
+
+    When this exception is raised by a Resource's render method, the server
+    will make the appropriate response.
+
+    This exception's first argument MUST be a sequence of the methods the
+    resource *does* support.
+    """
+
+    allowedMethods = ()
+
+    def __init__(self, allowedMethods, *args):
+        Exception.__init__(self, allowedMethods, *args)
+        self.allowedMethods = allowedMethods
+
+        if not isinstance(allowedMethods, Sequence):
+            raise TypeError(
+                "First argument must be a sequence of supported methods, "
+                "but my first argument is not a sequence.")
+
+
+
+class SchemeNotSupported(Exception):
+    """
+    The scheme of a URI was not one of the supported values.
+    """
+
+
+
+class RenderError(Exception):
+    """
+    Base exception class for all errors which can occur during template
+    rendering.
+    """
+
+
+
+class MissingRenderMethod(RenderError):
+    """
+    Tried to use a render method which does not exist.
+
+    @ivar element: The element which did not have the render method.
+    @ivar renderName: The name of the renderer which could not be found.
+    """
+    def __init__(self, element, renderName):
+        RenderError.__init__(self, element, renderName)
+        self.element = element
+        self.renderName = renderName
+
+
+    def __repr__(self):
+        return '%r: %r had no render method named %r' % (
+            self.__class__.__name__, self.element, self.renderName)
+
+
+
+class MissingTemplateLoader(RenderError):
+    """
+    L{MissingTemplateLoader} is raised when trying to render an Element without
+    a template loader, i.e. a C{loader} attribute.
+
+    @ivar element: The Element which did not have a document factory.
+    """
+    def __init__(self, element):
+        RenderError.__init__(self, element)
+        self.element = element
+
+
+    def __repr__(self):
+        return '%r: %r had no loader' % (self.__class__.__name__,
+                                         self.element)
+
+
+
+class UnexposedMethodError(Exception):
+    """
+    Raised on any attempt to get a method which has not been exposed.
+    """
+
+
+
+class UnfilledSlot(Exception):
+    """
+    During flattening, a slot with no associated data was encountered.
+    """
+
+
+
+class UnsupportedType(Exception):
+    """
+    During flattening, an object of a type which cannot be flattened was
+    encountered.
+    """
+
+
+
+class FlattenerError(Exception):
+    """
+    An error occurred while flattening an object.
+
+    @ivar _roots: A list of the objects on the flattener's stack at the time
+        the unflattenable object was encountered.  The first element is the
+        least deeply nested object and the last element is the most deeply
+        nested.
+    """
+    def __init__(self, exception, roots, traceback):
+        self._exception = exception
+        self._roots = roots
+        self._traceback = traceback
+        Exception.__init__(self, exception, roots, traceback)
+
+
+    def _formatRoot(self, obj):
+        """
+        Convert an object from C{self._roots} to a string suitable for
+        inclusion in a render-traceback (like a normal Python traceback, but
+        can include "frame" source locations which are not in Python source
+        files).
+
+        @param obj: Any object which can be a render step I{root}.
+            Typically, L{Tag}s, strings, and other simple Python types.
+
+        @return: A string representation of C{obj}.
+        @rtype: L{str}
+        """
+        # There's a circular dependency between this class and 'Tag', although
+        # only for an isinstance() check.
+        from twisted.web.template import Tag
+        if isinstance(obj, (str, unicode)):
+            # It's somewhat unlikely that there will ever be a str in the roots
+            # list.  However, something like a MemoryError during a str.replace
+            # call (eg, replacing " with &quot;) could possibly cause this.
+            # Likewise, UTF-8 encoding a unicode string to a byte string might
+            # fail like this.
+            if len(obj) > 40:
+                if isinstance(obj, str):
+                    prefix = 1
+                else:
+                    prefix = 2
+                return repr(obj[:20])[:-1] + '<...>' + repr(obj[-20:])[prefix:]
+            else:
+                return repr(obj)
+        elif isinstance(obj, Tag):
+            if obj.filename is None:
+                return 'Tag <' + obj.tagName + '>'
+            else:
+                return "File \"%s\", line %d, column %d, in \"%s\"" % (
+                    obj.filename, obj.lineNumber,
+                    obj.columnNumber, obj.tagName)
+        else:
+            return repr(obj)
+
+
+    def __repr__(self):
+        """
+        Present a string representation which includes a template traceback, so
+        we can tell where this error occurred in the template, as well as in
+        Python.
+        """
+        # Avoid importing things unnecessarily until we actually need them;
+        # since this is an 'error' module we should be extra paranoid about
+        # that.
+        from traceback import format_list
+        if self._roots:
+            roots = '  ' + '\n  '.join([
+                    self._formatRoot(r) for r in self._roots]) + '\n'
+        else:
+            roots = ''
+        if self._traceback:
+            traceback = '\n'.join([
+                    line
+                    for entry in format_list(self._traceback)
+                    for line in entry.splitlines()]) + '\n'
+        else:
+            traceback = ''
+        return (
+            'Exception while flattening:\n' +
+            roots + traceback +
+            self._exception.__class__.__name__ + ': ' +
+            str(self._exception) + '\n')
+
+
+    def __str__(self):
+        return repr(self)
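
The UnsupportedMethod contract documented above is meant to be used from a
resource's render method.  As a minimal illustrative sketch (not part of the
imported file; the GetOnly class and its response body are invented for the
example), a resource that only supports GET raises the exception with the
sequence of methods it does support, and the server answers with NOT_ALLOWED
or NOT_IMPLEMENTED as described in the docstring:

    from twisted.web.error import UnsupportedMethod
    from twisted.web.resource import Resource

    class GetOnly(Resource):
        isLeaf = True

        def render(self, request):
            if request.method != b"GET":
                # The first argument MUST be a sequence of the methods the
                # resource *does* support.
                raise UnsupportedMethod((b"GET",))
            return b"hello"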
diff --git a/ThirdParty/Twisted/twisted/web/failure.xhtml b/ThirdParty/Twisted/twisted/web/failure.xhtml
new file mode 100644
index 0000000..1e88a3a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/failure.xhtml
@@ -0,0 +1,71 @@
+<div xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">
+  <style type="text/css">
+    div.error {
+      color: red;
+      font-family: Verdana, Arial, helvetica, sans-serif;
+      font-weight: bold;
+    }
+
+    div {
+      font-family: Verdana, Arial, helvetica, sans-serif;
+    }
+
+    div.stackTrace {
+    }
+
+    div.frame {
+      padding: 1em;
+      background: white;
+      border-bottom: thin black dashed;
+    }
+
+    div.frame:first-child {
+      padding: 1em;
+      background: white;
+      border-top: thin black dashed;
+      border-bottom: thin black dashed;
+    }
+
+    div.location {
+    }
+
+    span.function {
+      font-weight: bold;
+      font-family: "Courier New", courier, monospace;
+    }
+
+    div.snippet {
+      margin-bottom: 0.5em;
+      margin-left: 1em;
+      background: #FFFFDD;
+    }
+
+    div.snippetHighlightLine {
+      color: red;
+    }
+
+    span.code {
+      font-family: "Courier New", courier, monospace;
+    }
+  </style>
+
+  <div class="error">
+    <span t:render="type" />: <span t:render="value" />
+  </div>
+  <div class="stackTrace" t:render="traceback">
+    <div class="frame" t:render="frames">
+      <div class="location">
+        <span t:render="filename" />:<span t:render="lineNumber" /> in <span class="function" t:render="function" />
+      </div>
+      <div class="snippet" t:render="source">
+        <div t:render="sourceLines">
+          <span class="lineno" t:render="lineNumber" />
+          <code class="code" t:render="sourceLine" />
+        </div>
+      </div>
+    </div>
+  </div>
+  <div class="error">
+    <span t:render="type" />: <span t:render="value" />
+  </div>
+</div>
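
The t:render directives in this template (type, value, traceback, frames,
filename, lineNumber, function, source, sourceLines, sourceLine) are filled in
by the traceback-rendering element in twisted.web.util.  As a rough sketch of
how the template might be exercised (the failing division and variable names
are invented; the sketch assumes FailureElement from twisted.web.util and
flattenString from twisted.web.template in this release):

    from twisted.python.failure import Failure
    from twisted.web.template import flattenString
    from twisted.web.util import FailureElement

    try:
        1 / 0
    except ZeroDivisionError:
        failure = Failure()

    # The Deferred fires with the rendered HTML built from failure.xhtml.
    d = flattenString(None, FailureElement(failure))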
diff --git a/ThirdParty/Twisted/twisted/web/guard.py b/ThirdParty/Twisted/twisted/web/guard.py
new file mode 100644
index 0000000..f3bb4d7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/guard.py
@@ -0,0 +1,17 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Resource traversal integration with L{twisted.cred} to allow for
+authentication and authorization of HTTP requests.
+"""
+
+# Expose HTTP authentication classes here.
+from twisted.web._auth.wrapper import HTTPAuthSessionWrapper
+from twisted.web._auth.basic import BasicCredentialFactory
+from twisted.web._auth.digest import DigestCredentialFactory
+
+__all__ = [
+    "HTTPAuthSessionWrapper",
+
+    "BasicCredentialFactory", "DigestCredentialFactory"]
diff --git a/ThirdParty/Twisted/twisted/web/html.py b/ThirdParty/Twisted/twisted/web/html.py
new file mode 100644
index 0000000..de8db11
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/html.py
@@ -0,0 +1,46 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""I hold HTML generation helpers.
+"""
+
+from cgi import escape
+
+from twisted.python import log
+from twisted.python.compat import NativeStringIO as StringIO
+
+
+def PRE(text):
+    "Wrap <pre> tags around some text and HTML-escape it."
+    return "<pre>"+escape(text)+"</pre>"
+
+def UL(lst):
+    io = StringIO()
+    io.write("<ul>\n")
+    for el in lst:
+        io.write("<li> %s</li>\n" % el)
+    io.write("</ul>")
+    return io.getvalue()
+
+def linkList(lst):
+    io = StringIO()
+    io.write("<ul>\n")
+    for hr, el in lst:
+        io.write('<li> <a href="%s">%s</a></li>\n' % (hr, el))
+    io.write("</ul>")
+    return io.getvalue()
+
+def output(func, *args, **kw):
+    """output(func, *args, **kw) -> html string
+    Either return the result of a function (which presumably returns an
+    HTML-legal string) or a sparse HTMLized error message and a message
+    in the server log.
+    """
+    try:
+        return func(*args, **kw)
+    except:
+        log.msg("Error calling %r:" % (func,))
+        log.err()
+        return PRE("An error occurred.")
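
For illustration only (the sample strings and URL are made up), these helpers
simply build small HTML fragments:

    from twisted.web import html

    html.PRE("a < b & c")
    # -> '<pre>a &lt; b &amp; c</pre>'
    html.UL(["first", "second"])
    # -> '<ul>\n<li> first</li>\n<li> second</li>\n</ul>'
    html.linkList([("http://example.com/", "example")])
    # -> '<ul>\n<li> <a href="http://example.com/">example</a></li>\n</ul>'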
diff --git a/ThirdParty/Twisted/twisted/web/http.py b/ThirdParty/Twisted/twisted/web/http.py
new file mode 100644
index 0000000..3a4fe2b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/http.py
@@ -0,0 +1,1889 @@
+# -*- test-case-name: twisted.web.test.test_http -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+HyperText Transfer Protocol implementation.
+
+This is the basic server-side protocol implementation used by the Twisted
+Web server.  It can parse HTTP 1.0 requests and supports many HTTP 1.1
+features as well.  Additionally, some functionality implemented here is
+also useful for HTTP clients (such as the chunked encoding parser).
+
+@var CACHED: A marker value to be returned from cache-related request methods to
+    indicate to the caller that a cached response will be usable and no response
+    body should be generated.
+
+@var NOT_MODIFIED: An HTTP response code indicating that a requested
+    pre-condition (for example, the condition represented by an
+    I{If-Modified-Since} header is present in the request) has succeeded.  This
+    indicates a response body cached by the client can be used.
+
+@var PRECONDITION_FAILED: An HTTP response code indicating that a requested
+    pre-condition (for example, the condition represented by an I{If-None-Match}
+    header is present in the request) has failed.  This should typically
+    indicate that the server has not taken the requested action.
+"""
+
+from __future__ import division, absolute_import
+
+__all__ = [
+    'SWITCHING', 'OK', 'CREATED', 'ACCEPTED', 'NON_AUTHORITATIVE_INFORMATION',
+    'NO_CONTENT', 'RESET_CONTENT', 'PARTIAL_CONTENT', 'MULTI_STATUS',
+
+    'MULTIPLE_CHOICE', 'MOVED_PERMANENTLY', 'FOUND', 'SEE_OTHER',
+    'NOT_MODIFIED', 'USE_PROXY', 'TEMPORARY_REDIRECT',
+
+    'BAD_REQUEST', 'UNAUTHORIZED', 'PAYMENT_REQUIRED', 'FORBIDDEN', 'NOT_FOUND',
+    'NOT_ALLOWED', 'NOT_ACCEPTABLE', 'PROXY_AUTH_REQUIRED', 'REQUEST_TIMEOUT',
+    'CONFLICT', 'GONE', 'LENGTH_REQUIRED', 'PRECONDITION_FAILED',
+    'REQUEST_ENTITY_TOO_LARGE', 'REQUEST_URI_TOO_LONG',
+    'UNSUPPORTED_MEDIA_TYPE', 'REQUESTED_RANGE_NOT_SATISFIABLE',
+    'EXPECTATION_FAILED',
+
+    'INTERNAL_SERVER_ERROR', 'NOT_IMPLEMENTED', 'BAD_GATEWAY',
+    'SERVICE_UNAVAILABLE', 'GATEWAY_TIMEOUT', 'HTTP_VERSION_NOT_SUPPORTED',
+    'INSUFFICIENT_STORAGE_SPACE', 'NOT_EXTENDED',
+
+    'RESPONSES', 'CACHED',
+
+    'urlparse', 'parse_qs', 'datetimeToString', 'datetimeToLogString', 'timegm',
+    'stringToDatetime', 'toChunk', 'fromChunk', 'parseContentRange',
+
+    'StringTransport', 'HTTPClient', 'NO_BODY_CODES', 'Request',
+    'PotentialDataLoss', 'HTTPChannel', 'HTTPFactory',
+    ]
+
+
+# system imports
+import tempfile
+import base64, binascii
+import cgi
+import socket
+import math
+import time
+import calendar
+import warnings
+import os
+from io import BytesIO as StringIO
+
+try:
+    from urlparse import (
+        ParseResult as ParseResultBytes, urlparse as _urlparse)
+    from urllib import unquote
+    from cgi import parse_header as _parseHeader
+except ImportError:
+    from urllib.parse import (
+        ParseResultBytes, urlparse as _urlparse, unquote as _unquote)
+    from io import TextIOWrapper
+
+    def unquote(string, *args, **kwargs):
+        return _unquote(string.decode('charmap'), *args, **kwargs).encode('charmap')
+
+    def _parseHeader(line):
+        key, pdict = cgi.parse_header(line.decode('charmap'))
+        return (key.encode('charmap'), pdict)
+
+
+from zope.interface import implementer
+
+# twisted imports
+from twisted.python.compat import (_PY3, unicode, intToBytes, networkString,
+                                   nativeString)
+from twisted.internet import interfaces, reactor, protocol, address
+from twisted.internet.defer import Deferred
+from twisted.protocols import policies, basic
+from twisted.python import log
+
+from twisted.web.http_headers import _DictHeaders, Headers
+
+from twisted.web._responses import (
+    SWITCHING,
+
+    OK, CREATED, ACCEPTED, NON_AUTHORITATIVE_INFORMATION, NO_CONTENT,
+    RESET_CONTENT, PARTIAL_CONTENT, MULTI_STATUS,
+
+    MULTIPLE_CHOICE, MOVED_PERMANENTLY, FOUND, SEE_OTHER, NOT_MODIFIED,
+    USE_PROXY, TEMPORARY_REDIRECT,
+
+    BAD_REQUEST, UNAUTHORIZED, PAYMENT_REQUIRED, FORBIDDEN, NOT_FOUND,
+    NOT_ALLOWED, NOT_ACCEPTABLE, PROXY_AUTH_REQUIRED, REQUEST_TIMEOUT,
+    CONFLICT, GONE, LENGTH_REQUIRED, PRECONDITION_FAILED,
+    REQUEST_ENTITY_TOO_LARGE, REQUEST_URI_TOO_LONG, UNSUPPORTED_MEDIA_TYPE,
+    REQUESTED_RANGE_NOT_SATISFIABLE, EXPECTATION_FAILED,
+
+    INTERNAL_SERVER_ERROR, NOT_IMPLEMENTED, BAD_GATEWAY, SERVICE_UNAVAILABLE,
+    GATEWAY_TIMEOUT, HTTP_VERSION_NOT_SUPPORTED, INSUFFICIENT_STORAGE_SPACE,
+    NOT_EXTENDED,
+
+    RESPONSES)
+
+if _PY3:
+    _intTypes = int
+else:
+    _intTypes = (int, long)
+
+protocol_version = "HTTP/1.1"
+
+CACHED = """Magic constant returned by http.Request methods to set cache
+validation headers when the request is conditional and the value fails
+the condition."""
+
+# backwards compatibility
+responses = RESPONSES
+
+
+# datetime parsing and formatting
+weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
+monthname = [None,
+             'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+             'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+weekdayname_lower = [name.lower() for name in weekdayname]
+monthname_lower = [name and name.lower() for name in monthname]
+
+def urlparse(url):
+    """
+    Parse a URL into six components.
+
+    This is similar to C{urlparse.urlparse}, but rejects C{unicode} input
+    and always produces C{bytes} output.
+
+    @type url: C{bytes}
+
+    @raise TypeError: The given url was a C{unicode} string instead of a
+        C{bytes}.
+
+    @return: The scheme, net location, path, params, query string, and fragment
+        of the URL - all as C{bytes}.
+    @rtype: C{ParseResultBytes}
+    """
+    if isinstance(url, unicode):
+        raise TypeError("url must be bytes, not unicode")
+    scheme, netloc, path, params, query, fragment = _urlparse(url)
+    if isinstance(scheme, unicode):
+        scheme = scheme.encode('ascii')
+        netloc = netloc.encode('ascii')
+        path = path.encode('ascii')
+        query = query.encode('ascii')
+        fragment = fragment.encode('ascii')
+    return ParseResultBytes(scheme, netloc, path, params, query, fragment)
+
+
+
+def parse_qs(qs, keep_blank_values=0, strict_parsing=0):
+    """
+    Like C{cgi.parse_qs}, but with support for parsing byte strings on Python 3.
+
+    @type qs: C{bytes}
+    """
+    d = {}
+    items = [s2 for s1 in qs.split(b"&") for s2 in s1.split(b";")]
+    for item in items:
+        try:
+            k, v = item.split(b"=", 1)
+        except ValueError:
+            if strict_parsing:
+                raise
+            continue
+        if v or keep_blank_values:
+            k = unquote(k.replace(b"+", b" "))
+            v = unquote(v.replace(b"+", b" "))
+            if k in d:
+                d[k].append(v)
+            else:
+                d[k] = [v]
+    return d
+
+
+
+def datetimeToString(msSinceEpoch=None):
+    """
+    Convert seconds since epoch to HTTP datetime string.
+
+    @rtype: C{bytes}
+    """
+    if msSinceEpoch == None:
+        msSinceEpoch = time.time()
+    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(msSinceEpoch)
+    s = networkString("%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
+            weekdayname[wd],
+            day, monthname[month], year,
+            hh, mm, ss))
+    return s
+
+
+
+def datetimeToLogString(msSinceEpoch=None):
+    """
+    Convert seconds since epoch to log datetime string.
+
+    @rtype: C{str}
+    """
+    if msSinceEpoch == None:
+        msSinceEpoch = time.time()
+    year, month, day, hh, mm, ss, wd, y, z = time.gmtime(msSinceEpoch)
+    s = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (
+        day, monthname[month], year,
+        hh, mm, ss)
+    return s
+
+
+
+def timegm(year, month, day, hour, minute, second):
+    """
+    Convert time tuple in GMT to seconds since epoch, GMT
+    """
+    EPOCH = 1970
+    if year < EPOCH:
+        raise ValueError("Years prior to %d not supported" % (EPOCH,))
+    assert 1 <= month <= 12
+    days = 365*(year-EPOCH) + calendar.leapdays(EPOCH, year)
+    for i in range(1, month):
+        days = days + calendar.mdays[i]
+    if month > 2 and calendar.isleap(year):
+        days = days + 1
+    days = days + day - 1
+    hours = days*24 + hour
+    minutes = hours*60 + minute
+    seconds = minutes*60 + second
+    return seconds
+
+
+
+def stringToDatetime(dateString):
+    """
+    Convert an HTTP date string (one of three formats) to seconds since epoch.
+
+    @type dateString: C{bytes}
+    """
+    parts = nativeString(dateString).split()
+
+    if not parts[0][0:3].lower() in weekdayname_lower:
+        # Weekday is stupid. Might have been omitted.
+        try:
+            return stringToDatetime(b"Sun, " + dateString)
+        except ValueError:
+            # Guess not.
+            pass
+
+    partlen = len(parts)
+    if (partlen == 5 or partlen == 6) and parts[1].isdigit():
+        # 1st date format: Sun, 06 Nov 1994 08:49:37 GMT
+        # (Note: "GMT" is literal, not a variable timezone)
+        # (also handles without "GMT")
+        # This is the normal format
+        day = parts[1]
+        month = parts[2]
+        year = parts[3]
+        time = parts[4]
+    elif (partlen == 3 or partlen == 4) and parts[1].find('-') != -1:
+        # 2nd date format: Sunday, 06-Nov-94 08:49:37 GMT
+        # (Note: "GMT" is literal, not a variable timezone)
+        # (also handles without "GMT")
+        # Two digit year, yucko.
+        day, month, year = parts[1].split('-')
+        time = parts[2]
+        year=int(year)
+        if year < 69:
+            year = year + 2000
+        elif year < 100:
+            year = year + 1900
+    elif len(parts) == 5:
+        # 3rd date format: Sun Nov  6 08:49:37 1994
+        # ANSI C asctime() format.
+        day = parts[2]
+        month = parts[1]
+        year = parts[4]
+        time = parts[3]
+    else:
+        raise ValueError("Unknown datetime format %r" % dateString)
+
+    day = int(day)
+    month = int(monthname_lower.index(month.lower()))
+    year = int(year)
+    hour, min, sec = map(int, time.split(':'))
+    return int(timegm(year, month, day, hour, min, sec))
+
+
+
+def toChunk(data):
+    """
+    Convert string to a chunk.
+
+    @type data: C{bytes}
+
+    @returns: a tuple of C{bytes} representing the chunked encoding of data
+    """
+    return (networkString('%x' % (len(data),)), b"\r\n", data, b"\r\n")
+
+
+
+def fromChunk(data):
+    """
+    Convert chunk to string.
+
+    @type data: C{bytes}
+
+    @return: tuple of (result, remaining) - both C{bytes}.
+
+    @raise ValueError: If the given data is not a correctly formatted chunked
+        byte string.
+    """
+    prefix, rest = data.split(b'\r\n', 1)
+    length = int(prefix, 16)
+    if length < 0:
+        raise ValueError("Chunk length must be >= 0, not %d" % (length,))
+    if rest[length:length + 2] != b'\r\n':
+        raise ValueError("chunk must end with CRLF")
+    return rest[:length], rest[length + 2:]
+
+
+
+def parseContentRange(header):
+    """
+    Parse a content-range header into (start, end, realLength).
+
+    realLength might be None if real length is not known ('*').
+    """
+    kind, other = header.strip().split()
+    if kind.lower() != "bytes":
+        raise ValueError("a range of type %r is not supported")
+    startend, realLength = other.split("/")
+    start, end = map(int, startend.split("-"))
+    if realLength == "*":
+        realLength = None
+    else:
+        realLength = int(realLength)
+    return (start, end, realLength)
+
+
+
+class StringTransport:
+    """
+    I am a StringIO wrapper that conforms to the transport API. I support
+    the `writeSequence' method.
+    """
+    def __init__(self):
+        self.s = StringIO()
+    def writeSequence(self, seq):
+        self.s.write(b''.join(seq))
+    def __getattr__(self, attr):
+        return getattr(self.__dict__['s'], attr)
+
+
+
+class HTTPClient(basic.LineReceiver):
+    """
+    A client for HTTP 1.0.
+
+    Notes:
+    You probably want to send a 'Host' header with the name of the site you're
+    connecting to, in order not to break name-based virtual hosting.
+
+    @ivar length: The length of the request body in bytes.
+    @type length: C{int}
+
+    @ivar firstLine: Are we waiting for the first header line?
+    @type firstLine: C{bool}
+
+    @ivar __buffer: The buffer that stores the response to the HTTP request.
+    @type __buffer: A C{StringIO} object.
+
+    @ivar _header: Part or all of an HTTP request header.
+    @type _header: C{bytes}
+    """
+    length = None
+    firstLine = True
+    __buffer = None
+    _header = b""
+
+    def sendCommand(self, command, path):
+        self.transport.writeSequence([command, b' ', path, b' HTTP/1.0\r\n'])
+
+    def sendHeader(self, name, value):
+        if not isinstance(value, bytes):
+            # XXX Deprecate this case
+            value = networkString(str(value))
+        self.transport.writeSequence([name, b': ', value, b'\r\n'])
+
+    def endHeaders(self):
+        self.transport.write(b'\r\n')
+
+
+    def extractHeader(self, header):
+        """
+        Given a complete HTTP header, extract the field name and value and
+        process the header.
+
+        @param header: a complete HTTP request header of the form
+            'field-name: value'.
+        @type header: C{bytes}
+        """
+        key, val = header.split(b':', 1)
+        val = val.lstrip()
+        self.handleHeader(key, val)
+        if key.lower() == b'content-length':
+            self.length = int(val)
+
+
+    def lineReceived(self, line):
+        """
+        Parse the status line and headers for an HTTP request.
+
+        @param line: Part of an HTTP request header. Request bodies are parsed
+            in L{rawDataReceived}.
+        @type line: C{bytes}
+        """
+        if self.firstLine:
+            self.firstLine = False
+            l = line.split(None, 2)
+            version = l[0]
+            status = l[1]
+            try:
+                message = l[2]
+            except IndexError:
+                # sometimes there is no message
+                message = b""
+            self.handleStatus(version, status, message)
+            return
+        if not line:
+            if self._header != b"":
+                # Only extract headers if there are any
+                self.extractHeader(self._header)
+            self.__buffer = StringIO()
+            self.handleEndHeaders()
+            self.setRawMode()
+            return
+
+        if line.startswith(b'\t') or line.startswith(b' '):
+            # This line is part of a multiline header. According to RFC 822, in
+            # "unfolding" multiline headers you do not strip the leading
+            # whitespace on the continuing line.
+            self._header = self._header + line
+        elif self._header:
+            # This line starts a new header, so process the previous one.
+            self.extractHeader(self._header)
+            self._header = line
+        else: # First header
+            self._header = line
+
+
+    def connectionLost(self, reason):
+        self.handleResponseEnd()
+
+    def handleResponseEnd(self):
+        """
+        The response has been completely received.
+
+        This callback may be invoked more than once per request.
+        """
+        if self.__buffer is not None:
+            b = self.__buffer.getvalue()
+            self.__buffer = None
+            self.handleResponse(b)
+
+    def handleResponsePart(self, data):
+        self.__buffer.write(data)
+
+    def connectionMade(self):
+        pass
+
+    def handleStatus(self, version, status, message):
+        """
+        Called when the status-line is received.
+
+        @param version: e.g. 'HTTP/1.0'
+        @param status: e.g. '200'
+        @type status: C{bytes}
+        @param message: e.g. 'OK'
+        """
+
+    def handleHeader(self, key, val):
+        """
+        Called every time a header is received.
+        """
+
+    def handleEndHeaders(self):
+        """
+        Called when all headers have been received.
+        """
+
+
+    def rawDataReceived(self, data):
+        if self.length is not None:
+            data, rest = data[:self.length], data[self.length:]
+            self.length -= len(data)
+        else:
+            rest = b''
+        self.handleResponsePart(data)
+        if self.length == 0:
+            self.handleResponseEnd()
+            self.setLineMode(rest)
+
+
+
+# response codes that must have empty bodies
+NO_BODY_CODES = (204, 304)
+
+
+@implementer(interfaces.IConsumer)
+class Request:
+    """
+    A HTTP request.
+
+    Subclasses should override the process() method to determine how
+    the request will be processed.
+
+    @ivar method: The HTTP method that was used.
+    @ivar uri: The full URI that was requested (includes arguments).
+    @ivar path: The path only (arguments not included).
+    @ivar args: All of the arguments, including URL and POST arguments.
+    @type args: A mapping of strings (the argument names) to lists of values.
+                i.e., ?foo=bar&foo=baz&quux=spam results in
+                {'foo': ['bar', 'baz'], 'quux': ['spam']}.
+
+    @type requestHeaders: L{http_headers.Headers}
+    @ivar requestHeaders: All received HTTP request headers.
+
+    @ivar received_headers: Backwards-compatibility access to
+        C{requestHeaders}.  Use C{requestHeaders} instead.  C{received_headers}
+        behaves mostly like a C{dict} and does not provide access to all header
+        values.
+
+    @type responseHeaders: L{http_headers.Headers}
+    @ivar responseHeaders: All HTTP response headers to be sent.
+
+    @ivar headers: Backwards-compatibility access to C{responseHeaders}.  Use
+        C{responseHeaders} instead.  C{headers} behaves mostly like a C{dict}
+        and does not provide access to all header values nor does it allow
+        multiple values for one header to be set.
+
+    @ivar notifications: A C{list} of L{Deferred}s which are waiting for
+        notification that the response to this request has been finished
+        (successfully or with an error).  Don't use this attribute directly,
+        instead use the L{Request.notifyFinish} method.
+
+    @ivar _disconnected: A flag which is C{False} until the connection over
+        which this request was received is closed and which is C{True} after
+        that.
+    @type _disconnected: C{bool}
+    """
+    producer = None
+    finished = 0
+    code = OK
+    code_message = RESPONSES[OK]
+    method = "(no method yet)"
+    clientproto = b"(no clientproto yet)"
+    uri = "(no uri yet)"
+    startedWriting = 0
+    chunked = 0
+    sentLength = 0 # content-length of response, or total bytes sent via chunking
+    etag = None
+    lastModified = None
+    args = None
+    path = None
+    content = None
+    _forceSSL = 0
+    _disconnected = False
+
+    def __init__(self, channel, queued):
+        """
+        @param channel: the channel we're connected to.
+        @param queued: are we in the request queue, or can we start writing to
+            the transport?
+        """
+        self.notifications = []
+        self.channel = channel
+        self.queued = queued
+        self.requestHeaders = Headers()
+        self.received_cookies = {}
+        self.responseHeaders = Headers()
+        self.cookies = [] # outgoing cookies
+
+        if queued:
+            self.transport = StringTransport()
+        else:
+            self.transport = self.channel.transport
+
+
+    def __setattr__(self, name, value):
+        """
+        Support assignment of C{dict} instances to C{received_headers} for
+        backwards-compatibility.
+        """
+        if name == 'received_headers':
+            # A property would be nice, but Request is classic.
+            self.requestHeaders = headers = Headers()
+            for k, v in value.items():
+                headers.setRawHeaders(k, [v])
+        elif name == 'requestHeaders':
+            self.__dict__[name] = value
+            self.__dict__['received_headers'] = _DictHeaders(value)
+        elif name == 'headers':
+            self.responseHeaders = headers = Headers()
+            for k, v in value.items():
+                headers.setRawHeaders(k, [v])
+        elif name == 'responseHeaders':
+            self.__dict__[name] = value
+            self.__dict__['headers'] = _DictHeaders(value)
+        else:
+            self.__dict__[name] = value
+
+
+    def _cleanup(self):
+        """
+        Called when we have finished responding and are no longer queued.
+        """
+        if self.producer:
+            log.err(RuntimeError("Producer was not unregistered for %s" % self.uri))
+            self.unregisterProducer()
+        self.channel.requestDone(self)
+        del self.channel
+        try:
+            self.content.close()
+        except OSError:
+            # win32 suckiness, no idea why it does this
+            pass
+        del self.content
+        for d in self.notifications:
+            d.callback(None)
+        self.notifications = []
+
+    # methods for channel - end users should not use these
+
+    def noLongerQueued(self):
+        """
+        Notify the object that it is no longer queued.
+
+        We start writing whatever data we have to the transport, etc.
+
+        This method is not intended for users.
+        """
+        if not self.queued:
+            raise RuntimeError("noLongerQueued() got called unnecessarily.")
+
+        self.queued = 0
+
+        # set transport to real one and send any buffer data
+        data = self.transport.getvalue()
+        self.transport = self.channel.transport
+        if data:
+            self.transport.write(data)
+
+        # if we have producer, register it with transport
+        if (self.producer is not None) and not self.finished:
+            self.transport.registerProducer(self.producer, self.streamingProducer)
+
+        # if we're finished, clean up
+        if self.finished:
+            self._cleanup()
+
+    def gotLength(self, length):
+        """
+        Called when HTTP channel got length of content in this request.
+
+        This method is not intended for users.
+
+        @param length: The length of the request body, as indicated by the
+            request headers.  C{None} if the request headers do not indicate a
+            length.
+        """
+        if length is not None and length < 100000:
+            self.content = StringIO()
+        else:
+            self.content = tempfile.TemporaryFile()
+
+
+    def parseCookies(self):
+        """
+        Parse cookie headers.
+
+        This method is not intended for users.
+        """
+        cookieheaders = self.requestHeaders.getRawHeaders(b"cookie")
+
+        if cookieheaders is None:
+            return
+
+        for cookietxt in cookieheaders:
+            if cookietxt:
+                for cook in cookietxt.split(b';'):
+                    cook = cook.lstrip()
+                    try:
+                        k, v = cook.split(b'=', 1)
+                        self.received_cookies[k] = v
+                    except ValueError:
+                        pass
+
+
+    def handleContentChunk(self, data):
+        """
+        Write a chunk of data.
+
+        This method is not intended for users.
+        """
+        self.content.write(data)
+
+
+    def requestReceived(self, command, path, version):
+        """
+        Called by channel when all data has been received.
+
+        This method is not intended for users.
+
+        @type command: C{bytes}
+        @param command: The HTTP verb of this request.  This has the case
+            supplied by the client (eg, it may be "get" rather than "GET").
+
+        @type path: C{bytes}
+        @param path: The URI of this request.
+
+        @type version: C{bytes}
+        @param version: The HTTP version of this request.
+        """
+        self.content.seek(0,0)
+        self.args = {}
+        self.stack = []
+
+        self.method, self.uri = command, path
+        self.clientproto = version
+        x = self.uri.split(b'?', 1)
+
+        if len(x) == 1:
+            self.path = self.uri
+        else:
+            self.path, argstring = x
+            self.args = parse_qs(argstring, 1)
+
+        # cache the client and server information, we'll need this later to be
+        # serialized and sent with the request so CGIs will work remotely
+        self.client = self.channel.transport.getPeer()
+        self.host = self.channel.transport.getHost()
+
+        # Argument processing
+        args = self.args
+        ctype = self.requestHeaders.getRawHeaders(b'content-type')
+        if ctype is not None:
+            ctype = ctype[0]
+
+        if self.method == b"POST" and ctype:
+            mfd = b'multipart/form-data'
+            key, pdict = _parseHeader(ctype)
+            if key == b'application/x-www-form-urlencoded':
+                args.update(parse_qs(self.content.read(), 1))
+            elif key == mfd:
+                try:
+                    args.update(cgi.parse_multipart(self.content, pdict))
+                except KeyError as e:
+                    if e.args[0] == b'content-disposition':
+                        # Parse_multipart can't cope with missing
+                        # content-disposition headers in multipart/form-data
+                        # parts, so we catch the exception and tell the client
+                        # it was a bad request.
+                        self.channel.transport.write(
+                                b"HTTP/1.1 400 Bad Request\r\n\r\n")
+                        self.channel.transport.loseConnection()
+                        return
+                    raise
+            self.content.seek(0, 0)
+
+        self.process()
+
+
+    def __repr__(self):
+        return '<%s %s %s>'% (self.method, self.uri, self.clientproto)
+
+    def process(self):
+        """
+        Override in subclasses.
+
+        This method is not intended for users.
+        """
+        pass
+
+
+    # consumer interface
+
+    def registerProducer(self, producer, streaming):
+        """
+        Register a producer.
+        """
+        if self.producer:
+            raise ValueError(
+                "registering producer %s before previous one (%s) was "
+                "unregistered" % (producer, self.producer))
+
+        self.streamingProducer = streaming
+        self.producer = producer
+
+        if self.queued:
+            if streaming:
+                producer.pauseProducing()
+        else:
+            self.transport.registerProducer(producer, streaming)
+
+    def unregisterProducer(self):
+        """
+        Unregister the producer.
+        """
+        if not self.queued:
+            self.transport.unregisterProducer()
+        self.producer = None
+
+    # private http response methods
+
+    def _sendError(self, code, resp=''):
+        self.transport.write('%s %s %s\r\n\r\n' % (self.clientproto, code, resp))
+
+
+    # The following is the public interface that people should be
+    # writing to.
+    def getHeader(self, key):
+        """
+        Get an HTTP request header.
+
+        @type key: C{bytes}
+        @param key: The name of the header to get the value of.
+
+        @rtype: C{bytes} or C{NoneType}
+        @return: The value of the specified header, or C{None} if that header
+            was not present in the request.
+        """
+        value = self.requestHeaders.getRawHeaders(key)
+        if value is not None:
+            return value[-1]
+
+
+    def getCookie(self, key):
+        """
+        Get a cookie that was sent from the network.
+        """
+        return self.received_cookies.get(key)
+
+
+    def notifyFinish(self):
+        """
+        Notify when the response to this request has finished.
+
+        @rtype: L{Deferred}
+
+        @return: A L{Deferred} which will be triggered when the request is
+            finished -- with a C{None} value if the request finishes
+            successfully or with an error if the request is interrupted by an
+            error (for example, the client closing the connection prematurely).
+        """
+        self.notifications.append(Deferred())
+        return self.notifications[-1]
+
+
+    def finish(self):
+        """
+        Indicate that all response data has been written to this L{Request}.
+        """
+        if self._disconnected:
+            raise RuntimeError(
+                "Request.finish called on a request after its connection was lost; "
+                "use Request.notifyFinish to keep track of this.")
+        if self.finished:
+            warnings.warn("Warning! request.finish called twice.", stacklevel=2)
+            return
+
+        if not self.startedWriting:
+            # write headers
+            self.write('')
+
+        if self.chunked:
+            # write last chunk and closing CRLF
+            self.transport.write(b"0\r\n\r\n")
+
+        # log request
+        if hasattr(self.channel, "factory"):
+            self.channel.factory.log(self)
+
+        self.finished = 1
+        if not self.queued:
+            self._cleanup()
+
+
+    def write(self, data):
+        """
+        Write some data as a result of an HTTP request.  The first
+        time this is called, it writes out response data.
+
+        @type data: C{bytes}
+        @param data: Some bytes to be sent as part of the response body.
+        """
+        if self.finished:
+            raise RuntimeError('Request.write called on a request after '
+                               'Request.finish was called.')
+        if not self.startedWriting:
+            self.startedWriting = 1
+            version = self.clientproto
+            l = []
+            l.append(
+                version + b" " +
+                intToBytes(self.code) + b" " +
+                networkString(self.code_message) + b"\r\n")
+
+            # if we don't have a content length, we send data in
+            # chunked mode, so that we can support pipelining in
+            # persistent connections.
+            if ((version == b"HTTP/1.1") and
+                (self.responseHeaders.getRawHeaders(b'content-length') is None) and
+                self.method != b"HEAD" and self.code not in NO_BODY_CODES):
+                l.append(b'Transfer-Encoding: chunked\r\n')
+                self.chunked = 1
+
+            if self.lastModified is not None:
+                if self.responseHeaders.hasHeader(b'last-modified'):
+                    log.msg("Warning: last-modified specified both in"
+                            " header list and lastModified attribute.")
+                else:
+                    self.responseHeaders.setRawHeaders(
+                        b'last-modified',
+                        [datetimeToString(self.lastModified)])
+
+            if self.etag is not None:
+                self.responseHeaders.setRawHeaders(b'ETag', [self.etag])
+
+            for name, values in self.responseHeaders.getAllRawHeaders():
+                for value in values:
+                    if not isinstance(value, bytes):
+                        warnings.warn(
+                            "Passing non-bytes header values is deprecated "
+                            "since Twisted 12.3. Pass only bytes instead.",
+                            category=DeprecationWarning, stacklevel=2)
+                        # Backward compatible cast for non-bytes values
+                        value = networkString('%s' % (value,))
+                    l.extend([name, b": ", value, b"\r\n"])
+
+            for cookie in self.cookies:
+                l.append(networkString('Set-Cookie: %s\r\n' % (cookie,)))
+
+            l.append(b"\r\n")
+
+            self.transport.writeSequence(l)
+
+            # if this is a "HEAD" request, we shouldn't return any data
+            if self.method == b"HEAD":
+                self.write = lambda data: None
+                return
+
+            # for certain result codes, we should never return any data
+            if self.code in NO_BODY_CODES:
+                self.write = lambda data: None
+                return
+
+        self.sentLength = self.sentLength + len(data)
+        if data:
+            if self.chunked:
+                self.transport.writeSequence(toChunk(data))
+            else:
+                self.transport.write(data)
+
+    def addCookie(self, k, v, expires=None, domain=None, path=None, max_age=None, comment=None, secure=None):
+        """
+        Set an outgoing HTTP cookie.
+
+        In general, you should consider using sessions instead of cookies, see
+        L{twisted.web.server.Request.getSession} and the
+        L{twisted.web.server.Session} class for details.
+        """
+        cookie = '%s=%s' % (k, v)
+        if expires is not None:
+            cookie = cookie +"; Expires=%s" % expires
+        if domain is not None:
+            cookie = cookie +"; Domain=%s" % domain
+        if path is not None:
+            cookie = cookie +"; Path=%s" % path
+        if max_age is not None:
+            cookie = cookie +"; Max-Age=%s" % max_age
+        if comment is not None:
+            cookie = cookie +"; Comment=%s" % comment
+        if secure:
+            cookie = cookie +"; Secure"
+        self.cookies.append(cookie)
+
+    def setResponseCode(self, code, message=None):
+        """
+        Set the HTTP response code.
+
+        @type code: C{int}
+        @type message: C{bytes}
+        """
+        if not isinstance(code, _intTypes):
+            raise TypeError("HTTP response code must be int or long")
+        self.code = code
+        if message:
+            self.code_message = message
+        else:
+            self.code_message = RESPONSES.get(code, "Unknown Status")
+
+
+    def setHeader(self, name, value):
+        """
+        Set an HTTP response header.  Overrides any previously set values for
+        this header.
+
+        @type name: C{bytes}
+        @param name: The name of the header for which to set the value.
+
+        @type value: C{bytes}
+        @param value: The value to set for the named header.
+        """
+        self.responseHeaders.setRawHeaders(name, [value])
+
+
+    def redirect(self, url):
+        """
+        Utility function that does a redirect.
+
+        The request should have finish() called after this.
+        """
+        self.setResponseCode(FOUND)
+        self.setHeader(b"location", url)
+
+
+    def setLastModified(self, when):
+        """
+        Set the C{Last-Modified} time for the response to this request.
+
+        If I am called more than once, I ignore attempts to set
+        Last-Modified earlier, only replacing the Last-Modified time
+        if it is to a later value.
+
+        If I am a conditional request, I may modify my response code
+        to L{NOT_MODIFIED} if appropriate for the time given.
+
+        @param when: The last time the resource being returned was
+            modified, in seconds since the epoch.
+        @type when: number
+        @return: If I am a C{If-Modified-Since} conditional request and
+            the time given is not newer than the condition, I return
+            L{http.CACHED<CACHED>} to indicate that you should write no
+            body.  Otherwise, I return a false value.
+        """
+        # time.time() may be a float, but the HTTP-date strings are
+        # only good for whole seconds.
+        when = int(math.ceil(when))
+        if (not self.lastModified) or (self.lastModified < when):
+            self.lastModified = when
+
+        modifiedSince = self.getHeader(b'if-modified-since')
+        if modifiedSince:
+            firstPart = modifiedSince.split(b';', 1)[0]
+            try:
+                modifiedSince = stringToDatetime(firstPart)
+            except ValueError:
+                return None
+            if modifiedSince >= when:
+                self.setResponseCode(NOT_MODIFIED)
+                return CACHED
+        return None
+
+    def setETag(self, etag):
+        """
+        Set an C{entity tag} for the outgoing response.
+
+        That's \"entity tag\" as in the HTTP/1.1 C{ETag} header, \"used
+        for comparing two or more entities from the same requested
+        resource.\"
+
+        If I am a conditional request, I may modify my response code
+        to L{NOT_MODIFIED} or L{PRECONDITION_FAILED}, if appropriate
+        for the tag given.
+
+        @param etag: The entity tag for the resource being returned.
+        @type etag: string
+        @return: If I am a C{If-None-Match} conditional request and
+            the tag matches one in the request, I return
+            L{http.CACHED<CACHED>} to indicate that you should write
+            no body.  Otherwise, I return a false value.
+        """
+        if etag:
+            self.etag = etag
+
+        tags = self.getHeader(b"if-none-match")
+        if tags:
+            tags = tags.split()
+            if (etag in tags) or (b'*' in tags):
+                self.setResponseCode(((self.method in (b"HEAD", b"GET"))
+                                      and NOT_MODIFIED)
+                                     or PRECONDITION_FAILED)
+                return CACHED
+        return None
+
+
+    def getAllHeaders(self):
+        """
+        Return dictionary mapping the names of all received headers to the last
+        value received for each.
+
+        Since this method does not return all header information,
+        C{self.requestHeaders.getAllRawHeaders()} may be preferred.
+        """
+        headers = {}
+        for k, v in self.requestHeaders.getAllRawHeaders():
+            headers[k.lower()] = v[-1]
+        return headers
+
+
+    def getRequestHostname(self):
+        """
+        Get the hostname that the user passed in to the request.
+
+        This will either use the Host: header (if it is available) or the
+        host we are listening on if the header is unavailable.
+
+        @returns: the requested hostname
+        @rtype: C{bytes}
+        """
+        # XXX This method probably has no unit tests.  I changed it a ton and
+        # nothing failed.
+        host = self.getHeader(b'host')
+        if host:
+            return host.split(b':', 1)[0]
+        return networkString(self.getHost().host)
+
+
+    def getHost(self):
+        """
+        Get my originally requesting transport's host.
+
+        Don't rely on the 'transport' attribute, since Request objects may be
+        copied remotely.  For information on this method's return value, see
+        twisted.internet.tcp.Port.
+        """
+        return self.host
+
+    def setHost(self, host, port, ssl=0):
+        """
+        Change the host and port the request thinks it's using.
+
+        This method is useful for working with reverse HTTP proxies (e.g.
+        both Squid and Apache's mod_proxy can do this), when the address
+        the HTTP client is using is different than the one we're listening on.
+
+        For example, Apache may be listening on https://www.example.com/, and
+        then forwarding requests to http://localhost:8080/, but we don't want
+        HTML produced by Twisted to say b'http://localhost:8080/'; it should
+        say b'https://www.example.com/', so we do::
+
+           request.setHost(b'www.example.com', 443, ssl=1)
+
+        @type host: C{bytes}
+        @param host: The value to which to change the host header.
+
+        @type ssl: C{bool}
+        @param ssl: A flag which, if C{True}, indicates that the request is
+            considered secure (if C{True}, L{isSecure} will return C{True}).
+        """
+        self._forceSSL = ssl # set first so isSecure will work
+        if self.isSecure():
+            default = 443
+        else:
+            default = 80
+        if port == default:
+            hostHeader = host
+        else:
+            hostHeader = host + b":" + intToBytes(port)
+        self.requestHeaders.setRawHeaders(b"host", [hostHeader])
+        self.host = address.IPv4Address("TCP", host, port)
+
+
+    def getClientIP(self):
+        """
+        Return the IP address of the client who submitted this request.
+
+        @returns: the client IP address
+        @rtype: C{str}
+        """
+        if isinstance(self.client, address.IPv4Address):
+            return self.client.host
+        else:
+            return None
+
+    def isSecure(self):
+        """
+        Return True if this request is using a secure transport.
+
+        Normally this method returns True if this request's HTTPChannel
+        instance is using a transport that implements ISSLTransport.
+
+        This will also return True if setHost() has been called
+        with ssl=True.
+
+        @returns: True if this request is secure
+        @rtype: C{bool}
+        """
+        if self._forceSSL:
+            return True
+        transport = getattr(getattr(self, 'channel', None), 'transport', None)
+        if interfaces.ISSLTransport(transport, None) is not None:
+            return True
+        return False
+
+    def _authorize(self):
+        # Authorization, (mostly) per the RFC
+        try:
+            authh = self.getHeader(b"Authorization")
+            if not authh:
+                self.user = self.password = ''
+                return
+            bas, upw = authh.split()
+            if bas.lower() != b"basic":
+                raise ValueError()
+            upw = base64.decodestring(upw)
+            self.user, self.password = upw.split(b':', 1)
+        except (binascii.Error, ValueError):
+            self.user = self.password = ""
+        except:
+            log.err()
+            self.user = self.password = ""
+
+
+    def getUser(self):
+        """
+        Return the HTTP user sent with this request, if any.
+
+        If no user was supplied, return the empty string.
+
+        @returns: the HTTP user, if any
+        @rtype: C{bytes}
+        """
+        try:
+            return self.user
+        except:
+            pass
+        self._authorize()
+        return self.user
+
+
+    def getPassword(self):
+        """
+        Return the HTTP password sent with this request, if any.
+
+        If no password was supplied, return the empty string.
+
+        @returns: the HTTP password, if any
+        @rtype: C{bytes}
+        """
+        try:
+            return self.password
+        except:
+            pass
+        self._authorize()
+        return self.password
+
+
+    def getClient(self):
+        if self.client.type != 'TCP':
+            return None
+        host = self.client.host
+        try:
+            name, names, addresses = socket.gethostbyaddr(host)
+        except socket.error:
+            return host
+        names.insert(0, name)
+        for name in names:
+            if '.' in name:
+                return name
+        return names[0]
+
+
+    def connectionLost(self, reason):
+        """
+        There is no longer a connection for this request to respond over.
+        Clean up anything which can't be useful anymore.
+        """
+        self._disconnected = True
+        self.channel = None
+        if self.content is not None:
+            self.content.close()
+        for d in self.notifications:
+            d.errback(reason)
+        self.notifications = []
+
+
+
+class _DataLoss(Exception):
+    """
+    L{_DataLoss} indicates that not all of a message body was received. This
+    is only one of several possible exceptions which may indicate that data
+    was lost.  Because of this, it should not be checked for
+    specifically; any unexpected exception should be treated as having
+    caused data loss.
+    """
+
+
+
+class PotentialDataLoss(Exception):
+    """
+    L{PotentialDataLoss} may be raised by a transfer encoding decoder's
+    C{noMoreData} method to indicate that it cannot be determined if the
+    entire response body has been delivered.  This only occurs when making
+    requests to HTTP servers which do not set I{Content-Length} or a
+    I{Transfer-Encoding} in the response because in this case the end of the
+    response is indicated by the connection being closed, an event which may
+    also be due to a transient network problem or other error.
+    """
+
+
+
+class _MalformedChunkedDataError(Exception):
+    """
+    C{_ChunkedTransferDecoder} raises L{_MalformedChunkedDataError} from its
+    C{dataReceived} method when it encounters malformed data. This exception
+    indicates a client-side error. If this exception is raised, the connection
+    should be dropped with a 400 error.
+    """
+
+
+
+class _IdentityTransferDecoder(object):
+    """
+    Protocol for accumulating bytes up to a specified length.  This handles the
+    case where no I{Transfer-Encoding} is specified.
+
+    @ivar contentLength: Counter keeping track of how many more bytes there are
+        to receive.
+
+    @ivar dataCallback: A one-argument callable which will be invoked each
+        time application data is received.
+
+    @ivar finishCallback: A one-argument callable which will be invoked when
+        the terminal chunk is received.  It will be invoked with all bytes
+        which were delivered to this protocol which came after the terminal
+        chunk.
+    """
+    def __init__(self, contentLength, dataCallback, finishCallback):
+        self.contentLength = contentLength
+        self.dataCallback = dataCallback
+        self.finishCallback = finishCallback
+
+
+    def dataReceived(self, data):
+        """
+        Interpret the next chunk of bytes received.  Either deliver them to the
+        data callback or invoke the finish callback if enough bytes have been
+        received.
+
+        @raise RuntimeError: If the finish callback has already been invoked
+            during a previous call to this method.
+        """
+        if self.dataCallback is None:
+            raise RuntimeError(
+                "_IdentityTransferDecoder cannot decode data after finishing")
+
+        if self.contentLength is None:
+            self.dataCallback(data)
+        elif len(data) < self.contentLength:
+            self.contentLength -= len(data)
+            self.dataCallback(data)
+        else:
+            # Make the state consistent before invoking any code belonging to
+            # anyone else in case noMoreData ends up being called beneath this
+            # stack frame.
+            contentLength = self.contentLength
+            dataCallback = self.dataCallback
+            finishCallback = self.finishCallback
+            self.dataCallback = self.finishCallback = None
+            self.contentLength = 0
+
+            dataCallback(data[:contentLength])
+            finishCallback(data[contentLength:])
+
+
+    def noMoreData(self):
+        """
+        All data which will be delivered to this decoder has been.  Check to
+        make sure as much data as was expected has been received.
+
+        @raise PotentialDataLoss: If the content length is unknown.
+        @raise _DataLoss: If the content length is known and fewer than that
+            many bytes have been delivered.
+
+        @return: C{None}
+        """
+        finishCallback = self.finishCallback
+        self.dataCallback = self.finishCallback = None
+        if self.contentLength is None:
+            finishCallback(b'')
+            raise PotentialDataLoss()
+        elif self.contentLength != 0:
+            raise _DataLoss()
+
+
+
+class _ChunkedTransferDecoder(object):
+    """
+    Protocol for decoding I{chunked} Transfer-Encoding, as defined by RFC 2616,
+    section 3.6.1.  This protocol can interpret the contents of a request or
+    response body which uses the I{chunked} Transfer-Encoding.  It cannot
+    interpret any of the rest of the HTTP protocol.
+
+    It may make sense for _ChunkedTransferDecoder to be an actual IProtocol
+    implementation.  Currently, the only user of this class will only ever
+    call dataReceived on it.  However, it might be an improvement if the
+    user could connect this to a transport and deliver connection lost
+    notification.  This way, `dataCallback` becomes `self.transport.write`
+    and perhaps `finishCallback` becomes `self.transport.loseConnection()`
+    (although I'm not sure where the extra data goes in that case).  This
+    could also allow this object to indicate to the receiver of data that
+    the stream was not completely received, an error case which should be
+    noticed. -exarkun
+
+    @ivar dataCallback: A one-argument callable which will be invoked each
+        time application data is received.
+
+    @ivar finishCallback: A one-argument callable which will be invoked when
+        the terminal chunk is received.  It will be invoked with all bytes
+        which were delivered to this protocol which came after the terminal
+        chunk.
+
+    @ivar length: Counter keeping track of how many more bytes in a chunk there
+        are to receive.
+
+    @ivar state: One of C{'CHUNK_LENGTH'}, C{'CRLF'}, C{'TRAILER'},
+        C{'BODY'}, or C{'FINISHED'}.  For C{'CHUNK_LENGTH'}, data for the
+        chunk length line is currently being read.  For C{'CRLF'}, the CR LF
+        pair which follows each chunk is being read. For C{'TRAILER'}, the CR
+        LF pair which follows the terminal 0-length chunk is currently being
+        read. For C{'BODY'}, the contents of a chunk are being read. For
+        C{'FINISHED'}, the last chunk has been completely read and no more
+        input is valid.
+    """
+    state = 'CHUNK_LENGTH'
+
+    def __init__(self, dataCallback, finishCallback):
+        self.dataCallback = dataCallback
+        self.finishCallback = finishCallback
+        self._buffer = b''
+
+
+    def _dataReceived_CHUNK_LENGTH(self, data):
+        if b'\r\n' in data:
+            line, rest = data.split(b'\r\n', 1)
+            parts = line.split(b';')
+            try:
+                self.length = int(parts[0], 16)
+            except ValueError:
+                raise _MalformedChunkedDataError(
+                    "Chunk-size must be an integer.")
+            if self.length == 0:
+                self.state = 'TRAILER'
+            else:
+                self.state = 'BODY'
+            return rest
+        else:
+            self._buffer = data
+            return b''
+
+
+    def _dataReceived_CRLF(self, data):
+        if data.startswith(b'\r\n'):
+            self.state = 'CHUNK_LENGTH'
+            return data[2:]
+        else:
+            self._buffer = data
+            return b''
+
+
+    def _dataReceived_TRAILER(self, data):
+        if data.startswith(b'\r\n'):
+            data = data[2:]
+            self.state = 'FINISHED'
+            self.finishCallback(data)
+        else:
+            self._buffer = data
+        return b''
+
+
+    def _dataReceived_BODY(self, data):
+        if len(data) >= self.length:
+            chunk, data = data[:self.length], data[self.length:]
+            self.dataCallback(chunk)
+            self.state = 'CRLF'
+            return data
+        elif len(data) < self.length:
+            self.length -= len(data)
+            self.dataCallback(data)
+            return b''
+
+
+    def _dataReceived_FINISHED(self, data):
+        raise RuntimeError(
+            "_ChunkedTransferDecoder.dataReceived called after last "
+            "chunk was processed")
+
+
+    def dataReceived(self, data):
+        """
+        Interpret data from a request or response body which uses the
+        I{chunked} Transfer-Encoding.
+        """
+        data = self._buffer + data
+        self._buffer = b''
+        while data:
+            data = getattr(self, '_dataReceived_%s' % (self.state,))(data)
+
+
+    def noMoreData(self):
+        """
+        Verify that all data has been received.  If it has not been, raise
+        L{_DataLoss}.
+        """
+        if self.state != 'FINISHED':
+            raise _DataLoss(
+                "Chunked decoder in %r state, still expecting more data to "
+                "get to 'FINISHED' state." % (self.state,))
+
+
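+
+# A minimal usage sketch of the chunked decoder above; the helper name and
+# the sample bytes are illustrative only.  A single five-byte chunk is split
+# across two dataReceived calls and followed by the terminating zero-length
+# chunk.
+def _exampleChunkedDecoding():
+    chunks = []
+    trailing = []
+    decoder = _ChunkedTransferDecoder(chunks.append, trailing.append)
+    decoder.dataReceived(b'5\r\nhel')
+    decoder.dataReceived(b'lo\r\n0\r\n\r\n')
+    decoder.noMoreData()             # no exception: state is 'FINISHED'
+    return chunks, trailing          # ([b'hel', b'lo'], [b''])
+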
+
+class HTTPChannel(basic.LineReceiver, policies.TimeoutMixin):
+    """
+    A receiver for HTTP requests.
+
+    @ivar _transferDecoder: C{None} or an instance of
+        L{_ChunkedTransferDecoder} if the request body uses the I{chunked}
+        Transfer-Encoding.
+    """
+
+    maxHeaders = 500 # max number of headers allowed per request
+
+    length = 0
+    persistent = 1
+    __header = ''
+    __first_line = 1
+    __content = None
+
+    # set in instances or subclasses
+    requestFactory = Request
+
+    _savedTimeOut = None
+    _receivedHeaderCount = 0
+
+    def __init__(self):
+        # the request queue
+        self.requests = []
+        self._transferDecoder = None
+
+
+    def connectionMade(self):
+        self.setTimeout(self.timeOut)
+
+    def lineReceived(self, line):
+        self.resetTimeout()
+
+        if self.__first_line:
+            # if this connection is not persistent, drop any data which
+            # the client (illegally) sent after the last request.
+            if not self.persistent:
+                self.dataReceived = self.lineReceived = lambda *args: None
+                return
+
+            # IE sends an extraneous empty line (\r\n) after a POST request;
+            # eat up such a line, but only ONCE
+            if not line and self.__first_line == 1:
+                self.__first_line = 2
+                return
+
+            # create a new Request object
+            request = self.requestFactory(self, len(self.requests))
+            self.requests.append(request)
+
+            self.__first_line = 0
+            parts = line.split()
+            if len(parts) != 3:
+                self.transport.write(b"HTTP/1.1 400 Bad Request\r\n\r\n")
+                self.transport.loseConnection()
+                return
+            command, request, version = parts
+            self._command = command
+            self._path = request
+            self._version = version
+        elif line == b'':
+            if self.__header:
+                self.headerReceived(self.__header)
+            self.__header = ''
+            self.allHeadersReceived()
+            if self.length == 0:
+                self.allContentReceived()
+            else:
+                self.setRawMode()
+        elif line[0] in b' \t':
+            self.__header = self.__header + '\n' + line
+        else:
+            if self.__header:
+                self.headerReceived(self.__header)
+            self.__header = line
+
+
+    def _finishRequestBody(self, data):
+        self.allContentReceived()
+        self.setLineMode(data)
+
+
+    def headerReceived(self, line):
+        """
+        Do pre-processing (for content-length) and store this header away.
+        Enforce the per-request header limit.
+
+        @type line: C{bytes}
+        @param line: A line from the header section of a request, excluding the
+            line delimiter.
+        """
+        header, data = line.split(b':', 1)
+        header = header.lower()
+        data = data.strip()
+        if header == b'content-length':
+            try:
+                self.length = int(data)
+            except ValueError:
+                self.transport.write(b"HTTP/1.1 400 Bad Request\r\n\r\n")
+                self.length = None
+                self.transport.loseConnection()
+                return
+            self._transferDecoder = _IdentityTransferDecoder(
+                self.length, self.requests[-1].handleContentChunk, self._finishRequestBody)
+        elif header == b'transfer-encoding' and data.lower() == b'chunked':
+            # XXX Rather poorly tested code block, apparently only exercised by
+            # test_chunkedEncoding
+            self.length = None
+            self._transferDecoder = _ChunkedTransferDecoder(
+                self.requests[-1].handleContentChunk, self._finishRequestBody)
+        reqHeaders = self.requests[-1].requestHeaders
+        values = reqHeaders.getRawHeaders(header)
+        if values is not None:
+            values.append(data)
+        else:
+            reqHeaders.setRawHeaders(header, [data])
+
+        self._receivedHeaderCount += 1
+        if self._receivedHeaderCount > self.maxHeaders:
+            self.transport.write(b"HTTP/1.1 400 Bad Request\r\n\r\n")
+            self.transport.loseConnection()
+
+
+    def allContentReceived(self):
+        command = self._command
+        path = self._path
+        version = self._version
+
+        # reset ALL state variables, so we don't interfere with next request
+        self.length = 0
+        self._receivedHeaderCount = 0
+        self.__first_line = 1
+        self._transferDecoder = None
+        del self._command, self._path, self._version
+
+        # Disable the idle timeout, in case this request takes a long
+        # time to finish generating output.
+        if self.timeOut:
+            self._savedTimeOut = self.setTimeout(None)
+
+        req = self.requests[-1]
+        req.requestReceived(command, path, version)
+
+
+    def rawDataReceived(self, data):
+        self.resetTimeout()
+        try:
+            self._transferDecoder.dataReceived(data)
+        except _MalformedChunkedDataError:
+            self.transport.write(b"HTTP/1.1 400 Bad Request\r\n\r\n")
+            self.transport.loseConnection()
+
+
+    def allHeadersReceived(self):
+        req = self.requests[-1]
+        req.parseCookies()
+        self.persistent = self.checkPersistence(req, self._version)
+        req.gotLength(self.length)
+        # Handle 'Expect: 100-continue' with automated 100 response code,
+        # a simplistic implementation of RFC 2616 section 8.2.3:
+        expectContinue = req.requestHeaders.getRawHeaders(b'expect')
+        if (expectContinue and expectContinue[0].lower() == b'100-continue' and
+            self._version == b'HTTP/1.1'):
+            req.transport.write(b"HTTP/1.1 100 Continue\r\n\r\n")
+
+
+    def checkPersistence(self, request, version):
+        """
+        Check if the channel should close or not.
+
+        @param request: The request most recently received over this channel
+            against which checks will be made to determine if this connection
+            can remain open after a matching response is returned.
+
+        @type version: C{bytes}
+        @param version: The version of the request.
+
+        @rtype: C{bool}
+        @return: A flag which, if C{True}, indicates that this connection may
+            remain open to receive another request; if C{False}, the connection
+            must be closed in order to indicate the completion of the response
+            to C{request}.
+        """
+        connection = request.requestHeaders.getRawHeaders(b'connection')
+        if connection:
+            tokens = [t.lower() for t in connection[0].split(b' ')]
+        else:
+            tokens = []
+
+        # HTTP 1.0 persistent connection support is currently disabled,
+        # since we need a way to disable pipelining. HTTP 1.0 can't do
+        # pipelining since we can't know in advance if we'll have a
+        # content-length header; if we don't have the header, we need to close the
+        # connection. In HTTP 1.1 this is not an issue since we use chunked
+        # encoding if content-length is not available.
+
+        #if version == "HTTP/1.0":
+        #    if 'keep-alive' in tokens:
+        #        request.setHeader('connection', 'Keep-Alive')
+        #        return 1
+        #    else:
+        #        return 0
+        if version == b"HTTP/1.1":
+            if b'close' in tokens:
+                request.responseHeaders.setRawHeaders(b'connection', [b'close'])
+                return False
+            else:
+                return True
+        else:
+            return False
+
+
+    def requestDone(self, request):
+        """
+        Called by first request in queue when it is done.
+        """
+        if request != self.requests[0]:
+            raise TypeError
+        del self.requests[0]
+
+        if self.persistent:
+            # notify next request it can start writing
+            if self.requests:
+                self.requests[0].noLongerQueued()
+            else:
+                if self._savedTimeOut:
+                    self.setTimeout(self._savedTimeOut)
+        else:
+            self.transport.loseConnection()
+
+    def timeoutConnection(self):
+        log.msg("Timing out client: %s" % str(self.transport.getPeer()))
+        policies.TimeoutMixin.timeoutConnection(self)
+
+    def connectionLost(self, reason):
+        self.setTimeout(None)
+        for request in self.requests:
+            request.connectionLost(reason)
+
+
+class HTTPFactory(protocol.ServerFactory):
+    """
+    Factory for HTTP server.
+
+    @ivar _logDateTime: A cached datetime string for log messages, updated by
+        C{_logDateTimeCall}.
+    @type _logDateTime: C{str}
+
+    @ivar _logDateTimeCall: A delayed call for the next update to the cached
+        log datetime string.
+    @type _logDateTimeCall: L{IDelayedCall} provider
+    """
+
+    protocol = HTTPChannel
+
+    logPath = None
+
+    timeOut = 60 * 60 * 12
+
+    def __init__(self, logPath=None, timeout=60*60*12):
+        if logPath is not None:
+            logPath = os.path.abspath(logPath)
+        self.logPath = logPath
+        self.timeOut = timeout
+
+        # For storing the cached log datetime and the callback to update it
+        self._logDateTime = None
+        self._logDateTimeCall = None
+
+
+    def _updateLogDateTime(self):
+        """
+        Update log datetime periodically, so we aren't always recalculating it.
+        """
+        self._logDateTime = datetimeToLogString()
+        self._logDateTimeCall = reactor.callLater(1, self._updateLogDateTime)
+
+
+    def buildProtocol(self, addr):
+        p = protocol.ServerFactory.buildProtocol(self, addr)
+        # timeOut needs to be on the Protocol instance cause
+        # TimeoutMixin expects it there
+        p.timeOut = self.timeOut
+        return p
+
+
+    def startFactory(self):
+        """
+        Set up request logging if necessary.
+        """
+        if self._logDateTimeCall is None:
+            self._updateLogDateTime()
+
+        if self.logPath:
+            self.logFile = self._openLogFile(self.logPath)
+        else:
+            self.logFile = log.logfile
+
+
+    def stopFactory(self):
+        if hasattr(self, "logFile"):
+            if self.logFile != log.logfile:
+                self.logFile.close()
+            del self.logFile
+
+        if self._logDateTimeCall is not None and self._logDateTimeCall.active():
+            self._logDateTimeCall.cancel()
+            self._logDateTimeCall = None
+
+
+    def _openLogFile(self, path):
+        """
+        Override in subclasses, e.g. to use twisted.python.logfile.
+        """
+        f = open(path, "a", 1)
+        return f
+
+    def _escape(self, s):
+        # pain in the ass. Return a string like python repr, but always
+        # escaped as if surrounding quotes were "".
+        try:
+            s = nativeString(s)
+        except UnicodeError:
+            pass
+        r = repr(s)
+        if r[0] == "'":
+            return r[1:-1].replace('"', '\\"').replace("\\'", "'")
+        return r[1:-1]
+
+    def log(self, request):
+        """
+        Log a request's result to the logfile, by default in combined log format.
+        """
+        if hasattr(self, "logFile"):
+            line = '%s - - %s "%s" %d %s "%s" "%s"\n' % (
+                request.getClientIP(),
+                # request.getUser() or "-", # the remote user is almost never important
+                self._logDateTime,
+                '%s %s %s' % (self._escape(request.method),
+                              self._escape(request.uri),
+                              self._escape(request.clientproto)),
+                request.code,
+                request.sentLength or "-",
+                self._escape(request.getHeader("referer") or "-"),
+                self._escape(request.getHeader("user-agent") or "-"))
+            self.logFile.write(line)
diff --git a/ThirdParty/Twisted/twisted/web/http_headers.py b/ThirdParty/Twisted/twisted/web/http_headers.py
new file mode 100644
index 0000000..f0790b9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/http_headers.py
@@ -0,0 +1,261 @@
+# -*- test-case-name: twisted.web.test.test_http_headers -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An API for storing HTTP header names and values.
+"""
+
+from __future__ import division, absolute_import
+
+from collections import MutableMapping
+
+from twisted.python.compat import comparable, cmp
+
+
+def _dashCapitalize(name):
+    """
+    Return a byte string which is capitalized using '-' as a word separator.
+
+    @param name: The name of the header to capitalize.
+    @type name: C{bytes}
+
+    @return: The given header capitalized using '-' as a word separator.
+    @rtype: C{bytes}
+    """
+    return b'-'.join([word.capitalize() for word in name.split(b'-')])
+
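+# Illustrative examples (not part of the module API):
+#     _dashCapitalize(b'content-type')     ==>  b'Content-Type'
+#     _dashCapitalize(b'x-forwarded-for')  ==>  b'X-Forwarded-For'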
+
+
+class _DictHeaders(MutableMapping):
+    """
+    A C{dict}-like wrapper around L{Headers} to provide backwards compatibility
+    for L{twisted.web.http.Request.received_headers} and
+    L{twisted.web.http.Request.headers} which used to be plain C{dict}
+    instances.
+
+    @type _headers: L{Headers}
+    @ivar _headers: The real header storage object.
+    """
+    def __init__(self, headers):
+        self._headers = headers
+
+
+    def __getitem__(self, key):
+        """
+        Return the last value for header of C{key}.
+        """
+        if self._headers.hasHeader(key):
+            return self._headers.getRawHeaders(key)[-1]
+        raise KeyError(key)
+
+
+    def __setitem__(self, key, value):
+        """
+        Set the given header.
+        """
+        self._headers.setRawHeaders(key, [value])
+
+
+    def __delitem__(self, key):
+        """
+        Delete the given header.
+        """
+        if self._headers.hasHeader(key):
+            self._headers.removeHeader(key)
+        else:
+            raise KeyError(key)
+
+
+    def __iter__(self):
+        """
+        Return an iterator of the lowercase name of each header present.
+        """
+        for k, v in self._headers.getAllRawHeaders():
+            yield k.lower()
+
+
+    def __len__(self):
+        """
+        Return the number of distinct headers present.
+        """
+        # XXX Too many _
+        return len(self._headers._rawHeaders)
+
+
+    # Extra methods that MutableMapping doesn't care about but that we do.
+    def copy(self):
+        """
+        Return a C{dict} mapping each header name to the last corresponding
+        header value.
+        """
+        return dict(self.items())
+
+
+    def has_key(self, key):
+        """
+        Return C{True} if C{key} is a header in this collection, C{False}
+        otherwise.
+        """
+        return key in self
+
+
+
+@comparable
+class Headers(object):
+    """
+    This class stores the HTTP headers as both a parsed representation
+    and the raw string representation. It converts between the two on
+    demand.
+
+    @cvar _caseMappings: A C{dict} that maps lowercase header names
+        to their canonicalized representation.
+
+    @ivar _rawHeaders: A C{dict} mapping header names as C{bytes} to C{lists} of
+        header values as C{bytes}.
+    """
+    _caseMappings = {
+        b'content-md5': b'Content-MD5',
+        b'dnt': b'DNT',
+        b'etag': b'ETag',
+        b'p3p': b'P3P',
+        b'te': b'TE',
+        b'www-authenticate': b'WWW-Authenticate',
+        b'x-xss-protection': b'X-XSS-Protection'}
+
+    def __init__(self, rawHeaders=None):
+        self._rawHeaders = {}
+        if rawHeaders is not None:
+            for name, values in rawHeaders.items():
+                self.setRawHeaders(name, values[:])
+
+
+    def __repr__(self):
+        """
+        Return a string fully describing the headers set on this object.
+        """
+        return '%s(%r)' % (self.__class__.__name__, self._rawHeaders,)
+
+
+    def __cmp__(self, other):
+        """
+        Define L{Headers} instances as being equal to each other if they have
+        the same raw headers.
+        """
+        if isinstance(other, Headers):
+            return cmp(
+                sorted(self._rawHeaders.items()),
+                sorted(other._rawHeaders.items()))
+        return NotImplemented
+
+
+    def copy(self):
+        """
+        Return a copy of itself with the same headers set.
+        """
+        return self.__class__(self._rawHeaders)
+
+
+    def hasHeader(self, name):
+        """
+        Check for the existence of a given header.
+
+        @type name: C{bytes}
+        @param name: The name of the HTTP header to check for.
+
+        @rtype: C{bool}
+        @return: C{True} if the header exists, otherwise C{False}.
+        """
+        return name.lower() in self._rawHeaders
+
+
+    def removeHeader(self, name):
+        """
+        Remove the named header from this header object.
+
+        @type name: C{bytes}
+        @param name: The name of the HTTP header to remove.
+
+        @return: C{None}
+        """
+        self._rawHeaders.pop(name.lower(), None)
+
+
+    def setRawHeaders(self, name, values):
+        """
+        Sets the raw representation of the given header.
+
+        @type name: C{bytes}
+        @param name: The name of the HTTP header to set the values for.
+
+        @type values: C{list}
+        @param values: A list of strings each one being a header value of
+            the given name.
+
+        @return: C{None}
+        """
+        if not isinstance(values, list):
+            raise TypeError("Header entry %r should be list but found "
+                            "instance of %r instead" % (name, type(values)))
+        self._rawHeaders[name.lower()] = values
+
+
+    def addRawHeader(self, name, value):
+        """
+        Add a new raw value for the given header.
+
+        @type name: C{bytes}
+        @param name: The name of the header for which to set the value.
+
+        @type value: C{bytes}
+        @param value: The value to set for the named header.
+        """
+        values = self.getRawHeaders(name)
+        if values is None:
+            self.setRawHeaders(name, [value])
+        else:
+            values.append(value)
+
+
+    def getRawHeaders(self, name, default=None):
+        """
+        Return the list of raw values which have been set for the given
+        header name.
+
+        @type name: C{bytes}
+        @param name: The name of the HTTP header to get the values of.
+
+        @param default: The value to return if no header with the given C{name}
+            exists.
+
+        @rtype: C{list}
+        @return: A C{list} of values for the given header.
+        """
+        return self._rawHeaders.get(name.lower(), default)
+
+
+    def getAllRawHeaders(self):
+        """
+        Return an iterator of key, value pairs of all headers contained in this
+        object, as strings.  The keys are capitalized in canonical
+        capitalization.
+        """
+        for k, v in self._rawHeaders.items():
+            yield self._canonicalNameCaps(k), v
+
+
+    def _canonicalNameCaps(self, name):
+        """
+        Return the canonical name for the given header.
+
+        @type name: C{bytes}
+        @param name: The all-lowercase header name to capitalize in its
+            canonical form.
+
+        @rtype: C{bytes}
+        @return: The canonical name of the header.
+        """
+        return self._caseMappings.get(name, _dashCapitalize(name))
+
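+
+# A brief usage sketch of the Headers API defined above; the header names and
+# values are arbitrary examples and the helper name is illustrative only.
+def _exampleHeadersUsage():
+    h = Headers({b'content-type': [b'text/html']})
+    h.addRawHeader(b'x-powered-by', b'twisted')
+    h.addRawHeader(b'X-Powered-By', b'example')  # same header, case-insensitively
+    assert h.hasHeader(b'Content-Type')
+    assert h.getRawHeaders(b'x-powered-by') == [b'twisted', b'example']
+    # getAllRawHeaders yields canonically capitalized names.
+    return dict(h.getAllRawHeaders())
+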
+
+__all__ = ['Headers']
diff --git a/ThirdParty/Twisted/twisted/web/iweb.py b/ThirdParty/Twisted/twisted/web/iweb.py
new file mode 100644
index 0000000..7a0279b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/iweb.py
@@ -0,0 +1,591 @@
+# -*- test-case-name: twisted.web.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Interface definitions for L{twisted.web}.
+
+@var UNKNOWN_LENGTH: An opaque object which may be used as the value of
+    L{IBodyProducer.length} to indicate that the length of the entity
+    body is not known in advance.
+"""
+
+from zope.interface import Interface, Attribute
+
+from twisted.python.compat import _PY3
+from twisted.internet.interfaces import IPushProducer
+if not _PY3:
+    # Re-enable when cred is ported to Python 3. Fix as part of #6176:
+    from twisted.cred.credentials import IUsernameDigestHash
+
+
+class IRequest(Interface):
+    """
+    An HTTP request.
+
+    @since: 9.0
+    """
+
+    method = Attribute("A C{str} giving the HTTP method that was used.")
+    uri = Attribute(
+        "A C{str} giving the full encoded URI which was requested (including "
+        "query arguments).")
+    path = Attribute(
+        "A C{str} giving the encoded query path of the request URI.")
+    args = Attribute(
+        "A mapping of decoded query argument names as C{str} to "
+        "corresponding query argument values as C{list}s of C{str}.  "
+        "For example, for a URI with C{'foo=bar&foo=baz&quux=spam'} "
+        "for its query part, C{args} will be C{{'foo': ['bar', 'baz'], "
+        "'quux': ['spam']}}.")
+
+    received_headers = Attribute(
+        "Backwards-compatibility access to C{requestHeaders}.  Use "
+        "C{requestHeaders} instead.  C{received_headers} behaves mostly "
+        "like a C{dict} and does not provide access to all header values.")
+
+    requestHeaders = Attribute(
+        "A L{http_headers.Headers} instance giving all received HTTP request "
+        "headers.")
+
+    content = Attribute(
+        "A file-like object giving the request body.  This may be a file on "
+        "disk, a C{StringIO}, or some other type.  The implementation is free "
+        "to decide on a per-request basis.")
+
+    headers = Attribute(
+        "Backwards-compatibility access to C{responseHeaders}.  Use"
+        "C{responseHeaders} instead.  C{headers} behaves mostly like a "
+        "C{dict} and does not provide access to all header values nor "
+        "does it allow multiple values for one header to be set.")
+
+    responseHeaders = Attribute(
+        "A L{http_headers.Headers} instance holding all HTTP response "
+        "headers to be sent.")
+
+    def getHeader(key):
+        """
+        Get an HTTP request header.
+
+        @type key: C{str}
+        @param key: The name of the header to get the value of.
+
+        @rtype: C{str} or C{NoneType}
+        @return: The value of the specified header, or C{None} if that header
+            was not present in the request.
+        """
+
+
+    def getCookie(key):
+        """
+        Get a cookie that was sent from the network.
+        """
+
+
+    def getAllHeaders():
+        """
+        Return dictionary mapping the names of all received headers to the last
+        value received for each.
+
+        Since this method does not return all header information,
+        C{requestHeaders.getAllRawHeaders()} may be preferred.
+        """
+
+
+    def getRequestHostname():
+        """
+        Get the hostname that the user passed in to the request.
+
+        This will either use the Host: header (if it is available) or the
+        host we are listening on if the header is unavailable.
+
+        @returns: the requested hostname
+        @rtype: C{str}
+        """
+
+
+    def getHost():
+        """
+        Get my originally requesting transport's host.
+
+        @return: An L{IAddress<twisted.internet.interfaces.IAddress>}.
+        """
+
+
+    def getClientIP():
+        """
+        Return the IP address of the client who submitted this request.
+
+        @returns: the client IP address or C{None} if the request was submitted
+            over a transport where IP addresses do not make sense.
+        @rtype: L{str} or C{NoneType}
+        """
+
+
+    def getClient():
+        """
+        Return the hostname of the IP address of the client who submitted this
+        request, if possible.
+
+        This method is B{deprecated}.  See L{getClientIP} instead.
+
+        @rtype: C{NoneType} or L{str}
+        @return: The canonical hostname of the client, as determined by
+            performing a name lookup on the IP address of the client.
+        """
+
+
+    def getUser():
+        """
+        Return the HTTP user sent with this request, if any.
+
+        If no user was supplied, return the empty string.
+
+        @returns: the HTTP user, if any
+        @rtype: C{str}
+        """
+
+
+    def getPassword():
+        """
+        Return the HTTP password sent with this request, if any.
+
+        If no password was supplied, return the empty string.
+
+        @returns: the HTTP password, if any
+        @rtype: C{str}
+        """
+
+
+    def isSecure():
+        """
+        Return True if this request is using a secure transport.
+
+        Normally this method returns True if this request's HTTPChannel
+        instance is using a transport that implements ISSLTransport.
+
+        This will also return True if setHost() has been called
+        with ssl=True.
+
+        @returns: True if this request is secure
+        @rtype: C{bool}
+        """
+
+
+    def getSession(sessionInterface=None):
+        """
+        Look up the session associated with this request or create a new one if
+        there is not one.
+
+        @return: The L{Session} instance identified by the session cookie in
+            the request, or the C{sessionInterface} component of that session
+            if C{sessionInterface} is specified.
+        """
+
+
+    def URLPath():
+        """
+        @return: A L{URLPath} instance which identifies the URL for which this
+            request is.
+        """
+
+
+    def prePathURL():
+        """
+        @return: At any time during resource traversal, a L{str} giving an
+            absolute URL to the most nested resource which has yet been
+            reached.
+        """
+
+
+    def rememberRootURL():
+        """
+        Remember the currently-processed part of the URL for later
+        recalling.
+        """
+
+
+    def getRootURL():
+        """
+        Get a previously-remembered URL.
+        """
+
+
+    # Methods for outgoing response
+    def finish():
+        """
+        Indicate that the response to this request is complete.
+        """
+
+
+    def write(data):
+        """
+        Write some data to the body of the response to this request.  Response
+        headers are written the first time this method is called, after which
+        new response headers may not be added.
+        """
+
+
+    def addCookie(k, v, expires=None, domain=None, path=None, max_age=None, comment=None, secure=None):
+        """
+        Set an outgoing HTTP cookie.
+
+        In general, you should consider using sessions instead of cookies, see
+        L{twisted.web.server.Request.getSession} and the
+        L{twisted.web.server.Session} class for details.
+        """
+
+
+    def setResponseCode(code, message=None):
+        """
+        Set the HTTP response code.
+        """
+
+
+    def setHeader(k, v):
+        """
+        Set an HTTP response header.  Overrides any previously set values for
+        this header.
+
+        @type k: C{str}
+        @param k: The name of the header for which to set the value.
+
+        @type v: C{str}
+        @param v: The value to set for the named header.
+        """
+
+
+    def redirect(url):
+        """
+        Utility function that does a redirect.
+
+        The request should have finish() called after this.
+        """
+
+
+    def setLastModified(when):
+        """
+        Set the C{Last-Modified} time for the response to this request.
+
+        If I am called more than once, I ignore attempts to set Last-Modified
+        earlier, only replacing the Last-Modified time if it is to a later
+        value.
+
+        If I am a conditional request, I may modify my response code to
+        L{NOT_MODIFIED<http.NOT_MODIFIED>} if appropriate for the time given.
+
+        @param when: The last time the resource being returned was modified, in
+            seconds since the epoch.
+        @type when: L{int}, L{long} or L{float}
+
+        @return: If I am a C{If-Modified-Since} conditional request and the time
+            given is not newer than the condition, I return
+            L{CACHED<http.CACHED>} to indicate that you should write no body.
+            Otherwise, I return a false value.
+        """
+
+
+    def setETag(etag):
+        """
+        Set an C{entity tag} for the outgoing response.
+
+        That's "entity tag" as in the HTTP/1.1 I{ETag} header, "used for
+        comparing two or more entities from the same requested resource."
+
+        If I am a conditional request, I may modify my response code to
+        L{NOT_MODIFIED<http.NOT_MODIFIED>} or
+        L{PRECONDITION_FAILED<http.PRECONDITION_FAILED>}, if appropriate for the
+        tag given.
+
+        @param etag: The entity tag for the resource being returned.
+        @type etag: C{str}
+
+        @return: If I am a C{If-None-Match} conditional request and the tag
+            matches one in the request, I return L{CACHED<http.CACHED>} to
+            indicate that you should write no body.  Otherwise, I return a
+            false value.
+        """
+
+
+    def setHost(host, port, ssl=0):
+        """
+        Change the host and port the request thinks it's using.
+
+        This method is useful for working with reverse HTTP proxies (e.g.  both
+        Squid and Apache's mod_proxy can do this), when the address the HTTP
+        client is using is different than the one we're listening on.
+
+        For example, Apache may be listening on https://www.example.com, and
+        then forwarding requests to http://localhost:8080, but we don't want
+        HTML produced by Twisted to say 'http://localhost:8080', they should
+        say 'https://www.example.com', so we do::
+
+           request.setHost('www.example.com', 443, ssl=1)
+        """
+
+
+
+class ICredentialFactory(Interface):
+    """
+    A credential factory defines a way to generate a particular kind of
+    authentication challenge and a way to interpret the responses to these
+    challenges.  It creates
+    L{ICredentials<twisted.cred.credentials.ICredentials>} providers from
+    responses.  These objects will be used with L{twisted.cred} to authenticate
+    and authorize requests.
+    """
+    scheme = Attribute(
+        "A C{str} giving the name of the authentication scheme with which "
+        "this factory is associated.  For example, C{'basic'} or C{'digest'}.")
+
+
+    def getChallenge(request):
+        """
+        Generate a new challenge to be sent to a client.
+
+        @type request: L{twisted.web.http.Request}
+        @param request: The request whose response will include this
+            challenge.
+
+        @rtype: C{dict}
+        @return: A mapping from C{str} challenge fields to associated C{str}
+            values.
+        """
+
+
+    def decode(response, request):
+        """
+        Create a credentials object from the given response.
+
+        @type response: C{str}
+        @param response: scheme specific response string
+
+        @type request: L{twisted.web.http.Request}
+        @param request: The request being processed (from which the response
+            was taken).
+
+        @raise twisted.cred.error.LoginFailed: If the response is invalid.
+
+        @rtype: L{twisted.cred.credentials.ICredentials} provider
+        @return: The credentials represented by the given response.
+        """
+
+
+
+class IBodyProducer(IPushProducer):
+    """
+    Objects which provide L{IBodyProducer} write bytes to an object which
+    provides L{IConsumer<twisted.internet.interfaces.IConsumer>} by calling its
+    C{write} method repeatedly.
+
+    L{IBodyProducer} providers may start producing as soon as they have an
+    L{IConsumer<twisted.internet.interfaces.IConsumer>} provider.  That is, they
+    should not wait for a C{resumeProducing} call to begin writing data.
+
+    L{IConsumer.unregisterProducer<twisted.internet.interfaces.IConsumer.unregisterProducer>}
+    must not be called.  Instead, the
+    L{Deferred<twisted.internet.defer.Deferred>} returned from C{startProducing}
+    must be fired when all bytes have been written.
+
+    L{IConsumer.write<twisted.internet.interfaces.IConsumer.write>} may
+    synchronously invoke any of C{pauseProducing}, C{resumeProducing}, or
+    C{stopProducing}.  These methods must be implemented with this in mind.
+
+    @since: 9.0
+    """
+
+    # Despite the restrictions above and the additional requirements of
+    # stopProducing documented below, this interface still needs to be an
+    # IPushProducer subclass.  Providers of it will be passed to IConsumer
+    # providers which only know about IPushProducer and IPullProducer, not
+    # about this interface.  This interface needs to remain close enough to one
+    # of those interfaces for consumers to work with it.
+
+    length = Attribute(
+        """
+        C{length} is a C{int} indicating how many bytes in total this
+        L{IBodyProducer} will write to the consumer or L{UNKNOWN_LENGTH}
+        if this is not known in advance.
+        """)
+
+    def startProducing(consumer):
+        """
+        Start producing to the given
+        L{IConsumer<twisted.internet.interfaces.IConsumer>} provider.
+
+        @return: A L{Deferred<twisted.internet.defer.Deferred>} which fires with
+            C{None} when all bytes have been produced or with a
+            L{Failure<twisted.python.failure.Failure>} if there is any problem
+            before all bytes have been produced.
+        """
+
+
+    def stopProducing():
+        """
+        In addition to the standard behavior of
+        L{IProducer.stopProducing<twisted.internet.interfaces.IProducer.stopProducing>}
+        (stop producing data), make sure the
+        L{Deferred<twisted.internet.defer.Deferred>} returned by
+        C{startProducing} is never fired.
+        """
+
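+
+# A minimal sketch, not part of the interface definitions, of an in-memory
+# producer following the L{IBodyProducer} contract described above: it writes
+# all of its bytes as soon as startProducing is called and fires the returned
+# Deferred; stopProducing does nothing further.  The class name is
+# illustrative only.
+from zope.interface import implementer
+from twisted.internet.defer import succeed
+
+@implementer(IBodyProducer)
+class _ExampleBytesProducer(object):
+    def __init__(self, body):
+        self.body = body
+        self.length = len(body)
+
+    def startProducing(self, consumer):
+        # Deliver everything immediately; real producers may write
+        # incrementally and fire the Deferred later.
+        consumer.write(self.body)
+        return succeed(None)
+
+    def pauseProducing(self):
+        pass
+
+    def resumeProducing(self):
+        pass
+
+    def stopProducing(self):
+        pass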
+
+
+class IRenderable(Interface):
+    """
+    An L{IRenderable} is an object that may be rendered by the
+    L{twisted.web.template} templating system.
+    """
+
+    def lookupRenderMethod(name):
+        """
+        Look up and return the render method associated with the given name.
+
+        @type name: C{str}
+        @param name: The value of a render directive encountered in the
+            document returned by a call to L{IRenderable.render}.
+
+        @return: A two-argument callable which will be invoked with the request
+            being responded to and the tag object on which the render directive
+            was encountered.
+        """
+
+
+    def render(request):
+        """
+        Get the document for this L{IRenderable}.
+
+        @type request: L{IRequest} provider or C{NoneType}
+        @param request: The request in response to which this method is being
+            invoked.
+
+        @return: An object which can be flattened.
+        """
+
+
+
+class ITemplateLoader(Interface):
+    """
+    A loader for templates; something usable as a value for
+    L{twisted.web.template.Element}'s C{loader} attribute.
+    """
+
+    def load():
+        """
+        Load a template suitable for rendering.
+
+        @return: a C{list} of C{list}s, C{unicode} objects, C{Element}s and
+            other L{IRenderable} providers.
+        """
+
+
+
+class IResponse(Interface):
+    """
+    An object representing an HTTP response received from an HTTP server.
+
+    @since: 11.1
+    """
+
+    version = Attribute(
+        "A three-tuple describing the protocol and protocol version "
+        "of the response.  The first element is of type C{str}, the second "
+        "and third are of type C{int}.  For example, C{('HTTP', 1, 1)}.")
+
+
+    code = Attribute("The HTTP status code of this response, as a C{int}.")
+
+
+    phrase = Attribute(
+        "The HTTP reason phrase of this response, as a C{str}.")
+
+
+    headers = Attribute("The HTTP response L{Headers} of this response.")
+
+
+    length = Attribute(
+        "The C{int} number of bytes expected to be in the body of this "
+        "response or L{UNKNOWN_LENGTH} if the server did not indicate how "
+        "many bytes to expect.  For I{HEAD} responses, this will be 0; if "
+        "the response includes a I{Content-Length} header, it will be "
+        "available in C{headers}.")
+
+
+    def deliverBody(protocol):
+        """
+        Register an L{IProtocol<twisted.internet.interfaces.IProtocol>} provider
+        to receive the response body.
+
+        The protocol will be connected to a transport which provides
+        L{IPushProducer}.  The protocol's C{connectionLost} method will be
+        called with:
+
+            - ResponseDone, which indicates that all bytes from the response
+              have been successfully delivered.
+
+            - PotentialDataLoss, which indicates that it cannot be determined
+              if the entire response body has been delivered.  This only occurs
+              when making requests to HTTP servers which do not set
+              I{Content-Length} or a I{Transfer-Encoding} in the response.
+
+            - ResponseFailed, which indicates that some bytes from the response
+              were lost.  The C{reasons} attribute of the exception may provide
+              more specific indications as to why.
+        """
+
+
+
+class _IRequestEncoder(Interface):
+    """
+    An object encoding data passed to L{IRequest.write}, for example for
+    compression purpose.
+
+    @since: 12.3
+    """
+
+    def encode(data):
+        """
+        Encode the data given and return the result.
+
+        @param data: The content to encode.
+        @type data: C{str}
+
+        @return: The encoded data.
+        @rtype: C{str}
+        """
+
+
+    def finish():
+        """
+        Callback called when the request is closing.
+
+        @return: If necessary, the pending data accumulated from previous
+            C{encode} calls.
+        @rtype: C{str}
+        """
+
+
+
+class _IRequestEncoderFactory(Interface):
+    """
+    A factory for returning L{_IRequestEncoder} instances.
+
+    @since: 12.3
+    """
+
+    def encoderForRequest(request):
+        """
+        If applicable, returns a L{_IRequestEncoder} instance which will encode
+        the request.
+        """
+
+
+
+UNKNOWN_LENGTH = u"twisted.web.iweb.UNKNOWN_LENGTH"
+
+__all__ = [
+    "IUsernameDigestHash", "ICredentialFactory", "IRequest",
+    "IBodyProducer", "IRenderable", "IResponse", "_IRequestEncoder",
+    "_IRequestEncoderFactory",
+
+    "UNKNOWN_LENGTH"]
diff --git a/ThirdParty/Twisted/twisted/web/microdom.py b/ThirdParty/Twisted/twisted/web/microdom.py
new file mode 100644
index 0000000..ca35612
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/microdom.py
@@ -0,0 +1,1028 @@
+# -*- test-case-name: twisted.web.test.test_xml -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Micro Document Object Model: a partial DOM implementation with SUX.
+
+This is an implementation of what we consider to be the useful subset of the
+DOM.  The chief advantage of this library is that, not being burdened with
+standards compliance, it can remain very stable between versions.  We can also
+implement utility 'pythonic' ways to access and mutate the XML tree.
+
+Since this has not been subjected to a serious trial by fire, it is not recommended
+to use this outside of Twisted applications.  However, it seems to work just
+fine for the documentation generator, which parses a fairly representative
+sample of XML.
+
+Microdom mainly focuses on working with HTML and XHTML.
+"""
+
+# System Imports
+import re
+from cStringIO import StringIO
+
+# create NodeList class
+from types import ListType as NodeList
+from types import StringTypes, UnicodeType
+
+# Twisted Imports
+from twisted.web.sux import XMLParser, ParseError
+from twisted.python.util import InsensitiveDict
+
+
+def getElementsByTagName(iNode, name):
+    """
+    Return a list of all child elements of C{iNode} with a name matching
+    C{name}.
+
+    Note that this implementation does not conform to the DOM Level 1 Core
+    specification because it may return C{iNode}.
+
+    @param iNode: An element at which to begin searching.  If C{iNode} has a
+        name matching C{name}, it will be included in the result.
+
+    @param name: A C{str} giving the name of the elements to return.
+
+    @return: A C{list} of direct or indirect child elements of C{iNode} with
+        the name C{name}.  This may include C{iNode}.
+    """
+    matches = []
+    matches_append = matches.append # faster lookup. don't do this at home
+    slice = [iNode]
+    while len(slice)>0:
+        c = slice.pop(0)
+        if c.nodeName == name:
+            matches_append(c)
+        slice[:0] = c.childNodes
+    return matches
+
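+
+# A short usage sketch, not part of the module: build a tiny tree with the
+# Element and Text classes defined further down and search it.  The helper
+# name and the tag names are illustrative only.
+def _exampleGetElementsByTagName():
+    root = Element('ul')
+    for label in ('one', 'two'):
+        item = Element('li')
+        item.appendChild(Text(label))
+        root.appendChild(item)
+    items = getElementsByTagName(root, 'li')
+    return [item.childNodes[0].nodeValue for item in items]  # ['one', 'two']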
+
+
+def getElementsByTagNameNoCase(iNode, name):
+    name = name.lower()
+    matches = []
+    matches_append = matches.append
+    slice=[iNode]
+    while len(slice)>0:
+        c = slice.pop(0)
+        if c.nodeName.lower() == name:
+            matches_append(c)
+        slice[:0] = c.childNodes
+    return matches
+
+# order is important
+HTML_ESCAPE_CHARS = (('&', '&amp;'), # don't add any entities before this one
+                    ('<', '&lt;'),
+                    ('>', '&gt;'),
+                    ('"', '&quot;'))
+REV_HTML_ESCAPE_CHARS = list(HTML_ESCAPE_CHARS)
+REV_HTML_ESCAPE_CHARS.reverse()
+
+XML_ESCAPE_CHARS = HTML_ESCAPE_CHARS + (("'", '&apos;'),)
+REV_XML_ESCAPE_CHARS = list(XML_ESCAPE_CHARS)
+REV_XML_ESCAPE_CHARS.reverse()
+
+def unescape(text, chars=REV_HTML_ESCAPE_CHARS):
+    "Perform the exact opposite of 'escape'."
+    for s, h in chars:
+        text = text.replace(h, s)
+    return text
+
+def escape(text, chars=HTML_ESCAPE_CHARS):
+    "Escape a few XML special chars with XML entities."
+    for s, h in chars:
+        text = text.replace(s, h)
+    return text
+
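+# Illustrative round trip, not part of the module:
+#     escape('<b>"AT&T"</b>')  ==>  '&lt;b&gt;&quot;AT&amp;T&quot;&lt;/b&gt;'
+#     unescape('&lt;b&gt;')    ==>  '<b>'
+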
+
+class MismatchedTags(Exception):
+
+    def __init__(self, filename, expect, got, endLine, endCol, begLine, begCol):
+       (self.filename, self.expect, self.got, self.begLine, self.begCol, self.endLine,
+        self.endCol) = filename, expect, got, begLine, begCol, endLine, endCol
+
+    def __str__(self):
+        return ("expected </%s>, got </%s> line: %s col: %s, began line: %s col: %s"
+                % (self.expect, self.got, self.endLine, self.endCol, self.begLine,
+                   self.begCol))
+
+
+class Node(object):
+    nodeName = "Node"
+
+    def __init__(self, parentNode=None):
+        self.parentNode = parentNode
+        self.childNodes = []
+
+    def isEqualToNode(self, other):
+        """
+        Compare this node to C{other}.  If the nodes have the same number of
+        children and corresponding children are equal to each other, return
+        C{True}, otherwise return C{False}.
+
+        @type other: L{Node}
+        @rtype: C{bool}
+        """
+        if len(self.childNodes) != len(other.childNodes):
+            return False
+        for a, b in zip(self.childNodes, other.childNodes):
+            if not a.isEqualToNode(b):
+                return False
+        return True
+
+    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
+                 nsprefixes={}, namespace=''):
+        raise NotImplementedError()
+
+    def toxml(self, indent='', addindent='', newl='', strip=0, nsprefixes={},
+              namespace=''):
+        s = StringIO()
+        self.writexml(s, indent, addindent, newl, strip, nsprefixes, namespace)
+        rv = s.getvalue()
+        return rv
+
+    def writeprettyxml(self, stream, indent='', addindent=' ', newl='\n', strip=0):
+        return self.writexml(stream, indent, addindent, newl, strip)
+
+    def toprettyxml(self, indent='', addindent=' ', newl='\n', strip=0):
+        return self.toxml(indent, addindent, newl, strip)
+
+    def cloneNode(self, deep=0, parent=None):
+        raise NotImplementedError()
+
+    def hasChildNodes(self):
+        if self.childNodes:
+            return 1
+        else:
+            return 0
+
+
+    def appendChild(self, child):
+        """
+        Make the given L{Node} the last child of this node.
+
+        @param child: The L{Node} which will become a child of this node.
+
+        @raise TypeError: If C{child} is not a C{Node} instance.
+        """
+        if not isinstance(child, Node):
+            raise TypeError("expected Node instance")
+        self.childNodes.append(child)
+        child.parentNode = self
+
+
+    def insertBefore(self, new, ref):
+        """
+        Make the given L{Node} C{new} a child of this node which comes before
+        the L{Node} C{ref}.
+
+        @param new: A L{Node} which will become a child of this node.
+
+        @param ref: A L{Node} which is already a child of this node which
+            C{new} will be inserted before.
+
+        @raise TypeError: If C{new} or C{ref} is not a C{Node} instance.
+
+        @return: C{new}
+        """
+        if not isinstance(new, Node) or not isinstance(ref, Node):
+            raise TypeError("expected Node instance")
+        i = self.childNodes.index(ref)
+        new.parentNode = self
+        self.childNodes.insert(i, new)
+        return new
+
+
+    def removeChild(self, child):
+        """
+        Remove the given L{Node} from this node's children.
+
+        @param child: A L{Node} which is a child of this node which will no
+            longer be a child of this node after this method is called.
+
+        @raise TypeError: If C{child} is not a C{Node} instance.
+
+        @return: C{child}
+        """
+        if not isinstance(child, Node):
+            raise TypeError("expected Node instance")
+        if child in self.childNodes:
+            self.childNodes.remove(child)
+            child.parentNode = None
+        return child
+
+    def replaceChild(self, newChild, oldChild):
+        """
+        Replace a L{Node} which is already a child of this node with a
+        different node.
+
+        @param newChild: A L{Node} which will be made a child of this node.
+
+        @param oldChild: A L{Node} which is a child of this node which will
+            give up its position to C{newChild}.
+
+        @raise TypeError: If C{newChild} or C{oldChild} is not a C{Node}
+            instance.
+
+        @raise ValueError: If C{oldChild} is not a child of this C{Node}.
+        """
+        if not isinstance(newChild, Node) or not isinstance(oldChild, Node):
+            raise TypeError("expected Node instance")
+        if oldChild.parentNode is not self:
+            raise ValueError("oldChild is not a child of this node")
+        self.childNodes[self.childNodes.index(oldChild)] = newChild
+        oldChild.parentNode = None
+        newChild.parentNode = self
+
+
+    def lastChild(self):
+        return self.childNodes[-1]
+
+
+    def firstChild(self):
+        if len(self.childNodes):
+            return self.childNodes[0]
+        return None
+
+    #def get_ownerDocument(self):
+    #   """This doesn't really get the owner document; microdom nodes
+    #   don't even have one necessarily.  This gets the root node,
+    #   which is usually what you really meant.
+    #   *NOT DOM COMPLIANT.*
+    #   """
+    #   node=self
+    #   while (node.parentNode): node=node.parentNode
+    #   return node
+    #ownerDocument=node.get_ownerDocument()
+    # leaving commented for discussion; see also domhelpers.getParents(node)
+
+class Document(Node):
+
+    def __init__(self, documentElement=None):
+        Node.__init__(self)
+        if documentElement:
+            self.appendChild(documentElement)
+
+    def cloneNode(self, deep=0, parent=None):
+        d = Document()
+        d.doctype = self.doctype
+        if deep:
+            newEl = self.documentElement.cloneNode(1, self)
+        else:
+            newEl = self.documentElement
+        d.appendChild(newEl)
+        return d
+
+    doctype = None
+
+    def isEqualToDocument(self, n):
+        return (self.doctype == n.doctype) and Node.isEqualToNode(self, n)
+    isEqualToNode = isEqualToDocument
+
+    def get_documentElement(self):
+        return self.childNodes[0]
+    documentElement=property(get_documentElement)
+
+    def appendChild(self, child):
+        """
+        Make the given L{Node} the I{document element} of this L{Document}.
+
+        @param child: The L{Node} to make into this L{Document}'s document
+            element.
+
+        @raise ValueError: If this document already has a document element.
+        """
+        if self.childNodes:
+            raise ValueError("Only one element per document.")
+        Node.appendChild(self, child)
+
+    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
+                 nsprefixes={}, namespace=''):
+        stream.write('<?xml version="1.0"?>' + newl)
+        if self.doctype:
+            stream.write("<!DOCTYPE "+self.doctype+">" + newl)
+        self.documentElement.writexml(stream, indent, addindent, newl, strip,
+                                      nsprefixes, namespace)
+
+    # of dubious utility (?)
+    def createElement(self, name, **kw):
+        return Element(name, **kw)
+
+    def createTextNode(self, text):
+        return Text(text)
+
+    def createComment(self, text):
+        return Comment(text)
+
+    def getElementsByTagName(self, name):
+        if self.documentElement.caseInsensitive:
+            return getElementsByTagNameNoCase(self, name)
+        return getElementsByTagName(self, name)
+
+    def getElementById(self, id):
+        childNodes = self.childNodes[:]
+        while childNodes:
+            node = childNodes.pop(0)
+            if node.childNodes:
+                childNodes.extend(node.childNodes)
+            if hasattr(node, 'getAttribute') and node.getAttribute("id") == id:
+                return node
+
+
+class EntityReference(Node):
+
+    def __init__(self, eref, parentNode=None):
+        Node.__init__(self, parentNode)
+        self.eref = eref
+        self.nodeValue = self.data = "&" + eref + ";"
+
+    def isEqualToEntityReference(self, n):
+        if not isinstance(n, EntityReference):
+            return 0
+        return (self.eref == n.eref) and (self.nodeValue == n.nodeValue)
+    isEqualToNode = isEqualToEntityReference
+
+    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
+                 nsprefixes={}, namespace=''):
+        stream.write(self.nodeValue)
+
+    def cloneNode(self, deep=0, parent=None):
+        return EntityReference(self.eref, parent)
+
+
+class CharacterData(Node):
+
+    def __init__(self, data, parentNode=None):
+        Node.__init__(self, parentNode)
+        self.value = self.data = self.nodeValue = data
+
+    def isEqualToCharacterData(self, n):
+        return self.value == n.value
+    isEqualToNode = isEqualToCharacterData
+
+
+class Comment(CharacterData):
+    """A comment node."""
+
+    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
+                 nsprefixes={}, namespace=''):
+        val=self.data
+        if isinstance(val, UnicodeType):
+            val=val.encode('utf8')
+        stream.write("<!--%s-->" % val)
+
+    def cloneNode(self, deep=0, parent=None):
+        return Comment(self.nodeValue, parent)
+
+
+class Text(CharacterData):
+
+    def __init__(self, data, parentNode=None, raw=0):
+        CharacterData.__init__(self, data, parentNode)
+        self.raw = raw
+
+
+    def isEqualToNode(self, other):
+        """
+        Compare this text to C{other}.  If the underlying values and the C{raw}
+        flag are the same, return C{True}, otherwise return C{False}.
+        """
+        return (
+            CharacterData.isEqualToNode(self, other) and
+            self.raw == other.raw)
+
+
+    def cloneNode(self, deep=0, parent=None):
+        return Text(self.nodeValue, parent, self.raw)
+
+    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
+                 nsprefixes={}, namespace=''):
+        if self.raw:
+            val = self.nodeValue
+            if not isinstance(val, StringTypes):
+                val = str(self.nodeValue)
+        else:
+            v = self.nodeValue
+            if not isinstance(v, StringTypes):
+                v = str(v)
+            if strip:
+                v = ' '.join(v.split())
+            val = escape(v)
+        if isinstance(val, UnicodeType):
+            val = val.encode('utf8')
+        stream.write(val)
+
+    def __repr__(self):
+        return "Text(%s" % repr(self.nodeValue) + ')'
+
+
+class CDATASection(CharacterData):
+    def cloneNode(self, deep=0, parent=None):
+        return CDATASection(self.nodeValue, parent)
+
+    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
+                 nsprefixes={}, namespace=''):
+        stream.write("<![CDATA[")
+        stream.write(self.nodeValue)
+        stream.write("]]>")
+
+def _genprefix():
+    i = 0
+    while True:
+        yield  'p' + str(i)
+        i = i + 1
+genprefix = _genprefix().next
+
+class _Attr(CharacterData):
+    "Support class for getAttributeNode."
+
+class Element(Node):
+
+    preserveCase = 0
+    caseInsensitive = 1
+    nsprefixes = None
+
+    def __init__(self, tagName, attributes=None, parentNode=None,
+                 filename=None, markpos=None,
+                 caseInsensitive=1, preserveCase=0,
+                 namespace=None):
+        Node.__init__(self, parentNode)
+        self.preserveCase = preserveCase or not caseInsensitive
+        self.caseInsensitive = caseInsensitive
+        if not preserveCase:
+            tagName = tagName.lower()
+        if attributes is None:
+            self.attributes = {}
+        else:
+            self.attributes = attributes
+            for k, v in self.attributes.items():
+                self.attributes[k] = unescape(v)
+
+        if caseInsensitive:
+            self.attributes = InsensitiveDict(self.attributes,
+                                              preserve=preserveCase)
+
+        self.endTagName = self.nodeName = self.tagName = tagName
+        self._filename = filename
+        self._markpos = markpos
+        self.namespace = namespace
+
+    def addPrefixes(self, pfxs):
+        if self.nsprefixes is None:
+            self.nsprefixes = pfxs
+        else:
+            self.nsprefixes.update(pfxs)
+
+    def endTag(self, endTagName):
+        if not self.preserveCase:
+            endTagName = endTagName.lower()
+        self.endTagName = endTagName
+
+    def isEqualToElement(self, n):
+        if self.caseInsensitive:
+            return ((self.attributes == n.attributes)
+                    and (self.nodeName.lower() == n.nodeName.lower()))
+        return (self.attributes == n.attributes) and (self.nodeName == n.nodeName)
+
+
+    def isEqualToNode(self, other):
+        """
+        Compare this element to C{other}.  If the C{nodeName}, C{namespace},
+        C{attributes}, and C{childNodes} are all the same, return C{True},
+        otherwise return C{False}.
+        """
+        return (
+            self.nodeName.lower() == other.nodeName.lower() and
+            self.namespace == other.namespace and
+            self.attributes == other.attributes and
+            Node.isEqualToNode(self, other))
+
+
+    def cloneNode(self, deep=0, parent=None):
+        clone = Element(
+            self.tagName, parentNode=parent, namespace=self.namespace,
+            preserveCase=self.preserveCase, caseInsensitive=self.caseInsensitive)
+        clone.attributes.update(self.attributes)
+        if deep:
+            clone.childNodes = [child.cloneNode(1, clone) for child in self.childNodes]
+        else:
+            clone.childNodes = []
+        return clone
+
+    def getElementsByTagName(self, name):
+        if self.caseInsensitive:
+            return getElementsByTagNameNoCase(self, name)
+        return getElementsByTagName(self, name)
+
+    def hasAttributes(self):
+        return 1
+
+    def getAttribute(self, name, default=None):
+        return self.attributes.get(name, default)
+
+    def getAttributeNS(self, ns, name, default=None):
+        nsk = (ns, name)
+        if self.attributes.has_key(nsk):
+            return self.attributes[nsk]
+        if ns == self.namespace:
+            return self.attributes.get(name, default)
+        return default
+
+    def getAttributeNode(self, name):
+        return _Attr(self.getAttribute(name), self)
+
+    def setAttribute(self, name, attr):
+        self.attributes[name] = attr
+
+    def removeAttribute(self, name):
+        if name in self.attributes:
+            del self.attributes[name]
+
+    def hasAttribute(self, name):
+        return name in self.attributes
+
+
+    def writexml(self, stream, indent='', addindent='', newl='', strip=0,
+                 nsprefixes={}, namespace=''):
+        """
+        Serialize this L{Element} to the given stream.
+
+        @param stream: A file-like object to which this L{Element} will be
+            written.
+
+        @param nsprefixes: A C{dict} mapping namespace URIs as C{str} to
+            prefixes as C{str}.  This defines the prefixes which are already in
+            scope in the document at the point at which this L{Element} exists.
+            This is essentially an implementation detail for namespace support.
+            Applications should not try to use it.
+
+        @param namespace: The namespace URI as a C{str} which is the default at
+            the point in the document at which this L{Element} exists.  This is
+            essentially an implementation detail for namespace support.
+            Applications should not try to use it.
+        """
+        # write beginning
+        ALLOWSINGLETON = ('img', 'br', 'hr', 'base', 'meta', 'link', 'param',
+                          'area', 'input', 'col', 'basefont', 'isindex',
+                          'frame')
+        BLOCKELEMENTS = ('html', 'head', 'body', 'noscript', 'ins', 'del',
+                         'h1', 'h2', 'h3', 'h4', 'h5', 'h6', 'script',
+                         'ul', 'ol', 'dl', 'pre', 'hr', 'blockquote',
+                         'address', 'p', 'div', 'fieldset', 'table', 'tr',
+                         'form', 'object', 'fieldset', 'applet', 'map')
+        FORMATNICELY = ('tr', 'ul', 'ol', 'head')
+
+        # this should never be necessary unless people start
+        # changing .tagName on the fly(?)
+        if not self.preserveCase:
+            self.endTagName = self.tagName
+        w = stream.write
+        if self.nsprefixes:
+            newprefixes = self.nsprefixes.copy()
+            for ns in nsprefixes.keys():
+                if ns in newprefixes:
+                    del newprefixes[ns]
+        else:
+            newprefixes = {}
+
+        begin = ['<']
+        if self.tagName in BLOCKELEMENTS:
+            begin = [newl, indent] + begin
+        bext = begin.extend
+        writeattr = lambda _atr, _val: bext((' ', _atr, '="', escape(_val), '"'))
+
+        # Make a local for tracking what end tag will be used.  If namespace
+        # prefixes are involved, this will be changed to account for that
+        # before it's actually used.
+        endTagName = self.endTagName
+
+        if namespace != self.namespace and self.namespace is not None:
+            # If the current default namespace is not the namespace of this tag
+            # (and this tag has a namespace at all) then we'll write out
+            # something related to namespaces.
+            if self.namespace in nsprefixes:
+                # This tag's namespace already has a prefix bound to it.  Use
+                # that prefix.
+                prefix = nsprefixes[self.namespace]
+                bext(prefix + ':' + self.tagName)
+                # Also make sure we use it for the end tag.
+                endTagName = prefix + ':' + self.endTagName
+            else:
+                # This tag's namespace has no prefix bound to it.  Change the
+                # default namespace to this tag's namespace so we don't need
+                # prefixes.  Alternatively, we could add a new prefix binding.
+                # I'm not sure why the code was written one way rather than the
+                # other. -exarkun
+                bext(self.tagName)
+                writeattr("xmlns", self.namespace)
+                # The default namespace just changed.  Make sure any children
+                # know about this.
+                namespace = self.namespace
+        else:
+            # This tag has no namespace or its namespace is already the default
+            # namespace.  Nothing extra to do here.
+            bext(self.tagName)
+
+        j = ''.join
+        for attr, val in self.attributes.iteritems():
+            if isinstance(attr, tuple):
+                ns, key = attr
+                if nsprefixes.has_key(ns):
+                    prefix = nsprefixes[ns]
+                else:
+                    prefix = genprefix()
+                    newprefixes[ns] = prefix
+                assert val is not None
+                writeattr(prefix+':'+key,val)
+            else:
+                assert val is not None
+                writeattr(attr, val)
+        if newprefixes:
+            for ns, prefix in newprefixes.iteritems():
+                if prefix:
+                    writeattr('xmlns:'+prefix, ns)
+            newprefixes.update(nsprefixes)
+            downprefixes = newprefixes
+        else:
+            downprefixes = nsprefixes
+        w(j(begin))
+        if self.childNodes:
+            w(">")
+            newindent = indent + addindent
+            for child in self.childNodes:
+                if self.tagName in BLOCKELEMENTS and \
+                   self.tagName in FORMATNICELY:
+                    w(j((newl, newindent)))
+                child.writexml(stream, newindent, addindent, newl, strip,
+                               downprefixes, namespace)
+            if self.tagName in BLOCKELEMENTS:
+                w(j((newl, indent)))
+            w(j(('</', endTagName, '>')))
+        elif self.tagName.lower() not in ALLOWSINGLETON:
+            w(j(('></', endTagName, '>')))
+        else:
+            w(" />")
+
+
+    def __repr__(self):
+        rep = "Element(%s" % repr(self.nodeName)
+        if self.attributes:
+            rep += ", attributes=%r" % (self.attributes,)
+        if self._filename:
+            rep += ", filename=%r" % (self._filename,)
+        if self._markpos:
+            rep += ", markpos=%r" % (self._markpos,)
+        return rep + ')'
+
+    def __str__(self):
+        rep = "<" + self.nodeName
+        if self._filename or self._markpos:
+            rep += " ("
+        if self._filename:
+            rep += repr(self._filename)
+        if self._markpos:
+            rep += " line %s column %s" % self._markpos
+        if self._filename or self._markpos:
+            rep += ")"
+        for item in self.attributes.items():
+            rep += " %s=%r" % item
+        if self.hasChildNodes():
+            rep += " >...</%s>" % self.nodeName
+        else:
+            rep += " />"
+        return rep
+
+def _unescapeDict(d):
+    dd = {}
+    for k, v in d.items():
+        dd[k] = unescape(v)
+    return dd
+
+def _reverseDict(d):
+    dd = {}
+    for k, v in d.items():
+        dd[v]=k
+    return dd
+
+class MicroDOMParser(XMLParser):
+
+    # <dash> glyph: a quick scan thru the DTD says BODY, AREA, LINK, IMG, HR,
+    # P, DT, DD, LI, INPUT, OPTION, THEAD, TFOOT, TBODY, COLGROUP, COL, TR, TH,
+    # TD, HEAD, BASE, META, HTML all have optional closing tags
+
+    soonClosers = 'area link br img hr input base meta'.split()
+    laterClosers = {'p': ['p', 'dt'],
+                    'dt': ['dt','dd'],
+                    'dd': ['dt', 'dd'],
+                    'li': ['li'],
+                    'tbody': ['thead', 'tfoot', 'tbody'],
+                    'thead': ['thead', 'tfoot', 'tbody'],
+                    'tfoot': ['thead', 'tfoot', 'tbody'],
+                    'colgroup': ['colgroup'],
+                    'col': ['col'],
+                    'tr': ['tr'],
+                    'td': ['td'],
+                    'th': ['th'],
+                    'head': ['body'],
+                    'title': ['head', 'body'], # this looks wrong...
+                    'option': ['option'],
+                    }
+
+
+    def __init__(self, beExtremelyLenient=0, caseInsensitive=1, preserveCase=0,
+                 soonClosers=soonClosers, laterClosers=laterClosers):
+        self.elementstack = []
+        d = {'xmlns': 'xmlns', '': None}
+        dr = _reverseDict(d)
+        self.nsstack = [(d,None,dr)]
+        self.documents = []
+        self._mddoctype = None
+        self.beExtremelyLenient = beExtremelyLenient
+        self.caseInsensitive = caseInsensitive
+        self.preserveCase = preserveCase or not caseInsensitive
+        self.soonClosers = soonClosers
+        self.laterClosers = laterClosers
+        # self.indentlevel = 0
+
+    def shouldPreserveSpace(self):
+        for edx in xrange(len(self.elementstack)):
+            el = self.elementstack[-edx]
+            if el.tagName == 'pre' or el.getAttribute("xml:space", '') == 'preserve':
+                return 1
+        return 0
+
+    def _getparent(self):
+        if self.elementstack:
+            return self.elementstack[-1]
+        else:
+            return None
+
+    COMMENT = re.compile(r"\s*/[/*]\s*")
+
+    def _fixScriptElement(self, el):
+        # this deals with case where there is comment or CDATA inside
+        # <script> tag and we want to do the right thing with it
+        if not self.beExtremelyLenient or not len(el.childNodes) == 1:
+            return
+        c = el.firstChild()
+        if isinstance(c, Text):
+            # deal with nasty people who do stuff like:
+            #   <script> // <!--
+            #      x = 1;
+            #   // --></script>
+            # tidy does this, for example.
+            prefix = ""
+            oldvalue = c.value
+            match = self.COMMENT.match(oldvalue)
+            if match:
+                prefix = match.group()
+                oldvalue = oldvalue[len(prefix):]
+
+            # now see if contents are actual node and comment or CDATA
+            try:
+                e = parseString("<a>%s</a>" % oldvalue).childNodes[0]
+            except (ParseError, MismatchedTags):
+                return
+            if len(e.childNodes) != 1:
+                return
+            e = e.firstChild()
+            if isinstance(e, (CDATASection, Comment)):
+                el.childNodes = []
+                if prefix:
+                    el.childNodes.append(Text(prefix))
+                el.childNodes.append(e)
+
+    def gotDoctype(self, doctype):
+        self._mddoctype = doctype
+
+    def gotTagStart(self, name, attributes):
+        # print ' '*self.indentlevel, 'start tag',name
+        # self.indentlevel += 1
+        parent = self._getparent()
+        if (self.beExtremelyLenient and isinstance(parent, Element)):
+            parentName = parent.tagName
+            myName = name
+            if self.caseInsensitive:
+                parentName = parentName.lower()
+                myName = myName.lower()
+            if myName in self.laterClosers.get(parentName, []):
+                self.gotTagEnd(parent.tagName)
+                parent = self._getparent()
+        attributes = _unescapeDict(attributes)
+        namespaces = self.nsstack[-1][0]
+        newspaces = {}
+        for k, v in attributes.items():
+            if k.startswith('xmlns'):
+                spacenames = k.split(':',1)
+                if len(spacenames) == 2:
+                    newspaces[spacenames[1]] = v
+                else:
+                    newspaces[''] = v
+                del attributes[k]
+        if newspaces:
+            namespaces = namespaces.copy()
+            namespaces.update(newspaces)
+        for k, v in attributes.items():
+            ksplit = k.split(':', 1)
+            if len(ksplit) == 2:
+                pfx, tv = ksplit
+                if pfx != 'xml' and pfx in namespaces:
+                    attributes[namespaces[pfx], tv] = v
+                    del attributes[k]
+        el = Element(name, attributes, parent,
+                     self.filename, self.saveMark(),
+                     caseInsensitive=self.caseInsensitive,
+                     preserveCase=self.preserveCase,
+                     namespace=namespaces.get(''))
+        revspaces = _reverseDict(newspaces)
+        el.addPrefixes(revspaces)
+
+        if newspaces:
+            rscopy = self.nsstack[-1][2].copy()
+            rscopy.update(revspaces)
+            self.nsstack.append((namespaces, el, rscopy))
+        self.elementstack.append(el)
+        if parent:
+            parent.appendChild(el)
+        if (self.beExtremelyLenient and el.tagName in self.soonClosers):
+            self.gotTagEnd(name)
+
+    def _gotStandalone(self, factory, data):
+        parent = self._getparent()
+        te = factory(data, parent)
+        if parent:
+            parent.appendChild(te)
+        elif self.beExtremelyLenient:
+            self.documents.append(te)
+
+    def gotText(self, data):
+        if data.strip() or self.shouldPreserveSpace():
+            self._gotStandalone(Text, data)
+
+    def gotComment(self, data):
+        self._gotStandalone(Comment, data)
+
+    def gotEntityReference(self, entityRef):
+        self._gotStandalone(EntityReference, entityRef)
+
+    def gotCData(self, cdata):
+        self._gotStandalone(CDATASection, cdata)
+
+    def gotTagEnd(self, name):
+        # print ' '*self.indentlevel, 'end tag',name
+        # self.indentlevel -= 1
+        if not self.elementstack:
+            if self.beExtremelyLenient:
+                return
+            raise MismatchedTags(*((self.filename, "NOTHING", name)
+                                   +self.saveMark()+(0,0)))
+        el = self.elementstack.pop()
+        pfxdix = self.nsstack[-1][2]
+        if self.nsstack[-1][1] is el:
+            nstuple = self.nsstack.pop()
+        else:
+            nstuple = None
+        if self.caseInsensitive:
+            tn = el.tagName.lower()
+            cname = name.lower()
+        else:
+            tn = el.tagName
+            cname = name
+
+        nsplit = name.split(':',1)
+        if len(nsplit) == 2:
+            pfx, newname = nsplit
+            ns = pfxdix.get(pfx,None)
+            if ns is not None:
+                if el.namespace != ns:
+                    if not self.beExtremelyLenient:
+                        raise MismatchedTags(*((self.filename, el.tagName, name)
+                                               +self.saveMark()+el._markpos))
+        if not (tn == cname):
+            if self.beExtremelyLenient:
+                if self.elementstack:
+                    lastEl = self.elementstack[0]
+                    for idx in xrange(len(self.elementstack)):
+                        if self.elementstack[-(idx+1)].tagName == cname:
+                            self.elementstack[-(idx+1)].endTag(name)
+                            break
+                    else:
+                        # this was a garbage close tag; wait for a real one
+                        self.elementstack.append(el)
+                        if nstuple is not None:
+                            self.nsstack.append(nstuple)
+                        return
+                    del self.elementstack[-(idx+1):]
+                    if not self.elementstack:
+                        self.documents.append(lastEl)
+                        return
+            else:
+                raise MismatchedTags(*((self.filename, el.tagName, name)
+                                       +self.saveMark()+el._markpos))
+        el.endTag(name)
+        if not self.elementstack:
+            self.documents.append(el)
+        if self.beExtremelyLenient and el.tagName == "script":
+            self._fixScriptElement(el)
+
+    def connectionLost(self, reason):
+        XMLParser.connectionLost(self, reason) # This can cause more events!
+        if self.elementstack:
+            if self.beExtremelyLenient:
+                self.documents.append(self.elementstack[0])
+            else:
+                raise MismatchedTags(*((self.filename, self.elementstack[-1],
+                                        "END_OF_FILE")
+                                       +self.saveMark()
+                                       +self.elementstack[-1]._markpos))
+
+
+def parse(readable, *args, **kwargs):
+    """Parse HTML or XML readable."""
+    if not hasattr(readable, "read"):
+        readable = open(readable, "rb")
+    mdp = MicroDOMParser(*args, **kwargs)
+    mdp.filename = getattr(readable, "name", "<xmlfile />")
+    mdp.makeConnection(None)
+    if hasattr(readable,"getvalue"):
+        mdp.dataReceived(readable.getvalue())
+    else:
+        r = readable.read(1024)
+        while r:
+            mdp.dataReceived(r)
+            r = readable.read(1024)
+    mdp.connectionLost(None)
+
+    if not mdp.documents:
+        raise ParseError(mdp.filename, 0, 0, "No top-level Nodes in document")
+
+    if mdp.beExtremelyLenient:
+        if len(mdp.documents) == 1:
+            d = mdp.documents[0]
+            if not isinstance(d, Element):
+                el = Element("html")
+                el.appendChild(d)
+                d = el
+        else:
+            d = Element("html")
+            for child in mdp.documents:
+                d.appendChild(child)
+    else:
+        d = mdp.documents[0]
+    doc = Document(d)
+    doc.doctype = mdp._mddoctype
+    return doc
+
+def parseString(st, *args, **kw):
+    if isinstance(st, UnicodeType):
+        # this isn't particularly ideal, but it does work.
+        return parse(StringIO(st.encode('UTF-16')), *args, **kw)
+    return parse(StringIO(st), *args, **kw)
+
+
+def parseXML(readable):
+    """Parse an XML readable object."""
+    return parse(readable, caseInsensitive=0, preserveCase=1)
+
+
+def parseXMLString(st):
+    """Parse an XML string."""
+    return parseString(st, caseInsensitive=0, preserveCase=1)
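+
+
+# Usage sketch (illustrative only; the markup, tag and attribute names below
+# are placeholders): parse a sloppily closed HTML fragment leniently and read
+# back the first element's tag name and an attribute.
+def _exampleParseString():
+    doc = parseString('<p class="x">hi &amp; bye', beExtremelyLenient=1)
+    el = doc.childNodes[0]
+    return el.tagName, el.getAttribute('class')   # roughly ('p', 'x')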
+
+
+# Utility
+
+class lmx:
+    """Easy creation of XML."""
+
+    def __init__(self, node='div'):
+        if isinstance(node, StringTypes):
+            node = Element(node)
+        self.node = node
+
+    def __getattr__(self, name):
+        if name[0] == '_':
+            raise AttributeError("no private attrs")
+        return lambda **kw: self.add(name,**kw)
+
+    def __setitem__(self, key, val):
+        self.node.setAttribute(key, val)
+
+    def __getitem__(self, key):
+        return self.node.getAttribute(key)
+
+    def text(self, txt, raw=0):
+        nn = Text(txt, raw=raw)
+        self.node.appendChild(nn)
+        return self
+
+    def add(self, tagName, **kw):
+        newNode = Element(tagName, caseInsensitive=0, preserveCase=0)
+        self.node.appendChild(newNode)
+        xf = lmx(newNode)
+        for k, v in kw.items():
+            if k[0] == '_':
+                k = k[1:]
+            xf[k]=v
+        return xf
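+
+
+# Usage sketch for lmx (illustrative only; the tag and attribute names are
+# placeholders): build a small tree and serialize it with writexml.
+def _exampleLmx():
+    root = lmx(Element('div'))
+    root.p(_class='intro').text('hello')
+    out = StringIO()
+    root.node.writexml(out)
+    return out.getvalue()   # roughly '<div><p class="intro">hello</p></div>'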
diff --git a/ThirdParty/Twisted/twisted/web/proxy.py b/ThirdParty/Twisted/twisted/web/proxy.py
new file mode 100644
index 0000000..68bce7d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/proxy.py
@@ -0,0 +1,303 @@
+# -*- test-case-name: twisted.web.test.test_proxy -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Simplistic HTTP proxy support.
+
+This comes in two main variants - the Proxy and the ReverseProxy.
+
+When a Proxy is in use, a browser trying to connect to a server (say,
+www.yahoo.com) will be intercepted by the Proxy, and the proxy will covertly
+connect to the server, and return the result.
+
+When a ReverseProxy is in use, the client connects directly to the ReverseProxy
+(say, www.yahoo.com) which farms off the request to one of a pool of servers,
+and returns the result.
+
+Normally, a Proxy is used on the client end of an Internet connection, while a
+ReverseProxy is used on the server end.
+"""
+
+import urlparse
+from urllib import quote as urlquote
+
+from twisted.internet import reactor
+from twisted.internet.protocol import ClientFactory
+from twisted.web.resource import Resource
+from twisted.web.server import NOT_DONE_YET
+from twisted.web.http import HTTPClient, Request, HTTPChannel
+
+
+
+class ProxyClient(HTTPClient):
+    """
+    Used by ProxyClientFactory to implement a simple web proxy.
+
+    @ivar _finished: A flag which indicates whether or not the original request
+        has been finished yet.
+    """
+    _finished = False
+
+    def __init__(self, command, rest, version, headers, data, father):
+        self.father = father
+        self.command = command
+        self.rest = rest
+        if "proxy-connection" in headers:
+            del headers["proxy-connection"]
+        headers["connection"] = "close"
+        headers.pop('keep-alive', None)
+        self.headers = headers
+        self.data = data
+
+
+    def connectionMade(self):
+        self.sendCommand(self.command, self.rest)
+        for header, value in self.headers.items():
+            self.sendHeader(header, value)
+        self.endHeaders()
+        self.transport.write(self.data)
+
+
+    def handleStatus(self, version, code, message):
+        self.father.setResponseCode(int(code), message)
+
+
+    def handleHeader(self, key, value):
+        # t.web.server.Request sets default values for these headers in its
+        # 'process' method. When these headers are received from the remote
+        # server, they ought to override the defaults, rather than append to
+        # them.
+        if key.lower() in ['server', 'date', 'content-type']:
+            self.father.responseHeaders.setRawHeaders(key, [value])
+        else:
+            self.father.responseHeaders.addRawHeader(key, value)
+
+
+    def handleResponsePart(self, buffer):
+        self.father.write(buffer)
+
+
+    def handleResponseEnd(self):
+        """
+        Finish the original request, indicating that the response has been
+        completely written to it, and disconnect the outgoing transport.
+        """
+        if not self._finished:
+            self._finished = True
+            self.father.finish()
+            self.transport.loseConnection()
+
+
+
+class ProxyClientFactory(ClientFactory):
+    """
+    Used by ProxyRequest to implement a simple web proxy.
+    """
+
+    protocol = ProxyClient
+
+
+    def __init__(self, command, rest, version, headers, data, father):
+        self.father = father
+        self.command = command
+        self.rest = rest
+        self.headers = headers
+        self.data = data
+        self.version = version
+
+
+    def buildProtocol(self, addr):
+        return self.protocol(self.command, self.rest, self.version,
+                             self.headers, self.data, self.father)
+
+
+    def clientConnectionFailed(self, connector, reason):
+        """
+        Report a connection failure in a response to the incoming request as
+        an error.
+        """
+        self.father.setResponseCode(501, "Gateway error")
+        self.father.responseHeaders.addRawHeader("Content-Type", "text/html")
+        self.father.write("<H1>Could not connect</H1>")
+        self.father.finish()
+
+
+
+class ProxyRequest(Request):
+    """
+    Used by Proxy to implement a simple web proxy.
+
+    @ivar reactor: the reactor used to create connections.
+    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
+    """
+
+    protocols = {'http': ProxyClientFactory}
+    ports = {'http': 80}
+
+    def __init__(self, channel, queued, reactor=reactor):
+        Request.__init__(self, channel, queued)
+        self.reactor = reactor
+
+
+    def process(self):
+        parsed = urlparse.urlparse(self.uri)
+        protocol = parsed[0]
+        host = parsed[1]
+        port = self.ports[protocol]
+        if ':' in host:
+            host, port = host.split(':')
+            port = int(port)
+        rest = urlparse.urlunparse(('', '') + parsed[2:])
+        if not rest:
+            rest = rest + '/'
+        class_ = self.protocols[protocol]
+        headers = self.getAllHeaders().copy()
+        if 'host' not in headers:
+            headers['host'] = host
+        self.content.seek(0, 0)
+        s = self.content.read()
+        clientFactory = class_(self.method, rest, self.clientproto, headers,
+                               s, self)
+        self.reactor.connectTCP(host, port, clientFactory)
+
+
+
+class Proxy(HTTPChannel):
+    """
+    This class implements a simple web proxy.
+
+    Since it inherits from L{twisted.web.http.HTTPChannel}, to use it you
+    should do something like this::
+
+        from twisted.web import http
+        f = http.HTTPFactory()
+        f.protocol = Proxy
+
+    Make the HTTPFactory a listener on a port as per usual, and you have
+    a fully-functioning web proxy!
+    """
+
+    requestFactory = ProxyRequest
+
+
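+# Usage sketch (illustrative only; the port number is a placeholder): serve a
+# forward HTTP proxy using the Proxy channel defined above.
+def _exampleForwardProxy():
+    from twisted.web import http
+    factory = http.HTTPFactory()
+    factory.protocol = Proxy
+    reactor.listenTCP(8080, factory)
+    reactor.run()
+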
+
+class ReverseProxyRequest(Request):
+    """
+    Used by ReverseProxy to implement a simple reverse proxy.
+
+    @ivar proxyClientFactoryClass: a proxy client factory class, used to create
+        new connections.
+    @type proxyClientFactoryClass: L{ClientFactory}
+
+    @ivar reactor: the reactor used to create connections.
+    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
+    """
+
+    proxyClientFactoryClass = ProxyClientFactory
+
+    def __init__(self, channel, queued, reactor=reactor):
+        Request.__init__(self, channel, queued)
+        self.reactor = reactor
+
+
+    def process(self):
+        """
+        Handle this request by connecting to the proxied server and forwarding
+        it there, then forwarding the response back as the response to this
+        request.
+        """
+        self.received_headers['host'] = self.factory.host
+        clientFactory = self.proxyClientFactoryClass(
+            self.method, self.uri, self.clientproto, self.getAllHeaders(),
+            self.content.read(), self)
+        self.reactor.connectTCP(self.factory.host, self.factory.port,
+                                clientFactory)
+
+
+
+class ReverseProxy(HTTPChannel):
+    """
+    Implements a simple reverse proxy.
+
+    For details of usage, see the file examples/reverse-proxy.py.
+    """
+
+    requestFactory = ReverseProxyRequest
+
+
+
+class ReverseProxyResource(Resource):
+    """
+    Resource that renders the results fetched from another server.
+
+    Put this resource in the tree to cause everything below it to be relayed
+    to a different server.
+
+    @ivar proxyClientFactoryClass: a proxy client factory class, used to create
+        new connections.
+    @type proxyClientFactoryClass: L{ClientFactory}
+
+    @ivar reactor: the reactor used to create connections.
+    @type reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
+    """
+
+    proxyClientFactoryClass = ProxyClientFactory
+
+
+    def __init__(self, host, port, path, reactor=reactor):
+        """
+        @param host: the host of the web server to proxy.
+        @type host: C{str}
+
+        @param port: the port of the web server to proxy.
+        @type port: C{int}
+
+        @param path: the base path to fetch data from. Note that you shouldn't
+            put any trailing slashes in it; one will be added automatically per
+            request. For example, if you put B{/foo}, a request on B{/bar} will
+            be proxied to B{/foo/bar}.  Any required encoding of special
+            characters (such as " " or "/") should have been done already.
+
+        @type path: C{str}
+        """
+        Resource.__init__(self)
+        self.host = host
+        self.port = port
+        self.path = path
+        self.reactor = reactor
+
+
+    def getChild(self, path, request):
+        """
+        Create and return a proxy resource with the same proxy configuration
+        as this one, except that its path also contains the segment given by
+        C{path} at the end.
+        """
+        return ReverseProxyResource(
+            self.host, self.port, self.path + '/' + urlquote(path, safe=""),
+            self.reactor)
+
+
+    def render(self, request):
+        """
+        Render a request by forwarding it to the proxied server.
+        """
+        # RFC 2616 tells us that we can omit the port if it's the default port,
+        # but we have to provide it otherwise
+        if self.port == 80:
+            host = self.host
+        else:
+            host = "%s:%d" % (self.host, self.port)
+        request.received_headers['host'] = host
+        request.content.seek(0, 0)
+        qs = urlparse.urlparse(request.uri)[4]
+        if qs:
+            rest = self.path + '?' + qs
+        else:
+            rest = self.path
+        clientFactory = self.proxyClientFactoryClass(
+            request.method, rest, request.clientproto,
+            request.getAllHeaders(), request.content.read(), request)
+        self.reactor.connectTCP(self.host, self.port, clientFactory)
+        return NOT_DONE_YET
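+
+
+# Usage sketch (illustrative only; the host, ports and path are placeholders):
+# relay requests arriving on local port 8080 to /app on another web server.
+def _exampleReverseProxy():
+    from twisted.web import server
+    root = ReverseProxyResource('www.example.com', 80, '/app')
+    reactor.listenTCP(8080, server.Site(root))
+    reactor.run()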
diff --git a/ThirdParty/Twisted/twisted/web/resource.py b/ThirdParty/Twisted/twisted/web/resource.py
new file mode 100644
index 0000000..4242305
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/resource.py
@@ -0,0 +1,405 @@
+# -*- test-case-name: twisted.web.test.test_web -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Implementation of the lowest-level Resource class.
+"""
+
+from __future__ import division, absolute_import
+
+__all__ = [
+    'IResource', 'getChildForRequest',
+    'Resource', 'ErrorPage', 'NoResource', 'ForbiddenResource',
+    'EncodingResourceWrapper']
+
+import warnings
+
+from zope.interface import Attribute, Interface, implementer
+
+from twisted.python.compat import nativeString, unicode
+from twisted.python._reflectpy3 import prefixedMethodNames
+from twisted.python.components import proxyForInterface
+
+from twisted.web._responses import FORBIDDEN, NOT_FOUND
+from twisted.web.error import UnsupportedMethod
+
+
+
+class IResource(Interface):
+    """
+    A web resource.
+    """
+
+    isLeaf = Attribute(
+        """
+        Signal if this IResource implementor is a "leaf node" or not. If True,
+        getChildWithDefault will not be called on this Resource.
+        """)
+
+
+    def getChildWithDefault(name, request):
+        """
+        Return a child with the given name for the given request.
+        This is the external interface used by the Resource publishing
+        machinery. If implementing IResource without subclassing
+        Resource, it must be provided. However, if subclassing Resource,
+        getChild should be overridden instead.
+
+        @param name: A single path component from a requested URL.  For example,
+            a request for I{http://example.com/foo/bar} will result in calls to
+            this method with C{b"foo"} and C{b"bar"} as values for this
+            argument.
+        @type name: C{bytes}
+
+        @param request: A representation of all of the information about the
+            request that is being made for this child.
+        @type request: L{twisted.web.server.Request}
+        """
+
+
+    def putChild(path, child):
+        """
+        Put a child IResource implementor at the given path.
+
+        @param path: A single path component, to be interpreted relative to the
+            path this resource is found at, at which to put the given child.
+            For example, if resource A can be found at I{http://example.com/foo}
+            then a call like C{A.putChild(b"bar", B)} will make resource B
+            available at I{http://example.com/foo/bar}.
+        @type path: C{bytes}
+        """
+
+
+    def render(request):
+        """
+        Render a request. This is called on the leaf resource for a request.
+
+        @return: Either C{server.NOT_DONE_YET} to indicate asynchronous rendering, or a
+            C{bytes} instance to write as the response to the request.  If
+            C{NOT_DONE_YET} is returned, at some point later (for example, in a
+            Deferred callback) call C{request.write(b"<html>")} to write data to
+            the request, and C{request.finish()} to send the data to the
+            browser.
+
+        @raise twisted.web.error.UnsupportedMethod: If the HTTP verb
+            requested is not supported by this resource.
+        """
+
+
+
+def getChildForRequest(resource, request):
+    """
+    Traverse the resource tree to find the resource that will handle the request.
+    """
+    while request.postpath and not resource.isLeaf:
+        pathElement = request.postpath.pop(0)
+        request.prepath.append(pathElement)
+        resource = resource.getChildWithDefault(pathElement, request)
+    return resource
+
+
+
+ at implementer(IResource)
+class Resource:
+    """
+    Define a web-accessible resource.
+
+    This serves two main purposes: one is to provide a standard representation
+    for what the HTTP specification calls an 'entity', and the other is to provide
+    an abstract directory structure for URL retrieval.
+    """
+    entityType = IResource
+
+    server = None
+
+    def __init__(self):
+        """
+        Initialize.
+        """
+        self.children = {}
+
+    isLeaf = 0
+
+    ### Abstract Collection Interface
+
+    def listStaticNames(self):
+        return list(self.children.keys())
+
+    def listStaticEntities(self):
+        return list(self.children.items())
+
+    def listNames(self):
+        return list(self.listStaticNames()) + self.listDynamicNames()
+
+    def listEntities(self):
+        return list(self.listStaticEntities()) + self.listDynamicEntities()
+
+    def listDynamicNames(self):
+        return []
+
+    def listDynamicEntities(self, request=None):
+        return []
+
+    def getStaticEntity(self, name):
+        return self.children.get(name)
+
+    def getDynamicEntity(self, name, request):
+        if not self.children.has_key(name):
+            return self.getChild(name, request)
+        else:
+            return None
+
+    def delEntity(self, name):
+        del self.children[name]
+
+    def reallyPutEntity(self, name, entity):
+        self.children[name] = entity
+
+    # Concrete HTTP interface
+
+    def getChild(self, path, request):
+        """
+        Retrieve a 'child' resource from me.
+
+        Implement this to create dynamic resource generation -- resources which
+        are always available may be registered with self.putChild().
+
+        This will not be called if the class-level variable 'isLeaf' is set in
+        your subclass; instead, the 'postpath' attribute of the request will be
+        left as a list of the remaining path elements.
+
+        For example, the URL /foo/bar/baz will normally be::
+
+          | site.resource.getChild('foo').getChild('bar').getChild('baz').
+
+        However, if the resource returned by 'bar' has isLeaf set to true, then
+        the getChild call will never be made on it.
+
+        Parameters and return value have the same meaning and requirements as
+        those defined by L{IResource.getChildWithDefault}.
+        """
+        return NoResource("No such child resource.")
+
+
+    def getChildWithDefault(self, path, request):
+        """
+        Retrieve a static or dynamically generated child resource from me.
+
+        First checks if a resource was added manually by putChild, and then
+        calls getChild to check for dynamic resources. Only override this if you want
+        to affect behaviour of all child lookups, rather than just dynamic
+        ones.
+
+        This will check to see if I have a pre-registered child resource of the
+        given name, and call getChild if I do not.
+
+        @see: L{IResource.getChildWithDefault}
+        """
+        if path in self.children:
+            return self.children[path]
+        return self.getChild(path, request)
+
+
+    def getChildForRequest(self, request):
+        warnings.warn("Please use module level getChildForRequest.", DeprecationWarning, 2)
+        return getChildForRequest(self, request)
+
+
+    def putChild(self, path, child):
+        """
+        Register a static child.
+
+        You almost certainly don't want '/' in your path. If you
+        intended to have the root of a folder, e.g. /foo/, you want
+        path to be ''.
+
+        @see: L{IResource.putChild}
+        """
+        self.children[path] = child
+        child.server = self.server
+
+
+    def render(self, request):
+        """
+        Render a given resource. See L{IResource}'s render method.
+
+        I delegate to methods of self with the form 'render_METHOD'
+        where METHOD is the HTTP method that was used to make the
+        request. Examples: render_GET, render_HEAD, render_POST, and
+        so on. Generally you should implement those methods instead of
+        overriding this one.
+
+        render_METHOD methods are expected to return a byte string which will be
+        the rendered page, unless the return value is C{server.NOT_DONE_YET}, in
+        which case it is this class's responsibility to write the results using
+        C{request.write(data)} and then call C{request.finish()}.
+
+        Old code that overrides render() directly is likewise expected
+        to return a byte string or NOT_DONE_YET.
+
+        @see: L{IResource.render}
+        """
+        m = getattr(self, 'render_' + nativeString(request.method), None)
+        if not m:
+            try:
+                allowedMethods = self.allowedMethods
+            except AttributeError:
+                allowedMethods = _computeAllowedMethods(self)
+            raise UnsupportedMethod(allowedMethods)
+        return m(request)
+
+
+    def render_HEAD(self, request):
+        """
+        Default handling of HEAD method.
+
+        I just return self.render_GET(request). When method is HEAD,
+        the framework will handle this correctly.
+        """
+        return self.render_GET(request)
+
+
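+# Usage sketch (illustrative only; the class, path and text below are
+# placeholders): a resource tree combining a static child registered with
+# putChild and dynamic children generated by getChild.
+class _ExampleGreeting(Resource):
+    isLeaf = True
+
+    def __init__(self, name=b"world"):
+        Resource.__init__(self)
+        self._name = name
+
+    def render_GET(self, request):
+        return b"<html><body>hello, " + self._name + b"</body></html>"
+
+
+class _ExampleRoot(Resource):
+    def getChild(self, path, request):
+        # Anything not registered with putChild is generated on the fly.
+        return _ExampleGreeting(path)
+
+# e.g. root = _ExampleRoot(); root.putChild(b"static", _ExampleGreeting())
+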
+
+def _computeAllowedMethods(resource):
+    """
+    Compute the allowed methods on a C{Resource} based on defined render_FOO
+    methods. Used when raising C{UnsupportedMethod} but C{Resource} does
+    not define the C{allowedMethods} attribute.
+    """
+    allowedMethods = []
+    for name in prefixedMethodNames(resource.__class__, "render_"):
+        # Potentially there should be an API for encode('ascii') in this
+        # situation - an API for taking a Python native string (bytes on Python
+        # 2, text on Python 3) and returning a socket-compatible string type.
+        allowedMethods.append(name.encode('ascii'))
+    return allowedMethods
+
+
+
+class ErrorPage(Resource):
+    """
+    L{ErrorPage} is a resource which responds with a particular
+    (parameterized) status and a body consisting of HTML containing some
+    descriptive text.  This is useful for rendering simple error pages.
+
+    @ivar template: A native string which will have a dictionary interpolated
+        into it to generate the response body.  The dictionary has the following
+        keys:
+
+          - C{"code"}: The status code passed to L{ErrorPage.__init__}.
+          - C{"brief"}: The brief description passed to L{ErrorPage.__init__}.
+          - C{"detail"}: The detailed description passed to
+            L{ErrorPage.__init__}.
+
+    @ivar code: An integer status code which will be used for the response.
+
+    @ivar brief: A short string which will be included in the response body.
+    @type brief: C{str}
+
+    @ivar detail: A longer string which will be included in the response body.
+    @type detail: C{str}
+    """
+
+    template = """
+<html>
+  <head><title>%(code)s - %(brief)s</title></head>
+  <body>
+    <h1>%(brief)s</h1>
+    <p>%(detail)s</p>
+  </body>
+</html>
+"""
+
+    def __init__(self, status, brief, detail):
+        Resource.__init__(self)
+        self.code = status
+        self.brief = brief
+        self.detail = detail
+
+
+    def render(self, request):
+        request.setResponseCode(self.code)
+        request.setHeader(b"content-type", b"text/html; charset=utf-8")
+        interpolated = self.template % dict(
+            code=self.code, brief=self.brief, detail=self.detail)
+        if isinstance(interpolated, unicode):
+            return interpolated.encode('utf-8')
+        return interpolated
+
+
+    def getChild(self, chnam, request):
+        return self
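+
+
+# Usage sketch (illustrative only; the status code and text are placeholders):
+# an ErrorPage can be returned from getChild or mounted like any other
+# resource.
+def _exampleErrorPage():
+    return ErrorPage(503, "Service Unavailable", "Try again shortly.")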
+
+
+
+class NoResource(ErrorPage):
+    """
+    L{NoResource} is a specialization of L{ErrorPage} which returns the HTTP
+    response code I{NOT FOUND}.
+    """
+    def __init__(self, message="Sorry. No luck finding that resource."):
+        ErrorPage.__init__(self, NOT_FOUND, "No Such Resource", message)
+
+
+
+class ForbiddenResource(ErrorPage):
+    """
+    L{ForbiddenResource} is a specialization of L{ErrorPage} which returns the
+    I{FORBIDDEN} HTTP response code.
+    """
+    def __init__(self, message="Sorry, resource is forbidden."):
+        ErrorPage.__init__(self, FORBIDDEN, "Forbidden Resource", message)
+
+
+
+class _IEncodingResource(Interface):
+    """
+    A resource which knows about L{_IRequestEncoderFactory}.
+
+    @since: 12.3
+    """
+
+    def getEncoder(request):
+        """
+        Parse the request and return an encoder if applicable, using
+        L{_IRequestEncoderFactory.encoderForRequest}.
+
+        @return: A L{_IRequestEncoder}, or C{None}.
+        """
+
+
+
+ at implementer(_IEncodingResource)
+class EncodingResourceWrapper(proxyForInterface(IResource)):
+    """
+    Wrap a L{IResource}, potentially applying an encoding to the response body
+    generated.
+
+    Note that the returned child resources won't be wrapped, so you have to
+    explicitly wrap them if you want the encoding to be applied.
+
+    @ivar encoders: A list of
+        L{_IRequestEncoderFactory<twisted.web.iweb._IRequestEncoderFactory>}
+        returning L{_IRequestEncoder<twisted.web.iweb._IRequestEncoder>} that
+        may transform the data passed to C{Request.write}. The list must be
+        sorted in order of priority: the first encoder factory handling the
+        request will prevent the others from doing the same.
+    @type encoders: C{list}.
+
+    @since: 12.3
+    """
+
+    def __init__(self, original, encoders):
+        super(EncodingResourceWrapper, self).__init__(original)
+        self._encoders = encoders
+
+
+    def getEncoder(self, request):
+        """
+        Browse the list of encoders, looking for an applicable one.
+        """
+        for encoderFactory in self._encoders:
+            encoder = encoderFactory.encoderForRequest(request)
+            if encoder is not None:
+                return encoder
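+
+
+# Usage sketch (illustrative only): wrap a resource so its responses may be
+# gzip-compressed; GzipEncoderFactory is provided by twisted.web.server.
+def _exampleGzipWrapped(wrapped):
+    from twisted.web.server import GzipEncoderFactory
+    return EncodingResourceWrapper(wrapped, [GzipEncoderFactory()])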
diff --git a/ThirdParty/Twisted/twisted/web/rewrite.py b/ThirdParty/Twisted/twisted/web/rewrite.py
new file mode 100644
index 0000000..b5366b4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/rewrite.py
@@ -0,0 +1,52 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+from twisted.web import resource
+
+class RewriterResource(resource.Resource):
+
+    def __init__(self, orig, *rewriteRules):
+        resource.Resource.__init__(self)
+        self.resource = orig
+        self.rewriteRules = list(rewriteRules)
+
+    def _rewrite(self, request):
+        for rewriteRule in self.rewriteRules:
+            rewriteRule(request)
+
+    def getChild(self, path, request):
+        request.postpath.insert(0, path)
+        request.prepath.pop()
+        self._rewrite(request)
+        path = request.postpath.pop(0)
+        request.prepath.append(path)
+        return self.resource.getChildWithDefault(path, request)
+
+    def render(self, request):
+        self._rewrite(request)
+        return self.resource.render(request)
+
+
+def tildeToUsers(request):
+    if request.postpath and request.postpath[0][:1]=='~':
+        request.postpath[:1] = ['users', request.postpath[0][1:]]
+        request.path = '/'+'/'.join(request.prepath+request.postpath)
+
+def alias(aliasPath, sourcePath):
+    """
+    I am not a very good aliaser. But I'm the best I can be. If I'm
+    aliasing to a Resource that generates links, and it uses any parts
+    of request.prepath to do so, the links will not be relative to the
+    aliased path, but rather to the aliased-to path. That is, I can't
+    alias static.File directory listings that nicely. However, I can
+    still be useful, as many resources will play nice.
+    """
+    sourcePath = sourcePath.split('/')
+    aliasPath = aliasPath.split('/')
+    def rewriter(request):
+        if request.postpath[:len(aliasPath)] == aliasPath:
+            after = request.postpath[len(aliasPath):]
+            request.postpath = sourcePath + after
+            request.path = '/'+'/'.join(request.prepath+request.postpath)
+    return rewriter
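+
+
+# Usage sketch (illustrative only; the paths are placeholders): rewrite
+# requests for /about/* to /pages/about/* before the wrapped resource
+# resolves them, and map /~user paths to /users/user.
+def _exampleRewriter(root):
+    return RewriterResource(root, alias('about', 'pages/about'), tildeToUsers)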
diff --git a/ThirdParty/Twisted/twisted/web/script.py b/ThirdParty/Twisted/twisted/web/script.py
new file mode 100644
index 0000000..1b50105
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/script.py
@@ -0,0 +1,170 @@
+# -*- test-case-name: twisted.web.test.test_script -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+I contain PythonScript, which is a very simple python script resource.
+"""
+
+import os, traceback
+
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+
+from twisted import copyright
+from twisted.python.compat import execfile
+from twisted.web import http, server, static, resource, html
+
+
+rpyNoResource = """<p>You forgot to assign to the variable "resource" in your script. For example:</p>
+<pre>
+# MyCoolWebApp.rpy
+
+import mygreatresource
+
+resource = mygreatresource.MyGreatResource()
+</pre>
+"""
+
+class AlreadyCached(Exception):
+    """This exception is raised when a path has already been cached.
+    """
+
+class CacheScanner:
+    def __init__(self, path, registry):
+        self.path = path
+        self.registry = registry
+        self.doCache = 0
+
+    def cache(self):
+        c = self.registry.getCachedPath(self.path)
+        if c is not None:
+            raise AlreadyCached(c)
+        self.recache()
+
+    def recache(self):
+        self.doCache = 1
+
+noRsrc = resource.ErrorPage(500, "Whoops! Internal Error", rpyNoResource)
+
+def ResourceScript(path, registry):
+    """
+    I am a normal py file which must define a 'resource' global, which should
+    be an instance of (a subclass of) web.resource.Resource; it will be
+    rendered.
+    """
+    cs = CacheScanner(path, registry)
+    glob = {'__file__': path,
+            'resource': noRsrc,
+            'registry': registry,
+            'cache': cs.cache,
+            'recache': cs.recache}
+    try:
+        execfile(path, glob, glob)
+    except AlreadyCached, ac:
+        return ac.args[0]
+    rsrc = glob['resource']
+    if cs.doCache and rsrc is not noRsrc:
+        registry.cachePath(path, rsrc)
+    return rsrc
+
+def ResourceTemplate(path, registry):
+    from quixote import ptl_compile
+
+    glob = {'__file__': path,
+            'resource': resource.ErrorPage(500, "Whoops! Internal Error",
+                                           rpyNoResource),
+            'registry': registry}
+
+    e = ptl_compile.compile_template(open(path), path)
+    exec e in glob
+    return glob['resource']
+
+
+class ResourceScriptWrapper(resource.Resource):
+
+    def __init__(self, path, registry=None):
+        resource.Resource.__init__(self)
+        self.path = path
+        self.registry = registry or static.Registry()
+
+    def render(self, request):
+        res = ResourceScript(self.path, self.registry)
+        return res.render(request)
+
+    def getChildWithDefault(self, path, request):
+        res = ResourceScript(self.path, self.registry)
+        return res.getChildWithDefault(path, request)
+
+
+
+class ResourceScriptDirectory(resource.Resource):
+    """
+    L{ResourceScriptDirectory} is a resource which serves scripts from a
+    filesystem directory.  File children of a L{ResourceScriptDirectory} will
+    be served using L{ResourceScript}.  Directory children will be served using
+    another L{ResourceScriptDirectory}.
+
+    @ivar path: A C{str} giving the filesystem path in which children will be
+        looked up.
+
+    @ivar registry: A L{static.Registry} instance which will be used to decide
+        how to interpret scripts found as children of this resource.
+    """
+    def __init__(self, pathname, registry=None):
+        resource.Resource.__init__(self)
+        self.path = pathname
+        self.registry = registry or static.Registry()
+
+    def getChild(self, path, request):
+        fn = os.path.join(self.path, path)
+
+        if os.path.isdir(fn):
+            return ResourceScriptDirectory(fn, self.registry)
+        if os.path.exists(fn):
+            return ResourceScript(fn, self.registry)
+        return resource.NoResource()
+
+    def render(self, request):
+        return resource.NoResource().render(request)
+
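+# Usage sketch (illustrative only; the directory path is a placeholder):
+# serve .rpy resource scripts out of a filesystem directory.
+def _exampleScriptTree():
+    return ResourceScriptDirectory('/var/www/scripts')
+
+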
+
+class PythonScript(resource.Resource):
+    """I am an extremely simple dynamic resource; an embedded python script.
+
+    This will execute a file (usually of the extension '.epy') as Python code,
+    internal to the webserver.
+    """
+    isLeaf = 1
+    def __init__(self, filename, registry):
+        """Initialize me with a script name.
+        """
+        self.filename = filename
+        self.registry = registry
+
+    def render(self, request):
+        """Render me to a web client.
+
+        Load my file, execute it in a special namespace (with 'request' and
+        '__file__' global vars) and finish the request.  Output to the web-page
+        will NOT be handled with print - standard output goes to the log - but
+        with request.write.
+        """
+        request.setHeader("x-powered-by","Twisted/%s" % copyright.version)
+        namespace = {'request': request,
+                     '__file__': self.filename,
+                     'registry': self.registry}
+        try:
+            execfile(self.filename, namespace, namespace)
+        except IOError, e:
+            if e.errno == 2: #file not found
+                request.setResponseCode(http.NOT_FOUND)
+                request.write(resource.NoResource("File not found.").render(request))
+        except:
+            io = StringIO.StringIO()
+            traceback.print_exc(file=io)
+            request.write(html.PRE(io.getvalue()))
+        request.finish()
+        return server.NOT_DONE_YET
diff --git a/ThirdParty/Twisted/twisted/web/server.py b/ThirdParty/Twisted/twisted/web/server.py
new file mode 100644
index 0000000..bc3f07c
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/server.py
@@ -0,0 +1,723 @@
+# -*- test-case-name: twisted.web.test.test_web -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+This is a web-server which integrates with the twisted.internet
+infrastructure.
+"""
+
+from __future__ import division, absolute_import
+
+import warnings
+import string
+import types
+import copy
+import os
+try:
+    from urllib import quote
+except ImportError:
+    from urllib.parse import quote as _quote
+
+    def quote(string, *args, **kwargs):
+        return _quote(string.decode('charmap'), *args, **kwargs).encode('charmap')
+
+import zlib
+
+from zope.interface import implementer
+
+from twisted.python.compat import _PY3, networkString, nativeString, intToBytes
+if _PY3:
+    class Copyable:
+        """
+        Fake mixin, until twisted.spread is ported.
+        """
+else:
+    from twisted.spread.pb import Copyable, ViewPoint
+from twisted.internet import address, task
+from twisted.web import iweb, http, html
+from twisted.web.http import unquote
+from twisted.python import log, _reflectpy3 as reflect, failure, components
+from twisted import copyright
+# Re-enable as part of #6178 when twisted.web.util is ported to Python 3:
+if not _PY3:
+    from twisted.web import util as webutil
+from twisted.web import resource
+from twisted.web.error import UnsupportedMethod
+
+from twisted.python.versions import Version
+from twisted.python.deprecate import deprecatedModuleAttribute
+
+if _PY3:
+    # cgi.escape is deprecated in Python 3.
+    from html import escape
+else:
+    from cgi import escape
+
+
+NOT_DONE_YET = 1
+
+__all__ = [
+    'supportedMethods',
+    'Request',
+    'Session',
+    'Site',
+    'version',
+    'NOT_DONE_YET',
+    'GzipEncoderFactory'
+]
+
+
+# backwards compatibility
+deprecatedModuleAttribute(
+    Version("Twisted", 12, 1, 0),
+    "Please use twisted.web.http.datetimeToString instead",
+    "twisted.web.server",
+    "date_time_string")
+deprecatedModuleAttribute(
+    Version("Twisted", 12, 1, 0),
+    "Please use twisted.web.http.stringToDatetime instead",
+    "twisted.web.server",
+    "string_date_time")
+date_time_string = http.datetimeToString
+string_date_time = http.stringToDatetime
+
+# Support for other methods may be implemented on a per-resource basis.
+supportedMethods = ('GET', 'HEAD', 'POST')
+
+
+def _addressToTuple(addr):
+    if isinstance(addr, address.IPv4Address):
+        return ('INET', addr.host, addr.port)
+    elif isinstance(addr, address.UNIXAddress):
+        return ('UNIX', addr.name)
+    else:
+        return tuple(addr)
+
+
+
+ at implementer(iweb.IRequest)
+class Request(Copyable, http.Request, components.Componentized):
+    """
+    An HTTP request.
+
+    @ivar defaultContentType: A C{bytes} giving the default I{Content-Type}
+        value to send in responses if no other value is set.  C{None} disables
+        the default.
+    """
+
+    defaultContentType = b"text/html"
+
+    site = None
+    appRootURL = None
+    __pychecker__ = 'unusednames=issuer'
+    _inFakeHead = False
+    _encoder = None
+
+    def __init__(self, *args, **kw):
+        http.Request.__init__(self, *args, **kw)
+        components.Componentized.__init__(self)
+
+    def getStateToCopyFor(self, issuer):
+        x = self.__dict__.copy()
+        del x['transport']
+        # XXX refactor this attribute out; it's from protocol
+        # del x['server']
+        del x['channel']
+        del x['content']
+        del x['site']
+        self.content.seek(0, 0)
+        x['content_data'] = self.content.read()
+        x['remote'] = ViewPoint(issuer, self)
+
+        # Address objects aren't jellyable
+        x['host'] = _addressToTuple(x['host'])
+        x['client'] = _addressToTuple(x['client'])
+
+        # Header objects also aren't jellyable.
+        x['requestHeaders'] = list(x['requestHeaders'].getAllRawHeaders())
+
+        return x
+
+    # HTML generation helpers
+
+    def sibLink(self, name):
+        """
+        Return the text that links to a sibling of the requested resource.
+        """
+        if self.postpath:
+            return (len(self.postpath)*b"../") + name
+        else:
+            return name
+
+
+    def childLink(self, name):
+        """
+        Return the text that links to a child of the requested resource.
+        """
+        lpp = len(self.postpath)
+        if lpp > 1:
+            return ((lpp-1)*b"../") + name
+        elif lpp == 1:
+            return name
+        else: # lpp == 0
+            if len(self.prepath) and self.prepath[-1]:
+                return self.prepath[-1] + b'/' + name
+            else:
+                return name
+
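+    # Editor's illustrative sketch (not part of the upstream file): for a
+    # request whose postpath is [b'foo', b'bar'], sibLink(b'baz') returns
+    # b'../../baz' and childLink(b'baz') returns b'../baz', so generated
+    # links stay relative to the requested URL.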
+
+    def process(self):
+        """
+        Process a request.
+        """
+
+        # get site from channel
+        self.site = self.channel.site
+
+        # set various default headers
+        self.setHeader(b'server', version)
+        self.setHeader(b'date', http.datetimeToString())
+
+        # Resource Identification
+        self.prepath = []
+        self.postpath = list(map(unquote, self.path[1:].split(b'/')))
+
+        try:
+            resrc = self.site.getResourceFor(self)
+            if resource._IEncodingResource.providedBy(resrc):
+                encoder = resrc.getEncoder(self)
+                if encoder is not None:
+                    self._encoder = encoder
+            self.render(resrc)
+        except:
+            self.processingFailed(failure.Failure())
+
+
+    def write(self, data):
+        """
+        Write data to the transport (if not responding to a HEAD request).
+
+        @param data: A string to write to the response.
+        """
+        if not self.startedWriting:
+            # Before doing the first write, check to see if a default
+            # Content-Type header should be supplied.
+            modified = self.code != http.NOT_MODIFIED
+            contentType = self.responseHeaders.getRawHeaders(b'content-type')
+            if modified and contentType is None and self.defaultContentType is not None:
+                self.responseHeaders.setRawHeaders(
+                    b'content-type', [self.defaultContentType])
+
+        # Only let the write happen if we're not generating a HEAD response by
+        # faking out the request method.  Note, if we are doing that,
+        # startedWriting will never be true, and the above logic may run
+        # multiple times.  It will only actually change the responseHeaders once
+        # though, so it's still okay.
+        if not self._inFakeHead:
+            if self._encoder:
+                data = self._encoder.encode(data)
+            http.Request.write(self, data)
+
+
+    def finish(self):
+        """
+        Override C{http.Request.finish} for possible encoding.
+        """
+        if self._encoder:
+            data = self._encoder.finish()
+            if data:
+                http.Request.write(self, data)
+        return http.Request.finish(self)
+
+
+    def render(self, resrc):
+        """
+        Ask a resource to render itself.
+
+        @param resrc: a L{twisted.web.resource.IResource}.
+        """
+        try:
+            body = resrc.render(self)
+        except UnsupportedMethod as e:
+            allowedMethods = e.allowedMethods
+            if (self.method == b"HEAD") and (b"GET" in allowedMethods):
+                # We must support HEAD (RFC 2616, 5.1.1).  If the
+                # resource doesn't, fake it by giving the resource
+                # a 'GET' request and then return only the headers,
+                # not the body.
+                log.msg("Using GET to fake a HEAD request for %s" %
+                        (resrc,))
+                self.method = b"GET"
+                self._inFakeHead = True
+                body = resrc.render(self)
+
+                if body is NOT_DONE_YET:
+                    log.msg("Tried to fake a HEAD request for %s, but "
+                            "it got away from me." % resrc)
+                    # Oh well, I guess we won't include the content length.
+                else:
+                    self.setHeader(b'content-length', intToBytes(len(body)))
+
+                self._inFakeHead = False
+                self.method = b"HEAD"
+                self.write(b'')
+                self.finish()
+                return
+
+            if self.method in (supportedMethods):
+                # We MUST include an Allow header
+                # (RFC 2616, 10.4.6 and 14.7)
+                self.setHeader('Allow', ', '.join(allowedMethods))
+                s = ('''Your browser approached me (at %(URI)s) with'''
+                     ''' the method "%(method)s".  I only allow'''
+                     ''' the method%(plural)s %(allowed)s here.''' % {
+                    'URI': escape(self.uri),
+                    'method': self.method,
+                    'plural': ((len(allowedMethods) > 1) and 's') or '',
+                    'allowed': ', '.join(allowedMethods)
+                    })
+                epage = resource.ErrorPage(http.NOT_ALLOWED,
+                                           "Method Not Allowed", s)
+                body = epage.render(self)
+            else:
+                epage = resource.ErrorPage(
+                    http.NOT_IMPLEMENTED, "Huh?",
+                    "I don't know how to treat a %s request." %
+                    (escape(self.method.decode("charmap")),))
+                body = epage.render(self)
+        # end except UnsupportedMethod
+
+        if body == NOT_DONE_YET:
+            return
+        if not isinstance(body, bytes):
+            body = resource.ErrorPage(
+                http.INTERNAL_SERVER_ERROR,
+                "Request did not return bytes",
+                "Request: " + html.PRE(reflect.safe_repr(self)) + "<br />" +
+                "Resource: " + html.PRE(reflect.safe_repr(resrc)) + "<br />" +
+                "Value: " + html.PRE(reflect.safe_repr(body))).render(self)
+
+        if self.method == b"HEAD":
+            if len(body) > 0:
+                # This is a Bad Thing (RFC 2616, 9.4)
+                log.msg("Warning: HEAD request %s for resource %s is"
+                        " returning a message body."
+                        "  I think I'll eat it."
+                        % (self, resrc))
+                self.setHeader(b'content-length',
+                               intToBytes(len(body)))
+            self.write(b'')
+        else:
+            self.setHeader(b'content-length',
+                           intToBytes(len(body)))
+            self.write(body)
+        self.finish()
+
+    def processingFailed(self, reason):
+        log.err(reason)
+        # Re-enable on Python 3 as part of #6178:
+        if not _PY3 and self.site.displayTracebacks:
+            body = ("<html><head><title>web.Server Traceback (most recent call last)</title></head>"
+                    "<body><b>web.Server Traceback (most recent call last):</b>\n\n"
+                    "%s\n\n</body></html>\n"
+                    % webutil.formatFailure(reason))
+        else:
+            body = (b"<html><head><title>Processing Failed</title></head><body>"
+                    b"<b>Processing Failed</b></body></html>")
+
+        self.setResponseCode(http.INTERNAL_SERVER_ERROR)
+        self.setHeader(b'content-type', b"text/html")
+        self.setHeader(b'content-length', intToBytes(len(body)))
+        self.write(body)
+        self.finish()
+        return reason
+
+    def view_write(self, issuer, data):
+        """Remote version of write; same interface.
+        """
+        self.write(data)
+
+    def view_finish(self, issuer):
+        """Remote version of finish; same interface.
+        """
+        self.finish()
+
+    def view_addCookie(self, issuer, k, v, **kwargs):
+        """Remote version of addCookie; same interface.
+        """
+        self.addCookie(k, v, **kwargs)
+
+    def view_setHeader(self, issuer, k, v):
+        """Remote version of setHeader; same interface.
+        """
+        self.setHeader(k, v)
+
+    def view_setLastModified(self, issuer, when):
+        """Remote version of setLastModified; same interface.
+        """
+        self.setLastModified(when)
+
+    def view_setETag(self, issuer, tag):
+        """Remote version of setETag; same interface.
+        """
+        self.setETag(tag)
+
+
+    def view_setResponseCode(self, issuer, code, message=None):
+        """
+        Remote version of setResponseCode; same interface.
+        """
+        self.setResponseCode(code, message)
+
+
+    def view_registerProducer(self, issuer, producer, streaming):
+        """Remote version of registerProducer; same interface.
+        (requires a remote producer.)
+        """
+        self.registerProducer(_RemoteProducerWrapper(producer), streaming)
+
+    def view_unregisterProducer(self, issuer):
+        self.unregisterProducer()
+
+    ### these calls remain local
+
+    session = None
+
+    def getSession(self, sessionInterface = None):
+        # Session management
+        if not self.session:
+            cookiename = b"_".join([b'TWISTED_SESSION'] + self.sitepath)
+            sessionCookie = self.getCookie(cookiename)
+            if sessionCookie:
+                try:
+                    self.session = self.site.getSession(sessionCookie)
+                except KeyError:
+                    pass
+            # if it still hasn't been set, fix it up.
+            if not self.session:
+                self.session = self.site.makeSession()
+                self.addCookie(cookiename, self.session.uid, path=b'/')
+        self.session.touch()
+        if sessionInterface:
+            return self.session.getComponent(sessionInterface)
+        return self.session
+
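+    # Editor's illustrative sketch (not part of the upstream file): inside a
+    # resource's render method, per-user state can be hung off the session
+    # returned above; the attribute name "visits" here is hypothetical:
+    #
+    #     session = request.getSession()
+    #     session.visits = getattr(session, 'visits', 0) + 1
+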
+    def _prePathURL(self, prepath):
+        port = self.getHost().port
+        if self.isSecure():
+            default = 443
+        else:
+            default = 80
+        if port == default:
+            hostport = ''
+        else:
+            hostport = ':%d' % port
+        prefix = networkString('http%s://%s%s/' % (
+            self.isSecure() and 's' or '',
+            nativeString(self.getRequestHostname()),
+            hostport))
+        path = b'/'.join([quote(segment, safe=b'') for segment in prepath])
+        return prefix + path
+
+    def prePathURL(self):
+        return self._prePathURL(self.prepath)
+
+    def URLPath(self):
+        from twisted.python import urlpath
+        return urlpath.URLPath.fromRequest(self)
+
+    def rememberRootURL(self):
+        """
+        Remember the currently-processed part of the URL for later
+        recalling.
+        """
+        url = self._prePathURL(self.prepath[:-1])
+        self.appRootURL = url
+
+    def getRootURL(self):
+        """
+        Get a previously-remembered URL.
+        """
+        return self.appRootURL
+
+
+
+@implementer(iweb._IRequestEncoderFactory)
+class GzipEncoderFactory(object):
+    """
+    @cvar compressLevel: The compression level used by the compressor,
+        defaulting to 9 (highest).
+
+    @since: 12.3
+    """
+
+    compressLevel = 9
+
+    def encoderForRequest(self, request):
+        """
+        Check the headers to see if the client accepts gzip encoding, and
+        encode the response if so.
+        """
+        acceptHeaders = request.requestHeaders.getRawHeaders(
+            'accept-encoding', [])
+        supported = ','.join(acceptHeaders).split(',')
+        if 'gzip' in supported:
+            encoding = request.responseHeaders.getRawHeaders(
+                'content-encoding')
+            if encoding:
+                encoding = '%s,gzip' % ','.join(encoding)
+            else:
+                encoding = 'gzip'
+
+            request.responseHeaders.setRawHeaders('content-encoding',
+                                                  [encoding])
+            return _GzipEncoder(self.compressLevel, request)
+
+
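+# Editor's illustrative sketch (not part of the upstream file): a factory
+# like the one above is normally attached to a resource with
+# twisted.web.resource.EncodingResourceWrapper (assumed available, Twisted
+# 12.3+), e.g.:
+#
+#     from twisted.web.resource import EncodingResourceWrapper
+#     wrapped = EncodingResourceWrapper(someResource, [GzipEncoderFactory()])
+#
+# Requests whose Accept-Encoding header includes "gzip" are then compressed
+# on the fly by the _GzipEncoder defined below.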
+
+@implementer(iweb._IRequestEncoder)
+class _GzipEncoder(object):
+    """
+    An encoder which supports gzip.
+
+    @ivar _zlibCompressor: The zlib compressor instance used to compress the
+        stream.
+
+    @ivar _request: A reference to the originating request.
+
+    @since: 12.3
+    """
+
+    _zlibCompressor = None
+
+    def __init__(self, compressLevel, request):
+        self._zlibCompressor = zlib.compressobj(
+            compressLevel, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
+        self._request = request
+
+
+    def encode(self, data):
+        """
+        Write to the request, automatically compressing data on the fly.
+        """
+        if not self._request.startedWriting:
+            # Remove the content-length header, we can't honor it
+            # because we compress on the fly.
+            self._request.responseHeaders.removeHeader(b'content-length')
+        return self._zlibCompressor.compress(data)
+
+
+    def finish(self):
+        """
+        Finish handling the request, flushing any data from the zlib
+        buffer.
+        """
+        remain = self._zlibCompressor.flush()
+        self._zlibCompressor = None
+        return remain
+
+
+
+class _RemoteProducerWrapper:
+    def __init__(self, remote):
+        self.resumeProducing = remote.remoteMethod("resumeProducing")
+        self.pauseProducing = remote.remoteMethod("pauseProducing")
+        self.stopProducing = remote.remoteMethod("stopProducing")
+
+
+class Session(components.Componentized):
+    """
+    A user's session with a system.
+
+    This utility class contains no functionality, but is used to
+    represent a session.
+
+    @ivar uid: A unique identifier for the session, C{bytes}.
+    @ivar _reactor: An object providing L{IReactorTime} to use for scheduling
+        expiration.
+    @ivar sessionTimeout: timeout of a session, in seconds.
+    @ivar loopFactory: Deprecated in Twisted 9.0.  Does nothing.  Do not use.
+    """
+    sessionTimeout = 900
+    loopFactory = task.LoopingCall
+
+    _expireCall = None
+
+    def __init__(self, site, uid, reactor=None):
+        """
+        Initialize a session with a unique ID for that session.
+        """
+        components.Componentized.__init__(self)
+
+        if reactor is None:
+            from twisted.internet import reactor
+        self._reactor = reactor
+
+        self.site = site
+        self.uid = uid
+        self.expireCallbacks = []
+        self.touch()
+        self.sessionNamespaces = {}
+
+
+    def startCheckingExpiration(self, lifetime=None):
+        """
+        Start expiration tracking.
+
+        @param lifetime: Ignored; deprecated.
+
+        @return: C{None}
+        """
+        if lifetime is not None:
+            warnings.warn(
+                "The lifetime parameter to startCheckingExpiration is "
+                "deprecated since Twisted 9.0.  See Session.sessionTimeout "
+                "instead.", DeprecationWarning, stacklevel=2)
+        self._expireCall = self._reactor.callLater(
+            self.sessionTimeout, self.expire)
+
+
+    def notifyOnExpire(self, callback):
+        """
+        Call this callback when the session expires or logs out.
+        """
+        self.expireCallbacks.append(callback)
+
+
+    def expire(self):
+        """
+        Expire/logout of the session.
+        """
+        del self.site.sessions[self.uid]
+        for c in self.expireCallbacks:
+            c()
+        self.expireCallbacks = []
+        if self._expireCall and self._expireCall.active():
+            self._expireCall.cancel()
+            # Break reference cycle.
+            self._expireCall = None
+
+
+    def touch(self):
+        """
+        Notify session modification.
+        """
+        self.lastModified = self._reactor.seconds()
+        if self._expireCall is not None:
+            self._expireCall.reset(self.sessionTimeout)
+
+
+    def checkExpired(self):
+        """
+        Deprecated; does nothing.
+        """
+        warnings.warn(
+            "Session.checkExpired is deprecated since Twisted 9.0; sessions "
+            "check themselves now, you don't need to.",
+            stacklevel=2, category=DeprecationWarning)
+
+
+version = networkString("TwistedWeb/%s" % (copyright.version,))
+
+
+class Site(http.HTTPFactory):
+    """
+    A web site: manages logging, sessions, and resources.
+
+    @ivar counter: increment value used for generating unique session IDs.
+    @ivar requestFactory: factory creating request objects. Defaults to
+        L{Request}.
+    @ivar displayTracebacks: if set, Twisted internal errors are displayed on
+        rendered pages. Defaults to C{True}.
+    @ivar sessionFactory: factory for session objects. Defaults to L{Session}.
+    @ivar sessionCheckTime: Deprecated.  See L{Session.sessionTimeout} instead.
+    """
+    counter = 0
+    requestFactory = Request
+    displayTracebacks = True
+    sessionFactory = Session
+    sessionCheckTime = 1800
+
+    def __init__(self, resource, logPath=None, timeout=60*60*12):
+        """
+        Initialize.
+        """
+        http.HTTPFactory.__init__(self, logPath=logPath, timeout=timeout)
+        self.sessions = {}
+        self.resource = resource
+
+    def _openLogFile(self, path):
+        from twisted.python import logfile
+        return logfile.LogFile(os.path.basename(path), os.path.dirname(path))
+
+    def __getstate__(self):
+        d = self.__dict__.copy()
+        d['sessions'] = {}
+        return d
+
+    def _mkuid(self):
+        """
+        (internal) Generate an opaque, unique ID for a user's session.
+        """
+        from twisted.python.hashlib import md5
+        import random
+        self.counter = self.counter + 1
+        return md5(networkString(
+                "%s_%s" % (str(random.random()) , str(self.counter)))
+                   ).hexdigest()
+
+    def makeSession(self):
+        """
+        Generate a new Session instance, and store it for future reference.
+        """
+        uid = self._mkuid()
+        session = self.sessions[uid] = self.sessionFactory(self, uid)
+        session.startCheckingExpiration()
+        return session
+
+    def getSession(self, uid):
+        """
+        Get a previously generated session, by its unique ID.
+        This raises a KeyError if the session is not found.
+        """
+        return self.sessions[uid]
+
+    def buildProtocol(self, addr):
+        """
+        Generate a channel attached to this site.
+        """
+        channel = http.HTTPFactory.buildProtocol(self, addr)
+        channel.requestFactory = self.requestFactory
+        channel.site = self
+        return channel
+
+    isLeaf = 0
+
+    def render(self, request):
+        """
+        Redirect because a Site is always a directory.
+        """
+        request.redirect(request.prePathURL() + b'/')
+        request.finish()
+
+    def getChildWithDefault(self, pathEl, request):
+        """
+        Emulate a resource's getChild method.
+        """
+        request.site = self
+        return self.resource.getChildWithDefault(pathEl, request)
+
+    def getResourceFor(self, request):
+        """
+        Get a resource for a request.
+
+        This iterates through the resource hierarchy, calling
+        getChildWithDefault on each resource it finds for a path element,
+        stopping when it hits an element where isLeaf is true.
+        """
+        request.site = self
+        # Sitepath is used to determine cookie names between distributed
+        # servers and disconnected sites.
+        request.sitepath = copy.copy(request.prepath)
+        return resource.getChildForRequest(self.resource, request)
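+
+# Editor's illustrative sketch (not part of the upstream file): the pieces
+# above are normally wired together as follows, where HelloResource stands
+# in for any IResource implementation:
+#
+#     from twisted.internet import reactor
+#     from twisted.web.server import Site
+#
+#     factory = Site(HelloResource())
+#     reactor.listenTCP(8080, factory)
+#     reactor.run()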
diff --git a/ThirdParty/Twisted/twisted/web/soap.py b/ThirdParty/Twisted/twisted/web/soap.py
new file mode 100644
index 0000000..1ca747b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/soap.py
@@ -0,0 +1,154 @@
+# -*- test-case-name: twisted.web.test.test_soap -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+SOAP support for twisted.web.
+
+Requires SOAPpy 0.10.1 or later.
+
+Maintainer: Itamar Shtull-Trauring
+
+Future plans:
+SOAPContext support of some kind.
+Pluggable method lookup policies.
+"""
+
+# SOAPpy
+import SOAPpy
+
+# twisted imports
+from twisted.web import server, resource, client
+from twisted.internet import defer
+
+
+class SOAPPublisher(resource.Resource):
+    """Publish SOAP methods.
+
+    By default, publish methods beginning with 'soap_'. If the method
+    has an attribute 'useKeywords', it will get the arguments passed
+    as keyword args.
+    """
+
+    isLeaf = 1
+
+    # override to change the encoding used for responses
+    encoding = "UTF-8"
+
+    def lookupFunction(self, functionName):
+        """Lookup published SOAP function.
+
+        Override in subclasses. Default behaviour - publish methods
+        starting with soap_.
+
+        @return: callable or None if not found.
+        """
+        return getattr(self, "soap_%s" % functionName, None)
+
+    def render(self, request):
+        """Handle a SOAP command."""
+        data = request.content.read()
+
+        p, header, body, attrs = SOAPpy.parseSOAPRPC(data, 1, 1, 1)
+
+        methodName, args, kwargs, ns = p._name, p._aslist, p._asdict, p._ns
+
+        # deal with changes in SOAPpy 0.11
+        if callable(args):
+            args = args()
+        if callable(kwargs):
+            kwargs = kwargs()
+
+        function = self.lookupFunction(methodName)
+
+        if not function:
+            self._methodNotFound(request, methodName)
+            return server.NOT_DONE_YET
+        else:
+            if hasattr(function, "useKeywords"):
+                keywords = {}
+                for k, v in kwargs.items():
+                    keywords[str(k)] = v
+                d = defer.maybeDeferred(function, **keywords)
+            else:
+                d = defer.maybeDeferred(function, *args)
+
+        d.addCallback(self._gotResult, request, methodName)
+        d.addErrback(self._gotError, request, methodName)
+        return server.NOT_DONE_YET
+
+    def _methodNotFound(self, request, methodName):
+        response = SOAPpy.buildSOAP(SOAPpy.faultType("%s:Client" %
+            SOAPpy.NS.ENV_T, "Method %s not found" % methodName),
+            encoding=self.encoding)
+        self._sendResponse(request, response, status=500)
+
+    def _gotResult(self, result, request, methodName):
+        if not isinstance(result, SOAPpy.voidType):
+            result = {"Result": result}
+        response = SOAPpy.buildSOAP(kw={'%sResponse' % methodName: result},
+                                  encoding=self.encoding)
+        self._sendResponse(request, response)
+
+    def _gotError(self, failure, request, methodName):
+        e = failure.value
+        if isinstance(e, SOAPpy.faultType):
+            fault = e
+        else:
+            fault = SOAPpy.faultType("%s:Server" % SOAPpy.NS.ENV_T,
+                "Method %s failed." % methodName)
+        response = SOAPpy.buildSOAP(fault, encoding=self.encoding)
+        self._sendResponse(request, response, status=500)
+
+    def _sendResponse(self, request, response, status=200):
+        request.setResponseCode(status)
+
+        if self.encoding is not None:
+            mimeType = 'text/xml; charset="%s"' % self.encoding
+        else:
+            mimeType = "text/xml"
+        request.setHeader("Content-type", mimeType)
+        request.setHeader("Content-length", str(len(response)))
+        request.write(response)
+        request.finish()
+
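+# Editor's illustrative sketch (not part of the upstream file): SOAP methods
+# are published by subclassing SOAPPublisher and using the soap_ prefix that
+# lookupFunction() resolves; the class and method names here are hypothetical:
+#
+#     class Calculator(SOAPPublisher):
+#         def soap_add(self, a, b):
+#             return a + b
+#
+# Serving Calculator() under a twisted.web.server.Site makes "add" callable
+# over SOAP.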
+
+class Proxy:
+    """A Proxy for making remote SOAP calls.
+
+    Pass the URL of the remote SOAP server to the constructor.
+
+    Use proxy.callRemote('foobar', 1, 2) to call remote method
+    'foobar' with args 1 and 2, proxy.callRemote('foobar', x=1)
+    will call foobar with named argument 'x'.
+    """
+
+    # at some point this should have encoding etc. kwargs
+    def __init__(self, url, namespace=None, header=None):
+        self.url = url
+        self.namespace = namespace
+        self.header = header
+
+    def _cbGotResult(self, result):
+        result = SOAPpy.parseSOAPRPC(result)
+        if hasattr(result, 'Result'):
+            return result.Result
+        elif len(result) == 1:
+            ## SOAPpy 0.11.6 wraps the return results in a containing structure.
+            ## This check added to make Proxy behaviour emulate SOAPProxy, which
+            ## flattens the structure by default.
+            ## This behaviour is OK because even singleton lists are wrapped in
+            ## another singleton structType, which is almost always useless.
+            return result[0]
+        else:
+            return result
+
+    def callRemote(self, method, *args, **kwargs):
+        payload = SOAPpy.buildSOAP(args=args, kw=kwargs, method=method,
+                                   header=self.header, namespace=self.namespace)
+        return client.getPage(self.url, postdata=payload, method="POST",
+                              headers={'content-type': 'text/xml',
+                                       'SOAPAction': method}
+                              ).addCallback(self._cbGotResult)
+
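+# Editor's illustrative sketch (not part of the upstream file): callRemote
+# returns a Deferred, so results arrive asynchronously; the URL and method
+# name below are hypothetical:
+#
+#     def show(result):
+#         print "Sum:", result
+#
+#     proxy = Proxy('http://example.com/soap')
+#     proxy.callRemote('add', 1, 2).addCallback(show)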
diff --git a/ThirdParty/Twisted/twisted/web/static.py b/ThirdParty/Twisted/twisted/web/static.py
new file mode 100644
index 0000000..05fd7d9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/static.py
@@ -0,0 +1,1033 @@
+# -*- test-case-name: twisted.web.test.test_static -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Static resources for L{twisted.web}.
+"""
+from __future__ import division
+
+import os
+import warnings
+import urllib
+import itertools
+import cgi
+import time
+
+from zope.interface import implements
+
+from twisted.web import server
+from twisted.web import resource
+from twisted.web import http
+from twisted.web.util import redirectTo
+
+from twisted.python import components, filepath, log
+from twisted.internet import abstract, interfaces
+from twisted.persisted import styles
+from twisted.python.util import InsensitiveDict
+from twisted.python.runtime import platformType
+
+
+dangerousPathError = resource.NoResource("Invalid request URL.")
+
+def isDangerous(path):
+    return path == '..' or '/' in path or os.sep in path
+
+
+class Data(resource.Resource):
+    """
+    This is a static, in-memory resource.
+    """
+
+    def __init__(self, data, type):
+        resource.Resource.__init__(self)
+        self.data = data
+        self.type = type
+
+
+    def render_GET(self, request):
+        request.setHeader("content-type", self.type)
+        request.setHeader("content-length", str(len(self.data)))
+        if request.method == "HEAD":
+            return ''
+        return self.data
+    render_HEAD = render_GET
+
+
+def addSlash(request):
+    qs = ''
+    qindex = request.uri.find('?')
+    if qindex != -1:
+        qs = request.uri[qindex:]
+
+    return "http%s://%s%s/%s" % (
+        request.isSecure() and 's' or '',
+        request.getHeader("host"),
+        (request.uri.split('?')[0]),
+        qs)
+
+class Redirect(resource.Resource):
+    def __init__(self, request):
+        resource.Resource.__init__(self)
+        self.url = addSlash(request)
+
+    def render(self, request):
+        return redirectTo(self.url, request)
+
+
+class Registry(components.Componentized, styles.Versioned):
+    """
+    I am a Componentized object that will be made available to internal Twisted
+    file-based dynamic web content such as .rpy and .epy scripts.
+    """
+
+    def __init__(self):
+        components.Componentized.__init__(self)
+        self._pathCache = {}
+
+    persistenceVersion = 1
+
+    def upgradeToVersion1(self):
+        self._pathCache = {}
+
+    def cachePath(self, path, rsrc):
+        self._pathCache[path] = rsrc
+
+    def getCachedPath(self, path):
+        return self._pathCache.get(path)
+
+
+def loadMimeTypes(mimetype_locations=['/etc/mime.types']):
+    """
+    Multiple file locations containing mime-types can be passed as a list.
+    The files will be sourced in that order, overriding mime-types from the
+    files sourced beforehand, but only if a new entry explicitly overrides
+    the current entry.
+    """
+    import mimetypes
+    # Grab Python's built-in mimetypes dictionary.
+    contentTypes = mimetypes.types_map
+    # Update Python's semi-erroneous dictionary with a few of the
+    # usual suspects.
+    contentTypes.update(
+        {
+            '.conf':  'text/plain',
+            '.diff':  'text/plain',
+            '.exe':   'application/x-executable',
+            '.flac':  'audio/x-flac',
+            '.java':  'text/plain',
+            '.ogg':   'application/ogg',
+            '.oz':    'text/x-oz',
+            '.swf':   'application/x-shockwave-flash',
+            '.tgz':   'application/x-gtar',
+            '.wml':   'text/vnd.wap.wml',
+            '.xul':   'application/vnd.mozilla.xul+xml',
+            '.py':    'text/plain',
+            '.patch': 'text/plain',
+        }
+    )
+    # Users can override these mime-types by loading them out of configuration
+    # files (this defaults to ['/etc/mime.types']).
+    for location in mimetype_locations:
+        if os.path.exists(location):
+            more = mimetypes.read_mime_types(location)
+            if more is not None:
+                contentTypes.update(more)
+
+    return contentTypes
+
+def getTypeAndEncoding(filename, types, encodings, defaultType):
+    p, ext = os.path.splitext(filename)
+    ext = ext.lower()
+    if ext in encodings:
+        enc = encodings[ext]
+        ext = os.path.splitext(p)[1].lower()
+    else:
+        enc = None
+    type = types.get(ext, defaultType)
+    return type, enc
+
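+# Editor's illustrative sketch (not part of the upstream file): the helper
+# above strips a compression suffix before looking up the MIME type, e.g.
+#
+#     getTypeAndEncoding('notes.txt.gz', File.contentTypes,
+#                        File.contentEncodings, 'text/html')
+#     # -> ('text/plain', 'gzip')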
+
+
+class File(resource.Resource, styles.Versioned, filepath.FilePath):
+    """
+    File is a resource that represents a plain non-interpreted file
+    (although it can look for an extension like .rpy or .cgi and hand the
+    file to a processor for interpretation if you wish). Its constructor
+    takes a file path.
+
+    Alternatively, you can give a directory path to the constructor. In this
+    case the resource will represent that directory, and its children will
+    be files underneath that directory. This provides access to an entire
+    filesystem tree with a single Resource.
+
+    If you map the URL 'http://server/FILE' to a resource created as
+    File('/tmp'), then http://server/FILE/ will return an HTML-formatted
+    listing of the /tmp/ directory, and http://server/FILE/foo/bar.html will
+    return the contents of /tmp/foo/bar.html .
+
+    @cvar childNotFound: L{Resource} used to render 404 Not Found error pages.
+    """
+
+    contentTypes = loadMimeTypes()
+
+    contentEncodings = {
+        ".gz" : "gzip",
+        ".bz2": "bzip2"
+        }
+
+    processors = {}
+
+    indexNames = ["index", "index.html", "index.htm", "index.rpy"]
+
+    type = None
+
+    ### Versioning
+
+    persistenceVersion = 6
+
+    def upgradeToVersion6(self):
+        self.ignoredExts = []
+        if self.allowExt:
+            self.ignoreExt("*")
+        del self.allowExt
+
+
+    def upgradeToVersion5(self):
+        if not isinstance(self.registry, Registry):
+            self.registry = Registry()
+
+
+    def upgradeToVersion4(self):
+        if not hasattr(self, 'registry'):
+            self.registry = {}
+
+
+    def upgradeToVersion3(self):
+        if not hasattr(self, 'allowExt'):
+            self.allowExt = 0
+
+
+    def upgradeToVersion2(self):
+        self.defaultType = "text/html"
+
+
+    def upgradeToVersion1(self):
+        if hasattr(self, 'indexName'):
+            self.indexNames = [self.indexName]
+            del self.indexName
+
+
+    def __init__(self, path, defaultType="text/html", ignoredExts=(), registry=None, allowExt=0):
+        """
+        Create a file with the given path.
+
+        @param path: The filename of the file from which this L{File} will
+            serve data.
+        @type path: C{str}
+
+        @param defaultType: A I{major/minor}-style MIME type specifier
+            indicating the I{Content-Type} with which this L{File}'s data
+            will be served if a MIME type cannot be determined based on
+            C{path}'s extension.
+        @type defaultType: C{str}
+
+        @param ignoredExts: A sequence giving the extensions of paths in the
+            filesystem which will be ignored for the purposes of child
+            lookup.  For example, if C{ignoredExts} is C{(".bar",)} and
+            C{path} is a directory containing a file named C{"foo.bar"}, a
+            request for the C{"foo"} child of this resource will succeed
+            with a L{File} pointing to C{"foo.bar"}.
+
+        @param registry: The registry object being used to handle this
+            request.  If C{None}, one will be created.
+        @type registry: L{Registry}
+
+        @param allowExt: Ignored parameter, only present for backwards
+            compatibility.  Do not pass a value for this parameter.
+        """
+        resource.Resource.__init__(self)
+        filepath.FilePath.__init__(self, path)
+        self.defaultType = defaultType
+        if ignoredExts in (0, 1) or allowExt:
+            warnings.warn("ignoredExts should receive a list, not a boolean")
+            if ignoredExts or allowExt:
+                self.ignoredExts = ['*']
+            else:
+                self.ignoredExts = []
+        else:
+            self.ignoredExts = list(ignoredExts)
+        self.registry = registry or Registry()
+
+
+    def ignoreExt(self, ext):
+        """Ignore the given extension.
+
+        Serve file.ext if file is requested
+        """
+        self.ignoredExts.append(ext)
+
+    childNotFound = resource.NoResource("File not found.")
+
+    def directoryListing(self):
+        return DirectoryLister(self.path,
+                               self.listNames(),
+                               self.contentTypes,
+                               self.contentEncodings,
+                               self.defaultType)
+
+
+    def getChild(self, path, request):
+        """
+        If this L{File}'s path refers to a directory, return a L{File}
+        referring to the file named C{path} in that directory.
+
+        If C{path} is the empty string, return a L{DirectoryLister} instead.
+        """
+        self.restat(reraise=False)
+
+        if not self.isdir():
+            return self.childNotFound
+
+        if path:
+            try:
+                fpath = self.child(path)
+            except filepath.InsecurePath:
+                return self.childNotFound
+        else:
+            fpath = self.childSearchPreauth(*self.indexNames)
+            if fpath is None:
+                return self.directoryListing()
+
+        if not fpath.exists():
+            fpath = fpath.siblingExtensionSearch(*self.ignoredExts)
+            if fpath is None:
+                return self.childNotFound
+
+        if platformType == "win32":
+            # don't want .RPY to be different than .rpy, since that would allow
+            # source disclosure.
+            processor = InsensitiveDict(self.processors).get(fpath.splitext()[1])
+        else:
+            processor = self.processors.get(fpath.splitext()[1])
+        if processor:
+            return resource.IResource(processor(fpath.path, self.registry))
+        return self.createSimilarFile(fpath.path)
+
+
+    # methods to allow subclasses to e.g. decrypt files on the fly:
+    def openForReading(self):
+        """Open a file and return it."""
+        return self.open()
+
+
+    def getFileSize(self):
+        """Return file size."""
+        return self.getsize()
+
+
+    def _parseRangeHeader(self, range):
+        """
+        Parse the value of a Range header into (start, stop) pairs.
+
+        In a given pair, either of start or stop can be None, signifying that
+        no value was provided, but not both.
+
+        @return: A list C{[(start, stop)]} of pairs of length at least one.
+
+        @raise ValueError: if the header is syntactically invalid or if the
+            Bytes-Unit is anything other than 'bytes'.
+        """
+        try:
+            kind, value = range.split('=', 1)
+        except ValueError:
+            raise ValueError("Missing '=' separator")
+        kind = kind.strip()
+        if kind != 'bytes':
+            raise ValueError("Unsupported Bytes-Unit: %r" % (kind,))
+        unparsedRanges = filter(None, map(str.strip, value.split(',')))
+        parsedRanges = []
+        for byteRange in unparsedRanges:
+            try:
+                start, end = byteRange.split('-', 1)
+            except ValueError:
+                raise ValueError("Invalid Byte-Range: %r" % (byteRange,))
+            if start:
+                try:
+                    start = int(start)
+                except ValueError:
+                    raise ValueError("Invalid Byte-Range: %r" % (byteRange,))
+            else:
+                start = None
+            if end:
+                try:
+                    end = int(end)
+                except ValueError:
+                    raise ValueError("Invalid Byte-Range: %r" % (byteRange,))
+            else:
+                end = None
+            if start is not None:
+                if end is not None and start > end:
+                    # Start must be less than or equal to end or it is invalid.
+                    raise ValueError("Invalid Byte-Range: %r" % (byteRange,))
+            elif end is None:
+                # One or both of start and end must be specified.  Omitting
+                # both is invalid.
+                raise ValueError("Invalid Byte-Range: %r" % (byteRange,))
+            parsedRanges.append((start, end))
+        return parsedRanges
+
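+    # Editor's illustrative sketch (not part of the upstream file): a header
+    # value of "bytes=0-499,1000-" parses to [(0, 499), (1000, None)], and
+    # "bytes=-500" parses to [(None, 500)], i.e. the final 500 bytes of the
+    # resource once converted by _rangeToOffsetAndSize below.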
+
+    def _rangeToOffsetAndSize(self, start, end):
+        """
+        Convert a start and end from a Range header to an offset and size.
+
+        This method checks that the resulting range overlaps with the resource
+        being served (and so has the value of C{getFileSize()} as an indirect
+        input).
+
+        Either but not both of start or end can be C{None}:
+
+         - Omitted start means that the end value is actually a start value
+           relative to the end of the resource.
+
+         - Omitted end means the end of the resource should be the end of
+           the range.
+
+        End is interpreted as inclusive, as per RFC 2616.
+
+        If this range doesn't overlap with any of this resource, C{(0, 0)} is
+        returned, which is not otherwise a valid return value.
+
+        @param start: The start value from the header, or C{None} if one was
+            not present.
+        @param end: The end value from the header, or C{None} if one was not
+            present.
+        @return: C{(offset, size)} where offset is how far into this resource
+            the range begins and size is how long the range is,
+            or C{(0, 0)} if the range does not overlap this resource.
+        """
+        size = self.getFileSize()
+        if start is None:
+            start = size - end
+            end = size
+        elif end is None:
+            end = size
+        elif end < size:
+            end += 1
+        elif end > size:
+            end = size
+        if start >= size:
+            start = end = 0
+        return start, (end - start)
+
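+    # Editor's illustrative sketch (not part of the upstream file): for a
+    # 1000-byte file, (0, 499) maps to (offset=0, size=500) because the end
+    # is inclusive; (None, 500) maps to (500, 500), the final 500 bytes; and
+    # (2000, None) maps to (0, 0), the "no overlap" sentinel checked by the
+    # callers below.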
+
+    def _contentRange(self, offset, size):
+        """
+        Return a string suitable for the value of a Content-Range header for a
+        range with the given offset and size.
+
+        The offset and size are not sanity checked in any way.
+
+        @param offset: How far into this resource the range begins.
+        @param size: How long the range is.
+        @return: The value as appropriate for the value of a Content-Range
+            header.
+        """
+        return 'bytes %d-%d/%d' % (
+            offset, offset + size - 1, self.getFileSize())
+
+
+    def _doSingleRangeRequest(self, request, (start, end)):
+        """
+        Set up the response for Range headers that specify a single range.
+
+        This method checks if the request is satisfiable and sets the response
+        code and Content-Range header appropriately.  The return value
+        indicates which part of the resource to return.
+
+        @param request: The Request object.
+        @param start: The start of the byte range as specified by the header.
+        @param end: The end of the byte range as specified by the header.  At
+            most one of C{start} and C{end} may be C{None}.
+        @return: A 2-tuple of the offset and size of the range to return.
+            offset == size == 0 indicates that the request is not satisfiable.
+        """
+        offset, size  = self._rangeToOffsetAndSize(start, end)
+        if offset == size == 0:
+            # This range doesn't overlap with any of this resource, so the
+            # request is unsatisfiable.
+            request.setResponseCode(http.REQUESTED_RANGE_NOT_SATISFIABLE)
+            request.setHeader(
+                'content-range', 'bytes */%d' % (self.getFileSize(),))
+        else:
+            request.setResponseCode(http.PARTIAL_CONTENT)
+            request.setHeader(
+                'content-range', self._contentRange(offset, size))
+        return offset, size
+
+
+    def _doMultipleRangeRequest(self, request, byteRanges):
+        """
+        Set up the response for Range headers that specify multiple ranges.
+
+        This method checks if the request is satisfiable and sets the response
+        code and Content-Type and Content-Length headers appropriately.  The
+        return value, which is a little complicated, indicates which parts of
+        the resource to return and the boundaries that should separate the
+        parts.
+
+        In detail, the return value C{rangeInfo} is a list of 3-tuples
+        C{(partSeparator, partOffset, partSize)}.  The
+        response to this request should be, for each element of C{rangeInfo},
+        C{partSeparator} followed by C{partSize} bytes of the resource
+        starting at C{partOffset}.  Each C{partSeparator} includes the
+        MIME-style boundary and the part-specific Content-type and
+        Content-range headers.  It is convenient to return the separator as a
+        concrete string from this method, because this method needs to compute
+        the number of bytes that will make up the response to be able to set
+        the Content-Length header of the response accurately.
+
+        @param request: The Request object.
+        @param byteRanges: A list of C{(start, end)} values as specified by
+            the header.  For each range, at most one of C{start} and C{end}
+            may be C{None}.
+        @return: See above.
+        """
+        matchingRangeFound = False
+        rangeInfo = []
+        contentLength = 0
+        boundary = "%x%x" % (int(time.time()*1000000), os.getpid())
+        if self.type:
+            contentType = self.type
+        else:
+            contentType = 'bytes' # It's what Apache does...
+        for start, end in byteRanges:
+            partOffset, partSize = self._rangeToOffsetAndSize(start, end)
+            if partOffset == partSize == 0:
+                continue
+            contentLength += partSize
+            matchingRangeFound = True
+            partContentRange = self._contentRange(partOffset, partSize)
+            partSeparator = (
+                "\r\n"
+                "--%s\r\n"
+                "Content-type: %s\r\n"
+                "Content-range: %s\r\n"
+                "\r\n") % (boundary, contentType, partContentRange)
+            contentLength += len(partSeparator)
+            rangeInfo.append((partSeparator, partOffset, partSize))
+        if not matchingRangeFound:
+            request.setResponseCode(http.REQUESTED_RANGE_NOT_SATISFIABLE)
+            request.setHeader(
+                'content-length', '0')
+            request.setHeader(
+                'content-range', 'bytes */%d' % (self.getFileSize(),))
+            return [], ''
+        finalBoundary = "\r\n--" + boundary + "--\r\n"
+        rangeInfo.append((finalBoundary, 0, 0))
+        request.setResponseCode(http.PARTIAL_CONTENT)
+        request.setHeader(
+            'content-type', 'multipart/byteranges; boundary="%s"' % (boundary,))
+        request.setHeader(
+            'content-length', contentLength + len(finalBoundary))
+        return rangeInfo
+
+
+    def _setContentHeaders(self, request, size=None):
+        """
+        Set the Content-length and Content-type headers for this request.
+
+        This method is not appropriate for requests for multiple byte ranges;
+        L{_doMultipleRangeRequest} will set these headers in that case.
+
+        @param request: The L{Request} object.
+        @param size: The size of the response.  If not specified, default to
+            C{self.getFileSize()}.
+        """
+        if size is None:
+            size = self.getFileSize()
+        request.setHeader('content-length', str(size))
+        if self.type:
+            request.setHeader('content-type', self.type)
+        if self.encoding:
+            request.setHeader('content-encoding', self.encoding)
+
+
+    def makeProducer(self, request, fileForReading):
+        """
+        Make a L{StaticProducer} that will produce the body of this response.
+
+        This method will also set the response code and Content-* headers.
+
+        @param request: The L{Request} object.
+        @param fileForReading: The file object containing the resource.
+        @return: A L{StaticProducer}.  Calling C{.start()} on this will begin
+            producing the response.
+        """
+        byteRange = request.getHeader('range')
+        if byteRange is None:
+            self._setContentHeaders(request)
+            request.setResponseCode(http.OK)
+            return NoRangeStaticProducer(request, fileForReading)
+        try:
+            parsedRanges = self._parseRangeHeader(byteRange)
+        except ValueError:
+            log.msg("Ignoring malformed Range header %r" % (byteRange,))
+            self._setContentHeaders(request)
+            request.setResponseCode(http.OK)
+            return NoRangeStaticProducer(request, fileForReading)
+
+        if len(parsedRanges) == 1:
+            offset, size = self._doSingleRangeRequest(
+                request, parsedRanges[0])
+            self._setContentHeaders(request, size)
+            return SingleRangeStaticProducer(
+                request, fileForReading, offset, size)
+        else:
+            rangeInfo = self._doMultipleRangeRequest(request, parsedRanges)
+            return MultipleRangeStaticProducer(
+                request, fileForReading, rangeInfo)
+
+
+    def render_GET(self, request):
+        """
+        Begin sending the contents of this L{File} (or a subset of the
+        contents, based on the 'range' header) to the given request.
+        """
+        self.restat(False)
+
+        if self.type is None:
+            self.type, self.encoding = getTypeAndEncoding(self.basename(),
+                                                          self.contentTypes,
+                                                          self.contentEncodings,
+                                                          self.defaultType)
+
+        if not self.exists():
+            return self.childNotFound.render(request)
+
+        if self.isdir():
+            return self.redirect(request)
+
+        request.setHeader('accept-ranges', 'bytes')
+
+        try:
+            fileForReading = self.openForReading()
+        except IOError, e:
+            import errno
+            if e[0] == errno.EACCES:
+                return resource.ForbiddenResource().render(request)
+            else:
+                raise
+
+        if request.setLastModified(self.getmtime()) is http.CACHED:
+            return ''
+
+
+        producer = self.makeProducer(request, fileForReading)
+
+        if request.method == 'HEAD':
+            return ''
+
+        producer.start()
+        # and make sure the connection doesn't get closed
+        return server.NOT_DONE_YET
+    render_HEAD = render_GET
+
+
+    def redirect(self, request):
+        return redirectTo(addSlash(request), request)
+
+
+    def listNames(self):
+        if not self.isdir():
+            return []
+        directory = self.listdir()
+        directory.sort()
+        return directory
+
+    def listEntities(self):
+        return map(lambda fileName, self=self: self.createSimilarFile(os.path.join(self.path, fileName)), self.listNames())
+
+
+    def createSimilarFile(self, path):
+        f = self.__class__(path, self.defaultType, self.ignoredExts, self.registry)
+        # refactoring by steps, here - constructor should almost certainly take these
+        f.processors = self.processors
+        f.indexNames = self.indexNames[:]
+        f.childNotFound = self.childNotFound
+        return f
+
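+# Editor's illustrative sketch (not part of the upstream file): File is
+# typically served directly; the path below is hypothetical:
+#
+#     from twisted.internet import reactor
+#     from twisted.web.server import Site
+#     from twisted.web.static import File
+#
+#     reactor.listenTCP(8080, Site(File('/srv/www')))
+#     reactor.run()
+#
+# Directory requests fall through getChild() to DirectoryLister, and Range
+# requests are answered by the producers defined below.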
+
+
+class StaticProducer(object):
+    """
+    Superclass for classes that implement the business of producing.
+
+    @ivar request: The L{IRequest} to write the contents of the file to.
+    @ivar fileObject: The file the contents of which to write to the request.
+    """
+
+    implements(interfaces.IPullProducer)
+
+    bufferSize = abstract.FileDescriptor.bufferSize
+
+
+    def __init__(self, request, fileObject):
+        """
+        Initialize the instance.
+        """
+        self.request = request
+        self.fileObject = fileObject
+
+
+    def start(self):
+        raise NotImplementedError(self.start)
+
+
+    def resumeProducing(self):
+        raise NotImplementedError(self.resumeProducing)
+
+
+    def stopProducing(self):
+        """
+        Stop producing data.
+
+        L{IPullProducer.stopProducing} is called when our consumer has died,
+        and subclasses also call this method when they are done producing
+        data.
+        """
+        self.fileObject.close()
+        self.request = None
+
+
+
+class NoRangeStaticProducer(StaticProducer):
+    """
+    A L{StaticProducer} that writes the entire file to the request.
+    """
+
+    def start(self):
+        self.request.registerProducer(self, False)
+
+
+    def resumeProducing(self):
+        if not self.request:
+            return
+        data = self.fileObject.read(self.bufferSize)
+        if data:
+            # this .write will spin the reactor, calling .doWrite and then
+            # .resumeProducing again, so be prepared for a re-entrant call
+            self.request.write(data)
+        else:
+            self.request.unregisterProducer()
+            self.request.finish()
+            self.stopProducing()
+
+
+
+class SingleRangeStaticProducer(StaticProducer):
+    """
+    A L{StaticProducer} that writes a single chunk of a file to the request.
+    """
+
+    def __init__(self, request, fileObject, offset, size):
+        """
+        Initialize the instance.
+
+        @param request: See L{StaticProducer}.
+        @param fileObject: See L{StaticProducer}.
+        @param offset: The offset into the file of the chunk to be written.
+        @param size: The size of the chunk to write.
+        """
+        StaticProducer.__init__(self, request, fileObject)
+        self.offset = offset
+        self.size = size
+
+
+    def start(self):
+        self.fileObject.seek(self.offset)
+        self.bytesWritten = 0
+        self.request.registerProducer(self, 0)
+
+
+    def resumeProducing(self):
+        if not self.request:
+            return
+        data = self.fileObject.read(
+            min(self.bufferSize, self.size - self.bytesWritten))
+        if data:
+            self.bytesWritten += len(data)
+            # this .write will spin the reactor, calling .doWrite and then
+            # .resumeProducing again, so be prepared for a re-entrant call
+            self.request.write(data)
+        if self.request and self.bytesWritten == self.size:
+            self.request.unregisterProducer()
+            self.request.finish()
+            self.stopProducing()
+
+
+
+class MultipleRangeStaticProducer(StaticProducer):
+    """
+    A L{StaticProducer} that writes several chunks of a file to the request.
+    """
+
+    def __init__(self, request, fileObject, rangeInfo):
+        """
+        Initialize the instance.
+
+        @param request: See L{StaticProducer}.
+        @param fileObject: See L{StaticProducer}.
+        @param rangeInfo: A list of tuples C{[(boundary, offset, size)]}
+            where:
+             - C{boundary} will be written to the request first.
+             - C{offset} the offset into the file of chunk to write.
+             - C{size} the size of the chunk to write.
+        """
+        StaticProducer.__init__(self, request, fileObject)
+        self.rangeInfo = rangeInfo
+
+
+    def start(self):
+        self.rangeIter = iter(self.rangeInfo)
+        self._nextRange()
+        self.request.registerProducer(self, 0)
+
+
+    def _nextRange(self):
+        self.partBoundary, partOffset, self._partSize = self.rangeIter.next()
+        self._partBytesWritten = 0
+        self.fileObject.seek(partOffset)
+
+
+    def resumeProducing(self):
+        if not self.request:
+            return
+        data = []
+        dataLength = 0
+        done = False
+        while dataLength < self.bufferSize:
+            if self.partBoundary:
+                dataLength += len(self.partBoundary)
+                data.append(self.partBoundary)
+                self.partBoundary = None
+            p = self.fileObject.read(
+                min(self.bufferSize - dataLength,
+                    self._partSize - self._partBytesWritten))
+            self._partBytesWritten += len(p)
+            dataLength += len(p)
+            data.append(p)
+            if self.request and self._partBytesWritten == self._partSize:
+                try:
+                    self._nextRange()
+                except StopIteration:
+                    done = True
+                    break
+        self.request.write(''.join(data))
+        if done:
+            self.request.unregisterProducer()
+            self.request.finish()
+            self.request = None
+
+
+
+class ASISProcessor(resource.Resource):
+    """
+    Serve files exactly as responses without generating a status-line or any
+    headers.  Inspired by Apache's mod_asis.
+    """
+
+    def __init__(self, path, registry=None):
+        resource.Resource.__init__(self)
+        self.path = path
+        self.registry = registry or Registry()
+
+
+    def render(self, request):
+        request.startedWriting = 1
+        res = File(self.path, registry=self.registry)
+        return res.render(request)
+
+
+
+def formatFileSize(size):
+    """
+    Format the given file size in bytes to human readable format.
+    """
+    if size < 1024:
+        return '%iB' % size
+    elif size < (1024 ** 2):
+        return '%iK' % (size / 1024)
+    elif size < (1024 ** 3):
+        return '%iM' % (size / (1024 ** 2))
+    else:
+        return '%iG' % (size / (1024 ** 3))
+
+
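+# Editor's illustrative sketch (not part of the upstream file):
+# formatFileSize(512) returns '512B', formatFileSize(2048) returns '2K',
+# and formatFileSize(3 * 1024 ** 2) returns '3M'.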
+
+class DirectoryLister(resource.Resource):
+    """
+    Print the content of a directory.
+
+    @ivar template: page template used to render the content of the directory.
+        It must contain the format keys B{header} and B{tableContent}.
+    @type template: C{str}
+
+    @ivar linePattern: template used to render one line in the listing table.
+        It must contain the format keys B{class}, B{href}, B{text}, B{size},
+        B{type} and B{encoding}.
+    @type linePattern: C{str}
+
+    @ivar contentEncodings: a mapping of extensions to encoding types.
+    @type contentEncodings: C{dict}
+
+    @ivar defaultType: default type used when no mimetype is detected.
+    @type defaultType: C{str}
+
+    @ivar dirs: filtered content of C{path}, if the whole content should not be
+        displayed (defaults to C{None}, which means the actual content of
+        C{path} is printed).
+    @type dirs: C{NoneType} or C{list}
+
+    @ivar path: directory whose content should be listed.
+    @type path: C{str}
+    """
+
+    template = """<html>
+<head>
+<title>%(header)s</title>
+<style>
+.even-dir { background-color: #efe0ef }
+.even { background-color: #eee }
+.odd-dir {background-color: #f0d0ef }
+.odd { background-color: #dedede }
+.icon { text-align: center }
+.listing {
+    margin-left: auto;
+    margin-right: auto;
+    width: 50%%;
+    padding: 0.1em;
+    }
+
+body { border: 0; padding: 0; margin: 0; background-color: #efefef; }
+h1 {padding: 0.1em; background-color: #777; color: white; border-bottom: thin white dashed;}
+
+</style>
+</head>
+
+<body>
+<h1>%(header)s</h1>
+
+<table>
+    <thead>
+        <tr>
+            <th>Filename</th>
+            <th>Size</th>
+            <th>Content type</th>
+            <th>Content encoding</th>
+        </tr>
+    </thead>
+    <tbody>
+%(tableContent)s
+    </tbody>
+</table>
+
+</body>
+</html>
+"""
+
+    linePattern = """<tr class="%(class)s">
+    <td><a href="%(href)s">%(text)s</a></td>
+    <td>%(size)s</td>
+    <td>%(type)s</td>
+    <td>%(encoding)s</td>
+</tr>
+"""
+
+    def __init__(self, pathname, dirs=None,
+                 contentTypes=File.contentTypes,
+                 contentEncodings=File.contentEncodings,
+                 defaultType='text/html'):
+        resource.Resource.__init__(self)
+        self.contentTypes = contentTypes
+        self.contentEncodings = contentEncodings
+        self.defaultType = defaultType
+        # dirs allows the caller (typically File) to restrict what gets listed
+        self.dirs = dirs
+        self.path = pathname
+
+
+    def _getFilesAndDirectories(self, directory):
+        """
+        Helper returning the files and directories in the given directory
+        listing, with attributes to be used to build the table content with
+        C{self.linePattern}.
+
+        @return: tuple of (directories, files)
+        @rtype: C{tuple} of C{list}
+        """
+        files = []
+        dirs = []
+        for path in directory:
+            url = urllib.quote(path, "/")
+            escapedPath = cgi.escape(path)
+            if os.path.isdir(os.path.join(self.path, path)):
+                url = url + '/'
+                dirs.append({'text': escapedPath + "/", 'href': url,
+                             'size': '', 'type': '[Directory]',
+                             'encoding': ''})
+            else:
+                mimetype, encoding = getTypeAndEncoding(path, self.contentTypes,
+                                                        self.contentEncodings,
+                                                        self.defaultType)
+                try:
+                    size = os.stat(os.path.join(self.path, path)).st_size
+                except OSError:
+                    continue
+                files.append({
+                    'text': escapedPath, "href": url,
+                    'type': '[%s]' % mimetype,
+                    'encoding': (encoding and '[%s]' % encoding or ''),
+                    'size': formatFileSize(size)})
+        return dirs, files
+
+
+    def _buildTableContent(self, elements):
+        """
+        Build the table content using C{self.linePattern}, giving elements
+        alternating odd and even row classes.
+        """
+        tableContent = []
+        rowClasses = itertools.cycle(['odd', 'even'])
+        for element, rowClass in zip(elements, rowClasses):
+            element["class"] = rowClass
+            tableContent.append(self.linePattern % element)
+        return tableContent
+
+
+    def render(self, request):
+        """
+        Render a listing of the content of C{self.path}.
+        """
+        request.setHeader("content-type", "text/html; charset=utf-8")
+        if self.dirs is None:
+            directory = os.listdir(self.path)
+            directory.sort()
+        else:
+            directory = self.dirs
+
+        dirs, files = self._getFilesAndDirectories(directory)
+
+        tableContent = "".join(self._buildTableContent(dirs + files))
+
+        header = "Directory listing for %s" % (
+            cgi.escape(urllib.unquote(request.uri)),)
+
+        return self.template % {"header": header, "tableContent": tableContent}
+
+
+    def __repr__(self):
+        return '<DirectoryLister of %r>' % self.path
+
+    __str__ = __repr__
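+
+
+
+# Editor's usage sketch, not part of upstream Twisted.  DirectoryLister is
+# normally created for you by File when a requested directory has no index
+# file; the path and port below are illustrative assumptions.
+def _exampleDirectoryListing():
+    from twisted.web.server import Site
+    from twisted.internet import reactor
+    # Serve a bare directory: requests for sub-directories without an index
+    # file are answered with a DirectoryLister page.
+    site = Site(File('/srv/www'))
+    reactor.listenTCP(8080, site)
+    reactor.run()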
diff --git a/ThirdParty/Twisted/twisted/web/sux.py b/ThirdParty/Twisted/twisted/web/sux.py
new file mode 100644
index 0000000..d5ddc4f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/sux.py
@@ -0,0 +1,636 @@
+# -*- test-case-name: twisted.web.test.test_xml -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+*S*mall, *U*ncomplicated *X*ML.
+
+This is a very simple implementation of XML/HTML as a network
+protocol.  It is not at all clever.  Its main features are that it
+does not:
+
+  - support namespaces
+  - mung mnemonic entity references
+  - validate
+  - perform *any* external actions (such as fetching URLs or writing files)
+    under *any* circumstances
+
+It does, however, have lots and lots of horrible hacks for supporting broken
+HTML (as an option; they're not on by default).
+"""
+
+from twisted.internet.protocol import Protocol
+from twisted.python.reflect import prefixedMethodNames
+
+
+
+# Elements of the three-tuples in the state table.
+BEGIN_HANDLER = 0
+DO_HANDLER = 1
+END_HANDLER = 2
+
+identChars = '.-_:'
+lenientIdentChars = identChars + ';+#/%~'
+
+def nop(*args, **kw):
+    "Do nothing."
+
+
+def unionlist(*args):
+    l = []
+    for x in args:
+        l.extend(x)
+    d = dict([(x, 1) for x in l])
+    return d.keys()
+
+
+def zipfndict(*args, **kw):
+    default = kw.get('default', nop)
+    d = {}
+    for key in unionlist(*[fndict.keys() for fndict in args]):
+        d[key] = tuple([x.get(key, default) for x in args])
+    return d
+
+
+def prefixedMethodClassDict(clazz, prefix):
+    return dict([(name, getattr(clazz, prefix + name)) for name in prefixedMethodNames(clazz, prefix)])
+
+
+def prefixedMethodObjDict(obj, prefix):
+    return dict([(name, getattr(obj, prefix + name)) for name in prefixedMethodNames(obj.__class__, prefix)])
+
+
+class ParseError(Exception):
+
+    def __init__(self, filename, line, col, message):
+        self.filename = filename
+        self.line = line
+        self.col = col
+        self.message = message
+
+    def __str__(self):
+       return "%s:%s:%s: %s" % (self.filename, self.line, self.col,
+                                self.message)
+
+class XMLParser(Protocol):
+
+    state = None
+    encodings = None
+    filename = "<xml />"
+    beExtremelyLenient = 0
+    _prepend = None
+
+    # _leadingBodyData will sometimes be set before switching to the
+    # 'bodydata' state, when we "accidentally" read a byte of bodydata
+    # in a different state.
+    _leadingBodyData = None
+
+    def connectionMade(self):
+        self.lineno = 1
+        self.colno = 0
+        self.encodings = []
+
+    def saveMark(self):
+        '''Get the line number and column of the last character parsed'''
+        # This gets replaced during dataReceived, restored afterwards
+        return (self.lineno, self.colno)
+
+    def _parseError(self, message):
+        raise ParseError(*((self.filename,)+self.saveMark()+(message,)))
+
+    def _buildStateTable(self):
+        '''Return a dictionary of begin, do, end state function tuples'''
+        # _buildStateTable leaves something to be desired but it does what it
+        # does.. probably slowly, so I'm doing some evil caching so it doesn't
+        # get called more than once per class.
+        stateTable = getattr(self.__class__, '__stateTable', None)
+        if stateTable is None:
+            stateTable = self.__class__.__stateTable = zipfndict(
+                *[prefixedMethodObjDict(self, prefix)
+                  for prefix in ('begin_', 'do_', 'end_')])
+        return stateTable
+
+    def _decode(self, data):
+        if 'UTF-16' in self.encodings or 'UCS-2' in self.encodings:
+            assert not len(data) & 1, 'UTF-16 must come in pairs for now'
+        if self._prepend:
+            data = self._prepend + data
+        for encoding in self.encodings:
+            data = unicode(data, encoding)
+        return data
+
+    def maybeBodyData(self):
+        if self.endtag:
+            return 'bodydata'
+
+        # Get ready for fun! We're going to allow
+        # <script>if (foo < bar)</script> to work!
+        # We do this by making everything between <script> and
+        # </script> a Text
+        # BUT <script src="foo"> will be special-cased to do regular,
+        # lenient behavior, because those may not have </script>
+        # -radix
+
+        if (self.tagName == 'script' and 'src' not in self.tagAttributes):
+            # we do this ourselves rather than having begin_waitforendscript
+            # because that can get called multiple times and we don't want
+            # bodydata to get reset other than the first time.
+            self.begin_bodydata(None)
+            return 'waitforendscript'
+        return 'bodydata'
+
+
+
+    def dataReceived(self, data):
+        stateTable = self._buildStateTable()
+        if not self.state:
+            # all UTF-16 starts with this string
+            if data.startswith('\xff\xfe'):
+                self._prepend = '\xff\xfe'
+                self.encodings.append('UTF-16')
+                data = data[2:]
+            elif data.startswith('\xfe\xff'):
+                self._prepend = '\xfe\xff'
+                self.encodings.append('UTF-16')
+                data = data[2:]
+            self.state = 'begin'
+        if self.encodings:
+            data = self._decode(data)
+        # bring state, lineno, colno into local scope
+        lineno, colno = self.lineno, self.colno
+        curState = self.state
+        # replace saveMark with a nested scope function
+        _saveMark = self.saveMark
+        def saveMark():
+            return (lineno, colno)
+        self.saveMark = saveMark
+        # fetch functions from the stateTable
+        beginFn, doFn, endFn = stateTable[curState]
+        try:
+            for byte in data:
+                # do newline stuff
+                if byte == '\n':
+                    lineno += 1
+                    colno = 0
+                else:
+                    colno += 1
+                newState = doFn(byte)
+                if newState is not None and newState != curState:
+                    # this is the endFn from the previous state
+                    endFn()
+                    curState = newState
+                    beginFn, doFn, endFn = stateTable[curState]
+                    beginFn(byte)
+        finally:
+            self.saveMark = _saveMark
+            self.lineno, self.colno = lineno, colno
+        # state doesn't make sense if there's an exception..
+        self.state = curState
+
+
+    def connectionLost(self, reason):
+        """
+        End the last state we were in.
+        """
+        stateTable = self._buildStateTable()
+        stateTable[self.state][END_HANDLER]()
+
+
+    # state methods
+
+    def do_begin(self, byte):
+        if byte.isspace():
+            return
+        if byte != '<':
+            if self.beExtremelyLenient:
+                self._leadingBodyData = byte
+                return 'bodydata'
+            self._parseError("First char of document [%r] wasn't <" % (byte,))
+        return 'tagstart'
+
+    def begin_comment(self, byte):
+        self.commentbuf = ''
+
+    def do_comment(self, byte):
+        self.commentbuf += byte
+        if self.commentbuf.endswith('-->'):
+            self.gotComment(self.commentbuf[:-3])
+            return 'bodydata'
+
+    def begin_tagstart(self, byte):
+        self.tagName = ''               # name of the tag
+        self.tagAttributes = {}         # attributes of the tag
+        self.termtag = 0                # is the tag self-terminating
+        self.endtag = 0
+
+    def do_tagstart(self, byte):
+        if byte.isalnum() or byte in identChars:
+            self.tagName += byte
+            if self.tagName == '!--':
+                return 'comment'
+        elif byte.isspace():
+            if self.tagName:
+                if self.endtag:
+                    # the properly strict thing to do here is probably to only
+                    # accept whitespace
+                    return 'waitforgt'
+                return 'attrs'
+            else:
+                self._parseError("Whitespace before tag-name")
+        elif byte == '>':
+            if self.endtag:
+                self.gotTagEnd(self.tagName)
+                return 'bodydata'
+            else:
+                self.gotTagStart(self.tagName, {})
+                return (not self.beExtremelyLenient) and 'bodydata' or self.maybeBodyData()
+        elif byte == '/':
+            if self.tagName:
+                return 'afterslash'
+            else:
+                self.endtag = 1
+        elif byte in '!?':
+            if self.tagName:
+                if not self.beExtremelyLenient:
+                    self._parseError("Invalid character in tag-name")
+            else:
+                self.tagName += byte
+                self.termtag = 1
+        elif byte == '[':
+            if self.tagName == '!':
+                return 'expectcdata'
+            else:
+                self._parseError("Invalid '[' in tag-name")
+        else:
+            if self.beExtremelyLenient:
+                self.bodydata = '<'
+                return 'unentity'
+            self._parseError('Invalid tag character: %r'% byte)
+
+    def begin_unentity(self, byte):
+        self.bodydata += byte
+
+    def do_unentity(self, byte):
+        self.bodydata += byte
+        return 'bodydata'
+
+    def end_unentity(self):
+        self.gotText(self.bodydata)
+
+    def begin_expectcdata(self, byte):
+        self.cdatabuf = byte
+
+    def do_expectcdata(self, byte):
+        self.cdatabuf += byte
+        cdb = self.cdatabuf
+        cd = '[CDATA['
+        if len(cd) > len(cdb):
+            if cd.startswith(cdb):
+                return
+            elif self.beExtremelyLenient:
+                ## WHAT THE CRAP!?  MSWord9 generates HTML that includes these
+                ## bizarre <![if !foo]> <![endif]> chunks, so I've gotta ignore
+                ## 'em as best I can.  this should really be a separate parse
+                ## state but I don't even have any idea what these _are_.
+                return 'waitforgt'
+            else:
+                self._parseError("Mal-formed CDATA header")
+        if cd == cdb:
+            self.cdatabuf = ''
+            return 'cdata'
+        self._parseError("Mal-formed CDATA header")
+
+    def do_cdata(self, byte):
+        self.cdatabuf += byte
+        if self.cdatabuf.endswith("]]>"):
+            self.cdatabuf = self.cdatabuf[:-3]
+            return 'bodydata'
+
+    def end_cdata(self):
+        self.gotCData(self.cdatabuf)
+        self.cdatabuf = ''
+
+    def do_attrs(self, byte):
+        if byte.isalnum() or byte in identChars:
+            # XXX FIXME really handle !DOCTYPE at some point
+            if self.tagName == '!DOCTYPE':
+                return 'doctype'
+            if self.tagName[0] in '!?':
+                return 'waitforgt'
+            return 'attrname'
+        elif byte.isspace():
+            return
+        elif byte == '>':
+            self.gotTagStart(self.tagName, self.tagAttributes)
+            return (not self.beExtremelyLenient) and 'bodydata' or self.maybeBodyData()
+        elif byte == '/':
+            return 'afterslash'
+        elif self.beExtremelyLenient:
+            # discard and move on?  Only case I've seen of this so far was:
+            # <foo bar="baz"">
+            return
+        self._parseError("Unexpected character: %r" % byte)
+
+    def begin_doctype(self, byte):
+        self.doctype = byte
+
+    def do_doctype(self, byte):
+        if byte == '>':
+            return 'bodydata'
+        self.doctype += byte
+
+    def end_doctype(self):
+        self.gotDoctype(self.doctype)
+        self.doctype = None
+
+    def do_waitforgt(self, byte):
+        if byte == '>':
+            if self.endtag or not self.beExtremelyLenient:
+                return 'bodydata'
+            return self.maybeBodyData()
+
+    def begin_attrname(self, byte):
+        self.attrname = byte
+        self._attrname_termtag = 0
+
+    def do_attrname(self, byte):
+        if byte.isalnum() or byte in identChars:
+            self.attrname += byte
+            return
+        elif byte == '=':
+            return 'beforeattrval'
+        elif byte.isspace():
+            return 'beforeeq'
+        elif self.beExtremelyLenient:
+            if byte in '"\'':
+                return 'attrval'
+            if byte in lenientIdentChars or byte.isalnum():
+                self.attrname += byte
+                return
+            if byte == '/':
+                self._attrname_termtag = 1
+                return
+            if byte == '>':
+                self.attrval = 'True'
+                self.tagAttributes[self.attrname] = self.attrval
+                self.gotTagStart(self.tagName, self.tagAttributes)
+                if self._attrname_termtag:
+                    self.gotTagEnd(self.tagName)
+                    return 'bodydata'
+                return self.maybeBodyData()
+            # something is really broken. let's leave this attribute where it
+            # is and move on to the next thing
+            return
+        self._parseError("Invalid attribute name: %r %r" % (self.attrname, byte))
+
+    def do_beforeattrval(self, byte):
+        if byte in '"\'':
+            return 'attrval'
+        elif byte.isspace():
+            return
+        elif self.beExtremelyLenient:
+            if byte in lenientIdentChars or byte.isalnum():
+                return 'messyattr'
+            if byte == '>':
+                self.attrval = 'True'
+                self.tagAttributes[self.attrname] = self.attrval
+                self.gotTagStart(self.tagName, self.tagAttributes)
+                return self.maybeBodyData()
+            if byte == '\\':
+                # I saw this in actual HTML once:
+                # <font size=\"3\"><sup>SM</sup></font>
+                return
+        self._parseError("Invalid initial attribute value: %r; Attribute values must be quoted." % byte)
+
+    attrname = ''
+    attrval = ''
+
+    def begin_beforeeq(self,byte):
+        self._beforeeq_termtag = 0
+
+    def do_beforeeq(self, byte):
+        if byte == '=':
+            return 'beforeattrval'
+        elif byte.isspace():
+            return
+        elif self.beExtremelyLenient:
+            if byte.isalnum() or byte in identChars:
+                self.attrval = 'True'
+                self.tagAttributes[self.attrname] = self.attrval
+                return 'attrname'
+            elif byte == '>':
+                self.attrval = 'True'
+                self.tagAttributes[self.attrname] = self.attrval
+                self.gotTagStart(self.tagName, self.tagAttributes)
+                if self._beforeeq_termtag:
+                    self.gotTagEnd(self.tagName)
+                    return 'bodydata'
+                return self.maybeBodyData()
+            elif byte == '/':
+                self._beforeeq_termtag = 1
+                return
+        self._parseError("Invalid attribute")
+
+    def begin_attrval(self, byte):
+        self.quotetype = byte
+        self.attrval = ''
+
+    def do_attrval(self, byte):
+        if byte == self.quotetype:
+            return 'attrs'
+        self.attrval += byte
+
+    def end_attrval(self):
+        self.tagAttributes[self.attrname] = self.attrval
+        self.attrname = self.attrval = ''
+
+    def begin_messyattr(self, byte):
+        self.attrval = byte
+
+    def do_messyattr(self, byte):
+        if byte.isspace():
+            return 'attrs'
+        elif byte == '>':
+            endTag = 0
+            if self.attrval.endswith('/'):
+                endTag = 1
+                self.attrval = self.attrval[:-1]
+            self.tagAttributes[self.attrname] = self.attrval
+            self.gotTagStart(self.tagName, self.tagAttributes)
+            if endTag:
+                self.gotTagEnd(self.tagName)
+                return 'bodydata'
+            return self.maybeBodyData()
+        else:
+            self.attrval += byte
+
+    def end_messyattr(self):
+        if self.attrval:
+            self.tagAttributes[self.attrname] = self.attrval
+
+    def begin_afterslash(self, byte):
+        self._after_slash_closed = 0
+
+    def do_afterslash(self, byte):
+        # this state is only after a self-terminating slash, e.g. <foo/>
+        if self._after_slash_closed:
+            self._parseError("Mal-formed")#XXX When does this happen??
+        if byte != '>':
+            if self.beExtremelyLenient:
+                return
+            else:
+                self._parseError("No data allowed after '/'")
+        self._after_slash_closed = 1
+        self.gotTagStart(self.tagName, self.tagAttributes)
+        self.gotTagEnd(self.tagName)
+        # don't need maybeBodyData here because there better not be
+        # any javascript code after a <script/>... we'll see :(
+        return 'bodydata'
+
+    def begin_bodydata(self, byte):
+        if self._leadingBodyData:
+            self.bodydata = self._leadingBodyData
+            del self._leadingBodyData
+        else:
+            self.bodydata = ''
+
+    def do_bodydata(self, byte):
+        if byte == '<':
+            return 'tagstart'
+        if byte == '&':
+            return 'entityref'
+        self.bodydata += byte
+
+    def end_bodydata(self):
+        self.gotText(self.bodydata)
+        self.bodydata = ''
+
+    def do_waitforendscript(self, byte):
+        if byte == '<':
+            return 'waitscriptendtag'
+        self.bodydata += byte
+
+    def begin_waitscriptendtag(self, byte):
+        self.temptagdata = ''
+        self.tagName = ''
+        self.endtag = 0
+
+    def do_waitscriptendtag(self, byte):
+        # 1 enforce / as first byte read
+        # 2 enforce following bytes to be subset of "script" until
+        #   tagName == "script"
+        #   2a when that happens, gotText(self.bodydata) and gotTagEnd(self.tagName)
+        # 3 spaces can happen anywhere, they're ignored
+        #   e.g. < / script >
+        # 4 anything else causes all data I've read to be moved to the
+        #   bodydata, and switch back to waitforendscript state
+
+        # If it turns out this _isn't_ a </script>, we need to
+        # remember all the data we've been through so we can append it
+        # to bodydata
+        self.temptagdata += byte
+
+        # 1
+        if byte == '/':
+            self.endtag = True
+        elif not self.endtag:
+            self.bodydata += "<" + self.temptagdata
+            return 'waitforendscript'
+        # 2
+        elif byte.isalnum() or byte in identChars:
+            self.tagName += byte
+            if not 'script'.startswith(self.tagName):
+                self.bodydata += "<" + self.temptagdata
+                return 'waitforendscript'
+            elif self.tagName == 'script':
+                self.gotText(self.bodydata)
+                self.gotTagEnd(self.tagName)
+                return 'waitforgt'
+        # 3
+        elif byte.isspace():
+            return 'waitscriptendtag'
+        # 4
+        else:
+            self.bodydata += "<" + self.temptagdata
+            return 'waitforendscript'
+
+
+    def begin_entityref(self, byte):
+        self.erefbuf = ''
+        self.erefextra = '' # extra bit for lenient mode
+
+    def do_entityref(self, byte):
+        if byte.isspace() or byte == "<":
+            if self.beExtremelyLenient:
+                # '&foo' probably was '&amp;foo'
+                if self.erefbuf and self.erefbuf != "amp":
+                    self.erefextra = self.erefbuf
+                self.erefbuf = "amp"
+                if byte == "<":
+                    return "tagstart"
+                else:
+                    self.erefextra += byte
+                    return 'spacebodydata'
+            self._parseError("Bad entity reference")
+        elif byte != ';':
+            self.erefbuf += byte
+        else:
+            return 'bodydata'
+
+    def end_entityref(self):
+        self.gotEntityReference(self.erefbuf)
+
+    # hacky support for space after & in entityref in beExtremelyLenient
+    # state should only happen in that case
+    def begin_spacebodydata(self, byte):
+        self.bodydata = self.erefextra
+        self.erefextra = None
+    do_spacebodydata = do_bodydata
+    end_spacebodydata = end_bodydata
+
+    # Sorta SAX-ish API
+
+    def gotTagStart(self, name, attributes):
+        '''Encountered an opening tag.
+
+        Default behaviour is to print.'''
+        print 'begin', name, attributes
+
+    def gotText(self, data):
+        '''Encountered text
+
+        Default behaviour is to print.'''
+        print 'text:', repr(data)
+
+    def gotEntityReference(self, entityRef):
+        '''Encountered mnemonic entity reference
+
+        Default behaviour is to print.'''
+        print 'entityRef: &%s;' % entityRef
+
+    def gotComment(self, comment):
+        '''Encountered comment.
+
+        Default behaviour is to ignore.'''
+        pass
+
+    def gotCData(self, cdata):
+        '''Encountered CDATA
+
+        Default behaviour is to call the gotText method'''
+        self.gotText(cdata)
+
+    def gotDoctype(self, doctype):
+        """Encountered DOCTYPE
+
+        This is really grotty: it basically just gives you everything between
+        '<!DOCTYPE' and '>' as an argument.
+        """
+        print '!DOCTYPE', repr(doctype)
+
+    def gotTagEnd(self, name):
+        '''Encountered closing tag
+
+        Default behaviour is to print.'''
+        print 'end', name
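+
+
+# Editor's usage sketch, not part of upstream Twisted: a minimal XMLParser
+# subclass that collects href attributes from anchor tags.  The markup fed to
+# it below is purely illustrative.
+class _ExampleLinkExtractor(XMLParser):
+
+    def connectionMade(self):
+        XMLParser.connectionMade(self)
+        self.links = []
+
+    def gotTagStart(self, name, attributes):
+        # Override the printing default and record anchors instead.
+        if name == 'a' and 'href' in attributes:
+            self.links.append(attributes['href'])
+
+    def gotTagEnd(self, name):
+        pass
+
+    def gotText(self, data):
+        pass
+
+
+def _exampleParse():
+    # Drive the parser directly, without attaching it to a transport.
+    p = _ExampleLinkExtractor()
+    p.connectionMade()
+    p.dataReceived('<html><body><a href="/index.html">home</a></body></html>')
+    p.connectionLost(None)
+    return p.links    # ['/index.html']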
diff --git a/ThirdParty/Twisted/twisted/web/tap.py b/ThirdParty/Twisted/twisted/web/tap.py
new file mode 100644
index 0000000..23abf50
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/tap.py
@@ -0,0 +1,232 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Support for creating a service which runs a web server.
+"""
+
+import os
+
+# Twisted Imports
+from twisted.web import server, static, twcgi, script, demo, distrib, wsgi
+from twisted.internet import interfaces, reactor
+from twisted.python import usage, reflect, threadpool
+from twisted.spread import pb
+from twisted.application import internet, service, strports
+
+
+class Options(usage.Options):
+    """
+    Define the options accepted by the I{twistd web} plugin.
+    """
+    synopsis = "[web options]"
+
+    optParameters = [["port", "p", None, "strports description of the port to "
+                      "start the server on."],
+                     ["logfile", "l", None, "Path to web CLF (Combined Log Format) log file."],
+                     ["https", None, None, "Port to listen on for Secure HTTP."],
+                     ["certificate", "c", "server.pem", "SSL certificate to use for HTTPS. "],
+                     ["privkey", "k", "server.pem", "SSL certificate to use for HTTPS."],
+                     ]
+
+    optFlags = [["personal", "",
+                 "Instead of generating a webserver, generate a "
+                 "ResourcePublisher which listens on  the port given by "
+                 "--port, or ~/%s " % (distrib.UserDirectory.userSocketName,) +
+                 "if --port is not specified."],
+                ["notracebacks", "n", "Do not display tracebacks in broken web pages. " +
+                 "Displaying tracebacks to users may be security risk!"],
+                ]
+
+    compData = usage.Completions(
+                   optActions={"logfile" : usage.CompleteFiles("*.log"),
+                               "certificate" : usage.CompleteFiles("*.pem"),
+                               "privkey" : usage.CompleteFiles("*.pem")}
+                   )
+
+    longdesc = """\
+This starts a webserver.  If you specify no arguments, it will be a
+demo webserver that has the Test class from twisted.web.demo in it."""
+
+    def __init__(self):
+        usage.Options.__init__(self)
+        self['indexes'] = []
+        self['root'] = None
+
+
+    def opt_index(self, indexName):
+        """
+        Add the name of a file used to check for directory indexes.
+        [default: index, index.html]
+        """
+        self['indexes'].append(indexName)
+
+    opt_i = opt_index
+
+
+    def opt_user(self):
+        """
+        Makes a server with ~/public_html and ~/.twistd-web-pb support for
+        users.
+        """
+        self['root'] = distrib.UserDirectory()
+
+    opt_u = opt_user
+
+
+    def opt_path(self, path):
+        """
+        <path> is either a specific file or a directory to be set as the root
+        of the web server. Use this if you have a directory full of HTML, cgi,
+        epy, or rpy files or any other files that you want to be served up raw.
+        """
+        self['root'] = static.File(os.path.abspath(path))
+        self['root'].processors = {
+            '.cgi': twcgi.CGIScript,
+            '.epy': script.PythonScript,
+            '.rpy': script.ResourceScript,
+            }
+
+
+    def opt_processor(self, proc):
+        """
+        `ext=class' where `class' is added as a Processor for files ending
+        with `ext'.
+        """
+        if not isinstance(self['root'], static.File):
+            raise usage.UsageError("You can only use --processor after --path.")
+        ext, klass = proc.split('=', 1)
+        self['root'].processors[ext] = reflect.namedClass(klass)
+
+
+    def opt_class(self, className):
+        """
+        Create a Resource subclass with a zero-argument constructor.
+        """
+        classObj = reflect.namedClass(className)
+        self['root'] = classObj()
+
+
+    def opt_resource_script(self, name):
+        """
+        An .rpy file to be used as the root resource of the webserver.
+        """
+        self['root'] = script.ResourceScriptWrapper(name)
+
+
+    def opt_wsgi(self, name):
+        """
+        The FQPN of a WSGI application object to serve as the root resource of
+        the webserver.
+        """
+        try:
+            application = reflect.namedAny(name)
+        except (AttributeError, ValueError):
+            raise usage.UsageError("No such WSGI application: %r" % (name,))
+        pool = threadpool.ThreadPool()
+        reactor.callWhenRunning(pool.start)
+        reactor.addSystemEventTrigger('after', 'shutdown', pool.stop)
+        self['root'] = wsgi.WSGIResource(reactor, pool, application)
+
+
+    def opt_mime_type(self, defaultType):
+        """
+        Specify the default mime-type for static files.
+        """
+        if not isinstance(self['root'], static.File):
+            raise usage.UsageError("You can only use --mime_type after --path.")
+        self['root'].defaultType = defaultType
+    opt_m = opt_mime_type
+
+
+    def opt_allow_ignore_ext(self):
+        """
+        Specify whether or not a request for 'foo' should return 'foo.ext'
+        """
+        if not isinstance(self['root'], static.File):
+            raise usage.UsageError("You can only use --allow_ignore_ext "
+                                   "after --path.")
+        self['root'].ignoreExt('*')
+
+
+    def opt_ignore_ext(self, ext):
+        """
+        Specify an extension to ignore.  These will be processed in order.
+        """
+        if not isinstance(self['root'], static.File):
+            raise usage.UsageError("You can only use --ignore_ext "
+                                   "after --path.")
+        self['root'].ignoreExt(ext)
+
+
+    def postOptions(self):
+        """
+        Set up conditional defaults and check for dependencies.
+
+        If SSL is not available but an HTTPS server was configured, raise a
+        L{UsageError} indicating that this is not possible.
+
+        If no server port was supplied, select a default appropriate for the
+        other options supplied.
+        """
+        if self['https']:
+            try:
+                from twisted.internet.ssl import DefaultOpenSSLContextFactory
+            except ImportError:
+                raise usage.UsageError("SSL support not installed")
+        if self['port'] is None:
+            if self['personal']:
+                path = os.path.expanduser(
+                    os.path.join('~', distrib.UserDirectory.userSocketName))
+                self['port'] = 'unix:' + path
+            else:
+                self['port'] = 'tcp:8080'
+
+
+
+def makePersonalServerFactory(site):
+    """
+    Create and return a factory which will respond to I{distrib} requests
+    against the given site.
+
+    @type site: L{twisted.web.server.Site}
+    @rtype: L{twisted.internet.protocol.Factory}
+    """
+    return pb.PBServerFactory(distrib.ResourcePublisher(site))
+
+
+
+def makeService(config):
+    s = service.MultiService()
+    if config['root']:
+        root = config['root']
+        if config['indexes']:
+            config['root'].indexNames = config['indexes']
+    else:
+        # This really ought to be web.Admin or something
+        root = demo.Test()
+
+    if isinstance(root, static.File):
+        root.registry.setComponent(interfaces.IServiceCollection, s)
+
+    if config['logfile']:
+        site = server.Site(root, logPath=config['logfile'])
+    else:
+        site = server.Site(root)
+
+    site.displayTracebacks = not config["notracebacks"]
+
+    if config['personal']:
+        personal = strports.service(
+            config['port'], makePersonalServerFactory(site))
+        personal.setServiceParent(s)
+    else:
+        if config['https']:
+            from twisted.internet.ssl import DefaultOpenSSLContextFactory
+            i = internet.SSLServer(int(config['https']), site,
+                          DefaultOpenSSLContextFactory(config['privkey'],
+                                                       config['certificate']))
+            i.setServiceParent(s)
+        strports.service(config['port'], site).setServiceParent(s)
+
+    return s
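+
+
+def _exampleMakeService():
+    """
+    Editor's usage sketch, not part of upstream Twisted: build the same
+    service hierarchy that C{twistd web --path /srv/www --port tcp:8080}
+    would, but programmatically.  The path and port are illustrative.
+    """
+    config = Options()
+    config.parseOptions(['--path', '/srv/www', '--port', 'tcp:8080'])
+    return makeService(config)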
diff --git a/ThirdParty/Twisted/twisted/web/template.py b/ThirdParty/Twisted/twisted/web/template.py
new file mode 100644
index 0000000..224a192
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/template.py
@@ -0,0 +1,566 @@
+# -*- test-case-name: twisted.web.test.test_template -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+HTML rendering for twisted.web.
+
+@var VALID_HTML_TAG_NAMES: A set of recognized HTML tag names, used by the
+    L{tag} object.
+
+@var TEMPLATE_NAMESPACE: The XML namespace used to identify attributes and
+    elements used by the templating system, which should be removed from the
+    final output document.
+
+@var tags: A convenience object which can produce L{Tag} objects on demand via
+    attribute access.  For example: C{tags.div} is equivalent to C{Tag("div")}.
+    Tags not specified in L{VALID_HTML_TAG_NAMES} will result in an
+    L{AttributeError}.
+"""
+
+__all__ = [
+    'TEMPLATE_NAMESPACE', 'VALID_HTML_TAG_NAMES', 'Element', 'TagLoader',
+    'XMLString', 'XMLFile', 'renderer', 'flatten', 'flattenString', 'tags',
+    'Comment', 'CDATA', 'Tag', 'slot', 'CharRef', 'renderElement'
+    ]
+
+import warnings
+from zope.interface import implements
+
+from cStringIO import StringIO
+from xml.sax import make_parser, handler
+
+from twisted.web._stan import Tag, slot, Comment, CDATA, CharRef
+from twisted.python.filepath import FilePath
+
+TEMPLATE_NAMESPACE = 'http://twistedmatrix.com/ns/twisted.web.template/0.1'
+
+from twisted.web.iweb import ITemplateLoader
+from twisted.python import log
+
+# Go read the definition of NOT_DONE_YET. For lulz. This is totally
+# equivalent. And this turns out to be necessary, because trying to import
+# NOT_DONE_YET in this module causes a circular import which we cannot escape
+# from. From which we cannot escape. Etc. glyph is okay with this solution for
+# now, and so am I, as long as this comment stays to explain to future
+# maintainers what it means. ~ C.
+#
+# See http://twistedmatrix.com/trac/ticket/5557 for progress on fixing this.
+NOT_DONE_YET = 1
+
+class _NSContext(object):
+    """
+    A mapping from XML namespaces onto their prefixes in the document.
+    """
+
+    def __init__(self, parent=None):
+        """
+        Pull out the parent's namespaces; if there is no parent, default to
+        the XML namespace.
+        """
+        self.parent = parent
+        if parent is not None:
+            self.nss = dict(parent.nss)
+        else:
+            self.nss = {'http://www.w3.org/XML/1998/namespace':'xml'}
+
+
+    def get(self, k, d=None):
+        """
+        Get a prefix for a namespace.
+
+        @param d: The default prefix value.
+        """
+        return self.nss.get(k, d)
+
+
+    def __setitem__(self, k, v):
+        """
+        Proxy through to setting the prefix for the namespace.
+        """
+        self.nss.__setitem__(k, v)
+
+
+    def __getitem__(self, k):
+        """
+        Proxy through to getting the prefix for the namespace.
+        """
+        return self.nss.__getitem__(k)
+
+
+
+class _ToStan(handler.ContentHandler, handler.EntityResolver):
+    """
+    A SAX parser which converts an XML document to the Twisted STAN
+    Document Object Model.
+    """
+
+    def __init__(self, sourceFilename):
+        """
+        @param sourceFilename: the filename to load the XML out of.
+        """
+        self.sourceFilename = sourceFilename
+        self.prefixMap = _NSContext()
+        self.inCDATA = False
+
+
+    def setDocumentLocator(self, locator):
+        """
+        Set the document locator, which knows about line and character numbers.
+        """
+        self.locator = locator
+
+
+    def startDocument(self):
+        """
+        Initialise the document.
+        """
+        self.document = []
+        self.current = self.document
+        self.stack = []
+        self.xmlnsAttrs = []
+
+
+    def endDocument(self):
+        """
+        Document ended.
+        """
+
+
+    def processingInstruction(self, target, data):
+        """
+        Processing instructions are ignored.
+        """
+
+
+    def startPrefixMapping(self, prefix, uri):
+        """
+        Set up the prefix mapping, which maps fully qualified namespace URIs
+        onto namespace prefixes.
+
+        This gets called before startElementNS whenever an C{xmlns} attribute
+        is seen.
+        """
+
+        self.prefixMap = _NSContext(self.prefixMap)
+        self.prefixMap[uri] = prefix
+
+        # Ignore the template namespace; we'll replace those during parsing.
+        if uri == TEMPLATE_NAMESPACE:
+            return
+
+        # Add to a list that will be applied once we have the element.
+        if prefix is None:
+            self.xmlnsAttrs.append(('xmlns',uri))
+        else:
+            self.xmlnsAttrs.append(('xmlns:%s'%prefix,uri))
+
+
+    def endPrefixMapping(self, prefix):
+        """
+        "Pops the stack" on the prefix mapping.
+
+        Gets called after endElementNS.
+        """
+        self.prefixMap = self.prefixMap.parent
+
+
+    def startElementNS(self, namespaceAndName, qname, attrs):
+        """
+        Called by the SAX parser when a new element is started.
+
+        @param namespaceAndName: a (namespace, name) tuple, where name
+            determines which type of action to take, if the namespace matches
+            L{TEMPLATE_NAMESPACE}.
+        @param qname: ignored.
+        @param attrs: attributes on the element being started.
+        """
+
+        filename = self.sourceFilename
+        lineNumber = self.locator.getLineNumber()
+        columnNumber = self.locator.getColumnNumber()
+
+        ns, name = namespaceAndName
+        if ns == TEMPLATE_NAMESPACE:
+            if name == 'transparent':
+                name = ''
+            elif name == 'slot':
+                try:
+                    # Try to get the default value for the slot
+                    default = attrs[(None, 'default')]
+                except KeyError:
+                    # If there wasn't one, then use None to indicate no
+                    # default.
+                    default = None
+                el = slot(
+                    attrs[(None, 'name')], default=default,
+                    filename=filename, lineNumber=lineNumber,
+                    columnNumber=columnNumber)
+                self.stack.append(el)
+                self.current.append(el)
+                self.current = el.children
+                return
+
+        render = None
+
+        attrs = dict(attrs)
+        for k, v in attrs.items():
+            attrNS, justTheName = k
+            if attrNS != TEMPLATE_NAMESPACE:
+                continue
+            if justTheName == 'render':
+                render = v
+                del attrs[k]
+
+        # nonTemplateAttrs is a dictionary mapping attributes that are *not* in
+        # TEMPLATE_NAMESPACE to their values.  Those in TEMPLATE_NAMESPACE were
+        # just removed from 'attrs' in the loop immediately above.  The key in
+        # nonTemplateAttrs is either simply the attribute name (if it was not
+        # specified as having a namespace in the template) or prefix:name,
+        # preserving the xml namespace prefix given in the document.
+
+        nonTemplateAttrs = {}
+        for (attrNs, attrName), v in attrs.items():
+            nsPrefix = self.prefixMap.get(attrNs)
+            if nsPrefix is None:
+                attrKey = attrName
+            else:
+                attrKey = '%s:%s' % (nsPrefix, attrName)
+            nonTemplateAttrs[attrKey] = v
+
+        if ns == TEMPLATE_NAMESPACE and name == 'attr':
+            if not self.stack:
+                # TODO: define a better exception for this?
+                raise AssertionError(
+                    '<{%s}attr> as top-level element' % (TEMPLATE_NAMESPACE,))
+            if 'name' not in nonTemplateAttrs:
+                # TODO: same here
+                raise AssertionError(
+                    '<{%s}attr> requires a name attribute' % (TEMPLATE_NAMESPACE,))
+            el = Tag('', render=render, filename=filename,
+                     lineNumber=lineNumber, columnNumber=columnNumber)
+            self.stack[-1].attributes[nonTemplateAttrs['name']] = el
+            self.stack.append(el)
+            self.current = el.children
+            return
+
+        # Apply any xmlns attributes
+        if self.xmlnsAttrs:
+            nonTemplateAttrs.update(dict(self.xmlnsAttrs))
+            self.xmlnsAttrs = []
+
+        # Add the prefix that was used in the parsed template for non-template
+        # namespaces (which will not be consumed anyway).
+        if ns != TEMPLATE_NAMESPACE and ns is not None:
+            prefix = self.prefixMap[ns]
+            if prefix is not None:
+                name = '%s:%s' % (self.prefixMap[ns],name)
+        el = Tag(
+            name, attributes=dict(nonTemplateAttrs), render=render,
+            filename=filename, lineNumber=lineNumber,
+            columnNumber=columnNumber)
+        self.stack.append(el)
+        self.current.append(el)
+        self.current = el.children
+
+
+    def characters(self, ch):
+        """
+        Called when we receive some characters.  CDATA characters get passed
+        through as is.
+
+        @type ch: C{string}
+        """
+        if self.inCDATA:
+            self.stack[-1].append(ch)
+            return
+        self.current.append(ch)
+
+
+    def endElementNS(self, name, qname):
+        """
+        A namespaced element has been closed.  Pop the stack; if anything is
+        left on it, continue building the parent element, otherwise return to
+        the document.
+        """
+        self.stack.pop()
+        if self.stack:
+            self.current = self.stack[-1].children
+        else:
+            self.current = self.document
+
+
+    def startDTD(self, name, publicId, systemId):
+        """
+        DTDs are ignored.
+        """
+
+
+    def endDTD(self, *args):
+        """
+        DTDs are ignored.
+        """
+
+
+    def startCDATA(self):
+        """
+        We're starting to be in a CDATA element; make a note of this.
+        """
+        self.inCDATA = True
+        self.stack.append([])
+
+
+    def endCDATA(self):
+        """
+        We're no longer in a CDATA element.  Collect up the characters we've
+        parsed and put them in a new CDATA object.
+        """
+        self.inCDATA = False
+        comment = ''.join(self.stack.pop())
+        self.current.append(CDATA(comment))
+
+
+    def comment(self, content):
+        """
+        Add an XML comment which we've encountered.
+        """
+        self.current.append(Comment(content))
+
+
+
+def _flatsaxParse(fl):
+    """
+    Perform a SAX parse of an XML document with the _ToStan class.
+
+    @param fl: The XML document to be parsed.
+    @type fl: A file object or filename.
+
+    @return: a C{list} of Stan objects.
+    """
+    parser = make_parser()
+    parser.setFeature(handler.feature_validation, 0)
+    parser.setFeature(handler.feature_namespaces, 1)
+    parser.setFeature(handler.feature_external_ges, 0)
+    parser.setFeature(handler.feature_external_pes, 0)
+
+    s = _ToStan(getattr(fl, "name", None))
+    parser.setContentHandler(s)
+    parser.setEntityResolver(s)
+    parser.setProperty(handler.property_lexical_handler, s)
+
+    parser.parse(fl)
+
+    return s.document
+
+
+class TagLoader(object):
+    """
+    An L{ITemplateLoader} that loads existing L{IRenderable} providers.
+
+    @ivar tag: The object which will be loaded.
+    @type tag: An L{IRenderable} provider.
+    """
+    implements(ITemplateLoader)
+
+    def __init__(self, tag):
+        """
+        @param tag: The object which will be loaded.
+        @type tag: An L{IRenderable} provider.
+        """
+        self.tag = tag
+
+
+    def load(self):
+        return [self.tag]
+
+
+
+class XMLString(object):
+    """
+    An L{ITemplateLoader} that loads and parses XML from a string.
+
+    @ivar _loadedTemplate: The loaded document.
+    @type _loadedTemplate: a C{list} of Stan objects.
+    """
+    implements(ITemplateLoader)
+
+    def __init__(self, s):
+        """
+        Run the parser on a StringIO copy of the string.
+
+        @param s: The string from which to load the XML.
+        @type s: C{str}
+        """
+        self._loadedTemplate = _flatsaxParse(StringIO(s))
+
+
+    def load(self):
+        """
+        Return the document.
+
+        @return: the loaded document.
+        @rtype: a C{list} of Stan objects.
+        """
+        return self._loadedTemplate
+
+
+
+class XMLFile(object):
+    """
+    An L{ITemplateLoader} that loads and parses XML from a file.
+
+    @ivar _loadedTemplate: The loaded document, or C{None}, if not loaded.
+    @type _loadedTemplate: a C{list} of Stan objects, or C{None}.
+
+    @ivar _path: The L{FilePath}, file object, or filename that is being
+        loaded from.
+    """
+    implements(ITemplateLoader)
+
+    def __init__(self, path):
+        """
+        Run the parser on a file.
+
+        @param path: The file from which to load the XML.
+        @type path: L{FilePath}
+        """
+        if not isinstance(path, FilePath):
+            warnings.warn(
+                "Passing filenames or file objects to XMLFile is deprecated "
+                "since Twisted 12.1.  Pass a FilePath instead.",
+                category=DeprecationWarning, stacklevel=2)
+        self._loadedTemplate = None
+        self._path = path
+
+
+    def _loadDoc(self):
+        """
+        Read and parse the XML.
+
+        @return: the loaded document.
+        @rtype: a C{list} of Stan objects.
+        """
+        if not isinstance(self._path, FilePath):
+            return _flatsaxParse(self._path)
+        else:
+            f = self._path.open('r')
+            try:
+                return _flatsaxParse(f)
+            finally:
+                f.close()
+
+
+    def __repr__(self):
+        return '<XMLFile of %r>' % (self._path,)
+
+
+    def load(self):
+        """
+        Return the document, first loading it if necessary.
+
+        @return: the loaded document.
+        @rtype: a C{list} of Stan objects.
+        """
+        if self._loadedTemplate is None:
+            self._loadedTemplate = self._loadDoc()
+        return self._loadedTemplate
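+
+# Editor's note, not part of upstream Twisted: the non-deprecated way to
+# construct this loader is with a FilePath, e.g.
+#     XMLFile(FilePath('templates/page.xml'))
+# where the path is illustrative; passing a plain filename still works but
+# triggers the DeprecationWarning issued in __init__ above.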
+
+
+
+# Last updated October 2011, using W3Schools as a reference. Link:
+# http://www.w3schools.com/html5/html5_reference.asp
+# Note that <xmp> is explicitly omitted; its semantics do not work with
+# t.w.template and it is officially deprecated.
+VALID_HTML_TAG_NAMES = set([
+    'a', 'abbr', 'acronym', 'address', 'applet', 'area', 'article', 'aside',
+    'audio', 'b', 'base', 'basefont', 'bdi', 'bdo', 'big', 'blockquote',
+    'body', 'br', 'button', 'canvas', 'caption', 'center', 'cite', 'code',
+    'col', 'colgroup', 'command', 'datalist', 'dd', 'del', 'details', 'dfn',
+    'dir', 'div', 'dl', 'dt', 'em', 'embed', 'fieldset', 'figcaption',
+    'figure', 'font', 'footer', 'form', 'frame', 'frameset', 'h1', 'h2', 'h3',
+    'h4', 'h5', 'h6', 'head', 'header', 'hgroup', 'hr', 'html', 'i', 'iframe',
+    'img', 'input', 'ins', 'isindex', 'keygen', 'kbd', 'label', 'legend',
+    'li', 'link', 'map', 'mark', 'menu', 'meta', 'meter', 'nav', 'noframes',
+    'noscript', 'object', 'ol', 'optgroup', 'option', 'output', 'p', 'param',
+    'pre', 'progress', 'q', 'rp', 'rt', 'ruby', 's', 'samp', 'script',
+    'section', 'select', 'small', 'source', 'span', 'strike', 'strong',
+    'style', 'sub', 'summary', 'sup', 'table', 'tbody', 'td', 'textarea',
+    'tfoot', 'th', 'thead', 'time', 'title', 'tr', 'tt', 'u', 'ul', 'var',
+    'video', 'wbr',
+])
+
+
+
+class _TagFactory(object):
+    """
+    A factory for L{Tag} objects; the implementation of the L{tags} object.
+
+    This allows for the syntactic convenience of C{from twisted.web.template
+    import tags; tags.a(href="linked-page.html")}, where 'a' can be basically
+    any HTML tag.
+
+    The class is not exposed publicly because you only ever need one of these,
+    and we already made it for you.
+
+    @see: L{tags}
+    """
+    def __getattr__(self, tagName):
+        if tagName == 'transparent':
+            return Tag('')
+        # allow for E.del as E.del_
+        tagName = tagName.rstrip('_')
+        if tagName not in VALID_HTML_TAG_NAMES:
+            raise AttributeError('unknown tag %r' % (tagName,))
+        return Tag(tagName)
+
+
+
+tags = _TagFactory()
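+
+def _exampleTags():
+    """
+    Editor's usage sketch, not part of upstream Twisted: build a small tree
+    with the C{tags} factory above and flatten it.  C{flattenString} is
+    imported at the bottom of this module, so it is available by the time
+    this sketch could run.
+    """
+    markup = tags.div(tags.p('Hello, ', tags.em('world')), id='greeting')
+    d = flattenString(None, markup)
+    # The Deferred fires synchronously with:
+    #     '<div id="greeting"><p>Hello, <em>world</em></p></div>'
+    return d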
+
+
+
+def renderElement(request, element,
+                  doctype='<!DOCTYPE html>', _failElement=None):
+    """
+    Render an element or other C{IRenderable}.
+
+    @param request: The C{Request} being rendered to.
+    @param element: An C{IRenderable} which will be rendered.
+    @param doctype: A C{str} which will be written as the first line of
+        the request, or C{None} to disable writing of a doctype.  The value
+        should not include a trailing newline; it defaults to the HTML5
+        doctype C{'<!DOCTYPE html>'}.
+
+    @returns: NOT_DONE_YET
+
+    @since: 12.1
+    """
+    if doctype is not None:
+        request.write(doctype)
+        request.write('\n')
+
+    if _failElement is None:
+        _failElement = twisted.web.util.FailureElement
+
+    d = flatten(request, element, request.write)
+
+    def eb(failure):
+        log.err(failure, "An error occurred while rendering the response.")
+        if request.site.displayTracebacks:
+            return flatten(request, _failElement(failure), request.write)
+        else:
+            request.write(
+                ('<div style="font-size:800%;'
+                 'background-color:#FFF;'
+                 'color:#F00'
+                 '">An error occurred while rendering the response.</div>'))
+
+    d.addErrback(eb)
+    d.addBoth(lambda _: request.finish())
+    return NOT_DONE_YET
+
+
+
+from twisted.web._element import Element, renderer
+from twisted.web._flatten import flatten, flattenString
+import twisted.web.util
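+
+
+# Editor's usage sketch, not part of upstream Twisted: a minimal Element with
+# an XMLString loader and one render directive.  The template text is
+# illustrative.
+class _ExampleGreeting(Element):
+
+    loader = XMLString(
+        '<p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
+        'Hello, <span t:render="name" /></p>')
+
+    @renderer
+    def name(self, request, tag):
+        return tag('world')
+
+# flattenString(None, _ExampleGreeting()) fires with
+# '<p>Hello, <span>world</span></p>'; inside a Resource.render method one
+# would typically write C{return renderElement(request, _ExampleGreeting())}.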
diff --git a/ThirdParty/Twisted/twisted/web/test/__init__.py b/ThirdParty/Twisted/twisted/web/test/__init__.py
new file mode 100644
index 0000000..cdbb14e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/__init__.py
@@ -0,0 +1,7 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web}.
+"""
+
diff --git a/ThirdParty/Twisted/twisted/web/test/_util.py b/ThirdParty/Twisted/twisted/web/test/_util.py
new file mode 100644
index 0000000..6117b72
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/_util.py
@@ -0,0 +1,77 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+General helpers for L{twisted.web} unit tests.
+"""
+
+from twisted.internet.defer import succeed
+from twisted.web import server
+from twisted.trial.unittest import TestCase
+from twisted.python.failure import Failure
+from twisted.web._flatten import flattenString
+from twisted.web.error import FlattenerError
+
+
+def _render(resource, request):
+    result = resource.render(request)
+    if isinstance(result, str):
+        request.write(result)
+        request.finish()
+        return succeed(None)
+    elif result is server.NOT_DONE_YET:
+        if request.finished:
+            return succeed(None)
+        else:
+            return request.notifyFinish()
+    else:
+        raise ValueError("Unexpected return value: %r" % (result,))
+
+
+
+class FlattenTestCase(TestCase):
+    """
+    A test case that assists with testing L{twisted.web._flatten}.
+    """
+    def assertFlattensTo(self, root, target):
+        """
+        Assert that a root element, when flattened, is equal to a string.
+        """
+        d = flattenString(None, root)
+        d.addCallback(lambda s: self.assertEqual(s, target))
+        return d
+
+
+    def assertFlattensImmediately(self, root, target):
+        """
+        Assert that a root element, when flattened, is equal to a string, and
+        that no asynchronous Deferred work is involved.
+
+        This version is more convenient in tests which wish to make multiple
+        assertions about flattening, since it can be called multiple times
+        without having to add multiple callbacks.
+        """
+        results = []
+        it = self.assertFlattensTo(root, target)
+        it.addBoth(results.append)
+        # Do our best to clean it up if something goes wrong.
+        self.addCleanup(it.cancel)
+        if not results:
+            self.fail("Rendering did not complete immediately.")
+        result = results[0]
+        if isinstance(result, Failure):
+            result.raiseException()
+
+
+    def assertFlatteningRaises(self, root, exn):
+        """
+        Assert flattening a root element raises a particular exception.
+        """
+        d = self.assertFailure(self.assertFlattensTo(root, ''), FlattenerError)
+        d.addCallback(lambda exc: self.assertIsInstance(exc._exception, exn))
+        return d
+
+
+
+
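+# Editor's usage sketch, not part of upstream Twisted: how a test module
+# would typically use FlattenTestCase.
+class _ExampleFlattenTests(FlattenTestCase):
+
+    def test_paragraph(self):
+        from twisted.web.template import tags
+        self.assertFlattensImmediately(tags.p('hi'), '<p>hi</p>')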
diff --git a/ThirdParty/Twisted/twisted/web/test/requesthelper.py b/ThirdParty/Twisted/twisted/web/test/requesthelper.py
new file mode 100644
index 0000000..88c6e9f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/requesthelper.py
@@ -0,0 +1,239 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Helpers related to HTTP requests, used by tests.
+"""
+
+from __future__ import division, absolute_import
+
+__all__ = ['DummyChannel', 'DummyRequest']
+
+from io import BytesIO
+
+from zope.interface import implementer
+
+from twisted.internet.defer import Deferred
+from twisted.internet.address import IPv4Address
+from twisted.internet.interfaces import ISSLTransport
+
+from twisted.web.http_headers import Headers
+from twisted.web.resource import Resource
+from twisted.web.server import NOT_DONE_YET, Session, Site
+
+
+class DummyChannel:
+    class TCP:
+        port = 80
+        disconnected = False
+
+        def __init__(self):
+            self.written = BytesIO()
+            self.producers = []
+
+        def getPeer(self):
+            return IPv4Address("TCP", '192.168.1.1', 12344)
+
+        def write(self, data):
+            if not isinstance(data, bytes):
+                raise TypeError("Can only write bytes to a transport, not %r" % (data,))
+            self.written.write(data)
+
+        def writeSequence(self, iovec):
+            for data in iovec:
+                self.write(data)
+
+        def getHost(self):
+            return IPv4Address("TCP", '10.0.0.1', self.port)
+
+        def registerProducer(self, producer, streaming):
+            self.producers.append((producer, streaming))
+
+        def loseConnection(self):
+            self.disconnected = True
+
+
+    @implementer(ISSLTransport)
+    class SSL(TCP):
+        pass
+
+    site = Site(Resource())
+
+    def __init__(self):
+        self.transport = self.TCP()
+
+
+    def requestDone(self, request):
+        pass
+
+
+
+class DummyRequest(object):
+    """
+    Represents a dummy or fake request.
+
+    @ivar _finishedDeferreds: C{None} or a C{list} of L{Deferreds} which will
+        be called back with C{None} when C{finish} is called or which will be
+        errbacked if C{processingFailed} is called.
+
+    @type headers: C{dict}
+    @ivar headers: A mapping of header name to header value for all request
+        headers.
+
+    @type outgoingHeaders: C{dict}
+    @ivar outgoingHeaders: A mapping of header name to header value for all
+        response headers.
+
+    @type responseCode: C{int}
+    @ivar responseCode: The response code which was passed to
+        C{setResponseCode}.
+
+    @type written: C{list} of C{bytes}
+    @ivar written: The bytes which have been written to the request.
+    """
+    uri = b'http://dummy/'
+    method = b'GET'
+    client = None
+
+    def registerProducer(self, prod, s):
+        self.go = 1
+        while self.go:
+            prod.resumeProducing()
+
+    def unregisterProducer(self):
+        self.go = 0
+
+
+    def __init__(self, postpath, session=None):
+        self.sitepath = []
+        self.written = []
+        self.finished = 0
+        self.postpath = postpath
+        self.prepath = []
+        self.session = None
+        self.protoSession = session or Session(0, self)
+        self.args = {}
+        self.outgoingHeaders = {}
+        self.responseHeaders = Headers()
+        self.responseCode = None
+        self.headers = {}
+        self._finishedDeferreds = []
+
+
+    def getHeader(self, name):
+        """
+        Retrieve the value of a request header.
+
+        @type name: C{bytes}
+        @param name: The name of the request header for which to retrieve the
+            value.  Header names are compared case-insensitively.
+
+        @rtype: C{bytes} or L{NoneType}
+        @return: The value of the specified request header.
+        """
+        return self.headers.get(name.lower(), None)
+
+
+    def setHeader(self, name, value):
+        """TODO: make this assert on write() if the header is content-length
+        """
+        self.outgoingHeaders[name.lower()] = value
+
+    def getSession(self):
+        if self.session:
+            return self.session
+        assert not self.written, "Session cannot be requested after data has been written."
+        self.session = self.protoSession
+        return self.session
+
+
+    def render(self, resource):
+        """
+        Render the given resource as a response to this request.
+
+        This implementation only handles a few of the most common behaviors of
+        resources.  It can handle a render method that returns a string or
+        C{NOT_DONE_YET}.  It doesn't know anything about the semantics of
+        request methods (eg HEAD) nor how to set any particular headers.
+        Basically, it's largely broken, but sufficient for some tests at least.
+        It should B{not} be expanded to do all the same stuff L{Request} does.
+        Instead, L{DummyRequest} should be phased out and L{Request} (or some
+        other real code factored in a different way) used.
+        """
+        result = resource.render(self)
+        if result is NOT_DONE_YET:
+            return
+        self.write(result)
+        self.finish()
+
+
+    def write(self, data):
+        if not isinstance(data, bytes):
+            raise TypeError("write() only accepts bytes")
+        self.written.append(data)
+
+    def notifyFinish(self):
+        """
+        Return a L{Deferred} which is called back with C{None} when the request
+        is finished.  This will probably only work if you haven't called
+        C{finish} yet.
+        """
+        finished = Deferred()
+        self._finishedDeferreds.append(finished)
+        return finished
+
+
+    def finish(self):
+        """
+        Record that the request is finished and call back any L{Deferred}s
+        waiting for notification of this.
+        """
+        self.finished = self.finished + 1
+        if self._finishedDeferreds is not None:
+            observers = self._finishedDeferreds
+            self._finishedDeferreds = None
+            for obs in observers:
+                obs.callback(None)
+
+
+    def processingFailed(self, reason):
+        """
+        Errback any L{Deferred}s waiting for finish notification.
+        """
+        if self._finishedDeferreds is not None:
+            observers = self._finishedDeferreds
+            self._finishedDeferreds = None
+            for obs in observers:
+                obs.errback(reason)
+
+
+    def addArg(self, name, value):
+        self.args[name] = [value]
+
+
+    def setResponseCode(self, code, message=None):
+        """
+        Set the HTTP status response code, asserting that it is set before
+        any data has been written.
+        """
+        assert not self.written, "Response code cannot be set after data has been written: %s." % "@@@@".join(self.written)
+        self.responseCode = code
+        self.responseMessage = message
+
+
+    def setLastModified(self, when):
+        assert not self.written, "Last-Modified cannot be set after data has been written: %s." % "@@@@".join(self.written)
+
+
+    def setETag(self, tag):
+        assert not self.written, "ETag cannot be set after data has been written: %s." % "@@@@".join(self.written)
+
+
+    def getClientIP(self):
+        """
+        Return the IPv4 address of the client which made this request, if there
+        is one, otherwise C{None}.
+        """
+        if isinstance(self.client, IPv4Address):
+            return self.client.host
+        return None
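+
+
+# Illustrative sketch, not part of the upstream module: the usual pattern in
+# tests is to build a DummyRequest, drive a resource or helper with it, and
+# then inspect the recorded state.  The literal names and values below are
+# hypothetical:
+#
+#     request = DummyRequest([b'child'])
+#     request.addArg(b'name', b'value')         # populates request.args
+#     request.setHeader(b'content-type', b'text/plain')
+#     d = request.notifyFinish()                # fires once finish() is called
+#     request.write(b'body bytes')
+#     request.finish()
+#     # request.written == [b'body bytes'], request.finished == 1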
diff --git a/ThirdParty/Twisted/twisted/web/test/test_agent.py b/ThirdParty/Twisted/twisted/web/test/test_agent.py
new file mode 100644
index 0000000..723c1ce
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_agent.py
@@ -0,0 +1,2103 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.client.Agent} and related new client APIs.
+"""
+
+import cookielib
+import zlib
+from StringIO import StringIO
+
+from zope.interface.verify import verifyObject
+
+from twisted.trial import unittest
+from twisted.web import client, error, http_headers
+from twisted.web._newclient import RequestNotSent, RequestTransmissionFailed
+from twisted.web._newclient import ResponseNeverReceived, ResponseFailed
+from twisted.internet import defer, task
+from twisted.python.failure import Failure
+from twisted.python.components import proxyForInterface
+from twisted.test.proto_helpers import StringTransport
+from twisted.test.proto_helpers import MemoryReactor
+from twisted.internet.task import Clock
+from twisted.internet.error import ConnectionRefusedError, ConnectionDone
+from twisted.internet.protocol import Protocol, Factory
+from twisted.internet.defer import Deferred, succeed
+from twisted.internet.endpoints import TCP4ClientEndpoint, SSL4ClientEndpoint
+from twisted.web.client import FileBodyProducer, Request, HTTPConnectionPool
+from twisted.web.client import _WebToNormalContextFactory
+from twisted.web.client import WebClientContextFactory, _HTTP11ClientFactory
+from twisted.web.iweb import UNKNOWN_LENGTH, IBodyProducer, IResponse
+from twisted.web._newclient import HTTP11ClientProtocol, Response
+from twisted.web.error import SchemeNotSupported
+
+try:
+    from twisted.internet import ssl
+except ImportError:
+    ssl = None
+
+
+class GetBodyProtocol(Protocol):
+
+    def __init__(self, deferred):
+        self.deferred = deferred
+        self.buf = ''
+
+    def dataReceived(self, bytes):
+        self.buf += bytes
+
+    def connectionLost(self, reason):
+        self.deferred.callback(self.buf)
+
+
+def getBody(response):
+    d = defer.Deferred()
+    response.deliverBody(GetBodyProtocol(d))
+    return d
+
+
+
+class StubHTTPProtocol(Protocol):
+    """
+    A protocol like L{HTTP11ClientProtocol} but which does not actually know
+    HTTP/1.1 and only collects requests in a list.
+
+    @ivar requests: A C{list} of two-tuples.  Each time a request is made, a
+        tuple consisting of the request and the L{Deferred} returned from the
+        request method is appended to this list.
+    """
+    def __init__(self):
+        self.requests = []
+        self.state = 'QUIESCENT'
+
+
+    def request(self, request):
+        """
+        Capture the given request for later inspection.
+
+        @return: A L{Deferred} which this code will never fire.
+        """
+        result = Deferred()
+        self.requests.append((request, result))
+        return result
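+
+    # Illustrative note, not part of the upstream module: tests in this file
+    # drive a StubHTTPProtocol by popping the recorded pair and firing the
+    # Deferred themselves to simulate a server response (stubResponse below
+    # is a hypothetical IResponse):
+    #
+    #     req, res = protocol.requests.pop()
+    #     res.callback(stubResponse)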
+
+
+
+class FileConsumer(object):
+    def __init__(self, outputFile):
+        self.outputFile = outputFile
+
+
+    def write(self, bytes):
+        self.outputFile.write(bytes)
+
+
+
+class FileBodyProducerTests(unittest.TestCase):
+    """
+    Tests for the L{FileBodyProducer} which reads bytes from a file and writes
+    them to an L{IConsumer}.
+    """
+    def _termination(self):
+        """
+        This method can be used as the C{terminationPredicateFactory} for a
+        L{Cooperator}.  It returns a predicate which immediately returns
+        C{False}, indicating that no more work should be done this iteration.
+        This has the result of only allowing one iteration of a cooperative
+        task to be run per L{Cooperator} iteration.
+        """
+        return lambda: True
+
+
+    def setUp(self):
+        """
+        Create a L{Cooperator} hooked up to an easily controlled, deterministic
+        scheduler to use with L{FileBodyProducer}.
+        """
+        self._scheduled = []
+        self.cooperator = task.Cooperator(
+            self._termination, self._scheduled.append)
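+        # Illustrative note, not part of the upstream module: with the
+        # termination predicate above, each scheduled call performs exactly
+        # one read/write iteration, so the tests below drive a producer
+        # deterministically, roughly:
+        #
+        #     producer.startProducing(consumer)
+        #     self._scheduled.pop(0)()   # copy one readSize chunk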
+
+
+    def test_interface(self):
+        """
+        L{FileBodyProducer} instances provide L{IBodyProducer}.
+        """
+        self.assertTrue(verifyObject(
+                IBodyProducer, FileBodyProducer(StringIO(""))))
+
+
+    def test_unknownLength(self):
+        """
+        If the L{FileBodyProducer} is constructed with a file-like object
+        without either a C{seek} or C{tell} method, its C{length} attribute is
+        set to C{UNKNOWN_LENGTH}.
+        """
+        class HasSeek(object):
+            def seek(self, offset, whence):
+                pass
+
+        class HasTell(object):
+            def tell(self):
+                pass
+
+        producer = FileBodyProducer(HasSeek())
+        self.assertEqual(UNKNOWN_LENGTH, producer.length)
+        producer = FileBodyProducer(HasTell())
+        self.assertEqual(UNKNOWN_LENGTH, producer.length)
+
+
+    def test_knownLength(self):
+        """
+        If the L{FileBodyProducer} is constructed with a file-like object with
+        both C{seek} and C{tell} methods, its C{length} attribute is set to the
+        size of the file as determined by those methods.
+        """
+        inputBytes = "here are some bytes"
+        inputFile = StringIO(inputBytes)
+        inputFile.seek(5)
+        producer = FileBodyProducer(inputFile)
+        self.assertEqual(len(inputBytes) - 5, producer.length)
+        self.assertEqual(inputFile.tell(), 5)
+
+
+    def test_defaultCooperator(self):
+        """
+        If no L{Cooperator} instance is passed to L{FileBodyProducer}, the
+        global cooperator is used.
+        """
+        producer = FileBodyProducer(StringIO(""))
+        self.assertEqual(task.cooperate, producer._cooperate)
+
+
+    def test_startProducing(self):
+        """
+        L{FileBodyProducer.startProducing} starts writing bytes from the input
+        file to the given L{IConsumer} and returns a L{Deferred} which fires
+        when they have all been written.
+        """
+        expectedResult = "hello, world"
+        readSize = 3
+        output = StringIO()
+        consumer = FileConsumer(output)
+        producer = FileBodyProducer(
+            StringIO(expectedResult), self.cooperator, readSize)
+        complete = producer.startProducing(consumer)
+        for i in range(len(expectedResult) // readSize + 1):
+            self._scheduled.pop(0)()
+        self.assertEqual([], self._scheduled)
+        self.assertEqual(expectedResult, output.getvalue())
+        self.assertEqual(None, self.successResultOf(complete))
+
+
+    def test_inputClosedAtEOF(self):
+        """
+        When L{FileBodyProducer} reaches end-of-file on the input file given to
+        it, the input file is closed.
+        """
+        readSize = 4
+        inputBytes = "some friendly bytes"
+        inputFile = StringIO(inputBytes)
+        producer = FileBodyProducer(inputFile, self.cooperator, readSize)
+        consumer = FileConsumer(StringIO())
+        producer.startProducing(consumer)
+        for i in range(len(inputBytes) // readSize + 2):
+            self._scheduled.pop(0)()
+        self.assertTrue(inputFile.closed)
+
+
+    def test_failedReadWhileProducing(self):
+        """
+        If a read from the input file fails while producing bytes to the
+        consumer, the L{Deferred} returned by
+        L{FileBodyProducer.startProducing} fires with a L{Failure} wrapping
+        that exception.
+        """
+        class BrokenFile(object):
+            def read(self, count):
+                raise IOError("Simulated bad thing")
+        producer = FileBodyProducer(BrokenFile(), self.cooperator)
+        complete = producer.startProducing(FileConsumer(StringIO()))
+        self._scheduled.pop(0)()
+        self.failureResultOf(complete).trap(IOError)
+
+
+    def test_stopProducing(self):
+        """
+        L{FileBodyProducer.stopProducing} stops the underlying L{IPullProducer}
+        and the cooperative task responsible for calling C{resumeProducing} and
+        closes the input file but does not cause the L{Deferred} returned by
+        C{startProducing} to fire.
+        """
+        expectedResult = "hello, world"
+        readSize = 3
+        output = StringIO()
+        consumer = FileConsumer(output)
+        inputFile = StringIO(expectedResult)
+        producer = FileBodyProducer(
+            inputFile, self.cooperator, readSize)
+        complete = producer.startProducing(consumer)
+        producer.stopProducing()
+        self.assertTrue(inputFile.closed)
+        self._scheduled.pop(0)()
+        self.assertEqual("", output.getvalue())
+        self.assertNoResult(complete)
+
+
+    def test_pauseProducing(self):
+        """
+        L{FileBodyProducer.pauseProducing} temporarily suspends writing bytes
+        from the input file to the given L{IConsumer}.
+        """
+        expectedResult = "hello, world"
+        readSize = 5
+        output = StringIO()
+        consumer = FileConsumer(output)
+        producer = FileBodyProducer(
+            StringIO(expectedResult), self.cooperator, readSize)
+        complete = producer.startProducing(consumer)
+        self._scheduled.pop(0)()
+        self.assertEqual(output.getvalue(), expectedResult[:5])
+        producer.pauseProducing()
+
+        # Sort of depends on an implementation detail of Cooperator: even
+        # though the only task is paused, there's still a scheduled call.  If
+        # this were to go away because Cooperator became smart enough to cancel
+        # this call in this case, that would be fine.
+        self._scheduled.pop(0)()
+
+        # Since the producer is paused, no new data should be here.
+        self.assertEqual(output.getvalue(), expectedResult[:5])
+        self.assertEqual([], self._scheduled)
+        self.assertNoResult(complete)
+
+
+    def test_resumeProducing(self):
+        """
+        L{FileBodyProducer.resumeProducing} re-commences writing bytes from the
+        input file to the given L{IConsumer} after it was previously paused
+        with L{FileBodyProducer.pauseProducing}.
+        """
+        expectedResult = "hello, world"
+        readSize = 5
+        output = StringIO()
+        consumer = FileConsumer(output)
+        producer = FileBodyProducer(
+            StringIO(expectedResult), self.cooperator, readSize)
+        producer.startProducing(consumer)
+        self._scheduled.pop(0)()
+        self.assertEqual(expectedResult[:readSize], output.getvalue())
+        producer.pauseProducing()
+        producer.resumeProducing()
+        self._scheduled.pop(0)()
+        self.assertEqual(expectedResult[:readSize * 2], output.getvalue())
+
+
+
+class FakeReactorAndConnectMixin:
+    """
+    A test mixin providing a testable C{Reactor} class and a dummy C{connect}
+    method which allows instances to pretend to be endpoints.
+    """
+
+    class Reactor(MemoryReactor, Clock):
+        def __init__(self):
+            MemoryReactor.__init__(self)
+            Clock.__init__(self)
+
+
+    class StubEndpoint(object):
+        """
+        Endpoint that wraps an existing endpoint, substitutes StubHTTPProtocol,
+        and attaches the resulting protocol instances to the given test case.
+        """
+
+        def __init__(self, endpoint, testCase):
+            self.endpoint = endpoint
+            self.testCase = testCase
+            self.factory = _HTTP11ClientFactory(lambda p: None)
+            self.protocol = StubHTTPProtocol()
+            self.factory.buildProtocol = lambda addr: self.protocol
+
+        def connect(self, ignoredFactory):
+            self.testCase.protocol = self.protocol
+            self.endpoint.connect(self.factory)
+            return succeed(self.protocol)
+
+
+    def buildAgentForWrapperTest(self, reactor):
+        """
+        Return an Agent suitable for use in tests that wrap the Agent and want
+        both a fake reactor and StubHTTPProtocol.
+        """
+        agent = client.Agent(reactor)
+        _oldGetEndpoint = agent._getEndpoint
+        agent._getEndpoint = lambda *args: (
+            self.StubEndpoint(_oldGetEndpoint(*args), self))
+        return agent
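+
+        # Illustrative note, not part of the upstream module: the wrapper
+        # tests below (for example the CookieAgent tests) use this roughly
+        # as follows, where someResponse is a hypothetical IResponse:
+        #
+        #     agent = self.buildAgentForWrapperTest(self.reactor)
+        #     wrapper = client.CookieAgent(agent, cookieJar)
+        #     wrapper.request('GET', 'http://example.com/')
+        #     req, res = self.protocol.requests.pop()
+        #     res.callback(someResponse)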
+
+
+    def connect(self, factory):
+        """
+        Fake implementation of an endpoint which synchronously
+        succeeds with an instance of L{StubHTTPProtocol} for ease of
+        testing.
+        """
+        protocol = StubHTTPProtocol()
+        protocol.makeConnection(None)
+        self.protocol = protocol
+        return succeed(protocol)
+
+
+
+class DummyEndpoint(object):
+    """
+    An endpoint that uses a fake transport.
+    """
+
+    def connect(self, factory):
+        protocol = factory.buildProtocol(None)
+        protocol.makeConnection(StringTransport())
+        return succeed(protocol)
+
+
+
+class BadEndpoint(object):
+    """
+    An endpoint that shouldn't be called.
+    """
+
+    def connect(self, factory):
+        raise RuntimeError("This endpoint should not have been used.")
+
+
+class DummyFactory(Factory):
+    """
+    Create C{StubHTTPProtocol} instances.
+    """
+    def __init__(self, quiescentCallback):
+        pass
+
+    protocol = StubHTTPProtocol
+
+
+
+class HTTPConnectionPoolTests(unittest.TestCase, FakeReactorAndConnectMixin):
+    """
+    Tests for the L{HTTPConnectionPool} class.
+    """
+
+    def setUp(self):
+        self.fakeReactor = self.Reactor()
+        self.pool = HTTPConnectionPool(self.fakeReactor)
+        self.pool._factory = DummyFactory
+        # The retry code path is tested in HTTPConnectionPoolRetryTests:
+        self.pool.retryAutomatically = False
+
+
+    def test_getReturnsNewIfCacheEmpty(self):
+        """
+        If there are no cached connections,
+        L{HTTPConnectionPool.getConnection} returns a new connection.
+        """
+        self.assertEqual(self.pool._connections, {})
+
+        def gotConnection(conn):
+            self.assertIsInstance(conn, StubHTTPProtocol)
+            # The new connection is not stored in the pool:
+            self.assertNotIn(conn, self.pool._connections.values())
+
+        unknownKey = 12245
+        d = self.pool.getConnection(unknownKey, DummyEndpoint())
+        return d.addCallback(gotConnection)
+
+
+    def test_putStartsTimeout(self):
+        """
+        If a connection is put back into the pool, a 240-second timeout is
+        started.
+
+        When the timeout hits, the connection is closed and removed from the
+        pool.
+        """
+        # We start out with one cached connection:
+        protocol = StubHTTPProtocol()
+        protocol.makeConnection(StringTransport())
+        self.pool._putConnection(("http", "example.com", 80), protocol)
+
+        # Connection is in pool, still not closed:
+        self.assertEqual(protocol.transport.disconnecting, False)
+        self.assertIn(protocol,
+                      self.pool._connections[("http", "example.com", 80)])
+
+        # Advance 239 seconds, still not closed:
+        self.fakeReactor.advance(239)
+        self.assertEqual(protocol.transport.disconnecting, False)
+        self.assertIn(protocol,
+                      self.pool._connections[("http", "example.com", 80)])
+        self.assertIn(protocol, self.pool._timeouts)
+
+        # Advance past 240 seconds, connection will be closed:
+        self.fakeReactor.advance(1.1)
+        self.assertEqual(protocol.transport.disconnecting, True)
+        self.assertNotIn(protocol,
+                         self.pool._connections[("http", "example.com", 80)])
+        self.assertNotIn(protocol, self.pool._timeouts)
+
+
+    def test_putExceedsMaxPersistent(self):
+        """
+        If an idle connection is put back in the cache and the max number of
+        persistent connections has been exceeded, one of the connections is
+        closed and removed from the cache.
+        """
+        pool = self.pool
+
+        # We start out with two cached connections, the max:
+        origCached = [StubHTTPProtocol(), StubHTTPProtocol()]
+        for p in origCached:
+            p.makeConnection(StringTransport())
+            pool._putConnection(("http", "example.com", 80), p)
+        self.assertEqual(pool._connections[("http", "example.com", 80)],
+                         origCached)
+        timeouts = pool._timeouts.copy()
+
+        # Now we add another one:
+        newProtocol = StubHTTPProtocol()
+        newProtocol.makeConnection(StringTransport())
+        pool._putConnection(("http", "example.com", 80), newProtocol)
+
+        # The oldest cached connection will be removed and disconnected:
+        newCached = pool._connections[("http", "example.com", 80)]
+        self.assertEqual(len(newCached), 2)
+        self.assertEqual(newCached, [origCached[1], newProtocol])
+        self.assertEqual([p.transport.disconnecting for p in newCached],
+                         [False, False])
+        self.assertEqual(origCached[0].transport.disconnecting, True)
+        self.assertTrue(timeouts[origCached[0]].cancelled)
+        self.assertNotIn(origCached[0], pool._timeouts)
+
+
+    def test_maxPersistentPerHost(self):
+        """
+        C{maxPersistentPerHost} is enforced per C{(scheme, host, port)}:
+        different keys have different max connections.
+        """
+        def addProtocol(scheme, host, port):
+            p = StubHTTPProtocol()
+            p.makeConnection(StringTransport())
+            self.pool._putConnection((scheme, host, port), p)
+            return p
+        persistent = []
+        persistent.append(addProtocol("http", "example.com", 80))
+        persistent.append(addProtocol("http", "example.com", 80))
+        addProtocol("https", "example.com", 443)
+        addProtocol("http", "www2.example.com", 80)
+
+        self.assertEqual(
+            self.pool._connections[("http", "example.com", 80)], persistent)
+        self.assertEqual(
+            len(self.pool._connections[("https", "example.com", 443)]), 1)
+        self.assertEqual(
+            len(self.pool._connections[("http", "www2.example.com", 80)]), 1)
+
+
+    def test_getCachedConnection(self):
+        """
+        Getting an address which has a cached connection returns the cached
+        connection, removes it from the cache and cancels its timeout.
+        """
+        # We start out with one cached connection:
+        protocol = StubHTTPProtocol()
+        protocol.makeConnection(StringTransport())
+        self.pool._putConnection(("http", "example.com", 80), protocol)
+
+        def gotConnection(conn):
+            # We got the cached connection:
+            self.assertIdentical(protocol, conn)
+            self.assertNotIn(
+                conn, self.pool._connections[("http", "example.com", 80)])
+            # And the timeout was cancelled:
+            self.fakeReactor.advance(241)
+            self.assertEqual(conn.transport.disconnecting, False)
+            self.assertNotIn(conn, self.pool._timeouts)
+
+        return self.pool.getConnection(("http", "example.com", 80),
+                                       BadEndpoint(),
+                                       ).addCallback(gotConnection)
+
+
+    def test_newConnection(self):
+        """
+        The pool's C{_newConnection} method constructs a new connection.
+        """
+        # We start out with one cached connection:
+        protocol = StubHTTPProtocol()
+        protocol.makeConnection(StringTransport())
+        key = 12245
+        self.pool._putConnection(key, protocol)
+
+        def gotConnection(newConnection):
+            # We got a new connection:
+            self.assertNotIdentical(protocol, newConnection)
+            # And the old connection is still there:
+            self.assertIn(protocol, self.pool._connections[key])
+            # While the new connection is not:
+            self.assertNotIn(newConnection, self.pool._connections.values())
+
+        d = self.pool._newConnection(key, DummyEndpoint())
+        return d.addCallback(gotConnection)
+
+
+    def test_getSkipsDisconnected(self):
+        """
+        When getting connections out of the cache, disconnected connections
+        are removed and not returned.
+        """
+        pool = self.pool
+        key = ("http", "example.com", 80)
+
+        # We start out with two cached connections, the max:
+        origCached = [StubHTTPProtocol(), StubHTTPProtocol()]
+        for p in origCached:
+            p.makeConnection(StringTransport())
+            pool._putConnection(key, p)
+        self.assertEqual(pool._connections[key], origCached)
+
+        # We close the first one:
+        origCached[0].state = "DISCONNECTED"
+
+        # Now, when we retrieve connections we should get the *second* one:
+        result = []
+        self.pool.getConnection(key,
+                                BadEndpoint()).addCallback(result.append)
+        self.assertIdentical(result[0], origCached[1])
+
+        # And both the disconnected and removed connections should be out of
+        # the cache:
+        self.assertEqual(pool._connections[key], [])
+        self.assertEqual(pool._timeouts, {})
+
+
+    def test_putNotQuiescent(self):
+        """
+        If a non-quiescent connection is put back in the cache, an error is
+        logged.
+        """
+        protocol = StubHTTPProtocol()
+        # By default state is QUIESCENT
+        self.assertEqual(protocol.state, "QUIESCENT")
+
+        protocol.state = "NOTQUIESCENT"
+        self.pool._putConnection(("http", "example.com", 80), protocol)
+        exc, = self.flushLoggedErrors(RuntimeError)
+        self.assertEqual(
+            exc.value.args[0],
+            "BUG: Non-quiescent protocol added to connection pool.")
+        self.assertIdentical(None, self.pool._connections.get(
+                ("http", "example.com", 80)))
+
+
+    def test_getUsesQuiescentCallback(self):
+        """
+        When L{HTTPConnectionPool.getConnection} connects, it returns a
+        C{Deferred} that fires with an instance of L{HTTP11ClientProtocol}
+        that has the correct quiescent callback attached. When this callback
+        is called the protocol is returned to the cache correctly, using the
+        right key.
+        """
+        class StringEndpoint(object):
+            def connect(self, factory):
+                p = factory.buildProtocol(None)
+                p.makeConnection(StringTransport())
+                return succeed(p)
+
+        pool = HTTPConnectionPool(self.fakeReactor, True)
+        pool.retryAutomatically = False
+        result = []
+        key = "a key"
+        pool.getConnection(
+            key, StringEndpoint()).addCallback(
+            result.append)
+        protocol = result[0]
+        self.assertIsInstance(protocol, HTTP11ClientProtocol)
+
+        # Now that we have a protocol instance, let's try to put it back in
+        # the pool:
+        protocol._state = "QUIESCENT"
+        protocol._quiescentCallback(protocol)
+
+        # If we try to retrieve a connection to the same destination again,
+        # we should get the same protocol, because it should have been added
+        # back to the pool:
+        result2 = []
+        pool.getConnection(
+            key, StringEndpoint()).addCallback(
+            result2.append)
+        self.assertIdentical(result2[0], protocol)
+
+
+    def test_closeCachedConnections(self):
+        """
+        L{HTTPConnectionPool.closeCachedConnections} closes all cached
+        connections and removes them from the cache. It returns a Deferred
+        that fires when they have all lost their connections.
+        """
+        persistent = []
+        def addProtocol(scheme, host, port):
+            p = HTTP11ClientProtocol()
+            p.makeConnection(StringTransport())
+            self.pool._putConnection((scheme, host, port), p)
+            persistent.append(p)
+        addProtocol("http", "example.com", 80)
+        addProtocol("http", "www2.example.com", 80)
+        doneDeferred = self.pool.closeCachedConnections()
+
+        # Connections have begun disconnecting:
+        for p in persistent:
+            self.assertEqual(p.transport.disconnecting, True)
+        self.assertEqual(self.pool._connections, {})
+        # All timeouts were cancelled and removed:
+        for dc in self.fakeReactor.getDelayedCalls():
+            self.assertEqual(dc.cancelled, True)
+        self.assertEqual(self.pool._timeouts, {})
+
+        # Returned Deferred fires when all connections have been closed:
+        result = []
+        doneDeferred.addCallback(result.append)
+        self.assertEqual(result, [])
+        persistent[0].connectionLost(Failure(ConnectionDone()))
+        self.assertEqual(result, [])
+        persistent[1].connectionLost(Failure(ConnectionDone()))
+        self.assertEqual(result, [None])
+
+
+
+class AgentTests(unittest.TestCase, FakeReactorAndConnectMixin):
+    """
+    Tests for the new HTTP client API provided by L{Agent}.
+    """
+    def setUp(self):
+        """
+        Create an L{Agent} wrapped around a fake reactor.
+        """
+        self.reactor = self.Reactor()
+        self.agent = client.Agent(self.reactor)
+
+
+    def completeConnection(self):
+        """
+        Do whitebox stuff to finish any outstanding connection attempts the
+        agent may have initiated.
+
+        This spins the fake reactor clock just enough to get L{ClientCreator},
+        which the agent is implemented in terms of, to fire its Deferreds.
+        """
+        self.reactor.advance(0)
+
+
+    def test_defaultPool(self):
+        """
+        If no pool is passed in, the L{Agent} creates a non-persistent pool.
+        """
+        agent = client.Agent(self.reactor)
+        self.assertIsInstance(agent._pool, HTTPConnectionPool)
+        self.assertEqual(agent._pool.persistent, False)
+        self.assertIdentical(agent._reactor, agent._pool._reactor)
+
+
+    def test_persistent(self):
+        """
+        If C{persistent} is set to C{True} on the L{HTTPConnectionPool} (the
+        default), C{Request}s are created with their C{persistent} flag set to
+        C{True}.
+        """
+        pool = HTTPConnectionPool(self.reactor)
+        agent = client.Agent(self.reactor, pool=pool)
+        agent._getEndpoint = lambda *args: self
+        agent.request("GET", "http://127.0.0.1")
+        self.assertEqual(self.protocol.requests[0][0].persistent, True)
+
+
+    def test_nonPersistent(self):
+        """
+        If C{persistent} is set to C{False} when creating the
+        L{HTTPConnectionPool}, C{Request}s are created with their
+        C{persistent} flag set to C{False}.
+
+        Elsewhere in the tests for the underlying HTTP code we ensure that
+        this will result in the disconnection of the HTTP protocol once the
+        request is done, so that the connection will not be returned to the
+        pool.
+        """
+        pool = HTTPConnectionPool(self.reactor, persistent=False)
+        agent = client.Agent(self.reactor, pool=pool)
+        agent._getEndpoint = lambda *args: self
+        agent.request("GET", "http://127.0.0.1")
+        self.assertEqual(self.protocol.requests[0][0].persistent, False)
+
+
+    def test_connectUsesConnectionPool(self):
+        """
+        When a connection is made by the Agent, it uses its pool's
+        C{getConnection} method to do so, with the endpoint returned by
+        C{self._getEndpoint}. The key used is C{(scheme, host, port)}.
+        """
+        endpoint = DummyEndpoint()
+        class MyAgent(client.Agent):
+            def _getEndpoint(this, scheme, host, port):
+                self.assertEqual((scheme, host, port),
+                                 ("http", "foo", 80))
+                return endpoint
+
+        class DummyPool(object):
+            connected = False
+            persistent = False
+            def getConnection(this, key, ep):
+                this.connected = True
+                self.assertEqual(ep, endpoint)
+                # This is the key the default Agent uses, others will have
+                # different keys:
+                self.assertEqual(key, ("http", "foo", 80))
+                return defer.succeed(StubHTTPProtocol())
+
+        pool = DummyPool()
+        agent = MyAgent(self.reactor, pool=pool)
+        self.assertIdentical(pool, agent._pool)
+
+        headers = http_headers.Headers()
+        headers.addRawHeader("host", "foo")
+        bodyProducer = object()
+        agent.request('GET', 'http://foo/',
+                      bodyProducer=bodyProducer, headers=headers)
+        self.assertEqual(agent._pool.connected, True)
+
+
+    def test_unsupportedScheme(self):
+        """
+        L{Agent.request} returns a L{Deferred} which fails with
+        L{SchemeNotSupported} if the scheme of the URI passed to it is not
+        C{'http'}.
+        """
+        return self.assertFailure(
+            self.agent.request('GET', 'mailto:alice@example.com'),
+            SchemeNotSupported)
+
+
+    def test_connectionFailed(self):
+        """
+        The L{Deferred} returned by L{Agent.request} fires with a L{Failure} if
+        the TCP connection attempt fails.
+        """
+        result = self.agent.request('GET', 'http://foo/')
+        # Cause the connection to be refused
+        host, port, factory = self.reactor.tcpClients.pop()[:3]
+        factory.clientConnectionFailed(None, Failure(ConnectionRefusedError()))
+        self.completeConnection()
+        return self.assertFailure(result, ConnectionRefusedError)
+
+
+    def test_connectHTTP(self):
+        """
+        L{Agent._getEndpoint} returns a C{TCP4ClientEndpoint} when passed a
+        scheme of C{'http'}.
+        """
+        expectedHost = 'example.com'
+        expectedPort = 1234
+        endpoint = self.agent._getEndpoint('http', expectedHost, expectedPort)
+        self.assertEqual(endpoint._host, expectedHost)
+        self.assertEqual(endpoint._port, expectedPort)
+        self.assertIsInstance(endpoint, TCP4ClientEndpoint)
+
+
+    def test_connectHTTPS(self):
+        """
+        L{Agent._getEndpoint} returns a C{SSL4ClientEndpoint} when passed a
+        scheme of C{'https'}.
+        """
+        expectedHost = 'example.com'
+        expectedPort = 4321
+        endpoint = self.agent._getEndpoint('https', expectedHost, expectedPort)
+        self.assertIsInstance(endpoint, SSL4ClientEndpoint)
+        self.assertEqual(endpoint._host, expectedHost)
+        self.assertEqual(endpoint._port, expectedPort)
+        self.assertIsInstance(endpoint._sslContextFactory,
+                              _WebToNormalContextFactory)
+        # Default context factory was used:
+        self.assertIsInstance(endpoint._sslContextFactory._webContext,
+                              WebClientContextFactory)
+    if ssl is None:
+        test_connectHTTPS.skip = "OpenSSL not present"
+
+
+    def test_connectHTTPSCustomContextFactory(self):
+        """
+        If a context factory is passed to L{Agent.__init__} it will be used to
+        determine the SSL parameters for HTTPS requests.  When an HTTPS request
+        is made, the hostname and port number of the request URL will be passed
+        to the context factory's C{getContext} method.  The resulting context
+        object will be used to establish the SSL connection.
+        """
+        expectedHost = 'example.org'
+        expectedPort = 20443
+        expectedContext = object()
+
+        contextArgs = []
+        class StubWebContextFactory(object):
+            def getContext(self, hostname, port):
+                contextArgs.append((hostname, port))
+                return expectedContext
+
+        agent = client.Agent(self.reactor, StubWebContextFactory())
+        endpoint = agent._getEndpoint('https', expectedHost, expectedPort)
+        contextFactory = endpoint._sslContextFactory
+        context = contextFactory.getContext()
+        self.assertEqual(context, expectedContext)
+        self.assertEqual(contextArgs, [(expectedHost, expectedPort)])
+
+
+    def test_hostProvided(self):
+        """
+        If C{None} is passed to L{Agent.request} for the C{headers} parameter,
+        a L{Headers} instance is created for the request and a I{Host} header
+        added to it.
+        """
+        self.agent._getEndpoint = lambda *args: self
+        self.agent.request(
+            'GET', 'http://example.com/foo?bar')
+
+        req, res = self.protocol.requests.pop()
+        self.assertEqual(req.headers.getRawHeaders('host'), ['example.com'])
+
+
+    def test_hostOverride(self):
+        """
+        If the headers passed to L{Agent.request} includes a value for the
+        I{Host} header, that value takes precedence over the one which would
+        otherwise be automatically provided.
+        """
+        headers = http_headers.Headers({'foo': ['bar'], 'host': ['quux']})
+        self.agent._getEndpoint = lambda *args: self
+        self.agent.request(
+            'GET', 'http://example.com/foo?bar', headers)
+
+        req, res = self.protocol.requests.pop()
+        self.assertEqual(req.headers.getRawHeaders('host'), ['quux'])
+
+
+    def test_headersUnmodified(self):
+        """
+        If a I{Host} header must be added to the request, the L{Headers}
+        instance passed to L{Agent.request} is not modified.
+        """
+        headers = http_headers.Headers()
+        self.agent._getEndpoint = lambda *args: self
+        self.agent.request(
+            'GET', 'http://example.com/foo', headers)
+
+        protocol = self.protocol
+
+        # The request should have been issued.
+        self.assertEqual(len(protocol.requests), 1)
+        # And the headers object passed in should not have changed.
+        self.assertEqual(headers, http_headers.Headers())
+
+
+    def test_hostValueStandardHTTP(self):
+        """
+        When passed a scheme of C{'http'} and a port of C{80},
+        L{Agent._computeHostValue} returns a string giving just
+        the host name passed to it.
+        """
+        self.assertEqual(
+            self.agent._computeHostValue('http', 'example.com', 80),
+            'example.com')
+
+
+    def test_hostValueNonStandardHTTP(self):
+        """
+        When passed a scheme of C{'http'} and a port other than C{80},
+        L{Agent._computeHostValue} returns a string giving the
+        host passed to it joined together with the port number by C{":"}.
+        """
+        self.assertEqual(
+            self.agent._computeHostValue('http', 'example.com', 54321),
+            'example.com:54321')
+
+
+    def test_hostValueStandardHTTPS(self):
+        """
+        When passed a scheme of C{'https'} and a port of C{443},
+        L{Agent._computeHostValue} returns a string giving just
+        the host name passed to it.
+        """
+        self.assertEqual(
+            self.agent._computeHostValue('https', 'example.com', 443),
+            'example.com')
+
+
+    def test_hostValueNonStandardHTTPS(self):
+        """
+        When passed a scheme of C{'https'} and a port other than C{443},
+        L{Agent._computeHostValue} returns a string giving the
+        host passed to it joined together with the port number by C{":"}.
+        """
+        self.assertEqual(
+            self.agent._computeHostValue('https', 'example.com', 54321),
+            'example.com:54321')
+
+
+    def test_request(self):
+        """
+        L{Agent.request} establishes a new connection to the host indicated by
+        the host part of the URI passed to it and issues a request using the
+        method, the path portion of the URI, the headers, and the body producer
+        passed to it.  It returns a L{Deferred} which fires with an
+        L{IResponse} from the server.
+        """
+        self.agent._getEndpoint = lambda *args: self
+
+        headers = http_headers.Headers({'foo': ['bar']})
+        # Just going to check the body for identity, so it doesn't need to be
+        # real.
+        body = object()
+        self.agent.request(
+            'GET', 'http://example.com:1234/foo?bar', headers, body)
+
+        protocol = self.protocol
+
+        # The request should be issued.
+        self.assertEqual(len(protocol.requests), 1)
+        req, res = protocol.requests.pop()
+        self.assertIsInstance(req, Request)
+        self.assertEqual(req.method, 'GET')
+        self.assertEqual(req.uri, '/foo?bar')
+        self.assertEqual(
+            req.headers,
+            http_headers.Headers({'foo': ['bar'],
+                                  'host': ['example.com:1234']}))
+        self.assertIdentical(req.bodyProducer, body)
+
+
+    def test_connectTimeout(self):
+        """
+        L{Agent} takes a C{connectTimeout} argument which is forwarded to the
+        following C{connectTCP} call.
+        """
+        agent = client.Agent(self.reactor, connectTimeout=5)
+        agent.request('GET', 'http://foo/')
+        timeout = self.reactor.tcpClients.pop()[3]
+        self.assertEqual(5, timeout)
+
+
+    def test_connectSSLTimeout(self):
+        """
+        L{Agent} takes a C{connectTimeout} argument which is forwarded to the
+        following C{connectSSL} call.
+        """
+        agent = client.Agent(self.reactor, connectTimeout=5)
+        agent.request('GET', 'https://foo/')
+        timeout = self.reactor.sslClients.pop()[4]
+        self.assertEqual(5, timeout)
+
+
+    def test_bindAddress(self):
+        """
+        L{Agent} takes a C{bindAddress} argument which is forwarded to the
+        following C{connectTCP} call.
+        """
+        agent = client.Agent(self.reactor, bindAddress='192.168.0.1')
+        agent.request('GET', 'http://foo/')
+        address = self.reactor.tcpClients.pop()[4]
+        self.assertEqual('192.168.0.1', address)
+
+
+    def test_bindAddressSSL(self):
+        """
+        L{Agent} takes a C{bindAddress} argument which is forwarded to the
+        following C{connectSSL} call.
+        """
+        agent = client.Agent(self.reactor, bindAddress='192.168.0.1')
+        agent.request('GET', 'https://foo/')
+        address = self.reactor.sslClients.pop()[5]
+        self.assertEqual('192.168.0.1', address)
+
+
+
+class HTTPConnectionPoolRetryTests(unittest.TestCase, FakeReactorAndConnectMixin):
+    """
+    L{client.HTTPConnectionPool}, by using
+    L{client._RetryingHTTP11ClientProtocol}, supports retrying requests done
+    against previously cached connections.
+    """
+
+    def test_onlyRetryIdempotentMethods(self):
+        """
+        Only GET, HEAD, OPTIONS, TRACE, DELETE methods should cause a retry.
+        """
+        pool = client.HTTPConnectionPool(None)
+        connection = client._RetryingHTTP11ClientProtocol(None, pool)
+        self.assertTrue(connection._shouldRetry("GET", RequestNotSent(), None))
+        self.assertTrue(connection._shouldRetry("HEAD", RequestNotSent(), None))
+        self.assertTrue(connection._shouldRetry(
+                "OPTIONS", RequestNotSent(), None))
+        self.assertTrue(connection._shouldRetry(
+                "TRACE", RequestNotSent(), None))
+        self.assertTrue(connection._shouldRetry(
+                "DELETE", RequestNotSent(), None))
+        self.assertFalse(connection._shouldRetry(
+                "POST", RequestNotSent(), None))
+        self.assertFalse(connection._shouldRetry(
+                "MYMETHOD", RequestNotSent(), None))
+        # This will be covered by a different ticket, since we need support
+        # for resettable body producers:
+        # self.assertTrue(connection._doRetry("PUT", RequestNotSent(), None))
+
+
+    def test_onlyRetryIfNoResponseReceived(self):
+        """
+        Only L{RequestNotSent}, L{RequestTransmissionFailed} and
+        L{ResponseNeverReceived} exceptions should be a cause for retrying.
+        """
+        pool = client.HTTPConnectionPool(None)
+        connection = client._RetryingHTTP11ClientProtocol(None, pool)
+        self.assertTrue(connection._shouldRetry("GET", RequestNotSent(), None))
+        self.assertTrue(connection._shouldRetry(
+                "GET", RequestTransmissionFailed([]), None))
+        self.assertTrue(connection._shouldRetry(
+                "GET", ResponseNeverReceived([]), None))
+        self.assertFalse(connection._shouldRetry(
+                "GET", ResponseFailed([]), None))
+        self.assertFalse(connection._shouldRetry(
+                "GET", ConnectionRefusedError(), None))
+
+
+    def test_wrappedOnPersistentReturned(self):
+        """
+        If L{client.HTTPConnectionPool.getConnection} returns a previously
+        cached connection, it will get wrapped in a
+        L{client._RetryingHTTP11ClientProtocol}.
+        """
+        pool = client.HTTPConnectionPool(Clock())
+
+        # Add a connection to the cache:
+        protocol = StubHTTPProtocol()
+        protocol.makeConnection(StringTransport())
+        pool._putConnection(123, protocol)
+
+        # Retrieve it, it should come back wrapped in a
+        # _RetryingHTTP11ClientProtocol:
+        d = pool.getConnection(123, DummyEndpoint())
+
+        def gotConnection(connection):
+            self.assertIsInstance(connection,
+                                  client._RetryingHTTP11ClientProtocol)
+            self.assertIdentical(connection._clientProtocol, protocol)
+        return d.addCallback(gotConnection)
+
+
+    def test_notWrappedOnNewReturned(self):
+        """
+        If L{client.HTTPConnectionPool.getConnection} returns a new
+        connection, it will be returned as is.
+        """
+        pool = client.HTTPConnectionPool(None)
+        d = pool.getConnection(123, DummyEndpoint())
+
+        def gotConnection(connection):
+            # Don't want to use isinstance since potentially the wrapper might
+            # subclass it at some point:
+            self.assertIdentical(connection.__class__, HTTP11ClientProtocol)
+        return d.addCallback(gotConnection)
+
+
+    def retryAttempt(self, willWeRetry):
+        """
+        Fail a first request, possibly retrying depending on argument.
+        """
+        protocols = []
+        def newProtocol():
+            protocol = StubHTTPProtocol()
+            protocols.append(protocol)
+            return defer.succeed(protocol)
+
+        bodyProducer = object()
+        request = client.Request("FOO", "/", client.Headers(), bodyProducer,
+                                 persistent=True)
+        newProtocol()
+        protocol = protocols[0]
+        retrier = client._RetryingHTTP11ClientProtocol(protocol, newProtocol)
+
+        def _shouldRetry(m, e, bp):
+            self.assertEqual(m, "FOO")
+            self.assertIdentical(bp, bodyProducer)
+            self.assertIsInstance(e, (RequestNotSent, ResponseNeverReceived))
+            return willWeRetry
+        retrier._shouldRetry = _shouldRetry
+
+        d = retrier.request(request)
+
+        # So far, one request made:
+        self.assertEqual(len(protocols), 1)
+        self.assertEqual(len(protocols[0].requests), 1)
+
+        # Fail the first request:
+        protocol.requests[0][1].errback(RequestNotSent())
+        return d, protocols
+
+
+    def test_retryIfShouldRetryReturnsTrue(self):
+        """
+        L{client._RetryingHTTP11ClientProtocol} retries when
+        L{client._RetryingHTTP11ClientProtocol._shouldRetry} returns C{True}.
+        """
+        d, protocols = self.retryAttempt(True)
+        # We retried!
+        self.assertEqual(len(protocols), 2)
+        response = object()
+        protocols[1].requests[0][1].callback(response)
+        return d.addCallback(self.assertIdentical, response)
+
+
+    def test_dontRetryIfShouldRetryReturnsFalse(self):
+        """
+        L{client._RetryingHTTP11ClientProtocol} does not retry when
+        L{client._RetryingHTTP11ClientProtocol._shouldRetry} returns C{False}.
+        """
+        d, protocols = self.retryAttempt(False)
+        # We did not retry:
+        self.assertEqual(len(protocols), 1)
+        return self.assertFailure(d, RequestNotSent)
+
+
+    def test_onlyRetryWithoutBody(self):
+        """
+        L{_RetryingHTTP11ClientProtocol} only retries queries that don't have
+        a body.
+
+        This is an implementation restriction; if the restriction is fixed,
+        this test should be removed and PUT added to the list of methods that
+        support retries.
+        """
+        pool = client.HTTPConnectionPool(None)
+        connection = client._RetryingHTTP11ClientProtocol(None, pool)
+        self.assertTrue(connection._shouldRetry("GET", RequestNotSent(), None))
+        self.assertFalse(connection._shouldRetry("GET", RequestNotSent(), object()))
+
+
+    def test_onlyRetryOnce(self):
+        """
+        If a L{client._RetryingHTTP11ClientProtocol} fails more than once on
+        an idempotent query before a response is received, it will not retry.
+        """
+        d, protocols = self.retryAttempt(True)
+        self.assertEqual(len(protocols), 2)
+        # Fail the second request too:
+        protocols[1].requests[0][1].errback(ResponseNeverReceived([]))
+        # We didn't retry again:
+        self.assertEqual(len(protocols), 2)
+        return self.assertFailure(d, ResponseNeverReceived)
+
+
+    def test_dontRetryIfRetryAutomaticallyFalse(self):
+        """
+        If L{HTTPConnectionPool.retryAutomatically} is set to C{False}, don't
+        wrap connections with retrying logic.
+        """
+        pool = client.HTTPConnectionPool(Clock())
+        pool.retryAutomatically = False
+
+        # Add a connection to the cache:
+        protocol = StubHTTPProtocol()
+        protocol.makeConnection(StringTransport())
+        pool._putConnection(123, protocol)
+
+        # Retrieve it, it should come back unwrapped:
+        d = pool.getConnection(123, DummyEndpoint())
+
+        def gotConnection(connection):
+            self.assertIdentical(connection, protocol)
+        return d.addCallback(gotConnection)
+
+
+    def test_retryWithNewConnection(self):
+        """
+        L{client.HTTPConnectionPool} creates
+        L{client._RetryingHTTP11ClientProtocol} with a new connection factory
+        method that creates a new connection using the same key and endpoint
+        as the wrapped connection.
+        """
+        pool = client.HTTPConnectionPool(Clock())
+        key = 123
+        endpoint = DummyEndpoint()
+        newConnections = []
+
+        # Override the pool's _newConnection:
+        def newConnection(k, e):
+            newConnections.append((k, e))
+        pool._newConnection = newConnection
+
+        # Add a connection to the cache:
+        protocol = StubHTTPProtocol()
+        protocol.makeConnection(StringTransport())
+        pool._putConnection(key, protocol)
+
+        # Retrieve it, it should come back wrapped in a
+        # _RetryingHTTP11ClientProtocol:
+        d = pool.getConnection(key, endpoint)
+
+        def gotConnection(connection):
+            self.assertIsInstance(connection,
+                                  client._RetryingHTTP11ClientProtocol)
+            self.assertIdentical(connection._clientProtocol, protocol)
+            # Verify that the _newConnection method on the retrying
+            # connection calls _newConnection on the pool:
+            self.assertEqual(newConnections, [])
+            connection._newConnection()
+            self.assertEqual(len(newConnections), 1)
+            self.assertEqual(newConnections[0][0], key)
+            self.assertIdentical(newConnections[0][1], endpoint)
+        return d.addCallback(gotConnection)
+
+
+
+
+class CookieTestsMixin(object):
+    """
+    Mixin for unit tests dealing with cookies.
+    """
+    def addCookies(self, cookieJar, uri, cookies):
+        """
+        Add a cookie to a cookie jar.
+        """
+        response = client._FakeUrllib2Response(
+            client.Response(
+                ('HTTP', 1, 1),
+                200,
+                'OK',
+                client.Headers({'Set-Cookie': cookies}),
+                None))
+        request = client._FakeUrllib2Request(uri)
+        cookieJar.extract_cookies(response, request)
+        return request, response
+
+
+
+class CookieJarTests(unittest.TestCase, CookieTestsMixin):
+    """
+    Tests for L{twisted.web.client._FakeUrllib2Response} and
+    L{twisted.web.client._FakeUrllib2Request}'s interactions with
+    C{cookielib.CookieJar} instances.
+    """
+    def makeCookieJar(self):
+        """
+        Create a C{cookielib.CookieJar} with some sample cookies.
+        """
+        cookieJar = cookielib.CookieJar()
+        reqres = self.addCookies(
+            cookieJar,
+            'http://example.com:1234/foo?bar',
+            ['foo=1; cow=moo; Path=/foo; Comment=hello',
+             'bar=2; Comment=goodbye'])
+        return cookieJar, reqres
+
+
+    def test_extractCookies(self):
+        """
+        L{cookielib.CookieJar.extract_cookies} extracts cookie information from
+        fake urllib2 response instances.
+        """
+        jar = self.makeCookieJar()[0]
+        cookies = dict([(c.name, c) for c in jar])
+
+        cookie = cookies['foo']
+        self.assertEqual(cookie.version, 0)
+        self.assertEqual(cookie.name, 'foo')
+        self.assertEqual(cookie.value, '1')
+        self.assertEqual(cookie.path, '/foo')
+        self.assertEqual(cookie.comment, 'hello')
+        self.assertEqual(cookie.get_nonstandard_attr('cow'), 'moo')
+
+        cookie = cookies['bar']
+        self.assertEqual(cookie.version, 0)
+        self.assertEqual(cookie.name, 'bar')
+        self.assertEqual(cookie.value, '2')
+        self.assertEqual(cookie.path, '/')
+        self.assertEqual(cookie.comment, 'goodbye')
+        self.assertIdentical(cookie.get_nonstandard_attr('cow'), None)
+
+
+    def test_sendCookie(self):
+        """
+        L{cookielib.CookieJar.add_cookie_header} adds a cookie header to a fake
+        urllib2 request instance.
+        """
+        jar, (request, response) = self.makeCookieJar()
+
+        self.assertIdentical(
+            request.get_header('Cookie', None),
+            None)
+
+        jar.add_cookie_header(request)
+        self.assertEqual(
+            request.get_header('Cookie', None),
+            'foo=1; bar=2')
+
+
+
+class CookieAgentTests(unittest.TestCase, CookieTestsMixin,
+                       FakeReactorAndConnectMixin):
+    """
+    Tests for L{twisted.web.client.CookieAgent}.
+    """
+    def setUp(self):
+        self.reactor = self.Reactor()
+
+
+    def test_emptyCookieJarRequest(self):
+        """
+        L{CookieAgent.request} does not insert any C{'Cookie'} header into the
+        L{Request} object if there is no cookie in the cookie jar for the URI
+        being requested. Cookies are extracted from the response and stored in
+        the cookie jar.
+        """
+        cookieJar = cookielib.CookieJar()
+        self.assertEqual(list(cookieJar), [])
+
+        agent = self.buildAgentForWrapperTest(self.reactor)
+        cookieAgent = client.CookieAgent(agent, cookieJar)
+        d = cookieAgent.request(
+            'GET', 'http://example.com:1234/foo?bar')
+
+        def _checkCookie(ignored):
+            cookies = list(cookieJar)
+            self.assertEqual(len(cookies), 1)
+            self.assertEqual(cookies[0].name, 'foo')
+            self.assertEqual(cookies[0].value, '1')
+
+        d.addCallback(_checkCookie)
+
+        req, res = self.protocol.requests.pop()
+        self.assertIdentical(req.headers.getRawHeaders('cookie'), None)
+
+        resp = client.Response(
+            ('HTTP', 1, 1),
+            200,
+            'OK',
+            client.Headers({'Set-Cookie': ['foo=1',]}),
+            None)
+        res.callback(resp)
+
+        return d
+
+
+    def test_requestWithCookie(self):
+        """
+        L{CookieAgent.request} inserts a C{'Cookie'} header into the L{Request}
+        object when there is a cookie matching the request URI in the cookie
+        jar.
+        """
+        uri = 'http://example.com:1234/foo?bar'
+        cookie = 'foo=1'
+
+        cookieJar = cookielib.CookieJar()
+        self.addCookies(cookieJar, uri, [cookie])
+        self.assertEqual(len(list(cookieJar)), 1)
+
+        agent = self.buildAgentForWrapperTest(self.reactor)
+        cookieAgent = client.CookieAgent(agent, cookieJar)
+        cookieAgent.request('GET', uri)
+
+        req, res = self.protocol.requests.pop()
+        self.assertEqual(req.headers.getRawHeaders('cookie'), [cookie])
+
+
+    def test_secureCookie(self):
+        """
+        L{CookieAgent} is able to handle secure cookies, i.e. cookies which
+        should only be sent over HTTPS.
+        """
+        uri = 'https://example.com:1234/foo?bar'
+        cookie = 'foo=1;secure'
+
+        cookieJar = cookielib.CookieJar()
+        self.addCookies(cookieJar, uri, [cookie])
+        self.assertEqual(len(list(cookieJar)), 1)
+
+        agent = self.buildAgentForWrapperTest(self.reactor)
+        cookieAgent = client.CookieAgent(agent, cookieJar)
+        cookieAgent.request('GET', uri)
+
+        req, res = self.protocol.requests.pop()
+        self.assertEqual(req.headers.getRawHeaders('cookie'), ['foo=1'])
+
+
+    def test_secureCookieOnInsecureConnection(self):
+        """
+        If a cookie is marked as secure, it is not sent with the request
+        unless the request is made over HTTPS.
+        """
+        uri = 'http://example.com/foo?bar'
+        cookie = 'foo=1;secure'
+
+        cookieJar = cookielib.CookieJar()
+        self.addCookies(cookieJar, uri, [cookie])
+        self.assertEqual(len(list(cookieJar)), 1)
+
+        agent = self.buildAgentForWrapperTest(self.reactor)
+        cookieAgent = client.CookieAgent(agent, cookieJar)
+        cookieAgent.request('GET', uri)
+
+        req, res = self.protocol.requests.pop()
+        self.assertIdentical(None, req.headers.getRawHeaders('cookie'))
+
+
+    def test_portCookie(self):
+        """
+        L{CookieAgent} supports cookies which specify the port number they
+        may be transferred on.
+        """
+        uri = 'https://example.com:1234/foo?bar'
+        cookie = 'foo=1;port=1234'
+
+        cookieJar = cookielib.CookieJar()
+        self.addCookies(cookieJar, uri, [cookie])
+        self.assertEqual(len(list(cookieJar)), 1)
+
+        agent = self.buildAgentForWrapperTest(self.reactor)
+        cookieAgent = client.CookieAgent(agent, cookieJar)
+        cookieAgent.request('GET', uri)
+
+        req, res = self.protocol.requests.pop()
+        self.assertEqual(req.headers.getRawHeaders('cookie'), ['foo=1'])
+
+
+    def test_portCookieOnWrongPort(self):
+        """
+        A cookie created with a port directive is not added to the
+        C{cookielib.CookieJar} if the request URI uses a different port.
+        """
+        uri = 'https://example.com:4567/foo?bar'
+        cookie = 'foo=1;port=1234'
+
+        cookieJar = cookielib.CookieJar()
+        self.addCookies(cookieJar, uri, [cookie])
+        self.assertEqual(len(list(cookieJar)), 0)
+
+
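An illustrative end-to-end sketch of the wrapper these tests cover (the URL is an assumption, and a reachable server plus a normally running reactor are assumed):

    import cookielib

    from twisted.internet import reactor
    from twisted.web.client import Agent, CookieAgent

    jar = cookielib.CookieJar()
    agent = CookieAgent(Agent(reactor), jar)

    def showCookies(response):
        # Any Set-Cookie headers on the response are now stored in the jar and
        # will be replayed on later requests to matching URIs.
        for cookie in jar:
            print cookie.name, cookie.value

    d = agent.request('GET', 'http://example.com/')
    d.addCallback(showCookies)
    d.addBoth(lambda ignored: reactor.stop())
    reactor.run()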
+
+class Decoder1(proxyForInterface(IResponse)):
+    """
+    A test decoder to be used by L{client.ContentDecoderAgent} tests.
+    """
+
+
+
+class Decoder2(Decoder1):
+    """
+    A test decoder to be used by L{client.ContentDecoderAgent} tests.
+    """
+
+
+
+class ContentDecoderAgentTests(unittest.TestCase, FakeReactorAndConnectMixin):
+    """
+    Tests for L{client.ContentDecoderAgent}.
+    """
+
+    def setUp(self):
+        """
+        Create an L{Agent} wrapped around a fake reactor.
+        """
+        self.reactor = self.Reactor()
+        self.agent = self.buildAgentForWrapperTest(self.reactor)
+
+
+    def test_acceptHeaders(self):
+        """
+        L{client.ContentDecoderAgent} sets the I{Accept-Encoding} header to the
+        names of the available decoder objects.
+        """
+        agent = client.ContentDecoderAgent(
+            self.agent, [('decoder1', Decoder1), ('decoder2', Decoder2)])
+
+        agent.request('GET', 'http://example.com/foo')
+
+        protocol = self.protocol
+
+        self.assertEqual(len(protocol.requests), 1)
+        req, res = protocol.requests.pop()
+        self.assertEqual(req.headers.getRawHeaders('accept-encoding'),
+                          ['decoder1,decoder2'])
+
+
+    def test_existingHeaders(self):
+        """
+        If there are existing I{Accept-Encoding} fields,
+        L{client.ContentDecoderAgent} creates a new field for the decoders it
+        knows about.
+        """
+        headers = http_headers.Headers({'foo': ['bar'],
+                                        'accept-encoding': ['fizz']})
+        agent = client.ContentDecoderAgent(
+            self.agent, [('decoder1', Decoder1), ('decoder2', Decoder2)])
+        agent.request('GET', 'http://example.com/foo', headers=headers)
+
+        protocol = self.protocol
+
+        self.assertEqual(len(protocol.requests), 1)
+        req, res = protocol.requests.pop()
+        self.assertEqual(
+            list(req.headers.getAllRawHeaders()),
+            [('Host', ['example.com']),
+             ('Foo', ['bar']),
+             ('Accept-Encoding', ['fizz', 'decoder1,decoder2'])])
+
+
+    def test_plainEncodingResponse(self):
+        """
+        If the response is not encoded despite the request's I{Accept-Encoding}
+        header, L{client.ContentDecoderAgent} simply forwards the response.
+        """
+        agent = client.ContentDecoderAgent(
+            self.agent, [('decoder1', Decoder1), ('decoder2', Decoder2)])
+        deferred = agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        response = Response(('HTTP', 1, 1), 200, 'OK', http_headers.Headers(),
+                            None)
+        res.callback(response)
+
+        return deferred.addCallback(self.assertIdentical, response)
+
+
+    def test_unsupportedEncoding(self):
+        """
+        If an encoding unknown to the L{client.ContentDecoderAgent} is found,
+        the response is unchanged.
+        """
+        agent = client.ContentDecoderAgent(
+            self.agent, [('decoder1', Decoder1), ('decoder2', Decoder2)])
+        deferred = agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers({'foo': ['bar'],
+                                        'content-encoding': ['fizz']})
+        response = Response(('HTTP', 1, 1), 200, 'OK', headers, None)
+        res.callback(response)
+
+        return deferred.addCallback(self.assertIdentical, response)
+
+
+    def test_unknownEncoding(self):
+        """
+        When L{client.ContentDecoderAgent} encounters an encoding it doesn't
+        know about, it stops decoding even if a later encoding in the header
+        is supported.
+        """
+        agent = client.ContentDecoderAgent(
+            self.agent, [('decoder1', Decoder1), ('decoder2', Decoder2)])
+        deferred = agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers({'foo': ['bar'],
+                                        'content-encoding':
+                                        ['decoder1,fizz,decoder2']})
+        response = Response(('HTTP', 1, 1), 200, 'OK', headers, None)
+        res.callback(response)
+
+        def check(result):
+            self.assertNotIdentical(response, result)
+            self.assertIsInstance(result, Decoder2)
+            self.assertEqual(['decoder1,fizz'],
+                              result.headers.getRawHeaders('content-encoding'))
+
+        return deferred.addCallback(check)
+
+
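A hedged usage sketch of the wrapper under test; the Collect protocol, URL and body handling below are illustrative, not part of the upstream file:

    from twisted.internet import defer, protocol, reactor
    from twisted.web.client import Agent, ContentDecoderAgent, GzipDecoder

    class Collect(protocol.Protocol):
        """Accumulate the (already decoded) response body."""
        def __init__(self, finished):
            self.finished = finished
            self.chunks = []
        def dataReceived(self, data):
            self.chunks.append(data)
        def connectionLost(self, reason):
            self.finished.callback(''.join(self.chunks))

    # Advertises "gzip" via Accept-Encoding; if the response comes back with a
    # matching Content-Encoding, deliverBody() hands over uncompressed data.
    agent = ContentDecoderAgent(Agent(reactor), [('gzip', GzipDecoder)])

    def collectBody(response):
        finished = defer.Deferred()
        response.deliverBody(Collect(finished))
        return finished

    def printLength(body):
        print '%d decoded bytes' % (len(body),)

    d = agent.request('GET', 'http://example.com/compressed')
    d.addCallback(collectBody)
    d.addCallback(printLength)
    d.addBoth(lambda ignored: reactor.stop())
    reactor.run()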
+
+class SimpleAgentProtocol(Protocol):
+    """
+    A L{Protocol} to be used with an L{client.Agent} to receive data.
+
+    @ivar finished: L{Deferred} firing when C{connectionLost} is called.
+
+    @ivar made: L{Deferred} firing when C{connectionMade} is called.
+
+    @ivar received: C{list} of received data.
+    """
+
+    def __init__(self):
+        self.made = Deferred()
+        self.finished = Deferred()
+        self.received = []
+
+
+    def connectionMade(self):
+        self.made.callback(None)
+
+
+    def connectionLost(self, reason):
+        self.finished.callback(None)
+
+
+    def dataReceived(self, data):
+        self.received.append(data)
+
+
+
+class ContentDecoderAgentWithGzipTests(unittest.TestCase,
+                                       FakeReactorAndConnectMixin):
+
+    def setUp(self):
+        """
+        Create an L{Agent} wrapped around a fake reactor.
+        """
+        self.reactor = self.Reactor()
+        agent = self.buildAgentForWrapperTest(self.reactor)
+        self.agent = client.ContentDecoderAgent(
+            agent, [("gzip", client.GzipDecoder)])
+
+
+    def test_gzipEncodingResponse(self):
+        """
+        If the response has a C{gzip} I{Content-Encoding} header,
+        L{GzipDecoder} wraps the response to return uncompressed data to the
+        user.
+        """
+        deferred = self.agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers({'foo': ['bar'],
+                                        'content-encoding': ['gzip']})
+        transport = StringTransport()
+        response = Response(('HTTP', 1, 1), 200, 'OK', headers, transport)
+        response.length = 12
+        res.callback(response)
+
+        compressor = zlib.compressobj(2, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
+        data = (compressor.compress('x' * 6) + compressor.compress('y' * 4) +
+                compressor.flush())
+
+        def checkResponse(result):
+            self.assertNotIdentical(result, response)
+            self.assertEqual(result.version, ('HTTP', 1, 1))
+            self.assertEqual(result.code, 200)
+            self.assertEqual(result.phrase, 'OK')
+            self.assertEqual(list(result.headers.getAllRawHeaders()),
+                              [('Foo', ['bar'])])
+            self.assertEqual(result.length, UNKNOWN_LENGTH)
+            self.assertRaises(AttributeError, getattr, result, 'unknown')
+
+            response._bodyDataReceived(data[:5])
+            response._bodyDataReceived(data[5:])
+            response._bodyDataFinished()
+
+            protocol = SimpleAgentProtocol()
+            result.deliverBody(protocol)
+
+            self.assertEqual(protocol.received, ['x' * 6 + 'y' * 4])
+            return defer.gatherResults([protocol.made, protocol.finished])
+
+        deferred.addCallback(checkResponse)
+
+        return deferred
+
+
+    def test_brokenContent(self):
+        """
+        If the data received by the L{GzipDecoder} isn't valid gzip-compressed
+        data, the call to C{deliverBody} fails with a C{zlib.error}.
+        """
+        deferred = self.agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers({'foo': ['bar'],
+                                        'content-encoding': ['gzip']})
+        transport = StringTransport()
+        response = Response(('HTTP', 1, 1), 200, 'OK', headers, transport)
+        response.length = 12
+        res.callback(response)
+
+        data = "not gzipped content"
+
+        def checkResponse(result):
+            response._bodyDataReceived(data)
+
+            result.deliverBody(Protocol())
+
+        deferred.addCallback(checkResponse)
+        self.assertFailure(deferred, client.ResponseFailed)
+
+        def checkFailure(error):
+            error.reasons[0].trap(zlib.error)
+            self.assertIsInstance(error.response, Response)
+
+        return deferred.addCallback(checkFailure)
+
+
+    def test_flushData(self):
+        """
+        When the connection with the server is lost, the gzip protocol calls
+        C{flush} on the zlib decompressor object to get uncompressed data which
+        may have been buffered.
+        """
+        class decompressobj(object):
+
+            def __init__(self, wbits):
+                pass
+
+            def decompress(self, data):
+                return 'x'
+
+            def flush(self):
+                return 'y'
+
+
+        oldDecompressObj = zlib.decompressobj
+        zlib.decompressobj = decompressobj
+        self.addCleanup(setattr, zlib, 'decompressobj', oldDecompressObj)
+
+        deferred = self.agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers({'content-encoding': ['gzip']})
+        transport = StringTransport()
+        response = Response(('HTTP', 1, 1), 200, 'OK', headers, transport)
+        res.callback(response)
+
+        def checkResponse(result):
+            response._bodyDataReceived('data')
+            response._bodyDataFinished()
+
+            protocol = SimpleAgentProtocol()
+            result.deliverBody(protocol)
+
+            self.assertEqual(protocol.received, ['x', 'y'])
+            return defer.gatherResults([protocol.made, protocol.finished])
+
+        deferred.addCallback(checkResponse)
+
+        return deferred
+
+
+    def test_flushError(self):
+        """
+        If the C{flush} call in C{connectionLost} fails, the C{zlib.error}
+        exception is caught and turned into a L{ResponseFailed}.
+        """
+        class decompressobj(object):
+
+            def __init__(self, wbits):
+                pass
+
+            def decompress(self, data):
+                return 'x'
+
+            def flush(self):
+                raise zlib.error()
+
+
+        oldDecompressObj = zlib.decompressobj
+        zlib.decompressobj = decompressobj
+        self.addCleanup(setattr, zlib, 'decompressobj', oldDecompressObj)
+
+        deferred = self.agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers({'content-encoding': ['gzip']})
+        transport = StringTransport()
+        response = Response(('HTTP', 1, 1), 200, 'OK', headers, transport)
+        res.callback(response)
+
+        def checkResponse(result):
+            response._bodyDataReceived('data')
+            response._bodyDataFinished()
+
+            protocol = SimpleAgentProtocol()
+            result.deliverBody(protocol)
+
+            self.assertEqual(protocol.received, ['x', 'y'])
+            return defer.gatherResults([protocol.made, protocol.finished])
+
+        deferred.addCallback(checkResponse)
+
+        self.assertFailure(deferred, client.ResponseFailed)
+
+        def checkFailure(error):
+            error.reasons[1].trap(zlib.error)
+            self.assertIsInstance(error.response, Response)
+
+        return deferred.addCallback(checkFailure)
+
+
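For context, the C{16 + zlib.MAX_WBITS} value used in the tests above is what selects the gzip container format (header plus trailing CRC) rather than a raw zlib stream; a standalone round trip, independent of Twisted:

    import zlib

    # Compress the same payload the tests use, in gzip format.
    compressor = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    payload = compressor.compress('x' * 6) + compressor.compress('y' * 4)
    payload += compressor.flush()

    # Decompress with a matching wbits value; flush() returns any buffered
    # tail, which is why the decoder calls it when the connection is lost.
    decompressor = zlib.decompressobj(16 + zlib.MAX_WBITS)
    assert decompressor.decompress(payload) + decompressor.flush() == 'x' * 6 + 'y' * 4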
+
+class ProxyAgentTests(unittest.TestCase, FakeReactorAndConnectMixin):
+    """
+    Tests for L{client.ProxyAgent}.
+    """
+
+    def setUp(self):
+        self.reactor = self.Reactor()
+        self.agent = client.ProxyAgent(
+            TCP4ClientEndpoint(self.reactor, "bar", 5678), self.reactor)
+        oldEndpoint = self.agent._proxyEndpoint
+        self.agent._proxyEndpoint = self.StubEndpoint(oldEndpoint, self)
+
+
+    def test_proxyRequest(self):
+        """
+        L{client.ProxyAgent} issues an HTTP request against the proxy, with the
+        full URI as path, when C{request} is called.
+        """
+        headers = http_headers.Headers({'foo': ['bar']})
+        # Just going to check the body for identity, so it doesn't need to be
+        # real.
+        body = object()
+        self.agent.request(
+            'GET', 'http://example.com:1234/foo?bar', headers, body)
+
+        host, port, factory = self.reactor.tcpClients.pop()[:3]
+        self.assertEqual(host, "bar")
+        self.assertEqual(port, 5678)
+
+        self.assertIsInstance(factory._wrappedFactory,
+                              client._HTTP11ClientFactory)
+
+        protocol = self.protocol
+
+        # The request should be issued.
+        self.assertEqual(len(protocol.requests), 1)
+        req, res = protocol.requests.pop()
+        self.assertIsInstance(req, Request)
+        self.assertEqual(req.method, 'GET')
+        self.assertEqual(req.uri, 'http://example.com:1234/foo?bar')
+        self.assertEqual(
+            req.headers,
+            http_headers.Headers({'foo': ['bar'],
+                                  'host': ['example.com:1234']}))
+        self.assertIdentical(req.bodyProducer, body)
+
+
+    def test_nonPersistent(self):
+        """
+        C{ProxyAgent} connections are not persistent by default.
+        """
+        self.assertEqual(self.agent._pool.persistent, False)
+
+
+    def test_connectUsesConnectionPool(self):
+        """
+        When a connection is made by the C{ProxyAgent}, it uses its pool's
+        C{getConnection} method to do so, with the endpoint it was constructed
+        with and a key of C{("http-proxy", endpoint)}.
+        """
+        endpoint = DummyEndpoint()
+        class DummyPool(object):
+            connected = False
+            persistent = False
+            def getConnection(this, key, ep):
+                this.connected = True
+                self.assertIdentical(ep, endpoint)
+                # The key is *not* tied to the final destination, but only to
+                # the address of the proxy, since that's where *we* are
+                # connecting:
+                self.assertEqual(key, ("http-proxy", endpoint))
+                return defer.succeed(StubHTTPProtocol())
+
+        pool = DummyPool()
+        agent = client.ProxyAgent(endpoint, self.reactor, pool=pool)
+        self.assertIdentical(pool, agent._pool)
+
+        agent.request('GET', 'http://foo/')
+        self.assertEqual(agent._pool.connected, True)
+
+
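An illustrative sketch of the proxy wrapper exercised here (the proxy host and port are assumptions):

    from twisted.internet import reactor
    from twisted.internet.endpoints import TCP4ClientEndpoint
    from twisted.web.client import ProxyAgent

    # Every request is sent to the proxy endpoint, with the absolute URI as the
    # request path; the connection pool key is ("http-proxy", endpoint).
    endpoint = TCP4ClientEndpoint(reactor, 'proxy.example.com', 8080)
    agent = ProxyAgent(endpoint)
    d = agent.request('GET', 'http://example.com:1234/foo?bar')
    d.addBoth(lambda ignored: reactor.stop())
    reactor.run()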
+
+class RedirectAgentTests(unittest.TestCase, FakeReactorAndConnectMixin):
+    """
+    Tests for L{client.RedirectAgent}.
+    """
+
+    def setUp(self):
+        self.reactor = self.Reactor()
+        self.agent = client.RedirectAgent(
+            self.buildAgentForWrapperTest(self.reactor))
+
+
+    def test_noRedirect(self):
+        """
+        L{client.RedirectAgent} behaves like L{client.Agent} if the response
+        doesn't contain a redirect.
+        """
+        deferred = self.agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers()
+        response = Response(('HTTP', 1, 1), 200, 'OK', headers, None)
+        res.callback(response)
+
+        self.assertEqual(0, len(self.protocol.requests))
+
+        def checkResponse(result):
+            self.assertIdentical(result, response)
+
+        return deferred.addCallback(checkResponse)
+
+
+    def _testRedirectDefault(self, code):
+        """
+        When getting a redirect, L{RedirectAgent} follows the URL specified in
+        the L{Location} header field and makes a new request.
+        """
+        self.agent.request('GET', 'http://example.com/foo')
+
+        host, port = self.reactor.tcpClients.pop()[:2]
+        self.assertEqual("example.com", host)
+        self.assertEqual(80, port)
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers(
+            {'location': ['https://example.com/bar']})
+        response = Response(('HTTP', 1, 1), code, 'OK', headers, None)
+        res.callback(response)
+
+        req2, res2 = self.protocol.requests.pop()
+        self.assertEqual('GET', req2.method)
+        self.assertEqual('/bar', req2.uri)
+
+        host, port = self.reactor.sslClients.pop()[:2]
+        self.assertEqual("example.com", host)
+        self.assertEqual(443, port)
+
+
+    def test_redirect301(self):
+        """
+        L{RedirectAgent} follows redirects on status code 301.
+        """
+        self._testRedirectDefault(301)
+
+
+    def test_redirect302(self):
+        """
+        L{RedirectAgent} follows redirects on status code 302.
+        """
+        self._testRedirectDefault(302)
+
+
+    def test_redirect307(self):
+        """
+        L{RedirectAgent} follows redirects on status code 307.
+        """
+        self._testRedirectDefault(307)
+
+
+    def test_redirect303(self):
+        """
+        L{RedirectAgent} changes the method to C{GET} when getting a 303
+        redirect on a C{POST} request.
+        """
+        self.agent.request('POST', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers(
+            {'location': ['http://example.com/bar']})
+        response = Response(('HTTP', 1, 1), 303, 'OK', headers, None)
+        res.callback(response)
+
+        req2, res2 = self.protocol.requests.pop()
+        self.assertEqual('GET', req2.method)
+        self.assertEqual('/bar', req2.uri)
+
+
+    def test_noLocationField(self):
+        """
+        If no L{Location} header field is found when getting a redirect,
+        L{RedirectAgent} fails with a L{ResponseFailed} error wrapping a
+        L{error.RedirectWithNoLocation} exception.
+        """
+        deferred = self.agent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers()
+        response = Response(('HTTP', 1, 1), 301, 'OK', headers, None)
+        res.callback(response)
+
+        self.assertFailure(deferred, client.ResponseFailed)
+
+        def checkFailure(fail):
+            fail.reasons[0].trap(error.RedirectWithNoLocation)
+            self.assertEqual('http://example.com/foo',
+                             fail.reasons[0].value.uri)
+            self.assertEqual(301, fail.response.code)
+
+        return deferred.addCallback(checkFailure)
+
+
+    def test_307OnPost(self):
+        """
+        When getting a 307 redirect on a C{POST} request, L{RedirectAgent}
+        fails with a L{ResponseFailed} error wrapping an
+        L{error.PageRedirect} exception.
+        """
+        deferred = self.agent.request('POST', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers()
+        response = Response(('HTTP', 1, 1), 307, 'OK', headers, None)
+        res.callback(response)
+
+        self.assertFailure(deferred, client.ResponseFailed)
+
+        def checkFailure(fail):
+            fail.reasons[0].trap(error.PageRedirect)
+            self.assertEqual('http://example.com/foo',
+                             fail.reasons[0].value.location)
+            self.assertEqual(307, fail.response.code)
+
+        return deferred.addCallback(checkFailure)
+
+
+    def test_redirectLimit(self):
+        """
+        If the limit of redirects specified to L{RedirectAgent} is reached, the
+        deferred fires with a L{ResponseFailed} error wrapping an
+        L{InfiniteRedirection} exception.
+        """
+        agent = self.buildAgentForWrapperTest(self.reactor)
+        redirectAgent = client.RedirectAgent(agent, 1)
+
+        deferred = redirectAgent.request('GET', 'http://example.com/foo')
+
+        req, res = self.protocol.requests.pop()
+
+        headers = http_headers.Headers(
+            {'location': ['http://example.com/bar']})
+        response = Response(('HTTP', 1, 1), 302, 'OK', headers, None)
+        res.callback(response)
+
+        req2, res2 = self.protocol.requests.pop()
+
+        response2 = Response(('HTTP', 1, 1), 302, 'OK', headers, None)
+        res2.callback(response2)
+
+        self.assertFailure(deferred, client.ResponseFailed)
+
+        def checkFailure(fail):
+            fail.reasons[0].trap(error.InfiniteRedirection)
+            self.assertEqual('http://example.com/foo',
+                             fail.reasons[0].value.location)
+            self.assertEqual(302, fail.response.code)
+
+        return deferred.addCallback(checkFailure)
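A short usage sketch of the redirect wrapper tested above (the limit and URL are illustrative):

    from twisted.internet import reactor
    from twisted.web.client import Agent, RedirectAgent

    # Follows 301, 302 and 307 redirects on GET (and 303, downgrading POST to
    # GET) up to the given limit; beyond it the Deferred fails with
    # ResponseFailed wrapping InfiniteRedirection.
    agent = RedirectAgent(Agent(reactor), 5)   # second argument: redirect limit
    d = agent.request('GET', 'http://example.com/maybe-redirected')
    d.addBoth(lambda ignored: reactor.stop())
    reactor.run()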
diff --git a/ThirdParty/Twisted/twisted/web/test/test_cgi.py b/ThirdParty/Twisted/twisted/web/test/test_cgi.py
new file mode 100644
index 0000000..db63211
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_cgi.py
@@ -0,0 +1,270 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.twcgi}.
+"""
+
+import sys, os
+
+from twisted.trial import unittest
+from twisted.internet import reactor, interfaces, error
+from twisted.python import util, failure
+from twisted.web.http import NOT_FOUND, INTERNAL_SERVER_ERROR
+from twisted.web import client, twcgi, server, resource
+from twisted.web.test._util import _render
+from twisted.web.test.test_web import DummyRequest
+
+DUMMY_CGI = '''\
+print "Header: OK"
+print
+print "cgi output"
+'''
+
+DUAL_HEADER_CGI = '''\
+print "Header: spam"
+print "Header: eggs"
+print
+print "cgi output"
+'''
+
+SPECIAL_HEADER_CGI = '''\
+print "Server: monkeys"
+print "Date: last year"
+print
+print "cgi output"
+'''
+
+READINPUT_CGI = '''\
+# this is an example of a correctly-written CGI script which reads a body
+# from stdin, reading only env['CONTENT_LENGTH'] bytes.
+
+import os, sys
+
+body_length = int(os.environ.get('CONTENT_LENGTH',0))
+indata = sys.stdin.read(body_length)
+print "Header: OK"
+print
+print "readinput ok"
+'''
+
+READALLINPUT_CGI = '''\
+# this is an example of the typical (incorrect) CGI script which expects
+# the server to close stdin when the body of the request is complete.
+# A correct CGI should only read env['CONTENT_LENGTH'] bytes.
+
+import sys
+
+indata = sys.stdin.read()
+print "Header: OK"
+print
+print "readallinput ok"
+'''
+
+NO_DUPLICATE_CONTENT_TYPE_HEADER_CGI = '''\
+print "content-type: text/cgi-duplicate-test"
+print
+print "cgi output"
+'''
+
+class PythonScript(twcgi.FilteredScript):
+    filter = sys.executable
+
+class CGI(unittest.TestCase):
+    """
+    Tests for L{twcgi.FilteredScript}.
+    """
+
+    if not interfaces.IReactorProcess.providedBy(reactor):
+        skip = "CGI tests require a functional reactor.spawnProcess()"
+
+    def startServer(self, cgi):
+        root = resource.Resource()
+        cgipath = util.sibpath(__file__, cgi)
+        root.putChild("cgi", PythonScript(cgipath))
+        site = server.Site(root)
+        self.p = reactor.listenTCP(0, site)
+        return self.p.getHost().port
+
+    def tearDown(self):
+        if self.p:
+            return self.p.stopListening()
+
+
+    def writeCGI(self, source):
+        cgiFilename = os.path.abspath(self.mktemp())
+        cgiFile = file(cgiFilename, 'wt')
+        cgiFile.write(source)
+        cgiFile.close()
+        return cgiFilename
+
+
+    def testCGI(self):
+        cgiFilename = self.writeCGI(DUMMY_CGI)
+
+        portnum = self.startServer(cgiFilename)
+        d = client.getPage("http://localhost:%d/cgi" % portnum)
+        d.addCallback(self._testCGI_1)
+        return d
+
+
+    def _testCGI_1(self, res):
+        self.assertEqual(res, "cgi output" + os.linesep)
+
+
+    def test_protectedServerAndDate(self):
+        """
+        If the CGI script emits a I{Server} or I{Date} header, these are
+        ignored.
+        """
+        cgiFilename = self.writeCGI(SPECIAL_HEADER_CGI)
+
+        portnum = self.startServer(cgiFilename)
+        url = "http://localhost:%d/cgi" % (portnum,)
+        factory = client.HTTPClientFactory(url)
+        reactor.connectTCP('localhost', portnum, factory)
+        def checkResponse(ignored):
+            self.assertNotIn('monkeys', factory.response_headers['server'])
+            self.assertNotIn('last year', factory.response_headers['date'])
+        factory.deferred.addCallback(checkResponse)
+        return factory.deferred
+
+
+    def test_noDuplicateContentTypeHeaders(self):
+        """
+        If the CGI script emits a I{content-type} header, the server does not
+        add an additional (duplicate) one, as per ticket 4786.
+        """
+        cgiFilename = self.writeCGI(NO_DUPLICATE_CONTENT_TYPE_HEADER_CGI)
+
+        portnum = self.startServer(cgiFilename)
+        url = "http://localhost:%d/cgi" % (portnum,)
+        factory = client.HTTPClientFactory(url)
+        reactor.connectTCP('localhost', portnum, factory)
+        def checkResponse(ignored):
+            self.assertEqual(
+                factory.response_headers['content-type'], ['text/cgi-duplicate-test'])
+        factory.deferred.addCallback(checkResponse)
+        return factory.deferred
+
+
+    def test_duplicateHeaderCGI(self):
+        """
+        If a CGI script emits two instances of the same header, both are sent in
+        the response.
+        """
+        cgiFilename = self.writeCGI(DUAL_HEADER_CGI)
+
+        portnum = self.startServer(cgiFilename)
+        url = "http://localhost:%d/cgi" % (portnum,)
+        factory = client.HTTPClientFactory(url)
+        reactor.connectTCP('localhost', portnum, factory)
+        def checkResponse(ignored):
+            self.assertEqual(
+                factory.response_headers['header'], ['spam', 'eggs'])
+        factory.deferred.addCallback(checkResponse)
+        return factory.deferred
+
+
+    def testReadEmptyInput(self):
+        cgiFilename = os.path.abspath(self.mktemp())
+        cgiFile = file(cgiFilename, 'wt')
+        cgiFile.write(READINPUT_CGI)
+        cgiFile.close()
+
+        portnum = self.startServer(cgiFilename)
+        d = client.getPage("http://localhost:%d/cgi" % portnum)
+        d.addCallback(self._testReadEmptyInput_1)
+        return d
+    testReadEmptyInput.timeout = 5
+    def _testReadEmptyInput_1(self, res):
+        self.assertEqual(res, "readinput ok%s" % os.linesep)
+
+    def testReadInput(self):
+        cgiFilename = os.path.abspath(self.mktemp())
+        cgiFile = file(cgiFilename, 'wt')
+        cgiFile.write(READINPUT_CGI)
+        cgiFile.close()
+
+        portnum = self.startServer(cgiFilename)
+        d = client.getPage("http://localhost:%d/cgi" % portnum,
+                           method="POST",
+                           postdata="Here is your stdin")
+        d.addCallback(self._testReadInput_1)
+        return d
+    testReadInput.timeout = 5
+    def _testReadInput_1(self, res):
+        self.assertEqual(res, "readinput ok%s" % os.linesep)
+
+
+    def testReadAllInput(self):
+        cgiFilename = os.path.abspath(self.mktemp())
+        cgiFile = file(cgiFilename, 'wt')
+        cgiFile.write(READALLINPUT_CGI)
+        cgiFile.close()
+
+        portnum = self.startServer(cgiFilename)
+        d = client.getPage("http://localhost:%d/cgi" % portnum,
+                           method="POST",
+                           postdata="Here is your stdin")
+        d.addCallback(self._testReadAllInput_1)
+        return d
+    testReadAllInput.timeout = 5
+    def _testReadAllInput_1(self, res):
+        self.assertEqual(res, "readallinput ok%s" % os.linesep)
+
+
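A hedged sketch of serving a CGI script with twcgi, mirroring what startServer() above does (the script path and port are illustrative):

    from twisted.internet import reactor
    from twisted.web import resource, server, twcgi

    root = resource.Resource()
    # CGIScript runs the file directly; FilteredScript (like PythonScript
    # above) runs it through a filter such as a Python interpreter instead.
    root.putChild('hello.cgi', twcgi.CGIScript('/usr/local/www/hello.cgi'))
    reactor.listenTCP(8080, server.Site(root))
    reactor.run()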
+
+class CGIDirectoryTests(unittest.TestCase):
+    """
+    Tests for L{twcgi.CGIDirectory}.
+    """
+    def test_render(self):
+        """
+        L{twcgi.CGIDirectory.render} sets the HTTP response code to I{NOT
+        FOUND}.
+        """
+        resource = twcgi.CGIDirectory(self.mktemp())
+        request = DummyRequest([''])
+        d = _render(resource, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, NOT_FOUND)
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_notFoundChild(self):
+        """
+        L{twcgi.CGIDirectory.getChild} returns a resource which renders a
+        response with the HTTP I{NOT FOUND} status code if the indicated child
+        does not exist as an entry in the directory used to initialize the
+        L{twcgi.CGIDirectory}.
+        """
+        path = self.mktemp()
+        os.makedirs(path)
+        resource = twcgi.CGIDirectory(path)
+        request = DummyRequest(['foo'])
+        child = resource.getChild("foo", request)
+        d = _render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, NOT_FOUND)
+        d.addCallback(cbRendered)
+        return d
+
+
+
+class CGIProcessProtocolTests(unittest.TestCase):
+    """
+    Tests for L{twcgi.CGIProcessProtocol}.
+    """
+    def test_prematureEndOfHeaders(self):
+        """
+        If the process communicating with L{CGIProcessProtocol} ends before it
+        finishes writing out its headers, the response has I{INTERNAL SERVER
+        ERROR} as its status code.
+        """
+        request = DummyRequest([''])
+        protocol = twcgi.CGIProcessProtocol(request)
+        protocol.processEnded(failure.Failure(error.ProcessTerminated()))
+        self.assertEqual(request.responseCode, INTERNAL_SERVER_ERROR)
+
diff --git a/ThirdParty/Twisted/twisted/web/test/test_distrib.py b/ThirdParty/Twisted/twisted/web/test/test_distrib.py
new file mode 100644
index 0000000..c6e2ae3
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_distrib.py
@@ -0,0 +1,434 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.distrib}.
+"""
+
+from os.path import abspath
+from xml.dom.minidom import parseString
+try:
+    import pwd
+except ImportError:
+    pwd = None
+
+from zope.interface.verify import verifyObject
+
+from twisted.python import log, filepath
+from twisted.internet import reactor, defer
+from twisted.trial import unittest
+from twisted.spread import pb
+from twisted.spread.banana import SIZE_LIMIT
+from twisted.web import http, distrib, client, resource, static, server
+from twisted.web.test.test_web import DummyRequest
+from twisted.web.test._util import _render
+from twisted.test import proto_helpers
+
+
+class MySite(server.Site):
+    pass
+
+
+class PBServerFactory(pb.PBServerFactory):
+    """
+    A PB server factory which keeps track of the most recent protocol it
+    created.
+
+    @ivar proto: L{None} or the L{Broker} instance most recently returned
+        from C{buildProtocol}.
+    """
+    proto = None
+
+    def buildProtocol(self, addr):
+        self.proto = pb.PBServerFactory.buildProtocol(self, addr)
+        return self.proto
+
+
+
+class DistribTest(unittest.TestCase):
+    port1 = None
+    port2 = None
+    sub = None
+    f1 = None
+
+    def tearDown(self):
+        """
+        Clean up all the event sources left behind by either directly by
+        test methods or indirectly via some distrib API.
+        """
+        dl = [defer.Deferred(), defer.Deferred()]
+        if self.f1 is not None and self.f1.proto is not None:
+            self.f1.proto.notifyOnDisconnect(lambda: dl[0].callback(None))
+        else:
+            dl[0].callback(None)
+        if self.sub is not None and self.sub.publisher is not None:
+            self.sub.publisher.broker.notifyOnDisconnect(
+                lambda: dl[1].callback(None))
+            self.sub.publisher.broker.transport.loseConnection()
+        else:
+            dl[1].callback(None)
+        if self.port1 is not None:
+            dl.append(self.port1.stopListening())
+        if self.port2 is not None:
+            dl.append(self.port2.stopListening())
+        return defer.gatherResults(dl)
+
+
+    def testDistrib(self):
+        # site1 is the publisher
+        r1 = resource.Resource()
+        r1.putChild("there", static.Data("root", "text/plain"))
+        site1 = server.Site(r1)
+        self.f1 = PBServerFactory(distrib.ResourcePublisher(site1))
+        self.port1 = reactor.listenTCP(0, self.f1)
+        self.sub = distrib.ResourceSubscription("127.0.0.1",
+                                                self.port1.getHost().port)
+        r2 = resource.Resource()
+        r2.putChild("here", self.sub)
+        f2 = MySite(r2)
+        self.port2 = reactor.listenTCP(0, f2)
+        d = client.getPage("http://127.0.0.1:%d/here/there" % \
+                           self.port2.getHost().port)
+        d.addCallback(self.assertEqual, 'root')
+        return d
+
+
+    def _setupDistribServer(self, child):
+        """
+        Set up a resource on a distrib site using L{ResourcePublisher}.
+
+        @param child: The resource to publish using distrib.
+
+        @return: A tuple consisting of the host and port on which to contact
+            the created site.
+        """
+        distribRoot = resource.Resource()
+        distribRoot.putChild("child", child)
+        distribSite = server.Site(distribRoot)
+        self.f1 = distribFactory = PBServerFactory(
+            distrib.ResourcePublisher(distribSite))
+        distribPort = reactor.listenTCP(
+            0, distribFactory, interface="127.0.0.1")
+        self.addCleanup(distribPort.stopListening)
+        addr = distribPort.getHost()
+
+        self.sub = mainRoot = distrib.ResourceSubscription(
+            addr.host, addr.port)
+        mainSite = server.Site(mainRoot)
+        mainPort = reactor.listenTCP(0, mainSite, interface="127.0.0.1")
+        self.addCleanup(mainPort.stopListening)
+        mainAddr = mainPort.getHost()
+
+        return mainPort, mainAddr
+
+
+    def _requestTest(self, child, **kwargs):
+        """
+        Set up a resource on a distrib site using L{ResourcePublisher} and
+        then retrieve it from a L{ResourceSubscription} via an HTTP client.
+
+        @param child: The resource to publish using distrib.
+        @param **kwargs: Extra keyword arguments to pass to L{getPage} when
+            requesting the resource.
+
+        @return: A L{Deferred} which fires with the result of the request.
+        """
+        mainPort, mainAddr = self._setupDistribServer(child)
+        return client.getPage("http://%s:%s/child" % (
+            mainAddr.host, mainAddr.port), **kwargs)
+
+
+    def _requestAgentTest(self, child, **kwargs):
+        """
+        Set up a resource on a distrib site using L{ResourcePublisher} and
+        then retrieve it from a L{ResourceSubscription} via an HTTP client.
+
+        @param child: The resource to publish using distrib.
+        @param **kwargs: Extra keyword arguments to pass to L{Agent.request} when
+            requesting the resource.
+
+        @return: A L{Deferred} which fires with a tuple consisting of a
+            L{twisted.test.proto_helpers.AccumulatingProtocol} containing the
+            body of the response and an L{IResponse} with the response itself.
+        """
+        mainPort, mainAddr = self._setupDistribServer(child)
+
+        d = client.Agent(reactor).request("GET", "http://%s:%s/child" % (
+            mainAddr.host, mainAddr.port), **kwargs)
+
+        def cbCollectBody(response):
+            protocol = proto_helpers.AccumulatingProtocol()
+            response.deliverBody(protocol)
+            d = protocol.closedDeferred = defer.Deferred()
+            d.addCallback(lambda _: (protocol, response))
+            return d
+        d.addCallback(cbCollectBody)
+        return d
+
+
+    def test_requestHeaders(self):
+        """
+        The request headers are available on the request object passed to a
+        distributed resource's C{render} method.
+        """
+        requestHeaders = {}
+
+        class ReportRequestHeaders(resource.Resource):
+            def render(self, request):
+                requestHeaders.update(dict(
+                    request.requestHeaders.getAllRawHeaders()))
+                return ""
+
+        request = self._requestTest(
+            ReportRequestHeaders(), headers={'foo': 'bar'})
+        def cbRequested(result):
+            self.assertEqual(requestHeaders['Foo'], ['bar'])
+        request.addCallback(cbRequested)
+        return request
+
+
+    def test_requestResponseCode(self):
+        """
+        The response code can be set by the request object passed to a
+        distributed resource's C{render} method.
+        """
+        class SetResponseCode(resource.Resource):
+            def render(self, request):
+                request.setResponseCode(200)
+                return ""
+
+        request = self._requestAgentTest(SetResponseCode())
+        def cbRequested(result):
+            self.assertEqual(result[0].data, "")
+            self.assertEqual(result[1].code, 200)
+            self.assertEqual(result[1].phrase, "OK")
+        request.addCallback(cbRequested)
+        return request
+
+
+    def test_requestResponseCodeMessage(self):
+        """
+        The response code and message can be set by the request object passed to
+        a distributed resource's C{render} method.
+        """
+        class SetResponseCode(resource.Resource):
+            def render(self, request):
+                request.setResponseCode(200, "some-message")
+                return ""
+
+        request = self._requestAgentTest(SetResponseCode())
+        def cbRequested(result):
+            self.assertEqual(result[0].data, "")
+            self.assertEqual(result[1].code, 200)
+            self.assertEqual(result[1].phrase, "some-message")
+        request.addCallback(cbRequested)
+        return request
+
+
+    def test_largeWrite(self):
+        """
+        If a string longer than the Banana size limit is written to the
+        L{distrib.Request} passed to the remote resource, it is broken into
+        smaller strings to be transported over the PB connection.
+        """
+        class LargeWrite(resource.Resource):
+            def render(self, request):
+                request.write('x' * SIZE_LIMIT + 'y')
+                request.finish()
+                return server.NOT_DONE_YET
+
+        request = self._requestTest(LargeWrite())
+        request.addCallback(self.assertEqual, 'x' * SIZE_LIMIT + 'y')
+        return request
+
+
+    def test_largeReturn(self):
+        """
+        Like L{test_largeWrite}, but for the case where C{render} returns a
+        long string rather than explicitly passing it to L{Request.write}.
+        """
+        class LargeReturn(resource.Resource):
+            def render(self, request):
+                return 'x' * SIZE_LIMIT + 'y'
+
+        request = self._requestTest(LargeReturn())
+        request.addCallback(self.assertEqual, 'x' * SIZE_LIMIT + 'y')
+        return request
+
+
+    def test_connectionLost(self):
+        """
+        If there is an error issuing the request to the remote publisher, an
+        error response is returned.
+        """
+        # Using pb.Root as a publisher will cause request calls to fail with an
+        # error every time.  Just what we want to test.
+        self.f1 = serverFactory = PBServerFactory(pb.Root())
+        self.port1 = serverPort = reactor.listenTCP(0, serverFactory)
+
+        self.sub = subscription = distrib.ResourceSubscription(
+            "127.0.0.1", serverPort.getHost().port)
+        request = DummyRequest([''])
+        d = _render(subscription, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, 500)
+            # This is the error we caused the request to fail with.  It should
+            # have been logged.
+            self.assertEqual(len(self.flushLoggedErrors(pb.NoSuchMethod)), 1)
+        d.addCallback(cbRendered)
+        return d
+
+
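For orientation, a two-sided sketch of the distrib pattern these tests drive (the ports are illustrative, and both halves would normally run in separate processes):

    from twisted.internet import reactor
    from twisted.spread import pb
    from twisted.web import distrib, resource, server, static

    # Publisher: expose an existing site over PB.
    published = server.Site(static.Data('hello from the publisher', 'text/plain'))
    reactor.listenTCP(7000, pb.PBServerFactory(distrib.ResourcePublisher(published)))

    # Subscriber: graft the remote site into a local resource tree at /remote.
    root = resource.Resource()
    root.putChild('remote', distrib.ResourceSubscription('127.0.0.1', 7000))
    reactor.listenTCP(8080, server.Site(root))
    reactor.run()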
+
+class _PasswordDatabase:
+    def __init__(self, users):
+        self._users = users
+
+
+    def getpwall(self):
+        return iter(self._users)
+
+
+    def getpwnam(self, username):
+        for user in self._users:
+            if user[0] == username:
+                return user
+        raise KeyError()
+
+
+
+class UserDirectoryTests(unittest.TestCase):
+    """
+    Tests for L{UserDirectory}, a resource for listing all user resources
+    available on a system.
+    """
+    def setUp(self):
+        self.alice = ('alice', 'x', 123, 456, 'Alice,,,', self.mktemp(), '/bin/sh')
+        self.bob = ('bob', 'x', 234, 567, 'Bob,,,', self.mktemp(), '/bin/sh')
+        self.database = _PasswordDatabase([self.alice, self.bob])
+        self.directory = distrib.UserDirectory(self.database)
+
+
+    def test_interface(self):
+        """
+        L{UserDirectory} instances provide L{resource.IResource}.
+        """
+        self.assertTrue(verifyObject(resource.IResource, self.directory))
+
+
+    def _404Test(self, name):
+        """
+        Verify that requesting the C{name} child of C{self.directory} results
+        in a 404 response.
+        """
+        request = DummyRequest([name])
+        result = self.directory.getChild(name, request)
+        d = _render(result, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, 404)
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_getInvalidUser(self):
+        """
+        L{UserDirectory.getChild} returns a resource which renders a 404
+        response when passed a string which does not correspond to any known
+        user.
+        """
+        return self._404Test('carol')
+
+
+    def test_getUserWithoutResource(self):
+        """
+        L{UserDirectory.getChild} returns a resource which renders a 404
+        response when passed a string which corresponds to a known user who has
+        neither a user directory nor a user distrib socket.
+        """
+        return self._404Test('alice')
+
+
+    def test_getPublicHTMLChild(self):
+        """
+        L{UserDirectory.getChild} returns a L{static.File} instance when passed
+        the name of a user with a home directory containing a I{public_html}
+        directory.
+        """
+        home = filepath.FilePath(self.bob[-2])
+        public_html = home.child('public_html')
+        public_html.makedirs()
+        request = DummyRequest(['bob'])
+        result = self.directory.getChild('bob', request)
+        self.assertIsInstance(result, static.File)
+        self.assertEqual(result.path, public_html.path)
+
+
+    def test_getDistribChild(self):
+        """
+        L{UserDirectory.getChild} returns a L{ResourceSubscription} instance
+        when passed the name of a user suffixed with C{".twistd"} who has a
+        home directory containing a I{.twistd-web-pb} socket.
+        """
+        home = filepath.FilePath(self.bob[-2])
+        home.makedirs()
+        web = home.child('.twistd-web-pb')
+        request = DummyRequest(['bob'])
+        result = self.directory.getChild('bob.twistd', request)
+        self.assertIsInstance(result, distrib.ResourceSubscription)
+        self.assertEqual(result.host, 'unix')
+        self.assertEqual(abspath(result.port), web.path)
+
+
+    def test_invalidMethod(self):
+        """
+        L{UserDirectory.render} raises L{UnsupportedMethod} in response to a
+        non-I{GET} request.
+        """
+        request = DummyRequest([''])
+        request.method = 'POST'
+        self.assertRaises(
+            server.UnsupportedMethod, self.directory.render, request)
+
+
+    def test_render(self):
+        """
+        L{UserDirectory} renders a list of links to available user content
+        in response to a I{GET} request.
+        """
+        public_html = filepath.FilePath(self.alice[-2]).child('public_html')
+        public_html.makedirs()
+        web = filepath.FilePath(self.bob[-2])
+        web.makedirs()
+        # This really only works if it's a unix socket, but the implementation
+        # doesn't currently check for that.  It probably should someday, and
+        # then skip users with non-sockets.
+        web.child('.twistd-web-pb').setContent("")
+
+        request = DummyRequest([''])
+        result = _render(self.directory, request)
+        def cbRendered(ignored):
+            document = parseString(''.join(request.written))
+
+            # Each user should have an li with a link to their page.
+            [alice, bob] = document.getElementsByTagName('li')
+            self.assertEqual(alice.firstChild.tagName, 'a')
+            self.assertEqual(alice.firstChild.getAttribute('href'), 'alice/')
+            self.assertEqual(alice.firstChild.firstChild.data, 'Alice (file)')
+            self.assertEqual(bob.firstChild.tagName, 'a')
+            self.assertEqual(bob.firstChild.getAttribute('href'), 'bob.twistd/')
+            self.assertEqual(bob.firstChild.firstChild.data, 'Bob (twistd)')
+
+        result.addCallback(cbRendered)
+        return result
+
+
+    def test_passwordDatabase(self):
+        """
+        If L{UserDirectory} is instantiated with no arguments, it uses the
+        L{pwd} module as its password database.
+        """
+        directory = distrib.UserDirectory()
+        self.assertIdentical(directory._pwd, pwd)
+    if pwd is None:
+        test_passwordDatabase.skip = "pwd module required"
+
diff --git a/ThirdParty/Twisted/twisted/web/test/test_domhelpers.py b/ThirdParty/Twisted/twisted/web/test/test_domhelpers.py
new file mode 100644
index 0000000..b008374
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_domhelpers.py
@@ -0,0 +1,306 @@
+# -*- test-case-name: twisted.web.test.test_domhelpers -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Specific tests for (some of) the methods in L{twisted.web.domhelpers}.
+"""
+
+from xml.dom import minidom
+
+from twisted.trial.unittest import TestCase
+
+from twisted.web import microdom
+
+from twisted.web import domhelpers
+
+
+class DOMHelpersTestsMixin:
+    """
+    A mixin for L{TestCase} subclasses which defines test methods for
+    domhelpers functionality based on a DOM creation function provided by a
+    subclass.
+    """
+    dom = None
+
+    def test_getElementsByTagName(self):
+        doc1 = self.dom.parseString('<foo/>')
+        actual=domhelpers.getElementsByTagName(doc1, 'foo')[0].nodeName
+        expected='foo'
+        self.assertEqual(actual, expected)
+        el1=doc1.documentElement
+        actual=domhelpers.getElementsByTagName(el1, 'foo')[0].nodeName
+        self.assertEqual(actual, expected)
+
+        doc2_xml='<a><foo in="a"/><b><foo in="b"/></b><c><foo in="c"/></c><foo in="d"/><foo in="ef"/><g><foo in="g"/><h><foo in="h"/></h></g></a>'
+        doc2 = self.dom.parseString(doc2_xml)
+        tag_list=domhelpers.getElementsByTagName(doc2, 'foo')
+        actual=''.join([node.getAttribute('in') for node in tag_list])
+        expected='abcdefgh'
+        self.assertEqual(actual, expected)
+        el2=doc2.documentElement
+        tag_list=domhelpers.getElementsByTagName(el2, 'foo')
+        actual=''.join([node.getAttribute('in') for node in tag_list])
+        self.assertEqual(actual, expected)
+
+        doc3_xml='''
+<a><foo in="a"/>
+    <b><foo in="b"/>
+        <d><foo in="d"/>
+            <g><foo in="g"/></g>
+            <h><foo in="h"/></h>
+        </d>
+        <e><foo in="e"/>
+            <i><foo in="i"/></i>
+        </e>
+    </b>
+    <c><foo in="c"/>
+        <f><foo in="f"/>
+            <j><foo in="j"/></j>
+        </f>
+    </c>
+</a>'''
+        doc3 = self.dom.parseString(doc3_xml)
+        tag_list=domhelpers.getElementsByTagName(doc3, 'foo')
+        actual=''.join([node.getAttribute('in') for node in tag_list])
+        expected='abdgheicfj'
+        self.assertEqual(actual, expected)
+        el3=doc3.documentElement
+        tag_list=domhelpers.getElementsByTagName(el3, 'foo')
+        actual=''.join([node.getAttribute('in') for node in tag_list])
+        self.assertEqual(actual, expected)
+
+        doc4_xml='<foo><bar></bar><baz><foo/></baz></foo>'
+        doc4 = self.dom.parseString(doc4_xml)
+        actual=domhelpers.getElementsByTagName(doc4, 'foo')
+        root=doc4.documentElement
+        expected=[root, root.childNodes[-1].childNodes[0]]
+        self.assertEqual(actual, expected)
+        actual=domhelpers.getElementsByTagName(root, 'foo')
+        self.assertEqual(actual, expected)
+
+
+    def test_gatherTextNodes(self):
+        doc1 = self.dom.parseString('<a>foo</a>')
+        actual=domhelpers.gatherTextNodes(doc1)
+        expected='foo'
+        self.assertEqual(actual, expected)
+        actual=domhelpers.gatherTextNodes(doc1.documentElement)
+        self.assertEqual(actual, expected)
+
+        doc2_xml='<a>a<b>b</b><c>c</c>def<g>g<h>h</h></g></a>'
+        doc2 = self.dom.parseString(doc2_xml)
+        actual=domhelpers.gatherTextNodes(doc2)
+        expected='abcdefgh'
+        self.assertEqual(actual, expected)
+        actual=domhelpers.gatherTextNodes(doc2.documentElement)
+        self.assertEqual(actual, expected)
+
+        doc3_xml=('<a>a<b>b<d>d<g>g</g><h>h</h></d><e>e<i>i</i></e></b>' +
+                  '<c>c<f>f<j>j</j></f></c></a>')
+        doc3 = self.dom.parseString(doc3_xml)
+        actual=domhelpers.gatherTextNodes(doc3)
+        expected='abdgheicfj'
+        self.assertEqual(actual, expected)
+        actual=domhelpers.gatherTextNodes(doc3.documentElement)
+        self.assertEqual(actual, expected)
+
+    def test_clearNode(self):
+        doc1 = self.dom.parseString('<a><b><c><d/></c></b></a>')
+        a_node=doc1.documentElement
+        domhelpers.clearNode(a_node)
+        self.assertEqual(
+            a_node.toxml(),
+            self.dom.Element('a').toxml())
+
+        doc2 = self.dom.parseString('<a><b><c><d/></c></b></a>')
+        b_node=doc2.documentElement.childNodes[0]
+        domhelpers.clearNode(b_node)
+        actual=doc2.documentElement.toxml()
+        expected = self.dom.Element('a')
+        expected.appendChild(self.dom.Element('b'))
+        self.assertEqual(actual, expected.toxml())
+
+
+    def test_get(self):
+        doc1 = self.dom.parseString('<a><b id="bar"/><c class="foo"/></a>')
+        node=domhelpers.get(doc1, "foo")
+        actual=node.toxml()
+        expected = self.dom.Element('c')
+        expected.setAttribute('class', 'foo')
+        self.assertEqual(actual, expected.toxml())
+
+        node=domhelpers.get(doc1, "bar")
+        actual=node.toxml()
+        expected = self.dom.Element('b')
+        expected.setAttribute('id', 'bar')
+        self.assertEqual(actual, expected.toxml())
+
+        self.assertRaises(domhelpers.NodeLookupError,
+                          domhelpers.get,
+                          doc1,
+                          "pzork")
+
+    def test_getIfExists(self):
+        doc1 = self.dom.parseString('<a><b id="bar"/><c class="foo"/></a>')
+        node=domhelpers.getIfExists(doc1, "foo")
+        actual=node.toxml()
+        expected = self.dom.Element('c')
+        expected.setAttribute('class', 'foo')
+        self.assertEqual(actual, expected.toxml())
+
+        node=domhelpers.getIfExists(doc1, "pzork")
+        self.assertIdentical(node, None)
+
+
+    def test_getAndClear(self):
+        doc1 = self.dom.parseString('<a><b id="foo"><c></c></b></a>')
+        node=domhelpers.getAndClear(doc1, "foo")
+        actual=node.toxml()
+        expected = self.dom.Element('b')
+        expected.setAttribute('id', 'foo')
+        self.assertEqual(actual, expected.toxml())
+
+
+    def test_locateNodes(self):
+        doc1 = self.dom.parseString('<a><b foo="olive"><c foo="olive"/></b><d foo="poopy"/></a>')
+        node_list=domhelpers.locateNodes(
+            doc1.childNodes, 'foo', 'olive', noNesting=1)
+        actual=''.join([node.toxml() for node in node_list])
+        expected = self.dom.Element('b')
+        expected.setAttribute('foo', 'olive')
+        c = self.dom.Element('c')
+        c.setAttribute('foo', 'olive')
+        expected.appendChild(c)
+
+        self.assertEqual(actual, expected.toxml())
+
+        node_list=domhelpers.locateNodes(
+            doc1.childNodes, 'foo', 'olive', noNesting=0)
+        actual=''.join([node.toxml() for node in node_list])
+        self.assertEqual(actual, expected.toxml() + c.toxml())
+
+
+    def test_getParents(self):
+        doc1 = self.dom.parseString('<a><b><c><d/></c><e/></b><f/></a>')
+        node_list = domhelpers.getParents(
+            doc1.childNodes[0].childNodes[0].childNodes[0])
+        actual = ''.join([node.tagName for node in node_list
+                          if hasattr(node, 'tagName')])
+        self.assertEqual(actual, 'cba')
+
+
+    def test_findElementsWithAttribute(self):
+        doc1 = self.dom.parseString('<a foo="1"><b foo="2"/><c foo="1"/><d/></a>')
+        node_list = domhelpers.findElementsWithAttribute(doc1, 'foo')
+        actual = ''.join([node.tagName for node in node_list])
+        self.assertEqual(actual, 'abc')
+
+        node_list = domhelpers.findElementsWithAttribute(doc1, 'foo', '1')
+        actual = ''.join([node.tagName for node in node_list])
+        self.assertEqual(actual, 'ac')
+
+
+    def test_findNodesNamed(self):
+        doc1 = self.dom.parseString('<doc><foo/><bar/><foo>a</foo></doc>')
+        node_list = domhelpers.findNodesNamed(doc1, 'foo')
+        actual = len(node_list)
+        self.assertEqual(actual, 2)
+
+    # NOT SURE WHAT THESE ARE SUPPOSED TO DO..
+    # def test_RawText  FIXME
+    # def test_superSetAttribute FIXME
+    # def test_superPrependAttribute FIXME
+    # def test_superAppendAttribute FIXME
+    # def test_substitute FIXME
+
+    def test_escape(self):
+        j='this string " contains many & characters> xml< won\'t like'
+        expected='this string &quot; contains many &amp; characters&gt; xml&lt; won\'t like'
+        self.assertEqual(domhelpers.escape(j), expected)
+
+    def test_unescape(self):
+        j='this string " has && entities > < and some characters xml won\'t like<'
+        expected='this string " has && entities > < and some characters xml won\'t like<'
+        self.assertEqual(domhelpers.unescape(j), expected)
+
+
+    def test_getNodeText(self):
+        """
+        L{getNodeText} returns the concatenation of all the text data at or
+        beneath the node passed to it.
+        """
+        node = self.dom.parseString('<foo><bar>baz</bar><bar>quux</bar></foo>')
+        self.assertEqual(domhelpers.getNodeText(node), "bazquux")
+
+
+
+class MicroDOMHelpersTests(DOMHelpersTestsMixin, TestCase):
+    dom = microdom
+
+    def test_gatherTextNodesDropsWhitespace(self):
+        """
+        Microdom discards whitespace-only text nodes, so L{gatherTextNodes}
+        returns only the text from nodes which had non-whitespace characters.
+        """
+        doc4_xml='''<html>
+  <head>
+  </head>
+  <body>
+    stuff
+  </body>
+</html>
+'''
+        doc4 = self.dom.parseString(doc4_xml)
+        actual = domhelpers.gatherTextNodes(doc4)
+        expected = '\n    stuff\n  '
+        self.assertEqual(actual, expected)
+        actual = domhelpers.gatherTextNodes(doc4.documentElement)
+        self.assertEqual(actual, expected)
+
+
+    def test_textEntitiesNotDecoded(self):
+        """
+        Microdom does not decode entities in text nodes.
+        """
+        doc5_xml='<x>Souffl&amp;</x>'
+        doc5 = self.dom.parseString(doc5_xml)
+        actual=domhelpers.gatherTextNodes(doc5)
+        expected='Souffl&amp;'
+        self.assertEqual(actual, expected)
+        actual=domhelpers.gatherTextNodes(doc5.documentElement)
+        self.assertEqual(actual, expected)
+
+
+
+class MiniDOMHelpersTests(DOMHelpersTestsMixin, TestCase):
+    dom = minidom
+
+    def test_textEntitiesDecoded(self):
+        """
+        Minidom does decode entities in text nodes.
+        """
+        doc5_xml='<x>Souffl&amp;</x>'
+        doc5 = self.dom.parseString(doc5_xml)
+        actual=domhelpers.gatherTextNodes(doc5)
+        expected='Souffl&'
+        self.assertEqual(actual, expected)
+        actual=domhelpers.gatherTextNodes(doc5.documentElement)
+        self.assertEqual(actual, expected)
+
+
+    def test_getNodeUnicodeText(self):
+        """
+        L{domhelpers.getNodeText} returns a C{unicode} string when text
+        nodes are represented in the DOM with unicode, whether or not there
+        are non-ASCII characters present.
+        """
+        node = self.dom.parseString("<foo>bar</foo>")
+        text = domhelpers.getNodeText(node)
+        self.assertEqual(text, u"bar")
+        self.assertIsInstance(text, unicode)
+
+        node = self.dom.parseString(u"<foo>\N{SNOWMAN}</foo>".encode('utf-8'))
+        text = domhelpers.getNodeText(node)
+        self.assertEqual(text, u"\N{SNOWMAN}")
+        self.assertIsInstance(text, unicode)
diff --git a/ThirdParty/Twisted/twisted/web/test/test_error.py b/ThirdParty/Twisted/twisted/web/test/test_error.py
new file mode 100644
index 0000000..4daa7d9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_error.py
@@ -0,0 +1,151 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+HTTP errors.
+"""
+
+from twisted.trial import unittest
+from twisted.web import error
+
+class ErrorTestCase(unittest.TestCase):
+    """
+    Tests for how L{Error} attributes are initialized.
+    """
+    def test_noMessageValidStatus(self):
+        """
+        If no C{message} argument is passed to the L{Error} constructor and the
+        C{code} argument is a valid HTTP status code, C{code} is mapped to a
+        descriptive string to which C{message} is assigned.
+        """
+        e = error.Error("200")
+        self.assertEqual(e.message, "OK")
+
+
+    def test_noMessageInvalidStatus(self):
+        """
+        If no C{message} argument is passed to the L{Error} constructor and
+        C{code} isn't a valid HTTP status code, C{message} stays C{None}.
+        """
+        e = error.Error("InvalidCode")
+        self.assertEqual(e.message, None)
+
+
+    def test_messageExists(self):
+        """
+        If a C{message} argument is passed to the L{Error} constructor, the
+        C{message} isn't affected by the value of C{status}.
+        """
+        e = error.Error("200", "My own message")
+        self.assertEqual(e.message, "My own message")
+
+
+
+class PageRedirectTestCase(unittest.TestCase):
+    """
+    Tests for how L{PageRedirect} attributes are initialized.
+    """
+    def test_noMessageValidStatus(self):
+        """
+        If no C{message} argument is passed to the L{PageRedirect} constructor
+        and the C{code} argument is a valid HTTP status code, C{code} is mapped
+        to a descriptive string to which C{message} is assigned.
+        """
+        e = error.PageRedirect("200", location="/foo")
+        self.assertEqual(e.message, "OK to /foo")
+
+
+    def test_noMessageValidStatusNoLocation(self):
+        """
+        If no C{message} argument is passed to the L{PageRedirect} constructor
+        and C{location} is also empty and the C{code} argument is a valid HTTP
+        status code, C{code} is mapped to a descriptive string to which
+        C{message} is assigned without trying to include an empty location.
+        """
+        e = error.PageRedirect("200")
+        self.assertEqual(e.message, "OK")
+
+
+    def test_noMessageInvalidStatusLocationExists(self):
+        """
+        If no C{message} argument is passed to the L{PageRedirect} constructor
+        and C{code} isn't a valid HTTP status code, C{message} stays C{None}.
+        """
+        e = error.PageRedirect("InvalidCode", location="/foo")
+        self.assertEqual(e.message, None)
+
+
+    def test_messageExistsLocationExists(self):
+        """
+        If a C{message} argument is passed to the L{PageRedirect} constructor,
+        the C{message} isn't affected by the value of C{status}.
+        """
+        e = error.PageRedirect("200", "My own message", location="/foo")
+        self.assertEqual(e.message, "My own message to /foo")
+
+
+    def test_messageExistsNoLocation(self):
+        """
+        If a C{message} argument is passed to the L{PageRedirect} constructor
+        and no location is provided, C{message} doesn't try to include the empty
+        location.
+        """
+        e = error.PageRedirect("200", "My own message")
+        self.assertEqual(e.message, "My own message")
+
+
+
+class InfiniteRedirectionTestCase(unittest.TestCase):
+    """
+    Tests for how L{InfiniteRedirection} attributes are initialized.
+    """
+    def test_noMessageValidStatus(self):
+        """
+        If no C{message} argument is passed to the L{InfiniteRedirection}
+        constructor and the C{code} argument is a valid HTTP status code,
+        C{code} is mapped to a descriptive string to which C{message} is
+        assigned.
+        """
+        e = error.InfiniteRedirection("200", location="/foo")
+        self.assertEqual(e.message, "OK to /foo")
+
+
+    def test_noMessageValidStatusNoLocation(self):
+        """
+        If no C{message} argument is passed to the L{InfiniteRedirection}
+        constructor and C{location} is also empty and the C{code} argument is a
+        valid HTTP status code, C{code} is mapped to a descriptive string to
+        which C{message} is assigned without trying to include an empty
+        location.
+        """
+        e = error.InfiniteRedirection("200")
+        self.assertEqual(e.message, "OK")
+
+
+    def test_noMessageInvalidStatusLocationExists(self):
+        """
+        If no C{message} argument is passed to the L{InfiniteRedirection}
+        constructor and C{code} isn't a valid HTTP status code, C{message} stays
+        C{None}.
+        """
+        e = error.InfiniteRedirection("InvalidCode", location="/foo")
+        self.assertEqual(e.message, None)
+
+
+    def test_messageExistsLocationExists(self):
+        """
+        If a C{message} argument is passed to the L{InfiniteRedirection}
+        constructor, the C{message} isn't affected by the value of C{status}.
+        """
+        e = error.InfiniteRedirection("200", "My own message", location="/foo")
+        self.assertEqual(e.message, "My own message to /foo")
+
+
+    def test_messageExistsNoLocation(self):
+        """
+        If a C{message} argument is passed to the L{InfiniteRedirection}
+        constructor and no location is provided, C{message} doesn't try to
+        include the empty location.
+        """
+        e = error.InfiniteRedirection("200", "My own message")
+        self.assertEqual(e.message, "My own message")
diff --git a/ThirdParty/Twisted/twisted/web/test/test_flatten.py b/ThirdParty/Twisted/twisted/web/test/test_flatten.py
new file mode 100644
index 0000000..c843a61
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_flatten.py
@@ -0,0 +1,348 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+import traceback
+
+from zope.interface import implements
+
+from twisted.trial.unittest import TestCase
+from twisted.internet.defer import succeed, gatherResults
+from twisted.web._stan import Tag
+from twisted.web._flatten import flattenString
+from twisted.web.error import UnfilledSlot, UnsupportedType, FlattenerError
+from twisted.web.template import tags, Comment, CDATA, CharRef, slot
+from twisted.web.iweb import IRenderable
+from twisted.web.test._util import FlattenTestCase
+
+
+class TestSerialization(FlattenTestCase):
+    """
+    Tests for flattening various things.
+    """
+    def test_nestedTags(self):
+        """
+        Test that nested tags flatten correctly.
+        """
+        return self.assertFlattensTo(
+            tags.html(tags.body('42'), hi='there'),
+            '<html hi="there"><body>42</body></html>')
+
+
+    def test_serializeString(self):
+        """
+        Test that strings will be flattened and escaped correctly.
+        """
+        return gatherResults([
+            self.assertFlattensTo('one', 'one'),
+            self.assertFlattensTo('<abc&&>123', '&lt;abc&amp;&amp;&gt;123'),
+        ])
+
+
+    def test_serializeSelfClosingTags(self):
+        """
+        Test that some tags are normally written out as self-closing tags.
+        """
+        return self.assertFlattensTo(tags.img(src='test'), '<img src="test" />')
+
+
+    def test_serializeComment(self):
+        """
+        Test that comments are correctly flattened and escaped.
+        """
+        return self.assertFlattensTo(Comment('foo bar'), '<!--foo bar-->')
+
+
+    def test_commentEscaping(self):
+        """
+        The data in a L{Comment} is escaped and mangled in the flattened output
+        so that the result is a legal SGML and XML comment.
+
+        SGML comment syntax is complicated and hard to use. This rule is more
+        restrictive, and more compatible:
+
+        Comments start with <!-- and end with --> and never contain -- or >.
+
+        Also by XML syntax, a comment may not end with '-'.
+
+        @see: U{http://www.w3.org/TR/REC-xml/#sec-comments}
+        """
+        def verifyComment(c):
+            self.assertTrue(
+                c.startswith('<!--'),
+                "%r does not start with the comment prefix" % (c,))
+            self.assertTrue(
+                c.endswith('-->'),
+                "%r does not end with the comment suffix" % (c,))
+            # If it is shorter than 7, then the prefix and suffix overlap
+            # illegally.
+            self.assertTrue(
+                len(c) >= 7,
+                "%r is too short to be a legal comment" % (c,))
+            content = c[4:-3]
+            self.assertNotIn('--', content)
+            self.assertNotIn('>', content)
+            if content:
+                self.assertNotEqual(content[-1], '-')
+
+        results = []
+        for c in [
+            '',
+            'foo---bar',
+            'foo---bar-',
+            'foo>bar',
+            'foo-->bar',
+            '----------------',
+        ]:
+            d = flattenString(None, Comment(c))
+            d.addCallback(verifyComment)
+            results.append(d)
+        return gatherResults(results)
+
+
+    def test_serializeCDATA(self):
+        """
+        Test that CDATA is correctly flattened and escaped.
+        """
+        return gatherResults([
+            self.assertFlattensTo(CDATA('foo bar'), '<![CDATA[foo bar]]>'),
+            self.assertFlattensTo(
+                CDATA('foo ]]> bar'),
+                '<![CDATA[foo ]]]]><![CDATA[> bar]]>'),
+        ])
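+        # For reference, the second expectation above follows the generic
+        # CDATA-splitting idiom (shown here only as a sketch, not the
+        # flattener's actual implementation):
+        #
+        #   def cdataEscape(text):
+        #       # Break every ']]>' so it cannot terminate the section early.
+        #       return '<![CDATA[' + text.replace(']]>', ']]]]><![CDATA[>') + ']]>'
+        #
+        #   cdataEscape('foo ]]> bar') == '<![CDATA[foo ]]]]><![CDATA[> bar]]>'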
+
+
+    def test_serializeUnicode(self):
+        """
+        Test that unicode is encoded correctly in the appropriate places, and
+        raises an error when it occurs in an inappropriate place.
+        """
+        snowman = u'\N{SNOWMAN}'
+        return gatherResults([
+            self.assertFlattensTo(snowman, '\xe2\x98\x83'),
+            self.assertFlattensTo(tags.p(snowman), '<p>\xe2\x98\x83</p>'),
+            self.assertFlattensTo(Comment(snowman), '<!--\xe2\x98\x83-->'),
+            self.assertFlattensTo(CDATA(snowman), '<![CDATA[\xe2\x98\x83]]>'),
+            self.assertFlatteningRaises(
+                Tag(snowman), UnicodeEncodeError),
+            self.assertFlatteningRaises(
+                Tag('p', attributes={snowman: ''}), UnicodeEncodeError),
+        ])
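+        # For reference: '\xe2\x98\x83' in the expectations above is the UTF-8
+        # encoding of U+2603 SNOWMAN, i.e. snowman.encode('utf-8').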
+
+
+    def test_serializeCharRef(self):
+        """
+        A character reference is flattened to a string using the I{&#NNNN;}
+        syntax.
+        """
+        ref = CharRef(ord(u"\N{SNOWMAN}"))
+        return self.assertFlattensTo(ref, "&#9731;")
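+        # For reference: ord(u"\N{SNOWMAN}") is 0x2603 == 9731, hence the
+        # decimal character reference "&#9731;" expected above.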
+
+
+    def test_serializeDeferred(self):
+        """
+        Test that a deferred is substituted with the current value in the
+        callback chain when flattened.
+        """
+        return self.assertFlattensTo(succeed('two'), 'two')
+
+
+    def test_serializeSameDeferredTwice(self):
+        """
+        Test that the same deferred can be flattened twice.
+        """
+        d = succeed('three')
+        return gatherResults([
+            self.assertFlattensTo(d, 'three'),
+            self.assertFlattensTo(d, 'three'),
+        ])
+
+
+    def test_serializeIRenderable(self):
+        """
+        Test that flattening respects all of the IRenderable interface.
+        """
+        class FakeElement(object):
+            implements(IRenderable)
+            def render(ign,ored):
+                return tags.p(
+                    'hello, ',
+                    tags.transparent(render='test'), ' - ',
+                    tags.transparent(render='test'))
+            def lookupRenderMethod(ign, name):
+                self.assertEqual(name, 'test')
+                return lambda ign, node: node('world')
+
+        return gatherResults([
+            self.assertFlattensTo(FakeElement(), '<p>hello, world - world</p>'),
+        ])
+
+
+    def test_serializeSlots(self):
+        """
+        Test that flattening a slot will use the slot value from the tag.
+        """
+        t1 = tags.p(slot('test'))
+        t2 = t1.clone()
+        t2.fillSlots(test='hello, world')
+        return gatherResults([
+            self.assertFlatteningRaises(t1, UnfilledSlot),
+            self.assertFlattensTo(t2, '<p>hello, world</p>'),
+        ])
+
+
+    def test_serializeDeferredSlots(self):
+        """
+        Test that a slot with a deferred as its value will be flattened using
+        the value from the deferred.
+        """
+        t = tags.p(slot('test'))
+        t.fillSlots(test=succeed(tags.em('four>')))
+        return self.assertFlattensTo(t, '<p><em>four&gt;</em></p>')
+
+
+    def test_unknownTypeRaises(self):
+        """
+        Test that flattening an unknown type of thing raises an exception.
+        """
+        return self.assertFlatteningRaises(None, UnsupportedType)
+
+
+# Use the co_filename mechanism (instead of the __file__ mechanism) because
+# it is the mechanism traceback formatting uses.  The two do not necessarily
+# agree with each other.  This requires a code object compiled in this file.
+# The easiest way to get a code object is with a new function.  I'll use a
+# lambda to avoid adding anything else to this namespace.  The result will
+# be a string which agrees with the one the traceback module will put into a
+# traceback for frames associated with functions defined in this file.
+
+HERE = (lambda: None).func_code.co_filename
+
+
+class FlattenerErrorTests(TestCase):
+    """
+    Tests for L{FlattenerError}.
+    """
+
+    def test_string(self):
+        """
+        If a L{FlattenerError} is created with a string root, up to around 40
+        bytes from that string are included in the string representation of the
+        exception.
+        """
+        self.assertEqual(
+            str(FlattenerError(RuntimeError("reason"), ['abc123xyz'], [])),
+            "Exception while flattening:\n"
+            "  'abc123xyz'\n"
+            "RuntimeError: reason\n")
+        self.assertEqual(
+            str(FlattenerError(
+                    RuntimeError("reason"), ['0123456789' * 10], [])),
+            "Exception while flattening:\n"
+            "  '01234567890123456789<...>01234567890123456789'\n"
+            "RuntimeError: reason\n")
+
+
+    def test_unicode(self):
+        """
+        If a L{FlattenerError} is created with a unicode root, up to around 40
+        characters from that string are included in the string representation
+        of the exception.
+        """
+        self.assertEqual(
+            str(FlattenerError(
+                    RuntimeError("reason"), [u'abc\N{SNOWMAN}xyz'], [])),
+            "Exception while flattening:\n"
+            "  u'abc\\u2603xyz'\n" # Codepoint for SNOWMAN
+            "RuntimeError: reason\n")
+        self.assertEqual(
+            str(FlattenerError(
+                    RuntimeError("reason"), [u'01234567\N{SNOWMAN}9' * 10],
+                    [])),
+            "Exception while flattening:\n"
+            "  u'01234567\\u2603901234567\\u26039<...>01234567\\u2603901234567"
+            "\\u26039'\n"
+            "RuntimeError: reason\n")
+
+
+    def test_renderable(self):
+        """
+        If a L{FlattenerError} is created with an L{IRenderable} provider root,
+        the repr of that object is included in the string representation of the
+        exception.
+        """
+        class Renderable(object):
+            implements(IRenderable)
+
+            def __repr__(self):
+                return "renderable repr"
+
+        self.assertEqual(
+            str(FlattenerError(
+                    RuntimeError("reason"), [Renderable()], [])),
+            "Exception while flattening:\n"
+            "  renderable repr\n"
+            "RuntimeError: reason\n")
+
+
+    def test_tag(self):
+        """
+        If a L{FlattenerError} is created with a L{Tag} instance with source
+        location information, the source location is included in the string
+        representation of the exception.
+        """
+        tag = Tag(
+            'div', filename='/foo/filename.xhtml', lineNumber=17, columnNumber=12)
+
+        self.assertEqual(
+            str(FlattenerError(RuntimeError("reason"), [tag], [])),
+            "Exception while flattening:\n"
+            "  File \"/foo/filename.xhtml\", line 17, column 12, in \"div\"\n"
+            "RuntimeError: reason\n")
+
+
+    def test_tagWithoutLocation(self):
+        """
+        If a L{FlattenerError} is created with a L{Tag} instance without source
+        location information, only the tagName is included in the string
+        representation of the exception.
+        """
+        self.assertEqual(
+            str(FlattenerError(RuntimeError("reason"), [Tag('span')], [])),
+            "Exception while flattening:\n"
+            "  Tag <span>\n"
+            "RuntimeError: reason\n")
+
+
+    def test_traceback(self):
+        """
+        If a L{FlattenerError} is created with traceback frames, they are
+        included in the string representation of the exception.
+        """
+        # Try to be realistic in creating the data passed in for the traceback
+        # frames.
+        def f():
+            g()
+        def g():
+            raise RuntimeError("reason")
+
+        try:
+            f()
+        except RuntimeError, exc:
+            # Get the traceback, minus the info for *this* frame
+            tbinfo = traceback.extract_tb(sys.exc_info()[2])[1:]
+        else:
+            self.fail("f() must raise RuntimeError")
+
+        self.assertEqual(
+            str(FlattenerError(exc, [], tbinfo)),
+            "Exception while flattening:\n"
+            "  File \"%s\", line %d, in f\n"
+            "    g()\n"
+            "  File \"%s\", line %d, in g\n"
+            "    raise RuntimeError(\"reason\")\n"
+            "RuntimeError: reason\n" % (
+                HERE, f.func_code.co_firstlineno + 1,
+                HERE, g.func_code.co_firstlineno + 1))
+
diff --git a/ThirdParty/Twisted/twisted/web/test/test_http.py b/ThirdParty/Twisted/twisted/web/test/test_http.py
new file mode 100644
index 0000000..f3f2dcc
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_http.py
@@ -0,0 +1,1849 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test HTTP support.
+"""
+
+import random, cgi, base64
+
+try:
+    from urlparse import (
+        ParseResult as ParseResultBytes, urlparse, urlunsplit, clear_cache)
+except ImportError:
+    from urllib.parse import (
+        ParseResultBytes, urlparse, urlunsplit, clear_cache)
+
+from twisted.python.compat import _PY3, set, iterbytes, networkString, unicode, intToBytes
+from twisted.python.failure import Failure
+from twisted.trial import unittest
+from twisted.trial.unittest import TestCase
+from twisted.web import http, http_headers
+from twisted.web.http import PotentialDataLoss, _DataLoss
+from twisted.web.http import _IdentityTransferDecoder
+from twisted.internet.task import Clock
+from twisted.internet.error import ConnectionLost
+from twisted.protocols import loopback
+from twisted.test.proto_helpers import StringTransport
+from twisted.test.test_internet import DummyProducer
+from twisted.web.test.requesthelper import DummyChannel
+
+
+
+
+class DateTimeTest(unittest.TestCase):
+    """Test date parsing functions."""
+
+    def testRoundtrip(self):
+        for i in range(10000):
+            time = random.randint(0, 2000000000)
+            timestr = http.datetimeToString(time)
+            time2 = http.stringToDatetime(timestr)
+            self.assertEqual(time, time2)
+
+
+class DummyHTTPHandler(http.Request):
+
+    def process(self):
+        self.content.seek(0, 0)
+        data = self.content.read()
+        length = self.getHeader(b'content-length')
+        if length is None:
+            length = networkString(str(length))
+        request = b"'''\n" + length + b"\n" + data + b"'''\n"
+        self.setResponseCode(200)
+        self.setHeader(b"Request", self.uri)
+        self.setHeader(b"Command", self.method)
+        self.setHeader(b"Version", self.clientproto)
+        self.setHeader(b"Content-Length", intToBytes(len(request)))
+        self.write(request)
+        self.finish()
+
+
+class LoopbackHTTPClient(http.HTTPClient):
+
+    def connectionMade(self):
+        self.sendCommand(b"GET", b"/foo/bar")
+        self.sendHeader(b"Content-Length", 10)
+        self.endHeaders()
+        self.transport.write(b"0123456789")
+
+
+class ResponseTestMixin(object):
+    """
+    A mixin that provides a simple means of comparing an actual response string
+    to an expected response string by performing the minimal parsing.
+    """
+
+    def assertResponseEquals(self, responses, expected):
+        """
+        Assert that the C{responses} matches the C{expected} responses.
+
+        @type responses: C{bytes}
+        @param responses: The bytes sent in response to one or more requests.
+
+        @type expected: C{list} of C{tuple} of C{bytes}
+        @param expected: The expected values for the responses.  Each tuple
+            element of the list represents one response.  Each byte string
+            element of the tuple is a full header line without delimiter, except
+            for the last element which gives the full response body.
+        """
+        for response in expected:
+            expectedHeaders, expectedContent = response[:-1], response[-1]
+            # Intentionally avoid mutating the inputs here.
+            expectedStatus = expectedHeaders[0]
+            expectedHeaders = expectedHeaders[1:]
+
+            headers, rest = responses.split(b'\r\n\r\n', 1)
+            headers = headers.splitlines()
+            status = headers.pop(0)
+
+            self.assertEqual(expectedStatus, status)
+            self.assertEqual(set(headers), set(expectedHeaders))
+            content = rest[:len(expectedContent)]
+            responses = rest[len(expectedContent):]
+            self.assertEqual(content, expectedContent)
+
+
+
+class HTTP1_0TestCase(unittest.TestCase, ResponseTestMixin):
+    requests = (
+        b"GET / HTTP/1.0\r\n"
+        b"\r\n"
+        b"GET / HTTP/1.1\r\n"
+        b"Accept: text/html\r\n"
+        b"\r\n")
+
+    expected_response = [
+        (b"HTTP/1.0 200 OK",
+         b"Request: /",
+         b"Command: GET",
+         b"Version: HTTP/1.0",
+         b"Content-Length: 13",
+         b"'''\nNone\n'''\n")]
+
+    def test_buffer(self):
+        """
+        Send requests over a channel and check responses match what is expected.
+        """
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = DummyHTTPHandler
+        a.makeConnection(b)
+        # one byte at a time, to stress it.
+        for byte in iterbytes(self.requests):
+            a.dataReceived(byte)
+        a.connectionLost(IOError("all done"))
+        value = b.value()
+        self.assertResponseEquals(value, self.expected_response)
+
+
+    def test_requestBodyTimeout(self):
+        """
+        L{HTTPChannel} resets its timeout whenever data from a request body is
+        delivered to it.
+        """
+        clock = Clock()
+        transport = StringTransport()
+        protocol = http.HTTPChannel()
+        protocol.timeOut = 100
+        protocol.callLater = clock.callLater
+        protocol.makeConnection(transport)
+        protocol.dataReceived(b'POST / HTTP/1.0\r\nContent-Length: 2\r\n\r\n')
+        clock.advance(99)
+        self.assertFalse(transport.disconnecting)
+        protocol.dataReceived(b'x')
+        clock.advance(99)
+        self.assertFalse(transport.disconnecting)
+        protocol.dataReceived(b'x')
+        self.assertEqual(len(protocol.requests), 1)
+
+
+
+class HTTP1_1TestCase(HTTP1_0TestCase):
+
+    requests = (
+        b"GET / HTTP/1.1\r\n"
+        b"Accept: text/html\r\n"
+        b"\r\n"
+        b"POST / HTTP/1.1\r\n"
+        b"Content-Length: 10\r\n"
+        b"\r\n"
+        b"0123456789POST / HTTP/1.1\r\n"
+        b"Content-Length: 10\r\n"
+        b"\r\n"
+        b"0123456789HEAD / HTTP/1.1\r\n"
+        b"\r\n")
+
+    expected_response = [
+        (b"HTTP/1.1 200 OK",
+         b"Request: /",
+         b"Command: GET",
+         b"Version: HTTP/1.1",
+         b"Content-Length: 13",
+         b"'''\nNone\n'''\n"),
+        (b"HTTP/1.1 200 OK",
+         b"Request: /",
+         b"Command: POST",
+         b"Version: HTTP/1.1",
+         b"Content-Length: 21",
+         b"'''\n10\n0123456789'''\n"),
+        (b"HTTP/1.1 200 OK",
+         b"Request: /",
+         b"Command: POST",
+         b"Version: HTTP/1.1",
+         b"Content-Length: 21",
+         b"'''\n10\n0123456789'''\n"),
+        (b"HTTP/1.1 200 OK",
+         b"Request: /",
+         b"Command: HEAD",
+         b"Version: HTTP/1.1",
+         b"Content-Length: 13",
+         b"")]
+
+
+
+class HTTP1_1_close_TestCase(HTTP1_0TestCase):
+
+    requests = (
+        b"GET / HTTP/1.1\r\n"
+        b"Accept: text/html\r\n"
+        b"Connection: close\r\n"
+        b"\r\n"
+        b"GET / HTTP/1.0\r\n"
+        b"\r\n")
+
+    expected_response = [
+        (b"HTTP/1.1 200 OK",
+         b"Connection: close",
+         b"Request: /",
+         b"Command: GET",
+         b"Version: HTTP/1.1",
+         b"Content-Length: 13",
+         b"'''\nNone\n'''\n")]
+
+
+
+class HTTP0_9TestCase(HTTP1_0TestCase):
+
+    requests = (
+        b"GET /\r\n")
+
+    expected_response = b"HTTP/1.1 400 Bad Request\r\n\r\n"
+
+
+    def assertResponseEquals(self, response, expectedResponse):
+        self.assertEqual(response, expectedResponse)
+
+
+class HTTPLoopbackTestCase(unittest.TestCase):
+
+    expectedHeaders = {b'request': b'/foo/bar',
+                       b'command': b'GET',
+                       b'version': b'HTTP/1.0',
+                       b'content-length': b'21'}
+    numHeaders = 0
+    gotStatus = 0
+    gotResponse = 0
+    gotEndHeaders = 0
+
+    def _handleStatus(self, version, status, message):
+        self.gotStatus = 1
+        self.assertEqual(version, b"HTTP/1.0")
+        self.assertEqual(status, b"200")
+
+    def _handleResponse(self, data):
+        self.gotResponse = 1
+        self.assertEqual(data, b"'''\n10\n0123456789'''\n")
+
+    def _handleHeader(self, key, value):
+        self.numHeaders = self.numHeaders + 1
+        self.assertEqual(self.expectedHeaders[key.lower()], value)
+
+    def _handleEndHeaders(self):
+        self.gotEndHeaders = 1
+        self.assertEqual(self.numHeaders, 4)
+
+    def testLoopback(self):
+        server = http.HTTPChannel()
+        server.requestFactory = DummyHTTPHandler
+        client = LoopbackHTTPClient()
+        client.handleResponse = self._handleResponse
+        client.handleHeader = self._handleHeader
+        client.handleEndHeaders = self._handleEndHeaders
+        client.handleStatus = self._handleStatus
+        d = loopback.loopbackAsync(server, client)
+        d.addCallback(self._cbTestLoopback)
+        return d
+
+    def _cbTestLoopback(self, ignored):
+        if not (self.gotStatus and self.gotResponse and self.gotEndHeaders):
+            raise RuntimeError(
+                "didn't got all callbacks %s"
+                % [self.gotStatus, self.gotResponse, self.gotEndHeaders])
+        del self.gotEndHeaders
+        del self.gotResponse
+        del self.gotStatus
+        del self.numHeaders
+
+
+
+def _prequest(**headers):
+    """
+    Make a request with the given request headers for the persistence tests.
+    """
+    request = http.Request(DummyChannel(), None)
+    for k, v in headers.items():
+        request.requestHeaders.setRawHeaders(networkString(k), v)
+    return request
+
+
+class PersistenceTestCase(unittest.TestCase):
+    """
+    Tests for persistent HTTP connections.
+    """
+
+    ptests = [
+        # (_prequest(connection=[b"Keep-Alive"]), b"HTTP/1.0", 1, {b'connection' : [b'Keep-Alive']}),
+        (_prequest(), b"HTTP/1.0", 0, {b'connection': None}),
+        (_prequest(connection=[b"close"]), b"HTTP/1.1", 0,
+         {b'connection' : [b'close']}),
+        (_prequest(), b"HTTP/1.1", 1, {b'connection': None}),
+        (_prequest(), b"HTTP/0.9", 0, {b'connection': None}),
+        ]
+
+
+    def testAlgorithm(self):
+        c = http.HTTPChannel()
+        for req, version, correctResult, resultHeaders in self.ptests:
+            result = c.checkPersistence(req, version)
+            self.assertEqual(result, correctResult)
+            for header in resultHeaders:
+                self.assertEqual(
+                    req.responseHeaders.getRawHeaders(header, None),
+                    resultHeaders[header])
+
+
+
+class IdentityTransferEncodingTests(TestCase):
+    """
+    Tests for L{_IdentityTransferDecoder}.
+    """
+    def setUp(self):
+        """
+        Create an L{_IdentityTransferDecoder} with callbacks hooked up so that
+        calls to them can be inspected.
+        """
+        self.data = []
+        self.finish = []
+        self.contentLength = 10
+        self.decoder = _IdentityTransferDecoder(
+            self.contentLength, self.data.append, self.finish.append)
+
+
+    def test_exactAmountReceived(self):
+        """
+        If L{_IdentityTransferDecoder.dataReceived} is called with a byte string
+        with length equal to the content length passed to
+        L{_IdentityTransferDecoder}'s initializer, the data callback is invoked
+        with that string and the finish callback is invoked with a zero-length
+        string.
+        """
+        self.decoder.dataReceived(b'x' * self.contentLength)
+        self.assertEqual(self.data, [b'x' * self.contentLength])
+        self.assertEqual(self.finish, [b''])
+
+
+    def test_shortStrings(self):
+        """
+        If L{_IdentityTransferDecoder.dataReceived} is called multiple times
+        with byte strings which, when concatenated, are as long as the content
+        length provided, the data callback is invoked with each string and the
+        finish callback is invoked only after the second call.
+        """
+        self.decoder.dataReceived(b'x')
+        self.assertEqual(self.data, [b'x'])
+        self.assertEqual(self.finish, [])
+        self.decoder.dataReceived(b'y' * (self.contentLength - 1))
+        self.assertEqual(self.data, [b'x', b'y' * (self.contentLength - 1)])
+        self.assertEqual(self.finish, [b''])
+
+
+    def test_longString(self):
+        """
+        If L{_IdentityTransferDecoder.dataReceived} is called with a byte string
+        with length greater than the provided content length, only the prefix
+        of that string up to the content length is passed to the data callback
+        and the remainder is passed to the finish callback.
+        """
+        self.decoder.dataReceived(b'x' * self.contentLength + b'y')
+        self.assertEqual(self.data, [b'x' * self.contentLength])
+        self.assertEqual(self.finish, [b'y'])
+
+
+    def test_rejectDataAfterFinished(self):
+        """
+        If data is passed to L{_IdentityTransferDecoder.dataReceived} after the
+        finish callback has been invoked, C{RuntimeError} is raised.
+        """
+        failures = []
+        def finish(bytes):
+            try:
+                decoder.dataReceived(b'foo')
+            except:
+                failures.append(Failure())
+        decoder = _IdentityTransferDecoder(5, self.data.append, finish)
+        decoder.dataReceived(b'x' * 4)
+        self.assertEqual(failures, [])
+        decoder.dataReceived(b'y')
+        failures[0].trap(RuntimeError)
+        self.assertEqual(
+            str(failures[0].value),
+            "_IdentityTransferDecoder cannot decode data after finishing")
+
+
+    def test_unknownContentLength(self):
+        """
+        If L{_IdentityTransferDecoder} is constructed with C{None} for the
+        content length, it passes all data delivered to it through to the data
+        callback.
+        """
+        data = []
+        finish = []
+        decoder = _IdentityTransferDecoder(None, data.append, finish.append)
+        decoder.dataReceived(b'x')
+        self.assertEqual(data, [b'x'])
+        decoder.dataReceived(b'y')
+        self.assertEqual(data, [b'x', b'y'])
+        self.assertEqual(finish, [])
+
+
+    def _verifyCallbacksUnreferenced(self, decoder):
+        """
+        Check the decoder's data and finish callbacks and make sure they are
+        C{None} in order to help avoid reference cycles.
+        """
+        self.assertIdentical(decoder.dataCallback, None)
+        self.assertIdentical(decoder.finishCallback, None)
+
+
+    def test_earlyConnectionLose(self):
+        """
+        L{_IdentityTransferDecoder.noMoreData} raises L{_DataLoss} if it is
+        called and the content length is known but not enough bytes have been
+        delivered.
+        """
+        self.decoder.dataReceived(b'x' * (self.contentLength - 1))
+        self.assertRaises(_DataLoss, self.decoder.noMoreData)
+        self._verifyCallbacksUnreferenced(self.decoder)
+
+
+    def test_unknownContentLengthConnectionLose(self):
+        """
+        L{_IdentityTransferDecoder.noMoreData} calls the finish callback and
+        raises L{PotentialDataLoss} if it is called and the content length is
+        unknown.
+        """
+        body = []
+        finished = []
+        decoder = _IdentityTransferDecoder(None, body.append, finished.append)
+        self.assertRaises(PotentialDataLoss, decoder.noMoreData)
+        self.assertEqual(body, [])
+        self.assertEqual(finished, [b''])
+        self._verifyCallbacksUnreferenced(decoder)
+
+
+    def test_finishedConnectionLose(self):
+        """
+        L{_IdentityTransferDecoder.noMoreData} does not raise any exception if
+        it is called when the content length is known and that many bytes have
+        been delivered.
+        """
+        self.decoder.dataReceived(b'x' * self.contentLength)
+        self.decoder.noMoreData()
+        self._verifyCallbacksUnreferenced(self.decoder)
+
+
+
+class ChunkedTransferEncodingTests(unittest.TestCase):
+    """
+    Tests for L{_ChunkedTransferDecoder}, which turns a byte stream encoded
+    using HTTP I{chunked} C{Transfer-Encoding} back into the original byte
+    stream.
+    """
+    def test_decoding(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} decodes chunked-encoded data
+        and passes the result to the specified callback.
+        """
+        L = []
+        p = http._ChunkedTransferDecoder(L.append, None)
+        p.dataReceived(b'3\r\nabc\r\n5\r\n12345\r\n')
+        p.dataReceived(b'a\r\n0123456789\r\n')
+        self.assertEqual(L, [b'abc', b'12345', b'0123456789'])
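+        # For reference: chunk sizes in the chunked encoding are hexadecimal,
+        # so b'a\r\n' above announces a ten-byte chunk (b'0123456789').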
+
+
+    def test_short(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} decodes chunks broken up and
+        delivered in multiple calls.
+        """
+        L = []
+        finished = []
+        p = http._ChunkedTransferDecoder(L.append, finished.append)
+        for s in iterbytes(b'3\r\nabc\r\n5\r\n12345\r\n0\r\n\r\n'):
+            p.dataReceived(s)
+        self.assertEqual(L, [b'a', b'b', b'c', b'1', b'2', b'3', b'4', b'5'])
+        self.assertEqual(finished, [b''])
+
+
+    def test_newlines(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} doesn't treat CR LF pairs
+        embedded in chunk bodies specially.
+        """
+        L = []
+        p = http._ChunkedTransferDecoder(L.append, None)
+        p.dataReceived(b'2\r\n\r\n\r\n')
+        self.assertEqual(L, [b'\r\n'])
+
+
+    def test_extensions(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} disregards chunk-extension
+        fields.
+        """
+        L = []
+        p = http._ChunkedTransferDecoder(L.append, None)
+        p.dataReceived(b'3; x-foo=bar\r\nabc\r\n')
+        self.assertEqual(L, [b'abc'])
+
+
+    def test_finish(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} interprets a zero-length
+        chunk as the end of the chunked data stream and calls the completion
+        callback.
+        """
+        finished = []
+        p = http._ChunkedTransferDecoder(None, finished.append)
+        p.dataReceived(b'0\r\n\r\n')
+        self.assertEqual(finished, [b''])
+
+
+    def test_extra(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} passes any bytes which come
+        after the terminating zero-length chunk to the completion callback.
+        """
+        finished = []
+        p = http._ChunkedTransferDecoder(None, finished.append)
+        p.dataReceived(b'0\r\n\r\nhello')
+        self.assertEqual(finished, [b'hello'])
+
+
+    def test_afterFinished(self):
+        """
+        L{_ChunkedTransferDecoder.dataReceived} raises C{RuntimeError} if it
+        is called after it has seen the last chunk.
+        """
+        p = http._ChunkedTransferDecoder(None, lambda bytes: None)
+        p.dataReceived(b'0\r\n\r\n')
+        self.assertRaises(RuntimeError, p.dataReceived, b'hello')
+
+
+    def test_earlyConnectionLose(self):
+        """
+        L{_ChunkedTransferDecoder.noMoreData} raises L{_DataLoss} if it is
+        called and the end of the last trailer has not yet been received.
+        """
+        parser = http._ChunkedTransferDecoder(None, lambda bytes: None)
+        parser.dataReceived(b'0\r\n\r')
+        exc = self.assertRaises(_DataLoss, parser.noMoreData)
+        self.assertEqual(
+            str(exc),
+            "Chunked decoder in 'TRAILER' state, still expecting more data "
+            "to get to 'FINISHED' state.")
+
+
+    def test_finishedConnectionLose(self):
+        """
+        L{_ChunkedTransferDecoder.noMoreData} does not raise any exception if
+        it is called after the terminal zero length chunk is received.
+        """
+        parser = http._ChunkedTransferDecoder(None, lambda bytes: None)
+        parser.dataReceived(b'0\r\n\r\n')
+        parser.noMoreData()
+
+
+    def test_reentrantFinishedNoMoreData(self):
+        """
+        L{_ChunkedTransferDecoder.noMoreData} can be called from the finished
+        callback without raising an exception.
+        """
+        errors = []
+        successes = []
+        def finished(extra):
+            try:
+                parser.noMoreData()
+            except:
+                errors.append(Failure())
+            else:
+                successes.append(True)
+        parser = http._ChunkedTransferDecoder(None, finished)
+        parser.dataReceived(b'0\r\n\r\n')
+        self.assertEqual(errors, [])
+        self.assertEqual(successes, [True])
+
+
+
+class ChunkingTestCase(unittest.TestCase):
+
+    strings = [b"abcv", b"", b"fdfsd423", b"Ffasfas\r\n",
+               b"523523\n\rfsdf", b"4234"]
+
+    def testChunks(self):
+        for s in self.strings:
+            chunked = b''.join(http.toChunk(s))
+            self.assertEqual((s, b''), http.fromChunk(chunked))
+        self.assertRaises(ValueError, http.fromChunk, b'-5\r\nmalformed!\r\n')
+
+    def testConcatenatedChunks(self):
+        chunked = b''.join([b''.join(http.toChunk(t)) for t in self.strings])
+        result = []
+        buffer = b""
+        for c in iterbytes(chunked):
+            buffer = buffer + c
+            try:
+                data, buffer = http.fromChunk(buffer)
+                result.append(data)
+            except ValueError:
+                pass
+        self.assertEqual(result, self.strings)
+
+
+
+class ParsingTestCase(unittest.TestCase):
+    """
+    Tests for protocol parsing in L{HTTPChannel}.
+    """
+    def setUp(self):
+        self.didRequest = False
+
+
+    def runRequest(self, httpRequest, requestClass, success=1):
+        httpRequest = httpRequest.replace(b"\n", b"\r\n")
+        b = StringTransport()
+        a = http.HTTPChannel()
+        a.requestFactory = requestClass
+        a.makeConnection(b)
+        # one byte at a time, to stress it.
+        for byte in iterbytes(httpRequest):
+            if a.transport.disconnecting:
+                break
+            a.dataReceived(byte)
+        a.connectionLost(IOError("all done"))
+        if success:
+            self.assertTrue(self.didRequest)
+        else:
+            self.assertFalse(self.didRequest)
+        return a
+
+
+    def test_basicAuth(self):
+        """
+        L{HTTPChannel} provides username and password information supplied in
+        an I{Authorization} header to the L{Request} which makes it available
+        via its C{getUser} and C{getPassword} methods.
+        """
+        requests = []
+        class Request(http.Request):
+            def process(self):
+                self.credentials = (self.getUser(), self.getPassword())
+                requests.append(self)
+
+        for u, p in [(b"foo", b"bar"), (b"hello", b"there:z")]:
+            s = base64.encodestring(b":".join((u, p))).strip()
+            f = b"GET / HTTP/1.0\nAuthorization: Basic " + s + b"\n\n"
+            self.runRequest(f, Request, 0)
+            req = requests.pop()
+            self.assertEqual((u, p), req.credentials)
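+        # For reference: the credentials are sent as base64(b"user:password"),
+        # e.g. base64.encodestring(b"foo:bar").strip() == b"Zm9vOmJhcg==".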
+
+
+    def test_headers(self):
+        """
+        Headers received by L{HTTPChannel} in a request are made available to
+        the L{Request}.
+        """
+        processed = []
+        class MyRequest(http.Request):
+            def process(self):
+                processed.append(self)
+                self.finish()
+
+        requestLines = [
+            b"GET / HTTP/1.0",
+            b"Foo: bar",
+            b"baz: Quux",
+            b"baz: quux",
+            b"",
+            b""]
+
+        self.runRequest(b'\n'.join(requestLines), MyRequest, 0)
+        [request] = processed
+        self.assertEqual(
+            request.requestHeaders.getRawHeaders(b'foo'), [b'bar'])
+        self.assertEqual(
+            request.requestHeaders.getRawHeaders(b'bAz'), [b'Quux', b'quux'])
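+        # For reference: header names are matched case-insensitively, which is
+        # why the b'bAz' lookup above returns both b'baz: ...' values.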
+
+
+    def test_tooManyHeaders(self):
+        """
+        L{HTTPChannel} enforces a limit of C{HTTPChannel.maxHeaders} on the
+        number of headers received per request.
+        """
+        processed = []
+        class MyRequest(http.Request):
+            def process(self):
+                processed.append(self)
+
+        requestLines = [b"GET / HTTP/1.0"]
+        for i in range(http.HTTPChannel.maxHeaders + 2):
+            requestLines.append(networkString("%s: foo" % (i,)))
+        requestLines.extend([b"", b""])
+
+        channel = self.runRequest(b"\n".join(requestLines), MyRequest, 0)
+        self.assertEqual(processed, [])
+        self.assertEqual(
+            channel.transport.value(),
+            b"HTTP/1.1 400 Bad Request\r\n\r\n")
+
+
+    def test_invalidHeaders(self):
+        """
+        If a Content-Length header with a non-integer value is received, a 400
+        (Bad Request) response is sent to the client and the connection is
+        closed.
+        """
+        requestLines = [b"GET / HTTP/1.0", b"Content-Length: x", b"", b""]
+        channel = self.runRequest(b"\n".join(requestLines), http.Request, 0)
+        self.assertEqual(
+            channel.transport.value(),
+            b"HTTP/1.1 400 Bad Request\r\n\r\n")
+        self.assertTrue(channel.transport.disconnecting)
+
+
+    def test_headerLimitPerRequest(self):
+        """
+        L{HTTPChannel} enforces the limit of C{HTTPChannel.maxHeaders} per
+        request so that headers received in an earlier request do not count
+        towards the limit when processing a later request.
+        """
+        processed = []
+        class MyRequest(http.Request):
+            def process(self):
+                processed.append(self)
+                self.finish()
+
+        self.patch(http.HTTPChannel, 'maxHeaders', 1)
+        requestLines = [
+            b"GET / HTTP/1.1",
+            b"Foo: bar",
+            b"",
+            b"",
+            b"GET / HTTP/1.1",
+            b"Bar: baz",
+            b"",
+            b""]
+
+        channel = self.runRequest(b"\n".join(requestLines), MyRequest, 0)
+        [first, second] = processed
+        self.assertEqual(first.getHeader(b'foo'), b'bar')
+        self.assertEqual(second.getHeader(b'bar'), b'baz')
+        self.assertEqual(
+            channel.transport.value(),
+            b'HTTP/1.1 200 OK\r\n'
+            b'Transfer-Encoding: chunked\r\n'
+            b'\r\n'
+            b'0\r\n'
+            b'\r\n'
+            b'HTTP/1.1 200 OK\r\n'
+            b'Transfer-Encoding: chunked\r\n'
+            b'\r\n'
+            b'0\r\n'
+            b'\r\n')
+
+
+    def testCookies(self):
+        """
+        Test cookies parsing and reading.
+        """
+        httpRequest = b'''\
+GET / HTTP/1.0
+Cookie: rabbit="eat carrot"; ninja=secret; spam="hey 1=1!"
+
+'''
+        cookies = {}
+        testcase = self
+        class MyRequest(http.Request):
+            def process(self):
+                for name in [b'rabbit', b'ninja', b'spam']:
+                    cookies[name] = self.getCookie(name)
+                testcase.didRequest = True
+                self.finish()
+
+        self.runRequest(httpRequest, MyRequest)
+
+        self.assertEqual(
+            cookies, {
+                b'rabbit': b'"eat carrot"',
+                b'ninja': b'secret',
+                b'spam': b'"hey 1=1!"'})
+
+
+    def testGET(self):
+        httpRequest = b'''\
+GET /?key=value&multiple=two+words&multiple=more%20words&empty= HTTP/1.0
+
+'''
+        method = []
+        args = []
+        testcase = self
+        class MyRequest(http.Request):
+            def process(self):
+                method.append(self.method)
+                args.extend([
+                        self.args[b"key"],
+                        self.args[b"empty"],
+                        self.args[b"multiple"]])
+                testcase.didRequest = True
+                self.finish()
+
+        self.runRequest(httpRequest, MyRequest)
+        self.assertEqual(method, [b"GET"])
+        self.assertEqual(
+            args, [[b"value"], [b""], [b"two words", b"more words"]])
+
+
+    def test_extraQuestionMark(self):
+        """
+        While only a single '?' is allowed in an URL, several other servers
+        allow several and pass all after the first through as part of the
+        query arguments.  Test that we emulate this behavior.
+        """
+        httpRequest = b'GET /foo?bar=?&baz=quux HTTP/1.0\n\n'
+
+        method = []
+        path = []
+        args = []
+        testcase = self
+        class MyRequest(http.Request):
+            def process(self):
+                method.append(self.method)
+                path.append(self.path)
+                args.extend([self.args[b'bar'], self.args[b'baz']])
+                testcase.didRequest = True
+                self.finish()
+
+        self.runRequest(httpRequest, MyRequest)
+        self.assertEqual(method, [b'GET'])
+        self.assertEqual(path, [b'/foo'])
+        self.assertEqual(args, [[b'?'], [b'quux']])
+
+
+    def test_formPOSTRequest(self):
+        """
+        The request body of a I{POST} request with a I{Content-Type} header
+        of I{application/x-www-form-urlencoded} is parsed according to that
+        content type and made available in the C{args} attribute of the
+        request object.  The original bytes of the request may still be read
+        from the C{content} attribute.
+        """
+        query = 'key=value&multiple=two+words&multiple=more%20words&empty='
+        httpRequest = networkString('''\
+POST / HTTP/1.0
+Content-Length: %d
+Content-Type: application/x-www-form-urlencoded
+
+%s''' % (len(query), query))
+
+        method = []
+        args = []
+        content = []
+        testcase = self
+        class MyRequest(http.Request):
+            def process(self):
+                method.append(self.method)
+                args.extend([
+                        self.args[b'key'], self.args[b'empty'],
+                        self.args[b'multiple']])
+                content.append(self.content.read())
+                testcase.didRequest = True
+                self.finish()
+
+        self.runRequest(httpRequest, MyRequest)
+        self.assertEqual(method, [b"POST"])
+        self.assertEqual(
+            args, [[b"value"], [b""], [b"two words", b"more words"]])
+        # Reading from the content file-like must produce the entire request
+        # body.
+        self.assertEqual(content, [networkString(query)])
+
+
+    def testMissingContentDisposition(self):
+        req = b'''\
+POST / HTTP/1.0
+Content-Type: multipart/form-data; boundary=AaB03x
+Content-Length: 103
+
+--AaB03x
+Content-Type: text/plain
+Content-Transfer-Encoding: quoted-printable
+
+abasdfg
+--AaB03x--
+'''
+        self.runRequest(req, http.Request, success=False)
+    if _PY3:
+        testMissingContentDisposition.skip = (
+            "Cannot parse multipart/form-data on Python 3.  "
+            "See http://bugs.python.org/issue12411 and #5511.")
+
+
+    def test_chunkedEncoding(self):
+        """
+        If a request uses the I{chunked} transfer encoding, the request body is
+        decoded accordingly before it is made available on the request.
+        """
+        httpRequest = b'''\
+GET / HTTP/1.0
+Content-Type: text/plain
+Transfer-Encoding: chunked
+
+6
+Hello,
+14
+ spam,eggs spam spam
+0
+
+'''
+        path = []
+        method = []
+        content = []
+        decoder = []
+        testcase = self
+        class MyRequest(http.Request):
+            def process(self):
+                content.append(self.content.fileno())
+                content.append(self.content.read())
+                method.append(self.method)
+                path.append(self.path)
+                decoder.append(self.channel._transferDecoder)
+                testcase.didRequest = True
+                self.finish()
+
+        self.runRequest(httpRequest, MyRequest)
+        # The tempfile API used to create content returns an
+        # instance of a different type depending on what platform
+        # we're running on.  The point here is to verify that the
+        # request body is in a file that's on the filesystem.
+        # Having a fileno method that returns an int is a somewhat
+        # close approximation of this. -exarkun
+        self.assertIsInstance(content[0], int)
+        self.assertEqual(content[1], b'Hello, spam,eggs spam spam')
+        self.assertEqual(method, [b'GET'])
+        self.assertEqual(path, [b'/'])
+        self.assertEqual(decoder, [None])
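+        # For reference: the b'14' chunk-size line in the request above is
+        # hexadecimal (20 bytes), covering b' spam,eggs spam spam'.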
+
+
+    def test_malformedChunkedEncoding(self):
+        """
+        If a request uses the I{chunked} transfer encoding, but provides an
+        invalid chunk length value, the request fails with a 400 error.
+        """
+        # See test_chunkedEncoding for the correct form of this request.
+        httpRequest = b'''\
+GET / HTTP/1.1
+Content-Type: text/plain
+Transfer-Encoding: chunked
+
+MALFORMED_LINE_THIS_SHOULD_BE_'6'
+Hello,
+14
+ spam,eggs spam spam
+0
+
+'''
+        didRequest = []
+
+        class MyRequest(http.Request):
+
+            def process(self):
+                # This request should fail, so this should never be called.
+                didRequest.append(True)
+
+        channel = self.runRequest(httpRequest, MyRequest, success=False)
+        self.assertFalse(didRequest, "Request.process called")
+        self.assertEqual(
+            channel.transport.value(),
+            b"HTTP/1.1 400 Bad Request\r\n\r\n")
+        self.assertTrue(channel.transport.disconnecting)
+
+
+
+class QueryArgumentsTestCase(unittest.TestCase):
+    def testParseqs(self):
+        self.assertEqual(
+            cgi.parse_qs(b"a=b&d=c;+=f"),
+            http.parse_qs(b"a=b&d=c;+=f"))
+        self.assertRaises(
+            ValueError, http.parse_qs, b"blah", strict_parsing=True)
+        self.assertEqual(
+            cgi.parse_qs(b"a=&b=c", keep_blank_values=1),
+            http.parse_qs(b"a=&b=c", keep_blank_values=1))
+        self.assertEqual(
+            cgi.parse_qs(b"a=&b=c"),
+            http.parse_qs(b"a=&b=c"))
+
+
+    def test_urlparse(self):
+        """
+        For a given URL, L{http.urlparse} should behave the same as L{urlparse},
+        except it should always return C{bytes}, never text.
+        """
+        def urls():
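+            # Generate a selection of bytes-only URLs.  Note that the yield is
+            # nested inside the port check, so only URLs with an explicit port
+            # are produced.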
+            for scheme in (b'http', b'https'):
+                for host in (b'example.com',):
+                    for port in (None, 100):
+                        for path in (b'', b'path'):
+                            if port is not None:
+                                host = host + b':' + networkString(str(port))
+                                yield urlunsplit((scheme, host, path, b'', b''))
+
+
+        def assertSameParsing(url, decode):
+            """
+            Verify that C{url} is parsed into the same objects by both
+            L{http.urlparse} and L{urlparse}.
+            """
+            urlToStandardImplementation = url
+            if decode:
+                urlToStandardImplementation = url.decode('ascii')
+
+            # stdlib urlparse will give back whatever type we give it.  To be
+            # able to compare the values meaningfully, if it gives back unicode,
+            # convert all the values to bytes.
+            standardResult = urlparse(urlToStandardImplementation)
+            if isinstance(standardResult.scheme, unicode):
+                # The choice of encoding is basically irrelevant.  The values
+                # are all in ASCII.  UTF-8 is, of course, the correct choice.
+                expected = (standardResult.scheme.encode('utf-8'),
+                            standardResult.netloc.encode('utf-8'),
+                            standardResult.path.encode('utf-8'),
+                            standardResult.params.encode('utf-8'),
+                            standardResult.query.encode('utf-8'),
+                            standardResult.fragment.encode('utf-8'))
+            else:
+                expected = (standardResult.scheme,
+                            standardResult.netloc,
+                            standardResult.path,
+                            standardResult.params,
+                            standardResult.query,
+                            standardResult.fragment)
+
+            scheme, netloc, path, params, query, fragment = http.urlparse(url)
+            self.assertEqual(
+                (scheme, netloc, path, params, query, fragment), expected)
+            self.assertIsInstance(scheme, bytes)
+            self.assertIsInstance(netloc, bytes)
+            self.assertIsInstance(path, bytes)
+            self.assertIsInstance(params, bytes)
+            self.assertIsInstance(query, bytes)
+            self.assertIsInstance(fragment, bytes)
+
+        # With caching, unicode then str
+        clear_cache()
+        for url in urls():
+            assertSameParsing(url, True)
+            assertSameParsing(url, False)
+
+        # With caching, str then unicode
+        clear_cache()
+        for url in urls():
+            assertSameParsing(url, False)
+            assertSameParsing(url, True)
+
+        # Without caching
+        for url in urls():
+            clear_cache()
+            assertSameParsing(url, True)
+            clear_cache()
+            assertSameParsing(url, False)
+
+
+    def test_urlparseRejectsUnicode(self):
+        """
+        L{http.urlparse} should reject unicode input early.
+        """
+        self.assertRaises(TypeError, http.urlparse, u'http://example.org/path')
+
+
+
+class ClientDriver(http.HTTPClient):
+    def handleStatus(self, version, status, message):
+        self.version = version
+        self.status = status
+        self.message = message
+
+class ClientStatusParsing(unittest.TestCase):
+    def testBaseline(self):
+        c = ClientDriver()
+        c.lineReceived(b'HTTP/1.0 201 foo')
+        self.assertEqual(c.version, b'HTTP/1.0')
+        self.assertEqual(c.status, b'201')
+        self.assertEqual(c.message, b'foo')
+
+    def testNoMessage(self):
+        c = ClientDriver()
+        c.lineReceived(b'HTTP/1.0 201')
+        self.assertEqual(c.version, b'HTTP/1.0')
+        self.assertEqual(c.status, b'201')
+        self.assertEqual(c.message, b'')
+
+    def testNoMessage_trailingSpace(self):
+        c = ClientDriver()
+        c.lineReceived(b'HTTP/1.0 201 ')
+        self.assertEqual(c.version, b'HTTP/1.0')
+        self.assertEqual(c.status, b'201')
+        self.assertEqual(c.message, b'')
+
+
+
+class RequestTests(unittest.TestCase, ResponseTestMixin):
+    """
+    Tests for L{http.Request}
+    """
+    def _compatHeadersTest(self, oldName, newName):
+        """
+        Verify that each of two different attributes which are associated with
+        the same state properly reflects changes made through the other.
+
+        This is used to test that the C{headers}/C{responseHeaders} and
+        C{received_headers}/C{requestHeaders} pairs interact properly.
+        """
+        req = http.Request(DummyChannel(), None)
+        getattr(req, newName).setRawHeaders(b"test", [b"lemur"])
+        self.assertEqual(getattr(req, oldName)[b"test"], b"lemur")
+        setattr(req, oldName, {b"foo": b"bar"})
+        self.assertEqual(
+            list(getattr(req, newName).getAllRawHeaders()),
+            [(b"Foo", [b"bar"])])
+        setattr(req, newName, http_headers.Headers())
+        self.assertEqual(getattr(req, oldName), {})
+
+
+    def test_received_headers(self):
+        """
+        L{Request.received_headers} is a backwards compatible API which
+        accesses and allows mutation of the state at L{Request.requestHeaders}.
+        """
+        self._compatHeadersTest('received_headers', 'requestHeaders')
+
+
+    def test_headers(self):
+        """
+        L{Request.headers} is a backwards compatible API which accesses and
+        allows mutation of the state at L{Request.responseHeaders}.
+        """
+        self._compatHeadersTest('headers', 'responseHeaders')
+
+
+    def test_getHeader(self):
+        """
+        L{http.Request.getHeader} returns the value of the named request
+        header.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.requestHeaders.setRawHeaders(b"test", [b"lemur"])
+        self.assertEqual(req.getHeader(b"test"), b"lemur")
+
+
+    def test_getHeaderReceivedMultiples(self):
+        """
+        When there are multiple values for a single request header,
+        L{http.Request.getHeader} returns the last value.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.requestHeaders.setRawHeaders(b"test", [b"lemur", b"panda"])
+        self.assertEqual(req.getHeader(b"test"), b"panda")
+
+
+    def test_getHeaderNotFound(self):
+        """
+        L{http.Request.getHeader} returns C{None} when asked for the value of a
+        request header which is not present.
+        """
+        req = http.Request(DummyChannel(), None)
+        self.assertEqual(req.getHeader(b"test"), None)
+
+
+    def test_getAllHeaders(self):
+        """
+        L{http.Request.getAllHeaders} returns a C{dict} mapping all request
+        header names to their corresponding values.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.requestHeaders.setRawHeaders(b"test", [b"lemur"])
+        self.assertEqual(req.getAllHeaders(), {b"test": b"lemur"})
+
+
+    def test_getAllHeadersNoHeaders(self):
+        """
+        L{http.Request.getAllHeaders} returns an empty C{dict} if there are no
+        request headers.
+        """
+        req = http.Request(DummyChannel(), None)
+        self.assertEqual(req.getAllHeaders(), {})
+
+
+    def test_getAllHeadersMultipleHeaders(self):
+        """
+        When there are multiple values for a single request header,
+        L{http.Request.getAllHeaders} returns only the last value.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.requestHeaders.setRawHeaders(b"test", [b"lemur", b"panda"])
+        self.assertEqual(req.getAllHeaders(), {b"test": b"panda"})
+
+
+    def test_setResponseCode(self):
+        """
+        L{http.Request.setResponseCode} takes a status code and causes it to be
+        used as the response status.
+        """
+        channel = DummyChannel()
+        req = http.Request(channel, None)
+        req.setResponseCode(201)
+        req.write(b'')
+        self.assertEqual(
+            channel.transport.written.getvalue().splitlines()[0],
+            b"(no clientproto yet) 201 Created")
+
+
+    def test_setResponseCodeAndMessage(self):
+        """
+        L{http.Request.setResponseCode} takes a status code and a message and
+        causes them to be used as the response status.
+        """
+        channel = DummyChannel()
+        req = http.Request(channel, None)
+        req.setResponseCode(202, "happily accepted")
+        req.write(b'')
+        self.assertEqual(
+            channel.transport.written.getvalue().splitlines()[0],
+            b'(no clientproto yet) 202 happily accepted')
+
+
+    def test_setResponseCodeAcceptsIntegers(self):
+        """
+        L{http.Request.setResponseCode} accepts C{int} for the code parameter
+        and raises L{TypeError} if passed anything else.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.setResponseCode(1)
+        self.assertRaises(TypeError, req.setResponseCode, "1")
+
+
+    def test_setResponseCodeAcceptsLongIntegers(self):
+        """
+        L{http.Request.setResponseCode} accepts C{long} for the code
+        parameter.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.setResponseCode(long(1))
+    if _PY3:
+        test_setResponseCodeAcceptsLongIntegers.skip = (
+            "Python 3 has no separate long integer type.")
+
+
+    def test_setHost(self):
+        """
+        L{http.Request.setHost} sets the value of the host request header.
+        The port should not be added because it is the default.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.setHost(b"example.com", 80)
+        self.assertEqual(
+            req.requestHeaders.getRawHeaders(b"host"), [b"example.com"])
+
+
+    def test_setHostSSL(self):
+        """
+        L{http.Request.setHost} sets the value of the host request header.
+        The port should not be added because it is the default.
+        """
+        d = DummyChannel()
+        d.transport = DummyChannel.SSL()
+        req = http.Request(d, None)
+        req.setHost(b"example.com", 443)
+        self.assertEqual(
+            req.requestHeaders.getRawHeaders(b"host"), [b"example.com"])
+
+
+    def test_setHostNonDefaultPort(self):
+        """
+        L{http.Request.setHost} sets the value of the host request header.
+        The port should be added because it is not the default.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.setHost(b"example.com", 81)
+        self.assertEqual(
+            req.requestHeaders.getRawHeaders(b"host"), [b"example.com:81"])
+
+
+    def test_setHostSSLNonDefaultPort(self):
+        """
+        L{http.Request.setHost} sets the value of the host request header.
+        The port should be added because it is not the default.
+        """
+        d = DummyChannel()
+        d.transport = DummyChannel.SSL()
+        req = http.Request(d, None)
+        req.setHost(b"example.com", 81)
+        self.assertEqual(
+            req.requestHeaders.getRawHeaders(b"host"), [b"example.com:81"])
+
+
+    def test_setHeader(self):
+        """
+        L{http.Request.setHeader} sets the value of the given response header.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.setHeader(b"test", b"lemur")
+        self.assertEqual(req.responseHeaders.getRawHeaders(b"test"), [b"lemur"])
+
+
+    def test_firstWrite(self):
+        """
+        For an HTTP 1.0 request, L{http.Request.write} sends an HTTP 1.0
+        Response-Line and whatever response headers are set.
+        """
+        req = http.Request(DummyChannel(), None)
+        trans = StringTransport()
+
+        req.transport = trans
+
+        req.setResponseCode(200)
+        req.clientproto = b"HTTP/1.0"
+        req.responseHeaders.setRawHeaders(b"test", [b"lemur"])
+        req.write(b'Hello')
+
+        self.assertResponseEquals(
+            trans.value(),
+            [(b"HTTP/1.0 200 OK",
+              b"Test: lemur",
+              b"Hello")])
+
+
+    def test_nonByteHeaderValue(self):
+        """
+        L{http.Request.write} casts non-bytes header values to bytes
+        transparently.
+        """
+        req = http.Request(DummyChannel(), None)
+        trans = StringTransport()
+
+        req.transport = trans
+
+        req.setResponseCode(200)
+        req.clientproto = b"HTTP/1.0"
+        req.responseHeaders.setRawHeaders(b"test", [10])
+        req.write(b'Hello')
+
+        self.assertResponseEquals(
+            trans.value(),
+            [(b"HTTP/1.0 200 OK",
+              b"Test: 10",
+              b"Hello")])
+
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_nonByteHeaderValue])
+        self.assertEqual(1, len(warnings))
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "Passing non-bytes header values is deprecated since "
+            "Twisted 12.3. Pass only bytes instead.")
+
+
+    def test_firstWriteHTTP11Chunked(self):
+        """
+        For an HTTP 1.1 request, L{http.Request.write} sends an HTTP 1.1
+        Response-Line, whatever response headers are set, and uses chunked
+        encoding for the response body.
+        """
+        req = http.Request(DummyChannel(), None)
+        trans = StringTransport()
+
+        req.transport = trans
+
+        req.setResponseCode(200)
+        req.clientproto = b"HTTP/1.1"
+        req.responseHeaders.setRawHeaders(b"test", [b"lemur"])
+        req.write(b'Hello')
+        req.write(b'World!')
+
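+        # With chunked encoding, each write is framed as the hexadecimal
+        # length of the payload, CRLF, the payload itself, and a trailing CRLF.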
+        self.assertResponseEquals(
+            trans.value(),
+            [(b"HTTP/1.1 200 OK",
+              b"Test: lemur",
+              b"Transfer-Encoding: chunked",
+              b"5\r\nHello\r\n6\r\nWorld!\r\n")])
+
+
+    def test_firstWriteLastModified(self):
+        """
+        For an HTTP 1.0 request for a resource with a known last modified time,
+        L{http.Request.write} sends an HTTP Response-Line, whatever response
+        headers are set, and a last-modified header with that time.
+        """
+        req = http.Request(DummyChannel(), None)
+        trans = StringTransport()
+
+        req.transport = trans
+
+        req.setResponseCode(200)
+        req.clientproto = b"HTTP/1.0"
+        req.lastModified = 0
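+        # A lastModified of 0 is the Unix epoch, rendered below as the HTTP
+        # date "Thu, 01 Jan 1970 00:00:00 GMT".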
+        req.responseHeaders.setRawHeaders(b"test", [b"lemur"])
+        req.write(b'Hello')
+
+        self.assertResponseEquals(
+            trans.value(),
+            [(b"HTTP/1.0 200 OK",
+              b"Test: lemur",
+              b"Last-Modified: Thu, 01 Jan 1970 00:00:00 GMT",
+              b"Hello")])
+
+
+    def test_parseCookies(self):
+        """
+        L{http.Request.parseCookies} extracts cookies from C{requestHeaders}
+        and adds them to C{received_cookies}.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.requestHeaders.setRawHeaders(
+            b"cookie", [b'test="lemur"; test2="panda"'])
+        req.parseCookies()
+        self.assertEqual(
+            req.received_cookies, {b"test": b'"lemur"', b"test2": b'"panda"'})
+
+
+    def test_parseCookiesMultipleHeaders(self):
+        """
+        L{http.Request.parseCookies} can extract cookies from multiple Cookie
+        headers.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.requestHeaders.setRawHeaders(
+            b"cookie", [b'test="lemur"', b'test2="panda"'])
+        req.parseCookies()
+        self.assertEqual(
+            req.received_cookies, {b"test": b'"lemur"', b"test2": b'"panda"'})
+
+
+    def test_connectionLost(self):
+        """
+        L{http.Request.connectionLost} closes L{Request.content} and drops the
+        reference to the L{HTTPChannel} to assist with garbage collection.
+        """
+        req = http.Request(DummyChannel(), None)
+
+        # Cause Request.content to be created at all.
+        req.gotLength(10)
+
+        # Grab a reference to content in case the Request drops it later on.
+        content = req.content
+
+        # Put some bytes into it
+        req.handleContentChunk(b"hello")
+
+        # Then something goes wrong and content should get closed.
+        req.connectionLost(Failure(ConnectionLost("Finished")))
+        self.assertTrue(content.closed)
+        self.assertIdentical(req.channel, None)
+
+
+    def test_registerProducerTwiceFails(self):
+        """
+        Calling L{Request.registerProducer} when a producer is already
+        registered raises ValueError.
+        """
+        req = http.Request(DummyChannel(), None)
+        req.registerProducer(DummyProducer(), True)
+        self.assertRaises(
+            ValueError, req.registerProducer, DummyProducer(), True)
+
+
+    def test_registerProducerWhenQueuedPausesPushProducer(self):
+        """
+        Calling L{Request.registerProducer} with an IPushProducer when the
+        request is queued pauses the producer.
+        """
+        req = http.Request(DummyChannel(), True)
+        producer = DummyProducer()
+        req.registerProducer(producer, True)
+        self.assertEqual(['pause'], producer.events)
+
+
+    def test_registerProducerWhenQueuedDoesntPausePullProducer(self):
+        """
+        Calling L{Request.registerProducer} with an IPullProducer when the
+        request is queued does not pause the producer, because it doesn't make
+        sense to pause a pull producer.
+        """
+        req = http.Request(DummyChannel(), True)
+        producer = DummyProducer()
+        req.registerProducer(producer, False)
+        self.assertEqual([], producer.events)
+
+
+    def test_registerProducerWhenQueuedDoesntRegisterPushProducer(self):
+        """
+        Calling L{Request.registerProducer} with an IPushProducer when the
+        request is queued does not register the producer on the request's
+        transport.
+        """
+        self.assertIdentical(
+            None, getattr(http.StringTransport, 'registerProducer', None),
+            "StringTransport cannot implement registerProducer for this test "
+            "to be valid.")
+        req = http.Request(DummyChannel(), True)
+        producer = DummyProducer()
+        req.registerProducer(producer, True)
+        # This is a roundabout assertion: http.StringTransport doesn't
+        # implement registerProducer, so Request.registerProducer can't have
+        # tried to call registerProducer on the transport.
+        self.assertIsInstance(req.transport, http.StringTransport)
+
+
+    def test_registerProducerWhenQueuedDoesntRegisterPullProducer(self):
+        """
+        Calling L{Request.registerProducer} with an IPullProducer when the
+        request is queued does not register the producer on the request's
+        transport.
+        """
+        self.assertIdentical(
+            None, getattr(http.StringTransport, 'registerProducer', None),
+            "StringTransport cannot implement registerProducer for this test "
+            "to be valid.")
+        req = http.Request(DummyChannel(), True)
+        producer = DummyProducer()
+        req.registerProducer(producer, False)
+        # This is a roundabout assertion: http.StringTransport doesn't
+        # implement registerProducer, so Request.registerProducer can't have
+        # tried to call registerProducer on the transport.
+        self.assertIsInstance(req.transport, http.StringTransport)
+
+
+    def test_registerProducerWhenNotQueuedRegistersPushProducer(self):
+        """
+        Calling L{Request.registerProducer} with an IPushProducer when the
+        request is not queued registers the producer as a push producer on the
+        request's transport.
+        """
+        req = http.Request(DummyChannel(), False)
+        producer = DummyProducer()
+        req.registerProducer(producer, True)
+        self.assertEqual([(producer, True)], req.transport.producers)
+
+
+    def test_registerProducerWhenNotQueuedRegistersPullProducer(self):
+        """
+        Calling L{Request.registerProducer} with an IPullProducer when the
+        request is not queued registers the producer as a pull producer on the
+        request's transport.
+        """
+        req = http.Request(DummyChannel(), False)
+        producer = DummyProducer()
+        req.registerProducer(producer, False)
+        self.assertEqual([(producer, False)], req.transport.producers)
+
+
+    def test_connectionLostNotification(self):
+        """
+        L{Request.connectionLost} triggers all finish notification Deferreds
+        and cleans up per-request state.
+        """
+        d = DummyChannel()
+        request = http.Request(d, True)
+        finished = request.notifyFinish()
+        request.connectionLost(Failure(ConnectionLost("Connection done")))
+        self.assertIdentical(request.channel, None)
+        return self.assertFailure(finished, ConnectionLost)
+
+
+    def test_finishNotification(self):
+        """
+        L{Request.finish} triggers all finish notification Deferreds.
+        """
+        request = http.Request(DummyChannel(), False)
+        finished = request.notifyFinish()
+        # Force the request to have a non-None content attribute.  This is
+        # probably a bug in Request.
+        request.gotLength(1)
+        request.finish()
+        return finished
+
+
+    def test_writeAfterFinish(self):
+        """
+        Calling L{Request.write} after L{Request.finish} has been called results
+        in a L{RuntimeError} being raised.
+        """
+        request = http.Request(DummyChannel(), False)
+        finished = request.notifyFinish()
+        # Force the request to have a non-None content attribute.  This is
+        # probably a bug in Request.
+        request.gotLength(1)
+        request.write(b'foobar')
+        request.finish()
+        self.assertRaises(RuntimeError, request.write, b'foobar')
+        return finished
+
+
+    def test_finishAfterConnectionLost(self):
+        """
+        Calling L{Request.finish} after L{Request.connectionLost} has been
+        called results in a L{RuntimeError} being raised.
+        """
+        channel = DummyChannel()
+        req = http.Request(channel, False)
+        req.connectionLost(Failure(ConnectionLost("The end.")))
+        self.assertRaises(RuntimeError, req.finish)
+
+
+
+class MultilineHeadersTestCase(unittest.TestCase):
+    """
+    Tests to exercise handling of multiline headers by L{HTTPClient}.  RFCs 1945
+    (HTTP 1.0) and 2616 (HTTP 1.1) state that HTTP message header fields can
+    span multiple lines if each extra line is preceded by at least one space or
+    horizontal tab.
+    """
+    def setUp(self):
+        """
+        Initialize variables used to verify that the header-processing functions
+        are getting called.
+        """
+        self.handleHeaderCalled = False
+        self.handleEndHeadersCalled = False
+
+    # Dictionary of sample complete HTTP header key/value pairs, including
+    # multiline headers.
+    expectedHeaders = {b'Content-Length': b'10',
+                       b'X-Multiline' : b'line-0\tline-1',
+                       b'X-Multiline2' : b'line-2 line-3'}
+
+    def ourHandleHeader(self, key, val):
+        """
+        Dummy implementation of L{HTTPClient.handleHeader}.
+        """
+        self.handleHeaderCalled = True
+        self.assertEqual(val, self.expectedHeaders[key])
+
+
+    def ourHandleEndHeaders(self):
+        """
+        Dummy implementation of L{HTTPClient.handleEndHeaders}.
+        """
+        self.handleEndHeadersCalled = True
+
+
+    def test_extractHeader(self):
+        """
+        A header isn't processed by L{HTTPClient.extractHeader} until it is
+        confirmed in L{HTTPClient.lineReceived} that the header has been
+        received completely.
+        """
+        c = ClientDriver()
+        c.handleHeader = self.ourHandleHeader
+        c.handleEndHeaders = self.ourHandleEndHeaders
+
+        c.lineReceived(b'HTTP/1.0 201')
+        c.lineReceived(b'Content-Length: 10')
+        self.assertIdentical(c.length, None)
+        self.assertFalse(self.handleHeaderCalled)
+        self.assertFalse(self.handleEndHeadersCalled)
+
+        # Signal end of headers.
+        c.lineReceived(b'')
+        self.assertTrue(self.handleHeaderCalled)
+        self.assertTrue(self.handleEndHeadersCalled)
+
+        self.assertEqual(c.length, 10)
+
+
+    def test_noHeaders(self):
+        """
+        An HTTP response with no headers will not cause any calls to
+        L{handleHeader} but will cause L{handleEndHeaders} to be called on
+        L{HTTPClient} subclasses.
+        """
+        c = ClientDriver()
+        c.handleHeader = self.ourHandleHeader
+        c.handleEndHeaders = self.ourHandleEndHeaders
+        c.lineReceived(b'HTTP/1.0 201')
+
+        # Signal end of headers.
+        c.lineReceived(b'')
+        self.assertFalse(self.handleHeaderCalled)
+        self.assertTrue(self.handleEndHeadersCalled)
+
+        self.assertEqual(c.version, b'HTTP/1.0')
+        self.assertEqual(c.status, b'201')
+
+
+    def test_multilineHeaders(self):
+        """
+        L{HTTPClient} parses multiline headers by buffering header lines until
+        an empty line or a line that does not start with whitespace hits
+        lineReceived, confirming that the header has been received completely.
+        """
+        c = ClientDriver()
+        c.handleHeader = self.ourHandleHeader
+        c.handleEndHeaders = self.ourHandleEndHeaders
+
+        c.lineReceived(b'HTTP/1.0 201')
+        c.lineReceived(b'X-Multiline: line-0')
+        self.assertFalse(self.handleHeaderCalled)
+        # Start continuing line with a tab.
+        c.lineReceived(b'\tline-1')
+        c.lineReceived(b'X-Multiline2: line-2')
+        # The previous header must be complete, so now it can be processed.
+        self.assertTrue(self.handleHeaderCalled)
+        # Start continuing line with a space.
+        c.lineReceived(b' line-3')
+        c.lineReceived(b'Content-Length: 10')
+
+        # Signal end of headers.
+        c.lineReceived(b'')
+        self.assertTrue(self.handleEndHeadersCalled)
+
+        self.assertEqual(c.version, b'HTTP/1.0')
+        self.assertEqual(c.status, b'201')
+        self.assertEqual(c.length, 10)
+
+
+
+class Expect100ContinueServerTests(unittest.TestCase, ResponseTestMixin):
+    """
+    Test that the HTTP server handles 'Expect: 100-continue' header correctly.
+
+    The tests in this class all assume a simplistic behavior where user code
+    cannot choose to deny a request. Once ticket #288 is implemented and user
+        code can run before the body of a POST is processed, this should be
+    extended to support overriding this behavior.
+    """
+
+    def test_HTTP10(self):
+        """
+        HTTP/1.0 requests do not get 100-continue returned, even if 'Expect:
+        100-continue' is included (RFC 2616 10.1.1).
+        """
+        transport = StringTransport()
+        channel = http.HTTPChannel()
+        channel.requestFactory = DummyHTTPHandler
+        channel.makeConnection(transport)
+        channel.dataReceived(b"GET / HTTP/1.0\r\n")
+        channel.dataReceived(b"Host: www.example.com\r\n")
+        channel.dataReceived(b"Content-Length: 3\r\n")
+        channel.dataReceived(b"Expect: 100-continue\r\n")
+        channel.dataReceived(b"\r\n")
+        self.assertEqual(transport.value(), b"")
+        channel.dataReceived(b"abc")
+        self.assertResponseEquals(
+            transport.value(),
+            [(b"HTTP/1.0 200 OK",
+              b"Command: GET",
+              b"Content-Length: 13",
+              b"Version: HTTP/1.0",
+              b"Request: /",
+              b"'''\n3\nabc'''\n")])
+
+
+    def test_expect100ContinueHeader(self):
+        """
+        If an HTTP/1.1 client sends an 'Expect: 100-continue' header, the server
+        responds with a 100 response code before handling the request body, if
+        any. The normal resource rendering code will then be called, which
+        will send an additional response code.
+        """
+        transport = StringTransport()
+        channel = http.HTTPChannel()
+        channel.requestFactory = DummyHTTPHandler
+        channel.makeConnection(transport)
+        channel.dataReceived(b"GET / HTTP/1.1\r\n")
+        channel.dataReceived(b"Host: www.example.com\r\n")
+        channel.dataReceived(b"Expect: 100-continue\r\n")
+        channel.dataReceived(b"Content-Length: 3\r\n")
+        # The 100 continue response is not sent until all headers are
+        # received:
+        self.assertEqual(transport.value(), b"")
+        channel.dataReceived(b"\r\n")
+        # The 100 continue response is sent *before* the body is even
+        # received:
+        self.assertEqual(transport.value(), b"HTTP/1.1 100 Continue\r\n\r\n")
+        channel.dataReceived(b"abc")
+        response = transport.value()
+        self.assertTrue(
+            response.startswith(b"HTTP/1.1 100 Continue\r\n\r\n"))
+        response = response[len(b"HTTP/1.1 100 Continue\r\n\r\n"):]
+        self.assertResponseEquals(
+            response,
+            [(b"HTTP/1.1 200 OK",
+              b"Command: GET",
+              b"Content-Length: 13",
+              b"Version: HTTP/1.1",
+              b"Request: /",
+              b"'''\n3\nabc'''\n")])
+
+
+    def test_expect100ContinueWithPipelining(self):
+        """
+        If an HTTP/1.1 client sends an 'Expect: 100-continue' header, followed
+        by another pipelined request, the 100 response does not interfere with
+        the response to the second request.
+        """
+        transport = StringTransport()
+        channel = http.HTTPChannel()
+        channel.requestFactory = DummyHTTPHandler
+        channel.makeConnection(transport)
+        channel.dataReceived(
+            b"GET / HTTP/1.1\r\n"
+            b"Host: www.example.com\r\n"
+            b"Expect: 100-continue\r\n"
+            b"Content-Length: 3\r\n"
+            b"\r\nabc"
+            b"POST /foo HTTP/1.1\r\n"
+            b"Host: www.example.com\r\n"
+            b"Content-Length: 4\r\n"
+            b"\r\ndefg")
+        response = transport.value()
+        self.assertTrue(
+            response.startswith(b"HTTP/1.1 100 Continue\r\n\r\n"))
+        response = response[len(b"HTTP/1.1 100 Continue\r\n\r\n"):]
+        self.assertResponseEquals(
+            response,
+            [(b"HTTP/1.1 200 OK",
+              b"Command: GET",
+              b"Content-Length: 13",
+              b"Version: HTTP/1.1",
+              b"Request: /",
+              b"'''\n3\nabc'''\n"),
+             (b"HTTP/1.1 200 OK",
+              b"Command: POST",
+              b"Content-Length: 14",
+              b"Version: HTTP/1.1",
+              b"Request: /foo",
+              b"'''\n4\ndefg'''\n")])
diff --git a/ThirdParty/Twisted/twisted/web/test/test_http_headers.py b/ThirdParty/Twisted/twisted/web/test/test_http_headers.py
new file mode 100644
index 0000000..6b4b642
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_http_headers.py
@@ -0,0 +1,631 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.http_headers}.
+"""
+
+from __future__ import division, absolute_import
+
+import sys
+
+from twisted.python.compat import _PY3, set
+from twisted.trial.unittest import TestCase
+from twisted.web.http_headers import _DictHeaders, Headers
+
+class HeadersTests(TestCase):
+    """
+    Tests for L{Headers}.
+    """
+    def test_initializer(self):
+        """
+        The header values passed to L{Headers.__init__} can be retrieved via
+        L{Headers.getRawHeaders}.
+        """
+        h = Headers({b'Foo': [b'bar']})
+        self.assertEqual(h.getRawHeaders(b'foo'), [b'bar'])
+
+
+    def test_setRawHeaders(self):
+        """
+        L{Headers.setRawHeaders} sets the header values for the given
+        header name to the sequence of byte string values.
+        """
+        rawValue = [b"value1", b"value2"]
+        h = Headers()
+        h.setRawHeaders(b"test", rawValue)
+        self.assertTrue(h.hasHeader(b"test"))
+        self.assertTrue(h.hasHeader(b"Test"))
+        self.assertEqual(h.getRawHeaders(b"test"), rawValue)
+
+
+    def test_rawHeadersTypeChecking(self):
+        """
+        L{Headers.setRawHeaders} requires values to be of type list.
+        """
+        h = Headers()
+        self.assertRaises(TypeError, h.setRawHeaders, {b'Foo': b'bar'})
+
+
+    def test_addRawHeader(self):
+        """
+        L{Headers.addRawHeader} adds a new value for a given header.
+        """
+        h = Headers()
+        h.addRawHeader(b"test", b"lemur")
+        self.assertEqual(h.getRawHeaders(b"test"), [b"lemur"])
+        h.addRawHeader(b"test", b"panda")
+        self.assertEqual(h.getRawHeaders(b"test"), [b"lemur", b"panda"])
+
+
+    def test_getRawHeadersNoDefault(self):
+        """
+        L{Headers.getRawHeaders} returns C{None} if the header is not found and
+        no default is specified.
+        """
+        self.assertIdentical(Headers().getRawHeaders(b"test"), None)
+
+
+    def test_getRawHeadersDefaultValue(self):
+        """
+        L{Headers.getRawHeaders} returns the specified default value when no
+        header is found.
+        """
+        h = Headers()
+        default = object()
+        self.assertIdentical(h.getRawHeaders(b"test", default), default)
+
+
+    def test_getRawHeaders(self):
+        """
+        L{Headers.getRawHeaders} returns the values which have been set for a
+        given header.
+        """
+        h = Headers()
+        h.setRawHeaders(b"test", [b"lemur"])
+        self.assertEqual(h.getRawHeaders(b"test"), [b"lemur"])
+        self.assertEqual(h.getRawHeaders(b"Test"), [b"lemur"])
+
+
+    def test_hasHeaderTrue(self):
+        """
+        Check that L{Headers.hasHeader} returns C{True} when the given header
+        is found.
+        """
+        h = Headers()
+        h.setRawHeaders(b"test", [b"lemur"])
+        self.assertTrue(h.hasHeader(b"test"))
+        self.assertTrue(h.hasHeader(b"Test"))
+
+
+    def test_hasHeaderFalse(self):
+        """
+        L{Headers.hasHeader} returns C{False} when the given header is not
+        found.
+        """
+        self.assertFalse(Headers().hasHeader(b"test"))
+
+
+    def test_removeHeader(self):
+        """
+        Check that L{Headers.removeHeader} removes the given header.
+        """
+        h = Headers()
+
+        h.setRawHeaders(b"foo", [b"lemur"])
+        self.assertTrue(h.hasHeader(b"foo"))
+        h.removeHeader(b"foo")
+        self.assertFalse(h.hasHeader(b"foo"))
+
+        h.setRawHeaders(b"bar", [b"panda"])
+        self.assertTrue(h.hasHeader(b"bar"))
+        h.removeHeader(b"Bar")
+        self.assertFalse(h.hasHeader(b"bar"))
+
+
+    def test_removeHeaderDoesntExist(self):
+        """
+        L{Headers.removeHeader} is a no-operation when the specified header is
+        not found.
+        """
+        h = Headers()
+        h.removeHeader(b"test")
+        self.assertEqual(list(h.getAllRawHeaders()), [])
+
+
+    def test_canonicalNameCaps(self):
+        """
+        L{Headers._canonicalNameCaps} returns the canonical capitalization for
+        the given header.
+        """
+        h = Headers()
+        self.assertEqual(h._canonicalNameCaps(b"test"), b"Test")
+        self.assertEqual(h._canonicalNameCaps(b"test-stuff"), b"Test-Stuff")
+        self.assertEqual(h._canonicalNameCaps(b"content-md5"), b"Content-MD5")
+        self.assertEqual(h._canonicalNameCaps(b"dnt"), b"DNT")
+        self.assertEqual(h._canonicalNameCaps(b"etag"), b"ETag")
+        self.assertEqual(h._canonicalNameCaps(b"p3p"), b"P3P")
+        self.assertEqual(h._canonicalNameCaps(b"te"), b"TE")
+        self.assertEqual(h._canonicalNameCaps(b"www-authenticate"),
+                          b"WWW-Authenticate")
+        self.assertEqual(h._canonicalNameCaps(b"x-xss-protection"),
+                          b"X-XSS-Protection")
+
+
+    def test_getAllRawHeaders(self):
+        """
+        L{Headers.getAllRawHeaders} returns an iterable of (k, v) pairs, where
+        C{k} is the canonicalized representation of the header name, and C{v}
+        is a sequence of values.
+        """
+        h = Headers()
+        h.setRawHeaders(b"test", [b"lemurs"])
+        h.setRawHeaders(b"www-authenticate", [b"basic aksljdlk="])
+
+        allHeaders = set([(k, tuple(v)) for k, v in h.getAllRawHeaders()])
+
+        self.assertEqual(allHeaders,
+                          set([(b"WWW-Authenticate", (b"basic aksljdlk=",)),
+                               (b"Test", (b"lemurs",))]))
+
+
+    def test_headersComparison(self):
+        """
+        A L{Headers} instance compares equal to itself and to another
+        L{Headers} instance with the same values.
+        """
+        first = Headers()
+        first.setRawHeaders(b"foo", [b"panda"])
+        second = Headers()
+        second.setRawHeaders(b"foo", [b"panda"])
+        third = Headers()
+        third.setRawHeaders(b"foo", [b"lemur", b"panda"])
+        self.assertEqual(first, first)
+        self.assertEqual(first, second)
+        self.assertNotEqual(first, third)
+
+
+    def test_otherComparison(self):
+        """
+        An instance of L{Headers} does not compare equal to other unrelated
+        objects.
+        """
+        h = Headers()
+        self.assertNotEqual(h, ())
+        self.assertNotEqual(h, object())
+        self.assertNotEqual(h, b"foo")
+
+
+    def test_repr(self):
+        """
+        The L{repr} of a L{Headers} instance shows the names and values of all
+        the headers it contains.
+        """
+        foo = b"foo"
+        bar = b"bar"
+        baz = b"baz"
+        self.assertEqual(
+            repr(Headers({foo: [bar, baz]})),
+            "Headers({%r: [%r, %r]})" % (foo, bar, baz))
+
+
+    def test_subclassRepr(self):
+        """
+        The L{repr} of an instance of a subclass of L{Headers} uses the name
+        of the subclass instead of the string C{"Headers"}.
+        """
+        foo = b"foo"
+        bar = b"bar"
+        baz = b"baz"
+        class FunnyHeaders(Headers):
+            pass
+        self.assertEqual(
+            repr(FunnyHeaders({foo: [bar, baz]})),
+            "FunnyHeaders({%r: [%r, %r]})" % (foo, bar, baz))
+
+
+    def test_copy(self):
+        """
+        L{Headers.copy} creates a new independent copy of an existing
+        L{Headers} instance, so that later modifications to one copy do not
+        affect the other.
+        """
+        h = Headers()
+        h.setRawHeaders(b'test', [b'foo'])
+        i = h.copy()
+        self.assertEqual(i.getRawHeaders(b'test'), [b'foo'])
+        h.addRawHeader(b'test', b'bar')
+        self.assertEqual(i.getRawHeaders(b'test'), [b'foo'])
+        i.addRawHeader(b'test', b'baz')
+        self.assertEqual(h.getRawHeaders(b'test'), [b'foo', b'bar'])
+
+
+
+class HeaderDictTests(TestCase):
+    """
+    Tests for the backwards compatible C{dict} interface for L{Headers}
+    provided by L{_DictHeaders}.
+    """
+    def headers(self, **kw):
+        """
+        Create a L{Headers} instance populated with the header name/values
+        specified by C{kw} and a L{_DictHeaders} wrapped around it and return
+        them both.
+        """
+        h = Headers()
+        for k, v in kw.items():
+            h.setRawHeaders(k.encode('ascii'), v)
+        return h, _DictHeaders(h)
+
+
+    def test_getItem(self):
+        """
+        L{_DictHeaders.__getitem__} returns a single header for the given name.
+        """
+        headers, wrapper = self.headers(test=[b"lemur"])
+        self.assertEqual(wrapper[b"test"], b"lemur")
+
+
+    def test_getItemMultiple(self):
+        """
+        L{_DictHeaders.__getitem__} returns only the last header value for a
+        given name.
+        """
+        headers, wrapper = self.headers(test=[b"lemur", b"panda"])
+        self.assertEqual(wrapper[b"test"], b"panda")
+
+
+    def test_getItemMissing(self):
+        """
+        L{_DictHeaders.__getitem__} raises L{KeyError} if called with a header
+        which is not present.
+        """
+        headers, wrapper = self.headers()
+        exc = self.assertRaises(KeyError, wrapper.__getitem__, b"test")
+        self.assertEqual(exc.args, (b"test",))
+
+
+    def test_iteration(self):
+        """
+        L{_DictHeaders.__iter__} returns an iterator over the lowercase names
+        of all the headers present.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur", b"panda"], bar=[b"baz"])
+        self.assertEqual(set(list(wrapper)), set([b"foo", b"bar"]))
+
+
+    def test_length(self):
+        """
+        L{_DictHeaders.__len__} returns the number of headers present.
+        """
+        headers, wrapper = self.headers()
+        self.assertEqual(len(wrapper), 0)
+        headers.setRawHeaders(b"foo", [b"bar"])
+        self.assertEqual(len(wrapper), 1)
+        headers.setRawHeaders(b"test", [b"lemur", b"panda"])
+        self.assertEqual(len(wrapper), 2)
+
+
+    def test_setItem(self):
+        """
+        L{_DictHeaders.__setitem__} sets a single header value for the given
+        name.
+        """
+        headers, wrapper = self.headers()
+        wrapper[b"test"] = b"lemur"
+        self.assertEqual(headers.getRawHeaders(b"test"), [b"lemur"])
+
+
+    def test_setItemOverwrites(self):
+        """
+        L{_DictHeaders.__setitem__} will replace any previous header values for
+        the given name.
+        """
+        headers, wrapper = self.headers(test=[b"lemur", b"panda"])
+        wrapper[b"test"] = b"lemur"
+        self.assertEqual(headers.getRawHeaders(b"test"), [b"lemur"])
+
+
+    def test_delItem(self):
+        """
+        L{_DictHeaders.__delitem__} will remove the header values for the given
+        name.
+        """
+        headers, wrapper = self.headers(test=[b"lemur"])
+        del wrapper[b"test"]
+        self.assertFalse(headers.hasHeader(b"test"))
+
+
+    def test_delItemMissing(self):
+        """
+        L{_DictHeaders.__delitem__} will raise L{KeyError} if the given name is
+        not present.
+        """
+        headers, wrapper = self.headers()
+        exc = self.assertRaises(KeyError, wrapper.__delitem__, b"test")
+        self.assertEqual(exc.args, (b"test",))
+
+
+    def test_keys(self, _method='keys', _requireList=not _PY3):
+        """
+        L{_DictHeaders.keys} will return a list of all present header names.
+        """
+        headers, wrapper = self.headers(test=[b"lemur"], foo=[b"bar"])
+        keys = getattr(wrapper, _method)()
+        if _requireList:
+            self.assertIsInstance(keys, list)
+        self.assertEqual(set(keys), set([b"foo", b"test"]))
+
+
+    def test_iterkeys(self):
+        """
+        L{_DictHeaders.iterkeys} will return all present header names.
+        """
+        self.test_keys('iterkeys', False)
+
+
+    def test_values(self, _method='values', _requireList=not _PY3):
+        """
+        L{_DictHeaders.values} will return a list of all present header values,
+        returning only the last value for headers with more than one.
+        """
+        headers, wrapper = self.headers(
+            foo=[b"lemur"], bar=[b"marmot", b"panda"])
+        values = getattr(wrapper, _method)()
+        if _requireList:
+            self.assertIsInstance(values, list)
+        self.assertEqual(set(values), set([b"lemur", b"panda"]))
+
+
+    def test_itervalues(self):
+        """
+        L{_DictHeaders.itervalues} will return all present header values,
+        returning only the last value for headers with more than one.
+        """
+        self.test_values('itervalues', False)
+
+
+    def test_items(self, _method='items', _requireList=not _PY3):
+        """
+        L{_DictHeaders.items} will return a list of all present header names
+        and values as tuples, returning only the last value for headers with
+        more than one.
+        """
+        headers, wrapper = self.headers(
+            foo=[b"lemur"], bar=[b"marmot", b"panda"])
+        items = getattr(wrapper, _method)()
+        if _requireList:
+            self.assertIsInstance(items, list)
+        self.assertEqual(
+            set(items), set([(b"foo", b"lemur"), (b"bar", b"panda")]))
+
+
+    def test_iteritems(self):
+        """
+        L{_DictHeaders.iteritems} will return all present header names and
+        values as tuples, returning only the last value for headers with more
+        than one.
+        """
+        self.test_items('iteritems', False)
+
+
+    def test_clear(self):
+        """
+        L{_DictHeaders.clear} will remove all headers.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"], bar=[b"panda"])
+        wrapper.clear()
+        self.assertEqual(list(headers.getAllRawHeaders()), [])
+
+
+    def test_copy(self):
+        """
+        L{_DictHeaders.copy} will return a C{dict} with all the same headers
+        and the last value for each.
+        """
+        headers, wrapper = self.headers(
+            foo=[b"lemur", b"panda"], bar=[b"marmot"])
+        duplicate = wrapper.copy()
+        self.assertEqual(duplicate, {b"foo": b"panda", b"bar": b"marmot"})
+
+
+    def test_get(self):
+        """
+        L{_DictHeaders.get} returns the last value for the given header name.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur", b"panda"])
+        self.assertEqual(wrapper.get(b"foo"), b"panda")
+
+
+    def test_getMissing(self):
+        """
+        L{_DictHeaders.get} returns C{None} for a header which is not present.
+        """
+        headers, wrapper = self.headers()
+        self.assertIdentical(wrapper.get(b"foo"), None)
+
+
+    def test_getDefault(self):
+        """
+        L{_DictHeaders.get} returns the last value for the given header name
+        even when it is invoked with a default value.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"])
+        self.assertEqual(wrapper.get(b"foo", b"bar"), b"lemur")
+
+
+    def test_getDefaultMissing(self):
+        """
+        L{_DictHeaders.get} returns the default value specified if asked for a
+        header which is not present.
+        """
+        headers, wrapper = self.headers()
+        self.assertEqual(wrapper.get(b"foo", b"bar"), b"bar")
+
+
+    def test_has_key(self):
+        """
+        L{_DictHeaders.has_key} returns C{True} if the given header is present,
+        C{False} otherwise.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"])
+        self.assertTrue(wrapper.has_key(b"foo"))
+        self.assertFalse(wrapper.has_key(b"bar"))
+
+
+    def test_contains(self):
+        """
+        L{_DictHeaders.__contains__} returns C{True} if the given header is
+        present, C{False} otherwise.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"])
+        self.assertIn(b"foo", wrapper)
+        self.assertNotIn(b"bar", wrapper)
+
+
+    def test_pop(self):
+        """
+        L{_DictHeaders.pop} returns the last header value associated with the
+        given header name and removes the header.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur", b"panda"])
+        self.assertEqual(wrapper.pop(b"foo"), b"panda")
+        self.assertIdentical(headers.getRawHeaders(b"foo"), None)
+
+
+    def test_popMissing(self):
+        """
+        L{_DictHeaders.pop} raises L{KeyError} if passed a header name which is
+        not present.
+        """
+        headers, wrapper = self.headers()
+        self.assertRaises(KeyError, wrapper.pop, b"foo")
+
+
+    def test_popDefault(self):
+        """
+        L{_DictHeaders.pop} returns the last header value associated with the
+        given header name and removes the header, even if it is supplied with a
+        default value.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"])
+        self.assertEqual(wrapper.pop(b"foo", b"bar"), b"lemur")
+        self.assertIdentical(headers.getRawHeaders(b"foo"), None)
+
+
+    def test_popDefaultMissing(self):
+        """
+        L{_DictHeaders.pop} returns the default value if asked for a header
+        name which is not present.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"])
+        self.assertEqual(wrapper.pop(b"bar", b"baz"), b"baz")
+        self.assertEqual(headers.getRawHeaders(b"foo"), [b"lemur"])
+
+
+    def test_popitem(self):
+        """
+        L{_DictHeaders.popitem} returns some header name/value pair.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur", b"panda"])
+        self.assertEqual(wrapper.popitem(), (b"foo", b"panda"))
+        self.assertIdentical(headers.getRawHeaders(b"foo"), None)
+
+
+    def test_popitemEmpty(self):
+        """
+        L{_DictHeaders.popitem} raises L{KeyError} if there are no headers
+        present.
+        """
+        headers, wrapper = self.headers()
+        self.assertRaises(KeyError, wrapper.popitem)
+
+
+    def test_update(self):
+        """
+        L{_DictHeaders.update} adds the header/value pairs in the C{dict} it is
+        passed, overriding any existing values for those headers.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"])
+        wrapper.update({b"foo": b"panda", b"bar": b"marmot"})
+        self.assertEqual(headers.getRawHeaders(b"foo"), [b"panda"])
+        self.assertEqual(headers.getRawHeaders(b"bar"), [b"marmot"])
+
+
+    def test_updateWithKeywords(self):
+        """
+        L{_DictHeaders.update} adds header names given as keyword arguments
+        with the keyword values as the header value.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"])
+        wrapper.update(foo=b"panda", bar=b"marmot")
+        self.assertEqual(headers.getRawHeaders(b"foo"), [b"panda"])
+        self.assertEqual(headers.getRawHeaders(b"bar"), [b"marmot"])
+
+    if _PY3:
+        test_updateWithKeywords.skip = "Not yet supported on Python 3; see #6082."
+
+
+    def test_setdefaultMissing(self):
+        """
+        If passed the name of a header which is not present,
+        L{_DictHeaders.setdefault} sets the value of the given header to the
+        specified default value and returns it.
+        """
+        headers, wrapper = self.headers(foo=[b"bar"])
+        self.assertEqual(wrapper.setdefault(b"baz", b"quux"), b"quux")
+        self.assertEqual(headers.getRawHeaders(b"foo"), [b"bar"])
+        self.assertEqual(headers.getRawHeaders(b"baz"), [b"quux"])
+
+
+    def test_setdefaultPresent(self):
+        """
+        If passed the name of a header which is present,
+        L{_DictHeaders.setdefault} makes no changes to the headers and
+        returns the last value already associated with that header.
+        """
+        headers, wrapper = self.headers(foo=[b"bar", b"baz"])
+        self.assertEqual(wrapper.setdefault(b"foo", b"quux"), b"baz")
+        self.assertEqual(headers.getRawHeaders(b"foo"), [b"bar", b"baz"])
+
+
+    def test_setdefaultDefault(self):
+        """
+        If a value is not passed to L{_DictHeaders.setdefault}, C{None} is
+        used.
+        """
+        # This results in an invalid state for the headers, but maybe some
+        # application is doing this as an intermediate step towards some other
+        # state.  Anyway, it was broken with the old implementation so it's
+        # broken with the new implementation.  Compatibility, for the win.
+        # -exarkun
+        headers, wrapper = self.headers()
+        self.assertIdentical(wrapper.setdefault(b"foo"), None)
+        self.assertEqual(headers.getRawHeaders(b"foo"), [None])
+
+
+    def test_dictComparison(self):
+        """
+        An instance of L{_DictHeaders} compares equal to a C{dict} which
+        contains the same header/value pairs.  For header names with multiple
+        values, the last value only is considered.
+        """
+        headers, wrapper = self.headers(foo=[b"lemur"], bar=[b"panda", b"marmot"])
+        self.assertNotEqual(wrapper, {b"foo": b"lemur", b"bar": b"panda"})
+        self.assertEqual(wrapper, {b"foo": b"lemur", b"bar": b"marmot"})
+
+
+    def test_otherComparison(self):
+        """
+        An instance of L{_DictHeaders} does not compare equal to other
+        unrelated objects.
+        """
+        headers, wrapper = self.headers()
+        self.assertNotEqual(wrapper, ())
+        self.assertNotEqual(wrapper, object())
+        self.assertNotEqual(wrapper, b"foo")
+
+    if _PY3:
+        # Python 3 lacks these APIs
+        del test_iterkeys, test_itervalues, test_iteritems, test_has_key
+
diff --git a/ThirdParty/Twisted/twisted/web/test/test_httpauth.py b/ThirdParty/Twisted/twisted/web/test/test_httpauth.py
new file mode 100644
index 0000000..1764b0f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_httpauth.py
@@ -0,0 +1,634 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web._auth}.
+"""
+
+
+from zope.interface import implements
+from zope.interface.verify import verifyObject
+
+from twisted.trial import unittest
+
+from twisted.python.failure import Failure
+from twisted.internet.error import ConnectionDone
+from twisted.internet.address import IPv4Address
+
+from twisted.cred import error, portal
+from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
+from twisted.cred.checkers import ANONYMOUS, AllowAnonymousAccess
+from twisted.cred.credentials import IUsernamePassword
+
+from twisted.web.iweb import ICredentialFactory
+from twisted.web.resource import IResource, Resource, getChildForRequest
+from twisted.web._auth import basic, digest
+from twisted.web._auth.wrapper import HTTPAuthSessionWrapper, UnauthorizedResource
+from twisted.web._auth.basic import BasicCredentialFactory
+
+from twisted.web.server import NOT_DONE_YET
+from twisted.web.static import Data
+
+from twisted.web.test.test_web import DummyRequest
+
+
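+# This helper relies on the Python 2-only 'base64' codec reachable through
+# str.encode; base64.b64encode would be the portable equivalent.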
+def b64encode(s):
+    return s.encode('base64').strip()
+
+
+class BasicAuthTestsMixin:
+    """
+    L{TestCase} mixin class which defines a number of tests for
+    L{basic.BasicCredentialFactory}.  Because this mixin defines C{setUp}, it
+    must be inherited before L{TestCase}.
+    """
+    def setUp(self):
+        self.request = self.makeRequest()
+        self.realm = 'foo'
+        self.username = 'dreid'
+        self.password = 'S3CuR1Ty'
+        self.credentialFactory = basic.BasicCredentialFactory(self.realm)
+
+
+    def makeRequest(self, method='GET', clientAddress=None):
+        """
+        Create a request object to be passed to
+        L{basic.BasicCredentialFactory.decode} along with a response value.
+        Override this in a subclass.
+        """
+        raise NotImplementedError("%r did not implement makeRequest" % (
+                self.__class__,))
+
+
+    def test_interface(self):
+        """
+        L{BasicCredentialFactory} implements L{ICredentialFactory}.
+        """
+        self.assertTrue(
+            verifyObject(ICredentialFactory, self.credentialFactory))
+
+
+    def test_usernamePassword(self):
+        """
+        L{basic.BasicCredentialFactory.decode} turns a base64-encoded response
+        into a L{UsernamePassword} object with a password which reflects the
+        one which was encoded in the response.
+        """
+        response = b64encode('%s:%s' % (self.username, self.password))
+
+        creds = self.credentialFactory.decode(response, self.request)
+        self.assertTrue(IUsernamePassword.providedBy(creds))
+        self.assertTrue(creds.checkPassword(self.password))
+        self.assertFalse(creds.checkPassword(self.password + 'wrong'))
+
+
+    def test_incorrectPadding(self):
+        """
+        L{basic.BasicCredentialFactory.decode} decodes a base64-encoded
+        response with incorrect padding.
+        """
+        response = b64encode('%s:%s' % (self.username, self.password))
+        response = response.strip('=')
+
+        creds = self.credentialFactory.decode(response, self.request)
+        self.assertTrue(verifyObject(IUsernamePassword, creds))
+        self.assertTrue(creds.checkPassword(self.password))
+
+
+    def test_invalidEncoding(self):
+        """
+        L{basic.BasicCredentialFactory.decode} raises L{LoginFailed} if passed
+        a response which is not base64-encoded.
+        """
+        response = 'x' # one byte cannot be valid base64 text
+        self.assertRaises(
+            error.LoginFailed,
+            self.credentialFactory.decode, response, self.makeRequest())
+
+
+    def test_invalidCredentials(self):
+        """
+        L{basic.BasicCredentialFactory.decode} raises L{LoginFailed} when the
+        decoded response lacks a colon-separated username and password.
+        """
+        response = b64encode('123abc+/')
+        self.assertRaises(
+            error.LoginFailed,
+            self.credentialFactory.decode,
+            response, self.makeRequest())
+
+
+class RequestMixin:
+    def makeRequest(self, method='GET', clientAddress=None):
+        """
+        Create a L{DummyRequest} (change me to create a
+        L{twisted.web.http.Request} instead).
+        """
+        request = DummyRequest('/')
+        request.method = method
+        request.client = clientAddress
+        return request
+
+
+
+class BasicAuthTestCase(RequestMixin, BasicAuthTestsMixin, unittest.TestCase):
+    """
+    Basic authentication tests which use L{twisted.web.http.Request}.
+    """
+
+
+
+class DigestAuthTestCase(RequestMixin, unittest.TestCase):
+    """
+    Digest authentication tests which use L{twisted.web.http.Request}.
+    """
+
+    def setUp(self):
+        """
+        Create a L{DigestCredentialFactory} for testing.
+        """
+        self.realm = "test realm"
+        self.algorithm = "md5"
+        self.credentialFactory = digest.DigestCredentialFactory(
+            self.algorithm, self.realm)
+        self.request = self.makeRequest()
+
+
+    def test_decode(self):
+        """
+        L{digest.DigestCredentialFactory.decode} calls the C{decode} method
+        on L{twisted.cred.credentials.DigestCredentialFactory} with the HTTP
+        method and host of the request.
+        """
+        host = '169.254.0.1'
+        method = 'GET'
+        done = [False]
+        response = object()
+        def check(_response, _method, _host):
+            self.assertEqual(response, _response)
+            self.assertEqual(method, _method)
+            self.assertEqual(host, _host)
+            done[0] = True
+
+        self.patch(self.credentialFactory.digest, 'decode', check)
+        req = self.makeRequest(method, IPv4Address('TCP', host, 81))
+        self.credentialFactory.decode(response, req)
+        self.assertTrue(done[0])
+
+
+    def test_interface(self):
+        """
+        L{DigestCredentialFactory} implements L{ICredentialFactory}.
+        """
+        self.assertTrue(
+            verifyObject(ICredentialFactory, self.credentialFactory))
+
+
+    def test_getChallenge(self):
+        """
+        The challenge issued by L{DigestCredentialFactory.getChallenge} must
+        include C{'qop'}, C{'realm'}, C{'algorithm'}, C{'nonce'}, and
+        C{'opaque'} keys.  The values for the C{'realm'} and C{'algorithm'}
+        keys must match the values supplied to the factory's initializer.
+        None of the values may have newlines in them.
+        """
+        challenge = self.credentialFactory.getChallenge(self.request)
+        self.assertEqual(challenge['qop'], 'auth')
+        self.assertEqual(challenge['realm'], 'test realm')
+        self.assertEqual(challenge['algorithm'], 'md5')
+        self.assertIn('nonce', challenge)
+        self.assertIn('opaque', challenge)
+        for v in challenge.values():
+            self.assertNotIn('\n', v)
+
+
+    def test_getChallengeWithoutClientIP(self):
+        """
+        L{DigestCredentialFactory.getChallenge} can issue a challenge even if
+        the L{Request} it is passed returns C{None} from C{getClientIP}.
+        """
+        request = self.makeRequest('GET', None)
+        challenge = self.credentialFactory.getChallenge(request)
+        self.assertEqual(challenge['qop'], 'auth')
+        self.assertEqual(challenge['realm'], 'test realm')
+        self.assertEqual(challenge['algorithm'], 'md5')
+        self.assertIn('nonce', challenge)
+        self.assertIn('opaque', challenge)
+
+
+
+class UnauthorizedResourceTests(unittest.TestCase):
+    """
+    Tests for L{UnauthorizedResource}.
+    """
+    def test_getChildWithDefault(self):
+        """
+        An L{UnauthorizedResource} is every child of itself.
+        """
+        resource = UnauthorizedResource([])
+        self.assertIdentical(
+            resource.getChildWithDefault("foo", None), resource)
+        self.assertIdentical(
+            resource.getChildWithDefault("bar", None), resource)
+
+
+    def _unauthorizedRenderTest(self, request):
+        """
+        Render L{UnauthorizedResource} for the given request object and verify
+        that the response code is I{Unauthorized} and that a I{WWW-Authenticate}
+        header is set in the response containing a challenge.
+        """
+        resource = UnauthorizedResource([
+                BasicCredentialFactory('example.com')])
+        request.render(resource)
+        self.assertEqual(request.responseCode, 401)
+        self.assertEqual(
+            request.responseHeaders.getRawHeaders('www-authenticate'),
+            ['basic realm="example.com"'])
+
+
+    def test_render(self):
+        """
+        L{UnauthorizedResource} renders with a 401 response code and a
+        I{WWW-Authenticate} header and puts a simple unauthorized message
+        into the response body.
+        """
+        request = DummyRequest([''])
+        self._unauthorizedRenderTest(request)
+        self.assertEqual('Unauthorized', ''.join(request.written))
+
+
+    def test_renderHEAD(self):
+        """
+        The rendering behavior of L{UnauthorizedResource} for a I{HEAD} request
+        is like its handling of a I{GET} request, but no response body is
+        written.
+        """
+        request = DummyRequest([''])
+        request.method = 'HEAD'
+        self._unauthorizedRenderTest(request)
+        self.assertEqual('', ''.join(request.written))
+
+
+    def test_renderQuotesRealm(self):
+        """
+        The realm value included in the I{WWW-Authenticate} header set in
+        the response when L{UnauthorizedResource} is rendered has quotes
+        and backslashes escaped.
+        """
+        resource = UnauthorizedResource([
+                BasicCredentialFactory('example\\"foo')])
+        request = DummyRequest([''])
+        request.render(resource)
+        self.assertEqual(
+            request.responseHeaders.getRawHeaders('www-authenticate'),
+            ['basic realm="example\\\\\\"foo"'])
+
+
+
+class Realm(object):
+    """
+    A simple L{IRealm} implementation which gives out the avatar created by
+    its C{avatarFactory} for any avatarId.
+
+    @type loggedIn: C{int}
+    @ivar loggedIn: The number of times C{requestAvatar} has been invoked for
+        L{IResource}.
+
+    @type loggedOut: C{int}
+    @ivar loggedOut: The number of times the logout callback has been invoked.
+    """
+    implements(portal.IRealm)
+
+    def __init__(self, avatarFactory):
+        self.loggedOut = 0
+        self.loggedIn = 0
+        self.avatarFactory = avatarFactory
+
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        if IResource in interfaces:
+            self.loggedIn += 1
+            return IResource, self.avatarFactory(avatarId), self.logout
+        raise NotImplementedError()
+
+
+    def logout(self):
+        self.loggedOut += 1
+
+
+
+class HTTPAuthHeaderTests(unittest.TestCase):
+    """
+    Tests for L{HTTPAuthSessionWrapper}.
+    """
+    makeRequest = DummyRequest
+
+    def setUp(self):
+        """
+        Create a realm, portal, and L{HTTPAuthSessionWrapper} to use in the tests.
+        """
+        self.username = 'foo bar'
+        self.password = 'bar baz'
+        self.avatarContent = "contents of the avatar resource itself"
+        self.childName = "foo-child"
+        self.childContent = "contents of the foo child of the avatar"
+        self.checker = InMemoryUsernamePasswordDatabaseDontUse()
+        self.checker.addUser(self.username, self.password)
+        self.avatar = Data(self.avatarContent, 'text/plain')
+        self.avatar.putChild(
+            self.childName, Data(self.childContent, 'text/plain'))
+        self.avatars = {self.username: self.avatar}
+        self.realm = Realm(self.avatars.get)
+        self.portal = portal.Portal(self.realm, [self.checker])
+        self.credentialFactories = []
+        self.wrapper = HTTPAuthSessionWrapper(
+            self.portal, self.credentialFactories)
+
+
+    def _authorizedBasicLogin(self, request):
+        """
+        Add a I{basic authorization} header to the given request and then
+        dispatch it, starting from C{self.wrapper} and returning the resulting
+        L{IResource}.
+        """
+        authorization = b64encode(self.username + ':' + self.password)
+        request.headers['authorization'] = 'Basic ' + authorization
+        return getChildForRequest(self.wrapper, request)
+
+
+    def test_getChildWithDefault(self):
+        """
+        Resource traversal which encounters an L{HTTPAuthSessionWrapper}
+        results in an L{UnauthorizedResource} instance when the request does
+        not have the required I{Authorization} headers.
+        """
+        request = self.makeRequest([self.childName])
+        child = getChildForRequest(self.wrapper, request)
+        d = request.notifyFinish()
+        def cbFinished(result):
+            self.assertEqual(request.responseCode, 401)
+        d.addCallback(cbFinished)
+        request.render(child)
+        return d
+
+
+    def _invalidAuthorizationTest(self, response):
+        """
+        Create a request with the given value as the value of an
+        I{Authorization} header and perform resource traversal with it,
+        starting at C{self.wrapper}.  Assert that the result is a 401 response
+        code.  Return a L{Deferred} which fires when this is all done.
+        """
+        self.credentialFactories.append(BasicCredentialFactory('example.com'))
+        request = self.makeRequest([self.childName])
+        request.headers['authorization'] = response
+        child = getChildForRequest(self.wrapper, request)
+        d = request.notifyFinish()
+        def cbFinished(result):
+            self.assertEqual(request.responseCode, 401)
+        d.addCallback(cbFinished)
+        request.render(child)
+        return d
+
+
+    def test_getChildWithDefaultUnauthorizedUser(self):
+        """
+        Resource traversal which encounters an L{HTTPAuthSessionWrapper}
+        results in an L{UnauthorizedResource} when the request has an
+        I{Authorization} header with a user which does not exist.
+        """
+        return self._invalidAuthorizationTest('Basic ' + b64encode('foo:bar'))
+
+
+    def test_getChildWithDefaultUnauthorizedPassword(self):
+        """
+        Resource traversal which encounters an L{HTTPAuthSessionWrapper}
+        results in an L{UnauthorizedResource} when the request has an
+        I{Authorization} header with a user which exists and the wrong
+        password.
+        """
+        return self._invalidAuthorizationTest(
+            'Basic ' + b64encode(self.username + ':bar'))
+
+
+    def test_getChildWithDefaultUnrecognizedScheme(self):
+        """
+        Resource traversal which encounters an L{HTTPAuthSessionWrapper}
+        results in an L{UnauthorizedResource} when the request has an
+        I{Authorization} header with an unrecognized scheme.
+        """
+        return self._invalidAuthorizationTest('Quux foo bar baz')
+
+
+    def test_getChildWithDefaultAuthorized(self):
+        """
+        Resource traversal which encounters an L{HTTPAuthSessionWrapper}
+        results in an L{IResource} which renders the L{IResource} avatar
+        retrieved from the portal when the request has a valid I{Authorization}
+        header.
+        """
+        self.credentialFactories.append(BasicCredentialFactory('example.com'))
+        request = self.makeRequest([self.childName])
+        child = self._authorizedBasicLogin(request)
+        d = request.notifyFinish()
+        def cbFinished(ignored):
+            self.assertEqual(request.written, [self.childContent])
+        d.addCallback(cbFinished)
+        request.render(child)
+        return d
+
+
+    def test_renderAuthorized(self):
+        """
+        Resource traversal which terminates at an L{HTTPAuthSessionWrapper}
+        and includes correct authentication headers results in the
+        L{IResource} avatar (not one of its children) retrieved from the
+        portal being rendered.
+        """
+        self.credentialFactories.append(BasicCredentialFactory('example.com'))
+        # Request it exactly, not any of its children.
+        request = self.makeRequest([])
+        child = self._authorizedBasicLogin(request)
+        d = request.notifyFinish()
+        def cbFinished(ignored):
+            self.assertEqual(request.written, [self.avatarContent])
+        d.addCallback(cbFinished)
+        request.render(child)
+        return d
+
+
+    def test_getChallengeCalledWithRequest(self):
+        """
+        When L{HTTPAuthSessionWrapper} finds an L{ICredentialFactory} to issue
+        a challenge, it calls the C{getChallenge} method with the request as an
+        argument.
+        """
+        class DumbCredentialFactory(object):
+            implements(ICredentialFactory)
+            scheme = 'dumb'
+
+            def __init__(self):
+                self.requests = []
+
+            def getChallenge(self, request):
+                self.requests.append(request)
+                return {}
+
+        factory = DumbCredentialFactory()
+        self.credentialFactories.append(factory)
+        request = self.makeRequest([self.childName])
+        child = getChildForRequest(self.wrapper, request)
+        d = request.notifyFinish()
+        def cbFinished(ignored):
+            self.assertEqual(factory.requests, [request])
+        d.addCallback(cbFinished)
+        request.render(child)
+        return d
+
+
+    def _logoutTest(self):
+        """
+        Issue a request for an authentication-protected resource using valid
+        credentials and then return the C{DummyRequest} instance which was
+        used.
+
+        This is a helper for tests about the behavior of the logout
+        callback.
+        """
+        self.credentialFactories.append(BasicCredentialFactory('example.com'))
+
+        class SlowerResource(Resource):
+            def render(self, request):
+                return NOT_DONE_YET
+
+        self.avatar.putChild(self.childName, SlowerResource())
+        request = self.makeRequest([self.childName])
+        child = self._authorizedBasicLogin(request)
+        request.render(child)
+        self.assertEqual(self.realm.loggedOut, 0)
+        return request
+
+
+    def test_logout(self):
+        """
+        The realm's logout callback is invoked after the resource is rendered.
+        """
+        request = self._logoutTest()
+        request.finish()
+        self.assertEqual(self.realm.loggedOut, 1)
+
+
+    def test_logoutOnError(self):
+        """
+        The realm's logout callback is also invoked if there is an error
+        generating the response (for example, if the client disconnects
+        early).
+        """
+        request = self._logoutTest()
+        request.processingFailed(
+            Failure(ConnectionDone("Simulated disconnect")))
+        self.assertEqual(self.realm.loggedOut, 1)
+
+
+    def test_decodeRaises(self):
+        """
+        Resource traversal which encounters an L{HTTPAuthSessionWrapper}
+        results in an L{UnauthorizedResource} when the request has a I{Basic
+        Authorization} header which cannot be decoded using base64.
+        """
+        self.credentialFactories.append(BasicCredentialFactory('example.com'))
+        request = self.makeRequest([self.childName])
+        request.headers['authorization'] = 'Basic decode should fail'
+        child = getChildForRequest(self.wrapper, request)
+        self.assertIsInstance(child, UnauthorizedResource)
+
+
+    def test_selectParseResponse(self):
+        """
+        L{HTTPAuthSessionWrapper._selectParseHeader} returns a two-tuple giving
+        the L{ICredentialFactory} to use to parse the header and a string
+        containing the portion of the header which remains to be parsed.
+        """
+        basicAuthorization = 'Basic abcdef123456'
+        self.assertEqual(
+            self.wrapper._selectParseHeader(basicAuthorization),
+            (None, None))
+        factory = BasicCredentialFactory('example.com')
+        self.credentialFactories.append(factory)
+        self.assertEqual(
+            self.wrapper._selectParseHeader(basicAuthorization),
+            (factory, 'abcdef123456'))
+
+
+    def test_unexpectedDecodeError(self):
+        """
+        Any unexpected exception raised by the credential factory's C{decode}
+        method results in a 500 response code and causes the exception to be
+        logged.
+        """
+        class UnexpectedException(Exception):
+            pass
+
+        class BadFactory(object):
+            scheme = 'bad'
+
+            def getChallenge(self, client):
+                return {}
+
+            def decode(self, response, request):
+                raise UnexpectedException()
+
+        self.credentialFactories.append(BadFactory())
+        request = self.makeRequest([self.childName])
+        request.headers['authorization'] = 'Bad abc'
+        child = getChildForRequest(self.wrapper, request)
+        request.render(child)
+        self.assertEqual(request.responseCode, 500)
+        self.assertEqual(len(self.flushLoggedErrors(UnexpectedException)), 1)
+
+
+    def test_unexpectedLoginError(self):
+        """
+        Any unexpected failure from L{Portal.login} results in a 500 response
+        code and causes the failure to be logged.
+        """
+        class UnexpectedException(Exception):
+            pass
+
+        class BrokenChecker(object):
+            credentialInterfaces = (IUsernamePassword,)
+
+            def requestAvatarId(self, credentials):
+                raise UnexpectedException()
+
+        self.portal.registerChecker(BrokenChecker())
+        self.credentialFactories.append(BasicCredentialFactory('example.com'))
+        request = self.makeRequest([self.childName])
+        child = self._authorizedBasicLogin(request)
+        request.render(child)
+        self.assertEqual(request.responseCode, 500)
+        self.assertEqual(len(self.flushLoggedErrors(UnexpectedException)), 1)
+
+
+    def test_anonymousAccess(self):
+        """
+        Anonymous requests are allowed if a L{Portal} has an anonymous checker
+        registered.
+        """
+        unprotectedContents = "contents of the unprotected child resource"
+
+        self.avatars[ANONYMOUS] = Resource()
+        self.avatars[ANONYMOUS].putChild(
+            self.childName, Data(unprotectedContents, 'text/plain'))
+        self.portal.registerChecker(AllowAnonymousAccess())
+
+        self.credentialFactories.append(BasicCredentialFactory('example.com'))
+        request = self.makeRequest([self.childName])
+        child = getChildForRequest(self.wrapper, request)
+        d = request.notifyFinish()
+        def cbFinished(ignored):
+            self.assertEqual(request.written, [unprotectedContents])
+        d.addCallback(cbFinished)
+        request.render(child)
+        return d
diff --git a/ThirdParty/Twisted/twisted/web/test/test_newclient.py b/ThirdParty/Twisted/twisted/web/test/test_newclient.py
new file mode 100644
index 0000000..516d0aa
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_newclient.py
@@ -0,0 +1,2521 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web._newclient}.
+"""
+
+__metaclass__ = type
+
+from zope.interface import implements
+from zope.interface.verify import verifyObject
+
+from twisted.python import log
+from twisted.python.failure import Failure
+from twisted.internet.interfaces import IConsumer, IPushProducer
+from twisted.internet.error import ConnectionDone, ConnectionLost
+from twisted.internet.defer import Deferred, succeed, fail
+from twisted.internet.protocol import Protocol
+from twisted.trial.unittest import TestCase
+from twisted.test.proto_helpers import StringTransport, AccumulatingProtocol
+from twisted.web._newclient import UNKNOWN_LENGTH, STATUS, HEADER, BODY, DONE
+from twisted.web._newclient import Request, Response, HTTPParser, HTTPClientParser
+from twisted.web._newclient import BadResponseVersion, ParseError, HTTP11ClientProtocol
+from twisted.web._newclient import ChunkedEncoder, RequestGenerationFailed
+from twisted.web._newclient import RequestTransmissionFailed, ResponseFailed
+from twisted.web._newclient import WrongBodyLength, RequestNotSent
+from twisted.web._newclient import ConnectionAborted, ResponseNeverReceived
+from twisted.web._newclient import BadHeaders, ResponseDone, PotentialDataLoss, ExcessWrite
+from twisted.web._newclient import TransportProxyProducer, LengthEnforcingConsumer, makeStatefulDispatcher
+from twisted.web.http_headers import Headers
+from twisted.web.http import _DataLoss
+from twisted.web.iweb import IBodyProducer, IResponse
+
+
+
+class ArbitraryException(Exception):
+    """
+    A unique, arbitrary exception type which L{twisted.web._newclient} knows
+    nothing about.
+    """
+
+
+class AnotherArbitraryException(Exception):
+    """
+    Similar to L{ArbitraryException} but with a different identity.
+    """
+
+
+# A re-usable Headers instance for tests which don't really care what headers
+# they're sending.
+_boringHeaders = Headers({'host': ['example.com']})
+
+
+def assertWrapperExceptionTypes(self, deferred, mainType, reasonTypes):
+    """
+    Assert that the given L{Deferred} fails with the exception given by
+    C{mainType} and that the exceptions wrapped by the instance of C{mainType}
+    it fails with match the list of exception types given by C{reasonTypes}.
+
+    This is a helper for testing failures of exceptions which subclass
+    L{_newclient._WrapperException}.
+
+    @param self: A L{TestCase} instance which will be used to make the
+        assertions.
+
+    @param deferred: The L{Deferred} which is expected to fail with
+        C{mainType}.
+
+    @param mainType: A L{_newclient._WrapperException} subclass which will be
+        trapped on C{deferred}.
+
+    @param reasonTypes: A sequence of exception types which will be trapped on
+        the resulting L{mainType} exception instance's C{reasons} sequence.
+
+    @return: A L{Deferred} which fires with the C{mainType} instance
+        C{deferred} fails with, or which fails somehow.
+    """
+    def cbFailed(err):
+        for reason, type in zip(err.reasons, reasonTypes):
+            reason.trap(type)
+        self.assertEqual(len(err.reasons), len(reasonTypes),
+                         "len(%s) != len(%s)" % (err.reasons, reasonTypes))
+        return err
+    d = self.assertFailure(deferred, mainType)
+    d.addCallback(cbFailed)
+    return d
+
+
+
+def assertResponseFailed(self, deferred, reasonTypes):
+    """
+    A simple helper to invoke L{assertWrapperExceptionTypes} with a C{mainType}
+    of L{ResponseFailed}.
+    """
+    return assertWrapperExceptionTypes(self, deferred, ResponseFailed, reasonTypes)
+
+
+
+def assertRequestGenerationFailed(self, deferred, reasonTypes):
+    """
+    A simple helper to invoke L{assertWrapperExceptionTypes} with a C{mainType}
+    of L{RequestGenerationFailed}.
+    """
+    return assertWrapperExceptionTypes(self, deferred, RequestGenerationFailed, reasonTypes)
+
+
+
+def assertRequestTransmissionFailed(self, deferred, reasonTypes):
+    """
+    A simple helper to invoke L{assertWrapperExceptionTypes} with a C{mainType}
+    of L{RequestTransmissionFailed}.
+    """
+    return assertWrapperExceptionTypes(self, deferred, RequestTransmissionFailed, reasonTypes)
+
+
+
+def justTransportResponse(transport):
+    """
+    Helper function for creating a Response which uses the given transport.
+    All of the other parameters to L{Response.__init__} are filled with
+    arbitrary values.  Only use this method if you don't care about any of
+    them.
+    """
+    return Response(('HTTP', 1, 1), 200, 'OK', _boringHeaders, transport)
+
+
+class MakeStatefulDispatcherTests(TestCase):
+    """
+    Tests for L{makeStatefulDispatcher}.
+    """
+    def test_functionCalledByState(self):
+        """
+        A method defined with L{makeStatefulDispatcher} invokes a second
+        method based on the current state of the object.
+        """
+        class Foo:
+            _state = 'A'
+
+            def bar(self):
+                pass
+            bar = makeStatefulDispatcher('quux', bar)
+
+            def _quux_A(self):
+                return 'a'
+
+            def _quux_B(self):
+                return 'b'
+
+        stateful = Foo()
+        self.assertEqual(stateful.bar(), 'a')
+        stateful._state = 'B'
+        self.assertEqual(stateful.bar(), 'b')
+        stateful._state = 'C'
+        self.assertRaises(RuntimeError, stateful.bar)
+
+
+
+class _HTTPParserTests(object):
+    """
+    Base test class for L{HTTPParser} which is responsible for the bulk of
+    the task of parsing HTTP bytes.
+    """
+    sep = None
+
+    def test_statusCallback(self):
+        """
+        L{HTTPParser} calls its C{statusReceived} method when it receives a
+        status line.
+        """
+        status = []
+        protocol = HTTPParser()
+        protocol.statusReceived = status.append
+        protocol.makeConnection(StringTransport())
+        self.assertEqual(protocol.state, STATUS)
+        protocol.dataReceived('HTTP/1.1 200 OK' + self.sep)
+        self.assertEqual(status, ['HTTP/1.1 200 OK'])
+        self.assertEqual(protocol.state, HEADER)
+
+
+    def _headerTestSetup(self):
+        header = {}
+        protocol = HTTPParser()
+        protocol.headerReceived = header.__setitem__
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK' + self.sep)
+        return header, protocol
+
+
+    def test_headerCallback(self):
+        """
+        L{HTTPParser} calls its C{headerReceived} method when it receives a
+        header.
+        """
+        header, protocol = self._headerTestSetup()
+        protocol.dataReceived('X-Foo:bar' + self.sep)
+        # Cannot tell it's not a continued header until the next line arrives
+        # and is not a continuation
+        protocol.dataReceived(self.sep)
+        self.assertEqual(header, {'X-Foo': 'bar'})
+        self.assertEqual(protocol.state, BODY)
+
+
+    def test_continuedHeaderCallback(self):
+        """
+        If a header is split over multiple lines, L{HTTPParser} calls
+        C{headerReceived} with the entire value once it is received.
+        """
+        header, protocol = self._headerTestSetup()
+        protocol.dataReceived('X-Foo: bar' + self.sep)
+        protocol.dataReceived(' baz' + self.sep)
+        protocol.dataReceived('\tquux' + self.sep)
+        protocol.dataReceived(self.sep)
+        self.assertEqual(header, {'X-Foo': 'bar baz\tquux'})
+        self.assertEqual(protocol.state, BODY)
+
+
+    def test_fieldContentWhitespace(self):
+        """
+        Leading and trailing linear whitespace is stripped from the header
+        value passed to the C{headerReceived} callback.
+        """
+        header, protocol = self._headerTestSetup()
+        value = ' \t %(sep)s bar \t%(sep)s \t%(sep)s' % dict(sep=self.sep)
+        protocol.dataReceived('X-Bar:' + value)
+        protocol.dataReceived('X-Foo:' + value)
+        protocol.dataReceived(self.sep)
+        self.assertEqual(header, {'X-Foo': 'bar',
+                                  'X-Bar': 'bar'})
+
+
+    def test_allHeadersCallback(self):
+        """
+        After the last header is received, L{HTTPParser} calls
+        C{allHeadersReceived}.
+        """
+        called = []
+        header, protocol = self._headerTestSetup()
+        def allHeadersReceived():
+            called.append(protocol.state)
+            protocol.state = STATUS
+        protocol.allHeadersReceived = allHeadersReceived
+        protocol.dataReceived(self.sep)
+        self.assertEqual(called, [HEADER])
+        self.assertEqual(protocol.state, STATUS)
+
+
+    def test_noHeaderCallback(self):
+        """
+        If there are no headers in the message, L{HTTPParser} does not call
+        C{headerReceived}.
+        """
+        header, protocol = self._headerTestSetup()
+        protocol.dataReceived(self.sep)
+        self.assertEqual(header, {})
+        self.assertEqual(protocol.state, BODY)
+
+
+    def test_headersSavedOnResponse(self):
+        """
+        All headers received by L{HTTPParser} are added to
+        L{HTTPParser.headers}.
+        """
+        protocol = HTTPParser()
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK' + self.sep)
+        protocol.dataReceived('X-Foo: bar' + self.sep)
+        protocol.dataReceived('X-Foo: baz' + self.sep)
+        protocol.dataReceived(self.sep)
+        expected = [('X-Foo', ['bar', 'baz'])]
+        self.assertEqual(expected, list(protocol.headers.getAllRawHeaders()))
+
+
+    def test_connectionControlHeaders(self):
+        """
+        L{HTTPParser.isConnectionControlHeader} returns C{True} for headers
+        which are always connection control headers (similar to "hop-by-hop"
+        headers from RFC 2616 section 13.5.1) and C{False} for other headers.
+        """
+        protocol = HTTPParser()
+        connHeaderNames = [
+            'content-length', 'connection', 'keep-alive', 'te', 'trailers',
+            'transfer-encoding', 'upgrade', 'proxy-connection']
+
+        for header in connHeaderNames:
+            self.assertTrue(
+                protocol.isConnectionControlHeader(header),
+                "Expecting %r to be a connection control header, but "
+                "wasn't" % (header,))
+        self.assertFalse(
+            protocol.isConnectionControlHeader("date"),
+            "Expecting the arbitrarily selected 'date' header to not be "
+            "a connection control header, but was.")
+
+
+    def test_switchToBodyMode(self):
+        """
+        L{HTTPParser.switchToBodyMode} raises L{RuntimeError} if called more
+        than once.
+        """
+        protocol = HTTPParser()
+        protocol.makeConnection(StringTransport())
+        protocol.switchToBodyMode(object())
+        self.assertRaises(RuntimeError, protocol.switchToBodyMode, object())
+
+
+
+class HTTPParserTestsRFCComplaintDelimeter(_HTTPParserTests, TestCase):
+    """
+    L{_HTTPParserTests} using standard CR LF newlines.
+    """
+    sep = '\r\n'
+
+
+
+class HTTPParserTestsNonRFCComplaintDelimeter(_HTTPParserTests, TestCase):
+    """
+    L{_HTTPParserTests} using bare LF newlines.
+    """
+    sep = '\n'
+
+
+
+class HTTPClientParserTests(TestCase):
+    """
+    Tests for L{HTTPClientParser} which is responsible for parsing HTTP
+    response messages.
+    """
+    def test_parseVersion(self):
+        """
+        L{HTTPClientParser.parseVersion} parses a status line into its three
+        components.
+        """
+        protocol = HTTPClientParser(None, None)
+        self.assertEqual(
+            protocol.parseVersion('CANDY/7.2'),
+            ('CANDY', 7, 2))
+
+
+    def test_parseBadVersion(self):
+        """
+        L{HTTPClientParser.parseVersion} raises L{BadResponseVersion} when
+        passed an unparsable version.
+        """
+        protocol = HTTPClientParser(None, None)
+        e = BadResponseVersion
+        f = protocol.parseVersion
+
+        def checkParsing(s):
+            exc = self.assertRaises(e, f, s)
+            self.assertEqual(exc.data, s)
+
+        checkParsing('foo')
+        checkParsing('foo/bar/baz')
+
+        checkParsing('foo/')
+        checkParsing('foo/..')
+
+        checkParsing('foo/a.b')
+        checkParsing('foo/-1.-1')
+
+
+    def test_responseStatusParsing(self):
+        """
+        L{HTTPClientParser.statusReceived} parses the version, code, and phrase
+        from the status line and stores them on the response object.
+        """
+        request = Request('GET', '/', _boringHeaders, None)
+        protocol = HTTPClientParser(request, None)
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+        self.assertEqual(protocol.response.version, ('HTTP', 1, 1))
+        self.assertEqual(protocol.response.code, 200)
+        self.assertEqual(protocol.response.phrase, 'OK')
+
+
+    def test_badResponseStatus(self):
+        """
+        L{HTTPClientParser.statusReceived} raises L{ParseError} if it is called
+        with a status line which cannot be parsed.
+        """
+        protocol = HTTPClientParser(None, None)
+
+        def checkParsing(s):
+            exc = self.assertRaises(ParseError, protocol.statusReceived, s)
+            self.assertEqual(exc.data, s)
+
+        # If there are fewer than three whitespace-delimited parts to the
+        # status line, it is not valid and cannot be parsed.
+        checkParsing('foo')
+        checkParsing('HTTP/1.1 200')
+
+        # If the response code is not an integer, the status line is not valid
+        # and cannot be parsed.
+        checkParsing('HTTP/1.1 bar OK')
+
+
+    def _noBodyTest(self, request, response):
+        """
+        Assert that L{HTTPClientParser} parses the given C{response} to
+        C{request}, resulting in a response with no body and no extra bytes and
+        leaving the transport in the producing state.
+
+        @param request: A L{Request} instance which might have caused a server
+            to return the given response.
+        @param response: A string giving the response to be parsed.
+
+        @return: A C{dict} of headers from the response.
+        """
+        header = {}
+        finished = []
+        protocol = HTTPClientParser(request, finished.append)
+        protocol.headerReceived = header.__setitem__
+        body = []
+        protocol._bodyDataReceived = body.append
+        transport = StringTransport()
+        protocol.makeConnection(transport)
+        protocol.dataReceived(response)
+        self.assertEqual(transport.producerState, 'producing')
+        self.assertEqual(protocol.state, DONE)
+        self.assertEqual(body, [])
+        self.assertEqual(finished, [''])
+        self.assertEqual(protocol.response.length, 0)
+        return header
+
+
+    def test_headResponse(self):
+        """
+        If the response is to a HEAD request, no body is expected, the body
+        callback is not invoked, and the I{Content-Length} header is passed to
+        the header callback.
+        """
+        request = Request('HEAD', '/', _boringHeaders, None)
+        status = (
+            'HTTP/1.1 200 OK\r\n'
+            'Content-Length: 10\r\n'
+            '\r\n')
+        header = self._noBodyTest(request, status)
+        self.assertEqual(header, {'Content-Length': '10'})
+
+
+    def test_noContentResponse(self):
+        """
+        If the response code is I{NO CONTENT} (204), no body is expected and
+        the body callback is not invoked.
+        """
+        request = Request('GET', '/', _boringHeaders, None)
+        status = (
+            'HTTP/1.1 204 NO CONTENT\r\n'
+            '\r\n')
+        self._noBodyTest(request, status)
+
+
+    def test_notModifiedResponse(self):
+        """
+        If the response code is I{NOT MODIFIED} (304), no body is expected and
+        the body callback is not invoked.
+        """
+        request = Request('GET', '/', _boringHeaders, None)
+        status = (
+            'HTTP/1.1 304 NOT MODIFIED\r\n'
+            '\r\n')
+        self._noBodyTest(request, status)
+
+
+    def test_responseHeaders(self):
+        """
+        The response headers are added to the response object's C{headers}
+        L{Headers} instance.
+        """
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None),
+            lambda rest: None)
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+        protocol.dataReceived('X-Foo: bar\r\n')
+        protocol.dataReceived('\r\n')
+        self.assertEqual(
+            protocol.connHeaders,
+            Headers({}))
+        self.assertEqual(
+            protocol.response.headers,
+            Headers({'x-foo': ['bar']}))
+        self.assertIdentical(protocol.response.length, UNKNOWN_LENGTH)
+
+
+    def test_connectionHeaders(self):
+        """
+        The connection control headers are added to the parser's C{connHeaders}
+        L{Headers} instance.
+        """
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None),
+            lambda rest: None)
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+        protocol.dataReceived('Content-Length: 123\r\n')
+        protocol.dataReceived('Connection: close\r\n')
+        protocol.dataReceived('\r\n')
+        self.assertEqual(
+            protocol.response.headers,
+            Headers({}))
+        self.assertEqual(
+            protocol.connHeaders,
+            Headers({'content-length': ['123'],
+                     'connection': ['close']}))
+        self.assertEqual(protocol.response.length, 123)
+
+
+    def test_headResponseContentLengthEntityHeader(self):
+        """
+        If a HEAD request is made, the I{Content-Length} header in the response
+        is added to the response headers, not the connection control headers.
+        """
+        protocol = HTTPClientParser(
+            Request('HEAD', '/', _boringHeaders, None),
+            lambda rest: None)
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+        protocol.dataReceived('Content-Length: 123\r\n')
+        protocol.dataReceived('\r\n')
+        self.assertEqual(
+            protocol.response.headers,
+            Headers({'content-length': ['123']}))
+        self.assertEqual(
+            protocol.connHeaders,
+            Headers({}))
+        self.assertEqual(protocol.response.length, 0)
+
+
+    def test_contentLength(self):
+        """
+        If a response includes a body with a length given by the
+        I{Content-Length} header, the bytes which make up the body are passed
+        to the C{_bodyDataReceived} callback on the L{HTTPParser}.
+        """
+        finished = []
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None),
+            finished.append)
+        transport = StringTransport()
+        protocol.makeConnection(transport)
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+        body = []
+        protocol.response._bodyDataReceived = body.append
+        protocol.dataReceived('Content-Length: 10\r\n')
+        protocol.dataReceived('\r\n')
+
+        # Incidentally, the transport should be paused now.  It is the response
+        # object's responsibility to resume this when it is ready for bytes.
+        self.assertEqual(transport.producerState, 'paused')
+
+        self.assertEqual(protocol.state, BODY)
+        protocol.dataReceived('x' * 6)
+        self.assertEqual(body, ['x' * 6])
+        self.assertEqual(protocol.state, BODY)
+        protocol.dataReceived('y' * 4)
+        self.assertEqual(body, ['x' * 6, 'y' * 4])
+        self.assertEqual(protocol.state, DONE)
+        self.assertEqual(finished, [''])
+
+
+    def test_zeroContentLength(self):
+        """
+        If a response includes a I{Content-Length} header indicating zero bytes
+        in the response, L{Response.length} is set accordingly and no data is
+        delivered to L{Response._bodyDataReceived}.
+        """
+        finished = []
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None),
+            finished.append)
+
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+
+        body = []
+        protocol.response._bodyDataReceived = body.append
+
+        protocol.dataReceived('Content-Length: 0\r\n')
+        protocol.dataReceived('\r\n')
+
+        self.assertEqual(protocol.state, DONE)
+        self.assertEqual(body, [])
+        self.assertEqual(finished, [''])
+        self.assertEqual(protocol.response.length, 0)
+
+
+
+    def test_multipleContentLengthHeaders(self):
+        """
+        If a response includes multiple I{Content-Length} headers,
+        L{HTTPClientParser.dataReceived} raises L{ValueError} to indicate that
+        the response is invalid and the transport is now unusable.
+        """
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None),
+            None)
+
+        protocol.makeConnection(StringTransport())
+        self.assertRaises(
+            ValueError,
+            protocol.dataReceived,
+            'HTTP/1.1 200 OK\r\n'
+            'Content-Length: 1\r\n'
+            'Content-Length: 2\r\n'
+            '\r\n')
+
+
+    def test_extraBytesPassedBack(self):
+        """
+        If extra bytes are received past the end of a response, they are passed
+        to the finish callback.
+        """
+        finished = []
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None),
+            finished.append)
+
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+        protocol.dataReceived('Content-Length: 0\r\n')
+        protocol.dataReceived('\r\nHere is another thing!')
+        self.assertEqual(protocol.state, DONE)
+        self.assertEqual(finished, ['Here is another thing!'])
+
+
+    def test_extraBytesPassedBackHEAD(self):
+        """
+        If extra bytes are received past the end of the headers of a response
+        to a HEAD request, they are passed to the finish callback.
+        """
+        finished = []
+        protocol = HTTPClientParser(
+            Request('HEAD', '/', _boringHeaders, None),
+            finished.append)
+
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+        protocol.dataReceived('Content-Length: 12\r\n')
+        protocol.dataReceived('\r\nHere is another thing!')
+        self.assertEqual(protocol.state, DONE)
+        self.assertEqual(finished, ['Here is another thing!'])
+
+
+    def test_chunkedResponseBody(self):
+        """
+        If the response headers indicate the response body is encoded with the
+        I{chunked} transfer encoding, the body is decoded according to that
+        transfer encoding before being passed to L{Response._bodyDataReceived}.
+        """
+        finished = []
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None),
+            finished.append)
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+
+        body = []
+        protocol.response._bodyDataReceived = body.append
+
+        protocol.dataReceived('Transfer-Encoding: chunked\r\n')
+        protocol.dataReceived('\r\n')
+
+        # No data delivered yet
+        self.assertEqual(body, [])
+
+        # Cannot predict the length of a chunked encoded response body.
+        self.assertIdentical(protocol.response.length, UNKNOWN_LENGTH)
+
+        # Deliver some chunks and make sure the data arrives
+        protocol.dataReceived('3\r\na')
+        self.assertEqual(body, ['a'])
+        protocol.dataReceived('bc\r\n')
+        self.assertEqual(body, ['a', 'bc'])
+
+        # The response's _bodyDataFinished method should be called when the last
+        # chunk is received.  Extra data should be passed to the finished
+        # callback.
+        protocol.dataReceived('0\r\n\r\nextra')
+        self.assertEqual(finished, ['extra'])
+
+
+    def test_unknownContentLength(self):
+        """
+        If a response does not include a I{Transfer-Encoding} or a
+        I{Content-Length}, the end of response body is indicated by the
+        connection being closed.
+        """
+        finished = []
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None), finished.append)
+        transport = StringTransport()
+        protocol.makeConnection(transport)
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+
+        body = []
+        protocol.response._bodyDataReceived = body.append
+
+        protocol.dataReceived('\r\n')
+        protocol.dataReceived('foo')
+        protocol.dataReceived('bar')
+        self.assertEqual(body, ['foo', 'bar'])
+        protocol.connectionLost(ConnectionDone("simulated end of connection"))
+        self.assertEqual(finished, [''])
+
+
+    def test_contentLengthAndTransferEncoding(self):
+        """
+        According to RFC 2616, section 4.4, point 3, if I{Content-Length} and
+        I{Transfer-Encoding: chunked} are present, I{Content-Length} MUST be
+        ignored.
+        """
+        finished = []
+        protocol = HTTPClientParser(
+            Request('GET', '/', _boringHeaders, None), finished.append)
+        transport = StringTransport()
+        protocol.makeConnection(transport)
+        protocol.dataReceived('HTTP/1.1 200 OK\r\n')
+
+        body = []
+        protocol.response._bodyDataReceived = body.append
+
+        protocol.dataReceived(
+            'Content-Length: 102\r\n'
+            'Transfer-Encoding: chunked\r\n'
+            '\r\n'
+            '3\r\n'
+            'abc\r\n'
+            '0\r\n'
+            '\r\n')
+
+        self.assertEqual(body, ['abc'])
+        self.assertEqual(finished, [''])
+
+
+    def test_connectionLostBeforeBody(self):
+        """
+        If L{HTTPClientParser.connectionLost} is called before the headers are
+        finished, the C{_responseDeferred} is fired with the L{Failure} passed
+        to C{connectionLost}.
+        """
+        transport = StringTransport()
+        protocol = HTTPClientParser(Request('GET', '/', _boringHeaders, None), None)
+        protocol.makeConnection(transport)
+        # Grab this here because connectionLost gets rid of the attribute
+        responseDeferred = protocol._responseDeferred
+        protocol.connectionLost(Failure(ArbitraryException()))
+
+        return assertResponseFailed(
+            self, responseDeferred, [ArbitraryException])
+
+
+    def test_connectionLostWithError(self):
+        """
+        If one of the L{Response} methods called by
+        L{HTTPClientParser.connectionLost} raises an exception, the exception
+        is logged and not re-raised.
+        """
+        transport = StringTransport()
+        protocol = HTTPClientParser(Request('GET', '/', _boringHeaders, None),
+                                    None)
+        protocol.makeConnection(transport)
+
+        response = []
+        protocol._responseDeferred.addCallback(response.append)
+        protocol.dataReceived(
+            'HTTP/1.1 200 OK\r\n'
+            'Content-Length: 1\r\n'
+            '\r\n')
+        response = response[0]
+
+        # Arrange for an exception
+        def fakeBodyDataFinished(err=None):
+            raise ArbitraryException()
+        response._bodyDataFinished = fakeBodyDataFinished
+
+        protocol.connectionLost(None)
+
+        self.assertEqual(len(self.flushLoggedErrors(ArbitraryException)), 1)
+
+
+    def test_noResponseAtAll(self):
+        """
+        If no response at all was received and the connection is lost, the
+        resulting error is L{ResponseNeverReceived}.
+        """
+        protocol = HTTPClientParser(
+            Request('HEAD', '/', _boringHeaders, None),
+            lambda ign: None)
+        d = protocol._responseDeferred
+
+        protocol.makeConnection(StringTransport())
+        protocol.connectionLost(ConnectionLost())
+        return self.assertFailure(d, ResponseNeverReceived)
+
+
+    def test_someResponseButNotAll(self):
+        """
+        If a partial response was received and the connection is lost, the
+        resulting error is L{ResponseFailed}, but not
+        L{ResponseNeverReceived}.
+        """
+        protocol = HTTPClientParser(
+            Request('HEAD', '/', _boringHeaders, None),
+            lambda ign: None)
+        d = protocol._responseDeferred
+
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived('2')
+        protocol.connectionLost(ConnectionLost())
+        return self.assertFailure(d, ResponseFailed).addCallback(
+            self.assertNotIsInstance, ResponseNeverReceived)
+
+
+
+class SlowRequest:
+    """
+    L{SlowRequest} is a fake implementation of L{Request} which is easily
+    controlled externally (for example, by code in a test method).
+
+    @ivar stopped: A flag indicating whether C{stopWriting} has been called.
+
+    @ivar finished: After C{writeTo} is called, a L{Deferred} which was
+        returned by that method.  L{SlowRequest} will never fire this
+        L{Deferred}.
+    """
+    method = 'GET'
+    stopped = False
+    persistent = False
+
+    def writeTo(self, transport):
+        self.finished = Deferred()
+        return self.finished
+
+
+    def stopWriting(self):
+        self.stopped = True
+
+
+
+class SimpleRequest:
+    """
+    L{SimpleRequest} is a fake implementation of L{Request} which writes a
+    short, fixed string to the transport passed to its C{writeTo} method and
+    returns a succeeded L{Deferred}.  This vaguely emulates the behavior of a
+    L{Request} with no body producer.
+    """
+    persistent = False
+
+    def writeTo(self, transport):
+        transport.write('SOME BYTES')
+        return succeed(None)
+
+
+
+class HTTP11ClientProtocolTests(TestCase):
+    """
+    Tests for the HTTP 1.1 client protocol implementation,
+    L{HTTP11ClientProtocol}.
+    """
+    def setUp(self):
+        """
+        Create an L{HTTP11ClientProtocol} connected to a fake transport.
+        """
+        self.transport = StringTransport()
+        self.protocol = HTTP11ClientProtocol()
+        self.protocol.makeConnection(self.transport)
+
+
+    def test_request(self):
+        """
+        L{HTTP11ClientProtocol.request} accepts a L{Request} and calls its
+        C{writeTo} method with its own transport.
+        """
+        self.protocol.request(SimpleRequest())
+        self.assertEqual(self.transport.value(), 'SOME BYTES')
+
+
+    def test_secondRequest(self):
+        """
+        The second time L{HTTP11ClientProtocol.request} is called, it returns a
+        L{Deferred} which immediately fires with a L{Failure} wrapping a
+        L{RequestNotSent} exception.
+        """
+        self.protocol.request(SlowRequest())
+        def cbNotSent(ignored):
+            self.assertEqual(self.transport.value(), '')
+        d = self.assertFailure(
+            self.protocol.request(SimpleRequest()), RequestNotSent)
+        d.addCallback(cbNotSent)
+        return d
+
+
+    def test_requestAfterConnectionLost(self):
+        """
+        L{HTTP11ClientProtocol.request} returns a L{Deferred} which immediately
+        fires with a L{Failure} wrapping a L{RequestNotSent} if called after
+        the protocol has been disconnected.
+        """
+        self.protocol.connectionLost(
+            Failure(ConnectionDone("sad transport")))
+        def cbNotSent(ignored):
+            self.assertEqual(self.transport.value(), '')
+        d = self.assertFailure(
+            self.protocol.request(SimpleRequest()), RequestNotSent)
+        d.addCallback(cbNotSent)
+        return d
+
+
+    def test_failedWriteTo(self):
+        """
+        If the L{Deferred} returned by L{Request.writeTo} fires with a
+        L{Failure}, L{HTTP11ClientProtocol.request} disconnects its transport
+        and returns a L{Deferred} which fires with a L{Failure} of
+        L{RequestGenerationFailed} wrapping the underlying failure.
+        """
+        class BrokenRequest:
+            persistent = False
+            def writeTo(self, transport):
+                return fail(ArbitraryException())
+
+        d = self.protocol.request(BrokenRequest())
+        def cbFailed(ignored):
+            self.assertTrue(self.transport.disconnecting)
+            # Simulate what would happen if the protocol had a real transport
+            # and make sure no exception is raised.
+            self.protocol.connectionLost(
+                Failure(ConnectionDone("you asked for it")))
+        d = assertRequestGenerationFailed(self, d, [ArbitraryException])
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_synchronousWriteToError(self):
+        """
+        If L{Request.writeTo} raises an exception,
+        L{HTTP11ClientProtocol.request} returns a L{Deferred} which fires with
+        a L{Failure} of L{RequestGenerationFailed} wrapping that exception.
+        """
+        class BrokenRequest:
+            persistent = False
+            def writeTo(self, transport):
+                raise ArbitraryException()
+
+        d = self.protocol.request(BrokenRequest())
+        return assertRequestGenerationFailed(self, d, [ArbitraryException])
+
+
+    def test_connectionLostDuringRequestGeneration(self, mode=None):
+        """
+        If L{HTTP11ClientProtocol}'s transport is disconnected before the
+        L{Deferred} returned by L{Request.writeTo} fires, the L{Deferred}
+        returned by L{HTTP11ClientProtocol.request} fires with a L{Failure} of
+        L{RequestTransmissionFailed} wrapping the underlying failure.
+        """
+        request = SlowRequest()
+        d = self.protocol.request(request)
+        d = assertRequestTransmissionFailed(self, d, [ArbitraryException])
+
+        # The connection hasn't been lost yet.  The request should still be
+        # allowed to do its thing.
+        self.assertFalse(request.stopped)
+
+        self.protocol.connectionLost(Failure(ArbitraryException()))
+
+        # Now the connection has been lost.  The request should have been told
+        # to stop writing itself.
+        self.assertTrue(request.stopped)
+
+        if mode == 'callback':
+            request.finished.callback(None)
+        elif mode == 'errback':
+            request.finished.errback(Failure(AnotherArbitraryException()))
+            errors = self.flushLoggedErrors(AnotherArbitraryException)
+            self.assertEqual(len(errors), 1)
+        else:
+            # Don't fire the writeTo Deferred at all.
+            pass
+        return d
+
+
+    def test_connectionLostBeforeGenerationFinished(self):
+        """
+        If the request passed to L{HTTP11ClientProtocol} finishes generation
+        successfully after the L{HTTP11ClientProtocol}'s connection has been
+        lost, nothing happens.
+        """
+        return self.test_connectionLostDuringRequestGeneration('callback')
+
+
+    def test_connectionLostBeforeGenerationFailed(self):
+        """
+        If the request passed to L{HTTP11ClientProtocol} finished generation
+        with an error after the L{HTTP11ClientProtocol}'s connection has been
+        lost, nothing happens.
+        """
+        return self.test_connectionLostDuringRequestGeneration('errback')
+
+
+    def test_errorMessageOnConnectionLostBeforeGenerationFailedDoesNotConfuse(self):
+        """
+        If the request passed to L{HTTP11ClientProtocol} finished generation
+        with an error after the L{HTTP11ClientProtocol}'s connection has been
+        lost, an error is logged that gives the user a non-confusing hint
+        about what went wrong.
+        """
+        errors = []
+        log.addObserver(errors.append)
+        self.addCleanup(log.removeObserver, errors.append)
+
+        def check(ignore):
+            error = errors[0]
+            self.assertEqual(error['why'],
+                              'Error writing request, but not in valid state '
+                              'to finalize request: CONNECTION_LOST')
+
+        return self.test_connectionLostDuringRequestGeneration(
+            'errback').addCallback(check)
+
+
+    def test_receiveSimplestResponse(self):
+        """
+        When a response is delivered to L{HTTP11ClientProtocol}, the
+        L{Deferred} previously returned by the C{request} method is called back
+        with a L{Response} instance and the connection is closed.
+        """
+        d = self.protocol.request(Request('GET', '/', _boringHeaders, None))
+        def cbRequest(response):
+            self.assertEqual(response.code, 200)
+            self.assertEqual(response.headers, Headers())
+            self.assertTrue(self.transport.disconnecting)
+            self.assertEqual(self.protocol.state, 'QUIESCENT')
+        d.addCallback(cbRequest)
+        self.protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Content-Length: 0\r\n"
+            "Connection: close\r\n"
+            "\r\n")
+        return d
+
+
+    def test_receiveResponseHeaders(self):
+        """
+        The headers included in a response delivered to L{HTTP11ClientProtocol}
+        are included on the L{Response} instance passed to the callback added
+        to the L{Deferred} returned by the C{request} method.
+        """
+        d = self.protocol.request(Request('GET', '/', _boringHeaders, None))
+        def cbRequest(response):
+            expected = Headers({'x-foo': ['bar', 'baz']})
+            self.assertEqual(response.headers, expected)
+        d.addCallback(cbRequest)
+        self.protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "X-Foo: bar\r\n"
+            "X-Foo: baz\r\n"
+            "\r\n")
+        return d
+
+
+    def test_receiveResponseBeforeRequestGenerationDone(self):
+        """
+        If response bytes are delivered to L{HTTP11ClientProtocol} before the
+        L{Deferred} returned by L{Request.writeTo} fires, those response bytes
+        are parsed as part of the response.
+
+        The connection is also closed, because we're in a confusing state, and
+        therefore the C{quiescentCallback} isn't called.
+        """
+        quiescentResult = []
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol(quiescentResult.append)
+        protocol.makeConnection(transport)
+
+        request = SlowRequest()
+        d = protocol.request(request)
+        protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "X-Foo: bar\r\n"
+            "Content-Length: 6\r\n"
+            "\r\n"
+            "foobar")
+        def cbResponse(response):
+            p = AccumulatingProtocol()
+            whenFinished = p.closedDeferred = Deferred()
+            response.deliverBody(p)
+            self.assertEqual(
+                protocol.state, 'TRANSMITTING_AFTER_RECEIVING_RESPONSE')
+            self.assertTrue(transport.disconnecting)
+            self.assertEqual(quiescentResult, [])
+            return whenFinished.addCallback(
+                lambda ign: (response, p.data))
+        d.addCallback(cbResponse)
+        def cbAllResponse((response, body)):
+            self.assertEqual(response.version, ('HTTP', 1, 1))
+            self.assertEqual(response.code, 200)
+            self.assertEqual(response.phrase, 'OK')
+            self.assertEqual(response.headers, Headers({'x-foo': ['bar']}))
+            self.assertEqual(body, "foobar")
+
+            # Also nothing bad should happen if the request does finally
+            # finish, even though it is completely irrelevant.
+            request.finished.callback(None)
+
+        d.addCallback(cbAllResponse)
+        return d
+
+
+    def test_connectionLostAfterReceivingResponseBeforeRequestGenerationDone(self):
+        """
+        If response bytes are delivered to L{HTTP11ClientProtocol} before the
+        request completes, calling L{connectionLost} on the protocol will
+        result in the protocol being moved to the C{'CONNECTION_LOST'} state.
+        """
+        request = SlowRequest()
+        d = self.protocol.request(request)
+        self.protocol.dataReceived(
+            "HTTP/1.1 400 BAD REQUEST\r\n"
+            "Content-Length: 9\r\n"
+            "\r\n"
+            "tisk tisk")
+        def cbResponse(response):
+            p = AccumulatingProtocol()
+            whenFinished = p.closedDeferred = Deferred()
+            response.deliverBody(p)
+            return whenFinished.addCallback(
+                lambda ign: (response, p.data))
+        d.addCallback(cbResponse)
+        def cbAllResponse(ignore):
+            request.finished.callback(None)
+            # Nothing dire will happen when the connection is lost
+            self.protocol.connectionLost(Failure(ArbitraryException()))
+            self.assertEqual(self.protocol._state, 'CONNECTION_LOST')
+        d.addCallback(cbAllResponse)
+        return d
+
+
+    def test_receiveResponseBody(self):
+        """
+        The C{deliverBody} method of the response object with which the
+        L{Deferred} returned by L{HTTP11ClientProtocol.request} fires can be
+        used to get the body of the response.
+        """
+        protocol = AccumulatingProtocol()
+        whenFinished = protocol.closedDeferred = Deferred()
+        requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
+
+        self.protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Content-Length: 6\r\n"
+            "\r")
+
+        # Here's what's going on: all the response headers have been delivered
+        # by this point, so the request Deferred can fire with a Response
+        # object.  The body is yet to come, but that's okay, because the
+        # Response object is how you *get* the body.
+        result = []
+        requestDeferred.addCallback(result.append)
+
+        self.assertEqual(result, [])
+        # Deliver the very last byte of the response.  It is exactly at this
+        # point that the Deferred returned by request should fire.
+        self.protocol.dataReceived("\n")
+        response = result[0]
+
+        response.deliverBody(protocol)
+
+        self.protocol.dataReceived("foo")
+        self.protocol.dataReceived("bar")
+
+        def cbAllResponse(ignored):
+            self.assertEqual(protocol.data, "foobar")
+            protocol.closedReason.trap(ResponseDone)
+        whenFinished.addCallback(cbAllResponse)
+        return whenFinished
+
+
+    def test_responseBodyFinishedWhenConnectionLostWhenContentLengthIsUnknown(
+        self):
+        """
+        If the length of the response body is unknown, the protocol passed to
+        the response's C{deliverBody} method has its C{connectionLost}
+        method called with a L{Failure} wrapping a L{PotentialDataLoss}
+        exception.
+        """
+        requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
+        self.protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "\r\n")
+
+        result = []
+        requestDeferred.addCallback(result.append)
+        response = result[0]
+
+        protocol = AccumulatingProtocol()
+        response.deliverBody(protocol)
+
+        self.protocol.dataReceived("foo")
+        self.protocol.dataReceived("bar")
+
+        self.assertEqual(protocol.data, "foobar")
+        self.protocol.connectionLost(
+            Failure(ConnectionDone("low-level transport disconnected")))
+
+        protocol.closedReason.trap(PotentialDataLoss)
+
+
+    def test_chunkedResponseBodyUnfinishedWhenConnectionLost(self):
+        """
+        If the final chunk has not been received when the connection is lost
+        (for any reason), the protocol passed to C{deliverBody} has its
+        C{connectionLost} method called with a L{Failure} wrapping the
+        exception for that reason.
+        """
+        requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
+        self.protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Transfer-Encoding: chunked\r\n"
+            "\r\n")
+
+        result = []
+        requestDeferred.addCallback(result.append)
+        response = result[0]
+
+        protocol = AccumulatingProtocol()
+        response.deliverBody(protocol)
+
+        self.protocol.dataReceived("3\r\nfoo\r\n")
+        self.protocol.dataReceived("3\r\nbar\r\n")
+
+        self.assertEqual(protocol.data, "foobar")
+
+        self.protocol.connectionLost(Failure(ArbitraryException()))
+
+        return assertResponseFailed(
+            self, fail(protocol.closedReason), [ArbitraryException, _DataLoss])
+
+
+    def test_parserDataReceivedException(self):
+        """
+        If the parser to which L{HTTP11ClientProtocol} delivers bytes raises
+        an exception from C{dataReceived}, the exception is wrapped in a
+        L{Failure} and passed to the parser's C{connectionLost}, and then the
+        L{HTTP11ClientProtocol}'s transport is disconnected.
+        """
+        requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
+        self.protocol.dataReceived('unparseable garbage goes here\r\n')
+        d = assertResponseFailed(self, requestDeferred, [ParseError])
+        def cbFailed(exc):
+            self.assertTrue(self.transport.disconnecting)
+            self.assertEqual(
+                exc.reasons[0].value.data, 'unparseable garbage goes here')
+
+            # Now do what StringTransport doesn't do but a real transport
+            # would: call connectionLost on the HTTP11ClientProtocol.  Nothing
+            # is asserted about this, but it's important for it to not raise an
+            # exception.
+            self.protocol.connectionLost(Failure(ConnectionDone("it is done")))
+
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_proxyStopped(self):
+        """
+        When the HTTP response parser is disconnected, the
+        L{TransportProxyProducer} which was connected to it as a transport is
+        stopped.
+        """
+        requestDeferred = self.protocol.request(Request('GET', '/', _boringHeaders, None))
+        transport = self.protocol._parser.transport
+        self.assertIdentical(transport._producer, self.transport)
+        self.protocol._disconnectParser(Failure(ConnectionDone("connection done")))
+        self.assertIdentical(transport._producer, None)
+        return assertResponseFailed(self, requestDeferred, [ConnectionDone])
+
+
+    def test_abortClosesConnection(self):
+        """
+        L{HTTP11ClientProtocol.abort} will tell the transport to close its
+        connection when it is invoked, and returns a C{Deferred} that fires
+        when the connection is lost.
+        """
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol()
+        protocol.makeConnection(transport)
+        r1 = []
+        r2 = []
+        protocol.abort().addCallback(r1.append)
+        protocol.abort().addCallback(r2.append)
+        self.assertEqual((r1, r2), ([], []))
+        self.assertTrue(transport.disconnecting)
+
+        # Disconnect protocol, the Deferreds will fire:
+        protocol.connectionLost(Failure(ConnectionDone()))
+        self.assertEqual(r1, [None])
+        self.assertEqual(r2, [None])
+
+
+    def test_abortAfterConnectionLost(self):
+        """
+        L{HTTP11ClientProtocol.abort} called after the connection is lost
+        returns a C{Deferred} that fires immediately.
+        """
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol()
+        protocol.makeConnection(transport)
+        protocol.connectionLost(Failure(ConnectionDone()))
+
+        result = []
+        protocol.abort().addCallback(result.append)
+        self.assertEqual(result, [None])
+        self.assertEqual(protocol._state, "CONNECTION_LOST")
+
+
+    def test_abortBeforeResponseBody(self):
+        """
+        The Deferred returned by L{HTTP11ClientProtocol.request} will fire
+        with a L{ResponseFailed} failure containing a L{ConnectionAborted}
+        exception, if the connection was aborted before all response headers
+        have been received.
+        """
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol()
+        protocol.makeConnection(transport)
+        result = protocol.request(Request('GET', '/', _boringHeaders, None))
+        protocol.abort()
+        self.assertTrue(transport.disconnecting)
+        protocol.connectionLost(Failure(ConnectionDone()))
+        return assertResponseFailed(self, result, [ConnectionAborted])
+
+
+    def test_abortAfterResponseHeaders(self):
+        """
+        When the connection is aborted after the response headers have
+        been received and the L{Response} has been made available to
+        application code, the response body protocol's C{connectionLost}
+        method will be invoked with a L{ResponseFailed} failure containing a
+        L{ConnectionAborted} exception.
+        """
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol()
+        protocol.makeConnection(transport)
+        result = protocol.request(Request('GET', '/', _boringHeaders, None))
+
+        protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Content-Length: 1\r\n"
+            "\r\n"
+            )
+
+        testResult = Deferred()
+
+        class BodyDestination(Protocol):
+            """
+            A response body protocol which immediately aborts the HTTP
+            connection.
+            """
+            def connectionMade(self):
+                """
+                Abort the HTTP connection.
+                """
+                protocol.abort()
+
+            def connectionLost(self, reason):
+                """
+                Make the reason the connection was lost available to the unit
+                test via C{testResult}.
+                """
+                testResult.errback(reason)
+
+
+        def deliverBody(response):
+            """
+            Connect the L{BodyDestination} response body protocol to the
+            response, and then simulate connection loss after ensuring that
+            the HTTP connection has been aborted.
+            """
+            response.deliverBody(BodyDestination())
+            self.assertTrue(transport.disconnecting)
+            protocol.connectionLost(Failure(ConnectionDone()))
+
+
+        def checkError(error):
+            self.assertIsInstance(error.response, Response)
+
+
+        result.addCallback(deliverBody)
+        deferred = assertResponseFailed(self, testResult,
+                                        [ConnectionAborted, _DataLoss])
+        return deferred.addCallback(checkError)
+
+
+    def test_quiescentCallbackCalled(self):
+        """
+        If, after a response is done, the L{HTTP11ClientProtocol} stays open
+        and returns to the QUIESCENT state, all per-request state is reset and
+        the C{quiescentCallback} is called with the protocol instance.
+
+        This is useful for implementing a persistent connection pool.
+
+        The C{quiescentCallback} is called *before* the response-receiving
+        protocol's C{connectionLost}, so that new requests triggered by the
+        end of the first request can re-use a persistent connection.
+        """
+        quiescentResult = []
+        def callback(p):
+            self.assertEqual(p, protocol)
+            self.assertEqual(p.state, "QUIESCENT")
+            quiescentResult.append(p)
+
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol(callback)
+        protocol.makeConnection(transport)
+
+        requestDeferred = protocol.request(
+            Request('GET', '/', _boringHeaders, None, persistent=True))
+        protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Content-length: 3\r\n"
+            "\r\n")
+
+        # Headers done, but still no quiescent callback:
+        self.assertEqual(quiescentResult, [])
+
+        result = []
+        requestDeferred.addCallback(result.append)
+        response = result[0]
+
+        # When response body is done (i.e. connectionLost is called), note the
+        # fact in quiescentResult:
+        bodyProtocol = AccumulatingProtocol()
+        bodyProtocol.closedDeferred = Deferred()
+        bodyProtocol.closedDeferred.addCallback(
+            lambda ign: quiescentResult.append("response done"))
+
+        response.deliverBody(bodyProtocol)
+        protocol.dataReceived("abc")
+        bodyProtocol.closedReason.trap(ResponseDone)
+        # Quiescent callback called *before* the protocol handling the
+        # response body gets its connectionLost called:
+        self.assertEqual(quiescentResult, [protocol, "response done"])
+
+        # Make sure everything was cleaned up:
+        self.assertEqual(protocol._parser, None)
+        self.assertEqual(protocol._finishedRequest, None)
+        self.assertEqual(protocol._currentRequest, None)
+        self.assertEqual(protocol._transportProxy, None)
+        self.assertEqual(protocol._responseDeferred, None)
+
+
+    def test_quiescentCallbackCalledEmptyResponse(self):
+        """
+        The quiescentCallback is called before the request C{Deferred} fires,
+        in cases where the response has no body.
+        """
+        quiescentResult = []
+        def callback(p):
+            self.assertEqual(p, protocol)
+            self.assertEqual(p.state, "QUIESCENT")
+            quiescentResult.append(p)
+
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol(callback)
+        protocol.makeConnection(transport)
+
+        requestDeferred = protocol.request(
+            Request('GET', '/', _boringHeaders, None, persistent=True))
+        requestDeferred.addCallback(quiescentResult.append)
+        protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Content-length: 0\r\n"
+            "\r\n")
+
+        self.assertEqual(len(quiescentResult), 2)
+        self.assertIdentical(quiescentResult[0], protocol)
+        self.assertIsInstance(quiescentResult[1], Response)
+
+
+    def test_quiescentCallbackNotCalled(self):
+        """
+        If the response delivered to the L{HTTP11ClientProtocol} includes a
+        C{Connection: close} header, the C{quiescentCallback} is not called
+        and the connection is lost.
+        """
+        quiescentResult = []
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol(quiescentResult.append)
+        protocol.makeConnection(transport)
+
+        requestDeferred = protocol.request(
+            Request('GET', '/', _boringHeaders, None, persistent=True))
+        protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Content-length: 0\r\n"
+            "Connection: close\r\n"
+            "\r\n")
+
+        result = []
+        requestDeferred.addCallback(result.append)
+        response = result[0]
+
+        bodyProtocol = AccumulatingProtocol()
+        response.deliverBody(bodyProtocol)
+        bodyProtocol.closedReason.trap(ResponseDone)
+        self.assertEqual(quiescentResult, [])
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_quiescentCallbackNotCalledNonPersistentQuery(self):
+        """
+        If the request was non-persistent (i.e. sent C{Connection: close}),
+        the C{quiescentCallback} is not called and the connection is lost.
+        """
+        quiescentResult = []
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol(quiescentResult.append)
+        protocol.makeConnection(transport)
+
+        requestDeferred = protocol.request(
+            Request('GET', '/', _boringHeaders, None, persistent=False))
+        protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Content-length: 0\r\n"
+            "\r\n")
+
+        result = []
+        requestDeferred.addCallback(result.append)
+        response = result[0]
+
+        bodyProtocol = AccumulatingProtocol()
+        response.deliverBody(bodyProtocol)
+        bodyProtocol.closedReason.trap(ResponseDone)
+        self.assertEqual(quiescentResult, [])
+        self.assertTrue(transport.disconnecting)
+
+
+    def test_quiescentCallbackThrows(self):
+        """
+        If C{quiescentCallback} throws an exception, the error is logged and
+        the protocol is disconnected.
+        """
+        def callback(p):
+            raise ZeroDivisionError()
+
+        transport = StringTransport()
+        protocol = HTTP11ClientProtocol(callback)
+        protocol.makeConnection(transport)
+
+        requestDeferred = protocol.request(
+            Request('GET', '/', _boringHeaders, None, persistent=True))
+        protocol.dataReceived(
+            "HTTP/1.1 200 OK\r\n"
+            "Content-length: 0\r\n"
+            "\r\n")
+
+        result = []
+        requestDeferred.addCallback(result.append)
+        response = result[0]
+        bodyProtocol = AccumulatingProtocol()
+        response.deliverBody(bodyProtocol)
+        bodyProtocol.closedReason.trap(ResponseDone)
+
+        errors = self.flushLoggedErrors(ZeroDivisionError)
+        self.assertEqual(len(errors), 1)
+        self.assertTrue(transport.disconnecting)
+
+
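+# Illustrative sketch, not part of the upstream module: the quiescent
+# callback exercised by the tests above exists so that a connection pool can
+# reclaim idle connections.  The pool class and method names below are
+# hypothetical.
+class _ExampleConnectionPool:
+    def __init__(self):
+        self._idleProtocols = []
+
+    def quiescentCallback(self, protocol):
+        # HTTP11ClientProtocol(callback) invokes this when a persistent
+        # connection returns to the QUIESCENT state; keep it for reuse.
+        self._idleProtocols.append(protocol)
+
+    def getIdleProtocol(self):
+        if self._idleProtocols:
+            return self._idleProtocols.pop()
+        return None
+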
+
+class StringProducer:
+    """
+    L{StringProducer} is a dummy body producer.
+
+    @ivar stopped: A flag which indicates whether or not C{stopProducing} has
+        been called.
+    @ivar consumer: After C{startProducing} is called, the value of the
+        C{consumer} argument to that method.
+    @ivar finished: After C{startProducing} is called, a L{Deferred} which was
+        returned by that method.  L{StringProducer} will never fire this
+        L{Deferred}.
+    """
+    implements(IBodyProducer)
+
+    stopped = False
+
+    def __init__(self, length):
+        self.length = length
+
+
+    def startProducing(self, consumer):
+        self.consumer = consumer
+        self.finished = Deferred()
+        return self.finished
+
+
+    def stopProducing(self):
+        self.stopped = True
+
+
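+# Illustrative sketch, not part of the upstream module: unlike StringProducer
+# above (which never fires its Deferred), a complete L{IBodyProducer} writes
+# its bytes to the consumer and fires the Deferred returned from
+# startProducing when it is done.  This is the contract the RequestTests
+# below rely on.
+class _ExampleCompleteBodyProducer:
+    implements(IBodyProducer)
+
+    def __init__(self, body):
+        self.body = body
+        self.length = len(body)
+
+    def startProducing(self, consumer):
+        # Write everything synchronously and report completion.
+        consumer.write(self.body)
+        done = Deferred()
+        done.callback(None)
+        return done
+
+    def stopProducing(self):
+        pass
+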
+
+class RequestTests(TestCase):
+    """
+    Tests for L{Request}.
+    """
+    def setUp(self):
+        self.transport = StringTransport()
+
+
+    def test_sendSimplestRequest(self):
+        """
+        L{Request.writeTo} formats the request data and writes it to the given
+        transport.
+        """
+        Request('GET', '/', _boringHeaders, None).writeTo(self.transport)
+        self.assertEqual(
+            self.transport.value(),
+            "GET / HTTP/1.1\r\n"
+            "Connection: close\r\n"
+            "Host: example.com\r\n"
+            "\r\n")
+
+
+    def test_sendSimplestPersistentRequest(self):
+        """
+        A persistent request does not send a 'Connection: close' header.
+        """
+        req = Request('GET', '/', _boringHeaders, None, persistent=True)
+        req.writeTo(self.transport)
+        self.assertEqual(
+            self.transport.value(),
+            "GET / HTTP/1.1\r\n"
+            "Host: example.com\r\n"
+            "\r\n")
+
+
+    def test_sendRequestHeaders(self):
+        """
+        L{Request.writeTo} formats header data and writes it to the given
+        transport.
+        """
+        headers = Headers({'x-foo': ['bar', 'baz'], 'host': ['example.com']})
+        Request('GET', '/foo', headers, None).writeTo(self.transport)
+        lines = self.transport.value().split('\r\n')
+        self.assertEqual(lines[0], "GET /foo HTTP/1.1")
+        self.assertEqual(lines[-2:], ["", ""])
+        del lines[0], lines[-2:]
+        lines.sort()
+        self.assertEqual(
+            lines,
+            ["Connection: close",
+             "Host: example.com",
+             "X-Foo: bar",
+             "X-Foo: baz"])
+
+
+    def test_sendChunkedRequestBody(self):
+        """
+        L{Request.writeTo} uses chunked encoding to write data from the request
+        body producer to the given transport.  It registers the request body
+        producer with the transport.
+        """
+        producer = StringProducer(UNKNOWN_LENGTH)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        request.writeTo(self.transport)
+
+        self.assertNotIdentical(producer.consumer, None)
+        self.assertIdentical(self.transport.producer, producer)
+        self.assertTrue(self.transport.streaming)
+
+        self.assertEqual(
+            self.transport.value(),
+            "POST /bar HTTP/1.1\r\n"
+            "Connection: close\r\n"
+            "Transfer-Encoding: chunked\r\n"
+            "Host: example.com\r\n"
+            "\r\n")
+        self.transport.clear()
+
+        producer.consumer.write('x' * 3)
+        producer.consumer.write('y' * 15)
+        producer.finished.callback(None)
+        self.assertIdentical(self.transport.producer, None)
+        self.assertEqual(
+            self.transport.value(),
+            "3\r\n"
+            "xxx\r\n"
+            "f\r\n"
+            "yyyyyyyyyyyyyyy\r\n"
+            "0\r\n"
+            "\r\n")
+
+
+    def test_sendChunkedRequestBodyWithError(self):
+        """
+        If L{Request} is created with a C{bodyProducer} without a known length
+        and the L{Deferred} returned from its C{startProducing} method fires
+        with a L{Failure}, the L{Deferred} returned by L{Request.writeTo} fires
+        with that L{Failure} and the body producer is unregistered from the
+        transport.  The final zero-length chunk is not written to the
+        transport.
+        """
+        producer = StringProducer(UNKNOWN_LENGTH)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        writeDeferred = request.writeTo(self.transport)
+        self.transport.clear()
+        producer.finished.errback(ArbitraryException())
+        def cbFailed(ignored):
+            self.assertEqual(self.transport.value(), "")
+            self.assertIdentical(self.transport.producer, None)
+        d = self.assertFailure(writeDeferred, ArbitraryException)
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_sendRequestBodyWithLength(self):
+        """
+        If L{Request} is created with a C{bodyProducer} with a known length,
+        that length is sent as the value for the I{Content-Length} header and
+        chunked encoding is not used.
+        """
+        producer = StringProducer(3)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        request.writeTo(self.transport)
+
+        self.assertNotIdentical(producer.consumer, None)
+        self.assertIdentical(self.transport.producer, producer)
+        self.assertTrue(self.transport.streaming)
+
+        self.assertEqual(
+            self.transport.value(),
+            "POST /bar HTTP/1.1\r\n"
+            "Connection: close\r\n"
+            "Content-Length: 3\r\n"
+            "Host: example.com\r\n"
+            "\r\n")
+        self.transport.clear()
+
+        producer.consumer.write('abc')
+        producer.finished.callback(None)
+        self.assertIdentical(self.transport.producer, None)
+        self.assertEqual(self.transport.value(), "abc")
+
+
+    def test_sendRequestBodyWithTooFewBytes(self):
+        """
+        If L{Request} is created with a C{bodyProducer} with a known length and
+        the producer does not produce that many bytes, the L{Deferred} returned
+        by L{Request.writeTo} fires with a L{Failure} wrapping a
+        L{WrongBodyLength} exception.
+        """
+        producer = StringProducer(3)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        writeDeferred = request.writeTo(self.transport)
+        producer.consumer.write('ab')
+        producer.finished.callback(None)
+        self.assertIdentical(self.transport.producer, None)
+        return self.assertFailure(writeDeferred, WrongBodyLength)
+
+
+    def _sendRequestBodyWithTooManyBytesTest(self, finisher):
+        """
+        Verify that when too many bytes have been written by a body producer
+        and the body producer's C{startProducing} L{Deferred} then fires, the
+        producer is unregistered from the transport and the L{Deferred}
+        returned from L{Request.writeTo} is fired with a L{Failure} wrapping a
+        L{WrongBodyLength}.
+
+        @param finisher: A callable which will be invoked with the body
+            producer after too many bytes have been written to the transport.
+            It should fire the startProducing Deferred somehow.
+        """
+        producer = StringProducer(3)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        writeDeferred = request.writeTo(self.transport)
+
+        producer.consumer.write('ab')
+
+        # The producer hasn't misbehaved yet, so it shouldn't have been
+        # stopped.
+        self.assertFalse(producer.stopped)
+
+        producer.consumer.write('cd')
+
+        # Now the producer *has* misbehaved, so we should have tried to
+        # make it stop.
+        self.assertTrue(producer.stopped)
+
+        # The transport should have had the producer unregistered from it as
+        # well.
+        self.assertIdentical(self.transport.producer, None)
+
+        def cbFailed(exc):
+            # The "cd" should not have been written to the transport because
+            # the request can now locally be recognized to be invalid.  If we
+            # had written the extra bytes, the server could have decided to
+            # start processing the request, which would be bad since we're
+            # going to indicate failure locally.
+            self.assertEqual(
+                self.transport.value(),
+                "POST /bar HTTP/1.1\r\n"
+                "Connection: close\r\n"
+                "Content-Length: 3\r\n"
+                "Host: example.com\r\n"
+                "\r\n"
+                "ab")
+            self.transport.clear()
+
+            # Subsequent writes should be ignored, as should firing the
+            # Deferred returned from startProducing.
+            self.assertRaises(ExcessWrite, producer.consumer.write, 'ef')
+
+            # Likewise, if the Deferred returned from startProducing fires,
+            # this should more or less be ignored (aside from possibly logging
+            # an error).
+            finisher(producer)
+
+            # There should have been nothing further written to the transport.
+            self.assertEqual(self.transport.value(), "")
+
+        d = self.assertFailure(writeDeferred, WrongBodyLength)
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_sendRequestBodyWithTooManyBytes(self):
+        """
+        If L{Request} is created with a C{bodyProducer} with a known length and
+        the producer tries to produce more than that many bytes, the
+        L{Deferred} returned by L{Request.writeTo} fires with a L{Failure}
+        wrapping a L{WrongBodyLength} exception.
+        """
+        def finisher(producer):
+            producer.finished.callback(None)
+        return self._sendRequestBodyWithTooManyBytesTest(finisher)
+
+
+    def test_sendRequestBodyErrorWithTooManyBytes(self):
+        """
+        If L{Request} is created with a C{bodyProducer} with a known length and
+        the producer tries to produce more than that many bytes, the
+        L{Deferred} returned by L{Request.writeTo} fires with a L{Failure}
+        wrapping a L{WrongBodyLength} exception.
+        """
+        def finisher(producer):
+            producer.finished.errback(ArbitraryException())
+            errors = self.flushLoggedErrors(ArbitraryException)
+            self.assertEqual(len(errors), 1)
+        return self._sendRequestBodyWithTooManyBytesTest(finisher)
+
+
+    def test_sendRequestBodyErrorWithConsumerError(self):
+        """
+        Though there should be no way for the internal C{finishedConsuming}
+        L{Deferred} in L{Request._writeToContentLength} to fire a L{Failure}
+        after the C{finishedProducing} L{Deferred} has fired, in case this does
+        happen, the error should be logged with a message about how there's
+        probably a bug in L{Request}.
+
+        This is a whitebox test.
+        """
+        producer = StringProducer(3)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        request.writeTo(self.transport)
+
+        finishedConsuming = producer.consumer._finished
+
+        producer.consumer.write('abc')
+        producer.finished.callback(None)
+
+        finishedConsuming.errback(ArbitraryException())
+        self.assertEqual(len(self.flushLoggedErrors(ArbitraryException)), 1)
+
+
+    def _sendRequestBodyFinishedEarlyThenTooManyBytes(self, finisher):
+        """
+        Verify that if the body producer fires its Deferred and then keeps
+        writing to the consumer, the extra writes are ignored and the
+        L{Deferred} returned by L{Request.writeTo} fires with a L{Failure}
+        wrapping the most appropriate exception type.
+        """
+        producer = StringProducer(3)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        writeDeferred = request.writeTo(self.transport)
+
+        producer.consumer.write('ab')
+        finisher(producer)
+        self.assertIdentical(self.transport.producer, None)
+        self.transport.clear()
+        self.assertRaises(ExcessWrite, producer.consumer.write, 'cd')
+        self.assertEqual(self.transport.value(), "")
+        return writeDeferred
+
+
+    def test_sendRequestBodyFinishedEarlyThenTooManyBytes(self):
+        """
+        If the request body producer indicates it is done by firing the
+        L{Deferred} returned from its C{startProducing} method but then goes on
+        to write too many bytes, the L{Deferred} returned by L{Request.writeTo}
+        fires with a L{Failure} wrapping L{WrongBodyLength}.
+        """
+        def finisher(producer):
+            producer.finished.callback(None)
+        return self.assertFailure(
+            self._sendRequestBodyFinishedEarlyThenTooManyBytes(finisher),
+            WrongBodyLength)
+
+
+    def test_sendRequestBodyErroredEarlyThenTooManyBytes(self):
+        """
+        If the request body producer indicates an error by firing the
+        L{Deferred} returned from its C{startProducing} method but then goes on
+        to write too many bytes, the L{Deferred} returned by L{Request.writeTo}
+        fires with that L{Failure} and L{WrongBodyLength} is logged.
+        """
+        def finisher(producer):
+            producer.finished.errback(ArbitraryException())
+        return self.assertFailure(
+            self._sendRequestBodyFinishedEarlyThenTooManyBytes(finisher),
+            ArbitraryException)
+
+
+    def test_sendChunkedRequestBodyFinishedThenWriteMore(self, _with=None):
+        """
+        If the request body producer with an unknown length tries to write
+        after firing the L{Deferred} returned by its C{startProducing} method,
+        the C{write} call raises an exception and does not write anything to
+        the underlying transport.
+        """
+        producer = StringProducer(UNKNOWN_LENGTH)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        writeDeferred = request.writeTo(self.transport)
+        producer.finished.callback(_with)
+        self.transport.clear()
+
+        self.assertRaises(ExcessWrite, producer.consumer.write, 'foo')
+        self.assertEqual(self.transport.value(), "")
+        return writeDeferred
+
+
+    def test_sendChunkedRequestBodyFinishedWithErrorThenWriteMore(self):
+        """
+        If the request body producer with an unknown length tries to write
+        after firing the L{Deferred} returned by its C{startProducing} method
+        with a L{Failure}, the C{write} call raises an exception and does not
+        write anything to the underlying transport.
+        """
+        d = self.test_sendChunkedRequestBodyFinishedThenWriteMore(
+            Failure(ArbitraryException()))
+        return self.assertFailure(d, ArbitraryException)
+
+
+    def test_sendRequestBodyWithError(self):
+        """
+        If the L{Deferred} returned from the C{startProducing} method of the
+        L{IBodyProducer} passed to L{Request} fires with a L{Failure}, the
+        L{Deferred} returned from L{Request.writeTo} fails with that
+        L{Failure}.
+        """
+        producer = StringProducer(5)
+        request = Request('POST', '/bar', _boringHeaders, producer)
+        writeDeferred = request.writeTo(self.transport)
+
+        # Sanity check - the producer should be registered with the underlying
+        # transport.
+        self.assertIdentical(self.transport.producer, producer)
+        self.assertTrue(self.transport.streaming)
+
+        producer.consumer.write('ab')
+        self.assertEqual(
+            self.transport.value(),
+            "POST /bar HTTP/1.1\r\n"
+            "Connection: close\r\n"
+            "Content-Length: 5\r\n"
+            "Host: example.com\r\n"
+            "\r\n"
+            "ab")
+
+        self.assertFalse(self.transport.disconnecting)
+        producer.finished.errback(Failure(ArbitraryException()))
+
+        # Disconnection is handled by a higher level.  Request should leave the
+        # transport alone in this case.
+        self.assertFalse(self.transport.disconnecting)
+
+        # Oh.  Except it should unregister the producer that it registered.
+        self.assertIdentical(self.transport.producer, None)
+
+        return self.assertFailure(writeDeferred, ArbitraryException)
+
+
+    def test_hostHeaderRequired(self):
+        """
+        L{Request.writeTo} raises L{BadHeaders} if there is not exactly one
+        I{Host} header and writes nothing to the given transport.
+        """
+        request = Request('GET', '/', Headers({}), None)
+        self.assertRaises(BadHeaders, request.writeTo, self.transport)
+        self.assertEqual(self.transport.value(), '')
+
+        request = Request('GET', '/', Headers({'Host': ['example.com', 'example.org']}), None)
+        self.assertRaises(BadHeaders, request.writeTo, self.transport)
+        self.assertEqual(self.transport.value(), '')
+
+
+    def test_stopWriting(self):
+        """
+        L{Request.stopWriting} calls its body producer's C{stopProducing}
+        method.
+        """
+        producer = StringProducer(3)
+        request = Request('GET', '/', _boringHeaders, producer)
+        request.writeTo(self.transport)
+        self.assertFalse(producer.stopped)
+        request.stopWriting()
+        self.assertTrue(producer.stopped)
+
+
+    def test_brokenStopProducing(self):
+        """
+        If the body producer's C{stopProducing} method raises an exception,
+        L{Request.stopWriting} logs it and does not re-raise it.
+        """
+        producer = StringProducer(3)
+        def brokenStopProducing():
+            raise ArbitraryException("stopProducing is busted")
+        producer.stopProducing = brokenStopProducing
+
+        request = Request('GET', '/', _boringHeaders, producer)
+        request.writeTo(self.transport)
+        request.stopWriting()
+        self.assertEqual(
+            len(self.flushLoggedErrors(ArbitraryException)), 1)
+
+
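+# Illustrative sketch, not the upstream implementation: the framing decision
+# exercised by the RequestTests above.  A body producer advertising a known
+# length gets a Content-Length header; UNKNOWN_LENGTH falls back to
+# Transfer-Encoding: chunked.
+def _exampleFramingHeader(bodyProducer):
+    if bodyProducer.length is UNKNOWN_LENGTH:
+        return "Transfer-Encoding: chunked\r\n"
+    return "Content-Length: %d\r\n" % (bodyProducer.length,)
+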
+
+class LengthEnforcingConsumerTests(TestCase):
+    """
+    Tests for L{LengthEnforcingConsumer}.
+    """
+    def setUp(self):
+        self.result = Deferred()
+        self.producer = StringProducer(10)
+        self.transport = StringTransport()
+        self.enforcer = LengthEnforcingConsumer(
+            self.producer, self.transport, self.result)
+
+
+    def test_write(self):
+        """
+        L{LengthEnforcingConsumer.write} calls the wrapped consumer's C{write}
+        method with the bytes it is passed, as long as the total number of
+        bytes written does not exceed what the C{length} attribute indicates
+        should be received.
+        """
+        self.enforcer.write('abc')
+        self.assertEqual(self.transport.value(), 'abc')
+        self.transport.clear()
+        self.enforcer.write('def')
+        self.assertEqual(self.transport.value(), 'def')
+
+
+    def test_finishedEarly(self):
+        """
+        L{LengthEnforcingConsumer._noMoreWritesExpected} raises
+        L{WrongBodyLength} if it is called before the indicated number of bytes
+        have been written.
+        """
+        self.enforcer.write('x' * 9)
+        self.assertRaises(WrongBodyLength, self.enforcer._noMoreWritesExpected)
+
+
+    def test_writeTooMany(self, _unregisterAfter=False):
+        """
+        If it is called with a total number of bytes exceeding the indicated
+        limit passed to L{LengthEnforcingConsumer.__init__},
+        L{LengthEnforcingConsumer.write} fires the L{Deferred} with a
+        L{Failure} wrapping a L{WrongBodyLength} and also calls the
+        C{stopProducing} method of the producer.
+        """
+        self.enforcer.write('x' * 10)
+        self.assertFalse(self.producer.stopped)
+        self.enforcer.write('x')
+        self.assertTrue(self.producer.stopped)
+        if _unregisterAfter:
+            self.enforcer._noMoreWritesExpected()
+        return self.assertFailure(self.result, WrongBodyLength)
+
+
+    def test_writeAfterNoMoreExpected(self):
+        """
+        If L{LengthEnforcingConsumer.write} is called after
+        L{LengthEnforcingConsumer._noMoreWritesExpected}, it calls the
+        producer's C{stopProducing} method and raises L{ExcessWrite}.
+        """
+        self.enforcer.write('x' * 10)
+        self.enforcer._noMoreWritesExpected()
+        self.assertFalse(self.producer.stopped)
+        self.assertRaises(ExcessWrite, self.enforcer.write, 'x')
+        self.assertTrue(self.producer.stopped)
+
+
+    def test_finishedLate(self):
+        """
+        L{LengthEnforcingConsumer._noMoreWritesExpected} does nothing (in
+        particular, it does not raise any exception) if called after too many
+        bytes have been passed to C{write}.
+        """
+        return self.test_writeTooMany(True)
+
+
+    def test_finished(self):
+        """
+        If L{LengthEnforcingConsumer._noMoreWritesExpected} is called after
+        the correct number of bytes have been written it returns C{None}.
+        """
+        self.enforcer.write('x' * 10)
+        self.assertIdentical(self.enforcer._noMoreWritesExpected(), None)
+
+
+    def test_stopProducingRaises(self):
+        """
+        If L{LengthEnforcingConsumer.write} calls the producer's
+        C{stopProducing} because too many bytes were written and the
+        C{stopProducing} method raises an exception, the exception is logged
+        and the L{LengthEnforcingConsumer} still errbacks the finished
+        L{Deferred}.
+        """
+        def brokenStopProducing():
+            StringProducer.stopProducing(self.producer)
+            raise ArbitraryException("stopProducing is busted")
+        self.producer.stopProducing = brokenStopProducing
+
+        def cbFinished(ignored):
+            self.assertEqual(
+                len(self.flushLoggedErrors(ArbitraryException)), 1)
+        d = self.test_writeTooMany()
+        d.addCallback(cbFinished)
+        return d
+
+
+
+class RequestBodyConsumerTests(TestCase):
+    """
+    Tests for L{ChunkedEncoder} which sits between an L{ITransport} and a
+    request/response body producer and chunked encodes everything written to
+    it.
+    """
+    def test_interface(self):
+        """
+        L{ChunkedEncoder} instances provide L{IConsumer}.
+        """
+        self.assertTrue(
+            verifyObject(IConsumer, ChunkedEncoder(StringTransport())))
+
+
+    def test_write(self):
+        """
+        L{ChunkedEncoder.write} writes to the transport the chunked encoded
+        form of the bytes passed to it.
+        """
+        transport = StringTransport()
+        encoder = ChunkedEncoder(transport)
+        encoder.write('foo')
+        self.assertEqual(transport.value(), '3\r\nfoo\r\n')
+        transport.clear()
+        encoder.write('x' * 16)
+        self.assertEqual(transport.value(), '10\r\n' + 'x' * 16 + '\r\n')
+
+
+    def test_producerRegistration(self):
+        """
+        L{ChunkedEncoder.registerProducer} registers the given streaming
+        producer with its transport and L{ChunkedEncoder.unregisterProducer}
+        writes a zero-length chunk to its transport and unregisters the
+        transport's producer.
+        """
+        transport = StringTransport()
+        producer = object()
+        encoder = ChunkedEncoder(transport)
+        encoder.registerProducer(producer, True)
+        self.assertIdentical(transport.producer, producer)
+        self.assertTrue(transport.streaming)
+        encoder.unregisterProducer()
+        self.assertIdentical(transport.producer, None)
+        self.assertEqual(transport.value(), '0\r\n\r\n')
+
+
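+# Illustrative sketch, not the upstream ChunkedEncoder: how a single chunk is
+# framed for Transfer-Encoding: chunked.  The length prefix is hexadecimal,
+# which is why sixteen bytes of 'x' appear as '10' in test_write above, and a
+# zero-length chunk ('0\r\n\r\n') terminates the body.
+def _exampleChunkFrame(data):
+    return "%x\r\n%s\r\n" % (len(data), data)
+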
+
+class TransportProxyProducerTests(TestCase):
+    """
+    Tests for L{TransportProxyProducer} which proxies the L{IPushProducer}
+    interface of a transport.
+    """
+    def test_interface(self):
+        """
+        L{TransportProxyProducer} instances provide L{IPushProducer}.
+        """
+        self.assertTrue(
+            verifyObject(IPushProducer, TransportProxyProducer(None)))
+
+
+    def test_stopProxyingUnreferencesProducer(self):
+        """
+        L{TransportProxyProducer._stopProxying} drops the reference to the
+        wrapped L{IPushProducer} provider.
+        """
+        transport = StringTransport()
+        proxy = TransportProxyProducer(transport)
+        self.assertIdentical(proxy._producer, transport)
+        proxy._stopProxying()
+        self.assertIdentical(proxy._producer, None)
+
+
+    def test_resumeProducing(self):
+        """
+        L{TransportProxyProducer.resumeProducing} calls the wrapped
+        transport's C{resumeProducing} method unless told to stop proxying.
+        """
+        transport = StringTransport()
+        transport.pauseProducing()
+
+        proxy = TransportProxyProducer(transport)
+        # The transport should still be paused.
+        self.assertEqual(transport.producerState, 'paused')
+        proxy.resumeProducing()
+        # The transport should now be resumed.
+        self.assertEqual(transport.producerState, 'producing')
+
+        transport.pauseProducing()
+        proxy._stopProxying()
+
+        # The proxy should no longer do anything to the transport.
+        proxy.resumeProducing()
+        self.assertEqual(transport.producerState, 'paused')
+
+
+    def test_pauseProducing(self):
+        """
+        L{TransportProxyProducer.pauseProducing} calls the wrapped transport's
+        C{pauseProducing} method unless told to stop proxying.
+        """
+        transport = StringTransport()
+
+        proxy = TransportProxyProducer(transport)
+        # The transport should still be producing.
+        self.assertEqual(transport.producerState, 'producing')
+        proxy.pauseProducing()
+        # The transport should now be paused.
+        self.assertEqual(transport.producerState, 'paused')
+
+        transport.resumeProducing()
+        proxy._stopProxying()
+
+        # The proxy should no longer do anything to the transport.
+        proxy.pauseProducing()
+        self.assertEqual(transport.producerState, 'producing')
+
+
+    def test_stopProducing(self):
+        """
+        L{TransportProxyProducer.stopProducing} calls the wrapped transport's
+        C{stopProducing} method unless told to stop proxying.
+        """
+        transport = StringTransport()
+        proxy = TransportProxyProducer(transport)
+        # The transport should still be producing.
+        self.assertEqual(transport.producerState, 'producing')
+        proxy.stopProducing()
+        # The transport should now be stopped.
+        self.assertEqual(transport.producerState, 'stopped')
+
+        transport = StringTransport()
+        proxy = TransportProxyProducer(transport)
+        proxy._stopProxying()
+        proxy.stopProducing()
+        # The transport should not have been stopped.
+        self.assertEqual(transport.producerState, 'producing')
+
+
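+# Illustrative sketch with a hypothetical name, not the upstream class: the
+# proxying pattern described by TransportProxyProducerTests above.  Calls are
+# forwarded to the wrapped producer until the reference is dropped, after
+# which they become no-ops.
+class _ExampleProxyProducer:
+    implements(IPushProducer)
+
+    def __init__(self, producer):
+        self._producer = producer
+
+    def _stopProxying(self):
+        # Forget the wrapped producer; later calls do nothing.
+        self._producer = None
+
+    def pauseProducing(self):
+        if self._producer is not None:
+            self._producer.pauseProducing()
+
+    def resumeProducing(self):
+        if self._producer is not None:
+            self._producer.resumeProducing()
+
+    def stopProducing(self):
+        if self._producer is not None:
+            self._producer.stopProducing()
+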
+
+class ResponseTests(TestCase):
+    """
+    Tests for L{Response}.
+    """
+
+    def test_verifyInterface(self):
+        """
+        L{Response} instances provide L{IResponse}.
+        """
+        response = justTransportResponse(StringTransport())
+        self.assertTrue(verifyObject(IResponse, response))
+
+
+    def test_makeConnection(self):
+        """
+        The L{IProtocol} provider passed to L{Response.deliverBody} has its
+        C{makeConnection} method called with an L{IPushProducer} provider
+        hooked up to the response as an argument.
+        """
+        producers = []
+        transport = StringTransport()
+        class SomeProtocol(Protocol):
+            def makeConnection(self, producer):
+                producers.append(producer)
+
+        consumer = SomeProtocol()
+        response = justTransportResponse(transport)
+        response.deliverBody(consumer)
+        [theProducer] = producers
+        theProducer.pauseProducing()
+        self.assertEqual(transport.producerState, 'paused')
+        theProducer.resumeProducing()
+        self.assertEqual(transport.producerState, 'producing')
+
+
+    def test_dataReceived(self):
+        """
+        The L{IProtocol} provider passed to L{Response.deliverBody} has its
+        C{dataReceived} method called with bytes received as part of the
+        response body.
+        """
+        bytes = []
+        class ListConsumer(Protocol):
+            def dataReceived(self, data):
+                bytes.append(data)
+
+
+        consumer = ListConsumer()
+        response = justTransportResponse(StringTransport())
+        response.deliverBody(consumer)
+
+        response._bodyDataReceived('foo')
+        self.assertEqual(bytes, ['foo'])
+
+
+    def test_connectionLost(self):
+        """
+        The L{IProtocol} provider passed to L{Response.deliverBody} has its
+        C{connectionLost} method called with a L{Failure} wrapping
+        L{ResponseDone} when the response's C{_bodyDataFinished} method is
+        called.
+        """
+        lost = []
+        class ListConsumer(Protocol):
+            def connectionLost(self, reason):
+                lost.append(reason)
+
+        consumer = ListConsumer()
+        response = justTransportResponse(StringTransport())
+        response.deliverBody(consumer)
+
+        response._bodyDataFinished()
+        lost[0].trap(ResponseDone)
+        self.assertEqual(len(lost), 1)
+
+        # The protocol reference should be dropped, too, to facilitate GC or
+        # whatever.
+        self.assertIdentical(response._bodyProtocol, None)
+
+
+    def test_bufferEarlyData(self):
+        """
+        If data is delivered to the L{Response} before a protocol is registered
+        with C{deliverBody}, that data is buffered until the protocol is
+        registered and then is delivered.
+        """
+        bytes = []
+        class ListConsumer(Protocol):
+            def dataReceived(self, data):
+                bytes.append(data)
+
+        protocol = ListConsumer()
+        response = justTransportResponse(StringTransport())
+        response._bodyDataReceived('foo')
+        response._bodyDataReceived('bar')
+        response.deliverBody(protocol)
+        response._bodyDataReceived('baz')
+        self.assertEqual(bytes, ['foo', 'bar', 'baz'])
+        # Make sure the implementation-detail-byte-buffer is cleared because
+        # not clearing it wastes memory.
+        self.assertIdentical(response._bodyBuffer, None)
+
+
+    def test_multipleStartProducingFails(self):
+        """
+        L{Response.deliverBody} raises L{RuntimeError} if called more than
+        once.
+        """
+        response = justTransportResponse(StringTransport())
+        response.deliverBody(Protocol())
+        self.assertRaises(RuntimeError, response.deliverBody, Protocol())
+
+
+    def test_startProducingAfterFinishedFails(self):
+        """
+        L{Response.deliverBody} raises L{RuntimeError} if called after
+        L{Response._bodyDataFinished}.
+        """
+        response = justTransportResponse(StringTransport())
+        response.deliverBody(Protocol())
+        response._bodyDataFinished()
+        self.assertRaises(RuntimeError, response.deliverBody, Protocol())
+
+
+    def test_bodyDataReceivedAfterFinishedFails(self):
+        """
+        L{Response._bodyDataReceived} raises L{RuntimeError} if called after
+        L{Response._bodyDataFinished} but before L{Response.deliverBody}.
+        """
+        response = justTransportResponse(StringTransport())
+        response._bodyDataFinished()
+        self.assertRaises(RuntimeError, response._bodyDataReceived, 'foo')
+
+
+    def test_bodyDataReceivedAfterDeliveryFails(self):
+        """
+        L{Response._bodyDataReceived} raises L{RuntimeError} if called after
+        L{Response._bodyDataFinished} and after L{Response.deliverBody}.
+        """
+        response = justTransportResponse(StringTransport())
+        response._bodyDataFinished()
+        response.deliverBody(Protocol())
+        self.assertRaises(RuntimeError, response._bodyDataReceived, 'foo')
+
+
+    def test_bodyDataFinishedAfterFinishedFails(self):
+        """
+        L{Response._bodyDataFinished} raises L{RuntimeError} if called more
+        than once.
+        """
+        response = justTransportResponse(StringTransport())
+        response._bodyDataFinished()
+        self.assertRaises(RuntimeError, response._bodyDataFinished)
+
+
+    def test_bodyDataFinishedAfterDeliveryFails(self):
+        """
+        L{Response._bodyDataFinished} raises L{RuntimeError} if called after
+        the body has been delivered.
+        """
+        response = justTransportResponse(StringTransport())
+        response._bodyDataFinished()
+        response.deliverBody(Protocol())
+        self.assertRaises(RuntimeError, response._bodyDataFinished)
+
+
+    def test_transportResumed(self):
+        """
+        L{Response.deliverBody} resumes the HTTP connection's transport
+        before passing it to the transport's C{makeConnection} method.
+        """
+        transportState = []
+        class ListConsumer(Protocol):
+            def makeConnection(self, transport):
+                transportState.append(transport.producerState)
+
+        transport = StringTransport()
+        transport.pauseProducing()
+        protocol = ListConsumer()
+        response = justTransportResponse(transport)
+        self.assertEqual(transport.producerState, 'paused')
+        response.deliverBody(protocol)
+        self.assertEqual(transportState, ['producing'])
+
+
+    def test_bodyDataFinishedBeforeStartProducing(self):
+        """
+        If the entire body is delivered to the L{Response} before the
+        response's C{deliverBody} method is called, the protocol passed to
+        C{deliverBody} is immediately given the body data and then
+        disconnected.
+        """
+        transport = StringTransport()
+        response = justTransportResponse(transport)
+        response._bodyDataReceived('foo')
+        response._bodyDataReceived('bar')
+        response._bodyDataFinished()
+
+        protocol = AccumulatingProtocol()
+        response.deliverBody(protocol)
+        self.assertEqual(protocol.data, 'foobar')
+        protocol.closedReason.trap(ResponseDone)
+
+
+    def test_finishedWithErrorWhenConnected(self):
+        """
+        The L{Failure} passed to L{Response._bodyDataFinished} when the response
+        is in the I{connected} state is passed to the C{connectionLost} method
+        of the L{IProtocol} provider passed to the L{Response}'s
+        C{deliverBody} method.
+        """
+        transport = StringTransport()
+        response = justTransportResponse(transport)
+
+        protocol = AccumulatingProtocol()
+        response.deliverBody(protocol)
+
+        # Sanity check - this test is for the connected state
+        self.assertEqual(response._state, 'CONNECTED')
+        response._bodyDataFinished(Failure(ArbitraryException()))
+
+        protocol.closedReason.trap(ArbitraryException)
+
+
+    def test_finishedWithErrorWhenInitial(self):
+        """
+        The L{Failure} passed to L{Response._bodyDataFinished} when the response
+        is in the I{initial} state is passed to the C{connectionLost} method of
+        the L{IProtocol} provider passed to the L{Response}'s C{deliverBody}
+        method.
+        """
+        transport = StringTransport()
+        response = justTransportResponse(transport)
+
+        # Sanity check - this test is for the initial state
+        self.assertEqual(response._state, 'INITIAL')
+        response._bodyDataFinished(Failure(ArbitraryException()))
+
+        protocol = AccumulatingProtocol()
+        response.deliverBody(protocol)
+
+        protocol.closedReason.trap(ArbitraryException)
diff --git a/ThirdParty/Twisted/twisted/web/test/test_proxy.py b/ThirdParty/Twisted/twisted/web/test/test_proxy.py
new file mode 100644
index 0000000..4452fcb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_proxy.py
@@ -0,0 +1,544 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test for L{twisted.web.proxy}.
+"""
+
+from twisted.trial.unittest import TestCase
+from twisted.test.proto_helpers import StringTransportWithDisconnection
+from twisted.test.proto_helpers import MemoryReactor
+
+from twisted.web.resource import Resource
+from twisted.web.server import Site
+from twisted.web.proxy import ReverseProxyResource, ProxyClientFactory
+from twisted.web.proxy import ProxyClient, ProxyRequest, ReverseProxyRequest
+from twisted.web.test.test_web import DummyRequest
+
+
+class ReverseProxyResourceTestCase(TestCase):
+    """
+    Tests for L{ReverseProxyResource}.
+    """
+
+    def _testRender(self, uri, expectedURI):
+        """
+        Check that a request pointing at C{uri} produces a new proxy
+        connection, with the path of this request pointing at C{expectedURI}.
+        """
+        root = Resource()
+        reactor = MemoryReactor()
+        resource = ReverseProxyResource("127.0.0.1", 1234, "/path", reactor)
+        root.putChild('index', resource)
+        site = Site(root)
+
+        transport = StringTransportWithDisconnection()
+        channel = site.buildProtocol(None)
+        channel.makeConnection(transport)
+        # Clear the timeout if the tests failed
+        self.addCleanup(channel.connectionLost, None)
+
+        channel.dataReceived("GET %s HTTP/1.1\r\nAccept: text/html\r\n\r\n" %
+                             (uri,))
+
+        # Check that one connection has been created, to the correct host/port
+        self.assertEqual(len(reactor.tcpClients), 1)
+        self.assertEqual(reactor.tcpClients[0][0], "127.0.0.1")
+        self.assertEqual(reactor.tcpClients[0][1], 1234)
+
+        # Check the factory passed to the connect, and its given path
+        factory = reactor.tcpClients[0][2]
+        self.assertIsInstance(factory, ProxyClientFactory)
+        self.assertEqual(factory.rest, expectedURI)
+        self.assertEqual(factory.headers["host"], "127.0.0.1:1234")
+
+
+    def test_render(self):
+        """
+        Test that L{ReverseProxyResource.render} initiates a connection to the
+        given server, using a L{ProxyClientFactory} as the connection factory.
+        """
+        return self._testRender("/index", "/path")
+
+
+    def test_renderWithQuery(self):
+        """
+        Test that L{ReverseProxyResource.render} passes query parameters to the
+        created factory.
+        """
+        return self._testRender("/index?foo=bar", "/path?foo=bar")
+
+
+    def test_getChild(self):
+        """
+        The L{ReverseProxyResource.getChild} method should return a resource
+        instance of the same class as the originating resource, forwarding the
+        port, host, and reactor values, and appending the passed value to the
+        path.
+        """
+        reactor = MemoryReactor()
+        resource = ReverseProxyResource("127.0.0.1", 1234, "/path", reactor)
+        child = resource.getChild('foo', None)
+        # The child should keep the same class
+        self.assertIsInstance(child, ReverseProxyResource)
+        self.assertEqual(child.path, "/path/foo")
+        self.assertEqual(child.port, 1234)
+        self.assertEqual(child.host, "127.0.0.1")
+        self.assertIdentical(child.reactor, resource.reactor)
+
+
+    def test_getChildWithSpecial(self):
+        """
+        The L{ReverseProxyResource} returned by C{getChild} has a path which
+        has already been quoted.
+        """
+        resource = ReverseProxyResource("127.0.0.1", 1234, "/path")
+        child = resource.getChild(' /%', None)
+        self.assertEqual(child.path, "/path/%20%2F%25")
+
+
+
+class DummyChannel(object):
+    """
+    A dummy HTTP channel that does nothing but hold a transport and record
+    the reason the connection was lost.
+
+    @ivar transport: the transport used by the client.
+    @ivar lostReason: the reason recorded when the connection is lost.
+    """
+
+    def __init__(self, transport):
+        """
+        Hold a reference to the transport.
+        """
+        self.transport = transport
+        self.lostReason = None
+
+
+    def connectionLost(self, reason):
+        """
+        Keep track of the connection lost reason.
+        """
+        self.lostReason = reason
+
+
+
+class ProxyClientTestCase(TestCase):
+    """
+    Tests for L{ProxyClient}.
+    """
+
+    def _parseOutHeaders(self, content):
+        """
+        Parse the headers out of some web content.
+
+        @param content: Bytes received from a web server.
+        @return: A tuple of (requestLine, headers, body). C{headers} is a dict
+            of headers, C{requestLine} is the first line (e.g. "POST /foo ...")
+            and C{body} is whatever is left.
+        """
+        headers, body = content.split('\r\n\r\n')
+        headers = headers.split('\r\n')
+        requestLine = headers.pop(0)
+        return (
+            requestLine, dict(header.split(': ') for header in headers), body)
+
+
+    def makeRequest(self, path):
+        """
+        Make a dummy request object for the URL path.
+
+        @param path: A URL path, beginning with a slash.
+        @return: A L{DummyRequest}.
+        """
+        return DummyRequest(path)
+
+
+    def makeProxyClient(self, request, method="GET", headers=None,
+                        requestBody=""):
+        """
+        Make a L{ProxyClient} object used for testing.
+
+        @param request: The request to use.
+        @param method: The HTTP method to use, GET by default.
+        @param headers: The HTTP headers to use expressed as a dict. If not
+            provided, defaults to {'accept': 'text/html'}.
+        @param requestBody: The body of the request. Defaults to the empty
+            string.
+        @return: A L{ProxyClient}
+        """
+        if headers is None:
+            headers = {"accept": "text/html"}
+        path = '/' + request.postpath
+        return ProxyClient(
+            method, path, 'HTTP/1.0', headers, requestBody, request)
+
+
+    def connectProxy(self, proxyClient):
+        """
+        Connect a proxy client to a L{StringTransportWithDisconnection}.
+
+        @param proxyClient: A L{ProxyClient}.
+        @return: The L{StringTransportWithDisconnection}.
+        """
+        clientTransport = StringTransportWithDisconnection()
+        clientTransport.protocol = proxyClient
+        proxyClient.makeConnection(clientTransport)
+        return clientTransport
+
+
+    def assertForwardsHeaders(self, proxyClient, requestLine, headers):
+        """
+        Assert that C{proxyClient} sends C{headers} when it connects.
+
+        @param proxyClient: A L{ProxyClient}.
+        @param requestLine: The request line we expect to be sent.
+        @param headers: A dict of headers we expect to be sent.
+        @return: If the assertion is successful, return the request body as
+            bytes.
+        """
+        self.connectProxy(proxyClient)
+        requestContent = proxyClient.transport.value()
+        receivedLine, receivedHeaders, body = self._parseOutHeaders(
+            requestContent)
+        self.assertEqual(receivedLine, requestLine)
+        self.assertEqual(receivedHeaders, headers)
+        return body
+
+
+    def makeResponseBytes(self, code, message, headers, body):
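+        """
+        Build the raw bytes of an HTTP/1.0 response from the given status
+        code, message, headers, and body, joined with CRLF line endings.
+        """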
+        lines = ["HTTP/1.0 %d %s" % (code, message)]
+        for header, values in headers:
+            for value in values:
+                lines.append("%s: %s" % (header, value))
+        lines.extend(['', body])
+        return '\r\n'.join(lines)
+
+
+    def assertForwardsResponse(self, request, code, message, headers, body):
+        """
+        Assert that C{request} has forwarded a response from the server.
+
+        @param request: A L{DummyRequest}.
+        @param code: The expected HTTP response code.
+        @param message: The expected HTTP message.
+        @param headers: The expected HTTP headers.
+        @param body: The expected response body.
+        """
+        self.assertEqual(request.responseCode, code)
+        self.assertEqual(request.responseMessage, message)
+        receivedHeaders = list(request.responseHeaders.getAllRawHeaders())
+        receivedHeaders.sort()
+        expectedHeaders = headers[:]
+        expectedHeaders.sort()
+        self.assertEqual(receivedHeaders, expectedHeaders)
+        self.assertEqual(''.join(request.written), body)
+
+
+    def _testDataForward(self, code, message, headers, body, method="GET",
+                         requestBody="", loseConnection=True):
+        """
+        Build a fake proxy connection, feed a response to it, and check that
+        it is forwarded to the originating request.
+        """
+        request = self.makeRequest('foo')
+        client = self.makeProxyClient(
+            request, method, {'accept': 'text/html'}, requestBody)
+
+        receivedBody = self.assertForwardsHeaders(
+            client, '%s /foo HTTP/1.0' % (method,),
+            {'connection': 'close', 'accept': 'text/html'})
+
+        self.assertEqual(receivedBody, requestBody)
+
+        # Fake an answer
+        client.dataReceived(
+            self.makeResponseBytes(code, message, headers, body))
+
+        # Check that the response data has been forwarded back to the original
+        # requester.
+        self.assertForwardsResponse(request, code, message, headers, body)
+
+        # Check that when the response is done, the request is finished.
+        if loseConnection:
+            client.transport.loseConnection()
+
+        # Even if we didn't call loseConnection, the transport should be
+        # disconnected.  This lets us not rely on the server to close our
+        # sockets for us.
+        self.assertFalse(client.transport.connected)
+        self.assertEqual(request.finished, 1)
+
+
+    def test_forward(self):
+        """
+        When connected to the server, L{ProxyClient} should send the saved
+        request, with modifications of the headers, and then forward the result
+        to the parent request.
+        """
+        return self._testDataForward(
+            200, "OK", [("Foo", ["bar", "baz"])], "Some data\r\n")
+
+
+    def test_postData(self):
+        """
+        Try to post content in the request, and check that the proxy client
+        forwards the body of the request.
+        """
+        return self._testDataForward(
+            200, "OK", [("Foo", ["bar"])], "Some data\r\n", "POST", "Some content")
+
+
+    def test_statusWithMessage(self):
+        """
+        If the response contains a status with a message, it should be
+        forwarded to the parent request with all the information.
+        """
+        return self._testDataForward(
+            404, "Not Found", [], "")
+
+
+    def test_contentLength(self):
+        """
+        If the response contains a I{Content-Length} header, the inbound
+        request object should still only have C{finish} called on it once.
+        """
+        data = "foo bar baz"
+        return self._testDataForward(
+            200, "OK", [("Content-Length", [str(len(data))])], data)
+
+
+    def test_losesConnection(self):
+        """
+        If the response contains a I{Content-Length} header, the outgoing
+        connection is closed when all response body data has been received.
+        """
+        data = "foo bar baz"
+        return self._testDataForward(
+            200, "OK", [("Content-Length", [str(len(data))])], data,
+            loseConnection=False)
+
+
+    def test_headersCleanups(self):
+        """
+        The headers given at initialization should be modified:
+        B{proxy-connection} should be removed if present, and B{connection}
+        should be added.
+        """
+        client = ProxyClient('GET', '/foo', 'HTTP/1.0',
+                {"accept": "text/html", "proxy-connection": "foo"}, '', None)
+        self.assertEqual(client.headers,
+                {"accept": "text/html", "connection": "close"})
+
+
+    def test_keepaliveNotForwarded(self):
+        """
+        The proxy doesn't know what to do with keepalive headers from the
+        client, so any I{keep-alive} header is dropped and I{connection} is
+        forced to C{close}.
+        """
+        headers = {
+            "accept": "text/html",
+            'keep-alive': '300',
+            'connection': 'keep-alive',
+            }
+        expectedHeaders = headers.copy()
+        expectedHeaders['connection'] = 'close'
+        del expectedHeaders['keep-alive']
+        client = ProxyClient('GET', '/foo', 'HTTP/1.0', headers, '', None)
+        self.assertForwardsHeaders(
+            client, 'GET /foo HTTP/1.0', expectedHeaders)
+
+
+    def test_defaultHeadersOverridden(self):
+        """
+        L{server.Request} within the proxy sets certain response headers by
+        default. When we get these headers back from the remote server, the
+        defaults are overridden rather than simply appended.
+        """
+        request = self.makeRequest('foo')
+        request.responseHeaders.setRawHeaders('server', ['old-bar'])
+        request.responseHeaders.setRawHeaders('date', ['old-baz'])
+        request.responseHeaders.setRawHeaders('content-type', ["old/qux"])
+        client = self.makeProxyClient(request, headers={'accept': 'text/html'})
+        self.connectProxy(client)
+        headers = {
+            'Server': ['bar'],
+            'Date': ['2010-01-01'],
+            'Content-Type': ['application/x-baz'],
+            }
+        client.dataReceived(
+            self.makeResponseBytes(200, "OK", headers.items(), ''))
+        self.assertForwardsResponse(
+            request, 200, 'OK', headers.items(), '')
+
+
+
+class ProxyClientFactoryTestCase(TestCase):
+    """
+    Tests for L{ProxyClientFactory}.
+    """
+
+    def test_connectionFailed(self):
+        """
+        Check that L{ProxyClientFactory.clientConnectionFailed} produces
+        a B{501} response to the parent request.
+        """
+        request = DummyRequest(['foo'])
+        factory = ProxyClientFactory('GET', '/foo', 'HTTP/1.0',
+                                     {"accept": "text/html"}, '', request)
+
+        factory.clientConnectionFailed(None, None)
+        self.assertEqual(request.responseCode, 501)
+        self.assertEqual(request.responseMessage, "Gateway error")
+        self.assertEqual(
+            list(request.responseHeaders.getAllRawHeaders()),
+            [("Content-Type", ["text/html"])])
+        self.assertEqual(
+            ''.join(request.written),
+            "<H1>Could not connect</H1>")
+        self.assertEqual(request.finished, 1)
+
+
+    def test_buildProtocol(self):
+        """
+        L{ProxyClientFactory.buildProtocol} should produce a L{ProxyClient}
+        with the same attribute values (with the headers updated).
+        """
+        factory = ProxyClientFactory('GET', '/foo', 'HTTP/1.0',
+                                     {"accept": "text/html"}, 'Some data',
+                                     None)
+        proto = factory.buildProtocol(None)
+        self.assertIsInstance(proto, ProxyClient)
+        self.assertEqual(proto.command, 'GET')
+        self.assertEqual(proto.rest, '/foo')
+        self.assertEqual(proto.data, 'Some data')
+        self.assertEqual(proto.headers,
+                          {"accept": "text/html", "connection": "close"})
+
+
+
+class ProxyRequestTestCase(TestCase):
+    """
+    Tests for L{ProxyRequest}.
+    """
+
+    def _testProcess(self, uri, expectedURI, method="GET", data=""):
+        """
+        Build a request pointing at C{uri}, and check that a proxied request
+        is created, pointing at C{expectedURI}.
+        """
+        transport = StringTransportWithDisconnection()
+        channel = DummyChannel(transport)
+        reactor = MemoryReactor()
+        request = ProxyRequest(channel, False, reactor)
+        request.gotLength(len(data))
+        request.handleContentChunk(data)
+        request.requestReceived(method, 'http://example.com%s' % (uri,),
+                                'HTTP/1.0')
+
+        self.assertEqual(len(reactor.tcpClients), 1)
+        self.assertEqual(reactor.tcpClients[0][0], "example.com")
+        self.assertEqual(reactor.tcpClients[0][1], 80)
+
+        factory = reactor.tcpClients[0][2]
+        self.assertIsInstance(factory, ProxyClientFactory)
+        self.assertEqual(factory.command, method)
+        self.assertEqual(factory.version, 'HTTP/1.0')
+        self.assertEqual(factory.headers, {'host': 'example.com'})
+        self.assertEqual(factory.data, data)
+        self.assertEqual(factory.rest, expectedURI)
+        self.assertEqual(factory.father, request)
+
+
+    def test_process(self):
+        """
+        L{ProxyRequest.process} should create a connection to the given server,
+        with a L{ProxyClientFactory} as connection factory, with the correct
+        parameters:
+            - forward the command, version and data values
+            - update headers with the B{host} value
+            - remove the host from the URL
+            - pass the request as the parent request
+        """
+        return self._testProcess("/foo/bar", "/foo/bar")
+
+
+    def test_processWithoutTrailingSlash(self):
+        """
+        If the incoming request doesn't contain a slash,
+        L{ProxyRequest.process} should add one when instantiating
+        L{ProxyClientFactory}.
+        """
+        return self._testProcess("", "/")
+
+
+    def test_processWithData(self):
+        """
+        L{ProxyRequest.process} should be able to retrieve the request body
+        and forward it.
+        """
+        return self._testProcess(
+            "/foo/bar", "/foo/bar", "POST", "Some content")
+
+
+    def test_processWithPort(self):
+        """
+        Check that L{ProxyRequest.process} correctly parses the port in the
+        incoming URL, and creates an outgoing connection to this port.
+        """
+        transport = StringTransportWithDisconnection()
+        channel = DummyChannel(transport)
+        reactor = MemoryReactor()
+        request = ProxyRequest(channel, False, reactor)
+        request.gotLength(0)
+        request.requestReceived('GET', 'http://example.com:1234/foo/bar',
+                                'HTTP/1.0')
+
+        # That should create one connection, with the port parsed from the URL
+        self.assertEqual(len(reactor.tcpClients), 1)
+        self.assertEqual(reactor.tcpClients[0][0], "example.com")
+        self.assertEqual(reactor.tcpClients[0][1], 1234)
+
+
+
+class DummyFactory(object):
+    """
+    A simple holder for C{host} and C{port} information.
+    """
+
+    def __init__(self, host, port):
+        self.host = host
+        self.port = port
+
+
+
+class ReverseProxyRequestTestCase(TestCase):
+    """
+    Tests for L{ReverseProxyRequest}.
+    """
+
+    def test_process(self):
+        """
+        L{ReverseProxyRequest.process} should create a connection to its
+        factory host/port, using a L{ProxyClientFactory} instantiated with the
+        correct parameters, and in particular setting the B{host} header to
+        the factory host.
+        """
+        transport = StringTransportWithDisconnection()
+        channel = DummyChannel(transport)
+        reactor = MemoryReactor()
+        request = ReverseProxyRequest(channel, False, reactor)
+        request.factory = DummyFactory("example.com", 1234)
+        request.gotLength(0)
+        request.requestReceived('GET', '/foo/bar', 'HTTP/1.0')
+
+        # Check that one connection has been created, to the correct host/port
+        self.assertEqual(len(reactor.tcpClients), 1)
+        self.assertEqual(reactor.tcpClients[0][0], "example.com")
+        self.assertEqual(reactor.tcpClients[0][1], 1234)
+
+        # Check the factory passed to the connect, and its headers
+        factory = reactor.tcpClients[0][2]
+        self.assertIsInstance(factory, ProxyClientFactory)
+        self.assertEqual(factory.headers, {'host': 'example.com'})
diff --git a/ThirdParty/Twisted/twisted/web/test/test_resource.py b/ThirdParty/Twisted/twisted/web/test/test_resource.py
new file mode 100644
index 0000000..38cbe59
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_resource.py
@@ -0,0 +1,261 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.resource}.
+"""
+
+from twisted.trial.unittest import TestCase
+
+from twisted.web.error import UnsupportedMethod
+from twisted.web.resource import (
+    NOT_FOUND, FORBIDDEN, Resource, ErrorPage, NoResource, ForbiddenResource,
+    getChildForRequest)
+from twisted.web.test.requesthelper import DummyRequest
+
+
+class ErrorPageTests(TestCase):
+    """
+    Tests for L{ErrorPage}, L{NoResource}, and L{ForbiddenResource}.
+    """
+
+    errorPage = ErrorPage
+    noResource = NoResource
+    forbiddenResource = ForbiddenResource
+
+    def test_getChild(self):
+        """
+        The C{getChild} method of L{ErrorPage} returns the L{ErrorPage} it is
+        called on.
+        """
+        page = self.errorPage(321, "foo", "bar")
+        self.assertIdentical(page.getChild(b"name", object()), page)
+
+
+    def _pageRenderingTest(self, page, code, brief, detail):
+        request = DummyRequest([b''])
+        template = (
+            u"\n"
+            u"<html>\n"
+            u"  <head><title>%s - %s</title></head>\n"
+            u"  <body>\n"
+            u"    <h1>%s</h1>\n"
+            u"    <p>%s</p>\n"
+            u"  </body>\n"
+            u"</html>\n")
+        expected = template % (code, brief, brief, detail)
+        self.assertEqual(
+            page.render(request), expected.encode('utf-8'))
+        self.assertEqual(request.responseCode, code)
+        self.assertEqual(
+            request.outgoingHeaders,
+            {b'content-type': b'text/html; charset=utf-8'})
+
+
+    def test_errorPageRendering(self):
+        """
+        L{ErrorPage.render} returns a C{bytes} describing the error defined by
+        the response code and message passed to L{ErrorPage.__init__}.  It also
+        uses that response code to set the response code on the L{Request}
+        passed in.
+        """
+        code = 321
+        brief = "brief description text"
+        detail = "much longer text might go here"
+        page = self.errorPage(code, brief, detail)
+        self._pageRenderingTest(page, code, brief, detail)
+
+
+    def test_noResourceRendering(self):
+        """
+        L{NoResource} sets the HTTP I{NOT FOUND} code.
+        """
+        detail = "long message"
+        page = self.noResource(detail)
+        self._pageRenderingTest(page, NOT_FOUND, "No Such Resource", detail)
+
+
+    def test_forbiddenResourceRendering(self):
+        """
+        L{ForbiddenResource} sets the HTTP I{FORBIDDEN} code.
+        """
+        detail = "longer message"
+        page = self.forbiddenResource(detail)
+        self._pageRenderingTest(page, FORBIDDEN, "Forbidden Resource", detail)
+
+
+
+class DynamicChild(Resource):
+    """
+    A L{Resource} to be created on the fly by L{DynamicChildren}.
+    """
+    def __init__(self, path, request):
+        Resource.__init__(self)
+        self.path = path
+        self.request = request
+
+
+
+class DynamicChildren(Resource):
+    """
+    A L{Resource} with dynamic children.
+    """
+    def getChild(self, path, request):
+        return DynamicChild(path, request)
+
+
+
+class BytesReturnedRenderable(Resource):
+    """
+    A L{Resource} with minimal capabilities to render a response.
+    """
+    def __init__(self, response):
+        """
+        @param response: A C{bytes} object giving the value to return from
+            C{render_GET}.
+        """
+        Resource.__init__(self)
+        self._response = response
+
+
+    def render_GET(self, request):
+        """
+        Render a response to a I{GET} request by returning a short byte string
+        to be written by the server.
+        """
+        return self._response
+
+
+
+class ImplicitAllowedMethods(Resource):
+    """
+    A L{Resource} which implicitly defines its allowed methods by defining
+    renderers to handle them.
+    """
+    def render_GET(self, request):
+        pass
+
+
+    def render_PUT(self, request):
+        pass
+
+
+
+class ResourceTests(TestCase):
+    """
+    Tests for L{Resource}.
+    """
+    def test_staticChildren(self):
+        """
+        L{Resource.putChild} adds a I{static} child to the resource.  That child
+        is returned from any call to L{Resource.getChildWithDefault} for the
+        child's path.
+        """
+        resource = Resource()
+        child = Resource()
+        sibling = Resource()
+        resource.putChild(b"foo", child)
+        resource.putChild(b"bar", sibling)
+        self.assertIdentical(
+            child, resource.getChildWithDefault(b"foo", DummyRequest([])))
+
+
+    def test_dynamicChildren(self):
+        """
+        L{Resource.getChildWithDefault} delegates to L{Resource.getChild} when
+        the requested path is not associated with any static child.
+        """
+        path = b"foo"
+        request = DummyRequest([])
+        resource = DynamicChildren()
+        child = resource.getChildWithDefault(path, request)
+        self.assertIsInstance(child, DynamicChild)
+        self.assertEqual(child.path, path)
+        self.assertIdentical(child.request, request)
+
+
+    def test_defaultHEAD(self):
+        """
+        When not otherwise overridden, L{Resource.render} treats a I{HEAD}
+        request as if it were a I{GET} request.
+        """
+        expected = b"insert response here"
+        request = DummyRequest([])
+        request.method = b'HEAD'
+        resource = BytesReturnedRenderable(expected)
+        self.assertEqual(expected, resource.render(request))
+
+
+    def test_explicitAllowedMethods(self):
+        """
+        The L{UnsupportedMethod} raised by L{Resource.render} for an unsupported
+        request method has an C{allowedMethods} attribute set to the value of
+        the C{allowedMethods} attribute of the L{Resource}, if it has one.
+        """
+        expected = [b'GET', b'HEAD', b'PUT']
+        resource = Resource()
+        resource.allowedMethods = expected
+        request = DummyRequest([])
+        request.method = b'FICTIONAL'
+        exc = self.assertRaises(UnsupportedMethod, resource.render, request)
+        self.assertEqual(set(expected), set(exc.allowedMethods))
+
+
+    def test_implicitAllowedMethods(self):
+        """
+        The L{UnsupportedMethod} raised by L{Resource.render} for an unsupported
+        request method has an C{allowedMethods} attribute set to a list of the
+        methods supported by the L{Resource}, as determined by the
+        I{render_}-prefixed methods which it defines, if C{allowedMethods} is
+        not explicitly defined by the L{Resource}.
+        """
+        expected = set([b'GET', b'HEAD', b'PUT'])
+        resource = ImplicitAllowedMethods()
+        request = DummyRequest([])
+        request.method = b'FICTIONAL'
+        exc = self.assertRaises(UnsupportedMethod, resource.render, request)
+        self.assertEqual(expected, set(exc.allowedMethods))
+
+
+
+
+class GetChildForRequestTests(TestCase):
+    """
+    Tests for L{getChildForRequest}.
+    """
+    def test_exhaustedPostPath(self):
+        """
+        L{getChildForRequest} returns whatever resource has been reached by the
+        time the request's C{postpath} is empty.
+        """
+        request = DummyRequest([])
+        resource = Resource()
+        result = getChildForRequest(resource, request)
+        self.assertIdentical(resource, result)
+
+
+    def test_leafResource(self):
+        """
+        L{getChildForRequest} returns the first resource it encounters with a
+        C{isLeaf} attribute set to C{True}.
+        """
+        request = DummyRequest([b"foo", b"bar"])
+        resource = Resource()
+        resource.isLeaf = True
+        result = getChildForRequest(resource, request)
+        self.assertIdentical(resource, result)
+
+
+    def test_postPathToPrePath(self):
+        """
+        As path segments from the request are traversed, they are taken from
+        C{postpath} and put into C{prepath}.
+        """
+        request = DummyRequest([b"foo", b"bar"])
+        root = Resource()
+        child = Resource()
+        child.isLeaf = True
+        root.putChild(b"foo", child)
+        self.assertIdentical(child, getChildForRequest(root, request))
+        self.assertEqual(request.prepath, [b"foo"])
+        self.assertEqual(request.postpath, [b"bar"])
diff --git a/ThirdParty/Twisted/twisted/web/test/test_script.py b/ThirdParty/Twisted/twisted/web/test/test_script.py
new file mode 100644
index 0000000..b4248bf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_script.py
@@ -0,0 +1,70 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.script}.
+"""
+
+import os
+
+from twisted.trial.unittest import TestCase
+from twisted.web.http import NOT_FOUND
+from twisted.web.script import ResourceScriptDirectory, PythonScript
+from twisted.web.test._util import _render
+from twisted.web.test.test_web import DummyRequest
+
+
+class ResourceScriptDirectoryTests(TestCase):
+    """
+    Tests for L{ResourceScriptDirectory}.
+    """
+    def test_render(self):
+        """
+        L{ResourceScriptDirectory.render} sets the HTTP response code to I{NOT
+        FOUND}.
+        """
+        resource = ResourceScriptDirectory(self.mktemp())
+        request = DummyRequest([''])
+        d = _render(resource, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, NOT_FOUND)
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_notFoundChild(self):
+        """
+        L{ResourceScriptDirectory.getChild} returns a resource which renders an
+        response with the HTTP I{NOT FOUND} status code if the indicated child
+        does not exist as an entry in the directory used to initialized the
+        L{ResourceScriptDirectory}.
+        """
+        path = self.mktemp()
+        os.makedirs(path)
+        resource = ResourceScriptDirectory(path)
+        request = DummyRequest(['foo'])
+        child = resource.getChild("foo", request)
+        d = _render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, NOT_FOUND)
+        d.addCallback(cbRendered)
+        return d
+
+
+
+class PythonScriptTests(TestCase):
+    """
+    Tests for L{PythonScript}.
+    """
+    def test_notFoundRender(self):
+        """
+        If the source file a L{PythonScript} is initialized with doesn't exist,
+        L{PythonScript.render} sets the HTTP response code to I{NOT FOUND}.
+        """
+        resource = PythonScript(self.mktemp(), None)
+        request = DummyRequest([''])
+        d = _render(resource, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, NOT_FOUND)
+        d.addCallback(cbRendered)
+        return d
diff --git a/ThirdParty/Twisted/twisted/web/test/test_soap.py b/ThirdParty/Twisted/twisted/web/test/test_soap.py
new file mode 100644
index 0000000..247282f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_soap.py
@@ -0,0 +1,114 @@
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+#
+
+"""Test SOAP support."""
+
+try:
+    import SOAPpy
+except ImportError:
+    SOAPpy = None
+    class SOAPPublisher: pass
+else:
+    from twisted.web import soap
+    SOAPPublisher = soap.SOAPPublisher
+
+from twisted.trial import unittest
+from twisted.web import server, error
+from twisted.internet import reactor, defer
+
+
+class Test(SOAPPublisher):
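+    """
+    A SOAPPublisher exposing a handful of simple remote methods used by the
+    tests below.
+    """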
+
+    def soap_add(self, a, b):
+        return a + b
+
+    def soap_kwargs(self, a=1, b=2):
+        return a + b
+    soap_kwargs.useKeywords=True
+
+    def soap_triple(self, string, num):
+        return [string, num, None]
+
+    def soap_struct(self):
+        return SOAPpy.structType({"a": "c"})
+
+    def soap_defer(self, x):
+        return defer.succeed(x)
+
+    def soap_deferFail(self):
+        return defer.fail(ValueError())
+
+    def soap_fail(self):
+        raise RuntimeError
+
+    def soap_deferFault(self):
+        return defer.fail(ValueError())
+
+    def soap_complex(self):
+        return {"a": ["b", "c", 12, []], "D": "foo"}
+
+    def soap_dict(self, map, key):
+        return map[key]
+
+
+class SOAPTestCase(unittest.TestCase):
+
+    def setUp(self):
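+        """
+        Start a L{server.Site} publishing L{Test} on an ephemeral local port.
+        """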
+        self.publisher = Test()
+        self.p = reactor.listenTCP(0, server.Site(self.publisher),
+                                   interface="127.0.0.1")
+        self.port = self.p.getHost().port
+
+    def tearDown(self):
+        return self.p.stopListening()
+
+    def proxy(self):
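+        """
+        Return a L{soap.Proxy} pointing at the server started in C{setUp}.
+        """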
+        return soap.Proxy("http://127.0.0.1:%d/" % self.port)
+
+    def testResults(self):
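+        """
+        Call several remote methods through the proxy and check that each one
+        returns the expected result.
+        """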
+        inputOutput = [
+            ("add", (2, 3), 5),
+            ("defer", ("a",), "a"),
+            ("dict", ({"a": 1}, "a"), 1),
+            ("triple", ("a", 1), ["a", 1, None])]
+
+        dl = []
+        for meth, args, outp in inputOutput:
+            d = self.proxy().callRemote(meth, *args)
+            d.addCallback(self.assertEqual, outp)
+            dl.append(d)
+
+        # SOAPpy kinda blows.
+        d = self.proxy().callRemote('complex')
+        d.addCallback(lambda result: result._asdict())
+        d.addCallback(self.assertEqual, {"a": ["b", "c", 12, []], "D": "foo"})
+        dl.append(d)
+
+        # We now return to our regularly scheduled program, already in progress.
+        return defer.DeferredList(dl, fireOnOneErrback=True)
+
+    def testMethodNotFound(self):
+        """
+        Check that calling a non-existent method returns an error 500.
+        """
+        d = self.proxy().callRemote('doesntexist')
+        self.assertFailure(d, error.Error)
+        def cb(err):
+            self.assertEqual(int(err.status), 500)
+        d.addCallback(cb)
+        return d
+
+    def testLookupFunction(self):
+        """
+        Test the lookupFunction method on the publisher, to check which remote
+        methods are available.
+        """
+        self.assertTrue(self.publisher.lookupFunction("add"))
+        self.assertTrue(self.publisher.lookupFunction("fail"))
+        self.assertFalse(self.publisher.lookupFunction("foobar"))
+
+if not SOAPpy:
+    SOAPTestCase.skip = "SOAPpy not installed"
+
diff --git a/ThirdParty/Twisted/twisted/web/test/test_stan.py b/ThirdParty/Twisted/twisted/web/test/test_stan.py
new file mode 100644
index 0000000..9aa65a6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_stan.py
@@ -0,0 +1,139 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web._stan} portion of the L{twisted.web.template}
+implementation.
+"""
+
+from twisted.web.template import Comment, CDATA, CharRef, Tag
+from twisted.trial.unittest import TestCase
+
+def proto(*a, **kw):
+    """
+    Produce a new tag for testing.
+    """
+    return Tag('hello')(*a, **kw)
+
+
+class TestTag(TestCase):
+    """
+    Tests for L{Tag}.
+    """
+    def test_fillSlots(self):
+        """
+        L{Tag.fillSlots} returns self.
+        """
+        tag = proto()
+        self.assertIdentical(tag, tag.fillSlots(test='test'))
+
+
+    def test_cloneShallow(self):
+        """
+        L{Tag.clone} copies all attributes and children of a tag, including its
+        render attribute.  When called with C{deep=False}, the clone is
+        shallow: child lists are copied, but their contents are not cloned.
+        """
+        innerList = ["inner list"]
+        tag = proto("How are you", innerList,
+                    hello="world", render="aSampleMethod")
+        tag.fillSlots(foo='bar')
+        tag.filename = "foo/bar"
+        tag.lineNumber = 6
+        tag.columnNumber = 12
+        clone = tag.clone(deep=False)
+        self.assertEqual(clone.attributes['hello'], 'world')
+        self.assertNotIdentical(clone.attributes, tag.attributes)
+        self.assertEqual(clone.children, ["How are you", innerList])
+        self.assertNotIdentical(clone.children, tag.children)
+        self.assertIdentical(clone.children[1], innerList)
+        self.assertEqual(tag.slotData, clone.slotData)
+        self.assertNotIdentical(tag.slotData, clone.slotData)
+        self.assertEqual(clone.filename, "foo/bar")
+        self.assertEqual(clone.lineNumber, 6)
+        self.assertEqual(clone.columnNumber, 12)
+        self.assertEqual(clone.render, "aSampleMethod")
+
+
+    def test_cloneDeep(self):
+        """
+        L{Tag.clone} copies all attributes and children of a tag, including its
+        render attribute.  In its normal operating mode (where the deep flag is
+        C{True}, as is the default), it will clone all sub-lists and sub-tags.
+        """
+        innerTag = proto("inner")
+        innerList = ["inner list"]
+        tag = proto("How are you", innerTag, innerList,
+                    hello="world", render="aSampleMethod")
+        tag.fillSlots(foo='bar')
+        tag.filename = "foo/bar"
+        tag.lineNumber = 6
+        tag.columnNumber = 12
+        clone = tag.clone()
+        self.assertEqual(clone.attributes['hello'], 'world')
+        self.assertNotIdentical(clone.attributes, tag.attributes)
+        self.assertNotIdentical(clone.children, tag.children)
+        # sanity check
+        self.assertIdentical(tag.children[1], innerTag)
+        # clone should have sub-clone
+        self.assertNotIdentical(clone.children[1], innerTag)
+        # sanity check
+        self.assertIdentical(tag.children[2], innerList)
+        # clone should have sub-clone
+        self.assertNotIdentical(clone.children[2], innerList)
+        self.assertEqual(tag.slotData, clone.slotData)
+        self.assertNotIdentical(tag.slotData, clone.slotData)
+        self.assertEqual(clone.filename, "foo/bar")
+        self.assertEqual(clone.lineNumber, 6)
+        self.assertEqual(clone.columnNumber, 12)
+        self.assertEqual(clone.render, "aSampleMethod")
+
+
+    def test_clear(self):
+        """
+        L{Tag.clear} removes all children from a tag, but leaves its attributes
+        in place.
+        """
+        tag = proto("these are", "children", "cool", andSoIs='this-attribute')
+        tag.clear()
+        self.assertEqual(tag.children, [])
+        self.assertEqual(tag.attributes, {'andSoIs': 'this-attribute'})
+
+
+    def test_suffix(self):
+        """
+        L{Tag.__call__} accepts Python keywords with a trailing underscore and
+        sets the corresponding DOM attribute without the underscore.
+        """
+        proto = Tag('div')
+        tag = proto()
+        tag(class_='a')
+        self.assertEqual(tag.attributes, {'class': 'a'})
+
+
+    def test_commentRepr(self):
+        """
+        L{Comment.__repr__} returns a value which makes it easy to see what's in
+        the comment.
+        """
+        self.assertEqual(repr(Comment(u"hello there")),
+                          "Comment(u'hello there')")
+
+
+    def test_cdataRepr(self):
+        """
+        L{CDATA.__repr__} returns a value which makes it easy to see what's in
+        the CDATA section.
+        """
+        self.assertEqual(repr(CDATA(u"test data")),
+                          "CDATA(u'test data')")
+
+
+    def test_charrefRepr(self):
+        """
+        L{CharRef.__repr__} returns a value which makes it easy to see what
+        character is referred to.
+        """
+        snowman = ord(u"\N{SNOWMAN}")
+        self.assertEqual(repr(CharRef(snowman)), "CharRef(9731)")
diff --git a/ThirdParty/Twisted/twisted/web/test/test_static.py b/ThirdParty/Twisted/twisted/web/test/test_static.py
new file mode 100644
index 0000000..9e746ce
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_static.py
@@ -0,0 +1,1486 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.static}.
+"""
+
+import os, re, StringIO
+
+from zope.interface.verify import verifyObject
+
+from twisted.internet import abstract, interfaces
+from twisted.python.compat import set
+from twisted.python.runtime import platform
+from twisted.python.filepath import FilePath
+from twisted.python import log
+from twisted.trial.unittest import TestCase
+from twisted.web import static, http, script, resource
+from twisted.web.server import UnsupportedMethod
+from twisted.web.test.test_web import DummyRequest
+from twisted.web.test._util import _render
+
+
+class StaticDataTests(TestCase):
+    """
+    Tests for L{Data}.
+    """
+    def test_headRequest(self):
+        """
+        L{Data.render} returns an empty response body for a I{HEAD} request.
+        """
+        data = static.Data("foo", "bar")
+        request = DummyRequest([''])
+        request.method = 'HEAD'
+        d = _render(data, request)
+        def cbRendered(ignored):
+            self.assertEqual(''.join(request.written), "")
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_invalidMethod(self):
+        """
+        L{Data.render} raises L{UnsupportedMethod} in response to a non-I{GET},
+        non-I{HEAD} request.
+        """
+        data = static.Data("foo", "bar")
+        request = DummyRequest([''])
+        request.method = 'POST'
+        self.assertRaises(UnsupportedMethod, data.render, request)
+
+
+
+class StaticFileTests(TestCase):
+    """
+    Tests for the basic behavior of L{File}.
+    """
+    def _render(self, resource, request):
+        return _render(resource, request)
+
+
+    def test_invalidMethod(self):
+        """
+        L{File.render} raises L{UnsupportedMethod} in response to a non-I{GET},
+        non-I{HEAD} request.
+        """
+        request = DummyRequest([''])
+        request.method = 'POST'
+        path = FilePath(self.mktemp())
+        path.setContent("foo")
+        file = static.File(path.path)
+        self.assertRaises(UnsupportedMethod, file.render, request)
+
+
+    def test_notFound(self):
+        """
+        If a request is made which encounters a L{File} before a final segment
+        which does not correspond to any file in the path the L{File} was
+        created with, a not found response is sent.
+        """
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        file = static.File(base.path)
+
+        request = DummyRequest(['foobar'])
+        child = resource.getChildForRequest(file, request)
+
+        d = self._render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, 404)
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_emptyChild(self):
+        """
+        The C{''} child of a L{File} which corresponds to a directory in the
+        filesystem is a L{DirectoryLister}.
+        """
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        file = static.File(base.path)
+
+        request = DummyRequest([''])
+        child = resource.getChildForRequest(file, request)
+        self.assertIsInstance(child, static.DirectoryLister)
+        self.assertEqual(child.path, base.path)
+
+
+    def test_securityViolationNotFound(self):
+        """
+        If a request is made which encounters a L{File} before a final segment
+        which cannot be looked up in the filesystem due to security
+        considerations, a not found response is sent.
+        """
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        file = static.File(base.path)
+
+        request = DummyRequest(['..'])
+        child = resource.getChildForRequest(file, request)
+
+        d = self._render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, 404)
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_forbiddenResource(self):
+        """
+        If the file in the filesystem which would satisfy a request cannot be
+        read, L{File.render} sets the HTTP response code to I{FORBIDDEN}.
+        """
+        base = FilePath(self.mktemp())
+        base.setContent('')
+        # Make sure we can delete the file later.
+        self.addCleanup(base.chmod, 0700)
+
+        # Get rid of our own read permission.
+        base.chmod(0)
+
+        file = static.File(base.path)
+        request = DummyRequest([''])
+        d = self._render(file, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, 403)
+        d.addCallback(cbRendered)
+        return d
+    if platform.isWindows():
+        test_forbiddenResource.skip = "Cannot remove read permission on Windows"
+
+
+    def test_indexNames(self):
+        """
+        If a request is made which encounters a L{File} before a final empty
+        segment, a file in the L{File} instance's C{indexNames} list which
+        exists in the path the L{File} was created with is served as the
+        response to the request.
+        """
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        base.child("foo.bar").setContent("baz")
+        file = static.File(base.path)
+        file.indexNames = ['foo.bar']
+
+        request = DummyRequest([''])
+        child = resource.getChildForRequest(file, request)
+
+        d = self._render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(''.join(request.written), 'baz')
+            self.assertEqual(request.outgoingHeaders['content-length'], '3')
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_staticFile(self):
+        """
+        If a request is made which encounters a L{File} before a final segment
+        which names a file in the path the L{File} was created with, that file
+        is served as the response to the request.
+        """
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        base.child("foo.bar").setContent("baz")
+        file = static.File(base.path)
+
+        request = DummyRequest(['foo.bar'])
+        child = resource.getChildForRequest(file, request)
+
+        d = self._render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(''.join(request.written), 'baz')
+            self.assertEqual(request.outgoingHeaders['content-length'], '3')
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_staticFileDeletedGetChild(self):
+        """
+        A L{static.File} created for a directory which does not exist should
+        return childNotFound from L{static.File.getChild}.
+        """
+        staticFile = static.File(self.mktemp())
+        request = DummyRequest(['foo.bar'])
+        child = staticFile.getChild("foo.bar", request)
+        self.assertEqual(child, staticFile.childNotFound)
+
+
+    def test_staticFileDeletedRender(self):
+        """
+        A L{static.File} created for a file which does not exist should render
+        its C{childNotFound} page.
+        """
+        staticFile = static.File(self.mktemp())
+        request = DummyRequest(['foo.bar'])
+        request2 = DummyRequest(['foo.bar'])
+        d = self._render(staticFile, request)
+        d2 = self._render(staticFile.childNotFound, request2)
+        def cbRendered2(ignored):
+            def cbRendered(ignored):
+                self.assertEqual(''.join(request.written),
+                                  ''.join(request2.written))
+            d.addCallback(cbRendered)
+            return d
+        d2.addCallback(cbRendered2)
+        return d2
+
+
+    def test_headRequest(self):
+        """
+        L{static.File.render} returns an empty response body for I{HEAD}
+        requests.
+        """
+        path = FilePath(self.mktemp())
+        path.setContent("foo")
+        file = static.File(path.path)
+        request = DummyRequest([''])
+        request.method = 'HEAD'
+        d = _render(file, request)
+        def cbRendered(ignored):
+            self.assertEqual("".join(request.written), "")
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_processors(self):
+        """
+        If a request is made which encounters a L{File} before a final segment
+        which names a file with an extension which is in the L{File}'s
+        C{processors} mapping, the processor associated with that extension is
+        used to serve the response to the request.
+        """
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        base.child("foo.bar").setContent(
+            "from twisted.web.static import Data\n"
+            "resource = Data('dynamic world','text/plain')\n")
+
+        file = static.File(base.path)
+        file.processors = {'.bar': script.ResourceScript}
+        request = DummyRequest(["foo.bar"])
+        child = resource.getChildForRequest(file, request)
+
+        d = self._render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(''.join(request.written), 'dynamic world')
+            self.assertEqual(request.outgoingHeaders['content-length'], '13')
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_ignoreExt(self):
+        """
+        The list of ignored extensions can be set by passing a value to
+        L{File.__init__} or by calling L{File.ignoreExt} later.
+        """
+        file = static.File(".")
+        self.assertEqual(file.ignoredExts, [])
+        file.ignoreExt(".foo")
+        file.ignoreExt(".bar")
+        self.assertEqual(file.ignoredExts, [".foo", ".bar"])
+
+        file = static.File(".", ignoredExts=(".bar", ".baz"))
+        self.assertEqual(file.ignoredExts, [".bar", ".baz"])
+
+
+    def test_ignoredExtensionsIgnored(self):
+        """
+        A request for the I{base} child of a L{File} succeeds with a resource
+        for the I{base<extension>} file in the path the L{File} was created
+        with if such a file exists and the L{File} has been configured to
+        ignore the I{<extension>} extension.
+        """
+        base = FilePath(self.mktemp())
+        base.makedirs()
+        base.child('foo.bar').setContent('baz')
+        base.child('foo.quux').setContent('foobar')
+        file = static.File(base.path, ignoredExts=(".bar",))
+
+        request = DummyRequest(["foo"])
+        child = resource.getChildForRequest(file, request)
+
+        d = self._render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(''.join(request.written), 'baz')
+        d.addCallback(cbRendered)
+        return d
+
+
+
+class StaticMakeProducerTests(TestCase):
+    """
+    Tests for L{File.makeProducer}.
+    """
+
+
+    def makeResourceWithContent(self, content, type=None, encoding=None):
+        """
+        Make a L{static.File} resource that has C{content} for its content.
+
+        @param content: The bytes to use as the contents of the resource.
+        @param type: Optional value for the content type of the resource.
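+        @param encoding: Optional value for the content encoding of the
+            resource.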
+        """
+        fileName = self.mktemp()
+        fileObject = open(fileName, 'w')
+        fileObject.write(content)
+        fileObject.close()
+        resource = static.File(fileName)
+        resource.encoding = encoding
+        resource.type = type
+        return resource
+
+
+    def contentHeaders(self, request):
+        """
+        Extract the content-* headers from the L{DummyRequest} C{request}.
+
+        This returns the subset of C{request.outgoingHeaders} of headers that
+        start with 'content-'.
+        """
+        contentHeaders = {}
+        for k, v in request.outgoingHeaders.iteritems():
+            if k.startswith('content-'):
+                contentHeaders[k] = v
+        return contentHeaders
+
+
+    def test_noRangeHeaderGivesNoRangeStaticProducer(self):
+        """
+        makeProducer when no Range header is set returns an instance of
+        NoRangeStaticProducer.
+        """
+        resource = self.makeResourceWithContent('')
+        request = DummyRequest([])
+        producer = resource.makeProducer(request, resource.openForReading())
+        self.assertIsInstance(producer, static.NoRangeStaticProducer)
+
+
+    def test_noRangeHeaderSets200OK(self):
+        """
+        makeProducer when no Range header is set sets the responseCode on the
+        request to 'OK'.
+        """
+        resource = self.makeResourceWithContent('')
+        request = DummyRequest([])
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(http.OK, request.responseCode)
+
+
+    def test_noRangeHeaderSetsContentHeaders(self):
+        """
+        makeProducer when no Range header is set sets the Content-* headers
+        for the response.
+        """
+        length = 123
+        contentType = "text/plain"
+        contentEncoding = 'gzip'
+        resource = self.makeResourceWithContent(
+            'a'*length, type=contentType, encoding=contentEncoding)
+        request = DummyRequest([])
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            {'content-type': contentType, 'content-length': str(length),
+             'content-encoding': contentEncoding},
+            self.contentHeaders(request))
+
+
+    def test_singleRangeGivesSingleRangeStaticProducer(self):
+        """
+        makeProducer when the Range header requests a single byte range
+        returns an instance of SingleRangeStaticProducer.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=1-3'
+        resource = self.makeResourceWithContent('abcdef')
+        producer = resource.makeProducer(request, resource.openForReading())
+        self.assertIsInstance(producer, static.SingleRangeStaticProducer)
+
+
+    def test_singleRangeSets206PartialContent(self):
+        """
+        makeProducer when the Range header requests a single, satisfiable byte
+        range sets the response code on the request to 'Partial Content'.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=1-3'
+        resource = self.makeResourceWithContent('abcdef')
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            http.PARTIAL_CONTENT, request.responseCode)
+
+
+    def test_singleRangeSetsContentHeaders(self):
+        """
+        makeProducer when the Range header requests a single, satisfiable byte
+        range sets the Content-* headers appropriately.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=1-3'
+        contentType = "text/plain"
+        contentEncoding = 'gzip'
+        resource = self.makeResourceWithContent('abcdef', type=contentType, encoding=contentEncoding)
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            {'content-type': contentType, 'content-encoding': contentEncoding,
+             'content-range': 'bytes 1-3/6', 'content-length': '3'},
+            self.contentHeaders(request))
+
+
+    def test_singleUnsatisfiableRangeReturnsSingleRangeStaticProducer(self):
+        """
+        makeProducer still returns an instance of L{SingleRangeStaticProducer}
+        when the Range header requests a single unsatisfiable byte range.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=4-10'
+        resource = self.makeResourceWithContent('abc')
+        producer = resource.makeProducer(request, resource.openForReading())
+        self.assertIsInstance(producer, static.SingleRangeStaticProducer)
+
+
+    def test_singleUnsatisfiableRangeSets416ReqestedRangeNotSatisfiable(self):
+        """
+        makeProducer sets the response code of the request to 'Requested
+        Range Not Satisfiable' when the Range header requests a single
+        unsatisfiable byte range.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=4-10'
+        resource = self.makeResourceWithContent('abc')
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            http.REQUESTED_RANGE_NOT_SATISFIABLE, request.responseCode)
+
+
+    def test_singleUnsatisfiableRangeSetsContentHeaders(self):
+        """
+        makeProducer when the Range header requests a single, unsatisfiable
+        byte range sets the Content-* headers appropriately.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=4-10'
+        contentType = "text/plain"
+        resource = self.makeResourceWithContent('abc', type=contentType)
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            {'content-type': 'text/plain', 'content-length': '0',
+             'content-range': 'bytes */3'},
+            self.contentHeaders(request))
+
+
+    def test_singlePartiallyOverlappingRangeSetsContentHeaders(self):
+        """
+        makeProducer when the Range header requests a single byte range that
+        partly overlaps the resource sets the Content-* headers appropriately.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=2-10'
+        contentType = "text/plain"
+        resource = self.makeResourceWithContent('abc', type=contentType)
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            {'content-type': 'text/plain', 'content-length': '1',
+             'content-range': 'bytes 2-2/3'},
+            self.contentHeaders(request))
+
+
+    def test_multipleRangeGivesMultipleRangeStaticProducer(self):
+        """
+        makeProducer when the Range header requests multiple byte ranges
+        returns an instance of MultipleRangeStaticProducer.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=1-3,5-6'
+        resource = self.makeResourceWithContent('abcdef')
+        producer = resource.makeProducer(request, resource.openForReading())
+        self.assertIsInstance(producer, static.MultipleRangeStaticProducer)
+
+
+    def test_multipleRangeSets206PartialContent(self):
+        """
+        makeProducer when the Range header requests multiple satisfiable
+        byte ranges sets the response code on the request to 'Partial
+        Content'.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=1-3,5-6'
+        resource = self.makeResourceWithContent('abcdef')
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            http.PARTIAL_CONTENT, request.responseCode)
+
+
+    def test_multipleRangeSetsContentHeaders(self):
+        """
+        makeProducer when the Range header requests multiple satisfiable byte
+        ranges sets the Content-* headers appropriately.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=1-3,5-6'
+        resource = self.makeResourceWithContent(
+            'abcdefghijkl', encoding='gzip')
+        producer = resource.makeProducer(request, resource.openForReading())
+        contentHeaders = self.contentHeaders(request)
+        # The only content-* headers set are content-type and content-length.
+        self.assertEqual(
+            set(['content-length', 'content-type']),
+            set(contentHeaders.keys()))
+        # The content-length depends on the boundary used in the response.
+        expectedLength = 5
+        for boundary, offset, size in producer.rangeInfo:
+            expectedLength += len(boundary)
+        self.assertEqual(expectedLength, contentHeaders['content-length'])
+        # Content-type should be set to a value indicating a multipart
+        # response and the boundary used to separate the parts.
+        self.assertIn('content-type', contentHeaders)
+        contentType = contentHeaders['content-type']
+        self.assertNotIdentical(
+            None, re.match(
+                'multipart/byteranges; boundary="[^"]*"\Z', contentType))
+        # Content-encoding is not set in the response to a multiple range
+        # response, which is a bit wussy but works well enough with the way
+        # static.File does content-encodings...
+        self.assertNotIn('content-encoding', contentHeaders)
+
+
+    def test_multipleUnsatisfiableRangesReturnsMultipleRangeStaticProducer(self):
+        """
+        makeProducer still returns an instance of L{MultipleRangeStaticProducer}
+        when the Range header requests multiple ranges, none of which are
+        satisfiable.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=10-12,15-20'
+        resource = self.makeResourceWithContent('abc')
+        producer = resource.makeProducer(request, resource.openForReading())
+        self.assertIsInstance(producer, static.MultipleRangeStaticProducer)
+
+
+    def test_multipleUnsatisfiableRangesSets416RequestedRangeNotSatisfiable(self):
+        """
+        makeProducer sets the response code of the request to 'Requested
+        Range Not Satisfiable' when the Range header requests multiple ranges,
+        none of which are satisfiable.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=10-12,15-20'
+        resource = self.makeResourceWithContent('abc')
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            http.REQUESTED_RANGE_NOT_SATISFIABLE, request.responseCode)
+
+
+    def test_multipleUnsatisfiableRangeSetsContentHeaders(self):
+        """
+        makeProducer when the Range header requests multiple ranges, none of
+        which are satisfiable, sets the Content-* headers appropriately.
+        """
+        request = DummyRequest([])
+        contentType = "text/plain"
+        request.headers['range'] = 'bytes=10-12,15-20'
+        resource = self.makeResourceWithContent('abc', type=contentType)
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            {'content-length': '0', 'content-range': 'bytes */3'},
+            self.contentHeaders(request))
+
+
+    def test_oneSatisfiableRangeIsEnough(self):
+        """
+        makeProducer when the Range header requests multiple ranges, at least
+        one of which matches, sets the response code to 'Partial Content'.
+        """
+        request = DummyRequest([])
+        request.headers['range'] = 'bytes=1-3,100-200'
+        resource = self.makeResourceWithContent('abcdef')
+        resource.makeProducer(request, resource.openForReading())
+        self.assertEqual(
+            http.PARTIAL_CONTENT, request.responseCode)
+
+
+
+class StaticProducerTests(TestCase):
+    """
+    Tests for the abstract L{StaticProducer}.
+    """
+
+    def test_stopProducingClosesFile(self):
+        """
+        L{StaticProducer.stopProducing} closes the file object the producer is
+        producing data from.
+        """
+        fileObject = StringIO.StringIO()
+        producer = static.StaticProducer(None, fileObject)
+        producer.stopProducing()
+        self.assertTrue(fileObject.closed)
+
+
+    def test_stopProducingSetsRequestToNone(self):
+        """
+        L{StaticProducer.stopProducing} sets the request instance variable to
+        None, which indicates to subclasses' resumeProducing methods that no
+        more data should be produced.
+        """
+        fileObject = StringIO.StringIO()
+        producer = static.StaticProducer(DummyRequest([]), fileObject)
+        producer.stopProducing()
+        self.assertIdentical(None, producer.request)
+
+
+
+class NoRangeStaticProducerTests(TestCase):
+    """
+    Tests for L{NoRangeStaticProducer}.
+    """
+
+    def test_implementsIPullProducer(self):
+        """
+        L{NoRangeStaticProducer} implements L{IPullProducer}.
+        """
+        verifyObject(
+            interfaces.IPullProducer,
+            static.NoRangeStaticProducer(None, None))
+
+
+    def test_resumeProducingProducesContent(self):
+        """
+        L{NoRangeStaticProducer.resumeProducing} writes content from the
+        resource to the request.
+        """
+        request = DummyRequest([])
+        content = 'abcdef'
+        producer = static.NoRangeStaticProducer(
+            request, StringIO.StringIO(content))
+        # start calls registerProducer on the DummyRequest, which pulls all
+        # output from the producer and so we just need this one call.
+        producer.start()
+        self.assertEqual(content, ''.join(request.written))
+
+
+    def test_resumeProducingBuffersOutput(self):
+        """
+        L{NoRangeStaticProducer.start} writes at most
+        C{abstract.FileDescriptor.bufferSize} bytes of content from the
+        resource to the request at once.
+        """
+        request = DummyRequest([])
+        bufferSize = abstract.FileDescriptor.bufferSize
+        content = 'a' * (2*bufferSize + 1)
+        producer = static.NoRangeStaticProducer(
+            request, StringIO.StringIO(content))
+        # start calls registerProducer on the DummyRequest, which pulls all
+        # output from the producer and so we just need this one call.
+        producer.start()
+        expected = [
+            content[0:bufferSize],
+            content[bufferSize:2*bufferSize],
+            content[2*bufferSize:]
+            ]
+        self.assertEqual(expected, request.written)
+
+
+    def test_finishCalledWhenDone(self):
+        """
+        L{NoRangeStaticProducer.resumeProducing} calls finish() on the request
+        after it is done producing content.
+        """
+        request = DummyRequest([])
+        finishDeferred = request.notifyFinish()
+        callbackList = []
+        finishDeferred.addCallback(callbackList.append)
+        producer = static.NoRangeStaticProducer(
+            request, StringIO.StringIO('abcdef'))
+        # start calls registerProducer on the DummyRequest, which pulls all
+        # output from the producer and so we just need this one call.
+        producer.start()
+        self.assertEqual([None], callbackList)
+
+
+
+class SingleRangeStaticProducerTests(TestCase):
+    """
+    Tests for L{SingleRangeStaticProducer}.
+    """
+
+    def test_implementsIPullProducer(self):
+        """
+        L{SingleRangeStaticProducer} implements L{IPullProducer}.
+        """
+        verifyObject(
+            interfaces.IPullProducer,
+            static.SingleRangeStaticProducer(None, None, None, None))
+
+
+    def test_resumeProducingProducesContent(self):
+        """
+        L{SingleRangeStaticProducer.resumeProducing} writes the given amount
+        of content, starting at the given offset, from the resource to the
+        request.
+        """
+        request = DummyRequest([])
+        content = 'abcdef'
+        producer = static.SingleRangeStaticProducer(
+            request, StringIO.StringIO(content), 1, 3)
+        # DummyRequest.registerProducer pulls all output from the producer, so
+        # we just need to call start.
+        producer.start()
+        self.assertEqual(content[1:4], ''.join(request.written))
+
+
+    def test_resumeProducingBuffersOutput(self):
+        """
+        L{SingleRangeStaticProducer.start} writes at most
+        C{abstract.FileDescriptor.bufferSize} bytes of content from the
+        resource to the request at once.
+        """
+        request = DummyRequest([])
+        bufferSize = abstract.FileDescriptor.bufferSize
+        content = 'abc' * bufferSize
+        producer = static.SingleRangeStaticProducer(
+            request, StringIO.StringIO(content), 1, bufferSize+10)
+        # DummyRequest.registerProducer pulls all output from the producer, so
+        # we just need to call start.
+        producer.start()
+        expected = [
+            content[1:bufferSize+1],
+            content[bufferSize+1:bufferSize+11],
+            ]
+        self.assertEqual(expected, request.written)
+
+
+    def test_finishCalledWhenDone(self):
+        """
+        L{SingleRangeStaticProducer.resumeProducing} calls finish() on the
+        request after it is done producing content.
+        """
+        request = DummyRequest([])
+        finishDeferred = request.notifyFinish()
+        callbackList = []
+        finishDeferred.addCallback(callbackList.append)
+        producer = static.SingleRangeStaticProducer(
+            request, StringIO.StringIO('abcdef'), 1, 1)
+        # start calls registerProducer on the DummyRequest, which pulls all
+        # output from the producer and so we just need this one call.
+        producer.start()
+        self.assertEqual([None], callbackList)
+
+
+
+class MultipleRangeStaticProducerTests(TestCase):
+    """
+    Tests for L{MultipleRangeStaticProducer}.
+    """
+
+    def test_implementsIPullProducer(self):
+        """
+        L{MultipleRangeStaticProducer} implements L{IPullProducer}.
+        """
+        verifyObject(
+            interfaces.IPullProducer,
+            static.MultipleRangeStaticProducer(None, None, None))
+
+
+    def test_resumeProducingProducesContent(self):
+        """
+        L{MultipleRangeStaticProducer.resumeProducing} writes the requested
+        chunks of content from the resource to the request, with the supplied
+        boundaries in between each chunk.
+        """
+        request = DummyRequest([])
+        content = 'abcdef'
+        producer = static.MultipleRangeStaticProducer(
+            request, StringIO.StringIO(content), [('1', 1, 3), ('2', 5, 1)])
+        # DummyRequest.registerProducer pulls all output from the producer, so
+        # we just need to call start.
+        producer.start()
+        self.assertEqual('1bcd2f', ''.join(request.written))
+
+
+    def test_resumeProducingBuffersOutput(self):
+        """
+        L{MultipleRangeStaticProducer.start} writes about
+        C{abstract.FileDescriptor.bufferSize} bytes of content from the
+        resource to the request at once.
+
+        To be specific about the 'about' above: it can write slightly more,
+        for example when the first boundary plus the first chunk is less than
+        C{bufferSize} but the first boundary plus the first chunk plus the
+        second boundary is more.  This is unimportant in practice because the
+        boundaries are fairly small.  On the other hand, it is important for
+        performance to bundle several small chunks into one call to
+        request.write.
+        """
+        request = DummyRequest([])
+        content = '0123456789' * 2
+        producer = static.MultipleRangeStaticProducer(
+            request, StringIO.StringIO(content),
+            [('a', 0, 2), ('b', 5, 10), ('c', 0, 0)])
+        producer.bufferSize = 10
+        # DummyRequest.registerProducer pulls all output from the producer, so
+        # we just need to call start.
+        producer.start()
+        expected = [
+            'a' + content[0:2] + 'b' + content[5:11],
+            content[11:15] + 'c',
+            ]
+        self.assertEqual(expected, request.written)
+
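A minimal sketch of the bundling rule described in the docstring above -- not the
upstream MultipleRangeStaticProducer code, only a standalone function, consistent
with the writes expected by this test, that packs (boundary, offset, size) ranges
into writes of roughly bufferSize bytes:

    # Sketch only: coalesce boundaries and range data into writes of roughly
    # bufferSize bytes, splitting a range's data across writes when needed.
    def bundledWrites(fileObject, rangeInfo, bufferSize):
        writes = []
        buf, buffered = [], 0
        for boundary, offset, size in rangeInfo:
            buf.append(boundary)
            buffered += len(boundary)
            remaining = size
            while remaining > 0:
                if buffered >= bufferSize:
                    writes.append(''.join(buf))
                    buf, buffered = [], 0
                toRead = min(bufferSize - buffered, remaining)
                fileObject.seek(offset + (size - remaining))
                chunk = fileObject.read(toRead)
                if not chunk:
                    break
                buf.append(chunk)
                buffered += len(chunk)
                remaining -= len(chunk)
        if buf:
            writes.append(''.join(buf))
        return writes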
+
+    def test_finishCalledWhenDone(self):
+        """
+        L{MultipleRangeStaticProducer.resumeProducing} calls finish() on the
+        request after it is done producing content.
+        """
+        request = DummyRequest([])
+        finishDeferred = request.notifyFinish()
+        callbackList = []
+        finishDeferred.addCallback(callbackList.append)
+        producer = static.MultipleRangeStaticProducer(
+            request, StringIO.StringIO('abcdef'), [('', 1, 2)])
+        # start calls registerProducer on the DummyRequest, which pulls all
+        # output from the producer and so we just need this one call.
+        producer.start()
+        self.assertEqual([None], callbackList)
+
+
+
+class RangeTests(TestCase):
+    """
+    Tests for I{Range-Header} support in L{twisted.web.static.File}.
+
+    @type file: L{file}
+    @ivar file: Temporary (binary) file containing the content to be served.
+
+    @type resource: L{static.File}
+    @ivar resource: A leaf web resource using C{file} as content.
+
+    @type request: L{DummyRequest}
+    @ivar request: A fake request, requesting C{resource}.
+
+    @type catcher: L{list}
+    @ivar catcher: List which gathers all log information.
+    """
+    def setUp(self):
+        """
+        Create a temporary file with a fixed payload of 64 bytes.  Create a
+        resource for that file and create a request which will be for that
+        resource.  Each test can set a different range header to test different
+        aspects of the implementation.
+        """
+        path = FilePath(self.mktemp())
+        # This is just a jumble of random stuff.  It's supposed to be a good
+        # set of data for this test, particularly in order to avoid
+        # accidentally seeing the right result by having a byte sequence
+        # repeated at different locations or by having byte values which are
+        # somehow correlated with their position in the string.
+        self.payload = ('\xf8u\xf3E\x8c7\xce\x00\x9e\xb6a0y0S\xf0\xef\xac\xb7'
+                        '\xbe\xb5\x17M\x1e\x136k{\x1e\xbe\x0c\x07\x07\t\xd0'
+                        '\xbckY\xf5I\x0b\xb8\x88oZ\x1d\x85b\x1a\xcdk\xf2\x1d'
+                        '&\xfd%\xdd\x82q/A\x10Y\x8b')
+        path.setContent(self.payload)
+        self.file = path.open()
+        self.resource = static.File(self.file.name)
+        self.resource.isLeaf = 1
+        self.request = DummyRequest([''])
+        self.request.uri = self.file.name
+        self.catcher = []
+        log.addObserver(self.catcher.append)
+
+
+    def tearDown(self):
+        """
+        Clean up the resource file and the log observer.
+        """
+        self.file.close()
+        log.removeObserver(self.catcher.append)
+
+
+    def _assertLogged(self, expected):
+        """
+        Assert that exactly one message was logged and that its text matches
+        C{expected}.
+        """
+        logItem = self.catcher.pop()
+        self.assertEqual(logItem["message"][0], expected)
+        self.assertEqual(
+            self.catcher, [], "An additional log occurred: %r" % (logItem,))
+
+
+    def test_invalidRanges(self):
+        """
+        L{File._parseRangeHeader} raises L{ValueError} when passed
+        syntactically invalid byte ranges.
+        """
+        f = self.resource._parseRangeHeader
+
+        # there's no =
+        self.assertRaises(ValueError, f, 'bytes')
+
+        # unknown isn't a valid Bytes-Unit
+        self.assertRaises(ValueError, f, 'unknown=1-2')
+
+        # there's no - in =stuff
+        self.assertRaises(ValueError, f, 'bytes=3')
+
+        # both start and end are empty
+        self.assertRaises(ValueError, f, 'bytes=-')
+
+        # start isn't an integer
+        self.assertRaises(ValueError, f, 'bytes=foo-')
+
+        # end isn't an integer
+        self.assertRaises(ValueError, f, 'bytes=-foo')
+
+        # end isn't equal to or greater than start
+        self.assertRaises(ValueError, f, 'bytes=5-4')
+
+
+    def test_rangeMissingStop(self):
+        """
+        A single bytes range without an explicit stop position is parsed into a
+        two-tuple giving the start position and C{None}.
+        """
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes=0-'), [(0, None)])
+
+
+    def test_rangeMissingStart(self):
+        """
+        A single bytes range without an explicit start position is parsed into
+        a two-tuple of C{None} and the end position.
+        """
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes=-3'), [(None, 3)])
+
+
+    def test_range(self):
+        """
+        A single bytes range with explicit start and stop positions is parsed
+        into a two-tuple of those positions.
+        """
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes=2-5'), [(2, 5)])
+
+
+    def test_rangeWithSpace(self):
+        """
+        A single bytes range with whitespace in allowed places is parsed in
+        the same way as it would be without the whitespace.
+        """
+        self.assertEqual(
+            self.resource._parseRangeHeader(' bytes=1-2 '), [(1, 2)])
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes =1-2 '), [(1, 2)])
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes= 1-2'), [(1, 2)])
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes=1 -2'), [(1, 2)])
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes=1- 2'), [(1, 2)])
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes=1-2 '), [(1, 2)])
+
+
+    def test_nullRangeElements(self):
+        """
+        If there are multiple byte ranges but only one is non-null, the
+        non-null range is parsed and its start and stop returned.
+        """
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes=1-2,\r\n, ,\t'), [(1, 2)])
+
+
+    def test_multipleRanges(self):
+        """
+        If multiple byte ranges are specified their starts and stops are
+        returned.
+        """
+        self.assertEqual(
+            self.resource._parseRangeHeader('bytes=1-2,3-4'),
+            [(1, 2), (3, 4)])
+
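The parsing rules exercised above can be summarised in a short sketch.  This is
not the upstream File._parseRangeHeader implementation, only an illustrative
parser consistent with these tests, returning a list of (start, stop) tuples in
which either element may be None:

    # Sketch only: parse a Range header value such as 'bytes=1-2,3-4'.
    def parseRangeHeader(value):
        unit, sep, rangeSet = value.strip().partition('=')
        if sep != '=' or unit.strip() != 'bytes':
            raise ValueError("Unsupported Bytes-Unit or missing '=': %r" % (value,))
        parsed = []
        for spec in rangeSet.split(','):
            spec = spec.strip()
            if not spec:
                continue  # null range element, e.g. 'bytes=1-2,,'
            start, dash, stop = spec.partition('-')
            if not dash:
                raise ValueError("Invalid byte-range-spec: %r" % (spec,))
            start = int(start) if start.strip() else None
            stop = int(stop) if stop.strip() else None
            if start is None and stop is None:
                raise ValueError("Invalid byte-range-spec: %r" % (spec,))
            if start is not None and stop is not None and stop < start:
                raise ValueError("Invalid byte-range-spec: %r" % (spec,))
            parsed.append((start, stop))
        return parsed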
+
+    def test_bodyLength(self):
+        """
+        A correct response to a range request is as long as the length of the
+        requested range.
+        """
+        self.request.headers['range'] = 'bytes=0-43'
+        self.resource.render(self.request)
+        self.assertEqual(len(''.join(self.request.written)), 44)
+
+
+    def test_invalidRangeRequest(self):
+        """
+        An incorrect range request (RFC 2616 defines a correct range request
+        as a Bytes-Unit, which must be 'bytes', followed by a '=' character
+        and a range specification) results in the range header value being
+        logged and a normal 200 response being sent.
+        """
+        self.request.headers['range'] = range = 'foobar=0-43'
+        self.resource.render(self.request)
+        expected = "Ignoring malformed Range header %r" % (range,)
+        self._assertLogged(expected)
+        self.assertEqual(''.join(self.request.written), self.payload)
+        self.assertEqual(self.request.responseCode, http.OK)
+        self.assertEqual(
+            self.request.outgoingHeaders['content-length'],
+            str(len(self.payload)))
+
+
+    def parseMultipartBody(self, body, boundary):
+        """
+        Parse C{body} as a multipart MIME response separated by C{boundary}.
+
+        Note that this will fail the calling test on certain syntactic
+        problems.
+        """
+        sep = "\r\n--" + boundary
+        parts = ''.join(body).split(sep)
+        self.assertEqual('', parts[0])
+        self.assertEqual('--\r\n', parts[-1])
+        parsed_parts = []
+        for part in parts[1:-1]:
+            before, header1, header2, blank, partBody = part.split('\r\n', 4)
+            headers = header1 + '\n' + header2
+            self.assertEqual('', before)
+            self.assertEqual('', blank)
+            partContentTypeValue = re.search(
+                '^content-type: (.*)$', headers, re.I|re.M).group(1)
+            start, end, size = re.search(
+                '^content-range: bytes ([0-9]+)-([0-9]+)/([0-9]+)$',
+                headers, re.I|re.M).groups()
+            parsed_parts.append(
+                {'contentType': partContentTypeValue,
+                 'contentRange': (start, end, size),
+                 'body': partBody})
+        return parsed_parts
+
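For reference, a made-up example of the body layout parseMultipartBody expects;
the boundary string, payload, and byte ranges below are illustrative only (real
responses use a generated boundary, and header capitalisation may differ, which
is why the regular expressions above are case-insensitive):

    # Sketch only: a two-part multipart/byteranges body for a 64-byte payload.
    payload = 'x' * 64          # stands in for self.payload from setUp
    exampleBody = (
        '\r\n--BOUNDARY'
        '\r\ncontent-type: text/plain'
        '\r\ncontent-range: bytes 0-2/64'
        '\r\n\r\n' + payload[0:3] +
        '\r\n--BOUNDARY'
        '\r\ncontent-type: text/plain'
        '\r\ncontent-range: bytes 20-30/64'
        '\r\n\r\n' + payload[20:31] +
        '\r\n--BOUNDARY--\r\n')
    # parseMultipartBody(exampleBody, 'BOUNDARY') would return two parts with
    # contentRange ('0', '2', '64') and ('20', '30', '64').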
+
+    def test_multipleRangeRequest(self):
+        """
+        The response to a request for multiple byte ranges is a MIME-ish
+        multipart response.
+        """
+        startEnds = [(0, 2), (20, 30), (40, 50)]
+        rangeHeaderValue = ','.join(["%s-%s"%(s,e) for (s, e) in startEnds])
+        self.request.headers['range'] = 'bytes=' + rangeHeaderValue
+        self.resource.render(self.request)
+        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
+        boundary = re.match(
+            '^multipart/byteranges; boundary="(.*)"$',
+            self.request.outgoingHeaders['content-type']).group(1)
+        parts = self.parseMultipartBody(''.join(self.request.written), boundary)
+        self.assertEqual(len(startEnds), len(parts))
+        for part, (s, e) in zip(parts, startEnds):
+            self.assertEqual(self.resource.type, part['contentType'])
+            start, end, size = part['contentRange']
+            self.assertEqual(int(start), s)
+            self.assertEqual(int(end), e)
+            self.assertEqual(int(size), self.resource.getFileSize())
+            self.assertEqual(self.payload[s:e+1], part['body'])
+
+
+    def test_multipleRangeRequestWithRangeOverlappingEnd(self):
+        """
+        The response to a request for multiple byte ranges is a MIME-ish
+        multipart response, even when one of the ranges falls off the end of
+        the resource.
+        """
+        startEnds = [(0, 2), (40, len(self.payload) + 10)]
+        rangeHeaderValue = ','.join(["%s-%s"%(s,e) for (s, e) in startEnds])
+        self.request.headers['range'] = 'bytes=' + rangeHeaderValue
+        self.resource.render(self.request)
+        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
+        boundary = re.match(
+            '^multipart/byteranges; boundary="(.*)"$',
+            self.request.outgoingHeaders['content-type']).group(1)
+        parts = self.parseMultipartBody(''.join(self.request.written), boundary)
+        self.assertEqual(len(startEnds), len(parts))
+        for part, (s, e) in zip(parts, startEnds):
+            self.assertEqual(self.resource.type, part['contentType'])
+            start, end, size = part['contentRange']
+            self.assertEqual(int(start), s)
+            self.assertEqual(int(end), min(e, self.resource.getFileSize()-1))
+            self.assertEqual(int(size), self.resource.getFileSize())
+            self.assertEqual(self.payload[s:e+1], part['body'])
+
+
+    def test_implicitEnd(self):
+        """
+        If the end byte position is omitted, the range extends to the end of
+        the resource, as if the last byte position had been specified.
+        """
+        self.request.headers['range'] = 'bytes=23-'
+        self.resource.render(self.request)
+        self.assertEqual(''.join(self.request.written), self.payload[23:])
+        self.assertEqual(len(''.join(self.request.written)), 41)
+        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
+        self.assertEqual(
+            self.request.outgoingHeaders['content-range'], 'bytes 23-63/64')
+        self.assertEqual(self.request.outgoingHeaders['content-length'], '41')
+
+
+    def test_implicitStart(self):
+        """
+        If the start byte position is omitted but the end byte position is
+        supplied, then the range is treated as requesting the last N bytes of
+        the resource, where N is the end byte position.
+        """
+        self.request.headers['range'] = 'bytes=-17'
+        self.resource.render(self.request)
+        self.assertEqual(''.join(self.request.written), self.payload[-17:])
+        self.assertEqual(len(''.join(self.request.written)), 17)
+        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
+        self.assertEqual(
+            self.request.outgoingHeaders['content-range'], 'bytes 47-63/64')
+        self.assertEqual(self.request.outgoingHeaders['content-length'], '17')
+
+
+    def test_explicitRange(self):
+        """
+        A correct response to a bytes range header request from A to B starts
+        with the A'th byte and ends with the B'th byte, inclusive.  The first
+        byte of a page is numbered 0.
+        """
+        self.request.headers['range'] = 'bytes=3-43'
+        self.resource.render(self.request)
+        written = ''.join(self.request.written)
+        self.assertEqual(written, self.payload[3:44])
+        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
+        self.assertEqual(
+            self.request.outgoingHeaders['content-range'], 'bytes 3-43/64')
+        self.assertEqual(
+            str(len(written)), self.request.outgoingHeaders['content-length'])
+
+
+    def test_explicitRangeOverlappingEnd(self):
+        """
+        A correct response to a bytes range header request from A to B when B
+        is past the end of the resource starts with the A'th byte and ends
+        with the last byte of the resource.  The first byte of a page is
+        numbered 0.
+        """
+        self.request.headers['range'] = 'bytes=40-100'
+        self.resource.render(self.request)
+        written = ''.join(self.request.written)
+        self.assertEqual(written, self.payload[40:])
+        self.assertEqual(self.request.responseCode, http.PARTIAL_CONTENT)
+        self.assertEqual(
+            self.request.outgoingHeaders['content-range'], 'bytes 40-63/64')
+        self.assertEqual(
+            str(len(written)), self.request.outgoingHeaders['content-length'])
+
+
+    def test_statusCodeRequestedRangeNotSatisfiable(self):
+        """
+        If a range is syntactically invalid due to the start being greater than
+        the end, the range header is ignored (the request is responded to as if
+        it were not present).
+        """
+        self.request.headers['range'] = 'bytes=20-13'
+        self.resource.render(self.request)
+        self.assertEqual(self.request.responseCode, http.OK)
+        self.assertEqual(''.join(self.request.written), self.payload)
+        self.assertEqual(
+            self.request.outgoingHeaders['content-length'],
+            str(len(self.payload)))
+
+
+    def test_invalidStartBytePos(self):
+        """
+        If a range is unsatisfiable due to the start not being less than the
+        length of the resource, the response is 416 (Requested range not
+        satisfiable) and no data is written to the response body (RFC 2616,
+        section 14.35.1).
+        """
+        self.request.headers['range'] = 'bytes=67-108'
+        self.resource.render(self.request)
+        self.assertEqual(
+            self.request.responseCode, http.REQUESTED_RANGE_NOT_SATISFIABLE)
+        self.assertEqual(''.join(self.request.written), '')
+        self.assertEqual(self.request.outgoingHeaders['content-length'], '0')
+        # Sections 10.4.17 and 14.16
+        self.assertEqual(
+            self.request.outgoingHeaders['content-range'],
+            'bytes */%d' % (len(self.payload),))
+
+
+
+class DirectoryListerTest(TestCase):
+    """
+    Tests for L{static.DirectoryLister}.
+    """
+    def _request(self, uri):
+        request = DummyRequest([''])
+        request.uri = uri
+        return request
+
+
+    def test_renderHeader(self):
+        """
+        L{static.DirectoryLister} prints the request URI as the heading of
+        the rendered content.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+
+        lister = static.DirectoryLister(path.path)
+        data = lister.render(self._request('foo'))
+        self.assertIn("<h1>Directory listing for foo</h1>", data)
+        self.assertIn("<title>Directory listing for foo</title>", data)
+
+
+    def test_renderUnquoteHeader(self):
+        """
+        L{static.DirectoryLister} unquotes the request URI before printing it.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+
+        lister = static.DirectoryLister(path.path)
+        data = lister.render(self._request('foo%20bar'))
+        self.assertIn("<h1>Directory listing for foo bar</h1>", data)
+        self.assertIn("<title>Directory listing for foo bar</title>", data)
+
+
+    def test_escapeHeader(self):
+        """
+        L{static.DirectoryLister} escapes "&", "<" and ">" after unquoting the
+        request URI.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+
+        lister = static.DirectoryLister(path.path)
+        data = lister.render(self._request('foo%26bar'))
+        self.assertIn("<h1>Directory listing for foo&amp;bar</h1>", data)
+        self.assertIn("<title>Directory listing for foo&amp;bar</title>", data)
+
+
+    def test_renderFiles(self):
+        """
+        L{static.DirectoryLister} is able to list all the files inside a
+        directory.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+        path.child('file1').setContent("content1")
+        path.child('file2').setContent("content2" * 1000)
+
+        lister = static.DirectoryLister(path.path)
+        data = lister.render(self._request('foo'))
+        body = """<tr class="odd">
+    <td><a href="file1">file1</a></td>
+    <td>8B</td>
+    <td>[text/html]</td>
+    <td></td>
+</tr>
+<tr class="even">
+    <td><a href="file2">file2</a></td>
+    <td>7K</td>
+    <td>[text/html]</td>
+    <td></td>
+</tr>"""
+        self.assertIn(body, data)
+
+
+    def test_renderDirectories(self):
+        """
+        L{static.DirectoryLister} is able to list all the directories inside
+        a directory.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+        path.child('dir1').makedirs()
+        path.child('dir2 & 3').makedirs()
+
+        lister = static.DirectoryLister(path.path)
+        data = lister.render(self._request('foo'))
+        body = """<tr class="odd">
+    <td><a href="dir1/">dir1/</a></td>
+    <td></td>
+    <td>[Directory]</td>
+    <td></td>
+</tr>
+<tr class="even">
+    <td><a href="dir2%20%26%203/">dir2 &amp; 3/</a></td>
+    <td></td>
+    <td>[Directory]</td>
+    <td></td>
+</tr>"""
+        self.assertIn(body, data)
+
+
+    def test_renderFiltered(self):
+        """
+        L{static.DirectoryLister} takes an optional C{dirs} argument that
+        filters the list of directories and files printed.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+        path.child('dir1').makedirs()
+        path.child('dir2').makedirs()
+        path.child('dir3').makedirs()
+        lister = static.DirectoryLister(path.path, dirs=["dir1", "dir3"])
+        data = lister.render(self._request('foo'))
+        body = """<tr class="odd">
+    <td><a href="dir1/">dir1/</a></td>
+    <td></td>
+    <td>[Directory]</td>
+    <td></td>
+</tr>
+<tr class="even">
+    <td><a href="dir3/">dir3/</a></td>
+    <td></td>
+    <td>[Directory]</td>
+    <td></td>
+</tr>"""
+        self.assertIn(body, data)
+
+
+    def test_oddAndEven(self):
+        """
+        L{static.DirectoryLister} alternates the CSS class between odd and
+        even rows of the table.
+        """
+        lister = static.DirectoryLister(None)
+        elements = [{"href": "", "text": "", "size": "", "type": "",
+                     "encoding": ""}  for i in xrange(5)]
+        content = lister._buildTableContent(elements)
+
+        self.assertEqual(len(content), 5)
+        self.assertTrue(content[0].startswith('<tr class="odd">'))
+        self.assertTrue(content[1].startswith('<tr class="even">'))
+        self.assertTrue(content[2].startswith('<tr class="odd">'))
+        self.assertTrue(content[3].startswith('<tr class="even">'))
+        self.assertTrue(content[4].startswith('<tr class="odd">'))
+
+
+    def test_contentType(self):
+        """
+        L{static.DirectoryLister} produces a MIME-type that indicates that it is
+        HTML, and includes its charset (UTF-8).
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+        lister = static.DirectoryLister(path.path)
+        req = self._request('')
+        lister.render(req)
+        self.assertEqual(req.outgoingHeaders['content-type'],
+                          "text/html; charset=utf-8")
+
+
+    def test_mimeTypeAndEncodings(self):
+        """
+        L{static.DirectoryLister} detects the MIME type and encoding of
+        listed files.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+        path.child('file1.txt').setContent("file1")
+        path.child('file2.py').setContent("python")
+        path.child('file3.conf.gz').setContent("conf compressed")
+        path.child('file4.diff.bz2').setContent("diff compressed")
+        directory = os.listdir(path.path)
+        directory.sort()
+
+        contentTypes = {
+            ".txt": "text/plain",
+            ".py": "text/python",
+            ".conf": "text/configuration",
+            ".diff": "text/diff"
+        }
+
+        lister = static.DirectoryLister(path.path, contentTypes=contentTypes)
+        dirs, files = lister._getFilesAndDirectories(directory)
+        self.assertEqual(dirs, [])
+        self.assertEqual(files, [
+            {'encoding': '',
+             'href': 'file1.txt',
+             'size': '5B',
+             'text': 'file1.txt',
+             'type': '[text/plain]'},
+            {'encoding': '',
+             'href': 'file2.py',
+             'size': '6B',
+             'text': 'file2.py',
+             'type': '[text/python]'},
+            {'encoding': '[gzip]',
+             'href': 'file3.conf.gz',
+             'size': '15B',
+             'text': 'file3.conf.gz',
+             'type': '[text/configuration]'},
+            {'encoding': '[bzip2]',
+             'href': 'file4.diff.bz2',
+             'size': '15B',
+             'text': 'file4.diff.bz2',
+             'type': '[text/diff]'}])
+
+
+    def test_brokenSymlink(self):
+        """
+        If a file in the listing is a broken symlink, it is not returned by
+        L{static.DirectoryLister._getFilesAndDirectories}.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+        file1 = path.child('file1')
+        file1.setContent("file1")
+        file1.linkTo(path.child("file2"))
+        file1.remove()
+
+        lister = static.DirectoryLister(path.path)
+        directory = os.listdir(path.path)
+        directory.sort()
+        dirs, files = lister._getFilesAndDirectories(directory)
+        self.assertEqual(dirs, [])
+        self.assertEqual(files, [])
+
+    if getattr(os, "symlink", None) is None:
+        test_brokenSymlink.skip = "No symlink support"
+
+
+    def test_childrenNotFound(self):
+        """
+        Any child resource of L{static.DirectoryLister} renders an HTTP
+        I{NOT FOUND} response code.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+        lister = static.DirectoryLister(path.path)
+        request = self._request('')
+        child = resource.getChildForRequest(lister, request)
+        result = _render(child, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, http.NOT_FOUND)
+        result.addCallback(cbRendered)
+        return result
+
+
+    def test_repr(self):
+        """
+        L{static.DirectoryLister.__repr__} gives the path of the lister.
+        """
+        path = FilePath(self.mktemp())
+        lister = static.DirectoryLister(path.path)
+        self.assertEqual(repr(lister),
+                          "<DirectoryLister of %r>" % (path.path,))
+        self.assertEqual(str(lister),
+                          "<DirectoryLister of %r>" % (path.path,))
+
+    def test_formatFileSize(self):
+        """
+        L{static.formatFileSize} formats a number of bytes into a more
+        readable representation.
+        """
+        self.assertEqual(static.formatFileSize(0), "0B")
+        self.assertEqual(static.formatFileSize(123), "123B")
+        self.assertEqual(static.formatFileSize(4567), "4K")
+        self.assertEqual(static.formatFileSize(8900000), "8M")
+        self.assertEqual(static.formatFileSize(1234000000), "1G")
+        self.assertEqual(static.formatFileSize(1234567890000), "1149G")
+
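The expectations in test_formatFileSize above are consistent with a simple
integer-division formatter along these lines (an illustrative sketch, not
necessarily the exact upstream implementation; gigabytes are the largest unit
reported):

    # Sketch only (Python 2 integer division).
    def formatFileSize(size):
        if size < 1024:
            return '%iB' % (size,)
        elif size < 1024 ** 2:
            return '%iK' % (size / 1024,)
        elif size < 1024 ** 3:
            return '%iM' % (size / (1024 ** 2),)
        else:
            return '%iG' % (size / (1024 ** 3),)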
diff --git a/ThirdParty/Twisted/twisted/web/test/test_tap.py b/ThirdParty/Twisted/twisted/web/test/test_tap.py
new file mode 100644
index 0000000..a3e33da
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_tap.py
@@ -0,0 +1,196 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.tap}.
+"""
+
+import os, stat
+
+from twisted.python.usage import UsageError
+from twisted.python.filepath import FilePath
+from twisted.internet.interfaces import IReactorUNIX
+from twisted.internet import reactor
+from twisted.python.threadpool import ThreadPool
+from twisted.trial.unittest import TestCase
+from twisted.application import strports
+
+from twisted.web.server import Site
+from twisted.web.static import Data, File
+from twisted.web.distrib import ResourcePublisher, UserDirectory
+from twisted.web.wsgi import WSGIResource
+from twisted.web.tap import Options, makePersonalServerFactory, makeService
+from twisted.web.twcgi import CGIScript
+from twisted.web.script import PythonScript
+
+
+from twisted.spread.pb import PBServerFactory
+
+application = object()
+
+class ServiceTests(TestCase):
+    """
+    Tests for the service creation APIs in L{twisted.web.tap}.
+    """
+    def _pathOption(self):
+        """
+        Helper for the I{--path} tests which creates a directory and creates
+        an L{Options} object which uses that directory as its static
+        filesystem root.
+
+        @return: A two-tuple of a L{FilePath} referring to the directory and
+            the value associated with the C{'root'} key in the L{Options}
+            instance after parsing a I{--path} option.
+        """
+        path = FilePath(self.mktemp())
+        path.makedirs()
+        options = Options()
+        options.parseOptions(['--path', path.path])
+        root = options['root']
+        return path, root
+
+
+    def test_path(self):
+        """
+        The I{--path} option causes L{Options} to create a root resource
+        which serves responses from the specified path.
+        """
+        path, root = self._pathOption()
+        self.assertIsInstance(root, File)
+        self.assertEqual(root.path, path.path)
+
+
+    def test_cgiProcessor(self):
+        """
+        The I{--path} option creates a root resource which serves a
+        L{CGIScript} instance for any child with the C{".cgi"} extension.
+        """
+        path, root = self._pathOption()
+        path.child("foo.cgi").setContent("")
+        self.assertIsInstance(root.getChild("foo.cgi", None), CGIScript)
+
+
+    def test_epyProcessor(self):
+        """
+        The I{--path} option creates a root resource which serves a
+        L{PythonScript} instance for any child with the C{".epy"} extension.
+        """
+        path, root = self._pathOption()
+        path.child("foo.epy").setContent("")
+        self.assertIsInstance(root.getChild("foo.epy", None), PythonScript)
+
+
+    def test_rpyProcessor(self):
+        """
+        The I{--path} option creates a root resource which serves the
+        C{resource} global defined by the Python source in any child with
+        the C{".rpy"} extension.
+        """
+        path, root = self._pathOption()
+        path.child("foo.rpy").setContent(
+            "from twisted.web.static import Data\n"
+            "resource = Data('content', 'major/minor')\n")
+        child = root.getChild("foo.rpy", None)
+        self.assertIsInstance(child, Data)
+        self.assertEqual(child.data, 'content')
+        self.assertEqual(child.type, 'major/minor')
+
+
+    def test_makePersonalServerFactory(self):
+        """
+        L{makePersonalServerFactory} returns a PB server factory which has
+        as its root object a L{ResourcePublisher}.
+        """
+        # The fact that this pile of objects can actually be used somehow is
+        # verified by twisted.web.test.test_distrib.
+        site = Site(Data("foo bar", "text/plain"))
+        serverFactory = makePersonalServerFactory(site)
+        self.assertIsInstance(serverFactory, PBServerFactory)
+        self.assertIsInstance(serverFactory.root, ResourcePublisher)
+        self.assertIdentical(serverFactory.root.site, site)
+
+
+    def test_personalServer(self):
+        """
+        The I{--personal} option to L{makeService} causes it to return a
+        service which will listen on the server address given by the I{--port}
+        option.
+        """
+        port = self.mktemp()
+        options = Options()
+        options.parseOptions(['--port', 'unix:' + port, '--personal'])
+        service = makeService(options)
+        service.startService()
+        self.addCleanup(service.stopService)
+        self.assertTrue(os.path.exists(port))
+        self.assertTrue(stat.S_ISSOCK(os.stat(port).st_mode))
+
+    if not IReactorUNIX.providedBy(reactor):
+        test_personalServer.skip = (
+            "The reactor does not support UNIX domain sockets")
+
+
+    def test_defaultPersonalPath(self):
+        """
+        If the I{--port} option is not specified but the I{--personal} option
+        is, L{Options} defaults the port to C{UserDirectory.userSocketName}
+        in the user's home directory.
+        """
+        options = Options()
+        options.parseOptions(['--personal'])
+        path = os.path.expanduser(
+            os.path.join('~', UserDirectory.userSocketName))
+        self.assertEqual(
+            strports.parse(options['port'], None)[:2],
+            ('UNIX', (path, None)))
+
+    if not IReactorUNIX.providedBy(reactor):
+        test_defaultPersonalPath.skip = (
+            "The reactor does not support UNIX domain sockets")
+
+
+    def test_defaultPort(self):
+        """
+        If the I{--port} option is not specified, L{Options} defaults the port
+        to C{8080}.
+        """
+        options = Options()
+        options.parseOptions([])
+        self.assertEqual(
+            strports.parse(options['port'], None)[:2],
+            ('TCP', (8080, None)))
+
+
+    def test_wsgi(self):
+        """
+        The I{--wsgi} option takes the fully-qualified Python name of a WSGI
+        application object and creates a L{WSGIResource} at the root which
+        serves that application.
+        """
+        options = Options()
+        options.parseOptions(['--wsgi', __name__ + '.application'])
+        root = options['root']
+        self.assertIsInstance(root, WSGIResource)
+        self.assertIdentical(root._reactor, reactor)
+        self.assertIsInstance(root._threadpool, ThreadPool)
+        self.assertIdentical(root._application, application)
+
+        # The threadpool should start and stop with the reactor.
+        self.assertFalse(root._threadpool.started)
+        reactor.fireSystemEvent('startup')
+        self.assertTrue(root._threadpool.started)
+        self.assertFalse(root._threadpool.joined)
+        reactor.fireSystemEvent('shutdown')
+        self.assertTrue(root._threadpool.joined)
+
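The module-level application = object() defined near the top of this file is
only a stand-in for the name-resolution machinery; an actual target of
I{--wsgi} would be an ordinary WSGI callable.  An illustrative example with a
hypothetical module path, not part of this test:

    # Sketch only: a minimal PEP 333 application that --wsgi could name,
    # e.g. as "mypackage.mymodule.application".
    def application(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['Hello from WSGI\n']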
+
+    def test_invalidApplication(self):
+        """
+        If I{--wsgi} is given an invalid name, L{Options.parseOptions}
+        raises L{UsageError}.
+        """
+        options = Options()
+        for name in [__name__ + '.nosuchthing', 'foo.']:
+            exc = self.assertRaises(
+                UsageError, options.parseOptions, ['--wsgi', name])
+            self.assertEqual(str(exc), "No such WSGI application: %r" % (name,))
diff --git a/ThirdParty/Twisted/twisted/web/test/test_template.py b/ThirdParty/Twisted/twisted/web/test/test_template.py
new file mode 100644
index 0000000..b29303b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_template.py
@@ -0,0 +1,810 @@
+
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Tests for L{twisted.web.template}
+"""
+
+from cStringIO import StringIO
+
+from zope.interface.verify import verifyObject
+
+from twisted.internet.defer import succeed, gatherResults
+from twisted.python.filepath import FilePath
+from twisted.trial.unittest import TestCase
+from twisted.web.template import (
+    Element, TagLoader, renderer, tags, XMLFile, XMLString)
+from twisted.web.iweb import ITemplateLoader
+
+from twisted.web.error import (FlattenerError, MissingTemplateLoader,
+    MissingRenderMethod)
+
+from twisted.web.template import renderElement
+from twisted.web._element import UnexposedMethodError
+from twisted.web.test._util import FlattenTestCase
+from twisted.web.test.test_web import DummyRequest
+from twisted.web.server import NOT_DONE_YET
+
+class TagFactoryTests(TestCase):
+    """
+    Tests for L{_TagFactory} through the publicly-exposed L{tags} object.
+    """
+    def test_lookupTag(self):
+        """
+        HTML tags can be retrieved through C{tags}.
+        """
+        tag = tags.a
+        self.assertEqual(tag.tagName, "a")
+
+
+    def test_lookupHTML5Tag(self):
+        """
+        Twisted supports the latest and greatest HTML tags from the HTML5
+        specification.
+        """
+        tag = tags.video
+        self.assertEqual(tag.tagName, "video")
+
+
+    def test_lookupTransparentTag(self):
+        """
+        To support transparent inclusion in templates, there is a special tag,
+        the transparent tag, which has no name of its own but is accessed
+        through the "transparent" attribute.
+        """
+        tag = tags.transparent
+        self.assertEqual(tag.tagName, "")
+
+
+    def test_lookupInvalidTag(self):
+        """
+        Invalid tags which are not part of HTML cause AttributeErrors when
+        accessed through C{tags}.
+        """
+        self.assertRaises(AttributeError, getattr, tags, "invalid")
+
+
+    def test_lookupXMP(self):
+        """
+        As a special case, the <xmp> tag is simply not available through
+        C{tags} or any other part of the templating machinery.
+        """
+        self.assertRaises(AttributeError, getattr, tags, "xmp")
+
+
+
+class ElementTests(TestCase):
+    """
+    Tests for the awesome new L{Element} class.
+    """
+    def test_missingTemplateLoader(self):
+        """
+        L{Element.render} raises L{MissingTemplateLoader} if the C{loader}
+        attribute is C{None}.
+        """
+        element = Element()
+        err = self.assertRaises(MissingTemplateLoader, element.render, None)
+        self.assertIdentical(err.element, element)
+
+
+    def test_missingTemplateLoaderRepr(self):
+        """
+        A L{MissingTemplateLoader} instance can be repr()'d without error.
+        """
+        class PrettyReprElement(Element):
+            def __repr__(self):
+                return 'Pretty Repr Element'
+        self.assertIn('Pretty Repr Element',
+                      repr(MissingTemplateLoader(PrettyReprElement())))
+
+
+    def test_missingRendererMethod(self):
+        """
+        When called with a name which is not associated with a render method,
+        L{Element.lookupRenderMethod} raises L{MissingRenderMethod}.
+        """
+        element = Element()
+        err = self.assertRaises(
+            MissingRenderMethod, element.lookupRenderMethod, "foo")
+        self.assertIdentical(err.element, element)
+        self.assertEqual(err.renderName, "foo")
+
+
+    def test_missingRenderMethodRepr(self):
+        """
+        A L{MissingRenderMethod} instance can be repr()'d without error.
+        """
+        class PrettyReprElement(Element):
+            def __repr__(self):
+                return 'Pretty Repr Element'
+        s = repr(MissingRenderMethod(PrettyReprElement(),
+                                     'expectedMethod'))
+        self.assertIn('Pretty Repr Element', s)
+        self.assertIn('expectedMethod', s)
+
+
+    def test_definedRenderer(self):
+        """
+        When called with the name of a defined render method,
+        L{Element.lookupRenderMethod} returns that render method.
+        """
+        class ElementWithRenderMethod(Element):
+            @renderer
+            def foo(self, request, tag):
+                return "bar"
+        foo = ElementWithRenderMethod().lookupRenderMethod("foo")
+        self.assertEqual(foo(None, None), "bar")
+
+
+    def test_render(self):
+        """
+        L{Element.render} loads a document from the C{loader} attribute and
+        returns it.
+        """
+        class TemplateLoader(object):
+            def load(self):
+                return "result"
+
+        class StubElement(Element):
+            loader = TemplateLoader()
+
+        element = StubElement()
+        self.assertEqual(element.render(None), "result")
+
+
+    def test_misuseRenderer(self):
+        """
+        If the L{renderer} decorator is called without any arguments, it will
+        raise a comprehensible exception.
+        """
+        te = self.assertRaises(TypeError, renderer)
+        self.assertEqual(str(te),
+                         "expose() takes at least 1 argument (0 given)")
+
+
+    def test_renderGetDirectlyError(self):
+        """
+        Called directly, without a default, L{renderer.get} raises
+        L{UnexposedMethodError} when it cannot find a renderer.
+        """
+        self.assertRaises(UnexposedMethodError, renderer.get, None,
+                          "notARenderer")
+
+
+
+class XMLFileReprTests(TestCase):
+    """
+    Tests for L{twisted.web.template.XMLFile}'s C{__repr__}.
+    """
+    def test_filePath(self):
+        """
+        An L{XMLFile} with a L{FilePath} returns a useful repr().
+        """
+        path = FilePath("/tmp/fake.xml")
+        self.assertEqual('<XMLFile of %r>' % (path,), repr(XMLFile(path)))
+
+
+    def test_filename(self):
+        """
+        An L{XMLFile} with a filename returns a useful repr().
+        """
+        fname = "/tmp/fake.xml"
+        self.assertEqual('<XMLFile of %r>' % (fname,), repr(XMLFile(fname)))
+
+
+    def test_file(self):
+        """
+        An L{XMLFile} with a file object returns a useful repr().
+        """
+        fobj = StringIO("not xml")
+        self.assertEqual('<XMLFile of %r>' % (fobj,), repr(XMLFile(fobj)))
+
+
+
+class XMLLoaderTestsMixin(object):
+    """
+    @ivar templateString: Simple template to use to exercise the loaders.
+
+    @ivar deprecatedUse: C{True} if this use of L{XMLFile} is deprecated and
+        should emit a C{DeprecationWarning}.
+    """
+
+    loaderFactory = None
+    templateString = '<p>Hello, world.</p>'
+    def test_load(self):
+        """
+        Verify that the loader returns a tag with the correct children.
+        """
+        loader = self.loaderFactory()
+        tag, = loader.load()
+
+        warnings = self.flushWarnings(offendingFunctions=[self.loaderFactory])
+        if self.deprecatedUse:
+            self.assertEqual(len(warnings), 1)
+            self.assertEqual(warnings[0]['category'], DeprecationWarning)
+            self.assertEqual(
+                warnings[0]['message'],
+                "Passing filenames or file objects to XMLFile is "
+                "deprecated since Twisted 12.1.  Pass a FilePath instead.")
+        else:
+            self.assertEqual(len(warnings), 0)
+
+        self.assertEqual(tag.tagName, 'p')
+        self.assertEqual(tag.children, [u'Hello, world.'])
+
+
+    def test_loadTwice(self):
+        """
+        If C{load()} is called on a loader twice, the result is the same both
+        times.
+        """
+        loader = self.loaderFactory()
+        tags1 = loader.load()
+        tags2 = loader.load()
+        self.assertEqual(tags1, tags2)
+
+
+
+class XMLStringLoaderTests(TestCase, XMLLoaderTestsMixin):
+    """
+    Tests for L{twisted.web.template.XMLString}
+    """
+    deprecatedUse = False
+    def loaderFactory(self):
+        """
+        @return: an L{XMLString} constructed with C{self.templateString}.
+        """
+        return XMLString(self.templateString)
+
+
+
+class XMLFileWithFilePathTests(TestCase, XMLLoaderTestsMixin):
+    """
+    Tests for L{twisted.web.template.XMLFile}'s L{FilePath} support.
+    """
+    deprecatedUse = False
+    def loaderFactory(self):
+        """
+        @return: an L{XMLFile} constructed with a L{FilePath} pointing to a
+            file that contains C{self.templateString}.
+        """
+        fp = FilePath(self.mktemp())
+        fp.setContent(self.templateString)
+        return XMLFile(fp)
+
+
+
+class XMLFileWithFileTests(TestCase, XMLLoaderTestsMixin):
+    """
+    Tests for L{twisted.web.template.XMLFile}'s deprecated file object support.
+    """
+    deprecatedUse = True
+    def loaderFactory(self):
+        """
+        @return: an L{XMLFile} constructed with a file object that contains
+            C{self.templateString}.
+        """
+        return XMLFile(StringIO(self.templateString))
+
+
+
+class XMLFileWithFilenameTests(TestCase, XMLLoaderTestsMixin):
+    """
+    Tests for L{twisted.web.template.XMLFile}'s deprecated filename support.
+    """
+    deprecatedUse = True
+    def loaderFactory(self):
+        """
+        @return: an L{XMLFile} constructed with a filename that points to a
+            file containing C{self.templateString}.
+        """
+        fp = FilePath(self.mktemp())
+        fp.setContent(self.templateString)
+        return XMLFile(fp.path)
+
+
+
+class FlattenIntegrationTests(FlattenTestCase):
+    """
+    Tests for integration between L{Element} and
+    L{twisted.web._flatten.flatten}.
+    """
+
+    def test_roundTrip(self):
+        """
+        Given a series of parsable XML strings, verify that
+        L{twisted.web._flatten.flatten} will flatten the L{Element} back to the
+        input when sent on a round trip.
+        """
+        fragments = [
+            "<p>Hello, world.</p>",
+            "<p><!-- hello, world --></p>",
+            "<p><![CDATA[Hello, world.]]></p>",
+            '<test1 xmlns:test2="urn:test2">'
+                '<test2:test3></test2:test3></test1>',
+            '<test1 xmlns="urn:test2"><test3></test3></test1>',
+            '<p>\xe2\x98\x83</p>',
+        ]
+        deferreds = [
+            self.assertFlattensTo(Element(loader=XMLString(xml)), xml)
+            for xml in fragments]
+        return gatherResults(deferreds)
+
+
+    def test_entityConversion(self):
+        """
+        When flattening an HTML entity, it should flatten out to the utf-8
+        representation if possible.
+        """
+        element = Element(loader=XMLString('<p>☃</p>'))
+        return self.assertFlattensTo(element, '<p>\xe2\x98\x83</p>')
+
+
+    def test_missingTemplateLoader(self):
+        """
+        Rendering an L{Element} without a C{loader} attribute raises
+        L{MissingTemplateLoader}.
+        """
+        return self.assertFlatteningRaises(Element(), MissingTemplateLoader)
+
+
+    def test_missingRenderMethod(self):
+        """
+        Flattening an L{Element} with a C{loader} which has a tag with a render
+        directive fails with L{FlattenerError} if there is no available render
+        method to satisfy that directive.
+        """
+        element = Element(loader=XMLString("""
+        <p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1"
+          t:render="unknownMethod" />
+        """))
+        return self.assertFlatteningRaises(element, MissingRenderMethod)
+
+
+    def test_transparentRendering(self):
+        """
+        A C{transparent} element should be eliminated from the DOM and rendered as
+        only its children.
+        """
+        element = Element(loader=XMLString(
+            '<t:transparent '
+            'xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
+            'Hello, world.'
+            '</t:transparent>'
+        ))
+        return self.assertFlattensTo(element, "Hello, world.")
+
+
+    def test_attrRendering(self):
+        """
+        An Element with an C{attr} tag renders the value of its C{attr} tag as
+        an attribute of its containing tag.
+        """
+        element = Element(loader=XMLString(
+            '<a xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
+            '<t:attr name="href">http://example.com</t:attr>'
+            'Hello, world.'
+            '</a>'
+        ))
+        return self.assertFlattensTo(element,
+            '<a href="http://example.com">Hello, world.</a>')
+
+
+    def test_errorToplevelAttr(self):
+        """
+        A template with a toplevel C{attr} tag will not load; it will raise
+        L{AssertionError} if you try.
+        """
+        self.assertRaises(
+            AssertionError,
+            XMLString,
+            """<t:attr
+            xmlns:t='http://twistedmatrix.com/ns/twisted.web.template/0.1'
+            name='something'
+            >hello</t:attr>
+            """)
+
+
+    def test_errorUnnamedAttr(self):
+        """
+        A template with an C{attr} tag with no C{name} attribute will not load;
+        it will raise L{AssertionError} if you try.
+        """
+        self.assertRaises(
+            AssertionError,
+            XMLString,
+            """<html><t:attr
+            xmlns:t='http://twistedmatrix.com/ns/twisted.web.template/0.1'
+            >hello</t:attr></html>""")
+
+
+    def test_lenientPrefixBehavior(self):
+        """
+        If the parser sees a prefix it doesn't recognize on an attribute, it
+        will pass it on through to serialization.
+        """
+        theInput = (
+            '<hello:world hello:sample="testing" '
+            'xmlns:hello="http://made-up.example.com/ns/not-real">'
+            'This is a made-up tag.</hello:world>')
+        element = Element(loader=XMLString(theInput))
+        self.assertFlattensTo(element, theInput)
+
+
+    def test_deferredRendering(self):
+        """
+        An Element with a render method which returns a Deferred will render
+        correctly.
+        """
+        class RenderfulElement(Element):
+            @renderer
+            def renderMethod(self, request, tag):
+                return succeed("Hello, world.")
+        element = RenderfulElement(loader=XMLString("""
+        <p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1"
+          t:render="renderMethod">
+            Goodbye, world.
+        </p>
+        """))
+        return self.assertFlattensTo(element, "Hello, world.")
+
+
+    def test_loaderClassAttribute(self):
+        """
+        If there is a non-None loader attribute on the class of an Element
+        instance but none on the instance itself, the class attribute is used.
+        """
+        class SubElement(Element):
+            loader = XMLString("<p>Hello, world.</p>")
+        return self.assertFlattensTo(SubElement(), "<p>Hello, world.</p>")
+
+
+    def test_directiveRendering(self):
+        """
+        An Element with a valid render directive has that directive invoked and
+        the result added to the output.
+        """
+        renders = []
+        class RenderfulElement(Element):
+            @renderer
+            def renderMethod(self, request, tag):
+                renders.append((self, request))
+                return tag("Hello, world.")
+        element = RenderfulElement(loader=XMLString("""
+        <p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1"
+          t:render="renderMethod" />
+        """))
+        return self.assertFlattensTo(element, "<p>Hello, world.</p>")
+
+
+    def test_directiveRenderingOmittingTag(self):
+        """
+        An Element with a render method which omits the containing tag
+        successfully removes that tag from the output.
+        """
+        class RenderfulElement(Element):
+            @renderer
+            def renderMethod(self, request, tag):
+                return "Hello, world."
+        element = RenderfulElement(loader=XMLString("""
+        <p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1"
+          t:render="renderMethod">
+            Goodbye, world.
+        </p>
+        """))
+        return self.assertFlattensTo(element, "Hello, world.")
+
+
+    def test_elementContainingStaticElement(self):
+        """
+        An Element which is returned by the render method of another Element is
+        rendered properly.
+        """
+        class RenderfulElement(Element):
+            @renderer
+            def renderMethod(self, request, tag):
+                return tag(Element(
+                    loader=XMLString("<em>Hello, world.</em>")))
+        element = RenderfulElement(loader=XMLString("""
+        <p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1"
+          t:render="renderMethod" />
+        """))
+        return self.assertFlattensTo(element, "<p><em>Hello, world.</em></p>")
+
+
+    def test_elementUsingSlots(self):
+        """
+        An Element whose render method fills template slots with
+        C{fillSlots} has those slots, including slot defaults, rendered
+        properly.
+        """
+        class RenderfulElement(Element):
+            @renderer
+            def renderMethod(self, request, tag):
+                return tag.fillSlots(test2='world.')
+        element = RenderfulElement(loader=XMLString(
+            '<p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1"'
+            ' t:render="renderMethod">'
+            '<t:slot name="test1" default="Hello, " />'
+            '<t:slot name="test2" />'
+            '</p>'
+        ))
+        return self.assertFlattensTo(element, "<p>Hello, world.</p>")
+
+
+    def test_elementContainingDynamicElement(self):
+        """
+        Directives in the document factory of an Element returned from a render
+        method of another Element are satisfied from the correct object: the
+        "inner" Element.
+        """
+        class OuterElement(Element):
+            @renderer
+            def outerMethod(self, request, tag):
+                return tag(InnerElement(loader=XMLString("""
+                <t:ignored
+                  xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1"
+                  t:render="innerMethod" />
+                """)))
+        class InnerElement(Element):
+            @renderer
+            def innerMethod(self, request, tag):
+                return "Hello, world."
+        element = OuterElement(loader=XMLString("""
+        <p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1"
+          t:render="outerMethod" />
+        """))
+        return self.assertFlattensTo(element, "<p>Hello, world.</p>")
+
+
+    def test_sameLoaderTwice(self):
+        """
+        Rendering the output of a loader, or even the same element, should
+        return different output each time.
+        """
+        sharedLoader = XMLString(
+            '<p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
+            '<t:transparent t:render="classCounter" /> '
+            '<t:transparent t:render="instanceCounter" />'
+            '</p>')
+
+        class DestructiveElement(Element):
+            count = 0
+            instanceCount = 0
+            loader = sharedLoader
+
+            @renderer
+            def classCounter(self, request, tag):
+                DestructiveElement.count += 1
+                return tag(str(DestructiveElement.count))
+            @renderer
+            def instanceCounter(self, request, tag):
+                self.instanceCount += 1
+                return tag(str(self.instanceCount))
+
+        e1 = DestructiveElement()
+        e2 = DestructiveElement()
+        self.assertFlattensImmediately(e1, "<p>1 1</p>")
+        self.assertFlattensImmediately(e1, "<p>2 2</p>")
+        self.assertFlattensImmediately(e2, "<p>3 1</p>")
+
+
+
+class TagLoaderTests(FlattenTestCase):
+    """
+    Tests for L{TagLoader}.
+    """
+    def setUp(self):
+        self.loader = TagLoader(tags.i('test'))
+
+
+    def test_interface(self):
+        """
+        An instance of L{TagLoader} provides L{ITemplateLoader}.
+        """
+        self.assertTrue(verifyObject(ITemplateLoader, self.loader))
+
+
+    def test_loadsList(self):
+        """
+        L{TagLoader.load} returns a list, per L{ITemplateLoader}.
+        """
+        self.assertIsInstance(self.loader.load(), list)
+
+
+    def test_flatten(self):
+        """
+        L{TagLoader} can be used in an L{Element}, and flattens as the tag used
+        to construct the L{TagLoader} would flatten.
+        """
+        e = Element(self.loader)
+        self.assertFlattensImmediately(e, '<i>test</i>')
+
+
+
+class TestElement(Element):
+    """
+    An L{Element} that can be rendered successfully.
+    """
+    loader = XMLString(
+        '<p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
+        'Hello, world.'
+        '</p>')
+
+
+
+class TestFailureElement(Element):
+    """
+    An L{Element} that can be used in place of L{FailureElement} to verify
+    that L{renderElement} can render failures properly.
+    """
+    loader = XMLString(
+        '<p xmlns:t="http://twistedmatrix.com/ns/twisted.web.template/0.1">'
+        'I failed.'
+        '</p>')
+
+    def __init__(self, failure, loader=None):
+        self.failure = failure
+
+
+
+class FailingElement(Element):
+    """
+    An element that raises an exception when rendered.
+    """
+    def render(self, request):
+        a = 42
+        b = 0
+        return a // b
+
+
+
+class FakeSite(object):
+    """
+    A minimal L{Site} stand-in that we can use to test C{displayTracebacks}.
+    """
+    displayTracebacks = False
+
+
+
+class TestRenderElement(TestCase):
+    """
+    Tests for L{renderElement}.
+    """
+
+    def setUp(self):
+        """
+        Set up a common L{DummyRequest} and L{FakeSite}.
+        """
+        self.request = DummyRequest([""])
+        self.request.site = FakeSite()
+
+
+    def test_simpleRender(self):
+        """
+        L{renderElement} returns NOT_DONE_YET and eventually
+        writes the rendered L{Element} to the request before finishing the
+        request.
+        """
+        element = TestElement()
+
+        d = self.request.notifyFinish()
+
+        def check(_):
+            self.assertEqual(
+                "".join(self.request.written),
+                "<!DOCTYPE html>\n"
+                "<p>Hello, world.</p>")
+            self.assertTrue(self.request.finished)
+
+        d.addCallback(check)
+
+        self.assertIdentical(NOT_DONE_YET, renderElement(self.request, element))
+
+        return d
+
+
+    def test_simpleFailure(self):
+        """
+        L{renderElement} handles failures by writing a minimal
+        error message to the request and finishing it.
+        """
+        element = FailingElement()
+
+        d = self.request.notifyFinish()
+
+        def check(_):
+            flushed = self.flushLoggedErrors(FlattenerError)
+            self.assertEqual(len(flushed), 1)
+            self.assertEqual(
+                "".join(self.request.written),
+                ('<!DOCTYPE html>\n'
+                 '<div style="font-size:800%;'
+                 'background-color:#FFF;'
+                 'color:#F00'
+                 '">An error occurred while rendering the response.</div>'))
+            self.assertTrue(self.request.finished)
+
+        d.addCallback(check)
+
+        self.assertIdentical(NOT_DONE_YET, renderElement(self.request, element))
+
+        return d
+
+
+    def test_simpleFailureWithTraceback(self):
+        """
+        L{renderElement} will render a traceback when rendering of
+        the element fails and our site is configured to display tracebacks.
+        """
+        self.request.site.displayTracebacks = True
+
+        element = FailingElement()
+
+        d = self.request.notifyFinish()
+
+        def check(_):
+            flushed = self.flushLoggedErrors(FlattenerError)
+            self.assertEqual(len(flushed), 1)
+            self.assertEqual(
+                "".join(self.request.written),
+                "<!DOCTYPE html>\n<p>I failed.</p>")
+            self.assertTrue(self.request.finished)
+
+        d.addCallback(check)
+
+        renderElement(self.request, element, _failElement=TestFailureElement)
+
+        return d
+
+
+    def test_nonDefaultDoctype(self):
+        """
+        L{renderElement} will write the doctype string specified by the
+        doctype keyword argument.
+        """
+
+        element = TestElement()
+
+        d = self.request.notifyFinish()
+
+        def check(_):
+            self.assertEqual(
+                "".join(self.request.written),
+                ('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
+                 ' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n'
+                 '<p>Hello, world.</p>'))
+
+        d.addCallback(check)
+
+        renderElement(
+            self.request,
+            element,
+            doctype=(
+                '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"'
+                ' "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">'))
+
+        return d
+
+
+    def test_noneDoctype(self):
+        """
+        L{renderElement} will not write out a doctype if the doctype keyword
+        argument is C{None}.
+        """
+
+        element = TestElement()
+
+        d = self.request.notifyFinish()
+
+        def check(_):
+            self.assertEqual(
+                "".join(self.request.written),
+                '<p>Hello, world.</p>')
+
+        d.addCallback(check)
+
+        renderElement(self.request, element, doctype=None)
+
+        return d
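
    (Editorial note, not part of the imported file: the tests above show how
    renderElement writes a doctype and the flattened Element to the request.
    The following is a minimal, hedged sketch of that pattern under the same
    APIs the tests exercise; GreetingElement and GreetingPage are illustrative
    names that do not appear in the upstream sources.)

        from twisted.web.resource import Resource
        from twisted.web.template import (
            Element, XMLString, renderer, renderElement)

        class GreetingElement(Element):
            # Same template namespace as used throughout the tests above.
            loader = XMLString(
                '<p xmlns:t="http://twistedmatrix.com/ns/'
                'twisted.web.template/0.1" t:render="greeting" />')

            @renderer
            def greeting(self, request, tag):
                # Fill the tag, as the directive-rendering tests do.
                return tag("Hello, world.")

        class GreetingPage(Resource):
            isLeaf = True

            def render_GET(self, request):
                # renderElement returns NOT_DONE_YET and eventually writes
                # "<!DOCTYPE html>\n<p>Hello, world.</p>"; passing doctype=None
                # would suppress the doctype, as test_noneDoctype shows.
                return renderElement(request, GreetingElement())
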
diff --git a/ThirdParty/Twisted/twisted/web/test/test_util.py b/ThirdParty/Twisted/twisted/web/test/test_util.py
new file mode 100644
index 0000000..ac628e7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_util.py
@@ -0,0 +1,424 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.util}.
+"""
+
+from twisted.python.failure import Failure
+from twisted.trial.unittest import TestCase
+from twisted.internet import defer
+from twisted.web import util
+from twisted.web.error import FlattenerError
+from twisted.web.util import (
+    redirectTo, _SourceLineElement,
+    _SourceFragmentElement, _FrameElement, _StackElement,
+    FailureElement, formatFailure, DeferredResource)
+
+from twisted.web.http import FOUND
+from twisted.web.server import Request
+from twisted.web.template import TagLoader, flattenString, tags
+from twisted.web import resource
+from twisted.web.test.requesthelper import DummyChannel, DummyRequest
+
+
+class RedirectToTestCase(TestCase):
+    """
+    Tests for L{redirectTo}.
+    """
+
+    def test_headersAndCode(self):
+        """
+        L{redirectTo} will set the C{Location} and C{Content-Type} headers on
+        its request, and set the response code to C{FOUND}, so the browser will
+        be redirected.
+        """
+        request = Request(DummyChannel(), True)
+        request.method = 'GET'
+        targetURL = "http://target.example.com/4321"
+        redirectTo(targetURL, request)
+        self.assertEqual(request.code, FOUND)
+        self.assertEqual(
+            request.responseHeaders.getRawHeaders('location'), [targetURL])
+        self.assertEqual(
+            request.responseHeaders.getRawHeaders('content-type'),
+            ['text/html; charset=utf-8'])
+
+
+    def test_redirectToUnicodeURL(self):
+        """
+        L{redirectTo} raises L{TypeError} if a C{unicode} object is passed as
+        the URL.
+        """
+        request = Request(DummyChannel(), True)
+        request.method = 'GET'
+        targetURL = u'http://target.example.com/4321'
+        self.assertRaises(TypeError, redirectTo, targetURL, request)
+
+
+
+class FailureElementTests(TestCase):
+    """
+    Tests for L{FailureElement} and related helpers which can render a
+    L{Failure} as an HTML string.
+    """
+    def setUp(self):
+        """
+        Create a L{Failure} which can be used by the rendering tests.
+        """
+        def lineNumberProbeAlsoBroken():
+            message = "This is a problem"
+            raise Exception(message)
+        # Figure out the line number from which the exception will be raised.
+        self.base = lineNumberProbeAlsoBroken.func_code.co_firstlineno + 1
+
+        try:
+            lineNumberProbeAlsoBroken()
+        except:
+            self.failure = Failure(captureVars=True)
+            self.frame = self.failure.frames[-1]
+
+
+    def test_sourceLineElement(self):
+        """
+        L{_SourceLineElement} renders a source line and line number.
+        """
+        element = _SourceLineElement(
+            TagLoader(tags.div(
+                    tags.span(render="lineNumber"),
+                    tags.span(render="sourceLine"))),
+            50, "    print 'hello'")
+        d = flattenString(None, element)
+        expected = (
+            u"<div><span>50</span><span>"
+            u" \N{NO-BREAK SPACE} \N{NO-BREAK SPACE}print 'hello'</span></div>")
+        d.addCallback(
+            self.assertEqual, expected.encode('utf-8'))
+        return d
+
+
+    def test_sourceFragmentElement(self):
+        """
+        L{_SourceFragmentElement} renders source lines at and around the line
+        number indicated by a frame object.
+        """
+        element = _SourceFragmentElement(
+            TagLoader(tags.div(
+                    tags.span(render="lineNumber"),
+                    tags.span(render="sourceLine"),
+                    render="sourceLines")),
+            self.frame)
+
+        source = [
+            u' \N{NO-BREAK SPACE} \N{NO-BREAK SPACE}message = '
+            u'"This is a problem"',
+
+            u' \N{NO-BREAK SPACE} \N{NO-BREAK SPACE}raise Exception(message)',
+            u'# Figure out the line number from which the exception will be '
+            u'raised.',
+        ]
+        d = flattenString(None, element)
+        d.addCallback(
+            self.assertEqual,
+            ''.join([
+                    '<div class="snippet%sLine"><span>%d</span><span>%s</span>'
+                    '</div>' % (
+                        ["", "Highlight"][lineNumber == 1],
+                        self.base + lineNumber,
+                        (u" \N{NO-BREAK SPACE}" * 4 + sourceLine).encode(
+                            'utf-8'))
+                    for (lineNumber, sourceLine)
+                    in enumerate(source)]))
+        return d
+
+
+    def test_frameElementFilename(self):
+        """
+        The I{filename} renderer of L{_FrameElement} renders the filename
+        associated with the frame object used to initialize the
+        L{_FrameElement}.
+        """
+        element = _FrameElement(
+            TagLoader(tags.span(render="filename")),
+            self.frame)
+        d = flattenString(None, element)
+        d.addCallback(
+            # __file__ differs depending on whether an up-to-date .pyc file
+            # already existed.
+            self.assertEqual, "<span>" + __file__.rstrip('c') + "</span>")
+        return d
+
+
+    def test_frameElementLineNumber(self):
+        """
+        The I{lineNumber} renderer of L{_FrameElement} renders the line number
+        associated with the frame object used to initialize the
+        L{_FrameElement}.
+        """
+        element = _FrameElement(
+            TagLoader(tags.span(render="lineNumber")),
+            self.frame)
+        d = flattenString(None, element)
+        d.addCallback(
+            self.assertEqual, "<span>" + str(self.base + 1) + "</span>")
+        return d
+
+
+    def test_frameElementFunction(self):
+        """
+        The I{function} renderer of L{_FrameElement} renders the name of the
+        function associated with the frame object used to initialize the
+        L{_FrameElement}.
+        """
+        element = _FrameElement(
+            TagLoader(tags.span(render="function")),
+            self.frame)
+        d = flattenString(None, element)
+        d.addCallback(
+            self.assertEqual, "<span>lineNumberProbeAlsoBroken</span>")
+        return d
+
+
+    def test_frameElementSource(self):
+        """
+        The I{source} renderer of L{_FrameElement} renders the source code near
+        the source filename/line number associated with the frame object used to
+        initialize the L{_FrameElement}.
+        """
+        element = _FrameElement(None, self.frame)
+        renderer = element.lookupRenderMethod("source")
+        tag = tags.div()
+        result = renderer(None, tag)
+        self.assertIsInstance(result, _SourceFragmentElement)
+        self.assertIdentical(result.frame, self.frame)
+        self.assertEqual([tag], result.loader.load())
+
+
+    def test_stackElement(self):
+        """
+        The I{frames} renderer of L{_StackElement} renders each stack frame in
+        the list of frames used to initialize the L{_StackElement}.
+        """
+        element = _StackElement(None, self.failure.frames[:2])
+        renderer = element.lookupRenderMethod("frames")
+        tag = tags.div()
+        result = renderer(None, tag)
+        self.assertIsInstance(result, list)
+        self.assertIsInstance(result[0], _FrameElement)
+        self.assertIdentical(result[0].frame, self.failure.frames[0])
+        self.assertIsInstance(result[1], _FrameElement)
+        self.assertIdentical(result[1].frame, self.failure.frames[1])
+        # They must not share the same tag object.
+        self.assertNotEqual(result[0].loader.load(), result[1].loader.load())
+        self.assertEqual(2, len(result))
+
+
+    def test_failureElementTraceback(self):
+        """
+        The I{traceback} renderer of L{FailureElement} renders the failure's
+        stack frames using L{_StackElement}.
+        """
+        element = FailureElement(self.failure)
+        renderer = element.lookupRenderMethod("traceback")
+        tag = tags.div()
+        result = renderer(None, tag)
+        self.assertIsInstance(result, _StackElement)
+        self.assertIdentical(result.stackFrames, self.failure.frames)
+        self.assertEqual([tag], result.loader.load())
+
+
+    def test_failureElementType(self):
+        """
+        The I{type} renderer of L{FailureElement} renders the failure's
+        exception type.
+        """
+        element = FailureElement(
+            self.failure, TagLoader(tags.span(render="type")))
+        d = flattenString(None, element)
+        d.addCallback(
+            self.assertEqual, "<span>exceptions.Exception</span>")
+        return d
+
+
+    def test_failureElementValue(self):
+        """
+        The I{value} renderer of L{FailureElement} renders the failure's
+        exception value.
+        """
+        element = FailureElement(
+            self.failure, TagLoader(tags.span(render="value")))
+        d = flattenString(None, element)
+        d.addCallback(
+            self.assertEqual, '<span>This is a problem</span>')
+        return d
+
+
+
+class FormatFailureTests(TestCase):
+    """
+    Tests for L{twisted.web.util.formatFailure} which returns an HTML string
+    representing the L{Failure} instance passed to it.
+    """
+    def test_flattenerError(self):
+        """
+        If there is an error flattening the L{Failure} instance,
+        L{formatFailure} raises L{FlattenerError}.
+        """
+        self.assertRaises(FlattenerError, formatFailure, object())
+
+
+    def test_returnsBytes(self):
+        """
+        The return value of L{formatFailure} is a C{str} instance (not a
+        C{unicode} instance) with numeric character references for any non-ASCII
+        characters meant to appear in the output.
+        """
+        try:
+            raise Exception("Fake bug")
+        except:
+            result = formatFailure(Failure())
+
+        self.assertIsInstance(result, str)
+        self.assertTrue(all(ord(ch) < 128 for ch in result))
+        # Indentation happens to rely on NO-BREAK SPACE, which must appear in
+        # the all-ASCII output as a numeric character reference.
+        self.assertIn("&#160;", result)
+
+
+
+class DeprecatedHTMLHelpers(TestCase):
+    """
+    The various HTML generation helper APIs in L{twisted.web.util} are
+    deprecated.
+    """
+    def _htmlHelperDeprecationTest(self, functionName):
+        """
+        Helper method which asserts that using the name indicated by
+        C{functionName} from the L{twisted.web.util} module emits a deprecation
+        warning.
+        """
+        getattr(util, functionName)
+        warnings = self.flushWarnings([self._htmlHelperDeprecationTest])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "twisted.web.util.%s was deprecated in Twisted 12.1.0: "
+            "See twisted.web.template." % (functionName,))
+
+
+    def test_htmlrepr(self):
+        """
+        L{twisted.web.util.htmlrepr} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlrepr")
+
+
+    def test_saferepr(self):
+        """
+        L{twisted.web.util.saferepr} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("saferepr")
+
+
+    def test_htmlUnknown(self):
+        """
+        L{twisted.web.util.htmlUnknown} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlUnknown")
+
+
+    def test_htmlDict(self):
+        """
+        L{twisted.web.util.htmlDict} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlDict")
+
+
+    def test_htmlList(self):
+        """
+        L{twisted.web.util.htmlList} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlList")
+
+
+    def test_htmlInst(self):
+        """
+        L{twisted.web.util.htmlInst} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlInst")
+
+
+    def test_htmlString(self):
+        """
+        L{twisted.web.util.htmlString} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlString")
+
+
+    def test_htmlIndent(self):
+        """
+        L{twisted.web.util.htmlIndent} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlIndent")
+
+
+    def test_htmlFunc(self):
+        """
+        L{twisted.web.util.htmlFunc} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlFunc")
+
+
+    def test_htmlReprTypes(self):
+        """
+        L{twisted.web.util.htmlReprTypes} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("htmlReprTypes")
+
+
+    def test_stylesheet(self):
+        """
+        L{twisted.web.util.stylesheet} is deprecated.
+        """
+        self._htmlHelperDeprecationTest("stylesheet")
+
+
+
+class SDResource(resource.Resource):
+    def __init__(self, default):
+        self.default = default
+
+
+    def getChildWithDefault(self, name, request):
+        d = defer.succeed(self.default)
+        resource = util.DeferredResource(d)
+        return resource.getChildWithDefault(name, request)
+
+
+
+class DeferredResourceTests(TestCase):
+    """
+    Tests for L{DeferredResource}.
+    """
+
+    def testDeferredResource(self):
+        """
+        Resolving a child through L{SDResource} consumes one path segment and
+        leaves the remaining segments in the request's C{postpath}.
+        """
+        r = resource.Resource()
+        r.isLeaf = 1
+        s = SDResource(r)
+        d = DummyRequest(['foo', 'bar', 'baz'])
+        resource.getChildForRequest(s, d)
+        self.assertEqual(d.postpath, ['bar', 'baz'])
+
+
+    def test_render(self):
+        """
+        L{DeferredResource} uses the request object's C{render} method to
+        render the resource which is the result of the L{Deferred} being
+        handled.
+        """
+        rendered = []
+        request = DummyRequest([])
+        request.render = rendered.append
+
+        result = resource.Resource()
+        deferredResource = DeferredResource(defer.succeed(result))
+        deferredResource.render(request)
+        self.assertEqual(rendered, [result])
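
    (Editorial note, not part of the imported file: DeferredResource, as
    tested above, renders whatever resource a wrapped Deferred eventually
    produces.  A minimal usage sketch follows; lookUpUserResource and
    UserDispatcher are hypothetical names used only for illustration.)

        from twisted.internet import defer
        from twisted.web.resource import Resource
        from twisted.web.util import DeferredResource

        def lookUpUserResource(name):
            # Stand-in for an asynchronous lookup (database query, remote
            # call, ...) that eventually yields a resource.
            return defer.succeed(Resource())

        class UserDispatcher(Resource):
            def getChild(self, name, request):
                # DeferredResource postpones rendering until the Deferred
                # fires, then delegates to the resource it produced -- the
                # behaviour test_render verifies above.
                return DeferredResource(lookUpUserResource(name))
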
diff --git a/ThirdParty/Twisted/twisted/web/test/test_vhost.py b/ThirdParty/Twisted/twisted/web/test/test_vhost.py
new file mode 100644
index 0000000..13e6357
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_vhost.py
@@ -0,0 +1,105 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.vhost}.
+"""
+
+from twisted.internet.defer import gatherResults
+from twisted.trial.unittest import TestCase
+from twisted.web.http import NOT_FOUND
+from twisted.web.static import Data
+from twisted.web.vhost import NameVirtualHost
+from twisted.web.test.test_web import DummyRequest
+from twisted.web.test._util import _render
+
+class NameVirtualHostTests(TestCase):
+    """
+    Tests for L{NameVirtualHost}.
+    """
+    def test_renderWithoutHost(self):
+        """
+        L{NameVirtualHost.render} returns the result of rendering the
+        instance's C{default} if it is not C{None} and there is no I{Host}
+        header in the request.
+        """
+        virtualHostResource = NameVirtualHost()
+        virtualHostResource.default = Data("correct result", "")
+        request = DummyRequest([''])
+        self.assertEqual(
+            virtualHostResource.render(request), "correct result")
+
+
+    def test_renderWithoutHostNoDefault(self):
+        """
+        L{NameVirtualHost.render} returns a response with a status of I{NOT
+        FOUND} if the instance's C{default} is C{None} and there is no I{Host}
+        header in the request.
+        """
+        virtualHostResource = NameVirtualHost()
+        request = DummyRequest([''])
+        d = _render(virtualHostResource, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, NOT_FOUND)
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_renderWithHost(self):
+        """
+        L{NameVirtualHost.render} returns the result of rendering the resource
+        which is the value in the instance's C{host} dictionary corresponding
+        to the key indicated by the value of the I{Host} header in the request.
+        """
+        virtualHostResource = NameVirtualHost()
+        virtualHostResource.addHost('example.org', Data("winner", ""))
+
+        request = DummyRequest([''])
+        request.headers['host'] = 'example.org'
+        d = _render(virtualHostResource, request)
+        def cbRendered(ignored, request):
+            self.assertEqual(''.join(request.written), "winner")
+        d.addCallback(cbRendered, request)
+
+        # The port portion of the Host header should not be considered.
+        requestWithPort = DummyRequest([''])
+        requestWithPort.headers['host'] = 'example.org:8000'
+        dWithPort = _render(virtualHostResource, requestWithPort)
+        def cbRendered(ignored, requestWithPort):
+            self.assertEqual(''.join(requestWithPort.written), "winner")
+        dWithPort.addCallback(cbRendered, requestWithPort)
+
+        return gatherResults([d, dWithPort])
+
+
+    def test_renderWithUnknownHost(self):
+        """
+        L{NameVirtualHost.render} returns the result of rendering the
+        instance's C{default} if it is not C{None} and there is no host
+        matching the value of the I{Host} header in the request.
+        """
+        virtualHostResource = NameVirtualHost()
+        virtualHostResource.default = Data("correct data", "")
+        request = DummyRequest([''])
+        request.headers['host'] = 'example.com'
+        d = _render(virtualHostResource, request)
+        def cbRendered(ignored):
+            self.assertEqual(''.join(request.written), "correct data")
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_renderWithUnknownHostNoDefault(self):
+        """
+        L{NameVirtualHost.render} returns a response with a status of I{NOT
+        FOUND} if the instance's C{default} is C{None} and there is no host
+        matching the value of the I{Host} header in the request.
+        """
+        virtualHostResource = NameVirtualHost()
+        request = DummyRequest([''])
+        request.headers['host'] = 'example.com'
+        d = _render(virtualHostResource, request)
+        def cbRendered(ignored):
+            self.assertEqual(request.responseCode, NOT_FOUND)
+        d.addCallback(cbRendered)
+        return d
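
    (Editorial note, not part of the imported file: the NameVirtualHost tests
    above cover Host-header dispatch, the C{default} fallback, and stripping
    of the port suffix.  A hedged sketch of that configuration; the host names
    and response bodies are illustrative only.)

        from twisted.web.server import Site
        from twisted.web.static import Data
        from twisted.web.vhost import NameVirtualHost

        root = NameVirtualHost()
        # Served when the Host header is missing or unknown.
        root.default = Data("fallback content", "text/plain")
        # Served for "Host: example.org" (with or without a :port suffix).
        root.addHost("example.org", Data("example.org content", "text/plain"))

        site = Site(root)  # e.g. reactor.listenTCP(8080, site)
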
diff --git a/ThirdParty/Twisted/twisted/web/test/test_web.py b/ThirdParty/Twisted/twisted/web/test/test_web.py
new file mode 100644
index 0000000..6080d40
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_web.py
@@ -0,0 +1,972 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for various parts of L{twisted.web}.
+"""
+
+import zlib
+
+from zope.interface import implementer
+from zope.interface.verify import verifyObject
+
+from twisted.python.compat import (_PY3, networkString,
+                                   NativeStringIO as StringIO)
+from twisted.trial import unittest
+from twisted.internet import reactor
+from twisted.internet.address import IPv4Address
+from twisted.web import server, resource
+from twisted.internet import task
+from twisted.web import iweb, http, error
+from twisted.python import log
+
+from twisted.web.test.requesthelper import DummyChannel, DummyRequest
+
+# Remove this in #6177, when static is ported to Python 3:
+if _PY3:
+    class Data(resource.Resource):
+        def __init__(self, data, type):
+            resource.Resource.__init__(self)
+            self.data = data
+            self.type = type
+
+
+        def render_GET(self, request):
+            request.setHeader(b"content-type", self.type)
+            request.setHeader(b"content-length",
+                              networkString(str(len(self.data))))
+            return self.data
+else:
+    from twisted.web.static import Data
+
+
+class ResourceTestCase(unittest.TestCase):
+    def testListEntities(self):
+        r = resource.Resource()
+        self.assertEqual([], r.listEntities())
+
+
+class SimpleResource(resource.Resource):
+    """
+    @ivar _contentType: C{None} or a C{str} giving the value of the
+        I{Content-Type} header in the response this resource will render.  If it
+        is C{None}, no I{Content-Type} header will be set in the response.
+    """
+    def __init__(self, contentType=None):
+        resource.Resource.__init__(self)
+        self._contentType = contentType
+
+
+    def render(self, request):
+        if self._contentType is not None:
+            request.responseHeaders.setRawHeaders(
+                b"content-type", [self._contentType])
+
+        if http.CACHED in (request.setLastModified(10),
+                           request.setETag(b'MatchingTag')):
+            return b''
+        else:
+            return b"correct"
+
+
+class SiteTest(unittest.TestCase):
+    def test_simplestSite(self):
+        """
+        L{Site.getResourceFor} returns the C{b""} child of the root resource it
+        is constructed with when processing a request for I{/}.
+        """
+        sres1 = SimpleResource()
+        sres2 = SimpleResource()
+        sres1.putChild(b"",sres2)
+        site = server.Site(sres1)
+        self.assertIdentical(
+            site.getResourceFor(DummyRequest([b''])),
+            sres2, "Got the wrong resource.")
+
+
+
+class SessionTest(unittest.TestCase):
+    """
+    Tests for L{server.Session}.
+    """
+    def setUp(self):
+        """
+        Create a site with one active session using a deterministic, easily
+        controlled clock.
+        """
+        self.clock = task.Clock()
+        self.uid = b'unique'
+        self.site = server.Site(resource.Resource())
+        self.session = server.Session(self.site, self.uid, self.clock)
+        self.site.sessions[self.uid] = self.session
+
+
+    def test_defaultReactor(self):
+        """
+        If no reactor is passed to L{server.Session.__init__}, the global
+        reactor is used.
+        """
+        session = server.Session(server.Site(resource.Resource()), b'123')
+        self.assertIdentical(session._reactor, reactor)
+
+
+    def test_startCheckingExpiration(self):
+        """
+        L{server.Session.startCheckingExpiration} causes the session to expire
+        after L{server.Session.sessionTimeout} seconds without activity.
+        """
+        self.session.startCheckingExpiration()
+
+        # Advance to almost the timeout - nothing should happen.
+        self.clock.advance(self.session.sessionTimeout - 1)
+        self.assertIn(self.uid, self.site.sessions)
+
+        # Advance to the timeout, the session should expire.
+        self.clock.advance(1)
+        self.assertNotIn(self.uid, self.site.sessions)
+
+        # There should be no calls left over, either.
+        self.assertFalse(self.clock.calls)
+
+
+    def test_expire(self):
+        """
+        L{server.Session.expire} expires the session.
+        """
+        self.session.expire()
+        # It should be gone from the session dictionary.
+        self.assertNotIn(self.uid, self.site.sessions)
+        # And there should be no pending delayed calls.
+        self.assertFalse(self.clock.calls)
+
+
+    def test_expireWhileChecking(self):
+        """
+        L{server.Session.expire} expires the session even if the timeout call
+        isn't due yet.
+        """
+        self.session.startCheckingExpiration()
+        self.test_expire()
+
+
+    def test_notifyOnExpire(self):
+        """
+        A function registered with L{server.Session.notifyOnExpire} is called
+        when the session expires.
+        """
+        callbackRan = [False]
+        def expired():
+            callbackRan[0] = True
+        self.session.notifyOnExpire(expired)
+        self.session.expire()
+        self.assertTrue(callbackRan[0])
+
+
+    def test_touch(self):
+        """
+        L{server.Session.touch} updates L{server.Session.lastModified} and
+        delays session timeout.
+        """
+        # Make sure it works before startCheckingExpiration
+        self.clock.advance(3)
+        self.session.touch()
+        self.assertEqual(self.session.lastModified, 3)
+
+        # And after startCheckingExpiration
+        self.session.startCheckingExpiration()
+        self.clock.advance(self.session.sessionTimeout - 1)
+        self.session.touch()
+        self.clock.advance(self.session.sessionTimeout - 1)
+        self.assertIn(self.uid, self.site.sessions)
+
+        # It should have advanced it by just sessionTimeout, no more.
+        self.clock.advance(1)
+        self.assertNotIn(self.uid, self.site.sessions)
+
+
+    def test_startCheckingExpirationParameterDeprecated(self):
+        """
+        L{server.Session.startCheckingExpiration} emits a deprecation warning
+        if it is invoked with a parameter.
+        """
+        self.session.startCheckingExpiration(123)
+        warnings = self.flushWarnings([
+                self.test_startCheckingExpirationParameterDeprecated])
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "The lifetime parameter to startCheckingExpiration is deprecated "
+            "since Twisted 9.0.  See Session.sessionTimeout instead.")
+
+
+    def test_checkExpiredDeprecated(self):
+        """
+        L{server.Session.checkExpired} is deprecated.
+        """
+        self.session.checkExpired()
+        warnings = self.flushWarnings([self.test_checkExpiredDeprecated])
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            "Session.checkExpired is deprecated since Twisted 9.0; sessions "
+            "check themselves now, you don't need to.")
+        self.assertEqual(len(warnings), 1)
+
+
+# Conditional requests:
+# If-None-Match, If-Modified-Since
+
+# make conditional request:
+#   normal response if condition succeeds
+#   if condition fails:
+#      response code
+#      no body
+
+def httpBody(whole):
+    return whole.split(b'\r\n\r\n', 1)[1]
+
+def httpHeader(whole, key):
+    key = key.lower()
+    headers = whole.split(b'\r\n\r\n', 1)[0]
+    for header in headers.split(b'\r\n'):
+        if header.lower().startswith(key):
+            return header.split(b':', 1)[1].strip()
+    return None
+
+def httpCode(whole):
+    l1 = whole.split(b'\r\n', 1)[0]
+    return int(l1.split()[1])
+
+class ConditionalTest(unittest.TestCase):
+    """
+    web.server's handling of conditional requests for cache validation.
+    """
+    def setUp(self):
+        self.resrc = SimpleResource()
+        self.resrc.putChild(b'', self.resrc)
+        self.resrc.putChild(b'with-content-type', SimpleResource(b'image/jpeg'))
+        self.site = server.Site(self.resrc)
+        self.site.logFile = log.logfile
+
+        # HELLLLLLLLLLP!  This harness is Very Ugly.
+        self.channel = self.site.buildProtocol(None)
+        self.transport = http.StringTransport()
+        self.transport.close = lambda *a, **kw: None
+        self.transport.disconnecting = lambda *a, **kw: 0
+        self.transport.getPeer = lambda *a, **kw: "peer"
+        self.transport.getHost = lambda *a, **kw: "host"
+        self.channel.makeConnection(self.transport)
+
+
+    def tearDown(self):
+        self.channel.connectionLost(None)
+
+
+    def _modifiedTest(self, modifiedSince=None, etag=None):
+        """
+        Given the value C{modifiedSince} for the I{If-Modified-Since} header or
+        the value C{etag} for the I{If-Not-Match} header, verify that a response
+        with a 200 code, a default Content-Type, and the resource as the body is
+        returned.
+        """
+        if modifiedSince is not None:
+            validator = b"If-Modified-Since: " + modifiedSince
+        else:
+            validator = b"If-Not-Match: " + etag
+        for line in [b"GET / HTTP/1.1", validator, b""]:
+            self.channel.lineReceived(line)
+        result = self.transport.getvalue()
+        self.assertEqual(httpCode(result), http.OK)
+        self.assertEqual(httpBody(result), b"correct")
+        self.assertEqual(httpHeader(result, b"Content-Type"), b"text/html")
+
+
+    def test_modified(self):
+        """
+        If a request is made with an I{If-Modified-Since} header value with
+        a timestamp indicating a time before the last modification of the
+        requested resource, a 200 response is returned along with a response
+        body containing the resource.
+        """
+        self._modifiedTest(modifiedSince=http.datetimeToString(1))
+
+
+    def test_unmodified(self):
+        """
+        If a request is made with an I{If-Modified-Since} header value with a
+        timestamp indicating a time after the last modification of the
+        requested resource, a 304 response is returned along with an empty
+        response body and no Content-Type header if the application does not
+        set one.
+        """
+        for line in [b"GET / HTTP/1.1",
+                     b"If-Modified-Since: " + http.datetimeToString(100), b""]:
+            self.channel.lineReceived(line)
+        result = self.transport.getvalue()
+        self.assertEqual(httpCode(result), http.NOT_MODIFIED)
+        self.assertEqual(httpBody(result), b"")
+        # Since there SHOULD NOT (RFC 2616, section 10.3.5) be any
+        # entity-headers, the Content-Type is not set if the application does
+        # not explicitly set it.
+        self.assertEqual(httpHeader(result, b"Content-Type"), None)
+
+
+    def test_invalidTimestamp(self):
+        """
+        If a request is made with an I{If-Modified-Since} header value which
+        cannot be parsed, the header is treated as not having been present
+        and a normal 200 response is returned with a response body
+        containing the resource.
+        """
+        self._modifiedTest(modifiedSince=b"like, maybe a week ago, I guess?")
+
+
+    def test_invalidTimestampYear(self):
+        """
+        If a request is made with an I{If-Modified-Since} header value which
+        contains a string in the year position which is not an integer, the
+        header is treated as not having been present and a normal 200
+        response is returned with a response body containing the resource.
+        """
+        self._modifiedTest(modifiedSince=b"Thu, 01 Jan blah 00:00:10 GMT")
+
+
+    def test_invalidTimestampTooLongAgo(self):
+        """
+        If a request is made with an I{If-Modified-Since} header value which
+        contains a year before the epoch, the header is treated as not
+        having been present and a normal 200 response is returned with a
+        response body containing the resource.
+        """
+        self._modifiedTest(modifiedSince=b"Thu, 01 Jan 1899 00:00:10 GMT")
+
+
+    def test_invalidTimestampMonth(self):
+        """
+        If a request is made with an I{If-Modified-Since} header value which
+        contains a string in the month position which is not a recognized
+        month abbreviation, the header is treated as not having been present
+        and a normal 200 response is returned with a response body
+        containing the resource.
+        """
+        self._modifiedTest(modifiedSince=b"Thu, 01 Blah 1970 00:00:10 GMT")
+
+
+    def test_etagMatchedNot(self):
+        """
+        If a request is made with an I{If-None-Match} ETag which does not match
+        the current ETag of the requested resource, the header is treated as not
+        having been present and a normal 200 response is returned with a
+        response body containing the resource.
+        """
+        self._modifiedTest(etag=b"unmatchedTag")
+
+
+    def test_etagMatched(self):
+        """
+        If a request is made with an I{If-None-Match} ETag which does match the
+        current ETag of the requested resource, a 304 response is returned along
+        with an empty response body.
+        """
+        for line in [b"GET / HTTP/1.1", b"If-None-Match: MatchingTag", b""]:
+            self.channel.lineReceived(line)
+        result = self.transport.getvalue()
+        self.assertEqual(httpHeader(result, b"ETag"), b"MatchingTag")
+        self.assertEqual(httpCode(result), http.NOT_MODIFIED)
+        self.assertEqual(httpBody(result), b"")
+
+
+    def test_unmodifiedWithContentType(self):
+        """
+        Similar to L{test_etagMatched}, but the response should include a
+        I{Content-Type} header if the application explicitly sets one.
+
+        This I{Content-Type} header SHOULD NOT be present according to RFC 2616,
+        section 10.3.5.  It will only be present if the application explicitly
+        sets it.
+        """
+        for line in [b"GET /with-content-type HTTP/1.1",
+                     b"If-None-Match: MatchingTag", b""]:
+            self.channel.lineReceived(line)
+        result = self.transport.getvalue()
+        self.assertEqual(httpCode(result), http.NOT_MODIFIED)
+        self.assertEqual(httpBody(result), b"")
+        self.assertEqual(httpHeader(result, b"Content-Type"), b"image/jpeg")
+
+
+
+class RequestTests(unittest.TestCase):
+    """
+    Tests for the HTTP request class, L{server.Request}.
+    """
+
+    def test_interface(self):
+        """
+        L{server.Request} instances provide L{iweb.IRequest}.
+        """
+        self.assertTrue(
+            verifyObject(iweb.IRequest, server.Request(DummyChannel(), True)))
+
+
+    def testChildLink(self):
+        request = server.Request(DummyChannel(), 1)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar', b'HTTP/1.0')
+        self.assertEqual(request.childLink(b'baz'), b'bar/baz')
+        request = server.Request(DummyChannel(), 1)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar/', b'HTTP/1.0')
+        self.assertEqual(request.childLink(b'baz'), b'baz')
+
+    def testPrePathURLSimple(self):
+        request = server.Request(DummyChannel(), 1)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar', b'HTTP/1.0')
+        request.setHost(b'example.com', 80)
+        self.assertEqual(request.prePathURL(), b'http://example.com/foo/bar')
+
+    def testPrePathURLNonDefault(self):
+        d = DummyChannel()
+        d.transport.port = 81
+        request = server.Request(d, 1)
+        request.setHost(b'example.com', 81)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar', b'HTTP/1.0')
+        self.assertEqual(request.prePathURL(), b'http://example.com:81/foo/bar')
+
+    def testPrePathURLSSLPort(self):
+        d = DummyChannel()
+        d.transport.port = 443
+        request = server.Request(d, 1)
+        request.setHost(b'example.com', 443)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar', b'HTTP/1.0')
+        self.assertEqual(request.prePathURL(), b'http://example.com:443/foo/bar')
+
+    def testPrePathURLSSLPortAndSSL(self):
+        d = DummyChannel()
+        d.transport = DummyChannel.SSL()
+        d.transport.port = 443
+        request = server.Request(d, 1)
+        request.setHost(b'example.com', 443)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar', b'HTTP/1.0')
+        self.assertEqual(request.prePathURL(), b'https://example.com/foo/bar')
+
+    def testPrePathURLHTTPPortAndSSL(self):
+        d = DummyChannel()
+        d.transport = DummyChannel.SSL()
+        d.transport.port = 80
+        request = server.Request(d, 1)
+        request.setHost(b'example.com', 80)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar', b'HTTP/1.0')
+        self.assertEqual(request.prePathURL(), b'https://example.com:80/foo/bar')
+
+    def testPrePathURLSSLNonDefault(self):
+        d = DummyChannel()
+        d.transport = DummyChannel.SSL()
+        d.transport.port = 81
+        request = server.Request(d, 1)
+        request.setHost(b'example.com', 81)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar', b'HTTP/1.0')
+        self.assertEqual(request.prePathURL(), b'https://example.com:81/foo/bar')
+
+    def testPrePathURLSetSSLHost(self):
+        d = DummyChannel()
+        d.transport.port = 81
+        request = server.Request(d, 1)
+        request.setHost(b'foo.com', 81, 1)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo/bar', b'HTTP/1.0')
+        self.assertEqual(request.prePathURL(), b'https://foo.com:81/foo/bar')
+
+
+    def test_prePathURLQuoting(self):
+        """
+        L{Request.prePathURL} quotes special characters in the URL segments to
+        preserve the original meaning.
+        """
+        d = DummyChannel()
+        request = server.Request(d, 1)
+        request.setHost(b'example.com', 80)
+        request.gotLength(0)
+        request.requestReceived(b'GET', b'/foo%2Fbar', b'HTTP/1.0')
+        self.assertEqual(request.prePathURL(), b'http://example.com/foo%2Fbar')
+
+
+
+class GzipEncoderTests(unittest.TestCase):
+
+    if _PY3:
+        skip = "GzipEncoder not ported to Python 3 yet."
+
+    def setUp(self):
+        self.channel = DummyChannel()
+        staticResource = Data(b"Some data", b"text/plain")
+        wrapped = resource.EncodingResourceWrapper(
+            staticResource, [server.GzipEncoderFactory()])
+        self.channel.site.resource.putChild(b"foo", wrapped)
+
+
+    def test_interfaces(self):
+        """
+        L{server.GzipEncoderFactory} implements
+        L{iweb._IRequestEncoderFactory}, and its C{encoderForRequest} method
+        returns an instance of L{server._GzipEncoder}, which implements
+        L{iweb._IRequestEncoder}.
+        """
+        request = server.Request(self.channel, False)
+        request.gotLength(0)
+        request.requestHeaders.setRawHeaders(b"Accept-Encoding",
+                                             [b"gzip,deflate"])
+        factory = server.GzipEncoderFactory()
+        self.assertTrue(verifyObject(iweb._IRequestEncoderFactory, factory))
+
+        encoder = factory.encoderForRequest(request)
+        self.assertTrue(verifyObject(iweb._IRequestEncoder, encoder))
+
+
+    def test_encoding(self):
+        """
+        If the client request passes an I{Accept-Encoding} header which mentions
+        gzip, L{server._GzipEncoder} automatically compresses the data.
+        """
+        request = server.Request(self.channel, False)
+        request.gotLength(0)
+        request.requestHeaders.setRawHeaders(b"Accept-Encoding",
+                                             [b"gzip,deflate"])
+        request.requestReceived(b'GET', b'/foo', b'HTTP/1.0')
+        data = self.channel.transport.written.getvalue()
+        self.assertNotIn(b"Content-Length", data)
+        self.assertIn(b"Content-Encoding: gzip\r\n", data)
+        body = data[data.find(b"\r\n\r\n") + 4:]
+        self.assertEqual(b"Some data",
+                          zlib.decompress(body, 16 + zlib.MAX_WBITS))
+
+
+    def test_nonEncoding(self):
+        """
+        L{server.GzipEncoderFactory} doesn't return a L{server._GzipEncoder} if
+        the I{Accept-Encoding} header doesn't mention gzip support.
+        """
+        request = server.Request(self.channel, False)
+        request.gotLength(0)
+        request.requestHeaders.setRawHeaders(b"Accept-Encoding",
+                                             [b"foo,bar"])
+        request.requestReceived(b'GET', b'/foo', b'HTTP/1.0')
+        data = self.channel.transport.written.getvalue()
+        self.assertIn(b"Content-Length", data)
+        self.assertNotIn(b"Content-Encoding: gzip\r\n", data)
+        body = data[data.find(b"\r\n\r\n") + 4:]
+        self.assertEqual(b"Some data", body)
+
+
+    def test_multipleAccept(self):
+        """
+        If there are multiple I{Accept-Encoding} headers,
+        L{server.GzipEncoderFactory} reads them properly to detect if gzip is
+        supported.
+        """
+        request = server.Request(self.channel, False)
+        request.gotLength(0)
+        request.requestHeaders.setRawHeaders(b"Accept-Encoding",
+                                             [b"deflate", b"gzip"])
+        request.requestReceived(b'GET', b'/foo', b'HTTP/1.0')
+        data = self.channel.transport.written.getvalue()
+        self.assertNotIn(b"Content-Length", data)
+        self.assertIn(b"Content-Encoding: gzip\r\n", data)
+        body = data[data.find(b"\r\n\r\n") + 4:]
+        self.assertEqual(b"Some data",
+                         zlib.decompress(body, 16 + zlib.MAX_WBITS))
+
+
+    def test_alreadyEncoded(self):
+        """
+        If the content is already encoded and the I{Content-Encoding} header is
+        set, L{server.GzipEncoderFactory} properly appends gzip to it.
+        """
+        request = server.Request(self.channel, False)
+        request.gotLength(0)
+        request.requestHeaders.setRawHeaders(b"Accept-Encoding",
+                                             [b"deflate", b"gzip"])
+        request.responseHeaders.setRawHeaders(b"Content-Encoding",
+                                             [b"deflate"])
+        request.requestReceived(b'GET', b'/foo', b'HTTP/1.0')
+        data = self.channel.transport.written.getvalue()
+        self.assertNotIn(b"Content-Length", data)
+        self.assertIn(b"Content-Encoding: deflate,gzip\r\n", data)
+        body = data[data.find(b"\r\n\r\n") + 4:]
+        self.assertEqual(b"Some data",
+                         zlib.decompress(body, 16 + zlib.MAX_WBITS))
+
+
+    def test_multipleEncodingLines(self):
+        """
+        If there are several I{Content-Encoding} headers,
+        L{server.GzipEncoderFactory} normalizes them and appends gzip to the
+        field value.
+        """
+        request = server.Request(self.channel, False)
+        request.gotLength(0)
+        request.requestHeaders.setRawHeaders(b"Accept-Encoding",
+                                             [b"deflate", b"gzip"])
+        request.responseHeaders.setRawHeaders(b"Content-Encoding",
+                                             [b"foo", b"bar"])
+        request.requestReceived(b'GET', b'/foo', b'HTTP/1.0')
+        data = self.channel.transport.written.getvalue()
+        self.assertNotIn(b"Content-Length", data)
+        self.assertIn(b"Content-Encoding: foo,bar,gzip\r\n", data)
+        body = data[data.find(b"\r\n\r\n") + 4:]
+        self.assertEqual(b"Some data",
+                         zlib.decompress(body, 16 + zlib.MAX_WBITS))
+
+
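+# Editor's note: an illustrative sketch (not part of the upstream file) of how
+# the encoder APIs exercised above are typically wired into a site; the child
+# name b"docs" and the payload are hypothetical.
+#
+#     from twisted.web import resource, server
+#     from twisted.web.static import Data
+#
+#     root = resource.Resource()
+#     payload = Data(b"hello world " * 100, b"text/plain")
+#     root.putChild(b"docs", resource.EncodingResourceWrapper(
+#         payload, [server.GzipEncoderFactory()]))
+#     site = server.Site(root)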
+
+class RootResource(resource.Resource):
+    isLeaf=0
+    def getChildWithDefault(self, name, request):
+        request.rememberRootURL()
+        return resource.Resource.getChildWithDefault(self, name, request)
+    def render(self, request):
+        return ''
+
+class RememberURLTest(unittest.TestCase):
+    def createServer(self, r):
+        chan = DummyChannel()
+        chan.site = server.Site(r)
+        return chan
+
+    def testSimple(self):
+        r = resource.Resource()
+        r.isLeaf=0
+        rr = RootResource()
+        r.putChild(b'foo', rr)
+        rr.putChild(b'', rr)
+        rr.putChild(b'bar', resource.Resource())
+        chan = self.createServer(r)
+        for url in [b'/foo/', b'/foo/bar', b'/foo/bar/baz', b'/foo/bar/']:
+            request = server.Request(chan, 1)
+            request.setHost(b'example.com', 81)
+            request.gotLength(0)
+            request.requestReceived(b'GET', url, b'HTTP/1.0')
+            self.assertEqual(request.getRootURL(), b"http://example.com/foo")
+
+    def testRoot(self):
+        rr = RootResource()
+        rr.putChild(b'', rr)
+        rr.putChild(b'bar', resource.Resource())
+        chan = self.createServer(rr)
+        for url in [b'/', b'/bar', b'/bar/baz', b'/bar/']:
+            request = server.Request(chan, 1)
+            request.setHost(b'example.com', 81)
+            request.gotLength(0)
+            request.requestReceived(b'GET', url, b'HTTP/1.0')
+            self.assertEqual(request.getRootURL(), b"http://example.com/")
+
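+# Editor's note: an illustrative sketch (not part of the upstream file) of the
+# rememberRootURL()/getRootURL() pattern exercised above in RememberURLTest;
+# the name Blog and the /blog path are hypothetical.
+#
+#     class Blog(resource.Resource):
+#         def getChildWithDefault(self, name, request):
+#             request.rememberRootURL()
+#             return resource.Resource.getChildWithDefault(self, name, request)
+#
+# After a request for /blog/2014/05/entry traverses such a resource mounted at
+# /blog, request.getRootURL() would be expected to return something like
+# b"http://example.com/blog".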
+
+class NewRenderResource(resource.Resource):
+    def render_GET(self, request):
+        return b"hi hi"
+
+    def render_HEH(self, request):
+        return b"ho ho"
+
+
+
+ at implementer(resource.IResource)
+class HeadlessResource(object):
+    """
+    A resource that implements GET but not HEAD.
+    """
+
+    allowedMethods = [b"GET"]
+
+    def render(self, request):
+        """
+        Leave the request open for future writes.
+        """
+        self.request = request
+        if request.method not in self.allowedMethods:
+            raise error.UnsupportedMethod(self.allowedMethods)
+        self.request.write(b"some data")
+        return server.NOT_DONE_YET
+
+
+
+class NewRenderTestCase(unittest.TestCase):
+    """
+    Tests for L{server.Request.render}.
+    """
+    def _getReq(self, resource=None):
+        """
+        Create a request object with a stub channel and install the
+        passed resource at /newrender. If no resource is passed,
+        create one.
+        """
+        d = DummyChannel()
+        if resource is None:
+            resource = NewRenderResource()
+        d.site.resource.putChild(b'newrender', resource)
+        d.transport.port = 81
+        request = server.Request(d, 1)
+        request.setHost(b'example.com', 81)
+        request.gotLength(0)
+        return request
+
+    def testGoodMethods(self):
+        req = self._getReq()
+        req.requestReceived(b'GET', b'/newrender', b'HTTP/1.0')
+        self.assertEqual(req.transport.getvalue().splitlines()[-1], b'hi hi')
+
+        req = self._getReq()
+        req.requestReceived(b'HEH', b'/newrender', b'HTTP/1.0')
+        self.assertEqual(req.transport.getvalue().splitlines()[-1], b'ho ho')
+
+    def testBadMethods(self):
+        req = self._getReq()
+        req.requestReceived(b'CONNECT', b'/newrender', b'HTTP/1.0')
+        self.assertEqual(req.code, 501)
+
+        req = self._getReq()
+        req.requestReceived(b'hlalauguG', b'/newrender', b'HTTP/1.0')
+        self.assertEqual(req.code, 501)
+
+    def testImplicitHead(self):
+        req = self._getReq()
+        req.requestReceived(b'HEAD', b'/newrender', b'HTTP/1.0')
+        self.assertEqual(req.code, 200)
+        self.assertEqual(-1, req.transport.getvalue().find(b'hi hi'))
+
+
+    def test_unsupportedHead(self):
+        """
+        HEAD requests against a resource that only claims support for GET
+        should not include a body in the response.
+        """
+        resource = HeadlessResource()
+        req = self._getReq(resource)
+        req.requestReceived(b"HEAD", b"/newrender", b"HTTP/1.0")
+        headers, body = req.transport.getvalue().split(b'\r\n\r\n')
+        self.assertEqual(req.code, 200)
+        self.assertEqual(body, b'')
+
+
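+# Editor's note: an illustrative summary (not part of the upstream file) of the
+# dispatch behaviour exercised by NewRenderTestCase above.  Resource.render
+# looks up a render_{METHOD} attribute matching the request method, so a
+# resource only needs, e.g.:
+#
+#     class Greeter(resource.Resource):
+#         def render_GET(self, request):
+#             return b"hi hi"
+#
+# Unknown methods produce a 501 response, and a HEAD request for a resource
+# that only defines render_GET is answered by rendering GET and discarding the
+# body.  The name Greeter is hypothetical.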
+
+class GettableResource(resource.Resource):
+    """
+    Used by AllowedMethodsTest to simulate an allowed method.
+    """
+    def render_GET(self):
+        pass
+
+    def render_fred_render_ethel(self):
+        """
+        The unusual method name is designed to test the culling method
+        in C{twisted.web.resource._computeAllowedMethods}.
+        """
+        pass
+
+
+
+class AllowedMethodsTest(unittest.TestCase):
+    """
+    C{twisted.web.resource._computeAllowedMethods} is provided as a
+    default should the subclass not provide the method.
+    """
+
+    if _PY3:
+        skip = "Allowed methods functionality not ported to Python 3."
+
+    def _getReq(self):
+        """
+        Generate a dummy request for use by C{_computeAllowedMethod} tests.
+        """
+        d = DummyChannel()
+        d.site.resource.putChild(b'gettableresource', GettableResource())
+        d.transport.port = 81
+        request = server.Request(d, 1)
+        request.setHost(b'example.com', 81)
+        request.gotLength(0)
+        return request
+
+
+    def test_computeAllowedMethods(self):
+        """
+        C{_computeAllowedMethods} will search through the
+        'gettableresource' for all attributes/methods of the form
+        'render_{method}' ('render_GET', for example) and return a list of
+        the methods. 'HEAD' will always be included from the
+        resource.Resource superclass.
+        """
+        res = GettableResource()
+        allowedMethods = resource._computeAllowedMethods(res)
+        self.assertEqual(set(allowedMethods),
+                          set([b'GET', b'HEAD', b'fred_render_ethel']))
+
+
+    def test_notAllowed(self):
+        """
+        When an unsupported method is requested, the default
+        L{_computeAllowedMethods} method will be called to determine the
+        allowed methods, and the HTTP 405 'Method Not Allowed' status will
+        be returned, with the allowed methods listed in the 'Allow' header.
+        """
+        req = self._getReq()
+        req.requestReceived(b'POST', b'/gettableresource', b'HTTP/1.0')
+        self.assertEqual(req.code, 405)
+        self.assertEqual(
+            set(req.responseHeaders.getRawHeaders(b'allow')[0].split(b", ")),
+            set([b'GET', b'HEAD', b'fred_render_ethel'])
+        )
+
+
+    def test_notAllowedQuoting(self):
+        """
+        When an unsupported method response is generated, an HTML message will
+        be displayed.  That message should include a quoted form of the URI,
+        since that value comes from a browser and shouldn't necessarily be
+        trusted.
+        """
+        req = self._getReq()
+        req.requestReceived(b'POST', b'/gettableresource?'
+                            b'value=<script>bad', b'HTTP/1.0')
+        self.assertEqual(req.code, 405)
+        renderedPage = req.transport.getvalue()
+        self.assertNotIn(b"<script>bad", renderedPage)
+        self.assertIn(b'<script>bad', renderedPage)
+
+
+    def test_notImplementedQuoting(self):
+        """
+        When a not-implemented method response is generated, an HTML message
+        will be displayed.  That message should include a quoted form of the
+        requested method, since that value comes from a browser and shouldn't
+        necessarily be trusted.
+        """
+        req = self._getReq()
+        req.requestReceived(b'<style>bad', b'/gettableresource', b'HTTP/1.0')
+        self.assertEqual(req.code, 501)
+        renderedPage = req.transport.getvalue()
+        self.assertNotIn(b"<style>bad", renderedPage)
+        self.assertIn(b'<style>bad', renderedPage)
+
+
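+# Editor's note: a rough sketch (not part of the upstream file) of what the
+# tests above assume _computeAllowedMethods does: collect the suffix of every
+# render_* attribute on the resource, which is why the oddly named
+# render_fred_render_ethel yields the method b'fred_render_ethel'.  A
+# simplified approximation, for illustration only:
+#
+#     def computeAllowedMethodsSketch(res):
+#         return [name[len("render_"):].encode("ascii")
+#                 for name in dir(res) if name.startswith("render_")]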
+
+class DummyRequestForLogTest(DummyRequest):
+    uri = b'/dummy' # parent class uri has "http://", which doesn't really happen
+    code = 123
+
+    clientproto = b'HTTP/1.0'
+    sentLength = None
+    client = IPv4Address('TCP', '1.2.3.4', 12345)
+
+
+
+class TestLogEscaping(unittest.TestCase):
+    def setUp(self):
+        self.site = http.HTTPFactory()
+        self.site.logFile = StringIO()
+        self.request = DummyRequestForLogTest(self.site, False)
+
+    def testSimple(self):
+        self.site._logDateTime = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (
+            25, 'Oct', 2004, 12, 31, 59)
+        self.site.log(self.request)
+        self.site.logFile.seek(0)
+        self.assertEqual(
+            self.site.logFile.read(),
+            '1.2.3.4 - - [25/Oct/2004:12:31:59 +0000] "GET /dummy HTTP/1.0" 123 - "-" "-"\n')
+
+    def testMethodQuote(self):
+        self.site._logDateTime = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (
+            25, 'Oct', 2004, 12, 31, 59)
+        self.request.method = 'G"T'
+        self.site.log(self.request)
+        self.site.logFile.seek(0)
+        self.assertEqual(
+            self.site.logFile.read(),
+            '1.2.3.4 - - [25/Oct/2004:12:31:59 +0000] "G\\"T /dummy HTTP/1.0" 123 - "-" "-"\n')
+
+    def testRequestQuote(self):
+        self.site._logDateTime = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (
+            25, 'Oct', 2004, 12, 31, 59)
+        self.request.uri='/dummy"withquote'
+        self.site.log(self.request)
+        self.site.logFile.seek(0)
+        self.assertEqual(
+            self.site.logFile.read(),
+            '1.2.3.4 - - [25/Oct/2004:12:31:59 +0000] "GET /dummy\\"withquote HTTP/1.0" 123 - "-" "-"\n')
+
+    def testProtoQuote(self):
+        self.site._logDateTime = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (
+            25, 'Oct', 2004, 12, 31, 59)
+        self.request.clientproto='HT"P/1.0'
+        self.site.log(self.request)
+        self.site.logFile.seek(0)
+        self.assertEqual(
+            self.site.logFile.read(),
+            '1.2.3.4 - - [25/Oct/2004:12:31:59 +0000] "GET /dummy HT\\"P/1.0" 123 - "-" "-"\n')
+
+    def testRefererQuote(self):
+        self.site._logDateTime = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (
+            25, 'Oct', 2004, 12, 31, 59)
+        self.request.headers['referer'] = 'http://malicious" ".website.invalid'
+        self.site.log(self.request)
+        self.site.logFile.seek(0)
+        self.assertEqual(
+            self.site.logFile.read(),
+            '1.2.3.4 - - [25/Oct/2004:12:31:59 +0000] "GET /dummy HTTP/1.0" 123 - "http://malicious\\" \\".website.invalid" "-"\n')
+
+    def testUserAgentQuote(self):
+        self.site._logDateTime = "[%02d/%3s/%4d:%02d:%02d:%02d +0000]" % (
+            25, 'Oct', 2004, 12, 31, 59)
+        self.request.headers['user-agent'] = 'Malicious Web" Evil'
+        self.site.log(self.request)
+        self.site.logFile.seek(0)
+        self.assertEqual(
+            self.site.logFile.read(),
+            '1.2.3.4 - - [25/Oct/2004:12:31:59 +0000] "GET /dummy HTTP/1.0" 123 - "-" "Malicious Web\\" Evil"\n')
+
+
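+# Editor's note: for reference (not part of the upstream file), the assertions
+# above check that access-log lines follow the combined log format and that any
+# double quote in the method, URI, protocol, Referer or User-Agent field is
+# escaped as \" so the quoted fields remain parseable, e.g.:
+#
+#     1.2.3.4 - - [25/Oct/2004:12:31:59 +0000] "G\"T /dummy HTTP/1.0" 123 - "-" "-"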
+
+class ServerAttributesTestCase(unittest.TestCase):
+    """
+    Tests that deprecated twisted.web.server attributes raise the appropriate
+    deprecation warnings when used.
+    """
+
+    def test_deprecatedAttributeDateTimeString(self):
+        """
+        twisted.web.server.date_time_string should not be used; instead use
+        twisted.web.http.datetimeToString directly
+        """
+        deprecated_func = server.date_time_string
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_deprecatedAttributeDateTimeString])
+
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            ("twisted.web.server.date_time_string was deprecated in Twisted "
+             "12.1.0: Please use twisted.web.http.datetimeToString instead"))
+
+
+    def test_deprecatedAttributeStringDateTime(self):
+        """
+        twisted.web.server.string_date_time should not be used; instead use
+        twisted.web.http.stringToDatetime directly
+        """
+        deprecated_func = server.string_date_time
+        warnings = self.flushWarnings(
+            offendingFunctions=[self.test_deprecatedAttributeStringDateTime])
+
+        self.assertEqual(len(warnings), 1)
+        self.assertEqual(warnings[0]['category'], DeprecationWarning)
+        self.assertEqual(
+            warnings[0]['message'],
+            ("twisted.web.server.string_date_time was deprecated in Twisted "
+             "12.1.0: Please use twisted.web.http.stringToDatetime instead"))
diff --git a/ThirdParty/Twisted/twisted/web/test/test_webclient.py b/ThirdParty/Twisted/twisted/web/test/test_webclient.py
new file mode 100644
index 0000000..cd3a609
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_webclient.py
@@ -0,0 +1,1070 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for the old L{twisted.web.client} APIs, C{getPage} and friends.
+"""
+
+from __future__ import division, absolute_import
+
+import os
+from errno import ENOSPC
+
+try:
+    from urlparse import urlparse, urljoin
+except ImportError:
+    from urllib.parse import urlparse, urljoin
+
+from twisted.python.compat import _PY3, networkString, nativeString, intToBytes
+from twisted.trial import unittest
+from twisted.web import server, client, error, resource
+from twisted.internet import reactor, defer, interfaces
+from twisted.python.filepath import FilePath
+from twisted.python.log import msg
+from twisted.protocols.policies import WrappingFactory
+from twisted.test.proto_helpers import StringTransport
+
+try:
+    from twisted.internet import ssl
+except:
+    ssl = None
+
+from twisted import test
+serverPEM = FilePath(test.__file__.encode("utf-8")).sibling(b'server.pem')
+serverPEMPath = nativeString(serverPEM.path)
+
+# Remove this in #6177, when static is ported to Python 3:
+if _PY3:
+    from twisted.web.test.test_web import Data
+else:
+    from twisted.web.static import Data
+
+# Remove this in #6178, when util is ported to Python 3:
+if _PY3:
+    class Redirect(resource.Resource):
+        isLeaf = 1
+
+        def __init__(self, url):
+            resource.Resource.__init__(self)
+            self.url = url
+
+        def render(self, request):
+            request.redirect(self.url)
+            return b""
+
+        def getChild(self, name, request):
+            return self
+else:
+    from twisted.web.util import Redirect
+
+_PY3DownloadSkip = "downloadPage will be ported to Python 3 in ticket #6197."
+
+
+class ExtendedRedirect(resource.Resource):
+    """
+    Redirection resource.
+
+    The HTTP status code is set according to the C{code} query parameter.
+
+    @type lastMethod: C{str}
+    @ivar lastMethod: Last handled HTTP request method
+    """
+    isLeaf = 1
+    lastMethod = None
+
+
+    def __init__(self, url):
+        resource.Resource.__init__(self)
+        self.url = url
+
+
+    def render(self, request):
+        if self.lastMethod:
+            self.lastMethod = request.method
+            return b"OK Thnx!"
+        else:
+            self.lastMethod = request.method
+            code = int(request.args[b'code'][0])
+            return self.redirectTo(self.url, request, code)
+
+
+    def getChild(self, name, request):
+        return self
+
+
+    def redirectTo(self, url, request, code):
+        request.setResponseCode(code)
+        request.setHeader(b"location", url)
+        return b"OK Bye!"
+
+
+
+class ForeverTakingResource(resource.Resource):
+    """
+    L{ForeverTakingResource} is a resource which never finishes responding
+    to requests.
+    """
+    def __init__(self, write=False):
+        resource.Resource.__init__(self)
+        self._write = write
+
+    def render(self, request):
+        if self._write:
+            request.write(b'some bytes')
+        return server.NOT_DONE_YET
+
+
+class CookieMirrorResource(resource.Resource):
+    def render(self, request):
+        l = []
+        for k,v in sorted(list(request.received_cookies.items())):
+            l.append((nativeString(k), nativeString(v)))
+        l.sort()
+        return networkString(repr(l))
+
+class RawCookieMirrorResource(resource.Resource):
+    def render(self, request):
+        header = request.getHeader(b'cookie')
+        if header is None:
+            return b'None'
+        return networkString(repr(nativeString(header)))
+
+class ErrorResource(resource.Resource):
+
+    def render(self, request):
+        request.setResponseCode(401)
+        if request.args.get(b"showlength"):
+            request.setHeader(b"content-length", b"0")
+        return b""
+
+class NoLengthResource(resource.Resource):
+
+    def render(self, request):
+        return b"nolength"
+
+
+
+class HostHeaderResource(resource.Resource):
+    """
+    A testing resource which renders itself as the value of the host header
+    from the request.
+    """
+    def render(self, request):
+        return request.received_headers[b'host']
+
+
+
+class PayloadResource(resource.Resource):
+    """
+    A testing resource which renders itself as the contents of the request
+    body, as long as the request body is 100 bytes long; otherwise it renders
+    itself as C{"ERROR"}.
+    """
+    def render(self, request):
+        data = request.content.read()
+        contentLength = request.received_headers[b'content-length']
+        if len(data) != 100 or int(contentLength) != 100:
+            return b"ERROR"
+        return data
+
+
+class DelayResource(resource.Resource):
+
+    def __init__(self, seconds):
+        self.seconds = seconds
+
+    def render(self, request):
+        def response():
+            request.write(b'some bytes')
+            request.finish()
+        reactor.callLater(self.seconds, response)
+        return server.NOT_DONE_YET
+
+
+class BrokenDownloadResource(resource.Resource):
+
+    def render(self, request):
+        # only sends 3 bytes even though it claims to send 5
+        request.setHeader(b"content-length", b"5")
+        request.write(b'abc')
+        return b''
+
+class CountingRedirect(Redirect):
+    """
+    A L{Redirect} resource that keeps track of the number of times the
+    resource has been accessed.
+    """
+    def __init__(self, *a, **kw):
+        Redirect.__init__(self, *a, **kw)
+        self.count = 0
+
+    def render(self, request):
+        self.count += 1
+        return Redirect.render(self, request)
+
+
+class CountingResource(resource.Resource):
+    """
+    A resource that keeps track of the number of times it has been accessed.
+    """
+    def __init__(self):
+        resource.Resource.__init__(self)
+        self.count = 0
+
+    def render(self, request):
+        self.count += 1
+        return b"Success"
+
+
+class ParseUrlTestCase(unittest.TestCase):
+    """
+    Test URL parsing facility and defaults values.
+    """
+
+    def test_parse(self):
+        """
+        L{client._parse} correctly parses a URL into its various components.
+        """
+        # The default port for HTTP is 80.
+        self.assertEqual(
+            client._parse(b'http://127.0.0.1/'),
+            (b'http', b'127.0.0.1', 80, b'/'))
+
+        # The default port for HTTPS is 443.
+        self.assertEqual(
+            client._parse(b'https://127.0.0.1/'),
+            (b'https', b'127.0.0.1', 443, b'/'))
+
+        # Specifying a port.
+        self.assertEqual(
+            client._parse(b'http://spam:12345/'),
+            (b'http', b'spam', 12345, b'/'))
+
+        # Weird (but commonly accepted) structure uses default port.
+        self.assertEqual(
+            client._parse(b'http://spam:/'),
+            (b'http', b'spam', 80, b'/'))
+
+        # Spaces in the hostname are trimmed, the default path is /.
+        self.assertEqual(
+            client._parse(b'http://foo '),
+            (b'http', b'foo', 80, b'/'))
+
+
+    def test_externalUnicodeInterference(self):
+        """
+        L{client._parse} should return C{bytes} for the scheme, host, and path
+        elements of its return tuple, even when passed a URL which has
+        previously been passed to L{urlparse} as a C{unicode} string.
+        """
+        badInput = u'http://example.com/path'
+        goodInput = badInput.encode('ascii')
+        urlparse(badInput)
+        scheme, host, port, path = client._parse(goodInput)
+        self.assertIsInstance(scheme, bytes)
+        self.assertIsInstance(host, bytes)
+        self.assertIsInstance(path, bytes)
+
+
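+# Editor's note: a quick reference (not part of the upstream file) for the
+# parsing behaviour exercised above.  client._parse splits a URL into
+# (scheme, host, port, path), filling in the scheme's default port and a
+# default path of b'/':
+#
+#     client._parse(b'http://spam:12345/')   # (b'http', b'spam', 12345, b'/')
+#     client._parse(b'https://127.0.0.1/')   # (b'https', b'127.0.0.1', 443, b'/')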
+
+class HTTPPageGetterTests(unittest.TestCase):
+    """
+    Tests for L{HTTPPageGetter}, the HTTP client protocol implementation
+    used to implement L{getPage}.
+    """
+    def test_earlyHeaders(self):
+        """
+        When a connection is made, L{HTTPPageGetter} sends the headers from
+        its factory's C{headers} dict.  If I{Host} or I{Content-Length} is
+        present in this dict, the values are not sent, since they are sent with
+        special values before the C{headers} dict is processed.  If
+        I{User-Agent} is present in the dict, it overrides the value of the
+        C{agent} attribute of the factory.  If I{Cookie} is present in the
+        dict, its value is added to the values from the factory's C{cookies}
+        attribute.
+        """
+        factory = client.HTTPClientFactory(
+            b'http://foo/bar',
+            agent=b"foobar",
+            cookies={b'baz': b'quux'},
+            postdata=b"some data",
+            headers={
+                b'Host': b'example.net',
+                b'User-Agent': b'fooble',
+                b'Cookie': b'blah blah',
+                b'Content-Length': b'12981',
+                b'Useful': b'value'})
+        transport = StringTransport()
+        protocol = client.HTTPPageGetter()
+        protocol.factory = factory
+        protocol.makeConnection(transport)
+        result = transport.value()
+        for expectedHeader in [
+            b"Host: example.net\r\n",
+            b"User-Agent: foobar\r\n",
+            b"Content-Length: 9\r\n",
+            b"Useful: value\r\n",
+            b"connection: close\r\n",
+            b"Cookie: blah blah; baz=quux\r\n"]:
+            self.assertIn(expectedHeader, result)
+
+
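+# Editor's note: for quick reference (not part of the upstream file),
+# test_earlyHeaders above asserts that the serialized request contains, in
+# some order, the header lines below.  Host matches the headers dict value,
+# User-Agent matches the agent= argument, Content-Length matches len(postdata),
+# and the dict's Cookie value is merged with the cookies= argument:
+#
+#     Host: example.net
+#     User-Agent: foobar
+#     Content-Length: 9
+#     Useful: value
+#     connection: close
+#     Cookie: blah blah; baz=quux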
+
+class WebClientTestCase(unittest.TestCase):
+    def _listen(self, site):
+        return reactor.listenTCP(0, site, interface="127.0.0.1")
+
+    def setUp(self):
+        self.agent = None # for twisted.web.client.Agent test
+        self.cleanupServerConnections = 0
+        r = resource.Resource()
+        r.putChild(b"file", Data(b"0123456789", b"text/html"))
+        r.putChild(b"redirect", Redirect(b"/file"))
+        self.infiniteRedirectResource = CountingRedirect(b"/infiniteRedirect")
+        r.putChild(b"infiniteRedirect", self.infiniteRedirectResource)
+        r.putChild(b"wait", ForeverTakingResource())
+        r.putChild(b"write-then-wait", ForeverTakingResource(write=True))
+        r.putChild(b"error", ErrorResource())
+        r.putChild(b"nolength", NoLengthResource())
+        r.putChild(b"host", HostHeaderResource())
+        r.putChild(b"payload", PayloadResource())
+        r.putChild(b"broken", BrokenDownloadResource())
+        r.putChild(b"cookiemirror", CookieMirrorResource())
+        r.putChild(b'delay1', DelayResource(1))
+        r.putChild(b'delay2', DelayResource(2))
+
+        self.afterFoundGetCounter = CountingResource()
+        r.putChild(b"afterFoundGetCounter", self.afterFoundGetCounter)
+        r.putChild(b"afterFoundGetRedirect", Redirect(b"/afterFoundGetCounter"))
+
+        miscasedHead = Data(b"miscased-head GET response content", b"major/minor")
+        miscasedHead.render_Head = lambda request: b"miscased-head content"
+        r.putChild(b"miscased-head", miscasedHead)
+
+        self.extendedRedirect = ExtendedRedirect(b'/extendedRedirect')
+        r.putChild(b"extendedRedirect", self.extendedRedirect)
+        self.site = server.Site(r, timeout=None)
+        self.wrapper = WrappingFactory(self.site)
+        self.port = self._listen(self.wrapper)
+        self.portno = self.port.getHost().port
+
+    def tearDown(self):
+        if self.agent:
+            # clean up connections for twisted.web.client.Agent test.
+            self.agent.closeCachedConnections()
+            self.agent = None
+
+        # If the test indicated it might leave some server-side connections
+        # around, clean them up.
+        connections = list(self.wrapper.protocols.keys())
+        # If there are fewer server-side connections than requested,
+        # that's okay.  Some might have noticed that the client closed
+        # the connection and cleaned up after themselves.
+        for n in range(min(len(connections), self.cleanupServerConnections)):
+            proto = connections.pop()
+            msg("Closing %r" % (proto,))
+            proto.transport.loseConnection()
+        if connections:
+            msg("Some left-over connections; this test is probably buggy.")
+        return self.port.stopListening()
+
+    def getURL(self, path):
+        host = "http://127.0.0.1:%d/" % self.portno
+        return networkString(urljoin(host, nativeString(path)))
+
+    def testPayload(self):
+        s = b"0123456789" * 10
+        return client.getPage(self.getURL("payload"), postdata=s
+                              ).addCallback(self.assertEqual, s
+            )
+
+
+    def test_getPageBrokenDownload(self):
+        """
+        If the connection is closed before the number of bytes indicated by
+        I{Content-Length} have been received, the L{Deferred} returned by
+        L{getPage} fails with L{PartialDownloadError}.
+        """
+        d = client.getPage(self.getURL("broken"))
+        d = self.assertFailure(d, client.PartialDownloadError)
+        d.addCallback(lambda exc: self.assertEqual(exc.response, b"abc"))
+        return d
+
+
+    def test_downloadPageBrokenDownload(self):
+        """
+        If the connection is closed before the number of bytes indicated by
+        I{Content-Length} have been received, the L{Deferred} returned by
+        L{downloadPage} fails with L{PartialDownloadError}.
+        """
+        # test what happens when download gets disconnected in the middle
+        path = FilePath(self.mktemp())
+        d = client.downloadPage(self.getURL("broken"), path.path)
+        d = self.assertFailure(d, client.PartialDownloadError)
+
+        def checkResponse(response):
+            """
+            The HTTP status code from the server is propagated through the
+            C{PartialDownloadError}.
+            """
+            self.assertEqual(response.status, b"200")
+            self.assertEqual(response.message, b"OK")
+            return response
+        d.addCallback(checkResponse)
+
+        def cbFailed(ignored):
+            self.assertEqual(path.getContent(), b"abc")
+        d.addCallback(cbFailed)
+        return d
+
+    def test_downloadPageLogsFileCloseError(self):
+        """
+        If there is an exception closing the file being written to after the
+        connection is prematurely closed, that exception is logged.
+        """
+        class BrokenFile:
+            def write(self, bytes):
+                pass
+
+            def close(self):
+                raise IOError(ENOSPC, "No file left on device")
+
+        d = client.downloadPage(self.getURL("broken"), BrokenFile())
+        d = self.assertFailure(d, client.PartialDownloadError)
+        def cbFailed(ignored):
+            self.assertEqual(len(self.flushLoggedErrors(IOError)), 1)
+        d.addCallback(cbFailed)
+        return d
+
+
+    def testHostHeader(self):
+        # If we pass a Host header explicitly, it should be used; otherwise
+        # it should be extracted from the URL.
+        return defer.gatherResults([
+            client.getPage(self.getURL("host")).addCallback(
+                    self.assertEqual, b"127.0.0.1:" + intToBytes(self.portno)),
+            client.getPage(self.getURL("host"),
+                           headers={b"Host": b"www.example.com"}).addCallback(
+                    self.assertEqual, b"www.example.com")])
+
+
+    def test_getPage(self):
+        """
+        L{client.getPage} returns a L{Deferred} which is called back with
+        the body of the response if the default method B{GET} is used.
+        """
+        d = client.getPage(self.getURL("file"))
+        d.addCallback(self.assertEqual, b"0123456789")
+        return d
+
+
+    def test_getPageHEAD(self):
+        """
+        L{client.getPage} returns a L{Deferred} which is called back with
+        the empty string if the method is I{HEAD} and there is a successful
+        response code.
+        """
+        d = client.getPage(self.getURL("file"), method=b"HEAD")
+        d.addCallback(self.assertEqual, b"")
+        return d
+
+
+    def test_getPageNotQuiteHEAD(self):
+        """
+        If the request method is a different casing of I{HEAD} (ie, not all
+        capitalized) then it is not a I{HEAD} request and the response body
+        is returned.
+        """
+        d = client.getPage(self.getURL("miscased-head"), method=b'Head')
+        d.addCallback(self.assertEqual, b"miscased-head content")
+        return d
+
+
+    def test_timeoutNotTriggering(self):
+        """
+        When a non-zero timeout is passed to L{getPage} and the page is
+        retrieved before the timeout period elapses, the L{Deferred} is
+        called back with the contents of the page.
+        """
+        d = client.getPage(self.getURL("host"), timeout=100)
+        d.addCallback(self.assertEqual,
+                      networkString("127.0.0.1:%s" % (self.portno,)))
+        return d
+
+
+    def test_timeoutTriggering(self):
+        """
+        When a non-zero timeout is passed to L{getPage} and that many
+        seconds elapse before the server responds to the request, the
+        L{Deferred} is errbacked with a L{defer.TimeoutError}.
+        """
+        # This will probably leave some connections around.
+        self.cleanupServerConnections = 1
+        return self.assertFailure(
+            client.getPage(self.getURL("wait"), timeout=0.000001),
+            defer.TimeoutError)
+
+
+    def testDownloadPage(self):
+        downloads = []
+        downloadData = [(b"file", self.mktemp(), b"0123456789"),
+                        (b"nolength", self.mktemp(), b"nolength")]
+
+        for (url, name, data) in downloadData:
+            d = client.downloadPage(self.getURL(url), name)
+            d.addCallback(self._cbDownloadPageTest, data, name)
+            downloads.append(d)
+        return defer.gatherResults(downloads)
+
+    def _cbDownloadPageTest(self, ignored, data, name):
+        bytes = open(name, "rb").read()
+        self.assertEqual(bytes, data)
+
+    def testDownloadPageError1(self):
+        class errorfile:
+            def write(self, data):
+                raise IOError("badness happened during write")
+            def close(self):
+                pass
+        ef = errorfile()
+        return self.assertFailure(
+            client.downloadPage(self.getURL("file"), ef),
+            IOError)
+
+    def testDownloadPageError2(self):
+        class errorfile:
+            def write(self, data):
+                pass
+            def close(self):
+                raise IOError("badness happened during close")
+        ef = errorfile()
+        return self.assertFailure(
+            client.downloadPage(self.getURL("file"), ef),
+            IOError)
+
+    def testDownloadPageError3(self):
+        # make sure failures in open() are caught too. This is tricky.
+        # Might only work on posix.
+        tmpfile = open("unwritable", "wb")
+        tmpfile.close()
+        os.chmod("unwritable", 0) # make it unwritable (to us)
+        d = self.assertFailure(
+            client.downloadPage(self.getURL("file"), "unwritable"),
+            IOError)
+        d.addBoth(self._cleanupDownloadPageError3)
+        return d
+
+    def _cleanupDownloadPageError3(self, ignored):
+        os.chmod("unwritable", 0o700)
+        os.unlink("unwritable")
+        return ignored
+
+    def _downloadTest(self, method):
+        dl = []
+        for (url, code) in [("nosuchfile", b"404"), ("error", b"401"),
+                            ("error?showlength=1", b"401")]:
+            d = method(url)
+            d = self.assertFailure(d, error.Error)
+            d.addCallback(lambda exc, code=code: self.assertEqual(exc.args[0], code))
+            dl.append(d)
+        return defer.DeferredList(dl, fireOnOneErrback=True)
+
+    def testServerError(self):
+        return self._downloadTest(lambda url: client.getPage(self.getURL(url)))
+
+    def testDownloadServerError(self):
+        return self._downloadTest(lambda url: client.downloadPage(self.getURL(url), url.split('?')[0]))
+
+    def testFactoryInfo(self):
+        url = self.getURL('file')
+        scheme, host, port, path = client._parse(url)
+        factory = client.HTTPClientFactory(url)
+        reactor.connectTCP(nativeString(host), port, factory)
+        return factory.deferred.addCallback(self._cbFactoryInfo, factory)
+
+    def _cbFactoryInfo(self, ignoredResult, factory):
+        self.assertEqual(factory.status, b'200')
+        self.assertTrue(factory.version.startswith(b'HTTP/'))
+        self.assertEqual(factory.message, b'OK')
+        self.assertEqual(factory.response_headers[b'content-length'][0], b'10')
+
+
+    def test_followRedirect(self):
+        """
+        By default, L{client.getPage} follows redirects and returns the content
+        of the target resource.
+        """
+        d = client.getPage(self.getURL("redirect"))
+        d.addCallback(self.assertEqual, b"0123456789")
+        return d
+
+
+    def test_noFollowRedirect(self):
+        """
+        If C{followRedirect} is passed a false value, L{client.getPage} does not
+        follow redirects and returns a L{Deferred} which fails with
+        L{error.PageRedirect} when it encounters one.
+        """
+        d = self.assertFailure(
+            client.getPage(self.getURL("redirect"), followRedirect=False),
+            error.PageRedirect)
+        d.addCallback(self._cbCheckLocation)
+        return d
+
+
+    def _cbCheckLocation(self, exc):
+        self.assertEqual(exc.location, b"/file")
+
+
+    def test_infiniteRedirection(self):
+        """
+        When more than C{redirectLimit} HTTP redirects are encountered, the
+        page request fails with L{InfiniteRedirection}.
+        """
+        def checkRedirectCount(*a):
+            self.assertEqual(f._redirectCount, 13)
+            self.assertEqual(self.infiniteRedirectResource.count, 13)
+
+        f = client._makeGetterFactory(
+            self.getURL('infiniteRedirect'),
+            client.HTTPClientFactory,
+            redirectLimit=13)
+        d = self.assertFailure(f.deferred, error.InfiniteRedirection)
+        d.addCallback(checkRedirectCount)
+        return d
+
+
+    def test_isolatedFollowRedirect(self):
+        """
+        C{client.HTTPPageGetter} instances each obey the C{followRedirect}
+        value passed to the L{client.getPage} call which created them.
+        """
+        d1 = client.getPage(self.getURL('redirect'), followRedirect=True)
+        d2 = client.getPage(self.getURL('redirect'), followRedirect=False)
+
+        d = self.assertFailure(d2, error.PageRedirect
+            ).addCallback(lambda dummy: d1)
+        return d
+
+
+    def test_afterFoundGet(self):
+        """
+        Enabling unsafe redirection behaviour overwrites the method of
+        redirected C{POST} requests with C{GET}.
+        """
+        url = self.getURL('extendedRedirect?code=302')
+        f = client.HTTPClientFactory(url, followRedirect=True, method=b"POST")
+        self.assertFalse(
+            f.afterFoundGet,
+            "By default, afterFoundGet must be disabled")
+
+        def gotPage(page):
+            self.assertEqual(
+                self.extendedRedirect.lastMethod,
+                b"GET",
+                "With afterFoundGet, the HTTP method must change to GET")
+
+        d = client.getPage(
+            url, followRedirect=True, afterFoundGet=True, method=b"POST")
+        d.addCallback(gotPage)
+        return d
+
+
+    def test_downloadAfterFoundGet(self):
+        """
+        Passing C{True} for C{afterFoundGet} to L{client.downloadPage} invokes
+        the same kind of redirect handling as passing that argument to
+        L{client.getPage} invokes.
+        """
+        url = self.getURL('extendedRedirect?code=302')
+
+        def gotPage(page):
+            self.assertEqual(
+                self.extendedRedirect.lastMethod,
+                b"GET",
+                "With afterFoundGet, the HTTP method must change to GET")
+
+        d = client.downloadPage(url, "downloadTemp",
+            followRedirect=True, afterFoundGet=True, method="POST")
+        d.addCallback(gotPage)
+        return d
+
+
+    def test_afterFoundGetMakesOneRequest(self):
+        """
+        When C{afterFoundGet} is C{True}, L{client.getPage} only issues one
+        request to the server when following the redirect.  This is a regression
+        test, see #4760.
+        """
+        def checkRedirectCount(*a):
+            self.assertEqual(self.afterFoundGetCounter.count, 1)
+
+        url = self.getURL('afterFoundGetRedirect')
+        d = client.getPage(
+            url, followRedirect=True, afterFoundGet=True, method=b"POST")
+        d.addCallback(checkRedirectCount)
+        return d
+
+
+    def testPartial(self):
+        name = self.mktemp()
+        f = open(name, "wb")
+        f.write(b"abcd")
+        f.close()
+
+        partialDownload = [(True, b"abcd456789"),
+                           (True, b"abcd456789"),
+                           (False, b"0123456789")]
+
+        d = defer.succeed(None)
+        for (partial, expectedData) in partialDownload:
+            d.addCallback(self._cbRunPartial, name, partial)
+            d.addCallback(self._cbPartialTest, expectedData, name)
+
+        return d
+
+    testPartial.skip = "Cannot test until webserver can serve partial data properly"
+
+    def _cbRunPartial(self, ignored, name, partial):
+        return client.downloadPage(self.getURL("file"), name, supportPartial=partial)
+
+    def _cbPartialTest(self, ignored, expectedData, filename):
+        bytes = open(filename, "rb").read()
+        self.assertEqual(bytes, expectedData)
+
+
+    def test_downloadTimeout(self):
+        """
+        If the timeout indicated by the C{timeout} parameter to
+        L{client.HTTPDownloader.__init__} elapses without the complete response
+        being received, the L{defer.Deferred} returned by
+        L{client.downloadPage} fires with a L{Failure} wrapping a
+        L{defer.TimeoutError}.
+        """
+        self.cleanupServerConnections = 2
+        # Verify the behavior if no bytes are ever written.
+        first = client.downloadPage(
+            self.getURL("wait"),
+            self.mktemp(), timeout=0.01)
+
+        # Verify the behavior if some bytes are written but then the request
+        # never completes.
+        second = client.downloadPage(
+            self.getURL("write-then-wait"),
+            self.mktemp(), timeout=0.01)
+
+        return defer.gatherResults([
+            self.assertFailure(first, defer.TimeoutError),
+            self.assertFailure(second, defer.TimeoutError)])
+
+
+    def test_downloadHeaders(self):
+        """
+        After L{client.HTTPDownloader.deferred} fires, the
+        L{client.HTTPDownloader} instance's C{status} and C{response_headers}
+        attributes are populated with the values from the response.
+        """
+        def checkHeaders(factory):
+            self.assertEqual(factory.status, b'200')
+            self.assertEqual(factory.response_headers[b'content-type'][0], b'text/html')
+            self.assertEqual(factory.response_headers[b'content-length'][0], b'10')
+            os.unlink(factory.fileName)
+        factory = client._makeGetterFactory(
+            self.getURL('file'),
+            client.HTTPDownloader,
+            fileOrName=self.mktemp())
+        return factory.deferred.addCallback(lambda _: checkHeaders(factory))
+
+
+    def test_downloadCookies(self):
+        """
+        The C{cookies} dict passed to the L{client.HTTPDownloader}
+        initializer is used to populate the I{Cookie} header included in the
+        request sent to the server.
+        """
+        output = self.mktemp()
+        factory = client._makeGetterFactory(
+            self.getURL('cookiemirror'),
+            client.HTTPDownloader,
+            fileOrName=output,
+            cookies={b'foo': b'bar'})
+        def cbFinished(ignored):
+            self.assertEqual(
+                FilePath(output).getContent(),
+                "[('foo', 'bar')]")
+        factory.deferred.addCallback(cbFinished)
+        return factory.deferred
+
+
+    def test_downloadRedirectLimit(self):
+        """
+        When more than C{redirectLimit} HTTP redirects are encountered, the
+        page request fails with L{InfiniteRedirection}.
+        """
+        def checkRedirectCount(*a):
+            self.assertEqual(f._redirectCount, 7)
+            self.assertEqual(self.infiniteRedirectResource.count, 7)
+
+        f = client._makeGetterFactory(
+            self.getURL('infiniteRedirect'),
+            client.HTTPDownloader,
+            fileOrName=self.mktemp(),
+            redirectLimit=7)
+        d = self.assertFailure(f.deferred, error.InfiniteRedirection)
+        d.addCallback(checkRedirectCount)
+        return d
+
+    if _PY3:
+        for method in (
+            test_downloadPageBrokenDownload,
+            test_downloadPageLogsFileCloseError,
+            testDownloadPage,
+            testDownloadPageError1,
+            testDownloadPageError2,
+            testDownloadPageError3,
+            testDownloadServerError,
+            test_downloadAfterFoundGet,
+            testPartial,
+            test_downloadTimeout,
+            test_downloadHeaders,
+            test_downloadCookies,
+            test_downloadRedirectLimit):
+            method.skip = _PY3DownloadSkip
+        del method
+
+
+
+class WebClientSSLTestCase(WebClientTestCase):
+    def _listen(self, site):
+        return reactor.listenSSL(
+            0, site,
+            contextFactory=ssl.DefaultOpenSSLContextFactory(
+                serverPEMPath, serverPEMPath),
+            interface="127.0.0.1")
+
+    def getURL(self, path):
+        return networkString("https://127.0.0.1:%d/%s" % (self.portno, path))
+
+    def testFactoryInfo(self):
+        url = self.getURL('file')
+        scheme, host, port, path = client._parse(url)
+        factory = client.HTTPClientFactory(url)
+        reactor.connectSSL(nativeString(host), port, factory,
+                           ssl.ClientContextFactory())
+        # The base class defines _cbFactoryInfo correctly for this
+        return factory.deferred.addCallback(self._cbFactoryInfo, factory)
+
+
+
+class WebClientRedirectBetweenSSLandPlainText(unittest.TestCase):
+    def getHTTPS(self, path):
+        return networkString("https://127.0.0.1:%d/%s" % (self.tlsPortno, path))
+
+    def getHTTP(self, path):
+        return networkString("http://127.0.0.1:%d/%s" % (self.plainPortno, path))
+
+    def setUp(self):
+        plainRoot = Data(b'not me', b'text/plain')
+        tlsRoot = Data(b'me neither', b'text/plain')
+
+        plainSite = server.Site(plainRoot, timeout=None)
+        tlsSite = server.Site(tlsRoot, timeout=None)
+
+        self.tlsPort = reactor.listenSSL(
+            0, tlsSite,
+            contextFactory=ssl.DefaultOpenSSLContextFactory(
+                serverPEMPath, serverPEMPath),
+            interface="127.0.0.1")
+        self.plainPort = reactor.listenTCP(0, plainSite, interface="127.0.0.1")
+
+        self.plainPortno = self.plainPort.getHost().port
+        self.tlsPortno = self.tlsPort.getHost().port
+
+        plainRoot.putChild(b'one', Redirect(self.getHTTPS('two')))
+        tlsRoot.putChild(b'two', Redirect(self.getHTTP('three')))
+        plainRoot.putChild(b'three', Redirect(self.getHTTPS('four')))
+        tlsRoot.putChild(b'four', Data(b'FOUND IT!', b'text/plain'))
+
+    def tearDown(self):
+        ds = list(
+            map(defer.maybeDeferred,
+                [self.plainPort.stopListening, self.tlsPort.stopListening]))
+        return defer.gatherResults(ds)
+
+    def testHoppingAround(self):
+        return client.getPage(self.getHTTP("one")
+            ).addCallback(self.assertEqual, b"FOUND IT!"
+            )
+
+
+class CookieTestCase(unittest.TestCase):
+    def _listen(self, site):
+        return reactor.listenTCP(0, site, interface="127.0.0.1")
+
+    def setUp(self):
+        root = Data(b'El toro!', b'text/plain')
+        root.putChild(b"cookiemirror", CookieMirrorResource())
+        root.putChild(b"rawcookiemirror", RawCookieMirrorResource())
+        site = server.Site(root, timeout=None)
+        self.port = self._listen(site)
+        self.portno = self.port.getHost().port
+
+    def tearDown(self):
+        return self.port.stopListening()
+
+    def getHTTP(self, path):
+        return networkString("http://127.0.0.1:%d/%s" % (self.portno, path))
+
+    def testNoCookies(self):
+        return client.getPage(self.getHTTP("cookiemirror")
+            ).addCallback(self.assertEqual, b"[]"
+            )
+
+    def testSomeCookies(self):
+        cookies = {b'foo': b'bar', b'baz': b'quux'}
+        return client.getPage(self.getHTTP("cookiemirror"), cookies=cookies
+            ).addCallback(self.assertEqual, b"[('baz', 'quux'), ('foo', 'bar')]"
+            )
+
+    def testRawNoCookies(self):
+        return client.getPage(self.getHTTP("rawcookiemirror")
+            ).addCallback(self.assertEqual, b"None"
+            )
+
+    def testRawSomeCookies(self):
+        cookies = {b'foo': b'bar', b'baz': b'quux'}
+        return client.getPage(self.getHTTP("rawcookiemirror"), cookies=cookies
+            ).addCallback(self.assertIn,
+                          (b"'foo=bar; baz=quux'", b"'baz=quux; foo=bar'")
+            )
+
+    def testCookieHeaderParsing(self):
+        factory = client.HTTPClientFactory(b'http://foo.example.com/')
+        proto = factory.buildProtocol('127.42.42.42')
+        transport = StringTransport()
+        proto.makeConnection(transport)
+        for line in [
+            b'200 Ok',
+            b'Squash: yes',
+            b'Hands: stolen',
+            b'Set-Cookie: CUSTOMER=WILE_E_COYOTE; path=/; expires=Wednesday, 09-Nov-99 23:12:40 GMT',
+            b'Set-Cookie: PART_NUMBER=ROCKET_LAUNCHER_0001; path=/',
+            b'Set-Cookie: SHIPPING=FEDEX; path=/foo',
+            b'',
+            b'body',
+            b'more body',
+            ]:
+            proto.dataReceived(line + b'\r\n')
+        self.assertEqual(transport.value(),
+                         b'GET / HTTP/1.0\r\n'
+                         b'Host: foo.example.com\r\n'
+                         b'User-Agent: Twisted PageGetter\r\n'
+                         b'\r\n')
+        self.assertEqual(factory.cookies,
+                          {
+            b'CUSTOMER': b'WILE_E_COYOTE',
+            b'PART_NUMBER': b'ROCKET_LAUNCHER_0001',
+            b'SHIPPING': b'FEDEX',
+            })
+
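+# Editor's note: an illustrative summary (not part of the upstream file) of the
+# cookie round-trip exercised by CookieTestCase above: a cookies= dict passed
+# to getPage is serialized into a single "Cookie: k=v; k2=v2" request header,
+# and Set-Cookie response headers are collected into factory.cookies as a
+# dict such as {b'CUSTOMER': b'WILE_E_COYOTE', ...}.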
+
+
+class TestHostHeader(unittest.TestCase):
+    """
+    Test that L{HTTPClientFactory} includes the port in the host header
+    if needed.
+    """
+
+    def _getHost(self, bytes):
+        """
+        Retrieve the value of the I{Host} header from the serialized
+        request given by C{bytes}.
+        """
+        for line in bytes.split(b'\r\n'):
+            try:
+                name, value = line.split(b':', 1)
+                if name.strip().lower() == b'host':
+                    return value.strip()
+            except ValueError:
+                pass
+
+
+    def test_HTTPDefaultPort(self):
+        """
+        No port should be included in the host header when connecting to the
+        default HTTP port.
+        """
+        factory = client.HTTPClientFactory(b'http://foo.example.com/')
+        proto = factory.buildProtocol(b'127.42.42.42')
+        proto.makeConnection(StringTransport())
+        self.assertEqual(self._getHost(proto.transport.value()),
+                          b'foo.example.com')
+
+
+    def test_HTTPPort80(self):
+        """
+        No port should be included in the host header when connecting to the
+        default HTTP port even if it is in the URL.
+        """
+        factory = client.HTTPClientFactory(b'http://foo.example.com:80/')
+        proto = factory.buildProtocol('127.42.42.42')
+        proto.makeConnection(StringTransport())
+        self.assertEqual(self._getHost(proto.transport.value()),
+                          b'foo.example.com')
+
+
+    def test_HTTPNotPort80(self):
+        """
+        The port should be included in the host header when connecting to a
+        non-default HTTP port.
+        """
+        factory = client.HTTPClientFactory(b'http://foo.example.com:8080/')
+        proto = factory.buildProtocol('127.42.42.42')
+        proto.makeConnection(StringTransport())
+        self.assertEqual(self._getHost(proto.transport.value()),
+                          b'foo.example.com:8080')
+
+
+    def test_HTTPSDefaultPort(self):
+        """
+        No port should be included in the host header when connecting to the
+        default HTTPS port.
+        """
+        factory = client.HTTPClientFactory(b'https://foo.example.com/')
+        proto = factory.buildProtocol('127.42.42.42')
+        proto.makeConnection(StringTransport())
+        self.assertEqual(self._getHost(proto.transport.value()),
+                          b'foo.example.com')
+
+
+    def test_HTTPSPort443(self):
+        """
+        No port should be included in the host header when connecting to the
+        default HTTPS port even if it is in the URL.
+        """
+        factory = client.HTTPClientFactory(b'https://foo.example.com:443/')
+        proto = factory.buildProtocol('127.42.42.42')
+        proto.makeConnection(StringTransport())
+        self.assertEqual(self._getHost(proto.transport.value()),
+                          b'foo.example.com')
+
+
+    def test_HTTPSNotPort443(self):
+        """
+        The port should be included in the host header when connecting to a
+        non-default HTTPS port.
+        """
+        factory = client.HTTPClientFactory(b'https://foo.example.com:8080/')
+        proto = factory.buildProtocol('127.42.42.42')
+        proto.makeConnection(StringTransport())
+        self.assertEqual(self._getHost(proto.transport.value()),
+                          b'foo.example.com:8080')
+
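+# Editor's note: a compact restatement (not part of the upstream file) of the
+# rule exercised by TestHostHeader above: the port is appended to the Host
+# header only when it differs from the scheme's default (80 for http, 443 for
+# https), e.g.:
+#
+#     http://foo.example.com/        ->  Host: foo.example.com
+#     http://foo.example.com:8080/   ->  Host: foo.example.com:8080
+#     https://foo.example.com:443/   ->  Host: foo.example.com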
+
+if ssl is None or not hasattr(ssl, 'DefaultOpenSSLContextFactory'):
+    for case in [WebClientSSLTestCase, WebClientRedirectBetweenSSLandPlainText]:
+        case.skip = "OpenSSL not present"
+
+if not interfaces.IReactorSSL(reactor, None):
+    for case in [WebClientSSLTestCase, WebClientRedirectBetweenSSLandPlainText]:
+        case.skip = "Reactor doesn't support SSL"
diff --git a/ThirdParty/Twisted/twisted/web/test/test_wsgi.py b/ThirdParty/Twisted/twisted/web/test/test_wsgi.py
new file mode 100644
index 0000000..ddcdf11
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_wsgi.py
@@ -0,0 +1,1572 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.web.wsgi}.
+"""
+
+__metaclass__ = type
+
+from sys import exc_info
+from urllib import quote
+from thread import get_ident
+import StringIO, cStringIO, tempfile
+
+from zope.interface.verify import verifyObject
+
+from twisted.python.compat import set
+from twisted.python.log import addObserver, removeObserver, err
+from twisted.python.failure import Failure
+from twisted.python.threadpool import ThreadPool
+from twisted.internet.defer import Deferred, gatherResults
+from twisted.internet import reactor
+from twisted.internet.error import ConnectionLost
+from twisted.trial.unittest import TestCase
+from twisted.web import http
+from twisted.web.resource import IResource, Resource
+from twisted.web.server import Request, Site, version
+from twisted.web.wsgi import WSGIResource
+from twisted.web.test.test_web import DummyChannel
+
+
+class SynchronousThreadPool:
+    """
+    A single-threaded implementation of part of the L{ThreadPool} interface.
+    This implementation calls functions synchronously rather than running
+    them in a thread pool.  It is used to make the tests which are not
+    directly for thread-related behavior deterministic.
+    """
+    def callInThread(self, f, *a, **kw):
+        """
+        Call C{f(*a, **kw)} in this thread rather than scheduling it to be
+        called in a thread.
+        """
+        try:
+            f(*a, **kw)
+        except:
+            # callInThread doesn't let exceptions propagate to the caller.
+            # None is always returned and any exception raised gets logged
+            # later on.
+            err(None, "Callable passed to SynchronousThreadPool.callInThread failed")
+
+
+
+class SynchronousReactorThreads:
+    """
+    A single-threaded implementation of part of the L{IReactorThreads}
+    interface.  This implementation assumes that it will only be invoked
+    from the reactor thread, so it calls functions synchronously rather than
+    trying to schedule them to run in the reactor thread.  It is used in
+    conjunction with L{SynchronousThreadPool} to make the tests which are
+    not directly for thread-related behavior deterministic.
+    """
+    def callFromThread(self, f, *a, **kw):
+        """
+        Call C{f(*a, **kw)} in this thread which should also be the reactor
+        thread.
+        """
+        f(*a, **kw)
+
+
+
+class WSGIResourceTests(TestCase):
+    def setUp(self):
+        """
+        Create a L{WSGIResource} with synchronous threading objects and a no-op
+        application object.  This is useful for testing certain things about
+        the resource implementation which are unrelated to WSGI.
+        """
+        self.resource = WSGIResource(
+            SynchronousReactorThreads(), SynchronousThreadPool(),
+            lambda environ, startResponse: None)
+
+
+    def test_interfaces(self):
+        """
+        L{WSGIResource} implements L{IResource} and stops resource traversal.
+        """
+        verifyObject(IResource, self.resource)
+        self.assertTrue(self.resource.isLeaf)
+
+
+    def test_unsupported(self):
+        """
+        A L{WSGIResource} cannot have L{IResource} children.  Its
+        C{getChildWithDefault} and C{putChild} methods raise L{RuntimeError}.
+        """
+        self.assertRaises(
+            RuntimeError,
+            self.resource.getChildWithDefault,
+            "foo", Request(DummyChannel(), False))
+        self.assertRaises(
+            RuntimeError,
+            self.resource.putChild,
+            "foo", Resource())
+
+
+class WSGITestsMixin:
+    """
+    @ivar channelFactory: A no-argument callable which will be invoked to
+        create a new HTTP channel to associate with request objects.
+    """
+    channelFactory = DummyChannel
+
+    def setUp(self):
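+        """
+        Create the synchronous thread pool and reactor stand-ins used to make
+        these tests deterministic.
+        """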
+        self.threadpool = SynchronousThreadPool()
+        self.reactor = SynchronousReactorThreads()
+
+
+    def lowLevelRender(
+        self, requestFactory, applicationFactory, channelFactory, method,
+        version, resourceSegments, requestSegments, query=None, headers=[],
+        body=None, safe=''):
+        """
+        @param method: A C{str} giving the request method to use.
+
+        @param version: A C{str} like C{'1.1'} giving the request version.
+
+        @param resourceSegments: A C{list} of unencoded path segments which
+            specifies the location in the resource hierarchy at which the
+            L{WSGIResource} will be placed, eg C{['']} for I{/}, C{['foo',
+            'bar', '']} for I{/foo/bar/}, etc.
+
+        @param requestSegments: A C{list} of unencoded path segments giving the
+            request URI.
+
+        @param query: A C{list} of two-tuples of C{str} giving unencoded query
+            argument keys and values.
+
+        @param headers: A C{list} of two-tuples of C{str} giving request header
+            names and corresponding values.
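+
+        @param body: An optional C{str} giving a request body which will be
+            written to the request's content file before rendering.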
+
+        @param safe: A C{str} giving the bytes which are to be considered
+            I{safe} for inclusion in the request URI and not quoted.
+
+        @return: The L{Request} constructed for this rendering, after
+            C{requestReceived} has been called on it.  (The higher-level
+            C{render} helper wraps this method and returns a L{Deferred}
+            which fires with the C{(environ, start_response)} arguments
+            passed to the application.)
+        """
+        root = WSGIResource(
+            self.reactor, self.threadpool, applicationFactory())
+        resourceSegments.reverse()
+        for seg in resourceSegments:
+            tmp = Resource()
+            tmp.putChild(seg, root)
+            root = tmp
+
+        channel = channelFactory()
+        channel.site = Site(root)
+        request = requestFactory(channel, False)
+        for k, v in headers:
+            request.requestHeaders.addRawHeader(k, v)
+        request.gotLength(0)
+        if body:
+            request.content.write(body)
+            request.content.seek(0)
+        uri = '/' + '/'.join([quote(seg, safe) for seg in requestSegments])
+        if query is not None:
+            uri += '?' + '&'.join(['='.join([quote(k, safe), quote(v, safe)])
+                                   for (k, v) in query])
+        request.requestReceived(method, uri, 'HTTP/' + version)
+        return request
+
+
+    def render(self, *a, **kw):
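+        """
+        Render a request (the positional and keyword arguments are passed on
+        to C{lowLevelRender}) against a trivial application and return a
+        L{Deferred} which fires with the C{(environ, start_response)}
+        two-tuple the application was called with.
+        """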
+        result = Deferred()
+        def applicationFactory():
+            def application(*args):
+                environ, startResponse = args
+                result.callback(args)
+                startResponse('200 OK', [])
+                return iter(())
+            return application
+        self.lowLevelRender(
+            Request, applicationFactory, self.channelFactory, *a, **kw)
+        return result
+
+
+    def requestFactoryFactory(self, requestClass=Request):
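+        """
+        Return a two-tuple of a L{Deferred} and a request factory.  The
+        factory creates C{requestClass} instances and chains each request's
+        C{notifyFinish} L{Deferred} to the returned L{Deferred}.
+        """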
+        d = Deferred()
+        def requestFactory(*a, **kw):
+            request = requestClass(*a, **kw)
+            # If notifyFinish is called after lowLevelRender returns, it won't
+            # do the right thing, because the request will have already
+            # finished.  One might argue that this is a bug in
+            # Request.notifyFinish.
+            request.notifyFinish().chainDeferred(d)
+            return request
+        return d, requestFactory
+
+
+    def getContentFromResponse(self, response):
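+        # The response body is everything after the header-terminating blank
+        # line.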
+        return response.split('\r\n\r\n', 1)[1]
+
+
+
+class EnvironTests(WSGITestsMixin, TestCase):
+    """
+    Tests for the values in the C{environ} C{dict} passed to the application
+    object by L{twisted.web.wsgi.WSGIResource}.
+    """
+    def environKeyEqual(self, key, value):
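+        """
+        Return a callback which asserts that C{key} in the C{environ} passed
+        to the application maps to C{value}.
+        """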
+        def assertEnvironKeyEqual((environ, startResponse)):
+            self.assertEqual(environ[key], value)
+        return assertEnvironKeyEqual
+
+
+    def test_environIsDict(self):
+        """
+        L{WSGIResource} calls the application object with an C{environ}
+        parameter which is exactly of type C{dict}.
+        """
+        d = self.render('GET', '1.1', [], [''])
+        def cbRendered((environ, startResponse)):
+            self.assertIdentical(type(environ), dict)
+        d.addCallback(cbRendered)
+        return d
+
+
+    def test_requestMethod(self):
+        """
+        The C{'REQUEST_METHOD'} key of the C{environ} C{dict} passed to the
+        application contains the HTTP method in the request (RFC 3875, section
+        4.1.12).
+        """
+        get = self.render('GET', '1.1', [], [''])
+        get.addCallback(self.environKeyEqual('REQUEST_METHOD', 'GET'))
+
+        # Also make sure a different request method shows up as a different
+        # value in the environ dict.
+        post = self.render('POST', '1.1', [], [''])
+        post.addCallback(self.environKeyEqual('REQUEST_METHOD', 'POST'))
+
+        return gatherResults([get, post])
+
+
+    def test_scriptName(self):
+        """
+        The C{'SCRIPT_NAME'} key of the C{environ} C{dict} passed to the
+        application contains the I{abs_path} (RFC 2396, section 3) to this
+        resource (RFC 3875, section 4.1.13).
+        """
+        root = self.render('GET', '1.1', [], [''])
+        root.addCallback(self.environKeyEqual('SCRIPT_NAME', ''))
+
+        emptyChild = self.render('GET', '1.1', [''], [''])
+        emptyChild.addCallback(self.environKeyEqual('SCRIPT_NAME', '/'))
+
+        leaf = self.render('GET', '1.1', ['foo'], ['foo'])
+        leaf.addCallback(self.environKeyEqual('SCRIPT_NAME', '/foo'))
+
+        container = self.render('GET', '1.1', ['foo', ''], ['foo', ''])
+        container.addCallback(self.environKeyEqual('SCRIPT_NAME', '/foo/'))
+
+        internal = self.render('GET', '1.1', ['foo'], ['foo', 'bar'])
+        internal.addCallback(self.environKeyEqual('SCRIPT_NAME', '/foo'))
+
+        unencoded = self.render(
+            'GET', '1.1', ['foo', '/', 'bar\xff'], ['foo', '/', 'bar\xff'])
+        # The RFC says "(not URL-encoded)", even though that makes
+        # interpretation of SCRIPT_NAME ambiguous.
+        unencoded.addCallback(
+            self.environKeyEqual('SCRIPT_NAME', '/foo///bar\xff'))
+
+        return gatherResults([
+                root, emptyChild, leaf, container, internal, unencoded])
+
+
+    def test_pathInfo(self):
+        """
+        The C{'PATH_INFO'} key of the C{environ} C{dict} passed to the
+        application contains the suffix of the request URI path which is not
+        included in the value for the C{'SCRIPT_NAME'} key (RFC 3875, section
+        4.1.5).
+        """
+        assertKeyEmpty = self.environKeyEqual('PATH_INFO', '')
+
+        root = self.render('GET', '1.1', [], [''])
+        root.addCallback(self.environKeyEqual('PATH_INFO', '/'))
+
+        emptyChild = self.render('GET', '1.1', [''], [''])
+        emptyChild.addCallback(assertKeyEmpty)
+
+        leaf = self.render('GET', '1.1', ['foo'], ['foo'])
+        leaf.addCallback(assertKeyEmpty)
+
+        container = self.render('GET', '1.1', ['foo', ''], ['foo', ''])
+        container.addCallback(assertKeyEmpty)
+
+        internalLeaf = self.render('GET', '1.1', ['foo'], ['foo', 'bar'])
+        internalLeaf.addCallback(self.environKeyEqual('PATH_INFO', '/bar'))
+
+        internalContainer = self.render('GET', '1.1', ['foo'], ['foo', ''])
+        internalContainer.addCallback(self.environKeyEqual('PATH_INFO', '/'))
+
+        unencoded = self.render('GET', '1.1', [], ['foo', '/', 'bar\xff'])
+        unencoded.addCallback(
+            self.environKeyEqual('PATH_INFO', '/foo///bar\xff'))
+
+        return gatherResults([
+                root, leaf, container, internalLeaf,
+                internalContainer, unencoded])
+
+
+    def test_queryString(self):
+        """
+        The C{'QUERY_STRING'} key of the C{environ} C{dict} passed to the
+        application contains the portion of the request URI after the first
+        I{?} (RFC 3875, section 4.1.7).
+        """
+        missing = self.render('GET', '1.1', [], [''], None)
+        missing.addCallback(self.environKeyEqual('QUERY_STRING', ''))
+
+        empty = self.render('GET', '1.1', [], [''], [])
+        empty.addCallback(self.environKeyEqual('QUERY_STRING', ''))
+
+        present = self.render('GET', '1.1', [], [''], [('foo', 'bar')])
+        present.addCallback(self.environKeyEqual('QUERY_STRING', 'foo=bar'))
+
+        unencoded = self.render('GET', '1.1', [], [''], [('/', '/')])
+        unencoded.addCallback(self.environKeyEqual('QUERY_STRING', '%2F=%2F'))
+
+        # "?" is reserved in the <searchpart> portion of a URL.  However, it
+        # seems to be a common mistake of clients to forget to quote it.  So,
+        # make sure we handle that invalid case.
+        doubleQuestion = self.render(
+            'GET', '1.1', [], [''], [('foo', '?bar')], safe='?')
+        doubleQuestion.addCallback(
+            self.environKeyEqual('QUERY_STRING', 'foo=?bar'))
+
+        return gatherResults([
+            missing, empty, present, unencoded, doubleQuestion])
+
+
+    def test_contentType(self):
+        """
+        The C{'CONTENT_TYPE'} key of the C{environ} C{dict} passed to the
+        application contains the value of the I{Content-Type} request header
+        (RFC 3875, section 4.1.3).
+        """
+        missing = self.render('GET', '1.1', [], [''])
+        missing.addCallback(self.environKeyEqual('CONTENT_TYPE', ''))
+
+        present = self.render(
+            'GET', '1.1', [], [''], None, [('content-type', 'x-foo/bar')])
+        present.addCallback(self.environKeyEqual('CONTENT_TYPE', 'x-foo/bar'))
+
+        return gatherResults([missing, present])
+
+
+    def test_contentLength(self):
+        """
+        The C{'CONTENT_LENGTH'} key of the C{environ} C{dict} passed to the
+        application contains the value of the I{Content-Length} request header
+        (RFC 3875, section 4.1.2).
+        """
+        missing = self.render('GET', '1.1', [], [''])
+        missing.addCallback(self.environKeyEqual('CONTENT_LENGTH', ''))
+
+        present = self.render(
+            'GET', '1.1', [], [''], None, [('content-length', '1234')])
+        present.addCallback(self.environKeyEqual('CONTENT_LENGTH', '1234'))
+
+        return gatherResults([missing, present])
+
+
+    def test_serverName(self):
+        """
+        The C{'SERVER_NAME'} key of the C{environ} C{dict} passed to the
+        application contains the best determination of the server hostname
+        possible, using either the value of the I{Host} header in the request
+        or the address the server is listening on if that header is not
+        present (RFC 3875, section 4.1.14).
+        """
+        missing = self.render('GET', '1.1', [], [''])
+        # 10.0.0.1 value comes from a bit far away -
+        # twisted.test.test_web.DummyChannel.transport.getHost().host
+        missing.addCallback(self.environKeyEqual('SERVER_NAME', '10.0.0.1'))
+
+        present = self.render(
+            'GET', '1.1', [], [''], None, [('host', 'example.org')])
+        present.addCallback(self.environKeyEqual('SERVER_NAME', 'example.org'))
+
+        return gatherResults([missing, present])
+
+
+    def test_serverPort(self):
+        """
+        The C{'SERVER_PORT'} key of the C{environ} C{dict} passed to the
+        application contains the port number of the server which received the
+        request (RFC 3875, section 4.1.15).
+        """
+        portNumber = 12354
+        def makeChannel():
+            channel = DummyChannel()
+            channel.transport = DummyChannel.TCP()
+            channel.transport.port = portNumber
+            return channel
+        self.channelFactory = makeChannel
+
+        d = self.render('GET', '1.1', [], [''])
+        d.addCallback(self.environKeyEqual('SERVER_PORT', str(portNumber)))
+        return d
+
+
+    def test_serverProtocol(self):
+        """
+        The C{'SERVER_PROTOCOL'} key of the C{environ} C{dict} passed to the
+        application contains the HTTP version number received in the request
+        (RFC 3875, section 4.1.16).
+        """
+        old = self.render('GET', '1.0', [], [''])
+        old.addCallback(self.environKeyEqual('SERVER_PROTOCOL', 'HTTP/1.0'))
+
+        new = self.render('GET', '1.1', [], [''])
+        new.addCallback(self.environKeyEqual('SERVER_PROTOCOL', 'HTTP/1.1'))
+
+        return gatherResults([old, new])
+
+
+    def test_remoteAddr(self):
+        """
+        The C{'REMOTE_ADDR'} key of the C{environ} C{dict} passed to the
+        application contains the address of the client making the request.
+        """
+        d = self.render('GET', '1.1', [], [''])
+        d.addCallback(self.environKeyEqual('REMOTE_ADDR', '192.168.1.1'))
+
+        return d
+
+    def test_headers(self):
+        """
+        HTTP request headers are copied into the C{environ} C{dict} passed to
+        the application with a C{HTTP_} prefix added to their names.
+        """
+        singleValue = self.render(
+            'GET', '1.1', [], [''], None, [('foo', 'bar'), ('baz', 'quux')])
+        def cbRendered((environ, startResponse)):
+            self.assertEqual(environ['HTTP_FOO'], 'bar')
+            self.assertEqual(environ['HTTP_BAZ'], 'quux')
+        singleValue.addCallback(cbRendered)
+
+        multiValue = self.render(
+            'GET', '1.1', [], [''], None, [('foo', 'bar'), ('foo', 'baz')])
+        multiValue.addCallback(self.environKeyEqual('HTTP_FOO', 'bar,baz'))
+
+        withHyphen = self.render(
+            'GET', '1.1', [], [''], None, [('foo-bar', 'baz')])
+        withHyphen.addCallback(self.environKeyEqual('HTTP_FOO_BAR', 'baz'))
+
+        multiLine = self.render(
+            'GET', '1.1', [], [''], None, [('foo', 'bar\n\tbaz')])
+        multiLine.addCallback(self.environKeyEqual('HTTP_FOO', 'bar \tbaz'))
+
+        return gatherResults([singleValue, multiValue, withHyphen, multiLine])
+
+
+    def test_wsgiVersion(self):
+        """
+        The C{'wsgi.version'} key of the C{environ} C{dict} passed to the
+        application has the value C{(1, 0)} indicating that this is a WSGI 1.0
+        container.
+        """
+        versionDeferred = self.render('GET', '1.1', [], [''])
+        versionDeferred.addCallback(self.environKeyEqual('wsgi.version', (1, 0)))
+        return versionDeferred
+
+
+    def test_wsgiRunOnce(self):
+        """
+        The C{'wsgi.run_once'} key of the C{environ} C{dict} passed to the
+        application is set to C{False}.
+        """
+        once = self.render('GET', '1.1', [], [''])
+        once.addCallback(self.environKeyEqual('wsgi.run_once', False))
+        return once
+
+
+    def test_wsgiMultithread(self):
+        """
+        The C{'wsgi.multithread'} key of the C{environ} C{dict} passed to the
+        application is set to C{True}.
+        """
+        thread = self.render('GET', '1.1', [], [''])
+        thread.addCallback(self.environKeyEqual('wsgi.multithread', True))
+        return thread
+
+
+    def test_wsgiMultiprocess(self):
+        """
+        The C{'wsgi.multiprocess'} key of the C{environ} C{dict} passed to the
+        application is set to C{False}.
+        """
+        process = self.render('GET', '1.1', [], [''])
+        process.addCallback(self.environKeyEqual('wsgi.multiprocess', False))
+        return process
+
+
+    def test_wsgiURLScheme(self):
+        """
+        The C{'wsgi.url_scheme'} key of the C{environ} C{dict} passed to the
+        application has the request URL scheme.
+        """
+        # XXX Does this need to be different if the request is for an absolute
+        # URL?
+        def channelFactory():
+            channel = DummyChannel()
+            channel.transport = DummyChannel.SSL()
+            return channel
+
+        self.channelFactory = DummyChannel
+        httpDeferred = self.render('GET', '1.1', [], [''])
+        httpDeferred.addCallback(self.environKeyEqual('wsgi.url_scheme', 'http'))
+
+        self.channelFactory = channelFactory
+        httpsDeferred = self.render('GET', '1.1', [], [''])
+        httpsDeferred.addCallback(self.environKeyEqual('wsgi.url_scheme', 'https'))
+
+        return gatherResults([httpDeferred, httpsDeferred])
+
+
+    def test_wsgiErrors(self):
+        """
+        The C{'wsgi.errors'} key of the C{environ} C{dict} passed to the
+        application is a file-like object (as defined in the U{Input and Errors
+        Streams<http://www.python.org/dev/peps/pep-0333/#input-and-error-streams>}
+        section of PEP 333) which converts bytes written to it into events for
+        the logging system.
+        """
+        events = []
+        addObserver(events.append)
+        self.addCleanup(removeObserver, events.append)
+
+        errors = self.render('GET', '1.1', [], [''])
+        def cbErrors((environ, startApplication)):
+            errors = environ['wsgi.errors']
+            errors.write('some message\n')
+            errors.writelines(['another\nmessage\n'])
+            errors.flush()
+            self.assertEqual(events[0]['message'], ('some message\n',))
+            self.assertEqual(events[0]['system'], 'wsgi')
+            self.assertTrue(events[0]['isError'])
+            self.assertEqual(events[1]['message'], ('another\nmessage\n',))
+            self.assertEqual(events[1]['system'], 'wsgi')
+            self.assertTrue(events[1]['isError'])
+            self.assertEqual(len(events), 2)
+        errors.addCallback(cbErrors)
+        return errors
+
+
+class InputStreamTestMixin(WSGITestsMixin):
+    """
+    A mixin for L{TestCase} subclasses which defines a number of tests against
+    L{_InputStream}.  The subclass is expected to create a file-like object to
+    be wrapped by an L{_InputStream} under test.
+    """
+    def getFileType(self):
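+        """
+        Return the file type (for example L{StringIO.StringIO}) to use for the
+        request content in these tests.  Subclasses must override this.
+        """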
+        raise NotImplementedError(
+            "%s.getFile must be implemented" % (self.__class__.__name__,))
+
+
+    def _renderAndReturnReaderResult(self, reader, content):
+        contentType = self.getFileType()
+        class CustomizedRequest(Request):
+            def gotLength(self, length):
+                # Always allocate a file of the specified type, instead of
+                # using the base behavior of selecting one depending on the
+                # length.
+                self.content = contentType()
+
+        def appFactoryFactory(reader):
+            result = Deferred()
+            def applicationFactory():
+                def application(*args):
+                    environ, startResponse = args
+                    result.callback(reader(environ['wsgi.input']))
+                    startResponse('200 OK', [])
+                    return iter(())
+                return application
+            return result, applicationFactory
+        d, appFactory = appFactoryFactory(reader)
+        self.lowLevelRender(
+            CustomizedRequest, appFactory, DummyChannel,
+            'PUT', '1.1', [], [''], None, [],
+            content)
+        return d
+
+
+    def test_readAll(self):
+        """
+        Calling L{_InputStream.read} with no arguments returns the entire input
+        stream.
+        """
+        bytes = "some bytes are here"
+        d = self._renderAndReturnReaderResult(lambda input: input.read(), bytes)
+        d.addCallback(self.assertEqual, bytes)
+        return d
+
+
+    def test_readSome(self):
+        """
+        Calling L{_InputStream.read} with an integer returns that many bytes
+        from the input stream, as long as it is less than or equal to the total
+        number of bytes available.
+        """
+        bytes = "hello, world."
+        d = self._renderAndReturnReaderResult(lambda input: input.read(3), bytes)
+        d.addCallback(self.assertEqual, "hel")
+        return d
+
+
+    def test_readMoreThan(self):
+        """
+        Calling L{_InputStream.read} with an integer that is greater than the
+        total number of bytes in the input stream returns all bytes in the
+        input stream.
+        """
+        bytes = "some bytes are here"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.read(len(bytes) + 3), bytes)
+        d.addCallback(self.assertEqual, bytes)
+        return d
+
+
+    def test_readTwice(self):
+        """
+        Calling L{_InputStream.read} a second time returns bytes starting from
+        the position after the last byte returned by the previous read.
+        """
+        bytes = "some bytes, hello"
+        def read(input):
+            input.read(3)
+            return input.read()
+        d = self._renderAndReturnReaderResult(read, bytes)
+        d.addCallback(self.assertEqual, bytes[3:])
+        return d
+
+
+    def test_readNone(self):
+        """
+        Calling L{_InputStream.read} with C{None} as an argument returns all
+        bytes in the input stream.
+        """
+        bytes = "the entire stream"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.read(None), bytes)
+        d.addCallback(self.assertEqual, bytes)
+        return d
+
+
+    def test_readNegative(self):
+        """
+        Calling L{_InputStream.read} with a negative integer as an argument
+        returns all bytes in the input stream.
+        """
+        bytes = "all of the input"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.read(-1), bytes)
+        d.addCallback(self.assertEqual, bytes)
+        return d
+
+
+    def test_readline(self):
+        """
+        Calling L{_InputStream.readline} with no argument returns one line from
+        the input stream.
+        """
+        bytes = "hello\nworld"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readline(), bytes)
+        d.addCallback(self.assertEqual, "hello\n")
+        return d
+
+
+    def test_readlineSome(self):
+        """
+        Calling L{_InputStream.readline} with an integer returns at most that
+        many bytes, even if it is not enough to make up a complete line.
+
+        COMPATIBILITY NOTE: the size argument is excluded from the WSGI
+        specification, but is provided here anyhow, because useful libraries
+        such as python stdlib's cgi.py assume their input file-like-object
+        supports readline with a size argument. If you use it, be aware your
+        application may not be portable to other conformant WSGI servers.
+        """
+        bytes = "goodbye\nworld"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readline(3), bytes)
+        d.addCallback(self.assertEqual, "goo")
+        return d
+
+
+    def test_readlineMoreThan(self):
+        """
+        Calling L{_InputStream.readline} with an integer which is greater than
+        the number of bytes in the next line returns only the next line.
+        """
+        bytes = "some lines\nof text"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readline(20), bytes)
+        d.addCallback(self.assertEqual, "some lines\n")
+        return d
+
+
+    def test_readlineTwice(self):
+        """
+        Calling L{_InputStream.readline} a second time returns the line
+        following the line returned by the first call.
+        """
+        bytes = "first line\nsecond line\nlast line"
+        def readline(input):
+            input.readline()
+            return input.readline()
+        d = self._renderAndReturnReaderResult(readline, bytes)
+        d.addCallback(self.assertEqual, "second line\n")
+        return d
+
+
+    def test_readlineNone(self):
+        """
+        Calling L{_InputStream.readline} with C{None} as an argument returns
+        one line from the input stream.
+        """
+        bytes = "this is one line\nthis is another line"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readline(None), bytes)
+        d.addCallback(self.assertEqual, "this is one line\n")
+        return d
+
+
+    def test_readlineNegative(self):
+        """
+        Calling L{_InputStream.readline} with a negative integer as an argument
+        returns one line from the input stream.
+        """
+        bytes = "input stream line one\nline two"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readline(-1), bytes)
+        d.addCallback(self.assertEqual, "input stream line one\n")
+        return d
+
+
+    def test_readlines(self):
+        """
+        Calling L{_InputStream.readlines} with no arguments returns a list of
+        all lines from the input stream.
+        """
+        bytes = "alice\nbob\ncarol"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readlines(), bytes)
+        d.addCallback(self.assertEqual, ["alice\n", "bob\n", "carol"])
+        return d
+
+
+    def test_readlinesSome(self):
+        """
+        Calling L{_InputStream.readlines} with an integer as an argument
+        returns a list of lines from the input stream with the argument serving
+        as an approximate bound on the total number of bytes to read.
+        """
+        bytes = "123\n456\n789\n0"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readlines(5), bytes)
+        def cbLines(lines):
+            # Make sure we got enough lines to make 5 bytes.  Anything beyond
+            # that is fine too.
+            self.assertEqual(lines[:2], ["123\n", "456\n"])
+        d.addCallback(cbLines)
+        return d
+
+
+    def test_readlinesMoreThan(self):
+        """
+        Calling L{_InputStream.readlines} with an integer which is greater than
+        the total number of bytes in the input stream returns a list of all
+        lines from the input.
+        """
+        bytes = "one potato\ntwo potato\nthree potato"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readlines(100), bytes)
+        d.addCallback(
+            self.assertEqual,
+            ["one potato\n", "two potato\n", "three potato"])
+        return d
+
+
+    def test_readlinesAfterRead(self):
+        """
+        Calling L{_InputStream.readlines} after a call to L{_InputStream.read}
+        returns lines starting at the byte after the last byte returned by the
+        C{read} call.
+        """
+        bytes = "hello\nworld\nfoo"
+        def readlines(input):
+            input.read(7)
+            return input.readlines()
+        d = self._renderAndReturnReaderResult(readlines, bytes)
+        d.addCallback(self.assertEqual, ["orld\n", "foo"])
+        return d
+
+
+    def test_readlinesNone(self):
+        """
+        Calling L{_InputStream.readlines} with C{None} as an argument returns
+        all lines from the input.
+        """
+        bytes = "one fish\ntwo fish\n"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readlines(None), bytes)
+        d.addCallback(self.assertEqual, ["one fish\n", "two fish\n"])
+        return d
+
+
+    def test_readlinesNegative(self):
+        """
+        Calling L{_InputStream.readlines} with a negative integer as an
+        argument returns a list of all lines from the input.
+        """
+        bytes = "red fish\nblue fish\n"
+        d = self._renderAndReturnReaderResult(
+            lambda input: input.readlines(-1), bytes)
+        d.addCallback(self.assertEqual, ["red fish\n", "blue fish\n"])
+        return d
+
+
+    def test_iterable(self):
+        """
+        Iterating over L{_InputStream} produces lines from the input stream.
+        """
+        bytes = "green eggs\nand ham\n"
+        d = self._renderAndReturnReaderResult(lambda input: list(input), bytes)
+        d.addCallback(self.assertEqual, ["green eggs\n", "and ham\n"])
+        return d
+
+
+    def test_iterableAfterRead(self):
+        """
+        Iterating over L{_InputStream} after calling L{_InputStream.read}
+        produces lines from the input stream starting from the first byte after
+        the last byte returned by the C{read} call.
+        """
+        bytes = "green eggs\nand ham\n"
+        def iterate(input):
+            input.read(3)
+            return list(input)
+        d = self._renderAndReturnReaderResult(iterate, bytes)
+        d.addCallback(self.assertEqual, ["en eggs\n", "and ham\n"])
+        return d
+
+
+
+class InputStreamStringIOTests(InputStreamTestMixin, TestCase):
+    """
+    Tests for L{_InputStream} when it is wrapped around a L{StringIO.StringIO}.
+    """
+    def getFileType(self):
+        return StringIO.StringIO
+
+
+
+class InputStreamCStringIOTests(InputStreamTestMixin, TestCase):
+    """
+    Tests for L{_InputStream} when it is wrapped around a
+    L{cStringIO.StringIO}.
+    """
+    def getFileType(self):
+        return cStringIO.StringIO
+
+
+
+class InputStreamTemporaryFileTests(InputStreamTestMixin, TestCase):
+    """
+    Tests for L{_InputStream} when it is wrapped around a L{tempfile.TemporaryFile}.
+    """
+    def getFileType(self):
+        return tempfile.TemporaryFile
+
+
+
+class StartResponseTests(WSGITestsMixin, TestCase):
+    """
+    Tests for the I{start_response} parameter passed to the application object
+    by L{WSGIResource}.
+    """
+    def test_status(self):
+        """
+        The response status passed to the I{start_response} callable is written
+        as the status of the response to the request.
+        """
+        channel = DummyChannel()
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('107 Strange message', [])
+                return iter(())
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertTrue(
+                channel.transport.written.getvalue().startswith(
+                    'HTTP/1.1 107 Strange message'))
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+    def _headersTest(self, appHeaders, expectedHeaders):
+        """
+        Verify that if the response headers given by C{appHeaders} are passed
+        to the I{start_response} callable, then the response header lines given
+        by C{expectedHeaders} plus I{Server} and I{Date} header lines are
+        included in the response.
+        """
+        # Make the Date header value deterministic
+        self.patch(http, 'datetimeToString', lambda: 'Tuesday')
+
+        channel = DummyChannel()
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', appHeaders)
+                return iter(())
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            response = channel.transport.written.getvalue()
+            headers, rest = response.split('\r\n\r\n', 1)
+            headerLines = headers.split('\r\n')[1:]
+            headerLines.sort()
+            allExpectedHeaders = expectedHeaders + [
+                'Date: Tuesday',
+                'Server: ' + version,
+                'Transfer-Encoding: chunked']
+            allExpectedHeaders.sort()
+            self.assertEqual(headerLines, allExpectedHeaders)
+
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+        return d
+
+
+    def test_headers(self):
+        """
+        The headers passed to the I{start_response} callable are included in
+        the response as are the required I{Date} and I{Server} headers and the
+        necessary connection (hop to hop) header I{Transfer-Encoding}.
+        """
+        return self._headersTest(
+            [('foo', 'bar'), ('baz', 'quux')],
+            ['Baz: quux', 'Foo: bar'])
+
+
+    def test_applicationProvidedContentType(self):
+        """
+        If I{Content-Type} is included in the headers passed to the
+        I{start_response} callable, one I{Content-Type} header is included in
+        the response.
+        """
+        return self._headersTest(
+            [('content-type', 'monkeys are great')],
+            ['Content-Type: monkeys are great'])
+
+
+    def test_applicationProvidedServerAndDate(self):
+        """
+        If either I{Server} or I{Date} is included in the headers passed to the
+        I{start_response} callable, they are disregarded.
+        """
+        return self._headersTest(
+            [('server', 'foo'), ('Server', 'foo'),
+             ('date', 'bar'), ('dATE', 'bar')],
+            [])
+
+
+    def test_delayedUntilReturn(self):
+        """
+        Nothing is written in response to a request when the I{start_response}
+        callable is invoked.  If the iterator returned by the application
+        object produces only empty strings, the response is written after the
+        last element is produced.
+        """
+        channel = DummyChannel()
+
+        intermediateValues = []
+        def record():
+            intermediateValues.append(channel.transport.written.getvalue())
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', [('foo', 'bar'), ('baz', 'quux')])
+                yield ''
+                record()
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertEqual(intermediateValues, [''])
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+    def test_delayedUntilContent(self):
+        """
+        Nothing is written in response to a request when the I{start_response}
+        callable is invoked.  Once a non-empty string has been produced by the
+        iterator returned by the application object, the response status and
+        headers are written.
+        """
+        channel = DummyChannel()
+
+        intermediateValues = []
+        def record():
+            intermediateValues.append(channel.transport.written.getvalue())
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', [('foo', 'bar')])
+                yield ''
+                record()
+                yield 'foo'
+                record()
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertFalse(intermediateValues[0])
+            self.assertTrue(intermediateValues[1])
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+    def test_content(self):
+        """
+        Content produced by the iterator returned by the application object is
+        written to the request as it is produced.
+        """
+        channel = DummyChannel()
+
+        intermediateValues = []
+        def record():
+            intermediateValues.append(channel.transport.written.getvalue())
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', [('content-length', '6')])
+                yield 'foo'
+                record()
+                yield 'bar'
+                record()
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertEqual(
+                self.getContentFromResponse(intermediateValues[0]),
+                'foo')
+            self.assertEqual(
+                self.getContentFromResponse(intermediateValues[1]),
+                'foobar')
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+    def test_multipleStartResponse(self):
+        """
+        If the I{start_response} callable is invoked multiple times before any
+        data for the response body is produced, the values from the last call
+        are used.
+        """
+        channel = DummyChannel()
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('100 Foo', [])
+                startResponse('200 Bar', [])
+                return iter(())
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertTrue(
+                channel.transport.written.getvalue().startswith(
+                    'HTTP/1.1 200 Bar\r\n'))
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+    def test_startResponseWithException(self):
+        """
+        If the I{start_response} callable is invoked with a third positional
+        argument before the status and headers have been written to the
+        response, the status and headers become the newly supplied values.
+        """
+        channel = DummyChannel()
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('100 Foo', [], (Exception, Exception("foo"), None))
+                return iter(())
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertTrue(
+                channel.transport.written.getvalue().startswith(
+                    'HTTP/1.1 100 Foo\r\n'))
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+    def test_startResponseWithExceptionTooLate(self):
+        """
+        If the I{start_response} callable is invoked with a third positional
+        argument after the status and headers have been written to the
+        response, the supplied I{exc_info} values are re-raised to the
+        application.
+        """
+        channel = DummyChannel()
+
+        class SomeException(Exception):
+            pass
+
+        try:
+            raise SomeException()
+        except:
+            excInfo = exc_info()
+
+        reraised = []
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', [])
+                yield 'foo'
+                try:
+                    startResponse('500 ERR', [], excInfo)
+                except:
+                    reraised.append(exc_info())
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertTrue(
+                channel.transport.written.getvalue().startswith(
+                    'HTTP/1.1 200 OK\r\n'))
+            self.assertEqual(reraised[0][0], excInfo[0])
+            self.assertEqual(reraised[0][1], excInfo[1])
+            self.assertEqual(reraised[0][2].tb_next, excInfo[2])
+
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+    def test_write(self):
+        """
+        I{start_response} returns the I{write} callable which can be used to
+        write bytes to the response body without buffering.
+        """
+        channel = DummyChannel()
+
+        intermediateValues = []
+        def record():
+            intermediateValues.append(channel.transport.written.getvalue())
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                write = startResponse('100 Foo', [('content-length', '6')])
+                write('foo')
+                record()
+                write('bar')
+                record()
+                return iter(())
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertEqual(
+                self.getContentFromResponse(intermediateValues[0]),
+                'foo')
+            self.assertEqual(
+                self.getContentFromResponse(intermediateValues[1]),
+                'foobar')
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+
+class ApplicationTests(WSGITestsMixin, TestCase):
+    """
+    Tests for things which are done to the application object and the iterator
+    it returns.
+    """
+    def enableThreads(self):
+        self.reactor = reactor
+        self.threadpool = ThreadPool()
+        self.threadpool.start()
+        self.addCleanup(self.threadpool.stop)
+
+
+    def test_close(self):
+        """
+        If the application object returns an iterator which also has a I{close}
+        method, that method is called after iteration is complete.
+        """
+        channel = DummyChannel()
+
+        class Result:
+            def __init__(self):
+                self.open = True
+
+            def __iter__(self):
+                for i in range(3):
+                    if self.open:
+                        yield str(i)
+
+            def close(self):
+                self.open = False
+
+        result = Result()
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', [('content-length', '3')])
+                return result
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertEqual(
+                self.getContentFromResponse(
+                    channel.transport.written.getvalue()),
+                '012')
+            self.assertFalse(result.open)
+        d.addCallback(cbRendered)
+
+        self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''])
+
+        return d
+
+
+    def test_applicationCalledInThread(self):
+        """
+        The application object is invoked and iterated in a thread which is not
+        the reactor thread.
+        """
+        self.enableThreads()
+        invoked = []
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                def result():
+                    for i in range(3):
+                        invoked.append(get_ident())
+                        yield str(i)
+                invoked.append(get_ident())
+                startResponse('200 OK', [('content-length', '3')])
+                return result()
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            self.assertNotIn(get_ident(), invoked)
+            self.assertEqual(len(set(invoked)), 1)
+        d.addCallback(cbRendered)
+
+        self.lowLevelRender(
+            requestFactory, applicationFactory,
+            DummyChannel, 'GET', '1.1', [], [''])
+
+        return d
+
+
+    def test_writeCalledFromThread(self):
+        """
+        The I{write} callable returned by I{start_response} calls the request's
+        C{write} method in the reactor thread.
+        """
+        self.enableThreads()
+        invoked = []
+
+        class ThreadVerifier(Request):
+            def write(self, bytes):
+                invoked.append(get_ident())
+                return Request.write(self, bytes)
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                write = startResponse('200 OK', [])
+                write('foo')
+                return iter(())
+            return application
+
+        d, requestFactory = self.requestFactoryFactory(ThreadVerifier)
+        def cbRendered(ignored):
+            self.assertEqual(set(invoked), set([get_ident()]))
+        d.addCallback(cbRendered)
+
+        self.lowLevelRender(
+            requestFactory, applicationFactory, DummyChannel,
+            'GET', '1.1', [], [''])
+
+        return d
+
+
+    def test_iteratedValuesWrittenFromThread(self):
+        """
+        Strings produced by the iterator returned by the application object are
+        written to the request in the reactor thread.
+        """
+        self.enableThreads()
+        invoked = []
+
+        class ThreadVerifier(Request):
+            def write(self, bytes):
+                invoked.append(get_ident())
+                return Request.write(self, bytes)
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', [])
+                yield 'foo'
+            return application
+
+        d, requestFactory = self.requestFactoryFactory(ThreadVerifier)
+        def cbRendered(ignored):
+            self.assertEqual(set(invoked), set([get_ident()]))
+        d.addCallback(cbRendered)
+
+        self.lowLevelRender(
+            requestFactory, applicationFactory, DummyChannel,
+            'GET', '1.1', [], [''])
+
+        return d
+
+
+    def test_statusWrittenFromThread(self):
+        """
+        The response status is set on the request object in the reactor thread.
+        """
+        self.enableThreads()
+        invoked = []
+
+        class ThreadVerifier(Request):
+            def setResponseCode(self, code, message):
+                invoked.append(get_ident())
+                return Request.setResponseCode(self, code, message)
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', [])
+                return iter(())
+            return application
+
+        d, requestFactory = self.requestFactoryFactory(ThreadVerifier)
+        def cbRendered(ignored):
+            self.assertEqual(set(invoked), set([get_ident()]))
+        d.addCallback(cbRendered)
+
+        self.lowLevelRender(
+            requestFactory, applicationFactory, DummyChannel,
+            'GET', '1.1', [], [''])
+
+        return d
+
+
+    def test_connectionClosedDuringIteration(self):
+        """
+        If the request connection is lost while the application object is being
+        iterated, iteration is stopped.
+        """
+        class UnreliableConnection(Request):
+            """
+            This is a request which pretends its connection is lost immediately
+            after the first write is done to it.
+            """
+            def write(self, bytes):
+                self.connectionLost(Failure(ConnectionLost("No more connection")))
+
+        self.badIter = False
+        def appIter():
+            yield "foo"
+            self.badIter = True
+            raise Exception("Should not have gotten here")
+
+        def applicationFactory():
+            def application(environ, startResponse):
+                startResponse('200 OK', [])
+                return appIter()
+            return application
+
+        d, requestFactory = self.requestFactoryFactory(UnreliableConnection)
+        def cbRendered(ignored):
+            self.assertFalse(self.badIter, "Should not have resumed iteration")
+        d.addCallback(cbRendered)
+
+        self.lowLevelRender(
+            requestFactory, applicationFactory, DummyChannel,
+            'GET', '1.1', [], [''])
+
+        return self.assertFailure(d, ConnectionLost)
+
+
+    def _internalServerErrorTest(self, application):
+        channel = DummyChannel()
+
+        def applicationFactory():
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+        def cbRendered(ignored):
+            errors = self.flushLoggedErrors(RuntimeError)
+            self.assertEqual(len(errors), 1)
+
+            self.assertTrue(
+                channel.transport.written.getvalue().startswith(
+                    'HTTP/1.1 500 Internal Server Error'))
+        d.addCallback(cbRendered)
+
+        request = self.lowLevelRender(
+            requestFactory, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        return d
+
+
+    def test_applicationExceptionBeforeStartResponse(self):
+        """
+        If the application raises an exception before calling I{start_response}
+        then the response status is I{500} and the exception is logged.
+        """
+        def application(environ, startResponse):
+            raise RuntimeError("This application had some error.")
+        return self._internalServerErrorTest(application)
+
+
+    def test_applicationExceptionAfterStartResponse(self):
+        """
+        If the application calls I{start_response} but then raises an exception
+        before any data is written to the response then the response status is
+        I{500} and the exception is logged.
+        """
+        def application(environ, startResponse):
+            startResponse('200 OK', [])
+            raise RuntimeError("This application had some error.")
+        return self._internalServerErrorTest(application)
+
+
+    def _connectionClosedTest(self, application, responseContent):
+        channel = DummyChannel()
+
+        def applicationFactory():
+            return application
+
+        d, requestFactory = self.requestFactoryFactory()
+
+        # Capture the request so we can disconnect it later on.
+        requests = []
+        def requestFactoryWrapper(*a, **kw):
+            requests.append(requestFactory(*a, **kw))
+            return requests[-1]
+
+        def ebRendered(ignored):
+            errors = self.flushLoggedErrors(RuntimeError)
+            self.assertEqual(len(errors), 1)
+
+            response = channel.transport.written.getvalue()
+            self.assertTrue(response.startswith('HTTP/1.1 200 OK'))
+            # Chunked transfer-encoding makes this a little messy.
+            self.assertIn(responseContent, response)
+        d.addErrback(ebRendered)
+
+        request = self.lowLevelRender(
+            requestFactoryWrapper, applicationFactory,
+            lambda: channel, 'GET', '1.1', [], [''], None, [])
+
+        # By now the connection should be closed.
+        self.assertTrue(channel.transport.disconnected)
+        # Give it a little push to go the rest of the way.
+        requests[0].connectionLost(Failure(ConnectionLost("All gone")))
+
+        return d
+
+
+    def test_applicationExceptionAfterWrite(self):
+        """
+        If the application raises an exception after the response status has
+        already been sent then the connection is closed and the exception is
+        logged.
+        """
+        responseContent = (
+            'Some bytes, triggering the server to start sending the response')
+
+        def application(environ, startResponse):
+            startResponse('200 OK', [])
+            yield responseContent
+            raise RuntimeError("This application had some error.")
+        return self._connectionClosedTest(application, responseContent)
+
+
+    def test_applicationCloseException(self):
+        """
+        If the application returns a closeable iterator and the C{close} method
+        raises an exception when called then the connection is still closed and
+        the exception is logged.
+        """
+        responseContent = 'foo'
+
+        class Application(object):
+            def __init__(self, environ, startResponse):
+                startResponse('200 OK', [])
+
+            def __iter__(self):
+                yield responseContent
+
+            def close(self):
+                raise RuntimeError("This application had some error.")
+
+        return self._connectionClosedTest(Application, responseContent)
diff --git a/ThirdParty/Twisted/twisted/web/test/test_xml.py b/ThirdParty/Twisted/twisted/web/test/test_xml.py
new file mode 100644
index 0000000..513a943
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_xml.py
@@ -0,0 +1,1105 @@
+# -*- test-case-name: twisted.web.test.test_xml -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Some fairly inadequate testcases for Twisted XML support.
+"""
+
+from twisted.trial.unittest import TestCase
+from twisted.web import sux
+from twisted.web import microdom
+from twisted.web import domhelpers
+
+
+class Sux0r(sux.XMLParser):
+    def __init__(self):
+        self.tokens = []
+
+    def getTagStarts(self):
+        return [token for token in self.tokens if token[0] == 'start']
+
+    def gotTagStart(self, name, attrs):
+        self.tokens.append(("start", name, attrs))
+
+    def gotText(self, text):
+        self.tokens.append(("text", text))
+
+class SUXTest(TestCase):
+
+    def testBork(self):
+        s = "<bork><bork><bork>"
+        ms = Sux0r()
+        ms.connectionMade()
+        ms.dataReceived(s)
+        self.assertEqual(len(ms.getTagStarts()),3)
+
+
+class MicroDOMTest(TestCase):
+
+    def test_leadingTextDropping(self):
+        """
+        Make sure that, if there's no top-level node, lenient mode won't
+        drop leading text that's outside of any elements.
+        """
+        s = "Hi orders! <br>Well. <br>"
+        d = microdom.parseString(s, beExtremelyLenient=True)
+        self.assertEqual(d.firstChild().toxml(),
+                          '<html>Hi orders! <br />Well. <br /></html>')
+
+    def test_trailingTextDropping(self):
+        """
+        Ensure that *trailing* text in a malformed document with no
+        top-level element is not dropped.
+        """
+        s = "<br>Hi orders!"
+        d = microdom.parseString(s, beExtremelyLenient=True)
+        self.assertEqual(d.firstChild().toxml(),
+                          '<html><br />Hi orders!</html>')
+
+
+    def test_noTags(self):
+        """
+        A string with nothing that looks like a tag at all should just
+        be parsed as body text.
+        """
+        s = "Hi orders!"
+        d = microdom.parseString(s, beExtremelyLenient=True)
+        self.assertEqual(d.firstChild().toxml(),
+                          "<html>Hi orders!</html>")
+
+
+    def test_surroundingCrap(self):
+        """
+        If a document is surrounded by non-XML text, that text should
+        remain in the parsed XML.
+        """
+        s = "Hi<br> orders!"
+        d = microdom.parseString(s, beExtremelyLenient=True)
+        self.assertEqual(d.firstChild().toxml(),
+                          "<html>Hi<br /> orders!</html>")
+
+
+    def testCaseSensitiveSoonCloser(self):
+        s = """
+              <HTML><BODY>
+              <P ALIGN="CENTER">
+                <A HREF="http://www.apache.org/"><IMG SRC="/icons/apache_pb.gif"></A>
+              </P>
+
+              <P>
+                This is an insane set of text nodes that should NOT be gathered under
+                the A tag above.
+              </P>
+              </BODY></HTML>
+            """
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        l = domhelpers.findNodesNamed(d.documentElement, 'a')
+        n = domhelpers.gatherTextNodes(l[0],1).replace('&nbsp;',' ')
+        self.assertEqual(n.find('insane'), -1)
+
+
+    def test_lenientParenting(self):
+        """
+        Test that C{parentNode} attributes are set to meaningful values when
+        we are parsing HTML that lacks a root node.
+        """
+        # Spare the rod, ruin the child.
+        s = "<br/><br/>"
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        self.assertIdentical(d.documentElement,
+                             d.documentElement.firstChild().parentNode)
+
+
+    def test_lenientParentSingle(self):
+        """
+        Test that the C{parentNode} attribute is set to a meaningful value
+        when we parse an HTML document that has a non-Element root node.
+        """
+        s = "Hello"
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        self.assertIdentical(d.documentElement,
+                             d.documentElement.firstChild().parentNode)
+
+
+    def testUnEntities(self):
+        s = """
+                <HTML>
+                    This HTML goes between Stupid <=CrAzY!=> Dumb.
+                </HTML>
+            """
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        n = domhelpers.gatherTextNodes(d)
+        self.assertNotEquals(n.find('>'), -1)
+
+    def testEmptyError(self):
+        self.assertRaises(sux.ParseError, microdom.parseString, "")
+
+    def testTameDocument(self):
+        s = """
+        <test>
+         <it>
+          <is>
+           <a>
+            test
+           </a>
+          </is>
+         </it>
+        </test>
+        """
+        d = microdom.parseString(s)
+        self.assertEqual(
+            domhelpers.gatherTextNodes(d.documentElement).strip() ,'test')
+
+    def testAwfulTagSoup(self):
+        s = """
+        <html>
+        <head><title> I send you this message to have your advice!!!!</titl e
+        </headd>
+
+        <body bgcolor alink hlink vlink>
+
+        <h1><BLINK>SALE</blINK> TWENTY MILLION EMAILS & FUR COAT NOW
+        FREE WITH `ENLARGER'</h1>
+
+        YES THIS WONDERFUL AWFER IS NOW HERER!!!
+
+        <script LANGUAGE="javascript">
+function give_answers() {
+if (score < 70) {
+alert("I hate you");
+}}
+        </script><a href=/foo.com/lalal name=foo>lalal</a>
+        </body>
+        </HTML>
+        """
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        l = domhelpers.findNodesNamed(d.documentElement, 'blink')
+        self.assertEqual(len(l), 1)
+
+    def testScriptLeniency(self):
+        s = """
+        <script>(foo < bar) and (bar > foo)</script>
+        <script language="javascript">foo </scrip bar </script>
+        <script src="foo">
+        <script src="foo">baz</script>
+        <script /><script></script>
+        """
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        self.assertEqual(d.firstChild().firstChild().firstChild().data,
+                          "(foo < bar) and (bar > foo)")
+        self.assertEqual(
+            d.firstChild().getElementsByTagName("script")[1].firstChild().data,
+            "foo </scrip bar ")
+
+    def testScriptLeniencyIntelligence(self):
+        # if there is a comment or CDATA in a script, the autoquoting in
+        # beExtremelyLenient mode should not happen
+        s = """<script><!-- lalal --></script>"""
+        self.assertEqual(
+            microdom.parseString(s, beExtremelyLenient=1).firstChild().toxml(), s)
+        s = """<script><![CDATA[lalal]]></script>"""
+        self.assertEqual(
+            microdom.parseString(s, beExtremelyLenient=1).firstChild().toxml(), s)
+        s = """<script> // <![CDATA[
+        lalal
+        //]]></script>"""
+        self.assertEqual(
+            microdom.parseString(s, beExtremelyLenient=1).firstChild().toxml(), s)
+
+    def testPreserveCase(self):
+        s = '<eNcApSuLaTe><sUxor></sUxor><bOrk><w00T>TeXt</W00t></BoRk></EnCaPsUlAtE>'
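+        # Lower-case everything in s, then restore the original mixed case
+        # of the 'TeXt' text node.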
+        s2 = s.lower().replace('text', 'TeXt')
+        # these are the only two option permutations that *can* parse the above
+        d = microdom.parseString(s, caseInsensitive=1, preserveCase=1)
+        d2 = microdom.parseString(s, caseInsensitive=1, preserveCase=0)
+        # caseInsensitive=0 preserveCase=0 is not valid, it's converted to
+        # caseInsensitive=0 preserveCase=1
+        d3 = microdom.parseString(s2, caseInsensitive=0, preserveCase=1)
+        d4 = microdom.parseString(s2, caseInsensitive=1, preserveCase=0)
+        d5 = microdom.parseString(s2, caseInsensitive=1, preserveCase=1)
+        # this is slightly contrived, toxml() doesn't need to be identical
+        # for the documents to be equivalent (i.e. <b></b> to <b/>),
+        # however this assertion tests preserving case for start and
+        # end tags while still matching stuff like <bOrk></BoRk>
+        self.assertEqual(d.documentElement.toxml(), s)
+        self.assert_(d.isEqualToDocument(d2), "%r != %r" % (d.toxml(), d2.toxml()))
+        self.assert_(d2.isEqualToDocument(d3), "%r != %r" % (d2.toxml(), d3.toxml()))
+        # caseInsensitive=0 on the left, NOT preserveCase=1 on the right
+        ## XXX THIS TEST IS TURNED OFF UNTIL SOMEONE WHO CARES ABOUT FIXING IT DOES
+        #self.failIf(d3.isEqualToDocument(d2), "%r == %r" % (d3.toxml(), d2.toxml()))
+        self.assert_(d3.isEqualToDocument(d4), "%r != %r" % (d3.toxml(), d4.toxml()))
+        self.assert_(d4.isEqualToDocument(d5), "%r != %r" % (d4.toxml(), d5.toxml()))
+
+    def testDifferentQuotes(self):
+        s = '<test a="a" b=\'b\' />'
+        d = microdom.parseString(s)
+        e = d.documentElement
+        self.assertEqual(e.getAttribute('a'), 'a')
+        self.assertEqual(e.getAttribute('b'), 'b')
+
+    def testLinebreaks(self):
+        s = '<test \na="a"\n\tb="#b" />'
+        d = microdom.parseString(s)
+        e = d.documentElement
+        self.assertEqual(e.getAttribute('a'), 'a')
+        self.assertEqual(e.getAttribute('b'), '#b')
+
+    def testMismatchedTags(self):
+        for s in '<test>', '<test> </tset>', '</test>':
+            self.assertRaises(microdom.MismatchedTags, microdom.parseString, s)
+
+    def testComment(self):
+        s = "<bar><!--<foo />--></bar>"
+        d = microdom.parseString(s)
+        e = d.documentElement
+        self.assertEqual(e.nodeName, "bar")
+        c = e.childNodes[0]
+        self.assert_(isinstance(c, microdom.Comment))
+        self.assertEqual(c.value, "<foo />")
+        c2 = c.cloneNode()
+        self.assert_(c is not c2)
+        self.assertEqual(c2.toxml(), "<!--<foo />-->")
+
+    def testText(self):
+        d = microdom.parseString("<bar>xxxx</bar>").documentElement
+        text = d.childNodes[0]
+        self.assert_(isinstance(text, microdom.Text))
+        self.assertEqual(text.value, "xxxx")
+        clone = text.cloneNode()
+        self.assert_(clone is not text)
+        self.assertEqual(clone.toxml(), "xxxx")
+
+    def testEntities(self):
+        nodes = microdom.parseString("<b>&&#12AB;</b>").documentElement.childNodes
+        self.assertEqual(len(nodes), 2)
+        self.assertEqual(nodes[0].data, "&")
+        self.assertEqual(nodes[1].data, "&#12AB;")
+        self.assertEqual(nodes[0].cloneNode().toxml(), "&")
+        for n in nodes:
+            self.assert_(isinstance(n, microdom.EntityReference))
+
+    def testCData(self):
+        s = '<x><![CDATA[</x>\r\n & foo]]></x>'
+        cdata = microdom.parseString(s).documentElement.childNodes[0]
+        self.assert_(isinstance(cdata, microdom.CDATASection))
+        self.assertEqual(cdata.data, "</x>\r\n & foo")
+        self.assertEqual(cdata.cloneNode().toxml(), "<![CDATA[</x>\r\n & foo]]>")
+
+    def testSingletons(self):
+        s = "<foo><b/><b /><b\n/></foo>"
+        s2 = "<foo><b/><b/><b/></foo>"
+        nodes = microdom.parseString(s).documentElement.childNodes
+        nodes2 = microdom.parseString(s2).documentElement.childNodes
+        self.assertEqual(len(nodes), 3)
+        for (n, n2) in zip(nodes, nodes2):
+            self.assert_(isinstance(n, microdom.Element))
+            self.assertEqual(n.nodeName, "b")
+            self.assert_(n.isEqualToNode(n2))
+
+    def testAttributes(self):
+        s = '<foo a="b" />'
+        node = microdom.parseString(s).documentElement
+
+        self.assertEqual(node.getAttribute("a"), "b")
+        self.assertEqual(node.getAttribute("c"), None)
+        self.assert_(node.hasAttribute("a"))
+        self.assert_(not node.hasAttribute("c"))
+        a = node.getAttributeNode("a")
+        self.assertEqual(a.value, "b")
+
+        node.setAttribute("foo", "bar")
+        self.assertEqual(node.getAttribute("foo"), "bar")
+
+    def testChildren(self):
+        s = "<foo><bar /><baz /><bax>foo</bax></foo>"
+        d = microdom.parseString(s).documentElement
+        self.assertEqual([n.nodeName for n in d.childNodes], ["bar", "baz", "bax"])
+        self.assertEqual(d.lastChild().nodeName, "bax")
+        self.assertEqual(d.firstChild().nodeName, "bar")
+        self.assert_(d.hasChildNodes())
+        self.assert_(not d.firstChild().hasChildNodes())
+
+    def testMutate(self):
+        s = "<foo />"
+        s1 = '<foo a="b"><bar/><foo/></foo>'
+        s2 = '<foo a="b">foo</foo>'
+        d = microdom.parseString(s).documentElement
+        d1 = microdom.parseString(s1).documentElement
+        d2 = microdom.parseString(s2).documentElement
+
+        d.appendChild(d.cloneNode())
+        d.setAttribute("a", "b")
+        child = d.childNodes[0]
+        self.assertEqual(child.getAttribute("a"), None)
+        self.assertEqual(child.nodeName, "foo")
+
+        d.insertBefore(microdom.Element("bar"), child)
+        self.assertEqual(d.childNodes[0].nodeName, "bar")
+        self.assertEqual(d.childNodes[1], child)
+        for n in d.childNodes:
+            self.assertEqual(n.parentNode, d)
+        self.assert_(d.isEqualToNode(d1))
+
+        d.removeChild(child)
+        self.assertEqual(len(d.childNodes), 1)
+        self.assertEqual(d.childNodes[0].nodeName, "bar")
+
+        t = microdom.Text("foo")
+        d.replaceChild(t, d.firstChild())
+        self.assertEqual(d.firstChild(), t)
+        self.assert_(d.isEqualToNode(d2))
+
+
+    def test_replaceNonChild(self):
+        """
+        L{Node.replaceChild} raises L{ValueError} if the node given to be
+        replaced is not a child of the node C{replaceChild} is called on.
+        """
+        parent = microdom.parseString('<foo />')
+        orphan = microdom.parseString('<bar />')
+        replacement = microdom.parseString('<baz />')
+
+        self.assertRaises(
+            ValueError, parent.replaceChild, replacement, orphan)
+
+
+    def testSearch(self):
+        s = "<foo><bar id='me' /><baz><foo /></baz></foo>"
+        s2 = "<fOo><bAr id='me' /><bAz><fOO /></bAz></fOo>"
+        d = microdom.parseString(s)
+        d2 = microdom.parseString(s2, caseInsensitive=0, preserveCase=1)
+        d3 = microdom.parseString(s2, caseInsensitive=1, preserveCase=1)
+
+        root = d.documentElement
+        self.assertEqual(root.firstChild(), d.getElementById('me'))
+        self.assertEqual(d.getElementsByTagName("foo"),
+                          [root, root.lastChild().firstChild()])
+
+        root = d2.documentElement
+        self.assertEqual(root.firstChild(), d2.getElementById('me'))
+        self.assertEqual(d2.getElementsByTagName('fOo'), [root])
+        self.assertEqual(d2.getElementsByTagName('fOO'),
+                          [root.lastChild().firstChild()])
+        self.assertEqual(d2.getElementsByTagName('foo'), [])
+
+        root = d3.documentElement
+        self.assertEqual(root.firstChild(), d3.getElementById('me'))
+        self.assertEqual(d3.getElementsByTagName('FOO'),
+                          [root, root.lastChild().firstChild()])
+        self.assertEqual(d3.getElementsByTagName('fOo'),
+                          [root, root.lastChild().firstChild()])
+
+    def testDoctype(self):
+        s = ('<?xml version="1.0"?>'
+        '<!DOCTYPE foo PUBLIC "baz" "http://www.example.com/example.dtd">'
+        '<foo></foo>')
+        s2 = '<foo/>'
+        d = microdom.parseString(s)
+        d2 = microdom.parseString(s2)
+        self.assertEqual(d.doctype,
+                          'foo PUBLIC "baz" "http://www.example.com/example.dtd"')
+        self.assertEqual(d.toxml(), s)
+        self.failIf(d.isEqualToDocument(d2))
+        self.failUnless(d.documentElement.isEqualToNode(d2.documentElement))
+
+    samples = [("<img/>", "<img />"),
+               ("<foo A='b'>x</foo>", '<foo A="b">x</foo>'),
+               ("<foo><BAR /></foo>", "<foo><BAR></BAR></foo>"),
+               ("<foo>hello there & yoyoy</foo>",
+                "<foo>hello there & yoyoy</foo>"),
+               ]
+
+    def testOutput(self):
+        for s, out in self.samples:
+            d = microdom.parseString(s, caseInsensitive=0)
+            d2 = microdom.parseString(out, caseInsensitive=0)
+            testOut = d.documentElement.toxml()
+            self.assertEqual(out, testOut)
+            self.assert_(d.isEqualToDocument(d2))
+
+    def testErrors(self):
+        for s in ["<foo>&am</foo>", "<foo", "<f>&</f>", "<() />"]:
+            self.assertRaises(Exception, microdom.parseString, s)
+
+    def testCaseInsensitive(self):
+        s  = "<foo a='b'><BAx>x</bax></FOO>"
+        s2 = '<foo a="b"><bax>x</bax></foo>'
+        s3 = "<FOO a='b'><BAx>x</BAx></FOO>"
+        s4 = "<foo A='b'>x</foo>"
+        d = microdom.parseString(s)
+        d2 = microdom.parseString(s2)
+        d3 = microdom.parseString(s3, caseInsensitive=1)
+        d4 = microdom.parseString(s4, caseInsensitive=1, preserveCase=1)
+        d5 = microdom.parseString(s4, caseInsensitive=1, preserveCase=0)
+        d6 = microdom.parseString(s4, caseInsensitive=0, preserveCase=0)
+        out = microdom.parseString(s).documentElement.toxml()
+        self.assertRaises(microdom.MismatchedTags, microdom.parseString,
+            s, caseInsensitive=0)
+        self.assertEqual(out, s2)
+        self.failUnless(d.isEqualToDocument(d2))
+        self.failUnless(d.isEqualToDocument(d3))
+        self.failUnless(d4.documentElement.hasAttribute('a'))
+        self.failIf(d6.documentElement.hasAttribute('a'))
+        self.assertEqual(d4.documentElement.toxml(), '<foo A="b">x</foo>')
+        self.assertEqual(d5.documentElement.toxml(), '<foo a="b">x</foo>')
+    def testEatingWhitespace(self):
+        s = """<hello>
+        </hello>"""
+        d = microdom.parseString(s)
+        self.failUnless(not d.documentElement.hasChildNodes(),
+                        d.documentElement.childNodes)
+        self.failUnless(d.isEqualToDocument(microdom.parseString('<hello></hello>')))
+
+    def testLenientAmpersand(self):
+        prefix = "<?xml version='1.0'?>"
+        # we use <pre> so space will be preserved
+        for i, o in [("&", "&"),
+                     ("& ", "& "),
+                     ("&", "&"),
+                     ("&hello monkey", "&hello monkey")]:
+            d = microdom.parseString("%s<pre>%s</pre>"
+                                     % (prefix, i), beExtremelyLenient=1)
+            self.assertEqual(d.documentElement.toxml(), "<pre>%s</pre>" % o)
+        # non-space preserving
+        d = microdom.parseString("<t>hello & there</t>", beExtremelyLenient=1)
+        self.assertEqual(d.documentElement.toxml(), "<t>hello & there</t>")
+
+    def testInsensitiveLenient(self):
+        # testing issue #537
+        d = microdom.parseString(
+            "<?xml version='1.0'?><bar><xA><y>c</Xa> <foo></bar>",
+            beExtremelyLenient=1)
+        self.assertEqual(d.documentElement.firstChild().toxml(), "<xa><y>c</y></xa>")
+
+    def testLaterCloserSimple(self):
+        s = "<ul><li>foo<li>bar<li>baz</ul>"
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        expected = "<ul><li>foo</li><li>bar</li><li>baz</li></ul>"
+        actual = d.documentElement.toxml()
+        self.assertEqual(expected, actual)
+
+    def testLaterCloserCaseInsensitive(self):
+        s = "<DL><p><DT>foo<DD>bar</DL>"
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        expected = "<dl><p></p><dt>foo</dt><dd>bar</dd></dl>"
+        actual = d.documentElement.toxml()
+        self.assertEqual(expected, actual)
+
+    def testLaterCloserTable(self):
+        s = ("<table>"
+             "<tr><th>name<th>value<th>comment"
+             "<tr><th>this<td>tag<td>soup"
+             "<tr><th>must<td>be<td>handled"
+             "</table>")
+        expected = ("<table>"
+                    "<tr><th>name</th><th>value</th><th>comment</th></tr>"
+                    "<tr><th>this</th><td>tag</td><td>soup</td></tr>"
+                    "<tr><th>must</th><td>be</td><td>handled</td></tr>"
+                    "</table>")
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        actual = d.documentElement.toxml()
+        self.assertEqual(expected, actual)
+    testLaterCloserTable.todo = "Table parsing needs to be fixed."
+
+    def testLaterCloserDL(self):
+        s = ("<dl>"
+             "<dt>word<dd>definition"
+             "<dt>word<dt>word<dd>definition<dd>definition"
+             "</dl>")
+        expected = ("<dl>"
+                    "<dt>word</dt><dd>definition</dd>"
+                    "<dt>word</dt><dt>word</dt><dd>definition</dd><dd>definition</dd>"
+                    "</dl>")
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        actual = d.documentElement.toxml()
+        self.assertEqual(expected, actual)
+
+    def testLaterCloserDL2(self):
+        s = ("<dl>"
+             "<dt>word<dd>definition<p>more definition"
+             "<dt>word"
+             "</dl>")
+        expected = ("<dl>"
+                    "<dt>word</dt><dd>definition<p>more definition</p></dd>"
+                    "<dt>word</dt>"
+                    "</dl>")
+        d = microdom.parseString(s, beExtremelyLenient=1)
+        actual = d.documentElement.toxml()
+        self.assertEqual(expected, actual)
+
+    testLaterCloserDL2.todo = "unclosed <p> messes it up."
+
+    def testUnicodeTolerance(self):
+        import struct
+        s = '<foo><bar><baz /></bar></foo>'
+        j =(u'<?xml version="1.0" encoding="UCS-2" ?>\r\n<JAPANESE>\r\n'
+            u'<TITLE>\u5c02\u9580\u5bb6\u30ea\u30b9\u30c8 </TITLE></JAPANESE>')
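+        # j2 is the same document as j, already encoded as little-endian
+        # UTF-16 bytes (note the leading '\xff\xfe' byte order mark).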
+        j2=('\xff\xfe<\x00?\x00x\x00m\x00l\x00 \x00v\x00e\x00r\x00s\x00i\x00o'
+            '\x00n\x00=\x00"\x001\x00.\x000\x00"\x00 \x00e\x00n\x00c\x00o\x00d'
+            '\x00i\x00n\x00g\x00=\x00"\x00U\x00C\x00S\x00-\x002\x00"\x00 \x00?'
+            '\x00>\x00\r\x00\n\x00<\x00J\x00A\x00P\x00A\x00N\x00E\x00S\x00E'
+            '\x00>\x00\r\x00\n\x00<\x00T\x00I\x00T\x00L\x00E\x00>\x00\x02\\'
+            '\x80\x95\xb6[\xea0\xb90\xc80 \x00<\x00/\x00T\x00I\x00T\x00L\x00E'
+            '\x00>\x00<\x00/\x00J\x00A\x00P\x00A\x00N\x00E\x00S\x00E\x00>\x00')
+        def reverseBytes(s):
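+            # Swap the byte order of every 16-bit code unit, turning
+            # little-endian UTF-16 data into big-endian and vice versa.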
+            fmt = str(len(s) // 2) + 'H'
+            return struct.pack('<' + fmt, *struct.unpack('>' + fmt, s))
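+        # Parse the byte-swapped UTF-16 form (urd), the native UTF-16 form
+        # (ud) and the plain string form (sd); all should be equivalent.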
+        urd = microdom.parseString(reverseBytes(s.encode('UTF-16')))
+        ud = microdom.parseString(s.encode('UTF-16'))
+        sd = microdom.parseString(s)
+        self.assert_(ud.isEqualToDocument(sd))
+        self.assert_(ud.isEqualToDocument(urd))
+        ud = microdom.parseString(j)
+        urd = microdom.parseString(reverseBytes(j2))
+        sd = microdom.parseString(j2)
+        self.assert_(ud.isEqualToDocument(sd))
+        self.assert_(ud.isEqualToDocument(urd))
+
+        # test that raw text still gets encoded
+        # test that comments get encoded
+        j3=microdom.parseString(u'<foo/>')
+        hdr='<?xml version="1.0"?>'
+        div=microdom.lmx().text(u'\u221a', raw=1).node
+        de=j3.documentElement
+        de.appendChild(div)
+        de.appendChild(j3.createComment(u'\u221a'))
+        self.assertEqual(j3.toxml(), hdr+
+                          u'<foo><div>\u221a</div><!--\u221a--></foo>'.encode('utf8'))
+
+    def testNamedChildren(self):
+        tests = {"<foo><bar /><bar unf='1' /><bar>asdfadsf</bar>"
+                         "<bam/></foo>" : 3,
+                 '<foo>asdf</foo>' : 0,
+                 '<foo><bar><bar></bar></bar></foo>' : 1,
+                 }
+        for t in tests.keys():
+            node = microdom.parseString(t).documentElement
+            result = domhelpers.namedChildren(node, 'bar')
+            self.assertEqual(len(result), tests[t])
+            if result:
+                self.assert_(hasattr(result[0], 'tagName'))
+
+    def testCloneNode(self):
+        s = '<foo a="b"><bax>x</bax></foo>'
+        node = microdom.parseString(s).documentElement
+        clone = node.cloneNode(deep=1)
+        self.failIfEquals(node, clone)
+        self.assertEqual(len(node.childNodes), len(clone.childNodes))
+        c1, c2 = node.firstChild(), clone.firstChild()
+        self.failIfEquals(c1, c2)
+        self.assertEqual(len(c1.childNodes), len(c2.childNodes))
+        self.failIfEquals(c1.firstChild(), c2.firstChild())
+        self.assertEqual(s, clone.toxml())
+        self.assertEqual(node.namespace, clone.namespace)
+
+    def testCloneDocument(self):
+        s = ('<?xml version="1.0"?>'
+             '<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"'
+             '"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd"><foo></foo>')
+
+        node = microdom.parseString(s)
+        clone = node.cloneNode(deep=1)
+        self.failIfEquals(node, clone)
+        self.assertEqual(len(node.childNodes), len(clone.childNodes))
+        self.assertEqual(s, clone.toxml())
+
+        self.failUnless(clone.isEqualToDocument(node))
+        self.failUnless(node.isEqualToDocument(clone))
+
+
+    def testLMX(self):
+        n = microdom.Element("p")
+        lmx = microdom.lmx(n)
+        lmx.text("foo")
+        b = lmx.b(a="c")
+        b.foo()["z"] = "foo"
+        b.foo()
+        b.add("bar", c="y")
+
+        s = '<p>foo<b a="c"><foo z="foo"></foo><foo></foo><bar c="y"></bar></b></p>'
+        self.assertEqual(s, n.toxml())
+
+    def testDict(self):
+        n = microdom.Element("p")
+        d = {n : 1} # will fail if Element is unhashable
+
+    def testEscaping(self):
+        # issue 590
+        raw = "&'some \"stuff\"', <what up?>"
+        cooked = "&'some "stuff"', <what up?>"
+        esc1 = microdom.escape(raw)
+        self.assertEqual(esc1, cooked)
+        self.assertEqual(microdom.unescape(esc1), raw)
+
+    def testNamespaces(self):
+        s = '''
+        <x xmlns="base">
+        <y />
+        <y q="1" x:q="2" y:q="3" />
+        <y:y xml:space="1">here is    some space </y:y>
+        <y:y />
+        <x:y />
+        </x>
+        '''
+        d = microdom.parseString(s)
+        # at least make sure it doesn't traceback
+        s2 = d.toprettyxml()
+        self.assertEqual(d.documentElement.namespace,
+                          "base")
+        self.assertEqual(d.documentElement.getElementsByTagName("y")[0].namespace,
+                          "base")
+        self.assertEqual(
+            d.documentElement.getElementsByTagName("y")[1].getAttributeNS('base','q'),
+            '1')
+
+        d2 = microdom.parseString(s2)
+        self.assertEqual(d2.documentElement.namespace,
+                          "base")
+        self.assertEqual(d2.documentElement.getElementsByTagName("y")[0].namespace,
+                          "base")
+        self.assertEqual(
+            d2.documentElement.getElementsByTagName("y")[1].getAttributeNS('base','q'),
+            '1')
+
+    def testNamespaceDelete(self):
+        """
+        Test that C{toxml} can support xml structures that remove namespaces.
+        """
+        s1 = ('<?xml version="1.0"?><html xmlns="http://www.w3.org/TR/REC-html40">'
+              '<body xmlns=""></body></html>')
+        s2 = microdom.parseString(s1).toxml()
+        self.assertEqual(s1, s2)
+
+    def testNamespaceInheritance(self):
+        """
+        Check that unspecified namespace is a thing separate from undefined
+        namespace. This test added after discovering some weirdness in Lore.
+        """
+        # will only work if childNodes is mutated. not sure why.
+        child = microdom.Element('ol')
+        parent = microdom.Element('div', namespace='http://www.w3.org/1999/xhtml')
+        parent.childNodes = [child]
+        self.assertEqual(parent.toxml(),
+                          '<div xmlns="http://www.w3.org/1999/xhtml"><ol></ol></div>')
+
+    def test_prefixedTags(self):
+        """
+        XML elements whose names use a prefix declared on an enclosing
+        element have a start-tag of C{"<prefix:tag>"} and an end-tag of
+        C{"</prefix:tag>"}.
+
+        Refer to U{http://www.w3.org/TR/xml-names/#ns-using} for details.
+        """
+        outerNamespace = "http://example.com/outer"
+        innerNamespace = "http://example.com/inner"
+
+        document = microdom.Document()
+        # Create the root in one namespace.  Microdom will probably make this
+        # the default namespace.
+        root = document.createElement("root", namespace=outerNamespace)
+
+        # Give the root some prefixes to use.
+        root.addPrefixes({innerNamespace: "inner"})
+
+        # Append a child to the root from the namespace that prefix is bound
+        # to.
+        tag = document.createElement("tag", namespace=innerNamespace)
+
+        # Give that tag a child too.  This way we test rendering of tags with
+        # children and without children.
+        child = document.createElement("child", namespace=innerNamespace)
+
+        tag.appendChild(child)
+        root.appendChild(tag)
+        document.appendChild(root)
+
+        # ok, the xml should appear like this
+        xmlOk = (
+            '<?xml version="1.0"?>'
+            '<root xmlns="http://example.com/outer" '
+            'xmlns:inner="http://example.com/inner">'
+            '<inner:tag><inner:child></inner:child></inner:tag>'
+            '</root>')
+
+        xmlOut = document.toxml()
+        self.assertEqual(xmlOut, xmlOk)
+
+
+    def test_prefixPropagation(self):
+        """
+        Children of prefixed tags respect the default namespace at the point
+        where they are rendered.  Specifically, they are not influenced by the
+        prefix of their parent as that prefix has no bearing on them.
+
+        See U{http://www.w3.org/TR/xml-names/#scoping} for details.
+
+        To further clarify the matter, the following::
+
+            <root xmlns="http://example.com/ns/test">
+                <mytag xmlns="http://example.com/ns/mytags">
+                    <mysubtag xmlns="http://example.com/ns/mytags">
+                        <element xmlns="http://example.com/ns/test"></element>
+                    </mysubtag>
+                </mytag>
+            </root>
+
+        Should become this after all the namespace declarations have been
+        I{moved up}::
+
+            <root xmlns="http://example.com/ns/test"
+                  xmlns:mytags="http://example.com/ns/mytags">
+                <mytags:mytag>
+                    <mytags:mysubtag>
+                        <element></element>
+                    </mytags:mysubtag>
+                </mytags:mytag>
+            </root>
+        """
+        outerNamespace = "http://example.com/outer"
+        innerNamespace = "http://example.com/inner"
+
+        document = microdom.Document()
+        # creates a root element
+        root = document.createElement("root", namespace=outerNamespace)
+        document.appendChild(root)
+
+        # Create a child with a specific namespace with a prefix bound to it.
+        root.addPrefixes({innerNamespace: "inner"})
+        mytag = document.createElement("mytag",namespace=innerNamespace)
+        root.appendChild(mytag)
+
+        # Create a child of that which has the outer namespace.
+        mysubtag = document.createElement("mysubtag", namespace=outerNamespace)
+        mytag.appendChild(mysubtag)
+
+        xmlOk = (
+            '<?xml version="1.0"?>'
+            '<root xmlns="http://example.com/outer" '
+            'xmlns:inner="http://example.com/inner">'
+            '<inner:mytag>'
+            '<mysubtag></mysubtag>'
+            '</inner:mytag>'
+            '</root>'
+        )
+        xmlOut = document.toxml()
+        self.assertEqual(xmlOut, xmlOk)
+
+
+
+class TestBrokenHTML(TestCase):
+    """
+    Tests for when microdom encounters very bad HTML and C{beExtremelyLenient}
+    is enabled. These tests are inspired by some HTML generated by a mailer,
+    which breaks up very long lines by splitting them with '!\n '. The expected
+    behaviour is loosely modelled on the way Firefox treats very bad HTML.
+    """
+
+    def checkParsed(self, input, expected, beExtremelyLenient=1):
+        """
+        Check that C{input}, when parsed, produces a DOM where the XML
+        of the document element is equal to C{expected}.
+        """
+        output = microdom.parseString(input,
+                                      beExtremelyLenient=beExtremelyLenient)
+        self.assertEqual(output.documentElement.toxml(), expected)
+
+
+    def test_brokenAttributeName(self):
+        """
+        Check that microdom does its best to handle broken attribute names.
+        The important thing is that it doesn't raise an exception.
+        """
+        input = '<body><h1><div al!\n ign="center">Foo</div></h1></body>'
+        expected = ('<body><h1><div ign="center" al="True">'
+                    'Foo</div></h1></body>')
+        self.checkParsed(input, expected)
+
+
+    def test_brokenAttributeValue(self):
+        """
+        Check that microdom encompasses broken attribute values.
+        """
+        input = '<body><h1><div align="cen!\n ter">Foo</div></h1></body>'
+        expected = '<body><h1><div align="cen!\n ter">Foo</div></h1></body>'
+        self.checkParsed(input, expected)
+
+
+    def test_brokenOpeningTag(self):
+        """
+        Check that microdom does its best to handle broken opening tags.
+        The important thing is that it doesn't raise an exception.
+        """
+        input = '<body><h1><sp!\n an>Hello World!</span></h1></body>'
+        expected = '<body><h1><sp an="True">Hello World!</sp></h1></body>'
+        self.checkParsed(input, expected)
+
+
+    def test_brokenSelfClosingTag(self):
+        """
+        Check that microdom does its best to handle broken self-closing tags.
+        The important thing is that it doesn't raise an exception.
+        """
+        self.checkParsed('<body><span /!\n></body>',
+                         '<body><span></span></body>')
+        self.checkParsed('<span!\n />', '<span></span>')
+
+
+    def test_brokenClosingTag(self):
+        """
+        Check that microdom does its best to handle broken closing tags.
+        The important thing is that it doesn't raise an exception.
+        """
+        input = '<body><h1><span>Hello World!</sp!\nan></h1></body>'
+        expected = '<body><h1><span>Hello World!</span></h1></body>'
+        self.checkParsed(input, expected)
+        input = '<body><h1><span>Hello World!</!\nspan></h1></body>'
+        self.checkParsed(input, expected)
+        input = '<body><h1><span>Hello World!</span!\n></h1></body>'
+        self.checkParsed(input, expected)
+        input = '<body><h1><span>Hello World!<!\n/span></h1></body>'
+        expected = '<body><h1><span>Hello World!<!></!></span></h1></body>'
+        self.checkParsed(input, expected)
+
+
+
+
+class NodeTests(TestCase):
+    """
+    Tests for L{Node}.
+    """
+    def test_isNodeEqualTo(self):
+        """
+        L{Node.isEqualToNode} returns C{True} if and only if passed a L{Node}
+        with the same children.
+        """
+        # A node is equal to itself
+        node = microdom.Node(object())
+        self.assertTrue(node.isEqualToNode(node))
+        another = microdom.Node(object())
+        # Two nodes with no children are equal
+        self.assertTrue(node.isEqualToNode(another))
+        node.appendChild(microdom.Node(object()))
+        # A node with no children is not equal to a node with a child
+        self.assertFalse(node.isEqualToNode(another))
+        another.appendChild(microdom.Node(object()))
+        # A node with a child and no grandchildren is equal to another node
+        # with a child and no grandchildren.
+        self.assertTrue(node.isEqualToNode(another))
+        # A node with a child and a grandchild is not equal to another node
+        # with a child and no grandchildren.
+        node.firstChild().appendChild(microdom.Node(object()))
+        self.assertFalse(node.isEqualToNode(another))
+        # A node with a child and a grandchild is equal to another node with a
+        # child and a grandchild.
+        another.firstChild().appendChild(microdom.Node(object()))
+        self.assertTrue(node.isEqualToNode(another))
+
+    def test_validChildInstance(self):
+        """
+        Children of L{Node} instances must also be L{Node} instances.
+        """
+        node = microdom.Node()
+        child = microdom.Node()
+        # Node.appendChild() only accepts Node instances.
+        node.appendChild(child)
+        self.assertRaises(TypeError, node.appendChild, None)
+        # Node.insertBefore() only accepts Node instances.
+        self.assertRaises(TypeError, node.insertBefore, child, None)
+        self.assertRaises(TypeError, node.insertBefore, None, child)
+        self.assertRaises(TypeError, node.insertBefore, None, None)
+        # Node.removeChild() only accepts Node instances.
+        node.removeChild(child)
+        self.assertRaises(TypeError, node.removeChild, None)
+        # Node.replaceChild() only accepts Node instances.
+        self.assertRaises(TypeError, node.replaceChild, child, None)
+        self.assertRaises(TypeError, node.replaceChild, None, child)
+        self.assertRaises(TypeError, node.replaceChild, None, None)
+
+
+class DocumentTests(TestCase):
+    """
+    Tests for L{Document}.
+    """
+    doctype = 'foo PUBLIC "baz" "http://www.example.com/example.dtd"'
+
+    def test_isEqualToNode(self):
+        """
+        L{Document.isEqualToNode} returns C{True} if and only if passed a
+        L{Document} with the same C{doctype} and C{documentElement}.
+        """
+        # A document is equal to itself
+        document = microdom.Document()
+        self.assertTrue(document.isEqualToNode(document))
+        # A document without a doctype or documentElement is equal to another
+        # document without a doctype or documentElement.
+        another = microdom.Document()
+        self.assertTrue(document.isEqualToNode(another))
+        # A document with a doctype is not equal to a document without a
+        # doctype.
+        document.doctype = self.doctype
+        self.assertFalse(document.isEqualToNode(another))
+        # Two documents with the same doctype are equal
+        another.doctype = self.doctype
+        self.assertTrue(document.isEqualToNode(another))
+        # A document with a documentElement is not equal to a document without
+        # a documentElement
+        document.appendChild(microdom.Node(object()))
+        self.assertFalse(document.isEqualToNode(another))
+        # Two documents with equal documentElements are equal.
+        another.appendChild(microdom.Node(object()))
+        self.assertTrue(document.isEqualToNode(another))
+        # Two documents with documentElements which are not equal are not
+        # equal.
+        document.documentElement.appendChild(microdom.Node(object()))
+        self.assertFalse(document.isEqualToNode(another))
+
+
+    def test_childRestriction(self):
+        """
+        L{Document.appendChild} raises L{ValueError} if the document already
+        has a child.
+        """
+        document = microdom.Document()
+        child = microdom.Node()
+        another = microdom.Node()
+        document.appendChild(child)
+        self.assertRaises(ValueError, document.appendChild, another)
+
+
+
+class EntityReferenceTests(TestCase):
+    """
+    Tests for L{EntityReference}.
+    """
+    def test_isEqualToNode(self):
+        """
+        L{EntityReference.isEqualToNode} returns C{True} if and only if passed
+        a L{EntityReference} with the same C{eref}.
+        """
+        self.assertTrue(
+            microdom.EntityReference('quot').isEqualToNode(
+                microdom.EntityReference('quot')))
+        self.assertFalse(
+            microdom.EntityReference('quot').isEqualToNode(
+                microdom.EntityReference('apos')))
+
+
+
+class CharacterDataTests(TestCase):
+    """
+    Tests for L{CharacterData}.
+    """
+    def test_isEqualToNode(self):
+        """
+        L{CharacterData.isEqualToNode} returns C{True} if and only if passed a
+        L{CharacterData} with the same value.
+        """
+        self.assertTrue(
+            microdom.CharacterData('foo').isEqualToNode(
+                microdom.CharacterData('foo')))
+        self.assertFalse(
+            microdom.CharacterData('foo').isEqualToNode(
+                microdom.CharacterData('bar')))
+
+
+
+class CommentTests(TestCase):
+    """
+    Tests for L{Comment}.
+    """
+    def test_isEqualToNode(self):
+        """
+        L{Comment.isEqualToNode} returns C{True} if and only if passed a
+        L{Comment} with the same value.
+        """
+        self.assertTrue(
+            microdom.Comment('foo').isEqualToNode(
+                microdom.Comment('foo')))
+        self.assertFalse(
+            microdom.Comment('foo').isEqualToNode(
+                microdom.Comment('bar')))
+
+
+
+class TextTests(TestCase):
+    """
+    Tests for L{Text}.
+    """
+    def test_isEqualToNode(self):
+        """
+        L{Text.isEqualToNode} returns C{True} if and only if passed a L{Text}
+        which represents the same data.
+        """
+        self.assertTrue(
+            microdom.Text('foo', raw=True).isEqualToNode(
+                microdom.Text('foo', raw=True)))
+        self.assertFalse(
+            microdom.Text('foo', raw=True).isEqualToNode(
+                microdom.Text('foo', raw=False)))
+        self.assertFalse(
+            microdom.Text('foo', raw=True).isEqualToNode(
+                microdom.Text('bar', raw=True)))
+
+
+
+class CDATASectionTests(TestCase):
+    """
+    Tests for L{CDATASection}.
+    """
+    def test_isEqualToNode(self):
+        """
+        L{CDATASection.isEqualToNode} returns C{True} if and only if passed a
+        L{CDATASection} which represents the same data.
+        """
+        self.assertTrue(
+            microdom.CDATASection('foo').isEqualToNode(
+                microdom.CDATASection('foo')))
+        self.assertFalse(
+            microdom.CDATASection('foo').isEqualToNode(
+                microdom.CDATASection('bar')))
+
+
+
+class ElementTests(TestCase):
+    """
+    Tests for L{Element}.
+    """
+    def test_isEqualToNode(self):
+        """
+        L{Element.isEqualToNode} returns C{True} if and only if passed a
+        L{Element} with the same C{nodeName}, C{namespace}, C{childNodes}, and
+        C{attributes}.
+        """
+        self.assertTrue(
+            microdom.Element(
+                'foo', {'a': 'b'}, object(), namespace='bar').isEqualToNode(
+                microdom.Element(
+                    'foo', {'a': 'b'}, object(), namespace='bar')))
+
+        # Elements with different nodeName values do not compare equal.
+        self.assertFalse(
+            microdom.Element(
+                'foo', {'a': 'b'}, object(), namespace='bar').isEqualToNode(
+                microdom.Element(
+                    'bar', {'a': 'b'}, object(), namespace='bar')))
+
+        # Elements with different namespaces do not compare equal.
+        self.assertFalse(
+            microdom.Element(
+                'foo', {'a': 'b'}, object(), namespace='bar').isEqualToNode(
+                microdom.Element(
+                    'foo', {'a': 'b'}, object(), namespace='baz')))
+
+        # Elements with different childNodes do not compare equal.
+        one = microdom.Element('foo', {'a': 'b'}, object(), namespace='bar')
+        two = microdom.Element('foo', {'a': 'b'}, object(), namespace='bar')
+        two.appendChild(microdom.Node(object()))
+        self.assertFalse(one.isEqualToNode(two))
+
+        # Elements with different attributes do not compare equal.
+        self.assertFalse(
+            microdom.Element(
+                'foo', {'a': 'b'}, object(), namespace='bar').isEqualToNode(
+                microdom.Element(
+                    'foo', {'a': 'c'}, object(), namespace='bar')))
diff --git a/ThirdParty/Twisted/twisted/web/test/test_xmlrpc.py b/ThirdParty/Twisted/twisted/web/test/test_xmlrpc.py
new file mode 100644
index 0000000..f49ff02
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/test/test_xmlrpc.py
@@ -0,0 +1,849 @@
+# -*- test-case-name: twisted.web.test.test_xmlrpc -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for XML-RPC support in L{twisted.web.xmlrpc}.
+"""
+
+import datetime
+import xmlrpclib
+from StringIO import StringIO
+
+from twisted.trial import unittest
+from twisted.web import xmlrpc
+from twisted.web.xmlrpc import (
+    XMLRPC, payloadTemplate, addIntrospection, _QueryFactory, Proxy,
+    withRequest)
+from twisted.web import server, static, client, error, http
+from twisted.internet import reactor, defer
+from twisted.internet.error import ConnectionDone
+from twisted.python import failure
+from twisted.test.proto_helpers import MemoryReactor
+from twisted.web.test.test_web import DummyRequest
+try:
+    import twisted.internet.ssl
+except ImportError:
+    sslSkip = "OpenSSL not present"
+else:
+    sslSkip = None
+
+
+class AsyncXMLRPCTests(unittest.TestCase):
+    """
+    Tests for L{XMLRPC}'s support of Deferreds.
+    """
+    def setUp(self):
+        self.request = DummyRequest([''])
+        self.request.method = 'POST'
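+        # Build an XML-RPC <methodCall> body invoking the 'async' procedure
+        # with no arguments.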
+        self.request.content = StringIO(
+            payloadTemplate % ('async', xmlrpclib.dumps(())))
+
+        result = self.result = defer.Deferred()
+        class AsyncResource(XMLRPC):
+            def xmlrpc_async(self):
+                return result
+
+        self.resource = AsyncResource()
+
+
+    def test_deferredResponse(self):
+        """
+        If an L{XMLRPC} C{xmlrpc_*} method returns a L{defer.Deferred}, the
+        response to the request is the result of that L{defer.Deferred}.
+        """
+        self.resource.render(self.request)
+        self.assertEqual(self.request.written, [])
+
+        self.result.callback("result")
+
+        resp = xmlrpclib.loads("".join(self.request.written))
+        self.assertEqual(resp, (('result',), None))
+        self.assertEqual(self.request.finished, 1)
+
+
+    def test_interruptedDeferredResponse(self):
+        """
+        While waiting for the L{Deferred} returned by an L{XMLRPC} C{xmlrpc_*}
+        method to fire, the connection the request was issued over may close.
+        If this happens, neither C{write} nor C{finish} is called on the
+        request.
+        """
+        self.resource.render(self.request)
+        self.request.processingFailed(
+            failure.Failure(ConnectionDone("Simulated")))
+        self.result.callback("result")
+        self.assertEqual(self.request.written, [])
+        self.assertEqual(self.request.finished, 0)
+
+
+
+class TestRuntimeError(RuntimeError):
+    pass
+
+
+
+class TestValueError(ValueError):
+    pass
+
+
+
+class Test(XMLRPC):
+
+    # If you add xmlrpc_ methods to this class, go change test_listMethods
+    # below.
+
+    FAILURE = 666
+    NOT_FOUND = 23
+    SESSION_EXPIRED = 42
+
+    def xmlrpc_echo(self, arg):
+        return arg
+
+    # the doc string is part of the test
+    def xmlrpc_add(self, a, b):
+        """
+        This function add two numbers.
+        """
+        return a + b
+
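+    # Signatures advertised via XML-RPC introspection: each entry lists the
+    # return type followed by the argument types.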
+    xmlrpc_add.signature = [['int', 'int', 'int'],
+                            ['double', 'double', 'double']]
+
+    # the doc string is part of the test
+    def xmlrpc_pair(self, string, num):
+        """
+        This function puts the two arguments in an array.
+        """
+        return [string, num]
+
+    xmlrpc_pair.signature = [['array', 'string', 'int']]
+
+    # the doc string is part of the test
+    def xmlrpc_defer(self, x):
+        """Help for defer."""
+        return defer.succeed(x)
+
+    def xmlrpc_deferFail(self):
+        return defer.fail(TestValueError())
+
+    # don't add a doc string, it's part of the test
+    def xmlrpc_fail(self):
+        raise TestRuntimeError
+
+    def xmlrpc_fault(self):
+        return xmlrpc.Fault(12, "hello")
+
+    def xmlrpc_deferFault(self):
+        return defer.fail(xmlrpc.Fault(17, "hi"))
+
+    def xmlrpc_complex(self):
+        return {"a": ["b", "c", 12, []], "D": "foo"}
+
+    def xmlrpc_dict(self, map, key):
+        return map[key]
+    xmlrpc_dict.help = 'Help for dict.'
+
+    @withRequest
+    def xmlrpc_withRequest(self, request, other):
+        """
+        A method decorated with L{withRequest} which can be called by
+        a test to verify that the request object really is passed as
+        an argument.
+        """
+        return (
+            # as a proof that request is a request
+            request.method +
+            # plus proof other arguments are still passed along
+            ' ' + other)
+
+
+    def lookupProcedure(self, procedurePath):
+        try:
+            return XMLRPC.lookupProcedure(self, procedurePath)
+        except xmlrpc.NoSuchFunction:
+            if procedurePath.startswith("SESSION"):
+                raise xmlrpc.Fault(self.SESSION_EXPIRED,
+                                   "Session non-existant/expired.")
+            else:
+                raise
+
+
+
+class TestLookupProcedure(XMLRPC):
+    """
+    This is a resource which customizes procedure lookup to be used by the tests
+    of support for this customization.
+    """
+    def echo(self, x):
+        return x
+
+
+    def lookupProcedure(self, procedureName):
+        """
+        Lookup a procedure from a fixed set of choices, either I{echo} or
+        I{system.listMethods}.
+        """
+        if procedureName == 'echo':
+            return self.echo
+        raise xmlrpc.NoSuchFunction(
+            self.NOT_FOUND, 'procedure %s not found' % (procedureName,))
+
+
+
+class TestListProcedures(XMLRPC):
+    """
+    This is a resource which customizes procedure enumeration to be used by the
+    tests of support for this customization.
+    """
+    def listProcedures(self):
+        """
+        Return a list of a single method this resource will claim to support.
+        """
+        return ['foo']
+
+
+
+class TestAuthHeader(Test):
+    """
+    This is used to get the header info so that we can test
+    authentication.
+    """
+    def __init__(self):
+        Test.__init__(self)
+        self.request = None
+
+    def render(self, request):
+        self.request = request
+        return Test.render(self, request)
+
+    def xmlrpc_authinfo(self):
+        return self.request.getUser(), self.request.getPassword()
+
+
+class TestQueryProtocol(xmlrpc.QueryProtocol):
+    """
+    QueryProtocol for tests that saves headers received inside the factory.
+    """
+
+    def connectionMade(self):
+        self.factory.transport = self.transport
+        xmlrpc.QueryProtocol.connectionMade(self)
+
+    def handleHeader(self, key, val):
+        self.factory.headers[key.lower()] = val
+
+
+class TestQueryFactory(xmlrpc._QueryFactory):
+    """
+    QueryFactory using L{TestQueryProtocol} for saving headers.
+    """
+    protocol = TestQueryProtocol
+
+    def __init__(self, *args, **kwargs):
+        self.headers = {}
+        xmlrpc._QueryFactory.__init__(self, *args, **kwargs)
+
+
+class TestQueryFactoryCancel(xmlrpc._QueryFactory):
+    """
+    QueryFactory that saves a reference to the
+    L{twisted.internet.interfaces.IConnector} to test connection lost.
+    """
+
+    def startedConnecting(self, connector):
+        self.connector = connector
+
+
+class XMLRPCTestCase(unittest.TestCase):
+
+    def setUp(self):
+        self.p = reactor.listenTCP(0, server.Site(Test()),
+                                   interface="127.0.0.1")
+        self.port = self.p.getHost().port
+        self.factories = []
+
+    def tearDown(self):
+        self.factories = []
+        return self.p.stopListening()
+
+    def queryFactory(self, *args, **kwargs):
+        """
+        Specific queryFactory for the proxy that uses our custom
+        L{TestQueryFactory} and saves the created factories.
+        """
+        factory = TestQueryFactory(*args, **kwargs)
+        self.factories.append(factory)
+        return factory
+
+    def proxy(self, factory=None):
+        """
+        Return a new xmlrpc.Proxy for the test site created in
+        setUp(), using the given factory as the queryFactory, or
+        self.queryFactory if no factory is provided.
+        """
+        p = xmlrpc.Proxy("http://127.0.0.1:%d/" % self.port)
+        if factory is None:
+            p.queryFactory = self.queryFactory
+        else:
+            p.queryFactory = factory
+        return p
+
+    def test_results(self):
+        inputOutput = [
+            ("add", (2, 3), 5),
+            ("defer", ("a",), "a"),
+            ("dict", ({"a": 1}, "a"), 1),
+            ("pair", ("a", 1), ["a", 1]),
+            ("complex", (), {"a": ["b", "c", 12, []], "D": "foo"})]
+
+        dl = []
+        for meth, args, outp in inputOutput:
+            d = self.proxy().callRemote(meth, *args)
+            d.addCallback(self.assertEqual, outp)
+            dl.append(d)
+        return defer.DeferredList(dl, fireOnOneErrback=True)
+
+    def test_errors(self):
+        """
+        Verify that for each way a method exposed via XML-RPC can fail, the
+        correct 'Content-type' header is set in the response and that the
+        client-side Deferred is errbacked with an appropriate C{Fault}
+        instance.
+        """
+        dl = []
+        for code, methodName in [(666, "fail"), (666, "deferFail"),
+                                 (12, "fault"), (23, "noSuchMethod"),
+                                 (17, "deferFault"), (42, "SESSION_TEST")]:
+            d = self.proxy().callRemote(methodName)
+            d = self.assertFailure(d, xmlrpc.Fault)
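+            # Bind the current value of code as a default argument so each
+            # callback checks the fault code expected for its own method.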
+            d.addCallback(lambda exc, code=code:
+                self.assertEqual(exc.faultCode, code))
+            dl.append(d)
+        d = defer.DeferredList(dl, fireOnOneErrback=True)
+        def cb(ign):
+            for factory in self.factories:
+                self.assertEqual(factory.headers['content-type'],
+                                  'text/xml')
+            self.flushLoggedErrors(TestRuntimeError, TestValueError)
+        d.addCallback(cb)
+        return d
+
+
+    def test_cancel(self):
+        """
+        A deferred from the Proxy can be cancelled, disconnecting
+        the L{twisted.internet.interfaces.IConnector}.
+        """
+        def factory(*args, **kw):
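+            # Stash the created factory on the function object so the test
+            # below can reach its connector.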
+            factory.f = TestQueryFactoryCancel(*args, **kw)
+            return factory.f
+        d = self.proxy(factory).callRemote('add', 2, 3)
+        self.assertNotEquals(factory.f.connector.state, "disconnected")
+        d.cancel()
+        self.assertEqual(factory.f.connector.state, "disconnected")
+        d = self.assertFailure(d, defer.CancelledError)
+        return d
+
+
+    def test_errorGet(self):
+        """
+        A classic GET on the xml server should return a NOT_ALLOWED.
+        """
+        d = client.getPage("http://127.0.0.1:%d/" % (self.port,))
+        d = self.assertFailure(d, error.Error)
+        d.addCallback(
+            lambda exc: self.assertEqual(int(exc.args[0]), http.NOT_ALLOWED))
+        return d
+
+    def test_errorXMLContent(self):
+        """
+        Test that an invalid XML input returns an L{xmlrpc.Fault}.
+        """
+        d = client.getPage("http://127.0.0.1:%d/" % (self.port,),
+                           method="POST", postdata="foo")
+        def cb(result):
+            self.assertRaises(xmlrpc.Fault, xmlrpclib.loads, result)
+        d.addCallback(cb)
+        return d
+
+
+    def test_datetimeRoundtrip(self):
+        """
+        If an L{xmlrpclib.DateTime} is passed as an argument to an XML-RPC
+        call and then returned by the server unmodified, the result should
+        be equal to the original object.
+        """
+        when = xmlrpclib.DateTime()
+        d = self.proxy().callRemote("echo", when)
+        d.addCallback(self.assertEqual, when)
+        return d
+
+
+    def test_doubleEncodingError(self):
+        """
+        If it is not possible to encode a response to the request (for example,
+        because L{xmlrpclib.dumps} raises an exception when encoding a
+        L{Fault}) the exception which prevents the response from being
+        generated is logged and the request object is finished anyway.
+        """
+        d = self.proxy().callRemote("echo", "")
+
+        # *Now* break xmlrpclib.dumps.  Hopefully the client already used it.
+        def fakeDumps(*args, **kwargs):
+            raise RuntimeError("Cannot encode anything at all!")
+        self.patch(xmlrpclib, 'dumps', fakeDumps)
+
+        # It doesn't matter how it fails, so long as it does.  Also, it happens
+        # to fail with an implementation detail exception right now, not
+        # something suitable as part of a public interface.
+        d = self.assertFailure(d, Exception)
+
+        def cbFailed(ignored):
+            # The fakeDumps exception should have been logged.
+            self.assertEqual(len(self.flushLoggedErrors(RuntimeError)), 1)
+        d.addCallback(cbFailed)
+        return d
+
+
+    def test_closeConnectionAfterRequest(self):
+        """
+        The connection to the web server is closed when the request is done.
+        """
+        d = self.proxy().callRemote('echo', '')
+        def responseDone(ignored):
+            [factory] = self.factories
+            self.assertFalse(factory.transport.connected)
+            self.assertTrue(factory.transport.disconnected)
+        return d.addCallback(responseDone)
+
+
+    def test_tcpTimeout(self):
+        """
+        For I{HTTP} URIs, L{xmlrpc.Proxy.callRemote} passes the value it
+        received for the C{connectTimeout} parameter as the C{timeout} argument
+        to the underlying connectTCP call.
+        """
+        reactor = MemoryReactor()
+        proxy = xmlrpc.Proxy("http://127.0.0.1:69", connectTimeout=2.0,
+                             reactor=reactor)
+        proxy.callRemote("someMethod")
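+        # MemoryReactor records (host, port, factory, timeout, bindAddress)
+        # tuples in tcpClients; index 3 is the timeout given to connectTCP.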
+        self.assertEqual(reactor.tcpClients[0][3], 2.0)
+
+
+    def test_sslTimeout(self):
+        """
+        For I{HTTPS} URIs, L{xmlrpc.Proxy.callRemote} passes the value it
+        received for the C{connectTimeout} parameter as the C{timeout} argument
+        to the underlying connectSSL call.
+        """
+        reactor = MemoryReactor()
+        proxy = xmlrpc.Proxy("https://127.0.0.1:69", connectTimeout=3.0,
+                             reactor=reactor)
+        proxy.callRemote("someMethod")
+        self.assertEqual(reactor.sslClients[0][4], 3.0)
+    test_sslTimeout.skip = sslSkip
+
+
+
+class XMLRPCTestCase2(XMLRPCTestCase):
+    """
+    Test with a proxy whose URL does not end with a slash.
+    """
+
+    def proxy(self, factory=None):
+        p = xmlrpc.Proxy("http://127.0.0.1:%d" % self.port)
+        if factory is None:
+            p.queryFactory = self.queryFactory
+        else:
+            p.queryFactory = factory
+        return p
+
+
+
+class XMLRPCTestPublicLookupProcedure(unittest.TestCase):
+    """
+    Tests for L{XMLRPC}'s support of subclasses which override
+    C{lookupProcedure} and C{listProcedures}.
+    """
+
+    def createServer(self, resource):
+        self.p = reactor.listenTCP(
+            0, server.Site(resource), interface="127.0.0.1")
+        self.addCleanup(self.p.stopListening)
+        self.port = self.p.getHost().port
+        self.proxy = xmlrpc.Proxy('http://127.0.0.1:%d' % self.port)
+
+
+    def test_lookupProcedure(self):
+        """
+        A subclass of L{XMLRPC} can override C{lookupProcedure} to find
+        procedures that are not defined using a C{xmlrpc_}-prefixed method name.
+        """
+        self.createServer(TestLookupProcedure())
+        what = "hello"
+        d = self.proxy.callRemote("echo", what)
+        d.addCallback(self.assertEqual, what)
+        return d
+
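+    # A minimal sketch (assumed, not part of the upstream test suite) of the
+    # kind of subclass these tests exercise: overriding lookupProcedure to
+    # dispatch procedures from a plain dict instead of xmlrpc_-prefixed
+    # methods, raising NoSuchFunction for unknown names.  TestLookupProcedure,
+    # defined earlier in this module, plays this role for the tests here.
+    #
+    #   class DictDispatchXMLRPC(xmlrpc.XMLRPC):
+    #       procedures = {'echo': lambda x: x}
+    #
+    #       def lookupProcedure(self, procedurePath):
+    #           try:
+    #               return self.procedures[procedurePath]
+    #           except KeyError:
+    #               raise xmlrpc.NoSuchFunction(
+    #                   self.NOT_FOUND, 'no such procedure ' + procedurePath)
+    #
+    #       def listProcedures(self):
+    #           return list(self.procedures)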
+
+    def test_errors(self):
+        """
+        A subclass of L{XMLRPC} can override C{lookupProcedure} to raise
+        L{NoSuchFunction} to indicate that a requested method is not available
+        to be called, signalling a fault to the XML-RPC client.
+        """
+        self.createServer(TestLookupProcedure())
+        d = self.proxy.callRemote("xxxx", "hello")
+        d = self.assertFailure(d, xmlrpc.Fault)
+        return d
+
+
+    def test_listMethods(self):
+        """
+        A subclass of L{XMLRPC} can override C{listProcedures} to define the
+        procedures reported by introspection.  Overriding C{listProcedures}
+        should not break C{system.listMethods}.
+        """
+        resource = TestListProcedures()
+        addIntrospection(resource)
+        self.createServer(resource)
+        d = self.proxy.callRemote("system.listMethods")
+        def listed(procedures):
+            # The list will also include other introspection procedures added by
+            # addIntrospection.  We just want to see "foo" from our customized
+            # listProcedures.
+            self.assertIn('foo', procedures)
+        d.addCallback(listed)
+        return d
+
+
+
+class SerializationConfigMixin:
+    """
+    Mixin which defines a couple tests which should pass when a particular flag
+    is passed to L{XMLRPC}.
+
+    These are not meant to be exhaustive serialization tests, since L{xmlrpclib}
+    does all of the actual serialization work.  They are just meant to exercise
+    a few codepaths to make sure we are calling into xmlrpclib correctly.
+
+    @ivar flagName: A C{str} giving the name of the flag which must be passed to
+        L{XMLRPC} to allow the tests to pass.  Subclasses should set this.
+
+    @ivar value: A value which the specified flag will allow the serialization
+        of.  Subclasses should set this.
+    """
+    def setUp(self):
+        """
+        Create a new XML-RPC server with the flag named by C{self.flagName}
+        set to C{True}.
+        """
+        kwargs = {self.flagName: True}
+        self.p = reactor.listenTCP(
+            0, server.Site(Test(**kwargs)), interface="127.0.0.1")
+        self.addCleanup(self.p.stopListening)
+        self.port = self.p.getHost().port
+        self.proxy = xmlrpc.Proxy(
+            "http://127.0.0.1:%d/" % (self.port,), **kwargs)
+
+
+    def test_roundtripValue(self):
+        """
+        C{self.value} can be round-tripped over an XMLRPC method call/response.
+        """
+        d = self.proxy.callRemote('defer', self.value)
+        d.addCallback(self.assertEqual, self.value)
+        return d
+
+
+    def test_roundtripNestedValue(self):
+        """
+        A C{dict} which contains C{self.value} can be round-tripped over an
+        XMLRPC method call/response.
+        """
+        d = self.proxy.callRemote('defer', {'a': self.value})
+        d.addCallback(self.assertEqual, {'a': self.value})
+        return d
+
+
+
+class XMLRPCAllowNoneTestCase(SerializationConfigMixin, unittest.TestCase):
+    """
+    Tests for passing C{None} when the C{allowNone} flag is set.
+    """
+    flagName = "allowNone"
+    value = None
+
+
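+# Feature detection: xmlrpclib.loads only accepts the use_datetime argument on
+# Python 2.5 and newer; on older versions it raises TypeError, so the
+# datetime-based tests below are skipped there.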
+try:
+    xmlrpclib.loads(xmlrpclib.dumps(({}, {})), use_datetime=True)
+except TypeError:
+    _datetimeSupported = False
+else:
+    _datetimeSupported = True
+
+
+
+class XMLRPCUseDateTimeTestCase(SerializationConfigMixin, unittest.TestCase):
+    """
+    Tests for passing a C{datetime.datetime} instance when the C{useDateTime}
+    flag is set.
+    """
+    flagName = "useDateTime"
+    value = datetime.datetime(2000, 12, 28, 3, 45, 59)
+
+    if not _datetimeSupported:
+        skip = (
+            "Available version of xmlrpclib does not support datetime "
+            "objects.")
+
+
+
+class XMLRPCDisableUseDateTimeTestCase(unittest.TestCase):
+    """
+    Tests for the C{useDateTime} flag on Python 2.4.
+    """
+    if _datetimeSupported:
+        skip = (
+            "Available version of xmlrpclib supports datetime objects.")
+
+    def test_cannotInitializeWithDateTime(self):
+        """
+        L{XMLRPC} raises L{RuntimeError} if passed C{True} for C{useDateTime}.
+        """
+        self.assertRaises(RuntimeError, XMLRPC, useDateTime=True)
+        self.assertRaises(
+            RuntimeError, Proxy, "http://localhost/", useDateTime=True)
+
+
+    def test_cannotSetDateTime(self):
+        """
+        Setting L{XMLRPC.useDateTime} to C{True} after initialization raises
+        L{RuntimeError}.
+        """
+        xmlrpc = XMLRPC(useDateTime=False)
+        self.assertRaises(RuntimeError, setattr, xmlrpc, "useDateTime", True)
+        proxy = Proxy("http://localhost/", useDateTime=False)
+        self.assertRaises(RuntimeError, setattr, proxy, "useDateTime", True)
+
+
+
+class XMLRPCTestAuthenticated(XMLRPCTestCase):
+    """
+    Test with authenticated proxy. We run this with the same input/output as
+    above.
+    """
+    user = "username"
+    password = "asecret"
+
+    def setUp(self):
+        self.p = reactor.listenTCP(0, server.Site(TestAuthHeader()),
+                                   interface="127.0.0.1")
+        self.port = self.p.getHost().port
+        self.factories = []
+
+
+    def test_authInfoInURL(self):
+        p = xmlrpc.Proxy("http://%s:%s@127.0.0.1:%d/" % (
+            self.user, self.password, self.port))
+        d = p.callRemote("authinfo")
+        d.addCallback(self.assertEqual, [self.user, self.password])
+        return d
+
+
+    def test_explicitAuthInfo(self):
+        p = xmlrpc.Proxy("http://127.0.0.1:%d/" % (
+            self.port,), self.user, self.password)
+        d = p.callRemote("authinfo")
+        d.addCallback(self.assertEqual, [self.user, self.password])
+        return d
+
+
+    def test_explicitAuthInfoOverride(self):
+        p = xmlrpc.Proxy("http://wrong:info@127.0.0.1:%d/" % (
+                self.port,), self.user, self.password)
+        d = p.callRemote("authinfo")
+        d.addCallback(self.assertEqual, [self.user, self.password])
+        return d
+
+
+class XMLRPCTestIntrospection(XMLRPCTestCase):
+
+    def setUp(self):
+        xmlrpc = Test()
+        addIntrospection(xmlrpc)
+        self.p = reactor.listenTCP(0, server.Site(xmlrpc),
+                                   interface="127.0.0.1")
+        self.port = self.p.getHost().port
+        self.factories = []
+
+    def test_listMethods(self):
+
+        def cbMethods(meths):
+            meths.sort()
+            self.assertEqual(
+                meths,
+                ['add', 'complex', 'defer', 'deferFail',
+                 'deferFault', 'dict', 'echo', 'fail', 'fault',
+                 'pair', 'system.listMethods',
+                 'system.methodHelp',
+                 'system.methodSignature', 'withRequest'])
+
+        d = self.proxy().callRemote("system.listMethods")
+        d.addCallback(cbMethods)
+        return d
+
+    def test_methodHelp(self):
+        inputOutputs = [
+            ("defer", "Help for defer."),
+            ("fail", ""),
+            ("dict", "Help for dict.")]
+
+        dl = []
+        for meth, expected in inputOutputs:
+            d = self.proxy().callRemote("system.methodHelp", meth)
+            d.addCallback(self.assertEqual, expected)
+            dl.append(d)
+        return defer.DeferredList(dl, fireOnOneErrback=True)
+
+    def test_methodSignature(self):
+        inputOutputs = [
+            ("defer", ""),
+            ("add", [['int', 'int', 'int'],
+                     ['double', 'double', 'double']]),
+            ("pair", [['array', 'string', 'int']])]
+
+        dl = []
+        for meth, expected in inputOutputs:
+            d = self.proxy().callRemote("system.methodSignature", meth)
+            d.addCallback(self.assertEqual, expected)
+            dl.append(d)
+        return defer.DeferredList(dl, fireOnOneErrback=True)
+
+
+class XMLRPCClientErrorHandling(unittest.TestCase):
+    """
+    Test error handling on the xmlrpc client.
+    """
+    def setUp(self):
+        self.resource = static.Data(
+            "This text is not a valid XML-RPC response.",
+            "text/plain")
+        self.resource.isLeaf = True
+        self.port = reactor.listenTCP(0, server.Site(self.resource),
+                                                     interface='127.0.0.1')
+
+    def tearDown(self):
+        return self.port.stopListening()
+
+    def test_erroneousResponse(self):
+        """
+        Test that calling the xmlrpc client on a static http server raises
+        an exception.
+        """
+        proxy = xmlrpc.Proxy("http://127.0.0.1:%d/" %
+                             (self.port.getHost().port,))
+        return self.assertFailure(proxy.callRemote("someMethod"), Exception)
+
+
+
+class TestQueryFactoryParseResponse(unittest.TestCase):
+    """
+    Test the behaviour of L{_QueryFactory.parseResponse}.
+    """
+
+    def setUp(self):
+        # The _QueryFactory that we are testing. We don't care about any
+        # of the constructor parameters.
+        self.queryFactory = _QueryFactory(
+            path=None, host=None, method='POST', user=None, password=None,
+            allowNone=False, args=())
+        # An XML-RPC response that will parse without raising an error.
+        self.goodContents = xmlrpclib.dumps(('',))
+        # An 'XML-RPC response' that will raise a parsing error.
+        self.badContents = 'invalid xml'
+        # A dummy 'reason' to pass to clientConnectionLost. We don't care
+        # what it is.
+        self.reason = failure.Failure(ConnectionDone())
+
+
+    def test_parseResponseCallbackSafety(self):
+        """
+        We can safely call L{_QueryFactory.clientConnectionLost} as a callback
+        of L{_QueryFactory.parseResponse}.
+        """
+        d = self.queryFactory.deferred
+        # The failure mode is that this callback raises an AlreadyCalled
+        # error. We have to add it now so that it gets called synchronously
+        # and triggers the race condition.
+        d.addCallback(self.queryFactory.clientConnectionLost, self.reason)
+        self.queryFactory.parseResponse(self.goodContents)
+        return d
+
+
+    def test_parseResponseErrbackSafety(self):
+        """
+        We can safely call L{_QueryFactory.clientConnectionLost} as an errback
+        of L{_QueryFactory.parseResponse}.
+        """
+        d = self.queryFactory.deferred
+        # The failure mode is that this callback raises an AlreadyCalled
+        # error. We have to add it now so that it gets called synchronously
+        # and triggers the race condition.
+        d.addErrback(self.queryFactory.clientConnectionLost, self.reason)
+        self.queryFactory.parseResponse(self.badContents)
+        return d
+
+
+    def test_badStatusErrbackSafety(self):
+        """
+        We can safely call L{_QueryFactory.clientConnectionLost} as an errback
+        of L{_QueryFactory.badStatus}.
+        """
+        d = self.queryFactory.deferred
+        # The failure mode is that this callback raises an AlreadyCalled
+        # error. We have to add it now so that it gets called synchronously
+        # and triggers the race condition.
+        d.addErrback(self.queryFactory.clientConnectionLost, self.reason)
+        self.queryFactory.badStatus('status', 'message')
+        return d
+
+    def test_parseResponseWithoutData(self):
+        """
+        Some servers can send a response without any data:
+        L{_QueryFactory.parseResponse} should catch the error and call the
+        result errback.
+        """
+        content = """
+<methodResponse>
+ <params>
+  <param>
+  </param>
+ </params>
+</methodResponse>"""
+        d = self.queryFactory.deferred
+        self.queryFactory.parseResponse(content)
+        return self.assertFailure(d, IndexError)
+
+
+
+class XMLRPCTestWithRequest(unittest.TestCase):
+
+    def setUp(self):
+        self.resource = Test()
+
+
+    def test_withRequest(self):
+        """
+        When an XML-RPC method is called and the implementation is
+        decorated with L{withRequest}, the request object is passed as
+        the first argument.
+        """
+        request = DummyRequest('/RPC2')
+        request.method = "POST"
+        request.content = StringIO(xmlrpclib.dumps(("foo",), 'withRequest'))
+        def valid(n, request):
+            data = xmlrpclib.loads(request.written[0])
+            self.assertEqual(data, (('POST foo',), None))
+        d = request.notifyFinish().addCallback(valid, request)
+        self.resource.render_POST(request)
+        return d
diff --git a/ThirdParty/Twisted/twisted/web/topfiles/NEWS b/ThirdParty/Twisted/twisted/web/topfiles/NEWS
new file mode 100644
index 0000000..16751e8
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/topfiles/NEWS
@@ -0,0 +1,602 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted Web 12.3.0 (2012-12-20)
+===============================
+
+Features
+--------
+ - twisted.web.server.Site now supports an encoders argument to encode
+   request content, twisted.web.server.GzipEncoderFactory being the
+   first one provided. (#104)
+
+Bugfixes
+--------
+ - twisted.web.http.HTTPChannel.headerReceived now catches the error
+   if the Content-Length header is not an integer and returns a 400 Bad
+   Request response. (#6029)
+ - twisted.web.http.HTTPChannel now drops the connection and issues a
+   400 error upon receipt of a chunked-encoded request with a
+   bad chunk-length field. (#6030)
+
+Improved Documentation
+----------------------
+ - twisted.web.iweb.IRequest now documents its `content` attribute and
+   a new "web in 60 seconds" howto demonstrates its use. (#6181)
+
+Other
+-----
+ - #5882, #5883, #5887, #5920, #6031, #6077, #6078, #6079, #6080,
+   #6110, #6113, #6196, #6205
+
+
+Twisted Web 12.2.0 (2012-08-26)
+===============================
+
+Deprecations and Removals
+-------------------------
+ - twisted.web.static.FileTransfer, deprecated since 9.0, is removed
+   now. Use a subclass of StaticProducer instead. (#5651)
+ - ErrorPage, NoResource and ForbiddenResource in twisted.web.error
+   were deprecated since 9.0 and are removed now. (#5659)
+ - twisted.web.google, deprecated since Twisted 11.1, is removed now.
+   (#5768)
+
+Other
+-----
+ - #5665
+
+
+Twisted Web 12.1.0 (2012-06-02)
+===============================
+
+Features
+--------
+ - twisted.web.client.Agent and ProxyAgent now support persistent
+   connections. (#3420)
+ - Added twisted.web.template.renderElement, a function which renders
+   an Element to a response. (#5395)
+ - twisted.web.client.HTTPConnectionPool now ensures that failed
+   queries on persistent connections are retried, when possible.
+   (#5479)
+ - twisted.web.template.XMLFile now supports FilePath objects. (#5509)
+ - twisted.web.template.renderElement takes a doctype keyword
+   argument, which will be written as the first line of the response,
+   defaulting to the HTML5 doctype. (#5560)
+
+Bugfixes
+--------
+ - twisted.web.util.formatFailure now quotes all data in its output to
+   avoid it being mistakenly interpreted as markup. (#4896)
+ - twisted.web.distrib now lets distributed servers set the response
+   message. (#5525)
+
+Deprecations and Removals
+-------------------------
+ - PHP3Script and PHPScript were removed from twisted.web.twcgi,
+   deprecated since 10.1. Use twcgi.FilteredScript instead. (#5456)
+ - twisted.web.template.XMLFile's support for file objects and
+   filenames is now deprecated.  Use the new support for FilePath
+   objects. (#5509)
+ - twisted.web.server.date_time_string and
+   twisted.web.server.string_date_time are now deprecated in favor of
+   twisted.web.http.datetimeToString and
+   twisted.web.http.stringToDatetime (#5535)
+
+Other
+-----
+ - #4966, #5460, #5490, #5591, #5602, #5609, #5612
+
+
+Twisted Web 12.0.0 (2012-02-10)
+===============================
+
+Features
+--------
+ - twisted.web.util.redirectTo now raises TypeError if the URL passed
+   to it is a unicode string instead of a byte string. (#5236)
+ - The new class twisted.web.template.CharRef provides support for
+   inserting numeric character references in output generated by
+   twisted.web.template. (#5408)
+
+Improved Documentation
+----------------------
+ - The Twisted Web howto now has a section on proxies and reverse
+   proxies. (#399)
+ - The web client howto now covers ContentDecoderAgent and links to an
+   example of its use. (#5415)
+
+Other
+-----
+ - #5404, #5438
+
+
+Twisted Web 11.1.0 (2011-11-15)
+===============================
+
+Features
+--------
+ - twisted.web.client.ProxyAgent is a new HTTP/1.1 web client which
+   adds proxy support. (#1774)
+ - twisted.web.client.Agent now takes optional connectTimeout and
+   bindAddress arguments which are forwarded to the subsequent
+   connectTCP/connectSSL call. (#3450)
+ - The new class twisted.web.client.FileBodyProducer makes it easy to
+   upload data in HTTP requests made using the Agent client APIs.
+   (#4017)
+ - twisted.web.xmlrpc.XMLRPC now allows its lookupProcedure method to
+   be overridden to change how XML-RPC procedures are dispatched.
+   (#4836)
+ - A new HTTP cookie-aware Twisted Web Agent wrapper is included in
+   twisted.web.client.CookieAgent (#4922)
+ - New class twisted.web.template.TagLoader provides an
+   ITemplateLoader implementation which loads already-created
+   twisted.web.iweb.IRenderable providers. (#5040)
+ - The new class twisted.web.client.RedirectAgent adds redirect
+   support to the HTTP 1.1 client stack. (#5157)
+ - twisted.web.template now supports HTML tags from the HTML5
+   standard, including <canvas> and <video>. (#5306)
+
+Bugfixes
+--------
+ - twisted.web.client.getPage and .downloadPage now only fire their
+   result Deferred after the underlying connection they use has been
+   closed. (#3796)
+ - twisted.web.server now omits the default Content-Type header from
+   NOT MODIFIED responses. (#4156)
+ - twisted.web.server now responds correctly to 'Expect: 100-continue'
+   headers, although this is not yet usefully exposed to user code.
+   (#4673)
+ - twisted.web.client.Agent no longer raises an exception if a server
+   responds and closes the connection before the request has been
+   fully transmitted. (#5013)
+ - twisted.web.http_headers.Headers now correctly capitalizes the
+   header names Content-MD5, DNT, ETag, P3P, TE, and X-XSS-Protection.
+   (#5054)
+ - twisted.web.template now escapes more inputs to comments which
+   require escaping in the output. (#5275)
+
+Improved Documentation
+----------------------
+ - The twisted.web.template howto now documents the common idiom of
+   yielding tag clones from a renderer. (#5286)
+ - CookieAgent is now documented in the twisted.web.client how-to.
+   (#5110)
+
+Deprecations and Removals
+-------------------------
+ - twisted.web.google is now deprecated. (#5209)
+
+Other
+-----
+ - #4951, #5057, #5175, #5288, #5316
+
+
+Twisted Web 11.0.0 (2011-04-01)
+===============================
+
+Features
+--------
+ - twisted.web._newclient.HTTPParser (and therefore Agent) now handles
+   HTTP headers delimited by bare LF newlines. (#3833)
+ - twisted.web.client.downloadPage now accepts the `afterFoundGet`
+   parameter, with the same meaning as the `getPage` parameter of the
+   same name. (#4364)
+ - twisted.web.xmlrpc.Proxy constructor now takes additional 'timeout'
+   and 'reactor' arguments. The 'timeout' argument defaults to 30
+   seconds. (#4741)
+ - Twisted Web now has a templating system, twisted.web.template,
+   which is a direct, simplified derivative of Divmod Nevow. (#4939)
+
+Bugfixes
+--------
+ - HTTPPageGetter now adds the port to the host header if it is not
+   the default for that scheme. (#3857)
+ - twisted.web.http.Request.write now raises an exception if it is
+   called after response generation has already finished. (#4317)
+ - twisted.web.client.HTTPPageGetter and twisted.web.client.getPage
+   now no longer make two requests when using afterFoundGet. (#4760)
+ - twisted.web.twcgi no longer adds an extra "content-type" header to
+   CGI responses. (#4786)
+ - twisted.web will now properly specify an encoding (UTF-8) on error,
+   redirect, and directory listing pages, so that IE7 and previous
+   will not improperly guess the 'utf7' encoding in these cases.
+   Please note that Twisted still sets a *default* content-type of
+   'text/html', and you shouldn't rely on that: you should set the
+   encoding appropriately in your application. (#4900)
+ - twisted.web.http.Request.setHost now sets the port in the host
+   header if it is not the default. (#4918)
+ - default NOT_IMPLEMENTED and NOT_ALLOWED pages now quote the request
+   method and URI respectively, to protect against browsers which
+   don't quote those values for us. (#4978)
+
+Improved Documentation
+----------------------
+ - The XML-RPC howto now includes an example demonstrating how to
+   access the HTTP request object in a server-side XML-RPC method.
+   (#4732)
+ - The Twisted Web client howto now uses the correct, public name for
+   twisted.web.client.Response. (#4769)
+ - Some broken links were fixed, descriptions were updated, and new
+   API links were added in the Resource Templating documentation
+   (resource-templates.xhtml) (#4968)
+
+Other
+-----
+ - #2271, #2386, #4162, #4733, #4855, #4911, #4973
+
+
+Twisted Web 10.2.0 (2010-11-29)
+===============================
+
+Features
+--------
+ - twisted.web.xmlrpc.XMLRPC.xmlrpc_* methods can now be decorated
+   using withRequest to cause them to be passed the HTTP request
+   object. (#3073)
+
+Bugfixes
+--------
+ - twisted.web.xmlrpc.QueryProtocol.handleResponse now disconnects
+   from the server, meaning that Twisted XML-RPC clients disconnect
+   from the server as soon as they receive a response, rather than
+   relying on the server to disconnect. (#2518)
+ - twisted.web.twcgi now generates responses containing all
+   occurrences of duplicate headers produced by CGI scripts, not just
+   the last value. (#4742)
+
+Deprecations and Removals
+-------------------------
+ - twisted.web.trp, which has been deprecated since Twisted 9.0, was
+   removed. (#4299)
+
+Other
+-----
+ - #4576, #4577, #4709, #4723
+
+
+Twisted Web 10.1.0 (2010-06-27)
+===============================
+
+Features
+--------
+ - twisted.web.xmlrpc.XMLRPC and twisted.web.xmlrpc.Proxy now expose
+   xmlrpclib's support of datetime.datetime objects if useDateTime is
+   set to True. (#3219)
+ - HTTP11ClientProtocol now has an abort() method for cancelling an
+   outstanding request by closing the connection before receiving the
+   entire response. (#3811)
+ - twisted.web.http_headers.Headers initializer now rejects
+   incorrectly structured dictionaries. (#4022)
+ - twisted.web.client.Agent now supports HTTPS URLs. (#4023)
+ - twisted.web.xmlrpc.Proxy.callRemote now returns a Deferred which
+   can be cancelled to abort the attempted XML-RPC call. (#4377)
+
+Bugfixes
+--------
+ - twisted.web.guard now logs out avatars even if a request completes
+   with an error. (#4411)
+ - twisted.web.xmlrpc.XMLRPC will now no longer trigger a RuntimeError
+   by trying to write responses to closed connections. (#4423)
+
+Improved Documentation
+----------------------
+ - Fix broken links to deliverBody and iweb.UNKNOWN_LENGTH in
+   doc/web/howto/client.xhtml. (#4507)
+
+Deprecations and Removals
+-------------------------
+ - twisted.web.twcgi.PHP3Script and twisted.web.twcgi.PHPScript are
+   now deprecated. (#516)
+
+Other
+-----
+ - #4403, #4452
+
+
+Twisted Web 10.0.0 (2010-03-01)
+===============================
+
+Features
+--------
+ - Twisted Web in 60 Seconds, a series of short tutorials with self-
+   contained examples on a range of common web topics, is now a part
+   of the Twisted Web howto documentation. (#4192)
+
+Bugfixes
+--------
+ - Data and File from twisted.web.static and
+   twisted.web.distrib.UserDirectory will now only generate a 200
+   response for GET or HEAD requests.
+   twisted.web.client.HTTPPageGetter will no longer ignore the case of
+   a request method when considering whether to apply special HEAD
+   processing to a response. (#446)
+
+ - twisted.web.http.HTTPClient now supports multi-line headers.
+   (#2062)
+
+ - Resources served via twisted.web.distrib will no longer encounter a
+   Banana error when writing more than 640kB at once to the request
+   object. (#3212)
+
+ - The Error, PageRedirect, and InfiniteRedirection exceptions in
+   twisted.web now initialize an empty message parameter by mapping
+   the HTTP status code parameter to a descriptive string. Previously
+   the lookup would always fail, leaving message empty.  (#3806)
+
+ - The 'wsgi.input' WSGI environment object now supports -1 and None
+   as arguments to the read and readlines methods. (#4114)
+
+ - twisted.web.wsgi doesn't unquote QUERY_STRING anymore, thus
+   complying with the WSGI reference implementation. (#4143)
+
+ - The HTTP proxy will no longer pass on keep-alive request headers
+   from the client, preventing pages from loading then "hanging"
+   (leaving the connection open with no hope of termination). (#4179)
+
+Deprecations and Removals
+-------------------------
+ - Remove '--static' option from twistd web, that served as an alias
+   for the '--path' option. (#3907)
+
+Other
+-----
+ - #3784, #4216, #4242
+
+
+Twisted Web 9.0.0 (2009-11-24)
+==============================
+
+Features
+--------
+ - There is now an iweb.IRequest interface which specifies the interface that
+   request objects provide (#3416)
+ - downloadPage now supports the same cookie, redirect, and timeout features
+   that getPage supports (#2971)
+ - A chapter about WSGI has been added to the twisted.web documentation (#3510)
+ - The HTTP auth support in the web server now allows anonymous sessions by
+   logging in with ANONYMOUS credentials when no Authorization header is
+   provided in a request (#3924, #3936)
+ - HTTPClientFactory now accepts a parameter to enable a common deviation from
+   the HTTP 1.1 standard by responding to redirects in a POSTed request with a
+   GET instead of another POST (#3624)
+ - A new basic HTTP/1.1 client API is included in twisted.web.client.Agent
+   (#886, #3987)
+
+Fixes
+-----
+ - Requests for "insecure" children of a static.File (such as paths containing
+   encoded directory separators) will now result in a 404 instead of a 500
+   (#3549, #3469)
+ - When specifying a followRedirect argument to the getPage function, the state
+   of redirect-following for other getPage calls should now be unaffected.  It
+   was previously overwriting a class attribute which would affect outstanding
+   getPage calls (#3192)
+ - Downloading an URL of the form "http://example.com:/" will now work,
+   ignoring the extraneous colon (#2402)
+ - microdom's appendChild method will no longer issue a spurious warning, and
+   microdom's methods in general should now issue more meaningful exceptions
+   when invalid parameters are passed (#3421)
+ - WSGI applications will no longer have spurious Content-Type headers added to
+   their responses by the twisted.web server. In addition, WSGI applications
+   will no longer be able to specify the server-restricted headers Server and
+   Date (#3569)
+ - http_headers.Headers now normalizes the case of raw headers passed directly
+   to it in the same way that it normalizes the headers passed to setRawHeaders
+   (#3557)
+ - The distrib module no longer relies on the deprecated woven package (#3559)
+ - twisted.web.domhelpers now works with both microdom and minidom (#3600)
+ - twisted.web servers will now ignore invalid If-Modified-Since headers instead
+   of returning a 500 error (#3601)
+ - Certain request-bound memory and file resources are cleaned up slightly
+   sooner by the request when the connection is lost (#1621, #3176)
+ - xmlrpclib.DateTime objects should now correctly round-trip over twisted.web's
+   XMLRPC support in all supported versions of Python, and errors during error
+   serialization will no longer hang a twisted.web XMLRPC response (#2446)
+ - request.content should now always be seeked to the beginning when
+   request.process is called, so application code should never need to seek 
+   back manually (#3585)
+ - Fetching a child of static.File with a double-slash in the URL (such as
+   "example//foo.html") should now return a 404 instead of a traceback and
+   500 error (#3631)
+ - downloadPage will now fire a Failure on its returned Deferred instead of
+   indicating success when the connection is prematurely lost (#3645)
+ - static.File will now provide a 404 instead of a 500 error when it was
+   constructed with a non-existent file (#3634)
+ - microdom should now serialize namespaces correctly (#3672)
+ - The HTTP Auth support resource wrapper should no longer corrupt requests and
+   cause them to skip a segment in the request path (#3679)
+ - The twisted.web WSGI support should now include leading slashes in PATH_INFO,
+   and SCRIPT_NAME will be empty if the application is at the root of the
+   resource tree. This means that WSGI applications should no longer generate
+   URLs with double-slashes in them even if they naively concatenate the values
+   (#3721)
+ - WSGI applications should now receive the requesting client's IP in the
+   REMOTE_ADDR environment variable (#3730)
+ - The distrib module should work again. It was unfortunately broken with the
+   refactoring of twisted.web's header support (#3697)
+ - static.File now supports multiple ranges specified in the Range header
+   (#3574)
+ - static.File should now generate a correct Content-Length value when the
+   requested Range value doesn't fit entirely within the file's contents (#3814)
+ - Attempting to call request.finish() after the connection has been lost will
+   now immediately raise a RuntimeError (#4013)
+ - An HTTP-auth resource should now be able to directly render the wrapped
+   avatar, whereas before it would only allow retrieval of child resources
+   (#4014)
+ - twisted.web's wsgi support should no longer attempt to call request.finish
+   twice, which would cause errors in certain cases (#4025)
+ - WSGI applications should now be able to handle requests with large bodies
+   (#4029)
+ - Exceptions raised from WSGI applications should now more reliably be turned
+   into 500 errors on the HTTP level (#4019)
+ - DeferredResource now correctly passes through exceptions raised from the
+   wrapped resource, instead of turning them all into 500 errors (#3932)
+ - Agent.request now generates a Host header when no headers are passed at
+   all (#4131)
+
+Deprecations and Removals
+-------------------------
+ - The unmaintained and untested twisted.web.monitor module was removed (#2763)
+ - The twisted.web.woven package has been removed (#1522)
+ - All of the error resources in twisted.web.error are now in
+   twisted.web.resource, and accessing them through twisted.web.error is now
+   deprecated (#3035)
+ - To facilitate a simplification of the timeout logic in server.Session,
+   various things have been deprecated (#3457)
+   - the loopFactory attribute is now ignored
+   - the checkExpired method now does nothing
+   - the lifetime parameter to startCheckingExpiration is now ignored
+ - The twisted.web.trp module is now deprecated (#2030)
+
+Other
+-----
+ - #2763, #3540, #3575, #3610, #3605, #1176, #3539, #3750, #3761, #3779, #2677,
+   #3782, #3904, #3919, #3418, #3990, #1404, #4050
+
+
+Web 8.2.0 (2008-12-16)
+======================
+
+Features
+--------
+ - The web server can now deal with multi-value headers in the new attributes of
+   Request, requestHeaders and responseHeaders (#165)
+ - There is now a resource-wrapper which implements HTTP Basic and Digest auth
+   in terms of twisted.cred (#696)
+ - It's now possible to limit the number of redirects that client.getPage will
+   follow (#2412)
+ - The directory-listing code no longer uses Woven (#3257)
+ - static.File now supports Range headers with a single range (#1493)
+ - twisted.web now has a rudimentary WSGI container (#2753)
+ - The web server now supports chunked encoding in requests (#3385)
+
+Fixes
+-----
+ - The xmlrpc client now raises an error when the server sends an empty
+   response (#3399)
+ - HTTPPageGetter no longer duplicates default headers when they're explicitly
+   overridden in the headers parameter (#1382)
+ - The server will no longer timeout clients which are still sending request
+   data (#1903)
+ - microdom's isEqualToNode now returns False when the nodes aren't equal
+   (#2542)
+
+Deprecations and Removals
+-------------------------
+
+ - Request.headers and Request.received_headers are not quite deprecated, but
+   they are discouraged in favor of requestHeaders and responseHeaders (#165)
+
+Other
+-----
+ - #909, #687, #2938, #1152, #2930, #2025, #2683, #3471
+
+
+8.1.0 (2008-05-18)
+==================
+
+Fixes
+-----
+
+ - Fixed an XMLRPC bug whereby sometimes a callRemote Deferred would
+   accidentally be fired twice when a connection was lost during the handling of
+   a response (#3152)
+ - Fixed a bug in the "Using Twisted Web" document which prevented an example
+   resource from being renderable (#3147)
+ - The deprecated mktap API is no longer used (#3127)
+
+
+8.0.0 (2008-03-17)
+==================
+
+Features
+--------
+ - Add support to twisted.web.client.getPage for the HTTP HEAD method. (#2750)
+
+Fixes
+-----
+ - Set content-type in xmlrpc responses to "text/xml" (#2430)
+ - Add more error checking in the xmlrpc.XMLRPC render method, and enforce
+   POST requests. (#2505)
+ - Reject unicode input to twisted.web.client._parse to reject invalid
+   unicode URLs early. (#2628)
+ - Correctly re-quote URL path segments when generating an URL string to
+   return from Request.prePathURL. (#2934)
+ - Make twisted.web.proxy.ProxyClientFactory close the connection when
+   reporting a 501 error. (#1089)
+ - Fix twisted.web.proxy.ReverseProxyResource to specify the port in the
+   host header if different from 80. (#1117)
+ - Change twisted.web.proxy.ReverseProxyResource so that it correctly encodes
+   the request URI it sends on to the server for which it is a proxy. (#3013)
+ - Make "twistd web --personal" use PBServerFactory (#2681)
+
+Misc
+----
+ - #1996, #2382, #2211, #2633, #2634, #2640, #2752, #238, #2905
+
+
+0.7.0 (2007-01-02)
+==================
+
+Features
+--------
+ - Python 2.5 is now supported (#1867)
+ - twisted.web.xmlrpc now supports the <nil/> xml-rpc extension type
+   in both the server and the client (#469)
+
+Fixes
+-----
+ - Microdom and SUX now manage certain malformed XML more resiliently
+   (#1984, #2225, #2298)
+ - twisted.web.client.getPage can now deal with an URL of the form
+   "http://example.com" (no trailing slash) (#1080)
+ - The HTTP server now allows (invalid) URLs with multiple question
+   marks (#1550)
+ - '=' can now be in the value of a cookie (#1051)
+ - Microdom now correctly handles xmlns="" (#2184)
+
+Deprecations and Removals
+-------------------------
+ - websetroot was removed, because it wasn't working anyway (#945)
+ - woven.guard no longer supports the old twisted.cred API (#1440)
+
+Other
+-----
+The following changes are minor or closely related to other changes.
+
+ - #1636, #1637, #1638, #1936, #1883, #447
+
+
+0.6.0 (2006-05-21)
+==================
+
+Features
+--------
+ - Basic auth support for the XMLRPC client (#1474).
+
+Fixes
+-----
+ - More correct datetime parsing.
+ - Efficiency improvements (#974)
+ - Handle popular non-RFC compliant formats for If-Modified-Since
+   headers (#976).
+ - Improve support for certain buggy CGI scripts.
+ - CONTENT_LENGTH is now available to CGI scripts.
+ - Support for even worse HTML in microdom (#1358).
+ - Trying to view a user's home page when the user doesn't have a
+   ~/public_html no longer displays a traceback (#551).
+ - Misc: #543, #1011, #1005, #1287, #1337, #1383, #1079, #1492, #1189,
+   #737, #872.
+
+
+0.5.0
+=====
+ - Client properly reports timeouts as errors
+ - "Socially deprecate" woven
+ - Fix memory leak in _c_urlarg library
+ - Stop using _c_urlarg library
+ - Fix 'gzip' and 'bzip2' content-encodings
+ - Escape log entries so remote user cannot corrupt the log
+ - Commented out range support because it's broken
+ - Fix HEAD responses without content-length
diff --git a/ThirdParty/Twisted/twisted/web/topfiles/README b/ThirdParty/Twisted/twisted/web/topfiles/README
new file mode 100644
index 0000000..5d56a90
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/topfiles/README
@@ -0,0 +1,6 @@
+Twisted Web 12.3.0
+
+Twisted Web depends on Twisted Core.  pyOpenSSL
+(<http://launchpad.net/pyopenssl>) is also required for HTTPS.  SOAPpy
+(<http://pywebsvcs.sourceforge.net/>) is required for SOAP support.  For Quixote
+resource templates, Quixote (<http://www.quixote.ca/>) is required.
diff --git a/ThirdParty/Twisted/twisted/web/topfiles/setup.py b/ThirdParty/Twisted/twisted/web/topfiles/setup.py
new file mode 100644
index 0000000..e54ea7e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/topfiles/setup.py
@@ -0,0 +1,30 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+try:
+    from twisted.python import dist
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+if __name__ == '__main__':
+    dist.setup(
+        twisted_subproject="web",
+        scripts=dist.getScripts("web"),
+        # metadata
+        name="Twisted Web",
+        description="Twisted web server, programmable in Python.",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python at twistedmatrix.com",
+        maintainer="James Knight",
+        url="http://twistedmatrix.com/trac/wiki/TwistedWeb",
+        license="MIT",
+        long_description="""\
+Twisted Web is a complete web server, aimed at hosting web
+applications using Twisted and Python, but fully able to serve static
+pages, also.
+""",
+        )
diff --git a/ThirdParty/Twisted/twisted/web/twcgi.py b/ThirdParty/Twisted/twisted/web/twcgi.py
new file mode 100644
index 0000000..5ab580d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/twcgi.py
@@ -0,0 +1,299 @@
+# -*- test-case-name: twisted.web.test.test_cgi -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+I hold resource classes and helper classes that deal with CGI scripts.
+"""
+
+# System Imports
+import string
+import os
+import urllib
+
+# Twisted Imports
+from twisted.web import http
+from twisted.internet import reactor, protocol
+from twisted.spread import pb
+from twisted.python import log, filepath
+from twisted.web import resource, server, static
+
+
+class CGIDirectory(resource.Resource, filepath.FilePath):
+    def __init__(self, pathname):
+        resource.Resource.__init__(self)
+        filepath.FilePath.__init__(self, pathname)
+
+    def getChild(self, path, request):
+        fnp = self.child(path)
+        if not fnp.exists():
+            return static.File.childNotFound
+        elif fnp.isdir():
+            return CGIDirectory(fnp.path)
+        else:
+            return CGIScript(fnp.path)
+        return resource.NoResource()
+
+    def render(self, request):
+        notFound = resource.NoResource(
+            "CGI directories do not support directory listing.")
+        return notFound.render(request)
+
+
+
+class CGIScript(resource.Resource):
+    """
+    L{CGIScript} is a resource which runs child processes according to the CGI
+    specification.
+
+    The implementation is complex because it requires asynchronous IPC with an
+    external process that speaks an unpleasant protocol.
+    """
+    isLeaf = 1
+    def __init__(self, filename, registry=None):
+        """
+        Initialize, with the name of a CGI script file.
+        """
+        self.filename = filename
+
+
+    def render(self, request):
+        """
+        Do various things to conform to the CGI specification.
+
+        I will set up the usual slew of environment variables, then spin off a
+        process.
+
+        @type request: L{twisted.web.http.Request}
+        @param request: An HTTP request.
+        """
+        script_name = "/"+string.join(request.prepath, '/')
+        serverName = string.split(request.getRequestHostname(), ':')[0]
+        env = {"SERVER_SOFTWARE":   server.version,
+               "SERVER_NAME":       serverName,
+               "GATEWAY_INTERFACE": "CGI/1.1",
+               "SERVER_PROTOCOL":   request.clientproto,
+               "SERVER_PORT":       str(request.getHost().port),
+               "REQUEST_METHOD":    request.method,
+               "SCRIPT_NAME":       script_name, # XXX
+               "SCRIPT_FILENAME":   self.filename,
+               "REQUEST_URI":       request.uri,
+        }
+
+        client = request.getClient()
+        if client is not None:
+            env['REMOTE_HOST'] = client
+        ip = request.getClientIP()
+        if ip is not None:
+            env['REMOTE_ADDR'] = ip
+        pp = request.postpath
+        if pp:
+            env["PATH_INFO"] = "/"+string.join(pp, '/')
+
+        if hasattr(request, "content"):
+            # request.content is either a StringIO or a TemporaryFile, and
+            # the file pointer is sitting at the beginning (seek(0,0))
+            request.content.seek(0,2)
+            length = request.content.tell()
+            request.content.seek(0,0)
+            env['CONTENT_LENGTH'] = str(length)
+
+        qindex = string.find(request.uri, '?')
+        if qindex != -1:
+            qs = env['QUERY_STRING'] = request.uri[qindex+1:]
+            if '=' in qs:
+                qargs = []
+            else:
+                qargs = [urllib.unquote(x) for x in qs.split('+')]
+        else:
+            env['QUERY_STRING'] = ''
+            qargs = []
+
+        # Propagate HTTP headers
+        for title, header in request.getAllHeaders().items():
+            envname = string.upper(string.replace(title, '-', '_'))
+            if title not in ('content-type', 'content-length'):
+                envname = "HTTP_" + envname
+            env[envname] = header
+        # Propagate our environment
+        for key, value in os.environ.items():
+            if key not in env:
+                env[key] = value
+        # And they're off!
+        self.runProcess(env, request, qargs)
+        return server.NOT_DONE_YET
+
+
+    def runProcess(self, env, request, qargs=[]):
+        """
+        Run the cgi script.
+
+        @type env: A C{dict} of C{str}, or C{None}
+        @param env: The environment variables to pass to the process that will
+            get spawned. See
+            L{twisted.internet.interfaces.IReactorProcess.spawnProcess} for more
+            information about environments and process creation.
+
+        @type request: L{twisted.web.http.Request}
+        @param request: An HTTP request.
+
+        @type qargs: A C{list} of C{str}
+        @param qargs: The command line arguments to pass to the process that
+            will get spawned.
+        """
+        p = CGIProcessProtocol(request)
+        reactor.spawnProcess(p, self.filename, [self.filename] + qargs, env,
+                             os.path.dirname(self.filename))
+
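+# A minimal usage sketch (assumed, not part of the upstream module) for the
+# CGIScript resource documented above.  The script path and port are
+# hypothetical.
+#
+#   from twisted.internet import reactor
+#   from twisted.web import resource, server
+#   from twisted.web.twcgi import CGIScript
+#
+#   root = resource.Resource()
+#   root.putChild("hello", CGIScript("/usr/lib/cgi-bin/hello.cgi"))
+#   reactor.listenTCP(8080, server.Site(root))
+#   reactor.run()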
+
+
+class FilteredScript(CGIScript):
+    """
+    I am a special version of a CGI script, that uses a specific executable.
+
+    This is useful for interfacing with other scripting languages that adhere to
+    the CGI standard. My C{filter} attribute specifies what executable to run,
+    and my C{filename} init parameter names the script that is passed as the
+    first argument to that executable.
+
+    To customize me for a particular location of a CGI interpreter, override
+    C{filter}.
+
+    @type filter: C{str}
+    @ivar filter: The absolute path to the executable.
+    """
+
+    filter = '/usr/bin/cat'
+
+
+    def runProcess(self, env, request, qargs=[]):
+        """
+        Run a script through the C{filter} executable.
+
+        @type env: A C{dict} of C{str}, or C{None}
+        @param env: The environment variables to pass to the process that will
+            get spawned. See
+            L{twisted.internet.interfaces.IReactorProcess.spawnProcess} for more
+            information about environments and process creation.
+
+        @type request: L{twisted.web.http.Request}
+        @param request: An HTTP request.
+
+        @type qargs: A C{list} of C{str}
+        @param qargs: The command line arguments to pass to the process that
+            will get spawned.
+        """
+        p = CGIProcessProtocol(request)
+        reactor.spawnProcess(p, self.filter,
+                             [self.filter, self.filename] + qargs, env,
+                             os.path.dirname(self.filename))
+
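+# A minimal sketch (assumed, not part of the upstream module) of customising
+# FilteredScript for a particular interpreter, as the docstring above suggests:
+# override the ``filter`` attribute with the interpreter's absolute path (the
+# path below is hypothetical).
+#
+#   class PerlScript(FilteredScript):
+#       # Run the wrapped script through the Perl interpreter.
+#       filter = '/usr/bin/perl'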
+
+
+class CGIProcessProtocol(protocol.ProcessProtocol, pb.Viewable):
+    handling_headers = 1
+    headers_written = 0
+    headertext = ''
+    errortext = ''
+
+    # Remotely relay producer interface.
+
+    def view_resumeProducing(self, issuer):
+        self.resumeProducing()
+
+    def view_pauseProducing(self, issuer):
+        self.pauseProducing()
+
+    def view_stopProducing(self, issuer):
+        self.stopProducing()
+
+    def resumeProducing(self):
+        self.transport.resumeProducing()
+
+    def pauseProducing(self):
+        self.transport.pauseProducing()
+
+    def stopProducing(self):
+        self.transport.loseConnection()
+
+    def __init__(self, request):
+        self.request = request
+
+    def connectionMade(self):
+        self.request.registerProducer(self, 1)
+        self.request.content.seek(0, 0)
+        content = self.request.content.read()
+        if content:
+            self.transport.write(content)
+        self.transport.closeStdin()
+
+    def errReceived(self, error):
+        self.errortext = self.errortext + error
+
+    def outReceived(self, output):
+        """
+        Handle a chunk of input
+        """
+        # First, make sure that the headers from the script are sorted
+        # out (we'll want to do some parsing on these later.)
+        if self.handling_headers:
+            text = self.headertext + output
+            headerEnds = []
+            for delimiter in '\n\n','\r\n\r\n','\r\r', '\n\r\n':
+                headerend = text.find(delimiter)
+                if headerend != -1:
+                    headerEnds.append((headerend, delimiter))
+            if headerEnds:
+                # The script is entirely in control of response headers; disable the
+                # default Content-Type value normally provided by
+                # twisted.web.server.Request.
+                self.request.defaultContentType = None
+
+                headerEnds.sort()
+                headerend, delimiter = headerEnds[0]
+                self.headertext = text[:headerend]
+                # This is a final version of the header text.
+                linebreak = delimiter[:len(delimiter)//2]
+                headers = self.headertext.split(linebreak)
+                for header in headers:
+                    br = header.find(': ')
+                    if br == -1:
+                        log.msg( 'ignoring malformed CGI header: %s' % header )
+                    else:
+                        headerName = header[:br].lower()
+                        headerText = header[br+2:]
+                        if headerName == 'location':
+                            self.request.setResponseCode(http.FOUND)
+                        if headerName == 'status':
+                            try:
+                                statusNum = int(headerText[:3]) #"XXX <description>" sometimes happens.
+                            except:
+                                log.msg( "malformed status header" )
+                            else:
+                                self.request.setResponseCode(statusNum)
+                        else:
+                            # Don't allow the application to control these required headers.
+                            if headerName.lower() not in ('server', 'date'):
+                                self.request.responseHeaders.addRawHeader(headerName, headerText)
+                output = text[headerend+len(delimiter):]
+                self.handling_headers = 0
+            if self.handling_headers:
+                self.headertext = text
+        if not self.handling_headers:
+            self.request.write(output)
+
+    def processEnded(self, reason):
+        if reason.value.exitCode != 0:
+            log.msg("CGI %s exited with exit code %s" %
+                    (self.request.uri, reason.value.exitCode))
+        if self.errortext:
+            log.msg("Errors from CGI %s: %s" % (self.request.uri, self.errortext))
+        if self.handling_headers:
+            log.msg("Premature end of headers in %s: %s" % (self.request.uri, self.headertext))
+            self.request.write(
+                resource.ErrorPage(http.INTERNAL_SERVER_ERROR,
+                                   "CGI Script Error",
+                                   "Premature end of script headers.").render(self.request))
+        self.request.unregisterProducer()
+        self.request.finish()
diff --git a/ThirdParty/Twisted/twisted/web/util.py b/ThirdParty/Twisted/twisted/web/util.py
new file mode 100644
index 0000000..0c6cdb6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/util.py
@@ -0,0 +1,433 @@
+# -*- test-case-name: twisted.web.test.test_util -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An assortment of web server-related utilities.
+"""
+
+__all__ = [
+    "redirectTo", "Redirect", "ChildRedirector", "ParentRedirect",
+    "DeferredResource", "htmlIndent", "FailureElement", "formatFailure"]
+
+from cStringIO import StringIO
+import linecache
+import string
+import types
+
+from twisted.python.filepath import FilePath
+from twisted.python.reflect import fullyQualifiedName
+from twisted.python.deprecate import deprecatedModuleAttribute
+from twisted.python.versions import Version
+from twisted.python.modules import getModule
+
+from twisted.web import html, resource
+from twisted.web.template import (
+    TagLoader, XMLFile, Element, renderer, flattenString)
+
+
+def redirectTo(URL, request):
+    """
+    Generate a redirect to the given location.
+
+    @param URL: A C{str} giving the location to which to redirect.
+    @type URL: C{str}
+
+    @param request: The request object to use to generate the redirect.
+    @type request: L{IRequest<twisted.web.iweb.IRequest>} provider
+
+    @raise TypeError: If the type of C{URL} is C{unicode} instead of C{str}.
+
+    @return: A C{str} containing HTML which tries to convince the client agent
+        to visit the new location even if it doesn't respect the I{FOUND}
+        response code.  This is intended to be returned from a render method,
+        eg::
+
+            def render_GET(self, request):
+                return redirectTo("http://example.com/", request)
+    """
+    if isinstance(URL, unicode):
+        raise TypeError("Unicode object not allowed as URL")
+    request.setHeader("content-type", "text/html; charset=utf-8")
+    request.redirect(URL)
+    return """
+<html>
+    <head>
+        <meta http-equiv=\"refresh\" content=\"0;URL=%(url)s\">
+    </head>
+    <body bgcolor=\"#FFFFFF\" text=\"#000000\">
+    <a href=\"%(url)s\">click here</a>
+    </body>
+</html>
+""" % {'url': URL}
+
+class Redirect(resource.Resource):
+
+    isLeaf = 1
+
+    def __init__(self, url):
+        resource.Resource.__init__(self)
+        self.url = url
+
+    def render(self, request):
+        return redirectTo(self.url, request)
+
+    def getChild(self, name, request):
+        return self
+
+class ChildRedirector(Redirect):
+    isLeaf = 0
+    def __init__(self, url):
+        # XXX is this enough?
+        if ((url.find('://') == -1)
+            and (not url.startswith('..'))
+            and (not url.startswith('/'))):
+            raise ValueError("It seems you've given me a redirect (%s) that is a child of myself! That's not good, it'll cause an infinite redirect." % url)
+        Redirect.__init__(self, url)
+
+    def getChild(self, name, request):
+        newUrl = self.url
+        if not newUrl.endswith('/'):
+            newUrl += '/'
+        newUrl += name
+        return ChildRedirector(newUrl)
+
+
+from twisted.python import urlpath
+
+class ParentRedirect(resource.Resource):
+    """
+    I redirect to URLPath.here().
+    """
+    isLeaf = 1
+    def render(self, request):
+        return redirectTo(urlpath.URLPath.fromRequest(request).here(), request)
+
+    def getChild(self, request):
+        return self
+
+
+class DeferredResource(resource.Resource):
+    """
+    I wrap up a Deferred that will eventually result in a Resource
+    object.
+    """
+    isLeaf = 1
+
+    def __init__(self, d):
+        resource.Resource.__init__(self)
+        self.d = d
+
+    def getChild(self, name, request):
+        return self
+
+    def render(self, request):
+        self.d.addCallback(self._cbChild, request).addErrback(
+            self._ebChild,request)
+        from twisted.web.server import NOT_DONE_YET
+        return NOT_DONE_YET
+
+    def _cbChild(self, child, request):
+        request.render(resource.getChildForRequest(child, request))
+
+    def _ebChild(self, reason, request):
+        request.processingFailed(reason)
+        return reason
+
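+# A minimal usage sketch (assumed, not part of the upstream module) for
+# DeferredResource: wrap a Deferred that will eventually fire with the real
+# resource, e.g. one looked up asynchronously.  lookupResource() is a
+# hypothetical application function returning a Deferred of an IResource
+# provider.
+#
+#   class AsyncDispatcher(resource.Resource):
+#       def getChild(self, name, request):
+#           return DeferredResource(lookupResource(name))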
+
+stylesheet = ""
+
+def htmlrepr(x):
+    return htmlReprTypes.get(type(x), htmlUnknown)(x)
+
+def saferepr(x):
+    try:
+        rx = repr(x)
+    except:
+        rx = "<repr failed! %s instance at %s>" % (x.__class__, id(x))
+    return rx
+
+def htmlUnknown(x):
+    return '<code>'+html.escape(saferepr(x))+'</code>'
+
+def htmlDict(d):
+    io = StringIO()
+    w = io.write
+    w('<div class="dict"><span class="heading">Dictionary instance @ %s</span>' % hex(id(d)))
+    w('<table class="dict">')
+    for k, v in d.items():
+
+        if k == '__builtins__':
+            v = 'builtin dictionary'
+        w('<tr><td class="dictKey">%s</td><td class="dictValue">%s</td></tr>' % (htmlrepr(k), htmlrepr(v)))
+    w('</table></div>')
+    return io.getvalue()
+
+def htmlList(l):
+    io = StringIO()
+    w = io.write
+    w('<div class="list"><span class="heading">List instance @ %s</span>' % hex(id(l)))
+    for i in l:
+        w('<div class="listItem">%s</div>' % htmlrepr(i))
+    w('</div>')
+    return io.getvalue()
+
+def htmlInst(i):
+    if hasattr(i, "__html__"):
+        s = i.__html__()
+    else:
+        s = html.escape(saferepr(i))
+    return '''<div class="instance"><span class="instanceName">%s instance @ %s</span>
+              <span class="instanceRepr">%s</span></div>
+              ''' % (i.__class__, hex(id(i)), s)
+
+def htmlString(s):
+    return html.escape(saferepr(s))
+
+def htmlFunc(f):
+    return ('<div class="function">' +
+            html.escape("function %s in file %s at line %s" %
+                        (f.__name__, f.func_code.co_filename,
+                         f.func_code.co_firstlineno))+
+            '</div>')
+
+htmlReprTypes = {types.DictType: htmlDict,
+                 types.ListType: htmlList,
+                 types.InstanceType: htmlInst,
+                 types.StringType: htmlString,
+                 types.FunctionType: htmlFunc}
+
+
+
+def htmlIndent(snippetLine):
+    ret = string.replace(string.replace(html.escape(string.rstrip(snippetLine)),
+                                  '  ', '&nbsp; '),
+                   '\t', '&nbsp; &nbsp; &nbsp; &nbsp; ')
+    return ret
+
+
+
+class _SourceLineElement(Element):
+    """
+    L{_SourceLineElement} is an L{IRenderable} which can render a single line of
+    source code.
+
+    @ivar number: A C{int} giving the line number of the source code to be
+        rendered.
+    @ivar source: A C{str} giving the source code to be rendered.
+    """
+    def __init__(self, loader, number, source):
+        Element.__init__(self, loader)
+        self.number = number
+        self.source = source
+
+
+    @renderer
+    def sourceLine(self, request, tag):
+        """
+        Render the line of source as a child of C{tag}.
+        """
+        return tag(self.source.replace('  ', u' \N{NO-BREAK SPACE}'))
+
+
+    @renderer
+    def lineNumber(self, request, tag):
+        """
+        Render the line number as a child of C{tag}.
+        """
+        return tag(str(self.number))
+
+
+
+class _SourceFragmentElement(Element):
+    """
+    L{_SourceFragmentElement} is an L{IRenderable} which can render several lines
+    of source code near the line number of a particular frame object.
+
+    @ivar frame: A L{Failure<twisted.python.failure.Failure>}-style frame object
+        for which to load a source line to render.  This is really a tuple
+        holding some information from a frame object.  See
+        L{Failure.frames<twisted.python.failure.Failure>} for specifics.
+    """
+    def __init__(self, loader, frame):
+        Element.__init__(self, loader)
+        self.frame = frame
+
+
+    def _getSourceLines(self):
+        """
+        Find the source line referenced by C{self.frame} and yield it, along
+        with the previous and following lines, in source line order.
+
+        @return: A generator which yields two-tuples.  Each tuple gives a source
+            line number and the contents of that source line.
+        """
+        filename = self.frame[1]
+        lineNumber = self.frame[2]
+        for snipLineNumber in range(lineNumber - 1, lineNumber + 2):
+            yield (snipLineNumber,
+                   linecache.getline(filename, snipLineNumber).rstrip())
+
+
+    @renderer
+    def sourceLines(self, request, tag):
+        """
+        Render the source line indicated by C{self.frame} and several
+        surrounding lines.  The active line will be given a I{class} of
+        C{"snippetHighlightLine"}.  Other lines will be given a I{class} of
+        C{"snippetLine"}.
+        """
+        for (lineNumber, sourceLine) in self._getSourceLines():
+            newTag = tag.clone()
+            if lineNumber == self.frame[2]:
+                cssClass = "snippetHighlightLine"
+            else:
+                cssClass = "snippetLine"
+            loader = TagLoader(newTag(**{"class": cssClass}))
+            yield _SourceLineElement(loader, lineNumber, sourceLine)
+
+
+
+class _FrameElement(Element):
+    """
+    L{_FrameElement} is an L{IRenderable} which can render details about one
+    frame from a L{Failure<twisted.python.failure.Failure>}.
+
+    @ivar frame: A L{Failure<twisted.python.failure.Failure>}-style frame object
+        for which to load a source line to render.  This is really a tuple
+        holding some information from a frame object.  See
+        L{Failure.frames<twisted.python.failure.Failure>} for specifics.
+    """
+    def __init__(self, loader, frame):
+        Element.__init__(self, loader)
+        self.frame = frame
+
+
+    @renderer
+    def filename(self, request, tag):
+        """
+        Render the name of the file this frame references as a child of C{tag}.
+        """
+        return tag(self.frame[1])
+
+
+    @renderer
+    def lineNumber(self, request, tag):
+        """
+        Render the source line number this frame references as a child of
+        C{tag}.
+        """
+        return tag(str(self.frame[2]))
+
+
+    @renderer
+    def function(self, request, tag):
+        """
+        Render the function name this frame references as a child of C{tag}.
+        """
+        return tag(self.frame[0])
+
+
+    @renderer
+    def source(self, request, tag):
+        """
+        Render the source code surrounding the line this frame references,
+        replacing C{tag}.
+        """
+        return _SourceFragmentElement(TagLoader(tag), self.frame)
+
+
+
+class _StackElement(Element):
+    """
+    L{_StackElement} renders an L{IRenderable} which can render a list of frames.
+    """
+    def __init__(self, loader, stackFrames):
+        Element.__init__(self, loader)
+        self.stackFrames = stackFrames
+
+
+    @renderer
+    def frames(self, request, tag):
+        """
+        Render the list of frames in this L{_StackElement}, replacing C{tag}.
+        """
+        return [
+            _FrameElement(TagLoader(tag.clone()), frame)
+            for frame
+            in self.stackFrames]
+
+
+
+class FailureElement(Element):
+    """
+    L{FailureElement} is an L{IRenderable} which can render detailed information
+    about a L{Failure<twisted.python.failure.Failure>}.
+
+    @ivar failure: The L{Failure<twisted.python.failure.Failure>} instance which
+        will be rendered.
+
+    @since: 12.1
+    """
+    loader = XMLFile(getModule(__name__).filePath.sibling("failure.xhtml"))
+
+    def __init__(self, failure, loader=None):
+        Element.__init__(self, loader)
+        self.failure = failure
+
+
+    @renderer
+    def type(self, request, tag):
+        """
+        Render the exception type as a child of C{tag}.
+        """
+        return tag(fullyQualifiedName(self.failure.type))
+
+
+    @renderer
+    def value(self, request, tag):
+        """
+        Render the exception value as a child of C{tag}.
+        """
+        return tag(str(self.failure.value))
+
+
+    @renderer
+    def traceback(self, request, tag):
+        """
+        Render all the frames in the wrapped
+        L{Failure<twisted.python.failure.Failure>}'s traceback stack, replacing
+        C{tag}.
+        """
+        return _StackElement(TagLoader(tag), self.failure.frames)
+
+
+
+def formatFailure(myFailure):
+    """
+    Construct an HTML representation of the given failure.
+
+    Consider using L{FailureElement} instead.
+
+    @type myFailure: L{Failure<twisted.python.failure.Failure>}
+
+    @rtype: C{str}
+    @return: A string containing the HTML representation of the given failure.
+    """
+    result = []
+    flattenString(None, FailureElement(myFailure)).addBoth(result.append)
+    if isinstance(result[0], str):
+        # Ensure the result string is all ASCII, for compatibility with the
+        # default encoding expected by browsers.
+        return result[0].decode('utf-8').encode('ascii', 'xmlcharrefreplace')
+    result[0].raiseException()
+
+
+_twelveOne = Version("Twisted", 12, 1, 0)
+
+for name in ["htmlrepr", "saferepr", "htmlUnknown", "htmlString", "htmlList",
+             "htmlDict", "htmlInst", "htmlFunc", "htmlIndent", "htmlReprTypes",
+             "stylesheet"]:
+    deprecatedModuleAttribute(
+        _twelveOne, "See twisted.web.template.", __name__, name)
+del name
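As an aside to the failure-rendering helpers above, the following sketch (not
part of the imported file) shows one way formatFailure might be used: capture
an exception as a Failure and turn it into an HTML snippet.  The renderCrash
name is invented for illustration.

    from twisted.python.failure import Failure
    from twisted.web.util import formatFailure

    def renderCrash():
        # Capture an exception as a Failure and format it as an HTML string.
        try:
            raise ValueError("example error")
        except ValueError:
            f = Failure()
        return formatFailure(f)  # ASCII str with XML character references
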
diff --git a/ThirdParty/Twisted/twisted/web/vhost.py b/ThirdParty/Twisted/twisted/web/vhost.py
new file mode 100644
index 0000000..1acee21
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/vhost.py
@@ -0,0 +1,135 @@
+# -*- test-case-name: twisted.web.
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+I am a virtual hosts implementation.
+"""
+
+# Twisted Imports
+from twisted.python import roots
+from twisted.web import resource
+
+
+class VirtualHostCollection(roots.Homogenous):
+    """Wrapper for virtual hosts collection.
+
+    This exists for configuration purposes.
+    """
+    entityType = resource.Resource
+
+    def __init__(self, nvh):
+        self.nvh = nvh
+
+    def listStaticEntities(self):
+        return self.nvh.hosts.items()
+
+    def getStaticEntity(self, name):
+        return self.nvh.hosts.get(name)
+
+    def reallyPutEntity(self, name, entity):
+        self.nvh.addHost(name, entity)
+
+    def delEntity(self, name):
+        self.nvh.removeHost(name)
+
+
+class NameVirtualHost(resource.Resource):
+    """I am a resource which represents named virtual hosts.
+    """
+
+    default = None
+
+    def __init__(self):
+        """Initialize.
+        """
+        resource.Resource.__init__(self)
+        self.hosts = {}
+
+    def listStaticEntities(self):
+        return resource.Resource.listStaticEntities(self) + [("Virtual Hosts", VirtualHostCollection(self))]
+
+    def getStaticEntity(self, name):
+        if name == "Virtual Hosts":
+            return VirtualHostCollection(self)
+        else:
+            return resource.Resource.getStaticEntity(self, name)
+
+    def addHost(self, name, resrc):
+        """Add a host to this virtual host.
+
+        This will take a host named `name', and map it to a resource
+        `resrc'.  For example, a setup for our virtual hosts would be::
+
+            nvh.addHost('divunal.com', divunalDirectory)
+            nvh.addHost('www.divunal.com', divunalDirectory)
+            nvh.addHost('twistedmatrix.com', twistedMatrixDirectory)
+            nvh.addHost('www.twistedmatrix.com', twistedMatrixDirectory)
+        """
+        self.hosts[name] = resrc
+
+    def removeHost(self, name):
+        """Remove a host."""
+        del self.hosts[name]
+
+    def _getResourceForRequest(self, request):
+        """(Internal) Get the appropriate resource for the given host.
+        """
+        hostHeader = request.getHeader('host')
+        if hostHeader == None:
+            return self.default or resource.NoResource()
+        else:
+            host = hostHeader.lower().split(':', 1)[0]
+        return (self.hosts.get(host, self.default)
+                or resource.NoResource("host %s not in vhost map" % repr(host)))
+
+    def render(self, request):
+        """Implementation of resource.Resource's render method.
+        """
+        resrc = self._getResourceForRequest(request)
+        return resrc.render(request)
+
+    def getChild(self, path, request):
+        """Implementation of resource.Resource's getChild method.
+        """
+        resrc = self._getResourceForRequest(request)
+        if resrc.isLeaf:
+            request.postpath.insert(0,request.prepath.pop(-1))
+            return resrc
+        else:
+            return resrc.getChildWithDefault(path, request)
+
+class _HostResource(resource.Resource):
+
+    def getChild(self, path, request):
+        if ':' in path:
+            host, port = path.split(':', 1)
+            port = int(port)
+        else:
+            host, port = path, 80
+        request.setHost(host, port)
+        prefixLen = 3+request.isSecure()+4+len(path)+len(request.prepath[-3])
+        request.path = '/'+'/'.join(request.postpath)
+        request.uri = request.uri[prefixLen:]
+        del request.prepath[:3]
+        return request.site.getResourceFor(request)
+
+
+class VHostMonsterResource(resource.Resource):
+
+    """
+    Use this to record the hostname and scheme (http vs. https) in the URL
+    without disturbing your web site. If you put this resource at the URL
+    http://foo.com/bar, then requests to
+    http://foo.com/bar/http/baz.com/something will be equivalent to
+    http://foo.com/something, except that the hostname the request appears
+    to be accessing will be "baz.com". So if "baz.com" redirects all
+    requests to foo.com, while foo.com is inaccessible from the outside,
+    redirects and URL generation will work correctly.
+    """
+    def getChild(self, path, request):
+        if path == 'http':
+            request.isSecure = lambda: 0
+        elif path == 'https':
+            request.isSecure = lambda: 1
+        return _HostResource()
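For orientation, here is a hypothetical wiring of the classes defined above
(the paths, hostnames, and port are invented): a NameVirtualHost dispatches on
the Host header, and a VHostMonsterResource mounted at /vhost lets the external
host and scheme be encoded in the path.

    from twisted.internet import reactor
    from twisted.web import server, static, vhost

    root = vhost.NameVirtualHost()
    root.default = static.Data("unknown host\n", "text/plain")
    root.addHost('example.com', static.File('/srv/www/example.com'))
    root.addHost('www.example.com', static.File('/srv/www/example.com'))
    # Requests to /vhost/http/<host>/... are rewritten by _HostResource.
    root.putChild('vhost', vhost.VHostMonsterResource())

    reactor.listenTCP(8080, server.Site(root))
    reactor.run()
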
diff --git a/ThirdParty/Twisted/twisted/web/wsgi.py b/ThirdParty/Twisted/twisted/web/wsgi.py
new file mode 100644
index 0000000..0918c4d
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/wsgi.py
@@ -0,0 +1,403 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+An implementation of the
+U{Web Server Gateway Interface<http://www.python.org/dev/peps/pep-0333/>}.
+"""
+
+__metaclass__ = type
+
+from sys import exc_info
+
+from zope.interface import implements
+
+from twisted.python.log import msg, err
+from twisted.python.failure import Failure
+from twisted.web.resource import IResource
+from twisted.web.server import NOT_DONE_YET
+from twisted.web.http import INTERNAL_SERVER_ERROR
+
+
+class _ErrorStream:
+    """
+    File-like object instances of which are used as the value for the
+    C{'wsgi.errors'} key in the C{environ} dictionary passed to the application
+    object.
+
+    This simply passes writes on to the L{logging<twisted.python.log>} system as
+    error events from the C{'wsgi'} system.  In the future, it may be desirable
+    to expose more information in the events it logs, such as the application
+    object which generated the message.
+    """
+    def write(self, bytes):
+        """
+        Generate an event for the logging system with the given bytes as the
+        message.
+
+        This is called in a WSGI application thread, not the I/O thread.
+        """
+        msg(bytes, system='wsgi', isError=True)
+
+
+    def writelines(self, iovec):
+        """
+        Join the given lines and pass them to C{write} to be handled in the
+        usual way.
+
+        This is called in a WSGI application thread, not the I/O thread.
+
+        @param iovec: A C{list} of C{'\\n'}-terminated C{str} which will be
+            logged.
+        """
+        self.write(''.join(iovec))
+
+
+    def flush(self):
+        """
+        Nothing is buffered, so flushing does nothing.  This method is required
+        to exist by PEP 333, though.
+
+        This is called in a WSGI application thread, not the I/O thread.
+        """
+
+
+
+class _InputStream:
+    """
+    File-like object instances of which are used as the value for the
+    C{'wsgi.input'} key in the C{environ} dictionary passed to the application
+    object.
+
+    This only exists to make the handling of C{readline(-1)} consistent across
+    different possible underlying file-like object implementations.  The other
+    supported methods pass through directly to the wrapped object.
+    """
+    def __init__(self, input):
+        """
+        Initialize the instance.
+
+        This is called in the I/O thread, not a WSGI application thread.
+        """
+        self._wrapped = input
+
+
+    def read(self, size=None):
+        """
+        Pass through to the underlying C{read}.
+
+        This is called in a WSGI application thread, not the I/O thread.
+        """
+        # Avoid passing None because cStringIO and file don't like it.
+        if size is None:
+            return self._wrapped.read()
+        return self._wrapped.read(size)
+
+
+    def readline(self, size=None):
+        """
+        Pass through to the underlying C{readline}, treating a size of C{-1}
+        the same as C{None} (read to the end of the line).
+
+        This is called in a WSGI application thread, not the I/O thread.
+        """
+        # Check for -1 because StringIO doesn't handle it correctly.  Check for
+        # None because files and tempfiles don't accept that.
+        if size == -1 or size is None:
+            return self._wrapped.readline()
+        return self._wrapped.readline(size)
+
+
+    def readlines(self, size=None):
+        """
+        Pass through to the underlying C{readlines}.
+
+        This is called in a WSGI application thread, not the I/O thread.
+        """
+        # Avoid passing None because cStringIO and file don't like it.
+        if size is None:
+            return self._wrapped.readlines()
+        return self._wrapped.readlines(size)
+
+
+    def __iter__(self):
+        """
+        Pass through to the underlying C{__iter__}.
+
+        This is called in a WSGI application thread, not the I/O thread.
+        """
+        return iter(self._wrapped)
+
+
+
+class _WSGIResponse:
+    """
+    Helper for L{WSGIResource} which drives the WSGI application using a
+    threadpool and hooks it up to the L{Request}.
+
+    @ivar started: A C{bool} indicating whether or not the response status and
+        headers have been written to the request yet.  This may only be read or
+        written in the WSGI application thread.
+
+    @ivar reactor: An L{IReactorThreads} provider which is used to call methods
+        on the request in the I/O thread.
+
+    @ivar threadpool: A L{ThreadPool} which is used to call the WSGI
+        application object in a non-I/O thread.
+
+    @ivar application: The WSGI application object.
+
+    @ivar request: The L{Request} upon which the WSGI environment is based and
+        to which the application's output will be sent.
+
+    @ivar environ: The WSGI environment C{dict}.
+
+    @ivar status: The HTTP response status C{str} supplied to the WSGI
+        I{start_response} callable by the application.
+
+    @ivar headers: A list of HTTP response headers supplied to the WSGI
+        I{start_response} callable by the application.
+
+    @ivar _requestFinished: A flag which indicates whether it is possible to
+        generate more response data or not.  This is C{False} until
+        L{Request.notifyFinish} tells us the request is done, then C{True}.
+    """
+
+    _requestFinished = False
+
+    def __init__(self, reactor, threadpool, application, request):
+        self.started = False
+        self.reactor = reactor
+        self.threadpool = threadpool
+        self.application = application
+        self.request = request
+        self.request.notifyFinish().addBoth(self._finished)
+
+        if request.prepath:
+            scriptName = '/' + '/'.join(request.prepath)
+        else:
+            scriptName = ''
+
+        if request.postpath:
+            pathInfo = '/' + '/'.join(request.postpath)
+        else:
+            pathInfo = ''
+
+        parts = request.uri.split('?', 1)
+        if len(parts) == 1:
+            queryString = ''
+        else:
+            queryString = parts[1]
+
+        self.environ = {
+            'REQUEST_METHOD': request.method,
+            'REMOTE_ADDR': request.getClientIP(),
+            'SCRIPT_NAME': scriptName,
+            'PATH_INFO': pathInfo,
+            'QUERY_STRING': queryString,
+            'CONTENT_TYPE': request.getHeader('content-type') or '',
+            'CONTENT_LENGTH': request.getHeader('content-length') or '',
+            'SERVER_NAME': request.getRequestHostname(),
+            'SERVER_PORT': str(request.getHost().port),
+            'SERVER_PROTOCOL': request.clientproto}
+
+
+        # The application object is entirely in control of response headers;
+        # disable the default Content-Type value normally provided by
+        # twisted.web.server.Request.
+        self.request.defaultContentType = None
+
+        for name, values in request.requestHeaders.getAllRawHeaders():
+            name = 'HTTP_' + name.upper().replace('-', '_')
+            # It might be preferable for http.HTTPChannel to clear out
+            # newlines.
+            self.environ[name] = ','.join([
+                    v.replace('\n', ' ') for v in values])
+
+        self.environ.update({
+                'wsgi.version': (1, 0),
+                'wsgi.url_scheme': request.isSecure() and 'https' or 'http',
+                'wsgi.run_once': False,
+                'wsgi.multithread': True,
+                'wsgi.multiprocess': False,
+                'wsgi.errors': _ErrorStream(),
+                # Attend: request.content was owned by the I/O thread up until
+                # this point.  By wrapping it and putting the result into the
+                # environment dictionary, it is effectively being given to
+                # another thread.  This means that whatever it is, it has to be
+                # safe to access it from two different threads.  The access
+                # *should* all be serialized (first the I/O thread writes to
+                # it, then the WSGI thread reads from it, then the I/O thread
+                # closes it).  However, since the request is made available to
+                # arbitrary application code during resource traversal, it's
+                # possible that some other code might decide to use it in the
+                # I/O thread concurrently with its use in the WSGI thread.
+                # More likely than not, this will break.  This seems like an
+                # unlikely possibility to me, but if it is to be allowed,
+                # something here needs to change. -exarkun
+                'wsgi.input': _InputStream(request.content)})
+
+
+    def _finished(self, ignored):
+        """
+        Record the end of the response generation for the request being
+        serviced.
+        """
+        self._requestFinished = True
+
+
+    def startResponse(self, status, headers, excInfo=None):
+        """
+        The WSGI I{start_response} callable.  The given values are saved until
+        they are needed to generate the response.
+
+        This will be called in a non-I/O thread.
+        """
+        if self.started and excInfo is not None:
+            raise excInfo[0], excInfo[1], excInfo[2]
+        self.status = status
+        self.headers = headers
+        return self.write
+
+
+    def write(self, bytes):
+        """
+        The WSGI I{write} callable returned by the I{start_response} callable.
+        The given bytes will be written to the response body, possibly flushing
+        the status and headers first.
+
+        This will be called in a non-I/O thread.
+        """
+        def wsgiWrite(started):
+            if not started:
+                self._sendResponseHeaders()
+            self.request.write(bytes)
+        self.reactor.callFromThread(wsgiWrite, self.started)
+        self.started = True
+
+
+    def _sendResponseHeaders(self):
+        """
+        Set the response code and response headers on the request object, but
+        do not flush them.  The caller is responsible for doing a write in
+        order for anything to actually be written out in response to the
+        request.
+
+        This must be called in the I/O thread.
+        """
+        code, message = self.status.split(None, 1)
+        code = int(code)
+        self.request.setResponseCode(code, message)
+
+        for name, value in self.headers:
+            # Don't allow the application to control these required headers.
+            if name.lower() not in ('server', 'date'):
+                self.request.responseHeaders.addRawHeader(name, value)
+
+
+    def start(self):
+        """
+        Start the WSGI application in the threadpool.
+
+        This must be called in the I/O thread.
+        """
+        self.threadpool.callInThread(self.run)
+
+
+    def run(self):
+        """
+        Call the WSGI application object, iterate it, and handle its output.
+
+        This must be called in a non-I/O thread (ie, a WSGI application
+        thread).
+        """
+        try:
+            appIterator = self.application(self.environ, self.startResponse)
+            for elem in appIterator:
+                if elem:
+                    self.write(elem)
+                if self._requestFinished:
+                    break
+            close = getattr(appIterator, 'close', None)
+            if close is not None:
+                close()
+        except:
+            def wsgiError(started, type, value, traceback):
+                err(Failure(value, type, traceback), "WSGI application error")
+                if started:
+                    self.request.transport.loseConnection()
+                else:
+                    self.request.setResponseCode(INTERNAL_SERVER_ERROR)
+                    self.request.finish()
+            self.reactor.callFromThread(wsgiError, self.started, *exc_info())
+        else:
+            def wsgiFinish(started):
+                if not self._requestFinished:
+                    if not started:
+                        self._sendResponseHeaders()
+                    self.request.finish()
+            self.reactor.callFromThread(wsgiFinish, self.started)
+        self.started = True
+
+
+
+class WSGIResource:
+    """
+    An L{IResource} implementation which delegates responsibility for all
+    resources hierarchically inferior to it to a WSGI application.
+
+    @ivar _reactor: An L{IReactorThreads} provider which will be passed on to
+        L{_WSGIResponse} to schedule calls in the I/O thread.
+
+    @ivar _threadpool: A L{ThreadPool} which will be passed on to
+        L{_WSGIResponse} to run the WSGI application object.
+
+    @ivar _application: The WSGI application object.
+    """
+    implements(IResource)
+
+    # Further resource segments are left up to the WSGI application object to
+    # handle.
+    isLeaf = True
+
+    def __init__(self, reactor, threadpool, application):
+        self._reactor = reactor
+        self._threadpool = threadpool
+        self._application = application
+
+
+    def render(self, request):
+        """
+        Turn the request into the appropriate C{environ} C{dict} suitable to be
+        passed to the WSGI application object and then pass it on.
+
+        The WSGI application object is given almost complete control of the
+        rendering process.  C{NOT_DONE_YET} will always be returned, and
+        response completion will be dictated by the application object, as
+        will the status, headers, and the response body.
+        """
+        response = _WSGIResponse(
+            self._reactor, self._threadpool, self._application, request)
+        response.start()
+        return NOT_DONE_YET
+
+
+    def getChildWithDefault(self, name, request):
+        """
+        Reject attempts to retrieve a child resource.  All path segments beyond
+        the one which refers to this resource are handled by the WSGI
+        application object.
+        """
+        raise RuntimeError("Cannot get IResource children from WSGIResource")
+
+
+    def putChild(self, path, child):
+        """
+        Reject attempts to add a child resource to this resource.  The WSGI
+        application object handles all path segments beneath this resource, so
+        L{IResource} children can never be found.
+        """
+        raise RuntimeError("Cannot put IResource children under WSGIResource")
+
+
+__all__ = ['WSGIResource']
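To see how WSGIResource is meant to be driven, here is a minimal sketch (the
hello application, port, and paths are invented): the reactor thread handles
I/O while the reactor's thread pool runs the WSGI callable, matching the
threading notes in the docstrings above.

    from twisted.internet import reactor
    from twisted.web.server import Site
    from twisted.web.wsgi import WSGIResource

    def hello(environ, start_response):
        # A trivial PEP 333 application, used only for illustration.
        start_response('200 OK', [('Content-Type', 'text/plain')])
        return ['Hello from %s\n' % (environ['PATH_INFO'],)]

    resource = WSGIResource(reactor, reactor.getThreadPool(), hello)
    reactor.listenTCP(8080, Site(resource))
    reactor.run()
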
diff --git a/ThirdParty/Twisted/twisted/web/xmlrpc.py b/ThirdParty/Twisted/twisted/web/xmlrpc.py
new file mode 100644
index 0000000..cbcb7b0
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/web/xmlrpc.py
@@ -0,0 +1,590 @@
+# -*- test-case-name: twisted.web.test.test_xmlrpc -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A generic resource for publishing objects via XML-RPC.
+
+Maintainer: Itamar Shtull-Trauring
+"""
+
+# System Imports
+import sys, xmlrpclib, urlparse
+
+
+# Sibling Imports
+from twisted.web import resource, server, http
+from twisted.internet import defer, protocol, reactor
+from twisted.python import log, reflect, failure
+
+# These are deprecated, use the class level definitions
+NOT_FOUND = 8001
+FAILURE = 8002
+
+
+# Useful so people don't need to import xmlrpclib directly
+Fault = xmlrpclib.Fault
+Binary = xmlrpclib.Binary
+Boolean = xmlrpclib.Boolean
+DateTime = xmlrpclib.DateTime
+
+# On Python 2.4 and earlier, DateTime.decode returns unicode.
+if sys.version_info[:2] < (2, 5):
+    _decode = DateTime.decode
+    DateTime.decode = lambda self, value: _decode(self, value.encode('ascii'))
+
+
+def withRequest(f):
+    """
+    Decorator to cause the request to be passed as the first argument
+    to the method.
+
+    If an I{xmlrpc_} method is wrapped with C{withRequest}, the
+    request object is passed as the first argument to that method.
+    For example::
+
+        @withRequest
+        def xmlrpc_echo(self, request, s):
+            return s
+
+    @since: 10.2
+    """
+    f.withRequest = True
+    return f
+
+
+
+class NoSuchFunction(Fault):
+    """
+    There is no function by the given name.
+    """
+
+
+class Handler:
+    """
+    Handle an XML-RPC request and store the state for a request in progress.
+
+    Override the run() method and return result using self.result,
+    a Deferred.
+
+    We require this class since we're not using threads, so we can't
+    encapsulate state in a running function if we're going to have
+    to wait for results.
+
+    For example, let's say we want to authenticate against twisted.cred,
+    run an LDAP query and then pass its result to a database query, all
+    as a result of a single XML-RPC command. We'd use a Handler instance
+    to store the state of the running command.
+    """
+
+    def __init__(self, resource, *args):
+        self.resource = resource # the XML-RPC resource we are connected to
+        self.result = defer.Deferred()
+        self.run(*args)
+
+    def run(self, *args):
+        # event driven equivalent of 'raise UnimplementedError'
+        self.result.errback(
+            NotImplementedError("Implement run() in subclasses"))
+
+
+class XMLRPC(resource.Resource):
+    """
+    A resource that implements XML-RPC.
+
+    You probably want to connect this to '/RPC2'.
+
+    Methods published can return XML-RPC serializable results, Faults,
+    Binary, Boolean, DateTime, Deferreds, or Handler instances.
+
+    By default methods beginning with 'xmlrpc_' are published.
+
+    Sub-handlers for prefixed methods (e.g., system.listMethods)
+    can be added with putSubHandler. By default, prefixes are
+    separated with a '.'. Override self.separator to change this.
+
+    @ivar allowNone: Permit XML translation of the Python constant None.
+    @type allowNone: C{bool}
+
+    @ivar useDateTime: Present datetime values as datetime.datetime objects,
+        if true.  Requires Python >= 2.5.
+    @type useDateTime: C{bool}
+    """
+
+    # Error codes for Twisted, if they conflict with yours then
+    # modify them at runtime.
+    NOT_FOUND = 8001
+    FAILURE = 8002
+
+    isLeaf = 1
+    separator = '.'
+    allowedMethods = ('POST',)
+
+    def __init__(self, allowNone=False, useDateTime=False):
+        resource.Resource.__init__(self)
+        self.subHandlers = {}
+        self.allowNone = allowNone
+        self.useDateTime = useDateTime
+
+
+    def __setattr__(self, name, value):
+        if name == "useDateTime" and value and sys.version_info[:2] < (2, 5):
+            raise RuntimeError("useDateTime requires Python 2.5 or later.")
+        self.__dict__[name] = value
+
+
+    def putSubHandler(self, prefix, handler):
+        self.subHandlers[prefix] = handler
+
+    def getSubHandler(self, prefix):
+        return self.subHandlers.get(prefix, None)
+
+    def getSubHandlerPrefixes(self):
+        return self.subHandlers.keys()
+
+    def render_POST(self, request):
+        request.content.seek(0, 0)
+        request.setHeader("content-type", "text/xml")
+        try:
+            if self.useDateTime:
+                args, functionPath = xmlrpclib.loads(request.content.read(),
+                    use_datetime=True)
+            else:
+                # Maintain backwards compatibility with Python < 2.5
+                args, functionPath = xmlrpclib.loads(request.content.read())
+        except Exception, e:
+            f = Fault(self.FAILURE, "Can't deserialize input: %s" % (e,))
+            self._cbRender(f, request)
+        else:
+            try:
+                function = self.lookupProcedure(functionPath)
+            except Fault, f:
+                self._cbRender(f, request)
+            else:
+                # Use this list to track whether the response has failed or not.
+                # This will be used later on to decide if the result of the
+                # Deferred should be written out and Request.finish called.
+                responseFailed = []
+                request.notifyFinish().addErrback(responseFailed.append)
+                if getattr(function, 'withRequest', False):
+                    d = defer.maybeDeferred(function, request, *args)
+                else:
+                    d = defer.maybeDeferred(function, *args)
+                d.addErrback(self._ebRender)
+                d.addCallback(self._cbRender, request, responseFailed)
+        return server.NOT_DONE_YET
+
+
+    def _cbRender(self, result, request, responseFailed=None):
+        if responseFailed:
+            return
+
+        if isinstance(result, Handler):
+            result = result.result
+        if not isinstance(result, Fault):
+            result = (result,)
+        try:
+            try:
+                content = xmlrpclib.dumps(
+                    result, methodresponse=True,
+                    allow_none=self.allowNone)
+            except Exception, e:
+                f = Fault(self.FAILURE, "Can't serialize output: %s" % (e,))
+                content = xmlrpclib.dumps(f, methodresponse=True,
+                                          allow_none=self.allowNone)
+
+            request.setHeader("content-length", str(len(content)))
+            request.write(content)
+        except:
+            log.err()
+        request.finish()
+
+
+    def _ebRender(self, failure):
+        if isinstance(failure.value, Fault):
+            return failure.value
+        log.err(failure)
+        return Fault(self.FAILURE, "error")
+
+
+    def lookupProcedure(self, procedurePath):
+        """
+        Given a string naming a procedure, return a callable object for that
+        procedure or raise NoSuchFunction.
+
+        The returned object will be called, and should return the result of the
+        procedure, a Deferred, or a Fault instance.
+
+        Override in subclasses if you want your own policy.  The base
+        implementation is that, given C{'foo'}, C{self.xmlrpc_foo} will be returned.
+        If C{procedurePath} contains C{self.separator}, the sub-handler for the
+        initial prefix is used to search for the remaining path.
+
+        If you override C{lookupProcedure}, you may also want to override
+        C{listProcedures} to accurately report the procedures supported by your
+        resource, so that clients using the I{system.listMethods} procedure
+        receive accurate results.
+
+        @since: 11.1
+        """
+        if procedurePath.find(self.separator) != -1:
+            prefix, procedurePath = procedurePath.split(self.separator, 1)
+            handler = self.getSubHandler(prefix)
+            if handler is None:
+                raise NoSuchFunction(self.NOT_FOUND,
+                    "no such subHandler %s" % prefix)
+            return handler.lookupProcedure(procedurePath)
+
+        f = getattr(self, "xmlrpc_%s" % procedurePath, None)
+        if not f:
+            raise NoSuchFunction(self.NOT_FOUND,
+                "procedure %s not found" % procedurePath)
+        elif not callable(f):
+            raise NoSuchFunction(self.NOT_FOUND,
+                "procedure %s not callable" % procedurePath)
+        else:
+            return f
+
+    def listProcedures(self):
+        """
+        Return a list of the names of all xmlrpc procedures.
+
+        @since: 11.1
+        """
+        return reflect.prefixedMethodNames(self.__class__, 'xmlrpc_')
+
+
+class XMLRPCIntrospection(XMLRPC):
+    """
+    Implement the XML-RPC Introspection API.
+
+    By default, the methodHelp method returns the 'help' method attribute,
+    if it exists, otherwise the __doc__ method attribute, if it exists,
+    otherwise the empty string.
+
+    To enable the methodSignature method, add a 'signature' method attribute
+    containing a list of lists. See methodSignature's documentation for the
+    format. Note the type strings should be XML-RPC types, not Python types.
+    """
+
+    def __init__(self, parent):
+        """
+        Implement Introspection support for an XMLRPC server.
+
+        @param parent: the XMLRPC server to add Introspection support to.
+        @type parent: L{XMLRPC}
+        """
+        XMLRPC.__init__(self)
+        self._xmlrpc_parent = parent
+
+    def xmlrpc_listMethods(self):
+        """
+        Return a list of the method names implemented by this server.
+        """
+        functions = []
+        todo = [(self._xmlrpc_parent, '')]
+        while todo:
+            obj, prefix = todo.pop(0)
+            functions.extend([prefix + name for name in obj.listProcedures()])
+            todo.extend([ (obj.getSubHandler(name),
+                           prefix + name + obj.separator)
+                          for name in obj.getSubHandlerPrefixes() ])
+        return functions
+
+    xmlrpc_listMethods.signature = [['array']]
+
+    def xmlrpc_methodHelp(self, method):
+        """
+        Return a documentation string describing the use of the given method.
+        """
+        method = self._xmlrpc_parent.lookupProcedure(method)
+        return (getattr(method, 'help', None)
+                or getattr(method, '__doc__', None) or '')
+
+    xmlrpc_methodHelp.signature = [['string', 'string']]
+
+    def xmlrpc_methodSignature(self, method):
+        """
+        Return a list of type signatures.
+
+        Each type signature is a list of the form [rtype, type1, type2, ...]
+        where rtype is the return type and typeN is the type of the Nth
+        argument. If no signature information is available, the empty
+        string is returned.
+        """
+        method = self._xmlrpc_parent.lookupProcedure(method)
+        return getattr(method, 'signature', None) or ''
+
+    xmlrpc_methodSignature.signature = [['array', 'string'],
+                                        ['string', 'string']]
+
+
+def addIntrospection(xmlrpc):
+    """
+    Add Introspection support to an XMLRPC server.
+
+    @param xmlrpc: the XMLRPC server to add Introspection support to.
+    @type xmlrpc: L{XMLRPC}
+    """
+    xmlrpc.putSubHandler('system', XMLRPCIntrospection(xmlrpc))
+
+
+class QueryProtocol(http.HTTPClient):
+
+    def connectionMade(self):
+        self._response = None
+        self.sendCommand('POST', self.factory.path)
+        self.sendHeader('User-Agent', 'Twisted/XMLRPClib')
+        self.sendHeader('Host', self.factory.host)
+        self.sendHeader('Content-type', 'text/xml')
+        self.sendHeader('Content-length', str(len(self.factory.payload)))
+        if self.factory.user:
+            auth = '%s:%s' % (self.factory.user, self.factory.password)
+            auth = auth.encode('base64').strip()
+            self.sendHeader('Authorization', 'Basic %s' % (auth,))
+        self.endHeaders()
+        self.transport.write(self.factory.payload)
+
+    def handleStatus(self, version, status, message):
+        if status != '200':
+            self.factory.badStatus(status, message)
+
+    def handleResponse(self, contents):
+        """
+        Handle the XML-RPC response received from the server.
+
+        Specifically, disconnect from the server and store the XML-RPC
+        response so that it can be properly handled when the disconnect is
+        finished.
+        """
+        self.transport.loseConnection()
+        self._response = contents
+
+    def connectionLost(self, reason):
+        """
+        The connection to the server has been lost.
+
+        If we have a full response from the server, then parse it and fire a
+        Deferred with the return value or C{Fault} that the server gave us.
+        """
+        http.HTTPClient.connectionLost(self, reason)
+        if self._response is not None:
+            response, self._response = self._response, None
+            self.factory.parseResponse(response)
+
+
+payloadTemplate = """<?xml version="1.0"?>
+<methodCall>
+<methodName>%s</methodName>
+%s
+</methodCall>
+"""
+
+
+class _QueryFactory(protocol.ClientFactory):
+    """
+    XML-RPC Client Factory
+
+    @ivar path: The path portion of the URL to which to post method calls.
+    @type path: C{str}
+
+    @ivar host: The value to use for the Host HTTP header.
+    @type host: C{str}
+
+    @ivar user: The username with which to authenticate with the server
+        when making calls.
+    @type user: C{str} or C{NoneType}
+
+    @ivar password: The password with which to authenticate with the server
+        when making calls.
+    @type password: C{str} or C{NoneType}
+
+    @ivar useDateTime: Accept datetime values as datetime.datetime objects;
+        also passed to the underlying xmlrpclib implementation.  Defaults to
+        False.  Requires Python >= 2.5.
+    @type useDateTime: C{bool}
+    """
+
+    deferred = None
+    protocol = QueryProtocol
+
+    def __init__(self, path, host, method, user=None, password=None,
+                 allowNone=False, args=(), canceller=None, useDateTime=False):
+        """
+        @param method: The name of the method to call.
+        @type method: C{str}
+
+        @param allowNone: allow the use of None values in parameters.  It's
+            passed to the underlying xmlrpclib implementation.  Defaults to False.
+        @type allowNone: C{bool} or C{NoneType}
+
+        @param args: the arguments to pass to the method.
+        @type args: C{tuple}
+
+        @param canceller: A 1-argument callable passed to the deferred as the
+            canceller callback.
+        @type canceller: callable or C{NoneType}
+        """
+        self.path, self.host = path, host
+        self.user, self.password = user, password
+        self.payload = payloadTemplate % (method,
+            xmlrpclib.dumps(args, allow_none=allowNone))
+        self.deferred = defer.Deferred(canceller)
+        self.useDateTime = useDateTime
+
+    def parseResponse(self, contents):
+        if not self.deferred:
+            return
+        try:
+            if self.useDateTime:
+                response = xmlrpclib.loads(contents,
+                    use_datetime=True)[0][0]
+            else:
+                # Maintain backwards compatibility with Python < 2.5
+                response = xmlrpclib.loads(contents)[0][0]
+        except:
+            deferred, self.deferred = self.deferred, None
+            deferred.errback(failure.Failure())
+        else:
+            deferred, self.deferred = self.deferred, None
+            deferred.callback(response)
+
+    def clientConnectionLost(self, _, reason):
+        if self.deferred is not None:
+            deferred, self.deferred = self.deferred, None
+            deferred.errback(reason)
+
+    clientConnectionFailed = clientConnectionLost
+
+    def badStatus(self, status, message):
+        deferred, self.deferred = self.deferred, None
+        deferred.errback(ValueError(status, message))
+
+
+
+class Proxy:
+    """
+    A Proxy for making remote XML-RPC calls.
+
+    Pass the URL of the remote XML-RPC server to the constructor.
+
+    Use proxy.callRemote('foobar', *args) to call remote method
+    'foobar' with *args.
+
+    @ivar user: The username with which to authenticate with the server
+        when making calls.  If specified, overrides any username information
+        embedded in C{url}.  If not specified, a value may be taken from
+        C{url} if present.
+    @type user: C{str} or C{NoneType}
+
+    @ivar password: The password with which to authenticate with the server
+        when making calls.  If specified, overrides any password information
+        embedded in C{url}.  If not specified, a value may be taken from
+        C{url} if present.
+    @type password: C{str} or C{NoneType}
+
+    @ivar allowNone: allow the use of None values in parameters.  It's
+        passed to the underlying xmlrpclib implementation.  Defaults to False.
+    @type allowNone: C{bool} or C{NoneType}
+
+    @ivar useDateTime: Accept datetime values as datetime.datetime objects;
+        also passed to the underlying xmlrpclib implementation.  Defaults to
+        False.  Requires Python >= 2.5.
+    @type useDateTime: C{bool}
+
+    @ivar connectTimeout: Number of seconds to wait before assuming the
+        connection has failed.
+    @type connectTimeout: C{float}
+
+    @ivar _reactor: the reactor used to create connections.
+    @type _reactor: object providing L{twisted.internet.interfaces.IReactorTCP}
+
+    @ivar queryFactory: object returning a factory for XML-RPC protocol. Mainly
+        useful for tests.
+    """
+    queryFactory = _QueryFactory
+
+    def __init__(self, url, user=None, password=None, allowNone=False,
+                 useDateTime=False, connectTimeout=30.0, reactor=reactor):
+        """
+        @param url: The URL to which to post method calls.  Calls will be made
+            over SSL if the scheme is HTTPS.  If netloc contains username or
+            password information, these will be used to authenticate, as long as
+            the C{user} and C{password} arguments are not specified.
+        @type url: C{str}
+
+        """
+        scheme, netloc, path, params, query, fragment = urlparse.urlparse(url)
+        netlocParts = netloc.split('@')
+        if len(netlocParts) == 2:
+            userpass = netlocParts.pop(0).split(':')
+            self.user = userpass.pop(0)
+            try:
+                self.password = userpass.pop(0)
+            except:
+                self.password = None
+        else:
+            self.user = self.password = None
+        hostport = netlocParts[0].split(':')
+        self.host = hostport.pop(0)
+        try:
+            self.port = int(hostport.pop(0))
+        except:
+            self.port = None
+        self.path = path
+        if self.path in ['', None]:
+            self.path = '/'
+        self.secure = (scheme == 'https')
+        if user is not None:
+            self.user = user
+        if password is not None:
+            self.password = password
+        self.allowNone = allowNone
+        self.useDateTime = useDateTime
+        self.connectTimeout = connectTimeout
+        self._reactor = reactor
+
+
+    def __setattr__(self, name, value):
+        if name == "useDateTime" and value and sys.version_info[:2] < (2, 5):
+            raise RuntimeError("useDateTime requires Python 2.5 or later.")
+        self.__dict__[name] = value
+
+
+    def callRemote(self, method, *args):
+        """
+        Call remote XML-RPC C{method} with given arguments.
+
+        @return: a L{defer.Deferred} that will fire with the method response,
+            or a failure if the method failed. Generally, the failure type will
+            be L{Fault}, but you can also have an C{IndexError} on some buggy
+            servers giving empty responses.
+
+            If the deferred is cancelled before the request completes, the
+            connection is closed and the deferred will fire with a
+            L{defer.CancelledError}.
+        """
+        def cancel(d):
+            factory.deferred = None
+            connector.disconnect()
+        factory = self.queryFactory(
+            self.path, self.host, method, self.user,
+            self.password, self.allowNone, args, cancel, self.useDateTime)
+
+        if self.secure:
+            from twisted.internet import ssl
+            connector = self._reactor.connectSSL(
+                self.host, self.port or 443,
+                factory, ssl.ClientContextFactory(),
+                timeout=self.connectTimeout)
+        else:
+            connector = self._reactor.connectTCP(
+                self.host, self.port or 80, factory,
+                timeout=self.connectTimeout)
+        return factory.deferred
+
+
+__all__ = [
+    "XMLRPC", "Handler", "NoSuchFunction", "Proxy",
+
+    "Fault", "Binary", "Boolean", "DateTime"]
diff --git a/ThirdParty/Twisted/twisted/words/__init__.py b/ThirdParty/Twisted/twisted/words/__init__.py
new file mode 100644
index 0000000..3e38d15
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/__init__.py
@@ -0,0 +1,10 @@
+# -*- test-case-name: twisted.words.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Twisted Words: a Twisted Chat service.
+"""
+
+from twisted.words._version import version
+__version__ = version.short()
diff --git a/ThirdParty/Twisted/twisted/words/_version.py b/ThirdParty/Twisted/twisted/words/_version.py
new file mode 100644
index 0000000..9177166
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/_version.py
@@ -0,0 +1,3 @@
+# This is an auto-generated file. Do not edit it.
+from twisted.python import versions
+version = versions.Version('twisted.words', 12, 3, 0)
diff --git a/ThirdParty/Twisted/twisted/words/ewords.py b/ThirdParty/Twisted/twisted/words/ewords.py
new file mode 100644
index 0000000..7621a71
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/ewords.py
@@ -0,0 +1,34 @@
+# -*- test-case-name: twisted.words.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""Exception definitions for Words
+"""
+
+class WordsError(Exception):
+    def __str__(self):
+        return self.__class__.__name__ + ': ' + Exception.__str__(self)
+
+class NoSuchUser(WordsError):
+    pass
+
+
+class DuplicateUser(WordsError):
+    pass
+
+
+class NoSuchGroup(WordsError):
+    pass
+
+
+class DuplicateGroup(WordsError):
+    pass
+
+
+class AlreadyLoggedIn(WordsError):
+    pass
+
+__all__ = [
+    'WordsError', 'NoSuchUser', 'DuplicateUser',
+    'NoSuchGroup', 'DuplicateGroup', 'AlreadyLoggedIn',
+    ]
diff --git a/ThirdParty/Twisted/twisted/words/im/__init__.py b/ThirdParty/Twisted/twisted/words/im/__init__.py
new file mode 100644
index 0000000..cf3492b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/__init__.py
@@ -0,0 +1,6 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""Instance Messenger, Pan-protocol chat client."""
+
diff --git a/ThirdParty/Twisted/twisted/words/im/baseaccount.py b/ThirdParty/Twisted/twisted/words/im/baseaccount.py
new file mode 100644
index 0000000..0261dbf
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/baseaccount.py
@@ -0,0 +1,62 @@
+# -*- Python -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+
+class AccountManager:
+    """I am responsible for managing a user's accounts.
+
+    That is, remembering what accounts are available, their settings,
+    adding and removing accounts, etc.
+
+    @ivar accounts: A collection of available accounts.
+    @type accounts: mapping of strings to L{Account<interfaces.IAccount>}s.
+    """
+    def __init__(self):
+        self.accounts = {}
+
+    def getSnapShot(self):
+        """A snapshot of all the accounts and their status.
+
+        @returns: A list of tuples, each of the form
+            (string:accountName, boolean:isOnline,
+            boolean:autoLogin, string:gatewayType)
+        """
+        data = []
+        for account in self.accounts.values():
+            data.append((account.accountName, account.isOnline(),
+                         account.autoLogin, account.gatewayType))
+        return data
+
+    def isEmpty(self):
+        return len(self.accounts) == 0
+
+    def getConnectionInfo(self):
+        connectioninfo = []
+        for account in self.accounts.values():
+            connectioninfo.append(account.isOnline())
+        return connectioninfo
+
+    def addAccount(self, account):
+        self.accounts[account.accountName] = account
+
+    def delAccount(self, accountName):
+        del self.accounts[accountName]
+
+    def connect(self, accountName, chatui):
+        """
+        @returntype: Deferred L{interfaces.IClient}
+        """
+        return self.accounts[accountName].logOn(chatui)
+
+    def disconnect(self, accountName):
+        pass
+        #self.accounts[accountName].logOff()  - not yet implemented
+
+    def quit(self):
+        pass
+        #for account in self.accounts.values():
+        #    account.logOff()  - not yet implemented
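To make the snapshot format described in getSnapShot concrete, here is a toy
sketch; _StubAccount is invented here and only mimics the attributes that
AccountManager reads (accountName, autoLogin, gatewayType, isOnline):

    from twisted.words.im.baseaccount import AccountManager

    class _StubAccount:
        # Minimal stand-in for an interfaces.IAccount provider.
        accountName = 'irc-work'
        autoLogin = False
        gatewayType = 'IRC'
        def isOnline(self):
            return False

    manager = AccountManager()
    manager.addAccount(_StubAccount())
    print manager.getSnapShot()   # [('irc-work', False, False, 'IRC')]
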
diff --git a/ThirdParty/Twisted/twisted/words/im/basechat.py b/ThirdParty/Twisted/twisted/words/im/basechat.py
new file mode 100644
index 0000000..076275f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/basechat.py
@@ -0,0 +1,512 @@
+# -*- test-case-name: twisted.words.test.test_basechat -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Base classes for Instance Messenger clients.
+"""
+
+from twisted.words.im.locals import OFFLINE, ONLINE, AWAY
+
+
+class ContactsList:
+    """
+    A GUI object that displays a contacts list.
+
+    @ivar chatui: The GUI chat client associated with this contacts list.
+    @type chatui: L{ChatUI}
+
+    @ivar contacts: The contacts.
+    @type contacts: C{dict} mapping C{str} to a L{IPerson<interfaces.IPerson>}
+        provider
+
+    @ivar onlineContacts: The contacts who are currently online (have a status
+        that is not C{OFFLINE}).
+    @type onlineContacts: C{dict} mapping C{str} to a
+        L{IPerson<interfaces.IPerson>} provider
+
+    @ivar clients: The signed-on clients.
+    @type clients: C{list} of L{IClient<interfaces.IClient>} providers
+    """
+    def __init__(self, chatui):
+        """
+        @param chatui: The GUI chat client associated with this contacts list.
+        @type chatui: L{ChatUI}
+        """
+        self.chatui = chatui
+        self.contacts = {}
+        self.onlineContacts = {}
+        self.clients = []
+
+
+    def setContactStatus(self, person):
+        """
+        Inform the user that a person's status has changed.
+
+        @param person: The person whose status has changed.
+        @type person: L{IPerson<interfaces.IPerson>} provider
+        """
+        if not self.contacts.has_key(person.name):
+            self.contacts[person.name] = person
+        if not self.onlineContacts.has_key(person.name) and \
+                (person.status == ONLINE or person.status == AWAY):
+            self.onlineContacts[person.name] = person
+        if self.onlineContacts.has_key(person.name) and \
+                person.status == OFFLINE:
+            del self.onlineContacts[person.name]
+
+
+    def registerAccountClient(self, client):
+        """
+        Notify the user that an account client has been signed on to.
+
+        @param client: The client being added to your list of account clients.
+        @type client: L{IClient<interfaces.IClient>} provider
+        """
+        if not client in self.clients:
+            self.clients.append(client)
+
+
+    def unregisterAccountClient(self, client):
+        """
+        Notify the user that an account client has been signed off or
+        disconnected from.
+
+        @param client: The client being removed from the list of account
+            clients.
+        @type client: L{IClient<interfaces.IClient>} provider
+        """
+        if client in self.clients:
+            self.clients.remove(client)
+
+
+    def contactChangedNick(self, person, newnick):
+        """
+        Update your contact information to reflect a change to a contact's
+        nickname.
+
+        @param person: The person in your contacts list whose nickname is
+            changing.
+        @type person: L{IPerson<interfaces.IPerson>} provider
+
+        @param newnick: The new nickname for this person.
+        @type newnick: C{str}
+        """
+        oldname = person.name
+        if oldname in self.contacts:
+            del self.contacts[oldname]
+            person.name = newnick
+            self.contacts[newnick] = person
+            if self.onlineContacts.has_key(oldname):
+                del self.onlineContacts[oldname]
+                self.onlineContacts[newnick] = person
+
+
+
+class Conversation:
+    """
+    A GUI window of a conversation with a specific person.
+
+    @ivar person: The person who you're having this conversation with.
+    @type person: L{IPerson<interfaces.IPerson>} provider
+
+    @ivar chatui: The GUI chat client associated with this conversation.
+    @type chatui: L{ChatUI}
+    """
+    def __init__(self, person, chatui):
+        """
+        @param person: The person who you're having this conversation with.
+        @type person: L{IPerson<interfaces.IPerson>} provider
+
+        @param chatui: The GUI chat client associated with this conversation.
+        @type chatui: L{ChatUI}
+        """
+        self.chatui = chatui
+        self.person = person
+
+
+    def show(self):
+        """
+        Display the ConversationWindow.
+        """
+        raise NotImplementedError("Subclasses must implement this method")
+
+
+    def hide(self):
+        """
+        Hide the ConversationWindow.
+        """
+        raise NotImplementedError("Subclasses must implement this method")
+
+
+    def sendText(self, text):
+        """
+        Send text to the person with whom the user is conversing.
+
+        @param text: The text to be sent.
+        @type text: C{str}
+        """
+        self.person.sendMessage(text, None)
+
+
+    def showMessage(self, text, metadata=None):
+        """
+        Display a message sent from the person with whom the user is conversing.
+
+        @param text: The sent message.
+        @type text: C{str}
+
+        @param metadata: Metadata associated with this message.
+        @type metadata: C{dict}
+        """
+        raise NotImplementedError("Subclasses must implement this method")
+
+
+    def contactChangedNick(self, person, newnick):
+        """
+        Change a person's name.
+
+        @param person: The person whose nickname is changing.
+        @type person: L{IPerson<interfaces.IPerson>} provider
+
+        @param newnick: The new nickname for this person.
+        @type newnick: C{str}
+        """
+        self.person.name = newnick
+
+
+
+class GroupConversation:
+    """
+    A GUI window of a conversation with a group of people.
+
+    @ivar chatui: The GUI chat client associated with this conversation.
+    @type chatui: L{ChatUI}
+
+    @ivar group: The group of people that are having this conversation.
+    @type group: L{IGroup<interfaces.IGroup>} provider
+
+    @ivar members: The names of the people in this conversation.
+    @type members: C{list} of C{str}
+    """
+    def __init__(self, group, chatui):
+        """
+        @param chatui: The GUI chat client associated with this conversation.
+        @type chatui: L{ChatUI}
+
+        @param group: The group of people that are having this conversation.
+        @type group: L{IGroup<interfaces.IGroup>} provider
+        """
+        self.chatui = chatui
+        self.group = group
+        self.members = []
+
+
+    def show(self):
+        """
+        Display the GroupConversationWindow.
+        """
+        raise NotImplementedError("Subclasses must implement this method")
+
+
+    def hide(self):
+        """
+        Hide the GroupConversationWindow.
+        """
+        raise NotImplementedError("Subclasses must implement this method")
+
+
+    def sendText(self, text):
+        """
+        Send text to the group.
+
+        @param text: The text to be sent.
+        @type text: C{str}
+        """
+        self.group.sendGroupMessage(text, None)
+
+
+    def showGroupMessage(self, sender, text, metadata=None):
+        """
+        Display to the user a message sent to this group from the given sender.
+
+        @param sender: The person sending the message.
+        @type sender: C{str}
+
+        @param text: The sent message.
+        @type text: C{str}
+
+        @param metadata: Metadata associated with this message.
+        @type metadata: C{dict}
+        """
+        raise NotImplementedError("Subclasses must implement this method")
+
+
+    def setGroupMembers(self, members):
+        """
+        Set the list of members in the group.
+
+        @param members: The names of the people that will be in this group.
+        @type members: C{list} of C{str}
+        """
+        self.members = members
+
+
+    def setTopic(self, topic, author):
+        """
+        Change the topic for the group conversation window and display this
+        change to the user.
+
+        @param topic: This group's topic.
+        @type topic: C{str}
+
+        @param author: The person changing the topic.
+        @type author: C{str}
+        """
+        raise NotImplementedError("Subclasses must implement this method")
+
+
+    def memberJoined(self, member):
+        """
+        Add the given member to the list of members in the group conversation
+        and display this to the user.
+
+        @param member: The person joining the group conversation.
+        @type member: C{str}
+        """
+        if member not in self.members:
+            self.members.append(member)
+
+
+    def memberChangedNick(self, oldnick, newnick):
+        """
+        Change the nickname for a member of the group conversation and display
+        this change to the user.
+
+        @param oldnick: The old nickname.
+        @type oldnick: C{str}
+
+        @param newnick: The new nickname.
+        @type newnick: C{str}
+        """
+        if oldnick in self.members:
+            self.members.remove(oldnick)
+            self.members.append(newnick)
+
+
+    def memberLeft(self, member):
+        """
+        Delete the given member from the list of members in the group
+        conversation and display the change to the user.
+
+        @param member: The person leaving the group conversation.
+        @type member: C{str}
+        """
+        if member in self.members:
+            self.members.remove(member)
+
+
+
+class ChatUI:
+    """
+    A GUI chat client.
+
+    @type conversations: C{dict} of L{Conversation}
+    @ivar conversations: A cache of all the direct windows.
+
+    @type groupConversations: C{dict} of L{GroupConversation}
+    @ivar groupConversations: A cache of all the group windows.
+
+    @type persons: C{dict} with keys that are a C{tuple} of (C{str},
+       L{IAccount<interfaces.IAccount>} provider) and values that are
+       L{IPerson<interfaces.IPerson>} provider
+    @ivar persons: A cache of all the users associated with this client.
+
+    @type groups: C{dict} with keys that are a C{tuple} of (C{str},
+        L{IAccount<interfaces.IAccount>} provider) and values that are
+        L{IGroup<interfaces.IGroup>} provider
+    @ivar groups: A cache of all the groups associated with this client.
+
+    @type onlineClients: C{list} of L{IClient<interfaces.IClient>} providers
+    @ivar onlineClients: A list of message sources currently online.
+
+    @type contactsList: L{ContactsList}
+    @ivar contactsList: A contacts list.
+    """
+    def __init__(self):
+        self.conversations = {}
+        self.groupConversations = {}
+        self.persons = {}
+        self.groups = {}
+        self.onlineClients = []
+        self.contactsList = ContactsList(self)
+
+
+    def registerAccountClient(self, client):
+        """
+        Notify the user that an account has been signed on to.
+
+        @type client: L{IClient<interfaces.IClient>} provider
+        @param client: The client account for the person who has just signed on.
+
+        @rtype: L{IClient<interfaces.IClient>} provider
+        @return: The client, so that it may be used in a callback chain.
+        """
+        self.onlineClients.append(client)
+        self.contactsList.registerAccountClient(client)
+        return client
+
+
+    def unregisterAccountClient(self, client):
+        """
+        Notify the user that an account has been signed off or disconnected.
+
+        @type client: L{IClient<interfaces.IClient>} provider
+        @param client: The client account for the person who has just signed
+            off.
+        """
+        self.onlineClients.remove(client)
+        self.contactsList.unregisterAccountClient(client)
+
+
+    def getContactsList(self):
+        """
+        Get the contacts list associated with this chat window.
+
+        @rtype: L{ContactsList}
+        @return: The contacts list associated with this chat window.
+        """
+        return self.contactsList
+
+
+    def getConversation(self, person, Class=Conversation, stayHidden=False):
+        """
+        For the given person object, return the conversation window or create
+        and return a new conversation window if one does not exist.
+
+        @type person: L{IPerson<interfaces.IPerson>} provider
+        @param person: The person whose conversation window we want to get.
+
+        @type Class: L{IConversation<interfaces.IConversation>} implementor
+        @param Class: The kind of conversation window we want. If the conversation
+            window for this person didn't already exist, create one of this type.
+
+        @type stayHidden: C{bool}
+        @param stayHidden: Whether or not the conversation window should stay
+            hidden.
+
+        @rtype: L{IConversation<interfaces.IConversation>} provider
+        @return: The conversation window.
+        """
+        conv = self.conversations.get(person)
+        if not conv:
+            conv = Class(person, self)
+            self.conversations[person] = conv
+        if stayHidden:
+            conv.hide()
+        else:
+            conv.show()
+        return conv
+
+
+    def getGroupConversation(self, group, Class=GroupConversation,
+                             stayHidden=False):
+        """
+        For the given group object, return the group conversation window or
+        create and return a new group conversation window if it doesn't exist.
+
+        @type group: L{IGroup<interfaces.IGroup>} provider
+        @param group: The group whose conversation window we want to get.
+
+        @type Class: L{IConversation<interfaces.IConversation>} implementor
+        @param Class: The kind of conversation window we want. If the conversation
+            window for this group didn't already exist, create one of this type.
+
+        @type stayHidden: C{bool}
+        @param stayHidden: Whether or not the conversation window should stay
+            hidden.
+
+        @rtype: L{IGroupConversation<interfaces.IGroupConversation>} provider
+        @return: The group conversation window.
+        """
+        conv = self.groupConversations.get(group)
+        if not conv:
+            conv = Class(group, self)
+            self.groupConversations[group] = conv
+        if stayHidden:
+            conv.hide()
+        else:
+            conv.show()
+        return conv
+
+
+    def getPerson(self, name, client):
+        """
+        For the given name and account client, return an instance of a
+        L{IPerson<interfaces.IPerson>} provider or create and return a new
+        instance of a L{IPerson<interfaces.IPerson>} provider.
+
+        @type name: C{str}
+        @param name: The name of the person of interest.
+
+        @type client: L{IClient<interfaces.IClient>} provider
+        @param client: The client account of interest.
+
+        @rtype: L{IPerson<interfaces.IPerson>} provider
+        @return: The person with that C{name}.
+        """
+        account = client.account
+        p = self.persons.get((name, account))
+        if not p:
+            p = account.getPerson(name)
+            self.persons[name, account] = p
+        return p
+
+
+    def getGroup(self, name, client):
+        """
+        For the given name and account client, return an instance of a
+        L{IGroup<interfaces.IGroup>} provider or create and return a new instance
+        of a L{IGroup<interfaces.IGroup>} provider.
+
+        @type name: C{str}
+        @param name: The name of the group of interest.
+
+        @type client: L{IClient<interfaces.IClient>} provider
+        @param client: The client account of interest.
+
+        @rtype: L{IGroup<interfaces.IGroup>} provider
+        @return: The group with that C{name}.
+        """
+        # I accept 'client' instead of 'account' in my signature for
+        # backwards compatibility.  (Groups changed to be Account-oriented
+        # in CVS revision 1.8.)
+        account = client.account
+        g = self.groups.get((name, account))
+        if not g:
+            g = account.getGroup(name)
+            self.groups[name, account] = g
+        return g
+
+
+    def contactChangedNick(self, person, newnick):
+        """
+        For the given C{person}, change the C{person}'s C{name} to C{newnick}
+        and tell the contact list and any conversation windows with that
+        C{person} to change as well.
+
+        @type person: L{IPerson<interfaces.IPerson>} provider
+        @param person: The person whose nickname will get changed.
+
+        @type newnick: C{str}
+        @param newnick: The new C{name} C{person} will take.
+        """
+        oldnick = person.name
+        if (oldnick, person.account) in self.persons:
+            conv = self.conversations.get(person)
+            if conv:
+                conv.contactChangedNick(person, newnick)
+            self.contactsList.contactChangedNick(person, newnick)
+            del self.persons[oldnick, person.account]
+            person.name = newnick
+            self.persons[person.name, person.account] = person
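
The classes above are deliberately skeletal: a front end subclasses Conversation
and GroupConversation to supply show/hide/showMessage, and subclasses ChatUI so
that getConversation hands out those concrete windows.  A minimal sketch of that
wiring (ConsoleConversation and ConsoleChatUI are illustrative names, not part
of the imported code):

    import sys
    from twisted.words.im import basechat

    class ConsoleConversation(basechat.Conversation):
        """A hypothetical console front end for one-to-one chats."""

        def show(self):
            pass    # a console "window" has nothing to raise

        def hide(self):
            pass

        def showMessage(self, text, metadata=None):
            sys.stdout.write("<%s> %s\n" % (self.person.name, text))

    class ConsoleChatUI(basechat.ChatUI):
        """Hand out ConsoleConversation windows instead of the bare base class."""

        def getConversation(self, person, Class=ConsoleConversation,
                            stayHidden=False):
            return basechat.ChatUI.getConversation(self, person, Class,
                                                   stayHidden)

A protocol backend then only talks to the ChatUI instance (registerAccountClient,
getConversation, getGroupConversation, getPerson, getGroup), so the GUI layer and
the protocol layer never need to reference each other directly.
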
diff --git a/ThirdParty/Twisted/twisted/words/im/basesupport.py b/ThirdParty/Twisted/twisted/words/im/basesupport.py
new file mode 100644
index 0000000..5c8b424
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/basesupport.py
@@ -0,0 +1,270 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+#
+
+"""Instance Messenger base classes for protocol support.
+
+You will find these useful if you're adding a new protocol to IM.
+"""
+
+# Abstract representation of chat "model" classes
+
+from twisted.words.im.locals import ONLINE, OFFLINE, OfflineError
+from twisted.words.im import interfaces
+
+from twisted.internet.protocol import Protocol
+
+from twisted.python.reflect import prefixedMethods
+from twisted.persisted import styles
+
+from twisted.internet import error
+
+class AbstractGroup:
+    def __init__(self, name, account):
+        self.name = name
+        self.account = account
+
+    def getGroupCommands(self):
+        """finds group commands
+
+        these commands are methods on me that start with imgroup_; they are
+        called with no arguments
+        """
+        return prefixedMethods(self, "imgroup_")
+
+    def getTargetCommands(self, target):
+        """finds group commands
+
+        these commands are methods on me that start with imgroup_; they are
+        called with a user present within this room as an argument
+
+        you may want to override this in your group in order to filter for
+        appropriate commands on the given user
+        """
+        return prefixedMethods(self, "imtarget_")
+
+    def join(self):
+        if not self.account.client:
+            raise OfflineError
+        self.account.client.joinGroup(self.name)
+
+    def leave(self):
+        if not self.account.client:
+            raise OfflineError
+        self.account.client.leaveGroup(self.name)
+
+    def __repr__(self):
+        return '<%s %r>' % (self.__class__, self.name)
+
+    def __str__(self):
+        return '%s@%s' % (self.name, self.account.accountName)
+
+class AbstractPerson:
+    def __init__(self, name, baseAccount):
+        self.name = name
+        self.account = baseAccount
+        self.status = OFFLINE
+
+    def getPersonCommands(self):
+        """finds person commands
+
+        these commands are methods on me that start with imperson_; they are
+        called with no arguments
+        """
+        return prefixedMethods(self, "imperson_")
+
+    def getIdleTime(self):
+        """
+        Returns a string.
+        """
+        return '--'
+
+    def __repr__(self):
+        return '<%s %r/%s>' % (self.__class__, self.name, self.status)
+
+    def __str__(self):
+        return '%s@%s' % (self.name, self.account.accountName)
+
+class AbstractClientMixin:
+    """Designed to be mixed in to a Protocol implementing class.
+
+    Inherit from me first.
+
+    @ivar _logonDeferred: Fired when I am done logging in.
+    """
+    def __init__(self, account, chatui, logonDeferred):
+        for base in self.__class__.__bases__:
+            if issubclass(base, Protocol):
+                self.__class__._protoBase = base
+                break
+        self.account = account
+        self.chat = chatui
+        self._logonDeferred = logonDeferred
+
+    def connectionMade(self):
+        self._protoBase.connectionMade(self)
+
+    def connectionLost(self, reason):
+        self.account._clientLost(self, reason)
+        self.unregisterAsAccountClient()
+        return self._protoBase.connectionLost(self, reason)
+
+    def unregisterAsAccountClient(self):
+        """Tell the chat UI that I have `signed off'.
+        """
+        self.chat.unregisterAccountClient(self)
+
+
+class AbstractAccount(styles.Versioned):
+    """Base class for Accounts.
+
+    I am the start of an implementation of L{IAccount<interfaces.IAccount>}; I
+    implement L{isOnline} and most of L{logOn}, though you'll need to implement
+    L{_startLogOn} in a subclass.
+
+    @cvar _groupFactory: A callable that will return an L{IGroup} appropriate
+        for this account type.
+    @cvar _personFactory: A callable that will return an L{IPerson} appropriate
+        for this account type.
+
+    @type _isConnecting: boolean
+    @ivar _isConnecting: Whether I am in the process of establishing a
+        connection to the server.
+    @type _isOnline: boolean
+    @ivar _isOnline: Whether I am currently on-line with the server.
+
+    @ivar accountName: A name for this account.
+    @ivar autoLogin: Whether to log on to this account automatically.
+    @ivar username: The user name to log on with.
+    @ivar password: The password to log on with.
+    @ivar host: The host to connect to.
+    @ivar port: The port to connect to on the host.
+    """
+
+    _isOnline = 0
+    _isConnecting = 0
+    client = None
+
+    _groupFactory = AbstractGroup
+    _personFactory = AbstractPerson
+
+    persistenceVersion = 2
+
+    def __init__(self, accountName, autoLogin, username, password, host, port):
+        self.accountName = accountName
+        self.autoLogin = autoLogin
+        self.username = username
+        self.password = password
+        self.host = host
+        self.port = port
+
+        self._groups = {}
+        self._persons = {}
+
+    def upgradeToVersion2(self):
+        # Added in CVS revision 1.16.
+        for k in ('_groups', '_persons'):
+            if not hasattr(self, k):
+                setattr(self, k, {})
+
+    def __getstate__(self):
+        state = styles.Versioned.__getstate__(self)
+        for k in ('client', '_isOnline', '_isConnecting'):
+            try:
+                del state[k]
+            except KeyError:
+                pass
+        return state
+
+    def isOnline(self):
+        return self._isOnline
+
+    def logOn(self, chatui):
+        """Log on to this account.
+
+        Takes care to not start a connection if a connection is
+        already in progress.  You will need to implement
+        L{_startLogOn} for this to work, and it would be a good idea
+        to override L{_loginFailed} too.
+
+        @returntype: Deferred L{interfaces.IClient}
+        """
+        if (not self._isConnecting) and (not self._isOnline):
+            self._isConnecting = 1
+            d = self._startLogOn(chatui)
+            d.addCallback(self._cb_logOn)
+            # if chatui is not None:
+            # (I don't particularly like having to pass chatUI to this function,
+            # but we haven't factored it out yet.)
+            d.addCallback(chatui.registerAccountClient)
+            d.addErrback(self._loginFailed)
+            return d
+        else:
+            raise error.ConnectError("Connection in progress")
+
+    def getGroup(self, name):
+        """Group factory.
+
+        @param name: Name of the group on this account.
+        @type name: string
+        """
+        group = self._groups.get(name)
+        if group is None:
+            group = self._groupFactory(name, self)
+            self._groups[name] = group
+        return group
+
+    def getPerson(self, name):
+        """Person factory.
+
+        @param name: Name of the person on this account.
+        @type name: string
+        """
+        person = self._persons.get(name)
+        if person is None:
+            person = self._personFactory(name, self)
+            self._persons[name] = person
+        return person
+
+    def _startLogOn(self, chatui):
+        """Start the sign on process.
+
+        Factored out of L{logOn}.
+
+        @returntype: Deferred L{interfaces.IClient}
+        """
+        raise NotImplementedError()
+
+    def _cb_logOn(self, client):
+        self._isConnecting = 0
+        self._isOnline = 1
+        self.client = client
+        return client
+
+    def _loginFailed(self, reason):
+        """Errorback for L{logOn}.
+
+        @type reason: Failure
+
+        @returns: I{reason}, for further processing in the callback chain.
+        @returntype: Failure
+        """
+        self._isConnecting = 0
+        self._isOnline = 0 # just in case
+        return reason
+
+    def _clientLost(self, client, reason):
+        self.client = None
+        self._isConnecting = 0
+        self._isOnline = 0
+        return reason
+
+    def __repr__(self):
+        return "<%s: %s (%s@%s:%s)>" % (self.__class__,
+                                        self.accountName,
+                                        self.username,
+                                        self.host,
+                                        self.port)
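
As the AbstractAccount docstring notes, a concrete backend mostly just has to
provide _startLogOn (and pair it with a client class that fires the logon
Deferred).  A minimal sketch using twisted.internet.protocol.ClientCreator;
EchoAccount and EchoClient are illustrative names, not part of the imported
code:

    from twisted.internet import defer, protocol, reactor
    from twisted.words.im import basesupport

    class EchoClient(basesupport.AbstractClientMixin, protocol.Protocol):
        """Hypothetical client that counts itself logged on once connected."""

        def connectionMade(self):
            basesupport.AbstractClientMixin.connectionMade(self)
            # Hand ourselves to AbstractAccount.logOn's callback chain
            # (_cb_logOn, then chatui.registerAccountClient).
            self._logonDeferred.callback(self)

    class EchoAccount(basesupport.AbstractAccount):
        def _startLogOn(self, chatui):
            d = defer.Deferred()
            cc = protocol.ClientCreator(reactor, EchoClient, self, chatui, d)
            # Route TCP-level connection failures into the logon Deferred so
            # that _loginFailed can reset the connecting state.
            cc.connectTCP(self.host, self.port).addErrback(d.errback)
            return d

Logging on is then account.logOn(chatui) on an instance built with the usual
(accountName, autoLogin, username, password, host, port) arguments; logOn
refuses to start a second attempt while one is already connecting or online.
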
diff --git a/ThirdParty/Twisted/twisted/words/im/instancemessenger.glade b/ThirdParty/Twisted/twisted/words/im/instancemessenger.glade
new file mode 100644
index 0000000..33ffaa2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/instancemessenger.glade
@@ -0,0 +1,3165 @@
+<?xml version="1.0"?>
+<GTK-Interface>
+
+<project>
+  <name>InstanceMessenger</name>
+  <program_name>instancemessenger</program_name>
+  <directory></directory>
+  <source_directory>src</source_directory>
+  <pixmaps_directory>pixmaps</pixmaps_directory>
+  <language>C</language>
+  <gnome_support>True</gnome_support>
+  <gettext_support>True</gettext_support>
+  <use_widget_names>True</use_widget_names>
+</project>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>UnseenConversationWindow</name>
+  <visible>False</visible>
+  <title>Unseen Conversation Window</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>False</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>False</auto_shrink>
+
+  <widget>
+    <class>GtkVBox</class>
+    <name>ConversationWidget</name>
+    <homogeneous>False</homogeneous>
+    <spacing>0</spacing>
+
+    <widget>
+      <class>GtkVPaned</class>
+      <name>vpaned1</name>
+      <handle_size>10</handle_size>
+      <gutter_size>6</gutter_size>
+      <position>0</position>
+      <child>
+	<padding>0</padding>
+	<expand>True</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkScrolledWindow</class>
+	<name>scrolledwindow10</name>
+	<hscrollbar_policy>GTK_POLICY_NEVER</hscrollbar_policy>
+	<vscrollbar_policy>GTK_POLICY_ALWAYS</vscrollbar_policy>
+	<hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	<vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	<child>
+	  <shrink>False</shrink>
+	  <resize>True</resize>
+	</child>
+
+	<widget>
+	  <class>GtkText</class>
+	  <name>ConversationOutput</name>
+	  <editable>False</editable>
+	  <text></text>
+	</widget>
+      </widget>
+
+      <widget>
+	<class>GtkScrolledWindow</class>
+	<name>scrolledwindow11</name>
+	<hscrollbar_policy>GTK_POLICY_NEVER</hscrollbar_policy>
+	<vscrollbar_policy>GTK_POLICY_AUTOMATIC</vscrollbar_policy>
+	<hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	<vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	<child>
+	  <shrink>True</shrink>
+	  <resize>False</resize>
+	</child>
+
+	<widget>
+	  <class>GtkText</class>
+	  <name>ConversationMessageEntry</name>
+	  <can_focus>True</can_focus>
+	  <has_focus>True</has_focus>
+	  <signal>
+	    <name>key_press_event</name>
+	    <handler>handle_key_press_event</handler>
+	    <last_modification_time>Tue, 29 Jan 2002 12:42:58 GMT</last_modification_time>
+	  </signal>
+	  <editable>True</editable>
+	  <text></text>
+	</widget>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkHBox</class>
+      <name>hbox9</name>
+      <homogeneous>True</homogeneous>
+      <spacing>0</spacing>
+      <child>
+	<padding>3</padding>
+	<expand>False</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkButton</class>
+	<name>button42</name>
+	<can_focus>True</can_focus>
+	<label> Send Message </label>
+	<relief>GTK_RELIEF_NORMAL</relief>
+	<child>
+	  <padding>3</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkButton</class>
+	<name>AddRemoveContact</name>
+	<can_focus>True</can_focus>
+	<label> Add Contact </label>
+	<relief>GTK_RELIEF_NORMAL</relief>
+	<child>
+	  <padding>3</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkButton</class>
+	<name>CloseContact</name>
+	<can_focus>True</can_focus>
+	<label> Close </label>
+	<relief>GTK_RELIEF_NORMAL</relief>
+	<child>
+	  <padding>3</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+      </widget>
+    </widget>
+  </widget>
+</widget>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>MainIMWindow</name>
+  <signal>
+    <name>destroy</name>
+    <handler>on_MainIMWindow_destroy</handler>
+    <last_modification_time>Sun, 21 Jul 2002 08:16:08 GMT</last_modification_time>
+  </signal>
+  <title>Instance Messenger</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>True</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>False</auto_shrink>
+
+  <widget>
+    <class>GtkNotebook</class>
+    <name>ContactsNotebook</name>
+    <can_focus>True</can_focus>
+    <signal>
+      <name>key_press_event</name>
+      <handler>on_ContactsWidget_key_press_event</handler>
+      <last_modification_time>Tue, 07 May 2002 03:02:33 GMT</last_modification_time>
+    </signal>
+    <show_tabs>True</show_tabs>
+    <show_border>True</show_border>
+    <tab_pos>GTK_POS_TOP</tab_pos>
+    <scrollable>False</scrollable>
+    <tab_hborder>2</tab_hborder>
+    <tab_vborder>2</tab_vborder>
+    <popup_enable>False</popup_enable>
+
+    <widget>
+      <class>GtkVBox</class>
+      <name>vbox11</name>
+      <homogeneous>False</homogeneous>
+      <spacing>0</spacing>
+
+      <widget>
+	<class>GtkLabel</class>
+	<name>OnlineCount</name>
+	<label>Online: %d</label>
+	<justify>GTK_JUSTIFY_CENTER</justify>
+	<wrap>False</wrap>
+	<xalign>0.5</xalign>
+	<yalign>0.5</yalign>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<child>
+	  <padding>0</padding>
+	  <expand>False</expand>
+	  <fill>False</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkScrolledWindow</class>
+	<name>scrolledwindow14</name>
+	<hscrollbar_policy>GTK_POLICY_AUTOMATIC</hscrollbar_policy>
+	<vscrollbar_policy>GTK_POLICY_AUTOMATIC</vscrollbar_policy>
+	<hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	<vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	<child>
+	  <padding>0</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+
+	<widget>
+	  <class>GtkCTree</class>
+	  <name>OnlineContactsTree</name>
+	  <can_focus>True</can_focus>
+	  <signal>
+	    <name>tree_select_row</name>
+	    <handler>on_OnlineContactsTree_tree_select_row</handler>
+	    <last_modification_time>Tue, 07 May 2002 03:06:32 GMT</last_modification_time>
+	  </signal>
+	  <signal>
+	    <name>select_row</name>
+	    <handler>on_OnlineContactsTree_select_row</handler>
+	    <last_modification_time>Tue, 07 May 2002 04:36:10 GMT</last_modification_time>
+	  </signal>
+	  <columns>4</columns>
+	  <column_widths>109,35,23,80</column_widths>
+	  <selection_mode>GTK_SELECTION_SINGLE</selection_mode>
+	  <show_titles>True</show_titles>
+	  <shadow_type>GTK_SHADOW_IN</shadow_type>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CTree:title</child_name>
+	    <name>label77</name>
+	    <label>Alias</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CTree:title</child_name>
+	    <name>label78</name>
+	    <label>Status</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CTree:title</child_name>
+	    <name>label79</name>
+	    <label>Idle</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CTree:title</child_name>
+	    <name>label80</name>
+	    <label>Account</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+	</widget>
+      </widget>
+
+      <widget>
+	<class>GtkVBox</class>
+	<name>vbox30</name>
+	<homogeneous>False</homogeneous>
+	<spacing>2</spacing>
+	<child>
+	  <padding>1</padding>
+	  <expand>False</expand>
+	  <fill>True</fill>
+	</child>
+
+	<widget>
+	  <class>GtkEntry</class>
+	  <name>ContactNameEntry</name>
+	  <can_focus>True</can_focus>
+	  <signal>
+	    <name>activate</name>
+	    <handler>on_ContactNameEntry_activate</handler>
+	    <last_modification_time>Tue, 07 May 2002 04:07:25 GMT</last_modification_time>
+	  </signal>
+	  <editable>True</editable>
+	  <text_visible>True</text_visible>
+	  <text_max_length>0</text_max_length>
+	  <text></text>
+	  <child>
+	    <padding>0</padding>
+	    <expand>False</expand>
+	    <fill>False</fill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkOptionMenu</class>
+	  <name>AccountsListPopup</name>
+	  <can_focus>True</can_focus>
+	  <items>Nothing
+To
+Speak
+Of
+</items>
+	  <initial_choice>1</initial_choice>
+	  <child>
+	    <padding>0</padding>
+	    <expand>False</expand>
+	    <fill>False</fill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkHBox</class>
+	  <name>hbox7</name>
+	  <homogeneous>False</homogeneous>
+	  <spacing>0</spacing>
+	  <child>
+	    <padding>0</padding>
+	    <expand>True</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkButton</class>
+	    <name>PlainSendIM</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>clicked</name>
+	      <handler>on_PlainSendIM_clicked</handler>
+	      <last_modification_time>Tue, 29 Jan 2002 03:17:35 GMT</last_modification_time>
+	    </signal>
+	    <label> Send IM </label>
+	    <relief>GTK_RELIEF_NORMAL</relief>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>False</fill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkButton</class>
+	    <name>PlainGetInfo</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>clicked</name>
+	      <handler>on_PlainGetInfo_clicked</handler>
+	      <last_modification_time>Tue, 07 May 2002 04:06:59 GMT</last_modification_time>
+	    </signal>
+	    <label> Get Info </label>
+	    <relief>GTK_RELIEF_NORMAL</relief>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>False</fill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkButton</class>
+	    <name>PlainJoinChat</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>clicked</name>
+	      <handler>on_PlainJoinChat_clicked</handler>
+	      <last_modification_time>Tue, 29 Jan 2002 13:04:49 GMT</last_modification_time>
+	    </signal>
+	    <label> Join Group </label>
+	    <relief>GTK_RELIEF_NORMAL</relief>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>False</fill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkButton</class>
+	    <name>PlainGoAway</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>clicked</name>
+	      <handler>on_PlainGoAway_clicked</handler>
+	      <last_modification_time>Tue, 07 May 2002 04:06:53 GMT</last_modification_time>
+	    </signal>
+	    <label> Go Away </label>
+	    <relief>GTK_RELIEF_NORMAL</relief>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>False</fill>
+	    </child>
+	  </widget>
+	</widget>
+
+	<widget>
+	  <class>GtkHBox</class>
+	  <name>hbox8</name>
+	  <homogeneous>False</homogeneous>
+	  <spacing>0</spacing>
+	  <child>
+	    <padding>0</padding>
+	    <expand>True</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkButton</class>
+	    <name>AddContactButton</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>clicked</name>
+	      <handler>on_AddContactButton_clicked</handler>
+	      <last_modification_time>Tue, 07 May 2002 04:06:33 GMT</last_modification_time>
+	    </signal>
+	    <label> Add Contact </label>
+	    <relief>GTK_RELIEF_NORMAL</relief>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>False</fill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkButton</class>
+	    <name>RemoveContactButton</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>clicked</name>
+	      <handler>on_RemoveContactButton_clicked</handler>
+	      <last_modification_time>Tue, 07 May 2002 04:06:28 GMT</last_modification_time>
+	    </signal>
+	    <label> Remove Contact </label>
+	    <relief>GTK_RELIEF_NORMAL</relief>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>False</fill>
+	    </child>
+	  </widget>
+	</widget>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <child_name>Notebook:tab</child_name>
+      <name>label35</name>
+      <label> Online Contacts </label>
+      <justify>GTK_JUSTIFY_CENTER</justify>
+      <wrap>False</wrap>
+      <xalign>0.5</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+    </widget>
+
+    <widget>
+      <class>GtkVBox</class>
+      <name>vbox14</name>
+      <homogeneous>False</homogeneous>
+      <spacing>0</spacing>
+
+      <widget>
+	<class>GtkScrolledWindow</class>
+	<name>OfflineContactsScroll</name>
+	<hscrollbar_policy>GTK_POLICY_AUTOMATIC</hscrollbar_policy>
+	<vscrollbar_policy>GTK_POLICY_ALWAYS</vscrollbar_policy>
+	<hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	<vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	<child>
+	  <padding>0</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+
+	<widget>
+	  <class>GtkCList</class>
+	  <name>OfflineContactsList</name>
+	  <can_focus>True</can_focus>
+	  <signal>
+	    <name>select_row</name>
+	    <handler>on_OfflineContactsList_select_row</handler>
+	    <last_modification_time>Tue, 07 May 2002 03:00:07 GMT</last_modification_time>
+	  </signal>
+	  <columns>4</columns>
+	  <column_widths>66,80,80,80</column_widths>
+	  <selection_mode>GTK_SELECTION_SINGLE</selection_mode>
+	  <show_titles>True</show_titles>
+	  <shadow_type>GTK_SHADOW_IN</shadow_type>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CList:title</child_name>
+	    <name>label41</name>
+	    <label>Contact</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CList:title</child_name>
+	    <name>label42</name>
+	    <label>Account</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CList:title</child_name>
+	    <name>label43</name>
+	    <label>Alias</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CList:title</child_name>
+	    <name>label44</name>
+	    <label>Group</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+	</widget>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <child_name>Notebook:tab</child_name>
+      <name>label36</name>
+      <label> All Contacts </label>
+      <justify>GTK_JUSTIFY_CENTER</justify>
+      <wrap>False</wrap>
+      <xalign>0.5</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+    </widget>
+
+    <widget>
+      <class>GtkVBox</class>
+      <name>AccountManWidget</name>
+      <homogeneous>False</homogeneous>
+      <spacing>0</spacing>
+
+      <widget>
+	<class>GtkScrolledWindow</class>
+	<name>scrolledwindow12</name>
+	<hscrollbar_policy>GTK_POLICY_AUTOMATIC</hscrollbar_policy>
+	<vscrollbar_policy>GTK_POLICY_ALWAYS</vscrollbar_policy>
+	<hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	<vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	<child>
+	  <padding>0</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+
+	<widget>
+	  <class>GtkCList</class>
+	  <name>accountsList</name>
+	  <can_focus>True</can_focus>
+	  <columns>4</columns>
+	  <column_widths>80,36,34,80</column_widths>
+	  <selection_mode>GTK_SELECTION_SINGLE</selection_mode>
+	  <show_titles>True</show_titles>
+	  <shadow_type>GTK_SHADOW_IN</shadow_type>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CList:title</child_name>
+	    <name>label45</name>
+	    <label>Service Name</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CList:title</child_name>
+	    <name>label46</name>
+	    <label>Online</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CList:title</child_name>
+	    <name>label47</name>
+	    <label>Auto</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <child_name>CList:title</child_name>
+	    <name>label48</name>
+	    <label>Gateway</label>
+	    <justify>GTK_JUSTIFY_CENTER</justify>
+	    <wrap>False</wrap>
+	    <xalign>0.5</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	  </widget>
+	</widget>
+      </widget>
+
+      <widget>
+	<class>GtkTable</class>
+	<name>table5</name>
+	<rows>2</rows>
+	<columns>3</columns>
+	<homogeneous>False</homogeneous>
+	<row_spacing>0</row_spacing>
+	<column_spacing>0</column_spacing>
+	<child>
+	  <padding>3</padding>
+	  <expand>False</expand>
+	  <fill>True</fill>
+	</child>
+
+	<widget>
+	  <class>GtkButton</class>
+	  <name>NewAccountButton</name>
+	  <can_default>True</can_default>
+	  <can_focus>True</can_focus>
+	  <signal>
+	    <name>clicked</name>
+	    <handler>on_NewAccountButton_clicked</handler>
+	    <last_modification_time>Sun, 27 Jan 2002 10:32:20 GMT</last_modification_time>
+	  </signal>
+	  <label>New Account</label>
+	  <relief>GTK_RELIEF_NORMAL</relief>
+	  <child>
+	    <left_attach>0</left_attach>
+	    <right_attach>1</right_attach>
+	    <top_attach>0</top_attach>
+	    <bottom_attach>1</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>False</xexpand>
+	    <yexpand>False</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>False</yfill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkButton</class>
+	  <name>button46</name>
+	  <sensitive>False</sensitive>
+	  <can_default>True</can_default>
+	  <label>Modify Account</label>
+	  <relief>GTK_RELIEF_NORMAL</relief>
+	  <child>
+	    <left_attach>1</left_attach>
+	    <right_attach>2</right_attach>
+	    <top_attach>0</top_attach>
+	    <bottom_attach>1</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>False</xexpand>
+	    <yexpand>False</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>False</yfill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkButton</class>
+	  <name>LogOnButton</name>
+	  <can_default>True</can_default>
+	  <has_default>True</has_default>
+	  <can_focus>True</can_focus>
+	  <has_focus>True</has_focus>
+	  <signal>
+	    <name>clicked</name>
+	    <handler>on_LogOnButton_clicked</handler>
+	    <last_modification_time>Mon, 28 Jan 2002 04:06:23 GMT</last_modification_time>
+	  </signal>
+	  <label>Logon</label>
+	  <relief>GTK_RELIEF_NORMAL</relief>
+	  <child>
+	    <left_attach>2</left_attach>
+	    <right_attach>3</right_attach>
+	    <top_attach>1</top_attach>
+	    <bottom_attach>2</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>False</xexpand>
+	    <yexpand>False</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>False</yfill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkButton</class>
+	  <name>DeleteAccountButton</name>
+	  <can_default>True</can_default>
+	  <can_focus>True</can_focus>
+	  <signal>
+	    <name>clicked</name>
+	    <handler>on_DeleteAccountButton_clicked</handler>
+	    <last_modification_time>Mon, 28 Jan 2002 00:18:22 GMT</last_modification_time>
+	  </signal>
+	  <label>Delete Account</label>
+	  <relief>GTK_RELIEF_NORMAL</relief>
+	  <child>
+	    <left_attach>2</left_attach>
+	    <right_attach>3</right_attach>
+	    <top_attach>0</top_attach>
+	    <bottom_attach>1</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>False</xexpand>
+	    <yexpand>False</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>False</yfill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkButton</class>
+	  <name>ConsoleButton</name>
+	  <can_default>True</can_default>
+	  <can_focus>True</can_focus>
+	  <signal>
+	    <name>clicked</name>
+	    <handler>on_ConsoleButton_clicked</handler>
+	    <last_modification_time>Mon, 29 Apr 2002 09:13:32 GMT</last_modification_time>
+	  </signal>
+	  <label>Console</label>
+	  <relief>GTK_RELIEF_NORMAL</relief>
+	  <child>
+	    <left_attach>1</left_attach>
+	    <right_attach>2</right_attach>
+	    <top_attach>1</top_attach>
+	    <bottom_attach>2</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>False</xexpand>
+	    <yexpand>False</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>False</yfill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkButton</class>
+	  <name>button75</name>
+	  <can_default>True</can_default>
+	  <can_focus>True</can_focus>
+	  <label>Quit</label>
+	  <relief>GTK_RELIEF_NORMAL</relief>
+	  <child>
+	    <left_attach>0</left_attach>
+	    <right_attach>1</right_attach>
+	    <top_attach>1</top_attach>
+	    <bottom_attach>2</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>True</xexpand>
+	    <yexpand>True</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>True</yfill>
+	  </child>
+	</widget>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <child_name>Notebook:tab</child_name>
+      <name>label107</name>
+      <label>Accounts</label>
+      <justify>GTK_JUSTIFY_CENTER</justify>
+      <wrap>False</wrap>
+      <xalign>0.5</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+    </widget>
+  </widget>
+</widget>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>UnseenGroupWindow</name>
+  <visible>False</visible>
+  <title>Unseen Group Window</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>False</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>False</auto_shrink>
+
+  <widget>
+    <class>GtkVBox</class>
+    <name>GroupChatBox</name>
+    <homogeneous>False</homogeneous>
+    <spacing>0</spacing>
+
+    <widget>
+      <class>GtkHBox</class>
+      <name>hbox5</name>
+      <homogeneous>False</homogeneous>
+      <spacing>0</spacing>
+      <child>
+	<padding>0</padding>
+	<expand>False</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkEntry</class>
+	<name>TopicEntry</name>
+	<can_focus>True</can_focus>
+	<signal>
+	  <name>activate</name>
+	  <handler>on_TopicEntry_activate</handler>
+	  <last_modification_time>Sat, 23 Feb 2002 02:57:41 GMT</last_modification_time>
+	</signal>
+	<signal>
+	  <name>focus_out_event</name>
+	  <handler>on_TopicEntry_focus_out_event</handler>
+	  <last_modification_time>Sun, 21 Jul 2002 09:36:54 GMT</last_modification_time>
+	</signal>
+	<editable>True</editable>
+	<text_visible>True</text_visible>
+	<text_max_length>0</text_max_length>
+	<text>&lt;TOPIC NOT RECEIVED&gt;</text>
+	<child>
+	  <padding>0</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkLabel</class>
+	<name>AuthorLabel</name>
+	<label>&lt;nobody&gt;</label>
+	<justify>GTK_JUSTIFY_CENTER</justify>
+	<wrap>False</wrap>
+	<xalign>0.5</xalign>
+	<yalign>0.5</yalign>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<child>
+	  <padding>0</padding>
+	  <expand>False</expand>
+	  <fill>False</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkButton</class>
+	<name>HideButton</name>
+	<can_focus>True</can_focus>
+	<signal>
+	  <name>clicked</name>
+	  <handler>on_HideButton_clicked</handler>
+	  <last_modification_time>Tue, 29 Jan 2002 14:10:00 GMT</last_modification_time>
+	</signal>
+	<label>&lt;</label>
+	<relief>GTK_RELIEF_NORMAL</relief>
+	<child>
+	  <padding>0</padding>
+	  <expand>False</expand>
+	  <fill>False</fill>
+	</child>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkVPaned</class>
+      <name>vpaned2</name>
+      <handle_size>10</handle_size>
+      <gutter_size>6</gutter_size>
+      <position>0</position>
+      <child>
+	<padding>0</padding>
+	<expand>True</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkHPaned</class>
+	<name>GroupHPaned</name>
+	<handle_size>6</handle_size>
+	<gutter_size>6</gutter_size>
+	<child>
+	  <shrink>False</shrink>
+	  <resize>True</resize>
+	</child>
+
+	<widget>
+	  <class>GtkScrolledWindow</class>
+	  <name>scrolledwindow4</name>
+	  <hscrollbar_policy>GTK_POLICY_NEVER</hscrollbar_policy>
+	  <vscrollbar_policy>GTK_POLICY_ALWAYS</vscrollbar_policy>
+	  <hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	  <vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	  <child>
+	    <shrink>False</shrink>
+	    <resize>True</resize>
+	  </child>
+
+	  <widget>
+	    <class>GtkText</class>
+	    <name>GroupOutput</name>
+	    <can_focus>True</can_focus>
+	    <editable>False</editable>
+	    <text></text>
+	  </widget>
+	</widget>
+
+	<widget>
+	  <class>GtkVBox</class>
+	  <name>actionvbox</name>
+	  <width>110</width>
+	  <homogeneous>False</homogeneous>
+	  <spacing>1</spacing>
+	  <child>
+	    <shrink>True</shrink>
+	    <resize>False</resize>
+	  </child>
+
+	  <widget>
+	    <class>GtkScrolledWindow</class>
+	    <name>scrolledwindow5</name>
+	    <hscrollbar_policy>GTK_POLICY_NEVER</hscrollbar_policy>
+	    <vscrollbar_policy>GTK_POLICY_ALWAYS</vscrollbar_policy>
+	    <hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	    <vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>True</fill>
+	    </child>
+
+	    <widget>
+	      <class>GtkCList</class>
+	      <name>ParticipantList</name>
+	      <can_focus>True</can_focus>
+	      <signal>
+		<name>select_row</name>
+		<handler>on_ParticipantList_select_row</handler>
+		<last_modification_time>Sat, 13 Jul 2002 08:11:12 GMT</last_modification_time>
+	      </signal>
+	      <signal>
+		<name>unselect_row</name>
+		<handler>on_ParticipantList_unselect_row</handler>
+		<last_modification_time>Sat, 13 Jul 2002 08:23:25 GMT</last_modification_time>
+	      </signal>
+	      <columns>1</columns>
+	      <column_widths>80</column_widths>
+	      <selection_mode>GTK_SELECTION_SINGLE</selection_mode>
+	      <show_titles>False</show_titles>
+	      <shadow_type>GTK_SHADOW_IN</shadow_type>
+
+	      <widget>
+		<class>GtkLabel</class>
+		<child_name>CList:title</child_name>
+		<name>label18</name>
+		<label>Users</label>
+		<justify>GTK_JUSTIFY_CENTER</justify>
+		<wrap>False</wrap>
+		<xalign>0.5</xalign>
+		<yalign>0.5</yalign>
+		<xpad>0</xpad>
+		<ypad>0</ypad>
+	      </widget>
+	    </widget>
+	  </widget>
+
+	  <widget>
+	    <class>GtkFrame</class>
+	    <name>frame10</name>
+	    <label>Group</label>
+	    <label_xalign>0</label_xalign>
+	    <shadow_type>GTK_SHADOW_ETCHED_IN</shadow_type>
+	    <child>
+	      <padding>0</padding>
+	      <expand>False</expand>
+	      <fill>False</fill>
+	    </child>
+
+	    <widget>
+	      <class>GtkVBox</class>
+	      <name>GroupActionsBox</name>
+	      <homogeneous>False</homogeneous>
+	      <spacing>0</spacing>
+
+	      <widget>
+		<class>Placeholder</class>
+	      </widget>
+
+	      <widget>
+		<class>Placeholder</class>
+	      </widget>
+
+	      <widget>
+		<class>Placeholder</class>
+	      </widget>
+	    </widget>
+	  </widget>
+
+	  <widget>
+	    <class>GtkFrame</class>
+	    <name>PersonFrame</name>
+	    <label>Person</label>
+	    <label_xalign>0</label_xalign>
+	    <shadow_type>GTK_SHADOW_ETCHED_IN</shadow_type>
+	    <child>
+	      <padding>0</padding>
+	      <expand>False</expand>
+	      <fill>False</fill>
+	    </child>
+
+	    <widget>
+	      <class>GtkVBox</class>
+	      <name>PersonActionsBox</name>
+	      <homogeneous>False</homogeneous>
+	      <spacing>0</spacing>
+
+	      <widget>
+		<class>Placeholder</class>
+	      </widget>
+
+	      <widget>
+		<class>Placeholder</class>
+	      </widget>
+
+	      <widget>
+		<class>Placeholder</class>
+	      </widget>
+	    </widget>
+	  </widget>
+	</widget>
+      </widget>
+
+      <widget>
+	<class>GtkHBox</class>
+	<name>hbox6</name>
+	<homogeneous>False</homogeneous>
+	<spacing>0</spacing>
+	<child>
+	  <shrink>True</shrink>
+	  <resize>False</resize>
+	</child>
+
+	<widget>
+	  <class>GtkLabel</class>
+	  <name>NickLabel</name>
+	  <label>&lt;no nick&gt;</label>
+	  <justify>GTK_JUSTIFY_CENTER</justify>
+	  <wrap>False</wrap>
+	  <xalign>0.5</xalign>
+	  <yalign>0.5</yalign>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <child>
+	    <padding>4</padding>
+	    <expand>False</expand>
+	    <fill>False</fill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkScrolledWindow</class>
+	  <name>scrolledwindow9</name>
+	  <hscrollbar_policy>GTK_POLICY_NEVER</hscrollbar_policy>
+	  <vscrollbar_policy>GTK_POLICY_AUTOMATIC</vscrollbar_policy>
+	  <hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	  <vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	  <child>
+	    <padding>0</padding>
+	    <expand>True</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkText</class>
+	    <name>GroupInput</name>
+	    <can_focus>True</can_focus>
+	    <has_focus>True</has_focus>
+	    <signal>
+	      <name>key_press_event</name>
+	      <handler>handle_key_press_event</handler>
+	      <last_modification_time>Tue, 29 Jan 2002 12:41:03 GMT</last_modification_time>
+	    </signal>
+	    <editable>True</editable>
+	    <text></text>
+	  </widget>
+	</widget>
+      </widget>
+    </widget>
+  </widget>
+</widget>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>NewAccountWindow</name>
+  <border_width>3</border_width>
+  <visible>False</visible>
+  <signal>
+    <name>destroy</name>
+    <handler>on_NewAccountWindow_destroy</handler>
+    <last_modification_time>Sun, 27 Jan 2002 10:35:19 GMT</last_modification_time>
+  </signal>
+  <title>New Account</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>False</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>True</auto_shrink>
+
+  <widget>
+    <class>GtkVBox</class>
+    <name>vbox17</name>
+    <homogeneous>False</homogeneous>
+    <spacing>0</spacing>
+
+    <widget>
+      <class>GtkHBox</class>
+      <name>hbox11</name>
+      <homogeneous>False</homogeneous>
+      <spacing>0</spacing>
+      <child>
+	<padding>3</padding>
+	<expand>False</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkLabel</class>
+	<name>label49</name>
+	<label>Gateway:</label>
+	<justify>GTK_JUSTIFY_CENTER</justify>
+	<wrap>False</wrap>
+	<xalign>0.5</xalign>
+	<yalign>0.5</yalign>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<child>
+	  <padding>0</padding>
+	  <expand>False</expand>
+	  <fill>True</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkOptionMenu</class>
+	<name>GatewayOptionMenu</name>
+	<can_focus>True</can_focus>
+	<items>Twisted (Perspective Broker)
+Internet Relay Chat
+AIM (TOC)
+AIM (OSCAR)
+</items>
+	<initial_choice>0</initial_choice>
+	<child>
+	  <padding>4</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkFrame</class>
+      <name>GatewayFrame</name>
+      <border_width>3</border_width>
+      <label>Gateway Options</label>
+      <label_xalign>0</label_xalign>
+      <shadow_type>GTK_SHADOW_ETCHED_IN</shadow_type>
+      <child>
+	<padding>0</padding>
+	<expand>True</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>Placeholder</class>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkFrame</class>
+      <name>frame2</name>
+      <border_width>3</border_width>
+      <label>Standard Options</label>
+      <label_xalign>0</label_xalign>
+      <shadow_type>GTK_SHADOW_ETCHED_IN</shadow_type>
+      <child>
+	<padding>0</padding>
+	<expand>False</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkTable</class>
+	<name>table1</name>
+	<border_width>3</border_width>
+	<rows>2</rows>
+	<columns>2</columns>
+	<homogeneous>False</homogeneous>
+	<row_spacing>0</row_spacing>
+	<column_spacing>0</column_spacing>
+
+	<widget>
+	  <class>GtkCheckButton</class>
+	  <name>AutoLogin</name>
+	  <can_focus>True</can_focus>
+	  <label>Automatically Log In</label>
+	  <active>False</active>
+	  <draw_indicator>True</draw_indicator>
+	  <child>
+	    <left_attach>1</left_attach>
+	    <right_attach>2</right_attach>
+	    <top_attach>0</top_attach>
+	    <bottom_attach>1</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>True</xexpand>
+	    <yexpand>True</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>False</yfill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkEntry</class>
+	  <name>accountName</name>
+	  <can_focus>True</can_focus>
+	  <editable>True</editable>
+	  <text_visible>True</text_visible>
+	  <text_max_length>0</text_max_length>
+	  <text></text>
+	  <child>
+	    <left_attach>1</left_attach>
+	    <right_attach>2</right_attach>
+	    <top_attach>1</top_attach>
+	    <bottom_attach>2</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>True</xexpand>
+	    <yexpand>True</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>False</yfill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkLabel</class>
+	  <name>label50</name>
+	  <label>   Auto-Login: </label>
+	  <justify>GTK_JUSTIFY_RIGHT</justify>
+	  <wrap>False</wrap>
+	  <xalign>0</xalign>
+	  <yalign>0.5</yalign>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <child>
+	    <left_attach>0</left_attach>
+	    <right_attach>1</right_attach>
+	    <top_attach>0</top_attach>
+	    <bottom_attach>1</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>False</xexpand>
+	    <yexpand>True</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>True</yfill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkLabel</class>
+	  <name>label51</name>
+	  <label>Account Name: </label>
+	  <justify>GTK_JUSTIFY_RIGHT</justify>
+	  <wrap>False</wrap>
+	  <xalign>0</xalign>
+	  <yalign>0.5</yalign>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <child>
+	    <left_attach>0</left_attach>
+	    <right_attach>1</right_attach>
+	    <top_attach>1</top_attach>
+	    <bottom_attach>2</bottom_attach>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <xexpand>False</xexpand>
+	    <yexpand>True</yexpand>
+	    <xshrink>False</xshrink>
+	    <yshrink>False</yshrink>
+	    <xfill>True</xfill>
+	    <yfill>True</yfill>
+	  </child>
+	</widget>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkHButtonBox</class>
+      <name>hbuttonbox2</name>
+      <layout_style>GTK_BUTTONBOX_SPREAD</layout_style>
+      <spacing>30</spacing>
+      <child_min_width>85</child_min_width>
+      <child_min_height>27</child_min_height>
+      <child_ipad_x>7</child_ipad_x>
+      <child_ipad_y>0</child_ipad_y>
+      <child>
+	<padding>0</padding>
+	<expand>False</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkButton</class>
+	<name>button50</name>
+	<can_default>True</can_default>
+	<can_focus>True</can_focus>
+	<signal>
+	  <name>clicked</name>
+	  <handler>createAccount</handler>
+	  <last_modification_time>Sun, 27 Jan 2002 11:25:05 GMT</last_modification_time>
+	</signal>
+	<label>OK</label>
+	<relief>GTK_RELIEF_NORMAL</relief>
+      </widget>
+
+      <widget>
+	<class>GtkButton</class>
+	<name>button51</name>
+	<can_default>True</can_default>
+	<can_focus>True</can_focus>
+	<signal>
+	  <name>clicked</name>
+	  <handler>destroyMe</handler>
+	  <last_modification_time>Sun, 27 Jan 2002 11:27:12 GMT</last_modification_time>
+	</signal>
+	<label>Cancel</label>
+	<relief>GTK_RELIEF_NORMAL</relief>
+      </widget>
+    </widget>
+  </widget>
+</widget>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>PBAccountWindow</name>
+  <visible>False</visible>
+  <title>PB Account Window</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>False</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>False</auto_shrink>
+
+  <widget>
+    <class>GtkVBox</class>
+    <name>PBAccountWidget</name>
+    <border_width>4</border_width>
+    <homogeneous>False</homogeneous>
+    <spacing>0</spacing>
+
+    <widget>
+      <class>GtkTable</class>
+      <name>table3</name>
+      <rows>4</rows>
+      <columns>2</columns>
+      <homogeneous>False</homogeneous>
+      <row_spacing>0</row_spacing>
+      <column_spacing>0</column_spacing>
+      <child>
+	<padding>0</padding>
+	<expand>False</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkEntry</class>
+	<name>hostname</name>
+	<can_focus>True</can_focus>
+	<editable>True</editable>
+	<text_visible>True</text_visible>
+	<text_max_length>0</text_max_length>
+	<text>twistedmatrix.com</text>
+	<child>
+	  <left_attach>1</left_attach>
+	  <right_attach>2</right_attach>
+	  <top_attach>2</top_attach>
+	  <bottom_attach>3</bottom_attach>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <xexpand>True</xexpand>
+	  <yexpand>False</yexpand>
+	  <xshrink>False</xshrink>
+	  <yshrink>False</yshrink>
+	  <xfill>True</xfill>
+	  <yfill>False</yfill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkEntry</class>
+	<name>identity</name>
+	<can_focus>True</can_focus>
+	<has_focus>True</has_focus>
+	<signal>
+	  <name>changed</name>
+	  <handler>on_identity_changed</handler>
+	  <last_modification_time>Sun, 27 Jan 2002 11:52:17 GMT</last_modification_time>
+	</signal>
+	<editable>True</editable>
+	<text_visible>True</text_visible>
+	<text_max_length>0</text_max_length>
+	<text></text>
+	<child>
+	  <left_attach>1</left_attach>
+	  <right_attach>2</right_attach>
+	  <top_attach>0</top_attach>
+	  <bottom_attach>1</bottom_attach>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <xexpand>True</xexpand>
+	  <yexpand>False</yexpand>
+	  <xshrink>False</xshrink>
+	  <yshrink>False</yshrink>
+	  <xfill>True</xfill>
+	  <yfill>False</yfill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkLabel</class>
+	<name>label52</name>
+	<label>     Hostname: </label>
+	<justify>GTK_JUSTIFY_RIGHT</justify>
+	<wrap>False</wrap>
+	<xalign>0</xalign>
+	<yalign>0.5</yalign>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<child>
+	  <left_attach>0</left_attach>
+	  <right_attach>1</right_attach>
+	  <top_attach>2</top_attach>
+	  <bottom_attach>3</bottom_attach>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <xexpand>False</xexpand>
+	  <yexpand>False</yexpand>
+	  <xshrink>False</xshrink>
+	  <yshrink>False</yshrink>
+	  <xfill>True</xfill>
+	  <yfill>False</yfill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkLabel</class>
+	<name>label54</name>
+	<label>Identity Name: </label>
+	<justify>GTK_JUSTIFY_RIGHT</justify>
+	<wrap>False</wrap>
+	<xalign>0</xalign>
+	<yalign>0.5</yalign>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<child>
+	  <left_attach>0</left_attach>
+	  <right_attach>1</right_attach>
+	  <top_attach>0</top_attach>
+	  <bottom_attach>1</bottom_attach>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <xexpand>False</xexpand>
+	  <yexpand>False</yexpand>
+	  <xshrink>False</xshrink>
+	  <yshrink>False</yshrink>
+	  <xfill>True</xfill>
+	  <yfill>False</yfill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkEntry</class>
+	<name>password</name>
+	<can_focus>True</can_focus>
+	<editable>True</editable>
+	<text_visible>False</text_visible>
+	<text_max_length>0</text_max_length>
+	<text></text>
+	<child>
+	  <left_attach>1</left_attach>
+	  <right_attach>2</right_attach>
+	  <top_attach>1</top_attach>
+	  <bottom_attach>2</bottom_attach>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <xexpand>True</xexpand>
+	  <yexpand>False</yexpand>
+	  <xshrink>False</xshrink>
+	  <yshrink>False</yshrink>
+	  <xfill>True</xfill>
+	  <yfill>False</yfill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkEntry</class>
+	<name>portno</name>
+	<can_focus>True</can_focus>
+	<editable>True</editable>
+	<text_visible>True</text_visible>
+	<text_max_length>0</text_max_length>
+	<text>8787</text>
+	<child>
+	  <left_attach>1</left_attach>
+	  <right_attach>2</right_attach>
+	  <top_attach>3</top_attach>
+	  <bottom_attach>4</bottom_attach>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <xexpand>True</xexpand>
+	  <yexpand>False</yexpand>
+	  <xshrink>False</xshrink>
+	  <yshrink>False</yshrink>
+	  <xfill>True</xfill>
+	  <yfill>False</yfill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkLabel</class>
+	<name>label55</name>
+	<label>     Password: </label>
+	<justify>GTK_JUSTIFY_RIGHT</justify>
+	<wrap>False</wrap>
+	<xalign>0</xalign>
+	<yalign>0.5</yalign>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<child>
+	  <left_attach>0</left_attach>
+	  <right_attach>1</right_attach>
+	  <top_attach>1</top_attach>
+	  <bottom_attach>2</bottom_attach>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <xexpand>False</xexpand>
+	  <yexpand>False</yexpand>
+	  <xshrink>False</xshrink>
+	  <yshrink>False</yshrink>
+	  <xfill>True</xfill>
+	  <yfill>False</yfill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkLabel</class>
+	<name>label53</name>
+	<label>  Port Number: </label>
+	<justify>GTK_JUSTIFY_RIGHT</justify>
+	<wrap>False</wrap>
+	<xalign>0</xalign>
+	<yalign>0.5</yalign>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<child>
+	  <left_attach>0</left_attach>
+	  <right_attach>1</right_attach>
+	  <top_attach>3</top_attach>
+	  <bottom_attach>4</bottom_attach>
+	  <xpad>0</xpad>
+	  <ypad>0</ypad>
+	  <xexpand>False</xexpand>
+	  <yexpand>False</yexpand>
+	  <xshrink>False</xshrink>
+	  <yshrink>False</yshrink>
+	  <xfill>True</xfill>
+	  <yfill>False</yfill>
+	</child>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkFrame</class>
+      <name>frame3</name>
+      <label>Perspectives</label>
+      <label_xalign>0</label_xalign>
+      <shadow_type>GTK_SHADOW_ETCHED_IN</shadow_type>
+      <child>
+	<padding>0</padding>
+	<expand>True</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkVBox</class>
+	<name>vbox19</name>
+	<border_width>3</border_width>
+	<homogeneous>False</homogeneous>
+	<spacing>0</spacing>
+
+	<widget>
+	  <class>GtkScrolledWindow</class>
+	  <name>scrolledwindow13</name>
+	  <hscrollbar_policy>GTK_POLICY_AUTOMATIC</hscrollbar_policy>
+	  <vscrollbar_policy>GTK_POLICY_ALWAYS</vscrollbar_policy>
+	  <hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	  <vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	  <child>
+	    <padding>0</padding>
+	    <expand>True</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkCList</class>
+	    <name>serviceList</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>select_row</name>
+	      <handler>on_serviceList_select_row</handler>
+	      <last_modification_time>Sun, 27 Jan 2002 12:04:38 GMT</last_modification_time>
+	    </signal>
+	    <columns>3</columns>
+	    <column_widths>80,80,80</column_widths>
+	    <selection_mode>GTK_SELECTION_SINGLE</selection_mode>
+	    <show_titles>True</show_titles>
+	    <shadow_type>GTK_SHADOW_IN</shadow_type>
+
+	    <widget>
+	      <class>GtkLabel</class>
+	      <child_name>CList:title</child_name>
+	      <name>label60</name>
+	      <label>Service Type</label>
+	      <justify>GTK_JUSTIFY_CENTER</justify>
+	      <wrap>False</wrap>
+	      <xalign>0.5</xalign>
+	      <yalign>0.5</yalign>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	    </widget>
+
+	    <widget>
+	      <class>GtkLabel</class>
+	      <child_name>CList:title</child_name>
+	      <name>label61</name>
+	      <label>Service Name</label>
+	      <justify>GTK_JUSTIFY_CENTER</justify>
+	      <wrap>False</wrap>
+	      <xalign>0.5</xalign>
+	      <yalign>0.5</yalign>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	    </widget>
+
+	    <widget>
+	      <class>GtkLabel</class>
+	      <child_name>CList:title</child_name>
+	      <name>label62</name>
+	      <label>Perspective Name</label>
+	      <justify>GTK_JUSTIFY_CENTER</justify>
+	      <wrap>False</wrap>
+	      <xalign>0.5</xalign>
+	      <yalign>0.5</yalign>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	    </widget>
+	  </widget>
+	</widget>
+
+	<widget>
+	  <class>GtkTable</class>
+	  <name>table4</name>
+	  <rows>3</rows>
+	  <columns>2</columns>
+	  <homogeneous>False</homogeneous>
+	  <row_spacing>0</row_spacing>
+	  <column_spacing>0</column_spacing>
+	  <child>
+	    <padding>0</padding>
+	    <expand>False</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <name>label63</name>
+	    <label>Perspective Name: </label>
+	    <justify>GTK_JUSTIFY_RIGHT</justify>
+	    <wrap>False</wrap>
+	    <xalign>0</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <child>
+	      <left_attach>0</left_attach>
+	      <right_attach>1</right_attach>
+	      <top_attach>2</top_attach>
+	      <bottom_attach>3</bottom_attach>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	      <xexpand>False</xexpand>
+	      <yexpand>False</yexpand>
+	      <xshrink>False</xshrink>
+	      <yshrink>False</yshrink>
+	      <xfill>True</xfill>
+	      <yfill>False</yfill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <name>label59</name>
+	    <label>    Service Type: </label>
+	    <justify>GTK_JUSTIFY_RIGHT</justify>
+	    <wrap>False</wrap>
+	    <xalign>0</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <child>
+	      <left_attach>0</left_attach>
+	      <right_attach>1</right_attach>
+	      <top_attach>0</top_attach>
+	      <bottom_attach>1</bottom_attach>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	      <xexpand>False</xexpand>
+	      <yexpand>False</yexpand>
+	      <xshrink>False</xshrink>
+	      <yshrink>False</yshrink>
+	      <xfill>True</xfill>
+	      <yfill>False</yfill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkCombo</class>
+	    <name>serviceCombo</name>
+	    <value_in_list>False</value_in_list>
+	    <ok_if_empty>True</ok_if_empty>
+	    <case_sensitive>False</case_sensitive>
+	    <use_arrows>True</use_arrows>
+	    <use_arrows_always>False</use_arrows_always>
+	    <items>twisted.words
+twisted.reality
+twisted.manhole
+</items>
+	    <child>
+	      <left_attach>1</left_attach>
+	      <right_attach>2</right_attach>
+	      <top_attach>0</top_attach>
+	      <bottom_attach>1</bottom_attach>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	      <xexpand>True</xexpand>
+	      <yexpand>False</yexpand>
+	      <xshrink>False</xshrink>
+	      <yshrink>False</yshrink>
+	      <xfill>True</xfill>
+	      <yfill>False</yfill>
+	    </child>
+
+	    <widget>
+	      <class>GtkEntry</class>
+	      <child_name>GtkCombo:entry</child_name>
+	      <name>serviceType</name>
+	      <can_focus>True</can_focus>
+	      <signal>
+		<name>changed</name>
+		<handler>on_serviceType_changed</handler>
+		<last_modification_time>Sun, 27 Jan 2002 11:49:07 GMT</last_modification_time>
+	      </signal>
+	      <editable>True</editable>
+	      <text_visible>True</text_visible>
+	      <text_max_length>0</text_max_length>
+	      <text>twisted.words</text>
+	    </widget>
+	  </widget>
+
+	  <widget>
+	    <class>GtkLabel</class>
+	    <name>label64</name>
+	    <label>    Service Name: </label>
+	    <justify>GTK_JUSTIFY_RIGHT</justify>
+	    <wrap>False</wrap>
+	    <xalign>0</xalign>
+	    <yalign>0.5</yalign>
+	    <xpad>0</xpad>
+	    <ypad>0</ypad>
+	    <child>
+	      <left_attach>0</left_attach>
+	      <right_attach>1</right_attach>
+	      <top_attach>1</top_attach>
+	      <bottom_attach>2</bottom_attach>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	      <xexpand>False</xexpand>
+	      <yexpand>False</yexpand>
+	      <xshrink>False</xshrink>
+	      <yshrink>False</yshrink>
+	      <xfill>True</xfill>
+	      <yfill>False</yfill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkEntry</class>
+	    <name>serviceName</name>
+	    <can_focus>True</can_focus>
+	    <editable>True</editable>
+	    <text_visible>True</text_visible>
+	    <text_max_length>0</text_max_length>
+	    <text></text>
+	    <child>
+	      <left_attach>1</left_attach>
+	      <right_attach>2</right_attach>
+	      <top_attach>1</top_attach>
+	      <bottom_attach>2</bottom_attach>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	      <xexpand>True</xexpand>
+	      <yexpand>False</yexpand>
+	      <xshrink>False</xshrink>
+	      <yshrink>False</yshrink>
+	      <xfill>True</xfill>
+	      <yfill>False</yfill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkEntry</class>
+	    <name>perspectiveName</name>
+	    <can_focus>True</can_focus>
+	    <editable>True</editable>
+	    <text_visible>True</text_visible>
+	    <text_max_length>0</text_max_length>
+	    <text></text>
+	    <child>
+	      <left_attach>1</left_attach>
+	      <right_attach>2</right_attach>
+	      <top_attach>2</top_attach>
+	      <bottom_attach>3</bottom_attach>
+	      <xpad>0</xpad>
+	      <ypad>0</ypad>
+	      <xexpand>True</xexpand>
+	      <yexpand>False</yexpand>
+	      <xshrink>False</xshrink>
+	      <yshrink>False</yshrink>
+	      <xfill>True</xfill>
+	      <yfill>False</yfill>
+	    </child>
+	  </widget>
+	</widget>
+
+	<widget>
+	  <class>GtkHBox</class>
+	  <name>hbox13</name>
+	  <homogeneous>False</homogeneous>
+	  <spacing>0</spacing>
+	  <child>
+	    <padding>0</padding>
+	    <expand>False</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkButton</class>
+	    <name>button53</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>clicked</name>
+	      <handler>addPerspective</handler>
+	      <last_modification_time>Mon, 28 Jan 2002 01:07:15 GMT</last_modification_time>
+	    </signal>
+	    <label> Add Perspective </label>
+	    <relief>GTK_RELIEF_NORMAL</relief>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>False</fill>
+	    </child>
+	  </widget>
+
+	  <widget>
+	    <class>GtkButton</class>
+	    <name>button54</name>
+	    <can_focus>True</can_focus>
+	    <signal>
+	      <name>clicked</name>
+	      <handler>removePerspective</handler>
+	      <last_modification_time>Sun, 27 Jan 2002 11:34:36 GMT</last_modification_time>
+	    </signal>
+	    <label>Remove Perspective</label>
+	    <relief>GTK_RELIEF_NORMAL</relief>
+	    <child>
+	      <padding>0</padding>
+	      <expand>True</expand>
+	      <fill>False</fill>
+	    </child>
+	  </widget>
+	</widget>
+      </widget>
+    </widget>
+  </widget>
+</widget>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>IRCAccountWindow</name>
+  <title>IRC Account Window</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>False</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>False</auto_shrink>
+
+  <widget>
+    <class>GtkTable</class>
+    <name>IRCAccountWidget</name>
+    <rows>5</rows>
+    <columns>2</columns>
+    <homogeneous>False</homogeneous>
+    <row_spacing>0</row_spacing>
+    <column_spacing>0</column_spacing>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label65</name>
+      <label> Nickname: </label>
+      <justify>GTK_JUSTIFY_RIGHT</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>0</top_attach>
+	<bottom_attach>1</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label66</name>
+      <label>   Server: </label>
+      <justify>GTK_JUSTIFY_RIGHT</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>1</top_attach>
+	<bottom_attach>2</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label67</name>
+      <label>     Port: </label>
+      <justify>GTK_JUSTIFY_RIGHT</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>2</top_attach>
+	<bottom_attach>3</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label68</name>
+      <label> Channels: </label>
+      <justify>GTK_JUSTIFY_RIGHT</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>3</top_attach>
+	<bottom_attach>4</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label69</name>
+      <label> Password: </label>
+      <justify>GTK_JUSTIFY_RIGHT</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>4</top_attach>
+	<bottom_attach>5</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>ircNick</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>True</text_visible>
+      <text_max_length>0</text_max_length>
+      <text></text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>0</top_attach>
+	<bottom_attach>1</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>ircServer</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>True</text_visible>
+      <text_max_length>0</text_max_length>
+      <text></text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>1</top_attach>
+	<bottom_attach>2</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>ircPort</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>True</text_visible>
+      <text_max_length>0</text_max_length>
+      <text>6667</text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>2</top_attach>
+	<bottom_attach>3</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>ircChannels</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>True</text_visible>
+      <text_max_length>0</text_max_length>
+      <text></text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>3</top_attach>
+	<bottom_attach>4</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>ircPassword</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>True</text_visible>
+      <text_max_length>0</text_max_length>
+      <text></text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>4</top_attach>
+	<bottom_attach>5</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+  </widget>
+</widget>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>TOCAccountWindow</name>
+  <title>TOC Account Window</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>False</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>False</auto_shrink>
+
+  <widget>
+    <class>GtkTable</class>
+    <name>TOCAccountWidget</name>
+    <rows>4</rows>
+    <columns>2</columns>
+    <homogeneous>False</homogeneous>
+    <row_spacing>0</row_spacing>
+    <column_spacing>0</column_spacing>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label70</name>
+      <label> Screen Name: </label>
+      <justify>GTK_JUSTIFY_CENTER</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>0</top_attach>
+	<bottom_attach>1</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label71</name>
+      <label>    Password: </label>
+      <justify>GTK_JUSTIFY_CENTER</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>1</top_attach>
+	<bottom_attach>2</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label72</name>
+      <label>        Host: </label>
+      <justify>GTK_JUSTIFY_CENTER</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>2</top_attach>
+	<bottom_attach>3</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkLabel</class>
+      <name>label73</name>
+      <label>        Port: </label>
+      <justify>GTK_JUSTIFY_CENTER</justify>
+      <wrap>False</wrap>
+      <xalign>0</xalign>
+      <yalign>0.5</yalign>
+      <xpad>0</xpad>
+      <ypad>0</ypad>
+      <child>
+	<left_attach>0</left_attach>
+	<right_attach>1</right_attach>
+	<top_attach>3</top_attach>
+	<bottom_attach>4</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>False</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>TOCName</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>True</text_visible>
+      <text_max_length>0</text_max_length>
+      <text></text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>0</top_attach>
+	<bottom_attach>1</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>TOCPass</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>False</text_visible>
+      <text_max_length>0</text_max_length>
+      <text></text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>1</top_attach>
+	<bottom_attach>2</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>TOCHost</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>True</text_visible>
+      <text_max_length>0</text_max_length>
+      <text>toc.oscar.aol.com</text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>2</top_attach>
+	<bottom_attach>3</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkEntry</class>
+      <name>TOCPort</name>
+      <can_focus>True</can_focus>
+      <editable>True</editable>
+      <text_visible>True</text_visible>
+      <text_max_length>0</text_max_length>
+      <text>9898</text>
+      <child>
+	<left_attach>1</left_attach>
+	<right_attach>2</right_attach>
+	<top_attach>3</top_attach>
+	<bottom_attach>4</bottom_attach>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<xexpand>True</xexpand>
+	<yexpand>False</yexpand>
+	<xshrink>False</xshrink>
+	<yshrink>False</yshrink>
+	<xfill>True</xfill>
+	<yfill>False</yfill>
+      </child>
+    </widget>
+  </widget>
+</widget>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>JoinGroupWindow</name>
+  <border_width>5</border_width>
+  <visible>False</visible>
+  <title>Group to Join</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>False</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>False</auto_shrink>
+
+  <widget>
+    <class>GtkVBox</class>
+    <name>vbox20</name>
+    <homogeneous>False</homogeneous>
+    <spacing>0</spacing>
+
+    <widget>
+      <class>GtkOptionMenu</class>
+      <name>AccountSelector</name>
+      <can_focus>True</can_focus>
+      <items>None
+In
+Particular
+</items>
+      <initial_choice>0</initial_choice>
+      <child>
+	<padding>0</padding>
+	<expand>False</expand>
+	<fill>False</fill>
+      </child>
+    </widget>
+
+    <widget>
+      <class>GtkHBox</class>
+      <name>hbox15</name>
+      <homogeneous>False</homogeneous>
+      <spacing>5</spacing>
+      <child>
+	<padding>0</padding>
+	<expand>True</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkEntry</class>
+	<name>GroupNameEntry</name>
+	<can_focus>True</can_focus>
+	<has_focus>True</has_focus>
+	<signal>
+	  <name>activate</name>
+	  <handler>on_GroupJoinButton_clicked</handler>
+	  <last_modification_time>Tue, 29 Jan 2002 13:27:18 GMT</last_modification_time>
+	</signal>
+	<editable>True</editable>
+	<text_visible>True</text_visible>
+	<text_max_length>0</text_max_length>
+	<text></text>
+	<child>
+	  <padding>0</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkButton</class>
+	<name>GroupJoinButton</name>
+	<can_default>True</can_default>
+	<has_default>True</has_default>
+	<can_focus>True</can_focus>
+	<signal>
+	  <name>clicked</name>
+	  <handler>on_GroupJoinButton_clicked</handler>
+	  <last_modification_time>Tue, 29 Jan 2002 13:16:50 GMT</last_modification_time>
+	</signal>
+	<label>Join</label>
+	<relief>GTK_RELIEF_NORMAL</relief>
+	<child>
+	  <padding>0</padding>
+	  <expand>False</expand>
+	  <fill>False</fill>
+	</child>
+      </widget>
+    </widget>
+  </widget>
+</widget>
+
+<widget>
+  <class>GtkWindow</class>
+  <name>UnifiedWindow</name>
+  <title>Twisted Instance Messenger</title>
+  <type>GTK_WINDOW_TOPLEVEL</type>
+  <position>GTK_WIN_POS_NONE</position>
+  <modal>False</modal>
+  <allow_shrink>False</allow_shrink>
+  <allow_grow>True</allow_grow>
+  <auto_shrink>False</auto_shrink>
+
+  <widget>
+    <class>GtkVBox</class>
+    <name>vbox25</name>
+    <homogeneous>False</homogeneous>
+    <spacing>0</spacing>
+
+    <widget>
+      <class>GtkHBox</class>
+      <name>hbox28</name>
+      <homogeneous>False</homogeneous>
+      <spacing>0</spacing>
+      <child>
+	<padding>0</padding>
+	<expand>False</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkButton</class>
+	<name>button74</name>
+	<can_focus>True</can_focus>
+	<label>></label>
+	<relief>GTK_RELIEF_NORMAL</relief>
+	<child>
+	  <padding>0</padding>
+	  <expand>False</expand>
+	  <fill>False</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkEntry</class>
+	<name>entry3</name>
+	<can_focus>True</can_focus>
+	<editable>True</editable>
+	<text_visible>True</text_visible>
+	<text_max_length>0</text_max_length>
+	<text></text>
+	<child>
+	  <padding>0</padding>
+	  <expand>True</expand>
+	  <fill>True</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkOptionMenu</class>
+	<name>optionmenu3</name>
+	<items>List
+Of
+Online
+Accounts
+</items>
+	<initial_choice>0</initial_choice>
+	<child>
+	  <padding>0</padding>
+	  <expand>False</expand>
+	  <fill>False</fill>
+	</child>
+      </widget>
+
+      <widget>
+	<class>GtkOptionMenu</class>
+	<name>optionmenu4</name>
+	<can_focus>True</can_focus>
+	<items>Contact
+Person
+Group
+Account
+</items>
+	<initial_choice>0</initial_choice>
+	<child>
+	  <padding>0</padding>
+	  <expand>False</expand>
+	  <fill>False</fill>
+	</child>
+      </widget>
+    </widget>
+
+    <widget>
+      <class>GtkHPaned</class>
+      <name>hpaned1</name>
+      <handle_size>10</handle_size>
+      <gutter_size>6</gutter_size>
+      <position>0</position>
+      <child>
+	<padding>0</padding>
+	<expand>True</expand>
+	<fill>True</fill>
+      </child>
+
+      <widget>
+	<class>GtkVBox</class>
+	<name>vbox26</name>
+	<homogeneous>False</homogeneous>
+	<spacing>0</spacing>
+	<child>
+	  <shrink>True</shrink>
+	  <resize>False</resize>
+	</child>
+
+	<widget>
+	  <class>GtkFrame</class>
+	  <name>frame7</name>
+	  <border_width>2</border_width>
+	  <label>Accounts</label>
+	  <label_xalign>0</label_xalign>
+	  <shadow_type>GTK_SHADOW_ETCHED_IN</shadow_type>
+	  <child>
+	    <padding>0</padding>
+	    <expand>True</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkVBox</class>
+	    <name>vbox27</name>
+	    <homogeneous>False</homogeneous>
+	    <spacing>0</spacing>
+
+	    <widget>
+	      <class>GtkScrolledWindow</class>
+	      <name>scrolledwindow18</name>
+	      <hscrollbar_policy>GTK_POLICY_AUTOMATIC</hscrollbar_policy>
+	      <vscrollbar_policy>GTK_POLICY_AUTOMATIC</vscrollbar_policy>
+	      <hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	      <vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	      <child>
+		<padding>0</padding>
+		<expand>True</expand>
+		<fill>True</fill>
+	      </child>
+
+	      <widget>
+		<class>GtkCList</class>
+		<name>clist4</name>
+		<columns>4</columns>
+		<column_widths>18,25,25,80</column_widths>
+		<selection_mode>GTK_SELECTION_SINGLE</selection_mode>
+		<show_titles>False</show_titles>
+		<shadow_type>GTK_SHADOW_IN</shadow_type>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label95</name>
+		  <label>label87</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label96</name>
+		  <label>label88</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label97</name>
+		  <label>label89</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label98</name>
+		  <label>label90</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+	      </widget>
+	    </widget>
+
+	    <widget>
+	      <class>GtkHBox</class>
+	      <name>hbox23</name>
+	      <homogeneous>True</homogeneous>
+	      <spacing>2</spacing>
+	      <child>
+		<padding>0</padding>
+		<expand>True</expand>
+		<fill>True</fill>
+	      </child>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button65</name>
+		<label>New</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>True</expand>
+		  <fill>True</fill>
+		</child>
+	      </widget>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button66</name>
+		<label>Delete</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>True</expand>
+		  <fill>True</fill>
+		</child>
+	      </widget>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button67</name>
+		<label>Connect</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>True</expand>
+		  <fill>True</fill>
+		</child>
+	      </widget>
+	    </widget>
+	  </widget>
+	</widget>
+
+	<widget>
+	  <class>GtkFrame</class>
+	  <name>frame8</name>
+	  <border_width>2</border_width>
+	  <label>Contacts</label>
+	  <label_xalign>0</label_xalign>
+	  <shadow_type>GTK_SHADOW_ETCHED_IN</shadow_type>
+	  <child>
+	    <padding>0</padding>
+	    <expand>True</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkVBox</class>
+	    <name>vbox28</name>
+	    <homogeneous>False</homogeneous>
+	    <spacing>0</spacing>
+
+	    <widget>
+	      <class>GtkScrolledWindow</class>
+	      <name>scrolledwindow19</name>
+	      <hscrollbar_policy>GTK_POLICY_AUTOMATIC</hscrollbar_policy>
+	      <vscrollbar_policy>GTK_POLICY_AUTOMATIC</vscrollbar_policy>
+	      <hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	      <vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	      <child>
+		<padding>0</padding>
+		<expand>True</expand>
+		<fill>True</fill>
+	      </child>
+
+	      <widget>
+		<class>GtkCList</class>
+		<name>clist5</name>
+		<columns>3</columns>
+		<column_widths>18,17,80</column_widths>
+		<selection_mode>GTK_SELECTION_SINGLE</selection_mode>
+		<show_titles>False</show_titles>
+		<shadow_type>GTK_SHADOW_IN</shadow_type>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label99</name>
+		  <label>label84</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label100</name>
+		  <label>label85</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label101</name>
+		  <label>label86</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+	      </widget>
+	    </widget>
+
+	    <widget>
+	      <class>GtkHBox</class>
+	      <name>hbox24</name>
+	      <homogeneous>True</homogeneous>
+	      <spacing>2</spacing>
+	      <child>
+		<padding>0</padding>
+		<expand>False</expand>
+		<fill>True</fill>
+	      </child>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button68</name>
+		<can_focus>True</can_focus>
+		<label>Talk</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>True</expand>
+		  <fill>True</fill>
+		</child>
+	      </widget>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button69</name>
+		<can_focus>True</can_focus>
+		<label>Info</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>True</expand>
+		  <fill>True</fill>
+		</child>
+	      </widget>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button70</name>
+		<can_focus>True</can_focus>
+		<label>Add</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>True</expand>
+		  <fill>True</fill>
+		</child>
+	      </widget>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button71</name>
+		<can_focus>True</can_focus>
+		<label>Remove</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>False</expand>
+		  <fill>False</fill>
+		</child>
+	      </widget>
+	    </widget>
+	  </widget>
+	</widget>
+
+	<widget>
+	  <class>GtkFrame</class>
+	  <name>frame9</name>
+	  <border_width>2</border_width>
+	  <label>Groups</label>
+	  <label_xalign>0</label_xalign>
+	  <shadow_type>GTK_SHADOW_ETCHED_IN</shadow_type>
+	  <child>
+	    <padding>0</padding>
+	    <expand>True</expand>
+	    <fill>True</fill>
+	  </child>
+
+	  <widget>
+	    <class>GtkVBox</class>
+	    <name>vbox29</name>
+	    <homogeneous>False</homogeneous>
+	    <spacing>0</spacing>
+
+	    <widget>
+	      <class>GtkScrolledWindow</class>
+	      <name>scrolledwindow20</name>
+	      <hscrollbar_policy>GTK_POLICY_AUTOMATIC</hscrollbar_policy>
+	      <vscrollbar_policy>GTK_POLICY_AUTOMATIC</vscrollbar_policy>
+	      <hupdate_policy>GTK_UPDATE_CONTINUOUS</hupdate_policy>
+	      <vupdate_policy>GTK_UPDATE_CONTINUOUS</vupdate_policy>
+	      <child>
+		<padding>0</padding>
+		<expand>True</expand>
+		<fill>True</fill>
+	      </child>
+
+	      <widget>
+		<class>GtkCList</class>
+		<name>clist6</name>
+		<columns>3</columns>
+		<column_widths>21,75,80</column_widths>
+		<selection_mode>GTK_SELECTION_SINGLE</selection_mode>
+		<show_titles>False</show_titles>
+		<shadow_type>GTK_SHADOW_IN</shadow_type>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label102</name>
+		  <label>label91</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label103</name>
+		  <label>label92</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+
+		<widget>
+		  <class>GtkLabel</class>
+		  <child_name>CList:title</child_name>
+		  <name>label104</name>
+		  <label>label93</label>
+		  <justify>GTK_JUSTIFY_CENTER</justify>
+		  <wrap>False</wrap>
+		  <xalign>0.5</xalign>
+		  <yalign>0.5</yalign>
+		  <xpad>0</xpad>
+		  <ypad>0</ypad>
+		</widget>
+	      </widget>
+	    </widget>
+
+	    <widget>
+	      <class>GtkHBox</class>
+	      <name>hbox27</name>
+	      <homogeneous>True</homogeneous>
+	      <spacing>2</spacing>
+	      <child>
+		<padding>0</padding>
+		<expand>False</expand>
+		<fill>True</fill>
+	      </child>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button72</name>
+		<label>Join</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>True</expand>
+		  <fill>True</fill>
+		</child>
+	      </widget>
+
+	      <widget>
+		<class>GtkButton</class>
+		<name>button73</name>
+		<label>Leave</label>
+		<relief>GTK_RELIEF_NORMAL</relief>
+		<child>
+		  <padding>0</padding>
+		  <expand>True</expand>
+		  <fill>True</fill>
+		</child>
+	      </widget>
+	    </widget>
+	  </widget>
+	</widget>
+
+	<widget>
+	  <class>GtkHSeparator</class>
+	  <name>hseparator2</name>
+	  <child>
+	    <padding>0</padding>
+	    <expand>True</expand>
+	    <fill>True</fill>
+	  </child>
+	</widget>
+
+	<widget>
+	  <class>GtkLabel</class>
+	  <name>label105</name>
+	  <label>Twisted IM V. %s</label>
+	  <justify>GTK_JUSTIFY_CENTER</justify>
+	  <wrap>False</wrap>
+	  <xalign>0.5</xalign>
+	  <yalign>0.5</yalign>
+	  <xpad>0</xpad>
+	  <ypad>3</ypad>
+	  <child>
+	    <padding>0</padding>
+	    <expand>False</expand>
+	    <fill>False</fill>
+	  </child>
+	</widget>
+      </widget>
+
+      <widget>
+	<class>GtkLabel</class>
+	<name>label106</name>
+	<label>This
+Space
+Left
+Intentionally
+Blank
+(Here is where the UI for the currently
+selected element
+for interaction
+will go.)</label>
+	<justify>GTK_JUSTIFY_CENTER</justify>
+	<wrap>False</wrap>
+	<xalign>0.5</xalign>
+	<yalign>0.5</yalign>
+	<xpad>0</xpad>
+	<ypad>0</ypad>
+	<child>
+	  <shrink>True</shrink>
+	  <resize>True</resize>
+	</child>
+      </widget>
+    </widget>
+  </widget>
+</widget>
+
+</GTK-Interface>
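For orientation, the XML above is a legacy Glade 1 (<GTK-Interface>) description: the window, widget and handler names (e.g. GroupNameEntry, on_GroupJoinButton_clicked) are what the Python UI code binds against. Below is a rough sketch of how such a file is typically consumed from the old PyGTK libglade binding; the module choice, the file path and the handler body are assumptions made for illustration, not something defined by this patch.

    # Sketch only -- assumes the GTK+ 1.2-era PyGTK libglade binding and a
    # local copy of the .glade file; neither is provided by this patch.
    from libglade import GladeXML

    glade = GladeXML("instancemessenger.glade")   # path is an assumption

    def on_GroupJoinButton_clicked(widget):
        # "GroupNameEntry" and the handler name come from the XML above.
        entry = glade.get_widget("GroupNameEntry")
        print "joining group", entry.get_text()

    # libglade resolves the <handler> names in <signal> elements against
    # this mapping when autoconnecting.
    glade.signal_autoconnect(
        {"on_GroupJoinButton_clicked": on_GroupJoinButton_clicked})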
diff --git a/ThirdParty/Twisted/twisted/words/im/interfaces.py b/ThirdParty/Twisted/twisted/words/im/interfaces.py
new file mode 100644
index 0000000..8f34fb1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/interfaces.py
@@ -0,0 +1,364 @@
+# -*- Python -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Pan-protocol chat client.
+"""
+
+from zope.interface import Interface, Attribute
+
+from twisted.words.im import locals
+
+# (Random musings, may not reflect on current state of code:)
+#
+# Accounts have Protocol components (clients)
+# Persons have Conversation components
+# Groups have GroupConversation components
+# Persons and Groups are associated with specific Accounts
+# At run-time, Clients/Accounts are slaved to a User Interface
+#   (Note: User may be a bot, so don't assume all UIs are built on gui toolkits)
+
+
+class IAccount(Interface):
+    """
+    I represent a user's account with a chat service.
+    """
+
+    client = Attribute('The L{IClient} currently connecting to this account, if any.')
+    gatewayType = Attribute('A C{str} that identifies the protocol used by this account.')
+
+    def __init__(accountName, autoLogin, username, password, host, port):
+        """
+        @type accountName: string
+        @param accountName: A name to refer to the account by locally.
+        @type autoLogin: boolean
+        @type username: string
+        @type password: string
+        @type host: string
+        @type port: integer
+        """
+
+    def isOnline():
+        """
+        Am I online?
+
+        @rtype: boolean
+        """
+
+    def logOn(chatui):
+        """
+        Go on-line.
+
+        @type chatui: Implementor of C{IChatUI}
+
+        @rtype: L{Deferred} L{Client}
+        """
+
+    def logOff():
+        """
+        Sign off.
+        """
+
+    def getGroup(groupName):
+        """
+        @rtype: L{Group<IGroup>}
+        """
+
+    def getPerson(personName):
+        """
+        @rtype: L{Person<IPerson>}
+        """
+
+class IClient(Interface):
+
+    account = Attribute('The L{IAccount} I am a Client for')
+
+    def __init__(account, chatui, logonDeferred):
+        """
+        @type account: L{IAccount}
+        @type chatui: L{IChatUI}
+        @param logonDeferred: Will be called back once I am logged on.
+        @type logonDeferred: L{Deferred<twisted.internet.defer.Deferred>}
+        """
+
+    def joinGroup(groupName):
+        """
+        @param groupName: The name of the group to join.
+        @type groupName: string
+        """
+
+    def leaveGroup(groupName):
+        """
+        @param groupName: The name of the group to leave.
+        @type groupName: string
+        """
+
+    def getGroupConversation(name, hide=0):
+        pass
+
+    def getPerson(name):
+        pass
+
+
+class IPerson(Interface):
+
+    def __init__(name, account):
+        """
+        Initialize me.
+
+        @param name: My name, as the server knows me.
+        @type name: string
+        @param account: The account I am accessed through.
+        @type account: I{Account}
+        """
+
+    def isOnline():
+        """
+        Am I online right now?
+
+        @rtype: boolean
+        """
+
+    def getStatus():
+        """
+        What is my on-line status?
+
+        @return: L{locals.StatusEnum}
+        """
+
+    def getIdleTime():
+        """
+        @rtype: string (XXX: How about a scalar?)
+        """
+
+    def sendMessage(text, metadata=None):
+        """
+        Send a message to this person.
+
+        @type text: string
+        @type metadata: dict
+        """
+
+
+class IGroup(Interface):
+    """
+    A group which you may have a conversation with.
+
+    Groups generally have a loosely-defined set of members, who may
+    leave and join at any time.
+    """
+
+    name = Attribute('My C{str} name, as the server knows me.')
+    account = Attribute('The L{Account<IAccount>} I am accessed through.')
+
+    def __init__(name, account):
+        """
+        Initialize me.
+
+        @param name: My name, as the server knows me.
+        @type name: str
+        @param account: The account I am accessed through.
+        @type account: L{Account<IAccount>}
+        """
+
+    def setTopic(text):
+        """
+        Set this Group's topic on the server.
+
+        @type text: string
+        """
+
+    def sendGroupMessage(text, metadata=None):
+        """
+        Send a message to this group.
+
+        @type text: str
+
+        @type metadata: dict
+        @param metadata: Valid keys for this dictionary include:
+
+            - C{'style'}: associated with one of:
+                - C{'emote'}: indicates this is an action
+        """
+
+    def join():
+        """
+        Join this group.
+        """
+
+    def leave():
+        """
+        Depart this group.
+        """
+
+
+class IConversation(Interface):
+    """
+    A conversation with a specific person.
+    """
+
+    def __init__(person, chatui):
+        """
+        @type person: L{IPerson}
+        """
+
+    def show():
+        """
+        This does not seem like it belongs in this interface.
+        """
+
+    def hide():
+        """
+        Nor does this.
+        """
+
+    def sendText(text, metadata):
+        pass
+
+    def showMessage(text, metadata):
+        pass
+
+    def changedNick(person, newnick):
+        """
+        @param person: XXX Shouldn't this always be Conversation.person?
+        """
+
+class IGroupConversation(Interface):
+
+    def show():
+        """
+        This does not seem like it belongs in this interface.
+        """
+
+    def hide():
+        """
+        Nor does this.
+        """
+
+    def sendText(text, metadata):
+        pass
+
+    def showGroupMessage(sender, text, metadata):
+        pass
+
+    def setGroupMembers(members):
+        """
+        Sets the list of members in the group and displays it to the user.
+        """
+
+    def setTopic(topic, author):
+        """
+        Displays the topic (from the server) for the group conversation window.
+
+        @type topic: string
+        @type author: string (XXX: Not Person?)
+        """
+
+    def memberJoined(member):
+        """
+        Adds the given member to the list of members in the group conversation
+        and displays this to the user.
+
+        @type member: string (XXX: Not Person?)
+        """
+
+    def memberChangedNick(oldnick, newnick):
+        """
+        Changes the oldnick in the list of members to C{newnick} and displays this
+        change to the user.
+
+        @type oldnick: string (XXX: Not Person?)
+        @type newnick: string
+        """
+
+    def memberLeft(member):
+        """
+        Deletes the given member from the list of members in the group
+        conversation and displays the change to the user.
+
+        @type member: string (XXX: Not Person?)
+        """
+
+
+class IChatUI(Interface):
+
+    def registerAccountClient(client):
+        """
+        Notifies user that an account has been signed on to.
+
+        @type client: L{Client<IClient>}
+        """
+
+    def unregisterAccountClient(client):
+        """
+        Notifies user that an account has been signed off or disconnected.
+
+        @type client: L{Client<IClient>}
+        """
+
+    def getContactsList():
+        """
+        @rtype: L{ContactsList}
+        """
+
+    # WARNING: You'll want to be polymorphed into something with
+    # intrinsic stoning resistance before continuing.
+
+    def getConversation(person, Class, stayHidden=0):
+        """
+        For the given person object, returns the conversation window
+        or creates and returns a new conversation window if one does not exist.
+
+        @type person: L{Person<IPerson>}
+        @type Class: L{Conversation<IConversation>} class
+        @type stayHidden: boolean
+
+        @rtype: L{Conversation<IConversation>}
+        """
+
+    def getGroupConversation(group, Class, stayHidden=0):
+        """
+        For the given group object, returns the group conversation window or
+        creates and returns a new group conversation window if it doesn't exist.
+
+        @type group: L{Group<interfaces.IGroup>}
+        @type Class: L{Conversation<interfaces.IConversation>} class
+        @type stayHidden: boolean
+
+        @rtype: L{GroupConversation<interfaces.IGroupConversation>}
+        """
+
+    def getPerson(name, client):
+        """
+        Get a Person for a client.
+
+        Duplicates L{IAccount.getPerson}.
+
+        @type name: string
+        @type client: L{Client<IClient>}
+
+        @rtype: L{Person<IPerson>}
+        """
+
+    def getGroup(name, client):
+        """
+        Get a Group for a client.
+
+        Duplicates L{IAccount.getGroup}.
+
+        @type name: string
+        @type client: L{Client<IClient>}
+
+        @rtype: L{Group<IGroup>}
+        """
+
+    def contactChangedNick(oldnick, newnick):
+        """
+        For the given person, changes the person's name to newnick, and
+        tells the contact list and any conversation windows with that person
+        to change as well.
+
+        @type oldnick: string
+        @type newnick: string
+        """
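As a rough illustration of how a backend is expected to satisfy these interfaces, here is a minimal, hypothetical IPerson implementation written in the style of the in-tree IRC support below; the EchoPerson class and its trivial bodies are invented for the example, only the interface itself comes from the file above.

    # Sketch only -- EchoPerson is a hypothetical class, not part of Twisted.
    from zope.interface import implements
    from twisted.internet.defer import succeed
    from twisted.words.im import interfaces, locals

    class EchoPerson:
        implements(interfaces.IPerson)

        def __init__(self, name, account):
            self.name = name
            self.account = account

        def isOnline(self):
            return 1

        def getStatus(self):
            return locals.ONLINE

        def getIdleTime(self):
            # The interface asks for a string here.
            return "0"

        def sendMessage(self, text, metadata=None):
            # A real backend would hand the text to its protocol client; the
            # in-tree IRC support returns an already-fired Deferred, so this
            # sketch does the same.
            return succeed(text)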
diff --git a/ThirdParty/Twisted/twisted/words/im/ircsupport.py b/ThirdParty/Twisted/twisted/words/im/ircsupport.py
new file mode 100644
index 0000000..1feddeb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/ircsupport.py
@@ -0,0 +1,263 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+IRC support for Instance Messenger.
+"""
+
+import string
+
+from twisted.words.protocols import irc
+from twisted.words.im.locals import ONLINE
+from twisted.internet import defer, reactor, protocol
+from twisted.internet.defer import succeed
+from twisted.words.im import basesupport, interfaces, locals
+from zope.interface import implements
+
+
+class IRCPerson(basesupport.AbstractPerson):
+
+    def imperson_whois(self):
+        if self.account.client is None:
+            raise locals.OfflineError
+        self.account.client.sendLine("WHOIS %s" % self.name)
+
+    ### interface impl
+
+    def isOnline(self):
+        return ONLINE
+
+    def getStatus(self):
+        return ONLINE
+
+    def setStatus(self,status):
+        self.status=status
+        self.chat.getContactsList().setContactStatus(self)
+
+    def sendMessage(self, text, meta=None):
+        if self.account.client is None:
+            raise locals.OfflineError
+        for line in string.split(text, '\n'):
+            if meta and meta.get("style", None) == "emote":
+                self.account.client.ctcpMakeQuery(self.name,[('ACTION', line)])
+            else:
+                self.account.client.msg(self.name, line)
+        return succeed(text)
+
+class IRCGroup(basesupport.AbstractGroup):
+
+    implements(interfaces.IGroup)
+
+    def imgroup_testAction(self):
+        pass
+
+    def imtarget_kick(self, target):
+        if self.account.client is None:
+            raise locals.OfflineError
+        reason = "for great justice!"
+        self.account.client.sendLine("KICK #%s %s :%s" % (
+            self.name, target.name, reason))
+
+    ### Interface Implementation
+
+    def setTopic(self, topic):
+        if self.account.client is None:
+            raise locals.OfflineError
+        self.account.client.topic(self.name, topic)
+
+    def sendGroupMessage(self, text, meta={}):
+        if self.account.client is None:
+            raise locals.OfflineError
+        if meta and meta.get("style", None) == "emote":
+            self.account.client.me(self.name,text)
+            return succeed(text)
+        #standard shmandard, clients don't support plain escaped newlines!
+        for line in string.split(text, '\n'):
+            self.account.client.say(self.name, line)
+        return succeed(text)
+
+    def leave(self):
+        if self.account.client is None:
+            raise locals.OfflineError
+        self.account.client.leave(self.name)
+        self.account.client.getGroupConversation(self.name,1)
+
+
+class IRCProto(basesupport.AbstractClientMixin, irc.IRCClient):
+    def __init__(self, account, chatui, logonDeferred=None):
+        basesupport.AbstractClientMixin.__init__(self, account, chatui,
+                                                 logonDeferred)
+        self._namreplies={}
+        self._ingroups={}
+        self._groups={}
+        self._topics={}
+
+    def getGroupConversation(self, name, hide=0):
+        name=string.lower(name)
+        return self.chat.getGroupConversation(self.chat.getGroup(name, self),
+                                              stayHidden=hide)
+
+    def getPerson(self,name):
+        return self.chat.getPerson(name, self)
+
+    def connectionMade(self):
+        # XXX: Why do I duplicate code in IRCClient.register?
+        try:
+            if self.account.password:
+                self.sendLine("PASS :%s" % self.account.password)
+            self.setNick(self.account.username)
+            self.sendLine("USER %s foo bar :Twisted-IM user" % (
+                self.account.username,))
+            for channel in self.account.channels:
+                self.joinGroup(channel)
+            self.account._isOnline=1
+            if self._logonDeferred is not None:
+                self._logonDeferred.callback(self)
+            self.chat.getContactsList()
+        except:
+            import traceback
+            traceback.print_exc()
+
+    def setNick(self,nick):
+        self.name=nick
+        self.accountName="%s (IRC)"%nick
+        irc.IRCClient.setNick(self,nick)
+
+    def kickedFrom(self, channel, kicker, message):
+        """
+        Called when I am kicked from a channel.
+        """
+        return self.chat.getGroupConversation(
+            self.chat.getGroup(channel[1:], self), 1)
+
+    def userKicked(self, kickee, channel, kicker, message):
+        pass
+
+    def noticed(self, username, channel, message):
+        self.privmsg(username, channel, message, {"dontAutoRespond": 1})
+
+    def privmsg(self, username, channel, message, metadata=None):
+        if metadata is None:
+            metadata = {}
+        username=string.split(username,'!',1)[0]
+        if username==self.name: return
+        if channel[0]=='#':
+            group=channel[1:]
+            self.getGroupConversation(group).showGroupMessage(username, message, metadata)
+            return
+        self.chat.getConversation(self.getPerson(username)).showMessage(message, metadata)
+
+    def action(self,username,channel,emote):
+        username=string.split(username,'!',1)[0]
+        if username==self.name: return
+        meta={'style':'emote'}
+        if channel[0]=='#':
+            group=channel[1:]
+            self.getGroupConversation(group).showGroupMessage(username, emote, meta)
+            return
+        self.chat.getConversation(self.getPerson(username)).showMessage(emote,meta)
+
+    def irc_RPL_NAMREPLY(self,prefix,params):
+        """
+        RPL_NAMREPLY
+        >> NAMES #bnl
+        << :Arlington.VA.US.Undernet.Org 353 z3p = #bnl :pSwede Dan-- SkOyg AG
+        """
+        group=string.lower(params[2][1:])
+        users=string.split(params[3])
+        for ui in range(len(users)):
+            while users[ui][0] in ["@","+"]: # channel modes
+                users[ui]=users[ui][1:]
+        if not self._namreplies.has_key(group):
+            self._namreplies[group]=[]
+        self._namreplies[group].extend(users)
+        for nickname in users:
+                try:
+                    self._ingroups[nickname].append(group)
+                except:
+                    self._ingroups[nickname]=[group]
+
+    def irc_RPL_ENDOFNAMES(self,prefix,params):
+        group=params[1][1:]
+        self.getGroupConversation(group).setGroupMembers(self._namreplies[string.lower(group)])
+        del self._namreplies[string.lower(group)]
+
+    def irc_RPL_TOPIC(self,prefix,params):
+        self._topics[params[1][1:]]=params[2]
+
+    def irc_333(self,prefix,params):
+        group=params[1][1:]
+        self.getGroupConversation(group).setTopic(self._topics[group],params[2])
+        del self._topics[group]
+
+    def irc_TOPIC(self,prefix,params):
+        nickname = string.split(prefix,"!")[0]
+        group = params[0][1:]
+        topic = params[1]
+        self.getGroupConversation(group).setTopic(topic,nickname)
+
+    def irc_JOIN(self,prefix,params):
+        nickname=string.split(prefix,"!")[0]
+        group=string.lower(params[0][1:])
+        if nickname!=self.nickname:
+            try:
+                self._ingroups[nickname].append(group)
+            except:
+                self._ingroups[nickname]=[group]
+            self.getGroupConversation(group).memberJoined(nickname)
+
+    def irc_PART(self,prefix,params):
+        nickname=string.split(prefix,"!")[0]
+        group=string.lower(params[0][1:])
+        if nickname!=self.nickname:
+            if group in self._ingroups[nickname]:
+                self._ingroups[nickname].remove(group)
+                self.getGroupConversation(group).memberLeft(nickname)
+
+    def irc_QUIT(self,prefix,params):
+        nickname=string.split(prefix,"!")[0]
+        if self._ingroups.has_key(nickname):
+            for group in self._ingroups[nickname]:
+                self.getGroupConversation(group).memberLeft(nickname)
+            self._ingroups[nickname]=[]
+
+    def irc_NICK(self, prefix, params):
+        fromNick = string.split(prefix, "!")[0]
+        toNick = params[0]
+        if not self._ingroups.has_key(fromNick):
+            return
+        for group in self._ingroups[fromNick]:
+            self.getGroupConversation(group).memberChangedNick(fromNick, toNick)
+        self._ingroups[toNick] = self._ingroups[fromNick]
+        del self._ingroups[fromNick]
+
+    def irc_unknown(self, prefix, command, params):
+        pass
+
+    # GTKIM calls
+    def joinGroup(self,name):
+        self.join(name)
+        self.getGroupConversation(name)
+
+class IRCAccount(basesupport.AbstractAccount):
+    implements(interfaces.IAccount)
+    gatewayType = "IRC"
+
+    _groupFactory = IRCGroup
+    _personFactory = IRCPerson
+
+    def __init__(self, accountName, autoLogin, username, password, host, port,
+                 channels=''):
+        basesupport.AbstractAccount.__init__(self, accountName, autoLogin,
+                                             username, password, host, port)
+        self.channels = map(string.strip,string.split(channels,','))
+        if self.channels == ['']:
+            self.channels = []
+
+    def _startLogOn(self, chatui):
+        logonDeferred = defer.Deferred()
+        cc = protocol.ClientCreator(reactor, IRCProto, self, chatui,
+                                    logonDeferred)
+        d = cc.connectTCP(self.host, self.port)
+        d.addErrback(logonDeferred.errback)
+        return logonDeferred
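+
+# A rough usage sketch (hypothetical names; "chatui" stands for whatever chat
+# UI object the application supplies, e.g. a twisted.words.im.basechat.ChatUI):
+#
+#   acct = IRCAccount("Example IRC", 0, "mynick", None,
+#                     "irc.example.com", 6667, "#twisted,#python")
+#   acct.channels           # -> ['#twisted', '#python']
+#   d = acct.logOn(chatui)  # Deferred from basesupport; should fire with the client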
diff --git a/ThirdParty/Twisted/twisted/words/im/locals.py b/ThirdParty/Twisted/twisted/words/im/locals.py
new file mode 100644
index 0000000..a63547a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/locals.py
@@ -0,0 +1,26 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+class Enum:
+    group = None
+
+    def __init__(self, label):
+        self.label = label
+
+    def __repr__(self):
+        return '<%s: %s>' % (self.group, self.label)
+
+    def __str__(self):
+        return self.label
+
+
+class StatusEnum(Enum):
+    group = 'Status'
+
+OFFLINE = Enum('Offline')
+ONLINE = Enum('Online')
+AWAY = Enum('Away')
+
+class OfflineError(Exception):
+    """The requested action can't happen while offline."""
diff --git a/ThirdParty/Twisted/twisted/words/im/pbsupport.py b/ThirdParty/Twisted/twisted/words/im/pbsupport.py
new file mode 100644
index 0000000..04d14e9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/im/pbsupport.py
@@ -0,0 +1,260 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+L{twisted.words} support for Instance Messenger.
+"""
+
+from twisted.internet import defer
+from twisted.internet import error
+from twisted.python import log
+from twisted.python.failure import Failure
+from twisted.spread import pb
+
+from twisted.words.im.locals import ONLINE, OFFLINE, AWAY
+
+from twisted.words.im import basesupport, interfaces
+from zope.interface import implements
+
+
+class TwistedWordsPerson(basesupport.AbstractPerson):
+    """I a facade for a person you can talk to through a twisted.words service.
+    """
+    def __init__(self, name, wordsAccount):
+        basesupport.AbstractPerson.__init__(self, name, wordsAccount)
+        self.status = OFFLINE
+
+    def isOnline(self):
+        return ((self.status == ONLINE) or
+                (self.status == AWAY))
+
+    def getStatus(self):
+        return self.status
+
+    def sendMessage(self, text, metadata):
+        """Return a deferred...
+        """
+        if metadata:
+            d=self.account.client.perspective.directMessage(self.name,
+                                                            text, metadata)
+            d.addErrback(self.metadataFailed, "* "+text)
+            return d
+        else:
+            return self.account.client.perspective.callRemote('directMessage',self.name, text)
+
+    def metadataFailed(self, result, text):
+        print "result:",result,"text:",text
+        return self.account.client.perspective.directMessage(self.name, text)
+
+    def setStatus(self, status):
+        self.status = status
+        self.chat.getContactsList().setContactStatus(self)
+
+class TwistedWordsGroup(basesupport.AbstractGroup):
+    implements(interfaces.IGroup)
+    def __init__(self, name, wordsClient):
+        basesupport.AbstractGroup.__init__(self, name, wordsClient)
+        self.joined = 0
+
+    def sendGroupMessage(self, text, metadata=None):
+        """Return a deferred.
+        """
+        #for backwards compatibility with older twisted.words servers.
+        if metadata:
+            d=self.account.client.perspective.callRemote(
+                'groupMessage', self.name, text, metadata)
+            d.addErrback(self.metadataFailed, "* "+text)
+            return d
+        else:
+            return self.account.client.perspective.callRemote('groupMessage',
+                                                              self.name, text)
+
+    def setTopic(self, text):
+        self.account.client.perspective.callRemote(
+            'setGroupMetadata',
+            {'topic': text, 'topic_author': self.client.name},
+            self.name)
+
+    def metadataFailed(self, result, text):
+        print "result:",result,"text:",text
+        return self.account.client.perspective.callRemote('groupMessage',
+                                                          self.name, text)
+
+    def joining(self):
+        self.joined = 1
+
+    def leaving(self):
+        self.joined = 0
+
+    def leave(self):
+        return self.account.client.perspective.callRemote('leaveGroup',
+                                                          self.name)
+
+
+
+class TwistedWordsClient(pb.Referenceable, basesupport.AbstractClientMixin):
+    """In some cases, this acts as an Account, since it a source of text
+    messages (multiple Words instances may be on a single PB connection)
+    """
+    def __init__(self, acct, serviceName, perspectiveName, chatui,
+                 _logonDeferred=None):
+        self.accountName = "%s (%s:%s)" % (acct.accountName, serviceName, perspectiveName)
+        self.name = perspectiveName
+        print "HELLO I AM A PB SERVICE", serviceName, perspectiveName
+        self.chat = chatui
+        self.account = acct
+        self._logonDeferred = _logonDeferred
+
+    def getPerson(self, name):
+        return self.chat.getPerson(name, self)
+
+    def getGroup(self, name):
+        return self.chat.getGroup(name, self)
+
+    def getGroupConversation(self, name):
+        return self.chat.getGroupConversation(self.getGroup(name))
+
+    def addContact(self, name):
+        self.perspective.callRemote('addContact', name)
+
+    def remote_receiveGroupMembers(self, names, group):
+        print 'received group members:', names, group
+        self.getGroupConversation(group).setGroupMembers(names)
+
+    def remote_receiveGroupMessage(self, sender, group, message, metadata=None):
+        print 'received a group message', sender, group, message, metadata
+        self.getGroupConversation(group).showGroupMessage(sender, message, metadata)
+
+    def remote_memberJoined(self, member, group):
+        print 'member joined', member, group
+        self.getGroupConversation(group).memberJoined(member)
+
+    def remote_memberLeft(self, member, group):
+        print 'member left'
+        self.getGroupConversation(group).memberLeft(member)
+
+    def remote_notifyStatusChanged(self, name, status):
+        self.chat.getPerson(name, self).setStatus(status)
+
+    def remote_receiveDirectMessage(self, name, message, metadata=None):
+        self.chat.getConversation(self.chat.getPerson(name, self)).showMessage(message, metadata)
+
+    def remote_receiveContactList(self, clist):
+        for name, status in clist:
+            self.chat.getPerson(name, self).setStatus(status)
+
+    def remote_setGroupMetadata(self, dict_, groupName):
+        if dict_.has_key("topic"):
+            self.getGroupConversation(groupName).setTopic(dict_["topic"], dict_.get("topic_author", None))
+
+    def joinGroup(self, name):
+        self.getGroup(name).joining()
+        return self.perspective.callRemote('joinGroup', name).addCallback(self._cbGroupJoined, name)
+
+    def leaveGroup(self, name):
+        self.getGroup(name).leaving()
+        return self.perspective.callRemote('leaveGroup', name).addCallback(self._cbGroupLeft, name)
+
+    def _cbGroupJoined(self, result, name):
+        groupConv = self.chat.getGroupConversation(self.getGroup(name))
+        groupConv.showGroupMessage("sys", "you joined")
+        self.perspective.callRemote('getGroupMembers', name)
+
+    def _cbGroupLeft(self, result, name):
+        print 'left',name
+        groupConv = self.chat.getGroupConversation(self.getGroup(name), 1)
+        groupConv.showGroupMessage("sys", "you left")
+
+    def connected(self, perspective):
+        print 'Connected Words Client!', perspective
+        if self._logonDeferred is not None:
+            self._logonDeferred.callback(self)
+        self.perspective = perspective
+        self.chat.getContactsList()
+
+
+pbFrontEnds = {
+    "twisted.words": TwistedWordsClient,
+    "twisted.reality": None
+    }
+
+
+class PBAccount(basesupport.AbstractAccount):
+    implements(interfaces.IAccount)
+    gatewayType = "PB"
+    _groupFactory = TwistedWordsGroup
+    _personFactory = TwistedWordsPerson
+
+    def __init__(self, accountName, autoLogin, username, password, host, port,
+                 services=None):
+        """
+        @param username: The name of your PB Identity.
+        @type username: string
+        """
+        basesupport.AbstractAccount.__init__(self, accountName, autoLogin,
+                                             username, password, host, port)
+        self.services = []
+        if not services:
+            services = [('twisted.words', 'twisted.words', username)]
+        for serviceType, serviceName, perspectiveName in services:
+            self.services.append([pbFrontEnds[serviceType], serviceName,
+                                  perspectiveName])
+
+    def logOn(self, chatui):
+        """
+        @returns: this breaks with L{interfaces.IAccount}
+        @returntype: DeferredList of L{interfaces.IClient}s
+        """
+        # Overriding basesupport's implementation on account of the
+        # fact that _startLogOn tends to return a deferredList rather
+        # than a simple Deferred, and we need to do registerAccountClient.
+        if (not self._isConnecting) and (not self._isOnline):
+            self._isConnecting = 1
+            d = self._startLogOn(chatui)
+            d.addErrback(self._loginFailed)
+            def registerMany(results):
+                for success, result in results:
+                    if success:
+                        chatui.registerAccountClient(result)
+                        self._cb_logOn(result)
+                    else:
+                        log.err(result)
+            d.addCallback(registerMany)
+            return d
+        else:
+            raise error.ConnectionError("Connection in progress")
+
+
+    def _startLogOn(self, chatui):
+        print 'Connecting...',
+        d = pb.getObjectAt(self.host, self.port)
+        d.addCallbacks(self._cbConnected, self._ebConnected,
+                       callbackArgs=(chatui,))
+        return d
+
+    def _cbConnected(self, root, chatui):
+        print 'Connected!'
+        print 'Identifying...',
+        d = pb.authIdentity(root, self.username, self.password)
+        d.addCallbacks(self._cbIdent, self._ebConnected,
+                       callbackArgs=(chatui,))
+        return d
+
+    def _cbIdent(self, ident, chatui):
+        if not ident:
+            print 'falsely identified.'
+            return self._ebConnected(Failure(Exception("username or password incorrect")))
+        print 'Identified!'
+        dl = []
+        for handlerClass, sname, pname in self.services:
+            d = defer.Deferred()
+            dl.append(d)
+            handler = handlerClass(self, sname, pname, chatui, d)
+            ident.callRemote('attach', sname, pname, handler).addCallback(handler.connected)
+        return defer.DeferredList(dl)
+
+    def _ebConnected(self, error):
+        print 'Not connected.'
+        return error
+
diff --git a/ThirdParty/Twisted/twisted/words/iwords.py b/ThirdParty/Twisted/twisted/words/iwords.py
new file mode 100644
index 0000000..c8ce09f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/iwords.py
@@ -0,0 +1,266 @@
+# -*- test-case-name: twisted.words.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from zope.interface import Interface, Attribute, implements
+
+class IProtocolPlugin(Interface):
+    """Interface for plugins providing an interface to a Words service
+    """
+
+    name = Attribute("A single word describing what kind of interface this is (eg, irc or web)")
+
+    def getFactory(realm, portal):
+        """Retrieve a C{twisted.internet.interfaces.IServerFactory} provider
+
+        @param realm: An object providing C{twisted.cred.portal.IRealm} and
+        C{IChatService}, with which service information should be looked up.
+
+        @param portal: An object providing C{twisted.cred.portal.IPortal},
+        through which logins should be performed.
+        """
+
+
+class IGroup(Interface):
+    name = Attribute("A short string, unique among groups.")
+
+    def add(user):
+        """Include the given user in this group.
+
+        @type user: L{IUser}
+        """
+
+    def remove(user, reason=None):
+        """Remove the given user from this group.
+
+        @type user: L{IUser}
+        @type reason: C{unicode}
+        """
+
+    def size():
+        """Return the number of participants in this group.
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with an C{int} representing the
+        number of participants in this group.
+        """
+
+    def receive(sender, recipient, message):
+        """
+        Broadcast the given message from the given sender to other
+        users in the group.
+
+        The message is not re-transmitted to the sender.
+
+        @param sender: L{IUser}
+
+        @type recipient: L{IGroup}
+        @param recipient: This is probably a wart.  Maybe it will be removed
+        in the future.  For now, it should be the group object the message
+        is being delivered to.
+
+        @param message: C{dict}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with None when delivery has been
+        attempted for all users.
+        """
+
+    def setMetadata(meta):
+        """Change the metadata associated with this group.
+
+        @type meta: C{dict}
+        """
+
+    def iterusers():
+        """Return an iterator of all users in this group.
+        """
+
+
+class IChatClient(Interface):
+    """Interface through which IChatService interacts with clients.
+    """
+
+    name = Attribute("A short string, unique among users.  This will be set by the L{IChatService} at login time.")
+
+    def receive(sender, recipient, message):
+        """
+        Callback notifying this user of the given message sent by the
+        given user.
+
+        This will be invoked whenever another user sends a message to a
+        group this user is participating in, or whenever another user sends
+        a message directly to this user.  In the former case, C{recipient}
+        will be the group to which the message was sent; in the latter, it
+        will be the same object as the user who is receiving the message.
+
+        @type sender: L{IUser}
+        @type recipient: L{IUser} or L{IGroup}
+        @type message: C{dict}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires when the message has been delivered,
+        or which fails in some way.  If the Deferred fails and the message
+        was directed at a group, this user will be removed from that group.
+        """
+
+    def groupMetaUpdate(group, meta):
+        """
+        Callback notifying this user that the metadata for the given
+        group has changed.
+
+        @type group: L{IGroup}
+        @type meta: C{dict}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        """
+
+    def userJoined(group, user):
+        """
+        Callback notifying this user that the given user has joined
+        the given group.
+
+        @type group: L{IGroup}
+        @type user: L{IUser}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        """
+
+    def userLeft(group, user, reason=None):
+        """
+        Callback notifying this user that the given user has left the
+        given group for the given reason.
+
+        @type group: L{IGroup}
+        @type user: L{IUser}
+        @type reason: C{unicode}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        """
+
+
+class IUser(Interface):
+    """Interface through which clients interact with IChatService.
+    """
+
+    realm = Attribute("A reference to the Realm to which this user belongs.  Set if and only if the user is logged in.")
+    mind = Attribute("A reference to the mind which logged in to this user.  Set if and only if the user is logged in.")
+    name = Attribute("A short string, unique among users.")
+
+    lastMessage = Attribute("A POSIX timestamp indicating the time of the last message received from this user.")
+    signOn = Attribute("A POSIX timestamp indicating this user's most recent sign on time.")
+
+    def loggedIn(realm, mind):
+        """Invoked by the associated L{IChatService} when login occurs.
+
+        @param realm: The L{IChatService} through which login is occurring.
+        @param mind: The mind object used for cred login.
+        """
+
+    def send(recipient, message):
+        """Send the given message to the given user or group.
+
+        @type recipient: Either L{IUser} or L{IGroup}
+        @type message: C{dict}
+        """
+
+    def join(group):
+        """Attempt to join the given group.
+
+        @type group: L{IGroup}
+        @rtype: L{twisted.internet.defer.Deferred}
+        """
+
+    def leave(group):
+        """Discontinue participation in the given group.
+
+        @type group: L{IGroup}
+        @rtype: L{twisted.internet.defer.Deferred}
+        """
+
+    def itergroups():
+        """
+        Return an iterator of all groups of which this user is a
+        member.
+        """
+
+
+class IChatService(Interface):
+    name = Attribute("A short string identifying this chat service (eg, a hostname)")
+
+    createGroupOnRequest = Attribute(
+        "A boolean indicating whether L{getGroup} should implicitly "
+        "create groups which are requested but which do not yet exist.")
+
+    createUserOnRequest = Attribute(
+        "A boolean indicating whether L{getUser} should implicitly "
+        "create users which are requested but which do not yet exist.")
+
+    def itergroups():
+        """Return all groups available on this service.
+
+        @rtype: C{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with a list of C{IGroup} providers.
+        """
+
+    def getGroup(name):
+        """Retrieve the group by the given name.
+
+        @type name: C{str}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with the group with the given
+        name if one exists (or if one is created due to the setting of
+        L{createGroupOnRequest}), or which fails with
+        L{twisted.words.ewords.NoSuchGroup} if no such group exists.
+        """
+
+    def createGroup(name):
+        """Create a new group with the given name.
+
+        @type name: C{str}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with the created group, or
+        which fails with L{twisted.words.ewords.DuplicateGroup} if a
+        group by that name exists already.
+        """
+
+    def lookupGroup(name):
+        """Retrieve a group by name.
+
+        Unlike C{getGroup}, this will never implicitly create a group.
+
+        @type name: C{str}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with the group by the given
+        name, or which fails with L{twisted.words.ewords.NoSuchGroup}.
+        """
+
+    def getUser(name):
+        """Retrieve the user by the given name.
+
+        @type name: C{str}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with the user with the given
+        name if one exists (or if one is created due to the setting of
+        L{createUserOnRequest}), or which fails with
+        L{twisted.words.ewords.NoSuchUser} if no such user exists.
+        """
+
+    def createUser(name):
+        """Create a new user with the given name.
+
+        @type name: C{str}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with the created user, or
+        which fails with L{twisted.words.ewords.DuplicateUser} if a
+        user by that name exists already.
+        """
+
+__all__ = [
+    'IProtocolPlugin', 'IGroup', 'IChatClient', 'IUser', 'IChatService',
+    ]
diff --git a/ThirdParty/Twisted/twisted/words/protocols/__init__.py b/ThirdParty/Twisted/twisted/words/protocols/__init__.py
new file mode 100644
index 0000000..5b4f7e5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/__init__.py
@@ -0,0 +1 @@
+"Chat protocols"
diff --git a/ThirdParty/Twisted/twisted/words/protocols/irc.py b/ThirdParty/Twisted/twisted/words/protocols/irc.py
new file mode 100644
index 0000000..65daa7a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/irc.py
@@ -0,0 +1,3302 @@
+# -*- test-case-name: twisted.words.test.test_irc -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Internet Relay Chat Protocol for client and server.
+
+Future Plans
+============
+
+The way the IRCClient class works here encourages people to implement
+IRC clients by subclassing the ephemeral protocol class, and it tends
+to end up with way more state than it should for an object which will
+be destroyed as soon as the TCP transport drops.  Someone oughta do
+something about that, ya know?
+
+The DCC support needs to have more hooks for the client for it to be
+able to ask the user things like "Do you want to accept this session?"
+and "Transfer #2 is 67% done." and otherwise manage the DCC sessions.
+
+Test coverage needs to be better.
+
+@var MAX_COMMAND_LENGTH: The maximum length of a command, as defined by RFC
+    2812 section 2.3.
+
+@author: Kevin Turner
+
+@see: RFC 1459: Internet Relay Chat Protocol
+@see: RFC 2812: Internet Relay Chat: Client Protocol
+@see: U{The Client-To-Client-Protocol
+<http://www.irchelp.org/irchelp/rfc/ctcpspec.html>}
+"""
+
+import errno, os, random, re, stat, struct, sys, time, types, traceback
+import string, socket
+import warnings
+import textwrap
+from os import path
+
+from twisted.internet import reactor, protocol, task
+from twisted.persisted import styles
+from twisted.protocols import basic
+from twisted.python import log, reflect, text
+from twisted.python.compat import set
+
+NUL = chr(0)
+CR = chr(015)
+NL = chr(012)
+LF = NL
+SPC = chr(040)
+
+# This includes the CRLF terminator characters.
+MAX_COMMAND_LENGTH = 512
+
+CHANNEL_PREFIXES = '&#!+'
+
+class IRCBadMessage(Exception):
+    pass
+
+class IRCPasswordMismatch(Exception):
+    pass
+
+
+
+class IRCBadModes(ValueError):
+    """
+    A malformed mode was encountered while attempting to parse a mode string.
+    """
+
+
+
+def parsemsg(s):
+    """Breaks a message from an IRC server into its prefix, command, and arguments.
+    """
+    prefix = ''
+    trailing = []
+    if not s:
+        raise IRCBadMessage("Empty line.")
+    if s[0] == ':':
+        prefix, s = s[1:].split(' ', 1)
+    if s.find(' :') != -1:
+        s, trailing = s.split(' :', 1)
+        args = s.split()
+        args.append(trailing)
+    else:
+        args = s.split()
+    command = args.pop(0)
+    return prefix, command, args
+
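+# For illustration, a typical PRIVMSG line parses like this (hypothetical
+# input):
+#
+#   >>> parsemsg(':nick!user@host PRIVMSG #chan :Hello there')
+#   ('nick!user@host', 'PRIVMSG', ['#chan', 'Hello there'])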
+
+
+def split(str, length=80):
+    """
+    Split a string into multiple lines.
+
+    Whitespace near C{str[length]} will be preferred as a breaking point.
+    C{"\\n"} will also be used as a breaking point.
+
+    @param str: The string to split.
+    @type str: C{str}
+
+    @param length: The maximum length which will be allowed for any string in
+        the result.
+    @type length: C{int}
+
+    @return: C{list} of C{str}
+    """
+    return [chunk
+            for line in str.split('\n')
+            for chunk in textwrap.wrap(line, length)]
+
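+# For example (the wrapping itself is delegated to textwrap.wrap):
+#
+#   >>> split('hello\nworld')
+#   ['hello', 'world']
+#   >>> split('aaa bbb ccc', 7)
+#   ['aaa bbb', 'ccc']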
+
+def _intOrDefault(value, default=None):
+    """
+    Convert a value to an integer if possible.
+
+    @rtype: C{int} or type of L{default}
+    @return: An integer when C{value} can be converted to an integer,
+        otherwise return C{default}
+    """
+    if value:
+        try:
+            return int(value)
+        except (TypeError, ValueError):
+            pass
+    return default
+
+
+
+class UnhandledCommand(RuntimeError):
+    """
+    A command dispatcher could not locate an appropriate command handler.
+    """
+
+
+
+class _CommandDispatcherMixin(object):
+    """
+    Dispatch commands to handlers based on their name.
+
+    Command handler names should be of the form C{prefix_commandName},
+    where C{prefix} is the value specified by L{prefix}, and must
+    accept the parameters as given to L{dispatch}.
+
+    Attempting to mix this in more than once for a single class will cause
+    strange behaviour, due to L{prefix} being overwritten.
+
+    @type prefix: C{str}
+    @ivar prefix: Command handler prefix, used to locate handler attributes
+    """
+    prefix = None
+
+    def dispatch(self, commandName, *args):
+        """
+        Perform actual command dispatch.
+        """
+        def _getMethodName(command):
+            return '%s_%s' % (self.prefix, command)
+
+        def _getMethod(name):
+            return getattr(self, _getMethodName(name), None)
+
+        method = _getMethod(commandName)
+        if method is not None:
+            return method(*args)
+
+        method = _getMethod('unknown')
+        if method is None:
+            raise UnhandledCommand("No handler for %r could be found" % (_getMethodName(commandName),))
+        return method(commandName, *args)
+
+
+
+
+
+def parseModes(modes, params, paramModes=('', '')):
+    """
+    Parse an IRC mode string.
+
+    The mode string is parsed into two lists of mode changes (added and
+    removed), with each mode change represented as C{(mode, param)} where mode
+    is the mode character, and param is the parameter passed for that mode, or
+    C{None} if no parameter is required.
+
+    @type modes: C{str}
+    @param modes: Modes string to parse.
+
+    @type params: C{list}
+    @param params: Parameters specified along with L{modes}.
+
+    @type paramModes: C{(str, str)}
+    @param paramModes: A pair of strings (C{(add, remove)}) that indicate which modes take
+        parameters when added or removed.
+
+    @returns: Two lists of mode changes, one for modes added and the other for
+        modes removed, respectively; mode changes in each list are represented as
+        C{(mode, param)}.
+    """
+    if len(modes) == 0:
+        raise IRCBadModes('Empty mode string')
+
+    if modes[0] not in '+-':
+        raise IRCBadModes('Malformed modes string: %r' % (modes,))
+
+    changes = ([], [])
+
+    direction = None
+    count = -1
+    for ch in modes:
+        if ch in '+-':
+            if count == 0:
+                raise IRCBadModes('Empty mode sequence: %r' % (modes,))
+            direction = '+-'.index(ch)
+            count = 0
+        else:
+            param = None
+            if ch in paramModes[direction]:
+                try:
+                    param = params.pop(0)
+                except IndexError:
+                    raise IRCBadModes('Not enough parameters: %r' % (ch,))
+            changes[direction].append((ch, param))
+            count += 1
+
+    if len(params) > 0:
+        raise IRCBadModes('Too many parameters: %r %r' % (modes, params))
+
+    if count == 0:
+        raise IRCBadModes('Empty mode sequence: %r' % (modes,))
+
+    return changes
+
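+# A small worked example; the paramModes pair is chosen by hand here, while
+# IRCClient derives it from the server's ISUPPORT data (getChannelModeParams):
+#
+#   >>> parseModes('+o-v', ['alice', 'bob'], paramModes=('ov', 'ov'))
+#   ([('o', 'alice')], [('v', 'bob')])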
+
+
+class IRC(protocol.Protocol):
+    """
+    Internet Relay Chat server protocol.
+    """
+
+    buffer = ""
+    hostname = None
+
+    encoding = None
+
+    def connectionMade(self):
+        self.channels = []
+        if self.hostname is None:
+            self.hostname = socket.getfqdn()
+
+
+    def sendLine(self, line):
+        if self.encoding is not None:
+            if isinstance(line, unicode):
+                line = line.encode(self.encoding)
+        self.transport.write("%s%s%s" % (line, CR, LF))
+
+
+    def sendMessage(self, command, *parameter_list, **prefix):
+        """
+        Send a line formatted as an IRC message.
+
+        First argument is the command, all subsequent arguments are parameters
+        to that command.  If a prefix is desired, it may be specified with the
+        keyword argument 'prefix'.
+        """
+
+        if not command:
+            raise ValueError, "IRC message requires a command."
+
+        if ' ' in command or command[0] == ':':
+            # Not the ONLY way to screw up, but provides a little
+            # sanity checking to catch likely dumb mistakes.
+            raise ValueError, "Somebody screwed up, 'cuz this doesn't" \
+                  " look like a command to me: %s" % command
+
+        line = string.join([command] + list(parameter_list))
+        if 'prefix' in prefix:
+            line = ":%s %s" % (prefix['prefix'], line)
+        self.sendLine(line)
+
+        if len(parameter_list) > 15:
+            log.msg("Message has %d parameters (RFC allows 15):\n%s" %
+                    (len(parameter_list), line))
+
+
+    def dataReceived(self, data):
+        """
+        This hack is to support mIRC, which sends LF only, even though the RFC
+        says CRLF.  (Also, the flexibility of LineReceiver to turn "line mode"
+        on and off was not required.)
+        """
+        lines = (self.buffer + data).split(LF)
+        # Put the (possibly empty) element after the last LF back in the
+        # buffer
+        self.buffer = lines.pop()
+
+        for line in lines:
+            if len(line) <= 2:
+                # This is a blank line, at best.
+                continue
+            if line[-1] == CR:
+                line = line[:-1]
+            prefix, command, params = parsemsg(line)
+            # mIRC is a big pile of doo-doo
+            command = command.upper()
+            # DEBUG: log.msg( "%s %s %s" % (prefix, command, params))
+
+            self.handleCommand(command, prefix, params)
+
+
+    def handleCommand(self, command, prefix, params):
+        """
+        Determine the function to call for the given command and call it with
+        the given arguments.
+        """
+        method = getattr(self, "irc_%s" % command, None)
+        try:
+            if method is not None:
+                method(prefix, params)
+            else:
+                self.irc_unknown(prefix, command, params)
+        except:
+            log.deferr()
+
+
+    def irc_unknown(self, prefix, command, params):
+        """
+        Called by L{handleCommand} on a command that doesn't have a defined
+        handler. Subclasses should override this method.
+        """
+        raise NotImplementedError(command, prefix, params)
+
+
+    # Helper methods
+    def privmsg(self, sender, recip, message):
+        """
+        Send a message to a channel or user
+
+        @type sender: C{str} or C{unicode}
+        @param sender: Who is sending this message.  Should be of the form
+            username!ident@hostmask (unless you know better!).
+
+        @type recip: C{str} or C{unicode}
+        @param recip: The recipient of this message.  If a channel, it must
+            start with a channel prefix.
+
+        @type message: C{str} or C{unicode}
+        @param message: The message being sent.
+        """
+        self.sendLine(":%s PRIVMSG %s :%s" % (sender, recip, lowQuote(message)))
+
+
+    def notice(self, sender, recip, message):
+        """
+        Send a "notice" to a channel or user.
+
+        Notices differ from privmsgs in that the RFC claims they are different.
+        Robots are supposed to send notices and not respond to them.  Clients
+        typically display notices differently from privmsgs.
+
+        @type sender: C{str} or C{unicode}
+        @param sender: Who is sending this message.  Should be of the form
+            username!ident@hostmask (unless you know better!).
+
+        @type recip: C{str} or C{unicode}
+        @param recip: The recipient of this message.  If a channel, it must
+            start with a channel prefix.
+
+        @type message: C{str} or C{unicode}
+        @param message: The message being sent.
+        """
+        self.sendLine(":%s NOTICE %s :%s" % (sender, recip, message))
+
+
+    def action(self, sender, recip, message):
+        """
+        Send an action to a channel or user.
+
+        @type sender: C{str} or C{unicode}
+        @param sender: Who is sending this message.  Should be of the form
+            username!ident@hostmask (unless you know better!).
+
+        @type recip: C{str} or C{unicode}
+        @param recip: The recipient of this message.  If a channel, it must
+            start with a channel prefix.
+
+        @type message: C{str} or C{unicode}
+        @param message: The action being sent.
+        """
+        self.sendLine(":%s ACTION %s :%s" % (sender, recip, message))
+
+
+    def topic(self, user, channel, topic, author=None):
+        """
+        Send the topic to a user.
+
+        @type user: C{str} or C{unicode}
+        @param user: The user receiving the topic.  Only their nick name, not
+            the full hostmask.
+
+        @type channel: C{str} or C{unicode}
+        @param channel: The channel for which this is the topic.
+
+        @type topic: C{str} or C{unicode} or C{None}
+        @param topic: The topic string, unquoted, or None if there is no topic.
+
+        @type author: C{str} or C{unicode}
+        @param author: If the topic is being changed, the full username and
+            hostmask of the person changing it.
+        """
+        if author is None:
+            if topic is None:
+                self.sendLine(':%s %s %s %s :%s' % (
+                    self.hostname, RPL_NOTOPIC, user, channel, 'No topic is set.'))
+            else:
+                self.sendLine(":%s %s %s %s :%s" % (
+                    self.hostname, RPL_TOPIC, user, channel, lowQuote(topic)))
+        else:
+            self.sendLine(":%s TOPIC %s :%s" % (author, channel, lowQuote(topic)))
+
+
+    def topicAuthor(self, user, channel, author, date):
+        """
+        Send the author of and time at which a topic was set for the given
+        channel.
+
+        This sends a 333 reply message, which is not part of the IRC RFC.
+
+        @type user: C{str} or C{unicode}
+        @param user: The user receiving the topic.  Only their nick name, not
+            the full hostmask.
+
+        @type channel: C{str} or C{unicode}
+        @param channel: The channel for which this information is relevant.
+
+        @type author: C{str} or C{unicode}
+        @param author: The nickname (without hostmask) of the user who last set
+            the topic.
+
+        @type date: C{int}
+        @param date: A POSIX timestamp (number of seconds since the epoch) at
+            which the topic was last set.
+        """
+        self.sendLine(':%s %d %s %s %s %d' % (
+            self.hostname, 333, user, channel, author, date))
+
+
+    def names(self, user, channel, names):
+        """
+        Send the names of a channel's participants to a user.
+
+        @type user: C{str} or C{unicode}
+        @param user: The user receiving the name list.  Only their nick name,
+            not the full hostmask.
+
+        @type channel: C{str} or C{unicode}
+        @param channel: The channel for which this is the namelist.
+
+        @type names: C{list} of C{str} or C{unicode}
+        @param names: The names to send.
+        """
+        # XXX If unicode is given, these limits are not quite correct
+        prefixLength = len(channel) + len(user) + 10
+        namesLength = 512 - prefixLength
+
+        L = []
+        count = 0
+        for n in names:
+            if count + len(n) + 1 > namesLength:
+                self.sendLine(":%s %s %s = %s :%s" % (
+                    self.hostname, RPL_NAMREPLY, user, channel, ' '.join(L)))
+                L = [n]
+                count = len(n)
+            else:
+                L.append(n)
+                count += len(n) + 1
+        if L:
+            self.sendLine(":%s %s %s = %s :%s" % (
+                self.hostname, RPL_NAMREPLY, user, channel, ' '.join(L)))
+        self.sendLine(":%s %s %s %s :End of /NAMES list" % (
+            self.hostname, RPL_ENDOFNAMES, user, channel))
+
+
+    def who(self, user, channel, memberInfo):
+        """
+        Send a list of users participating in a channel.
+
+        @type user: C{str} or C{unicode}
+        @param user: The user receiving this member information.  Only their
+            nick name, not the full hostmask.
+
+        @type channel: C{str} or C{unicode}
+        @param channel: The channel for which this is the member information.
+
+        @type memberInfo: C{list} of C{tuples}
+        @param memberInfo: For each member of the given channel, a 7-tuple
+            containing their username, their hostmask, the server to which they
+            are connected, their nickname, the letter "H" or "G" (standing for
+            "Here" or "Gone"), the hopcount from C{user} to this member, and
+            this member's real name.
+        """
+        for info in memberInfo:
+            (username, hostmask, server, nickname, flag, hops, realName) = info
+            assert flag in ("H", "G")
+            self.sendLine(":%s %s %s %s %s %s %s %s %s :%d %s" % (
+                self.hostname, RPL_WHOREPLY, user, channel,
+                username, hostmask, server, nickname, flag, hops, realName))
+
+        self.sendLine(":%s %s %s %s :End of /WHO list." % (
+            self.hostname, RPL_ENDOFWHO, user, channel))
+
+
+    def whois(self, user, nick, username, hostname, realName, server, serverInfo, oper, idle, signOn, channels):
+        """
+        Send information about the state of a particular user.
+
+        @type user: C{str} or C{unicode}
+        @param user: The user receiving this information.  Only their nick name,
+            not the full hostmask.
+
+        @type nick: C{str} or C{unicode}
+        @param nick: The nickname of the user this information describes.
+
+        @type username: C{str} or C{unicode}
+        @param username: The user's username (eg, ident response)
+
+        @type hostname: C{str}
+        @param hostname: The user's hostmask
+
+        @type realName: C{str} or C{unicode}
+        @param realName: The user's real name
+
+        @type server: C{str} or C{unicode}
+        @param server: The name of the server to which the user is connected
+
+        @type serverInfo: C{str} or C{unicode}
+        @param serverInfo: A descriptive string about that server
+
+        @type oper: C{bool}
+        @param oper: Indicates whether the user is an IRC operator
+
+        @type idle: C{int}
+        @param idle: The number of seconds since the user last sent a message
+
+        @type signOn: C{int}
+        @param signOn: A POSIX timestamp (number of seconds since the epoch)
+            indicating the time the user signed on
+
+        @type channels: C{list} of C{str} or C{unicode}
+        @param channels: A list of the channels which the user is participating in
+        """
+        self.sendLine(":%s %s %s %s %s %s * :%s" % (
+            self.hostname, RPL_WHOISUSER, user, nick, username, hostname, realName))
+        self.sendLine(":%s %s %s %s %s :%s" % (
+            self.hostname, RPL_WHOISSERVER, user, nick, server, serverInfo))
+        if oper:
+            self.sendLine(":%s %s %s %s :is an IRC operator" % (
+                self.hostname, RPL_WHOISOPERATOR, user, nick))
+        self.sendLine(":%s %s %s %s %d %d :seconds idle, signon time" % (
+            self.hostname, RPL_WHOISIDLE, user, nick, idle, signOn))
+        self.sendLine(":%s %s %s %s :%s" % (
+            self.hostname, RPL_WHOISCHANNELS, user, nick, ' '.join(channels)))
+        self.sendLine(":%s %s %s %s :End of WHOIS list." % (
+            self.hostname, RPL_ENDOFWHOIS, user, nick))
+
+
+    def join(self, who, where):
+        """
+        Send a join message.
+
+        @type who: C{str} or C{unicode}
+        @param who: The name of the user joining.  Should be of the form
+            username!ident@hostmask (unless you know better!).
+
+        @type where: C{str} or C{unicode}
+        @param where: The channel the user is joining.
+        """
+        self.sendLine(":%s JOIN %s" % (who, where))
+
+
+    def part(self, who, where, reason=None):
+        """
+        Send a part message.
+
+        @type who: C{str} or C{unicode}
+        @param who: The name of the user departing.  Should be of the form
+            username!ident@hostmask (unless you know better!).
+
+        @type where: C{str} or C{unicode}
+        @param where: The channel the user is leaving.
+
+        @type reason: C{str} or C{unicode}
+        @param reason: A string describing the misery which caused this poor
+            soul to depart.
+        """
+        if reason:
+            self.sendLine(":%s PART %s :%s" % (who, where, reason))
+        else:
+            self.sendLine(":%s PART %s" % (who, where))
+
+
+    def channelMode(self, user, channel, mode, *args):
+        """
+        Send information about the mode of a channel.
+
+        @type user: C{str} or C{unicode}
+        @param user: The user receiving the mode information.  Only their nick
+            name, not the full hostmask.
+
+        @type channel: C{str} or C{unicode}
+        @param channel: The channel for which this is the mode information.
+
+        @type mode: C{str}
+        @param mode: A string describing this channel's modes.
+
+        @param args: Any additional arguments required by the modes.
+        """
+        self.sendLine(":%s %s %s %s %s %s" % (
+            self.hostname, RPL_CHANNELMODEIS, user, channel, mode, ' '.join(args)))
+
+
+
+class ServerSupportedFeatures(_CommandDispatcherMixin):
+    """
+    Handle ISUPPORT messages.
+
+    Feature names match those in the ISUPPORT RFC draft identically.
+
+    Information regarding the specifics of ISUPPORT was gleaned from
+    <http://www.irc.org/tech_docs/draft-brocklesby-irc-isupport-03.txt>.
+    """
+    prefix = 'isupport'
+
+    def __init__(self):
+        self._features = {
+            'CHANNELLEN': 200,
+            'CHANTYPES': tuple('#&'),
+            'MODES': 3,
+            'NICKLEN': 9,
+            'PREFIX': self._parsePrefixParam('(ovh)@+%'),
+            # The ISUPPORT draft explicitly says that there is no default for
+            # CHANMODES, but we're defaulting it here to handle the case where
+            # the IRC server doesn't send us any ISUPPORT information, since
+            # IRCClient.getChannelModeParams relies on this value.
+            'CHANMODES': self._parseChanModesParam(['b', '', 'lk'])}
+
+
+    def _splitParamArgs(cls, params, valueProcessor=None):
+        """
+        Split ISUPPORT parameter arguments.
+
+        Values can optionally be processed by C{valueProcessor}.
+
+        For example::
+
+            >>> ServerSupportedFeatures._splitParamArgs(['A:1', 'B:2'])
+            (('A', '1'), ('B', '2'))
+
+        @type params: C{iterable} of C{str}
+
+        @type valueProcessor: C{callable} taking {str}
+        @param valueProcessor: Callable to process argument values, or C{None}
+            to perform no processing
+
+        @rtype: C{list} of C{(str, object)}
+        @return: Sequence of C{(name, processedValue)}
+        """
+        if valueProcessor is None:
+            valueProcessor = lambda x: x
+
+        def _parse():
+            for param in params:
+                if ':' not in param:
+                    param += ':'
+                a, b = param.split(':', 1)
+                yield a, valueProcessor(b)
+        return list(_parse())
+    _splitParamArgs = classmethod(_splitParamArgs)
+
+
+    def _unescapeParamValue(cls, value):
+        """
+        Unescape an ISUPPORT parameter.
+
+        The only form of supported escape is C{\\xHH}, where HH must be a valid
+        2-digit hexadecimal number.
+
+        @rtype: C{str}
+        """
+        def _unescape():
+            parts = value.split('\\x')
+            # The first part can never be preceded by the escape.
+            yield parts.pop(0)
+            for s in parts:
+                octet, rest = s[:2], s[2:]
+                try:
+                    octet = int(octet, 16)
+                except ValueError:
+                    raise ValueError('Invalid hex octet: %r' % (octet,))
+                yield chr(octet) + rest
+
+        if '\\x' not in value:
+            return value
+        return ''.join(_unescape())
+    _unescapeParamValue = classmethod(_unescapeParamValue)
+
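+    # Example: the escape sequence "\x20" decodes to a space:
+    #
+    #   >>> ServerSupportedFeatures._unescapeParamValue('foo\\x20bar')
+    #   'foo bar'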
+
+    def _splitParam(cls, param):
+        """
+        Split an ISUPPORT parameter.
+
+        @type param: C{str}
+
+        @rtype: C{(str, list)}
+        @return: C{(key, arguments)}
+        """
+        if '=' not in param:
+            param += '='
+        key, value = param.split('=', 1)
+        return key, map(cls._unescapeParamValue, value.split(','))
+    _splitParam = classmethod(_splitParam)
+
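+    # Example with a multi-argument parameter (values are split on ','):
+    #
+    #   >>> ServerSupportedFeatures._splitParam('TARGMAX=PRIVMSG:4,NOTICE:3')
+    #   ('TARGMAX', ['PRIVMSG:4', 'NOTICE:3'])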
+
+    def _parsePrefixParam(cls, prefix):
+        """
+        Parse the ISUPPORT "PREFIX" parameter.
+
+        The order in which the parameter arguments appear is significant: the
+        earlier a mode appears, the more privileges it gives.
+
+        @rtype: C{dict} mapping C{str} to C{(str, int)}
+        @return: A dictionary mapping a mode character to a two-tuple of
+            C{(symbol, priority)}; the lower the priority (the lowest being
+            C{0}), the more privileges it gives
+        """
+        if not prefix:
+            return None
+        if prefix[0] != '(' and ')' not in prefix:
+            raise ValueError('Malformed PREFIX parameter')
+        modes, symbols = prefix.split(')', 1)
+        symbols = zip(symbols, xrange(len(symbols)))
+        modes = modes[1:]
+        return dict(zip(modes, symbols))
+    _parsePrefixParam = classmethod(_parsePrefixParam)
+
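+    # For instance, the common "(ov)@+" value maps the operator and voice
+    # modes to their status symbols and priorities:
+    #
+    #   >>> prefixes = ServerSupportedFeatures._parsePrefixParam('(ov)@+')
+    #   >>> prefixes['o'], prefixes['v']
+    #   (('@', 0), ('+', 1))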
+
+    def _parseChanModesParam(self, params):
+        """
+        Parse the ISUPPORT "CHANMODES" parameter.
+
+        See L{isupport_CHANMODES} for a detailed explanation of this parameter.
+        """
+        names = ('addressModes', 'param', 'setParam', 'noParam')
+        if len(params) > len(names):
+            raise ValueError(
+                'Expecting a maximum of %d channel mode parameters, got %d' % (
+                    len(names), len(params)))
+        items = map(lambda key, value: (key, value or ''), names, params)
+        return dict(items)
+    _parseChanModesParam = classmethod(_parseChanModesParam)
+
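+    # For example, a server advertising "CHANMODES=b,k,l,imnpst" yields:
+    #
+    #   >>> cm = ServerSupportedFeatures._parseChanModesParam(['b', 'k', 'l', 'imnpst'])
+    #   >>> cm['addressModes'], cm['param'], cm['setParam'], cm['noParam']
+    #   ('b', 'k', 'l', 'imnpst')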
+
+    def getFeature(self, feature, default=None):
+        """
+        Get a server supported feature's value.
+
+        A feature with the value C{None} is equivalent to the feature being
+        unsupported.
+
+        @type feature: C{str}
+        @param feature: Feature name
+
+        @type default: C{object}
+        @param default: The value to default to, assuming that C{feature}
+            is not supported
+
+        @return: Feature value
+        """
+        return self._features.get(feature, default)
+
+
+    def hasFeature(self, feature):
+        """
+        Determine whether a feature is supported or not.
+
+        @rtype: C{bool}
+        """
+        return self.getFeature(feature) is not None
+
+
+    def parse(self, params):
+        """
+        Parse ISUPPORT parameters.
+
+        If an unknown parameter is encountered, it is simply added to the
+        dictionary, keyed by its name, as a tuple of the parameters provided.
+
+        @type params: C{iterable} of C{str}
+        @param params: Iterable of ISUPPORT parameters to parse
+        """
+        for param in params:
+            key, value = self._splitParam(param)
+            if key.startswith('-'):
+                self._features.pop(key[1:], None)
+            else:
+                self._features[key] = self.dispatch(key, value)
+
+
+    def isupport_unknown(self, command, params):
+        """
+        Unknown ISUPPORT parameter.
+        """
+        return tuple(params)
+
+
+    def isupport_CHANLIMIT(self, params):
+        """
+        The maximum number of each channel type a user may join.
+        """
+        return self._splitParamArgs(params, _intOrDefault)
+
+
+    def isupport_CHANMODES(self, params):
+        """
+        Available channel modes.
+
+        There are 4 categories of channel mode::
+
+            addressModes - Modes that add or remove an address to or from a
+            list, these modes always take a parameter.
+
+            param - Modes that change a setting on a channel, these modes
+            always take a parameter.
+
+            setParam - Modes that change a setting on a channel, these modes
+            only take a parameter when being set.
+
+            noParam - Modes that change a setting on a channel, these modes
+            never take a parameter.
+        """
+        try:
+            return self._parseChanModesParam(params)
+        except ValueError:
+            return self.getFeature('CHANMODES')
+
+
+    def isupport_CHANNELLEN(self, params):
+        """
+        Maximum length of a channel name a client may create.
+        """
+        return _intOrDefault(params[0], self.getFeature('CHANNELLEN'))
+
+
+    def isupport_CHANTYPES(self, params):
+        """
+        Valid channel prefixes.
+        """
+        return tuple(params[0])
+
+
+    def isupport_EXCEPTS(self, params):
+        """
+        Mode character for "ban exceptions".
+
+        The presence of this parameter indicates that the server supports
+        this functionality.
+        """
+        return params[0] or 'e'
+
+
+    def isupport_IDCHAN(self, params):
+        """
+        Safe channel identifiers.
+
+        The presence of this parameter indicates that the server supports
+        this functionality.
+        """
+        return self._splitParamArgs(params)
+
+
+    def isupport_INVEX(self, params):
+        """
+        Mode character for "invite exceptions".
+
+        The presence of this parameter indicates that the server supports
+        this functionality.
+        """
+        return params[0] or 'I'
+
+
+    def isupport_KICKLEN(self, params):
+        """
+        Maximum length of a kick message a client may provide.
+        """
+        return _intOrDefault(params[0])
+
+
+    def isupport_MAXLIST(self, params):
+        """
+        Maximum number of "list modes" a client may set on a channel at once.
+
+        List modes are identified by the "addressModes" key in CHANMODES.
+        """
+        return self._splitParamArgs(params, _intOrDefault)
+
+
+    def isupport_MODES(self, params):
+        """
+        Maximum number of modes accepting parameters that may be sent, by a
+        client, in a single MODE command.
+        """
+        return _intOrDefault(params[0])
+
+
+    def isupport_NETWORK(self, params):
+        """
+        IRC network name.
+        """
+        return params[0]
+
+
+    def isupport_NICKLEN(self, params):
+        """
+        Maximum length of a nickname the client may use.
+        """
+        return _intOrDefault(params[0], self.getFeature('NICKLEN'))
+
+
+    def isupport_PREFIX(self, params):
+        """
+        Mapping of channel modes that clients may have to status flags.
+        """
+        try:
+            return self._parsePrefixParam(params[0])
+        except ValueError:
+            return self.getFeature('PREFIX')
+
+
+    def isupport_SAFELIST(self, params):
+        """
+        Flag indicating that a client may request a LIST without being
+        disconnected due to the large amount of data generated.
+        """
+        return True
+
+
+    def isupport_STATUSMSG(self, params):
+        """
+        The server supports sending messages only to clients on a channel
+        with a specific status.
+        """
+        return params[0]
+
+
+    def isupport_TARGMAX(self, params):
+        """
+        Maximum number of targets allowable for commands that accept multiple
+        targets.
+        """
+        return dict(self._splitParamArgs(params, _intOrDefault))
+
+
+    def isupport_TOPICLEN(self, params):
+        """
+        Maximum length of a topic that may be set.
+        """
+        return _intOrDefault(params[0])
+
+
+
+class IRCClient(basic.LineReceiver):
+    """
+    Internet Relay Chat client protocol, with sprinkles.
+
+    In addition to providing an interface for an IRC client protocol,
+    this class also contains reasonable implementations of many common
+    CTCP methods.
+
+    TODO
+    ====
+     - Limit the length of messages sent (because the IRC server probably
+       does).
+     - Add flood protection/rate limiting for my CTCP replies.
+     - NickServ cooperation.  (a mix-in?)
+
+    @ivar nickname: Nickname the client will use.
+    @ivar password: Password used to log on to the server.  May be C{None}.
+    @ivar realname: Supplied to the server during login as the "Real name"
+        or "ircname".  May be C{None}.
+    @ivar username: Supplied to the server during login as the "User name".
+        May be C{None}.
+
+    @ivar userinfo: Sent in reply to a C{USERINFO} CTCP query.  If C{None}, no
+        USERINFO reply will be sent.
+        "This is used to transmit a string which is settable by
+        the user (and never should be set by the client)."
+    @ivar fingerReply: Sent in reply to a C{FINGER} CTCP query.  If C{None}, no
+        FINGER reply will be sent.
+    @type fingerReply: Callable or String
+
+    @ivar versionName: CTCP VERSION reply, client name.  If C{None}, no VERSION
+        reply will be sent.
+    @type versionName: C{str}, or None.
+    @ivar versionNum: CTCP VERSION reply, client version.
+    @type versionNum: C{str}, or None.
+    @ivar versionEnv: CTCP VERSION reply, environment the client is running in.
+    @type versionEnv: C{str}, or None.
+
+    @ivar sourceURL: CTCP SOURCE reply, a URL where the source code of this
+        client may be found.  If C{None}, no SOURCE reply will be sent.
+
+    @ivar lineRate: Minimum delay between lines sent to the server.  If
+        C{None}, no delay will be imposed.
+    @type lineRate: Number of Seconds.
+
+    @ivar motd: Either L{None} or, between receipt of I{RPL_MOTDSTART} and
+        I{RPL_ENDOFMOTD}, a L{list} of L{str}, each of which is the content
+        of an I{RPL_MOTD} message.
+
+    @ivar erroneousNickFallback: Default nickname assigned when an unregistered
+        client triggers an C{ERR_ERRONEUSNICKNAME} while trying to register
+        with an illegal nickname.
+    @type erroneousNickFallback: C{str}
+
+    @ivar _registered: Whether or not the user is registered. It becomes True
+        once a welcome has been received from the server.
+    @type _registered: C{bool}
+
+    @ivar _attemptedNick: The nickname the client will try to register. It may
+        change if it is illegal or already taken. L{nickname} becomes the
+        L{_attemptedNick} that is successfully registered.
+    @type _attemptedNick:  C{str}
+
+    @type supported: L{ServerSupportedFeatures}
+    @ivar supported: Available ISUPPORT features on the server
+
+    @type hostname: C{str}
+    @ivar hostname: Host name of the IRC server the client is connected to.
+        Initially the host name is C{None} and later is set to the host name
+        from which the I{RPL_WELCOME} message is received.
+
+    @type _heartbeat: L{task.LoopingCall}
+    @ivar _heartbeat: Looping call to perform the keepalive by calling
+        L{IRCClient._sendHeartbeat} every L{heartbeatInterval} seconds, or
+        C{None} if there is no heartbeat.
+
+    @type heartbeatInterval: C{float}
+    @ivar heartbeatInterval: Interval, in seconds, to send I{PING} messages to
+        the server as a form of keepalive, defaults to 120 seconds. Use C{None}
+        to disable the heartbeat.
+    """
+    hostname = None
+    motd = None
+    nickname = 'irc'
+    password = None
+    realname = None
+    username = None
+    ### Responses to various CTCP queries.
+
+    userinfo = None
+    # fingerReply is a callable returning a string, or a str()able object.
+    fingerReply = None
+    versionName = None
+    versionNum = None
+    versionEnv = None
+
+    sourceURL = "http://twistedmatrix.com/downloads/"
+
+    dcc_destdir = '.'
+    dcc_sessions = None
+
+    # If this is false, no attempt will be made to identify
+    # ourself to the server.
+    performLogin = 1
+
+    lineRate = None
+    _queue = None
+    _queueEmptying = None
+
+    delimiter = '\n' # '\r\n' will also work (see dataReceived)
+
+    __pychecker__ = 'unusednames=params,prefix,channel'
+
+    _registered = False
+    _attemptedNick = ''
+    erroneousNickFallback = 'defaultnick'
+
+    _heartbeat = None
+    heartbeatInterval = 120
+
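+    # A minimal subclass sketch (LogBot and '#example' are placeholder names);
+    # the class attributes above are normally customised in the same way:
+    #
+    #   class LogBot(IRCClient):
+    #       nickname = 'logbot'
+    #
+    #       def signedOn(self):
+    #           self.join('#example')
+    #
+    #       def privmsg(self, user, channel, message):
+    #           log.msg('<%s> %s' % (user.split('!', 1)[0], message))
+    #
+    # The subclass is then wrapped in a protocol.ClientFactory and handed to
+    # reactor.connectTCP(host, 6667, factory).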
+
+    def _reallySendLine(self, line):
+        return basic.LineReceiver.sendLine(self, lowQuote(line) + '\r')
+
+    def sendLine(self, line):
+        if self.lineRate is None:
+            self._reallySendLine(line)
+        else:
+            self._queue.append(line)
+            if not self._queueEmptying:
+                self._sendLine()
+
+    def _sendLine(self):
+        if self._queue:
+            self._reallySendLine(self._queue.pop(0))
+            self._queueEmptying = reactor.callLater(self.lineRate,
+                                                    self._sendLine)
+        else:
+            self._queueEmptying = None
+
+
+    def connectionLost(self, reason):
+        basic.LineReceiver.connectionLost(self, reason)
+        self.stopHeartbeat()
+
+
+    def _createHeartbeat(self):
+        """
+        Create the heartbeat L{LoopingCall}.
+        """
+        return task.LoopingCall(self._sendHeartbeat)
+
+
+    def _sendHeartbeat(self):
+        """
+        Send a I{PING} message to the IRC server as a form of keepalive.
+        """
+        self.sendLine('PING ' + self.hostname)
+
+
+    def stopHeartbeat(self):
+        """
+        Stop sending I{PING} messages to keep the connection to the server
+        alive.
+
+        @since: 11.1
+        """
+        if self._heartbeat is not None:
+            self._heartbeat.stop()
+            self._heartbeat = None
+
+
+    def startHeartbeat(self):
+        """
+        Start sending I{PING} messages every L{IRCClient.heartbeatInterval}
+        seconds to keep the connection to the server alive during periods of no
+        activity.
+
+        @since: 11.1
+        """
+        self.stopHeartbeat()
+        if self.heartbeatInterval is None:
+            return
+        self._heartbeat = self._createHeartbeat()
+        self._heartbeat.start(self.heartbeatInterval, now=False)
+
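+    # Sketch: the keepalive is driven entirely by the heartbeatInterval class
+    # attribute, so a subclass only needs to override it (class names below
+    # are placeholders):
+    #
+    #   class QuietClient(IRCClient):
+    #       heartbeatInterval = None     # no keepalive PINGs at all
+    #
+    #   class ChattyClient(IRCClient):
+    #       heartbeatInterval = 30.0     # PING the server every 30 seconds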
+
+    ### Interface level client->user output methods
+    ###
+    ### You'll want to override these.
+
+    ### Methods relating to the server itself
+
+    def created(self, when):
+        """
+        Called with creation date information about the server, usually at logon.
+
+        @type when: C{str}
+        @param when: A string describing when the server was created, probably.
+        """
+
+    def yourHost(self, info):
+        """
+        Called with daemon information about the server, usually at logon.
+
+        @type info: C{str}
+        @param info: A string describing what software the server is running, probably.
+        """
+
+    def myInfo(self, servername, version, umodes, cmodes):
+        """
+        Called with information about the server, usually at logon.
+
+        @type servername: C{str}
+        @param servername: The hostname of this server.
+
+        @type version: C{str}
+        @param version: A description of what software this server runs.
+
+        @type umodes: C{str}
+        @param umodes: All the available user modes.
+
+        @type cmodes: C{str}
+        @param cmodes: All the available channel modes.
+        """
+
+    def luserClient(self, info):
+        """
+        Called with information about the number of connections, usually at logon.
+
+        @type info: C{str}
+        @param info: A description of the number of clients and servers
+        connected to the network, probably.
+        """
+
+    def bounce(self, info):
+        """
+        Called with information about where the client should reconnect.
+
+        @type info: C{str}
+        @param info: A plaintext description of the address that should be
+        connected to.
+        """
+
+    def isupport(self, options):
+        """
+        Called with various information about what the server supports.
+
+        @type options: C{list} of C{str}
+        @param options: Descriptions of features or limits of the server, possibly
+        in the form "NAME=VALUE".
+        """
+
+    def luserChannels(self, channels):
+        """
+        Called with the number of channels that exist on the server.
+
+        @type channels: C{int}
+        """
+
+    def luserOp(self, ops):
+        """
+        Called with the number of ops logged on to the server.
+
+        @type ops: C{int}
+        """
+
+    def luserMe(self, info):
+        """
+        Called with information about the server connected to.
+
+        @type info: C{str}
+        @param info: A plaintext string describing the number of users and servers
+        connected to this server.
+        """
+
+    ### Methods involving me directly
+
+    def privmsg(self, user, channel, message):
+        """
+        Called when I have a message from a user to me or a channel.
+        """
+        pass
+
+    def joined(self, channel):
+        """
+        Called when I finish joining a channel.
+
+        channel has the starting character (C{'#'}, C{'&'}, C{'!'}, or C{'+'})
+        intact.
+        """
+
+    def left(self, channel):
+        """
+        Called when I have left a channel.
+
+        channel has the starting character (C{'#'}, C{'&'}, C{'!'}, or C{'+'})
+        intact.
+        """
+
+
+    def noticed(self, user, channel, message):
+        """
+        Called when I have a notice from a user to me or a channel.
+
+        If the client makes any automated replies, it must not do so in
+        response to a NOTICE message, per the RFC::
+
+            The difference between NOTICE and PRIVMSG is that
+            automatic replies MUST NEVER be sent in response to a
+            NOTICE message. [...] The object of this rule is to avoid
+            loops between clients automatically sending something in
+            response to something it received.
+        """
+
+
+    def modeChanged(self, user, channel, set, modes, args):
+        """
+        Called when users or channel's modes are changed.
+
+        @type user: C{str}
+        @param user: The user and hostmask which instigated this change.
+
+        @type channel: C{str}
+        @param channel: The channel where the modes are changed. If args is
+        empty the channel for which the modes are changing. If the changes are
+        at server level it could be equal to C{user}.
+
+        @type set: C{bool} or C{int}
+        @param set: True if the mode(s) is being added, False if it is being
+        removed. If some modes are added and others removed at the same time
+        this function will be called twice, the first time with all the added
+        modes, the second with the removed ones. (To change this behaviour
+        override the irc_MODE method)
+
+        @type modes: C{str}
+        @param modes: The mode or modes which are being changed.
+
+        @type args: C{tuple}
+        @param args: Any additional information required for the mode
+        change.
+        """
+
+    def pong(self, user, secs):
+        """
+        Called with the results of a CTCP PING query.
+        """
+        pass
+
+    def signedOn(self):
+        """
+        Called after successfully signing on to the server.
+        """
+        pass
+
+    def kickedFrom(self, channel, kicker, message):
+        """
+        Called when I am kicked from a channel.
+        """
+        pass
+
+    def nickChanged(self, nick):
+        """
+        Called when my nick has been changed.
+        """
+        self.nickname = nick
+
+
+    ### Things I observe other people doing in a channel.
+
+    def userJoined(self, user, channel):
+        """
+        Called when I see another user joining a channel.
+        """
+        pass
+
+    def userLeft(self, user, channel):
+        """
+        Called when I see another user leaving a channel.
+        """
+        pass
+
+    def userQuit(self, user, quitMessage):
+        """
+        Called when I see another user disconnect from the network.
+        """
+        pass
+
+    def userKicked(self, kickee, channel, kicker, message):
+        """
+        Called when I observe someone else being kicked from a channel.
+        """
+        pass
+
+    def action(self, user, channel, data):
+        """
+        Called when I see a user perform an ACTION on a channel.
+        """
+        pass
+
+    def topicUpdated(self, user, channel, newTopic):
+        """
+        In channel, user changed the topic to newTopic.
+
+        Also called when first joining a channel.
+        """
+        pass
+
+    def userRenamed(self, oldname, newname):
+        """
+        A user changed their name from oldname to newname.
+        """
+        pass
+
+    ### Information from the server.
+
+    def receivedMOTD(self, motd):
+        """
+        I received a message-of-the-day banner from the server.
+
+        motd is a list of strings, where each string was sent as a separate
+        message from the server. To display, you might want to use::
+
+            '\\n'.join(motd)
+
+        to get a nicely formatted string.
+        """
+        pass
+
+    ### user input commands, client->server
+    ### Your client will want to invoke these.
+
+    def join(self, channel, key=None):
+        """
+        Join a channel.
+
+        @type channel: C{str}
+        @param channel: The name of the channel to join. If it has no prefix,
+            C{'#'} will be prepended to it.
+        @type key: C{str}
+        @param key: If specified, the key used to join the channel.
+        """
+        if channel[0] not in CHANNEL_PREFIXES:
+            channel = '#' + channel
+        if key:
+            self.sendLine("JOIN %s %s" % (channel, key))
+        else:
+            self.sendLine("JOIN %s" % (channel,))
+
+    def leave(self, channel, reason=None):
+        """
+        Leave a channel.
+
+        @type channel: C{str}
+        @param channel: The name of the channel to leave. If it has no prefix,
+            C{'#'} will be prepended to it.
+        @type reason: C{str}
+        @param reason: If given, the reason for leaving.
+        """
+        if channel[0] not in CHANNEL_PREFIXES:
+            channel = '#' + channel
+        if reason:
+            self.sendLine("PART %s :%s" % (channel, reason))
+        else:
+            self.sendLine("PART %s" % (channel,))
+
+    def kick(self, channel, user, reason=None):
+        """
+        Attempt to kick a user from a channel.
+
+        @type channel: C{str}
+        @param channel: The name of the channel to kick the user from. If it has
+            no prefix, C{'#'} will be prepended to it.
+        @type user: C{str}
+        @param user: The nick of the user to kick.
+        @type reason: C{str}
+        @param reason: If given, the reason for kicking the user.
+        """
+        if channel[0] not in CHANNEL_PREFIXES:
+            channel = '#' + channel
+        if reason:
+            self.sendLine("KICK %s %s :%s" % (channel, user, reason))
+        else:
+            self.sendLine("KICK %s %s" % (channel, user))
+
+    part = leave
+
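+    # Usage sketch for the channel helpers above ('#example', 'sekrit' and
+    # 'troll' are placeholders), with the lines they produce:
+    #
+    #   self.join('#example', 'sekrit')         # JOIN #example sekrit
+    #   self.leave('#example', 'back later')    # PART #example :back later
+    #   self.kick('#example', 'troll', 'spam')  # KICK #example troll :spam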
+
+    def invite(self, user, channel):
+        """
+        Attempt to invite a user to a channel.
+
+        @type user: C{str}
+        @param user: The user to invite.
+        @type channel: C{str}
+        @param channel: The channel to invite the user to.
+
+        @since: 11.0
+        """
+        if channel[0] not in CHANNEL_PREFIXES:
+            channel = '#' + channel
+        self.sendLine("INVITE %s %s" % (user, channel))
+
+
+    def topic(self, channel, topic=None):
+        """
+        Attempt to set the topic of the given channel, or ask what it is.
+
+        If topic is None, then I send a topic query instead of trying to set the
+        topic. The server should respond with a TOPIC message containing the
+        current topic of the given channel.
+
+        @type channel: C{str}
+        @param channel: The name of the channel to change the topic on. If it
+            has no prefix, C{'#'} will be prepended to it.
+        @type topic: C{str}
+        @param topic: If specified, what to set the topic to.
+        """
+        # << TOPIC #xtestx :fff
+        if channel[0] not in CHANNEL_PREFIXES:
+            channel = '#' + channel
+        if topic != None:
+            self.sendLine("TOPIC %s :%s" % (channel, topic))
+        else:
+            self.sendLine("TOPIC %s" % (channel,))
+
+
+    def mode(self, chan, set, modes, limit = None, user = None, mask = None):
+        """
+        Change the modes on a user or channel.
+
+        The C{limit}, C{user}, and C{mask} parameters are mutually exclusive.
+
+        @type chan: C{str}
+        @param chan: The name of the channel to operate on.
+        @type set: C{bool}
+        @param set: True to give the user or channel permissions and False to
+            remove them.
+        @type modes: C{str}
+        @param modes: The mode flags to set on the user or channel.
+        @type limit: C{int}
+        @param limit: In conjunction with the C{'l'} mode flag, limits the
+             number of users on the channel.
+        @type user: C{str}
+        @param user: The user to change the mode on.
+        @type mask: C{str}
+        @param mask: In conjunction with the C{'b'} mode flag, sets a mask of
+            users to be banned from the channel.
+        """
+        if set:
+            line = 'MODE %s +%s' % (chan, modes)
+        else:
+            line = 'MODE %s -%s' % (chan, modes)
+        if limit is not None:
+            line = '%s %d' % (line, limit)
+        elif user is not None:
+            line = '%s %s' % (line, user)
+        elif mask is not None:
+            line = '%s %s' % (line, mask)
+        self.sendLine(line)
+
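+    # Usage sketch for mode() ('#example' and the nick/mask are placeholders),
+    # with the lines it produces:
+    #
+    #   self.mode('#example', True, 'o', user='alice')
+    #       # MODE #example +o alice
+    #   self.mode('#example', False, 'b', mask='*!*@spam.example')
+    #       # MODE #example -b *!*@spam.example
+    #   self.mode('#example', True, 'l', limit=25)
+    #       # MODE #example +l 25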
+
+    def say(self, channel, message, length=None):
+        """
+        Send a message to a channel.
+
+        @type channel: C{str}
+        @param channel: The channel to say the message on. If it has no prefix,
+            C{'#'} will be prepended to it.
+        @type message: C{str}
+        @param message: The message to say.
+        @type length: C{int}
+        @param length: The maximum number of octets to send at a time.  This has
+            the effect of turning a single call to C{msg()} into multiple
+            commands to the server.  This is useful when long messages may be
+            sent that would otherwise cause the server to kick us off or
+            silently truncate the text we are sending.  If None is passed, the
+            entire message is always sent in one command.
+        """
+        if channel[0] not in CHANNEL_PREFIXES:
+            channel = '#' + channel
+        self.msg(channel, message, length)
+
+
+    def _safeMaximumLineLength(self, command):
+        """
+        Estimate a safe maximum line length for the given command.
+
+        This is done by assuming the maximum values for nickname length,
+        realname and hostname combined with the command that needs to be sent
+        and some guessing. A theoretical maximum value is used because it is
+        possible that our nickname, username or hostname changes (on the server
+        side) while the length is still being calculated.
+        """
+        # :nickname!realname at hostname COMMAND ...
+        theoretical = ':%s!%s@%s %s' % (
+            'a' * self.supported.getFeature('NICKLEN'),
+            # This value is based on observation.
+            'b' * 10,
+            # See <http://tools.ietf.org/html/rfc2812#section-2.3.1>.
+            'c' * 63,
+            command)
+        # Fingers crossed.
+        fudge = 10
+        return MAX_COMMAND_LENGTH - len(theoretical) - fudge
+
+
+    def msg(self, user, message, length=None):
+        """
+        Send a message to a user or channel.
+
+        The message will be split into multiple commands to the server if:
+         - The message contains any newline characters
+         - Any span between newline characters is longer than the given
+           line-length.
+
+        @param user: Username or channel name to which to direct the
+            message.
+        @type user: C{str}
+
+        @param message: Text to send.
+        @type message: C{str}
+
+        @param length: Maximum number of octets to send in a single
+            command, including the IRC protocol framing. If C{None} is given
+            then L{IRCClient._safeMaximumLineLength} is used to determine a
+            value.
+        @type length: C{int}
+        """
+        fmt = 'PRIVMSG %s :' % (user,)
+
+        if length is None:
+            length = self._safeMaximumLineLength(fmt)
+
+        # Account for the line terminator.
+        minimumLength = len(fmt) + 2
+        if length <= minimumLength:
+            raise ValueError("Maximum length must exceed %d for message "
+                             "to %s" % (minimumLength, user))
+        for line in split(message, length - minimumLength):
+            self.sendLine(fmt + line)
+
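+    # Sketch: long messages are transparently split into several PRIVMSG
+    # commands, each within the requested octet budget ('alice' and
+    # someVeryLongText are placeholders):
+    #
+    #   self.msg('alice', 'a short message')     # one PRIVMSG
+    #   self.msg('alice', someVeryLongText, length=256)
+    #       # several "PRIVMSG alice :..." lines, each at most 256 octets
+    #       # including the protocol framing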
+
+    def notice(self, user, message):
+        """
+        Send a notice to a user.
+
+        Notices are like normal messages, but should never get automated
+        replies.
+
+        @type user: C{str}
+        @param user: The user to send a notice to.
+        @type message: C{str}
+        @param message: The contents of the notice to send.
+        """
+        self.sendLine("NOTICE %s :%s" % (user, message))
+
+
+    def away(self, message=''):
+        """
+        Mark this client as away.
+
+        @type message: C{str}
+        @param message: If specified, the away message.
+        """
+        self.sendLine("AWAY :%s" % message)
+
+
+    def back(self):
+        """
+        Clear the away status.
+        """
+        # An empty away marks us as back
+        self.away()
+
+
+    def whois(self, nickname, server=None):
+        """
+        Retrieve user information about the given nick name.
+
+        @type nickname: C{str}
+        @param nickname: The nick name about which to retrieve information.
+
+        @since: 8.2
+        """
+        if server is None:
+            self.sendLine('WHOIS ' + nickname)
+        else:
+            self.sendLine('WHOIS %s %s' % (server, nickname))
+
+
+    def register(self, nickname, hostname='foo', servername='bar'):
+        """
+        Login to the server.
+
+        @type nickname: C{str}
+        @param nickname: The nickname to register.
+        @type hostname: C{str}
+        @param hostname: If specified, the hostname to logon as.
+        @type servername: C{str}
+        @param servername: If specified, the servername to logon as.
+        """
+        if self.password is not None:
+            self.sendLine("PASS %s" % self.password)
+        self.setNick(nickname)
+        if self.username is None:
+            self.username = nickname
+        self.sendLine("USER %s %s %s :%s" % (self.username, hostname, servername, self.realname))
+
+
+    def setNick(self, nickname):
+        """
+        Set this client's nickname.
+
+        @type nickname: C{str}
+        @param nickname: The nickname to change to.
+        """
+        self._attemptedNick = nickname
+        self.sendLine("NICK %s" % nickname)
+
+
+    def quit(self, message = ''):
+        """
+        Disconnect from the server.
+
+        @type message: C{str}
+
+        @param message: If specified, the message to give when quitting the
+            server.
+        """
+        self.sendLine("QUIT :%s" % message)
+
+    ### user input commands, client->client
+
+    def describe(self, channel, action):
+        """
+        Strike a pose.
+
+        @type channel: C{str}
+        @param channel: The name of the channel to have an action on. If it
+            has no prefix, it is sent to the user of that name.
+        @type action: C{str}
+        @param action: The action to perform.
+        @since: 9.0
+        """
+        self.ctcpMakeQuery(channel, [('ACTION', action)])
+
+
+    _pings = None
+    _MAX_PINGRING = 12
+
+    def ping(self, user, text = None):
+        """
+        Measure round-trip delay to another IRC client.
+        """
+        if self._pings is None:
+            self._pings = {}
+
+        if text is None:
+            chars = string.letters + string.digits + string.punctuation
+            key = ''.join([random.choice(chars) for i in range(12)])
+        else:
+            key = str(text)
+        self._pings[(user, key)] = time.time()
+        self.ctcpMakeQuery(user, [('PING', key)])
+
+        if len(self._pings) > self._MAX_PINGRING:
+            # Remove some of the oldest entries.
+            byValue = [(v, k) for (k, v) in self._pings.items()]
+            byValue.sort()
+            excess = len(self._pings) - self._MAX_PINGRING
+            for i in xrange(excess):
+                del self._pings[byValue[i][1]]
+
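+    # Sketch of the CTCP PING round trip: ping() records the send time keyed
+    # by (nick, key), the other client echoes the key back, and
+    # ctcpReply_PING (further down) calls pong() with the elapsed time.
+    # 'alice' is a placeholder nick:
+    #
+    #   self.ping('alice')
+    #
+    #   def pong(self, user, secs):
+    #       log.msg('round trip to %s took %.3f seconds' % (user, secs))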
+
+    def dccSend(self, user, file):
+        if type(file) == types.StringType:
+            file = open(file, 'r')
+
+        size = fileSize(file)
+
+        name = getattr(file, "name", "file@%s" % (id(file),))
+
+        factory = DccSendFactory(file)
+        port = reactor.listenTCP(0, factory, 1)
+
+        raise NotImplementedError,(
+            "XXX!!! Help!  I need to bind a socket, have it listen, and tell me its address.  "
+            "(and stop accepting once we've made a single connection.)")
+
+        my_address = struct.pack("!I", my_address)
+
+        args = ['SEND', name, my_address, str(port)]
+
+        if not (size is None):
+            args.append(size)
+
+        args = string.join(args, ' ')
+
+        self.ctcpMakeQuery(user, [('DCC', args)])
+
+
+    def dccResume(self, user, fileName, port, resumePos):
+        """
+        Send a DCC RESUME request to another user.
+        """
+        self.ctcpMakeQuery(user, [
+            ('DCC', ['RESUME', fileName, port, resumePos])])
+
+
+    def dccAcceptResume(self, user, fileName, port, resumePos):
+        """
+        Send a DCC ACCEPT response to clients who have requested a resume.
+        """
+        self.ctcpMakeQuery(user, [
+            ('DCC', ['ACCEPT', fileName, port, resumePos])])
+
+    ### server->client messages
+    ### You might want to fiddle with these,
+    ### but it is safe to leave them alone.
+
+    def irc_ERR_NICKNAMEINUSE(self, prefix, params):
+        """
+        Called when we try to register or change to a nickname that is already
+        taken.
+        """
+        self._attemptedNick = self.alterCollidedNick(self._attemptedNick)
+        self.setNick(self._attemptedNick)
+
+
+    def alterCollidedNick(self, nickname):
+        """
+        Generate an altered version of a nickname that caused a collision in an
+        effort to create an unused related name for subsequent registration.
+
+        @param nickname: The nickname a user is attempting to register.
+        @type nickname: C{str}
+
+        @returns: A string that is in some way different from the nickname.
+        @rtype: C{str}
+        """
+        return nickname + '_'
+
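+    # Sketch: a subclass can pick a different collision strategy, for example
+    # appending a random digit instead of an underscore (illustrative only):
+    #
+    #   def alterCollidedNick(self, nickname):
+    #       return '%s%d' % (nickname, random.randrange(10))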
+
+    def irc_ERR_ERRONEUSNICKNAME(self, prefix, params):
+        """
+        Called when we try to register or change to an illegal nickname.
+
+        The server should send this reply when the nickname contains any
+        disallowed characters.  The bot will stall, waiting for RPL_WELCOME, if
+        we don't handle this during sign-on.
+
+        @note: The method uses the spelling I{erroneus}, as it appears in
+            the RFC, section 6.1.
+        """
+        if not self._registered:
+            self.setNick(self.erroneousNickFallback)
+
+
+    def irc_ERR_PASSWDMISMATCH(self, prefix, params):
+        """
+        Called when the login was incorrect.
+        """
+        raise IRCPasswordMismatch("Password Incorrect.")
+
+
+    def irc_RPL_WELCOME(self, prefix, params):
+        """
+        Called when we have received the welcome from the server.
+        """
+        self.hostname = prefix
+        self._registered = True
+        self.nickname = self._attemptedNick
+        self.signedOn()
+        self.startHeartbeat()
+
+
+    def irc_JOIN(self, prefix, params):
+        """
+        Called when a user joins a channel.
+        """
+        nick = string.split(prefix,'!')[0]
+        channel = params[-1]
+        if nick == self.nickname:
+            self.joined(channel)
+        else:
+            self.userJoined(nick, channel)
+
+    def irc_PART(self, prefix, params):
+        """
+        Called when a user leaves a channel.
+        """
+        nick = string.split(prefix,'!')[0]
+        channel = params[0]
+        if nick == self.nickname:
+            self.left(channel)
+        else:
+            self.userLeft(nick, channel)
+
+    def irc_QUIT(self, prefix, params):
+        """
+        Called when a user has quit.
+        """
+        nick = string.split(prefix,'!')[0]
+        self.userQuit(nick, params[0])
+
+
+    def irc_MODE(self, user, params):
+        """
+        Parse a server mode change message.
+        """
+        channel, modes, args = params[0], params[1], params[2:]
+
+        if modes[0] not in '-+':
+            modes = '+' + modes
+
+        if channel == self.nickname:
+            # This is a mode change to our individual user, not a channel mode
+            # that involves us.
+            paramModes = self.getUserModeParams()
+        else:
+            paramModes = self.getChannelModeParams()
+
+        try:
+            added, removed = parseModes(modes, args, paramModes)
+        except IRCBadModes:
+            log.err(None, 'An error occurred while parsing the following '
+                          'MODE message: MODE %s' % (' '.join(params),))
+        else:
+            if added:
+                modes, params = zip(*added)
+                self.modeChanged(user, channel, True, ''.join(modes), params)
+
+            if removed:
+                modes, params = zip(*removed)
+                self.modeChanged(user, channel, False, ''.join(modes), params)
+
+
+    def irc_PING(self, prefix, params):
+        """
+        Called when someone has pinged us.
+        """
+        self.sendLine("PONG %s" % params[-1])
+
+    def irc_PRIVMSG(self, prefix, params):
+        """
+        Called when we get a message.
+        """
+        user = prefix
+        channel = params[0]
+        message = params[-1]
+
+        if not message:
+            # Don't raise an exception if we get a blank message.
+            return
+
+        if message[0] == X_DELIM:
+            m = ctcpExtract(message)
+            if m['extended']:
+                self.ctcpQuery(user, channel, m['extended'])
+
+            if not m['normal']:
+                return
+
+            message = string.join(m['normal'], ' ')
+
+        self.privmsg(user, channel, message)
+
+    def irc_NOTICE(self, prefix, params):
+        """
+        Called when a user gets a notice.
+        """
+        user = prefix
+        channel = params[0]
+        message = params[-1]
+
+        if message[0]==X_DELIM:
+            m = ctcpExtract(message)
+            if m['extended']:
+                self.ctcpReply(user, channel, m['extended'])
+
+            if not m['normal']:
+                return
+
+            message = string.join(m['normal'], ' ')
+
+        self.noticed(user, channel, message)
+
+    def irc_NICK(self, prefix, params):
+        """
+        Called when a user changes their nickname.
+        """
+        nick = string.split(prefix,'!', 1)[0]
+        if nick == self.nickname:
+            self.nickChanged(params[0])
+        else:
+            self.userRenamed(nick, params[0])
+
+    def irc_KICK(self, prefix, params):
+        """
+        Called when a user is kicked from a channel.
+        """
+        kicker = string.split(prefix,'!')[0]
+        channel = params[0]
+        kicked = params[1]
+        message = params[-1]
+        if string.lower(kicked) == string.lower(self.nickname):
+            # Yikes!
+            self.kickedFrom(channel, kicker, message)
+        else:
+            self.userKicked(kicked, channel, kicker, message)
+
+    def irc_TOPIC(self, prefix, params):
+        """
+        Someone in the channel set the topic.
+        """
+        user = string.split(prefix, '!')[0]
+        channel = params[0]
+        newtopic = params[1]
+        self.topicUpdated(user, channel, newtopic)
+
+    def irc_RPL_TOPIC(self, prefix, params):
+        """
+        Called when the topic for a channel is initially reported or when it
+        subsequently changes.
+        """
+        user = string.split(prefix, '!')[0]
+        channel = params[1]
+        newtopic = params[2]
+        self.topicUpdated(user, channel, newtopic)
+
+    def irc_RPL_NOTOPIC(self, prefix, params):
+        user = string.split(prefix, '!')[0]
+        channel = params[1]
+        newtopic = ""
+        self.topicUpdated(user, channel, newtopic)
+
+    def irc_RPL_MOTDSTART(self, prefix, params):
+        if params[-1].startswith("- "):
+            params[-1] = params[-1][2:]
+        self.motd = [params[-1]]
+
+    def irc_RPL_MOTD(self, prefix, params):
+        if params[-1].startswith("- "):
+            params[-1] = params[-1][2:]
+        if self.motd is None:
+            self.motd = []
+        self.motd.append(params[-1])
+
+
+    def irc_RPL_ENDOFMOTD(self, prefix, params):
+        """
+        I{RPL_ENDOFMOTD} indicates the end of the message of the day
+        messages.  Deliver the accumulated lines to C{receivedMOTD}.
+        """
+        motd = self.motd
+        self.motd = None
+        self.receivedMOTD(motd)
+
+
+    def irc_RPL_CREATED(self, prefix, params):
+        self.created(params[1])
+
+    def irc_RPL_YOURHOST(self, prefix, params):
+        self.yourHost(params[1])
+
+    def irc_RPL_MYINFO(self, prefix, params):
+        info = params[1].split(None, 3)
+        while len(info) < 4:
+            info.append(None)
+        self.myInfo(*info)
+
+    def irc_RPL_BOUNCE(self, prefix, params):
+        self.bounce(params[1])
+
+    def irc_RPL_ISUPPORT(self, prefix, params):
+        args = params[1:-1]
+        # Several ISUPPORT messages, in no particular order, may be sent
+        # to the client at any given point in time (usually only on connect,
+        # though.) For this reason, ServerSupportedFeatures.parse is intended
+        # to mutate the supported feature list.
+        self.supported.parse(args)
+        self.isupport(args)
+
+    def irc_RPL_LUSERCLIENT(self, prefix, params):
+        self.luserClient(params[1])
+
+    def irc_RPL_LUSEROP(self, prefix, params):
+        try:
+            self.luserOp(int(params[1]))
+        except ValueError:
+            pass
+
+    def irc_RPL_LUSERCHANNELS(self, prefix, params):
+        try:
+            self.luserChannels(int(params[1]))
+        except ValueError:
+            pass
+
+    def irc_RPL_LUSERME(self, prefix, params):
+        self.luserMe(params[1])
+
+    def irc_unknown(self, prefix, command, params):
+        pass
+
+    ### Receiving a CTCP query from another party
+    ### It is safe to leave these alone.
+
+
+    def ctcpQuery(self, user, channel, messages):
+        """
+        Dispatch method for any CTCP queries received.
+
+        Duplicated CTCP queries are ignored and no dispatch is
+        made. Unrecognized CTCP queries invoke L{IRCClient.ctcpUnknownQuery}.
+        """
+        seen = set()
+        for tag, data in messages:
+            method = getattr(self, 'ctcpQuery_%s' % tag, None)
+            if tag not in seen:
+                if method is not None:
+                    method(user, channel, data)
+                else:
+                    self.ctcpUnknownQuery(user, channel, tag, data)
+            seen.add(tag)
+
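+    # Sketch: supporting an extra CTCP query only needs a ctcpQuery_<TAG>
+    # method, which the dispatcher above looks up by name.  UPTIME is a
+    # made-up tag and the reply text is a placeholder:
+    #
+    #   def ctcpQuery_UPTIME(self, user, channel, data):
+    #       nick = user.split('!', 1)[0]
+    #       self.ctcpMakeReply(nick, [('UPTIME', '42 days')])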
+
+    def ctcpUnknownQuery(self, user, channel, tag, data):
+        """
+        Fallback handler for unrecognized CTCP queries.
+
+        No CTCP I{ERRMSG} reply is made to remove a potential denial of service
+        avenue.
+        """
+        log.msg('Unknown CTCP query from %r: %r %r' % (user, tag, data))
+
+
+    def ctcpQuery_ACTION(self, user, channel, data):
+        self.action(user, channel, data)
+
+    def ctcpQuery_PING(self, user, channel, data):
+        nick = string.split(user,"!")[0]
+        self.ctcpMakeReply(nick, [("PING", data)])
+
+    def ctcpQuery_FINGER(self, user, channel, data):
+        if data is not None:
+            self.quirkyMessage("Why did %s send '%s' with a FINGER query?"
+                               % (user, data))
+        if not self.fingerReply:
+            return
+
+        if callable(self.fingerReply):
+            reply = self.fingerReply()
+        else:
+            reply = str(self.fingerReply)
+
+        nick = string.split(user,"!")[0]
+        self.ctcpMakeReply(nick, [('FINGER', reply)])
+
+    def ctcpQuery_VERSION(self, user, channel, data):
+        if data is not None:
+            self.quirkyMessage("Why did %s send '%s' with a VERSION query?"
+                               % (user, data))
+
+        if self.versionName:
+            nick = string.split(user,"!")[0]
+            self.ctcpMakeReply(nick, [('VERSION', '%s:%s:%s' %
+                                       (self.versionName,
+                                        self.versionNum or '',
+                                        self.versionEnv or ''))])
+
+    def ctcpQuery_SOURCE(self, user, channel, data):
+        if data is not None:
+            self.quirkyMessage("Why did %s send '%s' with a SOURCE query?"
+                               % (user, data))
+        if self.sourceURL:
+            nick = string.split(user,"!")[0]
+            # The CTCP document (Zeuge, Rollo, Mesander 1994) says that SOURCE
+            # replies should be responded to with the location of an anonymous
+            # FTP server in host:directory:file format.  I'm taking the liberty
+            # of bringing it into the 21st century by sending a URL instead.
+            self.ctcpMakeReply(nick, [('SOURCE', self.sourceURL),
+                                      ('SOURCE', None)])
+
+    def ctcpQuery_USERINFO(self, user, channel, data):
+        if data is not None:
+            self.quirkyMessage("Why did %s send '%s' with a USERINFO query?"
+                               % (user, data))
+        if self.userinfo:
+            nick = string.split(user,"!")[0]
+            self.ctcpMakeReply(nick, [('USERINFO', self.userinfo)])
+
+    def ctcpQuery_CLIENTINFO(self, user, channel, data):
+        """
+        A master index of what CTCP tags this client knows.
+
+        If no arguments are provided, respond with a list of known tags.
+        If an argument is provided, provide human-readable help on
+        the usage of that tag.
+        """
+
+        nick = string.split(user,"!")[0]
+        if not data:
+            # XXX: prefixedMethodNames gets methods from my *class*,
+            # but it's entirely possible that this *instance* has more
+            # methods.
+            names = reflect.prefixedMethodNames(self.__class__,
+                                                'ctcpQuery_')
+
+            self.ctcpMakeReply(nick, [('CLIENTINFO',
+                                       string.join(names, ' '))])
+        else:
+            args = string.split(data)
+            method = getattr(self, 'ctcpQuery_%s' % (args[0],), None)
+            if not method:
+                self.ctcpMakeReply(nick, [('ERRMSG',
+                                           "CLIENTINFO %s :"
+                                           "Unknown query '%s'"
+                                           % (data, args[0]))])
+                return
+            doc = getattr(method, '__doc__', '')
+            self.ctcpMakeReply(nick, [('CLIENTINFO', doc)])
+
+
+    def ctcpQuery_ERRMSG(self, user, channel, data):
+        # Yeah, this seems strange, but that's what the spec says to do
+        # when faced with an ERRMSG query (not a reply).
+        nick = string.split(user,"!")[0]
+        self.ctcpMakeReply(nick, [('ERRMSG',
+                                   "%s :No error has occurred." % data)])
+
+    def ctcpQuery_TIME(self, user, channel, data):
+        if data is not None:
+            self.quirkyMessage("Why did %s send '%s' with a TIME query?"
+                               % (user, data))
+        nick = string.split(user,"!")[0]
+        self.ctcpMakeReply(nick,
+                           [('TIME', ':%s' %
+                             time.asctime(time.localtime(time.time())))])
+
+    def ctcpQuery_DCC(self, user, channel, data):
+        """Initiate a Direct Client Connection
+        """
+
+        if not data: return
+        dcctype = data.split(None, 1)[0].upper()
+        handler = getattr(self, "dcc_" + dcctype, None)
+        if handler:
+            if self.dcc_sessions is None:
+                self.dcc_sessions = []
+            data = data[len(dcctype)+1:]
+            handler(user, channel, data)
+        else:
+            nick = string.split(user,"!")[0]
+            self.ctcpMakeReply(nick, [('ERRMSG',
+                                       "DCC %s :Unknown DCC type '%s'"
+                                       % (data, dcctype))])
+            self.quirkyMessage("%s offered unknown DCC type %s"
+                               % (user, dcctype))
+
+    def dcc_SEND(self, user, channel, data):
+        # Use splitQuoted for those who send files with spaces in the names.
+        data = text.splitQuoted(data)
+        if len(data) < 3:
+            raise IRCBadMessage, "malformed DCC SEND request: %r" % (data,)
+
+        (filename, address, port) = data[:3]
+
+        address = dccParseAddress(address)
+        try:
+            port = int(port)
+        except ValueError:
+            raise IRCBadMessage, "Indecipherable port %r" % (port,)
+
+        size = -1
+        if len(data) >= 4:
+            try:
+                size = int(data[3])
+            except ValueError:
+                pass
+
+        # XXX Should we bother passing this data?
+        self.dccDoSend(user, address, port, filename, size, data)
+
+    def dcc_ACCEPT(self, user, channel, data):
+        data = text.splitQuoted(data)
+        if len(data) < 3:
+            raise IRCBadMessage, "malformed DCC SEND ACCEPT request: %r" % (data,)
+        (filename, port, resumePos) = data[:3]
+        try:
+            port = int(port)
+            resumePos = int(resumePos)
+        except ValueError:
+            return
+
+        self.dccDoAcceptResume(user, filename, port, resumePos)
+
+    def dcc_RESUME(self, user, channel, data):
+        data = text.splitQuoted(data)
+        if len(data) < 3:
+            raise IRCBadMessage, "malformed DCC SEND RESUME request: %r" % (data,)
+        (filename, port, resumePos) = data[:3]
+        try:
+            port = int(port)
+            resumePos = int(resumePos)
+        except ValueError:
+            return
+        self.dccDoResume(user, filename, port, resumePos)
+
+    def dcc_CHAT(self, user, channel, data):
+        data = text.splitQuoted(data)
+        if len(data) < 3:
+            raise IRCBadMessage, "malformed DCC CHAT request: %r" % (data,)
+
+        (filename, address, port) = data[:3]
+
+        address = dccParseAddress(address)
+        try:
+            port = int(port)
+        except ValueError:
+            raise IRCBadMessage, "Indecipherable port %r" % (port,)
+
+        self.dccDoChat(user, channel, address, port, data)
+
+    ### The dccDo methods are the slightly higher-level siblings of
+    ### common dcc_ methods; the arguments have been parsed for them.
+
+    def dccDoSend(self, user, address, port, fileName, size, data):
+        """Called when I receive a DCC SEND offer from a client.
+
+        By default, I do nothing here."""
+        ## filename = path.basename(arg)
+        ## protocol = DccFileReceive(filename, size,
+        ##                           (user,channel,data),self.dcc_destdir)
+        ## reactor.clientTCP(address, port, protocol)
+        ## self.dcc_sessions.append(protocol)
+        pass
+
+    def dccDoResume(self, user, file, port, resumePos):
+        """Called when a client is trying to resume an offered file
+        via DCC send.  It should be either replied to with a DCC
+        ACCEPT or ignored (default)."""
+        pass
+
+    def dccDoAcceptResume(self, user, file, port, resumePos):
+        """Called when a client has verified and accepted a DCC resume
+        request made by us.  By default it will do nothing."""
+        pass
+
+    def dccDoChat(self, user, channel, address, port, data):
+        pass
+        #factory = DccChatFactory(self, queryData=(user, channel, data))
+        #reactor.connectTCP(address, port, factory)
+        #self.dcc_sessions.append(factory)
+
+    #def ctcpQuery_SED(self, user, data):
+    #    """Simple Encryption Doodoo
+    #
+    #    Feel free to implement this, but no specification is available.
+    #    """
+    #    raise NotImplementedError
+
+
+    def ctcpMakeReply(self, user, messages):
+        """
+        Send one or more C{extended messages} as a CTCP reply.
+
+        @type messages: a list of extended messages.  An extended
+        message is a (tag, data) tuple, where 'data' may be C{None}.
+        """
+        self.notice(user, ctcpStringify(messages))
+
+    ### client CTCP query commands
+
+    def ctcpMakeQuery(self, user, messages):
+        """
+        Send one or more C{extended messages} as a CTCP query.
+
+        @type messages: a list of extended messages.  An extended
+        message is a (tag, data) tuple, where 'data' may be C{None}.
+        """
+        self.msg(user, ctcpStringify(messages))
+
+    ### Receiving a response to a CTCP query (presumably to one we made)
+    ### You may want to add methods here, or override UnknownReply.
+
+    def ctcpReply(self, user, channel, messages):
+        """
+        Dispatch method for any CTCP replies received.
+        """
+        for m in messages:
+            method = getattr(self, "ctcpReply_%s" % m[0], None)
+            if method:
+                method(user, channel, m[1])
+            else:
+                self.ctcpUnknownReply(user, channel, m[0], m[1])
+
+    def ctcpReply_PING(self, user, channel, data):
+        nick = user.split('!', 1)[0]
+        if (not self._pings) or (not self._pings.has_key((nick, data))):
+            raise IRCBadMessage,\
+                  "Bogus PING response from %s: %s" % (user, data)
+
+        t0 = self._pings[(nick, data)]
+        self.pong(user, time.time() - t0)
+
+    def ctcpUnknownReply(self, user, channel, tag, data):
+        """Called when a fitting ctcpReply_ method is not found.
+
+        XXX: If the client makes arbitrary CTCP queries,
+        this method should probably show the responses to
+        them instead of treating them as anomalies.
+        """
+        log.msg("Unknown CTCP reply from %s: %s %s\n"
+                 % (user, tag, data))
+
+    ### Error handlers
+    ### You may override these with something more appropriate to your UI.
+
+    def badMessage(self, line, excType, excValue, tb):
+        """When I get a message that's so broken I can't use it.
+        """
+        log.msg(line)
+        log.msg(string.join(traceback.format_exception(excType,
+                                                        excValue,
+                                                        tb),''))
+
+    def quirkyMessage(self, s):
+        """This is called when I receive a message which is peculiar,
+        but not wholly indecipherable.
+        """
+        log.msg(s + '\n')
+
+    ### Protocol methods
+
+    def connectionMade(self):
+        self.supported = ServerSupportedFeatures()
+        self._queue = []
+        if self.performLogin:
+            self.register(self.nickname)
+
+    def dataReceived(self, data):
+        basic.LineReceiver.dataReceived(self, data.replace('\r', ''))
+
+    def lineReceived(self, line):
+        line = lowDequote(line)
+        try:
+            prefix, command, params = parsemsg(line)
+            if command in numeric_to_symbolic:
+                command = numeric_to_symbolic[command]
+            self.handleCommand(command, prefix, params)
+        except IRCBadMessage:
+            self.badMessage(line, *sys.exc_info())
+
+
+    def getUserModeParams(self):
+        """
+        Get user modes that require parameters for correct parsing.
+
+        @rtype: C{[str, str]}
+        @return: C{[add, remove]}
+        """
+        return ['', '']
+
+
+    def getChannelModeParams(self):
+        """
+        Get channel modes that require parameters for correct parsing.
+
+        @rtype: C{[str, str]}
+        @return: C{[add, remove]}
+        """
+        # PREFIX modes are treated as "type B" CHANMODES; they always take
+        # a parameter.
+        params = ['', '']
+        prefixes = self.supported.getFeature('PREFIX', {})
+        params[0] = params[1] = ''.join(prefixes.iterkeys())
+
+        chanmodes = self.supported.getFeature('CHANMODES')
+        if chanmodes is not None:
+            params[0] += chanmodes.get('addressModes', '')
+            params[0] += chanmodes.get('param', '')
+            params[1] = params[0]
+            params[0] += chanmodes.get('setParam', '')
+        return params
+
+
+    def handleCommand(self, command, prefix, params):
+        """Determine the function to call for the given command and call
+        it with the given arguments.
+        """
+        method = getattr(self, "irc_%s" % command, None)
+        try:
+            if method is not None:
+                method(prefix, params)
+            else:
+                self.irc_unknown(prefix, command, params)
+        except:
+            log.deferr()
+
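+    # Sketch: replies are dispatched to irc_<COMMAND> methods (numerics are
+    # first mapped through numeric_to_symbolic), so a subclass can handle
+    # further replies simply by defining them.  The parameter positions below
+    # are an assumption about RPL_AWAY, shown for illustration only:
+    #
+    #   def irc_RPL_AWAY(self, prefix, params):
+    #       nick, message = params[1], params[-1]
+    #       log.msg('%s is away: %s' % (nick, message))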
+
+    def __getstate__(self):
+        dct = self.__dict__.copy()
+        dct['dcc_sessions'] = None
+        dct['_pings'] = None
+        return dct
+
+
+def dccParseAddress(address):
+    if '.' in address:
+        pass
+    else:
+        try:
+            address = long(address)
+        except ValueError:
+            raise IRCBadMessage,\
+                  "Indecipherable address %r" % (address,)
+        else:
+            address = (
+                (address >> 24) & 0xFF,
+                (address >> 16) & 0xFF,
+                (address >> 8) & 0xFF,
+                address & 0xFF,
+                )
+            address = '.'.join(map(str,address))
+    return address
+
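+# Sketch: DCC addresses arrive either as a dotted quad or as one packed 32-bit
+# integer in string form; the helper above normalises the latter, e.g.:
+#
+#   dccParseAddress('3232235521')    # -> '192.168.0.1'
+#   dccParseAddress('192.168.0.1')   # passed through unchanged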
+
+class DccFileReceiveBasic(protocol.Protocol, styles.Ephemeral):
+    """Bare protocol to receive a Direct Client Connection SEND stream.
+
+    This does enough to keep the other guy talking, but you'll want to
+    extend my dataReceived method to *do* something with the data I get.
+    """
+
+    bytesReceived = 0
+
+    def __init__(self, resumeOffset=0):
+        self.bytesReceived = resumeOffset
+        self.resume = (resumeOffset != 0)
+
+    def dataReceived(self, data):
+        """Called when data is received.
+
+        Warning: This just acknowledges to the remote host that the
+        data has been received; it doesn't *do* anything with the
+        data, so you'll want to override this.
+        """
+        self.bytesReceived = self.bytesReceived + len(data)
+        self.transport.write(struct.pack('!i', self.bytesReceived))
+
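+# Sketch of the intended extension point: subclass and override dataReceived,
+# keeping the base call so the byte-count acknowledgements still go out.
+# (DccFileReceive further below is the full-featured version of this idea;
+# the file object handling here is illustrative only.)
+#
+#   class DccFileDump(DccFileReceiveBasic):
+#       def __init__(self, fObj, resumeOffset=0):
+#           DccFileReceiveBasic.__init__(self, resumeOffset)
+#           self.fObj = fObj
+#
+#       def dataReceived(self, data):
+#           self.fObj.write(data)
+#           DccFileReceiveBasic.dataReceived(self, data)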
+
+class DccSendProtocol(protocol.Protocol, styles.Ephemeral):
+    """Protocol for an outgoing Direct Client Connection SEND.
+    """
+
+    blocksize = 1024
+    file = None
+    bytesSent = 0
+    completed = 0
+    connected = 0
+
+    def __init__(self, file):
+        if type(file) is types.StringType:
+            self.file = open(file, 'r')
+
+    def connectionMade(self):
+        self.connected = 1
+        self.sendBlock()
+
+    def dataReceived(self, data):
+        # XXX: Do we need to check to see if len(data) != fmtsize?
+
+        # struct.unpack returns a 1-tuple; pull out the single unsigned int.
+        bytesShesGot = struct.unpack("!I", data)[0]
+        if bytesShesGot < self.bytesSent:
+            # Wait for her.
+            # XXX? Add some checks to see if we've stalled out?
+            return
+        elif bytesShesGot > self.bytesSent:
+            # self.transport.log("DCC SEND %s: She says she has %d bytes "
+            #                    "but I've only sent %d.  I'm stopping "
+            #                    "this screwy transfer."
+            #                    % (self.file,
+            #                       bytesShesGot, self.bytesSent))
+            self.transport.loseConnection()
+            return
+
+        self.sendBlock()
+
+    def sendBlock(self):
+        block = self.file.read(self.blocksize)
+        if block:
+            self.transport.write(block)
+            self.bytesSent = self.bytesSent + len(block)
+        else:
+            # Nothing more to send, transfer complete.
+            self.transport.loseConnection()
+            self.completed = 1
+
+    def connectionLost(self, reason):
+        self.connected = 0
+        if hasattr(self.file, "close"):
+            self.file.close()
+
+
+class DccSendFactory(protocol.Factory):
+    protocol = DccSendProtocol
+    def __init__(self, file):
+        self.file = file
+
+    def buildProtocol(self, connection):
+        p = self.protocol(self.file)
+        p.factory = self
+        return p
+
+
+def fileSize(file):
+    """I'll try my damndest to determine the size of this file object.
+    """
+    size = None
+    if hasattr(file, "fileno"):
+        fileno = file.fileno()
+        try:
+            stat_ = os.fstat(fileno)
+            size = stat_[stat.ST_SIZE]
+        except:
+            pass
+        else:
+            return size
+
+    if hasattr(file, "name") and path.exists(file.name):
+        try:
+            size = path.getsize(file.name)
+        except:
+            pass
+        else:
+            return size
+
+    if hasattr(file, "seek") and hasattr(file, "tell"):
+        try:
+            try:
+                file.seek(0, 2)
+                size = file.tell()
+            finally:
+                file.seek(0, 0)
+        except:
+            pass
+        else:
+            return size
+
+    return size
+
+class DccChat(basic.LineReceiver, styles.Ephemeral):
+    """Direct Client Connection protocol type CHAT.
+
+    DCC CHAT is really just your run o' the mill basic.LineReceiver
+    protocol.  This class only varies from that slightly, accepting
+    either LF or CR LF for a line delimiter for incoming messages
+    while always using CR LF for outgoing.
+
+    The lineReceived method implemented here uses the DCC connection's
+    'client' attribute (provided upon construction) to deliver incoming
+    lines from the DCC chat via IRCClient's normal privmsg interface.
+    That's something of a spoof, which you may well want to override.
+    """
+
+    queryData = None
+    delimiter = CR + NL
+    client = None
+    remoteParty = None
+    buffer = ""
+
+    def __init__(self, client, queryData=None):
+        """Initialize a new DCC CHAT session.
+
+        queryData is a 3-tuple of
+        (fromUser, targetUserOrChannel, data)
+        as received by the CTCP query.
+
+        (To be honest, fromUser is the only thing that's currently
+        used here. targetUserOrChannel is potentially useful, while
+        the 'data' argument is solely for informational purposes.)
+        """
+        self.client = client
+        if queryData:
+            self.queryData = queryData
+            self.remoteParty = self.queryData[0]
+
+    def dataReceived(self, data):
+        self.buffer = self.buffer + data
+        lines = string.split(self.buffer, LF)
+        # Put the (possibly empty) element after the last LF back in the
+        # buffer
+        self.buffer = lines.pop()
+
+        for line in lines:
+            if line and line[-1] == CR:
+                line = line[:-1]
+            self.lineReceived(line)
+
+    def lineReceived(self, line):
+        log.msg("DCC CHAT<%s> %s" % (self.remoteParty, line))
+        self.client.privmsg(self.remoteParty,
+                            self.client.nickname, line)
+
+
+class DccChatFactory(protocol.ClientFactory):
+    protocol = DccChat
+    noisy = 0
+    def __init__(self, client, queryData):
+        self.client = client
+        self.queryData = queryData
+
+
+    def buildProtocol(self, addr):
+        p = self.protocol(client=self.client, queryData=self.queryData)
+        p.factory = self
+        return p
+
+
+    def clientConnectionFailed(self, unused_connector, unused_reason):
+        self.client.dcc_sessions.remove(self)
+
+    def clientConnectionLost(self, unused_connector, unused_reason):
+        self.client.dcc_sessions.remove(self)
+
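+# A minimal sketch of how the outgoing side of a DCC CHAT might be started
+# with the factory above.  The host, port and the IRCClient-like 'client'
+# object (assumed to carry a 'dcc_sessions' list, which the factory's
+# connection-lost handlers expect) are assumptions for illustration.
+def _exampleStartDccChat(client, host, port, queryData=None):
+    from twisted.internet import reactor
+    factory = DccChatFactory(client, queryData=queryData)
+    client.dcc_sessions.append(factory)
+    reactor.connectTCP(host, port, factory)
+    return factory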
+
+def dccDescribe(data):
+    """Given the data chunk from a DCC query, return a descriptive string.
+    """
+
+    orig_data = data
+    data = string.split(data)
+    if len(data) < 4:
+        return orig_data
+
+    (dcctype, arg, address, port) = data[:4]
+
+    if '.' in address:
+        pass
+    else:
+        try:
+            address = long(address)
+        except ValueError:
+            pass
+        else:
+            address = (
+                (address >> 24) & 0xFF,
+                (address >> 16) & 0xFF,
+                (address >> 8) & 0xFF,
+                address & 0xFF,
+                )
+            # The mapping to 'int' is to get rid of those accursed
+            # "L"s which python 1.5.2 puts on the end of longs.
+            address = string.join(map(str,map(int,address)), ".")
+
+    if dcctype == 'SEND':
+        filename = arg
+
+        size_txt = ''
+        if len(data) >= 5:
+            try:
+                size = int(data[4])
+                size_txt = ' of size %d bytes' % (size,)
+            except ValueError:
+                pass
+
+        dcc_text = ("SEND for file '%s'%s at host %s, port %s"
+                    % (filename, size_txt, address, port))
+    elif dcctype == 'CHAT':
+        dcc_text = ("CHAT for host %s, port %s"
+                    % (address, port))
+    else:
+        dcc_text = orig_data
+
+    return dcc_text
+
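+# A small worked example of dccDescribe; the packed address 3232235521
+# decodes to 192.168.0.1 and all values are made up for illustration.
+def _exampleDccDescribe():
+    described = dccDescribe("SEND backup.tar.gz 3232235521 1234 5678")
+    assert described == ("SEND for file 'backup.tar.gz' of size 5678 bytes "
+                         "at host 192.168.0.1, port 1234")
+    return described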
+
+class DccFileReceive(DccFileReceiveBasic):
+    """Higher-level coverage for getting a file from DCC SEND.
+
+    I allow you to change the file's name and destination directory.
+    I won't overwrite an existing file unless I've been told it's okay
+    to do so. If passed the resumeOffset keyword argument I will attempt to
+    resume the transfer from that byte offset.
+
+    XXX: I need to let the client know when I am finished.
+    XXX: I need to decide how to keep a progress indicator updated.
+    XXX: Client needs a way to tell me "Do not finish until I say so."
+    XXX: I need to make sure the client understands if the file cannot be written.
+    """
+
+    filename = 'dcc'
+    fileSize = -1
+    destDir = '.'
+    overwrite = 0
+    fromUser = None
+    queryData = None
+
+    def __init__(self, filename, fileSize=-1, queryData=None,
+                 destDir='.', resumeOffset=0):
+        DccFileReceiveBasic.__init__(self, resumeOffset=resumeOffset)
+        self.filename = filename
+        self.destDir = destDir
+        self.fileSize = fileSize
+
+        if queryData:
+            self.queryData = queryData
+            self.fromUser = self.queryData[0]
+
+    def set_directory(self, directory):
+        """Set the directory where the downloaded file will be placed.
+
+        May raise OSError if the supplied directory path is not suitable.
+        """
+        if not path.exists(directory):
+            raise OSError(errno.ENOENT, "You see no directory there.",
+                          directory)
+        if not path.isdir(directory):
+            raise OSError(errno.ENOTDIR, "You cannot put a file into "
+                          "something which is not a directory.",
+                          directory)
+        if not os.access(directory, os.X_OK | os.W_OK):
+            raise OSError(errno.EACCES,
+                          "This directory is too hard to write in to.",
+                          directory)
+        self.destDir = directory
+
+    def set_filename(self, filename):
+        """Change the name of the file being transferred.
+
+        This replaces the file name provided by the sender.
+        """
+        self.filename = filename
+
+    def set_overwrite(self, boolean):
+        """May I overwrite existing files?
+        """
+        self.overwrite = boolean
+
+
+    # Protocol-level methods.
+
+    def connectionMade(self):
+        dst = path.abspath(path.join(self.destDir,self.filename))
+        exists = path.exists(dst)
+        if self.resume and exists:
+            # I have been told I want to resume, and a file already
+            # exists - Here we go
+            self.file = open(dst, 'ab')
+            log.msg("Attempting to resume %s - starting from %d bytes" %
+                    (self.file, self.file.tell()))
+        elif self.overwrite or not exists:
+            self.file = open(dst, 'wb')
+        else:
+            raise OSError(errno.EEXIST,
+                          "There's a file in the way.  "
+                          "Perhaps that's why you cannot open it.",
+                          dst)
+
+    def dataReceived(self, data):
+        self.file.write(data)
+        DccFileReceiveBasic.dataReceived(self, data)
+
+        # XXX: update a progress indicator here?
+
+    def connectionLost(self, reason):
+        """When the connection is lost, I close the file.
+        """
+        self.connected = 0
+        logmsg = ("%s closed." % (self,))
+        if self.fileSize > 0:
+            logmsg = ("%s  %d/%d bytes received"
+                      % (logmsg, self.bytesReceived, self.fileSize))
+            if self.bytesReceived == self.fileSize:
+                pass # Hooray!
+            elif self.bytesReceived < self.fileSize:
+                logmsg = ("%s (Warning: %d bytes short)"
+                          % (logmsg, self.fileSize - self.bytesReceived))
+            else:
+                logmsg = ("%s (file larger than expected)"
+                          % (logmsg,))
+        else:
+            logmsg = ("%s  %d bytes received"
+                      % (logmsg, self.bytesReceived))
+
+        if hasattr(self, 'file'):
+            logmsg = "%s and written to %s.\n" % (logmsg, self.file.name)
+            if hasattr(self.file, 'close'): self.file.close()
+
+        # self.transport.log(logmsg)
+
+    def __str__(self):
+        if not self.connected:
+            return "<Unconnected DccFileReceive object at %x>" % (id(self),)
+        from_ = self.transport.getPeer()
+        if self.fromUser:
+            from_ = "%s (%s)" % (self.fromUser, from_)
+
+        s = ("DCC transfer of '%s' from %s" % (self.filename, from_))
+        return s
+
+    def __repr__(self):
+        s = ("<%s at %x: GET %s>"
+             % (self.__class__, id(self), self.filename))
+        return s
+
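+# A minimal sketch of a client factory around DccFileReceive, which is a bare
+# Protocol.  The filename, size and destination directory would normally come
+# from the parsed CTCP DCC SEND query; connecting the factory with
+# reactor.connectTCP(host, port, ...) then writes the incoming file to destDir.
+# All names here are for illustration only.
+class _ExampleDccReceiveFactory(protocol.ClientFactory):
+    def __init__(self, filename, fileSize=-1, destDir='.'):
+        self.filename = filename
+        self.fileSize = fileSize
+        self.destDir = destDir
+
+    def buildProtocol(self, addr):
+        p = DccFileReceive(self.filename, self.fileSize, destDir=self.destDir)
+        p.factory = self
+        return p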
+
+# CTCP constants and helper functions
+
+X_DELIM = chr(001)
+
+def ctcpExtract(message):
+    """
+    Extract CTCP data from a string.
+
+    @return: A C{dict} containing two keys:
+       - C{'extended'}: A list of CTCP (tag, data) tuples.
+       - C{'normal'}: A list of strings which were not inside a CTCP delimiter.
+    """
+    extended_messages = []
+    normal_messages = []
+    retval = {'extended': extended_messages,
+              'normal': normal_messages }
+
+    messages = string.split(message, X_DELIM)
+    odd = 0
+
+    # X1 extended data X2 normal data X3 extended data X4 normal...
+    while messages:
+        if odd:
+            extended_messages.append(messages.pop(0))
+        else:
+            normal_messages.append(messages.pop(0))
+        odd = not odd
+
+    extended_messages[:] = filter(None, extended_messages)
+    normal_messages[:] = filter(None, normal_messages)
+
+    extended_messages[:] = map(ctcpDequote, extended_messages)
+    for i in xrange(len(extended_messages)):
+        m = string.split(extended_messages[i], SPC, 1)
+        tag = m[0]
+        if len(m) > 1:
+            data = m[1]
+        else:
+            data = None
+
+        extended_messages[i] = (tag, data)
+
+    return retval
+
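+# A small worked example of ctcpExtract on a PRIVMSG payload that mixes plain
+# text with a CTCP ACTION; the sample message is made up for illustration.
+def _exampleCtcpExtract():
+    message = "hello" + X_DELIM + "ACTION waves" + X_DELIM + "bye"
+    parts = ctcpExtract(message)
+    assert parts['extended'] == [('ACTION', 'waves')]
+    assert parts['normal'] == ['hello', 'bye']
+    return parts
+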
+# CTCP escaping
+
+M_QUOTE = chr(020)
+
+mQuoteTable = {
+    NUL: M_QUOTE + '0',
+    NL: M_QUOTE + 'n',
+    CR: M_QUOTE + 'r',
+    M_QUOTE: M_QUOTE + M_QUOTE
+    }
+
+mDequoteTable = {}
+for k, v in mQuoteTable.items():
+    mDequoteTable[v[-1]] = k
+del k, v
+
+mEscape_re = re.compile('%s.' % (re.escape(M_QUOTE),), re.DOTALL)
+
+def lowQuote(s):
+    for c in (M_QUOTE, NUL, NL, CR):
+        s = string.replace(s, c, mQuoteTable[c])
+    return s
+
+def lowDequote(s):
+    def sub(matchobj, mDequoteTable=mDequoteTable):
+        s = matchobj.group()[1]
+        try:
+            s = mDequoteTable[s]
+        except KeyError:
+            s = s
+        return s
+
+    return mEscape_re.sub(sub, s)
+
+X_QUOTE = '\\'
+
+xQuoteTable = {
+    X_DELIM: X_QUOTE + 'a',
+    X_QUOTE: X_QUOTE + X_QUOTE
+    }
+
+xDequoteTable = {}
+
+for k, v in xQuoteTable.items():
+    xDequoteTable[v[-1]] = k
+
+xEscape_re = re.compile('%s.' % (re.escape(X_QUOTE),), re.DOTALL)
+
+def ctcpQuote(s):
+    for c in (X_QUOTE, X_DELIM):
+        s = string.replace(s, c, xQuoteTable[c])
+    return s
+
+def ctcpDequote(s):
+    def sub(matchobj, xDequoteTable=xDequoteTable):
+        s = matchobj.group()[1]
+        try:
+            s = xDequoteTable[s]
+        except KeyError:
+            s = s
+        return s
+
+    return xEscape_re.sub(sub, s)
+
+def ctcpStringify(messages):
+    """
+    @param messages: a list of extended messages.  An extended
+    message is a (tag, data) tuple, where 'data' may be C{None}, a
+    string, or a list of strings to be joined with whitespace.
+
+    @returns: String
+    """
+    coded_messages = []
+    for (tag, data) in messages:
+        if data:
+            if not isinstance(data, types.StringType):
+                try:
+                    # data as list-of-strings
+                    data = " ".join(map(str, data))
+                except TypeError:
+                    # No?  Then use its %s representation.
+                    pass
+            m = "%s %s" % (tag, data)
+        else:
+            m = str(tag)
+        m = ctcpQuote(m)
+        m = "%s%s%s" % (X_DELIM, m, X_DELIM)
+        coded_messages.append(m)
+
+    line = string.join(coded_messages, '')
+    return line
+
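+# A small worked example of the two quoting layers defined above; the tag and
+# data are made up for illustration.
+def _exampleQuoting():
+    # Low-level quoting makes embedded CR/LF safe for the IRC transport and
+    # round-trips through lowDequote.
+    text = 'line one' + NL + 'line two'
+    assert lowDequote(lowQuote(text)) == text
+    # ctcpStringify wraps each (tag, data) pair in X_DELIM markers.
+    wire = ctcpStringify([('PING', '12 34')])
+    assert wire == X_DELIM + 'PING 12 34' + X_DELIM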
+
+# Constants (from RFC 2812)
+RPL_WELCOME = '001'
+RPL_YOURHOST = '002'
+RPL_CREATED = '003'
+RPL_MYINFO = '004'
+RPL_ISUPPORT = '005'
+RPL_BOUNCE = '010'
+RPL_USERHOST = '302'
+RPL_ISON = '303'
+RPL_AWAY = '301'
+RPL_UNAWAY = '305'
+RPL_NOWAWAY = '306'
+RPL_WHOISUSER = '311'
+RPL_WHOISSERVER = '312'
+RPL_WHOISOPERATOR = '313'
+RPL_WHOISIDLE = '317'
+RPL_ENDOFWHOIS = '318'
+RPL_WHOISCHANNELS = '319'
+RPL_WHOWASUSER = '314'
+RPL_ENDOFWHOWAS = '369'
+RPL_LISTSTART = '321'
+RPL_LIST = '322'
+RPL_LISTEND = '323'
+RPL_UNIQOPIS = '325'
+RPL_CHANNELMODEIS = '324'
+RPL_NOTOPIC = '331'
+RPL_TOPIC = '332'
+RPL_INVITING = '341'
+RPL_SUMMONING = '342'
+RPL_INVITELIST = '346'
+RPL_ENDOFINVITELIST = '347'
+RPL_EXCEPTLIST = '348'
+RPL_ENDOFEXCEPTLIST = '349'
+RPL_VERSION = '351'
+RPL_WHOREPLY = '352'
+RPL_ENDOFWHO = '315'
+RPL_NAMREPLY = '353'
+RPL_ENDOFNAMES = '366'
+RPL_LINKS = '364'
+RPL_ENDOFLINKS = '365'
+RPL_BANLIST = '367'
+RPL_ENDOFBANLIST = '368'
+RPL_INFO = '371'
+RPL_ENDOFINFO = '374'
+RPL_MOTDSTART = '375'
+RPL_MOTD = '372'
+RPL_ENDOFMOTD = '376'
+RPL_YOUREOPER = '381'
+RPL_REHASHING = '382'
+RPL_YOURESERVICE = '383'
+RPL_TIME = '391'
+RPL_USERSSTART = '392'
+RPL_USERS = '393'
+RPL_ENDOFUSERS = '394'
+RPL_NOUSERS = '395'
+RPL_TRACELINK = '200'
+RPL_TRACECONNECTING = '201'
+RPL_TRACEHANDSHAKE = '202'
+RPL_TRACEUNKNOWN = '203'
+RPL_TRACEOPERATOR = '204'
+RPL_TRACEUSER = '205'
+RPL_TRACESERVER = '206'
+RPL_TRACESERVICE = '207'
+RPL_TRACENEWTYPE = '208'
+RPL_TRACECLASS = '209'
+RPL_TRACERECONNECT = '210'
+RPL_TRACELOG = '261'
+RPL_TRACEEND = '262'
+RPL_STATSLINKINFO = '211'
+RPL_STATSCOMMANDS = '212'
+RPL_ENDOFSTATS = '219'
+RPL_STATSUPTIME = '242'
+RPL_STATSOLINE = '243'
+RPL_UMODEIS = '221'
+RPL_SERVLIST = '234'
+RPL_SERVLISTEND = '235'
+RPL_LUSERCLIENT = '251'
+RPL_LUSEROP = '252'
+RPL_LUSERUNKNOWN = '253'
+RPL_LUSERCHANNELS = '254'
+RPL_LUSERME = '255'
+RPL_ADMINME = '256'
+# RFC 2812 names 257 and 258 RPL_ADMINLOC1 and RPL_ADMINLOC2.
+RPL_ADMINLOC1 = '257'
+RPL_ADMINLOC2 = '258'
+RPL_ADMINEMAIL = '259'
+RPL_TRYAGAIN = '263'
+ERR_NOSUCHNICK = '401'
+ERR_NOSUCHSERVER = '402'
+ERR_NOSUCHCHANNEL = '403'
+ERR_CANNOTSENDTOCHAN = '404'
+ERR_TOOMANYCHANNELS = '405'
+ERR_WASNOSUCHNICK = '406'
+ERR_TOOMANYTARGETS = '407'
+ERR_NOSUCHSERVICE = '408'
+ERR_NOORIGIN = '409'
+ERR_NORECIPIENT = '411'
+ERR_NOTEXTTOSEND = '412'
+ERR_NOTOPLEVEL = '413'
+ERR_WILDTOPLEVEL = '414'
+ERR_BADMASK = '415'
+ERR_UNKNOWNCOMMAND = '421'
+ERR_NOMOTD = '422'
+ERR_NOADMININFO = '423'
+ERR_FILEERROR = '424'
+ERR_NONICKNAMEGIVEN = '431'
+ERR_ERRONEUSNICKNAME = '432'
+ERR_NICKNAMEINUSE = '433'
+ERR_NICKCOLLISION = '436'
+ERR_UNAVAILRESOURCE = '437'
+ERR_USERNOTINCHANNEL = '441'
+ERR_NOTONCHANNEL = '442'
+ERR_USERONCHANNEL = '443'
+ERR_NOLOGIN = '444'
+ERR_SUMMONDISABLED = '445'
+ERR_USERSDISABLED = '446'
+ERR_NOTREGISTERED = '451'
+ERR_NEEDMOREPARAMS = '461'
+ERR_ALREADYREGISTRED = '462'
+ERR_NOPERMFORHOST = '463'
+ERR_PASSWDMISMATCH = '464'
+ERR_YOUREBANNEDCREEP = '465'
+ERR_YOUWILLBEBANNED = '466'
+ERR_KEYSET = '467'
+ERR_CHANNELISFULL = '471'
+ERR_UNKNOWNMODE = '472'
+ERR_INVITEONLYCHAN = '473'
+ERR_BANNEDFROMCHAN = '474'
+ERR_BADCHANNELKEY = '475'
+ERR_BADCHANMASK = '476'
+ERR_NOCHANMODES = '477'
+ERR_BANLISTFULL = '478'
+ERR_NOPRIVILEGES = '481'
+ERR_CHANOPRIVSNEEDED = '482'
+ERR_CANTKILLSERVER = '483'
+ERR_RESTRICTED = '484'
+ERR_UNIQOPPRIVSNEEDED = '485'
+ERR_NOOPERHOST = '491'
+ERR_NOSERVICEHOST = '492'
+ERR_UMODEUNKNOWNFLAG = '501'
+ERR_USERSDONTMATCH = '502'
+
+# And hey, as long as the strings are already intern'd...
+symbolic_to_numeric = {
+    "RPL_WELCOME": '001',
+    "RPL_YOURHOST": '002',
+    "RPL_CREATED": '003',
+    "RPL_MYINFO": '004',
+    "RPL_ISUPPORT": '005',
+    "RPL_BOUNCE": '010',
+    "RPL_USERHOST": '302',
+    "RPL_ISON": '303',
+    "RPL_AWAY": '301',
+    "RPL_UNAWAY": '305',
+    "RPL_NOWAWAY": '306',
+    "RPL_WHOISUSER": '311',
+    "RPL_WHOISSERVER": '312',
+    "RPL_WHOISOPERATOR": '313',
+    "RPL_WHOISIDLE": '317',
+    "RPL_ENDOFWHOIS": '318',
+    "RPL_WHOISCHANNELS": '319',
+    "RPL_WHOWASUSER": '314',
+    "RPL_ENDOFWHOWAS": '369',
+    "RPL_LISTSTART": '321',
+    "RPL_LIST": '322',
+    "RPL_LISTEND": '323',
+    "RPL_UNIQOPIS": '325',
+    "RPL_CHANNELMODEIS": '324',
+    "RPL_NOTOPIC": '331',
+    "RPL_TOPIC": '332',
+    "RPL_INVITING": '341',
+    "RPL_SUMMONING": '342',
+    "RPL_INVITELIST": '346',
+    "RPL_ENDOFINVITELIST": '347',
+    "RPL_EXCEPTLIST": '348',
+    "RPL_ENDOFEXCEPTLIST": '349',
+    "RPL_VERSION": '351',
+    "RPL_WHOREPLY": '352',
+    "RPL_ENDOFWHO": '315',
+    "RPL_NAMREPLY": '353',
+    "RPL_ENDOFNAMES": '366',
+    "RPL_LINKS": '364',
+    "RPL_ENDOFLINKS": '365',
+    "RPL_BANLIST": '367',
+    "RPL_ENDOFBANLIST": '368',
+    "RPL_INFO": '371',
+    "RPL_ENDOFINFO": '374',
+    "RPL_MOTDSTART": '375',
+    "RPL_MOTD": '372',
+    "RPL_ENDOFMOTD": '376',
+    "RPL_YOUREOPER": '381',
+    "RPL_REHASHING": '382',
+    "RPL_YOURESERVICE": '383',
+    "RPL_TIME": '391',
+    "RPL_USERSSTART": '392',
+    "RPL_USERS": '393',
+    "RPL_ENDOFUSERS": '394',
+    "RPL_NOUSERS": '395',
+    "RPL_TRACELINK": '200',
+    "RPL_TRACECONNECTING": '201',
+    "RPL_TRACEHANDSHAKE": '202',
+    "RPL_TRACEUNKNOWN": '203',
+    "RPL_TRACEOPERATOR": '204',
+    "RPL_TRACEUSER": '205',
+    "RPL_TRACESERVER": '206',
+    "RPL_TRACESERVICE": '207',
+    "RPL_TRACENEWTYPE": '208',
+    "RPL_TRACECLASS": '209',
+    "RPL_TRACERECONNECT": '210',
+    "RPL_TRACELOG": '261',
+    "RPL_TRACEEND": '262',
+    "RPL_STATSLINKINFO": '211',
+    "RPL_STATSCOMMANDS": '212',
+    "RPL_ENDOFSTATS": '219',
+    "RPL_STATSUPTIME": '242',
+    "RPL_STATSOLINE": '243',
+    "RPL_UMODEIS": '221',
+    "RPL_SERVLIST": '234',
+    "RPL_SERVLISTEND": '235',
+    "RPL_LUSERCLIENT": '251',
+    "RPL_LUSEROP": '252',
+    "RPL_LUSERUNKNOWN": '253',
+    "RPL_LUSERCHANNELS": '254',
+    "RPL_LUSERME": '255',
+    "RPL_ADMINME": '256',
+    "RPL_ADMINLOC": '257',
+    "RPL_ADMINLOC": '258',
+    "RPL_ADMINEMAIL": '259',
+    "RPL_TRYAGAIN": '263',
+    "ERR_NOSUCHNICK": '401',
+    "ERR_NOSUCHSERVER": '402',
+    "ERR_NOSUCHCHANNEL": '403',
+    "ERR_CANNOTSENDTOCHAN": '404',
+    "ERR_TOOMANYCHANNELS": '405',
+    "ERR_WASNOSUCHNICK": '406',
+    "ERR_TOOMANYTARGETS": '407',
+    "ERR_NOSUCHSERVICE": '408',
+    "ERR_NOORIGIN": '409',
+    "ERR_NORECIPIENT": '411',
+    "ERR_NOTEXTTOSEND": '412',
+    "ERR_NOTOPLEVEL": '413',
+    "ERR_WILDTOPLEVEL": '414',
+    "ERR_BADMASK": '415',
+    "ERR_UNKNOWNCOMMAND": '421',
+    "ERR_NOMOTD": '422',
+    "ERR_NOADMININFO": '423',
+    "ERR_FILEERROR": '424',
+    "ERR_NONICKNAMEGIVEN": '431',
+    "ERR_ERRONEUSNICKNAME": '432',
+    "ERR_NICKNAMEINUSE": '433',
+    "ERR_NICKCOLLISION": '436',
+    "ERR_UNAVAILRESOURCE": '437',
+    "ERR_USERNOTINCHANNEL": '441',
+    "ERR_NOTONCHANNEL": '442',
+    "ERR_USERONCHANNEL": '443',
+    "ERR_NOLOGIN": '444',
+    "ERR_SUMMONDISABLED": '445',
+    "ERR_USERSDISABLED": '446',
+    "ERR_NOTREGISTERED": '451',
+    "ERR_NEEDMOREPARAMS": '461',
+    "ERR_ALREADYREGISTRED": '462',
+    "ERR_NOPERMFORHOST": '463',
+    "ERR_PASSWDMISMATCH": '464',
+    "ERR_YOUREBANNEDCREEP": '465',
+    "ERR_YOUWILLBEBANNED": '466',
+    "ERR_KEYSET": '467',
+    "ERR_CHANNELISFULL": '471',
+    "ERR_UNKNOWNMODE": '472',
+    "ERR_INVITEONLYCHAN": '473',
+    "ERR_BANNEDFROMCHAN": '474',
+    "ERR_BADCHANNELKEY": '475',
+    "ERR_BADCHANMASK": '476',
+    "ERR_NOCHANMODES": '477',
+    "ERR_BANLISTFULL": '478',
+    "ERR_NOPRIVILEGES": '481',
+    "ERR_CHANOPRIVSNEEDED": '482',
+    "ERR_CANTKILLSERVER": '483',
+    "ERR_RESTRICTED": '484',
+    "ERR_UNIQOPPRIVSNEEDED": '485',
+    "ERR_NOOPERHOST": '491',
+    "ERR_NOSERVICEHOST": '492',
+    "ERR_UMODEUNKNOWNFLAG": '501',
+    "ERR_USERSDONTMATCH": '502',
+}
+
+numeric_to_symbolic = {}
+for k, v in symbolic_to_numeric.items():
+    numeric_to_symbolic[v] = k
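+
+# The two tables above are simple inverses of one another, e.g.
+# numeric_to_symbolic['001'] == 'RPL_WELCOME' and
+# symbolic_to_numeric['ERR_NICKNAMEINUSE'] == '433'.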
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/__init__.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/__init__.py
new file mode 100644
index 0000000..ad95b68
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/__init__.py
@@ -0,0 +1,8 @@
+# -*- test-case-name: twisted.words.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+Twisted Jabber: Jabber Protocol Helpers
+"""
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/client.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/client.py
new file mode 100644
index 0000000..2a37bcb
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/client.py
@@ -0,0 +1,368 @@
+# -*- test-case-name: twisted.words.test.test_jabberclient -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.words.xish import domish, xpath, utility
+from twisted.words.protocols.jabber import xmlstream, sasl, error
+from twisted.words.protocols.jabber.jid import JID
+
+NS_XMPP_STREAMS = 'urn:ietf:params:xml:ns:xmpp-streams'
+NS_XMPP_BIND = 'urn:ietf:params:xml:ns:xmpp-bind'
+NS_XMPP_SESSION = 'urn:ietf:params:xml:ns:xmpp-session'
+NS_IQ_AUTH_FEATURE = 'http://jabber.org/features/iq-auth'
+
+DigestAuthQry = xpath.internQuery("/iq/query/digest")
+PlaintextAuthQry = xpath.internQuery("/iq/query/password")
+
+def basicClientFactory(jid, secret):
+    a = BasicAuthenticator(jid, secret)
+    return xmlstream.XmlStreamFactory(a)
+
+class IQ(domish.Element):
+    """
+    Wrapper for a Info/Query packet.
+
+    This provides the necessary functionality to send IQs and get notified when
+    a result comes back. It's a subclass of L{domish.Element}, so you can use
+    the standard DOM manipulation calls to add data to the outbound request.
+
+    @type callbacks: L{utility.CallbackList}
+    @cvar callbacks: Callback list to be notified when response comes back
+
+    """
+    def __init__(self, xmlstream, type = "set"):
+        """
+        @type xmlstream: L{xmlstream.XmlStream}
+        @param xmlstream: XmlStream to use for transmission of this IQ
+
+        @type type: C{str}
+        @param type: IQ type identifier ('get' or 'set')
+        """
+
+        domish.Element.__init__(self, ("jabber:client", "iq"))
+        self.addUniqueId()
+        self["type"] = type
+        self._xmlstream = xmlstream
+        self.callbacks = utility.CallbackList()
+
+    def addCallback(self, fn, *args, **kwargs):
+        """
+        Register a callback for notification when the IQ result is available.
+        """
+
+        self.callbacks.addCallback(True, fn, *args, **kwargs)
+
+    def send(self, to = None):
+        """
+        Call this method to send this IQ request via the associated XmlStream.
+
+        @param to: Jabber ID of the entity to send the request to
+        @type to: C{str}
+
+        @returns: Callback list for this IQ. Any callbacks added to this list
+                  will be fired when the result comes back.
+        """
+        if to != None:
+            self["to"] = to
+        self._xmlstream.addOnetimeObserver("/iq[@id='%s']" % self["id"], \
+                                                             self._resultEvent)
+        self._xmlstream.send(self)
+
+    def _resultEvent(self, iq):
+        self.callbacks.callback(iq)
+        self.callbacks = None
+
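+# A minimal sketch of how the IQ wrapper above might be used: build a query,
+# register a callback and send it over an already initialized XmlStream.  The
+# namespace, target JID and handler are assumptions for illustration.
+def _exampleVersionQuery(xs, to):
+    def gotResult(resultIq):
+        pass  # inspect resultIq here
+    iq = IQ(xs, "get")
+    iq.addElement(("jabber:iq:version", "query"))
+    iq.addCallback(gotResult)
+    iq.send(to=to)
+    return iq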
+
+
+class IQAuthInitializer(object):
+    """
+    Non-SASL Authentication initializer for the initiating entity.
+
+    This protocol is defined in
+    U{JEP-0078<http://www.jabber.org/jeps/jep-0078.html>} and mainly serves for
+    compatibility with pre-XMPP-1.0 server implementations.
+    """
+
+    INVALID_USER_EVENT    = "//event/client/basicauth/invaliduser"
+    AUTH_FAILED_EVENT     = "//event/client/basicauth/authfailed"
+
+    def __init__(self, xs):
+        self.xmlstream = xs
+
+
+    def initialize(self):
+        # Send request for auth fields
+        iq = xmlstream.IQ(self.xmlstream, "get")
+        iq.addElement(("jabber:iq:auth", "query"))
+        jid = self.xmlstream.authenticator.jid
+        iq.query.addElement("username", content = jid.user)
+
+        d = iq.send()
+        d.addCallbacks(self._cbAuthQuery, self._ebAuthQuery)
+        return d
+
+
+    def _cbAuthQuery(self, iq):
+        jid = self.xmlstream.authenticator.jid
+        password = self.xmlstream.authenticator.password
+
+        # Construct auth request
+        reply = xmlstream.IQ(self.xmlstream, "set")
+        reply.addElement(("jabber:iq:auth", "query"))
+        reply.query.addElement("username", content = jid.user)
+        reply.query.addElement("resource", content = jid.resource)
+
+        # Prefer digest over plaintext
+        if DigestAuthQry.matches(iq):
+            digest = xmlstream.hashPassword(self.xmlstream.sid, unicode(password))
+            reply.query.addElement("digest", content = digest)
+        else:
+            reply.query.addElement("password", content = password)
+
+        d = reply.send()
+        d.addCallbacks(self._cbAuth, self._ebAuth)
+        return d
+
+
+    def _ebAuthQuery(self, failure):
+        failure.trap(error.StanzaError)
+        e = failure.value
+        if e.condition == 'not-authorized':
+            self.xmlstream.dispatch(e.stanza, self.INVALID_USER_EVENT)
+        else:
+            self.xmlstream.dispatch(e.stanza, self.AUTH_FAILED_EVENT)
+
+        return failure
+
+
+    def _cbAuth(self, iq):
+        pass
+
+
+    def _ebAuth(self, failure):
+        failure.trap(error.StanzaError)
+        self.xmlstream.dispatch(failure.value.stanza, self.AUTH_FAILED_EVENT)
+        return failure
+
+
+
+class BasicAuthenticator(xmlstream.ConnectAuthenticator):
+    """
+    Authenticates an XmlStream against a Jabber server as a Client.
+
+    This only implements non-SASL authentication, per
+    U{JEP-0078<http://www.jabber.org/jeps/jep-0078.html>}. Additionally, this
+    authenticator provides the ability to perform inline registration, per
+    U{JEP-0077<http://www.jabber.org/jeps/jep-0077.html>}.
+
+    Under normal circumstances, the BasicAuthenticator generates the
+    L{xmlstream.STREAM_AUTHD_EVENT} once the stream has authenticated. However,
+    it can also generate other events, such as:
+      - L{INVALID_USER_EVENT} : Authentication failed, due to invalid username
+      - L{AUTH_FAILED_EVENT} : Authentication failed, due to invalid password
+      - L{REGISTER_FAILED_EVENT} : Registration failed
+
+    If authentication fails for any reason, you can attempt to register by
+    calling the L{registerAccount} method. If the registration succeeds, a
+    L{xmlstream.STREAM_AUTHD_EVENT} will be fired. Otherwise, one of the above
+    errors will be generated (again).
+    """
+
+    namespace = "jabber:client"
+
+    INVALID_USER_EVENT    = IQAuthInitializer.INVALID_USER_EVENT
+    AUTH_FAILED_EVENT     = IQAuthInitializer.AUTH_FAILED_EVENT
+    REGISTER_FAILED_EVENT = "//event/client/basicauth/registerfailed"
+
+    def __init__(self, jid, password):
+        xmlstream.ConnectAuthenticator.__init__(self, jid.host)
+        self.jid = jid
+        self.password = password
+
+    def associateWithStream(self, xs):
+        xs.version = (0, 0)
+        xmlstream.ConnectAuthenticator.associateWithStream(self, xs)
+
+        inits = [ (xmlstream.TLSInitiatingInitializer, False),
+                  (IQAuthInitializer, True),
+                ]
+
+        for initClass, required in inits:
+            init = initClass(xs)
+            init.required = required
+            xs.initializers.append(init)
+
+    # TODO: move registration into an Initializer?
+
+    def registerAccount(self, username = None, password = None):
+        if username:
+            self.jid.user = username
+        if password:
+            self.password = password
+
+        iq = IQ(self.xmlstream, "set")
+        iq.addElement(("jabber:iq:register", "query"))
+        iq.query.addElement("username", content = self.jid.user)
+        iq.query.addElement("password", content = self.password)
+
+        iq.addCallback(self._registerResultEvent)
+
+        iq.send()
+
+    def _registerResultEvent(self, iq):
+        if iq["type"] == "result":
+            # Registration succeeded -- go ahead and auth
+            self.streamStarted()
+        else:
+            # Registration failed
+            self.xmlstream.dispatch(iq, self.REGISTER_FAILED_EVENT)
+
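+# A minimal sketch of the registration fallback described in the docstring
+# above: if the server reports an unknown user, retry with in-band (JEP-0077)
+# registration.  The JID and password are assumptions for illustration.
+def _exampleRegisterOnInvalidUser(jid, password):
+    a = BasicAuthenticator(jid, password)
+    factory = xmlstream.XmlStreamFactory(a)
+    factory.addBootstrap(BasicAuthenticator.INVALID_USER_EVENT,
+                         lambda _: a.registerAccount())
+    return factory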
+
+
+class CheckVersionInitializer(object):
+    """
+    Initializer that checks if the minimum common stream version number is 1.0.
+    """
+
+    def __init__(self, xs):
+        self.xmlstream = xs
+
+
+    def initialize(self):
+        if self.xmlstream.version < (1, 0):
+            raise error.StreamError('unsupported-version')
+
+
+
+class BindInitializer(xmlstream.BaseFeatureInitiatingInitializer):
+    """
+    Initializer that implements Resource Binding for the initiating entity.
+
+    This protocol is documented in U{RFC 3920, section
+    7<http://www.xmpp.org/specs/rfc3920.html#bind>}.
+    """
+
+    feature = (NS_XMPP_BIND, 'bind')
+
+    def start(self):
+        iq = xmlstream.IQ(self.xmlstream, 'set')
+        bind = iq.addElement((NS_XMPP_BIND, 'bind'))
+        resource = self.xmlstream.authenticator.jid.resource
+        if resource:
+            bind.addElement('resource', content=resource)
+        d = iq.send()
+        d.addCallback(self.onBind)
+        return d
+
+
+    def onBind(self, iq):
+        if iq.bind:
+            self.xmlstream.authenticator.jid = JID(unicode(iq.bind.jid))
+
+
+
+class SessionInitializer(xmlstream.BaseFeatureInitiatingInitializer):
+    """
+    Initializer that implements session establishment for the initiating
+    entity.
+
+    This protocol is defined in U{RFC 3921, section
+    3<http://www.xmpp.org/specs/rfc3921.html#session>}.
+    """
+
+    feature = (NS_XMPP_SESSION, 'session')
+
+    def start(self):
+        iq = xmlstream.IQ(self.xmlstream, 'set')
+        session = iq.addElement((NS_XMPP_SESSION, 'session'))
+        return iq.send()
+
+
+
+def XMPPClientFactory(jid, password):
+    """
+    Client factory for XMPP 1.0 (only).
+
+    This returns a L{xmlstream.XmlStreamFactory} with an L{XMPPAuthenticator}
+    object to perform the stream initialization steps (such as authentication).
+
+    @see: The notes at L{XMPPAuthenticator} describe how the L{jid} and
+    L{password} parameters are to be used.
+
+    @param jid: Jabber ID to connect with.
+    @type jid: L{jid.JID}
+    @param password: password to authenticate with.
+    @type password: C{unicode}
+    @return: XML stream factory.
+    @rtype: L{xmlstream.XmlStreamFactory}
+    """
+    a = XMPPAuthenticator(jid, password)
+    return xmlstream.XmlStreamFactory(a)
+
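+# A minimal sketch of a client connection built around XMPPClientFactory.  The
+# JID, password, server address and the onAuthenticated handler are
+# assumptions for illustration.
+def _exampleConnect(reactor, myJid, secret, host='xmpp.example.org'):
+    def onAuthenticated(xs):
+        pass  # start exchanging stanzas here
+    factory = XMPPClientFactory(JID(myJid), secret)
+    factory.addBootstrap(xmlstream.STREAM_AUTHD_EVENT, onAuthenticated)
+    return reactor.connectTCP(host, 5222, factory)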
+
+
+class XMPPAuthenticator(xmlstream.ConnectAuthenticator):
+    """
+    Initializes an XmlStream connecting to an XMPP server as a Client.
+
+    This authenticator performs the initialization steps needed to start
+    exchanging XML stanzas with an XMPP server as an XMPP client. It checks if
+    the server advertises XML stream version 1.0, negotiates TLS (when
+    available), performs SASL authentication, binds a resource and establishes
+    a session.
+
+    Upon successful stream initialization, the L{xmlstream.STREAM_AUTHD_EVENT}
+    event will be dispatched through the XML stream object. Otherwise, the
+    L{xmlstream.INIT_FAILED_EVENT} event will be dispatched with a failure
+    object.
+
+    After inspection of the failure, initialization can then be restarted by
+    calling L{initializeStream}. For example, in case of authentication
+    failure, a user may be given the opportunity to input the correct password.
+    By setting the L{password} instance variable and restarting initialization,
+    the stream authentication step is then retried, and subsequent steps are
+    performed if successful.
+
+    @ivar jid: Jabber ID to authenticate with. This may contain a resource
+               part, as a suggestion to the server for resource binding. A
+               server may override this, though. If the resource part is left
+               off, the server will generate a unique resource identifier.
+               The server will always return the full Jabber ID in the
+               resource binding step, and this is stored in this instance
+               variable.
+    @type jid: L{jid.JID}
+    @ivar password: password to be used during SASL authentication.
+    @type password: C{unicode}
+    """
+
+    namespace = 'jabber:client'
+
+    def __init__(self, jid, password):
+        xmlstream.ConnectAuthenticator.__init__(self, jid.host)
+        self.jid = jid
+        self.password = password
+
+
+    def associateWithStream(self, xs):
+        """
+        Register with the XML stream.
+
+        Populates the stream's list of initializers, along with their
+        requiredness. This list is used by
+        L{ConnectAuthenticator.initializeStream} to perform the initialization
+        steps.
+        """
+        xmlstream.ConnectAuthenticator.associateWithStream(self, xs)
+
+        xs.initializers = [CheckVersionInitializer(xs)]
+        inits = [ (xmlstream.TLSInitiatingInitializer, False),
+                  (sasl.SASLInitiatingInitializer, True),
+                  (BindInitializer, False),
+                  (SessionInitializer, False),
+                ]
+
+        for initClass, required in inits:
+            init = initClass(xs)
+            init.required = required
+            xs.initializers.append(init)
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/component.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/component.py
new file mode 100644
index 0000000..1f37490
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/component.py
@@ -0,0 +1,474 @@
+# -*- test-case-name: twisted.words.test.test_jabbercomponent -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+External server-side components.
+
+Most Jabber server implementations allow for add-on components that act as a
+separate entity on the Jabber network, but use the server-to-server
+functionality of a regular Jabber IM server. These so-called 'external
+components' are connected to the Jabber server using the Jabber Component
+Protocol as defined in U{JEP-0114<http://www.jabber.org/jeps/jep-0114.html>}.
+
+This module allows for writing external server-side components by assigning one
+or more services implementing L{ijabber.IService} to a L{ServiceManager}. The
+ServiceManager connects to the Jabber server and is responsible for the
+corresponding XML stream.
+"""
+
+from zope.interface import implements
+
+from twisted.application import service
+from twisted.internet import defer
+from twisted.python import log
+from twisted.words.xish import domish
+from twisted.words.protocols.jabber import error, ijabber, jstrports, xmlstream
+from twisted.words.protocols.jabber.jid import internJID as JID
+
+NS_COMPONENT_ACCEPT = 'jabber:component:accept'
+
+def componentFactory(componentid, password):
+    """
+    XML stream factory for external server-side components.
+
+    @param componentid: JID of the component.
+    @type componentid: C{unicode}
+    @param password: password used to authenticate to the server.
+    @type password: C{str}
+    """
+    a = ConnectComponentAuthenticator(componentid, password)
+    return xmlstream.XmlStreamFactory(a)
+
+class ComponentInitiatingInitializer(object):
+    """
+    External server-side component authentication initializer for the
+    initiating entity.
+
+    @ivar xmlstream: XML stream between server and component.
+    @type xmlstream: L{xmlstream.XmlStream}
+    """
+
+    def __init__(self, xs):
+        self.xmlstream = xs
+        self._deferred = None
+
+    def initialize(self):
+        xs = self.xmlstream
+        hs = domish.Element((self.xmlstream.namespace, "handshake"))
+        hs.addContent(xmlstream.hashPassword(xs.sid,
+                                             unicode(xs.authenticator.password)))
+
+        # Setup observer to watch for handshake result
+        xs.addOnetimeObserver("/handshake", self._cbHandshake)
+        xs.send(hs)
+        self._deferred = defer.Deferred()
+        return self._deferred
+
+    def _cbHandshake(self, _):
+        # we have successfully shaken hands and can now consider this
+        # entity to represent the component JID.
+        self.xmlstream.thisEntity = self.xmlstream.otherEntity
+        self._deferred.callback(None)
+
+
+
+class ConnectComponentAuthenticator(xmlstream.ConnectAuthenticator):
+    """
+    Authenticator to permit an XmlStream to authenticate against a Jabber
+    server as an external component (where the Authenticator is initiating the
+    stream).
+    """
+    namespace = NS_COMPONENT_ACCEPT
+
+    def __init__(self, componentjid, password):
+        """
+        @type componentjid: C{str}
+        @param componentjid: Jabber ID that this component wishes to bind to.
+
+        @type password: C{str}
+        @param password: Password/secret this component uses to authenticate.
+        """
+        # Note that we are sending 'to' our desired component JID.
+        xmlstream.ConnectAuthenticator.__init__(self, componentjid)
+        self.password = password
+
+    def associateWithStream(self, xs):
+        xs.version = (0, 0)
+        xmlstream.ConnectAuthenticator.associateWithStream(self, xs)
+
+        xs.initializers = [ComponentInitiatingInitializer(xs)]
+
+
+
+class ListenComponentAuthenticator(xmlstream.ListenAuthenticator):
+    """
+    Authenticator for accepting components.
+
+    @since: 8.2
+    @ivar secret: The shared secret used to authorize incoming component
+                  connections.
+    @type secret: C{unicode}.
+    """
+
+    namespace = NS_COMPONENT_ACCEPT
+
+    def __init__(self, secret):
+        self.secret = secret
+        xmlstream.ListenAuthenticator.__init__(self)
+
+
+    def associateWithStream(self, xs):
+        """
+        Associate the authenticator with a stream.
+
+        This sets the stream's version to 0.0, because the XEP-0114 component
+        protocol was not designed for XMPP 1.0.
+        """
+        xs.version = (0, 0)
+        xmlstream.ListenAuthenticator.associateWithStream(self, xs)
+
+
+    def streamStarted(self, rootElement):
+        """
+        Called by the stream when it has started.
+
+        This examines the default namespace of the incoming stream and whether
+        there is a requested hostname for the component. Then it generates a
+        stream identifier, sends a response header and adds an observer for
+        the first incoming element, triggering L{onElement}.
+        """
+
+        xmlstream.ListenAuthenticator.streamStarted(self, rootElement)
+
+        if rootElement.defaultUri != self.namespace:
+            exc = error.StreamError('invalid-namespace')
+            self.xmlstream.sendStreamError(exc)
+            return
+
+        # self.xmlstream.thisEntity is set to the address the component
+        # wants to assume.
+        if not self.xmlstream.thisEntity:
+            exc = error.StreamError('improper-addressing')
+            self.xmlstream.sendStreamError(exc)
+            return
+
+        self.xmlstream.sendHeader()
+        self.xmlstream.addOnetimeObserver('/*', self.onElement)
+
+
+    def onElement(self, element):
+        """
+        Called on incoming XML Stanzas.
+
+        The very first element received should be a request for handshake.
+        Otherwise, the stream is dropped with a 'not-authorized' error. If a
+        handshake request was received, the hash is extracted and passed to
+        L{onHandshake}.
+        """
+        if (element.uri, element.name) == (self.namespace, 'handshake'):
+            self.onHandshake(unicode(element))
+        else:
+            exc = error.StreamError('not-authorized')
+            self.xmlstream.sendStreamError(exc)
+
+
+    def onHandshake(self, handshake):
+        """
+        Called upon receiving the handshake request.
+
+        This checks that the given hash in C{handshake} is equal to a
+        calculated hash, responding with a handshake reply or a stream error.
+        If the handshake was ok, the stream is authorized, and XML Stanzas may
+        be exchanged.
+        """
+        calculatedHash = xmlstream.hashPassword(self.xmlstream.sid,
+                                                unicode(self.secret))
+        if handshake != calculatedHash:
+            exc = error.StreamError('not-authorized', text='Invalid hash')
+            self.xmlstream.sendStreamError(exc)
+        else:
+            self.xmlstream.send('<handshake/>')
+            self.xmlstream.dispatch(self.xmlstream,
+                                    xmlstream.STREAM_AUTHD_EVENT)
+
+
+
+class Service(service.Service):
+    """
+    External server-side component service.
+    """
+
+    implements(ijabber.IService)
+
+    def componentConnected(self, xs):
+        pass
+
+    def componentDisconnected(self):
+        pass
+
+    def transportConnected(self, xs):
+        pass
+
+    def send(self, obj):
+        """
+        Send data over service parent's XML stream.
+
+        @note: L{ServiceManager} maintains a queue for data sent using this
+        method when there is no current established XML stream. This data is
+        then sent as soon as a new stream has been established and initialized.
+        Subsequently, L{componentConnected} will be called again. If this
+        queueing is not desired, use C{send} on the XmlStream object (passed to
+        L{componentConnected}) directly.
+
+        @param obj: data to be sent over the XML stream. This is usually an
+        object providing L{domish.IElement}, or serialized XML. See
+        L{xmlstream.XmlStream} for details.
+        """
+
+        self.parent.send(obj)
+
+class ServiceManager(service.MultiService):
+    """
+    Business logic representing a managed component connection to a Jabber
+    router.
+
+    This service maintains a single connection to a Jabber router and provides
+    facilities for packet routing and transmission. Business logic modules are
+    services implementing L{ijabber.IService} (like subclasses of L{Service}), and
+    are added as sub-services.
+    """
+
+    def __init__(self, jid, password):
+        service.MultiService.__init__(self)
+
+        # Setup defaults
+        self.jabberId = jid
+        self.xmlstream = None
+
+        # Internal buffer of packets
+        self._packetQueue = []
+
+        # Setup the xmlstream factory
+        self._xsFactory = componentFactory(self.jabberId, password)
+
+        # Register some lambda functions to keep the self.xmlstream var up to
+        # date
+        self._xsFactory.addBootstrap(xmlstream.STREAM_CONNECTED_EVENT,
+                                     self._connected)
+        self._xsFactory.addBootstrap(xmlstream.STREAM_AUTHD_EVENT, self._authd)
+        self._xsFactory.addBootstrap(xmlstream.STREAM_END_EVENT,
+                                     self._disconnected)
+
+        # Map addBootstrap and removeBootstrap to the underlying factory -- is
+        # this right? I have no clue...but it'll work for now, until i can
+        # think about it more.
+        self.addBootstrap = self._xsFactory.addBootstrap
+        self.removeBootstrap = self._xsFactory.removeBootstrap
+
+    def getFactory(self):
+        return self._xsFactory
+
+    def _connected(self, xs):
+        self.xmlstream = xs
+        for c in self:
+            if ijabber.IService.providedBy(c):
+                c.transportConnected(xs)
+
+    def _authd(self, xs):
+        # Flush all pending packets
+        for p in self._packetQueue:
+            self.xmlstream.send(p)
+        self._packetQueue = []
+
+        # Notify all child services which implement the IService interface
+        for c in self:
+            if ijabber.IService.providedBy(c):
+                c.componentConnected(xs)
+
+    def _disconnected(self, _):
+        self.xmlstream = None
+
+        # Notify all child services which implement
+        # the IService interface
+        for c in self:
+            if ijabber.IService.providedBy(c):
+                c.componentDisconnected()
+
+    def send(self, obj):
+        """
+        Send data over the XML stream.
+
+        When there is no established XML stream, the data is queued and sent
+        out when a new XML stream has been established and initialized.
+
+        @param obj: data to be sent over the XML stream. This is usually an
+        object providing L{domish.IElement}, or serialized XML. See
+        L{xmlstream.XmlStream} for details.
+        """
+
+        if self.xmlstream != None:
+            self.xmlstream.send(obj)
+        else:
+            self._packetQueue.append(obj)
+
+def buildServiceManager(jid, password, strport):
+    """
+    Constructs a pre-built L{ServiceManager}, using the specified strport
+    string.
+    """
+
+    svc = ServiceManager(jid, password)
+    client_svc = jstrports.client(strport, svc.getFactory())
+    client_svc.setServiceParent(svc)
+    return svc
+
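+# A minimal sketch of a running component built from the helpers above.  The
+# component JID, secret and the "tcp:host:port" strport string pointing at the
+# router's XEP-0114 listener are assumptions for illustration; real components
+# would attach Service subclasses carrying their actual logic.
+def _exampleComponent():
+    sm = buildServiceManager(u'echo.example.org', u'secret',
+                             "tcp:127.0.0.1:5347")
+    componentService = Service()
+    componentService.setServiceParent(sm)
+    return sm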
+
+
+class Router(object):
+    """
+    XMPP Server's Router.
+
+    A router connects the different components of the XMPP service and routes
+    messages between them based on the given routing table.
+
+    Connected components are trusted to have correct addressing in the
+    stanzas they offer for routing.
+
+    A route destination of C{None} adds a default route. Traffic for which no
+    specific route exists will be routed to this default route.
+
+    @since: 8.2
+    @ivar routes: Routes based on the host part of JIDs. Maps host names to the
+                  L{EventDispatcher<utility.EventDispatcher>}s that should
+                  receive the traffic. A key of C{None} means the default
+                  route.
+    @type routes: C{dict}
+    """
+
+    def __init__(self):
+        self.routes = {}
+
+
+    def addRoute(self, destination, xs):
+        """
+        Add a new route.
+
+        The passed XML Stream C{xs} will have an observer for all stanzas
+        added to route its outgoing traffic. In turn, traffic for
+        C{destination} will be passed to this stream.
+
+        @param destination: Destination of the route to be added as a host name
+                            or C{None} for the default route.
+        @type destination: C{str} or C{NoneType}.
+        @param xs: XML Stream to register the route for.
+        @type xs: L{EventDispatcher<utility.EventDispatcher>}.
+        """
+        self.routes[destination] = xs
+        xs.addObserver('/*', self.route)
+
+
+    def removeRoute(self, destination, xs):
+        """
+        Remove a route.
+
+        @param destination: Destination of the route that should be removed.
+        @type destination: C{str}.
+        @param xs: XML Stream to remove the route for.
+        @type xs: L{EventDispatcher<utility.EventDispatcher>}.
+        """
+        xs.removeObserver('/*', self.route)
+        if (xs == self.routes[destination]):
+            del self.routes[destination]
+
+
+    def route(self, stanza):
+        """
+        Route a stanza.
+
+        @param stanza: The stanza to be routed.
+        @type stanza: L{domish.Element}.
+        """
+        destination = JID(stanza['to'])
+
+        log.msg("Routing to %s: %r" % (destination.full(), stanza.toXml()))
+
+        if destination.host in self.routes:
+            self.routes[destination.host].send(stanza)
+        else:
+            self.routes[None].send(stanza)
+
+
+
+class XMPPComponentServerFactory(xmlstream.XmlStreamServerFactory):
+    """
+    XMPP Component Server factory.
+
+    This factory accepts XMPP external component connections and makes
+    the router service route traffic for a component's bound domain
+    to that component.
+
+    @since: 8.2
+    """
+
+    logTraffic = False
+
+    def __init__(self, router, secret='secret'):
+        self.router = router
+        self.secret = secret
+
+        def authenticatorFactory():
+            return ListenComponentAuthenticator(self.secret)
+
+        xmlstream.XmlStreamServerFactory.__init__(self, authenticatorFactory)
+        self.addBootstrap(xmlstream.STREAM_CONNECTED_EVENT,
+                          self.onConnectionMade)
+        self.addBootstrap(xmlstream.STREAM_AUTHD_EVENT,
+                          self.onAuthenticated)
+
+        self.serial = 0
+
+
+    def onConnectionMade(self, xs):
+        """
+        Called when a component connection was made.
+
+        This enables traffic debugging on incoming streams.
+        """
+        xs.serial = self.serial
+        self.serial += 1
+
+        def logDataIn(buf):
+            log.msg("RECV (%d): %r" % (xs.serial, buf))
+
+        def logDataOut(buf):
+            log.msg("SEND (%d): %r" % (xs.serial, buf))
+
+        if self.logTraffic:
+            xs.rawDataInFn = logDataIn
+            xs.rawDataOutFn = logDataOut
+
+        xs.addObserver(xmlstream.STREAM_ERROR_EVENT, self.onError)
+
+
+    def onAuthenticated(self, xs):
+        """
+        Called when a component has successfully authenticated.
+
+        Add the component to the routing table and establish a handler
+        for a closed connection.
+        """
+        destination = xs.thisEntity.host
+
+        self.router.addRoute(destination, xs)
+        xs.addObserver(xmlstream.STREAM_END_EVENT, self.onConnectionLost, 0,
+                                                   destination, xs)
+
+
+    def onError(self, reason):
+        log.err(reason, "Stream Error")
+
+
+    def onConnectionLost(self, destination, xs, reason):
+        self.router.removeRoute(destination, xs)
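+
+
+# A minimal sketch of the server side: accept XEP-0114 component connections
+# on a port and let the Router dispatch stanzas between them.  The port number
+# and shared secret are assumptions for illustration.
+def _exampleComponentServer(reactor, secret='secret', port=5347):
+    router = Router()
+    factory = XMPPComponentServerFactory(router, secret)
+    factory.logTraffic = True        # log raw traffic while debugging
+    return reactor.listenTCP(port, factory)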
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/error.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/error.py
new file mode 100644
index 0000000..aa5e9d2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/error.py
@@ -0,0 +1,336 @@
+# -*- test-case-name: twisted.words.test.test_jabbererror -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+XMPP Error support.
+"""
+
+import copy
+
+from twisted.words.xish import domish
+
+NS_XML = "http://www.w3.org/XML/1998/namespace"
+NS_XMPP_STREAMS = "urn:ietf:params:xml:ns:xmpp-streams"
+NS_XMPP_STANZAS = "urn:ietf:params:xml:ns:xmpp-stanzas"
+
+STANZA_CONDITIONS = {
+    'bad-request':              {'code': '400', 'type': 'modify'},
+    'conflict':                 {'code': '409', 'type': 'cancel'},
+    'feature-not-implemented':  {'code': '501', 'type': 'cancel'},
+    'forbidden':                {'code': '403', 'type': 'auth'},
+    'gone':                     {'code': '302', 'type': 'modify'},
+    'internal-server-error':    {'code': '500', 'type': 'wait'},
+    'item-not-found':           {'code': '404', 'type': 'cancel'},
+    'jid-malformed':            {'code': '400', 'type': 'modify'},
+    'not-acceptable':           {'code': '406', 'type': 'modify'},
+    'not-allowed':              {'code': '405', 'type': 'cancel'},
+    'not-authorized':           {'code': '401', 'type': 'auth'},
+    'payment-required':         {'code': '402', 'type': 'auth'},
+    'recipient-unavailable':    {'code': '404', 'type': 'wait'},
+    'redirect':                 {'code': '302', 'type': 'modify'},
+    'registration-required':    {'code': '407', 'type': 'auth'},
+    'remote-server-not-found':  {'code': '404', 'type': 'cancel'},
+    'remote-server-timeout':    {'code': '504', 'type': 'wait'},
+    'resource-constraint':      {'code': '500', 'type': 'wait'},
+    'service-unavailable':      {'code': '503', 'type': 'cancel'},
+    'subscription-required':    {'code': '407', 'type': 'auth'},
+    'undefined-condition':      {'code': '500', 'type': None},
+    'unexpected-request':       {'code': '400', 'type': 'wait'},
+}
+
+CODES_TO_CONDITIONS = {
+    '302': ('gone', 'modify'),
+    '400': ('bad-request', 'modify'),
+    '401': ('not-authorized', 'auth'),
+    '402': ('payment-required', 'auth'),
+    '403': ('forbidden', 'auth'),
+    '404': ('item-not-found', 'cancel'),
+    '405': ('not-allowed', 'cancel'),
+    '406': ('not-acceptable', 'modify'),
+    '407': ('registration-required', 'auth'),
+    '408': ('remote-server-timeout', 'wait'),
+    '409': ('conflict', 'cancel'),
+    '500': ('internal-server-error', 'wait'),
+    '501': ('feature-not-implemented', 'cancel'),
+    '502': ('service-unavailable', 'wait'),
+    '503': ('service-unavailable', 'cancel'),
+    '504': ('remote-server-timeout', 'wait'),
+    '510': ('service-unavailable', 'cancel'),
+}
+
+class BaseError(Exception):
+    """
+    Base class for XMPP error exceptions.
+
+    @cvar namespace: The namespace of the C{error} element generated by
+                     C{getElement}.
+    @type namespace: C{str}
+    @ivar condition: The error condition. The valid values are defined by
+                     subclasses of L{BaseError}.
+    @type condition: C{str}
+    @ivar text: Optional text message to supplement the condition or application
+                specific condition.
+    @type text: C{unicode}
+    @ivar textLang: Identifier of the language used for the message in C{text}.
+                    Values are as described in RFC 3066.
+    @type textLang: C{str}
+    @ivar appCondition: Application specific condition element, supplementing
+                        the error condition in C{condition}.
+    @type appCondition: object providing L{domish.IElement}.
+    """
+
+    namespace = None
+
+    def __init__(self, condition, text=None, textLang=None, appCondition=None):
+        Exception.__init__(self)
+        self.condition = condition
+        self.text = text
+        self.textLang = textLang
+        self.appCondition = appCondition
+
+
+    def __str__(self):
+        message = "%s with condition %r" % (self.__class__.__name__,
+                                            self.condition)
+
+        if self.text:
+            message += ': ' + self.text
+
+        return message
+
+
+    def getElement(self):
+        """
+        Get XML representation from self.
+
+        The method creates an L{domish} representation of the
+        error data contained in this exception.
+
+        @rtype: L{domish.Element}
+        """
+        error = domish.Element((None, 'error'))
+        error.addElement((self.namespace, self.condition))
+        if self.text:
+            text = error.addElement((self.namespace, 'text'),
+                                    content=self.text)
+            if self.textLang:
+                text[(NS_XML, 'lang')] = self.textLang
+        if self.appCondition:
+            error.addChild(self.appCondition)
+        return error
+
+
+
+class StreamError(BaseError):
+    """
+    Stream Error exception.
+
+    Refer to RFC 3920, section 4.7.3, for the allowed values for C{condition}.
+    """
+
+    namespace = NS_XMPP_STREAMS
+
+    def getElement(self):
+        """
+        Get XML representation from self.
+
+        Overrides the base L{BaseError.getElement} to make sure the returned
+        element is in the XML Stream namespace.
+
+        @rtype: L{domish.Element}
+        """
+        from twisted.words.protocols.jabber.xmlstream import NS_STREAMS
+
+        error = BaseError.getElement(self)
+        error.uri = NS_STREAMS
+        return error
+
+
+
+class StanzaError(BaseError):
+    """
+    Stanza Error exception.
+
+    Refer to RFC 3920, section 9.3, for the allowed values for C{condition} and
+    C{type}.
+
+    @ivar type: The stanza error type. Gives a suggestion to the recipient
+                of the error on how to proceed.
+    @type type: C{str}
+    @ivar code: A numeric identifier for the error condition for backwards
+                compatibility with pre-XMPP Jabber implementations.
+    """
+
+    namespace = NS_XMPP_STANZAS
+
+    def __init__(self, condition, type=None, text=None, textLang=None,
+                       appCondition=None):
+        BaseError.__init__(self, condition, text, textLang, appCondition)
+
+        if type is None:
+            try:
+                type = STANZA_CONDITIONS[condition]['type']
+            except KeyError:
+                pass
+        self.type = type
+
+        try:
+            self.code = STANZA_CONDITIONS[condition]['code']
+        except KeyError:
+            self.code = None
+
+        self.children = []
+        self.iq = None
+
+
+    def getElement(self):
+        """
+        Get XML representation from self.
+
+        Overrides the base L{BaseError.getElement} to make sure the returned
+        element has a C{type} attribute and optionally a legacy C{code}
+        attribute.
+
+        @rtype: L{domish.Element}
+        """
+        error = BaseError.getElement(self)
+        error['type'] = self.type
+        if self.code:
+            error['code'] = self.code
+        return error
+
+
+    def toResponse(self, stanza):
+        """
+        Construct error response stanza.
+
+        The C{stanza} is transformed into an error response stanza by
+        swapping the C{to} and C{from} addresses and inserting an error
+        element.
+
+        @note: This creates a shallow copy of the list of child elements of the
+               stanza. The child elements themselves are not copied,
+               and references to their parent element will still point to the
+               original stanza element.
+
+               The serialization of an element does not use the reference to
+               its parent, so the typical use case of immediately sending out
+               the constructed error response is not affected.
+
+        @param stanza: the stanza to respond to
+        @type stanza: L{domish.Element}
+        """
+        from twisted.words.protocols.jabber.xmlstream import toResponse
+        response = toResponse(stanza, stanzaType='error')
+        response.children = copy.copy(stanza.children)
+        response.addChild(self.getElement())
+        return response
+
+
+def _getText(element):
+    for child in element.children:
+        if isinstance(child, basestring):
+            return unicode(child)
+
+    return None
+
+
+
+def _parseError(error, errorNamespace):
+    """
+    Parses an error element.
+
+    @param error: The error element to be parsed
+    @type error: L{domish.Element}
+    @param errorNamespace: The namespace of the elements that hold the error
+                           condition and text.
+    @type errorNamespace: C{str}
+    @return: Dictionary with extracted error information. If present, keys
+             C{condition}, C{text}, C{textLang} have a string value,
+             and C{appCondition} has an L{domish.Element} value.
+    @rtype: C{dict}
+    """
+    condition = None
+    text = None
+    textLang = None
+    appCondition = None
+
+    for element in error.elements():
+        if element.uri == errorNamespace:
+            if element.name == 'text':
+                text = _getText(element)
+                textLang = element.getAttribute((NS_XML, 'lang'))
+            else:
+                condition = element.name
+        else:
+            appCondition = element
+
+    return {
+        'condition': condition,
+        'text': text,
+        'textLang': textLang,
+        'appCondition': appCondition,
+    }
+
+
+
+def exceptionFromStreamError(element):
+    """
+    Build an exception object from a stream error.
+
+    @param element: the stream error
+    @type element: L{domish.Element}
+    @return: the generated exception object
+    @rtype: L{StreamError}
+    """
+    error = _parseError(element, NS_XMPP_STREAMS)
+
+    exception = StreamError(error['condition'],
+                            error['text'],
+                            error['textLang'],
+                            error['appCondition'])
+
+    return exception
+
+
+
+def exceptionFromStanza(stanza):
+    """
+    Build an exception object from an error stanza.
+
+    @param stanza: the error stanza
+    @type stanza: L{domish.Element}
+    @return: the generated exception object
+    @rtype: L{StanzaError}
+    """
+    children = []
+    condition = text = textLang = appCondition = type = code = None
+
+    for element in stanza.elements():
+        if element.name == 'error' and element.uri == stanza.uri:
+            code = element.getAttribute('code')
+            type = element.getAttribute('type')
+            error = _parseError(element, NS_XMPP_STANZAS)
+            condition = error['condition']
+            text = error['text']
+            textLang = error['textLang']
+            appCondition = error['appCondition']
+
+            if not condition and code:
+                condition, type = CODES_TO_CONDITIONS[code]
+                text = _getText(stanza.error)
+        else:
+            children.append(element)
+
+    if condition is None:
+        # TODO: raise exception instead?
+        return StanzaError(None)
+
+    exception = StanzaError(condition, type, text, textLang, appCondition)
+
+    exception.children = children
+    exception.stanza = stanza
+
+    return exception
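
A minimal usage sketch for the error module above; the condition and type
values are taken from RFC 3920 and passed explicitly so the sketch does not
depend on the condition tables defined earlier in the module, and the import
path assumes the bundled twisted.words package is importable:

    from twisted.words.protocols.jabber import error

    # Build a stanza-level error with an explicit type and human-readable text.
    exc = error.StanzaError('feature-not-implemented', type='cancel',
                            text='No handler registered for this request')

    str(exc)                    # "StanzaError with condition 'feature-not-implemented': ..."
    element = exc.getElement()  # an <error/> domish element carrying condition, text and type
    xml = element.toXml()       # serialize it, e.g. for embedding in a response stanza
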
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/ijabber.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/ijabber.py
new file mode 100644
index 0000000..9cc65ff
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/ijabber.py
@@ -0,0 +1,199 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Public Jabber Interfaces.
+"""
+
+from zope.interface import Attribute, Interface
+
+class IInitializer(Interface):
+    """
+    Interface for XML stream initializers.
+
+    Initializers perform a step in getting the XML stream ready to be
+    used for the exchange of XML stanzas.
+    """
+
+
+
+class IInitiatingInitializer(IInitializer):
+    """
+    Interface for XML stream initializers for the initiating entity.
+    """
+
+    xmlstream = Attribute("""The associated XML stream""")
+
+    def initialize():
+        """
+        Initiate the initialization step.
+
+        May return a deferred when the initialization is done asynchronously.
+        """
+
+
+
+class IIQResponseTracker(Interface):
+    """
+    IQ response tracker interface.
+
+    The XMPP stanza C{iq} has a request-response nature that fits
+    naturally with deferreds. You send out a request and when the response
+    comes back a deferred is fired.
+
+    The L{IQ} class implements a C{send} method that returns a deferred. This
+    deferred is put in a dictionary that is kept in an L{XmlStream} object,
+    keyed by the request stanza's C{id} attribute.
+
+    An object providing this interface (usually an instance of L{XmlStream})
+    keeps this dictionary, sets observers on the iq stanzas of type
+    C{result} and C{error}, and lets the callback fire the associated deferred.
+    """
+    iqDeferreds = Attribute("Dictionary of deferreds waiting for an iq "
+                             "response")
+
+
+
+class IXMPPHandler(Interface):
+    """
+    Interface for XMPP protocol handlers.
+
+    Objects that provide this interface can be added to a stream manager to
+    handle (part of) an XMPP extension protocol.
+    """
+
+    parent = Attribute("""XML stream manager for this handler""")
+    xmlstream = Attribute("""The managed XML stream""")
+
+    def setHandlerParent(parent):
+        """
+        Set the parent of the handler.
+
+        @type parent: L{IXMPPHandlerCollection}
+        """
+
+
+    def disownHandlerParent(parent):
+        """
+        Remove the parent of the handler.
+
+        @type parent: L{IXMPPHandlerCollection}
+        """
+
+
+    def makeConnection(xs):
+        """
+        A connection over the underlying transport of the XML stream has been
+        established.
+
+        At this point, no traffic has been exchanged over the XML stream
+        given in C{xs}.
+
+        This should set up L{xmlstream} and call L{connectionMade}.
+
+        @type xs: L{XmlStream<twisted.words.protocols.jabber.XmlStream>}
+        """
+
+
+    def connectionMade():
+        """
+        Called after a connection has been established.
+
+        This method can be used to change properties of the XML Stream, its
+        authenticator or the stream manager prior to stream initialization
+        (including authentication).
+        """
+
+
+    def connectionInitialized():
+        """
+        The XML stream has been initialized.
+
+        At this point, authentication was successful, and XML stanzas can be
+        exchanged over the XML stream L{xmlstream}. This method can be
+        used to set up observers for incoming stanzas.
+        """
+
+
+    def connectionLost(reason):
+        """
+        The XML stream has been closed.
+
+        Subsequent use of C{parent.send} will result in data being queued
+        until a new connection has been established.
+
+        @type reason: L{twisted.python.failure.Failure}
+        """
+
+
+
+class IXMPPHandlerCollection(Interface):
+    """
+    Collection of handlers.
+
+    Contains several handlers and manages their connection.
+    """
+
+    def __iter__():
+        """
+        Get an iterator over all child handlers.
+        """
+
+
+    def addHandler(handler):
+        """
+        Add a child handler.
+
+        @type handler: L{IXMPPHandler}
+        """
+
+
+    def removeHandler(handler):
+        """
+        Remove a child handler.
+
+        @type handler: L{IXMPPHandler}
+        """
+
+
+
+class IService(Interface):
+    """
+    External server-side component service interface.
+
+    Services that provide this interface can be added to L{ServiceManager} to
+    implement (part of) the functionality of the server-side component.
+    """
+
+    def componentConnected(xs):
+        """
+        Parent component has established a connection.
+
+        At this point, authentication was successful, and XML stanzas
+        can be exchanged over the XML stream C{xs}. This method can be used
+        to set up observers for incoming stanzas.
+
+        @param xs: XML Stream that represents the established connection.
+        @type xs: L{xmlstream.XmlStream}
+        """
+
+
+    def componentDisconnected():
+        """
+        Parent component has lost the connection to the Jabber server.
+
+        Subsequent use of C{self.parent.send} will result in data being
+        queued until a new connection has been established.
+        """
+
+
+    def transportConnected(xs):
+        """
+        Parent component has established a connection over the underlying
+        transport.
+
+        At this point, no traffic has been exchanged over the XML stream. This
+        method can be used to change properties of the XML Stream (in C{xs}),
+        the service manager or its authenticator prior to stream
+        initialization (including authentication).
+        """
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/jid.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/jid.py
new file mode 100644
index 0000000..9911cee
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/jid.py
@@ -0,0 +1,249 @@
+# -*- test-case-name: twisted.words.test.test_jabberjid -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Jabber Identifier support.
+
+This module provides an object to represent Jabber Identifiers (JIDs) and
+parse string representations into them with proper checking for illegal
+characters, case folding and canonicalisation through L{stringprep<twisted.words.protocols.jabber.xmpp_stringprep>}.
+"""
+
+from twisted.words.protocols.jabber.xmpp_stringprep import nodeprep, resourceprep, nameprep
+
+class InvalidFormat(Exception):
+    """
+    The given string could not be parsed into a valid Jabber Identifier (JID).
+    """
+
+def parse(jidstring):
+    """
+    Parse given JID string into its respective parts and apply stringprep.
+
+    @param jidstring: string representation of a JID.
+    @type jidstring: C{unicode}
+    @return: tuple of (user, host, resource), each of type C{unicode} as
+             the parsed and stringprep'd parts of the given JID. If the
+             given string did not have a user or resource part, the respective
+             field in the tuple will hold C{None}.
+    @rtype: C{tuple}
+    """
+    user = None
+    host = None
+    resource = None
+
+    # Search for delimiters
+    user_sep = jidstring.find("@")
+    res_sep  = jidstring.find("/")
+
+    if user_sep == -1:
+        if res_sep == -1:
+            # host
+            host = jidstring
+        else:
+            # host/resource
+            host = jidstring[0:res_sep]
+            resource = jidstring[res_sep + 1:] or None
+    else:
+        if res_sep == -1:
+            # user@host
+            user = jidstring[0:user_sep] or None
+            host = jidstring[user_sep + 1:]
+        else:
+            if user_sep < res_sep:
+                # user@host/resource
+                user = jidstring[0:user_sep] or None
+                host = jidstring[user_sep + 1:user_sep + (res_sep - user_sep)]
+                resource = jidstring[res_sep + 1:] or None
+            else:
+                # host/resource (with an @ in resource)
+                host = jidstring[0:res_sep]
+                resource = jidstring[res_sep + 1:] or None
+
+    return prep(user, host, resource)
+
+def prep(user, host, resource):
+    """
+    Perform stringprep on all JID fragments.
+
+    @param user: The user part of the JID.
+    @type user: C{unicode}
+    @param host: The host part of the JID.
+    @type host: C{unicode}
+    @param resource: The resource part of the JID.
+    @type resource: C{unicode}
+    @return: The given parts with stringprep applied.
+    @rtype: C{tuple}
+    """
+
+    if user:
+        try:
+            user = nodeprep.prepare(unicode(user))
+        except UnicodeError:
+            raise InvalidFormat, "Invalid character in username"
+    else:
+        user = None
+
+    if not host:
+        raise InvalidFormat, "Server address required."
+    else:
+        try:
+            host = nameprep.prepare(unicode(host))
+        except UnicodeError:
+            raise InvalidFormat, "Invalid character in hostname"
+
+    if resource:
+        try:
+            resource = resourceprep.prepare(unicode(resource))
+        except UnicodeError:
+            raise InvalidFormat, "Invalid character in resource"
+    else:
+        resource = None
+
+    return (user, host, resource)
+
+__internJIDs = {}
+
+def internJID(jidstring):
+    """
+    Return interned JID.
+
+    @rtype: L{JID}
+    """
+
+    if jidstring in __internJIDs:
+        return __internJIDs[jidstring]
+    else:
+        j = JID(jidstring)
+        __internJIDs[jidstring] = j
+        return j
+
+class JID(object):
+    """
+    Represents a stringprep'd Jabber ID.
+
+    JID objects are hashable so they can be used in sets and as keys in
+    dictionaries.
+    """
+
+    def __init__(self, str=None, tuple=None):
+        if not (str or tuple):
+            raise RuntimeError("You must provide a value for either 'str' or "
+                               "'tuple' arguments.")
+
+        if str:
+            user, host, res = parse(str)
+        else:
+            user, host, res = prep(*tuple)
+
+        self.user = user
+        self.host = host
+        self.resource = res
+
+    def userhost(self):
+        """
+        Extract the bare JID as a unicode string.
+
+        A bare JID does not have a resource part, so this returns either
+        C{user@host} or just C{host}.
+
+        @rtype: C{unicode}
+        """
+        if self.user:
+            return u"%s@%s" % (self.user, self.host)
+        else:
+            return self.host
+
+    def userhostJID(self):
+        """
+        Extract the bare JID.
+
+        A bare JID does not have a resource part, so this returns a
+        L{JID} object representing either C{user@host} or just C{host}.
+
+        If the object this method is called upon doesn't have a resource
+        set, it will return itself. Otherwise, the bare JID object will
+        be created and interned using L{internJID}.
+
+        @rtype: L{JID}
+        """
+        if self.resource:
+            return internJID(self.userhost())
+        else:
+            return self
+
+    def full(self):
+        """
+        Return the string representation of this JID.
+
+        @rtype: C{unicode}
+        """
+        if self.user:
+            if self.resource:
+                return u"%s@%s/%s" % (self.user, self.host, self.resource)
+            else:
+                return u"%s@%s" % (self.user, self.host)
+        else:
+            if self.resource:
+                return u"%s/%s" % (self.host, self.resource)
+            else:
+                return self.host
+
+    def __eq__(self, other):
+        """
+        Equality comparison.
+
+        L{JID}s compare equal if their user, host and resource parts all
+        compare equal.  When comparing against instances of other types, it
+        uses the default comparison.
+        """
+        if isinstance(other, JID):
+            return (self.user == other.user and
+                    self.host == other.host and
+                    self.resource == other.resource)
+        else:
+            return NotImplemented
+
+    def __ne__(self, other):
+        """
+        Inequality comparison.
+
+        This negates L{__eq__} for comparison with JIDs and uses the default
+        comparison for other types.
+        """
+        result = self.__eq__(other)
+        if result is NotImplemented:
+            return result
+        else:
+            return not result
+
+    def __hash__(self):
+        """
+        Calculate hash.
+
+        L{JID}s with identical constituent user, host and resource parts have
+        equal hash values.  In combination with the comparison defined on JIDs,
+        this allows for using L{JID}s in sets and as dictionary keys.
+        """
+        return hash((self.user, self.host, self.resource))
+
+    def __unicode__(self):
+        """
+        Get unicode representation.
+
+        Return the string representation of this JID as a unicode string.
+        @see: L{full}
+        """
+
+        return self.full()
+
+    def __repr__(self):
+        """
+        Get object representation.
+
+        Returns a string that would create a new JID object that compares equal
+        to this one.
+        """
+        return 'JID(%r)' % self.full()
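
A short sketch of the JID class above in use; the address is a placeholder,
and the values in the comments assume the plain ASCII parts pass stringprep
unchanged:

    from twisted.words.protocols.jabber.jid import JID, internJID

    user = JID("romeo@montague.example/orchard")
    user.user        # u'romeo'
    user.host        # u'montague.example'
    user.resource    # u'orchard'
    user.userhost()  # u'romeo@montague.example'
    user.full()      # u'romeo@montague.example/orchard'

    # JIDs hash and compare by their parts, so bare JIDs work as dict keys;
    # userhostJID() returns an interned bare JID for the same address.
    assert user.userhostJID() == internJID("romeo@montague.example")
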
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/jstrports.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/jstrports.py
new file mode 100644
index 0000000..773b6d2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/jstrports.py
@@ -0,0 +1,31 @@
+# -*- test-case-name: twisted.words.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+""" A temporary placeholder for client-capable strports, until we
+sufficient use cases get identified """
+
+from twisted.internet.endpoints import _parse
+
+def _parseTCPSSL(factory, domain, port):
+    """ For the moment, parse TCP or SSL connections the same """
+    return (domain, int(port), factory), {}
+
+def _parseUNIX(factory, address):
+    return (address, factory), {}
+
+
+_funcs = { "tcp"  : _parseTCPSSL,
+           "unix" : _parseUNIX,
+           "ssl"  : _parseTCPSSL }
+
+
+def parse(description, factory):
+    args, kw = _parse(description)
+    return (args[0].upper(),) + _funcs[args[0]](factory, *args[1:], **kw)
+
+def client(description, factory):
+    from twisted.application import internet
+    name, args, kw = parse(description, factory)
+    return getattr(internet, name + 'Client')(*args, **kw)
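
A brief sketch of hooking a client factory into a twisted.application service
with the helpers above; the hostname is a placeholder, and a real setup would
pass an XmlStreamFactory wired up with an authenticator instead of a bare
ClientFactory:

    from twisted.application import service
    from twisted.internet import protocol
    from twisted.words.protocols.jabber import jstrports

    application = service.Application("xmpp-client")
    factory = protocol.ClientFactory()

    # "tcp:host:port" follows the strports-style descriptions parsed above.
    svc = jstrports.client("tcp:xmpp.example.org:5222", factory)
    svc.setServiceParent(application)
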
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/sasl.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/sasl.py
new file mode 100644
index 0000000..c804ad4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/sasl.py
@@ -0,0 +1,243 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+XMPP-specific SASL profile.
+"""
+
+import re
+from twisted.internet import defer
+from twisted.words.protocols.jabber import sasl_mechanisms, xmlstream
+from twisted.words.xish import domish
+
+# The b64decode and b64encode functions from the base64 module are new in
+# Python 2.4. For Python 2.3 compatibility, the legacy interface is used while
+# working around MIMEisms.
+
+try:
+    from base64 import b64decode, b64encode
+except ImportError:
+    import base64
+
+    def b64encode(s):
+        return "".join(base64.encodestring(s).split("\n"))
+
+    b64decode = base64.decodestring
+
+NS_XMPP_SASL = 'urn:ietf:params:xml:ns:xmpp-sasl'
+
+def get_mechanisms(xs):
+    """
+    Parse the SASL feature to extract the available mechanism names.
+    """
+    mechanisms = []
+    for element in xs.features[(NS_XMPP_SASL, 'mechanisms')].elements():
+        if element.name == 'mechanism':
+            mechanisms.append(str(element))
+
+    return mechanisms
+
+
+class SASLError(Exception):
+    """
+    SASL base exception.
+    """
+
+
+class SASLNoAcceptableMechanism(SASLError):
+    """
+    The server did not present an acceptable SASL mechanism.
+    """
+
+
+class SASLAuthError(SASLError):
+    """
+    SASL Authentication failed.
+    """
+    def __init__(self, condition=None):
+        self.condition = condition
+
+
+    def __str__(self):
+        return "SASLAuthError with condition %r" % self.condition
+
+
+class SASLIncorrectEncodingError(SASLError):
+    """
+    SASL base64 encoding was incorrect.
+
+    RFC 3920 specifies that any characters not in the base64 alphabet
+    and padding characters present elsewhere than at the end of the string
+    MUST be rejected. See also L{fromBase64}.
+
+    This exception is raised whenever the encoded string does not adhere
+    to these additional restrictions or when the decoding itself fails.
+
+    The recommended behaviour for so-called receiving entities (like servers in
+    client-to-server connections, see RFC 3920 for terminology) is to fail the
+    SASL negotiation with an C{'incorrect-encoding'} condition. For initiating
+    entities, one should assume the receiving entity to be either buggy or
+    malevolent. The stream should be terminated and reconnecting is not
+    advised.
+    """
+
+base64Pattern = re.compile("^[0-9A-Za-z+/]*[0-9A-Za-z+/=]{,2}$")
+
+def fromBase64(s):
+    """
+    Decode base64 encoded string.
+
+    This helper performs regular decoding of a base64 encoded string, but also
+    rejects any characters that are not in the base64 alphabet, as well as
+    padding occurring anywhere other than in the last one or two characters,
+    as specified in section 14.9 of RFC 3920. This safeguards against various
+    attack vectors, among them the creation of a covert channel that "leaks"
+    information.
+    """
+
+    if base64Pattern.match(s) is None:
+        raise SASLIncorrectEncodingError()
+
+    try:
+        return b64decode(s)
+    except Exception, e:
+        raise SASLIncorrectEncodingError(str(e))
+
+
+
+class SASLInitiatingInitializer(xmlstream.BaseFeatureInitiatingInitializer):
+    """
+    Stream initializer that performs SASL authentication.
+
+    The mechanisms supported by this initializer are C{DIGEST-MD5}, C{PLAIN}
+    and C{ANONYMOUS}. The C{ANONYMOUS} SASL mechanism is used when the JID, set
+    on the authenticator, does not have a localpart (username), requesting an
+    anonymous session where the username is generated by the server.
+    Otherwise, C{DIGEST-MD5} and C{PLAIN} are attempted, in that order.
+    """
+
+    feature = (NS_XMPP_SASL, 'mechanisms')
+    _deferred = None
+
+    def setMechanism(self):
+        """
+        Select and setup authentication mechanism.
+
+        Uses the authenticator's C{jid} and C{password} attribute for the
+        authentication credentials. If no supported SASL mechanisms are
+        advertized by the receiving party, a failing deferred is returned with
+        a L{SASLNoAcceptableMechanism} exception.
+        """
+
+        jid = self.xmlstream.authenticator.jid
+        password = self.xmlstream.authenticator.password
+
+        mechanisms = get_mechanisms(self.xmlstream)
+        if jid.user is not None:
+            if 'DIGEST-MD5' in mechanisms:
+                self.mechanism = sasl_mechanisms.DigestMD5('xmpp', jid.host, None,
+                                                           jid.user, password)
+            elif 'PLAIN' in mechanisms:
+                self.mechanism = sasl_mechanisms.Plain(None, jid.user, password)
+            else:
+                raise SASLNoAcceptableMechanism()
+        else:
+            if 'ANONYMOUS' in mechanisms:
+                self.mechanism = sasl_mechanisms.Anonymous()
+            else:
+                raise SASLNoAcceptableMechanism()
+
+
+    def start(self):
+        """
+        Start SASL authentication exchange.
+        """
+
+        self.setMechanism()
+        self._deferred = defer.Deferred()
+        self.xmlstream.addObserver('/challenge', self.onChallenge)
+        self.xmlstream.addOnetimeObserver('/success', self.onSuccess)
+        self.xmlstream.addOnetimeObserver('/failure', self.onFailure)
+        self.sendAuth(self.mechanism.getInitialResponse())
+        return self._deferred
+
+
+    def sendAuth(self, data=None):
+        """
+        Initiate authentication protocol exchange.
+
+        If an initial client response is given in C{data}, it will be
+        sent along.
+
+        @param data: initial client response.
+        @type data: C{str} or C{None}.
+        """
+
+        auth = domish.Element((NS_XMPP_SASL, 'auth'))
+        auth['mechanism'] = self.mechanism.name
+        if data is not None:
+            auth.addContent(b64encode(data) or '=')
+        self.xmlstream.send(auth)
+
+
+    def sendResponse(self, data=''):
+        """
+        Send response to a challenge.
+
+        @param data: client response.
+        @type data: C{str}.
+        """
+
+        response = domish.Element((NS_XMPP_SASL, 'response'))
+        if data:
+            response.addContent(b64encode(data))
+        self.xmlstream.send(response)
+
+
+    def onChallenge(self, element):
+        """
+        Parse challenge and send response from the mechanism.
+
+        @param element: the challenge protocol element.
+        @type element: L{domish.Element}.
+        """
+
+        try:
+            challenge = fromBase64(str(element))
+        except SASLIncorrectEncodingError:
+            self._deferred.errback()
+        else:
+            self.sendResponse(self.mechanism.getResponse(challenge))
+
+
+    def onSuccess(self, success):
+        """
+        Clean up observers, reset the XML stream and send a new header.
+
+        @param success: the success protocol element. For now unused, but
+                        could hold additional data.
+        @type success: L{domish.Element}
+        """
+
+        self.xmlstream.removeObserver('/challenge', self.onChallenge)
+        self.xmlstream.removeObserver('/failure', self.onFailure)
+        self.xmlstream.reset()
+        self.xmlstream.sendHeader()
+        self._deferred.callback(xmlstream.Reset)
+
+
+    def onFailure(self, failure):
+        """
+        Clean up observers, parse the failure and errback the deferred.
+
+        @param failure: the failure protocol element. Holds details on
+                        the error condition.
+        @type failure: L{domish.Element}
+        """
+
+        self.xmlstream.removeObserver('/challenge', self.onChallenge)
+        self.xmlstream.removeObserver('/success', self.onSuccess)
+        try:
+            condition = failure.firstChildElement().name
+        except AttributeError:
+            condition = None
+        self._deferred.errback(SASLAuthError(condition))
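
A small sketch of the stricter base64 handling implemented above; the second
call is rejected because a padding character occurs in the middle of the
string:

    from twisted.words.protocols.jabber import sasl

    sasl.fromBase64('dGVzdA==')          # returns 'test'
    try:
        sasl.fromBase64('dGV=zdA==')     # misplaced padding
    except sasl.SASLIncorrectEncodingError:
        pass                             # rejected before any decoding happens
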
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/sasl_mechanisms.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/sasl_mechanisms.py
new file mode 100644
index 0000000..5d51be2
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/sasl_mechanisms.py
@@ -0,0 +1,240 @@
+# -*- test-case-name: twisted.words.test.test_jabbersaslmechanisms -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Protocol agnostic implementations of SASL authentication mechanisms.
+"""
+
+import binascii, random, time, os
+
+from zope.interface import Interface, Attribute, implements
+
+from twisted.python.hashlib import md5
+
+class ISASLMechanism(Interface):
+    name = Attribute("""Common name for the SASL Mechanism.""")
+
+    def getInitialResponse():
+        """
+        Get the initial client response, if defined for this mechanism.
+
+        @return: initial client response string.
+        @rtype: C{str}.
+        """
+
+
+    def getResponse(challenge):
+        """
+        Get the response to a server challenge.
+
+        @param challenge: server challenge.
+        @type challenge: C{str}.
+        @return: client response.
+        @rtype: C{str}.
+        """
+
+
+
+class Anonymous(object):
+    """
+    Implements the ANONYMOUS SASL authentication mechanism.
+
+    This mechanism is defined in RFC 2245.
+    """
+    implements(ISASLMechanism)
+    name = 'ANONYMOUS'
+
+    def getInitialResponse(self):
+        return None
+
+
+
+class Plain(object):
+    """
+    Implements the PLAIN SASL authentication mechanism.
+
+    The PLAIN SASL authentication mechanism is defined in RFC 2595.
+    """
+    implements(ISASLMechanism)
+
+    name = 'PLAIN'
+
+    def __init__(self, authzid, authcid, password):
+        self.authzid = authzid or ''
+        self.authcid = authcid or ''
+        self.password = password or ''
+
+
+    def getInitialResponse(self):
+        return "%s\x00%s\x00%s" % (self.authzid.encode('utf-8'),
+                                   self.authcid.encode('utf-8'),
+                                   self.password.encode('utf-8'))
+
+
+
+class DigestMD5(object):
+    """
+    Implements the DIGEST-MD5 SASL authentication mechanism.
+
+    The DIGEST-MD5 SASL authentication mechanism is defined in RFC 2831.
+    """
+    implements(ISASLMechanism)
+
+    name = 'DIGEST-MD5'
+
+    def __init__(self, serv_type, host, serv_name, username, password):
+        self.username = username
+        self.password = password
+        self.defaultRealm = host
+
+        self.digest_uri = '%s/%s' % (serv_type, host)
+        if serv_name is not None:
+            self.digest_uri += '/%s' % serv_name
+
+
+    def getInitialResponse(self):
+        return None
+
+
+    def getResponse(self, challenge):
+        directives = self._parse(challenge)
+
+        # Compat for implementations that do not send this along with
+        # a successful authentication.
+        if 'rspauth' in directives:
+            return ''
+
+        try:
+            realm = directives['realm']
+        except KeyError:
+            realm = self.defaultRealm
+
+        return self._gen_response(directives['charset'],
+                                  realm,
+                                  directives['nonce'])
+
+    def _parse(self, challenge):
+        """
+        Parses the server challenge.
+
+        Splits the challenge into a dictionary of directives with values.
+
+        @return: challenge directives and their values.
+        @rtype: C{dict} of C{str} to C{str}.
+        """
+        s = challenge
+        paramDict = {}
+        cur = 0
+        remainingParams = True
+        while remainingParams:
+            # Parse a param. We can't just split on commas, because there can
+            # be some commas inside (quoted) param values, e.g.:
+            # qop="auth,auth-int"
+
+            middle = s.index("=", cur)
+            name = s[cur:middle].lstrip()
+            middle += 1
+            if s[middle] == '"':
+                middle += 1
+                end = s.index('"', middle)
+                value = s[middle:end]
+                cur = s.find(',', end) + 1
+                if cur == 0:
+                    remainingParams = False
+            else:
+                end = s.find(',', middle)
+                if end == -1:
+                    value = s[middle:].rstrip()
+                    remainingParams = False
+                else:
+                    value = s[middle:end].rstrip()
+                cur = end + 1
+            paramDict[name] = value
+
+        for param in ('qop', 'cipher'):
+            if param in paramDict:
+                paramDict[param] = paramDict[param].split(',')
+
+        return paramDict
+
+    def _unparse(self, directives):
+        """
+        Create message string from directives.
+
+        @param directives: dictionary of directives (names to their values).
+                           For certain directives, extra quotes are added, as
+                           needed.
+        @type directives: C{dict} of C{str} to C{str}
+        @return: message string.
+        @rtype: C{str}.
+        """
+
+        directive_list = []
+        for name, value in directives.iteritems():
+            if name in ('username', 'realm', 'cnonce',
+                        'nonce', 'digest-uri', 'authzid', 'cipher'):
+                directive = '%s="%s"' % (name, value)
+            else:
+                directive = '%s=%s' % (name, value)
+
+            directive_list.append(directive)
+
+        return ','.join(directive_list)
+
+
+    def _gen_response(self, charset, realm, nonce):
+        """
+        Generate response-value.
+
+        Creates a response to a challenge according to section 2.1.2.1 of
+        RFC 2831 using the C{charset}, C{realm} and C{nonce} directives
+        from the challenge.
+        """
+
+        def H(s):
+            return md5(s).digest()
+
+        def HEX(n):
+            return binascii.b2a_hex(n)
+
+        def KD(k, s):
+            return H('%s:%s' % (k, s))
+
+        try:
+            username = self.username.encode(charset)
+            password = self.password.encode(charset)
+        except UnicodeError:
+            # TODO - add error checking
+            raise
+
+        nc = '%08x' % 1 # TODO: support subsequent auth.
+        cnonce = self._gen_nonce()
+        qop = 'auth'
+
+        # TODO - add support for authzid
+        a1 = "%s:%s:%s" % (H("%s:%s:%s" % (username, realm, password)),
+                           nonce,
+                           cnonce)
+        a2 = "AUTHENTICATE:%s" % self.digest_uri
+
+        response = HEX( KD ( HEX(H(a1)),
+                             "%s:%s:%s:%s:%s" % (nonce, nc,
+                                                 cnonce, "auth", HEX(H(a2)))))
+
+        directives = {'username': username,
+                      'realm' : realm,
+                      'nonce' : nonce,
+                      'cnonce' : cnonce,
+                      'nc' : nc,
+                      'qop' : qop,
+                      'digest-uri': self.digest_uri,
+                      'response': response,
+                      'charset': charset}
+
+        return self._unparse(directives)
+
+
+    def _gen_nonce(self):
+        return md5("%s:%s:%s" % (str(random.random()) , str(time.gmtime()),str(os.getpid()))).hexdigest()
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/xmlstream.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/xmlstream.py
new file mode 100644
index 0000000..cc2745b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/xmlstream.py
@@ -0,0 +1,1136 @@
+# -*- test-case-name: twisted.words.test.test_jabberxmlstream -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+XMPP XML Streams
+
+Building blocks for setting up XML Streams, including helping classes for
+doing authentication on either client or server side, and working with XML
+Stanzas.
+"""
+
+from zope.interface import directlyProvides, implements
+
+from twisted.internet import defer, protocol
+from twisted.internet.error import ConnectionLost
+from twisted.python import failure, log, randbytes
+from twisted.python.hashlib import sha1
+from twisted.words.protocols.jabber import error, ijabber, jid
+from twisted.words.xish import domish, xmlstream
+from twisted.words.xish.xmlstream import STREAM_CONNECTED_EVENT
+from twisted.words.xish.xmlstream import STREAM_START_EVENT
+from twisted.words.xish.xmlstream import STREAM_END_EVENT
+from twisted.words.xish.xmlstream import STREAM_ERROR_EVENT
+
+try:
+    from twisted.internet import ssl
+except ImportError:
+    ssl = None
+if ssl and not ssl.supported:
+    ssl = None
+
+STREAM_AUTHD_EVENT = intern("//event/stream/authd")
+INIT_FAILED_EVENT = intern("//event/xmpp/initfailed")
+
+NS_STREAMS = 'http://etherx.jabber.org/streams'
+NS_XMPP_TLS = 'urn:ietf:params:xml:ns:xmpp-tls'
+
+Reset = object()
+
+def hashPassword(sid, password):
+    """
+    Create a SHA1-digest string of a session identifier and password.
+
+    @param sid: The stream session identifier.
+    @type sid: C{unicode}.
+    @param password: The password to be hashed.
+    @type password: C{unicode}.
+    """
+    if not isinstance(sid, unicode):
+        raise TypeError("The session identifier must be a unicode object")
+    if not isinstance(password, unicode):
+        raise TypeError("The password must be a unicode object")
+    input = u"%s%s" % (sid, password)
+    return sha1(input.encode('utf-8')).hexdigest()
+
+
+
+class Authenticator:
+    """
+    Base class for business logic of initializing an XmlStream
+
+    Subclass this object to enable an XmlStream to initialize and authenticate
+    to different types of stream hosts (such as clients, components, etc.).
+
+    Rules:
+      1. The Authenticator MUST dispatch a L{STREAM_AUTHD_EVENT} when the
+         stream has been completely initialized.
+      2. The Authenticator SHOULD reset all state information when
+         L{associateWithStream} is called.
+      3. The Authenticator SHOULD override L{streamStarted}, and start
+         initialization there.
+
+    @type xmlstream: L{XmlStream}
+    @ivar xmlstream: The XmlStream that needs authentication
+
+    @note: the term authenticator is historical. Authenticators perform
+           all steps required to prepare the stream for the exchange
+           of XML stanzas.
+    """
+
+    def __init__(self):
+        self.xmlstream = None
+
+
+    def connectionMade(self):
+        """
+        Called by the XmlStream when the underlying socket connection is
+        in place.
+
+        This allows the Authenticator to send an initial root element, if it's
+        connecting, or wait for an inbound root from the peer if it's accepting
+        the connection.
+
+        Subclasses can use self.xmlstream.send() to send any initial data to
+        the peer.
+        """
+
+
+    def streamStarted(self, rootElement):
+        """
+        Called by the XmlStream when the stream has started.
+
+        A stream is considered to have started when the start tag of the root
+        element has been received.
+
+        This examines C{rootElement} to see if there is a version attribute.
+        If absent, C{0.0} is assumed per RFC 3920. Subsequently, the
+        minimum of the version from the received stream header and the
+        value stored in L{xmlstream} is taken and put back in L{xmlstream}.
+
+        Extensions of this method can extract more information from the
+        stream header and perform checks on them, optionally sending
+        stream errors and closing the stream.
+        """
+        if rootElement.hasAttribute("version"):
+            version = rootElement["version"].split(".")
+            try:
+                version = (int(version[0]), int(version[1]))
+            except (IndexError, ValueError):
+                version = (0, 0)
+        else:
+            version = (0, 0)
+
+        self.xmlstream.version = min(self.xmlstream.version, version)
+
+
+    def associateWithStream(self, xmlstream):
+        """
+        Called by the XmlStreamFactory when a connection has been made
+        to the requested peer, and an XmlStream object has been
+        instantiated.
+
+        The default implementation just saves a handle to the new
+        XmlStream.
+
+        @type xmlstream: L{XmlStream}
+        @param xmlstream: The XmlStream that will be passing events to this
+                          Authenticator.
+
+        """
+        self.xmlstream = xmlstream
+
+
+
+class ConnectAuthenticator(Authenticator):
+    """
+    Authenticator for initiating entities.
+    """
+
+    namespace = None
+
+    def __init__(self, otherHost):
+        self.otherHost = otherHost
+
+
+    def connectionMade(self):
+        self.xmlstream.namespace = self.namespace
+        self.xmlstream.otherEntity = jid.internJID(self.otherHost)
+        self.xmlstream.sendHeader()
+
+
+    def initializeStream(self):
+        """
+        Perform stream initialization procedures.
+
+        An L{XmlStream} holds a list of initializer objects in its
+        C{initializers} attribute. This method calls these initializers in
+        order and dispatches the C{STREAM_AUTHD_EVENT} event when the list has
+        been successfully processed. Otherwise it dispatches the
+        C{INIT_FAILED_EVENT} event with the failure.
+
+        Initializers may return the special L{Reset} object to halt the
+        initialization processing. It signals that the current initializer was
+        successfully processed, but that the XML Stream has been reset. An
+        example is the TLSInitiatingInitializer.
+        """
+
+        def remove_first(result):
+            self.xmlstream.initializers.pop(0)
+
+            return result
+
+        def do_next(result):
+            """
+            Take the first initializer and process it.
+
+            On success, the initializer is removed from the list and
+            then the next initializer will be tried.
+            """
+
+            if result is Reset:
+                return None
+
+            try:
+                init = self.xmlstream.initializers[0]
+            except IndexError:
+                self.xmlstream.dispatch(self.xmlstream, STREAM_AUTHD_EVENT)
+                return None
+            else:
+                d = defer.maybeDeferred(init.initialize)
+                d.addCallback(remove_first)
+                d.addCallback(do_next)
+                return d
+
+        d = defer.succeed(None)
+        d.addCallback(do_next)
+        d.addErrback(self.xmlstream.dispatch, INIT_FAILED_EVENT)
+
+
+    def streamStarted(self, rootElement):
+        """
+        Called by the XmlStream when the stream has started.
+
+        This extends L{Authenticator.streamStarted} to extract further stream
+        headers from C{rootElement}, optionally wait for stream features to be
+        received and then call C{initializeStream}.
+        """
+
+        Authenticator.streamStarted(self, rootElement)
+
+        self.xmlstream.sid = rootElement.getAttribute("id")
+
+        if rootElement.hasAttribute("from"):
+            self.xmlstream.otherEntity = jid.internJID(rootElement["from"])
+
+        # Setup observer for stream features, if applicable
+        if self.xmlstream.version >= (1, 0):
+            def onFeatures(element):
+                features = {}
+                for feature in element.elements():
+                    features[(feature.uri, feature.name)] = feature
+
+                self.xmlstream.features = features
+                self.initializeStream()
+
+            self.xmlstream.addOnetimeObserver('/features[@xmlns="%s"]' %
+                                                  NS_STREAMS,
+                                              onFeatures)
+        else:
+            self.initializeStream()
+
+
+
+class ListenAuthenticator(Authenticator):
+    """
+    Authenticator for receiving entities.
+    """
+
+    namespace = None
+
+    def associateWithStream(self, xmlstream):
+        """
+        Called by the XmlStreamFactory when a connection has been made.
+
+        Extend L{Authenticator.associateWithStream} to set the L{XmlStream}
+        to be non-initiating.
+        """
+        Authenticator.associateWithStream(self, xmlstream)
+        self.xmlstream.initiating = False
+
+
+    def streamStarted(self, rootElement):
+        """
+        Called by the XmlStream when the stream has started.
+
+        This extends L{Authenticator.streamStarted} to extract further
+        information from the stream headers from C{rootElement}.
+        """
+        Authenticator.streamStarted(self, rootElement)
+
+        self.xmlstream.namespace = rootElement.defaultUri
+
+        if rootElement.hasAttribute("to"):
+            self.xmlstream.thisEntity = jid.internJID(rootElement["to"])
+
+        self.xmlstream.prefixes = {}
+        for prefix, uri in rootElement.localPrefixes.iteritems():
+            self.xmlstream.prefixes[uri] = prefix
+
+        self.xmlstream.sid = unicode(randbytes.secureRandom(8).encode('hex'))
+
+
+
+class FeatureNotAdvertized(Exception):
+    """
+    Exception indicating a stream feature was not advertized, while required by
+    the initiating entity.
+    """
+
+
+
+class BaseFeatureInitiatingInitializer(object):
+    """
+    Base class for initializers with a stream feature.
+
+    This assumes the associated XmlStream represents the initiating entity
+    of the connection.
+
+    @cvar feature: tuple of (uri, name) of the stream feature root element.
+    @type feature: tuple of (C{str}, C{str})
+    @ivar required: whether the stream feature is required to be advertized
+                    by the receiving entity.
+    @type required: C{bool}
+    """
+
+    implements(ijabber.IInitiatingInitializer)
+
+    feature = None
+    required = False
+
+    def __init__(self, xs):
+        self.xmlstream = xs
+
+
+    def initialize(self):
+        """
+        Initiate the initialization.
+
+        Checks if the receiving entity advertizes the stream feature. If it
+        does, the initialization is started. If it is not advertized, and the
+        C{required} instance variable is C{True}, it raises
+        L{FeatureNotAdvertized}. Otherwise, the initialization silently
+        succeeds.
+        """
+
+        if self.feature in self.xmlstream.features:
+            return self.start()
+        elif self.required:
+            raise FeatureNotAdvertized
+        else:
+            return None
+
+
+    def start(self):
+        """
+        Start the actual initialization.
+
+        May return a deferred for asynchronous initialization.
+        """
+
+
+
+class TLSError(Exception):
+    """
+    TLS base exception.
+    """
+
+
+
+class TLSFailed(TLSError):
+    """
+    Exception indicating failed TLS negotiation
+    """
+
+
+
+class TLSRequired(TLSError):
+    """
+    Exception indicating required TLS negotiation.
+
+    This exception is raised when the receiving entity requires TLS
+    negotiation and the initiating entity does not desire to negotiate TLS.
+    """
+
+
+
+class TLSNotSupported(TLSError):
+    """
+    Exception indicating missing TLS support.
+
+    This exception is raised when the initiating entity wants and is required
+    to negotiate TLS, but the OpenSSL library is not available.
+    """
+
+
+
+class TLSInitiatingInitializer(BaseFeatureInitiatingInitializer):
+    """
+    TLS stream initializer for the initiating entity.
+
+    It is strongly recommended to include this initializer in the list of
+    initializers for an XMPP stream. By default it will try to negotiate TLS.
+    An XMPP server may indicate that TLS is required. If TLS is not desired,
+    set the C{wanted} attribute to False instead of removing it from the list
+    of initializers, so a proper exception L{TLSRequired} can be raised.
+
+    @cvar wanted: indicates if TLS negotiation is wanted.
+    @type wanted: C{bool}
+    """
+
+    feature = (NS_XMPP_TLS, 'starttls')
+    wanted = True
+    _deferred = None
+
+    def onProceed(self, obj):
+        """
+        Proceed with TLS negotiation and reset the XML stream.
+        """
+
+        self.xmlstream.removeObserver('/failure', self.onFailure)
+        ctx = ssl.CertificateOptions()
+        self.xmlstream.transport.startTLS(ctx)
+        self.xmlstream.reset()
+        self.xmlstream.sendHeader()
+        self._deferred.callback(Reset)
+
+
+    def onFailure(self, obj):
+        self.xmlstream.removeObserver('/proceed', self.onProceed)
+        self._deferred.errback(TLSFailed())
+
+
+    def start(self):
+        """
+        Start TLS negotiation.
+
+        This checks if the receiving entity requires TLS, the SSL library is
+        available and uses the C{required} and C{wanted} instance variables to
+        determine what to do in the various different cases.
+
+        For example, if the SSL library is not available, but TLS is wanted and
+        required by the user, it raises an exception. However, if TLS is not
+        required by either party, initialization silently succeeds, moving
+        on to the next step.
+        """
+        if self.wanted:
+            if ssl is None:
+                if self.required:
+                    return defer.fail(TLSNotSupported())
+                else:
+                    return defer.succeed(None)
+            else:
+                pass
+        elif self.xmlstream.features[self.feature].required:
+            return defer.fail(TLSRequired())
+        else:
+            return defer.succeed(None)
+
+        self._deferred = defer.Deferred()
+        self.xmlstream.addOnetimeObserver("/proceed", self.onProceed)
+        self.xmlstream.addOnetimeObserver("/failure", self.onFailure)
+        self.xmlstream.send(domish.Element((NS_XMPP_TLS, "starttls")))
+        return self._deferred
+
+
+
+class XmlStream(xmlstream.XmlStream):
+    """
+    XMPP XML Stream protocol handler.
+
+    @ivar version: XML stream version as a tuple (major, minor). Initially,
+                   this is set to the minimally supported version. Upon
+                   receiving the stream header of the peer, it is set to the
+                   minimum of that value and the version on the received
+                   header.
+    @type version: (C{int}, C{int})
+    @ivar namespace: default namespace URI for stream
+    @type namespace: C{unicode}
+    @ivar thisEntity: JID of this entity
+    @type thisEntity: L{JID}
+    @ivar otherEntity: JID of the peer entity
+    @type otherEntity: L{JID}
+    @ivar sid: session identifier
+    @type sid: C{unicode}
+    @ivar initiating: True if this is the initiating stream
+    @type initiating: C{bool}
+    @ivar features: map of (uri, name) to stream features element received from
+                    the receiving entity.
+    @type features: C{dict} of (C{unicode}, C{unicode}) to L{domish.Element}.
+    @ivar prefixes: map of URI to prefixes that are to appear on stream
+                    header.
+    @type prefixes: C{dict} of C{unicode} to C{unicode}
+    @ivar initializers: list of stream initializer objects
+    @type initializers: C{list} of objects that provide L{IInitializer}
+    @ivar authenticator: associated authenticator that uses C{initializers} to
+                         initialize the XML stream.
+    """
+
+    version = (1, 0)
+    namespace = 'invalid'
+    thisEntity = None
+    otherEntity = None
+    sid = None
+    initiating = True
+
+    _headerSent = False     # True if the stream header has been sent
+
+    def __init__(self, authenticator):
+        xmlstream.XmlStream.__init__(self)
+
+        self.prefixes = {NS_STREAMS: 'stream'}
+        self.authenticator = authenticator
+        self.initializers = []
+        self.features = {}
+
+        # Reset the authenticator
+        authenticator.associateWithStream(self)
+
+
+    def _callLater(self, *args, **kwargs):
+        from twisted.internet import reactor
+        return reactor.callLater(*args, **kwargs)
+
+
+    def reset(self):
+        """
+        Reset XML Stream.
+
+        Resets the XML Parser for incoming data. This is to be used after
+        successfully negotiating a new layer, e.g. TLS and SASL. Note that
+        registered event observers will continue to be in place.
+        """
+        self._headerSent = False
+        self._initializeStream()
+
+
+    def onStreamError(self, errelem):
+        """
+        Called when a stream:error element has been received.
+
+        Dispatches a L{STREAM_ERROR_EVENT} event with the error element to
+        allow for cleanup actions and drops the connection.
+
+        @param errelem: The received error element.
+        @type errelem: L{domish.Element}
+        """
+        self.dispatch(failure.Failure(error.exceptionFromStreamError(errelem)),
+                      STREAM_ERROR_EVENT)
+        self.transport.loseConnection()
+
+
+    def sendHeader(self):
+        """
+        Send stream header.
+        """
+        # set up optional extra namespaces
+        localPrefixes = {}
+        for uri, prefix in self.prefixes.iteritems():
+            if uri != NS_STREAMS:
+                localPrefixes[prefix] = uri
+
+        rootElement = domish.Element((NS_STREAMS, 'stream'), self.namespace,
+                                     localPrefixes=localPrefixes)
+
+        if self.otherEntity:
+            rootElement['to'] = self.otherEntity.userhost()
+
+        if self.thisEntity:
+            rootElement['from'] = self.thisEntity.userhost()
+
+        if not self.initiating and self.sid:
+            rootElement['id'] = self.sid
+
+        if self.version >= (1, 0):
+            rootElement['version'] = "%d.%d" % self.version
+
+        self.send(rootElement.toXml(prefixes=self.prefixes, closeElement=0))
+        self._headerSent = True
+
+
+    def sendFooter(self):
+        """
+        Send stream footer.
+        """
+        self.send('</stream:stream>')
+
+
+    def sendStreamError(self, streamError):
+        """
+        Send stream level error.
+
+        If we are the receiving entity, and haven't sent the header yet,
+        we send one first.
+
+        After sending the stream error, the stream is closed and the transport
+        connection dropped.
+
+        @param streamError: stream error instance
+        @type streamError: L{error.StreamError}
+        """
+        if not self._headerSent and not self.initiating:
+            self.sendHeader()
+
+        if self._headerSent:
+            self.send(streamError.getElement())
+            self.sendFooter()
+
+        self.transport.loseConnection()
+
+
+    def send(self, obj):
+        """
+        Send data over the stream.
+
+        This overrides L{xmlstream.XmlStream.send} to use the default namespace
+        of the stream header when serializing L{domish.IElement}s. It is
+        assumed that if you pass an object that provides L{domish.IElement},
+        it represents a direct child of the stream's root element.
+        """
+        if domish.IElement.providedBy(obj):
+            obj = obj.toXml(prefixes=self.prefixes,
+                            defaultUri=self.namespace,
+                            prefixesInScope=self.prefixes.values())
+
+        xmlstream.XmlStream.send(self, obj)
+
+
+    def connectionMade(self):
+        """
+        Called when a connection is made.
+
+        Notifies the authenticator when a connection has been made.
+        """
+        xmlstream.XmlStream.connectionMade(self)
+        self.authenticator.connectionMade()
+
+
+    def onDocumentStart(self, rootElement):
+        """
+        Called when the stream header has been received.
+
+        Extracts the header's C{id} and C{version} attributes from the root
+        element. The C{id} attribute is stored in our C{sid} attribute and the
+        C{version} attribute is parsed and the minimum of the version we sent
+        and the parsed C{version} attribute is stored as a tuple (major, minor)
+        in this class' C{version} attribute. If no C{version} attribute was
+        present, we assume version 0.0.
+
+        If appropriate (we are the initiating stream and the minimum of our and
+        the other party's version is at least 1.0), a one-time observer is
+        registered for getting the stream features. The registered function is
+        C{onFeatures}.
+
+        Ultimately, the authenticator's C{streamStarted} method will be called.
+
+        @param rootElement: The root element.
+        @type rootElement: L{domish.Element}
+        """
+        xmlstream.XmlStream.onDocumentStart(self, rootElement)
+
+        # Setup observer for stream errors
+        self.addOnetimeObserver("/error[@xmlns='%s']" % NS_STREAMS,
+                                self.onStreamError)
+
+        self.authenticator.streamStarted(rootElement)
+
+
+
+class XmlStreamFactory(xmlstream.XmlStreamFactory):
+    """
+    Factory for Jabber XmlStream objects as a reconnecting client.
+
+    Note that this differs from L{xmlstream.XmlStreamFactory} in that
+    it generates Jabber specific L{XmlStream} instances that have
+    authenticators.
+    """
+
+    protocol = XmlStream
+
+    def __init__(self, authenticator):
+        xmlstream.XmlStreamFactory.__init__(self, authenticator)
+        self.authenticator = authenticator
+
+
+
+class XmlStreamServerFactory(xmlstream.BootstrapMixin,
+                             protocol.ServerFactory):
+    """
+    Factory for Jabber XmlStream objects as a server.
+
+    @since: 8.2.
+    @ivar authenticatorFactory: Factory callable that takes no arguments, to
+                                create a fresh authenticator to be associated
+                                with the XmlStream.
+    """
+
+    protocol = XmlStream
+
+    def __init__(self, authenticatorFactory):
+        xmlstream.BootstrapMixin.__init__(self)
+        self.authenticatorFactory = authenticatorFactory
+
+
+    def buildProtocol(self, addr):
+        """
+        Create an instance of XmlStream.
+
+        A new authenticator instance will be created and passed to the new
+        XmlStream. Registered bootstrap event observers are installed as well.
+        """
+        authenticator = self.authenticatorFactory()
+        xs = self.protocol(authenticator)
+        xs.factory = self
+        self.installBootstraps(xs)
+        return xs
+
+
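+# Editor's note: the following is an illustrative sketch, not part of upstream
+# Twisted. It shows how XmlStreamServerFactory might be wired up, assuming
+# C{from twisted.internet import reactor}; the port and the bootstrap callback
+# are made up for the example.
+#
+#     def onConnected(xs):
+#         print 'stream connected:', xs
+#
+#     serverFactory = XmlStreamServerFactory(ListenAuthenticator)
+#     serverFactory.addBootstrap(STREAM_CONNECTED_EVENT, onConnected)
+#     reactor.listenTCP(5269, serverFactory)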
+
+class TimeoutError(Exception):
+    """
+    Exception raised when no IQ response has been received before the
+    configured timeout.
+    """
+
+
+
+def upgradeWithIQResponseTracker(xs):
+    """
+    Enhances an XmlStream for iq response tracking.
+
+    This makes an L{XmlStream} object provide L{IIQResponseTracker}. When a
+    response is an error iq stanza, the deferred has its errback invoked with a
+    failure that holds a L{StanzaException<error.StanzaException>} that is
+    easier to examine.
+    """
+    def callback(iq):
+        """
+        Handle iq response by firing associated deferred.
+        """
+        if getattr(iq, 'handled', False):
+            return
+
+        try:
+            d = xs.iqDeferreds[iq["id"]]
+        except KeyError:
+            pass
+        else:
+            del xs.iqDeferreds[iq["id"]]
+            iq.handled = True
+            if iq['type'] == 'error':
+                d.errback(error.exceptionFromStanza(iq))
+            else:
+                d.callback(iq)
+
+
+    def disconnected(_):
+        """
+        Make sure deferreds do not linger on after disconnect.
+
+        This errbacks all deferreds of iq's for which no response has been
+        received with a L{ConnectionLost} failure. Otherwise, the deferreds
+        will never be fired.
+        """
+        iqDeferreds = xs.iqDeferreds
+        xs.iqDeferreds = {}
+        for d in iqDeferreds.itervalues():
+            d.errback(ConnectionLost())
+
+    xs.iqDeferreds = {}
+    xs.iqDefaultTimeout = getattr(xs, 'iqDefaultTimeout', None)
+    xs.addObserver(xmlstream.STREAM_END_EVENT, disconnected)
+    xs.addObserver('/iq[@type="result"]', callback)
+    xs.addObserver('/iq[@type="error"]', callback)
+    directlyProvides(xs, ijabber.IIQResponseTracker)
+
+
+
+class IQ(domish.Element):
+    """
+    Wrapper for an iq stanza.
+
+    Iq stanzas are used for communications with a request-response behaviour.
+    Each iq request is associated with an XML stream and has its own unique id
+    to be able to track the response.
+
+    @ivar timeout: if set, a timeout period after which the deferred returned
+                   by C{send} will have its errback called with a
+                   L{TimeoutError} failure.
+    @type timeout: C{float}
+    """
+
+    timeout = None
+
+    def __init__(self, xmlstream, stanzaType="set"):
+        """
+        @type xmlstream: L{xmlstream.XmlStream}
+        @param xmlstream: XmlStream to use for transmission of this IQ
+
+        @type stanzaType: C{str}
+        @param stanzaType: IQ type identifier ('get' or 'set')
+        """
+        domish.Element.__init__(self, (None, "iq"))
+        self.addUniqueId()
+        self["type"] = stanzaType
+        self._xmlstream = xmlstream
+
+
+    def send(self, to=None):
+        """
+        Send out this iq.
+
+        Returns a deferred that is fired when an iq response with the same id
+        is received. Result responses will be passed to the deferred callback.
+        Error responses will be transformed into a
+        L{StanzaError<error.StanzaError>} and result in the errback of the
+        deferred being invoked.
+
+        @rtype: L{defer.Deferred}
+        """
+        if to is not None:
+            self["to"] = to
+
+        if not ijabber.IIQResponseTracker.providedBy(self._xmlstream):
+            upgradeWithIQResponseTracker(self._xmlstream)
+
+        d = defer.Deferred()
+        self._xmlstream.iqDeferreds[self['id']] = d
+
+        timeout = self.timeout or self._xmlstream.iqDefaultTimeout
+        if timeout is not None:
+            def onTimeout():
+                del self._xmlstream.iqDeferreds[self['id']]
+                d.errback(TimeoutError("IQ timed out"))
+
+            call = self._xmlstream._callLater(timeout, onTimeout)
+
+            def cancelTimeout(result):
+                if call.active():
+                    call.cancel()
+
+                return result
+
+            d.addBoth(cancelTimeout)
+
+        self._xmlstream.send(self)
+        return d
+
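+# Editor's note: illustrative sketch only, not part of upstream Twisted. It
+# shows the request/response pattern provided by IQ above; the namespace and
+# addressing values are examples, and xs is assumed to be a connected,
+# initialized XmlStream.
+#
+#     def queryVersion(xs):
+#         iq = IQ(xs, 'get')
+#         iq.addElement(('jabber:iq:version', 'query'))
+#         d = iq.send(to='example.org')
+#
+#         def onResult(response):
+#             return response            # the result iq stanza (domish.Element)
+#
+#         def onError(reason):
+#             reason.trap(error.StanzaError, TimeoutError)
+#
+#         return d.addCallbacks(onResult, onError)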
+
+
+def toResponse(stanza, stanzaType=None):
+    """
+    Create a response stanza from another stanza.
+
+    This takes the addressing and id attributes from a stanza to create a (new,
+    empty) response stanza. The addressing attributes are swapped and the id
+    copied. Optionally, the stanza type of the response can be specified.
+
+    @param stanza: the original stanza
+    @type stanza: L{domish.Element}
+    @param stanzaType: optional response stanza type
+    @type stanzaType: C{str}
+    @return: the response stanza.
+    @rtype: L{domish.Element}
+    """
+
+    toAddr = stanza.getAttribute('from')
+    fromAddr = stanza.getAttribute('to')
+    stanzaID = stanza.getAttribute('id')
+
+    response = domish.Element((None, stanza.name))
+    if toAddr:
+        response['to'] = toAddr
+    if fromAddr:
+        response['from'] = fromAddr
+    if stanzaID:
+        response['id'] = stanzaID
+    if stanzaType:
+        response['type'] = stanzaType
+
+    return response
+
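+# Editor's note: illustrative sketch, not part of upstream Twisted. A typical
+# use of toResponse is answering an incoming iq get; the namespace and payload
+# below are examples, and xs is assumed to be an XmlStream in scope.
+#
+#     def onVersionGet(iq):
+#         reply = toResponse(iq, 'result')
+#         query = reply.addElement(('jabber:iq:version', 'query'))
+#         query.addElement('name', content='ExampleClient')
+#         query.addElement('version', content='0.1')
+#         xs.send(reply)
+#         iq.handled = True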
+
+
+class XMPPHandler(object):
+    """
+    XMPP protocol handler.
+
+    Classes derived from this class implement (part of) one or more XMPP
+    extension protocols, and are referred to as a subprotocol implementation.
+    """
+
+    implements(ijabber.IXMPPHandler)
+
+    def __init__(self):
+        self.parent = None
+        self.xmlstream = None
+
+
+    def setHandlerParent(self, parent):
+        self.parent = parent
+        self.parent.addHandler(self)
+
+
+    def disownHandlerParent(self, parent):
+        self.parent.removeHandler(self)
+        self.parent = None
+
+
+    def makeConnection(self, xs):
+        self.xmlstream = xs
+        self.connectionMade()
+
+
+    def connectionMade(self):
+        """
+        Called after a connection has been established.
+
+        Can be overridden to perform work before stream initialization.
+        """
+
+
+    def connectionInitialized(self):
+        """
+        The XML stream has been initialized.
+
+        Can be overridden to perform work after stream initialization, e.g. to
+        set up observers and start exchanging XML stanzas.
+        """
+
+
+    def connectionLost(self, reason):
+        """
+        The XML stream has been closed.
+
+        This method can be extended to inspect the C{reason} argument and
+        act on it.
+        """
+        self.xmlstream = None
+
+
+    def send(self, obj):
+        """
+        Send data over the managed XML stream.
+
+        @note: The stream manager maintains a queue for data sent using this
+               method when there is no current initialized XML stream. This
+               data is then sent as soon as a new stream has been established
+               and initialized. Subsequently, L{connectionInitialized} will be
+               called again. If this queueing is not desired, use C{send} on
+               C{self.xmlstream}.
+
+        @param obj: data to be sent over the XML stream. This is usually an
+                    object providing L{domish.IElement}, or serialized XML. See
+                    L{xmlstream.XmlStream} for details.
+        """
+        self.parent.send(obj)
+
+
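+# Editor's note: illustrative sketch, not part of upstream Twisted. A minimal
+# subprotocol handler built on XMPPHandler above might look like this; the
+# observer XPath and reply payload are assumptions.
+#
+#     class PingHandler(XMPPHandler):
+#         def connectionInitialized(self):
+#             # start watching for chat messages once the stream is ready
+#             self.xmlstream.addObserver('/message[@type="chat"]', self.onMessage)
+#
+#         def onMessage(self, message):
+#             reply = domish.Element((None, 'message'))
+#             reply['to'] = message['from']
+#             reply['type'] = 'chat'
+#             reply.addElement('body', content=u'pong')
+#             self.send(reply)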
+
+class XMPPHandlerCollection(object):
+    """
+    Collection of XMPP subprotocol handlers.
+
+    This allows for grouping of subprotocol handlers, but is not an
+    L{XMPPHandler} itself, so this is not recursive.
+
+    @ivar handlers: List of protocol handlers.
+    @type handlers: C{list} of objects providing
+                      L{IXMPPHandler}
+    """
+
+    implements(ijabber.IXMPPHandlerCollection)
+
+    def __init__(self):
+        self.handlers = []
+
+
+    def __iter__(self):
+        """
+        Act as a container for handlers.
+        """
+        return iter(self.handlers)
+
+
+    def addHandler(self, handler):
+        """
+        Add protocol handler.
+
+        Protocol handlers are expected to provide L{ijabber.IXMPPHandler}.
+        """
+        self.handlers.append(handler)
+
+
+    def removeHandler(self, handler):
+        """
+        Remove protocol handler.
+        """
+        self.handlers.remove(handler)
+
+
+
+class StreamManager(XMPPHandlerCollection):
+    """
+    Business logic representing a managed XMPP connection.
+
+    This maintains a single XMPP connection and provides facilities for packet
+    routing and transmission. Business logic modules are objects providing
+    L{ijabber.IXMPPHandler} (like subclasses of L{XMPPHandler}), and added
+    using L{addHandler}.
+
+    @ivar xmlstream: currently managed XML stream
+    @type xmlstream: L{XmlStream}
+    @ivar logTraffic: if true, log all traffic.
+    @type logTraffic: C{bool}
+    @ivar _initialized: Whether the stream represented by L{xmlstream} has
+                        been initialized. This is used when caching outgoing
+                        stanzas.
+    @type _initialized: C{bool}
+    @ivar _packetQueue: internal buffer of unsent data. See L{send} for details.
+    @type _packetQueue: C{list}
+    """
+
+    logTraffic = False
+
+    def __init__(self, factory):
+        XMPPHandlerCollection.__init__(self)
+        self.xmlstream = None
+        self._packetQueue = []
+        self._initialized = False
+
+        factory.addBootstrap(STREAM_CONNECTED_EVENT, self._connected)
+        factory.addBootstrap(STREAM_AUTHD_EVENT, self._authd)
+        factory.addBootstrap(INIT_FAILED_EVENT, self.initializationFailed)
+        factory.addBootstrap(STREAM_END_EVENT, self._disconnected)
+        self.factory = factory
+
+
+    def addHandler(self, handler):
+        """
+        Add protocol handler.
+
+        When an XML stream has already been established, the handler's
+        C{connectionInitialized} will be called to get it up to speed.
+        """
+        XMPPHandlerCollection.addHandler(self, handler)
+
+        # get protocol handler up to speed when a connection has already
+        # been established
+        if self.xmlstream and self._initialized:
+            handler.makeConnection(self.xmlstream)
+            handler.connectionInitialized()
+
+
+    def _connected(self, xs):
+        """
+        Called when the transport connection has been established.
+
+        Here we optionally set up traffic logging (depending on L{logTraffic})
+        and call each handler's C{makeConnection} method with the L{XmlStream}
+        instance.
+        """
+        def logDataIn(buf):
+            log.msg("RECV: %r" % buf)
+
+        def logDataOut(buf):
+            log.msg("SEND: %r" % buf)
+
+        if self.logTraffic:
+            xs.rawDataInFn = logDataIn
+            xs.rawDataOutFn = logDataOut
+
+        self.xmlstream = xs
+
+        for e in self:
+            e.makeConnection(xs)
+
+
+    def _authd(self, xs):
+        """
+        Called when the stream has been initialized.
+
+        Send out cached stanzas and call each handler's
+        C{connectionInitialized} method.
+        """
+        # Flush all pending packets
+        for p in self._packetQueue:
+            xs.send(p)
+        self._packetQueue = []
+        self._initialized = True
+
+        # Notify all child services which implement
+        # the IService interface
+        for e in self:
+            e.connectionInitialized()
+
+
+    def initializationFailed(self, reason):
+        """
+        Called when stream initialization has failed.
+
+        Stream initialization has halted, with the reason indicated by
+        C{reason}. It may be retried by calling the authenticator's
+        C{initializeStream}. See the respective authenticators for details.
+
+        @param reason: A failure instance indicating why stream initialization
+                       failed.
+        @type reason: L{failure.Failure}
+        """
+
+
+    def _disconnected(self, reason):
+        """
+        Called when the stream has been closed.
+
+        From this point on, the manager doesn't interact with the
+        L{XmlStream} anymore and notifies each handler that the connection
+        was lost by calling its C{connectionLost} method.
+        """
+        self.xmlstream = None
+        self._initialized = False
+
+        # Notify all child services which implement
+        # the IService interface
+        for e in self:
+            e.connectionLost(reason)
+
+
+    def send(self, obj):
+        """
+        Send data over the XML stream.
+
+        When there is no established XML stream, the data is queued and sent
+        out when a new XML stream has been established and initialized.
+
+        @param obj: data to be sent over the XML stream. See
+                    L{xmlstream.XmlStream.send} for details.
+        """
+        if self._initialized:
+            self.xmlstream.send(obj)
+        else:
+            self._packetQueue.append(obj)
+
+
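+# Editor's note: illustrative sketch, not part of upstream Twisted. It shows
+# how StreamManager is typically combined with a client factory and handlers;
+# the JID, password, host and the PingHandler name (sketched earlier) are
+# assumptions.
+#
+#     from twisted.internet import reactor
+#     from twisted.words.protocols.jabber import client, jid
+#
+#     factory = client.XMPPClientFactory(jid.JID(u'user@example.org/Home'),
+#                                        u'secret')
+#     manager = StreamManager(factory)
+#     manager.addHandler(PingHandler())   # any ijabber.IXMPPHandler provider
+#     reactor.connectTCP('example.org', 5222, factory)
+#     reactor.run()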
+
+__all__ = ['Authenticator', 'BaseFeatureInitiatingInitializer',
+           'ConnectAuthenticator', 'FeatureNotAdvertized',
+           'INIT_FAILED_EVENT', 'IQ', 'ListenAuthenticator', 'NS_STREAMS',
+           'NS_XMPP_TLS', 'Reset', 'STREAM_AUTHD_EVENT',
+           'STREAM_CONNECTED_EVENT', 'STREAM_END_EVENT', 'STREAM_ERROR_EVENT',
+           'STREAM_START_EVENT', 'StreamManager', 'TLSError', 'TLSFailed',
+           'TLSInitiatingInitializer', 'TLSNotSupported', 'TLSRequired',
+           'TimeoutError', 'XMPPHandler', 'XMPPHandlerCollection', 'XmlStream',
+           'XmlStreamFactory', 'XmlStreamServerFactory', 'hashPassword',
+           'toResponse', 'upgradeWithIQResponseTracker']
diff --git a/ThirdParty/Twisted/twisted/words/protocols/jabber/xmpp_stringprep.py b/ThirdParty/Twisted/twisted/words/protocols/jabber/xmpp_stringprep.py
new file mode 100644
index 0000000..7527412
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/jabber/xmpp_stringprep.py
@@ -0,0 +1,253 @@
+# -*- test-case-name: twisted.words.test.test_jabberxmppstringprep -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys, warnings
+from zope.interface import Interface, implements
+
+if sys.version_info < (2,3,2):
+    import re
+
+    class IDNA:
+        dots = re.compile(u"[\u002E\u3002\uFF0E\uFF61]")
+        def nameprep(self, label):
+            return label.lower()
+
+    idna = IDNA()
+
+    crippled = True
+
+    warnings.warn("Accented and non-Western Jabber IDs will not be properly "
+                  "case-folded with this version of Python, resulting in "
+                  "incorrect protocol-level behavior.  It is strongly "
+                  "recommended you upgrade to Python 2.3.2 or newer if you "
+                  "intend to use Twisted's Jabber support.")
+
+else:
+    import stringprep
+    # We require Unicode version 3.2. Python 2.5 and later provide this as
+    # a separate object. Before that the unicodedata module uses 3.2.
+    try:
+        from unicodedata import ucd_3_2_0 as unicodedata
+    except ImportError:
+        import unicodedata
+    from encodings import idna
+
+    crippled = False
+
+del sys, warnings
+
+class ILookupTable(Interface):
+    """ Interface for character lookup classes. """
+
+    def lookup(c):
+        """ Return whether character is in this table. """
+
+class IMappingTable(Interface):
+    """ Interface for character mapping classes. """
+
+    def map(c):
+        """ Return mapping for character. """
+
+class LookupTableFromFunction:
+
+    implements(ILookupTable)
+
+    def __init__(self, in_table_function):
+        self.lookup = in_table_function
+
+class LookupTable:
+
+    implements(ILookupTable)
+
+    def __init__(self, table):
+        self._table = table
+
+    def lookup(self, c):
+        return c in self._table
+
+class MappingTableFromFunction:
+
+    implements(IMappingTable)
+
+    def __init__(self, map_table_function):
+        self.map = map_table_function
+
+class EmptyMappingTable:
+
+    implements(IMappingTable)
+
+    def __init__(self, in_table_function):
+        self._in_table_function = in_table_function
+
+    def map(self, c):
+        if self._in_table_function(c):
+            return None
+        else:
+            return c
+
+class Profile:
+    def __init__(self, mappings=[],  normalize=True, prohibiteds=[],
+                       check_unassigneds=True, check_bidi=True):
+        self.mappings = mappings
+        self.normalize = normalize
+        self.prohibiteds = prohibiteds
+        self.do_check_unassigneds = check_unassigneds
+        self.do_check_bidi = check_bidi
+
+    def prepare(self, string):
+        result = self.map(string)
+        if self.normalize:
+            result = unicodedata.normalize("NFKC", result)
+        self.check_prohibiteds(result)
+        if self.do_check_unassigneds:
+            self.check_unassigneds(result)
+        if self.do_check_bidi:
+            self.check_bidirectionals(result)
+        return result
+
+    def map(self, string):
+        result = []
+
+        for c in string:
+            result_c = c
+
+            for mapping in self.mappings:
+                result_c = mapping.map(c)
+                if result_c != c:
+                    break
+
+            if result_c is not None:
+                result.append(result_c)
+
+        return u"".join(result)
+
+    def check_prohibiteds(self, string):
+        for c in string:
+            for table in self.prohibiteds:
+                if table.lookup(c):
+                    raise UnicodeError, "Invalid character %s" % repr(c)
+
+    def check_unassigneds(self, string):
+        for c in string:
+            if stringprep.in_table_a1(c):
+                raise UnicodeError, "Unassigned code point %s" % repr(c)
+
+    def check_bidirectionals(self, string):
+        found_LCat = False
+        found_RandALCat = False
+
+        for c in string:
+            if stringprep.in_table_d1(c):
+                found_RandALCat = True
+            if stringprep.in_table_d2(c):
+                found_LCat = True
+
+        if found_LCat and found_RandALCat:
+            raise UnicodeError, "Violation of BIDI Requirement 2"
+
+        if found_RandALCat and not (stringprep.in_table_d1(string[0]) and
+                                    stringprep.in_table_d1(string[-1])):
+            raise UnicodeError, "Violation of BIDI Requirement 3"
+
+
+class NamePrep:
+    """ Implements preparation of internationalized domain names.
+
+    This class implements preparing internationalized domain names using the
+    rules defined in RFC 3491, section 4 (Conversion operations).
+
+    We do not perform step 4 since we deal with unicode representations of
+    domain names and do not convert from or to ASCII representations using
+    punycode encoding. When such a conversion is needed, the C{idna} standard
+    library provides the C{ToUnicode()} and C{ToASCII()} functions. Note that
+    C{idna} itself assumes UseSTD3ASCIIRules to be false.
+
+    The following steps are performed by C{prepare()}:
+
+      - Split the domain name in labels at the dots (RFC 3490, 3.1)
+      - Apply nameprep proper on each label (RFC 3491)
+      - Enforce the restrictions on ASCII characters in host names by
+        assuming STD3ASCIIRules to be true. (STD 3)
+      - Rejoin the labels using the label separator U+002E (full stop).
+
+    """
+
+    # Prohibited characters.
+    prohibiteds = [unichr(n) for n in range(0x00, 0x2c + 1) +
+                                       range(0x2e, 0x2f + 1) +
+                                       range(0x3a, 0x40 + 1) +
+                                       range(0x5b, 0x60 + 1) +
+                                       range(0x7b, 0x7f + 1) ]
+
+    def prepare(self, string):
+        result = []
+
+        labels = idna.dots.split(string)
+
+        if labels and len(labels[-1]) == 0:
+            trailing_dot = '.'
+            del labels[-1]
+        else:
+            trailing_dot = ''
+
+        for label in labels:
+            result.append(self.nameprep(label))
+
+        return ".".join(result) + trailing_dot
+
+    def check_prohibiteds(self, string):
+        for c in string:
+            if c in self.prohibiteds:
+                raise UnicodeError, "Invalid character %s" % repr(c)
+
+    def nameprep(self, label):
+        label = idna.nameprep(label)
+        self.check_prohibiteds(label)
+        if label[0] == '-':
+            raise UnicodeError, "Invalid leading hyphen-minus"
+        if label[-1] == '-':
+            raise UnicodeError, "Invalid trailing hyphen-minus"
+        return label
+
+if crippled:
+    case_map = MappingTableFromFunction(lambda c: c.lower())
+    nodeprep = Profile(mappings=[case_map],
+                       normalize=False,
+                       prohibiteds=[LookupTable([u' ', u'"', u'&', u"'", u'/',
+                                                 u':', u'<', u'>', u'@'])],
+                       check_unassigneds=False,
+                       check_bidi=False)
+
+    resourceprep = Profile(normalize=False,
+                           check_unassigneds=False,
+                           check_bidi=False)
+
+else:
+    C_11 = LookupTableFromFunction(stringprep.in_table_c11)
+    C_12 = LookupTableFromFunction(stringprep.in_table_c12)
+    C_21 = LookupTableFromFunction(stringprep.in_table_c21)
+    C_22 = LookupTableFromFunction(stringprep.in_table_c22)
+    C_3 = LookupTableFromFunction(stringprep.in_table_c3)
+    C_4 = LookupTableFromFunction(stringprep.in_table_c4)
+    C_5 = LookupTableFromFunction(stringprep.in_table_c5)
+    C_6 = LookupTableFromFunction(stringprep.in_table_c6)
+    C_7 = LookupTableFromFunction(stringprep.in_table_c7)
+    C_8 = LookupTableFromFunction(stringprep.in_table_c8)
+    C_9 = LookupTableFromFunction(stringprep.in_table_c9)
+
+    B_1 = EmptyMappingTable(stringprep.in_table_b1)
+    B_2 = MappingTableFromFunction(stringprep.map_table_b2)
+
+    nodeprep = Profile(mappings=[B_1, B_2],
+                       prohibiteds=[C_11, C_12, C_21, C_22,
+                                    C_3, C_4, C_5, C_6, C_7, C_8, C_9,
+                                    LookupTable([u'"', u'&', u"'", u'/',
+                                                 u':', u'<', u'>', u'@'])])
+
+    resourceprep = Profile(mappings=[B_1,],
+                           prohibiteds=[C_12, C_21, C_22,
+                                        C_3, C_4, C_5, C_6, C_7, C_8, C_9])
+
+nameprep = NamePrep()
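+
+# Editor's note: illustrative usage sketch, not part of upstream Twisted. The
+# three profiles above prepare the parts of a JID:
+#
+#     nodeprep.prepare(u'UserName')       # -> u'username' (node part is case-folded)
+#     nameprep.prepare(u'Example.ORG')    # -> u'example.org' (domain labels)
+#     resourceprep.prepare(u'Home')       # -> u'Home' (resource keeps its case)
+#     nodeprep.prepare(u'user@host')      # raises UnicodeError: '@' is prohibited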
diff --git a/ThirdParty/Twisted/twisted/words/protocols/msn.py b/ThirdParty/Twisted/twisted/words/protocols/msn.py
new file mode 100644
index 0000000..79c0fa1
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/msn.py
@@ -0,0 +1,2479 @@
+# -*- test-case-name: twisted.words.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+MSNP8 Protocol (client only) - semi-experimental
+
+This module provides support for clients using the MSN Protocol (MSNP8).
+There are basically 3 servers involved in any MSN session:
+
+I{Dispatch server}
+
+The DispatchClient class handles connections to the
+dispatch server, which basically delegates users to a
+suitable notification server.
+
+You will want to subclass this and handle the gotNotificationReferral
+method appropriately.
+
+I{Notification Server}
+
+The NotificationClient class handles connections to the
+notification server, which acts as a session server
+(state updates, message negotiation etc...)
+
+I{Switchboard Server}
+
+The SwitchboardClient handles connections to switchboard
+servers which are used to conduct conversations with other users.
+
+There are also two classes (FileSend and FileReceive) used
+for file transfers.
+
+Clients handle events in two ways.
+
+  - each client request requiring a response will return a Deferred,
+    the callback for same will be fired when the server sends the
+    required response
+  - Events which are not in response to any client request have
+    respective methods which should be overridden and handled in
+    an adequate manner
+
+Most client request callbacks require more than one argument,
+and since Deferreds can only pass the callback one result,
+most of the time the callback argument will be a tuple of
+values (documented in the respective request method).
+To make reading/writing code easier, callbacks can be defined in
+a number of ways to handle this 'cleanly'. One way would be to
+define methods like: def callBack(self, (arg1, arg2, arg)): ...
+another way would be to do something like:
+d.addCallback(lambda result: myCallback(*result)).
+
+If the server sends an error response to a client request,
+the errback of the corresponding Deferred will be called,
+the argument being the corresponding error code.
+
+B{NOTE}:
+Due to the lack of an official spec for MSNP8, more checking than
+may seem strictly necessary often takes place, on the assumption that the
+server is never 'wrong'. Thus, if gotBadLine (in any of the 3
+main clients) is called, or an MSNProtocolError is raised, it's
+probably a good idea to submit a bug report. ;)
+Use of this module requires that PyOpenSSL is installed.
+
+TODO
+====
+- check message hooks with invalid x-msgsinvite messages.
+- font handling
+- switchboard factory
+
+@author: Sam Jordan
+"""
+
+import types, operator, os
+from random import randint
+from urllib import quote, unquote
+
+from twisted.python import failure, log
+from twisted.python.hashlib import md5
+from twisted.internet import reactor
+from twisted.internet.defer import Deferred, execute
+from twisted.internet.protocol import ClientFactory
+try:
+    from twisted.internet.ssl import ClientContextFactory
+except ImportError:
+    ClientContextFactory = None
+from twisted.protocols.basic import LineReceiver
+from twisted.web.http import HTTPClient
+
+
+MSN_PROTOCOL_VERSION = "MSNP8 CVR0"       # protocol version
+MSN_PORT             = 1863               # default dispatch server port
+MSN_MAX_MESSAGE      = 1664               # max message length
+MSN_CHALLENGE_STR    = "Q1P7W2E4J9R8U3S5" # used for server challenges
+MSN_CVR_STR          = "0x0409 win 4.10 i386 MSNMSGR 5.0.0544 MSMSGS" # :(
+
+# auth constants
+LOGIN_SUCCESS  = 1
+LOGIN_FAILURE  = 2
+LOGIN_REDIRECT = 3
+
+# list constants
+FORWARD_LIST = 1
+ALLOW_LIST   = 2
+BLOCK_LIST   = 4
+REVERSE_LIST = 8
+
+# phone constants
+HOME_PHONE   = "PHH"
+WORK_PHONE   = "PHW"
+MOBILE_PHONE = "PHM"
+HAS_PAGER    = "MOB"
+
+# status constants
+STATUS_ONLINE  = 'NLN'
+STATUS_OFFLINE = 'FLN'
+STATUS_HIDDEN  = 'HDN'
+STATUS_IDLE    = 'IDL'
+STATUS_AWAY    = 'AWY'
+STATUS_BUSY    = 'BSY'
+STATUS_BRB     = 'BRB'
+STATUS_PHONE   = 'PHN'
+STATUS_LUNCH   = 'LUN'
+
+CR = "\r"
+LF = "\n"
+
+
+class SSLRequired(Exception):
+    """
+    This exception is raised when it is necessary to talk to a passport server
+    using SSL, but the necessary SSL dependencies are unavailable.
+
+    @since: 11.0
+    """
+
+
+
+def checkParamLen(num, expected, cmd, error=None):
+    if error == None:
+        error = "Invalid Number of Parameters for %s" % cmd
+    if num != expected:
+        raise MSNProtocolError, error
+
+def _parseHeader(h, v):
+    """
+    Split a certain number of known
+    header values with the format:
+    field1=val,field2=val,field3=val into
+    a dict mapping fields to values.
+    @param h: the header's key
+    @param v: the header's value as a string
+    """
+
+    if h in ('passporturls','authentication-info','www-authenticate'):
+        v = v.replace('Passport1.4','').lstrip()
+        fields = {}
+        for fieldPair in v.split(','):
+            try:
+                field,value = fieldPair.split('=',1)
+                fields[field.lower()] = value
+            except ValueError:
+                fields[field.lower()] = ''
+        return fields
+    else:
+        return v
+
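+# Editor's note (illustrative, not upstream): for one of the recognised header
+# names the helper above turns a comma-separated field list into a dict, e.g.
+#
+#     _parseHeader('passporturls',
+#                  'DARealm=Passport.Net,DALogin=login.example.com/login.srf')
+#     # -> {'darealm': 'Passport.Net', 'dalogin': 'login.example.com/login.srf'}
+#
+# while any other header name is passed through unchanged.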
+def _parsePrimitiveHost(host):
+    # Ho Ho Ho
+    h,p = host.replace('https://','').split('/',1)
+    p = '/' + p
+    return h,p
+
+
+def _login(userHandle, passwd, nexusServer, cached=0, authData=''):
+    """
+    This function is used internally and should not ever be called
+    directly.
+
+    @raise SSLRequired: If there is no SSL support available.
+    """
+    if ClientContextFactory is None:
+        raise SSLRequired(
+            'Connecting to the Passport server requires SSL, but SSL is '
+            'unavailable.')
+
+    cb = Deferred()
+    def _cb(server, auth):
+        loginFac = ClientFactory()
+        loginFac.protocol = lambda : PassportLogin(cb, userHandle, passwd, server, auth)
+        reactor.connectSSL(_parsePrimitiveHost(server)[0], 443, loginFac, ClientContextFactory())
+
+    if cached:
+        _cb(nexusServer, authData)
+    else:
+        fac = ClientFactory()
+        d = Deferred()
+        d.addCallbacks(_cb, callbackArgs=(authData,))
+        d.addErrback(lambda f: cb.errback(f))
+        fac.protocol = lambda : PassportNexus(d, nexusServer)
+        reactor.connectSSL(_parsePrimitiveHost(nexusServer)[0], 443, fac, ClientContextFactory())
+    return cb
+
+
+class PassportNexus(HTTPClient):
+
+    """
+    Used to obtain the URL of a valid passport
+    login HTTPS server.
+
+    This class is used internally and should
+    not be instantiated directly -- that is,
+    the passport login process is handled
+    transparently by NotificationClient.
+    """
+
+    def __init__(self, deferred, host):
+        self.deferred = deferred
+        self.host, self.path = _parsePrimitiveHost(host)
+
+    def connectionMade(self):
+        HTTPClient.connectionMade(self)
+        self.sendCommand('GET', self.path)
+        self.sendHeader('Host', self.host)
+        self.endHeaders()
+        self.headers = {}
+
+    def handleHeader(self, header, value):
+        h = header.lower()
+        self.headers[h] = _parseHeader(h, value)
+
+    def handleEndHeaders(self):
+        if self.connected:
+            self.transport.loseConnection()
+        if 'passporturls' not in self.headers or 'dalogin' not in self.headers['passporturls']:
+            self.deferred.errback(failure.Failure(failure.DefaultException("Invalid Nexus Reply")))
+            return
+        self.deferred.callback('https://' + self.headers['passporturls']['dalogin'])
+
+    def handleResponse(self, r):
+        pass
+
+class PassportLogin(HTTPClient):
+    """
+    This class is used internally to obtain
+    a login ticket from a passport HTTPS
+    server -- it should not be used directly.
+    """
+
+    _finished = 0
+
+    def __init__(self, deferred, userHandle, passwd, host, authData):
+        self.deferred = deferred
+        self.userHandle = userHandle
+        self.passwd = passwd
+        self.authData = authData
+        self.host, self.path = _parsePrimitiveHost(host)
+
+    def connectionMade(self):
+        self.sendCommand('GET', self.path)
+        self.sendHeader('Authorization', 'Passport1.4 OrgVerb=GET,OrgURL=http://messenger.msn.com,' +
+                                         'sign-in=%s,pwd=%s,%s' % (quote(self.userHandle), self.passwd,self.authData))
+        self.sendHeader('Host', self.host)
+        self.endHeaders()
+        self.headers = {}
+
+    def handleHeader(self, header, value):
+        h = header.lower()
+        self.headers[h] = _parseHeader(h, value)
+
+    def handleEndHeaders(self):
+        if self._finished:
+            return
+        self._finished = 1 # I think we need this because of HTTPClient
+        if self.connected:
+            self.transport.loseConnection()
+        authHeader = 'authentication-info'
+        _interHeader = 'www-authenticate'
+        if _interHeader in self.headers:
+            authHeader = _interHeader
+        try:
+            info = self.headers[authHeader]
+            status = info['da-status']
+            handler = getattr(self, 'login_%s' % (status,), None)
+            if handler:
+                handler(info)
+            else:
+                raise Exception()
+        except Exception, e:
+            self.deferred.errback(failure.Failure(e))
+
+    def handleResponse(self, r):
+        pass
+
+    def login_success(self, info):
+        ticket = info['from-pp']
+        ticket = ticket[1:len(ticket)-1]
+        self.deferred.callback((LOGIN_SUCCESS, ticket))
+
+    def login_failed(self, info):
+        self.deferred.callback((LOGIN_FAILURE, unquote(info['cbtxt'])))
+
+    def login_redir(self, info):
+        self.deferred.callback((LOGIN_REDIRECT, self.headers['location'], self.authData))
+
+
+class MSNProtocolError(Exception):
+    """
+    This Exception is basically used for debugging
+    purposes, as the official MSN server should never
+    send anything _wrong_ and nobody in their right
+    mind would run their B{own} MSN server.
+    If it is raised by default command handlers
+    (handle_BLAH) the error will be logged.
+    """
+    pass
+
+
+class MSNCommandFailed(Exception):
+    """
+    The server said that the command failed.
+    """
+
+    def __init__(self, errorCode):
+        self.errorCode = errorCode
+
+    def __str__(self):
+        return ("Command failed: %s (error code %d)"
+                % (errorCodes[self.errorCode], self.errorCode))
+
+
+class MSNMessage:
+    """
+    I am the class used to represent an 'instant' message.
+
+    @ivar userHandle: The user handle (passport) of the sender
+                      (this is only used when receiving a message)
+    @ivar screenName: The screen name of the sender (this is only used
+                      when receiving a message)
+    @ivar message: The message
+    @ivar headers: The message headers
+    @type headers: dict
+    @ivar length: The message length (including headers and line endings)
+    @ivar ack: This variable is used to tell the server how to respond
+               once the message has been sent. If set to MESSAGE_ACK
+               (default) the server will respond with an ACK upon receiving
+               the message, if set to MESSAGE_NACK the server will respond
+               with a NACK upon failure to receive the message.
+               If set to MESSAGE_ACK_NONE the server will do nothing.
+               This is relevant for the return value of
+               SwitchboardClient.sendMessage (which will return
+               a Deferred if ack is set to either MESSAGE_ACK or MESSAGE_NACK
+               and will fire when the respective ACK or NACK is received).
+               If set to MESSAGE_ACK_NONE sendMessage will return None.
+    """
+    MESSAGE_ACK      = 'A'
+    MESSAGE_NACK     = 'N'
+    MESSAGE_ACK_NONE = 'U'
+
+    ack = MESSAGE_ACK
+
+    def __init__(self, length=0, userHandle="", screenName="", message=""):
+        self.userHandle = userHandle
+        self.screenName = screenName
+        self.message = message
+        self.headers = {'MIME-Version' : '1.0', 'Content-Type' : 'text/plain'}
+        self.length = length
+        self.readPos = 0
+
+    def _calcMessageLen(self):
+        """
+        used to calculate the number to send
+        as the message length when sending a message.
+        """
+        return reduce(operator.add, [len(x[0]) + len(x[1]) + 4  for x in self.headers.items()]) + len(self.message) + 2
+
+    def setHeader(self, header, value):
+        """ set the desired header """
+        self.headers[header] = value
+
+    def getHeader(self, header):
+        """
+        get the desired header value
+        @raise KeyError: if no such header exists.
+        """
+        return self.headers[header]
+
+    def hasHeader(self, header):
+        """ check to see if the desired header exists """
+        return header in self.headers
+
+    def getMessage(self):
+        """ return the message - not including headers """
+        return self.message
+
+    def setMessage(self, message):
+        """ set the message text """
+        self.message = message
+
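+# Editor's note: illustrative sketch, not part of upstream Twisted. Building a
+# message and choosing its acknowledgement mode might look like this; the
+# switchboard client instance is assumed to exist.
+#
+#     msg = MSNMessage(message='hello there')
+#     msg.setHeader('Content-Type', 'text/plain; charset=UTF-8')
+#     msg.ack = MSNMessage.MESSAGE_NACK        # only hear back on delivery failure
+#     d = switchboardClient.sendMessage(msg)   # a Deferred, per the ack docs above
+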
+class MSNContact:
+
+    """
+    This class represents a contact (user).
+
+    @ivar userHandle: The contact's user handle (passport).
+    @ivar screenName: The contact's screen name.
+    @ivar groups: A list of all the group IDs which this
+                  contact belongs to.
+    @ivar lists: An integer representing the sum of all lists
+                 that this contact belongs to.
+    @ivar status: The contact's status code.
+    @type status: str if contact's status is known, None otherwise.
+
+    @ivar homePhone: The contact's home phone number.
+    @type homePhone: str if known, otherwise None.
+    @ivar workPhone: The contact's work phone number.
+    @type workPhone: str if known, otherwise None.
+    @ivar mobilePhone: The contact's mobile phone number.
+    @type mobilePhone: str if known, otherwise None.
+    @ivar hasPager: Whether or not this user has a mobile pager
+                    (true=yes, false=no)
+    """
+
+    def __init__(self, userHandle="", screenName="", lists=0, groups=[], status=None):
+        self.userHandle = userHandle
+        self.screenName = screenName
+        self.lists = lists
+        self.groups = [] # if applicable
+        self.status = status # current status
+
+        # phone details
+        self.homePhone   = None
+        self.workPhone   = None
+        self.mobilePhone = None
+        self.hasPager    = None
+
+    def setPhone(self, phoneType, value):
+        """
+        set phone numbers/values for this specific user.
+        for phoneType check the *_PHONE constants and HAS_PAGER
+        """
+
+        t = phoneType.upper()
+        if t == HOME_PHONE:
+            self.homePhone = value
+        elif t == WORK_PHONE:
+            self.workPhone = value
+        elif t == MOBILE_PHONE:
+            self.mobilePhone = value
+        elif t == HAS_PAGER:
+            self.hasPager = value
+        else:
+            raise ValueError, "Invalid Phone Type"
+
+    def addToList(self, listType):
+        """
+        Update the lists attribute to
+        reflect being part of the
+        given list.
+        """
+        self.lists |= listType
+
+    def removeFromList(self, listType):
+        """
+        Update the lists attribute to
+        reflect being removed from the
+        given list.
+        """
+        self.lists ^= listType
+
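+# Editor's note: illustrative sketch, not part of upstream Twisted. The lists
+# attribute is a bitmask of the *_LIST constants defined above:
+#
+#     contact = MSNContact('bob@example.com', 'Bob', FORWARD_LIST | ALLOW_LIST)
+#     bool(contact.lists & BLOCK_LIST)   # False - Bob is not blocked
+#     contact.addToList(BLOCK_LIST)
+#     bool(contact.lists & BLOCK_LIST)   # True
+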
+class MSNContactList:
+    """
+    This class represents a basic MSN contact list.
+
+    @ivar contacts: All contacts on my various lists
+    @type contacts: dict (mapping user handles to MSNContact objects)
+    @ivar version: The current contact list version (used for list syncing)
+    @ivar groups: a mapping of group ids to group names
+                  (groups can only exist on the forward list)
+    @type groups: dict
+
+    B{Note}:
+    This is used only for storage and doesn't affect the
+    server's contact list.
+    """
+
+    def __init__(self):
+        self.contacts = {}
+        self.version = 0
+        self.groups = {}
+        self.autoAdd = 0
+        self.privacy = 0
+
+    def _getContactsFromList(self, listType):
+        """
+        Obtain all contacts which belong
+        to the given list type.
+        """
+        return dict([(uH,obj) for uH,obj in self.contacts.items() if obj.lists & listType])
+
+    def addContact(self, contact):
+        """
+        Add a contact
+        """
+        self.contacts[contact.userHandle] = contact
+
+    def remContact(self, userHandle):
+        """
+        Remove a contact
+        """
+        try:
+            del self.contacts[userHandle]
+        except KeyError:
+            pass
+
+    def getContact(self, userHandle):
+        """
+        Obtain the MSNContact object
+        associated with the given
+        userHandle.
+        @return: the MSNContact object if
+                 the user exists, or None.
+        """
+        try:
+            return self.contacts[userHandle]
+        except KeyError:
+            return None
+
+    def getBlockedContacts(self):
+        """
+        Obtain all the contacts on my block list
+        """
+        return self._getContactsFromList(BLOCK_LIST)
+
+    def getAuthorizedContacts(self):
+        """
+        Obtain all the contacts on my auth list.
+        (These are contacts which I have verified
+        can view my state changes).
+        """
+        return self._getContactsFromList(ALLOW_LIST)
+
+    def getReverseContacts(self):
+        """
+        Get all contacts on my reverse list.
+        (These are contacts which have added me
+        to their forward list).
+        """
+        return self._getContactsFromList(REVERSE_LIST)
+
+    def getContacts(self):
+        """
+        Get all contacts on my forward list.
+        (These are the contacts which I have added
+        to my list).
+        """
+        return self._getContactsFromList(FORWARD_LIST)
+
+    def setGroup(self, id, name):
+        """
+        Keep a mapping from the given id
+        to the given name.
+        """
+        self.groups[id] = name
+
+    def remGroup(self, id):
+        """
+        Remove the stored group
+        mapping for the given id.
+        """
+        try:
+            del self.groups[id]
+        except KeyError:
+            pass
+        for c in self.contacts.itervalues():
+            if id in c.groups:
+                c.groups.remove(id)
+
+
+class MSNEventBase(LineReceiver):
+    """
+    This class provides support for handling / dispatching events and is the
+    base class of the three main client protocols (DispatchClient,
+    NotificationClient, SwitchboardClient)
+    """
+
+    def __init__(self):
+        self.ids = {} # mapping of ids to Deferreds
+        self.currentID = 0
+        self.connected = 0
+        self.setLineMode()
+        self.currentMessage = None
+
+    def connectionLost(self, reason):
+        self.ids = {}
+        self.connected = 0
+
+    def connectionMade(self):
+        self.connected = 1
+
+    def _fireCallback(self, id, *args):
+        """
+        Fire the callback for the given id
+        if one exists and return 1, else return 0
+        """
+        if id in self.ids:
+            self.ids[id][0].callback(args)
+            del self.ids[id]
+            return 1
+        return 0
+
+    def _nextTransactionID(self):
+        """ return a usable transaction ID """
+        self.currentID += 1
+        if self.currentID > 1000:
+            self.currentID = 1
+        return self.currentID
+
+    def _createIDMapping(self, data=None):
+        """
+        return a unique transaction ID that is mapped internally to a
+        deferred .. also store arbitrary data if it is needed
+        """
+        id = self._nextTransactionID()
+        d = Deferred()
+        self.ids[id] = (d, data)
+        return (id, d)
+
+    def checkMessage(self, message):
+        """
+        process received messages to check for file invitations and
+        typing notifications and other control type messages
+        """
+        raise NotImplementedError
+
+    def lineReceived(self, line):
+        if self.currentMessage:
+            self.currentMessage.readPos += len(line+CR+LF)
+            if line == "":
+                self.setRawMode()
+                if self.currentMessage.readPos == self.currentMessage.length:
+                    self.rawDataReceived("") # :(
+                return
+            try:
+                header, value = line.split(':')
+            except ValueError:
+                raise MSNProtocolError, "Invalid Message Header"
+            self.currentMessage.setHeader(header, unquote(value).lstrip())
+            return
+        try:
+            cmd, params = line.split(' ', 1)
+        except ValueError:
+            raise MSNProtocolError, "Invalid Message, %s" % repr(line)
+
+        if len(cmd) != 3:
+            raise MSNProtocolError, "Invalid Command, %s" % repr(cmd)
+        if cmd.isdigit():
+            errorCode = int(cmd)
+            id = int(params.split()[0])
+            if id in self.ids:
+                self.ids[id][0].errback(MSNCommandFailed(errorCode))
+                del self.ids[id]
+                return
+            else:       # we received an error which doesn't map to a sent command
+                self.gotError(errorCode)
+                return
+
+        handler = getattr(self, "handle_%s" % cmd.upper(), None)
+        if handler:
+            try:
+                handler(params.split())
+            except MSNProtocolError, why:
+                self.gotBadLine(line, why)
+        else:
+            self.handle_UNKNOWN(cmd, params.split())
+
+    def rawDataReceived(self, data):
+        extra = ""
+        self.currentMessage.readPos += len(data)
+        diff = self.currentMessage.readPos - self.currentMessage.length
+        if diff > 0:
+            self.currentMessage.message += data[:-diff]
+            extra = data[-diff:]
+        elif diff == 0:
+            self.currentMessage.message += data
+        else:
+            self.currentMessage.message += data
+            return
+        del self.currentMessage.readPos
+        m = self.currentMessage
+        self.currentMessage = None
+        self.setLineMode(extra)
+        if not self.checkMessage(m):
+            return
+        self.gotMessage(m)
+
+    ### protocol command handlers - no need to override these.
+
+    def handle_MSG(self, params):
+        checkParamLen(len(params), 3, 'MSG')
+        try:
+            messageLen = int(params[2])
+        except ValueError:
+            raise MSNProtocolError, "Invalid Parameter for MSG length argument"
+        self.currentMessage = MSNMessage(length=messageLen, userHandle=params[0], screenName=unquote(params[1]))
+
+    def handle_UNKNOWN(self, cmd, params):
+        """ implement me in subclasses if you want to handle unknown events """
+        log.msg("Received unknown command (%s), params: %s" % (cmd, params))
+
+    ### callbacks
+
+    def gotMessage(self, message):
+        """
+        called when we receive a message - override in notification
+        and switchboard clients
+        """
+        raise NotImplementedError
+
+    def gotBadLine(self, line, why):
+        """ called when a handler notifies me that this line is broken """
+        log.msg('Error in line: %s (%s)' % (line, why))
+
+    def gotError(self, errorCode):
+        """
+        called when the server sends an error which is not in
+        response to a sent command (ie. it has no matching transaction ID)
+        """
+        log.msg('Error %s' % (errorCodes[errorCode]))
+
+
+
+class DispatchClient(MSNEventBase):
+    """
+    This class provides support for clients connecting to the dispatch server
+    @ivar userHandle: your user handle (passport) needed before connecting.
+    """
+
+    # eventually this may become an attribute of the
+    # factory.
+    userHandle = ""
+
+    def connectionMade(self):
+        MSNEventBase.connectionMade(self)
+        self.sendLine('VER %s %s' % (self._nextTransactionID(), MSN_PROTOCOL_VERSION))
+
+    ### protocol command handlers ( there is no need to override these )
+
+    def handle_VER(self, params):
+        id = self._nextTransactionID()
+        self.sendLine("CVR %s %s %s" % (id, MSN_CVR_STR, self.userHandle))
+
+    def handle_CVR(self, params):
+        self.sendLine("USR %s TWN I %s" % (self._nextTransactionID(), self.userHandle))
+
+    def handle_XFR(self, params):
+        if len(params) < 4:
+            raise MSNProtocolError, "Invalid number of parameters for XFR"
+        id, refType, addr = params[:3]
+        # was addr a host:port pair?
+        try:
+            host, port = addr.split(':')
+        except ValueError:
+            host = addr
+            port = MSN_PORT
+        if refType == "NS":
+            self.gotNotificationReferral(host, int(port))
+
+    ### callbacks
+
+    def gotNotificationReferral(self, host, port):
+        """
+        called when we get a referral to the notification server.
+
+        @param host: the notification server's hostname
+        @param port: the port to connect to
+        """
+        pass
+
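+# Editor's note: illustrative sketch, not part of upstream Twisted. As the
+# module docstring suggests, gotNotificationReferral is meant to be overridden;
+# notificationFactory here is a hypothetical factory for NotificationClient.
+#
+#     class Dispatcher(DispatchClient):
+#         def gotNotificationReferral(self, host, port):
+#             reactor.connectTCP(host, port, notificationFactory)
+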
+
+class NotificationClient(MSNEventBase):
+    """
+    This class provides support for clients connecting
+    to the notification server.
+    """
+
+    factory = None # sssh pychecker
+
+    def __init__(self, currentID=0):
+        MSNEventBase.__init__(self)
+        self.currentID = currentID
+        self._state = ['DISCONNECTED', {}]
+
+    def _setState(self, state):
+        self._state[0] = state
+
+    def _getState(self):
+        return self._state[0]
+
+    def _getStateData(self, key):
+        return self._state[1][key]
+
+    def _setStateData(self, key, value):
+        self._state[1][key] = value
+
+    def _remStateData(self, *args):
+        for key in args:
+            del self._state[1][key]
+
+    def connectionMade(self):
+        MSNEventBase.connectionMade(self)
+        self._setState('CONNECTED')
+        self.sendLine("VER %s %s" % (self._nextTransactionID(), MSN_PROTOCOL_VERSION))
+
+    def connectionLost(self, reason):
+        self._setState('DISCONNECTED')
+        self._state[1] = {}
+        MSNEventBase.connectionLost(self, reason)
+
+    def checkMessage(self, message):
+        """ hook used for detecting specific notification messages """
+        cTypes = [s.lstrip() for s in message.getHeader('Content-Type').split(';')]
+        if 'text/x-msmsgsprofile' in cTypes:
+            self.gotProfile(message)
+            return 0
+        return 1
+
+    ### protocol command handlers - no need to override these
+
+    def handle_VER(self, params):
+        id = self._nextTransactionID()
+        self.sendLine("CVR %s %s %s" % (id, MSN_CVR_STR, self.factory.userHandle))
+
+    def handle_CVR(self, params):
+        self.sendLine("USR %s TWN I %s" % (self._nextTransactionID(), self.factory.userHandle))
+
+    def handle_USR(self, params):
+        if len(params) != 4 and len(params) != 6:
+            raise MSNProtocolError, "Invalid Number of Parameters for USR"
+
+        mechanism = params[1]
+        if mechanism == "OK":
+            self.loggedIn(params[2], unquote(params[3]), int(params[4]))
+        elif params[2].upper() == "S":
+            # we need to obtain auth from a passport server
+            f = self.factory
+            d = execute(
+                _login, f.userHandle, f.password, f.passportServer,
+                authData=params[3])
+            d.addCallback(self._passportLogin)
+            d.addErrback(self._passportError)
+
+    def _passportLogin(self, result):
+        if result[0] == LOGIN_REDIRECT:
+            d = _login(self.factory.userHandle, self.factory.password,
+                       result[1], cached=1, authData=result[2])
+            d.addCallback(self._passportLogin)
+            d.addErrback(self._passportError)
+        elif result[0] == LOGIN_SUCCESS:
+            self.sendLine("USR %s TWN S %s" % (self._nextTransactionID(), result[1]))
+        elif result[0] == LOGIN_FAILURE:
+            self.loginFailure(result[1])
+
+
+    def _passportError(self, failure):
+        """
+        Handle a problem logging in via the Passport server, passing on the
+        error as a string message to the C{loginFailure} callback.
+        """
+        if failure.check(SSLRequired):
+            failure = failure.getErrorMessage()
+        self.loginFailure("Exception while authenticating: %s" % failure)
+
+
+    def handle_CHG(self, params):
+        checkParamLen(len(params), 3, 'CHG')
+        id = int(params[0])
+        if not self._fireCallback(id, params[1]):
+            self.statusChanged(params[1])
+
+    def handle_ILN(self, params):
+        checkParamLen(len(params), 5, 'ILN')
+        self.gotContactStatus(params[1], params[2], unquote(params[3]))
+
+    def handle_CHL(self, params):
+        checkParamLen(len(params), 2, 'CHL')
+        self.sendLine("QRY %s msmsgs at msnmsgr.com 32" % self._nextTransactionID())
+        self.transport.write(md5(params[1] + MSN_CHALLENGE_STR).hexdigest())
+
+    def handle_QRY(self, params):
+        pass
+
+    def handle_NLN(self, params):
+        checkParamLen(len(params), 4, 'NLN')
+        self.contactStatusChanged(params[0], params[1], unquote(params[2]))
+
+    def handle_FLN(self, params):
+        checkParamLen(len(params), 1, 'FLN')
+        self.contactOffline(params[0])
+
+    def handle_LST(self, params):
+        # support no longer exists for manually
+        # requesting lists - why do I feel cleaner now?
+        if self._getState() != 'SYNC':
+            return
+        contact = MSNContact(userHandle=params[0], screenName=unquote(params[1]),
+                             lists=int(params[2]))
+        if contact.lists & FORWARD_LIST:
+            contact.groups.extend(map(int, params[3].split(',')))
+        self._getStateData('list').addContact(contact)
+        self._setStateData('last_contact', contact)
+        sofar = self._getStateData('lst_sofar') + 1
+        if sofar == self._getStateData('lst_reply'):
+            # this is the best place to determine that
+            # a syn really has finished - msn _may_ send
+            # BPR information for the last contact
+            # which is unfortunate because it means
+            # that the real end of a syn is non-deterministic.
+            # to handle this we'll keep 'last_contact' hanging
+            # around in the state data and update it if we need
+            # to later.
+            self._setState('SESSION')
+            contacts = self._getStateData('list')
+            phone = self._getStateData('phone')
+            id = self._getStateData('synid')
+            self._remStateData('lst_reply', 'lsg_reply', 'lst_sofar', 'phone', 'synid', 'list')
+            self._fireCallback(id, contacts, phone)
+        else:
+            self._setStateData('lst_sofar',sofar)
+
+    def handle_BLP(self, params):
+        # check to see if this is in response to a SYN
+        if self._getState() == 'SYNC':
+            self._getStateData('list').privacy = listCodeToID[params[0].lower()]
+        else:
+            id = int(params[0])
+            self._fireCallback(id, int(params[1]), listCodeToID[params[2].lower()])
+
+    def handle_GTC(self, params):
+        # check to see if this is in response to a SYN
+        if self._getState() == 'SYNC':
+            if params[0].lower() == "a":
+                self._getStateData('list').autoAdd = 0
+            elif params[0].lower() == "n":
+                self._getStateData('list').autoAdd = 1
+            else:
+                raise MSNProtocolError, "Invalid Paramater for GTC" # debug
+        else:
+            id = int(params[0])
+            if params[1].lower() == "a":
+                self._fireCallback(id, 0)
+            elif params[1].lower() == "n":
+                self._fireCallback(id, 1)
+            else:
+                raise MSNProtocolError, "Invalid Paramater for GTC" # debug
+
+    def handle_SYN(self, params):
+        id = int(params[0])
+        if len(params) == 2:
+            self._setState('SESSION')
+            self._fireCallback(id, None, None)
+        else:
+            contacts = MSNContactList()
+            contacts.version = int(params[1])
+            self._setStateData('list', contacts)
+            self._setStateData('lst_reply', int(params[2]))
+            self._setStateData('lsg_reply', int(params[3]))
+            self._setStateData('lst_sofar', 0)
+            self._setStateData('phone', [])
+
+    def handle_LSG(self, params):
+        if self._getState() == 'SYNC':
+            self._getStateData('list').groups[int(params[0])] = unquote(params[1])
+
+        # Please see the comment above the requestListGroups / requestList methods
+        # regarding support for this
+        #
+        #else:
+        #    self._getStateData('groups').append((int(params[4]), unquote(params[5])))
+        #    if params[3] == params[4]: # this was the last group
+        #        self._fireCallback(int(params[0]), self._getStateData('groups'), int(params[1]))
+        #        self._remStateData('groups')
+
+    def handle_PRP(self, params):
+        if self._getState() == 'SYNC':
+            self._getStateData('phone').append((params[0], unquote(params[1])))
+        else:
+            self._fireCallback(int(params[0]), int(params[1]), unquote(params[3]))
+
+    def handle_BPR(self, params):
+        numParams = len(params)
+        if numParams == 2: # part of a syn
+            self._getStateData('last_contact').setPhone(params[0], unquote(params[1]))
+        elif numParams == 4:
+            self.gotPhoneNumber(int(params[0]), params[1], params[2], unquote(params[3]))
+
+    def handle_ADG(self, params):
+        checkParamLen(len(params), 5, 'ADG')
+        id = int(params[0])
+        if not self._fireCallback(id, int(params[1]), unquote(params[2]), int(params[3])):
+            raise MSNProtocolError, "ADG response does not match up to a request" # debug
+
+    def handle_RMG(self, params):
+        checkParamLen(len(params), 3, 'RMG')
+        id = int(params[0])
+        if not self._fireCallback(id, int(params[1]), int(params[2])):
+            raise MSNProtocolError, "RMG response does not match up to a request" # debug
+
+    def handle_REG(self, params):
+        checkParamLen(len(params), 5, 'REG')
+        id = int(params[0])
+        if not self._fireCallback(id, int(params[1]), int(params[2]), unquote(params[3])):
+            raise MSNProtocolError, "REG response does not match up to a request" # debug
+
+    def handle_ADD(self, params):
+        numParams = len(params)
+        if numParams < 5 or params[1].upper() not in ('AL','BL','RL','FL'):
+            raise MSNProtocolError, "Invalid Parameters for ADD" # debug
+        id = int(params[0])
+        listType = params[1].lower()
+        listVer = int(params[2])
+        userHandle = params[3]
+        groupID = None
+        if numParams == 6: # they sent a group id
+            if params[1].upper() != "FL":
+                raise MSNProtocolError, "Only forward list can contain groups" # debug
+            groupID = int(params[5])
+        if not self._fireCallback(id, listCodeToID[listType], userHandle, listVer, groupID):
+            self.userAddedMe(userHandle, unquote(params[4]), listVer)
+
+    def handle_REM(self, params):
+        numParams = len(params)
+        if numParams < 4 or params[1].upper() not in ('AL','BL','FL','RL'):
+            raise MSNProtocolError, "Invalid Parameters for REM" # debug
+        id = int(params[0])
+        listType = params[1].lower()
+        listVer = int(params[2])
+        userHandle = params[3]
+        groupID = None
+        if numParams == 5:
+            if params[1].upper() != "FL":
+                raise MSNProtocolError, "Only forward list can contain groups" # debug
+            groupID = int(params[4])
+        if not self._fireCallback(id, listCodeToID[listType], userHandle, listVer, groupID):
+            if listType.upper() == "RL":
+                self.userRemovedMe(userHandle, listVer)
+
+    def handle_REA(self, params):
+        checkParamLen(len(params), 4, 'REA')
+        id = int(params[0])
+        self._fireCallback(id, int(params[1]), unquote(params[3]))
+
+    def handle_XFR(self, params):
+        checkParamLen(len(params), 5, 'XFR')
+        id = int(params[0])
+        # check to see if they sent a host/port pair
+        try:
+            host, port = params[2].split(':')
+        except ValueError:
+            host = params[2]
+            port = MSN_PORT
+
+        if not self._fireCallback(id, host, int(port), params[4]):
+            raise MSNProtocolError, "Got XFR (referral) that I didn't ask for .. should this happen?" # debug
+
+    def handle_RNG(self, params):
+        checkParamLen(len(params), 6, 'RNG')
+        # check for host:port pair
+        try:
+            host, port = params[1].split(":")
+            port = int(port)
+        except ValueError:
+            host = params[1]
+            port = MSN_PORT
+        self.gotSwitchboardInvitation(int(params[0]), host, port, params[3], params[4],
+                                      unquote(params[5]))
+
+    def handle_OUT(self, params):
+        checkParamLen(len(params), 1, 'OUT')
+        if params[0] == "OTH":
+            self.multipleLogin()
+        elif params[0] == "SSD":
+            self.serverGoingDown()
+        else:
+            raise MSNProtocolError, "Invalid Parameters received for OUT" # debug
+
+    # callbacks
+
+    def loggedIn(self, userHandle, screenName, verified):
+        """
+        Called when the client has logged in.
+        The default behaviour of this method is to
+        update the factory with our screenName and
+        to sync the contact list (factory.contacts).
+        When this is complete self.listSynchronized
+        will be called.
+
+        @param userHandle: our userHandle
+        @param screenName: our screenName
+        @param verified: 1 if our passport has been verified, 0 if not
+                         (I'm not sure of the significance of this).
+        @type verified: int
+        """
+        self.factory.screenName = screenName
+        if not self.factory.contacts:
+            listVersion = 0
+        else:
+            listVersion = self.factory.contacts.version
+        self.syncList(listVersion).addCallback(self.listSynchronized)
+
+
+    def loginFailure(self, message):
+        """
+        Called when the client fails to login.
+
+        @param message: a message indicating the problem that was encountered
+        """
+
+
+    def gotProfile(self, message):
+        """
+        Called after logging in when the server sends an initial
+        message with MSN/passport specific profile information
+        such as country, number of kids, etc.
+        Check the message headers for the specific values.
+
+        @param message: The profile message
+        """
+        pass
+
+    def listSynchronized(self, *args):
+        """
+        Lists are now synchronized by default upon logging in; this
+        method is called after the synchronization has finished
+        and the factory now has the up-to-date contacts.
+        """
+        pass
+
+    def statusChanged(self, statusCode):
+        """
+        Called when our status changes and it isn't in response to
+        a client command. By default we will update the status
+        attribute of the factory.
+
+        @param statusCode: 3-letter status code
+        """
+        self.factory.status = statusCode
+
+    def gotContactStatus(self, statusCode, userHandle, screenName):
+        """
+        Called after logging in when the server sends the status of online contacts.
+        By default we will update the status attribute of the contact stored
+        on the factory.
+
+        @param statusCode: 3-letter status code
+        @param userHandle: the contact's user handle (passport)
+        @param screenName: the contact's screen name
+        """
+        self.factory.contacts.getContact(userHandle).status = statusCode
+
+    def contactStatusChanged(self, statusCode, userHandle, screenName):
+        """
+        Called when we're notified that a contact's status has changed.
+        By default we will update the status attribute of the contact
+        stored on the factory.
+
+        @param statusCode: 3-letter status code
+        @param userHandle: the contact's user handle (passport)
+        @param screenName: the contact's screen name
+        """
+        self.factory.contacts.getContact(userHandle).status = statusCode
+
+    def contactOffline(self, userHandle):
+        """
+        Called when a contact goes offline. By default this method
+        will update the status attribute of the contact stored
+        on the factory.
+
+        @param userHandle: the contact's user handle
+        """
+        self.factory.contacts.getContact(userHandle).status = STATUS_OFFLINE
+
+    def gotPhoneNumber(self, listVersion, userHandle, phoneType, number):
+        """
+        Called when the server sends us phone details about
+        a specific user (for example, after a user is added
+        the server will send their status, phone details, etc.).
+        By default we will update the list version for the
+        factory's contact list and update the phone details
+        for the specific user.
+
+        @param listVersion: the new list version
+        @param userHandle: the contact's user handle (passport)
+        @param phoneType: the specific phoneType
+                          (*_PHONE constants or HAS_PAGER)
+        @param number: the value/phone number.
+        """
+        self.factory.contacts.version = listVersion
+        self.factory.contacts.getContact(userHandle).setPhone(phoneType, number)
+
+    def userAddedMe(self, userHandle, screenName, listVersion):
+        """
+        Called when a user adds me to their list (i.e. they have been added to
+        the reverse list). By default this method will update the version of
+        the factory's contact list and the contact's lists attribute; if the
+        contact does not already exist, a new MSNContact object is created
+        and stored.
+
+        @param userHandle: the userHandle of the user
+        @param screenName: the screen name of the user
+        @param listVersion: the new list version
+        @type listVersion: int
+        """
+        self.factory.contacts.version = listVersion
+        c = self.factory.contacts.getContact(userHandle)
+        if not c:
+            c = MSNContact(userHandle=userHandle, screenName=screenName)
+            self.factory.contacts.addContact(c)
+        c.addToList(REVERSE_LIST)
+
+    def userRemovedMe(self, userHandle, listVersion):
+        """
+        Called when a user removes us from their contact list
+        (i.e. they are removed from our reverse list).
+        By default this method will update the version of
+        the factory's contact list, remove the user from the
+        reverse list and, if they are no longer part of any
+        other list, remove them from the contact list entirely.
+
+        @param userHandle: the contact's user handle (passport)
+        @param listVersion: the new list version
+        """
+        self.factory.contacts.version = listVersion
+        c = self.factory.contacts.getContact(userHandle)
+        c.removeFromList(REVERSE_LIST)
+        if c.lists == 0:
+            self.factory.contacts.remContact(c.userHandle)
+
+    def gotSwitchboardInvitation(self, sessionID, host, port,
+                                 key, userHandle, screenName):
+        """
+        Called when we get an invitation to a switchboard server.
+        This happens when a user requests a chat session with us.
+
+        @param sessionID: session ID number, must be remembered for logging in
+        @param host: the hostname of the switchboard server
+        @param port: the port to connect to
+        @param key: used for authorization when connecting
+        @param userHandle: the user handle of the person who invited us
+        @param screenName: the screen name of the person who invited us
+        """
+        pass
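+
+        # A minimal sketch of answering the invitation, assuming a subclass
+        # and some means of connecting the protocol to host:port (the
+        # connecting factory is left out here):
+        #
+        #     client = SwitchboardClient()
+        #     client.userHandle = self.factory.userHandle
+        #     client.key = key
+        #     client.sessionID = sessionID
+        #     client.reply = 1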
+
+    def multipleLogin(self):
+        """
+        Called when the server says there has been another login
+        under our account; the server should disconnect us right away.
+        """
+        pass
+
+    def serverGoingDown(self):
+        """
+        Called when the server has notified us that it is going down for
+        maintenance.
+        """
+        pass
+
+    # api calls
+
+    def changeStatus(self, status):
+        """
+        Change my current status. This method will add
+        a default callback to the returned Deferred
+        which will update the status attribute of the
+        factory.
+
+        @param status: 3-letter status code (as defined by
+                       the STATUS_* constants)
+        @return: A Deferred, the callback of which will be
+                 fired when the server confirms the change
+                 of status.  The callback argument will be
+                 a tuple with the new status code as the
+                 only element.
+        """
+
+        id, d = self._createIDMapping()
+        self.sendLine("CHG %s %s" % (id, status))
+        def _cb(r):
+            self.factory.status = r[0]
+            return r
+        return d.addCallback(_cb)
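+
+    # A minimal usage sketch for this Deferred-based call, assuming a
+    # connected NotificationClient instance named 'client':
+    #
+    #     def statusSet(result):
+    #         # result is a tuple whose only element is the new status code
+    #         print 'status confirmed as', result[0]
+    #     client.changeStatus(STATUS_AWAY).addCallback(statusSet)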
+
+    # I am no longer supporting the process of manually requesting
+    # lists or list groups -- as far as I can see this has no use
+    # if lists are synchronized and updated correctly, which they
+    # should be. If someone has a specific justified need for this
+    # then please contact me and I'll re-enable/fix support for it.
+
+    #def requestList(self, listType):
+    #    """
+    #    request the desired list type
+    #
+    #    @param listType: (as defined by the *_LIST constants)
+    #    @return: A Deferred, the callback of which will be
+    #             fired when the list has been retrieved.
+    #             The callback argument will be a tuple with
+    #             the only element being a list of MSNContact
+    #             objects.
+    #    """
+    #    # this doesn't need to ever be used if syncing of the lists takes place
+    #    # i.e. please don't use it!
+    #    warnings.warn("Please do not use this method - use the list syncing process instead")
+    #    id, d = self._createIDMapping()
+    #    self.sendLine("LST %s %s" % (id, listIDToCode[listType].upper()))
+    #    self._setStateData('list',[])
+    #    return d
+
+    def setPrivacyMode(self, privLevel):
+        """
+        Set my privacy mode on the server.
+
+        B{Note}:
+        This only keeps the current privacy setting on
+        the server for later retrieval; it does not
+        affect the way the server works at all.
+
+        @param privLevel: This parameter can be true, in which
+                          case the server will keep the state as
+                          'al', which the official client interprets
+                          as -> allow messages from all users
+                          except those on the block list.
+                          Alternatively it can be false, in which
+                          case the server will keep the state as
+                          'bl', which the official client interprets
+                          as -> allow messages only from users
+                          on the allow list.
+
+        @return: A Deferred, the callback of which will be fired when
+                 the server replies with the new privacy setting.
+                 The callback argument will be a tuple, the 2 elements
+                 of which being the list version and either 'al'
+                 or 'bl' (the new privacy setting).
+        """
+
+        id, d = self._createIDMapping()
+        if privLevel:
+            self.sendLine("BLP %s AL" % id)
+        else:
+            self.sendLine("BLP %s BL" % id)
+        return d
+
+    def syncList(self, version):
+        """
+        Used for keeping an up-to-date contact list.
+        A callback is added to the returned Deferred
+        that updates the contact list on the factory
+        and also sets my state to STATUS_ONLINE.
+
+        B{Note}:
+        This is called automatically upon signing
+        in using the version attribute of
+        factory.contacts, so you may want to persist
+        this object accordingly. Because of this there
+        is no real need to ever call this method
+        directly.
+
+        @param version: The current known list version
+
+        @return: A Deferred, the callback of which will be
+                 fired when the server sends an adequate reply.
+                 The callback argument will be a tuple with two
+                 elements, the new list (MSNContactList) and
+                 your current state (a dictionary).  If the version
+                 you sent _was_ the latest list version, both elements
+                 will be None. To just request the list send a version of 0.
+        """
+
+        self._setState('SYNC')
+        id, d = self._createIDMapping(data=str(version))
+        self._setStateData('synid',id)
+        self.sendLine("SYN %s %s" % (id, version))
+        def _cb(r):
+            self.changeStatus(STATUS_ONLINE)
+            if r[0] is not None:
+                self.factory.contacts = r[0]
+            return r
+        return d.addCallback(_cb)
+
+
+    # I am no longer supporting the process of manually requesting
+    # lists or list groups -- as far as I can see this has no use
+    # if lists are synchronized and updated correctly, which they
+    # should be. If someone has a specific justified need for this
+    # then please contact me and I'll re-enable/fix support for it.
+
+    #def requestListGroups(self):
+    #    """
+    #    Request (forward) list groups.
+    #
+    #    @return: A Deferred, the callback for which will be called
+    #             when the server responds with the list groups.
+    #             The callback argument will be a tuple with two elements,
+    #             a dictionary mapping group IDs to group names and the
+    #             current list version.
+    #    """
+    #
+    #    # this doesn't need to be used if syncing of the lists takes place (which it SHOULD!)
+    #    # i.e. please don't use it!
+    #    warnings.warn("Please do not use this method - use the list syncing process instead")
+    #    id, d = self._createIDMapping()
+    #    self.sendLine("LSG %s" % id)
+    #    self._setStateData('groups',{})
+    #    return d
+
+    def setPhoneDetails(self, phoneType, value):
+        """
+        Set/change my phone numbers stored on the server.
+
+        @param phoneType: phoneType can be one of the following
+                          constants - HOME_PHONE, WORK_PHONE,
+                          MOBILE_PHONE, HAS_PAGER.
+                          These are pretty self-explanatory, except
+                          maybe HAS_PAGER which refers to whether or
+                          not you have a pager.
+        @param value: for all of the *_PHONE constants the value is a
+                      phone number (str), for HAS_PAGER accepted values
+                      are 'Y' (for yes) and 'N' (for no).
+
+        @return: A Deferred, the callback for which will be fired when
+                 the server confirms the change has been made. The
+                 callback argument will be a tuple with 2 elements, the
+                 first being the new list version (int) and the second
+                 being the new phone number value (str).
+        """
+        # XXX: Add a default callback which updates
+        # factory.contacts.version and the relevant phone
+        # number
+        id, d = self._createIDMapping()
+        self.sendLine("PRP %s %s %s" % (id, phoneType, quote(value)))
+        return d
+
+    def addListGroup(self, name):
+        """
+        Used to create a new list group.
+        A default callback is added to the
+        returned Deferred which updates the
+        contacts attribute of the factory.
+
+        @param name: The desired name of the new group.
+
+        @return: A Deferred, the callback for which will be called
+                 when the server confirms that the new group has been
+                 created.  The callback argument will be a tuple with 3
+                 elements: the new list version (int), the new group name
+                 (str) and the new group ID (int).
+        """
+
+        id, d = self._createIDMapping()
+        self.sendLine("ADG %s %s 0" % (id, quote(name)))
+        def _cb(r):
+            self.factory.contacts.version = r[0]
+            self.factory.contacts.setGroup(r[1], r[2])
+            return r
+        return d.addCallback(_cb)
+
+    def remListGroup(self, groupID):
+        """
+        Used to remove a list group.
+        A default callback is added to the
+        returned Deferred which updates the
+        contacts attribute of the factory.
+
+        @param groupID: the ID of the desired group to be removed.
+
+        @return: A Deferred, the callback for which will be called when
+                 the server confirms the deletion of the group.
+                 The callback argument will be a tuple with 2 elements:
+                 the new list version (int) and the group ID (int) of
+                 the removed group.
+        """
+
+        id, d = self._createIDMapping()
+        self.sendLine("RMG %s %s" % (id, groupID))
+        def _cb(r):
+            self.factory.contacts.version = r[0]
+            self.factory.contacts.remGroup(r[1])
+            return r
+        return d.addCallback(_cb)
+
+    def renameListGroup(self, groupID, newName):
+        """
+        Used to rename an existing list group.
+        A default callback is added to the returned
+        Deferred which updates the contacts attribute
+        of the factory.
+
+        @param groupID: the ID of the desired group to rename.
+        @param newName: the desired new name for the group.
+
+        @return: A Deferred, the callback for which will be called
+                 when the server confirms the renaming.
+                 The callback argument will be a tuple of 3 elements,
+                 the new list version (int), the group id (int) and
+                 the new group name (str).
+        """
+
+        id, d = self._createIDMapping()
+        self.sendLine("REG %s %s %s 0" % (id, groupID, quote(newName)))
+        def _cb(r):
+            self.factory.contacts.version = r[0]
+            self.factory.contacts.setGroup(r[1], r[2])
+            return r
+        return d.addCallback(_cb)
+
+    def addContact(self, listType, userHandle, groupID=0):
+        """
+        Used to add a contact to the desired list.
+        A default callback is added to the returned
+        Deferred which updates the contacts attribute of
+        the factory with the new contact information.
+        If you are adding a contact to the forward list
+        and you want to associate this contact with multiple
+        groups then you will need to call this method for each
+        group you would like to add them to, changing the groupID
+        parameter. The default callback will take care of updating
+        the group information on the factory's contact list.
+
+        @param listType: (as defined by the *_LIST constants)
+        @param userHandle: the user handle (passport) of the contact
+                           that is being added
+        @param groupID: the group ID with which to associate this contact
+                        (default 0 - the default group). Groups are only
+                        valid for FORWARD_LIST.
+
+        @return: A Deferred, the callback for which will be called when
+                 the server has confirmed that the user has been added.
+                 The callback argument will be a tuple with 4 elements:
+                 the list type, the contact's user handle, the new list
+                 version, and the group id (if relevant, otherwise it
+                 will be None)
+        """
+
+        id, d = self._createIDMapping()
+        listType = listIDToCode[listType].upper()
+        if listType == "FL":
+            self.sendLine("ADD %s FL %s %s %s" % (id, userHandle, userHandle, groupID))
+        else:
+            self.sendLine("ADD %s %s %s %s" % (id, listType, userHandle, userHandle))
+
+        def _cb(r):
+            self.factory.contacts.version = r[2]
+            c = self.factory.contacts.getContact(r[1])
+            if not c:
+                c = MSNContact(userHandle=r[1])
+            if r[3]:
+                c.groups.append(r[3])
+            c.addToList(r[0])
+            return r
+        return d.addCallback(_cb)
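+
+    # A minimal usage sketch, assuming a connected NotificationClient named
+    # 'client' (the handle below is just a placeholder):
+    #
+    #     def added(result):
+    #         listType, userHandle, listVer, groupID = result
+    #         print userHandle, 'is now on list', listType
+    #     client.addContact(FORWARD_LIST, 'friend@example.com').addCallback(added)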
+
+    def remContact(self, listType, userHandle, groupID=0):
+        """
+        Used to remove a contact from the desired list.
+        A default callback is added to the returned deferred
+        which updates the contacts attribute of the factory
+        to reflect the new contact information. If you are
+        removing from the forward list then you will need to
+        supply a groupID. If the contact is in more than one
+        group they will only be removed from that group and
+        not the entire forward list; if this is their only
+        group they will be removed from the whole list.
+
+        @param listType: (as defined by the *_LIST constants)
+        @param userHandle: the user handle (passport) of the
+                           contact being removed
+        @param groupID: the ID of the group to which this contact
+                        belongs (only relevant for FORWARD_LIST,
+                        default is 0)
+
+        @return: A Deferred, the callback for which will be called when
+                 the server has confirmed that the user has been removed.
+                 The callback argument will be a tuple of 4 elements:
+                 the list type, the contact's user handle, the new list
+                 version, and the group id (if relevant, otherwise it will
+                 be None)
+        """
+
+        id, d = self._createIDMapping()
+        listType = listIDToCode[listType].upper()
+        if listType == "FL":
+            self.sendLine("REM %s FL %s %s" % (id, userHandle, groupID))
+        else:
+            self.sendLine("REM %s %s %s" % (id, listType, userHandle))
+
+        def _cb(r):
+            l = self.factory.contacts
+            l.version = r[2]
+            c = l.getContact(r[1])
+            group = r[3]
+            shouldRemove = 1
+            if group: # they may not have been removed from the list
+                c.groups.remove(group)
+                if c.groups:
+                    shouldRemove = 0
+            if shouldRemove:
+                c.removeFromList(r[0])
+                if c.lists == 0:
+                    l.remContact(c.userHandle)
+            return r
+        return d.addCallback(_cb)
+
+    def changeScreenName(self, newName):
+        """
+        Used to change your current screen name.
+        A default callback is added to the returned
+        Deferred which updates the screenName attribute
+        of the factory and also updates the contact list
+        version.
+
+        @param newName: the new screen name
+
+        @return: A Deferred, the callback for which will be called
+                 when the server sends an adequate reply.
+                 The callback argument will be a tuple of 2 elements:
+                 the new list version and the new screen name.
+        """
+
+        id, d = self._createIDMapping()
+        self.sendLine("REA %s %s %s" % (id, self.factory.userHandle, quote(newName)))
+        def _cb(r):
+            self.factory.contacts.version = r[0]
+            self.factory.screenName = r[1]
+            return r
+        return d.addCallback(_cb)
+
+    def requestSwitchboardServer(self):
+        """
+        Used to request a switchboard server to use for conversations.
+
+        @return: A Deferred, the callback for which will be called when
+                 the server responds with the switchboard information.
+                 The callback argument will be a tuple with 3 elements:
+                 the host of the switchboard server, the port and a key
+                 used for logging in.
+        """
+
+        id, d = self._createIDMapping()
+        self.sendLine("XFR %s SB" % id)
+        return d
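+
+    # A minimal sketch of acting on the referral, assuming a connected
+    # NotificationClient named 'client'; connectSwitchboard is a hypothetical
+    # helper that wires up a SwitchboardClient with reply = 0:
+    #
+    #     def gotReferral(result):
+    #         host, port, key = result
+    #         connectSwitchboard(host, port, key)   # hypothetical helper
+    #     client.requestSwitchboardServer().addCallback(gotReferral)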
+
+    def logOut(self):
+        """
+        Used to log out of the notification server.
+        After running the method the server is expected
+        to close the connection.
+        """
+
+        self.sendLine("OUT")
+
+class NotificationFactory(ClientFactory):
+    """
+    Factory for the NotificationClient protocol.
+    This is basically responsible for keeping
+    the state of the client and thus should be used
+    in a 1:1 situation with clients.
+
+    @ivar contacts: An MSNContactList instance reflecting
+                    the current contact list -- this is
+                    generally kept up to date by the default
+                    command handlers.
+    @ivar userHandle: The client's userHandle, this is expected
+                      to be set by the client and is used by the
+                      protocol (for logging in etc).
+    @ivar screenName: The client's current screen-name -- this is
+                      generally kept up to date by the default
+                      command handlers.
+    @ivar password: The client's password -- this is (obviously)
+                    expected to be set by the client.
+    @ivar passportServer: This must point to an MSN passport server
+                          (the whole URL is required).
+    @ivar status: The status of the client -- this is generally kept
+                  up to date by the default command handlers
+    """
+
+    contacts = None
+    userHandle = ''
+    screenName = ''
+    password = ''
+    passportServer = 'https://nexus.passport.com/rdr/pprdr.asp'
+    status = 'FLN'
+    protocol = NotificationClient
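+
+# A minimal wiring sketch, assuming 'host' and 'port' were obtained from the
+# dispatch/referral step that precedes the notification login, and that the
+# credentials below are placeholders:
+#
+#     factory = NotificationFactory()
+#     factory.userHandle = 'someone@example.com'
+#     factory.password = 'secret'
+#     reactor.connectTCP(host, port, factory)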
+
+
+# XXX: A lot of the state currently kept in
+# instances of SwitchboardClient is likely to
+# be moved into a factory at some stage in the
+# future
+
+class SwitchboardClient(MSNEventBase):
+    """
+    This class provides support for clients connecting to a switchboard server.
+
+    Switchboard servers are used for conversations with other people
+    on the MSN network. This means that the number of conversations at
+    any given time will be directly proportional to the number of
+    connections to various switchboard servers.
+
+    MSN makes no distinction between single and group conversations,
+    so any number of users may be invited to join a specific conversation
+    taking place on a switchboard server.
+
+    @ivar key: authorization key, obtained when receiving
+               invitation / requesting switchboard server.
+    @ivar userHandle: your user handle (passport)
+    @ivar sessionID: unique session ID, used if you are replying
+                     to a switchboard invitation
+    @ivar reply: set this to 1 in connectionMade or before to signify
+                 that you are replying to a switchboard invitation.
+    """
+
+    key = 0
+    userHandle = ""
+    sessionID = ""
+    reply = 0
+
+    _iCookie = 0
+
+    def __init__(self):
+        MSNEventBase.__init__(self)
+        self.pendingUsers = {}
+        self.cookies = {'iCookies' : {}, 'external' : {}} # will maybe be moved to a factory in the future
+
+    def connectionMade(self):
+        MSNEventBase.connectionMade(self)
+        log.msg('sending initial stuff')
+        self._sendInit()
+
+    def connectionLost(self, reason):
+        self.cookies['iCookies'] = {}
+        self.cookies['external'] = {}
+        MSNEventBase.connectionLost(self, reason)
+
+    def _sendInit(self):
+        """
+        send initial data based on whether we are replying to an invitation
+        or starting one.
+        """
+        id = self._nextTransactionID()
+        if not self.reply:
+            self.sendLine("USR %s %s %s" % (id, self.userHandle, self.key))
+        else:
+            self.sendLine("ANS %s %s %s %s" % (id, self.userHandle, self.key, self.sessionID))
+
+    def _newInvitationCookie(self):
+        self._iCookie += 1
+        if self._iCookie > 1000:
+            self._iCookie = 1
+        return self._iCookie
+
+    def _checkTyping(self, message, cTypes):
+        """ helper method for checkMessage """
+        if 'text/x-msmsgscontrol' in cTypes and message.hasHeader('TypingUser'):
+            self.userTyping(message)
+            return 1
+
+    def _checkFileInvitation(self, message, info):
+        """ helper method for checkMessage """
+        guid = info.get('Application-GUID', '').lower()
+        name = info.get('Application-Name', '').lower()
+
+        # Both fields are required, but we'll let some lazy clients get away
+        # with only sending a name, if it is easy for us to recognize the
+        # name (the name is localized, so this check might fail for lazy,
+        # non-English clients, but I'm not about to include "file transfer"
+        # in 80 different languages here).
+
+        if name != "file transfer" and guid != classNameToGUID["file transfer"]:
+            return 0
+        try:
+            cookie = int(info['Invitation-Cookie'])
+            fileName = info['Application-File']
+            fileSize = int(info['Application-FileSize'])
+        except KeyError:
+            log.msg('Received munged file transfer request ... ignoring.')
+            return 0
+        self.gotSendRequest(fileName, fileSize, cookie, message)
+        return 1
+
+    def _checkFileResponse(self, message, info):
+        """ helper method for checkMessage """
+        try:
+            cmd = info['Invitation-Command'].upper()
+            cookie = int(info['Invitation-Cookie'])
+        except KeyError:
+            return 0
+        accept = (cmd == 'ACCEPT') and 1 or 0
+        requested = self.cookies['iCookies'].get(cookie)
+        if not requested:
+            return 1
+        requested[0].callback((accept, cookie, info))
+        del self.cookies['iCookies'][cookie]
+        return 1
+
+    def _checkFileInfo(self, message, info):
+        """ helper method for checkMessage """
+        try:
+            ip = info['IP-Address']
+            iCookie = int(info['Invitation-Cookie'])
+            aCookie = int(info['AuthCookie'])
+            cmd = info['Invitation-Command'].upper()
+            port = int(info['Port'])
+        except KeyError:
+            return 0
+        accept = (cmd == 'ACCEPT') and 1 or 0
+        requested = self.cookies['external'].get(iCookie)
+        if not requested:
+            return 1 # we didn't ask for this
+        requested[0].callback((accept, ip, port, aCookie, info))
+        del self.cookies['external'][iCookie]
+        return 1
+
+    def checkMessage(self, message):
+        """
+        hook for detecting any notification type messages
+        (e.g. file transfer)
+        """
+        cTypes = [s.lstrip() for s in message.getHeader('Content-Type').split(';')]
+        if self._checkTyping(message, cTypes):
+            return 0
+        if 'text/x-msmsgsinvite' in cTypes:
+            # header-like info is sent as part of the message body.
+            info = {}
+            for line in message.message.split('\r\n'):
+                try:
+                    key, val = line.split(':', 1)
+                    info[key] = val.lstrip()
+                except ValueError:
+                    continue
+            if self._checkFileInvitation(message, info) or self._checkFileInfo(message, info) or self._checkFileResponse(message, info):
+                return 0
+        elif 'text/x-clientcaps' in cTypes:
+            # do something with capabilities
+            return 0
+        return 1
+
+    # negotiation
+    def handle_USR(self, params):
+        checkParamLen(len(params), 4, 'USR')
+        if params[1] == "OK":
+            self.loggedIn()
+
+    # invite a user
+    def handle_CAL(self, params):
+        checkParamLen(len(params), 3, 'CAL')
+        id = int(params[0])
+        if params[1].upper() == "RINGING":
+            self._fireCallback(id, int(params[2])) # session ID as parameter
+
+    # user joined
+    def handle_JOI(self, params):
+        checkParamLen(len(params), 2, 'JOI')
+        self.userJoined(params[0], unquote(params[1]))
+
+    # users participating in the current chat
+    def handle_IRO(self, params):
+        checkParamLen(len(params), 5, 'IRO')
+        self.pendingUsers[params[3]] = unquote(params[4])
+        if params[1] == params[2]:
+            self.gotChattingUsers(self.pendingUsers)
+            self.pendingUsers = {}
+
+    # finished listing users
+    def handle_ANS(self, params):
+        checkParamLen(len(params), 2, 'ANS')
+        if params[1] == "OK":
+            self.loggedIn()
+
+    def handle_ACK(self, params):
+        checkParamLen(len(params), 1, 'ACK')
+        self._fireCallback(int(params[0]), None)
+
+    def handle_NAK(self, params):
+        checkParamLen(len(params), 1, 'NAK')
+        self._fireCallback(int(params[0]), None)
+
+    def handle_BYE(self, params):
+        #checkParamLen(len(params), 1, 'BYE') # I've seen more than 1 param passed to this
+        self.userLeft(params[0])
+
+    # callbacks
+
+    def loggedIn(self):
+        """
+        called when all login details have been negotiated.
+        Messages can now be sent, or new users invited.
+        """
+        pass
+
+    def gotChattingUsers(self, users):
+        """
+        called after connecting to an existing chat session.
+
+        @param users: A dict mapping user handles to screen names
+                      (current users taking part in the conversation)
+        """
+        pass
+
+    def userJoined(self, userHandle, screenName):
+        """
+        called when a user has joined the conversation.
+
+        @param userHandle: the user handle (passport) of the user
+        @param screenName: the screen name of the user
+        """
+        pass
+
+    def userLeft(self, userHandle):
+        """
+        called when a user has left the conversation.
+
+        @param userHandle: the user handle (passport) of the user.
+        """
+        pass
+
+    def gotMessage(self, message):
+        """
+        called when we receive a message.
+
+        @param message: the associated MSNMessage object
+        """
+        pass
+
+    def userTyping(self, message):
+        """
+        called when we receive the special type of message notifying
+        us that a user is typing a message.
+
+        @param message: the associated MSNMessage object
+        """
+        pass
+
+    def gotSendRequest(self, fileName, fileSize, iCookie, message):
+        """
+        called when a contact is trying to send us a file.
+        To accept or reject this transfer see the
+        fileInvitationReply method.
+
+        @param fileName: the name of the file
+        @param fileSize: the size of the file
+        @param iCookie: the invitation cookie, used so the client can
+                        match up your reply with this request.
+        @param message: the MSNMessage object which brought about this
+                        invitation (it may contain more information)
+        """
+        pass
+
+    # api calls
+
+    def inviteUser(self, userHandle):
+        """
+        used to invite a user to the current switchboard server.
+
+        @param userHandle: the user handle (passport) of the desired user.
+
+        @return: A Deferred, the callback for which will be called
+                 when the server notifies us that the user has indeed
+                 been invited.  The callback argument will be a tuple
+                 with 1 element, the sessionID given to the invited user.
+                 I'm not sure if this is useful or not.
+        """
+
+        id, d = self._createIDMapping()
+        self.sendLine("CAL %s %s" % (id, userHandle))
+        return d
+
+    def sendMessage(self, message):
+        """
+        used to send a message.
+
+        @param message: the corresponding MSNMessage object.
+
+        @return: Depending on the value of message.ack.
+                 If set to MSNMessage.MESSAGE_ACK or
+                 MSNMessage.MESSAGE_NACK a Deferred will be returned,
+                 the callback for which will be fired when an ACK or
+                 NACK is received - the callback argument will be
+                 (None,). If set to MSNMessage.MESSAGE_ACK_NONE then
+                 the return value is None.
+        """
+
+        if message.ack not in ('A','N'):
+            id, d = self._nextTransactionID(), None
+        else:
+            id, d = self._createIDMapping()
+        if message.length == 0:
+            message.length = message._calcMessageLen()
+        self.sendLine("MSG %s %s %s" % (id, message.ack, message.length))
+        # apparently order matters with at least MIME-Version and Content-Type
+        self.sendLine('MIME-Version: %s' % message.getHeader('MIME-Version'))
+        self.sendLine('Content-Type: %s' % message.getHeader('Content-Type'))
+        # send the rest of the headers
+        for header in [h for h in message.headers.items() if h[0].lower() not in ('mime-version','content-type')]:
+            self.sendLine("%s: %s" % (header[0], header[1]))
+        self.transport.write(CR+LF)
+        self.transport.write(message.message)
+        return d
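+
+    # A minimal usage sketch, assuming a logged-in SwitchboardClient named
+    # 'client' (MSNMessage is defined earlier in this module):
+    #
+    #     m = MSNMessage()
+    #     m.message = 'hello there'
+    #     m.ack = m.MESSAGE_ACK
+    #     client.sendMessage(m).addCallback(lambda r: log.msg('ACK received'))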
+
+    def sendTypingNotification(self):
+        """
+        used to send a typing notification. Upon receiving this
+        message the official client will display a 'user is typing'
+        message to all other users in the chat session for 10 seconds.
+        The official client sends one of these every 5 seconds (I think)
+        as long as you continue to type.
+        """
+        m = MSNMessage()
+        m.ack = m.MESSAGE_ACK_NONE
+        m.setHeader('Content-Type', 'text/x-msmsgscontrol')
+        m.setHeader('TypingUser', self.userHandle)
+        m.message = "\r\n"
+        self.sendMessage(m)
+
+    def sendFileInvitation(self, fileName, fileSize):
+        """
+        send a notification that we want to send a file.
+
+        @param fileName: the file name
+        @param fileSize: the file size
+
+        @return: A Deferred, the callback of which will be fired
+                 when the user responds to this invitation with an
+                 appropriate message. The callback argument will be
+                 a tuple with 3 elements, the first being 1 or 0
+                 depending on whether they accepted the transfer
+                 (1=yes, 0=no), the second being an invitation cookie
+                 to identify your follow-up responses and the third being
+                 the message 'info' which is a dict of information they
+                 sent in their reply (this doesn't really need to be used).
+                 If you wish to proceed with the transfer see the
+                 sendTransferInfo method.
+        """
+        cookie = self._newInvitationCookie()
+        d = Deferred()
+        m = MSNMessage()
+        m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
+        m.message += 'Application-Name: File Transfer\r\n'
+        m.message += 'Application-GUID: %s\r\n' % (classNameToGUID["file transfer"],)
+        m.message += 'Invitation-Command: INVITE\r\n'
+        m.message += 'Invitation-Cookie: %s\r\n' % str(cookie)
+        m.message += 'Application-File: %s\r\n' % fileName
+        m.message += 'Application-FileSize: %s\r\n\r\n' % str(fileSize)
+        m.ack = m.MESSAGE_ACK_NONE
+        self.sendMessage(m)
+        self.cookies['iCookies'][cookie] = (d, m)
+        return d
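+
+    # A minimal sketch of handling the reply, assuming 'client' is a
+    # logged-in SwitchboardClient and startTransfer is a hypothetical
+    # follow-up using sendTransferInfo and a FileSend instance:
+    #
+    #     def invitationAnswered(result):
+    #         accepted, cookie, info = result
+    #         if accepted:
+    #             startTransfer(cookie)   # hypothetical helper
+    #     client.sendFileInvitation('notes.txt', 1024).addCallback(invitationAnswered)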
+
+    def fileInvitationReply(self, iCookie, accept=1):
+        """
+        used to reply to a file transfer invitation.
+
+        @param iCookie: the invitation cookie of the initial invitation
+        @param accept: whether or not you accept this transfer,
+                       1 = yes, 0 = no, default = 1.
+
+        @return: A Deferred, the callback for which will be fired when
+                 the user responds with the transfer information.
+                 The callback argument will be a tuple with 5 elements,
+                 whether or not they wish to proceed with the transfer
+                 (1=yes, 0=no), their ip, the port, the authentication
+                 cookie (see FileReceive/FileSend) and the message
+                 info (dict) (in case they send extra header-like info
+                 like Internal-IP, this doesn't necessarily need to be
+                 used). If you wish to proceed with the transfer see
+                 FileReceive.
+        """
+        d = Deferred()
+        m = MSNMessage()
+        m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
+        m.message += 'Invitation-Command: %s\r\n' % (accept and 'ACCEPT' or 'CANCEL')
+        m.message += 'Invitation-Cookie: %s\r\n' % str(iCookie)
+        if not accept:
+            m.message += 'Cancel-Code: REJECT\r\n'
+        m.message += 'Launch-Application: FALSE\r\n'
+        m.message += 'Request-Data: IP-Address:\r\n'
+        m.message += '\r\n'
+        m.ack = m.MESSAGE_ACK_NONE
+        self.sendMessage(m)
+        self.cookies['external'][iCookie] = (d, m)
+        return d
+
+    def sendTransferInfo(self, accept, iCookie, authCookie, ip, port):
+        """
+        send information relating to a file transfer session.
+
+        @param accept: whether or not to go ahead with the transfer
+                       (1=yes, 0=no)
+        @param iCookie: the invitation cookie of previous replies
+                        relating to this transfer
+        @param authCookie: the authentication cookie obtained from
+                           a FileSend instance
+        @param ip: your ip
+        @param port: the port on which a FileSend protocol is listening.
+        """
+        m = MSNMessage()
+        m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
+        m.message += 'Invitation-Command: %s\r\n' % (accept and 'ACCEPT' or 'CANCEL')
+        m.message += 'Invitation-Cookie: %s\r\n' % iCookie
+        m.message += 'IP-Address: %s\r\n' % ip
+        m.message += 'Port: %s\r\n' % port
+        m.message += 'AuthCookie: %s\r\n' % authCookie
+        m.message += '\r\n'
+        m.ack = m.MESSAGE_NACK
+        self.sendMessage(m)
+
+class FileReceive(LineReceiver):
+    """
+    This class provides support for receiving files from contacts.
+
+    @ivar fileSize: the size of the file being received (you will have to set this).
+    @ivar connected: true if a connection has been established.
+    @ivar completed: true if the transfer is complete.
+    @ivar bytesReceived: number of bytes (of the file) received.
+                         This does not include header data.
+    """
+
+    def __init__(self, auth, myUserHandle, file, directory="", overwrite=0):
+        """
+        @param auth: auth string received in the file invitation.
+        @param myUserHandle: your userhandle.
+        @param file: A string or file object representing the file
+                     to save data to.
+        @param directory: optional parameter specifying the directory.
+                          Defaults to the current directory.
+        @param overwrite: if true and a file of the same name exists on
+                          your system, it will be overwritten. (0 by default)
+        """
+        self.auth = auth
+        self.myUserHandle = myUserHandle
+        self.fileSize = 0
+        self.connected = 0
+        self.completed = 0
+        self.directory = directory
+        self.bytesReceived = 0
+        self.overwrite = overwrite
+
+        # used for handling current received state
+        self.state = 'CONNECTING'
+        self.segmentLength = 0
+        self.buffer = ''
+
+        if isinstance(file, types.StringType):
+            path = os.path.join(directory, file)
+            if os.path.exists(path) and not self.overwrite:
+                log.msg('File already exists...')
+                raise IOError, "File Exists" # is this all we should do here?
+            self.file = open(os.path.join(directory, file), 'wb')
+        else:
+            self.file = file
+
+    def connectionMade(self):
+        self.connected = 1
+        self.state = 'INHEADER'
+        self.sendLine('VER MSNFTP')
+
+    def connectionLost(self, reason):
+        self.connected = 0
+        self.file.close()
+
+    def parseHeader(self, header):
+        """ parse the header of each 'message' to obtain the segment length """
+
+        if ord(header[0]) != 0: # they requested that we close the connection
+            self.transport.loseConnection()
+            return
+        try:
+            extra, factor = header[1:]
+        except ValueError:
+            # munged header, ending transfer
+            self.transport.loseConnection()
+            raise
+        extra  = ord(extra)
+        factor = ord(factor)
+        return factor * 256 + extra
+
+    def lineReceived(self, line):
+        temp = line.split()
+        if len(temp) == 1:
+            params = []
+        else:
+            params = temp[1:]
+        cmd = temp[0]
+        handler = getattr(self, "handle_%s" % cmd.upper(), None)
+        if handler:
+            handler(params) # try/except
+        else:
+            self.handle_UNKNOWN(cmd, params)
+
+    def rawDataReceived(self, data):
+        bufferLen = len(self.buffer)
+        if self.state == 'INHEADER':
+            delim = 3-bufferLen
+            self.buffer += data[:delim]
+            if len(self.buffer) == 3:
+                self.segmentLength = self.parseHeader(self.buffer)
+                if not self.segmentLength:
+                    return # hrm
+                self.buffer = ""
+                self.state = 'INSEGMENT'
+            extra = data[delim:]
+            if len(extra) > 0:
+                self.rawDataReceived(extra)
+            return
+
+        elif self.state == 'INSEGMENT':
+            dataSeg = data[:(self.segmentLength-bufferLen)]
+            self.buffer += dataSeg
+            self.bytesReceived += len(dataSeg)
+            if len(self.buffer) == self.segmentLength:
+                self.gotSegment(self.buffer)
+                self.buffer = ""
+                if self.bytesReceived == self.fileSize:
+                    self.completed = 1
+                    self.buffer = ""
+                    self.file.close()
+                    self.sendLine("BYE 16777989")
+                    return
+                self.state = 'INHEADER'
+                extra = data[(self.segmentLength-bufferLen):]
+                if len(extra) > 0:
+                    self.rawDataReceived(extra)
+                return
+
+    def handle_VER(self, params):
+        checkParamLen(len(params), 1, 'VER')
+        if params[0].upper() == "MSNFTP":
+            self.sendLine("USR %s %s" % (self.myUserHandle, self.auth))
+        else:
+            log.msg('they sent the wrong version, time to quit this transfer')
+            self.transport.loseConnection()
+
+    def handle_FIL(self, params):
+        checkParamLen(len(params), 1, 'FIL')
+        try:
+            self.fileSize = int(params[0])
+        except ValueError: # they sent the wrong file size - probably want to log this
+            self.transport.loseConnection()
+            return
+        self.setRawMode()
+        self.sendLine("TFR")
+
+    def handle_UNKNOWN(self, cmd, params):
+        log.msg('received unknown command (%s), params: %s' % (cmd, params))
+
+    def gotSegment(self, data):
+        """ called when a segment (block) of data arrives. """
+        self.file.write(data)
+
+class FileSend(LineReceiver):
+    """
+    This class provides support for sending files to other contacts.
+
+    @ivar bytesSent: the number of bytes that have currently been sent.
+    @ivar completed: true if the send has completed.
+    @ivar connected: true if a connection has been established.
+    @ivar targetUser: the target user (contact).
+    @ivar segmentSize: the segment (block) size.
+    @ivar auth: the auth cookie (number) to use when sending the
+                transfer invitation
+    """
+
+    def __init__(self, file):
+        """
+        @param file: A string or file object representing the file to send.
+        """
+
+        if isinstance(file, types.StringType):
+            self.file = open(file, 'rb')
+        else:
+            self.file = file
+
+        self.fileSize = 0
+        self.bytesSent = 0
+        self.completed = 0
+        self.connected = 0
+        self.targetUser = None
+        self.segmentSize = 2045
+        self.auth = randint(0, 2**30)
+        self._pendingSend = None # :(
+
+    def connectionMade(self):
+        self.connected = 1
+
+    def connectionLost(self, reason):
+        if self._pendingSend and self._pendingSend.active():
+            self._pendingSend.cancel()
+            self._pendingSend = None
+        if self.bytesSent == self.fileSize:
+            self.completed = 1
+        self.connected = 0
+        self.file.close()
+
+    def lineReceived(self, line):
+        temp = line.split()
+        if len(temp) == 1:
+            params = []
+        else:
+            params = temp[1:]
+        cmd = temp[0]
+        handler = getattr(self, "handle_%s" % cmd.upper(), None)
+        if handler:
+            handler(params)
+        else:
+            self.handle_UNKNOWN(cmd, params)
+
+    def handle_VER(self, params):
+        checkParamLen(len(params), 1, 'VER')
+        if params[0].upper() == "MSNFTP":
+            self.sendLine("VER MSNFTP")
+        else: # they sent some weird version during negotiation, i'm quitting.
+            self.transport.loseConnection()
+
+    def handle_USR(self, params):
+        checkParamLen(len(params), 2, 'USR')
+        self.targetUser = params[0]
+        if self.auth == int(params[1]):
+            self.sendLine("FIL %s" % (self.fileSize))
+        else: # they failed the auth test, disconnecting.
+            self.transport.loseConnection()
+
+    def handle_TFR(self, params):
+        checkParamLen(len(params), 0, 'TFR')
+        # they are ready for me to start sending
+        self.sendPart()
+
+    def handle_BYE(self, params):
+        self.completed = (self.bytesSent == self.fileSize)
+        self.transport.loseConnection()
+
+    def handle_CCL(self, params):
+        self.completed = (self.bytesSent == self.fileSize)
+        self.transport.loseConnection()
+
+    def handle_UNKNOWN(self, cmd, params):
+        log.msg('received unknown command (%s), params: %s' % (cmd, params))
+
+    def makeHeader(self, size):
+        """ make the appropriate header given a specific segment size. """
+        quotient, remainder = divmod(size, 256)
+        return chr(0) + chr(remainder) + chr(quotient)
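+
+    # A worked example of the 3-byte block header, using the default
+    # FileSend.segmentSize of 2045 bytes:
+    #
+    #     self.makeHeader(2045)   # -> '\x00\xfd\x07'  (divmod(2045, 256) == (7, 253))
+    #     7 * 256 + 253           # -> 2045, as recovered by FileReceive.parseHeader()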
+
+    def sendPart(self):
+        """ send a segment of data """
+        if not self.connected:
+            self._pendingSend = None
+            return # may be buggy (if handle_CCL/BYE is called but self.connected is still 1)
+        data = self.file.read(self.segmentSize)
+        if data:
+            dataSize = len(data)
+            header = self.makeHeader(dataSize)
+            self.bytesSent += dataSize
+            self.transport.write(header + data)
+            self._pendingSend = reactor.callLater(0, self.sendPart)
+        else:
+            self._pendingSend = None
+            self.completed = 1
+
+# mapping of error codes to error messages
+errorCodes = {
+
+    200 : "Syntax error",
+    201 : "Invalid parameter",
+    205 : "Invalid user",
+    206 : "Domain name missing",
+    207 : "Already logged in",
+    208 : "Invalid username",
+    209 : "Invalid screen name",
+    210 : "User list full",
+    215 : "User already there",
+    216 : "User already on list",
+    217 : "User not online",
+    218 : "Already in mode",
+    219 : "User is in the opposite list",
+    223 : "Too many groups",
+    224 : "Invalid group",
+    225 : "User not in group",
+    229 : "Group name too long",
+    230 : "Cannot remove group 0",
+    231 : "Invalid group",
+    280 : "Switchboard failed",
+    281 : "Transfer to switchboard failed",
+
+    300 : "Required field missing",
+    301 : "Too many FND responses",
+    302 : "Not logged in",
+
+    500 : "Internal server error",
+    501 : "Database server error",
+    502 : "Command disabled",
+    510 : "File operation failed",
+    520 : "Memory allocation failed",
+    540 : "Wrong CHL value sent to server",
+
+    600 : "Server is busy",
+    601 : "Server is unavailable",
+    602 : "Peer nameserver is down",
+    603 : "Database connection failed",
+    604 : "Server is going down",
+    605 : "Server unavailable",
+
+    707 : "Could not create connection",
+    710 : "Invalid CVR parameters",
+    711 : "Write is blocking",
+    712 : "Session is overloaded",
+    713 : "Too many active users",
+    714 : "Too many sessions",
+    715 : "Not expected",
+    717 : "Bad friend file",
+    731 : "Not expected",
+
+    800 : "Requests too rapid",
+
+    910 : "Server too busy",
+    911 : "Authentication failed",
+    912 : "Server too busy",
+    913 : "Not allowed when offline",
+    914 : "Server too busy",
+    915 : "Server too busy",
+    916 : "Server too busy",
+    917 : "Server too busy",
+    918 : "Server too busy",
+    919 : "Server too busy",
+    920 : "Not accepting new users",
+    921 : "Server too busy",
+    922 : "Server too busy",
+    923 : "No parent consent",
+    924 : "Passport account not yet verified"
+
+}
+
+# mapping of status codes to readable status format
+statusCodes = {
+
+    STATUS_ONLINE  : "Online",
+    STATUS_OFFLINE : "Offline",
+    STATUS_HIDDEN  : "Appear Offline",
+    STATUS_IDLE    : "Idle",
+    STATUS_AWAY    : "Away",
+    STATUS_BUSY    : "Busy",
+    STATUS_BRB     : "Be Right Back",
+    STATUS_PHONE   : "On the Phone",
+    STATUS_LUNCH   : "Out to Lunch"
+
+}
+
+# mapping of list ids to list codes
+listIDToCode = {
+
+    FORWARD_LIST : 'fl',
+    BLOCK_LIST   : 'bl',
+    ALLOW_LIST   : 'al',
+    REVERSE_LIST : 'rl'
+
+}
+
+# mapping of list codes to list ids
+listCodeToID = {}
+for id,code in listIDToCode.items():
+    listCodeToID[code] = id
+
+del id, code
+
+# Mapping of class GUIDs to simple english names
+guidToClassName = {
+    "{5D3E02AB-6190-11d3-BBBB-00C04F795683}": "file transfer",
+    }
+
+# Reverse of the above
+classNameToGUID = {}
+for guid, name in guidToClassName.iteritems():
+    classNameToGUID[name] = guid
diff --git a/ThirdParty/Twisted/twisted/words/protocols/oscar.py b/ThirdParty/Twisted/twisted/words/protocols/oscar.py
new file mode 100644
index 0000000..81571d4
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/protocols/oscar.py
@@ -0,0 +1,1235 @@
+# -*- test-case-name: twisted.words.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+An implementation of the OSCAR protocol, which AIM and ICQ use to communicate.
+
+Maintainer: Paul Swartz
+"""
+
+import struct
+import string
+import socket
+import random
+import types
+import re
+
+from twisted.internet import reactor, defer, protocol
+from twisted.python import log
+from twisted.python.hashlib import md5
+
+def logPacketData(data):
+    lines = len(data)/16
+    if lines*16 != len(data): lines=lines+1
+    for i in range(lines):
+        d = tuple(data[16*i:16*i+16])
+        hex = map(lambda x: "%02X"%ord(x),d)
+        text = map(lambda x: (len(repr(x))>3 and '.') or x, d)
+        log.msg(' '.join(hex)+ ' '*3*(16-len(d)) +''.join(text))
+    log.msg('')
+
+def SNAC(fam,sub,id,data,flags=[0,0]):
+    header="!HHBBL"
+    head=struct.pack(header,fam,sub,
+                     flags[0],flags[1],
+                     id)
+    return head+str(data)
+
+def readSNAC(data):
+    header="!HHBBL"
+    head=list(struct.unpack(header,data[:10]))
+    return head+[data[10:]]
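+
+# For example, SNAC(0x04, 0x06, 1, 'data') prepends the ten header bytes
+# '\x00\x04\x00\x06\x00\x00\x00\x00\x00\x01' to the payload, and readSNAC() of
+# the resulting string returns [4, 6, 0, 0, 1, 'data'].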
+
+def TLV(type,value):
+    header="!HH"
+    head=struct.pack(header,type,len(value))
+    return head+str(value)
+
+def readTLVs(data,count=None):
+    header="!HH"
+    dict={}
+    while data and len(dict)!=count:
+        head=struct.unpack(header,data[:4])
+        dict[head[0]]=data[4:4+head[1]]
+        data=data[4+head[1]:]
+    if not count:
+        return dict
+    return dict,data
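+
+# For example, TLV(0x01, 'user') == '\x00\x01\x00\x04user' (type, length,
+# value), and readTLVs('\x00\x01\x00\x04user') == {1: 'user'}.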
+
+def encryptPasswordMD5(password,key):
+    m=md5()
+    m.update(key)
+    m.update(md5(password).digest())
+    m.update("AOL Instant Messenger (SM)")
+    return m.digest()
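+    # i.e. digest = md5(challenge_key + md5(password).digest()
+    #                   + "AOL Instant Messenger (SM)"), so the cleartext
+    # password itself is never sent during MD5 login.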
+
+def encryptPasswordICQ(password):
+    key=[0xF3,0x26,0x81,0xC4,0x39,0x86,0xDB,0x92,0x71,0xA3,0xB9,0xE6,0x53,0x7A,0x95,0x7C]
+    bytes=map(ord,password)
+    r=""
+    for i in range(len(bytes)):
+        r=r+chr(bytes[i]^key[i%len(key)])
+    return r
+
+def dehtml(text):
+    text=string.replace(text,"<br>","\n")
+    text=string.replace(text,"<BR>","\n")
+    text=string.replace(text,"<Br>","\n") # XXX make this a regexp
+    text=string.replace(text,"<bR>","\n")
+    text=re.sub('<.*?>','',text)
+    text=string.replace(text,'&gt;','>')
+    text=string.replace(text,'&lt;','<')
+    text=string.replace(text,'&nbsp;',' ')
+    text=string.replace(text,'&quot;','"')
+    text=string.replace(text,'&amp;','&')
+    return text
+
+def html(text):
+    text=string.replace(text,'"','"')
+    text=string.replace(text,'&','&')
+    text=string.replace(text,'<','<')
+    text=string.replace(text,'>','>')
+    text=string.replace(text,"\n","<br>")
+    return '<html><body bgcolor="white"><font color="black">%s</font></body></html>'%text
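+
+# For example, html('1 < 2') returns
+# '<html><body bgcolor="white"><font color="black">1 &lt; 2</font></body></html>',
+# and dehtml() of that string gives back '1 < 2'.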
+
+class OSCARUser:
+    def __init__(self, name, warn, tlvs):
+        self.name = name
+        self.warning = warn
+        self.flags = []
+        self.caps = []
+        for k,v in tlvs.items():
+            if k == 1: # user flags
+                v=struct.unpack('!H',v)[0]
+                for o, f in [(1,'trial'),
+                             (2,'unknown bit 2'),
+                             (4,'aol'),
+                             (8,'unknown bit 4'),
+                             (16,'aim'),
+                             (32,'away'),
+                             (1024,'activebuddy')]:
+                    if v&o: self.flags.append(f)
+            elif k == 2: # member since date
+                self.memberSince = struct.unpack('!L',v)[0]
+            elif k == 3: # on-since
+                self.onSince = struct.unpack('!L',v)[0]
+            elif k == 4: # idle time
+                self.idleTime = struct.unpack('!H',v)[0]
+            elif k == 5: # unknown
+                pass
+            elif k == 6: # icq online status
+                if v[2] == '\x00':
+                    self.icqStatus = 'online'
+                elif v[2] == '\x01':
+                    self.icqStatus = 'away'
+                elif v[2] == '\x02':
+                    self.icqStatus = 'dnd'
+                elif v[2] == '\x04':
+                    self.icqStatus = 'out'
+                elif v[2] == '\x10':
+                    self.icqStatus = 'busy'
+                else:
+                    self.icqStatus = 'unknown'
+            elif k == 10: # icq ip address
+                self.icqIPaddy = socket.inet_ntoa(v)
+            elif k == 12: # icq random stuff
+                self.icqRandom = v
+            elif k == 13: # capabilities
+                caps=[]
+                while v:
+                    c=v[:16]
+                    if c==CAP_ICON: caps.append("icon")
+                    elif c==CAP_IMAGE: caps.append("image")
+                    elif c==CAP_VOICE: caps.append("voice")
+                    elif c==CAP_CHAT: caps.append("chat")
+                    elif c==CAP_GET_FILE: caps.append("getfile")
+                    elif c==CAP_SEND_FILE: caps.append("sendfile")
+                    elif c==CAP_SEND_LIST: caps.append("sendlist")
+                    elif c==CAP_GAMES: caps.append("games")
+                    else: caps.append(("unknown",c))
+                    v=v[16:]
+                caps.sort()
+                self.caps=caps
+            elif k == 14: pass
+            elif k == 15: # session length (aim)
+                self.sessionLength = struct.unpack('!L',v)[0]
+            elif k == 16: # session length (aol)
+                self.sessionLength = struct.unpack('!L',v)[0]
+            elif k == 30: # no idea
+                pass
+            else:
+                log.msg("unknown tlv for user %s\nt: %s\nv: %s"%(self.name,k,repr(v)))
+
+    def __str__(self):
+        s = '<OSCARUser %s' % self.name
+        o = []
+        if self.warning!=0: o.append('warning level %s'%self.warning)
+        if hasattr(self, 'flags'): o.append('flags %s'%self.flags)
+        if hasattr(self, 'sessionLength'): o.append('online for %i minutes' % (self.sessionLength/60,))
+        if hasattr(self, 'idleTime'): o.append('idle for %i minutes' % self.idleTime)
+        if self.caps: o.append('caps %s'%self.caps)
+        if o:
+            s=s+', '+', '.join(o)
+        s=s+'>'
+        return s
+
+
+class SSIGroup:
+    def __init__(self, name, tlvs = {}):
+        self.name = name
+        #self.tlvs = []
+        #self.userIDs = []
+        self.usersToID = {}
+        self.users = []
+        #if not tlvs.has_key(0xC8): return
+        #buddyIDs = tlvs[0xC8]
+        #while buddyIDs:
+        #    bid = struct.unpack('!H',buddyIDs[:2])[0]
+        #    buddyIDs = buddyIDs[2:]
+        #    self.users.append(bid)
+
+    def findIDFor(self, user):
+        return self.usersToID[user]
+
+    def addUser(self, buddyID, user):
+        self.usersToID[user] = buddyID
+        self.users.append(user)
+        user.group = self
+
+    def oscarRep(self, groupID, buddyID):
+        tlvData = TLV(0xc8, reduce(lambda x,y:x+y, [struct.pack('!H',self.usersToID[x]) for x in self.users]))
+        return struct.pack('!H', len(self.name)) + self.name + \
+               struct.pack('!HH', groupID, buddyID) + '\000\001' + tlvData
+
+
+class SSIBuddy:
+    def __init__(self, name, tlvs = {}):
+        self.name = name
+        self.tlvs = tlvs
+        for k,v in tlvs.items():
+            if k == 0x013c: # buddy comment
+                self.buddyComment = v
+            elif k == 0x013d: # buddy alerts
+                actionFlag = ord(v[0])
+                whenFlag = ord(v[1])
+                self.alertActions = []
+                self.alertWhen = []
+                if actionFlag&1:
+                    self.alertActions.append('popup')
+                if actionFlag&2:
+                    self.alertActions.append('sound')
+                if whenFlag&1:
+                    self.alertWhen.append('online')
+                if whenFlag&2:
+                    self.alertWhen.append('unidle')
+                if whenFlag&4:
+                    self.alertWhen.append('unaway')
+            elif k == 0x013e:
+                self.alertSound = v
+ 
+    def oscarRep(self, groupID, buddyID):
+        tlvData = reduce(lambda x,y: x+y, map(lambda (k,v):TLV(k,v), self.tlvs.items()), '\000\000')
+        return struct.pack('!H', len(self.name)) + self.name + \
+               struct.pack('!HH', groupID, buddyID) + '\000\000' + tlvData
+
+
+class OscarConnection(protocol.Protocol):
+    def connectionMade(self):
+        self.state=""
+        self.seqnum=0
+        self.buf=''
+        self.stopKeepAliveID = None
+        self.setKeepAlive(4*60) # 4 minutes
+
+    def connectionLost(self, reason):
+        log.msg("Connection Lost! %s" % self)
+        self.stopKeepAlive()
+
+#    def connectionFailed(self):
+#        log.msg("Connection Failed! %s" % self)
+#        self.stopKeepAlive()
+
+    def sendFLAP(self,data,channel = 0x02):
+        header="!cBHH"
+        self.seqnum=(self.seqnum+1)%0xFFFF
+        seqnum=self.seqnum
+        head=struct.pack(header,'*', channel,
+                         seqnum, len(data))
+        self.transport.write(head+str(data))
+#        if isinstance(self, ChatService):
+#            logPacketData(head+str(data))
+
+    def readFlap(self):
+        header="!cBHH"
+        if len(self.buf)<6: return
+        flap=struct.unpack(header,self.buf[:6])
+        if len(self.buf)<6+flap[3]: return
+        data,self.buf=self.buf[6:6+flap[3]],self.buf[6+flap[3]:]
+        return [flap[1],data]
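+        # (A FLAP frame is the literal '*', a channel byte, a big-endian
+        # sequence number and a payload length, then the payload; e.g. a buffer
+        # starting with '*\x02\x00\x01\x00\x02hi' yields [2, 'hi'], i.e.
+        # channel 2 carrying the two data bytes 'hi'.)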
+
+    def dataReceived(self,data):
+#        if isinstance(self, ChatService):
+#            logPacketData(data)
+        self.buf=self.buf+data
+        flap=self.readFlap()
+        while flap:
+            func=getattr(self,"oscar_%s"%self.state,None)
+            if not func:
+                log.msg("no func for state: %s" % self.state)
+            state=func(flap)
+            if state:
+                self.state=state
+            flap=self.readFlap()
+
+    def setKeepAlive(self,t):
+        self.keepAliveDelay=t
+        self.stopKeepAlive()
+        self.stopKeepAliveID = reactor.callLater(t, self.sendKeepAlive)
+
+    def sendKeepAlive(self):
+        self.sendFLAP("",0x05)
+        self.stopKeepAliveID = reactor.callLater(self.keepAliveDelay, self.sendKeepAlive)
+
+    def stopKeepAlive(self):
+        if self.stopKeepAliveID:
+            self.stopKeepAliveID.cancel()
+            self.stopKeepAliveID = None
+
+    def disconnect(self):
+        """
+        send the disconnect flap, and sever the connection
+        """
+        self.sendFLAP('', 0x04)
+        def f(reason): pass
+        self.connectionLost = f
+        self.transport.loseConnection()
+
+
+class SNACBased(OscarConnection):
+    snacFamilies = {
+        # family : (version, toolID, toolVersion)
+    }
+    def __init__(self,cookie):
+        self.cookie=cookie
+        self.lastID=0
+        self.supportedFamilies = ()
+        self.requestCallbacks={} # request id:Deferred
+
+    def sendSNAC(self,fam,sub,data,flags=[0,0]):
+        """
+        send a snac and wait for the response by returning a Deferred.
+        """
+        reqid=self.lastID
+        self.lastID=reqid+1
+        d = defer.Deferred()
+        d.reqid = reqid
+
+        #d.addErrback(self._ebDeferredError,fam,sub,data) # XXX for testing
+
+        self.requestCallbacks[reqid] = d
+        self.sendFLAP(SNAC(fam,sub,reqid,data))
+        return d
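+        # The request id packed into the SNAC header is echoed back by the
+        # server; oscar_Data() below uses it (snac[4]) to find this Deferred in
+        # requestCallbacks and fire it with the reply.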
+
+    def _ebDeferredError(self, error, fam, sub, data):
+        log.msg('ERROR IN DEFERRED %s' % error)
+        log.msg('on sending of message, family 0x%02x, subtype 0x%02x' % (fam, sub))
+        log.msg('data: %s' % repr(data))
+
+    def sendSNACnr(self,fam,sub,data,flags=[0,0]):
+        """
+        send a snac, but don't bother adding a deferred, we don't care.
+        """
+        self.sendFLAP(SNAC(fam,sub,0x10000*fam+sub,data))
+
+    def oscar_(self,data):
+        self.sendFLAP("\000\000\000\001"+TLV(6,self.cookie), 0x01)
+        return "Data"
+
+    def oscar_Data(self,data):
+        snac=readSNAC(data[1])
+        if self.requestCallbacks.has_key(snac[4]):
+            d = self.requestCallbacks[snac[4]]
+            del self.requestCallbacks[snac[4]]
+            if snac[1]!=1:
+                d.callback(snac)
+            else:
+                d.errback(snac)
+            return
+        func=getattr(self,'oscar_%02X_%02X'%(snac[0],snac[1]),None)
+        if not func:
+            self.oscar_unknown(snac)
+        else:
+            func(snac[2:])
+        return "Data"
+
+    def oscar_unknown(self,snac):
+        log.msg("unknown for %s" % self)
+        log.msg(snac)
+
+
+    def oscar_01_03(self, snac):
+        numFamilies = len(snac[3])/2
+        self.supportedFamilies = struct.unpack("!"+str(numFamilies)+'H', snac[3])
+        d = ''
+        for fam in self.supportedFamilies:
+            if self.snacFamilies.has_key(fam):
+                d=d+struct.pack('!2H',fam,self.snacFamilies[fam][0])
+        self.sendSNACnr(0x01,0x17, d)
+
+    def oscar_01_0A(self,snac):
+        """
+        change of rate information.
+        """
+        # this can be parsed, maybe we can even work it in
+        pass
+
+    def oscar_01_18(self,snac):
+        """
+        host versions, in the same format as we sent
+        """
+        self.sendSNACnr(0x01,0x06,"") #pass
+
+    def clientReady(self):
+        """
+        called when the client is ready to be online
+        """
+        d = ''
+        for fam in self.supportedFamilies:
+            if self.snacFamilies.has_key(fam):
+                version, toolID, toolVersion = self.snacFamilies[fam]
+                d = d + struct.pack('!4H',fam,version,toolID,toolVersion)
+        self.sendSNACnr(0x01,0x02,d)
+
+class BOSConnection(SNACBased):
+    snacFamilies = {
+        0x01:(3, 0x0110, 0x059b),
+        0x13:(3, 0x0110, 0x059b),
+        0x02:(1, 0x0110, 0x059b),
+        0x03:(1, 0x0110, 0x059b),
+        0x04:(1, 0x0110, 0x059b),
+        0x06:(1, 0x0110, 0x059b),
+        0x08:(1, 0x0104, 0x0001),
+        0x09:(1, 0x0110, 0x059b),
+        0x0a:(1, 0x0110, 0x059b),
+        0x0b:(1, 0x0104, 0x0001),
+        0x0c:(1, 0x0104, 0x0001)
+    }
+
+    capabilities = None
+
+    def __init__(self,username,cookie):
+        SNACBased.__init__(self,cookie)
+        self.username=username
+        self.profile = None
+        self.awayMessage = None
+        self.services = {}
+
+        if not self.capabilities:
+            self.capabilities = [CAP_CHAT]
+
+    def parseUser(self,data,count=None):
+        l=ord(data[0])
+        name=data[1:1+l]
+        warn,foo=struct.unpack("!HH",data[1+l:5+l])
+        warn=int(warn/10)
+        tlvs=data[5+l:]
+        if count:
+            tlvs,rest = readTLVs(tlvs,foo)
+        else:
+            tlvs,rest = readTLVs(tlvs), None
+        u = OSCARUser(name, warn, tlvs)
+        if rest == None:
+            return u
+        else:
+            return u, rest
+
+    def oscar_01_05(self, snac, d = None):
+        """
+        data for a new service connection
+        d might be a deferred to be called back when the service is ready
+        """
+        tlvs = readTLVs(snac[3][2:])
+        service = struct.unpack('!H',tlvs[0x0d])[0]
+        ip = tlvs[5]
+        cookie = tlvs[6]
+        #c = serviceClasses[service](self, cookie, d)
+        c = protocol.ClientCreator(reactor, serviceClasses[service], self, cookie, d)
+        def addService(x):
+            self.services[service] = x
+        c.connectTCP(ip, 5190).addCallback(addService)
+        #self.services[service] = c
+
+    def oscar_01_07(self,snac):
+        """
+        rate parameters
+        """
+        self.sendSNACnr(0x01,0x08,"\x00\x01\x00\x02\x00\x03\x00\x04\x00\x05") # ack
+        self.initDone()
+        self.sendSNACnr(0x13,0x02,'') # SSI rights info
+        self.sendSNACnr(0x02,0x02,'') # location rights info
+        self.sendSNACnr(0x03,0x02,'') # buddy list rights
+        self.sendSNACnr(0x04,0x04,'') # ICBM parms
+        self.sendSNACnr(0x09,0x02,'') # BOS rights
+
+    def oscar_01_10(self,snac):
+        """
+        we've been warned
+        """
+        skip = struct.unpack('!H',snac[3][:2])[0]
+        newLevel = struct.unpack('!H',snac[3][2+skip:4+skip])[0]/10
+        if len(snac[3])>4+skip:
+            by = self.parseUser(snac[3][4+skip:])
+        else:
+            by = None
+        self.receiveWarning(newLevel, by)
+
+    def oscar_01_13(self,snac):
+        """
+        MOTD
+        """
+        pass # we don't care for now
+
+    def oscar_02_03(self, snac):
+        """
+        location rights response
+        """
+        tlvs = readTLVs(snac[3])
+        self.maxProfileLength = tlvs[1]
+
+    def oscar_03_03(self, snac):
+        """
+        buddy list rights response
+        """
+        tlvs = readTLVs(snac[3])
+        self.maxBuddies = tlvs[1]
+        self.maxWatchers = tlvs[2]
+
+    def oscar_03_0B(self, snac):
+        """
+        buddy update
+        """
+        self.updateBuddy(self.parseUser(snac[3]))
+
+    def oscar_03_0C(self, snac):
+        """
+        buddy offline
+        """
+        self.offlineBuddy(self.parseUser(snac[3]))
+
+#    def oscar_04_03(self, snac):
+
+    def oscar_04_05(self, snac):
+        """
+        ICBM parms response
+        """
+        self.sendSNACnr(0x04,0x02,'\x00\x00\x00\x00\x00\x0b\x1f@\x03\xe7\x03\xe7\x00\x00\x00\x00') # IM rights
+
+    def oscar_04_07(self, snac):
+        """
+        ICBM message (instant message)
+        """
+        data = snac[3]
+        cookie, data = data[:8], data[8:]
+        channel = struct.unpack('!H',data[:2])[0]
+        data = data[2:]
+        user, data = self.parseUser(data, 1)
+        tlvs = readTLVs(data)
+        if channel == 1: # message
+            flags = []
+            multiparts = []
+            for k, v in tlvs.items():
+                if k == 2:
+                    while v:
+                        v = v[2:] # skip bad data
+                        messageLength, charSet, charSubSet = struct.unpack('!3H', v[:6])
+                        messageLength -= 4
+                        message = [v[6:6+messageLength]]
+                        if charSet == 0:
+                            pass # don't add anything special
+                        elif charSet == 2:
+                            message.append('unicode')
+                        elif charSet == 3:
+                            message.append('iso-8859-1')
+                        elif charSet == 0xffff:
+                            message.append('none')
+                        if charSubSet == 0xb:
+                            message.append('macintosh')
+                        if messageLength > 0: multiparts.append(tuple(message))
+                        v = v[6+messageLength:]
+                elif k == 3:
+                    flags.append('acknowledge')
+                elif k == 4:
+                    flags.append('auto')
+                elif k == 6:
+                    flags.append('offline')
+                elif k == 8:
+                    iconLength, foo, iconSum, iconStamp = struct.unpack('!LHHL',v)
+                    if iconLength:
+                        flags.append('icon')
+                        flags.append((iconLength, iconSum, iconStamp))
+                elif k == 9:
+                    flags.append('buddyrequest')
+                elif k == 0xb: # unknown
+                    pass
+                elif k == 0x17:
+                    flags.append('extradata')
+                    flags.append(v)
+                else:
+                    log.msg('unknown TLV for incoming IM, %04x, %s' % (k,repr(v)))
+
+#  unknown tlv for user SNewdorf
+#  t: 29
+#  v: '\x00\x00\x00\x05\x02\x01\xd2\x04r\x00\x01\x01\x10/\x8c\x8b\x8a\x1e\x94*\xbc\x80}\x8d\xc4;\x1dEM'
+# XXX what is this?
+            self.receiveMessage(user, multiparts, flags)
+        elif channel == 2: # rendezvous
+            status = struct.unpack('!H',tlvs[5][:2])[0]
+            requestClass = tlvs[5][10:26]
+            moreTLVs = readTLVs(tlvs[5][26:])
+            if requestClass == CAP_CHAT: # a chat request
+                exchange = struct.unpack('!H',moreTLVs[10001][:2])[0]
+                name = moreTLVs[10001][3:-2]
+                instance = struct.unpack('!H',moreTLVs[10001][-2:])[0]
+                if not self.services.has_key(SERVICE_CHATNAV):
+                    self.connectService(SERVICE_CHATNAV,1).addCallback(lambda x: self.services[SERVICE_CHATNAV].getChatInfo(exchange, name, instance).\
+                        addCallback(self._cbGetChatInfoForInvite, user, moreTLVs[12]))
+                else:
+                    self.services[SERVICE_CHATNAV].getChatInfo(exchange, name, instance).\
+                        addCallback(self._cbGetChatInfoForInvite, user, moreTLVs[12])
+            elif requestClass == CAP_SEND_FILE:
+                if moreTLVs.has_key(11): # cancel
+                    log.msg('cancelled file request')
+                    log.msg(status)
+                    return # handle this later
+                name = moreTLVs[10001][9:-7]
+                desc = moreTLVs[12]
+                log.msg('file request from %s, %s, %s' % (user, name, desc))
+                self.receiveSendFileRequest(user, name, desc, cookie)
+            else:
+                log.msg('unsupported rendezvous: %s' % requestClass)
+                log.msg(repr(moreTLVs))
+        else:
+            log.msg('unknown channel %02x' % channel)
+            log.msg(tlvs)
+
+    def _cbGetChatInfoForInvite(self, info, user, message):
+        apply(self.receiveChatInvite, (user,message)+info)
+
+    def oscar_09_03(self, snac):
+        """
+        BOS rights response
+        """
+        tlvs = readTLVs(snac[3])
+        self.maxPermitList = tlvs[1]
+        self.maxDenyList = tlvs[2]
+
+    def oscar_0B_02(self, snac):
+        """
+        stats reporting interval
+        """
+        self.reportingInterval = struct.unpack('!H',snac[3][:2])[0]
+
+    def oscar_13_03(self, snac):
+        """
+        SSI rights response
+        """
+        #tlvs = readTLVs(snac[3])
+        pass # we don't know how to parse this
+
+    # methods to be called by the client, and their support methods
+    def requestSelfInfo(self):
+        """
+        ask for the OSCARUser for ourselves
+        """
+        d = defer.Deferred()
+        self.sendSNAC(0x01, 0x0E, '').addCallback(self._cbRequestSelfInfo, d)
+        return d
+
+    def _cbRequestSelfInfo(self, snac, d):
+        d.callback(self.parseUser(snac[5]))
+
+    def initSSI(self):
+        """
+        this sends the rate request for family 0x13 (Server Side Information)
+        so we can then use it
+        """
+        return self.sendSNAC(0x13, 0x02, '').addCallback(self._cbInitSSI)
+
+    def _cbInitSSI(self, snac, d):
+        return {} # don't even bother parsing this
+
+    def requestSSI(self, timestamp = 0, revision = 0):
+        """
+        request the server side information
+        if the deferred gets None, it means the SSI is the same
+        """
+        return self.sendSNAC(0x13, 0x05,
+            struct.pack('!LH',timestamp,revision)).addCallback(self._cbRequestSSI)
+
+    def _cbRequestSSI(self, snac, args = ()):
+        if snac[1] == 0x0f: # same SSI as we have
+            return
+        itemdata = snac[5][3:]
+        if args:
+            revision, groups, permit, deny, permitMode, visibility = args
+        else:
+            version, revision = struct.unpack('!BH', snac[5][:3])
+            groups = {}
+            permit = []
+            deny = []
+            permitMode = None
+            visibility = None
+        while len(itemdata)>4:
+            nameLength = struct.unpack('!H', itemdata[:2])[0]
+            name = itemdata[2:2+nameLength]
+            groupID, buddyID, itemType, restLength = \
+                struct.unpack('!4H', itemdata[2+nameLength:10+nameLength])
+            tlvs = readTLVs(itemdata[10+nameLength:10+nameLength+restLength])
+            itemdata = itemdata[10+nameLength+restLength:]
+            if itemType == 0: # buddies
+                groups[groupID].addUser(buddyID, SSIBuddy(name, tlvs))
+            elif itemType == 1: # group
+                g = SSIGroup(name, tlvs)
+                if groups.has_key(0): groups[0].addUser(groupID, g)
+                groups[groupID] = g
+            elif itemType == 2: # permit
+                permit.append(name)
+            elif itemType == 3: # deny
+                deny.append(name)
+            elif itemType == 4: # permit deny info
+                if not tlvs.has_key(0xcb):
+                    continue # this happens with ICQ
+                permitMode = {1:'permitall',2:'denyall',3:'permitsome',4:'denysome',5:'permitbuddies'}[ord(tlvs[0xca])]
+                visibility = {'\xff\xff\xff\xff':'all','\x00\x00\x00\x04':'notaim'}[tlvs[0xcb]]
+            elif itemType == 5: # unknown (perhaps idle data)?
+                pass
+            else:
+                log.msg('%s %s %s %s %s' % (name, groupID, buddyID, itemType, tlvs))
+        timestamp = struct.unpack('!L',itemdata)[0]
+        if not timestamp: # we've got more packets coming
+            # which means add some deferred stuff
+            d = defer.Deferred()
+            self.requestCallbacks[snac[4]] = d
+            d.addCallback(self._cbRequestSSI, (revision, groups, permit, deny, permitMode, visibility))
+            return d
+        return (groups[0].users,permit,deny,permitMode,visibility,timestamp,revision)
+
+    def activateSSI(self):
+        """
+        activate the data stored on the server (use buddy list, permit deny settings, etc.)
+        """
+        self.sendSNACnr(0x13,0x07,'')
+
+    def startModifySSI(self):
+        """
+        tell the OSCAR server to be on the lookout for SSI modifications
+        """
+        self.sendSNACnr(0x13,0x11,'')
+
+    def addItemSSI(self, item, groupID = None, buddyID = None):
+        """
+        add an item to the SSI server.  if buddyID == 0, then this should be a group.
+        this gets a callback when it's finished, but you can probably ignore it.
+        """
+        if groupID is None:
+            if isinstance(item, SSIGroup):
+                groupID = 0
+            else:
+                groupID = item.group.group.findIDFor(item.group)
+        if buddyID is None:
+            buddyID = item.group.findIDFor(item)
+        return self.sendSNAC(0x13,0x08, item.oscarRep(groupID, buddyID))
+
+    def modifyItemSSI(self, item, groupID = None, buddyID = None):
+        if groupID is None:
+            if isinstance(item, SSIGroup):
+                groupID = 0
+            else:
+                groupID = item.group.group.findIDFor(item.group)
+        if buddyID is None:
+            buddyID = item.group.findIDFor(item)
+        return self.sendSNAC(0x13,0x09, item.oscarRep(groupID, buddyID))
+
+    def delItemSSI(self, item, groupID = None, buddyID = None):
+        if groupID is None:
+            if isinstance(item, SSIGroup):
+                groupID = 0
+            else:
+                groupID = item.group.group.findIDFor(item.group)
+        if buddyID is None:
+            buddyID = item.group.findIDFor(item)
+        return self.sendSNAC(0x13,0x0A, item.oscarRep(groupID, buddyID))
+
+    def endModifySSI(self):
+        self.sendSNACnr(0x13,0x12,'')
+
+    def setProfile(self, profile):
+        """
+        set the profile.
+        send None to not set a profile (different from '' for a blank one)
+        """
+        self.profile = profile
+        tlvs = ''
+        if self.profile is not None:
+            tlvs =  TLV(1,'text/aolrtf; charset="us-ascii"') + \
+                    TLV(2,self.profile)
+
+        tlvs = tlvs + TLV(5, ''.join(self.capabilities))
+        self.sendSNACnr(0x02, 0x04, tlvs)
+
+    def setAway(self, away = None):
+        """
+        set the away message, or return (if away == None)
+        """
+        self.awayMessage = away
+        tlvs = TLV(3,'text/aolrtf; charset="us-ascii"') + \
+               TLV(4,away or '')
+        self.sendSNACnr(0x02, 0x04, tlvs)
+
+    def setIdleTime(self, idleTime):
+        """
+        set our idle time.  don't call more than once with a non-0 idle time.
+        """
+        self.sendSNACnr(0x01, 0x11, struct.pack('!L',idleTime))
+
+    def sendMessage(self, user, message, wantAck = 0, autoResponse = 0, offline = 0 ):  \
+                    #haveIcon = 0, ):
+        """
+        send a message to user (not an OSCARUser).
+        message can be a string, or a multipart tuple.
+        if wantAck, we return a Deferred that gets a callback when the message is sent.
+        if autoResponse, this message is an autoResponse, as if from an away message.
+        if offline, this is an offline message (ICQ only, I think)
+        """
+        data = ''.join([chr(random.randrange(0, 127)) for i in range(8)]) # cookie
+        data = data + '\x00\x01' + chr(len(user)) + user
+        if not type(message) in (types.TupleType, types.ListType):
+            message = [[message,]]
+            if type(message[0][0]) == types.UnicodeType:
+                message[0].append('unicode')
+        messageData = ''
+        for part in message:
+            charSet = 0
+            if 'unicode' in part[1:]:
+                charSet = 2
+                part[0] = part[0].encode('utf-8')
+            elif 'iso-8859-1' in part[1:]:
+                charSet = 3
+                part[0] = part[0].encode('iso-8859-1')
+            elif 'none' in part[1:]:
+                charSet = 0xffff
+            if 'macintosh' in part[1:]:
+                charSubSet = 0xb
+            else:
+                charSubSet = 0
+            messageData = messageData + '\x01\x01' + \
+                          struct.pack('!3H',len(part[0])+4,charSet,charSubSet)
+            messageData = messageData + part[0]
+        data = data + TLV(2, '\x05\x01\x00\x03\x01\x01\x02'+messageData)
+        if wantAck:
+            data = data + TLV(3,'')
+        if autoResponse:
+            data = data + TLV(4,'')
+        if offline:
+            data = data + TLV(6,'')
+        if wantAck:
+            return self.sendSNAC(0x04, 0x06, data).addCallback(self._cbSendMessageAck, user, message)
+        self.sendSNACnr(0x04, 0x06, data)
+
+    def _cbSendMessageAck(self, snac, user, message):
+        return user, message
+
+    def connectService(self, service, wantCallback = 0, extraData = ''):
+        """
+        connect to another service
+        if wantCallback, we return a Deferred that gets called back when the service is online.
+        if extraData, append that to our request.
+        """
+        if wantCallback:
+            d = defer.Deferred()
+            self.sendSNAC(0x01,0x04,struct.pack('!H',service) + extraData).addCallback(self._cbConnectService, d)
+            return d
+        else:
+            self.sendSNACnr(0x01,0x04,struct.pack('!H',service))
+
+    def _cbConnectService(self, snac, d):
+        self.oscar_01_05(snac[2:], d)
+
+    def createChat(self, shortName):
+        """
+        create a chat room
+        """
+        if self.services.has_key(SERVICE_CHATNAV):
+            return self.services[SERVICE_CHATNAV].createChat(shortName)
+        else:
+            return self.connectService(SERVICE_CHATNAV,1).addCallback(lambda s: s.createChat(shortName))
+
+
+    def joinChat(self, exchange, fullName, instance):
+        """
+        join a chat room
+        """
+        #d = defer.Deferred()
+        return self.connectService(0x0e, 1, TLV(0x01, struct.pack('!HB',exchange, len(fullName)) + fullName +
+                          struct.pack('!H', instance))).addCallback(self._cbJoinChat) #, d)
+        #return d
+
+    def _cbJoinChat(self, chat):
+        del self.services[SERVICE_CHAT]
+        return chat
+
+    def warnUser(self, user, anon = 0):
+        return self.sendSNAC(0x04, 0x08, '\x00'+chr(anon)+chr(len(user))+user).addCallback(self._cbWarnUser)
+
+    def _cbWarnUser(self, snac):
+        oldLevel, newLevel = struct.unpack('!2H', snac[5])
+        return oldLevel, newLevel
+
+    def getInfo(self, user):
+        #if user.
+        return self.sendSNAC(0x02, 0x05, '\x00\x01'+chr(len(user))+user).addCallback(self._cbGetInfo)
+
+    def _cbGetInfo(self, snac):
+        user, rest = self.parseUser(snac[5],1)
+        tlvs = readTLVs(rest)
+        return tlvs.get(0x02,None)
+
+    def getAway(self, user):
+        return self.sendSNAC(0x02, 0x05, '\x00\x03'+chr(len(user))+user).addCallback(self._cbGetAway)
+
+    def _cbGetAway(self, snac):
+        user, rest = self.parseUser(snac[5],1)
+        tlvs = readTLVs(rest)
+        return tlvs.get(0x04,None) # return None if there is no away message
+
+    #def acceptSendFileRequest(self,
+
+    # methods to be overridden by the client
+    def initDone(self):
+        """
+        called when we get the rate information, which means we should do other init. stuff.
+        """
+        log.msg('%s initDone' % self)
+        pass
+
+    def updateBuddy(self, user):
+        """
+        called when a buddy changes status, with the OSCARUser for that buddy.
+        """
+        log.msg('%s updateBuddy %s' % (self, user))
+        pass
+
+    def offlineBuddy(self, user):
+        """
+        called when a buddy goes offline
+        """
+        log.msg('%s offlineBuddy %s' % (self, user))
+        pass
+
+    def receiveMessage(self, user, multiparts, flags):
+        """
+        called when someone sends us a message
+        """
+        pass
+
+    def receiveWarning(self, newLevel, user):
+        """
+        called when someone warns us.
+        user is either None (if it was anonymous) or an OSCARUser
+        """
+        pass
+
+    def receiveChatInvite(self, user, message, exchange, fullName, instance, shortName, inviteTime):
+        """
+        called when someone invites us to a chat room
+        """
+        pass
+
+    def chatReceiveMessage(self, chat, user, message):
+        """
+        called when someone in a chatroom sends us a message in the chat
+        """
+        pass
+
+    def chatMemberJoined(self, chat, member):
+        """
+        called when a member joins the chat
+        """
+        pass
+
+    def chatMemberLeft(self, chat, member):
+        """
+        called when a member leaves the chat
+        """
+        pass
+
+    def receiveSendFileRequest(self, user, file, description, cookie):
+        """
+        called when someone tries to send a file to us
+        """
+        pass
+
+class OSCARService(SNACBased):
+    def __init__(self, bos, cookie, d = None):
+        SNACBased.__init__(self, cookie)
+        self.bos = bos
+        self.d = d
+
+    def connectionLost(self, reason):
+        for k,v in self.bos.services.items():
+            if v == self:
+                del self.bos.services[k]
+                return
+
+    def clientReady(self):
+        SNACBased.clientReady(self)
+        if self.d:
+            self.d.callback(self)
+            self.d = None
+
+class ChatNavService(OSCARService):
+    snacFamilies = {
+        0x01:(3, 0x0010, 0x059b),
+        0x0d:(1, 0x0010, 0x059b)
+    }
+    def oscar_01_07(self, snac):
+        # rate info
+        self.sendSNACnr(0x01, 0x08, '\000\001\000\002\000\003\000\004\000\005')
+        self.sendSNACnr(0x0d, 0x02, '')
+
+    def oscar_0D_09(self, snac):
+        self.clientReady()
+
+    def getChatInfo(self, exchange, name, instance):
+        d = defer.Deferred()
+        self.sendSNAC(0x0d,0x04,struct.pack('!HB',exchange,len(name)) + \
+                      name + struct.pack('!HB',instance,2)). \
+            addCallback(self._cbGetChatInfo, d)
+        return d
+
+    def _cbGetChatInfo(self, snac, d):
+        data = snac[5][4:]
+        exchange, length = struct.unpack('!HB',data[:3])
+        fullName = data[3:3+length]
+        instance = struct.unpack('!H',data[3+length:5+length])[0]
+        tlvs = readTLVs(data[8+length:])
+        shortName = tlvs[0x6a]
+        inviteTime = struct.unpack('!L',tlvs[0xca])[0]
+        info = (exchange,fullName,instance,shortName,inviteTime)
+        d.callback(info)
+
+    def createChat(self, shortName):
+        #d = defer.Deferred()
+        data = '\x00\x04\x06create\xff\xff\x01\x00\x03'
+        data = data + TLV(0xd7, 'en')
+        data = data + TLV(0xd6, 'us-ascii')
+        data = data + TLV(0xd3, shortName)
+        return self.sendSNAC(0x0d, 0x08, data).addCallback(self._cbCreateChat)
+        #return d
+
+    def _cbCreateChat(self, snac): #d):
+        exchange, length = struct.unpack('!HB',snac[5][4:7])
+        fullName = snac[5][7:7+length]
+        instance = struct.unpack('!H',snac[5][7+length:9+length])[0]
+        #d.callback((exchange, fullName, instance))
+        return exchange, fullName, instance
+
+class ChatService(OSCARService):
+    snacFamilies = {
+        0x01:(3, 0x0010, 0x059b),
+        0x0E:(1, 0x0010, 0x059b)
+    }
+    def __init__(self,bos,cookie, d = None):
+        OSCARService.__init__(self,bos,cookie,d)
+        self.exchange = None
+        self.fullName = None
+        self.instance = None
+        self.name = None
+        self.members = None
+
+    clientReady = SNACBased.clientReady # we'll do our own callback
+
+    def oscar_01_07(self,snac):
+        self.sendSNAC(0x01,0x08,"\000\001\000\002\000\003\000\004\000\005")
+        self.clientReady()
+
+    def oscar_0E_02(self, snac):
+#        try: # this is EVIL
+#            data = snac[3][4:]
+#            self.exchange, length = struct.unpack('!HB',data[:3])
+#            self.fullName = data[3:3+length]
+#            self.instance = struct.unpack('!H',data[3+length:5+length])[0]
+#            tlvs = readTLVs(data[8+length:])
+#            self.name = tlvs[0xd3]
+#            self.d.callback(self)
+#        except KeyError:
+        data = snac[3]
+        self.exchange, length = struct.unpack('!HB',data[:3])
+        self.fullName = data[3:3+length]
+        self.instance = struct.unpack('!H',data[3+length:5+length])[0]
+        tlvs = readTLVs(data[8+length:])
+        self.name = tlvs[0xd3]
+        self.d.callback(self)
+
+    def oscar_0E_03(self,snac):
+        users=[]
+        rest=snac[3]
+        while rest:
+            user, rest = self.bos.parseUser(rest, 1)
+            users.append(user)
+        if not self.fullName:
+            self.members = users
+        else:
+            self.members.append(users[0])
+            self.bos.chatMemberJoined(self,users[0])
+
+    def oscar_0E_04(self,snac):
+        user=self.bos.parseUser(snac[3])
+        for u in self.members:
+            if u.name == user.name: # same person!
+                self.members.remove(u)
+        self.bos.chatMemberLeft(self,user)
+
+    def oscar_0E_06(self,snac):
+        data = snac[3]
+        user,rest=self.bos.parseUser(snac[3][14:],1)
+        tlvs = readTLVs(rest[8:])
+        message=tlvs[1]
+        self.bos.chatReceiveMessage(self,user,message)
+
+    def sendMessage(self,message):
+        tlvs=TLV(0x02,"us-ascii")+TLV(0x03,"en")+TLV(0x01,message)
+        self.sendSNAC(0x0e,0x05,
+                      "\x46\x30\x38\x30\x44\x00\x63\x00\x00\x03\x00\x01\x00\x00\x00\x06\x00\x00\x00\x05"+
+                      struct.pack("!H",len(tlvs))+
+                      tlvs)
+
+    def leaveChat(self):
+        self.disconnect()
+
+class OscarAuthenticator(OscarConnection):
+    BOSClass = BOSConnection
+    def __init__(self,username,password,deferred=None,icq=0):
+        self.username=username
+        self.password=password
+        self.deferred=deferred
+        self.icq=icq # icq mode is disabled
+        #if icq and self.BOSClass==BOSConnection:
+        #    self.BOSClass=ICQConnection
+
+    def oscar_(self,flap):
+        if not self.icq:
+            self.sendFLAP("\000\000\000\001", 0x01)
+            self.sendFLAP(SNAC(0x17,0x06,0,
+                               TLV(TLV_USERNAME,self.username)+
+                               TLV(0x004B,'')))
+            self.state="Key"
+        else:
+            encpass=encryptPasswordICQ(self.password)
+            self.sendFLAP('\000\000\000\001'+
+                          TLV(0x01,self.username)+
+                          TLV(0x02,encpass)+
+                          TLV(0x03,'ICQ Inc. - Product of ICQ (TM).2001b.5.18.1.3659.85')+
+                          TLV(0x16,"\x01\x0a")+
+                          TLV(0x17,"\x00\x05")+
+                          TLV(0x18,"\x00\x12")+
+                          TLV(0x19,"\000\001")+
+                          TLV(0x1a,"\x0eK")+
+                          TLV(0x14,"\x00\x00\x00U")+
+                          TLV(0x0f,"en")+
+                          TLV(0x0e,"us"),0x01)
+            self.state="Cookie"
+
+    def oscar_Key(self,data):
+        snac=readSNAC(data[1])
+        key=snac[5][2:]
+        encpass=encryptPasswordMD5(self.password,key)
+        self.sendFLAP(SNAC(0x17,0x02,0,
+                           TLV(TLV_USERNAME,self.username)+
+                           TLV(TLV_PASSWORD,encpass)+
+                           TLV(0x004C, '')+ # unknown
+                           TLV(TLV_CLIENTNAME,"AOL Instant Messenger (SM), version 4.8.2790/WIN32")+
+                           TLV(0x0016,"\x01\x09")+
+                           TLV(TLV_CLIENTMAJOR,"\000\004")+
+                           TLV(TLV_CLIENTMINOR,"\000\010")+
+                           TLV(0x0019,"\000\000")+
+                           TLV(TLV_CLIENTSUB,"\x0A\xE6")+
+                           TLV(0x0014,"\x00\x00\x00\xBB")+
+                           TLV(TLV_LANG,"en")+
+                           TLV(TLV_COUNTRY,"us")+
+                           TLV(TLV_USESSI,"\001")))
+        return "Cookie"
+
+    def oscar_Cookie(self,data):
+        snac=readSNAC(data[1])
+        if self.icq:
+            i=snac[5].find("\000")
+            snac[5]=snac[5][i:]
+        tlvs=readTLVs(snac[5])
+        if tlvs.has_key(6):
+            self.cookie=tlvs[6]
+            server,port=string.split(tlvs[5],":")
+            d = self.connectToBOS(server, int(port))
+            d.addErrback(lambda x: log.msg("Connection Failed! Reason: %s" % x))
+            if self.deferred:
+                d.chainDeferred(self.deferred)
+            self.disconnect()
+        elif tlvs.has_key(8):
+            errorcode=tlvs[8]
+            errorurl=tlvs[4]
+            if errorcode=='\000\030':
+                error="You are attempting to sign on again too soon.  Please try again later."
+            elif errorcode=='\000\005':
+                error="Invalid Username or Password."
+            else: error=repr(errorcode)
+            self.error(error,errorurl)
+        else:
+            log.msg('hmm, weird tlvs for %s cookie packet' % str(self))
+            log.msg(tlvs)
+            log.msg('snac')
+            log.msg(str(snac))
+        return "None"
+
+    def oscar_None(self,data): pass
+
+    def connectToBOS(self, server, port):
+        c = protocol.ClientCreator(reactor, self.BOSClass, self.username, self.cookie)
+        return c.connectTCP(server, int(port))
+
+    def error(self,error,url):
+        log.msg("ERROR! %s %s" % (error,url))
+        if self.deferred: self.deferred.errback((error,url))
+        self.transport.loseConnection()
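+
+# A rough sketch of typical client wiring ('screenname', 'password' and the
+# login host below are placeholders): subclass BOSConnection with the callbacks
+# you need, point OscarAuthenticator.BOSClass at it, and connect:
+#
+#     class MyBOS(BOSConnection):
+#         def initDone(self):
+#             self.clientReady()
+#     OscarAuthenticator.BOSClass = MyBOS
+#     protocol.ClientCreator(reactor, OscarAuthenticator, 'screenname',
+#                            'password').connectTCP('login.oscar.aol.com', 5190)
+#     reactor.run()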
+
+FLAP_CHANNEL_NEW_CONNECTION = 0x01
+FLAP_CHANNEL_DATA = 0x02
+FLAP_CHANNEL_ERROR = 0x03
+FLAP_CHANNEL_CLOSE_CONNECTION = 0x04
+
+SERVICE_CHATNAV = 0x0d
+SERVICE_CHAT = 0x0e
+serviceClasses = {
+    SERVICE_CHATNAV:ChatNavService,
+    SERVICE_CHAT:ChatService
+}
+TLV_USERNAME = 0x0001
+TLV_CLIENTNAME = 0x0003
+TLV_COUNTRY = 0x000E
+TLV_LANG = 0x000F
+TLV_CLIENTMAJOR = 0x0017
+TLV_CLIENTMINOR = 0x0018
+TLV_CLIENTSUB = 0x001A
+TLV_PASSWORD = 0x0025
+TLV_USESSI = 0x004A
+
+CAP_ICON = '\011F\023FL\177\021\321\202"DEST\000\000'
+CAP_VOICE = '\011F\023AL\177\021\321\202"DEST\000\000'
+CAP_IMAGE = '\011F\023EL\177\021\321\202"DEST\000\000'
+CAP_CHAT = 't\217$ b\207\021\321\202"DEST\000\000'
+CAP_GET_FILE = '\011F\023HL\177\021\321\202"DEST\000\000'
+CAP_SEND_FILE = '\011F\023CL\177\021\321\202"DEST\000\000'
+CAP_GAMES = '\011F\023GL\177\021\321\202"DEST\000\000'
+CAP_SEND_LIST = '\011F\023KL\177\021\321\202"DEST\000\000'
+CAP_SERV_REL = '\011F\023IL\177\021\321\202"DEST\000\000'
diff --git a/ThirdParty/Twisted/twisted/words/service.py b/ThirdParty/Twisted/twisted/words/service.py
new file mode 100644
index 0000000..0e4f8b6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/service.py
@@ -0,0 +1,1223 @@
+# -*- test-case-name: twisted.words.test.test_service -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+A module that needs a better name.
+
+Implements new cred things for words.
+
+How does this thing work?
+
+  - Network connection on some port expecting to speak some protocol
+
+  - Protocol-specific authentication, resulting in some kind of credentials object
+
+  - twisted.cred.portal login using those credentials for the interface
+    IUser and with something implementing IChatClient as the mind
+
+  - successful login results in an IUser avatar the protocol can call
+    methods on, and state added to the realm such that the mind will have
+    methods called on it as is necessary
+
+  - protocol specific actions lead to calls onto the avatar; remote events
+    lead to calls onto the mind
+
+  - protocol specific hangup, realm is notified, user is removed from active
+    play, the end.
+"""
+
+from time import time, ctime
+
+from zope.interface import implements
+
+from twisted.words import iwords, ewords
+
+from twisted.python.components import registerAdapter
+from twisted.cred import portal, credentials, error as ecred
+from twisted.spread import pb
+from twisted.words.protocols import irc
+from twisted.internet import defer, protocol
+from twisted.python import log, failure, reflect
+from twisted import copyright
+
+
+class Group(object):
+    implements(iwords.IGroup)
+
+    def __init__(self, name):
+        self.name = name
+        self.users = {}
+        self.meta = {
+            "topic": "",
+            "topic_author": "",
+            }
+
+
+    def _ebUserCall(self, err, p):
+        return failure.Failure(Exception(p, err))
+
+
+    def _cbUserCall(self, results):
+        for (success, result) in results:
+            if not success:
+                user, err = result.value # XXX
+                self.remove(user, err.getErrorMessage())
+
+
+    def add(self, user):
+        assert iwords.IChatClient.providedBy(user), "%r is not a chat client" % (user,)
+        if user.name not in self.users:
+            additions = []
+            self.users[user.name] = user
+            for p in self.users.itervalues():
+                if p is not user:
+                    d = defer.maybeDeferred(p.userJoined, self, user)
+                    d.addErrback(self._ebUserCall, p=p)
+                    additions.append(d)
+            defer.DeferredList(additions).addCallback(self._cbUserCall)
+        return defer.succeed(None)
+
+
+    def remove(self, user, reason=None):
+        assert reason is None or isinstance(reason, unicode)
+        try:
+            del self.users[user.name]
+        except KeyError:
+            pass
+        else:
+            removals = []
+            for p in self.users.itervalues():
+                if p is not user:
+                    d = defer.maybeDeferred(p.userLeft, self, user, reason)
+                    d.addErrback(self._ebUserCall, p=p)
+                    removals.append(d)
+            defer.DeferredList(removals).addCallback(self._cbUserCall)
+        return defer.succeed(None)
+
+
+    def size(self):
+        return defer.succeed(len(self.users))
+
+
+    def receive(self, sender, recipient, message):
+        assert recipient is self
+        receives = []
+        for p in self.users.itervalues():
+            if p is not sender:
+                d = defer.maybeDeferred(p.receive, sender, self, message)
+                d.addErrback(self._ebUserCall, p=p)
+                receives.append(d)
+        defer.DeferredList(receives).addCallback(self._cbUserCall)
+        return defer.succeed(None)
+
+
+    def setMetadata(self, meta):
+        self.meta = meta
+        sets = []
+        for p in self.users.itervalues():
+            d = defer.maybeDeferred(p.groupMetaUpdate, self, meta)
+            d.addErrback(self._ebUserCall, p=p)
+            sets.append(d)
+        defer.DeferredList(sets).addCallback(self._cbUserCall)
+        return defer.succeed(None)
+
+
+    def iterusers(self):
+        # XXX Deferred?
+        return iter(self.users.values())
+
+
+class User(object):
+    implements(iwords.IUser)
+
+    realm = None
+    mind = None
+
+    def __init__(self, name):
+        self.name = name
+        self.groups = []
+        self.lastMessage = time()
+
+
+    def loggedIn(self, realm, mind):
+        self.realm = realm
+        self.mind = mind
+        self.signOn = time()
+
+
+    def join(self, group):
+        def cbJoin(result):
+            self.groups.append(group)
+            return result
+        return group.add(self.mind).addCallback(cbJoin)
+
+
+    def leave(self, group, reason=None):
+        def cbLeave(result):
+            self.groups.remove(group)
+            return result
+        return group.remove(self.mind, reason).addCallback(cbLeave)
+
+
+    def send(self, recipient, message):
+        self.lastMessage = time()
+        return recipient.receive(self.mind, recipient, message)
+
+
+    def itergroups(self):
+        return iter(self.groups)
+
+
+    def logout(self):
+        for g in self.groups[:]:
+            self.leave(g)
+
+
+NICKSERV = 'NickServ!NickServ at services'
+
+
+class IRCUser(irc.IRC):
+    """
+    Protocol instance representing an IRC user connected to the server.
+    """
+    implements(iwords.IChatClient)
+
+    # A list of IGroups in which I am participating
+    groups = None
+
+    # A no-argument callable I should invoke when I go away
+    logout = None
+
+    # An IUser we use to interact with the chat service
+    avatar = None
+
+    # To whence I belong
+    realm = None
+
+    # How to handle unicode (TODO: Make this customizable on a per-user basis)
+    encoding = 'utf-8'
+
+    # Twisted callbacks
+    def connectionMade(self):
+        self.irc_PRIVMSG = self.irc_NICKSERV_PRIVMSG
+        self.realm = self.factory.realm
+        self.hostname = self.realm.name
+
+
+    def connectionLost(self, reason):
+        if self.logout is not None:
+            self.logout()
+            self.avatar = None
+
+
+    # Make sendMessage a bit more useful to us
+    def sendMessage(self, command, *parameter_list, **kw):
+        if 'prefix' not in kw:
+            kw['prefix'] = self.hostname
+        if 'to' not in kw:
+            kw['to'] = self.name.encode(self.encoding)
+
+        arglist = [self, command, kw['to']] + list(parameter_list)
+        irc.IRC.sendMessage(*arglist, **kw)
+
+
+    # IChatClient implementation
+    def userJoined(self, group, user):
+        self.join(
+            "%s!%s@%s" % (user.name, user.name, self.hostname),
+            '#' + group.name)
+
+
+    def userLeft(self, group, user, reason=None):
+        assert reason is None or isinstance(reason, unicode)
+        self.part(
+            "%s!%s@%s" % (user.name, user.name, self.hostname),
+            '#' + group.name,
+            (reason or u"leaving").encode(self.encoding, 'replace'))
+
+
+    def receive(self, sender, recipient, message):
+        #>> :glyph!glyph at adsl-64-123-27-108.dsl.austtx.swbell.net PRIVMSG glyph_ :hello
+
+        # omg???????????
+        if iwords.IGroup.providedBy(recipient):
+            recipientName = '#' + recipient.name
+        else:
+            recipientName = recipient.name
+
+        text = message.get('text', '<an unrepresentable message>')
+        for L in text.splitlines():
+            self.privmsg(
+                '%s!%s@%s' % (sender.name, sender.name, self.hostname),
+                recipientName,
+                L)
+
+
+    def groupMetaUpdate(self, group, meta):
+        if 'topic' in meta:
+            topic = meta['topic']
+            author = meta.get('topic_author', '')
+            self.topic(
+                self.name,
+                '#' + group.name,
+                topic,
+                '%s!%s@%s' % (author, author, self.hostname)
+                )
+
+    # irc.IRC callbacks - starting with login related stuff.
+    nickname = None
+    password = None
+
+    def irc_PASS(self, prefix, params):
+        """Password message -- Register a password.
+
+        Parameters: <password>
+
+        [REQUIRED]
+
+        Note that IRC requires the client send this *before* NICK
+        and USER.
+        """
+        self.password = params[-1]
+
+
+    def irc_NICK(self, prefix, params):
+        """Nick message -- Set your nickname.
+
+        Parameters: <nickname>
+
+        [REQUIRED]
+        """
+        try:
+            nickname = params[0].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.privmsg(
+                NICKSERV,
+                params[0],
+                'Your nickname cannot be decoded.  Please use ASCII or UTF-8.')
+            self.transport.loseConnection()
+            return
+
+        self.nickname = nickname
+        self.name = nickname
+
+        for code, text in self._motdMessages:
+            self.sendMessage(code, text % self.factory._serverInfo)
+
+        if self.password is None:
+            self.privmsg(
+                NICKSERV,
+                nickname,
+                'Password?')
+        else:
+            password = self.password
+            self.password = None
+            self.logInAs(nickname, password)
+
+
+    def irc_USER(self, prefix, params):
+        """User message -- Set your realname.
+
+        Parameters: <user> <mode> <unused> <realname>
+        """
+        # Note: who gives a crap about this?  The IUser has the real
+        # information we care about.  Save it anyway, I guess, just
+        # for fun.
+        self.realname = params[-1]
+
+
+    def irc_NICKSERV_PRIVMSG(self, prefix, params):
+        """Send a (private) message.
+
+        Parameters: <msgtarget> <text to be sent>
+        """
+        target = params[0]
+        password = params[-1]
+
+        if self.nickname is None:
+            # XXX Send an error response here
+            self.transport.loseConnection()
+        elif target.lower() != "nickserv":
+            self.privmsg(
+                NICKSERV,
+                self.nickname,
+                "Denied.  Please send me (NickServ) your password.")
+        else:
+            nickname = self.nickname
+            self.nickname = None
+            self.logInAs(nickname, password)
+
+
+    def logInAs(self, nickname, password):
+        d = self.factory.portal.login(
+            credentials.UsernamePassword(nickname, password),
+            self,
+            iwords.IUser)
+        d.addCallbacks(self._cbLogin, self._ebLogin, errbackArgs=(nickname,))
+
+
+    _welcomeMessages = [
+        (irc.RPL_WELCOME,
+         ":connected to Twisted IRC"),
+        (irc.RPL_YOURHOST,
+         ":Your host is %(serviceName)s, running version %(serviceVersion)s"),
+        (irc.RPL_CREATED,
+         ":This server was created on %(creationDate)s"),
+
+        # "Bummer.  This server returned a worthless 004 numeric.
+        #  I'll have to guess at all the values"
+        #    -- epic
+        (irc.RPL_MYINFO,
+         # w and n are the currently supported channel and user modes
+         # -- specify this better
+         "%(serviceName)s %(serviceVersion)s w n")
+        ]
+
+    _motdMessages = [
+        (irc.RPL_MOTDSTART,
+         ":- %(serviceName)s Message of the Day - "),
+        (irc.RPL_ENDOFMOTD,
+         ":End of /MOTD command.")
+        ]
+
+    def _cbLogin(self, (iface, avatar, logout)):
+        assert iface is iwords.IUser, "Realm is buggy, got %r" % (iface,)
+
+        # Let them send messages to the world
+        del self.irc_PRIVMSG
+
+        self.avatar = avatar
+        self.logout = logout
+        for code, text in self._welcomeMessages:
+            self.sendMessage(code, text % self.factory._serverInfo)
+
+
+    def _ebLogin(self, err, nickname):
+        if err.check(ewords.AlreadyLoggedIn):
+            self.privmsg(
+                NICKSERV,
+                nickname,
+                "Already logged in.  No pod people allowed!")
+        elif err.check(ecred.UnauthorizedLogin):
+            self.privmsg(
+                NICKSERV,
+                nickname,
+                "Login failed.  Goodbye.")
+        else:
+            log.msg("Unhandled error during login:")
+            log.err(err)
+            self.privmsg(
+                NICKSERV,
+                nickname,
+                "Server error during login.  Sorry.")
+        self.transport.loseConnection()
+
+
+    # Great, now that's out of the way, here's some of the interesting
+    # bits
+    def irc_PING(self, prefix, params):
+        """Ping message
+
+        Parameters: <server1> [ <server2> ]
+        """
+        if self.realm is not None:
+            self.sendMessage('PONG', self.hostname)
+
+
+    def irc_QUIT(self, prefix, params):
+        """Quit
+
+        Parameters: [ <Quit Message> ]
+        """
+        self.transport.loseConnection()
+
+
+    def _channelMode(self, group, modes=None, *args):
+        if modes:
+            self.sendMessage(
+                irc.ERR_UNKNOWNMODE,
+                ":Unknown MODE flag.")
+        else:
+            self.channelMode(self.name, '#' + group.name, '+')
+
+
+    def _userMode(self, user, modes=None):
+        if modes:
+            self.sendMessage(
+                irc.ERR_UNKNOWNMODE,
+                ":Unknown MODE flag.")
+        elif user is self.avatar:
+            self.sendMessage(
+                irc.RPL_UMODEIS,
+                "+")
+        else:
+            self.sendMessage(
+                irc.ERR_USERSDONTMATCH,
+                ":You can't look at someone else's modes.")
+
+
+    def irc_MODE(self, prefix, params):
+        """User mode message
+
+        Parameters: <nickname>
+        *( ( "+" / "-" ) *( "i" / "w" / "o" / "O" / "r" ) )
+
+        """
+        try:
+            channelOrUser = params[0].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.sendMessage(
+                irc.ERR_NOSUCHNICK, params[0],
+                ":No such nickname (could not decode your unicode!)")
+            return
+
+        if channelOrUser.startswith('#'):
+            def ebGroup(err):
+                err.trap(ewords.NoSuchGroup)
+                self.sendMessage(
+                    irc.ERR_NOSUCHCHANNEL, params[0],
+                    ":That channel doesn't exist.")
+            d = self.realm.lookupGroup(channelOrUser[1:])
+            d.addCallbacks(
+                self._channelMode,
+                ebGroup,
+                callbackArgs=tuple(params[1:]))
+        else:
+            def ebUser(err):
+                self.sendMessage(
+                    irc.ERR_NOSUCHNICK,
+                    ":No such nickname.")
+
+            d = self.realm.lookupUser(channelOrUser)
+            d.addCallbacks(
+                self._userMode,
+                ebUser,
+                callbackArgs=tuple(params[1:]))
+
+
+    def irc_USERHOST(self, prefix, params):
+        """Userhost message
+
+        Parameters: <nickname> *( SPACE <nickname> )
+
+        [Optional]
+        """
+        pass
+
+
+    def irc_PRIVMSG(self, prefix, params):
+        """Send a (private) message.
+
+        Parameters: <msgtarget> <text to be sent>
+        """
+        try:
+            targetName = params[0].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.sendMessage(
+                irc.ERR_NOSUCHNICK, params[0],
+                ":No such nick/channel (could not decode your unicode!)")
+            return
+
+        messageText = params[-1]
+        if targetName.startswith('#'):
+            target = self.realm.lookupGroup(targetName[1:])
+        else:
+            target = self.realm.lookupUser(targetName).addCallback(lambda user: user.mind)
+
+        def cbTarget(targ):
+            if targ is not None:
+                return self.avatar.send(targ, {"text": messageText})
+
+        def ebTarget(err):
+            self.sendMessage(
+                irc.ERR_NOSUCHNICK, targetName,
+                ":No such nick/channel.")
+
+        target.addCallbacks(cbTarget, ebTarget)
+
+
+    def irc_JOIN(self, prefix, params):
+        """Join message
+
+        Parameters: ( <channel> *( "," <channel> ) [ <key> *( "," <key> ) ] )
+        """
+        try:
+            groupName = params[0].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.sendMessage(
+                irc.ERR_NOSUCHCHANNEL, params[0],
+                ":No such channel (could not decode your unicode!)")
+            return
+
+        if groupName.startswith('#'):
+            groupName = groupName[1:]
+
+        def cbGroup(group):
+            def cbJoin(ign):
+                self.userJoined(group, self)
+                self.names(
+                    self.name,
+                    '#' + group.name,
+                    [user.name for user in group.iterusers()])
+                self._sendTopic(group)
+            return self.avatar.join(group).addCallback(cbJoin)
+
+        def ebGroup(err):
+            self.sendMessage(
+                irc.ERR_NOSUCHCHANNEL, '#' + groupName,
+                ":No such channel.")
+
+        self.realm.getGroup(groupName).addCallbacks(cbGroup, ebGroup)
+
+
+    def irc_PART(self, prefix, params):
+        """Part message
+
+        Parameters: <channel> *( "," <channel> ) [ <Part Message> ]
+        """
+        try:
+            groupName = params[0].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.sendMessage(
+                irc.ERR_NOTONCHANNEL, params[0],
+                ":Could not decode your unicode!")
+            return
+
+        if groupName.startswith('#'):
+            groupName = groupName[1:]
+
+        if len(params) > 1:
+            reason = params[1].decode('utf-8')
+        else:
+            reason = None
+
+        def cbGroup(group):
+            def cbLeave(result):
+                self.userLeft(group, self, reason)
+            return self.avatar.leave(group, reason).addCallback(cbLeave)
+
+        def ebGroup(err):
+            err.trap(ewords.NoSuchGroup)
+            self.sendMessage(
+                irc.ERR_NOTONCHANNEL,
+                '#' + groupName,
+                ":" + err.getErrorMessage())
+
+        self.realm.lookupGroup(groupName).addCallbacks(cbGroup, ebGroup)
+
+
+    def irc_NAMES(self, prefix, params):
+        """Names message
+
+        Parameters: [ <channel> *( "," <channel> ) [ <target> ] ]
+        """
+        #<< NAMES #python
+        #>> :benford.openprojects.net 353 glyph = #python :Orban ... @glyph ... Zymurgy skreech
+        #>> :benford.openprojects.net 366 glyph #python :End of /NAMES list.
+        try:
+            channel = params[-1].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.sendMessage(
+                irc.ERR_NOSUCHCHANNEL, params[-1],
+                ":No such channel (could not decode your unicode!)")
+            return
+
+        if channel.startswith('#'):
+            channel = channel[1:]
+
+        def cbGroup(group):
+            self.names(
+                self.name,
+                '#' + group.name,
+                [user.name for user in group.iterusers()])
+
+        def ebGroup(err):
+            err.trap(ewords.NoSuchGroup)
+            # No group?  Fine, no names!
+            self.names(
+                self.name,
+                '#' + channel,
+                [])
+
+        self.realm.lookupGroup(channel).addCallbacks(cbGroup, ebGroup)
+
+
+    def irc_TOPIC(self, prefix, params):
+        """Topic message
+
+        Parameters: <channel> [ <topic> ]
+        """
+        try:
+            channel = params[0].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.sendMessage(
+                irc.ERR_NOSUCHCHANNEL,
+                ":That channel doesn't exist (could not decode your unicode!)")
+            return
+
+        if channel.startswith('#'):
+            channel = channel[1:]
+
+        if len(params) > 1:
+            self._setTopic(channel, params[1])
+        else:
+            self._getTopic(channel)
+
+
+    def _sendTopic(self, group):
+        """
+        Send the topic of the given group to this user, if it has one.
+        """
+        topic = group.meta.get("topic")
+        if topic:
+            author = group.meta.get("topic_author") or "<noone>"
+            date = group.meta.get("topic_date", 0)
+            self.topic(self.name, '#' + group.name, topic)
+            self.topicAuthor(self.name, '#' + group.name, author, date)
+
+
+    def _getTopic(self, channel):
+        #<< TOPIC #python
+        #>> :benford.openprojects.net 332 glyph #python :<churchr> I really did. I sprained all my toes.
+        #>> :benford.openprojects.net 333 glyph #python itamar|nyc 994713482
+        def ebGroup(err):
+            err.trap(ewords.NoSuchGroup)
+            self.sendMessage(
+                irc.ERR_NOSUCHCHANNEL, '=', channel,
+                ":That channel doesn't exist.")
+
+        self.realm.lookupGroup(channel).addCallbacks(self._sendTopic, ebGroup)
+
+
+    def _setTopic(self, channel, topic):
+        #<< TOPIC #divunal :foo
+        #>> :glyph!glyph@adsl-64-123-27-108.dsl.austtx.swbell.net TOPIC #divunal :foo
+
+        def cbGroup(group):
+            newMeta = group.meta.copy()
+            newMeta['topic'] = topic
+            newMeta['topic_author'] = self.name
+            newMeta['topic_date'] = int(time())
+
+            def ebSet(err):
+                self.sendMessage(
+                    irc.ERR_CHANOPRIVSNEEDED,
+                    "#" + group.name,
+                    ":You need to be a channel operator to do that.")
+
+            return group.setMetadata(newMeta).addErrback(ebSet)
+
+        def ebGroup(err):
+            err.trap(ewords.NoSuchGroup)
+            self.sendMessage(
+                irc.ERR_NOSUCHCHANNEL, '=', channel,
+                ":That channel doesn't exist.")
+
+        self.realm.lookupGroup(channel).addCallbacks(cbGroup, ebGroup)
+
+
+    def list(self, channels):
+        """Send a group of LIST response lines
+
+        @type channels: C{list} of C{(str, int, str)}
+        @param channels: Information about the channels being sent:
+        their name, the number of participants, and their topic.
+        """
+        for (name, size, topic) in channels:
+            self.sendMessage(irc.RPL_LIST, name, str(size), ":" + topic)
+        self.sendMessage(irc.RPL_LISTEND, ":End of /LIST")
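
(A quick illustration of the shape list() expects: an iterable of
(name, size, topic) tuples.  The channel names and counts below are made
up, and "ircUser" stands for any connected IRCUser instance.)

    # Each tuple becomes one RPL_LIST (322) reply; RPL_LISTEND (323) follows.
    ircUser.list([
        ("#python", 358, "The Python programming language"),
        ("#twisted", 42, "Event-driven networking"),
    ])
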
+
+
+    def irc_LIST(self, prefix, params):
+        """List query
+
+        Return information about the indicated channels, or about all
+        channels if none are specified.
+
+        Parameters: [ <channel> *( "," <channel> ) [ <target> ] ]
+        """
+        #<< list #python
+        #>> :orwell.freenode.net 321 exarkun Channel :Users  Name
+        #>> :orwell.freenode.net 322 exarkun #python 358 :The Python programming language
+        #>> :orwell.freenode.net 323 exarkun :End of /LIST
+        if params:
+            # Return information about indicated channels
+            try:
+                channels = params[0].decode(self.encoding).split(',')
+            except UnicodeDecodeError:
+                self.sendMessage(
+                    irc.ERR_NOSUCHCHANNEL, params[0],
+                    ":No such channel (could not decode your unicode!)")
+                return
+
+            groups = []
+            for ch in channels:
+                if ch.startswith('#'):
+                    ch = ch[1:]
+                groups.append(self.realm.lookupGroup(ch))
+
+            groups = defer.DeferredList(groups, consumeErrors=True)
+            groups.addCallback(lambda gs: [r for (s, r) in gs if s])
+        else:
+            # Return information about all channels
+            groups = self.realm.itergroups()
+
+        def cbGroups(groups):
+            def gotSize(size, group):
+                return group.name, size, group.meta.get('topic')
+            d = defer.DeferredList([
+                group.size().addCallback(gotSize, group) for group in groups])
+            d.addCallback(lambda results: self.list([r for (s, r) in results if s]))
+            return d
+        groups.addCallback(cbGroups)
+
+
+    def _channelWho(self, group):
+        self.who(self.name, '#' + group.name,
+            [(m.name, self.hostname, self.realm.name, m.name, "H", 0, m.name) for m in group.iterusers()])
+
+
+    def _userWho(self, user):
+        self.sendMessage(irc.RPL_ENDOFWHO,
+                         ":User /WHO not implemented")
+
+
+    def irc_WHO(self, prefix, params):
+        """Who query
+
+        Parameters: [ <mask> [ "o" ] ]
+        """
+        #<< who #python
+        #>> :x.opn 352 glyph #python aquarius pc-62-31-193-114-du.blueyonder.co.uk y.opn Aquarius H :3 Aquarius
+        # ...
+        #>> :x.opn 352 glyph #python foobar europa.tranquility.net z.opn skreech H :0 skreech
+        #>> :x.opn 315 glyph #python :End of /WHO list.
+        ### also
+        #<< who glyph
+        #>> :x.opn 352 glyph #python glyph adsl-64-123-27-108.dsl.austtx.swbell.net x.opn glyph H :0 glyph
+        #>> :x.opn 315 glyph glyph :End of /WHO list.
+        if not params:
+            self.sendMessage(irc.RPL_ENDOFWHO, ":/WHO not supported.")
+            return
+
+        try:
+            channelOrUser = params[0].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.sendMessage(
+                irc.RPL_ENDOFWHO, params[0],
+                ":End of /WHO list (could not decode your unicode!)")
+            return
+
+        if channelOrUser.startswith('#'):
+            def ebGroup(err):
+                err.trap(ewords.NoSuchGroup)
+                self.sendMessage(
+                    irc.RPL_ENDOFWHO, channelOrUser,
+                    ":End of /WHO list.")
+            d = self.realm.lookupGroup(channelOrUser[1:])
+            d.addCallbacks(self._channelWho, ebGroup)
+        else:
+            def ebUser(err):
+                err.trap(ewords.NoSuchUser)
+                self.sendMessage(
+                    irc.RPL_ENDOFWHO, channelOrUser,
+                    ":End of /WHO list.")
+            d = self.realm.lookupUser(channelOrUser)
+            d.addCallbacks(self._userWho, ebUser)
+
+
+
+    def irc_WHOIS(self, prefix, params):
+        """Whois query
+
+        Parameters: [ <target> ] <mask> *( "," <mask> )
+        """
+        def cbUser(user):
+            self.whois(
+                self.name,
+                user.name, user.name, self.realm.name,
+                user.name, self.realm.name, 'Hi mom!', False,
+                int(time() - user.lastMessage), user.signOn,
+                ['#' + group.name for group in user.itergroups()])
+
+        def ebUser(err):
+            err.trap(ewords.NoSuchUser)
+            self.sendMessage(
+                irc.ERR_NOSUCHNICK,
+                params[0],
+                ":No such nick/channel")
+
+        try:
+            user = params[0].decode(self.encoding)
+        except UnicodeDecodeError:
+            self.sendMessage(
+                irc.ERR_NOSUCHNICK,
+                params[0],
+                ":No such nick/channel")
+            return
+
+        self.realm.lookupUser(user).addCallbacks(cbUser, ebUser)
+
+
+    # Unsupported commands, here for legacy compatibility
+    def irc_OPER(self, prefix, params):
+        """Oper message
+
+        Parameters: <name> <password>
+        """
+        self.sendMessage(irc.ERR_NOOPERHOST, ":O-lines not applicable")
+
+
+class IRCFactory(protocol.ServerFactory):
+    """
+    IRC server that creates instances of the L{IRCUser} protocol.
+    
+    @ivar _serverInfo: A dictionary mapping:
+        "serviceName" to the name of the server,
+        "serviceVersion" to the running Twisted version (C{copyright.version}),
+        "creationDate" to the time that the server was started.
+    """
+    protocol = IRCUser
+
+    def __init__(self, realm, portal):
+        self.realm = realm
+        self.portal = portal
+        self._serverInfo = {
+            "serviceName": self.realm.name,
+            "serviceVersion": copyright.version,
+            "creationDate": ctime()
+            }
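
A minimal sketch of wiring this factory to a realm and portal and driving
one connection by hand (the account name and password are made up; the
helper classes come from twisted.cred and twisted.test):

    from twisted.cred import checkers, portal
    from twisted.test import proto_helpers
    from twisted.words import service

    realm = service.InMemoryWordsRealm(u"example.com")
    checker = checkers.InMemoryUsernamePasswordDatabaseDontUse(alice="secret")
    factory = service.IRCFactory(realm, portal.Portal(realm, [checker]))

    # Drive a protocol instance directly instead of listening on a port.
    proto = factory.buildProtocol(("127.0.0.1", 6667))
    transport = proto_helpers.StringTransport()
    proto.makeConnection(transport)
    proto.dataReceived("PASS secret\r\nNICK alice\r\nUSER alice 0 * :Alice\r\n")
    print transport.value()    # MOTD numerics, then the welcome numerics

The same factory can of course be handed to reactor.listenTCP or a
strports service for a real listening server.
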
+
+
+
+class PBMind(pb.Referenceable):
+    def __init__(self):
+        pass
+
+    def jellyFor(self, jellier):
+        return reflect.qual(PBMind), jellier.invoker.registerReference(self)
+
+    def remote_userJoined(self, user, group):
+        pass
+
+    def remote_userLeft(self, user, group, reason):
+        pass
+
+    def remote_receive(self, sender, recipient, message):
+        pass
+
+    def remote_groupMetaUpdate(self, group, meta):
+        pass
+
+
+class PBMindReference(pb.RemoteReference):
+    implements(iwords.IChatClient)
+
+    def receive(self, sender, recipient, message):
+        if iwords.IGroup.providedBy(recipient):
+            rec = PBGroup(self.realm, self.avatar, recipient)
+        else:
+            rec = PBUser(self.realm, self.avatar, recipient)
+        return self.callRemote(
+            'receive',
+            PBUser(self.realm, self.avatar, sender),
+            rec,
+            message)
+
+    def groupMetaUpdate(self, group, meta):
+        return self.callRemote(
+            'groupMetaUpdate',
+            PBGroup(self.realm, self.avatar, group),
+            meta)
+
+    def userJoined(self, group, user):
+        return self.callRemote(
+            'userJoined',
+            PBGroup(self.realm, self.avatar, group),
+            PBUser(self.realm, self.avatar, user))
+
+    def userLeft(self, group, user, reason=None):
+        assert reason is None or isinstance(reason, unicode)
+        return self.callRemote(
+            'userLeft',
+            PBGroup(self.realm, self.avatar, group),
+            PBUser(self.realm, self.avatar, user),
+            reason)
+pb.setUnjellyableForClass(PBMind, PBMindReference)
+
+
+class PBGroup(pb.Referenceable):
+    def __init__(self, realm, avatar, group):
+        self.realm = realm
+        self.avatar = avatar
+        self.group = group
+
+
+    def processUniqueID(self):
+        return hash((self.realm.name, self.avatar.name, self.group.name))
+
+
+    def jellyFor(self, jellier):
+        return reflect.qual(self.__class__), self.group.name.encode('utf-8'), jellier.invoker.registerReference(self)
+
+
+    def remote_leave(self, reason=None):
+        return self.avatar.leave(self.group, reason)
+
+
+    def remote_send(self, message):
+        return self.avatar.send(self.group, message)
+
+
+class PBGroupReference(pb.RemoteReference):
+    implements(iwords.IGroup)
+
+    def unjellyFor(self, unjellier, unjellyList):
+        clsName, name, ref = unjellyList
+        self.name = name.decode('utf-8')
+        return pb.RemoteReference.unjellyFor(self, unjellier, [clsName, ref])
+
+    def leave(self, reason=None):
+        return self.callRemote("leave", reason)
+
+    def send(self, message):
+        return self.callRemote("send", message)
+pb.setUnjellyableForClass(PBGroup, PBGroupReference)
+
+class PBUser(pb.Referenceable):
+    def __init__(self, realm, avatar, user):
+        self.realm = realm
+        self.avatar = avatar
+        self.user = user
+
+    def processUniqueID(self):
+        return hash((self.realm.name, self.avatar.name, self.user.name))
+
+
+class ChatAvatar(pb.Referenceable):
+    implements(iwords.IChatClient)
+
+    def __init__(self, avatar):
+        self.avatar = avatar
+
+
+    def jellyFor(self, jellier):
+        return reflect.qual(self.__class__), jellier.invoker.registerReference(self)
+
+
+    def remote_join(self, groupName):
+        assert isinstance(groupName, unicode)
+        def cbGroup(group):
+            def cbJoin(ignored):
+                return PBGroup(self.avatar.realm, self.avatar, group)
+            d = self.avatar.join(group)
+            d.addCallback(cbJoin)
+            return d
+        d = self.avatar.realm.getGroup(groupName)
+        d.addCallback(cbGroup)
+        return d
+registerAdapter(ChatAvatar, iwords.IUser, pb.IPerspective)
+
+class AvatarReference(pb.RemoteReference):
+    def join(self, groupName):
+        return self.callRemote('join', groupName)
+
+    def quit(self):
+        d = defer.Deferred()
+        self.broker.notifyOnDisconnect(lambda: d.callback(None))
+        self.broker.transport.loseConnection()
+        return d
+
+pb.setUnjellyableForClass(ChatAvatar, AvatarReference)
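
The setUnjellyableForClass registrations above are what give a PB chat
client local AvatarReference and PBGroupReference proxies for the
server-side objects.  A rough client-side sketch, assuming a words PB
service is already listening on the (made-up) host and port and that the
account exists:

    from twisted.cred import credentials
    from twisted.internet import reactor
    from twisted.spread import pb
    from twisted.words.service import PBMind

    factory = pb.PBClientFactory()
    reactor.connectTCP("example.com", 8787, factory)
    d = factory.login(credentials.UsernamePassword("alice", "secret"),
                      PBMind())
    # login() fires with an AvatarReference; join() then fires with a
    # PBGroupReference whose send()/leave() run on the server.
    d.addCallback(lambda avatar: avatar.join(u"python"))
    d.addCallback(lambda group: group.send({"text": u"hello over PB"}))
    reactor.run()
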
+
+
+class WordsRealm(object):
+    implements(portal.IRealm, iwords.IChatService)
+
+    _encoding = 'utf-8'
+
+    def __init__(self, name):
+        self.name = name
+
+
+    def userFactory(self, name):
+        return User(name)
+
+
+    def groupFactory(self, name):
+        return Group(name)
+
+
+    def logoutFactory(self, avatar, facet):
+        def logout():
+            # XXX Deferred support here
+            getattr(facet, 'logout', lambda: None)()
+            avatar.realm = avatar.mind = None
+        return logout
+
+
+    def requestAvatar(self, avatarId, mind, *interfaces):
+        if isinstance(avatarId, str):
+            avatarId = avatarId.decode(self._encoding)
+
+        def gotAvatar(avatar):
+            if avatar.realm is not None:
+                raise ewords.AlreadyLoggedIn()
+            for iface in interfaces:
+                facet = iface(avatar, None)
+                if facet is not None:
+                    avatar.loggedIn(self, mind)
+                    mind.name = avatarId
+                    mind.realm = self
+                    mind.avatar = avatar
+                    return iface, facet, self.logoutFactory(avatar, facet)
+            raise NotImplementedError(self, interfaces)
+
+        return self.getUser(avatarId).addCallback(gotAvatar)
+
+
+    # IChatService, mostly.
+    createGroupOnRequest = False
+    createUserOnRequest = True
+
+    def lookupUser(self, name):
+        raise NotImplementedError
+
+
+    def lookupGroup(self, group):
+        raise NotImplementedError
+
+
+    def addUser(self, user):
+        """Add the given user to this service.
+
+        This is an internal method intended to be overridden by
+        L{WordsRealm} subclasses, not called by external code.
+
+        @type user: L{IUser}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with C{None} when the user is
+        added, or which fails with
+        L{twisted.words.ewords.DuplicateUser} if a user with the
+        same name exists already.
+        """
+        raise NotImplementedError
+
+
+    def addGroup(self, group):
+        """Add the given group to this service.
+
+        @type group: L{IGroup}
+
+        @rtype: L{twisted.internet.defer.Deferred}
+        @return: A Deferred which fires with C{None} when the group is
+        added, or which fails with
+        L{twisted.words.ewords.DuplicateGroup} if a group with the
+        same name exists already.
+        """
+        raise NotImplementedError
+
+
+    def getGroup(self, name):
+        assert isinstance(name, unicode)
+        if self.createGroupOnRequest:
+            def ebGroup(err):
+                err.trap(ewords.DuplicateGroup)
+                return self.lookupGroup(name)
+            return self.createGroup(name).addErrback(ebGroup)
+        return self.lookupGroup(name)
+
+
+    def getUser(self, name):
+        assert isinstance(name, unicode)
+        if self.createUserOnRequest:
+            def ebUser(err):
+                err.trap(ewords.DuplicateUser)
+                return self.lookupUser(name)
+            return self.createUser(name).addErrback(ebUser)
+        return self.lookupUser(name)
+
+
+    def createUser(self, name):
+        assert isinstance(name, unicode)
+        def cbLookup(user):
+            return failure.Failure(ewords.DuplicateUser(name))
+        def ebLookup(err):
+            err.trap(ewords.NoSuchUser)
+            return self.userFactory(name)
+
+        name = name.lower()
+        d = self.lookupUser(name)
+        d.addCallbacks(cbLookup, ebLookup)
+        d.addCallback(self.addUser)
+        return d
+
+
+    def createGroup(self, name):
+        assert isinstance(name, unicode)
+        def cbLookup(group):
+            return failure.Failure(ewords.DuplicateGroup(name))
+        def ebLookup(err):
+            err.trap(ewords.NoSuchGroup)
+            return self.groupFactory(name)
+
+        name = name.lower()
+        d = self.lookupGroup(name)
+        d.addCallbacks(cbLookup, ebLookup)
+        d.addCallback(self.addGroup)
+        return d
+
+
+class InMemoryWordsRealm(WordsRealm):
+    def __init__(self, *a, **kw):
+        super(InMemoryWordsRealm, self).__init__(*a, **kw)
+        self.users = {}
+        self.groups = {}
+
+
+    def itergroups(self):
+        return defer.succeed(self.groups.itervalues())
+
+
+    def addUser(self, user):
+        if user.name in self.users:
+            return defer.fail(failure.Failure(ewords.DuplicateUser()))
+        self.users[user.name] = user
+        return defer.succeed(user)
+
+
+    def addGroup(self, group):
+        if group.name in self.groups:
+            return defer.fail(failure.Failure(ewords.DuplicateGroup()))
+        self.groups[group.name] = group
+        return defer.succeed(group)
+
+
+    def lookupUser(self, name):
+        assert isinstance(name, unicode)
+        name = name.lower()
+        try:
+            user = self.users[name]
+        except KeyError:
+            return defer.fail(failure.Failure(ewords.NoSuchUser(name)))
+        else:
+            return defer.succeed(user)
+
+
+    def lookupGroup(self, name):
+        assert isinstance(name, unicode)
+        name = name.lower()
+        try:
+            group = self.groups[name]
+        except KeyError:
+            return defer.fail(failure.Failure(ewords.NoSuchGroup(name)))
+        else:
+            return defer.succeed(group)
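
All of the realm methods above return Deferreds that have already fired
(defer.succeed / defer.fail), so a quick sketch like the following runs
its callbacks synchronously (the names are illustrative):

    from twisted.words import ewords, service

    realm = service.InMemoryWordsRealm(u"example.com")
    realm.createGroupOnRequest = True

    d = realm.getGroup(u"python")            # created on demand
    d.addCallback(lambda group: group.size())
    d.addCallback(lambda size: "#python has %d members" % size)

    # A user that was never created fails with NoSuchUser.
    realm.lookupUser(u"alice").addErrback(
        lambda f: f.trap(ewords.NoSuchUser))
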
+
+__all__ = [
+    'Group', 'User',
+
+    'WordsRealm', 'InMemoryWordsRealm',
+    ]
diff --git a/ThirdParty/Twisted/twisted/words/tap.py b/ThirdParty/Twisted/twisted/words/tap.py
new file mode 100644
index 0000000..c0ba9fd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/tap.py
@@ -0,0 +1,74 @@
+# -*- test-case-name: twisted.words.test.test_tap -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+"""
+Shiny new words service maker
+"""
+
+import sys, socket
+
+from twisted.application import strports
+from twisted.application.service import MultiService
+from twisted.python import usage
+from twisted import plugin
+
+from twisted.words import iwords, service
+from twisted.cred import checkers, credentials, portal, strcred
+
+class Options(usage.Options, strcred.AuthOptionMixin):
+    supportedInterfaces = [credentials.IUsernamePassword]
+    optParameters = [
+        ('hostname', None, socket.gethostname(),
+         'Name of this server; purely informative')]
+
+    compData = usage.Completions(multiUse=["group"])
+
+    interfacePlugins = {}
+    plg = None
+    for plg in plugin.getPlugins(iwords.IProtocolPlugin):
+        assert plg.name not in interfacePlugins
+        interfacePlugins[plg.name] = plg
+        optParameters.append((
+            plg.name + '-port',
+            None, None,
+            'strports description of the port to bind for the ' + plg.name + ' server'))
+    del plg
+
+    def __init__(self, *a, **kw):
+        usage.Options.__init__(self, *a, **kw)
+        self['groups'] = []
+
+    def opt_group(self, name):
+        """Specify a group which should exist
+        """
+        self['groups'].append(name.decode(sys.stdin.encoding))
+
+    def opt_passwd(self, filename):
+        """
+        Name of a passwd-style file. (This is for
+        backwards-compatibility only; you should use the --auth
+        option instead.)
+        """
+        self.addChecker(checkers.FilePasswordDB(filename))
+
+def makeService(config):
+    credCheckers = config.get('credCheckers', [])
+    wordsRealm = service.InMemoryWordsRealm(config['hostname'])
+    wordsPortal = portal.Portal(wordsRealm, credCheckers)
+
+    msvc = MultiService()
+
+    # XXX Attribute lookup on config is kind of bad - hrm.
+    for plgName in config.interfacePlugins:
+        port = config.get(plgName + '-port')
+        if port is not None:
+            factory = config.interfacePlugins[plgName].getFactory(wordsRealm, wordsPortal)
+            svc = strports.service(port, factory)
+            svc.setServiceParent(msvc)
+
+    # This is bogus.  createGroup is async.  makeService must be
+    # allowed to return a Deferred or some crap.
+    for g in config['groups']:
+        wordsRealm.createGroup(g)
+
+    return msvc
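
A rough sketch of exercising this service maker from Python rather than
through twistd (the hostname, credentials and port are made up;
"--auth memory:..." uses the in-memory checker from twisted.cred.strcred,
and "--irc-port" assumes the stock "irc" protocol plugin is installed):

    from twisted.words import tap

    config = tap.Options()
    config.parseOptions([
        "--hostname", "irc.example.com",
        # --group decodes its argument with sys.stdin.encoding, so it is
        # happiest in an interactive session.
        "--group", "python",
        "--auth", "memory:alice:secret",
        "--irc-port", "tcp:6667",
    ])
    svc = tap.makeService(config)    # a MultiService ready for twistd

The command-line equivalent is roughly "twistd words --hostname
irc.example.com --auth memory:alice:secret --irc-port tcp:6667".
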
diff --git a/ThirdParty/Twisted/twisted/words/test/__init__.py b/ThirdParty/Twisted/twisted/words/test/__init__.py
new file mode 100644
index 0000000..d599f20
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/__init__.py
@@ -0,0 +1 @@
+"Words tests"
diff --git a/ThirdParty/Twisted/twisted/words/test/test_basechat.py b/ThirdParty/Twisted/twisted/words/test/test_basechat.py
new file mode 100644
index 0000000..8347d30
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_basechat.py
@@ -0,0 +1,68 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.im.basechat}.
+"""
+
+from twisted.trial import unittest
+from twisted.words.im import basechat, basesupport
+
+
+class ChatUITests(unittest.TestCase):
+    """
+    Tests for the L{basechat.ChatUI} chat client.
+    """
+    def setUp(self):
+        self.ui = basechat.ChatUI()
+        self.account = basesupport.AbstractAccount("fooAccount", False, "foo",
+                                                   "password", "host", "port")
+        self.person = basesupport.AbstractPerson("foo", self.account)
+
+
+    def test_contactChangedNickNoKey(self):
+        """
+        L{basechat.ChatUI.contactChangedNick} on an
+        L{twisted.words.im.interfaces.IPerson} who doesn't have an account
+        associated with the L{basechat.ChatUI} instance has no effect.
+        """
+        self.assertEqual(self.person.name, "foo")
+        self.assertEqual(self.person.account, self.account)
+
+        self.ui.contactChangedNick(self.person, "bar")
+        self.assertEqual(self.person.name, "foo")
+        self.assertEqual(self.person.account, self.account)
+
+
+    def test_contactChangedNickNoConversation(self):
+        """
+        L{basechat.ChatUI.contactChangedNick} changes the name for an
+        L{twisted.words.im.interfaces.IPerson}.
+        """
+        self.ui.persons[self.person.name, self.person.account] = self.person
+
+        self.assertEqual(self.person.name, "foo")
+        self.assertEqual(self.person.account, self.account)
+
+        self.ui.contactChangedNick(self.person, "bar")
+        self.assertEqual(self.person.name, "bar")
+        self.assertEqual(self.person.account, self.account)
+
+
+    def test_contactChangedNickHasConversation(self):
+        """
+        If an L{twisted.words.im.interfaces.IPerson} is in a
+        L{basechat.Conversation}, L{basechat.ChatUI.contactChangedNick} causes a
+        name change for that person in both the L{basechat.Conversation} and the
+        L{basechat.ChatUI}.
+        """
+        self.ui.persons[self.person.name, self.person.account] = self.person
+        conversation = basechat.Conversation(self.person, self.ui)
+        self.ui.conversations[self.person] = conversation
+
+        self.assertEqual(self.person.name, "foo")
+        self.assertEqual(self.person.account, self.account)
+
+        self.ui.contactChangedNick(self.person, "bar")
+        self.assertEqual(self.person.name, "bar")
+        self.assertEqual(self.person.account, self.account)
diff --git a/ThirdParty/Twisted/twisted/words/test/test_basesupport.py b/ThirdParty/Twisted/twisted/words/test/test_basesupport.py
new file mode 100644
index 0000000..3a81963
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_basesupport.py
@@ -0,0 +1,97 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.trial import unittest
+from twisted.words.im import basesupport
+from twisted.internet import error, defer
+
+class DummyAccount(basesupport.AbstractAccount):
+    """
+    An account object that will do nothing when asked to start to log on.
+    """
+
+    loginHasFailed = False
+    loginCallbackCalled = False
+
+    def _startLogOn(self, *args):
+        """
+        Set C{self.loginDeferred} to the Deferred returned here, so that a
+        test case can fire it with C{.callback} or C{.errback}.
+
+        @return: A deferred.
+        """
+        self.loginDeferred = defer.Deferred()
+        return self.loginDeferred
+
+    def _loginFailed(self, result):
+        self.loginHasFailed = True
+        return basesupport.AbstractAccount._loginFailed(self, result)
+
+    def _cb_logOn(self, result):
+        self.loginCallbackCalled = True
+        return basesupport.AbstractAccount._cb_logOn(self, result)
+
+class DummyUI(object):
+    """
+    Provide just the interface required to be passed to AbstractAccount.logOn.
+    """
+    clientRegistered = False
+
+    def registerAccountClient(self, result): 
+        self.clientRegistered = True
+
+class ClientMsgTests(unittest.TestCase):
+    def makeUI(self):
+        return DummyUI()
+
+    def makeAccount(self):
+        return DummyAccount('la', False, 'la', None, 'localhost', 6667)
+
+    def test_connect(self):
+        """
+        Test that account.logOn works, and it calls the right callback when a
+        connection is established.
+        """
+        account = self.makeAccount()
+        ui = self.makeUI()
+        d = account.logOn(ui)
+        account.loginDeferred.callback(None)
+
+        def check(result):
+            self.assert_(not account.loginHasFailed, 
+                    "Login shouldn't have failed")
+            self.assert_(account.loginCallbackCalled, 
+                    "We should be logged in")
+        d.addCallback(check)
+        return d
+
+    def test_failedConnect(self):
+        """
+        Test that account.logOn calls the right errback when the connection
+        attempt fails.
+        """
+        account = self.makeAccount()
+        ui = self.makeUI()
+        d = account.logOn(ui)
+        account.loginDeferred.errback(Exception())
+
+        def err(reason):
+            self.assert_(account.loginHasFailed, "Login should have failed")
+            self.assert_(not account.loginCallbackCalled, 
+                    "We shouldn't be logged in")
+            self.assert_(not ui.clientRegistered, 
+                    "Client shouldn't be registered in the UI")
+        cb = lambda r: self.assert_(False, "Shouldn't get called back")
+        d.addCallbacks(cb, err)
+        return d
+
+    def test_alreadyConnecting(self):
+        """
+        Test that a second logOn call fails sensibly when a connection
+        attempt is already in progress.
+        """
+        account = self.makeAccount()
+        ui = self.makeUI()
+        account.logOn(ui)
+        self.assertRaises(error.ConnectError, account.logOn, ui)
+
diff --git a/ThirdParty/Twisted/twisted/words/test/test_domish.py b/ThirdParty/Twisted/twisted/words/test/test_domish.py
new file mode 100644
index 0000000..275afb7
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_domish.py
@@ -0,0 +1,434 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.xish.domish}, a DOM-like library for XMPP.
+"""
+
+from twisted.trial import unittest
+from twisted.words.xish import domish
+
+
+class DomishTestCase(unittest.TestCase):
+    def testEscaping(self):
+        s = "&<>'\""
+        self.assertEqual(domish.escapeToXml(s), "&amp;&lt;&gt;'\"")
+        self.assertEqual(domish.escapeToXml(s, 1), "&amp;&lt;&gt;&apos;&quot;")
+
+    def testNamespaceObject(self):
+        ns = domish.Namespace("testns")
+        self.assertEqual(ns.foo, ("testns", "foo"))
+
+    def testElementInit(self):
+        e = domish.Element((None, "foo"))
+        self.assertEqual(e.name, "foo")
+        self.assertEqual(e.uri, None)
+        self.assertEqual(e.defaultUri, None)
+        self.assertEqual(e.parent, None)
+
+        e = domish.Element(("", "foo"))
+        self.assertEqual(e.name, "foo")
+        self.assertEqual(e.uri, "")
+        self.assertEqual(e.defaultUri, "")
+        self.assertEqual(e.parent, None)
+
+        e = domish.Element(("testns", "foo"))
+        self.assertEqual(e.name, "foo")
+        self.assertEqual(e.uri, "testns")
+        self.assertEqual(e.defaultUri, "testns")
+        self.assertEqual(e.parent, None)
+
+        e = domish.Element(("testns", "foo"), "test2ns")
+        self.assertEqual(e.name, "foo")
+        self.assertEqual(e.uri, "testns")
+        self.assertEqual(e.defaultUri, "test2ns")
+
+    def testChildOps(self):
+        e = domish.Element(("testns", "foo"))
+        e.addContent("somecontent")
+        b2 = e.addElement(("testns2", "bar2"))
+        e["attrib1"] = "value1"
+        e[("testns2", "attrib2")] = "value2"
+        e.addElement("bar")
+        e.addElement("bar")
+        e.addContent("abc")
+        e.addContent("123")
+
+        # Check content merging
+        self.assertEqual(e.children[-1], "abc123")
+
+        # Check str()/content extraction
+        self.assertEqual(str(e), "somecontent")
+
+        # Check direct child accessor
+        self.assertEqual(e.bar2, b2)
+        e.bar2.addContent("subcontent")
+        e.bar2["bar2value"] = "somevalue"
+
+        # Check child ops
+        self.assertEqual(e.children[1], e.bar2)
+        self.assertEqual(e.children[2], e.bar)
+
+        # Check attribute ops
+        self.assertEqual(e["attrib1"], "value1")
+        del e["attrib1"]
+        self.assertEqual(e.hasAttribute("attrib1"), 0)
+        self.assertEqual(e.hasAttribute("attrib2"), 0)
+        self.assertEqual(e[("testns2", "attrib2")], "value2")
+
+
+    def test_elements(self):
+        """
+        Calling C{elements} without arguments on a L{domish.Element} returns
+        all child elements, whatever the qualified name.
+        """
+        e = domish.Element((u"testns", u"foo"))
+        c1 = e.addElement(u"name")
+        c2 = e.addElement((u"testns2", u"baz"))
+        c3 = e.addElement(u"quux")
+        c4 = e.addElement((u"testns", u"name"))
+
+        elts = list(e.elements())
+
+        self.assertIn(c1, elts)
+        self.assertIn(c2, elts)
+        self.assertIn(c3, elts)
+        self.assertIn(c4, elts)
+
+
+    def test_elementsWithQN(self):
+        """
+        Calling C{elements} with a namespace and local name on a
+        L{domish.Element} returns all child elements with that qualified name.
+        """
+        e = domish.Element((u"testns", u"foo"))
+        c1 = e.addElement(u"name")
+        c2 = e.addElement((u"testns2", u"baz"))
+        c3 = e.addElement(u"quux")
+        c4 = e.addElement((u"testns", u"name"))
+
+        elts = list(e.elements(u"testns", u"name"))
+
+        self.assertIn(c1, elts)
+        self.assertNotIn(c2, elts)
+        self.assertNotIn(c3, elts)
+        self.assertIn(c4, elts)
+
+
+
+class DomishStreamTestsMixin:
+    """
+    Mixin defining tests for different stream implementations.
+
+    @ivar streamClass: A no-argument callable which will be used to create an
+        XML parser which can produce a stream of elements from incremental
+        input.
+    """
+    def setUp(self):
+        self.doc_started = False
+        self.doc_ended = False
+        self.root = None
+        self.elements = []
+        self.stream = self.streamClass()
+        self.stream.DocumentStartEvent = self._docStarted
+        self.stream.ElementEvent = self.elements.append
+        self.stream.DocumentEndEvent = self._docEnded
+
+    def _docStarted(self, root):
+        self.root = root
+        self.doc_started = True
+
+    def _docEnded(self):
+        self.doc_ended = True
+
+    def doTest(self, xml):
+        self.stream.parse(xml)
+
+    def testHarness(self):
+        xml = "<root><child/><child2/></root>"
+        self.stream.parse(xml)
+        self.assertEqual(self.doc_started, True)
+        self.assertEqual(self.root.name, 'root')
+        self.assertEqual(self.elements[0].name, 'child')
+        self.assertEqual(self.elements[1].name, 'child2')
+        self.assertEqual(self.doc_ended, True)
+
+    def testBasic(self):
+        xml = "<stream:stream xmlns:stream='etherx' xmlns='jabber'>\n" + \
+              "  <message to='bar'>" + \
+              "    <x xmlns='xdelay'>some&amp;data&gt;</x>" + \
+              "  </message>" + \
+              "</stream:stream>"
+
+        self.stream.parse(xml)
+        self.assertEqual(self.root.name, 'stream')
+        self.assertEqual(self.root.uri, 'etherx')
+        self.assertEqual(self.elements[0].name, 'message')
+        self.assertEqual(self.elements[0].uri, 'jabber')
+        self.assertEqual(self.elements[0]['to'], 'bar')
+        self.assertEqual(self.elements[0].x.uri, 'xdelay')
+        self.assertEqual(unicode(self.elements[0].x), 'some&data>')
+
+    def testNoRootNS(self):
+        xml = "<stream><error xmlns='etherx'/></stream>"
+
+        self.stream.parse(xml)
+        self.assertEqual(self.root.uri, '')
+        self.assertEqual(self.elements[0].uri, 'etherx')
+
+    def testNoDefaultNS(self):
+        xml = "<stream:stream xmlns:stream='etherx'><error/></stream:stream>"
+
+        self.stream.parse(xml)
+        self.assertEqual(self.root.uri, 'etherx')
+        self.assertEqual(self.root.defaultUri, '')
+        self.assertEqual(self.elements[0].uri, '')
+        self.assertEqual(self.elements[0].defaultUri, '')
+
+    def testChildDefaultNS(self):
+        xml = "<root xmlns='testns'><child/></root>"
+
+        self.stream.parse(xml)
+        self.assertEqual(self.root.uri, 'testns')
+        self.assertEqual(self.elements[0].uri, 'testns')
+
+    def testEmptyChildNS(self):
+        xml = "<root xmlns='testns'><child1><child2 xmlns=''/></child1></root>"
+
+        self.stream.parse(xml)
+        self.assertEqual(self.elements[0].child2.uri, '')
+
+
+    def test_namespaceWithWhitespace(self):
+        """
+        Whitespace in an xmlns value is preserved in the resulting node's C{uri}
+        attribute.
+        """
+        xml = "<root xmlns:foo=' bar baz '><foo:bar foo:baz='quux'/></root>"
+        self.stream.parse(xml)
+        self.assertEqual(self.elements[0].uri, " bar baz ")
+        self.assertEqual(
+            self.elements[0].attributes, {(" bar baz ", "baz"): "quux"})
+
+
+    def testChildPrefix(self):
+        xml = "<root xmlns='testns' xmlns:foo='testns2'><foo:child/></root>"
+
+        self.stream.parse(xml)
+        self.assertEqual(self.root.localPrefixes['foo'], 'testns2')
+        self.assertEqual(self.elements[0].uri, 'testns2')
+
+    def testUnclosedElement(self):
+        self.assertRaises(domish.ParserError, self.stream.parse,
+                                              "<root><error></root>")
+
+    def test_namespaceReuse(self):
+        """
+        Test that reuse of namespaces does not affect an element's
+        serialization.
+
+        When one element uses a prefix for a certain namespace, this is
+        stored in the C{localPrefixes} attribute of the element. We want
+        to make sure that elements created after such use won't end up
+        with that prefix in their C{localPrefixes} attribute, too.
+        """
+
+        xml = """<root>
+                   <foo:child1 xmlns:foo='testns'/>
+                   <child2 xmlns='testns'/>
+                 </root>"""
+
+        self.stream.parse(xml)
+        self.assertEqual('child1', self.elements[0].name)
+        self.assertEqual('testns', self.elements[0].uri)
+        self.assertEqual('', self.elements[0].defaultUri)
+        self.assertEqual({'foo': 'testns'}, self.elements[0].localPrefixes)
+        self.assertEqual('child2', self.elements[1].name)
+        self.assertEqual('testns', self.elements[1].uri)
+        self.assertEqual('testns', self.elements[1].defaultUri)
+        self.assertEqual({}, self.elements[1].localPrefixes)
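
Outside the test harness, the same event-driven parser API can be used
directly; a minimal sketch mirroring setUp() above (the element names are
illustrative):

    from twisted.words.xish import domish

    elements = []
    stream = domish.ExpatElementStream()
    stream.DocumentStartEvent = lambda root: None    # root element opened
    stream.ElementEvent = elements.append            # each completed child
    stream.DocumentEndEvent = lambda: None           # root element closed
    stream.parse("<stream><message to='bar'/><presence/></stream>")
    print [e.name for e in elements]                 # ['message', 'presence']
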
+
+
+
+class DomishExpatStreamTestCase(DomishStreamTestsMixin, unittest.TestCase):
+    """
+    Tests for L{domish.ExpatElementStream}, the expat-based element stream
+    implementation.
+    """
+    streamClass = domish.ExpatElementStream
+
+    try:
+        import pyexpat
+    except ImportError:
+        skip = "pyexpat is required for ExpatElementStream tests."
+
+
+
+class DomishSuxStreamTestCase(DomishStreamTestsMixin, unittest.TestCase):
+    """
+    Tests for L{domish.SuxElementStream}, the L{twisted.web.sux}-based element
+    stream implementation.
+    """
+    streamClass = domish.SuxElementStream
+
+    if domish.SuxElementStream is None:
+        skip = "twisted.web is required for SuxElementStream tests."
+
+
+
+class SerializerTests(unittest.TestCase):
+    def testNoNamespace(self):
+        e = domish.Element((None, "foo"))
+        self.assertEqual(e.toXml(), "<foo/>")
+        self.assertEqual(e.toXml(closeElement = 0), "<foo>")
+
+    def testDefaultNamespace(self):
+        e = domish.Element(("testns", "foo"))
+        self.assertEqual(e.toXml(), "<foo xmlns='testns'/>")
+
+    def testOtherNamespace(self):
+        e = domish.Element(("testns", "foo"), "testns2")
+        self.assertEqual(e.toXml({'testns': 'bar'}),
+                          "<bar:foo xmlns:bar='testns' xmlns='testns2'/>")
+
+    def testChildDefaultNamespace(self):
+        e = domish.Element(("testns", "foo"))
+        e.addElement("bar")
+        self.assertEqual(e.toXml(), "<foo xmlns='testns'><bar/></foo>")
+
+    def testChildSameNamespace(self):
+        e = domish.Element(("testns", "foo"))
+        e.addElement(("testns", "bar"))
+        self.assertEqual(e.toXml(), "<foo xmlns='testns'><bar/></foo>")
+
+    def testChildSameDefaultNamespace(self):
+        e = domish.Element(("testns", "foo"))
+        e.addElement("bar", "testns")
+        self.assertEqual(e.toXml(), "<foo xmlns='testns'><bar/></foo>")
+
+    def testChildOtherDefaultNamespace(self):
+        e = domish.Element(("testns", "foo"))
+        e.addElement(("testns2", "bar"), 'testns2')
+        self.assertEqual(e.toXml(), "<foo xmlns='testns'><bar xmlns='testns2'/></foo>")
+
+    def testOnlyChildDefaultNamespace(self):
+        e = domish.Element((None, "foo"))
+        e.addElement(("ns2", "bar"), 'ns2')
+        self.assertEqual(e.toXml(), "<foo><bar xmlns='ns2'/></foo>")
+
+    def testOnlyChildDefaultNamespace2(self):
+        e = domish.Element((None, "foo"))
+        e.addElement("bar")
+        self.assertEqual(e.toXml(), "<foo><bar/></foo>")
+
+    def testChildInDefaultNamespace(self):
+        e = domish.Element(("testns", "foo"), "testns2")
+        e.addElement(("testns2", "bar"))
+        self.assertEqual(e.toXml(), "<xn0:foo xmlns:xn0='testns' xmlns='testns2'><bar/></xn0:foo>")
+
+    def testQualifiedAttribute(self):
+        e = domish.Element((None, "foo"),
+                           attribs = {("testns2", "bar"): "baz"})
+        self.assertEqual(e.toXml(), "<foo xmlns:xn0='testns2' xn0:bar='baz'/>")
+
+    def testQualifiedAttributeDefaultNS(self):
+        e = domish.Element(("testns", "foo"),
+                           attribs = {("testns", "bar"): "baz"})
+        self.assertEqual(e.toXml(), "<foo xmlns='testns' xmlns:xn0='testns' xn0:bar='baz'/>")
+
+    def testTwoChilds(self):
+        e = domish.Element(('', "foo"))
+        child1 = e.addElement(("testns", "bar"), "testns2")
+        child1.addElement(('testns2', 'quux'))
+        child2 = e.addElement(("testns3", "baz"), "testns4")
+        child2.addElement(('testns', 'quux'))
+        self.assertEqual(e.toXml(), "<foo><xn0:bar xmlns:xn0='testns' xmlns='testns2'><quux/></xn0:bar><xn1:baz xmlns:xn1='testns3' xmlns='testns4'><xn0:quux xmlns:xn0='testns'/></xn1:baz></foo>")
+
+    def testXMLNamespace(self):
+        e = domish.Element((None, "foo"),
+                           attribs = {("http://www.w3.org/XML/1998/namespace",
+                                       "lang"): "en_US"})
+        self.assertEqual(e.toXml(), "<foo xml:lang='en_US'/>")
+
+    def testQualifiedAttributeGivenListOfPrefixes(self):
+        e = domish.Element((None, "foo"),
+                           attribs = {("testns2", "bar"): "baz"})
+        self.assertEqual(e.toXml({"testns2": "qux"}),
+                          "<foo xmlns:qux='testns2' qux:bar='baz'/>")
+
+    def testNSPrefix(self):
+        e = domish.Element((None, "foo"),
+                           attribs = {("testns2", "bar"): "baz"})
+        c = e.addElement(("testns2", "qux"))
+        c[("testns2", "bar")] = "quux"
+
+        self.assertEqual(e.toXml(), "<foo xmlns:xn0='testns2' xn0:bar='baz'><xn0:qux xn0:bar='quux'/></foo>")
+
+    def testDefaultNSPrefix(self):
+        e = domish.Element((None, "foo"),
+                           attribs = {("testns2", "bar"): "baz"})
+        c = e.addElement(("testns2", "qux"))
+        c[("testns2", "bar")] = "quux"
+        c.addElement('foo')
+
+        self.assertEqual(e.toXml(), "<foo xmlns:xn0='testns2' xn0:bar='baz'><xn0:qux xn0:bar='quux'><xn0:foo/></xn0:qux></foo>")
+
+    def testPrefixScope(self):
+        e = domish.Element(('testns', 'foo'))
+
+        self.assertEqual(e.toXml(prefixes={'testns': 'bar'},
+                                  prefixesInScope=['bar']),
+                          "<bar:foo/>")
+
+    def testLocalPrefixes(self):
+        e = domish.Element(('testns', 'foo'), localPrefixes={'bar': 'testns'})
+        self.assertEqual(e.toXml(), "<bar:foo xmlns:bar='testns'/>")
+
+    def testLocalPrefixesWithChild(self):
+        e = domish.Element(('testns', 'foo'), localPrefixes={'bar': 'testns'})
+        e.addElement('baz')
+        self.assertIdentical(e.baz.defaultUri, None)
+        self.assertEqual(e.toXml(), "<bar:foo xmlns:bar='testns'><baz/></bar:foo>")
+
+    def test_prefixesReuse(self):
+        """
+        Test that prefixes passed to serialization are not modified.
+
+        This test makes sure that passing a dictionary of prefixes repeatedly
+        to C{toXml} of elements does not cause serialization errors. A
+        previous implementation changed the passed in dictionary internally,
+        causing havoc later on.
+        """
+        prefixes = {'testns': 'foo'}
+
+        # test passing of dictionary
+        s = domish.SerializerClass(prefixes=prefixes)
+        self.assertNotIdentical(prefixes, s.prefixes)
+
+        # test proper serialization on prefixes reuse
+        e = domish.Element(('testns2', 'foo'),
+                           localPrefixes={'quux': 'testns2'})
+        self.assertEqual("<quux:foo xmlns:quux='testns2'/>",
+                          e.toXml(prefixes=prefixes))
+        e = domish.Element(('testns2', 'foo'))
+        self.assertEqual("<foo xmlns='testns2'/>",
+                          e.toXml(prefixes=prefixes))
+
+    def testRawXMLSerialization(self):
+        e = domish.Element((None, "foo"))
+        e.addRawXml("<abc123>")
+        # The testcase below should NOT generate valid XML -- that's
+        # the whole point of using the raw XML call -- it is the caller's
+        # responsibility to ensure that the data inserted is valid.
+        self.assertEqual(e.toXml(), "<foo><abc123></foo>")
+
+    def testRawXMLWithUnicodeSerialization(self):
+        e = domish.Element((None, "foo"))
+        e.addRawXml(u"<degree>\u00B0</degree>")
+        self.assertEqual(e.toXml(), u"<foo><degree>\u00B0</degree></foo>")
+
+    def testUnicodeSerialization(self):
+        e = domish.Element((None, "foo"))
+        e["test"] = u"my value\u0221e"
+        e.addContent(u"A degree symbol...\u00B0")
+        self.assertEqual(e.toXml(),
+                          u"<foo test='my value\u0221e'>A degree symbol...\u00B0</foo>")
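
For comparison with the serializer tests above, a short sketch of
building and serializing an element by hand (the namespace and address
are made up):

    from twisted.words.xish import domish

    message = domish.Element(("jabber:client", "message"))
    message["to"] = "alice@example.com"
    message.addElement("body", content="hello")
    print message.toXml()
    # <message xmlns='jabber:client' to='alice@example.com'><body>hello</body></message>
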
diff --git a/ThirdParty/Twisted/twisted/words/test/test_irc.py b/ThirdParty/Twisted/twisted/words/test/test_irc.py
new file mode 100644
index 0000000..ffda689
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_irc.py
@@ -0,0 +1,1898 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.irc}.
+"""
+
+import time
+
+from twisted.trial import unittest
+from twisted.trial.unittest import TestCase
+from twisted.words.protocols import irc
+from twisted.words.protocols.irc import IRCClient
+from twisted.internet import protocol, task
+from twisted.test.proto_helpers import StringTransport, StringIOWithoutClosing
+
+
+
+class ModeParsingTests(unittest.TestCase):
+    """
+    Tests for L{twisted.words.protocols.irc.parseModes}.
+    """
+    paramModes = ('klb', 'b')
+
+
+    def test_emptyModes(self):
+        """
+        Parsing an empty mode string raises L{irc.IRCBadModes}.
+        """
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, '', [])
+
+
+    def test_emptyModeSequence(self):
+        """
+        Parsing a mode string that contains an empty sequence (either a C{+} or
+        C{-} followed directly by another C{+} or C{-}, or not followed by
+        anything at all) raises L{irc.IRCBadModes}.
+        """
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, '++k', [])
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, '-+k', [])
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, '+', [])
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, '-', [])
+
+
+    def test_malformedModes(self):
+        """
+        Parsing a mode string that does not start with C{+} or C{-} raises
+        L{irc.IRCBadModes}.
+        """
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, 'foo', [])
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, '%', [])
+
+
+    def test_nullModes(self):
+        """
+        Parsing a mode string that contains no mode characters raises
+        L{irc.IRCBadModes}.
+        """
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, '+', [])
+        self.assertRaises(irc.IRCBadModes, irc.parseModes, '-', [])
+
+
+    def test_singleMode(self):
+        """
+        Parsing a single mode setting with no parameters results in that mode,
+        with no parameters, in the "added" direction and no modes in the
+        "removed" direction.
+        """
+        added, removed = irc.parseModes('+s', [])
+        self.assertEqual(added, [('s', None)])
+        self.assertEqual(removed, [])
+
+        added, removed = irc.parseModes('-s', [])
+        self.assertEqual(added, [])
+        self.assertEqual(removed, [('s', None)])
+
+
+    def test_singleDirection(self):
+        """
+        Parsing a single-direction mode setting with multiple modes and no
+        parameters, results in all modes falling into the same direction group.
+        """
+        added, removed = irc.parseModes('+stn', [])
+        self.assertEqual(added, [('s', None),
+                                  ('t', None),
+                                  ('n', None)])
+        self.assertEqual(removed, [])
+
+        added, removed = irc.parseModes('-nt', [])
+        self.assertEqual(added, [])
+        self.assertEqual(removed, [('n', None),
+                                    ('t', None)])
+
+
+    def test_multiDirection(self):
+        """
+        Parsing a multi-direction mode setting with no parameters.
+        """
+        added, removed = irc.parseModes('+s-n+ti', [])
+        self.assertEqual(added, [('s', None),
+                                  ('t', None),
+                                  ('i', None)])
+        self.assertEqual(removed, [('n', None)])
+
+
+    def test_consecutiveDirection(self):
+        """
+        Parsing a multi-direction mode setting containing two consecutive mode
+        sequences with the same direction results in the same result as if
+        there were only one mode sequence in the same direction.
+        """
+        added, removed = irc.parseModes('+sn+ti', [])
+        self.assertEqual(added, [('s', None),
+                                  ('n', None),
+                                  ('t', None),
+                                  ('i', None)])
+        self.assertEqual(removed, [])
+
+
+    def test_mismatchedParams(self):
+        """
+        If the number of mode parameters does not match the number of modes
+        expecting parameters, L{irc.IRCBadModes} is raised.
+        """
+        self.assertRaises(irc.IRCBadModes,
+                          irc.parseModes,
+                          '+k', [],
+                          self.paramModes)
+        self.assertRaises(irc.IRCBadModes,
+                          irc.parseModes,
+                          '+kl', ['foo', '10', 'lulz_extra_param'],
+                          self.paramModes)
+
+
+    def test_parameters(self):
+        """
+        Modes which require parameters are parsed and paired with their
+        relevant parameter; modes which do not require parameters do not
+        consume any of the parameters.
+        """
+        added, removed = irc.parseModes(
+            '+klbb',
+            ['somekey', '42', 'nick!user@host', 'other!*@*'],
+            self.paramModes)
+        self.assertEqual(added, [('k', 'somekey'),
+                                  ('l', '42'),
+                                  ('b', 'nick!user@host'),
+                                  ('b', 'other!*@*')])
+        self.assertEqual(removed, [])
+
+        added, removed = irc.parseModes(
+            '-klbb',
+            ['nick!user@host', 'other!*@*'],
+            self.paramModes)
+        self.assertEqual(added, [])
+        self.assertEqual(removed, [('k', None),
+                                    ('l', None),
+                                    ('b', 'nick!user@host'),
+                                    ('b', 'other!*@*')])
+
+        # Mix a no-argument mode in with argument modes.
+        added, removed = irc.parseModes(
+            '+knbb',
+            ['somekey', 'nick!user@host', 'other!*@*'],
+            self.paramModes)
+        self.assertEqual(added, [('k', 'somekey'),
+                                  ('n', None),
+                                  ('b', 'nick!user@host'),
+                                  ('b', 'other!*@*')])
+        self.assertEqual(removed, [])
+
+
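+# Illustrative sketch, not part of the upstream test module: the helper name
+# below is invented here purely to show how parseModes pairs parameter-taking
+# modes with their arguments, using the same paramModes as the tests above.
+def _exampleParseModes():
+    """
+    Return C{([('k', 'sekrit')], [('b', 'nick!*@*')])}: 'k' consumes a
+    parameter when added and 'b' consumes one in both directions.
+    """
+    return irc.parseModes('+k-b', ['sekrit', 'nick!*@*'], ('klb', 'b'))
+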
+
+stringSubjects = [
+    "Hello, this is a nice string with no complications.",
+    "xargs%(NUL)smight%(NUL)slike%(NUL)sthis" % {'NUL': irc.NUL },
+    "embedded%(CR)snewline%(CR)s%(NL)sFUN%(NL)s" % {'CR': irc.CR,
+                                                    'NL': irc.NL},
+    "escape!%(X)s escape!%(M)s %(X)s%(X)sa %(M)s0" % {'X': irc.X_QUOTE,
+                                                      'M': irc.M_QUOTE}
+    ]
+
+
+class QuotingTest(unittest.TestCase):
+    def test_lowquoteSanity(self):
+        """
+        Testing client-server level quote/dequote.
+        """
+        for s in stringSubjects:
+            self.assertEqual(s, irc.lowDequote(irc.lowQuote(s)))
+
+
+    def test_ctcpquoteSanity(self):
+        """
+        Testing CTCP message level quote/dequote.
+        """
+        for s in stringSubjects:
+            self.assertEqual(s, irc.ctcpDequote(irc.ctcpQuote(s)))
+
+
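+# Illustrative sketch, not part of the upstream test module: the quoting
+# helpers exercised above are inverses of one another; the function below is
+# invented here only to make that round-trip property explicit.
+def _exampleQuotingRoundTrip(s):
+    """
+    Return C{True} if C{s} survives both the low-level and the CTCP-level
+    quote/dequote round trips unchanged.
+    """
+    return (irc.lowDequote(irc.lowQuote(s)) == s and
+            irc.ctcpDequote(irc.ctcpQuote(s)) == s)
+
+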
+
+class Dispatcher(irc._CommandDispatcherMixin):
+    """
+    A dispatcher that exposes one known command and handles unknown commands.
+    """
+    prefix = 'disp'
+
+    def disp_working(self, a, b):
+        """
+        A known command that returns its input.
+        """
+        return a, b
+
+
+    def disp_unknown(self, name, a, b):
+        """
+        Handle unknown commands by returning their name and inputs.
+        """
+        return name, a, b
+
+
+
+class DispatcherTests(unittest.TestCase):
+    """
+    Tests for L{irc._CommandDispatcherMixin}.
+    """
+    def test_dispatch(self):
+        """
+        Dispatching a command invokes the correct handler.
+        """
+        disp = Dispatcher()
+        args = (1, 2)
+        res = disp.dispatch('working', *args)
+        self.assertEqual(res, args)
+
+
+    def test_dispatchUnknown(self):
+        """
+        Dispatching an unknown command invokes the default handler.
+        """
+        disp = Dispatcher()
+        name = 'missing'
+        args = (1, 2)
+        res = disp.dispatch(name, *args)
+        self.assertEqual(res, (name,) + args)
+
+
+    def test_dispatchMissingUnknown(self):
+        """
+        Dispatching an unknown command, when no default handler is present,
+        results in an exception being raised.
+        """
+        disp = Dispatcher()
+        disp.disp_unknown = None
+        self.assertRaises(irc.UnhandledCommand, disp.dispatch, 'bar')
+
+
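+# Illustrative sketch, not part of the upstream test module: an invented
+# helper showing the dispatch behaviour asserted above, namely that 'working'
+# resolves to disp_working and anything else falls back to disp_unknown.
+def _exampleDispatch():
+    disp = Dispatcher()
+    return disp.dispatch('working', 1, 2), disp.dispatch('missing', 1, 2)
+
+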
+
+class ServerSupportedFeatureTests(unittest.TestCase):
+    """
+    Tests for L{ServerSupportedFeatures} and related functions.
+    """
+    def test_intOrDefault(self):
+        """
+        L{_intOrDefault} converts values to C{int} if possible, otherwise
+        returns a default value.
+        """
+        self.assertEqual(irc._intOrDefault(None), None)
+        self.assertEqual(irc._intOrDefault([]), None)
+        self.assertEqual(irc._intOrDefault(''), None)
+        self.assertEqual(irc._intOrDefault('hello', 5), 5)
+        self.assertEqual(irc._intOrDefault('123'), 123)
+        self.assertEqual(irc._intOrDefault(123), 123)
+
+
+    def test_splitParam(self):
+        """
+        L{ServerSupportedFeatures._splitParam} splits ISUPPORT parameters
+        into key and values. Parameters without a separator are split into a
+        key and a list containing only the empty string. Escaped parameters
+        are unescaped.
+        """
+        params = [('FOO',         ('FOO', [''])),
+                  ('FOO=',        ('FOO', [''])),
+                  ('FOO=1',       ('FOO', ['1'])),
+                  ('FOO=1,2,3',   ('FOO', ['1', '2', '3'])),
+                  ('FOO=A\\x20B', ('FOO', ['A B'])),
+                  ('FOO=\\x5Cx',  ('FOO', ['\\x'])),
+                  ('FOO=\\',      ('FOO', ['\\'])),
+                  ('FOO=\\n',     ('FOO', ['\\n']))]
+
+        _splitParam = irc.ServerSupportedFeatures._splitParam
+
+        for param, expected in params:
+            res = _splitParam(param)
+            self.assertEqual(res, expected)
+
+        self.assertRaises(ValueError, _splitParam, 'FOO=\\x')
+        self.assertRaises(ValueError, _splitParam, 'FOO=\\xNN')
+        self.assertRaises(ValueError, _splitParam, 'FOO=\\xN')
+        self.assertRaises(ValueError, _splitParam, 'FOO=\\x20\\x')
+
+
+    def test_splitParamArgs(self):
+        """
+        L{ServerSupportedFeatures._splitParamArgs} splits ISUPPORT parameter
+        arguments into key and value.  Arguments without a separator are
+        split into a key and an empty string.
+        """
+        res = irc.ServerSupportedFeatures._splitParamArgs(['A:1', 'B:2', 'C:', 'D'])
+        self.assertEqual(res, [('A', '1'),
+                                ('B', '2'),
+                                ('C', ''),
+                                ('D', '')])
+
+
+    def test_splitParamArgsProcessor(self):
+        """
+        L{ServerSupportedFeatures._splitParamArgs} uses the argument processor
+        passed to it to convert ISUPPORT argument values to some more suitable
+        form.
+        """
+        res = irc.ServerSupportedFeatures._splitParamArgs(['A:1', 'B:2', 'C'],
+                                           irc._intOrDefault)
+        self.assertEqual(res, [('A', 1),
+                                ('B', 2),
+                                ('C', None)])
+
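+
+    # Illustrative sketch, not part of the upstream tests: an invented helper
+    # showing how _splitParamArgs combined with _intOrDefault turns a raw
+    # CHANLIMIT-style argument list into integer limits, e.g.
+    # [('#', 20), ('&', 10)].
+    def _exampleSplitChanLimit(self):
+        return irc.ServerSupportedFeatures._splitParamArgs(
+            ['#:20', '&:10'], irc._intOrDefault)
+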
+
+    def test_parsePrefixParam(self):
+        """
+        L{ServerSupportedFeatures._parsePrefixParam} parses the ISUPPORT PREFIX
+        parameter into a mapping from modes to prefix symbols, returns
+        C{None} if there is no parseable prefix parameter or raises
+        C{ValueError} if the prefix parameter is malformed.
+        """
+        _parsePrefixParam = irc.ServerSupportedFeatures._parsePrefixParam
+        self.assertEqual(_parsePrefixParam(''), None)
+        self.assertRaises(ValueError, _parsePrefixParam, 'hello')
+        self.assertEqual(_parsePrefixParam('(ov)@+'),
+                          {'o': ('@', 0),
+                           'v': ('+', 1)})
+
+
+    def test_parseChanModesParam(self):
+        """
+        L{ServerSupportedFeatures._parseChanModesParam} parses the ISUPPORT
+        CHANMODES parameter into a mapping from mode categories to mode
+        characters. Passing fewer than 4 parameters results in the empty string
+        for the relevant categories. Passing more than 4 parameters raises
+        C{ValueError}.
+        """
+        _parseChanModesParam = irc.ServerSupportedFeatures._parseChanModesParam
+        self.assertEqual(
+            _parseChanModesParam([]),
+            {'addressModes': '',
+             'param': '',
+             'setParam': '',
+             'noParam': ''})
+
+        self.assertEqual(
+            _parseChanModesParam(['b', 'k', 'l', 'imnpst']),
+            {'addressModes': 'b',
+             'param': 'k',
+             'setParam': 'l',
+             'noParam': 'imnpst'})
+
+        self.assertEqual(
+            _parseChanModesParam(['b', 'k', 'l']),
+            {'addressModes': 'b',
+             'param': 'k',
+             'setParam': 'l',
+             'noParam': ''})
+
+        self.assertRaises(
+            ValueError,
+            _parseChanModesParam, ['a', 'b', 'c', 'd', 'e'])
+
+
+    def test_parse(self):
+        """
+        L{ServerSupportedFeatures.parse} changes the internal state of the
+        instance to reflect the features indicated by the parsed ISUPPORT
+        parameters, including unknown parameters and unsetting previously set
+        parameters.
+        """
+        supported = irc.ServerSupportedFeatures()
+        supported.parse(['MODES=4',
+                        'CHANLIMIT=#:20,&:10',
+                        'INVEX',
+                        'EXCEPTS=Z',
+                        'UNKNOWN=A,B,C'])
+
+        self.assertEqual(supported.getFeature('MODES'), 4)
+        self.assertEqual(supported.getFeature('CHANLIMIT'),
+                          [('#', 20),
+                           ('&', 10)])
+        self.assertEqual(supported.getFeature('INVEX'), 'I')
+        self.assertEqual(supported.getFeature('EXCEPTS'), 'Z')
+        self.assertEqual(supported.getFeature('UNKNOWN'), ('A', 'B', 'C'))
+
+        self.assertTrue(supported.hasFeature('INVEX'))
+        supported.parse(['-INVEX'])
+        self.assertFalse(supported.hasFeature('INVEX'))
+        # Unsetting a previously unset parameter should not be a problem.
+        supported.parse(['-INVEX'])
+
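+
+    # Illustrative sketch, not part of the upstream tests: an invented helper
+    # showing the typical flow of feeding raw ISUPPORT tokens to parse() and
+    # reading a parsed feature back; CHANLIMIT here comes back as
+    # [('#', 20), ('&', 10)].
+    def _exampleParseAndQuery(self):
+        supported = irc.ServerSupportedFeatures()
+        supported.parse(['MODES=4', 'CHANLIMIT=#:20,&:10', 'INVEX'])
+        return supported.getFeature('CHANLIMIT')
+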
+
+    def _parse(self, features):
+        """
+        Parse all specified features according to the ISUPPORT specifications.
+
+        @type features: C{list} of C{(featureName, value)}
+        @param features: Feature names and values to parse
+
+        @rtype: L{irc.ServerSupportedFeatures}
+        """
+        supported = irc.ServerSupportedFeatures()
+        features = ['%s=%s' % (name, value or '')
+                    for name, value in features]
+        supported.parse(features)
+        return supported
+
+
+    def _parseFeature(self, name, value=None):
+        """
+        Parse a feature, with the given name and value, according to the
+        ISUPPORT specifications and return the parsed value.
+        """
+        supported = self._parse([(name, value)])
+        return supported.getFeature(name)
+
+
+    def _testIntOrDefaultFeature(self, name, default=None):
+        """
+        Perform some common tests on a feature known to use L{_intOrDefault}.
+        """
+        self.assertEqual(
+            self._parseFeature(name, None),
+            default)
+        self.assertEqual(
+            self._parseFeature(name, 'notanint'),
+            default)
+        self.assertEqual(
+            self._parseFeature(name, '42'),
+            42)
+
+
+    def _testFeatureDefault(self, name, features=None):
+        """
+        Features known to have default values are reported as being present by
+        L{irc.ServerSupportedFeatures.hasFeature}, and their value defaults
+        correctly when they don't appear in an ISUPPORT message.
+        """
+        default = irc.ServerSupportedFeatures()._features[name]
+
+        if features is None:
+            features = [('DEFINITELY_NOT', 'a_feature')]
+
+        supported = self._parse(features)
+        self.assertTrue(supported.hasFeature(name))
+        self.assertEqual(supported.getFeature(name), default)
+
+
+    def test_support_CHANMODES(self):
+        """
+        The CHANMODES ISUPPORT parameter is parsed into a C{dict} giving the
+        four mode categories, C{'addressModes'}, C{'param'}, C{'setParam'}, and
+        C{'noParam'}.
+        """
+        self._testFeatureDefault('CHANMODES')
+        self._testFeatureDefault('CHANMODES', [('CHANMODES', 'b,,lk,')])
+        self._testFeatureDefault('CHANMODES', [('CHANMODES', 'b,,lk,ha,ha')])
+
+        self.assertEqual(
+            self._parseFeature('CHANMODES', ''),
+            {'addressModes': '',
+             'param': '',
+             'setParam': '',
+             'noParam': ''})
+
+        self.assertEqual(
+            self._parseFeature('CHANMODES', ',A'),
+            {'addressModes': '',
+             'param': 'A',
+             'setParam': '',
+             'noParam': ''})
+
+        self.assertEqual(
+            self._parseFeature('CHANMODES', 'A,Bc,Def,Ghij'),
+            {'addressModes': 'A',
+             'param': 'Bc',
+             'setParam': 'Def',
+             'noParam': 'Ghij'})
+
+
+    def test_support_IDCHAN(self):
+        """
+        The IDCHAN support parameter is parsed into a sequence of two-tuples
+        giving channel prefix and ID length pairs.
+        """
+        self.assertEqual(
+            self._parseFeature('IDCHAN', '!:5'),
+            [('!', '5')])
+
+
+    def test_support_MAXLIST(self):
+        """
+        The MAXLIST support parameter is parsed into a sequence of two-tuples
+        giving modes and their limits.
+        """
+        self.assertEqual(
+            self._parseFeature('MAXLIST', 'b:25,eI:50'),
+            [('b', 25), ('eI', 50)])
+        # A non-integer parameter argument results in None.
+        self.assertEqual(
+            self._parseFeature('MAXLIST', 'b:25,eI:50,a:3.1415'),
+            [('b', 25), ('eI', 50), ('a', None)])
+        self.assertEqual(
+            self._parseFeature('MAXLIST', 'b:25,eI:50,a:notanint'),
+            [('b', 25), ('eI', 50), ('a', None)])
+
+
+    def test_support_NETWORK(self):
+        """
+        The NETWORK support parameter is parsed as the network name, as
+        specified by the server.
+        """
+        self.assertEqual(
+            self._parseFeature('NETWORK', 'IRCNet'),
+            'IRCNet')
+
+
+    def test_support_SAFELIST(self):
+        """
+        The SAFELIST support parameter is parsed into a boolean indicating
+        whether the safe "list" command is supported or not.
+        """
+        self.assertEqual(
+            self._parseFeature('SAFELIST'),
+            True)
+
+
+    def test_support_STATUSMSG(self):
+        """
+        The STATUSMSG support parameter is parsed into a string of channel
+        status prefixes for which the exclusive channel notice method is
+        supported.
+        """
+        self.assertEqual(
+            self._parseFeature('STATUSMSG', '@+'),
+            '@+')
+
+
+    def test_support_TARGMAX(self):
+        """
+        The TARGMAX support parameter is parsed into a dictionary, mapping
+        strings to integers, of the maximum number of targets for a particular
+        command.
+        """
+        self.assertEqual(
+            self._parseFeature('TARGMAX', 'PRIVMSG:4,NOTICE:3'),
+            {'PRIVMSG': 4,
+             'NOTICE': 3})
+        # A non-integer parameter argument results in None.
+        self.assertEqual(
+            self._parseFeature('TARGMAX', 'PRIVMSG:4,NOTICE:3,KICK:3.1415'),
+            {'PRIVMSG': 4,
+             'NOTICE': 3,
+             'KICK': None})
+        self.assertEqual(
+            self._parseFeature('TARGMAX', 'PRIVMSG:4,NOTICE:3,KICK:notanint'),
+            {'PRIVMSG': 4,
+             'NOTICE': 3,
+             'KICK': None})
+
+
+    def test_support_NICKLEN(self):
+        """
+        The NICKLEN support parameter is parsed into an integer value
+        indicating the maximum length of a nickname the client may use,
+        otherwise, if the parameter is missing or invalid, the default value
+        (as specified by RFC 1459) is used.
+        """
+        default = irc.ServerSupportedFeatures()._features['NICKLEN']
+        self._testIntOrDefaultFeature('NICKLEN', default)
+
+
+    def test_support_CHANNELLEN(self):
+        """
+        The CHANNELLEN support parameter is parsed into an integer value
+        indicating the maximum channel name length; otherwise, if the
+        parameter is missing or invalid, the default value (as specified by
+        RFC 1459) is used.
+        """
+        default = irc.ServerSupportedFeatures()._features['CHANNELLEN']
+        self._testIntOrDefaultFeature('CHANNELLEN', default)
+
+
+    def test_support_CHANTYPES(self):
+        """
+        The CHANTYPES support parameter is parsed into a tuple of
+        valid channel prefix characters.
+        """
+        self._testFeatureDefault('CHANTYPES')
+
+        self.assertEqual(
+            self._parseFeature('CHANTYPES', '#&%'),
+            ('#', '&', '%'))
+
+
+    def test_support_KICKLEN(self):
+        """
+        The KICKLEN support parameter is parsed into an integer value
+        indicating the maximum length of a kick message a client may use.
+        """
+        self._testIntOrDefaultFeature('KICKLEN')
+
+
+    def test_support_PREFIX(self):
+        """
+        The PREFIX support parameter is parsed into a dictionary mapping
+        modes to two-tuples of status symbol and priority.
+        """
+        self._testFeatureDefault('PREFIX')
+        self._testFeatureDefault('PREFIX', [('PREFIX', 'hello')])
+
+        self.assertEqual(
+            self._parseFeature('PREFIX', None),
+            None)
+        self.assertEqual(
+            self._parseFeature('PREFIX', '(ohv)@%+'),
+            {'o': ('@', 0),
+             'h': ('%', 1),
+             'v': ('+', 2)})
+        self.assertEqual(
+            self._parseFeature('PREFIX', '(hov)@%+'),
+            {'o': ('%', 1),
+             'h': ('@', 0),
+             'v': ('+', 2)})
+
+
+    def test_support_TOPICLEN(self):
+        """
+        The TOPICLEN support parameter is parsed into an integer value
+        indicating the maximum length of a topic a client may set.
+        """
+        self._testIntOrDefaultFeature('TOPICLEN')
+
+
+    def test_support_MODES(self):
+        """
+        The MODES support parameter is parsed into an integer value
+        indicating the maximum number of "variable" modes (defined as being
+        modes from the C{addressModes}, C{param} or C{setParam} categories of
+        the C{CHANMODES} ISUPPORT parameter) which may be set on a channel
+        by a single MODE command from a client.
+        """
+        self._testIntOrDefaultFeature('MODES')
+
+
+    def test_support_EXCEPTS(self):
+        """
+        The EXCEPTS support parameter is parsed into the mode character
+        to be used for "ban exception" modes. If no parameter is specified
+        then the character C{e} is assumed.
+        """
+        self.assertEqual(
+            self._parseFeature('EXCEPTS', 'Z'),
+            'Z')
+        self.assertEqual(
+            self._parseFeature('EXCEPTS'),
+            'e')
+
+
+    def test_support_INVEX(self):
+        """
+        The INVEX support parameter is parsed into the mode character to be
+        used for "invite exception" modes. If no parameter is specified then
+        the character C{I} is assumed.
+        """
+        self.assertEqual(
+            self._parseFeature('INVEX', 'Z'),
+            'Z')
+        self.assertEqual(
+            self._parseFeature('INVEX'),
+            'I')
+
+
+
+class IRCClientWithoutLogin(irc.IRCClient):
+    performLogin = 0
+
+
+
+class CTCPTest(unittest.TestCase):
+    """
+    Tests for L{twisted.words.protocols.irc.IRCClient} CTCP handling.
+    """
+    def setUp(self):
+        self.file = StringIOWithoutClosing()
+        self.transport = protocol.FileWrapper(self.file)
+        self.client = IRCClientWithoutLogin()
+        self.client.makeConnection(self.transport)
+
+        self.addCleanup(self.transport.loseConnection)
+        self.addCleanup(self.client.connectionLost, None)
+
+
+    def test_ERRMSG(self):
+        """Testing CTCP query ERRMSG.
+
+        Not because this is an especially important case in the
+        field, but it does go through the entire dispatch/decode/encode
+        process.
+        """
+
+        errQuery = (":nick!guy at over.there PRIVMSG #theChan :"
+                    "%(X)cERRMSG t%(X)c%(EOL)s"
+                    % {'X': irc.X_DELIM,
+                       'EOL': irc.CR + irc.LF})
+
+        errReply = ("NOTICE nick :%(X)cERRMSG t :"
+                    "No error has occoured.%(X)c%(EOL)s"
+                    % {'X': irc.X_DELIM,
+                       'EOL': irc.CR + irc.LF})
+
+        self.client.dataReceived(errQuery)
+        reply = self.file.getvalue()
+
+        self.assertEqual(errReply, reply)
+
+
+    def test_noNumbersVERSION(self):
+        """
+        If attributes for version information on L{IRCClient} are set to
+        C{None}, the parts of the CTCP VERSION response they correspond to
+        are omitted.
+        """
+        self.client.versionName = "FrobozzIRC"
+        self.client.ctcpQuery_VERSION("nick!guy at over.there", "#theChan", None)
+        versionReply = ("NOTICE nick :%(X)cVERSION %(vname)s::"
+                        "%(X)c%(EOL)s"
+                        % {'X': irc.X_DELIM,
+                           'EOL': irc.CR + irc.LF,
+                           'vname': self.client.versionName})
+        reply = self.file.getvalue()
+        self.assertEqual(versionReply, reply)
+
+
+    def test_fullVERSION(self):
+        """
+        The response to a CTCP VERSION query includes the version number and
+        environment information, as specified by L{IRCClient.versionNum} and
+        L{IRCClient.versionEnv}.
+        """
+        self.client.versionName = "FrobozzIRC"
+        self.client.versionNum = "1.2g"
+        self.client.versionEnv = "ZorkOS"
+        self.client.ctcpQuery_VERSION("nick!guy at over.there", "#theChan", None)
+        versionReply = ("NOTICE nick :%(X)cVERSION %(vname)s:%(vnum)s:%(venv)s"
+                        "%(X)c%(EOL)s"
+                        % {'X': irc.X_DELIM,
+                           'EOL': irc.CR + irc.LF,
+                           'vname': self.client.versionName,
+                           'vnum': self.client.versionNum,
+                           'venv': self.client.versionEnv})
+        reply = self.file.getvalue()
+        self.assertEqual(versionReply, reply)
+
+
+    def test_noDuplicateCTCPDispatch(self):
+        """
+        A CTCP query duplicated within a single message is dispatched only
+        once and no reply is made for the duplicates.
+        """
+        def testCTCP(user, channel, data):
+            self.called += 1
+
+        self.called = 0
+        self.client.ctcpQuery_TESTTHIS = testCTCP
+
+        self.client.irc_PRIVMSG(
+            'foo!bar@baz.quux', [
+                '#chan',
+                '%(X)sTESTTHIS%(X)sfoo%(X)sTESTTHIS%(X)s' % {'X': irc.X_DELIM}])
+        self.assertEqual(
+            self.file.getvalue(),
+            '')
+        self.assertEqual(self.called, 1)
+
+
+    def test_noDefaultDispatch(self):
+        """
+        The fallback handler is invoked for unrecognized CTCP messages.
+        """
+        def unknownQuery(user, channel, tag, data):
+            self.calledWith = (user, channel, tag, data)
+            self.called += 1
+
+        self.called = 0
+        self.patch(self.client, 'ctcpUnknownQuery', unknownQuery)
+        self.client.irc_PRIVMSG(
+            'foo!bar@baz.quux', [
+                '#chan',
+                '%(X)sNOTREAL%(X)s' % {'X': irc.X_DELIM}])
+        self.assertEqual(
+            self.file.getvalue(),
+            '')
+        self.assertEqual(
+            self.calledWith,
+            ('foo!bar@baz.quux', '#chan', 'NOTREAL', None))
+        self.assertEqual(self.called, 1)
+
+        # The fallback handler is not invoked for duplicate unknown CTCP
+        # messages.
+        self.client.irc_PRIVMSG(
+            'foo!bar@baz.quux', [
+                '#chan',
+                '%(X)sNOTREAL%(X)sfoo%(X)sNOTREAL%(X)s' % {'X': irc.X_DELIM}])
+        self.assertEqual(self.called, 2)
+
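+
+    # Illustrative sketch, not part of the upstream tests: an invented helper
+    # showing the shape of the CTCP payloads built inline above, an ordinary
+    # PRIVMSG body wrapped in X_DELIM markers.
+    def _exampleCTCPPayload(self, tag, data):
+        return irc.X_DELIM + tag + ' ' + data + irc.X_DELIM
+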
+
+
+class NoticingClient(IRCClientWithoutLogin, object):
+    methods = {
+        'created': ('when',),
+        'yourHost': ('info',),
+        'myInfo': ('servername', 'version', 'umodes', 'cmodes'),
+        'luserClient': ('info',),
+        'bounce': ('info',),
+        'isupport': ('options',),
+        'luserChannels': ('channels',),
+        'luserOp': ('ops',),
+        'luserMe': ('info',),
+        'receivedMOTD': ('motd',),
+
+        'privmsg': ('user', 'channel', 'message'),
+        'joined': ('channel',),
+        'left': ('channel',),
+        'noticed': ('user', 'channel', 'message'),
+        'modeChanged': ('user', 'channel', 'set', 'modes', 'args'),
+        'pong': ('user', 'secs'),
+        'signedOn': (),
+        'kickedFrom': ('channel', 'kicker', 'message'),
+        'nickChanged': ('nick',),
+
+        'userJoined': ('user', 'channel'),
+        'userLeft': ('user', 'channel'),
+        'userKicked': ('user', 'channel', 'kicker', 'message'),
+        'action': ('user', 'channel', 'data'),
+        'topicUpdated': ('user', 'channel', 'newTopic'),
+        'userRenamed': ('oldname', 'newname')}
+
+
+    def __init__(self, *a, **kw):
+        # It is important that IRCClient.__init__ is not called here:
+        # traditionally it did not exist, so nothing may be initialised there
+        # in a way that would break subclasses which do not (or cannot) invoke
+        # the base implementation.  Any protocol initialisation should happen
+        # in connectionMade.
+        self.calls = []
+
+
+    def __getattribute__(self, name):
+        if name.startswith('__') and name.endswith('__'):
+            return super(NoticingClient, self).__getattribute__(name)
+        try:
+            args = super(NoticingClient, self).__getattribute__('methods')[name]
+        except KeyError:
+            return super(NoticingClient, self).__getattribute__(name)
+        else:
+            return self.makeMethod(name, args)
+
+
+    def makeMethod(self, fname, args):
+        def method(*a, **kw):
+            if len(a) > len(args):
+                raise TypeError("TypeError: %s() takes %d arguments "
+                                "(%d given)" % (fname, len(args), len(a)))
+            for (name, value) in zip(args, a):
+                if name in kw:
+                    raise TypeError("TypeError: %s() got multiple values "
+                                    "for keyword argument '%s'" % (fname, name))
+                else:
+                    kw[name] = value
+            if len(kw) != len(args):
+                raise TypeError("TypeError: %s() takes %d arguments "
+                                "(%d given)" % (fname, len(args), len(a)))
+            self.calls.append((fname, kw))
+        return method
+
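+
+# Illustrative sketch, not part of the upstream test module: an invented
+# helper showing what NoticingClient records; calling a proxied callback such
+# as joined('#chan') leaves ('joined', {'channel': '#chan'}) in calls.
+def _exampleNoticingClient():
+    client = NoticingClient()
+    client.joined('#chan')
+    return client.calls
+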
+
+def pop(dict, key, default):
+    try:
+        value = dict[key]
+    except KeyError:
+        return default
+    else:
+        del dict[key]
+        return value
+
+
+
+class ClientImplementationTests(unittest.TestCase):
+    def setUp(self):
+        self.transport = StringTransport()
+        self.client = NoticingClient()
+        self.client.makeConnection(self.transport)
+
+        self.addCleanup(self.transport.loseConnection)
+        self.addCleanup(self.client.connectionLost, None)
+
+
+    def _serverTestImpl(self, code, msg, func, **kw):
+        host = pop(kw, 'host', 'server.host')
+        nick = pop(kw, 'nick', 'nickname')
+        args = pop(kw, 'args', '')
+
+        message = (":" +
+                   host + " " +
+                   code + " " +
+                   nick + " " +
+                   args + " :" +
+                   msg + "\r\n")
+
+        self.client.dataReceived(message)
+        self.assertEqual(
+            self.client.calls,
+            [(func, kw)])
+
+
+    def testYourHost(self):
+        msg = "Your host is some.host[blah.blah/6667], running version server-version-3"
+        self._serverTestImpl("002", msg, "yourHost", info=msg)
+
+
+    def testCreated(self):
+        msg = "This server was cobbled together Fri Aug 13 18:00:25 UTC 2004"
+        self._serverTestImpl("003", msg, "created", when=msg)
+
+
+    def testMyInfo(self):
+        msg = "server.host server-version abcDEF bcdEHI"
+        self._serverTestImpl("004", msg, "myInfo",
+                             servername="server.host",
+                             version="server-version",
+                             umodes="abcDEF",
+                             cmodes="bcdEHI")
+
+
+    def testLuserClient(self):
+        msg = "There are 9227 victims and 9542 hiding on 24 servers"
+        self._serverTestImpl("251", msg, "luserClient",
+                             info=msg)
+
+
+    def _sendISUPPORT(self):
+        args = ("MODES=4 CHANLIMIT=#:20 NICKLEN=16 USERLEN=10 HOSTLEN=63 "
+                "TOPICLEN=450 KICKLEN=450 CHANNELLEN=30 KEYLEN=23 CHANTYPES=# "
+                "PREFIX=(ov)@+ CASEMAPPING=ascii CAPAB IRCD=dancer")
+        msg = "are available on this server"
+        self._serverTestImpl("005", msg, "isupport", args=args,
+                             options=['MODES=4',
+                                      'CHANLIMIT=#:20',
+                                      'NICKLEN=16',
+                                      'USERLEN=10',
+                                      'HOSTLEN=63',
+                                      'TOPICLEN=450',
+                                      'KICKLEN=450',
+                                      'CHANNELLEN=30',
+                                      'KEYLEN=23',
+                                      'CHANTYPES=#',
+                                      'PREFIX=(ov)@+',
+                                      'CASEMAPPING=ascii',
+                                      'CAPAB',
+                                      'IRCD=dancer'])
+
+
+    def test_ISUPPORT(self):
+        """
+        The client parses ISUPPORT messages sent by the server and calls
+        L{IRCClient.isupport}.
+        """
+        self._sendISUPPORT()
+
+
+    def testBounce(self):
+        msg = "Try server some.host, port 321"
+        self._serverTestImpl("010", msg, "bounce",
+                             info=msg)
+
+
+    def testLuserChannels(self):
+        args = "7116"
+        msg = "channels formed"
+        self._serverTestImpl("254", msg, "luserChannels", args=args,
+                             channels=int(args))
+
+
+    def testLuserOp(self):
+        args = "34"
+        msg = "flagged staff members"
+        self._serverTestImpl("252", msg, "luserOp", args=args,
+                             ops=int(args))
+
+
+    def testLuserMe(self):
+        msg = "I have 1937 clients and 0 servers"
+        self._serverTestImpl("255", msg, "luserMe",
+                             info=msg)
+
+
+    def test_receivedMOTD(self):
+        """
+        Lines received in I{RPL_MOTDSTART} and I{RPL_MOTD} are delivered to
+        L{IRCClient.receivedMOTD} when I{RPL_ENDOFMOTD} is received.
+        """
+        lines = [
+            ":host.name 375 nickname :- host.name Message of the Day -",
+            ":host.name 372 nickname :- Welcome to host.name",
+            ":host.name 376 nickname :End of /MOTD command."]
+        for L in lines:
+            self.assertEqual(self.client.calls, [])
+            self.client.dataReceived(L + '\r\n')
+
+        self.assertEqual(
+            self.client.calls,
+            [("receivedMOTD", {"motd": ["host.name Message of the Day -", "Welcome to host.name"]})])
+
+        # After the motd is delivered, the tracking variable should be
+        # reset.
+        self.assertIdentical(self.client.motd, None)
+
+
+    def test_withoutMOTDSTART(self):
+        """
+        If L{IRCClient} receives I{RPL_MOTD} and I{RPL_ENDOFMOTD} without
+        receiving I{RPL_MOTDSTART}, L{IRCClient.receivedMOTD} is still
+        called with a list of MOTD lines.
+        """
+        lines = [
+            ":host.name 372 nickname :- Welcome to host.name",
+            ":host.name 376 nickname :End of /MOTD command."]
+
+        for L in lines:
+            self.client.dataReceived(L + '\r\n')
+
+        self.assertEqual(
+            self.client.calls,
+            [("receivedMOTD", {"motd": ["Welcome to host.name"]})])
+
+
+    def _clientTestImpl(self, sender, group, type, msg, func, **kw):
+        ident = pop(kw, 'ident', 'ident')
+        host = pop(kw, 'host', 'host')
+
+        wholeUser = sender + '!' + ident + '@' + host
+        message = (":" +
+                   wholeUser + " " +
+                   type + " " +
+                   group + " :" +
+                   msg + "\r\n")
+        self.client.dataReceived(message)
+        self.assertEqual(
+            self.client.calls,
+            [(func, kw)])
+        self.client.calls = []
+
+
+    def testPrivmsg(self):
+        msg = "Tooty toot toot."
+        self._clientTestImpl("sender", "#group", "PRIVMSG", msg, "privmsg",
+                             ident="ident", host="host",
+                             # Expected results below
+                             user="sender!ident@host",
+                             channel="#group",
+                             message=msg)
+
+        self._clientTestImpl("sender", "recipient", "PRIVMSG", msg, "privmsg",
+                             ident="ident", host="host",
+                             # Expected results below
+                             user="sender!ident@host",
+                             channel="recipient",
+                             message=msg)
+
+
+    def test_getChannelModeParams(self):
+        """
+        L{IRCClient.getChannelModeParams} uses ISUPPORT information, either
+        given by the server or defaults, to determine which channel modes
+        require arguments when being added or removed.
+        """
+        add, remove = map(sorted, self.client.getChannelModeParams())
+        self.assertEqual(add, ['b', 'h', 'k', 'l', 'o', 'v'])
+        self.assertEqual(remove, ['b', 'h', 'o', 'v'])
+
+        def removeFeature(name):
+            name = '-' + name
+            msg = "are available on this server"
+            self._serverTestImpl(
+                '005', msg, 'isupport', args=name, options=[name])
+            self.assertIdentical(
+                self.client.supported.getFeature(name), None)
+            self.client.calls = []
+
+        # Remove CHANMODES feature, causing getFeature('CHANMODES') to return
+        # None.
+        removeFeature('CHANMODES')
+        add, remove = map(sorted, self.client.getChannelModeParams())
+        self.assertEqual(add, ['h', 'o', 'v'])
+        self.assertEqual(remove, ['h', 'o', 'v'])
+
+        # Remove PREFIX feature, causing getFeature('PREFIX') to return None.
+        removeFeature('PREFIX')
+        add, remove = map(sorted, self.client.getChannelModeParams())
+        self.assertEqual(add, [])
+        self.assertEqual(remove, [])
+
+        # Restore ISUPPORT features.
+        self._sendISUPPORT()
+        self.assertNotIdentical(
+            self.client.supported.getFeature('PREFIX'), None)
+
+
+    def test_getUserModeParams(self):
+        """
+        L{IRCClient.getUserModeParams} returns a list of user modes (modes that
+        the user sets on themself, outside of channel modes) that require
+        parameters when added and removed, respectively.
+        """
+        add, remove = map(sorted, self.client.getUserModeParams())
+        self.assertEqual(add, [])
+        self.assertEqual(remove, [])
+
+
+    def _sendModeChange(self, msg, args='', target=None):
+        """
+        Build a MODE string and send it to the client.
+        """
+        if target is None:
+            target = '#chan'
+        message = ":Wolf!~wolf at yok.utu.fi MODE %s %s %s\r\n" % (
+            target, msg, args)
+        self.client.dataReceived(message)
+
+
+    def _parseModeChange(self, results, target=None):
+        """
+        Parse the results, do some test and return the data to check.
+        """
+        if target is None:
+            target = '#chan'
+
+        for n, result in enumerate(results):
+            method, data = result
+            self.assertEqual(method, 'modeChanged')
+            self.assertEqual(data['user'], 'Wolf!~wolf@yok.utu.fi')
+            self.assertEqual(data['channel'], target)
+            results[n] = tuple([data[key] for key in ('set', 'modes', 'args')])
+        return results
+
+
+    def _checkModeChange(self, expected, target=None):
+        """
+        Compare the expected result with the one returned by the client.
+        """
+        result = self._parseModeChange(self.client.calls, target)
+        self.assertEqual(result, expected)
+        self.client.calls = []
+
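+
+    # Illustrative sketch, not part of the upstream tests: the raw line that
+    # _sendModeChange('+o', 'a_user') feeds to the client is the one returned
+    # by this invented helper.
+    def _exampleModeLine(self):
+        return ":Wolf!~wolf@yok.utu.fi MODE #chan +o a_user\r\n"
+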
+
+    def test_modeMissingDirection(self):
+        """
+        Mode strings that do not begin with a directional character, C{'+'} or
+        C{'-'}, have C{'+'} automatically prepended.
+        """
+        self._sendModeChange('s')
+        self._checkModeChange([(True, 's', (None,))])
+
+
+    def test_noModeParameters(self):
+        """
+        No parameters are passed to L{IRCClient.modeChanged} for modes that
+        don't take any parameters.
+        """
+        self._sendModeChange('-s')
+        self._checkModeChange([(False, 's', (None,))])
+        self._sendModeChange('+n')
+        self._checkModeChange([(True, 'n', (None,))])
+
+
+    def test_oneModeParameter(self):
+        """
+        Parameters are passed to L{IRCClient.modeChanged} for modes that take
+        parameters.
+        """
+        self._sendModeChange('+o', 'a_user')
+        self._checkModeChange([(True, 'o', ('a_user',))])
+        self._sendModeChange('-o', 'a_user')
+        self._checkModeChange([(False, 'o', ('a_user',))])
+
+
+    def test_mixedModes(self):
+        """
+        Mixing adding and removing modes that do and don't take parameters
+        invokes L{IRCClient.modeChanged} with mode characters and parameters
+        that match up.
+        """
+        self._sendModeChange('+osv', 'a_user another_user')
+        self._checkModeChange([(True, 'osv', ('a_user', None, 'another_user'))])
+        self._sendModeChange('+v-os', 'a_user another_user')
+        self._checkModeChange([(True, 'v', ('a_user',)),
+                               (False, 'os', ('another_user', None))])
+
+
+    def test_tooManyModeParameters(self):
+        """
+        Passing an argument to modes that take no parameters results in
+        L{IRCClient.modeChanged} not being called and an error being logged.
+        """
+        self._sendModeChange('+s', 'wrong')
+        self._checkModeChange([])
+        errors = self.flushLoggedErrors(irc.IRCBadModes)
+        self.assertEqual(len(errors), 1)
+        self.assertSubstring(
+            'Too many parameters', errors[0].getErrorMessage())
+
+
+    def test_tooFewModeParameters(self):
+        """
+        Passing no arguments to modes that do take parameters results in
+        L{IRCClient.modeChanged} not being called and an error being logged.
+        """
+        self._sendModeChange('+o')
+        self._checkModeChange([])
+        errors = self.flushLoggedErrors(irc.IRCBadModes)
+        self.assertEqual(len(errors), 1)
+        self.assertSubstring(
+            'Not enough parameters', errors[0].getErrorMessage())
+
+
+    def test_userMode(self):
+        """
+        A C{MODE} message whose target is our user (the nickname of our user,
+        to be precise), as opposed to a channel, will be parsed according to
+        the modes specified by L{IRCClient.getUserModeParams}.
+        """
+        target = self.client.nickname
+        # Mode "o" on channels is supposed to take a parameter, but since this
+        # is not a channel this will not cause an exception.
+        self._sendModeChange('+o', target=target)
+        self._checkModeChange([(True, 'o', (None,))], target=target)
+
+        def getUserModeParams():
+            return ['Z', '']
+
+        # Introduce our own user mode that takes an argument.
+        self.patch(self.client, 'getUserModeParams', getUserModeParams)
+
+        self._sendModeChange('+Z', 'an_arg', target=target)
+        self._checkModeChange([(True, 'Z', ('an_arg',))], target=target)
+
+
+    def test_heartbeat(self):
+        """
+        When the I{RPL_WELCOME} message is received a heartbeat is started that
+        will send a I{PING} message to the IRC server every
+        L{irc.IRCClient.heartbeatInterval} seconds. When the transport is
+        closed the heartbeat looping call is stopped too.
+        """
+        def _createHeartbeat():
+            heartbeat = self._originalCreateHeartbeat()
+            heartbeat.clock = self.clock
+            return heartbeat
+
+        self.clock = task.Clock()
+        self._originalCreateHeartbeat = self.client._createHeartbeat
+        self.patch(self.client, '_createHeartbeat', _createHeartbeat)
+
+        self.assertIdentical(self.client._heartbeat, None)
+        self.client.irc_RPL_WELCOME('foo', [])
+        self.assertNotIdentical(self.client._heartbeat, None)
+        self.assertEqual(self.client.hostname, 'foo')
+
+        # Pump the clock enough to trigger one LoopingCall.
+        self.assertEqual(self.transport.value(), '')
+        self.clock.advance(self.client.heartbeatInterval)
+        self.assertEqual(self.transport.value(), 'PING foo\r\n')
+
+        # When the connection is lost the heartbeat is stopped.
+        self.transport.loseConnection()
+        self.client.connectionLost(None)
+        self.assertEqual(
+            len(self.clock.getDelayedCalls()), 0)
+        self.assertIdentical(self.client._heartbeat, None)
+
+
+    def test_heartbeatDisabled(self):
+        """
+        If L{irc.IRCClient.heartbeatInterval} is set to C{None} then no
+        heartbeat is created.
+        """
+        self.assertIdentical(self.client._heartbeat, None)
+        self.client.heartbeatInterval = None
+        self.client.irc_RPL_WELCOME('foo', [])
+        self.assertIdentical(self.client._heartbeat, None)
+
+
+
+class BasicServerFunctionalityTestCase(unittest.TestCase):
+    def setUp(self):
+        self.f = StringIOWithoutClosing()
+        self.t = protocol.FileWrapper(self.f)
+        self.p = irc.IRC()
+        self.p.makeConnection(self.t)
+
+
+    def check(self, s):
+        self.assertEqual(self.f.getvalue(), s)
+
+
+    def testPrivmsg(self):
+        self.p.privmsg("this-is-sender", "this-is-recip", "this is message")
+        self.check(":this-is-sender PRIVMSG this-is-recip :this is message\r\n")
+
+
+    def testNotice(self):
+        self.p.notice("this-is-sender", "this-is-recip", "this is notice")
+        self.check(":this-is-sender NOTICE this-is-recip :this is notice\r\n")
+
+
+    def testAction(self):
+        self.p.action("this-is-sender", "this-is-recip", "this is action")
+        self.check(":this-is-sender ACTION this-is-recip :this is action\r\n")
+
+
+    def testJoin(self):
+        self.p.join("this-person", "#this-channel")
+        self.check(":this-person JOIN #this-channel\r\n")
+
+
+    def testPart(self):
+        self.p.part("this-person", "#that-channel")
+        self.check(":this-person PART #that-channel\r\n")
+
+
+    def testWhois(self):
+        """
+        Verify that a whois request is answered by the server with the right
+        sequence of protocol replies.
+        """
+        timestamp = int(time.time()-100)
+        hostname = self.p.hostname
+        req = 'requesting-nick'
+        targ = 'target-nick'
+        self.p.whois(req, targ, 'target', 'host.com',
+                'Target User', 'irc.host.com', 'A fake server', False,
+                12, timestamp, ['#fakeusers', '#fakemisc'])
+        expected = '\r\n'.join([
+':%(hostname)s 311 %(req)s %(targ)s target host.com * :Target User',
+':%(hostname)s 312 %(req)s %(targ)s irc.host.com :A fake server',
+':%(hostname)s 317 %(req)s %(targ)s 12 %(timestamp)s :seconds idle, signon time',
+':%(hostname)s 319 %(req)s %(targ)s :#fakeusers #fakemisc',
+':%(hostname)s 318 %(req)s %(targ)s :End of WHOIS list.',
+'']) % dict(hostname=hostname, timestamp=timestamp, req=req, targ=targ)
+        self.check(expected)
+
+
+
+class DummyClient(irc.IRCClient):
+    """
+    A L{twisted.words.protocols.irc.IRCClient} that stores sent lines in a
+    C{list} rather than transmitting them.
+    """
+    def __init__(self):
+        self.lines = []
+
+
+    def connectionMade(self):
+        irc.IRCClient.connectionMade(self)
+        self.lines = []
+
+
+    def _truncateLine(self, line):
+        """
+        Truncate an IRC line to the maximum allowed length.
+        """
+        return line[:irc.MAX_COMMAND_LENGTH - len(self.delimiter)]
+
+
+    def lineReceived(self, line):
+        # Emulate IRC servers throwing away our important data.
+        line = self._truncateLine(line)
+        return irc.IRCClient.lineReceived(self, line)
+
+
+    def sendLine(self, m):
+        self.lines.append(self._truncateLine(m))
+
+
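+# Illustrative sketch, not part of the upstream test module: an invented
+# helper making explicit the truncation budget DummyClient applies, namely
+# MAX_COMMAND_LENGTH minus the two-byte line delimiter.
+def _exampleTruncatedLength():
+    client = DummyClient()
+    return len(client._truncateLine('x' * (irc.MAX_COMMAND_LENGTH * 2)))
+
+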
+
+class ClientInviteTests(unittest.TestCase):
+    """
+    Tests for L{IRCClient.invite}.
+    """
+    def setUp(self):
+        """
+        Create a L{DummyClient} to call C{invite} on in test methods.
+        """
+        self.client = DummyClient()
+
+
+    def test_channelCorrection(self):
+        """
+        If the channel name passed to L{IRCClient.invite} does not begin with a
+        channel prefix character, one is prepended to it.
+        """
+        self.client.invite('foo', 'bar')
+        self.assertEqual(self.client.lines, ['INVITE foo #bar'])
+
+
+    def test_invite(self):
+        """
+        L{IRCClient.invite} sends an I{INVITE} message with the specified
+        username and a channel.
+        """
+        self.client.invite('foo', '#bar')
+        self.assertEqual(self.client.lines, ['INVITE foo #bar'])
+
+
+
+class ClientMsgTests(unittest.TestCase):
+    """
+    Tests for messages sent with L{twisted.words.protocols.irc.IRCClient}.
+    """
+    def setUp(self):
+        self.client = DummyClient()
+        self.client.connectionMade()
+
+
+    def test_singleLine(self):
+        """
+        A message containing no newlines is sent in a single command.
+        """
+        self.client.msg('foo', 'bar')
+        self.assertEqual(self.client.lines, ['PRIVMSG foo :bar'])
+
+
+    def test_invalidMaxLength(self):
+        """
+        Specifying a C{length} value to L{IRCClient.msg} that is too short to
+        contain the protocol command to send a message raises C{ValueError}.
+        """
+        self.assertRaises(ValueError, self.client.msg, 'foo', 'bar', 0)
+        self.assertRaises(ValueError, self.client.msg, 'foo', 'bar', 3)
+
+
+    def test_multipleLine(self):
+        """
+        Messages longer than the C{length} parameter to L{IRCClient.msg} will
+        be split and sent in multiple commands.
+        """
+        maxLen = len('PRIVMSG foo :') + 3 + 2 # 2 for line endings
+        self.client.msg('foo', 'barbazbo', maxLen)
+        self.assertEqual(
+            self.client.lines,
+            ['PRIVMSG foo :bar',
+             'PRIVMSG foo :baz',
+             'PRIVMSG foo :bo'])
+
+
+    def test_sufficientWidth(self):
+        """
+        Messages exactly equal in length to the C{length} parameter to
+        L{IRCClient.msg} are sent in a single command.
+        """
+        msg = 'barbazbo'
+        maxLen = len('PRIVMSG foo :%s' % (msg,)) + 2
+        self.client.msg('foo', msg, maxLen)
+        self.assertEqual(self.client.lines, ['PRIVMSG foo :%s' % (msg,)])
+        self.client.lines = []
+        self.client.msg('foo', msg, maxLen-1)
+        self.assertEqual(2, len(self.client.lines))
+        self.client.lines = []
+        self.client.msg('foo', msg, maxLen+1)
+        self.assertEqual(1, len(self.client.lines))
+
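+
+    # Illustrative sketch, not part of the upstream tests: an invented helper
+    # spelling out the length budget used above, the PRIVMSG overhead plus
+    # the payload plus two bytes of line ending.
+    def _exampleLengthBudget(self, target, payload):
+        return len('PRIVMSG %s :%s' % (target, payload)) + 2
+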
+
+    def test_newlinesAtStart(self):
+        """
+        An LF at the beginning of the message is ignored.
+        """
+        self.client.lines = []
+        self.client.msg('foo', '\nbar')
+        self.assertEqual(self.client.lines, ['PRIVMSG foo :bar'])
+
+
+    def test_newlinesAtEnd(self):
+        """
+        An LF at the end of the message is ignored.
+        """
+        self.client.lines = []
+        self.client.msg('foo', 'bar\n')
+        self.assertEqual(self.client.lines, ['PRIVMSG foo :bar'])
+
+
+    def test_newlinesWithinMessage(self):
+        """
+        An LF within a message splits it into multiple commands.
+        """
+        self.client.lines = []
+        self.client.msg('foo', 'bar\nbaz')
+        self.assertEqual(
+            self.client.lines,
+            ['PRIVMSG foo :bar',
+             'PRIVMSG foo :baz'])
+
+
+    def test_consecutiveNewlines(self):
+        """
+        Consecutive LFs do not cause a blank line.
+        """
+        self.client.lines = []
+        self.client.msg('foo', 'bar\n\nbaz')
+        self.assertEqual(
+            self.client.lines,
+            ['PRIVMSG foo :bar',
+             'PRIVMSG foo :baz'])
+
+
+    def assertLongMessageSplitting(self, message, expectedNumCommands,
+                                   length=None):
+        """
+        Assert that messages sent by L{IRCClient.msg} are split into an
+        expected number of commands and the original message is transmitted in
+        its entirety over those commands.
+        """
+        responsePrefix = ':%s!%s@%s ' % (
+            self.client.nickname,
+            self.client.realname,
+            self.client.hostname)
+
+        self.client.msg('foo', message, length=length)
+
+        privmsg = []
+        self.patch(self.client, 'privmsg', lambda *a: privmsg.append(a))
+        # Deliver these to IRCClient via the normal mechanisms.
+        for line in self.client.lines:
+            self.client.lineReceived(responsePrefix + line)
+
+        self.assertEqual(len(privmsg), expectedNumCommands)
+        receivedMessage = ''.join(
+            message for user, target, message in privmsg)
+
+        # Did the long message we sent arrive as intended?
+        self.assertEqual(message, receivedMessage)
+
+
+    def test_splitLongMessagesWithDefault(self):
+        """
+        If a maximum message length is not provided to L{IRCClient.msg}, a
+        best-guess effort is made to determine a safe maximum; messages longer
+        than this are split into multiple commands with the intent of
+        delivering long messages without losing data due to message truncation
+        when the server relays them.
+        """
+        message = 'o' * (irc.MAX_COMMAND_LENGTH - 2)
+        self.assertLongMessageSplitting(message, 2)
+
+
+    def test_splitLongMessagesWithOverride(self):
+        """
+        The maximum message length can be specified to L{IRCClient.msg};
+        messages longer than this are split into multiple commands with the
+        intent of delivering long messages without losing data due to message
+        truncation when the server relays them.
+        """
+        message = 'o' * (irc.MAX_COMMAND_LENGTH - 2)
+        self.assertLongMessageSplitting(
+            message, 3, length=irc.MAX_COMMAND_LENGTH // 2)
+
+
+    def test_newlinesBeforeLineBreaking(self):
+        """
+        IRCClient breaks on newlines before it breaks long lines.
+        """
+        # Because MAX_COMMAND_LENGTH includes framing characters, this long
+        # line is slightly longer than half the permissible message size.
+        longline = 'o' * (irc.MAX_COMMAND_LENGTH // 2)
+
+        self.client.msg('foo', longline + '\n' + longline)
+        self.assertEqual(
+            self.client.lines,
+            ['PRIVMSG foo :' + longline,
+             'PRIVMSG foo :' + longline])
+
+
+    def test_lineBreakOnWordBoundaries(self):
+        """
+        IRCClient prefers to break long lines at word boundaries.
+        """
+        # Because MAX_COMMAND_LENGTH includes framing characters, this long
+        # line is slightly longer than half the permissible message size.
+        longline = 'o' * (irc.MAX_COMMAND_LENGTH // 2)
+
+        self.client.msg('foo', longline + ' ' + longline)
+        self.assertEqual(
+            self.client.lines,
+            ['PRIVMSG foo :' + longline,
+             'PRIVMSG foo :' + longline])
+
+
+    def test_splitSanity(self):
+        """
+        L{twisted.words.protocols.irc.split} raises C{ValueError} if given a
+        length less than or equal to C{0} and returns C{[]} when splitting
+        C{''}.
+        """
+        # Whiteboxing
+        self.assertRaises(ValueError, irc.split, 'foo', -1)
+        self.assertRaises(ValueError, irc.split, 'foo', 0)
+        self.assertEqual([], irc.split('', 1))
+        self.assertEqual([], irc.split(''))
+
+
+    def test_splitDelimiters(self):
+        """
+        L{twisted.words.protocols.irc.split} skips any delimiter (space or
+        newline) that it finds at the very beginning of the string segment it
+        is operating on.  Nothing should be added to the output list because of
+        it.
+        """
+        r = irc.split("xx yyz", 2)
+        self.assertEqual(['xx', 'yy', 'z'], r)
+        r = irc.split("xx\nyyz", 2)
+        self.assertEqual(['xx', 'yy', 'z'], r)
+
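+
+    # Illustrative sketch, not part of the upstream tests: split() is the
+    # helper msg() relies on; for example irc.split('hello world', 6) breaks
+    # at the space rather than mid-word, giving ['hello', 'world'].
+    def _exampleSplit(self):
+        return irc.split('hello world', 6)
+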
+
+    def test_splitValidatesLength(self):
+        """
+        L{twisted.words.protocols.irc.split} raises C{ValueError} if given a
+        length less than or equal to C{0}.
+        """
+        self.assertRaises(ValueError, irc.split, "foo", 0)
+        self.assertRaises(ValueError, irc.split, "foo", -1)
+
+
+    def test_say(self):
+        """
+        L{IRCClient.say} prepends the channel prefix C{"#"} if necessary and
+        then sends the message to the server for delivery to that channel.
+        """
+        self.client.say("thechannel", "the message")
+        self.assertEqual(
+            self.client.lines, ["PRIVMSG #thechannel :the message"])
+
+
+
+class ClientTests(TestCase):
+    """
+    Tests for the protocol-level behavior of IRCClient methods intended to
+    be called by application code.
+    """
+    def setUp(self):
+        """
+        Create and connect a new L{IRCClient} to a new L{StringTransport}.
+        """
+        self.transport = StringTransport()
+        self.protocol = IRCClient()
+        self.protocol.performLogin = False
+        self.protocol.makeConnection(self.transport)
+
+        # Sanity check - we don't want anything to have happened at this
+        # point, since we're not in a test yet.
+        self.assertEqual(self.transport.value(), "")
+
+        self.addCleanup(self.transport.loseConnection)
+        self.addCleanup(self.protocol.connectionLost, None)
+
+
+    def getLastLine(self, transport):
+        """
+        Return the last IRC message in the transport buffer.
+        """
+        return transport.value().split('\r\n')[-2]
+
+
+    def test_away(self):
+        """
+        L{IRCClient.away} sends an AWAY command with the specified message.
+        """
+        message = "Sorry, I'm not here."
+        self.protocol.away(message)
+        expected = [
+            'AWAY :%s' % (message,),
+            '',
+        ]
+        self.assertEqual(self.transport.value().split('\r\n'), expected)
+
+
+    def test_back(self):
+        """
+        L{IRCClient.back} sends an AWAY command with an empty message.
+        """
+        self.protocol.back()
+        expected = [
+            'AWAY :',
+            '',
+        ]
+        self.assertEqual(self.transport.value().split('\r\n'), expected)
+
+
+    def test_whois(self):
+        """
+        L{IRCClient.whois} sends a WHOIS message.
+        """
+        self.protocol.whois('alice')
+        self.assertEqual(
+            self.transport.value().split('\r\n'),
+            ['WHOIS alice', ''])
+
+
+    def test_whoisWithServer(self):
+        """
+        L{IRCClient.whois} sends a WHOIS message with a server name if a
+        value is passed for the C{server} parameter.
+        """
+        self.protocol.whois('alice', 'example.org')
+        self.assertEqual(
+            self.transport.value().split('\r\n'),
+            ['WHOIS example.org alice', ''])
+
+
+    def test_register(self):
+        """
+        L{IRCClient.register} sends NICK and USER commands with the
+        username, name, hostname, server name, and real name specified.
+        """
+        username = 'testuser'
+        hostname = 'testhost'
+        servername = 'testserver'
+        self.protocol.realname = 'testname'
+        self.protocol.password = None
+        self.protocol.register(username, hostname, servername)
+        expected = [
+            'NICK %s' % (username,),
+            'USER %s %s %s :%s' % (
+                username, hostname, servername, self.protocol.realname),
+            '']
+        self.assertEqual(self.transport.value().split('\r\n'), expected)
+
+
+    def test_registerWithPassword(self):
+        """
+        If the C{password} attribute of L{IRCClient} is not C{None}, the
+        C{register} method also sends a PASS command with it as the
+        argument.
+        """
+        username = 'testuser'
+        hostname = 'testhost'
+        servername = 'testserver'
+        self.protocol.realname = 'testname'
+        self.protocol.password = 'testpass'
+        self.protocol.register(username, hostname, servername)
+        expected = [
+            'PASS %s' % (self.protocol.password,),
+            'NICK %s' % (username,),
+            'USER %s %s %s :%s' % (
+                username, hostname, servername, self.protocol.realname),
+            '']
+        self.assertEqual(self.transport.value().split('\r\n'), expected)
+
+
+    def test_registerWithTakenNick(self):
+        """
+        Verify that the client repeats the L{IRCClient.setNick} method with a
+        new value when presented with an C{ERR_NICKNAMEINUSE} while trying to
+        register.
+        """
+        username = 'testuser'
+        hostname = 'testhost'
+        servername = 'testserver'
+        self.protocol.realname = 'testname'
+        self.protocol.password = 'testpass'
+        self.protocol.register(username, hostname, servername)
+        self.protocol.irc_ERR_NICKNAMEINUSE('prefix', ['param'])
+        lastLine = self.getLastLine(self.transport)
+        self.assertNotEqual(lastLine, 'NICK %s' % (username,))
+
+        # Keep chaining underscores for each collision
+        self.protocol.irc_ERR_NICKNAMEINUSE('prefix', ['param'])
+        lastLine = self.getLastLine(self.transport)
+        self.assertEqual(lastLine, 'NICK %s' % (username + '__',))
+
+
+    def test_overrideAlterCollidedNick(self):
+        """
+        L{IRCClient.alterCollidedNick} determines how a nickname is altered upon
+        collision while a user is trying to change to that nickname.
+        """
+        nick = 'foo'
+        self.protocol.alterCollidedNick = lambda nick: nick + '***'
+        self.protocol.register(nick)
+        self.protocol.irc_ERR_NICKNAMEINUSE('prefix', ['param'])
+        lastLine = self.getLastLine(self.transport)
+        self.assertEqual(
+            lastLine, 'NICK %s' % (nick + '***',))
+
+
+    def test_nickChange(self):
+        """
+        When a NICK command is sent after signon, C{IRCClient.nickname} is set
+        to the new nickname I{after} the server sends an acknowledgement.
+        """
+        oldnick = 'foo'
+        newnick = 'bar'
+        self.protocol.register(oldnick)
+        self.protocol.irc_RPL_WELCOME('prefix', ['param'])
+        self.protocol.setNick(newnick)
+        self.assertEqual(self.protocol.nickname, oldnick)
+        self.protocol.irc_NICK('%s!quux@qux' % (oldnick,), [newnick])
+        self.assertEqual(self.protocol.nickname, newnick)
+
+
+    def test_erroneousNick(self):
+        """
+        Trying to register an illegal nickname results in the default legal
+        nickname being set, and trying to change a nickname to an illegal
+        nickname results in the old nickname being kept.
+        """
+        # Registration case: change illegal nickname to erroneousNickFallback
+        badnick = 'foo'
+        self.assertEqual(self.protocol._registered, False)
+        self.protocol.register(badnick)
+        self.protocol.irc_ERR_ERRONEUSNICKNAME('prefix', ['param'])
+        lastLine = self.getLastLine(self.transport)
+        self.assertEqual(
+            lastLine, 'NICK %s' % (self.protocol.erroneousNickFallback,))
+        self.protocol.irc_RPL_WELCOME('prefix', ['param'])
+        self.assertEqual(self.protocol._registered, True)
+        self.protocol.setNick(self.protocol.erroneousNickFallback)
+        self.assertEqual(
+            self.protocol.nickname, self.protocol.erroneousNickFallback)
+
+        # Illegal nick change attempt after registration. Fall back to the old
+        # nickname instead of erroneousNickFallback.
+        oldnick = self.protocol.nickname
+        self.protocol.setNick(badnick)
+        self.protocol.irc_ERR_ERRONEUSNICKNAME('prefix', ['param'])
+        lastLine = self.getLastLine(self.transport)
+        self.assertEqual(
+            lastLine, 'NICK %s' % (badnick,))
+        self.assertEqual(self.protocol.nickname, oldnick)
+
+
+    def test_describe(self):
+        """
+        L{IRCClient.describe} sends a CTCP ACTION message to the target
+        specified.
+        """
+        target = 'foo'
+        channel = '#bar'
+        action = 'waves'
+        self.protocol.describe(target, action)
+        self.protocol.describe(channel, action)
+        expected = [
+            'PRIVMSG %s :\01ACTION %s\01' % (target, action),
+            'PRIVMSG %s :\01ACTION %s\01' % (channel, action),
+            '']
+        self.assertEqual(self.transport.value().split('\r\n'), expected)
+
+
+    def test_noticedDoesntPrivmsg(self):
+        """
+        The default implementation of L{IRCClient.noticed} doesn't invoke
+        C{privmsg()}.
+        """
+        def privmsg(user, channel, message):
+            self.fail("privmsg() should not have been called")
+        self.protocol.privmsg = privmsg
+        self.protocol.irc_NOTICE(
+            'spam', ['#greasyspooncafe', "I don't want any spam!"])
+
+
+
+class DccChatFactoryTests(unittest.TestCase):
+    """
+    Tests for L{DccChatFactory}
+    """
+    def test_buildProtocol(self):
+        """
+        L{DccChatFactory.buildProtocol} returns a L{DccChat} instance whose
+        C{factory} attribute is set to the factory that created it.
+        """
+        queryData = ('fromUser', None, None)
+        f = irc.DccChatFactory(None, queryData)
+        p = f.buildProtocol('127.0.0.1')
+        self.assertTrue(isinstance(p, irc.DccChat))
+        self.assertEqual(p.factory, f)
diff --git a/ThirdParty/Twisted/twisted/words/test/test_irc_service.py b/ThirdParty/Twisted/twisted/words/test/test_irc_service.py
new file mode 100644
index 0000000..f3ed292
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_irc_service.py
@@ -0,0 +1,216 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for IRC portions of L{twisted.words.service}.
+"""
+
+from twisted.trial import unittest
+from twisted.test import proto_helpers
+from twisted.words.service import InMemoryWordsRealm, IRCFactory, IRCUser
+from twisted.words.protocols import irc
+from twisted.cred import checkers, portal
+
+class IRCUserTestCase(unittest.TestCase):
+    """
+    Isolated tests for L{IRCUser}
+    """
+
+    def setUp(self):
+        """
+        Sets up a Realm, Portal, Factory, IRCUser, Transport, and Connection
+        for our tests.
+        """
+        self.wordsRealm = InMemoryWordsRealm("example.com")
+        self.portal = portal.Portal(self.wordsRealm,
+            [checkers.InMemoryUsernamePasswordDatabaseDontUse(john="pass")])
+        self.factory = IRCFactory(self.wordsRealm, self.portal)
+        self.ircUser = self.factory.buildProtocol(None)
+        self.stringTransport = proto_helpers.StringTransport()
+        self.ircUser.makeConnection(self.stringTransport)
+
+
+    def test_sendMessage(self):
+        """
+        Sending a message to a user after they have sent NICK, but before they
+        have authenticated, results in a message from "example.com".
+        """
+        self.ircUser.irc_NICK("", ["mynick"])
+        self.stringTransport.clear()
+        self.ircUser.sendMessage("foo")
+        self.assertEqual(":example.com foo mynick\r\n",
+                          self.stringTransport.value())
+
+
+    def response(self):
+        """
+        Parse the responses received so far, clear the transport, and return
+        them.
+        """
+        response = self.ircUser.transport.value().splitlines()
+        self.ircUser.transport.clear()
+        return map(irc.parsemsg, response)
+
+
+    def scanResponse(self, response, messageType):
+        """
+        Get messages of a given type out of a response.
+
+        @param response: The parsed IRC messages of the response, as returned
+        by L{IRCUserTestCase.response}.
+
+        @param messageType: The string type of the desired messages.
+
+        @return: An iterator which yields 2-tuples of C{(index, ircMessage)}
+        """
+        for n, message in enumerate(response):
+            if (message[1] == messageType):
+                yield n, message
+
+
+    def test_sendNickSendsGreeting(self):
+        """
+        Receiving NICK without authenticating sends the MOTD Start and MOTD End
+        messages, which is required by certain popular IRC clients (such as
+        Pidgin) before a connection is considered to be fully established.
+        """
+        self.ircUser.irc_NICK("", ["mynick"])
+        response = self.response()
+        start = list(self.scanResponse(response, irc.RPL_MOTDSTART))
+        end = list(self.scanResponse(response, irc.RPL_ENDOFMOTD))
+        self.assertEqual(start,
+            [(0, ('example.com', '375', ['mynick', '- example.com Message of the Day - ']))])
+        self.assertEqual(end,
+            [(1, ('example.com', '376', ['mynick', 'End of /MOTD command.']))])
+
+
+    def test_fullLogin(self):
+        """
+        Receiving USER, PASS, NICK will log in the user, and transmit the
+        appropriate response messages.
+        """
+        self.ircUser.irc_USER("", ["john doe"])
+        self.ircUser.irc_PASS("", ["pass"])
+        self.ircUser.irc_NICK("", ["john"])
+
+        version = ('Your host is example.com, running version %s' %
+            (self.factory._serverInfo["serviceVersion"],))
+
+        creation = ('This server was created on %s' %
+            (self.factory._serverInfo["creationDate"],))
+
+        self.assertEqual(self.response(),
+            [('example.com', '375',
+              ['john', '- example.com Message of the Day - ']),
+             ('example.com', '376', ['john', 'End of /MOTD command.']),
+             ('example.com', '001', ['john', 'connected to Twisted IRC']),
+             ('example.com', '002', ['john', version]),
+             ('example.com', '003', ['john', creation]),
+             ('example.com', '004',
+              ['john', 'example.com', self.factory._serverInfo["serviceVersion"],
+               'w', 'n'])])
+
+
+
+class MocksyIRCUser(IRCUser):
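+    """
+    An L{IRCUser} stub that records the response codes passed to
+    C{sendMessage} instead of writing to a transport.
+    """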
+    def __init__(self):
+        self.mockedCodes = []
+
+    def sendMessage(self, code, *_, **__):
+        self.mockedCodes.append(code)
+
+BADTEXT = '\xff'
+
+class IRCUserBadEncodingTestCase(unittest.TestCase):
+    """
+    Verifies that L{IRCUser} sends the correct error messages back to clients
+    when given indecipherable bytes
+    """
+    # TODO: irc_NICK -- but NICKSERV is used for that, so it isn't as easy.
+
+    def setUp(self):
+        self.ircuser = MocksyIRCUser()
+
+    def assertChokesOnBadBytes(self, irc_x, error):
+        """
+        Asserts that IRCUser sends the relevant error code when a given irc_x
+        dispatch method is given undecodable bytes.
+
+        @param irc_x: the name of the irc_FOO method to test.
+        For example, irc_x = 'PRIVMSG' will check irc_PRIVMSG
+
+        @param error: the error code irc_x should send. For example,
+        irc.ERR_NOTONCHANNEL
+        """
+        getattr(self.ircuser, 'irc_%s' % irc_x)(None, [BADTEXT])
+        self.assertEqual(self.ircuser.mockedCodes, [error])
+
+    # no such channel
+
+    def test_JOIN(self):
+        """
+        Tests that irc_JOIN sends ERR_NOSUCHCHANNEL if the channel name can't
+        be decoded.
+        """
+        self.assertChokesOnBadBytes('JOIN', irc.ERR_NOSUCHCHANNEL)
+
+    def test_NAMES(self):
+        """
+        Tests that irc_NAMES sends ERR_NOSUCHCHANNEL if the channel name can't
+        be decoded.
+        """
+        self.assertChokesOnBadBytes('NAMES', irc.ERR_NOSUCHCHANNEL)
+
+    def test_TOPIC(self):
+        """
+        Tests that irc_TOPIC sends ERR_NOSUCHCHANNEL if the channel name can't
+        be decoded.
+        """
+        self.assertChokesOnBadBytes('TOPIC', irc.ERR_NOSUCHCHANNEL)
+
+    def test_LIST(self):
+        """
+        Tests that irc_LIST sends ERR_NOSUCHCHANNEL if the channel name can't
+        be decoded.
+        """
+        self.assertChokesOnBadBytes('LIST', irc.ERR_NOSUCHCHANNEL)
+
+    # no such nick
+
+    def test_MODE(self):
+        """
+        Tests that irc_MODE sends ERR_NOSUCHNICK if the target name can't
+        be decoded.
+        """
+        self.assertChokesOnBadBytes('MODE', irc.ERR_NOSUCHNICK)
+
+    def test_PRIVMSG(self):
+        """
+        Tests that irc_PRIVMSG sends ERR_NOSUCHNICK if the target name can't
+        be decoded.
+        """
+        self.assertChokesOnBadBytes('PRIVMSG', irc.ERR_NOSUCHNICK)
+
+    def test_WHOIS(self):
+        """
+        Tests that irc_WHOIS sends ERR_NOSUCHNICK if the target name can't
+        be decoded.
+        """
+        self.assertChokesOnBadBytes('WHOIS', irc.ERR_NOSUCHNICK)
+
+    # not on channel
+
+    def test_PART(self):
+        """
+        Tests that irc_PART sends ERR_NOTONCHANNEL if the target name can't
+        be decoded.
+        """
+        self.assertChokesOnBadBytes('PART', irc.ERR_NOTONCHANNEL)
+
+    # probably nothing
+
+    def test_WHO(self):
+        """
+        Tests that irc_WHO immediately ends the WHO list if the target name
+        can't be decoded.
+        """
+        self.assertChokesOnBadBytes('WHO', irc.RPL_ENDOFWHO)
diff --git a/ThirdParty/Twisted/twisted/words/test/test_ircsupport.py b/ThirdParty/Twisted/twisted/words/test/test_ircsupport.py
new file mode 100644
index 0000000..de1f40b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_ircsupport.py
@@ -0,0 +1,79 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.im.ircsupport}.
+"""
+
+from twisted.trial.unittest import TestCase
+from twisted.test.proto_helpers import StringTransport
+
+from twisted.words.im.basechat import Conversation, ChatUI
+from twisted.words.im.ircsupport import IRCAccount, IRCProto
+
+
+
+class StubConversation(Conversation):
+    def show(self):
+        pass
+
+
+
+class StubChatUI(ChatUI):
+    def getGroupConversation(self, group, Class=StubConversation, stayHidden=0):
+        return ChatUI.getGroupConversation(self, group, Class, stayHidden)
+
+
+
+class IRCProtoTests(TestCase):
+    """
+    Tests for L{IRCProto}.
+    """
+    def setUp(self):
+        self.account = IRCAccount(
+            "Some account", False, "alice", None, "example.com", 6667)
+        self.proto = IRCProto(self.account, StubChatUI(), None)
+
+
+    def test_login(self):
+        """
+        When L{IRCProto} is connected to a transport, it sends I{NICK} and
+        I{USER} commands with the username from the account object.
+        """
+        transport = StringTransport()
+        self.proto.makeConnection(transport)
+        self.assertEqual(
+            transport.value(),
+            "NICK alice\r\n"
+            "USER alice foo bar :Twisted-IM user\r\n")
+
+
+    def test_authenticate(self):
+        """
+        If created with an account with a password, L{IRCProto} sends a
+        I{PASS} command before the I{NICK} and I{USER} commands.
+        """
+        self.account.password = "secret"
+        transport = StringTransport()
+        self.proto.makeConnection(transport)
+        self.assertEqual(
+            transport.value(),
+            "PASS :secret\r\n"
+            "NICK alice\r\n"
+            "USER alice foo bar :Twisted-IM user\r\n")
+
+
+    def test_channels(self):
+        """
+        If created with an account with a list of channels, L{IRCProto}
+        joins each of those channels after registering.
+        """
+        self.account.channels = ['#foo', '#bar']
+        transport = StringTransport()
+        self.proto.makeConnection(transport)
+        self.assertEqual(
+            transport.value(),
+            "NICK alice\r\n"
+            "USER alice foo bar :Twisted-IM user\r\n"
+            "JOIN #foo\r\n"
+            "JOIN #bar\r\n")
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabberclient.py b/ThirdParty/Twisted/twisted/words/test/test_jabberclient.py
new file mode 100644
index 0000000..87af883
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabberclient.py
@@ -0,0 +1,414 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.jabber.client}
+"""
+
+from twisted.internet import defer
+from twisted.python.hashlib import sha1
+from twisted.trial import unittest
+from twisted.words.protocols.jabber import client, error, jid, xmlstream
+from twisted.words.protocols.jabber.sasl import SASLInitiatingInitializer
+from twisted.words.xish import utility
+
+IQ_AUTH_GET = '/iq[@type="get"]/query[@xmlns="jabber:iq:auth"]'
+IQ_AUTH_SET = '/iq[@type="set"]/query[@xmlns="jabber:iq:auth"]'
+NS_BIND = 'urn:ietf:params:xml:ns:xmpp-bind'
+IQ_BIND_SET = '/iq[@type="set"]/bind[@xmlns="%s"]' % NS_BIND
+NS_SESSION = 'urn:ietf:params:xml:ns:xmpp-session'
+IQ_SESSION_SET = '/iq[@type="set"]/session[@xmlns="%s"]' % NS_SESSION
+
+class CheckVersionInitializerTest(unittest.TestCase):
+    def setUp(self):
+        a = xmlstream.Authenticator()
+        xs = xmlstream.XmlStream(a)
+        self.init = client.CheckVersionInitializer(xs)
+
+
+    def testSupported(self):
+        """
+        Test supported version number 1.0
+        """
+        self.init.xmlstream.version = (1, 0)
+        self.init.initialize()
+
+
+    def testNotSupported(self):
+        """
+        Test unsupported version number 0.0, and check exception.
+        """
+        self.init.xmlstream.version = (0, 0)
+        exc = self.assertRaises(error.StreamError, self.init.initialize)
+        self.assertEqual('unsupported-version', exc.condition)
+
+
+
+class InitiatingInitializerHarness(object):
+    """
+    Testing harness for interacting with XML stream initializers.
+
+    This sets up an L{utility.XmlPipe} to create a communication channel between
+    the initializer and the stubbed receiving entity. It features a sink and
+    source side that both act similarly to a real L{xmlstream.XmlStream}. The
+    sink is augmented with an authenticator to which initializers can be added.
+
+    The harness also provides some utility methods to work with event observers
+    and deferreds.
+    """
+
+    def setUp(self):
+        self.output = []
+        self.pipe = utility.XmlPipe()
+        self.xmlstream = self.pipe.sink
+        self.authenticator = xmlstream.ConnectAuthenticator('example.org')
+        self.xmlstream.authenticator = self.authenticator
+
+
+    def waitFor(self, event, handler):
+        """
+        Observe an output event, returning a deferred.
+
+        The returned deferred will be fired when the given event has been
+        observed on the source end of the L{XmlPipe} tied to the protocol
+        under test. The handler is added as the first callback.
+
+        @param event: The event to be observed. See
+            L{utility.EventDispatcher.addOnetimeObserver}.
+        @param handler: The handler to be called with the observed event object.
+        @rtype: L{defer.Deferred}.
+        """
+        d = defer.Deferred()
+        d.addCallback(handler)
+        self.pipe.source.addOnetimeObserver(event, d.callback)
+        return d
+
+
+
+class IQAuthInitializerTest(InitiatingInitializerHarness, unittest.TestCase):
+    """
+    Tests for L{client.IQAuthInitializer}.
+    """
+
+    def setUp(self):
+        super(IQAuthInitializerTest, self).setUp()
+        self.init = client.IQAuthInitializer(self.xmlstream)
+        self.authenticator.jid = jid.JID('user@example.com/resource')
+        self.authenticator.password = 'secret'
+
+
+    def testPlainText(self):
+        """
+        Test plain-text authentication.
+
+        Act as a server supporting plain-text authentication and expect the
+        C{password} field to be filled with the password. Then act as if
+        authentication succeeds.
+        """
+
+        def onAuthGet(iq):
+            """
+            Called when the initializer sent a query for authentication methods.
+
+            The response informs the client that plain-text authentication
+            is supported.
+            """
+
+            # Create server response
+            response = xmlstream.toResponse(iq, 'result')
+            response.addElement(('jabber:iq:auth', 'query'))
+            response.query.addElement('username')
+            response.query.addElement('password')
+            response.query.addElement('resource')
+
+            # Set up an observer for the next request we expect.
+            d = self.waitFor(IQ_AUTH_SET, onAuthSet)
+
+            # Send server response
+            self.pipe.source.send(response)
+
+            return d
+
+        def onAuthSet(iq):
+            """
+            Called when the initializer sent the authentication request.
+
+            The server checks the credentials and responds with an empty result
+            signalling success.
+            """
+            self.assertEqual('user', unicode(iq.query.username))
+            self.assertEqual('secret', unicode(iq.query.password))
+            self.assertEqual('resource', unicode(iq.query.resource))
+
+            # Send server response
+            response = xmlstream.toResponse(iq, 'result')
+            self.pipe.source.send(response)
+
+        # Set up an observer for the request for authentication fields
+        d1 = self.waitFor(IQ_AUTH_GET, onAuthGet)
+
+        # Start the initializer
+        d2 = self.init.initialize()
+        return defer.gatherResults([d1, d2])
+
+
+    def testDigest(self):
+        """
+        Test digest authentication.
+
+        Act as a server supporting digest authentication and expect the
+        C{digest} field to be filled with a sha1 digest of the concatenated
+        stream session identifier and password. Then act as if authentication
+        succeeds.
+        """
+
+        def onAuthGet(iq):
+            """
+            Called when the initializer sent a query for authentication methods.
+
+            The response informs the client that digest authentication is
+            supported.
+            """
+
+            # Create server response
+            response = xmlstream.toResponse(iq, 'result')
+            response.addElement(('jabber:iq:auth', 'query'))
+            response.query.addElement('username')
+            response.query.addElement('digest')
+            response.query.addElement('resource')
+
+            # Set up an observer for the next request we expect.
+            d = self.waitFor(IQ_AUTH_SET, onAuthSet)
+
+            # Send server response
+            self.pipe.source.send(response)
+
+            return d
+
+        def onAuthSet(iq):
+            """
+            Called when the initializer sent the authentication request.
+
+            The server checks the credentials and responds with an empty result
+            signalling success.
+            """
+            self.assertEqual('user', unicode(iq.query.username))
+            self.assertEqual(sha1('12345secret').hexdigest(),
+                              unicode(iq.query.digest).encode('utf-8'))
+            self.assertEqual('resource', unicode(iq.query.resource))
+
+            # Send server response
+            response = xmlstream.toResponse(iq, 'result')
+            self.pipe.source.send(response)
+
+        # Digest authentication relies on the stream session identifier. Set it.
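+        # With this sid and the password 'secret' set in setUp, onAuthSet
+        # expects the digest sha1('12345' + 'secret').hexdigest().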
+        self.xmlstream.sid = u'12345'
+
+        # Set up an observer for the request for authentication fields
+        d1 = self.waitFor(IQ_AUTH_GET, onAuthGet)
+
+        # Start the initializer
+        d2 = self.init.initialize()
+
+        return defer.gatherResults([d1, d2])
+
+
+    def testFailRequestFields(self):
+        """
+        Test initializer failure of request for fields for authentication.
+        """
+        def onAuthGet(iq):
+            """
+            Called when the initializer sent a query for authentication methods.
+
+            The server responds that the client is not authorized to authenticate.
+            """
+            response = error.StanzaError('not-authorized').toResponse(iq)
+            self.pipe.source.send(response)
+
+        # Set up an observer for the request for authentication fields
+        d1 = self.waitFor(IQ_AUTH_GET, onAuthGet)
+
+        # Start the initializer
+        d2 = self.init.initialize()
+
+        # The initializer should fail with a stanza error.
+        self.assertFailure(d2, error.StanzaError)
+
+        return defer.gatherResults([d1, d2])
+
+
+    def testFailAuth(self):
+        """
+        Test initializer failure to authenticate.
+        """
+
+        def onAuthGet(iq):
+            """
+            Called when the initializer sent a query for authentication methods.
+
+            The response informs the client that plain-text authentication
+            is supported.
+            """
+
+            # Send server response
+            response = xmlstream.toResponse(iq, 'result')
+            response.addElement(('jabber:iq:auth', 'query'))
+            response.query.addElement('username')
+            response.query.addElement('password')
+            response.query.addElement('resource')
+
+            # Set up an observer for the next request we expect.
+            d = self.waitFor(IQ_AUTH_SET, onAuthSet)
+
+            # Send server response
+            self.pipe.source.send(response)
+
+            return d
+
+        def onAuthSet(iq):
+            """
+            Called when the initializer sent the authentication request.
+
+            The server checks the credentials and responds with a not-authorized
+            stanza error.
+            """
+            response = error.StanzaError('not-authorized').toResponse(iq)
+            self.pipe.source.send(response)
+
+        # Set up an observer for the request for authentication fields
+        d1 = self.waitFor(IQ_AUTH_GET, onAuthGet)
+
+        # Start the initializer
+        d2 = self.init.initialize()
+
+        # The initializer should fail with a stanza error.
+        self.assertFailure(d2, error.StanzaError)
+
+        return defer.gatherResults([d1, d2])
+
+
+
+class BindInitializerTest(InitiatingInitializerHarness, unittest.TestCase):
+    """
+    Tests for L{client.BindInitializer}.
+    """
+
+    def setUp(self):
+        super(BindInitializerTest, self).setUp()
+        self.init = client.BindInitializer(self.xmlstream)
+        self.authenticator.jid = jid.JID('user@example.com/resource')
+
+
+    def testBasic(self):
+        """
+        Set up a stream, and act as if resource binding succeeds.
+        """
+        def onBind(iq):
+            response = xmlstream.toResponse(iq, 'result')
+            response.addElement((NS_BIND, 'bind'))
+            response.bind.addElement('jid',
+                                     content='user@example.com/other resource')
+            self.pipe.source.send(response)
+
+        def cb(result):
+            self.assertEqual(jid.JID('user@example.com/other resource'),
+                              self.authenticator.jid)
+
+        d1 = self.waitFor(IQ_BIND_SET, onBind)
+        d2 = self.init.start()
+        d2.addCallback(cb)
+        return defer.gatherResults([d1, d2])
+
+
+    def testFailure(self):
+        """
+        Set up a stream, and act as if resource binding fails.
+        """
+        def onBind(iq):
+            response = error.StanzaError('conflict').toResponse(iq)
+            self.pipe.source.send(response)
+
+        d1 = self.waitFor(IQ_BIND_SET, onBind)
+        d2 = self.init.start()
+        self.assertFailure(d2, error.StanzaError)
+        return defer.gatherResults([d1, d2])
+
+
+
+class SessionInitializerTest(InitiatingInitializerHarness, unittest.TestCase):
+    """
+    Tests for L{client.SessionInitializer}.
+    """
+
+    def setUp(self):
+        super(SessionInitializerTest, self).setUp()
+        self.init = client.SessionInitializer(self.xmlstream)
+
+
+    def testSuccess(self):
+        """
+        Set up a stream, and act as if session establishment succeeds.
+        """
+
+        def onSession(iq):
+            response = xmlstream.toResponse(iq, 'result')
+            self.pipe.source.send(response)
+
+        d1 = self.waitFor(IQ_SESSION_SET, onSession)
+        d2 = self.init.start()
+        return defer.gatherResults([d1, d2])
+
+
+    def testFailure(self):
+        """
+        Set up a stream, and act as if session establishment fails.
+        """
+        def onSession(iq):
+            response = error.StanzaError('forbidden').toResponse(iq)
+            self.pipe.source.send(response)
+
+        d1 = self.waitFor(IQ_SESSION_SET, onSession)
+        d2 = self.init.start()
+        self.assertFailure(d2, error.StanzaError)
+        return defer.gatherResults([d1, d2])
+
+
+
+class XMPPAuthenticatorTest(unittest.TestCase):
+    """
+    Test for both XMPPAuthenticator and XMPPClientFactory.
+    """
+    def testBasic(self):
+        """
+        Test basic operations.
+
+        Set up an XMPPClientFactory, which sets up an XMPPAuthenticator, and let
+        it produce a protocol instance. Then inspect the instance variables of
+        the authenticator and XML stream objects.
+        """
+        self.client_jid = jid.JID('user@example.com/resource')
+
+        # Get an XmlStream instance. Note that it gets initialized with the
+        # XMPPAuthenticator (that has its associateWithXmlStream called) that
+        # is in turn initialized with the arguments to the factory.
+        xs = client.XMPPClientFactory(self.client_jid,
+                                      'secret').buildProtocol(None)
+
+        # test authenticator's instance variables
+        self.assertEqual('example.com', xs.authenticator.otherHost)
+        self.assertEqual(self.client_jid, xs.authenticator.jid)
+        self.assertEqual('secret', xs.authenticator.password)
+
+        # test list of initializers
+        version, tls, sasl, bind, session = xs.initializers
+
+        self.assert_(isinstance(tls, xmlstream.TLSInitiatingInitializer))
+        self.assert_(isinstance(sasl, SASLInitiatingInitializer))
+        self.assert_(isinstance(bind, client.BindInitializer))
+        self.assert_(isinstance(session, client.SessionInitializer))
+
+        self.assertFalse(tls.required)
+        self.assertTrue(sasl.required)
+        self.assertFalse(bind.required)
+        self.assertFalse(session.required)
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabbercomponent.py b/ThirdParty/Twisted/twisted/words/test/test_jabbercomponent.py
new file mode 100644
index 0000000..d8bb108
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabbercomponent.py
@@ -0,0 +1,422 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.jabber.component}
+"""
+
+from twisted.python import failure
+from twisted.python.hashlib import sha1
+from twisted.trial import unittest
+from twisted.words.protocols.jabber import component, xmlstream
+from twisted.words.protocols.jabber.jid import JID
+from twisted.words.xish import domish
+from twisted.words.xish.utility import XmlPipe
+
+class DummyTransport:
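+    """
+    A fake transport that appends everything written to it to a list.
+    """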
+    def __init__(self, list):
+        self.list = list
+
+    def write(self, bytes):
+        self.list.append(bytes)
+
+class ComponentInitiatingInitializerTest(unittest.TestCase):
+    def setUp(self):
+        self.output = []
+
+        self.authenticator = xmlstream.Authenticator()
+        self.authenticator.password = 'secret'
+        self.xmlstream = xmlstream.XmlStream(self.authenticator)
+        self.xmlstream.namespace = 'test:component'
+        self.xmlstream.send = self.output.append
+        self.xmlstream.connectionMade()
+        self.xmlstream.dataReceived(
+                "<stream:stream xmlns='test:component' "
+                "xmlns:stream='http://etherx.jabber.org/streams' "
+                "from='example.com' id='12345' version='1.0'>")
+        self.xmlstream.sid = u'12345'
+        self.init = component.ComponentInitiatingInitializer(self.xmlstream)
+
+    def testHandshake(self):
+        """
+        Test basic operations of component handshake.
+        """
+
+        d = self.init.initialize()
+
+        # the initializer should have sent the handshake request
+
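+        # The handshake content is expected to be the hex sha1 digest of the
+        # stream sid concatenated with the shared secret, i.e.
+        # sha1('12345' + 'secret').hexdigest(), as asserted below.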
+        handshake = self.output[-1]
+        self.assertEqual('handshake', handshake.name)
+        self.assertEqual('test:component', handshake.uri)
+        self.assertEqual(sha1("%s%s" % ('12345', 'secret')).hexdigest(),
+                          unicode(handshake))
+
+        # successful authentication
+
+        handshake.children = []
+        self.xmlstream.dataReceived(handshake.toXml())
+
+        return d
+
+class ComponentAuthTest(unittest.TestCase):
+    def authPassed(self, stream):
+        self.authComplete = True
+
+    def testAuth(self):
+        self.authComplete = False
+        outlist = []
+
+        ca = component.ConnectComponentAuthenticator("cjid", "secret")
+        xs = xmlstream.XmlStream(ca)
+        xs.transport = DummyTransport(outlist)
+
+        xs.addObserver(xmlstream.STREAM_AUTHD_EVENT,
+                       self.authPassed)
+
+        # Go...
+        xs.connectionMade()
+        xs.dataReceived("<stream:stream xmlns='jabber:component:accept' xmlns:stream='http://etherx.jabber.org/streams' from='cjid' id='12345'>")
+
+        # Calculate what we expect the handshake value to be
+        hv = sha1("%s%s" % ("12345", "secret")).hexdigest()
+
+        self.assertEqual(outlist[1], "<handshake>%s</handshake>" % (hv))
+
+        xs.dataReceived("<handshake/>")
+
+        self.assertEqual(self.authComplete, True)
+
+
+class JabberServiceHarness(component.Service):
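+    """
+    A L{component.Service} that records which connection-related callbacks
+    have been invoked.
+    """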
+    def __init__(self):
+        self.componentConnectedFlag = False
+        self.componentDisconnectedFlag = False
+        self.transportConnectedFlag = False
+
+    def componentConnected(self, xmlstream):
+        self.componentConnectedFlag = True
+
+    def componentDisconnected(self):
+        self.componentDisconnectedFlag = True
+
+    def transportConnected(self, xmlstream):
+        self.transportConnectedFlag = True
+
+
+class TestJabberServiceManager(unittest.TestCase):
+    def testSM(self):
+        # Set up the service manager and test harness
+        sm = component.ServiceManager("foo", "password")
+        svc = JabberServiceHarness()
+        svc.setServiceParent(sm)
+
+        # Create a write list
+        wlist = []
+
+        # Set up an XmlStream
+        xs = sm.getFactory().buildProtocol(None)
+        xs.transport = self
+        xs.transport.write = wlist.append
+
+        # Indicate that it's connected
+        xs.connectionMade()
+
+        # Ensure the test service harness got notified
+        self.assertEqual(True, svc.transportConnectedFlag)
+
+        # Jump ahead and pretend like the stream got auth'd
+        xs.dispatch(xs, xmlstream.STREAM_AUTHD_EVENT)
+
+        # Ensure the test service harness got notified
+        self.assertEqual(True, svc.componentConnectedFlag)
+
+        # Pretend to drop the connection
+        xs.connectionLost(None)
+
+        # Ensure the test service harness got notified
+        self.assertEqual(True, svc.componentDisconnectedFlag)
+
+
+
+class RouterTest(unittest.TestCase):
+    """
+    Tests for L{component.Router}.
+    """
+
+    def test_addRoute(self):
+        """
+        Test route registration and routing on incoming stanzas.
+        """
+        router = component.Router()
+        routed = []
+        router.route = lambda element: routed.append(element)
+
+        pipe = XmlPipe()
+        router.addRoute('example.org', pipe.sink)
+        self.assertEqual(1, len(router.routes))
+        self.assertEqual(pipe.sink, router.routes['example.org'])
+
+        element = domish.Element(('testns', 'test'))
+        pipe.source.send(element)
+        self.assertEqual([element], routed)
+
+
+    def test_route(self):
+        """
+        Test routing of a message.
+        """
+        component1 = XmlPipe()
+        component2 = XmlPipe()
+        router = component.Router()
+        router.addRoute('component1.example.org', component1.sink)
+        router.addRoute('component2.example.org', component2.sink)
+
+        outgoing = []
+        component2.source.addObserver('/*',
+                                      lambda element: outgoing.append(element))
+        stanza = domish.Element((None, 'presence'))
+        stanza['from'] = 'component1.example.org'
+        stanza['to'] = 'component2.example.org'
+        component1.source.send(stanza)
+        self.assertEqual([stanza], outgoing)
+
+
+    def test_routeDefault(self):
+        """
+        Test routing of a message using the default route.
+
+        The default route is the one with C{None} as its key in the
+        routing table. It is taken when there is no more specific route
+        in the routing table that matches the stanza's destination.
+        """
+        component1 = XmlPipe()
+        s2s = XmlPipe()
+        router = component.Router()
+        router.addRoute('component1.example.org', component1.sink)
+        router.addRoute(None, s2s.sink)
+
+        outgoing = []
+        s2s.source.addObserver('/*', lambda element: outgoing.append(element))
+        stanza = domish.Element((None, 'presence'))
+        stanza['from'] = 'component1.example.org'
+        stanza['to'] = 'example.com'
+        component1.source.send(stanza)
+        self.assertEqual([stanza], outgoing)
+
+
+
+class ListenComponentAuthenticatorTest(unittest.TestCase):
+    """
+    Tests for L{component.ListenComponentAuthenticator}.
+    """
+
+    def setUp(self):
+        self.output = []
+        authenticator = component.ListenComponentAuthenticator('secret')
+        self.xmlstream = xmlstream.XmlStream(authenticator)
+        self.xmlstream.send = self.output.append
+
+
+    def loseConnection(self):
+        """
+        Stub loseConnection because we are a transport.
+        """
+        self.xmlstream.connectionLost("no reason")
+
+
+    def test_streamStarted(self):
+        """
+        The received stream header should set several attributes.
+        """
+        observers = []
+
+        def addOnetimeObserver(event, observerfn):
+            observers.append((event, observerfn))
+
+        xs = self.xmlstream
+        xs.addOnetimeObserver = addOnetimeObserver
+
+        xs.makeConnection(self)
+        self.assertIdentical(None, xs.sid)
+        self.assertFalse(xs._headerSent)
+
+        xs.dataReceived("<stream:stream xmlns='jabber:component:accept' "
+                         "xmlns:stream='http://etherx.jabber.org/streams' "
+                         "to='component.example.org'>")
+        self.assertEqual((0, 0), xs.version)
+        self.assertNotIdentical(None, xs.sid)
+        self.assertTrue(xs._headerSent)
+        self.assertEqual(('/*', xs.authenticator.onElement), observers[-1])
+
+
+    def test_streamStartedWrongNamespace(self):
+        """
+        The received stream header should have a correct namespace.
+        """
+        streamErrors = []
+
+        xs = self.xmlstream
+        xs.sendStreamError = streamErrors.append
+        xs.makeConnection(self)
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                         "xmlns:stream='http://etherx.jabber.org/streams' "
+                         "to='component.example.org'>")
+        self.assertEqual(1, len(streamErrors))
+        self.assertEqual('invalid-namespace', streamErrors[-1].condition)
+
+
+    def test_streamStartedNoTo(self):
+        """
+        The received stream header should have a 'to' attribute.
+        """
+        streamErrors = []
+
+        xs = self.xmlstream
+        xs.sendStreamError = streamErrors.append
+        xs.makeConnection(self)
+        xs.dataReceived("<stream:stream xmlns='jabber:component:accept' "
+                         "xmlns:stream='http://etherx.jabber.org/streams'>")
+        self.assertEqual(1, len(streamErrors))
+        self.assertEqual('improper-addressing', streamErrors[-1].condition)
+
+
+    def test_onElement(self):
+        """
+        We expect a handshake element with a hash.
+        """
+        handshakes = []
+
+        xs = self.xmlstream
+        xs.authenticator.onHandshake = handshakes.append
+
+        handshake = domish.Element(('jabber:component:accept', 'handshake'))
+        handshake.addContent('1234')
+        xs.authenticator.onElement(handshake)
+        self.assertEqual('1234', handshakes[-1])
+
+    def test_onElementNotHandshake(self):
+        """
+        Reject elements that are not handshakes.
+        """
+        handshakes = []
+        streamErrors = []
+
+        xs = self.xmlstream
+        xs.authenticator.onHandshake = handshakes.append
+        xs.sendStreamError = streamErrors.append
+
+        element = domish.Element(('jabber:component:accept', 'message'))
+        xs.authenticator.onElement(element)
+        self.assertFalse(handshakes)
+        self.assertEqual('not-authorized', streamErrors[-1].condition)
+
+
+    def test_onHandshake(self):
+        """
+        Receiving a handshake matching the secret authenticates the stream.
+        """
+        authd = []
+
+        def authenticated(xs):
+            authd.append(xs)
+
+        xs = self.xmlstream
+        xs.addOnetimeObserver(xmlstream.STREAM_AUTHD_EVENT, authenticated)
+        xs.sid = u'1234'
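+        # The expected handshake value, precomputed as the hex sha1 digest of
+        # the sid concatenated with the secret, sha1('1234' + 'secret').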
+        theHash = '32532c0f7dbf1253c095b18b18e36d38d94c1256'
+        xs.authenticator.onHandshake(theHash)
+        self.assertEqual('<handshake/>', self.output[-1])
+        self.assertEqual(1, len(authd))
+
+
+    def test_onHandshakeWrongHash(self):
+        """
+        Receiving a bad handshake should yield a stream error.
+        """
+        streamErrors = []
+        authd = []
+
+        def authenticated(xs):
+            authd.append(xs)
+
+        xs = self.xmlstream
+        xs.addOnetimeObserver(xmlstream.STREAM_AUTHD_EVENT, authenticated)
+        xs.sendStreamError = streamErrors.append
+
+        xs.sid = u'1234'
+        theHash = '1234'
+        xs.authenticator.onHandshake(theHash)
+        self.assertEqual('not-authorized', streamErrors[-1].condition)
+        self.assertEqual(0, len(authd))
+
+
+
+class XMPPComponentServerFactoryTest(unittest.TestCase):
+    """
+    Tests for L{component.XMPPComponentServerFactory}.
+    """
+
+    def setUp(self):
+        self.router = component.Router()
+        self.factory = component.XMPPComponentServerFactory(self.router,
+                                                            'secret')
+        self.xmlstream = self.factory.buildProtocol(None)
+        self.xmlstream.thisEntity = JID('component.example.org')
+
+
+    def test_makeConnection(self):
+        """
+        A new connection increases the stream serial count. No raw data
+        loggers are set up by default.
+        """
+        self.xmlstream.dispatch(self.xmlstream,
+                                xmlstream.STREAM_CONNECTED_EVENT)
+        self.assertEqual(0, self.xmlstream.serial)
+        self.assertEqual(1, self.factory.serial)
+        self.assertIdentical(None, self.xmlstream.rawDataInFn)
+        self.assertIdentical(None, self.xmlstream.rawDataOutFn)
+
+
+    def test_makeConnectionLogTraffic(self):
+        """
+        Setting logTraffic should set up raw data loggers.
+        """
+        self.factory.logTraffic = True
+        self.xmlstream.dispatch(self.xmlstream,
+                                xmlstream.STREAM_CONNECTED_EVENT)
+        self.assertNotIdentical(None, self.xmlstream.rawDataInFn)
+        self.assertNotIdentical(None, self.xmlstream.rawDataOutFn)
+
+
+    def test_onError(self):
+        """
+        An observer for stream errors should trigger onError to log it.
+        """
+        self.xmlstream.dispatch(self.xmlstream,
+                                xmlstream.STREAM_CONNECTED_EVENT)
+
+        class TestError(Exception):
+            pass
+
+        reason = failure.Failure(TestError())
+        self.xmlstream.dispatch(reason, xmlstream.STREAM_ERROR_EVENT)
+        self.assertEqual(1, len(self.flushLoggedErrors(TestError)))
+
+
+    def test_connectionInitialized(self):
+        """
+        Make sure a new stream is added to the routing table.
+        """
+        self.xmlstream.dispatch(self.xmlstream, xmlstream.STREAM_AUTHD_EVENT)
+        self.assertIn('component.example.org', self.router.routes)
+        self.assertIdentical(self.xmlstream,
+                             self.router.routes['component.example.org'])
+
+
+    def test_connectionLost(self):
+        """
+        Make sure a stream is removed from the routing table on disconnect.
+        """
+        self.xmlstream.dispatch(self.xmlstream, xmlstream.STREAM_AUTHD_EVENT)
+        self.xmlstream.dispatch(None, xmlstream.STREAM_END_EVENT)
+        self.assertNotIn('component.example.org', self.router.routes)
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabbererror.py b/ThirdParty/Twisted/twisted/words/test/test_jabbererror.py
new file mode 100644
index 0000000..45d8dac
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabbererror.py
@@ -0,0 +1,342 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.jabber.error}.
+"""
+
+from twisted.trial import unittest
+
+from twisted.words.protocols.jabber import error
+from twisted.words.xish import domish
+
+NS_XML = 'http://www.w3.org/XML/1998/namespace'
+NS_STREAMS = 'http://etherx.jabber.org/streams'
+NS_XMPP_STREAMS = 'urn:ietf:params:xml:ns:xmpp-streams'
+NS_XMPP_STANZAS = 'urn:ietf:params:xml:ns:xmpp-stanzas'
+
+class BaseErrorTest(unittest.TestCase):
+
+    def test_getElementPlain(self):
+        """
+        Test getting an element for a plain error.
+        """
+        e = error.BaseError('feature-not-implemented')
+        element = e.getElement()
+        self.assertIdentical(element.uri, None)
+        self.assertEqual(len(element.children), 1)
+
+    def test_getElementText(self):
+        """
+        Test getting an element for an error with a text.
+        """
+        e = error.BaseError('feature-not-implemented', 'text')
+        element = e.getElement()
+        self.assertEqual(len(element.children), 2)
+        self.assertEqual(unicode(element.text), 'text')
+        self.assertEqual(element.text.getAttribute((NS_XML, 'lang')), None)
+
+    def test_getElementTextLang(self):
+        """
+        Test getting an element for an error with a text and language.
+        """
+        e = error.BaseError('feature-not-implemented', 'text', 'en_US')
+        element = e.getElement()
+        self.assertEqual(len(element.children), 2)
+        self.assertEqual(unicode(element.text), 'text')
+        self.assertEqual(element.text[(NS_XML, 'lang')], 'en_US')
+
+    def test_getElementAppCondition(self):
+        """
+        Test getting an element for an error with an app specific condition.
+        """
+        ac = domish.Element(('testns', 'myerror'))
+        e = error.BaseError('feature-not-implemented', appCondition=ac)
+        element = e.getElement()
+        self.assertEqual(len(element.children), 2)
+        self.assertEqual(element.myerror, ac)
+
+class StreamErrorTest(unittest.TestCase):
+
+    def test_getElementPlain(self):
+        """
+        Test namespace of the element representation of an error.
+        """
+        e = error.StreamError('feature-not-implemented')
+        element = e.getElement()
+        self.assertEqual(element.uri, NS_STREAMS)
+
+    def test_getElementConditionNamespace(self):
+        """
+        Test that the error condition element has the correct namespace.
+        """
+        e = error.StreamError('feature-not-implemented')
+        element = e.getElement()
+        self.assertEqual(NS_XMPP_STREAMS, getattr(element, 'feature-not-implemented').uri)
+
+    def test_getElementTextNamespace(self):
+        """
+        Test that the error text element has the correct namespace.
+        """
+        e = error.StreamError('feature-not-implemented', 'text')
+        element = e.getElement()
+        self.assertEqual(NS_XMPP_STREAMS, element.text.uri)
+
+
+
+class StanzaErrorTest(unittest.TestCase):
+    """
+    Tests for L{error.StanzaError}.
+    """
+
+
+    def test_typeRemoteServerTimeout(self):
+        """
+        Remote Server Timeout should yield type wait, code 504.
+        """
+        e = error.StanzaError('remote-server-timeout')
+        self.assertEqual('wait', e.type)
+        self.assertEqual('504', e.code)
+
+
+    def test_getElementPlain(self):
+        """
+        Test getting an element for a plain stanza error.
+        """
+        e = error.StanzaError('feature-not-implemented')
+        element = e.getElement()
+        self.assertEqual(element.uri, None)
+        self.assertEqual(element['type'], 'cancel')
+        self.assertEqual(element['code'], '501')
+
+
+    def test_getElementType(self):
+        """
+        Test getting an element for a stanza error with a given type.
+        """
+        e = error.StanzaError('feature-not-implemented', 'auth')
+        element = e.getElement()
+        self.assertEqual(element.uri, None)
+        self.assertEqual(element['type'], 'auth')
+        self.assertEqual(element['code'], '501')
+
+
+    def test_getElementConditionNamespace(self):
+        """
+        Test that the error condition element has the correct namespace.
+        """
+        e = error.StanzaError('feature-not-implemented')
+        element = e.getElement()
+        self.assertEqual(NS_XMPP_STANZAS, getattr(element, 'feature-not-implemented').uri)
+
+
+    def test_getElementTextNamespace(self):
+        """
+        Test that the error text element has the correct namespace.
+        """
+        e = error.StanzaError('feature-not-implemented', text='text')
+        element = e.getElement()
+        self.assertEqual(NS_XMPP_STANZAS, element.text.uri)
+
+
+    def test_toResponse(self):
+        """
+        Test an error response is generated from a stanza.
+
+        The addressing on the (new) response stanza should be reversed, an
+        error child (with proper properties) added and the type set to
+        C{'error'}.
+        """
+        stanza = domish.Element(('jabber:client', 'message'))
+        stanza['type'] = 'chat'
+        stanza['to'] = 'user1@example.com'
+        stanza['from'] = 'user2@example.com/resource'
+        e = error.StanzaError('service-unavailable')
+        response = e.toResponse(stanza)
+        self.assertNotIdentical(response, stanza)
+        self.assertEqual(response['from'], 'user1@example.com')
+        self.assertEqual(response['to'], 'user2@example.com/resource')
+        self.assertEqual(response['type'], 'error')
+        self.assertEqual(response.error.children[0].name,
+                         'service-unavailable')
+        self.assertEqual(response.error['type'], 'cancel')
+        self.assertNotEqual(stanza.children, response.children)
+
+
+
+class ParseErrorTest(unittest.TestCase):
+    """
+    Tests for L{error._parseError}.
+    """
+
+
+    def setUp(self):
+        self.error = domish.Element((None, 'error'))
+
+
+    def test_empty(self):
+        """
+        Test parsing of the empty error element.
+        """
+        result = error._parseError(self.error, 'errorns')
+        self.assertEqual({'condition': None,
+                          'text': None,
+                          'textLang': None,
+                          'appCondition': None}, result)
+
+
+    def test_condition(self):
+        """
+        Test parsing of an error element with a condition.
+        """
+        self.error.addElement(('errorns', 'bad-request'))
+        result = error._parseError(self.error, 'errorns')
+        self.assertEqual('bad-request', result['condition'])
+
+
+    def test_text(self):
+        """
+        Test parsing of an error element with a text.
+        """
+        text = self.error.addElement(('errorns', 'text'))
+        text.addContent('test')
+        result = error._parseError(self.error, 'errorns')
+        self.assertEqual('test', result['text'])
+        self.assertEqual(None, result['textLang'])
+
+
+    def test_textLang(self):
+        """
+        Test parsing of an error element with a text with a defined language.
+        """
+        text = self.error.addElement(('errorns', 'text'))
+        text[NS_XML, 'lang'] = 'en_US'
+        text.addContent('test')
+        result = error._parseError(self.error, 'errorns')
+        self.assertEqual('en_US', result['textLang'])
+
+
+    def test_textLangInherited(self):
+        """
+        Test parsing of an error element with a text with inherited language.
+        """
+        text = self.error.addElement(('errorns', 'text'))
+        self.error[NS_XML, 'lang'] = 'en_US'
+        text.addContent('test')
+        result = error._parseError(self.error, 'errorns')
+        self.assertEqual('en_US', result['textLang'])
+    test_textLangInherited.todo = "xml:lang inheritance not implemented"
+
+
+    def test_appCondition(self):
+        """
+        Test parsing of an error element with an app specific condition.
+        """
+        condition = self.error.addElement(('testns', 'condition'))
+        result = error._parseError(self.error, 'errorns')
+        self.assertEqual(condition, result['appCondition'])
+
+
+    def test_appConditionMultiple(self):
+        """
+        Test parsing of an error element with multiple app specific conditions.
+        """
+        self.error.addElement(('testns', 'condition'))
+        condition = self.error.addElement(('testns', 'condition2'))
+        result = error._parseError(self.error, 'errorns')
+        self.assertEqual(condition, result['appCondition'])
+
+
+
+class ExceptionFromStanzaTest(unittest.TestCase):
+
+    def test_basic(self):
+        """
+        Test basic operations of exceptionFromStanza.
+
+        Given a realistic stanza, check if a sane exception is returned.
+
+        Using this stanza::
+
+          <iq type='error'
+              from='pubsub.shakespeare.lit'
+              to='francisco@denmark.lit/barracks'
+              id='subscriptions1'>
+            <pubsub xmlns='http://jabber.org/protocol/pubsub'>
+              <subscriptions/>
+            </pubsub>
+            <error type='cancel'>
+              <feature-not-implemented
+                xmlns='urn:ietf:params:xml:ns:xmpp-stanzas'/>
+              <unsupported xmlns='http://jabber.org/protocol/pubsub#errors'
+                           feature='retrieve-subscriptions'/>
+            </error>
+          </iq>
+        """
+
+        stanza = domish.Element((None, 'stanza'))
+        p = stanza.addElement(('http://jabber.org/protocol/pubsub', 'pubsub'))
+        p.addElement('subscriptions')
+        e = stanza.addElement('error')
+        e['type'] = 'cancel'
+        e.addElement((NS_XMPP_STANZAS, 'feature-not-implemented'))
+        uc = e.addElement(('http://jabber.org/protocol/pubsub#errors',
+                           'unsupported'))
+        uc['feature'] = 'retrieve-subscriptions'
+
+        result = error.exceptionFromStanza(stanza)
+        self.assert_(isinstance(result, error.StanzaError))
+        self.assertEqual('feature-not-implemented', result.condition)
+        self.assertEqual('cancel', result.type)
+        self.assertEqual(uc, result.appCondition)
+        self.assertEqual([p], result.children)
+
+    def test_legacy(self):
+        """
+        Test legacy operations of exceptionFromStanza.
+
+        Given a realistic stanza with only legacy (pre-XMPP) error information,
+        check if a sane exception is returned.
+
+        Using this stanza::
+
+          <message type='error'
+                   to='piers@pipetree.com/Home'
+                   from='qmacro@jaber.org'>
+            <body>Are you there?</body>
+            <error code='502'>Unable to resolve hostname.</error>
+          </message>
+        """
+        stanza = domish.Element((None, 'stanza'))
+        p = stanza.addElement('body', content='Are you there?')
+        e = stanza.addElement('error', content='Unable to resolve hostname.')
+        e['code'] = '502'
+
+        result = error.exceptionFromStanza(stanza)
+        self.assert_(isinstance(result, error.StanzaError))
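+        # The legacy code '502' is presumably translated via the standard
+        # legacy-to-XMPP error mapping (cf. XEP-0086), yielding the
+        # condition and type asserted below.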
+        self.assertEqual('service-unavailable', result.condition)
+        self.assertEqual('wait', result.type)
+        self.assertEqual('Unable to resolve hostname.', result.text)
+        self.assertEqual([p], result.children)
+
+class ExceptionFromStreamErrorTest(unittest.TestCase):
+
+    def test_basic(self):
+        """
+        Test basic operations of exceptionFromStreamError.
+
+        Given a realistic stream error, check if a sane exception is returned.
+
+        Using this error::
+
+          <stream:error xmlns:stream='http://etherx.jabber.org/streams'>
+            <xml-not-well-formed xmlns='urn:ietf:params:xml:ns:xmpp-streams'/>
+          </stream:error>
+        """
+
+        e = domish.Element(('http://etherx.jabber.org/streams', 'error'))
+        e.addElement((NS_XMPP_STREAMS, 'xml-not-well-formed'))
+
+        result = error.exceptionFromStreamError(e)
+        self.assert_(isinstance(result, error.StreamError))
+        self.assertEqual('xml-not-well-formed', result.condition)
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabberjid.py b/ThirdParty/Twisted/twisted/words/test/test_jabberjid.py
new file mode 100644
index 0000000..fa3a119
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabberjid.py
@@ -0,0 +1,225 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.jabber.jid}.
+"""
+
+from twisted.trial import unittest
+
+from twisted.words.protocols.jabber import jid
+
+class JIDParsingTest(unittest.TestCase):
+    def test_parse(self):
+        """
+        Test different forms of JIDs.
+        """
+        # Basic forms
+        self.assertEqual(jid.parse("user at host/resource"),
+                          ("user", "host", "resource"))
+        self.assertEqual(jid.parse("user at host"),
+                          ("user", "host", None))
+        self.assertEqual(jid.parse("host"),
+                          (None, "host", None))
+        self.assertEqual(jid.parse("host/resource"),
+                          (None, "host", "resource"))
+
+        # More interesting forms
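+        # (Sketch of the apparent rule: the resource is everything after the
+        # first '/', and a user part is only split off an '@' that appears
+        # before that first '/'.)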
+        self.assertEqual(jid.parse("foo/bar at baz"),
+                          (None, "foo", "bar at baz"))
+        self.assertEqual(jid.parse("boo at foo/bar at baz"),
+                          ("boo", "foo", "bar at baz"))
+        self.assertEqual(jid.parse("boo at foo/bar/baz"),
+                          ("boo", "foo", "bar/baz"))
+        self.assertEqual(jid.parse("boo/foo at bar@baz"),
+                          (None, "boo", "foo at bar@baz"))
+        self.assertEqual(jid.parse("boo/foo/bar"),
+                          (None, "boo", "foo/bar"))
+        self.assertEqual(jid.parse("boo//foo"),
+                          (None, "boo", "/foo"))
+
+    def test_noHost(self):
+        """
+        Test for failure on no host part.
+        """
+        self.assertRaises(jid.InvalidFormat, jid.parse, "user@")
+
+    def test_doubleAt(self):
+        """
+        Test for failure on double @ signs.
+
+        This should fail because @ is not a valid character for the host
+        part of the JID.
+        """
+        self.assertRaises(jid.InvalidFormat, jid.parse, "user@@host")
+
+    def test_multipleAt(self):
+        """
+        Test for failure on two @ signs.
+
+        This should fail because @ is not a valid character for the host
+        part of the JID.
+        """
+        self.assertRaises(jid.InvalidFormat, jid.parse, "user at host@host")
+
+    # Basic tests for case mapping. These are fallback tests for the
+    # prepping done in twisted.words.protocols.jabber.xmpp_stringprep
+
+    def test_prepCaseMapUser(self):
+        """
+        Test case mapping of the user part of the JID.
+        """
+        self.assertEqual(jid.prep("UsEr", "host", "resource"),
+                          ("user", "host", "resource"))
+
+    def test_prepCaseMapHost(self):
+        """
+        Test case mapping of the host part of the JID.
+        """
+        self.assertEqual(jid.prep("user", "hoST", "resource"),
+                          ("user", "host", "resource"))
+
+    def test_prepNoCaseMapResource(self):
+        """
+        Test no case mapping of the resource part of the JID.
+        """
+        self.assertEqual(jid.prep("user", "hoST", "resource"),
+                          ("user", "host", "resource"))
+        self.assertNotEquals(jid.prep("user", "host", "Resource"),
+                             ("user", "host", "resource"))
+
+class JIDTest(unittest.TestCase):
+
+    def test_noneArguments(self):
+        """
+        Test that using no arguments raises an exception.
+        """
+        self.assertRaises(RuntimeError, jid.JID)
+
+    def test_attributes(self):
+        """
+        Test that the attributes correspond with the JID parts.
+        """
+        j = jid.JID("user at host/resource")
+        self.assertEqual(j.user, "user")
+        self.assertEqual(j.host, "host")
+        self.assertEqual(j.resource, "resource")
+
+    def test_userhost(self):
+        """
+        Test the extraction of the bare JID.
+        """
+        j = jid.JID("user at host/resource")
+        self.assertEqual("user at host", j.userhost())
+
+    def test_userhostOnlyHost(self):
+        """
+        Test the extraction of the bare JID of the full form host/resource.
+        """
+        j = jid.JID("host/resource")
+        self.assertEqual("host", j.userhost())
+
+    def test_userhostJID(self):
+        """
+        Test getting a JID object of the bare JID.
+        """
+        j1 = jid.JID("user at host/resource")
+        j2 = jid.internJID("user at host")
+        self.assertIdentical(j2, j1.userhostJID())
+
+    def test_userhostJIDNoResource(self):
+        """
+        Test getting a JID object of the bare JID when there was no resource.
+        """
+        j = jid.JID("user at host")
+        self.assertIdentical(j, j.userhostJID())
+
+    def test_fullHost(self):
+        """
+        Test giving a string representation of the JID with only a host part.
+        """
+        j = jid.JID(tuple=(None, 'host', None))
+        self.assertEqual('host', j.full())
+
+    def test_fullHostResource(self):
+        """
+        Test giving a string representation of the JID with host, resource.
+        """
+        j = jid.JID(tuple=(None, 'host', 'resource'))
+        self.assertEqual('host/resource', j.full())
+
+    def test_fullUserHost(self):
+        """
+        Test giving a string representation of the JID with user, host.
+        """
+        j = jid.JID(tuple=('user', 'host', None))
+        self.assertEqual('user@host', j.full())
+
+    def test_fullAll(self):
+        """
+        Test giving a string representation of the JID.
+        """
+        j = jid.JID(tuple=('user', 'host', 'resource'))
+        self.assertEqual('user@host/resource', j.full())
+
+    def test_equality(self):
+        """
+        Test JID equality.
+        """
+        j1 = jid.JID("user at host/resource")
+        j2 = jid.JID("user at host/resource")
+        self.assertNotIdentical(j1, j2)
+        self.assertEqual(j1, j2)
+
+    def test_equalityWithNonJIDs(self):
+        """
+        Test JID equality with a non-JID object.
+        """
+        j = jid.JID("user at host/resource")
+        self.assertFalse(j == 'user at host/resource')
+
+    def test_inequality(self):
+        """
+        Test JID inequality.
+        """
+        j1 = jid.JID("user1 at host/resource")
+        j2 = jid.JID("user2 at host/resource")
+        self.assertNotEqual(j1, j2)
+
+    def test_inequalityWithNonJIDs(self):
+        """
+        Test JID inequality with a non-JID object.
+        """
+        j = jid.JID("user at host/resource")
+        self.assertNotEqual(j, 'user at host/resource')
+
+    def test_hashable(self):
+        """
+        Test JID hashability.
+        """
+        j1 = jid.JID("user at host/resource")
+        j2 = jid.JID("user at host/resource")
+        self.assertEqual(hash(j1), hash(j2))
+
+    def test_unicode(self):
+        """
+        Test unicode representation of JIDs.
+        """
+        j = jid.JID(tuple=('user', 'host', 'resource'))
+        self.assertEqual("user at host/resource", unicode(j))
+
+    def test_repr(self):
+        """
+        Test representation of JID objects.
+        """
+        j = jid.JID(tuple=('user', 'host', 'resource'))
+        self.assertEqual("JID(u'user at host/resource')", repr(j))
+
+class InternJIDTest(unittest.TestCase):
+    def test_identity(self):
+        """
+        Test that two interned JIDs yield the same object.
+        """
+        j1 = jid.internJID("user at host")
+        j2 = jid.internJID("user at host")
+        self.assertIdentical(j1, j2)
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabberjstrports.py b/ThirdParty/Twisted/twisted/words/test/test_jabberjstrports.py
new file mode 100644
index 0000000..6d8f045
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabberjstrports.py
@@ -0,0 +1,34 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.jabber.jstrports}.
+"""
+
+from twisted.trial import unittest
+
+from twisted.words.protocols.jabber import jstrports
+from twisted.application.internet import TCPClient
+
+
+class JabberStrPortsPlaceHolderTest(unittest.TestCase):
+    """
+    Tests for L{jstrports}
+    """
+
+    def test_parse(self):
+        """
+        L{jstrports.parse} accepts an endpoint description string and returns a
+        tuple and dict of parsed endpoint arguments.
+        """
+        expected = ('TCP', ('DOMAIN', 65535, 'Factory'), {})
+        got = jstrports.parse("tcp:DOMAIN:65535", "Factory")
+        self.assertEqual(expected, got)
+
+
+    def test_client(self):
+        """
+        L{jstrports.client} returns a L{TCPClient} service.
+        """
+        got = jstrports.client("tcp:DOMAIN:65535", "Factory")
+        self.assertIsInstance(got, TCPClient)
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabbersasl.py b/ThirdParty/Twisted/twisted/words/test/test_jabbersasl.py
new file mode 100644
index 0000000..b22f956
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabbersasl.py
@@ -0,0 +1,272 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from zope.interface import implements
+from twisted.internet import defer
+from twisted.trial import unittest
+from twisted.words.protocols.jabber import sasl, sasl_mechanisms, xmlstream, jid
+from twisted.words.xish import domish
+
+NS_XMPP_SASL = 'urn:ietf:params:xml:ns:xmpp-sasl'
+
+class DummySASLMechanism(object):
+    """
+    Dummy SASL mechanism.
+
+    This just returns the initialResponse passed on creation, stores any
+    challenges and replies with an empty response.
+
+    @ivar challenge: Last received challenge.
+    @type challenge: C{unicode}.
+    @ivar initialResponse: Initial response to be returned when requested
+                           via C{getInitialResponse} or C{None}.
+    @type initialResponse: C{unicode}
+    """
+
+    implements(sasl_mechanisms.ISASLMechanism)
+
+    challenge = None
+    name = "DUMMY"
+
+    def __init__(self, initialResponse):
+        self.initialResponse = initialResponse
+
+    def getInitialResponse(self):
+        return self.initialResponse
+
+    def getResponse(self, challenge):
+        self.challenge = challenge
+        return ""
+
+class DummySASLInitiatingInitializer(sasl.SASLInitiatingInitializer):
+    """
+    Dummy SASL Initializer for initiating entities.
+
+    This hardwires the SASL mechanism to L{DummySASLMechanism}, which is
+    instantiated with the value of C{initialResponse}.
+
+    @ivar initialResponse: The initial response to be returned by the
+                           dummy SASL mechanism or C{None}.
+    @type initialResponse: C{unicode}.
+    """
+
+    initialResponse = None
+
+    def setMechanism(self):
+        self.mechanism = DummySASLMechanism(self.initialResponse)
+
+
+
+class SASLInitiatingInitializerTest(unittest.TestCase):
+    """
+    Tests for L{sasl.SASLInitiatingInitializer}
+    """
+
+    def setUp(self):
+        self.output = []
+
+        self.authenticator = xmlstream.Authenticator()
+        self.xmlstream = xmlstream.XmlStream(self.authenticator)
+        self.xmlstream.send = self.output.append
+        self.xmlstream.connectionMade()
+        self.xmlstream.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345' version='1.0'>")
+        self.init = DummySASLInitiatingInitializer(self.xmlstream)
+
+
+    def test_onFailure(self):
+        """
+        Test that the SASL error condition is correctly extracted.
+        """
+        failure = domish.Element(('urn:ietf:params:xml:ns:xmpp-sasl',
+                                  'failure'))
+        failure.addElement('not-authorized')
+        self.init._deferred = defer.Deferred()
+        self.init.onFailure(failure)
+        self.assertFailure(self.init._deferred, sasl.SASLAuthError)
+        self.init._deferred.addCallback(lambda e:
+                                        self.assertEqual('not-authorized',
+                                                          e.condition))
+        return self.init._deferred
+
+
+    def test_sendAuthInitialResponse(self):
+        """
+        Test starting authentication with an initial response.
+        """
+        self.init.initialResponse = "dummy"
+        self.init.start()
+        auth = self.output[0]
+        self.assertEqual(NS_XMPP_SASL, auth.uri)
+        self.assertEqual('auth', auth.name)
+        self.assertEqual('DUMMY', auth['mechanism'])
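+        # 'ZHVtbXk=' is the base64 encoding of the initial response 'dummy'.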
+        self.assertEqual('ZHVtbXk=', str(auth))
+
+
+    def test_sendAuthNoInitialResponse(self):
+        """
+        Test starting authentication without an initial response.
+        """
+        self.init.initialResponse = None
+        self.init.start()
+        auth = self.output[0]
+        self.assertEqual('', str(auth))
+
+
+    def test_sendAuthEmptyInitialResponse(self):
+        """
+        Test starting authentication where the initial response is empty.
+        """
+        self.init.initialResponse = ""
+        self.init.start()
+        auth = self.output[0]
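+        # Per the XMPP SASL profile, a zero-length initial response is
+        # transmitted as a single '=' character.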
+        self.assertEqual('=', str(auth))
+
+
+    def test_onChallenge(self):
+        """
+        Test receiving a challenge message.
+        """
+        d = self.init.start()
+        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
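+        # 'bXkgY2hhbGxlbmdl' is the base64 encoding of 'my challenge'.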
+        challenge.addContent('bXkgY2hhbGxlbmdl')
+        self.init.onChallenge(challenge)
+        self.assertEqual('my challenge', self.init.mechanism.challenge)
+        self.init.onSuccess(None)
+        return d
+
+
+    def test_onChallengeEmpty(self):
+        """
+        Test receiving an empty challenge message.
+        """
+        d = self.init.start()
+        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
+        self.init.onChallenge(challenge)
+        self.assertEqual('', self.init.mechanism.challenge)
+        self.init.onSuccess(None)
+        return d
+
+
+    def test_onChallengeIllegalPadding(self):
+        """
+        Test receiving a challenge message with illegal padding.
+        """
+        d = self.init.start()
+        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
+        challenge.addContent('bXkg=Y2hhbGxlbmdl')
+        self.init.onChallenge(challenge)
+        self.assertFailure(d, sasl.SASLIncorrectEncodingError)
+        return d
+
+
+    def test_onChallengeIllegalCharacters(self):
+        """
+        Test receiving a challenge message with illegal characters.
+        """
+        d = self.init.start()
+        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
+        challenge.addContent('bXkg*Y2hhbGxlbmdl')
+        self.init.onChallenge(challenge)
+        self.assertFailure(d, sasl.SASLIncorrectEncodingError)
+        return d
+
+
+    def test_onChallengeMalformed(self):
+        """
+        Test receiving a malformed challenge message.
+        """
+        d = self.init.start()
+        challenge = domish.Element((NS_XMPP_SASL, 'challenge'))
+        challenge.addContent('a')
+        self.init.onChallenge(challenge)
+        self.assertFailure(d, sasl.SASLIncorrectEncodingError)
+        return d
+
+
+class SASLInitiatingInitializerSetMechanismTest(unittest.TestCase):
+    """
+    Test for L{sasl.SASLInitiatingInitializer.setMechanism}.
+    """
+
+    def setUp(self):
+        self.output = []
+
+        self.authenticator = xmlstream.Authenticator()
+        self.xmlstream = xmlstream.XmlStream(self.authenticator)
+        self.xmlstream.send = self.output.append
+        self.xmlstream.connectionMade()
+        self.xmlstream.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345' version='1.0'>")
+
+        self.init = sasl.SASLInitiatingInitializer(self.xmlstream)
+
+
+    def _setMechanism(self, name):
+        """
+        Set up the XML Stream to have a SASL feature with the given mechanism.
+        """
+        feature = domish.Element((NS_XMPP_SASL, 'mechanisms'))
+        feature.addElement('mechanism', content=name)
+        self.xmlstream.features[(feature.uri, feature.name)] = feature
+
+        self.init.setMechanism()
+        return self.init.mechanism.name
+
+
+    def test_anonymous(self):
+        """
+        Test setting ANONYMOUS as the authentication mechanism.
+        """
+        self.authenticator.jid = jid.JID('example.com')
+        self.authenticator.password = None
+        name = "ANONYMOUS"
+
+        self.assertEqual(name, self._setMechanism(name))
+
+
+    def test_plain(self):
+        """
+        Test setting PLAIN as the authentication mechanism.
+        """
+        self.authenticator.jid = jid.JID('test@example.com')
+        self.authenticator.password = 'secret'
+        name = "PLAIN"
+
+        self.assertEqual(name, self._setMechanism(name))
+
+
+    def test_digest(self):
+        """
+        Test setting DIGEST-MD5 as the authentication mechanism.
+        """
+        self.authenticator.jid = jid.JID('test@example.com')
+        self.authenticator.password = 'secret'
+        name = "DIGEST-MD5"
+
+        self.assertEqual(name, self._setMechanism(name))
+
+
+    def test_notAcceptable(self):
+        """
+        Test using an unacceptable SASL authentication mechanism.
+        """
+
+        self.authenticator.jid = jid.JID('test@example.com')
+        self.authenticator.password = 'secret'
+
+        self.assertRaises(sasl.SASLNoAcceptableMechanism,
+                          self._setMechanism, 'SOMETHING_UNACCEPTABLE')
+
+
+    def test_notAcceptableWithoutUser(self):
+        """
+        Test using an unacceptable SASL authentication mechanism with no JID.
+        """
+        self.authenticator.jid = jid.JID('example.com')
+        self.authenticator.password = 'secret'
+
+        self.assertRaises(sasl.SASLNoAcceptableMechanism,
+                          self._setMechanism, 'SOMETHING_UNACCEPTABLE')
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabbersaslmechanisms.py b/ThirdParty/Twisted/twisted/words/test/test_jabbersaslmechanisms.py
new file mode 100644
index 0000000..1e195ab
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabbersaslmechanisms.py
@@ -0,0 +1,90 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.jabber.sasl_mechanisms}.
+"""
+
+from twisted.trial import unittest
+
+from twisted.words.protocols.jabber import sasl_mechanisms
+
+class PlainTest(unittest.TestCase):
+    def test_getInitialResponse(self):
+        """
+        Test the initial response.
+        """
+        m = sasl_mechanisms.Plain(None, 'test', 'secret')
+        self.assertEqual(m.getInitialResponse(), '\x00test\x00secret')
+
+
+
+class AnonymousTest(unittest.TestCase):
+    """
+    Tests for L{twisted.words.protocols.jabber.sasl_mechanisms.Anonymous}.
+    """
+    def test_getInitialResponse(self):
+        """
+        Test that the initial response is empty.
+        """
+        m = sasl_mechanisms.Anonymous()
+        self.assertEqual(m.getInitialResponse(), None)
+
+
+
+class DigestMD5Test(unittest.TestCase):
+    def setUp(self):
+        self.mechanism = sasl_mechanisms.DigestMD5('xmpp', 'example.org', None,
+                                                   'test', 'secret')
+
+
+    def test_getInitialResponse(self):
+        """
+        Test that no initial response is generated.
+        """
+        self.assertIdentical(self.mechanism.getInitialResponse(), None)
+
+    def test_getResponse(self):
+        """
+        Partially test challenge response.
+
+        Does not actually test the response-value, yet.
+        """
+
+        challenge = 'realm="localhost",nonce="1234",qop="auth",charset=utf-8,algorithm=md5-sess'
+        directives = self.mechanism._parse(self.mechanism.getResponse(challenge))
+        self.assertEqual(directives['username'], 'test')
+        self.assertEqual(directives['nonce'], '1234')
+        self.assertEqual(directives['nc'], '00000001')
+        self.assertEqual(directives['qop'], ['auth'])
+        self.assertEqual(directives['charset'], 'utf-8')
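+        # digest-uri takes the form '<serv-type>/<host>', here
+        # 'xmpp/example.org'.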
+        self.assertEqual(directives['digest-uri'], 'xmpp/example.org')
+        self.assertEqual(directives['realm'], 'localhost')
+
+    def test_getResponseNoRealm(self):
+        """
+        Test that we accept challenges without realm.
+
+        The realm should default to the host part of the JID.
+        """
+
+        challenge = 'nonce="1234",qop="auth",charset=utf-8,algorithm=md5-sess'
+        directives = self.mechanism._parse(self.mechanism.getResponse(challenge))
+        self.assertEqual(directives['realm'], 'example.org')
+
+    def test__parse(self):
+        """
+        Test challenge decoding.
+
+        Specifically, check for multiple values for the C{qop} and C{cipher}
+        directives.
+        """
+        challenge = 'nonce="1234",qop="auth,auth-conf",charset=utf-8,' \
+                    'algorithm=md5-sess,cipher="des,3des"'
+        directives = self.mechanism._parse(challenge)
+        self.assertEqual('1234', directives['nonce'])
+        self.assertEqual('utf-8', directives['charset'])
+        self.assertIn('auth', directives['qop'])
+        self.assertIn('auth-conf', directives['qop'])
+        self.assertIn('des', directives['cipher'])
+        self.assertIn('3des', directives['cipher'])
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabberxmlstream.py b/ThirdParty/Twisted/twisted/words/test/test_jabberxmlstream.py
new file mode 100644
index 0000000..caa2ba6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabberxmlstream.py
@@ -0,0 +1,1334 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.jabber.xmlstream}.
+"""
+
+from twisted.trial import unittest
+
+from zope.interface.verify import verifyObject
+
+from twisted.internet import defer, task
+from twisted.internet.error import ConnectionLost
+from twisted.internet.interfaces import IProtocolFactory
+from twisted.python import failure
+from twisted.test import proto_helpers
+from twisted.words.test.test_xmlstream import GenericXmlStreamFactoryTestsMixin
+from twisted.words.xish import domish
+from twisted.words.protocols.jabber import error, ijabber, jid, xmlstream
+
+
+
+NS_XMPP_TLS = 'urn:ietf:params:xml:ns:xmpp-tls'
+
+
+
+class HashPasswordTest(unittest.TestCase):
+    """
+    Tests for L{xmlstream.hashPassword}.
+    """
+
+    def test_basic(self):
+        """
+        The sid and secret are concatenated to calculate the sha1 hex digest.
+        """
+        hash = xmlstream.hashPassword(u"12345", u"secret")
+        self.assertEqual('99567ee91b2c7cabf607f10cb9f4a3634fa820e0', hash)
+
+
+    def test_sidNotUnicode(self):
+        """
+        The session identifier must be a unicode object.
+        """
+        self.assertRaises(TypeError, xmlstream.hashPassword, "\xc2\xb92345",
+                                                             u"secret")
+
+
+    def test_passwordNotUnicode(self):
+        """
+        The password must be a unicode object.
+        """
+        self.assertRaises(TypeError, xmlstream.hashPassword, u"12345",
+                                                             "secr\xc3\xa9t")
+
+
+    def test_unicodeSecret(self):
+        """
+        The concatenated sid and password must be encoded to UTF-8 before hashing.
+        """
+        hash = xmlstream.hashPassword(u"12345", u"secr\u00e9t")
+        self.assertEqual('659bf88d8f8e179081f7f3b4a8e7d224652d2853', hash)
+
+
+
+class IQTest(unittest.TestCase):
+    """
+    Tests both IQ and the associated IIQResponseTracker callback.
+    """
+
+    def setUp(self):
+        authenticator = xmlstream.ConnectAuthenticator('otherhost')
+        authenticator.namespace = 'testns'
+        self.xmlstream = xmlstream.XmlStream(authenticator)
+        self.clock = task.Clock()
+        self.xmlstream._callLater = self.clock.callLater
+        self.xmlstream.makeConnection(proto_helpers.StringTransport())
+        self.xmlstream.dataReceived(
+           "<stream:stream xmlns:stream='http://etherx.jabber.org/streams' "
+                          "xmlns='testns' from='otherhost' version='1.0'>")
+        self.iq = xmlstream.IQ(self.xmlstream, 'get')
+
+
+    def testBasic(self):
+        self.assertEqual(self.iq['type'], 'get')
+        self.assertTrue(self.iq['id'])
+
+
+    def testSend(self):
+        self.xmlstream.transport.clear()
+        self.iq.send()
+        self.assertIn(self.xmlstream.transport.value(), [
+                      "<iq type='get' id='%s'/>" % self.iq['id'],
+                      "<iq id='%s' type='get'/>" % self.iq['id'],
+                      ])
+
+
+    def testResultResponse(self):
+        def cb(result):
+            self.assertEqual(result['type'], 'result')
+
+        d = self.iq.send()
+        d.addCallback(cb)
+
+        xs = self.xmlstream
+        xs.dataReceived("<iq type='result' id='%s'/>" % self.iq['id'])
+        return d
+
+
+    def testErrorResponse(self):
+        d = self.iq.send()
+        self.assertFailure(d, error.StanzaError)
+
+        xs = self.xmlstream
+        xs.dataReceived("<iq type='error' id='%s'/>" % self.iq['id'])
+        return d
+
+
+    def testNonTrackedResponse(self):
+        """
+        Test that untracked iq responses don't trigger any action.
+
+        Untracked means that the id of the incoming response iq is not
+        in the stream's C{iqDeferreds} dictionary.
+        """
+        xs = self.xmlstream
+        xmlstream.upgradeWithIQResponseTracker(xs)
+
+        # Make sure we aren't tracking any iq's.
+        self.assertFalse(xs.iqDeferreds)
+
+        # Set up a fallback handler that checks the stanza's handled attribute.
+        # If that is set to True, the iq tracker claims to have handled the
+        # response.
+        def cb(iq):
+            self.assertFalse(getattr(iq, 'handled', False))
+
+        xs.addObserver("/iq", cb, -1)
+
+        # Receive an untracked iq response
+        xs.dataReceived("<iq type='result' id='test'/>")
+
+
+    def testCleanup(self):
+        """
+        Test if the deferred associated with an iq request is removed
+        from the list kept in the L{XmlStream} object after it has
+        been fired.
+        """
+
+        d = self.iq.send()
+        xs = self.xmlstream
+        xs.dataReceived("<iq type='result' id='%s'/>" % self.iq['id'])
+        self.assertNotIn(self.iq['id'], xs.iqDeferreds)
+        return d
+
+
+    def testDisconnectCleanup(self):
+        """
+        Test if deferreds for iq's that haven't yet received a response
+        have their errback called on stream disconnect.
+        """
+
+        d = self.iq.send()
+        xs = self.xmlstream
+        xs.connectionLost("Closed by peer")
+        self.assertFailure(d, ConnectionLost)
+        return d
+
+
+    def testNoModifyingDict(self):
+        """
+        Test to make sure the errbacks cannot cause the iteration of the
+        iqDeferreds to blow up in our face.
+        """
+
+        def eb(failure):
+            d = xmlstream.IQ(self.xmlstream).send()
+            d.addErrback(eb)
+
+        d = self.iq.send()
+        d.addErrback(eb)
+        self.xmlstream.connectionLost("Closed by peer")
+        return d
+
+
+    def testRequestTimingOut(self):
+        """
+        Test that an iq request with a defined timeout times out.
+        """
+        self.iq.timeout = 60
+        d = self.iq.send()
+        self.assertFailure(d, xmlstream.TimeoutError)
+
+        self.clock.pump([1, 60])
+        self.assertFalse(self.clock.calls)
+        self.assertFalse(self.xmlstream.iqDeferreds)
+        return d
+
+
+    def testRequestNotTimingOut(self):
+        """
+        Test that an iq request with a defined timeout does not time out
+        when a response was received before the timeout period elapsed.
+        """
+        self.iq.timeout = 60
+        d = self.iq.send()
+        self.clock.callLater(1, self.xmlstream.dataReceived,
+                             "<iq type='result' id='%s'/>" % self.iq['id'])
+        self.clock.pump([1, 1])
+        self.assertFalse(self.clock.calls)
+        return d
+
+
+    def testDisconnectTimeoutCancellation(self):
+        """
+        Test if timeouts for iq's that haven't yet received a response
+        are cancelled on stream disconnect.
+        """
+
+        self.iq.timeout = 60
+        d = self.iq.send()
+
+        xs = self.xmlstream
+        xs.connectionLost("Closed by peer")
+        self.assertFailure(d, ConnectionLost)
+        self.assertFalse(self.clock.calls)
+        return d
+
+
+
+class XmlStreamTest(unittest.TestCase):
+
+    def onStreamStart(self, obj):
+        self.gotStreamStart = True
+
+
+    def onStreamEnd(self, obj):
+        self.gotStreamEnd = True
+
+
+    def onStreamError(self, obj):
+        self.gotStreamError = True
+
+
+    def setUp(self):
+        """
+        Set up XmlStream and several observers.
+        """
+        self.gotStreamStart = False
+        self.gotStreamEnd = False
+        self.gotStreamError = False
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        xs.addObserver('//event/stream/start', self.onStreamStart)
+        xs.addObserver('//event/stream/end', self.onStreamEnd)
+        xs.addObserver('//event/stream/error', self.onStreamError)
+        xs.makeConnection(proto_helpers.StringTransportWithDisconnection())
+        xs.transport.protocol = xs
+        xs.namespace = 'testns'
+        xs.version = (1, 0)
+        self.xmlstream = xs
+
+
+    def test_sendHeaderBasic(self):
+        """
+        Basic test on the header sent by sendHeader.
+        """
+        xs = self.xmlstream
+        xs.sendHeader()
+        splitHeader = self.xmlstream.transport.value()[0:-1].split(' ')
+        self.assertIn("<stream:stream", splitHeader)
+        self.assertIn("xmlns:stream='http://etherx.jabber.org/streams'",
+                      splitHeader)
+        self.assertIn("xmlns='testns'", splitHeader)
+        self.assertIn("version='1.0'", splitHeader)
+        self.assertTrue(xs._headerSent)
+
+
+    def test_sendHeaderAdditionalNamespaces(self):
+        """
+        Test for additional namespace declarations.
+        """
+        xs = self.xmlstream
+        xs.prefixes['jabber:server:dialback'] = 'db'
+        xs.sendHeader()
+        splitHeader = self.xmlstream.transport.value()[0:-1].split(' ')
+        self.assertIn("<stream:stream", splitHeader)
+        self.assertIn("xmlns:stream='http://etherx.jabber.org/streams'",
+                      splitHeader)
+        self.assertIn("xmlns:db='jabber:server:dialback'", splitHeader)
+        self.assertIn("xmlns='testns'", splitHeader)
+        self.assertIn("version='1.0'", splitHeader)
+        self.assertTrue(xs._headerSent)
+
+
+    def test_sendHeaderInitiating(self):
+        """
+        Test addressing when initiating a stream.
+        """
+        xs = self.xmlstream
+        xs.thisEntity = jid.JID('thisHost')
+        xs.otherEntity = jid.JID('otherHost')
+        xs.initiating = True
+        xs.sendHeader()
+        splitHeader = xs.transport.value()[0:-1].split(' ')
+        self.assertIn("to='otherhost'", splitHeader)
+        self.assertIn("from='thishost'", splitHeader)
+
+
+    def test_sendHeaderReceiving(self):
+        """
+        Test addressing when receiving a stream.
+        """
+        xs = self.xmlstream
+        xs.thisEntity = jid.JID('thisHost')
+        xs.otherEntity = jid.JID('otherHost')
+        xs.initiating = False
+        xs.sid = 'session01'
+        xs.sendHeader()
+        splitHeader = xs.transport.value()[0:-1].split(' ')
+        self.assertIn("to='otherhost'", splitHeader)
+        self.assertIn("from='thishost'", splitHeader)
+        self.assertIn("id='session01'", splitHeader)
+
+
+    def test_receiveStreamError(self):
+        """
+        Test events when a stream error is received.
+        """
+        xs = self.xmlstream
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345' version='1.0'>")
+        xs.dataReceived("<stream:error/>")
+        self.assertTrue(self.gotStreamError)
+        self.assertTrue(self.gotStreamEnd)
+
+
+    def test_sendStreamErrorInitiating(self):
+        """
+        Test sendStreamError on an initiating xmlstream with a header sent.
+
+        An error should be sent out and the connection lost.
+        """
+        xs = self.xmlstream
+        xs.initiating = True
+        xs.sendHeader()
+        xs.transport.clear()
+        xs.sendStreamError(error.StreamError('version-unsupported'))
+        self.assertNotEqual('', xs.transport.value())
+        self.assertTrue(self.gotStreamEnd)
+
+
+    def test_sendStreamErrorInitiatingNoHeader(self):
+        """
+        Test sendStreamError on an initiating xmlstream without having sent a
+        header.
+
+        In this case, no header should be generated. Also, the error should
+        not be sent out on the stream; the connection is just closed.
+        """
+        xs = self.xmlstream
+        xs.initiating = True
+        xs.transport.clear()
+        xs.sendStreamError(error.StreamError('version-unsupported'))
+        self.assertNot(xs._headerSent)
+        self.assertEqual('', xs.transport.value())
+        self.assertTrue(self.gotStreamEnd)
+
+
+    def test_sendStreamErrorReceiving(self):
+        """
+        Test sendStreamError on a receiving xmlstream with a header sent.
+
+        An error should be sent out and the connection lost.
+        """
+        xs = self.xmlstream
+        xs.initiating = False
+        xs.sendHeader()
+        xs.transport.clear()
+        xs.sendStreamError(error.StreamError('version-unsupported'))
+        self.assertNotEqual('', xs.transport.value())
+        self.assertTrue(self.gotStreamEnd)
+
+
+    def test_sendStreamErrorReceivingNoHeader(self):
+        """
+        Test sendStreamError on a receiving xmlstream without having sent a
+        header.
+
+        In this case, a header should be generated. Then, the error should
+        be sent out on the stream followed by closing the connection.
+        """
+        xs = self.xmlstream
+        xs.initiating = False
+        xs.transport.clear()
+        xs.sendStreamError(error.StreamError('version-unsupported'))
+        self.assertTrue(xs._headerSent)
+        self.assertNotEqual('', xs.transport.value())
+        self.assertTrue(self.gotStreamEnd)
+
+
+    def test_reset(self):
+        """
+        Test resetting the XML stream to start a new layer.
+        """
+        xs = self.xmlstream
+        xs.sendHeader()
+        stream = xs.stream
+        xs.reset()
+        self.assertNotEqual(stream, xs.stream)
+        self.assertNot(xs._headerSent)
+
+
+    def test_send(self):
+        """
+        Test send with various types of objects.
+        """
+        xs = self.xmlstream
+        xs.send('<presence/>')
+        self.assertEqual(xs.transport.value(), '<presence/>')
+
+        xs.transport.clear()
+        el = domish.Element(('testns', 'presence'))
+        xs.send(el)
+        self.assertEqual(xs.transport.value(), '<presence/>')
+
+        xs.transport.clear()
+        el = domish.Element(('http://etherx.jabber.org/streams', 'features'))
+        xs.send(el)
+        self.assertEqual(xs.transport.value(), '<stream:features/>')
+
+
+    def test_authenticator(self):
+        """
+        Test that the associated authenticator is correctly called.
+        """
+        connectionMadeCalls = []
+        streamStartedCalls = []
+        associateWithStreamCalls = []
+
+        class TestAuthenticator:
+            def connectionMade(self):
+                connectionMadeCalls.append(None)
+
+            def streamStarted(self, rootElement):
+                streamStartedCalls.append(rootElement)
+
+            def associateWithStream(self, xs):
+                associateWithStreamCalls.append(xs)
+
+        a = TestAuthenticator()
+        xs = xmlstream.XmlStream(a)
+        self.assertEqual([xs], associateWithStreamCalls)
+        xs.connectionMade()
+        self.assertEqual([None], connectionMadeCalls)
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345'>")
+        self.assertEqual(1, len(streamStartedCalls))
+        xs.reset()
+        self.assertEqual([None], connectionMadeCalls)
+
+
+
+class TestError(Exception):
+    pass
+
+
+
+class AuthenticatorTest(unittest.TestCase):
+    def setUp(self):
+        self.authenticator = xmlstream.Authenticator()
+        self.xmlstream = xmlstream.XmlStream(self.authenticator)
+
+
+    def test_streamStart(self):
+        """
+        Test streamStart to fill the appropriate attributes from the
+        stream header.
+        """
+        xs = self.xmlstream
+        xs.makeConnection(proto_helpers.StringTransport())
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                         "xmlns:stream='http://etherx.jabber.org/streams' "
+                         "from='example.org' to='example.com' id='12345' "
+                         "version='1.0'>")
+        self.assertEqual((1, 0), xs.version)
+        self.assertIdentical(None, xs.sid)
+        self.assertEqual('invalid', xs.namespace)
+        self.assertIdentical(None, xs.otherEntity)
+        self.assertEqual(None, xs.thisEntity)
+
+
+    def test_streamStartLegacy(self):
+        """
+        Test streamStart to fill the appropriate attributes from the
+        stream header for a pre-XMPP-1.0 header.
+        """
+        xs = self.xmlstream
+        xs.makeConnection(proto_helpers.StringTransport())
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345'>")
+        self.assertEqual((0, 0), xs.version)
+
+
+    def test_streamBadVersionOneDigit(self):
+        """
+        Test streamStart to fill the appropriate attributes from the
+        stream header for a version with only one digit.
+        """
+        xs = self.xmlstream
+        xs.makeConnection(proto_helpers.StringTransport())
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345' version='1'>")
+        self.assertEqual((0, 0), xs.version)
+
+
+    def test_streamBadVersionNoNumber(self):
+        """
+        Test streamStart to fill the appropriate attributes from the
+        stream header for a malformed version.
+        """
+        xs = self.xmlstream
+        xs.makeConnection(proto_helpers.StringTransport())
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345' version='blah'>")
+        self.assertEqual((0, 0), xs.version)
+
+
+
+class ConnectAuthenticatorTest(unittest.TestCase):
+
+    def setUp(self):
+        self.gotAuthenticated = False
+        self.initFailure = None
+        self.authenticator = xmlstream.ConnectAuthenticator('otherHost')
+        self.xmlstream = xmlstream.XmlStream(self.authenticator)
+        self.xmlstream.addObserver('//event/stream/authd', self.onAuthenticated)
+        self.xmlstream.addObserver('//event/xmpp/initfailed', self.onInitFailed)
+
+
+    def onAuthenticated(self, obj):
+        self.gotAuthenticated = True
+
+
+    def onInitFailed(self, failure):
+        self.initFailure = failure
+
+
+    def testSucces(self):
+        """
+        Test successful completion of an initialization step.
+        """
+        class Initializer:
+            def initialize(self):
+                pass
+
+        init = Initializer()
+        self.xmlstream.initializers = [init]
+
+        self.authenticator.initializeStream()
+        self.assertEqual([], self.xmlstream.initializers)
+        self.assertTrue(self.gotAuthenticated)
+
+
+    def testFailure(self):
+        """
+        Test failure of an initialization step.
+        """
+        class Initializer:
+            def initialize(self):
+                raise TestError
+
+        init = Initializer()
+        self.xmlstream.initializers = [init]
+
+        self.authenticator.initializeStream()
+        self.assertEqual([init], self.xmlstream.initializers)
+        self.assertFalse(self.gotAuthenticated)
+        self.assertNotIdentical(None, self.initFailure)
+        self.assertTrue(self.initFailure.check(TestError))
+
+
+    def test_streamStart(self):
+        """
+        Test streamStart to fill the appropriate attributes from the
+        stream header.
+        """
+        self.authenticator.namespace = 'testns'
+        xs = self.xmlstream
+        xs.makeConnection(proto_helpers.StringTransport())
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                         "xmlns:stream='http://etherx.jabber.org/streams' "
+                         "from='example.com' to='example.org' id='12345' "
+                         "version='1.0'>")
+        self.assertEqual((1, 0), xs.version)
+        self.assertEqual('12345', xs.sid)
+        self.assertEqual('testns', xs.namespace)
+        self.assertEqual('example.com', xs.otherEntity.host)
+        self.assertIdentical(None, xs.thisEntity)
+        self.assertNot(self.gotAuthenticated)
+        xs.dataReceived("<stream:features>"
+                          "<test xmlns='testns'/>"
+                        "</stream:features>")
+        self.assertIn(('testns', 'test'), xs.features)
+        self.assertTrue(self.gotAuthenticated)
+
+
+
+class ListenAuthenticatorTest(unittest.TestCase):
+    """
+    Tests for L{xmlstream.ListenAuthenticator}
+    """
+
+    def setUp(self):
+        self.authenticator = xmlstream.ListenAuthenticator()
+        self.xmlstream = xmlstream.XmlStream(self.authenticator)
+
+
+    def test_streamStart(self):
+        """
+        Test streamStart to fill the appropriate attributes from the
+        stream header.
+        """
+        xs = self.xmlstream
+        xs.makeConnection(proto_helpers.StringTransport())
+        self.assertIdentical(None, xs.sid)
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                         "xmlns:stream='http://etherx.jabber.org/streams' "
+                         "from='example.org' to='example.com' id='12345' "
+                         "version='1.0'>")
+        self.assertEqual((1, 0), xs.version)
+        self.assertNotIdentical(None, xs.sid)
+        self.assertNotEquals('12345', xs.sid)
+        self.assertEqual('jabber:client', xs.namespace)
+        self.assertIdentical(None, xs.otherEntity)
+        self.assertEqual('example.com', xs.thisEntity.host)
+
+
+    def test_streamStartUnicodeSessionID(self):
+        """
+        The generated session id must be a unicode object.
+        """
+        xs = self.xmlstream
+        xs.makeConnection(proto_helpers.StringTransport())
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                         "xmlns:stream='http://etherx.jabber.org/streams' "
+                         "from='example.org' to='example.com' id='12345' "
+                         "version='1.0'>")
+        self.assertIsInstance(xs.sid, unicode)
+
+
+
+class TLSInitiatingInitializerTest(unittest.TestCase):
+    def setUp(self):
+        self.output = []
+        self.done = []
+
+        self.savedSSL = xmlstream.ssl
+
+        self.authenticator = xmlstream.Authenticator()
+        self.xmlstream = xmlstream.XmlStream(self.authenticator)
+        self.xmlstream.send = self.output.append
+        self.xmlstream.connectionMade()
+        self.xmlstream.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345' version='1.0'>")
+        self.init = xmlstream.TLSInitiatingInitializer(self.xmlstream)
+
+
+    def tearDown(self):
+        xmlstream.ssl = self.savedSSL
+
+
+    def testWantedSupported(self):
+        """
+        Test start when TLS is wanted and the SSL library available.
+        """
+        self.xmlstream.transport = proto_helpers.StringTransport()
+        self.xmlstream.transport.startTLS = lambda ctx: self.done.append('TLS')
+        self.xmlstream.reset = lambda: self.done.append('reset')
+        self.xmlstream.sendHeader = lambda: self.done.append('header')
+
+        d = self.init.start()
+        d.addCallback(self.assertEqual, xmlstream.Reset)
+        starttls = self.output[0]
+        self.assertEqual('starttls', starttls.name)
+        self.assertEqual(NS_XMPP_TLS, starttls.uri)
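+        # On receiving <proceed/>, the transport's startTLS is expected to be
+        # invoked, after which the stream is reset and a new header is sent,
+        # giving the ['TLS', 'reset', 'header'] sequence asserted below.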
+        self.xmlstream.dataReceived("<proceed xmlns='%s'/>" % NS_XMPP_TLS)
+        self.assertEqual(['TLS', 'reset', 'header'], self.done)
+
+        return d
+
+    if not xmlstream.ssl:
+        testWantedSupported.skip = "SSL not available"
+
+
+    def testWantedNotSupportedNotRequired(self):
+        """
+        Test start when TLS is wanted but not required and the SSL library
+        is unavailable.
+        """
+        xmlstream.ssl = None
+
+        d = self.init.start()
+        d.addCallback(self.assertEqual, None)
+        self.assertEqual([], self.output)
+
+        return d
+
+
+    def testWantedNotSupportedRequired(self):
+        """
+        Test start when TLS is wanted and required, but the SSL library is
+        unavailable.
+        """
+        xmlstream.ssl = None
+        self.init.required = True
+
+        d = self.init.start()
+        self.assertFailure(d, xmlstream.TLSNotSupported)
+        self.assertEqual([], self.output)
+
+        return d
+
+
+    def testNotWantedRequired(self):
+        """
+        Test start when TLS is not wanted, but required by the server.
+        """
+        tls = domish.Element(('urn:ietf:params:xml:ns:xmpp-tls', 'starttls'))
+        tls.addElement('required')
+        self.xmlstream.features = {(tls.uri, tls.name): tls}
+        self.init.wanted = False
+
+        d = self.init.start()
+        self.assertEqual([], self.output)
+        self.assertFailure(d, xmlstream.TLSRequired)
+
+        return d
+
+
+    def testNotWantedNotRequired(self):
+        """
+        Test start when TLS is not wanted and not required by the server.
+        """
+        tls = domish.Element(('urn:ietf:params:xml:ns:xmpp-tls', 'starttls'))
+        self.xmlstream.features = {(tls.uri, tls.name): tls}
+        self.init.wanted = False
+
+        d = self.init.start()
+        d.addCallback(self.assertEqual, None)
+        self.assertEqual([], self.output)
+        return d
+
+
+    def testFailed(self):
+        """
+        Test failed TLS negotiation.
+        """
+        # Pretend that ssl is supported, it isn't actually used when the
+        # server starts out with a failure in response to our initial
+        # C{starttls} stanza.
+        xmlstream.ssl = 1
+
+        d = self.init.start()
+        self.assertFailure(d, xmlstream.TLSFailed)
+        self.xmlstream.dataReceived("<failure xmlns='%s'/>" % NS_XMPP_TLS)
+        return d
+
+
+
+class TestFeatureInitializer(xmlstream.BaseFeatureInitiatingInitializer):
+    feature = ('testns', 'test')
+
+    def start(self):
+        return defer.succeed(None)
+
+
+
+class BaseFeatureInitiatingInitializerTest(unittest.TestCase):
+
+    def setUp(self):
+        self.xmlstream = xmlstream.XmlStream(xmlstream.Authenticator())
+        self.init = TestFeatureInitializer(self.xmlstream)
+
+
+    def testAdvertized(self):
+        """
+        Test that an advertized feature results in successful initialization.
+        """
+        self.xmlstream.features = {self.init.feature:
+                                   domish.Element(self.init.feature)}
+        return self.init.initialize()
+
+
+    def testNotAdvertizedRequired(self):
+        """
+        Test that when the feature is not advertized, but required by the
+        initializer, an exception is raised.
+        """
+        self.init.required = True
+        self.assertRaises(xmlstream.FeatureNotAdvertized, self.init.initialize)
+
+
+    def testNotAdvertizedNotRequired(self):
+        """
+        Test that when the feature is not advertized, and not required by the
+        initializer, the initializer silently succeeds.
+        """
+        self.init.required = False
+        self.assertIdentical(None, self.init.initialize())
+
+
+
+class ToResponseTest(unittest.TestCase):
+
+    def test_toResponse(self):
+        """
+        Test that a response stanza is generated with addressing swapped.
+        """
+        stanza = domish.Element(('jabber:client', 'iq'))
+        stanza['type'] = 'get'
+        stanza['to'] = 'user1@example.com'
+        stanza['from'] = 'user2@example.com/resource'
+        stanza['id'] = 'stanza1'
+        response = xmlstream.toResponse(stanza, 'result')
+        self.assertNotIdentical(stanza, response)
+        self.assertEqual(response['from'], 'user1@example.com')
+        self.assertEqual(response['to'], 'user2@example.com/resource')
+        self.assertEqual(response['type'], 'result')
+        self.assertEqual(response['id'], 'stanza1')
+
+
+    def test_toResponseNoFrom(self):
+        """
+        Test that a response is generated from a stanza without a from address.
+        """
+        stanza = domish.Element(('jabber:client', 'iq'))
+        stanza['type'] = 'get'
+        stanza['to'] = 'user1@example.com'
+        response = xmlstream.toResponse(stanza)
+        self.assertEqual(response['from'], 'user1@example.com')
+        self.assertFalse(response.hasAttribute('to'))
+
+
+    def test_toResponseNoTo(self):
+        """
+        Test that a response is generated from a stanza without a to address.
+        """
+        stanza = domish.Element(('jabber:client', 'iq'))
+        stanza['type'] = 'get'
+        stanza['from'] = 'user2@example.com/resource'
+        response = xmlstream.toResponse(stanza)
+        self.assertFalse(response.hasAttribute('from'))
+        self.assertEqual(response['to'], 'user2@example.com/resource')
+
+
+    def test_toResponseNoAddressing(self):
+        """
+        Test that a response is generated from a stanza without any addressing.
+        """
+        stanza = domish.Element(('jabber:client', 'message'))
+        stanza['type'] = 'chat'
+        response = xmlstream.toResponse(stanza)
+        self.assertFalse(response.hasAttribute('to'))
+        self.assertFalse(response.hasAttribute('from'))
+
+
+    def test_noID(self):
+        """
+        Test that a proper response is generated without id attribute.
+        """
+        stanza = domish.Element(('jabber:client', 'message'))
+        response = xmlstream.toResponse(stanza)
+        self.assertFalse(response.hasAttribute('id'))
+
+
+    def test_noType(self):
+        """
+        Test that a proper response is generated without type attribute.
+        """
+        stanza = domish.Element(('jabber:client', 'message'))
+        response = xmlstream.toResponse(stanza)
+        self.assertFalse(response.hasAttribute('type'))
+
+
+class DummyFactory(object):
+    """
+    Dummy XmlStream factory that only registers bootstrap observers.
+    """
+    def __init__(self):
+        self.callbacks = {}
+
+
+    def addBootstrap(self, event, callback):
+        self.callbacks[event] = callback
+
+
+
+class DummyXMPPHandler(xmlstream.XMPPHandler):
+    """
+    Dummy XMPP subprotocol handler to count the methods are called on it.
+    """
+    def __init__(self):
+        self.doneMade = 0
+        self.doneInitialized = 0
+        self.doneLost = 0
+
+
+    def makeConnection(self, xs):
+        self.connectionMade()
+
+
+    def connectionMade(self):
+        self.doneMade += 1
+
+
+    def connectionInitialized(self):
+        self.doneInitialized += 1
+
+
+    def connectionLost(self, reason):
+        self.doneLost += 1
+
+
+
+class FailureReasonXMPPHandler(xmlstream.XMPPHandler):
+    """
+    Dummy handler specifically for failure reason tests.
+    """
+    def __init__(self):
+        self.gotFailureReason = False
+
+
+    def connectionLost(self, reason):
+        if isinstance(reason, failure.Failure):
+            self.gotFailureReason = True
+
+
+
+class XMPPHandlerTest(unittest.TestCase):
+    """
+    Tests for L{xmlstream.XMPPHandler}.
+    """
+
+    def test_interface(self):
+        """
+        L{xmlstream.XMPPHandler} implements L{ijabber.IXMPPHandler}.
+        """
+        verifyObject(ijabber.IXMPPHandler, xmlstream.XMPPHandler())
+
+
+    def test_send(self):
+        """
+        Test that data is passed on for sending by the stream manager.
+        """
+        class DummyStreamManager(object):
+            def __init__(self):
+                self.outlist = []
+
+            def send(self, data):
+                self.outlist.append(data)
+
+        handler = xmlstream.XMPPHandler()
+        handler.parent = DummyStreamManager()
+        handler.send('<presence/>')
+        self.assertEqual(['<presence/>'], handler.parent.outlist)
+
+
+    def test_makeConnection(self):
+        """
+        Test that makeConnection saves the XML stream and calls connectionMade.
+        """
+        class TestXMPPHandler(xmlstream.XMPPHandler):
+            def connectionMade(self):
+                self.doneMade = True
+
+        handler = TestXMPPHandler()
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        handler.makeConnection(xs)
+        self.assertTrue(handler.doneMade)
+        self.assertIdentical(xs, handler.xmlstream)
+
+
+    def test_connectionLost(self):
+        """
+        Test that connectionLost forgets the XML stream.
+        """
+        handler = xmlstream.XMPPHandler()
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        handler.makeConnection(xs)
+        handler.connectionLost(Exception())
+        self.assertIdentical(None, handler.xmlstream)
+
+
+
+class XMPPHandlerCollectionTest(unittest.TestCase):
+    """
+    Tests for L{xmlstream.XMPPHandlerCollection}.
+    """
+
+    def setUp(self):
+        self.collection = xmlstream.XMPPHandlerCollection()
+
+
+    def test_interface(self):
+        """
+        L{xmlstream.XMPPHandlerCollection} implements L{ijabber.IXMPPHandlerCollection}.
+        """
+        verifyObject(ijabber.IXMPPHandlerCollection, self.collection)
+
+
+    def test_addHandler(self):
+        """
+        Test the addition of a protocol handler.
+        """
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(self.collection)
+        self.assertIn(handler, self.collection)
+        self.assertIdentical(self.collection, handler.parent)
+
+
+    def test_removeHandler(self):
+        """
+        Test removal of a protocol handler.
+        """
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(self.collection)
+        handler.disownHandlerParent(self.collection)
+        self.assertNotIn(handler, self.collection)
+        self.assertIdentical(None, handler.parent)
+
+
+
+class StreamManagerTest(unittest.TestCase):
+    """
+    Tests for L{xmlstream.StreamManager}.
+    """
+
+    def setUp(self):
+        factory = DummyFactory()
+        self.streamManager = xmlstream.StreamManager(factory)
+
+
+    def test_basic(self):
+        """
+        Test correct initialization and setup of factory observers.
+        """
+        sm = self.streamManager
+        self.assertIdentical(None, sm.xmlstream)
+        self.assertEqual([], sm.handlers)
+        self.assertEqual(sm._connected,
+                          sm.factory.callbacks['//event/stream/connected'])
+        self.assertEqual(sm._authd,
+                          sm.factory.callbacks['//event/stream/authd'])
+        self.assertEqual(sm._disconnected,
+                          sm.factory.callbacks['//event/stream/end'])
+        self.assertEqual(sm.initializationFailed,
+                          sm.factory.callbacks['//event/xmpp/initfailed'])
+
+
+    def test_connected(self):
+        """
+        Test that protocol handlers have their connectionMade method called
+        when the XML stream is connected.
+        """
+        sm = self.streamManager
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(sm)
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        sm._connected(xs)
+        self.assertEqual(1, handler.doneMade)
+        self.assertEqual(0, handler.doneInitialized)
+        self.assertEqual(0, handler.doneLost)
+
+
+    def test_connectedLogTrafficFalse(self):
+        """
+        Test that the raw data functions are unset when logTraffic is False.
+        """
+        sm = self.streamManager
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(sm)
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        sm._connected(xs)
+        self.assertIdentical(None, xs.rawDataInFn)
+        self.assertIdentical(None, xs.rawDataOutFn)
+
+
+    def test_connectedLogTrafficTrue(self):
+        """
+        Test that the raw data functions are set when logTraffic is True.
+        """
+        sm = self.streamManager
+        sm.logTraffic = True
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(sm)
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        sm._connected(xs)
+        self.assertNotIdentical(None, xs.rawDataInFn)
+        self.assertNotIdentical(None, xs.rawDataOutFn)
+
+
+    def test_authd(self):
+        """
+        Test that protocol handlers have their connectionInitialized method
+        called when the XML stream is initialized.
+        """
+        sm = self.streamManager
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(sm)
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        sm._authd(xs)
+        self.assertEqual(0, handler.doneMade)
+        self.assertEqual(1, handler.doneInitialized)
+        self.assertEqual(0, handler.doneLost)
+
+
+    def test_disconnected(self):
+        """
+        Test that protocol handlers have their connectionLost method
+        called when the XML stream is disconnected.
+        """
+        sm = self.streamManager
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(sm)
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        sm._disconnected(xs)
+        self.assertEqual(0, handler.doneMade)
+        self.assertEqual(0, handler.doneInitialized)
+        self.assertEqual(1, handler.doneLost)
+
+
+    def test_disconnectedReason(self):
+        """
+        A L{STREAM_END_EVENT} results in L{StreamManager} firing the handlers'
+        L{connectionLost} methods, passing a L{failure.Failure} reason.
+        """
+        sm = self.streamManager
+        handler = FailureReasonXMPPHandler()
+        handler.setHandlerParent(sm)
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        sm._disconnected(failure.Failure(Exception("no reason")))
+        self.assertEqual(True, handler.gotFailureReason)
+
+
+    def test_addHandler(self):
+        """
+        Test the addition of a protocol handler while not connected.
+        """
+        sm = self.streamManager
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(sm)
+
+        self.assertEqual(0, handler.doneMade)
+        self.assertEqual(0, handler.doneInitialized)
+        self.assertEqual(0, handler.doneLost)
+
+
+    def test_addHandlerInitialized(self):
+        """
+        Test the addition of a protocol handler after the stream
+        has been initialized.
+
+        Make sure that the handler will have the connected stream
+        passed via C{makeConnection} and have C{connectionInitialized}
+        called.
+        """
+        sm = self.streamManager
+        xs = xmlstream.XmlStream(xmlstream.Authenticator())
+        sm._connected(xs)
+        sm._authd(xs)
+        handler = DummyXMPPHandler()
+        handler.setHandlerParent(sm)
+
+        self.assertEqual(1, handler.doneMade)
+        self.assertEqual(1, handler.doneInitialized)
+        self.assertEqual(0, handler.doneLost)
+
+
+    def test_sendInitialized(self):
+        """
+        Test send when the stream has been initialized.
+
+        The data should be sent directly over the XML stream.
+        """
+        factory = xmlstream.XmlStreamFactory(xmlstream.Authenticator())
+        sm = xmlstream.StreamManager(factory)
+        xs = factory.buildProtocol(None)
+        xs.transport = proto_helpers.StringTransport()
+        xs.connectionMade()
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345'>")
+        xs.dispatch(xs, "//event/stream/authd")
+        sm.send("<presence/>")
+        self.assertEqual("<presence/>", xs.transport.value())
+
+
+    def test_sendNotConnected(self):
+        """
+        Test send when there is no established XML stream.
+
+        The data should be cached until an XML stream has been established and
+        initialized.
+        """
+        factory = xmlstream.XmlStreamFactory(xmlstream.Authenticator())
+        sm = xmlstream.StreamManager(factory)
+        handler = DummyXMPPHandler()
+        sm.addHandler(handler)
+
+        xs = factory.buildProtocol(None)
+        xs.transport = proto_helpers.StringTransport()
+        sm.send("<presence/>")
+        self.assertEqual("", xs.transport.value())
+        self.assertEqual("<presence/>", sm._packetQueue[0])
+
+        xs.connectionMade()
+        self.assertEqual("", xs.transport.value())
+        self.assertEqual("<presence/>", sm._packetQueue[0])
+
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345'>")
+        xs.dispatch(xs, "//event/stream/authd")
+
+        self.assertEqual("<presence/>", xs.transport.value())
+        self.assertFalse(sm._packetQueue)
+
+
+    def test_sendNotInitialized(self):
+        """
+        Test send when the stream is connected but not yet initialized.
+
+        The data should be cached until the XML stream has been initialized.
+        """
+        factory = xmlstream.XmlStreamFactory(xmlstream.Authenticator())
+        sm = xmlstream.StreamManager(factory)
+        xs = factory.buildProtocol(None)
+        xs.transport = proto_helpers.StringTransport()
+        xs.connectionMade()
+        xs.dataReceived("<stream:stream xmlns='jabber:client' "
+                        "xmlns:stream='http://etherx.jabber.org/streams' "
+                        "from='example.com' id='12345'>")
+        sm.send("<presence/>")
+        self.assertEqual("", xs.transport.value())
+        self.assertEqual("<presence/>", sm._packetQueue[0])
+
+
+    def test_sendDisconnected(self):
+        """
+        Test send after XML stream disconnection.
+
+        The data should be cached until a new XML stream has been established
+        and initialized.
+        """
+        factory = xmlstream.XmlStreamFactory(xmlstream.Authenticator())
+        sm = xmlstream.StreamManager(factory)
+        handler = DummyXMPPHandler()
+        sm.addHandler(handler)
+
+        xs = factory.buildProtocol(None)
+        xs.connectionMade()
+        xs.transport = proto_helpers.StringTransport()
+        xs.connectionLost(None)
+
+        sm.send("<presence/>")
+        self.assertEqual("", xs.transport.value())
+        self.assertEqual("<presence/>", sm._packetQueue[0])
+
+
+
+class XmlStreamServerFactoryTest(GenericXmlStreamFactoryTestsMixin):
+    """
+    Tests for L{xmlstream.XmlStreamServerFactory}.
+    """
+
+    def setUp(self):
+        """
+        Set up a server factory with an authenticator factory function.
+        """
+        class TestAuthenticator(object):
+            def __init__(self):
+                self.xmlstreams = []
+
+            def associateWithStream(self, xs):
+                self.xmlstreams.append(xs)
+
+        def authenticatorFactory():
+            return TestAuthenticator()
+
+        self.factory = xmlstream.XmlStreamServerFactory(authenticatorFactory)
+
+
+    def test_interface(self):
+        """
+        L{XmlStreamServerFactory} is a L{Factory}.
+        """
+        verifyObject(IProtocolFactory, self.factory)
+
+
+    def test_buildProtocolAuthenticatorInstantiation(self):
+        """
+        The authenticator factory should be used to instantiate the
+        authenticator and pass it to the protocol.
+
+        The default protocol, L{XmlStream}, stores the authenticator it is
+        passed and calls its C{associateWithStream} method, so we use that to
+        check whether our authenticator factory is used and whether the
+        protocol instance gets an authenticator.
+        """
+        xs = self.factory.buildProtocol(None)
+        self.assertEqual([xs], xs.authenticator.xmlstreams)
+
+
+    def test_buildProtocolXmlStream(self):
+        """
+        The protocol factory creates Jabber XML Stream protocols by default.
+        """
+        xs = self.factory.buildProtocol(None)
+        self.assertIsInstance(xs, xmlstream.XmlStream)
+
+
+    def test_buildProtocolTwice(self):
+        """
+        Subsequent calls to buildProtocol should result in different instances
+        of the protocol, as well as their authenticators.
+        """
+        xs1 = self.factory.buildProtocol(None)
+        xs2 = self.factory.buildProtocol(None)
+        self.assertNotIdentical(xs1, xs2)
+        self.assertNotIdentical(xs1.authenticator, xs2.authenticator)
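The send tests above exercise StreamManager's queue-until-initialized behaviour: data handed to send() before the stream is established and authenticated is cached in _packetQueue and flushed once the authd event fires. A minimal standalone sketch of that pattern in Python 2, to match the code above (the class and method names here are illustrative, not Twisted's API):

    class QueuingSender(object):
        """
        Cache outgoing data until an initialized stream is available.
        """
        def __init__(self):
            self.xmlstream = None
            self._packetQueue = []

        def send(self, obj):
            # Send immediately when an initialized stream exists,
            # otherwise queue the data for later.
            if self.xmlstream is not None:
                self.xmlstream.send(obj)
            else:
                self._packetQueue.append(obj)

        def streamInitialized(self, xs):
            # Called once the stream is authenticated; flush the queue.
            self.xmlstream = xs
            for obj in self._packetQueue:
                xs.send(obj)
            self._packetQueue = []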
diff --git a/ThirdParty/Twisted/twisted/words/test/test_jabberxmppstringprep.py b/ThirdParty/Twisted/twisted/words/test/test_jabberxmppstringprep.py
new file mode 100644
index 0000000..4b25c0b
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_jabberxmppstringprep.py
@@ -0,0 +1,92 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.trial import unittest
+
+from twisted.words.protocols.jabber.xmpp_stringprep import nodeprep, resourceprep, nameprep, crippled
+
+class XMPPStringPrepTest(unittest.TestCase):
+    """
+
+    The nodeprep stringprep profile is similar to the resourceprep profile,
+    but does an extra mapping of characters (table B.2) and disallows
+    more characters (table C.1.1 and eight extra punctuation characters).
+    Due to this similarity, the resourceprep tests are more extensive, and
+    the nodeprep tests only address the additional mappings and restrictions.
+
+    The nameprep profile is nearly identical to the nameprep implementation in
+    L{encodings.idna}, but that implementation assumes the C{UseSTD4ASCIIRules}
+    flag to be false. This implementation assumes it to be true, and restricts
+    the allowed set of characters.  The tests here only check for the
+    differences.
+    
+    """
+
+    def testResourcePrep(self):
+        self.assertEqual(resourceprep.prepare(u'resource'), u'resource')
+        self.assertNotEquals(resourceprep.prepare(u'Resource'), u'resource')
+        self.assertEqual(resourceprep.prepare(u' '), u' ')
+
+        if crippled:
+            return
+
+        self.assertEqual(resourceprep.prepare(u'Henry \u2163'), u'Henry IV')
+        self.assertEqual(resourceprep.prepare(u'foo\xad\u034f\u1806\u180b'
+                                               u'bar\u200b\u2060'
+                                               u'baz\ufe00\ufe08\ufe0f\ufeff'),
+                          u'foobarbaz')
+        self.assertEqual(resourceprep.prepare(u'\u00a0'), u' ')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\u1680')
+        self.assertEqual(resourceprep.prepare(u'\u2000'), u' ')
+        self.assertEqual(resourceprep.prepare(u'\u200b'), u'')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\u0010\u007f')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\u0085')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\u180e')
+        self.assertEqual(resourceprep.prepare(u'\ufeff'), u'')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\uf123')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\U000f1234')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\U0010f234')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\U0008fffe')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\U0010ffff')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\udf42')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\ufffd')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\u2ff5')
+        self.assertEqual(resourceprep.prepare(u'\u0341'), u'\u0301')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\u200e')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\u202a')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\U000e0001')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\U000e0042')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'foo\u05bebar')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'foo\ufd50bar')
+        #self.assertEqual(resourceprep.prepare(u'foo\ufb38bar'),
+        #                  u'foo\u064ebar')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\u06271')
+        self.assertEqual(resourceprep.prepare(u'\u06271\u0628'),
+                          u'\u06271\u0628')
+        self.assertRaises(UnicodeError, resourceprep.prepare, u'\U000e0002')
+
+    def testNodePrep(self):
+        self.assertEqual(nodeprep.prepare(u'user'), u'user')
+        self.assertEqual(nodeprep.prepare(u'User'), u'user')
+        self.assertRaises(UnicodeError, nodeprep.prepare, u'us&er')
+
+
+    def test_nodeprepUnassignedInUnicode32(self):
+        """
+        Make sure unassigned code points from Unicode 3.2 are rejected.
+        """
+        self.assertRaises(UnicodeError, nodeprep.prepare, u'\u1d39')
+
+
+    def testNamePrep(self):
+        self.assertEqual(nameprep.prepare(u'example.com'), u'example.com')
+        self.assertEqual(nameprep.prepare(u'Example.com'), u'example.com')
+        self.assertRaises(UnicodeError, nameprep.prepare, u'ex@mple.com')
+        self.assertRaises(UnicodeError, nameprep.prepare, u'-example.com')
+        self.assertRaises(UnicodeError, nameprep.prepare, u'example-.com')
+
+        if crippled:
+            return
+
+        self.assertEqual(nameprep.prepare(u'stra\u00dfe.example.com'),
+                          u'strasse.example.com')
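The three profiles tested here are normally applied to the three parts of a JID. A minimal usage sketch (the JID-building helper is illustrative; only the prepare() calls come from the module under test):

    from twisted.words.protocols.jabber.xmpp_stringprep import (
        nodeprep, nameprep, resourceprep)

    def normalizeJID(node, domain, resource):
        # user@domain/resource: each part gets its own stringprep profile.
        return u'%s@%s/%s' % (nodeprep.prepare(node),
                              nameprep.prepare(domain),
                              resourceprep.prepare(resource))

    # e.g. normalizeJID(u'User', u'Example.com', u'Home')
    # returns u'user@example.com/Home' (resourceprep does not casefold).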
diff --git a/ThirdParty/Twisted/twisted/words/test/test_msn.py b/ThirdParty/Twisted/twisted/words/test/test_msn.py
new file mode 100644
index 0000000..ece580f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_msn.py
@@ -0,0 +1,522 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for L{twisted.words.protocols.msn}.
+"""
+
+# System imports
+import StringIO
+
+# Twisted imports
+
+# t.w.p.msn requires an HTTP client
+try:
+    # So try to get one - do it directly instead of catching an ImportError
+    # from t.w.p.msn so that other problems which cause that module to fail
+    # to import don't cause the tests to be skipped.
+    from twisted.web import client
+except ImportError:
+    # If there isn't one, we're going to skip all the tests.
+    msn = None
+else:
+    # Otherwise importing it should work, so do it.
+    from twisted.words.protocols import msn
+
+
+from twisted.python.hashlib import md5
+from twisted.protocols import loopback
+from twisted.internet.defer import Deferred
+from twisted.trial import unittest
+from twisted.test.proto_helpers import StringTransport, StringIOWithoutClosing
+
+def printError(f):
+    print f
+
+
+class PassportTests(unittest.TestCase):
+
+    def setUp(self):
+        self.result = []
+        self.deferred = Deferred()
+        self.deferred.addCallback(lambda r: self.result.append(r))
+        self.deferred.addErrback(printError)
+
+    def test_nexus(self):
+        """
+        When L{msn.PassportNexus} receives enough information to identify the
+        address of the login server, it fires the L{Deferred} passed to its
+        initializer with that address.
+        """
+        protocol = msn.PassportNexus(self.deferred, 'https://foobar.com/somepage.quux')
+        headers = {
+            'Content-Length' : '0',
+            'Content-Type'   : 'text/html',
+            'PassportURLs'   : 'DARealm=Passport.Net,DALogin=login.myserver.com/,DAReg=reg.myserver.com'
+        }
+        transport = StringTransport()
+        protocol.makeConnection(transport)
+        protocol.dataReceived('HTTP/1.0 200 OK\r\n')
+        for (h, v) in headers.items():
+            protocol.dataReceived('%s: %s\r\n' % (h,v))
+        protocol.dataReceived('\r\n')
+        self.assertEqual(self.result[0], "https://login.myserver.com/")
+
+
+    def _doLoginTest(self, response, headers):
+        protocol = msn.PassportLogin(self.deferred,'foo@foo.com','testpass','https://foo.com/', 'a')
+        protocol.makeConnection(StringTransport())
+        protocol.dataReceived(response)
+        for (h,v) in headers.items(): protocol.dataReceived('%s: %s\r\n' % (h,v))
+        protocol.dataReceived('\r\n')
+
+    def testPassportLoginSuccess(self):
+        headers = {
+            'Content-Length'      : '0',
+            'Content-Type'        : 'text/html',
+            'Authentication-Info' : "Passport1.4 da-status=success,tname=MSPAuth," +
+                                    "tname=MSPProf,tname=MSPSec,from-PP='somekey'," +
+                                    "ru=http://messenger.msn.com"
+        }
+        self._doLoginTest('HTTP/1.1 200 OK\r\n', headers)
+        self.failUnless(self.result[0] == (msn.LOGIN_SUCCESS, 'somekey'))
+
+    def testPassportLoginFailure(self):
+        headers = {
+            'Content-Type'     : 'text/html',
+            'WWW-Authenticate' : 'Passport1.4 da-status=failed,' +
+                                 'srealm=Passport.NET,ts=-3,prompt,cburl=http://host.com,' +
+                                 'cbtxt=the%20error%20message'
+        }
+        self._doLoginTest('HTTP/1.1 401 Unauthorized\r\n', headers)
+        self.failUnless(self.result[0] == (msn.LOGIN_FAILURE, 'the error message'))
+
+    def testPassportLoginRedirect(self):
+        headers = {
+            'Content-Type'        : 'text/html',
+            'Authentication-Info' : 'Passport1.4 da-status=redir',
+            'Location'            : 'https://newlogin.host.com/'
+        }
+        self._doLoginTest('HTTP/1.1 302 Found\r\n', headers)
+        self.failUnless(self.result[0] == (msn.LOGIN_REDIRECT, 'https://newlogin.host.com/', 'a'))
+
+
+if msn is not None:
+    class DummySwitchboardClient(msn.SwitchboardClient):
+        def userTyping(self, message):
+            self.state = 'TYPING'
+
+        def gotSendRequest(self, fileName, fileSize, cookie, message):
+            if fileName == 'foobar.ext' and fileSize == 31337 and cookie == 1234: self.state = 'INVITATION'
+
+
+    class DummyNotificationClient(msn.NotificationClient):
+        def loggedIn(self, userHandle, screenName, verified):
+            if userHandle == 'foo@bar.com' and screenName == 'Test Screen Name' and verified:
+                self.state = 'LOGIN'
+
+        def gotProfile(self, message):
+            self.state = 'PROFILE'
+
+        def gotContactStatus(self, code, userHandle, screenName):
+            if code == msn.STATUS_AWAY and userHandle == "foo@bar.com" and screenName == "Test Screen Name":
+                self.state = 'INITSTATUS'
+
+        def contactStatusChanged(self, code, userHandle, screenName):
+            if code == msn.STATUS_LUNCH and userHandle == "foo@bar.com" and screenName == "Test Name":
+                self.state = 'NEWSTATUS'
+
+        def contactOffline(self, userHandle):
+            if userHandle == "foo at bar.com": self.state = 'OFFLINE'
+
+        def statusChanged(self, code):
+            if code == msn.STATUS_HIDDEN: self.state = 'MYSTATUS'
+
+        def listSynchronized(self, *args):
+            self.state = 'GOTLIST'
+
+        def gotPhoneNumber(self, listVersion, userHandle, phoneType, number):
+            msn.NotificationClient.gotPhoneNumber(self, listVersion, userHandle, phoneType, number)
+            self.state = 'GOTPHONE'
+
+        def userRemovedMe(self, userHandle, listVersion):
+            msn.NotificationClient.userRemovedMe(self, userHandle, listVersion)
+            c = self.factory.contacts.getContact(userHandle)
+            if not c and self.factory.contacts.version == listVersion: self.state = 'USERREMOVEDME'
+
+        def userAddedMe(self, userHandle, screenName, listVersion):
+            msn.NotificationClient.userAddedMe(self, userHandle, screenName, listVersion)
+            c = self.factory.contacts.getContact(userHandle)
+            if c and (c.lists | msn.REVERSE_LIST) and (self.factory.contacts.version == listVersion) and \
+               (screenName == 'Screen Name'):
+                self.state = 'USERADDEDME'
+
+        def gotSwitchboardInvitation(self, sessionID, host, port, key, userHandle, screenName):
+            if sessionID == 1234 and \
+               host == '192.168.1.1' and \
+               port == 1863 and \
+               key == '123.456' and \
+               userHandle == 'foo@foo.com' and \
+               screenName == 'Screen Name':
+                self.state = 'SBINVITED'
+
+
+
+class DispatchTests(unittest.TestCase):
+    """
+    Tests for L{DispatchClient}.
+    """
+    def _versionTest(self, serverVersionResponse):
+        """
+        Test L{DispatchClient} version negotiation.
+        """
+        client = msn.DispatchClient()
+        client.userHandle = "foo"
+
+        transport = StringTransport()
+        client.makeConnection(transport)
+        self.assertEqual(
+            transport.value(), "VER 1 MSNP8 CVR0\r\n")
+        transport.clear()
+
+        client.dataReceived(serverVersionResponse)
+        self.assertEqual(
+            transport.value(),
+            "CVR 2 0x0409 win 4.10 i386 MSNMSGR 5.0.0544 MSMSGS foo\r\n")
+
+
+    def test_version(self):
+        """
+        L{DispatchClient.connectionMade} greets the server with a I{VER}
+        (version) message and then L{NotificationClient.dataReceived}
+        handles the server's I{VER} response by sending a I{CVR} (client
+        version) message.
+        """
+        self._versionTest("VER 1 MSNP8 CVR0\r\n")
+
+
+    def test_versionWithoutCVR0(self):
+        """
+        If the server responds to a I{VER} command without including the
+        I{CVR0} protocol, L{DispatchClient} behaves in the same way as if
+        that protocol were included.
+
+        Starting in August 2008, CVR0 disappeared from the I{VER} response.
+        """
+        self._versionTest("VER 1 MSNP8\r\n")
+
+
+
+class NotificationTests(unittest.TestCase):
+    """ testing the various events in NotificationClient """
+
+    def setUp(self):
+        self.client = DummyNotificationClient()
+        self.client.factory = msn.NotificationFactory()
+        self.client.state = 'START'
+
+
+    def tearDown(self):
+        self.client = None
+
+
+    def _versionTest(self, serverVersionResponse):
+        """
+        Test L{NotificationClient} version negotiation.
+        """
+        self.client.factory.userHandle = "foo"
+
+        transport = StringTransport()
+        self.client.makeConnection(transport)
+        self.assertEqual(
+            transport.value(), "VER 1 MSNP8 CVR0\r\n")
+        transport.clear()
+
+        self.client.dataReceived(serverVersionResponse)
+        self.assertEqual(
+            transport.value(),
+            "CVR 2 0x0409 win 4.10 i386 MSNMSGR 5.0.0544 MSMSGS foo\r\n")
+
+
+    def test_version(self):
+        """
+        L{NotificationClient.connectionMade} greets the server with a I{VER}
+        (version) message and then L{NotificationClient.dataReceived}
+        handles the server's I{VER} response by sending a I{CVR} (client
+        version) message.
+        """
+        self._versionTest("VER 1 MSNP8 CVR0\r\n")
+
+
+    def test_versionWithoutCVR0(self):
+        """
+        If the server responds to a I{VER} command without including the
+        I{CVR0} protocol, L{NotificationClient} behaves in the same way as
+        if that protocol were included.
+
+        Starting in August 2008, CVR0 disappeared from the I{VER} response.
+        """
+        self._versionTest("VER 1 MSNP8\r\n")
+
+
+    def test_challenge(self):
+        """
+        L{NotificationClient} responds to a I{CHL} message by sending a I{QRY}
+        back which includes a hash based on the parameters of the I{CHL}.
+        """
+        transport = StringTransport()
+        self.client.makeConnection(transport)
+        transport.clear()
+
+        challenge = "15570131571988941333"
+        self.client.dataReceived('CHL 0 ' + challenge + '\r\n')
+        # md5 of the challenge and a magic string defined by the protocol
+        response = "8f2f5a91b72102cd28355e9fc9000d6e"
+        # Sanity check - the response is what the comment above says it is.
+        self.assertEqual(
+            response, md5(challenge + "Q1P7W2E4J9R8U3S5").hexdigest())
+        self.assertEqual(
+            transport.value(),
+            # 2 is the next transaction identifier.  32 is the length of the
+            # response.
+            "QRY 2 msmsgs at msnmsgr.com 32\r\n" + response)
+
+
+    def testLogin(self):
+        self.client.lineReceived('USR 1 OK foo@bar.com Test%20Screen%20Name 1 0')
+        self.failUnless((self.client.state == 'LOGIN'), msg='Failed to detect successful login')
+
+
+    def test_loginWithoutSSLFailure(self):
+        """
+        L{NotificationClient.loginFailure} is called if the necessary SSL APIs
+        are unavailable.
+        """
+        self.patch(msn, 'ClientContextFactory', None)
+        success = []
+        self.client.loggedIn = lambda *args: success.append(args)
+        failure = []
+        self.client.loginFailure = failure.append
+
+        self.client.lineReceived('USR 6 TWN S opaque-string-goes-here')
+        self.assertEqual(success, [])
+        self.assertEqual(
+            failure,
+            ["Exception while authenticating: "
+             "Connecting to the Passport server requires SSL, but SSL is "
+             "unavailable."])
+
+
+    def testProfile(self):
+        m = 'MSG Hotmail Hotmail 353\r\nMIME-Version: 1.0\r\nContent-Type: text/x-msmsgsprofile; charset=UTF-8\r\n'
+        m += 'LoginTime: 1016941010\r\nEmailEnabled: 1\r\nMemberIdHigh: 40000\r\nMemberIdLow: -600000000\r\nlang_preference: 1033\r\n'
+        m += 'preferredEmail: foo@bar.com\r\ncountry: AU\r\nPostalCode: 90210\r\nGender: M\r\nKid: 0\r\nAge:\r\nsid: 400\r\n'
+        m += 'kv: 2\r\nMSPAuth: 2CACCBCCADMoV8ORoz64BVwmjtksIg!kmR!Rj5tBBqEaW9hc4YnPHSOQ$$\r\n\r\n'
+        map(self.client.lineReceived, m.split('\r\n')[:-1])
+        self.failUnless((self.client.state == 'PROFILE'), msg='Failed to detect initial profile')
+
+    def testStatus(self):
+        t = [('ILN 1 AWY foo@bar.com Test%20Screen%20Name 0', 'INITSTATUS', 'Failed to detect initial status report'),
+             ('NLN LUN foo@bar.com Test%20Name 0', 'NEWSTATUS', 'Failed to detect contact status change'),
+             ('FLN foo@bar.com', 'OFFLINE', 'Failed to detect contact signing off'),
+             ('CHG 1 HDN 0', 'MYSTATUS', 'Failed to detect my status changing')]
+        for i in t:
+            self.client.lineReceived(i[0])
+            self.failUnless((self.client.state == i[1]), msg=i[2])
+
+    def testListSync(self):
+        # currently this test does not take into account the fact
+        # that BPRs sent as part of the SYN reply may not be interpreted
+        # as such if they are for the last LST -- maybe I should
+        # factor this in later.
+        self.client.makeConnection(StringTransport())
+        msn.NotificationClient.loggedIn(self.client, 'foo@foo.com', 'foobar', 1)
+        lines = [
+            "SYN %s 100 1 1" % self.client.currentID,
+            "GTC A",
+            "BLP AL",
+            "LSG 0 Other%20Contacts 0",
+            "LST userHandle at email.com Some%20Name 11 0"
+        ]
+        map(self.client.lineReceived, lines)
+        contacts = self.client.factory.contacts
+        contact = contacts.getContact('userHandle@email.com')
+        self.failUnless(contacts.version == 100, "Invalid contact list version")
+        self.failUnless(contact.screenName == 'Some Name', "Invalid screen-name for user")
+        self.failUnless(contacts.groups == {0 : 'Other Contacts'}, "Did not get proper group list")
+        self.failUnless(contact.groups == [0] and contact.lists == 11, "Invalid contact list/group info")
+        self.failUnless(self.client.state == 'GOTLIST', "Failed to call list sync handler")
+
+    def testAsyncPhoneChange(self):
+        c = msn.MSNContact(userHandle='userHandle@email.com')
+        self.client.factory.contacts = msn.MSNContactList()
+        self.client.factory.contacts.addContact(c)
+        self.client.makeConnection(StringTransport())
+        self.client.lineReceived("BPR 101 userHandle at email.com PHH 123%20456")
+        c = self.client.factory.contacts.getContact('userHandle at email.com')
+        self.failUnless(self.client.state == 'GOTPHONE', "Did not fire phone change callback")
+        self.failUnless(c.homePhone == '123 456', "Did not update the contact's phone number")
+        self.failUnless(self.client.factory.contacts.version == 101, "Did not update list version")
+
+    def testLateBPR(self):
+        """
+        This test makes sure that if a BPR response that was meant
+        to be part of a SYN response (but came after the last LST)
+        is received, the correct contact is updated and all is well
+        """
+        self.client.makeConnection(StringTransport())
+        msn.NotificationClient.loggedIn(self.client, 'foo@foo.com', 'foo', 1)
+        lines = [
+            "SYN %s 100 1 1" % self.client.currentID,
+            "GTC A",
+            "BLP AL",
+            "LSG 0 Other%20Contacts 0",
+            "LST userHandle at email.com Some%20Name 11 0",
+            "BPR PHH 123%20456"
+        ]
+        map(self.client.lineReceived, lines)
+        contact = self.client.factory.contacts.getContact('userHandle@email.com')
+        self.failUnless(contact.homePhone == '123 456', "Did not update contact's phone number")
+
+    def testUserRemovedMe(self):
+        self.client.factory.contacts = msn.MSNContactList()
+        contact = msn.MSNContact(userHandle='foo@foo.com')
+        contact.addToList(msn.REVERSE_LIST)
+        self.client.factory.contacts.addContact(contact)
+        self.client.lineReceived("REM 0 RL 100 foo at foo.com")
+        self.failUnless(self.client.state == 'USERREMOVEDME', "Failed to remove user from reverse list")
+
+    def testUserAddedMe(self):
+        self.client.factory.contacts = msn.MSNContactList()
+        self.client.lineReceived("ADD 0 RL 100 foo at foo.com Screen%20Name")
+        self.failUnless(self.client.state == 'USERADDEDME', "Failed to add user to reverse list")
+
+    def testAsyncSwitchboardInvitation(self):
+        self.client.lineReceived("RNG 1234 192.168.1.1:1863 CKI 123.456 foo at foo.com Screen%20Name")
+        self.failUnless(self.client.state == "SBINVITED")
+
+    def testCommandFailed(self):
+        """
+        Ensures that error responses from the server fire an errback with
+        MSNCommandFailed.
+        """
+        id, d = self.client._createIDMapping()
+        self.client.lineReceived("201 %s" % id)
+        d = self.assertFailure(d, msn.MSNCommandFailed)
+        def assertErrorCode(exception):
+            self.assertEqual(201, exception.errorCode)
+        return d.addCallback(assertErrorCode)
+
+
+class MessageHandlingTests(unittest.TestCase):
+    """ testing various message handling methods from SwichboardClient """
+
+    def setUp(self):
+        self.client = DummySwitchboardClient()
+        self.client.state = 'START'
+
+    def tearDown(self):
+        self.client = None
+
+    def testClientCapabilitiesCheck(self):
+        m = msn.MSNMessage()
+        m.setHeader('Content-Type', 'text/x-clientcaps')
+        self.assertEqual(self.client.checkMessage(m), 0, 'Failed to detect client capability message')
+
+    def testTypingCheck(self):
+        m = msn.MSNMessage()
+        m.setHeader('Content-Type', 'text/x-msmsgscontrol')
+        m.setHeader('TypingUser', 'foo@bar')
+        self.client.checkMessage(m)
+        self.failUnless((self.client.state == 'TYPING'), msg='Failed to detect typing notification')
+
+    def testFileInvitation(self, lazyClient=False):
+        m = msn.MSNMessage()
+        m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
+        m.message += 'Application-Name: File Transfer\r\n'
+        if not lazyClient:
+            m.message += 'Application-GUID: {5D3E02AB-6190-11d3-BBBB-00C04F795683}\r\n'
+        m.message += 'Invitation-Command: Invite\r\n'
+        m.message += 'Invitation-Cookie: 1234\r\n'
+        m.message += 'Application-File: foobar.ext\r\n'
+        m.message += 'Application-FileSize: 31337\r\n\r\n'
+        self.client.checkMessage(m)
+        self.failUnless((self.client.state == 'INVITATION'), msg='Failed to detect file transfer invitation')
+
+    def testFileInvitationMissingGUID(self):
+        return self.testFileInvitation(True)
+
+    def testFileResponse(self):
+        d = Deferred()
+        d.addCallback(self.fileResponse)
+        self.client.cookies['iCookies'][1234] = (d, None)
+        m = msn.MSNMessage()
+        m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
+        m.message += 'Invitation-Command: ACCEPT\r\n'
+        m.message += 'Invitation-Cookie: 1234\r\n\r\n'
+        self.client.checkMessage(m)
+        self.failUnless((self.client.state == 'RESPONSE'), msg='Failed to detect file transfer response')
+
+    def testFileInfo(self):
+        d = Deferred()
+        d.addCallback(self.fileInfo)
+        self.client.cookies['external'][1234] = (d, None)
+        m = msn.MSNMessage()
+        m.setHeader('Content-Type', 'text/x-msmsgsinvite; charset=UTF-8')
+        m.message += 'Invitation-Command: ACCEPT\r\n'
+        m.message += 'Invitation-Cookie: 1234\r\n'
+        m.message += 'IP-Address: 192.168.0.1\r\n'
+        m.message += 'Port: 6891\r\n'
+        m.message += 'AuthCookie: 4321\r\n\r\n'
+        self.client.checkMessage(m)
+        self.failUnless((self.client.state == 'INFO'), msg='Failed to detect file transfer info')
+
+    def fileResponse(self, (accept, cookie, info)):
+        if accept and cookie == 1234: self.client.state = 'RESPONSE'
+
+    def fileInfo(self, (accept, ip, port, aCookie, info)):
+        if accept and ip == '192.168.0.1' and port == 6891 and aCookie == 4321: self.client.state = 'INFO'
+
+
+class FileTransferTestCase(unittest.TestCase):
+    """
+    test FileSend against FileReceive
+    """
+
+    def setUp(self):
+        self.input = 'a' * 7000
+        self.output = StringIOWithoutClosing()
+
+
+    def tearDown(self):
+        self.input = None
+        self.output = None
+
+
+    def test_fileTransfer(self):
+        """
+        Test L{FileSend} against L{FileReceive} using a loopback transport.
+        """
+        auth = 1234
+        sender = msn.FileSend(StringIO.StringIO(self.input))
+        sender.auth = auth
+        sender.fileSize = 7000
+        client = msn.FileReceive(auth, "foo@bar.com", self.output)
+        client.fileSize = 7000
+        def check(ignored):
+            self.assertTrue(
+                client.completed and sender.completed,
+                msg="send failed to complete")
+            self.assertEqual(
+                self.input, self.output.getvalue(),
+                msg="saved file does not match original")
+        d = loopback.loopbackAsync(sender, client)
+        d.addCallback(check)
+        return d
+
+if msn is None:
+    for testClass in [DispatchTests, PassportTests, NotificationTests,
+                      MessageHandlingTests, FileTransferTestCase]:
+        testClass.skip = (
+            "MSN requires an HTTP client but none is available, "
+            "skipping tests.")
diff --git a/ThirdParty/Twisted/twisted/words/test/test_oscar.py b/ThirdParty/Twisted/twisted/words/test/test_oscar.py
new file mode 100644
index 0000000..f807ce5
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_oscar.py
@@ -0,0 +1,24 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.protocols.oscar}.
+"""
+
+from twisted.trial.unittest import TestCase
+
+from twisted.words.protocols.oscar import encryptPasswordMD5
+
+
+class PasswordTests(TestCase):
+    """
+    Tests for L{encryptPasswordMD5}.
+    """
+    def test_encryptPasswordMD5(self):
+        """
+        L{encryptPasswordMD5} hashes the given password and key and returns a
+        string suitable to use to authenticate against an OSCAR server.
+        """
+        self.assertEqual(
+            encryptPasswordMD5('foo', 'bar').encode('hex'),
+            'd73475c370a7b18c6c20386bcf1339f2')
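For context, a hash of this shape is usually produced by chaining the challenge key, the MD5 digest of the password, and a fixed protocol constant through MD5. A minimal sketch of that scheme in Python 2 (the magic constant and argument order are assumptions for illustration, not taken from this patch):

    from hashlib import md5

    def encrypt_password_md5(password, key):
        # AIM/OSCAR-style MD5 login hash: the challenge key, then
        # md5(password), then a fixed magic string, all fed into one digest.
        m = md5()
        m.update(key)
        m.update(md5(password).digest())
        m.update("AOL Instant Messenger (SM)")
        return m.digest()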
diff --git a/ThirdParty/Twisted/twisted/words/test/test_service.py b/ThirdParty/Twisted/twisted/words/test/test_service.py
new file mode 100644
index 0000000..12720fe
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_service.py
@@ -0,0 +1,995 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.service}.
+"""
+
+import time
+
+from twisted.trial import unittest
+from twisted.test import proto_helpers
+
+from twisted.cred import portal, credentials, checkers
+from twisted.words import ewords, service
+from twisted.words.protocols import irc
+from twisted.spread import pb
+from twisted.internet.defer import Deferred, DeferredList, maybeDeferred, succeed
+from twisted.internet.defer import deferredGenerator as dG, waitForDeferred as wFD
+from twisted.internet import address, reactor
+
+class RealmTestCase(unittest.TestCase):
+    def _entityCreationTest(self, kind):
+        # Kind is "user" or "group"
+        realm = service.InMemoryWordsRealm("realmname")
+
+        name = u'test' + kind.lower()
+        create = getattr(realm, 'create' + kind.title())
+        get = getattr(realm, 'get' + kind.title())
+        flag = 'create' + kind.title() + 'OnRequest'
+        dupExc = getattr(ewords, 'Duplicate' + kind.title())
+        noSuchExc = getattr(ewords, 'NoSuch' + kind.title())
+
+        # Creating should succeed
+        d = wFD(create(name))
+        yield d
+        p = d.getResult()
+        self.assertEqual(p.name, name)
+
+        # Creating the same user again should not
+        d = wFD(create(name))
+        yield d
+        self.assertRaises(dupExc, d.getResult)
+
+        # Getting a non-existent user should succeed if createUserOnRequest is True
+        setattr(realm, flag, True)
+        d = wFD(get(u"new" + kind.lower()))
+        yield d
+        p = d.getResult()
+        self.assertEqual(p.name, "new" + kind.lower())
+
+        # Getting that user again should return the same object
+        d = wFD(get(u"new" + kind.lower()))
+        yield d
+        newp = d.getResult()
+        self.assertIdentical(p, newp)
+
+        # Getting a non-existent user should fail if createUserOnRequest is False
+        setattr(realm, flag, False)
+        d = wFD(get(u"another" + kind.lower()))
+        yield d
+        self.assertRaises(noSuchExc, d.getResult)
+    _entityCreationTest = dG(_entityCreationTest)
+
+
+    def testUserCreation(self):
+        return self._entityCreationTest("User")
+
+
+    def testGroupCreation(self):
+        return self._entityCreationTest("Group")
+
+
+    def testUserRetrieval(self):
+        realm = service.InMemoryWordsRealm("realmname")
+
+        # Make a user to play around with
+        d = wFD(realm.createUser(u"testuser"))
+        yield d
+        user = d.getResult()
+
+        # Make sure getting the user returns the same object
+        d = wFD(realm.getUser(u"testuser"))
+        yield d
+        retrieved = d.getResult()
+        self.assertIdentical(user, retrieved)
+
+        # Make sure looking up the user also returns the same object
+        d = wFD(realm.lookupUser(u"testuser"))
+        yield d
+        lookedUp = d.getResult()
+        self.assertIdentical(retrieved, lookedUp)
+
+        # Make sure looking up a user who does not exist fails
+        d = wFD(realm.lookupUser(u"nosuchuser"))
+        yield d
+        self.assertRaises(ewords.NoSuchUser, d.getResult)
+    testUserRetrieval = dG(testUserRetrieval)
+
+
+    def testUserAddition(self):
+        realm = service.InMemoryWordsRealm("realmname")
+
+        # Create and manually add a user to the realm
+        p = service.User("testuser")
+        d = wFD(realm.addUser(p))
+        yield d
+        user = d.getResult()
+        self.assertIdentical(p, user)
+
+        # Make sure getting that user returns the same object
+        d = wFD(realm.getUser(u"testuser"))
+        yield d
+        retrieved = d.getResult()
+        self.assertIdentical(user, retrieved)
+
+        # Make sure looking up that user returns the same object
+        d = wFD(realm.lookupUser(u"testuser"))
+        yield d
+        lookedUp = d.getResult()
+        self.assertIdentical(retrieved, lookedUp)
+    testUserAddition = dG(testUserAddition)
+
+
+    def testGroupRetrieval(self):
+        realm = service.InMemoryWordsRealm("realmname")
+
+        d = wFD(realm.createGroup(u"testgroup"))
+        yield d
+        group = d.getResult()
+
+        d = wFD(realm.getGroup(u"testgroup"))
+        yield d
+        retrieved = d.getResult()
+
+        self.assertIdentical(group, retrieved)
+
+        d = wFD(realm.getGroup(u"nosuchgroup"))
+        yield d
+        self.assertRaises(ewords.NoSuchGroup, d.getResult)
+    testGroupRetrieval = dG(testGroupRetrieval)
+
+
+    def testGroupAddition(self):
+        realm = service.InMemoryWordsRealm("realmname")
+
+        p = service.Group("testgroup")
+        d = wFD(realm.addGroup(p))
+        yield d
+        d.getResult()
+
+        d = wFD(realm.getGroup(u"testGroup"))
+        yield d
+        group = d.getResult()
+
+        self.assertIdentical(p, group)
+    testGroupAddition = dG(testGroupAddition)
+
+
+    def testGroupUsernameCollision(self):
+        """
+        Try creating a group with the same name as an existing user and
+        assert that it succeeds, since users and groups should not be in the
+        same namespace and collisions should be impossible.
+        """
+        realm = service.InMemoryWordsRealm("realmname")
+
+        d = wFD(realm.createUser(u"test"))
+        yield d
+        user = d.getResult()
+
+        d = wFD(realm.createGroup(u"test"))
+        yield d
+        group = d.getResult()
+    testGroupUsernameCollision = dG(testGroupUsernameCollision)
+
+
+    def testEnumeration(self):
+        realm = service.InMemoryWordsRealm("realmname")
+        d = wFD(realm.createGroup(u"groupone"))
+        yield d
+        d.getResult()
+
+        d = wFD(realm.createGroup(u"grouptwo"))
+        yield d
+        d.getResult()
+
+        groups = wFD(realm.itergroups())
+        yield groups
+        groups = groups.getResult()
+
+        n = [g.name for g in groups]
+        n.sort()
+        self.assertEqual(n, ["groupone", "grouptwo"])
+    testEnumeration = dG(testEnumeration)
+
+
+class TestGroup(object):
+    def __init__(self, name, size, topic):
+        self.name = name
+        self.size = lambda: size
+        self.meta = {'topic': topic}
+
+
+class TestUser(object):
+    def __init__(self, name, groups, signOn, lastMessage):
+        self.name = name
+        self.itergroups = lambda: iter([TestGroup(g, 3, 'Hello') for g in groups])
+        self.signOn = signOn
+        self.lastMessage = lastMessage
+
+
+class TestPortal(object):
+    def __init__(self):
+        self.logins = []
+
+
+    def login(self, credentials, mind, *interfaces):
+        d = Deferred()
+        self.logins.append((credentials, mind, interfaces, d))
+        return d
+
+
+class TestCaseUserAgg(object):
+    def __init__(self, user, realm, factory, address=address.IPv4Address('TCP', '127.0.0.1', 54321)):
+        self.user = user
+        self.transport = proto_helpers.StringTransportWithDisconnection()
+        self.protocol = factory.buildProtocol(address)
+        self.transport.protocol = self.protocol
+        self.user.mind = self.protocol
+        self.protocol.makeConnection(self.transport)
+
+
+    def write(self, stuff):
+        if isinstance(stuff, unicode):
+            stuff = stuff.encode('utf-8')
+        self.protocol.dataReceived(stuff)
+
+
+class IRCProtocolTestCase(unittest.TestCase):
+    STATIC_USERS = [
+        u'useruser', u'otheruser', u'someguy', u'firstuser', u'username',
+        u'userone', u'usertwo', u'userthree', u'someuser']
+
+
+    def setUp(self):
+        self.realm = service.InMemoryWordsRealm("realmname")
+        self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        self.portal = portal.Portal(self.realm, [self.checker])
+        self.factory = service.IRCFactory(self.realm, self.portal)
+
+        c = []
+        for nick in self.STATIC_USERS:
+            c.append(self.realm.createUser(nick))
+            self.checker.addUser(nick.encode('ascii'), nick + "_password")
+        return DeferredList(c)
+
+
+    def _assertGreeting(self, user):
+        """
+        The user has been greeted with the four messages that are (usually)
+        considered to start an IRC session.
+        
+        Asserts that the required responses were received.
+        """
+        # Make sure we get 1-4 at least
+        response = self._response(user)
+        expected = [irc.RPL_WELCOME, irc.RPL_YOURHOST, irc.RPL_CREATED,
+                    irc.RPL_MYINFO]
+        for (prefix, command, args) in response:
+            if command in expected:
+                expected.remove(command)
+        self.failIf(expected, "Missing responses for %r" % (expected,))
+
+
+    def _login(self, user, nick, password=None):
+        if password is None:
+            password = nick + "_password"
+        user.write('PASS %s\r\n' % (password,))
+        user.write('NICK %s extrainfo\r\n' % (nick,))
+
+
+    def _loggedInUser(self, name):
+        d = wFD(self.realm.lookupUser(name))
+        yield d
+        user = d.getResult()
+        agg = TestCaseUserAgg(user, self.realm, self.factory)
+        self._login(agg, name)
+        yield agg
+    _loggedInUser = dG(_loggedInUser)
+
+
+    def _response(self, user, messageType=None):
+        """
+        Extracts the user's response, and returns a list of parsed lines.
+        If messageType is defined, only messages of that type will be returned.
+        """
+        response = user.transport.value().splitlines()
+        user.transport.clear()
+        result = []
+        for message in map(irc.parsemsg, response):
+            if messageType is None or message[1] == messageType:
+                result.append(message)
+        return result
+
+
+    def testPASSLogin(self):
+        user = wFD(self._loggedInUser(u'firstuser'))
+        yield user
+        user = user.getResult()
+        self._assertGreeting(user)
+    testPASSLogin = dG(testPASSLogin)
+
+
+    def test_nickServLogin(self):
+        """
+        Sending NICK without PASS will prompt the user for their password.
+        When the user sends their password to NickServ, it will respond with a
+        greeting.
+        """
+        firstuser = wFD(self.realm.lookupUser(u'firstuser'))
+        yield firstuser
+        firstuser = firstuser.getResult()
+
+        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
+        user.write('NICK firstuser extrainfo\r\n')
+        response = self._response(user, 'PRIVMSG')
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][0], service.NICKSERV)
+        self.assertEqual(response[0][1], 'PRIVMSG')
+        self.assertEqual(response[0][2], ['firstuser', 'Password?'])
+        user.transport.clear()
+
+        user.write('PRIVMSG nickserv firstuser_password\r\n')
+        self._assertGreeting(user)
+    test_nickServLogin = dG(test_nickServLogin)
+
+
+    def testFailedLogin(self):
+        firstuser = wFD(self.realm.lookupUser(u'firstuser'))
+        yield firstuser
+        firstuser = firstuser.getResult()
+
+        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
+        self._login(user, "firstuser", "wrongpass")
+        response = self._response(user, "PRIVMSG")
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][2], ['firstuser', 'Login failed.  Goodbye.'])
+    testFailedLogin = dG(testFailedLogin)
+
+
+    def testLogout(self):
+        logout = []
+        firstuser = wFD(self.realm.lookupUser(u'firstuser'))
+        yield firstuser
+        firstuser = firstuser.getResult()
+
+        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
+        self._login(user, "firstuser")
+        user.protocol.logout = lambda: logout.append(True)
+        user.write('QUIT\r\n')
+        self.assertEqual(logout, [True])
+    testLogout = dG(testLogout)
+
+
+    def testJoin(self):
+        firstuser = wFD(self.realm.lookupUser(u'firstuser'))
+        yield firstuser
+        firstuser = firstuser.getResult()
+
+        somechannel = wFD(self.realm.createGroup(u"somechannel"))
+        yield somechannel
+        somechannel = somechannel.getResult()
+
+        somechannel.meta['topic'] = 'some random topic'
+
+        # Bring in one user, make sure he gets into the channel sanely
+        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
+        self._login(user, "firstuser")
+        user.transport.clear()
+        user.write('JOIN #somechannel\r\n')
+
+        response = self._response(user)
+        self.assertEqual(len(response), 5)
+
+        # Join message
+        self.assertEqual(response[0][0], 'firstuser!firstuser@realmname')
+        self.assertEqual(response[0][1], 'JOIN')
+        self.assertEqual(response[0][2], ['#somechannel'])
+
+        # User list
+        self.assertEqual(response[1][1], '353')
+        self.assertEqual(response[2][1], '366')
+
+        # Topic (or lack thereof, as the case may be)
+        self.assertEqual(response[3][1], '332')
+        self.assertEqual(response[4][1], '333')
+
+
+        # Hook up another client!  It is a CHAT SYSTEM!!!!!!!
+        other = wFD(self._loggedInUser(u'otheruser'))
+        yield other
+        other = other.getResult()
+
+        other.transport.clear()
+        user.transport.clear()
+        other.write('JOIN #somechannel\r\n')
+
+        # At this point, both users should be in the channel
+        response = self._response(other)
+
+        event = self._response(user)
+        self.assertEqual(len(event), 1)
+        self.assertEqual(event[0][0], 'otheruser!otheruser@realmname')
+        self.assertEqual(event[0][1], 'JOIN')
+        self.assertEqual(event[0][2], ['#somechannel'])
+
+        self.assertEqual(response[1][0], 'realmname')
+        self.assertEqual(response[1][1], '353')
+        self.assertIn(response[1][2], [
+                      ['otheruser', '=', '#somechannel', 'firstuser otheruser'],
+                      ['otheruser', '=', '#somechannel', 'otheruser firstuser'],
+                      ])
+    testJoin = dG(testJoin)
+
+
+    def test_joinTopicless(self):
+        """
+        When a user joins a group without a topic, no topic information is
+        sent to that user.
+        """
+        firstuser = wFD(self.realm.lookupUser(u'firstuser'))
+        yield firstuser
+        firstuser = firstuser.getResult()
+
+        somechannel = wFD(self.realm.createGroup(u"somechannel"))
+        yield somechannel
+        somechannel = somechannel.getResult()
+
+        # Bring in one user, make sure he gets into the channel sanely
+        user = TestCaseUserAgg(firstuser, self.realm, self.factory)
+        self._login(user, "firstuser")
+        user.transport.clear()
+        user.write('JOIN #somechannel\r\n')
+
+        response = self._response(user)
+        responseCodes = [r[1] for r in response]
+        self.assertNotIn('332', responseCodes)
+        self.assertNotIn('333', responseCodes)
+    test_joinTopicless = dG(test_joinTopicless)
+
+
+    def testLeave(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        somechannel = wFD(self.realm.createGroup(u"somechannel"))
+        yield somechannel
+        somechannel = somechannel.getResult()
+
+        user.write('JOIN #somechannel\r\n')
+        user.transport.clear()
+
+        other = wFD(self._loggedInUser(u'otheruser'))
+        yield other
+        other = other.getResult()
+
+        other.write('JOIN #somechannel\r\n')
+
+        user.transport.clear()
+        other.transport.clear()
+
+        user.write('PART #somechannel\r\n')
+
+        response = self._response(user)
+        event = self._response(other)
+
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][0], 'useruser!useruser@realmname')
+        self.assertEqual(response[0][1], 'PART')
+        self.assertEqual(response[0][2], ['#somechannel', 'leaving'])
+        self.assertEqual(response, event)
+
+        # Now again, with a part message
+        user.write('JOIN #somechannel\r\n')
+
+        user.transport.clear()
+        other.transport.clear()
+
+        user.write('PART #somechannel :goodbye stupidheads\r\n')
+
+        response = self._response(user)
+        event = self._response(other)
+
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][0], 'useruser!useruser@realmname')
+        self.assertEqual(response[0][1], 'PART')
+        self.assertEqual(response[0][2], ['#somechannel', 'goodbye stupidheads'])
+        self.assertEqual(response, event)
+    testLeave = dG(testLeave)
+
+
+    def testGetTopic(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        group = service.Group("somechannel")
+        group.meta["topic"] = "This is a test topic."
+        group.meta["topic_author"] = "some_fellow"
+        group.meta["topic_date"] = 77777777
+
+        add = wFD(self.realm.addGroup(group))
+        yield add
+        add.getResult()
+
+        user.transport.clear()
+        user.write("JOIN #somechannel\r\n")
+
+        response = self._response(user)
+
+        self.assertEqual(response[3][0], 'realmname')
+        self.assertEqual(response[3][1], '332')
+
+        # XXX Sigh.  irc.parsemsg() is not as correct as one might hope.
+        self.assertEqual(response[3][2], ['useruser', '#somechannel', 'This is a test topic.'])
+        self.assertEqual(response[4][1], '333')
+        self.assertEqual(response[4][2], ['useruser', '#somechannel', 'some_fellow', '77777777'])
+
+        user.transport.clear()
+
+        user.write('TOPIC #somechannel\r\n')
+
+        response = self._response(user)
+
+        self.assertEqual(response[0][1], '332')
+        self.assertEqual(response[0][2], ['useruser', '#somechannel', 'This is a test topic.'])
+        self.assertEqual(response[1][1], '333')
+        self.assertEqual(response[1][2], ['useruser', '#somechannel', 'some_fellow', '77777777'])
+    testGetTopic = dG(testGetTopic)
+
+
+    def testSetTopic(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        add = wFD(self.realm.createGroup(u"somechannel"))
+        yield add
+        somechannel = add.getResult()
+
+        user.write("JOIN #somechannel\r\n")
+
+        other = wFD(self._loggedInUser(u'otheruser'))
+        yield other
+        other = other.getResult()
+
+        other.write("JOIN #somechannel\r\n")
+
+        user.transport.clear()
+        other.transport.clear()
+
+        other.write('TOPIC #somechannel :This is the new topic.\r\n')
+
+        response = self._response(other)
+        event = self._response(user)
+
+        self.assertEqual(response, event)
+
+        self.assertEqual(response[0][0], 'otheruser!otheruser@realmname')
+        self.assertEqual(response[0][1], 'TOPIC')
+        self.assertEqual(response[0][2], ['#somechannel', 'This is the new topic.'])
+
+        other.transport.clear()
+
+        somechannel.meta['topic_date'] = 12345
+        other.write('TOPIC #somechannel\r\n')
+
+        response = self._response(other)
+        self.assertEqual(response[0][1], '332')
+        self.assertEqual(response[0][2], ['otheruser', '#somechannel', 'This is the new topic.'])
+        self.assertEqual(response[1][1], '333')
+        self.assertEqual(response[1][2], ['otheruser', '#somechannel', 'otheruser', '12345'])
+
+        other.transport.clear()
+        other.write('TOPIC #asdlkjasd\r\n')
+
+        response = self._response(other)
+        self.assertEqual(response[0][1], '403')
+    testSetTopic = dG(testSetTopic)
+
+
+    def testGroupMessage(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        add = wFD(self.realm.createGroup(u"somechannel"))
+        yield add
+        somechannel = add.getResult()
+
+        user.write("JOIN #somechannel\r\n")
+
+        other = wFD(self._loggedInUser(u'otheruser'))
+        yield other
+        other = other.getResult()
+
+        other.write("JOIN #somechannel\r\n")
+
+        user.transport.clear()
+        other.transport.clear()
+
+        user.write('PRIVMSG #somechannel :Hello, world.\r\n')
+
+        response = self._response(user)
+        event = self._response(other)
+
+        self.failIf(response)
+        self.assertEqual(len(event), 1)
+        self.assertEqual(event[0][0], 'useruser!useruser@realmname')
+        self.assertEqual(event[0][1], 'PRIVMSG')
+        self.assertEqual(event[0][2], ['#somechannel', 'Hello, world.'])
+    testGroupMessage = dG(testGroupMessage)
+
+
+    def testPrivateMessage(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        other = wFD(self._loggedInUser(u'otheruser'))
+        yield other
+        other = other.getResult()
+
+        user.transport.clear()
+        other.transport.clear()
+
+        user.write('PRIVMSG otheruser :Hello, monkey.\r\n')
+
+        response = self._response(user)
+        event = self._response(other)
+
+        self.failIf(response)
+        self.assertEqual(len(event), 1)
+        self.assertEqual(event[0][0], 'useruser!useruser@realmname')
+        self.assertEqual(event[0][1], 'PRIVMSG')
+        self.assertEqual(event[0][2], ['otheruser', 'Hello, monkey.'])
+
+        user.write('PRIVMSG nousernamedthis :Hello, monkey.\r\n')
+
+        response = self._response(user)
+
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][0], 'realmname')
+        self.assertEqual(response[0][1], '401')
+        self.assertEqual(response[0][2], ['useruser', 'nousernamedthis', 'No such nick/channel.'])
+    testPrivateMessage = dG(testPrivateMessage)
+
+
+    def testOper(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        user.transport.clear()
+        user.write('OPER user pass\r\n')
+        response = self._response(user)
+
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][1], '491')
+    testOper = dG(testOper)
+
+
+    def testGetUserMode(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        user.transport.clear()
+        user.write('MODE useruser\r\n')
+
+        response = self._response(user)
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][0], 'realmname')
+        self.assertEqual(response[0][1], '221')
+        self.assertEqual(response[0][2], ['useruser', '+'])
+    testGetUserMode = dG(testGetUserMode)
+
+
+    def testSetUserMode(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        user.transport.clear()
+        user.write('MODE useruser +abcd\r\n')
+
+        response = self._response(user)
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][1], '472')
+    testSetUserMode = dG(testSetUserMode)
+
+
+    def testGetGroupMode(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        add = wFD(self.realm.createGroup(u"somechannel"))
+        yield add
+        somechannel = add.getResult()
+
+        user.write('JOIN #somechannel\r\n')
+
+        user.transport.clear()
+        user.write('MODE #somechannel\r\n')
+
+        response = self._response(user)
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][1], '324')
+    testGetGroupMode = dG(testGetGroupMode)
+
+
+    def testSetGroupMode(self):
+        user = wFD(self._loggedInUser(u'useruser'))
+        yield user
+        user = user.getResult()
+
+        group = wFD(self.realm.createGroup(u"groupname"))
+        yield group
+        group = group.getResult()
+
+        user.write('JOIN #groupname\r\n')
+
+        user.transport.clear()
+        user.write('MODE #groupname +abcd\r\n')
+
+        response = self._response(user)
+        self.assertEqual(len(response), 1)
+        self.assertEqual(response[0][1], '472')
+    testSetGroupMode = dG(testSetGroupMode)
+
+
+    def testWho(self):
+        group = service.Group('groupname')
+        add = wFD(self.realm.addGroup(group))
+        yield add
+        add.getResult()
+
+        users = []
+        for nick in u'userone', u'usertwo', u'userthree':
+            u = wFD(self._loggedInUser(nick))
+            yield u
+            u = u.getResult()
+            users.append(u)
+            users[-1].write('JOIN #groupname\r\n')
+        for user in users:
+            user.transport.clear()
+
+        users[0].write('WHO #groupname\r\n')
+
+        r = self._response(users[0])
+        self.failIf(self._response(users[1]))
+        self.failIf(self._response(users[2]))
+
+        wantusers = ['userone', 'usertwo', 'userthree']
+        for (prefix, code, stuff) in r[:-1]:
+            self.assertEqual(prefix, 'realmname')
+            self.assertEqual(code, '352')
+
+            (myname, group, theirname, theirhost, theirserver, theirnick, flag, extra) = stuff
+            self.assertEqual(myname, 'userone')
+            self.assertEqual(group, '#groupname')
+            self.failUnless(theirname in wantusers)
+            self.assertEqual(theirhost, 'realmname')
+            self.assertEqual(theirserver, 'realmname')
+            wantusers.remove(theirnick)
+            self.assertEqual(flag, 'H')
+            self.assertEqual(extra, '0 ' + theirnick)
+        self.failIf(wantusers)
+
+        prefix, code, stuff = r[-1]
+        self.assertEqual(prefix, 'realmname')
+        self.assertEqual(code, '315')
+        myname, channel, extra = stuff
+        self.assertEqual(myname, 'userone')
+        self.assertEqual(channel, '#groupname')
+        self.assertEqual(extra, 'End of /WHO list.')
+    testWho = dG(testWho)
+
+
+    def testList(self):
+        user = wFD(self._loggedInUser(u"someuser"))
+        yield user
+        user = user.getResult()
+        user.transport.clear()
+
+        somegroup = wFD(self.realm.createGroup(u"somegroup"))
+        yield somegroup
+        somegroup = somegroup.getResult()
+        somegroup.size = lambda: succeed(17)
+        somegroup.meta['topic'] = 'this is the topic woo'
+
+        # Test one group
+        user.write('LIST #somegroup\r\n')
+
+        r = self._response(user)
+        self.assertEqual(len(r), 2)
+        resp, end = r
+
+        self.assertEqual(resp[0], 'realmname')
+        self.assertEqual(resp[1], '322')
+        self.assertEqual(resp[2][0], 'someuser')
+        self.assertEqual(resp[2][1], 'somegroup')
+        self.assertEqual(resp[2][2], '17')
+        self.assertEqual(resp[2][3], 'this is the topic woo')
+
+        self.assertEqual(end[0], 'realmname')
+        self.assertEqual(end[1], '323')
+        self.assertEqual(end[2][0], 'someuser')
+        self.assertEqual(end[2][1], 'End of /LIST')
+
+        user.transport.clear()
+        # Test all groups
+
+        user.write('LIST\r\n')
+        r = self._response(user)
+        self.assertEqual(len(r), 2)
+
+        fg1, end = r
+
+        self.assertEqual(fg1[1], '322')
+        self.assertEqual(fg1[2][1], 'somegroup')
+        self.assertEqual(fg1[2][2], '17')
+        self.assertEqual(fg1[2][3], 'this is the topic woo')
+
+        self.assertEqual(end[1], '323')
+    testList = dG(testList)
+
+
+    def testWhois(self):
+        user = wFD(self._loggedInUser(u'someguy'))
+        yield user
+        user = user.getResult()
+
+        otherguy = service.User("otherguy")
+        otherguy.itergroups = lambda: iter([
+            service.Group('groupA'),
+            service.Group('groupB')])
+        otherguy.signOn = 10
+        otherguy.lastMessage = time.time() - 15
+
+        add = wFD(self.realm.addUser(otherguy))
+        yield add
+        add.getResult()
+
+        user.transport.clear()
+        user.write('WHOIS otherguy\r\n')
+        r = self._response(user)
+
+        self.assertEqual(len(r), 5)
+        wuser, wserver, idle, channels, end = r
+
+        self.assertEqual(wuser[0], 'realmname')
+        self.assertEqual(wuser[1], '311')
+        self.assertEqual(wuser[2][0], 'someguy')
+        self.assertEqual(wuser[2][1], 'otherguy')
+        self.assertEqual(wuser[2][2], 'otherguy')
+        self.assertEqual(wuser[2][3], 'realmname')
+        self.assertEqual(wuser[2][4], '*')
+        self.assertEqual(wuser[2][5], 'otherguy')
+
+        self.assertEqual(wserver[0], 'realmname')
+        self.assertEqual(wserver[1], '312')
+        self.assertEqual(wserver[2][0], 'someguy')
+        self.assertEqual(wserver[2][1], 'otherguy')
+        self.assertEqual(wserver[2][2], 'realmname')
+        self.assertEqual(wserver[2][3], 'Hi mom!')
+
+        self.assertEqual(idle[0], 'realmname')
+        self.assertEqual(idle[1], '317')
+        self.assertEqual(idle[2][0], 'someguy')
+        self.assertEqual(idle[2][1], 'otherguy')
+        self.assertEqual(idle[2][2], '15')
+        self.assertEqual(idle[2][3], '10')
+        self.assertEqual(idle[2][4], "seconds idle, signon time")
+
+        self.assertEqual(channels[0], 'realmname')
+        self.assertEqual(channels[1], '319')
+        self.assertEqual(channels[2][0], 'someguy')
+        self.assertEqual(channels[2][1], 'otherguy')
+        self.assertEqual(channels[2][2], '#groupA #groupB')
+
+        self.assertEqual(end[0], 'realmname')
+        self.assertEqual(end[1], '318')
+        self.assertEqual(end[2][0], 'someguy')
+        self.assertEqual(end[2][1], 'otherguy')
+        self.assertEqual(end[2][2], 'End of WHOIS list.')
+    testWhois = dG(testWhois)
+
+
+class TestMind(service.PBMind):
+    def __init__(self, *a, **kw):
+        self.joins = []
+        self.parts = []
+        self.messages = []
+        self.meta = []
+
+    def remote_userJoined(self, user, group):
+        self.joins.append((user, group))
+
+
+    def remote_userLeft(self, user, group, reason):
+        self.parts.append((user, group, reason))
+
+
+    def remote_receive(self, sender, recipient, message):
+        self.messages.append((sender, recipient, message))
+
+
+    def remote_groupMetaUpdate(self, group, meta):
+        self.meta.append((group, meta))
+pb.setUnjellyableForClass(TestMind, service.PBMindReference)
+
+
+class PBProtocolTestCase(unittest.TestCase):
+    def setUp(self):
+        self.realm = service.InMemoryWordsRealm("realmname")
+        self.checker = checkers.InMemoryUsernamePasswordDatabaseDontUse()
+        self.portal = portal.Portal(
+            self.realm, [self.checker])
+        self.serverFactory = pb.PBServerFactory(self.portal)
+        self.serverFactory.protocol = self._protocolFactory
+        self.serverFactory.unsafeTracebacks = True
+        self.clientFactory = pb.PBClientFactory()
+        self.clientFactory.unsafeTracebacks = True
+        self.serverPort = reactor.listenTCP(0, self.serverFactory)
+        self.clientConn = reactor.connectTCP(
+            '127.0.0.1',
+            self.serverPort.getHost().port,
+            self.clientFactory)
+
+
+    def _protocolFactory(self, *args, **kw):
+        self._serverProtocol = pb.Broker(0)
+        return self._serverProtocol
+
+
+    def tearDown(self):
+        d3 = Deferred()
+        self._serverProtocol.notifyOnDisconnect(lambda: d3.callback(None))
+        return DeferredList([
+            maybeDeferred(self.serverPort.stopListening),
+            maybeDeferred(self.clientConn.disconnect), d3])
+
+
+    def _loggedInAvatar(self, name, password, mind):
+        creds = credentials.UsernamePassword(name, password)
+        self.checker.addUser(name.encode('ascii'), password)
+        d = self.realm.createUser(name)
+        d.addCallback(lambda ign: self.clientFactory.login(creds, mind))
+        return d
+
+
+    def testGroups(self):
+        mindone = TestMind()
+        one = wFD(self._loggedInAvatar(u"one", "p1", mindone))
+        yield one
+        one = one.getResult()
+
+        mindtwo = TestMind()
+        two = wFD(self._loggedInAvatar(u"two", "p2", mindtwo))
+        yield two
+        two = two.getResult()
+
+        add = wFD(self.realm.createGroup(u"foobar"))
+        yield add
+        add.getResult()
+
+        groupone = wFD(one.join(u"foobar"))
+        yield groupone
+        groupone = groupone.getResult()
+
+        grouptwo = wFD(two.join(u"foobar"))
+        yield grouptwo
+        grouptwo = grouptwo.getResult()
+
+        msg = wFD(groupone.send({"text": "hello, monkeys"}))
+        yield msg
+        msg = msg.getResult()
+
+        leave = wFD(groupone.leave())
+        yield leave
+        leave = leave.getResult()
+    testGroups = dG(testGroups)
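
A minimal sketch of the waitForDeferred/deferredGenerator pattern used by the
tests above (the wFD and dG aliases), shown next to its inlineCallbacks
equivalent. Only twisted.internet.defer is assumed; lookupNick is a
hypothetical stand-in for asynchronous calls such as realm.lookupUser().

    from twisted.internet import defer

    wFD = defer.waitForDeferred
    dG = defer.deferredGenerator

    def lookupNick(nick):
        # Hypothetical helper: returns an already-fired Deferred so the
        # sketch runs without starting a reactor.
        return defer.succeed(nick.upper())

    def oldStyle():
        # Wrap each Deferred with wFD, yield it, then unwrap the value
        # with getResult(), mirroring the tests above.
        user = wFD(lookupNick(u'firstuser'))
        yield user
        user = user.getResult()
        print(user)
    oldStyle = dG(oldStyle)

    @defer.inlineCallbacks
    def newStyle():
        # The same control flow written with inlineCallbacks.
        user = yield lookupNick(u'firstuser')
        print(user)

Calling either function returns a Deferred that fires once the generator has
run to completion.
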
diff --git a/ThirdParty/Twisted/twisted/words/test/test_tap.py b/ThirdParty/Twisted/twisted/words/test/test_tap.py
new file mode 100644
index 0000000..099c104
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_tap.py
@@ -0,0 +1,78 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.cred import credentials, error
+from twisted.words import tap
+from twisted.trial import unittest
+
+
+
+class WordsTap(unittest.TestCase):
+    """
+    Ensures that the twisted.words.tap API works.
+    """
+
+    PASSWD_TEXT = "admin:admin\njoe:foo\n"
+    admin = credentials.UsernamePassword('admin', 'admin')
+    joeWrong = credentials.UsernamePassword('joe', 'bar')
+
+
+    def setUp(self):
+        """
+        Create a file with two users.
+        """
+        self.filename = self.mktemp()
+        self.file = open(self.filename, 'w')
+        self.file.write(self.PASSWD_TEXT)
+        self.file.flush()
+
+
+    def tearDown(self):
+        """
+        Close the dummy user database.
+        """
+        self.file.close()
+
+
+    def test_hostname(self):
+        """
+        Tests that the --hostname parameter gets passed to Options.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--hostname', 'myhost'])
+        self.assertEqual(opt['hostname'], 'myhost')
+
+
+    def test_passwd(self):
+        """
+        Tests the --passwd command for backwards-compatibility.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--passwd', self.file.name])
+        self._loginTest(opt)
+
+
+    def test_auth(self):
+        """
+        Tests that the --auth command generates a checker.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--auth', 'file:'+self.file.name])
+        self._loginTest(opt)
+
+
+    def _loginTest(self, opt):
+        """
+        This method executes both positive and negative authentication
+        tests against whatever credentials checker has been stored in
+        the Options class.
+
+        @param opt: An instance of L{tap.Options}.
+        """
+        self.assertEqual(len(opt['credCheckers']), 1)
+        checker = opt['credCheckers'][0]
+        self.assertFailure(checker.requestAvatarId(self.joeWrong),
+                           error.UnauthorizedLogin)
+        def _gotAvatar(username):
+            self.assertEqual(username, self.admin.username)
+        return checker.requestAvatarId(self.admin).addCallback(_gotAvatar)
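
A small illustrative sketch of the tap.Options behaviour verified above,
assuming a users.passwd file in the strcred "file:" format (one user:password
pair per line) exists next to the script; the hostname and the file name are
made-up example values.

    from twisted.words import tap

    options = tap.Options()
    options.parseOptions(['--hostname', 'chat.example.org',
                          '--auth', 'file:users.passwd'])

    # --auth fills the credCheckers list with an ICredentialsChecker built
    # from the given specification.
    checker = options['credCheckers'][0]
    print(options['hostname'])
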
diff --git a/ThirdParty/Twisted/twisted/words/test/test_xishutil.py b/ThirdParty/Twisted/twisted/words/test/test_xishutil.py
new file mode 100644
index 0000000..b046e6e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_xishutil.py
@@ -0,0 +1,345 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Test cases for twisted.words.xish.utility
+"""
+
+from twisted.trial import unittest
+
+from twisted.python.util import OrderedDict
+from twisted.words.xish import utility
+from twisted.words.xish.domish import Element
+from twisted.words.xish.utility import EventDispatcher
+
+class CallbackTracker:
+    """
+    Test helper for tracking callbacks.
+
+    Increases a counter on each call to L{call} and stores the object
+    passed in the call.
+    """
+
+    def __init__(self):
+        self.called = 0
+        self.obj = None
+
+
+    def call(self, obj):
+        self.called = self.called + 1
+        self.obj = obj
+
+
+
+class OrderedCallbackTracker:
+    """
+    Test helper for tracking callbacks and their order.
+    """
+
+    def __init__(self):
+        self.callList = []
+
+
+    def call1(self, object):
+        self.callList.append(self.call1)
+
+
+    def call2(self, object):
+        self.callList.append(self.call2)
+
+
+    def call3(self, object):
+        self.callList.append(self.call3)
+
+
+
+class EventDispatcherTest(unittest.TestCase):
+    """
+    Tests for L{EventDispatcher}.
+    """
+
+    def testStuff(self):
+        d = EventDispatcher()
+        cb1 = CallbackTracker()
+        cb2 = CallbackTracker()
+        cb3 = CallbackTracker()
+
+        d.addObserver("/message/body", cb1.call)
+        d.addObserver("/message", cb1.call)
+        d.addObserver("/presence", cb2.call)
+        d.addObserver("//event/testevent", cb3.call)
+
+        msg = Element(("ns", "message"))
+        msg.addElement("body")
+
+        pres = Element(("ns", "presence"))
+        pres.addElement("presence")
+
+        d.dispatch(msg)
+        self.assertEqual(cb1.called, 2)
+        self.assertEqual(cb1.obj, msg)
+        self.assertEqual(cb2.called, 0)
+
+        d.dispatch(pres)
+        self.assertEqual(cb1.called, 2)
+        self.assertEqual(cb2.called, 1)
+        self.assertEqual(cb2.obj, pres)
+        self.assertEqual(cb3.called, 0)
+
+        d.dispatch(d, "//event/testevent")
+        self.assertEqual(cb3.called, 1)
+        self.assertEqual(cb3.obj, d)
+
+        d.removeObserver("/presence", cb2.call)
+        d.dispatch(pres)
+        self.assertEqual(cb2.called, 1)
+
+
+    def test_addObserverTwice(self):
+        """
+        Test adding two observers for the same query.
+
+        When the event is dispatched, both of the observers need to be called.
+        """
+        d = EventDispatcher()
+        cb1 = CallbackTracker()
+        cb2 = CallbackTracker()
+
+        d.addObserver("//event/testevent", cb1.call)
+        d.addObserver("//event/testevent", cb2.call)
+        d.dispatch(d, "//event/testevent")
+
+        self.assertEqual(cb1.called, 1)
+        self.assertEqual(cb1.obj, d)
+        self.assertEqual(cb2.called, 1)
+        self.assertEqual(cb2.obj, d)
+
+
+    def test_addObserverInDispatch(self):
+        """
+        Test for registration of an observer during dispatch.
+        """
+        d = EventDispatcher()
+        msg = Element(("ns", "message"))
+        cb = CallbackTracker()
+
+        def onMessage(_):
+            d.addObserver("/message", cb.call)
+
+        d.addOnetimeObserver("/message", onMessage)
+
+        d.dispatch(msg)
+        self.assertEqual(cb.called, 0)
+
+        d.dispatch(msg)
+        self.assertEqual(cb.called, 1)
+
+        d.dispatch(msg)
+        self.assertEqual(cb.called, 2)
+
+
+    def test_addOnetimeObserverInDispatch(self):
+        """
+        Test for registration of a onetime observer during dispatch.
+        """
+        d = EventDispatcher()
+        msg = Element(("ns", "message"))
+        cb = CallbackTracker()
+
+        def onMessage(msg):
+            d.addOnetimeObserver("/message", cb.call)
+
+        d.addOnetimeObserver("/message", onMessage)
+
+        d.dispatch(msg)
+        self.assertEqual(cb.called, 0)
+
+        d.dispatch(msg)
+        self.assertEqual(cb.called, 1)
+
+        d.dispatch(msg)
+        self.assertEqual(cb.called, 1)
+
+
+    def testOnetimeDispatch(self):
+        d = EventDispatcher()
+        msg = Element(("ns", "message"))
+        cb = CallbackTracker()
+
+        d.addOnetimeObserver("/message", cb.call)
+        d.dispatch(msg)
+        self.assertEqual(cb.called, 1)
+        d.dispatch(msg)
+        self.assertEqual(cb.called, 1)
+
+
+    def testDispatcherResult(self):
+        d = EventDispatcher()
+        msg = Element(("ns", "message"))
+        pres = Element(("ns", "presence"))
+        cb = CallbackTracker()
+
+        d.addObserver("/presence", cb.call)
+        result = d.dispatch(msg)
+        self.assertEqual(False, result)
+
+        result = d.dispatch(pres)
+        self.assertEqual(True, result)
+
+
+    def testOrderedXPathDispatch(self):
+        d = EventDispatcher()
+        cb = OrderedCallbackTracker()
+        d.addObserver("/message/body", cb.call2)
+        d.addObserver("/message", cb.call3, -1)
+        d.addObserver("/message/body", cb.call1, 1)
+
+        msg = Element(("ns", "message"))
+        msg.addElement("body")
+        d.dispatch(msg)
+        self.assertEqual(cb.callList, [cb.call1, cb.call2, cb.call3],
+                          "Calls out of order: %s" %
+                          repr([c.__name__ for c in cb.callList]))
+
+
+    # Observers are put into CallbackLists that are then put into dictionaries
+    # keyed by the event trigger. Upon removal of the last observer for a
+    # particular event trigger, the (now empty) CallbackList and corresponding
+    # event trigger should be removed from those dictionaries to prevent
+    # slowdown and memory leakage.
+
+    def test_cleanUpRemoveEventObserver(self):
+        """
+        Test observer clean-up after removeObserver for named events.
+        """
+
+        d = EventDispatcher()
+        cb = CallbackTracker()
+
+        d.addObserver('//event/test', cb.call)
+        d.dispatch(None, '//event/test')
+        self.assertEqual(1, cb.called)
+        d.removeObserver('//event/test', cb.call)
+        self.assertEqual(0, len(d._eventObservers.pop(0)))
+
+
+    def test_cleanUpRemoveXPathObserver(self):
+        """
+        Test observer clean-up after removeObserver for XPath events.
+        """
+
+        d = EventDispatcher()
+        cb = CallbackTracker()
+        msg = Element((None, "message"))
+
+        d.addObserver('/message', cb.call)
+        d.dispatch(msg)
+        self.assertEqual(1, cb.called)
+        d.removeObserver('/message', cb.call)
+        self.assertEqual(0, len(d._xpathObservers.pop(0)))
+
+
+    def test_cleanUpOnetimeEventObserver(self):
+        """
+        Test observer clean-up after onetime named events.
+        """
+
+        d = EventDispatcher()
+        cb = CallbackTracker()
+
+        d.addOnetimeObserver('//event/test', cb.call)
+        d.dispatch(None, '//event/test')
+        self.assertEqual(1, cb.called)
+        self.assertEqual(0, len(d._eventObservers.pop(0)))
+
+
+    def test_cleanUpOnetimeXPathObserver(self):
+        """
+        Test observer clean-up after onetime XPath events.
+        """
+
+        d = EventDispatcher()
+        cb = CallbackTracker()
+        msg = Element((None, "message"))
+
+        d.addOnetimeObserver('/message', cb.call)
+        d.dispatch(msg)
+        self.assertEqual(1, cb.called)
+        self.assertEqual(0, len(d._xpathObservers.pop(0)))
+
+
+    def test_observerRaisingException(self):
+        """
+        Test that exceptions in observers do not bubble up to dispatch.
+
+        The exceptions raised in observers should be logged and other
+        observers should be called as if nothing happened.
+        """
+
+        class OrderedCallbackList(utility.CallbackList):
+            def __init__(self):
+                self.callbacks = OrderedDict()
+
+        class TestError(Exception):
+            pass
+
+        def raiseError(_):
+            raise TestError()
+
+        d = EventDispatcher()
+        cb = CallbackTracker()
+
+        originalCallbackList = utility.CallbackList
+
+        try:
+            utility.CallbackList = OrderedCallbackList
+
+            d.addObserver('//event/test', raiseError)
+            d.addObserver('//event/test', cb.call)
+            try:
+                d.dispatch(None, '//event/test')
+            except TestError:
+                self.fail("TestError raised. Should have been logged instead.")
+
+            self.assertEqual(1, len(self.flushLoggedErrors(TestError)))
+            self.assertEqual(1, cb.called)
+        finally:
+            utility.CallbackList = originalCallbackList
+
+
+
+class XmlPipeTest(unittest.TestCase):
+    """
+    Tests for L{twisted.words.xish.utility.XmlPipe}.
+    """
+
+    def setUp(self):
+        self.pipe = utility.XmlPipe()
+
+
+    def test_sendFromSource(self):
+        """
+        Send an element from the source and observe it from the sink.
+        """
+        def cb(obj):
+            called.append(obj)
+
+        called = []
+        self.pipe.sink.addObserver('/test[@xmlns="testns"]', cb)
+        element = Element(('testns', 'test'))
+        self.pipe.source.send(element)
+        self.assertEqual([element], called)
+
+
+    def test_sendFromSink(self):
+        """
+        Send an element from the sink and observe it from the source.
+        """
+        def cb(obj):
+            called.append(obj)
+
+        called = []
+        self.pipe.source.addObserver('/test[@xmlns="testns"]', cb)
+        element = Element(('testns', 'test'))
+        self.pipe.sink.send(element)
+        self.assertEqual([element], called)
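
An illustrative sketch of the EventDispatcher behaviour exercised above:
XPath-style observers fire when a matching element is dispatched, while
"//event/..." observers fire only when that event name is dispatched
explicitly. The namespace and event name below are arbitrary.

    from twisted.words.xish.domish import Element
    from twisted.words.xish.utility import EventDispatcher

    seen = []

    dispatcher = EventDispatcher()
    dispatcher.addObserver("/message/body", seen.append)   # XPath observer
    dispatcher.addObserver("//event/custom", seen.append)  # named-event observer

    message = Element(("jabber:client", "message"))
    message.addElement("body")

    dispatcher.dispatch(message)                        # matches /message/body
    dispatcher.dispatch(dispatcher, "//event/custom")   # fires the named event
    assert len(seen) == 2
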
diff --git a/ThirdParty/Twisted/twisted/words/test/test_xmlstream.py b/ThirdParty/Twisted/twisted/words/test/test_xmlstream.py
new file mode 100644
index 0000000..4eb2446
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_xmlstream.py
@@ -0,0 +1,224 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.xish.xmlstream}.
+"""
+
+from twisted.internet import protocol
+from twisted.python import failure
+from twisted.trial import unittest
+from twisted.words.xish import domish, utility, xmlstream
+
+class XmlStreamTest(unittest.TestCase):
+    def setUp(self):
+        self.connectionLostMsg = "no reason"
+        self.outlist = []
+        self.xmlstream = xmlstream.XmlStream()
+        self.xmlstream.transport = self
+        self.xmlstream.transport.write = self.outlist.append
+
+
+    def loseConnection(self):
+        """
+        Stub loseConnection because we are a transport.
+        """
+        self.xmlstream.connectionLost(failure.Failure(
+            Exception(self.connectionLostMsg)))
+
+
+    def test_send(self):
+        """
+        Calling L{xmlstream.XmlStream.send} results in the data being written
+        to the transport.
+        """
+        self.xmlstream.connectionMade()
+        self.xmlstream.send("<root>")
+        self.assertEqual(self.outlist[0], "<root>")
+
+
+    def test_receiveRoot(self):
+        """
+        Receiving the starttag of the root element results in stream start.
+        """
+        streamStarted = []
+
+        def streamStartEvent(rootelem):
+            streamStarted.append(None)
+
+        self.xmlstream.addObserver(xmlstream.STREAM_START_EVENT,
+                                   streamStartEvent)
+        self.xmlstream.connectionMade()
+        self.xmlstream.dataReceived("<root>")
+        self.assertEqual(1, len(streamStarted))
+
+
+    def test_receiveBadXML(self):
+        """
+        Receiving malformed XML results in an L{STREAM_ERROR_EVENT}.
+        """
+        streamError = []
+        streamEnd = []
+
+        def streamErrorEvent(reason):
+            streamError.append(reason)
+
+        def streamEndEvent(_):
+            streamEnd.append(None)
+
+        self.xmlstream.addObserver(xmlstream.STREAM_ERROR_EVENT,
+                                   streamErrorEvent)
+        self.xmlstream.addObserver(xmlstream.STREAM_END_EVENT,
+                                   streamEndEvent)
+        self.xmlstream.connectionMade()
+
+        self.xmlstream.dataReceived("<root>")
+        self.assertEqual(0, len(streamError))
+        self.assertEqual(0, len(streamEnd))
+
+        self.xmlstream.dataReceived("<child><unclosed></child>")
+        self.assertEqual(1, len(streamError))
+        self.assertTrue(streamError[0].check(domish.ParserError))
+        self.assertEqual(1, len(streamEnd))
+
+
+    def test_streamEnd(self):
+        """
+        Ending the stream fires a L{STREAM_END_EVENT}.
+        """
+        streamEnd = []
+
+        def streamEndEvent(reason):
+            streamEnd.append(reason)
+
+        self.xmlstream.addObserver(xmlstream.STREAM_END_EVENT,
+                                   streamEndEvent)
+        self.xmlstream.connectionMade()
+        self.loseConnection()
+        self.assertEqual(1, len(streamEnd))
+        self.assertIsInstance(streamEnd[0], failure.Failure)
+        self.assertEqual(streamEnd[0].getErrorMessage(),
+                self.connectionLostMsg)
+
+
+
+class DummyProtocol(protocol.Protocol, utility.EventDispatcher):
+    """
+    I am a protocol with an event dispatcher without further processing.
+
+    This protocol is only used for testing XmlStreamFactoryMixin to make
+    sure the bootstrap observers are added to the protocol instance.
+    """
+
+    def __init__(self, *args, **kwargs):
+        self.args = args
+        self.kwargs = kwargs
+        self.observers = []
+
+        utility.EventDispatcher.__init__(self)
+
+
+
+class BootstrapMixinTest(unittest.TestCase):
+    """
+    Tests for L{xmlstream.BootstrapMixin}.
+
+    @ivar factory: Instance of the factory or mixin under test.
+    """
+
+    def setUp(self):
+        self.factory = xmlstream.BootstrapMixin()
+
+
+    def test_installBootstraps(self):
+        """
+        Dispatching an event fires registered bootstrap observers.
+        """
+        called = []
+
+        def cb(data):
+            called.append(data)
+
+        dispatcher = DummyProtocol()
+        self.factory.addBootstrap('//event/myevent', cb)
+        self.factory.installBootstraps(dispatcher)
+
+        dispatcher.dispatch(None, '//event/myevent')
+        self.assertEqual(1, len(called))
+
+
+    def test_addAndRemoveBootstrap(self):
+        """
+        Test addition and removal of a bootstrap event handler.
+        """
+
+        called = []
+
+        def cb(data):
+            called.append(data)
+
+        self.factory.addBootstrap('//event/myevent', cb)
+        self.factory.removeBootstrap('//event/myevent', cb)
+
+        dispatcher = DummyProtocol()
+        self.factory.installBootstraps(dispatcher)
+
+        dispatcher.dispatch(None, '//event/myevent')
+        self.assertFalse(called)
+
+
+
+class GenericXmlStreamFactoryTestsMixin(BootstrapMixinTest):
+    """
+    Generic tests for L{XmlStream} factories.
+    """
+
+    def setUp(self):
+        self.factory = xmlstream.XmlStreamFactory()
+
+
+    def test_buildProtocolInstallsBootstraps(self):
+        """
+        The protocol factory installs bootstrap event handlers on the protocol.
+        """
+        called = []
+
+        def cb(data):
+            called.append(data)
+
+        self.factory.addBootstrap('//event/myevent', cb)
+
+        xs = self.factory.buildProtocol(None)
+        xs.dispatch(None, '//event/myevent')
+
+        self.assertEqual(1, len(called))
+
+
+    def test_buildProtocolStoresFactory(self):
+        """
+        The protocol factory is saved in the protocol.
+        """
+        xs = self.factory.buildProtocol(None)
+        self.assertIdentical(self.factory, xs.factory)
+
+
+
+class XmlStreamFactoryMixinTest(GenericXmlStreamFactoryTestsMixin):
+    """
+    Tests for L{xmlstream.XmlStreamFactoryMixin}.
+    """
+
+    def setUp(self):
+        self.factory = xmlstream.XmlStreamFactoryMixin(None, test=None)
+        self.factory.protocol = DummyProtocol
+
+
+    def test_buildProtocolFactoryArguments(self):
+        """
+        Arguments passed to the factory are passed to protocol on
+        instantiation.
+        """
+        xs = self.factory.buildProtocol(None)
+
+        self.assertEqual((None,), xs.args)
+        self.assertEqual({'test': None}, xs.kwargs)
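
A short sketch of the bootstrap-observer behaviour tested above: observers
registered on the factory with addBootstrap are installed on every protocol
instance it builds. The event name is arbitrary.

    from twisted.words.xish import xmlstream

    events = []

    factory = xmlstream.XmlStreamFactory()
    factory.addBootstrap('//event/example', events.append)

    xs = factory.buildProtocol(None)   # bootstrap observers are installed here
    xs.dispatch("payload", '//event/example')
    assert events == ["payload"]
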
diff --git a/ThirdParty/Twisted/twisted/words/test/test_xmpproutertap.py b/ThirdParty/Twisted/twisted/words/test/test_xmpproutertap.py
new file mode 100644
index 0000000..aaadf34
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_xmpproutertap.py
@@ -0,0 +1,84 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Tests for L{twisted.words.xmpproutertap}.
+"""
+
+from twisted.application import internet
+from twisted.trial import unittest
+from twisted.words import xmpproutertap as tap
+from twisted.words.protocols.jabber import component
+
+class XMPPRouterTapTest(unittest.TestCase):
+
+    def test_port(self):
+        """
+        The port option is recognised as a parameter.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--port', '7001'])
+        self.assertEqual(opt['port'], '7001')
+
+
+    def test_portDefault(self):
+        """
+        The port option defaults to 'tcp:5347:interface=127.0.0.1'.
+        """
+        opt = tap.Options()
+        opt.parseOptions([])
+        self.assertEqual(opt['port'], 'tcp:5347:interface=127.0.0.1')
+
+
+    def test_secret(self):
+        """
+        The secret option is recognised as a parameter.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--secret', 'hushhush'])
+        self.assertEqual(opt['secret'], 'hushhush')
+
+
+    def test_secretDefault(self):
+        """
+        The secret option has 'secret' as its default value.
+        """
+        opt = tap.Options()
+        opt.parseOptions([])
+        self.assertEqual(opt['secret'], 'secret')
+
+
+    def test_verbose(self):
+        """
+        The verbose option is recognised as a flag.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--verbose'])
+        self.assertTrue(opt['verbose'])
+
+
+    def test_makeService(self):
+        """
+        The service gets set up with a router and factory.
+        """
+        opt = tap.Options()
+        opt.parseOptions([])
+        s = tap.makeService(opt)
+        self.assertIsInstance(s, internet.StreamServerEndpointService)
+        self.assertEqual('127.0.0.1', s.endpoint._interface)
+        self.assertEqual(5347, s.endpoint._port)
+        factory = s.factory
+        self.assertIsInstance(factory, component.XMPPComponentServerFactory)
+        self.assertIsInstance(factory.router, component.Router)
+        self.assertEqual('secret', factory.secret)
+        self.assertFalse(factory.logTraffic)
+
+
+    def test_makeServiceVerbose(self):
+        """
+        The verbose flag enables traffic logging.
+        """
+        opt = tap.Options()
+        opt.parseOptions(['--verbose'])
+        s = tap.makeService(opt)
+        self.assertTrue(s.factory.logTraffic)
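
An illustrative sketch of the xmpproutertap workflow covered by these tests;
the port string and secret are arbitrary example values.

    from twisted.words import xmpproutertap as tap

    options = tap.Options()
    options.parseOptions(['--port', 'tcp:5347:interface=127.0.0.1',
                          '--secret', 'hushhush', '--verbose'])

    # makeService returns a StreamServerEndpointService whose factory is an
    # XMPPComponentServerFactory wired to a component.Router.
    service = tap.makeService(options)
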
diff --git a/ThirdParty/Twisted/twisted/words/test/test_xpath.py b/ThirdParty/Twisted/twisted/words/test/test_xpath.py
new file mode 100644
index 0000000..9dbda0f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/test/test_xpath.py
@@ -0,0 +1,260 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+from twisted.trial import unittest
+import sys, os
+
+from twisted.words.xish.domish import Element
+from twisted.words.xish.xpath import XPathQuery
+from twisted.words.xish import xpath
+
+class XPathTest(unittest.TestCase):
+    def setUp(self):
+        # Build element:
+        # <foo xmlns='testns' attrib1='value1' attrib3="user@host/resource">
+        #     somecontent
+        #     <bar>
+        #        <foo>
+        #         <gar>DEF</gar>
+        #        </foo>
+        #     </bar>
+        #     somemorecontent
+        #     <bar attrib2="value2">
+        #        <bar>
+        #          <foo/>
+        #          <gar>ABC</gar>
+        #        </bar>
+        #     <bar/>
+        #     <bar attrib4='value4' attrib5='value5'>
+        #        <foo/>
+        #        <gar>JKL</gar>
+        #     </bar>
+        #     <bar attrib4='value4' attrib5='value4'>
+        #        <foo/>
+        #        <gar>MNO</gar>
+        #     </bar>
+        #     <bar attrib4='value4' attrib5='value6'/>
+        # </foo>
+        self.e = Element(("testns", "foo"))
+        self.e["attrib1"] = "value1"
+        self.e["attrib3"] = "user at host/resource"
+        self.e.addContent("somecontent")
+        self.bar1 = self.e.addElement("bar")
+        self.subfoo = self.bar1.addElement("foo")
+        self.gar1 = self.subfoo.addElement("gar")
+        self.gar1.addContent("DEF")
+        self.e.addContent("somemorecontent")
+        self.bar2 = self.e.addElement("bar")
+        self.bar2["attrib2"] = "value2"
+        self.bar3 = self.bar2.addElement("bar")
+        self.subfoo2 = self.bar3.addElement("foo")
+        self.gar2 = self.bar3.addElement("gar")
+        self.gar2.addContent("ABC")
+        self.bar4 = self.e.addElement("bar")
+        self.bar5 = self.e.addElement("bar")
+        self.bar5["attrib4"] = "value4"
+        self.bar5["attrib5"] = "value5"
+        self.subfoo3 = self.bar5.addElement("foo")
+        self.gar3 = self.bar5.addElement("gar")
+        self.gar3.addContent("JKL")
+        self.bar6 = self.e.addElement("bar")
+        self.bar6["attrib4"] = "value4"
+        self.bar6["attrib5"] = "value4"
+        self.subfoo4 = self.bar6.addElement("foo")
+        self.gar4 = self.bar6.addElement("gar")
+        self.gar4.addContent("MNO")
+        self.bar7 = self.e.addElement("bar")
+        self.bar7["attrib4"] = "value4"
+        self.bar7["attrib5"] = "value6"
+
+    def test_staticMethods(self):
+        """
+        Test basic operation of the static methods.
+        """
+        self.assertEqual(xpath.matches("/foo/bar", self.e),
+                          True)
+        self.assertEqual(xpath.queryForNodes("/foo/bar", self.e),
+                          [self.bar1, self.bar2, self.bar4,
+                           self.bar5, self.bar6, self.bar7])
+        self.assertEqual(xpath.queryForString("/foo", self.e),
+                          "somecontent")
+        self.assertEqual(xpath.queryForStringList("/foo", self.e),
+                          ["somecontent", "somemorecontent"])
+
+    def test_locationFooBar(self):
+        """
+        Test matching foo with child bar.
+        """
+        xp = XPathQuery("/foo/bar")
+        self.assertEqual(xp.matches(self.e), 1)
+
+    def test_locationFooBarFoo(self):
+        """
+        Test finding foos at the second level.
+        """
+        xp = XPathQuery("/foo/bar/foo")
+        self.assertEqual(xp.matches(self.e), 1)
+        self.assertEqual(xp.queryForNodes(self.e), [self.subfoo,
+                                                     self.subfoo3,
+                                                     self.subfoo4])
+
+    def test_locationNoBar3(self):
+        """
+        Test not finding bar3.
+        """
+        xp = XPathQuery("/foo/bar3")
+        self.assertEqual(xp.matches(self.e), 0)
+
+    def test_locationAllChilds(self):
+        """
+        Test finding the children of foo.
+        """
+        xp = XPathQuery("/foo/*")
+        self.assertEqual(xp.matches(self.e), True)
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar1, self.bar2,
+                                                     self.bar4, self.bar5,
+                                                     self.bar6, self.bar7])
+
+    def test_attribute(self):
+        """
+        Test matching foo with attribute.
+        """
+        xp = XPathQuery("/foo[@attrib1]")
+        self.assertEqual(xp.matches(self.e), True)
+
+    def test_attributeWithValueAny(self):
+        """
+        Test finding nodes with an attribute having a given value.
+        """
+        xp = XPathQuery("/foo/*[@attrib2='value2']")
+        self.assertEqual(xp.matches(self.e), True)
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar2])
+
+    def test_position(self):
+        """
+        Test finding element at position.
+        """
+        xp = XPathQuery("/foo/bar[2]")
+        self.assertEqual(xp.matches(self.e), 1)
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar1])
+
+    test_position.todo = "XPath queries with position are not working."
+
+    def test_namespaceFound(self):
+        """
+        Test matching node with namespace.
+        """
+        xp = XPathQuery("/foo[@xmlns='testns']/bar")
+        self.assertEqual(xp.matches(self.e), 1)
+
+    def test_namespaceNotFound(self):
+        """
+        Test not matching node with wrong namespace.
+        """
+        xp = XPathQuery("/foo[@xmlns='badns']/bar2")
+        self.assertEqual(xp.matches(self.e), 0)
+
+    def test_attributeWithValue(self):
+        """
+        Test matching node with attribute having value.
+        """
+        xp = XPathQuery("/foo[@attrib1='value1']")
+        self.assertEqual(xp.matches(self.e), 1)
+
+    def test_queryForString(self):
+        """
+        Test for queryForString and queryForStringList.
+        """
+        xp = XPathQuery("/foo")
+        self.assertEqual(xp.queryForString(self.e), "somecontent")
+        self.assertEqual(xp.queryForStringList(self.e),
+                          ["somecontent", "somemorecontent"])
+
+    def test_queryForNodes(self):
+        """
+        Test finding nodes.
+        """
+        xp = XPathQuery("/foo/bar")
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar1, self.bar2,
+                                                     self.bar4, self.bar5,
+                                                     self.bar6, self.bar7])
+
+    def test_textCondition(self):
+        """
+        Test matching a node with given text.
+        """
+        xp = XPathQuery("/foo[text() = 'somecontent']")
+        self.assertEqual(xp.matches(self.e), True)
+
+    def test_textNotOperator(self):
+        """
+        Test for not operator.
+        """
+        xp = XPathQuery("/foo[not(@nosuchattrib)]")
+        self.assertEqual(xp.matches(self.e), True)
+
+    def test_anyLocationAndText(self):
+        """
+        Test finding any nodes named gar and getting their text contents.
+        """
+        xp = XPathQuery("//gar")
+        self.assertEqual(xp.matches(self.e), True)
+        self.assertEqual(xp.queryForNodes(self.e), [self.gar1, self.gar2,
+                                                     self.gar3, self.gar4])
+        self.assertEqual(xp.queryForStringList(self.e), ["DEF", "ABC",
+                                                          "JKL", "MNO"])
+
+    def test_anyLocation(self):
+        """
+        Test finding any nodes named bar.
+        """
+        xp = XPathQuery("//bar")
+        self.assertEqual(xp.matches(self.e), True)
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar1, self.bar2,
+                                                     self.bar3, self.bar4,
+                                                     self.bar5, self.bar6,
+                                                     self.bar7])
+
+    def test_anyLocationQueryForString(self):
+        """
+        L{XPathQuery.queryForString} should raise a L{NotImplementedError}
+        for any location.
+        """
+        xp = XPathQuery("//bar")
+        self.assertRaises(NotImplementedError, xp.queryForString, None)
+
+    def test_andOperator(self):
+        """
+        Test boolean and operator in condition.
+        """
+        xp = XPathQuery("//bar[@attrib4='value4' and @attrib5='value5']")
+        self.assertEqual(xp.matches(self.e), True)
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar5])
+
+    def test_orOperator(self):
+        """
+        Test boolean or operator in condition.
+        """
+        xp = XPathQuery("//bar[@attrib5='value4' or @attrib5='value5']")
+        self.assertEqual(xp.matches(self.e), True)
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar5, self.bar6])
+
+    def test_booleanOperatorsParens(self):
+        """
+        Test multiple boolean operators in condition with parens.
+        """
+        xp = XPathQuery("""//bar[@attrib4='value4' and
+                                 (@attrib5='value4' or @attrib5='value6')]""")
+        self.assertEqual(xp.matches(self.e), True)
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar6, self.bar7])
+
+    def test_booleanOperatorsNoParens(self):
+        """
+        Test multiple boolean operators in condition without parens.
+        """
+        xp = XPathQuery("""//bar[@attrib5='value4' or
+                                 @attrib5='value5' or
+                                 @attrib5='value6']""")
+        self.assertEqual(xp.matches(self.e), True)
+        self.assertEqual(xp.queryForNodes(self.e), [self.bar5, self.bar6, self.bar7])
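
A compact sketch of the XPathQuery API exercised above, run against a small
ad-hoc domish tree rather than the fixture built in setUp; the namespace and
attribute names are arbitrary.

    from twisted.words.xish.domish import Element
    from twisted.words.xish.xpath import XPathQuery

    root = Element(("testns", "foo"))
    root.addContent("hello")
    bar = root.addElement("bar")
    bar["attr"] = "value"

    query = XPathQuery("/foo/*[@attr='value']")
    assert query.matches(root)                  # the condition is satisfied
    assert query.queryForNodes(root) == [bar]   # the matching child elements
    assert XPathQuery("/foo").queryForString(root) == "hello"
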
diff --git a/ThirdParty/Twisted/twisted/words/topfiles/NEWS b/ThirdParty/Twisted/twisted/words/topfiles/NEWS
new file mode 100644
index 0000000..16a6a9e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/topfiles/NEWS
@@ -0,0 +1,378 @@
+Ticket numbers in this file can be looked up by visiting
+http://twistedmatrix.com/trac/ticket/<number>
+
+Twisted Words 12.3.0 (2012-12-20)
+=================================
+
+Improved Documentation
+----------------------
+ - Each Twisted Words code example now documents, in its own
+   description, how to run it. (#5589)
+
+
+Twisted Words 12.2.0 (2012-08-26)
+=================================
+
+No significant changes have been made for this release.
+
+Other
+-----
+ - #5752, #5753
+
+
+Twisted Words 12.1.0 (2012-06-02)
+=================================
+
+Bugfixes
+--------
+ - twisted.words.protocols.irc.DccChatFactory.buildProtocol now
+   returns the protocol object that it creates (#3179)
+ - twisted.words.im no longer offers an empty threat of a rewrite on
+   import. (#5598)
+
+Other
+-----
+ - #5555, #5595
+
+
+Twisted Words 12.0.0 (2012-02-10)
+=================================
+
+Improved Documentation
+----------------------
+ - twisted.words.im.basechat now has improved API documentation.
+   (#2458)
+
+Other
+-----
+ - #5401
+
+
+Twisted Words 11.1.0 (2011-11-15)
+=================================
+
+Features
+--------
+ - twisted.words.protocols.irc.IRCClient now uses a PING heartbeat as
+   a keepalive to avoid losing an IRC connection without being aware
+   of it. (#5047)
+
+Bugfixes
+--------
+ - twisted.words.protocols.irc.IRCClient now replies only once to
+   known CTCP queries per message and not at all to unknown CTCP
+   queries. (#5029)
+ - IRCClient.msg now determines a safe maximum command length,
+   drastically reducing the chance of relayed text being truncated on
+   the server side. (#5176)
+
+Deprecations and Removals
+-------------------------
+ - twisted.words.protocols.irc.IRCClient.me was deprecated in Twisted
+   9.0 and has been removed. Use IRCClient.describe instead. (#5059)
+
+Other
+-----
+ - #5025, #5330
+
+
+Twisted Words 11.0.0 (2011-04-01)
+=================================
+
+Features
+--------
+ - twisted.words.protocols.irc.IRCClient now has an invite method.
+   (#4820)
+
+Bugfixes
+--------
+ - twisted.words.protocols.irc.IRCClient.say is once again able to
+   send messages when using the default value for the length limit
+   argument. (#4758)
+ - twisted.words.protocols.jabber.jstrports is once again able to
+   parse jstrport description strings. (#4771)
+ - twisted.words.protocols.msn.NotificationClient now calls the
+   loginFailure callback when it is unable to connect to the Passport
+   server due to missing SSL dependencies. (#4801)
+ - twisted.words.protocols.jabber.xmpp_stringprep now always uses
+   Unicode version 3.2 for stringprep normalization. (#4850)
+
+Improved Documentation
+----------------------
+ - Removed the non-working AIM bot example, which depended on the obsolete
+   twisted.words.protocols.toc functionality. (#4007)
+ - Outdated GUI-related information was removed from the IM howto.
+   (#4054)
+
+Deprecations and Removals
+-------------------------
+ - Remove twisted.words.protocols.toc, which was largely non-working
+   and useless since AOL disabled TOC on their AIM network. (#4363)
+
+Other
+-----
+ - #4733, #4902
+
+
+Twisted Words 10.2.0 (2010-11-29)
+=================================
+
+Features
+--------
+ - twisted.words.protocols.irc.IRCClient.msg now enforces a maximum
+   length for messages, splitting up messages that are too long.
+   (#4416)
+
+Bugfixes
+--------
+ - twisted.words.protocols.irc.IRCClient no longer invokes privmsg()
+   in the default noticed() implementation. (#4419)
+ - twisted.words.im.ircsupport.IRCProto now sends the correct name in
+   the USER command. (#4641)
+
+Deprecations and Removals
+-------------------------
+ - Remove twisted.words.im.proxyui and twisted.words.im.tap. (#1823)
+
+
+Twisted Words 10.1.0 (2010-06-27)
+=================================
+
+Bugfixes
+--------
+ - twisted.words.im.basechat.ChatUI now has a functional
+   contactChangedNick with unit tests. (#229)
+ - twisted.words.protocols.jabber.error.StanzaError now correctly sets
+   a default error type and code for the remote-server-timeout
+   condition (#4311)
+ - twisted.words.protocols.jabber.xmlstream.ListenAuthenticator now
+   uses unicode objects for session identifiers (#4345)
+
+
+Twisted Words 10.0.0 (2010-03-01)
+=================================
+
+Features
+--------
+ - twisted.words.protocols.irc.IRCClient.irc_MODE now takes ISUPPORT
+   parameters into account when parsing mode messages with arguments
+   that take parameters (#3296)
+
+Bugfixes
+--------
+ - When twisted.words.protocols.irc.IRCClient's versionNum and
+   versionEnv attributes are set to None, they will no longer be
+   included in the client's response to CTCP VERSION queries. (#3660)
+
+ - twisted.words.protocols.jabber.xmlstream.hashPassword now only
+   accepts unicode as input (#3741, #3742, #3847)
+
+Other
+-----
+ - #2503, #4066, #4261
+
+
+Twisted Words 9.0.0 (2009-11-24)
+================================
+
+Features
+--------
+ - IRCClient.describe is a new method meant to replace IRCClient.me to send
+   CTCP ACTION messages with less confusing behavior (#3910)
+ - The XMPP client protocol implementation now supports ANONYMOUS SASL
+   authentication (#4067)
+ - The IRC client protocol implementation now has better support for the
+   ISUPPORT server->client message, storing the data in a new
+   ServerSupportedFeatures object accessible via IRCClient.supported (#3285)
+
+Fixes
+-----
+ - The twisted.words IRC server now always sends an MOTD, which at least makes
+   Pidgin able to successfully connect to a twisted.words IRC server (#2385)
+ - The IRC client will now dispatch "RPL MOTD" messages received before a
+   "RPL MOTD START" instead of raising an exception (#3676)
+ - The IRC client protocol implementation no longer updates its 'nickname'
+   attribute directly; instead, that attribute will be updated when the server
+   acknowledges the change (#3377)
+ - The IRC client protocol implementation now supports falling back to another
+   nickname when a nick change request fails (#3377, #4010)
+
+Deprecations and Removals
+-------------------------
+ - The TOC protocol implementation is now deprecated, since the protocol itself
+   has been deprecated and obsolete for quite a long time (#3580)
+ - The gui "im" application has been removed, since it relied on GTK1, which is
+   hard to find these days (#3699, #3340)
+
+Other
+-----
+ - #2763, #3540, #3647, #3750, #3895, #3968, #4050
+
+Words 8.2.0 (2008-12-16)
+========================
+
+Features
+--------
+ - There is now a standalone XMPP router included in twisted.words: it can be
+   used with the 'twistd xmpp-router' command line (#3407)
+ - A server factory for Jabber XML Streams has been added (#3435)
+ - Domish now allows for iterating child elements with specific qualified names
+   (#2429)
+ - IRCClient now has a 'back' method which removes the away status (#3366)
+ - IRCClient now has a 'whois' method (#3133)
+
+Fixes
+-----
+ - The IRC Client implementation can now deal with compound mode changes (#3230)
+ - The MSN protocol implementation no longer requires the CVR0 protocol to
+   be included in the VER command (#3394)
+ - In the IRC server implementation, topic messages will no longer be sent for
+   a group which has no topic (#2204)
+ - An infinite loop (which caused infinite memory usage) in irc.split has been
+   fixed.  This was triggered any time a message that starts with a delimiter
+   was sent (#3446)
+ - Jabber's toResponse now generates a valid stanza even when stanzaType is not
+   specified (#3467)
+ - The lifetime of authenticator instances in XmlStreamServerFactory is no
+   longer artificially extended (#3464)
+
+Other
+-----
+ - #3365
+
+
+8.1.0 (2008-05-18)
+==================
+
+Features
+--------
+ - JID objects now have a nice __repr__ (#3156)
+ - Extending XMPP protocols is now easier (#2178)
+
+Fixes
+-----
+ - The deprecated mktap API is no longer used (#3127)
+ - A bug whereby one-time XMPP observers would be enabled permanently was fixed
+   (#3066)
+
+
+8.0.0 (2008-03-17)
+==================
+
+Features
+--------
+ - Provide function for creating XMPP response stanzas. (#2614, #2614)
+ - Log exceptions raised in Xish observers. (#2616)
+ - Add 'and' and 'or' operators for Xish XPath expressions. (#2502)
+ - Make JIDs hashable. (#2770)
+
+Fixes
+-----
+ - Respect the hostname and servername parameters to IRCClient.register. (#1649)
+ - Make EventDispatcher remove empty callback lists. (#1652)
+ - Use legacy base64 API to support Python 2.3 (#2461)
+ - Fix support of DIGEST-MD5 challenge parsing with multi-valued directives.
+   (#2606)
+ - Fix reuse of dict of prefixes in domish.Element.toXml (#2609)
+ - Properly process XMPP stream headers (#2615)
+ - Use proper namespace for XMPP stream errors. (#2630)
+ - Properly parse XMPP stream errors. (#2771)
+ - Fix toResponse for XMPP stanzas without an id attribute. (#2773)
+ - Move XMPP stream header processing to authenticators. (#2772)
+
+Misc
+----
+ - #2617, #2640, #2741, #2063, #2570, #2847
+
+
+0.5.0 (2007-01-06)
+==================
+
+Features
+--------
+ - (Jabber) IQ.send now optionally has a 'timeout' parameter which
+   specifies a time at which to errback the Deferred with a
+   TimeoutError (#2218)
+ - (Jabber) SASL authentication, resource binding and session
+   establishment were added. (#1046) The following were done in
+   support of this change:
+   - Rework ConnectAuthenticator to work with initializer objects that
+     provide a stream initialization step.
+   - Reimplement iq:auth as an initializer.
+   - Reimplement TLS negotiation as an initializer.
+   - Add XMPPAuthenticator as a XMPP 1.0 client authenticator (only), along
+     with XMPPClientFactory.
+   - Add support for working with pre-XMPP-1.0 error stanzas.
+   - Remove hasFeature() from XmlStream as you can test (uri, name) in
+     xs.features.
+   - Add sendFooter() and sendStreamError() to XmlStream
+
+Fixes
+-----
+ - (Jabber) Deferreds from queries which were never resolved before
+   a lost connection are now errbacked (#2006)
+ - (Jabber) servers which didn't send a 'realm' directive in
+   authentication challenges no longer cause the Jabber client to
+   choke (#2098)
+ - (MSN) error responses are now properly turned into errbacks (#2019)
+ - (IRC) A trivial bug in IRCClient which would cause whois(oper=True)
+   to always raise an exception was fixed (#2089)
+ - (IM) Bugs in the error handling and already-connecting cases of
+   AbstractAccount.logOn were fixed (#2086)
+
+Misc
+----
+ - #1734, #1735, #1636, #1936, #1883, #1995, #2171, #2165, #2177
+
+
+0.4.0 (2006-05-21)
+==================
+
+Features
+--------
+ - Jabber:
+   - Add support for stream and stanza level errors
+   - Create new IQ stanza helper that works with deferreds
+   - Add TLS support for initiating entities to XmlStream
+   - Fix account registration
+ - Xish:
+   - Fix various namespace issues
+   - Add IElement
+   - Store namespace declarations in parsed XML for later serialization
+ - Fix user name/group collision in server service (#1655).
+ - Correctly recognize MSN capability messages (#861).
+
+Fixes
+-----
+ - Misc: #1283, #1296, #1302, #1424
+ - Fix unicode/str confusion in IRC server service.
+
+
+0.3.0:
+ - Jabber:
+
+   - Fix digest authentication in Jabber
+   - Add Jabber xmlstream module that contains the Jabber specific bits that
+     got factored out of Twisted Xish's xmlstream, and make it suitable for
+     implementing full XMPP support.
+ - Xish:
+   - Fixed serialization in _ListSerializer
+   - Removed unneeded extra whitespace generated in serialization
+   - Removed _Serializer in favour of _ListSerializer
+   - Use unicode objects for representing serialized XML, instead of utf-8
+     encoded str objects.
+   - Properly catch XML parser errors
+   - Rework and fix element stream test cases
+   - Strip xmlstream from all Jabber specifics that moved to Twisted Words
+   - Added exhaustive docstrings to xmlstream.
+ - Words Service:
+   - Complete rewrite
+   - Not backwards compatible
+
+0.1.0:
+ - Fix some miscellaneous bugs in OSCAR
+ - Add QUIT notification for IRC
+ - Fix message wrapping
+ - Misc Jabber fixes
+ - Add stringprep support for Jabber IDs
+   This only works properly on 2.3.2 or higher
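
The 8.2.0 entry above mentions iterating child elements with specific qualified names
(#2429); the corresponding API is Element.elements(uri, name) in the domish module added
further below. The following is only a minimal sketch of that usage; the 'jabber:client'
and 'urn:example:ext' namespace URIs are invented for illustration.

    from twisted.words.xish import domish

    # Build a small tree; both namespace URIs here are made up for the example.
    msg = domish.Element(('jabber:client', 'message'))
    msg.addElement('body', content='Hi!')
    msg.addElement(('urn:example:ext', 'extra'))

    # elements(uri, name) yields only the children with that qualified name;
    # elements() with no arguments yields every child element.
    bodies = list(msg.elements('jabber:client', 'body'))
    assert len(bodies) == 1 and str(bodies[0]) == 'Hi!'
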
diff --git a/ThirdParty/Twisted/twisted/words/topfiles/README b/ThirdParty/Twisted/twisted/words/topfiles/README
new file mode 100644
index 0000000..8eb41fe
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/topfiles/README
@@ -0,0 +1,5 @@
+Twisted Words 12.3.0
+
+Twisted Words depends on Twisted Core and Twisted Web.  The Twisted Web
+dependency is only necessary for MSN support.  MSN support also requires HTTPS,
+and therefore pyOpenSSL (<http://launchpad.net/pyopenssl>).
diff --git a/ThirdParty/Twisted/twisted/words/topfiles/setup.py b/ThirdParty/Twisted/twisted/words/topfiles/setup.py
new file mode 100644
index 0000000..2df89fd
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/topfiles/setup.py
@@ -0,0 +1,53 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+import sys
+
+try:
+    from twisted.python import dist
+except ImportError:
+    raise SystemExit("twisted.python.dist module not found.  Make sure you "
+                     "have installed the Twisted core package before "
+                     "attempting to install any other Twisted projects.")
+
+if __name__ == '__main__':
+    if sys.version_info[:2] >= (2, 4):
+        extraMeta = dict(
+            classifiers=[
+                "Development Status :: 4 - Beta",
+                "Environment :: No Input/Output (Daemon)",
+                "Intended Audience :: Developers",
+                "License :: OSI Approved :: MIT License",
+                "Programming Language :: Python",
+                "Topic :: Communications :: Chat",
+                "Topic :: Communications :: Chat :: AOL Instant Messenger",
+                "Topic :: Communications :: Chat :: ICQ",
+                "Topic :: Communications :: Chat :: Internet Relay Chat",
+                "Topic :: Internet",
+                "Topic :: Software Development :: Libraries :: Python Modules",
+            ])
+    else:
+        extraMeta = {}
+
+    dist.setup(
+        twisted_subproject="words",
+        scripts=dist.getScripts("words"),
+        # metadata
+        name="Twisted Words",
+        description="Twisted Words contains Instant Messaging implementations.",
+        author="Twisted Matrix Laboratories",
+        author_email="twisted-python@twistedmatrix.com",
+        maintainer="Jp Calderone",
+        url="http://twistedmatrix.com/trac/wiki/TwistedWords",
+        license="MIT",
+        long_description="""\
+Twisted Words contains implementations of many Instant Messaging
+protocols, including IRC, Jabber, MSN, OSCAR (AIM & ICQ), TOC (AOL),
+and some functionality for creating bots, inter-protocol gateways, and
+a client application for many of the protocols.
+
+In support of Jabber, Twisted Words also contains X-ish, a library for
+processing XML with Twisted and Python, with support for a Pythonic DOM and
+an XPath-like toolkit.
+""",
+        **extraMeta)
diff --git a/ThirdParty/Twisted/twisted/words/xish/__init__.py b/ThirdParty/Twisted/twisted/words/xish/__init__.py
new file mode 100644
index 0000000..1d2469f
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/xish/__init__.py
@@ -0,0 +1,10 @@
+# -*- test-case-name: twisted.words.test -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+
+"""
+
+Twisted X-ish: XML-ish DOM and XPath-ish engine
+
+"""
diff --git a/ThirdParty/Twisted/twisted/words/xish/domish.py b/ThirdParty/Twisted/twisted/words/xish/domish.py
new file mode 100644
index 0000000..3be7ed6
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/xish/domish.py
@@ -0,0 +1,848 @@
+# -*- test-case-name: twisted.words.test.test_domish -*-
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+DOM-like XML processing support.
+
+This module provides support for parsing XML into DOM-like object structures
+and serializing such structures to an XML string representation, optimized
+for use in streaming XML applications.
+"""
+
+import types
+
+from zope.interface import implements, Interface, Attribute
+
+def _splitPrefix(name):
+    """ Internal method for splitting a prefixed Element name into its
+        respective parts """
+    ntok = name.split(":", 1)
+    if len(ntok) == 2:
+        return ntok
+    else:
+        return (None, ntok[0])
+
+# Global map of prefixes that always get injected
+# into the serializers prefix map (note, that doesn't
+# mean they're always _USED_)
+G_PREFIXES = { "http://www.w3.org/XML/1998/namespace":"xml" }
+
+class _ListSerializer:
+    """ Internal class which serializes an Element tree into a buffer """
+    def __init__(self, prefixes=None, prefixesInScope=None):
+        self.writelist = []
+        self.prefixes = {}
+        if prefixes:
+            self.prefixes.update(prefixes)
+        self.prefixes.update(G_PREFIXES)
+        self.prefixStack = [G_PREFIXES.values()] + (prefixesInScope or [])
+        self.prefixCounter = 0
+
+    def getValue(self):
+        return u"".join(self.writelist)
+
+    def getPrefix(self, uri):
+        if uri not in self.prefixes:
+            self.prefixes[uri] = "xn%d" % (self.prefixCounter)
+            self.prefixCounter = self.prefixCounter + 1
+        return self.prefixes[uri]
+
+    def prefixInScope(self, prefix):
+        stack = self.prefixStack
+        for i in range(-1, (len(self.prefixStack)+1) * -1, -1):
+            if prefix in stack[i]:
+                return True
+        return False
+
+    def serialize(self, elem, closeElement=1, defaultUri=''):
+        # Optimization shortcuts
+        write = self.writelist.append
+
+        # Shortcut, check to see if elem is actually a chunk o' serialized XML
+        if isinstance(elem, SerializedXML):
+            write(elem)
+            return
+
+        # Shortcut, check to see if elem is actually a string (aka Cdata)
+        if isinstance(elem, types.StringTypes):
+            write(escapeToXml(elem))
+            return
+
+        # Further optimizations
+        name = elem.name
+        uri = elem.uri
+        defaultUri, currentDefaultUri = elem.defaultUri, defaultUri
+
+        for p, u in elem.localPrefixes.iteritems():
+            self.prefixes[u] = p
+        self.prefixStack.append(elem.localPrefixes.keys())
+
+        # Inherit the default namespace
+        if defaultUri is None:
+            defaultUri = currentDefaultUri
+
+        if uri is None:
+            uri = defaultUri
+
+        prefix = None
+        if uri != defaultUri or uri in self.prefixes:
+            prefix = self.getPrefix(uri)
+            inScope = self.prefixInScope(prefix)
+
+        # Create the starttag
+
+        if not prefix:
+            write("<%s" % (name))
+        else:
+            write("<%s:%s" % (prefix, name))
+
+            if not inScope:
+                write(" xmlns:%s='%s'" % (prefix, uri))
+                self.prefixStack[-1].append(prefix)
+                inScope = True
+
+        if defaultUri != currentDefaultUri and \
+           (uri != defaultUri or not prefix or not inScope):
+            write(" xmlns='%s'" % (defaultUri))
+
+        for p, u in elem.localPrefixes.iteritems():
+            write(" xmlns:%s='%s'" % (p, u))
+
+        # Serialize attributes
+        for k,v in elem.attributes.items():
+            # If the attribute name is a tuple, it's a qualified attribute
+            if isinstance(k, types.TupleType):
+                attr_uri, attr_name = k
+                attr_prefix = self.getPrefix(attr_uri)
+
+                if not self.prefixInScope(attr_prefix):
+                    write(" xmlns:%s='%s'" % (attr_prefix, attr_uri))
+                    self.prefixStack[-1].append(attr_prefix)
+
+                write(" %s:%s='%s'" % (attr_prefix, attr_name,
+                                       escapeToXml(v, 1)))
+            else:
+                write((" %s='%s'" % ( k, escapeToXml(v, 1))))
+
+        # Shortcut out if this is only going to return
+        # the element (i.e. no children)
+        if closeElement == 0:
+            write(">")
+            return
+
+        # Serialize children
+        if len(elem.children) > 0:
+            write(">")
+            for c in elem.children:
+                self.serialize(c, defaultUri=defaultUri)
+            # Add closing tag
+            if not prefix:
+                write("</%s>" % (name))
+            else:
+                write("</%s:%s>" % (prefix, name))
+        else:
+            write("/>")
+
+        self.prefixStack.pop()
+
+
+SerializerClass = _ListSerializer
+
+def escapeToXml(text, isattrib = 0):
+    """ Escape text to proper XML form, per section 2.3 in the XML specification.
+
+    @type text: C{str}
+    @param text: Text to escape
+
+    @type isattrib: C{bool}
+    @param isattrib: Triggers escaping of characters necessary for use as
+                     attribute values
+    """
+    text = text.replace("&", "&")
+    text = text.replace("<", "<")
+    text = text.replace(">", ">")
+    if isattrib == 1:
+        text = text.replace("'", "'")
+        text = text.replace("\"", """)
+    return text
+
+def unescapeFromXml(text):
+    text = text.replace("<", "<")
+    text = text.replace(">", ">")
+    text = text.replace("'", "'")
+    text = text.replace(""", "\"")
+    text = text.replace("&", "&")
+    return text
+
+def generateOnlyInterface(list, int):
+    """ Filters items in a list by class
+    """
+    for n in list:
+        if int.providedBy(n):
+            yield n
+
+def generateElementsQNamed(list, name, uri):
+    """ Filters Element items in a list with matching name and URI. """
+    for n in list:
+        if IElement.providedBy(n) and n.name == name and n.uri == uri:
+            yield n
+
+def generateElementsNamed(list, name):
+    """ Filters Element items in a list with matching name, regardless of URI.
+    """
+    for n in list:
+        if IElement.providedBy(n) and n.name == name:
+            yield n
+
+
+class SerializedXML(unicode):
+    """ Marker class for pre-serialized XML in the DOM. """
+    pass
+
+
+class Namespace:
+    """ Convenience object for tracking namespace declarations. """
+    def __init__(self, uri):
+        self._uri = uri
+    def __getattr__(self, n):
+        return (self._uri, n)
+    def __getitem__(self, n):
+        return (self._uri, n)
+
+class IElement(Interface):
+    """
+    Interface to XML element nodes.
+
+    See L{Element} for a detailed example of its general use.
+
+    Warning: this Interface is not yet complete!
+    """
+
+    uri = Attribute(""" Element's namespace URI """)
+    name = Attribute(""" Element's local name """)
+    defaultUri = Attribute(""" Default namespace URI of child elements """)
+    attributes = Attribute(""" Dictionary of element attributes """)
+    children = Attribute(""" List of child nodes """)
+    parent = Attribute(""" Reference to element's parent element """)
+    localPrefixes = Attribute(""" Dictionary of local prefixes """)
+
+    def toXml(prefixes=None, closeElement=1, defaultUri='',
+              prefixesInScope=None):
+        """ Serializes object to a (partial) XML document
+
+        @param prefixes: dictionary that maps namespace URIs to suggested
+                         prefix names.
+        @type prefixes: L{dict}
+        @param closeElement: flag that determines whether to include the
+                             closing tag of the element in the serialized
+                             string. A value of C{0} only generates the
+                             element's start tag. A value of C{1} yields a
+                             complete serialization.
+        @type closeElement: C{int}
+        @param defaultUri: Initial default namespace URI. This is most useful
+                           for partial rendering, where the logical parent
+                           element (of which the starttag was already
+                           serialized) declares a default namespace that should
+                           be inherited.
+        @type defaultUri: C{str}
+        @param prefixesInScope: list of prefixes that are assumed to be
+                                declared by ancestors.
+        @type prefixesInScope: C{list}
+        @return: (partial) serialized XML
+        @rtype: C{unicode}
+        """
+
+    def addElement(name, defaultUri = None, content = None):
+        """ Create an element and add as child.
+
+        The new element is added to this element as a child, and will have
+        this element as its parent.
+
+        @param name: element name. This can be either a C{unicode} object that
+                     contains the local name, or a tuple of (uri, local_name)
+                     for a fully qualified name. In the former case,
+                     the namespace URI is inherited from this element.
+        @type name: C{unicode} or C{tuple} of (C{unicode}, C{unicode})
+        @param defaultUri: default namespace URI for child elements. If
+                           C{None}, this is inherited from this element.
+        @type defaultUri: C{unicode}
+        @param content: text contained by the new element.
+        @type content: C{unicode}
+        @return: the created element
+        @rtype: object providing L{IElement}
+        """
+
+    def addChild(node):
+        """ Adds a node as child of this element.
+
+        The C{node} will be added to the list of children of this element, and
+        will have this element set as its parent when C{node} provides
+        L{IElement}.
+
+        @param node: the child node.
+        @type node: C{unicode} or object implementing L{IElement}
+        """
+
+class Element(object):
+    """ Represents an XML element node.
+
+    An Element contains a series of attributes (name/value pairs), content
+    (character data), and other child Element objects. When building a document
+    with markup (such as HTML or XML), use this object as the starting point.
+
+    Element objects fully support XML Namespaces. The fully qualified name of
+    the XML Element it represents is stored in the C{uri} and C{name}
+    attributes, where C{uri} holds the namespace URI. There is also a default
+    namespace, for child elements. This is stored in the C{defaultUri}
+    attribute. Note that C{''} means the empty namespace.
+
+    Serialization of Elements through C{toXml()} will use these attributes
+    for generating proper serialized XML. When both C{uri} and C{defaultUri}
+    are not None in the Element and all of its descendants, serialization
+    proceeds as expected:
+
+      >>> from twisted.words.xish import domish
+      >>> root = domish.Element(('myns', 'root'))
+      >>> root.addElement('child', content='test')
+      <twisted.words.xish.domish.Element object at 0x83002ac>
+      >>> root.toXml()
+      u"<root xmlns='myns'><child>test</child></root>"
+
+    For partial serialization, needed for streaming XML, a special value for
+    namespace URIs can be used: C{None}.
+
+    Using C{None} as the value for C{uri} means: this element is in whatever
+    namespace inherited by the closest logical ancestor when the complete XML
+    document has been serialized. The serialized start tag will have a
+    non-prefixed name, and no xmlns declaration will be generated.
+
+    Similarly, C{None} for C{defaultUri} means: the default namespace for my
+    child elements is inherited from the logical ancestors of this element,
+    when the complete XML document has been serialized.
+
+    To illustrate, an example from a Jabber stream. Assume the start tag of the
+    root element of the stream has already been serialized, along with several
+    complete child elements, and sent off, looking like this::
+
+      <stream:stream xmlns:stream='http://etherx.jabber.org/streams'
+                     xmlns='jabber:client' to='example.com'>
+        ...
+
+    Now suppose we want to send a complete element represented by an
+    object C{message} created like:
+
+      >>> message = domish.Element((None, 'message'))
+      >>> message['to'] = 'user at example.com'
+      >>> message.addElement('body', content='Hi!')
+      <twisted.words.xish.domish.Element object at 0x8276e8c>
+      >>> message.toXml()
+      u"<message to='user at example.com'><body>Hi!</body></message>"
+
+    As you can see, this XML snippet has no xmlns declaration. When sent
+    off, it inherits the C{jabber:client} namespace from the root element.
+    Note that this renders the same as using C{''} instead of C{None}:
+
+      >>> presence = domish.Element(('', 'presence'))
+      >>> presence.toXml()
+      u"<presence/>"
+
+    However, if this object has a parent defined, the difference becomes
+    clear:
+
+      >>> child = message.addElement(('http://example.com/', 'envelope'))
+      >>> child.addChild(presence)
+      <twisted.words.xish.domish.Element object at 0x8276fac>
+      >>> message.toXml()
+      u"<message to='user at example.com'><body>Hi!</body><envelope xmlns='http://example.com/'><presence xmlns=''/></envelope></message>"
+
+    As you can see, the <presence/> element is now in the empty namespace, not
+    in the default namespace of the parent or of the stream.
+
+    @type uri: C{unicode} or None
+    @ivar uri: URI of this Element's name
+
+    @type name: C{unicode}
+    @ivar name: Name of this Element
+
+    @type defaultUri: C{unicode} or None
+    @ivar defaultUri: URI this Element exists within
+
+    @type children: C{list}
+    @ivar children: List of child Elements and content
+
+    @type parent: L{Element}
+    @ivar parent: Reference to the parent Element, if any.
+
+    @type attributes: L{dict}
+    @ivar attributes: Dictionary of attributes associated with this Element.
+
+    @type localPrefixes: L{dict}
+    @ivar localPrefixes: Dictionary of namespace declarations on this
+                         element. The key is the prefix to bind the
+                         namespace uri to.
+    """
+
+    implements(IElement)
+
+    _idCounter = 0
+
+    def __init__(self, qname, defaultUri=None, attribs=None,
+                       localPrefixes=None):
+        """
+        @param qname: Tuple of (uri, name)
+        @param defaultUri: The default URI of the element; defaults to the URI
+                           specified in C{qname}
+        @param attribs: Dictionary of attributes
+        @param localPrefixes: Dictionary of namespace declarations on this
+                              element. The key is the prefix to bind the
+                              namespace uri to.
+        """
+        self.localPrefixes = localPrefixes or {}
+        self.uri, self.name = qname
+        if defaultUri is None and \
+           self.uri not in self.localPrefixes.itervalues():
+            self.defaultUri = self.uri
+        else:
+            self.defaultUri = defaultUri
+        self.attributes = attribs or {}
+        self.children = []
+        self.parent = None
+
+    def __getattr__(self, key):
+        # Check child list for first Element with a name matching the key
+        for n in self.children:
+            if IElement.providedBy(n) and n.name == key:
+                return n
+
+        # Tweak the behaviour so that it's more friendly about not
+        # finding elements -- we need to document this somewhere :)
+        if key.startswith('_'):
+            raise AttributeError(key)
+        else:
+            return None
+
+    def __getitem__(self, key):
+        return self.attributes[self._dqa(key)]
+
+    def __delitem__(self, key):
+        del self.attributes[self._dqa(key)];
+
+    def __setitem__(self, key, value):
+        self.attributes[self._dqa(key)] = value
+
+    def __str__(self):
+        """ Retrieve the first CData (content) node
+        """
+        for n in self.children:
+            if isinstance(n, types.StringTypes): return n
+        return ""
+
+    def _dqa(self, attr):
+        """ Dequalify an attribute key as needed """
+        if isinstance(attr, types.TupleType) and not attr[0]:
+            return attr[1]
+        else:
+            return attr
+
+    def getAttribute(self, attribname, default = None):
+        """ Retrieve the value of attribname, if it exists """
+        return self.attributes.get(attribname, default)
+
+    def hasAttribute(self, attrib):
+        """ Determine if the specified attribute exists """
+        return self._dqa(attrib) in self.attributes
+
+    def compareAttribute(self, attrib, value):
+        """ Safely compare the value of an attribute against a provided value.
+
+        C{None}-safe.
+        """
+        return self.attributes.get(self._dqa(attrib), None) == value
+
+    def swapAttributeValues(self, left, right):
+        """ Swap the values of two attribute. """
+        d = self.attributes
+        l = d[left]
+        d[left] = d[right]
+        d[right] = l
+
+    def addChild(self, node):
+        """ Add a child to this Element. """
+        if IElement.providedBy(node):
+            node.parent = self
+        self.children.append(node)
+        return self.children[-1]
+
+    def addContent(self, text):
+        """ Add some text data to this Element. """
+        c = self.children
+        if len(c) > 0 and isinstance(c[-1], types.StringTypes):
+            c[-1] = c[-1] + text
+        else:
+            c.append(text)
+        return c[-1]
+
+    def addElement(self, name, defaultUri = None, content = None):
+        result = None
+        if isinstance(name, type(())):
+            if defaultUri is None:
+                defaultUri = name[0]
+            self.children.append(Element(name, defaultUri))
+        else:
+            if defaultUri is None:
+                defaultUri = self.defaultUri
+            self.children.append(Element((defaultUri, name), defaultUri))
+
+        result = self.children[-1]
+        result.parent = self
+
+        if content:
+            result.children.append(content)
+
+        return result
+
+    def addRawXml(self, rawxmlstring):
+        """ Add a pre-serialized chunk o' XML as a child of this Element. """
+        self.children.append(SerializedXML(rawxmlstring))
+
+    def addUniqueId(self):
+        """ Add a unique (across a given Python session) id attribute to this
+            Element.
+        """
+        self.attributes["id"] = "H_%d" % Element._idCounter
+        Element._idCounter = Element._idCounter + 1
+
+
+    def elements(self, uri=None, name=None):
+        """
+        Iterate across all children of this Element that are Elements.
+
+        Returns a generator over the child elements. If both the C{uri} and
+        C{name} parameters are set, the returned generator will only yield
+        elements matching the qualified name.
+
+        @param uri: Optional element URI.
+        @type uri: C{unicode}
+        @param name: Optional element name.
+        @type name: C{unicode}
+        @return: Iterator that yields objects implementing L{IElement}.
+        """
+        if name is None:
+            return generateOnlyInterface(self.children, IElement)
+        else:
+            return generateElementsQNamed(self.children, name, uri)
+
+
+    def toXml(self, prefixes=None, closeElement=1, defaultUri='',
+                    prefixesInScope=None):
+        """ Serialize this Element and all children to a string. """
+        s = SerializerClass(prefixes=prefixes, prefixesInScope=prefixesInScope)
+        s.serialize(self, closeElement=closeElement, defaultUri=defaultUri)
+        return s.getValue()
+
+    def firstChildElement(self):
+        for c in self.children:
+            if IElement.providedBy(c):
+                return c
+        return None
+
+
+class ParserError(Exception):
+    """ Exception thrown when a parsing error occurs """
+    pass
+
+def elementStream():
+    """ Preferred method to construct an ElementStream
+
+    Uses Expat-based stream if available, and falls back to Sux if necessary.
+    """
+    try:
+        es = ExpatElementStream()
+        return es
+    except ImportError:
+        if SuxElementStream is None:
+            raise Exception("No parsers available :(")
+        es = SuxElementStream()
+        return es
+
+try:
+    from twisted.web import sux
+except:
+    SuxElementStream = None
+else:
+    class SuxElementStream(sux.XMLParser):
+        def __init__(self):
+            self.connectionMade()
+            self.DocumentStartEvent = None
+            self.ElementEvent = None
+            self.DocumentEndEvent = None
+            self.currElem = None
+            self.rootElem = None
+            self.documentStarted = False
+            self.defaultNsStack = []
+            self.prefixStack = []
+
+        def parse(self, buffer):
+            try:
+                self.dataReceived(buffer)
+            except sux.ParseError, e:
+                raise ParserError, str(e)
+
+
+        def findUri(self, prefix):
+            # Walk prefix stack backwards, looking for the uri
+            # matching the specified prefix
+            stack = self.prefixStack
+            for i in range(-1, (len(self.prefixStack)+1) * -1, -1):
+                if prefix in stack[i]:
+                    return stack[i][prefix]
+            return None
+
+        def gotTagStart(self, name, attributes):
+            defaultUri = None
+            localPrefixes = {}
+            attribs = {}
+            uri = None
+
+            # Pass 1 - Identify namespace decls
+            for k, v in attributes.items():
+                if k.startswith("xmlns"):
+                    x, p = _splitPrefix(k)
+                    if (x is None): # I.e.  default declaration
+                        defaultUri = v
+                    else:
+                        localPrefixes[p] = v
+                    del attributes[k]
+
+            # Push namespace decls onto prefix stack
+            self.prefixStack.append(localPrefixes)
+
+            # Determine default namespace for this element; if there
+            # is one
+            if defaultUri is None:
+                if len(self.defaultNsStack) > 0:
+                    defaultUri = self.defaultNsStack[-1]
+                else:
+                    defaultUri = ''
+
+            # Fix up name
+            prefix, name = _splitPrefix(name)
+            if prefix is None: # This element is in the default namespace
+                uri = defaultUri
+            else:
+                # Find the URI for the prefix
+                uri = self.findUri(prefix)
+
+            # Pass 2 - Fix up and escape attributes
+            for k, v in attributes.items():
+                p, n = _splitPrefix(k)
+                if p is None:
+                    attribs[n] = v
+                else:
+                    attribs[(self.findUri(p)), n] = unescapeFromXml(v)
+
+            # Construct the actual Element object
+            e = Element((uri, name), defaultUri, attribs, localPrefixes)
+
+            # Save current default namespace
+            self.defaultNsStack.append(defaultUri)
+
+            # Document already started
+            if self.documentStarted:
+                # Starting a new packet
+                if self.currElem is None:
+                    self.currElem = e
+                # Adding to existing element
+                else:
+                    self.currElem = self.currElem.addChild(e)
+            # New document
+            else:
+                self.rootElem = e
+                self.documentStarted = True
+                self.DocumentStartEvent(e)
+
+        def gotText(self, data):
+            if self.currElem != None:
+                self.currElem.addContent(data)
+
+        def gotCData(self, data):
+            if self.currElem != None:
+                self.currElem.addContent(data)
+
+        def gotComment(self, data):
+            # Ignore comments for the moment
+            pass
+
+        entities = { "amp" : "&",
+                     "lt"  : "<",
+                     "gt"  : ">",
+                     "apos": "'",
+                     "quot": "\"" }
+
+        def gotEntityReference(self, entityRef):
+            # If this is an entity we know about, add it as content
+            # to the current element
+            if entityRef in SuxElementStream.entities:
+                self.currElem.addContent(SuxElementStream.entities[entityRef])
+
+        def gotTagEnd(self, name):
+            # Ensure the document hasn't already ended
+            if self.rootElem is None:
+                # XXX: Write more legible explanation
+                raise ParserError, "Element closed after end of document."
+
+            # Fix up name
+            prefix, name = _splitPrefix(name)
+            if prefix is None:
+                uri = self.defaultNsStack[-1]
+            else:
+                uri = self.findUri(prefix)
+
+            # End of document
+            if self.currElem is None:
+                # Ensure element name and uri matches
+                if self.rootElem.name != name or self.rootElem.uri != uri:
+                    raise ParserError, "Mismatched root elements"
+                self.DocumentEndEvent()
+                self.rootElem = None
+
+            # Other elements
+            else:
+                # Ensure the tag being closed matches the name of the current
+                # element
+                if self.currElem.name != name or self.currElem.uri != uri:
+                    # XXX: Write more legible explanation
+                    raise ParserError, "Malformed element close"
+
+                # Pop prefix and default NS stack
+                self.prefixStack.pop()
+                self.defaultNsStack.pop()
+
+                # Check for a null parent on the current elem;
+                # that's the top of the stack
+                if self.currElem.parent is None:
+                    self.currElem.parent = self.rootElem
+                    self.ElementEvent(self.currElem)
+                    self.currElem = None
+
+                # Anything else is just some element wrapping up
+                else:
+                    self.currElem = self.currElem.parent
+
+
+class ExpatElementStream:
+    def __init__(self):
+        import pyexpat
+        self.DocumentStartEvent = None
+        self.ElementEvent = None
+        self.DocumentEndEvent = None
+        self.error = pyexpat.error
+        self.parser = pyexpat.ParserCreate("UTF-8", " ")
+        self.parser.StartElementHandler = self._onStartElement
+        self.parser.EndElementHandler = self._onEndElement
+        self.parser.CharacterDataHandler = self._onCdata
+        self.parser.StartNamespaceDeclHandler = self._onStartNamespace
+        self.parser.EndNamespaceDeclHandler = self._onEndNamespace
+        self.currElem = None
+        self.defaultNsStack = ['']
+        self.documentStarted = 0
+        self.localPrefixes = {}
+
+    def parse(self, buffer):
+        try:
+            self.parser.Parse(buffer)
+        except self.error, e:
+            raise ParserError, str(e)
+
+    def _onStartElement(self, name, attrs):
+        # Generate a qname tuple from the provided name.  See
+        # http://docs.python.org/library/pyexpat.html#xml.parsers.expat.ParserCreate
+        # for an explanation of the formatting of name.
+        qname = name.rsplit(" ", 1)
+        if len(qname) == 1:
+            qname = ('', name)
+
+        # Process attributes
+        for k, v in attrs.items():
+            if " " in k:
+                aqname = k.rsplit(" ", 1)
+                attrs[(aqname[0], aqname[1])] = v
+                del attrs[k]
+
+        # Construct the new element
+        e = Element(qname, self.defaultNsStack[-1], attrs, self.localPrefixes)
+        self.localPrefixes = {}
+
+        # Document already started
+        if self.documentStarted == 1:
+            if self.currElem != None:
+                self.currElem.children.append(e)
+                e.parent = self.currElem
+            self.currElem = e
+
+        # New document
+        else:
+            self.documentStarted = 1
+            self.DocumentStartEvent(e)
+
+    def _onEndElement(self, _):
+        # Check for null current elem; end of doc
+        if self.currElem is None:
+            self.DocumentEndEvent()
+
+        # Check for parent that is None; that's
+        # the top of the stack
+        elif self.currElem.parent is None:
+            self.ElementEvent(self.currElem)
+            self.currElem = None
+
+        # Anything else is just some element in the current
+        # packet wrapping up
+        else:
+            self.currElem = self.currElem.parent
+
+    def _onCdata(self, data):
+        if self.currElem != None:
+            self.currElem.addContent(data)
+
+    def _onStartNamespace(self, prefix, uri):
+        # If this is the default namespace, put
+        # it on the stack
+        if prefix is None:
+            self.defaultNsStack.append(uri)
+        else:
+            self.localPrefixes[prefix] = uri
+
+    def _onEndNamespace(self, prefix):
+        # Remove last element on the stack
+        if prefix is None:
+            self.defaultNsStack.pop()
+
+## class FileParser(ElementStream):
+##     def __init__(self):
+##         ElementStream.__init__(self)
+##         self.DocumentStartEvent = self.docStart
+##         self.ElementEvent = self.elem
+##         self.DocumentEndEvent = self.docEnd
+##         self.done = 0
+
+##     def docStart(self, elem):
+##         self.document = elem
+
+##     def elem(self, elem):
+##         self.document.addChild(elem)
+
+##     def docEnd(self):
+##         self.done = 1
+
+##     def parse(self, filename):
+##         for l in open(filename).readlines():
+##             self.parser.Parse(l)
+##         assert self.done == 1
+##         return self.document
+
+## def parseFile(filename):
+##     return FileParser().parse(filename)
+
+
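
As a rough sketch of how the streaming parser above is meant to be driven (the XmlStream
protocol in xmlstream.py does essentially this in dataReceived), the following feeds an
elementStream() in chunks; the 'urn:example' namespace and the stanza content are
invented for illustration.

    from twisted.words.xish import domish

    received = []

    es = domish.elementStream()
    es.DocumentStartEvent = lambda root: received.append(('start', root.name))
    es.ElementEvent = lambda elem: received.append(('stanza', elem))
    es.DocumentEndEvent = lambda: received.append(('end',))

    # Feed the parser incrementally, as data would arrive from a transport.
    es.parse("<stream xmlns='urn:example'>")
    es.parse("<message><body>Hi!</body></message>")
    es.parse("</stream>")

    # received[0] is ('start', 'stream'); received[1][1] is the complete
    # <message/> stanza, whose .body child contains the text 'Hi!'.
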
diff --git a/ThirdParty/Twisted/twisted/words/xish/utility.py b/ThirdParty/Twisted/twisted/words/xish/utility.py
new file mode 100644
index 0000000..5c54095
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/xish/utility.py
@@ -0,0 +1,372 @@
+# -*- test-case-name: twisted.words.test.test_xishutil -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+Event Dispatching and Callback utilities.
+"""
+
+from twisted.python import log
+from twisted.words.xish import xpath
+
+class _MethodWrapper(object):
+    """
+    Internal class for tracking method calls.
+    """
+    def __init__(self, method, *args, **kwargs):
+        self.method = method
+        self.args = args
+        self.kwargs = kwargs
+
+
+    def __call__(self, *args, **kwargs):
+        nargs = self.args + args
+        nkwargs = self.kwargs.copy()
+        nkwargs.update(kwargs)
+        self.method(*nargs, **nkwargs)
+
+
+
+class CallbackList:
+    """
+    Container for callbacks.
+
+    Event queries are linked to lists of callables. When a matching event
+    occurs, these callables are called in sequence. One-time callbacks
+    are removed from the list after the first time the event was triggered.
+
+    Arguments to callbacks are spread across two sets. The first set,
+    callback specific, is passed to C{addCallback} and is used for all
+    subsequent event triggers.  The second set is passed to C{callback} and is
+    event specific. Positional arguments in the second set come after the
+    positional arguments of the first set. Keyword arguments in the second set
+    override those in the first set.
+
+    @ivar callbacks: The registered callbacks as mapping from the callable to a
+                     tuple of a wrapper for that callable that keeps the
+                     callback specific arguments and a boolean that signifies
+                     if it is to be called only once.
+    @type callbacks: C{dict}
+    """
+
+    def __init__(self):
+        self.callbacks = {}
+
+
+    def addCallback(self, onetime, method, *args, **kwargs):
+        """
+        Add callback.
+
+        The arguments passed are used as callback specific arguments.
+
+        @param onetime: If C{True}, this callback is called at most once.
+        @type onetime: C{bool}
+        @param method: The callback callable to be added.
+        @param args: Positional arguments to the callable.
+        @type args: C{list}
+        @param kwargs: Keyword arguments to the callable.
+        @type kwargs: C{dict}
+        """
+
+        if not method in self.callbacks:
+            self.callbacks[method] = (_MethodWrapper(method, *args, **kwargs),
+                                      onetime)
+
+
+    def removeCallback(self, method):
+        """
+        Remove callback.
+
+        @param method: The callable to be removed.
+        """
+
+        if method in self.callbacks:
+            del self.callbacks[method]
+
+
+    def callback(self, *args, **kwargs):
+        """
+        Call all registered callbacks.
+
+        The passed arguments are event specific and augment and override
+        the callback specific arguments as described above.
+
+        @note: Exceptions raised by callbacks are trapped and logged. They will
+               not propagate up to make sure other callbacks will still be
+               called, and the event dispatching always succeeds.
+
+        @param args: Positional arguments to the callable.
+        @type args: C{list}
+        @param kwargs: Keyword arguments to the callable.
+        @type kwargs: C{dict}
+        """
+
+        for key, (methodwrapper, onetime) in self.callbacks.items():
+            try:
+                methodwrapper(*args, **kwargs)
+            except:
+                log.err()
+
+            if onetime:
+                del self.callbacks[key]
+
+
+    def isEmpty(self):
+        """
+        Return whether the list of registered callbacks is empty.
+
+        @rtype: C{bool}
+        """
+
+        return len(self.callbacks) == 0
+
+
+
+class EventDispatcher:
+    """
+    Event dispatching service.
+
+    The C{EventDispatcher} allows observers to be registered for certain events
+    that are dispatched. There are two types of events: XPath events and Named
+    events.
+
+    Every dispatch is triggered by calling L{dispatch} with a data object and,
+    for named events, the name of the event.
+
+    When an XPath type event is dispatched, the associated object is assumed to
+    be an L{Element<twisted.words.xish.domish.Element>} instance, which is
+    matched against all registered XPath queries. For every match, the
+    respective observer will be called with the data object.
+
+    A named event will simply call each registered observer for that particular
+    event name, with the data object. Unlike XPath type events, the data object
+    is not restricted to L{Element<twisted.words.xish.domish.Element>}, but can
+    be anything.
+
+    When registering observers, the event that is to be observed is specified
+    using an L{xpath.XPathQuery} instance or a string. In the latter case, the
+    string can also contain the string representation of an XPath expression.
+    To distinguish these from named events, each named event should start with
+    a special prefix that is stored in C{self.prefix}. It defaults to
+    C{//event/}.
+
+    Observers registered using L{addObserver} are persistent: after the
+    observer has been triggered by a dispatch, it remains registered for a
+    possible next dispatch. If instead L{addOnetimeObserver} was used to
+    observe an event, the observer is removed from the list of observers after
+    the first observed event.
+
+    Observers can also be prioritized, by providing an optional C{priority}
+    parameter to the L{addObserver} and L{addOnetimeObserver} methods. Higher
+    priority observers are then called before lower priority observers.
+
+    Finally, observers can be unregistered by using L{removeObserver}.
+    """
+
+    def __init__(self, eventprefix="//event/"):
+        self.prefix = eventprefix
+        self._eventObservers = {}
+        self._xpathObservers = {}
+        self._dispatchDepth = 0  # Flag indicating levels of dispatching
+                                 # in progress
+        self._updateQueue = [] # Queued updates for observer ops
+
+
+    def _getEventAndObservers(self, event):
+        if isinstance(event, xpath.XPathQuery):
+            # Treat as xpath
+            observers = self._xpathObservers
+        else:
+            if self.prefix == event[:len(self.prefix)]:
+                # Treat as event
+                observers = self._eventObservers
+            else:
+                # Treat as xpath
+                event = xpath.internQuery(event)
+                observers = self._xpathObservers
+
+        return event, observers
+
+
+    def addOnetimeObserver(self, event, observerfn, priority=0, *args, **kwargs):
+        """
+        Register a one-time observer for an event.
+
+        Like L{addObserver}, but is only triggered at most once. See there
+        for a description of the parameters.
+        """
+        self._addObserver(True, event, observerfn, priority, *args, **kwargs)
+
+
+    def addObserver(self, event, observerfn, priority=0, *args, **kwargs):
+        """
+        Register an observer for an event.
+
+        Each observer will be registered with a certain priority. Higher
+        priority observers get called before lower priority observers.
+
+        @param event: Name or XPath query for the event to be monitored.
+        @type event: C{str} or L{xpath.XPathQuery}.
+        @param observerfn: Function to be called when the specified event
+                           has been triggered. This callable takes
+                           one parameter: the data object that triggered
+                           the event. When specified, the C{*args} and
+                           C{**kwargs} parameters to addObserver are being used
+                           as additional parameters to the registered observer
+                           callable.
+        @param priority: (Optional) priority of this observer in relation to
+                         other observers that match the same event. Defaults to
+                         C{0}.
+        @type priority: C{int}
+        """
+        self._addObserver(False, event, observerfn, priority, *args, **kwargs)
+
+
+    def _addObserver(self, onetime, event, observerfn, priority, *args, **kwargs):
+        # If this is happening in the middle of the dispatch, queue
+        # it up for processing after the dispatch completes
+        if self._dispatchDepth > 0:
+            self._updateQueue.append(lambda:self._addObserver(onetime, event, observerfn, priority, *args, **kwargs))
+            return
+
+        event, observers = self._getEventAndObservers(event)
+
+        if priority not in observers:
+            cbl = CallbackList()
+            observers[priority] = {event: cbl}
+        else:
+            priorityObservers = observers[priority]
+            if event not in priorityObservers:
+                cbl = CallbackList()
+                observers[priority][event] = cbl
+            else:
+                cbl = priorityObservers[event]
+
+        cbl.addCallback(onetime, observerfn, *args, **kwargs)
+
+
+    def removeObserver(self, event, observerfn):
+        """
+        Remove callable as observer for an event.
+
+        The observer callable is removed for all priority levels for the
+        specified event.
+
+        @param event: Event for which the observer callable was registered.
+        @type event: C{str} or L{xpath.XPathQuery}
+        @param observerfn: Observer callable to be unregistered.
+        """
+
+        # If this is happening in the middle of the dispatch, queue
+        # it up for processing after the dispatch completes
+        if self._dispatchDepth > 0:
+            self._updateQueue.append(lambda:self.removeObserver(event, observerfn))
+            return
+
+        event, observers = self._getEventAndObservers(event)
+
+        emptyLists = []
+        for priority, priorityObservers in observers.iteritems():
+            for query, callbacklist in priorityObservers.iteritems():
+                if event == query:
+                    callbacklist.removeCallback(observerfn)
+                    if callbacklist.isEmpty():
+                        emptyLists.append((priority, query))
+
+        for priority, query in emptyLists:
+            del observers[priority][query]
+
+
+    def dispatch(self, obj, event=None):
+        """
+        Dispatch an event.
+
+        When C{event} is C{None}, an XPath type event is triggered, and
+        C{obj} is assumed to be an instance of
+        L{Element<twisted.words.xish.domish.Element>}. Otherwise, C{event}
+        holds the name of the named event being triggered. In the latter case,
+        C{obj} can be anything.
+
+        @param obj: The object to be dispatched.
+        @param event: Optional event name.
+        @type event: C{str}
+        """
+
+        foundTarget = False
+
+        self._dispatchDepth += 1
+
+        if event != None:
+            # Named event
+            observers = self._eventObservers
+            match = lambda query, obj: query == event
+        else:
+            # XPath event
+            observers = self._xpathObservers
+            match = lambda query, obj: query.matches(obj)
+
+        priorities = observers.keys()
+        priorities.sort()
+        priorities.reverse()
+
+        emptyLists = []
+        for priority in priorities:
+            for query, callbacklist in observers[priority].iteritems():
+                if match(query, obj):
+                    callbacklist.callback(obj)
+                    foundTarget = True
+                    if callbacklist.isEmpty():
+                        emptyLists.append((priority, query))
+
+        for priority, query in emptyLists:
+            del observers[priority][query]
+
+        self._dispatchDepth -= 1
+
+        # If this is a dispatch within a dispatch, don't
+        # do anything with the updateQueue -- it needs to
+        # wait until we've backed all the way out of the stack
+        if self._dispatchDepth == 0:
+            # Deal with pending update operations
+            for f in self._updateQueue:
+                f()
+            self._updateQueue = []
+
+        return foundTarget
+
+
+
+class XmlPipe(object):
+    """
+    XML stream pipe.
+
+    Connects two objects that communicate stanzas through an XML stream like
+    interface. Each of the ends of the pipe (sink and source) can be used to
+    send XML stanzas to the other side, or add observers to process XML stanzas
+    that were sent from the other side.
+
+    XML pipes are usually used in place of regular XML streams that are
+    transported over TCP. This is the reason for the use of the names source
+    and sink for both ends of the pipe. The source side corresponds with the
+    entity that initiated the TCP connection, whereas the sink corresponds with
+    the entity that accepts that connection. In this object, though, the source
+    and sink are treated equally.
+
+    Unlike Jabber
+    L{XmlStream<twisted.words.protocols.jabber.xmlstream.XmlStream>}s, the sink
+    and source objects are assumed to represent an eternally connected and
+    initialized XML stream. As such, events corresponding to connection,
+    disconnection, initialization and stream errors are not dispatched or
+    processed.
+
+    @since: 8.2
+    @ivar source: Source XML stream.
+    @ivar sink: Sink XML stream.
+    """
+
+    def __init__(self):
+        self.source = EventDispatcher()
+        self.sink = EventDispatcher()
+        self.source.send = lambda obj: self.sink.dispatch(obj)
+        self.sink.send = lambda obj: self.source.dispatch(obj)
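
A minimal sketch of the EventDispatcher behaviour documented above, combining one named
event and one XPath observer; the event name '//event/myapp/ping' and the query
'/message/body' are arbitrary examples, not names used elsewhere in this code.

    from twisted.words.xish import domish
    from twisted.words.xish.utility import EventDispatcher

    seen = []
    d = EventDispatcher()

    # A named event: the string starts with the dispatcher prefix '//event/'.
    d.addObserver('//event/myapp/ping', lambda obj: seen.append(('ping', obj)))

    # An XPath event: matched against dispatched domish.Element stanzas.
    d.addObserver('/message/body',
                  lambda elem: seen.append(('body', str(elem.body))))

    d.dispatch('payload', '//event/myapp/ping')

    msg = domish.Element((None, 'message'))
    msg.addElement('body', content='Hi!')
    d.dispatch(msg)

    # seen == [('ping', 'payload'), ('body', 'Hi!')]
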
diff --git a/ThirdParty/Twisted/twisted/words/xish/xmlstream.py b/ThirdParty/Twisted/twisted/words/xish/xmlstream.py
new file mode 100644
index 0000000..3018a0e
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/xish/xmlstream.py
@@ -0,0 +1,261 @@
+# -*- test-case-name: twisted.words.test.test_xmlstream -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+XML Stream processing.
+
+An XML Stream is defined as a connection over which two XML documents are
+exchanged during the lifetime of the connection, one for each direction. The
+unit of interaction is a direct child element of the root element (stanza).
+
+The most prominent use of XML Streams is Jabber, but this module is generically
+usable. See Twisted Words for Jabber specific protocol support.
+
+Maintainer: Ralph Meijer
+"""
+
+from twisted.python import failure
+from twisted.internet import protocol
+from twisted.words.xish import domish, utility
+
+STREAM_CONNECTED_EVENT = intern("//event/stream/connected")
+STREAM_START_EVENT = intern("//event/stream/start")
+STREAM_END_EVENT = intern("//event/stream/end")
+STREAM_ERROR_EVENT = intern("//event/stream/error")
+
+class XmlStream(protocol.Protocol, utility.EventDispatcher):
+    """ Generic Streaming XML protocol handler.
+
+    This protocol handler will parse incoming data as XML and dispatch events
+    accordingly. Incoming stanzas can be handled by registering observers using
+    XPath-like expressions that are matched against each stanza. See
+    L{utility.EventDispatcher} for details.
+    """
+    def __init__(self):
+        utility.EventDispatcher.__init__(self)
+        self.stream = None
+        self.rawDataOutFn = None
+        self.rawDataInFn = None
+
+    def _initializeStream(self):
+        """ Sets up XML Parser. """
+        self.stream = domish.elementStream()
+        self.stream.DocumentStartEvent = self.onDocumentStart
+        self.stream.ElementEvent = self.onElement
+        self.stream.DocumentEndEvent = self.onDocumentEnd
+
+    ### --------------------------------------------------------------
+    ###
+    ### Protocol events
+    ###
+    ### --------------------------------------------------------------
+
+    def connectionMade(self):
+        """ Called when a connection is made.
+
+        Sets up the XML parser and dispatches the L{STREAM_CONNECTED_EVENT}
+        event indicating the connection has been established.
+        """
+        self._initializeStream()
+        self.dispatch(self, STREAM_CONNECTED_EVENT)
+
+    def dataReceived(self, data):
+        """ Called whenever data is received.
+
+        Passes the data to the XML parser. This can result in calls to the
+        DOM handlers. If a parse error occurs, the L{STREAM_ERROR_EVENT} event
+        is called to allow for cleanup actions, followed by dropping the
+        connection.
+        """
+        try:
+            if self.rawDataInFn:
+                self.rawDataInFn(data)
+            self.stream.parse(data)
+        except domish.ParserError:
+            self.dispatch(failure.Failure(), STREAM_ERROR_EVENT)
+            self.transport.loseConnection()
+
+    def connectionLost(self, reason):
+        """ Called when the connection is shut down.
+
+        Dispatches the L{STREAM_END_EVENT}.
+        """
+        self.dispatch(reason, STREAM_END_EVENT)
+        self.stream = None
+
+    ### --------------------------------------------------------------
+    ###
+    ### DOM events
+    ###
+    ### --------------------------------------------------------------
+
+    def onDocumentStart(self, rootElement):
+        """ Called whenever the start tag of a root element has been received.
+
+        Dispatches the L{STREAM_START_EVENT}.
+        """
+        self.dispatch(self, STREAM_START_EVENT)
+
+    def onElement(self, element):
+        """ Called whenever a direct child element of the root element has
+        been received.
+
+        Dispatches the received element.
+        """
+        self.dispatch(element)
+
+    def onDocumentEnd(self):
+        """ Called whenever the end tag of the root element has been received.
+
+        Closes the connection. This causes C{connectionLost} being called.
+        """
+        self.transport.loseConnection()
+
+    def setDispatchFn(self, fn):
+        """ Set another function to handle elements. """
+        self.stream.ElementEvent = fn
+
+    def resetDispatchFn(self):
+        """ Set the default function (C{onElement}) to handle elements. """
+        self.stream.ElementEvent = self.onElement
+
+    def send(self, obj):
+        """ Send data over the stream.
+
+        Sends the given C{obj} over the connection. C{obj} may be instances of
+        L{domish.Element}, C{unicode} and C{str}. The first two will be
+        properly serialized and/or encoded. C{str} objects must be in UTF-8
+        encoding.
+
+        Note: because it is easy to make mistakes in maintaining a properly
+        encoded C{str} object, it is advised to use C{unicode} objects
+        everywhere when dealing with XML Streams.
+
+        @param obj: Object to be sent over the stream.
+        @type obj: L{domish.Element}, C{unicode} or C{str}
+
+        """
+        if domish.IElement.providedBy(obj):
+            obj = obj.toXml()
+
+        if isinstance(obj, unicode):
+            obj = obj.encode('utf-8')
+
+        if self.rawDataOutFn:
+            self.rawDataOutFn(obj)
+
+        self.transport.write(obj)
+
+
+
+class BootstrapMixin(object):
+    """
+    XmlStream factory mixin to install bootstrap event observers.
+
+    This mixin is for factories providing
+    L{IProtocolFactory<twisted.internet.interfaces.IProtocolFactory>} to make
+    sure bootstrap event observers are set up on protocols, before incoming
+    data is processed. Such protocols typically derive from
+    L{utility.EventDispatcher}, like L{XmlStream}.
+
+    You can set up bootstrap event observers using C{addBootstrap}. The
+    C{event} and C{fn} parameters correspond with the C{event} and
+    C{observerfn} arguments to L{utility.EventDispatcher.addObserver}.
+
+    @since: 8.2.
+    @ivar bootstraps: The list of registered bootstrap event observers.
+    @type bootstraps: C{list}
+    """
+
+    def __init__(self):
+        self.bootstraps = []
+
+
+    def installBootstraps(self, dispatcher):
+        """
+        Install registered bootstrap observers.
+
+        @param dispatcher: Event dispatcher to add the observers to.
+        @type dispatcher: L{utility.EventDispatcher}
+        """
+        for event, fn in self.bootstraps:
+            dispatcher.addObserver(event, fn)
+
+
+    def addBootstrap(self, event, fn):
+        """
+        Add a bootstrap event handler.
+
+        @param event: The event to register an observer for.
+        @type event: C{str} or L{xpath.XPathQuery}
+        @param fn: The observer callable to be registered.
+        """
+        self.bootstraps.append((event, fn))
+
+
+    def removeBootstrap(self, event, fn):
+        """
+        Remove a bootstrap event handler.
+
+        @param event: The event the observer is registered for.
+        @type event: C{str} or L{xpath.XPathQuery}
+        @param fn: The registered observer callable.
+        """
+        self.bootstraps.remove((event, fn))
+
+
+
+class XmlStreamFactoryMixin(BootstrapMixin):
+    """
+    XmlStream factory mixin that takes care of event handlers.
+
+    All positional and keyword arguments passed to create this factory are
+    passed on as-is to the protocol.
+
+    @ivar args: Positional arguments passed to the protocol upon instantiation.
+    @type args: C{tuple}.
+    @ivar kwargs: Keyword arguments passed to the protocol upon instantiation.
+    @type kwargs: C{dict}.
+    """
+
+    def __init__(self, *args, **kwargs):
+        BootstrapMixin.__init__(self)
+        self.args = args
+        self.kwargs = kwargs
+
+
+    def buildProtocol(self, addr):
+        """
+        Create an instance of XmlStream.
+
+        The returned instance will have bootstrap event observers registered
+        and will proceed to handle input on an incoming connection.
+        """
+        xs = self.protocol(*self.args, **self.kwargs)
+        xs.factory = self
+        self.installBootstraps(xs)
+        return xs
+
+
+
+class XmlStreamFactory(XmlStreamFactoryMixin,
+                       protocol.ReconnectingClientFactory):
+    """
+    Factory for XmlStream protocol objects as a reconnecting client.
+    """
+
+    protocol = XmlStream
+
+    def buildProtocol(self, addr):
+        """
+        Create a protocol instance.
+
+        Overrides L{XmlStreamFactoryMixin.buildProtocol} to work with
+        a L{ReconnectingClientFactory}. As this is called once a connection
+        has been established, the reconnection delay is reset here so that
+        later reconnection attempts start from the initial delay again.
+        """
+        self.resetDelay()
+        return XmlStreamFactoryMixin.buildProtocol(self, addr)
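
A minimal usage sketch (not part of the imported file) tying the classes above
together: a reconnecting client factory with a bootstrap observer that sends a
stanza once the stream is connected. The host, port and stanza content are
placeholders, and a real XMPP client would perform stream initialization via
twisted.words.protocols.jabber rather than using the raw XmlStream directly.

    from twisted.internet import reactor
    from twisted.words.xish import domish, xmlstream

    def onConnected(xs):
        # The observer receives the XmlStream instance; send() serializes a
        # domish.Element via toXml() and UTF-8 encodes unicode before writing.
        message = domish.Element(('jabber:client', 'message'))
        message['to'] = 'user@example.org'
        message.addElement('body', content=u'hello')
        xs.send(message)

    factory = xmlstream.XmlStreamFactory()
    factory.addBootstrap(xmlstream.STREAM_CONNECTED_EVENT, onConnected)
    reactor.connectTCP('xmpp.example.org', 5222, factory)
    reactor.run()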
diff --git a/ThirdParty/Twisted/twisted/words/xish/xpath.py b/ThirdParty/Twisted/twisted/words/xish/xpath.py
new file mode 100644
index 0000000..bf5b529
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/xish/xpath.py
@@ -0,0 +1,333 @@
+# -*- test-case-name: twisted.words.test.test_xpath -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+"""
+XPath query support.
+
+This module provides L{XPathQuery} to match
+L{domish.Element<twisted.words.xish.domish.Element>} instances against
+XPath-like expressions.
+"""
+
+try:
+    import cStringIO as StringIO
+except ImportError:
+    import StringIO
+
+class LiteralValue(str):
+    def value(self, elem):
+        return self
+
+
+class IndexValue:
+    def __init__(self, index):
+        self.index = int(index) - 1
+
+    def value(self, elem):
+        return elem.children[self.index]
+
+
+class AttribValue:
+    def __init__(self, attribname):
+        self.attribname = attribname
+        if self.attribname == "xmlns":
+            self.value = self.value_ns
+
+    def value_ns(self, elem):
+        return elem.uri
+
+    def value(self, elem):
+        if self.attribname in elem.attributes:
+            return elem.attributes[self.attribname]
+        else:
+            return None
+
+
+class CompareValue:
+    def __init__(self, lhs, op, rhs):
+        self.lhs = lhs
+        self.rhs = rhs
+        if op == "=":
+            self.value = self._compareEqual
+        else:
+            self.value = self._compareNotEqual
+
+    def _compareEqual(self, elem):
+        return self.lhs.value(elem) == self.rhs.value(elem)
+
+    def _compareNotEqual(self, elem):
+        return self.lhs.value(elem) != self.rhs.value(elem)
+
+
+class BooleanValue:
+    """
+    Provide boolean XPath expression operators.
+
+    @ivar lhs: Left hand side expression of the operator.
+    @ivar op: The operator. One of C{'and'}, C{'or'}.
+    @ivar rhs: Right hand side expression of the operator.
+    @ivar value: Reference to the method that will calculate the value of
+                 this expression given an element.
+    """
+    def __init__(self, lhs, op, rhs):
+        self.lhs = lhs
+        self.rhs = rhs
+        if op == "and":
+            self.value = self._booleanAnd
+        else:
+            self.value = self._booleanOr
+
+    def _booleanAnd(self, elem):
+        """
+        Calculate boolean and of the given expressions given an element.
+
+        @param elem: The element to calculate the value of the expression from.
+        """
+        return self.lhs.value(elem) and self.rhs.value(elem)
+
+    def _booleanOr(self, elem):
+        """
+        Calculate boolean or of the given expressions given an element.
+
+        @param elem: The element to calculate the value of the expression from.
+        """
+        return self.lhs.value(elem) or self.rhs.value(elem)
+
+
+def Function(fname):
+    """
+    Internal factory that selects the function object for the given name.
+    """
+    klassname = "_%s_Function" % fname
+    c = globals()[klassname]()
+    return c
+
+
+class _not_Function:
+    def __init__(self):
+        self.baseValue = None
+
+    def setParams(self, baseValue):
+        self.baseValue = baseValue
+
+    def value(self, elem):
+        return not self.baseValue.value(elem)
+
+
+class _text_Function:
+    def setParams(self):
+        pass
+
+    def value(self, elem):
+        return str(elem)
+
+
+class _Location:
+    def __init__(self):
+        self.predicates = []
+        self.elementName  = None
+        self.childLocation = None
+
+    def matchesPredicates(self, elem):
+        if self.elementName != None and self.elementName != elem.name:
+            return 0
+
+        for p in self.predicates:
+            if not p.value(elem):
+                return 0
+
+        return 1
+
+    def matches(self, elem):
+        if not self.matchesPredicates(elem):
+            return 0
+
+        if self.childLocation != None:
+            for c in elem.elements():
+                if self.childLocation.matches(c):
+                    return 1
+        else:
+            return 1
+
+        return 0
+
+    def queryForString(self, elem, resultbuf):
+        if not self.matchesPredicates(elem):
+            return
+
+        if self.childLocation != None:
+            for c in elem.elements():
+                self.childLocation.queryForString(c, resultbuf)
+        else:
+            resultbuf.write(str(elem))
+
+    def queryForNodes(self, elem, resultlist):
+        if not self.matchesPredicates(elem):
+            return
+
+        if self.childLocation != None:
+            for c in elem.elements():
+                self.childLocation.queryForNodes(c, resultlist)
+        else:
+            resultlist.append(elem)
+
+    def queryForStringList(self, elem, resultlist):
+        if not self.matchesPredicates(elem):
+            return
+
+        if self.childLocation != None:
+            for c in elem.elements():
+                self.childLocation.queryForStringList(c, resultlist)
+        else:
+            for c in elem.children:
+                if isinstance(c, (str, unicode)):
+                    resultlist.append(c)
+
+
+class _AnyLocation:
+    def __init__(self):
+        self.predicates = []
+        self.elementName = None
+        self.childLocation = None
+
+    def matchesPredicates(self, elem):
+        for p in self.predicates:
+            if not p.value(elem):
+                return 0
+        return 1
+
+    def listParents(self, elem, parentlist):
+        if elem.parent != None:
+            self.listParents(elem.parent, parentlist)
+        parentlist.append(elem.name)
+
+    def isRootMatch(self, elem):
+        if (self.elementName == None or self.elementName == elem.name) and \
+           self.matchesPredicates(elem):
+            if self.childLocation != None:
+                for c in elem.elements():
+                    if self.childLocation.matches(c):
+                        return True
+            else:
+                return True
+        return False
+
+    def findFirstRootMatch(self, elem):
+        if (self.elementName == None or self.elementName == elem.name) and \
+           self.matchesPredicates(elem):
+            # Thus far, the name matches and the predicates match;
+            # now check the children and find the first one that
+            # matches the rest of the structure
+            if self.childLocation != None:
+                for c in elem.elements():
+                    if self.childLocation.matches(c):
+                        return c
+                return None
+            else:
+                # No children locations; this is a match!
+                return elem
+        else:
+            # Ok, predicates or name didn't match, so we need to start
+            # down each child and treat it as the root and try
+            # again
+            for c in elem.elements():
+                if self.matches(c):
+                    return c
+            # No children matched...
+            return None
+
+    def matches(self, elem):
+        if self.isRootMatch(elem):
+            return True
+        else:
+            # Ok, initial element isn't an exact match, walk
+            # down each child and treat it as the root and try
+            # again
+            for c in elem.elements():
+                if self.matches(c):
+                    return True
+            # No children matched...
+            return False
+
+    def queryForString(self, elem, resultbuf):
+        raise NotImplementedError(
+            "queryForString is not implemented for any location")
+
+    def queryForNodes(self, elem, resultlist):
+        # First check to see if _this_ element is a root
+        if self.isRootMatch(elem):
+            resultlist.append(elem)
+
+        # Now check each child
+        for c in elem.elements():
+            self.queryForNodes(c, resultlist)
+
+
+    def queryForStringList(self, elem, resultlist):
+        if self.isRootMatch(elem):
+            for c in elem.children:
+                if isinstance(c, (str, unicode)):
+                    resultlist.append(c)
+        for c in elem.elements():
+            self.queryForStringList(c, resultlist)
+
+
+class XPathQuery:
+    def __init__(self, queryStr):
+        self.queryStr = queryStr
+        from twisted.words.xish.xpathparser import parse
+        self.baseLocation = parse('XPATH', queryStr)
+
+    def __hash__(self):
+        return self.queryStr.__hash__()
+
+    def matches(self, elem):
+        return self.baseLocation.matches(elem)
+
+    def queryForString(self, elem):
+        result = StringIO.StringIO()
+        self.baseLocation.queryForString(elem, result)
+        return result.getvalue()
+
+    def queryForNodes(self, elem):
+        result = []
+        self.baseLocation.queryForNodes(elem, result)
+        if len(result) == 0:
+            return None
+        else:
+            return result
+
+    def queryForStringList(self, elem):
+        result = []
+        self.baseLocation.queryForStringList(elem, result)
+        if len(result) == 0:
+            return None
+        else:
+            return result
+
+
+__internedQueries = {}
+
+def internQuery(queryString):
+    if queryString not in __internedQueries:
+        __internedQueries[queryString] = XPathQuery(queryString)
+    return __internedQueries[queryString]
+
+
+def matches(xpathstr, elem):
+    return internQuery(xpathstr).matches(elem)
+
+
+def queryForStringList(xpathstr, elem):
+    return internQuery(xpathstr).queryForStringList(elem)
+
+
+def queryForString(xpathstr, elem):
+    return internQuery(xpathstr).queryForString(elem)
+
+
+def queryForNodes(xpathstr, elem):
+    return internQuery(xpathstr).queryForNodes(elem)
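
A short sketch of how the public helpers above are typically used (placeholder
element and query string; element names are matched here without regard to
namespace):

    from twisted.words.xish import domish, xpath

    msg = domish.Element(('jabber:client', 'message'))
    msg['type'] = 'chat'
    msg.addElement('body', content=u'hello')

    query = xpath.XPathQuery("/message[@type='chat']/body")
    print query.matches(msg)          # 1, i.e. a match
    print query.queryForNodes(msg)    # list containing the body element

    # The module-level helpers cache compiled queries via internQuery():
    print xpath.matches("/message/body", msg)   # 1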
diff --git a/ThirdParty/Twisted/twisted/words/xish/xpathparser.g b/ThirdParty/Twisted/twisted/words/xish/xpathparser.g
new file mode 100644
index 0000000..02c67c9
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/xish/xpathparser.g
@@ -0,0 +1,375 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# DO NOT EDIT xpathparser.py!
+#
+# It is generated from xpathparser.g using Yapps. Make needed changes there.
+# This also means that the generated Python may not conform to Twisted's coding
+# standards.
+
+# HOWTO Generate me:
+#
+# 1.) Grab a copy of yapps2, version 2.1.1:
+#         http://theory.stanford.edu/~amitp/Yapps/
+#
+#     Note: Do NOT use the package in debian/ubuntu as it has incompatible
+#     modifications.
+#
+# 2.) Generate the grammar:
+#
+#         yapps2 xpathparser.g xpathparser.py.proto
+#
+# 3.) Edit the output to depend on the embedded runtime, not yappsrt.
+#
+#         sed -e '/^import yapps/d' -e '/^[^#]/s/yappsrt\.//g' \
+#             xpathparser.py.proto > xpathparser.py
+
+"""
+XPath Parser.
+
+Besides the parser code produced by Yapps, this module also defines the
+parse-time exception classes, a scanner class, a base class for parsers
+produced by Yapps, and a context class that keeps track of the parse stack.
+These have been copied from the Yapps runtime.
+"""
+
+import sys, re
+
+class SyntaxError(Exception):
+    """When we run into an unexpected token, this is the exception to use"""
+    def __init__(self, charpos=-1, msg="Bad Token", context=None):
+        Exception.__init__(self)
+        self.charpos = charpos
+        self.msg = msg
+        self.context = context
+
+    def __str__(self):
+        if self.charpos < 0: return 'SyntaxError'
+        else: return 'SyntaxError at char%s(%s)' % (repr(self.charpos), self.msg)
+
+class NoMoreTokens(Exception):
+    """Another exception object, for when we run out of tokens"""
+    pass
+
+class Scanner:
+    """Yapps scanner.
+
+    The Yapps scanner can work in context sensitive or context
+    insensitive modes.  The token(i) method is used to retrieve the
+    i-th token.  It takes a restrict set that limits the set of tokens
+    it is allowed to return.  In context sensitive mode, this restrict
+    set guides the scanner.  In context insensitive mode, there is no
+    restriction (the set is always the full set of tokens).
+
+    """
+
+    def __init__(self, patterns, ignore, input):
+        """Initialize the scanner.
+
+        @param patterns: [(terminal, uncompiled regex), ...] or C{None}
+        @param ignore: [terminal,...]
+        @param input: string
+
+        If patterns is C{None}, we assume that the subclass has defined
+        C{self.patterns} : [(terminal, compiled regex), ...]. Note that the
+        patterns parameter expects uncompiled regexes, whereas the
+        C{self.patterns} field expects compiled regexes.
+        """
+        self.tokens = [] # [(begin char pos, end char pos, token name, matched text), ...]
+        self.restrictions = []
+        self.input = input
+        self.pos = 0
+        self.ignore = ignore
+        self.first_line_number = 1
+
+        if patterns is not None:
+            # Compile the regex strings into regex objects
+            self.patterns = []
+            for terminal, regex in patterns:
+                self.patterns.append( (terminal, re.compile(regex)) )
+
+    def get_token_pos(self):
+        """Get the current token position in the input text."""
+        return len(self.tokens)
+
+    def get_char_pos(self):
+        """Get the current char position in the input text."""
+        return self.pos
+
+    def get_prev_char_pos(self, i=None):
+        """Get the previous position (one token back) in the input text."""
+        if self.pos == 0: return 0
+        if i is None: i = -1
+        return self.tokens[i][0]
+
+    def get_line_number(self):
+        """Get the line number of the current position in the input text."""
+        # TODO: make this work at any token/char position
+        return self.first_line_number + self.get_input_scanned().count('\n')
+
+    def get_column_number(self):
+        """Get the column number of the current position in the input text."""
+        s = self.get_input_scanned()
+        i = s.rfind('\n') # may be -1, but that's okay in this case
+        return len(s) - (i+1)
+
+    def get_input_scanned(self):
+        """Get the portion of the input that has been tokenized."""
+        return self.input[:self.pos]
+
+    def get_input_unscanned(self):
+        """Get the portion of the input that has not yet been tokenized."""
+        return self.input[self.pos:]
+
+    def token(self, i, restrict=None):
+        """Get the i'th token in the input.
+
+        If C{i} is one past the end, then scan for another token.
+
+        @param i: token index
+
+        @param restrict: [token, ...] or C{None}; if restrict is
+                         C{None}, then any token is allowed.  You may call
+                         token(i) more than once.  However, the restrict set
+                         may never be larger than what was passed in on the
+                         first call to token(i).
+        """
+        if i == len(self.tokens):
+            self.scan(restrict)
+        if i < len(self.tokens):
+            # Make sure the restriction is more restricted.  This
+            # invariant is needed to avoid ruining tokenization at
+            # position i+1 and higher.
+            if restrict and self.restrictions[i]:
+                for r in restrict:
+                    if r not in self.restrictions[i]:
+                        raise NotImplementedError("Unimplemented: restriction set changed")
+            return self.tokens[i]
+        raise NoMoreTokens()
+
+    def __repr__(self):
+        """Print the last 10 tokens that have been scanned in"""
+        output = ''
+        for t in self.tokens[-10:]:
+            output = '%s\n  (@%s)  %s  =  %s' % (output,t[0],t[2],repr(t[3]))
+        return output
+
+    def scan(self, restrict):
+        """Should scan another token and add it to the list, self.tokens,
+        and add the restriction to self.restrictions"""
+        # Keep looking for a token, ignoring any in self.ignore
+        while 1:
+            # Search the patterns for the longest match, with earlier
+            # tokens in the list having preference
+            best_match = -1
+            best_pat = '(error)'
+            for p, regexp in self.patterns:
+                # First check to see if we're ignoring this token
+                if restrict and p not in restrict and p not in self.ignore:
+                    continue
+                m = regexp.match(self.input, self.pos)
+                if m and len(m.group(0)) > best_match:
+                    # We got a match that's better than the previous one
+                    best_pat = p
+                    best_match = len(m.group(0))
+
+            # If we didn't find anything, raise an error
+            if best_pat == '(error)' and best_match < 0:
+                msg = 'Bad Token'
+                if restrict:
+                    msg = 'Trying to find one of '+', '.join(restrict)
+                raise SyntaxError(self.pos, msg)
+
+            # If we found something that isn't to be ignored, return it
+            if best_pat not in self.ignore:
+                # Create a token with this data
+                token = (self.pos, self.pos+best_match, best_pat,
+                         self.input[self.pos:self.pos+best_match])
+                self.pos = self.pos + best_match
+                # Only add this token if it's not in the list
+                # (to prevent looping)
+                if not self.tokens or token != self.tokens[-1]:
+                    self.tokens.append(token)
+                    self.restrictions.append(restrict)
+                return
+            else:
+                # This token should be ignored ..
+                self.pos = self.pos + best_match
+
+class Parser:
+    """Base class for Yapps-generated parsers.
+
+    """
+
+    def __init__(self, scanner):
+        self._scanner = scanner
+        self._pos = 0
+
+    def _peek(self, *types):
+        """Returns the token type for lookahead; if there are any args
+        then the list of args is the set of token types to allow"""
+        tok = self._scanner.token(self._pos, types)
+        return tok[2]
+
+    def _scan(self, type):
+        """Returns the matched text, and moves to the next token"""
+        tok = self._scanner.token(self._pos, [type])
+        if tok[2] != type:
+            raise SyntaxError(tok[0], 'Trying to find '+type+' :'+ ' ,'.join(self._scanner.restrictions[self._pos]))
+        self._pos = 1 + self._pos
+        return tok[3]
+
+class Context:
+    """Class to represent the parser's call stack.
+
+    Every rule creates a Context that links to its parent rule.  The
+    contexts can be used for debugging.
+
+    """
+
+    def __init__(self, parent, scanner, tokenpos, rule, args=()):
+        """Create a new context.
+
+        @param parent: Context object or C{None}
+        @param scanner: Scanner object
+        @param tokenpos: scanner token position
+        @type tokenpos: C{int}
+        @param rule: name of the rule
+        @type rule: C{str}
+        @param args: tuple listing parameters to the rule
+
+        """
+        self.parent = parent
+        self.scanner = scanner
+        self.tokenpos = tokenpos
+        self.rule = rule
+        self.args = args
+
+    def __str__(self):
+        output = ''
+        if self.parent: output = str(self.parent) + ' > '
+        output += self.rule
+        return output
+
+def print_line_with_pointer(text, p):
+    """Print the line of 'text' that includes position 'p',
+    along with a second line with a single caret (^) at position p"""
+
+    # TODO: separate out the logic for determining the line/character
+    # location from the logic for determining how to display an
+    # 80-column line to stderr.
+
+    # Now try printing part of the line
+    text = text[max(p-80, 0):p+80]
+    p = p - max(p-80, 0)
+
+    # Strip to the left
+    i = text[:p].rfind('\n')
+    j = text[:p].rfind('\r')
+    if i < 0 or (0 <= j < i): i = j
+    if 0 <= i < p:
+        p = p - i - 1
+        text = text[i+1:]
+
+    # Strip to the right
+    i = text.find('\n', p)
+    j = text.find('\r', p)
+    if i < 0 or (0 <= j < i): i = j
+    if i >= 0:
+        text = text[:i]
+
+    # Now shorten the text
+    while len(text) > 70 and p > 60:
+        # Cut off 10 chars
+        text = "..." + text[10:]
+        p = p - 7
+
+    # Now print the string, along with an indicator
+    print >>sys.stderr, '> ',text
+    print >>sys.stderr, '> ',' '*p + '^'
+
+def print_error(input, err, scanner):
+    """Print error messages, the parser stack, and the input text -- for human-readable error messages."""
+    # NOTE: this function assumes 80 columns :-(
+    # Figure out the line number
+    line_number = scanner.get_line_number()
+    column_number = scanner.get_column_number()
+    print >>sys.stderr, '%d:%d: %s' % (line_number, column_number, err.msg)
+
+    context = err.context
+    if not context:
+        print_line_with_pointer(input, err.charpos)
+
+    while context:
+        # TODO: add line number
+        print >>sys.stderr, 'while parsing %s%s:' % (context.rule, tuple(context.args))
+        print_line_with_pointer(input, context.scanner.get_prev_char_pos(context.tokenpos))
+        context = context.parent
+
+def wrap_error_reporter(parser, rule):
+    try:
+        return getattr(parser, rule)()
+    except SyntaxError, e:
+        input = parser._scanner.input
+        print_error(input, e, parser._scanner)
+    except NoMoreTokens:
+        print >>sys.stderr, 'Could not complete parsing; stopped around here:'
+        print >>sys.stderr, parser._scanner
+
+
+from twisted.words.xish.xpath import AttribValue, BooleanValue, CompareValue
+from twisted.words.xish.xpath import Function, IndexValue, LiteralValue
+from twisted.words.xish.xpath import _AnyLocation, _Location
+
+%%
+parser XPathParser:
+        ignore:             "\\s+"
+        token INDEX:        "[0-9]+"
+        token WILDCARD:     "\*"
+        token IDENTIFIER:   "[a-zA-Z][a-zA-Z0-9_\-]*"
+        token ATTRIBUTE:    "\@[a-zA-Z][a-zA-Z0-9_\-]*"
+        token FUNCNAME:     "[a-zA-Z][a-zA-Z0-9_]*"
+        token CMP_EQ:       "\="
+        token CMP_NE:       "\!\="
+        token STR_DQ:       '"([^"]|(\\"))*?"'
+        token STR_SQ:       "'([^']|(\\'))*?'"
+        token OP_AND:       "and"
+        token OP_OR:        "or"
+        token END:          "$"
+
+        rule XPATH:      PATH {{ result = PATH; current = result }}
+                           ( PATH {{ current.childLocation = PATH; current = current.childLocation }} ) * END
+                           {{ return  result }}
+
+        rule PATH:       ("/" {{ result = _Location() }} | "//" {{ result = _AnyLocation() }} )
+                           ( IDENTIFIER {{ result.elementName = IDENTIFIER }} | WILDCARD {{ result.elementName = None }} )
+                           ( "\[" PREDICATE {{ result.predicates.append(PREDICATE) }} "\]")*
+                           {{ return result }}
+
+        rule PREDICATE:  EXPR  {{ return EXPR }} |
+                         INDEX {{ return IndexValue(INDEX) }}
+
+        rule EXPR:       FACTOR {{ e = FACTOR }}
+                           ( BOOLOP FACTOR {{ e = BooleanValue(e, BOOLOP, FACTOR) }} )*
+                           {{ return e }}
+
+        rule BOOLOP:     ( OP_AND {{ return OP_AND }} | OP_OR {{ return OP_OR }} )
+
+        rule FACTOR:     TERM {{ return TERM }}
+                           | "\(" EXPR "\)" {{ return EXPR }}
+
+        rule TERM:       VALUE            {{ t = VALUE }}
+                           [ CMP VALUE  {{ t = CompareValue(t, CMP, VALUE) }} ]
+                                          {{ return t }}
+
+        rule VALUE:      "@" IDENTIFIER   {{ return AttribValue(IDENTIFIER) }} |
+                         FUNCNAME         {{ f = Function(FUNCNAME); args = [] }}
+                           "\(" [ VALUE      {{ args.append(VALUE) }}
+                             (
+                               "," VALUE     {{ args.append(VALUE) }}
+                             )*
+                           ] "\)"           {{ f.setParams(*args); return f }} |
+                         STR              {{ return LiteralValue(STR[1:len(STR)-1]) }}
+
+        rule CMP: (CMP_EQ  {{ return CMP_EQ }} | CMP_NE {{ return CMP_NE }})
+        rule STR: (STR_DQ  {{ return STR_DQ }} | STR_SQ {{ return STR_SQ }})
diff --git a/ThirdParty/Twisted/twisted/words/xish/xpathparser.py b/ThirdParty/Twisted/twisted/words/xish/xpathparser.py
new file mode 100644
index 0000000..312f6ec
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/xish/xpathparser.py
@@ -0,0 +1,508 @@
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+# DO NOT EDIT xpathparser.py!
+#
+# It is generated from xpathparser.g using Yapps. Make needed changes there.
+# This also means that the generated Python may not conform to Twisted's coding
+# standards.
+
+# HOWTO Generate me:
+#
+# 1.) Grab a copy of yapps2, version 2.1.1:
+#         http://theory.stanford.edu/~amitp/Yapps/
+#
+#     Note: Do NOT use the package in debian/ubuntu as it has incompatible
+#     modifications.
+#
+# 2.) Generate the grammar:
+#
+#         yapps2 xpathparser.g xpathparser.py.proto
+#
+# 3.) Edit the output to depend on the embedded runtime, not yappsrt.
+#
+#         sed -e '/^import yapps/d' -e '/^[^#]/s/yappsrt\.//g' \
+#             xpathparser.py.proto > xpathparser.py
+
+"""
+XPath Parser.
+
+Besides the parser code produced by Yapps, this module also defines the
+parse-time exception classes, a scanner class, a base class for parsers
+produced by Yapps, and a context class that keeps track of the parse stack.
+These have been copied from the Yapps runtime.
+"""
+
+import sys, re
+
+class SyntaxError(Exception):
+    """When we run into an unexpected token, this is the exception to use"""
+    def __init__(self, charpos=-1, msg="Bad Token", context=None):
+        Exception.__init__(self)
+        self.charpos = charpos
+        self.msg = msg
+        self.context = context
+
+    def __str__(self):
+        if self.charpos < 0: return 'SyntaxError'
+        else: return 'SyntaxError at char%s(%s)' % (repr(self.charpos), self.msg)
+
+class NoMoreTokens(Exception):
+    """Another exception object, for when we run out of tokens"""
+    pass
+
+class Scanner:
+    """Yapps scanner.
+
+    The Yapps scanner can work in context sensitive or context
+    insensitive modes.  The token(i) method is used to retrieve the
+    i-th token.  It takes a restrict set that limits the set of tokens
+    it is allowed to return.  In context sensitive mode, this restrict
+    set guides the scanner.  In context insensitive mode, there is no
+    restriction (the set is always the full set of tokens).
+
+    """
+
+    def __init__(self, patterns, ignore, input):
+        """Initialize the scanner.
+
+        @param patterns: [(terminal, uncompiled regex), ...] or C{None}
+        @param ignore: [terminal,...]
+        @param input: string
+
+        If patterns is C{None}, we assume that the subclass has defined
+        C{self.patterns} : [(terminal, compiled regex), ...]. Note that the
+        patterns parameter expects uncompiled regexes, whereas the
+        C{self.patterns} field expects compiled regexes.
+        """
+        self.tokens = [] # [(begin char pos, end char pos, token name, matched text), ...]
+        self.restrictions = []
+        self.input = input
+        self.pos = 0
+        self.ignore = ignore
+        self.first_line_number = 1
+
+        if patterns is not None:
+            # Compile the regex strings into regex objects
+            self.patterns = []
+            for terminal, regex in patterns:
+                self.patterns.append( (terminal, re.compile(regex)) )
+
+    def get_token_pos(self):
+        """Get the current token position in the input text."""
+        return len(self.tokens)
+
+    def get_char_pos(self):
+        """Get the current char position in the input text."""
+        return self.pos
+
+    def get_prev_char_pos(self, i=None):
+        """Get the previous position (one token back) in the input text."""
+        if self.pos == 0: return 0
+        if i is None: i = -1
+        return self.tokens[i][0]
+
+    def get_line_number(self):
+        """Get the line number of the current position in the input text."""
+        # TODO: make this work at any token/char position
+        return self.first_line_number + self.get_input_scanned().count('\n')
+
+    def get_column_number(self):
+        """Get the column number of the current position in the input text."""
+        s = self.get_input_scanned()
+        i = s.rfind('\n') # may be -1, but that's okay in this case
+        return len(s) - (i+1)
+
+    def get_input_scanned(self):
+        """Get the portion of the input that has been tokenized."""
+        return self.input[:self.pos]
+
+    def get_input_unscanned(self):
+        """Get the portion of the input that has not yet been tokenized."""
+        return self.input[self.pos:]
+
+    def token(self, i, restrict=None):
+        """Get the i'th token in the input.
+
+        If C{i} is one past the end, then scan for another token.
+
+        @param i: token index
+
+        @param restrict: [token, ...] or C{None}; if restrict is
+                         C{None}, then any token is allowed.  You may call
+                         token(i) more than once.  However, the restrict set
+                         may never be larger than what was passed in on the
+                         first call to token(i).
+        """
+        if i == len(self.tokens):
+            self.scan(restrict)
+        if i < len(self.tokens):
+            # Make sure the restriction is more restricted.  This
+            # invariant is needed to avoid ruining tokenization at
+            # position i+1 and higher.
+            if restrict and self.restrictions[i]:
+                for r in restrict:
+                    if r not in self.restrictions[i]:
+                        raise NotImplementedError("Unimplemented: restriction set changed")
+            return self.tokens[i]
+        raise NoMoreTokens()
+
+    def __repr__(self):
+        """Print the last 10 tokens that have been scanned in"""
+        output = ''
+        for t in self.tokens[-10:]:
+            output = '%s\n  (@%s)  %s  =  %s' % (output,t[0],t[2],repr(t[3]))
+        return output
+
+    def scan(self, restrict):
+        """Should scan another token and add it to the list, self.tokens,
+        and add the restriction to self.restrictions"""
+        # Keep looking for a token, ignoring any in self.ignore
+        while 1:
+            # Search the patterns for the longest match, with earlier
+            # tokens in the list having preference
+            best_match = -1
+            best_pat = '(error)'
+            for p, regexp in self.patterns:
+                # First check to see if we're ignoring this token
+                if restrict and p not in restrict and p not in self.ignore:
+                    continue
+                m = regexp.match(self.input, self.pos)
+                if m and len(m.group(0)) > best_match:
+                    # We got a match that's better than the previous one
+                    best_pat = p
+                    best_match = len(m.group(0))
+
+            # If we didn't find anything, raise an error
+            if best_pat == '(error)' and best_match < 0:
+                msg = 'Bad Token'
+                if restrict:
+                    msg = 'Trying to find one of '+', '.join(restrict)
+                raise SyntaxError(self.pos, msg)
+
+            # If we found something that isn't to be ignored, return it
+            if best_pat not in self.ignore:
+                # Create a token with this data
+                token = (self.pos, self.pos+best_match, best_pat,
+                         self.input[self.pos:self.pos+best_match])
+                self.pos = self.pos + best_match
+                # Only add this token if it's not in the list
+                # (to prevent looping)
+                if not self.tokens or token != self.tokens[-1]:
+                    self.tokens.append(token)
+                    self.restrictions.append(restrict)
+                return
+            else:
+                # This token should be ignored ..
+                self.pos = self.pos + best_match
+
+class Parser:
+    """Base class for Yapps-generated parsers.
+
+    """
+
+    def __init__(self, scanner):
+        self._scanner = scanner
+        self._pos = 0
+
+    def _peek(self, *types):
+        """Returns the token type for lookahead; if there are any args
+        then the list of args is the set of token types to allow"""
+        tok = self._scanner.token(self._pos, types)
+        return tok[2]
+
+    def _scan(self, type):
+        """Returns the matched text, and moves to the next token"""
+        tok = self._scanner.token(self._pos, [type])
+        if tok[2] != type:
+            raise SyntaxError(tok[0], 'Trying to find '+type+' :'+ ' ,'.join(self._scanner.restrictions[self._pos]))
+        self._pos = 1 + self._pos
+        return tok[3]
+
+class Context:
+    """Class to represent the parser's call stack.
+
+    Every rule creates a Context that links to its parent rule.  The
+    contexts can be used for debugging.
+
+    """
+
+    def __init__(self, parent, scanner, tokenpos, rule, args=()):
+        """Create a new context.
+
+        @param parent: Context object or C{None}
+        @param scanner: Scanner object
+        @param tokenpos: scanner token position
+        @type tokenpos: C{int}
+        @param rule: name of the rule
+        @type rule: C{str}
+        @param args: tuple listing parameters to the rule
+
+        """
+        self.parent = parent
+        self.scanner = scanner
+        self.tokenpos = tokenpos
+        self.rule = rule
+        self.args = args
+
+    def __str__(self):
+        output = ''
+        if self.parent: output = str(self.parent) + ' > '
+        output += self.rule
+        return output
+
+def print_line_with_pointer(text, p):
+    """Print the line of 'text' that includes position 'p',
+    along with a second line with a single caret (^) at position p"""
+
+    # TODO: separate out the logic for determining the line/character
+    # location from the logic for determining how to display an
+    # 80-column line to stderr.
+
+    # Now try printing part of the line
+    text = text[max(p-80, 0):p+80]
+    p = p - max(p-80, 0)
+
+    # Strip to the left
+    i = text[:p].rfind('\n')
+    j = text[:p].rfind('\r')
+    if i < 0 or (0 <= j < i): i = j
+    if 0 <= i < p:
+        p = p - i - 1
+        text = text[i+1:]
+
+    # Strip to the right
+    i = text.find('\n', p)
+    j = text.find('\r', p)
+    if i < 0 or (0 <= j < i): i = j
+    if i >= 0:
+        text = text[:i]
+
+    # Now shorten the text
+    while len(text) > 70 and p > 60:
+        # Cut off 10 chars
+        text = "..." + text[10:]
+        p = p - 7
+
+    # Now print the string, along with an indicator
+    print >>sys.stderr, '> ',text
+    print >>sys.stderr, '> ',' '*p + '^'
+
+def print_error(input, err, scanner):
+    """Print error messages, the parser stack, and the input text -- for human-readable error messages."""
+    # NOTE: this function assumes 80 columns :-(
+    # Figure out the line number
+    line_number = scanner.get_line_number()
+    column_number = scanner.get_column_number()
+    print >>sys.stderr, '%d:%d: %s' % (line_number, column_number, err.msg)
+
+    context = err.context
+    if not context:
+        print_line_with_pointer(input, err.charpos)
+
+    while context:
+        # TODO: add line number
+        print >>sys.stderr, 'while parsing %s%s:' % (context.rule, tuple(context.args))
+        print_line_with_pointer(input, context.scanner.get_prev_char_pos(context.tokenpos))
+        context = context.parent
+
+def wrap_error_reporter(parser, rule):
+    try:
+        return getattr(parser, rule)()
+    except SyntaxError, e:
+        input = parser._scanner.input
+        print_error(input, e, parser._scanner)
+    except NoMoreTokens:
+        print >>sys.stderr, 'Could not complete parsing; stopped around here:'
+        print >>sys.stderr, parser._scanner
+
+
+from twisted.words.xish.xpath import AttribValue, BooleanValue, CompareValue
+from twisted.words.xish.xpath import Function, IndexValue, LiteralValue
+from twisted.words.xish.xpath import _AnyLocation, _Location
+
+
+# Begin -- grammar generated by Yapps
+import sys, re
+
+class XPathParserScanner(Scanner):
+    patterns = [
+        ('","', re.compile(',')),
+        ('"@"', re.compile('@')),
+        ('"\\)"', re.compile('\\)')),
+        ('"\\("', re.compile('\\(')),
+        ('"\\]"', re.compile('\\]')),
+        ('"\\["', re.compile('\\[')),
+        ('"//"', re.compile('//')),
+        ('"/"', re.compile('/')),
+        ('\\s+', re.compile('\\s+')),
+        ('INDEX', re.compile('[0-9]+')),
+        ('WILDCARD', re.compile('\\*')),
+        ('IDENTIFIER', re.compile('[a-zA-Z][a-zA-Z0-9_\\-]*')),
+        ('ATTRIBUTE', re.compile('\\@[a-zA-Z][a-zA-Z0-9_\\-]*')),
+        ('FUNCNAME', re.compile('[a-zA-Z][a-zA-Z0-9_]*')),
+        ('CMP_EQ', re.compile('\\=')),
+        ('CMP_NE', re.compile('\\!\\=')),
+        ('STR_DQ', re.compile('"([^"]|(\\"))*?"')),
+        ('STR_SQ', re.compile("'([^']|(\\'))*?'")),
+        ('OP_AND', re.compile('and')),
+        ('OP_OR', re.compile('or')),
+        ('END', re.compile('$')),
+    ]
+    def __init__(self, str):
+        Scanner.__init__(self,None,['\\s+'],str)
+
+class XPathParser(Parser):
+    Context = Context
+    def XPATH(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'XPATH', [])
+        PATH = self.PATH(_context)
+        result = PATH; current = result
+        while self._peek('END', '"/"', '"//"') != 'END':
+            PATH = self.PATH(_context)
+            current.childLocation = PATH; current = current.childLocation
+        if self._peek() not in ['END', '"/"', '"//"']:
+            raise SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['END', '"/"', '"//"']))
+        END = self._scan('END')
+        return  result
+
+    def PATH(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'PATH', [])
+        _token = self._peek('"/"', '"//"')
+        if _token == '"/"':
+            self._scan('"/"')
+            result = _Location()
+        else: # == '"//"'
+            self._scan('"//"')
+            result = _AnyLocation()
+        _token = self._peek('IDENTIFIER', 'WILDCARD')
+        if _token == 'IDENTIFIER':
+            IDENTIFIER = self._scan('IDENTIFIER')
+            result.elementName = IDENTIFIER
+        else: # == 'WILDCARD'
+            WILDCARD = self._scan('WILDCARD')
+            result.elementName = None
+        while self._peek('"\\["', 'END', '"/"', '"//"') == '"\\["':
+            self._scan('"\\["')
+            PREDICATE = self.PREDICATE(_context)
+            result.predicates.append(PREDICATE)
+            self._scan('"\\]"')
+        if self._peek() not in ['"\\["', 'END', '"/"', '"//"']:
+            raise SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['"\\["', 'END', '"/"', '"//"']))
+        return result
+
+    def PREDICATE(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'PREDICATE', [])
+        _token = self._peek('INDEX', '"\\("', '"@"', 'FUNCNAME', 'STR_DQ', 'STR_SQ')
+        if _token != 'INDEX':
+            EXPR = self.EXPR(_context)
+            return EXPR
+        else: # == 'INDEX'
+            INDEX = self._scan('INDEX')
+            return IndexValue(INDEX)
+
+    def EXPR(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'EXPR', [])
+        FACTOR = self.FACTOR(_context)
+        e = FACTOR
+        while self._peek('OP_AND', 'OP_OR', '"\\)"', '"\\]"') in ['OP_AND', 'OP_OR']:
+            BOOLOP = self.BOOLOP(_context)
+            FACTOR = self.FACTOR(_context)
+            e = BooleanValue(e, BOOLOP, FACTOR)
+        if self._peek() not in ['OP_AND', 'OP_OR', '"\\)"', '"\\]"']:
+            raise SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['OP_AND', 'OP_OR', '"\\)"', '"\\]"']))
+        return e
+
+    def BOOLOP(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'BOOLOP', [])
+        _token = self._peek('OP_AND', 'OP_OR')
+        if _token == 'OP_AND':
+            OP_AND = self._scan('OP_AND')
+            return OP_AND
+        else: # == 'OP_OR'
+            OP_OR = self._scan('OP_OR')
+            return OP_OR
+
+    def FACTOR(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'FACTOR', [])
+        _token = self._peek('"\\("', '"@"', 'FUNCNAME', 'STR_DQ', 'STR_SQ')
+        if _token != '"\\("':
+            TERM = self.TERM(_context)
+            return TERM
+        else: # == '"\\("'
+            self._scan('"\\("')
+            EXPR = self.EXPR(_context)
+            self._scan('"\\)"')
+            return EXPR
+
+    def TERM(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'TERM', [])
+        VALUE = self.VALUE(_context)
+        t = VALUE
+        if self._peek('CMP_EQ', 'CMP_NE', 'OP_AND', 'OP_OR', '"\\)"', '"\\]"') in ['CMP_EQ', 'CMP_NE']:
+            CMP = self.CMP(_context)
+            VALUE = self.VALUE(_context)
+            t = CompareValue(t, CMP, VALUE)
+        return t
+
+    def VALUE(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'VALUE', [])
+        _token = self._peek('"@"', 'FUNCNAME', 'STR_DQ', 'STR_SQ')
+        if _token == '"@"':
+            self._scan('"@"')
+            IDENTIFIER = self._scan('IDENTIFIER')
+            return AttribValue(IDENTIFIER)
+        elif _token == 'FUNCNAME':
+            FUNCNAME = self._scan('FUNCNAME')
+            f = Function(FUNCNAME); args = []
+            self._scan('"\\("')
+            if self._peek('"\\)"', '"@"', 'FUNCNAME', '","', 'STR_DQ', 'STR_SQ') not in ['"\\)"', '","']:
+                VALUE = self.VALUE(_context)
+                args.append(VALUE)
+                while self._peek('","', '"\\)"') == '","':
+                    self._scan('","')
+                    VALUE = self.VALUE(_context)
+                    args.append(VALUE)
+                if self._peek() not in ['","', '"\\)"']:
+                    raise SyntaxError(charpos=self._scanner.get_prev_char_pos(), context=_context, msg='Need one of ' + ', '.join(['","', '"\\)"']))
+            self._scan('"\\)"')
+            f.setParams(*args); return f
+        else: # in ['STR_DQ', 'STR_SQ']
+            STR = self.STR(_context)
+            return LiteralValue(STR[1:len(STR)-1])
+
+    def CMP(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'CMP', [])
+        _token = self._peek('CMP_EQ', 'CMP_NE')
+        if _token == 'CMP_EQ':
+            CMP_EQ = self._scan('CMP_EQ')
+            return CMP_EQ
+        else: # == 'CMP_NE'
+            CMP_NE = self._scan('CMP_NE')
+            return CMP_NE
+
+    def STR(self, _parent=None):
+        _context = self.Context(_parent, self._scanner, self._pos, 'STR', [])
+        _token = self._peek('STR_DQ', 'STR_SQ')
+        if _token == 'STR_DQ':
+            STR_DQ = self._scan('STR_DQ')
+            return STR_DQ
+        else: # == 'STR_SQ'
+            STR_SQ = self._scan('STR_SQ')
+            return STR_SQ
+
+
+def parse(rule, text):
+    P = XPathParser(XPathParserScanner(text))
+    return wrap_error_reporter(P, rule)
+
+if __name__ == '__main__':
+    from sys import argv, stdin
+    if len(argv) >= 2:
+        if len(argv) >= 3:
+            f = open(argv[2],'r')
+        else:
+            f = stdin
+        print parse(argv[1], f.read())
+    else: print >>sys.stderr, 'Args:  <rule> [<filename>]'
+# End -- grammar generated by Yapps
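
For illustration (hypothetical query string), the generated parser turns an
XPath-like expression into the location classes defined in
twisted.words.xish.xpath; XPathQuery in that module is the usual entry point
and calls parse() internally:

    location = parse('XPATH', "/message[@type='chat']/body")
    print location.elementName                 # 'message'
    print len(location.predicates)             # 1 (the @type comparison)
    print location.childLocation.elementName   # 'body'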
diff --git a/ThirdParty/Twisted/twisted/words/xmpproutertap.py b/ThirdParty/Twisted/twisted/words/xmpproutertap.py
new file mode 100644
index 0000000..0bdae0a
--- /dev/null
+++ b/ThirdParty/Twisted/twisted/words/xmpproutertap.py
@@ -0,0 +1,30 @@
+# -*- test-case-name: twisted.words.test.test_xmpproutertap -*-
+#
+# Copyright (c) Twisted Matrix Laboratories.
+# See LICENSE for details.
+
+from twisted.application import strports
+from twisted.python import usage
+from twisted.words.protocols.jabber import component
+
+class Options(usage.Options):
+    optParameters = [
+            ('port', None, 'tcp:5347:interface=127.0.0.1',
+                           'Port components connect to'),
+            ('secret', None, 'secret', 'Router secret'),
+    ]
+
+    optFlags = [
+            ('verbose', 'v', 'Log traffic'),
+    ]
+
+
+
+def makeService(config):
+    router = component.Router()
+    factory = component.XMPPComponentServerFactory(router, config['secret'])
+
+    if config['verbose']:
+        factory.logTraffic = True
+
+    return strports.service(config['port'], factory)
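
This tap module is normally driven by twistd's plugin machinery, but the two
objects can also be exercised directly; the port description and secret below
are placeholder values:

    from twisted.words import xmpproutertap

    options = xmpproutertap.Options()
    options.parseOptions(['--port', 'tcp:5347:interface=127.0.0.1',
                          '--secret', 's3cr3t', '--verbose'])
    service = xmpproutertap.makeService(options)
    # service provides IService and can be attached to an Application
    # and started by twistd.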
diff --git a/ThirdParty/VPIC/CMakeLists.txt b/ThirdParty/VPIC/CMakeLists.txt
index 0803d2c..fddb3fb 100644
--- a/ThirdParty/VPIC/CMakeLists.txt
+++ b/ThirdParty/VPIC/CMakeLists.txt
@@ -50,6 +50,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VPIC_SOURCE_DIR}/VPICView.h
     ${VPIC_SOURCE_DIR}/GridExchange.h
     ${VPIC_BINARY_DIR}/VPICDefinition.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/VPIC
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/VPIC
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/ZopeInterface/CMakeLists.txt b/ThirdParty/ZopeInterface/CMakeLists.txt
new file mode 100644
index 0000000..3e89851
--- /dev/null
+++ b/ThirdParty/ZopeInterface/CMakeLists.txt
@@ -0,0 +1,20 @@
+vtk_module_impl()
+vtk_module_export("")
+
+option(VTK_USE_SYSTEM_ZOPE "Use system Zope Interface Python package" OFF)
+mark_as_advanced(VTK_USE_SYSTEM_ZOPE)
+
+if(NOT VTK_USE_SYSTEM_ZOPE)
+  find_package(PythonInterp)
+
+  include(vtkPythonPackages)
+
+  set(zope_SOURCE_DIR "${CMAKE_CURRENT_SOURCE_DIR}/zope")
+  set(zope_BINARY_DIR "${VTK_BUILD_PYTHON_MODULE_DIR}/zope")
+
+  build_python_package("zope" ${zope_SOURCE_DIR} ${zope_BINARY_DIR})
+
+  install(DIRECTORY ${zope_BINARY_DIR}
+    DESTINATION "${VTK_INSTALL_PYTHON_MODULE_DIR}"
+    COMPONENT Runtime)
+endif()
diff --git a/ThirdParty/ZopeInterface/COPYRIGHT.txt b/ThirdParty/ZopeInterface/COPYRIGHT.txt
new file mode 100644
index 0000000..79859e0
--- /dev/null
+++ b/ThirdParty/ZopeInterface/COPYRIGHT.txt
@@ -0,0 +1 @@
+Zope Foundation and Contributors
\ No newline at end of file
diff --git a/ThirdParty/ZopeInterface/LICENSE.txt b/ThirdParty/ZopeInterface/LICENSE.txt
new file mode 100644
index 0000000..e1f9ad7
--- /dev/null
+++ b/ThirdParty/ZopeInterface/LICENSE.txt
@@ -0,0 +1,44 @@
+Zope Public License (ZPL) Version 2.1
+
+A copyright notice accompanies this license document that identifies the
+copyright holders.
+
+This license has been certified as open source. It has also been designated as
+GPL compatible by the Free Software Foundation (FSF).
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are met:
+
+1. Redistributions in source code must retain the accompanying copyright
+notice, this list of conditions, and the following disclaimer.
+
+2. Redistributions in binary form must reproduce the accompanying copyright
+notice, this list of conditions, and the following disclaimer in the
+documentation and/or other materials provided with the distribution.
+
+3. Names of the copyright holders must not be used to endorse or promote
+products derived from this software without prior written permission from the
+copyright holders.
+
+4. The right to distribute this software or to use it for any purpose does not
+give you the right to use Servicemarks (sm) or Trademarks (tm) of the
+copyright holders. Use of them is covered by separate agreement with the
+copyright holders.
+
+5. If any files are modified, you must cause the modified files to carry
+prominent notices stating that you changed the files and the date of any
+change.
+
+Disclaimer
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY EXPRESSED
+OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
+OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+EVENT SHALL THE COPYRIGHT HOLDERS BE LIABLE FOR ANY DIRECT, INDIRECT,
+INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
+PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
+LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/ThirdParty/ZopeInterface/module.cmake b/ThirdParty/ZopeInterface/module.cmake
new file mode 100644
index 0000000..55264d7
--- /dev/null
+++ b/ThirdParty/ZopeInterface/module.cmake
@@ -0,0 +1,4 @@
+vtk_module(ZopeInterface
+  DEPENDS
+    vtkPython
+  EXCLUDE_FROM_WRAPPING)
diff --git a/ThirdParty/ZopeInterface/zope/__init__.py b/ThirdParty/ZopeInterface/zope/__init__.py
new file mode 100644
index 0000000..2e2033b
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/__init__.py
@@ -0,0 +1,7 @@
+# this is a namespace package
+try:
+    import pkg_resources
+    pkg_resources.declare_namespace(__name__)
+except ImportError:
+    import pkgutil
+    __path__ = pkgutil.extend_path(__path__, __name__)
diff --git a/ThirdParty/ZopeInterface/zope/interface/__init__.py b/ThirdParty/ZopeInterface/zope/interface/__init__.py
new file mode 100644
index 0000000..ad0a766
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/__init__.py
@@ -0,0 +1,89 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Interfaces
+
+This package implements the Python "scarecrow" proposal.
+
+The package exports two objects, `Interface` and `Attribute`, directly. It also
+exports several helper methods. Interface is used to create an interface with
+a class statement, as in:
+
+  class IMyInterface(Interface):
+    '''Interface documentation
+    '''
+
+    def meth(arg1, arg2):
+        '''Documentation for meth
+        '''
+
+    # Note that there is no self argument
+
+To find out what you can do with interfaces, see the interface
+interface, `IInterface` in the `interfaces` module.
+
+The package has several public modules:
+
+  o `declarations` provides utilities to declare interfaces on objects. It
+    also provides a wide range of helpful utilities that aid in managing
+    declared interfaces. Most of its public names are however imported here.
+
+  o `document` has a utility for documenting an interface as structured text.
+
+  o `exceptions` has the interface-defined exceptions
+
+  o `interfaces` contains a list of all public interfaces for this package.
+
+  o `verify` has utilities for verifying implementations of interfaces.
+
+See the module doc strings for more information.
+"""
+__docformat__ = 'restructuredtext'
+
+from zope.interface.interface import Interface
+from zope.interface.interface import _wire
+
+# Need to actually get the interface elements to implement the right interfaces
+_wire()
+del _wire
+
+from zope.interface.declarations import Declaration
+from zope.interface.declarations import alsoProvides
+from zope.interface.declarations import classImplements
+from zope.interface.declarations import classImplementsOnly
+from zope.interface.declarations import classProvides
+from zope.interface.declarations import directlyProvidedBy
+from zope.interface.declarations import directlyProvides
+from zope.interface.declarations import implementedBy
+from zope.interface.declarations import implementer
+from zope.interface.declarations import implementer_only
+from zope.interface.declarations import implements
+from zope.interface.declarations import implementsOnly
+from zope.interface.declarations import moduleProvides
+from zope.interface.declarations import noLongerProvides
+from zope.interface.declarations import providedBy
+from zope.interface.declarations import provider
+from zope.interface.exceptions import Invalid
+from zope.interface.interface import Attribute
+from zope.interface.interface import invariant
+from zope.interface.interface import taggedValue
+
+# The following are to make spec pickles cleaner
+from zope.interface.declarations import Provides
+
+
+from zope.interface.interfaces import IInterfaceDeclaration
+
+moduleProvides(IInterfaceDeclaration)
+
+__all__ = ('Interface', 'Attribute') + tuple(IInterfaceDeclaration)
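
A minimal usage sketch for the API exported by zope/interface/__init__.py
above (illustrative only; IGreeter and Greeter are hypothetical names, not
part of the upstream sources):

    from zope.interface import Interface, Attribute, implementer

    class IGreeter(Interface):
        """Something that can greet."""
        name = Attribute("Name used in the greeting")

        def greet():
            """Return a greeting string (note: no self argument)."""

    @implementer(IGreeter)
    class Greeter(object):
        name = 'world'

        def greet(self):
            return 'hello, %s' % self.name

    assert IGreeter.implementedBy(Greeter)   # class-level declaration
    assert IGreeter.providedBy(Greeter())    # instances provide the interface
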
diff --git a/ThirdParty/ZopeInterface/zope/interface/_compat.py b/ThirdParty/ZopeInterface/zope/interface/_compat.py
new file mode 100644
index 0000000..b1f0212
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/_compat.py
@@ -0,0 +1,69 @@
+##############################################################################
+#
+# Copyright (c) 2006 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Basic components support
+"""
+import sys
+import types
+
+if sys.version_info[0] < 3: #pragma NO COVER
+
+    def _u(s):
+        return unicode(s, 'unicode_escape')
+
+    def _normalize_name(name):
+        if isinstance(name, basestring):
+            return unicode(name)
+        raise TypeError("name must be a regular or unicode string")
+
+    CLASS_TYPES = (type, types.ClassType)
+    STRING_TYPES = (basestring,)
+
+    _BUILTINS = '__builtin__'
+
+    PYTHON3 = False
+    PYTHON2 = True
+
+else: #pragma NO COVER
+
+    def _u(s):
+        return s
+
+    def _normalize_name(name):
+        if isinstance(name, bytes):
+            name = str(name, 'ascii')
+        if isinstance(name, str):
+            return name
+        raise TypeError("name must be a string or ASCII-only bytes")
+
+    CLASS_TYPES = (type,)
+    STRING_TYPES = (str,)
+
+    _BUILTINS = 'builtins'
+
+    PYTHON3 = True
+    PYTHON2 = False
+
+def _skip_under_py3k(test_method): #pragma NO COVER
+    if sys.version_info[0] < 3:
+        return test_method
+    def _dummy(*args):
+        pass
+    return _dummy
+
+def _skip_under_py2(test_method): #pragma NO COVER
+    if sys.version_info[0] > 2:
+        return test_method
+    def _dummy(*args):
+        pass
+    return _dummy
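
A short sketch of the compatibility helpers above; it behaves the same on
Python 2 and Python 3 (illustrative only, not part of the upstream file):

    from zope.interface._compat import _normalize_name, STRING_TYPES

    name = _normalize_name(b'IFoo')        # bytes/str names are normalized to text
    assert isinstance(name, STRING_TYPES)

    try:
        _normalize_name(123)               # non-string names are rejected
    except TypeError:
        pass
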
diff --git a/ThirdParty/ZopeInterface/zope/interface/_flatten.py b/ThirdParty/ZopeInterface/zope/interface/_flatten.py
new file mode 100644
index 0000000..a80c2de
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/_flatten.py
@@ -0,0 +1,35 @@
+##############################################################################
+#
+# Copyright (c) 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Adapter-style interface registry
+
+See Adapter class.
+"""
+from zope.interface import Declaration
+
+def _flatten(implements, include_None=0):
+
+    try:
+        r = implements.flattened()
+    except AttributeError:
+        if implements is None:
+            r=()
+        else:
+            r = Declaration(implements).flattened()
+
+    if not include_None:
+        return r
+
+    r = list(r)
+    r.append(None)
+    return r
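
The two edge cases visible in _flatten() above, shown directly (illustrative
only): a None declaration flattens to an empty sequence, and include_None
appends a trailing None entry.

    from zope.interface._flatten import _flatten

    assert tuple(_flatten(None)) == ()
    assert list(_flatten(None, include_None=1)) == [None]
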
diff --git a/ThirdParty/ZopeInterface/zope/interface/_zope_interface_coptimizations.c b/ThirdParty/ZopeInterface/zope/interface/_zope_interface_coptimizations.c
new file mode 100644
index 0000000..b2088ee
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/_zope_interface_coptimizations.c
@@ -0,0 +1,1688 @@
+/*###########################################################################
+ #
+ # Copyright (c) 2003 Zope Foundation and Contributors.
+ # All Rights Reserved.
+ #
+ # This software is subject to the provisions of the Zope Public License,
+ # Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+ # THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+ # WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+ # WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+ # FOR A PARTICULAR PURPOSE.
+ #
+ ############################################################################*/
+
+#include "Python.h"
+#include "structmember.h"
+
+#define TYPE(O) ((PyTypeObject*)(O))
+#define OBJECT(O) ((PyObject*)(O))
+#define CLASSIC(O) ((PyClassObject*)(O))
+#ifndef PyVarObject_HEAD_INIT
+#define PyVarObject_HEAD_INIT(a, b) PyObject_HEAD_INIT(a) b,
+#endif
+#ifndef Py_TYPE
+#define Py_TYPE(o) ((o)->ob_type)
+#endif
+
+static PyObject *str__dict__, *str__implemented__, *strextends;
+static PyObject *BuiltinImplementationSpecifications, *str__provides__;
+static PyObject *str__class__, *str__providedBy__;
+static PyObject *empty, *fallback, *str_implied, *str_cls, *str_implements;
+static PyObject *str__conform__, *str_call_conform, *adapter_hooks;
+static PyObject *str_uncached_lookup, *str_uncached_lookupAll;
+static PyObject *str_uncached_subscriptions;
+static PyObject *str_registry, *strro, *str_generation, *strchanged;
+
+static PyTypeObject *Implements;
+
+static int imported_declarations = 0;
+
+static int 
+import_declarations(void)
+{
+  PyObject *declarations, *i;
+
+  declarations = PyImport_ImportModule("zope.interface.declarations");
+  if (declarations == NULL)
+    return -1;
+  
+  BuiltinImplementationSpecifications = PyObject_GetAttrString(
+                    declarations, "BuiltinImplementationSpecifications");
+  if (BuiltinImplementationSpecifications == NULL)
+    return -1;
+
+  empty = PyObject_GetAttrString(declarations, "_empty");
+  if (empty == NULL)
+    return -1;
+
+  fallback = PyObject_GetAttrString(declarations, "implementedByFallback");
+  if (fallback == NULL)
+    return -1;
+
+
+
+  i = PyObject_GetAttrString(declarations, "Implements");
+  if (i == NULL)
+    return -1;
+
+  if (! PyType_Check(i))
+    {
+      PyErr_SetString(PyExc_TypeError, 
+                      "zope.interface.declarations.Implements is not a type");
+      return -1;
+    }
+
+  Implements = (PyTypeObject *)i;
+
+  Py_DECREF(declarations);
+
+  imported_declarations = 1;
+  return 0;
+}
+
+static PyTypeObject SpecType;   /* Forward */
+
+static PyObject *
+implementedByFallback(PyObject *cls)
+{
+  if (imported_declarations == 0 && import_declarations() < 0)
+    return NULL;
+
+  return PyObject_CallFunctionObjArgs(fallback, cls, NULL);
+}
+
+static PyObject *
+implementedBy(PyObject *ignored, PyObject *cls)
+{
+  /* Fast retrieval of implements spec, if possible, to optimize
+     common case.  Use fallback code if we get stuck.
+  */
+
+  PyObject *dict = NULL, *spec;
+
+  if (PyType_Check(cls))
+    {
+      dict = TYPE(cls)->tp_dict;
+      Py_XINCREF(dict);
+    }
+
+  if (dict == NULL)
+    dict = PyObject_GetAttr(cls, str__dict__);
+
+  if (dict == NULL)
+    {
+      /* Probably a security proxied class, use more expensive fallback code */
+      PyErr_Clear();
+      return implementedByFallback(cls);
+    }
+
+  spec = PyObject_GetItem(dict, str__implemented__);
+  Py_DECREF(dict);
+  if (spec)
+    {
+      if (imported_declarations == 0 && import_declarations() < 0)
+        return NULL;
+
+      if (PyObject_TypeCheck(spec, Implements))
+        return spec;
+
+      /* Old-style declaration, use more expensive fallback code */
+      Py_DECREF(spec);
+      return implementedByFallback(cls);
+    }
+
+  PyErr_Clear();
+
+  /* Maybe we have a builtin */
+  if (imported_declarations == 0 && import_declarations() < 0)
+    return NULL;
+  
+  spec = PyDict_GetItem(BuiltinImplementationSpecifications, cls);
+  if (spec != NULL)
+    {
+      Py_INCREF(spec);
+      return spec;
+    }
+
+  /* We're stuck, use fallback */
+  return implementedByFallback(cls);
+}
+
+static PyObject *
+getObjectSpecification(PyObject *ignored, PyObject *ob)
+{
+  PyObject *cls, *result;
+
+  result = PyObject_GetAttr(ob, str__provides__);
+  if (result != NULL && PyObject_TypeCheck(result, &SpecType))
+    return result;
+
+  PyErr_Clear();
+
+  /* We do a getattr here so as not to be defeated by proxies */
+  cls = PyObject_GetAttr(ob, str__class__);
+  if (cls == NULL)
+    {
+      PyErr_Clear();
+      if (imported_declarations == 0 && import_declarations() < 0)
+        return NULL;
+      Py_INCREF(empty);
+      return empty;
+    }
+
+  result = implementedBy(NULL, cls);
+  Py_DECREF(cls);
+
+  return result;
+}
+
+static PyObject *
+providedBy(PyObject *ignored, PyObject *ob)
+{
+  PyObject *result, *cls, *cp;
+  
+  result = PyObject_GetAttr(ob, str__providedBy__);
+  if (result == NULL)
+    {
+      PyErr_Clear();
+      return getObjectSpecification(NULL, ob);
+    } 
+
+
+  /* We want to make sure we have a spec. We can't do a type check
+     because we may have a proxy, so we'll just try to get the
+     only attribute.
+  */
+  if (PyObject_TypeCheck(result, &SpecType)
+      || 
+      PyObject_HasAttr(result, strextends)
+      )
+    return result;
+    
+  /*
+    The object's class doesn't understand descriptors.
+    Sigh. We need to get an object descriptor, but we have to be
+    careful.  We want to use the instance's __provides__, if
+    there is one, but only if it didn't come from the class.
+  */
+  Py_DECREF(result);
+
+  cls = PyObject_GetAttr(ob, str__class__);
+  if (cls == NULL)
+    return NULL;
+
+  result = PyObject_GetAttr(ob, str__provides__);
+  if (result == NULL)
+    {      
+      /* No __provides__, so just fall back to implementedBy */
+      PyErr_Clear();
+      result = implementedBy(NULL, cls);
+      Py_DECREF(cls);
+      return result;
+    } 
+
+  cp = PyObject_GetAttr(cls, str__provides__);
+  if (cp == NULL)
+    {
+      /* The class has no provides, assume we're done: */
+      PyErr_Clear();
+      Py_DECREF(cls);
+      return result;
+    }
+
+  if (cp == result)
+    {
+      /*
+        Oops, we got the provides from the class. This means
+        the object doesn't have its own. We should use implementedBy
+      */
+      Py_DECREF(result);
+      result = implementedBy(NULL, cls);
+    }
+
+  Py_DECREF(cls);
+  Py_DECREF(cp);
+
+  return result;
+}
+
+/* 
+   Get an attribute from an inst dict. Return a borrowed reference.
+  
+   This has a number of advantages:
+
+   - It avoids layers of Python API
+
+   - It doesn't waste time looking for descriptors
+
+   - It fails without raising an exception, although that shouldn't really
+     matter.
+
+*/
+static PyObject *
+inst_attr(PyObject *self, PyObject *name)
+{
+  PyObject **dictp, *v;
+
+  dictp = _PyObject_GetDictPtr(self);
+  if (dictp && *dictp && (v = PyDict_GetItem(*dictp, name)))
+    return v;
+  PyErr_SetObject(PyExc_AttributeError, name);
+  return NULL;
+}
+
+
+static PyObject *
+Spec_extends(PyObject *self, PyObject *other)
+{  
+  PyObject *implied;
+
+  implied = inst_attr(self, str_implied);
+  if (implied == NULL)
+    return NULL;
+
+#ifdef Py_True
+  if (PyDict_GetItem(implied, other) != NULL)
+    {
+      Py_INCREF(Py_True);
+      return Py_True;
+    }
+  Py_INCREF(Py_False);
+  return Py_False;
+#else
+  return PyInt_FromLong(PyDict_GetItem(implied, other) != NULL);
+#endif
+}
+
+static char Spec_extends__doc__[] = 
+"Test whether a specification is or extends another"
+;
+
+static char Spec_providedBy__doc__[] = 
+"Test whether an interface is implemented by the specification"
+;
+
+static PyObject *
+Spec_call(PyObject *self, PyObject *args, PyObject *kw)
+{
+  PyObject *spec;
+
+  if (! PyArg_ParseTuple(args, "O", &spec))
+    return NULL;
+  return Spec_extends(self, spec);
+}
+
+static PyObject *
+Spec_providedBy(PyObject *self, PyObject *ob)
+{
+  PyObject *decl, *item;
+
+  decl = providedBy(NULL, ob);
+  if (decl == NULL)
+    return NULL;
+
+  if (PyObject_TypeCheck(decl, &SpecType))
+    item = Spec_extends(decl, self);
+  else
+    /* decl is probably a security proxy.  We have to go the long way
+       around. 
+    */
+    item = PyObject_CallFunctionObjArgs(decl, self, NULL);
+
+  Py_DECREF(decl);
+  return item;
+}
+
+
+static char Spec_implementedBy__doc__[] = 
+"Test whether the specification is implemented by a class or factory.\n"
+"Raise TypeError if argument is neither a class nor a callable."
+;
+
+static PyObject *
+Spec_implementedBy(PyObject *self, PyObject *cls)
+{
+  PyObject *decl, *item;
+
+  decl = implementedBy(NULL, cls);
+  if (decl == NULL)
+    return NULL;
+  
+  if (PyObject_TypeCheck(decl, &SpecType))
+    item = Spec_extends(decl, self);
+  else
+    item = PyObject_CallFunctionObjArgs(decl, self, NULL);
+
+  Py_DECREF(decl);
+  return item;
+}
+
+static struct PyMethodDef Spec_methods[] = {
+	{"providedBy",  
+         (PyCFunction)Spec_providedBy,		METH_O,
+	 Spec_providedBy__doc__},
+	{"implementedBy", 
+         (PyCFunction)Spec_implementedBy,	METH_O,
+	 Spec_implementedBy__doc__},
+	{"isOrExtends",	(PyCFunction)Spec_extends,	METH_O,
+	 Spec_extends__doc__},
+
+	{NULL,		NULL}		/* sentinel */
+};
+
+static PyTypeObject SpecType = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	/* tp_name           */ "_interface_coptimizations."
+                                "SpecificationBase",
+	/* tp_basicsize      */ 0,
+	/* tp_itemsize       */ 0,
+	/* tp_dealloc        */ (destructor)0,
+	/* tp_print          */ (printfunc)0,
+	/* tp_getattr        */ (getattrfunc)0,
+	/* tp_setattr        */ (setattrfunc)0,
+	/* tp_compare        */ 0,
+	/* tp_repr           */ (reprfunc)0,
+	/* tp_as_number      */ 0,
+	/* tp_as_sequence    */ 0,
+	/* tp_as_mapping     */ 0,
+	/* tp_hash           */ (hashfunc)0,
+	/* tp_call           */ (ternaryfunc)Spec_call,
+	/* tp_str            */ (reprfunc)0,
+        /* tp_getattro       */ (getattrofunc)0,
+        /* tp_setattro       */ (setattrofunc)0,
+        /* tp_as_buffer      */ 0,
+        /* tp_flags          */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+        "Base type for Specification objects",
+        /* tp_traverse       */ (traverseproc)0,
+        /* tp_clear          */ (inquiry)0,
+        /* tp_richcompare    */ (richcmpfunc)0,
+        /* tp_weaklistoffset */ (long)0,
+        /* tp_iter           */ (getiterfunc)0,
+        /* tp_iternext       */ (iternextfunc)0,
+        /* tp_methods        */ Spec_methods,
+};
+
+static PyObject *
+OSD_descr_get(PyObject *self, PyObject *inst, PyObject *cls)
+{
+  PyObject *provides;
+
+  if (inst == NULL)
+    return getObjectSpecification(NULL, cls);
+
+  provides = PyObject_GetAttr(inst, str__provides__);
+  if (provides != NULL)
+    return provides;
+  PyErr_Clear();
+  return implementedBy(NULL, cls);
+}
+
+static PyTypeObject OSDType = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	/* tp_name           */ "_interface_coptimizations."
+                                "ObjectSpecificationDescriptor",
+	/* tp_basicsize      */ 0,
+	/* tp_itemsize       */ 0,
+	/* tp_dealloc        */ (destructor)0,
+	/* tp_print          */ (printfunc)0,
+	/* tp_getattr        */ (getattrfunc)0,
+	/* tp_setattr        */ (setattrfunc)0,
+	/* tp_compare        */ 0,
+	/* tp_repr           */ (reprfunc)0,
+	/* tp_as_number      */ 0,
+	/* tp_as_sequence    */ 0,
+	/* tp_as_mapping     */ 0,
+	/* tp_hash           */ (hashfunc)0,
+	/* tp_call           */ (ternaryfunc)0,
+	/* tp_str            */ (reprfunc)0,
+        /* tp_getattro       */ (getattrofunc)0,
+        /* tp_setattro       */ (setattrofunc)0,
+        /* tp_as_buffer      */ 0,
+        /* tp_flags          */ Py_TPFLAGS_DEFAULT
+				| Py_TPFLAGS_BASETYPE ,
+	"Object Specification Descriptor",
+        /* tp_traverse       */ (traverseproc)0,
+        /* tp_clear          */ (inquiry)0,
+        /* tp_richcompare    */ (richcmpfunc)0,
+        /* tp_weaklistoffset */ (long)0,
+        /* tp_iter           */ (getiterfunc)0,
+        /* tp_iternext       */ (iternextfunc)0,
+        /* tp_methods        */ 0,
+        /* tp_members        */ 0,
+        /* tp_getset         */ 0,
+        /* tp_base           */ 0,
+        /* tp_dict           */ 0, /* internal use */
+        /* tp_descr_get      */ (descrgetfunc)OSD_descr_get,
+};
+
+static PyObject *
+CPB_descr_get(PyObject *self, PyObject *inst, PyObject *cls)
+{
+  PyObject *mycls, *implements;
+
+  mycls = inst_attr(self, str_cls);
+  if (mycls == NULL)
+    return NULL;
+
+  if (cls == mycls)
+    {
+      if (inst == NULL)
+        {
+          Py_INCREF(self);
+          return OBJECT(self);
+        }
+
+      implements = inst_attr(self, str_implements);
+      Py_XINCREF(implements);
+      return implements;
+    }
+  
+  PyErr_SetObject(PyExc_AttributeError, str__provides__);
+  return NULL;
+}
+
+static PyTypeObject CPBType = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	/* tp_name           */ "_interface_coptimizations."
+                                "ClassProvidesBase",
+	/* tp_basicsize      */ 0,
+	/* tp_itemsize       */ 0,
+	/* tp_dealloc        */ (destructor)0,
+	/* tp_print          */ (printfunc)0,
+	/* tp_getattr        */ (getattrfunc)0,
+	/* tp_setattr        */ (setattrfunc)0,
+	/* tp_compare        */ 0,
+	/* tp_repr           */ (reprfunc)0,
+	/* tp_as_number      */ 0,
+	/* tp_as_sequence    */ 0,
+	/* tp_as_mapping     */ 0,
+	/* tp_hash           */ (hashfunc)0,
+	/* tp_call           */ (ternaryfunc)0,
+	/* tp_str            */ (reprfunc)0,
+        /* tp_getattro       */ (getattrofunc)0,
+        /* tp_setattro       */ (setattrofunc)0,
+        /* tp_as_buffer      */ 0,
+        /* tp_flags          */ Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE,
+        "C Base class for ClassProvides",
+        /* tp_traverse       */ (traverseproc)0,
+        /* tp_clear          */ (inquiry)0,
+        /* tp_richcompare    */ (richcmpfunc)0,
+        /* tp_weaklistoffset */ (long)0,
+        /* tp_iter           */ (getiterfunc)0,
+        /* tp_iternext       */ (iternextfunc)0,
+        /* tp_methods        */ 0,
+        /* tp_members        */ 0,
+        /* tp_getset         */ 0,
+        /* tp_base           */ &SpecType,
+        /* tp_dict           */ 0, /* internal use */
+        /* tp_descr_get      */ (descrgetfunc)CPB_descr_get,
+};
+
+/* ==================================================================== */
+/* ========== Begin: __call__ and __adapt__ =========================== */
+
+/*
+    def __adapt__(self, obj):
+        """Adapt an object to the reciever
+        """
+        if self.providedBy(obj):
+            return obj
+
+        for hook in adapter_hooks:
+            adapter = hook(self, obj)
+            if adapter is not None:
+                return adapter
+
+  
+*/
+static PyObject *
+__adapt__(PyObject *self, PyObject *obj)
+{
+  PyObject *decl, *args, *adapter;
+  int implements, i, l;
+
+  decl = providedBy(NULL, obj);
+  if (decl == NULL)
+    return NULL;
+
+  if (PyObject_TypeCheck(decl, &SpecType))
+    {
+      PyObject *implied;
+
+      implied = inst_attr(decl, str_implied);
+      if (implied == NULL)
+        {
+          Py_DECREF(decl);
+          return NULL;
+        }
+
+      implements = PyDict_GetItem(implied, self) != NULL;
+      Py_DECREF(decl);
+    }
+  else
+    {
+      /* decl is probably a security proxy.  We have to go the long way
+         around. 
+      */
+      PyObject *r;
+      r = PyObject_CallFunctionObjArgs(decl, self, NULL);
+      Py_DECREF(decl);
+      if (r == NULL)
+        return NULL;
+      implements = PyObject_IsTrue(r);
+      Py_DECREF(r);
+    }
+
+  if (implements)
+    {
+      Py_INCREF(obj);
+      return obj;
+    }
+
+  l = PyList_GET_SIZE(adapter_hooks);
+  args = PyTuple_New(2);
+  if (args == NULL)
+    return NULL;
+  Py_INCREF(self);
+  PyTuple_SET_ITEM(args, 0, self);
+  Py_INCREF(obj);
+  PyTuple_SET_ITEM(args, 1, obj);
+  for (i = 0; i < l; i++)
+    {
+      adapter = PyObject_CallObject(PyList_GET_ITEM(adapter_hooks, i), args);
+      if (adapter == NULL || adapter != Py_None)
+        {
+          Py_DECREF(args);
+          return adapter;
+        }
+      Py_DECREF(adapter);
+    }
+
+  Py_DECREF(args);
+
+  Py_INCREF(Py_None);
+  return Py_None;
+}
+
+static struct PyMethodDef ib_methods[] = {
+  {"__adapt__",	(PyCFunction)__adapt__, METH_O,
+   "Adapt an object to the reciever"},
+  {NULL,		NULL}		/* sentinel */
+};
+
+/* 
+        def __call__(self, obj, alternate=_marker):
+            conform = getattr(obj, '__conform__', None)
+            if conform is not None:
+                adapter = self._call_conform(conform)
+                if adapter is not None:
+                    return adapter
+
+            adapter = self.__adapt__(obj)
+
+            if adapter is not None:
+                return adapter
+            elif alternate is not _marker:
+                return alternate
+            else:
+                raise TypeError("Could not adapt", obj, self)
+*/
+static PyObject *
+ib_call(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+  PyObject *conform, *obj, *alternate=NULL, *adapter;
+  
+  static char *kwlist[] = {"obj", "alternate", NULL};
+
+  if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|O", kwlist,
+                                   &obj, &alternate))
+    return NULL;
+
+  conform = PyObject_GetAttr(obj, str__conform__);
+  if (conform != NULL)
+    {
+      adapter = PyObject_CallMethodObjArgs(self, str_call_conform,
+                                           conform, NULL);
+      Py_DECREF(conform);
+      if (adapter == NULL || adapter != Py_None)
+        return adapter;
+      Py_DECREF(adapter);
+    }
+  else
+    PyErr_Clear();
+ 
+  adapter = __adapt__(self, obj);
+  if (adapter == NULL || adapter != Py_None)
+    return adapter;
+  Py_DECREF(adapter);
+
+  if (alternate != NULL)
+    {
+      Py_INCREF(alternate);
+      return alternate;
+    }
+
+  adapter = Py_BuildValue("sOO", "Could not adapt", obj, self);
+  if (adapter != NULL)
+    {
+      PyErr_SetObject(PyExc_TypeError, adapter);
+      Py_DECREF(adapter);
+    }
+  return NULL;
+}
+
+static PyTypeObject InterfaceBase = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	/* tp_name           */ "_zope_interface_coptimizations."
+                                "InterfaceBase",
+	/* tp_basicsize      */ 0,
+	/* tp_itemsize       */ 0,
+	/* tp_dealloc        */ (destructor)0,
+	/* tp_print          */ (printfunc)0,
+	/* tp_getattr        */ (getattrfunc)0,
+	/* tp_setattr        */ (setattrfunc)0,
+	/* tp_compare        */ 0,
+	/* tp_repr           */ (reprfunc)0,
+	/* tp_as_number      */ 0,
+	/* tp_as_sequence    */ 0,
+	/* tp_as_mapping     */ 0,
+	/* tp_hash           */ (hashfunc)0,
+	/* tp_call           */ (ternaryfunc)ib_call,
+	/* tp_str            */ (reprfunc)0,
+        /* tp_getattro       */ (getattrofunc)0,
+        /* tp_setattro       */ (setattrofunc)0,
+        /* tp_as_buffer      */ 0,
+        /* tp_flags          */ Py_TPFLAGS_DEFAULT
+				| Py_TPFLAGS_BASETYPE ,
+	/* tp_doc */ "Interface base type providing __call__ and __adapt__",
+        /* tp_traverse       */ (traverseproc)0,
+        /* tp_clear          */ (inquiry)0,
+        /* tp_richcompare    */ (richcmpfunc)0,
+        /* tp_weaklistoffset */ (long)0,
+        /* tp_iter           */ (getiterfunc)0,
+        /* tp_iternext       */ (iternextfunc)0,
+        /* tp_methods        */ ib_methods,
+};
+
+/* =================== End: __call__ and __adapt__ ==================== */
+/* ==================================================================== */
+
+/* ==================================================================== */
+/* ========================== Begin: Lookup Bases ===================== */
+
+typedef struct {
+  PyObject_HEAD
+  PyObject *_cache;
+  PyObject *_mcache;
+  PyObject *_scache;
+} lookup;
+
+typedef struct {
+  PyObject_HEAD
+  PyObject *_cache;
+  PyObject *_mcache;
+  PyObject *_scache;
+  PyObject *_verify_ro;
+  PyObject *_verify_generations;
+} verify;
+
+static int
+lookup_traverse(lookup *self, visitproc visit, void *arg)
+{
+  int vret;
+
+  if (self->_cache) {
+    vret = visit(self->_cache, arg);
+    if (vret != 0)
+      return vret;
+  }
+
+  if (self->_mcache) {
+    vret = visit(self->_mcache, arg);
+    if (vret != 0)
+      return vret;
+  }
+
+  if (self->_scache) {
+    vret = visit(self->_scache, arg);
+    if (vret != 0)
+      return vret;
+  }
+  
+  return 0;
+}
+
+static int
+lookup_clear(lookup *self)
+{
+  Py_CLEAR(self->_cache);
+  Py_CLEAR(self->_mcache);
+  Py_CLEAR(self->_scache);
+  return 0;
+}
+
+static void
+lookup_dealloc(lookup *self)
+{
+  lookup_clear(self);
+  Py_TYPE(self)->tp_free((PyObject*)self);
+}
+
+/*
+    def changed(self, ignored=None):
+        self._cache.clear()
+        self._mcache.clear()
+        self._scache.clear()
+*/
+static PyObject *
+lookup_changed(lookup *self, PyObject *ignored)
+{
+  lookup_clear(self);
+  Py_INCREF(Py_None);
+  return Py_None;
+}
+
+#define ASSURE_DICT(N) if (N == NULL) { N = PyDict_New(); \
+                                        if (N == NULL) return NULL; \
+                                       }
+
+/*  
+    def _getcache(self, provided, name):
+        cache = self._cache.get(provided)
+        if cache is None:
+            cache = {}
+            self._cache[provided] = cache
+        if name:
+            c = cache.get(name)
+            if c is None:
+                c = {}
+                cache[name] = c
+            cache = c
+        return cache
+*/
+static PyObject *
+_subcache(PyObject *cache, PyObject *key)
+{
+  PyObject *subcache;
+
+  subcache = PyDict_GetItem(cache, key);
+  if (subcache == NULL)
+    {
+      int status;
+ 
+      subcache = PyDict_New();
+      if (subcache == NULL)
+        return NULL;
+      status = PyDict_SetItem(cache, key, subcache);
+      Py_DECREF(subcache);
+      if (status < 0)
+        return NULL;
+    }
+
+  return subcache;
+}
+static PyObject *
+_getcache(lookup *self, PyObject *provided, PyObject *name)
+{
+  PyObject *cache;
+
+  ASSURE_DICT(self->_cache);
+  cache = _subcache(self->_cache, provided);
+  if (cache == NULL)
+    return NULL;
+
+  if (name != NULL && PyObject_IsTrue(name))
+    cache = _subcache(cache, name);
+
+  return cache;
+}
+
+
+/*  
+    def lookup(self, required, provided, name=u'', default=None):
+        cache = self._getcache(provided, name)
+        if len(required) == 1:
+            result = cache.get(required[0], _not_in_mapping)
+        else:
+            result = cache.get(tuple(required), _not_in_mapping)
+
+        if result is _not_in_mapping:
+            result = self._uncached_lookup(required, provided, name)
+            if len(required) == 1:
+                cache[required[0]] = result
+            else:
+                cache[tuple(required)] = result
+
+        if result is None:
+            return default
+
+        return result
+*/
+static PyObject *
+tuplefy(PyObject *v)
+{
+  if (! PyTuple_Check(v))
+    {
+      v = PyObject_CallFunctionObjArgs(OBJECT(&PyTuple_Type), v, NULL);
+      if (v == NULL)
+        return NULL;
+    }
+  else
+    Py_INCREF(v);
+  
+  return v;
+}
+static PyObject *
+_lookup(lookup *self, 
+        PyObject *required, PyObject *provided, PyObject *name, 
+        PyObject *default_)
+{
+  PyObject *result, *key, *cache;
+
+  cache = _getcache(self, provided, name);
+  if (cache == NULL)
+    return NULL;
+
+  required = tuplefy(required);
+  if (required == NULL)
+    return NULL;
+
+  if (PyTuple_GET_SIZE(required) == 1)
+    key = PyTuple_GET_ITEM(required, 0);
+  else
+    key = required;
+
+  result = PyDict_GetItem(cache, key);
+  if (result == NULL)
+    {
+      int status;
+
+      result = PyObject_CallMethodObjArgs(OBJECT(self), str_uncached_lookup,
+                                          required, provided, name, NULL);
+      if (result == NULL)
+        {
+          Py_DECREF(required);
+          return NULL;
+        }
+      status = PyDict_SetItem(cache, key, result);
+      Py_DECREF(required);
+      if (status < 0)
+        {
+          Py_DECREF(result);
+          return NULL;
+        }
+    }
+  else
+    {
+      Py_INCREF(result);
+      Py_DECREF(required);
+    }
+
+  if (result == Py_None && default_ != NULL)
+    {
+      Py_DECREF(Py_None);
+      Py_INCREF(default_);
+      return default_;
+    }
+
+  return result;
+}
+static PyObject *
+lookup_lookup(lookup *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"required", "provided", "name", "default", NULL};
+  PyObject *required, *provided, *name=NULL, *default_=NULL;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist,
+                                    &required, &provided, &name, &default_))
+    return NULL; 
+
+  return _lookup(self, required, provided, name, default_);
+}
+
+
+/*  
+    def lookup1(self, required, provided, name=u'', default=None):
+        cache = self._getcache(provided, name)
+        result = cache.get(required, _not_in_mapping)
+        if result is _not_in_mapping:
+            return self.lookup((required, ), provided, name, default)
+
+        if result is None:
+            return default
+
+        return result
+*/
+static PyObject *
+_lookup1(lookup *self, 
+        PyObject *required, PyObject *provided, PyObject *name, 
+        PyObject *default_)
+{
+  PyObject *result, *cache;
+
+  cache = _getcache(self, provided, name);
+  if (cache == NULL)
+    return NULL;
+
+  result = PyDict_GetItem(cache, required);
+  if (result == NULL)
+    {
+      PyObject *tup;
+
+      tup = PyTuple_New(1);
+      if (tup == NULL)
+        return NULL;
+      Py_INCREF(required);
+      PyTuple_SET_ITEM(tup, 0, required);
+      result = _lookup(self, tup, provided, name, default_);
+      Py_DECREF(tup);
+    }
+  else
+    {
+      if (result == Py_None && default_ != NULL)
+        {
+          result = default_;
+        }
+      Py_INCREF(result);
+    }
+
+  return result;
+}
+static PyObject *
+lookup_lookup1(lookup *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"required", "provided", "name", "default", NULL};
+  PyObject *required, *provided, *name=NULL, *default_=NULL;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist,
+                                    &required, &provided, &name, &default_))
+    return NULL; 
+
+  return _lookup1(self, required, provided, name, default_);
+}
+
+/*  
+    def adapter_hook(self, provided, object, name=u'', default=None):
+        required = providedBy(object)
+        cache = self._getcache(provided, name)
+        factory = cache.get(required, _not_in_mapping)
+        if factory is _not_in_mapping:
+            factory = self.lookup((required, ), provided, name)
+
+        if factory is not None:
+            result = factory(object)
+            if result is not None:
+                return result
+
+        return default
+*/
+static PyObject *
+_adapter_hook(lookup *self, 
+              PyObject *provided, PyObject *object,  PyObject *name, 
+              PyObject *default_)
+{
+  PyObject *required, *factory, *result;
+
+  required = providedBy(NULL, object);
+  if (required == NULL)
+    return NULL;
+  
+  factory = _lookup1(self, required, provided, name, Py_None);
+  Py_DECREF(required);
+  if (factory == NULL)
+    return NULL;
+  
+  if (factory != Py_None)
+    {
+      result = PyObject_CallFunctionObjArgs(factory, object, NULL);
+      Py_DECREF(factory);
+      if (result == NULL || result != Py_None)
+        return result;
+    }
+  else
+    result = factory; /* None */
+
+  if (default_ == NULL || default_ == result) /* No default specified, */
+    return result;   /* Return None.  result is owned None */
+
+  Py_DECREF(result);
+  Py_INCREF(default_);
+
+  return default_;
+}
+static PyObject *
+lookup_adapter_hook(lookup *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"provided", "object", "name", "default", NULL};
+  PyObject *object, *provided, *name=NULL, *default_=NULL;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist,
+                                    &provided, &object, &name, &default_))
+    return NULL; 
+
+  return _adapter_hook(self, provided, object, name, default_);
+}
+
+static PyObject *
+lookup_queryAdapter(lookup *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"object", "provided", "name", "default", NULL};
+  PyObject *object, *provided, *name=NULL, *default_=NULL;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist,
+                                    &object, &provided, &name, &default_))
+    return NULL; 
+
+  return _adapter_hook(self, provided, object, name, default_);
+}
+
+/*  
+    def lookupAll(self, required, provided):
+        cache = self._mcache.get(provided)
+        if cache is None:
+            cache = {}
+            self._mcache[provided] = cache
+
+        required = tuple(required)
+        result = cache.get(required, _not_in_mapping)
+        if result is _not_in_mapping:
+            result = self._uncached_lookupAll(required, provided)
+            cache[required] = result
+
+        return result
+*/
+static PyObject *
+_lookupAll(lookup *self, PyObject *required, PyObject *provided)
+{
+  PyObject *cache, *result;
+
+  ASSURE_DICT(self->_mcache);
+  cache = _subcache(self->_mcache, provided);
+  if (cache == NULL)
+    return NULL;
+
+  required = tuplefy(required);
+  if (required == NULL)
+    return NULL;
+
+  result = PyDict_GetItem(cache, required);
+  if (result == NULL)
+    {
+      int status;
+
+      result = PyObject_CallMethodObjArgs(OBJECT(self), str_uncached_lookupAll,
+                                          required, provided, NULL);
+      if (result == NULL)
+        {
+          Py_DECREF(required);
+          return NULL;
+        }
+      status = PyDict_SetItem(cache, required, result);
+      Py_DECREF(required);
+      if (status < 0)
+        {
+          Py_DECREF(result);
+          return NULL;
+        }
+    }
+  else
+    {
+      Py_INCREF(result);
+      Py_DECREF(required);
+    }
+
+  return result;  
+}
+static PyObject *
+lookup_lookupAll(lookup *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"required", "provided", NULL};
+  PyObject *required, *provided;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist,
+                                    &required, &provided))
+    return NULL; 
+
+  return _lookupAll(self, required, provided);
+}
+
+/*  
+    def subscriptions(self, required, provided):
+        cache = self._scache.get(provided)
+        if cache is None:
+            cache = {}
+            self._scache[provided] = cache
+
+        required = tuple(required)
+        result = cache.get(required, _not_in_mapping)
+        if result is _not_in_mapping:
+            result = self._uncached_subscriptions(required, provided)
+            cache[required] = result
+
+        return result
+*/
+static PyObject *
+_subscriptions(lookup *self, PyObject *required, PyObject *provided)
+{
+  PyObject *cache, *result;
+
+  ASSURE_DICT(self->_scache);
+  cache = _subcache(self->_scache, provided);
+  if (cache == NULL)
+    return NULL;
+
+  required = tuplefy(required);
+  if (required == NULL)
+    return NULL;
+
+  result = PyDict_GetItem(cache, required);
+  if (result == NULL)
+    {
+      int status;
+
+      result = PyObject_CallMethodObjArgs(
+                                 OBJECT(self), str_uncached_subscriptions,
+                                 required, provided, NULL);
+      if (result == NULL)
+        {
+          Py_DECREF(required);
+          return NULL;
+        }
+      status = PyDict_SetItem(cache, required, result);
+      Py_DECREF(required);
+      if (status < 0)
+        {
+          Py_DECREF(result);
+          return NULL;
+        }
+    }
+  else
+    {
+      Py_INCREF(result);
+      Py_DECREF(required);
+    }
+
+  return result;  
+}
+static PyObject *
+lookup_subscriptions(lookup *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"required", "provided", NULL};
+  PyObject *required, *provided;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist,
+                                    &required, &provided))
+    return NULL; 
+
+  return _subscriptions(self, required, provided);
+}
+
+static struct PyMethodDef lookup_methods[] = {
+  {"changed",	    (PyCFunction)lookup_changed,       METH_O,        ""},
+  {"lookup",	    (PyCFunction)lookup_lookup,	       METH_KEYWORDS, ""},
+  {"lookup1",	    (PyCFunction)lookup_lookup1,       METH_KEYWORDS, ""},
+  {"queryAdapter",  (PyCFunction)lookup_queryAdapter,  METH_KEYWORDS, ""},
+  {"adapter_hook",  (PyCFunction)lookup_adapter_hook,  METH_KEYWORDS, ""},
+  {"lookupAll",	    (PyCFunction)lookup_lookupAll,     METH_KEYWORDS, ""},
+  {"subscriptions", (PyCFunction)lookup_subscriptions, METH_KEYWORDS, ""},
+  {NULL,	    NULL}		/* sentinel */
+};
+
+static PyTypeObject LookupBase = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	/* tp_name           */ "_zope_interface_coptimizations."
+                                "LookupBase",
+	/* tp_basicsize      */ sizeof(lookup),
+	/* tp_itemsize       */ 0,
+	/* tp_dealloc        */ (destructor)&lookup_dealloc,
+	/* tp_print          */ (printfunc)0,
+	/* tp_getattr        */ (getattrfunc)0,
+	/* tp_setattr        */ (setattrfunc)0,
+	/* tp_compare        */ 0,
+	/* tp_repr           */ (reprfunc)0,
+	/* tp_as_number      */ 0,
+	/* tp_as_sequence    */ 0,
+	/* tp_as_mapping     */ 0,
+	/* tp_hash           */ (hashfunc)0,
+	/* tp_call           */ (ternaryfunc)0,
+	/* tp_str            */ (reprfunc)0,
+        /* tp_getattro       */ (getattrofunc)0,
+        /* tp_setattro       */ (setattrofunc)0,
+        /* tp_as_buffer      */ 0,
+        /* tp_flags          */ Py_TPFLAGS_DEFAULT
+				| Py_TPFLAGS_BASETYPE 
+                          	| Py_TPFLAGS_HAVE_GC,
+	/* tp_doc            */ "",
+        /* tp_traverse       */ (traverseproc)lookup_traverse,
+        /* tp_clear          */ (inquiry)lookup_clear,
+        /* tp_richcompare    */ (richcmpfunc)0,
+        /* tp_weaklistoffset */ (long)0,
+        /* tp_iter           */ (getiterfunc)0,
+        /* tp_iternext       */ (iternextfunc)0,
+        /* tp_methods        */ lookup_methods,
+};
+
+static int
+verifying_traverse(verify *self, visitproc visit, void *arg)
+{
+  int vret;
+
+  vret = lookup_traverse((lookup *)self, visit, arg);
+  if (vret != 0)
+    return vret;
+
+  if (self->_verify_ro) {
+    vret = visit(self->_verify_ro, arg);
+    if (vret != 0)
+      return vret;
+  }
+  if (self->_verify_generations) {
+    vret = visit(self->_verify_generations, arg);
+    if (vret != 0)
+      return vret;
+  }
+  
+  return 0;
+}
+
+static int
+verifying_clear(verify *self)
+{
+  lookup_clear((lookup *)self);
+  Py_CLEAR(self->_verify_generations);
+  Py_CLEAR(self->_verify_ro);
+  return 0;
+}
+
+
+static void
+verifying_dealloc(verify *self)
+{
+  verifying_clear(self);
+  Py_TYPE(self)->tp_free((PyObject*)self);
+}
+
+/*  
+    def changed(self, originally_changed):
+        super(VerifyingBasePy, self).changed(originally_changed)
+        self._verify_ro = self._registry.ro[1:]
+        self._verify_generations = [r._generation for r in self._verify_ro]
+*/
+static PyObject *
+_generations_tuple(PyObject *ro)
+{
+  int i, l;
+  PyObject *generations;
+  
+  l = PyTuple_GET_SIZE(ro);
+  generations = PyTuple_New(l);
+  for (i=0; i < l; i++)
+    {
+      PyObject *generation;
+      
+      generation = PyObject_GetAttr(PyTuple_GET_ITEM(ro, i), str_generation);
+      if (generation == NULL)
+        {
+          Py_DECREF(generations);
+          return NULL;
+        }
+      PyTuple_SET_ITEM(generations, i, generation);
+    }
+
+  return generations;
+}
+static PyObject *
+verifying_changed(verify *self, PyObject *ignored)
+{
+  PyObject *t, *ro;
+
+  verifying_clear(self);
+
+  t = PyObject_GetAttr(OBJECT(self), str_registry);
+  if (t == NULL)
+    return NULL;
+  ro = PyObject_GetAttr(t, strro);
+  Py_DECREF(t);
+  if (ro == NULL)
+    return NULL;
+
+  t = PyObject_CallFunctionObjArgs(OBJECT(&PyTuple_Type), ro, NULL);
+  Py_DECREF(ro);
+  if (t == NULL)
+    return NULL;
+
+  ro = PyTuple_GetSlice(t, 1, PyTuple_GET_SIZE(t));
+  Py_DECREF(t);
+  if (ro == NULL)
+    return NULL;
+  
+  self->_verify_generations = _generations_tuple(ro);
+  if (self->_verify_generations == NULL)
+    {
+      Py_DECREF(ro);
+      return NULL;
+    }
+
+  self->_verify_ro = ro;
+
+  Py_INCREF(Py_None);
+  return Py_None;
+}
+
+/*  
+    def _verify(self):
+        if ([r._generation for r in self._verify_ro]
+            != self._verify_generations):
+            self.changed(None)
+*/
+static int
+_verify(verify *self)
+{
+  PyObject *changed_result;
+
+  if (self->_verify_ro != NULL && self->_verify_generations != NULL)
+    {
+      PyObject *generations;
+      int changed;
+
+      generations = _generations_tuple(self->_verify_ro);
+      if (generations == NULL)
+        return -1;
+
+      changed = PyObject_RichCompareBool(self->_verify_generations, 
+					 generations, Py_NE);
+      Py_DECREF(generations);
+      if (changed == -1)
+        return -1;
+      
+      if (changed == 0)
+        return 0;
+    }
+
+  changed_result = PyObject_CallMethodObjArgs(OBJECT(self), strchanged, 
+                                              Py_None, NULL);
+  if (changed_result == NULL)
+    return -1;
+
+  Py_DECREF(changed_result);
+  return 0;
+}
+
+static PyObject *
+verifying_lookup(verify *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"required", "provided", "name", "default", NULL};
+  PyObject *required, *provided, *name=NULL, *default_=NULL;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist,
+                                    &required, &provided, &name, &default_))
+    return NULL; 
+
+  if (_verify(self) < 0)
+    return NULL;
+
+  return _lookup((lookup *)self, required, provided, name, default_);
+}
+
+static PyObject *
+verifying_lookup1(verify *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"required", "provided", "name", "default", NULL};
+  PyObject *required, *provided, *name=NULL, *default_=NULL;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist,
+                                    &required, &provided, &name, &default_))
+    return NULL; 
+
+  if (_verify(self) < 0)
+    return NULL;
+
+  return _lookup1((lookup *)self, required, provided, name, default_);
+}
+
+static PyObject *
+verifying_adapter_hook(verify *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"provided", "object", "name", "default", NULL};
+  PyObject *object, *provided, *name=NULL, *default_=NULL;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist,
+                                    &provided, &object, &name, &default_))
+    return NULL; 
+
+  if (_verify(self) < 0)
+    return NULL;
+
+  return _adapter_hook((lookup *)self, provided, object, name, default_);
+}
+
+static PyObject *
+verifying_queryAdapter(verify *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"object", "provided", "name", "default", NULL};
+  PyObject *object, *provided, *name=NULL, *default_=NULL;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO|OO", kwlist,
+                                    &object, &provided, &name, &default_))
+    return NULL; 
+
+  if (_verify(self) < 0)
+    return NULL;
+
+  return _adapter_hook((lookup *)self, provided, object, name, default_);
+}
+
+static PyObject *
+verifying_lookupAll(verify *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"required", "provided", NULL};
+  PyObject *required, *provided;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist,
+                                    &required, &provided))
+    return NULL; 
+
+  if (_verify(self) < 0)
+    return NULL;
+
+  return _lookupAll((lookup *)self, required, provided);
+}
+
+static PyObject *
+verifying_subscriptions(verify *self, PyObject *args, PyObject *kwds)
+{
+  static char *kwlist[] = {"required", "provided", NULL};
+  PyObject *required, *provided;
+
+  if (! PyArg_ParseTupleAndKeywords(args, kwds, "OO", kwlist,
+                                    &required, &provided))
+    return NULL; 
+
+  if (_verify(self) < 0)
+    return NULL;
+
+  return _subscriptions((lookup *)self, required, provided);
+}
+
+static struct PyMethodDef verifying_methods[] = {
+  {"changed",	   (PyCFunction)verifying_changed,	  METH_O,        ""},
+  {"lookup",	   (PyCFunction)verifying_lookup,	  METH_KEYWORDS, ""},
+  {"lookup1",	   (PyCFunction)verifying_lookup1,	  METH_KEYWORDS, ""},
+  {"queryAdapter",  (PyCFunction)verifying_queryAdapter,  METH_KEYWORDS, ""},
+  {"adapter_hook",  (PyCFunction)verifying_adapter_hook,  METH_KEYWORDS, ""},
+  {"lookupAll",	   (PyCFunction)verifying_lookupAll,	  METH_KEYWORDS, ""},
+  {"subscriptions", (PyCFunction)verifying_subscriptions, METH_KEYWORDS, ""},
+  {NULL,	    NULL}		/* sentinel */
+};
+
+static PyTypeObject VerifyingBase = {
+	PyVarObject_HEAD_INIT(NULL, 0)
+	/* tp_name           */ "_zope_interface_coptimizations."
+                                "VerifyingBase",
+	/* tp_basicsize      */ sizeof(verify),
+	/* tp_itemsize       */ 0,
+	/* tp_dealloc        */ (destructor)&verifying_dealloc,
+	/* tp_print          */ (printfunc)0,
+	/* tp_getattr        */ (getattrfunc)0,
+	/* tp_setattr        */ (setattrfunc)0,
+	/* tp_compare        */ 0,
+	/* tp_repr           */ (reprfunc)0,
+	/* tp_as_number      */ 0,
+	/* tp_as_sequence    */ 0,
+	/* tp_as_mapping     */ 0,
+	/* tp_hash           */ (hashfunc)0,
+	/* tp_call           */ (ternaryfunc)0,
+	/* tp_str            */ (reprfunc)0,
+        /* tp_getattro       */ (getattrofunc)0,
+        /* tp_setattro       */ (setattrofunc)0,
+        /* tp_as_buffer      */ 0,
+        /* tp_flags          */ Py_TPFLAGS_DEFAULT
+				| Py_TPFLAGS_BASETYPE 
+                          	| Py_TPFLAGS_HAVE_GC,
+	/* tp_doc            */ "",
+        /* tp_traverse       */ (traverseproc)verifying_traverse,
+        /* tp_clear          */ (inquiry)verifying_clear,
+        /* tp_richcompare    */ (richcmpfunc)0,
+        /* tp_weaklistoffset */ (long)0,
+        /* tp_iter           */ (getiterfunc)0,
+        /* tp_iternext       */ (iternextfunc)0,
+        /* tp_methods        */ verifying_methods,
+        /* tp_members        */ 0,
+        /* tp_getset         */ 0,
+        /* tp_base           */ &LookupBase,
+};
+
+/* ========================== End: Lookup Bases ======================= */
+/* ==================================================================== */
+
+
+
+static struct PyMethodDef m_methods[] = {
+  {"implementedBy", (PyCFunction)implementedBy, METH_O,
+   "Interfaces implemented by a class or factory.\n"
+   "Raises TypeError if argument is neither a class nor a callable."},
+  {"getObjectSpecification", (PyCFunction)getObjectSpecification, METH_O,
+   "Get an object's interfaces (internal api)"},
+  {"providedBy", (PyCFunction)providedBy, METH_O,
+   "Get an object's interfaces"},
+  
+  {NULL,	 (PyCFunction)NULL, 0, NULL}		/* sentinel */
+};
+
+#if  PY_MAJOR_VERSION >= 3
+static char module_doc[] = "C optimizations for zope.interface\n\n";
+
+static struct PyModuleDef _zic_module = {
+	PyModuleDef_HEAD_INIT,
+	"_zope_interface_coptimizations",
+	module_doc,
+	-1,
+	m_methods,
+	NULL,
+	NULL,
+	NULL,
+	NULL
+};
+#endif
+
+static PyObject *
+init(void)
+{
+  PyObject *m;
+
+#if  PY_MAJOR_VERSION < 3
+#define DEFINE_STRING(S) \
+  if(! (str ## S = PyString_FromString(# S))) return NULL
+#else
+#define DEFINE_STRING(S) \
+  if(! (str ## S = PyUnicode_FromString(# S))) return NULL
+#endif
+
+  DEFINE_STRING(__dict__);
+  DEFINE_STRING(__implemented__);
+  DEFINE_STRING(__provides__);
+  DEFINE_STRING(__class__);
+  DEFINE_STRING(__providedBy__);
+  DEFINE_STRING(extends);
+  DEFINE_STRING(_implied);
+  DEFINE_STRING(_implements);
+  DEFINE_STRING(_cls);
+  DEFINE_STRING(__conform__);
+  DEFINE_STRING(_call_conform);
+  DEFINE_STRING(_uncached_lookup);
+  DEFINE_STRING(_uncached_lookupAll);
+  DEFINE_STRING(_uncached_subscriptions);
+  DEFINE_STRING(_registry);
+  DEFINE_STRING(_generation);
+  DEFINE_STRING(ro);
+  DEFINE_STRING(changed);
+#undef DEFINE_STRING
+  adapter_hooks = PyList_New(0);
+  if (adapter_hooks == NULL)
+    return NULL;
+        
+  /* Initialize types: */
+  SpecType.tp_new = PyBaseObject_Type.tp_new;
+  if (PyType_Ready(&SpecType) < 0)
+    return NULL;
+  OSDType.tp_new = PyBaseObject_Type.tp_new;
+  if (PyType_Ready(&OSDType) < 0)
+    return NULL;
+  CPBType.tp_new = PyBaseObject_Type.tp_new;
+  if (PyType_Ready(&CPBType) < 0)
+    return NULL;
+
+  InterfaceBase.tp_new = PyBaseObject_Type.tp_new;
+  if (PyType_Ready(&InterfaceBase) < 0)
+    return NULL;
+
+  LookupBase.tp_new = PyBaseObject_Type.tp_new;
+  if (PyType_Ready(&LookupBase) < 0)
+    return NULL;
+
+  VerifyingBase.tp_new = PyBaseObject_Type.tp_new;
+  if (PyType_Ready(&VerifyingBase) < 0)
+    return NULL;
+
+  #if PY_MAJOR_VERSION < 3
+  /* Create the module and add the functions */
+  m = Py_InitModule3("_zope_interface_coptimizations", m_methods,
+                     "C optimizations for zope.interface\n\n");
+  #else
+  m = PyModule_Create(&_zic_module);
+  #endif
+  if (m == NULL)
+    return NULL;
+
+  /* Add types: */
+  if (PyModule_AddObject(m, "SpecificationBase", OBJECT(&SpecType)) < 0)
+    return NULL;
+  if (PyModule_AddObject(m, "ObjectSpecificationDescriptor", 
+                         (PyObject *)&OSDType) < 0)
+    return NULL;
+  if (PyModule_AddObject(m, "ClassProvidesBase", OBJECT(&CPBType)) < 0)
+    return NULL;
+  if (PyModule_AddObject(m, "InterfaceBase", OBJECT(&InterfaceBase)) < 0)
+    return NULL;
+  if (PyModule_AddObject(m, "LookupBase", OBJECT(&LookupBase)) < 0)
+    return NULL;
+  if (PyModule_AddObject(m, "VerifyingBase", OBJECT(&VerifyingBase)) < 0)
+    return NULL;
+  if (PyModule_AddObject(m, "adapter_hooks", adapter_hooks) < 0)
+    return NULL;
+  return m;
+}
+
+PyMODINIT_FUNC
+#if PY_MAJOR_VERSION < 3
+init_zope_interface_coptimizations(void)
+{
+  init();
+}
+#else
+PyInit__zope_interface_coptimizations(void)
+{
+  return init();
+}
+#endif
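
The __adapt__ implementation above walks the module-level adapter_hooks list;
as the embedded pseudo-code shows, each hook is called as hook(interface, obj)
and returns either an adapter or None to decline. A sketch of that protocol
(example_hook and the commented IFoo call are hypothetical, not upstream API):

    from zope.interface.interface import adapter_hooks

    def example_hook(iface, obj):
        # Return an object providing `iface` adapted from `obj`,
        # or None so the next registered hook (if any) is tried.
        return None

    adapter_hooks.append(example_hook)
    try:
        pass  # IFoo(obj) would now consult example_hook as a fallback
    finally:
        adapter_hooks.remove(example_hook)
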
diff --git a/ThirdParty/ZopeInterface/zope/interface/adapter.py b/ThirdParty/ZopeInterface/zope/interface/adapter.py
new file mode 100644
index 0000000..feee8d0
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/adapter.py
@@ -0,0 +1,704 @@
+##############################################################################
+#
+# Copyright (c) 2004 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Adapter management
+"""
+import weakref
+
+from zope.interface import providedBy
+from zope.interface import Interface
+from zope.interface import ro
+from zope.interface._compat import _u
+from zope.interface._compat import _normalize_name
+
+_BLANK = _u('')
+
+class BaseAdapterRegistry(object):
+
+    # List of methods copied from lookup sub-objects:
+    _delegated = ('lookup', 'queryMultiAdapter', 'lookup1', 'queryAdapter',
+                  'adapter_hook', 'lookupAll', 'names',
+                  'subscriptions', 'subscribers')
+
+    # All registries maintain a generation that can be used by verifying
+    # registries
+    _generation = 0
+
+    def __init__(self, bases=()):
+
+        # The comments here could be improved. Possibly this bit needs
+        # explaining in a separate document, as the comments here can
+        # be quite confusing. /regebro
+
+        # {order -> {required -> {provided -> {name -> value}}}}
+        # Here "order" is actually an index in a list, "required" and
+        # "provided" are interfaces, and "required" is really a nested
+        # key.  So, for example:
+        # for order == 0 (that is, self._adapters[0]), we have:
+        #   {provided -> {name -> value}}
+        # but for order == 2 (that is, self._adapters[2]), we have:
+        #   {r1 -> {r2 -> {provided -> {name -> value}}}}
+        #
+        self._adapters = []
+
+        # {order -> {required -> {provided -> {name -> [value]}}}}
+        # where the remarks about adapters above apply
+        self._subscribers = []
+
+        # Set, with a reference count, keeping track of the interfaces
+        # for which we have provided components:
+        self._provided = {}
+
+        # Create ``_v_lookup`` object to perform lookup.  We make this a
+        # separate object to make it easier to implement just the
+        # lookup functionality in C.  This object keeps track of cache
+        # invalidation data in two kinds of registries.
+
+        #   Invalidating registries have caches that are invalidated
+        #     when they or their base registries change.  An invalidating
+        #     registry can only have invalidating registries as bases.
+        #     See LookupBaseFallback below for the pertinent logic.
+
+        #   Verifying registries can't rely on getting invalidation messages,
+        #     so have to check the generations of base registries to determine
+        #     if their cache data are current.  See VerifyingBasePy below
+        #     for the pertinent object.
+        self._createLookup()
+
+        # Setting the bases causes the registries described above
+        # to be initialized (self._setBases -> self.changed ->
+        # self._v_lookup.changed).
+
+        self.__bases__ = bases
+
+    def _setBases(self, bases):
+        self.__dict__['__bases__'] = bases
+        self.ro = ro.ro(self)
+        self.changed(self)
+
+    __bases__ = property(lambda self: self.__dict__['__bases__'],
+                         lambda self, bases: self._setBases(bases),
+                         )
+
+    def _createLookup(self):
+        self._v_lookup = self.LookupClass(self)
+        for name in self._delegated:
+            self.__dict__[name] = getattr(self._v_lookup, name)
+
+    def changed(self, originally_changed):
+        self._generation += 1
+        self._v_lookup.changed(originally_changed)
+
+    def register(self, required, provided, name, value):
+        if value is None:
+            self.unregister(required, provided, name, value)
+            return
+
+        required = tuple(map(_convert_None_to_Interface, required))
+        name = _normalize_name(name)
+        order = len(required)
+        byorder = self._adapters
+        while len(byorder) <= order:
+            byorder.append({})
+        components = byorder[order]
+        key = required + (provided,)
+
+        for k in key:
+            d = components.get(k)
+            if d is None:
+                d = {}
+                components[k] = d
+            components = d
+
+        if components.get(name) is value:
+            return
+
+        components[name] = value
+
+        n = self._provided.get(provided, 0) + 1
+        self._provided[provided] = n
+        if n == 1:
+            self._v_lookup.add_extendor(provided)
+
+        self.changed(self)
+
+    def registered(self, required, provided, name=_BLANK):
+        required = tuple(map(_convert_None_to_Interface, required))
+        name = _normalize_name(name)
+        order = len(required)
+        byorder = self._adapters
+        if len(byorder) <= order:
+            return None
+
+        components = byorder[order]
+        key = required + (provided,)
+
+        for k in key:
+            d = components.get(k)
+            if d is None:
+                return None
+            components = d
+
+        return components.get(name)
+
+    def unregister(self, required, provided, name, value=None):
+        required = tuple(map(_convert_None_to_Interface, required))
+        order = len(required)
+        byorder = self._adapters
+        if order >= len(byorder):
+            return False
+        components = byorder[order]
+        key = required + (provided,)
+
+        # Keep track of how we got to `components`:
+        lookups = []
+        for k in key:
+            d = components.get(k)
+            if d is None:
+                return
+            lookups.append((components, k))
+            components = d
+
+        old = components.get(name)
+        if old is None:
+            return
+        if (value is not None) and (old is not value):
+            return
+
+        del components[name]
+        if not components:
+            # Clean out empty containers, since we don't want our keys
+            # to reference global objects (interfaces) unnecessarily.
+            # This is often a problem when an interface is slated for
+            # removal; a hold-over entry in the registry can make it
+            # difficult to remove such interfaces.
+            for comp, k in reversed(lookups):
+                d = comp[k]
+                if d:
+                    break
+                else:
+                    del comp[k]
+            while byorder and not byorder[-1]:
+                del byorder[-1]
+        n = self._provided[provided] - 1
+        if n == 0:
+            del self._provided[provided]
+            self._v_lookup.remove_extendor(provided)
+        else:
+            self._provided[provided] = n
+
+        self.changed(self)
+
+    def subscribe(self, required, provided, value):
+        required = tuple(map(_convert_None_to_Interface, required))
+        name = _BLANK
+        order = len(required)
+        byorder = self._subscribers
+        while len(byorder) <= order:
+            byorder.append({})
+        components = byorder[order]
+        key = required + (provided,)
+
+        for k in key:
+            d = components.get(k)
+            if d is None:
+                d = {}
+                components[k] = d
+            components = d
+
+        components[name] = components.get(name, ()) + (value, )
+
+        if provided is not None:
+            n = self._provided.get(provided, 0) + 1
+            self._provided[provided] = n
+            if n == 1:
+                self._v_lookup.add_extendor(provided)
+
+        self.changed(self)
+
+    def unsubscribe(self, required, provided, value=None):
+        required = tuple(map(_convert_None_to_Interface, required))
+        order = len(required)
+        byorder = self._subscribers
+        if order >= len(byorder):
+            return
+        components = byorder[order]
+        key = required + (provided,)
+
+        # Keep track of how we got to `components`:
+        lookups = []
+        for k in key:
+            d = components.get(k)
+            if d is None:
+                return
+            lookups.append((components, k))
+            components = d
+
+        old = components.get(_BLANK)
+        if not old:
+            # this is belt-and-suspenders against the failure of cleanup below
+            return  #pragma NO COVERAGE 
+
+        if value is None:
+            new = ()
+        else:
+            new = tuple([v for v in old if v is not value])
+
+        if new == old:
+            return
+
+        if new:
+            components[_BLANK] = new
+        else:
+            # Instead of setting components[_BLANK] = new, we clean out
+            # empty containers, since we don't want our keys to
+            # reference global objects (interfaces) unnecessarily.  This
+            # is often a problem when an interface is slated for
+            # removal; a hold-over entry in the registry can make it
+            # difficult to remove such interfaces.
+            if _BLANK in components:
+                del components[_BLANK]
+            for comp, k in reversed(lookups):
+                d = comp[k]
+                if d:
+                    break
+                else:
+                    del comp[k]
+            while byorder and not byorder[-1]:
+                del byorder[-1]
+
+        if provided is not None:
+            n = self._provided[provided] + len(new) - len(old)
+            if n == 0:
+                del self._provided[provided]
+                self._v_lookup.remove_extendor(provided)
+            else:
+                # keep the reference count in step with unregister() above
+                self._provided[provided] = n
+
+        self.changed(self)
+
+    # XXX hack to fake out twisted's use of a private api.  We need to get them
+    # to use the new registered method.
+    def get(self, _): #pragma NO COVER
+        class XXXTwistedFakeOut:
+            selfImplied = {}
+        return XXXTwistedFakeOut
+
+
+_not_in_mapping = object()
+class LookupBaseFallback(object):
+
+    def __init__(self):
+        self._cache = {}
+        self._mcache = {}
+        self._scache = {}
+
+    def changed(self, ignored=None):
+        self._cache.clear()
+        self._mcache.clear()
+        self._scache.clear()
+
+    def _getcache(self, provided, name):
+        cache = self._cache.get(provided)
+        if cache is None:
+            cache = {}
+            self._cache[provided] = cache
+        if name:
+            c = cache.get(name)
+            if c is None:
+                c = {}
+                cache[name] = c
+            cache = c
+        return cache
+
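+    # Cache layout (illustrative note, not upstream code): ``_getcache``
+    # returns ``self._cache[provided]``, nested one level deeper under
+    # ``name`` when a non-empty name is given; ``lookup`` then keys that
+    # dict by the single required spec (or by the tuple of specs) and caches
+    # even ``None`` results, so repeated misses stay cheap.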
+    def lookup(self, required, provided, name=_BLANK, default=None):
+        cache = self._getcache(provided, name)
+        required = tuple(required)
+        if len(required) == 1:
+            result = cache.get(required[0], _not_in_mapping)
+        else:
+            result = cache.get(tuple(required), _not_in_mapping)
+
+        if result is _not_in_mapping:
+            result = self._uncached_lookup(required, provided, name)
+            if len(required) == 1:
+                cache[required[0]] = result
+            else:
+                cache[tuple(required)] = result
+
+        if result is None:
+            return default
+
+        return result
+
+    def lookup1(self, required, provided, name=_BLANK, default=None):
+        cache = self._getcache(provided, name)
+        result = cache.get(required, _not_in_mapping)
+        if result is _not_in_mapping:
+            return self.lookup((required, ), provided, name, default)
+
+        if result is None:
+            return default
+
+        return result
+
+    def queryAdapter(self, object, provided, name=_BLANK, default=None):
+        return self.adapter_hook(provided, object, name, default)
+
+    def adapter_hook(self, provided, object, name=_BLANK, default=None):
+        required = providedBy(object)
+        cache = self._getcache(provided, name)
+        factory = cache.get(required, _not_in_mapping)
+        if factory is _not_in_mapping:
+            factory = self.lookup((required, ), provided, name)
+
+        if factory is not None:
+            result = factory(object)
+            if result is not None:
+                return result
+
+        return default
+
+    def lookupAll(self, required, provided):
+        cache = self._mcache.get(provided)
+        if cache is None:
+            cache = {}
+            self._mcache[provided] = cache
+
+        required = tuple(required)
+        result = cache.get(required, _not_in_mapping)
+        if result is _not_in_mapping:
+            result = self._uncached_lookupAll(required, provided)
+            cache[required] = result
+
+        return result
+
+
+    def subscriptions(self, required, provided):
+        cache = self._scache.get(provided)
+        if cache is None:
+            cache = {}
+            self._scache[provided] = cache
+
+        required = tuple(required)
+        result = cache.get(required, _not_in_mapping)
+        if result is _not_in_mapping:
+            result = self._uncached_subscriptions(required, provided)
+            cache[required] = result
+
+        return result
+
+LookupBasePy = LookupBaseFallback # BBB
+
+try:
+    from _zope_interface_coptimizations import LookupBase
+except ImportError: #pragma NO COVER
+    LookupBase = LookupBaseFallback
+
+
+class VerifyingBaseFallback(LookupBaseFallback):
+    # Mixin for lookups against registries which "chain" upwards, and
+    # whose lookups invalidate their own caches whenever a parent registry
+    # bumps its own '_generation' counter.  E.g., used by 
+    # zope.component.persistentregistry
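+    #
+    # Illustrative sketch (not upstream code): if ``child`` lists ``base``
+    # in its ``__bases__``, a later ``base.register(...)`` bumps
+    # ``base._generation``; the next lookup through ``child`` notices the
+    # mismatch in ``_verify()`` and clears its caches before answering.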
+
+    def changed(self, originally_changed):
+        LookupBaseFallback.changed(self, originally_changed)
+        self._verify_ro = self._registry.ro[1:]
+        self._verify_generations = [r._generation for r in self._verify_ro]
+
+    def _verify(self):
+        if ([r._generation for r in self._verify_ro]
+            != self._verify_generations):
+            self.changed(None)
+
+    def _getcache(self, provided, name):
+        self._verify()
+        return LookupBaseFallback._getcache(self, provided, name)
+
+    def lookupAll(self, required, provided):
+        self._verify()
+        return LookupBaseFallback.lookupAll(self, required, provided)
+
+    def subscriptions(self, required, provided):
+        self._verify()
+        return LookupBaseFallback.subscriptions(self, required, provided)
+
+VerifyingBasePy = VerifyingBaseFallback #BBB
+
+try:
+    from _zope_interface_coptimizations import VerifyingBase
+except ImportError: #pragma NO COVER
+    VerifyingBase = VerifyingBaseFallback
+
+
+class AdapterLookupBase(object):
+
+    def __init__(self, registry):
+        self._registry = registry
+        self._required = {}
+        self.init_extendors()
+        super(AdapterLookupBase, self).__init__()
+
+    def changed(self, ignored=None):
+        super(AdapterLookupBase, self).changed(None)
+        for r in self._required.keys():
+            r = r()
+            if r is not None:
+                r.unsubscribe(self)
+        self._required.clear()
+
+
+    # Extendors
+    # ---------
+
+    # When given a target interface for an adapter lookup, we need to consider
+    # adapters for interfaces that extend the target interface.  This is
+    # what the extendors dictionary is about.  It tells us all of the
+    # interfaces that extend an interface for which there are adapters
+    # registered.
+
+    # We could separate this by order and name, thus reducing the
+    # number of provided interfaces to search at run time.  The tradeoff,
+    # however, is that we have to store more information.  For example,
+    # if the same interface is provided for multiple names and if the
+    # interface extends many interfaces, we'll have to keep track of
+    # a fair bit of information for each name.  It's better to
+    # be space efficient here and be time efficient in the cache
+    # implementation.
+
+    # TODO: add invalidation when a provided interface changes, in case
+    # the interface's __iro__ has changed.  This is unlikely enough that
+    # we'll take our chances for now.
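+
+    # Illustrative example (not upstream code; ``IBar`` and ``IBaz`` are
+    # hypothetical, with ``IBaz`` extending ``IBar``): once an adapter
+    # providing ``IBaz`` is registered, ``add_extendor(IBaz)`` records
+    # ``IBaz`` under every interface in ``IBaz.__iro__``, so
+    # ``self._extendors`` is roughly
+    # ``{IBaz: [IBaz], IBar: [IBaz], Interface: [IBaz]}`` and a lookup
+    # asking for ``IBar`` will also consider adapters that provide ``IBaz``.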
+
+    def init_extendors(self):
+        self._extendors = {}
+        for p in self._registry._provided:
+            self.add_extendor(p)
+
+    def add_extendor(self, provided):
+        _extendors = self._extendors
+        for i in provided.__iro__:
+            extendors = _extendors.get(i, ())
+            _extendors[i] = (
+                [e for e in extendors if provided.isOrExtends(e)]
+                +
+                [provided]
+                +
+                [e for e in extendors if not provided.isOrExtends(e)]
+                )
+
+    def remove_extendor(self, provided):
+        _extendors = self._extendors
+        for i in provided.__iro__:
+            _extendors[i] = [e for e in _extendors.get(i, ())
+                             if e != provided]
+
+
+    def _subscribe(self, *required):
+        _refs = self._required
+        for r in required:
+            ref = r.weakref()
+            if ref not in _refs:
+                r.subscribe(self)
+                _refs[ref] = 1
+
+    def _uncached_lookup(self, required, provided, name=_BLANK):
+        required = tuple(required)
+        result = None
+        order = len(required)
+        for registry in self._registry.ro:
+            byorder = registry._adapters
+            if order >= len(byorder):
+                continue
+
+            extendors = registry._v_lookup._extendors.get(provided)
+            if not extendors:
+                continue
+
+            components = byorder[order]
+            result = _lookup(components, required, extendors, name, 0,
+                             order)
+            if result is not None:
+                break
+
+        self._subscribe(*required)
+
+        return result
+
+    def queryMultiAdapter(self, objects, provided, name=_BLANK, default=None):
+        factory = self.lookup(map(providedBy, objects), provided, name)
+        if factory is None:
+            return default
+
+        result = factory(*objects)
+        if result is None:
+            return default
+
+        return result
+
+    def _uncached_lookupAll(self, required, provided):
+        required = tuple(required)
+        order = len(required)
+        result = {}
+        for registry in reversed(self._registry.ro):
+            byorder = registry._adapters
+            if order >= len(byorder):
+                continue
+            extendors = registry._v_lookup._extendors.get(provided)
+            if not extendors:
+                continue
+            components = byorder[order]
+            _lookupAll(components, required, extendors, result, 0, order)
+
+        self._subscribe(*required)
+
+        return tuple(result.items())
+
+    def names(self, required, provided):
+        return [c[0] for c in self.lookupAll(required, provided)]
+
+    def _uncached_subscriptions(self, required, provided):
+        required = tuple(required)
+        order = len(required)
+        result = []
+        for registry in reversed(self._registry.ro):
+            byorder = registry._subscribers
+            if order >= len(byorder):
+                continue
+
+            if provided is None:
+                extendors = (provided, )
+            else:
+                extendors = registry._v_lookup._extendors.get(provided)
+                if extendors is None:
+                    continue
+
+            _subscriptions(byorder[order], required, extendors, _BLANK,
+                           result, 0, order)
+
+        self._subscribe(*required)
+
+        return result
+
+    def subscribers(self, objects, provided):
+        subscriptions = self.subscriptions(map(providedBy, objects), provided)
+        if provided is None:
+            result = ()
+            for subscription in subscriptions:
+                subscription(*objects)
+        else:
+            result = []
+            for subscription in subscriptions:
+                subscriber = subscription(*objects)
+                if subscriber is not None:
+                    result.append(subscriber)
+        return result
+
+class AdapterLookup(AdapterLookupBase, LookupBase):
+    pass
+
+class AdapterRegistry(BaseAdapterRegistry):
+
+    LookupClass = AdapterLookup
+
+    def __init__(self, bases=()):
+        # AdapterRegistries are invalidating registries, so
+        # we need to keep track of our invalidating subregistries.
+        self._v_subregistries = weakref.WeakKeyDictionary()
+
+        super(AdapterRegistry, self).__init__(bases)
+
+    def _addSubregistry(self, r):
+        self._v_subregistries[r] = 1
+
+    def _removeSubregistry(self, r):
+        if r in self._v_subregistries:
+            del self._v_subregistries[r]
+
+    def _setBases(self, bases):
+        old = self.__dict__.get('__bases__', ())
+        for r in old:
+            if r not in bases:
+                r._removeSubregistry(self)
+        for r in bases:
+            if r not in old:
+                r._addSubregistry(self)
+
+        super(AdapterRegistry, self)._setBases(bases)
+
+    def changed(self, originally_changed):
+        super(AdapterRegistry, self).changed(originally_changed)
+
+        for sub in self._v_subregistries.keys():
+            sub.changed(originally_changed)
+
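+# Illustrative note (not part of the upstream code): because an
+# AdapterRegistry adds itself to each base's ``_v_subregistries`` in
+# ``_setBases``, a ``base.register(...)`` call propagates ``changed()`` to
+# every dependent registry right away; this eager notification is what makes
+# these registries "invalidating" rather than "verifying".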
+
+class VerifyingAdapterLookup(AdapterLookupBase, VerifyingBase):
+    pass
+
+class VerifyingAdapterRegistry(BaseAdapterRegistry):
+
+    LookupClass = VerifyingAdapterLookup
+
+def _convert_None_to_Interface(x):
+    if x is None:
+        return Interface
+    else:
+        return x
+
+def _lookup(components, specs, provided, name, i, l):
+    if i < l:
+        for spec in specs[i].__sro__:
+            comps = components.get(spec)
+            if comps:
+                r = _lookup(comps, specs, provided, name, i+1, l)
+                if r is not None:
+                    return r
+    else:
+        for iface in provided:
+            comps = components.get(iface)
+            if comps:
+                r = comps.get(name)
+                if r is not None:
+                    return r
+
+    return None
+
+def _lookupAll(components, specs, provided, result, i, l):
+    if i < l:
+        for spec in reversed(specs[i].__sro__):
+            comps = components.get(spec)
+            if comps:
+                _lookupAll(comps, specs, provided, result, i+1, l)
+    else:
+        for iface in reversed(provided):
+            comps = components.get(iface)
+            if comps:
+                result.update(comps)
+
+def _subscriptions(components, specs, provided, name, result, i, l):
+    if i < l:
+        for spec in reversed(specs[i].__sro__):
+            comps = components.get(spec)
+            if comps:
+                _subscriptions(comps, specs, provided, name, result, i+1, l)
+    else:
+        for iface in reversed(provided):
+            comps = components.get(iface)
+            if comps:
+                comps = comps.get(name)
+                if comps:
+                    result.extend(comps)
diff --git a/ThirdParty/ZopeInterface/zope/interface/advice.py b/ThirdParty/ZopeInterface/zope/interface/advice.py
new file mode 100644
index 0000000..788663f
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/advice.py
@@ -0,0 +1,206 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Class advice.
+
+This module was adapted from 'protocols.advice', part of the Python
+Enterprise Application Kit (PEAK).  Please notify the PEAK authors
+(pje at telecommunity.com and tsarna at sarna.org) if bugs are found or
+Zope-specific changes are required, so that the PEAK version of this module
+can be kept in sync.
+
+PEAK is a Python application framework that interoperates with (but does
+not require) Zope 3 and Twisted.  It provides tools for manipulating UML
+models, object-relational persistence, aspect-oriented programming, and more.
+Visit the PEAK home page at http://peak.telecommunity.com for more information.
+"""
+
+from types import FunctionType
+try:
+    from types import ClassType
+except ImportError:  #pragma NO COVER Python 3.x
+    __python3 = True
+else: #pragma NO COVER Python < 3.x
+    __python3 = False
+    
+import sys
+
+def getFrameInfo(frame):
+    """Return (kind,module,locals,globals) for a frame
+
+    'kind' is one of "exec", "module", "class", "function call", or "unknown".
+    """
+
+    f_locals = frame.f_locals
+    f_globals = frame.f_globals
+
+    sameNamespace = f_locals is f_globals
+    hasModule = '__module__' in f_locals
+    hasName = '__name__' in f_globals
+
+    sameName = hasModule and hasName
+    sameName = sameName and f_globals['__name__']==f_locals['__module__']
+
+    module = hasName and sys.modules.get(f_globals['__name__']) or None
+
+    namespaceIsModule = module and module.__dict__ is f_globals
+
+    if not namespaceIsModule:
+        # some kind of funky exec
+        kind = "exec"
+    elif sameNamespace and not hasModule:
+        kind = "module"
+    elif sameName and not sameNamespace:
+        kind = "class"
+    elif not sameNamespace:
+        kind = "function call"
+    else:   # pragma NO COVER
+        # How can you have f_locals is f_globals, and have '__module__' set?
+        # This is probably module-level code, but with a '__module__' variable.
+        kind = "unknown"
+    return kind, module, f_locals, f_globals
+
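+# Illustrative note (not part of the upstream module): at module import time
+# ``getFrameInfo(sys._getframe())`` reports ``kind == "module"``, inside a
+# class suite it reports ``"class"``, and inside an ordinary function body
+# it reports ``"function call"``.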
+
+def addClassAdvisor(callback, depth=2):
+    """Set up 'callback' to be passed the containing class upon creation
+
+    This function is designed to be called by an "advising" function executed
+    in a class suite.  The "advising" function supplies a callback that it
+    wishes to have executed when the containing class is created.  The
+    callback will be given one argument: the newly created containing class.
+    The return value of the callback will be used in place of the class, so
+    the callback should return the input if it does not wish to replace the
+    class.
+
+    The optional 'depth' argument to this function determines the number of
+    frames between this function and the targeted class suite.  'depth'
+    defaults to 2, since this skips this function's frame and one calling
+    function frame.  If you use this function from a function called directly
+    in the class suite, the default will be correct, otherwise you will need
+    to determine the correct depth yourself.
+
+    This function works by installing a special class factory function in
+    place of the '__metaclass__' of the containing class.  Therefore, only
+    callbacks *after* the last '__metaclass__' assignment in the containing
+    class will be executed.  Be sure that classes using "advising" functions
+    declare any '__metaclass__' *first*, to ensure all callbacks are run."""
+    # This entire approach is invalid under Py3K.  Don't even try to fix
+    # the coverage for this block there. :(
+    if __python3: #pragma NO COVER
+        raise TypeError('Class advice impossible in Python3')
+
+    frame = sys._getframe(depth)
+    kind, module, caller_locals, caller_globals = getFrameInfo(frame)
+
+    # This causes a problem when zope interfaces are used from doctest.
+    # In these cases, kind == "exec".
+    #
+    #if kind != "class":
+    #    raise SyntaxError(
+    #        "Advice must be in the body of a class statement"
+    #    )
+
+    previousMetaclass = caller_locals.get('__metaclass__')
+    if __python3:   # pragma NO COVER
+        defaultMetaclass  = caller_globals.get('__metaclass__', type)
+    else:
+        defaultMetaclass  = caller_globals.get('__metaclass__', ClassType)
+
+
+    def advise(name, bases, cdict):
+
+        if '__metaclass__' in cdict:
+            del cdict['__metaclass__']
+
+        if previousMetaclass is None:
+            if bases:
+                # find best metaclass or use global __metaclass__ if no bases
+                meta = determineMetaclass(bases)
+            else:
+                meta = defaultMetaclass
+
+        elif isClassAdvisor(previousMetaclass):
+            # special case: we can't compute the "true" metaclass here,
+            # so we need to invoke the previous metaclass and let it
+            # figure it out for us (and apply its own advice in the process)
+            meta = previousMetaclass
+
+        else:
+            meta = determineMetaclass(bases, previousMetaclass)
+
+        newClass = meta(name,bases,cdict)
+
+        # this lets the callback replace the class completely, if it wants to
+        return callback(newClass)
+
+    # introspection data only, not used by inner function
+    advise.previousMetaclass = previousMetaclass
+    advise.callback = callback
+
+    # install the advisor
+    caller_locals['__metaclass__'] = advise
+
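+# Illustrative usage (not part of the upstream module; Python 2 only, since
+# the mechanism relies on ``__metaclass__``).  ``note_class`` and
+# ``advise_me`` are hypothetical:
+#
+#   def note_class(cls):
+#       print('created %s' % cls.__name__)
+#       return cls                     # keep the class unchanged
+#
+#   def advise_me():
+#       # default depth=2 skips addClassAdvisor's frame and this one
+#       addClassAdvisor(note_class)
+#
+#   class Example:
+#       advise_me()                    # note_class(Example) runs on creation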
+
+def isClassAdvisor(ob):
+    """True if 'ob' is a class advisor function"""
+    return isinstance(ob,FunctionType) and hasattr(ob,'previousMetaclass')
+
+
+def determineMetaclass(bases, explicit_mc=None):
+    """Determine metaclass from 1+ bases and optional explicit __metaclass__"""
+
+    meta = [getattr(b,'__class__',type(b)) for b in bases]
+
+    if explicit_mc is not None:
+        # The explicit metaclass needs to be verified for compatibility
+        # as well, and allowed to resolve the incompatible bases, if any
+        meta.append(explicit_mc)
+
+    if len(meta)==1:
+        # easy case
+        return meta[0]
+
+    candidates = minimalBases(meta) # minimal set of metaclasses
+
+    if not candidates: #pragma NO COVER
+        # they're all "classic" classes
+        assert(not __python3) # This should not happen under Python 3
+        return ClassType
+
+    elif len(candidates)>1:
+        # We could auto-combine, but for now we won't...
+        raise TypeError("Incompatible metatypes",bases)
+
+    # Just one, return it
+    return candidates[0]
+
+
+def minimalBases(classes):
+    """Reduce a list of base classes to its ordered minimum equivalent"""
+
+    if not __python3: #pragma NO COVER
+        classes = [c for c in classes if c is not ClassType]
+    candidates = []
+
+    for m in classes:
+        for n in classes:
+            if issubclass(n,m) and m is not n:
+                break
+        else:
+            # m has no subclasses in 'classes'
+            if m in candidates:
+                candidates.remove(m)    # ensure that we're later in the list
+            candidates.append(m)
+
+    return candidates
+
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/__init__.py b/ThirdParty/ZopeInterface/zope/interface/common/__init__.py
new file mode 100644
index 0000000..b711d36
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/__init__.py
@@ -0,0 +1,2 @@
+#
+# This file is necessary to make this directory a package.
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/idatetime.py b/ThirdParty/ZopeInterface/zope/interface/common/idatetime.py
new file mode 100644
index 0000000..e8700af
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/idatetime.py
@@ -0,0 +1,575 @@
+##############################################################################
+# Copyright (c) 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+# 
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+##############################################################################
+"""Datetime interfaces.
+
+This module is called idatetime because if it were called datetime the import
+of the real datetime would fail.
+"""
+
+from zope.interface import Interface, Attribute
+from zope.interface import classImplements
+
+from datetime import timedelta, date, datetime, time, tzinfo
+
+
+class ITimeDeltaClass(Interface):
+    """This is the timedelta class interface."""
+
+    min = Attribute("The most negative timedelta object")
+
+    max = Attribute("The most positive timedelta object")
+
+    resolution = Attribute(
+        "The smallest difference between non-equal timedelta objects")
+
+
+class ITimeDelta(ITimeDeltaClass):
+    """Represent the difference between two datetime objects.
+
+    Supported operators:
+
+    - add, subtract timedelta
+    - unary plus, minus, abs
+    - compare to timedelta
+    - multiply, divide by int/long
+
+    In addition, datetime supports subtraction of two datetime objects
+    returning a timedelta, and addition or subtraction of a datetime
+    and a timedelta giving a datetime.
+
+    Representation: (days, seconds, microseconds).
+    """
+
+    days = Attribute("Days between -999999999 and 999999999 inclusive")
+
+    seconds = Attribute("Seconds between 0 and 86399 inclusive")
+
+    microseconds = Attribute("Microseconds between 0 and 999999 inclusive")
+
+
+class IDateClass(Interface):
+    """This is the date class interface."""
+
+    min = Attribute("The earliest representable date")
+
+    max = Attribute("The latest representable date")
+
+    resolution = Attribute(
+        "The smallest difference between non-equal date objects")
+
+    def today():
+        """Return the current local time.
+
+        This is equivalent to date.fromtimestamp(time.time())"""
+
+    def fromtimestamp(timestamp):
+        """Return the local date from a POSIX timestamp (like time.time())
+
+        This may raise ValueError, if the timestamp is out of the range of
+        values supported by the platform C localtime() function. It's common
+        for this to be restricted to years from 1970 through 2038. Note that
+        on non-POSIX systems that include leap seconds in their notion of a
+        timestamp, leap seconds are ignored by fromtimestamp().
+        """
+
+    def fromordinal(ordinal):
+        """Return the date corresponding to the proleptic Gregorian ordinal.
+
+         January 1 of year 1 has ordinal 1. ValueError is raised unless
+         1 <= ordinal <= date.max.toordinal().
+         For any date d, date.fromordinal(d.toordinal()) == d.
+         """
+
+
+class IDate(IDateClass):
+    """Represents a date (year, month and day) in an idealized calendar.
+
+    Operators:
+
+    __repr__, __str__
+    __cmp__, __hash__
+    __add__, __radd__, __sub__ (add/radd only with timedelta arg)
+    """
+
+    year = Attribute("Between MINYEAR and MAXYEAR inclusive.")
+
+    month = Attribute("Between 1 and 12 inclusive")
+
+    day = Attribute(
+        "Between 1 and the number of days in the given month of the given year.")
+
+    def replace(year, month, day):
+        """Return a date with the same value.
+
+        Except for those members given new values by whichever keyword
+        arguments are specified. For example, if d == date(2002, 12, 31), then
+        d.replace(day=26) == date(2002, 12, 26).
+        """
+
+    def timetuple():
+        """Return a 9-element tuple of the form returned by time.localtime().
+
+        The hours, minutes and seconds are 0, and the DST flag is -1.
+        d.timetuple() is equivalent to
+        (d.year, d.month, d.day, 0, 0, 0, d.weekday(), d.toordinal() -
+        date(d.year, 1, 1).toordinal() + 1, -1)
+        """
+
+    def toordinal():
+        """Return the proleptic Gregorian ordinal of the date
+
+        January 1 of year 1 has ordinal 1. For any date object d,
+        date.fromordinal(d.toordinal()) == d.
+        """
+
+    def weekday():
+        """Return the day of the week as an integer.
+
+        Monday is 0 and Sunday is 6. For example,
+        date(2002, 12, 4).weekday() == 2, a Wednesday.
+
+        See also isoweekday().
+        """
+
+    def isoweekday():
+        """Return the day of the week as an integer.
+
+        Monday is 1 and Sunday is 7. For example,
+        date(2002, 12, 4).isoweekday() == 3, a Wednesday.
+
+        See also weekday(), isocalendar().
+        """
+
+    def isocalendar():
+        """Return a 3-tuple, (ISO year, ISO week number, ISO weekday).
+
+        The ISO calendar is a widely used variant of the Gregorian calendar.
+        See http://www.phys.uu.nl/~vgent/calendar/isocalendar.htm for a good
+        explanation.
+
+        The ISO year consists of 52 or 53 full weeks, and where a week starts
+        on a Monday and ends on a Sunday. The first week of an ISO year is the
+        first (Gregorian) calendar week of a year containing a Thursday. This
+        is called week number 1, and the ISO year of that Thursday is the same
+        as its Gregorian year.
+
+        For example, 2004 begins on a Thursday, so the first week of ISO year
+        2004 begins on Monday, 29 Dec 2003 and ends on Sunday, 4 Jan 2004, so
+        that date(2003, 12, 29).isocalendar() == (2004, 1, 1) and
+        date(2004, 1, 4).isocalendar() == (2004, 1, 7).
+        """
+
+    def isoformat():
+        """Return a string representing the date in ISO 8601 format.
+
+        This is 'YYYY-MM-DD'.
+        For example, date(2002, 12, 4).isoformat() == '2002-12-04'.
+        """
+
+    def __str__():
+        """For a date d, str(d) is equivalent to d.isoformat()."""
+
+    def ctime():
+        """Return a string representing the date.
+
+        For example date(2002, 12, 4).ctime() == 'Wed Dec 4 00:00:00 2002'.
+        d.ctime() is equivalent to time.ctime(time.mktime(d.timetuple()))
+        on platforms where the native C ctime() function
+        (which time.ctime() invokes, but which date.ctime() does not invoke)
+        conforms to the C standard.
+        """
+
+    def strftime(format):
+        """Return a string representing the date.
+
+        Controlled by an explicit format string. Format codes referring to
+        hours, minutes or seconds will see 0 values.
+        """
+
+
+class IDateTimeClass(Interface):
+    """This is the datetime class interface."""
+
+    min = Attribute("The earliest representable datetime")
+
+    max = Attribute("The latest representable datetime")
+
+    resolution = Attribute(
+        "The smallest possible difference between non-equal datetime objects")
+
+    def today():
+        """Return the current local datetime, with tzinfo None.
+
+        This is equivalent to datetime.fromtimestamp(time.time()).
+        See also now(), fromtimestamp().
+        """
+
+    def now(tz=None):
+        """Return the current local date and time.
+
+        If optional argument tz is None or not specified, this is like today(),
+        but, if possible, supplies more precision than can be gotten from going
+        through a time.time() timestamp (for example, this may be possible on
+        platforms supplying the C gettimeofday() function).
+
+        Else tz must be an instance of a class tzinfo subclass, and the current
+        date and time are converted to tz's time zone. In this case the result
+        is equivalent to tz.fromutc(datetime.utcnow().replace(tzinfo=tz)).
+
+        See also today(), utcnow().
+        """
+
+    def utcnow():
+        """Return the current UTC date and time, with tzinfo None.
+
+        This is like now(), but returns the current UTC date and time, as a
+        naive datetime object. 
+
+        See also now().
+        """
+
+    def fromtimestamp(timestamp, tz=None):
+        """Return the local date and time corresponding to the POSIX timestamp.
+
+        Same as is returned by time.time(). If optional argument tz is None or
+        not specified, the timestamp is converted to the platform's local date
+        and time, and the returned datetime object is naive.
+
+        Else tz must be an instance of a class tzinfo subclass, and the
+        timestamp is converted to tz's time zone. In this case the result is
+        equivalent to
+        tz.fromutc(datetime.utcfromtimestamp(timestamp).replace(tzinfo=tz)).
+
+        fromtimestamp() may raise ValueError, if the timestamp is out of the
+        range of values supported by the platform C localtime() or gmtime()
+        functions. It's common for this to be restricted to years in 1970
+        through 2038. Note that on non-POSIX systems that include leap seconds
+        in their notion of a timestamp, leap seconds are ignored by
+        fromtimestamp(), and then it's possible to have two timestamps
+        differing by a second that yield identical datetime objects.
+
+        See also utcfromtimestamp().
+        """
+
+    def utcfromtimestamp(timestamp):
+        """Return the UTC datetime from the POSIX timestamp with tzinfo None.
+
+        This may raise ValueError, if the timestamp is out of the range of
+        values supported by the platform C gmtime() function. It's common for
+        this to be restricted to years in 1970 through 2038.
+
+        See also fromtimestamp().
+        """
+
+    def fromordinal(ordinal):
+        """Return the datetime from the proleptic Gregorian ordinal.
+
+        January 1 of year 1 has ordinal 1. ValueError is raised unless
+        1 <= ordinal <= datetime.max.toordinal().
+        The hour, minute, second and microsecond of the result are all 0, and
+        tzinfo is None.
+        """
+
+    def combine(date, time):
+        """Return a new datetime object.
+
+        Its date members are equal to the given date object's, and whose time
+        and tzinfo members are equal to the given time object's. For any
+        datetime object d, d == datetime.combine(d.date(), d.timetz()).
+        If date is a datetime object, its time and tzinfo members are ignored.
+        """
+
+
+class IDateTime(IDate, IDateTimeClass):
+    """Object contains all the information from a date object and a time object.
+    """
+
+    year = Attribute("Year between MINYEAR and MAXYEAR inclusive")
+
+    month = Attribute("Month between 1 and 12 inclusive")
+
+    day = Attribute(
+        "Day between 1 and the number of days in the given month of the year")
+
+    hour = Attribute("Hour in range(24)")
+
+    minute = Attribute("Minute in range(60)")
+
+    second = Attribute("Second in range(60)")
+
+    microsecond = Attribute("Microsecond in range(1000000)")
+
+    tzinfo = Attribute(
+        """The object passed as the tzinfo argument to the datetime constructor
+        or None if none was passed""")
+
+    def date():
+         """Return date object with same year, month and day."""
+
+    def time():
+        """Return time object with same hour, minute, second, microsecond.
+
+        tzinfo is None. See also method timetz().
+        """
+
+    def timetz():
+        """Return time object with same hour, minute, second, microsecond,
+        and tzinfo.
+
+        See also method time().
+        """
+
+    def replace(year, month, day, hour, minute, second, microsecond, tzinfo):
+        """Return a datetime with the same members, except for those members
+        given new values by whichever keyword arguments are specified.
+
+        Note that tzinfo=None can be specified to create a naive datetime from
+        an aware datetime with no conversion of date and time members.
+        """
+
+    def astimezone(tz):
+        """Return a datetime object with new tzinfo member tz, adjusting the
+        date and time members so the result is the same UTC time as self, but
+        in tz's local time.
+
+        tz must be an instance of a tzinfo subclass, and its utcoffset() and
+        dst() methods must not return None. self must be aware (self.tzinfo
+        must not be None, and self.utcoffset() must not return None).
+
+        If self.tzinfo is tz, self.astimezone(tz) is equal to self: no
+        adjustment of date or time members is performed. Else the result is
+        local time in time zone tz, representing the same UTC time as self:
+            after astz = dt.astimezone(tz), astz - astz.utcoffset()
+        will usually have the same date and time members as dt - dt.utcoffset().
+        The discussion of class tzinfo explains the cases at Daylight Saving
+        Time transition boundaries where this cannot be achieved (an issue only
+        if tz models both standard and daylight time).
+
+        If you merely want to attach a time zone object tz to a datetime dt
+        without adjustment of date and time members, use dt.replace(tzinfo=tz).
+        If you merely want to remove the time zone object from an aware
+        datetime dt without conversion of date and time members, use 
+        dt.replace(tzinfo=None).
+
+        Note that the default tzinfo.fromutc() method can be overridden in a
+        tzinfo subclass to effect the result returned by astimezone().
+        """
+
+    def utcoffset():
+        """Return the timezone offset in minutes east of UTC (negative west of
+        UTC)."""
+
+    def dst():
+        """Return 0 if DST is not in effect, or the DST offset (in minutes
+        eastward) if DST is in effect.
+        """
+
+    def tzname():
+        """Return the timezone name."""
+
+    def timetuple():
+        """Return a 9-element tuple of the form returned by time.localtime()."""
+
+    def utctimetuple():
+        """Return UTC time tuple compatilble with time.gmtimr()."""
+
+    def toordinal():
+        """Return the proleptic Gregorian ordinal of the date.
+
+        The same as self.date().toordinal().
+        """
+
+    def weekday():
+        """Return the day of the week as an integer.
+
+        Monday is 0 and Sunday is 6. The same as self.date().weekday().
+        See also isoweekday().
+        """
+
+    def isoweekday():
+        """Return the day of the week as an integer.
+
+        Monday is 1 and Sunday is 7. The same as self.date().isoweekday().
+        See also weekday(), isocalendar().
+        """
+
+    def isocalendar():
+        """Return a 3-tuple, (ISO year, ISO week number, ISO weekday).
+
+        The same as self.date().isocalendar().
+        """
+
+    def isoformat(sep='T'):
+        """Return a string representing the date and time in ISO 8601 format.
+
+        YYYY-MM-DDTHH:MM:SS.mmmmmm or YYYY-MM-DDTHH:MM:SS if microsecond is 0
+
+        If utcoffset() does not return None, a 6-character string is appended,
+        giving the UTC offset in (signed) hours and minutes:
+
+        YYYY-MM-DDTHH:MM:SS.mmmmmm+HH:MM or YYYY-MM-DDTHH:MM:SS+HH:MM
+        if microsecond is 0.
+
+        The optional argument sep (default 'T') is a one-character separator,
+        placed between the date and time portions of the result.
+        """
+
+    def __str__():
+        """For a datetime instance d, str(d) is equivalent to d.isoformat(' ').
+        """
+
+    def ctime():
+        """Return a string representing the date and time.
+
+        datetime(2002, 12, 4, 20, 30, 40).ctime() == 'Wed Dec 4 20:30:40 2002'.
+        d.ctime() is equivalent to time.ctime(time.mktime(d.timetuple())) on
+        platforms where the native C ctime() function (which time.ctime()
+        invokes, but which datetime.ctime() does not invoke) conforms to the
+        C standard.
+        """
+
+    def strftime(format):
+        """Return a string representing the date and time.
+
+        This is controlled by an explicit format string.
+        """
+
+
+class ITimeClass(Interface):
+    """This is the time class interface."""
+
+    min = Attribute("The earliest representable time")
+
+    max = Attribute("The latest representable time")
+
+    resolution = Attribute(
+        "The smallest possible difference between non-equal time objects")
+
+
+class ITime(ITimeClass):
+    """Represent time with time zone.
+
+    Operators:
+
+    __repr__, __str__
+    __cmp__, __hash__
+    """
+
+    hour = Attribute("Hour in range(24)")
+
+    minute = Attribute("Minute in range(60)")
+
+    second = Attribute("Second in range(60)")
+
+    microsecond = Attribute("Microsecond in range(1000000)")
+
+    tzinfo = Attribute(
+        """The object passed as the tzinfo argument to the time constructor
+        or None if none was passed.""")
+
+    def replace(hour, minute, second, microsecond, tzinfo):
+        """Return a time with the same value.
+
+        Except for those members given new values by whichever keyword
+        arguments are specified. Note that tzinfo=None can be specified
+        to create a naive time from an aware time, without conversion of the
+        time members.
+        """
+
+    def isoformat():
+        """Return a string representing the time in ISO 8601 format.
+
+        That is HH:MM:SS.mmmmmm or, if self.microsecond is 0, HH:MM:SS
+        If utcoffset() does not return None, a 6-character string is appended,
+        giving the UTC offset in (signed) hours and minutes:
+        HH:MM:SS.mmmmmm+HH:MM or, if self.microsecond is 0, HH:MM:SS+HH:MM
+        """
+
+    def __str__():
+        """For a time t, str(t) is equivalent to t.isoformat()."""
+
+    def strftime(format):
+        """Return a string representing the time.
+
+        This is controlled by an explicit format string.
+        """
+
+    def utcoffset():
+        """Return the timezone offset in minutes east of UTC (negative west of
+        UTC).
+
+        If tzinfo is None, returns None, else returns
+        self.tzinfo.utcoffset(None), and raises an exception if the latter
+        doesn't return None or a timedelta object representing a whole number
+        of minutes with magnitude less than one day.
+        """
+
+    def dst():
+        """Return 0 if DST is not in effect, or the DST offset (in minutes
+        eastward) if DST is in effect.
+
+        If tzinfo is None, returns None, else returns self.tzinfo.dst(None),
+        and raises an exception if the latter doesn't return None, or a
+        timedelta object representing a whole number of minutes with
+        magnitude less than one day.
+        """
+
+    def tzname():
+        """Return the timezone name.
+
+        If tzinfo is None, returns None, else returns self.tzinfo.tzname(None),
+        or raises an exception if the latter doesn't return None or a string
+        object.
+        """
+
+
+class ITZInfo(Interface):
+    """Time zone info class.
+    """
+
+    def utcoffset(dt):
+        """Return offset of local time from UTC, in minutes east of UTC.
+
+        If local time is west of UTC, this should be negative.
+        Note that this is intended to be the total offset from UTC;
+        for example, if a tzinfo object represents both time zone and DST
+        adjustments, utcoffset() should return their sum. If the UTC offset
+        isn't known, return None. Else the value returned must be a timedelta
+        object specifying a whole number of minutes in the range -1439 to 1439
+        inclusive (1440 = 24*60; the magnitude of the offset must be less
+        than one day).
+        """
+
+    def dst(dt):
+        """Return the daylight saving time (DST) adjustment, in minutes east
+        of UTC, or None if DST information isn't known.
+        """
+
+    def tzname(dt):
+        """Return the time zone name corresponding to the datetime object as
+        a string.
+        """
+
+    def fromutc(dt):
+        """Return an equivalent datetime in self's local time."""
+
+
+classImplements(timedelta, ITimeDelta)
+classImplements(date, IDate)
+classImplements(datetime, IDateTime)
+classImplements(time, ITime)
+classImplements(tzinfo, ITZInfo)
+
+## directlyProvides(timedelta, ITimeDeltaClass)
+## directlyProvides(date, IDateClass)
+## directlyProvides(datetime, IDateTimeClass)
+## directlyProvides(time, ITimeClass)
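+
+# Illustrative check (not part of the upstream module):
+#
+#   >>> from datetime import date
+#   >>> IDate.providedBy(date.today())
+#   True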
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/interfaces.py b/ThirdParty/ZopeInterface/zope/interface/common/interfaces.py
new file mode 100644
index 0000000..6274e6d
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/interfaces.py
@@ -0,0 +1,102 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Interfaces for standard python exceptions
+"""
+from zope.interface import Interface
+from zope.interface import classImplements
+
+class IException(Interface): pass
+class IStandardError(IException): pass
+class IWarning(IException): pass
+class ISyntaxError(IStandardError): pass
+class ILookupError(IStandardError): pass
+class IValueError(IStandardError): pass
+class IRuntimeError(IStandardError): pass
+class IArithmeticError(IStandardError): pass
+class IAssertionError(IStandardError): pass
+class IAttributeError(IStandardError): pass
+class IDeprecationWarning(IWarning): pass
+class IEOFError(IStandardError): pass
+class IEnvironmentError(IStandardError): pass
+class IFloatingPointError(IArithmeticError): pass
+class IIOError(IEnvironmentError): pass
+class IImportError(IStandardError): pass
+class IIndentationError(ISyntaxError): pass
+class IIndexError(ILookupError): pass
+class IKeyError(ILookupError): pass
+class IKeyboardInterrupt(IStandardError): pass
+class IMemoryError(IStandardError): pass
+class INameError(IStandardError): pass
+class INotImplementedError(IRuntimeError): pass
+class IOSError(IEnvironmentError): pass
+class IOverflowError(IArithmeticError): pass
+class IOverflowWarning(IWarning): pass
+class IReferenceError(IStandardError): pass
+class IRuntimeWarning(IWarning): pass
+class IStopIteration(IException): pass
+class ISyntaxWarning(IWarning): pass
+class ISystemError(IStandardError): pass
+class ISystemExit(IException): pass
+class ITabError(IIndentationError): pass
+class ITypeError(IStandardError): pass
+class IUnboundLocalError(INameError): pass
+class IUnicodeError(IValueError): pass
+class IUserWarning(IWarning): pass
+class IZeroDivisionError(IArithmeticError): pass
+
+classImplements(ArithmeticError, IArithmeticError)
+classImplements(AssertionError, IAssertionError)
+classImplements(AttributeError, IAttributeError)
+classImplements(DeprecationWarning, IDeprecationWarning)
+classImplements(EnvironmentError, IEnvironmentError)
+classImplements(EOFError, IEOFError)
+classImplements(Exception, IException)
+classImplements(FloatingPointError, IFloatingPointError)
+classImplements(ImportError, IImportError)
+classImplements(IndentationError, IIndentationError)
+classImplements(IndexError, IIndexError)
+classImplements(IOError, IIOError)
+classImplements(KeyboardInterrupt, IKeyboardInterrupt)
+classImplements(KeyError, IKeyError)
+classImplements(LookupError, ILookupError)
+classImplements(MemoryError, IMemoryError)
+classImplements(NameError, INameError)
+classImplements(NotImplementedError, INotImplementedError)
+classImplements(OSError, IOSError)
+classImplements(OverflowError, IOverflowError)
+try:
+    classImplements(OverflowWarning, IOverflowWarning)
+except NameError:  #pragma NO COVER
+    pass # OverflowWarning was removed in Python 2.5
+classImplements(ReferenceError, IReferenceError)
+classImplements(RuntimeError, IRuntimeError)
+classImplements(RuntimeWarning, IRuntimeWarning)
+try:
+    classImplements(StandardError, IStandardError)
+except NameError:  #pragma NO COVER
+    pass # StandardError does not exist in Python 3
+classImplements(StopIteration, IStopIteration)
+classImplements(SyntaxError, ISyntaxError)
+classImplements(SyntaxWarning, ISyntaxWarning)
+classImplements(SystemError, ISystemError)
+classImplements(SystemExit, ISystemExit)
+classImplements(TabError, ITabError)
+classImplements(TypeError, ITypeError)
+classImplements(UnboundLocalError, IUnboundLocalError)
+classImplements(UnicodeError, IUnicodeError)
+classImplements(UserWarning, IUserWarning)
+classImplements(ValueError, IValueError)
+classImplements(Warning, IWarning)
+classImplements(ZeroDivisionError, IZeroDivisionError)
+
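+# Illustrative check (not part of the upstream module):
+#
+#   >>> IKeyError.providedBy(KeyError('missing'))
+#   True
+#   >>> ILookupError.providedBy(KeyError('missing'))   # IKeyError extends it
+#   True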
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/mapping.py b/ThirdParty/ZopeInterface/zope/interface/common/mapping.py
new file mode 100644
index 0000000..139715f
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/mapping.py
@@ -0,0 +1,125 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Mapping Interfaces
+"""
+from zope.interface import Interface
+
+class IItemMapping(Interface):
+    """Simplest readable mapping object
+    """
+
+    def __getitem__(key):
+        """Get a value for a key
+
+        A KeyError is raised if there is no value for the key.
+        """
+
+
+class IReadMapping(IItemMapping):
+    """Basic mapping interface
+    """
+
+    def get(key, default=None):
+        """Get a value for a key
+
+        The default is returned if there is no value for the key.
+        """
+
+    def __contains__(key):
+        """Tell if a key exists in the mapping."""
+
+
+class IWriteMapping(Interface):
+    """Mapping methods for changing data"""
+    
+    def __delitem__(key):
+        """Delete a value from the mapping using the key."""
+
+    def __setitem__(key, value):
+        """Set a new item in the mapping."""
+        
+
+class IEnumerableMapping(IReadMapping):
+    """Mapping objects whose items can be enumerated.
+    """
+
+    def keys():
+        """Return the keys of the mapping object.
+        """
+
+    def __iter__():
+        """Return an iterator for the keys of the mapping object.
+        """
+
+    def values():
+        """Return the values of the mapping object.
+        """
+
+    def items():
+        """Return the items of the mapping object.
+        """
+
+    def __len__():
+        """Return the number of items.
+        """
+
+class IMapping(IWriteMapping, IEnumerableMapping):
+    ''' Simple mapping interface '''
+
+class IIterableMapping(IEnumerableMapping):
+
+    def iterkeys():
+        "iterate over keys; equivalent to __iter__"
+
+    def itervalues():
+        "iterate over values"
+
+    def iteritems():
+        "iterate over items"
+
+class IClonableMapping(Interface):
+    
+    def copy():
+        "return copy of dict"
+
+class IExtendedReadMapping(IIterableMapping):
+    
+    def has_key(key):
+        """Tell if a key exists in the mapping; equivalent to __contains__"""
+
+class IExtendedWriteMapping(IWriteMapping):
+    
+    def clear():
+        "delete all items"
+    
+    def update(d):
+        " Update D from E: for k in E.keys(): D[k] = E[k]"
+    
+    def setdefault(key, default=None):
+        "D.setdefault(k[,d]) -> D.get(k,d), also set D[k]=d if k not in D"
+    
+    def pop(k, *args):
+        """remove specified key and return the corresponding value
+        *args may contain a single default value, or may not be supplied.
+        If key is not found, default is returned if given, otherwise 
+        KeyError is raised"""
+    
+    def popitem():
+        """remove and return some (key, value) pair as a
+        2-tuple; but raise KeyError if mapping is empty"""
+
+class IFullMapping(
+    IExtendedReadMapping, IExtendedWriteMapping, IClonableMapping, IMapping):
+    ''' Full mapping interface ''' # IMapping included so tests for IMapping
+    # succeed with IFullMapping
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/sequence.py b/ThirdParty/ZopeInterface/zope/interface/common/sequence.py
new file mode 100644
index 0000000..223a94e
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/sequence.py
@@ -0,0 +1,160 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Sequence Interfaces
+"""
+__docformat__ = 'restructuredtext'
+from zope import interface
+
+class IMinimalSequence(interface.Interface):
+    """Most basic sequence interface.
+
+    All sequences are iterable.  This requires at least one of the
+    following:
+
+    - a `__getitem__()` method that takes a single argument; integer
+      values starting at 0 must be supported, and `IndexError` should
+      be raised for the first index for which there is no value, or
+
+    - an `__iter__()` method that returns an iterator as defined in
+      the Python documentation (http://docs.python.org/lib/typeiter.html).
+
+    """
+
+    def __getitem__(index):
+        """`x.__getitem__(index)` <==> `x[index]`
+
+        Declaring this interface does not specify whether `__getitem__`
+        supports slice objects."""
+
+class IFiniteSequence(IMinimalSequence):
+
+    def __len__():
+        """`x.__len__()` <==> `len(x)`"""
+
+class IReadSequence(IFiniteSequence):
+    """read interface shared by tuple and list"""
+
+    def __contains__(item):
+        """`x.__contains__(item)` <==> `item in x`"""
+
+    def __lt__(other):
+        """`x.__lt__(other)` <==> `x < other`"""
+
+    def __le__(other):
+        """`x.__le__(other)` <==> `x <= other`"""
+
+    def __eq__(other):
+        """`x.__eq__(other)` <==> `x == other`"""
+
+    def __ne__(other):
+        """`x.__ne__(other)` <==> `x != other`"""
+
+    def __gt__(other):
+        """`x.__gt__(other)` <==> `x > other`"""
+
+    def __ge__(other):
+        """`x.__ge__(other)` <==> `x >= other`"""
+
+    def __add__(other):
+        """`x.__add__(other)` <==> `x + other`"""
+
+    def __mul__(n):
+        """`x.__mul__(n)` <==> `x * n`"""
+
+    def __rmul__(n):
+        """`x.__rmul__(n)` <==> `n * x`"""
+
+    def __getslice__(i, j):
+        """`x.__getslice__(i, j)` <==> `x[i:j]`
+
+        Use of negative indices is not supported.
+
+        Deprecated since Python 2.0 but still a part of `UserList`.
+        """
+
+class IExtendedReadSequence(IReadSequence):
+    """Full read interface for lists"""
+
+    def count(item):
+        """Return number of occurrences of value"""
+
+    def index(item, *args):
+        """Return first index of value
+
+        `L.index(value, [start, [stop]])` -> integer"""
+
+class IUniqueMemberWriteSequence(interface.Interface):
+    """The write contract for a sequence that may enforce unique members"""
+
+    def __setitem__(index, item):
+        """`x.__setitem__(index, item)` <==> `x[index] = item`
+
+        Declaring this interface does not specify whether `__setitem__`
+        supports slice objects.
+        """
+
+    def __delitem__(index):
+        """`x.__delitem__(index)` <==> `del x[index]`
+
+        Declaring this interface does not specify whether `__delitem__`
+        supports slice objects.
+        """
+
+    def __setslice__(i, j, other):
+        """`x.__setslice__(i, j, other)` <==> `x[i:j]=other`
+
+        Use of negative indices is not supported.
+
+        Deprecated since Python 2.0 but still a part of `UserList`.
+        """
+
+    def __delslice__(i, j):
+        """`x.__delslice__(i, j)` <==> `del x[i:j]`
+
+        Use of negative indices is not supported.
+
+        Deprecated since Python 2.0 but still a part of `UserList`.
+        """
+
+    def __iadd__(y):
+        """`x.__iadd__(y)` <==> `x += y`"""
+
+    def append(item):
+        """Append item to end"""
+
+    def insert(index, item):
+        """Insert item before index"""
+
+    def pop(index=-1):
+        """Remove and return item at index (default last)"""
+
+    def remove(item):
+        """Remove first occurrence of value"""
+
+    def reverse():
+        """Reverse *IN PLACE*"""
+
+    def sort(cmpfunc=None):
+        """Stable sort *IN PLACE*; `cmpfunc(x, y)` -> -1, 0, 1"""
+
+    def extend(iterable):
+        """Extend list by appending elements from the iterable"""
+
+class IWriteSequence(IUniqueMemberWriteSequence):
+    """Full write contract for sequences"""
+
+    def __imul__(n):
+        """`x.__imul__(n)` <==> `x *= n`"""
+
+class ISequence(IReadSequence, IWriteSequence):
+    """Full sequence contract"""
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/tests/__init__.py b/ThirdParty/ZopeInterface/zope/interface/common/tests/__init__.py
new file mode 100644
index 0000000..b711d36
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/tests/__init__.py
@@ -0,0 +1,2 @@
+#
+# This file is necessary to make this directory a package.
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/tests/basemapping.py b/ThirdParty/ZopeInterface/zope/interface/common/tests/basemapping.py
new file mode 100644
index 0000000..66f0ee4
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/tests/basemapping.py
@@ -0,0 +1,107 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Base Mapping tests
+"""
+from operator import __getitem__
+
+def testIReadMapping(self, inst, state, absent):
+    for key in state:
+        self.assertEqual(inst[key], state[key])
+        self.assertEqual(inst.get(key, None), state[key])
+        self.failUnless(key in inst)
+
+    for key in absent:
+        self.assertEqual(inst.get(key, None), None)
+        self.assertEqual(inst.get(key), None)
+        self.assertEqual(inst.get(key, self), self)
+        self.assertRaises(KeyError, __getitem__, inst, key)
+
+
+def test_keys(self, inst, state):
+    # Return the keys of the mapping object
+    inst_keys = list(inst.keys()); inst_keys.sort()
+    state_keys = list(state.keys()) ; state_keys.sort()
+    self.assertEqual(inst_keys, state_keys)
+
+def test_iter(self, inst, state):
+    # Return the keys of the mapping object
+    inst_keys = list(inst); inst_keys.sort()
+    state_keys = list(state.keys()) ; state_keys.sort()
+    self.assertEqual(inst_keys, state_keys)
+
+def test_values(self, inst, state):
+    # Return the values of the mapping object
+    inst_values = list(inst.values()); inst_values.sort()
+    state_values = list(state.values()) ; state_values.sort()
+    self.assertEqual(inst_values, state_values)
+
+def test_items(self, inst, state):
+    # Return the items of the mapping object
+    inst_items = list(inst.items()); inst_items.sort()
+    state_items = list(state.items()) ; state_items.sort()
+    self.assertEqual(inst_items, state_items)
+
+def test___len__(self, inst, state):
+    # Return the number of items
+    self.assertEqual(len(inst), len(state))
+
+def testIEnumerableMapping(self, inst, state):
+    test_keys(self, inst, state)
+    test_items(self, inst, state)
+    test_values(self, inst, state)
+    test___len__(self, inst, state)
+
+
+class BaseTestIReadMapping(object):
+    def testIReadMapping(self):
+        inst = self._IReadMapping__sample()
+        state = self._IReadMapping__stateDict()
+        absent = self._IReadMapping__absentKeys()
+        testIReadMapping(self, inst, state, absent)
+
+
+class BaseTestIEnumerableMapping(BaseTestIReadMapping):
+    # Mapping objects whose items can be enumerated
+    def test_keys(self):
+        # Return the keys of the mapping object
+        inst = self._IEnumerableMapping__sample()
+        state = self._IEnumerableMapping__stateDict()
+        test_keys(self, inst, state)
+
+    def test_values(self):
+        # Return the values of the mapping object
+        inst = self._IEnumerableMapping__sample()
+        state = self._IEnumerableMapping__stateDict()
+        test_values(self, inst, state)
+
+    def test_items(self):
+        # Return the items of the mapping object
+        inst = self._IEnumerableMapping__sample()
+        state = self._IEnumerableMapping__stateDict()
+        test_items(self, inst, state)
+
+    def test___len__(self):
+        # Return the number of items
+        inst = self._IEnumerableMapping__sample()
+        state = self._IEnumerableMapping__stateDict()
+        test___len__(self, inst, state)
+
+    def _IReadMapping__stateDict(self):
+        return self._IEnumerableMapping__stateDict()
+
+    def _IReadMapping__sample(self):
+        return self._IEnumerableMapping__sample()
+
+    def _IReadMapping__absentKeys(self):
+        return self._IEnumerableMapping__absentKeys()
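
These helpers are meant to be mixed into a concrete unittest.TestCase; the
sample/state/absent hooks are looked up by the literal attribute names used
above. A sketch (DictMappingTest and its sample data are illustrative; note
that testIReadMapping relies on the deprecated failUnless alias, so a Python
where unittest still provides it is assumed).

    import unittest
    from zope.interface.common.tests.basemapping import BaseTestIEnumerableMapping

    class DictMappingTest(BaseTestIEnumerableMapping, unittest.TestCase):
        # Hooks consumed by BaseTestIEnumerableMapping / BaseTestIReadMapping.
        def _IEnumerableMapping__sample(self):
            return {'a': 1, 'b': 2}
        def _IEnumerableMapping__stateDict(self):
            return {'a': 1, 'b': 2}
        def _IEnumerableMapping__absentKeys(self):
            return ['missing']

    if __name__ == '__main__':
        unittest.main()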
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/tests/test_idatetime.py b/ThirdParty/ZopeInterface/zope/interface/common/tests/test_idatetime.py
new file mode 100644
index 0000000..60f377e
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/tests/test_idatetime.py
@@ -0,0 +1,47 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test for datetime interfaces
+"""
+
+import unittest
+
+from zope.interface.verify import verifyObject, verifyClass
+from zope.interface.common.idatetime import ITimeDelta, ITimeDeltaClass
+from zope.interface.common.idatetime import IDate, IDateClass
+from zope.interface.common.idatetime import IDateTime, IDateTimeClass
+from zope.interface.common.idatetime import ITime, ITimeClass, ITZInfo
+from datetime import timedelta, date, datetime, time, tzinfo
+
+class TestDateTimeInterfaces(unittest.TestCase):
+
+    def test_interfaces(self):
+        verifyObject(ITimeDelta, timedelta(minutes=20))
+        verifyObject(IDate, date(2000, 1, 2))
+        verifyObject(IDateTime, datetime(2000, 1, 2, 10, 20))
+        verifyObject(ITime, time(20, 30, 15, 1234))
+        verifyObject(ITZInfo, tzinfo())
+        verifyClass(ITimeDeltaClass, timedelta)
+        verifyClass(IDateClass, date)
+        verifyClass(IDateTimeClass, datetime)
+        verifyClass(ITimeClass, time)
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestDateTimeInterfaces))
+    return suite
+
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/ThirdParty/ZopeInterface/zope/interface/common/tests/test_import_interfaces.py b/ThirdParty/ZopeInterface/zope/interface/common/tests/test_import_interfaces.py
new file mode 100644
index 0000000..908f05d
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/common/tests/test_import_interfaces.py
@@ -0,0 +1,29 @@
+##############################################################################
+#
+# Copyright (c) 2006 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+import doctest
+import unittest
+
+def test_interface_import():
+    """
+    >>> import zope.interface.common.interfaces
+    """
+
+def test_suite():
+    return unittest.TestSuite((
+        doctest.DocTestSuite(),
+        ))
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
+
diff --git a/ThirdParty/ZopeInterface/zope/interface/declarations.py b/ThirdParty/ZopeInterface/zope/interface/declarations.py
new file mode 100644
index 0000000..0992adc
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/declarations.py
@@ -0,0 +1,848 @@
+##############################################################################
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+##############################################################################
+"""Implementation of interface declarations
+
+There are three flavors of declarations:
+
+  - Declarations are used to simply name declared interfaces.
+
+  - ImplementsDeclarations are used to express the interfaces that a
+    class implements (that instances of the class provides).
+
+    Implements specifications support inheriting interfaces.
+
+  - ProvidesDeclarations are used to express interfaces directly
+    provided by objects.
+
+"""
+__docformat__ = 'restructuredtext'
+
+import sys
+from types import FunctionType
+from types import MethodType
+from types import ModuleType
+import warnings
+import weakref
+
+from zope.interface.advice import addClassAdvisor
+from zope.interface.interface import InterfaceClass
+from zope.interface.interface import SpecificationBase
+from zope.interface.interface import Specification
+from zope.interface._compat import CLASS_TYPES as DescriptorAwareMetaClasses
+from zope.interface._compat import PYTHON3
+
+# Registry of class-implementation specifications
+BuiltinImplementationSpecifications = {}
+
+_ADVICE_ERROR = ('Class advice impossible in Python3.  '
+                 'Use the @%s class decorator instead.')
+
+_ADVICE_WARNING = ('The %s API is deprecated, and will not work in Python3.  '
+                   'Use the @%s class decorator instead.')
+
+class Declaration(Specification):
+    """Interface declarations"""
+
+    def __init__(self, *interfaces):
+        Specification.__init__(self, _normalizeargs(interfaces))
+
+    def changed(self, originally_changed):
+        Specification.changed(self, originally_changed)
+        try:
+            del self._v_attrs
+        except AttributeError:
+            pass
+
+    def __contains__(self, interface):
+        """Test whether an interface is in the specification
+        """
+
+        return self.extends(interface) and interface in self.interfaces()
+
+    def __iter__(self):
+        """Return an iterator for the interfaces in the specification
+        """
+        return self.interfaces()
+
+    def flattened(self):
+        """Return an iterator of all included and extended interfaces
+        """
+        return iter(self.__iro__)
+
+    def __sub__(self, other):
+        """Remove interfaces from a specification
+        """
+        return Declaration(
+            *[i for i in self.interfaces()
+                if not [j for j in other.interfaces()
+                        if i.extends(j, 0)]
+                ]
+                )
+
+    def __add__(self, other):
+        """Add two specifications or a specification and an interface
+        """
+        seen = {}
+        result = []
+        for i in self.interfaces():
+            if i not in seen:
+                seen[i] = 1
+                result.append(i)
+        for i in other.interfaces():
+            if i not in seen:
+                seen[i] = 1
+                result.append(i)
+
+        return Declaration(*result)
+
+    __radd__ = __add__
+
+
+##############################################################################
+#
+# Implementation specifications
+#
+# These specify interfaces implemented by instances of classes
+
+class Implements(Declaration):
+
+    # class whose specification should be used as additional base
+    inherit = None
+
+    # interfaces actually declared for a class
+    declared = ()
+
+    __name__ = '?'
+
+    def __repr__(self):
+        return '<implementedBy %s>' % (self.__name__)
+
+    def __reduce__(self):
+        return implementedBy, (self.inherit, )
+
+def implementedByFallback(cls):
+    """Return the interfaces implemented for a class' instances
+
+      The value returned is an IDeclaration.
+    """
+    try:
+        spec = cls.__dict__.get('__implemented__')
+    except AttributeError:
+
+        # we can't get the class dict. This is probably due to a
+        # security proxy.  If this is the case, then probably no
+        # descriptor was installed for the class.
+
+        # We don't want to depend directly on zope.security in
+        # zope.interface, but we'll try to make reasonable
+        # accommodations in an indirect way.
+
+        # We'll check to see if there's an implements:
+
+        spec = getattr(cls, '__implemented__', None)
+        if spec is None:
+            # There's no spec stored in the class. Maybe it's a builtin:
+            spec = BuiltinImplementationSpecifications.get(cls)
+            if spec is not None:
+                return spec
+            return _empty
+
+        if spec.__class__ == Implements:
+            # we defaulted to _empty or there was a spec. Good enough.
+            # Return it.
+            return spec
+
+        # TODO: need old style __implements__ compatibility?
+        # Hm, there's an __implemented__, but it's not a spec. Must be
+        # an old-style declaration. Just compute a spec for it
+        return Declaration(*_normalizeargs((spec, )))
+
+    if isinstance(spec, Implements):
+        return spec
+
+    if spec is None:
+        spec = BuiltinImplementationSpecifications.get(cls)
+        if spec is not None:
+            return spec
+
+    # TODO: need old style __implements__ compatibility?
+    if spec is not None:
+        # old-style __implemented__ = foo declaration
+        spec = (spec, ) # tuplefy, as it might be just an int
+        spec = Implements(*_normalizeargs(spec))
+        spec.inherit = None    # old-style implies no inherit
+        del cls.__implemented__ # get rid of the old-style declaration
+    else:
+        try:
+            bases = cls.__bases__
+        except AttributeError:
+            if not callable(cls):
+                raise TypeError("ImplementedBy called for non-factory", cls)
+            bases = ()
+
+        spec = Implements(*[implementedBy(c) for c in bases])
+        spec.inherit = cls
+
+    spec.__name__ = (getattr(cls, '__module__', '?') or '?') + \
+                    '.' + (getattr(cls, '__name__', '?') or '?')
+
+    try:
+        cls.__implemented__ = spec
+        if not hasattr(cls, '__providedBy__'):
+            cls.__providedBy__ = objectSpecificationDescriptor
+
+        if (isinstance(cls, DescriptorAwareMetaClasses)
+            and
+            '__provides__' not in cls.__dict__):
+            # Make sure we get a __provides__ descriptor
+            cls.__provides__ = ClassProvides(
+                cls,
+                getattr(cls, '__class__', type(cls)),
+                )
+
+    except TypeError:
+        if not isinstance(cls, type):
+            raise TypeError("ImplementedBy called for non-type", cls)
+        BuiltinImplementationSpecifications[cls] = spec
+
+    return spec
+
+implementedBy = implementedByFallback
+
+def classImplementsOnly(cls, *interfaces):
+    """Declare the only interfaces implemented by instances of a class
+
+      The arguments after the class are one or more interfaces or interface
+      specifications (``IDeclaration`` objects).
+
+      The interfaces given (including the interfaces in the specifications)
+      replace any previous declarations.
+    """
+    spec = implementedBy(cls)
+    spec.declared = ()
+    spec.inherit = None
+    classImplements(cls, *interfaces)
+
+def classImplements(cls, *interfaces):
+    """Declare additional interfaces implemented for instances of a class
+
+      The arguments after the class are one or more interfaces or
+      interface specifications (``IDeclaration`` objects).
+
+      The interfaces given (including the interfaces in the specifications)
+      are added to any interfaces previously declared.
+    """
+    spec = implementedBy(cls)
+    spec.declared += tuple(_normalizeargs(interfaces))
+
+    # compute the bases
+    bases = []
+    seen = {}
+    for b in spec.declared:
+        if b not in seen:
+            seen[b] = 1
+            bases.append(b)
+
+    if spec.inherit is not None:
+
+        for c in spec.inherit.__bases__:
+            b = implementedBy(c)
+            if b not in seen:
+                seen[b] = 1
+                bases.append(b)
+
+    spec.__bases__ = tuple(bases)
+
+def _implements_advice(cls):
+    interfaces, classImplements = cls.__dict__['__implements_advice_data__']
+    del cls.__implements_advice_data__
+    classImplements(cls, *interfaces)
+    return cls
+
+
+class implementer:
+    """Declare the interfaces implemented by instances of a class.
+
+      This function is called as a class decorator.
+
+      The arguments are one or more interfaces or interface
+      specifications (IDeclaration objects).
+
+      The interfaces given (including the interfaces in the
+      specifications) are added to any interfaces previously
+      declared.
+
+      Previous declarations include declarations for base classes
+      unless implementsOnly was used.
+
+      This function is provided for convenience. It provides a more
+      convenient way to call classImplements. For example::
+
+        @implementer(I1)
+        class C(object):
+            pass
+
+      is equivalent to calling::
+
+        classImplements(C, I1)
+
+      after the class has been created.
+      """
+
+    def __init__(self, *interfaces):
+        self.interfaces = interfaces
+
+    def __call__(self, ob):
+        if isinstance(ob, DescriptorAwareMetaClasses):
+            classImplements(ob, *self.interfaces)
+            return ob
+
+        spec = Implements(*self.interfaces)
+        try:
+            ob.__implemented__ = spec
+        except AttributeError:
+            raise TypeError("Can't declare implements", ob)
+        return ob
+
+class implementer_only:
+    """Declare the only interfaces implemented by instances of a class
+
+      This function is called as a class decorator.
+
+      The arguments are one or more interfaces or interface
+      specifications (IDeclaration objects).
+
+      Previous declarations including declarations for base classes
+      are overridden.
+
+      This function is provided for convenience. It provides a more
+      convenient way to call classImplementsOnly. For example::
+
+        @implementer_only(I1)
+        class C(object): pass
+
+      is equivalent to calling::
+
+        classImplementsOnly(C, I1)
+
+      after the class has been created.
+      """
+
+    def __init__(self, *interfaces):
+        self.interfaces = interfaces
+
+    def __call__(self, ob):
+        if isinstance(ob, (FunctionType, MethodType)):
+            # XXX Does this decorator make sense for anything but classes?
+            # I don't think so. There can be no inheritance of interfaces
+            # on a method or function.
+            raise ValueError('The implementer_only decorator is not '
+                             'supported for methods or functions.')
+        else:
+            # Assume it's a class:
+            classImplementsOnly(ob, *self.interfaces)
+            return ob
+
+def _implements(name, interfaces, classImplements):
+    # This entire approach is invalid under Py3K.  Don't even try to fix
+    # the coverage for this block there. :(
+    if PYTHON3: #pragma NO COVER
+        raise TypeError('Class advice impossible in Python3')
+    frame = sys._getframe(2)
+    locals = frame.f_locals
+
+    # Try to make sure we were called from a class def. In 2.2.0 we can't
+    # check for __module__ since it doesn't seem to be added to the locals
+    # until later on.
+    if locals is frame.f_globals or '__module__' not in locals:
+        raise TypeError(name+" can be used only from a class definition.")
+
+    if '__implements_advice_data__' in locals:
+        raise TypeError(name+" can be used only once in a class definition.")
+
+    locals['__implements_advice_data__'] = interfaces, classImplements
+    addClassAdvisor(_implements_advice, depth=3)
+
+def implements(*interfaces):
+    """Declare interfaces implemented by instances of a class
+
+      This function is called in a class definition.
+
+      The arguments are one or more interfaces or interface
+      specifications (IDeclaration objects).
+
+      The interfaces given (including the interfaces in the
+      specifications) are added to any interfaces previously
+      declared.
+
+      Previous declarations include declarations for base classes
+      unless implementsOnly was used.
+
+      This function is provided for convenience. It provides a more
+      convenient way to call classImplements. For example::
+
+        implements(I1)
+
+      is equivalent to calling::
+
+        classImplements(C, I1)
+
+      after the class has been created.
+    """
+    # This entire approach is invalid under Py3K.  Don't even try to fix
+    # the coverage for this block there. :(
+    if PYTHON3: #pragma NO COVER
+        raise TypeError(_ADVICE_ERROR % 'implementer')
+    _implements("implements", interfaces, classImplements)
+
+def implementsOnly(*interfaces):
+    """Declare the only interfaces implemented by instances of a class
+
+      This function is called in a class definition.
+
+      The arguments are one or more interfaces or interface
+      specifications (IDeclaration objects).
+
+      Previous declarations including declarations for base classes
+      are overridden.
+
+      This function is provided for convenience. It provides a more
+      convenient way to call classImplementsOnly. For example::
+
+        implementsOnly(I1)
+
+      is equivalent to calling::
+
+        classImplementsOnly(C, I1)
+
+      after the class has been created.
+    """
+    # This entire approach is invalid under Py3K.  Don't even try to fix
+    # the coverage for this block there. :(
+    if PYTHON3: #pragma NO COVER
+        raise TypeError(_ADVICE_ERROR % 'implementer_only')
+    _implements("implementsOnly", interfaces, classImplementsOnly)
+
+##############################################################################
+#
+# Instance declarations
+
+class Provides(Declaration):  # Really named ProvidesClass
+    """Implement __provides__, the instance-specific specification
+
+    When an object is pickled, we pickle the interfaces that it implements.
+    """
+
+    def __init__(self, cls, *interfaces):
+        self.__args = (cls, ) + interfaces
+        self._cls = cls
+        Declaration.__init__(self, *(interfaces + (implementedBy(cls), )))
+
+    def __reduce__(self):
+        return Provides, self.__args
+
+    __module__ = 'zope.interface'
+
+    def __get__(self, inst, cls):
+        """Make sure that a class __provides__ doesn't leak to an instance
+        """
+        if inst is None and cls is self._cls:
+            # We were accessed through a class, so we are the class'
+            # provides spec. Just return this object, but only if we are
+            # being called on the same class that we were defined for:
+            return self
+
+        raise AttributeError('__provides__')
+
+ProvidesClass = Provides
+
+# Registry of instance declarations
+# This is a memory optimization to allow objects to share specifications.
+InstanceDeclarations = weakref.WeakValueDictionary()
+
+def Provides(*interfaces):
+    """Cache instance declarations
+
+      Instance declarations are shared among instances that have the same
+      declaration. The declarations are cached in a weak value dictionary.
+    """
+    spec = InstanceDeclarations.get(interfaces)
+    if spec is None:
+        spec = ProvidesClass(*interfaces)
+        InstanceDeclarations[interfaces] = spec
+
+    return spec
+
+Provides.__safe_for_unpickling__ = True
+
+
+def directlyProvides(object, *interfaces):
+    """Declare interfaces declared directly for an object
+
+      The arguments after the object are one or more interfaces or interface
+      specifications (``IDeclaration`` objects).
+
+      The interfaces given (including the interfaces in the specifications)
+      replace interfaces previously declared for the object.
+    """
+    cls = getattr(object, '__class__', None)
+    if cls is not None and getattr(cls,  '__class__', None) is cls:
+        # It's a metaclass (well, at least it could be an extension class)
+        # Note that we can't get here from Py3k tests:  there is no normal
+        # class which isn't descriptor aware.
+        if not isinstance(object,
+                          DescriptorAwareMetaClasses): #pragma NO COVER Py3k
+            raise TypeError("Attempt to make an interface declaration on a "
+                            "non-descriptor-aware class")
+
+    interfaces = _normalizeargs(interfaces)
+    if cls is None:
+        cls = type(object)
+
+    issub = False
+    for damc in DescriptorAwareMetaClasses:
+        if issubclass(cls, damc):
+            issub = True
+            break
+    if issub:
+        # we have a class or type.  We'll use a special descriptor
+        # that provides some extra caching
+        object.__provides__ = ClassProvides(object, cls, *interfaces)
+    else:
+        object.__provides__ = Provides(cls, *interfaces)
+
+
+def alsoProvides(object, *interfaces):
+    """Declare interfaces declared directly for an object
+
+    The arguments after the object are one or more interfaces or interface
+    specifications (``IDeclaration`` objects).
+
+    The interfaces given (including the interfaces in the specifications) are
+    added to the interfaces previously declared for the object.
+    """
+    directlyProvides(object, directlyProvidedBy(object), *interfaces)
+
+def noLongerProvides(object, interface):
+    """ Removes a directly provided interface from an object.
+    """
+    directlyProvides(object, directlyProvidedBy(object) - interface)
+    if interface.providedBy(object):
+        raise ValueError("Can only remove directly provided interfaces.")
+
+class ClassProvidesBaseFallback(object):
+
+    def __get__(self, inst, cls):
+        if cls is self._cls:
+            # We only work if called on the class we were defined for
+
+            if inst is None:
+                # We were accessed through a class, so we are the class'
+                # provides spec. Just return this object as is:
+                return self
+
+            return self._implements
+
+        raise AttributeError('__provides__')
+
+ClassProvidesBasePy = ClassProvidesBaseFallback # BBB
+ClassProvidesBase = ClassProvidesBaseFallback
+
+# Try to get C base:
+try:
+    import _zope_interface_coptimizations
+except ImportError:  #pragma NO COVERAGE
+    pass
+else:  #pragma NO COVERAGE
+    from _zope_interface_coptimizations import ClassProvidesBase
+
+
+class ClassProvides(Declaration, ClassProvidesBase):
+    """Special descriptor for class __provides__
+
+    The descriptor caches the implementedBy info, so that
+    we can get declarations for objects without instance-specific
+    interfaces a bit quicker.
+    """
+    def __init__(self, cls, metacls, *interfaces):
+        self._cls = cls
+        self._implements = implementedBy(cls)
+        self.__args = (cls, metacls, ) + interfaces
+        Declaration.__init__(self, *(interfaces + (implementedBy(metacls), )))
+
+    def __reduce__(self):
+        return self.__class__, self.__args
+
+    # Copy base-class method for speed
+    __get__ = ClassProvidesBase.__get__
+
+def directlyProvidedBy(object):
+    """Return the interfaces directly provided by the given object
+
+    The value returned is an ``IDeclaration``.
+    """
+    provides = getattr(object, "__provides__", None)
+    if (provides is None # no spec
+        or
+        # We might have gotten the implements spec, as an
+        # optimization. If so, it's like having only one base, that we
+        # lop off to exclude class-supplied declarations:
+        isinstance(provides, Implements)
+        ):
+        return _empty
+
+    # Strip off the class part of the spec:
+    return Declaration(provides.__bases__[:-1])
+
+def classProvides(*interfaces):
+    """Declare interfaces provided directly by a class
+
+      This function is called in a class definition.
+
+      The arguments are one or more interfaces or interface specifications
+      (``IDeclaration`` objects).
+
+      The given interfaces (including the interfaces in the specifications)
+      are used to create the class's direct-object interface specification.
+      An error will be raised if the class already has a direct interface
+      specification. In other words, it is an error to call this function more
+      than once in a class definition.
+
+      Note that the given interfaces have nothing to do with the interfaces
+      implemented by instances of the class.
+
+      This function is provided for convenience. It provides a more convenient
+      way to call directlyProvides for a class. For example::
+
+        classProvides(I1)
+
+      is equivalent to calling::
+
+        directlyProvides(theclass, I1)
+
+      after the class has been created.
+    """
+    # This entire approach is invalid under Py3K.  Don't even try to fix
+    # the coverage for this block there. :(
+
+    if PYTHON3: #pragma NO COVER
+        raise TypeError(_ADVICE_ERROR % 'provider')
+
+    frame = sys._getframe(1)
+    locals = frame.f_locals
+
+    # Try to make sure we were called from a class def
+    if (locals is frame.f_globals) or ('__module__' not in locals):
+        raise TypeError("classProvides can be used only from a "
+                        "class definition.")
+
+    if '__provides__' in locals:
+        raise TypeError(
+            "classProvides can only be used once in a class definition.")
+
+    locals["__provides__"] = _normalizeargs(interfaces)
+
+    addClassAdvisor(_classProvides_advice, depth=2)
+
+def _classProvides_advice(cls):
+    # This entire approach is invalid under Py3K.  Don't even try to fix
+    # the coverage for this block there. :(
+    interfaces = cls.__dict__['__provides__']
+    del cls.__provides__
+    directlyProvides(cls, *interfaces)
+    return cls
+
+class provider:
+    """Class decorator version of classProvides"""
+
+    def __init__(self, *interfaces):
+        self.interfaces = interfaces
+
+    def __call__(self, ob):
+        directlyProvides(ob, *self.interfaces)
+        return ob
+
+def moduleProvides(*interfaces):
+    """Declare interfaces provided by a module
+
+    This function is used in a module definition.
+
+    The arguments are one or more interfaces or interface specifications
+    (``IDeclaration`` objects).
+
+    The given interfaces (including the interfaces in the specifications) are
+    used to create the module's direct-object interface specification.  An
+    error will be raised if the module already has an interface specification.
+    In other words, it is an error to call this function more than once in a
+    module definition.
+
+    This function is provided for convenience. It provides a more convenient
+    way to call directlyProvides. For example::
+
+      moduleProvides(I1)
+
+    is equivalent to::
+
+      directlyProvides(sys.modules[__name__], I1)
+    """
+    frame = sys._getframe(1)
+    locals = frame.f_locals
+
+    # Try to make sure we were called from a module definition
+    if (locals is not frame.f_globals) or ('__name__' not in locals):
+        raise TypeError(
+            "moduleProvides can only be used from a module definition.")
+
+    if '__provides__' in locals:
+        raise TypeError(
+            "moduleProvides can only be used once in a module definition.")
+
+    locals["__provides__"] = Provides(ModuleType,
+                                      *_normalizeargs(interfaces))
+
+##############################################################################
+#
+# Declaration querying support
+
+# XXX:  is this a fossil?  Nobody calls it, no unit tests exercise it, no
+#       doctests import it, and the package __init__ doesn't import it.
+def ObjectSpecification(direct, cls):
+    """Provide object specifications
+
+    These combine information for the object and for its classes.
+    """
+    return Provides(cls, direct) #pragma NO COVER fossil
+
+def getObjectSpecificationFallback(ob):
+
+    provides = getattr(ob, '__provides__', None)
+    if provides is not None:
+        if isinstance(provides, SpecificationBase):
+            return provides
+
+    try:
+        cls = ob.__class__
+    except AttributeError:
+        # We can't get the class, so just consider provides
+        return _empty
+
+    return implementedBy(cls)
+
+getObjectSpecification = getObjectSpecificationFallback
+
+def providedByFallback(ob):
+
+    # Here we have either a special object, an old-style declaration
+    # or a descriptor
+
+    # Try to get __providedBy__
+    try:
+        r = ob.__providedBy__
+    except AttributeError:
+        # Not set yet. Fall back to lower-level thing that computes it
+        return getObjectSpecification(ob)
+
+    try:
+        # We might have gotten a descriptor from an instance of a
+        # class (like an ExtensionClass) that doesn't support
+        # descriptors.  We'll make sure we got one by trying to get
+        # the only attribute, which all specs have.
+        r.extends
+
+    except AttributeError:
+
+        # The object's class doesn't understand descriptors.
+        # Sigh. We need to get an object descriptor, but we have to be
+        # careful.  We want to use the instance's __provides__, if
+        # there is one, but only if it didn't come from the class.
+
+        try:
+            r = ob.__provides__
+        except AttributeError:
+            # No __provides__, so just fall back to implementedBy
+            return implementedBy(ob.__class__)
+
+        # We need to make sure we got the __provides__ from the
+        # instance. We'll do this by making sure we don't get the same
+        # thing from the class:
+
+        try:
+            cp = ob.__class__.__provides__
+        except AttributeError:
+            # The ob doesn't have a class or the class has no
+            # provides, assume we're done:
+            return r
+
+        if r is cp:
+            # Oops, we got the provides from the class. This means
+            # the object doesn't have its own. We should use implementedBy
+            return implementedBy(ob.__class__)
+
+    return r
+providedBy = providedByFallback
+
+class ObjectSpecificationDescriptorFallback(object):
+    """Implement the `__providedBy__` attribute
+
+    The `__providedBy__` attribute computes the interfaces provided by
+    an object.
+    """
+
+    def __get__(self, inst, cls):
+        """Get an object specification for an object
+        """
+        if inst is None:
+            return getObjectSpecification(cls)
+
+        provides = getattr(inst, '__provides__', None)
+        if provides is not None:
+            return provides
+
+        return implementedBy(cls)
+
+ObjectSpecificationDescriptor = ObjectSpecificationDescriptorFallback
+
+##############################################################################
+
+def _normalizeargs(sequence, output = None):
+    """Normalize declaration arguments
+
+    Declaration arguments may contain Declarations, tuples, or single
+    interfaces.
+
+    Anything but individual interfaces or implements specs will be expanded.
+    """
+    if output is None:
+        output = []
+
+    cls = sequence.__class__
+    if InterfaceClass in cls.__mro__ or Implements in cls.__mro__:
+        output.append(sequence)
+    else:
+        for v in sequence:
+            _normalizeargs(v, output)
+
+    return output
+
+_empty = Declaration()
+
+try:
+    import _zope_interface_coptimizations
+except ImportError: #pragma NO COVER
+    pass
+else: #pragma NO COVER PyPy
+    from _zope_interface_coptimizations import implementedBy
+    from _zope_interface_coptimizations import providedBy
+    from _zope_interface_coptimizations import getObjectSpecification
+    from _zope_interface_coptimizations import ObjectSpecificationDescriptor
+
+objectSpecificationDescriptor = ObjectSpecificationDescriptor()
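
A sketch of the declaration API defined above (the I1/I2/Greeter names are
illustrative): class-level declarations come from @implementer, instance-level
ones from directlyProvides/alsoProvides, and providedBy combines the two.

    from zope.interface import Interface, implementer
    from zope.interface.declarations import alsoProvides, directlyProvidedBy, providedBy

    class I1(Interface):
        pass

    class I2(Interface):
        pass

    @implementer(I1)                 # instances of Greeter implement I1
    class Greeter(object):
        pass

    g = Greeter()
    alsoProvides(g, I2)              # add a direct declaration on this instance only

    print([i.getName() for i in providedBy(g)])          # ['I2', 'I1']
    print([i.getName() for i in directlyProvidedBy(g)])  # ['I2']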
diff --git a/ThirdParty/ZopeInterface/zope/interface/document.py b/ThirdParty/ZopeInterface/zope/interface/document.py
new file mode 100644
index 0000000..7dfeb5e
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/document.py
@@ -0,0 +1,104 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+""" Pretty-Print an Interface object as structured text (Yum)
+
+This module provides a function, asStructuredText, for rendering an
+interface as structured text.
+"""
+import zope.interface
+
+def asStructuredText(I, munge=0):
+    """ Output structured text format.  Note, this will whack any existing
+    'structured' format of the text.  """
+
+    r = [I.getName()]
+    outp = r.append
+    level = 1
+
+    if I.getDoc():
+        outp(_justify_and_indent(_trim_doc_string(I.getDoc()), level))
+
+    bases = [base
+             for base in I.__bases__
+             if base is not zope.interface.Interface
+             ]
+    if bases:
+        outp(_justify_and_indent("This interface extends:", level, munge))
+        level += 1
+        for b in bases:
+            item = "o %s" % b.getName()
+            outp(_justify_and_indent(_trim_doc_string(item), level, munge))
+        level -= 1
+
+    namesAndDescriptions = sorted(I.namesAndDescriptions())
+
+    outp(_justify_and_indent("Attributes:", level, munge))
+    level += 1
+    for name, desc in namesAndDescriptions:
+        if not hasattr(desc, 'getSignatureString'):   # ugh...
+            item = "%s -- %s" % (desc.getName(),
+                                 desc.getDoc() or 'no documentation')
+            outp(_justify_and_indent(_trim_doc_string(item), level, munge))
+    level -= 1
+
+    outp(_justify_and_indent("Methods:", level, munge))
+    level += 1
+    for name, desc in namesAndDescriptions:
+        if hasattr(desc, 'getSignatureString'):   # ugh...
+            item = "%s%s -- %s" % (desc.getName(),
+                                   desc.getSignatureString(),
+                                   desc.getDoc() or 'no documentation')
+            outp(_justify_and_indent(_trim_doc_string(item), level, munge))
+
+    return "\n\n".join(r) + "\n\n"
+
+
+def _trim_doc_string(text):
+    """ Trims a doc string to make it format
+    correctly with structured text. """
+
+    lines = text.replace('\r\n', '\n').split('\n')
+    nlines = [lines.pop(0)]
+    if lines:
+        min_indent = min([len(line) - len(line.lstrip())
+                          for line in lines])
+        for line in lines:
+            nlines.append(line[min_indent:])
+
+    return '\n'.join(nlines)
+
+
+def _justify_and_indent(text, level, munge=0, width=72):
+    """ indent and justify text, rejustify (munge) if specified """
+
+    indent = " " * level
+
+    if munge:
+        lines = []
+        line = indent
+        text = text.split()
+
+        for word in text:
+            line = ' '.join([line, word])
+            if len(line) > width:
+                lines.append(line)
+                line = indent
+        else:
+            lines.append(line)
+
+        return '\n'.join(lines)
+
+    else:
+        return indent + \
+            text.strip().replace("\r\n", "\n") .replace("\n", "\n" + indent)
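
A small sketch of asStructuredText (the IPinger interface is illustrative, not
part of the upstream sources).

    from zope.interface import Attribute, Interface
    from zope.interface.document import asStructuredText

    class IPinger(Interface):
        """Sends pings to a host."""
        host = Attribute("Target host name")
        def ping(count=1):
            """Send `count` pings and return the round-trip times."""

    # Prints the interface name, its docstring, then "Attributes:" and "Methods:" sections.
    print(asStructuredText(IPinger))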
diff --git a/ThirdParty/ZopeInterface/zope/interface/exceptions.py b/ThirdParty/ZopeInterface/zope/interface/exceptions.py
new file mode 100644
index 0000000..e9a4788
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/exceptions.py
@@ -0,0 +1,67 @@
+##############################################################################
+#
+# Copyright (c) 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Interface-specific exceptions
+"""
+
+class Invalid(Exception):
+    """A specification is violated
+    """
+
+class DoesNotImplement(Invalid):
+    """ This object does not implement """
+    def __init__(self, interface):
+        self.interface = interface
+
+    def __str__(self):
+        return """An object does not implement interface %(interface)s
+
+        """ % self.__dict__
+
+class BrokenImplementation(Invalid):
+    """An attribute is not completely implemented.
+    """
+
+    def __init__(self, interface, name):
+        self.interface=interface
+        self.name=name
+
+    def __str__(self):
+        return """An object has failed to implement interface %(interface)s
+
+        The %(name)s attribute was not provided.
+        """ % self.__dict__
+
+class BrokenMethodImplementation(Invalid):
+    """An method is not completely implemented.
+    """
+
+    def __init__(self, method, mess):
+        self.method=method
+        self.mess=mess
+
+    def __str__(self):
+        return """The implementation of %(method)s violates its contract
+        because %(mess)s.
+        """ % self.__dict__
+
+class InvalidInterface(Exception):
+    """The interface has invalid contents
+    """
+
+class BadImplements(TypeError):
+    """An implementation assertion is invalid
+
+    because it doesn't contain an interface or a sequence of valid
+    implementation assertions.
+    """
diff --git a/ThirdParty/ZopeInterface/zope/interface/interface.py b/ThirdParty/ZopeInterface/zope/interface/interface.py
new file mode 100644
index 0000000..46d3f10
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/interface.py
@@ -0,0 +1,712 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Interface object implementation
+"""
+from __future__ import generators
+
+import sys
+from types import MethodType
+from types import FunctionType
+import warnings
+import weakref
+
+from zope.interface.exceptions import Invalid
+from zope.interface.ro import ro
+
+
+CO_VARARGS = 4
+CO_VARKEYWORDS = 8
+TAGGED_DATA = '__interface_tagged_values__'
+
+_decorator_non_return = object()
+
+def invariant(call):
+    f_locals = sys._getframe(1).f_locals
+    tags = f_locals.setdefault(TAGGED_DATA, {})
+    invariants = tags.setdefault('invariants', [])
+    invariants.append(call)
+    return _decorator_non_return
+
+
+def taggedValue(key, value):
+    """Attaches a tagged value to an interface at definition time."""
+    f_locals = sys._getframe(1).f_locals
+    tagged_values = f_locals.setdefault(TAGGED_DATA, {})
+    tagged_values[key] = value
+    return _decorator_non_return
+
+
+class Element(object):
+
+    # We can't say this yet because we don't have enough
+    # infrastructure in place.
+    #
+    #implements(IElement)
+
+    def __init__(self, __name__, __doc__=''):
+        """Create an 'attribute' description
+        """
+        if not __doc__ and __name__.find(' ') >= 0:
+            __doc__ = __name__
+            __name__ = None
+
+        self.__name__=__name__
+        self.__doc__=__doc__
+        self.__tagged_values = {}
+
+    def getName(self):
+        """ Returns the name of the object. """
+        return self.__name__
+
+    def getDoc(self):
+        """ Returns the documentation for the object. """
+        return self.__doc__
+
+    def getTaggedValue(self, tag):
+        """ Returns the value associated with 'tag'. """
+        return self.__tagged_values[tag]
+
+    def queryTaggedValue(self, tag, default=None):
+        """ Returns the value associated with 'tag'. """
+        return self.__tagged_values.get(tag, default)
+
+    def getTaggedValueTags(self):
+        """ Returns a list of all tags. """
+        return self.__tagged_values.keys()
+
+    def setTaggedValue(self, tag, value):
+        """ Associates 'value' with 'key'. """
+        self.__tagged_values[tag] = value
+
+class SpecificationBasePy(object):
+
+    def providedBy(self, ob):
+        """Is the interface implemented by an object
+        """
+        spec = providedBy(ob)
+        return self in spec._implied
+
+    def implementedBy(self, cls):
+        """Test whether the specification is implemented by a class or factory.
+
+        Raise TypeError if argument is neither a class nor a callable.
+        """
+        spec = implementedBy(cls)
+        return self in spec._implied
+
+    def isOrExtends(self, interface):
+        """Is the interface the same as or extend the given interface
+        """
+        return interface in self._implied
+
+    __call__ = isOrExtends
+
+SpecificationBase = SpecificationBasePy
+try:
+    from _zope_interface_coptimizations import SpecificationBase
+except ImportError: #pragma NO COVER
+    pass
+
+_marker = object()
+class InterfaceBasePy(object):
+    """Base class that wants to be replaced with a C base :)
+    """
+
+    def __call__(self, obj, alternate=_marker):
+        """Adapt an object to the interface
+        """
+        conform = getattr(obj, '__conform__', None)
+        if conform is not None:
+            adapter = self._call_conform(conform)
+            if adapter is not None:
+                return adapter
+
+        adapter = self.__adapt__(obj)
+
+        if adapter is not None:
+            return adapter
+        elif alternate is not _marker:
+            return alternate
+        else:
+            raise TypeError("Could not adapt", obj, self)
+
+    def __adapt__(self, obj):
+        """Adapt an object to the reciever
+        """
+        if self.providedBy(obj):
+            return obj
+
+        for hook in adapter_hooks:
+            adapter = hook(self, obj)
+            if adapter is not None:
+                return adapter
+
+
+InterfaceBase = InterfaceBasePy
+try:
+    from _zope_interface_coptimizations import InterfaceBase
+except ImportError: #pragma NO COVER
+    pass
+
+
+adapter_hooks = []
+try:
+    from _zope_interface_coptimizations import adapter_hooks
+except ImportError: #pragma NO COVER
+    pass
+
+
+class Specification(SpecificationBase):
+    """Specifications
+
+    An interface specification is used to track interface declarations
+    and component registrations.
+
+    This class is a base class for both interfaces themselves and for
+    interface specifications (declarations).
+
+    Specifications are mutable.  If you reassign their bases, their
+    relations with other specifications are adjusted accordingly.
+    """
+
+    # Copy some base class methods for speed
+    isOrExtends = SpecificationBase.isOrExtends
+    providedBy = SpecificationBase.providedBy
+
+    def __init__(self, bases=()):
+        self._implied = {}
+        self.dependents = weakref.WeakKeyDictionary()
+        self.__bases__ = tuple(bases)
+
+    def subscribe(self, dependent):
+        self.dependents[dependent] = self.dependents.get(dependent, 0) + 1
+
+    def unsubscribe(self, dependent):
+        n = self.dependents.get(dependent, 0) - 1
+        if not n:
+            del self.dependents[dependent]
+        elif n > 0:
+            self.dependents[dependent] = n
+        else:
+            raise KeyError(dependent)
+
+    def __setBases(self, bases):
+        # Register ourselves as a dependent of our old bases
+        for b in self.__bases__:
+            b.unsubscribe(self)
+
+        # Register ourselves as a dependent of our bases
+        self.__dict__['__bases__'] = bases
+        for b in bases:
+            b.subscribe(self)
+
+        self.changed(self)
+
+    __bases__ = property(
+
+        lambda self: self.__dict__.get('__bases__', ()),
+        __setBases,
+        )
+
+    def changed(self, originally_changed):
+        """We, or something we depend on, have changed
+        """
+        try:
+            del self._v_attrs
+        except AttributeError:
+            pass
+
+        implied = self._implied
+        implied.clear()
+
+        ancestors = ro(self)
+
+        try:
+            if Interface not in ancestors:
+                ancestors.append(Interface)
+        except NameError:
+            pass # defining Interface itself
+
+        self.__sro__ = tuple(ancestors)
+        self.__iro__ = tuple([ancestor for ancestor in ancestors
+                              if isinstance(ancestor, InterfaceClass)
+                              ])
+
+        for ancestor in ancestors:
+            # We directly imply our ancestors:
+            implied[ancestor] = ()
+
+        # Now, advise our dependents of change:
+        for dependent in self.dependents.keys():
+            dependent.changed(originally_changed)
+
+
+    def interfaces(self):
+        """Return an iterator for the interfaces in the specification.
+        """
+        seen = {}
+        for base in self.__bases__:
+            for interface in base.interfaces():
+                if interface not in seen:
+                    seen[interface] = 1
+                    yield interface
+
+
+    def extends(self, interface, strict=True):
+        """Does the specification extend the given interface?
+
+        Test whether an interface in the specification extends the
+        given interface
+        """
+        return ((interface in self._implied)
+                and
+                ((not strict) or (self != interface))
+                )
+
+    def weakref(self, callback=None):
+        return weakref.ref(self, callback)
+
+    def get(self, name, default=None):
+        """Query for an attribute description
+        """
+        try:
+            attrs = self._v_attrs
+        except AttributeError:
+            attrs = self._v_attrs = {}
+        attr = attrs.get(name)
+        if attr is None:
+            for iface in self.__iro__:
+                attr = iface.direct(name)
+                if attr is not None:
+                    attrs[name] = attr
+                    break
+
+        if attr is None:
+            return default
+        else:
+            return attr
+
+class InterfaceClass(Element, InterfaceBase, Specification):
+    """Prototype (scarecrow) Interfaces Implementation."""
+
+    # We can't say this yet because we don't have enough
+    # infrastructure in place.
+    #
+    #implements(IInterface)
+
+    def __init__(self, name, bases=(), attrs=None, __doc__=None,
+                 __module__=None):
+
+        if attrs is None:
+            attrs = {}
+
+        if __module__ is None:
+            __module__ = attrs.get('__module__')
+            if isinstance(__module__, str):
+                del attrs['__module__']
+            else:
+                try:
+                    # Figure out what module defined the interface.
+                    # This is how cPython figures out the module of
+                    # a class, but of course it does it in C. :-/
+                    __module__ = sys._getframe(1).f_globals['__name__']
+                except (AttributeError, KeyError): #pragma NO COVERAGE
+                    pass
+
+        self.__module__ = __module__
+
+        d = attrs.get('__doc__')
+        if d is not None:
+            if not isinstance(d, Attribute):
+                if __doc__ is None:
+                    __doc__ = d
+                del attrs['__doc__']
+
+        if __doc__ is None:
+            __doc__ = ''
+
+        Element.__init__(self, name, __doc__)
+
+        tagged_data = attrs.pop(TAGGED_DATA, None)
+        if tagged_data is not None:
+            for key, val in tagged_data.items():
+                self.setTaggedValue(key, val)
+
+        for base in bases:
+            if not isinstance(base, InterfaceClass):
+                raise TypeError('Expected base interfaces')
+
+        Specification.__init__(self, bases)
+
+        # Make sure that all recorded attributes (and methods) are of type
+        # `Attribute` and `Method`
+        for name, attr in list(attrs.items()):
+            if name in ('__locals__', '__qualname__'):
+                # __locals__: Python 3 sometimes adds this.
+                # __qualname__: PEP 3155 (Python 3.3+)
+                del attrs[name]
+                continue
+            if isinstance(attr, Attribute):
+                attr.interface = self
+                if not attr.__name__:
+                    attr.__name__ = name
+            elif isinstance(attr, FunctionType):
+                attrs[name] = fromFunction(attr, self, name=name)
+            elif attr is _decorator_non_return:
+                del attrs[name]
+            else:
+                raise InvalidInterface("Concrete attribute, " + name)
+
+        self.__attrs = attrs
+
+        self.__identifier__ = "%s.%s" % (self.__module__, self.__name__)
+
+    def interfaces(self):
+        """Return an iterator for the interfaces in the specification.
+        """
+        yield self
+
+    def getBases(self):
+        return self.__bases__
+
+    def isEqualOrExtendedBy(self, other):
+        """Same interface or extends?"""
+        return self == other or other.extends(self)
+
+    def names(self, all=False):
+        """Return the attribute names defined by the interface."""
+        if not all:
+            return self.__attrs.keys()
+
+        r = self.__attrs.copy()
+
+        for base in self.__bases__:
+            r.update(dict.fromkeys(base.names(all)))
+
+        return r.keys()
+
+    def __iter__(self):
+        return iter(self.names(all=True))
+
+    def namesAndDescriptions(self, all=False):
+        """Return attribute names and descriptions defined by interface."""
+        if not all:
+            return self.__attrs.items()
+
+        r = {}
+        for base in self.__bases__[::-1]:
+            r.update(dict(base.namesAndDescriptions(all)))
+
+        r.update(self.__attrs)
+
+        return r.items()
+
+    def getDescriptionFor(self, name):
+        """Return the attribute description for the given name."""
+        r = self.get(name)
+        if r is not None:
+            return r
+
+        raise KeyError(name)
+
+    __getitem__ = getDescriptionFor
+
+    def __contains__(self, name):
+        return self.get(name) is not None
+
+    def direct(self, name):
+        return self.__attrs.get(name)
+
+    def queryDescriptionFor(self, name, default=None):
+        return self.get(name, default)
+
+    def deferred(self):
+        """Return a defered class corresponding to the interface."""
+        if hasattr(self, "_deferred"): return self._deferred
+
+        klass={}
+        exec("class %s: pass" % self.__name__, klass)
+        klass=klass[self.__name__]
+
+        self.__d(klass)
+
+        self._deferred=klass
+
+        return klass
+
+    def validateInvariants(self, obj, errors=None):
+        """validate object to defined invariants."""
+        for call in self.queryTaggedValue('invariants', []):
+            try:
+                call(obj)
+            except Invalid as e:
+                if errors is None:
+                    raise
+                else:
+                    errors.append(e)
+        for base in self.__bases__:
+            try:
+                base.validateInvariants(obj, errors)
+            except Invalid:
+                if errors is None:
+                    raise
+        if errors:
+            raise Invalid(errors)
+
+    #XXX I believe this is a fossil:  nobody calls it anywhere.
+    #def _getInterface(self, ob, name):
+    #    """Retrieve a named interface."""
+    #    return None
+
+    def __d(self, klass):
+        for k, v in self.__attrs.items():
+            if isinstance(v, Method) and not (k in klass.__dict__):
+                setattr(klass, k, v)
+
+        for b in self.__bases__:
+            b.__d(klass)
+
+    def __repr__(self):
+        try:
+            return self._v_repr
+        except AttributeError:
+            name = self.__name__
+            m = self.__module__
+            if m:
+                name = '%s.%s' % (m, name)
+            r = "<%s %s>" % (self.__class__.__name__, name)
+            self._v_repr = r
+            return r
+
+    def _call_conform(self, conform):
+        try:
+            return conform(self)
+        except TypeError: #pragma NO COVER
+            # We got a TypeError. It might be an error raised by
+            # the __conform__ implementation, or *we* may have
+            # made the TypeError by calling an unbound method
+            # (object is a class).  In the latter case, we behave
+            # as though there is no __conform__ method. We can
+            # detect this case by checking whether there is more
+            # than one traceback object in the traceback chain:
+            if sys.exc_info()[2].tb_next is not None:
+                # There is more than one entry in the chain, so
+                # reraise the error:
+                raise
+            # This clever trick is from Phillip Eby
+
+        return None #pragma NO COVER
+
+    def __reduce__(self):
+        return self.__name__
+
+    def __cmp(self, other):
+        # Yes, I did mean to name this __cmp, rather than __cmp__.
+        # It is a private method used by __lt__ and __gt__.
+        # I don't want to override __eq__ because I want the default
+        # __eq__, which is really fast.
+        """Make interfaces sortable
+
+        TODO: It would be nice if:
+
+           More specific interfaces should sort before less specific ones.
+           Otherwise, sort on name and module.
+
+           But this is too complicated, and we're going to punt on it
+           for now.
+
+        For now, sort on interface and module name.
+
+        None is treated as a pseudo interface that implies the loosest
+        contract possible, no contract. For that reason, all interfaces
+        sort before None.
+
+        """
+        if other is None:
+            return -1
+
+        n1 = (getattr(self, '__name__', ''), getattr(self,  '__module__', ''))
+        n2 = (getattr(other, '__name__', ''), getattr(other,  '__module__', ''))
+
+        # This spelling works under Python3, which doesn't have cmp().
+        return (n1 > n2) - (n1 < n2)
+
+    def __hash__(self):
+        d = self.__dict__
+        if '__module__' not in d or '__name__' not in d: #pragma NO COVER
+            warnings.warn('Hashing uninitialized InterfaceClass instance')
+            return 1
+        return hash((self.__name__, self.__module__))
+
+    def __eq__(self, other):
+        c = self.__cmp(other)
+        return c == 0
+
+    def __ne__(self, other):
+        c = self.__cmp(other)
+        return c != 0
+
+    def __lt__(self, other):
+        c = self.__cmp(other)
+        return c < 0
+
+    def __le__(self, other):
+        c = self.__cmp(other)
+        return c <= 0
+
+    def __gt__(self, other):
+        c = self.__cmp(other)
+        return c > 0
+
+    def __ge__(self, other):
+        c = self.__cmp(other)
+        return c >= 0
+
+
+Interface = InterfaceClass("Interface", __module__ = 'zope.interface')
+
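+# --- Editorial illustration, not part of the upstream module: a small sketch
+# of the ordering defined by __cmp above.  IA and IB are hypothetical names
+# used only here.
+def _example_interface_ordering():
+    class IA(Interface):
+        pass
+
+    class IB(Interface):
+        pass
+
+    # Interfaces sort on (name, module); None sorts after any interface.
+    assert IA < IB
+    assert IA < None
+    assert sorted([IB, IA]) == [IA, IB]
+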
+class Attribute(Element):
+    """Attribute descriptions
+    """
+
+    # We can't say this yet because we don't have enough
+    # infrastructure in place.
+    #
+    # implements(IAttribute)
+
+    interface = None
+
+
+class Method(Attribute):
+    """Method interfaces
+
+    The idea here is that you have objects that describe methods.
+    This provides an opportunity for rich meta-data.
+    """
+
+    # We can't say this yet because we don't have enough
+    # infrastructure in place.
+    #
+    # implements(IMethod)
+
+    positional = required = ()
+    _optional = varargs = kwargs = None
+    def _get_optional(self):
+        if self._optional is None:
+            return {}
+        return self._optional
+    def _set_optional(self, opt):
+        self._optional = opt
+    def _del_optional(self):
+        self._optional = None
+    optional = property(_get_optional, _set_optional, _del_optional)
+
+    def __call__(self, *args, **kw):
+        raise BrokenImplementation(self.interface, self.__name__)
+
+    def getSignatureInfo(self):
+        return {'positional': self.positional,
+                'required': self.required,
+                'optional': self.optional,
+                'varargs': self.varargs,
+                'kwargs': self.kwargs,
+                }
+
+    def getSignatureString(self):
+        sig = []
+        for v in self.positional:
+            sig.append(v)
+            if v in self.optional.keys():
+                sig[-1] += "=" + repr(self.optional[v])
+        if self.varargs:
+            sig.append("*" + self.varargs)
+        if self.kwargs:
+            sig.append("**" + self.kwargs)
+
+        return "(%s)" % ", ".join(sig)
+
+def fromFunction(func, interface=None, imlevel=0, name=None):
+    name = name or func.__name__
+    method = Method(name, func.__doc__)
+    defaults = getattr(func, '__defaults__', None) or ()
+    code = func.__code__
+    # Number of positional arguments
+    na = code.co_argcount-imlevel
+    names = code.co_varnames[imlevel:]
+    opt = {}
+    # Number of required arguments
+    nr = na-len(defaults)
+    if nr < 0:
+        defaults=defaults[-nr:]
+        nr = 0
+
+    # Determine the optional arguments.
+    opt.update(dict(zip(names[nr:], defaults)))
+
+    method.positional = names[:na]
+    method.required = names[:nr]
+    method.optional = opt
+
+    argno = na
+
+    # Determine the function's variable argument's name (i.e. *args)
+    if code.co_flags & CO_VARARGS:
+        method.varargs = names[argno]
+        argno = argno + 1
+    else:
+        method.varargs = None
+
+    # Determine the function's keyword argument's name (i.e. **kw)
+    if code.co_flags & CO_VARKEYWORDS:
+        method.kwargs = names[argno]
+    else:
+        method.kwargs = None
+
+    method.interface = interface
+
+    for key, value in func.__dict__.items():
+        method.setTaggedValue(key, value)
+
+    return method
+
+
+def fromMethod(meth, interface=None, name=None):
+    if isinstance(meth, MethodType):
+        func = meth.__func__
+    else:
+        func = meth
+    return fromFunction(func, interface, imlevel=1, name=name)
+
+
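+# --- Editorial illustration, not part of the upstream module: a minimal
+# sketch of what fromFunction() extracts from a plain function.  The function
+# `sample` is hypothetical and exists only for this example.
+def _example_from_function():
+    def sample(a, b=1, *args, **kw):
+        "A sample callable."
+
+    m = fromFunction(sample)
+    assert m.getSignatureString() == "(a, b=1, *args, **kw)"
+
+    info = m.getSignatureInfo()
+    assert info['required'] == ('a',)
+    assert info['optional'] == {'b': 1}
+    assert info['varargs'] == 'args' and info['kwargs'] == 'kw'
+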
+# Now we can create the interesting interfaces and wire them up:
+def _wire():
+    from zope.interface.declarations import classImplements
+
+    from zope.interface.interfaces import IAttribute
+    classImplements(Attribute, IAttribute)
+
+    from zope.interface.interfaces import IMethod
+    classImplements(Method, IMethod)
+
+    from zope.interface.interfaces import IInterface
+    classImplements(InterfaceClass, IInterface)
+
+    from zope.interface.interfaces import ISpecification
+    classImplements(Specification, ISpecification)
+
+# We import this here to deal with module dependencies.
+from zope.interface.declarations import implementedBy
+from zope.interface.declarations import providedBy
+from zope.interface.exceptions import InvalidInterface
+from zope.interface.exceptions import BrokenImplementation
diff --git a/ThirdParty/ZopeInterface/zope/interface/interfaces.py b/ThirdParty/ZopeInterface/zope/interface/interfaces.py
new file mode 100644
index 0000000..b5551be
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/interfaces.py
@@ -0,0 +1,1288 @@
+##############################################################################
+#
+# Copyright (c) 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Interface Package Interfaces
+"""
+__docformat__ = 'restructuredtext'
+
+from zope.interface.interface import Attribute
+from zope.interface.interface import Interface
+from zope.interface.declarations import implementer
+from zope.interface._compat import _u
+
+_BLANK = _u('')
+
+class IElement(Interface):
+    """Objects that have basic documentation and tagged values.
+    """
+
+    __name__ = Attribute('__name__', 'The object name')
+    __doc__  = Attribute('__doc__', 'The object doc string')
+
+    def getTaggedValue(tag):
+        """Returns the value associated with `tag`.
+
+        Raise a `KeyError` if the tag isn't set.
+        """
+
+    def queryTaggedValue(tag, default=None):
+        """Returns the value associated with `tag`.
+
+        Return the default value if the tag isn't set.
+        """
+
+    def getTaggedValueTags():
+        """Returns a list of all tags."""
+
+    def setTaggedValue(tag, value):
+        """Associates `value` with `key`."""
+
+
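+# --- Editorial illustration, not part of the upstream module: a short sketch
+# of the tagged-value protocol described by IElement.  IExample is a
+# hypothetical interface used only here.
+def _example_tagged_values():
+    class IExample(Interface):
+        pass
+
+    IExample.setTaggedValue('author', 'someone')
+    assert IExample.getTaggedValue('author') == 'someone'
+    assert IExample.queryTaggedValue('missing', 42) == 42
+    assert 'author' in list(IExample.getTaggedValueTags())
+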
+class IAttribute(IElement):
+    """Attribute descriptors"""
+
+    interface = Attribute('interface',
+                          'Stores the interface instance in which the '
+                          'attribute is located.')
+
+
+class IMethod(IAttribute):
+    """Method attributes"""
+
+    def getSignatureInfo():
+        """Returns the signature information.
+
+        This method returns a dictionary with the following keys:
+
+        o `positional` - All positional arguments.
+
+        o `required` - A list of all required arguments.
+
+        o `optional` - A list of all optional arguments.
+
+        o `varargs` - The name of the varargs argument.
+
+        o `kwargs` - The name of the kwargs argument.
+        """
+
+    def getSignatureString():
+        """Return a signature string suitable for inclusion in documentation.
+
+        This method returns the function signature string. For example, if you
+        have `func(a, b, c=1, d='f')`, then the signature string is `(a, b,
+        c=1, d='f')`.
+        """
+
+class ISpecification(Interface):
+    """Object Behavioral specifications"""
+
+    def providedBy(object):
+        """Test whether the interface is implemented by the object
+
+        Return true if the object asserts that it implements the
+        interface, including asserting that it implements an extended
+        interface.
+        """
+
+    def implementedBy(class_):
+        """Test whether the interface is implemented by instances of the class
+
+        Return true if the class asserts that its instances implement the
+        interface, including asserting that they implement an extended
+        interface.
+        """
+
+    def isOrExtends(other):
+        """Test whether the specification is or extends another
+        """
+
+    def extends(other, strict=True):
+        """Test whether a specification extends another
+
+        The specification extends other if it has other as a base
+        interface or if one of its bases extends other.
+
+        If strict is false, then the specification extends itself.
+        """
+
+    def weakref(callback=None):
+        """Return a weakref to the specification
+
+        This method is, regrettably, needed to allow weakrefs to be
+        computed to security-proxied specifications.  While the
+        zope.interface package does not require zope.security or
+        zope.proxy, it has to be able to coexist with it.
+
+        """
+
+    __bases__ = Attribute("""Base specifications
+
+    A tuple of specifications from which this specification is
+    directly derived.
+
+    """)
+
+    __sro__ = Attribute("""Specification-resolution order
+
+    A tuple of the specification and all of its ancestor
+    specifications from most specific to least specific.
+
+    (This is similar to the method-resolution order for new-style classes.)
+    """)
+
+    __iro__ = Attribute("""Interface-resolution order
+
+    A tuple of the specification's ancestor interfaces from
+    most specific to least specific.  The specification itself is
+    included if it is an interface.
+
+    (This is similar to the method-resolution order for new-style classes.)
+    """)
+
+    def get(name, default=None):
+        """Look up the description for a name
+
+        If the named attribute is not defined, the default is
+        returned.
+        """
+
+
+class IInterface(ISpecification, IElement):
+    """Interface objects
+
+    Interface objects describe the behavior of an object by containing
+    useful information about the object.  This information includes:
+
+      o Prose documentation about the object.  In Python terms, this
+        is called the "doc string" of the interface.  In this element,
+        you describe how the object works in prose language and any
+        other useful information about the object.
+
+      o Descriptions of attributes.  Attribute descriptions include
+        the name of the attribute and prose documentation describing
+        the attribute's usage.
+
+      o Descriptions of methods.  Method descriptions can include:
+
+        - Prose "doc string" documentation about the method and its
+          usage.
+
+        - A description of the method's arguments; how many arguments
+          are expected, optional arguments and their default values,
+          the position of arguments in the signature, whether the
+          method accepts arbitrary arguments and whether the method
+          accepts arbitrary keyword arguments.
+
+      o Optional tagged data.  Interface objects (and their attributes and
+        methods) can have optional, application specific tagged data
+        associated with them.  Example uses for this are examples,
+        security assertions, pre/post conditions, and other possible
+        information you may want to associate with an Interface or its
+        attributes.
+
+    Not all of this information is mandatory.  For example, you may
+    only want the methods of your interface to have prose
+    documentation and not describe the arguments of the method in
+    exact detail.  Interface objects are flexible and let you give or
+    take any of these components.
+
+    Interfaces are created with the Python class statement using
+    either Interface.Interface or another interface, as in::
+
+      from zope.interface import Interface
+
+      class IMyInterface(Interface):
+        '''Interface documentation'''
+
+        def meth(arg1, arg2):
+            '''Documentation for meth'''
+
+        # Note that there is no self argument
+
+      class IMySubInterface(IMyInterface):
+        '''Interface documentation'''
+
+        def meth2():
+            '''Documentation for meth2'''
+
+    You use interfaces in two ways:
+
+    o You assert that your objects implement the interfaces.
+
+      There are several ways that you can assert that an object
+      implements an interface:
+
+      1. Call zope.interface.implements in your class definition.
+
+      2. Call zope.interface.directlyProvides on your object.
+
+      3. Call 'zope.interface.classImplements' to assert that instances
+         of a class implement an interface.
+
+         For example::
+
+           from zope.interface import classImplements
+
+           classImplements(some_class, some_interface)
+
+         This approach is useful when it is not an option to modify
+         the class source.  Note that this doesn't affect what the
+         class itself implements, but only what its instances
+         implement.
+
+    o You query interface meta-data. See the IInterface methods and
+      attributes for details.
+
+    """
+
+    def names(all=False):
+        """Get the interface attribute names
+
+        Return a sequence of the names of the attributes, including
+        methods, included in the interface definition.
+
+        Normally, only directly defined attributes are included. If
+        a true positional or keyword argument is given, then
+        attributes defined by base classes will be included.
+        """
+
+    def namesAndDescriptions(all=False):
+        """Get the interface attribute names and descriptions
+
+        Return a sequence of the names and descriptions of the
+        attributes, including methods, as name-value pairs, included
+        in the interface definition.
+
+        Normally, only directly defined attributes are included. If
+        a true positional or keyword argument is given, then
+        attributes defined by base classes will be included.
+        """
+
+    def __getitem__(name):
+        """Get the description for a name
+
+        If the named attribute is not defined, a KeyError is raised.
+        """
+
+    def direct(name):
+        """Get the description for the name if it was defined by the interface
+
+        If the interface doesn't define the name, returns None.
+        """
+
+    def validateInvariants(obj, errors=None):
+        """Validate invariants
+
+        Validate object to defined invariants.  If errors is None,
+        raises first Invalid error; if errors is a list, appends all errors
+        to list, then raises Invalid with the errors as the first element
+        of the "args" tuple."""
+
+    def __contains__(name):
+        """Test whether the name is defined by the interface"""
+
+    def __iter__():
+        """Return an iterator over the names defined by the interface
+
+        The names iterated include all of the names defined by the
+        interface directly and indirectly by base interfaces.
+        """
+
+    __module__ = Attribute("""The name of the module defining the interface""")
+
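+# --- Editorial illustration, not part of the upstream module: a small sketch
+# of the introspection API described by IInterface.  IBase and IDerived are
+# hypothetical interfaces used only here.
+def _example_interface_introspection():
+    class IBase(Interface):
+        def ping():
+            "Ping."
+
+    class IDerived(IBase):
+        def pong():
+            "Pong."
+
+    # names() lists directly defined attributes; all=True adds inherited ones.
+    assert set(IDerived.names()) == set(['pong'])
+    assert set(IDerived.names(all=True)) == set(['ping', 'pong'])
+
+    # __contains__, __getitem__ and iteration cover direct and inherited names.
+    assert 'ping' in IDerived
+    assert IDerived['ping'].__doc__ == 'Ping.'
+    assert sorted(IDerived) == ['ping', 'pong']
+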
+class IDeclaration(ISpecification):
+    """Interface declaration
+
+    Declarations are used to express the interfaces implemented by
+    classes or provided by objects.
+    """
+
+    def __contains__(interface):
+        """Test whether an interface is in the specification
+
+        Return true if the given interface is one of the interfaces in
+        the specification and false otherwise.
+        """
+
+    def __iter__():
+        """Return an iterator for the interfaces in the specification
+        """
+
+    def flattened():
+        """Return an iterator of all included and extended interfaces
+
+        An iterator is returned for all interfaces either included in
+        or extended by interfaces included in the specifications
+        without duplicates. The interfaces are in "interface
+        resolution order". The interface resolution order is such that
+        base interfaces are listed after interfaces that extend them
+        and, otherwise, interfaces are included in the order that they
+        were defined in the specification.
+        """
+
+    def __sub__(interfaces):
+        """Create an interface specification with some interfaces excluded
+
+        The argument can be an interface or an interface
+        specification.  The interface or interfaces given in a
+        specification are subtracted from the interface specification.
+
+        Removing an interface that is not in the specification does
+        not raise an error. Doing so has no effect.
+
+        Removing an interface also removes sub-interfaces of the interface.
+
+        """
+
+    def __add__(interfaces):
+        """Create an interface specification with some interfaces added
+
+        The argument can be an interface or an interface
+        specification.  The interface or interfaces given in a
+        specification are added to the interface specification.
+
+        Adding an interface that is already in the specification does
+        not raise an error. Doing so has no effect.
+        """
+
+    def __nonzero__():
+        """Return a true value of the interface specification is non-empty
+        """
+
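+# --- Editorial illustration, not part of the upstream module: a minimal
+# sketch of the declaration arithmetic described by IDeclaration, using the
+# Declaration factory from zope.interface.declarations.  IA and IB are
+# hypothetical interfaces used only here.
+def _example_declaration_arithmetic():
+    from zope.interface import Declaration
+
+    class IA(Interface):
+        pass
+
+    class IB(Interface):
+        pass
+
+    decl = Declaration(IA, IB)
+    assert IA in decl and IB in decl
+
+    # Subtracting removes an interface (and its sub-interfaces).
+    assert IB not in (decl - IB)
+
+    # Adding includes interfaces not already present.
+    assert IA in (Declaration(IB) + IA)
+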
+class IInterfaceDeclaration(Interface):
+    """Declare and check the interfaces of objects
+
+    The functions defined in this interface are used to declare the
+    interfaces that objects provide and to query the interfaces that have
+    been declared.
+
+    Interfaces can be declared for objects in two ways:
+
+    - Interfaces are declared for instances of the object's class
+
+    - Interfaces are declared for the object directly.
+
+    The interfaces declared for an object are, therefore, the union of
+    interfaces declared for the object directly and the interfaces
+    declared for instances of the object's class.
+
+    Note that we say that a class implements the interfaces provided
+    by its instances.  An instance can also provide interfaces
+    directly.  The interfaces provided by an object are the union of
+    the interfaces provided directly and the interfaces implemented by
+    the class.
+    """
+
+    def providedBy(ob):
+        """Return the interfaces provided by an object
+
+        This is the union of the interfaces directly provided by an
+        object and interfaces implemented by its class.
+
+        The value returned is an IDeclaration.
+        """
+
+    def implementedBy(class_):
+        """Return the interfaces implemented for a class' instances
+
+        The value returned is an IDeclaration.
+        """
+
+    def classImplements(class_, *interfaces):
+        """Declare additional interfaces implemented for instances of a class
+
+        The arguments after the class are one or more interfaces or
+        interface specifications (IDeclaration objects).
+
+        The interfaces given (including the interfaces in the
+        specifications) are added to any interfaces previously
+        declared.
+
+        Consider the following example::
+
+          class C(A, B):
+             ...
+
+          classImplements(C, I1, I2)
+
+
+        Instances of ``C`` provide ``I1``, ``I2``, and whatever interfaces
+        instances of ``A`` and ``B`` provide.
+        """
+
+    def implementer(*interfaces):
+        """Create a decorator for declaring interfaces implemented by a facory
+
+        A callable is returned that makes an implements declaration on
+        objects passed to it.
+        """
+
+    def classImplementsOnly(class_, *interfaces):
+        """Declare the only interfaces implemented by instances of a class
+
+        The arguments after the class are one or more interfaces or
+        interface specifications (IDeclaration objects).
+
+        The interfaces given (including the interfaces in the
+        specifications) replace any previous declarations.
+
+        Consider the following example::
+
+          class C(A, B):
+             ...
+
+          classImplements(C, IA, IB, IC)
+          classImplementsOnly(C, I1, I2)
+
+        Instances of ``C`` provide only ``I1`` and ``I2``, regardless of
+        what interfaces instances of ``A`` and ``B`` implement.
+        """
+
+    def implementer_only(*interfaces):
+        """Create a decorator for declaring the only interfaces implemented
+
+        A callable is returned that makes an implements declaration on
+        objects passed to it.
+        """
+
+    def directlyProvidedBy(object):
+        """Return the interfaces directly provided by the given object
+
+        The value returned is an IDeclaration.
+        """
+
+    def directlyProvides(object, *interfaces):
+        """Declare interfaces declared directly for an object
+
+        The arguments after the object are one or more interfaces or
+        interface specifications (IDeclaration objects).
+
+        The interfaces given (including the interfaces in the
+        specifications) replace interfaces previously
+        declared for the object.
+
+        Consider the following example::
+
+          class C(A, B):
+             ...
+
+          ob = C()
+          directlyProvides(ob, I1, I2)
+
+        The object ``ob`` provides ``I1``, ``I2``, and whatever interfaces
+        have been declared for instances of ``C``.
+
+        To remove directly provided interfaces, use ``directlyProvidedBy`` and
+        subtract the unwanted interfaces. For example::
+
+          directlyProvides(ob, directlyProvidedBy(ob)-I2)
+
+        removes I2 from the interfaces directly provided by
+        ``ob``. The object ``ob`` no longer directly provides ``I2``,
+        although it might still provide ``I2`` if its class
+        implements ``I2``.
+
+        To add directly provided interfaces, use ``directlyProvidedBy`` and
+        include additional interfaces.  For example::
+
+          directlyProvides(ob, directlyProvidedBy(ob), I2)
+
+        adds I2 to the interfaces directly provided by ob.
+        """
+
+    def alsoProvides(object, *interfaces):
+        """Declare additional interfaces directly for an object::
+
+          alsoProvides(ob, I1)
+
+        is equivalent to::
+
+          directlyProvides(ob, directlyProvidedBy(ob), I1)
+        """
+
+    def noLongerProvides(object, interface):
+        """Remove an interface from the list of an object's directly
+        provided interfaces::
+
+          noLongerProvides(ob, I1)
+
+        is equivalent to::
+
+          directlyProvides(ob, directlyProvidedBy(ob)-I1)
+
+        with the exception that if ``I1`` is an interface that is
+        provided by ``ob`` through the class's implementation,
+        ValueError is raised.
+        """
+
+    def implements(*interfaces):
+        """Declare interfaces implemented by instances of a class
+
+        This function is called in a class definition (Python 2.x only).
+
+        The arguments are one or more interfaces or interface
+        specifications (IDeclaration objects).
+
+        The interfaces given (including the interfaces in the
+        specifications) are added to any interfaces previously
+        declared.
+
+        Previous declarations include declarations for base classes
+        unless implementsOnly was used.
+
+        This function is provided for convenience. It provides a more
+        convenient way to call classImplements. For example::
+
+          implements(I1)
+
+        is equivalent to calling::
+
+          classImplements(C, I1)
+
+        after the class has been created.
+
+        Consider the following example (Python 2.x only)::
+
+          class C(A, B):
+            implements(I1, I2)
+
+
+        Instances of ``C`` implement ``I1``, ``I2``, and whatever interfaces
+        instances of ``A`` and ``B`` implement.
+        """
+
+    def implementsOnly(*interfaces):
+        """Declare the only interfaces implemented by instances of a class
+
+        This function is called in a class definition (Python 2.x only).
+
+        The arguments are one or more interfaces or interface
+        specifications (IDeclaration objects).
+
+        Previous declarations including declarations for base classes
+        are overridden.
+
+        This function is provided for convenience. It provides a more
+        convenient way to call classImplementsOnly. For example::
+
+          implementsOnly(I1)
+
+        is equivalent to calling::
+
+          classImplementsOnly(C, I1)
+
+        after the class has been created.
+
+        Consider the following example (Python 2.x only)::
+
+          class C(A, B):
+            implementsOnly(I1, I2)
+
+
+        Instances of ``C`` implement ``I1``, ``I2``, regardless of what
+        instances of ``A`` and ``B`` implement.
+        """
+
+    def classProvides(*interfaces):
+        """Declare interfaces provided directly by a class
+
+        This function is called in a class definition.
+
+        The arguments are one or more interfaces or interface
+        specifications (IDeclaration objects).
+
+        The given interfaces (including the interfaces in the
+        specifications) are used to create the class's direct-object
+        interface specification.  An error will be raised if the class
+        already has a direct interface specification.  In other words, it is
+        an error to call this function more than once in a class
+        definition.
+
+        Note that the given interfaces have nothing to do with the
+        interfaces implemented by instances of the class.
+
+        This function is provided for convenience. It provides a more
+        convenient way to call directlyProvides for a class. For example::
+
+          classProvides(I1)
+
+        is equivalent to calling::
+
+          directlyProvides(theclass, I1)
+
+        after the class has been created.
+        """
+
+    def provider(*interfaces):
+        """A class decorator version of classProvides"""
+
+    def moduleProvides(*interfaces):
+        """Declare interfaces provided by a module
+
+        This function is used in a module definition.
+
+        The arguments are one or more interfaces or interface
+        specifications (IDeclaration objects).
+
+        The given interfaces (including the interfaces in the
+        specifications) are used to create the module's direct-object
+        interface specification.  An error will be raised if the module
+        already has an interface specification.  In other words, it is
+        an error to call this function more than once in a module
+        definition.
+
+        This function is provided for convenience. It provides a more
+        convenient way to call directlyProvides for a module. For example::
+
+          moduleProvides(I1)
+
+        is equivalent to::
+
+          directlyProvides(sys.modules[__name__], I1)
+        """
+
+    def Declaration(*interfaces):
+        """Create an interface specification
+
+        The arguments are one or more interfaces or interface
+        specifications (IDeclaration objects).
+
+        A new interface specification (IDeclaration) with
+        the given interfaces is returned.
+        """
+
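+# --- Editorial illustration, not part of the upstream module: a minimal
+# sketch of the declaration helpers described by IInterfaceDeclaration.
+# IGreetable, IExtra and Person are hypothetical names used only here.
+def _example_declarations():
+    from zope.interface import classImplements, alsoProvides, noLongerProvides
+
+    class IGreetable(Interface):
+        pass
+
+    class IExtra(Interface):
+        pass
+
+    class Person(object):
+        pass
+
+    # Declare that instances of Person implement IGreetable.
+    classImplements(Person, IGreetable)
+    someone = Person()
+    assert IGreetable.implementedBy(Person)
+    assert IGreetable.providedBy(someone)
+
+    # Direct declarations apply to a single object and can be added/removed.
+    alsoProvides(someone, IExtra)
+    assert IExtra.providedBy(someone)
+    noLongerProvides(someone, IExtra)
+    assert not IExtra.providedBy(someone)
+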
+class IAdapterRegistry(Interface):
+    """Provide an interface-based registry for adapters
+
+    This registry registers objects that are in some sense "from" a
+    sequence of specifications to an interface and a name.
+
+    No specific semantics are assumed for the registered objects,
+    however, the most common application will be to register factories
+    that adapt objects providing required specifications to a provided
+    interface.
+    """
+
+    def register(required, provided, name, value):
+        """Register a value
+
+        A value is registered for a *sequence* of required specifications, a
+        provided interface, and a name.
+        """
+
+    def registered(required, provided, name=_BLANK):
+        """Return the component registered for the given interfaces and name
+
+        Unlike the lookup method, this method won't retrieve
+        components registered for more specific required interfaces or
+        less specific provided interfaces.
+
+        If no component was registered exactly for the given
+        interfaces and name, then None is returned.
+
+        """
+
+    def lookup(required, provided, name='', default=None):
+        """Lookup a value
+
+        A value is looked up based on a *sequence* of required
+        specifications, a provided interface, and a name.
+        """
+
+    def queryMultiAdapter(objects, provided, name=_BLANK, default=None):
+        """Adapt a sequence of objects to a named, provided, interface
+        """
+
+    def lookup1(required, provided, name=_BLANK, default=None):
+        """Lookup a value using a single required interface
+
+        A value is looked up based on a single required
+        specification, a provided interface, and a name.
+        """
+
+    def queryAdapter(object, provided, name=_BLANK, default=None):
+        """Adapt an object using a registered adapter factory.
+        """
+
+    def adapter_hook(provided, object, name=_BLANK, default=None):
+        """Adapt an object using a registered adapter factory.
+        """
+
+    def lookupAll(required, provided):
+        """Find all adapters from the required to the provided interfaces
+
+        An iterable object is returned that provides name-value two-tuples.
+        """
+
+    def names(required, provided):
+        """Return the names for which there are registered objects
+        """
+
+    def subscribe(required, provided, subscriber, name=_BLANK):
+        """Register a subscriber
+
+        A subscriber is registered for a *sequence* of required
+        specifications, a provided interface, and a name.
+
+        Multiple subscribers may be registered for the same (or
+        equivalent) interfaces.
+        """
+
+    def subscriptions(required, provided, name=_BLANK):
+        """Get a sequence of subscribers
+
+        Subscribers for a *sequence* of required interfaces, and a provided
+        interface are returned.
+        """
+
+    def subscribers(objects, provided, name=_BLANK):
+        """Get a sequence of subscription adapters
+        """
+
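+# --- Editorial illustration, not part of the upstream module: a minimal
+# sketch of the registry behaviour described by IAdapterRegistry, using the
+# concrete AdapterRegistry from zope.interface.adapter.  IFrom, ITo and Thing
+# are hypothetical names used only here.
+def _example_adapter_registry():
+    from zope.interface.adapter import AdapterRegistry
+
+    class IFrom(Interface):
+        pass
+
+    class ITo(Interface):
+        pass
+
+    def factory(ob):
+        return ('adapted', ob)
+
+    registry = AdapterRegistry()
+    registry.register([IFrom], ITo, '', factory)
+
+    # lookup() works on specifications; queryAdapter() also calls the factory.
+    assert registry.lookup([IFrom], ITo, '') is factory
+
+    @implementer(IFrom)
+    class Thing(object):
+        pass
+
+    thing = Thing()
+    assert registry.queryAdapter(thing, ITo) == ('adapted', thing)
+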
+# begin formerly in zope.component
+
+class ComponentLookupError(LookupError):
+    """A component could not be found."""
+
+class Invalid(Exception):
+    """A component doesn't satisfy a promise."""
+
+class IObjectEvent(Interface):
+    """An event related to an object.
+
+    The object that generated this event is not necessarily the object
+    referred to by location.
+    """
+
+    object = Attribute("The subject of the event.")
+
+
+ at implementer(IObjectEvent)
+class ObjectEvent(object):
+
+    def __init__(self, object):
+        self.object = object
+
+class IComponentLookup(Interface):
+    """Component Manager for a Site
+
+    This object manages the components registered at a particular site. The
+    definition of a site is intentionally vague.
+    """
+
+    adapters = Attribute(
+        "Adapter Registry to manage all registered adapters.")
+
+    utilities = Attribute(
+        "Adapter Registry to manage all registered utilities.")
+
+    def queryAdapter(object, interface, name=_BLANK, default=None):
+        """Look for a named adapter to an interface for an object
+
+        If a matching adapter cannot be found, returns the default.
+        """
+
+    def getAdapter(object, interface, name=_BLANK):
+        """Look for a named adapter to an interface for an object
+
+        If a matching adapter cannot be found, a ComponentLookupError
+        is raised.
+        """
+
+    def queryMultiAdapter(objects, interface, name=_BLANK, default=None):
+        """Look for a multi-adapter to an interface for multiple objects
+
+        If a matching adapter cannot be found, returns the default.
+        """
+
+    def getMultiAdapter(objects, interface, name=_BLANK):
+        """Look for a multi-adapter to an interface for multiple objects
+
+        If a matching adapter cannot be found, a ComponentLookupError
+        is raised.
+        """
+
+    def getAdapters(objects, provided):
+        """Look for all matching adapters to a provided interface for objects
+
+        Return an iterable of name-adapter pairs for adapters that
+        provide the given interface.
+        """
+
+    def subscribers(objects, provided):
+        """Get subscribers
+
+        Subscribers are returned that provide the provided interface
+        and that depend on and are computed from the sequence of
+        required objects.
+        """
+
+    def handle(*objects):
+        """Call handlers for the given objects
+
+        Handlers registered for the given objects are called.
+        """
+
+    def queryUtility(interface, name='', default=None):
+        """Look up a utility that provides an interface.
+
+        If one is not found, returns default.
+        """
+
+    def getUtilitiesFor(interface):
+        """Look up the registered utilities that provide an interface.
+
+        Returns an iterable of name-utility pairs.
+        """
+
+    def getAllUtilitiesRegisteredFor(interface):
+        """Return all registered utilities for an interface
+
+        This includes overridden utilities.
+
+        An iterable of utility instances is returned.  No names are
+        returned.
+        """
+
+class IRegistration(Interface):
+    """A registration-information object
+    """
+
+    registry = Attribute("The registry having the registration")
+
+    name = Attribute("The registration name")
+
+    info = Attribute("""Information about the registration
+
+    This is information deemed useful to people browsing the
+    configuration of a system. It could, for example, include
+    commentary or information about the source of the configuration.
+    """)
+
+class IUtilityRegistration(IRegistration):
+    """Information about the registration of a utility
+    """
+
+    factory = Attribute("The factory used to create the utility. Optional.")
+    component = Attribute("The object registered")
+    provided = Attribute("The interface provided by the component")
+
+class _IBaseAdapterRegistration(IRegistration):
+    """Information about the registration of an adapter
+    """
+
+    factory = Attribute("The factory used to create adapters")
+
+    required = Attribute("""The adapted interfaces
+
+    This is a sequence of interfaces adapted by the registered
+    factory.  The factory will be called with a sequence of objects, as
+    positional arguments, that provide these interfaces.
+    """)
+
+    provided = Attribute("""The interface provided by the adapters.
+
+    This interface is implemented by the factory
+    """)
+
+class IAdapterRegistration(_IBaseAdapterRegistration):
+    """Information about the registration of an adapter
+    """
+
+class ISubscriptionAdapterRegistration(_IBaseAdapterRegistration):
+    """Information about the registration of a subscription adapter
+    """
+
+class IHandlerRegistration(IRegistration):
+
+    handler = Attribute("An object called used to handle an event")
+
+    required = Attribute("""The handled interfaces
+
+    This is a sequence of interfaces handled by the registered
+    handler.  The handler will be called with a sequence of objects, as
+    positional arguments, that provide these interfaces.
+    """)
+
+class IRegistrationEvent(IObjectEvent):
+    """An event that involves a registration"""
+
+
+ at implementer(IRegistrationEvent)
+class RegistrationEvent(ObjectEvent):
+    """There has been a change in a registration
+    """
+    def __repr__(self):
+        return "%s event:\n%r" % (self.__class__.__name__, self.object)
+
+class IRegistered(IRegistrationEvent):
+    """A component or factory was registered
+    """
+
+ at implementer(IRegistered)
+class Registered(RegistrationEvent):
+    pass
+
+class IUnregistered(IRegistrationEvent):
+    """A component or factory was unregistered
+    """
+
+ at implementer(IUnregistered)
+class Unregistered(RegistrationEvent):
+    """A component or factory was unregistered
+    """
+    pass
+
+class IComponentRegistry(Interface):
+    """Register components
+    """
+
+    def registerUtility(component=None, provided=None, name=_BLANK,
+                        info=_BLANK, factory=None):
+        """Register a utility
+
+        factory
+           Factory for the component to be registered.
+
+        component
+           The registered component
+
+        provided
+           This is the interface provided by the utility.  If the
+           component provides a single interface, then this
+           argument is optional and the component-implemented
+           interface will be used.
+
+        name
+           The utility name.
+
+        info
+           An object that can be converted to a string to provide
+           information about the registration.
+
+        Only one of component and factory can be used.
+        A Registered event is generated with an IUtilityRegistration.
+        """
+
+    def unregisterUtility(component=None, provided=None, name=_BLANK,
+                          factory=None):
+        """Unregister a utility
+
+        A boolean is returned indicating whether the registry was
+        changed.  If the given component is None and there is no
+        component registered, or if the given component is not
+        None and is not registered, then the function returns
+        False, otherwise it returns True.
+
+        factory
+           Factory for the component to be unregistered.
+
+        component
+           The registered component.  The given component can be
+           None, in which case any component registered to provide
+           the given provided interface with the given name is
+           unregistered.
+
+        provided
+           This is the interface provided by the utility.  If the
+           component is not None and provides a single interface,
+           then this argument is optional and the
+           component-implemented interface will be used.
+
+        name
+           The utility name.
+
+        Only one of component and factory can be used.
+        An Unregistered event is generated with an IUtilityRegistration.
+        """
+
+    def registeredUtilities():
+        """Return an iterable of IUtilityRegistration instances.
+
+        These registrations describe the current utility registrations
+        in the object.
+        """
+
+    def registerAdapter(factory, required=None, provided=None, name=_BLANK,
+                       info=_BLANK):
+        """Register an adapter factory
+
+        Parameters:
+
+        factory
+            The object used to compute the adapter
+
+        required
+            This is a sequence of specifications for objects to be
+            adapted.  If omitted, then the value of the factory's
+            __component_adapts__ attribute will be used.  The
+            __component_adapts__ attribute is normally set in
+            class definitions using the adapts function, or for
+            callables using the adapter decorator.  If the
+            factory doesn't have a __component_adapts__
+            attribute, then this
+            argument is required.
+
+        provided
+            This is the interface provided by the adapter and
+            implemented by the factory.  If the factory
+            implements a single interface, then this argument is
+            optional and the factory-implemented interface will be
+            used.
+
+        name
+            The adapter name.
+
+        info
+           An object that can be converted to a string to provide
+           information about the registration.
+
+        A Registered event is generated with an IAdapterRegistration.
+        """
+
+    def unregisterAdapter(factory=None, required=None,
+                          provided=None, name=_BLANK):
+        """Register an adapter factory
+
+        A boolean is returned indicating whether the registry was
+        changed.  If the given component is None and there is no
+        component registered, or if the given component is not
+        None and is not registered, then the function returns
+        False, otherwise it returns True.
+
+        Parameters:
+
+        factory
+            This is the object used to compute the adapter. The
+            factory can be None, in which case any factory
+            registered to implement the given provided interface
+            for the given required specifications with the given
+            name is unregistered.
+
+        required
+            This is a sequence of specifications for objects to be
+            adapted.  If the factory is not None and the required
+            argument is omitted, then the value of the factory's
+            __component_adapts__ attribute will be used.  The
+            __component_adapts__ attribute is normally set in
+            class definitions using the adapts function, or for
+            callables using the adapter decorator.  If the factory
+            is None or doesn't have a __component_adapts__
+            attribute, then this argument is required.
+
+        provided
+            This is the interface provided by the adapter and
+            implemented by the factory.  If the factory is not
+            None and implements a single interface, then this
+            argument is optional and the factory-implemented
+            interface will be used.
+
+        name
+            The adapter name.
+
+        An Unregistered event is generated with an IAdapterRegistration.
+        """
+
+    def registeredAdapters():
+        """Return an iterable of IAdapterRegistration instances.
+
+        These registrations describe the current adapter registrations
+        in the object.
+        """
+
+    def registerSubscriptionAdapter(factory, required=None, provides=None,
+                                    name=_BLANK, info=''):
+        """Register a subscriber factory
+
+        Parameters:
+
+        factory
+            The object used to compute the adapter
+
+        required
+            This is a sequence of specifications for objects to be
+            adapted.  If omitted, then the value of the factory's
+            __component_adapts__ attribute will be used.  The
+            __component_adapts__ attribute is normally set in
+            class definitions using the adapts function, or for
+            callables using the adapter decorator.  If the
+            factory doesn't have a __component_adapts__
+            attribute, then this
+            argument is required.
+
+        provided
+            This is the interface provided by the adapter and
+            implemented by the factory.  If the factory implements
+            a single interface, then this argument is optional and
+            the factory-implemented interface will be used.
+
+        name
+            The adapter name.
+
+            Currently, only the empty string is accepted.  Other
+            strings will be accepted in the future when support for
+            named subscribers is added.
+
+        info
+           An object that can be converted to a string to provide
+           information about the registration.
+
+        A Registered event is generated with an
+        ISubscriptionAdapterRegistration.
+        """
+
+    def unregisterSubscriptionAdapter(factory=None, required=None,
+                                      provides=None, name=_BLANK):
+        """Unregister a subscriber factory.
+
+        A boolean is returned indicating whether the registry was
+        changed.  If the given component is None and there is no
+        component registered, or if the given component is not
+        None and is not registered, then the function returns
+        False, otherwise it returns True.
+
+        Parameters:
+
+        factory
+            This is the object used to compute the adapter. The
+            factory can be None, in which case any factories
+            registered to implement the given provided interface
+            for the given required specifications with the given
+            name are unregistered.
+
+        required
+            This is a sequence of specifications for objects to be
+            adapted.  If the factory is not None and the required
+            argument is omitted, then the value of the factory's
+            __component_adapts__ attribute will be used.  The
+            __component_adapts__ attribute is normally set in
+            class definitions using the adapts function, or for
+            callables using the adapter decorator.  If the factory
+            is None or doesn't have a __component_adapts__
+            attribute, then this argument is required.
+
+        provided
+            This is the interface provided by the adapter and
+            implemented by the factory.  If the factory is not
+            None and implements a single interface, then this argument
+            is optional and the factory-implemented interface will
+            be used.
+
+        name
+            The adapter name.
+
+            Currently, only the empty string is accepted.  Other
+            strings will be accepted in the future when support for
+            named subscribers is added.
+
+        An Unregistered event is generated with an
+        ISubscriptionAdapterRegistration.
+        """
+
+    def registeredSubscriptionAdapters():
+        """Return an iterable of ISubscriptionAdapterRegistration instances.
+
+        These registrations describe the current subscription adapter
+        registrations in the object.
+        """
+
+    def registerHandler(handler, required=None, name=_BLANK, info=''):
+        """Register a handler.
+
+        A handler is a subscriber that doesn't compute an adapter
+        but performs some function when called.
+
+        Parameters:
+
+        handler
+            The object used to handle some event represented by
+            the objects passed to it.
+
+        required
+            This is a sequence of specifications for objects to be
+            adapted.  If omitted, then the value of the factory's
+            __component_adapts__ attribute will be used.  The
+            __component_adapts__ attribute is normally set in
+            class definitions using the adapts function, or for
+            callables using the adapter decorator.  If the
+            factory doesn't have a __component_adapts__
+            attribute, then this
+            argument is required.
+
+        name
+            The handler name.
+
+            Currently, only the empty string is accepted.  Other
+            strings will be accepted in the future when support for
+            named handlers is added.
+
+        info
+           An object that can be converted to a string to provide
+           information about the registration.
+
+
+        A Registered event is generated with an IHandlerRegistration.
+        """
+
+    def unregisterHandler(handler=None, required=None, name=_BLANK):
+        """Unregister a handler.
+
+        A handler is a subscriber that doesn't compute an adapter
+        but performs some function when called.
+
+        A boolean is returned indicating whether the registry was
+        changed.
+
+        Parameters:
+
+        handler
+            This is the object used to handle some event
+            represented by the objects passed to it. The handler
+            can be None, in which case any handlers registered for
+            the given required specifications with the given name are
+            unregistered.
+
+        required
+            This is a sequence of specifications for objects to be
+            adapted.  If omitted, then the value of the factory's
+            __component_adapts__ attribute will be used.  The
+            __component_adapts__ attribute is normally set in
+            class definitions using the adapts function, or for
+            callables using the adapter decorator.  If the
+            factory doesn't have a __component_adapts__
+            attribute, then this
+            argument is required.
+
+        name
+            The handler name.
+
+            Currently, only the empty string is accepted.  Other
+            strings will be accepted in the future when support for
+            named handlers is added.
+
+        An Unregistered event is generated with an IHandlerRegistration.
+        """
+
+    def registeredHandlers():
+        """Return an iterable of IHandlerRegistration instances.
+
+        These registrations describe the current handler registrations
+        in the object.
+        """
+
+
+class IComponents(IComponentLookup, IComponentRegistry):
+    """Component registration and access
+    """
+
+
+# end formerly in zope.component
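
The registerHandler/unregisterHandler docstrings above describe the handler API
in the abstract; the following is a minimal usage sketch, assuming the
Components registry added in registry.py below, with illustrative names
(IDocumentCreated, DocumentCreated, on_created are not part of this patch):

    # Sketch only: register a handler and dispatch an event through it.
    from zope.interface import Interface, implementer
    from zope.interface.registry import Components

    class IDocumentCreated(Interface):
        pass  # illustrative marker interface for an event

    @implementer(IDocumentCreated)
    class DocumentCreated(object):
        def __init__(self, name):
            self.name = name

    seen = []

    def on_created(event):
        # Handlers perform work; they do not return an adapter.
        seen.append(event.name)

    registry = Components('example')
    registry.registerHandler(on_created, required=[IDocumentCreated])
    registry.handle(DocumentCreated('report.txt'))
    assert seen == ['report.txt']
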
diff --git a/ThirdParty/ZopeInterface/zope/interface/registry.py b/ThirdParty/ZopeInterface/zope/interface/registry.py
new file mode 100644
index 0000000..1882e0d
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/registry.py
@@ -0,0 +1,530 @@
+##############################################################################
+#
+# Copyright (c) 2006 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Basic components support
+"""
+try:
+    from zope.event import notify
+except ImportError: #pragma NO COVER
+    def notify(*arg, **kw): pass
+
+from zope.interface.interfaces import ISpecification
+from zope.interface.interfaces import ComponentLookupError
+from zope.interface.interfaces import IAdapterRegistration
+from zope.interface.interfaces import IComponents
+from zope.interface.interfaces import IHandlerRegistration
+from zope.interface.interfaces import ISubscriptionAdapterRegistration
+from zope.interface.interfaces import IUtilityRegistration
+from zope.interface.interfaces import Registered
+from zope.interface.interfaces import Unregistered
+
+from zope.interface.interface import Interface
+from zope.interface.declarations import implementedBy
+from zope.interface.declarations import implementer
+from zope.interface.declarations import implementer_only
+from zope.interface.declarations import providedBy
+from zope.interface.adapter import AdapterRegistry
+from zope.interface._compat import _u
+from zope.interface._compat import CLASS_TYPES
+from zope.interface._compat import STRING_TYPES
+
+
+@implementer(IComponents)
+class Components(object):
+
+    def __init__(self, name='', bases=()):
+        assert isinstance(name, STRING_TYPES)
+        self.__name__ = name
+        self._init_registries()
+        self._init_registrations()
+        self.__bases__ = tuple(bases)
+
+    def __repr__(self):
+        return "<%s %s>" % (self.__class__.__name__, self.__name__)
+
+    def _init_registries(self):
+        self.adapters = AdapterRegistry()
+        self.utilities = AdapterRegistry()
+
+    def _init_registrations(self):
+        self._utility_registrations = {}
+        self._adapter_registrations = {}
+        self._subscription_registrations = []
+        self._handler_registrations = []
+
+    def _getBases(self):
+        # Subclasses might override
+        return self.__dict__.get('__bases__', ())
+
+    def _setBases(self, bases):
+        # Subclasses might override
+        self.adapters.__bases__ = tuple([
+            base.adapters for base in bases])
+        self.utilities.__bases__ = tuple([
+            base.utilities for base in bases])
+        self.__dict__['__bases__'] = tuple(bases)
+
+    __bases__ = property(
+        lambda self: self._getBases(),
+        lambda self, bases: self._setBases(bases),
+        )
+
+    def registerUtility(self, component=None, provided=None, name=_u(''), 
+                        info=_u(''), event=True, factory=None):
+        if factory:
+            if component:
+                raise TypeError("Can't specify factory and component.")
+            component = factory()
+
+        if provided is None:
+            provided = _getUtilityProvided(component)
+
+        reg = self._utility_registrations.get((provided, name))
+        if reg is not None:
+            if reg[:2] == (component, info):
+                # already registered
+                return
+            self.unregisterUtility(reg[0], provided, name)
+
+        subscribed = False
+        for ((p, _), data) in iter(self._utility_registrations.items()):
+            if p == provided and data[0] == component:
+                subscribed = True
+                break
+
+        self._utility_registrations[(provided, name)] = component, info, factory
+        self.utilities.register((), provided, name, component)
+
+        if not subscribed:
+            self.utilities.subscribe((), provided, component)
+
+        if event:
+            notify(Registered(
+                UtilityRegistration(self, provided, name, component, info,
+                                    factory)
+                ))
+
+    def unregisterUtility(self, component=None, provided=None, name=_u(''),
+                          factory=None):
+        if factory:
+            if component:
+                raise TypeError("Can't specify factory and component.")
+            component = factory()
+
+        if provided is None:
+            if component is None:
+                raise TypeError("Must specify one of component, factory and "
+                                "provided")
+            provided = _getUtilityProvided(component)
+
+        old = self._utility_registrations.get((provided, name))
+        if (old is None) or ((component is not None) and
+                             (component != old[0])):
+            return False
+
+        if component is None:
+            component = old[0]
+
+        # Note that component is now the old thing registered
+
+        del self._utility_registrations[(provided, name)]
+        self.utilities.unregister((), provided, name)
+
+        subscribed = False
+        for ((p, _), data) in iter(self._utility_registrations.items()):
+            if p == provided and data[0] == component:
+                subscribed = True
+                break
+
+        if not subscribed:
+            self.utilities.unsubscribe((), provided, component)
+
+        notify(Unregistered(
+            UtilityRegistration(self, provided, name, component, *old[1:])
+            ))
+
+        return True
+
+    def registeredUtilities(self):
+        for ((provided, name), data
+             ) in iter(self._utility_registrations.items()):
+            yield UtilityRegistration(self, provided, name, *data)
+
+    def queryUtility(self, provided, name=_u(''), default=None):
+        return self.utilities.lookup((), provided, name, default)
+
+    def getUtility(self, provided, name=_u('')):
+        utility = self.utilities.lookup((), provided, name)
+        if utility is None:
+            raise ComponentLookupError(provided, name)
+        return utility
+
+    def getUtilitiesFor(self, interface):
+        for name, utility in self.utilities.lookupAll((), interface):
+            yield name, utility
+
+    def getAllUtilitiesRegisteredFor(self, interface):
+        return self.utilities.subscriptions((), interface)
+
+    def registerAdapter(self, factory, required=None, provided=None, 
+                        name=_u(''), info=_u(''), event=True):
+        if provided is None:
+            provided = _getAdapterProvided(factory)
+        required = _getAdapterRequired(factory, required)
+        self._adapter_registrations[(required, provided, name)
+                                    ] = factory, info
+        self.adapters.register(required, provided, name, factory)
+
+        if event:
+            notify(Registered(
+                AdapterRegistration(self, required, provided, name,
+                                    factory, info)
+                ))
+
+
+    def unregisterAdapter(self, factory=None,
+                          required=None, provided=None, name=_u(''),
+                          ):
+        if provided is None:
+            if factory is None:
+                raise TypeError("Must specify one of factory and provided")
+            provided = _getAdapterProvided(factory)
+
+        if (required is None) and (factory is None):
+            raise TypeError("Must specify one of factory and required")
+
+        required = _getAdapterRequired(factory, required)
+        old = self._adapter_registrations.get((required, provided, name))
+        if (old is None) or ((factory is not None) and
+                             (factory != old[0])):
+            return False
+
+        del self._adapter_registrations[(required, provided, name)]
+        self.adapters.unregister(required, provided, name)
+
+        notify(Unregistered(
+            AdapterRegistration(self, required, provided, name,
+                                *old)
+            ))
+
+        return True
+
+    def registeredAdapters(self):
+        for ((required, provided, name), (component, info)
+             ) in iter(self._adapter_registrations.items()):
+            yield AdapterRegistration(self, required, provided, name,
+                                      component, info)
+
+    def queryAdapter(self, object, interface, name=_u(''), default=None):
+        return self.adapters.queryAdapter(object, interface, name, default)
+
+    def getAdapter(self, object, interface, name=_u('')):
+        adapter = self.adapters.queryAdapter(object, interface, name)
+        if adapter is None:
+            raise ComponentLookupError(object, interface, name)
+        return adapter
+
+    def queryMultiAdapter(self, objects, interface, name=_u(''), 
+                          default=None):
+        return self.adapters.queryMultiAdapter(
+            objects, interface, name, default)
+
+    def getMultiAdapter(self, objects, interface, name=_u('')):
+        adapter = self.adapters.queryMultiAdapter(objects, interface, name)
+        if adapter is None:
+            raise ComponentLookupError(objects, interface, name)
+        return adapter
+
+    def getAdapters(self, objects, provided):
+        for name, factory in self.adapters.lookupAll(
+            list(map(providedBy, objects)),
+            provided):
+            adapter = factory(*objects)
+            if adapter is not None:
+                yield name, adapter
+
+    def registerSubscriptionAdapter(self,
+                                    factory, required=None, provided=None,
+                                    name=_u(''), info=_u(''),
+                                    event=True):
+        if name:
+            raise TypeError("Named subscribers are not yet supported")
+        if provided is None:
+            provided = _getAdapterProvided(factory)
+        required = _getAdapterRequired(factory, required)
+        self._subscription_registrations.append(
+            (required, provided, name, factory, info)
+            )
+        self.adapters.subscribe(required, provided, factory)
+
+        if event:
+            notify(Registered(
+                SubscriptionRegistration(self, required, provided, name,
+                                         factory, info)
+                ))
+
+    def registeredSubscriptionAdapters(self):
+        for data in self._subscription_registrations:
+            yield SubscriptionRegistration(self, *data)
+
+    def unregisterSubscriptionAdapter(self, factory=None,
+                          required=None, provided=None, name=_u(''),
+                          ):
+        if name:
+            raise TypeError("Named subscribers are not yet supported")
+        if provided is None:
+            if factory is None:
+                raise TypeError("Must specify one of factory and provided")
+            provided = _getAdapterProvided(factory)
+
+        if (required is None) and (factory is None):
+            raise TypeError("Must specify one of factory and required")
+
+        required = _getAdapterRequired(factory, required)
+
+        if factory is None:
+            new = [(r, p, n, f, i)
+                   for (r, p, n, f, i)
+                   in self._subscription_registrations
+                   if not (r == required and p == provided)
+                   ]
+        else:
+            new = [(r, p, n, f, i)
+                   for (r, p, n, f, i)
+                   in self._subscription_registrations
+                   if not (r == required and p == provided and f == factory)
+                   ]
+
+        if len(new) == len(self._subscription_registrations):
+            return False
+
+
+        self._subscription_registrations[:] = new
+        self.adapters.unsubscribe(required, provided, factory)
+
+        notify(Unregistered(
+            SubscriptionRegistration(self, required, provided, name,
+                                     factory, '')
+            ))
+
+        return True
+
+    def subscribers(self, objects, provided):
+        return self.adapters.subscribers(objects, provided)
+
+    def registerHandler(self,
+                        factory, required=None,
+                        name=_u(''), info=_u(''),
+                        event=True):
+        if name:
+            raise TypeError("Named handlers are not yet supported")
+        required = _getAdapterRequired(factory, required)
+        self._handler_registrations.append(
+            (required, name, factory, info)
+            )
+        self.adapters.subscribe(required, None, factory)
+
+        if event:
+            notify(Registered(
+                HandlerRegistration(self, required, name, factory, info)
+                ))
+
+    def registeredHandlers(self):
+        for data in self._handler_registrations:
+            yield HandlerRegistration(self, *data)
+
+    def unregisterHandler(self, factory=None, required=None, name=_u('')):
+        if name:
+            raise TypeError("Named subscribers are not yet supported")
+
+        if (required is None) and (factory is None):
+            raise TypeError("Must specify one of factory and required")
+
+        required = _getAdapterRequired(factory, required)
+
+        if factory is None:
+            new = [(r, n, f, i)
+                   for (r, n, f, i)
+                   in self._handler_registrations
+                   if r != required
+                   ]
+        else:
+            new = [(r, n, f, i)
+                   for (r, n, f, i)
+                   in self._handler_registrations
+                   if not (r == required and f == factory)
+                   ]
+
+        if len(new) == len(self._handler_registrations):
+            return False
+
+        self._handler_registrations[:] = new
+        self.adapters.unsubscribe(required, None, factory)
+
+        notify(Unregistered(
+            HandlerRegistration(self, required, name, factory, '')
+            ))
+
+        return True
+
+    def handle(self, *objects):
+        self.adapters.subscribers(objects, None)
+
+
+def _getUtilityProvided(component):
+    provided = list(providedBy(component))
+    if len(provided) == 1:
+        return provided[0]
+    raise TypeError(
+        "The utility doesn't provide a single interface "
+        "and no provided interface was specified.")
+
+def _getAdapterProvided(factory):
+    provided = list(implementedBy(factory))
+    if len(provided) == 1:
+        return provided[0]
+    raise TypeError(
+        "The adapter factory doesn't implement a single interface "
+        "and no provided interface was specified.")
+
+def _getAdapterRequired(factory, required):
+    if required is None:
+        try:
+            required = factory.__component_adapts__
+        except AttributeError:
+            raise TypeError(
+                "The adapter factory doesn't have a __component_adapts__ "
+                "attribute and no required specifications were specified"
+                )
+    elif ISpecification.providedBy(required):
+        raise TypeError("the required argument should be a list of "
+                        "interfaces, not a single interface")
+
+    result = []
+    for r in required:
+        if r is None:
+            r = Interface
+        elif not ISpecification.providedBy(r):
+            if isinstance(r, CLASS_TYPES):
+                r = implementedBy(r)
+            else:
+                raise TypeError("Required specification must be a "
+                                "specification or class."
+                                )
+        result.append(r)
+    return tuple(result)
+
+
+@implementer(IUtilityRegistration)
+class UtilityRegistration(object):
+
+    def __init__(self, registry, provided, name, component, doc, factory=None):
+        (self.registry, self.provided, self.name, self.component, self.info,
+         self.factory
+         ) = registry, provided, name, component, doc, factory
+
+    def __repr__(self):
+        return '%s(%r, %s, %r, %s, %r, %r)' % (
+                self.__class__.__name__,
+                self.registry,
+                getattr(self.provided, '__name__', None), self.name,
+                getattr(self.component, '__name__', repr(self.component)),
+                self.factory, self.info,
+                )
+
+    def __hash__(self):
+        return id(self)
+
+    def __eq__(self, other):
+        return repr(self) == repr(other)
+
+    def __ne__(self, other):
+        return repr(self) != repr(other)
+
+    def __lt__(self, other):
+        return repr(self) < repr(other)
+
+    def __le__(self, other):
+        return repr(self) <= repr(other)
+
+    def __gt__(self, other):
+        return repr(self) > repr(other)
+
+    def __ge__(self, other):
+        return repr(self) >= repr(other)
+
+@implementer(IAdapterRegistration)
+class AdapterRegistration(object):
+
+    def __init__(self, registry, required, provided, name, component, doc):
+        (self.registry, self.required, self.provided, self.name,
+         self.factory, self.info
+         ) = registry, required, provided, name, component, doc
+
+    def __repr__(self):
+        return '%s(%r, %s, %s, %r, %s, %r)' % (
+            self.__class__.__name__,
+            self.registry,
+            '[' + ", ".join([r.__name__ for r in self.required]) + ']',
+            getattr(self.provided, '__name__', None), self.name,
+            getattr(self.factory, '__name__', repr(self.factory)), self.info,
+            )
+
+    def __hash__(self):
+        return id(self)
+
+    def __eq__(self, other):
+        return repr(self) == repr(other)
+
+    def __ne__(self, other):
+        return repr(self) != repr(other)
+
+    def __lt__(self, other):
+        return repr(self) < repr(other)
+
+    def __le__(self, other):
+        return repr(self) <= repr(other)
+
+    def __gt__(self, other):
+        return repr(self) > repr(other)
+
+    def __ge__(self, other):
+        return repr(self) >= repr(other)
+
+@implementer_only(ISubscriptionAdapterRegistration)
+class SubscriptionRegistration(AdapterRegistration):
+    pass
+
+
+@implementer_only(IHandlerRegistration)
+class HandlerRegistration(AdapterRegistration):
+
+    def __init__(self, registry, required, name, handler, doc):
+        (self.registry, self.required, self.name, self.handler, self.info
+         ) = registry, required, name, handler, doc
+
+    @property
+    def factory(self):
+        return self.handler
+
+    provided = None
+
+    def __repr__(self):
+        return '%s(%r, %s, %r, %s, %r)' % (
+            self.__class__.__name__,
+            self.registry,
+            '[' + ", ".join([r.__name__ for r in self.required]) + ']',
+            self.name,
+            getattr(self.factory, '__name__', repr(self.factory)), self.info,
+            )
+
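
As a rough usage sketch of the registry implemented above (names such as
IGreeter, Greeter, ILoud and Shouter are illustrative, not part of this patch):

    # Sketch only: utility and adapter registration with Components.
    from zope.interface import Interface, implementer
    from zope.interface.registry import Components

    class IGreeter(Interface):
        def greet(name):
            "Return a greeting for `name`."

    @implementer(IGreeter)
    class Greeter(object):
        def greet(self, name):
            return 'hello %s' % name

    class ILoud(Interface):
        pass  # illustrative target interface for the adapter

    @implementer(ILoud)
    class Shouter(object):
        def __init__(self, greeter):
            self.greeter = greeter
        def shout(self, name):
            return self.greeter.greet(name).upper()

    registry = Components('demo')

    # Utilities are registered per interface (plus an optional name).
    registry.registerUtility(Greeter(), IGreeter)
    assert registry.getUtility(IGreeter).greet('world') == 'hello world'

    # Adapters are looked up by what the adapted object provides.
    registry.registerAdapter(Shouter, required=[IGreeter], provided=ILoud)
    assert registry.getAdapter(Greeter(), ILoud).shout('world') == 'HELLO WORLD'
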
diff --git a/ThirdParty/ZopeInterface/zope/interface/ro.py b/ThirdParty/ZopeInterface/zope/interface/ro.py
new file mode 100644
index 0000000..ab47a4c
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/ro.py
@@ -0,0 +1,69 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Compute a resolution order for an object and its bases
+"""
+__docformat__ = 'restructuredtext'
+
+
+def ro(object):
+    """Compute a "resolution order" for an object
+    """
+    return mergeOrderings([_flatten(object)])
+
+def mergeOrderings(orderings, seen=None):
+    """Merge multiple orderings so that within-ordering order is preserved
+
+    Orderings are constrained in such a way that if an object appears
+    in two or more orderings, then the suffix that begins with the
+    object must be in both orderings.
+
+    For example:
+
+    >>> mergeOrderings([
+    ... ['x', 'y', 'z'],
+    ... ['q', 'z'],
+    ... [1, 3, 5],
+    ... ['z']
+    ... ])
+    ['x', 'y', 'q', 1, 3, 5, 'z']
+
+    """
+
+    if seen is None:
+        seen = {}
+    result = []
+    orderings.reverse()
+    for ordering in orderings:
+        ordering = list(ordering)
+        ordering.reverse()
+        for o in ordering:
+            if o not in seen:
+                seen[o] = 1
+                result.append(o)
+
+    result.reverse()
+    return result
+
+def _flatten(ob):
+    result = [ob]
+    i = 0
+    for ob in iter(result):
+        i += 1
+        # The recursive calls can be avoided by inserting the base classes
+        # into the dynamically growing list directly after the currently
+        # considered object;  the iterator makes sure this will keep working
+        # in the future, since it cannot rely on the length of the list
+        # by definition.
+        result[i:i] = ob.__bases__
+    return result
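
The mergeOrderings doctest above covers the merge step; for ro() itself, a
small sketch (class names are illustrative) of the order computed for a
diamond hierarchy:

    # Sketch only: resolution order of a diamond hierarchy via ro().
    from zope.interface.ro import ro

    class A(object): pass
    class B(A): pass
    class C(A): pass
    class D(B, C): pass

    # _flatten yields [D, B, A, object, C, A, object]; mergeOrderings keeps
    # the last occurrence of each duplicate, so the bases of both branches
    # come before the shared base.
    assert ro(D) == [D, B, C, A, object]
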
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/__init__.py b/ThirdParty/ZopeInterface/zope/interface/tests/__init__.py
new file mode 100644
index 0000000..1cf24e6
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/__init__.py
@@ -0,0 +1,13 @@
+import os
+import unittest
+
+def additional_tests():
+    suites = unittest.TestSuite()
+    for file in os.listdir(os.path.dirname(__file__)):
+        if file.endswith('.py') and file!='__init__.py':
+            name = os.path.splitext(file)[0]
+            module = __import__('.'.join((__name__, name)), globals(), 
+                                locals(), [name])
+            if hasattr(module, 'test_suite'):
+                suites.addTests(module.test_suite())
+    return suites
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/advisory_testing.py b/ThirdParty/ZopeInterface/zope/interface/tests/advisory_testing.py
new file mode 100644
index 0000000..f78a550
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/advisory_testing.py
@@ -0,0 +1,42 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+import sys
+
+from zope.interface.advice import addClassAdvisor
+from zope.interface.advice import getFrameInfo
+
+my_globals = globals()
+
+def ping(log, value):
+
+    def pong(klass):
+        log.append((value,klass))
+        return [klass]
+
+    addClassAdvisor(pong)
+
+try:
+    from types import ClassType
+    
+    class ClassicClass:
+        __metaclass__ = ClassType
+        classLevelFrameInfo = getFrameInfo(sys._getframe())
+except ImportError:
+    ClassicClass = None
+
+class NewStyleClass:
+    __metaclass__ = type
+    classLevelFrameInfo = getFrameInfo(sys._getframe())
+
+moduleLevelFrameInfo = getFrameInfo(sys._getframe())
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/dummy.py b/ThirdParty/ZopeInterface/zope/interface/tests/dummy.py
new file mode 100644
index 0000000..9057d73
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/dummy.py
@@ -0,0 +1,23 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+""" Dummy Module
+"""
+from zope.interface import moduleProvides
+from zope.interface.tests.idummy import IDummyModule
+
+moduleProvides(IDummyModule)
+
+def bar(baz):
+    # Note:  no 'self', because the module provides the interface directly.
+    pass
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/idummy.py b/ThirdParty/ZopeInterface/zope/interface/tests/idummy.py
new file mode 100644
index 0000000..1e34fe0
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/idummy.py
@@ -0,0 +1,23 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+""" Interface describing API of zope.interface.tests.dummy test module
+"""
+from zope.interface import Interface
+
+class IDummyModule(Interface):
+    """ Dummy interface for unit tests.
+    """
+    def bar(baz):
+        """ Just a note.
+        """
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/ifoo.py b/ThirdParty/ZopeInterface/zope/interface/tests/ifoo.py
new file mode 100644
index 0000000..29a7877
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/ifoo.py
@@ -0,0 +1,26 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""IFoo test module
+"""
+from zope.interface import Interface
+
+class IFoo(Interface):
+    """
+        Dummy interface for unit tests.
+    """
+
+    def bar(baz):
+        """
+            Just a note.
+        """
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/ifoo_other.py b/ThirdParty/ZopeInterface/zope/interface/tests/ifoo_other.py
new file mode 100644
index 0000000..29a7877
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/ifoo_other.py
@@ -0,0 +1,26 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""IFoo test module
+"""
+from zope.interface import Interface
+
+class IFoo(Interface):
+    """
+        Dummy interface for unit tests.
+    """
+
+    def bar(baz):
+        """
+            Just a note.
+        """
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/m1.py b/ThirdParty/ZopeInterface/zope/interface/tests/m1.py
new file mode 100644
index 0000000..d311fb4
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/m1.py
@@ -0,0 +1,21 @@
+##############################################################################
+#
+# Copyright (c) 2004 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test module that declares an interface
+"""
+from zope.interface import Interface, moduleProvides
+
+class I1(Interface): pass
+class I2(Interface): pass
+
+moduleProvides(I1, I2)
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/m2.py b/ThirdParty/ZopeInterface/zope/interface/tests/m2.py
new file mode 100644
index 0000000..511cd9c
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/m2.py
@@ -0,0 +1,15 @@
+##############################################################################
+#
+# Copyright (c) 2004 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test module that doesn't declare an interface
+"""
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/odd.py b/ThirdParty/ZopeInterface/zope/interface/tests/odd.py
new file mode 100644
index 0000000..04ffa31
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/odd.py
@@ -0,0 +1,129 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Odd meta class that doesn't subclass type.
+
+This is used for testing support for ExtensionClass in new interfaces.
+
+  >>> class A(object):
+  ...     __metaclass__ = MetaClass
+  ...     a = 1
+  ...
+  >>> A.__name__
+  'A'
+  >>> A.__bases__ == (object,)
+  True
+  >>> class B(object):
+  ...     __metaclass__ = MetaClass
+  ...     b = 1
+  ...
+  >>> class C(A, B): pass
+  ...
+  >>> C.__name__
+  'C'
+  >>> int(C.__bases__ == (A, B))
+  1
+  >>> a = A()
+  >>> aa = A()
+  >>> a.a
+  1
+  >>> aa.a
+  1
+  >>> aa.a = 2
+  >>> a.a
+  1
+  >>> aa.a
+  2
+  >>> c = C()
+  >>> c.a
+  1
+  >>> c.b
+  1
+  >>> c.b = 2
+  >>> c.b
+  2
+  >>> C.c = 1
+  >>> c.c
+  1
+  >>> import sys
+  >>> if sys.version[0] == '2': # This test only makes sense under Python 2.x
+  ...     from types import ClassType
+  ...     assert not isinstance(C, (type, ClassType))
+  
+  >>> int(C.__class__.__class__ is C.__class__)
+  1
+"""
+
+# class OddClass is an odd meta class
+
+class MetaMetaClass(type):
+
+    def __getattribute__(self, name):
+        if name == '__class__':
+            return self
+        return type.__getattribute__(self, name)
+    
+
+class MetaClass(object):
+    """Odd classes
+    """
+    __metaclass__ = MetaMetaClass
+
+    def __init__(self, name, bases, dict):
+        self.__name__ = name
+        self.__bases__ = bases
+        self.__dict__.update(dict)
+
+    def __call__(self):
+        return OddInstance(self)
+
+    def __getattr__(self, name):
+        for b in self.__bases__:
+            v = getattr(b, name, self)
+            if v is not self:
+                return v
+        raise AttributeError(name)
+
+    def __repr__(self):
+        return "<odd class %s at %s>" % (self.__name__, hex(id(self)))
+
+class OddInstance(object):
+
+    def __init__(self, cls):
+        self.__dict__['__class__'] = cls
+
+    def __getattribute__(self, name):
+        dict = object.__getattribute__(self, '__dict__')
+        if name == '__dict__':
+            return dict
+        v = dict.get(name, self)
+        if v is not self:
+            return v
+        return getattr(dict['__class__'], name)
+
+    def __setattr__(self, name, v):
+        self.__dict__[name] = v
+
+    def __delattr__(self, name):
+        del self.__dict__[name]
+
+    def __repr__(self):
+        return "<odd %s instance at %s>" % (
+            self.__class__.__name__, hex(id(self)))
+        
+
+
+# DocTest:
+if __name__ == "__main__":
+    import doctest, __main__
+    doctest.testmod(__main__, isprivate=lambda *a: False)
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_adapter.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_adapter.py
new file mode 100644
index 0000000..0f82bcc
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_adapter.py
@@ -0,0 +1,1285 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Adapter registry tests
+"""
+import unittest
+
+class _SilencePy3Deprecations(unittest.TestCase):
+    # silence deprecation warnings under py3
+
+    def failUnless(self, expr):
+        # St00pid speling.
+        return self.assertTrue(expr)
+
+    def failIf(self, expr):
+        # St00pid speling.
+        return self.assertFalse(expr)
+
+
+def _makeInterfaces():
+    from zope.interface import Interface
+
+    class IB0(Interface): pass
+    class IB1(IB0): pass
+    class IB2(IB0): pass
+    class IB3(IB2, IB1): pass
+    class IB4(IB1, IB2): pass
+
+    class IF0(Interface): pass
+    class IF1(IF0): pass
+
+    class IR0(Interface): pass
+    class IR1(IR0): pass
+
+    return IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1
+
+
+class BaseAdapterRegistryTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.adapter import BaseAdapterRegistry
+        class _CUT(BaseAdapterRegistry):
+            class LookupClass(object):
+                _changed = _extendors = ()
+                def __init__(self, reg):
+                    pass
+                def changed(self, orig):
+                    self._changed += (orig,)
+                def add_extendor(self, provided):
+                    self._extendors += (provided,)
+                def remove_extendor(self, provided):
+                    self._extendors = tuple([x for x in self._extendors
+                                                    if x != provided])
+        for name in BaseAdapterRegistry._delegated:
+            setattr(_CUT.LookupClass, name, object())
+        return _CUT
+
+    def _makeOne(self):
+        return self._getTargetClass()()
+
+    def test_lookup_delegation(self):
+        CUT = self._getTargetClass()
+        registry = CUT()
+        for name in CUT._delegated:
+            self.failUnless(
+                getattr(registry, name) is getattr(registry._v_lookup, name))
+
+    def test__generation_on_first_creation(self):
+        registry = self._makeOne()
+        # Bumped to 1 in BaseAdapterRegistry.__init__
+        self.assertEqual(registry._generation, 1)
+
+    def test__generation_after_calling_changed(self):
+        registry = self._makeOne()
+        orig = object()
+        registry.changed(orig)
+        # Bumped to 1 in BaseAdapterRegistry.__init__
+        self.assertEqual(registry._generation, 2)
+        self.assertEqual(registry._v_lookup._changed, (registry, orig,))
+
+    def test__generation_after_changing___bases__(self):
+        class _Base(object): pass
+        registry = self._makeOne()
+        registry.__bases__ = (_Base,)
+        self.assertEqual(registry._generation, 2)
+
+    def test_register(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        registry.register([IB0], IR0, '', 'A1')
+        self.assertEqual(registry.registered([IB0], IR0, ''), 'A1')
+        self.assertEqual(len(registry._adapters), 2) #order 0 and order 1
+        self.assertEqual(registry._generation, 2)
+
+    def test_register_with_value_None_unregisters(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        registry.register([None], IR0, '', 'A1')
+        registry.register([None], IR0, '', None)
+        self.assertEqual(len(registry._adapters), 0)
+
+    def test_register_with_same_value(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        _value = object()
+        registry.register([None], IR0, '', _value)
+        _before = registry._generation
+        registry.register([None], IR0, '', _value)
+        self.assertEqual(registry._generation, _before) # skipped changed()
+
+    def test_registered_empty(self):
+        registry = self._makeOne()
+        self.assertEqual(registry.registered([None], None, ''), None)
+
+    def test_registered_non_empty_miss(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        registry.register([IB1], None, '', 'A1')
+        self.assertEqual(registry.registered([IB2], None, ''), None)
+
+    def test_registered_non_empty_hit(self):
+        registry = self._makeOne()
+        registry.register([None], None, '', 'A1')
+        self.assertEqual(registry.registered([None], None, ''), 'A1')
+
+    def test_unregister_empty(self):
+        registry = self._makeOne()
+        registry.unregister([None], None, '') #doesn't raise
+        self.assertEqual(registry.registered([None], None, ''), None)
+
+    def test_unregister_non_empty_miss_on_required(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        registry.register([IB1], None, '', 'A1')
+        registry.unregister([IB2], None, '') #doesn't raise
+        self.assertEqual(registry.registered([IB1], None, ''), 'A1')
+
+    def test_unregister_non_empty_miss_on_name(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        registry.register([IB1], None, '', 'A1')
+        registry.unregister([IB1], None, 'nonesuch') #doesn't raise
+        self.assertEqual(registry.registered([IB1], None, ''), 'A1')
+
+    def test_unregister_with_value_not_None_miss(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        orig = object()
+        nomatch = object()
+        registry.register([IB1], None, '', orig)
+        registry.unregister([IB1], None, '', nomatch) #doesn't raise
+        self.failUnless(registry.registered([IB1], None, '') is orig)
+
+    def test_unregister_hit_clears_empty_subcomponents(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        one = object()
+        another = object()
+        registry.register([IB1, IB2], None, '', one)
+        registry.register([IB1, IB3], None, '', another)
+        self.failUnless(IB2 in registry._adapters[2][IB1])
+        self.failUnless(IB3 in registry._adapters[2][IB1])
+        registry.unregister([IB1, IB3], None, '', another)
+        self.failUnless(IB2 in registry._adapters[2][IB1])
+        self.failIf(IB3 in registry._adapters[2][IB1])
+
+    def test_unsubscribe_empty(self):
+        registry = self._makeOne()
+        registry.unsubscribe([None], None, '') #doesn't raise
+        self.assertEqual(registry.registered([None], None, ''), None)
+
+    def test_unsubscribe_hit(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        orig = object()
+        registry.subscribe([IB1], None, orig)
+        registry.unsubscribe([IB1], None, orig) #doesn't raise
+        self.assertEqual(len(registry._subscribers), 0)
+
+    def test_unsubscribe_after_multiple(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        first = object()
+        second = object()
+        third = object()
+        registry.subscribe([IB1], None, first)
+        registry.subscribe([IB1], None, second)
+        registry.subscribe([IB1], IR0, third)
+        registry.unsubscribe([IB1], IR0, third)
+        registry.unsubscribe([IB1], None, second)
+        registry.unsubscribe([IB1], None, first)
+        self.assertEqual(len(registry._subscribers), 0)
+
+    def test_unsubscribe_w_None_after_multiple(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        first = object()
+        second = object()
+        third = object()
+        registry.subscribe([IB1], None, first)
+        registry.subscribe([IB1], None, second)
+        registry.unsubscribe([IB1], None)
+        self.assertEqual(len(registry._subscribers), 0)
+
+    def test_unsubscribe_non_empty_miss_on_required(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        registry.subscribe([IB1], None, 'A1')
+        self.assertEqual(len(registry._subscribers), 2)
+        registry.unsubscribe([IB2], None, '') #doesn't raise
+        self.assertEqual(len(registry._subscribers), 2)
+
+    def test_unsubscribe_non_empty_miss_on_value(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        registry.subscribe([IB1], None, 'A1')
+        self.assertEqual(len(registry._subscribers), 2)
+        registry.unsubscribe([IB1], None, 'A2') #doesn't raise
+        self.assertEqual(len(registry._subscribers), 2)
+
+    def test_unsubscribe_with_value_not_None_miss(self):
+        IB0, IB1, IB2, IB3, IB4, IF0, IF1, IR0, IR1 = _makeInterfaces()
+        registry = self._makeOne()
+        orig = object()
+        nomatch = object()
+        registry.subscribe([IB1], None, orig)
+        registry.unsubscribe([IB1], None, nomatch) #doesn't raise
+        self.assertEqual(len(registry._subscribers), 2)
+
+
+class LookupBaseFallbackTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.adapter import LookupBaseFallback
+        return LookupBaseFallback
+
+    def _makeOne(self, uc_lookup=None, uc_lookupAll=None,
+                 uc_subscriptions=None):
+        if uc_lookup is None:
+            def uc_lookup(self, required, provided, name):
+                pass
+        if uc_lookupAll is None:
+            def uc_lookupAll(self, required, provided):
+                pass
+        if uc_subscriptions is None:
+            def uc_subscriptions(self, required, provided):
+                pass
+        class Derived(self._getTargetClass()):
+            _uncached_lookup = uc_lookup
+            _uncached_lookupAll = uc_lookupAll
+            _uncached_subscriptions = uc_subscriptions
+        return Derived()
+
+    def test_lookup_miss_no_default(self):
+        _called_with = []
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return None
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup(('A',), 'B', 'C')
+        self.failUnless(found is None)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+
+    def test_lookup_miss_w_default(self):
+        _called_with = []
+        _default = object()
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return None
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup(('A',), 'B', 'C', _default)
+        self.failUnless(found is _default)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+
+    def test_lookup_not_cached(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup(('A',), 'B', 'C')
+        self.failUnless(found is a)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+        self.assertEqual(_results, [b, c])
+
+    def test_lookup_cached(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup(('A',), 'B', 'C')
+        found = lb.lookup(('A',), 'B', 'C')
+        self.failUnless(found is a)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+        self.assertEqual(_results, [b, c])
+
+    def test_lookup_not_cached_multi_required(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup(('A', 'D'), 'B', 'C')
+        self.failUnless(found is a)
+        self.assertEqual(_called_with, [(('A', 'D'), 'B', 'C')])
+        self.assertEqual(_results, [b, c])
+
+    def test_lookup_cached_multi_required(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup(('A', 'D'), 'B', 'C')
+        found = lb.lookup(('A', 'D'), 'B', 'C')
+        self.failUnless(found is a)
+        self.assertEqual(_called_with, [(('A', 'D'), 'B', 'C')])
+        self.assertEqual(_results, [b, c])
+
+    def test_lookup_not_cached_after_changed(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup(('A',), 'B', 'C')
+        lb.changed(lb)
+        found = lb.lookup(('A',), 'B', 'C')
+        self.failUnless(found is b)
+        self.assertEqual(_called_with,
+                         [(('A',), 'B', 'C'), (('A',), 'B', 'C')])
+        self.assertEqual(_results, [c])
+
+    def test_lookup1_miss_no_default(self):
+        _called_with = []
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return None
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup1('A', 'B', 'C')
+        self.failUnless(found is None)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+
+    def test_lookup1_miss_w_default(self):
+        _called_with = []
+        _default = object()
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return None
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup1('A', 'B', 'C', _default)
+        self.failUnless(found is _default)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+
+    def test_lookup1_miss_w_default_negative_cache(self):
+        _called_with = []
+        _default = object()
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return None
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup1('A', 'B', 'C', _default)
+        self.failUnless(found is _default)
+        found = lb.lookup1('A', 'B', 'C', _default)
+        self.failUnless(found is _default)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+
+    def test_lookup1_not_cached(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup1('A', 'B', 'C')
+        self.failUnless(found is a)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+        self.assertEqual(_results, [b, c])
+
+    def test_lookup1_cached(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup1('A', 'B', 'C')
+        found = lb.lookup1('A', 'B', 'C')
+        self.failUnless(found is a)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+        self.assertEqual(_results, [b, c])
+
+    def test_lookup1_not_cached_after_changed(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        lb = self._makeOne(uc_lookup=_lookup)
+        found = lb.lookup1('A', 'B', 'C')
+        lb.changed(lb)
+        found = lb.lookup1('A', 'B', 'C')
+        self.failUnless(found is b)
+        self.assertEqual(_called_with,
+                         [(('A',), 'B', 'C'), (('A',), 'B', 'C')])
+        self.assertEqual(_results, [c])
+
+    def test_adapter_hook_miss_no_default(self):
+        req, prv = object(), object()
+        lb = self._makeOne()
+        found = lb.adapter_hook(prv, req, '')
+        self.failUnless(found is None)
+
+    def test_adapter_hook_miss_w_default(self):
+        req, prv, _default = object(), object(), object()
+        lb = self._makeOne()
+        found = lb.adapter_hook(prv, req, '', _default)
+        self.failUnless(found is _default)
+
+    def test_adapter_hook_hit_factory_returns_None(self):
+        _f_called_with = []
+        def _factory(context):
+            _f_called_with.append(context)
+            return None
+        def _lookup(self, required, provided, name):
+            return _factory
+        req, prv, _default = object(), object(), object()
+        lb = self._makeOne(uc_lookup=_lookup)
+        adapted = lb.adapter_hook(prv, req, 'C', _default)
+        self.failUnless(adapted is _default)
+        self.assertEqual(_f_called_with, [req])
+
+    def test_adapter_hook_hit_factory_returns_adapter(self):
+        _f_called_with = []
+        _adapter = object()
+        def _factory(context):
+            _f_called_with.append(context)
+            return _adapter
+        def _lookup(self, required, provided, name):
+            return _factory
+        req, prv, _default = object(), object(), object()
+        lb = self._makeOne(uc_lookup=_lookup)
+        adapted = lb.adapter_hook(prv, req, 'C', _default)
+        self.failUnless(adapted is _adapter)
+        self.assertEqual(_f_called_with, [req])
+
+    def test_queryAdapter(self):
+        _f_called_with = []
+        _adapter = object()
+        def _factory(context):
+            _f_called_with.append(context)
+            return _adapter
+        def _lookup(self, required, provided, name):
+            return _factory
+        req, prv, _default = object(), object(), object()
+        lb = self._makeOne(uc_lookup=_lookup)
+        adapted = lb.queryAdapter(req, prv, 'C', _default)
+        self.failUnless(adapted is _adapter)
+        self.assertEqual(_f_called_with, [req])
+
+    def test_lookupAll_uncached(self):
+        _called_with = []
+        _results = [object(), object(), object()]
+        def _lookupAll(self, required, provided):
+            _called_with.append((required, provided))
+            return tuple(_results)
+        lb = self._makeOne(uc_lookupAll=_lookupAll)
+        found = lb.lookupAll('A', 'B')
+        self.assertEqual(found, tuple(_results))
+        self.assertEqual(_called_with, [(('A',), 'B')])
+
+    def test_lookupAll_cached(self):
+        _called_with = []
+        _results = [object(), object(), object()]
+        def _lookupAll(self, required, provided):
+            _called_with.append((required, provided))
+            return tuple(_results)
+        lb = self._makeOne(uc_lookupAll=_lookupAll)
+        found = lb.lookupAll('A', 'B')
+        found = lb.lookupAll('A', 'B')
+        self.assertEqual(found, tuple(_results))
+        self.assertEqual(_called_with, [(('A',), 'B')])
+
+    def test_subscriptions_uncached(self):
+        _called_with = []
+        _results = [object(), object(), object()]
+        def _subscriptions(self, required, provided):
+            _called_with.append((required, provided))
+            return tuple(_results)
+        lb = self._makeOne(uc_subscriptions=_subscriptions)
+        found = lb.subscriptions('A', 'B')
+        self.assertEqual(found, tuple(_results))
+        self.assertEqual(_called_with, [(('A',), 'B')])
+
+    def test_subscriptions_cached(self):
+        _called_with = []
+        _results = [object(), object(), object()]
+        def _subscriptions(self, required, provided):
+            _called_with.append((required, provided))
+            return tuple(_results)
+        lb = self._makeOne(uc_subscriptions=_subscriptions)
+        found = lb.subscriptions('A', 'B')
+        found = lb.subscriptions('A', 'B')
+        self.assertEqual(found, tuple(_results))
+        self.assertEqual(_called_with, [(('A',), 'B')])
+
+
+class LookupBaseTests(LookupBaseFallbackTests):
+
+    def _getTargetClass(self):
+        from zope.interface.adapter import LookupBase
+        return LookupBase
+
+
+class VerifyingBaseFallbackTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.adapter import VerifyingBaseFallback
+        return VerifyingBaseFallback
+
+    def _makeOne(self, registry, uc_lookup=None, uc_lookupAll=None,
+                 uc_subscriptions=None):
+        if uc_lookup is None:
+            def uc_lookup(self, required, provided, name):
+                pass
+        if uc_lookupAll is None:
+            def uc_lookupAll(self, required, provided):
+                pass
+        if uc_subscriptions is None:
+            def uc_subscriptions(self, required, provided):
+                pass
+        class Derived(self._getTargetClass()):
+            _uncached_lookup = uc_lookup
+            _uncached_lookupAll = uc_lookupAll
+            _uncached_subscriptions = uc_subscriptions
+            def __init__(self, registry):
+                super(Derived, self).__init__()
+                self._registry = registry
+        derived = Derived(registry)
+        derived.changed(derived) # init. '_verify_ro' / '_verify_generations'
+        return derived
+
+    def _makeRegistry(self, depth):
+        class WithGeneration(object):
+            _generation = 1
+        class Registry:
+            def __init__(self, depth):
+                self.ro = [WithGeneration() for i in range(depth)]
+        return Registry(depth)
+
+    def test_lookup(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        reg = self._makeRegistry(3)
+        lb = self._makeOne(reg, uc_lookup=_lookup)
+        found = lb.lookup(('A',), 'B', 'C')
+        found = lb.lookup(('A',), 'B', 'C')
+        self.failUnless(found is a)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+        self.assertEqual(_results, [b, c])
+        reg.ro[1]._generation += 1
+        found = lb.lookup(('A',), 'B', 'C')
+        self.failUnless(found is b)
+        self.assertEqual(_called_with,
+                        [(('A',), 'B', 'C'), (('A',), 'B', 'C')])
+        self.assertEqual(_results, [c])
+
+    def test_lookup1(self):
+        _called_with = []
+        a, b, c = object(), object(), object()
+        _results = [a, b, c]
+        def _lookup(self, required, provided, name):
+            _called_with.append((required, provided, name))
+            return _results.pop(0)
+        reg = self._makeRegistry(3)
+        lb = self._makeOne(reg, uc_lookup=_lookup)
+        found = lb.lookup1('A', 'B', 'C')
+        found = lb.lookup1('A', 'B', 'C')
+        self.failUnless(found is a)
+        self.assertEqual(_called_with, [(('A',), 'B', 'C')])
+        self.assertEqual(_results, [b, c])
+        reg.ro[1]._generation += 1
+        found = lb.lookup1('A', 'B', 'C')
+        self.failUnless(found is b)
+        self.assertEqual(_called_with,
+                        [(('A',), 'B', 'C'), (('A',), 'B', 'C')])
+        self.assertEqual(_results, [c])
+
+    def test_adapter_hook(self):
+        a, b, c = [object(), object(), object()]
+        def _factory1(context):
+            return a
+        def _factory2(context):
+            return b
+        def _factory3(context):
+            return c
+        _factories = [_factory1, _factory2, _factory3]
+        def _lookup(self, required, provided, name):
+            return _factories.pop(0)
+        req, prv, _default = object(), object(), object()
+        reg = self._makeRegistry(3)
+        lb = self._makeOne(reg, uc_lookup=_lookup)
+        adapted = lb.adapter_hook(prv, req, 'C', _default)
+        self.failUnless(adapted is a)
+        adapted = lb.adapter_hook(prv, req, 'C', _default)
+        self.failUnless(adapted is a)
+        reg.ro[1]._generation += 1
+        adapted = lb.adapter_hook(prv, req, 'C', _default)
+        self.failUnless(adapted is b)
+
+    def test_queryAdapter(self):
+        a, b, c = [object(), object(), object()]
+        def _factory1(context):
+            return a
+        def _factory2(context):
+            return b
+        def _factory3(context):
+            return c
+        _factories = [_factory1, _factory2, _factory3]
+        def _lookup(self, required, provided, name):
+            return _factories.pop(0)
+        req, prv, _default = object(), object(), object()
+        reg = self._makeRegistry(3)
+        lb = self._makeOne(reg, uc_lookup=_lookup)
+        adapted = lb.queryAdapter(req, prv, 'C', _default)
+        self.failUnless(adapted is a)
+        adapted = lb.queryAdapter(req, prv, 'C', _default)
+        self.failUnless(adapted is a)
+        reg.ro[1]._generation += 1
+        adapted = lb.adapter_hook(prv, req, 'C', _default)
+        self.failUnless(adapted is b)
+
+    def test_lookupAll(self):
+        _results_1 = [object(), object(), object()]
+        _results_2 = [object(), object(), object()]
+        _results = [_results_1, _results_2]
+        def _lookupAll(self, required, provided):
+            return tuple(_results.pop(0))
+        reg = self._makeRegistry(3)
+        lb = self._makeOne(reg, uc_lookupAll=_lookupAll)
+        found = lb.lookupAll('A', 'B')
+        self.assertEqual(found, tuple(_results_1))
+        found = lb.lookupAll('A', 'B')
+        self.assertEqual(found, tuple(_results_1))
+        reg.ro[1]._generation += 1
+        found = lb.lookupAll('A', 'B')
+        self.assertEqual(found, tuple(_results_2))
+
+    def test_subscriptions(self):
+        _results_1 = [object(), object(), object()]
+        _results_2 = [object(), object(), object()]
+        _results = [_results_1, _results_2]
+        def _subscriptions(self, required, provided):
+            return tuple(_results.pop(0))
+        reg = self._makeRegistry(3)
+        lb = self._makeOne(reg, uc_subscriptions=_subscriptions)
+        found = lb.subscriptions('A', 'B')
+        self.assertEqual(found, tuple(_results_1))
+        found = lb.subscriptions('A', 'B')
+        self.assertEqual(found, tuple(_results_1))
+        reg.ro[1]._generation += 1
+        found = lb.subscriptions('A', 'B')
+        self.assertEqual(found, tuple(_results_2))
+
+
+class VerifyingBaseTests(VerifyingBaseFallbackTests):
+
+    def _getTargetClass(self):
+        from zope.interface.adapter import VerifyingBase
+        return VerifyingBase
+
+
+class AdapterLookupBaseTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.adapter import AdapterLookupBase
+        return AdapterLookupBase
+
+    def _makeOne(self, registry):
+        return self._getTargetClass()(registry)
+
+    def _makeSubregistry(self, *provided):
+        class Subregistry:
+            def __init__(self):
+                self._adapters = []
+                self._subscribers = []
+        return Subregistry()
+
+    def _makeRegistry(self, *provided):
+        class Registry:
+            def __init__(self, provided):
+                self._provided = provided
+                self.ro = []
+        return Registry(provided)
+
+    def test_ctor_empty_registry(self):
+        registry = self._makeRegistry()
+        alb = self._makeOne(registry)
+        self.assertEqual(alb._extendors, {})
+
+    def test_ctor_w_registry_provided(self):
+        from zope.interface import Interface
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        alb = self._makeOne(registry)
+        self.assertEqual(sorted(alb._extendors.keys()),
+                         sorted([IBar, IFoo, Interface]))
+        self.assertEqual(alb._extendors[IFoo], [IFoo])
+        self.assertEqual(alb._extendors[IBar], [IBar])
+        self.assertEqual(sorted(alb._extendors[Interface]),
+                         sorted([IFoo, IBar]))
+
+    def test_changed_empty_required(self):
+        # ALB.changed expects to call a mixed in changed.
+        class Mixin(object):
+            def changed(self, *other):
+                pass
+        class Derived(self._getTargetClass(), Mixin):
+            pass
+        registry = self._makeRegistry()
+        alb = Derived(registry)
+        alb.changed(alb)
+
+    def test_changed_w_required(self):
+        # ALB.changed expects to call a mixed in changed.
+        class Mixin(object):
+            def changed(self, *other):
+                pass
+        class Derived(self._getTargetClass(), Mixin):
+            pass
+        class FauxWeakref(object):
+            _unsub = None
+            def __init__(self, here):
+                self._here = here
+            def __call__(self):
+                if self._here:
+                    return self
+            def unsubscribe(self, target):
+                self._unsub = target
+        gone = FauxWeakref(False)
+        here = FauxWeakref(True)
+        registry = self._makeRegistry()
+        alb = Derived(registry)
+        alb._required[gone] = 1
+        alb._required[here] = 1
+        alb.changed(alb)
+        self.assertEqual(len(alb._required), 0)
+        self.assertEqual(gone._unsub, None)
+        self.assertEqual(here._unsub, alb)
+
+    def test_init_extendors_after_registry_update(self):
+        from zope.interface import Interface
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry()
+        alb = self._makeOne(registry)
+        registry._provided = [IFoo, IBar]
+        alb.init_extendors()
+        self.assertEqual(sorted(alb._extendors.keys()),
+                         sorted([IBar, IFoo, Interface]))
+        self.assertEqual(alb._extendors[IFoo], [IFoo])
+        self.assertEqual(alb._extendors[IBar], [IBar])
+        self.assertEqual(sorted(alb._extendors[Interface]),
+                         sorted([IFoo, IBar]))
+
+    def test_add_extendor(self):
+        from zope.interface import Interface
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry()
+        alb = self._makeOne(registry)
+        alb.add_extendor(IFoo)
+        alb.add_extendor(IBar)
+        self.assertEqual(sorted(alb._extendors.keys()),
+                         sorted([IBar, IFoo, Interface]))
+        self.assertEqual(alb._extendors[IFoo], [IFoo])
+        self.assertEqual(alb._extendors[IBar], [IBar])
+        self.assertEqual(sorted(alb._extendors[Interface]),
+                         sorted([IFoo, IBar]))
+
+    def test_remove_extendor(self):
+        from zope.interface import Interface
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        alb = self._makeOne(registry)
+        alb.remove_extendor(IFoo)
+        self.assertEqual(sorted(alb._extendors.keys()),
+                         sorted([IFoo, IBar, Interface]))
+        self.assertEqual(alb._extendors[IFoo], [])
+        self.assertEqual(alb._extendors[IBar], [IBar])
+        self.assertEqual(sorted(alb._extendors[Interface]),
+                         sorted([IBar]))
+
+    # test '_subscribe' via its callers, '_uncached_lookup', etc.
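+    # (The fake subregistries below mimic, roughly, the layout the lookup
+    # code walks: '_adapters' / '_subscribers' are lists indexed by the
+    # number of required interfaces (index 0 holds utilities), with nested
+    # dicts keyed required -> provided -> name -> factory, or a tuple of
+    # factories in the '_subscribers' case.)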
+
+    def test__uncached_lookup_empty_ro(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry()
+        alb = self._makeOne(registry)
+        result = alb._uncached_lookup((IFoo,), IBar)
+        self.assertEqual(result, None)
+        self.assertEqual(len(alb._required), 1)
+        self.failUnless(IFoo.weakref() in alb._required)
+
+    def test__uncached_lookup_order_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        result = alb._uncached_lookup((IFoo,), IBar)
+        self.assertEqual(result, None)
+
+    def test__uncached_lookup_extendors_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry()
+        subr = self._makeSubregistry()
+        subr._adapters = [{}, {}] #utilities, single adapters
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_lookup((IFoo,), IBar)
+        self.assertEqual(result, None)
+
+    def test__uncached_lookup_components_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        subr._adapters = [{}, {}] #utilities, single adapters
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_lookup((IFoo,), IBar)
+        self.assertEqual(result, None)
+
+    def test__uncached_lookup_simple_hit(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        _expected = object()
+        subr._adapters = [ #utilities, single adapters
+            {},
+            {IFoo: {IBar: {'': _expected}}},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_lookup((IFoo,), IBar)
+        self.failUnless(result is _expected)
+
+    def test_queryMultiAdaptor_lookup_miss(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        registry = self._makeRegistry()
+        subr = self._makeSubregistry()
+        subr._adapters = [ #utilities, single adapters
+            {},
+            {},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        alb.lookup = alb._uncached_lookup # provided by derived
+        subr._v_lookup = alb
+        _default = object()
+        result = alb.queryMultiAdapter((foo,), IBar, default=_default)
+        self.failUnless(result is _default)
+
+    def test_queryMultiAdaptor_factory_miss(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        _expected = object()
+        _called_with = []
+        def _factory(context):
+            _called_with.append(context)
+            return None
+        subr._adapters = [ #utilities, single adapters
+            {},
+            {IFoo: {IBar: {'': _factory}}},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        alb.lookup = alb._uncached_lookup # provided by derived
+        subr._v_lookup = alb
+        _default = object()
+        result = alb.queryMultiAdapter((foo,), IBar, default=_default)
+        self.failUnless(result is _default)
+        self.assertEqual(_called_with, [foo])
+
+    def test_queryMultiAdaptor_factory_hit(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        _expected = object()
+        _called_with = []
+        def _factory(context):
+            _called_with.append(context)
+            return _expected
+        subr._adapters = [ #utilities, single adapters
+            {},
+            {IFoo: {IBar: {'': _factory}}},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        alb.lookup = alb._uncached_lookup # provided by derived
+        subr._v_lookup = alb
+        _default = object()
+        result = alb.queryMultiAdapter((foo,), IBar, default=_default)
+        self.failUnless(result is _expected)
+        self.assertEqual(_called_with, [foo])
+
+    def test__uncached_lookupAll_empty_ro(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry()
+        alb = self._makeOne(registry)
+        result = alb._uncached_lookupAll((IFoo,), IBar)
+        self.assertEqual(result, ())
+        self.assertEqual(len(alb._required), 1)
+        self.failUnless(IFoo.weakref() in alb._required)
+
+    def test__uncached_lookupAll_order_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_lookupAll((IFoo,), IBar)
+        self.assertEqual(result, ())
+
+    def test__uncached_lookupAll_extendors_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry()
+        subr = self._makeSubregistry()
+        subr._adapters = [{}, {}] #utilities, single adapters
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_lookupAll((IFoo,), IBar)
+        self.assertEqual(result, ())
+
+    def test__uncached_lookupAll_components_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        subr._adapters = [{}, {}] #utilities, single adapters
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_lookupAll((IFoo,), IBar)
+        self.assertEqual(result, ())
+
+    def test__uncached_lookupAll_simple_hit(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        _expected = object()
+        _named = object()
+        subr._adapters = [ #utilities, single adapters
+            {},
+            {IFoo: {IBar: {'': _expected, 'named': _named}}},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_lookupAll((IFoo,), IBar)
+        self.assertEqual(sorted(result), [('', _expected), ('named', _named)])
+
+    def test_names(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        _expected = object()
+        _named = object()
+        subr._adapters = [ #utilities, single adapters
+            {},
+            {IFoo: {IBar: {'': _expected, 'named': _named}}},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        alb.lookupAll = alb._uncached_lookupAll
+        subr._v_lookup = alb
+        result = alb.names((IFoo,), IBar)
+        self.assertEqual(sorted(result), ['', 'named'])
+
+    def test__uncached_subscriptions_empty_ro(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry()
+        alb = self._makeOne(registry)
+        result = alb._uncached_subscriptions((IFoo,), IBar)
+        self.assertEqual(result, [])
+        self.assertEqual(len(alb._required), 1)
+        self.failUnless(IFoo.weakref() in alb._required)
+
+    def test__uncached_subscriptions_order_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_subscriptions((IFoo,), IBar)
+        self.assertEqual(result, [])
+
+    def test__uncached_subscriptions_extendors_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry()
+        subr = self._makeSubregistry()
+        subr._subscribers = [{}, {}] #utilities, single adapters
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_subscriptions((IFoo,), IBar)
+        self.assertEqual(result, [])
+
+    def test__uncached_subscriptions_components_miss(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        subr._subscribers = [{}, {}] #utilities, single adapters
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_subscriptions((IFoo,), IBar)
+        self.assertEqual(result, [])
+
+    def test__uncached_subscriptions_simple_hit(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        class Foo(object):
+            def __lt__(self, other):
+                return True
+        _exp1, _exp2 = Foo(), Foo()
+        subr._subscribers = [ #utilities, single adapters
+            {},
+            {IFoo: {IBar: {'': (_exp1, _exp2)}}},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        subr._v_lookup = alb
+        result = alb._uncached_subscriptions((IFoo,), IBar)
+        self.assertEqual(sorted(result), sorted([_exp1, _exp2]))
+
+    def test_subscribers_wo_provided(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        _called = {}
+        def _factory1(context):
+            _called.setdefault('_factory1', []).append(context)
+        def _factory2(context):
+            _called.setdefault('_factory2', []).append(context)
+        subr._subscribers = [ #utilities, single adapters
+            {},
+            {IFoo: {None: {'': (_factory1, _factory2)}}},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        alb.subscriptions = alb._uncached_subscriptions
+        subr._v_lookup = alb
+        result = alb.subscribers((foo,), None)
+        self.assertEqual(result, ())
+        self.assertEqual(_called, {'_factory1': [foo], '_factory2': [foo]})
+
+    def test_subscribers_w_provided(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', IFoo)
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        registry = self._makeRegistry(IFoo, IBar)
+        subr = self._makeSubregistry()
+        _called = {}
+        _exp1, _exp2 = object(), object()
+        def _factory1(context):
+            _called.setdefault('_factory1', []).append(context)
+            return _exp1
+        def _factory2(context):
+            _called.setdefault('_factory2', []).append(context)
+            return _exp2
+        subr._subscribers = [ #utilities, single adapters
+            {},
+            {IFoo: {IBar: {'': (_factory1, _factory2)}}},
+        ]
+        registry.ro.append(subr)
+        alb = self._makeOne(registry)
+        alb.subscriptions = alb._uncached_subscriptions
+        subr._v_lookup = alb
+        result = alb.subscribers((foo,), IBar)
+        self.assertEqual(result, [_exp1, _exp2])
+        self.assertEqual(_called, {'_factory1': [foo], '_factory2': [foo]})
+
+
+class AdapterRegistryTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.adapter import AdapterRegistry
+        return AdapterRegistry
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_ctor_no_bases(self):
+        ar = self._makeOne()
+        self.assertEqual(len(ar._v_subregistries), 0)
+
+    def test_ctor_w_bases(self):
+        base = self._makeOne()
+        sub = self._makeOne([base])
+        self.assertEqual(len(sub._v_subregistries), 0)
+        self.assertEqual(len(base._v_subregistries), 1)
+        self.failUnless(sub in base._v_subregistries)
+
+    # test _addSubregistry / _removeSubregistry via only caller, _setBases
+
+    def test__setBases_removing_existing_subregistry(self):
+        before = self._makeOne()
+        after = self._makeOne()
+        sub = self._makeOne([before])
+        sub.__bases__ = [after]
+        self.assertEqual(len(before._v_subregistries), 0)
+        self.assertEqual(len(after._v_subregistries), 1)
+        self.failUnless(sub in after._v_subregistries)
+
+    def test_changed_w_subregistries(self):
+        base = self._makeOne()
+        class Derived(object):
+            _changed = None
+            def changed(self, originally_changed):
+                self._changed = originally_changed
+        derived1, derived2 = Derived(), Derived()
+        base._addSubregistry(derived1)
+        base._addSubregistry(derived2)
+        orig = object()
+        base.changed(orig)
+        self.failUnless(derived1._changed is orig)
+        self.failUnless(derived2._changed is orig)
+
+
+class Test_utils(_SilencePy3Deprecations):
+
+    def test__convert_None_to_Interface_w_None(self):
+        from zope.interface.adapter import _convert_None_to_Interface
+        from zope.interface.interface import Interface
+        self.failUnless(_convert_None_to_Interface(None) is Interface)
+
+    def test__convert_None_to_Interface_w_other(self):
+        from zope.interface.adapter import _convert_None_to_Interface
+        other = object()
+        self.failUnless(_convert_None_to_Interface(other) is other)
+
+    def test__normalize_name_str(self):
+        import sys
+        from zope.interface.adapter import _normalize_name
+        STR = b'str'
+        if sys.version_info[0] < 3:
+            self.assertEqual(_normalize_name(STR), unicode(STR))
+        else:
+            self.assertEqual(_normalize_name(STR), str(STR, 'ascii'))
+
+    def test__normalize_name_unicode(self):
+        from zope.interface.adapter import _normalize_name
+        from zope.interface._compat import _u
+        USTR = _u('ustr')
+        self.assertEqual(_normalize_name(USTR), USTR)
+
+    def test__normalize_name_other(self):
+        from zope.interface.adapter import _normalize_name
+        for other in 1, 1.0, (), [], {}, object():
+            self.assertRaises(TypeError, _normalize_name, other)
+
+    # _lookup, _lookupAll, and _subscriptions tested via their callers
+    # (AdapterLookupBase.{lookup,lookupAll,subscriptions}).
+
+
+def test_suite():
+    return unittest.TestSuite((
+        unittest.makeSuite(BaseAdapterRegistryTests),
+        unittest.makeSuite(LookupBaseFallbackTests),
+        unittest.makeSuite(LookupBaseTests),
+        unittest.makeSuite(VerifyingBaseFallbackTests),
+        unittest.makeSuite(VerifyingBaseTests),
+        unittest.makeSuite(AdapterLookupBaseTests),
+        unittest.makeSuite(AdapterRegistryTests),
+        unittest.makeSuite(Test_utils),
+        ))
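
The AdapterRegistry behaviour exercised above boils down to a
register/lookup round trip.  A minimal sketch, assuming the standard
zope.interface registration API (the interface and class names below are
illustrative only):

    from zope.interface import Interface, implementer
    from zope.interface.adapter import AdapterRegistry

    class IRequired(Interface):
        pass

    class IProvided(Interface):
        pass

    @implementer(IRequired)
    class Context(object):
        pass

    @implementer(IProvided)
    class Adapter(object):
        def __init__(self, context):
            self.context = context

    registry = AdapterRegistry()
    # Register 'Adapter' as the unnamed factory adapting IRequired
    # to IProvided.
    registry.register([IRequired], IProvided, '', Adapter)

    # lookup() hands back the registered factory; queryAdapter() also
    # calls it on the object being adapted.
    factory = registry.lookup([IRequired], IProvided, '')
    adapted = registry.queryAdapter(Context(), IProvided)
    assert factory is Adapter and isinstance(adapted, Adapter)

queryMultiAdapter(), subscribers(), lookupAll() and names() follow the same
shape for multi-adapters, subscription adapters and named registrations.
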
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_advice.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_advice.py
new file mode 100644
index 0000000..e9875a7
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_advice.py
@@ -0,0 +1,397 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Tests for advice
+
+This module was adapted from 'protocols.tests.advice', part of the Python
+Enterprise Application Kit (PEAK).  Please notify the PEAK authors
+(pje at telecommunity.com and tsarna at sarna.org) if bugs are found or
+Zope-specific changes are required, so that the PEAK version of this module
+can be kept in sync.
+
+PEAK is a Python application framework that interoperates with (but does
+not require) Zope 3 and Twisted.  It provides tools for manipulating UML
+models, object-relational persistence, aspect-oriented programming, and more.
+Visit the PEAK home page at http://peak.telecommunity.com for more information.
+"""
+
+import unittest
+import sys
+
+from zope.interface._compat import _skip_under_py2
+from zope.interface._compat import _skip_under_py3k
+
+
+class _SilencePy3Deprecations(unittest.TestCase):
+    # silence deprecation warnings under py3
+
+    def failUnless(self, expr):
+        # St00pid speling.
+        return self.assertTrue(expr)
+
+    def failIf(self, expr):
+        # St00pid speling.
+        return self.assertFalse(expr)
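+    # ('failUnless' / 'failIf' are the legacy unittest aliases that emit
+    # DeprecationWarning on Python 3; routing them to assertTrue/assertFalse
+    # keeps the runs quiet without rewriting every assertion in these tests.)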
+
+
+class FrameInfoTest(_SilencePy3Deprecations):
+
+    def test_w_module(self):
+        from zope.interface.tests import advisory_testing
+        (kind, module,
+         f_locals, f_globals) = advisory_testing.moduleLevelFrameInfo
+        self.assertEqual(kind, "module")
+        for d in module.__dict__, f_locals, f_globals:
+            self.failUnless(d is advisory_testing.my_globals)
+
+    @_skip_under_py3k
+    def test_w_ClassicClass(self):
+        from zope.interface.tests import advisory_testing
+        if advisory_testing.ClassicClass is None:
+            return
+        (kind,
+         module,
+         f_locals,
+         f_globals) = advisory_testing.ClassicClass.classLevelFrameInfo
+        self.assertEqual(kind, "class")
+
+        self.failUnless(
+            f_locals is advisory_testing.ClassicClass.__dict__)  # ???
+        for d in module.__dict__, f_globals:
+            self.failUnless(d is advisory_testing.my_globals)
+
+    def test_w_NewStyleClass(self):
+        from zope.interface.tests import advisory_testing
+        (kind,
+         module,
+         f_locals,
+         f_globals) = advisory_testing.NewStyleClass.classLevelFrameInfo
+        self.assertEqual(kind, "class")
+
+        for d in module.__dict__, f_globals:
+            self.failUnless(d is advisory_testing.my_globals)
+
+    def test_inside_function_call(self):
+        from zope.interface.advice import getFrameInfo
+        kind, module, f_locals, f_globals = getFrameInfo(sys._getframe())
+        self.assertEqual(kind, "function call")
+        self.failUnless(f_locals is locals()) # ???
+        for d in module.__dict__, f_globals:
+            self.failUnless(d is globals())
+
+    def test_inside_exec(self):
+        from zope.interface.advice import getFrameInfo
+        _globals = {'getFrameInfo': getFrameInfo}
+        _locals = {}
+        exec(_FUNKY_EXEC, _globals, _locals)
+        self.assertEqual(_locals['kind'], "exec")
+        self.failUnless(_locals['f_locals'] is _locals)
+        self.failUnless(_locals['module'] is None)
+        self.failUnless(_locals['f_globals'] is _globals)
+
+
+_FUNKY_EXEC = """\
+import sys
+kind, module, f_locals, f_globals = getFrameInfo(sys._getframe())
+"""
+
+class AdviceTests(_SilencePy3Deprecations):
+
+    @_skip_under_py3k
+    def test_order(self):
+        from zope.interface.tests.advisory_testing import ping
+        log = []
+        class Foo(object):
+            ping(log, 1)
+            ping(log, 2)
+            ping(log, 3)
+
+        # Strip the list nesting
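+        # (each ping() above installs a class advisor whose callback records
+        # what it received and returns the class wrapped in a one-element
+        # list, so after three advisors Foo is nested three lists deep)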
+        for i in 1, 2, 3:
+            self.failUnless(isinstance(Foo, list))
+            Foo, = Foo
+
+        self.assertEqual(log, [(1, Foo), (2, [Foo]), (3, [[Foo]])])
+
+    def TODOtest_outside(self):
+        from zope.interface.tests.advisory_testing import ping
+        # Disabled because the check does not work with doctest tests.
+        try:
+            ping([], 1)
+        except SyntaxError:
+            pass
+        else:
+            raise AssertionError(
+                "Should have detected advice outside class body"
+            )
+
+    @_skip_under_py3k
+    def test_single_explicit_meta(self):
+        from zope.interface.tests.advisory_testing import ping
+
+        class Metaclass(type):
+            pass
+
+        class Concrete(Metaclass):
+            __metaclass__ = Metaclass
+            ping([],1)
+
+        Concrete, = Concrete
+        self.failUnless(Concrete.__class__ is Metaclass)
+
+
+    @_skip_under_py3k
+    def test_mixed_metas(self):
+        from zope.interface.tests.advisory_testing import ping
+
+        class Metaclass1(type):
+            pass
+
+        class Metaclass2(type):
+            pass
+
+        class Base1:
+            __metaclass__ = Metaclass1
+
+        class Base2:
+            __metaclass__ = Metaclass2
+
+        try:
+            class Derived(Base1, Base2):
+                ping([], 1)
+
+        except TypeError:
+            pass
+        else:
+            raise AssertionError("Should have gotten incompatibility error")
+
+        class Metaclass3(Metaclass1, Metaclass2):
+            pass
+
+        class Derived(Base1, Base2):
+            __metaclass__ = Metaclass3
+            ping([], 1)
+
+        self.failUnless(isinstance(Derived, list))
+        Derived, = Derived
+        self.failUnless(isinstance(Derived, Metaclass3))
+
+    @_skip_under_py3k
+    def test_meta_no_bases(self):
+        from zope.interface.tests.advisory_testing import ping
+        try:
+            from types import ClassType
+        except ImportError:
+            return
+        class Thing:
+            ping([], 1)
+        klass, = Thing # unpack list created by pong
+        self.assertEqual(type(klass), ClassType)
+
+
+class Test_isClassAdvisor(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.advice import isClassAdvisor
+        return isClassAdvisor(*args, **kw)
+
+    def test_w_non_function(self):
+        self.assertEqual(self._callFUT(self), False)
+
+    def test_w_normal_function(self):
+        def foo():
+            pass
+        self.assertEqual(self._callFUT(foo), False)
+
+    def test_w_advisor_function(self):
+        def bar():
+            pass
+        bar.previousMetaclass = object()
+        self.assertEqual(self._callFUT(bar), True)
+
+
+class Test_determineMetaclass(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.advice import determineMetaclass
+        return determineMetaclass(*args, **kw)
+
+    @_skip_under_py3k
+    def test_empty(self):
+        from types import ClassType
+        self.assertEqual(self._callFUT(()), ClassType)
+
+    def test_empty_w_explicit_metatype(self):
+        class Meta(type):
+            pass
+        self.assertEqual(self._callFUT((), Meta), Meta)
+
+    def test_single(self):
+        class Meta(type):
+            pass
+        self.assertEqual(self._callFUT((Meta,)), type)
+
+    @_skip_under_py3k
+    def test_meta_of_class(self):
+        class Metameta(type):
+            pass
+
+        class Meta(type):
+            __metaclass__ = Metameta
+
+        self.assertEqual(self._callFUT((Meta, type)), Metameta)
+
+    @_skip_under_py2
+    def test_meta_of_class_py3k(self):
+        # Work around SyntaxError under Python2.
+        EXEC = '\n'.join([
+        'class Metameta(type):',
+        '    pass',
+        'class Meta(type, metaclass=Metameta):',
+        '    pass',
+        ])
+        globs = {}
+        exec(EXEC, globs)
+        Meta = globs['Meta']
+        Metameta = globs['Metameta']
+
+        self.assertEqual(self._callFUT((Meta, type)), Metameta)
+
+    @_skip_under_py3k
+    def test_multiple_in_hierarchy(self):
+        class Meta_A(type):
+            pass
+        class Meta_B(Meta_A):
+            pass
+        class A(type):
+            __metaclass__ = Meta_A
+        class B(type):
+            __metaclass__ = Meta_B
+        self.assertEqual(self._callFUT((A, B,)), Meta_B)
+
+    @_skip_under_py2
+    def test_multiple_in_hierarchy_py3k(self):
+        # Work around SyntaxError under Python2.
+        EXEC = '\n'.join([
+        'class Meta_A(type):',
+        '    pass',
+        'class Meta_B(Meta_A):',
+        '    pass',
+        'class A(type, metaclass=Meta_A):',
+        '    pass',
+        'class B(type, metaclass=Meta_B):',
+        '    pass',
+        ])
+        globs = {}
+        exec(EXEC, globs)
+        Meta_A = globs['Meta_A']
+        Meta_B = globs['Meta_B']
+        A = globs['A']
+        B = globs['B']
+        self.assertEqual(self._callFUT((A, B)), Meta_B)
+
+    @_skip_under_py3k
+    def test_multiple_not_in_hierarchy(self):
+        class Meta_A(type):
+            pass
+        class Meta_B(type):
+            pass
+        class A(type):
+            __metaclass__ = Meta_A
+        class B(type):
+            __metaclass__ = Meta_B
+        self.assertRaises(TypeError, self._callFUT, (A, B,))
+
+    @_skip_under_py2
+    def test_multiple_not_in_hierarchy_py3k(self):
+        # Work around SyntaxError under Python2.
+        EXEC = '\n'.join([
+        'class Meta_A(type):',
+        '    pass',
+        'class Meta_B(type):',
+        '    pass',
+        'class A(type, metaclass=Meta_A):',
+        '    pass',
+        'class B(type, metaclass=Meta_B):',
+        '    pass',
+        ])
+        globs = {}
+        exec(EXEC, globs)
+        Meta_A = globs['Meta_A']
+        Meta_B = globs['Meta_B']
+        A = globs['A']
+        B = globs['B']
+        self.assertRaises(TypeError, self._callFUT, (A, B))
+
+
+class Test_minimalBases(_SilencePy3Deprecations):
+
+    def _callFUT(self, klasses):
+        from zope.interface.advice import minimalBases
+        return minimalBases(klasses)
+
+    def test_empty(self):
+        self.assertEqual(self._callFUT([]), [])
+
+    @_skip_under_py3k
+    def test_w_oldstyle_meta(self):
+        class C:
+            pass
+        self.assertEqual(self._callFUT([type(C)]), [])
+
+    @_skip_under_py3k
+    def test_w_oldstyle_class(self):
+        class C:
+            pass
+        self.assertEqual(self._callFUT([C]), [C])
+
+    def test_w_newstyle_meta(self):
+        self.assertEqual(self._callFUT([type]), [type])
+
+    def test_w_newstyle_class(self):
+        class C(object):
+            pass
+        self.assertEqual(self._callFUT([C]), [C])
+
+    def test_simple_hierarchy_skips_implied(self):
+        class A(object):
+            pass
+        class B(A):
+            pass
+        class C(B):
+            pass
+        class D(object):
+            pass
+        self.assertEqual(self._callFUT([A, B, C]), [C])
+        self.assertEqual(self._callFUT([A, C]), [C])
+        self.assertEqual(self._callFUT([B, C]), [C])
+        self.assertEqual(self._callFUT([A, B]), [B])
+        self.assertEqual(self._callFUT([D, B, D]), [B, D])
+
+    def test_repeats_kicked_to_end_of_queue(self):
+        class A(object):
+            pass
+        class B(object):
+            pass
+        self.assertEqual(self._callFUT([A, B, A]), [B, A])
+
+
+
+def test_suite():
+    return unittest.TestSuite((
+        unittest.makeSuite(FrameInfoTest),
+        unittest.makeSuite(AdviceTests),
+        unittest.makeSuite(Test_isClassAdvisor),
+        unittest.makeSuite(Test_determineMetaclass),
+        unittest.makeSuite(Test_minimalBases),
+    ))
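
For a quick end-to-end picture of the three helpers tested above, a short
sketch assuming the zope.interface.advice API as these tests use it (the
local function and class names are made up):

    import sys

    from zope.interface.advice import getFrameInfo
    from zope.interface.advice import determineMetaclass, minimalBases

    def frame_kind():
        # Inside an ordinary function the frame is reported as a
        # "function call".
        kind, module, f_locals, f_globals = getFrameInfo(sys._getframe())
        return kind

    class Meta(type):
        pass

    print(frame_kind())                   # 'function call'
    print(determineMetaclass((), Meta))   # Meta: the explicit metaclass wins
    print(minimalBases([object, int]))    # [int]: 'object' is implied by 'int'
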
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_declarations.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_declarations.py
new file mode 100644
index 0000000..49991d0
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_declarations.py
@@ -0,0 +1,1576 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test the new API for making and checking interface declarations
+"""
+import unittest
+
+from zope.interface._compat import _skip_under_py3k
+
+
+class _SilencePy3Deprecations(unittest.TestCase):
+    # silence deprecation warnings under py3
+
+    def failUnless(self, expr):
+        # St00pid speling.
+        return self.assertTrue(expr)
+
+    def failIf(self, expr):
+        # St00pid speling.
+        return self.assertFalse(expr)
+
+
+class _Py3ClassAdvice(object):
+
+    def _run_generated_code(self, code, globs, locs,
+                            fails_under_py3k=True,
+                           ):
+        import warnings
+        from zope.interface._compat import PYTHON3
+        with warnings.catch_warnings(record=True) as log:
+            warnings.resetwarnings()
+            if not PYTHON3:
+                exec(code, globs, locs)
+                self.assertEqual(len(log), 0) # no longer warn
+                return True
+            else:
+                try:
+                    exec(code, globs, locs)
+                except TypeError:
+                    return False
+                else:
+                    if fails_under_py3k:
+                        self.fail("Didn't raise TypeError")
+ 
+
+class DeclarationTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import Declaration
+        return Declaration
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+ 
+    def test_ctor_no_bases(self):
+        decl = self._makeOne()
+        self.assertEqual(list(decl.__bases__), [])
+
+    def test_ctor_w_interface_in_bases(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decl = self._makeOne(IFoo)
+        self.assertEqual(list(decl.__bases__), [IFoo])
+
+    def test_ctor_w_implements_in_bases(self):
+        from zope.interface.declarations import Implements
+        impl = Implements()
+        decl = self._makeOne(impl)
+        self.assertEqual(list(decl.__bases__), [impl])
+
+    def test_changed_wo_existing__v_attrs(self):
+        decl = self._makeOne()
+        decl.changed(decl) # doesn't raise
+        self.failIf('_v_attrs' in decl.__dict__)
+
+    def test_changed_w_existing__v_attrs(self):
+        decl = self._makeOne()
+        decl._v_attrs = object()
+        decl.changed(decl)
+        self.failIf('_v_attrs' in decl.__dict__)
+
+    def test___contains__w_self(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decl = self._makeOne()
+        self.failIf(decl in decl)
+
+    def test___contains__w_unrelated_iface(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decl = self._makeOne()
+        self.failIf(IFoo in decl)
+
+    def test___contains__w_base_interface(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decl = self._makeOne(IFoo)
+        self.failUnless(IFoo in decl)
+
+    def test___iter___empty(self):
+        decl = self._makeOne()
+        self.assertEqual(list(decl), [])
+
+    def test___iter___single_base(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decl = self._makeOne(IFoo)
+        self.assertEqual(list(decl), [IFoo])
+
+    def test___iter___multiple_bases(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        decl = self._makeOne(IFoo, IBar)
+        self.assertEqual(list(decl), [IFoo, IBar])
+
+    def test___iter___inheritance(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', (IFoo,))
+        decl = self._makeOne(IBar)
+        self.assertEqual(list(decl), [IBar]) #IBar.interfaces() omits bases
+
+    def test___iter___w_nested_sequence_overlap(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        decl = self._makeOne(IBar, (IFoo, IBar))
+        self.assertEqual(list(decl), [IBar, IFoo])
+
+    def test_flattened_empty(self):
+        from zope.interface.interface import Interface
+        decl = self._makeOne()
+        self.assertEqual(list(decl.flattened()), [Interface])
+
+    def test_flattened_single_base(self):
+        from zope.interface.interface import Interface
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decl = self._makeOne(IFoo)
+        self.assertEqual(list(decl.flattened()), [IFoo, Interface])
+
+    def test_flattened_multiple_bases(self):
+        from zope.interface.interface import Interface
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        decl = self._makeOne(IFoo, IBar)
+        self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface])
+
+    def test_flattened_inheritance(self):
+        from zope.interface.interface import Interface
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', (IFoo,))
+        decl = self._makeOne(IBar)
+        self.assertEqual(list(decl.flattened()), [IBar, IFoo, Interface])
+
+    def test_flattened_w_nested_sequence_overlap(self):
+        from zope.interface.interface import Interface
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        decl = self._makeOne(IBar, (IFoo, IBar))
+        # Note that decl.__iro__ has IFoo first.
+        self.assertEqual(list(decl.flattened()), [IFoo, IBar, Interface])
+
+    def test___sub___unrelated_interface(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        before = self._makeOne(IFoo)
+        after = before - IBar
+        self.failUnless(isinstance(after, self._getTargetClass()))
+        self.assertEqual(list(after), [IFoo])
+
+    def test___sub___related_interface(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        before = self._makeOne(IFoo)
+        after = before - IFoo
+        self.assertEqual(list(after), [])
+
+    def test___sub___related_interface_by_inheritance(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar', (IFoo,))
+        before = self._makeOne(IBar)
+        after = before - IBar
+        self.assertEqual(list(after), [])
+
+    def test___add___unrelated_interface(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        before = self._makeOne(IFoo)
+        after = before + IBar
+        self.failUnless(isinstance(after, self._getTargetClass()))
+        self.assertEqual(list(after), [IFoo, IBar])
+
+    def test___add___related_interface(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        IBaz = InterfaceClass('IBaz')
+        before = self._makeOne(IFoo, IBar)
+        other = self._makeOne(IBar, IBaz)
+        after = before + other
+        self.assertEqual(list(after), [IFoo, IBar, IBaz])
+ 
+
+class ImplementsTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import Implements
+        return Implements
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+ 
+    def test_ctor_no_bases(self):
+        impl = self._makeOne()
+        self.assertEqual(impl.inherit, None)
+        self.assertEqual(impl.declared, ())
+        self.assertEqual(impl.__name__, '?')
+        self.assertEqual(list(impl.__bases__), [])
+ 
+    def test___repr__(self):
+        impl = self._makeOne()
+        impl.__name__ = 'Testing'
+        self.assertEqual(repr(impl), '<implementedBy Testing>')
+ 
+    def test___reduce__(self):
+        from zope.interface.declarations import implementedBy
+        impl = self._makeOne()
+        self.assertEqual(impl.__reduce__(), (implementedBy, (None,)))
+
+
+class Test_implementedByFallback(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import implementedByFallback
+        return implementedByFallback(*args, **kw)
+
+    def test_dictless_wo_existing_Implements_wo_registrations(self):
+        class Foo(object):
+            __slots__ = ('__implemented__',)
+        foo = Foo()
+        foo.__implemented__ = None
+        self.assertEqual(list(self._callFUT(foo)), [])
+
+    def test_dictless_wo_existing_Implements_cant_assign___implemented__(self):
+        class Foo(object):
+            def _get_impl(self): return None
+            def _set_impl(self, val): raise TypeError
+            __implemented__ = property(_get_impl, _set_impl)
+            def __call__(self): pass  #act like a factory
+        foo = Foo()
+        self.assertRaises(TypeError, self._callFUT, foo)
+
+    def test_dictless_wo_existing_Implements_w_registrations(self):
+        from zope.interface import declarations
+        class Foo(object):
+            __slots__ = ('__implemented__',)
+        foo = Foo()
+        foo.__implemented__ = None
+        reg = object()
+        with _MonkeyDict(declarations,
+                         'BuiltinImplementationSpecifications') as specs:
+            specs[foo] = reg
+            self.failUnless(self._callFUT(foo) is reg)
+
+    def test_dictless_w_existing_Implements(self):
+        from zope.interface.declarations import Implements
+        impl = Implements()
+        class Foo(object):
+            __slots__ = ('__implemented__',)
+        foo = Foo()
+        foo.__implemented__ = impl
+        self.failUnless(self._callFUT(foo) is impl)
+
+    def test_dictless_w_existing_not_Implements(self):
+        from zope.interface.interface import InterfaceClass
+        class Foo(object):
+            __slots__ = ('__implemented__',)
+        foo = Foo()
+        IFoo = InterfaceClass('IFoo')
+        foo.__implemented__ = (IFoo,)
+        self.assertEqual(list(self._callFUT(foo)), [IFoo])
+
+    def test_w_existing_attr_as_Implements(self):
+        from zope.interface.declarations import Implements
+        impl = Implements()
+        class Foo(object):
+            __implemented__ = impl
+        self.failUnless(self._callFUT(Foo) is impl)
+
+    def test_builtins_added_to_cache(self):
+        from zope.interface import declarations
+        from zope.interface.declarations import Implements
+        from zope.interface._compat import _BUILTINS
+        with _MonkeyDict(declarations,
+                         'BuiltinImplementationSpecifications') as specs:
+            self.assertEqual(list(self._callFUT(tuple)), [])
+            self.assertEqual(list(self._callFUT(list)), [])
+            self.assertEqual(list(self._callFUT(dict)), [])
+            for typ in (tuple, list, dict):
+                spec = specs[typ]
+                self.failUnless(isinstance(spec, Implements))
+                self.assertEqual(repr(spec),
+                                '<implementedBy %s.%s>'
+                                    % (_BUILTINS, typ.__name__))
+
+    def test_builtins_w_existing_cache(self):
+        from zope.interface import declarations
+        t_spec, l_spec, d_spec = object(), object(), object()
+        with _MonkeyDict(declarations,
+                         'BuiltinImplementationSpecifications') as specs:
+            specs[tuple] = t_spec
+            specs[list] = l_spec
+            specs[dict] = d_spec
+            self.failUnless(self._callFUT(tuple) is t_spec)
+            self.failUnless(self._callFUT(list) is l_spec)
+            self.failUnless(self._callFUT(dict) is d_spec)
+
+    def test_oldstyle_class_no_assertions(self):
+        # TODO: Figure out P3 story
+        class Foo:
+            pass
+        self.assertEqual(list(self._callFUT(Foo)), [])
+
+    def test_no_assertions(self):
+        # TODO: Figure out P3 story
+        class Foo(object):
+            pass
+        self.assertEqual(list(self._callFUT(Foo)), [])
+
+    def test_w_None_no_bases_not_factory(self):
+        class Foo(object):
+            __implemented__ = None
+        foo = Foo()
+        self.assertRaises(TypeError, self._callFUT, foo)
+
+    def test_w_None_no_bases_w_factory(self):
+        from zope.interface.declarations import objectSpecificationDescriptor
+        class Foo(object):
+            __implemented__ = None
+            def __call__(self):
+                pass
+        foo = Foo()
+        foo.__name__ = 'foo'
+        spec = self._callFUT(foo)
+        self.assertEqual(spec.__name__,
+                         'zope.interface.tests.test_declarations.foo')
+        self.failUnless(spec.inherit is foo)
+        self.failUnless(foo.__implemented__ is spec)
+        self.failUnless(foo.__providedBy__ is objectSpecificationDescriptor)
+        self.failIf('__provides__' in foo.__dict__)
+
+    def test_w_None_no_bases_w_class(self):
+        from zope.interface.declarations import ClassProvides
+        class Foo(object):
+            __implemented__ = None
+        spec = self._callFUT(Foo)
+        self.assertEqual(spec.__name__,
+                         'zope.interface.tests.test_declarations.Foo')
+        self.failUnless(spec.inherit is Foo)
+        self.failUnless(Foo.__implemented__ is spec)
+        self.failUnless(isinstance(Foo.__providedBy__, ClassProvides))
+        self.failUnless(isinstance(Foo.__provides__, ClassProvides))
+        self.assertEqual(Foo.__provides__, Foo.__providedBy__)
+
+    def test_w_existing_Implements(self):
+        from zope.interface.declarations import Implements
+        impl = Implements()
+        class Foo(object):
+            __implemented__ = impl
+        self.failUnless(self._callFUT(Foo) is impl)
+
+
+class Test_implementedBy(Test_implementedByFallback):
+    # Repeat tests for C optimizations
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import implementedBy
+        return implementedBy(*args, **kw)
+
+
+class Test_classImplementsOnly(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import classImplementsOnly
+        return classImplementsOnly(*args, **kw)
+
+    def test_no_existing(self):
+        from zope.interface.declarations import ClassProvides
+        from zope.interface.interface import InterfaceClass
+        class Foo(object):
+            pass
+        ifoo = InterfaceClass('IFoo')
+        self._callFUT(Foo, ifoo)
+        spec = Foo.__implemented__
+        self.assertEqual(spec.__name__,
+                         'zope.interface.tests.test_declarations.Foo')
+        self.failUnless(spec.inherit is None)
+        self.failUnless(Foo.__implemented__ is spec)
+        self.failUnless(isinstance(Foo.__providedBy__, ClassProvides))
+        self.failUnless(isinstance(Foo.__provides__, ClassProvides))
+        self.assertEqual(Foo.__provides__, Foo.__providedBy__)
+
+    def test_w_existing_Implements(self):
+        from zope.interface.declarations import Implements
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        impl = Implements(IFoo)
+        impl.declared = (IFoo,)
+        class Foo(object):
+            __implemented__ = impl
+        impl.inherit = Foo
+        self._callFUT(Foo, IBar)
+        # Same spec, now different values
+        self.failUnless(Foo.__implemented__ is impl)
+        self.assertEqual(impl.inherit, None)
+        self.assertEqual(impl.declared, (IBar,))
+
+
+class Test_classImplements(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import classImplements
+        return classImplements(*args, **kw)
+
+    def test_no_existing(self):
+        from zope.interface.declarations import ClassProvides
+        from zope.interface.interface import InterfaceClass
+        class Foo(object):
+            pass
+        IFoo = InterfaceClass('IFoo')
+        self._callFUT(Foo, IFoo)
+        spec = Foo.__implemented__
+        self.assertEqual(spec.__name__,
+                         'zope.interface.tests.test_declarations.Foo')
+        self.failUnless(spec.inherit is Foo)
+        self.failUnless(Foo.__implemented__ is spec)
+        self.failUnless(isinstance(Foo.__providedBy__, ClassProvides))
+        self.failUnless(isinstance(Foo.__provides__, ClassProvides))
+        self.assertEqual(Foo.__provides__, Foo.__providedBy__)
+
+    def test_w_existing_Implements(self):
+        from zope.interface.declarations import Implements
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        impl = Implements(IFoo)
+        impl.declared = (IFoo,)
+        class Foo(object):
+            __implemented__ = impl
+        impl.inherit = Foo
+        self._callFUT(Foo, IBar)
+        # Same spec, now different values
+        self.failUnless(Foo.__implemented__ is impl)
+        self.assertEqual(impl.inherit, Foo)
+        self.assertEqual(impl.declared, (IFoo, IBar,))
+
+    def test_w_existing_Implements_w_bases(self):
+        from zope.interface.declarations import Implements
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        IBaz = InterfaceClass('IBaz')
+        b_impl = Implements(IBaz)
+        impl = Implements(IFoo)
+        impl.declared = (IFoo,)
+        class Base(object):
+            __implemented__ = b_impl
+        class Foo(Base):
+            __implemented__ = impl
+        impl.inherit = Foo
+        self._callFUT(Foo, IBar)
+        # Same spec, now different values
+        self.failUnless(Foo.__implemented__ is impl)
+        self.assertEqual(impl.inherit, Foo)
+        self.assertEqual(impl.declared, (IFoo, IBar,))
+        self.assertEqual(impl.__bases__, (IFoo, IBar, b_impl))
+
+
+class Test__implements_advice(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import _implements_advice
+        return _implements_advice(*args, **kw)
+
+    def test_no_existing_implements(self):
+        from zope.interface.declarations import classImplements
+        from zope.interface.declarations import Implements
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        class Foo(object):
+            __implements_advice_data__ = ((IFoo,), classImplements)
+        self._callFUT(Foo)
+        self.failIf('__implements_advice_data__' in Foo.__dict__)
+        self.failUnless(isinstance(Foo.__implemented__, Implements))
+        self.assertEqual(list(Foo.__implemented__), [IFoo])
+
+
+class Test_implementer(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import implementer
+        return implementer
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_oldstyle_class(self):
+        # TODO Py3 story
+        from zope.interface.declarations import ClassProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        class Foo:
+            pass
+        decorator = self._makeOne(IFoo)
+        returned = decorator(Foo)
+        self.failUnless(returned is Foo)
+        spec = Foo.__implemented__
+        self.assertEqual(spec.__name__,
+                         'zope.interface.tests.test_declarations.Foo')
+        self.failUnless(spec.inherit is Foo)
+        self.failUnless(Foo.__implemented__ is spec)
+        self.failUnless(isinstance(Foo.__providedBy__, ClassProvides))
+        self.failUnless(isinstance(Foo.__provides__, ClassProvides))
+        self.assertEqual(Foo.__provides__, Foo.__providedBy__)
+
+    def test_newstyle_class(self):
+        from zope.interface.declarations import ClassProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        class Foo(object):
+            pass
+        decorator = self._makeOne(IFoo)
+        returned = decorator(Foo)
+        self.failUnless(returned is Foo)
+        spec = Foo.__implemented__
+        self.assertEqual(spec.__name__,
+                         'zope.interface.tests.test_declarations.Foo')
+        self.failUnless(spec.inherit is Foo)
+        self.failUnless(Foo.__implemented__ is spec)
+        self.failUnless(isinstance(Foo.__providedBy__, ClassProvides))
+        self.failUnless(isinstance(Foo.__provides__, ClassProvides))
+        self.assertEqual(Foo.__provides__, Foo.__providedBy__)
+
+    def test_nonclass_cannot_assign_attr(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decorator = self._makeOne(IFoo)
+        self.assertRaises(TypeError, decorator, object())
+
+    def test_nonclass_can_assign_attr(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        class Foo(object):
+            pass
+        foo = Foo()
+        decorator = self._makeOne(IFoo)
+        returned = decorator(foo)
+        self.failUnless(returned is foo)
+        spec = foo.__implemented__
+        self.assertEqual(spec.__name__, '?')
+        self.failUnless(spec.inherit is None)
+        self.failUnless(foo.__implemented__ is spec)
+
+
+class Test_implementer_only(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import implementer_only
+        return implementer_only
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_function(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decorator = self._makeOne(IFoo)
+        def _function(): pass
+        self.assertRaises(ValueError, decorator, _function)
+
+    def test_method(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        decorator = self._makeOne(IFoo)
+        class Bar:
+            def _method(): pass
+        self.assertRaises(ValueError, decorator, Bar._method)
+
+    def test_oldstyle_class(self):
+        # TODO Py3 story
+        from zope.interface.declarations import Implements
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        old_spec = Implements(IBar)
+        class Foo:
+            __implemented__ = old_spec
+        decorator = self._makeOne(IFoo)
+        returned = decorator(Foo)
+        self.failUnless(returned is Foo)
+        spec = Foo.__implemented__
+        self.assertEqual(spec.__name__, '?')
+        self.failUnless(spec.inherit is None)
+        self.failUnless(Foo.__implemented__ is spec)
+
+    def test_newstyle_class(self):
+        from zope.interface.declarations import Implements
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass('IFoo')
+        IBar = InterfaceClass('IBar')
+        old_spec = Implements(IBar)
+        class Foo(object):
+            __implemented__ = old_spec
+        decorator = self._makeOne(IFoo)
+        returned = decorator(Foo)
+        self.failUnless(returned is Foo)
+        spec = Foo.__implemented__
+        self.assertEqual(spec.__name__, '?')
+        self.failUnless(spec.inherit is None)
+        self.failUnless(Foo.__implemented__ is spec)
+
+
+# Test '_implements' by way of 'implements{,Only}', its only callers.
+
+class Test_implementsOnly(_SilencePy3Deprecations, _Py3ClassAdvice):
+
+    def _getFUT(self):
+        from zope.interface.declarations import implementsOnly
+        return implementsOnly
+
+    def test_simple(self):
+        import warnings
+        from zope.interface.declarations import implementsOnly
+        from zope.interface._compat import PYTHON3
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        globs = {'implementsOnly': implementsOnly,
+                 'IFoo': IFoo,
+                }
+        locs = {}
+        CODE = "\n".join([
+            'class Foo(object):',
+            '    implementsOnly(IFoo)',
+            ])
+        with warnings.catch_warnings(record=True) as log:
+            warnings.resetwarnings()
+            try:
+                exec(CODE, globs, locs)
+            except TypeError:
+                if not PYTHON3:
+                    raise
+            else:
+                if PYTHON3:
+                    self.fail("Didn't raise TypeError")
+                Foo = locs['Foo']
+                spec = Foo.__implemented__
+                self.assertEqual(list(spec), [IFoo])
+                self.assertEqual(len(log), 0) # no longer warn
+
+    def test_called_once_from_class_w_bases(self):
+        from zope.interface.declarations import implements
+        from zope.interface.declarations import implementsOnly
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        globs = {'implements': implements,
+                 'implementsOnly': implementsOnly,
+                 'IFoo': IFoo,
+                 'IBar': IBar,
+                }
+        locs = {}
+        CODE = "\n".join([
+            'class Foo(object):',
+            '    implements(IFoo)',
+            'class Bar(Foo):',
+            '    implementsOnly(IBar)',
+            ])
+        if self._run_generated_code(CODE, globs, locs):
+            Bar = locs['Bar']
+            spec = Bar.__implemented__
+            self.assertEqual(list(spec), [IBar])
+
+
+class Test_implements(_SilencePy3Deprecations, _Py3ClassAdvice):
+
+    def _getFUT(self):
+        from zope.interface.declarations import implements
+        return implements
+
+    def test_called_from_function(self):
+        import warnings
+        from zope.interface.declarations import implements
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        globs = {'implements': implements, 'IFoo': IFoo}
+        locs = {}
+        CODE = "\n".join([
+            'def foo():',
+            '    implements(IFoo)'
+            ])
+        if self._run_generated_code(CODE, globs, locs, False):
+            foo = locs['foo']
+            with warnings.catch_warnings(record=True) as log:
+                warnings.resetwarnings()
+                self.assertRaises(TypeError, foo)
+                self.assertEqual(len(log), 0) # no longer warn
+
+    def test_called_twice_from_class(self):
+        import warnings
+        from zope.interface.declarations import implements
+        from zope.interface.interface import InterfaceClass
+        from zope.interface._compat import PYTHON3
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        globs = {'implements': implements, 'IFoo': IFoo, 'IBar': IBar}
+        locs = {}
+        CODE = "\n".join([
+            'class Foo(object):',
+            '    implements(IFoo)',
+            '    implements(IBar)',
+            ])
+        with warnings.catch_warnings(record=True) as log:
+            warnings.resetwarnings()
+            try:
+                exec(CODE, globs, locs)
+            except TypeError:
+                if not PYTHON3:
+                    self.assertEqual(len(log), 0) # no longer warn
+            else:
+                self.fail("Didn't raise TypeError")
+
+    def test_called_once_from_class(self):
+        from zope.interface.declarations import implements
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        globs = {'implements': implements, 'IFoo': IFoo}
+        locs = {}
+        CODE = "\n".join([
+            'class Foo(object):',
+            '    implements(IFoo)',
+            ])
+        if self._run_generated_code(CODE, globs, locs):
+            Foo = locs['Foo']
+            spec = Foo.__implemented__
+            self.assertEqual(list(spec), [IFoo])
+
+
+class ProvidesClassTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import ProvidesClass
+        return ProvidesClass
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_simple_class_one_interface(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        spec = self._makeOne(Foo, IFoo)
+        self.assertEqual(list(spec), [IFoo])
+
+    def test___reduce__(self):
+        from zope.interface.declarations import Provides # the function
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        spec = self._makeOne(Foo, IFoo)
+        klass, args = spec.__reduce__()
+        self.failUnless(klass is Provides)
+        self.assertEqual(args, (Foo, IFoo))
+
+    def test___get___class(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        spec = self._makeOne(Foo, IFoo)
+        Foo.__provides__ = spec
+        self.failUnless(Foo.__provides__ is spec)
+
+    def test___get___instance(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        spec = self._makeOne(Foo, IFoo)
+        Foo.__provides__ = spec
+        def _test():
+            foo = Foo()
+            return foo.__provides__
+        self.assertRaises(AttributeError, _test)
+
+
+class Test_Provides(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import Provides
+        return Provides(*args, **kw)
+
+    def test_no_cached_spec(self):
+        from zope.interface import declarations
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        cache = {}
+        class Foo(object):
+            pass
+        with _Monkey(declarations, InstanceDeclarations=cache):
+            spec = self._callFUT(Foo, IFoo)
+        self.assertEqual(list(spec), [IFoo])
+        self.failUnless(cache[(Foo, IFoo)] is spec)
+
+    def test_w_cached_spec(self):
+        from zope.interface import declarations
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        prior = object()
+        class Foo(object):
+            pass
+        cache = {(Foo, IFoo): prior}
+        with _Monkey(declarations, InstanceDeclarations=cache):
+            spec = self._callFUT(Foo, IFoo)
+        self.failUnless(spec is prior)
+
+
+class Test_directlyProvides(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import directlyProvides
+        return directlyProvides(*args, **kw)
+
+    def test_w_normal_object(self):
+        from zope.interface.declarations import ProvidesClass
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        obj = Foo()
+        self._callFUT(obj, IFoo)
+        self.failUnless(isinstance(obj.__provides__, ProvidesClass))
+        self.assertEqual(list(obj.__provides__), [IFoo])
+
+    def test_w_class(self):
+        from zope.interface.declarations import ClassProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        self._callFUT(Foo, IFoo)
+        self.failUnless(isinstance(Foo.__provides__, ClassProvides))
+        self.assertEqual(list(Foo.__provides__), [IFoo])
+
+    @_skip_under_py3k
+    def test_w_non_descriptor_aware_metaclass(self):
+        # There are no non-descriptor-aware types in Py3k
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class MetaClass(type):
+            def __getattribute__(self, name):
+                # Emulate metaclass whose base is not the type object.
+                if name == '__class__':
+                    return self
+                return type.__getattribute__(self, name)
+        class Foo(object):
+            __metaclass__ = MetaClass
+        obj = Foo()
+        self.assertRaises(TypeError, self._callFUT, obj, IFoo)
+
+    def test_w_classless_object(self):
+        from zope.interface.declarations import ProvidesClass
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        the_dict = {}
+        class Foo(object):
+            def __getattribute__(self, name):
+                # Emulate object w/o any class
+                if name == '__class__':
+                    return None
+                try:
+                    return the_dict[name]
+                except KeyError:
+                    raise AttributeError(name)
+            def __setattr__(self, name, value):
+                the_dict[name] = value
+        obj = Foo()
+        self._callFUT(obj, IFoo)
+        self.failUnless(isinstance(the_dict['__provides__'], ProvidesClass))
+        self.assertEqual(list(the_dict['__provides__']), [IFoo])
+
+
+class Test_alsoProvides(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import alsoProvides
+        return alsoProvides(*args, **kw)
+
+    def test_wo_existing_provides(self):
+        from zope.interface.declarations import ProvidesClass
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        obj = Foo()
+        self._callFUT(obj, IFoo)
+        self.failUnless(isinstance(obj.__provides__, ProvidesClass))
+        self.assertEqual(list(obj.__provides__), [IFoo])
+
+    def test_w_existing_provides(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.declarations import ProvidesClass
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        class Foo(object):
+            pass
+        obj = Foo()
+        directlyProvides(obj, IFoo)
+        self._callFUT(obj, IBar)
+        self.failUnless(isinstance(obj.__provides__, ProvidesClass))
+        self.assertEqual(list(obj.__provides__), [IFoo, IBar])
+
+
+class Test_noLongerProvides(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import noLongerProvides
+        return noLongerProvides(*args, **kw)
+
+    def test_wo_existing_provides(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        obj = Foo()
+        self._callFUT(obj, IFoo)
+        self.assertEqual(list(obj.__provides__), [])
+
+    def test_w_existing_provides_hit(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        obj = Foo()
+        directlyProvides(obj, IFoo)
+        self._callFUT(obj, IFoo)
+        self.assertEqual(list(obj.__provides__), [])
+
+    def test_w_existing_provides_miss(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        class Foo(object):
+            pass
+        obj = Foo()
+        directlyProvides(obj, IFoo)
+        self._callFUT(obj, IBar)
+        self.assertEqual(list(obj.__provides__), [IFoo])
+
+    def test_w_iface_implemented_by_class(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        obj = Foo()
+        self.assertRaises(ValueError, self._callFUT, obj, IFoo)
+
+
+class ClassProvidesBaseFallbackTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import ClassProvidesBaseFallback
+        return ClassProvidesBaseFallback
+
+    def _makeOne(self, klass, implements):
+        # Don't instantiate directly:  the C version can't have attributes
+        # assigned.
+        class Derived(self._getTargetClass()):
+            def __init__(self, k, i):
+                self._cls = k
+                self._implements = i
+        return Derived(klass, implements)
+
+    def test_w_same_class_via_class(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
+        self.failUnless(Foo.__provides__ is cpbp)
+
+    def test_w_same_class_via_instance(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        foo = Foo()
+        cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
+        self.failUnless(foo.__provides__ is IFoo)
+
+    def test_w_different_class(self):
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        class Bar(Foo):
+            pass
+        bar = Bar()
+        cpbp = Foo.__provides__ = self._makeOne(Foo, IFoo)
+        self.assertRaises(AttributeError, getattr, Bar, '__provides__')
+        self.assertRaises(AttributeError, getattr, bar, '__provides__')
+
+
+class ClassProvidesBaseTests(ClassProvidesBaseFallbackTests):
+    # Repeat tests for C optimizations
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import ClassProvidesBase
+        return ClassProvidesBase
+
+
+class ClassProvidesTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import ClassProvides
+        return ClassProvides
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_w_simple_metaclass(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar)
+        self.failUnless(Foo.__provides__ is cp)
+        self.assertEqual(list(Foo().__provides__), [IFoo])
+
+    def test___reduce__(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        cp = Foo.__provides__ = self._makeOne(Foo, type(Foo), IBar)
+        self.assertEqual(cp.__reduce__(),
+                         (self._getTargetClass(), (Foo, type(Foo), IBar)))
+
+
+class Test_directlyProvidedBy(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import directlyProvidedBy
+        return directlyProvidedBy(*args, **kw)
+
+    def test_wo_declarations_in_class_or_instance(self):
+        class Foo(object):
+            pass
+        foo = Foo()
+        self.assertEqual(list(self._callFUT(foo)), [])
+
+    def test_w_declarations_in_class_but_not_instance(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        self.assertEqual(list(self._callFUT(foo)), [])
+
+    def test_w_declarations_in_instance_but_not_class(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        foo = Foo()
+        directlyProvides(foo, IFoo)
+        self.assertEqual(list(self._callFUT(foo)), [IFoo])
+
+    def test_w_declarations_in_instance_and_class(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        directlyProvides(foo, IBar)
+        self.assertEqual(list(self._callFUT(foo)), [IBar])
+
+
+class Test_classProvides(_SilencePy3Deprecations, _Py3ClassAdvice):
+
+    def _getFUT(self):
+        from zope.interface.declarations import classProvides
+        return classProvides
+
+    def test_called_from_function(self):
+        import warnings
+        from zope.interface.declarations import classProvides
+        from zope.interface.interface import InterfaceClass
+        from zope.interface._compat import PYTHON3
+        IFoo = InterfaceClass("IFoo")
+        globs = {'classProvides': classProvides, 'IFoo': IFoo}
+        locs = {}
+        CODE = "\n".join([
+            'def foo():',
+            '    classProvides(IFoo)'
+            ])
+        exec(CODE, globs, locs)
+        foo = locs['foo']
+        with warnings.catch_warnings(record=True) as log:
+            warnings.resetwarnings()
+            self.assertRaises(TypeError, foo)
+            if not PYTHON3:
+                self.assertEqual(len(log), 0) # no longer warn
+
+    def test_called_twice_from_class(self):
+        import warnings
+        from zope.interface.declarations import classProvides
+        from zope.interface.interface import InterfaceClass
+        from zope.interface._compat import PYTHON3
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        globs = {'classProvides': classProvides, 'IFoo': IFoo, 'IBar': IBar}
+        locs = {}
+        CODE = "\n".join([
+            'class Foo(object):',
+            '    classProvides(IFoo)',
+            '    classProvides(IBar)',
+            ])
+        with warnings.catch_warnings(record=True) as log:
+            warnings.resetwarnings()
+            try:
+                exec(CODE, globs, locs)
+            except TypeError:
+                if not PYTHON3:
+                    self.assertEqual(len(log), 0) # no longer warn
+            else:
+                self.fail("Didn't raise TypeError")
+
+    def test_called_once_from_class(self):
+        from zope.interface.declarations import classProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        globs = {'classProvides': classProvides, 'IFoo': IFoo}
+        locs = {}
+        CODE = "\n".join([
+            'class Foo(object):',
+            '    classProvides(IFoo)',
+            ])
+        if self._run_generated_code(CODE, globs, locs):
+            Foo = locs['Foo']
+            spec = Foo.__providedBy__
+            self.assertEqual(list(spec), [IFoo])
+
+# Test _classProvides_advice through classProvides, its only caller.
+
+
+class Test_provider(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import provider
+        return provider
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_w_class(self):
+        from zope.interface.declarations import ClassProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        @self._makeOne(IFoo)
+        class Foo(object):
+            pass
+        self.failUnless(isinstance(Foo.__provides__, ClassProvides))
+        self.assertEqual(list(Foo.__provides__), [IFoo])
+
+
+class Test_moduleProvides(_SilencePy3Deprecations):
+
+    def _getFUT(self):
+        from zope.interface.declarations import moduleProvides
+        return moduleProvides
+
+    def test_called_from_function(self):
+        from zope.interface.declarations import moduleProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        globs = {'__name__': 'zope.interface.tests.foo',
+                 'moduleProvides': moduleProvides, 'IFoo': IFoo}
+        locs = {}
+        CODE = "\n".join([
+            'def foo():',
+            '    moduleProvides(IFoo)'
+            ])
+        exec(CODE, globs, locs)
+        foo = locs['foo']
+        self.assertRaises(TypeError, foo)
+
+    def test_called_from_class(self):
+        from zope.interface.declarations import moduleProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        globs = {'__name__': 'zope.interface.tests.foo',
+                 'moduleProvides': moduleProvides, 'IFoo': IFoo}
+        locs = {}
+        CODE = "\n".join([
+            'class Foo(object):',
+            '    moduleProvides(IFoo)',
+            ])
+        try:
+            exec(CODE, globs, locs)
+        except TypeError:
+            pass
+        else:
+            self.fail('TypeError not raised')
+
+    def test_called_once_from_module_scope(self):
+        from zope.interface.declarations import moduleProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        globs = {'__name__': 'zope.interface.tests.foo',
+                 'moduleProvides': moduleProvides, 'IFoo': IFoo}
+        CODE = "\n".join([
+            'moduleProvides(IFoo)',
+            ])
+        exec(CODE, globs)
+        spec = globs['__provides__']
+        self.assertEqual(list(spec), [IFoo])
+
+    def test_called_twice_from_module_scope(self):
+        from zope.interface.declarations import moduleProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        globs = {'__name__': 'zope.interface.tests.foo',
+                 'moduleProvides': moduleProvides, 'IFoo': IFoo}
+        locs = {}
+        CODE = "\n".join([
+            'moduleProvides(IFoo)',
+            'moduleProvides(IFoo)',
+            ])
+        try:
+            exec(CODE, globs)
+        except TypeError:
+            pass
+        else:
+            self.fail('TypeError not raised')
+
+
+class Test_getObjectSpecificationFallback(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import getObjectSpecificationFallback
+        return getObjectSpecificationFallback(*args, **kw)
+
+    def test_wo_existing_provides_classless(self):
+        the_dict = {}
+        class Foo(object):
+            def __getattribute__(self, name):
+                # Emulate object w/o any class
+                if name == '__class__':
+                    raise AttributeError(name)
+                try:
+                    return the_dict[name]
+                except KeyError:
+                    raise AttributeError(name)
+            def __setattr__(self, name, value):
+                the_dict[name] = value
+        foo = Foo()
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [])
+
+    def test_existing_provides_is_spec(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        def foo():
+            pass
+        directlyProvides(foo, IFoo)
+        spec = self._callFUT(foo)
+        self.failUnless(spec is foo.__provides__)
+
+    def test_existing_provides_is_not_spec(self):
+        def foo():
+            pass
+        foo.__provides__ = object() # not a valid spec
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [])
+
+    def test_existing_provides(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        foo = Foo()
+        directlyProvides(foo, IFoo)
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [IFoo])
+
+    def test_wo_provides_on_class_w_implements(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [IFoo])
+
+    def test_wo_provides_on_class_wo_implements(self):
+        class Foo(object):
+            pass
+        foo = Foo()
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [])
+
+
+class Test_getObjectSpecification(Test_getObjectSpecificationFallback):
+    # Repeat tests for C optimizations
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import getObjectSpecification
+        return getObjectSpecification(*args, **kw)
+
+
+class Test_providedByFallback(_SilencePy3Deprecations):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import providedByFallback
+        return providedByFallback(*args, **kw)
+
+    def test_wo_providedBy_on_class_wo_implements(self):
+        class Foo(object):
+            pass
+        foo = Foo()
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [])
+
+    def test_w_providedBy_valid_spec(self):
+        from zope.interface.declarations import Provides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        foo = Foo()
+        foo.__providedBy__ = Provides(Foo, IFoo)
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [IFoo])
+
+    def test_w_providedBy_invalid_spec(self):
+        class Foo(object):
+            pass
+        foo = Foo()
+        foo.__providedBy__ = object()
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [])
+
+    def test_w_providedBy_invalid_spec_class_w_implements(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        foo.__providedBy__ = object()
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [IFoo])
+
+    def test_w_providedBy_invalid_spec_w_provides_no_provides_on_class(self):
+        class Foo(object):
+            pass
+        foo = Foo()
+        foo.__providedBy__ = object()
+        expected = foo.__provides__ = object()
+        spec = self._callFUT(foo)
+        self.failUnless(spec is expected)
+
+    def test_w_providedBy_invalid_spec_w_provides_diff_provides_on_class(self):
+        class Foo(object):
+            pass
+        foo = Foo()
+        foo.__providedBy__ = object()
+        expected = foo.__provides__ = object()
+        Foo.__provides__ = object()
+        spec = self._callFUT(foo)
+        self.failUnless(spec is expected)
+
+    def test_w_providedBy_invalid_spec_w_provides_same_provides_on_class(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        foo = Foo()
+        foo.__providedBy__ = object()
+        foo.__provides__ = Foo.__provides__ = object()
+        spec = self._callFUT(foo)
+        self.assertEqual(list(spec), [IFoo])
+
+
+class Test_providedBy(Test_providedByFallback):
+    # Repeat tests for C optimizations
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.declarations import providedBy
+        return providedBy(*args, **kw)
+
+
+class ObjectSpecificationDescriptorFallbackTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.declarations \
+            import ObjectSpecificationDescriptorFallback
+        return ObjectSpecificationDescriptorFallback
+
+    def _makeOne(self, *args, **kw):
+        return self._getTargetClass()(*args, **kw)
+
+    def test_accessed_via_class(self):
+        from zope.interface.declarations import Provides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        class Foo(object):
+            pass
+        Foo.__provides__ = Provides(Foo, IFoo)
+        Foo.__providedBy__ = self._makeOne()
+        self.assertEqual(list(Foo.__providedBy__), [IFoo])
+
+    def test_accessed_via_inst_wo_provides(self):
+        from zope.interface.declarations import implementer
+        from zope.interface.declarations import Provides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        Foo.__provides__ = Provides(Foo, IBar)
+        Foo.__providedBy__ = self._makeOne()
+        foo = Foo()
+        self.assertEqual(list(foo.__providedBy__), [IFoo])
+
+    def test_accessed_via_inst_w_provides(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.declarations import implementer
+        from zope.interface.declarations import Provides
+        from zope.interface.interface import InterfaceClass
+        IFoo = InterfaceClass("IFoo")
+        IBar = InterfaceClass("IBar")
+        IBaz = InterfaceClass("IBaz")
+        @implementer(IFoo)
+        class Foo(object):
+            pass
+        Foo.__provides__ = Provides(Foo, IBar)
+        Foo.__providedBy__ = self._makeOne()
+        foo = Foo()
+        directlyProvides(foo, IBaz)
+        self.assertEqual(list(foo.__providedBy__), [IBaz, IFoo])
+
+
+class ObjectSpecificationDescriptorTests(
+                ObjectSpecificationDescriptorFallbackTests):
+    # Repeat tests for C optimizations
+
+    def _getTargetClass(self):
+        from zope.interface.declarations import ObjectSpecificationDescriptor
+        return ObjectSpecificationDescriptor
+
+
+# Test _normalizeargs through its callers.
+
+
+class _Monkey(object):
+    # context-manager for replacing module names in the scope of a test.
+    def __init__(self, module, **kw):
+        self.module = module
+        self.to_restore = dict([(key, getattr(module, key)) for key in kw])
+        for key, value in kw.items():
+            setattr(module, key, value)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        for key, value in self.to_restore.items():
+            setattr(self.module, key, value)
+
+
+class _MonkeyDict(object):
+    # context-manager for restoring a dict w/in a module in the scope of a test.
+    def __init__(self, module, attrname, **kw):
+        self.module = module
+        self.target = getattr(module, attrname)
+        self.to_restore = self.target.copy()
+        self.target.clear()
+        self.target.update(kw)
+
+    def __enter__(self):
+        return self.target
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.target.clear()
+        self.target.update(self.to_restore)
+
+
+def test_suite():
+    return unittest.TestSuite((
+        unittest.makeSuite(DeclarationTests),
+        unittest.makeSuite(ImplementsTests),
+        unittest.makeSuite(Test_implementedByFallback),
+        unittest.makeSuite(Test_implementedBy),
+        unittest.makeSuite(Test_classImplementsOnly),
+        unittest.makeSuite(Test_classImplements),
+        unittest.makeSuite(Test__implements_advice),
+        unittest.makeSuite(Test_implementer),
+        unittest.makeSuite(Test_implementer_only),
+        unittest.makeSuite(Test_implements),
+        unittest.makeSuite(Test_implementsOnly),
+        unittest.makeSuite(ProvidesClassTests),
+        unittest.makeSuite(Test_Provides),
+        unittest.makeSuite(Test_directlyProvides),
+        unittest.makeSuite(Test_alsoProvides),
+        unittest.makeSuite(Test_noLongerProvides),
+        unittest.makeSuite(ClassProvidesBaseFallbackTests),
+        unittest.makeSuite(ClassProvidesBaseTests),
+        unittest.makeSuite(ClassProvidesTests),
+        unittest.makeSuite(Test_directlyProvidedBy),
+        unittest.makeSuite(Test_classProvides),
+        unittest.makeSuite(Test_provider),
+        unittest.makeSuite(Test_moduleProvides),
+        unittest.makeSuite(Test_getObjectSpecificationFallback),
+        unittest.makeSuite(Test_getObjectSpecification),
+        unittest.makeSuite(Test_providedByFallback),
+        unittest.makeSuite(Test_providedBy),
+        unittest.makeSuite(ObjectSpecificationDescriptorFallbackTests),
+        unittest.makeSuite(ObjectSpecificationDescriptorTests),
+    ))
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_document.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_document.py
new file mode 100644
index 0000000..6a1b0b2
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_document.py
@@ -0,0 +1,286 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Documentation tests.
+"""
+import unittest
+
+
+class Test_asStructuredText(unittest.TestCase):
+
+    def _callFUT(self, iface):
+        from zope.interface.document import asStructuredText
+        return asStructuredText(iface)
+
+    def test_asStructuredText_no_docstring(self):
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "INoDocstring",
+            " Attributes:",
+            " Methods:",
+            ""
+        ])
+        class INoDocstring(Interface):
+            pass
+        self.assertEqual(self._callFUT(INoDocstring), EXPECTED)
+
+    def test_asStructuredText_empty_with_docstring(self):
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IEmpty",
+            " This is an empty interface.",
+            " Attributes:",
+            " Methods:",
+            ""
+        ])
+        class IEmpty(Interface):
+            """ This is an empty interface.
+            """
+        self.assertEqual(self._callFUT(IEmpty), EXPECTED)
+
+    def test_asStructuredText_empty_with_multiline_docstring(self):
+        from zope.interface import Interface
+        EXPECTED = '\n'.join([
+            "IEmpty",
+            "",
+            " This is an empty interface.",
+            " ",
+            ("             It can be used to annotate any class or object, "
+                             "because it promises"),
+            "             nothing.",
+            "",
+            " Attributes:",
+            "",
+            " Methods:",
+            "",
+            ""
+        ])
+        class IEmpty(Interface):
+            """ This is an empty interface.
+
+            It can be used to annotate any class or object, because it promises
+            nothing.
+            """
+        self.assertEqual(self._callFUT(IEmpty), EXPECTED)
+
+    def test_asStructuredText_with_attribute_no_docstring(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IHasAttribute",
+            " This interface has an attribute.",
+            " Attributes:",
+            "  an_attribute -- no documentation",
+            " Methods:",
+            ""
+        ])
+        class IHasAttribute(Interface):
+            """ This interface has an attribute.
+            """
+            an_attribute = Attribute('an_attribute')
+
+        self.assertEqual(self._callFUT(IHasAttribute), EXPECTED)
+
+    def test_asStructuredText_with_attribute_with_docstring(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IHasAttribute",
+            " This interface has an attribute.",
+            " Attributes:",
+            "  an_attribute -- This attribute is documented.",
+            " Methods:",
+            ""
+        ])
+        class IHasAttribute(Interface):
+            """ This interface has an attribute.
+            """
+            an_attribute = Attribute('an_attribute',
+                                     'This attribute is documented.')
+
+        self.assertEqual(self._callFUT(IHasAttribute), EXPECTED)
+
+    def test_asStructuredText_with_method_no_args_no_docstring(self):
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IHasMethod",
+            " This interface has a method.",
+            " Attributes:",
+            " Methods:",
+            "  aMethod() -- no documentation",
+            ""
+        ])
+        class IHasMethod(Interface):
+            """ This interface has a method.
+            """
+            def aMethod():
+                pass
+
+        self.assertEqual(self._callFUT(IHasMethod), EXPECTED)
+
+    def test_asStructuredText_with_method_positional_args_no_docstring(self):
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IHasMethod",
+            " This interface has a method.",
+            " Attributes:",
+            " Methods:",
+            "  aMethod(first, second) -- no documentation",
+            ""
+        ])
+        class IHasMethod(Interface):
+            """ This interface has a method.
+            """
+            def aMethod(first, second):
+                pass
+
+        self.assertEqual(self._callFUT(IHasMethod), EXPECTED)
+
+    def test_asStructuredText_with_method_starargs_no_docstring(self):
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IHasMethod",
+            " This interface has a method.",
+            " Attributes:",
+            " Methods:",
+            "  aMethod(first, second, *rest) -- no documentation",
+            ""
+        ])
+        class IHasMethod(Interface):
+            """ This interface has a method.
+            """
+            def aMethod(first, second, *rest):
+                pass
+
+        self.assertEqual(self._callFUT(IHasMethod), EXPECTED)
+
+    def test_asStructuredText_with_method_kwargs_no_docstring(self):
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IHasMethod",
+            " This interface has a method.",
+            " Attributes:",
+            " Methods:",
+            "  aMethod(first, second, **kw) -- no documentation",
+            ""
+        ])
+        class IHasMethod(Interface):
+            """ This interface has a method.
+            """
+            def aMethod(first, second, **kw):
+                pass
+
+        self.assertEqual(self._callFUT(IHasMethod), EXPECTED)
+
+    def test_asStructuredText_with_method_with_docstring(self):
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IHasMethod",
+            " This interface has a method.",
+            " Attributes:",
+            " Methods:",
+            "  aMethod() -- This method is documented.",
+            ""
+        ])
+        class IHasMethod(Interface):
+            """ This interface has a method.
+            """
+            def aMethod():
+                """This method is documented.
+                """
+
+        self.assertEqual(self._callFUT(IHasMethod), EXPECTED)
+
+    def test_asStructuredText_derived_ignores_base(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        EXPECTED = '\n\n'.join([
+            "IDerived",
+            " IDerived doc",
+            " This interface extends:",
+            "  o IBase",
+            " Attributes:",
+            "  attr1 -- no documentation",
+            "  attr2 -- attr2 doc",
+            " Methods:",
+            "  method3() -- method3 doc",
+            "  method4() -- no documentation",
+            "  method5() -- method5 doc",
+            "",
+        ])
+
+        class IBase(Interface):
+            def method1():
+                pass
+            def method2():
+                pass
+
+        class IDerived(IBase):
+            "IDerived doc"
+            attr1 = Attribute('attr1')
+            attr2 = Attribute('attr2', 'attr2 doc')
+
+            def method3():
+                "method3 doc"
+            def method4():
+                pass
+            def method5():
+                "method5 doc"
+
+        self.assertEqual(self._callFUT(IDerived), EXPECTED)
+
+class Test__justify_and_indent(unittest.TestCase):
+
+    def _callFUT(self, text, level, **kw):
+        from zope.interface.document import _justify_and_indent
+        return _justify_and_indent(text, level, **kw)
+
+    def test_simple_level_0(self):
+        LINES = ['Three blind mice', 'See how they run']
+        text = '\n'.join(LINES)
+        self.assertEqual(self._callFUT(text, 0), text)
+
+    def test_simple_level_1(self):
+        LINES = ['Three blind mice', 'See how they run']
+        text = '\n'.join(LINES)
+        self.assertEqual(self._callFUT(text, 1),
+                         '\n'.join([' ' + line for line in LINES]))
+
+    def test_simple_level_2(self):
+        LINES = ['Three blind mice', 'See how they run']
+        text = '\n'.join(LINES)
+        self.assertEqual(self._callFUT(text, 2),
+                         '\n'.join(['  ' + line for line in LINES]))
+
+    def test_simple_w_CRLF(self):
+        LINES = ['Three blind mice', 'See how they run']
+        text = '\r\n'.join(LINES)
+        self.assertEqual(self._callFUT(text, 1),
+                         '\n'.join([' ' + line for line in LINES]))
+
+    def test_with_munge(self):
+        TEXT = ("This is a piece of text longer than 15 characters, \n"
+                "and split across multiple lines.")
+        EXPECTED = ("  This is a piece\n"
+                    "  of text longer\n"
+                    "  than 15 characters,\n"
+                    "  and split across\n"
+                    "  multiple lines.\n"
+                    " ")
+        self.assertEqual(self._callFUT(TEXT, 1, munge=1, width=15), EXPECTED)
+
+def test_suite():
+    return unittest.TestSuite((
+        unittest.makeSuite(Test_asStructuredText),
+        unittest.makeSuite(Test__justify_and_indent),
+    ))
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_element.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_element.py
new file mode 100644
index 0000000..66724a6
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_element.py
@@ -0,0 +1,41 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test Element meta-class.
+"""
+
+import unittest
+from zope.interface.interface import Element
+
+class TestElement(unittest.TestCase):
+
+    def test_taggedValues(self):
+        """Test that we can update tagged values of more than one element
+        """
+
+        e1 = Element("foo")
+        e2 = Element("bar")
+        e1.setTaggedValue("x", 1)
+        e2.setTaggedValue("x", 2)
+        self.assertEqual(e1.getTaggedValue("x"), 1)
+        self.assertEqual(e2.getTaggedValue("x"), 2)
+
+
+def test_suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestElement))
+    return suite
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_exceptions.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_exceptions.py
new file mode 100644
index 0000000..94009f6
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_exceptions.py
@@ -0,0 +1,75 @@
+##############################################################################
+#
+# Copyright (c) 2010 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+""" zope.interface.exceptions unit tests
+"""
+import unittest
+
+def _makeIface():
+    from zope.interface import Interface
+    class IDummy(Interface):
+        pass
+    return IDummy
+
+class DoesNotImplementTests(unittest.TestCase):
+
+    def _getTargetClass(self):
+        from zope.interface.exceptions import DoesNotImplement
+        return DoesNotImplement
+
+    def _makeOne(self, iface=None):
+        if iface is None:
+            iface = _makeIface()
+        return self._getTargetClass()(iface)
+
+    def test___str__(self):
+        dni = self._makeOne()
+        # XXX The trailing newlines and blank spaces are a stupid artifact.
+        self.assertEqual(str(dni),
+            'An object does not implement interface <InterfaceClass '
+               'zope.interface.tests.test_exceptions.IDummy>\n\n        ')
+
+class BrokenImplementationTests(unittest.TestCase):
+
+    def _getTargetClass(self):
+        from zope.interface.exceptions import BrokenImplementation
+        return BrokenImplementation
+
+    def _makeOne(self, iface=None, name='missing'):
+        if iface is None:
+            iface = _makeIface()
+        return self._getTargetClass()(iface, name)
+
+    def test___str__(self):
+        dni = self._makeOne()
+        # XXX The trailing newlines and blank spaces are a stupid artifact.
+        self.assertEqual(str(dni),
+            'An object has failed to implement interface <InterfaceClass '
+               'zope.interface.tests.test_exceptions.IDummy>\n\n'
+               '        The missing attribute was not provided.\n        ')
+
+class BrokenMethodImplementationTests(unittest.TestCase):
+
+    def _getTargetClass(self):
+        from zope.interface.exceptions import BrokenMethodImplementation
+        return BrokenMethodImplementation
+
+    def _makeOne(self, method='aMethod', mess='I said so'):
+        return self._getTargetClass()(method, mess)
+
+    def test___str__(self):
+        dni = self._makeOne()
+        self.assertEqual(str(dni),
+            'The implementation of aMethod violates its contract\n'
+             '        because I said so.\n        ')
+
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_interface.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_interface.py
new file mode 100644
index 0000000..92cda82
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_interface.py
@@ -0,0 +1,2089 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test Interface implementation
+"""
+import unittest
+
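+# Unique sentinel: lets the _makeOne helpers below tell "argument not passed"
+# apart from an explicit None.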
+_marker = object()
+
+class _SilencePy3Deprecations(unittest.TestCase):
+    # silence deprecation warnings under py3
+
+    def failUnless(self, expr):
+        # Deprecated unittest spelling; delegate to assertTrue.
+        return self.assertTrue(expr)
+
+    def failIf(self, expr):
+        # Deprecated unittest spelling; delegate to assertFalse.
+        return self.assertFalse(expr)
+
+
+class Test_invariant(unittest.TestCase):
+
+    def test_w_single(self):
+        from zope.interface.interface import invariant
+        from zope.interface.interface import TAGGED_DATA
+
+        def _check(*args, **kw):
+            pass
+
+        class Foo(object):
+            invariant(_check)
+
+        self.assertEqual(getattr(Foo, TAGGED_DATA, None),
+                         {'invariants': [_check]})
+
+    def test_w_multiple(self):
+        from zope.interface.interface import invariant
+        from zope.interface.interface import TAGGED_DATA
+
+        def _check(*args, **kw):
+            pass
+
+        def _another_check(*args, **kw):
+            pass
+
+        class Foo(object):
+            invariant(_check)
+            invariant(_another_check)
+
+        self.assertEqual(getattr(Foo, TAGGED_DATA, None),
+                         {'invariants': [_check, _another_check]})
+
+
+class Test_taggedValue(unittest.TestCase):
+
+    def test_w_single(self):
+        from zope.interface.interface import taggedValue
+        from zope.interface.interface import TAGGED_DATA
+
+        class Foo(object):
+            taggedValue('bar', ['baz'])
+
+        self.assertEqual(getattr(Foo, TAGGED_DATA, None),
+                         {'bar': ['baz']})
+
+    def test_w_multiple(self):
+        from zope.interface.interface import taggedValue
+        from zope.interface.interface import TAGGED_DATA
+
+        class Foo(object):
+            taggedValue('bar', ['baz'])
+            taggedValue('qux', 'spam')
+
+        self.assertEqual(getattr(Foo, TAGGED_DATA, None),
+                         {'bar': ['baz'], 'qux': 'spam'})
+
+    def test_w_multiple_overwriting(self):
+        from zope.interface.interface import taggedValue
+        from zope.interface.interface import TAGGED_DATA
+
+        class Foo(object):
+            taggedValue('bar', ['baz'])
+            taggedValue('qux', 'spam')
+            taggedValue('bar', 'frob')
+
+        self.assertEqual(getattr(Foo, TAGGED_DATA, None),
+                         {'bar': 'frob', 'qux': 'spam'})
+
+
+class ElementTests(unittest.TestCase):
+
+    DEFAULT_NAME = 'AnElement'
+
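+    # Element is the common base class of Attribute, Method and
+    # InterfaceClass; it carries the __name__/__doc__ pair and the
+    # tagged-value storage exercised by the tests below.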
+    def _getTargetClass(self):
+        from zope.interface.interface import Element
+        return Element
+
+    def _makeOne(self,  name=None, __doc__=_marker):
+        if name is None:
+            name = self.DEFAULT_NAME
+        if __doc__ is _marker:
+            return self._getTargetClass()(name)
+        return self._getTargetClass()(name, __doc__)
+
+    def test_ctor_defaults(self):
+        element = self._makeOne()
+        self.assertEqual(element.__name__, self.DEFAULT_NAME)
+        self.assertEqual(element.getName(), self.DEFAULT_NAME)
+        self.assertEqual(element.__doc__, '')
+        self.assertEqual(element.getDoc(), '')
+        self.assertEqual(list(element.getTaggedValueTags()), [])
+
+    def test_ctor_no_doc_space_in_name(self):
+        element = self._makeOne('An Element')
+        self.assertEqual(element.__name__, None)
+        self.assertEqual(element.__doc__, 'An Element')
+
+    def test_getTaggedValue_miss(self):
+        element = self._makeOne()
+        self.assertRaises(KeyError, element.getTaggedValue, 'nonesuch')
+
+    def test_queryTaggedValue_miss(self):
+        element = self._makeOne()
+        self.assertEqual(element.queryTaggedValue('nonesuch'), None)
+
+    def test_queryTaggedValue_miss_w_default(self):
+        element = self._makeOne()
+        self.assertEqual(element.queryTaggedValue('nonesuch', 'bar'), 'bar')
+
+    def test_setTaggedValue(self):
+        element = self._makeOne()
+        element.setTaggedValue('foo', 'bar')
+        self.assertEqual(list(element.getTaggedValueTags()), ['foo'])
+        self.assertEqual(element.getTaggedValue('foo'), 'bar')
+        self.assertEqual(element.queryTaggedValue('foo'), 'bar')
+
+
+class SpecificationBasePyTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.interface import SpecificationBasePy
+        return SpecificationBasePy
+
+    def _makeOne(self):
+        return self._getTargetClass()()
+
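+    # SpecificationBasePy only supplies providedBy/implementedBy/isOrExtends
+    # (with __call__ aliased to isOrExtends); the _implied mapping those
+    # methods consult is normally maintained by the Specification subclass,
+    # so the tests below assign it by hand.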
+    def test_providedBy_miss(self):
+        from zope.interface import interface
+        from zope.interface.declarations import _empty
+        sb = self._makeOne()
+        def _providedBy(obj):
+            return _empty
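+        # _Monkey is presumably a small monkey-patching context manager
+        # defined later in this module: it temporarily replaces attributes on
+        # the given module (here interface.providedBy) and restores the
+        # originals on exit.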
+        with _Monkey(interface, providedBy=_providedBy):
+            self.failIf(sb.providedBy(object()))
+
+    def test_providedBy_hit(self):
+        from zope.interface import interface
+        sb = self._makeOne()
+        class _Decl(object):
+            _implied = {sb: {},}
+        def _providedBy(obj):
+            return _Decl()
+        with _Monkey(interface, providedBy=_providedBy):
+            self.failUnless(sb.providedBy(object()))
+
+    def test_implementedBy_miss(self):
+        from zope.interface import interface
+        from zope.interface.declarations import _empty
+        sb = self._makeOne()
+        def _implementedBy(obj):
+            return _empty
+        with _Monkey(interface, implementedBy=_implementedBy):
+            self.failIf(sb.implementedBy(object()))
+
+    def test_implementedBy_hit(self):
+        from zope.interface import interface
+        sb = self._makeOne()
+        class _Decl(object):
+            _implied = {sb: {},}
+        def _implementedBy(obj):
+            return _Decl()
+        with _Monkey(interface, implementedBy=_implementedBy):
+            self.failUnless(sb.implementedBy(object()))
+
+    def test_isOrExtends_miss(self):
+        sb = self._makeOne()
+        sb._implied = {}  # not defined by SpecificationBasePy 
+        self.failIf(sb.isOrExtends(object()))
+
+    def test_isOrExtends_hit(self):
+        sb = self._makeOne()
+        testing = object()
+        sb._implied = {testing: {}}  # not defined by SpecificationBasePy 
+        self.failUnless(sb.isOrExtends(testing))
+
+    def test___call___miss(self):
+        sb = self._makeOne()
+        sb._implied = {}  # not defined by SpecificationBasePy 
+        self.failIf(sb(object()))
+
+    def test___call___hit(self):
+        sb = self._makeOne()
+        testing = object()
+        sb._implied = {testing: {}}  # not defined by SpecificationBasePy 
+        self.failUnless(sb(testing))
+
+
+class InterfaceBasePyTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.interface import InterfaceBasePy
+        return InterfaceBasePy
+
+    def _makeOne(self, object_should_provide):
+        class IB(self._getTargetClass()):
+            def _call_conform(self, conform):
+                return conform(self)
+            def providedBy(self, obj):
+                return object_should_provide
+        return IB()
+
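+    # The tests below exercise the interface-call adaptation protocol:
+    # iface(obj) first tries obj.__conform__(iface) and returns any non-None
+    # result; otherwise the object itself is returned if it already provides
+    # the interface; otherwise the alternate default is returned when given,
+    # or TypeError is raised.  __adapt__ falls back to the hooks registered
+    # in interface.adapter_hooks.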
+    def test___call___w___conform___returning_value(self):
+        ib = self._makeOne(False)
+        conformed = object()
+        class _Adapted(object):
+            def __conform__(self, iface):
+                return conformed
+        self.failUnless(ib(_Adapted()) is conformed)
+
+    def test___call___w___conform___miss_ob_provides(self):
+        ib = self._makeOne(True)
+        class _Adapted(object):
+            def __conform__(self, iface):
+                return None
+        adapted = _Adapted()
+        self.failUnless(ib(adapted) is adapted)
+
+    def test___call___wo___conform___ob_no_provides_w_alternate(self):
+        ib = self._makeOne(False)
+        adapted = object()
+        alternate = object()
+        self.failUnless(ib(adapted, alternate) is alternate)
+
+    def test___call___w___conform___ob_no_provides_wo_alternate(self):
+        ib = self._makeOne(False)
+        adapted = object()
+        self.assertRaises(TypeError, ib, adapted)
+
+    def test___adapt___ob_provides(self):
+        ib = self._makeOne(True)
+        adapted = object()
+        self.failUnless(ib.__adapt__(adapted) is adapted)
+
+    def test___adapt___ob_no_provides_uses_hooks(self):
+        from zope.interface import interface
+        ib = self._makeOne(False)
+        adapted = object()
+        _missed = []
+        def _hook_miss(iface, obj):
+            _missed.append((iface, obj))
+            return None
+        def _hook_hit(iface, obj):
+            return obj
+        with _Monkey(interface, adapter_hooks=[_hook_miss, _hook_hit]):
+            self.failUnless(ib.__adapt__(adapted) is adapted)
+            self.assertEqual(_missed, [(ib, adapted)])
+
+
+class SpecificationTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.interface import Specification
+        return Specification
+
+    def _makeOne(self, bases=_marker):
+        if bases is _marker:
+            return self._getTargetClass()()
+        return self._getTargetClass()(bases)
+
+    def test_ctor(self):
+        from zope.interface.interface import Interface
+        spec = self._makeOne()
+        self.assertEqual(spec.__bases__, ())
+        self.assertEqual(len(spec._implied), 2)
+        self.failUnless(spec in spec._implied)
+        self.failUnless(Interface in spec._implied)
+        self.assertEqual(len(spec.dependents), 0)
+
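+    # DummyDependent is presumably a small stub defined elsewhere in this
+    # module: Specification.subscribe() reference-counts it as a dependent,
+    # and its changed() hook records the notifying spec in a _changed list
+    # (checked in test___setBases_... below).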
+    def test_subscribe_first_time(self):
+        spec = self._makeOne()
+        dep = DummyDependent()
+        spec.subscribe(dep)
+        self.assertEqual(len(spec.dependents), 1)
+        self.assertEqual(spec.dependents[dep], 1)
+
+    def test_subscribe_again(self):
+        spec = self._makeOne()
+        dep = DummyDependent()
+        spec.subscribe(dep)
+        spec.subscribe(dep)
+        self.assertEqual(spec.dependents[dep], 2)
+
+    def test_unsubscribe_miss(self):
+        spec = self._makeOne()
+        dep = DummyDependent()
+        self.assertRaises(KeyError, spec.unsubscribe, dep)
+
+    def test_unsubscribe(self):
+        spec = self._makeOne()
+        dep = DummyDependent()
+        spec.subscribe(dep)
+        spec.subscribe(dep)
+        spec.unsubscribe(dep)
+        self.assertEqual(spec.dependents[dep], 1)
+        spec.unsubscribe(dep)
+        self.failIf(dep in spec.dependents)
+
+    def test___setBases_subscribes_bases_and_notifies_dependents(self):
+        from zope.interface.interface import Interface
+        spec = self._makeOne()
+        dep = DummyDependent()
+        spec.subscribe(dep)
+        class I(Interface):
+            pass
+        class J(Interface):
+            pass
+        spec.__bases__ = (I,)
+        self.assertEqual(dep._changed, [spec])
+        self.assertEqual(I.dependents[spec], 1)
+        spec.__bases__ = (J,)
+        self.assertEqual(I.dependents.get(spec), None)
+        self.assertEqual(J.dependents[spec], 1)
+
+    def test_changed_clears_volatiles_and_implied(self):
+        from zope.interface.interface import Interface
+        class I(Interface):
+            pass
+        spec = self._makeOne()
+        spec._v_attrs = 'Foo'
+        spec._implied[I] = ()
+        spec.changed(spec)
+        self.failUnless(getattr(spec, '_v_attrs', self) is self)
+        self.failIf(I in spec._implied)
+
+
+class InterfaceClassTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.interface import InterfaceClass
+        return InterfaceClass
+
+    def _makeOne(self,  name='ITest', bases=(), attrs=None, __doc__=None,
+                 __module__=None):
+        return self._getTargetClass()(name, bases, attrs, __doc__, __module__)
+
+    def test_ctor_defaults(self):
+        klass = self._getTargetClass()
+        inst = klass('ITesting')
+        self.assertEqual(inst.__name__, 'ITesting')
+        self.assertEqual(inst.__doc__, '')
+        self.assertEqual(inst.__bases__, ())
+        self.assertEqual(inst.getBases(), ())
+
+    def test_ctor_bad_bases(self):
+        klass = self._getTargetClass()
+        self.assertRaises(TypeError, klass, 'ITesting', (object(),))
+
+    def test_ctor_w_attrs_attrib_methods(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        ATTRS = {'foo': Attribute('Foo', ''),
+                 'bar': fromFunction(_bar),
+                }
+        klass = self._getTargetClass()
+        inst = klass('ITesting', attrs=ATTRS)
+        self.assertEqual(inst.__name__, 'ITesting')
+        self.assertEqual(inst.__doc__, '')
+        self.assertEqual(inst.__bases__, ())
+        self.assertEqual(inst.names(), ATTRS.keys())
+
+    def test_ctor_attrs_w___locals__(self):
+        ATTRS = {'__locals__': {}}
+        klass = self._getTargetClass()
+        inst = klass('ITesting', attrs=ATTRS)
+        self.assertEqual(inst.__name__, 'ITesting')
+        self.assertEqual(inst.__doc__, '')
+        self.assertEqual(inst.__bases__, ())
+        self.assertEqual(inst.names(), ATTRS.keys())
+
+    def test_ctor_attrs_w__decorator_non_return(self):
+        from zope.interface.interface import _decorator_non_return
+        ATTRS = {'dropme': _decorator_non_return}
+        klass = self._getTargetClass()
+        inst = klass('ITesting', attrs=ATTRS)
+        self.assertEqual(inst.__name__, 'ITesting')
+        self.assertEqual(inst.__doc__, '')
+        self.assertEqual(inst.__bases__, ())
+        self.assertEqual(list(inst.names()), [])
+
+    def test_ctor_attrs_w_invalid_attr_type(self):
+        from zope.interface.exceptions import InvalidInterface
+        ATTRS = {'invalid': object()}
+        klass = self._getTargetClass()
+        self.assertRaises(InvalidInterface, klass, 'ITesting', attrs=ATTRS)
+
+    def test_interfaces(self):
+        iface = self._makeOne()
+        self.assertEqual(list(iface.interfaces()), [iface])
+
+    def test_getBases(self):
+        iface = self._makeOne()
+        sub = self._makeOne('ISub', bases=(iface,))
+        self.assertEqual(sub.getBases(), (iface,))
+
+    def test_isEqualOrExtendedBy_identity(self):
+        iface = self._makeOne()
+        self.failUnless(iface.isEqualOrExtendedBy(iface))
+
+    def test_isEqualOrExtendedBy_subiface(self):
+        iface = self._makeOne()
+        sub = self._makeOne('ISub', bases=(iface,))
+        self.failUnless(iface.isEqualOrExtendedBy(sub))
+        self.failIf(sub.isEqualOrExtendedBy(iface))
+
+    def test_isEqualOrExtendedBy_unrelated(self):
+        one = self._makeOne('One')
+        another = self._makeOne('Another')
+        self.failIf(one.isEqualOrExtendedBy(another))
+        self.failIf(another.isEqualOrExtendedBy(one))
+
+    def test_names_w_all_False_ignores_bases(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        BASE_ATTRS = {'foo': Attribute('Foo', ''),
+                      'bar': fromFunction(_bar),
+                     }
+        DERIVED_ATTRS = {'baz': Attribute('Baz', ''),
+                        }
+        base = self._makeOne('IBase', attrs=BASE_ATTRS)
+        derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS)
+        self.assertEqual(sorted(derived.names(all=False)), ['baz'])
+
+    def test_names_w_all_True_no_bases(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        ATTRS = {'foo': Attribute('Foo', ''),
+                 'bar': fromFunction(_bar),
+                }
+        one = self._makeOne(attrs=ATTRS)
+        self.assertEqual(sorted(one.names(all=True)), ['bar', 'foo'])
+
+    def test_names_w_all_True_w_bases_simple(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        BASE_ATTRS = {'foo': Attribute('Foo', ''),
+                      'bar': fromFunction(_bar),
+                     }
+        DERIVED_ATTRS = {'baz': Attribute('Baz', ''),
+                        }
+        base = self._makeOne('IBase', attrs=BASE_ATTRS)
+        derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS)
+        self.assertEqual(sorted(derived.names(all=True)), ['bar', 'baz', 'foo'])
+
+    def test_names_w_all_True_bases_w_same_names(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        def _foo():
+            """DOCSTRING"""
+        BASE_ATTRS = {'foo': Attribute('Foo', ''),
+                      'bar': fromFunction(_bar),
+                     }
+        DERIVED_ATTRS = {'foo': fromFunction(_foo),
+                         'baz': Attribute('Baz', ''),
+                        }
+        base = self._makeOne('IBase', attrs=BASE_ATTRS)
+        derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS)
+        self.assertEqual(sorted(derived.names(all=True)), ['bar', 'baz', 'foo'])
+
+    def test___iter__(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        def _foo():
+            """DOCSTRING"""
+        BASE_ATTRS = {'foo': Attribute('Foo', ''),
+                      'bar': fromFunction(_bar),
+                     }
+        DERIVED_ATTRS = {'foo': fromFunction(_foo),
+                         'baz': Attribute('Baz', ''),
+                        }
+        base = self._makeOne('IBase', attrs=BASE_ATTRS)
+        derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS)
+        self.assertEqual(sorted(derived), ['bar', 'baz', 'foo'])
+
+    def test_namesAndDescriptions_w_all_False_ignores_bases(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        BASE_ATTRS = {'foo': Attribute('Foo', ''),
+                      'bar': fromFunction(_bar),
+                     }
+        DERIVED_ATTRS = {'baz': Attribute('Baz', ''),
+                        }
+        base = self._makeOne('IBase', attrs=BASE_ATTRS)
+        derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS)
+        self.assertEqual(sorted(derived.namesAndDescriptions(all=False)),
+                        [('baz', DERIVED_ATTRS['baz']),
+                        ])
+
+    def test_namesAndDescriptions_w_all_True_no_bases(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        ATTRS = {'foo': Attribute('Foo', ''),
+                 'bar': fromFunction(_bar),
+                }
+        one = self._makeOne(attrs=ATTRS)
+        self.assertEqual(sorted(one.namesAndDescriptions(all=True)),
+                        [('bar', ATTRS['bar']),
+                         ('foo', ATTRS['foo']),
+                        ])
+
+    def test_namesAndDescriptions_w_all_True_simple(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        BASE_ATTRS = {'foo': Attribute('Foo', ''),
+                      'bar': fromFunction(_bar),
+                     }
+        DERIVED_ATTRS = {'baz': Attribute('Baz', ''),
+                        }
+        base = self._makeOne('IBase', attrs=BASE_ATTRS)
+        derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS)
+        self.assertEqual(sorted(derived.namesAndDescriptions(all=True)),
+                        [('bar', BASE_ATTRS['bar']),
+                         ('baz', DERIVED_ATTRS['baz']),
+                         ('foo', BASE_ATTRS['foo']),
+                        ])
+
+    def test_namesAndDescriptions_w_all_True_bases_w_same_names(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        def _foo():
+            """DOCSTRING"""
+        BASE_ATTRS = {'foo': Attribute('Foo', ''),
+                      'bar': fromFunction(_bar),
+                     }
+        DERIVED_ATTRS = {'foo': fromFunction(_foo),
+                         'baz': Attribute('Baz', ''),
+                        }
+        base = self._makeOne('IBase', attrs=BASE_ATTRS)
+        derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS)
+        self.assertEqual(sorted(derived.namesAndDescriptions(all=True)),
+                        [('bar', BASE_ATTRS['bar']),
+                         ('baz', DERIVED_ATTRS['baz']),
+                         ('foo', DERIVED_ATTRS['foo']),
+                        ])
+
+    def test_getDescriptionFor_miss(self):
+        one = self._makeOne()
+        self.assertRaises(KeyError, one.getDescriptionFor, 'nonesuch')
+
+    def test_getDescriptionFor_hit(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        ATTRS = {'foo': Attribute('Foo', ''),
+                 'bar': fromFunction(_bar),
+                }
+        one = self._makeOne(attrs=ATTRS)
+        self.assertEqual(one.getDescriptionFor('foo'), ATTRS['foo'])
+        self.assertEqual(one.getDescriptionFor('bar'), ATTRS['bar'])
+
+    def test___getitem___miss(self):
+        one = self._makeOne()
+        def _test():
+            return one['nonesuch']
+        self.assertRaises(KeyError, _test)
+
+    def test___getitem___hit(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        ATTRS = {'foo': Attribute('Foo', ''),
+                 'bar': fromFunction(_bar),
+                }
+        one = self._makeOne(attrs=ATTRS)
+        self.assertEqual(one['foo'], ATTRS['foo'])
+        self.assertEqual(one['bar'], ATTRS['bar'])
+
+    def test___contains___miss(self):
+        one = self._makeOne()
+        self.failIf('nonesuch' in one)
+
+    def test___contains___hit(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        ATTRS = {'foo': Attribute('Foo', ''),
+                 'bar': fromFunction(_bar),
+                }
+        one = self._makeOne(attrs=ATTRS)
+        self.failUnless('foo' in one)
+        self.failUnless('bar' in one)
+
+    def test_direct_miss(self):
+        one = self._makeOne()
+        self.assertEqual(one.direct('nonesuch'), None)
+
+    def test_direct_hit_local_miss_bases(self):
+        from zope.interface.interface import Attribute
+        from zope.interface.interface import fromFunction
+        def _bar():
+            """DOCSTRING"""
+        def _foo():
+            """DOCSTRING"""
+        BASE_ATTRS = {'foo': Attribute('Foo', ''),
+                      'bar': fromFunction(_bar),
+                     }
+        DERIVED_ATTRS = {'foo': fromFunction(_foo),
+                         'baz': Attribute('Baz', ''),
+                        }
+        base = self._makeOne('IBase', attrs=BASE_ATTRS)
+        derived = self._makeOne('IDerived', bases=(base,), attrs=DERIVED_ATTRS)
+        self.assertEqual(derived.direct('foo'), DERIVED_ATTRS['foo'])
+        self.assertEqual(derived.direct('baz'), DERIVED_ATTRS['baz'])
+        self.assertEqual(derived.direct('bar'), None)
+
+    def test_queryDescriptionFor_miss(self):
+        iface = self._makeOne()
+        self.assertEqual(iface.queryDescriptionFor('nonesuch'), None)
+
+    def test_queryDescriptionFor_hit(self):
+        from zope.interface import Attribute
+        ATTRS = {'attr': Attribute('Title', 'Description')}
+        iface = self._makeOne(attrs=ATTRS)
+        self.assertEqual(iface.queryDescriptionFor('attr'), ATTRS['attr'])
+
+
+    #TODO (or not: 'deferred' looks like a fossil to me.)
+    #def test_deferred_cache_hit(self):
+    #def test_deferred_cache_miss(self):
+    #def test_deferred_cache_miss_w_bases(self):
+
+    def test_validateInvariants_pass(self):
+        _called_with = []
+        def _passable(*args, **kw):
+            _called_with.append((args, kw))
+            return True
+        iface = self._makeOne()
+        obj = object()
+        iface.setTaggedValue('invariants', [_passable])
+        self.assertEqual(iface.validateInvariants(obj), None)
+        self.assertEqual(_called_with, [((obj,), {})])
+
+    def test_validateInvariants_fail_wo_errors_passed(self):
+        from zope.interface.exceptions import Invalid
+        _passable_called_with = []
+        def _passable(*args, **kw):
+            _passable_called_with.append((args, kw))
+            return True
+        _fail_called_with = []
+        def _fail(*args, **kw):
+            _fail_called_with.append((args, kw))
+            raise Invalid
+        iface = self._makeOne()
+        obj = object()
+        iface.setTaggedValue('invariants', [_passable, _fail])
+        self.assertRaises(Invalid, iface.validateInvariants, obj)
+        self.assertEqual(_passable_called_with, [((obj,), {})])
+        self.assertEqual(_fail_called_with, [((obj,), {})])
+
+    def test_validateInvariants_fail_w_errors_passed(self):
+        from zope.interface.exceptions import Invalid
+        _errors = []
+        _fail_called_with = []
+        def _fail(*args, **kw):
+            _fail_called_with.append((args, kw))
+            raise Invalid
+        iface = self._makeOne()
+        obj = object()
+        iface.setTaggedValue('invariants', [_fail])
+        self.assertRaises(Invalid, iface.validateInvariants, obj, _errors)
+        self.assertEqual(_fail_called_with, [((obj,), {})])
+        self.assertEqual(len(_errors), 1)
+        self.failUnless(isinstance(_errors[0], Invalid))
+
+    def test_validateInvariants_fail_in_base_wo_errors_passed(self):
+        from zope.interface.exceptions import Invalid
+        _passable_called_with = []
+        def _passable(*args, **kw):
+            _passable_called_with.append((args, kw))
+            return True
+        _fail_called_with = []
+        def _fail(*args, **kw):
+            _fail_called_with.append((args, kw))
+            raise Invalid
+        base = self._makeOne('IBase')
+        derived = self._makeOne('IDerived', (base,))
+        obj = object()
+        base.setTaggedValue('invariants', [_fail])
+        derived.setTaggedValue('invariants', [_passable])
+        self.assertRaises(Invalid, derived.validateInvariants, obj)
+        self.assertEqual(_passable_called_with, [((obj,), {})])
+        self.assertEqual(_fail_called_with, [((obj,), {})])
+
+    #TODO
+    def test_validateInvariants_fail_in_base_w_errors_passed(self):
+        from zope.interface.exceptions import Invalid
+        _errors = []
+        _passable_called_with = []
+        def _passable(*args, **kw):
+            _passable_called_with.append((args, kw))
+            return True
+        _fail_called_with = []
+        def _fail(*args, **kw):
+            _fail_called_with.append((args, kw))
+            raise Invalid
+        base = self._makeOne('IBase')
+        derived = self._makeOne('IDerived', (base,))
+        obj = object()
+        base.setTaggedValue('invariants', [_fail])
+        derived.setTaggedValue('invariants', [_passable])
+        self.assertRaises(Invalid, derived.validateInvariants, obj, _errors)
+        self.assertEqual(_passable_called_with, [((obj,), {})])
+        self.assertEqual(_fail_called_with, [((obj,), {})])
+        self.assertEqual(len(_errors), 1)
+        self.failUnless(isinstance(_errors[0], Invalid))
+
+    def test___reduce__(self):
+        iface = self._makeOne('PickleMe')
+        self.assertEqual(iface.__reduce__(), 'PickleMe')
+
+    def test___hash___normal(self):
+        iface = self._makeOne('HashMe')
+        self.assertEqual(hash(iface),
+                         hash(('HashMe',
+                               'zope.interface.tests.test_interface')))
+
+    def test___hash___missing_required_attrs(self):
+        import warnings
+        try:
+            from warnings import catch_warnings
+        except ImportError:  # Python 2.5
+            return
+        class Derived(self._getTargetClass()):
+            def __init__(self):
+                pass # Don't call base class.
+        derived = Derived()
+        with catch_warnings(record=True) as warned:
+            warnings.simplefilter('always') # see LP #825249 
+            self.assertEqual(hash(derived), 1)
+            self.assertEqual(len(warned), 1)
+            self.failUnless(warned[0].category is UserWarning)
+            self.assertEqual(str(warned[0].message),
+                             'Hashing uninitialized InterfaceClass instance')
+
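+    # InterfaceClass defines a total ordering keyed on (__name__,
+    # __module__); None is treated as the loosest possible specification, so
+    # every interface sorts before None.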
+    def test_comparison_with_None(self):
+        iface = self._makeOne()
+        self.failUnless(iface < None)
+        self.failUnless(iface <= None)
+        self.failIf(iface == None)
+        self.failUnless(iface != None)
+        self.failIf(iface >= None)
+        self.failIf(iface > None)
+
+        self.failIf(None < iface)
+        self.failIf(None <= iface)
+        self.failIf(None == iface)
+        self.failUnless(None != iface)
+        self.failUnless(None >= iface)
+        self.failUnless(None > iface)
+
+    def test_comparison_with_same_instance(self):
+        iface = self._makeOne()
+
+        self.failIf(iface < iface)
+        self.failUnless(iface <= iface)
+        self.failUnless(iface == iface)
+        self.failIf(iface != iface)
+        self.failUnless(iface >= iface)
+        self.failIf(iface > iface)
+
+    def test_comparison_with_same_named_instance_in_other_module(self):
+
+        one = self._makeOne('IName', __module__='zope.interface.tests.one')
+        other = self._makeOne('IName', __module__='zope.interface.tests.other')
+
+        self.failUnless(one < other)
+        self.failIf(other < one)
+        self.failUnless(one <= other)
+        self.failIf(other <= one)
+        self.failIf(one == other)
+        self.failIf(other == one)
+        self.failUnless(one != other)
+        self.failUnless(other != one)
+        self.failIf(one >= other)
+        self.failUnless(other >= one)
+        self.failIf(one > other)
+        self.failUnless(other > one)
+
+
+class InterfaceTests(_SilencePy3Deprecations):
+
+    def test_attributes_link_to_interface(self):
+        from zope.interface import Interface
+        from zope.interface import Attribute
+
+        class I1(Interface):
+            attr = Attribute("My attr")
+
+        self.failUnless(I1['attr'].interface is I1)
+
+    def test_methods_link_to_interface(self):
+        from zope.interface import Interface
+
+        class I1(Interface):
+
+            def method(foo, bar, bingo):
+                pass
+
+        self.failUnless(I1['method'].interface is I1)
+
+    def test_classImplements_simple(self):
+        from zope.interface import Interface
+        from zope.interface import implementedBy
+        from zope.interface import providedBy
+
+        class ICurrent(Interface):
+            def method1(a, b):
+                pass
+            def method2(a, b):
+                pass
+
+        class IOther(Interface):
+            pass
+
+        class Current(object):
+            __implemented__ = ICurrent
+            def method1(self, a, b):
+                return 1
+            def method2(self, a, b):
+                return 2
+
+        current = Current()
+
+        self.failUnless(ICurrent.implementedBy(Current))
+        self.failIf(IOther.implementedBy(Current))
+        self.failUnless(ICurrent in implementedBy(Current))
+        self.failIf(IOther in implementedBy(Current))
+        self.failUnless(ICurrent in providedBy(current))
+        self.failIf(IOther in providedBy(current))
+
+    def test_classImplements_base_not_derived(self):
+        from zope.interface import Interface
+        from zope.interface import implementedBy
+        from zope.interface import providedBy
+        class IBase(Interface):
+            def method():
+                pass
+        class IDerived(IBase):
+            pass
+        class Current():
+            __implemented__ = IBase
+            def method(self):
+                pass
+        current = Current()
+
+        self.failUnless(IBase.implementedBy(Current))
+        self.failIf(IDerived.implementedBy(Current))
+        self.failUnless(IBase in implementedBy(Current))
+        self.failIf(IDerived in implementedBy(Current))
+        self.failUnless(IBase in providedBy(current))
+        self.failIf(IDerived in providedBy(current))
+
+    def test_classImplements_base_and_derived(self):
+        from zope.interface import Interface
+        from zope.interface import implementedBy
+        from zope.interface import providedBy
+
+        class IBase(Interface):
+            def method():
+                pass
+
+        class IDerived(IBase):
+            pass
+
+        class Current(object):
+            __implemented__ = IDerived
+            def method(self):
+                pass
+
+        current = Current()
+
+        self.failUnless(IBase.implementedBy(Current))
+        self.failUnless(IDerived.implementedBy(Current))
+        self.failIf(IBase in implementedBy(Current))
+        self.failUnless(IBase in implementedBy(Current).flattened())
+        self.failUnless(IDerived in implementedBy(Current))
+        self.failIf(IBase in providedBy(current))
+        self.failUnless(IBase in providedBy(current).flattened())
+        self.failUnless(IDerived in providedBy(current))
+
+    def test_classImplements_multiple(self):
+        from zope.interface import Interface
+        from zope.interface import implementedBy
+        from zope.interface import providedBy
+
+        class ILeft(Interface):
+            def method():
+                pass
+
+        class IRight(ILeft):
+            pass
+
+        class Left(object):
+            __implemented__ = ILeft
+
+            def method(self):
+                pass
+
+        class Right(object):
+            __implemented__ = IRight
+
+        class Ambi(Left, Right):
+            pass
+
+        ambi = Ambi()
+
+        self.failUnless(ILeft.implementedBy(Ambi))
+        self.failUnless(IRight.implementedBy(Ambi))
+        self.failUnless(ILeft in implementedBy(Ambi))
+        self.failUnless(IRight in implementedBy(Ambi))
+        self.failUnless(ILeft in providedBy(ambi))
+        self.failUnless(IRight in providedBy(ambi))
+
+    def test_classImplements_multiple_w_explicit_implements(self):
+        from zope.interface import Interface
+        from zope.interface import implementedBy
+        from zope.interface import providedBy
+
+        class ILeft(Interface):
+
+            def method():
+                pass
+
+        class IRight(ILeft):
+            pass
+
+        class IOther(Interface):
+            pass
+
+        class Left():
+            __implemented__ = ILeft
+
+            def method(self):
+                pass
+
+        class Right(object):
+            __implemented__ = IRight
+
+        class Other(object):
+            __implemented__ = IOther
+
+        class Mixed(Left, Right):
+            __implemented__ = Left.__implemented__, Other.__implemented__
+
+        mixed = Mixed()
+
+        self.failUnless(ILeft.implementedBy(Mixed))
+        self.failIf(IRight.implementedBy(Mixed))
+        self.failUnless(IOther.implementedBy(Mixed))
+        self.failUnless(ILeft in implementedBy(Mixed))
+        self.failIf(IRight in implementedBy(Mixed))
+        self.failUnless(IOther in implementedBy(Mixed))
+        self.failUnless(ILeft in providedBy(mixed))
+        self.failIf(IRight in providedBy(mixed))
+        self.failUnless(IOther in providedBy(mixed))
+
+    def test_interface_deferred_class_method_broken(self):
+        from zope.interface import Interface
+        from zope.interface.exceptions import BrokenImplementation
+
+        class IDeferring(Interface):
+            def method():
+                pass
+
+        class Deferring(IDeferring.deferred()):
+            __implemented__ = IDeferring
+
+        deferring = Deferring()
+
+        self.assertRaises(BrokenImplementation, deferring.method)
+
+    def testInterfaceExtendsInterface(self):
+        from zope.interface import Interface
+
+        new = Interface.__class__
+        FunInterface = new('FunInterface')
+        BarInterface = new('BarInterface', [FunInterface])
+        BobInterface = new('BobInterface')
+        BazInterface = new('BazInterface', [BobInterface, BarInterface])
+
+        self.failUnless(BazInterface.extends(BobInterface))
+        self.failUnless(BazInterface.extends(BarInterface))
+        self.failUnless(BazInterface.extends(FunInterface))
+        self.failIf(BobInterface.extends(FunInterface))
+        self.failIf(BobInterface.extends(BarInterface))
+        self.failUnless(BarInterface.extends(FunInterface))
+        self.failIf(BarInterface.extends(BazInterface))
+
+    def test_verifyClass(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface.verify import verifyClass
+        from zope.interface._compat import _u
+
+        class ICheckMe(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                pass
+
+        class CheckMe(object):
+            __implemented__ = ICheckMe
+            attr = 'value'
+
+            def method(self):
+                pass
+
+        self.failUnless(verifyClass(ICheckMe, CheckMe))
+
+    def test_verifyObject(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface.verify import verifyObject
+        from zope.interface._compat import _u
+
+        class ICheckMe(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                pass
+
+        class CheckMe(object):
+            __implemented__ = ICheckMe
+            attr = 'value'
+
+            def method(self):
+                pass
+
+        check_me = CheckMe()
+
+        self.failUnless(verifyObject(ICheckMe, check_me))
+
+    def test_interface_object_provides_Interface(self):
+        from zope.interface import Interface
+
+        class AnInterface(Interface):
+            pass
+
+        self.failUnless(Interface.providedBy(AnInterface))
+
+    def test_names_simple(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class ISimple(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                pass
+
+        self.assertEqual(sorted(ISimple.names()), ['attr', 'method'])
+
+    def test_names_derived(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class IBase(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                pass
+
+        class IDerived(IBase):
+            attr2 = Attribute(_u('My attr2'))
+
+            def method():
+                pass
+
+            def method2():
+                pass
+
+        self.assertEqual(sorted(IDerived.names()),
+                         ['attr2', 'method', 'method2'])
+        self.assertEqual(sorted(IDerived.names(all=True)),
+                         ['attr', 'attr2', 'method', 'method2'])
+
+    def test_namesAndDescriptions_simple(self):
+        from zope.interface import Attribute
+        from zope.interface.interface import Method
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class ISimple(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        name_values = sorted(ISimple.namesAndDescriptions())
+
+        self.assertEqual(len(name_values), 2)
+        self.assertEqual(name_values[0][0], 'attr')
+        self.failUnless(isinstance(name_values[0][1], Attribute))
+        self.assertEqual(name_values[0][1].__name__, 'attr')
+        self.assertEqual(name_values[0][1].__doc__, 'My attr')
+        self.assertEqual(name_values[1][0], 'method')
+        self.failUnless(isinstance(name_values[1][1], Method))
+        self.assertEqual(name_values[1][1].__name__, 'method')
+        self.assertEqual(name_values[1][1].__doc__, 'My method')
+
+    def test_namesAndDescriptions_derived(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface.interface import Method
+        from zope.interface._compat import _u
+
+        class IBase(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        class IDerived(IBase):
+            attr2 = Attribute(_u('My attr2'))
+
+            def method():
+                "My method, overridden"
+
+            def method2():
+                "My method2"
+
+        name_values = sorted(IDerived.namesAndDescriptions())
+
+        self.assertEqual(len(name_values), 3)
+        self.assertEqual(name_values[0][0], 'attr2')
+        self.failUnless(isinstance(name_values[0][1], Attribute))
+        self.assertEqual(name_values[0][1].__name__, 'attr2')
+        self.assertEqual(name_values[0][1].__doc__, 'My attr2')
+        self.assertEqual(name_values[1][0], 'method')
+        self.failUnless(isinstance(name_values[1][1], Method))
+        self.assertEqual(name_values[1][1].__name__, 'method')
+        self.assertEqual(name_values[1][1].__doc__, 'My method, overridden')
+        self.assertEqual(name_values[2][0], 'method2')
+        self.failUnless(isinstance(name_values[2][1], Method))
+        self.assertEqual(name_values[2][1].__name__, 'method2')
+        self.assertEqual(name_values[2][1].__doc__, 'My method2')
+
+        name_values = sorted(IDerived.namesAndDescriptions(all=True))
+
+        self.assertEqual(len(name_values), 4)
+        self.assertEqual(name_values[0][0], 'attr')
+        self.failUnless(isinstance(name_values[0][1], Attribute))
+        self.assertEqual(name_values[0][1].__name__, 'attr')
+        self.assertEqual(name_values[0][1].__doc__, 'My attr')
+        self.assertEqual(name_values[1][0], 'attr2')
+        self.failUnless(isinstance(name_values[1][1], Attribute))
+        self.assertEqual(name_values[1][1].__name__, 'attr2')
+        self.assertEqual(name_values[1][1].__doc__, 'My attr2')
+        self.assertEqual(name_values[2][0], 'method')
+        self.failUnless(isinstance(name_values[2][1], Method))
+        self.assertEqual(name_values[2][1].__name__, 'method')
+        self.assertEqual(name_values[2][1].__doc__, 'My method, overridden')
+        self.assertEqual(name_values[3][0], 'method2')
+        self.failUnless(isinstance(name_values[3][1], Method))
+        self.assertEqual(name_values[3][1].__name__, 'method2')
+        self.assertEqual(name_values[3][1].__doc__, 'My method2')
+
+    def test_getDescriptionFor_nonesuch_no_default(self):
+        from zope.interface import Interface
+
+        class IEmpty(Interface):
+            pass
+
+        self.assertRaises(KeyError, IEmpty.getDescriptionFor, 'nonesuch')
+
+    def test_getDescriptionFor_simple(self):
+        from zope.interface import Attribute
+        from zope.interface.interface import Method
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class ISimple(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        a_desc = ISimple.getDescriptionFor('attr')
+        self.failUnless(isinstance(a_desc, Attribute))
+        self.assertEqual(a_desc.__name__, 'attr')
+        self.assertEqual(a_desc.__doc__, 'My attr')
+
+        m_desc = ISimple.getDescriptionFor('method')
+        self.failUnless(isinstance(m_desc, Method))
+        self.assertEqual(m_desc.__name__, 'method')
+        self.assertEqual(m_desc.__doc__, 'My method')
+
+    def test_getDescriptionFor_derived(self):
+        from zope.interface import Attribute
+        from zope.interface.interface import Method
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class IBase(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        class IDerived(IBase):
+            attr2 = Attribute(_u('My attr2'))
+
+            def method():
+                "My method, overridden"
+
+            def method2():
+                "My method2"
+
+        a_desc = IDerived.getDescriptionFor('attr')
+        self.failUnless(isinstance(a_desc, Attribute))
+        self.assertEqual(a_desc.__name__, 'attr')
+        self.assertEqual(a_desc.__doc__, 'My attr')
+
+        m_desc = IDerived.getDescriptionFor('method')
+        self.failUnless(isinstance(m_desc, Method))
+        self.assertEqual(m_desc.__name__, 'method')
+        self.assertEqual(m_desc.__doc__, 'My method, overridden')
+
+        a2_desc = IDerived.getDescriptionFor('attr2')
+        self.failUnless(isinstance(a2_desc, Attribute))
+        self.assertEqual(a2_desc.__name__, 'attr2')
+        self.assertEqual(a2_desc.__doc__, 'My attr2')
+
+        m2_desc = IDerived.getDescriptionFor('method2')
+        self.failUnless(isinstance(m2_desc, Method))
+        self.assertEqual(m2_desc.__name__, 'method2')
+        self.assertEqual(m2_desc.__doc__, 'My method2')
+
+    def test___getitem__nonesuch(self):
+        from zope.interface import Interface
+
+        class IEmpty(Interface):
+            pass
+
+        self.assertRaises(KeyError, IEmpty.__getitem__, 'nonesuch')
+
+    def test___getitem__simple(self):
+        from zope.interface import Attribute
+        from zope.interface.interface import Method
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class ISimple(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        a_desc = ISimple['attr']
+        self.failUnless(isinstance(a_desc, Attribute))
+        self.assertEqual(a_desc.__name__, 'attr')
+        self.assertEqual(a_desc.__doc__, 'My attr')
+
+        m_desc = ISimple['method']
+        self.failUnless(isinstance(m_desc, Method))
+        self.assertEqual(m_desc.__name__, 'method')
+        self.assertEqual(m_desc.__doc__, 'My method')
+
+    def test___getitem___derived(self):
+        from zope.interface import Attribute
+        from zope.interface.interface import Method
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class IBase(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        class IDerived(IBase):
+            attr2 = Attribute(_u('My attr2'))
+
+            def method():
+                "My method, overridden"
+
+            def method2():
+                "My method2"
+
+        a_desc = IDerived['attr']
+        self.failUnless(isinstance(a_desc, Attribute))
+        self.assertEqual(a_desc.__name__, 'attr')
+        self.assertEqual(a_desc.__doc__, 'My attr')
+
+        m_desc = IDerived['method']
+        self.failUnless(isinstance(m_desc, Method))
+        self.assertEqual(m_desc.__name__, 'method')
+        self.assertEqual(m_desc.__doc__, 'My method, overridden')
+
+        a2_desc = IDerived['attr2']
+        self.failUnless(isinstance(a2_desc, Attribute))
+        self.assertEqual(a2_desc.__name__, 'attr2')
+        self.assertEqual(a2_desc.__doc__, 'My attr2')
+
+        m2_desc = IDerived['method2']
+        self.failUnless(isinstance(m2_desc, Method))
+        self.assertEqual(m2_desc.__name__, 'method2')
+        self.assertEqual(m2_desc.__doc__, 'My method2')
+
+    def test___contains__nonesuch(self):
+        from zope.interface import Interface
+
+        class IEmpty(Interface):
+            pass
+
+        self.failIf('nonesuch' in IEmpty)
+
+    def test___contains__simple(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class ISimple(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        self.failUnless('attr' in ISimple)
+        self.failUnless('method' in ISimple)
+
+    def test___contains__derived(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class IBase(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        class IDerived(IBase):
+            attr2 = Attribute(_u('My attr2'))
+
+            def method():
+                "My method, overridden"
+
+            def method2():
+                "My method2"
+
+        self.failUnless('attr' in IDerived)
+        self.failUnless('method' in IDerived)
+        self.failUnless('attr2' in IDerived)
+        self.failUnless('method2' in IDerived)
+
+    def test___iter__empty(self):
+        from zope.interface import Interface
+
+        class IEmpty(Interface):
+            pass
+
+        self.assertEqual(list(IEmpty), [])
+
+    def test___iter__simple(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class ISimple(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        self.assertEqual(sorted(list(ISimple)), ['attr', 'method'])
+
+    def test___iter__derived(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface._compat import _u
+
+        class IBase(Interface):
+            attr = Attribute(_u('My attr'))
+
+            def method():
+                "My method"
+
+        class IDerived(IBase):
+            attr2 = Attribute(_u('My attr2'))
+
+            def method():
+                "My method, overridden"
+
+            def method2():
+                "My method2"
+
+        self.assertEqual(sorted(list(IDerived)),
+                         ['attr', 'attr2', 'method', 'method2'])
+
+    def test_function_attributes_become_tagged_values(self):
+        from zope.interface import Interface
+
+        class ITagMe(Interface):
+            def method():
+                pass
+            method.optional = 1
+
+        method = ITagMe['method']
+        self.assertEqual(method.getTaggedValue('optional'), 1)
+
+    def test___doc___non_element(self):
+        from zope.interface import Interface
+
+        class IHaveADocString(Interface):
+            "xxx"
+
+        self.assertEqual(IHaveADocString.__doc__, "xxx")
+        self.assertEqual(list(IHaveADocString), [])
+
+    def test___doc___as_element(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+
+        class IHaveADocString(Interface):
+            "xxx"
+            __doc__ = Attribute('the doc')
+
+        self.assertEqual(IHaveADocString.__doc__, "")
+        self.assertEqual(list(IHaveADocString), ['__doc__'])
+
+    def _errorsEqual(self, has_invariant, error_len, error_msgs, iface):
+        from zope.interface.exceptions import Invalid
+        self.assertRaises(Invalid, iface.validateInvariants, has_invariant)
+        e = []
+        try:
+            iface.validateInvariants(has_invariant, e)
+        except Invalid as error:
+            self.assertEqual(error.args[0], e)
+        else:
+            self.fail('validateInvariants should always raise Invalid')
+        self.assertEqual(len(e), error_len)
+        msgs = [error.args[0] for error in e]
+        msgs.sort()
+        for msg in msgs:
+            self.assertEqual(msg, error_msgs.pop(0))
+
+    def test_invariant_simple(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface import directlyProvides
+        from zope.interface import invariant
+
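+        # _ifFooThenBar and _barGreaterThanFoo (used in the invariant tests
+        # here and below) are presumably module-level helpers defined
+        # elsewhere in this file; each raises
+        # zope.interface.exceptions.Invalid, with the message asserted
+        # below, when its condition does not hold.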
+        class IInvariant(Interface):
+            foo = Attribute('foo')
+            bar = Attribute('bar; must eval to Boolean True if foo does')
+            invariant(_ifFooThenBar)
+
+        class HasInvariant(object):
+            pass
+
+        # set up
+        has_invariant = HasInvariant()
+        directlyProvides(has_invariant, IInvariant)
+
+        # the tests
+        self.assertEqual(IInvariant.getTaggedValue('invariants'),
+                         [_ifFooThenBar])
+        self.assertEqual(IInvariant.validateInvariants(has_invariant), None)
+        has_invariant.bar = 27
+        self.assertEqual(IInvariant.validateInvariants(has_invariant), None)
+        has_invariant.foo = 42
+        self.assertEqual(IInvariant.validateInvariants(has_invariant), None)
+        del has_invariant.bar
+        self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'],
+                          IInvariant)
+
+    def test_invariant_nested(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface import directlyProvides
+        from zope.interface import invariant
+
+        class IInvariant(Interface):
+            foo = Attribute('foo')
+            bar = Attribute('bar; must eval to Boolean True if foo does')
+            invariant(_ifFooThenBar)
+
+        class ISubInvariant(IInvariant):
+            invariant(_barGreaterThanFoo)
+
+        class HasInvariant(object):
+            pass
+
+        # nested interfaces with invariants:
+        self.assertEqual(ISubInvariant.getTaggedValue('invariants'),
+                         [_barGreaterThanFoo])
+        has_invariant = HasInvariant()
+        directlyProvides(has_invariant, ISubInvariant)
+        has_invariant.foo = 42
+        # even though the subinterface adds a second invariant, we should
+        # still only get one error here.
+        self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'],
+                          ISubInvariant)
+        # however, if we set both foo and bar, with bar not greater than foo,
+        # we'll get the error from the nested invariant instead
+        has_invariant.foo = 2
+        has_invariant.bar = 1
+        self._errorsEqual(has_invariant, 1,
+                          ['Please, Boo MUST be greater than Foo!'],
+                          ISubInvariant)
+        # and if we set foo to a positive number and bar to 0, we'll
+        # get both errors!
+        has_invariant.foo = 1
+        has_invariant.bar = 0
+        self._errorsEqual(has_invariant, 2,
+                          ['If Foo, then Bar!',
+                           'Please, Boo MUST be greater than Foo!'],
+                          ISubInvariant)
+        # for a happy ending, we'll make the invariants happy
+        has_invariant.foo = 1
+        has_invariant.bar = 2
+        self.assertEqual(IInvariant.validateInvariants(has_invariant), None)
+
+    def test_invariant_mutandis(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface import directlyProvides
+        from zope.interface import invariant
+
+        class IInvariant(Interface):
+            foo = Attribute('foo')
+            bar = Attribute('bar; must eval to Boolean True if foo does')
+            invariant(_ifFooThenBar)
+
+        class HasInvariant(object):
+            pass
+
+        # now we'll do two invariants on the same interface,
+        # just to make sure that a small
+        # multi-invariant interface is at least minimally tested.
+        has_invariant = HasInvariant()
+        directlyProvides(has_invariant, IInvariant)
+        has_invariant.foo = 42
+
+        # if you really need to mutate, then this would be the way to do it.
+        # Probably a bad idea, though. :-)
+        old_invariants = IInvariant.getTaggedValue('invariants')
+        invariants = old_invariants[:]
+        invariants.append(_barGreaterThanFoo)
+        IInvariant.setTaggedValue('invariants', invariants)
+
+        # even though the interface has changed, we should still only have one
+        # error.
+        self._errorsEqual(has_invariant, 1, ['If Foo, then Bar!'],
+                          IInvariant)
+        # however, if we set both foo and bar, with bar not greater than foo,
+        # we'll get the error from the newly added invariant instead
+        has_invariant.foo = 2
+        has_invariant.bar = 1
+        self._errorsEqual(has_invariant, 1,
+                         ['Please, Boo MUST be greater than Foo!'], IInvariant)
+        # and if we set foo to a positive number and bar to 0, we'll
+        # get both errors!
+        has_invariant.foo = 1
+        has_invariant.bar = 0
+        self._errorsEqual(has_invariant, 2,
+                          ['If Foo, then Bar!',
+                           'Please, Boo MUST be greater than Foo!'],
+                          IInvariant)
+        # for another happy ending, we'll make the invariants happy again
+        has_invariant.foo = 1
+        has_invariant.bar = 2
+        self.assertEqual(IInvariant.validateInvariants(has_invariant), None)
+        # clean up
+        IInvariant.setTaggedValue('invariants', old_invariants)
+
+    def test___doc___element(self):
+        from zope.interface import Interface
+        from zope.interface import Attribute
+        class I(Interface):
+            "xxx"
+
+        self.assertEqual(I.__doc__, "xxx")
+        self.assertEqual(list(I), [])
+
+        class I(Interface):
+            "xxx"
+
+            __doc__ = Attribute('the doc')
+
+        self.assertEqual(I.__doc__, "")
+        self.assertEqual(list(I), ['__doc__'])
+
+    def testIssue228(self):
+        # Test for http://collector.zope.org/Zope3-dev/228
+        # Old style classes don't have a '__class__' attribute
+        import sys
+        if sys.version[0] < '3':
+            # No old style classes in Python 3, so the test becomes moot.
+            from zope.interface import Interface
+
+            class I(Interface):
+                "xxx"
+
+            class OldStyle:
+                __providedBy__ = None
+
+            self.assertRaises(AttributeError, I.providedBy, OldStyle)
+
+    def test_invariant_as_decorator(self):
+        from zope.interface import Interface
+        from zope.interface import Attribute
+        from zope.interface import implementer
+        from zope.interface import invariant
+        from zope.interface.exceptions import Invalid
+
+        class IRange(Interface):
+            min = Attribute("Lower bound")
+            max = Attribute("Upper bound")
+            
+            @invariant
+            def range_invariant(ob):
+                if ob.max < ob.min:
+                    raise Invalid('max < min')
+
+        @implementer(IRange)
+        class Range(object):
+
+            def __init__(self, min, max):
+                self.min, self.max = min, max
+
+        IRange.validateInvariants(Range(1,2))
+        IRange.validateInvariants(Range(1,1))
+        try:
+            IRange.validateInvariants(Range(2,1))
+        except Invalid as e:
+            self.assertEqual(str(e), 'max < min')
+        else:
+            self.fail('Invalid was not raised for max < min')
+
+    def test_taggedValue(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface import taggedValue
+
+        class ITagged(Interface):
+            foo = Attribute('foo')
+            bar = Attribute('bar; must eval to Boolean True if foo does')
+            taggedValue('qux', 'Spam')
+
+        class HasInvariant(object):
+            pass
+
+        self.assertEqual(ITagged.getTaggedValue('qux'), 'Spam')
+        self.failUnless('qux' in ITagged.getTaggedValueTags())
+
+    def test_description_cache_management(self):
+        # See https://bugs.launchpad.net/zope.interface/+bug/185974
+        # There was a bug where the cache used by Specification.get() was not
+        # cleared when the bases were changed.
+        from zope.interface import Interface
+        from zope.interface import Attribute
+
+        class I1(Interface):
+            a = Attribute('a')
+
+        class I2(I1):
+            pass
+
+        class I3(I2):
+            pass
+
+        self.failUnless(I3.get('a') is I1.get('a'))
+
+        I2.__bases__ = (Interface,)
+        self.failUnless(I3.get('a') is None)
+
+    def test___call___defers_to___conform___(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class I(Interface):
+            pass
+
+        @implementer(I)
+        class C(object):
+            def __conform__(self, proto):
+                return 0
+
+        self.assertEqual(I(C()), 0)
+
+    def test___call___object_implements(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class I(Interface):
+            pass
+
+        @implementer(I)
+        class C(object):
+            pass
+
+        c = C()
+        self.failUnless(I(c) is c)
+
+    def test___call___miss_wo_alternate(self):
+        from zope.interface import Interface
+
+        class I(Interface):
+            pass
+
+        class C(object):
+            pass
+
+        c = C()
+        self.assertRaises(TypeError, I, c)
+
+    def test___call___miss_w_alternate(self):
+        from zope.interface import Interface
+
+        class I(Interface):
+            pass
+
+        class C(object):
+            pass
+
+        c = C()
+        self.failUnless(I(c, self) is self)
+
+    def test___call___w_adapter_hook(self):
+        from zope.interface import Interface
+        from zope.interface.interface import adapter_hooks
+        old_hooks = adapter_hooks[:]
+
+        def _miss(iface, obj):
+            pass
+
+        def _hit(iface, obj):
+            return self
+
+        class I(Interface):
+            pass
+
+        class C(object):
+            pass
+
+        c = C()
+
+        old_adapter_hooks = adapter_hooks[:]
+        adapter_hooks[:] = [_miss, _hit]
+        try:
+            self.failUnless(I(c) is self)
+        finally:
+            adapter_hooks[:] = old_adapter_hooks
+
+
+class AttributeTests(ElementTests):
+
+    DEFAULT_NAME = 'TestAttribute'
+
+    def _getTargetClass(self):
+        from zope.interface.interface import Attribute
+        return Attribute
+
+
+class MethodTests(AttributeTests):
+
+    DEFAULT_NAME = 'TestMethod'
+
+    def _getTargetClass(self):
+        from zope.interface.interface import Method
+        return Method
+
+    def test_optional_as_property(self):
+        method = self._makeOne()
+        self.assertEqual(method.optional, {})
+        method.optional = {'foo': 'bar'}
+        self.assertEqual(method.optional, {'foo': 'bar'})
+        del method.optional
+        self.assertEqual(method.optional, {})
+
+    def test___call___raises_BrokenImplementation(self):
+        from zope.interface.exceptions import BrokenImplementation
+        method = self._makeOne()
+        try:
+            method()
+        except BrokenImplementation as e:
+            self.assertEqual(e.interface, None)
+            self.assertEqual(e.name, self.DEFAULT_NAME)
+        else:
+            self.fail('__call__ should raise BrokenImplementation')
+
+    def test_getSignatureInfo_bare(self):
+        method = self._makeOne()
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), [])
+        self.assertEqual(list(info['required']), [])
+        self.assertEqual(info['optional'], {})
+        self.assertEqual(info['varargs'], None)
+        self.assertEqual(info['kwargs'], None)
+
+    def test_getSignatureString_bare(self):
+        method = self._makeOne()
+        self.assertEqual(method.getSignatureString(), '()')
+
+    def test_getSignatureString_w_only_required(self):
+        method = self._makeOne()
+        method.positional = method.required = ['foo']
+        self.assertEqual(method.getSignatureString(), '(foo)')
+
+    def test_getSignatureString_w_optional(self):
+        method = self._makeOne()
+        method.positional = method.required = ['foo']
+        method.optional = {'foo': 'bar'}
+        self.assertEqual(method.getSignatureString(), "(foo='bar')")
+
+    def test_getSignatureString_w_varargs(self):
+        method = self._makeOne()
+        method.varargs = 'args'
+        self.assertEqual(method.getSignatureString(), "(*args)")
+
+    def test_getSignatureString_w_kwargs(self):
+        method = self._makeOne()
+        method.kwargs = 'kw'
+        self.assertEqual(method.getSignatureString(), "(**kw)")
+
+
+class Test_fromFunction(unittest.TestCase):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.interface import fromFunction
+        return fromFunction(*args, **kw)
+
+    def test_bare(self):
+        def _func():
+            "DOCSTRING"
+        method = self._callFUT(_func)
+        self.assertEqual(method.getName(), '_func')
+        self.assertEqual(method.getDoc(), 'DOCSTRING')
+        self.assertEqual(method.interface, None)
+        self.assertEqual(list(method.getTaggedValueTags()), [])
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), [])
+        self.assertEqual(list(info['required']), [])
+        self.assertEqual(info['optional'], {})
+        self.assertEqual(info['varargs'], None)
+        self.assertEqual(info['kwargs'], None)
+
+    def test_w_interface(self):
+        from zope.interface.interface import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        def _func():
+            "DOCSTRING"
+        method = self._callFUT(_func, interface=IFoo)
+        self.assertEqual(method.interface, IFoo)
+
+    def test_w_name(self):
+        def _func():
+            "DOCSTRING"
+        method = self._callFUT(_func, name='anotherName')
+        self.assertEqual(method.getName(), 'anotherName')
+
+    def test_w_only_required(self):
+        def _func(foo):
+            "DOCSTRING"
+        method = self._callFUT(_func)
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), ['foo'])
+        self.assertEqual(list(info['required']), ['foo'])
+        self.assertEqual(info['optional'], {})
+        self.assertEqual(info['varargs'], None)
+        self.assertEqual(info['kwargs'], None)
+
+    def test_w_optional(self):
+        def _func(foo='bar'):
+            "DOCSTRING"
+        method = self._callFUT(_func)
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), ['foo'])
+        self.assertEqual(list(info['required']), [])
+        self.assertEqual(info['optional'], {'foo': 'bar'})
+        self.assertEqual(info['varargs'], None)
+        self.assertEqual(info['kwargs'], None)
+
+    def test_w_optional_self(self):
+        # XXX This is a weird case, trying to cover the following code in
+        # FUT::
+        #
+        # nr = na-len(defaults)
+        # if nr < 0:
+        #     defaults=defaults[-nr:]
+        #     nr = 0
+        def _func(self='bar'):
+            "DOCSTRING"
+        method = self._callFUT(_func, imlevel=1)
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), [])
+        self.assertEqual(list(info['required']), [])
+        self.assertEqual(info['optional'], {})
+        self.assertEqual(info['varargs'], None)
+        self.assertEqual(info['kwargs'], None)
+
+    def test_w_varargs(self):
+        def _func(*args):
+            "DOCSTRING"
+        method = self._callFUT(_func)
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), [])
+        self.assertEqual(list(info['required']), [])
+        self.assertEqual(info['optional'], {})
+        self.assertEqual(info['varargs'], 'args')
+        self.assertEqual(info['kwargs'], None)
+
+    def test_w_kwargs(self):
+        def _func(**kw):
+            "DOCSTRING"
+        method = self._callFUT(_func)
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), [])
+        self.assertEqual(list(info['required']), [])
+        self.assertEqual(info['optional'], {})
+        self.assertEqual(info['varargs'], None)
+        self.assertEqual(info['kwargs'], 'kw')
+
+    def test_full_spectrum(self):
+        def _func(foo, bar='baz', *args, **kw):
+            "DOCSTRING"
+        method = self._callFUT(_func)
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), ['foo', 'bar'])
+        self.assertEqual(list(info['required']), ['foo'])
+        self.assertEqual(info['optional'], {'bar': 'baz'})
+        self.assertEqual(info['varargs'], 'args')
+        self.assertEqual(info['kwargs'], 'kw')
+
+
+class Test_fromMethod(unittest.TestCase):
+
+    def _callFUT(self, *args, **kw):
+        from zope.interface.interface import fromMethod
+        return fromMethod(*args, **kw)
+
+    def test_no_args(self):
+        class Foo(object):
+            def bar(self):
+                "DOCSTRING"
+        method = self._callFUT(Foo.bar)
+        self.assertEqual(method.getName(), 'bar')
+        self.assertEqual(method.getDoc(), 'DOCSTRING')
+        self.assertEqual(method.interface, None)
+        self.assertEqual(list(method.getTaggedValueTags()), [])
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), [])
+        self.assertEqual(list(info['required']), [])
+        self.assertEqual(info['optional'], {})
+        self.assertEqual(info['varargs'], None)
+        self.assertEqual(info['kwargs'], None)
+
+    def test_full_spectrum(self):
+        class Foo(object):
+            def bar(self, foo, bar='baz', *args, **kw):
+                "DOCSTRING"
+        method = self._callFUT(Foo.bar)
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), ['foo', 'bar'])
+        self.assertEqual(list(info['required']), ['foo'])
+        self.assertEqual(info['optional'], {'bar': 'baz'})
+        self.assertEqual(info['varargs'], 'args')
+        self.assertEqual(info['kwargs'], 'kw')
+
+    def test_w_non_method(self):
+        def foo():
+            "DOCSTRING"
+        method = self._callFUT(foo)
+        self.assertEqual(method.getName(), 'foo')
+        self.assertEqual(method.getDoc(), 'DOCSTRING')
+        self.assertEqual(method.interface, None)
+        self.assertEqual(list(method.getTaggedValueTags()), [])
+        info = method.getSignatureInfo()
+        self.assertEqual(list(info['positional']), [])
+        self.assertEqual(list(info['required']), [])
+        self.assertEqual(info['optional'], {})
+        self.assertEqual(info['varargs'], None)
+        self.assertEqual(info['kwargs'], None)
+
+class DummyDependent(object):
+
+    def __init__(self):
+        self._changed = []
+
+    def changed(self, originally_changed):
+        self._changed.append(originally_changed)
+
+
+def _barGreaterThanFoo(obj):
+    from zope.interface.exceptions import Invalid
+    foo = getattr(obj, 'foo', None)
+    bar = getattr(obj, 'bar', None)
+    if foo is not None and isinstance(foo, type(bar)):
+        # type checking should be handled elsewhere (like, say, 
+        # schema); these invariants should be intra-interface 
+        # constraints.  This is a hacky way to do it, maybe, but you
+        # get the idea
+        if not bar > foo:
+            raise Invalid('Please, Boo MUST be greater than Foo!')
+
+def _ifFooThenBar(obj):
+    from zope.interface.exceptions import Invalid
+    if getattr(obj, 'foo', None) and not getattr(obj, 'bar', None):
+        raise Invalid('If Foo, then Bar!')
+
+
+class _Monkey(object):
+    # context-manager for replacing module names in the scope of a test.
+    def __init__(self, module, **kw):
+        self.module = module
+        self.to_restore = dict([(key, getattr(module, key)) for key in kw])
+        for key, value in kw.items():
+            setattr(module, key, value)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        for key, value in self.to_restore.items():
+            setattr(self.module, key, value)
+
+
+def test_suite():
+    import doctest
+    return unittest.TestSuite((
+        unittest.makeSuite(ElementTests),
+        unittest.makeSuite(SpecificationBasePyTests),
+        unittest.makeSuite(InterfaceBasePyTests),
+        unittest.makeSuite(SpecificationTests),
+        unittest.makeSuite(InterfaceTests),
+        unittest.makeSuite(AttributeTests),
+        unittest.makeSuite(MethodTests),
+        unittest.makeSuite(Test_fromFunction),
+        #unittest.makeSuite(Test_fromMethod),
+        doctest.DocTestSuite(),
+        doctest.DocTestSuite("zope.interface.interface"),
+    ))
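
The _Monkey helper defined a few lines above is the same kind of event-capturing
hook that the registry tests further down build on via _wrapEvents. A minimal
sketch of the pattern, assuming zope.interface is importable and the test module
above is reachable as zope.interface.tests.test_interface (the IFoo/Foo names
here are made up for illustration):

    from zope.interface import Interface, implementer, registry
    from zope.interface.tests.test_interface import _Monkey  # helper above

    class IFoo(Interface):
        pass

    @implementer(IFoo)
    class Foo(object):
        pass

    events = []

    def _notify(*args, **kw):
        # stand-in notification hook; collects whatever the registry emits
        events.append((args, kw))

    with _Monkey(registry, notify=_notify):
        comps = registry.Components('sketch')
        comps.registerUtility(Foo())   # provided interface inferred from @implementer
    assert len(events) == 1            # exactly one Registered event was captured
    # on exiting the with-block, the original registry.notify is restored

Outside the with-block, registrations go through the real notification hook
again, which is why the tests below wrap each registration call this way.
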
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_interfaces.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_interfaces.py
new file mode 100644
index 0000000..7bc5807
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_interfaces.py
@@ -0,0 +1,115 @@
+import unittest
+
+class _SilencePy3Deprecations(unittest.TestCase):
+    # silence deprecation warnings under py3
+
+    def failUnless(self, expr):
+        # St00pid speling.
+        return self.assertTrue(expr)
+
+    def failIf(self, expr):
+        # St00pid speling.
+        return self.assertFalse(expr)
+
+
+class _ConformsToIObjectEvent(object):
+
+    def _makeOne(self, target=None):
+        if target is None:
+            target = object()
+        return self._getTargetClass()(target)
+
+    def test_class_conforms_to_IObjectEvent(self):
+        from zope.interface.interfaces import IObjectEvent
+        from zope.interface.verify import verifyClass
+        verifyClass(IObjectEvent, self._getTargetClass())
+
+    def test_instance_conforms_to_IObjectEvent(self):
+        from zope.interface.interfaces import IObjectEvent
+        from zope.interface.verify import verifyObject
+        verifyObject(IObjectEvent, self._makeOne())
+
+
+class _ConformsToIRegistrationEvent(_ConformsToIObjectEvent):
+
+    def test_class_conforms_to_IRegistrationEvent(self):
+        from zope.interface.interfaces import IRegistrationEvent
+        from zope.interface.verify import verifyClass
+        verifyClass(IRegistrationEvent, self._getTargetClass())
+
+    def test_instance_conforms_to_IRegistrationEvent(self):
+        from zope.interface.interfaces import IRegistrationEvent
+        from zope.interface.verify import verifyObject
+        verifyObject(IRegistrationEvent, self._makeOne())
+
+
+class ObjectEventTests(_SilencePy3Deprecations, _ConformsToIObjectEvent):
+
+    def _getTargetClass(self):
+        from zope.interface.interfaces import ObjectEvent
+        return ObjectEvent
+
+    def test_ctor(self):
+        target = object()
+        event = self._makeOne(target)
+        self.failUnless(event.object is target)
+
+
+class RegistrationEventTests(_SilencePy3Deprecations,
+                             _ConformsToIRegistrationEvent):
+
+    def _getTargetClass(self):
+        from zope.interface.interfaces import RegistrationEvent
+        return RegistrationEvent
+
+    def test___repr__(self):
+        target = object()
+        event = self._makeOne(target)
+        r = repr(event)
+        self.assertEqual(r.splitlines(),
+                         ['RegistrationEvent event:', repr(target)])
+
+
+class RegisteredTests(_SilencePy3Deprecations,
+                      _ConformsToIRegistrationEvent):
+
+    def _getTargetClass(self):
+        from zope.interface.interfaces import Registered
+        return Registered
+
+    def test_class_conforms_to_IRegistered(self):
+        from zope.interface.interfaces import IRegistered
+        from zope.interface.verify import verifyClass
+        verifyClass(IRegistered, self._getTargetClass())
+
+    def test_instance_conforms_to_IRegistered(self):
+        from zope.interface.interfaces import IRegistered
+        from zope.interface.verify import verifyObject
+        verifyObject(IRegistered, self._makeOne())
+
+
+class UnregisteredTests(_SilencePy3Deprecations,
+                        _ConformsToIRegistrationEvent):
+
+    def _getTargetClass(self):
+        from zope.interface.interfaces import Unregistered
+        return Unregistered
+
+    def test_class_conforms_to_IUnregistered(self):
+        from zope.interface.interfaces import IUnregistered
+        from zope.interface.verify import verifyClass
+        verifyClass(IUnregistered, self._getTargetClass())
+
+    def test_instance_conforms_to_IUnregistered(self):
+        from zope.interface.interfaces import IUnregistered
+        from zope.interface.verify import verifyObject
+        verifyObject(IUnregistered, self._makeOne())
+
+
+def test_suite():
+    return unittest.TestSuite((
+            unittest.makeSuite(ObjectEventTests),
+            unittest.makeSuite(RegistrationEventTests),
+            unittest.makeSuite(RegisteredTests),
+            unittest.makeSuite(UnregisteredTests),
+        ))
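
The conformance tests above lean on zope.interface.verify. As a rough sketch
with made-up IEvent/Event names: verifyClass checks that a class defines the
methods an interface declares, and verifyObject additionally checks that an
instance really has the declared attributes.

    from zope.interface import Attribute, Interface, implementer
    from zope.interface.verify import verifyClass, verifyObject

    class IEvent(Interface):
        object = Attribute("The subject of the event")

    @implementer(IEvent)
    class Event(object):
        def __init__(self, object):
            self.object = object

    verifyClass(IEvent, Event)             # method signatures only; passes
    verifyObject(IEvent, Event(object()))  # attributes are checked too; passes

Both calls raise an exception from zope.interface.exceptions on failure rather
than returning False, which is why the tests simply invoke them.
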
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_odd_declarations.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_odd_declarations.py
new file mode 100644
index 0000000..7314c7c
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_odd_declarations.py
@@ -0,0 +1,227 @@
+##############################################################################
+#
+# Copyright (c) 2003 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test interface declarations against ExtensionClass-like classes.
+
+These tests are to make sure we do something sane in the presence of
+classic ExtensionClass classes and instances.
+"""
+import unittest
+
+from zope.interface.tests import odd
+from zope.interface import Interface
+from zope.interface import implementer
+from zope.interface import directlyProvides
+from zope.interface import providedBy
+from zope.interface import directlyProvidedBy
+from zope.interface import classImplements
+from zope.interface import classImplementsOnly
+from zope.interface import implementedBy
+from zope.interface._compat import _skip_under_py3k
+
+class I1(Interface): pass
+class I2(Interface): pass
+class I3(Interface): pass
+class I31(I3): pass
+class I4(Interface): pass
+class I5(Interface): pass
+
+class Odd(object): __metaclass__ = odd.MetaClass
+
+class B(Odd): __implemented__ = I2
+
+
+# TODO: We are going to need more magic to make classProvides work with odd
+#       classes. This will work in the next iteration. For now, we'll use
+#       a different mechanism.
+
+# from zope.interface import classProvides
+
+class A(Odd):
+    pass
+classImplements(A, I1)
+
+class C(A, B):
+    pass
+classImplements(C, I31)
+
+
+class Test(unittest.TestCase):
+
+    def failUnless(self, expr): # silence deprecation warnings under py3
+        return self.assertTrue(expr)
+
+    def failIf(self, expr): # silence deprecation warnings under py3
+        return self.assertFalse(expr)
+
+    def test_ObjectSpecification(self):
+        c = C()
+        directlyProvides(c, I4)
+        self.assertEqual([i.getName() for i in providedBy(c)],
+                         ['I4', 'I31', 'I1', 'I2']
+                         )
+        self.assertEqual([i.getName() for i in providedBy(c).flattened()],
+                         ['I4', 'I31', 'I3', 'I1', 'I2', 'Interface']
+                         )
+        self.failUnless(I1 in providedBy(c))
+        self.failIf(I3 in providedBy(c))
+        self.failUnless(providedBy(c).extends(I3))
+        self.failUnless(providedBy(c).extends(I31))
+        self.failIf(providedBy(c).extends(I5))
+
+        class COnly(A, B):
+            pass
+        classImplementsOnly(COnly, I31)
+
+        class D(COnly):
+            pass
+        classImplements(D, I5)
+
+        classImplements(D, I5)
+
+        c = D()
+        directlyProvides(c, I4)
+        self.assertEqual([i.getName() for i in providedBy(c)],
+                         ['I4', 'I5', 'I31'])
+        self.assertEqual([i.getName() for i in providedBy(c).flattened()],
+                         ['I4', 'I5', 'I31', 'I3', 'Interface'])
+        self.failIf(I1 in providedBy(c))
+        self.failIf(I3 in providedBy(c))
+        self.failUnless(providedBy(c).extends(I3))
+        self.failIf(providedBy(c).extends(I1))
+        self.failUnless(providedBy(c).extends(I31))
+        self.failUnless(providedBy(c).extends(I5))
+
+        class COnly(A, B): __implemented__ = I31
+        class D(COnly):
+            pass
+        classImplements(D, I5)
+
+        classImplements(D, I5)
+        c = D()
+        directlyProvides(c, I4)
+        self.assertEqual([i.getName() for i in providedBy(c)],
+                         ['I4', 'I5', 'I31'])
+        self.assertEqual([i.getName() for i in providedBy(c).flattened()],
+                         ['I4', 'I5', 'I31', 'I3', 'Interface'])
+        self.failIf(I1 in providedBy(c))
+        self.failIf(I3 in providedBy(c))
+        self.failUnless(providedBy(c).extends(I3))
+        self.failIf(providedBy(c).extends(I1))
+        self.failUnless(providedBy(c).extends(I31))
+        self.failUnless(providedBy(c).extends(I5))
+
+    def test_classImplements(self):
+
+        @implementer(I3)
+        class A(Odd):
+            pass
+
+        @implementer(I4)
+        class B(Odd):
+            pass
+
+        class C(A, B):
+            pass
+        classImplements(C, I1, I2)
+        self.assertEqual([i.getName() for i in implementedBy(C)],
+                         ['I1', 'I2', 'I3', 'I4'])
+        classImplements(C, I5)
+        self.assertEqual([i.getName() for i in implementedBy(C)],
+                         ['I1', 'I2', 'I5', 'I3', 'I4'])
+
+    def test_classImplementsOnly(self):
+        @implementer(I3)
+        class A(Odd):
+            pass
+
+        @implementer(I4)
+        class B(Odd):
+            pass
+
+        class C(A, B):
+            pass
+        classImplementsOnly(C, I1, I2)
+        self.assertEqual([i.__name__ for i in implementedBy(C)],
+                         ['I1', 'I2'])
+
+
+    def test_directlyProvides(self):
+        class IA1(Interface): pass
+        class IA2(Interface): pass
+        class IB(Interface): pass
+        class IC(Interface): pass
+        class A(Odd):
+            pass
+        classImplements(A, IA1, IA2)
+
+        class B(Odd):
+            pass
+        classImplements(B, IB)
+
+        class C(A, B):
+            pass
+        classImplements(C, IC)
+
+
+        ob = C()
+        directlyProvides(ob, I1, I2)
+        self.failUnless(I1 in providedBy(ob))
+        self.failUnless(I2 in providedBy(ob))
+        self.failUnless(IA1 in providedBy(ob))
+        self.failUnless(IA2 in providedBy(ob))
+        self.failUnless(IB in providedBy(ob))
+        self.failUnless(IC in providedBy(ob))
+
+        directlyProvides(ob, directlyProvidedBy(ob)-I2)
+        self.failUnless(I1 in providedBy(ob))
+        self.failIf(I2 in providedBy(ob))
+        self.failIf(I2 in providedBy(ob))
+        directlyProvides(ob, directlyProvidedBy(ob), I2)
+        self.failUnless(I2 in providedBy(ob))
+
+    @_skip_under_py3k
+    def test_directlyProvides_fails_for_odd_class(self):
+        self.assertRaises(TypeError, directlyProvides, C, I5)
+
+    # see above
+    #def TODO_test_classProvides_fails_for_odd_class(self):
+    #    try:
+    #        class A(Odd):
+    #            classProvides(I1)
+    #    except TypeError:
+    #        pass # Success
+    #    self.assert_(False,
+    #                 "Shouldn't be able to use classProvides on odd class."
+    #                 )
+
+    def test_implementedBy(self):
+        class I2(I1): pass
+
+        class C1(Odd):
+            pass
+        classImplements(C1, I2)
+
+        class C2(C1):
+            pass
+        classImplements(C2, I3)
+
+        self.assertEqual([i.getName() for i in implementedBy(C2)],
+                         ['I3', 'I2'])
+
+def test_suite():
+    import doctest
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(Test))
+    suite.addTest(doctest.DocTestSuite(odd))
+    return suite
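
The declarations exercised above come down to two public helpers. As a small
sketch with hypothetical names: classImplements adds interfaces to an
already-defined class (useful when a decorator cannot be applied at definition
time, as with the odd metaclass here), while classImplementsOnly replaces the
inherited declarations outright.

    from zope.interface import (Interface, classImplements,
                                classImplementsOnly, implementedBy)

    class IA(Interface):
        pass

    class IB(Interface):
        pass

    class Legacy(object):       # stand-in for a class we cannot decorate
        pass

    classImplements(Legacy, IA)

    class Sub(Legacy):
        pass

    classImplements(Sub, IB)
    print([i.getName() for i in implementedBy(Sub)])    # ['IB', 'IA']

    classImplementsOnly(Sub, IB)                        # drops the inherited IA
    print([i.getName() for i in implementedBy(Sub)])    # ['IB']
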
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_registry.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_registry.py
new file mode 100644
index 0000000..f717c59
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_registry.py
@@ -0,0 +1,2454 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002, 2009 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Component Registry Tests"""
+import unittest
+
+class _SilencePy3Deprecations(unittest.TestCase):
+    # silence deprecation warnings under py3
+
+    def failUnless(self, expr):
+        # St00pid speling.
+        return self.assertTrue(expr)
+
+    def failIf(self, expr):
+        # St00pid speling.
+        return self.assertFalse(expr)
+
+class ComponentsTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.registry import Components
+        return Components
+
+    def _makeOne(self, name='test', *args, **kw):
+        return self._getTargetClass()(name, *args, **kw)
+
+    def _wrapEvents(self):
+        from zope.interface import registry
+        _events = []
+        def _notify(*args, **kw):
+            _events.append((args, kw))
+        _monkey = _Monkey(registry, notify=_notify)
+        return _monkey, _events
+
+    def test_ctor_no_bases(self):
+        from zope.interface.adapter import AdapterRegistry
+        comp = self._makeOne('testing')
+        self.assertEqual(comp.__name__, 'testing')
+        self.assertEqual(comp.__bases__, ())
+        self.failUnless(isinstance(comp.adapters, AdapterRegistry))
+        self.failUnless(isinstance(comp.utilities, AdapterRegistry))
+        self.assertEqual(comp.adapters.__bases__, ())
+        self.assertEqual(comp.utilities.__bases__, ())
+        self.assertEqual(comp._utility_registrations, {})
+        self.assertEqual(comp._adapter_registrations, {})
+        self.assertEqual(comp._subscription_registrations, [])
+        self.assertEqual(comp._handler_registrations, [])
+
+    def test_ctor_w_base(self):
+        base = self._makeOne('base')
+        comp = self._makeOne('testing', (base,))
+        self.assertEqual(comp.__name__, 'testing')
+        self.assertEqual(comp.__bases__, (base,))
+        self.assertEqual(comp.adapters.__bases__, (base.adapters,))
+        self.assertEqual(comp.utilities.__bases__, (base.utilities,))
+
+    def test___repr__(self):
+        comp = self._makeOne('testing')
+        self.assertEqual(repr(comp), '<Components testing>')
+
+    # test _init_registries / _init_registrations via only caller, __init__.
+
+    def test_assign_to___bases__(self):
+        base1 = self._makeOne('base1')
+        base2 = self._makeOne('base2')
+        comp = self._makeOne()
+        comp.__bases__ = (base1, base2)
+        self.assertEqual(comp.__bases__, (base1, base2))
+        self.assertEqual(comp.adapters.__bases__,
+                         (base1.adapters, base2.adapters))
+        self.assertEqual(comp.utilities.__bases__,
+                         (base1.utilities, base2.utilities))
+
+    def test_registerUtility_both_factory_and_component(self):
+        def _factory():
+            pass
+        _to_reg = object()
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.registerUtility,
+                          component=_to_reg, factory=_factory)
+
+    def test_registerUtility_w_component(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import UtilityRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerUtility(_to_reg, ifoo, _name, _info)
+        self.failUnless(comp.utilities._adapters[0][ifoo][_name] is _to_reg)
+        self.assertEqual(comp._utility_registrations[ifoo, _name],
+                         (_to_reg, _info, None))
+        self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,))
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _to_reg)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is None)
+
+    def test_registerUtility_w_factory(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import UtilityRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        def _factory():
+            return _to_reg
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerUtility(None, ifoo, _name, _info, factory=_factory)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _to_reg)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _factory)
+
+    def test_registerUtility_no_provided_available(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        class Foo(object):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = Foo()
+        comp = self._makeOne()
+        self.assertRaises(TypeError,
+                          comp.registerUtility, _to_reg, None, _name, _info)
+
+    def test_registerUtility_wo_provided(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import UtilityRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        class Foo(object):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = Foo()
+        directlyProvides(_to_reg, ifoo)
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerUtility(_to_reg, None, _name, _info)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _to_reg)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is None)
+
+    def test_registerUtility_duplicates_existing_reg(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, _name, _info)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerUtility(_to_reg, ifoo, _name, _info)
+        self.assertEqual(len(_events), 0)
+
+    def test_registerUtility_replaces_existing_reg(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import UtilityRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _before, _after = object(), object()
+        comp = self._makeOne()
+        comp.registerUtility(_before, ifoo, _name, _info)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerUtility(_after, ifoo, _name, _info)
+        self.assertEqual(len(_events), 2)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _before)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is None)
+        args, kw = _events[1]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _after)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is None)
+
+    def test_registerUtility_w_existing_subscr(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name1 = _u('name1')
+        _name2 = _u('name2')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, _name1, _info)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerUtility(_to_reg, ifoo, _name2, _info)
+        self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,))
+
+    def test_registerUtility_wo_event(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerUtility(_to_reg, ifoo, _name, _info, False)
+        self.assertEqual(len(_events), 0)
+
+    def test_unregisterUtility_neither_factory_nor_component_nor_provided(self):
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterUtility,
+                          component=None, provided=None, factory=None)
+
+    def test_unregisterUtility_both_factory_and_component(self):
+        def _factory():
+            pass
+        _to_reg = object()
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterUtility,
+                          component=_to_reg, factory=_factory)
+
+    def test_unregisterUtility_w_component_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _name = _u('name')
+        _to_reg = object()
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterUtility(_to_reg, ifoo, _name)
+        self.failIf(unreg)
+        self.failIf(_events)
+
+    def test_unregisterUtility_w_component(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import UtilityRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _name = _u('name')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, _name)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterUtility(_to_reg, ifoo, _name)
+        self.failUnless(unreg)
+        self.failIf(comp.utilities._adapters) # all erased
+        self.failIf((ifoo, _name) in comp._utility_registrations)
+        self.failIf(comp.utilities._subscribers)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _to_reg)
+        self.failUnless(event.object.factory is None)
+
+    def test_unregisterUtility_w_factory(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import UtilityRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        def _factory():
+            return _to_reg
+        comp = self._makeOne()
+        comp.registerUtility(None, ifoo, _name, _info, factory=_factory)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterUtility(None, ifoo, _name, factory=_factory)
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _to_reg)
+        self.failUnless(event.object.factory is _factory)
+
+    def test_unregisterUtility_wo_explicit_provided(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import UtilityRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        class Foo(object):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = Foo()
+        directlyProvides(_to_reg, ifoo)
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, _name, _info)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterUtility(_to_reg, None, _name)
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _to_reg)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is None)
+
+    def test_unregisterUtility_wo_component_or_factory(self):
+        from zope.interface.declarations import directlyProvides
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import UtilityRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        class Foo(object):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = Foo()
+        directlyProvides(_to_reg, ifoo)
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, _name, _info)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            # Just pass the interface / name
+            unreg = comp.unregisterUtility(provided=ifoo, name=_name)
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, UtilityRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.component is _to_reg)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is None)
+
+    def test_unregisterUtility_w_existing_subscr(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name1 = _u('name1')
+        _name2 = _u('name2')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, _name1, _info)
+        comp.registerUtility(_to_reg, ifoo, _name2, _info)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.unregisterUtility(_to_reg, ifoo, _name2)
+        self.assertEqual(comp.utilities._subscribers[0][ifoo][''], (_to_reg,))
+
+    def test_registeredUtilities_empty(self):
+        comp = self._makeOne()
+        self.assertEqual(list(comp.registeredUtilities()), [])
+
+    def test_registeredUtilities_notempty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        from zope.interface.registry import UtilityRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name1 = _u('name1')
+        _name2 = _u('name2')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, _name1, _info)
+        comp.registerUtility(_to_reg, ifoo, _name2, _info)
+        reg = sorted(comp.registeredUtilities(), key=lambda r: r.name)
+        self.assertEqual(len(reg), 2)
+        self.failUnless(isinstance(reg[0], UtilityRegistration))
+        self.failUnless(reg[0].registry is comp)
+        self.failUnless(reg[0].provided is ifoo)
+        self.failUnless(reg[0].name is _name1)
+        self.failUnless(reg[0].component is _to_reg)
+        self.failUnless(reg[0].info is _info)
+        self.failUnless(reg[0].factory is None)
+        self.failUnless(isinstance(reg[1], UtilityRegistration))
+        self.failUnless(reg[1].registry is comp)
+        self.failUnless(reg[1].provided is ifoo)
+        self.failUnless(reg[1].name is _name2)
+        self.failUnless(reg[1].component is _to_reg)
+        self.failUnless(reg[1].info is _info)
+        self.failUnless(reg[1].factory is None)
+
+    def test_queryUtility_miss_no_default(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        self.failUnless(comp.queryUtility(ifoo) is None)
+
+    def test_queryUtility_miss_w_default(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        _default = object()
+        self.failUnless(comp.queryUtility(ifoo, default=_default) is _default)
+
+    def test_queryUtility_hit(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo)
+        self.failUnless(comp.queryUtility(ifoo) is _to_reg)
+
+    def test_getUtility_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import ComponentLookupError
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        self.assertRaises(ComponentLookupError, comp.getUtility, ifoo)
+
+    def test_getUtility_hit(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo)
+        self.failUnless(comp.getUtility(ifoo) is _to_reg)
+
+    def test_getUtilitiesFor_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        self.assertEqual(list(comp.getUtilitiesFor(ifoo)), [])
+
+    def test_getUtilitiesFor_hit(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _name1 = _u('name1')
+        _name2 = _u('name2')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, name=_name1)
+        comp.registerUtility(_to_reg, ifoo, name=_name2)
+        self.assertEqual(sorted(comp.getUtilitiesFor(ifoo)),
+                         [(_name1, _to_reg), (_name2, _to_reg)])
+
+    def test_getAllUtilitiesRegisteredFor_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        self.assertEqual(list(comp.getAllUtilitiesRegisteredFor(ifoo)), [])
+
+    def test_getAllUtilitiesRegisteredFor_hit(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _name1 = _u('name1')
+        _name2 = _u('name2')
+        _to_reg = object()
+        comp = self._makeOne()
+        comp.registerUtility(_to_reg, ifoo, name=_name1)
+        comp.registerUtility(_to_reg, ifoo, name=_name2)
+        self.assertEqual(list(comp.getAllUtilitiesRegisteredFor(ifoo)),
+                         [_to_reg])
+
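+    # Adapter registration: `provided` can be passed explicitly or derived
+    # from the factory's @implementer declaration; `required` can be passed
+    # explicitly, contain None (treated as Interface), contain classes
+    # (treated as implementedBy(cls)), or be taken from the factory's
+    # __component_adapts__.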
+    def test_registerAdapter_w_explicit_provided_and_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import AdapterRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        def _factory(context):
+            return _to_reg
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerAdapter(_factory, (ibar,), ifoo, _name, _info)
+        self.failUnless(comp.adapters._adapters[1][ibar][ifoo][_name]
+                        is _factory)
+        self.assertEqual(comp._adapter_registrations[(ibar,), ifoo, _name],
+                         (_factory, _info))
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, AdapterRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _factory)
+
+    def test_registerAdapter_no_provided_available(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.registerAdapter, _Factory, (ibar,),
+                          name=_name, info=_info)
+
+    def test_registerAdapter_wo_explicit_provided(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import AdapterRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        @implementer(ifoo)
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerAdapter(_Factory, (ibar,), name=_name, info=_info)
+        self.failUnless(comp.adapters._adapters[1][ibar][ifoo][_name]
+                        is _Factory)
+        self.assertEqual(comp._adapter_registrations[(ibar,), ifoo, _name],
+                         (_Factory, _info))
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, AdapterRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_registerAdapter_no_required_available(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.registerAdapter, _Factory,
+                          provided=ifoo, name=_name, info=_info)
+
+    def test_registerAdapter_w_invalid_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.registerAdapter, _Factory,
+                          ibar, provided=ifoo, name=_name, info=_info)
+
+    def test_registerAdapter_w_required_containing_None(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interface import Interface
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import AdapterRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _name = _u('name')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerAdapter(_Factory, [None], provided=ifoo,
+                                 name=_name, info=_info)
+        self.failUnless(comp.adapters._adapters[1][Interface][ifoo][_name]
+                        is _Factory)
+        self.assertEqual(comp._adapter_registrations[(Interface,), ifoo, _name],
+                         (_Factory, _info))
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, AdapterRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (Interface,))
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_registerAdapter_w_required_containing_class(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        from zope.interface.declarations import implementedBy
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import AdapterRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        @implementer(ibar)
+        class _Context(object):
+            pass
+        _ctx_impl = implementedBy(_Context)
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerAdapter(_Factory, [_Context], provided=ifoo,
+                                 name=_name, info=_info)
+        self.failUnless(comp.adapters._adapters[1][_ctx_impl][ifoo][_name]
+                        is _Factory)
+        self.assertEqual(comp._adapter_registrations[(_ctx_impl,), ifoo, _name],
+                         (_Factory, _info))
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, AdapterRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (_ctx_impl,))
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_registerAdapter_w_required_containing_junk(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.registerAdapter, _Factory, [object()],
+                          provided=ifoo, name=_name, info=_info)
+
+    def test_registerAdapter_wo_explicit_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import AdapterRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        class _Factory(object):
+            __component_adapts__ = (ibar,)
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerAdapter(_Factory, provided=ifoo, name=_name,
+                                 info=_info)
+        self.failUnless(comp.adapters._adapters[1][ibar][ifoo][_name]
+                        is _Factory)
+        self.assertEqual(comp._adapter_registrations[(ibar,), ifoo, _name],
+                         (_Factory, _info))
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, AdapterRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.failUnless(event.object.name is _name)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_registerAdapter_wo_event(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name = _u('name')
+        _to_reg = object()
+        def _factory(context):
+            return _to_reg
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerAdapter(_factory, (ibar,), ifoo, _name, _info,
+                                 event=False)
+        self.assertEqual(len(_events), 0)
+
+    def test_unregisterAdapter_neither_factory_nor_provided(self):
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterAdapter,
+                          factory=None, provided=None)
+
+    def test_unregisterAdapter_neither_factory_nor_required(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterAdapter,
+                          factory=None, provided=ifoo, required=None)
+
+    def test_unregisterAdapter_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterAdapter(_Factory, (ibar,), ifoo)
+        self.failIf(unreg)
+
+    def test_unregisterAdapter_hit_w_explicit_provided_and_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import AdapterRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory, (ibar,), ifoo)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterAdapter(_Factory, (ibar,), ifoo)
+        self.failUnless(unreg)
+        self.failIf(comp.adapters._adapters)
+        self.failIf(comp._adapter_registrations)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, AdapterRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, '')
+        self.assertEqual(event.object.info, '')
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_unregisterAdapter_wo_explicit_provided(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import AdapterRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        @implementer(ifoo)
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory, (ibar,), ifoo)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterAdapter(_Factory, (ibar,))
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, AdapterRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, '')
+        self.assertEqual(event.object.info, '')
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_unregisterAdapter_wo_explicit_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import AdapterRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            __component_adapts__ = (ibar,)
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory, (ibar,), ifoo)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterAdapter(_Factory, provided=ifoo)
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, AdapterRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, '')
+        self.assertEqual(event.object.info, '')
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_registeredAdapters_empty(self):
+        comp = self._makeOne()
+        self.assertEqual(list(comp.registeredAdapters()), [])
+
+    def test_registeredAdapters_notempty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        from zope.interface.registry import AdapterRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _name1 = _u('name1')
+        _name2 = _u('name2')
+        class _Factory(object):
+            def __init__(self, context):
+                pass
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory, (ibar,), ifoo, _name1, _info)
+        comp.registerAdapter(_Factory, (ibar,), ifoo, _name2, _info)
+        reg = sorted(comp.registeredAdapters(), key=lambda r: r.name)
+        self.assertEqual(len(reg), 2)
+        self.failUnless(isinstance(reg[0], AdapterRegistration))
+        self.failUnless(reg[0].registry is comp)
+        self.failUnless(reg[0].provided is ifoo)
+        self.assertEqual(reg[0].required, (ibar,))
+        self.failUnless(reg[0].name is _name1)
+        self.failUnless(reg[0].info is _info)
+        self.failUnless(reg[0].factory is _Factory)
+        self.failUnless(isinstance(reg[1], AdapterRegistration))
+        self.failUnless(reg[1].registry is comp)
+        self.failUnless(reg[1].provided is ifoo)
+        self.assertEqual(reg[1].required, (ibar,))
+        self.failUnless(reg[1].name is _name2)
+        self.failUnless(reg[1].info is _info)
+        self.failUnless(reg[1].factory is _Factory)
+
+    def test_queryAdapter_miss_no_default(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        _context = object()
+        self.failUnless(comp.queryAdapter(_context, ifoo) is None)
+
+    def test_queryAdapter_miss_w_default(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        _context = object()
+        _default = object()
+        self.failUnless(
+            comp.queryAdapter(_context, ifoo, default=_default) is _default)
+
+    def test_queryAdapter_hit(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            def __init__(self, context):
+                self.context = context
+        @implementer(ibar)
+        class _Context(object):
+            pass
+        _context = _Context()
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory, (ibar,), ifoo)
+        adapter = comp.queryAdapter(_context, ifoo)
+        self.failUnless(isinstance(adapter, _Factory))
+        self.failUnless(adapter.context is _context)
+
+    def test_getAdapter_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        from zope.interface.interfaces import ComponentLookupError
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        @implementer(ibar)
+        class _Context(object):
+            pass
+        _context = _Context()
+        comp = self._makeOne()
+        self.assertRaises(ComponentLookupError,
+                          comp.getAdapter, _context, ifoo)
+
+    def test_getAdapter_hit(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            def __init__(self, context):
+                self.context = context
+        @implementer(ibar)
+        class _Context(object):
+            pass
+        _context = _Context()
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory, (ibar,), ifoo)
+        adapter = comp.getAdapter(_context, ifoo)
+        self.failUnless(isinstance(adapter, _Factory))
+        self.failUnless(adapter.context is _context)
+
+    def test_queryMultiAdapter_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        ibaz = IFoo('IBaz')
+        @implementer(ibar)
+        class _Context1(object):
+            pass
+        @implementer(ibaz)
+        class _Context2(object):
+            pass
+        _context1 = _Context1()
+        _context2 = _Context2()
+        comp = self._makeOne()
+        self.assertEqual(comp.queryMultiAdapter((_context1, _context2), ifoo),
+                         None)
+
+    def test_queryMultiAdapter_miss_w_default(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        ibaz = IFoo('IBaz')
+        @implementer(ibar)
+        class _Context1(object):
+            pass
+        @implementer(ibaz)
+        class _Context2(object):
+            pass
+        _context1 = _Context1()
+        _context2 = _Context2()
+        _default = object()
+        comp = self._makeOne()
+        self.failUnless(
+            comp.queryMultiAdapter((_context1, _context2), ifoo,
+                                   default=_default) is _default)
+
+    def test_queryMultiAdapter_hit(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        ibaz = IFoo('IBaz')
+        @implementer(ibar)
+        class _Context1(object):
+            pass
+        @implementer(ibaz)
+        class _Context2(object):
+            pass
+        _context1 = _Context1()
+        _context2 = _Context2()
+        class _Factory(object):
+            def __init__(self, context1, context2):
+                self.context = context1, context2
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory, (ibar, ibaz), ifoo)
+        adapter = comp.queryMultiAdapter((_context1, _context2), ifoo)
+        self.failUnless(isinstance(adapter, _Factory))
+        self.assertEqual(adapter.context, (_context1, _context2))
+
+    def test_getMultiAdapter_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        from zope.interface.interfaces import ComponentLookupError
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        ibaz = IFoo('IBaz')
+        @implementer(ibar)
+        class _Context1(object):
+            pass
+        @implementer(ibaz)
+        class _Context2(object):
+            pass
+        _context1 = _Context1()
+        _context2 = _Context2()
+        comp = self._makeOne()
+        self.assertRaises(ComponentLookupError,
+                          comp.getMultiAdapter, (_context1, _context2), ifoo)
+
+    def test_getMultiAdapter_hit(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        ibaz = IFoo('IBaz')
+        @implementer(ibar)
+        class _Context1(object):
+            pass
+        @implementer(ibaz)
+        class _Context2(object):
+            pass
+        _context1 = _Context1()
+        _context2 = _Context2()
+        class _Factory(object):
+            def __init__(self, context1, context2):
+                self.context = context1, context2
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory, (ibar, ibaz), ifoo)
+        adapter = comp.getMultiAdapter((_context1, _context2), ifoo)
+        self.failUnless(isinstance(adapter, _Factory))
+        self.assertEqual(adapter.context, (_context1, _context2))
+
+    def test_getAdapters_empty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        ibaz = IFoo('IBaz')
+        @implementer(ibar)
+        class _Context1(object):
+            pass
+        @implementer(ibaz)
+        class _Context2(object):
+            pass
+        _context1 = _Context1()
+        _context2 = _Context2()
+        comp = self._makeOne()
+        self.assertEqual(
+            list(comp.getAdapters((_context1, _context2), ifoo)), [])
+
+    def test_getAdapters_non_empty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        ibaz = IFoo('IBaz')
+        @implementer(ibar)
+        class _Context1(object):
+            pass
+        @implementer(ibaz)
+        class _Context2(object):
+            pass
+        _context1 = _Context1()
+        _context2 = _Context2()
+        class _Factory1(object):
+            def __init__(self, context1, context2):
+                self.context = context1, context2
+        class _Factory2(object):
+            def __init__(self, context1, context2):
+                self.context = context1, context2
+        _name1 = _u('name1')
+        _name2 = _u('name2')
+        comp = self._makeOne()
+        comp.registerAdapter(_Factory1, (ibar, ibaz), ifoo, name=_name1)
+        comp.registerAdapter(_Factory2, (ibar, ibaz), ifoo, name=_name2)
+        found = sorted(comp.getAdapters((_context1, _context2), ifoo))
+        self.assertEqual(len(found), 2)
+        self.assertEqual(found[0][0], _name1)
+        self.failUnless(isinstance(found[0][1], _Factory1))
+        self.assertEqual(found[1][0], _name2)
+        self.failUnless(isinstance(found[1][1], _Factory2))
+
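+    # Subscription adapters: registered under a blank name only (a non-blank
+    # name raises TypeError), stored in comp.adapters._subscribers, and every
+    # matching factory is applied by subscribers().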
+    def test_registerSubscriptionAdapter_w_nonblank_name(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _name = _u('name')
+        _info = _u('info')
+        _to_reg = object()
+        def _factory(context):
+            return _to_reg
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.registerSubscriptionAdapter,
+                          _factory, (ibar,), ifoo, _name, _info)
+
+    def test_registerSubscriptionAdapter_w_explicit_provided_and_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import SubscriptionRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _blank = _u('')
+        _info = _u('info')
+        _to_reg = object()
+        def _factory(context):
+            return _to_reg
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerSubscriptionAdapter(_factory, (ibar,), ifoo,
+                                             info=_info)
+        reg = comp.adapters._subscribers[1][ibar][ifoo][_blank]
+        self.assertEqual(len(reg), 1)
+        self.failUnless(reg[0] is _factory)
+        self.assertEqual(comp._subscription_registrations,
+                         [((ibar,), ifoo, _blank, _factory, _info)])
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, SubscriptionRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, _blank)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _factory)
+
+    def test_registerSubscriptionAdapter_wo_explicit_provided(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import SubscriptionRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _blank = _u('')
+        _to_reg = object()
+        @implementer(ifoo)
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerSubscriptionAdapter(_Factory, (ibar,), info=_info)
+        reg = comp.adapters._subscribers[1][ibar][ifoo][_blank]
+        self.assertEqual(len(reg), 1)
+        self.failUnless(reg[0] is _Factory)
+        self.assertEqual(comp._subscription_registrations,
+                         [((ibar,), ifoo, _blank, _Factory, _info)])
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, SubscriptionRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, _blank)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_registerSubscriptionAdapter_wo_explicit_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import SubscriptionRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _blank = _u('')
+        class _Factory(object):
+            __component_adapts__ = (ibar,)
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerSubscriptionAdapter(
+                    _Factory, provided=ifoo, info=_info)
+        reg = comp.adapters._subscribers[1][ibar][ifoo][_blank]
+        self.assertEqual(len(reg), 1)
+        self.failUnless(reg[0] is _Factory)
+        self.assertEqual(comp._subscription_registrations,
+                         [((ibar,), ifoo, _blank, _Factory, _info)])
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, SubscriptionRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, _blank)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_registerSubscriptionAdapter_wo_event(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _blank = _u('')
+        _info = _u('info')
+        _to_reg = object()
+        def _factory(context):
+            return _to_reg
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerSubscriptionAdapter(_factory, (ibar,), ifoo,
+                                             info=_info, event=False)
+        self.assertEqual(len(_events), 0)
+
+    def test_registeredSubscriptionAdapters_empty(self):
+        comp = self._makeOne()
+        self.assertEqual(list(comp.registeredSubscriptionAdapters()), [])
+
+    def test_registeredSubscriptionAdapters_notempty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        from zope.interface.registry import SubscriptionRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _info = _u('info')
+        _blank = _u('')
+        class _Factory(object):
+            def __init__(self, context):
+                pass
+        comp = self._makeOne()
+        comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo, info=_info)
+        comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo, info=_info)
+        reg = list(comp.registeredSubscriptionAdapters())
+        self.assertEqual(len(reg), 2)
+        self.failUnless(isinstance(reg[0], SubscriptionRegistration))
+        self.failUnless(reg[0].registry is comp)
+        self.failUnless(reg[0].provided is ifoo)
+        self.assertEqual(reg[0].required, (ibar,))
+        self.assertEqual(reg[0].name, _blank)
+        self.failUnless(reg[0].info is _info)
+        self.failUnless(reg[0].factory is _Factory)
+        self.failUnless(isinstance(reg[1], SubscriptionRegistration))
+        self.failUnless(reg[1].registry is comp)
+        self.failUnless(reg[1].provided is ifoo)
+        self.assertEqual(reg[1].required, (ibar,))
+        self.assertEqual(reg[1].name, _blank)
+        self.failUnless(reg[1].info is _info)
+        self.failUnless(reg[1].factory is _Factory)
+
+    def test_unregisterSubscriptionAdapter_w_nonblank_name(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        _nonblank = _u('nonblank')
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter,
+                          required=ifoo, provided=ibar, name=_nonblank)
+
+    def test_unregisterSubscriptionAdapter_neither_factory_nor_provided(self):
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter,
+                          factory=None, provided=None)
+
+    def test_unregisterSubscriptionAdapter_neither_factory_nor_required(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterSubscriptionAdapter,
+                          factory=None, provided=ifoo, required=None)
+
+    def test_unregisterSubscriptionAdapter_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,), ifoo)
+        self.failIf(unreg)
+        self.failIf(_events)
+
+    def test_unregisterSubscriptionAdapter_hit_wo_factory(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import SubscriptionRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterSubscriptionAdapter(None, (ibar,), ifoo)
+        self.failUnless(unreg)
+        self.failIf(comp.adapters._subscribers)
+        self.failIf(comp._subscription_registrations)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, SubscriptionRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, '')
+        self.assertEqual(event.object.info, '')
+        self.failUnless(event.object.factory is None)
+
+    def test_unregisterSubscriptionAdapter_hit_w_factory(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import SubscriptionRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,), ifoo)
+        self.failUnless(unreg)
+        self.failIf(comp.adapters._subscribers)
+        self.failIf(comp._subscription_registrations)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, SubscriptionRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, '')
+        self.assertEqual(event.object.info, '')
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_unregisterSubscriptionAdapter_wo_explicit_provided(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import SubscriptionRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        @implementer(ifoo)
+        class _Factory(object):
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterSubscriptionAdapter(_Factory, (ibar,))
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, SubscriptionRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, '')
+        self.assertEqual(event.object.info, '')
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_unregisterSubscriptionAdapter_wo_explicit_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import SubscriptionRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            __component_adapts__ = (ibar,)
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterSubscriptionAdapter(_Factory, provided=ifoo)
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, SubscriptionRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.failUnless(event.object.provided is ifoo)
+        self.assertEqual(event.object.required, (ibar,))
+        self.assertEqual(event.object.name, '')
+        self.assertEqual(event.object.info, '')
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_subscribers_empty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        comp = self._makeOne()
+        @implementer(ibar)
+        class Bar(object):
+            pass
+        bar = Bar()
+        self.assertEqual(list(comp.subscribers((bar,), ifoo)), [])
+
+    def test_subscribers_non_empty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Factory(object):
+            __component_adapts__ = (ibar,)
+            def __init__(self, context):
+                self._context = context
+        class _Derived(_Factory):
+            pass
+        comp = self._makeOne()
+        comp.registerSubscriptionAdapter(_Factory, (ibar,), ifoo)
+        comp.registerSubscriptionAdapter(_Derived, (ibar,), ifoo)
+        @implementer(ibar)
+        class Bar(object):
+            pass
+        bar = Bar()
+        subscribers = comp.subscribers((bar,), ifoo)
+        def _klassname(x):
+            return x.__class__.__name__
+        subscribers = sorted(subscribers, key=_klassname)
+        self.assertEqual(len(subscribers), 2)
+        self.failUnless(isinstance(subscribers[0], _Derived))
+        self.failUnless(isinstance(subscribers[1], _Factory))
+
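+    # Handlers: registered as subscribers with a `provided` slot of None,
+    # accept only a blank name, and take `required` either explicitly or from
+    # the factory's __component_adapts__.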
+    def test_registerHandler_w_nonblank_name(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _nonblank = _u('nonblank')
+        comp = self._makeOne()
+        def _factory(context):
+            pass
+        self.assertRaises(TypeError, comp.registerHandler, _factory,
+                          required=ifoo, name=_nonblank)
+
+    def test_registerHandler_w_explicit_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import HandlerRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _blank = _u('')
+        _info = _u('info')
+        _to_reg = object()
+        def _factory(context):
+            return _to_reg
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerHandler(_factory, (ifoo,), info=_info)
+        reg = comp.adapters._subscribers[1][ifoo][None][_blank]
+        self.assertEqual(len(reg), 1)
+        self.failUnless(reg[0] is _factory)
+        self.assertEqual(comp._handler_registrations,
+                         [((ifoo,), _blank, _factory, _info)])
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, HandlerRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.assertEqual(event.object.required, (ifoo,))
+        self.assertEqual(event.object.name, _blank)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _factory)
+
+    def test_registerHandler_wo_explicit_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Registered
+        from zope.interface.registry import HandlerRegistration
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _info = _u('info')
+        _blank = _u('')
+        class _Factory(object):
+            __component_adapts__ = (ifoo,)
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            comp.registerHandler(_Factory, info=_info)
+        reg = comp.adapters._subscribers[1][ifoo][None][_blank]
+        self.assertEqual(len(reg), 1)
+        self.failUnless(reg[0] is _Factory)
+        self.assertEqual(comp._handler_registrations,
+                         [((ifoo,), _blank, _Factory, _info)])
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Registered))
+        self.failUnless(isinstance(event.object, HandlerRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.assertEqual(event.object.required, (ifoo,))
+        self.assertEqual(event.object.name, _blank)
+        self.failUnless(event.object.info is _info)
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_registeredHandlers_empty(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        comp = self._makeOne()
+        self.failIf(list(comp.registeredHandlers()))
+
+    def test_registeredHandlers_non_empty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.registry import HandlerRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        def _factory1(context):
+            pass
+        def _factory2(context):
+            pass
+        comp = self._makeOne()
+        comp.registerHandler(_factory1, (ifoo,))
+        comp.registerHandler(_factory2, (ifoo,))
+        def _factory_name(x):
+            return x.factory.__code__.co_name
+        subscribers = sorted(comp.registeredHandlers(), key=_factory_name)
+        self.assertEqual(len(subscribers), 2)
+        self.failUnless(isinstance(subscribers[0], HandlerRegistration))
+        self.assertEqual(subscribers[0].required, (ifoo,))
+        self.assertEqual(subscribers[0].name, '')
+        self.assertEqual(subscribers[0].factory, _factory1)
+        self.assertEqual(subscribers[0].info, '')
+        self.failUnless(isinstance(subscribers[1], HandlerRegistration))
+        self.assertEqual(subscribers[1].required, (ifoo,))
+        self.assertEqual(subscribers[1].name, '')
+        self.assertEqual(subscribers[1].factory, _factory2)
+        self.assertEqual(subscribers[1].info, '')
+
+    def test_unregisterHandler_w_nonblank_name(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _nonblank = _u('nonblank')
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterHandler,
+                          required=(ifoo,), name=_nonblank)
+
+    def test_unregisterHandler_neither_factory_nor_required(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        self.assertRaises(TypeError, comp.unregisterHandler)
+
+    def test_unregisterHandler_miss(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        unreg = comp.unregisterHandler(required=(ifoo,))
+        self.failIf(unreg)
+
+    def test_unregisterHandler_hit_w_factory_and_explicit_provided(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import HandlerRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _to_reg = object()
+        def _factory(context):
+            return _to_reg
+        comp = self._makeOne()
+        comp.registerHandler(_factory, (ifoo,))
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterHandler(_factory, (ifoo,))
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, HandlerRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.assertEqual(event.object.required, (ifoo,))
+        self.assertEqual(event.object.name, '')
+        self.failUnless(event.object.factory is _factory)
+
+    def test_unregisterHandler_hit_w_only_explicit_provided(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import HandlerRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _to_reg = object()
+        def _factory(context):
+            return _to_reg
+        comp = self._makeOne()
+        comp.registerHandler(_factory, (ifoo,))
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterHandler(required=(ifoo,))
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, HandlerRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.assertEqual(event.object.required, (ifoo,))
+        self.assertEqual(event.object.name, '')
+        self.failUnless(event.object.factory is None)
+
+    def test_unregisterHandler_wo_explicit_required(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.interfaces import Unregistered
+        from zope.interface.registry import HandlerRegistration
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        class _Factory(object):
+            __component_adapts__ = (ifoo,)
+            def __init__(self, context):
+                self._context = context
+        comp = self._makeOne()
+        comp.registerHandler(_Factory)
+        _monkey, _events = self._wrapEvents()
+        with _monkey:
+            unreg = comp.unregisterHandler(_Factory)
+        self.failUnless(unreg)
+        self.assertEqual(len(_events), 1)
+        args, kw = _events[0]
+        event, = args
+        self.assertEqual(kw, {})
+        self.failUnless(isinstance(event, Unregistered))
+        self.failUnless(isinstance(event.object, HandlerRegistration))
+        self.failUnless(event.object.registry is comp)
+        self.assertEqual(event.object.required, (ifoo,))
+        self.assertEqual(event.object.name, '')
+        self.assertEqual(event.object.info, '')
+        self.failUnless(event.object.factory is _Factory)
+
+    def test_handle_empty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        comp = self._makeOne()
+        @implementer(ifoo)
+        class Bar(object):
+            pass
+        bar = Bar()
+        comp.handle(bar) # doesn't raise
+
+    def test_handle_non_empty(self):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface.declarations import implementer
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        _called_1 = []
+        def _factory_1(context):
+            _called_1.append(context)
+        _called_2 = []
+        def _factory_2(context):
+            _called_2.append(context)
+        comp = self._makeOne()
+        comp.registerHandler(_factory_1, (ifoo,))
+        comp.registerHandler(_factory_2, (ifoo,))
+        @implementer(ifoo)
+        class Bar(object):
+            pass
+        bar = Bar()
+        comp.handle(bar)
+        self.assertEqual(_called_1, [bar])
+        self.assertEqual(_called_2, [bar])
+
+
+# Test _getUtilityProvided, _getAdapterProvided, _getAdapterRequired via their
+# callers (Component.registerUtility, Component.registerAdapter).
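+#
+# A minimal, illustrative sketch of that indirect coverage (not itself part of
+# the test module; `_Util` and `util` are made-up names, while `comp`, `ifoo`
+# and `implementer` stand for the fixtures used throughout the tests above):
+#
+#     @implementer(ifoo)
+#     class _Util(object):
+#         pass
+#     util = _Util()
+#     comp.registerUtility(util)            # no `provided`: it is derived from
+#                                           # the @implementer declaration via
+#                                           # _getUtilityProvided
+#     assert comp.getUtility(ifoo) is util  # registered under the derived interface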
+
+
+class UtilityRegistrationTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.registry import UtilityRegistration
+        return UtilityRegistration
+
+    def _makeOne(self, component=None, factory=None):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        class _Registry(object):
+            def __repr__(self):
+                return '_REGISTRY'
+        registry = _Registry()
+        name = _u('name')
+        doc = 'DOCSTRING'
+        klass = self._getTargetClass()
+        return (klass(registry, ifoo, name, component, doc, factory),
+                registry,
+                name,
+               )
+
+    def test_class_conforms_to_IUtilityRegistration(self):
+        from zope.interface.verify import verifyClass
+        from zope.interface.interfaces import IUtilityRegistration
+        verifyClass(IUtilityRegistration, self._getTargetClass())
+
+    def test_instance_conforms_to_IUtilityRegistration(self):
+        from zope.interface.verify import verifyObject
+        from zope.interface.interfaces import IUtilityRegistration
+        ur, _, _ =  self._makeOne()
+        verifyObject(IUtilityRegistration, ur)
+
+    def test___repr__(self):
+        class _Component(object):
+            __name__ = 'TEST'
+        _component = _Component()
+        ur, _registry, _name = self._makeOne(_component)
+        self.assertEqual(repr(ur),
+            "UtilityRegistration(_REGISTRY, IFoo, %r, TEST, None, 'DOCSTRING')"
+                            % (_name))
+
+    def test___repr___provided_wo_name(self):
+        class _Component(object):
+            def __repr__(self):
+                return 'TEST'
+        _component = _Component()
+        ur, _registry, _name = self._makeOne(_component)
+        ur.provided = object()
+        self.assertEqual(repr(ur),
+            "UtilityRegistration(_REGISTRY, None, %r, TEST, None, 'DOCSTRING')"
+                            % (_name))
+
+    def test___repr___component_wo_name(self):
+        class _Component(object):
+            def __repr__(self):
+                return 'TEST'
+        _component = _Component()
+        ur, _registry, _name = self._makeOne(_component)
+        ur.provided = object()
+        self.assertEqual(repr(ur),
+            "UtilityRegistration(_REGISTRY, None, %r, TEST, None, 'DOCSTRING')"
+                            % (_name))
+
+    def test___hash__(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        self.assertEqual(ur.__hash__(), id(ur))
+
+    def test___eq___identity(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        self.failUnless(ur == ur)
+
+    def test___eq___hit(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component)
+        self.failUnless(ur == ur2)
+
+    def test___eq___miss(self):
+        _component = object()
+        _component2 = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component2)
+        self.failIf(ur == ur2)
+
+    def test___ne___identity(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        self.failIf(ur != ur)
+
+    def test___ne___hit(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component)
+        self.failIf(ur != ur2)
+
+    def test___ne___miss(self):
+        _component = object()
+        _component2 = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component2)
+        self.failUnless(ur != ur2)
+
+    def test___lt___identity(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        self.failIf(ur < ur)
+
+    def test___lt___hit(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component)
+        self.failIf(ur < ur2)
+
+    def test___lt___miss(self):
+        _component = object()
+        _component2 = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component2)
+        ur2.name = _name + '2'
+        self.failUnless(ur < ur2)
+
+    def test___le___identity(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        self.failUnless(ur <= ur)
+
+    def test___le___hit(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component)
+        self.failUnless(ur <= ur2)
+
+    def test___le___miss(self):
+        _component = object()
+        _component2 = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component2)
+        ur2.name = _name + '2'
+        self.failUnless(ur <= ur2)
+
+    def test___gt___identity(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        self.failIf(ur > ur)
+
+    def test___gt___hit(self):
+        _component = object()
+        _component2 = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component2)
+        ur2.name = _name + '2'
+        self.failUnless(ur2 > ur)
+
+    def test___gt___miss(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component)
+        self.failIf(ur2 > ur)
+
+    def test___ge___identity(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        self.failUnless(ur >= ur)
+
+    def test___ge___miss(self):
+        _component = object()
+        _component2 = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component2)
+        ur2.name = _name + '2'
+        self.failIf(ur >= ur2)
+
+    def test___ge___hit(self):
+        _component = object()
+        ur, _registry, _name = self._makeOne(_component)
+        ur2, _, _ = self._makeOne(_component)
+        ur2.name = _name + '2'
+        self.failUnless(ur2 >= ur)
+
+
+class AdapterRegistrationTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.registry import AdapterRegistration
+        return AdapterRegistration
+
+    def _makeOne(self, component=None):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Registry(object):
+            def __repr__(self):
+                return '_REGISTRY'
+        registry = _Registry()
+        name = _u('name')
+        doc = 'DOCSTRING'
+        klass = self._getTargetClass()
+        return (klass(registry, (ibar,), ifoo, name, component, doc),
+                registry,
+                name,
+               )
+
+    def test_class_conforms_to_IAdapterRegistration(self):
+        from zope.interface.verify import verifyClass
+        from zope.interface.interfaces import IAdapterRegistration
+        verifyClass(IAdapterRegistration, self._getTargetClass())
+
+    def test_instance_conforms_to_IAdapterRegistration(self):
+        from zope.interface.verify import verifyObject
+        from zope.interface.interfaces import IAdapterRegistration
+        ar, _, _ =  self._makeOne()
+        verifyObject(IAdapterRegistration, ar)
+
+    def test___repr__(self):
+        class _Component(object):
+            __name__ = 'TEST'
+        _component = _Component()
+        ar, _registry, _name = self._makeOne(_component)
+        self.assertEqual(repr(ar),
+            ("AdapterRegistration(_REGISTRY, [IBar], IFoo, %r, TEST, "
+           + "'DOCSTRING')") % (_name))
+
+    def test___repr___provided_wo_name(self):
+        class _Component(object):
+            def __repr__(self):
+                return 'TEST'
+        _component = _Component()
+        ar, _registry, _name = self._makeOne(_component)
+        ar.provided = object()
+        self.assertEqual(repr(ar),
+            ("AdapterRegistration(_REGISTRY, [IBar], None, %r, TEST, "
+           + "'DOCSTRING')") % (_name))
+
+    def test___repr___component_wo_name(self):
+        class _Component(object):
+            def __repr__(self):
+                return 'TEST'
+        _component = _Component()
+        ar, _registry, _name = self._makeOne(_component)
+        ar.provided = object()
+        self.assertEqual(repr(ar),
+            ("AdapterRegistration(_REGISTRY, [IBar], None, %r, TEST, "
+           + "'DOCSTRING')") % (_name))
+
+    def test___hash__(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        self.assertEqual(ar.__hash__(), id(ar))
+
+    def test___eq___identity(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        self.failUnless(ar == ar)
+
+    def test___eq___hit(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component)
+        self.failUnless(ar == ar2)
+
+    def test___eq___miss(self):
+        _component = object()
+        _component2 = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component2)
+        self.failIf(ar == ar2)
+
+    def test___ne___identity(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        self.failIf(ar != ar)
+
+    def test___ne___miss(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component)
+        self.failIf(ar != ar2)
+
+    def test___ne___hit_component(self):
+        _component = object()
+        _component2 = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component2)
+        self.failUnless(ar != ar2)
+
+    def test___ne___hit_provided(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ibaz = IFoo('IBaz')
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component)
+        ar2.provided = ibaz
+        self.failUnless(ar != ar2)
+
+    def test___ne___hit_required(self):
+        from zope.interface.declarations import InterfaceClass
+        class IFoo(InterfaceClass):
+            pass
+        ibaz = IFoo('IBaz')
+        _component = object()
+        _component2 = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component2)
+        ar2.required = (ibaz,)
+        self.failUnless(ar != ar2)
+
+    def test___lt___identity(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        self.failIf(ar < ar)
+
+    def test___lt___hit(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component)
+        self.failIf(ar < ar2)
+
+    def test___lt___miss(self):
+        _component = object()
+        _component2 = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component2)
+        ar2.name = _name + '2'
+        self.failUnless(ar < ar2)
+
+    def test___le___identity(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        self.failUnless(ar <= ar)
+
+    def test___le___hit(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component)
+        self.failUnless(ar <= ar2)
+
+    def test___le___miss(self):
+        _component = object()
+        _component2 = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component2)
+        ar2.name = _name + '2'
+        self.failUnless(ar <= ar2)
+
+    def test___gt___identity(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        self.failIf(ar > ar)
+
+    def test___gt___hit(self):
+        _component = object()
+        _component2 = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component2)
+        ar2.name = _name + '2'
+        self.failUnless(ar2 > ar)
+
+    def test___gt___miss(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component)
+        self.failIf(ar2 > ar)
+
+    def test___ge___identity(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        self.failUnless(ar >= ar)
+
+    def test___ge___miss(self):
+        _component = object()
+        _component2 = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component2)
+        ar2.name = _name + '2'
+        self.failIf(ar >= ar2)
+
+    def test___ge___hit(self):
+        _component = object()
+        ar, _registry, _name = self._makeOne(_component)
+        ar2, _, _ = self._makeOne(_component)
+        ar2.name = _name + '2'
+        self.failUnless(ar2 >= ar)
+
+
+class SubscriptionRegistrationTests(unittest.TestCase):
+
+    def _getTargetClass(self):
+        from zope.interface.registry import SubscriptionRegistration
+        return SubscriptionRegistration
+
+    def _makeOne(self, component=None):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        ibar = IFoo('IBar')
+        class _Registry(object):
+            def __repr__(self):
+                return '_REGISTRY'
+        registry = _Registry()
+        name = _u('name')
+        doc = 'DOCSTRING'
+        klass = self._getTargetClass()
+        return (klass(registry, (ibar,), ifoo, name, component, doc),
+                registry,
+                name,
+               )
+
+    def test_class_conforms_to_ISubscriptionAdapterRegistration(self):
+        from zope.interface.verify import verifyClass
+        from zope.interface.interfaces import ISubscriptionAdapterRegistration
+        verifyClass(ISubscriptionAdapterRegistration, self._getTargetClass())
+
+    def test_instance_conforms_to_ISubscriptionAdapterRegistration(self):
+        from zope.interface.verify import verifyObject
+        from zope.interface.interfaces import ISubscriptionAdapterRegistration
+        sar, _, _ =  self._makeOne()
+        verifyObject(ISubscriptionAdapterRegistration, sar)
+
+
+class HandlerRegistrationTests(_SilencePy3Deprecations):
+
+    def _getTargetClass(self):
+        from zope.interface.registry import HandlerRegistration
+        return HandlerRegistration
+
+    def _makeOne(self, component=None):
+        from zope.interface.declarations import InterfaceClass
+        from zope.interface._compat import _u
+        class IFoo(InterfaceClass):
+            pass
+        ifoo = IFoo('IFoo')
+        class _Registry(object):
+            def __repr__(self):
+                return '_REGISTRY'
+        registry = _Registry()
+        name = _u('name')
+        doc = 'DOCSTRING'
+        klass = self._getTargetClass()
+        return (klass(registry, (ifoo,), name, component, doc),
+                registry,
+                name,
+               )
+
+    def test_class_conforms_to_IHandlerRegistration(self):
+        from zope.interface.verify import verifyClass
+        from zope.interface.interfaces import IHandlerRegistration
+        verifyClass(IHandlerRegistration, self._getTargetClass())
+
+    def test_instance_conforms_to_IHandlerRegistration(self):
+        from zope.interface.verify import verifyObject
+        from zope.interface.interfaces import IHandlerRegistration
+        hr, _, _ =  self._makeOne()
+        verifyObject(IHandlerRegistration, hr)
+
+    def test_properties(self):
+        def _factory(context):
+            pass
+        hr, _, _ =  self._makeOne(_factory)
+        self.failUnless(hr.handler is _factory)
+        self.failUnless(hr.factory is hr.handler)
+        self.failUnless(hr.provided is None)
+
+    def test___repr___factory_w_name(self):
+        class _Factory(object):
+            __name__ = 'TEST'
+        hr, _registry, _name =  self._makeOne(_Factory())
+        self.assertEqual(repr(hr),
+            ("HandlerRegistration(_REGISTRY, [IFoo], %r, TEST, "
+           + "'DOCSTRING')") % (_name))
+
+    def test___repr___factory_wo_name(self):
+        class _Factory(object):
+            def __repr__(self):
+                return 'TEST'
+        hr, _registry, _name =  self._makeOne(_Factory())
+        self.assertEqual(repr(hr),
+            ("HandlerRegistration(_REGISTRY, [IFoo], %r, TEST, "
+           + "'DOCSTRING')") % (_name))
+
+
+class _Monkey(object):
+    # Context manager for temporarily replacing attributes of a module within a test.
+    def __init__(self, module, **kw):
+        self.module = module
+        self.to_restore = dict([(key, getattr(module, key)) for key in kw])
+        for key, value in kw.items():
+            setattr(module, key, value)
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        for key, value in self.to_restore.items():
+            setattr(self.module, key, value)
+
+def test_suite():
+    return unittest.TestSuite((
+            unittest.makeSuite(ComponentsTests),
+            unittest.makeSuite(UtilityRegistrationTests),
+            unittest.makeSuite(AdapterRegistrationTests),
+            unittest.makeSuite(SubscriptionRegistrationTests),
+            unittest.makeSuite(HandlerRegistrationTests),
+        ))
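
The ComponentsTests above exercise the zope.interface.registry.Components API, and the comment before UtilityRegistrationTests notes that the private helpers are covered through their callers (Component.registerUtility, Component.registerAdapter). As a minimal usage sketch of that API, assuming only the public methods seen in these tests (registerUtility with its queryUtility lookup counterpart, registerHandler/handle); the IGreeter/Greeter names are illustrative and not part of the imported sources:

    from zope.interface import Interface, implementer
    from zope.interface.registry import Components

    class IGreeter(Interface):
        def greet():
            "Return a greeting."

    @implementer(IGreeter)
    class Greeter(object):
        def greet(self):
            return 'hello'

    registry = Components('example')

    # Utilities are registered against the interface they provide and are
    # looked up again by that interface (plus an optional name).
    registry.registerUtility(Greeter(), IGreeter)
    assert registry.queryUtility(IGreeter).greet() == 'hello'

    # Handlers are registered for a tuple of required interfaces; every
    # matching handler is called when an object is passed to handle().
    seen = []
    def _on_greeter(obj):
        seen.append(obj)
    registry.registerHandler(_on_greeter, (IGreeter,))
    registry.handle(Greeter())
    assert len(seen) == 1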
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_sorting.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_sorting.py
new file mode 100644
index 0000000..af60f88
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_sorting.py
@@ -0,0 +1,55 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Test interface sorting
+"""
+
+from unittest import TestCase, TestSuite, main, makeSuite
+
+from zope.interface import Interface
+
+class I1(Interface): pass
+class I2(I1): pass
+class I3(I1): pass
+class I4(Interface): pass
+class I5(I4): pass
+class I6(I2): pass
+
+
+class Test(TestCase):
+
+    def test(self):
+        l = [I1, I3, I5, I6, I4, I2]
+        l.sort()
+        self.assertEqual(l, [I1, I2, I3, I4, I5, I6])
+
+    def test_w_None(self):
+        l = [I1, None, I3, I5, I6, I4, I2]
+        l.sort()
+        self.assertEqual(l, [I1, I2, I3, I4, I5, I6, None])
+
+    def test_w_equal_names(self):
+        # interfaces with equal names but different modules should sort by
+        # module name
+        from zope.interface.tests.m1 import I1 as m1_I1
+        l = [I1, m1_I1]
+        l.sort()
+        self.assertEqual(l, [m1_I1, I1])
+
+def test_suite():
+    return TestSuite((
+        makeSuite(Test),
+        ))
+
+if __name__=='__main__':
+    main(defaultTest='test_suite')
diff --git a/ThirdParty/ZopeInterface/zope/interface/tests/test_verify.py b/ThirdParty/ZopeInterface/zope/interface/tests/test_verify.py
new file mode 100644
index 0000000..83b3ada
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/tests/test_verify.py
@@ -0,0 +1,548 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+""" zope.interface.verify unit tests
+"""
+import unittest
+
+
+class Test_verifyClass(unittest.TestCase):
+
+    def _callFUT(self, iface, klass):
+        from zope.interface.verify import verifyClass
+        return verifyClass(iface, klass)
+
+    def test_class_doesnt_implement(self):
+        from zope.interface import Interface
+        from zope.interface.exceptions import DoesNotImplement
+
+        class ICurrent(Interface):
+            pass
+
+        class Current(object):
+            pass
+
+        self.assertRaises(DoesNotImplement, self._callFUT, ICurrent, Current)
+
+    def test_class_doesnt_implement_but_classImplements_later(self):
+        from zope.interface import Interface
+        from zope.interface import classImplements
+
+        class ICurrent(Interface):
+            pass
+
+        class Current(object):
+            pass
+
+        classImplements(Current, ICurrent)
+
+        self._callFUT(ICurrent, Current)
+
+    def test_class_doesnt_have_required_method_simple(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenImplementation
+
+        class ICurrent(Interface):
+            def method(): pass
+
+        @implementer(ICurrent)
+        class Current(object):
+            pass
+
+        self.assertRaises(BrokenImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_class_has_required_method_simple(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+            def method(): pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_class_doesnt_have_required_method_derived(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenImplementation
+
+        class IBase(Interface):
+            def method():
+                pass
+
+        class IDerived(IBase):
+            pass
+
+        @implementer(IDerived)
+        class Current(object):
+            pass
+
+        self.assertRaises(BrokenImplementation,
+                          self._callFUT, IDerived, Current)
+
+    def test_class_has_required_method_derived(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class IBase(Interface):
+            def method():
+                pass
+
+        class IDerived(IBase):
+            pass
+
+        @implementer(IDerived)
+        class Current(object):
+
+            def method(self):
+                pass
+
+        self._callFUT(IDerived, Current)
+
+    def test_method_takes_wrong_arg_names_but_OK(self):
+        # We no longer require names to match.
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+
+            def method(a):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, b):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_method_takes_not_enough_args(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+
+            def method(a):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self):
+                pass
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_method_doesnt_take_required_starargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+
+            def method(*args):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self):
+                pass
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_method_doesnt_take_required_only_kwargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+
+            def method(**kw):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self):
+                pass
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_method_takes_extra_arg(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+
+            def method(a):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, a, b):
+                pass
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_method_takes_extra_arg_with_default(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+
+            def method(a):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, a, b=None):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_method_takes_only_positional_args(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+
+            def method(a):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, *args):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_method_takes_only_kwargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+
+            def method(a):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, **kw):
+                pass
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_method_takes_extra_starargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+
+            def method(a):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, a, *args):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_method_takes_extra_starargs_and_kwargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+
+            def method(a):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, a, *args, **kw):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_method_doesnt_take_required_positional_and_starargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+
+            def method(a, *args):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, a):
+                pass
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_method_takes_required_positional_and_starargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+
+            def method(a, *args):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, a, *args):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_method_takes_only_starargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+
+            def method(a, *args):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, *args):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_method_takes_required_kwargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+
+            def method(**kwargs):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, **kw):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_method_takes_positional_plus_required_starargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+
+            def method(*args):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, a, *args):
+                pass
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+
+    def test_method_doesnt_take_required_kwargs(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+
+            def method(**kwargs):
+                pass
+
+        @implementer(ICurrent)
+        class Current(object):
+
+            def method(self, a):
+                pass
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+
+    def test_class_has_method_for_iface_attr(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+            attr = Attribute("The foo Attribute")
+
+        @implementer(ICurrent)
+        class Current:
+
+            def attr(self):
+                pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_class_has_nonmethod_for_method(self):
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenMethodImplementation
+
+        class ICurrent(Interface):
+            def method():
+                pass
+
+        @implementer(ICurrent)
+        class Current:
+            method = 1
+
+        self.assertRaises(BrokenMethodImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_class_has_attribute_for_attribute(self):
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+            attr = Attribute("The foo Attribute")
+
+        @implementer(ICurrent)
+        class Current:
+
+            attr = 1
+
+        self._callFUT(ICurrent, Current)
+
+    def test_class_misses_attribute_for_attribute(self):
+        # This check *passes* for verifyClass
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class ICurrent(Interface):
+            attr = Attribute("The foo Attribute")
+
+        @implementer(ICurrent)
+        class Current:
+            pass
+
+        self._callFUT(ICurrent, Current)
+
+    def test_w_callable_non_func_method(self):
+        from zope.interface.interface import Method
+        from zope.interface import Interface
+        from zope.interface import implementer
+
+        class QuasiMethod(Method):
+            def __call__(self, *args, **kw):
+                pass
+
+        class QuasiCallable(object):
+            def __call__(self, *args, **kw):
+                pass
+
+        class ICurrent(Interface):
+            attr = QuasiMethod('This is callable')
+
+        @implementer(ICurrent)
+        class Current:
+            attr = QuasiCallable()
+
+        self._callFUT(ICurrent, Current)
+
+class Test_verifyObject(Test_verifyClass):
+
+    def _callFUT(self, iface, target):
+        from zope.interface.verify import verifyObject
+        if isinstance(target, (type, type(OldSkool))):
+            target = target()
+        return verifyObject(iface, target)
+
+    def test_class_misses_attribute_for_attribute(self):
+        # This check *fails* for verifyObject
+        from zope.interface import Attribute
+        from zope.interface import Interface
+        from zope.interface import implementer
+        from zope.interface.exceptions import BrokenImplementation
+
+        class ICurrent(Interface):
+            attr = Attribute("The foo Attribute")
+
+        @implementer(ICurrent)
+        class Current:
+            pass
+
+        self.assertRaises(BrokenImplementation,
+                          self._callFUT, ICurrent, Current)
+
+    def test_module_hit(self):
+        from zope.interface.tests.idummy import IDummyModule
+        from zope.interface.tests import dummy
+
+        self._callFUT(IDummyModule, dummy)
+
+    def test_module_miss(self):
+        from zope.interface import Interface
+        from zope.interface.tests import dummy
+        from zope.interface.exceptions import DoesNotImplement
+
+        # same name, different object
+        class IDummyModule(Interface):
+            pass
+
+        self.assertRaises(DoesNotImplement,
+                          self._callFUT, IDummyModule, dummy)
+
+class OldSkool:
+    pass
+
+def test_suite():
+    #import doctest
+    return unittest.TestSuite((
+        unittest.makeSuite(Test_verifyClass),
+        unittest.makeSuite(Test_verifyObject),
+    #   This one needs to turn into just docs.
+    #doctest.DocFileSuite('../verify.txt',
+    #                     optionflags=doctest.NORMALIZE_WHITESPACE),
+    ))
diff --git a/ThirdParty/ZopeInterface/zope/interface/verify.py b/ThirdParty/ZopeInterface/zope/interface/verify.py
new file mode 100644
index 0000000..488bd88
--- /dev/null
+++ b/ThirdParty/ZopeInterface/zope/interface/verify.py
@@ -0,0 +1,116 @@
+##############################################################################
+#
+# Copyright (c) 2001, 2002 Zope Foundation and Contributors.
+# All Rights Reserved.
+#
+# This software is subject to the provisions of the Zope Public License,
+# Version 2.1 (ZPL).  A copy of the ZPL should accompany this distribution.
+# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
+# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
+# FOR A PARTICULAR PURPOSE.
+#
+##############################################################################
+"""Verify interface implementations
+"""
+from zope.interface.exceptions import BrokenImplementation, DoesNotImplement
+from zope.interface.exceptions import BrokenMethodImplementation
+from types import FunctionType, MethodType
+from zope.interface.interface import fromMethod, fromFunction, Method
+import sys
+
+# This will be monkey-patched when running under Zope 2, so leave this
+# here:
+MethodTypes = (MethodType, )
+
+
+def _verify(iface, candidate, tentative=0, vtype=None):
+    """Verify that 'candidate' might correctly implements 'iface'.
+
+    This involves:
+
+      o Making sure the candidate defines all the necessary methods
+
+      o Making sure the methods have the correct signature
+
+      o Making sure the candidate asserts that it implements the interface
+
+    Note that this isn't the same as verifying that the class does
+    implement the interface.
+
+    If optional tentative is true, suppress the "is implemented by" test.
+    """
+
+    if vtype == 'c':
+        tester = iface.implementedBy
+    else:
+        tester = iface.providedBy
+
+    if not tentative and not tester(candidate):
+        raise DoesNotImplement(iface)
+
+    # Here the `desc` is either an `Attribute` or `Method` instance
+    for name, desc in iface.namesAndDescriptions(1):
+        try:
+            attr = getattr(candidate, name)
+        except AttributeError:
+            if (not isinstance(desc, Method)) and vtype == 'c':
+                # We can't verify non-methods on classes, since the
+                # class may provide attrs in its __init__.
+                continue
+
+            raise BrokenImplementation(iface, name)
+
+        if not isinstance(desc, Method):
+            # If it's not a method, there's nothing else we can test
+            continue
+
+        if isinstance(attr, FunctionType):
+            if sys.version[0] == '3' and isinstance(candidate, type):
+                # This is an "unbound method" in Python 3.
+                meth = fromFunction(attr, iface, name=name,
+                                    imlevel=1) #pragma NO COVERAGE
+            else:
+                # Nope, just a normal function
+                meth = fromFunction(attr, iface, name=name)
+        elif (isinstance(attr, MethodTypes)
+              and type(attr.__func__) is FunctionType):
+            meth = fromMethod(attr, iface, name)
+        else:
+            if not callable(attr):
+                raise BrokenMethodImplementation(name, "Not a method")
+            # sigh, it's callable, but we don't know how to introspect it, so
+            # we have to give it a pass.
+            continue #pragma NO COVERAGE
+
+        # Make sure that the required and implemented method signatures are
+        # the same.
+        desc = desc.getSignatureInfo()
+        meth = meth.getSignatureInfo()
+
+        mess = _incompat(desc, meth)
+        if mess:
+            raise BrokenMethodImplementation(name, mess)
+
+    return True
+
+def verifyClass(iface, candidate, tentative=0):
+    return _verify(iface, candidate, tentative, vtype='c')
+
+def verifyObject(iface, candidate, tentative=0):
+    return _verify(iface, candidate, tentative, vtype='o')
+
+def _incompat(required, implemented):
+    #if (required['positional'] !=
+    #    implemented['positional'][:len(required['positional'])]
+    #    and implemented['kwargs'] is None):
+    #    return 'implementation has different argument names'
+    if len(implemented['required']) > len(required['required']):
+        return 'implementation requires too many arguments'
+    if ((len(implemented['positional']) < len(required['positional']))
+        and not implemented['varargs']):
+        return "implementation doesn't allow enough arguments"
+    if required['kwargs'] and not implemented['kwargs']:
+        return "implementation doesn't support keyword arguments"
+    if required['varargs'] and not implemented['varargs']:
+        return "implementation doesn't support variable arguments"
diff --git a/ThirdParty/alglib/CMakeLists.txt b/ThirdParty/alglib/CMakeLists.txt
index e47f1f7..86f121d 100644
--- a/ThirdParty/alglib/CMakeLists.txt
+++ b/ThirdParty/alglib/CMakeLists.txt
@@ -37,11 +37,11 @@ vtk_add_library( vtkalglib
 #
 
 # Grab install locations from VTK if we are building as part of VTK
-if ( VTK_INSTALL_INCLUDE_DIR_CM24 )
-  set( alglib_install_inc_dir ${VTK_INSTALL_INCLUDE_DIR_CM24} )
-else ( VTK_INSTALL_INCLUDE_DIR_CM24 )
-  set( alglib_install_inc_dir ${CMAKE_INSTALL_PREFIX}/include )
-endif ( VTK_INSTALL_INCLUDE_DIR_CM24 )
+if(VTK_INSTALL_INCLUDE_DIR)
+  set(alglib_install_inc_dir ${VTK_INSTALL_INCLUDE_DIR})
+else()
+  set(alglib_install_inc_dir ${CMAKE_INSTALL_PREFIX}/include)
+endif()
 
 # Header files:
 if ( NOT VTK_INSTALL_NO_DEVELOPMENT )
diff --git a/ThirdParty/exodusII/vtkexodusII/CMakeLists.txt b/ThirdParty/exodusII/vtkexodusII/CMakeLists.txt
index dc500da..26193d0 100644
--- a/ThirdParty/exodusII/vtkexodusII/CMakeLists.txt
+++ b/ThirdParty/exodusII/vtkexodusII/CMakeLists.txt
@@ -241,7 +241,7 @@ ENDIF(APPLE)
 
 
 VTK_ADD_LIBRARY(vtkexoIIc ${cbind_SRCS})
-TARGET_LINK_LIBRARIES(vtkexoIIc vtkNetCDF)
+TARGET_LINK_LIBRARIES(vtkexoIIc ${vtknetcdf_LIBRARIES})
 
 #INCLUDE (${CMAKE_ROOT}/Modules/Dart.cmake)
 
diff --git a/ThirdParty/expat/vtkexpat/CMakeLists.txt b/ThirdParty/expat/vtkexpat/CMakeLists.txt
index aef6a10..21746c8 100644
--- a/ThirdParty/expat/vtkexpat/CMakeLists.txt
+++ b/ThirdParty/expat/vtkexpat/CMakeLists.txt
@@ -27,6 +27,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VTKEXPAT_BINARY_DIR}/expatDllConfig.h
     ${VTKEXPAT_SOURCE_DIR}/expat.h
     ${VTKEXPAT_SOURCE_DIR}/vtk_expat_mangle.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkexpat
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkexpat
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/freetype/vtkfreetype/CMakeLists.txt b/ThirdParty/freetype/vtkfreetype/CMakeLists.txt
index 629480a..7616dc8 100644
--- a/ThirdParty/freetype/vtkfreetype/CMakeLists.txt
+++ b/ThirdParty/freetype/vtkfreetype/CMakeLists.txt
@@ -129,13 +129,13 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VTKFREETYPE_SOURCE_DIR}/include/vtk_freetype_mangle.h
     ${VTKFREETYPE_SOURCE_DIR}/include/vtk_ftmodule.h
     ${VTKFREETYPE_BINARY_DIR}/include/vtkFreeTypeConfig.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkfreetype/include
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkfreetype/include
     COMPONENT Development)
 
   file(GLOB freetype_headers 
     "${VTKFREETYPE_SOURCE_DIR}/include/freetype/*.h")
   INSTALL(FILES ${freetype_headers}
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkfreetype/include/freetype
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkfreetype/include/freetype
     COMPONENT Development)
 
   file(GLOB freetype_config_headers 
@@ -145,6 +145,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
   endif()
   INSTALL(FILES ${freetype_config_headers}
                 ${freetype_config_headers_include}
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkfreetype/include/freetype/config
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkfreetype/include/freetype/config
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/hdf5/CMakeLists.txt b/ThirdParty/hdf5/CMakeLists.txt
index fbb4942..48e16f9 100644
--- a/ThirdParty/hdf5/CMakeLists.txt
+++ b/ThirdParty/hdf5/CMakeLists.txt
@@ -21,9 +21,6 @@ set(HDF5_INSTALL_RUNTIME_DIR ${VTK_INSTALL_RUNTIME_DIR})
 set(HDF5_INSTALL_INCLUDE_DIR ${VTK_INSTALL_INCLUDE_DIR}/vtkhdf5)
 set(HDF5_INSTALL_NO_DEVELOPMENT ${VTK_INSTALL_NO_DEVELOPMENT})
 
-set(HDF5_INSTALL_BIN_DIR ${VTK_INSTALL_BIN_DIR_CM24})
-set(HDF5_INSTALL_LIB_DIR ${VTK_INSTALL_LIB_DIR_CM24})
-
 # Configure HDF5 privately so its options do not appear to the user.
 set(HDF5_ALLOW_EXTERNAL_SUPPORT OFF CACHE INTERNAL "Allow External Library Building")
 set(HDF5_BUILD_CPP_LIB OFF CACHE INTERNAL "Build HDF5 C++ Library")
diff --git a/ThirdParty/hdf5/vtkhdf5/CMakeLists.txt b/ThirdParty/hdf5/vtkhdf5/CMakeLists.txt
index 83c98c7..0cf7ebd 100644
--- a/ThirdParty/hdf5/vtkhdf5/CMakeLists.txt
+++ b/ThirdParty/hdf5/vtkhdf5/CMakeLists.txt
@@ -565,7 +565,7 @@ INSTALL (
     FILES
         ${PROJECT_BINARY_DIR}/H5pubconf.h
     DESTINATION
-        ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkhdf5
+        ${VTK_INSTALL_INCLUDE_DIR}/vtkhdf5
 )
 ENDIF (NOT VTK_INSTALL_NO_DEVELOPMENT)
 
diff --git a/ThirdParty/hdf5/vtkhdf5/c++/src/CMakeLists.txt b/ThirdParty/hdf5/vtkhdf5/c++/src/CMakeLists.txt
index abb074b..64edd60 100644
--- a/ThirdParty/hdf5/vtkhdf5/c++/src/CMakeLists.txt
+++ b/ThirdParty/hdf5/vtkhdf5/c++/src/CMakeLists.txt
@@ -94,7 +94,7 @@ INSTALL (
     FILES
         ${CPP_HDRS}
     DESTINATION
-        ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkhdf5/cpp
+        ${VTK_INSTALL_INCLUDE_DIR}/vtkhdf5/cpp
 )
 ENDIF (NOT VTK_INSTALL_NO_DEVELOPMENT)
 
@@ -107,9 +107,9 @@ IF (HDF5_EXPORTED_TARGETS AND NOT VTK_INSTALL_NO_LIBRARIES)
           ${HDF5_CPP_LIB_TARGET}
       EXPORT
           ${HDF5_EXPORTED_TARGETS}
-      RUNTIME DESTINATION ${VTK_INSTALL_BIN_DIR_CM24} COMPONENT RuntimeLibraries
-      LIBRARY DESTINATION ${VTK_INSTALL_LIB_DIR_CM24} COMPONENT RuntimeLibraries
-      ARCHIVE DESTINATION ${VTK_INSTALL_LIB_DIR_CM24} COMPONENT Development
+      RUNTIME DESTINATION ${VTK_INSTALL_RUNTIME_DIR} COMPONENT RuntimeLibraries
+      LIBRARY DESTINATION ${VTK_INSTALL_LIBRARY_DIR} COMPONENT RuntimeLibraries
+      ARCHIVE DESTINATION ${VTK_INSTALL_ARCHIVE_DIR} COMPONENT Development
   )
 ENDIF (HDF5_EXPORTED_TARGETS AND NOT VTK_INSTALL_NO_LIBRARIES)
 
diff --git a/ThirdParty/hdf5/vtkhdf5/hl/c++/src/CMakeLists.txt b/ThirdParty/hdf5/vtkhdf5/hl/c++/src/CMakeLists.txt
index be16558..71c6b27 100644
--- a/ThirdParty/hdf5/vtkhdf5/hl/c++/src/CMakeLists.txt
+++ b/ThirdParty/hdf5/vtkhdf5/hl/c++/src/CMakeLists.txt
@@ -25,7 +25,7 @@ INSTALL (
     FILES
         ${HDF5_HL_CPP_HDRS}
     DESTINATION
-        ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkhdf5/hl/cpp
+        ${VTK_INSTALL_INCLUDE_DIR}/vtkhdf5/hl/cpp
 )
 
 #-----------------------------------------------------------------------------
@@ -37,8 +37,8 @@ IF (HDF5_EXPORTED_TARGETS AND NOT VTK_INSTALL_NO_LIBRARIES)
           ${HDF5_HL_CPP_LIB_TARGET}
       EXPORT
           ${HDF5_EXPORTED_TARGETS}
-      RUNTIME DESTINATION ${VTK_INSTALL_BIN_DIR_CM24} COMPONENT RuntimeLibraries
-      LIBRARY DESTINATION ${VTK_INSTALL_LIB_DIR_CM24} COMPONENT RuntimeLibraries
-      ARCHIVE DESTINATION ${VTK_INSTALL_LIB_DIR_CM24} COMPONENT Development
+      RUNTIME DESTINATION ${VTK_INSTALL_RUNTIME_DIR} COMPONENT RuntimeLibraries
+      LIBRARY DESTINATION ${VTK_INSTALL_LIBRARY_DIR} COMPONENT RuntimeLibraries
+      ARCHIVE DESTINATION ${VTK_INSTALL_ARCHIVE_DIR} COMPONENT Development
   )
 ENDIF (HDF5_EXPORTED_TARGETS AND NOT VTK_INSTALL_NO_LIBRARIES)
diff --git a/ThirdParty/hdf5/vtkhdf5/src/CMakeLists.txt b/ThirdParty/hdf5/vtkhdf5/src/CMakeLists.txt
index d17459a..412ff67 100644
--- a/ThirdParty/hdf5/vtkhdf5/src/CMakeLists.txt
+++ b/ThirdParty/hdf5/vtkhdf5/src/CMakeLists.txt
@@ -654,7 +654,7 @@ INSTALL (
         ${H5_PUBLIC_HEADERS}
         ${H5_PRIVATE_HEADERS}
     DESTINATION
-        ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkhdf5
+        ${VTK_INSTALL_INCLUDE_DIR}/vtkhdf5
 )
 ENDIF (NOT VTK_INSTALL_NO_DEVELOPMENT)
 
diff --git a/ThirdParty/hdf5/vtkhdf5/src/H5FDfamily.c b/ThirdParty/hdf5/vtkhdf5/src/H5FDfamily.c
index 67bb107..7eef1ef 100644
--- a/ThirdParty/hdf5/vtkhdf5/src/H5FDfamily.c
+++ b/ThirdParty/hdf5/vtkhdf5/src/H5FDfamily.c
@@ -648,7 +648,7 @@ H5FD_family_sb_encode(H5FD_t *_file, char *name/*out*/, unsigned char *buf/*out*
 /*-------------------------------------------------------------------------
  * Function:	H5FD_family_sb_decode
  *
- * Purpose:	This function has 2 seperate purpose.  One is to decodes the
+ * Purpose:	This function has 2 separate purposes.  One is to decode the
  *              superblock information for this driver. The NAME argument is
  *              the eight-character (plus null termination) name stored in i
  *              the file.  The FILE argument is updated according to the
diff --git a/ThirdParty/hdf5/vtkhdf5/src/H5public.h b/ThirdParty/hdf5/vtkhdf5/src/H5public.h
index f5873bb..84283de 100644
--- a/ThirdParty/hdf5/vtkhdf5/src/H5public.h
+++ b/ThirdParty/hdf5/vtkhdf5/src/H5public.h
@@ -189,7 +189,7 @@ typedef signed long long	hssize_t;
 #elif H5_SIZEOF_HADDR_T ==H5_SIZEOF_LONG
 #   define H5_PRINTF_HADDR_FMT  "%lu"
 #elif H5_SIZEOF_HADDR_T ==H5_SIZEOF_LONG_LONG
-#   define H5_PRINTF_HADDR_FMT  "%"H5_PRINTF_LL_WIDTH"u"
+#   define H5_PRINTF_HADDR_FMT  "%" H5_PRINTF_LL_WIDTH "u"
 #else
 #   error "nothing appropriate for H5_PRINTF_HADDR_FMT"
 #endif
diff --git a/ThirdParty/jpeg/vtkjpeg/CMakeLists.txt b/ThirdParty/jpeg/vtkjpeg/CMakeLists.txt
index 62e4446..90915ec 100644
--- a/ThirdParty/jpeg/vtkjpeg/CMakeLists.txt
+++ b/ThirdParty/jpeg/vtkjpeg/CMakeLists.txt
@@ -53,6 +53,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VTKJPEG_SOURCE_DIR}/jconfig.h
     ${VTKJPEG_SOURCE_DIR}/jmorecfg.h
     ${VTKJPEG_SOURCE_DIR}/jerror.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkjpeg
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkjpeg
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/jsoncpp/CMakeLists.txt b/ThirdParty/jsoncpp/CMakeLists.txt
index 8da6a73..206fa40 100644
--- a/ThirdParty/jsoncpp/CMakeLists.txt
+++ b/ThirdParty/jsoncpp/CMakeLists.txt
@@ -1,32 +1 @@
-project(JSONCPP)
-
-set(vtkjsoncpp_THIRD_PARTY 1)
-set(vtkjsoncpp_LIBRARIES vtkjsoncpp)
-vtk_module_export_info()
-
-if(VTK_REQUIRE_LARGE_FILE_SUPPORT)
-  add_definitions(
-    -D_LARGEFILE_SOURCE
-    -D_LARGEFILE64_SOURCE
-    -D_LARGE_FILES
-    -D_FILE_OFFSET_BITS=64
-    )
-endif()
-
-set(JSONCPP_SOURCES
-  jsoncpp.cpp
-)
-
-include_directories(${JSONCPP_SOURCE_DIR} ${JSONCPP_BINARY_DIR})
-
-if (BUILD_SHARED_LIBS AND WIN32)
-  add_definitions(-DJSON_DLL_BUILD)
-endif()
-vtk_add_library(vtkjsoncpp ${JSONCPP_SOURCES})
-
-if(NOT VTK_INSTALL_NO_DEVELOPMENT)
-  install(DIRECTORY
-    ${JSONCPP_SOURCE_DIR}/json
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/JSONCPP
-    COMPONENT Development)
-endif()
+vtk_module_third_party(JsonCpp)
diff --git a/ThirdParty/jsoncpp/json/json.h b/ThirdParty/jsoncpp/json/json.h
deleted file mode 100644
index 568897a..0000000
--- a/ThirdParty/jsoncpp/json/json.h
+++ /dev/null
@@ -1,1855 +0,0 @@
-/// Json-cpp amalgated header (http://jsoncpp.sourceforge.net/).
-/// It is intented to be used with #include <json/json.h>
-
-// //////////////////////////////////////////////////////////////////////
-// Beginning of content of file: LICENSE
-// //////////////////////////////////////////////////////////////////////
-
-/*
-The JsonCpp library's source code, including accompanying documentation,
-tests and demonstration applications, are licensed under the following
-conditions...
-
-The author (Baptiste Lepilleur) explicitly disclaims copyright in all
-jurisdictions which recognize such a disclaimer. In such jurisdictions,
-this software is released into the Public Domain.
-
-In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
-2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
-released under the terms of the MIT License (see below).
-
-In jurisdictions which recognize Public Domain property, the user of this
-software may choose to accept it either as 1) Public Domain, 2) under the
-conditions of the MIT License (see below), or 3) under the terms of dual
-Public Domain/MIT License conditions described here, as they choose.
-
-The MIT License is about as close to Public Domain as a license can get, and is
-described in clear, concise terms at:
-
-   http://en.wikipedia.org/wiki/MIT_License
-
-The full text of the MIT License follows:
-
-========================================================================
-Copyright (c) 2007-2010 Baptiste Lepilleur
-
-Permission is hereby granted, free of charge, to any person
-obtaining a copy of this software and associated documentation
-files (the "Software"), to deal in the Software without
-restriction, including without limitation the rights to use, copy,
-modify, merge, publish, distribute, sublicense, and/or sell copies
-of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be
-included in all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
-EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
-MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
-NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
-BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
-ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
-CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-SOFTWARE.
-========================================================================
-(END LICENSE TEXT)
-
-The MIT license is compatible with both the GPL and commercial
-software, affording one all of the rights of Public Domain with the
-minor nuisance of being required to keep the above copyright notice
-and license text in the source code. Note also that by accepting the
-Public Domain "license" you can re-license your copy using whatever
-license you like.
-
-*/
-
-// //////////////////////////////////////////////////////////////////////
-// End of content of file: LICENSE
-// //////////////////////////////////////////////////////////////////////
-
-
-
-
-
-#ifndef JSON_AMALGATED_H_INCLUDED
-# define JSON_AMALGATED_H_INCLUDED
-/// If defined, indicates that the source file is amalgated
-/// to prevent private header inclusion.
-#define JSON_IS_AMALGATED
-
-// //////////////////////////////////////////////////////////////////////
-// Beginning of content of file: include/json/config.h
-// //////////////////////////////////////////////////////////////////////
-
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#ifndef JSON_CONFIG_H_INCLUDED
-# define JSON_CONFIG_H_INCLUDED
-
-/// If defined, indicates that json library is embedded in CppTL library.
-//# define JSON_IN_CPPTL 1
-
-/// If defined, indicates that json may leverage CppTL library
-//#  define JSON_USE_CPPTL 1
-/// If defined, indicates that cpptl vector based map should be used instead of std::map
-/// as Value container.
-//#  define JSON_USE_CPPTL_SMALLMAP 1
-/// If defined, indicates that Json specific container should be used
-/// (hash table & simple deque container with customizable allocator).
-/// THIS FEATURE IS STILL EXPERIMENTAL! There is know bugs: See #3177332
-//#  define JSON_VALUE_USE_INTERNAL_MAP 1
-/// Force usage of standard new/malloc based allocator instead of memory pool based allocator.
-/// The memory pools allocator used optimization (initializing Value and ValueInternalLink
-/// as if it was a POD) that may cause some validation tool to report errors.
-/// Only has effects if JSON_VALUE_USE_INTERNAL_MAP is defined.
-//#  define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1
-
-/// If defined, indicates that Json use exception to report invalid type manipulation
-/// instead of C assert macro.
-# define JSON_USE_EXCEPTION 1
-
-/// If defined, indicates that the source file is amalgated
-/// to prevent private header inclusion.
-/// Remarks: it is automatically defined in the generated amalgated header.
-#define JSON_IS_AMALGAMATION
-
-
-# ifdef JSON_IN_CPPTL
-#  include <cpptl/config.h>
-#  ifndef JSON_USE_CPPTL
-#   define JSON_USE_CPPTL 1
-#  endif
-# endif
-
-# ifdef JSON_IN_CPPTL
-#  define JSON_API CPPTL_API
-# elif defined(JSON_DLL_BUILD)
-#  define JSON_API __declspec(dllexport)
-# elif defined(JSON_DLL)
-#  define JSON_API __declspec(dllimport)
-# else
-#  define JSON_API
-# endif
-
-// If JSON_NO_INT64 is defined, then Json only supports the C++ "int" type for integer
-// storage, and 64-bit integer support is disabled.
-// #define JSON_NO_INT64 1
-
-#if defined(_MSC_VER)  &&  _MSC_VER <= 1200 // MSVC 6
-// Microsoft Visual Studio 6 only supports conversion from __int64 to double
-// (no conversion from unsigned __int64).
-#define JSON_USE_INT64_DOUBLE_CONVERSION 1
-#endif // if defined(_MSC_VER)  &&  _MSC_VER <= 1200 // MSVC 6
-
-#if defined(_MSC_VER)  &&  _MSC_VER >= 1500 // MSVC 2008
-/// Indicates that the following function is deprecated.
-# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
-#endif
-
-#if !defined(JSONCPP_DEPRECATED)
-# define JSONCPP_DEPRECATED(message)
-#endif // if !defined(JSONCPP_DEPRECATED)
-
-namespace Json {
-   typedef int Int;
-   typedef unsigned int UInt;
-# if defined(JSON_NO_INT64)
-   typedef int LargestInt;
-   typedef unsigned int LargestUInt;
-#  undef JSON_HAS_INT64
-# else // if defined(JSON_NO_INT64)
-   // For Microsoft Visual Studio, use specific types since long long is not supported
-#  if defined(_MSC_VER) // Microsoft Visual Studio
-   typedef __int64 Int64;
-   typedef unsigned __int64 UInt64;
-#  else // if defined(_MSC_VER) // Other platforms, use long long
-   typedef long long int Int64;
-   typedef unsigned long long int UInt64;
-#  endif // if defined(_MSC_VER)
-   typedef Int64 LargestInt;
-   typedef UInt64 LargestUInt;
-#  define JSON_HAS_INT64
-# endif // if defined(JSON_NO_INT64)
-} // end namespace Json
-
-
-#endif // JSON_CONFIG_H_INCLUDED
-
-// //////////////////////////////////////////////////////////////////////
-// End of content of file: include/json/config.h
-// //////////////////////////////////////////////////////////////////////
-
-
-
-
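As a rough illustration of the integer typedefs above (a minimal sketch, not part of the imported sources; it assumes the default configuration in which JSON_HAS_INT64 is defined and a <json/json.h>-style include path, as used by vtk_jsoncpp.h.in later in this commit):

    #include <json/json.h>
    #include <iostream>

    int main()
    {
      Json::Value v;
      v["big"] = Json::Value::Int64(1234567890123LL); // Int64 exists only when JSON_HAS_INT64 is defined
      // LargestInt maps to Int64 here, or to plain int when JSON_NO_INT64 is defined.
      Json::Value::LargestInt n = v["big"].asLargestInt();
      std::cout << n << std::endl;
      return 0;
    }
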
-
-
-// //////////////////////////////////////////////////////////////////////
-// Beginning of content of file: include/json/forwards.h
-// //////////////////////////////////////////////////////////////////////
-
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#ifndef JSON_FORWARDS_H_INCLUDED
-# define JSON_FORWARDS_H_INCLUDED
-
-#if !defined(JSON_IS_AMALGAMATION)
-# include "config.h"
-#endif // if !defined(JSON_IS_AMALGAMATION)
-
-namespace Json {
-
-   // writer.h
-   class FastWriter;
-   class StyledWriter;
-
-   // reader.h
-   class Reader;
-
-   // features.h
-   class Features;
-
-   // value.h
-   typedef unsigned int ArrayIndex;
-   class StaticString;
-   class Path;
-   class PathArgument;
-   class Value;
-   class ValueIteratorBase;
-   class ValueIterator;
-   class ValueConstIterator;
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-   class ValueMapAllocator;
-   class ValueInternalLink;
-   class ValueInternalArray;
-   class ValueInternalMap;
-#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
-
-} // namespace Json
-
-
-#endif // JSON_FORWARDS_H_INCLUDED
-
-// //////////////////////////////////////////////////////////////////////
-// End of content of file: include/json/forwards.h
-// //////////////////////////////////////////////////////////////////////
-
-
-
-
-
-
-// //////////////////////////////////////////////////////////////////////
-// Beginning of content of file: include/json/features.h
-// //////////////////////////////////////////////////////////////////////
-
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#ifndef CPPTL_JSON_FEATURES_H_INCLUDED
-# define CPPTL_JSON_FEATURES_H_INCLUDED
-
-#if !defined(JSON_IS_AMALGAMATION)
-# include "forwards.h"
-#endif // if !defined(JSON_IS_AMALGAMATION)
-
-namespace Json {
-
-   /** \brief Configuration passed to reader and writer.
-    * This configuration object can be used to force the Reader or Writer
-    * to behave in a standard conforming way.
-    */
-   class JSON_API Features
-   {
-   public:
-      /** \brief A configuration that allows all features and assumes all strings are UTF-8.
-       * - C & C++ comments are allowed
-       * - Root object can be any JSON value
-       * - Assumes Value strings are encoded in UTF-8
-       */
-      static Features all();
-
-      /** \brief A configuration that is strictly compatible with the JSON specification.
-       * - Comments are forbidden.
-       * - Root object must be either an array or an object value.
-       * - Assumes Value strings are encoded in UTF-8
-       */
-      static Features strictMode();
-
-      /** \brief Initialize the configuration like Features::all().
-       */
-      Features();
-
-      /// \c true if comments are allowed. Default: \c true.
-      bool allowComments_;
-
-      /// \c true if root must be either an array or an object value. Default: \c false.
-      bool strictRoot_;
-   };
-
-} // namespace Json
-
-#endif // CPPTL_JSON_FEATURES_H_INCLUDED
-
-// //////////////////////////////////////////////////////////////////////
-// End of content of file: include/json/features.h
-// //////////////////////////////////////////////////////////////////////
-
-
-
-
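A minimal sketch of passing a Features configuration to the Reader declared in reader.h further below (not part of the imported sources; the helper name parseStrict is invented for the example):

    #include <json/json.h>
    #include <string>

    bool parseStrict(const std::string& text, Json::Value& root)
    {
      Json::Features features = Json::Features::strictMode(); // comments forbidden, root must be array or object
      features.allowComments_ = true;                         // individual settings can still be adjusted
      Json::Reader reader(features);
      return reader.parse(text, root);
    }
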
-
-
-// //////////////////////////////////////////////////////////////////////
-// Beginning of content of file: include/json/value.h
-// //////////////////////////////////////////////////////////////////////
-
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#ifndef CPPTL_JSON_H_INCLUDED
-# define CPPTL_JSON_H_INCLUDED
-
-#if !defined(JSON_IS_AMALGAMATION)
-# include "forwards.h"
-#endif // if !defined(JSON_IS_AMALGAMATION)
-# include <string>
-# include <vector>
-
-# ifndef JSON_USE_CPPTL_SMALLMAP
-#  include <map>
-# else
-#  include <cpptl/smallmap.h>
-# endif
-# ifdef JSON_USE_CPPTL
-#  include <cpptl/forwards.h>
-# endif
-
-/** \brief JSON (JavaScript Object Notation).
- */
-namespace Json {
-
-   /** \brief Type of the value held by a Value object.
-    */
-   enum ValueType
-   {
-      nullValue = 0, ///< 'null' value
-      intValue,      ///< signed integer value
-      uintValue,     ///< unsigned integer value
-      realValue,     ///< double value
-      stringValue,   ///< UTF-8 string value
-      booleanValue,  ///< bool value
-      arrayValue,    ///< array value (ordered list)
-      objectValue    ///< object value (collection of name/value pairs).
-   };
-
-   enum CommentPlacement
-   {
-      commentBefore = 0,        ///< a comment placed on the line before a value
-      commentAfterOnSameLine,   ///< a comment just after a value on the same line
-      commentAfter,             ///< a comment on the line after a value (only makes sense for the root value)
-      numberOfCommentPlacement
-   };
-
-//# ifdef JSON_USE_CPPTL
-//   typedef CppTL::AnyEnumerator<const char *> EnumMemberNames;
-//   typedef CppTL::AnyEnumerator<const Value &> EnumValues;
-//# endif
-
-   /** \brief Lightweight wrapper to tag static string.
-    *
-    * The Value constructor and objectValue member assignment take advantage of
-    * StaticString and avoid the cost of string duplication when storing the
-    * string or the member name.
-    *
-    * Example of usage:
-    * \code
-    * Json::Value aValue( StaticString("some text") );
-    * Json::Value object;
-    * static const StaticString code("code");
-    * object[code] = 1234;
-    * \endcode
-    */
-   class JSON_API StaticString
-   {
-   public:
-      explicit StaticString( const char *czstring )
-         : str_( czstring )
-      {
-      }
-
-      operator const char *() const
-      {
-         return str_;
-      }
-
-      const char *c_str() const
-      {
-         return str_;
-      }
-
-   private:
-      const char *str_;
-   };
-
-   /** \brief Represents a <a HREF="http://www.json.org">JSON</a> value.
-    *
-    * This class is a discriminated union wrapper that can represent a:
-    * - signed integer [range: Value::minInt - Value::maxInt]
-    * - unsigned integer (range: 0 - Value::maxUInt)
-    * - double
-    * - UTF-8 string
-    * - boolean
-    * - 'null'
-    * - an ordered list of Value
-    * - collection of name/value pairs (javascript object)
-    *
-    * The type of the held value is represented by a #ValueType and
-    * can be obtained using type().
-    *
-    * Values of an #objectValue or #arrayValue can be accessed using operator[]() methods.
-    * Non-const methods will automatically create a #nullValue element
-    * if it does not exist.
-    * The sequence of an #arrayValue will be automatically resized and initialized
-    * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
-    *
-    * The get() methods can be used to obtain a default value in case the required element
-    * does not exist.
-    *
-    * It is possible to iterate over the member names of an #objectValue using
-    * the getMemberNames() method.
-    */
-   class JSON_API Value
-   {
-      friend class ValueIteratorBase;
-# ifdef JSON_VALUE_USE_INTERNAL_MAP
-      friend class ValueInternalLink;
-      friend class ValueInternalMap;
-# endif
-   public:
-      typedef std::vector<std::string> Members;
-      typedef ValueIterator iterator;
-      typedef ValueConstIterator const_iterator;
-      typedef Json::UInt UInt;
-      typedef Json::Int Int;
-# if defined(JSON_HAS_INT64)
-      typedef Json::UInt64 UInt64;
-      typedef Json::Int64 Int64;
-#endif // defined(JSON_HAS_INT64)
-      typedef Json::LargestInt LargestInt;
-      typedef Json::LargestUInt LargestUInt;
-      typedef Json::ArrayIndex ArrayIndex;
-
-      static const Value null;
-      /// Minimum signed integer value that can be stored in a Json::Value.
-      static const LargestInt minLargestInt;
-      /// Maximum signed integer value that can be stored in a Json::Value.
-      static const LargestInt maxLargestInt;
-      /// Maximum unsigned integer value that can be stored in a Json::Value.
-      static const LargestUInt maxLargestUInt;
-
-      /// Minimum signed int value that can be stored in a Json::Value.
-      static const Int minInt;
-      /// Maximum signed int value that can be stored in a Json::Value.
-      static const Int maxInt;
-      /// Maximum unsigned int value that can be stored in a Json::Value.
-      static const UInt maxUInt;
-
-      /// Minimum signed 64-bit int value that can be stored in a Json::Value.
-      static const Int64 minInt64;
-      /// Maximum signed 64-bit int value that can be stored in a Json::Value.
-      static const Int64 maxInt64;
-      /// Maximum unsigned 64-bit int value that can be stored in a Json::Value.
-      static const UInt64 maxUInt64;
-
-   private:
-#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-# ifndef JSON_VALUE_USE_INTERNAL_MAP
-      class CZString
-      {
-      public:
-         enum DuplicationPolicy
-         {
-            noDuplication = 0,
-            duplicate,
-            duplicateOnCopy
-         };
-         CZString( ArrayIndex index );
-         CZString( const char *cstr, DuplicationPolicy allocate );
-         CZString( const CZString &other );
-         ~CZString();
-         CZString &operator =( const CZString &other );
-         bool operator<( const CZString &other ) const;
-         bool operator==( const CZString &other ) const;
-         ArrayIndex index() const;
-         const char *c_str() const;
-         bool isStaticString() const;
-      private:
-         void swap( CZString &other );
-         const char *cstr_;
-         ArrayIndex index_;
-      };
-
-   public:
-#  ifndef JSON_USE_CPPTL_SMALLMAP
-      typedef std::map<CZString, Value> ObjectValues;
-#  else
-      typedef CppTL::SmallMap<CZString, Value> ObjectValues;
-#  endif // ifndef JSON_USE_CPPTL_SMALLMAP
-# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP
-#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-
-   public:
-      /** \brief Create a default Value of the given type.
-
-        This is a very useful constructor.
-        To create an empty array, pass arrayValue.
-        To create an empty object, pass objectValue.
-        Another Value can then be set to this one by assignment.
-    This is useful since clear() and resize() will not alter types.
-
-        Examples:
-    \code
-    Json::Value null_value; // null
-    Json::Value arr_value(Json::arrayValue); // []
-    Json::Value obj_value(Json::objectValue); // {}
-    \endcode
-      */
-      Value( ValueType type = nullValue );
-      Value( Int value );
-      Value( UInt value );
-#if defined(JSON_HAS_INT64)
-      Value( Int64 value );
-      Value( UInt64 value );
-#endif // if defined(JSON_HAS_INT64)
-      Value( double value );
-      Value( const char *value );
-      Value( const char *beginValue, const char *endValue );
-      /** \brief Constructs a value from a static string.
-
-       * Like the other string value constructors, but does not duplicate the string for
-       * internal storage. The given string must remain valid for the lifetime of the
-       * constructed Value.
-       * Example of usage:
-       * \code
-       * Json::Value aValue( StaticString("some text") );
-       * \endcode
-       */
-      Value( const StaticString &value );
-      Value( const std::string &value );
-# ifdef JSON_USE_CPPTL
-      Value( const CppTL::ConstString &value );
-# endif
-      Value( bool value );
-      Value( const Value &other );
-      ~Value();
-
-      Value &operator=( const Value &other );
-      /// Swap values.
-      /// \note Currently, comments are intentionally not swapped, for
-      /// both logic and efficiency.
-      void swap( Value &other );
-
-      ValueType type() const;
-
-      bool operator <( const Value &other ) const;
-      bool operator <=( const Value &other ) const;
-      bool operator >=( const Value &other ) const;
-      bool operator >( const Value &other ) const;
-
-      bool operator ==( const Value &other ) const;
-      bool operator !=( const Value &other ) const;
-
-      int compare( const Value &other ) const;
-
-      const char *asCString() const;
-      std::string asString() const;
-# ifdef JSON_USE_CPPTL
-      CppTL::ConstString asConstString() const;
-# endif
-      Int asInt() const;
-      UInt asUInt() const;
-      Int64 asInt64() const;
-      UInt64 asUInt64() const;
-      LargestInt asLargestInt() const;
-      LargestUInt asLargestUInt() const;
-      float asFloat() const;
-      double asDouble() const;
-      bool asBool() const;
-
-      bool isNull() const;
-      bool isBool() const;
-      bool isInt() const;
-      bool isUInt() const;
-      bool isIntegral() const;
-      bool isDouble() const;
-      bool isNumeric() const;
-      bool isString() const;
-      bool isArray() const;
-      bool isObject() const;
-
-      bool isConvertibleTo( ValueType other ) const;
-
-      /// Number of values in array or object
-      ArrayIndex size() const;
-
-      /// \brief Return true if empty array, empty object, or null;
-      /// otherwise, false.
-      bool empty() const;
-
-      /// Return isNull()
-      bool operator!() const;
-
-      /// Remove all object members and array elements.
-      /// \pre type() is arrayValue, objectValue, or nullValue
-      /// \post type() is unchanged
-      void clear();
-
-      /// Resize the array to size elements.
-      /// New elements are initialized to null.
-      /// May only be called on nullValue or arrayValue.
-      /// \pre type() is arrayValue or nullValue
-      /// \post type() is arrayValue
-      void resize( ArrayIndex size );
-
-      /// Access an array element (zero-based index).
-      /// If the array contains fewer than index+1 elements, then null values are inserted
-      /// in the array so that its size becomes index+1.
-      /// (You may need to say 'value[0u]' to get your compiler to distinguish
-      ///  this from the operator[] which takes a string.)
-      Value &operator[]( ArrayIndex index );
-
-      /// Access an array element (zero-based index).
-      /// If the array contains fewer than index+1 elements, then null values are inserted
-      /// in the array so that its size becomes index+1.
-      /// (You may need to say 'value[0u]' to get your compiler to distinguish
-      ///  this from the operator[] which takes a string.)
-      Value &operator[]( int index );
-
-      /// Access an array element (zero-based index)
-      /// (You may need to say 'value[0u]' to get your compiler to distinguish
-      ///  this from the operator[] which takes a string.)
-      const Value &operator[]( ArrayIndex index ) const;
-
-      /// Access an array element (zero-based index)
-      /// (You may need to say 'value[0u]' to get your compiler to distinguish
-      ///  this from the operator[] which takes a string.)
-      const Value &operator[]( int index ) const;
-
-      /// If the array contains at least index+1 elements, returns the element value,
-      /// otherwise returns defaultValue.
-      Value get( ArrayIndex index,
-                 const Value &defaultValue ) const;
-      /// Return true if index < size().
-      bool isValidIndex( ArrayIndex index ) const;
-      /// \brief Append value to array at the end.
-      ///
-      /// Equivalent to jsonvalue[jsonvalue.size()] = value;
-      Value &append( const Value &value );
-
-      /// Access an object value by name, create a null member if it does not exist.
-      Value &operator[]( const char *key );
-      /// Access an object value by name, returns null if there is no member with that name.
-      const Value &operator[]( const char *key ) const;
-      /// Access an object value by name, create a null member if it does not exist.
-      Value &operator[]( const std::string &key );
-      /// Access an object value by name, returns null if there is no member with that name.
-      const Value &operator[]( const std::string &key ) const;
-      /** \brief Access an object value by name, create a null member if it does not exist.
-
-       * If the object has no entry for that name, then the member name used to store
-       * the new entry is not duplicated.
-       * Example of use:
-       * \code
-       * Json::Value object;
-       * static const StaticString code("code");
-       * object[code] = 1234;
-       * \endcode
-       */
-      Value &operator[]( const StaticString &key );
-# ifdef JSON_USE_CPPTL
-      /// Access an object value by name, create a null member if it does not exist.
-      Value &operator[]( const CppTL::ConstString &key );
-      /// Access an object value by name, returns null if there is no member with that name.
-      const Value &operator[]( const CppTL::ConstString &key ) const;
-# endif
-      /// Return the member named key if it exists, defaultValue otherwise.
-      Value get( const char *key,
-                 const Value &defaultValue ) const;
-      /// Return the member named key if it exists, defaultValue otherwise.
-      Value get( const std::string &key,
-                 const Value &defaultValue ) const;
-# ifdef JSON_USE_CPPTL
-      /// Return the member named key if it exists, defaultValue otherwise.
-      Value get( const CppTL::ConstString &key,
-                 const Value &defaultValue ) const;
-# endif
-      /// \brief Remove and return the named member.
-      ///
-      /// Does nothing if it does not exist.
-      /// \return the removed Value, or null.
-      /// \pre type() is objectValue or nullValue
-      /// \post type() is unchanged
-      Value removeMember( const char* key );
-      /// Same as removeMember(const char*)
-      Value removeMember( const std::string &key );
-
-      /// Return true if the object has a member named key.
-      bool isMember( const char *key ) const;
-      /// Return true if the object has a member named key.
-      bool isMember( const std::string &key ) const;
-# ifdef JSON_USE_CPPTL
-      /// Return true if the object has a member named key.
-      bool isMember( const CppTL::ConstString &key ) const;
-# endif
-
-      /// \brief Return a list of the member names.
-      ///
-      /// If null, return an empty list.
-      /// \pre type() is objectValue or nullValue
-      /// \post if type() was nullValue, it remains nullValue
-      Members getMemberNames() const;
-
-//# ifdef JSON_USE_CPPTL
-//      EnumMemberNames enumMemberNames() const;
-//      EnumValues enumValues() const;
-//# endif
-
-      /// Comments must be //... or /* ... */
-      void setComment( const char *comment,
-                       CommentPlacement placement );
-      /// Comments must be //... or /* ... */
-      void setComment( const std::string &comment,
-                       CommentPlacement placement );
-      bool hasComment( CommentPlacement placement ) const;
-      /// Include delimiters and embedded newlines.
-      std::string getComment( CommentPlacement placement ) const;
-
-      std::string toStyledString() const;
-
-      const_iterator begin() const;
-      const_iterator end() const;
-
-      iterator begin();
-      iterator end();
-
-   private:
-      Value &resolveReference( const char *key,
-                               bool isStatic );
-
-# ifdef JSON_VALUE_USE_INTERNAL_MAP
-      inline bool isItemAvailable() const
-      {
-         return itemIsUsed_ == 0;
-      }
-
-      inline void setItemUsed( bool isUsed = true )
-      {
-         itemIsUsed_ = isUsed ? 1 : 0;
-      }
-
-      inline bool isMemberNameStatic() const
-      {
-         return memberNameIsStatic_ == 0;
-      }
-
-      inline void setMemberNameIsStatic( bool isStatic )
-      {
-         memberNameIsStatic_ = isStatic ? 1 : 0;
-      }
-# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP
-
-   private:
-      struct CommentInfo
-      {
-         CommentInfo();
-         ~CommentInfo();
-
-         void setComment( const char *text );
-
-         char *comment_;
-      };
-
-      //struct MemberNamesTransform
-      //{
-      //   typedef const char *result_type;
-      //   const char *operator()( const CZString &name ) const
-      //   {
-      //      return name.c_str();
-      //   }
-      //};
-
-      union ValueHolder
-      {
-         LargestInt int_;
-         LargestUInt uint_;
-         double real_;
-         bool bool_;
-         char *string_;
-# ifdef JSON_VALUE_USE_INTERNAL_MAP
-         ValueInternalArray *array_;
-         ValueInternalMap *map_;
-#else
-         ObjectValues *map_;
-# endif
-      } value_;
-      ValueType type_ : 8;
-      int allocated_ : 1;     // Notes: if declared as bool, bitfield is useless.
-# ifdef JSON_VALUE_USE_INTERNAL_MAP
-      unsigned int itemIsUsed_ : 1;      // used by the ValueInternalMap container.
-      int memberNameIsStatic_ : 1;       // used by the ValueInternalMap container.
-# endif
-      CommentInfo *comments_;
-   };
-
-
-   /** \brief Experimental and untested: represents an element of the "path" to access a node.
-    */
-   class PathArgument
-   {
-   public:
-      friend class Path;
-
-      PathArgument();
-      PathArgument( ArrayIndex index );
-      PathArgument( const char *key );
-      PathArgument( const std::string &key );
-
-   private:
-      enum Kind
-      {
-         kindNone = 0,
-         kindIndex,
-         kindKey
-      };
-      std::string key_;
-      ArrayIndex index_;
-      Kind kind_;
-   };
-
-   /** \brief Experimental and untested: represents a "path" to access a node.
-    *
-    * Syntax:
-    * - "." => root node
-    * - ".[n]" => elements at index 'n' of root node (an array value)
-    * - ".name" => member named 'name' of root node (an object value)
-    * - ".name1.name2.name3"
-    * - ".[0][1][2].name1[3]"
-    * - ".%" => member name is provided as parameter
-    * - ".[%]" => index is provied as parameter
-    */
-   class Path
-   {
-   public:
-      Path( const std::string &path,
-            const PathArgument &a1 = PathArgument(),
-            const PathArgument &a2 = PathArgument(),
-            const PathArgument &a3 = PathArgument(),
-            const PathArgument &a4 = PathArgument(),
-            const PathArgument &a5 = PathArgument() );
-
-      const Value &resolve( const Value &root ) const;
-      Value resolve( const Value &root,
-                     const Value &defaultValue ) const;
-      /// Creates the "path" to access the specified node and returns a reference to the node.
-      Value &make( Value &root ) const;
-
-   private:
-      typedef std::vector<const PathArgument *> InArgs;
-      typedef std::vector<PathArgument> Args;
-
-      void makePath( const std::string &path,
-                     const InArgs &in );
-      void addPathInArg( const std::string &path,
-                         const InArgs &in,
-                         InArgs::const_iterator &itInArg,
-                         PathArgument::Kind kind );
-      void invalidPath( const std::string &path,
-                        int location );
-
-      Args args_;
-   };
-
-
-
-#ifdef JSON_VALUE_USE_INTERNAL_MAP
-   /** \brief Allocator to customize Value internal map.
-    * Below is an example of a simple implementation (the default implementation actually
-    * uses a memory pool for speed).
-    * \code
-      class DefaultValueMapAllocator : public ValueMapAllocator
-      {
-      public: // overridden from ValueMapAllocator
-         virtual ValueInternalMap *newMap()
-         {
-            return new ValueInternalMap();
-         }
-
-         virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other )
-         {
-            return new ValueInternalMap( other );
-         }
-
-         virtual void destructMap( ValueInternalMap *map )
-         {
-            delete map;
-         }
-
-         virtual ValueInternalLink *allocateMapBuckets( unsigned int size )
-         {
-            return new ValueInternalLink[size];
-         }
-
-         virtual void releaseMapBuckets( ValueInternalLink *links )
-         {
-            delete [] links;
-         }
-
-         virtual ValueInternalLink *allocateMapLink()
-         {
-            return new ValueInternalLink();
-         }
-
-         virtual void releaseMapLink( ValueInternalLink *link )
-         {
-            delete link;
-         }
-      };
-    * \endcode
-    */
-   class JSON_API ValueMapAllocator
-   {
-   public:
-      virtual ~ValueMapAllocator();
-      virtual ValueInternalMap *newMap() = 0;
-      virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0;
-      virtual void destructMap( ValueInternalMap *map ) = 0;
-      virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0;
-      virtual void releaseMapBuckets( ValueInternalLink *links ) = 0;
-      virtual ValueInternalLink *allocateMapLink() = 0;
-      virtual void releaseMapLink( ValueInternalLink *link ) = 0;
-   };
-
-   /** \brief ValueInternalMap hash-map bucket chain link (for internal use only).
-    * \internal previous_ & next_ allow for bidirectional traversal.
-    */
-   class JSON_API ValueInternalLink
-   {
-   public:
-      enum { itemPerLink = 6 };  // sizeof(ValueInternalLink) = 128 on 32-bit architectures.
-      enum InternalFlags {
-         flagAvailable = 0,
-         flagUsed = 1
-      };
-
-      ValueInternalLink();
-
-      ~ValueInternalLink();
-
-      Value items_[itemPerLink];
-      char *keys_[itemPerLink];
-      ValueInternalLink *previous_;
-      ValueInternalLink *next_;
-   };
-
-
-   /** \brief A linked page based hash-table implementation used internally by Value.
-    * \internal ValueInternalMap is a traditional bucket-based hash table, with a linked
-    * list in each bucket to handle collisions. There is an additional twist in that
-    * each node of the collision linked list is a page containing a fixed number of
-    * values. This provides a better compromise between memory usage and speed.
-    *
-    * Each bucket is made up of a chained list of ValueInternalLink. The last
-    * link of a given bucket can be found in the 'previous_' field of the following bucket.
-    * The last link of the last bucket is stored in tailLink_ as it has no following bucket.
-    * Only the last link of a bucket may contain 'available' items. The last link always
-    * contains at least one element unless it is the bucket's very first link.
-    */
-   class JSON_API ValueInternalMap
-   {
-      friend class ValueIteratorBase;
-      friend class Value;
-   public:
-      typedef unsigned int HashKey;
-      typedef unsigned int BucketIndex;
-
-# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-      struct IteratorState
-      {
-         IteratorState()
-            : map_(0)
-            , link_(0)
-            , itemIndex_(0)
-            , bucketIndex_(0)
-         {
-         }
-         ValueInternalMap *map_;
-         ValueInternalLink *link_;
-         BucketIndex itemIndex_;
-         BucketIndex bucketIndex_;
-      };
-# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-
-      ValueInternalMap();
-      ValueInternalMap( const ValueInternalMap &other );
-      ValueInternalMap &operator =( const ValueInternalMap &other );
-      ~ValueInternalMap();
-
-      void swap( ValueInternalMap &other );
-
-      BucketIndex size() const;
-
-      void clear();
-
-      bool reserveDelta( BucketIndex growth );
-
-      bool reserve( BucketIndex newItemCount );
-
-      const Value *find( const char *key ) const;
-
-      Value *find( const char *key );
-
-      Value &resolveReference( const char *key,
-                               bool isStatic );
-
-      void remove( const char *key );
-
-      void doActualRemove( ValueInternalLink *link,
-                           BucketIndex index,
-                           BucketIndex bucketIndex );
-
-      ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex );
-
-      Value &setNewItem( const char *key,
-                         bool isStatic,
-                         ValueInternalLink *link,
-                         BucketIndex index );
-
-      Value &unsafeAdd( const char *key,
-                        bool isStatic,
-                        HashKey hashedKey );
-
-      HashKey hash( const char *key ) const;
-
-      int compare( const ValueInternalMap &other ) const;
-
-   private:
-      void makeBeginIterator( IteratorState &it ) const;
-      void makeEndIterator( IteratorState &it ) const;
-      static bool equals( const IteratorState &x, const IteratorState &other );
-      static void increment( IteratorState &iterator );
-      static void incrementBucket( IteratorState &iterator );
-      static void decrement( IteratorState &iterator );
-      static const char *key( const IteratorState &iterator );
-      static const char *key( const IteratorState &iterator, bool &isStatic );
-      static Value &value( const IteratorState &iterator );
-      static int distance( const IteratorState &x, const IteratorState &y );
-
-   private:
-      ValueInternalLink *buckets_;
-      ValueInternalLink *tailLink_;
-      BucketIndex bucketsSize_;
-      BucketIndex itemCount_;
-   };
-
-   /** \brief A simplified deque implementation used internally by Value.
-   * \internal
-   * It is based on a list of fixed-size "pages"; each page contains a fixed number of items.
-   * Instead of using a linked list, an array of pointers is used for fast item look-up.
-   * Look-up for an element is as follows:
-   * - compute page index: pageIndex = itemIndex / itemsPerPage
-   * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage]
-   *
-   * Insertion is amortized constant time (only the array containing the page pointers
-   * needs to be reallocated when items are appended).
-   */
-   class JSON_API ValueInternalArray
-   {
-      friend class Value;
-      friend class ValueIteratorBase;
-   public:
-      enum { itemsPerPage = 8 };    // should be a power of 2 for fast divide and modulo.
-      typedef Value::ArrayIndex ArrayIndex;
-      typedef unsigned int PageIndex;
-
-# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-      struct IteratorState // Must be a POD
-      {
-         IteratorState()
-            : array_(0)
-            , currentPageIndex_(0)
-            , currentItemIndex_(0)
-         {
-         }
-         ValueInternalArray *array_;
-         Value **currentPageIndex_;
-         unsigned int currentItemIndex_;
-      };
-# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
-
-      ValueInternalArray();
-      ValueInternalArray( const ValueInternalArray &other );
-      ValueInternalArray &operator =( const ValueInternalArray &other );
-      ~ValueInternalArray();
-      void swap( ValueInternalArray &other );
-
-      void clear();
-      void resize( ArrayIndex newSize );
-
-      Value &resolveReference( ArrayIndex index );
-
-      Value *find( ArrayIndex index ) const;
-
-      ArrayIndex size() const;
-
-      int compare( const ValueInternalArray &other ) const;
-
-   private:
-      static bool equals( const IteratorState &x, const IteratorState &other );
-      static void increment( IteratorState &iterator );
-      static void decrement( IteratorState &iterator );
-      static Value &dereference( const IteratorState &iterator );
-      static Value &unsafeDereference( const IteratorState &iterator );
-      static int distance( const IteratorState &x, const IteratorState &y );
-      static ArrayIndex indexOf( const IteratorState &iterator );
-      void makeBeginIterator( IteratorState &it ) const;
-      void makeEndIterator( IteratorState &it ) const;
-      void makeIterator( IteratorState &it, ArrayIndex index ) const;
-
-      void makeIndexValid( ArrayIndex index );
-
-      Value **pages_;
-      ArrayIndex size_;
-      PageIndex pageCount_;
-   };
-
-   /** \brief Experimental: do not use. Allocator to customize Value internal array.
-    * Below is an example of a simple implementation (the actual implementation uses
-    * a memory pool).
-      \code
-class DefaultValueArrayAllocator : public ValueArrayAllocator
-{
-public: // overridden from ValueArrayAllocator
-   virtual ~DefaultValueArrayAllocator()
-   {
-   }
-
-   virtual ValueInternalArray *newArray()
-   {
-      return new ValueInternalArray();
-   }
-
-   virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other )
-   {
-      return new ValueInternalArray( other );
-   }
-
-   virtual void destruct( ValueInternalArray *array )
-   {
-      delete array;
-   }
-
-   virtual void reallocateArrayPageIndex( Value **&indexes,
-                                          ValueInternalArray::PageIndex &indexCount,
-                                          ValueInternalArray::PageIndex minNewIndexCount )
-   {
-      ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1;
-      if ( minNewIndexCount > newIndexCount )
-         newIndexCount = minNewIndexCount;
-      void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
-      if ( !newIndexes )
-         throw std::bad_alloc();
-      indexCount = newIndexCount;
-      indexes = static_cast<Value **>( newIndexes );
-   }
-   virtual void releaseArrayPageIndex( Value **indexes,
-                                       ValueInternalArray::PageIndex indexCount )
-   {
-      if ( indexes )
-         free( indexes );
-   }
-
-   virtual Value *allocateArrayPage()
-   {
-      return static_cast<Value *>( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) );
-   }
-
-   virtual void releaseArrayPage( Value *value )
-   {
-      if ( value )
-         free( value );
-   }
-};
-      \endcode
-    */
-   class JSON_API ValueArrayAllocator
-   {
-   public:
-      virtual ~ValueArrayAllocator();
-      virtual ValueInternalArray *newArray() = 0;
-      virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0;
-      virtual void destructArray( ValueInternalArray *array ) = 0;
-      /** \brief Reallocate array page index.
-       * Reallocates the array of pointers to pages.
-       * \param indexes [input] pointer to the current index. May be \c NULL.
-       *                [output] pointer to the new index of at least
-       *                         \a minNewIndexCount pages.
-       * \param indexCount [input] current number of pages in the index.
-       *                   [output] number of pages the reallocated index can handle.
-       *                            \b MUST be >= \a minNewIndexCount.
-       * \param minNewIndexCount Minimum number of pages the new index must be able to
-       *                         handle.
-       */
-      virtual void reallocateArrayPageIndex( Value **&indexes,
-                                             ValueInternalArray::PageIndex &indexCount,
-                                             ValueInternalArray::PageIndex minNewIndexCount ) = 0;
-      virtual void releaseArrayPageIndex( Value **indexes,
-                                          ValueInternalArray::PageIndex indexCount ) = 0;
-      virtual Value *allocateArrayPage() = 0;
-      virtual void releaseArrayPage( Value *value ) = 0;
-   };
-#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
-
-
-   /** \brief base class for Value iterators.
-    *
-    */
-   class ValueIteratorBase
-   {
-   public:
-      typedef unsigned int size_t;
-      typedef int difference_type;
-      typedef ValueIteratorBase SelfType;
-
-      ValueIteratorBase();
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-      explicit ValueIteratorBase( const Value::ObjectValues::iterator &current );
-#else
-      ValueIteratorBase( const ValueInternalArray::IteratorState &state );
-      ValueIteratorBase( const ValueInternalMap::IteratorState &state );
-#endif
-
-      bool operator ==( const SelfType &other ) const
-      {
-         return isEqual( other );
-      }
-
-      bool operator !=( const SelfType &other ) const
-      {
-         return !isEqual( other );
-      }
-
-      difference_type operator -( const SelfType &other ) const
-      {
-         return computeDistance( other );
-      }
-
-      /// Return either the index or the member name of the referenced value as a Value.
-      Value key() const;
-
-      /// Return the index of the referenced Value. -1 if it is not an arrayValue.
-      UInt index() const;
-
-      /// Return the member name of the referenced Value. "" if it is not an objectValue.
-      const char *memberName() const;
-
-   protected:
-      Value &deref() const;
-
-      void increment();
-
-      void decrement();
-
-      difference_type computeDistance( const SelfType &other ) const;
-
-      bool isEqual( const SelfType &other ) const;
-
-      void copy( const SelfType &other );
-
-   private:
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-      Value::ObjectValues::iterator current_;
-      // Indicates that iterator is for a null value.
-      bool isNull_;
-#else
-      union
-      {
-         ValueInternalArray::IteratorState array_;
-         ValueInternalMap::IteratorState map_;
-      } iterator_;
-      bool isArray_;
-#endif
-   };
-
-   /** \brief const iterator for object and array value.
-    *
-    */
-   class ValueConstIterator : public ValueIteratorBase
-   {
-      friend class Value;
-   public:
-      typedef unsigned int size_t;
-      typedef int difference_type;
-      typedef const Value &reference;
-      typedef const Value *pointer;
-      typedef ValueConstIterator SelfType;
-
-      ValueConstIterator();
-   private:
-      /*! \internal Used by Value to create an iterator.
-       */
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-      explicit ValueConstIterator( const Value::ObjectValues::iterator &current );
-#else
-      ValueConstIterator( const ValueInternalArray::IteratorState &state );
-      ValueConstIterator( const ValueInternalMap::IteratorState &state );
-#endif
-   public:
-      SelfType &operator =( const ValueIteratorBase &other );
-
-      SelfType operator++( int )
-      {
-         SelfType temp( *this );
-         ++*this;
-         return temp;
-      }
-
-      SelfType operator--( int )
-      {
-         SelfType temp( *this );
-         --*this;
-         return temp;
-      }
-
-      SelfType &operator--()
-      {
-         decrement();
-         return *this;
-      }
-
-      SelfType &operator++()
-      {
-         increment();
-         return *this;
-      }
-
-      reference operator *() const
-      {
-         return deref();
-      }
-   };
-
-
-   /** \brief Iterator for object and array value.
-    */
-   class ValueIterator : public ValueIteratorBase
-   {
-      friend class Value;
-   public:
-      typedef unsigned int size_t;
-      typedef int difference_type;
-      typedef Value &reference;
-      typedef Value *pointer;
-      typedef ValueIterator SelfType;
-
-      ValueIterator();
-      ValueIterator( const ValueConstIterator &other );
-      ValueIterator( const ValueIterator &other );
-   private:
-      /*! \internal Used by Value to create an iterator.
-       */
-#ifndef JSON_VALUE_USE_INTERNAL_MAP
-      explicit ValueIterator( const Value::ObjectValues::iterator &current );
-#else
-      ValueIterator( const ValueInternalArray::IteratorState &state );
-      ValueIterator( const ValueInternalMap::IteratorState &state );
-#endif
-   public:
-
-      SelfType &operator =( const SelfType &other );
-
-      SelfType operator++( int )
-      {
-         SelfType temp( *this );
-         ++*this;
-         return temp;
-      }
-
-      SelfType operator--( int )
-      {
-         SelfType temp( *this );
-         --*this;
-         return temp;
-      }
-
-      SelfType &operator--()
-      {
-         decrement();
-         return *this;
-      }
-
-      SelfType &operator++()
-      {
-         increment();
-         return *this;
-      }
-
-      reference operator *() const
-      {
-         return deref();
-      }
-   };
-
-
-} // namespace Json
-
-
-#endif // CPPTL_JSON_H_INCLUDED
-
-// //////////////////////////////////////////////////////////////////////
-// End of content of file: include/json/value.h
-// //////////////////////////////////////////////////////////////////////
-
-
-
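A minimal sketch of building and querying a Json::Value with the interface declared above (not part of the imported sources):

    #include <json/json.h>
    #include <iostream>
    #include <string>

    void buildAndInspect()
    {
      Json::Value root(Json::objectValue);
      root["name"] = "example";
      root["count"] = 3;
      root["items"].append(1);   // first use of operator[] creates the member, append makes it an array
      root["items"].append(2);

      // get() returns the supplied default when the member is missing.
      std::string name = root.get("name", "unknown").asString();
      Json::Value::UInt count = root.get("count", 0).asUInt();
      std::cout << name << " has " << count << " entries" << std::endl;

      // Iterate over the member names of the object.
      Json::Value::Members members = root.getMemberNames();
      for (Json::Value::Members::const_iterator it = members.begin(); it != members.end(); ++it)
        std::cout << *it << ": " << root[*it] << std::endl;
    }
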
-
-
-
-// //////////////////////////////////////////////////////////////////////
-// Beginning of content of file: include/json/reader.h
-// //////////////////////////////////////////////////////////////////////
-
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#ifndef CPPTL_JSON_READER_H_INCLUDED
-# define CPPTL_JSON_READER_H_INCLUDED
-
-#if !defined(JSON_IS_AMALGAMATION)
-# include "features.h"
-# include "value.h"
-#endif // if !defined(JSON_IS_AMALGAMATION)
-# include <deque>
-# include <stack>
-# include <string>
-# include <iostream>
-
-namespace Json {
-
-   /** \brief Deserialize a <a HREF="http://www.json.org">JSON</a> document into a Value.
-    *
-    */
-   class JSON_API Reader
-   {
-   public:
-      typedef char Char;
-      typedef const Char *Location;
-
-      /** \brief Constructs a Reader allowing all features
-       * for parsing.
-       */
-      Reader();
-
-      /** \brief Constructs a Reader allowing the specified feature set
-       * for parsing.
-       */
-      Reader( const Features &features );
-
-      /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a> document.
-       * \param document UTF-8 encoded string containing the document to read.
-       * \param root [out] Contains the root value of the document if it was
-       *             successfully parsed.
-       * \param collectComments \c true to collect comments and allow writing them back during
-       *                        serialization, \c false to discard comments.
-       *                        This parameter is ignored if Features::allowComments_
-       *                        is \c false.
-       * \return \c true if the document was successfully parsed, \c false if an error occurred.
-       */
-      bool parse( const std::string &document,
-                  Value &root,
-                  bool collectComments = true );
-
-      /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a> document.
-       * \param beginDoc Pointer to the beginning of the UTF-8 encoded string of the document to read.
-       * \param endDoc Pointer to the end of the UTF-8 encoded string of the document to read.
-       *               Must be >= beginDoc.
-       * \param root [out] Contains the root value of the document if it was
-       *             successfully parsed.
-       * \param collectComments \c true to collect comments and allow writing them back during
-       *                        serialization, \c false to discard comments.
-       *                        This parameter is ignored if Features::allowComments_
-       *                        is \c false.
-       * \return \c true if the document was successfully parsed, \c false if an error occurred.
-       */
-      bool parse( const char *beginDoc, const char *endDoc,
-                  Value &root,
-                  bool collectComments = true );
-
-      /// \brief Parse from input stream.
-      /// \see Json::operator>>(std::istream&, Json::Value&).
-      bool parse( std::istream &is,
-                  Value &root,
-                  bool collectComments = true );
-
-      /** \brief Returns a user-friendly string that lists errors in the parsed document.
-       * \return Formatted error message with the list of errors with their location in
-       *         the parsed document. An empty string is returned if no error occurred
-       *         during parsing.
-       * deprecated: Use getFormattedErrorMessages() instead (typo fix).
-       */
-      JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead")
-      std::string getFormatedErrorMessages() const;
-
-      /** \brief Returns a user-friendly string that lists errors in the parsed document.
-       * \return Formatted error message with the list of errors with their location in
-       *         the parsed document. An empty string is returned if no error occurred
-       *         during parsing.
-       */
-      std::string getFormattedErrorMessages() const;
-
-   private:
-      enum TokenType
-      {
-         tokenEndOfStream = 0,
-         tokenObjectBegin,
-         tokenObjectEnd,
-         tokenArrayBegin,
-         tokenArrayEnd,
-         tokenString,
-         tokenNumber,
-         tokenTrue,
-         tokenFalse,
-         tokenNull,
-         tokenArraySeparator,
-         tokenMemberSeparator,
-         tokenComment,
-         tokenError
-      };
-
-      class Token
-      {
-      public:
-         TokenType type_;
-         Location start_;
-         Location end_;
-      };
-
-      class ErrorInfo
-      {
-      public:
-         Token token_;
-         std::string message_;
-         Location extra_;
-      };
-
-      typedef std::deque<ErrorInfo> Errors;
-
-      bool expectToken( TokenType type, Token &token, const char *message );
-      bool readToken( Token &token );
-      void skipSpaces();
-      bool match( Location pattern,
-                  int patternLength );
-      bool readComment();
-      bool readCStyleComment();
-      bool readCppStyleComment();
-      bool readString();
-      void readNumber();
-      bool readValue();
-      bool readObject( Token &token );
-      bool readArray( Token &token );
-      bool decodeNumber( Token &token );
-      bool decodeString( Token &token );
-      bool decodeString( Token &token, std::string &decoded );
-      bool decodeDouble( Token &token );
-      bool decodeUnicodeCodePoint( Token &token,
-                                   Location &current,
-                                   Location end,
-                                   unsigned int &unicode );
-      bool decodeUnicodeEscapeSequence( Token &token,
-                                        Location &current,
-                                        Location end,
-                                        unsigned int &unicode );
-      bool addError( const std::string &message,
-                     Token &token,
-                     Location extra = 0 );
-      bool recoverFromError( TokenType skipUntilToken );
-      bool addErrorAndRecover( const std::string &message,
-                               Token &token,
-                               TokenType skipUntilToken );
-      void skipUntilSpace();
-      Value &currentValue();
-      Char getNextChar();
-      void getLocationLineAndColumn( Location location,
-                                     int &line,
-                                     int &column ) const;
-      std::string getLocationLineAndColumn( Location location ) const;
-      void addComment( Location begin,
-                       Location end,
-                       CommentPlacement placement );
-      void skipCommentTokens( Token &token );
-
-      typedef std::stack<Value *> Nodes;
-      Nodes nodes_;
-      Errors errors_;
-      std::string document_;
-      Location begin_;
-      Location end_;
-      Location current_;
-      Location lastValueEnd_;
-      Value *lastValue_;
-      std::string commentsBefore_;
-      Features features_;
-      bool collectComments_;
-   };
-
-   /** \brief Read from 'sin' into 'root'.
-
-    Always keep comments from the input JSON.
-
-    This can be used to read a file into a particular sub-object.
-    For example:
-    \code
-    Json::Value root;
-    cin >> root["dir"]["file"];
-    cout << root;
-    \endcode
-    Result:
-    \verbatim
-    {
-    "dir": {
-        "file": {
-        // The input stream JSON would be nested here.
-        }
-    }
-    }
-    \endverbatim
-    \throw std::exception on parse error.
-    \see Json::operator<<()
-   */
-   std::istream& operator>>( std::istream&, Value& );
-
-} // namespace Json
-
-#endif // CPPTL_JSON_READER_H_INCLUDED
-
-// //////////////////////////////////////////////////////////////////////
-// End of content of file: include/json/reader.h
-// //////////////////////////////////////////////////////////////////////
-
-
-
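A minimal sketch of parsing with the Reader interface declared above (not part of the imported sources; the helper name loadDocument and the file-based input are assumptions of the example):

    #include <json/json.h>
    #include <fstream>
    #include <iostream>

    bool loadDocument(const char* fileName, Json::Value& root)
    {
      std::ifstream in(fileName);
      Json::Reader reader;
      if (!reader.parse(in, root, /*collectComments=*/true))
      {
        std::cerr << reader.getFormattedErrorMessages() << std::endl;
        return false;
      }
      return true;
    }
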
-
-
-
-// //////////////////////////////////////////////////////////////////////
-// Beginning of content of file: include/json/writer.h
-// //////////////////////////////////////////////////////////////////////
-
-// Copyright 2007-2010 Baptiste Lepilleur
-// Distributed under MIT license, or public domain if desired and
-// recognized in your jurisdiction.
-// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
-
-#ifndef JSON_WRITER_H_INCLUDED
-# define JSON_WRITER_H_INCLUDED
-
-#if !defined(JSON_IS_AMALGAMATION)
-# include "value.h"
-#endif // if !defined(JSON_IS_AMALGAMATION)
-# include <vector>
-# include <string>
-# include <iostream>
-
-namespace Json {
-
-   class Value;
-
-   /** \brief Abstract class for writers.
-    */
-   class JSON_API Writer
-   {
-   public:
-      virtual ~Writer();
-
-      virtual std::string write( const Value &root ) = 0;
-   };
-
-   /** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format without formatting (not human friendly).
-    *
-    * The JSON document is written on a single line. It is not intended for 'human' consumption,
-    * but may be useful to support features such as RPC where bandwidth is limited.
-    * \sa Reader, Value
-    */
-   class JSON_API FastWriter : public Writer
-   {
-   public:
-      FastWriter();
-      virtual ~FastWriter(){}
-
-      void enableYAMLCompatibility();
-
-   public: // overridden from Writer
-      virtual std::string write( const Value &root );
-
-   private:
-      void writeValue( const Value &value );
-
-      std::string document_;
-      bool yamlCompatiblityEnabled_;
-   };
-
-   /** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a human friendly way.
-    *
-    * The rules for line breaks and indentation are as follows:
-    * - Object value:
-    *     - if empty, then print {} without indent or line break
-    *     - if not empty, then print '{', line break & indent, print one value per line
-    *       and then unindent, line break, and print '}'.
-    * - Array value:
-    *     - if empty, then print [] without indent or line break
-    *     - if the array contains no object values or non-empty arrays,
-    *       and all the values fit on one line, then print the array on a single line.
-    *     - otherwise, if the values do not fit on one line, or the array contains an
-    *       object or a non-empty array, then print one value per line.
-    *
-    * If the Value has comments then they are output according to their #CommentPlacement.
-    *
-    * \sa Reader, Value, Value::setComment()
-    */
-   class JSON_API StyledWriter: public Writer
-   {
-   public:
-      StyledWriter();
-      virtual ~StyledWriter(){}
-
-   public: // overridden from Writer
-      /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
-       * \param root Value to serialize.
-       * \return String containing the JSON document that represents the root value.
-       */
-      virtual std::string write( const Value &root );
-
-   private:
-      void writeValue( const Value &value );
-      void writeArrayValue( const Value &value );
-      bool isMultineArray( const Value &value );
-      void pushValue( const std::string &value );
-      void writeIndent();
-      void writeWithIndent( const std::string &value );
-      void indent();
-      void unindent();
-      void writeCommentBeforeValue( const Value &root );
-      void writeCommentAfterValueOnSameLine( const Value &root );
-      bool hasCommentForValue( const Value &value );
-      static std::string normalizeEOL( const std::string &text );
-
-      typedef std::vector<std::string> ChildValues;
-
-      ChildValues childValues_;
-      std::string document_;
-      std::string indentString_;
-      int rightMargin_;
-      int indentSize_;
-      bool addChildValues_;
-   };
-
-   /** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a human friendly way,
-    *   to a stream rather than to a string.
-    *
-    * The rules for line breaks and indentation are as follows:
-    * - Object value:
-    *     - if empty, then print {} without indent or line break
-    *     - if not empty, then print '{', line break & indent, print one value per line
-    *       and then unindent, line break, and print '}'.
-    * - Array value:
-    *     - if empty, then print [] without indent or line break
-    *     - if the array contains no object values or non-empty arrays,
-    *       and all the values fit on one line, then print the array on a single line.
-    *     - otherwise, if the values do not fit on one line, or the array contains an
-    *       object or a non-empty array, then print one value per line.
-    *
-    * If the Value has comments then they are output according to their #CommentPlacement.
-    *
-    * \param indentation Each level will be indented by this additional amount.
-    * \sa Reader, Value, Value::setComment()
-    */
-   class JSON_API StyledStreamWriter
-   {
-   public:
-      StyledStreamWriter( std::string indentation="\t" );
-      ~StyledStreamWriter(){}
-
-   public:
-      /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
-       * \param out Stream to write to. (Can be ostringstream, e.g.)
-       * \param root Value to serialize.
-       * \note There is no point in deriving from Writer, since write() should not return a value.
-       */
-      void write( std::ostream &out, const Value &root );
-
-   private:
-      void writeValue( const Value &value );
-      void writeArrayValue( const Value &value );
-      bool isMultineArray( const Value &value );
-      void pushValue( const std::string &value );
-      void writeIndent();
-      void writeWithIndent( const std::string &value );
-      void indent();
-      void unindent();
-      void writeCommentBeforeValue( const Value &root );
-      void writeCommentAfterValueOnSameLine( const Value &root );
-      bool hasCommentForValue( const Value &value );
-      static std::string normalizeEOL( const std::string &text );
-
-      typedef std::vector<std::string> ChildValues;
-
-      ChildValues childValues_;
-      std::ostream* document_;
-      std::string indentString_;
-      int rightMargin_;
-      std::string indentation_;
-      bool addChildValues_;
-   };
-
-# if defined(JSON_HAS_INT64)
-   std::string JSON_API valueToString( Int value );
-   std::string JSON_API valueToString( UInt value );
-# endif // if defined(JSON_HAS_INT64)
-   std::string JSON_API valueToString( LargestInt value );
-   std::string JSON_API valueToString( LargestUInt value );
-   std::string JSON_API valueToString( double value );
-   std::string JSON_API valueToString( bool value );
-   std::string JSON_API valueToQuotedString( const char *value );
-
-   /// \brief Output using the StyledStreamWriter.
-   /// \see Json::operator>>()
-   std::ostream& operator<<( std::ostream&, const Value &root );
-
-} // namespace Json
-
-
-
-#endif // JSON_WRITER_H_INCLUDED
-
-// //////////////////////////////////////////////////////////////////////
-// End of content of file: include/json/writer.h
-// //////////////////////////////////////////////////////////////////////
-
-
-
-
-
-#endif //ifndef JSON_AMALGATED_H_INCLUDED
diff --git a/ThirdParty/jsoncpp/vtk_jsoncpp.h.in b/ThirdParty/jsoncpp/vtk_jsoncpp.h.in
new file mode 100644
index 0000000..6a84dc1
--- /dev/null
+++ b/ThirdParty/jsoncpp/vtk_jsoncpp.h.in
@@ -0,0 +1,33 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtk_jsoncpp.h.in
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __vtk_jsoncpp_h
+#define __vtk_jsoncpp_h
+
+/* Use the JsonCpp library configured for VTK.  */
+#cmakedefine VTK_USE_SYSTEM_JSONCPP
+#ifdef VTK_USE_SYSTEM_JSONCPP
+# include <json/json.h>
+# ifndef JSON_FORWARDS_H_INCLUDED
+#  error Incorrect header included, check your include paths for conflicts
+# endif
+#else
+// Needed for Windows declspec import logic
+# if defined(_WIN32) && defined(VTK_BUILD_SHARED_LIBS)
+#  define JSON_DLL
+# endif
+# include <vtkjsoncpp/json/json.h>
+#endif
+
+#endif
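+
+/* Editor's sketch, not part of the upstream file: how VTK code is expected to
+   consume this wrapper header; the member names below are illustrative only.
+
+     #include "vtk_jsoncpp.h"   // resolves to the system or the bundled JsonCpp
+
+     Json::Value options;
+     options["verbose"] = true;
+     Json::StyledWriter writer;
+     std::string text = writer.write( options );
+*/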
diff --git a/ThirdParty/jsoncpp/vtkjsoncpp/CMakeLists.txt b/ThirdParty/jsoncpp/vtkjsoncpp/CMakeLists.txt
new file mode 100644
index 0000000..f81939e
--- /dev/null
+++ b/ThirdParty/jsoncpp/vtkjsoncpp/CMakeLists.txt
@@ -0,0 +1,32 @@
+project(JsonCpp)
+
+set(vtkjsoncpp_THIRD_PARTY 1)
+set(vtkjsoncpp_LIBRARIES vtkjsoncpp)
+vtk_module_export_info()
+
+if(VTK_REQUIRE_LARGE_FILE_SUPPORT)
+  add_definitions(
+    -D_LARGEFILE_SOURCE
+    -D_LARGEFILE64_SOURCE
+    -D_LARGE_FILES
+    -D_FILE_OFFSET_BITS=64
+    )
+endif()
+
+set(JSONCPP_SOURCES
+  jsoncpp.cpp
+)
+
+include_directories(${JSONCPP_SOURCE_DIR} ${JSONCPP_BINARY_DIR})
+
+if(BUILD_SHARED_LIBS AND WIN32)
+  add_definitions(-DJSON_DLL_BUILD)
+endif()
+vtk_add_library(vtkjsoncpp ${JSONCPP_SOURCES})
+
+if(NOT VTK_INSTALL_NO_DEVELOPMENT)
+  install(DIRECTORY
+    ${JsonCpp_SOURCE_DIR}/json
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkjsoncpp
+    COMPONENT Development)
+endif()
diff --git a/ThirdParty/jsoncpp/json/json-forwards.h b/ThirdParty/jsoncpp/vtkjsoncpp/json/json-forwards.h
similarity index 100%
rename from ThirdParty/jsoncpp/json/json-forwards.h
rename to ThirdParty/jsoncpp/vtkjsoncpp/json/json-forwards.h
diff --git a/ThirdParty/jsoncpp/vtkjsoncpp/json/json.h b/ThirdParty/jsoncpp/vtkjsoncpp/json/json.h
new file mode 100644
index 0000000..64291c8
--- /dev/null
+++ b/ThirdParty/jsoncpp/vtkjsoncpp/json/json.h
@@ -0,0 +1,1855 @@
+/// JsonCpp amalgamated header (http://jsoncpp.sourceforge.net/).
+/// It is intended to be used with #include <json/json.h>
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+/*
+The JsonCpp library's source code, including accompanying documentation,
+tests and demonstration applications, are licensed under the following
+conditions...
+
+The author (Baptiste Lepilleur) explicitly disclaims copyright in all
+jurisdictions which recognize such a disclaimer. In such jurisdictions,
+this software is released into the Public Domain.
+
+In jurisdictions which do not recognize Public Domain property (e.g. Germany as of
+2010), this software is Copyright (c) 2007-2010 by Baptiste Lepilleur, and is
+released under the terms of the MIT License (see below).
+
+In jurisdictions which recognize Public Domain property, the user of this
+software may choose to accept it either as 1) Public Domain, 2) under the
+conditions of the MIT License (see below), or 3) under the terms of dual
+Public Domain/MIT License conditions described here, as they choose.
+
+The MIT License is about as close to Public Domain as a license can get, and is
+described in clear, concise terms at:
+
+   http://en.wikipedia.org/wiki/MIT_License
+
+The full text of the MIT License follows:
+
+========================================================================
+Copyright (c) 2007-2010 Baptiste Lepilleur
+
+Permission is hereby granted, free of charge, to any person
+obtaining a copy of this software and associated documentation
+files (the "Software"), to deal in the Software without
+restriction, including without limitation the rights to use, copy,
+modify, merge, publish, distribute, sublicense, and/or sell copies
+of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
+========================================================================
+(END LICENSE TEXT)
+
+The MIT license is compatible with both the GPL and commercial
+software, affording one all of the rights of Public Domain with the
+minor nuisance of being required to keep the above copyright notice
+and license text in the source code. Note also that by accepting the
+Public Domain "license" you can re-license your copy using whatever
+license you like.
+
+*/
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: LICENSE
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+#ifndef JSON_AMALGATED_H_INCLUDED
+# define JSON_AMALGATED_H_INCLUDED
+/// If defined, indicates that the source file is amalgamated
+/// to prevent private header inclusion.
+#define JSON_IS_AMALGATED
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/config.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_CONFIG_H_INCLUDED
+# define JSON_CONFIG_H_INCLUDED
+
+/// If defined, indicates that the json library is embedded in the CppTL library.
+//# define JSON_IN_CPPTL 1
+
+/// If defined, indicates that json may leverage the CppTL library.
+//#  define JSON_USE_CPPTL 1
+/// If defined, indicates that the cpptl vector-based map should be used instead of std::map
+/// as the Value container.
+//#  define JSON_USE_CPPTL_SMALLMAP 1
+/// If defined, indicates that Json-specific containers should be used
+/// (hash table & simple deque container with customizable allocator).
+/// THIS FEATURE IS STILL EXPERIMENTAL! There are known bugs: see #3177332
+//#  define JSON_VALUE_USE_INTERNAL_MAP 1
+/// Force usage of the standard new/malloc based allocator instead of the memory pool based allocator.
+/// The memory pool allocator uses an optimization (initializing Value and ValueInternalLink
+/// as if they were PODs) that may cause some validation tools to report errors.
+/// Only has an effect if JSON_VALUE_USE_INTERNAL_MAP is defined.
+//#  define JSON_USE_SIMPLE_INTERNAL_ALLOCATOR 1
+
+/// If defined, indicates that Json uses exceptions to report invalid type manipulation
+/// instead of the C assert macro.
+# define JSON_USE_EXCEPTION 1
+
+/// If defined, indicates that the source file is amalgamated
+/// to prevent private header inclusion.
+/// Remarks: it is automatically defined in the generated amalgamated header.
+#define JSON_IS_AMALGAMATION
+
+
+# ifdef JSON_IN_CPPTL
+#  include <cpptl/config.h>
+#  ifndef JSON_USE_CPPTL
+#   define JSON_USE_CPPTL 1
+#  endif
+# endif
+
+# ifdef JSON_IN_CPPTL
+#  define JSON_API CPPTL_API
+# elif defined(JSON_DLL_BUILD)
+#  define JSON_API __declspec(dllexport)
+# elif defined(JSON_DLL)
+#  define JSON_API __declspec(dllimport)
+# else
+#  define JSON_API
+# endif
+
+// If JSON_NO_INT64 is defined, then Json only supports the C++ "int" type for integer
+// storage, and 64-bit integer support is disabled.
+// #define JSON_NO_INT64 1
+
+#if defined(_MSC_VER)  &&  _MSC_VER <= 1200 // MSVC 6
+// Microsoft Visual Studio 6 only supports conversion from __int64 to double
+// (no conversion from unsigned __int64).
+#define JSON_USE_INT64_DOUBLE_CONVERSION 1
+#endif // if defined(_MSC_VER)  &&  _MSC_VER <= 1200 // MSVC 6
+
+#if defined(_MSC_VER)  &&  _MSC_VER >= 1500 // MSVC 2008
+/// Indicates that the following function is deprecated.
+# define JSONCPP_DEPRECATED(message) __declspec(deprecated(message))
+#endif
+
+#if !defined(JSONCPP_DEPRECATED)
+# define JSONCPP_DEPRECATED(message)
+#endif // if !defined(JSONCPP_DEPRECATED)
+
+namespace Json {
+   typedef int Int;
+   typedef unsigned int UInt;
+# if defined(JSON_NO_INT64)
+   typedef int LargestInt;
+   typedef unsigned int LargestUInt;
+#  undef JSON_HAS_INT64
+# else // if defined(JSON_NO_INT64)
+   // For Microsoft Visual Studio, use compiler-specific types since long long is not supported
+#  if defined(_MSC_VER) // Microsoft Visual Studio
+   typedef __int64 Int64;
+   typedef unsigned __int64 UInt64;
+#  else // if defined(_MSC_VER) // Other platforms, use long long
+   typedef long long int Int64;
+   typedef unsigned long long int UInt64;
+#  endif // if defined(_MSC_VER)
+   typedef Int64 LargestInt;
+   typedef UInt64 LargestUInt;
+#  define JSON_HAS_INT64
+# endif // if defined(JSON_NO_INT64)
+} // end namespace Json
+
+
+#endif // JSON_CONFIG_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/config.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/forwards.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_FORWARDS_H_INCLUDED
+# define JSON_FORWARDS_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+# include "config.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json {
+
+   // writer.h
+   class FastWriter;
+   class StyledWriter;
+
+   // reader.h
+   class Reader;
+
+   // features.h
+   class Features;
+
+   // value.h
+   typedef unsigned int ArrayIndex;
+   class StaticString;
+   class Path;
+   class PathArgument;
+   class Value;
+   class ValueIteratorBase;
+   class ValueIterator;
+   class ValueConstIterator;
+#ifdef JSON_VALUE_USE_INTERNAL_MAP
+   class ValueMapAllocator;
+   class ValueInternalLink;
+   class ValueInternalArray;
+   class ValueInternalMap;
+#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
+
+} // namespace Json
+
+
+#endif // JSON_FORWARDS_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/forwards.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/features.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_FEATURES_H_INCLUDED
+# define CPPTL_JSON_FEATURES_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+# include "forwards.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+
+namespace Json {
+
+   /** \brief Configuration passed to reader and writer.
+    * This configuration object can be used to force the Reader or Writer
+    * to behave in a standards-conforming way.
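+    *
+    * Editor's note: a minimal usage sketch, not part of the upstream documentation.
+    * \code
+    * Json::Reader strictReader( Json::Features::strictMode() ); // comments forbidden
+    * Json::Reader lenientReader( Json::Features::all() );       // default behaviour
+    * \endcode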
+    */
+   class JSON_API Features
+   {
+   public:
+      /** \brief A configuration that allows all features and assumes all strings are UTF-8.
+       * - C & C++ comments are allowed
+       * - Root object can be any JSON value
+       * - Assumes Value strings are encoded in UTF-8
+       */
+      static Features all();
+
+      /** \brief A configuration that is strictly compatible with the JSON specification.
+       * - Comments are forbidden.
+       * - Root object must be either an array or an object value.
+       * - Assumes Value strings are encoded in UTF-8
+       */
+      static Features strictMode();
+
+      /** \brief Initialize the configuration like Features::all().
+       */
+      Features();
+
+      /// \c true if comments are allowed. Default: \c true.
+      bool allowComments_;
+
+      /// \c true if root must be either an array or an object value. Default: \c false.
+      bool strictRoot_;
+   };
+
+} // namespace Json
+
+#endif // CPPTL_JSON_FEATURES_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/features.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/value.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_H_INCLUDED
+# define CPPTL_JSON_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+# include "forwards.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+# include <string>
+# include <vector>
+
+# ifndef JSON_USE_CPPTL_SMALLMAP
+#  include <map>
+# else
+#  include <cpptl/smallmap.h>
+# endif
+# ifdef JSON_USE_CPPTL
+#  include <cpptl/forwards.h>
+# endif
+
+/** \brief JSON (JavaScript Object Notation).
+ */
+namespace Json {
+
+   /** \brief Type of the value held by a Value object.
+    */
+   enum ValueType
+   {
+      nullValue = 0, ///< 'null' value
+      intValue,      ///< signed integer value
+      uintValue,     ///< unsigned integer value
+      realValue,     ///< double value
+      stringValue,   ///< UTF-8 string value
+      booleanValue,  ///< bool value
+      arrayValue,    ///< array value (ordered list)
+      objectValue    ///< object value (collection of name/value pairs).
+   };
+
+   enum CommentPlacement
+   {
+      commentBefore = 0,        ///< a comment placed on the line before a value
+      commentAfterOnSameLine,   ///< a comment just after a value on the same line
+      commentAfter,             ///< a comment on the line after a value (only makes sense for the root value)
+      numberOfCommentPlacement
+   };
+
+//# ifdef JSON_USE_CPPTL
+//   typedef CppTL::AnyEnumerator<const char *> EnumMemberNames;
+//   typedef CppTL::AnyEnumerator<const Value &> EnumValues;
+//# endif
+
+   /** \brief Lightweight wrapper to tag static string.
+    *
+    * The Value constructor and objectValue member assignment take advantage of a
+    * StaticString and avoid the cost of string duplication when storing the
+    * string or the member name.
+    *
+    * Example of usage:
+    * \code
+    * Json::Value aValue( StaticString("some text") );
+    * Json::Value object;
+    * static const StaticString code("code");
+    * object[code] = 1234;
+    * \endcode
+    */
+   class JSON_API StaticString
+   {
+   public:
+      explicit StaticString( const char *czstring )
+         : str_( czstring )
+      {
+      }
+
+      operator const char *() const
+      {
+         return str_;
+      }
+
+      const char *c_str() const
+      {
+         return str_;
+      }
+
+   private:
+      const char *str_;
+   };
+
+   /** \brief Represents a <a HREF="http://www.json.org">JSON</a> value.
+    *
+    * This class is a discriminated union wrapper that can represent one of:
+    * - signed integer [range: Value::minInt - Value::maxInt]
+    * - unsigned integer (range: 0 - Value::maxUInt)
+    * - double
+    * - UTF-8 string
+    * - boolean
+    * - 'null'
+    * - an ordered list of Value
+    * - collection of name/value pairs (javascript object)
+    *
+    * The type of the held value is represented by a #ValueType and
+    * can be obtained using type().
+    *
+    * Values of an #objectValue or #arrayValue can be accessed using operator[]() methods.
+    * Non-const methods will automatically create a #nullValue element
+    * if it does not exist.
+    * The sequence of an #arrayValue will be automatically resized and initialized
+    * with #nullValue. resize() can be used to enlarge or truncate an #arrayValue.
+    *
+    * The get() methods can be used to obtain a default value when the required element
+    * does not exist.
+    *
+    * It is possible to iterate over the members of an #objectValue using
+    * the getMemberNames() method.
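+    *
+    * Editor's note: a brief usage sketch, not part of the upstream documentation;
+    * the member names used here are illustrative only.
+    * \code
+    * Json::Value root( Json::objectValue );
+    * root["name"] = "example";            // member is created on demand
+    * root["scores"].append( 42 );         // an arrayValue is created on demand
+    * root["scores"].resize( 3 );          // missing elements become null
+    * std::string name = root.get( "name", "unknown" ).asString();
+    * \endcode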
+    */
+   class JSON_API Value
+   {
+      friend class ValueIteratorBase;
+# ifdef JSON_VALUE_USE_INTERNAL_MAP
+      friend class ValueInternalLink;
+      friend class ValueInternalMap;
+# endif
+   public:
+      typedef std::vector<std::string> Members;
+      typedef ValueIterator iterator;
+      typedef ValueConstIterator const_iterator;
+      typedef Json::UInt UInt;
+      typedef Json::Int Int;
+# if defined(JSON_HAS_INT64)
+      typedef Json::UInt64 UInt64;
+      typedef Json::Int64 Int64;
+#endif // defined(JSON_HAS_INT64)
+      typedef Json::LargestInt LargestInt;
+      typedef Json::LargestUInt LargestUInt;
+      typedef Json::ArrayIndex ArrayIndex;
+
+      static const Value null;
+      /// Minimum signed integer value that can be stored in a Json::Value.
+      static const LargestInt minLargestInt;
+      /// Maximum signed integer value that can be stored in a Json::Value.
+      static const LargestInt maxLargestInt;
+      /// Maximum unsigned integer value that can be stored in a Json::Value.
+      static const LargestUInt maxLargestUInt;
+
+      /// Minimum signed int value that can be stored in a Json::Value.
+      static const Int minInt;
+      /// Maximum signed int value that can be stored in a Json::Value.
+      static const Int maxInt;
+      /// Maximum unsigned int value that can be stored in a Json::Value.
+      static const UInt maxUInt;
+
+      /// Minimum signed 64 bits int value that can be stored in a Json::Value.
+      static const Int64 minInt64;
+      /// Maximum signed 64 bits int value that can be stored in a Json::Value.
+      static const Int64 maxInt64;
+      /// Maximum unsigned 64 bits int value that can be stored in a Json::Value.
+      static const UInt64 maxUInt64;
+
+   private:
+#ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+# ifndef JSON_VALUE_USE_INTERNAL_MAP
+      class CZString
+      {
+      public:
+         enum DuplicationPolicy
+         {
+            noDuplication = 0,
+            duplicate,
+            duplicateOnCopy
+         };
+         CZString( ArrayIndex index );
+         CZString( const char *cstr, DuplicationPolicy allocate );
+         CZString( const CZString &other );
+         ~CZString();
+         CZString &operator =( const CZString &other );
+         bool operator<( const CZString &other ) const;
+         bool operator==( const CZString &other ) const;
+         ArrayIndex index() const;
+         const char *c_str() const;
+         bool isStaticString() const;
+      private:
+         void swap( CZString &other );
+         const char *cstr_;
+         ArrayIndex index_;
+      };
+
+   public:
+#  ifndef JSON_USE_CPPTL_SMALLMAP
+      typedef std::map<CZString, Value> ObjectValues;
+#  else
+      typedef CppTL::SmallMap<CZString, Value> ObjectValues;
+#  endif // ifndef JSON_USE_CPPTL_SMALLMAP
+# endif // ifndef JSON_VALUE_USE_INTERNAL_MAP
+#endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+
+   public:
+      /** \brief Create a default Value of the given type.
+
+        This is a very useful constructor.
+        To create an empty array, pass arrayValue.
+        To create an empty object, pass objectValue.
+        Another Value can then be set to this one by assignment.
+    This is useful since clear() and resize() will not alter types.
+
+        Examples:
+    \code
+    Json::Value null_value; // null
+    Json::Value arr_value(Json::arrayValue); // []
+    Json::Value obj_value(Json::objectValue); // {}
+    \endcode
+      */
+      Value( ValueType type = nullValue );
+      Value( Int value );
+      Value( UInt value );
+#if defined(JSON_HAS_INT64)
+      Value( Int64 value );
+      Value( UInt64 value );
+#endif // if defined(JSON_HAS_INT64)
+      Value( double value );
+      Value( const char *value );
+      Value( const char *beginValue, const char *endValue );
+      /** \brief Constructs a value from a static string.
+
+       * Like other value string constructors but does not duplicate the string for
+       * internal storage. The given string must remain valid after the call to this
+       * constructor.
+       * Example of usage:
+       * \code
+       * Json::Value aValue( StaticString("some text") );
+       * \endcode
+       */
+      Value( const StaticString &value );
+      Value( const std::string &value );
+# ifdef JSON_USE_CPPTL
+      Value( const CppTL::ConstString &value );
+# endif
+      Value( bool value );
+      Value( const Value &other );
+      ~Value();
+
+      Value &operator=( const Value &other );
+      /// Swap values.
+      /// \note Currently, comments are intentionally not swapped, for
+      /// both logic and efficiency.
+      void swap( Value &other );
+
+      ValueType type() const;
+
+      bool operator <( const Value &other ) const;
+      bool operator <=( const Value &other ) const;
+      bool operator >=( const Value &other ) const;
+      bool operator >( const Value &other ) const;
+
+      bool operator ==( const Value &other ) const;
+      bool operator !=( const Value &other ) const;
+
+      int compare( const Value &other ) const;
+
+      const char *asCString() const;
+      std::string asString() const;
+# ifdef JSON_USE_CPPTL
+      CppTL::ConstString asConstString() const;
+# endif
+      Int asInt() const;
+      UInt asUInt() const;
+      Int64 asInt64() const;
+      UInt64 asUInt64() const;
+      LargestInt asLargestInt() const;
+      LargestUInt asLargestUInt() const;
+      float asFloat() const;
+      double asDouble() const;
+      bool asBool() const;
+
+      bool isNull() const;
+      bool isBool() const;
+      bool isInt() const;
+      bool isUInt() const;
+      bool isIntegral() const;
+      bool isDouble() const;
+      bool isNumeric() const;
+      bool isString() const;
+      bool isArray() const;
+      bool isObject() const;
+
+      bool isConvertibleTo( ValueType other ) const;
+
+      /// Number of values in array or object
+      ArrayIndex size() const;
+
+      /// \brief Return true if empty array, empty object, or null;
+      /// otherwise, false.
+      bool empty() const;
+
+      /// Return isNull()
+      bool operator!() const;
+
+      /// Remove all object members and array elements.
+      /// \pre type() is arrayValue, objectValue, or nullValue
+      /// \post type() is unchanged
+      void clear();
+
+      /// Resize the array to size elements.
+      /// New elements are initialized to null.
+      /// May only be called on nullValue or arrayValue.
+      /// \pre type() is arrayValue or nullValue
+      /// \post type() is arrayValue
+      void resize( ArrayIndex size );
+
+      /// Access an array element (zero-based index).
+      /// If the array contains fewer than index+1 elements, then null values are inserted
+      /// in the array so that its size is index+1.
+      /// (You may need to say 'value[0u]' to get your compiler to distinguish
+      ///  this from the operator[] which takes a string.)
+      Value &operator[]( ArrayIndex index );
+
+      /// Access an array element (zero-based index).
+      /// If the array contains fewer than index+1 elements, then null values are inserted
+      /// in the array so that its size is index+1.
+      /// (You may need to say 'value[0u]' to get your compiler to distinguish
+      ///  this from the operator[] which takes a string.)
+      Value &operator[]( int index );
+
+      /// Access an array element (zero-based index)
+      /// (You may need to say 'value[0u]' to get your compiler to distinguish
+      ///  this from the operator[] which takes a string.)
+      const Value &operator[]( ArrayIndex index ) const;
+
+      /// Access an array element (zero-based index)
+      /// (You may need to say 'value[0u]' to get your compiler to distinguish
+      ///  this from the operator[] which takes a string.)
+      const Value &operator[]( int index ) const;
+
+      /// If the array contains at least index+1 elements, returns the element value,
+      /// otherwise returns defaultValue.
+      Value get( ArrayIndex index,
+                 const Value &defaultValue ) const;
+      /// Return true if index < size().
+      bool isValidIndex( ArrayIndex index ) const;
+      /// \brief Append value to array at the end.
+      ///
+      /// Equivalent to jsonvalue[jsonvalue.size()] = value;
+      Value &append( const Value &value );
+
+      /// Access an object value by name, create a null member if it does not exist.
+      Value &operator[]( const char *key );
+      /// Access an object value by name, returns null if there is no member with that name.
+      const Value &operator[]( const char *key ) const;
+      /// Access an object value by name, create a null member if it does not exist.
+      Value &operator[]( const std::string &key );
+      /// Access an object value by name, returns null if there is no member with that name.
+      const Value &operator[]( const std::string &key ) const;
+      /** \brief Access an object value by name, create a null member if it does not exist.
+
+       * If the object has no entry for that name, then the member name used to store
+       * the new entry is not duplicated.
+       * Example of use:
+       * \code
+       * Json::Value object;
+       * static const StaticString code("code");
+       * object[code] = 1234;
+       * \endcode
+       */
+      Value &operator[]( const StaticString &key );
+# ifdef JSON_USE_CPPTL
+      /// Access an object value by name, create a null member if it does not exist.
+      Value &operator[]( const CppTL::ConstString &key );
+      /// Access an object value by name, returns null if there is no member with that name.
+      const Value &operator[]( const CppTL::ConstString &key ) const;
+# endif
+      /// Return the member named key if it exists, defaultValue otherwise.
+      Value get( const char *key,
+                 const Value &defaultValue ) const;
+      /// Return the member named key if it exists, defaultValue otherwise.
+      Value get( const std::string &key,
+                 const Value &defaultValue ) const;
+# ifdef JSON_USE_CPPTL
+      /// Return the member named key if it exists, defaultValue otherwise.
+      Value get( const CppTL::ConstString &key,
+                 const Value &defaultValue ) const;
+# endif
+      /// \brief Remove and return the named member.
+      ///
+      /// Does nothing if the member does not exist.
+      /// \return the removed Value, or null.
+      /// \pre type() is objectValue or nullValue
+      /// \post type() is unchanged
+      Value removeMember( const char* key );
+      /// Same as removeMember(const char*)
+      Value removeMember( const std::string &key );
+
+      /// Return true if the object has a member named key.
+      bool isMember( const char *key ) const;
+      /// Return true if the object has a member named key.
+      bool isMember( const std::string &key ) const;
+# ifdef JSON_USE_CPPTL
+      /// Return true if the object has a member named key.
+      bool isMember( const CppTL::ConstString &key ) const;
+# endif
+
+      /// \brief Return a list of the member names.
+      ///
+      /// If null, return an empty list.
+      /// \pre type() is objectValue or nullValue
+      /// \post if type() was nullValue, it remains nullValue
+      Members getMemberNames() const;
+
+//# ifdef JSON_USE_CPPTL
+//      EnumMemberNames enumMemberNames() const;
+//      EnumValues enumValues() const;
+//# endif
+
+      /// Comments must be //... or /* ... */
+      void setComment( const char *comment,
+                       CommentPlacement placement );
+      /// Comments must be //... or /* ... */
+      void setComment( const std::string &comment,
+                       CommentPlacement placement );
+      bool hasComment( CommentPlacement placement ) const;
+      /// Include delimiters and embedded newlines.
+      std::string getComment( CommentPlacement placement ) const;
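+
+      // Editor's note: a short sketch of the comment API, not upstream documentation;
+      // the comment text below is illustrative only:
+      //    value.setComment( "// answer to everything", Json::commentBefore );
+      //    std::string text = value.getComment( Json::commentBefore ); // keeps the "//"
+      //    bool hasIt = value.hasComment( Json::commentBefore );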
+
+      std::string toStyledString() const;
+
+      const_iterator begin() const;
+      const_iterator end() const;
+
+      iterator begin();
+      iterator end();
+
+   private:
+      Value &resolveReference( const char *key,
+                               bool isStatic );
+
+# ifdef JSON_VALUE_USE_INTERNAL_MAP
+      inline bool isItemAvailable() const
+      {
+         return itemIsUsed_ == 0;
+      }
+
+      inline void setItemUsed( bool isUsed = true )
+      {
+         itemIsUsed_ = isUsed ? 1 : 0;
+      }
+
+      inline bool isMemberNameStatic() const
+      {
+         return memberNameIsStatic_ == 0;
+      }
+
+      inline void setMemberNameIsStatic( bool isStatic )
+      {
+         memberNameIsStatic_ = isStatic ? 1 : 0;
+      }
+# endif // # ifdef JSON_VALUE_USE_INTERNAL_MAP
+
+   private:
+      struct CommentInfo
+      {
+         CommentInfo();
+         ~CommentInfo();
+
+         void setComment( const char *text );
+
+         char *comment_;
+      };
+
+      //struct MemberNamesTransform
+      //{
+      //   typedef const char *result_type;
+      //   const char *operator()( const CZString &name ) const
+      //   {
+      //      return name.c_str();
+      //   }
+      //};
+
+      union ValueHolder
+      {
+         LargestInt int_;
+         LargestUInt uint_;
+         double real_;
+         bool bool_;
+         char *string_;
+# ifdef JSON_VALUE_USE_INTERNAL_MAP
+         ValueInternalArray *array_;
+         ValueInternalMap *map_;
+#else
+         ObjectValues *map_;
+# endif
+      } value_;
+      ValueType type_ : 8;
+      int allocated_ : 1;     // Notes: if declared as bool, bitfield is useless.
+# ifdef JSON_VALUE_USE_INTERNAL_MAP
+      unsigned int itemIsUsed_ : 1;      // used by the ValueInternalMap container.
+      int memberNameIsStatic_ : 1;       // used by the ValueInternalMap container.
+# endif
+      CommentInfo *comments_;
+   };
+
+
+   /** \brief Experimental and untested: represents an element of the "path" to access a node.
+    */
+   class PathArgument
+   {
+   public:
+      friend class Path;
+
+      PathArgument();
+      PathArgument( ArrayIndex index );
+      PathArgument( const char *key );
+      PathArgument( const std::string &key );
+
+   private:
+      enum Kind
+      {
+         kindNone = 0,
+         kindIndex,
+         kindKey
+      };
+      std::string key_;
+      ArrayIndex index_;
+      Kind kind_;
+   };
+
+   /** \brief Experimental and untested: represents a "path" to access a node.
+    *
+    * Syntax:
+    * - "." => root node
+    * - ".[n]" => elements at index 'n' of root node (an array value)
+    * - ".name" => member named 'name' of root node (an object value)
+    * - ".name1.name2.name3"
+    * - ".[0][1][2].name1[3]"
+    * - ".%" => member name is provided as parameter
+    * - ".[%]" => index is provided as parameter
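+    *
+    * Editor's note: a hedged usage sketch, not part of the upstream documentation
+    * (the class itself is marked experimental); the key names are illustrative only.
+    * \code
+    * Json::Path path( ".settings.colors[%]", Json::PathArgument( 0u ) );
+    * const Json::Value &firstColor = path.resolve( root ); // 'root' parsed elsewhere
+    * \endcode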
+    */
+   class Path
+   {
+   public:
+      Path( const std::string &path,
+            const PathArgument &a1 = PathArgument(),
+            const PathArgument &a2 = PathArgument(),
+            const PathArgument &a3 = PathArgument(),
+            const PathArgument &a4 = PathArgument(),
+            const PathArgument &a5 = PathArgument() );
+
+      const Value &resolve( const Value &root ) const;
+      Value resolve( const Value &root,
+                     const Value &defaultValue ) const;
+      /// Creates the "path" to access the specified node and returns a reference to the node.
+      Value &make( Value &root ) const;
+
+   private:
+      typedef std::vector<const PathArgument *> InArgs;
+      typedef std::vector<PathArgument> Args;
+
+      void makePath( const std::string &path,
+                     const InArgs &in );
+      void addPathInArg( const std::string &path,
+                         const InArgs &in,
+                         InArgs::const_iterator &itInArg,
+                         PathArgument::Kind kind );
+      void invalidPath( const std::string &path,
+                        int location );
+
+      Args args_;
+   };
+
+
+
+#ifdef JSON_VALUE_USE_INTERNAL_MAP
+   /** \brief Allocator to customize Value internal map.
+    * Below is an example of a simple implementation (the default implementation
+    * actually uses a memory pool for speed).
+    * \code
+      class DefaultValueMapAllocator : public ValueMapAllocator
+      {
+      public: // overridden from ValueMapAllocator
+         virtual ValueInternalMap *newMap()
+         {
+            return new ValueInternalMap();
+         }
+
+         virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other )
+         {
+            return new ValueInternalMap( other );
+         }
+
+         virtual void destructMap( ValueInternalMap *map )
+         {
+            delete map;
+         }
+
+         virtual ValueInternalLink *allocateMapBuckets( unsigned int size )
+         {
+            return new ValueInternalLink[size];
+         }
+
+         virtual void releaseMapBuckets( ValueInternalLink *links )
+         {
+            delete [] links;
+         }
+
+         virtual ValueInternalLink *allocateMapLink()
+         {
+            return new ValueInternalLink();
+         }
+
+         virtual void releaseMapLink( ValueInternalLink *link )
+         {
+            delete link;
+         }
+      };
+    * \endcode
+    */
+   class JSON_API ValueMapAllocator
+   {
+   public:
+      virtual ~ValueMapAllocator();
+      virtual ValueInternalMap *newMap() = 0;
+      virtual ValueInternalMap *newMapCopy( const ValueInternalMap &other ) = 0;
+      virtual void destructMap( ValueInternalMap *map ) = 0;
+      virtual ValueInternalLink *allocateMapBuckets( unsigned int size ) = 0;
+      virtual void releaseMapBuckets( ValueInternalLink *links ) = 0;
+      virtual ValueInternalLink *allocateMapLink() = 0;
+      virtual void releaseMapLink( ValueInternalLink *link ) = 0;
+   };
+
+   /** \brief ValueInternalMap hash-map bucket chain link (for internal use only).
+    * \internal previous_ & next_ allow for bidirectional traversal.
+    */
+   class JSON_API ValueInternalLink
+   {
+   public:
+      enum { itemPerLink = 6 };  // sizeof(ValueInternalLink) = 128 on 32 bits architecture.
+      enum InternalFlags {
+         flagAvailable = 0,
+         flagUsed = 1
+      };
+
+      ValueInternalLink();
+
+      ~ValueInternalLink();
+
+      Value items_[itemPerLink];
+      char *keys_[itemPerLink];
+      ValueInternalLink *previous_;
+      ValueInternalLink *next_;
+   };
+
+
+   /** \brief A linked page based hash-table implementation used internally by Value.
+    * \internal ValueInternalMap is a traditional bucket-based hash table, with a linked
+    * list in each bucket to handle collisions. There is an additional twist in that
+    * each node of the collision linked list is a page containing a fixed number of
+    * values. This provides a better compromise between memory usage and speed.
+    *
+    * Each bucket is made up of a chained list of ValueInternalLink. The last
+    * link of a given bucket can be found in the 'previous_' field of the following bucket.
+    * The last link of the last bucket is stored in tailLink_ as it has no following bucket.
+    * Only the last link of a bucket may contain 'available' items. The last link always
+    * contains at least one element unless it is the bucket's very first link.
+    */
+   class JSON_API ValueInternalMap
+   {
+      friend class ValueIteratorBase;
+      friend class Value;
+   public:
+      typedef unsigned int HashKey;
+      typedef unsigned int BucketIndex;
+
+# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+      struct IteratorState
+      {
+         IteratorState()
+            : map_(0)
+            , link_(0)
+            , itemIndex_(0)
+            , bucketIndex_(0)
+         {
+         }
+         ValueInternalMap *map_;
+         ValueInternalLink *link_;
+         BucketIndex itemIndex_;
+         BucketIndex bucketIndex_;
+      };
+# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+
+      ValueInternalMap();
+      ValueInternalMap( const ValueInternalMap &other );
+      ValueInternalMap &operator =( const ValueInternalMap &other );
+      ~ValueInternalMap();
+
+      void swap( ValueInternalMap &other );
+
+      BucketIndex size() const;
+
+      void clear();
+
+      bool reserveDelta( BucketIndex growth );
+
+      bool reserve( BucketIndex newItemCount );
+
+      const Value *find( const char *key ) const;
+
+      Value *find( const char *key );
+
+      Value &resolveReference( const char *key,
+                               bool isStatic );
+
+      void remove( const char *key );
+
+      void doActualRemove( ValueInternalLink *link,
+                           BucketIndex index,
+                           BucketIndex bucketIndex );
+
+      ValueInternalLink *&getLastLinkInBucket( BucketIndex bucketIndex );
+
+      Value &setNewItem( const char *key,
+                         bool isStatic,
+                         ValueInternalLink *link,
+                         BucketIndex index );
+
+      Value &unsafeAdd( const char *key,
+                        bool isStatic,
+                        HashKey hashedKey );
+
+      HashKey hash( const char *key ) const;
+
+      int compare( const ValueInternalMap &other ) const;
+
+   private:
+      void makeBeginIterator( IteratorState &it ) const;
+      void makeEndIterator( IteratorState &it ) const;
+      static bool equals( const IteratorState &x, const IteratorState &other );
+      static void increment( IteratorState &iterator );
+      static void incrementBucket( IteratorState &iterator );
+      static void decrement( IteratorState &iterator );
+      static const char *key( const IteratorState &iterator );
+      static const char *key( const IteratorState &iterator, bool &isStatic );
+      static Value &value( const IteratorState &iterator );
+      static int distance( const IteratorState &x, const IteratorState &y );
+
+   private:
+      ValueInternalLink *buckets_;
+      ValueInternalLink *tailLink_;
+      BucketIndex bucketsSize_;
+      BucketIndex itemCount_;
+   };
+
+   /** \brief A simplified deque implementation used internally by Value.
+   * \internal
+   * It is based on a list of fixed-size "pages"; each page contains a fixed number of items.
+   * Instead of using a linked list, an array of pointers is used for fast item look-up
+   * (see the worked example below).
+   * Look-up for an element is as follows:
+   * - compute page index: pageIndex = itemIndex / itemsPerPage
+   * - look-up item in page: pages_[pageIndex][itemIndex % itemsPerPage]
+   *
+   * Insertion is amortized constant time (only the array of page pointers
+   * needs to be reallocated when items are appended).
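+   *
+   * Editor's note: a small worked example, not from the upstream documentation.
+   * With itemsPerPage = 8, the item at index 19 lives on page 19 / 8 = 2,
+   * at offset 19 % 8 = 3, i.e. pages_[2][3].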
+   */
+   class JSON_API ValueInternalArray
+   {
+      friend class Value;
+      friend class ValueIteratorBase;
+   public:
+      enum { itemsPerPage = 8 };    // should be a power of 2 for fast divide and modulo.
+      typedef Value::ArrayIndex ArrayIndex;
+      typedef unsigned int PageIndex;
+
+# ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+      struct IteratorState // Must be a POD
+      {
+         IteratorState()
+            : array_(0)
+            , currentPageIndex_(0)
+            , currentItemIndex_(0)
+         {
+         }
+         ValueInternalArray *array_;
+         Value **currentPageIndex_;
+         unsigned int currentItemIndex_;
+      };
+# endif // ifndef JSONCPP_DOC_EXCLUDE_IMPLEMENTATION
+
+      ValueInternalArray();
+      ValueInternalArray( const ValueInternalArray &other );
+      ValueInternalArray &operator =( const ValueInternalArray &other );
+      ~ValueInternalArray();
+      void swap( ValueInternalArray &other );
+
+      void clear();
+      void resize( ArrayIndex newSize );
+
+      Value &resolveReference( ArrayIndex index );
+
+      Value *find( ArrayIndex index ) const;
+
+      ArrayIndex size() const;
+
+      int compare( const ValueInternalArray &other ) const;
+
+   private:
+      static bool equals( const IteratorState &x, const IteratorState &other );
+      static void increment( IteratorState &iterator );
+      static void decrement( IteratorState &iterator );
+      static Value &dereference( const IteratorState &iterator );
+      static Value &unsafeDereference( const IteratorState &iterator );
+      static int distance( const IteratorState &x, const IteratorState &y );
+      static ArrayIndex indexOf( const IteratorState &iterator );
+      void makeBeginIterator( IteratorState &it ) const;
+      void makeEndIterator( IteratorState &it ) const;
+      void makeIterator( IteratorState &it, ArrayIndex index ) const;
+
+      void makeIndexValid( ArrayIndex index );
+
+      Value **pages_;
+      ArrayIndex size_;
+      PageIndex pageCount_;
+   };
+
+   /** \brief Experimental: do not use. Allocator to customize Value internal array.
+    * Below is an example of a simple implementation (the actual implementation
+    * uses a memory pool).
+      \code
+class DefaultValueArrayAllocator : public ValueArrayAllocator
+{
+public: // overridden from ValueArrayAllocator
+   virtual ~DefaultValueArrayAllocator()
+   {
+   }
+
+   virtual ValueInternalArray *newArray()
+   {
+      return new ValueInternalArray();
+   }
+
+   virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other )
+   {
+      return new ValueInternalArray( other );
+   }
+
+   virtual void destruct( ValueInternalArray *array )
+   {
+      delete array;
+   }
+
+   virtual void reallocateArrayPageIndex( Value **&indexes,
+                                          ValueInternalArray::PageIndex &indexCount,
+                                          ValueInternalArray::PageIndex minNewIndexCount )
+   {
+      ValueInternalArray::PageIndex newIndexCount = (indexCount*3)/2 + 1;
+      if ( minNewIndexCount > newIndexCount )
+         newIndexCount = minNewIndexCount;
+      void *newIndexes = realloc( indexes, sizeof(Value*) * newIndexCount );
+      if ( !newIndexes )
+         throw std::bad_alloc();
+      indexCount = newIndexCount;
+      indexes = static_cast<Value **>( newIndexes );
+   }
+   virtual void releaseArrayPageIndex( Value **indexes,
+                                       ValueInternalArray::PageIndex indexCount )
+   {
+      if ( indexes )
+         free( indexes );
+   }
+
+   virtual Value *allocateArrayPage()
+   {
+      return static_cast<Value *>( malloc( sizeof(Value) * ValueInternalArray::itemsPerPage ) );
+   }
+
+   virtual void releaseArrayPage( Value *value )
+   {
+      if ( value )
+         free( value );
+   }
+};
+      \endcode
+    */
+   class JSON_API ValueArrayAllocator
+   {
+   public:
+      virtual ~ValueArrayAllocator();
+      virtual ValueInternalArray *newArray() = 0;
+      virtual ValueInternalArray *newArrayCopy( const ValueInternalArray &other ) = 0;
+      virtual void destructArray( ValueInternalArray *array ) = 0;
+      /** \brief Reallocate array page index.
+       * Reallocates the array of pointers to pages.
+       * \param indexes [input] pointer to the current index. May be \c NULL.
+       *                [output] pointer to the new index of at least
+       *                         \a minNewIndexCount pages.
+       * \param indexCount [input] current number of pages in the index.
+       *                   [output] number of pages the reallocated index can handle.
+       *                            \b MUST be >= \a minNewIndexCount.
+       * \param minNewIndexCount Minimum number of pages the new index must be able to
+       *                         handle.
+       */
+      virtual void reallocateArrayPageIndex( Value **&indexes,
+                                             ValueInternalArray::PageIndex &indexCount,
+                                             ValueInternalArray::PageIndex minNewIndexCount ) = 0;
+      virtual void releaseArrayPageIndex( Value **indexes,
+                                          ValueInternalArray::PageIndex indexCount ) = 0;
+      virtual Value *allocateArrayPage() = 0;
+      virtual void releaseArrayPage( Value *value ) = 0;
+   };
+#endif // #ifdef JSON_VALUE_USE_INTERNAL_MAP
+
+
+   /** \brief base class for Value iterators.
+    *
+    */
+   class ValueIteratorBase
+   {
+   public:
+      typedef unsigned int size_t;
+      typedef int difference_type;
+      typedef ValueIteratorBase SelfType;
+
+      ValueIteratorBase();
+#ifndef JSON_VALUE_USE_INTERNAL_MAP
+      explicit ValueIteratorBase( const Value::ObjectValues::iterator &current );
+#else
+      ValueIteratorBase( const ValueInternalArray::IteratorState &state );
+      ValueIteratorBase( const ValueInternalMap::IteratorState &state );
+#endif
+
+      bool operator ==( const SelfType &other ) const
+      {
+         return isEqual( other );
+      }
+
+      bool operator !=( const SelfType &other ) const
+      {
+         return !isEqual( other );
+      }
+
+      difference_type operator -( const SelfType &other ) const
+      {
+         return computeDistance( other );
+      }
+
+      /// Return either the index or the member name of the referenced value as a Value.
+      Value key() const;
+
+      /// Return the index of the referenced Value. -1 if it is not an arrayValue.
+      UInt index() const;
+
+      /// Return the member name of the referenced Value. "" if it is not an objectValue.
+      const char *memberName() const;
+
+   protected:
+      Value &deref() const;
+
+      void increment();
+
+      void decrement();
+
+      difference_type computeDistance( const SelfType &other ) const;
+
+      bool isEqual( const SelfType &other ) const;
+
+      void copy( const SelfType &other );
+
+   private:
+#ifndef JSON_VALUE_USE_INTERNAL_MAP
+      Value::ObjectValues::iterator current_;
+      // Indicates that iterator is for a null value.
+      bool isNull_;
+#else
+      union
+      {
+         ValueInternalArray::IteratorState array_;
+         ValueInternalMap::IteratorState map_;
+      } iterator_;
+      bool isArray_;
+#endif
+   };
+
+   /** \brief const iterator for object and array value.
+    *
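+    * Editor's note: a brief iteration sketch, not part of the upstream documentation;
+    * it assumes 'root' is an objectValue and <iostream> is available.
+    * \code
+    * for ( Json::Value::const_iterator it = root.begin(); it != root.end(); ++it )
+    *    std::cout << it.memberName() << ": " << (*it).toStyledString();
+    * \endcode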
+    */
+   class ValueConstIterator : public ValueIteratorBase
+   {
+      friend class Value;
+   public:
+      typedef unsigned int size_t;
+      typedef int difference_type;
+      typedef const Value &reference;
+      typedef const Value *pointer;
+      typedef ValueConstIterator SelfType;
+
+      ValueConstIterator();
+   private:
+      /*! \internal Used by Value to create an iterator.
+       */
+#ifndef JSON_VALUE_USE_INTERNAL_MAP
+      explicit ValueConstIterator( const Value::ObjectValues::iterator &current );
+#else
+      ValueConstIterator( const ValueInternalArray::IteratorState &state );
+      ValueConstIterator( const ValueInternalMap::IteratorState &state );
+#endif
+   public:
+      SelfType &operator =( const ValueIteratorBase &other );
+
+      SelfType operator++( int )
+      {
+         SelfType temp( *this );
+         ++*this;
+         return temp;
+      }
+
+      SelfType operator--( int )
+      {
+         SelfType temp( *this );
+         --*this;
+         return temp;
+      }
+
+      SelfType &operator--()
+      {
+         decrement();
+         return *this;
+      }
+
+      SelfType &operator++()
+      {
+         increment();
+         return *this;
+      }
+
+      reference operator *() const
+      {
+         return deref();
+      }
+   };
+
+
+   /** \brief Iterator for object and array value.
+    */
+   class ValueIterator : public ValueIteratorBase
+   {
+      friend class Value;
+   public:
+      typedef unsigned int size_t;
+      typedef int difference_type;
+      typedef Value &reference;
+      typedef Value *pointer;
+      typedef ValueIterator SelfType;
+
+      ValueIterator();
+      ValueIterator( const ValueConstIterator &other );
+      ValueIterator( const ValueIterator &other );
+   private:
+      /*! \internal Used by Value to create an iterator.
+       */
+#ifndef JSON_VALUE_USE_INTERNAL_MAP
+      explicit ValueIterator( const Value::ObjectValues::iterator &current );
+#else
+      ValueIterator( const ValueInternalArray::IteratorState &state );
+      ValueIterator( const ValueInternalMap::IteratorState &state );
+#endif
+   public:
+
+      SelfType &operator =( const SelfType &other );
+
+      SelfType operator++( int )
+      {
+         SelfType temp( *this );
+         ++*this;
+         return temp;
+      }
+
+      SelfType operator--( int )
+      {
+         SelfType temp( *this );
+         --*this;
+         return temp;
+      }
+
+      SelfType &operator--()
+      {
+         decrement();
+         return *this;
+      }
+
+      SelfType &operator++()
+      {
+         increment();
+         return *this;
+      }
+
+      reference operator *() const
+      {
+         return deref();
+      }
+   };
+
+
+} // namespace Json
+
+
+#endif // CPPTL_JSON_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/value.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/reader.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef CPPTL_JSON_READER_H_INCLUDED
+# define CPPTL_JSON_READER_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+# include "features.h"
+# include "value.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+# include <deque>
+# include <stack>
+# include <string>
+# include <iostream>
+
+namespace Json {
+
+   /** \brief Unserialize a <a HREF="http://www.json.org">JSON</a> document into a Value.
+    *
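+    * Editor's note: a minimal usage sketch, not part of the upstream documentation;
+    * it assumes a std::string named 'document' holding the JSON text.
+    * \code
+    * Json::Reader reader;
+    * Json::Value root;
+    * if ( !reader.parse( document, root ) )
+    *    std::cerr << reader.getFormattedErrorMessages();
+    * \endcode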
+    */
+   class JSON_API Reader
+   {
+   public:
+      typedef char Char;
+      typedef const Char *Location;
+
+      /** \brief Constructs a Reader allowing all features
+       * for parsing.
+       */
+      Reader();
+
+      /** \brief Constructs a Reader allowing the specified feature set
+       * for parsing.
+       */
+      Reader( const Features &features );
+
+      /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a> document.
+       * \param document UTF-8 encoded string containing the document to read.
+       * \param root [out] Contains the root value of the document if it was
+       *             successfully parsed.
+       * \param collectComments \c true to collect comments and allow writing them back during
+       *                        serialization, \c false to discard comments.
+       *                        This parameter is ignored if Features::allowComments_
+       *                        is \c false.
+       * \return \c true if the document was successfully parsed, \c false if an error occurred.
+       */
+      bool parse( const std::string &document,
+                  Value &root,
+                  bool collectComments = true );
+
+      /** \brief Read a Value from a <a HREF="http://www.json.org">JSON</a> document.
+       * \param beginDoc Pointer to the beginning of the UTF-8 encoded string of the document to read.
+       * \param endDoc Pointer to the end of the UTF-8 encoded string of the document to read.
+       *               Must be >= beginDoc.
+       * \param root [out] Contains the root value of the document if it was
+       *             successfully parsed.
+       * \param collectComments \c true to collect comments and allow writing them back during
+       *                        serialization, \c false to discard comments.
+       *                        This parameter is ignored if Features::allowComments_
+       *                        is \c false.
+       * \return \c true if the document was successfully parsed, \c false if an error occurred.
+       */
+      bool parse( const char *beginDoc, const char *endDoc,
+                  Value &root,
+                  bool collectComments = true );
+
+      /// \brief Parse from input stream.
+      /// \see Json::operator>>(std::istream&, Json::Value&).
+      bool parse( std::istream &is,
+                  Value &root,
+                  bool collectComments = true );
+
+      /** \brief Returns a user-friendly string that lists errors in the parsed document.
+       * \return Formatted error message with the list of errors with their location in
+       *         the parsed document. An empty string is returned if no error occurred
+       *         during parsing.
+       * \deprecated Use getFormattedErrorMessages() instead (typo fix).
+       */
+      //JSONCPP_DEPRECATED("Use getFormattedErrorMessages instead")
+      std::string getFormatedErrorMessages() const;
+
+      /** \brief Returns a user-friendly string that lists errors in the parsed document.
+       * \return Formatted error message with the list of errors with their location in
+       *         the parsed document. An empty string is returned if no error occurred
+       *         during parsing.
+       */
+      std::string getFormattedErrorMessages() const;
+
+   private:
+      enum TokenType
+      {
+         tokenEndOfStream = 0,
+         tokenObjectBegin,
+         tokenObjectEnd,
+         tokenArrayBegin,
+         tokenArrayEnd,
+         tokenString,
+         tokenNumber,
+         tokenTrue,
+         tokenFalse,
+         tokenNull,
+         tokenArraySeparator,
+         tokenMemberSeparator,
+         tokenComment,
+         tokenError
+      };
+
+      class Token
+      {
+      public:
+         TokenType type_;
+         Location start_;
+         Location end_;
+      };
+
+      class ErrorInfo
+      {
+      public:
+         Token token_;
+         std::string message_;
+         Location extra_;
+      };
+
+      typedef std::deque<ErrorInfo> Errors;
+
+      bool expectToken( TokenType type, Token &token, const char *message );
+      bool readToken( Token &token );
+      void skipSpaces();
+      bool match( Location pattern,
+                  int patternLength );
+      bool readComment();
+      bool readCStyleComment();
+      bool readCppStyleComment();
+      bool readString();
+      void readNumber();
+      bool readValue();
+      bool readObject( Token &token );
+      bool readArray( Token &token );
+      bool decodeNumber( Token &token );
+      bool decodeString( Token &token );
+      bool decodeString( Token &token, std::string &decoded );
+      bool decodeDouble( Token &token );
+      bool decodeUnicodeCodePoint( Token &token,
+                                   Location &current,
+                                   Location end,
+                                   unsigned int &unicode );
+      bool decodeUnicodeEscapeSequence( Token &token,
+                                        Location &current,
+                                        Location end,
+                                        unsigned int &unicode );
+      bool addError( const std::string &message,
+                     Token &token,
+                     Location extra = 0 );
+      bool recoverFromError( TokenType skipUntilToken );
+      bool addErrorAndRecover( const std::string &message,
+                               Token &token,
+                               TokenType skipUntilToken );
+      void skipUntilSpace();
+      Value &currentValue();
+      Char getNextChar();
+      void getLocationLineAndColumn( Location location,
+                                     int &line,
+                                     int &column ) const;
+      std::string getLocationLineAndColumn( Location location ) const;
+      void addComment( Location begin,
+                       Location end,
+                       CommentPlacement placement );
+      void skipCommentTokens( Token &token );
+
+      typedef std::stack<Value *> Nodes;
+      Nodes nodes_;
+      Errors errors_;
+      std::string document_;
+      Location begin_;
+      Location end_;
+      Location current_;
+      Location lastValueEnd_;
+      Value *lastValue_;
+      std::string commentsBefore_;
+      Features features_;
+      bool collectComments_;
+   };
+
+   /** \brief Read from 'sin' into 'root'.
+
+    Always keep comments from the input JSON.
+
+    This can be used to read a file into a particular sub-object.
+    For example:
+    \code
+    Json::Value root;
+    cin >> root["dir"]["file"];
+    cout << root;
+    \endcode
+    Result:
+    \verbatim
+    {
+    "dir": {
+        "file": {
+        // The input stream JSON would be nested here.
+        }
+    }
+    }
+    \endverbatim
+    \throw std::exception on parse error.
+    \see Json::operator<<()
+   */
+   std::istream& operator>>( std::istream&, Value& );
+
+} // namespace Json
+
+#endif // CPPTL_JSON_READER_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/reader.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+
+// //////////////////////////////////////////////////////////////////////
+// Beginning of content of file: include/json/writer.h
+// //////////////////////////////////////////////////////////////////////
+
+// Copyright 2007-2010 Baptiste Lepilleur
+// Distributed under MIT license, or public domain if desired and
+// recognized in your jurisdiction.
+// See file LICENSE for detail or copy at http://jsoncpp.sourceforge.net/LICENSE
+
+#ifndef JSON_WRITER_H_INCLUDED
+# define JSON_WRITER_H_INCLUDED
+
+#if !defined(JSON_IS_AMALGAMATION)
+# include "value.h"
+#endif // if !defined(JSON_IS_AMALGAMATION)
+# include <vector>
+# include <string>
+# include <iostream>
+
+namespace Json {
+
+   class Value;
+
+   /** \brief Abstract class for writers.
+    */
+   class JSON_API Writer
+   {
+   public:
+      virtual ~Writer();
+
+      virtual std::string write( const Value &root ) = 0;
+   };
+
+   /** \brief Outputs a Value in <a HREF="http://www.json.org">JSON</a> format without formatting (not human friendly).
+    *
+    * The JSON document is written on a single line. It is not intended for 'human' consumption,
+    * but may be useful to support features such as RPC where bandwidth is limited.
+    * \sa Reader, Value
+    */
+   class JSON_API FastWriter : public Writer
+   {
+   public:
+      FastWriter();
+      virtual ~FastWriter(){}
+
+      void enableYAMLCompatibility();
+
+   public: // overridden from Writer
+      virtual std::string write( const Value &root );
+
+   private:
+      void writeValue( const Value &value );
+
+      std::string document_;
+      bool yamlCompatiblityEnabled_;
+   };
+
+   /** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a human friendly way.
+    *
+    * The rules for line breaks and indentation are as follows:
+    * - Object value:
+    *     - if empty then print {} without indent and line break
+    *     - if not empty then print '{', line break & indent, print one value per line
+    *       and then unindent, line break and print '}'.
+    * - Array value:
+    *     - if empty then print [] without indent and line break
+    *     - if the array contains no object value, empty array or some other value types,
+    *       and all the values fit on one line, then print the array on a single line.
+    *     - otherwise, if the values do not fit on one line, or the array contains
+    *       an object or a non-empty array, then print one value per line.
+    *
+    * If the Value has comments then they are output according to their #CommentPlacement.
+    *
+    * \sa Reader, Value, Value::setComment()
+    */
+   class JSON_API StyledWriter: public Writer
+   {
+   public:
+      StyledWriter();
+      virtual ~StyledWriter(){}
+
+   public: // overridden from Writer
+      /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
+       * \param root Value to serialize.
+       * \return String containing the JSON document that represents the root value.
+       */
+      virtual std::string write( const Value &root );
+
+   private:
+      void writeValue( const Value &value );
+      void writeArrayValue( const Value &value );
+      bool isMultineArray( const Value &value );
+      void pushValue( const std::string &value );
+      void writeIndent();
+      void writeWithIndent( const std::string &value );
+      void indent();
+      void unindent();
+      void writeCommentBeforeValue( const Value &root );
+      void writeCommentAfterValueOnSameLine( const Value &root );
+      bool hasCommentForValue( const Value &value );
+      static std::string normalizeEOL( const std::string &text );
+
+      typedef std::vector<std::string> ChildValues;
+
+      ChildValues childValues_;
+      std::string document_;
+      std::string indentString_;
+      int rightMargin_;
+      int indentSize_;
+      bool addChildValues_;
+   };
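+
+   /* Illustrative usage sketch, not part of the original header, contrasting the
+      two writers declared above; the "name" key is only an example:
+
+        Json::Value root;
+        root["name"] = "vtk";
+        Json::FastWriter fast;            // compact single-line output
+        std::string compact = fast.write( root );
+        Json::StyledWriter styled;        // indented, human readable output
+        std::string pretty = styled.write( root );
+   */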
+
+   /** \brief Writes a Value in <a HREF="http://www.json.org">JSON</a> format in a human friendly way,
+    *   to a stream rather than to a string.
+    *
+    * The rules for line breaks and indentation are as follows:
+    * - Object value:
+    *     - if empty then print {} without indent and line break
+    *     - if not empty then print '{', line break & indent, print one value per line
+    *       and then unindent, line break and print '}'.
+    * - Array value:
+    *     - if empty then print [] without indent and line break
+    *     - if the array contains no object value, empty array or some other value types,
+    *       and all the values fit on one line, then print the array on a single line.
+    *     - otherwise, if the values do not fit on one line, or the array contains
+    *       an object or a non-empty array, then print one value per line.
+    *
+    * If the Value has comments then they are output according to their #CommentPlacement.
+    *
+    * \param indentation Each level will be indented by this amount extra.
+    * \sa Reader, Value, Value::setComment()
+    */
+   class JSON_API StyledStreamWriter
+   {
+   public:
+      StyledStreamWriter( std::string indentation="\t" );
+      ~StyledStreamWriter(){}
+
+   public:
+      /** \brief Serialize a Value in <a HREF="http://www.json.org">JSON</a> format.
+       * \param out Stream to write to. (Can be ostringstream, e.g.)
+       * \param root Value to serialize.
+       * \note There is no point in deriving from Writer, since write() should not return a value.
+       */
+      void write( std::ostream &out, const Value &root );
+
+   private:
+      void writeValue( const Value &value );
+      void writeArrayValue( const Value &value );
+      bool isMultineArray( const Value &value );
+      void pushValue( const std::string &value );
+      void writeIndent();
+      void writeWithIndent( const std::string &value );
+      void indent();
+      void unindent();
+      void writeCommentBeforeValue( const Value &root );
+      void writeCommentAfterValueOnSameLine( const Value &root );
+      bool hasCommentForValue( const Value &value );
+      static std::string normalizeEOL( const std::string &text );
+
+      typedef std::vector<std::string> ChildValues;
+
+      ChildValues childValues_;
+      std::ostream* document_;
+      std::string indentString_;
+      int rightMargin_;
+      std::string indentation_;
+      bool addChildValues_;
+   };
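+
+   /* Illustrative usage sketch, not part of the original header; the file name is
+      only an example and std::ofstream is assumed to come from <fstream>:
+
+        Json::Value root;
+        std::ofstream out( "settings.json" );
+        Json::StyledStreamWriter writer( "  " );   // two-space indentation
+        writer.write( out, root );
+   */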
+
+# if defined(JSON_HAS_INT64)
+   std::string JSON_API valueToString( Int value );
+   std::string JSON_API valueToString( UInt value );
+# endif // if defined(JSON_HAS_INT64)
+   std::string JSON_API valueToString( LargestInt value );
+   std::string JSON_API valueToString( LargestUInt value );
+   std::string JSON_API valueToString( double value );
+   std::string JSON_API valueToString( bool value );
+   std::string JSON_API valueToQuotedString( const char *value );
+
+   /// \brief Output using the StyledStreamWriter.
+   /// \see Json::operator>>()
+   std::ostream& operator<<( std::ostream&, const Value &root );
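+
+   /* Illustrative round-trip sketch, not part of the original header; it assumes
+      <sstream> for std::stringstream and uses the operators declared in this
+      amalgamated header:
+
+        Json::Value root;
+        root["key"] = "value";
+        std::stringstream ss;
+        ss << root;        // formatted with StyledStreamWriter
+        Json::Value copy;
+        ss >> copy;        // throws std::exception on a parse error
+   */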
+
+} // namespace Json
+
+
+
+#endif // JSON_WRITER_H_INCLUDED
+
+// //////////////////////////////////////////////////////////////////////
+// End of content of file: include/json/writer.h
+// //////////////////////////////////////////////////////////////////////
+
+
+
+
+
+#endif //ifndef JSON_AMALGATED_H_INCLUDED
diff --git a/ThirdParty/jsoncpp/jsoncpp.cpp b/ThirdParty/jsoncpp/vtkjsoncpp/jsoncpp.cpp
similarity index 100%
rename from ThirdParty/jsoncpp/jsoncpp.cpp
rename to ThirdParty/jsoncpp/vtkjsoncpp/jsoncpp.cpp
diff --git a/ThirdParty/libproj4/CMakeLists.txt b/ThirdParty/libproj4/CMakeLists.txt
index 1e059e8..913507d 100644
--- a/ThirdParty/libproj4/CMakeLists.txt
+++ b/ThirdParty/libproj4/CMakeLists.txt
@@ -1,4 +1,4 @@
 vtk_module_third_party(LIBPROJ4
   LIBRARIES vtkproj4
-  INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR}/vtklibproj4
-  )
+  INCLUDE_DIRS ${vtklibproj4_SOURCE_DIR}/vtklibproj4
+               ${vtklibproj4_BINARY_DIR}/vtklibproj4)
diff --git a/ThirdParty/libproj4/vtklibproj4/CMakeLists.txt b/ThirdParty/libproj4/vtklibproj4/CMakeLists.txt
index af246a9..daa38ba 100644
--- a/ThirdParty/libproj4/vtklibproj4/CMakeLists.txt
+++ b/ThirdParty/libproj4/vtklibproj4/CMakeLists.txt
@@ -19,7 +19,7 @@ if ( PROJ_LIST_EXTERNAL AND BUILD_SHARED_LIBS )
 endif ( PROJ_LIST_EXTERNAL AND BUILD_SHARED_LIBS )
 
 # Make libproj4 thread-safe?
-option( PROJ_USE_PTHREADS "Should libproj4 be built as a thread-friendly library?" OFF )
+option( PROJ_USE_PTHREADS "Should libproj4 be built as a thread-friendly library?" ${VTK_USE_PTHREADS} )
 mark_as_advanced( PROJ_USE_PTHREADS )
 if ( PROJ_USE_PTHREADS )
   if ( NOT VTK_USE_PTHREADS )
@@ -169,13 +169,19 @@ set( LP4_SRCS
 #------------------------------------------------------------------------------
 # Executable and library targets
 
+include_directories(
+  ${libproj4_SOURCE_DIR}
+  ${libproj4_BINARY_DIR}
+)
+
 vtk_add_library( vtkproj4
   ${LP4_SRCS}
 )
 
 if ( UNIX )
   target_link_libraries( vtkproj4
-    m
+    LINK_PUBLIC
+      m
   )
 endif ( UNIX )
 
@@ -194,11 +200,6 @@ target_link_libraries( lproj
   vtkproj4
 )
 
-include_directories(
-  ${libproj4_SOURCE_DIR}
-  ${libproj4_BINARY_DIR}
-)
-
 if ( GSL_FOUND )
   include_directories( GSL_INCLUDE_DIRS )
   target_link_libraries( proj4
@@ -206,11 +207,6 @@ if ( GSL_FOUND )
   )
 endif ( GSL_FOUND )
 
-add_custom_command(TARGET lproj
-  COMMAND ${CMAKE_COMMAND} -E copy
-  ${libproj4_SOURCE_DIR}/vtk_libproj4_mangle.h
-  ${libproj4_BINARY_DIR}/vtk_libproj4_mangle.h)
-
 configure_file(
   ${libproj4_SOURCE_DIR}/proj_config.h.cmake
   ${libproj4_BINARY_DIR}/proj_config.h
@@ -224,6 +220,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${libproj4_SOURCE_DIR}/lib_proj.h
     ${libproj4_SOURCE_DIR}/vtk_libproj4_mangle.h
     ${libproj4_BINARY_DIR}/proj_config.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtklibproj4
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtklibproj4
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/libxml2/vtklibxml2/CMakeLists.txt b/ThirdParty/libxml2/vtklibxml2/CMakeLists.txt
index 5bd006b..2838cf2 100644
--- a/ThirdParty/libxml2/vtklibxml2/CMakeLists.txt
+++ b/ThirdParty/libxml2/vtklibxml2/CMakeLists.txt
@@ -300,6 +300,6 @@ TARGET_LINK_LIBRARIES(vtklibxml2 ${LIBXML2_LIBS})
 IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
   INSTALL(FILES
     ${vtklibxml2_HEADER_FILES}
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtklibxml2/libxml
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtklibxml2/libxml
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/libxml2/vtklibxml2/SAX2.c b/ThirdParty/libxml2/vtklibxml2/SAX2.c
index e31f544..0ada943 100644
--- a/ThirdParty/libxml2/vtklibxml2/SAX2.c
+++ b/ThirdParty/libxml2/vtklibxml2/SAX2.c
@@ -1302,7 +1302,7 @@ xmlSAX2AttributeInternal(void *ctx, const xmlChar *fullname,
 
                 /*
                  * Do the last stage of the attribute normalization
-                 * It need to be done twice ... it's an extra burden related
+                 * It needs to be done twice ... it's an extra burden related
                  * to the ability to keep xmlSAX2References in attributes
                  */
                 nvalnorm = xmlValidNormalizeAttributeValue(ctxt->myDoc,
@@ -2011,7 +2011,7 @@ xmlSAX2AttributeNs(xmlParserCtxtPtr ctxt,
                  * dup now contains a string of the flattened attribute
                  * content with entities substitued. Check if we need to
                  * apply an extra layer of normalization.
-                 * It need to be done twice ... it's an extra burden related
+                 * It needs to be done twice ... it's an extra burden related
                  * to the ability to keep references in attributes
                  */
                 if (ctxt->attsSpecial != NULL) {
diff --git a/ThirdParty/libxml2/vtklibxml2/xmllint.c b/ThirdParty/libxml2/vtklibxml2/xmllint.c
index f1d0eb1..945e91f 100644
--- a/ThirdParty/libxml2/vtklibxml2/xmllint.c
+++ b/ThirdParty/libxml2/vtklibxml2/xmllint.c
@@ -518,7 +518,7 @@ endTimer(char *format, ...)
 #endif
 /************************************************************************
  *                                                                      *
- *                      HTML ouput                                      *
+ *                      HTML output                                     *
  *                                                                      *
  ************************************************************************/
 static char buffer[50000];
diff --git a/ThirdParty/libxml2/vtklibxml2/xmlregexp.c b/ThirdParty/libxml2/vtklibxml2/xmlregexp.c
index 8f2b056..e6f71e9 100644
--- a/ThirdParty/libxml2/vtklibxml2/xmlregexp.c
+++ b/ThirdParty/libxml2/vtklibxml2/xmlregexp.c
@@ -1970,7 +1970,7 @@ xmlFACompareRanges(xmlRegRangePtr range1, xmlRegRangePtr range2) {
         } else {
             /*
              * comparing a block range with anything else is way
-             * too costly, and maintining the table is like too much
+             * too costly, and maintaining the table is like too much
              * memory too, so let's force the automata to save state
              * here.
              */
diff --git a/ThirdParty/libxml2/vtklibxml2/xmlschemas.c b/ThirdParty/libxml2/vtklibxml2/xmlschemas.c
index 4ac6b39..dd54e51 100644
--- a/ThirdParty/libxml2/vtklibxml2/xmlschemas.c
+++ b/ThirdParty/libxml2/vtklibxml2/xmlschemas.c
@@ -20931,7 +20931,7 @@ xmlSchemaFixupComponents(xmlSchemaParserCtxtPtr pctxt,
         goto exit_error;
     /* 
     * First compute the variety of simple types. This is needed as
-    * a seperate step, since otherwise we won't be able to detect
+    * a separate step, since otherwise we won't be able to detect
     * circular union types in all cases.
     */
     for (i = 0; i < nbItems; i++) {
diff --git a/ThirdParty/mrmpi/mpistubs/CMakeLists.txt b/ThirdParty/mrmpi/mpistubs/CMakeLists.txt
index b097865..08b5690 100644
--- a/ThirdParty/mrmpi/mpistubs/CMakeLists.txt
+++ b/ThirdParty/mrmpi/mpistubs/CMakeLists.txt
@@ -6,6 +6,6 @@ TARGET_LINK_LIBRARIES (mpistubs vtksys)
 IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
   INSTALL(FILES
     ${CMAKE_CURRENT_SOURCE_DIR}/mpi.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/mrmpi
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/mrmpi
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/mrmpi/src/CMakeLists.txt b/ThirdParty/mrmpi/src/CMakeLists.txt
index cbf0c39..7408f03 100644
--- a/ThirdParty/mrmpi/src/CMakeLists.txt
+++ b/ThirdParty/mrmpi/src/CMakeLists.txt
@@ -21,6 +21,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${CMAKE_CURRENT_SOURCE_DIR}/mapreduce.h
     ${CMAKE_CURRENT_SOURCE_DIR}/memory.h
     ${CMAKE_CURRENT_SOURCE_DIR}/mrmpi_config.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/mrmpi
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/mrmpi
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/netcdf/CMakeLists.txt b/ThirdParty/netcdf/CMakeLists.txt
index 64402c9..98292ba 100644
--- a/ThirdParty/netcdf/CMakeLists.txt
+++ b/ThirdParty/netcdf/CMakeLists.txt
@@ -1,8 +1,18 @@
-set(vtknetcdf_THIRD_PARTY 1)
-set(vtknetcdf_LIBRARIES vtkNetCDF vtkNetCDF_cxx)
-set(vtknetcdf_INCLUDE_DIRS
-  ${vtknetcdf_SOURCE_DIR}/vtknetcdf/include
-  ${vtknetcdf_BINARY_DIR}/vtknetcdf
+vtk_module_third_party(NetCDF
+  LIBRARIES vtkNetCDF vtkNetCDF_cxx
+  INCLUDE_DIRS
+    ${CMAKE_CURRENT_SOURCE_DIR}/vtknetcdf/include
+    ${CMAKE_CURRENT_BINARY_DIR}/vtknetcdf
+  COMPONENTS C CXX
   )
-vtk_module_export_info()
-add_subdirectory(vtknetcdf)
+
+#Configure the top cpp header to switch between system and internal
+#netcdf just like vtk_module_third_party does for the c header.
+configure_file(vtk_netcdfcpp.h.in vtk_netcdfcpp.h)
+
+#Install the top cpp header too.
+if (NOT VTK_INSTALL_NO_DEVELOPMENT)
+  install(FILES ${CMAKE_CURRENT_BINARY_DIR}/vtk_netcdfcpp.h
+          DESTINATION ${VTK_INSTALL_INCLUDE_DIR}
+          COMPONENT Development)
+endif()
diff --git a/ThirdParty/netcdf/vtk_netcdf.h b/ThirdParty/netcdf/vtk_netcdf.h
deleted file mode 100644
index d18268c..0000000
--- a/ThirdParty/netcdf/vtk_netcdf.h
+++ /dev/null
@@ -1,21 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtk_netcdf.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#ifndef __vtk_netcdf_h
-#define __vtk_netcdf_h
-
-/* Use the netcdf library configured for VTK.  */
-#include <vtknetcdf/include/netcdf.h>
-
-#endif
diff --git a/ThirdParty/netcdf/vtk_netcdf.h.in b/ThirdParty/netcdf/vtk_netcdf.h.in
new file mode 100644
index 0000000..9046b03
--- /dev/null
+++ b/ThirdParty/netcdf/vtk_netcdf.h.in
@@ -0,0 +1,27 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtk_netcdf.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __vtk_netcdf_h
+#define __vtk_netcdf_h
+
+/* Use the netcdf library configured for VTK.  */
+#cmakedefine VTK_USE_SYSTEM_NETCDF
+
+#ifdef VTK_USE_SYSTEM_NETCDF
+# include <netcdf.h>
+#else
+# include <vtknetcdf/include/netcdf.h>
+#endif
+
+#endif
diff --git a/ThirdParty/netcdf/vtk_netcdfcpp.h b/ThirdParty/netcdf/vtk_netcdfcpp.h
deleted file mode 100644
index a6edfba..0000000
--- a/ThirdParty/netcdf/vtk_netcdfcpp.h
+++ /dev/null
@@ -1,21 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtk_netcdf.h
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-#ifndef __vtk_netcdfcpp_h
-#define __vtk_netcdfcpp_h
-
-/* Use the netcdf library configured for VTK.  */
-#include <vtknetcdf/cxx/netcdfcpp.h>
-
-#endif
diff --git a/ThirdParty/netcdf/vtk_netcdfcpp.h.in b/ThirdParty/netcdf/vtk_netcdfcpp.h.in
new file mode 100644
index 0000000..2f8ae97
--- /dev/null
+++ b/ThirdParty/netcdf/vtk_netcdfcpp.h.in
@@ -0,0 +1,27 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtk_netcdf.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#ifndef __vtk_netcdfcpp_h
+#define __vtk_netcdfcpp_h
+
+/* Use the netcdfcpp library configured for VTK.  */
+#cmakedefine VTK_USE_SYSTEM_NETCDF
+
+#ifdef VTK_USE_SYSTEM_NETCDF
+# include <netcdfcpp.h>
+#else
+# include <vtknetcdf/cxx/netcdfcpp.h>
+#endif
+
+#endif
diff --git a/ThirdParty/netcdf/vtknetcdf/CMakeLists.txt b/ThirdParty/netcdf/vtknetcdf/CMakeLists.txt
index fad6f8f..8c57f71 100644
--- a/ThirdParty/netcdf/vtknetcdf/CMakeLists.txt
+++ b/ThirdParty/netcdf/vtknetcdf/CMakeLists.txt
@@ -344,7 +344,7 @@ if (UNIX)
 endif()
 
 VTK_ADD_LIBRARY(vtkNetCDF_cxx ${netcdf_cxx_SOURCES})
-TARGET_LINK_LIBRARIES(vtkNetCDF_cxx vtkNetCDF)
+TARGET_LINK_LIBRARIES(vtkNetCDF_cxx LINK_PUBLIC vtkNetCDF)
 
 #-----------------------------------------------------------------------------
 #package the result
@@ -358,6 +358,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VTKNETCDF_SOURCE_DIR}/cxx/netcdfcpp.h
     ${VTKNETCDF_SOURCE_DIR}/cxx/ncvalues.h
     ${VTKNETCDF_BINARY_DIR}/vtk_netcdf_config.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtknetcdf/include
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtknetcdf/include
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/netcdf/vtknetcdf/libsrc4/nc4file.c b/ThirdParty/netcdf/vtknetcdf/libsrc4/nc4file.c
index 8407868..003a446 100644
--- a/ThirdParty/netcdf/vtknetcdf/libsrc4/nc4file.c
+++ b/ThirdParty/netcdf/vtknetcdf/libsrc4/nc4file.c
@@ -2603,7 +2603,7 @@ sync_netcdf4_file(NC_HDF5_FILE_INFO_T *h5)
       /* Turn define mode off. */
       h5->flags ^= NC_INDEF;
       
-      /* Redef mode needs to be tracked seperately for nc_abort. */
+      /* Redef mode needs to be tracked separately for nc_abort. */
       h5->redef = 0;
    }
 
@@ -2909,7 +2909,7 @@ nc4_enddef_netcdf4_file(NC_HDF5_FILE_INFO_T *h5)
    /* Turn define mode off. */
    h5->flags ^= NC_INDEF;
 
-   /* Redef mode needs to be tracked seperately for nc_abort. */
+   /* Redef mode needs to be tracked separately for nc_abort. */
    h5->redef = 0;
 
    return sync_netcdf4_file(h5);
diff --git a/ThirdParty/oggtheora/vtkoggtheora/CMakeLists.txt b/ThirdParty/oggtheora/vtkoggtheora/CMakeLists.txt
index 54848de..3404618 100644
--- a/ThirdParty/oggtheora/vtkoggtheora/CMakeLists.txt
+++ b/ThirdParty/oggtheora/vtkoggtheora/CMakeLists.txt
@@ -254,13 +254,13 @@ VTK_ADD_LIBRARY(vtkoggtheora ${VTKOGGTHEORA_SRCS})
 IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
   INSTALL(FILES
     ${VTKOGGTHEORA_SOURCE_DIR}/vtk_oggtheora_mangle.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkoggtheora/include
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkoggtheora/include
     COMPONENT Development)
   INSTALL(FILES
     ${VTKOGGTHEORA_SOURCE_DIR}/libogg-${OGG_VERSION}/include/ogg/ogg.h
     ${VTKOGGTHEORA_SOURCE_DIR}/libogg-${OGG_VERSION}/include/ogg/os_types.h
     ${VTKOGGTHEORA_BINARY_DIR}/include/ogg/config_types.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkoggtheora/include/ogg
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkoggtheora/include/ogg
     COMPONENT Development)
   INSTALL(FILES
     ${VTKOGGTHEORA_SOURCE_DIR}/libtheora-${THEORA_VERSION}/include/theora/codec.h
@@ -268,6 +268,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VTKOGGTHEORA_SOURCE_DIR}/libtheora-${THEORA_VERSION}/include/theora/theoradec.h
     ${VTKOGGTHEORA_SOURCE_DIR}/libtheora-${THEORA_VERSION}/include/theora/theoraenc.h
     ${VTKOGGTHEORA_SOURCE_DIR}/libtheora-${THEORA_VERSION}/include/theora/theoraenc.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkoggtheora/include/theora
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkoggtheora/include/theora
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/oggtheora/vtkoggtheora/libogg-1.1.4/include/ogg/ogg.h b/ThirdParty/oggtheora/vtkoggtheora/libogg-1.1.4/include/ogg/ogg.h
index e97b65a..f3128d7 100644
--- a/ThirdParty/oggtheora/vtkoggtheora/libogg-1.1.4/include/ogg/ogg.h
+++ b/ThirdParty/oggtheora/vtkoggtheora/libogg-1.1.4/include/ogg/ogg.h
@@ -79,7 +79,7 @@ typedef struct {
   ogg_int64_t  packetno;  /* sequence number for decode; the framing
                              knows where there's a hole in the data,
                              but we need coupling so that the codec
-                             (which is in a seperate abstraction
+                             (which is in a separate abstraction
                              layer) also knows about the gap */
   ogg_int64_t   granulepos;
 
@@ -99,7 +99,7 @@ typedef struct {
   ogg_int64_t  packetno;     /* sequence number for decode; the framing
                                 knows where there's a hole in the data,
                                 but we need coupling so that the codec
-                                (which is in a seperate abstraction
+                                (which is in a separate abstraction
                                 layer) also knows about the gap */
 } ogg_packet;
 
diff --git a/ThirdParty/png/vtkpng/CMakeLists.txt b/ThirdParty/png/vtkpng/CMakeLists.txt
index d416b2c..e1b0ffb 100644
--- a/ThirdParty/png/vtkpng/CMakeLists.txt
+++ b/ThirdParty/png/vtkpng/CMakeLists.txt
@@ -38,6 +38,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VTKPNG_SOURCE_DIR}/png.h
     ${VTKPNG_SOURCE_DIR}/pngconf.h
     ${VTKPNG_SOURCE_DIR}/vtk_png_mangle.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkpng
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkpng
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/png/vtkpng/png.h b/ThirdParty/png/vtkpng/png.h
index f5f23c3..c9166ca 100644
--- a/ThirdParty/png/vtkpng/png.h
+++ b/ThirdParty/png/vtkpng/png.h
@@ -2214,19 +2214,19 @@ extern PNG_EXPORT(void, png_write_png) PNGARG((png_structp png_ptr,
 #define png_debug(l,m) \
 { \
      int num_tabs=l; \
-     fprintf(PNG_DEBUG_FILE,"%s"m,(num_tabs==1 ? "\t" : \
+     fprintf(PNG_DEBUG_FILE,"%s" m,(num_tabs==1 ? "\t" : \
        (num_tabs==2 ? "\t\t":(num_tabs>2 ? "\t\t\t":"")))); \
 }
 #define png_debug1(l,m,p1) \
 { \
      int num_tabs=l; \
-     fprintf(PNG_DEBUG_FILE,"%s"m,(num_tabs==1 ? "\t" : \
+     fprintf(PNG_DEBUG_FILE,"%s" m,(num_tabs==1 ? "\t" : \
        (num_tabs==2 ? "\t\t":(num_tabs>2 ? "\t\t\t":""))),p1); \
 }
 #define png_debug2(l,m,p1,p2) \
 { \
      int num_tabs=l; \
-     fprintf(PNG_DEBUG_FILE,"%s"m,(num_tabs==1 ? "\t" : \
+     fprintf(PNG_DEBUG_FILE,"%s" m,(num_tabs==1 ? "\t" : \
        (num_tabs==2 ? "\t\t":(num_tabs>2 ? "\t\t\t":""))),p1,p2); \
 }
 #endif /* (PNG_DEBUG > 1) */
diff --git a/ThirdParty/png/vtkpng/pngwio.c b/ThirdParty/png/vtkpng/pngwio.c
index 3ef5fd7..ebe8c78 100644
--- a/ThirdParty/png/vtkpng/pngwio.c
+++ b/ThirdParty/png/vtkpng/pngwio.c
@@ -156,7 +156,7 @@ png_default_flush(png_structp png_ptr)
                    arguments a pointer to a png_struct.  After a call to
                    the flush function, there should be no data in any buffers
                    or pending transmission.  If the output method doesn't do
-                   any buffering of ouput, a function prototype must still be
+                   any buffering of output, a function prototype must still be
                    supplied although it doesn't have to do anything.  If
                    PNG_WRITE_FLUSH_SUPPORTED is not defined at libpng compile
                    time, output_flush_fn will be ignored, although it must be
diff --git a/ThirdParty/sqlite/vtksqlite/CMakeLists.txt b/ThirdParty/sqlite/vtksqlite/CMakeLists.txt
index 3c82a6a..f39339e 100644
--- a/ThirdParty/sqlite/vtksqlite/CMakeLists.txt
+++ b/ThirdParty/sqlite/vtksqlite/CMakeLists.txt
@@ -47,6 +47,6 @@ ENDIF (WIN32)
 IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
   INSTALL(FILES
     ${CMAKE_CURRENT_SOURCE_DIR}/vtk_sqlite3.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtksqlite
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtksqlite
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/sqlite/vtksqlite/vtk_sqlite3.c b/ThirdParty/sqlite/vtksqlite/vtk_sqlite3.c
index 7e6fcac..b4bfb25 100644
--- a/ThirdParty/sqlite/vtksqlite/vtk_sqlite3.c
+++ b/ThirdParty/sqlite/vtksqlite/vtk_sqlite3.c
@@ -34996,7 +34996,7 @@ static void pagerUnlockIfUnused(Pager *pPager){
 ** page is initialized to all zeros. 
 **
 ** If noContent is true, it means that we do not care about the contents
-** of the page. This occurs in two seperate scenarios:
+** of the page. This occurs in two separate scenarios:
 **
 **   a) When reading a free-list leaf page from the database, and
 **
@@ -49571,7 +49571,7 @@ VTK_SQLITE_PRIVATE int vtk_sqlite3VdbeCursorMoveto(VdbeCursor *p){
 ** the blob of data that it corresponds to. In a table record, all serial
 ** types are stored at the start of the record, and the blobs of data at
 ** the end. Hence these functions allow the caller to handle the
-** serial-type and data blob seperately.
+** serial-type and data blob separately.
 **
 ** The following table describes the various storage classes for data:
 **
@@ -68330,7 +68330,7 @@ VTK_SQLITE_PRIVATE void vtk_sqlite3DropTable(Parse *pParse, SrcList *pName, int
     /* Drop all VTK_SQLITE_MASTER table and index entries that refer to the
     ** table. The program name loops through the master table and deletes
     ** every row that refers to a table of the same name as the one being
-    ** dropped. Triggers are handled seperately because a trigger can be
+    ** dropped. Triggers are handled separately because a trigger can be
     ** created in the temp database that refers to a table in another
     ** database.
     */
diff --git a/ThirdParty/tiff/vtktiff/CMakeLists.txt b/ThirdParty/tiff/vtktiff/CMakeLists.txt
index 4d4fe5c..53c7b89 100644
--- a/ThirdParty/tiff/vtktiff/CMakeLists.txt
+++ b/ThirdParty/tiff/vtktiff/CMakeLists.txt
@@ -91,7 +91,7 @@ CHECK_FUNCTION_EXISTS(strchr HAVE_STRCHR)
 CHECK_FUNCTION_EXISTS(strrchr HAVE_STRRCHR)
 CHECK_FUNCTION_EXISTS(strstr HAVE_STRSTR)
 CHECK_FUNCTION_EXISTS(strtol HAVE_STRTOL)
-CHECK_FUNCTION_EXISTS(areroul HAVE_STRTOUL)
+CHECK_FUNCTION_EXISTS(strtoul HAVE_STRTOUL)
 
 
 IF("${CMAKE_MAJOR_VERSION}.${CMAKE_MINOR_VERSION}" LESS 2.0)
@@ -124,6 +124,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VTKTIFF_SOURCE_DIR}/tiffio.h
     ${VTKTIFF_SOURCE_DIR}/tconf.h
     ${VTKTIFF_SOURCE_DIR}/tiffconf.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtktiff
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtktiff
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/ThirdParty/tiff/vtktiff/tif_config.h.in b/ThirdParty/tiff/vtktiff/tif_config.h.in
index 55ff16a..cc97062 100644
--- a/ThirdParty/tiff/vtktiff/tif_config.h.in
+++ b/ThirdParty/tiff/vtktiff/tif_config.h.in
@@ -238,11 +238,12 @@ the sizes can be different.*/
 /* Define to empty if `const' does not conform to ANSI C. */
 #cmakedefine const
 
-/* Define to `__inline__' or `__inline' if that's what the C compiler
-   calls it, or to nothing if 'inline' is not supported under any name.  */
+/* MSVC does not support C99 inline, so just make the inline keyword
+   disappear for C.  */
 #ifndef __cplusplus
-#define inline
-//#cmakedefine inline
+#  ifdef _MSC_VER
+#    define inline
+#  endif
 #endif
 
 /* Define to `long' if <sys/types.h> does not define. */
diff --git a/ThirdParty/utf8/doc/ReleaseNotes b/ThirdParty/utf8/doc/ReleaseNotes
index 3e9db73..03bf18f 100644
--- a/ThirdParty/utf8/doc/ReleaseNotes
+++ b/ThirdParty/utf8/doc/ReleaseNotes
@@ -12,6 +12,9 @@ class.  We've made several VTK-specific modifications to support older compilers
 * Altered utf16to8() to work with vtkstd::back_insert_iterator, which also reduces the
   generality of the library.
 
+* Suppress clang warnings about dynamic exception specifiers on a few methods. The methods need
+  to keep the "throw()" on their declaration to be compatible with their superclass in C++98.
+
 For these reasons, we strongly discourage use of the VTK-specific version of UTF8-CPP, which is
 a private part the VTK API, and subject to removal or revision at any time.  See the following
 links for the full version of the library:
diff --git a/ThirdParty/utf8/source/utf8/checked.h b/ThirdParty/utf8/source/utf8/checked.h
index a7b976a..09b115c 100644
--- a/ThirdParty/utf8/source/utf8/checked.h
+++ b/ThirdParty/utf8/source/utf8/checked.h
@@ -31,6 +31,17 @@ DEALINGS IN THE SOFTWARE.
 #include "core.h"
 #include <stdexcept>
 
+// In C++11, clang will warn about using dynamic exception specifications
+// as they are deprecated.  But as this class is subclassing std::exception
+// we must keep the 'throw()' to be C++98 compatible.
+// So we suppress the warning.
+#if defined(__clang__) && defined(__has_warning)
+# if __has_warning("-Wdeprecated")
+#  pragma clang diagnostic push
+#  pragma clang diagnostic ignored "-Wdeprecated"
+# endif
+#endif
+
 namespace vtk_utf8
 {
     // Exceptions that may be thrown from the library functions.
@@ -313,6 +324,13 @@ namespace vtk_utf8
 
 } // namespace vtk_utf8
 
+// Undo warning suppression.
+#if defined(__clang__) && defined(__has_warning)
+# if __has_warning("-Wdeprecated")
+#  pragma clang diagnostic pop
+# endif
+#endif
+
 #endif //header guard
 
 
diff --git a/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfValuesBinary.cxx b/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfValuesBinary.cxx
index 74638eb..89a1038 100644
--- a/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfValuesBinary.cxx
+++ b/ThirdParty/xdmf2/vtkxdmf2/libsrc/XdmfValuesBinary.cxx
@@ -282,7 +282,7 @@ XdmfValuesBinary::Read(XdmfArray *anArray){
     }
     FullFileName << DataSetName << ends;
     char * path = FullFileName.rdbuf()->str();
-    XdmfDebug("Opening Binary Data for Reading : " << FullFileName);
+    XdmfDebug("Opening Binary Data for Reading : " << FullFileName.str());
 
 
     //char * path = new char [ strlen(this->DOM->GetWorkingDirectory())+strlen(DataSetName) + 1 ];
diff --git a/ThirdParty/zlib/vtkzlib/CMakeLists.txt b/ThirdParty/zlib/vtkzlib/CMakeLists.txt
index 129fe40..a64a736 100644
--- a/ThirdParty/zlib/vtkzlib/CMakeLists.txt
+++ b/ThirdParty/zlib/vtkzlib/CMakeLists.txt
@@ -35,6 +35,6 @@ IF(NOT VTK_INSTALL_NO_DEVELOPMENT)
     ${VTKZLIB_SOURCE_DIR}/zconf.h
     ${VTKZLIB_SOURCE_DIR}/vtk_zlib_mangle.h
     ${VTKZLIB_BINARY_DIR}/zlibDllConfig.h
-    DESTINATION ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkzlib
+    DESTINATION ${VTK_INSTALL_INCLUDE_DIR}/vtkzlib
     COMPONENT Development)
 ENDIF(NOT VTK_INSTALL_NO_DEVELOPMENT)
diff --git a/Utilities/DICOMParser/DICOMAppHelper.cxx b/Utilities/DICOMParser/DICOMAppHelper.cxx
index febc413..021c9b1 100644
--- a/Utilities/DICOMParser/DICOMAppHelper.cxx
+++ b/Utilities/DICOMParser/DICOMAppHelper.cxx
@@ -205,6 +205,7 @@ void DICOMAppHelper::RegisterCallbacks(DICOMParser* parser)
   if (!parser)
     {
     dicom_stream::cerr << "Null parser!" << dicom_stream::endl;
+    return;
     }
 
   SeriesUIDCB->SetCallbackFunction(this, &DICOMAppHelper::SeriesUIDCallback);
diff --git a/Utilities/DICOMParser/DICOMCallback.h b/Utilities/DICOMParser/DICOMCallback.h
index fed0b8a..4f1a8a7 100644
--- a/Utilities/DICOMParser/DICOMCallback.h
+++ b/Utilities/DICOMParser/DICOMCallback.h
@@ -45,7 +45,7 @@
 class DICOM_EXPORT DICOMCallback
 {
  public:
-  virtual ~DICOMCallback() {};
+  virtual ~DICOMCallback() {}
   virtual void Execute(DICOMParser *parser,
                        doublebyte group,
                        doublebyte element,
diff --git a/Utilities/DICOMParser/DICOMParser.cxx b/Utilities/DICOMParser/DICOMParser.cxx
index 8c6f33e..3c0060f 100644
--- a/Utilities/DICOMParser/DICOMParser.cxx
+++ b/Utilities/DICOMParser/DICOMParser.cxx
@@ -34,7 +34,7 @@
 #include <math.h>
 #endif
 #include <time.h>
-#include <assert.h>
+#include <cassert>
 #if !defined(__MWERKS__)
 #include <sys/types.h>
 #endif
diff --git a/Utilities/DICOMParser/DICOMParserMap.h b/Utilities/DICOMParser/DICOMParserMap.h
index af65c33..aafec83 100644
--- a/Utilities/DICOMParser/DICOMParserMap.h
+++ b/Utilities/DICOMParser/DICOMParserMap.h
@@ -102,7 +102,7 @@ typedef dicom_stl::pair<doublebyte, dicom_stl::vector<DICOMCallback*>*> DICOMMap
 class  DICOMMapValue : public DICOMMapValueOverride
 {
  public:
-   DICOMMapValue() : dicom_stl::pair<doublebyte, dicom_stl::vector<DICOMCallback*>*>() {};
+   DICOMMapValue() : dicom_stl::pair<doublebyte, dicom_stl::vector<DICOMCallback*>*>() {}
 
   DICOMMapValue(doublebyte v1, dicom_stl::vector<DICOMCallback*> * v2) :
     dicom_stl::pair<doublebyte, dicom_stl::vector<DICOMCallback*>*>(v1, v2)
diff --git a/Utilities/Doxygen/CMakeLists.txt b/Utilities/Doxygen/CMakeLists.txt
index 820f87e..cc5c13e 100644
--- a/Utilities/Doxygen/CMakeLists.txt
+++ b/Utilities/Doxygen/CMakeLists.txt
@@ -32,9 +32,16 @@ file(GLOB src RELATIVE "${VTK_SOURCE_DIR}" "${VTK_SOURCE_DIR}/*/*/module.cmake")
 foreach(module ${src})
   get_filename_component(module_BASE ${module} PATH)
   if("${module_BASE}" MATCHES "^Utilities" OR ${module_BASE} MATCHES "^Third")
+    # Skip the utilities and third parties directories.
   else()
+    # Use both the source and binary directories, this ensures that
+    # generated files will be included, but they may then be architecture,
+    # build configuration and/or compiler specific. All source directories
+    # are included whether the module is enabled or not.
     set(VTK_MODULE_DIRS_DOXYGEN
-      "${VTK_MODULE_DIRS_DOXYGEN}  \"${VTK_SOURCE_DIR}/${module_BASE}\"\n")
+      "${VTK_MODULE_DIRS_DOXYGEN}
+  \"${VTK_SOURCE_DIR}/${module_BASE}\"
+  \"${VTK_BINARY_DIR}/${module_BASE}\"")
   endif()
 endforeach()
 
diff --git a/Utilities/Doxygen/doc_header2doxygen.pl b/Utilities/Doxygen/doc_header2doxygen.pl
index f8c21a0..ee8dc04 100755
--- a/Utilities/Doxygen/doc_header2doxygen.pl
+++ b/Utilities/Doxygen/doc_header2doxygen.pl
@@ -509,6 +509,7 @@ foreach my $source (@files) {
 
             my $indent = $1;
             $Text::Wrap::columns = 76;
+            $Text::Wrap::huge = 'overflow';
 
             # While there are still lines beginning with '//' append them to
             # the function's description and trim spaces.
diff --git a/Utilities/EncodeString/vtkEncodeString.cxx b/Utilities/EncodeString/vtkEncodeString.cxx
index 89386b4..dae1a0e 100644
--- a/Utilities/EncodeString/vtkEncodeString.cxx
+++ b/Utilities/EncodeString/vtkEncodeString.cxx
@@ -28,7 +28,7 @@
 /**
  * Return file name of a full filename (i.e. file name without path).
  */
-std::string GetFilenameName(const std::string& filename)
+static std::string GetFilenameName(const std::string& filename)
 {
 #if defined(_WIN32)
   std::string::size_type slash_pos = filename.find_last_of("/\\");
@@ -49,7 +49,7 @@ std::string GetFilenameName(const std::string& filename)
  * Return file name without extension of a full filename (i.e. without path).
  * Warning: it considers the longest extension (for example: .tar.gz)
  */
-std::string GetFilenameWithoutExtension(const std::string& filename)
+static std::string GetFilenameWithoutExtension(const std::string& filename)
 {
   std::string name = GetFilenameName(filename);
   std::string::size_type dot_pos = name.find(".");
@@ -69,7 +69,7 @@ std::string GetFilenameWithoutExtension(const std::string& filename)
  * Warning: it considers the last extension (for example: removes .gz
  * from .tar.gz)
  */
-std::string GetFilenameWithoutLastExtension(const std::string& filename)
+static std::string GetFilenameWithoutLastExtension(const std::string& filename)
 {
   std::string name = GetFilenameName(filename);
   std::string::size_type dot_pos = name.rfind(".");
diff --git a/Utilities/KWSys/vtksys/CMakeLists.txt b/Utilities/KWSys/vtksys/CMakeLists.txt
index 124b8ac..c2b377d 100644
--- a/Utilities/KWSys/vtksys/CMakeLists.txt
+++ b/Utilities/KWSys/vtksys/CMakeLists.txt
@@ -112,6 +112,7 @@ IF(KWSYS_STANDALONE OR CMake_SOURCE_DIR)
   SET(KWSYS_USE_Base64 1)
   SET(KWSYS_USE_Directory 1)
   SET(KWSYS_USE_DynamicLoader 1)
+  SET(KWSYS_USE_Encoding 1)
   SET(KWSYS_USE_Glob 1)
   SET(KWSYS_USE_MD5 1)
   SET(KWSYS_USE_Process 1)
@@ -122,6 +123,7 @@ IF(KWSYS_STANDALONE OR CMake_SOURCE_DIR)
   SET(KWSYS_USE_FundamentalType 1)
   SET(KWSYS_USE_Terminal 1)
   SET(KWSYS_USE_IOStream 1)
+  SET(KWSYS_USE_FStream 1)
   SET(KWSYS_USE_String 1)
   SET(KWSYS_USE_SystemInformation 1)
   SET(KWSYS_USE_CPU 1)
@@ -130,18 +132,32 @@ ENDIF(KWSYS_STANDALONE OR CMake_SOURCE_DIR)
 # Enforce component dependencies.
 IF(KWSYS_USE_SystemTools)
   SET(KWSYS_USE_Directory 1)
+  SET(KWSYS_USE_FStream 1)
+  SET(KWSYS_USE_Encoding 1)
 ENDIF(KWSYS_USE_SystemTools)
 IF(KWSYS_USE_Glob)
   SET(KWSYS_USE_Directory 1)
   SET(KWSYS_USE_SystemTools 1)
   SET(KWSYS_USE_RegularExpression 1)
+  SET(KWSYS_USE_FStream 1)
+  SET(KWSYS_USE_Encoding 1)
 ENDIF(KWSYS_USE_Glob)
 IF(KWSYS_USE_Process)
   SET(KWSYS_USE_System 1)
+  SET(KWSYS_USE_Encoding 1)
 ENDIF(KWSYS_USE_Process)
 IF(KWSYS_USE_SystemInformation)
   SET(KWSYS_USE_Process 1)
 ENDIF(KWSYS_USE_SystemInformation)
+IF(KWSYS_USE_System)
+  SET(KWSYS_USE_Encoding 1)
+ENDIF(KWSYS_USE_System)
+IF(KWSYS_USE_Directory)
+  SET(KWSYS_USE_Encoding 1)
+ENDIF(KWSYS_USE_Directory)
+IF(KWSYS_USE_FStream)
+  SET(KWSYS_USE_Encoding 1)
+ENDIF(KWSYS_USE_FStream)
 
 # Setup the large file support default.
 IF(KWSYS_LFS_DISABLE)
@@ -150,6 +166,11 @@ ELSE(KWSYS_LFS_DISABLE)
   SET(KWSYS_LFS_REQUESTED 1)
 ENDIF(KWSYS_LFS_DISABLE)
 
+# Specify default 8 bit encoding for Windows
+IF(NOT KWSYS_ENCODING_DEFAULT_CODEPAGE)
+  SET(KWSYS_ENCODING_DEFAULT_CODEPAGE CP_ACP)
+ENDIF(NOT KWSYS_ENCODING_DEFAULT_CODEPAGE)
+
 # Enable testing if building standalone.
 IF(KWSYS_STANDALONE)
   INCLUDE(Dart)
@@ -506,6 +527,12 @@ IF(KWSYS_USE_FundamentalType)
     "Checking whether char is signed" DIRECT)
 ENDIF(KWSYS_USE_FundamentalType)
 
+IF(KWSYS_USE_Encoding)
+  # Look for type size helper macros.
+  KWSYS_PLATFORM_CXX_TEST(KWSYS_STL_HAS_WSTRING
+    "Checking whether wstring is available" DIRECT)
+ENDIF(KWSYS_USE_Encoding)
+
 IF(KWSYS_USE_IOStream)
   # Determine whether iostreams support long long.
   SET(KWSYS_PLATFORM_CXX_TEST_DEFINES
@@ -569,8 +596,17 @@ IF(KWSYS_USE_SystemTools)
     "Checking whether CXX compiler has unsetenv" DIRECT)
   KWSYS_PLATFORM_CXX_TEST(KWSYS_CXX_HAS_ENVIRON_IN_STDLIB_H
     "Checking whether CXX compiler has environ in stdlib.h" DIRECT)
-  SET_SOURCE_FILES_PROPERTIES(SystemTools.cxx PROPERTIES
-    COMPILE_FLAGS "-DKWSYS_CXX_HAS_SETENV=${KWSYS_CXX_HAS_SETENV} -DKWSYS_CXX_HAS_UNSETENV=${KWSYS_CXX_HAS_UNSETENV} -DKWSYS_CXX_HAS_ENVIRON_IN_STDLIB_H=${KWSYS_CXX_HAS_ENVIRON_IN_STDLIB_H}")
+  KWSYS_PLATFORM_CXX_TEST(KWSYS_CXX_HAS_UTIMES
+    "Checking whether CXX compiler has utimes" DIRECT)
+  KWSYS_PLATFORM_CXX_TEST(KWSYS_CXX_HAS_UTIMENSAT
+    "Checking whether CXX compiler has utimensat" DIRECT)
+  SET_PROPERTY(SOURCE SystemTools.cxx APPEND PROPERTY COMPILE_DEFINITIONS
+    KWSYS_CXX_HAS_SETENV=${KWSYS_CXX_HAS_SETENV}
+    KWSYS_CXX_HAS_UNSETENV=${KWSYS_CXX_HAS_UNSETENV}
+    KWSYS_CXX_HAS_ENVIRON_IN_STDLIB_H=${KWSYS_CXX_HAS_ENVIRON_IN_STDLIB_H}
+    KWSYS_CXX_HAS_UTIMES=${KWSYS_CXX_HAS_UTIMES}
+    KWSYS_CXX_HAS_UTIMENSAT=${KWSYS_CXX_HAS_UTIMENSAT}
+    )
 ENDIF()
 
 IF(KWSYS_USE_SystemInformation)
@@ -640,6 +676,68 @@ IF(KWSYS_USE_SystemInformation)
     SET_PROPERTY(SOURCE SystemInformation.cxx APPEND PROPERTY
       COMPILE_DEFINITIONS KWSYS_CXX_HAS__ATOI64=1)
   ENDIF()
+  IF(UNIX)
+    INCLUDE(CheckIncludeFileCXX)
+    # check for simple stack trace
+    # usually it's in libc but on FreeBSD
+    # it's in libexecinfo
+    FIND_LIBRARY(EXECINFO_LIB "execinfo")
+    IF (NOT EXECINFO_LIB)
+      SET(EXECINFO_LIB "")
+    ENDIF()
+    CHECK_INCLUDE_FILE_CXX("execinfo.h" KWSYS_CXX_HAS_EXECINFOH)
+    IF (KWSYS_CXX_HAS_EXECINFOH)
+      # we have the backtrace header check if it
+      # can be used  with this compiler
+      SET(KWSYS_PLATFORM_CXX_TEST_LINK_LIBRARIES ${EXECINFO_LIB})
+      KWSYS_PLATFORM_CXX_TEST(KWSYS_CXX_HAS_BACKTRACE
+         "Checking whether backtrace works with this C++ compiler" DIRECT)
+      SET(KWSYS_PLATFORM_CXX_TEST_LINK_LIBRARIES)
+      IF (KWSYS_CXX_HAS_BACKTRACE)
+        # backtrace is supported by this system and compiler.
+        # now check for the more advanced capabilities.
+        SET_PROPERTY(SOURCE SystemInformation.cxx APPEND PROPERTY
+          COMPILE_DEFINITIONS KWSYS_SYSTEMINFORMATION_HAS_BACKTRACE=1)
+        # check for symbol lookup using dladdr
+        CHECK_INCLUDE_FILE_CXX("dlfcn.h" KWSYS_CXX_HAS_DLFCNH)
+        IF (KWSYS_CXX_HAS_DLFCNH)
+          # we have symbol lookup libraries and headers
+          # check if they can be used with this compiler
+          SET(KWSYS_PLATFORM_CXX_TEST_LINK_LIBRARIES ${CMAKE_DL_LIBS})
+            KWSYS_PLATFORM_CXX_TEST(KWSYS_CXX_HAS_DLADDR
+            "Checking whether dladdr works with this C++ compiler" DIRECT)
+          SET(KWSYS_PLATFORM_CXX_TEST_LINK_LIBRARIES)
+          IF (KWSYS_CXX_HAS_DLADDR)
+            # symbol lookup is supported by this system
+            # and compiler.
+            SET_PROPERTY(SOURCE SystemInformation.cxx APPEND PROPERTY
+              COMPILE_DEFINITIONS KWSYS_SYSTEMINFORMATION_HAS_SYMBOL_LOOKUP=1)
+          ENDIF()
+        ENDIF()
+        # c++ demangling support
+        # check for cxxabi headers
+        CHECK_INCLUDE_FILE_CXX("cxxabi.h" KWSYS_CXX_HAS_CXXABIH)
+        IF (KWSYS_CXX_HAS_CXXABIH)
+          # check if cxxabi can be used with this
+          # system and compiler.
+          KWSYS_PLATFORM_CXX_TEST(KWSYS_CXX_HAS_CXXABI
+            "Checking whether cxxabi works with this C++ compiler" DIRECT)
+          IF (KWSYS_CXX_HAS_CXXABI)
+            # c++ demangle using cxxabi is supported with
+            # this system and compiler
+            SET_PROPERTY(SOURCE SystemInformation.cxx APPEND PROPERTY
+              COMPILE_DEFINITIONS KWSYS_SYSTEMINFORMATION_HAS_CPP_DEMANGLE=1)
+          ENDIF()
+        ENDIF()
+        # basic backtrace works better with release build
+        # don't bother with advanced features for release
+        SET_PROPERTY(SOURCE SystemInformation.cxx APPEND PROPERTY
+          COMPILE_DEFINITIONS_DEBUG KWSYS_SYSTEMINFORMATION_HAS_DEBUG_BUILD=1)
+        SET_PROPERTY(SOURCE SystemInformation.cxx APPEND PROPERTY
+          COMPILE_DEFINITIONS_RELWITHDEBINFO KWSYS_SYSTEMINFORMATION_HAS_DEBUG_BUILD=1)
+      ENDIF()
+    ENDIF()
+  ENDIF()
   IF(BORLAND)
     KWSYS_PLATFORM_CXX_TEST(KWSYS_CXX_HAS_BORLAND_ASM
       "Checking whether Borland CXX compiler supports assembler instructions" DIRECT)
@@ -790,8 +888,8 @@ SET(KWSYS_HXX_FILES Configure String
 
 # Add selected C++ classes.
 SET(cppclasses
-  Directory DynamicLoader Glob RegularExpression SystemTools
-  CommandLineArguments IOStream SystemInformation
+  Directory DynamicLoader Encoding Glob RegularExpression SystemTools
+  CommandLineArguments IOStream FStream SystemInformation
   )
 FOREACH(cpp ${cppclasses})
   IF(KWSYS_USE_${cpp})
@@ -807,7 +905,7 @@ ENDFOREACH(cpp)
 
 # Add selected C components.
 FOREACH(c
-    Process Base64 FundamentalType MD5 Terminal System String CPU
+    Process Base64 Encoding FundamentalType MD5 Terminal System String CPU
     )
   IF(KWSYS_USE_${c})
     # Use the corresponding header file.
@@ -838,16 +936,24 @@ IF(KWSYS_USE_Process)
 ENDIF(KWSYS_USE_Process)
 
 # Add selected C sources.
-FOREACH(c Base64 MD5 Terminal System String)
+FOREACH(c Base64 Encoding MD5 Terminal System String)
   IF(KWSYS_USE_${c})
-    SET(KWSYS_C_SRCS ${KWSYS_C_SRCS} ${c}.c)
+    IF(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${c}C.c)
+      LIST(APPEND KWSYS_C_SRCS ${c}C.c)
+    ELSE()
+      LIST(APPEND KWSYS_C_SRCS ${c}.c)
+    ENDIF()
   ENDIF(KWSYS_USE_${c})
 ENDFOREACH(c)
 
 # Configure headers of C++ classes and construct the list of sources.
 FOREACH(c ${KWSYS_CLASSES})
   # Add this source to the list of source files for the library.
-  SET(KWSYS_CXX_SRCS ${KWSYS_CXX_SRCS} ${c}.cxx)
+  IF(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${c}CXX.cxx)
+    LIST(APPEND KWSYS_CXX_SRCS ${c}CXX.cxx)
+  ELSEIF(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${c}.cxx)
+    LIST(APPEND KWSYS_CXX_SRCS ${c}.cxx)
+  ENDIF()
 
   # Configure the header for this class.
   CONFIGURE_FILE(${PROJECT_SOURCE_DIR}/${c}.hxx.in ${KWSYS_HEADER_DIR}/${c}.hxx
@@ -904,12 +1010,23 @@ IF(KWSYS_C_SRCS OR KWSYS_CXX_SRCS)
     ENDIF(UNIX)
   ENDIF(KWSYS_USE_DynamicLoader)
 
-  IF(KWSYS_USE_SystemInformation AND WIN32)
-    TARGET_LINK_LIBRARIES(${KWSYS_NAMESPACE} ws2_32)
-    IF(KWSYS_SYS_HAS_PSAPI)
-      TARGET_LINK_LIBRARIES(${KWSYS_NAMESPACE} Psapi)
+  IF(KWSYS_USE_SystemInformation)
+    IF(WIN32)
+      TARGET_LINK_LIBRARIES(${KWSYS_NAMESPACE} ws2_32)
+      IF(KWSYS_SYS_HAS_PSAPI)
+        TARGET_LINK_LIBRARIES(${KWSYS_NAMESPACE} Psapi)
+      ENDIF()
+    ELSEIF(UNIX)
+      IF (EXECINFO_LIB AND KWSYS_CXX_HAS_BACKTRACE)
+        # backtrace on FreeBSD is not in libc
+        TARGET_LINK_LIBRARIES(${KWSYS_NAMESPACE} ${EXECINFO_LIB})
+      ENDIF()
+      IF (KWSYS_CXX_HAS_DLADDR)
+        # for symbol lookup using dladdr
+        TARGET_LINK_LIBRARIES(${KWSYS_NAMESPACE} ${CMAKE_DL_LIBS})
+      ENDIF()
     ENDIF()
-  ENDIF(KWSYS_USE_SystemInformation AND WIN32)
+  ENDIF()
 
   # Apply user-defined target properties to the library.
   IF(KWSYS_PROPERTIES_CXX)
@@ -962,6 +1079,12 @@ IF(KWSYS_USE_String)
     COMPILE_FLAGS "-DKWSYS_STRING_C")
 ENDIF(KWSYS_USE_String)
 
+IF(KWSYS_USE_Encoding)
+  # Set default 8 bit encoding in "EncodingC.c".
+  SET_PROPERTY(SOURCE EncodingC.c APPEND PROPERTY COMPILE_DEFINITIONS
+    KWSYS_ENCODING_DEFAULT_CODEPAGE=${KWSYS_ENCODING_DEFAULT_CODEPAGE})
+ENDIF(KWSYS_USE_Encoding)
+
 #-----------------------------------------------------------------------------
 # Setup testing if not being built as part of another project.
 IF(KWSYS_STANDALONE OR CMake_SOURCE_DIR)
@@ -1005,6 +1128,11 @@ IF(KWSYS_STANDALONE OR CMake_SOURCE_DIR)
       testCommandLineArguments
       testCommandLineArguments1
       )
+    IF(KWSYS_STL_HAS_WSTRING)
+      SET(KWSYS_CXX_TESTS ${KWSYS_CXX_TESTS}
+        testEncoding
+        )
+    ENDIF(KWSYS_STL_HAS_WSTRING)
     IF(KWSYS_USE_SystemInformation)
       SET(KWSYS_CXX_TESTS ${KWSYS_CXX_TESTS} testSystemInformation)
     ENDIF(KWSYS_USE_SystemInformation)
diff --git a/Utilities/KWSys/vtksys/CommandLineArguments.hxx.in b/Utilities/KWSys/vtksys/CommandLineArguments.hxx.in
index 68e9600..cbf6ee3 100644
--- a/Utilities/KWSys/vtksys/CommandLineArguments.hxx.in
+++ b/Utilities/KWSys/vtksys/CommandLineArguments.hxx.in
@@ -44,7 +44,7 @@ struct CommandLineArgumentsCallbackStructure;
  *
  * For the variable interface you associate variable with each argument. When
  * the argument is specified, the variable is set to the specified value casted
- * to the apropriate type. For boolean (NO_ARGUMENT), the value is "1".
+ * to the appropriate type. For boolean (NO_ARGUMENT), the value is "1".
  *
  * Both interfaces can be used at the same time. 
  *
@@ -99,7 +99,7 @@ public:
     STRING_TYPE,          // The variable is string (char*)
     STL_STRING_TYPE,      // The variable is string (char*)
     VECTOR_INT_TYPE,             // The variable is integer (int)
-    VECTOR_BOOL_TYPE,            // The vairable is boolean (bool)
+    VECTOR_BOOL_TYPE,            // The variable is boolean (bool)
     VECTOR_DOUBLE_TYPE,          // The variable is float (double)
     VECTOR_STRING_TYPE,          // The variable is string (char*)
     VECTOR_STL_STRING_TYPE,      // The variable is string (char*)
@@ -128,7 +128,7 @@ public:
   void ProcessArgument(const char* arg);
 
   /**
-   * This method will parse arguments and call apropriate methods. 
+   * This method will parse arguments and call appropriate methods.
    */
   int Parse();
 
@@ -144,7 +144,7 @@ public:
   /**
    * Add handler for argument which is going to set the variable to the
    * specified value. If the argument is specified, the option is casted to the
-   * apropriate type.
+   * appropriate type.
    */
   void AddArgument(const char* argument, ArgumentTypeEnum type,
     bool* variable, const char* help);
@@ -160,7 +160,7 @@ public:
   /**
    * Add handler for argument which is going to set the variable to the
    * specified value. If the argument is specified, the option is casted to the
-   * apropriate type. This will handle the multi argument values.
+   * appropriate type. This will handle the multi argument values.
    */
   void AddArgument(const char* argument, ArgumentTypeEnum type,
     kwsys_stl::vector<bool>* variable, const char* help);
diff --git a/Utilities/KWSys/vtksys/Configure.hxx.in b/Utilities/KWSys/vtksys/Configure.hxx.in
index 716b84f..8f5ace2 100644
--- a/Utilities/KWSys/vtksys/Configure.hxx.in
+++ b/Utilities/KWSys/vtksys/Configure.hxx.in
@@ -36,6 +36,9 @@
 /* Whether STL is in std namespace.  */
 #define @KWSYS_NAMESPACE at _STL_HAVE_STD @KWSYS_STL_HAVE_STD@
 
+/* Whether wstring is available.  */
+#define @KWSYS_NAMESPACE at _STL_HAS_WSTRING @KWSYS_STL_HAS_WSTRING@
+
 /* Whether the STL string has operator<< for ostream.  */
 #define @KWSYS_NAMESPACE at _STL_STRING_HAVE_OSTREAM @KWSYS_STL_STRING_HAVE_OSTREAM@
 
@@ -170,6 +173,7 @@
 # define KWSYS_STL_HAS_ALLOCATOR_TEMPLATE @KWSYS_NAMESPACE at _STL_HAS_ALLOCATOR_TEMPLATE
 # define KWSYS_STL_HAS_ALLOCATOR_NONTEMPLATE @KWSYS_NAMESPACE at _STL_HAS_ALLOCATOR_NONTEMPLATE
 # define KWSYS_STL_HAS_ALLOCATOR_OBJECTS @KWSYS_NAMESPACE at _STL_HAS_ALLOCATOR_OBJECTS
+# define KWSYS_STL_HAS_WSTRING          @KWSYS_NAMESPACE at _STL_HAS_WSTRING
 #endif
 
 #endif
diff --git a/Utilities/KWSys/vtksys/Directory.cxx b/Utilities/KWSys/vtksys/Directory.cxx
index b884747..d54e607 100644
--- a/Utilities/KWSys/vtksys/Directory.cxx
+++ b/Utilities/KWSys/vtksys/Directory.cxx
@@ -14,6 +14,8 @@
 
 #include KWSYS_HEADER(Configure.hxx)
 
+#include KWSYS_HEADER(Encoding.hxx)
+
 #include KWSYS_HEADER(stl/string)
 #include KWSYS_HEADER(stl/vector)
 
@@ -22,6 +24,7 @@
 #if 0
 # include "Directory.hxx.in"
 # include "Configure.hxx.in"
+# include "Encoding.hxx.in"
 # include "kwsys_stl.hxx.in"
 # include "kwsys_stl_string.hxx.in"
 # include "kwsys_stl_vector.hxx.in"
@@ -120,10 +123,10 @@ bool Directory::Load(const char* name)
     buf = new char[n + 2 + 1];
     sprintf(buf, "%s/*", name);
     }
-  struct _finddata_t data;      // data of current file
+  struct _wfinddata_t data;      // data of current file
 
   // Now put them into the file array
-  srchHandle = _findfirst(buf, &data);
+  srchHandle = _wfindfirst((wchar_t*)Encoding::ToWide(buf).c_str(), &data);
   delete [] buf;
 
   if ( srchHandle == -1 )
@@ -134,9 +137,9 @@ bool Directory::Load(const char* name)
   // Loop through names
   do
     {
-    this->Internal->Files.push_back(data.name);
+    this->Internal->Files.push_back(Encoding::ToNarrow(data.name));
     }
-  while ( _findnext(srchHandle, &data) != -1 );
+  while ( _wfindnext(srchHandle, &data) != -1 );
   this->Internal->Path = name;
   return _findclose(srchHandle) != -1;
 }
@@ -160,10 +163,10 @@ unsigned long Directory::GetNumberOfFilesInDirectory(const char* name)
     buf = new char[n + 2 + 1];
     sprintf(buf, "%s/*", name);
     }
-  struct _finddata_t data;      // data of current file
+  struct _wfinddata_t data;      // data of current file
 
   // Now put them into the file array
-  srchHandle = _findfirst(buf, &data);
+  srchHandle = _wfindfirst((wchar_t*)Encoding::ToWide(buf).c_str(), &data);
   delete [] buf;
 
   if ( srchHandle == -1 )
@@ -177,7 +180,7 @@ unsigned long Directory::GetNumberOfFilesInDirectory(const char* name)
     {
     count++;
     }
-  while ( _findnext(srchHandle, &data) != -1 );
+  while ( _wfindnext(srchHandle, &data) != -1 );
   _findclose(srchHandle);
   return count;
 }
diff --git a/Utilities/KWSys/vtksys/DynamicLoader.cxx b/Utilities/KWSys/vtksys/DynamicLoader.cxx
index fd83752..44cf6af 100644
--- a/Utilities/KWSys/vtksys/DynamicLoader.cxx
+++ b/Utilities/KWSys/vtksys/DynamicLoader.cxx
@@ -186,13 +186,12 @@ namespace KWSYS_NAMESPACE
 DynamicLoader::LibraryHandle DynamicLoader::OpenLibrary(const char* libname)
 {
   DynamicLoader::LibraryHandle lh;
-#ifdef UNICODE
-  wchar_t libn[MB_CUR_MAX];
-  mbstowcs(libn, libname, MB_CUR_MAX);
-  lh = LoadLibrary(libn);
-#else
-  lh = LoadLibrary(libname);
-#endif
+  int length = MultiByteToWideChar(CP_UTF8, 0, libname, -1, NULL, 0);
+  wchar_t* wchars = new wchar_t[length+1];
+  wchars[0] = '\0';
+  MultiByteToWideChar(CP_UTF8, 0, libname, -1, wchars, length);
+  lh = LoadLibraryW(wchars);
+  delete [] wchars;
   return lh;
 }
 
@@ -238,13 +237,7 @@ DynamicLoader::SymbolPointer DynamicLoader::GetSymbolAddress(
 #else
   const char *rsym = sym;
 #endif
-#ifdef UNICODE
-  wchar_t wsym[MB_CUR_MAX];
-  mbstowcs(wsym, rsym, MB_CUR_MAX);
-  result = GetProcAddress(lib, wsym);
-#else
   result = (void*)GetProcAddress(lib, rsym);
-#endif
 #if defined(__BORLANDC__) || defined(__WATCOMC__)
   delete[] rsym;
 #endif
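
The OpenLibrary change above uses the usual Win32 measure-then-convert idiom for UTF-8 to UTF-16. A minimal standalone sketch of that idiom follows; the helper name utf8_to_wide is hypothetical and not part of kwsys:

    // Sketch only: same MultiByteToWideChar idiom as DynamicLoader::OpenLibrary.
    #include <windows.h>
    #include <string>

    static std::wstring utf8_to_wide(const char* utf8)
    {
      // First call with a NULL buffer asks for the required length;
      // because cbMultiByte is -1 the length includes the terminating null.
      int length = MultiByteToWideChar(CP_UTF8, 0, utf8, -1, NULL, 0);
      if (length <= 0)
        {
        return std::wstring();
        }
      std::wstring wide(static_cast<size_t>(length), L'\0');
      MultiByteToWideChar(CP_UTF8, 0, utf8, -1, &wide[0], length);
      wide.resize(static_cast<size_t>(length) - 1); // drop the trailing null
      return wide;
    }

    // e.g. HMODULE lib = LoadLibraryW(utf8_to_wide("example.dll").c_str());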
diff --git a/Utilities/KWSys/vtksys/Encoding.h.in b/Utilities/KWSys/vtksys/Encoding.h.in
new file mode 100644
index 0000000..591c5a8
--- /dev/null
+++ b/Utilities/KWSys/vtksys/Encoding.h.in
@@ -0,0 +1,79 @@
+/*============================================================================
+  KWSys - Kitware System Library
+  Copyright 2000-2009 Kitware, Inc., Insight Software Consortium
+
+  Distributed under the OSI-approved BSD License (the "License");
+  see accompanying file Copyright.txt for details.
+
+  This software is distributed WITHOUT ANY WARRANTY; without even the
+  implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+  See the License for more information.
+============================================================================*/
+#ifndef @KWSYS_NAMESPACE@_Encoding_h
+#define @KWSYS_NAMESPACE@_Encoding_h
+
+#include <@KWSYS_NAMESPACE@/Configure.h>
+#include <wchar.h>
+
+/* Redefine all public interface symbol names to be in the proper
+   namespace.  These macros are used internally to kwsys only, and are
+   not visible to user code.  Use kwsysHeaderDump.pl to reproduce
+   these macros after making changes to the interface.  */
+#if !defined(KWSYS_NAMESPACE)
+# define kwsys_ns(x) @KWSYS_NAMESPACE@##x
+# define kwsysEXPORT @KWSYS_NAMESPACE@_EXPORT
+#endif
+#if !@KWSYS_NAMESPACE@_NAME_IS_KWSYS
+# define kwsysEncoding         kwsys_ns(Encoding)
+# define kwsysEncoding_mbstowcs  kwsys_ns(Encoding_mbstowcs)
+# define kwsysEncoding_DupToWide  kwsys_ns(Encoding_DupToWide)
+# define kwsysEncoding_wcstombs  kwsys_ns(Encoding_wcstombs)
+# define kwsysEncoding_DupToNarrow kwsys_ns(Encoding_DupToNarrow)
+#endif
+
+#if defined(__cplusplus)
+extern "C"
+{
+#endif
+
+
+/* Convert a narrow string to a wide string.
+   On Windows, UTF-8 is assumed, and on other platforms,
+   the current locale is assumed.
+   */
+kwsysEXPORT size_t kwsysEncoding_mbstowcs(wchar_t* dest, const char* src, size_t n);
+
+/* Convert a narrow string to a wide string.
+   This can return NULL if the conversion fails. */
+kwsysEXPORT wchar_t* kwsysEncoding_DupToWide(const char* src);
+
+
+/* Convert a wide string to a narrow string.
+   On Windows, UTF-8 is assumed, and on other platforms,
+   the current locale is assumed. */
+kwsysEXPORT size_t kwsysEncoding_wcstombs(char* dest, const wchar_t* src, size_t n);
+
+/* Convert a wide string to a narrow string.
+   This can return NULL if the conversion fails. */
+kwsysEXPORT char* kwsysEncoding_DupToNarrow(const wchar_t* str);
+
+
+#if defined(__cplusplus)
+} /* extern "C" */
+#endif
+
+/* If we are building a kwsys .c or .cxx file, let it use these macros.
+   Otherwise, undefine them to keep the namespace clean.  */
+#if !defined(KWSYS_NAMESPACE)
+# undef kwsys_ns
+# undef kwsysEXPORT
+# if !defined(KWSYS_NAMESPACE) && !@KWSYS_NAMESPACE@_NAME_IS_KWSYS
+#  undef kwsysEncoding
+#  undef kwsysEncoding_mbstowcs
+#  undef kwsysEncoding_DupToWide
+#  undef kwsysEncoding_wcstombs
+#  undef kwsysEncoding_DupToNarrow
+# endif
+#endif
+
+#endif
diff --git a/Utilities/KWSys/vtksys/Encoding.hxx.in b/Utilities/KWSys/vtksys/Encoding.hxx.in
new file mode 100644
index 0000000..60a4a8e
--- /dev/null
+++ b/Utilities/KWSys/vtksys/Encoding.hxx.in
@@ -0,0 +1,56 @@
+/*============================================================================
+  KWSys - Kitware System Library
+  Copyright 2000-2009 Kitware, Inc., Insight Software Consortium
+
+  Distributed under the OSI-approved BSD License (the "License");
+  see accompanying file Copyright.txt for details.
+
+  This software is distributed WITHOUT ANY WARRANTY; without even the
+  implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+  See the License for more information.
+============================================================================*/
+#ifndef @KWSYS_NAMESPACE@_Encoding_hxx
+#define @KWSYS_NAMESPACE@_Encoding_hxx
+
+#include <@KWSYS_NAMESPACE@/Configure.hxx>
+#include <@KWSYS_NAMESPACE@/stl/string>
+
+/* Define these macros temporarily to keep the code readable.  */
+#if !defined (KWSYS_NAMESPACE) && !@KWSYS_NAMESPACE@_NAME_IS_KWSYS
+# define kwsys_stl @KWSYS_NAMESPACE@_stl
+#endif
+
+namespace @KWSYS_NAMESPACE@
+{
+class @KWSYS_NAMESPACE@_EXPORT Encoding
+{
+public:
+  /**
+   * Convert between char and wchar_t
+   */
+
+#if @KWSYS_NAMESPACE@_STL_HAS_WSTRING
+
+  // Convert a narrow string to a wide string.
+  // On Windows, UTF-8 is assumed, and on other platforms,
+  // the current locale is assumed.
+  static kwsys_stl::wstring ToWide(const kwsys_stl::string& str);
+  static kwsys_stl::wstring ToWide(const char* str);
+
+  // Convert a wide string to a narrow string.
+  // On Windows, UTF-8 is assumed, and on other platforms,
+  // the current locale is assumed.
+  static kwsys_stl::string ToNarrow(const kwsys_stl::wstring& str);
+  static kwsys_stl::string ToNarrow(const wchar_t* str);
+
+#endif // @KWSYS_NAMESPACE@_STL_HAS_WSTRING
+
+}; // class Encoding
+} // namespace @KWSYS_NAMESPACE@
+
+/* Undefine temporary macros.  */
+#if !defined (KWSYS_NAMESPACE) && !@KWSYS_NAMESPACE@_NAME_IS_KWSYS
+# undef kwsys_stl
+#endif
+
+#endif
diff --git a/Utilities/KWSys/vtksys/EncodingC.c b/Utilities/KWSys/vtksys/EncodingC.c
new file mode 100644
index 0000000..a36eecc
--- /dev/null
+++ b/Utilities/KWSys/vtksys/EncodingC.c
@@ -0,0 +1,79 @@
+/*============================================================================
+  KWSys - Kitware System Library
+  Copyright 2000-2009 Kitware, Inc., Insight Software Consortium
+
+  Distributed under the OSI-approved BSD License (the "License");
+  see accompanying file Copyright.txt for details.
+
+  This software is distributed WITHOUT ANY WARRANTY; without even the
+  implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+  See the License for more information.
+============================================================================*/
+#include "kwsysPrivate.h"
+#include KWSYS_HEADER(Encoding.h)
+
+/* Work-around CMake dependency scanning limitation.  This must
+   duplicate the above list of headers.  */
+#if 0
+# include "Encoding.h.in"
+#endif
+
+#include <stdlib.h>
+
+#ifdef _WIN32
+#include <windows.h>
+#endif
+
+size_t kwsysEncoding_mbstowcs(wchar_t* dest, const char* str, size_t n)
+{
+  if(str == 0)
+    {
+    return (size_t)-1;
+    }
+#ifdef _WIN32
+  return MultiByteToWideChar(KWSYS_ENCODING_DEFAULT_CODEPAGE, 0,
+                             str, -1, dest, (int)n) - 1;
+#else
+  return mbstowcs(dest, str, n);
+#endif
+}
+
+wchar_t* kwsysEncoding_DupToWide(const char* str)
+{
+  wchar_t* ret = NULL;
+  size_t length = kwsysEncoding_mbstowcs(NULL, str, 0) + 1;
+  if(length > 0)
+    {
+    ret = malloc((length)*sizeof(wchar_t));
+    ret[0] = 0;
+    kwsysEncoding_mbstowcs(ret, str, length);
+    }
+  return ret;
+}
+
+size_t kwsysEncoding_wcstombs(char* dest, const wchar_t* str, size_t n)
+{
+  if(str == 0)
+    {
+    return (size_t)-1;
+    }
+#ifdef _WIN32
+  return WideCharToMultiByte(KWSYS_ENCODING_DEFAULT_CODEPAGE, 0, str, -1,
+                             dest, (int)n, NULL, NULL) - 1;
+#else
+  return wcstombs(dest, str, n);
+#endif
+}
+
+char* kwsysEncoding_DupToNarrow(const wchar_t* str)
+{
+  char* ret = NULL;
+  size_t length = kwsysEncoding_wcstombs(0, str, 0);
+  if(length > 0)
+    {
+    ret = malloc(length);
+    ret[0] = 0;
+    kwsysEncoding_wcstombs(ret, str, length);
+    }
+  return ret;
+}
diff --git a/Utilities/KWSys/vtksys/EncodingCXX.cxx b/Utilities/KWSys/vtksys/EncodingCXX.cxx
new file mode 100644
index 0000000..aebc148
--- /dev/null
+++ b/Utilities/KWSys/vtksys/EncodingCXX.cxx
@@ -0,0 +1,88 @@
+/*============================================================================
+  KWSys - Kitware System Library
+  Copyright 2000-2009 Kitware, Inc., Insight Software Consortium
+
+  Distributed under the OSI-approved BSD License (the "License");
+  see accompanying file Copyright.txt for details.
+
+  This software is distributed WITHOUT ANY WARRANTY; without even the
+  implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+  See the License for more information.
+============================================================================*/
+
+#ifdef __osf__
+#  define _OSF_SOURCE
+#  define _POSIX_C_SOURCE 199506L
+#  define _XOPEN_SOURCE_EXTENDED
+#endif
+
+#include "kwsysPrivate.h"
+#include KWSYS_HEADER(Encoding.hxx)
+#include KWSYS_HEADER(Encoding.h)
+#include KWSYS_HEADER(stl/vector)
+
+// Work-around CMake dependency scanning limitation.  This must
+// duplicate the above list of headers.
+#if 0
+# include "Encoding.hxx.in"
+# include "Encoding.h.in"
+#endif
+
+#include <stdlib.h>
+
+#ifdef _MSC_VER
+# pragma warning (disable: 4786)
+#endif
+
+// Windows API.
+#if defined(_WIN32)
+# include <windows.h>
+#endif
+
+namespace KWSYS_NAMESPACE
+{
+
+#if KWSYS_STL_HAS_WSTRING
+
+kwsys_stl::wstring Encoding::ToWide(const kwsys_stl::string& str)
+{
+  return ToWide(str.c_str());
+}
+
+kwsys_stl::string Encoding::ToNarrow(const kwsys_stl::wstring& str)
+{
+  return ToNarrow(str.c_str());
+}
+
+kwsys_stl::wstring Encoding::ToWide(const char* cstr)
+{
+  kwsys_stl::wstring wstr;
+  size_t length = kwsysEncoding_mbstowcs(0, cstr, 0) + 1;
+  if(length > 0)
+    {
+    kwsys_stl::vector<wchar_t> wchars(length);
+    if(kwsysEncoding_mbstowcs(&wchars[0], cstr, length) > 0)
+      {
+      wstr = &wchars[0];
+      }
+    }
+  return wstr;
+}
+
+kwsys_stl::string Encoding::ToNarrow(const wchar_t* wcstr)
+{
+  kwsys_stl::string str;
+  size_t length = kwsysEncoding_wcstombs(0, wcstr, 0) + 1;
+  if(length > 0)
+    {
+    std::vector<char> chars(length);
+    if(kwsysEncoding_wcstombs(&chars[0], wcstr, length) > 0)
+      {
+      str = &chars[0];
+      }
+    }
+  return str;
+}
+#endif // KWSYS_STL_HAS_WSTRING
+
+} // namespace KWSYS_NAMESPACE
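
A minimal usage sketch for the Encoding helpers added above (this assumes the kwsys namespace is configured as vtksys, as elsewhere in this tree, that the generated header is installed as vtksys/Encoding.hxx, and that KWSYS_STL_HAS_WSTRING was detected; the file name is only an example):

    // Sketch: round-trip a UTF-8 string through the new wide-string helpers.
    #include <vtksys/Encoding.hxx>
    #include <string>
    #include <iostream>

    int main()
    {
      std::string utf8 = "C:/data/\xC3\xA9chantillon.vtk"; // "échantillon" in UTF-8
      std::wstring wide = vtksys::Encoding::ToWide(utf8);  // UTF-8 -> UTF-16 on Windows
      std::string back = vtksys::Encoding::ToNarrow(wide); // and back again
      std::cout << (back == utf8 ? "round-trip ok" : "conversion lost data") << std::endl;
      return 0;
    }

On non-Windows platforms the conversion goes through the current locale, so the round-trip is only guaranteed when the locale can represent the input.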
diff --git a/Utilities/KWSys/vtksys/FStream.hxx.in b/Utilities/KWSys/vtksys/FStream.hxx.in
new file mode 100644
index 0000000..8170fb3
--- /dev/null
+++ b/Utilities/KWSys/vtksys/FStream.hxx.in
@@ -0,0 +1,172 @@
+/*============================================================================
+  KWSys - Kitware System Library
+  Copyright 2000-2009 Kitware, Inc., Insight Software Consortium
+
+  Distributed under the OSI-approved BSD License (the "License");
+  see accompanying file Copyright.txt for details.
+
+  This software is distributed WITHOUT ANY WARRANTY; without even the
+  implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+  See the License for more information.
+============================================================================*/
+#ifndef @KWSYS_NAMESPACE@_FStream_hxx
+#define @KWSYS_NAMESPACE@_FStream_hxx
+
+#include <@KWSYS_NAMESPACE@/ios/fstream>
+#include <@KWSYS_NAMESPACE@/Encoding.hxx>
+
+namespace @KWSYS_NAMESPACE@
+{
+#if defined(_MSC_VER) && _MSC_VER >= 1400
+  template<typename CharType,typename Traits>
+  class basic_filebuf : public std::basic_filebuf<CharType,Traits>
+  {
+    public:
+      typedef std::basic_filebuf<CharType,Traits> my_base_type;
+      basic_filebuf *open(char const *s,std::ios_base::openmode mode)
+      {
+        my_base_type::open(Encoding::ToWide(s).c_str(), mode);
+        return this;
+      }
+  };
+
+  template<typename CharType,typename Traits = std::char_traits<CharType> >
+  class basic_ifstream : public std::basic_istream<CharType,Traits>
+  {
+  public:
+    typedef basic_filebuf<CharType,Traits> internal_buffer_type;
+    typedef std::basic_istream<CharType,Traits> internal_stream_type;
+
+    basic_ifstream() : internal_stream_type(new internal_buffer_type())
+    {
+      buf_ = static_cast<internal_buffer_type *>(internal_stream_type::rdbuf());
+    }
+    explicit basic_ifstream(char const *file_name,
+                            std::ios_base::openmode mode = std::ios_base::in)
+      : internal_stream_type(new internal_buffer_type())
+    {
+      buf_ = static_cast<internal_buffer_type *>(internal_stream_type::rdbuf());
+      open(file_name,mode);
+    }
+    void open(char const *file_name,std::ios_base::openmode mode = std::ios_base::in)
+    {
+      if(!buf_->open(file_name,mode | std::ios_base::in))
+        {
+        this->setstate(std::ios_base::failbit);
+        }
+      else
+        {
+        this->clear();
+        }
+    }
+    bool is_open()
+    {
+      return buf_->is_open();
+    }
+    bool is_open() const
+    {
+      return buf_->is_open();
+    }
+    void close()
+    {
+      if(!buf_->close())
+        {
+        this->setstate(std::ios_base::failbit);
+        }
+      else
+      {
+        this->clear();
+      }
+    }
+
+    internal_buffer_type *rdbuf() const
+    {
+      return buf_.get();
+    }
+
+    ~basic_ifstream()
+    {
+      buf_->close();
+      delete buf_;
+    }
+
+  private:
+    internal_buffer_type* buf_;
+};
+
+template<typename CharType,typename Traits = std::char_traits<CharType> >
+class basic_ofstream : public std::basic_ostream<CharType,Traits>
+{
+  public:
+  typedef basic_filebuf<CharType,Traits> internal_buffer_type;
+  typedef std::basic_ostream<CharType,Traits> internal_stream_type;
+
+  basic_ofstream() : internal_stream_type(new internal_buffer_type())
+  {
+  buf_ = static_cast<internal_buffer_type *>(internal_stream_type::rdbuf());
+  }
+  explicit basic_ofstream(char const *file_name,std::ios_base::openmode mode = std::ios_base::out) :
+  internal_stream_type(new internal_buffer_type())
+  {
+    buf_ = static_cast<internal_buffer_type *>(internal_stream_type::rdbuf());
+    open(file_name,mode);
+  }
+  void open(char const *file_name,std::ios_base::openmode mode = std::ios_base::out)
+  {
+    if(!buf_->open(file_name,mode | std::ios_base::out))
+    {
+    this->setstate(std::ios_base::failbit);
+    }
+    else
+    {
+    this->clear();
+    }
+  }
+  bool is_open()
+  {
+    return buf_->is_open();
+  }
+  bool is_open() const
+  {
+    return buf_->is_open();
+  }
+  void close()
+  {
+    if(!buf_->close())
+      {
+      this->setstate(std::ios_base::failbit);
+      }
+    else
+      {
+      this->clear();
+      }
+  }
+
+  internal_buffer_type *rdbuf() const
+  {
+    return buf_.get();
+  }
+  ~basic_ofstream()
+  {
+    buf_->close();
+    delete buf_;
+  }
+
+  private:
+  internal_buffer_type* buf_;
+};
+
+  typedef basic_ifstream<char> ifstream;
+  typedef basic_ofstream<char> ofstream;
+
+#else
+  using @KWSYS_NAMESPACE@_ios_namespace::basic_filebuf;
+  using @KWSYS_NAMESPACE@_ios_namespace::ofstream;
+  using @KWSYS_NAMESPACE@_ios_namespace::ifstream;
+#endif
+
+}
+
+
+
+#endif
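
A short usage sketch for the FStream wrappers introduced above (again assuming the vtksys namespace and a generated vtksys/FStream.hxx header; on compilers other than MSVC >= 1400 these names simply alias the standard streams):

    // Sketch: read a file whose name is UTF-8 encoded via the wrapper stream.
    #include <vtksys/FStream.hxx>
    #include <iostream>
    #include <string>

    int main()
    {
      vtksys::ifstream in("data/\xC3\xA9chantillon.txt"); // UTF-8 file name
      if (!in.is_open())
        {
        std::cerr << "could not open file" << std::endl;
        return 1;
        }
      std::string line;
      // The wrapper derives from std::basic_istream, so std::getline works as usual.
      while (std::getline(in, line))
        {
        std::cout << line << std::endl;
        }
      return 0;
    }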
diff --git a/Utilities/KWSys/vtksys/ProcessUNIX.c b/Utilities/KWSys/vtksys/ProcessUNIX.c
index fc9e8bf..b9af2f1 100644
--- a/Utilities/KWSys/vtksys/ProcessUNIX.c
+++ b/Utilities/KWSys/vtksys/ProcessUNIX.c
@@ -47,6 +47,12 @@ do.
 
 */
 
+#if defined(__CYGWIN__)
+/* Increase the file descriptor limit for select() before including
+   related system headers. (Default: 64) */
+# define FD_SETSIZE 16384
+#endif
+
 #include <stddef.h>    /* ptrdiff_t */
 #include <stdio.h>     /* snprintf */
 #include <stdlib.h>    /* malloc, free */
diff --git a/Utilities/KWSys/vtksys/ProcessWin32.c b/Utilities/KWSys/vtksys/ProcessWin32.c
index c836f9b..c8ec754 100644
--- a/Utilities/KWSys/vtksys/ProcessWin32.c
+++ b/Utilities/KWSys/vtksys/ProcessWin32.c
@@ -12,12 +12,14 @@
 #include "kwsysPrivate.h"
 #include KWSYS_HEADER(Process.h)
 #include KWSYS_HEADER(System.h)
+#include KWSYS_HEADER(Encoding.h)
 
 /* Work-around CMake dependency scanning limitation.  This must
    duplicate the above list of headers.  */
 #if 0
 # include "Process.h.in"
 # include "System.h.in"
+# include "Encoding_c.h.in"
 #endif
 
 /*
@@ -88,9 +90,10 @@ typedef LARGE_INTEGER kwsysProcessTime;
 typedef struct kwsysProcessCreateInformation_s
 {
   /* Windows child startup control data.  */
-  STARTUPINFO StartupInfo;
+  STARTUPINFOW StartupInfo;
 } kwsysProcessCreateInformation;
 
+
 /*--------------------------------------------------------------------------*/
 typedef struct kwsysProcessPipeData_s kwsysProcessPipeData;
 static DWORD WINAPI kwsysProcessPipeThreadRead(LPVOID ptd);
@@ -197,14 +200,14 @@ struct kwsysProcess_s
   int State;
 
   /* The command lines to execute.  */
-  char** Commands;
+  wchar_t** Commands;
   int NumberOfCommands;
 
   /* The exit code of each command.  */
   DWORD* CommandExitCodes;
 
   /* The working directory for the child process.  */
-  char* WorkingDirectory;
+  wchar_t* WorkingDirectory;
 
   /* Whether to create the child as a detached process.  */
   int OptionDetach;
@@ -299,7 +302,7 @@ struct kwsysProcess_s
 
   /* Real working directory of our own process.  */
   DWORD RealWorkingDirectoryLength;
-  char* RealWorkingDirectory;
+  wchar_t* RealWorkingDirectory;
 };
 
 /*--------------------------------------------------------------------------*/
@@ -546,7 +549,7 @@ int kwsysProcess_SetCommand(kwsysProcess* cp, char const* const* command)
 int kwsysProcess_AddCommand(kwsysProcess* cp, char const* const* command)
 {
   int newNumberOfCommands;
-  char** newCommands;
+  wchar_t** newCommands;
 
   /* Make sure we have a command to add.  */
   if(!cp || !command || !*command)
@@ -554,9 +557,10 @@ int kwsysProcess_AddCommand(kwsysProcess* cp, char const* const* command)
     return 0;
     }
 
+
   /* Allocate a new array for command pointers.  */
   newNumberOfCommands = cp->NumberOfCommands + 1;
-  if(!(newCommands = (char**)malloc(sizeof(char*) * newNumberOfCommands)))
+  if(!(newCommands = (wchar_t**)malloc(sizeof(wchar_t*) * newNumberOfCommands)))
     {
     /* Out of memory.  */
     return 0;
@@ -585,8 +589,8 @@ int kwsysProcess_AddCommand(kwsysProcess* cp, char const* const* command)
   /* Allocate enough space for the command.  We do not need an extra
      byte for the terminating null because we allocated a space for
      the first argument that we will not use.  */
-  newCommands[cp->NumberOfCommands] = (char*)malloc(length);
-  if(!newCommands[cp->NumberOfCommands])
+  char* new_cmd = malloc(length);
+  if(!new_cmd)
     {
     /* Out of memory.  */
     free(newCommands);
@@ -595,9 +599,13 @@ int kwsysProcess_AddCommand(kwsysProcess* cp, char const* const* command)
 
   /* Construct the command line in the allocated buffer.  */
   kwsysProcessComputeCommandLine(cp, command,
-                                 newCommands[cp->NumberOfCommands]);
+                                 new_cmd);
+
+  newCommands[cp->NumberOfCommands] = kwsysEncoding_DupToWide(new_cmd);
+  free(new_cmd);
   }
 
+
   /* Save the new array of commands.  */
   free(cp->Commands);
   cp->Commands = newCommands;
@@ -633,22 +641,26 @@ int kwsysProcess_SetWorkingDirectory(kwsysProcess* cp, const char* dir)
     }
   if(dir && dir[0])
     {
+    wchar_t* wdir = kwsysEncoding_DupToWide(dir);
     /* We must convert the working directory to a full path.  */
-    DWORD length = GetFullPathName(dir, 0, 0, 0);
+    DWORD length = GetFullPathNameW(wdir, 0, 0, 0);
     if(length > 0)
       {
-      cp->WorkingDirectory = (char*)malloc(length);
-      if(!cp->WorkingDirectory)
+      wchar_t* work_dir = malloc(length*sizeof(wchar_t));
+      if(!work_dir)
         {
+        free(wdir);
         return 0;
         }
-      if(!GetFullPathName(dir, length, cp->WorkingDirectory, 0))
+      if(!GetFullPathNameW(wdir, length, work_dir, 0))
         {
-        free(cp->WorkingDirectory);
-        cp->WorkingDirectory = 0;
+        free(work_dir);
+        free(wdir);
         return 0;
         }
+      cp->WorkingDirectory = work_dir;
       }
+    free(wdir);
     }
   return 1;
 }
@@ -879,13 +891,13 @@ void kwsysProcess_Execute(kwsysProcess* cp)
      to make pipe file paths evaluate correctly.  */
   if(cp->WorkingDirectory)
     {
-    if(!GetCurrentDirectory(cp->RealWorkingDirectoryLength,
+    if(!GetCurrentDirectoryW(cp->RealWorkingDirectoryLength,
                             cp->RealWorkingDirectory))
       {
       kwsysProcessCleanup(cp, 1);
       return;
       }
-    SetCurrentDirectory(cp->WorkingDirectory);
+    SetCurrentDirectoryW(cp->WorkingDirectory);
     }
 
   /* Initialize startup info data.  */
@@ -1003,7 +1015,7 @@ void kwsysProcess_Execute(kwsysProcess* cp)
   /* Restore the working directory.  */
   if(cp->RealWorkingDirectory)
     {
-    SetCurrentDirectory(cp->RealWorkingDirectory);
+    SetCurrentDirectoryW(cp->RealWorkingDirectory);
     free(cp->RealWorkingDirectory);
     cp->RealWorkingDirectory = 0;
     }
@@ -1507,10 +1519,10 @@ int kwsysProcessInitialize(kwsysProcess* cp)
   /* Allocate space to save the real working directory of this process.  */
   if(cp->WorkingDirectory)
     {
-    cp->RealWorkingDirectoryLength = GetCurrentDirectory(0, 0);
+    cp->RealWorkingDirectoryLength = GetCurrentDirectoryW(0, 0);
     if(cp->RealWorkingDirectoryLength > 0)
       {
-      cp->RealWorkingDirectory = (char*)malloc(cp->RealWorkingDirectoryLength);
+      cp->RealWorkingDirectory = malloc(cp->RealWorkingDirectoryLength * sizeof(wchar_t));
       if(!cp->RealWorkingDirectory)
         {
         return 0;
@@ -1547,9 +1559,11 @@ int kwsysProcessCreate(kwsysProcess* cp, int index,
   else if(cp->PipeFileSTDIN)
     {
     /* Create a handle to read a file for stdin.  */
-    HANDLE fin = CreateFile(cp->PipeFileSTDIN, GENERIC_READ|GENERIC_WRITE,
+    wchar_t* wstdin = kwsysEncoding_DupToWide(cp->PipeFileSTDIN);
+    HANDLE fin = CreateFileW(wstdin, GENERIC_READ|GENERIC_WRITE,
                             FILE_SHARE_READ|FILE_SHARE_WRITE,
                             0, OPEN_EXISTING, 0, 0);
+    free(wstdin);
     if(fin == INVALID_HANDLE_VALUE)
       {
       return 0;
@@ -1655,7 +1669,7 @@ int kwsysProcessCreate(kwsysProcess* cp, int index,
 
   /* Create the child in a suspended state so we can wait until all
      children have been created before running any one.  */
-  if(!CreateProcess(0, cp->Commands[index], 0, 0, TRUE, CREATE_SUSPENDED, 0,
+  if(!CreateProcessW(0, cp->Commands[index], 0, 0, TRUE, CREATE_SUSPENDED, 0,
                     0, &si->StartupInfo, &cp->ProcessInformation[index]))
     {
     return 0;
@@ -1729,6 +1743,7 @@ void kwsysProcessDestroy(kwsysProcess* cp, int event)
 int kwsysProcessSetupOutputPipeFile(PHANDLE phandle, const char* name)
 {
   HANDLE fout;
+  wchar_t* wname;
   if(!name)
     {
     return 1;
@@ -1738,8 +1753,10 @@ int kwsysProcessSetupOutputPipeFile(PHANDLE phandle, const char* name)
   kwsysProcessCleanupHandle(phandle);
 
   /* Create a handle to write a file for the pipe.  */
-  fout = CreateFile(name, GENERIC_WRITE, FILE_SHARE_READ, 0,
+  wname = kwsysEncoding_DupToWide(name);
+  fout = CreateFileW(wname, GENERIC_WRITE, FILE_SHARE_READ, 0,
                     CREATE_ALWAYS, 0, 0);
+  free(wname);
   if(fout == INVALID_HANDLE_VALUE)
     {
     return 0;
@@ -1883,10 +1900,13 @@ void kwsysProcessCleanup(kwsysProcess* cp, int error)
       {
       /* Format the error message.  */
       DWORD original = GetLastError();
-      DWORD length = FormatMessage(FORMAT_MESSAGE_FROM_SYSTEM |
+      wchar_t err_msg[KWSYSPE_PIPE_BUFFER_SIZE];
+      DWORD length = FormatMessageW(FORMAT_MESSAGE_FROM_SYSTEM |
                                    FORMAT_MESSAGE_IGNORE_INSERTS, 0, original,
                                    MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
-                                   cp->ErrorMessage, KWSYSPE_PIPE_BUFFER_SIZE, 0);
+                                   err_msg, KWSYSPE_PIPE_BUFFER_SIZE, 0);
+      WideCharToMultiByte(CP_UTF8, 0, err_msg, -1, cp->ErrorMessage,
+                          KWSYSPE_PIPE_BUFFER_SIZE, NULL, NULL);
       if(length < 1)
         {
         /* FormatMessage failed.  Use a default message.  */
@@ -1924,7 +1944,7 @@ void kwsysProcessCleanup(kwsysProcess* cp, int error)
     /* Restore the working directory.  */
     if(cp->RealWorkingDirectory)
       {
-      SetCurrentDirectory(cp->RealWorkingDirectory);
+      SetCurrentDirectoryW(cp->RealWorkingDirectory);
       }
     }
 
@@ -2222,7 +2242,7 @@ static void kwsysProcessSetExitException(kwsysProcess* cp, int code)
     case STATUS_NO_MEMORY:
     default:
       cp->ExitException = kwsysProcess_Exception_Other;
-      sprintf(cp->ExitExceptionString, "Exit code 0x%x\n", code);
+      _snprintf(cp->ExitExceptionString, KWSYSPE_PIPE_BUFFER_SIZE, "Exit code 0x%x\n", code);
       break;
     }
 }
@@ -2430,7 +2450,7 @@ static int kwsysProcess_List__New_NT4(kwsysProcess_List* self)
      loaded in this program.  This does not actually increment the
      reference count to the module so we do not need to close the
      handle.  */
-  HMODULE hNT = GetModuleHandle("ntdll.dll");
+  HMODULE hNT = GetModuleHandleW(L"ntdll.dll");
   if(hNT)
     {
     /* Get pointers to the needed API functions.  */
@@ -2534,7 +2554,7 @@ static int kwsysProcess_List__New_Snapshot(kwsysProcess_List* self)
      loaded in this program.  This does not actually increment the
      reference count to the module so we do not need to close the
      handle.  */
-  HMODULE hKernel = GetModuleHandle("kernel32.dll");
+  HMODULE hKernel = GetModuleHandleW(L"kernel32.dll");
   if(hKernel)
     {
     self->P_CreateToolhelp32Snapshot =
diff --git a/Utilities/KWSys/vtksys/RegularExpression.cxx b/Utilities/KWSys/vtksys/RegularExpression.cxx
index f6eeeba..c829787 100644
--- a/Utilities/KWSys/vtksys/RegularExpression.cxx
+++ b/Utilities/KWSys/vtksys/RegularExpression.cxx
@@ -312,8 +312,8 @@ static       char* regbranch (int*);
 static       char* regpiece (int*);
 static       char* regatom (int*);
 static       char* regnode (char);
-static const char* regnext (register const char*);
-static       char* regnext (register char*);
+static const char* regnext (const char*);
+static       char* regnext (char*);
 static void        regc (char);
 static void        reginsert (char, char*);
 static void        regtail (char*, const char*);
@@ -344,10 +344,10 @@ static int strcspn ();
 // for later pattern matching.
 
 bool RegularExpression::compile (const char* exp) {
-    register const char* scan;
-    register const char* longest;
-    register size_t len;
-             int         flags;
+    const char* scan;
+    const char* longest;
+    size_t      len;
+    int         flags;
 
     if (exp == 0) {
       //RAISE Error, SYM(RegularExpression), SYM(No_Expr),
@@ -444,11 +444,11 @@ bool RegularExpression::compile (const char* exp) {
  * follows makes it hard to avoid.
  */
 static char* reg (int paren, int *flagp) {
-    register char* ret;
-    register char* br;
-    register char* ender;
-    register int   parno =0;
-             int   flags;
+    char* ret;
+    char* br;
+    char* ender;
+    int   parno =0;
+    int   flags;
 
     *flagp = HASWIDTH;          // Tentatively.
 
@@ -525,10 +525,10 @@ static char* reg (int paren, int *flagp) {
  * Implements the concatenation operator.
  */
 static char* regbranch (int *flagp) {
-    register char* ret;
-    register char* chain;
-    register char* latest;
-    int                  flags;
+    char* ret;
+    char* chain;
+    char* latest;
+    int   flags;
 
     *flagp = WORST;             // Tentatively.
 
@@ -562,10 +562,10 @@ static char* regbranch (int *flagp) {
  * endmarker role is not redundant.
  */
 static char* regpiece (int *flagp) {
-    register char* ret;
-    register char  op;
-    register char* next;
-    int            flags;
+    char* ret;
+    char  op;
+    char* next;
+    int   flags;
 
     ret = regatom(&flags);
     if (ret == 0)
@@ -631,8 +631,8 @@ static char* regpiece (int *flagp) {
  * separate node; the code is simpler that way and it's not worth fixing.
  */
 static char* regatom (int *flagp) {
-    register char* ret;
-             int   flags;
+    char* ret;
+    int   flags;
 
     *flagp = WORST;             // Tentatively.
 
@@ -648,8 +648,8 @@ static char* regatom (int *flagp) {
             *flagp |= HASWIDTH | SIMPLE;
             break;
         case '[':{
-                register int    rxpclass;
-                register int    rxpclassend;
+                int    rxpclass;
+                int    rxpclassend;
 
                 if (*regparse == '^') { // Complement of range.
                     ret = regnode(ANYBUT);
@@ -720,8 +720,8 @@ static char* regatom (int *flagp) {
             *flagp |= HASWIDTH | SIMPLE;
             break;
         default:{
-                register int    len;
-                register char   ender;
+                int    len;
+                char   ender;
 
                 regparse--;
                 len = int(strcspn(regparse, META));
@@ -754,8 +754,8 @@ static char* regatom (int *flagp) {
    Location.
  */
 static char* regnode (char op) {
-    register char* ret;
-    register char* ptr;
+    char* ret;
+    char* ptr;
 
     ret = regcode;
     if (ret == &regdummy) {
@@ -790,9 +790,9 @@ static void regc (char b) {
  * Means relocating the operand.
  */
 static void reginsert (char op, char* opnd) {
-    register char* src;
-    register char* dst;
-    register char* place;
+    char* src;
+    char* dst;
+    char* place;
 
     if (regcode == &regdummy) {
         regsize += 3;
@@ -816,9 +816,9 @@ static void reginsert (char op, char* opnd) {
  - regtail - set the next-pointer at the end of a node chain
  */
 static void regtail (char* p, const char* val) {
-    register char* scan;
-    register char* temp;
-    register int   offset;
+    char* scan;
+    char* temp;
+    int   offset;
 
     if (p == &regdummy)
         return;
@@ -893,7 +893,7 @@ bool RegularExpression::find (kwsys_stl::string const& s)
 // Returns true if found, and sets start and end indexes accordingly.
 
 bool RegularExpression::find (const char* string) {
-    register const char* s;
+    const char* s;
 
     this->searchstring = string;
 
@@ -956,9 +956,9 @@ bool RegularExpression::find (const char* string) {
  */
 static int regtry (const char* string, const char* *start,
                    const char* *end, const char* prog) {
-    register       int    i;
-    register const char* *sp1;
-    register const char* *ep;
+                   int    i;
+                   const char* *sp1;
+                   const char* *ep;
 
     reginput = string;
     regstartp = start;
@@ -992,8 +992,8 @@ static int regtry (const char* string, const char* *start,
  * 0 failure, 1 success
  */
 static int regmatch (const char* prog) {
-    register const char* scan;  // Current node.
-             const char* next;  // Next node.
+    const char* scan;  // Current node.
+    const char* next;  // Next node.
 
     scan = prog;
 
@@ -1016,8 +1016,8 @@ static int regmatch (const char* prog) {
                 reginput++;
                 break;
             case EXACTLY:{
-                    register size_t len;
-                    register const char* opnd;
+                    size_t len;
+                    const char* opnd;
 
                     opnd = OPERAND(scan);
                     // Inline the first character, for speed.
@@ -1052,8 +1052,8 @@ static int regmatch (const char* prog) {
             case OPEN + 7:
             case OPEN + 8:
             case OPEN + 9:{
-                    register       int    no;
-                    register const char* save;
+                    int    no;
+                    const char* save;
 
                     no = OP(scan) - OPEN;
                     save = reginput;
@@ -1081,8 +1081,8 @@ static int regmatch (const char* prog) {
             case CLOSE + 7:
             case CLOSE + 8:
             case CLOSE + 9:{
-                    register       int    no;
-                    register const char* save;
+                    int    no;
+                    const char* save;
 
                     no = OP(scan) - CLOSE;
                     save = reginput;
@@ -1103,7 +1103,7 @@ static int regmatch (const char* prog) {
 //              break;
             case BRANCH:{
 
-              register const char* save;
+              const char* save;
 
                     if (OP(next) != BRANCH)     // No choice.
                         next = OPERAND(scan);   // Avoid recursion.
@@ -1122,10 +1122,10 @@ static int regmatch (const char* prog) {
                 break;
             case STAR:
             case PLUS:{
-              register char   nextch;
-                    register int        no;
-                    register const char* save;
-                    register int        min_no;
+                    char   nextch;
+                    int        no;
+                    const char* save;
+                    int        min_no;
 
                     //
                     // Lookahead to avoid useless match attempts when we know
@@ -1174,9 +1174,9 @@ static int regmatch (const char* prog) {
  - regrepeat - repeatedly match something simple, report how many
  */
 static int regrepeat (const char* p) {
-    register       int   count = 0;
-    register const char* scan;
-    register const char* opnd;
+    int         count = 0;
+    const char* scan;
+    const char* opnd;
 
     scan = reginput;
     opnd = OPERAND(p);
@@ -1216,8 +1216,8 @@ static int regrepeat (const char* p) {
 /*
  - regnext - dig the "next" pointer out of a node
  */
-static const char* regnext (register const char* p) {
-    register int offset;
+static const char* regnext (const char* p) {
+    int offset;
 
     if (p == &regdummy)
         return (0);
@@ -1232,8 +1232,8 @@ static const char* regnext (register const char* p) {
         return (p + offset);
 }
 
-static char* regnext (register char* p) {
-    register int offset;
+static char* regnext (char* p) {
+    int offset;
 
     if (p == &regdummy)
         return (0);
diff --git a/Utilities/KWSys/vtksys/SharedForward.h.in b/Utilities/KWSys/vtksys/SharedForward.h.in
index 8521099..8bbc74a 100644
--- a/Utilities/KWSys/vtksys/SharedForward.h.in
+++ b/Utilities/KWSys/vtksys/SharedForward.h.in
@@ -772,7 +772,7 @@ static int kwsys_shared_forward_get_settings(const char* self_path,
   const char** dir;
   for(dir = search_path; *dir; ++dir)
     {
-    /* Add seperator between path components.  */
+    /* Add separator between path components.  */
     if(dir != search_path)
       {
       strcat(ldpath, kwsys_shared_forward_path_sep);
diff --git a/Utilities/KWSys/vtksys/SystemInformation.cxx b/Utilities/KWSys/vtksys/SystemInformation.cxx
index f057e0f..2f6c949 100644
--- a/Utilities/KWSys/vtksys/SystemInformation.cxx
+++ b/Utilities/KWSys/vtksys/SystemInformation.cxx
@@ -18,6 +18,10 @@
 # include <winsock.h> // WSADATA, include before sys/types.h
 #endif
 
+#if (defined(__GNUC__) || defined(__PGI)) && !defined(_GNU_SOURCE)
+# define _GNU_SOURCE
+#endif
+
 // TODO:
 // We need an alternative implementation for many functions in this file
 // when USE_ASM_INSTRUCTIONS gets defined as 0.
@@ -84,6 +88,15 @@ typedef int siginfo_t;
 #  include <ifaddrs.h>
 #  define KWSYS_SYSTEMINFORMATION_IMPLEMENT_FQDN
 # endif
+# if defined(KWSYS_SYSTEMINFORMATION_HAS_BACKTRACE)
+#  include <execinfo.h>
+#  if defined(KWSYS_SYSTEMINFORMATION_HAS_CPP_DEMANGLE)
+#    include <cxxabi.h>
+#  endif
+#  if defined(KWSYS_SYSTEMINFORMATION_HAS_SYMBOL_LOOKUP)
+#    include <dlfcn.h>
+#  endif
+# endif
 #endif
 
 #if defined(__OpenBSD__) || defined(__NetBSD__)
@@ -114,8 +127,15 @@ typedef int siginfo_t;
 #  define KWSYS_SYSTEMINFORMATION_IMPLEMENT_FQDN
 # endif
 # if __ENVIRONMENT_MAC_OS_X_VERSION_MIN_REQUIRED__-0 >= 1050
-#  include <execinfo.h>
-#  define KWSYS_SYSTEMINFORMATION_HAVE_BACKTRACE
+#  if defined(KWSYS_SYSTEMINFORMATION_HAS_BACKTRACE)
+#   include <execinfo.h>
+#   if defined(KWSYS_SYSTEMINFORMATION_HAS_CPP_DEMANGLE)
+#     include <cxxabi.h>
+#   endif
+#   if defined(KWSYS_SYSTEMINFORMATION_HAS_SYMBOL_LOOKUP)
+#     include <dlfcn.h>
+#   endif
+#  endif
 # endif
 #endif
 
@@ -130,10 +150,13 @@ typedef int siginfo_t;
 #   define KWSYS_SYSTEMINFORMATION_IMPLEMENT_FQDN
 #  endif
 # endif
-# if defined(__GNUG__)
+# if defined(KWSYS_SYSTEMINFORMATION_HAS_BACKTRACE)
 #  include <execinfo.h>
-#  if !(defined(__LSB_VERSION__) && __LSB_VERSION__ < 41)
-#   define KWSYS_SYSTEMINFORMATION_HAVE_BACKTRACE
+#  if defined(KWSYS_SYSTEMINFORMATION_HAS_CPP_DEMANGLE)
+#    include <cxxabi.h>
+#  endif
+#  if defined(KWSYS_SYSTEMINFORMATION_HAS_SYMBOL_LOOKUP)
+#    include <dlfcn.h>
 #  endif
 # endif
 # if defined(KWSYS_CXX_HAS_RLIMIT64)
@@ -230,7 +253,7 @@ static bool call_cpuid(int select, int result[4])
     _asm {
 #ifdef CPUID_AWARE_COMPILER
       ; we must push/pop the registers <<CPUID>> writes to, as the
-      ; optimiser doesn't know about <<CPUID>>, and so doesn't expect
+      ; optimiser does not know about <<CPUID>>, and so does not expect
       ; these registers to change.
       push eax
       push ebx
@@ -357,6 +380,10 @@ public:
   static
   void SetStackTraceOnError(int enable);
 
+  // get current stack
+  static
+  kwsys_stl::string GetProgramStack(int firstFrame, int wholePath);
+
   /** Run the different checks */
   void RunCPUCheck();
   void RunOSCheck();
@@ -812,6 +839,11 @@ void SystemInformation::SetStackTraceOnError(int enable)
   SystemInformationImplementation::SetStackTraceOnError(enable);
 }
 
+kwsys_stl::string SystemInformation::GetProgramStack(int firstFrame, int wholePath)
+{
+  return SystemInformationImplementation::GetProgramStack(firstFrame, wholePath);
+}
+
 /** Run the different checks */
 void SystemInformation::RunCPUCheck()
 {
@@ -908,6 +940,12 @@ int LoadLines(
         }
       continue;
       }
+    char *pBuf=buf;
+    while(*pBuf)
+      {
+      if (*pBuf=='\n') *pBuf='\0';
+      pBuf+=1;
+      }
     lines.push_back(buf);
     ++nRead;
     }
@@ -1046,12 +1084,29 @@ void StacktraceSignalHandler(
 #if defined(__linux) || defined(__APPLE__)
   kwsys_ios::ostringstream oss;
   oss
+     << kwsys_ios::endl
      << "=========================================================" << kwsys_ios::endl
      << "Process id " << getpid() << " ";
   switch (sigNo)
     {
+    case SIGINT:
+      oss << "Caught SIGINT";
+      break;
+
+    case SIGTERM:
+      oss << "Caught SIGTERM";
+      break;
+
+    case SIGABRT:
+      oss << "Caught SIGABRT";
+      break;
+
     case SIGFPE:
-      oss << "Caught SIGFPE ";
+      oss
+        << "Caught SIGFPE at "
+        << (sigInfo->si_addr==0?"0x":"")
+        << sigInfo->si_addr
+        <<  " ";
       switch (sigInfo->si_code)
         {
 # if defined(FPE_INTDIV)
@@ -1099,7 +1154,11 @@ void StacktraceSignalHandler(
       break;
 
     case SIGSEGV:
-      oss << "Caught SIGSEGV ";
+      oss
+        << "Caught SIGSEGV at "
+        << (sigInfo->si_addr==0?"0x":"")
+        << sigInfo->si_addr
+        <<  " ";
       switch (sigInfo->si_code)
         {
         case SEGV_MAPERR:
@@ -1116,16 +1175,12 @@ void StacktraceSignalHandler(
         }
       break;
 
-    case SIGINT:
-      oss << "Caught SIGTERM";
-      break;
-
-    case SIGTERM:
-      oss << "Caught SIGTERM";
-      break;
-
     case SIGBUS:
-      oss << "Caught SIGBUS type ";
+      oss
+        << "Caught SIGBUS at "
+        << (sigInfo->si_addr==0?"0x":"")
+        << sigInfo->si_addr
+        <<  " ";
       switch (sigInfo->si_code)
         {
         case BUS_ADRALN:
@@ -1134,13 +1189,25 @@ void StacktraceSignalHandler(
 
 # if defined(BUS_ADRERR)
         case BUS_ADRERR:
-          oss << "non-exestent physical address";
+          oss << "nonexistent physical address";
           break;
 # endif
 
 # if defined(BUS_OBJERR)
         case BUS_OBJERR:
-          oss << "object specific hardware error";
+          oss << "object-specific hardware error";
+          break;
+# endif
+
+# if defined(BUS_MCEERR_AR)
+        case BUS_MCEERR_AR:
+          oss << "Hardware memory error consumed on a machine check; action required.";
+          break;
+# endif
+
+# if defined(BUS_MCEERR_AO)
+        case BUS_MCEERR_AO:
+          oss << "Hardware memory error detected in process but not consumed; action optional.";
           break;
 # endif
 
@@ -1151,7 +1218,11 @@ void StacktraceSignalHandler(
       break;
 
     case SIGILL:
-      oss << "Caught SIGILL ";
+      oss
+        << "Caught SIGILL at "
+        << (sigInfo->si_addr==0?"0x":"")
+        << sigInfo->si_addr
+        <<  " ";
       switch (sigInfo->si_code)
         {
         case ILL_ILLOPC:
@@ -1205,20 +1276,16 @@ void StacktraceSignalHandler(
       oss << "Caught " << sigNo << " code " << sigInfo->si_code;
       break;
     }
-  oss << kwsys_ios::endl;
-#if defined(KWSYS_SYSTEMINFORMATION_HAVE_BACKTRACE)
-  oss << "Program Stack:" << kwsys_ios::endl;
-  void *stackSymbols[128];
-  int n=backtrace(stackSymbols,128);
-  char **stackText=backtrace_symbols(stackSymbols,n);
-  for (int i=0; i<n; ++i)
-    {
-    oss << "  " << stackText[i] << kwsys_ios::endl;
-    }
-#endif
   oss
-     << "=========================================================" << kwsys_ios::endl;
+    << kwsys_ios::endl
+    << "Program Stack:" << kwsys_ios::endl
+    << SystemInformationImplementation::GetProgramStack(2,0)
+    << "=========================================================" << kwsys_ios::endl;
   kwsys_ios::cerr << oss.str() << kwsys_ios::endl;
+
+  // restore the previously registered handlers
+  // and abort
+  SystemInformationImplementation::SetStackTraceOnError(0);
   abort();
 #else
   // avoid warning C4100
@@ -1227,8 +1294,213 @@ void StacktraceSignalHandler(
 #endif
 }
 #endif
+
+#if defined(KWSYS_SYSTEMINFORMATION_HAS_BACKTRACE)
+#define safes(_arg)((_arg)?(_arg):"???")
+
+// Description:
+// A container for symbol properties. Each instance
+// must be Initialized.
+class SymbolProperties
+{
+public:
+  SymbolProperties();
+
+  // Description:
+  // The SymbolProperties instance must be initialized by
+  // passing a stack address.
+  void Initialize(void *address);
+
+  // Description:
+  // Get the symbol's stack address.
+  void *GetAddress() const { return this->Address; }
+
+  // Description:
+  // If not set, paths will be removed, e.g. from a binary
+  // or source file.
+  void SetReportPath(int rp){ this->ReportPath=rp; }
+
+  // Description:
+  // Set/Get the name of the binary file that the symbol
+  // is found in.
+  void SetBinary(const char *binary)
+    { this->Binary=safes(binary); }
+
+  kwsys_stl::string GetBinary() const;
+
+  // Description:
+  // Set the name of the function that the symbol is found in.
+  // If c++ demangling is supported it will be demangled.
+  void SetFunction(const char *function)
+    { this->Function=this->Demangle(function); }
+
+  kwsys_stl::string GetFunction() const
+    { return this->Function; }
+
+  // Description:
+  // Set/Get the name of the source file where the symbol
+  // is defined.
+  void SetSourceFile(const char *sourcefile)
+    { this->SourceFile=safes(sourcefile); }
+
+  kwsys_stl::string GetSourceFile() const
+    { return this->GetFileName(this->SourceFile); }
+
+  // Description:
+  // Set/Get the line number where the symbol is defined
+  void SetLineNumber(long linenumber){ this->LineNumber=linenumber; }
+  long GetLineNumber() const { return this->LineNumber; }
+
+  // Description:
+  // Set the address where the binary image is mapped
+  // into memory.
+  void SetBinaryBaseAddress(void *address)
+    { this->BinaryBaseAddress=address; }
+
+private:
+  void *GetRealAddress() const
+    { return (void*)((char*)this->Address-(char*)this->BinaryBaseAddress); }
+
+  kwsys_stl::string GetFileName(const kwsys_stl::string &path) const;
+  kwsys_stl::string Demangle(const char *symbol) const;
+
+private:
+  kwsys_stl::string Binary;
+  void *BinaryBaseAddress;
+  void *Address;
+  kwsys_stl::string SourceFile;
+  kwsys_stl::string Function;
+  long LineNumber;
+  int ReportPath;
+};
+
+// --------------------------------------------------------------------------
+kwsys_ios::ostream &operator<<(
+      kwsys_ios::ostream &os,
+      const SymbolProperties &sp)
+{
+#if defined(KWSYS_SYSTEMINFORMATION_HAS_SYMBOL_LOOKUP)
+  os
+    << kwsys_ios::hex << sp.GetAddress() << " : "
+    << sp.GetFunction()
+    << " [(" << sp.GetBinary() << ") "
+    << sp.GetSourceFile() << ":"
+    << kwsys_ios::dec << sp.GetLineNumber() << "]";
+#elif defined(KWSYS_SYSTEMINFORMATION_HAS_BACKTRACE)
+  void *addr = sp.GetAddress();
+  char **syminfo = backtrace_symbols(&addr,1);
+  os << safes(syminfo[0]);
+  free(syminfo);
+#else
+  (void)os;
+  (void)sp;
+#endif
+  return os;
+}
+
+// --------------------------------------------------------------------------
+SymbolProperties::SymbolProperties()
+{
+  // not using an initializer list
+  // to avoid some PGI compiler warnings
+  this->SetBinary("???");
+  this->SetBinaryBaseAddress(NULL);
+  this->Address = NULL;
+  this->SetSourceFile("???");
+  this->SetFunction("???");
+  this->SetLineNumber(-1);
+  this->SetReportPath(0);
+  // avoid PGI compiler warnings
+  this->GetRealAddress();
+  this->GetFunction();
+  this->GetSourceFile();
+  this->GetLineNumber();
+}
+
+// --------------------------------------------------------------------------
+kwsys_stl::string SymbolProperties::GetFileName(const kwsys_stl::string &path) const
+{
+  kwsys_stl::string file(path);
+  if (!this->ReportPath)
+    {
+    size_t at = file.rfind("/");
+    if (at!=kwsys_stl::string::npos)
+      {
+      file = file.substr(at+1,kwsys_stl::string::npos);
+      }
+    }
+  return file;
+}
+
+// --------------------------------------------------------------------------
+kwsys_stl::string SymbolProperties::GetBinary() const
+{
+// only linux has proc fs
+#if defined(__linux__)
+  if (this->Binary=="/proc/self/exe")
+    {
+    kwsys_stl::string binary;
+    char buf[1024]={'\0'};
+    ssize_t ll=0;
+    if ((ll=readlink("/proc/self/exe",buf,1024))>0)
+      {
+      buf[ll]='\0';
+      binary=buf;
+      }
+    else
+      {
+      binary="/proc/self/exe";
+      }
+    return this->GetFileName(binary);
+    }
+#endif
+  return this->GetFileName(this->Binary);
+}
+
+// --------------------------------------------------------------------------
+kwsys_stl::string SymbolProperties::Demangle(const char *symbol) const
+{
+  kwsys_stl::string result = safes(symbol);
+#if defined(KWSYS_SYSTEMINFORMATION_HAS_CPP_DEMANGLE)
+  int status = 0;
+  size_t bufferLen = 1024;
+  char *buffer = (char*)malloc(1024);
+  char *demangledSymbol =
+    abi::__cxa_demangle(symbol, buffer, &bufferLen, &status);
+  if (!status)
+    {
+    result = demangledSymbol;
+    }
+  free(buffer);
+#else
+  (void)symbol;
+#endif
+  return result;
+}
+
+// --------------------------------------------------------------------------
+void SymbolProperties::Initialize(void *address)
+{
+  this->Address = address;
+#if defined(KWSYS_SYSTEMINFORMATION_HAS_SYMBOL_LOOKUP)
+  // first fallback option can demangle c++ functions
+  Dl_info info;
+  int ierr=dladdr(this->Address,&info);
+  if (ierr && info.dli_sname && info.dli_saddr)
+    {
+    this->SetBinary(info.dli_fname);
+    this->SetFunction(info.dli_sname);
+    }
+#else
+  // second fallback: use the builtin backtrace_symbols
+  // to decode the backtrace.
+#endif
+}
+#endif // don't define this class if we're not using it
+
 } // anonymous namespace
 
+
 SystemInformationImplementation::SystemInformationImplementation()
 {
   this->TotalVirtualMemory = 0;
@@ -1471,12 +1743,12 @@ int SystemInformationImplementation::GetFullyQualifiedDomainName(
       {
       char host[NI_MAXHOST]={'\0'};
 
-      int addrlen
+      const size_t addrlen
         = (fam==AF_INET?sizeof(struct sockaddr_in):sizeof(struct sockaddr_in6));
 
       ierr=getnameinfo(
             ifa->ifa_addr,
-            addrlen,
+            static_cast<socklen_t>(addrlen),
             host,
             NI_MAXHOST,
             NULL,
@@ -2182,8 +2454,8 @@ bool SystemInformationImplementation::RetrieveCPUClockSpeed()
   if (!retrieved)
     {
     HKEY hKey = NULL;
-    LONG err = RegOpenKeyEx(HKEY_LOCAL_MACHINE,
-      "HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0", 0,
+    LONG err = RegOpenKeyExW(HKEY_LOCAL_MACHINE,
+      L"HARDWARE\\DESCRIPTION\\System\\CentralProcessor\\0", 0,
       KEY_READ, &hKey);
 
     if (ERROR_SUCCESS == err)
@@ -2192,7 +2464,7 @@ bool SystemInformationImplementation::RetrieveCPUClockSpeed()
       DWORD data = 0;
       DWORD dwSize = sizeof(DWORD);
 
-      err = RegQueryValueEx(hKey, "~MHz", 0,
+      err = RegQueryValueExW(hKey, L"~MHz", 0,
         &dwType, (LPBYTE) &data, &dwSize);
 
       if (ERROR_SUCCESS == err)
@@ -3336,12 +3608,61 @@ SystemInformationImplementation::GetProcessId()
 }
 
 /**
+return current program stack in a string
+demangle cxx symbols if possible.
+*/
+kwsys_stl::string SystemInformationImplementation::GetProgramStack(
+      int firstFrame,
+      int wholePath)
+{
+  kwsys_stl::string programStack = ""
+#if !defined(KWSYS_SYSTEMINFORMATION_HAS_BACKTRACE)
+    "WARNING: The stack could not be examined "
+    "because backtrace is not supported.\n"
+#elif !defined(KWSYS_SYSTEMINFORMATION_HAS_DEBUG_BUILD)
+    "WARNING: The stack trace will not use advanced "
+    "capabilities because this is a release build.\n"
+#else
+# if !defined(KWSYS_SYSTEMINFORMATION_HAS_SYMBOL_LOOKUP)
+    "WARNING: Function names will not be demangled because "
+    "dladdr is not available.\n"
+# endif
+# if !defined(KWSYS_SYSTEMINFORMATION_HAS_CPP_DEMANGLE)
+    "WARNING: Function names will not be demangled "
+    "because cxxabi is not available.\n"
+# endif
+#endif
+    ;
+
+  kwsys_ios::ostringstream oss;
+#if defined(KWSYS_SYSTEMINFORMATION_HAS_BACKTRACE)
+  void *stackSymbols[256];
+  int nFrames=backtrace(stackSymbols,256);
+  for (int i=firstFrame; i<nFrames; ++i)
+    {
+    SymbolProperties symProps;
+    symProps.SetReportPath(wholePath);
+    symProps.Initialize(stackSymbols[i]);
+    oss << symProps << kwsys_ios::endl;
+    }
+#else
+  (void)firstFrame;
+  (void)wholePath;
+#endif
+  programStack += oss.str();
+
+  return programStack;
+}
+
+
+/**
 when set print stack trace in response to common signals.
 */
 void SystemInformationImplementation::SetStackTraceOnError(int enable)
 {
 #if !defined(_WIN32) && !defined(__MINGW32__) && !defined(__CYGWIN__)
   static int saOrigValid=0;
+  static struct sigaction saABRTOrig;
   static struct sigaction saSEGVOrig;
   static struct sigaction saTERMOrig;
   static struct sigaction saINTOrig;
@@ -3349,9 +3670,11 @@ void SystemInformationImplementation::SetStackTraceOnError(int enable)
   static struct sigaction saBUSOrig;
   static struct sigaction saFPEOrig;
 
+
   if (enable && !saOrigValid)
     {
     // save the current actions
+    sigaction(SIGABRT,0,&saABRTOrig);
     sigaction(SIGSEGV,0,&saSEGVOrig);
     sigaction(SIGTERM,0,&saTERMOrig);
     sigaction(SIGINT,0,&saINTOrig);
@@ -3365,9 +3688,10 @@ void SystemInformationImplementation::SetStackTraceOnError(int enable)
     // install ours
     struct sigaction sa;
     sa.sa_sigaction=(SigAction)StacktraceSignalHandler;
-    sa.sa_flags=SA_SIGINFO|SA_RESTART;
+    sa.sa_flags=SA_SIGINFO|SA_RESTART|SA_RESETHAND;
     sigemptyset(&sa.sa_mask);
 
+    sigaction(SIGABRT,&sa,0);
     sigaction(SIGSEGV,&sa,0);
     sigaction(SIGTERM,&sa,0);
     sigaction(SIGINT,&sa,0);
@@ -3379,6 +3703,7 @@ void SystemInformationImplementation::SetStackTraceOnError(int enable)
   if (!enable && saOrigValid)
     {
     // restore previous actions
+    sigaction(SIGABRT,&saABRTOrig,0);
     sigaction(SIGSEGV,&saSEGVOrig,0);
     sigaction(SIGTERM,&saTERMOrig,0);
     sigaction(SIGINT,&saINTOrig,0);
@@ -3487,7 +3812,7 @@ bool SystemInformationImplementation::QueryLinuxMemory()
     bool have[6] = { false, false, false, false, false, false };
     unsigned long value[6];
     int count = 0;
-    while(fgets(buffer, sizeof(buffer), fd))
+    while(fgets(buffer, static_cast<int>(sizeof(buffer)), fd))
       {
       for(int i=0; i < 6; ++i)
         {
@@ -3520,7 +3845,8 @@ bool SystemInformationImplementation::QueryLinuxMemory()
     unsigned long temp;
     unsigned long cachedMem;
     unsigned long buffersMem;
-    char *r=fgets(buffer, sizeof(buffer), fd); // Skip "total: used:..."
+    // Skip "total: used:..."
+    char *r=fgets(buffer, static_cast<int>(sizeof(buffer)), fd);
     int status=0;
     if(r==buffer)
       {
@@ -4691,19 +5017,19 @@ bool SystemInformationImplementation::QueryOSInformation()
 
   this->OSName = "Windows";
 
-  OSVERSIONINFOEX osvi;
+  OSVERSIONINFOEXW osvi;
   BOOL bIsWindows64Bit;
   BOOL bOsVersionInfoEx;
   char operatingSystem[256];
 
   // Try calling GetVersionEx using the OSVERSIONINFOEX structure.
-  ZeroMemory (&osvi, sizeof (OSVERSIONINFOEX));
-  osvi.dwOSVersionInfoSize = sizeof (OSVERSIONINFOEX);
-  bOsVersionInfoEx = GetVersionEx ((OSVERSIONINFO *) &osvi);
+  ZeroMemory (&osvi, sizeof (OSVERSIONINFOEXW));
+  osvi.dwOSVersionInfoSize = sizeof (OSVERSIONINFOEXW);
+  bOsVersionInfoEx = GetVersionExW ((OSVERSIONINFOW*)&osvi);
   if (!bOsVersionInfoEx)
     {
-    osvi.dwOSVersionInfoSize = sizeof (OSVERSIONINFO);
-    if (!GetVersionEx ((OSVERSIONINFO *) &osvi))
+    osvi.dwOSVersionInfoSize = sizeof (OSVERSIONINFOW);
+    if (!GetVersionExW((OSVERSIONINFOW*)&osvi))
       {
       return false;
       }
@@ -4789,19 +5115,19 @@ bool SystemInformationImplementation::QueryOSInformation()
 #endif        // VER_NT_WORKSTATION
         {
         HKEY hKey;
-        char szProductType[80];
+        wchar_t szProductType[80];
         DWORD dwBufLen;
 
         // Query the registry to retrieve information.
-        RegOpenKeyEx (HKEY_LOCAL_MACHINE, "SYSTEM\\CurrentControlSet\\Control\\ProductOptions", 0, KEY_QUERY_VALUE, &hKey);
-        RegQueryValueEx (hKey, "ProductType", NULL, NULL, (LPBYTE) szProductType, &dwBufLen);
+        RegOpenKeyExW(HKEY_LOCAL_MACHINE, L"SYSTEM\\CurrentControlSet\\Control\\ProductOptions", 0, KEY_QUERY_VALUE, &hKey);
+        RegQueryValueExW(hKey, L"ProductType", NULL, NULL, (LPBYTE) szProductType, &dwBufLen);
         RegCloseKey (hKey);
 
-        if (lstrcmpi ("WINNT", szProductType) == 0)
+        if (lstrcmpiW(L"WINNT", szProductType) == 0)
           {
           this->OSRelease += " Professional";
           }
-        if (lstrcmpi ("LANMANNT", szProductType) == 0)
+        if (lstrcmpiW(L"LANMANNT", szProductType) == 0)
           {
           // Decide between Windows 2000 Advanced Server and Windows .NET Enterprise Server.
           if (osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 1)
@@ -4813,7 +5139,7 @@ bool SystemInformationImplementation::QueryOSInformation()
             this->OSRelease += " Server";
             }
           }
-        if (lstrcmpi ("SERVERNT", szProductType) == 0)
+        if (lstrcmpiW(L"SERVERNT", szProductType) == 0)
           {
           // Decide between Windows 2000 Advanced Server and Windows .NET Enterprise Server.
           if (osvi.dwMajorVersion == 5 && osvi.dwMinorVersion == 1)
@@ -4846,7 +5172,7 @@ bool SystemInformationImplementation::QueryOSInformation()
         LPFNPROC DLLProc;
 
         // Load the Kernel32 DLL.
-        hKernelDLL = LoadLibrary ("kernel32");
+        hKernelDLL = LoadLibraryW(L"kernel32");
         if (hKernelDLL != NULL)  {
           // Only XP and .NET Server support IsWOW64Process so... Load dynamically!
           DLLProc = (LPFNPROC) GetProcAddress (hKernelDLL, "IsWow64Process");
diff --git a/Utilities/KWSys/vtksys/SystemInformation.hxx.in b/Utilities/KWSys/vtksys/SystemInformation.hxx.in
index 8f4cb4e..a9fd05d 100644
--- a/Utilities/KWSys/vtksys/SystemInformation.hxx.in
+++ b/Utilities/KWSys/vtksys/SystemInformation.hxx.in
@@ -117,8 +117,8 @@ public:
   // Get total system RAM in units of KiB available to this process.
   // This may differ from the host available if a per-process resource
   // limit is applied. per-process memory limits are applied on unix
-  // system via rlimit api. Resource limits that are not imposed via
-  // rlimit api may be reported to us via an application specified
+  // system via rlimit API. Resource limits that are not imposed via
+  // rlimit API may be reported to us via an application specified
   // environment variable.
   LongLong GetProcMemoryAvailable(
         const char *hostLimitEnvVarName=NULL,
@@ -136,6 +136,12 @@ public:
   static
   void SetStackTraceOnError(int enable);
 
+  // format and return the current program stack in a string. In
+  // order to produce an informative stack trace the application
+  // should be dynamically linked and compiled with debug symbols.
+  static
+  kwsys_stl::string GetProgramStack(int firstFrame, int wholePath);
+
   /** Run the different checks */
   void RunCPUCheck();
   void RunOSCheck();
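For reference, a minimal sketch of how the stack-trace helpers declared above can be used from application code. This is an illustrative example only, assuming the vtksys namespace and the <vtksys/SystemInformation.hxx> header name produced by this build:

// Illustrative sketch (not part of the imported sources): print the current
// call stack and enable the crash handlers added in this version.
#include <vtksys/SystemInformation.hxx>
#include <iostream>

int main()
{
  // firstFrame=0 starts with the first frame; wholePath=0 omits whole paths.
  std::cout << vtksys::SystemInformation::GetProgramStack(0, 0) << std::endl;

  // Install the handlers (SIGSEGV, SIGABRT, SIGFPE, ...) that print a trace.
  vtksys::SystemInformation::SetStackTraceOnError(1);
  return 0;
}

As the header comment above notes, the program should be dynamically linked and built with debug symbols for the trace to be informative.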
diff --git a/Utilities/KWSys/vtksys/SystemTools.cxx b/Utilities/KWSys/vtksys/SystemTools.cxx
index 881c49a..4649f3b 100644
--- a/Utilities/KWSys/vtksys/SystemTools.cxx
+++ b/Utilities/KWSys/vtksys/SystemTools.cxx
@@ -20,6 +20,8 @@
 #include KWSYS_HEADER(RegularExpression.hxx)
 #include KWSYS_HEADER(SystemTools.hxx)
 #include KWSYS_HEADER(Directory.hxx)
+#include KWSYS_HEADER(FStream.hxx)
+#include KWSYS_HEADER(Encoding.hxx)
 
 #include KWSYS_HEADER(ios/iostream)
 #include KWSYS_HEADER(ios/fstream)
@@ -32,6 +34,8 @@
 #if 0
 # include "SystemTools.hxx.in"
 # include "Directory.hxx.in"
+# include "FStream.hxx.in"
+# include "Encoding.hxx.in"
 # include "kwsys_ios_iostream.h.in"
 # include "kwsys_ios_fstream.h.in"
 # include "kwsys_ios_sstream.h.in"
@@ -75,6 +79,9 @@
 // Windows API.
 #if defined(_WIN32)
 # include <windows.h>
+# ifndef INVALID_FILE_ATTRIBUTES
+#  define INVALID_FILE_ATTRIBUTES ((DWORD)-1)
+# endif
 #elif defined (__CYGWIN__)
 # include <windows.h>
 # undef _WIN32
@@ -152,11 +159,6 @@ public:
 #define _chdir chdir
 #endif
 
-#if defined(__HAIKU__)
-#include <os/kernel/OS.h>
-#include <os/storage/Path.h>
-#endif
-
 #if defined(__BEOS__) && !defined(__ZETA__)
 #include <be/kernel/OS.h>
 #include <be/storage/Path.h>
@@ -188,42 +190,45 @@ static inline char *realpath(const char *path, char *resolved_path)
 #if defined(_WIN32) && (defined(_MSC_VER) || defined(__WATCOMC__) || defined(__BORLANDC__) || defined(__MINGW32__))
 inline int Mkdir(const char* dir)
 {
-  return _mkdir(dir);
+  return _wmkdir(KWSYS_NAMESPACE::Encoding::ToWide(dir).c_str());
 }
 inline int Rmdir(const char* dir)
 {
-  return _rmdir(dir);
+  return _wrmdir(KWSYS_NAMESPACE::Encoding::ToWide(dir).c_str());
 }
 inline const char* Getcwd(char* buf, unsigned int len)
 {
-  const char* ret = _getcwd(buf, len);
-  if(!ret)
-    {
-    fprintf(stderr, "No current working directory.\n");
-    abort();
-    }
-  // make sure the drive letter is capital
-  if(strlen(buf) > 1 && buf[1] == ':')
+  std::vector<wchar_t> w_buf(len);
+  if(const wchar_t* ret = _wgetcwd(&w_buf[0], len))
     {
-    buf[0] = toupper(buf[0]);
+    // make sure the drive letter is capital
+    if(wcslen(&w_buf[0]) > 1 && w_buf[1] == L':')
+      {
+      w_buf[0] = towupper(w_buf[0]);
+      }
+    std::string tmp = KWSYS_NAMESPACE::Encoding::ToNarrow(&w_buf[0]);
+    strcpy(buf, tmp.c_str());
+    return buf;
     }
-  return ret;
+  return 0;
 }
 inline int Chdir(const char* dir)
 {
   #if defined(__BORLANDC__)
   return chdir(dir);
   #else
-  return _chdir(dir);
+  return _wchdir(KWSYS_NAMESPACE::Encoding::ToWide(dir).c_str());
   #endif
 }
 inline void Realpath(const char *path, kwsys_stl::string & resolved_path)
 {
-  char *ptemp;
-  char fullpath[MAX_PATH];
-  if( GetFullPathName(path, sizeof(fullpath), fullpath, &ptemp) )
+  kwsys_stl::wstring tmp = KWSYS_NAMESPACE::Encoding::ToWide(path);
+  wchar_t *ptemp;
+  wchar_t fullpath[MAX_PATH];
+  if( GetFullPathNameW(tmp.c_str(), sizeof(fullpath)/sizeof(fullpath[0]),
+                       fullpath, &ptemp) )
     {
-    resolved_path = fullpath;
+    resolved_path = KWSYS_NAMESPACE::Encoding::ToNarrow(fullpath);
     KWSYS_NAMESPACE::SystemTools::ConvertToUnixSlashes(resolved_path);
     }
   else
@@ -245,13 +250,7 @@ inline int Rmdir(const char* dir)
 }
 inline const char* Getcwd(char* buf, unsigned int len)
 {
-  const char* ret = getcwd(buf, len);
-  if(!ret)
-    {
-    fprintf(stderr, "No current working directory\n");
-    abort();
-    }
-  return ret;
+  return getcwd(buf, len);
 }
 
 inline int Chdir(const char* dir)
@@ -604,6 +603,15 @@ const char* SystemTools::GetExecutableExtension()
 #endif
 }
 
+FILE* SystemTools::Fopen(const char* file, const char* mode)
+{
+#ifdef _WIN32
+  return _wfopen(Encoding::ToWide(file).c_str(),
+                 Encoding::ToWide(mode).c_str());
+#else
+  return fopen(file, mode);
+#endif
+}
 
 bool SystemTools::MakeDirectory(const char* path)
 {
@@ -613,7 +621,7 @@ bool SystemTools::MakeDirectory(const char* path)
     }
   if(SystemTools::FileExists(path))
     {
-    return true;
+    return SystemTools::FileIsDirectory(path);
     }
   kwsys_stl::string dir = path;
   if(dir.size() == 0)
@@ -703,11 +711,57 @@ void SystemTools::ReplaceString(kwsys_stl::string& source,
 #endif
 
 #if defined(_WIN32) && !defined(__CYGWIN__)
+static bool SystemToolsParseRegistryKey(const char* key,
+                                        HKEY& primaryKey,
+                                        kwsys_stl::string& second,
+                                        kwsys_stl::string& valuename)
+{
+  kwsys_stl::string primary = key;
+
+  size_t start = primary.find("\\");
+  if (start == kwsys_stl::string::npos)
+    {
+    return false;
+    }
+
+  size_t valuenamepos = primary.find(";");
+  if (valuenamepos != kwsys_stl::string::npos)
+    {
+    valuename = primary.substr(valuenamepos+1);
+    }
+
+  second = primary.substr(start+1, valuenamepos-start-1);
+  primary = primary.substr(0, start);
+
+  if (primary == "HKEY_CURRENT_USER")
+    {
+    primaryKey = HKEY_CURRENT_USER;
+    }
+  if (primary == "HKEY_CURRENT_CONFIG")
+    {
+    primaryKey = HKEY_CURRENT_CONFIG;
+    }
+  if (primary == "HKEY_CLASSES_ROOT")
+    {
+    primaryKey = HKEY_CLASSES_ROOT;
+    }
+  if (primary == "HKEY_LOCAL_MACHINE")
+    {
+    primaryKey = HKEY_LOCAL_MACHINE;
+    }
+  if (primary == "HKEY_USERS")
+    {
+    primaryKey = HKEY_USERS;
+    }
+
+  return true;
+}
+
 static DWORD SystemToolsMakeRegistryMode(DWORD mode,
                                          SystemTools::KeyWOW64 view)
 {
   // only add the modes when on a system that supports Wow64.
-  static FARPROC wow64p = GetProcAddress(GetModuleHandle("kernel32"),
+  static FARPROC wow64p = GetProcAddress(GetModuleHandleW(L"kernel32"),
                                          "IsWow64Process");
   if(wow64p == NULL)
     {
@@ -726,6 +780,55 @@ static DWORD SystemToolsMakeRegistryMode(DWORD mode,
 }
 #endif
 
+#if defined(_WIN32) && !defined(__CYGWIN__)
+bool
+SystemTools::GetRegistrySubKeys(const char *key,
+                                kwsys_stl::vector<kwsys_stl::string>& subkeys,
+                                KeyWOW64 view)
+{
+  HKEY primaryKey = HKEY_CURRENT_USER;
+  kwsys_stl::string second;
+  kwsys_stl::string valuename;
+  if (!SystemToolsParseRegistryKey(key, primaryKey, second, valuename))
+    {
+    return false;
+    }
+
+  HKEY hKey;
+  if(RegOpenKeyExW(primaryKey,
+                  Encoding::ToWide(second).c_str(),
+                  0,
+                  SystemToolsMakeRegistryMode(KEY_READ, view),
+                  &hKey) != ERROR_SUCCESS)
+    {
+    return false;
+    }
+  else
+    {
+    wchar_t name[1024];
+    DWORD dwNameSize = sizeof(name)/sizeof(name[0]);
+
+    DWORD i = 0;
+    while (RegEnumKeyW(hKey, i, name, dwNameSize) == ERROR_SUCCESS)
+      {
+      subkeys.push_back(Encoding::ToNarrow(name));
+      ++i;
+      }
+
+    RegCloseKey(hKey);
+    }
+
+  return true;
+}
+#else
+bool SystemTools::GetRegistrySubKeys(const char *,
+                                     kwsys_stl::vector<kwsys_stl::string>&,
+                                     KeyWOW64)
+{
+  return false;
+}
+#endif
+
 // Read a registry value.
 // Example :
 //      HKEY_LOCAL_MACHINE\SOFTWARE\Python\PythonCore\2.1\InstallPath
@@ -738,50 +841,17 @@ bool SystemTools::ReadRegistryValue(const char *key, kwsys_stl::string &value,
                                     KeyWOW64 view)
 {
   bool valueset = false;
-  kwsys_stl::string primary = key;
+  HKEY primaryKey = HKEY_CURRENT_USER;
   kwsys_stl::string second;
   kwsys_stl::string valuename;
-
-  size_t start = primary.find("\\");
-  if (start == kwsys_stl::string::npos)
+  if (!SystemToolsParseRegistryKey(key, primaryKey, second, valuename))
     {
     return false;
     }
 
-  size_t valuenamepos = primary.find(";");
-  if (valuenamepos != kwsys_stl::string::npos)
-    {
-    valuename = primary.substr(valuenamepos+1);
-    }
-
-  second = primary.substr(start+1, valuenamepos-start-1);
-  primary = primary.substr(0, start);
-
-  HKEY primaryKey = HKEY_CURRENT_USER;
-  if (primary == "HKEY_CURRENT_USER")
-    {
-    primaryKey = HKEY_CURRENT_USER;
-    }
-  if (primary == "HKEY_CURRENT_CONFIG")
-    {
-    primaryKey = HKEY_CURRENT_CONFIG;
-    }
-  if (primary == "HKEY_CLASSES_ROOT")
-    {
-    primaryKey = HKEY_CLASSES_ROOT;
-    }
-  if (primary == "HKEY_LOCAL_MACHINE")
-    {
-    primaryKey = HKEY_LOCAL_MACHINE;
-    }
-  if (primary == "HKEY_USERS")
-    {
-    primaryKey = HKEY_USERS;
-    }
-
   HKEY hKey;
-  if(RegOpenKeyEx(primaryKey,
-                  second.c_str(),
+  if(RegOpenKeyExW(primaryKey,
+                  Encoding::ToWide(second).c_str(),
                   0,
                   SystemToolsMakeRegistryMode(KEY_READ, view),
                   &hKey) != ERROR_SUCCESS)
@@ -792,9 +862,9 @@ bool SystemTools::ReadRegistryValue(const char *key, kwsys_stl::string &value,
     {
     DWORD dwType, dwSize;
     dwSize = 1023;
-    char data[1024];
-    if(RegQueryValueEx(hKey,
-                       (LPTSTR)valuename.c_str(),
+    wchar_t data[1024];
+    if(RegQueryValueExW(hKey,
+                       Encoding::ToWide(valuename).c_str(),
                        NULL,
                        &dwType,
                        (BYTE *)data,
@@ -802,16 +872,17 @@ bool SystemTools::ReadRegistryValue(const char *key, kwsys_stl::string &value,
       {
       if (dwType == REG_SZ)
         {
-        value = data;
+        value = Encoding::ToNarrow(data);
         valueset = true;
         }
       else if (dwType == REG_EXPAND_SZ)
         {
-        char expanded[1024];
+        wchar_t expanded[1024];
         DWORD dwExpandedSize = sizeof(expanded)/sizeof(expanded[0]);
-        if(ExpandEnvironmentStrings(data, expanded, dwExpandedSize))
+        if(ExpandEnvironmentStringsW(data, expanded,
+            dwExpandedSize))
           {
-          value = expanded;
+          value = Encoding::ToNarrow(expanded);
           valueset = true;
           }
         }
@@ -842,52 +913,19 @@ bool SystemTools::ReadRegistryValue(const char *, kwsys_stl::string &,
 bool SystemTools::WriteRegistryValue(const char *key, const char *value,
                                      KeyWOW64 view)
 {
-  kwsys_stl::string primary = key;
+  HKEY primaryKey = HKEY_CURRENT_USER;
   kwsys_stl::string second;
   kwsys_stl::string valuename;
-
-  size_t start = primary.find("\\");
-  if (start == kwsys_stl::string::npos)
+  if (!SystemToolsParseRegistryKey(key, primaryKey, second, valuename))
     {
     return false;
     }
 
-  size_t valuenamepos = primary.find(";");
-  if (valuenamepos != kwsys_stl::string::npos)
-    {
-    valuename = primary.substr(valuenamepos+1);
-    }
-
-  second = primary.substr(start+1, valuenamepos-start-1);
-  primary = primary.substr(0, start);
-
-  HKEY primaryKey = HKEY_CURRENT_USER;
-  if (primary == "HKEY_CURRENT_USER")
-    {
-    primaryKey = HKEY_CURRENT_USER;
-    }
-  if (primary == "HKEY_CURRENT_CONFIG")
-    {
-    primaryKey = HKEY_CURRENT_CONFIG;
-    }
-  if (primary == "HKEY_CLASSES_ROOT")
-    {
-    primaryKey = HKEY_CLASSES_ROOT;
-    }
-  if (primary == "HKEY_LOCAL_MACHINE")
-    {
-    primaryKey = HKEY_LOCAL_MACHINE;
-    }
-  if (primary == "HKEY_USERS")
-    {
-    primaryKey = HKEY_USERS;
-    }
-
   HKEY hKey;
   DWORD dwDummy;
-  char lpClass[] = "";
-  if(RegCreateKeyEx(primaryKey,
-                    second.c_str(),
+  wchar_t lpClass[] = L"";
+  if(RegCreateKeyExW(primaryKey,
+                    Encoding::ToWide(second).c_str(),
                     0,
                     lpClass,
                     REG_OPTION_NON_VOLATILE,
@@ -899,12 +937,13 @@ bool SystemTools::WriteRegistryValue(const char *key, const char *value,
     return false;
     }
 
-  if(RegSetValueEx(hKey,
-                   (LPTSTR)valuename.c_str(),
+  std::wstring wvalue = Encoding::ToWide(value);
+  if(RegSetValueExW(hKey,
+                   Encoding::ToWide(valuename).c_str(),
                    0,
                    REG_SZ,
-                   (CONST BYTE *)value,
-                   (DWORD)(strlen(value) + 1)) == ERROR_SUCCESS)
+                   (CONST BYTE *)wvalue.c_str(),
+                   (DWORD)(sizeof(wchar_t) * (wvalue.size() + 1))) == ERROR_SUCCESS)
     {
     return true;
     }
@@ -927,50 +966,17 @@ bool SystemTools::WriteRegistryValue(const char *, const char *, KeyWOW64)
 #if defined(_WIN32) && !defined(__CYGWIN__)
 bool SystemTools::DeleteRegistryValue(const char *key, KeyWOW64 view)
 {
-  kwsys_stl::string primary = key;
+  HKEY primaryKey = HKEY_CURRENT_USER;
   kwsys_stl::string second;
   kwsys_stl::string valuename;
-
-  size_t start = primary.find("\\");
-  if (start == kwsys_stl::string::npos)
+  if (!SystemToolsParseRegistryKey(key, primaryKey, second, valuename))
     {
     return false;
     }
 
-  size_t valuenamepos = primary.find(";");
-  if (valuenamepos != kwsys_stl::string::npos)
-    {
-    valuename = primary.substr(valuenamepos+1);
-    }
-
-  second = primary.substr(start+1, valuenamepos-start-1);
-  primary = primary.substr(0, start);
-
-  HKEY primaryKey = HKEY_CURRENT_USER;
-  if (primary == "HKEY_CURRENT_USER")
-    {
-    primaryKey = HKEY_CURRENT_USER;
-    }
-  if (primary == "HKEY_CURRENT_CONFIG")
-    {
-    primaryKey = HKEY_CURRENT_CONFIG;
-    }
-  if (primary == "HKEY_CLASSES_ROOT")
-    {
-    primaryKey = HKEY_CLASSES_ROOT;
-    }
-  if (primary == "HKEY_LOCAL_MACHINE")
-    {
-    primaryKey = HKEY_LOCAL_MACHINE;
-    }
-  if (primary == "HKEY_USERS")
-    {
-    primaryKey = HKEY_USERS;
-    }
-
   HKEY hKey;
-  if(RegOpenKeyEx(primaryKey,
-                  second.c_str(),
+  if(RegOpenKeyExW(primaryKey,
+                  Encoding::ToWide(second).c_str(),
                   0,
                   SystemToolsMakeRegistryMode(KEY_WRITE, view),
                   &hKey) != ERROR_SUCCESS)
@@ -1000,7 +1006,7 @@ bool SystemTools::SameFile(const char* file1, const char* file2)
 #ifdef _WIN32
   HANDLE hFile1, hFile2;
 
-  hFile1 = CreateFile( file1,
+  hFile1 = CreateFileW( Encoding::ToWide(file1).c_str(),
                       GENERIC_READ,
                       FILE_SHARE_READ ,
                       NULL,
@@ -1008,7 +1014,7 @@ bool SystemTools::SameFile(const char* file1, const char* file2)
                       FILE_FLAG_BACKUP_SEMANTICS,
                       NULL
     );
-  hFile2 = CreateFile( file2,
+  hFile2 = CreateFileW( Encoding::ToWide(file2).c_str(),
                       GENERIC_READ,
                       FILE_SHARE_READ,
                       NULL,
@@ -1057,15 +1063,6 @@ bool SystemTools::SameFile(const char* file1, const char* file2)
 }
 
 //----------------------------------------------------------------------------
-#if defined(_WIN32) || defined(__CYGWIN__)
-static bool WindowsFileExists(const char* filename)
-{
-  WIN32_FILE_ATTRIBUTE_DATA fd;
-  return GetFileAttributesExA(filename, GetFileExInfoStandard, &fd) != 0;
-}
-#endif
-
-//----------------------------------------------------------------------------
 bool SystemTools::FileExists(const char* filename)
 {
   if(!(filename && *filename))
@@ -1077,11 +1074,12 @@ bool SystemTools::FileExists(const char* filename)
   char winpath[MAX_PATH];
   if(SystemTools::PathCygwinToWin32(filename, winpath))
     {
-    return WindowsFileExists(winpath);
+    return (GetFileAttributesA(winpath) != INVALID_FILE_ATTRIBUTES);
     }
   return access(filename, R_OK) == 0;
 #elif defined(_WIN32)
-  return WindowsFileExists(filename);
+  return (GetFileAttributesW(Encoding::ToWide(filename).c_str())
+          != INVALID_FILE_ATTRIBUTES);
 #else
   return access(filename, R_OK) == 0;
 #endif
@@ -1124,7 +1122,7 @@ bool SystemTools::Touch(const char* filename, bool create)
 {
   if(create && !SystemTools::FileExists(filename))
     {
-    FILE* file = fopen(filename, "a+b");
+    FILE* file = Fopen(filename, "a+b");
     if(file)
       {
       fclose(file);
@@ -1132,22 +1130,59 @@ bool SystemTools::Touch(const char* filename, bool create)
       }
     return false;
     }
-#ifdef _MSC_VER
-#define utime _utime
-#define utimbuf _utimbuf
-#endif
-  struct stat fromStat;
-  if(stat(filename, &fromStat) < 0)
+#if defined(_WIN32) && !defined(__CYGWIN__)
+  HANDLE h = CreateFileW(Encoding::ToWide(filename).c_str(),
+                        FILE_WRITE_ATTRIBUTES,
+                        FILE_SHARE_WRITE, 0, OPEN_EXISTING,
+                        FILE_FLAG_BACKUP_SEMANTICS, 0);
+  if(!h)
     {
     return false;
     }
-  struct utimbuf buf;
-  buf.actime = fromStat.st_atime;
-  buf.modtime = static_cast<time_t>(SystemTools::GetTime());
-  if(utime(filename, &buf) < 0)
+  FILETIME mtime;
+  GetSystemTimeAsFileTime(&mtime);
+  if(!SetFileTime(h, 0, 0, &mtime))
+    {
+    CloseHandle(h);
+    return false;
+    }
+  CloseHandle(h);
+#elif KWSYS_CXX_HAS_UTIMENSAT
+  struct timespec times[2] = {{0,UTIME_OMIT},{0,UTIME_NOW}};
+  if(utimensat(AT_FDCWD, filename, times, 0) < 0)
+    {
+    return false;
+    }
+#else
+  struct stat st;
+  if(stat(filename, &st) < 0)
     {
     return false;
     }
+  struct timeval mtime;
+  gettimeofday(&mtime, 0);
+# if KWSYS_CXX_HAS_UTIMES
+  struct timeval times[2] =
+    {
+#  if KWSYS_STAT_HAS_ST_MTIM
+      {st.st_atim.tv_sec, st.st_atim.tv_nsec/1000}, /* tv_sec, tv_usec */
+#  else
+      {st.st_atime, 0},
+#  endif
+      mtime
+    };
+  if(utimes(filename, times) < 0)
+    {
+    return false;
+    }
+# else
+  struct utimbuf times = {st.st_atime, mtime.tv_sec};
+  if(utime(filename, &times) < 0)
+    {
+    return false;
+    }
+# endif
+#endif
   return true;
 }
 
@@ -1201,11 +1236,13 @@ bool SystemTools::FileTimeCompare(const char* f1, const char* f2,
   // Windows version.  Get the modification time from extended file attributes.
   WIN32_FILE_ATTRIBUTE_DATA f1d;
   WIN32_FILE_ATTRIBUTE_DATA f2d;
-  if(!GetFileAttributesEx(f1, GetFileExInfoStandard, &f1d))
+  if(!GetFileAttributesExW(Encoding::ToWide(f1).c_str(),
+                           GetFileExInfoStandard, &f1d))
     {
     return false;
     }
-  if(!GetFileAttributesEx(f2, GetFileExInfoStandard, &f2d))
+  if(!GetFileAttributesExW(Encoding::ToWide(f2).c_str(),
+                           GetFileExInfoStandard, &f2d))
     {
     return false;
     }
@@ -1913,6 +1950,39 @@ bool SystemTools::CopyFileIfDifferent(const char* source,
 bool SystemTools::FilesDiffer(const char* source,
                               const char* destination)
 {
+
+#if defined(_WIN32)
+  WIN32_FILE_ATTRIBUTE_DATA statSource;
+  if (GetFileAttributesExW(Encoding::ToWide(source).c_str(),
+                           GetFileExInfoStandard,
+                           &statSource) == 0)
+    {
+    return true;
+    }
+
+  WIN32_FILE_ATTRIBUTE_DATA statDestination;
+  if (GetFileAttributesExW(Encoding::ToWide(destination).c_str(),
+                           GetFileExInfoStandard,
+                           &statDestination) == 0)
+    {
+    return true;
+    }
+
+  if(statSource.nFileSizeHigh != statDestination.nFileSizeHigh ||
+     statSource.nFileSizeLow != statDestination.nFileSizeLow)
+    {
+    return true;
+    }
+
+  if(statSource.nFileSizeHigh == 0 && statSource.nFileSizeLow == 0)
+    {
+    return false;
+    }
+  off_t nleft = ((__int64)statSource.nFileSizeHigh << 32) +
+                statSource.nFileSizeLow;
+
+#else
+
   struct stat statSource;
   if (stat(source, &statSource) != 0)
     {
@@ -1934,15 +2004,19 @@ bool SystemTools::FilesDiffer(const char* source,
     {
     return false;
     }
+  off_t nleft = statSource.st_size;
+#endif
 
-#if defined(_WIN32) || defined(__CYGWIN__)
-  kwsys_ios::ifstream finSource(source, (kwsys_ios::ios::binary |
-                                         kwsys_ios::ios::in));
-  kwsys_ios::ifstream finDestination(destination, (kwsys_ios::ios::binary |
-                                                   kwsys_ios::ios::in));
+#if defined(_WIN32)
+  kwsys::ifstream finSource(source,
+                            (kwsys_ios::ios::binary |
+                             kwsys_ios::ios::in));
+  kwsys::ifstream finDestination(destination,
+                                 (kwsys_ios::ios::binary |
+                                  kwsys_ios::ios::in));
 #else
-  kwsys_ios::ifstream finSource(source);
-  kwsys_ios::ifstream finDestination(destination);
+  kwsys::ifstream finSource(source);
+  kwsys::ifstream finDestination(destination);
 #endif
   if(!finSource || !finDestination)
     {
@@ -1952,7 +2026,6 @@ bool SystemTools::FilesDiffer(const char* source,
   // Compare the files a block at a time.
   char source_buf[KWSYS_ST_BUFFER];
   char dest_buf[KWSYS_ST_BUFFER];
-  off_t nleft = statSource.st_size;
   while(nleft > 0)
     {
     // Read a block from each file.
@@ -2025,10 +2098,10 @@ bool SystemTools::CopyFileAlways(const char* source, const char* destination)
   // Open files
 
 #if defined(_WIN32) || defined(__CYGWIN__)
-  kwsys_ios::ifstream fin(source,
-                    kwsys_ios::ios::binary | kwsys_ios::ios::in);
+  kwsys::ifstream fin(source,
+                kwsys_ios::ios::binary | kwsys_ios::ios::in);
 #else
-  kwsys_ios::ifstream fin(source);
+  kwsys::ifstream fin(source);
 #endif
   if(!fin)
     {
@@ -2325,7 +2398,11 @@ bool SystemTools::RemoveFile(const char* source)
   /* Win32 unlink is stupid --- it fails if the file is read-only  */
   SystemTools::SetPermissions(source, S_IWRITE);
 #endif
+#ifdef _WIN32
+  bool res = _wunlink(Encoding::ToWide(source).c_str()) != 0 ? false : true;
+#else
   bool res = unlink(source) != 0 ? false : true;
+#endif
 #ifdef _WIN32
   if ( !res )
     {
@@ -2749,24 +2826,36 @@ bool SystemTools::FileIsDirectory(const char* name)
     return false;
     }
 
-  // Remove any trailing slash from the name.
-  char buffer[KWSYS_SYSTEMTOOLS_MAXPATH];
+  // Remove any trailing slash from the name except in a root component.
+  char local_buffer[KWSYS_SYSTEMTOOLS_MAXPATH];
+  std::string string_buffer;
   size_t last = length-1;
   if(last > 0 && (name[last] == '/' || name[last] == '\\')
-    && strcmp(name, "/") !=0)
+    && strcmp(name, "/") !=0 && name[last-1] != ':')
     {
-    memcpy(buffer, name, last);
-    buffer[last] = 0;
-    name = buffer;
+    if(last < sizeof(local_buffer))
+      {
+      memcpy(local_buffer, name, last);
+      local_buffer[last] = 0;
+      name = local_buffer;
+      }
+    else
+      {
+      string_buffer.append(name, last);
+      name = string_buffer.c_str();
+      }
     }
 
   // Now check the file node type.
+#if defined( _WIN32 )
+  DWORD attr = GetFileAttributesW(Encoding::ToWide(name).c_str());
+  if (attr != INVALID_FILE_ATTRIBUTES)
+    {
+    return (attr & FILE_ATTRIBUTE_DIRECTORY) != 0;
+#else
   struct stat fs;
   if(stat(name, &fs) == 0)
     {
-#if defined( _WIN32 ) && !defined(__CYGWIN__)
-    return ((fs.st_mode & _S_IFDIR) != 0);
-#else
     return S_ISDIR(fs.st_mode);
 #endif
     }
@@ -3048,7 +3137,7 @@ SystemToolsAppendComponents(
     {
     if(*i == "..")
       {
-      if(out_components.begin() != out_components.end())
+      if(out_components.size() > 1)
         {
         out_components.erase(out_components.end()-1, out_components.end());
         }
@@ -3089,7 +3178,7 @@ kwsys_stl::string SystemTools::CollapseFullPath(const char* in_path,
         }
       else
         {
-        // ??
+        base_components.push_back("");
         }
       }
 
@@ -3251,11 +3340,12 @@ static int GetCasePathName(const kwsys_stl::string & pathIn,
     kwsys_stl::string test_str = casePath;
     test_str += path_components[idx];
 
-    WIN32_FIND_DATA findData;
-    HANDLE hFind = ::FindFirstFile(test_str.c_str(), &findData);
+    WIN32_FIND_DATAW findData;
+    HANDLE hFind = ::FindFirstFileW(Encoding::ToWide(test_str).c_str(),
+      &findData);
     if (INVALID_HANDLE_VALUE != hFind)
       {
-      casePath += findData.cFileName;
+      casePath += Encoding::ToNarrow(findData.cFileName);
       ::FindClose(hFind);
       }
     else
@@ -3705,8 +3795,7 @@ bool SystemTools::FileHasSignature(const char *filename,
     return false;
     }
 
-  FILE *fp;
-  fp = fopen(filename, "rb");
+  FILE *fp = Fopen(filename, "rb");
   if (!fp)
     {
     return false;
@@ -3739,8 +3828,7 @@ SystemTools::DetectFileType(const char *filename,
     return SystemTools::FileTypeUnknown;
     }
 
-  FILE *fp;
-  fp = fopen(filename, "rb");
+  FILE *fp = Fopen(filename, "rb");
   if (!fp)
     {
     return SystemTools::FileTypeUnknown;
@@ -3930,9 +4018,8 @@ bool SystemTools::GetShortPath(const char* path, kwsys_stl::string& shortPath)
 {
 #if defined(WIN32) && !defined(__CYGWIN__)
   const int size = int(strlen(path)) +1; // size of return
-  char *buffer = new char[size];  // create a buffer
   char *tempPath = new char[size];  // create a buffer
-  int ret;
+  DWORD ret;
 
   // if the path passed in has quotes around it, first remove the quotes
   if (path[0] == '"' && path[strlen(path)-1] == '"')
@@ -3945,19 +4032,20 @@ bool SystemTools::GetShortPath(const char* path, kwsys_stl::string& shortPath)
     strcpy(tempPath,path);
     }
 
+  kwsys_stl::wstring wtempPath = Encoding::ToWide(tempPath);
+  kwsys_stl::vector<wchar_t> buffer(wtempPath.size()+1);
   buffer[0] = 0;
-  ret = GetShortPathName(tempPath, buffer, size);
+  ret = GetShortPathNameW(Encoding::ToWide(tempPath).c_str(),
+    &buffer[0], static_cast<DWORD>(wtempPath.size()));
 
-  if(buffer[0] == 0 || ret > size)
+  if(buffer[0] == 0 || ret > wtempPath.size())
     {
-    delete [] buffer;
     delete [] tempPath;
     return false;
     }
   else
     {
-    shortPath = buffer;
-    delete [] buffer;
+    shortPath = Encoding::ToNarrow(&buffer[0]);
     delete [] tempPath;
     return true;
     }
@@ -4010,7 +4098,7 @@ void SystemTools::SplitProgramFromArgs(const char* path,
       args = dir.substr(spacePos, dir.size()-spacePos);
       return;
       }
-    // Now try and find the the program in the path
+    // Now try and find the program in the path
     findProg = SystemTools::FindProgram(tryProg.c_str(), e);
     if(findProg.size())
       {
@@ -4184,12 +4272,45 @@ bool SystemTools::GetPermissions(const char* file, mode_t& mode)
     return false;
     }
 
+#if defined(_WIN32)
+  DWORD attr = GetFileAttributesW(Encoding::ToWide(file).c_str());
+  if(attr == INVALID_FILE_ATTRIBUTES)
+    {
+    return false;
+    }
+  if((attr & FILE_ATTRIBUTE_READONLY) != 0)
+    {
+    mode = (_S_IREAD  | (_S_IREAD  >> 3) | (_S_IREAD  >> 6));
+    }
+  else
+    {
+    mode = (_S_IWRITE | (_S_IWRITE >> 3) | (_S_IWRITE >> 6)) |
+           (_S_IREAD  | (_S_IREAD  >> 3) | (_S_IREAD  >> 6));
+    }
+  if((attr & FILE_ATTRIBUTE_DIRECTORY) != 0)
+    {
+    mode |= S_IFDIR | (_S_IEXEC  | (_S_IEXEC  >> 3) | (_S_IEXEC  >> 6));
+    }
+  else
+    {
+    mode |= S_IFREG;
+    }
+  const char* ext = strrchr(file, '.');
+  if(ext && (Strucmp(ext, ".exe") == 0 ||
+    Strucmp(ext, ".com") == 0 ||
+    Strucmp(ext, ".cmd") == 0 ||
+    Strucmp(ext, ".bat") == 0))
+    {
+    mode |= (_S_IEXEC  | (_S_IEXEC  >> 3) | (_S_IEXEC  >> 6));
+    }
+#else
   struct stat st;
   if ( stat(file, &st) < 0 )
     {
     return false;
     }
   mode = st.st_mode;
+#endif
   return true;
 }
 
@@ -4203,7 +4324,11 @@ bool SystemTools::SetPermissions(const char* file, mode_t mode)
     {
     return false;
     }
+#ifdef _WIN32
+  if ( _wchmod(Encoding::ToWide(file).c_str(), mode) < 0 )
+#else
   if ( chmod(file, mode) < 0 )
+#endif
     {
     return false;
     }
@@ -4224,17 +4349,13 @@ bool SystemTools::IsSubDirectory(const char* cSubdir, const char* cDir)
     }
   kwsys_stl::string subdir = cSubdir;
   kwsys_stl::string dir = cDir;
+  SystemTools::ConvertToUnixSlashes(subdir);
   SystemTools::ConvertToUnixSlashes(dir);
-  kwsys_stl::string path = subdir;
-  do
+  if(subdir.size() > dir.size() && subdir[dir.size()] == '/')
     {
-    path = SystemTools::GetParentDirectory(path.c_str());
-    if(SystemTools::ComparePath(dir.c_str(), path.c_str()))
-      {
-      return true;
-      }
+    std::string s = subdir.substr(0, dir.size());
+    return SystemTools::ComparePath(s.c_str(), dir.c_str());
     }
-  while ( path.size() > dir.size() );
   return false;
 }
 
@@ -4312,7 +4433,9 @@ void SystemTools::ConvertWindowsCommandLineToUnixArguments(
 
   (*argv)[0] = new char [1024];
 #ifdef _WIN32
-  ::GetModuleFileName(0, (*argv)[0], 1024);
+  wchar_t tmp[1024];
+  ::GetModuleFileNameW(0, tmp, 1024);
+  strcpy((*argv)[0], Encoding::ToNarrow(tmp).c_str());
 #else
   (*argv)[0][0] = '\0';
 #endif
@@ -4372,14 +4495,14 @@ kwsys_stl::string SystemTools::GetOperatingSystemNameAndVersion()
 #ifdef _WIN32
   char buffer[256];
 
-  OSVERSIONINFOEX osvi;
+  OSVERSIONINFOEXA osvi;
   BOOL bOsVersionInfoEx;
 
   // Try calling GetVersionEx using the OSVERSIONINFOEX structure.
   // If that fails, try using the OSVERSIONINFO structure.
 
-  ZeroMemory(&osvi, sizeof(OSVERSIONINFOEX));
-  osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
+  ZeroMemory(&osvi, sizeof(OSVERSIONINFOEXA));
+  osvi.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEXA);
 
   bOsVersionInfoEx = GetVersionEx((OSVERSIONINFO *)&osvi);
   if (!bOsVersionInfoEx)
@@ -4522,21 +4645,21 @@ kwsys_stl::string SystemTools::GetOperatingSystemNameAndVersion()
         {
         HKEY hKey;
         #define BUFSIZE 80
-        char szProductType[BUFSIZE];
+        wchar_t szProductType[BUFSIZE];
         DWORD dwBufLen=BUFSIZE;
         LONG lRet;
 
-        lRet = RegOpenKeyEx(
+        lRet = RegOpenKeyExW(
           HKEY_LOCAL_MACHINE,
-          "SYSTEM\\CurrentControlSet\\Control\\ProductOptions",
+          L"SYSTEM\\CurrentControlSet\\Control\\ProductOptions",
           0, KEY_QUERY_VALUE, &hKey);
         if (lRet != ERROR_SUCCESS)
           {
           return 0;
           }
 
-        lRet = RegQueryValueEx(hKey, "ProductType", NULL, NULL,
-                               (LPBYTE) szProductType, &dwBufLen);
+        lRet = RegQueryValueExW(hKey, L"ProductType", NULL, NULL,
+                                (LPBYTE) szProductType, &dwBufLen);
 
         if ((lRet != ERROR_SUCCESS) || (dwBufLen > BUFSIZE))
           {
@@ -4545,15 +4668,15 @@ kwsys_stl::string SystemTools::GetOperatingSystemNameAndVersion()
 
         RegCloseKey(hKey);
 
-        if (lstrcmpi("WINNT", szProductType) == 0)
+        if (lstrcmpiW(L"WINNT", szProductType) == 0)
           {
           res += " Workstation";
           }
-        if (lstrcmpi("LANMANNT", szProductType) == 0)
+        if (lstrcmpiW(L"LANMANNT", szProductType) == 0)
           {
           res += " Server";
           }
-        if (lstrcmpi("SERVERNT", szProductType) == 0)
+        if (lstrcmpiW(L"SERVERNT", szProductType) == 0)
           {
           res += " Advanced Server";
           }
@@ -4569,16 +4692,16 @@ kwsys_stl::string SystemTools::GetOperatingSystemNameAndVersion()
       // Display service pack (if any) and build number.
 
       if (osvi.dwMajorVersion == 4 &&
-          lstrcmpi(osvi.szCSDVersion, "Service Pack 6") == 0)
+          lstrcmpiA(osvi.szCSDVersion, "Service Pack 6") == 0)
         {
         HKEY hKey;
         LONG lRet;
 
         // Test for SP6 versus SP6a.
 
-        lRet = RegOpenKeyEx(
+        lRet = RegOpenKeyExW(
           HKEY_LOCAL_MACHINE,
-          "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Hotfix\\Q246009",
+          L"SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\Hotfix\\Q246009",
           0, KEY_QUERY_VALUE, &hKey);
 
         if (lRet == ERROR_SUCCESS)
@@ -4836,7 +4959,8 @@ static int SystemToolsDebugReport(int, char* message, int*)
 
 void SystemTools::EnableMSVCDebugHook()
 {
-  if (getenv("DART_TEST_FROM_DART"))
+  if (getenv("DART_TEST_FROM_DART") ||
+      getenv("DASHBOARD_TEST_FROM_CTEST"))
     {
     _CrtSetReportHook(SystemToolsDebugReport);
     }
diff --git a/Utilities/KWSys/vtksys/SystemTools.hxx.in b/Utilities/KWSys/vtksys/SystemTools.hxx.in
index 9c56e96..9457a4e 100644
--- a/Utilities/KWSys/vtksys/SystemTools.hxx.in
+++ b/Utilities/KWSys/vtksys/SystemTools.hxx.in
@@ -24,6 +24,8 @@
 
 // Required for va_list
 #include <stdarg.h>
+// Required for FILE*
+#include <stdio.h>
 #if @KWSYS_NAMESPACE@_STL_HAVE_STD && !defined(va_list)
 // Some compilers move va_list into the std namespace and there is no way to
 // tell that this has been done. Playing with things being included before or
@@ -42,10 +44,6 @@ namespace @KWSYS_NAMESPACE@
 }
 #endif // va_list
 
-#if defined( _MSC_VER )
-typedef unsigned short mode_t;
-#endif
-
 /* Define these macros temporarily to keep the code readable.  */
 #if !defined (KWSYS_NAMESPACE) && !@KWSYS_NAMESPACE@_NAME_IS_KWSYS
 # define kwsys_stl @KWSYS_NAMESPACE@_stl
@@ -497,6 +495,11 @@ public:
    */
 
   /**
+   * Open a file considering unicode.
+   */
+  static FILE* Fopen(const char* file, const char* mode);
+
+  /**
    * Make a new directory if it is not there.  This function
    * can make a full path even if none of the directories existed
    * prior to calling this function.  
@@ -684,6 +687,10 @@ public:
    */
   static long int CreationTime(const char* filename);
 
+  #if defined( _MSC_VER )
+  typedef unsigned short mode_t;
+  #endif
+
   /**
    * Get and set permissions of the file.
    */
@@ -716,6 +723,13 @@ public:
   enum KeyWOW64 { KeyWOW64_Default, KeyWOW64_32, KeyWOW64_64 };
 
   /**
+   * Get a list of subkeys.
+   */
+  static bool GetRegistrySubKeys(const char *key,
+                                 kwsys_stl::vector<kwsys_stl::string>& subkeys,
+                                 KeyWOW64 view = KeyWOW64_Default);
+
+  /**
    * Read a registry value
    */
   static bool ReadRegistryValue(const char *key, kwsys_stl::string &value,
@@ -766,7 +780,7 @@ public:
   static kwsys_stl::string GetCurrentWorkingDirectory(bool collapse =true);
 
   /**
-   * Change directory the the directory specified
+   * Change directory to the directory specified
    */
   static int ChangeDirectory(const char* dir);
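A short usage sketch for two of the entry points declared above, the unicode-aware Fopen and the new GetRegistrySubKeys. Illustrative only; the registry key is a made-up example and the vtksys namespace/header names are assumed from this build:

// Illustrative sketch: open a file through the unicode-aware wrapper and
// enumerate registry subkeys (the registry call simply returns false on
// non-Windows platforms).
#include <vtksys/SystemTools.hxx>
#include <cstdio>
#include <string>
#include <vector>

int main()
{
  // On Windows the UTF-8 path is converted to wide characters internally.
  FILE* f = vtksys::SystemTools::Fopen("example.txt", "rb");
  if (f)
    {
    fclose(f);
    }

  // Hypothetical key, used only for illustration.
  std::vector<std::string> subkeys;
  vtksys::SystemTools::GetRegistrySubKeys(
    "HKEY_LOCAL_MACHINE\\SOFTWARE\\Kitware", subkeys);
  return 0;
}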
 
diff --git a/Utilities/KWSys/vtksys/auto_ptr.hxx.in b/Utilities/KWSys/vtksys/auto_ptr.hxx.in
index 857b1db..ad9654c 100644
--- a/Utilities/KWSys/vtksys/auto_ptr.hxx.in
+++ b/Utilities/KWSys/vtksys/auto_ptr.hxx.in
@@ -31,6 +31,17 @@
 # define @KWSYS_NAMESPACE@_AUTO_PTR_CAST(a) a
 #endif
 
+// In C++11, clang will warn about using dynamic exception specifications
+// as they are deprecated.  But as this class is trying to faithfully
+// mimic std::auto_ptr, we want to keep the 'throw()' decorations below.
+// So we suppress the warning.
+#if defined(__clang__) && defined(__has_warning)
+# if __has_warning("-Wdeprecated")
+#  pragma clang diagnostic push
+#  pragma clang diagnostic ignored "-Wdeprecated"
+# endif
+#endif
+
 namespace @KWSYS_NAMESPACE@
 {
 
@@ -198,4 +209,11 @@ public:
 
 } // namespace @KWSYS_NAMESPACE@
 
+// Undo warning suppression.
+#if defined(__clang__) && defined(__has_warning)
+# if __has_warning("-Wdeprecated")
+#  pragma clang diagnostic pop
+# endif
+#endif
+
 #endif
diff --git a/Utilities/KWSys/vtksys/hashtable.hxx.in b/Utilities/KWSys/vtksys/hashtable.hxx.in
index db52fc8..62aa3f3 100644
--- a/Utilities/KWSys/vtksys/hashtable.hxx.in
+++ b/Utilities/KWSys/vtksys/hashtable.hxx.in
@@ -62,6 +62,17 @@
 # pragma set woff 3970 /* pointer to int conversion */ 3321 3968
 #endif
 
+// In C++11, clang will warn about using dynamic exception specifications
+// as they are deprecated.  But as this class is trying to faithfully
+// mimic unordered_set and unordered_map, we want to keep the 'throw()'
+// decorations below.  So we suppress the warning.
+#if defined(__clang__) && defined(__has_warning)
+# if __has_warning("-Wdeprecated")
+#  pragma clang diagnostic push
+#  pragma clang diagnostic ignored "-Wdeprecated"
+# endif
+#endif
+
 #if @KWSYS_NAMESPACE@_STL_HAS_ALLOCATOR_TEMPLATE
 # define @KWSYS_NAMESPACE@_HASH_DEFAULT_ALLOCATOR(T) @KWSYS_NAMESPACE@_stl::allocator< T >
 #elif @KWSYS_NAMESPACE@_STL_HAS_ALLOCATOR_NONTEMPLATE
@@ -1268,6 +1279,13 @@ using @KWSYS_NAMESPACE@::operator==;
 using @KWSYS_NAMESPACE@::operator!=;
 #endif
 
+// Undo warning suppression.
+#if defined(__clang__) && defined(__has_warning)
+# if __has_warning("-Wdeprecated")
+#  pragma clang diagnostic pop
+# endif
+#endif
+
 #if defined(_MSC_VER)
 # pragma warning (pop)
 #endif
diff --git a/Utilities/KWSys/vtksys/kwsysPlatformTests.cmake b/Utilities/KWSys/vtksys/kwsysPlatformTests.cmake
index d042450..f9ee254 100644
--- a/Utilities/KWSys/vtksys/kwsysPlatformTests.cmake
+++ b/Utilities/KWSys/vtksys/kwsysPlatformTests.cmake
@@ -19,6 +19,7 @@ MACRO(KWSYS_PLATFORM_TEST lang var description invert)
       ${CMAKE_CURRENT_BINARY_DIR}
       ${CMAKE_CURRENT_SOURCE_DIR}/${KWSYS_PLATFORM_TEST_FILE_${lang}}
       COMPILE_DEFINITIONS -DTEST_${var} ${KWSYS_PLATFORM_TEST_DEFINES} ${KWSYS_PLATFORM_TEST_EXTRA_FLAGS}
+      CMAKE_FLAGS "-DLINK_LIBRARIES:STRING=${KWSYS_PLATFORM_TEST_LINK_LIBRARIES}"
       OUTPUT_VARIABLE OUTPUT)
     IF(${var}_COMPILED)
       FILE(APPEND
@@ -150,9 +151,11 @@ ENDMACRO(KWSYS_PLATFORM_C_TEST_RUN)
 MACRO(KWSYS_PLATFORM_CXX_TEST var description invert)
   SET(KWSYS_PLATFORM_TEST_DEFINES ${KWSYS_PLATFORM_CXX_TEST_DEFINES})
   SET(KWSYS_PLATFORM_TEST_EXTRA_FLAGS ${KWSYS_PLATFORM_CXX_TEST_EXTRA_FLAGS})
+  SET(KWSYS_PLATFORM_TEST_LINK_LIBRARIES ${KWSYS_PLATFORM_CXX_TEST_LINK_LIBRARIES})
   KWSYS_PLATFORM_TEST(CXX "${var}" "${description}" "${invert}")
   SET(KWSYS_PLATFORM_TEST_DEFINES)
   SET(KWSYS_PLATFORM_TEST_EXTRA_FLAGS)
+  SET(KWSYS_PLATFORM_TEST_LINK_LIBRARIES)
 ENDMACRO(KWSYS_PLATFORM_CXX_TEST)
 
 MACRO(KWSYS_PLATFORM_CXX_TEST_RUN var description invert)
diff --git a/Utilities/KWSys/vtksys/kwsysPlatformTestsCXX.cxx b/Utilities/KWSys/vtksys/kwsysPlatformTestsCXX.cxx
index 48976c4..ca96b0e 100644
--- a/Utilities/KWSys/vtksys/kwsysPlatformTestsCXX.cxx
+++ b/Utilities/KWSys/vtksys/kwsysPlatformTestsCXX.cxx
@@ -399,7 +399,7 @@ int main()
 #define _FILE_OFFSET_BITS 64
 #include <sys/types.h>
 #include <sys/stat.h>
-#include <assert.h>
+#include <cassert>
 #if KWSYS_CXX_HAS_CSTDIO
 # include <cstdio>
 #endif
@@ -494,6 +494,73 @@ int main()
 }
 #endif
 
+#ifdef TEST_KWSYS_CXX_HAS_UTIMES
+#include <sys/time.h>
+int main()
+{
+  struct timeval* current_time = 0;
+  return utimes("/example", current_time);
+}
+#endif
+
+#ifdef TEST_KWSYS_CXX_HAS_UTIMENSAT
+#include <fcntl.h>
+#include <sys/stat.h>
+int main()
+{
+  struct timespec times[2] = {{0,UTIME_OMIT},{0,UTIME_NOW}};
+  return utimensat(AT_FDCWD, "/example", times, AT_SYMLINK_NOFOLLOW);
+}
+#endif
+
+#ifdef TEST_KWSYS_CXX_HAS_BACKTRACE
+#if defined(__PATHSCALE__) || defined(__PATHCC__) \
+  || (defined(__LSB_VERSION__) && (__LSB_VERSION__ < 41))
+backtrace doesnt work with this compiler or os
+#endif
+#if (defined(__GNUC__) || defined(__PGI)) && !defined(_GNU_SOURCE)
+# define _GNU_SOURCE
+#endif
+#include <execinfo.h>
+int main()
+{
+  void *stackSymbols[256];
+  backtrace(stackSymbols,256);
+  backtrace_symbols(&stackSymbols[0],1);
+  return 0;
+}
+#endif
+
+#ifdef TEST_KWSYS_CXX_HAS_DLADDR
+#if (defined(__GNUC__) || defined(__PGI)) && !defined(_GNU_SOURCE)
+# define _GNU_SOURCE
+#endif
+#include <dlfcn.h>
+int main()
+{
+  Dl_info info;
+  int ierr=dladdr((void*)main,&info);
+  return 0;
+}
+#endif
+
+#ifdef TEST_KWSYS_CXX_HAS_CXXABI
+#if (defined(__GNUC__) || defined(__PGI)) && !defined(_GNU_SOURCE)
+# define _GNU_SOURCE
+#endif
+#include <cxxabi.h>
+int main()
+{
+  int status = 0;
+  size_t bufferLen = 512;
+  char buffer[512] = {'\0'};
+  const char *function="_ZN5kwsys17SystemInformation15GetProgramStackEii";
+  char *demangledFunction =
+    abi::__cxa_demangle(function, buffer, &bufferLen, &status);
+  return status;
+}
+#endif
+
 #ifdef TEST_KWSYS_CXX_TYPE_INFO
 /* Collect fundamental type information and save it to a CMake script.  */
 
@@ -607,3 +674,9 @@ int main()
   return a;
 }
 #endif
+
+#ifdef TEST_KWSYS_STL_HAS_WSTRING
+#include <string>
+void f(std ::wstring*) {}
+int main() { return 0; }
+#endif
diff --git a/Utilities/KWSys/vtksys/testDynamicLoader.cxx b/Utilities/KWSys/vtksys/testDynamicLoader.cxx
index 61c1572..1bff707 100644
--- a/Utilities/KWSys/vtksys/testDynamicLoader.cxx
+++ b/Utilities/KWSys/vtksys/testDynamicLoader.cxx
@@ -15,14 +15,10 @@
 #include KWSYS_HEADER(ios/iostream)
 #include KWSYS_HEADER(stl/string)
 
-#if defined(__BEOS__)
+#if defined(__BEOS__) || defined(__HAIKU__)
 #include <be/kernel/OS.h>  /* disable_debugger() API. */
 #endif
 
-#if defined(__HAIKU__)
-#include <os/kernel/OS.h>  /* disable_debugger() API. */
-#endif
-
 // Work-around CMake dependency scanning limitation.  This must
 // duplicate the above list of headers.
 #if 0
@@ -109,9 +105,9 @@ int testDynamicLoader(int argc, char *argv[])
 
 // dlopen() on Syllable before 11/22/2007 doesn't return 0 on error
 #ifndef __SYLLABLE__
-  // Make sure that inexistant lib is giving correct result
+  // Make sure that inexistent lib is giving correct result
   res += TestDynamicLoader("azerty_", "foo_bar",0,0,0);
-  // Make sure that random binary file cannnot be assimilated as dylib
+  // Make sure that random binary file cannot be assimilated as dylib
   res += TestDynamicLoader(TEST_SYSTEMTOOLS_BIN_FILE, "wp",0,0,0);
 #endif
 
diff --git a/Utilities/KWSys/vtksys/testEncoding.cxx b/Utilities/KWSys/vtksys/testEncoding.cxx
new file mode 100644
index 0000000..8e74a50
--- /dev/null
+++ b/Utilities/KWSys/vtksys/testEncoding.cxx
@@ -0,0 +1,159 @@
+/*============================================================================
+  KWSys - Kitware System Library
+  Copyright 2000-2009 Kitware, Inc., Insight Software Consortium
+
+  Distributed under the OSI-approved BSD License (the "License");
+  see accompanying file Copyright.txt for details.
+
+  This software is distributed WITHOUT ANY WARRANTY; without even the
+  implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+  See the License for more information.
+============================================================================*/
+#include "kwsysPrivate.h"
+
+#if defined(_MSC_VER)
+# pragma warning (disable:4786)
+#endif
+
+#include KWSYS_HEADER(Encoding.hxx)
+#include KWSYS_HEADER(ios/iostream)
+
+#include <locale.h>
+
+// Work-around CMake dependency scanning limitation.  This must
+// duplicate the above list of headers.
+#if 0
+# include "Encoding.hxx.in"
+# include "kwsys_ios_iostream.h.in"
+#endif
+
+//----------------------------------------------------------------------------
+static const unsigned char helloWorldStrings[][32] =
+{
+  // English
+  {'H','e','l','l','o',' ','W','o','r','l','d',0},
+  // Japanese
+  {0xE3, 0x81, 0x93, 0xE3, 0x82, 0x93, 0xE3, 0x81, 0xAB, 0xE3,
+   0x81, 0xA1, 0xE3, 0x81, 0xAF, 0xE4, 0xB8, 0x96, 0xE7, 0x95,
+   0x8C, 0},
+   // Arabic
+  {0xD9, 0x85, 0xD8, 0xB1, 0xD8, 0xAD, 0xD8, 0xA8, 0xD8, 0xA7,
+   0x20, 0xD8, 0xA7, 0xD9, 0x84, 0xD8, 0xB9, 0xD8, 0xA7, 0xD9,
+   0x84, 0xD9, 0x85, 0},
+  // Yiddish
+  {0xD7, 0x94, 0xD7, 0xA2, 0xD7, 0x9C, 0xD7, 0x90, 0x20, 0xD7,
+   0x95, 0xD7, 0x95, 0xD7, 0xA2, 0xD7, 0x9C, 0xD7, 0x98, 0},
+  // Russian
+  {0xD0, 0xBF, 0xD1, 0x80, 0xD0, 0xB8, 0xD0, 0xB2, 0xD0, 0xB5,
+   0xD1, 0x82, 0x20, 0xD0, 0xBC, 0xD0, 0xB8, 0xD1, 0x80, 0},
+  // Latin
+  {0x4D, 0x75, 0x6E, 0x64, 0x75, 0x73, 0x20, 0x73, 0x61, 0x6C,
+   0x76, 0x65, 0},
+  // Swahili
+  {0x68, 0x75, 0x6A, 0x61, 0x6D, 0x62, 0x6F, 0x20, 0x44, 0x75,
+   0x6E, 0x69, 0x61, 0},
+  // Icelandic
+  {0x48, 0x61, 0x6C, 0x6C, 0xC3, 0xB3, 0x20, 0x68, 0x65, 0x69,
+   0x6D, 0x75, 0x72, 0},
+  {0}
+};
+
+//----------------------------------------------------------------------------
+static int testHelloWorldEncoding()
+{
+  int ret = 0;
+  for(int i=0; helloWorldStrings[i][0] != 0; i++)
+    {
+    std::string str = reinterpret_cast<const char*>(helloWorldStrings[i]);
+    std::cout << str << std::endl;
+    std::wstring wstr = kwsys::Encoding::ToWide(str);
+    std::string str2 = kwsys::Encoding::ToNarrow(wstr);
+    if(!wstr.empty() && str != str2)
+      {
+      std::cout << "converted string was different: " << str2 << std::endl;
+      ret++;
+      }
+    }
+  return ret;
+}
+
+static int testRobustEncoding()
+{
+  // test that the conversion functions handle invalid
+  // unicode correctly/gracefully
+
+  int ret = 0;
+  char cstr[] = {(char)-1, 0};
+  // this conversion could fail
+  std::wstring wstr = kwsys::Encoding::ToWide(cstr);
+
+  wstr = kwsys::Encoding::ToWide(NULL);
+  if(wstr != L"")
+    {
+    const wchar_t* wcstr = wstr.c_str();
+    std::cout << "ToWide(NULL) returned";
+    for(size_t i=0; i<wstr.size(); i++)
+      {
+      std::cout << " " << std::hex << (int)wcstr[i];
+      }
+    std::cout << std::endl;
+    ret++;
+    }
+  wstr = kwsys::Encoding::ToWide("");
+  if(wstr != L"")
+    {
+    const wchar_t* wcstr = wstr.c_str();
+    std::cout << "ToWide(\"\") returned";
+    for(size_t i=0; i<wstr.size(); i++)
+      {
+      std::cout << " " << std::hex << (int)wcstr[i];
+      }
+    std::cout << std::endl;
+    ret++;
+    }
+
+#ifdef WIN32
+  // 16 bit wchar_t - we make an invalid surrogate pair
+  wchar_t cwstr[] = {0xD801, 0xDA00, 0};
+  // this conversion could fail
+  std::string win_str = kwsys::Encoding::ToNarrow(cwstr);
+#endif
+
+  std::string str = kwsys::Encoding::ToNarrow(NULL);
+  if(str != "")
+    {
+    std::cout << "ToNarrow(NULL) returned " << str << std::endl;
+    ret++;
+    }
+
+  str = kwsys::Encoding::ToNarrow(L"");
+  if(wstr != L"")
+    {
+    std::cout << "ToNarrow(\"\") returned " << str << std::endl;
+    ret++;
+    }
+
+  return ret;
+}
+
+
+//----------------------------------------------------------------------------
+int testEncoding(int, char*[])
+{
+  const char* loc = setlocale(LC_ALL, "");
+  if(loc)
+    {
+    std::cout << "Locale: " << loc << std::endl;
+    }
+  else
+    {
+    std::cout << "Locale: None" << std::endl;
+    }
+
+  int ret = 0;
+
+  ret |= testHelloWorldEncoding();
+  ret |= testRobustEncoding();
+
+  return ret;
+}
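The conversions exercised by this new test are the same Encoding helpers the wide-character Windows code paths above rely on. A minimal round-trip sketch, assuming the vtksys namespace and the <vtksys/Encoding.hxx> header of this build:

// Illustrative sketch: round-trip a narrow (UTF-8) string through the
// Encoding helpers used by the wide-API changes in this import.
#include <vtksys/Encoding.hxx>
#include <iostream>
#include <string>

int main()
{
  std::string utf8 = "Hello World";
  std::wstring wide = vtksys::Encoding::ToWide(utf8);
  std::string back = vtksys::Encoding::ToNarrow(wide);
  std::cout << (utf8 == back ? "round-trip ok" : "mismatch") << std::endl;
  return 0;
}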
diff --git a/Utilities/KWSys/vtksys/testIOS.cxx b/Utilities/KWSys/vtksys/testIOS.cxx
index 3b971e2..f0c7f1a 100644
--- a/Utilities/KWSys/vtksys/testIOS.cxx
+++ b/Utilities/KWSys/vtksys/testIOS.cxx
@@ -48,7 +48,7 @@ int testIOS(int, char*[])
     return 1;
     }
   static const unsigned char array[] = { 0xff,0x4f,0xff,0x51,0x00,0x29,0x00,0x00,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x30,0x00,0x00,0x00,0x3e,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x01,0x07,0x01,0x01,0xff,0x52,0x00,0x0c,0x00,0x00,0x00,0x01,0x00,0x05,0x04,0x04,0x00,0x01,0xff,0x5c,0x00,0x13,0x40,0x40,0x48,0x48,0x50,0x48,0x48,0x50,0x48,0x48,0x50,0x48,0x48,0x50,0x48,0x48,0x50,0xff,0x64,0x00,0x2c,0x00,0x00,0x43,0x72,0x65,0x61,0 [...]
-  const unsigned int narray = sizeof(array); // 180
+  const size_t narray = sizeof(array); // 180
   kwsys_ios::stringstream strstr;
   strstr.write( (char*)array, narray );
   //strstr.seekp( narray / 2 ); // set position of put pointer in mid string
diff --git a/Utilities/KWSys/vtksys/testSystemInformation.cxx b/Utilities/KWSys/vtksys/testSystemInformation.cxx
index 49a686c..53d51ac 100644
--- a/Utilities/KWSys/vtksys/testSystemInformation.cxx
+++ b/Utilities/KWSys/vtksys/testSystemInformation.cxx
@@ -88,14 +88,31 @@ int testSystemInformation(int, char*[])
   printMethod3(info, GetHostMemoryUsed(), "KiB");
   printMethod3(info, GetProcMemoryUsed(), "KiB");
 
-  for (int i = 0; i <= 31; i++)
+  for (long int i = 0; i <= 31; i++)
     {
-    if (info.DoesCPUSupportFeature(1 << i))
+    if (info.DoesCPUSupportFeature(static_cast<long int>(1) << i))
       {
       kwsys_ios::cout << "CPU feature " << i << "\n";
       }
     }
-  //int GetProcessorCacheXSize(long int);
-//  bool DoesCPUSupportFeature(long int);
+
+  /* test stack trace
+  */
+  kwsys_ios::cout
+    << "Program Stack:" << kwsys_ios::endl
+    << kwsys::SystemInformation::GetProgramStack(0,0) << kwsys_ios::endl
+    << kwsys_ios::endl;
+
+  /* test segv handler
+  info.SetStackTraceOnError(1);
+  double *d = (double*)100;
+  *d=0;
+  */
+
+  /* test abort handler
+  info.SetStackTraceOnError(1);
+  abort();
+  */
+
   return 0;
 }
diff --git a/Utilities/Maintenance/ArchiveTestingData.py b/Utilities/Maintenance/ArchiveTestingData.py
new file mode 100755
index 0000000..ccb283d
--- /dev/null
+++ b/Utilities/Maintenance/ArchiveTestingData.py
@@ -0,0 +1,184 @@
+#!/usr/bin/env python
+#==========================================================================
+#
+#   Copyright Insight Software Consortium
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#          http://www.apache.org/licenses/LICENSE-2.0.txt
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+#==========================================================================*/
+
+description = """
+Upload all the ExternalData files to the Midas server.
+
+The files corresponding to all the ExternalData content links
+in the source tree are archived on the Midas server using the
+local ExternalData object store.
+
+Requires pydas: https://github.com/midasplatform/pydas
+"""
+
+import argparse
+import fnmatch
+import os
+import sys
+
+import pydas
+
+def connect_to_midas(email=None, api_key=None):
+    midas_url = 'http://midas3.kitware.com/midas/'
+    if not api_key:
+        print('Please enter your login information for ' + midas_url)
+        pydas.login(url=midas_url, email=email)
+    else:
+        pydas.login(url=midas_url, email=email, api_key=api_key)
+    session = pydas.session
+    communicator = session.communicator
+    return session, communicator
+
+
+def upload_to_midas(content_link, externaldata_object_store,
+        repository_dir,
+        session, communicator):
+    # get the MD5 checksum
+    print('Uploading ' + content_link + ' ...')
+    with open(content_link, 'r') as fp:
+        md5hash = fp.readline().strip()
+    print('Checksum: ' + md5hash)
+
+    # upload to Midas
+    def get_child_folder(parent, child_name):
+        children = communicator.folder_children(session.token, parent['folder_id'])
+        for folder in children['folders']:
+            if folder['name'] == child_name:
+                return folder
+        return None
+    vtk_community = communicator.get_community_by_name('VTK')
+    vtk_public = get_child_folder(vtk_community, 'Public')
+    vtk_folder = get_child_folder(vtk_public, 'VTK')
+
+    # Where to place the file in Midas.  Mirror the source tree location.
+    folders = os.path.dirname(content_link)
+    if repository_dir != '':
+        folders = folders[folders.find(repository_dir)+len(repository_dir)+1:]
+    folders = folders.split(os.path.sep)
+
+    current_folder = vtk_folder
+    for folder in folders:
+        child_folder = get_child_folder(current_folder, folder)
+        if child_folder is None:
+            print('Creating folder: ' + folder)
+            current_folder = communicator.create_folder(session.token,
+                    folder,
+                    current_folder['folder_id'])
+        else:
+            current_folder = child_folder
+
+    # get the existing or create a new item to hold the file
+    item_name = os.path.basename(content_link[:-4])
+    item_id = None
+    current_folder_children = communicator.folder_children(session.token,
+            current_folder['folder_id'])
+    if current_folder_children.has_key('items'):
+        for item in current_folder_children['items']:
+            if item['name'] == item_name:
+                item_id = item['item_id']
+                break
+
+    if item_id is None:
+        new_item = communicator.create_item(session.token, item_name,
+                current_folder['folder_id'])
+        item_id = new_item['item_id']
+
+    object_store = os.path.join(externaldata_object_store, 'MD5', md5hash)
+    if not os.path.exists(object_store):
+        sys.stderr.write('Could not find the expected object store.\n')
+        sys.exit(1)
+
+    upload_token = communicator.generate_upload_token(session.token,
+            item_id,
+            item_name,
+            md5hash)
+    if upload_token != "":
+        communicator.perform_upload(upload_token,
+                item_name,
+                item_id=item_id,
+                revision='head',
+                filepath=object_store)
+
+
+def run(vtk_source_dir, externaldata_object_store,
+        email=None, api_key=None):
+    session, communicator = connect_to_midas(email, api_key)
+
+    md5files = []
+    for root, dirnames, filenames in os.walk(vtk_source_dir):
+        for filename in fnmatch.filter(filenames, '*.md5'):
+            md5files.append(os.path.join(root, filename))
+
+    # Find the location of top level directory in the repository.
+    repository_dir = None
+    example_path = os.path.normpath(os.path.dirname(md5files[0]))
+    potential_path = example_path
+    previous_path = None
+    while previous_path != '':
+        dir = os.path.join(potential_path, '.git')
+        if os.path.exists(dir):
+            repository_dir = potential_path
+            break
+        previous_path = potential_path
+        potential_path = os.path.split(potential_path)[0]
+    if repository_dir == None:
+        sys.stderr.write('Could not find VTK repository directory.\n')
+        sys.exit(1)
+
+    for content_link in md5files:
+        src_examples = os.path.join('src', 'Examples')
+        if content_link.find(src_examples) == -1:
+            uncategorized = True
+        else:
+            uncategorized = False
+
+        if uncategorized:
+            folders = ['Uncategorized']
+        else:
+            group_module_path = content_link.split(src_examples)[1][1:]
+            hierarchy = group_module_path.split(os.path.sep)
+            group = hierarchy[0]
+            module = hierarchy[1]
+            folders = [group, module]
+
+        upload_to_midas(content_link, externaldata_object_store, repository_dir,
+                session, communicator)
+
+
+if __name__ == '__main__':
+    parser = argparse.ArgumentParser(description=description)
+    parser.add_argument('--api-key-file', '-k', type=argparse.FileType('r'),
+            help="A file that contains your Midas user's API key.")
+    parser.add_argument('--email', '-e',
+            help="Email address associated with your Midas account.")
+    parser.add_argument('vtk_source_dir',
+            help='Path to the VTK source tree.')
+    parser.add_argument('externaldata_object_store',
+            help='Path to the ExternalData object store, e.g. ' \
+            + 'ExternalData/Objects/ in a build tree.')
+    args = parser.parse_args()
+
+    if args.api_key_file:
+        api_key = args.api_key_file.readline()
+        api_key = api_key.strip()
+    else:
+        api_key = None
+
+    run(args.vtk_source_dir, args.externaldata_object_store,
+        email=args.email, api_key=api_key)
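
As a minimal illustration of the folder-mirroring step performed by
upload_to_midas() above, the following standalone sketch shows how a content
link's directory is translated into Midas folder names and how the item name
is derived. The checkout path and content link are hypothetical placeholders,
not values taken from this commit.

import os

repository_dir = '/home/user/VTK'   # hypothetical checkout location
content_link = os.path.join(
    repository_dir,
    'IO', 'Geometry', 'Testing', 'Data', 'Baseline', 'TestReader.png.md5')

# Mirror the source-tree location of the content link as Midas folder names,
# relative to the repository root.
folders = os.path.dirname(content_link)
folders = folders[folders.find(repository_dir) + len(repository_dir) + 1:]
folders = folders.split(os.path.sep)

# The item name is the content link's basename without the '.md5' suffix.
item_name = os.path.basename(content_link[:-4])

print(folders)    # ['IO', 'Geometry', 'Testing', 'Data', 'Baseline']
print(item_name)  # TestReader.png
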
diff --git a/Utilities/Maintenance/SourceTarball.bash b/Utilities/Maintenance/SourceTarball.bash
new file mode 100755
index 0000000..a7ae722
--- /dev/null
+++ b/Utilities/Maintenance/SourceTarball.bash
@@ -0,0 +1,223 @@
+#!/usr/bin/env bash
+#==========================================================================
+#
+#   Copyright Insight Software Consortium
+#
+#   Licensed under the Apache License, Version 2.0 (the "License");
+#   you may not use this file except in compliance with the License.
+#   You may obtain a copy of the License at
+#
+#          http://www.apache.org/licenses/LICENSE-2.0.txt
+#
+#   Unless required by applicable law or agreed to in writing, software
+#   distributed under the License is distributed on an "AS IS" BASIS,
+#   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+#   See the License for the specific language governing permissions and
+#   limitations under the License.
+#
+#==========================================================================*/
+
+usage() {
+  die 'USAGE: SourceTarball.bash [(--tgz|--txz|--zip)...] \
+        [--verbose] [-v <version>] [<tag>|<commit>]'
+}
+
+info() {
+  echo "$@" 1>&2
+}
+
+die() {
+  echo "$@" 1>&2; exit 1
+}
+
+return_pipe_status() {
+  echo ${PIPESTATUS[@]} |grep -q -v "[1-9]"
+}
+
+find_data_objects() {
+  # TODO: Somehow mark large data in the tree instead of hard-coding it here.
+  large='Testing/Data/(LSDyna|MFIXReader|NetCDF|SLAC|WindBladeReader)/'
+  if test "$2" = "VTK_USE_LARGE_DATA"; then
+    v=''
+  else
+    v='-v'
+  fi
+  git ls-tree --full-tree -r "$1" |
+  egrep $v "$large" |
+  egrep '\.(md5)$' |
+  while read mode type obj path; do
+    case "$path" in
+      *.md5)  echo MD5/$(git cat-file blob $obj) ;;
+      *)      die "Unknown ExternalData content link: $path" ;;
+    esac
+  done | sort | uniq
+  return_pipe_status
+}
+
+if md5sum_type=$(type -p md5sum); then
+  compute_MD5() {
+    md5sum "$1" | sed 's/ .*//'
+  }
+elif md5_type=$(type -p md5); then
+  compute_MD5() {
+    md5 "$1" | sed 's/.*= //'
+  }
+else
+  die 'Neither "md5sum" nor "md5" tool is available.'
+fi
+
+validate_MD5() {
+  md5sum=$(compute_MD5 "$1") &&
+  if test "$md5sum" != "$2"; then
+    die "Object MD5/$2 is corrupt: $1"
+  fi
+}
+
+download_object() {
+  algo="$1" ; hash="$2" ; path="$3"
+  mkdir -p $(dirname "$path") &&
+  if wget "http://www.vtk.org/files/ExternalData/$algo/$hash" -O "$path.tmp$$" 1>&2; then
+    mv "$path.tmp$$" "$path"
+  else
+    rm -f "$path.tmp$$"
+    false
+  fi
+}
+
+index_data_objects() {
+  # Input lines have format <algo>/<hash>
+  while IFS=/ read algo hash; do
+    # Final path in source tarball
+    path=".ExternalData/$algo/$hash"
+    # Find the object file on disk
+    if test -f "$path"; then
+      file="$path" # available in place
+    elif test -f ~/"$path" ; then
+      file=~/"$path" # available in home dir
+    else
+      download_object "$algo" "$hash" "$path" &&
+      file="$path"
+    fi &&
+    validate_$algo "$file" "$hash" &&
+    obj=$(git hash-object -t blob -w "$file") &&
+    echo "100644 blob $obj	$path" ||
+    return
+  done |
+  git update-index --index-info
+  return_pipe_status
+}
+
+load_data_objects() {
+  find_data_objects "$@" |
+  index_data_objects
+  return_pipe_status
+}
+
+load_data_files() {
+  git ls-tree -r "$1" -- '.ExternalData' |
+  git update-index --index-info
+  return_pipe_status
+}
+
+git_archive_tgz() {
+  out="$2.tar.gz" && tmp="$out.tmp$$" &&
+  if test -n "$3"; then prefix="$3"; else prefix="$2"; fi &&
+  git -c core.autocrlf=false archive $verbose --format=tar --prefix=$prefix/ $1 |
+  gzip -9 > "$tmp" &&
+  mv "$tmp" "$out" &&
+  info "Wrote $out"
+}
+
+git_archive_txz() {
+  out="$2.tar.xz" && tmp="$out.tmp$$" &&
+  if test -n "$3"; then prefix="$3"; else prefix="$2"; fi &&
+  git -c core.autocrlf=false archive $verbose --format=tar --prefix=$prefix/ $1 |
+  xz -9 > "$tmp" &&
+  mv "$tmp" "$out" &&
+  info "Wrote $out"
+}
+
+git_archive_zip() {
+  out="$2.zip" && tmp="$out.tmp$$" &&
+  if test -n "$3"; then prefix="$3"; else prefix="$2"; fi &&
+  git -c core.autocrlf=true archive $verbose --format=zip --prefix=$prefix/ $1 > "$tmp" &&
+  mv "$tmp" "$out" &&
+  info "Wrote $out"
+}
+
+#-----------------------------------------------------------------------------
+
+formats=
+commit=
+version=
+verbose=
+
+# Parse command line options.
+while test $# != 0; do
+  case "$1" in
+    --tgz) formats="$formats tgz" ;;
+    --txz) formats="$formats txz" ;;
+    --zip) formats="$formats zip" ;;
+    --verbose) verbose=-v ;;
+    --) shift; break ;;
+    -v) shift; version="$1" ;;
+    -*) usage ;;
+    *) test -z "$commit" && commit="$1" || usage ;;
+  esac
+  shift
+done
+test $# = 0 || usage
+test -n "$commit" || commit=HEAD
+test -n "$formats" || formats=tgz
+
+if ! git rev-parse --verify -q "$commit" >/dev/null ; then
+  die "'$commit' is not a valid commit"
+fi
+if test -z "$version"; then
+  desc=$(git describe $commit) &&
+  if test "${desc:0:1}" != "v"; then
+    die "'git describe $commit' is '$desc'; use -v <version>"
+  fi &&
+  version=${desc#v} &&
+  echo "$commit is version $version"
+fi
+
+# Create temporary git index to construct source tree
+export GIT_INDEX_FILE="$(pwd)/tmp-$$-index" &&
+trap "rm -f '$GIT_INDEX_FILE'" EXIT &&
+
+result=0 &&
+
+info "Loading source tree from $commit..." &&
+rm -f "$GIT_INDEX_FILE" &&
+git read-tree -m -i $commit &&
+git rm -rf -q --cached '.ExternalData' &&
+tree=$(git write-tree) &&
+
+info "Generating source archive(s)..." &&
+for fmt in $formats; do
+  git_archive_$fmt $tree "VTK-$version" || result=1
+done &&
+
+info "Loading normal data for $commit..." &&
+rm -f "$GIT_INDEX_FILE" &&
+load_data_objects $commit &&
+load_data_files $commit &&
+tree=$(git write-tree) &&
+
+info "Generating normal data archive(s)..." &&
+for fmt in $formats; do
+  git_archive_$fmt $tree "VTKData-$version" "VTK-$version" || result=1
+done &&
+
+info "Loading large data for $commit..." &&
+rm -f "$GIT_INDEX_FILE" &&
+load_data_objects $commit VTK_USE_LARGE_DATA &&
+tree=$(git write-tree) &&
+
+info "Generating large data archive(s)..." &&
+for fmt in $formats; do
+  git_archive_$fmt $tree "VTKLargeData-$version" "VTK-$version" || result=1
+done &&
+
+exit $result
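
For context, each .md5 content link in the tree stores only the MD5 hash of
the real data file; find_data_objects() above turns those links into
MD5/<hash> object names, and download_object() fetches missing objects from
the ExternalData store on www.vtk.org. The sketch below is not part of the
commit (the content-link path in the comment is hypothetical); it only shows
that mapping in Python.

import os

def resolve_content_link(md5_file):
    # A content link holds nothing but the hash of the data file it stands for.
    with open(md5_file) as f:
        md5hash = f.read().strip()
    # Final object path inside the source tarball, and its download URL.
    object_path = os.path.join('.ExternalData', 'MD5', md5hash)
    url = 'http://www.vtk.org/files/ExternalData/MD5/' + md5hash
    return object_path, url

# Hypothetical usage:
#   path, url = resolve_content_link('Testing/Data/Baseline/TestFoo.png.md5')
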
diff --git a/Utilities/Maintenance/VisualizeModuleDependencies.py b/Utilities/Maintenance/VisualizeModuleDependencies.py
new file mode 100755
index 0000000..e668dd5
--- /dev/null
+++ b/Utilities/Maintenance/VisualizeModuleDependencies.py
@@ -0,0 +1,328 @@
+#!/usr/bin/env python
+
+'''
+This program takes a list of module files and creates a (possibly disjoint)
+directed graph of the modules and their dependencies. Arrows on the
+directed graph point to the dependent module.
+
+Typical usage would be as follows:
+ VisualizeModuleDependencies.py VTKSourceDir vtkFiltersSources,vtkInteractionStyle,vtkRenderingOpenGL
+
+'''
+
+import os, sys
+from collections import defaultdict
+import vtk
+
+def GetProgramParameters():
+    import argparse
+    description = 'Creates a directed graph of the modules and their dependencies.'
+    epilogue = '''
+        This program takes a list of module files and creates a
+        (possibly disjoint) directed graph of the modules and their
+        dependencies. Arrows on the directed graph point to the dependent module.
+        By default, dependencies of a given module are followed to their maximum
+        depth. However, you can restrict this by specifying the depth to
+        which dependent modules are searched.
+        The moduleList is a comma-separated list of module names with no
+        spaces between the names.
+        The treeDepth defaults to 0, which means that for a given module all
+        dependent modules will be found. If non-zero, trees will only be
+        searched to that depth.
+    '''
+    parser = argparse.ArgumentParser(description=description, epilog=epilogue)
+    parser.add_argument('vtkSourceDir', help='The path to the vtk Source Directory.')
+    parser.add_argument('moduleList', help='The list of modules.')
+    parser.add_argument('moduleTreeDepth', help='The depth of the module trees', nargs='?', default=0, type=int)
+    args = parser.parse_args()
+    vtkSourceDir = args.vtkSourceDir
+    moduleList = [x.strip() for x in args.moduleList.split(',')]
+    moduleTreeDepth = args.moduleTreeDepth
+    return (vtkSourceDir, moduleList, moduleTreeDepth)
+
+def GetProgramParametersOld():
+    '''
+    Used for Python versions < 2.7
+    '''
+    if len(sys.argv) < 3:
+        s = 'Usage: ' + sys.argv[0] + ' vtkSourceDir moduleList [moduleTreeDepth]'
+        print(s)
+        exit(0)
+    args = dict()
+    args['vtkSourceDir'] = sys.argv[1]
+    args['moduleList'] = sys.argv[2]
+    args['moduleTreeDepth'] = 0
+    if len(sys.argv) > 3:
+        args['moduleTreeDepth'] = int(sys.argv[3])
+    vtkSourceDir = args['vtkSourceDir']
+    moduleList = [x.strip() for x in args['moduleList'].split(',')]
+    moduleTreeDepth = args['moduleTreeDepth']
+    return (vtkSourceDir, moduleList, moduleTreeDepth)
+
+def FindModuleFiles(path):
+    '''
+    Get a list of module files in the VTK directory.
+    '''
+    moduleFiles = [os.path.join(root, name)
+                 for root, dirs, files in os.walk(path)
+                 for name in files
+                 if name == ("module.cmake")]
+    return moduleFiles
+
+def ParseModuleFile(fileName):
+    '''
+    Read each module file returning the module name and what
+    it depends on or implements.
+    '''
+    fh = open(fileName, 'rb')
+    lines = []
+    for line in fh:
+        line = line.strip()
+        if line.startswith('$'):  # Skip CMake variable names
+            continue
+        if line.startswith('#'):
+            continue
+        line = line.split('#')[0].strip()  # inline comments
+        if line == "":
+            continue
+        line = line.split(')')[0].strip()  # closing brace with no space
+        if line == "":
+            continue
+        for l in line.split(" "):
+            lines.append(l)
+    keywords = ['GROUPS', 'DEPENDS', 'IMPLEMENTS', 'PRIVATE_DEPENDS', 'TEST_DEPENDS',
+                 'COMPILE_DEPENDS', 'EXCLUDE_FROM_WRAPPING', 'EXCLUDE_FROM_ALL']
+    moduleName = ""
+    depends = []
+    implements = []
+    state = "START";
+    for item in lines:
+        if state == "START" and item.startswith("vtk_module("):
+            moduleName = item.split("(")[1]
+            continue
+        if item in keywords:
+            state = item
+            continue
+        if state == 'DEPENDS' and item != ')':
+            depends.append(item)
+            continue
+        if state == 'IMPLEMENTS' and item != ')':
+            implements.append(item)
+            continue
+    return [moduleName, depends + implements]
+
+def FindAllNeededModules(modules, foundModules, moduleDepencencies):
+    '''
+    Recursively search moduleDependencies finding all modules.
+    '''
+    if modules is not None and len(modules) > 0:
+        for m in modules:
+            foundModules.add(m)
+            foundModules = foundModules | set(moduleDepencencies[m])  # Set union
+            foundModules = FindAllNeededModules(moduleDepencencies[m],
+                                                foundModules, moduleDepencencies)
+    return foundModules
+
+def MakeModuleTree(module, index, tree, moduleDependencies, treeDepth, level=0):
+    '''
+    For a given module make a tree with the module as the root and the
+    dependent modules as children.
+    '''
+    if module:
+        index = index + [module]
+        if treeDepth == 0 or level < treeDepth:
+            for m in moduleDependencies[module]:
+                level += 1
+                MakeModuleTree(m, index, tree, moduleDependencies, treeDepth, level)
+                level -= 1
+    Add(tree, index)
+
+# One-line Tree in Python
+# See: https://gist.github.com/hrldcpr/2012250
+def Tree(): return defaultdict(Tree)
+
+def Add(tree, keys):
+    for key in keys:
+        tree = tree[key]
+
+def PrettyPrint(tree, level=0):
+    '''
+    Useful to visualize the tree.
+    '''
+    result = ''
+    for k, v in tree.iteritems():
+        s = '  ' * level + k + '\n'
+        result += s
+        level += 1
+        result += PrettyPrint(v, level)
+        level -= 1
+    return result
+
+def GetAllKeys(tree):
+    '''
+    Return all the modules in the tree as a set.
+    '''
+    modules = set()
+    for key in tree:
+        modules = set(list(modules) + [key] + list(GetAllKeys(tree[key])))
+    return modules
+
+def MakeEdgeList(t):
+    '''
+    Return a set that represents the edges in the tree.
+    '''
+    edgeList = set()
+    for k, v in t.iteritems():
+        subKeys = v.keys()
+        if subKeys:
+            for kk in subKeys:
+                edgeList.add((k, kk))
+        edg = MakeEdgeList(v)
+        if edg:
+            edgeList.update(edg)
+    return edgeList
+
+def MakeGraph(t, parent='', level=0):
+    '''
+    Returns a list that has two elements, the vertices and the edge list.
+    '''
+    return [GetAllKeys(t), MakeEdgeList(t)]
+
+def GenerateGraph(moduleList, moduleDepencencies, moduleTreeDepth):
+    '''
+    Generate a graph from the module list.
+    The resultant graph is a list consisting of two sets, the first set
+    is the set of vertices and the second set is the edge list.
+    '''
+    graph = [set(), set()]
+    for m in moduleList:
+        t = Tree()
+        MakeModuleTree(m, [], t, moduleDepencencies, moduleTreeDepth)
+        g = MakeGraph(t)
+        graph[0].update(g[0])
+        if g[1]:
+            graph[1].update(g[1])
+    return graph
+
+def GenerateVTKGraph(graph):
+    '''
+    Take the vertices and edge list in the graph parameter
+    and return a VTK graph.
+    '''
+    g = vtk.vtkMutableDirectedGraph()
+    # Label the vertices
+    labels = vtk.vtkStringArray()
+    labels.SetNumberOfComponents(1)
+    labels.SetName("Labels")
+
+    index = dict()
+    l = list(graph[0])
+    # Make the vertex labels and create a dictionary with the
+    # keys as labels and the vertex ids as the values.
+    for i in range(0, len(l)):
+        # Set the vertex labels
+        labels.InsertNextValue(l[i])
+        index[l[i]] = g.AddVertex()
+    g.GetVertexData().AddArray(labels)
+    # Add edges
+    l = list(graph[1])
+    for i in range(0, len(l)):
+        ll = list(l[i])
+        g.AddGraphEdge(index[ll[0]], index[ll[1]])
+#    g.Dump()
+    return g
+
+def DisplayGraph(graph):
+    '''
+    Display the graph.
+    '''
+    theme = vtk.vtkViewTheme()
+    theme.SetBackgroundColor(0, 0, .1)
+    theme.SetBackgroundColor2(0, 0, .5)
+
+    # Layout the graph
+    # Pick a strategy you like.
+    # strategy = vtk.vtkCircularLayoutStrategy()
+    strategy = vtk.vtkSimple2DLayoutStrategy()
+    # strategy = vtk.vtkRandomLayoutStrategy()
+    layout = vtk.vtkGraphLayout()
+    layout.SetLayoutStrategy(strategy)
+    layout.SetInputData(graph)
+
+    view = vtk.vtkGraphLayoutView()
+    view.AddRepresentationFromInputConnection(layout.GetOutputPort())
+    # Tell the view to use the vertex layout we provide.
+    view.SetLayoutStrategyToPassThrough()
+    view.SetEdgeLabelVisibility(True)
+    view.SetVertexLabelArrayName("Labels")
+    view.SetVertexLabelVisibility(True)
+    view.ApplyViewTheme(theme)
+
+    # Manually create an actor containing the glyphed arrows.
+    # Get the edge geometry
+    edgeGeom = vtk.vtkGraphToPolyData()
+    edgeGeom.SetInputConnection(layout.GetOutputPort())
+    edgeGeom.EdgeGlyphOutputOn()
+
+    # Set the position (0: edge start, 1: edge end) where
+    # the edge arrows should go.
+#        edgeGeom.SetEdgeGlyphPosition(0.8)
+    edgeGeom.SetEdgeGlyphPosition(0.85)
+
+    # Make a simple edge arrow for glyphing.
+#        arrowSource = vtk.vtkGlyphSource2D()
+#        arrowSource.SetGlyphTypeToEdgeArrow()
+#        arrowSource.SetScale(0.075)
+    # Or use a cone.
+    coneSource = vtk.vtkConeSource()
+    coneSource.SetRadius(0.025)
+    coneSource.SetHeight(0.1)
+    coneSource.SetResolution(12)
+
+    # Use Glyph3D to repeat the glyph on all edges.
+    arrowGlyph = vtk.vtkGlyph3D()
+    arrowGlyph.SetInputConnection(0, edgeGeom.GetOutputPort(1))
+#        arrowGlyph.SetInputConnection(1, arrowSource.GetOutputPort())
+    arrowGlyph.SetInputConnection(1, coneSource.GetOutputPort())
+
+    # Add the edge arrow actor to the view.
+    arrowMapper = vtk.vtkPolyDataMapper()
+    arrowMapper.SetInputConnection(arrowGlyph.GetOutputPort())
+    arrowActor = vtk.vtkActor()
+    arrowActor.SetMapper(arrowMapper)
+    view.GetRenderer().AddActor(arrowActor)
+
+    view.ResetCamera()
+    view.Render()
+
+    view.SetInteractionModeTo3D()
+    view.GetInteractor().Initialize()
+    view.GetInteractor().Start()
+
+def main():
+    ver = list(sys.version_info[0:2])
+    ver = ver[0] + ver[1] / 10.0
+    if ver >= 2.7:
+        vtkSourceDir, moduleList, moduleTreeDepth = GetProgramParameters()
+    else:
+        vtkSourceDir, moduleList, moduleTreeDepth = GetProgramParametersOld()
+
+    # Parse the module files making a dictionary of each module and its
+    # dependencies or what it implements.
+    moduleDepencencies = dict()
+    moduleFiles = FindModuleFiles(vtkSourceDir + "/")
+    for fname in moduleFiles:
+        m = ParseModuleFile(fname)
+        moduleDepencencies[m[0]] = m[1]
+
+    # Generate a graph from the module list.
+    graph = GenerateGraph(moduleList, moduleDepencencies, moduleTreeDepth)
+
+    # Now build a vtk graph.
+    g = GenerateVTKGraph(graph)
+
+    # Display it.
+    DisplayGraph(g)
+
+if __name__ == '__main__':
+    main()
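
MakeModuleTree(), Tree() and Add() above build the dependency trees out of
nested defaultdicts (the "one-line tree" from the gist referenced in the code).
A small self-contained sketch, using hypothetical module names rather than
ones parsed from module.cmake files:

from collections import defaultdict

def Tree():
    return defaultdict(Tree)

def Add(tree, keys):
    # Indexing with each key creates the nested dictionaries as a side effect.
    for key in keys:
        tree = tree[key]

t = Tree()
Add(t, ['vtkFiltersSources', 'vtkCommonDataModel', 'vtkCommonCore'])
Add(t, ['vtkFiltersSources', 'vtkCommonExecutionModel'])

# Each branch rooted at a module is one dependency chain; GetAllKeys() and
# MakeEdgeList() in the script flatten such trees into vertices and edges.
print(sorted(t['vtkFiltersSources'].keys()))
# ['vtkCommonDataModel', 'vtkCommonExecutionModel']
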
diff --git a/Utilities/Maintenance/WhatModulesVTK.py b/Utilities/Maintenance/WhatModulesVTK.py
index 819a7df..3508503 100755
--- a/Utilities/Maintenance/WhatModulesVTK.py
+++ b/Utilities/Maintenance/WhatModulesVTK.py
@@ -2,33 +2,72 @@
 import os, sys
 import re
 
-if len(sys.argv) < 3:
+if len(sys.argv) != 3:
     print """
-Usage: WhatModulesVTK.py vtkSourceTree applicationFiles...
+Usage: WhatModulesVTK.py vtkSourceTree applicationFile|applicationFolder
   Generate a FindPackage(VTK COMPONENTS) that lists all modules
-    referenced by a set of files
+    referenced by a set of files.
+  Additionally, two extra find_package(VTK COMPONENTS) lists of modules
+  are produced. One is a minimal set and the other chases down all the
+  dependencies to produce a maximal set of modules. This is done by
+  parsing the module.cmake files.
 
     For example:
       Running from the VTK source,
         ./Utilities/Maintenance/WhatModulesVTK.py . Filters/Modeling/Testing/Cxx/TestRotationalExtrusion.cxx
       Produces
-        Find_Package(VTK COMPONENTS
+        Modules and their dependencies:
+        find_package(VTK COMPONENTS
+          vtkCommonComputationalGeometry
           vtkCommonCore
+          vtkCommonDataModel
+          vtkCommonExecutionModel
+          vtkCommonMath
+          vtkCommonMisc
+          vtkCommonSystem
+          vtkCommonTransforms
           vtkFiltersCore
+          vtkFiltersGeneral
           vtkFiltersModeling
           vtkFiltersSources
+          vtkImagingCore
           vtkRenderingCore
+          vtkRenderingOpenGL
+          vtkTestingCore
+          vtkTestingRendering
+        )
+        Your application code includes 17 of 170 vtk modules.
+
+        All modules referenced in the files:
+        find_package(VTK COMPONENTS
+          vtkCommonCore
+          vtkFiltersCore
+          vtkFiltersModeling
+          vtkFiltersSources
+          vtkRenderingCore
+          vtkRenderingOpenGL
           vtkTestingCore
           vtkTestingRendering
+        )
+        Your application code includes 8 of 170 vtk modules.
+
+        Minimal set of modules:
+        find_package(VTK COMPONENTS
+          vtkCommonCore
+          vtkFiltersCore
+          vtkFiltersModeling
           vtkRenderingOpenGL
-          )
-       To select many files from an application,
-         ./Utilities/Maintenance/WhatModulesVTK.py . $(find /path/to/vtk/project/ -type f)
+          vtkTestingRendering
+        )
+        Your application code includes 5 of 170 vtk modules.
+
 """
     exit(0)
 
-# Build a dict that maps include files to paths
 def IncludesToPaths(path):
+    '''
+    Build a dict that maps include files to paths.
+    '''
     includeToPath = dict()
     prog = re.compile(r"(vtk.*\.h)")
     for root, dirs, files in os.walk(path):
@@ -40,8 +79,10 @@ def IncludesToPaths(path):
                 includeToPath[includeFile] = module
     return includeToPath
 
-# Build a dict that maps paths to modules
 def FindModules(path):
+    '''
+    Build a dict that maps paths to modules.
+    '''
     pathToModule = dict()
     fileProg = re.compile(r"module.cmake")
     moduleProg = re.compile(r".*module[^(]*\(\s*(\w*)",re.S)
@@ -58,8 +99,10 @@ def FindModules(path):
                 fid.close()
     return pathToModule
 
-# Build a set that contains vtk includes
 def FindIncludes(path):
+    '''
+    Build a set that contains vtk includes.
+    '''
     includes = set()
     includeProg = re.compile(r"(vtk.*\.h)")
     fid = open(path, "r")
@@ -69,51 +112,168 @@ def FindIncludes(path):
     fid.close()
     return includes
 
-# Start the program
-
-# Generate dict's for mapping includes to modules
-includesToPaths = IncludesToPaths(sys.argv[1] + "/")
-pathsToModules = FindModules(sys.argv[1] + "/")
-
-# Test to see if VTK source is provided
-if len(pathsToModules) == 0:
-    print sys.argv[1] + " is not a VTK source directory. It does not contain any module.cmake files."
-    exit(1)
-
-# Build a set of includes for all command line files
-allIncludes = set()
-program = sys.argv[0]
-sys.argv.pop(0) # remove program name
-sys.argv.pop(0) # remove vtk source tree
-for f in sys.argv:
-    if os.path.isfile(f):
-        allIncludes.update(FindIncludes(f))
-    else:
+def FindModuleFiles(path):
+    '''
+    Get a list of module files in the VTK directory.
+    '''
+    moduleFiles = [os.path.join(root, name)
+                 for root, dirs, files in os.walk(path)
+                 for name in files
+                 if name == ("module.cmake")]
+    return moduleFiles
+
+def ParseModuleFile(fileName):
+    '''
+    Read each module file returning the module name and what
+    it depends on or implements.
+    '''
+    fh = open(fileName, 'rb')
+    lines = []
+    for line in fh:
+        line = line.strip()
+        if line.startswith('$'): # Skip CMake variable names
+            continue
+        if line.startswith('#'):
+            continue
+        line = line.split('#')[0].strip() # inline comments
+        if line == "":
+            continue
+        line = line.split(')')[0].strip() # closing brace with no space
+        if line == "":
+            continue
+        for l in line.split(" "):
+            lines.append(l)
+    keywords = ['GROUPS', 'DEPENDS', 'IMPLEMENTS', 'PRIVATE_DEPENDS', 'TEST_DEPENDS',
+                 'COMPILE_DEPENDS', 'EXCLUDE_FROM_WRAPPING', 'EXCLUDE_FROM_ALL']
+    moduleName = ""
+    depends = []
+    implements = []
+    state = "START";
+    for item in lines:
+        if state == "START" and item.startswith("vtk_module("):
+            moduleName = item.split("(")[1]
+            continue
+        if item in keywords:
+            state = item
+            continue
+        if state == 'DEPENDS' and item != ')':
+            depends.append(item)
+            continue
+        if state == 'IMPLEMENTS' and item != ')':
+            implements.append(item)
+            continue
+    return [moduleName, depends + implements]
+
+def FindMinimalSetOfModules(modules, moduleDepencencies):
+    '''
+    Find the minimal set of modules needed.
+    '''
+    dependencies = set()
+    for m in modules:
+        dependencies = dependencies | set(moduleDepencencies[m]) # Set union
+    return modules - dependencies # Set difference
+
+
+def FindAllNeededModules(modules, foundModules, moduleDepencencies):
+    '''
+    Recursively search moduleDependencies finding all modules.
+    '''
+    if modules is not None and len(modules) > 0:
+        for m in modules:
+            foundModules.add(m)
+            foundModules = foundModules | set(moduleDepencencies[m]) # Set union
+            foundModules = FindAllNeededModules(moduleDepencencies[m],
+                                                foundModules, moduleDepencencies)
+    return foundModules
+
+def MakeFindPackage(modules):
+    '''
+    Make a useful find_package command.
+    '''
+    # Print a useful cmake command
+    res = "find_package(VTK COMPONENTS\n"
+    for module in sorted(modules):
+        res +=  "  " + module + "\n"
+    res +=  ")"
+    return res
+
+def main():
+    '''
+    Start the program
+    '''
+    # Generate dict's for mapping includes to modules
+    includesToPaths = IncludesToPaths(sys.argv[1] + "/")
+    pathsToModules = FindModules(sys.argv[1] + "/")
+
+    # Test to see if VTK source is provided
+    if len(pathsToModules) == 0:
+        print sys.argv[1] +\
+        " is not a VTK source directory. It does not contain any module.cmake files."
+        exit(1)
+
+    # Parse the module files making a dictionary of each module and its
+    # dependencies or what it implements.
+    moduleDepencencies = dict()
+    moduleFiles = FindModuleFiles(sys.argv[1] + "/")
+    for fname in moduleFiles:
+        m = ParseModuleFile(fname)
+        moduleDepencencies[m[0]] = m[1]
+
+    # Build a set of includes for all command line files
+    allIncludes = set()
+    program = sys.argv[0]
+    sys.argv.pop(0) # remove program name
+    sys.argv.pop(0) # remove vtk source tree
+    for f in sys.argv:
+        if os.path.isfile(f):
+            allIncludes.update(FindIncludes(f))
+        else:
+            # We have a folder so look through all the files.
+            for path, dirs, files in os.walk(f):
+                for fn in files:
+                    allIncludes.update(FindIncludes(os.path.join(path,fn)))
+    if len(allIncludes) == 0:
         print program + ": " + f + " does not exist"
         exit(1)
-# Build a set that contains all modules referenced in command line files
-allModules = set()
-for inc in allIncludes:
-    if inc in includesToPaths:
-        module = includesToPaths[inc]
-        if module in pathsToModules:
-            allModules.add(pathsToModules[includesToPaths[inc]])
-
-# Add OpenGL factory classes if required
-if "vtkRenderingFreeType" in allModules:
-    allModules.add("vtkRenderingFreeTypeFontConfig")
-    allModules.add("vtkRenderingFreeTypeOpenGL")
-if "vtkRenderingCore" in allModules:
-    allModules.add("vtkRenderingOpenGL")
-if "vtkRenderingVolume" in allModules:
-    allModules.add("vtkRenderingVolumeOpenGL")
-
-# Print a useful cmake command
-print "find_package(VTK COMPONENTS"
-for module in sorted(allModules):
-    print "  " + module
-
-
-print ")"
-
-print "Your application code includes " + str(len(allModules)) + " of " + str(len(pathsToModules)) + " vtk modules."
+
+    # Build a set that contains all modules referenced in command line files
+    allModules = set()
+    for inc in allIncludes:
+        if inc in includesToPaths:
+            module = includesToPaths[inc]
+            if module in pathsToModules:
+                allModules.add(pathsToModules[includesToPaths[inc]])
+
+    # Add vtkInteractionStyle if required.
+    if "vtkRenderWindowInteractor.h" in allIncludes:
+        allModules.add("vtkInteractionStyle")
+
+    # Add OpenGL factory classes if required.
+    if "vtkRenderingFreeType" in allModules:
+        allModules.add("vtkRenderingFreeTypeFontConfig")
+        allModules.add("vtkRenderingFreeTypeOpenGL")
+    if "vtkRenderingCore" in allModules:
+        allModules.add("vtkRenderingOpenGL")
+    if "vtkRenderingVolume" in allModules:
+        allModules.add("vtkRenderingVolumeOpenGL")
+
+    # Find the minimal set of modules.
+    minimalSetOfModules =\
+        FindMinimalSetOfModules(allModules, moduleDepencencies)
+    # Find all the modules, chasing down all the dependencies.
+    allNeededModules =\
+        FindAllNeededModules(minimalSetOfModules, set(), moduleDepencencies)
+
+    modules = {'All modules referenced in the files:': allModules,
+                'Minimal set of modules:': minimalSetOfModules,
+                'Modules and their dependencies:': allNeededModules
+              }
+    for k, v in modules.iteritems():
+        print k
+        print MakeFindPackage(v)
+        print "Your application code includes " + str(len(v)) +\
+              " of " + str(len(pathsToModules)) + " vtk modules.\n"
+    print
+
+if __name__ == '__main__':
+    main()
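
The new minimal and maximal lists come from plain set algebra over the parsed
module.cmake dependencies: FindMinimalSetOfModules() removes any referenced
module that is already a dependency of another referenced module, and
FindAllNeededModules() then chases the remaining modules' dependencies
transitively. A toy sketch with a hypothetical three-module dependency table:

# Hypothetical table: module -> modules it DEPENDS on or IMPLEMENTS.
deps = {'vtkA': ['vtkB'],
        'vtkB': ['vtkC'],
        'vtkC': []}

referenced = set(['vtkA', 'vtkB'])

# Minimal set: drop 'vtkB' because 'vtkA' already pulls it in.
union_of_deps = set()
for m in referenced:
    union_of_deps |= set(deps[m])
minimal = referenced - union_of_deps
print(sorted(minimal))                 # ['vtkA']

# Maximal set: follow the dependencies of the minimal set transitively.
def chase(modules, found):
    for m in modules:
        found.add(m)
        found |= set(deps[m])
        found = chase(deps[m], found)
    return found

print(sorted(chase(minimal, set())))   # ['vtkA', 'vtkB', 'vtkC']
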
diff --git a/Utilities/MaterialLibrary/CMakeLists.txt b/Utilities/MaterialLibrary/CMakeLists.txt
deleted file mode 100644
index 330a189..0000000
--- a/Utilities/MaterialLibrary/CMakeLists.txt
+++ /dev/null
@@ -1,283 +0,0 @@
-PROJECT(MaterialLibrary)
-
-vtk_module_export_info()
-
-# FIXME: Provide a reasonable default for this value in MaterialLibrary?
-set(VTK_DEFAULT_SHADERS_DIR
-  "${VTK_BINARY_DIR}/Utilities/MaterialLibrary/Repository"
-  CACHE INTERNAL
-  "The directory that code for shaders is stored in.")
-set(VTK_MATERIALS_DIRS
-  ${VTK_DEFAULT_SHADERS_DIR}
-  CACHE STRING
-  "A ; separated list of directories to search for materials/shaders.")
-mark_as_advanced(VTK_MATERIALS_DIRS)
-
-include_directories("${VTK_SOURCE_DIR}/Common/Core"
-  "${VTK_BINARY_DIR}/Common/Core")
-
-IF(NOT CMAKE_CROSSCOMPILING)
-  ADD_EXECUTABLE(ProcessShader ProcessShader.cxx)
-  TARGET_LINK_LIBRARIES(ProcessShader vtksys)
-  vtk_compile_tools_target(ProcessShader)
-ENDIF(NOT CMAKE_CROSSCOMPILING)
-
-# Macro to obtain the relative path of a file.
-MACRO(GET_RELATIVE_PATH __path __file __result)
-  STRING(REGEX REPLACE "/" ";" __dirfilelist "${__file}")
-  SET(__found)
-  SET(__cfile "${__file}")
-  SET(__res "")
-  FOREACH(__dir ${__dirfilelist})
-    IF(NOT __found)
-      STRING(REGEX REPLACE ".*/(.*)" "\\1" __lineres "${__cfile}")
-      STRING(REGEX REPLACE "(.*)/.*" "\\1" __cfile "${__cfile}")
-      SET(__res "${__lineres}/${__res}")
-      STRING(COMPARE EQUAL "${__cfile}" "${__path}" __flag)
-      IF(__flag)
-        SET(__found 1)
-      ENDIF(__flag)
-    ENDIF(NOT __found)
-  ENDFOREACH(__dir)
-  STRING(REGEX REPLACE "/$" "" __res "${__res}")
-  #MESSAGE("Dir: ${__cfile}")
-  #MESSAGE("File: ${__res}")
-  SET(${__result} "${__res}")
-ENDMACRO(GET_RELATIVE_PATH)
-
-#Macro to copy files over. Copies the entire subtree.
-MACRO(COPY_FILES_TO_DIR src_dir target_dir)
-  IF (NOT EXISTS ${target_dir})
-    FILE(MAKE_DIRECTORY ${target_dir})
-  ENDIF(NOT EXISTS ${target_dir})
-
-  FILE (GLOB_RECURSE files_to_copy1 ${src_dir}/*.*)
-
-  FOREACH (src_file ${files_to_copy1})
-    GET_RELATIVE_PATH( ${src_dir} ${src_file} rel_file )
-    GET_FILENAME_COMPONENT(src_path "${rel_file}" PATH)
-    GET_FILENAME_COMPONENT(filename "${rel_file}" NAME)
-    STRING(REGEX REPLACE "/" ";" dirlist "${src_path}")
-
-    # Ensure that the tree structure is identical on the destination side.
-    SET (path ${target_dir})
-    FOREACH(subdir ${dirlist})
-      SET(path "${path}/${subdir}")
-      IF (NOT EXISTS ${path})
-        FILE(MAKE_DIRECTORY ${path})
-      ENDIF (NOT EXISTS ${path})
-    ENDFOREACH(subdir)
-
-    # Now configure the file over.
-    CONFIGURE_FILE(${src_file} "${path}/${filename}" COPYONLY IMMEDIATE)
-  ENDFOREACH (src_file)
-ENDMACRO(COPY_FILES_TO_DIR)
-
-# Shaders/Materials provided in Utilities/MaterialLibrary/Repository
-# are copied over to the binary dir.
-IF (NOT "${VTK_BINARY_DIR}" MATCHES "^${VTK_SOURCE_DIR}$")
-  # Ensure that the Source and Build dir are not same.
-  # In that case we don't need to copy anything.
-  COPY_FILES_TO_DIR(${MaterialLibrary_SOURCE_DIR}/Repository
-    ${VTK_DEFAULT_SHADERS_DIR})
-
-ENDIF (NOT "${VTK_BINARY_DIR}" MATCHES "^${VTK_SOURCE_DIR}$")
-
-# Build the ShaderCodes Library.
-SET (CgCodes
-  ${MaterialLibrary_SOURCE_DIR}/CgShaders/FragmentTexture.cg
-  ${MaterialLibrary_SOURCE_DIR}/CgShaders/VertTexPassThrough.cg
-  ${MaterialLibrary_SOURCE_DIR}/CgShaders/VertPassThrough.cg
-  ${MaterialLibrary_SOURCE_DIR}/CgShaders/FragmentIsotropicTorranceSparrow.cg
-  ${MaterialLibrary_SOURCE_DIR}/CgShaders/cg_sinewave.cg
-  ${MaterialLibrary_SOURCE_DIR}/CgShaders/VertexLighting.cg
-  ${MaterialLibrary_SOURCE_DIR}/CgShaders/Twisted.cg
-  )
-
-SET (GLSLCodes
-  ${MaterialLibrary_SOURCE_DIR}/GLSLShaders/TestAppVarFrag.glsl
-  ${MaterialLibrary_SOURCE_DIR}/GLSLShaders/TestVertex.glsl
-  ${MaterialLibrary_SOURCE_DIR}/GLSLShaders/TestVtkPropertyFrag.glsl
-  ${MaterialLibrary_SOURCE_DIR}/GLSLShaders/TestMatrixFrag.glsl
-  ${MaterialLibrary_SOURCE_DIR}/GLSLShaders/TestScalarVectorFrag.glsl
-  ${MaterialLibrary_SOURCE_DIR}/GLSLShaders/Twisted.glsl
-  )
-
-SET (ShaderLibraryDependencies)
-SET (ShaderLibraryHeaders)
-SET (ShaderLibraryModules)
-
-# Macro to obtain the Module names from filenames.
-MACRO(get_modules modules prefix files )
-  FOREACH(file ${files})
-    STRING(REGEX REPLACE ".*[\\\\/]([^./\\\\]+)\\.(cg|glsl|xml)$" "\\1" module "${file}")
-    SET(${modules} ${${modules}} "${prefix}${module}")
-  ENDFOREACH(file)
-ENDMACRO(get_modules)
-
-SET(CMD_DEP ProcessShader)
-SET(CMD ProcessShader)
-
-IF (VTK_USE_CG_SHADERS)
-  # Create the Cg library.
-  ADD_CUSTOM_COMMAND(
-    OUTPUT ${MaterialLibrary_BINARY_DIR}/vtkCgShaderLibrary.h
-    DEPENDS ${CgCodes}
-    ${CMD_DEP}
-    COMMAND ${CMD}
-    ARGS ${MaterialLibrary_BINARY_DIR}/vtkCgShaderLibrary.h
-    vtkShaderCG Code GetCode
-    ${CgCodes})
-
-  SET (ShaderLibraryHeaders ${ShaderLibraryHeaders}
-    vtkCgShaderLibrary.h)
-  SET (ShaderLibraryDependencies ${ShaderLibraryDependencies}
-    ${MaterialLibrary_BINARY_DIR}/vtkCgShaderLibrary.h)
-
-  get_modules(ShaderLibraryModules "CG" "${CgCodes}")
-ENDIF (VTK_USE_CG_SHADERS)
-
-IF (VTK_USE_GLSL_SHADERS)
-  # Create the GLSL library
-  ADD_CUSTOM_COMMAND(
-    OUTPUT ${MaterialLibrary_BINARY_DIR}/vtkGLSLShaderLibrary.h
-    DEPENDS ${GLSLCodes}
-    ${CMD_DEP}
-    COMMAND ${CMD}
-    ARGS ${MaterialLibrary_BINARY_DIR}/vtkGLSLShaderLibrary.h
-    vtkShaderGLSL Code GetCode
-    ${GLSLCodes})
-  SET (ShaderLibraryHeaders ${ShaderLibraryHeaders}
-    vtkGLSLShaderLibrary.h)
-  SET (ShaderLibraryDependencies ${ShaderLibraryDependencies}
-    ${MaterialLibrary_BINARY_DIR}/vtkGLSLShaderLibrary.h)
-
-  get_modules(ShaderLibraryModules "GLSL" "${GLSLCodes}")
-ENDIF (VTK_USE_GLSL_SHADERS)
-
-# Write the vtkShaderCodeLibraryMacro header file.
-SET (VTK_SHADER_CODE_LIBRARY_MACRO)
-SET (VTK_SHADER_CODE_LIBRARY_LIST)
-FOREACH(file ${ShaderLibraryHeaders})
-  SET (VTK_SHADER_CODE_LIBRARY_MACRO
-    "${VTK_SHADER_CODE_LIBRARY_MACRO}#include \"${file}\"\n")
-ENDFOREACH(file)
-
-SET (VTK_SHADER_CODE_LIBRARY_MACRO "${VTK_SHADER_CODE_LIBRARY_MACRO}
-#define vtkShaderCodeLibraryMacro(name) \\\n")
-FOREACH (module ${ShaderLibraryModules})
-  # get the module name.
-  SET(VTK_SHADER_CODE_LIBRARY_MACRO
-    "${VTK_SHADER_CODE_LIBRARY_MACRO} if (strcmp(name, \"${module}\") == 0)\\
-        {\\
-        return vtkShader${module}GetCode();\\
-        }\\\n")
-  SET(VTK_SHADER_CODE_LIBRARY_LIST
-    "${VTK_SHADER_CODE_LIBRARY_LIST}
-    \"${module}\",")
-ENDFOREACH(module)
-
-CONFIGURE_FILE(${MaterialLibrary_SOURCE_DIR}/vtkShaderCodeLibraryMacro.h.in
-  ${MaterialLibrary_BINARY_DIR}/vtkShaderCodeLibraryMacro.h
-  @ONLY)
-
-# Build the Material library.
-SET (MaterialXMLs)
-
-IF (VTK_USE_CG_SHADERS)
-# # Add Materials using Cg Shaders.
-  SET (MaterialXMLs ${MaterialXMLs}
-    ${MaterialLibrary_SOURCE_DIR}/Materials/CgSinewave.xml
-    ${MaterialLibrary_SOURCE_DIR}/Materials/CgLighting.xml
-    ${MaterialLibrary_SOURCE_DIR}/Materials/CgTwisted.xml
-    )
-ENDIF (VTK_USE_CG_SHADERS)
-
-IF (VTK_USE_GLSL_SHADERS)
-  # Add Materials using GLSL Shaders.
-  SET (MaterialXMLs ${MaterialXMLs}
-    ${MaterialLibrary_SOURCE_DIR}/Materials/GLSLTwisted.xml
-    )
-ENDIF (VTK_USE_GLSL_SHADERS)
-
-# Add the Materials that don't use Cg/GLSL.
-SET (Materials ${MaterialXMLs}
-  )
-
-CONFIGURE_FILE(
-  ${MaterialLibrary_SOURCE_DIR}/vtkMaterialXMLLibrary.h.forcebuild.in
-  ${MaterialLibrary_BINARY_DIR}/vtkMaterialXMLLibrary.h.forcebuild
-  @ONLY)
-
-SET (VTK_MATERIAL_LIBRARY_MACRO "")
-# materials_exist is variable to indicate if there are any materials.
-# if not we can reduce our work by not trying to build the headers at
-# all.
-SET (materials_exist 0)
-FOREACH(material ${Materials})
-  SET (materials_exist 1)
-ENDFOREACH(material ${Materials})
-
-SET(vtkMaterialLibraryConfiguredFilesDependencies
-  ${ShaderLibraryDependencies}
-  ${MaterialLibrary_BINARY_DIR}/vtkShaderCodeLibraryMacro.h
-  ${MaterialLibrary_BINARY_DIR}/vtkMaterialLibraryMacro.h)
-
-IF (${materials_exist})
-  ADD_CUSTOM_COMMAND(
-    OUTPUT ${MaterialLibrary_BINARY_DIR}/vtkMaterialXMLLibrary.h
-    DEPENDS ${MaterialXMLs}
-      ${CMD_DEP}
-      ${MaterialLibrary_BINARY_DIR}/vtkMaterialXMLLibrary.h.forcebuild
-    COMMAND ${CMD}
-    ARGS ${MaterialLibrary_BINARY_DIR}/vtkMaterialXMLLibrary.h
-    vtkMaterial XML GetXML
-    ${MaterialXMLs}
-    )
-
-  SET (VTK_MATERIAL_LIBRARY_MACRO "#include \"vtkMaterialXMLLibrary.h\"\n")
-
-  # Add a dependency on the vtkMaterialXMLLibrary.h file for the target that generates all
-  # the configured files.
-  SET(vtkMaterialLibraryConfiguredFilesDependencies
-    ${vtkMaterialLibraryConfiguredFilesDependencies}
-    ${MaterialLibrary_BINARY_DIR}/vtkMaterialXMLLibrary.h)
-ENDIF (${materials_exist})
-
-# Write the MaterialLibraryMacro header file.
-SET (VTK_MATERIAL_LIBRARY_LIST)
-SET (VTK_MATERIAL_LIBRARY_MACRO "${VTK_MATERIAL_LIBRARY_MACRO}
-#define vtkMaterialLibraryMacro(name) \\\n")
-
-
-SET (MaterialModules)
-get_modules(MaterialModules "" "${MaterialXMLs}")
-FOREACH(module ${MaterialModules})
-  SET (VTK_MATERIAL_LIBRARY_MACRO
-    "${VTK_MATERIAL_LIBRARY_MACRO} if (strcmp(name, \"${module}\") == 0)\\
-      {\\
-      return vtkMaterial${module}GetXML();\\
-      }\\\n")
-  SET (VTK_MATERIAL_LIBRARY_LIST
-    "${VTK_MATERIAL_LIBRARY_LIST}
-    \"${module}\",")
-ENDFOREACH(module)
-
-CONFIGURE_FILE(
-  ${MaterialLibrary_SOURCE_DIR}/vtkMaterialLibraryMacro.h.in
-  ${MaterialLibrary_BINARY_DIR}/vtkMaterialLibraryMacro.h)
-
-ADD_CUSTOM_TARGET(vtkMaterialLibraryConfiguredFiles ALL DEPENDS
-  ${vtkMaterialLibraryConfiguredFilesDependencies}
-  )
-
-
-SET(MATERIAL_LIBRARY_SHADER_MACRO_HEADER
-  ${MaterialLibrary_BINARY_DIR}/vtkShaderCodeLibraryMacro.h
-  CACHE INTERNAL "Path to vtkShaderCodeLibraryMacro.h")
-
-SET(MATERIAL_LIBRARY_MATERIAL_MACRO_HEADER
-  ${MaterialLibrary_BINARY_DIR}/vtkMaterialLibraryMacro.h
-  CACHE INTERNAL "Path to vtkMaterialLibraryMacro.h")
-
-
diff --git a/Utilities/MaterialLibrary/CgShaders/FragmentIsotropicTorranceSparrow.cg b/Utilities/MaterialLibrary/CgShaders/FragmentIsotropicTorranceSparrow.cg
deleted file mode 100644
index cd5b045..0000000
--- a/Utilities/MaterialLibrary/CgShaders/FragmentIsotropicTorranceSparrow.cg
+++ /dev/null
@@ -1,228 +0,0 @@
-// represents all data members stored in vtkLight
-struct vtkLight
-{
-  uniform float3 Position;
-  uniform float  Intensity;
-};
-// represents all data members stored in vtkProperty
-struct vtkProperty
-{
-  uniform float3 AmbientColor;
-  uniform float3 DiffuseColor;
-  uniform float3 SpecularColor;
-
-  uniform float Ambient;
-  uniform float Diffuse;
-  uniform float Specular;
-  uniform float SpecularPower;
-  uniform float Opacity;
-};
-
-// represents all data members stored in vtkCamera
-struct vtkCamera
-{
-  uniform float3 Position;
-};
-
-// Central location for functions commonly used when computing reflectance
-
-
-float Phong(float alpha, float c)
-{
-  //            c1
-  //  D1 = (N.H)
-  //
-  return max( 0, pow(cos(alpha),c) );
-}
-
-float GaussianMicroFacets(float alpha, float c)
-{
-  //                      2
-  //         -(alpha * C)
-  //  D2 = e
-  //
-  return exp( -pow(alpha*c,2) );
-}
-
-float BlinnMicroFacets(float alpha, float c)
-{
-  // AKA Blinn shading function
-  //       /                              \
-  //       |                2             |
-  //       |              C3              |
-  //  D3 = | ---------------------------- |
-  //       |     2             2          |
-  //       |  cos (alpha) * (C3 - 1) + 1  |
-  //       \                              /
-  //
-  c3sq = pow(c,2);
-  float den = pow( cos(alpha), 2 ) * (c3sq-1.0) + 1.0;
-  if( den != 0.0 )
-    {
-    return pow( c3sq/den, 2 );
-    }
-  return 0.0;
-}
-
-float TrowbridgeMicroFacets( float alpha, float c )
-{
-  float c2 = pow(c,2);
-  float c2a = pow(cos(alpha),2);
-  return pow( c2/(c2a*(c2-1)+1), 2 );
-}
-
-float BeckmannMicroFacets( float m, float a )
-{
-  // a = dot(N,H)
-  //
-  //                        /      \  2
-  //                       | tan(a) |
-  //                     - | ------ |
-  //                       |   m    |
-  //         1              \      /
-  //  --------------- * e
-  //      2       4
-  //    m    * cos a
-  //
-  float den = pow(m,2) * pow(cos(a),4 );
-  float value = 0.0;
-  if( abs(den) > 0.00001 )
-    {
-    value = exp( -1.0 * pow((tan(a)/m),2))/den;
-    }
-  return 0.0;
-}
-
-
-
-// Rendering variables
-//
-// V - unit vector in direction of viewer
-// L - unit vector in direction of light source
-// H - unit vector that bisects V and L
-// N - unit vector in normal direction
-//
-// theta - angle between H and V or H and L
-// alpha - angle between N and H
-//
-// GAF = Geometric Attenuation Factor
-//
-// MicroFacets as V-shaped grooves
-float VGrooveGAF( float3 E, float3 H, float3 L, float3 N )
-{
-  float Ga = 1.0;
-  float Gb = Ga;
-  float Gc = Ga;
-  float nh = dot(N,H);
-  float eh = dot(E,H);
-  if( abs(dot(E,H)) > 0.00001 )
-    {
-    Gb = 2.0 * nh * dot(N,E) / eh;
-    Gc = 2.0 * nh * dot(N,L) / eh;
-    }
-  return min( 1.0, min(Gb,Gc));
-}
-
-
-// c = dot(E,H)
-// n = refractive index
-float Fresnel( float3 E, float3 H, float n )
-{
-  float zero = 0.000001;
-  float c = dot(E,H);
-  float g = sqrt( c*c + n*n - 1 );
-
-  float num = pow((c*(g+c)-1.0),2);
-  float den = pow((c*(g-c)+1.0),2);
-
-  float f = 0.0;
-  if( abs(pow(g+c,2)) > zero )
-    {
-    f = pow((g-c),2)/pow((g+c),2);
-    }
-
-  if( abs(den) > zero )
-    {
-    f *= (1.0 + num/den) * 0.5;
-    }
-  else
-    {
-    f = 0.0;
-    }
-  return f;
-}
-
-
-// Compute the diffuse and specular intensity of a light source
-// N, L, E should all be normalized
-float3 ReflectedIntensity( float3 N, // unit surface normal
-                          float3 E, // eye vector
-                          float3 L, // light vector
-                          float3 H, // half vector, bisects E and L
-                          vtkLight light, // vtkLight in question
-                          vtkProperty property, // vtkProperties
-                          float facetConst, // const for micro-facet distribution
-                          float RefractiveIndex // surface property
-                          )
-{
-#if 1
-  float mf = GaussianMicroFacets( dot(H,N), facetConst );
-#else
-  float mf = TrowbridgeMicroFacets( dot(H,N), facetConst );
-#endif
-  float gaf = VGrooveGAF(E,H,L,N);
-  float fresnel = Fresnel(E,H,RefractiveIndex);
-
-  // don't let these values go negative
-  float Rd = max(0.0, dot(N,L) );
-#if 1
-  float Rs = max(0.0, mf*gaf*fresnel/dot(N,E) );
-#else
-  float Rs = max(0.0, pow(dot(N,H),property.SpecularPower));
-#endif
-
-  return light.Intensity * (Rd * property.Diffuse * property.DiffuseColor +
-                            Rs * property.Specular * property.SpecularColor);
-}
-
-// don't reference TEXCOORD0, it's an alias for POSITION
-void fragment_program( in float4 pos    : TEXCOORD0,
-                       in float4 normal : TEXCOORD1,
-                       in float4 col    : COLOR0,
-
-                       uniform float facetConst,
-                       uniform float RefractiveIndex,
-
-                       uniform vtkLight light0,
-                       uniform vtkLight light1,
-                       uniform vtkCamera camera,
-                       uniform vtkProperty property,
-
-                       out float4 color  : COLOR
-                       )
-{
-  float3 N = normalize( normal.xyz );
-  float3 E = normalize( camera.Position - pos.xyz );
-
-  color.rgb = float3( 0.0, 0.0, 0.0 );
-  // Ambient Term
-#if 1
-  color.rgb += property.Ambient * property.AmbientColor;
-#endif
-
-#if 1
-  // Diffuse and Specular - light0
-  float3 L = normalize( light0.Position - pos.xyz );
-  float3 H = normalize(L+E);
-  color.rgb += ReflectedIntensity( N, E, L, H, light0, property, facetConst, RefractiveIndex);
-#endif
-
-#if 1
-  // Diffuse and Specular - light1
-  L = normalize( light1.Position - pos.xyz );
-  H = normalize(L+E);
-  color.rgb += ReflectedIntensity( N, E, L, H, light1, property, facetConst, RefractiveIndex);
-#endif
-
-  color.a = property.Opacity;
-}
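
For reference, the removed fragment shader implemented a per-light isotropic
Torrance-Sparrow model; reading ReflectedIntensity(), each light contributes

\[
  I = I_{\mathrm{light}}\left(R_d\,k_d\,C_d + R_s\,k_s\,C_s\right),
  \qquad
  R_d = \max(0,\;N\cdot L),
  \qquad
  R_s = \max\!\left(0,\;\frac{D\,G\,F}{N\cdot E}\right),
\]

where D is the microfacet distribution (Gaussian by default, Trowbridge in the
#else branch), G the V-groove geometric attenuation factor, F the Fresnel
term, and k_d, C_d, k_s, C_s the vtkProperty diffuse/specular coefficients and
colors; the ambient term is added once per fragment.
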
diff --git a/Utilities/MaterialLibrary/CgShaders/FragmentTexture.cg b/Utilities/MaterialLibrary/CgShaders/FragmentTexture.cg
deleted file mode 100644
index 056cb80..0000000
--- a/Utilities/MaterialLibrary/CgShaders/FragmentTexture.cg
+++ /dev/null
@@ -1,20 +0,0 @@
-// don't reference TEXCOORD0, it's an alias for POSITION
-void fragment_program( in float2 tcoord : TEXCOORD0,
-                       in float4 pos    : TEXCOORD1,
-                       in float4 normal : TEXCOORD2,
-                       in float4 col    : COLOR0,
-
-                       uniform float3 opacity,
-                       uniform sampler2D decal,
-
-                       out float4 color  : COLOR
-                       )
-{
-#if 0
-  color.rg = tcoord.rg;
-  color.b = 1.0;
-#else
-  color.rgb = tex2D(decal, tcoord);
-#endif
-  color.a = opacity;
-}
diff --git a/Utilities/MaterialLibrary/CgShaders/Twisted.cg b/Utilities/MaterialLibrary/CgShaders/Twisted.cg
deleted file mode 100644
index f2cb2df..0000000
--- a/Utilities/MaterialLibrary/CgShaders/Twisted.cg
+++ /dev/null
@@ -1,92 +0,0 @@
-// define inputs from application
-struct vertin
-{
-  float4 Position : POSITION;
-  float4 Color    : COLOR0;
-  float3 Normal   : NORMAL;
-};
-
-// define outputs from vertex shader
-struct vertout
-{
-  float4 HPosition : POSITION;
-  float4 Color    : COLOR0;
-};
-
-vertout main(vertin IN,
-  uniform float4x4 ModelViewProj,
-  uniform float4x4 ModelViewIT,
-  uniform float Rate,
-  uniform float Ka,
-  uniform float3 AmbientColor,
-  uniform float Kd,
-  uniform float3 DiffuseColor,
-  uniform float Ks,
-  uniform float Ksp,
-  uniform float3 SpecularColor,
-
-  uniform float3 lightPosition,
-  uniform float3 eyePosition,
-  uniform float3 lightDiffuseColor,
-  uniform float3 lightAmbientColor,
-  uniform float3 lightSpecularColor)
-{
-  vertout OUT;
-
-  int twist = 1;
-  float rate = Rate ;
-
-  // transform vertex position into homogenous clip-space
-  float4 inPosition = IN.Position;
-  float3 inNormal = IN.Normal;
-
-  float4 usedPosition;
-  float3 usedNormal;
-
-  if ( twist )
-    {
-    float angle = inPosition[2];
-
-    float4 twistedPosition;
-    twistedPosition[0] = cos(angle*rate) * inPosition[0] + sin(angle*rate) * inPosition[1];
-    twistedPosition[1] = -sin(angle*rate) * inPosition[0] + cos(angle*rate)* inPosition[1];
-    twistedPosition[2] = inPosition[2];
-    twistedPosition[3] = inPosition[3];
-
-    usedPosition = twistedPosition;
-
-    float3 twistedNormal;
-    twistedNormal[0] = cos(angle*rate) * inNormal[0] + sin(angle*rate) * inNormal[1];
-    twistedNormal[1] = -sin(angle*rate) * inNormal[0] + cos(angle*rate)* inNormal[1];
-    twistedNormal[2] = inNormal[2];
-
-    usedNormal = twistedNormal;
-    }
-  else
-    {
-    usedPosition = inPosition;
-    usedNormal = inNormal;
-    }
-
-  OUT.HPosition = mul(ModelViewProj, usedPosition);
-
-  // Compute Ambient intensity.
-  float3 ambient = Ka * AmbientColor * lightAmbientColor;
-
-  // Compute Diffuse intensity.
-  float3 L = normalize(lightPosition - usedPosition);
-  float diffuseLight = max(dot(usedNormal.xyz, L), 0);
-  float3 diffuse = Kd * DiffuseColor * lightDiffuseColor * diffuseLight;
-
-  // Compute Specular intensity.
-  float3 V = normalize(eyePosition - usedPosition);
-  float3 H = normalize(L + V);
-  float specularLight = pow(max(dot(usedNormal.xyz, H), 0), Ksp);
-  if (diffuseLight <= 0) specularLight = 0;
-  float3 specular = Ks * SpecularColor * lightSpecularColor * specularLight;
-
-  OUT.Color.xyz = ambient + diffuse + specular;
-  OUT.Color.w = 1;
-
-  return OUT;
-}
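
The removed Twisted.cg vertex shader rotates each vertex (and its normal)
about the z axis by an angle proportional to the vertex's z coordinate, then
applies standard per-vertex Blinn-Phong lighting. With r = Rate and angle = z,
the twist it computes is

\[
  \begin{pmatrix} x' \\ y' \end{pmatrix}
  =
  \begin{pmatrix} \cos(z r) & \sin(z r) \\ -\sin(z r) & \cos(z r) \end{pmatrix}
  \begin{pmatrix} x \\ y \end{pmatrix},
  \qquad z' = z,
\]

with the same rotation applied to the x and y components of the normal. A GLSL
counterpart, GLSLShaders/Twisted.glsl, is listed in the removed CMakeLists.txt
above.
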
diff --git a/Utilities/MaterialLibrary/CgShaders/VertPassThrough.cg b/Utilities/MaterialLibrary/CgShaders/VertPassThrough.cg
deleted file mode 100644
index ca9f5f6..0000000
--- a/Utilities/MaterialLibrary/CgShaders/VertPassThrough.cg
+++ /dev/null
@@ -1,60 +0,0 @@
-/*********************************************************************NVMH3****
-Path:  NVSDK\Common\media\programs
-File:  simple.cg
-
-Copyright NVIDIA Corporation 2002
-TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THIS SOFTWARE IS PROVIDED
-*AS IS* AND NVIDIA AND ITS SUPPLIERS DISCLAIM ALL WARRANTIES, EITHER EXPRESS
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE.  IN NO EVENT SHALL NVIDIA OR ITS SUPPLIERS
-BE LIABLE FOR ANY SPECIAL, INCIDENTAL, INDIRECT, OR CONSEQUENTIAL DAMAGES
-WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF BUSINESS PROFITS,
-BUSINESS INTERRUPTION, LOSS OF BUSINESS INFORMATION, OR ANY OTHER PECUNIARY LOSS)
-ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF NVIDIA HAS
-BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-
-Comments:
-
-******************************************************************************/
-
-// define inputs from application
-struct appin
-{
-    float4 Position : POSITION;
-    float4 Color    : COLOR0;
-    float4 Normal   : NORMAL;
-};
-
-// define outputs from vertex shader
-struct vertout
-{
-    float4 HPosition : POSITION;
-    float4 OPosition : TEXCOORD0; // Obj coords location
-    float4 Normal    : TEXCOORD1;
-    float4 Color0    : COLOR0;
-};
-
-vertout main(appin IN,
-             uniform float4x4 ModelViewProj,
-             uniform float4x4 ModelViewIT,
-             uniform float4x4 ModelView
-             )
-{
-  vertout OUT;
-
-  // transform vertex position into homogenous clip-space
-  OUT.HPosition = mul(ModelViewProj, IN.Position);
-  // pass through position in object space for reflectance calculation
-#if 1
-  OUT.Normal = IN.Normal;
-  OUT.OPosition = IN.Position;
-#else
-  OUT.Normal = normalize( mul( ModelViewIT, IN.Normal) );
-  OUT.OPosition = mul(ModelViewIT, IN.Position);
-#endif
-
-  OUT.Color0 = IN.Color;
-
-  return OUT;
-}
diff --git a/Utilities/MaterialLibrary/CgShaders/VertTexPassThrough.cg b/Utilities/MaterialLibrary/CgShaders/VertTexPassThrough.cg
deleted file mode 100644
index fe40f7b..0000000
--- a/Utilities/MaterialLibrary/CgShaders/VertTexPassThrough.cg
+++ /dev/null
@@ -1,63 +0,0 @@
-/*********************************************************************NVMH3****
-Path:  NVSDK\Common\media\programs
-File:  simple.cg
-
-Copyright NVIDIA Corporation 2002
-TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THIS SOFTWARE IS PROVIDED
-*AS IS* AND NVIDIA AND ITS SUPPLIERS DISCLAIM ALL WARRANTIES, EITHER EXPRESS
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE.  IN NO EVENT SHALL NVIDIA OR ITS SUPPLIERS
-BE LIABLE FOR ANY SPECIAL, INCIDENTAL, INDIRECT, OR CONSEQUENTIAL DAMAGES
-WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF BUSINESS PROFITS,
-BUSINESS INTERRUPTION, LOSS OF BUSINESS INFORMATION, OR ANY OTHER PECUNIARY LOSS)
-ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF NVIDIA HAS
-BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-
-Comments:
-
-******************************************************************************/
-
-// define inputs from application
-struct appin
-{
-    float4 Position : POSITION;
-    float4 Color    : COLOR0;
-    float4 Normal   : NORMAL;
-    float4 TCoord   : TEXCOORD0;
-};
-
-// define outputs from vertex shader
-struct vertout
-{
-    float4 HPosition : POSITION;
-    float2 TCoord : TEXCOORD0; // Obj coords location
-    float4 OPosition : TEXCOORD1; // Obj coords location
-    float4 Normal    : TEXCOORD2;
-    float4 Color0    : COLOR0;
-};
-
-vertout main(appin IN,
-             uniform float4x4 ModelViewProj,
-             uniform float4x4 ModelViewIT,
-             uniform float4x4 ModelView
-             )
-{
-  vertout OUT;
-
-  // transform vertex position into homogenous clip-space
-  OUT.HPosition = mul(ModelViewProj, IN.Position);
-  // pass through position in object space for reflectance calculation
-#if 1
-  OUT.Normal = IN.Normal;
-  OUT.TCoord = IN.TCoord;
-  OUT.OPosition = IN.Position;
-#else
-  OUT.Normal = normalize( mul( ModelViewIT, IN.Normal) );
-  OUT.OPosition = mul(ModelViewIT, IN.Position);
-#endif
-
-  OUT.Color0 = IN.Color;
-
-  return OUT;
-}
diff --git a/Utilities/MaterialLibrary/CgShaders/VertexLighting.cg b/Utilities/MaterialLibrary/CgShaders/VertexLighting.cg
deleted file mode 100644
index 59baf0b..0000000
--- a/Utilities/MaterialLibrary/CgShaders/VertexLighting.cg
+++ /dev/null
@@ -1,56 +0,0 @@
-
-struct vertin
-{
-  float4 Position: POSITION;
-  float3 Normal: NORMAL;
-};
-
-struct vertout
-{
-  float4 Position : POSITION;
-  float4 Color    : COLOR;
-};
-
-vertout main(vertin IN,
-  uniform float4x4 ModelViewProj,
-  uniform float Ka,
-  uniform float3 AmbientColor,
-  uniform float Kd,
-  uniform float3 DiffuseColor,
-  uniform float Ks,
-  uniform float Ksp,
-  uniform float3 SpecularColor,
-
-  uniform float3 lightPosition,
-  uniform float3 eyePosition,
-  uniform float3 lightDiffuseColor,
-  uniform float3 lightAmbientColor,
-  uniform float3 lightSpecularColor
-)
-{
-  float3 N = IN.Normal; //invert(IN.Normal);
-  vertout OUT;
-  OUT.Position = mul(ModelViewProj, IN.Position);
-
-  // Compute Ambient intensity.
-  float3 ambient = Ka * AmbientColor * lightAmbientColor;
-
-  // Compute Diffuse intensity.
-  float3 L = normalize(lightPosition - IN.Position);
-  float diffuseLight = max(dot(N, L), 0);
-  float3 diffuse = Kd * DiffuseColor * lightDiffuseColor * diffuseLight;
-
-  // Compute Specular intensity.
-  float3 V = normalize(eyePosition - IN.Position);
-  float3 H = normalize(L + V);
-  float specularLight = pow(max(dot(N, H), 0), Ksp);
-  if (diffuseLight <= 0) specularLight = 0;
-  float3 specular = Ks * SpecularColor * lightSpecularColor * specularLight;
-
-  OUT.Color.xyz = ambient + diffuse + specular;
-  OUT.Color.w = 1;
-
-  return OUT;
-}
-
-
diff --git a/Utilities/MaterialLibrary/CgShaders/cg_sinewave.cg b/Utilities/MaterialLibrary/CgShaders/cg_sinewave.cg
deleted file mode 100644
index 5bd8993..0000000
--- a/Utilities/MaterialLibrary/CgShaders/cg_sinewave.cg
+++ /dev/null
@@ -1,77 +0,0 @@
-/*********************************************************************NVMH3****
-Path:  NVSDK\Common\media\programs
-File:  cg_sinewave.cg
-
-Copyright NVIDIA Corporation 2002
-TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THIS SOFTWARE IS PROVIDED
-*AS IS* AND NVIDIA AND ITS SUPPLIERS DISCLAIM ALL WARRANTIES, EITHER EXPRESS
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE.  IN NO EVENT SHALL NVIDIA OR ITS SUPPLIERS
-BE LIABLE FOR ANY SPECIAL, INCIDENTAL, INDIRECT, OR CONSEQUENTIAL DAMAGES
-WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF BUSINESS PROFITS,
-BUSINESS INTERRUPTION, LOSS OF BUSINESS INFORMATION, OR ANY OTHER PECUNIARY LOSS)
-ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF NVIDIA HAS
-BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-
-Comments:
-
-******************************************************************************/
-
-struct appin
-{
-    float4 Position     : POSITION;
-};
-
-struct vertout
-{
-    float4 HPosition    : POSITION;
-    float4 Color0       : COLOR0;
-    float4 Texture0     : TEXCOORD0;
-};
-
-vertout main(appin IN,
-             uniform float4x4 ModelViewProj,    // Modelview-projection matrix
-             uniform float4x4 ModelView,        // Modelview matrix
-             uniform float4x4 ModelViewIT,      // Inverse transpose modelview matrix
-             uniform float SinTime,
-             uniform float ShowNormals)
-{
-    vertout OUT;
-
-    float4 pos = float4(IN.Position.x, IN.Position.y, 0.0, 0.0);
-    float distance = 1/rsqrt(dot(pos, pos));
-
-    float cosVal;
-    float sinVal;
-    sincos(distance * SinTime, sinVal, cosVal);
-
-    OUT.HPosition = mul(ModelViewProj, float4(IN.Position.x,
-                                              IN.Position.y,
-                                              (sinVal * 0.06),
-                                              IN.Position.w));
-
-    // Calculate vertex normal
-    float4 normal;
-    normal.x = (IN.Position.x / distance) * cosVal;
-    normal.y = (IN.Position.y / distance) * cosVal;
-    normal.z = 1.0; normal.w = 1.0;
-
-    // transform normal from model-space to view-space
-    normal = normalize(mul(ModelViewIT, normal).xyz).xyzz;
-
-    // Output vertex color
-    if (ShowNormals == 1.0)
-        OUT.Color0 = normal;
-    else
-        OUT.Color0 = float4(1.0, 1.0, 1.0, 1.0);
-
-    float4 eye = normalize(mul(ModelView, IN.Position));
-
-    // Compute reflection vector for look up into cube map
-    float4 texCoord = eye - 2 * dot(eye, normal) * normal;
-    OUT.Texture0 = texCoord;
-    OUT.Texture0.w = 1.0;
-
-    return OUT;
-}
diff --git a/Utilities/MaterialLibrary/GLSLShaders/TestAppVarFrag.glsl b/Utilities/MaterialLibrary/GLSLShaders/TestAppVarFrag.glsl
deleted file mode 100644
index 365c707..0000000
--- a/Utilities/MaterialLibrary/GLSLShaders/TestAppVarFrag.glsl
+++ /dev/null
@@ -1,110 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-
-
-uniform vec3 Color;
-uniform vec3 AmbientColor;
-uniform vec3 DiffuseColor;
-uniform vec3 SpecularColor;
-uniform vec3 EdgeColor;
-
-uniform float Ambient;
-uniform float Diffuse;
-uniform float Specular;
-uniform float SpecularPower;
-uniform float Opacity;
-
-uniform float PointSize;
-uniform float LineWidth;
-
-uniform int LineStipplePattern;
-uniform int LineStippleRepeatFactor;
-uniform int Interpolation;
-uniform int Representation;
-uniform int EdgeVisibility;
-uniform int BackfaceCulling;
-uniform int FrontfaceCulling;
-
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-uniform vec4 appVara;
-uniform vec4 appVarb;
-uniform vec4 appVarc;
-uniform vec4 appVard;
-uniform vec4 appVare;
-uniform vec4 appVarf;
-uniform vec4 appVarg;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-
-
-    if( 0
-        || appVara.x != 0.37714 || appVara.y != 0.61465 || appVara.z != 0.48399 || appVara.w != 0.68252
-        || appVarb.x != 0.03900 || appVarb.y != 0.15857 || appVarb.z != 0.57913 || appVarb.w != 0.54458
-        || appVarc.x != 0.97061 || appVarc.y != 0.86053 || appVarc.z != 0.63583 || appVarc.w != 0.51058
-        || appVard.x != 0.12885 || appVard.y != 0.91490 || appVard.z != 0.86394 || appVard.w != 0.58951
-        || appVare.x != 0.23403 || appVare.y != 0.35340 || appVare.z != 0.52559 || appVare.w != 0.77830
-        || appVarf.x != 0.19550 || appVarf.y != 0.17429 || appVarf.z != 0.89958 || appVarf.w != 0.15063
-        || appVarg.x != 0.75796 || appVarg.y != 0.48072 || appVarg.z != 0.07728 || appVarg.w != 0.16434
-      )
-      {
-      gl_FragColor = vec4 (1.0, 0.0, 0.0, 1.0);
-      }
-}
diff --git a/Utilities/MaterialLibrary/GLSLShaders/TestMatrixFrag.glsl b/Utilities/MaterialLibrary/GLSLShaders/TestMatrixFrag.glsl
deleted file mode 100644
index 097cc70..0000000
--- a/Utilities/MaterialLibrary/GLSLShaders/TestMatrixFrag.glsl
+++ /dev/null
@@ -1,89 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-uniform mat2 testMat2;
-uniform mat3 testMat3;
-uniform mat4 testMat4;
-
-
-
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-
-
-    if( 0
-
-      || testMat2[0][0]!=3.14159 || testMat2[0][1]!=6.28319
-      || testMat2[1][0]!=9.42478 || testMat2[1][1]!=12.5664
-
-      || testMat3[0][0]!=9.4248  || testMat3[0][1]!=18.8496 || testMat3[0][2]!=28.2743
-      || testMat3[1][0]!=37.6991 || testMat3[1][1]!=47.1239 || testMat3[1][2]!=56.5487
-      || testMat3[2][0]!=65.9734 || testMat3[2][1]!=75.3982 || testMat3[2][2]!=84.8230
-
-      || testMat4[0][0]!=12.5664  || testMat4[0][1]!=25.1327  || testMat4[0][2]!=37.6991  || testMat4[0][3]!=50.2655
-      || testMat4[1][0]!=62.8319  || testMat4[1][1]!=75.3982  || testMat4[1][2]!=87.9646  || testMat4[1][3]!=100.5310
-      || testMat4[2][0]!=113.0973 || testMat4[2][1]!=125.6637 || testMat4[2][2]!=138.2301 || testMat4[2][3]!=150.7964
-      || testMat4[3][0]!=163.3628 || testMat4[3][1]!=175.9292 || testMat4[3][2]!=188.4956 || testMat4[3][3]!=201.0619
-
-    )
-      {
-      gl_FragColor = vec4 (1.0, 0.0, 0.0, 1.0);
-      }
-}
diff --git a/Utilities/MaterialLibrary/GLSLShaders/TestScalarVectorFrag.glsl b/Utilities/MaterialLibrary/GLSLShaders/TestScalarVectorFrag.glsl
deleted file mode 100644
index 32f1977..0000000
--- a/Utilities/MaterialLibrary/GLSLShaders/TestScalarVectorFrag.glsl
+++ /dev/null
@@ -1,120 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-uniform float testFloat;
-uniform vec2  testVec2;
-uniform vec3  testVec3;
-uniform vec4  testVec4;
-
-uniform int   testInt;
-uniform ivec2  testIVec2;
-uniform ivec3  testIVec3;
-uniform ivec4  testIVec4;
-
-uniform mat2 testMat2;
-uniform mat3 testMat3;
-uniform mat4 testMat4;
-
-struct tStruct {
-  float f;
-  vec2 f2;
-  vec3 f3;
-  vec4 f4;
-};
-
-uniform tStruct tStruct2;
-
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-
-
-    if( 0
-      || testFloat!=1.0
-      || testVec2.x!=1.0 || testVec2.y!=2.0
-      || testVec3.x!=1.0 || testVec3.y!=2.0 || testVec3.z!=3.0
-      || testVec4.x!=1.0 || testVec4.y!=2.0 || testVec4.z!=3.0 || testVec4.w!=4.0
-
-      || testInt!=1
-      || testIVec2.x!=1 || testIVec2.y!=2
-      || testIVec3.x!=1 || testIVec3.y!=2 || testIVec3.z!=3
-      || testIVec4.x!=1 || testIVec4.y!=2 || testIVec4.z!=3 || testIVec4.w!=4
-
-      || testMat2[0][0]!=3.14159 || testMat2[0][1]!=6.28319
-      || testMat2[1][0]!=9.42478 || testMat2[1][1]!=12.5664
-
-      || testMat3[0][0]!=9.4248  || testMat3[0][1]!=18.8496 || testMat3[0][2]!=28.2743
-      || testMat3[1][0]!=37.6991 || testMat3[1][1]!=47.1239 || testMat3[1][2]!=56.5487
-      || testMat3[2][0]!=65.9734 || testMat3[2][1]!=75.3982 || testMat3[2][2]!=84.8230
-
-      || testMat4[0][0]!=12.5664  || testMat4[0][1]!=25.1327  || testMat4[0][2]!=37.6991  || testMat4[0][3]!=50.2655
-      || testMat4[1][0]!=62.8319  || testMat4[1][1]!=75.3982  || testMat4[1][2]!=87.9646  || testMat4[1][3]!=100.5310
-      || testMat4[2][0]!=113.0973 || testMat4[2][1]!=125.6637 || testMat4[2][2]!=138.2301 || testMat4[2][3]!=150.7964
-      || testMat4[3][0]!=163.3628 || testMat4[3][1]!=175.9292 || testMat4[3][2]!=188.4956 || testMat4[3][3]!=201.0619
-
-      || tStruct2.f!=2.0
-      || tStruct2.f2.x!=2.0 || tStruct2.f2.y!=2.1
-      || tStruct2.f3.x!=2.0 || tStruct2.f3.y!=2.1 || tStruct2.f3.z!=2.2
-      || tStruct2.f4.x!=2.0 || tStruct2.f4.y!=2.1 || tStruct2.f4.z!=2.2 || tStruct2.f4.w!=2.3
-
-    )
-      {
-      gl_FragColor = vec4 (1.0, 0.0, 0.0, 1.0);
-      }
-}
diff --git a/Utilities/MaterialLibrary/GLSLShaders/TestVertex.glsl b/Utilities/MaterialLibrary/GLSLShaders/TestVertex.glsl
deleted file mode 100644
index 69feef9..0000000
--- a/Utilities/MaterialLibrary/GLSLShaders/TestVertex.glsl
+++ /dev/null
@@ -1,62 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-uniform vec3  LightPosition;  // (0.0, 10.0, 4.0)
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-struct light
-{
-  uniform vec3 position;
-  uniform vec3 color;
-};
-
-void main(void)
-{
-  uniform light l1;
-  uniform light l2;
-    vec3 ecPos      = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPos);
-    ReflectVec      = normalize(reflect(-lightVec, tnorm));
-    ViewVec         = normalize(-ecPos);
-    NdotL           = (dot(lightVec, tnorm) + 1.0) * 0.5;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/GLSLShaders/TestVtkPropertyFrag.glsl b/Utilities/MaterialLibrary/GLSLShaders/TestVtkPropertyFrag.glsl
deleted file mode 100644
index 1bf55d2..0000000
--- a/Utilities/MaterialLibrary/GLSLShaders/TestVtkPropertyFrag.glsl
+++ /dev/null
@@ -1,113 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-
-
-uniform vec3 Color;
-uniform vec3 AmbientColor;
-uniform vec3 DiffuseColor;
-uniform vec3 SpecularColor;
-uniform vec3 EdgeColor;
-
-uniform float Ambient;
-uniform float Diffuse;
-uniform float Specular;
-uniform float SpecularPower;
-uniform float Opacity;
-
-uniform float PointSize;
-uniform float LineWidth;
-
-uniform int LineStipplePattern;
-uniform int LineStippleRepeatFactor;
-uniform int Interpolation;
-uniform int Representation;
-uniform int EdgeVisibility;
-uniform int BackfaceCulling;
-uniform int FrontfaceCulling;
-
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-
-
-    if( 0
-      || AmbientColor.x!=0.75 || AmbientColor.y!=0.751 || AmbientColor.z!=0.752
-      || DiffuseColor.x!=0.61 || DiffuseColor.y!=0.62 || DiffuseColor.z!=0.006
-      || SpecularColor.x!=0.001 || SpecularColor.y!=0.002 || SpecularColor.z!=0.61
-      || EdgeColor.x!=0.1 || EdgeColor.y!=0.2 || EdgeColor.z!=0.3
-      || Ambient!=0.45
-      || Diffuse!=0.451
-      || Specular!=0.4
-      || SpecularPower!=1.0
-      || Opacity!=1.0
-      || PointSize!=1.0
-      || LineWidth!=1.0
-      || LineStipplePattern!=0
-      || LineStippleRepeatFactor!=1
-      || Interpolation!=1
-      || Representation!=2
-      || EdgeVisibility!=0
-      || BackfaceCulling!=0
-      || FrontfaceCulling!=0
-    )
-      {
-      gl_FragColor = vec4 (1.0, 0.0, 0.0, 1.0);
-      }
-}
diff --git a/Utilities/MaterialLibrary/GLSLShaders/Twisted.glsl b/Utilities/MaterialLibrary/GLSLShaders/Twisted.glsl
deleted file mode 100644
index 0de3e45..0000000
--- a/Utilities/MaterialLibrary/GLSLShaders/Twisted.glsl
+++ /dev/null
@@ -1,39 +0,0 @@
-uniform float Rate;
-
-void main()
-{
-  float angle = gl_Vertex[2];
-  float rate = Rate;
-  vec4 newPos;
-  newPos[0] = cos(angle* rate) * gl_Vertex[0] + sin(angle* rate) *gl_Vertex[1];
-  newPos[1] = -sin(angle* rate) * gl_Vertex[0] + cos(angle* rate) *gl_Vertex[1];
-  newPos[2] = gl_Vertex[2];
-  newPos[3] = gl_Vertex[3];
-
-  vec3 newNormal;
-  newNormal[0] = cos(angle* rate) * gl_Normal[0] + sin(angle* rate) *gl_Normal[1];
-  newNormal[1] = -sin(angle* rate) * gl_Normal[0] + cos(angle* rate) *gl_Normal[1];
-  newNormal[2] = gl_Normal[2];
-
-  vec4 ambient = gl_FrontMaterial.ambient * gl_LightSource[0].ambient;
-  vec3 normal = normalize(gl_NormalMatrix * newNormal);
-  vec3 lightDir = normalize(vec3(gl_LightSource[0].position));
-  float NdotL = max(dot(normal, lightDir), 0.0);
-
-  vec4 col = vec4(0,0.5,0.5,1);
-  vec4 diffuse = col * gl_LightSource[0].diffuse * NdotL;
-  vec4 specular = vec4(0.0); // initialized so the value is defined when NdotL <= 0.0
-
-  if (NdotL > 0.0)
-    {
-    float NdotHV = max(dot(normal, gl_LightSource[0].halfVector.xyz), 0.0);
-    specular = gl_FrontMaterial.specular * gl_LightSource[0].specular *
-      pow(NdotHV, gl_FrontMaterial.shininess);
-    }
-  gl_FrontColor = diffuse + ambient + specular;
-
-  gl_Position = gl_ModelViewProjectionMatrix * newPos;
-
-
-
-}
diff --git a/Utilities/MaterialLibrary/Materials/CgLighting.xml b/Utilities/MaterialLibrary/Materials/CgLighting.xml
deleted file mode 100644
index 8a42899..0000000
--- a/Utilities/MaterialLibrary/Materials/CgLighting.xml
+++ /dev/null
@@ -1,28 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="CgTestMaterial">
-  <Property name="Property1">
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 0.5 0.5" />
-  </Property>
-
-  <Shader scope="Vertex" name="CGVertexLighting" location="Library" language="Cg"
-    entry="main" args="-DVERTEX_PROGRAM">
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2"
-      value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY">
-    </MatrixUniform>
-
-    <PropertyUniform name="Ka" value="Ambient" />
-    <PropertyUniform name="AmbientColor" value="AmbientColor" />
-    <PropertyUniform name="Kd" value="Diffuse" />
-    <PropertyUniform name="DiffuseColor" value="DiffuseColor" />
-    <PropertyUniform name="Ks" value="Specular" />
-    <PropertyUniform name="Ksp" value="SpecularPower" />
-    <PropertyUniform name="SpecularColor" value="SpecularColor" />
-
-    <LightUniform name="lightPosition" value="Position" />
-    <LightUniform name="lightDiffuseColor" value="DiffuseColor" />
-    <LightUniform name="lightSpecularColor" value="SpecularColor" />
-    <LightUniform name="lightAmbientColor" value="AmbientColor" />
-
-    <CameraUniform name="eyePosition" value="Position" />
-  </Shader>
-</Material>
diff --git a/Utilities/MaterialLibrary/Materials/CgSinewave.xml b/Utilities/MaterialLibrary/Materials/CgSinewave.xml
deleted file mode 100644
index c4e973c..0000000
--- a/Utilities/MaterialLibrary/Materials/CgSinewave.xml
+++ /dev/null
@@ -1,31 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1">
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.85 0.05 0.15">
-    </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.15 0.05 0.85">
-    </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.15 0.05 0.85">
-    </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="CGcg_sinewave"
-    location="Library"
-    language="Cg" entry="main" args="-DVERTEX_PROGRAM">
-
-    <MatrixUniform name="ModelView" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_IDENTITY">
-    </MatrixUniform>
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY">
-    </MatrixUniform>
-    <MatrixUniform name="ModelViewIT" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE">
-    </MatrixUniform>
-
-    <PropertyUniform type="Member" value="MTime" name="SinTime" >
-    </PropertyUniform>
-    <Uniform type="float" name="ShowNormals" number_of_elements="1" value="1.0">
-    </Uniform>
-
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Materials/CgTwisted.xml b/Utilities/MaterialLibrary/Materials/CgTwisted.xml
deleted file mode 100644
index 0773491..0000000
--- a/Utilities/MaterialLibrary/Materials/CgTwisted.xml
+++ /dev/null
@@ -1,30 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="0">
-  <Property name="Property1">
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 0.5 0.5" />
-  </Property>
-
-  <Shader scope="Vertex" name="CGTwisted" location="Library" language="Cg" entry="main" args="-DVERTEX_PROGRAM">
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="ModelViewIT" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE"> </MatrixUniform>
-    <!--
-    <ApplicationUniform name="Rate" value="Rate"> </ApplicationUniform>
-    -->
-    <Uniform name="Rate" value="1.0" type="double" number_of_elements="1" />
-    <PropertyUniform name="Ka" value="Ambient" />
-    <PropertyUniform name="AmbientColor" value="AmbientColor" />
-    <PropertyUniform name="Kd" value="Diffuse" />
-    <PropertyUniform name="DiffuseColor" value="DiffuseColor" />
-    <PropertyUniform name="Ks" value="Specular" />
-    <PropertyUniform name="Ksp" value="SpecularPower" />
-    <PropertyUniform name="SpecularColor" value="SpecularColor" />
-
-    <LightUniform name="lightPosition" value="Position" />
-    <LightUniform name="lightSpecularColor" value="SpecularColor" />
-    <LightUniform name="lightAmbientColor" value="AmbientColor" />
-
-    <Uniform name="lightDiffuseColor" type="double" number_of_elements="3" value="0 1 1" />
-    <CameraUniform name="eyePosition" value="Position" />
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Materials/GLSLTwisted.xml b/Utilities/MaterialLibrary/Materials/GLSLTwisted.xml
deleted file mode 100644
index 229e4a3..0000000
--- a/Utilities/MaterialLibrary/Materials/GLSLTwisted.xml
+++ /dev/null
@@ -1,15 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Twisted">
-  <Property name="Property1">
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 0.5 0.5" />
-  </Property>
-
-  <Shader scope="Vertex" name="GLSLTwisted" location="Library"
-    language="GLSL" entry="main">
-    <ApplicationUniform name="Rate" value="Rate"> </ApplicationUniform>
-  </Shader>
-  <!--
-  <Shader scope="Fragment" name="GLSLTwisted" location="c:/temp/GLSLTwistedFrag.glsl" language="GLSL" entry="main">
-  </Shader>
-    -->
-</Material>
diff --git a/Utilities/MaterialLibrary/ProcessShader.cxx b/Utilities/MaterialLibrary/ProcessShader.cxx
deleted file mode 100644
index cbc7c3a..0000000
--- a/Utilities/MaterialLibrary/ProcessShader.cxx
+++ /dev/null
@@ -1,231 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    ProcessShader.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "vtkObject.h"
-
-#include <string>
-#include <vector>
-#include <vtksys/SystemTools.hxx>
-#include <vtksys/RegularExpression.hxx>
-#include <vtksys/ios/fstream>
-#include <vtksys/ios/sstream>
-
-class Output
-{
-public:
-  Output()
-    {
-    this->MaxLen = 16000;
-    this->CurrentPosition = 0;
-    }
-  ~Output(){}
-  Output(const Output&){}
-  void operator=(const Output&){}
-  vtksys_ios::ostringstream Stream;
-
-  int MaxLen;
-  vtksys_ios::streamoff CurrentPosition;
-  int Count;
-  std::string Prefix;
-  std::string Suffix;
-
-  void PrintHeader(const char* title, const char* file)
-    {
-    this->Stream << endl
-      << "// From file " << file << endl
-      << "static const char* " << this->Prefix.c_str() << title << this->Suffix.c_str() << this->Count
-      << " =" << endl;
-    this->CurrentPosition = this->Stream.tellp();
-    }
-
-  void CheckSplit(const char* title, const char* file, int force=0)
-    {
-    if ( (static_cast<long>(this->Stream.tellp()) - this->CurrentPosition) > this->MaxLen ||
-      force )
-      {
-      this->Count ++;
-      this->Stream << ";" << endl;
-      this->PrintHeader(title, file);
-      }
-    }
-
-  int ProcessFile(const char* file, const char* title)
-    {
-    vtksys_ios::ifstream ifs(file);
-    if ( !ifs )
-      {
-      cout << "Cannot open file: " << file << endl;
-      return 0;
-      }
-    int ch;
-    int in_ifdef = 0;
-
-    this->Count = 0;
-    this->PrintHeader(title, file);
-    this->Stream << "\"";
-
-    std::string line;
-    std::string::size_type cc;
-
-    vtksys::RegularExpression reIfDef("^[ \r\n\t]*#[ \r\n\t]*if");
-    vtksys::RegularExpression reElse("^[ \r\n\t]*#[ \r\n\t]*el(se|if)");
-    vtksys::RegularExpression reEndif("^[ \r\n\t]*#[ \r\n\t]*endif");
-    int res = 0;
-
-    while ( vtksys::SystemTools::GetLineFromStream(ifs, line) )
-      {
-      res ++;
-      int regex = 0;
-      int ifdef_line = 0;
-      if ( reIfDef.find(line) )
-        {
-        in_ifdef ++;
-        regex = 1;
-        ifdef_line = 1;
-        }
-      else if ( reElse.find(line) )
-        {
-        regex = 1;
-        }
-      else if ( reEndif.find(line) )
-        {
-        in_ifdef --;
-        regex = 1;
-        }
-      if ( regex )
-        {
-        this->Stream << "\\n\"" << endl;
-        if ( ifdef_line )
-          {
-          this->CheckSplit(title, file, 1);
-          }
-        this->Stream << line.c_str() << endl;
-        if ( !ifdef_line )
-          {
-          this->CheckSplit(title, file);
-          }
-        this->Stream << "\"";
-        }
-      else
-        {
-        for ( cc = 0; cc < line.size(); cc ++ )
-          {
-          ch = line[cc];
-          if ( ch == '\\' )
-            {
-            this->Stream << "\\\\";
-            }
-          else if ( ch == '\"' )
-            {
-            this->Stream << "\\\"";
-            }
-          else
-            {
-            this->Stream << static_cast<unsigned char>(ch);
-            }
-          }
-        this->Stream << "\\n\"" << endl;
-        if ( !in_ifdef )
-          {
-          this->CheckSplit(title, file);
-          }
-        this->Stream << "\"";
-        }
-      }
-    this->Stream << "\\n\";" << endl;
-
-    if ( !res )
-      {
-      return 0;
-      }
-    return this->Count+1;
-    }
-};
-
-int main(int argc, char* argv[])
-{
-  if ( argc < 4 )
-    {
-    cerr << "Usage: " << argv[0] << " <output-file> <prefix> <suffix> <getmethod> <modules>..."
-      << endl;
-    return 1;
-    }
-  Output ot;
-  ot.Prefix = argv[2];
-  ot.Suffix = argv[3];
-  ot.Stream << "// Loadable shader code" << endl
-    << "//" << endl
-    << "// Generated by " << argv[0] << endl
-    << "//" << endl
-    << "#ifndef __" << ot.Prefix.c_str() << "_h" << endl
-    << "#define __" << ot.Prefix.c_str() << "_h" << endl
-    << endl;
-
-  std::string output = argv[1];
-
-  int cc;
-  for ( cc = 5; cc < argc; cc ++ )
-    {
-    std::string fname = argv[cc];
-    std::string moduleName;
-    moduleName = vtksys::SystemTools::GetFilenameWithoutExtension(fname);
-
-    cout << "-- Generate module: " << moduleName << endl;
-
-    int num = ot.ProcessFile(fname.c_str(), moduleName.c_str());
-    if ( num == 0 )
-      {
-      cout << "Problem generating header file from XML file: " << fname.c_str() << endl;
-      return 1;
-      }
-    int kk;
-    vtksys_ios::ostringstream createstring;
-    vtksys_ios::ostringstream lenstr;
-    for ( kk = 0; kk < num; kk ++ )
-      {
-      lenstr << endl
-        << "    + static_cast<int>(strlen(" << ot.Prefix.c_str()
-        << moduleName.c_str() << ot.Suffix.c_str() << kk << "))";
-      createstring << "  strcat(res, " << ot.Prefix.c_str() << moduleName.c_str() << ot.Suffix.c_str()
-        << kk << ");" << endl;
-      }
-    ot.Stream
-      << "// Get single string" << endl
-      << "char* " << ot.Prefix.c_str() << moduleName.c_str() << argv[4] << "()" << endl
-      << "{" << endl
-      << "  int len = ( 0"
-      << lenstr.str()
-      << " );" << endl
-      << "  char* res = new char[ len + 1];" << endl
-      << "  res[0] = 0;" << endl
-      << createstring.str()
-      << "  return res;" << endl
-      << "}" << endl << endl;
-    }
-
-  ot.Stream
-    << endl << endl
-    << "#endif" << endl;
-  ot.Stream << ends;
-  FILE* fp = fopen(output.c_str(), "w");
-  if ( !fp )
-    {
-    cout << "Cannot open output file: " << output.c_str() << endl;
-    return 1;
-    }
-  fprintf(fp, "%s", ot.Stream.str().c_str());
-  fclose(fp);
-  return 0;
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/3DLabsLicense.txt b/Utilities/MaterialLibrary/Repository/OrangeBook/3DLabsLicense.txt
deleted file mode 100644
index f02d5a2..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/3DLabsLicense.txt
+++ /dev/null
@@ -1,37 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06Brick.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06Brick.xml
deleted file mode 100644
index c9f3904..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06Brick.xml
+++ /dev/null
@@ -1,56 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-
-
-  <Property name="Property1">
-    <!--
-    <Member name="Color" number_of_elements="3" type="Double" value="0.75 0.75 0.75">
-    </Member>
-    -->
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.0 0.0 0.15"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.65 0.55 0.15"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.0 0.0 0.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.5"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.6"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="0.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <!-- Setting this to '0' here causes the object to disappear in the scene
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="1">
-    </Member>
-    -->
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="brickVert" location="OrangeBook/Ch06/Ch06BrickVert.glsl" language="GLSL" entry="main" args="-DVERTEX_PROGRAM">
-
-    <Uniform type="vec3" name="LightPosition" number_of_elements="3" value="0.0 10.0 4.0"> </Uniform>
-
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="brickFrag" location="OrangeBook/Ch06/Ch06BrickFrag.glsl" language="GLSL" entry="main" args="-DFRAGMENT_PROGRAM">
-
-    <Uniform type="vec3" name="BrickColor" number_of_elements="3" value="0.75 0.15 0.15"> </Uniform>
-    <Uniform type="vec3" name="MortarColor" number_of_elements="3" value="0.75 0.75 0.75"> </Uniform>
-    <Uniform type="vec2" name="BrickSize" number_of_elements="2" value="0.75 0.75"> </Uniform>
-    <Uniform type="vec2" name="BrickPct" number_of_elements="2" value="0.75 0.75"> </Uniform>
-
-  </Shader>
-
-
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06BrickFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06BrickFrag.glsl
deleted file mode 100644
index eb8a801..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06BrickFrag.glsl
+++ /dev/null
@@ -1,37 +0,0 @@
-// Fragment shader for procedural bricks
-//
-// Authors: Dave Baldwin, Steve Koren, Randi Rost
-//          based on a shader by Darwyn Peachey
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3  BrickColor;
-uniform vec3  MortarColor;
-uniform vec2  BrickSize;
-uniform vec2  BrickPct;
-
-varying vec2  MCposition;
-varying float LightIntensity;
-
-void main(void)
-{
-    vec3 color;
-    vec2 position;
-    vec2 useBrick;
-
-    position = MCposition / BrickSize;
-
-    if (fract(position.y * 0.5) > 0.5)
-        position.x += 0.5;
-
-    position = fract(position);
-
-    useBrick = step(position, BrickPct);
-
-    color  = mix(MortarColor, BrickColor, useBrick.x * useBrick.y);
-    color *= LightIntensity;
-    gl_FragColor = vec4(color, 1.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06BrickVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06BrickVert.glsl
deleted file mode 100644
index c67728f..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch06/Ch06BrickVert.glsl
+++ /dev/null
@@ -1,40 +0,0 @@
-//
-// Vertex shader for procedural bricks
-//
-// Authors: Dave Baldwin, Steve Koren, Randi Rost
-//          based on a shader by Darwyn Peachey
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3 LightPosition;
-
-const float SpecularContribution = 0.3;
-const float DiffuseContribution  = 1.0 - SpecularContribution;
-
-varying float LightIntensity;
-varying vec2  MCposition;
-
-void main(void)
-{
-    vec3 ecPosition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPosition);
-    vec3 reflectVec = reflect(-lightVec, tnorm);
-    vec3 viewVec    = normalize(-ecPosition);
-    float diffuse   = max(dot(lightVec, tnorm), 0.0);
-    float spec      = 0.0;
-
-    if (diffuse > 0.0)
-    {
-        spec = max(dot(reflectVec, viewVec), 0.0);
-        spec = pow(spec, 16.0);
-    }
-
-    LightIntensity  = DiffuseContribution * diffuse + SpecularContribution * spec;
-
-    MCposition      = gl_Vertex.xy;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTex.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTex.xml
deleted file mode 100644
index c6a2a41..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTex.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="UnnamedMaterial" number_of_properties="1" number_of_vertex_shaders="1"  number_of_fragment_shaders="1" >
-
-  <Property name="Property1">
-    <!--
-    <Texture dimension="2D" format="bmp" location="Textures/texture.bmp"> </Texture>
-    <Texture dimension="2D" format="bmp" location="Textures/masonry.bmp"> </Texture>
-    -->
-    <Texture name="earthTexture" type="2D" format="ppm" location="Textures/earth.ppm"> </Texture>
-  </Property>
-
-  <Shader scope="Vertex" name="Ch10Earth1DTexVertex" location="OrangeBook/Ch10/Ch10Earth1DTexVert.glsl"  language="GLSL" entry="main">
-    <LightUniform value="Position" name="LightPosition"> </LightUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="Ch10Earth1DTexFrag" location="OrangeBook/Ch10/Ch10Earth1DTexFrag.glsl"  language="GLSL" entry="main">
-    <!--
-    <SamplerUniform type="sampler2D" name="EarthTexture" value="1"> </SamplerUniform>
-    -->
-    <Uniform type="int" name="EarthTexture" number_of_elements="1" value="0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTexFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTexFrag.glsl
deleted file mode 100644
index 5d8e229..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTexFrag.glsl
+++ /dev/null
@@ -1,18 +0,0 @@
-//
-// Fragment shader for drawing the earth with one texture
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-uniform sampler2D EarthTexture;
-
-void main (void)
-{
-    vec3 lightColor = vec3(texture2D(EarthTexture, gl_TexCoord[0].st));
-    gl_FragColor    = vec4(lightColor * LightIntensity, 1.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTexVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTexVert.glsl
deleted file mode 100644
index 9fd03a1..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth1DTexVert.glsl
+++ /dev/null
@@ -1,36 +0,0 @@
-//
-// Vertex shader for drawing the earth with one texture
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-uniform vec3 LightPosition;
-
-const float specularContribution = 0.1;
-const float diffuseContribution  = 1.0 - specularContribution;
-
-void main(void)
-{
-    vec3 ecPosition = vec3(gl_ModelViewMatrix * gl_Vertex);
-    vec3 ecLightPosition = vec3(gl_ModelViewMatrix * vec4(LightPosition,1.0));
-
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(ecLightPosition - ecPosition);
-
-    vec3 reflectVec = reflect(-lightVec, tnorm);
-    vec3 viewVec    = normalize(-ecPosition);
-
-    float spec      = clamp(dot(reflectVec, viewVec), 0.0, 1.0);
-    spec            = pow(spec, 16.0);
-
-    LightIntensity  = diffuseContribution * max(dot(lightVec, tnorm), 0.0)
-                      + specularContribution * spec;
-
-    gl_TexCoord[0]  = gl_MultiTexCoord0;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTex.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTex.xml
deleted file mode 100644
index 9f25682..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTex.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <!-- Setting this to '0' here causes the object to disappear in the scene
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="1">
-    </Member>
-    -->
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-
-    <Texture name="masonry" type="2D" format="bmp" location="Textures/masonry.bmp"> </Texture>
-    <Texture name="earth" type="2D" format="ppm" location="Textures/earth.ppm"> </Texture>
-    <Texture name="texture" type="2D" format="bmp" location="Textures/texture.bmp"> </Texture>
-
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch10/Ch10Earth3DTexVert.glsl" location="OrangeBook/Ch10/Ch10Earth3DTexVert.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec3" name="LightPosition" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch10/Ch10Earth3DTexFrag.glsl" location="OrangeBook/Ch10/Ch10Earth3DTexFrag.glsl"  language="GLSL" entry="main">
-    <!--
-    <SamplerUniform type="sampler2D" name="EarthDay" value="0"> </SamplerUniform>
-    <SamplerUniform type="sampler2D" name="EarthNight" value="1"> </SamplerUniform>
-    <SamplerUniform type="sampler2D" name="EarthCloudGloss" value="2"> </SamplerUniform>
-    -->
-    <Uniform type="int" name="EarthDay" number_of_elements="1" value="0"> </Uniform>
-    <Uniform type="int" name="EarthNight" number_of_elements="1" value="1"> </Uniform>
-    <Uniform type="int" name="EarthCloudGloss" number_of_elements="1" value="2"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTexFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTexFrag.glsl
deleted file mode 100644
index 365ff56..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTexFrag.glsl
+++ /dev/null
@@ -1,41 +0,0 @@
-//
-// Fragment shader for drawing the earth with multiple textures
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform sampler2D EarthDay;
-uniform sampler2D EarthNight;
-uniform sampler2D EarthCloudGloss;
-
-varying float Diffuse;
-varying vec3  Specular;
-varying vec2  TexCoord;
-
-void main (void)
-{
-    // Monochrome cloud cover value will be in clouds.r
-    // Gloss value will be in clouds.g
-    // clouds.b will be unused
-
-    vec3 clouds    = texture2D(EarthCloudGloss, TexCoord).stp;
-    vec3 daytime   = (texture2D(EarthDay, TexCoord).stp * Diffuse +
-                          Specular * clouds.g) * (1.0 - clouds.r) +
-                          clouds.r * Diffuse;
-    vec3 nighttime = texture2D(EarthNight, TexCoord).stp *
-                         (1.0 - clouds.r);
-
-    vec3 color = daytime;
-
-    if (Diffuse < -0.1)
-        color = nighttime;
-
-    if (abs(Diffuse) <= 0.1)
-        color = mix(nighttime, daytime, (Diffuse + 0.1) * 5.0);
-
-    gl_FragColor = vec4(color, 1.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTexVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTexVert.glsl
deleted file mode 100644
index 537dd22..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10Earth3DTexVert.glsl
+++ /dev/null
@@ -1,33 +0,0 @@
-//
-// Vertex shader for drawing the earth with multiple textures
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float Diffuse;
-varying vec3  Specular;
-varying vec2  TexCoord;
-
-uniform vec3 LightPosition;
-
-void main(void)
-{
-    vec3 ecPosition = vec3(gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPosition);
-    vec3 reflectVec = reflect(-lightVec, tnorm);
-    vec3 viewVec    = normalize(-ecPosition);
-
-    float spec      = clamp(dot(reflectVec, viewVec), 0.0, 1.0);
-    spec            = pow(spec, 8.0);
-    Specular        = vec3(spec) * vec3(1.0, 0.941, 0.898) * 0.3;
-
-    Diffuse         = max(dot(lightVec, tnorm), 0.0);
-
-    TexCoord        = gl_MultiTexCoord0.st;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMap.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMap.xml
deleted file mode 100644
index dd3ea84..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMap.xml
+++ /dev/null
@@ -1,23 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-  <Property name="Property1">
-    <Texture name="masonryTexture" type="2D" format="bmp" location="Textures/masonry.bmp"> </Texture>
-  <!--
-    <Texture dimension="2D" format="bmp" location="OrangeBook/Ch10/NewDay.bmp"> </Member>
-  -->
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch10/Ch10EnvMapVert.glsl" location="OrangeBook/Ch10/Ch10EnvMapVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPos"> </LightUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch10/Ch10EnvMapFrag.glsl" location="OrangeBook/Ch10/Ch10EnvMapFrag.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec3" name="BaseColor" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="MixRatio" number_of_elements="1" value="1.0"> </Uniform>
-    <!--
-    <SamplerUniform type="sampler2D" name="EnvMap" value="0"> </SamplerUniform>
-    -->
-    <Uniform type="int" name="EnvMap" number_of_elements="1" value="0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMapFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMapFrag.glsl
deleted file mode 100644
index f49a809..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMapFrag.glsl
+++ /dev/null
@@ -1,62 +0,0 @@
-//
-// Fragment shader for environment mapping with an
-// equirectangular 2D texture
-//
-// Authors: John Kessenich, Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-const vec3 Xunitvec = vec3(1.0, 0.0, 0.0);
-const vec3 Yunitvec = vec3(0.0, 1.0, 0.0);
-
-uniform vec3  BaseColor;
-uniform float MixRatio;
-
-uniform sampler2D EnvMap;
-
-varying vec3  Normal;
-varying vec3  EyeDir;
-varying float LightIntensity;
-
-void main (void)
-{
-    // Compute reflection vector
-
-    vec3 reflectDir = reflect(EyeDir, Normal);
-
-    // Compute altitude and azimuth angles
-
-    vec2 index;
-
-    index.y = dot(normalize(reflectDir), Yunitvec);
-    reflectDir.y = 0.0;
-    index.x = dot(normalize(reflectDir), Xunitvec) * 0.5;
-
-    // Translate index values into proper range
-
-    if (reflectDir.z >= 0.0)
-        index = (index + 1.0) * 0.5;
-    else
-    {
-        index.t = (index.t + 1.0) * 0.5;
-        index.s = (-index.s) * 0.5 + 1.0;
-    }
-
-    // if reflectDir.z >= 0.0, s will go from 0.25 to 0.75
-    // if reflectDir.z <  0.0, s will go from 0.75 to 1.25, and
-    // that's OK, because we've set the texture to wrap.
-
-    // Do a lookup into the environment map.
-
-    vec3 envColor = vec3(texture2D(EnvMap, index));
-
-    // Add lighting to base color and mix
-
-    vec3 base = LightIntensity * BaseColor;
-    envColor = mix(envColor, base, MixRatio);
-
-    gl_FragColor = vec4(envColor, 1.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMapVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMapVert.glsl
deleted file mode 100644
index 377e6bc..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch10/Ch10EnvMapVert.glsl
+++ /dev/null
@@ -1,25 +0,0 @@
-//
-// Vertex shader for environment mapping with an
-// equirectangular 2D texture
-//
-// Authors: John Kessenich, Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying vec3  Normal;
-varying vec3  EyeDir;
-varying float LightIntensity;
-
-uniform vec3  LightPos;
-
-void main(void)
-{
-    gl_Position    = ftransform();
-    Normal         = normalize(gl_NormalMatrix * gl_Normal);
-    vec4 pos       = gl_ModelViewMatrix * gl_Vertex;
-    EyeDir         = pos.xyz;
-    LightIntensity = max(dot(normalize(LightPos - EyeDir), Normal), 0.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/BumpMap.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/BumpMap.xml
deleted file mode 100644
index 5574a54..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/BumpMap.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.1 0.1 0.8"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.9"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch11/Ch11BumpMapVert.glsl" location="OrangeBook/Ch11/Ch11BumpMapVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position"  name="LightPosition"> </LightUniform>
-</Shader>
-
-
-
-<Shader scope="Fragment" name="OrangeBook/Ch11/Ch11BumpMapFrag.glsl" location="OrangeBook/Ch11/Ch11BumpMapFrag.glsl"  language="GLSL" entry="main">
-
-    <PropertyUniform  value="AmbientColor" name="SurfaceColor"> </PropertyUniform>
-    <PropertyUniform  value="Specular" name="SpecularFactor"> </PropertyUniform>
-
-    <Uniform type="float" name="BumpDensity" number_of_elements="1" value="15.0"> </Uniform>
-    <Uniform type="float" name="BumpSize" number_of_elements="1" value="0.15"> </Uniform>
-
-</Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11BumpMapFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11BumpMapFrag.glsl
deleted file mode 100644
index f42e32f..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11BumpMapFrag.glsl
+++ /dev/null
@@ -1,41 +0,0 @@
-//
-// Fragment shader for procedural bumps
-//
-// Authors: Randi Rost, John Kessenich
-//
-// Copyright (c) 2002-2005 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying vec3 LightDir;
-varying vec3 EyeDir;
-
-uniform vec3  SurfaceColor;    // = (0.7, 0.6, 0.18)
-uniform float BumpDensity;     // = 16.0
-uniform float BumpSize;        // = 0.15
-uniform float SpecularFactor;  // = 0.5
-
-void main (void)
-{
-    vec3 litColor;
-    vec2 c = BumpDensity * gl_TexCoord[0].st;
-    vec2 p = fract(c) - vec2(0.5);
-
-    float d, f;
-    d = p.x * p.x + p.y * p.y;
-    f = 1.0 / sqrt(d + 1.0);
-
-    if (d >= BumpSize)
-        { p = vec2(0.0); f = 1.0; }
-
-    vec3 normDelta = vec3(p.x, p.y, 1.0) * f;
-    litColor = SurfaceColor * max(dot(normDelta, LightDir), 0.0);
-    vec3 reflectDir = reflect(LightDir, normDelta);
-
-    float spec = max(dot(EyeDir, reflectDir), 0.0);
-    spec *= SpecularFactor;
-    litColor = min(litColor + spec, vec3(1.0));
-
-    gl_FragColor = vec4(litColor, 1.0);
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11BumpMapVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11BumpMapVert.glsl
deleted file mode 100644
index 9d6f49a..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11BumpMapVert.glsl
+++ /dev/null
@@ -1,42 +0,0 @@
-//
-// Vertex shader for procedural bumps
-//
-// Authors: Randi Rost, John Kessenich
-//
-// Copyright (c) 2002-2005 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying vec3 LightDir;
-varying vec3 EyeDir;
-
-uniform vec3 LightPosition;
-
-// VTK can't set Tangent, use Normal instead
-//attribute vec3 Tangent;
-attribute vec3 Normal;
-
-void main(void)
-{
-    EyeDir         = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    gl_Position    = ftransform();
-    gl_TexCoord[0] = gl_MultiTexCoord0;
-
-    vec3 n = normalize(gl_NormalMatrix * gl_Normal);
-    // VTK can't set Tangent, use Normal instead
-    //vec3 t = normalize(gl_NormalMatrix * Tangent);
-    vec3 t = normalize(gl_NormalMatrix * Normal);
-    vec3 b = cross(n, t);
-
-    vec3 v;
-    v.x = dot(LightPosition, t);
-    v.y = dot(LightPosition, b);
-    v.z = dot(LightPosition, n);
-    LightDir = normalize(v);
-
-    v.x = dot(EyeDir, t);
-    v.y = dot(EyeDir, b);
-    v.z = dot(EyeDir, n);
-    EyeDir = normalize(v);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11LatticeFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11LatticeFrag.glsl
deleted file mode 100644
index a96136b..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11LatticeFrag.glsl
+++ /dev/null
@@ -1,27 +0,0 @@
-//
-// Fragment shader for testing the discard command
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying vec3  DiffuseColor;
-varying vec3  SpecularColor;
-
-uniform vec2  Scale;
-uniform vec2  Threshold;
-uniform vec3  SurfaceColor;
-
-void main (void)
-{
-    float ss = fract(gl_TexCoord[0].s * Scale.s);
-    float tt = fract(gl_TexCoord[0].t * Scale.t);
-
-    if ((ss > Threshold.s) && (tt > Threshold.t)) discard;
-
-    vec3 finalColor = SurfaceColor * DiffuseColor + SpecularColor;
-    gl_FragColor = vec4 (finalColor, 1.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11LatticeVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11LatticeVert.glsl
deleted file mode 100644
index 5e193d2..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11LatticeVert.glsl
+++ /dev/null
@@ -1,39 +0,0 @@
-//
-// Vertex shader for testing the discard command
-//
-// Author: OGLSL implementation by Ian Nurse
-//
-// Copyright (C) 2002-2004  LightWork Design Ltd.
-//          www.lightworkdesign.com
-//
-// See LightworkDesign-License.txt for license information
-//
-
-uniform vec3  LightPosition;
-uniform vec3  LightColor;
-uniform vec3  EyePosition;
-uniform vec3  Specular;
-uniform vec3  Ambient;
-uniform float Kd;
-
-varying vec3  DiffuseColor;
-varying vec3  SpecularColor;
-
-void main(void)
-{
-    vec3 ecPosition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPosition);
-    vec3 viewVec    = normalize(EyePosition - ecPosition);
-    vec3 Hvec       = normalize(viewVec + lightVec);
-
-    float spec = abs(dot(Hvec, tnorm));
-    spec = pow(spec, 16.0);
-
-    DiffuseColor    = LightColor * vec3 (Kd * abs(dot(lightVec, tnorm)));
-    DiffuseColor    = clamp(Ambient + DiffuseColor, 0.0, 1.0);
-    SpecularColor   = clamp((LightColor * Specular * spec), 0.0, 1.0);
-
-    gl_TexCoord[0]  = gl_MultiTexCoord0;
-    gl_Position     = ftransform();
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11StripeFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11StripeFrag.glsl
deleted file mode 100644
index 762890c..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11StripeFrag.glsl
+++ /dev/null
@@ -1,35 +0,0 @@
-//
-// Fragment shader for drawing procedural stripes
-//
-// Author: OGLSL implementation by Ian Nurse
-//
-// Copyright (C) 2002-2004  LightWork Design Ltd.
-//          www.lightworkdesign.com
-//
-// See LightworkDesign-License.txt for license information
-//
-
-uniform vec3  StripeColor;
-uniform vec3  BackColor;
-uniform float Width;
-uniform float Fuzz;
-uniform float Scale;
-
-varying vec3  DiffuseColor;
-varying vec3  SpecularColor;
-
-void main(void)
-{
-    float scaled_t = fract(gl_TexCoord[0].t * Scale);
-
-    float frac1 = clamp(scaled_t / Fuzz, 0.0, 1.0);
-    float frac2 = clamp((scaled_t - Width) / Fuzz, 0.0, 1.0);
-
-    frac1 = frac1 * (1.0 - frac2);
-    frac1 = frac1 * frac1 * (3.0 - (2.0 * frac1));
-
-    vec3 finalColor = mix(BackColor, StripeColor, frac1);
-    finalColor = finalColor * DiffuseColor + SpecularColor;
-
-    gl_FragColor = vec4 (finalColor, 1.0);
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11StripeVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11StripeVert.glsl
deleted file mode 100644
index ebcccab..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11StripeVert.glsl
+++ /dev/null
@@ -1,39 +0,0 @@
-//
-// Vertex shader for drawing procedural stripes
-//
-// Author: OGLSL implementation by Ian Nurse
-//
-// Copyright (C) 2002-2004  LightWork Design Ltd.
-//          www.lightworkdesign.com
-//
-// See LightworkDesign-License.txt for license information
-//
-
-uniform vec3  LightPosition;
-uniform vec3  LightColor;
-uniform vec3  EyePosition;
-uniform vec3  Specular;
-uniform vec3  Ambient;
-uniform float Kd;
-
-varying vec3  DiffuseColor;
-varying vec3  SpecularColor;
-
-void main(void)
-{
-    vec3 ecPosition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPosition);
-    vec3 viewVec    = normalize(EyePosition - ecPosition);
-    vec3 Hvec       = normalize(viewVec + lightVec);
-
-    float spec = clamp(dot(Hvec, tnorm), 0.0, 1.0);
-    spec = pow(spec, 16.0);
-
-    DiffuseColor    = LightColor * vec3 (Kd * dot(lightVec, tnorm));
-    DiffuseColor    = clamp(Ambient + DiffuseColor, 0.0, 1.0);
-    SpecularColor   = clamp((LightColor * Specular * spec), 0.0, 1.0);
-
-    gl_TexCoord[0]  = gl_MultiTexCoord0;
-    gl_Position     = ftransform();
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11ToyBallFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11ToyBallFrag.glsl
deleted file mode 100644
index 74c010c..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11ToyBallFrag.glsl
+++ /dev/null
@@ -1,78 +0,0 @@
-//
-// Fragment shader for procedurally generated toy ball
-//
-// Author: Bill Licea-Kane
-//
-// Copyright (c) 2002-2003 ATI Research
-//
-// See ATI-License.txt for license information
-//
-
-varying vec4 ECposition;   // surface position in eye coordinates
-varying vec4 ECballCenter; // ball center in eye coordinates
-
-uniform vec4  LightDir;     // light direction, should be normalized
-uniform vec4  HVector;      // reflection vector for infinite light source
-uniform vec4  SpecularColor;
-uniform vec4  Red;
-uniform vec4  Yellow;
-uniform vec4  Blue;
-
-uniform vec4  HalfSpace0;   // half-spaces used to define star pattern
-uniform vec4  HalfSpace1;
-uniform vec4  HalfSpace2;
-uniform vec4  HalfSpace3;
-uniform vec4  HalfSpace4;
-
-uniform float InOrOutInit;  // = -3
-uniform float StripeWidth;  // = 0.3
-uniform float FWidth;       // = 0.005
-
-void main(void)
-{
-    vec4  normal;              // Analytically computed normal
-    vec4  p;                   // Point in shader space
-    vec4  surfColor;           // Computed color of the surface
-    float intensity;           // Computed light intensity
-    vec4  distance;            // Computed distance values
-    float inorout;             // Counter for computing star pattern
-
-    p.xyz = normalize(ECposition.xyz - ECballCenter.xyz);    // Calculate p
-    p.w   = 1.0;
-
-    inorout = InOrOutInit;     // initialize inorout to -3
-
-    distance[0] = dot(p, HalfSpace0);
-    distance[1] = dot(p, HalfSpace1);
-    distance[2] = dot(p, HalfSpace2);
-    distance[3] = dot(p, HalfSpace3);
-
-    distance = smoothstep(-FWidth, FWidth, distance);
-
-    inorout += dot(distance, vec4(1.0));
-
-    distance.x = dot(p, HalfSpace4);
-    distance.y = StripeWidth - abs(p.z);
-    distance = smoothstep(-FWidth, FWidth, distance);
-    inorout += distance.x;
-
-    inorout = clamp(inorout, 0.0, 1.0);
-
-    surfColor = mix(Yellow, Red, inorout);
-    surfColor = mix(surfColor, Blue, distance.y);
-
-    // normal = point on surface for sphere at (0,0,0)
-    normal = p;
-
-    // Per fragment diffuse lighting
-    intensity  = 0.2; // ambient
-    intensity += 0.8 * clamp(dot(LightDir, normal), 0.0, 1.0);
-    surfColor *= intensity;
-
-    // Per fragment specular lighting
-    intensity  = clamp(dot(HVector, normal), 0.0, 1.0);
-    intensity  = pow(intensity, SpecularColor.a);
-    surfColor += SpecularColor * intensity;
-
-    gl_FragColor = surfColor;
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11ToyBallVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11ToyBallVert.glsl
deleted file mode 100644
index bd29f3f..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Ch11ToyBallVert.glsl
+++ /dev/null
@@ -1,20 +0,0 @@
-//
-// Vertex shader for procedurally generated toy ball
-//
-// Author: Bill Licea-Kane
-//
-// Copyright (c) 2002-2003 ATI Research
-//
-// See ATI-License.txt for license information
-//
-
-varying vec4 ECposition;   // surface position in eye coordinates
-varying vec4 ECballCenter; // ball center in eye coordinates
-uniform vec4 BallCenter;   // ball center in modelling coordinates
-
-void main(void)
-{
-    ECposition   = gl_ModelViewMatrix * gl_Vertex;
-    ECballCenter = gl_ModelViewMatrix * BallCenter;
-    gl_Position  = ftransform();
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Lattice.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Lattice.xml
deleted file mode 100644
index 09165b1..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Lattice.xml
+++ /dev/null
@@ -1,50 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Property name="Property1">
-    <!--
-    <Member name="Color" number_of_elements="3" type="Double" value="1.0 1.0 1.0">
-    </Member>
-    -->
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.2 0.2 0.9"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.1 0.5 0.1"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.5 0.1 0.1"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.8"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <!-- Setting this to '0' here causes the object to disappear in the scene
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="1">
-    </Member>
-    -->
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch11/Ch11LatticeVert.glsl" location="OrangeBook/Ch11/Ch11LatticeVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-    <LightUniform  value="AmbientColor" name="LightColor"> </LightUniform>
-
-    <CameraUniform  value="Position" name="EyePosition"> </CameraUniform>
-
-    <PropertyUniform  value="SpecularColor" name="Specular"> </PropertyUniform>
-    <PropertyUniform  value="AmbientColor" name="Ambient"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse"  name="Kd"> </PropertyUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch11/Ch11LatticeFrag.glsl" location="OrangeBook/Ch11/Ch11LatticeFrag.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec2" name="Scale" number_of_elements="2" value="1.0 1.0"> </Uniform>
-    <Uniform type="vec2" name="Threshold" number_of_elements="2" value="10.0 10.0"> </Uniform>
-    <Uniform type="vec3" name="SurfaceColor" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Stripe.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Stripe.xml
deleted file mode 100644
index 309085b..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/Stripe.xml
+++ /dev/null
@@ -1,51 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="Ch11StripeVert.glsl" location="OrangeBook/Ch11/Ch11StripeVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-    <LightUniform  value="AmbientColor" name="LightColor"> </LightUniform>
-
-    <CameraUniform  value="Position" name="EyePosition"> </CameraUniform>
-
-    <PropertyUniform  value="SpecularColor" name="Specular"> </PropertyUniform>
-    <PropertyUniform  value="AmbientColor" name="Ambient"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="Kd"> </PropertyUniform>
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="Ch11StripeFrag.glsl" location="OrangeBook/Ch11/Ch11StripeFrag.glsl"  language="GLSL" entry="main">
-
-    <Uniform type="vec3" name="StripeColor" number_of_elements="3" value="1.0 0.5 0.5"> </Uniform>
-    <Uniform type="vec3" name="BackColor" number_of_elements="3" value="0.5 0.5 1.0"> </Uniform>
-
-    <Uniform type="float" name="Width" number_of_elements="1" value="0.5"> </Uniform>
-    <Uniform type="float" name="Fuzz" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="Scale" number_of_elements="1" value="10.0"> </Uniform>
-
-  </Shader>
-
-
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/ToyBall.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/ToyBall.xml
deleted file mode 100644
index 7a85cd4..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch11/ToyBall.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch11/Ch11ToyBallVert.glsl" location="OrangeBook/Ch11/Ch11ToyBallVert.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec4" name="BallCenter" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch11/Ch11ToyBallFrag.glsl" location="OrangeBook/Ch11/Ch11ToyBallFrag.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec4" name="LightDir" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec4" name="HVector" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec4" name="SpecularColor" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-
-    <Uniform type="vec4" name="Red" number_of_elements="4" value="1.0 0.0 0.0 1.0"> </Uniform>
-    <Uniform type="vec4" name="Yellow" number_of_elements="4" value="0.0 1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec4" name="Blue" number_of_elements="4" value="0.0 0.0 1.0 1.0"> </Uniform>
-
-    <Uniform type="vec4" name="HalfSpace0" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec4" name="HalfSpace1" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec4" name="HalfSpace2" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec4" name="HalfSpace3" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec4" name="HalfSpace4" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-
-    <Uniform type="float" name="InOrOutInit" number_of_elements="1" value="1.0"> </Uniform>
-
-    <Uniform type="float" name="StripeWidth" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="FWidth" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Cloud.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Cloud.xml
deleted file mode 100644
index 5454021..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Cloud.xml
+++ /dev/null
@@ -1,39 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="./OrangeBook/Ch12/Ch12CloudVert.glsl" location="./OrangeBook/Ch12/Ch12CloudVert.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec3" name="LightPos" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="Scale" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="./OrangeBook/Ch12/Ch12CloudFrag.glsl" location="./OrangeBook/Ch12/Ch12CloudFrag.glsl"  language="GLSL" entry="main">
-    <!--
-    <SamplerUniform type="sampler3D" name="Noise" value="0"> </SamplerUniform>
-    -->
-    <Uniform type="int" name="Noise" number_of_elements="1" value="0"> </Uniform>
-
-    <Uniform type="vec3" name="SkyColor" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec3" name="CloudColor" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12CloudFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12CloudFrag.glsl
deleted file mode 100644
index 77c8e77..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12CloudFrag.glsl
+++ /dev/null
@@ -1,28 +0,0 @@
-//
-// Fragment shader for producing clouds (mostly cloudy)
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform sampler3D Noise;
-uniform vec3 SkyColor;     // (0.0, 0.0, 0.8)
-uniform vec3 CloudColor;   // (0.8, 0.8, 0.8)
-
-void main (void)
-{
-    vec4  noisevec  = texture3D(Noise, MCposition);
-
-    float intensity = (noisevec[0] + noisevec[1] +
-                       noisevec[2] + noisevec[3] + 0.03125) * 1.5;
-
-    vec3 color   = mix(SkyColor, CloudColor, intensity) * LightIntensity;
-
-    gl_FragColor = vec4 (color, 1.0);
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12CloudVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12CloudVert.glsl
deleted file mode 100644
index f9068ec..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12CloudVert.glsl
+++ /dev/null
@@ -1,25 +0,0 @@
-//
-// Vertex shader for producing clouds (mostly cloudy)
-//
-// Authors: John Kessenich, Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform vec3  LightPos;
-uniform float Scale;
-
-void main(void)
-{
-    vec3 ECposition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    MCposition      = vec3 (gl_Vertex) * Scale;
-    vec3 tnorm      = normalize(vec3 (gl_NormalMatrix * gl_Normal));
-    LightIntensity  = dot(normalize(LightPos - ECposition), tnorm);
-    LightIntensity *= 1.5;
-    gl_Position     = ftransform();
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Fire.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Fire.xml
deleted file mode 100644
index 5a88f6c..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Fire.xml
+++ /dev/null
@@ -1,40 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="./OrangeBook/Ch12/Ch12FireVert.glsl" location="./OrangeBook/Ch12/Ch12FireVert.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec3" name="LightPos" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="Scale" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="./OrangeBook/Ch12/Ch12FireFrag.glsl" location="./OrangeBook/Ch12/Ch12FireFrag.glsl"  language="GLSL" entry="main">
-    <!--
-    <SamplerUniform type="sampler3D" name="Noise" value="0"> </SamplerUniform>
-    -->
-    <Uniform type="int" name="Noise" number_of_elements="1" value="0"> </Uniform>
-
-    <Uniform type="vec3" name="Color1" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec3" name="Color2" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="NoiseScale" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12FireFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12FireFrag.glsl
deleted file mode 100644
index 2e274c9..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12FireFrag.glsl
+++ /dev/null
@@ -1,33 +0,0 @@
-//
-// Fragment shader for producing a turbulent fire effect
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform sampler3D Noise;
-uniform vec3 Color1;       // (0.8, 0.7, 0.0)
-uniform vec3 Color2;       // (0.6, 0.1, 0.0)
-uniform float NoiseScale;  // 1.2
-
-void main (void)
-{
-    vec4 noisevec = texture3D(Noise, MCposition * NoiseScale);
-
-
-    float intensity = abs(noisevec[0] - 0.25) +
-                      abs(noisevec[1] - 0.125) +
-                      abs(noisevec[2] - 0.0625) +
-                      abs(noisevec[3] - 0.03125);
-
-    intensity    = clamp(intensity * 6.0, 0.0, 1.0);
-    vec3 color   = mix(Color1, Color2, intensity) * LightIntensity;
-
-    gl_FragColor = vec4 (color, 1.0);
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12FireVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12FireVert.glsl
deleted file mode 100644
index 7f24173..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12FireVert.glsl
+++ /dev/null
@@ -1,25 +0,0 @@
-//
-// Vertex shader for producing a turbulent fire effect
-//
-// Authors: John Kessenich, Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform vec3  LightPos;
-uniform float Scale;
-
-void main(void)
-{
-    vec3 ECposition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    MCposition      = vec3 (gl_Vertex) * Scale;
-    vec3 tnorm      = normalize(vec3 (gl_NormalMatrix * gl_Normal));
-    LightIntensity  = dot(normalize(LightPos - ECposition), tnorm);
-    LightIntensity *= 1.5;
-    gl_Position     = ftransform();
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Granite.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Granite.xml
deleted file mode 100644
index dd01ad7..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Granite.xml
+++ /dev/null
@@ -1,38 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="./OrangeBook/Ch12/Ch12GraniteVert.glsl" location="./OrangeBook/Ch12/Ch12GraniteVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPos"> </LightUniform>
-    <Uniform type="float" name="Scale" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="./OrangeBook/Ch12/Ch12GraniteFrag.glsl" location="./OrangeBook/Ch12/Ch12GraniteFrag.glsl"  language="GLSL" entry="main">
-    <!--
-    <SamplerUniform type="sampler3D" name="Noise" value="0"> </SamplerUniform>
-    -->
-    <Uniform type="int" name="Noise" number_of_elements="1" value="0"> </Uniform>
-
-    <Uniform type="float" name="NoiseScale" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12GraniteFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12GraniteFrag.glsl
deleted file mode 100644
index 91e0947..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12GraniteFrag.glsl
+++ /dev/null
@@ -1,24 +0,0 @@
-//
-// Fragment shader for producing a granite effect
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform sampler3D Noise;
-uniform float NoiseScale;
-
-void main(void)
-{
-    vec4  noisevec  = texture3D(Noise, NoiseScale * MCposition);
-    float intensity = min(1.0, noisevec[3] * 18.0);
-    vec3  color     = vec3 (intensity * LightIntensity);
-
-    gl_FragColor    = vec4 (color, 1.0);
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12GraniteVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12GraniteVert.glsl
deleted file mode 100644
index 92b2d2d..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12GraniteVert.glsl
+++ /dev/null
@@ -1,29 +0,0 @@
-//
-// Vertex shader for producing a granite effect
-//
-// Authors: John Kessenich, Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform vec3  LightPos;
-uniform float Scale;
-
-void main(void)
-{
-    vec3 ECposition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    MCposition      = vec3 (gl_Vertex) * Scale;
-    vec3 tnorm      = normalize(vec3 (gl_NormalMatrix * gl_Normal));
-
-
-    vec3 ECLightPos = vec3 (gl_ModelViewMatrix * vec4(LightPos,1.0));
-
-    LightIntensity  = dot(normalize(ECLightPos - ECposition), tnorm);
-    LightIntensity *= 1.5;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Wood.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Wood.xml
deleted file mode 100644
index b246a53..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12Wood.xml
+++ /dev/null
@@ -1,46 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="./OrangeBook/Ch12/Ch12WoodVert.glsl" location="./OrangeBook/Ch12/Ch12WoodVert.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec3" name="LightPos" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="Scale" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="./OrangeBook/Ch12/Ch12WoodFrag.glsl" location="./OrangeBook/Ch12/Ch12WoodFrag.glsl"  language="GLSL" entry="main">
-    <!--
-    <SamplerUniform type="sampler3D" name="Noise" value="0"> </SamplerUniform>
-    -->
-    <Uniform type="int" name="Noise" number_of_elements="1" value="0"> </Uniform>
-
-    <Uniform type="vec3" name="LightWoodColor" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="vec3" name="DarkWoodColor" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="RingFreq" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="LightGrains" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="DarkGrains" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="GrainThreshold" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="vec3" name="NoiseScale" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="Noisiness" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="GrainScale" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12WoodFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12WoodFrag.glsl
deleted file mode 100644
index 4911224..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12WoodFrag.glsl
+++ /dev/null
@@ -1,52 +0,0 @@
-//
-// Fragment shader for producing a wood effect
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform sampler3D Noise;
-
-uniform vec3  LightWoodColor;
-uniform vec3  DarkWoodColor;
-uniform float RingFreq;
-uniform float LightGrains;
-uniform float DarkGrains;
-uniform float GrainThreshold;
-uniform vec3  NoiseScale;
-uniform float Noisiness;
-uniform float GrainScale;
-
-void main(void)
-{
-    vec3 noisevec = vec3 (texture3D(Noise, MCposition * NoiseScale) *
-                                                 Noisiness);
-    vec3 location = MCposition + noisevec;
-
-    float dist = sqrt(location.x * location.x + location.z * location.z);
-    dist *= RingFreq;
-
-    float r = fract(dist + noisevec[0] + noisevec[1] + noisevec[2]) * 2.0;
-
-    if (r > 1.0)
-        r = 2.0 - r;
-
-    vec3 color = mix(LightWoodColor, DarkWoodColor, r);
-
-    /*r = fract((MCposition.x + MCposition.z) * GrainScale + 0.5);
-    noisevec[2] *= r;
-    if (r < GrainThreshold)
-        color += LightWoodColor * LightGrains * noisevec[2];
-    else
-        color -= LightWoodColor * DarkGrains * noisevec[2];*/
-
-    color *= LightIntensity;
-
-    gl_FragColor = vec4 (color, 1.0);
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12WoodVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12WoodVert.glsl
deleted file mode 100644
index 5df743a..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch12/Ch12WoodVert.glsl
+++ /dev/null
@@ -1,25 +0,0 @@
-//
-// Vertex shader for producing a wood effect
-//
-// Authors: John Kessenich, Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform vec3  LightPos;
-uniform float Scale;
-
-void main(void)
-{
-    vec3 ECposition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    MCposition      = vec3 (gl_Vertex) * Scale;
-    vec3 tnorm      = normalize(vec3 (gl_NormalMatrix * gl_Normal));
-    LightIntensity  = dot(normalize(LightPos - ECposition), tnorm);
-    LightIntensity *= 1.5;
-    gl_Position     = ftransform();
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Cloud.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Cloud.xml
deleted file mode 100644
index 6fc4ed9..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Cloud.xml
+++ /dev/null
@@ -1,20 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Property name="Property1">
-    <Texture name="masonry" type="2D" format="bmp" location="Textures/masonry.bmp"> </Texture>
-  </Property>
-
-  <Shader scope="Vertex" name="Ch13CloudAnimVert" location="OrangeBook/Ch13/Ch13CloudAnimVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPos"> </LightUniform>
-    <Uniform type="float" name="Scale" number_of_elements="1" value="0.1"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="Ch13CloudAnimFrag" location="OrangeBook/Ch13/Ch13CloudAnimFrag.glsl" language="GLSL" entry="main">
-    <Uniform type="vec3" name="SkyColor" number_of_elements="3" value="0.0 0.0 0.8"> </Uniform>
-    <Uniform type="vec3" name="CloudColor" number_of_elements="3" value="0.8 0.8 0.8"> </Uniform>
-    <Uniform type="vec3" name="Offset" number_of_elements="3" value="0.1 0.1 0.1"> </Uniform>
-    <Uniform type="sampler3D" name="Noise" number_of_elements="1" value="0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13CloudAnimFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13CloudAnimFrag.glsl
deleted file mode 100644
index e99a7e9..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13CloudAnimFrag.glsl
+++ /dev/null
@@ -1,29 +0,0 @@
-//
-// Fragment shader for producing animated clouds (mostly cloudy)
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform sampler3D Noise;
-uniform vec3 SkyColor;     // (0.0, 0.0, 0.8)
-uniform vec3 CloudColor;   // (0.8, 0.8, 0.8)
-uniform vec3 Offset;       // updated each frame by the application
-
-void main (void)
-{
-    vec4  noisevec  = texture3D(Noise, MCposition + Offset);
-
-    float intensity = (noisevec[0] + noisevec[1] +
-                       noisevec[2] + noisevec[3]) * 1.5;
-
-    vec3 color   = mix(SkyColor, CloudColor, intensity) * LightIntensity;
-
-    gl_FragColor = vec4 (color, 1.0);
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13CloudAnimVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13CloudAnimVert.glsl
deleted file mode 100644
index 4dba8fc..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13CloudAnimVert.glsl
+++ /dev/null
@@ -1,25 +0,0 @@
-//
-// Vertex shader for producing animated clouds (mostly cloudy)
-//
-// Authors: John Kessenich, Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-varying vec3  MCposition;
-
-uniform vec3  LightPos;
-uniform float Scale;
-
-void main(void)
-{
-    vec3 ECposition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    MCposition      = vec3 (gl_Vertex) * Scale;
-    vec3 tnorm      = normalize(vec3 (gl_NormalMatrix * gl_Normal));
-    LightIntensity  = dot(normalize(LightPos - ECposition), tnorm);
-    LightIntensity *= 1.5;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Particle.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Particle.xml
deleted file mode 100644
index 119372d..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Particle.xml
+++ /dev/null
@@ -1,33 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch13/Ch13ParticleVert.glsl" location="OrangeBook/Ch13/Ch13ParticleVert.glsl"  language="GLSL" entry="main">
-    <Uniform type="float" name="Time" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="vec4" name="Background" number_of_elements="4" value="1.0 1.0 1.0 1.0"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch13/Ch13ParticleFrag.glsl" location="OrangeBook/Ch13/Ch13ParticleFrag.glsl"  language="GLSL" entry="main">
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13ParticleFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13ParticleFrag.glsl
deleted file mode 100644
index deb7be2..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13ParticleFrag.glsl
+++ /dev/null
@@ -1,17 +0,0 @@
-//
-// Fragment shader for rendering a "confetti cannon"
-// via a particle system
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2003-2004: 3Dlabs, Inc.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying vec4 Color;
-
-void main (void)
-{
-    gl_FragColor = Color;
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13ParticleVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13ParticleVert.glsl
deleted file mode 100644
index 9f3fa67..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13ParticleVert.glsl
+++ /dev/null
@@ -1,38 +0,0 @@
-//
-// Vertex shader for rendering a "confetti cannon"
-// via a particle system
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2003-2004: 3Dlabs, Inc.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform float Time;            // updated each frame by the application
-uniform vec4  Background;      // constant color equal to background
-
-attribute vec3  Velocity;      // initial velocity
-attribute float StartTime;     // time at which particle is activated
-
-varying vec4 Color;
-
-void main(void)
-{
-    vec4  vert;
-    float t = Time - StartTime;
-
-    if (t >= 0.0)
-    {
-        vert    = gl_Vertex + vec4 (Velocity * t, 0.0);
-        vert.y -= 4.9 * t * t;
-        Color   = gl_Color;
-    }
-    else
-    {
-        vert  = gl_Vertex;      // Initial position
-        Color = Background;     // "pre-birth" color
-    }
-
-    gl_Position  = gl_ModelViewProjectionMatrix * vert;
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Wobble.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Wobble.xml
deleted file mode 100644
index 4d2dc1d..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13Wobble.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Property name="Property1">
-    <!--
-    <Texture dimension="Texture2D" format="bmp" location="Textures/Rust.bmp"> </Texture>
-    <Texture dimension="Texture2D" format="bmp" location="Textures/RgbRand.bmp"> </Texture>
-    -->
-    <Texture name="goodOleMasonry" type="Texture2D" format="bmp" location="Textures/masonry.bmp"> </Texture>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch13/Ch13WobbleVert.glsl" location="OrangeBook/Ch13/Ch13WobbleVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch13/Ch13WobbleFrag.glsl" location="OrangeBook/Ch13/Ch13WobbleFrag.glsl"  language="GLSL" entry="main">
-
-    <PropertyUniform  value="MTime" name="mtime"> </PropertyUniform>
-
-    <Uniform type="float" name="StartRad" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="vec2" name="Freq" number_of_elements="2" value="0.000000001 0.0001"> </Uniform>
-    <Uniform type="vec2" name="Amplitude" number_of_elements="2" value="1.0 1.0"> </Uniform>
-    <!--
-    <SamplerUniform type="sampler2D" name="WobbleTex" value="0"> </SamplerUniform>
-    -->
-    <Uniform type="int" name="WobbleTex" number_of_elements="1" value="0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13WobbleFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13WobbleFrag.glsl
deleted file mode 100644
index dce3876..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13WobbleFrag.glsl
+++ /dev/null
@@ -1,71 +0,0 @@
-//
-// Fragment shader for wobbling a texture
-//
-// Author: Antonio Tejada
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-// Constants
-const float C_PI    = 3.1415;
-const float C_2PI   = 2.0 * C_PI;
-const float C_2PI_I = 1.0 / (2.0 * C_PI);
-const float C_PI_2  = C_PI / 2.0;
-
-varying float LightIntensity;
-
-uniform float mtime;
-uniform float StartRad;
-uniform vec2  Freq;
-uniform vec2  Amplitude;
-
-uniform sampler2D WobbleTex;
-
-void main (void)
-{
-    vec2  perturb;
-    float rad;
-    vec3  color;
-
-    // Compute a perturbation factor for the x-direction
-    rad = (gl_TexCoord[0].s + gl_TexCoord[0].t - 1.0 + StartRad) * Freq.x * mtime;
-
-    // Wrap to -2.0*PI, 2*PI
-    rad = rad * C_2PI_I;
-    rad = fract(rad);
-    rad = rad * C_2PI;
-
-    // Center in -PI, PI
-    if (rad >  C_PI) rad = rad - C_2PI;
-    if (rad < -C_PI) rad = rad + C_2PI;
-
-    // Center in -PI/2, PI/2
-    if (rad >  C_PI_2) rad =  C_PI - rad;
-    if (rad < -C_PI_2) rad = -C_PI - rad;
-
-    perturb.x  = (rad - (rad * rad * rad / 6.0)) * Amplitude.x;
-
-    // Now compute a perturbation factor for the y-direction
-    rad = (gl_TexCoord[0].s - gl_TexCoord[0].t + StartRad) * Freq.y * mtime;
-
-    // Wrap to -2*PI, 2*PI
-    rad = rad * C_2PI_I;
-    rad = fract(rad);
-    rad = rad * C_2PI;
-
-    // Center in -PI, PI
-    if (rad >  C_PI) rad = rad - C_2PI;
-    if (rad < -C_PI) rad = rad + C_2PI;
-
-    // Center in -PI/2, PI/2
-    if (rad >  C_PI_2) rad =  C_PI - rad;
-    if (rad < -C_PI_2) rad = -C_PI - rad;
-
-    perturb.y  = (rad - (rad * rad * rad / 6.0)) * Amplitude.y;
-
-    color = vec3 (texture2D(WobbleTex, perturb + gl_TexCoord[0].st));
-
-    gl_FragColor = vec4 (color * LightIntensity, 1.0);
-}
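For reference — the wrap/fold arithmetic above reduces the phase into [-PI/2, PI/2] so that the expression (rad - rad*rad*rad/6.0) can stand in for sin(rad); that is the two-term Taylor series of sine. Below is a minimal standalone C sketch (not part of VTK; the helper name approx_sin and the test loop are illustrative only) that mirrors those steps on the CPU and compares them against sinf():

#include <math.h>
#include <stdio.h>

#define C_PI   3.14159265f
#define C_2PI  (2.0f * C_PI)
#define C_PI_2 (C_PI / 2.0f)

static float approx_sin(float rad)
{
    /* Wrap to [0, 2*PI), as the shader does with fract(). */
    rad = rad / C_2PI;
    rad = rad - floorf(rad);
    rad = rad * C_2PI;

    /* Center in [-PI, PI]. */
    if (rad >  C_PI) rad -= C_2PI;
    if (rad < -C_PI) rad += C_2PI;

    /* Fold into [-PI/2, PI/2]; sine is symmetric about +/-PI/2. */
    if (rad >  C_PI_2) rad =  C_PI - rad;
    if (rad < -C_PI_2) rad = -C_PI - rad;

    /* Two-term Taylor series, the shader's (rad - rad^3/6). */
    return rad - (rad * rad * rad) / 6.0f;
}

int main(void)
{
    for (int i = -4; i <= 4; ++i) {
        float x = (float)i * 1.7f;
        printf("x=% 6.2f  approx=% .4f  sinf=% .4f\n", x, approx_sin(x), sinf(x));
    }
    return 0;
}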
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13WobbleVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13WobbleVert.glsl
deleted file mode 100644
index 8837fa6..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch13/Ch13WobbleVert.glsl
+++ /dev/null
@@ -1,33 +0,0 @@
-//
-// Vertex shader for wobbling a texture
-//
-// Author: Antonio Tejada
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float LightIntensity;
-uniform vec3 LightPosition;
-
-const float specularContribution = 0.1;
-const float diffuseContribution  = 1.0 - specularContribution;
-
-void main(void)
-{
-    vec3 ecPosition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPosition);
-    vec3 reflectVec = reflect(-lightVec, tnorm);
-    vec3 viewVec    = normalize(-ecPosition);
-
-    float spec      = clamp(dot(reflectVec, viewVec), 0.0, 1.0);
-    spec            = pow(spec, 16.0);
-
-    LightIntensity  = diffuseContribution * max(dot(lightVec, tnorm), 0.0)
-                      + specularContribution * spec;
-
-    gl_TexCoord[0]  = gl_MultiTexCoord0;
-    gl_Position     = ftransform();
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrick.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrick.xml
deleted file mode 100644
index 3ad9270..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrick.xml
+++ /dev/null
@@ -1,42 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 0.1 0.1"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <!-- Setting this to '0' here causes the object to disappear in the scene
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="1">
-    </Member>
-    -->
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch14/Ch14AABrickVert.glsl" location="OrangeBook/Ch14/Ch14AABrickVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch14/Ch14AABrickFrag.glsl" location="OrangeBook/Ch14/Ch14AABrickFrag.glsl"  language="GLSL" entry="main">
-    <PropertyUniform  value="AmbientColor" name="BrickColor"> </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="MortarColor"> </PropertyUniform>
-
-    <Uniform type="vec2" name="BrickSize" number_of_elements="2" value="0.5 0.25"> </Uniform>
-    <Uniform type="vec2" name="BrickPct" number_of_elements="2" value="0.9 0.9"> </Uniform>
-    <Uniform type="vec2" name="MortarPct" number_of_elements="2" value="0.1 0.1"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrickFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrickFrag.glsl
deleted file mode 100644
index 46a1b53..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrickFrag.glsl
+++ /dev/null
@@ -1,46 +0,0 @@
-//
-// Fragment shader for antialiased procedural bricks
-//
-// Authors: Dave Baldwin, Randi Rost
-//          based on a shader by Darwyn Peachey
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3  BrickColor, MortarColor;
-uniform vec2  BrickSize;
-uniform vec2  BrickPct;
-uniform vec2  MortarPct;
-
-varying vec2  MCposition;
-varying float LightIntensity;
-
-#define Integral(x, p, notp) ((floor(x)*(p)) + max(fract(x)-(notp), 0.0))
-
-void main(void)
-{
-    vec2 position, fw, useBrick;
-    vec3 color;
-
-    // Determine position within the brick pattern
-    position = MCposition / BrickSize;
-
-    // Adjust every other row by an offset of half a brick
-    if (fract(position.y * 0.5) > 0.5)
-        position.x += 0.5;
-
-    // Calculate filter size
-    fw = fwidth(position);
-
-    // Perform filtering by integrating the 2D pulse made by the
-    // brick pattern over the filter width and height
-    useBrick = (Integral(position + fw, BrickPct, MortarPct) -
-                Integral(position, BrickPct, MortarPct)) / fw;
-
-    // Determine final color
-    color  = mix(MortarColor, BrickColor, useBrick.x * useBrick.y);
-    color *= LightIntensity;
-    gl_FragColor = vec4 (color, 1.0);
-}
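For reference — the Integral(x, p, notp) macro above is the running integral of a unit-period pulse that is 0 for the first notp of each period and 1 for the remaining p (with the default values p + notp = 1). Differencing that integral across the filter width fw gives the average brick coverage under the pixel footprint without any supersampling. A minimal standalone C sketch (not part of VTK; function names are illustrative) of the same trick:

#include <math.h>
#include <stdio.h>

/* Running integral of the brick/mortar pulse; same expression as the
 * shader's Integral(x, p, notp) macro. */
static double integral_of_pulse(double x, double brick, double mortar)
{
    double f = x - floor(x);                 /* fract(x) */
    return floor(x) * brick + fmax(f - mortar, 0.0);
}

/* Box-filtered brick coverage over [x, x + fw]. */
static double filtered_coverage(double x, double fw, double brick, double mortar)
{
    return (integral_of_pulse(x + fw, brick, mortar) -
            integral_of_pulse(x, brick, mortar)) / fw;
}

int main(void)
{
    const double brick = 0.9, mortar = 0.1;  /* BrickPct.x and MortarPct.x */
    const double fw = 0.25;                  /* stand-in for fwidth(position).x */

    for (int i = 0; i < 10; ++i) {
        double x = i * 0.2;
        printf("x=%4.2f  brick coverage=%.3f\n",
               x, filtered_coverage(x, fw, brick, mortar));
    }
    return 0;
}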
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrickVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrickVert.glsl
deleted file mode 100644
index 7cd1d45..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AABrickVert.glsl
+++ /dev/null
@@ -1,41 +0,0 @@
-//
-// Vertex shader for antialiased procedural bricks
-//
-// Authors: Dave Baldwin, Steve Koren, Randi Rost
-//          based on a shader by Darwyn Peachey
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3 LightPosition;
-
-const float SpecularContribution = 0.3;
-const float DiffuseContribution  = 1.0 - SpecularContribution;
-
-varying float LightIntensity;
-varying vec2  MCposition;
-
-void main(void)
-{
-    vec3 ecPosition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPosition);
-    vec3 reflectVec = reflect(-lightVec, tnorm);
-    vec3 viewVec    = normalize(-ecPosition);
-    float diffuse   = max(dot(lightVec, tnorm), 0.0);
-    float spec      = 0.0;
-
-    if (diffuse > 0.0)
-    {
-        spec = max(dot(reflectVec, viewVec), 0.0);
-        spec = pow(spec, 16.0);
-    }
-
-    LightIntensity  = DiffuseContribution * diffuse +
-                      SpecularContribution * spec;
-
-    MCposition      = gl_Vertex.xy;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14Adaptive.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14Adaptive.xml
deleted file mode 100644
index d79a785..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14Adaptive.xml
+++ /dev/null
@@ -1,12 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch14/Ch14AdaptiveAAVert.glsl" location="OrangeBook/Ch14/Ch14AdaptiveAAVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch14/Ch14AdaptiveAAFrag.glsl" location="OrangeBook/Ch14/Ch14AdaptiveAAFrag.glsl"  language="GLSL" entry="main">
-    <Uniform type="float" name="Frequency" number_of_elements="1" value="16.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AdaptiveAAFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AdaptiveAAFrag.glsl
deleted file mode 100644
index a851c2c..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AdaptiveAAFrag.glsl
+++ /dev/null
@@ -1,25 +0,0 @@
-//
-// Fragment shader for adaptively antialiasing a procedural stripe pattern
-//
-// Author: Randi Rost
-//         based on a shader by Bert Freudenberg
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying float V;                    // generic varying
-varying float LightIntensity;
-
-uniform float Frequency;            // Stripe frequency = 16
-
-void main (void)
-{
-    float sawtooth = fract(V * Frequency);
-    float triangle = abs(2.0 * sawtooth - 1.0);
-    float dp = length(vec2 (dFdx(V), dFdy(V)));
-    float edge = dp * Frequency * 2.0;
-    float square = smoothstep(0.5 - edge, 0.5 + edge, triangle);
-    gl_FragColor = vec4 (vec3 (square), 1.0) * LightIntensity;
-}
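For reference — the shader above widens the smoothstep transition in proportion to the screen-space derivative of V, so the stripes blur at the same rate they shrink on screen. A minimal standalone C sketch (not part of VTK; dp must be supplied by the caller since dFdx/dFdy only exist in fragment shaders, and the helper names are illustrative):

#include <math.h>
#include <stdio.h>

static double smoothstep(double e0, double e1, double x)
{
    double t = (x - e0) / (e1 - e0);
    if (t < 0.0) t = 0.0;
    if (t > 1.0) t = 1.0;
    return t * t * (3.0 - 2.0 * t);
}

/* One stripe-pattern sample, matching the shader's arithmetic. */
static double stripe(double v, double dp, double frequency)
{
    double sawtooth = v * frequency - floor(v * frequency);
    double triangle = fabs(2.0 * sawtooth - 1.0);
    double edge     = dp * frequency * 2.0;  /* transition width */
    return smoothstep(0.5 - edge, 0.5 + edge, triangle);
}

int main(void)
{
    const double frequency = 16.0;
    /* Small dp: pattern magnified (crisp); larger dp: minified (soft). */
    for (int i = 0; i <= 10; ++i) {
        double v = i / 10.0;
        printf("v=%.2f  sharp=%.3f  filtered=%.3f\n",
               v, stripe(v, 0.0005, frequency), stripe(v, 0.02, frequency));
    }
    return 0;
}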
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AdaptiveAAVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AdaptiveAAVert.glsl
deleted file mode 100644
index c0cd847..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch14/Ch14AdaptiveAAVert.glsl
+++ /dev/null
@@ -1,28 +0,0 @@
-//
-// Vertex shader for adaptively antialiasing a procedural stripe pattern
-//
-// Author: Randi Rost
-//         based on a shader by Bert Freudenberg
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3  LightPosition;
-
-varying float V;
-varying float LightIntensity;
-
-void main(void)
-{
-    vec3 pos        = vec3(gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - pos);
-
-    LightIntensity = max(dot(lightVec, tnorm), 0.0);
-
-    V = gl_MultiTexCoord0.s;  // try .s for vertical stripes
-
-    gl_Position = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Gooch.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Gooch.xml
deleted file mode 100644
index 55510fc..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Gooch.xml
+++ /dev/null
@@ -1,52 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">
-
-
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.75 0.75"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.6 0.6 0.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.0 0.0 0.6"> </Member>
-
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.45"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="Ch15GoochVert" location="OrangeBook/Ch15/Ch15GoochVert.glsl" language="GLSL" entry="main" args="-DVERTEX_PROGRAM">
-
-    <Uniform type="vec3" name="LightPosition" number_of_elements="3" value="0.0 10.0 4.0"> </Uniform>
-    <!--
-    <LightUniform  value="Position" name="LightPosition" > </LightUniform>
-    -->
-
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="Ch15GoochFrag" location="OrangeBook/Ch15/Ch15GoochFrag.glsl" language="GLSL" entry="fragment_program" args="-DFRAGMENT_PROGRAM">
-
-    <PropertyUniform  value="AmbientColor" name="SurfaceColor"> </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="WarmColor"> </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="CoolColor"> </PropertyUniform>
-
-    <PropertyUniform  value="Ambient" name="DiffuseWarm"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="DiffuseCool"> </PropertyUniform>
-
-    <!--
-    <varying type="Float" name="NdotL" Index="1">
-    </varying>
-    <varying type="vec3" name="ReflectVec" Index="1">
-    </varying>
-    <varying type="vec3" name="ViewVec" Index="1">
-    </varying>
-    -->
-
-
-
-  </Shader>
-
-
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15GoochFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15GoochFrag.glsl
deleted file mode 100644
index d9d0ce5..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15GoochFrag.glsl
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Fragment shader for Gooch shading
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15GoochVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15GoochVert.glsl
deleted file mode 100644
index 3b4c11c..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15GoochVert.glsl
+++ /dev/null
@@ -1,26 +0,0 @@
-//
-// Vertex shader for Gooch shading
-//
-// Author: Randi Rost
-//
-// Copyright (c) 2002-2004 3Dlabs Inc. Ltd.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3  LightPosition;  // (0.0, 10.0, 4.0)
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-void main(void)
-{
-    vec3 ecPos      = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPos);
-    ReflectVec      = normalize(reflect(-lightVec, tnorm));
-    ViewVec         = normalize(-ecPos);
-    NdotL           = (dot(lightVec, tnorm) + 1.0) * 0.5;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Hatch.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Hatch.xml
deleted file mode 100644
index 21c8394..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Hatch.xml
+++ /dev/null
@@ -1,37 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-<Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="1.0 1.0 1.0"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch15/Ch15HatchVert.glsl" location="OrangeBook/Ch15/Ch15HatchVert.glsl"  language="GLSL" entry="main">
-    <Uniform type="vec3" name="LightPosition" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="Time" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch15/Ch15HatchFrag.glsl" location="OrangeBook/Ch15/Ch15HatchFrag.glsl"  language="GLSL" entry="main">
-    <!--
-    <SamplerUniform type="sampler3D" name="Noise" value="0"> </SamplerUniform>
-    -->
-    <Uniform type="sampler3D" name="Noise" number_of_elements="1" value="0"> </Uniform>
-    <Uniform type="float" name="Swidth" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15HatchFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15HatchFrag.glsl
deleted file mode 100644
index 75ff1eb..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15HatchFrag.glsl
+++ /dev/null
@@ -1,48 +0,0 @@
-//
-// Fragment shader for procedurally generated hatching or "woodcut" appearance.
-//
-// This is an OpenGL 2.0 implementation of Scott F. Johnston's "Mock Media"
-// (from "Advanced RenderMan: Beyond the Companion" SIGGRAPH 98 Course Notes)
-//
-// Author: Bert Freudenberg <bert at isg.cs.uni-magdeburg.de>
-//
-// Copyright (c) 2002-2003 3Dlabs, Inc.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-const float frequency = 1.0;
-
-varying vec3  ObjPos;               // object space coord (noisy)
-varying float V;                    // generic varying
-varying float LightIntensity;
-
-uniform sampler3D Noise;            // value of Noise = 3;
-uniform float Swidth;               // relative width of stripes = 16.0
-
-void main (void)
-{
-    float dp       = length(vec2 (dFdx(V * Swidth), dFdy(V * Swidth)));
-    float logdp    = -log2(dp);
-    float ilogdp   = floor(logdp);
-    float stripes  = exp2(ilogdp);
-
-    float noise    = texture3D(Noise, ObjPos).x;
-
-    float sawtooth = fract((V + noise * 0.1) * frequency * stripes);
-    float triangle = abs(2.0 * sawtooth - 1.0);
-
-    // adjust line width
-    float transition = logdp - ilogdp;
-
-    // taper ends
-    triangle = abs((1.0 + transition) * triangle - transition);
-
-    const float edgew = 0.2;            // width of smooth step
-
-    float edge0  = clamp(LightIntensity - edgew, 0.0, 1.0);
-    float edge1  = clamp(LightIntensity, 0.0, 1.0);
-    float square = 1.0 - smoothstep(edge0, edge1, triangle);
-
-    gl_FragColor = vec4 (vec3 (square), 1.0);
-}
\ No newline at end of file
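For reference — the shader above derives a power-of-two stripe count from the screen-space derivative of the hatching coordinate (stripes = exp2(floor(-log2(dp)))) and uses the fractional part of -log2(dp) to blend toward the next level. A minimal standalone C sketch (not part of VTK; the sample dp values are made up for illustration) of that selection:

#include <math.h>
#include <stdio.h>

int main(void)
{
    /* dp stands in for length(vec2(dFdx(V * Swidth), dFdy(V * Swidth))). */
    const double dps[] = { 0.5, 0.25, 0.1, 0.05, 0.01 };
    const int n = sizeof(dps) / sizeof(dps[0]);

    for (int i = 0; i < n; ++i) {
        double dp         = dps[i];
        double logdp      = -log2(dp);
        double ilogdp     = floor(logdp);
        double stripes    = exp2(ilogdp);    /* power-of-two stripe count */
        double transition = logdp - ilogdp;  /* blend factor toward next level */
        printf("dp=%5.3f  stripes=%4.0f  transition=%.3f\n",
               dp, stripes, transition);
    }
    return 0;
}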
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15HatchVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15HatchVert.glsl
deleted file mode 100644
index 6a02d3f..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15HatchVert.glsl
+++ /dev/null
@@ -1,34 +0,0 @@
-//
-// Vertex shader for procedurally generated hatching or "woodcut" appearance.
-//
-// This is an OpenGL 2.0 implementation of Scott F. Johnston's "Mock Media"
-// (from "Advanced RenderMan: Beyond the Companion" SIGGRAPH 98 Course Notes)
-//
-// Author: Bert Freudenberg <bert at isg.cs.uni-magdeburg.de>
-//
-// Copyright (c) 2002-2003 3Dlabs, Inc.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3  LightPosition;
-uniform float Time;
-
-varying vec3  ObjPos;
-varying float V;
-varying float LightIntensity;
-
-void main(void)
-{
-    ObjPos          = (vec3 (gl_Vertex) + vec3 (0.0, 0.0, Time)) * 0.2;
-
-    vec3 pos        = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - pos);
-
-    LightIntensity  = max(dot(lightVec, tnorm), 0.0);
-
-    V = gl_MultiTexCoord0.t;  // try .s for vertical stripes
-
-    gl_Position = ftransform();
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Julia.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Julia.xml
deleted file mode 100644
index c12c3e4..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Julia.xml
+++ /dev/null
@@ -1,29 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Property name="Property1">
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.6"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="128.0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch15/Ch15JuliaVert.glsl" location="OrangeBook/Ch15/Ch15JuliaVert.glsl"  language="GLSL" entry="main">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-    <PropertyUniform  value="Specular" name="SpecularContribution"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="DiffuseContribution"> </PropertyUniform>
-    <PropertyUniform  value="SpecularPower" name="Shininess"> </PropertyUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch15/Ch15JuliaFrag.glsl" location="OrangeBook/Ch15/Ch15JuliaFrag.glsl"  language="GLSL" entry="main">
-    <Uniform type="float" name="MaxIterations" number_of_elements="1000" value="1.0"> </Uniform>
-    <Uniform type="float" name="Zoom" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="Xcenter" number_of_elements="1" value="0.2340"> </Uniform>
-    <Uniform type="float" name="Ycenter" number_of_elements="1" value="0.36450"> </Uniform>
-    <Uniform type="vec3" name="InnerColor" number_of_elements="3" value="1.0 0.0 0.0"> </Uniform>
-    <Uniform type="vec3" name="OuterColor1" number_of_elements="3" value="0.0 1.0 0.0"> </Uniform>
-    <Uniform type="vec3" name="OuterColor2" number_of_elements="3" value="0.0 0.0 1.0"> </Uniform>
-    <Uniform type="float" name="Creal" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="Cimag" number_of_elements="1" value="1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15JuliaFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15JuliaFrag.glsl
deleted file mode 100644
index 10cfa13..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15JuliaFrag.glsl
+++ /dev/null
@@ -1,56 +0,0 @@
-//
-// Fragment shader for drawing Julia sets
-//
-// Authors: Dave Baldwin, Steve Koren, Randi Rost
-//          based on a shader by Michael Rivero
-//
-// Copyright (c) 2002-2004: 3Dlabs, Inc.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying vec3  Position;
-varying float LightIntensity;
-
-uniform float MaxIterations;
-uniform float Zoom;
-uniform float Xcenter;
-uniform float Ycenter;
-uniform vec3  InnerColor;
-uniform vec3  OuterColor1;
-uniform vec3  OuterColor2;
-uniform float Creal;
-uniform float Cimag;
-
-void main(void)
-{
-    float   real  = Position.x * Zoom + Xcenter;
-    float   imag  = Position.y * Zoom + Ycenter;
-    //float   Creal = real;   // Change this line...
-    //float   Cimag = imag;   // ...and this one to get a Julia set
-
-    float r2 = 0.0;
-    float iter;
-
-    for (iter = 0.0; iter < MaxIterations && r2 < 4.0; ++iter)
-    {
-        float tempreal = real;
-
-        real = (tempreal * tempreal) - (imag * imag) + Creal;
-        imag = 2.0 * tempreal * imag + Cimag;
-        r2   = (real * real) + (imag * imag);
-    }
-
-    // Base the color on the number of iterations
-
-    vec3 color;
-
-    if (r2 < 4.0)
-        color = InnerColor;
-    else
-        color = mix(OuterColor1, OuterColor2, fract(iter * 0.05));
-
-    color *= LightIntensity;
-
-    gl_FragColor = vec4 (color, 1.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15JuliaVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15JuliaVert.glsl
deleted file mode 100644
index d6e3e23..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15JuliaVert.glsl
+++ /dev/null
@@ -1,35 +0,0 @@
-//
-// Vertex shader for drawing Julia sets
-//
-// Authors: Dave Baldwin, Steve Koren, Randi Rost
-//          based on a shader by Michael Rivero
-//
-// Copyright (c) 2002-2004: 3Dlabs, Inc.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3 LightPosition;
-uniform float SpecularContribution;
-uniform float DiffuseContribution;
-uniform float Shininess;
-
-varying float LightIntensity;
-varying vec3  Position;
-
-void main(void)
-{
-    vec3 ecPosition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPosition);
-    vec3 reflectVec = reflect(-lightVec, tnorm);
-    vec3 viewVec    = normalize(-ecPosition);
-    float spec      = max(dot(reflectVec, viewVec), 0.0);
-    spec            = pow(spec, Shininess);
-    LightIntensity  = DiffuseContribution *
-                          max(dot(lightVec, tnorm), 0.0) +
-                          SpecularContribution * spec;
-    Position        = vec3(gl_MultiTexCoord0 - 0.5) * 5.0;
-    gl_Position     = ftransform();
-
-}
\ No newline at end of file
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Mandel.xml b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Mandel.xml
deleted file mode 100644
index 8591174..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15Mandel.xml
+++ /dev/null
@@ -1,34 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material>
-
-  <Property name="Property1">
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.6"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="128.0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="OrangeBook/Ch15/Ch15MandelVert.glsl" location="OrangeBook/Ch15/Ch15MandelVert.glsl"  language="GLSL" entry="main">
-    <!--
-    <Uniform type="vec3" name="LightPosition" number_of_elements="3" value="1.0 1.0 1.0"> </Uniform>
-    <Uniform type="float" name="SpecularContribution" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="DiffuseContribution" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="Shininess" number_of_elements="1" value="1.0"> </Uniform>
-    -->
-
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-    <PropertyUniform  value="Specular" name="SpecularContribution"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="DiffuseContribution"> </PropertyUniform>
-    <PropertyUniform  value="SpecularPower" name="Shininess"> </PropertyUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="OrangeBook/Ch15/Ch15MandelFrag.glsl" location="OrangeBook/Ch15/Ch15MandelFrag.glsl"  language="GLSL" entry="main">
-    <Uniform type="float" name="MaxIterations" number_of_elements="1000" value="1.0"> </Uniform>
-    <Uniform type="float" name="Zoom" number_of_elements="1" value="1.0"> </Uniform>
-    <Uniform type="float" name="Xcenter" number_of_elements="1" value="0.2340"> </Uniform>
-    <Uniform type="float" name="Ycenter" number_of_elements="1" value="0.36450"> </Uniform>
-    <Uniform type="vec3" name="InnerColor" number_of_elements="3" value="1.0 0.0 0.0"> </Uniform>
-    <Uniform type="vec3" name="OuterColor1" number_of_elements="3" value="0.0 1.0 0.0"> </Uniform>
-    <Uniform type="vec3" name="OuterColor2" number_of_elements="3" value="0.0 0.0 1.0"> </Uniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15MandelFrag.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15MandelFrag.glsl
deleted file mode 100644
index 9d132a8..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15MandelFrag.glsl
+++ /dev/null
@@ -1,54 +0,0 @@
-//
-// Fragment shader for drawing the Mandelbrot set
-//
-// Authors: Dave Baldwin, Steve Koren, Randi Rost
-//          based on a shader by Michael Rivero
-//
-// Copyright (c) 2002-2004: 3Dlabs, Inc.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-varying vec3  Position;
-varying float LightIntensity;
-
-uniform float MaxIterations;
-uniform float Zoom;
-uniform float Xcenter;
-uniform float Ycenter;
-uniform vec3  InnerColor;
-uniform vec3  OuterColor1;
-uniform vec3  OuterColor2;
-
-void main(void)
-{
-    float   real  = Position.x * Zoom + Xcenter;
-    float   imag  = Position.y * Zoom + Ycenter;
-    float   Creal = real;   // Change this line...
-    float   Cimag = imag;   // ...and this one to get a Julia set
-
-    float r2 = 0.0;
-    float iter;
-
-    for (iter = 0.0; iter < MaxIterations && r2 < 4.0; ++iter)
-    {
-        float tempreal = real;
-
-        real = (tempreal * tempreal) - (imag * imag) + Creal;
-        imag = 2.0 * tempreal * imag + Cimag;
-        r2   = (real * real) + (imag * imag);
-    }
-
-    // Base the color on the number of iterations
-
-    vec3 color;
-
-    if (r2 < 4.0)
-        color = InnerColor;
-    else
-        color = mix(OuterColor1, OuterColor2, fract(iter * 0.05));
-
-    color *= LightIntensity;
-
-    gl_FragColor = vec4 (color, 1.0);
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15MandelVert.glsl b/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15MandelVert.glsl
deleted file mode 100644
index 53024f1..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Ch15/Ch15MandelVert.glsl
+++ /dev/null
@@ -1,40 +0,0 @@
-//
-// Vertex shader for drawing the Mandelbrot set
-//
-// Authors: Dave Baldwin, Steve Koren, Randi Rost
-//          based on a shader by Michael Rivero
-//
-// Copyright (c) 2002-2004: 3Dlabs, Inc.
-//
-// See 3Dlabs-License.txt for license information
-//
-
-uniform vec3 LightPosition;
-uniform float SpecularContribution;
-uniform float DiffuseContribution;
-uniform float Shininess;
-
-varying float LightIntensity;
-varying vec3  Position;
-
-void main(void)
-{
-    vec3 ecPosition = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 ecLightPosition = vec3 (gl_ModelViewMatrix * vec4(LightPosition,1.0));
-
-
-
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(ecLightPosition - ecPosition);
-
-    vec3 reflectVec = reflect(-lightVec, tnorm);
-    vec3 viewVec    = normalize(-ecPosition);
-    float spec      = max(dot(reflectVec, viewVec), 0.0);
-    spec            = pow(spec, Shininess);
-    LightIntensity  = DiffuseContribution *
-                          max(dot(lightVec, tnorm), 0.0) +
-                          SpecularContribution * spec;
-    Position        = vec3(gl_MultiTexCoord0 - 0.5) * 5.0;
-    gl_Position     = ftransform();
-
-}
diff --git a/Utilities/MaterialLibrary/Repository/OrangeBook/Readme.txt b/Utilities/MaterialLibrary/Repository/OrangeBook/Readme.txt
deleted file mode 100644
index 558154d..0000000
--- a/Utilities/MaterialLibrary/Repository/OrangeBook/Readme.txt
+++ /dev/null
@@ -1,7 +0,0 @@
-This subdirectory contains materials that apply shaders from the
-'OrangeBook', "The OpenGL Shading Language" by Randi J. Rost.
-
-Some of these material shaders work fine, some don't. The main reason is
-that VTK currently can only populate 4 vertex attributes and a
-single 2D texture. Work is currently being done to overcome this
-and allow VTK full access to set these values.
diff --git a/Utilities/MaterialLibrary/Repository/README.txt b/Utilities/MaterialLibrary/Repository/README.txt
deleted file mode 100644
index 77ca997..0000000
--- a/Utilities/MaterialLibrary/Repository/README.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-This directory includes additional Shaders/Materials
-which are not embedded into VTK. For these shaders to work,
-this directory must be one of the directories specified in
-VTK_MATERIALS_DIRS.
-
-
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/AppVarFragmentTest.cg b/Utilities/MaterialLibrary/Repository/TestCg/AppVarFragmentTest.cg
deleted file mode 100644
index dc04718..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/AppVarFragmentTest.cg
+++ /dev/null
@@ -1,51 +0,0 @@
-// don't reference TEXCOORD0, it's an alias for POSITION
-void fragment_program( in float4 normal : TEXCOORD1,
-                       in float4 col    : COLOR0,
-                       out float4 color  : COLOR,
-
-                       uniform float3 lightFocalPoint,
-                       uniform float3 lightPosition,
-
-                       uniform float3 cameraFocalPoint,
-                       uniform float3 cameraPosition,
-
-                       // Testing vars
-                       uniform float4 appVara,
-                       uniform float4 appVarb,
-                       uniform float4 appVarc,
-                       uniform float4 appVard,
-                       uniform float4 appVare,
-                       uniform float4 appVarf,
-                       uniform float4 appVarg
-
-                     )
-{
-  float3 lightVec = lightFocalPoint - lightPosition;
-  float3 eyeVec   = cameraFocalPoint - cameraPosition;
-  float ne = dot( normal.xyz, normalize(eyeVec) );
-
-  // Halfway Vector varies once per object
-  float3 H = normalize( lightVec + eyeVec );
-
-  float diffuse = max(0.0, dot(normalize(normal.xyz),normalize(lightVec)));
-
-  // check Vector inputs, object is red upon error conditions
-  if( 0
-    || appVara.x != 0.37714 || appVara.y != 0.61465 || appVara.z != 0.48399 || appVara.w != 0.68252
-    || appVarb.x != 0.03900 || appVarb.y != 0.15857 || appVarb.z != 0.57913 || appVarb.w != 0.54458
-    || appVarc.x != 0.97061 || appVarc.y != 0.86053 || appVarc.z != 0.63583 || appVarc.w != 0.51058
-    || appVard.x != 0.12885 || appVard.y != 0.91490 || appVard.z != 0.86394 || appVard.w != 0.58951
-    || appVare.x != 0.23403 || appVare.y != 0.35340 || appVare.z != 0.52559 || appVare.w != 0.77830
-    || appVarf.x != 0.19550 || appVarf.y != 0.17429 || appVarf.z != 0.89958 || appVarf.w != 0.15063
-    || appVarg.x != 0.75796 || appVarg.y != 0.48072 || appVarg.z != 0.07728 || appVarg.w != 0.16434
-    )
-    {
-    color = float4( 1.0, 0.0, 0.0, 1.0 );
-    }
-  else
-    {
-    color = (0.5 + diffuse) * col;
-    color.a = 1.0;
-    }
-
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/AppVarTest.xml b/Utilities/MaterialLibrary/Repository/TestCg/AppVarTest.xml
deleted file mode 100644
index 64809ab..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/AppVarTest.xml
+++ /dev/null
@@ -1,62 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="VertexTest" location="TestCg/Vertex.cg" language="Cg" entry="main" args="-DVERTEX_PROGRAM">
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="ModelViewIT" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE"> </MatrixUniform>
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="TestAppVarFrag" location="TestCg/AppVarFragmentTest.cg" language="Cg" entry="fragment_program" args="-DFRAGMENT_PROGRAM">
-
-    <!-- these values are for rendering
-    <PropertyUniform  value="AmbientColor" name="SurfaceColor"> </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="WarmColor"> </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="CoolColor"> </PropertyUniform>
-    <PropertyUniform  value="Ambient" name="DiffuseWarm"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="DiffuseCool"> </PropertyUniform>
-    -->
-
-    <LightUniform  value="Position" name="lightPosition"> </LightUniform>
-    <LightUniform  value="FocalPoint" name="lightFocalPoint"> </LightUniform>
-
-    <CameraUniform  value="Position" name="cameraPosition"> </CameraUniform>
-    <CameraUniform  value="FocalPoint" name="cameraFocalPoint"> </CameraUniform>
-
-    <!-- these values are for testing
-    -->
-    <ApplicationUniform value="appVar1" name="appVara"> </ApplicationUniform>
-    <ApplicationUniform value="appVar2" name="appVarb"> </ApplicationUniform>
-    <ApplicationUniform value="appVar3" name="appVarc"> </ApplicationUniform>
-    <ApplicationUniform value="appVar4" name="appVard"> </ApplicationUniform>
-    <ApplicationUniform value="appVar5" name="appVare"> </ApplicationUniform>
-    <ApplicationUniform value="appVar6" name="appVarf"> </ApplicationUniform>
-    <ApplicationUniform value="appVar7" name="appVarg"> </ApplicationUniform>
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/MatrixFragmentTest.cg b/Utilities/MaterialLibrary/Repository/TestCg/MatrixFragmentTest.cg
deleted file mode 100644
index 24d1440..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/MatrixFragmentTest.cg
+++ /dev/null
@@ -1,79 +0,0 @@
-// Rendering variables
-//
-// V - unit vector in direction of viewer
-// L - unit vector in direction of light source
-// H - unit vector that bisects V and L
-// N - unit vector in normal direction
-//
-// theta - angle between H and V or H and L
-// alpha - angle between N and H
-
-
-
-// don't reference TEXCOORD0, it's an alias for POSITION
-void fragment_program( in float4 pos    : POSITION,
-                       in float4 normal : TEXCOORD1,
-                       in float4 col    : COLOR0,
-                       out float4 color  : COLOR,
-
-                       uniform float4x4 testFloat4x4,
-                       uniform float4x4 testDouble4x4,
-
-                       uniform float3 lightFocalPoint,
-                       uniform float3 lightPosition,
-
-                       uniform float3 cameraFocalPoint,
-                       uniform float3 cameraPosition
-
-                     )
-{
-  float3 lightVec = lightFocalPoint - lightPosition;
-  float3 eyeVec   = cameraFocalPoint - cameraPosition;
-  float ne = dot( normal.xyz, normalize(eyeVec) );
-
-  // Halfway Vector varies once per object
-  float3 H = normalize( lightVec + eyeVec );
-
-  float diffuse = max(0.0, dot(normalize(normal.xyz),normalize(lightVec)));
-
-  // check Vector inputs, object is red upon error conditions
-  if( 0
-      || testFloat4x4[0][0]!=99.43669 || testFloat4x4[0][1]!=53.27563
-      || testFloat4x4[0][2]!=31.72327 || testFloat4x4[0][3]!=46.41460
-
-      || testFloat4x4[1][0]!= 3.92156 || testFloat4x4[1][1]!=36.45097
-      || testFloat4x4[1][2]!=32.70953 || testFloat4x4[1][3]!=62.55845
-
-      || testFloat4x4[2][0]!= 8.42773 || testFloat4x4[2][1]!=55.26141
-      || testFloat4x4[2][2]!=14.26995 || testFloat4x4[2][3]!=27.33909
-
-      || testFloat4x4[3][0]!=45.95263 || testFloat4x4[3][1]!=72.71652
-      || testFloat4x4[3][2]!=74.05554 || testFloat4x4[3][3]!=83.90914
-
-
-      || testDouble4x4[0][0]!=17.68202 || testDouble4x4[0][1]!=20.61210
-      || testDouble4x4[0][2]!=90.67103 || testDouble4x4[0][3]!=0.10092
-
-      || testDouble4x4[1][0]!=30.20822 || testDouble4x4[1][1]!=45.71106
-      || testDouble4x4[1][2]!=59.58049 || testDouble4x4[1][3]!=48.07516
-
-      || testDouble4x4[2][0]!=92.06191 || testDouble4x4[2][1]!=92.43174
-      || testDouble4x4[2][2]!=90.71167 || testDouble4x4[2][3]!=39.48975
-
-      || testDouble4x4[3][0]!=77.99728 || testDouble4x4[3][1]!=51.03002
-      || testDouble4x4[3][2]!=12.48477 || testDouble4x4[3][3]!=48.91196
-
-      )
-    {
-    color = float4( 1.0, 0.0, 0.0, 1.0 );
-    }
-  else
-    {
-    // Lambertian Shading
-    //color.rgb = MyAmbient * col;
-    // Phong Shading
-    color = (0.5 + diffuse) * col;
-    color.a = 1.0;
-    }
-
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/MatrixTest.xml b/Utilities/MaterialLibrary/Repository/TestCg/MatrixTest.xml
deleted file mode 100644
index 6eed753..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/MatrixTest.xml
+++ /dev/null
@@ -1,64 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="Vertex" location="TestCg/Vertex.cg" language="Cg" entry="main" args="-DVERTEX_PROGRAM">
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2"
-      value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="ModelViewIT" type="State" number_of_elements="2"
-      value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE"> </MatrixUniform>
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="TestMatrixFrag" location="TestCg/MatrixFragmentTest.cg" language="Cg" entry="fragment_program" args="-DFRAGMENT_PROGRAM">
-
-    <!-- these values are for rendering -->
-    <LightUniform  value="Position" name="lightPosition"> </LightUniform>
-    <LightUniform  value="FocalPoint" name="lightFocalPoint"> </LightUniform>
-
-    <CameraUniform  value="Position" name="cameraPosition"> </CameraUniform>
-    <CameraUniform  value="FocalPoint" name="cameraFocalPoint"> </CameraUniform>
-    <!-- end rendering values -->
-
-    <!-- these values are for testing -->
-    <MatrixUniform type="float" name="testFloat4x4" number_of_rows="4" number_of_columns="4" order="RowMajor"
-      number_of_elements="16"
-      value="99.43669 53.27563 31.72327 46.41460
-              3.92156 36.45097 32.70953 62.55845
-              8.42773 55.26141 14.26995 27.33909
-             45.95263 72.71652 74.05554 83.90914"> </MatrixUniform>
-    <MatrixUniform type="double" name="testDouble4x4" number_of_rows="4" number_of_columns="4" order="RowMajor"
-      number_of_elements="16"
-      value="17.68202 20.61210 90.67103 0.10092
-             30.20822 45.71106 59.58049 48.07516
-             92.06191 92.43174 90.71167 39.48975
-             77.99728 51.03002 12.48477 48.91196"> </MatrixUniform>
-    <!-- end testing values -->
-
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/ModelViewXFormTest.xml b/Utilities/MaterialLibrary/Repository/TestCg/ModelViewXFormTest.xml
deleted file mode 100644
index 3aa3a60..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/ModelViewXFormTest.xml
+++ /dev/null
@@ -1,36 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.15 0.151 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="VertexTest" location="TestCg/ModelViewXFormVertexTest.cg" language="Cg" entry="main" args="-DVERTEX_PROGRAM">
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="ModelView" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="Proj" type="State" number_of_elements="2" value="CG_GL_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="ModelViewIT" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE"> </MatrixUniform>
-  </Shader>
-
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/ModelViewXFormVertexTest.cg b/Utilities/MaterialLibrary/Repository/TestCg/ModelViewXFormVertexTest.cg
deleted file mode 100644
index d5ee28c..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/ModelViewXFormVertexTest.cg
+++ /dev/null
@@ -1,83 +0,0 @@
-/*********************************************************************NVMH3****
-Path:  NVSDK\Common\media\programs
-File:  simple.cg
-
-Copyright NVIDIA Corporation 2002
-TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THIS SOFTWARE IS PROVIDED
-*AS IS* AND NVIDIA AND ITS SUPPLIERS DISCLAIM ALL WARRANTIES, EITHER EXPRESS
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE.  IN NO EVENT SHALL NVIDIA OR ITS SUPPLIERS
-BE LIABLE FOR ANY SPECIAL, INCIDENTAL, INDIRECT, OR CONSEQUENTIAL DAMAGES
-WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF BUSINESS PROFITS,
-BUSINESS INTERRUPTION, LOSS OF BUSINESS INFORMATION, OR ANY OTHER PECUNIARY LOSS)
-ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF NVIDIA HAS
-BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-
-Comments:
-
-******************************************************************************/
-
-// define inputs from application
-struct appin
-{
-    float4 Position : POSITION;
-    float4 Color    : COLOR0;
-    float4 Normal   : NORMAL;
-};
-
-// define outputs from vertex shader
-struct vertout
-{
-    float4 HPosition : POSITION;
-    float4 Normal    : TEXCOORD1;
-    float4 Color0    : COLOR0;
-};
-
-vertout main(appin IN,
-             uniform float4x4 ModelView,
-             uniform float4x4 Proj,
-             uniform float4x4 ModelViewProj,
-             uniform float4x4 ModelViewIT )
-{
-    vertout OUT;
-
-    float4 temp1 = mul( ModelView, IN.Position );
-    float4 temp2 = mul( Proj, IN.Position );
-
-    // transform vertex position into homogenous clip-space
-    OUT.HPosition = mul(ModelViewProj, IN.Position);
-    OUT.Normal = normalize( mul(ModelViewIT, IN.Normal));
-    // combine diffuse and specular contributions and output final vertex color
-    OUT.Color0 = IN.Color + 0.0001 * temp1 + 0.0001 * temp2;
-
-    if( 1
-#if 1
-        && ModelViewProj[0][0]==1.0
-        && ModelViewProj[0][1]==0.0
-        && ModelViewProj[0][2]==0.0
-        && ModelViewProj[0][3]==0.0
-
-        && ModelViewProj[1][0]==0.0
-        && ModelViewProj[1][1]==1.0
-        && ModelViewProj[1][2]==0.0
-        && ModelViewProj[1][3]==0.0
-
-        && ModelViewProj[2][0]==0.0
-        && ModelViewProj[2][1]==0.0
-        && ModelViewProj[2][2]==1.0
-        && ModelViewProj[2][3]==0.0
-
-        && ModelViewProj[3][0]==0.0
-        && ModelViewProj[3][1]==0.0
-        && ModelViewProj[3][2]==0.0
-#endif
-        && ModelViewProj[3][3]==1.0
-      )
-    {
-    OUT.Color0 = float4(1.0, 0.0, 0.0, 1.0);
-    }
-
-    OUT.Color0 = float4(1.0, 0.0, 0.0, 1.0);
-    return OUT;
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/ScalarVectorFragmentTest.cg b/Utilities/MaterialLibrary/Repository/TestCg/ScalarVectorFragmentTest.cg
deleted file mode 100644
index 0a3773b..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/ScalarVectorFragmentTest.cg
+++ /dev/null
@@ -1,79 +0,0 @@
-// Rendering variables
-//
-// V - unit vector in direction of viewer
-// L - unit vector in direction of light source
-// H - unit vector that bisects V and L
-// N - unit vector in normal direction
-//
-// theta - angle between H and V or H and L
-// alpha - angle between N and H
-
-
-
-// don't reference TEXCOORD0, it's an alias for POSITION
-void fragment_program( in float4 pos    : POSITION,
-                       in float4 normal : TEXCOORD1,
-                       in float4 col    : COLOR0,
-                       out float4 color  : COLOR,
-
-                       uniform float  testFloat,
-                       uniform float1 testFloat1,
-                       uniform float2 testFloat2,
-                       uniform float3 testFloat3,
-                       uniform float4 testFloat4,
-
-                       // when these parameters are declared as doubleN
-                       // an error is generated when the application
-                       // tries to set their value.
-                       uniform float testDouble,
-                       uniform float1 testDouble1,
-                       uniform float2 testDouble2,
-                       uniform float3 testDouble3,
-                       uniform float4 testDouble4,
-
-                       uniform float3 lightFocalPoint,
-                       uniform float3 lightPosition,
-
-                       uniform float3 cameraFocalPoint,
-                       uniform float3 cameraPosition
-
-                     )
-{
-  float3 lightVec = lightFocalPoint - lightPosition;
-  float3 eyeVec   = cameraFocalPoint - cameraPosition;
-  float ne = dot( normal.xyz, normalize(eyeVec) );
-
-  // Halfway Vector varies once per object
-  float3 H = normalize( lightVec + eyeVec );
-
-  float diffuse = max(0.0, dot(normalize(normal.xyz),normalize(lightVec)));
-
-
-
-  // check Vector inputs, object is red upon error conditions
-  if( 0
-      || testFloat!=1.01
-      || testFloat1.r!=1.02
-      || testFloat2.r!=1.03 || testFloat2.g!=2.08
-      || testFloat3.r!=1.04 || testFloat3.g!=2.07 || testFloat3.b!=3.09
-      || testFloat4.r!=1.05 || testFloat4.g!=2.06 || testFloat4.b!=3.001 || testFloat4.a!=4.002
-
-      || testDouble!=10.01
-      || testDouble1.r!=10.02
-      || testDouble2.r!=10.03 || testDouble2.g!=20.08
-      || testDouble3.r!=10.04 || testDouble3.g!=20.07 || testDouble3.b!=30.09
-      || testDouble4.r!=10.05 || testDouble4.g!=20.06 || testDouble4.b!=30.001 || testDouble4.a!=40.002
-      )
-    {
-    color = float4( 1.0, 0.0, 0.0, 1.0 );
-    }
-  else
-    {
-    // Lambertian Shading
-    //color.rgb = MyAmbient * col;
-    // Phong Shading
-    color = (0.5 + diffuse) * col;
-    color.a = 1.0;
-    }
-
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/ScalarVectorTest.xml b/Utilities/MaterialLibrary/Repository/TestCg/ScalarVectorTest.xml
deleted file mode 100644
index 5453d5b..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/ScalarVectorTest.xml
+++ /dev/null
@@ -1,61 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="Vertex" location="TestCg/Vertex.cg" language="Cg" entry="main" args="-DVERTEX_PROGRAM">
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="ModelViewIT" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE"> </MatrixUniform>
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="TestScalarVectorFrag" location="TestCg/ScalarVectorFragmentTest.cg" language="Cg" entry="fragment_program" args="-DFRAGMENT_PROGRAM">
-
-    <!-- these values are for rendering -->
-    <LightUniform  value="Position" name="lightPosition"> </LightUniform>
-    <LightUniform  value="FocalPoint" name="lightFocalPoint"> </LightUniform>
-
-    <CameraUniform  value="Position" name="cameraPosition"> </CameraUniform>
-    <CameraUniform  value="FocalPoint" name="cameraFocalPoint"> </CameraUniform>
-    <!-- end rendering values -->
-
-    <!-- these values are for testing -->
-    <Uniform type="float" name="testFloat" number_of_elements="1" value="1.01"> </Uniform>
-    <Uniform type="float1" name="testFloat1" number_of_elements="1" value="1.02"> </Uniform>
-    <Uniform type="float2" name="testFloat2" number_of_elements="2" value="1.03 2.08"> </Uniform>
-    <Uniform type="float3" name="testFloat3" number_of_elements="3" value="1.04 2.07 3.09"> </Uniform>
-    <Uniform type="float3" name="testFloat4" number_of_elements="4" value="1.05 2.06 3.001 4.002"> </Uniform>
-
-    <Uniform type="double" name="testDouble" number_of_elements="1" value="10.01"> </Uniform>
-    <Uniform type="double1" name="testDouble1" number_of_elements="1" value="10.02"> </Uniform>
-    <Uniform type="double2" name="testDouble2" number_of_elements="2" value="10.03 20.08"> </Uniform>
-    <Uniform type="double3" name="testDouble3" number_of_elements="3" value="10.04 20.07 30.09"> </Uniform>
-    <Uniform type="double3" name="testDouble4" number_of_elements="4" value="10.05 20.06 30.001 40.002"> </Uniform>
-    <!-- end testing values -->
-
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/StructureFragmentTest.cg b/Utilities/MaterialLibrary/Repository/TestCg/StructureFragmentTest.cg
deleted file mode 100644
index 9feb574..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/StructureFragmentTest.cg
+++ /dev/null
@@ -1,77 +0,0 @@
-struct TestStructure
-{
-  uniform float f;
-  uniform float1 f1;
-  uniform float2 f2;
-  uniform float3 f3;
-  uniform float4 f4;
-};
-
-// don't reference TEXCOORD0, it's an alias for POSITION
-void fragment_program( in float4 normal : TEXCOORD1,
-                       in float4 col    : COLOR0,
-                       out float4 color  : COLOR,
-
-                       uniform TestStructure testStructure1,
-                       uniform TestStructure testStructure2,
-
-                       uniform float3 lightFocalPoint,
-                       uniform float3 lightPosition,
-
-                       uniform float3 cameraFocalPoint,
-                       uniform float3 cameraPosition
-
-                     )
-{
-  float3 lightVec = lightFocalPoint - lightPosition;
-  float3 eyeVec   = cameraFocalPoint - cameraPosition;
-  float ne = dot( normal.xyz, normalize(eyeVec) );
-
-  // Halfway Vector varies once per object
-  float3 H = normalize( lightVec + eyeVec );
-
-  float diffuse = max(0.0, dot(normalize(normal.xyz),normalize(lightVec)));
-
-  if( 0
-//    || testStructure1.f != 1.0
-
-//    || testStructure1.f1.x != 1.1
-
-//    || testStructure1.f2.x != 1.1
-//    || testStructure1.f2.y != 1.2
-
-//    || testStructure1.f3.x != 1.1
-//    || testStructure1.f3.y != 1.2
-//    || testStructure1.f3.z != 1.3
-
-//    || testStructure1.f4.x != 1.1
-//    || testStructure1.f4.y != 1.2
-//    || testStructure1.f4.z != 1.3
-//    || testStructure1.f4.w != 1.4
-
-    || testStructure2.f != 2.0
-
-    || testStructure2.f1.x != 2.1
-
-    || testStructure2.f2.x != 2.1
-    || testStructure2.f2.y != 2.2
-
-    || testStructure2.f3.x != 2.1
-    || testStructure2.f3.y != 2.2
-    || testStructure2.f3.z != 2.3
-
-    || testStructure2.f4.x != 2.1
-    || testStructure2.f4.y != 2.2
-    || testStructure2.f4.z != 2.3
-    || testStructure2.f4.w != 2.4
-    )
-    {
-    color = float4( 1.0, 0.0, 0.0, 1.0 );
-    }
-  else
-    {
-    color = (0.5 + diffuse) * col;
-    color.a = 1.0;
-    }
-
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/StructureTest.xml b/Utilities/MaterialLibrary/Repository/TestCg/StructureTest.xml
deleted file mode 100644
index 5cea19c..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/StructureTest.xml
+++ /dev/null
@@ -1,65 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="Vertex" location="TestCg/Vertex.cg" language="Cg" entry="main" args="-DVERTEX_PROGRAM">
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="ModelViewIT" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE"> </MatrixUniform>
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="TestCgStructureFrag" location="TestCg/StructureFragmentTest.cg" language="Cg" entry="fragment_program" args="-DFRAGMENT_PROGRAM">
-
-    <!-- these values are for rendering -->
-    <LightUniform  value="Position" name="lightPosition"> </LightUniform>
-    <LightUniform  value="FocalPoint" name="lightFocalPoint"> </LightUniform>
-
-    <CameraUniform  value="Position" name="cameraPosition"> </CameraUniform>
-    <CameraUniform  value="FocalPoint" name="cameraFocalPoint"> </CameraUniform>
-    <!-- end rendering values -->
-
-    <!-- these values are for testing -->
-    <Uniform type="float" name="testStructure2.f" number_of_elements="1" value="2.0"> </Uniform>
-    <Uniform type="float1" name="testStructure2.f1" number_of_elements="1" value="2.1"> </Uniform>
-    <Uniform type="float2" name="testStructure2.f2" number_of_elements="2" value="2.1 2.2"> </Uniform>
-    <Uniform type="float3" name="testStructure2.f3" number_of_elements="3" value="2.1 2.2 2.3"> </Uniform>
-    <Uniform type="float4" name="testStructure2.f4" number_of_elements="4" value="2.1 2.2 2.3 2.4"> </Uniform>
-
-    <!--
-    <Uniform type="struct" name="testStructure1">
-      <Uniform type="float" name="f" number_of_elements="1" value="1.0"> </Uniform>
-      <Uniform type="float1" name="f1" number_of_elements="1" value="1.1"> </Uniform>
-      <Uniform type="float2" name="f2" number_of_elements="2" value="1.1 1.2"> </Uniform>
-      <Uniform type="float3" name="f3" number_of_elements="3" value="1.1 1.2 1.3"> </Uniform>
-      <Uniform type="float4" name="f4" number_of_elements="4" value="1.1 1.2 1.3 1.4"> </Uniform>
-    </Uniform>
-    -->
-    <!-- end testing values -->
-
-  </Shader>
-
-</Material>
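[Editor's note] The Uniform entries above address individual members of the Cg struct with dotted names (testStructure2.f, testStructure2.f2, and so on), rather than setting the struct as a whole. The same member-by-member convention is used for GLSL struct uniforms, as in ScalarVectorTestFragment.xml further below (tStruct2.f). A small GLSL sketch of the declaration side, with names mirroring the deleted test rather than any VTK API:

    struct TestStructure
    {
      float f;
      vec2  f2;
      vec3  f3;
      vec4  f4;
    };

    // Each member is set individually by the host application under its dotted
    // name, e.g. "testStructure2.f2", exactly as the <Uniform> entries above do.
    uniform TestStructure testStructure2;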
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/Vertex.cg b/Utilities/MaterialLibrary/Repository/TestCg/Vertex.cg
deleted file mode 100644
index 5213bbc..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/Vertex.cg
+++ /dev/null
@@ -1,50 +0,0 @@
-/*********************************************************************NVMH3****
-Path:  NVSDK\Common\media\programs
-File:  simple.cg
-
-Copyright NVIDIA Corporation 2002
-TO THE MAXIMUM EXTENT PERMITTED BY APPLICABLE LAW, THIS SOFTWARE IS PROVIDED
-*AS IS* AND NVIDIA AND ITS SUPPLIERS DISCLAIM ALL WARRANTIES, EITHER EXPRESS
-OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, IMPLIED WARRANTIES OF MERCHANTABILITY
-AND FITNESS FOR A PARTICULAR PURPOSE.  IN NO EVENT SHALL NVIDIA OR ITS SUPPLIERS
-BE LIABLE FOR ANY SPECIAL, INCIDENTAL, INDIRECT, OR CONSEQUENTIAL DAMAGES
-WHATSOEVER (INCLUDING, WITHOUT LIMITATION, DAMAGES FOR LOSS OF BUSINESS PROFITS,
-BUSINESS INTERRUPTION, LOSS OF BUSINESS INFORMATION, OR ANY OTHER PECUNIARY LOSS)
-ARISING OUT OF THE USE OF OR INABILITY TO USE THIS SOFTWARE, EVEN IF NVIDIA HAS
-BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.
-
-
-Comments:
-
-******************************************************************************/
-
-// define inputs from application
-struct appin
-{
-    float4 Position : POSITION;
-    float4 Color    : COLOR0;
-    float4 Normal   : NORMAL;
-};
-
-// define outputs from vertex shader
-struct vertout
-{
-    float4 HPosition : POSITION;
-    float4 Normal    : TEXCOORD1;
-    float4 Color0    : COLOR0;
-};
-
-vertout main(appin IN,
-             uniform float4x4 ModelViewProj,
-             uniform float4x4 ModelViewIT )
-{
-    vertout OUT;
-
-    // transform vertex position into homogeneous clip-space
-    OUT.HPosition = mul(ModelViewProj, IN.Position);
-    OUT.Normal = normalize( mul(ModelViewIT, IN.Normal));
-    // combine diffuse and specular contributions and output final vertex color
-    OUT.Color0 = IN.Color;
-
-    return OUT;
-}
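[Editor's note] This deleted vertex program transforms the normal with ModelViewIT, which the accompanying XML binds to the inverse transpose of the modelview matrix; that is the standard way to keep normals perpendicular to surfaces under non-uniform scaling. A rough GLSL equivalent of that single step, with an illustrative helper name:

    uniform mat4 ModelViewIT;   // inverse transpose of the modelview matrix

    vec3 transformNormal(vec3 objectSpaceNormal)
    {
      // w = 0.0 so the translation part is ignored; renormalize to undo scaling.
      return normalize(vec3(ModelViewIT * vec4(objectSpaceNormal, 0.0)));
    }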
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/VtkPropertyFragmentTest.cg b/Utilities/MaterialLibrary/Repository/TestCg/VtkPropertyFragmentTest.cg
deleted file mode 100644
index de9a357..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/VtkPropertyFragmentTest.cg
+++ /dev/null
@@ -1,79 +0,0 @@
-// don't reference TEXCOORD0, it's an alias for POSITION
-void fragment_program( in float4 normal : TEXCOORD1,
-                       in float4 col    : COLOR0,
-                       out float4 color  : COLOR,
-
-                       uniform float3 AmbientColor,
-                       uniform float3 DiffuseColor,
-                       uniform float3 SpecularColor,
-                       uniform float3 EdgeColor,
-
-                       uniform float Ambient,
-                       uniform float Diffuse,
-                       uniform float Specular,
-                       uniform float SpecularPower,
-                       uniform float Opacity,
-
-                       uniform float PointSize,
-                       uniform float LineWidth,
-
-                      // Cg doesn't allow int types to be used as varying parameters
-                       uniform float LineStipplePattern,
-                       uniform float LineStippleRepeatFactor,
-                       uniform float Interpolation,
-                       uniform float Representation,
-                       uniform float EdgeVisibility,
-                       uniform float BackfaceCulling,
-                       uniform float FrontfaceCulling,
-
-                       uniform float3 lightFocalPoint,
-                       uniform float3 lightPosition,
-
-                       uniform float3 cameraFocalPoint,
-                       uniform float3 cameraPosition
-
-                     )
-{
-  float3 lightVec = lightFocalPoint - lightPosition;
-  float3 eyeVec   = cameraFocalPoint - cameraPosition;
-  float ne = dot( normal.xyz, normalize(eyeVec) );
-
-  // Halfway Vector varies once per object
-  float3 H = normalize( lightVec + eyeVec );
-
-  float diffuse = max(0.0, dot(normalize(normal.xyz),normalize(lightVec)));
-
-
-
-  // check Vector inputs, object is red upon error conditions
-  if( 0
-      || AmbientColor.x!=0.75 || AmbientColor.y!=0.751 || AmbientColor.z!=0.752
-      || DiffuseColor.x!=0.61 || DiffuseColor.y!=0.62 || DiffuseColor.z!=0.006
-      || SpecularColor.x!=0.001 || SpecularColor.y!=0.002 || SpecularColor.z!=0.61
-      || EdgeColor.x!=0.1 || EdgeColor.y!=0.2 || EdgeColor.z!=0.3
-      || Ambient!=0.45
-      || Diffuse!=0.451
-      || Specular!=0.4
-      || SpecularPower!=1.0
-      || Opacity!=1.0
-      || PointSize!=1.0
-      || LineWidth!=1.0
-      || LineStipplePattern!=0
-      || LineStippleRepeatFactor!=1
-      || Interpolation!=1
-      || Representation!=2
-      || EdgeVisibility!=0
-      || BackfaceCulling!=0
-      || FrontfaceCulling!=0
-
-      )
-    {
-    color = float4( 1.0, 0.0, 0.0, 1.0 );
-    }
-  else
-    {
-    color = (0.5 + diffuse) * col;
-    color.a = 1.0;
-    }
-
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestCg/VtkPropertyTest.xml b/Utilities/MaterialLibrary/Repository/TestCg/VtkPropertyTest.xml
deleted file mode 100644
index 9ea4421..0000000
--- a/Utilities/MaterialLibrary/Repository/TestCg/VtkPropertyTest.xml
+++ /dev/null
@@ -1,76 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-
-
-  <Shader scope="Vertex" name="Vertex" location="TestCg/Vertex.cg" language="Cg" entry="main" args="-DVERTEX_PROGRAM">
-    <MatrixUniform name="ModelViewProj" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_PROJECTION_MATRIX CG_GL_MATRIX_IDENTITY"> </MatrixUniform>
-    <MatrixUniform name="ModelViewIT" type="State" number_of_elements="2" value="CG_GL_MODELVIEW_MATRIX CG_GL_MATRIX_INVERSE_TRANSPOSE"> </MatrixUniform>
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="TestVtkPropertyFrag" location="TestCg/VtkPropertyFragmentTest.cg" language="Cg" entry="fragment_program" args="-DFRAGMENT_PROGRAM">
-
-    <!-- these values are for rendering
-    <PropertyUniform  value="AmbientColor" name="SurfaceColor"> </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="WarmColor"> </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="CoolColor"> </PropertyUniform>
-    <PropertyUniform  value="Ambient" name="DiffuseWarm"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="DiffuseCool"> </PropertyUniform>
-    -->
-
-    <LightUniform  value="Position" name="lightPosition"> </LightUniform>
-    <LightUniform  value="FocalPoint" name="lightFocalPoint"> </LightUniform>
-
-    <CameraUniform  value="Position" name="cameraPosition"> </CameraUniform>
-    <CameraUniform  value="FocalPoint" name="cameraFocalPoint"> </CameraUniform>
-
-    <!-- these values are for testing -->
-    <PropertyUniform  value="AmbientColor" name="AmbientColor" > </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="DiffuseColor" > </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="SpecularColor" > </PropertyUniform>
-    <PropertyUniform  value="EdgeColor" name="EdgeColor" > </PropertyUniform>
-
-    <PropertyUniform  value="Ambient" name="Ambient" > </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="Diffuse" > </PropertyUniform>
-    <PropertyUniform  value="Specular" name="Specular" > </PropertyUniform>
-    <PropertyUniform  value="SpecularPower" name="SpecularPower" > </PropertyUniform>
-
-    <PropertyUniform  value="Opacity" name="Opacity" > </PropertyUniform>
-
-    <PropertyUniform  value="PointSize" name="PointSize" > </PropertyUniform>
-    <PropertyUniform  value="LineWidth" name="LineWidth" > </PropertyUniform>
-    <PropertyUniform  value="LineStipplePattern" name="LineStipplePattern" > </PropertyUniform>
-    <PropertyUniform  value="LineStippleRepeatFactor" name="LineStippleRepeatFactor" > </PropertyUniform>
-    <PropertyUniform  value="Interpolation" name="Interpolation" > </PropertyUniform>
-    <PropertyUniform  value="Representation" name="Representation" > </PropertyUniform>
-    <PropertyUniform  value="EdgeVisibility" name="EdgeVisibility" > </PropertyUniform>
-    <PropertyUniform  value="BackfaceCulling" name="BackfaceCulling" > </PropertyUniform>
-    <PropertyUniform  value="FrontfaceCulling" name="FrontfaceCulling" > </PropertyUniform>
-
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/AppVarTestFragment.glsl b/Utilities/MaterialLibrary/Repository/TestGLSL/AppVarTestFragment.glsl
deleted file mode 100644
index 365c707..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/AppVarTestFragment.glsl
+++ /dev/null
@@ -1,110 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-
-
-uniform vec3 Color;
-uniform vec3 AmbientColor;
-uniform vec3 DiffuseColor;
-uniform vec3 SpecularColor;
-uniform vec3 EdgeColor;
-
-uniform float Ambient;
-uniform float Diffuse;
-uniform float Specular;
-uniform float SpecularPower;
-uniform float Opacity;
-
-uniform float PointSize;
-uniform float LineWidth;
-
-uniform int LineStipplePattern;
-uniform int LineStippleRepeatFactor;
-uniform int Interpolation;
-uniform int Representation;
-uniform int EdgeVisibility;
-uniform int BackfaceCulling;
-uniform int FrontfaceCulling;
-
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-uniform vec4 appVara;
-uniform vec4 appVarb;
-uniform vec4 appVarc;
-uniform vec4 appVard;
-uniform vec4 appVare;
-uniform vec4 appVarf;
-uniform vec4 appVarg;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-
-
-    if( 0
-        || appVara.x != 0.37714 || appVara.y != 0.61465 || appVara.z != 0.48399 || appVara.w != 0.68252
-        || appVarb.x != 0.03900 || appVarb.y != 0.15857 || appVarb.z != 0.57913 || appVarb.w != 0.54458
-        || appVarc.x != 0.97061 || appVarc.y != 0.86053 || appVarc.z != 0.63583 || appVarc.w != 0.51058
-        || appVard.x != 0.12885 || appVard.y != 0.91490 || appVard.z != 0.86394 || appVard.w != 0.58951
-        || appVare.x != 0.23403 || appVare.y != 0.35340 || appVare.z != 0.52559 || appVare.w != 0.77830
-        || appVarf.x != 0.19550 || appVarf.y != 0.17429 || appVarf.z != 0.89958 || appVarf.w != 0.15063
-        || appVarg.x != 0.75796 || appVarg.y != 0.48072 || appVarg.z != 0.07728 || appVarg.w != 0.16434
-      )
-      {
-      gl_FragColor = vec4 (1.0, 0.0, 0.0, 1.0);
-      }
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/AppVarTestFragment.xml b/Utilities/MaterialLibrary/Repository/TestGLSL/AppVarTestFragment.xml
deleted file mode 100644
index b2c8e47..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/AppVarTestFragment.xml
+++ /dev/null
@@ -1,49 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1" type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1" type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1" type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1" type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1" type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1" type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1" type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1" type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1" type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1" type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="TestGLSLScalarVectorVert" location="TestGLSL/Vertex.glsl" language="GLSL" entry="main" args="-DVERTEX_PROGRAM">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="TestAppVarFrag" location="TestGLSL/AppVarTestFragment.glsl" language="GLSL" entry="main" args="-DFRAGMENT_PROGRAM">
-    <!-- these values are for rendering -->
-    <PropertyUniform  value="AmbientColor" name="SurfaceColor"> </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="WarmColor"> </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="CoolColor"> </PropertyUniform>
-    <PropertyUniform  value="Ambient" name="DiffuseWarm"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="DiffuseCool"> </PropertyUniform>
-
-    <!-- these values are for testing -->
-    <ApplicationUniform value="appVar1" name="appVara"> </ApplicationUniform>
-    <ApplicationUniform value="appVar2" name="appVarb"> </ApplicationUniform>
-    <ApplicationUniform value="appVar3" name="appVarc"> </ApplicationUniform>
-    <ApplicationUniform value="appVar4" name="appVard"> </ApplicationUniform>
-    <ApplicationUniform value="appVar5" name="appVare"> </ApplicationUniform>
-    <ApplicationUniform value="appVar6" name="appVarf"> </ApplicationUniform>
-    <ApplicationUniform value="appVar7" name="appVarg"> </ApplicationUniform>
-
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/MatrixTestFragment.glsl b/Utilities/MaterialLibrary/Repository/TestGLSL/MatrixTestFragment.glsl
deleted file mode 100644
index f6e2fa6..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/MatrixTestFragment.glsl
+++ /dev/null
@@ -1,86 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-uniform mat2 testMat2;
-uniform mat3 testMat3;
-uniform mat4 testMat4;
-
-
-
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-
-    if( 0
-      || testMat2[0][0]!=44.38361 || testMat2[0][1]!=58.62439
-      || testMat2[1][0]!=22.02428 || testMat2[1][1]!=97.35272
-
-      || testMat3[0][0]!= 5.91774 || testMat3[0][1]!=25.49759 || testMat3[0][2]!=50.20272
-      || testMat3[1][0]!= 6.45461 || testMat3[1][1]!=42.84395 || testMat3[1][2]!=11.17144
-      || testMat3[2][0]!= 8.02892 || testMat3[2][1]!=29.76296 || testMat3[2][2]!= 1.92514
-
-      || testMat4[0][0]!=46.22906 || testMat4[0][1]!=11.80764 || testMat4[0][2]!= 5.07503 || testMat4[0][3]!=46.32990
-      || testMat4[1][0]!=39.79442 || testMat4[1][1]!=81.58471 || testMat4[1][2]!=52.86966 || testMat4[1][3]!=95.58122
-      || testMat4[2][0]!=35.94935 || testMat4[2][1]!=56.07540 || testMat4[2][2]!=81.56149 || testMat4[2][3]!=63.69266
-      || testMat4[3][0]!=28.87369 || testMat4[3][1]!=52.99193 || testMat4[3][2]!=69.44439 || testMat4[3][3]!=94.62996
-    )
-      {
-      gl_FragColor = vec4 (1.0, 0.0, 0.0, 1.0);
-      }
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/MatrixTestFragment.xml b/Utilities/MaterialLibrary/Repository/TestGLSL/MatrixTestFragment.xml
deleted file mode 100644
index 7e39afa..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/MatrixTestFragment.xml
+++ /dev/null
@@ -1,55 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="TestGLSLMatrixVert" location="TestGLSL/Vertex.glsl" language="GLSL" entry="main" args="-DVERTEX_PROGRAM">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-  </Shader>
-
-  <Shader scope="Fragment" name="TestGLSLMatrixFrag" location="TestGLSL/MatrixTestFragment.glsl" language="GLSL" entry="main" args="-DFRAGMENT_PROGRAM">
-    <!-- these values are for rendering -->
-    <PropertyUniform  value="AmbientColor" name="SurfaceColor"> </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="WarmColor"> </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="CoolColor"> </PropertyUniform>
-    <PropertyUniform  value="Ambient" name="DiffuseWarm"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="DiffuseCool"> </PropertyUniform>
-
-    <!-- these values are for testing -->
-    <MatrixUniform type="mat2" name="testMat2" number_of_elements="4" order="ColumnMajor"
-      value="44.38361 58.62439
-             22.02428 97.35272"> </MatrixUniform>
-
-    <MatrixUniform type="mat3" name="testMat3" number_of_elements="9" order="ColumnMajor"
-      value="5.91774 25.49759 50.20272
-             6.45461 42.84395 11.17144
-             8.02892 29.76296  1.92514"> </MatrixUniform>
-
-    <MatrixUniform type="mat4" name="testMat4" number_of_elements="16" order="ColumnMajor"
-      value="46.22906 11.80764  5.07503 46.32990
-             39.79442 81.58471 52.86966 95.58122
-             35.94935 56.07540 81.56149 63.69266
-             28.87369 52.99193 69.44439 94.62996"> </MatrixUniform>
-    <!-- end testing values -->
-  </Shader>
-</Material>
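[Editor's note] The MatrixUniform entries above declare order="ColumnMajor", so consecutive numbers in the value string fill one column at a time; the checks in MatrixTestFragment.glsl depend on this (for the mat2 above, testMat2[0][1] must equal 58.62439, since m[0] is the first column in GLSL). A tiny GLSL sketch of the same layout, using the values from the deleted XML:

    // GLSL matrix constructors are also column-major, so this reproduces the
    // layout the XML declares: m[0] is the first column.
    const mat2 m = mat2(44.38361, 58.62439,    // column 0
                        22.02428, 97.35272);   // column 1
    // m[0][1] == 58.62439 and m[1][0] == 22.02428, matching the shader's checks.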
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/ScalarVectorTestFragment.glsl b/Utilities/MaterialLibrary/Repository/TestGLSL/ScalarVectorTestFragment.glsl
deleted file mode 100644
index e22017f..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/ScalarVectorTestFragment.glsl
+++ /dev/null
@@ -1,120 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-uniform float testFloat;
-uniform vec2  testVec2;
-uniform vec3  testVec3;
-uniform vec4  testVec4;
-
-uniform int   testInt;
-uniform ivec2  testIVec2;
-uniform ivec3  testIVec3;
-uniform ivec4  testIVec4;
-
-uniform mat2 testMat2;
-uniform mat3 testMat3;
-uniform mat4 testMat4;
-
-struct tStruct {
-  float f;
-  vec2 f2;
-  vec3 f3;
-  vec4 f4;
-};
-
-uniform tStruct tStruct2;
-
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-
-
-    if( 0
-      || testFloat!=2.63749
-      || testVec2.x!=76.95621 || testVec2.y!= 50.41138
-      || testVec3.x!=76.96096 || testVec3.y!=63.27260 || testVec3.z!= 18.99907
-      || testVec4.x!=3.40330 || testVec4.y!=93.71665 || testVec4.z!=3.25358 || testVec4.w!=53.86765
-
-      || testInt!=55
-      || testIVec2.x!=33 || testIVec2.y!=14
-      || testIVec3.x!=13 || testIVec3.y!=97 || testIVec3.z!=86
-      || testIVec4.x!=20 || testIVec4.y!=76 || testIVec4.z!=36 || testIVec4.w!=57
-
-      || testMat2[0][0]!=47.50016 || testMat2[0][1]!=77.17215
-      || testMat2[1][0]!=93.53756 || testMat2[1][1]!=26.49386
-
-      || testMat3[0][0]!=20.76451 || testMat3[0][1]!=68.89935 || testMat3[0][2]!= 1.55911
-      || testMat3[1][0]!=61.52196 || testMat3[1][1]!=24.00890 || testMat3[1][2]!= 20.14159
-      || testMat3[2][0]!=12.79913 || testMat3[2][1]!=41.43690 || testMat3[2][2]!= 43.70222
-
-      || testMat4[0][0]!=78.48394 || testMat4[0][1]!=18.00379 || testMat4[0][2]!=58.96785 || testMat4[0][3]!= 39.45659
-      || testMat4[1][0]!=46.36133 || testMat4[1][1]!=20.19386 || testMat4[1][2]!=61.52903 || testMat4[1][3]!= 91.34887
-      || testMat4[2][0]!=35.78233 || testMat4[2][1]!=28.60134 || testMat4[2][2]!=23.23688 || testMat4[2][3]!=  4.85008
-      || testMat4[3][0]!=94.90575 || testMat4[3][1]!=43.35341 || testMat4[3][2]!=28.75032 || testMat4[3][3]!= 64.18256
-
-      || tStruct2.f!=80.79816
-      || tStruct2.f2.x!=55.42347 || tStruct2.f2.y!=84.07679
-      || tStruct2.f3.x!=93.45425 || tStruct2.f3.y!=17.00968 || tStruct2.f3.z!=77.06349
-      || tStruct2.f4.x!=44.63571 || tStruct2.f4.y!=62.42333 || tStruct2.f4.z!=86.14692 || tStruct2.f4.w!=1.50092
-
-    )
-      {
-      gl_FragColor = vec4 (1.0, 0.0, 0.0, 1.0);
-      }
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/ScalarVectorTestFragment.xml b/Utilities/MaterialLibrary/Repository/TestGLSL/ScalarVectorTestFragment.xml
deleted file mode 100644
index 0488656..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/ScalarVectorTestFragment.xml
+++ /dev/null
@@ -1,78 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="TestGLSLScalarVectorVert" location="TestGLSL/Vertex.glsl" language="GLSL" entry="main" args="-DVERTEX_PROGRAM">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="TestGLSLScalarVectorFrag" location="TestGLSL/ScalarVectorTestFragment.glsl" language="GLSL" entry="main" args="-DFRAGMENT_PROGRAM">
-
-    <!-- these values are for rendering -->
-    <PropertyUniform  value="AmbientColor"  name="SurfaceColor"> </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor"  name="WarmColor"> </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="CoolColor"> </PropertyUniform>
-    <PropertyUniform  value="Ambient"       name="DiffuseWarm"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse"       name="DiffuseCool"> </PropertyUniform>
-
-    <!-- these values are for testing -->
-    <Uniform type="float" name="testFloat" number_of_elements="1" value="2.63749"> </Uniform>
-
-    <Uniform type="vec2" name="testVec2" number_of_elements="2" value="76.95621 50.41138"> </Uniform>
-    <Uniform type="vec3" name="testVec3" number_of_elements="3" value="76.96096 63.27260 18.99907"> </Uniform>
-    <Uniform type="vec4" name="testVec4" number_of_elements="4" value=" 3.40330 93.71665  3.25358 53.86765"> </Uniform>
-
-    <Uniform type="int"   name="testInt"   number_of_elements="1" value="55"> </Uniform>
-    <Uniform type="ivec2" name="testIVec2" number_of_elements="2" value="33 14"> </Uniform>
-    <Uniform type="ivec3" name="testIVec3" number_of_elements="3" value="13 97 86"> </Uniform>
-    <Uniform type="ivec4" name="testIVec4" number_of_elements="4" value="20 76 36 57"> </Uniform>
-
-    <MatrixUniform type="mat2" name="testMat2" number_of_elements="4" order="ColumnMajor"
-      value="47.50016 77.17215
-             93.53756 26.49386"> </MatrixUniform>
-
-    <MatrixUniform type="mat3" name="testMat3" number_of_elements="9" order="ColumnMajor"
-      value="20.76451 68.89935  1.55911
-             61.52196 24.00890 20.14159
-             12.79913 41.43690 43.70222"> </MatrixUniform>
-
-    <MatrixUniform type="mat4" name="testMat4" number_of_elements="16" order="ColumnMajor"
-      value="78.48394 18.00379 58.96785 39.45659
-             46.36133 20.19386 61.52903 91.34887
-             35.78233 28.60134 23.23688  4.85008
-             94.90575 43.35341 28.75032 64.18256"> </MatrixUniform>
-
-    <Uniform type="float" name="tStruct2.f"  number_of_elements="1" value="80.79816"> </Uniform>
-    <Uniform type="vec2"  name="tStruct2.f2" number_of_elements="2" value="55.42347 84.07679"> </Uniform>
-    <Uniform type="vec3"  name="tStruct2.f3" number_of_elements="3" value="93.45425 17.00968 77.06349"> </Uniform>
-    <Uniform type="vec4"  name="tStruct2.f4" number_of_elements="4" value="44.63571 62.42333 86.14692 1.50092"> </Uniform>
-    <!-- end testing values -->
-
-
-
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/Vertex.glsl b/Utilities/MaterialLibrary/Repository/TestGLSL/Vertex.glsl
deleted file mode 100644
index 69feef9..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/Vertex.glsl
+++ /dev/null
@@ -1,62 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-uniform vec3  LightPosition;  // (0.0, 10.0, 4.0)
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-struct light
-{
-  vec3 position;  // GLSL does not allow the uniform qualifier on struct members
-  vec3 color;
-};
-
-void main(void)
-{
-    light l1;  // uniform declarations are not legal inside a function body
-    light l2;
-    vec3 ecPos      = vec3 (gl_ModelViewMatrix * gl_Vertex);
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPos);
-    ReflectVec      = normalize(reflect(-lightVec, tnorm));
-    ViewVec         = normalize(-ecPos);
-    NdotL           = (dot(lightVec, tnorm) + 1.0) * 0.5;
-    gl_Position     = ftransform();
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/VtkPropertyTestFragment.glsl b/Utilities/MaterialLibrary/Repository/TestGLSL/VtkPropertyTestFragment.glsl
deleted file mode 100644
index 1bf55d2..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/VtkPropertyTestFragment.glsl
+++ /dev/null
@@ -1,113 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-
-
-uniform vec3 Color;
-uniform vec3 AmbientColor;
-uniform vec3 DiffuseColor;
-uniform vec3 SpecularColor;
-uniform vec3 EdgeColor;
-
-uniform float Ambient;
-uniform float Diffuse;
-uniform float Specular;
-uniform float SpecularPower;
-uniform float Opacity;
-
-uniform float PointSize;
-uniform float LineWidth;
-
-uniform int LineStipplePattern;
-uniform int LineStippleRepeatFactor;
-uniform int Interpolation;
-uniform int Representation;
-uniform int EdgeVisibility;
-uniform int BackfaceCulling;
-uniform int FrontfaceCulling;
-
-
-uniform vec3  SurfaceColor; // (0.75, 0.75, 0.75)
-uniform vec3  WarmColor;    // (0.6, 0.6, 0.0)
-uniform vec3  CoolColor;    // (0.0, 0.0, 0.6)
-uniform float DiffuseWarm;  // 0.45
-uniform float DiffuseCool;  // 0.45
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-void main (void)
-{
-    vec3 kcool    = min(CoolColor + DiffuseCool * SurfaceColor, 1.0);
-    vec3 kwarm    = min(WarmColor + DiffuseWarm * SurfaceColor, 1.0);
-    vec3 kfinal   = mix(kcool, kwarm, NdotL);
-
-    vec3 nreflect = normalize(ReflectVec);
-    vec3 nview    = normalize(ViewVec);
-
-    float spec    = max(dot(nreflect, nview), 0.0);
-    spec          = pow(spec, 32.0);
-
-    gl_FragColor = vec4 (min(kfinal + spec, 1.0), 1.0);
-
-
-    if( 0
-      || AmbientColor.x!=0.75 || AmbientColor.y!=0.751 || AmbientColor.z!=0.752
-      || DiffuseColor.x!=0.61 || DiffuseColor.y!=0.62 || DiffuseColor.z!=0.006
-      || SpecularColor.x!=0.001 || SpecularColor.y!=0.002 || SpecularColor.z!=0.61
-      || EdgeColor.x!=0.1 || EdgeColor.y!=0.2 || EdgeColor.z!=0.3
-      || Ambient!=0.45
-      || Diffuse!=0.451
-      || Specular!=0.4
-      || SpecularPower!=1.0
-      || Opacity!=1.0
-      || PointSize!=1.0
-      || LineWidth!=1.0
-      || LineStipplePattern!=0
-      || LineStippleRepeatFactor!=1
-      || Interpolation!=1
-      || Representation!=2
-      || EdgeVisibility!=0
-      || BackfaceCulling!=0
-      || FrontfaceCulling!=0
-    )
-      {
-      gl_FragColor = vec4 (1.0, 0.0, 0.0, 1.0);
-      }
-}
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/VtkPropertyTestFragment.xml b/Utilities/MaterialLibrary/Repository/TestGLSL/VtkPropertyTestFragment.xml
deleted file mode 100644
index e8c1455..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/VtkPropertyTestFragment.xml
+++ /dev/null
@@ -1,66 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">>
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="TestGLSLScalarVectorVert" location="TestGLSL/Vertex.glsl" language="GLSL" entry="main" args="-DVERTEX_PROGRAM">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-  </Shader>
-
-
-
-  <Shader scope="Fragment" name="TestVtkPropertyFrag" location="TestGLSL/VtkPropertyTestFragment.glsl" language="GLSL" entry="main" args="-DFRAGMENT_PROGRAM">
-
-    <!-- these values are for rendering -->
-    <PropertyUniform  value="AmbientColor" name="SurfaceColor"> </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="WarmColor"> </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="CoolColor"> </PropertyUniform>
-    <PropertyUniform  value="Ambient" name="DiffuseWarm"> </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="DiffuseCool"> </PropertyUniform>
-
-    <!-- these values are for testing -->
-    <PropertyUniform  value="AmbientColor" name="AmbientColor" > </PropertyUniform>
-    <PropertyUniform  value="DiffuseColor" name="DiffuseColor" > </PropertyUniform>
-    <PropertyUniform  value="SpecularColor" name="SpecularColor" > </PropertyUniform>
-    <PropertyUniform  value="EdgeColor" name="EdgeColor" > </PropertyUniform>
-
-    <PropertyUniform  value="Ambient" name="Ambient" > </PropertyUniform>
-    <PropertyUniform  value="Diffuse" name="Diffuse" > </PropertyUniform>
-    <PropertyUniform  value="Specular" name="Specular" > </PropertyUniform>
-    <PropertyUniform  value="SpecularPower" name="SpecularPower" > </PropertyUniform>
-
-    <PropertyUniform  value="Opacity" name="Opacity" > </PropertyUniform>
-
-    <PropertyUniform  value="PointSize" name="PointSize" > </PropertyUniform>
-    <PropertyUniform  value="LineWidth" name="LineWidth" > </PropertyUniform>
-    <PropertyUniform  value="LineStipplePattern" name="LineStipplePattern" > </PropertyUniform>
-    <PropertyUniform  value="LineStippleRepeatFactor" name="LineStippleRepeatFactor" > </PropertyUniform>
-    <PropertyUniform  value="Interpolation" name="Interpolation" > </PropertyUniform>
-    <PropertyUniform  value="Representation" name="Representation" > </PropertyUniform>
-    <PropertyUniform  value="EdgeVisibility" name="EdgeVisibility" > </PropertyUniform>
-    <PropertyUniform  value="BackfaceCulling" name="BackfaceCulling" > </PropertyUniform>
-    <PropertyUniform  value="FrontfaceCulling" name="FrontfaceCulling" > </PropertyUniform>
-
-  </Shader>
-
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/XFormsTestFragment.xml b/Utilities/MaterialLibrary/Repository/TestGLSL/XFormsTestFragment.xml
deleted file mode 100644
index 3399190..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/XFormsTestFragment.xml
+++ /dev/null
@@ -1,48 +0,0 @@
-<?xml version="1.0" encoding="UTF-8"?>
-<Material name="Mat1" NumberOfProperties="1" NumberOfVertexShaders="1" NumberOfFragmentShaders="1">
-
-  <Property name="Property1">
-    <Member name="AmbientColor" number_of_elements="3" type="Double" value="0.75 0.751 0.752"> </Member>
-    <Member name="DiffuseColor" number_of_elements="3" type="Double" value="0.61 0.62 0.006"> </Member>
-    <Member name="SpecularColor" number_of_elements="3" type="Double" value="0.001 0.002 0.61"> </Member>
-
-    <Member name="EdgeColor" number_of_elements="3" type="Double" value="0.1 0.2 0.3"> </Member>
-    <Member name="Ambient" number_of_elements="1" type="Double" value="0.45"> </Member>
-    <Member name="Diffuse" number_of_elements="1"  type="Double" value="0.451"> </Member>
-    <Member name="Specular" number_of_elements="1"  type="Double" value="0.4"> </Member>
-    <Member name="SpecularPower" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="Opacity" number_of_elements="1"  type="Double" value="1.0"> </Member>
-    <Member name="PointSize" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineWidth" number_of_elements="1"  type="Float" value="1.0"> </Member>
-    <Member name="LineStipplePattern" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="LineStippleRepeatFactor" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Interpolation" number_of_elements="1"  type="Int" value="1"> </Member>
-    <Member name="Representation" number_of_elements="1"  type="Int" value="2"> </Member>
-    <Member name="EdgeVisibility" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="BackfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-    <Member name="FrontfaceCulling" number_of_elements="1"  type="Int" value="0"> </Member>
-  </Property>
-
-  <Shader scope="Vertex" name="blah" location="TestGLSL/XFormsTestVertex.glsl" language="GLSL" entry="main" args="-DVERTEX_PROGRAM">
-    <LightUniform  value="Position" name="LightPosition"> </LightUniform>
-    <!-- these values are for testing -->
-           <!--
-    <MatrixUniform type="mat4" name="ModelView" number_of_elements="16" order="ColumnMajor"
-      value="1.0 0.0 0.0 0.0
-             0.0 1.0 0.0 0.0
-             0.0 0.0 1.0 0.0
-             0.0 0.0 0.1 0.0"> </MatrixUniform>
-    <MatrixUniform type="mat4" name="ModelViewProj" number_of_elements="16" order="ColumnMajor"
-      value="1.0 0.0 0.0 0.0
-             0.0 1.0 0.0 0.0
-             0.0 0.0 1.0 0.0
-             0.0 0.0 0.1 0.0"> </MatrixUniform>
-    <MatrixUniform type="mat4" name="Proj" number_of_elements="16" order="ColumnMajor"
-      value="1.0 0.0 0.0 0.0
-             0.0 1.0 0.0 0.0
-             0.0 0.0 1.0 0.0
-             0.0 0.0 0.1 0.0"> </MatrixUniform>
-           -->
-    <!-- end testing values -->
-  </Shader>
-</Material>
diff --git a/Utilities/MaterialLibrary/Repository/TestGLSL/XFormsTestVertex.glsl b/Utilities/MaterialLibrary/Repository/TestGLSL/XFormsTestVertex.glsl
deleted file mode 100644
index 0ded15a..0000000
--- a/Utilities/MaterialLibrary/Repository/TestGLSL/XFormsTestVertex.glsl
+++ /dev/null
@@ -1,77 +0,0 @@
-//
-// Begin "3Dlabs-License.txt"
-//
-// Copyright (C) 2002-2005  3Dlabs Inc. Ltd.
-// All rights reserved.
-//
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-//
-//     Redistributions of source code must retain the above copyright
-//     notice, this list of conditions and the following disclaimer.
-//
-//     Redistributions in binary form must reproduce the above
-//     copyright notice, this list of conditions and the following
-//     disclaimer in the documentation and/or other materials provided
-//     with the distribution.
-//
-//     Neither the name of 3Dlabs Inc. Ltd. nor the names of its
-//     contributors may be used to endorse or promote products derived
-//     from this software without specific prior written permission.
-//
-// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
-// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
-// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
-// FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
-// COPYRIGHT HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
-// INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
-// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
-// LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
-// ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
-// POSSIBILITY OF SUCH DAMAGE.
-//
-//
-// End "3Dlabs-License.txt"
-
-uniform vec3  LightPosition;  // (0.0, 10.0, 4.0)
-
-varying float NdotL;
-varying vec3  ReflectVec;
-varying vec3  ViewVec;
-
-uniform mat4 ModelView;
-//uniform mat4 ModelViewProj;
-//uniform mat4 Proj;
-
-struct light
-{
-  uniform vec3 position;
-  uniform vec3 color;
-};
-
-void main(void)
-{
-  uniform light l1;
-  uniform light l2;
-
-    ModelView = gl_ModelViewMatrix * 1.0;
-
-#if 0
-    vec3 ecPos      = vec3 (gl_ModelViewMatrix * gl_Vertex);
-#else
-    vec3 ecPos      = vec3 (ModelView * gl_Vertex);
-#endif
-    vec3 tnorm      = normalize(gl_NormalMatrix * gl_Normal);
-    vec3 lightVec   = normalize(LightPosition - ecPos);
-    ReflectVec      = normalize(reflect(-lightVec, tnorm));
-    ViewVec         = normalize(-ecPos);
-    NdotL           = (dot(lightVec, tnorm) + 1.0) * 0.5;
-    gl_Position     = ftransform();
-
-
-//    ModelViewProj = mul( gl_ModelViewProjectionMatrix, 1.0 );
-//    Proj = mul( gl_ProjectionMatrix, 1.0 );
-}
diff --git a/Utilities/MaterialLibrary/Repository/Textures/earth.ppm b/Utilities/MaterialLibrary/Repository/Textures/earth.ppm
deleted file mode 100644
index 0a41c2e..0000000
Binary files a/Utilities/MaterialLibrary/Repository/Textures/earth.ppm and /dev/null differ
diff --git a/Utilities/MaterialLibrary/module.cmake b/Utilities/MaterialLibrary/module.cmake
deleted file mode 100644
index 22f4e10..0000000
--- a/Utilities/MaterialLibrary/module.cmake
+++ /dev/null
@@ -1,5 +0,0 @@
-vtk_module(vtkUtilitiesMaterialLibrary
-  DEPENDS
-    vtksys
-  EXCLUDE_FROM_WRAPPING
-  )
diff --git a/Utilities/MaterialLibrary/vtk-forward.c.in b/Utilities/MaterialLibrary/vtk-forward.c.in
deleted file mode 100644
index cb8c6f6..0000000
--- a/Utilities/MaterialLibrary/vtk-forward.c.in
+++ /dev/null
@@ -1,29 +0,0 @@
-/*=========================================================================
-
-Program:   ParaView
-Module:    vtk-forward.c.in
-
-Copyright (c) Kitware, Inc.
-All rights reserved.
-See Copyright.txt or http://www.paraview.org/HTML/Copyright.html for details.
-
-This software is distributed WITHOUT ANY WARRANTY; without even
-the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#define vtksys_SHARED_FORWARD_DIR_BUILD "@ML_FORWARD_DIR_BUILD@"
-#define vtksys_SHARED_FORWARD_PATH_BUILD @ML_FORWARD_PATH_BUILD@
-#define vtksys_SHARED_FORWARD_PATH_INSTALL @ML_FORWARD_PATH_INSTALL@
-#define vtksys_SHARED_FORWARD_EXE_BUILD "@ML_FORWARD_DIR_BUILD@/@ML_FORWARD_EXE@"
-#define vtksys_SHARED_FORWARD_EXE_INSTALL "@ML_FORWARD_DIR_INSTALL@/@ML_FORWARD_EXE@"
-#define vtksys_SHARED_FORWARD_OPTION_PRINT "--print"
-#define vtksys_SHARED_FORWARD_OPTION_LDD "--ldd"
-
-#include <vtksys/SharedForward.h>
-
-int main(int argc, char** argv)
-{
-  return vtksys_shared_forward_to_real(argc, argv);
-}
diff --git a/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h.in b/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h.in
deleted file mode 100644
index 5b33c56..0000000
--- a/Utilities/MaterialLibrary/vtkMaterialLibraryMacro.h.in
+++ /dev/null
@@ -1,23 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkMaterialLibraryMacro.h.in
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-@VTK_MATERIAL_LIBRARY_MACRO@
-
-
-// Null terminated pointers to all materials available.
-static const char* ListOfMaterialNames[] = {
-  @VTK_MATERIAL_LIBRARY_LIST@
-  0};
-
diff --git a/Utilities/MaterialLibrary/vtkMaterialXMLLibrary.h.forcebuild.in b/Utilities/MaterialLibrary/vtkMaterialXMLLibrary.h.forcebuild.in
deleted file mode 100644
index 73d55e1..0000000
--- a/Utilities/MaterialLibrary/vtkMaterialXMLLibrary.h.forcebuild.in
+++ /dev/null
@@ -1,5 +0,0 @@
-# This file is used to ensure that vtkMaterialXMLLibrary.h is re-generated when
-# the user changes the CMake options VTK_USE_CG_SHADERS or VTK_USE_GLSL_SHADERS.
-
-
-@MaterialXMLs@
diff --git a/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h.in b/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h.in
deleted file mode 100644
index 75d251c..0000000
--- a/Utilities/MaterialLibrary/vtkShaderCodeLibraryMacro.h.in
+++ /dev/null
@@ -1,21 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    vtkShaderCodeLibraryMacro.h.in
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-@VTK_SHADER_CODE_LIBRARY_MACRO@
-
-// Null terminated pointers to all shader code names.
-static const char* ListOfShaderNames[] = {
-  @VTK_SHADER_CODE_LIBRARY_LIST@
-  0};
diff --git a/Utilities/MetaIO/vtkmetaio/CMakeLists.txt b/Utilities/MetaIO/vtkmetaio/CMakeLists.txt
index 440c032..0698e03 100644
--- a/Utilities/MetaIO/vtkmetaio/CMakeLists.txt
+++ b/Utilities/MetaIO/vtkmetaio/CMakeLists.txt
@@ -14,12 +14,12 @@ if(METAIO_FOR_VTK)
   set(METAIO_NAMESPACE "vtkmetaio")
   set(METAIO_ZLIB_LIBRARIES ${VTK_ZLIB_LIBRARIES})
   set(METAIO_INSTALL_EXPORT_NAME ${VTK_INSTALL_EXPORT_NAME})
-  set(METAIO_INSTALL_RUNTIME_DIR ${VTK_INSTALL_BIN_DIR_CM24})
-  set(METAIO_INSTALL_LIBRARY_DIR ${VTK_INSTALL_LIB_DIR_CM24})
-  set(METAIO_INSTALL_ARCHIVE_DIR ${VTK_INSTALL_LIB_DIR_CM24})
+  set(METAIO_INSTALL_RUNTIME_DIR ${VTK_INSTALL_RUNTIME_DIR})
+  set(METAIO_INSTALL_LIBRARY_DIR ${VTK_INSTALL_LIBRARY_DIR})
+  set(METAIO_INSTALL_ARCHIVE_DIR ${VTK_INSTALL_ARCHIVE_DIR})
   set(METAIO_INSTALL_NO_LIBRARIES ${VTK_INSTALL_NO_LIBRARIES})
   set(METAIO_INSTALL_NO_DEVELOPMENT ${VTK_INSTALL_NO_DEVELOPMENT})
-  set(METAIO_INSTALL_INCLUDE_DIR ${VTK_INSTALL_INCLUDE_DIR_CM24}/vtkmetaio)
+  set(METAIO_INSTALL_INCLUDE_DIR ${VTK_INSTALL_INCLUDE_DIR}/vtkmetaio)
   set(METAIO_LIBRARY_PROPERTIES ${VTK_LIBRARY_PROPERTIES})
   if(BUILD_SHARED_LIBS)
     add_definitions(-Dvtkmetaio_BUILD_SHARED_LIBS)
@@ -117,7 +117,7 @@ IF(WIN32)
 ENDIF(WIN32)
 
 
-#SUBDIRS(tests)
+#ADD_SUBDIRECTORY(tests)
 
 if(NOT METAIO_INSTALL_NO_LIBRARIES)
   install(TARGETS ${METAIO_NAMESPACE}
diff --git a/Utilities/MetaIO/vtkmetaio/metaForm.cxx b/Utilities/MetaIO/vtkmetaio/metaForm.cxx
index d36b6b7..43169ac 100644
--- a/Utilities/MetaIO/vtkmetaio/metaForm.cxx
+++ b/Utilities/MetaIO/vtkmetaio/metaForm.cxx
@@ -578,16 +578,6 @@ GetUserField(const char* _name)
   return NULL;
   }
 
-bool MetaForm::
-AddUserField(const char* _fieldName, MET_ValueEnumType _type, int _length,
-             bool _required, int _dependsOn)
-  {
-  MET_FieldRecordType* mFr = new MET_FieldRecordType;
-  MET_InitReadField(mFr,_fieldName, _type, _required,_dependsOn,_length);
-  m_UserDefinedReadFields.push_back(mFr);
-  return 1;
-  }
-
 //
 //
 //
@@ -617,6 +607,9 @@ Read(const char *_fileName)
     strcpy(m_FileName, _fileName);
     }
 
+  METAIO_STREAM::cout << "Read FileName = _" << m_FileName << "_"
+                      << METAIO_STREAM::endl;
+
   METAIO_STREAM::ifstream * tmpReadStream = new METAIO_STREAM::ifstream;
 #ifdef __sgi
   tmpReadStream->open(m_FileName, METAIO_STREAM::ios::in);
@@ -700,6 +693,9 @@ Write(const char *_fileName)
     FileName(_fileName);
     }
 
+  METAIO_STREAM::cout << "Write FileName = _" << m_FileName << "_"
+                      << METAIO_STREAM::endl;
+
   METAIO_STREAM::ofstream * tmpWriteStream = new METAIO_STREAM::ofstream;
 
 #ifdef __sgi
@@ -708,15 +704,16 @@ Write(const char *_fileName)
   METAIO_STREAM::ofstream tFile(m_FileName, METAIO_STREAM::ios::out);
   tFile.close();
   }
-  tmpWriteStream->open(_fileName, METAIO_STREAM::ios::out);
+  tmpWriteStream->open(m_FileName, METAIO_STREAM::ios::out);
 #else
-  tmpWriteStream->open(_fileName, METAIO_STREAM::ios::binary |
+  tmpWriteStream->open(m_FileName, METAIO_STREAM::ios::binary |
                                   METAIO_STREAM::ios::out);
 #endif
 
   if(!tmpWriteStream->rdbuf()->is_open())
     {
     delete tmpWriteStream;
+    METAIO_STREAM::cout << "Write failed." << METAIO_STREAM::endl;
     return false;
     }
 
diff --git a/Utilities/MetaIO/vtkmetaio/metaForm.h b/Utilities/MetaIO/vtkmetaio/metaForm.h
index b2e65b2..bad002a 100644
--- a/Utilities/MetaIO/vtkmetaio/metaForm.h
+++ b/Utilities/MetaIO/vtkmetaio/metaForm.h
@@ -111,12 +111,6 @@ class METAIO_EXPORT MetaForm
 
     void * GetUserField(const char* _name);
 
-    bool   AddUserField(const char* _fieldName,
-                        MET_ValueEnumType _type,
-                        int _length=0,
-                        bool _required=true,
-                        int _dependsOn=-1);
-
     template <class TType>
     bool   AddUserField(const char* _fieldName,
                         MET_ValueEnumType _type,
@@ -140,11 +134,11 @@ class METAIO_EXPORT MetaForm
     //
     //
     //
-    virtual bool  CanRead(const char * _fileName=NULL) const;
+    bool  CanRead(const char * _fileName=NULL) const;
 
     bool  Read(const char * _fileName=NULL);
 
-    virtual bool  CanReadStream(METAIO_STREAM::ifstream * _stream) const;
+    bool  CanReadStream(METAIO_STREAM::ifstream * _stream) const;
 
     bool  ReadStream(METAIO_STREAM::ifstream * _stream);
 
diff --git a/Utilities/MetaIO/vtkmetaio/metaImage.h b/Utilities/MetaIO/vtkmetaio/metaImage.h
index 6d451e9..5fe3cde 100644
--- a/Utilities/MetaIO/vtkmetaio/metaImage.h
+++ b/Utilities/MetaIO/vtkmetaio/metaImage.h
@@ -105,7 +105,7 @@ class METAIO_EXPORT MetaImage : public MetaObject
 
     virtual void Clear(void);
 
-    virtual bool InitializeEssential(int _nDims,
+    bool InitializeEssential(int _nDims,
                                      const int * _dimSize,
                                      const float * _elementSpacing,
                                      MET_ValueEnumType _elementType,
@@ -234,13 +234,13 @@ class METAIO_EXPORT MetaImage : public MetaObject
     //
     //
     //
-    virtual bool CanRead(const char *_headerName=NULL) const;
+    bool CanRead(const char *_headerName=NULL) const;
 
-    virtual bool Read(const char *_headerName=NULL,
+    bool Read(const char *_headerName=NULL,
                       bool _readElements=true,
                       void * _buffer=NULL);
 
-    virtual bool ReadROI(int * _indexMin, int * _indexMax,
+    bool ReadROI(int * _indexMin, int * _indexMax,
                          const char *_headerName=NULL,
                          bool _readElements=true,
                          void * _buffer=NULL,
@@ -248,27 +248,27 @@ class METAIO_EXPORT MetaImage : public MetaObject
                          );
 
 
-    virtual bool CanReadStream(METAIO_STREAM::ifstream * _stream) const;
+    bool CanReadStream(METAIO_STREAM::ifstream * _stream) const;
 
-    virtual bool ReadStream(int _nDims,
+    bool ReadStream(int _nDims,
                             METAIO_STREAM::ifstream * _stream,
                             bool _readElements=true,
                             void * _buffer=NULL);
 
-    virtual bool ReadROIStream(int * _indexMin, int * _indexMax,
+    bool ReadROIStream(int * _indexMin, int * _indexMax,
                                int _nDims,
                                METAIO_STREAM::ifstream * _stream,
                                bool _readElements=true,
                                void * _buffer=NULL,
                                unsigned int subSamplingFactor=1);
 
-    virtual bool Write(const char *_headName=NULL,
+    bool Write(const char *_headName=NULL,
                        const char *_dataName=NULL,
                        bool _writeElements=true,
                        const void * _constElementData=NULL,
                        bool _append=false);
 
-    virtual bool WriteROI(int * _indexMin, int * _indexMax,
+    bool WriteROI(int * _indexMin, int * _indexMax,
                           const char *_headName=NULL,
                           const char *_dataName=NULL,
                           bool _writeElements=true,
@@ -276,12 +276,12 @@ class METAIO_EXPORT MetaImage : public MetaObject
                           bool _append=false
                           );
 
-    virtual bool WriteStream(METAIO_STREAM::ofstream * _stream,
+    bool WriteStream(METAIO_STREAM::ofstream * _stream,
                              bool _writeElements=true,
                              const void * _constElementData=NULL);
 
 
-    virtual bool Append(const char *_headName=NULL);
+    bool Append(const char *_headName=NULL);
 
 
     typedef METAIO_STL::pair<long,long> CompressionOffsetType;
@@ -327,13 +327,13 @@ class METAIO_EXPORT MetaImage : public MetaObject
     char               m_ElementDataFileName[255];
 
 
-    void  M_Destroy(void);
+    virtual void  M_Destroy(void);
 
-    void  M_SetupReadFields(void);
+    virtual void  M_SetupReadFields(void);
 
-    void  M_SetupWriteFields(void);
+    virtual void  M_SetupWriteFields(void);
 
-    bool  M_Read(void);
+    virtual bool  M_Read(void);
 
     // _dataQuantity is expressed in number of pixels. Internally it will be
     // scaled by the number of components and number of bytes per component.
diff --git a/Utilities/MetaIO/vtkmetaio/metaMesh.cxx b/Utilities/MetaIO/vtkmetaio/metaMesh.cxx
index c039739..50199c3 100644
--- a/Utilities/MetaIO/vtkmetaio/metaMesh.cxx
+++ b/Utilities/MetaIO/vtkmetaio/metaMesh.cxx
@@ -632,8 +632,8 @@ M_Read(void)
                             << METAIO_STREAM::endl;
         METAIO_STREAM::cout << "   ideal = " << readSize << " : actual = " << gc
                             << METAIO_STREAM::endl;
-        return false;
         delete [] _data;
+        return false;
         }
 
       int i=0;
diff --git a/Utilities/MetaIO/vtkmetaio/metaOutput.cxx b/Utilities/MetaIO/vtkmetaio/metaOutput.cxx
index 85cadc9..96d390b 100644
--- a/Utilities/MetaIO/vtkmetaio/metaOutput.cxx
+++ b/Utilities/MetaIO/vtkmetaio/metaOutput.cxx
@@ -39,7 +39,7 @@ namespace METAIO_NAMESPACE {
 
 
 /** Stolen from kwsys */
-METAIO_STL::string GetCurrentDateTime(const char* format)
+static METAIO_STL::string GetCurrentDateTime(const char* format)
 {
   char buf[1024];
   time_t t;
diff --git a/Utilities/MetaIO/vtkmetaio/metaTransform.cxx b/Utilities/MetaIO/vtkmetaio/metaTransform.cxx
index 62c011a..f5c3419 100644
--- a/Utilities/MetaIO/vtkmetaio/metaTransform.cxx
+++ b/Utilities/MetaIO/vtkmetaio/metaTransform.cxx
@@ -520,8 +520,8 @@ M_Read(void)
       METAIO_STREAM::cout << "MetaTransform: m_Read: data not read completely"
                 << METAIO_STREAM::endl;
       METAIO_STREAM::cout << "   ideal = " << parametersDimension*sizeof(double) << " : actual = " << gc << METAIO_STREAM::endl;
-      return false;
       delete [] _data;
+      return false;
       }
 
     unsigned long k=0;
diff --git a/Utilities/MetaIO/vtkmetaio/metaTube.cxx b/Utilities/MetaIO/vtkmetaio/metaTube.cxx
index 47f7035..0fa78c2 100644
--- a/Utilities/MetaIO/vtkmetaio/metaTube.cxx
+++ b/Utilities/MetaIO/vtkmetaio/metaTube.cxx
@@ -504,9 +504,9 @@ M_Read(void)
                 << METAIO_STREAM::endl;
       METAIO_STREAM::cout << "   ideal = " << readSize
                 << " : actual = " << gc << METAIO_STREAM::endl;
-      return false;
       delete [] posDim;
       delete [] _data;
+      return false;
       }
 
     i=0;
diff --git a/Utilities/MetaIO/vtkmetaio/metaUtils.cxx b/Utilities/MetaIO/vtkmetaio/metaUtils.cxx
index 1e887d4..a824734 100644
--- a/Utilities/MetaIO/vtkmetaio/metaUtils.cxx
+++ b/Utilities/MetaIO/vtkmetaio/metaUtils.cxx
@@ -52,7 +52,7 @@ namespace METAIO_NAMESPACE {
 
 int META_DEBUG = 0;
 
-char MET_SeperatorChar = '=';
+static char MET_SeperatorChar = '=';
 
 MET_FieldRecordType *
 MET_GetFieldRecord(const char * _fieldName,
@@ -130,7 +130,7 @@ METAIO_STL::string MET_ReadForm(METAIO_STREAM::istream &_fp)
   METAIO_STL::streampos pos = _fp.tellg();
   METAIO_STL::vector<MET_FieldRecordType *> fields;
   MET_FieldRecordType* mF = new MET_FieldRecordType;
-  MET_InitReadField(mF, "Form", MET_STRING, false);
+  MET_InitReadField(mF, "FormTypeName", MET_STRING, false);
   mF->required = false;
   mF->terminateRead = true;
   fields.push_back(mF);
@@ -138,18 +138,15 @@ METAIO_STL::string MET_ReadForm(METAIO_STREAM::istream &_fp)
   MET_Read(_fp, &fields, '=', true);
   _fp.seekg(pos);
 
-  METAIO_STL::string value;
-
   if(mF->defined)
     {
-    value = (char *)(mF->value);
+    METAIO_STL::string value = (char *)(mF->value);
     delete mF;
     return value;
     }
 
-  value[0] = '\0';
   delete mF;
-  return value;
+  return METAIO_STL::string();
   }
 
 //
@@ -168,18 +165,15 @@ METAIO_STL::string MET_ReadType(METAIO_STREAM::istream &_fp)
   MET_Read(_fp, &fields, '=', true);
   _fp.seekg(pos);
 
-  METAIO_STL::string value;
-
   if(mF->defined)
     {
-    value = (char *)(mF->value);
+    METAIO_STL::string value  = (char *)(mF->value);
     delete mF;
     return value;
     }
 
-  value[0] = '\0';
   delete mF;
-  return value;
+  return METAIO_STL::string();
   }
 
 //
@@ -991,7 +985,7 @@ bool MET_InitReadField(MET_FieldRecordType * _mf,
 //
 //
 //
-bool MET_SkipToVal(METAIO_STREAM::istream &fp)
+static bool MET_SkipToVal(METAIO_STREAM::istream &fp)
   {
   int c;
   if( fp.eof() )
@@ -1026,7 +1020,7 @@ bool MET_SkipToVal(METAIO_STREAM::istream &fp)
 //
 //
 //
-bool MET_IsComplete(METAIO_STL::vector<MET_FieldRecordType *> * fields)
+static bool MET_IsComplete(METAIO_STL::vector<MET_FieldRecordType *> * fields)
   {
   METAIO_STL::vector<MET_FieldRecordType *>::iterator fieldIter;
   for(fieldIter=fields->begin(); fieldIter!=fields->end(); fieldIter++)
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta10Contour.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta10Contour.cxx
index 4dfeae7..3e963b6 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta10Contour.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta10Contour.cxx
@@ -2,7 +2,7 @@
 #include <ctype.h>
 #include <metaContour.h>
 
-int main(int , char * [])
+int main(int, char * [])
 {
   METAIO_STREAM::cout << "Creating test file ..." << METAIO_STREAM::endl;
   MetaContour Contour(3);
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta11Form.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta11Form.cxx
index 7891c2d..cb42476 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta11Form.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta11Form.cxx
@@ -4,7 +4,7 @@
 
 #include <metaForm.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
   {
   MetaForm tObj;
 
@@ -35,19 +35,23 @@ int main(int argc, char **argv)
   tObj.Clear();
   tObj.ClearUserFields();
 
-  tObj.AddUserField("MyName", MET_STRING);
-  tObj.AddUserField("MyArray", MET_INT_ARRAY,3);
-  tObj.AddUserField("MyMatrix", MET_FLOAT_MATRIX,2);
+  tObj.AddUserField("MyName", MET_STRING, strlen("default"), "default");
+  tObj.AddUserField("MyArray", MET_INT_ARRAY, 3, myarray);
+  tObj.AddUserField("MyMatrix", MET_FLOAT_MATRIX, 2, myMatrix);
 
+  METAIO_STREAM::cout << "Read: " << METAIO_STREAM::endl;
   tObj.Read();
+  METAIO_STREAM::cout << "PrintInfo: " << METAIO_STREAM::endl;
   tObj.PrintInfo();
 
+  METAIO_STREAM::cout << "Check fields: " << METAIO_STREAM::endl;
   char* name = static_cast<char*>(tObj.GetUserField("MyName"));
-  if(strcmp(name,"Julien"))
+  if(!strcmp(name,"Julien"))
   {
     METAIO_STREAM::cout << "MyName: FAIL" << METAIO_STREAM::endl;
     return 0;
   }
+  METAIO_STREAM::cout << "MyName: " << name << METAIO_STREAM::endl;
 
   int* array = static_cast<int*>(tObj.GetUserField("MyArray"));
 
@@ -59,6 +63,7 @@ int main(int argc, char **argv)
       return 0;
     }
   }
+  METAIO_STREAM::cout << "MyArray: PASS" << METAIO_STREAM::endl;
 
   float* matrix = static_cast<float*>(tObj.GetUserField("MyMatrix"));
   for(i=0; i<4; i++)
@@ -68,6 +73,7 @@ int main(int argc, char **argv)
       METAIO_STREAM::cout << "MyMatrix: FAIL" << METAIO_STREAM::endl;
     }
   }
+  METAIO_STREAM::cout << "MyMatrix: PASS" << METAIO_STREAM::endl;
 
   METAIO_STREAM::cout << "PASSED!" << METAIO_STREAM::endl;
 
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta12Array.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta12Array.cxx
index c933ad1..533f02a 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta12Array.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta12Array.cxx
@@ -4,7 +4,7 @@
 
 #include <metaArray.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
   {
   MetaArray tObj;
 
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta1Utils.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta1Utils.cxx
index 2869a20..f93502d 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta1Utils.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta1Utils.cxx
@@ -5,7 +5,7 @@
 
 #include <metaUtils.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
   {
 
   if(MET_SystemByteOrderMSB())
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta2Object.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta2Object.cxx
index 9acc604..e9fe251 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta2Object.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta2Object.cxx
@@ -4,7 +4,7 @@
 
 #include <metaObject.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
   {
   MetaObject tObj;
 
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta3Image.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta3Image.cxx
index a0ed6f6..ae7a4b5 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta3Image.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta3Image.cxx
@@ -2,7 +2,7 @@
 #include <ctype.h>
 #include <metaImage.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
   {
 
   MetaImage tIm(8, 8, 1, 2, MET_CHAR);
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta4Tube.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta4Tube.cxx
index 52932b1..9e5f373 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta4Tube.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta4Tube.cxx
@@ -5,7 +5,7 @@
 #include <metaScene.h>
 #include <metaEllipse.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
 {
 
   METAIO_STREAM::cout << "Initializing scene ..." << METAIO_STREAM::endl;
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta5Blob.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta5Blob.cxx
index 31d7c47..4a3fd54 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta5Blob.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta5Blob.cxx
@@ -2,7 +2,7 @@
 #include <ctype.h>
 #include <metaBlob.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
 {
 
   METAIO_STREAM::cout << "Creating test file ..." << METAIO_STREAM::endl;
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta6Surface.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta6Surface.cxx
index ac81edb..4e7bad1 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta6Surface.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta6Surface.cxx
@@ -2,7 +2,7 @@
 #include <ctype.h>
 #include <metaSurface.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
 {
 
   METAIO_STREAM::cout << "Creating test file ...";
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta7Line.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta7Line.cxx
index 025bdbd..4e4e963 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta7Line.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta7Line.cxx
@@ -2,7 +2,7 @@
 #include <ctype.h>
 #include <metaLine.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
 {
 
   METAIO_STREAM::cout << "Creating test file ...";
diff --git a/Utilities/MetaIO/vtkmetaio/tests/testMeta8Scene.cxx b/Utilities/MetaIO/vtkmetaio/tests/testMeta8Scene.cxx
index 9ec00c7..9fd7f34 100644
--- a/Utilities/MetaIO/vtkmetaio/tests/testMeta8Scene.cxx
+++ b/Utilities/MetaIO/vtkmetaio/tests/testMeta8Scene.cxx
@@ -4,7 +4,7 @@
 #include <metaGroup.h>
 #include <metaEllipse.h>
 
-int main(int argc, char **argv)
+int main(int, char * [])
 {
 
   METAIO_STREAM::cout << "Creating test scene ..." << METAIO_STREAM::endl;
diff --git a/Utilities/Python/CMakeLists.txt b/Utilities/Python/CMakeLists.txt
index d96a6b9..86fe051 100644
--- a/Utilities/Python/CMakeLists.txt
+++ b/Utilities/Python/CMakeLists.txt
@@ -23,3 +23,19 @@ if(NOT VTK_INSTALL_NO_DEVELOPMENT)
     COMPONENT Development
     )
 endif()
+
+# Export the location of the python module dirs in the install and build trees for every vtkpython module to use.
+# As long as those modules depend on vtkpython, they can retrieve and use these variables.
+if (NOT VTK_INSTALL_PYTHON_MODULE_DIR)
+  set(VTK_INSTALL_PYTHON_MODULE_DIR
+      "${VTK_INSTALL_LIBRARY_DIR}/python${PYTHON_MAJOR_VERSION}.${PYTHON_MINOR_VERSION}/site-packages"
+      CACHE
+      PATH "Directory where python modules will be installed" FORCE)
+endif()
+
+if (NOT VTK_BUILD_PYTHON_MODULE_DIR)
+  set(VTK_BUILD_PYTHON_MODULE_DIR
+      "${VTK_BINARY_DIR}/Wrapping/Python"
+      CACHE
+      PATH "Directory where python modules will be built" FORCE)
+endif()
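Both cache entries added above behave like any other CMake cache path and can be overridden at configure time; a minimal sketch, with illustrative values only (the defaults computed above apply when the options are omitted):

    # Hypothetical out-of-source configure step; the -D values are examples.
    cmake ../VTK \
      -DVTK_WRAP_PYTHON=ON \
      -DVTK_INSTALL_PYTHON_MODULE_DIR=lib/python2.7/site-packages \
      -DVTK_BUILD_PYTHON_MODULE_DIR="$PWD/Wrapping/Python"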
diff --git a/Utilities/Release/CMakeLists.txt b/Utilities/Release/CMakeLists.txt
deleted file mode 100644
index 891db26..0000000
--- a/Utilities/Release/CMakeLists.txt
+++ /dev/null
@@ -1,124 +0,0 @@
-
-#------------------------------------------------------------------------------
-# Add install rules for required system runtimes such as MSVCRxx.dll
-set (CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS_SKIP ON)
-include(InstallRequiredSystemLibraries)
-if (CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS)
-  install(FILES ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS}
-    DESTINATION ${VTK_INSTALL_BIN_DIR_CM24}
-    PERMISSIONS OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ
-    COMPONENT RuntimeLibraries)
-
-  if(VTK_WRAP_PYTHON)
-    # Install the runtimes to the lib dir as well since python modules are
-    # installed in that directory and the manifest files need to be present there as
-    # well.
-    install(FILES ${CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS}
-      DESTINATION ${VTK_INSTALL_LIB_DIR_CM24}/site-packages/vtk
-      PERMISSIONS OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ
-      COMPONENT RuntimeLibraries)
-  endif(VTK_WRAP_PYTHON)
-endif (CMAKE_INSTALL_SYSTEM_RUNTIME_LIBS)
-
-if(WIN32)
-  install(FILES ${VTK_SOURCE_DIR}/vtkLogo.ico
-    DESTINATION ${VTK_INSTALL_BIN_DIR_CM24}
-    PERMISSIONS OWNER_WRITE OWNER_READ OWNER_EXECUTE GROUP_READ GROUP_EXECUTE WORLD_READ
-    COMPONENT RuntimeExecutables)
-endif()
-
-if(WIN32)
-  set(vtk_runtime_dir ${VTK_INSTALL_BIN_DIR_CM24})
-else()
-  set(vtk_runtime_dir ${VTK_INSTALL_LIB_DIR_CM24})
-endif()
-
-# install core python distribution
-if(NOT APPLE AND VTK_WRAP_PYTHON)
-  get_filename_component(PYTHON_BIN_DIR "${PYTHON_EXECUTABLE}" PATH)
-  get_filename_component(PYTHON_LIB_DIR "${PYTHON_LIBRARY}" PATH)
-  if(WIN32)
-    execute_process(COMMAND ${PYTHON_EXECUTABLE} -c "from distutils.sysconfig import get_python_lib; print get_python_lib(standard_lib=1)"
-                  OUTPUT_VARIABLE PYTHON_LIBS_DIR
-                  OUTPUT_STRIP_TRAILING_WHITESPACE)
-
-    file(GLOB python_core_modules "${PYTHON_BIN_DIR}/DLLs/*.pyd")
-
-    install(FILES ${python_core_modules}
-        DESTINATION ${VTK_INSTALL_BIN_DIR_CM24}
-        COMPONENT RuntimeLibraries)
-  else(WIN32)
-    get_filename_component(PYTHON_LIBS_DIR "${PYTHON_LIBRARY}" PATH)
-    file(TO_CMAKE_PATH ${PYTHON_LIBS_DIR} PYTHON_LIBS_DIR)
-
-    # install pyconfig.h
-    string(REGEX MATCH "python[0-9].[0-9]" py_inc_dir ${PYTHON_INCLUDE_DIR})
-    install(FILES ${PYTHON_INCLUDE_DIR}/pyconfig.h
-            DESTINATION ${vtk_runtime_dir}/include/${py_inc_dir}
-            COMPONENT RuntimeLibraries)
-  endif(WIN32)
-
-  file(TO_CMAKE_PATH ${PYTHON_LIBS_DIR} PYTHON_LIBS_DIR)
-  install(DIRECTORY ${PYTHON_LIBS_DIR}
-          DESTINATION ${vtk_runtime_dir}
-          USE_SOURCE_PERMISSIONS
-          COMPONENT RuntimeLibraries
-          PATTERN "command" EXCLUDE)
-endif()
-
-set(_dir "${VTK_INSTALL_LIB_DIR_CM24}")
-if(WIN32)
-  set(_dir "bin")
-endif(WIN32)
-
-
-list(APPEND lib_search_dir
-     "\${CMAKE_INSTALL_PREFIX}/bin"
-     "\${CMAKE_INSTALL_PREFIX}/${VTK_INSTALL_LIB_DIR_CM24}")
-
-if(WIN32)
-  list(APPEND lib_search_dir
-    "\${CMAKE_INSTALL_PREFIX}/bin" )
-else()
-  list(APPEND lib_search_dir
-    "\${CMAKE_INSTALL_PREFIX}/${VTK_INSTALL_LIB_DIR_CM24}")
-endif()
-
-list(APPEND lib_search_dir "${VTK_BINARY_DIR}/bin")
-
-if(WIN32)
-  list(APPEND lib_search_dir "${VTK_BINARY_DIR}/bin/Release")
-endif()
-
-if(VTK_WRAP_TCL AND NOT APPLE)
-  get_filename_component(TCL_BIN_DIR "${TCL_TCLSH}" PATH)
-  get_filename_component(TCL_LIB_DIR "${TCL_LIBRARY}" PATH)
-  get_filename_component(TK_LIB_DIR "${TK_LIBRARY}" PATH)
-  list(APPEND lib_search_dir ${TCL_BIN_DIR} ${TCL_BIN_DIR} ${TK_LIB_DIR})
-endif()
-
-if(VTK_WRAP_PYTHON AND NOT APPLE)
-  list(APPEND lib_search_dir ${PYTHON_BIN_DIR} ${PYTHON_LIB_DIR})
-  if(WIN32)
-    list(APPEND lib_search_dir ${PYTHON_BIN_DIR}/DLLs)
-  endif()
-endif()
-
-set(APPS)
-
-if(VTK_WRAP_PYTHON)
-  if(WIN32)
-    list(APPEND APPS "\${CMAKE_INSTALL_PREFIX}/bin/vtkpython${CMAKE_EXECUTABLE_SUFFIX}")  # paths to executables
-  else()
-    list(APPEND APPS "\${CMAKE_INSTALL_PREFIX}/${VTK_INSTALL_LIB_DIR_CM24}/vtkpython${CMAKE_EXECUTABLE_SUFFIX}")  # paths to executables
-  endif()
-endif()
-
-if(VTK_WRAP_TCL)
-  #list(APPEND APPS "\${CMAKE_INSTALL_PREFIX}/bin/vtk${CMAKE_EXECUTABLE_SUFFIX}")
-endif()
-
-install(CODE "
-  include(BundleUtilities)
-  fixup_bundle(\"${APPS}\"   \"${lib_names}\"   \"${lib_search_dir}\")"
-  COMPONENT RuntimeLibraries)
diff --git a/Utilities/Release/VTKInstall.bmp b/Utilities/Release/VTKInstall.bmp
deleted file mode 100644
index d476d10..0000000
Binary files a/Utilities/Release/VTKInstall.bmp and /dev/null differ
diff --git a/Utilities/Scripts/SetupExternalData.sh b/Utilities/Scripts/SetupExternalData.sh
new file mode 100755
index 0000000..4b50031
--- /dev/null
+++ b/Utilities/Scripts/SetupExternalData.sh
@@ -0,0 +1,92 @@
+#!/usr/bin/env bash
+#=============================================================================
+# Copyright 2013 Kitware, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#=============================================================================
+
+# Path conversion function.
+case "$(uname)" in
+  *CYGWIN*)
+    native_path() {
+      cygpath -m "$1"
+    }
+    ;;
+  *MINGW*)
+    native_path() {
+      cmd //c echo "$1" | sed 's/^"//;s/"$//'
+    }
+    ;;
+  *)
+    native_path() {
+      echo "$1"
+    }
+    ;;
+esac
+
+say_store() {
+  if test -f "$config_store"; then
+    echo 'A default for VTK_DATA_STORE is configured as:' &&
+    echo &&
+    sed 's/^/  /' < "$config_store" &&
+    echo
+  else
+    echo 'No default for VTK_DATA_STORE is configured.' &&
+    echo
+  fi
+}
+
+ask_store() {
+  ans='?'
+  while test "$ans" = '?'; do
+    read -ep 'From the options
+
+ <empty>     = No change
+ n,no        = No default
+ h,home      = Use '"$store_home"'
+ s,sibling   = Use '"$store_sibling"'
+ <full-path> = Use specified <full-path>
+
+select a default for VTK_DATA_STORE [n/h/s]: ' ans &&
+    case "$ans" in
+      n|N|no) ans='no' ;;
+      h|H|home) ans="$store_home" ;;
+      s|S|sibling) ans="$store_sibling" ;;
+      /*) ;;
+      [A-Za-z]:[/\\]*) ;;
+      '') ans='' ;;
+      *) echo; echo "Invalid response '$ans'!"; echo; ans='?' ;;
+    esac
+  done
+  eval "$1='$ans'"
+}
+
+cd "${BASH_SOURCE%/*}/../.." &&
+config_store='.ExternalData/config/store' &&
+store_home="$(native_path "${HOME-$USERPROFILE}/.ExternalData")" &&
+store_sibling="$(native_path "${PWD%/*}/VTKExternalData")" &&
+echo 'VTK may download data objects into a local "store" using
+the CMake ExternalData module.  The store is content-addressed
+and can be shared across multiple projects and build trees.
+VTK build trees have a VTK_DATA_STORE CMake cache entry to set
+their store location.  A default for this value to be used in
+build trees created with this source tree may now be chosen.
+' &&
+say_store &&
+ask_store ans &&
+case "$ans" in
+  '') ;;
+  no) rm -f "$config_store" ;;
+  *)  mkdir -p "${config_store%/*}" && echo "$ans" > "$config_store" ;;
+esac &&
+say_store
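The script above only records a per-source-tree default; every build tree still controls its own store through the VTK_DATA_STORE cache entry. A usage sketch, assuming the "home" option is chosen (paths illustrative; SetupForDevelopment.sh now runs this script as well, as the hunk further below shows):

    # Answer the prompt once per source tree.
    ./Utilities/Scripts/SetupExternalData.sh

    # The chosen default is stored as a one-line config file in the source tree.
    cat .ExternalData/config/store
    #   /home/user/.ExternalData

    # Any build tree may still point elsewhere through its own cache entry.
    cmake ../VTK -DVTK_DATA_STORE="$HOME/.ExternalData"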
diff --git a/Utilities/Scripts/git-gerrit-push b/Utilities/Scripts/git-gerrit-push
index 7f8f47b..ab11e5c 100755
--- a/Utilities/Scripts/git-gerrit-push
+++ b/Utilities/Scripts/git-gerrit-push
@@ -1,22 +1,111 @@
 #!/usr/bin/env bash
 
-USAGE="[<remote>] [--no-topic] [--dry-run] [--]"
+USAGE="[<remote>] [--no-topic] [--no-data] [--keep-data] [--dry-run] [--]"
 OPTIONS_SPEC=
 SUBDIRECTORY_OK=Yes
 . "$(git --exec-path)/git-sh-setup"
 
+egrep-q() {
+  egrep "$@" >/dev/null 2>/dev/null
+}
+
+data_commit() {
+  # Get data refs.  Skip if none.
+  test $# != 0 || return 0
+  state=$(git for-each-ref "$@") || return
+  test -n "$state" || return 0
+
+  # Convert each data ref to an index entry.
+  index=$(
+    echo "$state" |
+    while read obj type refname; do
+      # Take blobs with valid ref names.
+      name="${refname#refs/data/}"
+      if echo "$type,$name" | egrep-q '^blob,[A-Za-z0-9-]+/[0-9A-Fa-f]+$'; then
+        # Place the blob at the path named by the ref.
+        echo "100644 $obj 0	$name"
+      else
+        # Warn about unprocessed refs.
+        echo "unknown $refname" 1>&2
+      fi
+    done
+  ) || return
+  test -n "$index" || return 0
+
+  # Convert the index into a tree.
+  tree=$(
+    GIT_INDEX_FILE="$GIT_DIR/tmp-index.$$.$RANDOM" &&
+    export GIT_INDEX_FILE &&
+    trap "rm -f '$GIT_INDEX_FILE'" EXIT &&
+    rm -f "$GIT_INDEX_FILE" &&
+    echo "$index" | git update-index --index-info &&
+    git write-tree
+  ) &&
+
+  # Store the tree in a commit object.
+  echo 'data' | git commit-tree "$tree"
+}
+
+data_remove() {
+  test -z "$dry_run" || return 0
+  git ls-tree -r "$1" |
+  while read mode type obj name; do
+    # Remove ref only if it still has the data we expected.
+    git update-ref -d "refs/data/$name" "$obj" 2>/dev/null || true
+  done
+}
+
+data_report_and_remove() {
+  if test -n "$keep_data"; then
+    action="kept"
+  else
+    action="removed"
+    data_remove "$1" || true
+  fi &&
+  echo "Pushed refs/data and $action local copy:" &&
+  git ls-tree --name-only -r "$1" | sed "s/^/  /"
+}
+
+data_push_refspec() {
+  echo "$1:refs/data/commits/$1"
+}
+
+data_refs() {
+  git rev-list "$@" |
+  git diff-tree --no-commit-id --root -c -r --diff-filter=AM --stdin |
+  egrep '\.(md5)$' |
+  #     read :srcmode dstmode srcobj dstobj status file
+  while read  _       _       _      obj    _      file; do
+    # Identify the hash algorithm used.
+    case "$file" in
+      *.md5) algo=MD5 ; validate="^[0-9a-fA-F]{32}$" ;;
+      *) continue ;;
+    esac
+
+    # Load and validate the hash.
+    if hash=$(git cat-file blob $obj 2>/dev/null) &&
+        echo "$hash" | egrep-q "$validate"; then
+      echo "refs/data/$algo/$hash"
+    fi
+  done
+}
+
 #-----------------------------------------------------------------------------
 
 remote=''
 refspecs=''
+keep_data=''
 no_topic=''
+no_data=''
 dry_run=''
 
-# Parse the command line options.
+# Parse command line options.
 while test $# != 0; do
   case "$1" in
-    --no-topic) no_topic=1 ;;
-    --dry-run)  dry_run=--dry-run ;;
+    --keep-data) keep_data=1 ;;
+    --no-topic)  no_topic=1 ;;
+    --no-data)   no_data=1 ;;
+    --dry-run)   dry_run=--dry-run ;;
     --) shift; break ;;
     -*) usage ;;
     *) test -z "$remote" || usage ; remote="$1" ;;
@@ -30,29 +119,48 @@ test -n "$remote" || remote="gerrit"
 
 if test -z "$no_topic"; then
   # Identify and validate the topic branch name.
-  topic="$(git symbolic-ref HEAD | sed -e 's|^refs/heads/||')"
-  if test "$topic" = "master"; then
+  head="$(git symbolic-ref HEAD)" && topic="${head#refs/heads/}" || topic=''
+  if test -z "$topic" -o "$topic" = "master"; then
     die 'Please name your topic:
-    git checkout -b descriptive-name'
+  git checkout -b descriptive-name'
   fi
-  refspecs="HEAD:refs/for/master/$topic"
+  # The topic branch will be pushed by name.
+  refspecs="HEAD:refs/for/master/$topic $refspecs"
+fi
+
+# Fetch the current upstream master branch head.
+# This helps computation of a minimal pack to push.
+echo "Fetching $remote master"
+fetch_out=$(git fetch "$remote" master 2>&1) || die "$fetch_out"
+master=$(git rev-parse FETCH_HEAD) || exit
+
+if test -z "$no_data"; then
+  # Create a commit containing the data to push.
+  data_refs=$(data_refs $master..) &&
+  data=$(data_commit $data_refs) || die 'Failed to create data commit'
+  if test -n "$data"; then
+    refspecs="$(data_push_refspec "$data") $refspecs"
+  fi
+else
+  data=''
 fi
 
 # Exit early if we have nothing to push.
 if test -z "$refspecs"; then
-  echo "Nothing to push!"
+  echo 'Nothing to push!'
   exit 0
 fi
 
-# Fetch the current upstream master branch head.
-# This helps the computation of a minimal pack to push.
-echo "Fetching $remote master"
-fetch_out=$(git fetch "$remote" master 2>&1) || die "$fetch_out"
-
 # Push.  Save output and exit code.
 echo "Pushing to $remote"
 push_stdout=$(git push --porcelain $dry_run "$remote" $refspecs); push_exit=$?
 echo "$push_stdout"
 
+# Check if data were pushed successfully.
+if test -n "$data" &&
+   echo "$push_stdout" | egrep-q "^[*=+]	$data"; then
+  data_report_and_remove "$data"
+fi
+
 # Reproduce the push exit code.
 exit $push_exit
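A hedged usage sketch of the data handling added above, assuming the current topic branch touched at least one .md5 content link (the ref layout comes from the helper functions; the invocation itself is illustrative):

    # Preview the push; --dry-run updates no refs, --no-data would skip the
    # data refs entirely, and --keep-data keeps the local copies after a real push.
    ./Utilities/Scripts/git-gerrit-push --dry-run

    # Each pushed .md5 content link corresponds to one ref of the form
    # refs/data/MD5/<hash>; data_commit wraps those blobs into a throwaway
    # commit that is pushed to refs/data/commits/<commit-sha> beside the topic.
    git for-each-ref 'refs/data/MD5/*'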
diff --git a/Utilities/Scripts/pre-commit b/Utilities/Scripts/pre-commit
index 0e1d134..fae09f8 100755
--- a/Utilities/Scripts/pre-commit
+++ b/Utilities/Scripts/pre-commit
@@ -12,6 +12,62 @@ die() {
   exit 1
 }
 
+ExternalData_stage_linked_content() {
+  # Identify the hash algorithm used.
+  case "$file" in
+    *.md5) algo=MD5 ; base="${file/.md5}" ; validate="^[0-9a-fA-F]{32}$" ;;
+    *) die "$file: invalid content link (unrecognized extension)" ;;
+  esac
+
+  # Load and validate the hash stored in the staged blob.
+  hash=$(git cat-file blob $dst_obj) || hash=""
+  echo "$hash" | egrep-q "$validate" ||
+  die "$file: invalid content link (does not match '$validate')"
+
+  # Reject simultaneous raw file and content link.
+  files=$(git ls-files -- "$base")
+  if test -n "$files"; then
+    die "$file: content link may not coexist with $files"
+  fi
+
+  # Find the content referenced by the link.
+  staged="$(dirname "$file")/.ExternalData_${algo}_${hash}"
+  stored="${ExternalData_STORE}/$algo/$hash"
+  ref="refs/data/$algo/$hash"
+  obj=$(git rev-parse --verify -q "$ref") || obj=""
+  if test -z "$obj" -a -f "$staged"; then
+    # Content is staged by the ExternalData module.  Store it in Git.
+    obj=$(git hash-object -w -- "$staged") ||
+    die "$file: git hash-object failed to load $staged"
+    git update-ref "$ref" "$obj" "" ||
+    die "$file: git update-ref failed to create $ref = $obj"
+    echo "$file: Added content to Git at $ref"
+  fi
+
+  # Move staged object to local store if it is in Git.
+  if test -f "$staged" && test -n "$obj"; then
+    mkdir -p "${stored%/*}" &&
+    mv -n "$staged" "$stored" &&
+    rm -f "$staged" &&
+    echo "$file: Added content to local store at $stored"
+  fi
+
+  # Report destination of content link.
+  if test -f "$stored"; then
+    echo "Content link $file -> $stored"
+  else
+    echo "Content link $file -> (object not in local store)"
+  fi
+}
+
+ExternalData_non_content_link() {
+  # Reject simultaneous raw file and content link.
+  files=$(git ls-files -- "$file.md5")
+  if test -n "$files"; then
+    die "$file: file may not coexist with $files"
+  fi
+}
+
 #-----------------------------------------------------------------------------
 
 # Check that developer setup is up-to-date.
@@ -24,3 +80,18 @@ if test $lastSetupForDevelopment -lt $SetupForDevelopment_VERSION; then
   Utilities/SetupForDevelopment.sh
 '
 fi
+
+#-----------------------------------------------------------------------------
+
+# Local ExternalData object repository.
+ExternalData_STORE=".ExternalData"
+
+# Process content links created by/for the CMake ExternalData module.
+git diff-index --cached HEAD --diff-filter=AM |
+while read src_mode dst_mode src_obj dst_obj status file; do
+  if echo "$dst_mode $file" | egrep-q '^100644 .*\.(md5)$'; then
+    ExternalData_stage_linked_content
+  else
+    ExternalData_non_content_link
+  fi
+done || exit 1
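For orientation, a content link is a small text file whose name ends in .md5 and whose body is the 32-character MD5 of the real data object; a minimal sketch of what the hook above does with one staged link (file name and hash are invented for illustration):

    # The staged link and the object the ExternalData module may have left beside it.
    cat Testing/Data/Baseline/TestFoo.png.md5
    #   0123456789abcdef0123456789abcdef
    ls Testing/Data/Baseline/.ExternalData_MD5_0123456789abcdef0123456789abcdef

    # On commit the hook loads that object into Git under refs/data/MD5/<hash>
    # and moves it into the local store directory.
    git rev-parse refs/data/MD5/0123456789abcdef0123456789abcdef
    ls .ExternalData/MD5/0123456789abcdef0123456789abcdef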
diff --git a/Utilities/SetupForDevelopment.sh b/Utilities/SetupForDevelopment.sh
index f34f0ad..9af3f8c 100755
--- a/Utilities/SetupForDevelopment.sh
+++ b/Utilities/SetupForDevelopment.sh
@@ -6,6 +6,7 @@ Utilities/GitSetup/setup-hooks && echo &&
 Utilities/Scripts/SetupGitAliases.sh && echo &&
 (Utilities/GitSetup/setup-gerrit ||
  echo 'Failed to setup Gerrit.  Run this again to retry.') && echo &&
+Utilities/Scripts/SetupExternalData.sh && echo &&
 Utilities/GitSetup/tips
 
 # Rebase master by default
diff --git a/Utilities/octree/octree b/Utilities/octree/octree
index d5b7ae3..b848803 100644
--- a/Utilities/octree/octree
+++ b/Utilities/octree/octree
@@ -5,14 +5,6 @@
 #include <stdexcept>
 #include <vector>
 
-//#ifdef __APPLE__
-/* What software-bundling DRMoron at Apple thought this was OK?!?!! */
-//#undef A_
-//#undef P_
-//#undef R_
-//#undef T_
-//#endif // __APPLE__
-
 #if defined(_MSC_VER)
 /* What bit-brained bug generating embrace-and-extinguisher thought of this?!??!! */
 #  pragma warning (disable: 4181) /* qualifier applied to reference type ignored */
diff --git a/Utilities/octree/octree_cursor.cxx b/Utilities/octree/octree_cursor.cxx
index 5de4513..3a24b8b 100644
--- a/Utilities/octree/octree_cursor.cxx
+++ b/Utilities/octree/octree_cursor.cxx
@@ -89,14 +89,6 @@ octree_cursor<T_,R_,P_,O_,OP_,d_>::octree_cursor( const const_path& src )
   this->_M_current_node = src._M_current_node;
 }
 
-/**\brief Destructor.
-  *
-  */
-template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ >
-octree_cursor<T_,R_,P_,O_,OP_,d_>::~octree_cursor()
-{
-}
-
 /**\brief Move the cursor up one level.
   *
   * If this is called when the cursor is on the root node, it has no effect.
diff --git a/Utilities/octree/octree_cursor.h b/Utilities/octree/octree_cursor.h
index c485f8e..57276b8 100644
--- a/Utilities/octree/octree_cursor.h
+++ b/Utilities/octree/octree_cursor.h
@@ -36,7 +36,6 @@ public:
   octree_cursor( octree_pointer otree );
   octree_cursor( octree_node_pointer oroot );
   octree_cursor( const const_path& src );
-  ~octree_cursor();
 
   void up();
   void down( int child_of_this_node );
@@ -49,8 +48,8 @@ public:
 
   bool visit( const std::vector<int>& path );
 
-  virtual self_path& operator = ( const path& it );
-  virtual self_path& operator = ( const const_path& it );
+  self_path& operator = ( const path& it );
+  self_path& operator = ( const const_path& it );
 };
 
 #endif // __octree_cursor
diff --git a/Utilities/octree/octree_iterator.cxx b/Utilities/octree/octree_iterator.cxx
index b9a94ee..5bfbc63 100644
--- a/Utilities/octree/octree_iterator.cxx
+++ b/Utilities/octree/octree_iterator.cxx
@@ -93,14 +93,6 @@ octree_iterator<T_,R_,P_,O_,OP_,d_>::octree_iterator( const const_iterator& it )
   this->_M_current_node = it._M_current_node;
 }
 
-/**\brief Destructor.
-  *
-  */
-template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ >
-octree_iterator<T_,R_,P_,O_,OP_,d_>::~octree_iterator()
-{
-}
-
 /**\brief Utility routine used to advance the iterator if possible.
   *
   */
@@ -268,30 +260,6 @@ void octree_iterator<T_,R_,P_,O_,OP_,d_>::immediate_family( bool val )
   *\brief Return whether the iterator is set to traverse the entire tree or just the siblings of the current node.
   */
 
-/**\brief Assignment operator (for copying iterators of mutable nodes).
-  *
-  */
-template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ >
-octree_iterator< T_, R_, P_, O_, OP_, d_ >& octree_iterator<T_,R_,P_,O_,OP_,d_>::operator = ( const iterator& it )
-{
-  this->octree_path<T_,R_,P_,O_,OP_,d_>::operator=( it );
-  this->_M_immediate_family = it._M_immediate_family;
-  this->_M_only_leaf_nodes = it._M_only_leaf_nodes;
-  return *this;
-}
-
-/**\brief Assignment operator (for copying iterators of immutable nodes).
-  *
-  */
-template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ >
-octree_iterator< T_, R_, P_, O_, OP_, d_ >& octree_iterator<T_,R_,P_,O_,OP_,d_>::operator = ( const const_iterator& it )
-{
-  this->octree_path<T_,R_,P_,O_,OP_,d_>::operator=( it );
-  this->_M_immediate_family = it._M_immediate_family;
-  this->_M_only_leaf_nodes = it._M_only_leaf_nodes;
-  return *this;
-}
-
 /**\fn template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ > \
   *    self_iterator& octree_iterator<T_,R_,P_,O_,OP_,d_>::operator ++ ()
   *\brief Move to the next node in the octree that satisfies the traversal criteria.
diff --git a/Utilities/octree/octree_iterator.h b/Utilities/octree/octree_iterator.h
index a127288..0117c77 100644
--- a/Utilities/octree/octree_iterator.h
+++ b/Utilities/octree/octree_iterator.h
@@ -37,7 +37,6 @@ public:
   octree_iterator();
   octree_iterator( octree_node_pointer oroot, octree_node_pointer onode, bool only_leaves = true );
   octree_iterator( const const_iterator& it );
-  ~octree_iterator();
 
   octree_node_pointer check_incr();
   octree_node_pointer check_decr();
@@ -45,9 +44,6 @@ public:
   bool immediate_family() const { return this->_M_immediate_family; }
   bool& immediate_family() { return this->_M_immediate_family; }
 
-  virtual self_iterator& operator = ( const iterator& it );
-  virtual self_iterator& operator = ( const const_iterator& it );
-
   self_iterator& operator ++ ()      { this->_M_current_node = check_incr(); return *this; }
   self_iterator  operator ++ ( int ) { self_iterator tmp = *this; this->_M_current_node = check_incr(); return tmp; }
 
diff --git a/Utilities/octree/octree_path.cxx b/Utilities/octree/octree_path.cxx
index 1f78ba6..487d055 100644
--- a/Utilities/octree/octree_path.cxx
+++ b/Utilities/octree/octree_path.cxx
@@ -109,14 +109,6 @@ octree_path<T_,R_,P_,O_,OP_,d_>::octree_path( octree_node_pointer root, std::vec
     }
 }
 
-/**\brief Destructor.
-  *
-  */
-template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ >
-octree_path<T_,R_,P_,O_,OP_,d_>::~octree_path()
-{
-}
-
 /**\fn template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ > \
   *    octree_node_reference octree_path<T_,R_,P_,O_,OP_,d_>::operator * () const
   *\brief Provide access to the node at the current path head.
@@ -135,39 +127,6 @@ octree_path<T_,R_,P_,O_,OP_,d_>::~octree_path()
   * and so forth.
   */
 
-
-/**\brief Assignment operator (for copying paths of mutable nodes).
-  *
-  */
-template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ >
-octree_path<T_,R_,P_,O_,OP_,d_>& octree_path<T_,R_,P_,O_,OP_,d_>::operator = ( const path& src )
-{
-  this->_M_root = src._M_root;
-  this->_M_parents = src._M_parents;
-  this->_M_indices = src._M_indices;
-  this->_M_current_node = src._M_current_node;
-  return *this;
-}
-
-/**\fn    template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ > \
-  *       self_path& octree_path<T_,R_,P_,O_,OP_,d_>::operator = ( const const_path& src )
-  *\brief Assignment operator (for copying paths of immutable nodes).
-  * Frappy
-  */
-
-/**\brief Assignment operator (for copying paths of immutable nodes).
-  *
-  */
-template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ >
-octree_path<T_,R_,P_,O_,OP_,d_>& octree_path<T_,R_,P_,O_,OP_,d_>::operator = ( const const_path& src )
-{
-  this->_M_root = const_cast<octree_node_pointer>( src._M_root );
-  this->_M_parents = src._M_parents;
-  this->_M_indices = src._M_indices;
-  this->_M_current_node = src._M_current_node;
-  return *this;
-}
-
 /**\fn template< typename T_, typename R_, typename P_, typename O_, typename OP_, int d_ > \
   *    bool octree_path<T_,R_,P_,O_,OP_,d_>::operator == ( const path& it )
   *\brief Compare two paths for equality.
diff --git a/Utilities/octree/octree_path.h b/Utilities/octree/octree_path.h
index 0cea77d..bfe2919 100644
--- a/Utilities/octree/octree_path.h
+++ b/Utilities/octree/octree_path.h
@@ -31,16 +31,12 @@ public:
   octree_path( octree_pointer otree );
   octree_path( octree_node_pointer oroot );
   octree_path( octree_node_pointer oroot, std::vector<int>& children );
-  virtual ~octree_path();
 
   octree_node_reference operator * () const { return *_M_current_node; }
   octree_node_pointer operator -> () const { return &(operator*()); }
 
   size_type level() const { return this->_M_parents.size(); }
 
-  self_path& operator = ( const path& it );
-  self_path& operator = ( const const_path& src );
-
   bool operator == ( const path& it ) { return _M_root == it._M_root && _M_current_node == it._M_current_node; }
   bool operator == ( const const_path& it ) { return _M_root == it._M_root && _M_current_node == it._M_current_node; }
 
diff --git a/Views/Infovis/CMakeLists.txt b/Views/Infovis/CMakeLists.txt
index 5a5c3ac..dc6dbff 100644
--- a/Views/Infovis/CMakeLists.txt
+++ b/Views/Infovis/CMakeLists.txt
@@ -1,8 +1,10 @@
 SET(Module_SRCS
   vtkApplyColors.cxx
   vtkApplyIcons.cxx
+  vtkDendrogramItem.cxx
   vtkGraphItem.cxx
   vtkGraphLayoutView.cxx
+  vtkHeatmapItem.cxx
   vtkHierarchicalGraphPipeline.cxx
   vtkHierarchicalGraphView.cxx
   vtkIcicleView.cxx
@@ -17,6 +19,8 @@ SET(Module_SRCS
   vtkRenderedSurfaceRepresentation.cxx
   vtkRenderedTreeAreaRepresentation.cxx
   vtkRenderView.cxx
+  vtkSCurveSpline.cxx
+  vtkTanglegramItem.cxx
   vtkTreeAreaView.cxx
   vtkTreeHeatmapItem.cxx
   vtkTreeMapView.cxx
diff --git a/Views/Infovis/Testing/Cxx/CMakeLists.txt b/Views/Infovis/Testing/Cxx/CMakeLists.txt
index d76ecae..917855b 100644
--- a/Views/Infovis/Testing/Cxx/CMakeLists.txt
+++ b/Views/Infovis/Testing/Cxx/CMakeLists.txt
@@ -1,43 +1,26 @@
-set(MyTests
+vtk_add_test_cxx(
   TestCoincidentGraphLayoutView.cxx
+  TestColumnTree.cxx
   TestConeLayoutStrategy.cxx
-  TestDendrogramOnly.cxx
+  TestDendrogramItem.cxx
   TestGraphLayoutView.cxx
-  TestHeatmapOnly.cxx
+  TestHeatmapCategoryLegend.cxx
+  TestHeatmapItem.cxx
+  TestHeatmapScalarLegend.cxx
   TestHierarchicalGraphView.cxx
   TestIcicleView.cxx
   TestInteractorStyleTreeMapHover.cxx
   TestNetworkViews.cxx
+  TestParallelCoordinatesView.cxx
   TestRenderView.cxx
   TestSpanTreeLayoutStrategy.cxx
+  TestTanglegramItem.cxx
   TestTreeHeatmapAutoCollapse.cxx
   TestTreeHeatmapItem.cxx
   TestTreeMapView.cxx
   TestTreeRingView.cxx
-  ${QtTests}
-)
+  )
 
-if( VTK_DATA_ROOT)
-  set(MyTests ${MyTests} TestIconGlyphFilter.cxx)
-endif()
+vtk_add_test_cxx(TestIconGlyphFilter.cxx)
 
-# Use the testing object factory, to reduce boilerplate code in tests.
-include(vtkTestingObjectFactory)
-vtk_module_test_executable(${vtk-module}CxxTests ${Tests})
-
-set(TestsToRun ${Tests})
-list(REMOVE_ITEM TestsToRun CxxTests.cxx)
-
-# Add all the executables
-foreach(test ${TestsToRun})
-  get_filename_component(TName ${test} NAME_WE)
-  if(VTK_DATA_ROOT)
-    add_test(NAME ${vtk-module}Cxx-${TName}
-            COMMAND ${vtk-module}CxxTests ${TName}
-                                          -D ${VTK_DATA_ROOT}
-                                          -T ${VTK_TEST_OUTPUT_DIR}
-                                          -V Baseline/Views/${TName}.png)
-  else()
-    add_test(NAME ${vtk-module}Cxx-${TName} COMMAND ${vtk-module}CxxTests ${TName})
-  endif()
-endforeach()
+vtk_test_cxx_executable(${vtk-module}CxxTests RENDERING_FACTORY)
diff --git a/Views/Infovis/Testing/Cxx/TestColumnTree.cxx b/Views/Infovis/Testing/Cxx/TestColumnTree.cxx
new file mode 100644
index 0000000..9121a58
--- /dev/null
+++ b/Views/Infovis/Testing/Cxx/TestColumnTree.cxx
@@ -0,0 +1,164 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestColumnTree.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkTreeHeatmapItem.h"
+#include "vtkDataSetAttributes.h"
+#include "vtkDoubleArray.h"
+#include "vtkMutableDirectedGraph.h"
+#include "vtkNew.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+#include "vtkTree.h"
+
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkContextInteractorStyle.h"
+#include "vtkContextMouseEvent.h"
+#include "vtkContextScene.h"
+#include "vtkContextTransform.h"
+#include "vtkContextView.h"
+#include "vtkNew.h"
+
+#include "vtkRegressionTestImage.h"
+
+//----------------------------------------------------------------------------
+int TestColumnTree(int argc, char* argv[])
+{
+  // Construct a tree
+  vtkNew<vtkMutableDirectedGraph> graph;
+  vtkIdType root = graph->AddVertex();
+  vtkIdType internalOne = graph->AddChild(root);
+  vtkIdType internalTwo = graph->AddChild(internalOne);
+  vtkIdType a = graph->AddChild(internalTwo);
+  vtkIdType b = graph->AddChild(internalTwo);
+  vtkIdType c = graph->AddChild(internalOne);
+
+  vtkNew<vtkDoubleArray> weights;
+  weights->SetNumberOfTuples(5);
+  weights->SetValue(graph->GetEdgeId(root, internalOne), 1.0f);
+  weights->SetValue(graph->GetEdgeId(internalOne, internalTwo), 2.0f);
+  weights->SetValue(graph->GetEdgeId(internalTwo, a), 1.0f);
+  weights->SetValue(graph->GetEdgeId(internalTwo, b), 1.0f);
+  weights->SetValue(graph->GetEdgeId(internalOne, c), 3.0f);
+
+  weights->SetName("weight");
+  graph->GetEdgeData()->AddArray(weights.GetPointer());
+
+  vtkNew<vtkStringArray> names;
+  names->SetNumberOfTuples(6);
+  names->SetValue(a, "a");
+  names->SetValue(b, "b");
+  names->SetValue(c, "c");
+
+  names->SetName("node name");
+  graph->GetVertexData()->AddArray(names.GetPointer());
+
+  vtkNew<vtkDoubleArray> nodeWeights;
+  nodeWeights->SetNumberOfTuples(6);
+  nodeWeights->SetValue(root, 0.0f);
+  nodeWeights->SetValue(internalOne, 1.0f);
+  nodeWeights->SetValue(internalTwo, 3.0f);
+  nodeWeights->SetValue(a, 4.0f);
+  nodeWeights->SetValue(b, 4.0f);
+  nodeWeights->SetValue(c, 4.0f);
+  nodeWeights->SetName("node weight");
+  graph->GetVertexData()->AddArray(nodeWeights.GetPointer());
+
+  vtkNew<vtkTree> tree;
+  tree->ShallowCopy(graph.GetPointer());
+
+  vtkNew<vtkTree> tree2;
+  tree2->DeepCopy(tree.GetPointer());
+
+  // Construct a table
+  vtkNew<vtkTable> table;
+  vtkNew<vtkStringArray> tableNames;
+  vtkNew<vtkDoubleArray> m1;
+  vtkNew<vtkDoubleArray> m2;
+  vtkNew<vtkDoubleArray> m3;
+
+  tableNames->SetNumberOfTuples(3);
+  tableNames->SetValue(0, "c");
+  tableNames->SetValue(1, "b");
+  tableNames->SetValue(2, "a");
+  tableNames->SetName("name");
+
+  m1->SetNumberOfTuples(3);
+  m2->SetNumberOfTuples(3);
+  m3->SetNumberOfTuples(3);
+
+  m1->SetName("a");
+  m2->SetName("b");
+  m3->SetName("c");
+
+  m1->SetValue(0, 1.0f);
+  m1->SetValue(1, 3.0f);
+  m1->SetValue(2, 1.0f);
+
+  m2->SetValue(0, 2.0f);
+  m2->SetValue(1, 2.0f);
+  m2->SetValue(2, 2.0f);
+
+  m3->SetValue(0, 3.0f);
+  m3->SetValue(1, 1.0f);
+  m3->SetValue(2, 3.0f);
+
+  table->AddColumn(tableNames.GetPointer());
+  table->AddColumn(m1.GetPointer());
+  table->AddColumn(m2.GetPointer());
+  table->AddColumn(m3.GetPointer());
+
+  vtkNew<vtkTreeHeatmapItem> treeItem;
+  treeItem->SetTree(tree.GetPointer());
+  treeItem->SetColumnTree(tree2.GetPointer());
+  treeItem->SetTable(table.GetPointer());
+
+  vtkNew<vtkContextTransform> trans;
+  trans->SetInteractive(true);
+  trans->AddItem(treeItem.GetPointer());
+
+  // center the item within the render window
+  trans->Translate(80, 25);
+  trans->Scale(1.5, 1.5);
+
+  vtkSmartPointer<vtkContextView> view = vtkSmartPointer<vtkContextView>::New();
+  view->GetRenderWindow()->SetSize(400, 200);
+  view->GetRenderer()->SetBackground(1.0, 1.0, 1.0);
+  view->GetScene()->AddItem(trans.GetPointer());
+
+  // Finally, render the scene and compare the image to a reference image
+  view->GetRenderWindow()->SetMultiSamples(0);
+  view->GetRenderWindow()->Render();
+
+  // collapse a column subtree
+  vtkContextMouseEvent mouseEvent;
+  mouseEvent.SetInteractor(view->GetInteractor());
+  vtkVector2f pos;
+  mouseEvent.SetButton(vtkContextMouseEvent::LEFT_BUTTON);
+  pos.Set(62, 81);
+  mouseEvent.SetPos(pos);
+  treeItem->MouseDoubleClickEvent(mouseEvent);
+  view->GetRenderWindow()->Render();
+
+  int retVal = vtkRegressionTestImage(view->GetRenderWindow());
+  if (retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    view->GetRenderWindow()->Render();
+    view->GetInteractor()->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return !retVal;
+}
diff --git a/Views/Infovis/Testing/Cxx/TestDendrogramItem.cxx b/Views/Infovis/Testing/Cxx/TestDendrogramItem.cxx
new file mode 100644
index 0000000..f614733
--- /dev/null
+++ b/Views/Infovis/Testing/Cxx/TestDendrogramItem.cxx
@@ -0,0 +1,117 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestDendrogramItem.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkDendrogramItem.h"
+#include "vtkDataSetAttributes.h"
+#include "vtkDoubleArray.h"
+#include "vtkMutableDirectedGraph.h"
+#include "vtkNew.h"
+#include "vtkStringArray.h"
+#include "vtkTree.h"
+
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkContextInteractorStyle.h"
+#include "vtkContextActor.h"
+#include "vtkContextScene.h"
+#include "vtkContextTransform.h"
+#include "vtkNew.h"
+
+#include "vtkRegressionTestImage.h"
+
+//----------------------------------------------------------------------------
+int TestDendrogramItem(int argc, char* argv[])
+{
+  vtkNew<vtkMutableDirectedGraph> graph;
+  vtkIdType root = graph->AddVertex();
+  vtkIdType internalOne = graph->AddChild(root);
+  vtkIdType internalTwo = graph->AddChild(internalOne);
+  vtkIdType a = graph->AddChild(internalTwo);
+  vtkIdType b = graph->AddChild(internalTwo);
+  vtkIdType c = graph->AddChild(internalOne);
+
+  vtkNew<vtkDoubleArray> weights;
+  weights->SetNumberOfTuples(5);
+  weights->SetValue(graph->GetEdgeId(root, internalOne), 1.0f);
+  weights->SetValue(graph->GetEdgeId(internalOne, internalTwo), 2.0f);
+  weights->SetValue(graph->GetEdgeId(internalTwo, a), 1.0f);
+  weights->SetValue(graph->GetEdgeId(internalTwo, b), 1.0f);
+  weights->SetValue(graph->GetEdgeId(internalOne, c), 3.0f);
+
+  weights->SetName("weight");
+  graph->GetEdgeData()->AddArray(weights.GetPointer());
+
+  vtkNew<vtkStringArray> names;
+  names->SetNumberOfTuples(6);
+  names->SetValue(a, "a");
+  names->SetValue(b, "b");
+  names->SetValue(c, "c");
+
+  names->SetName("node name");
+  graph->GetVertexData()->AddArray(names.GetPointer());
+
+  vtkNew<vtkDoubleArray> nodeWeights;
+  nodeWeights->SetNumberOfTuples(6);
+  nodeWeights->SetValue(root, 0.0f);
+  nodeWeights->SetValue(internalOne, 1.0f);
+  nodeWeights->SetValue(internalTwo, 3.0f);
+  nodeWeights->SetValue(a, 4.0f);
+  nodeWeights->SetValue(b, 4.0f);
+  nodeWeights->SetValue(c, 4.0f);
+  nodeWeights->SetName("node weight");
+  graph->GetVertexData()->AddArray(nodeWeights.GetPointer());
+
+  vtkNew<vtkContextActor> actor;
+
+  vtkNew<vtkTree> tree;
+  tree->ShallowCopy(graph.GetPointer());
+
+  vtkNew<vtkDendrogramItem> dendrogram;
+  dendrogram->SetTree(tree.GetPointer());
+  dendrogram->SetPosition(40, 15);
+
+  vtkNew<vtkContextTransform> trans;
+  trans->SetInteractive(true);
+  trans->AddItem(dendrogram.GetPointer());
+  trans->Scale(3, 3);
+  actor->GetScene()->AddItem(trans.GetPointer());
+
+  vtkNew<vtkRenderer> renderer;
+  renderer->SetBackground(1.0, 1.0, 1.0);
+
+  vtkNew<vtkRenderWindow> renderWindow;
+  renderWindow->SetSize(400, 200);
+  renderWindow->AddRenderer(renderer.GetPointer());
+  renderer->AddActor(actor.GetPointer());
+
+  vtkNew<vtkContextInteractorStyle> interactorStyle;
+  interactorStyle->SetScene(actor->GetScene());
+
+  vtkNew<vtkRenderWindowInteractor> interactor;
+  interactor->SetInteractorStyle(interactorStyle.GetPointer());
+  interactor->SetRenderWindow(renderWindow.GetPointer());
+  renderWindow->SetMultiSamples(0);
+  renderWindow->Render();
+
+  int retVal = vtkRegressionTestImage(renderWindow.GetPointer());
+  if (retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    renderWindow->Render();
+    interactor->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return !retVal;
+}
diff --git a/Views/Infovis/Testing/Cxx/TestDendrogramOnly.cxx b/Views/Infovis/Testing/Cxx/TestDendrogramOnly.cxx
deleted file mode 100644
index 9e3d59a..0000000
--- a/Views/Infovis/Testing/Cxx/TestDendrogramOnly.cxx
+++ /dev/null
@@ -1,115 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestDendrogramOnly.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "vtkTreeHeatmapItem.h"
-#include "vtkDataSetAttributes.h"
-#include "vtkDoubleArray.h"
-#include "vtkMutableDirectedGraph.h"
-#include "vtkNew.h"
-#include "vtkStringArray.h"
-#include "vtkTree.h"
-
-#include "vtkRenderer.h"
-#include "vtkRenderWindow.h"
-#include "vtkRenderWindowInteractor.h"
-#include "vtkContextInteractorStyle.h"
-#include "vtkContextActor.h"
-#include "vtkContextScene.h"
-#include "vtkContextTransform.h"
-#include "vtkNew.h"
-
-#include "vtkRegressionTestImage.h"
-
-//----------------------------------------------------------------------------
-int TestDendrogramOnly(int argc, char* argv[])
-{
-  vtkNew<vtkMutableDirectedGraph> graph;
-  vtkIdType root = graph->AddVertex();
-  vtkIdType internalOne = graph->AddChild(root);
-  vtkIdType internalTwo = graph->AddChild(internalOne);
-  vtkIdType a = graph->AddChild(internalTwo);
-  vtkIdType b = graph->AddChild(internalTwo);
-  vtkIdType c = graph->AddChild(internalOne);
-
-  vtkNew<vtkDoubleArray> weights;
-  weights->SetNumberOfTuples(5);
-  weights->SetValue(graph->GetEdgeId(root, internalOne), 1.0f);
-  weights->SetValue(graph->GetEdgeId(internalOne, internalTwo), 2.0f);
-  weights->SetValue(graph->GetEdgeId(internalTwo, a), 1.0f);
-  weights->SetValue(graph->GetEdgeId(internalTwo, b), 1.0f);
-  weights->SetValue(graph->GetEdgeId(internalOne, c), 3.0f);
-
-  weights->SetName("weight");
-  graph->GetEdgeData()->AddArray(weights.GetPointer());
-
-  vtkNew<vtkStringArray> names;
-  names->SetNumberOfTuples(6);
-  names->SetValue(a, "a");
-  names->SetValue(b, "b");
-  names->SetValue(c, "c");
-
-  names->SetName("node name");
-  graph->GetVertexData()->AddArray(names.GetPointer());
-
-  vtkNew<vtkDoubleArray> nodeWeights;
-  nodeWeights->SetNumberOfTuples(6);
-  nodeWeights->SetValue(root, 0.0f);
-  nodeWeights->SetValue(internalOne, 1.0f);
-  nodeWeights->SetValue(internalTwo, 3.0f);
-  nodeWeights->SetValue(a, 4.0f);
-  nodeWeights->SetValue(b, 4.0f);
-  nodeWeights->SetValue(c, 4.0f);
-  nodeWeights->SetName("node weight");
-  graph->GetVertexData()->AddArray(nodeWeights.GetPointer());
-
-  vtkNew<vtkContextActor> actor;
-
-  vtkNew<vtkTree> tree;
-  tree->ShallowCopy(graph.GetPointer());
-
-  vtkNew<vtkTreeHeatmapItem> treeItem;
-  treeItem->SetTree(tree.GetPointer());
-
-  vtkNew<vtkContextTransform> trans;
-  trans->SetInteractive(true);
-  trans->AddItem(treeItem.GetPointer());
-  actor->GetScene()->AddItem(trans.GetPointer());
-
-  vtkNew<vtkRenderer> renderer;
-  renderer->SetBackground(1.0, 1.0, 1.0);
-
-  vtkNew<vtkRenderWindow> renderWindow;
-  renderWindow->SetSize(400, 200);
-  renderWindow->AddRenderer(renderer.GetPointer());
-  renderer->AddActor(actor.GetPointer());
-
-  vtkNew<vtkContextInteractorStyle> interactorStyle;
-  interactorStyle->SetScene(actor->GetScene());
-
-  vtkNew<vtkRenderWindowInteractor> interactor;
-  interactor->SetInteractorStyle(interactorStyle.GetPointer());
-  interactor->SetRenderWindow(renderWindow.GetPointer());
-  renderWindow->SetMultiSamples(0);
-  renderWindow->Render();
-
-  int retVal = vtkRegressionTestImage(renderWindow.GetPointer());
-  if (retVal == vtkRegressionTester::DO_INTERACTOR)
-    {
-    renderWindow->Render();
-    interactor->Start();
-    retVal = vtkRegressionTester::PASSED;
-    }
-  return !retVal;
-}
diff --git a/Views/Infovis/Testing/Cxx/TestHeatmapCategoryLegend.cxx b/Views/Infovis/Testing/Cxx/TestHeatmapCategoryLegend.cxx
new file mode 100644
index 0000000..21d8fcb
--- /dev/null
+++ b/Views/Infovis/Testing/Cxx/TestHeatmapCategoryLegend.cxx
@@ -0,0 +1,87 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestHeatmapCategoryLegend.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkHeatmapItem.h"
+#include "vtkNew.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+
+#include "vtkContextMouseEvent.h"
+#include "vtkContextScene.h"
+#include "vtkContextTransform.h"
+#include "vtkContextView.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+
+#include "vtkRegressionTestImage.h"
+
+//----------------------------------------------------------------------------
+int TestHeatmapCategoryLegend(int argc, char* argv[])
+{
+  vtkNew<vtkTable> table;
+  vtkNew<vtkStringArray> tableNames;
+  vtkNew<vtkStringArray> column;
+
+  tableNames->SetNumberOfTuples(4);
+  tableNames->SetValue(0, "c");
+  tableNames->SetValue(1, "b");
+  tableNames->SetValue(2, "a");
+  tableNames->SetValue(3, "a");
+  tableNames->SetName("names");
+
+  column->SetNumberOfTuples(4);
+  column->SetName("values");
+  column->SetValue(0, "c");
+  column->SetValue(1, "b");
+  column->SetValue(2, "a");
+  column->SetValue(3, "a");
+
+  table->AddColumn(tableNames.GetPointer());
+  table->AddColumn(column.GetPointer());
+
+  vtkNew<vtkHeatmapItem> heatmap;
+  heatmap->SetTable(table.GetPointer());
+
+  vtkNew<vtkContextTransform> trans;
+  trans->SetInteractive(true);
+  trans->AddItem(heatmap.GetPointer());
+  trans->Translate(125, 125);
+
+  vtkNew<vtkContextView> contextView;
+  contextView->GetScene()->AddItem(trans.GetPointer());
+
+  contextView->GetRenderWindow()->SetMultiSamples(0);
+  contextView->GetRenderWindow()->Render();
+
+  // double-click to display the category legend
+  vtkContextMouseEvent mouseEvent;
+  mouseEvent.SetInteractor(contextView->GetInteractor());
+  vtkVector2f pos;
+  mouseEvent.SetButton(vtkContextMouseEvent::LEFT_BUTTON);
+  pos.Set(16, 38);
+  mouseEvent.SetPos(pos);
+  heatmap->MouseDoubleClickEvent(mouseEvent);
+  contextView->GetRenderWindow()->Render();
+
+  int retVal = vtkRegressionTestImage(contextView->GetRenderWindow());
+  if (retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    contextView->GetRenderWindow()->Render();
+    contextView->GetInteractor()->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return !retVal;
+}
diff --git a/Views/Infovis/Testing/Cxx/TestHeatmapItem.cxx b/Views/Infovis/Testing/Cxx/TestHeatmapItem.cxx
new file mode 100644
index 0000000..cba66d5
--- /dev/null
+++ b/Views/Infovis/Testing/Cxx/TestHeatmapItem.cxx
@@ -0,0 +1,119 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestHeatmapItem.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkHeatmapItem.h"
+#include "vtkDoubleArray.h"
+#include "vtkNew.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkContextInteractorStyle.h"
+#include "vtkContextActor.h"
+#include "vtkContextScene.h"
+#include "vtkContextTransform.h"
+#include "vtkNew.h"
+
+#include "vtkRegressionTestImage.h"
+
+//----------------------------------------------------------------------------
+int TestHeatmapItem(int argc, char* argv[])
+{
+  vtkNew<vtkTable> table;
+  vtkNew<vtkStringArray> tableNames;
+  vtkNew<vtkDoubleArray> m1;
+  vtkNew<vtkDoubleArray> m2;
+  vtkNew<vtkDoubleArray> m3;
+  vtkNew<vtkStringArray> m4;
+
+  tableNames->SetNumberOfTuples(3);
+  tableNames->SetValue(0, "c");
+  tableNames->SetValue(1, "b");
+  tableNames->SetValue(2, "a");
+  tableNames->SetName("name");
+
+  m1->SetNumberOfTuples(3);
+  m2->SetNumberOfTuples(3);
+  m3->SetNumberOfTuples(3);
+  m4->SetNumberOfTuples(3);
+
+  m1->SetName("m1");
+  m2->SetName("m2");
+  m3->SetName("m3");
+  m4->SetName("m4");
+
+  m1->SetValue(0, 1.0f);
+  m1->SetValue(1, 3.0f);
+  m1->SetValue(2, 1.0f);
+
+  m2->SetValue(0, 2.0f);
+  m2->SetValue(1, 2.0f);
+  m2->SetValue(2, 2.0f);
+
+  m3->SetValue(0, 3.0f);
+  m3->SetValue(1, 1.0f);
+  m3->SetValue(2, 3.0f);
+
+  m4->SetValue(0, "a");
+  m4->SetValue(1, "b");
+  m4->SetValue(2, "c");
+
+  table->AddColumn(tableNames.GetPointer());
+  table->AddColumn(m1.GetPointer());
+  table->AddColumn(m2.GetPointer());
+  table->AddColumn(m3.GetPointer());
+  table->AddColumn(m4.GetPointer());
+
+  vtkNew<vtkContextActor> actor;
+
+  vtkNew<vtkHeatmapItem> heatmap;
+  heatmap->SetTable(table.GetPointer());
+  heatmap->SetPosition(20, 5);
+
+  vtkNew<vtkContextTransform> trans;
+  trans->SetInteractive(true);
+  trans->AddItem(heatmap.GetPointer());
+  trans->Scale(2, 2);
+  actor->GetScene()->AddItem(trans.GetPointer());
+
+  vtkNew<vtkRenderer> renderer;
+  renderer->SetBackground(1.0, 1.0, 1.0);
+
+  vtkNew<vtkRenderWindow> renderWindow;
+  renderWindow->SetSize(400, 200);
+  renderWindow->AddRenderer(renderer.GetPointer());
+  renderer->AddActor(actor.GetPointer());
+  actor->GetScene()->SetRenderer(renderer.GetPointer());
+
+  vtkNew<vtkContextInteractorStyle> interactorStyle;
+  interactorStyle->SetScene(actor->GetScene());
+
+  vtkNew<vtkRenderWindowInteractor> interactor;
+  interactor->SetInteractorStyle(interactorStyle.GetPointer());
+  interactor->SetRenderWindow(renderWindow.GetPointer());
+  renderWindow->SetMultiSamples(0);
+  renderWindow->Render();
+
+  int retVal = vtkRegressionTestImage(renderWindow.GetPointer());
+  if (retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    renderWindow->Render();
+    interactor->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return !retVal;
+}
diff --git a/Views/Infovis/Testing/Cxx/TestHeatmapOnly.cxx b/Views/Infovis/Testing/Cxx/TestHeatmapOnly.cxx
deleted file mode 100644
index 353dd1f..0000000
--- a/Views/Infovis/Testing/Cxx/TestHeatmapOnly.cxx
+++ /dev/null
@@ -1,117 +0,0 @@
-/*=========================================================================
-
-  Program:   Visualization Toolkit
-  Module:    TestHeatmapOnly.cxx
-
-  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
-  All rights reserved.
-  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
-
-     This software is distributed WITHOUT ANY WARRANTY; without even
-     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
-     PURPOSE.  See the above copyright notice for more information.
-
-=========================================================================*/
-
-#include "vtkTreeHeatmapItem.h"
-#include "vtkDoubleArray.h"
-#include "vtkNew.h"
-#include "vtkStringArray.h"
-#include "vtkTable.h"
-
-#include "vtkRenderer.h"
-#include "vtkRenderWindow.h"
-#include "vtkRenderWindowInteractor.h"
-#include "vtkContextInteractorStyle.h"
-#include "vtkContextActor.h"
-#include "vtkContextScene.h"
-#include "vtkContextTransform.h"
-#include "vtkNew.h"
-
-#include "vtkRegressionTestImage.h"
-
-//----------------------------------------------------------------------------
-int TestHeatmapOnly(int argc, char* argv[])
-{
-  vtkNew<vtkTable> table;
-  vtkNew<vtkStringArray> tableNames;
-  vtkNew<vtkDoubleArray> m1;
-  vtkNew<vtkDoubleArray> m2;
-  vtkNew<vtkDoubleArray> m3;
-  vtkNew<vtkStringArray> m4;
-
-  tableNames->SetNumberOfTuples(3);
-  tableNames->SetValue(0, "c");
-  tableNames->SetValue(1, "b");
-  tableNames->SetValue(2, "a");
-  tableNames->SetName("name");
-
-  m1->SetNumberOfTuples(3);
-  m2->SetNumberOfTuples(3);
-  m3->SetNumberOfTuples(3);
-  m4->SetNumberOfTuples(3);
-
-  m1->SetName("m1");
-  m2->SetName("m2");
-  m3->SetName("m3");
-  m4->SetName("m4");
-
-  m1->SetValue(0, 1.0f);
-  m1->SetValue(1, 3.0f);
-  m1->SetValue(2, 1.0f);
-
-  m2->SetValue(0, 2.0f);
-  m2->SetValue(1, 2.0f);
-  m2->SetValue(2, 2.0f);
-
-  m3->SetValue(0, 3.0f);
-  m3->SetValue(1, 1.0f);
-  m3->SetValue(2, 3.0f);
-
-  m4->SetValue(0, "a");
-  m4->SetValue(1, "b");
-  m4->SetValue(2, "c");
-
-  table->AddColumn(tableNames.GetPointer());
-  table->AddColumn(m1.GetPointer());
-  table->AddColumn(m2.GetPointer());
-  table->AddColumn(m3.GetPointer());
-  table->AddColumn(m4.GetPointer());
-
-  vtkNew<vtkContextActor> actor;
-
-  vtkNew<vtkTreeHeatmapItem> treeItem;
-  treeItem->SetTable(table.GetPointer());
-
-  vtkNew<vtkContextTransform> trans;
-  trans->SetInteractive(true);
-  trans->AddItem(treeItem.GetPointer());
-  actor->GetScene()->AddItem(trans.GetPointer());
-
-  vtkNew<vtkRenderer> renderer;
-  renderer->SetBackground(1.0, 1.0, 1.0);
-
-  vtkNew<vtkRenderWindow> renderWindow;
-  renderWindow->SetSize(400, 200);
-  renderWindow->AddRenderer(renderer.GetPointer());
-  renderer->AddActor(actor.GetPointer());
-  actor->GetScene()->SetRenderer(renderer.GetPointer());
-
-  vtkNew<vtkContextInteractorStyle> interactorStyle;
-  interactorStyle->SetScene(actor->GetScene());
-
-  vtkNew<vtkRenderWindowInteractor> interactor;
-  interactor->SetInteractorStyle(interactorStyle.GetPointer());
-  interactor->SetRenderWindow(renderWindow.GetPointer());
-  renderWindow->SetMultiSamples(0);
-  renderWindow->Render();
-
-  int retVal = vtkRegressionTestImage(renderWindow.GetPointer());
-  if (retVal == vtkRegressionTester::DO_INTERACTOR)
-    {
-    renderWindow->Render();
-    interactor->Start();
-    retVal = vtkRegressionTester::PASSED;
-    }
-  return !retVal;
-}
diff --git a/Views/Infovis/Testing/Cxx/TestHeatmapScalarLegend.cxx b/Views/Infovis/Testing/Cxx/TestHeatmapScalarLegend.cxx
new file mode 100644
index 0000000..decd23d
--- /dev/null
+++ b/Views/Infovis/Testing/Cxx/TestHeatmapScalarLegend.cxx
@@ -0,0 +1,86 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestHeatmapScalarLegend.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkHeatmapItem.h"
+#include "vtkNew.h"
+#include "vtkIntArray.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+
+#include "vtkContextMouseEvent.h"
+#include "vtkContextScene.h"
+#include "vtkContextTransform.h"
+#include "vtkContextView.h"
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+
+#include "vtkRegressionTestImage.h"
+
+//----------------------------------------------------------------------------
+int TestHeatmapScalarLegend(int argc, char* argv[])
+{
+  vtkNew<vtkTable> table;
+  vtkNew<vtkStringArray> tableNames;
+  vtkNew<vtkIntArray> column;
+
+  tableNames->SetNumberOfTuples(3);
+  tableNames->SetValue(0, "3");
+  tableNames->SetValue(1, "2");
+  tableNames->SetValue(2, "1");
+  tableNames->SetName("names");
+
+  column->SetNumberOfTuples(3);
+  column->SetName("values");
+  column->SetValue(0, 3);
+  column->SetValue(1, 2);
+  column->SetValue(2, 1);
+
+  table->AddColumn(tableNames.GetPointer());
+  table->AddColumn(column.GetPointer());
+
+  vtkNew<vtkHeatmapItem> heatmap;
+  heatmap->SetTable(table.GetPointer());
+
+  vtkNew<vtkContextTransform> trans;
+  trans->SetInteractive(true);
+  trans->AddItem(heatmap.GetPointer());
+  trans->Translate(125, 125);
+
+  vtkNew<vtkContextView> contextView;
+  contextView->GetScene()->AddItem(trans.GetPointer());
+
+  contextView->GetRenderWindow()->SetMultiSamples(0);
+  contextView->GetRenderWindow()->Render();
+
+  // double-click to display the color legend
+  vtkContextMouseEvent mouseEvent;
+  mouseEvent.SetInteractor(contextView->GetInteractor());
+  vtkVector2f pos;
+  mouseEvent.SetButton(vtkContextMouseEvent::LEFT_BUTTON);
+  pos.Set(16, 38);
+  mouseEvent.SetPos(pos);
+  heatmap->MouseDoubleClickEvent(mouseEvent);
+  contextView->GetRenderWindow()->Render();
+
+  int retVal = vtkRegressionTestImage(contextView->GetRenderWindow());
+  if (retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    contextView->GetRenderWindow()->Render();
+    contextView->GetInteractor()->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return !retVal;
+}
diff --git a/Views/Infovis/Testing/Cxx/TestParallelCoordinatesView.cxx b/Views/Infovis/Testing/Cxx/TestParallelCoordinatesView.cxx
new file mode 100644
index 0000000..2e7321f
--- /dev/null
+++ b/Views/Infovis/Testing/Cxx/TestParallelCoordinatesView.cxx
@@ -0,0 +1,120 @@
+#include <vtkVersion.h>
+
+#include <vtkSmartPointer.h>
+#include <vtkPolyData.h>
+#include <vtkPointData.h>
+#include <vtkFloatArray.h>
+#include <vtkParallelCoordinatesView.h>
+#include <vtkParallelCoordinatesRepresentation.h>
+#include <vtkRenderWindow.h>
+#include <vtkRenderWindowInteractor.h>
+
+int TestParallelCoordinatesView(int, char*[])
+{
+  int curves = 1;
+
+  vtkSmartPointer<vtkFloatArray> array1 =
+    vtkSmartPointer<vtkFloatArray>::New();
+  array1->SetName("Array1");
+  array1->SetNumberOfComponents(1);
+  array1->InsertNextValue(0);
+  array1->InsertNextValue(1);
+  array1->InsertNextValue(2);
+  array1->InsertNextValue(3);
+  array1->InsertNextValue(4);
+
+  vtkSmartPointer<vtkFloatArray> array2 =
+    vtkSmartPointer<vtkFloatArray>::New();
+  array2->SetName("Array2");
+  array2->SetNumberOfComponents(1);
+  array2->InsertNextValue(-0);
+  array2->InsertNextValue(-1);
+  array2->InsertNextValue(-2);
+  array2->InsertNextValue(-3);
+  array2->InsertNextValue(-4);
+
+  vtkSmartPointer<vtkFloatArray> array3 =
+    vtkSmartPointer<vtkFloatArray>::New();
+  array3->SetName("Array3");
+  array3->SetNumberOfComponents(1);
+  array3->InsertNextValue(0);
+  array3->InsertNextValue(1);
+  array3->InsertNextValue(4);
+  array3->InsertNextValue(9);
+  array3->InsertNextValue(16);
+
+  vtkSmartPointer<vtkFloatArray> array4 =
+    vtkSmartPointer<vtkFloatArray>::New();
+  array4->SetName("Array4");
+  array4->SetNumberOfComponents(1);
+  array4->InsertNextValue(0);
+  array4->InsertNextValue(2);
+  array4->InsertNextValue(4);
+  array4->InsertNextValue(6);
+  array4->InsertNextValue(8);
+
+  vtkSmartPointer<vtkFloatArray> array5 =
+    vtkSmartPointer<vtkFloatArray>::New();
+  array5->SetName("Array5");
+  array5->SetNumberOfComponents(1);
+  array5->InsertNextValue(0);
+  array5->InsertNextValue(1);
+  array5->InsertNextValue(0.5);
+  array5->InsertNextValue(0.33);
+  array5->InsertNextValue(0.25);
+
+  vtkSmartPointer<vtkFloatArray> array6 =
+    vtkSmartPointer<vtkFloatArray>::New();
+  array6->SetName("Array6");
+  array6->SetNumberOfComponents(1);
+  array6->InsertNextValue(3);
+  array6->InsertNextValue(6);
+  array6->InsertNextValue(2);
+  array6->InsertNextValue(4);
+  array6->InsertNextValue(9);
+
+  vtkSmartPointer<vtkPolyData> polydata =
+    vtkSmartPointer<vtkPolyData>::New();
+  polydata->GetPointData()->AddArray(array1);
+  polydata->GetPointData()->AddArray(array2);
+  polydata->GetPointData()->AddArray(array3);
+  polydata->GetPointData()->AddArray(array4);
+  polydata->GetPointData()->AddArray(array5);
+  polydata->GetPointData()->AddArray(array6);
+
+  // Set up the parallel coordinates representation to be used in the View
+  vtkSmartPointer<vtkParallelCoordinatesRepresentation> rep =
+    vtkSmartPointer<vtkParallelCoordinatesRepresentation>::New();
+  rep->SetInputData(polydata);
+
+  // List all of the attribute arrays you want plotted in parallel coordinates
+  rep->SetInputArrayToProcess(0,0,0,0,"Array1");
+  rep->SetInputArrayToProcess(1,0,0,0,"Array2");
+  rep->SetInputArrayToProcess(2,0,0,0,"Array3");
+  rep->SetInputArrayToProcess(3,0,0,0,"Array4");
+  rep->SetInputArrayToProcess(4,0,0,0,"Array5");
+  rep->SetInputArrayToProcess(5,0,0,0,"Array6");
+
+  rep->SetUseCurves(curves);
+  rep->SetLineOpacity(0.5);
+
+  // Set up the Parallel Coordinates View and hook in the Representation
+  vtkSmartPointer<vtkParallelCoordinatesView> view =
+    vtkSmartPointer<vtkParallelCoordinatesView>::New();
+  view->SetRepresentation(rep);
+  view->SetInspectMode(1);
+
+  // Brush Mode determines the type of interaction you perform to select data
+  view->SetBrushModeToLasso();
+  view->SetBrushOperatorToReplace();
+
+  // Set up render window
+  view->GetRenderWindow()->SetSize(600,300);
+  view->ResetCamera();
+  view->Render();
+
+  // Start interaction event loop
+  view->GetInteractor()->Start();
+
+  return EXIT_SUCCESS;
+}
diff --git a/Views/Infovis/Testing/Cxx/TestTanglegramItem.cxx b/Views/Infovis/Testing/Cxx/TestTanglegramItem.cxx
new file mode 100644
index 0000000..a0e7f4a
--- /dev/null
+++ b/Views/Infovis/Testing/Cxx/TestTanglegramItem.cxx
@@ -0,0 +1,188 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    TestTanglegramItem.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+
+#include "vtkTanglegramItem.h"
+#include "vtkDataSetAttributes.h"
+#include "vtkDoubleArray.h"
+#include "vtkMutableDirectedGraph.h"
+#include "vtkNew.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+#include "vtkTree.h"
+
+#include "vtkRenderer.h"
+#include "vtkRenderWindow.h"
+#include "vtkRenderWindowInteractor.h"
+#include "vtkContextInteractorStyle.h"
+#include "vtkContextActor.h"
+#include "vtkContextMouseEvent.h"
+#include "vtkContextScene.h"
+#include "vtkContextTransform.h"
+#include "vtkNew.h"
+
+#include "vtkRegressionTestImage.h"
+
+//----------------------------------------------------------------------------
+int TestTanglegramItem(int argc, char* argv[])
+{
+  // tree #1
+  vtkNew<vtkMutableDirectedGraph> graph1;
+  vtkIdType root = graph1->AddVertex();
+  vtkIdType internalOne = graph1->AddChild(root);
+  vtkIdType internalTwo = graph1->AddChild(internalOne);
+  vtkIdType a = graph1->AddChild(internalTwo);
+  vtkIdType b = graph1->AddChild(internalTwo);
+  vtkIdType c = graph1->AddChild(internalOne);
+
+  vtkNew<vtkDoubleArray> weights;
+  weights->SetNumberOfTuples(5);
+  weights->SetValue(graph1->GetEdgeId(root, internalOne), 1.0f);
+  weights->SetValue(graph1->GetEdgeId(internalOne, internalTwo), 2.0f);
+  weights->SetValue(graph1->GetEdgeId(internalTwo, a), 1.0f);
+  weights->SetValue(graph1->GetEdgeId(internalTwo, b), 1.0f);
+  weights->SetValue(graph1->GetEdgeId(internalOne, c), 3.0f);
+
+  weights->SetName("weight");
+  graph1->GetEdgeData()->AddArray(weights.GetPointer());
+
+  vtkNew<vtkStringArray> names1;
+  names1->SetNumberOfTuples(6);
+  names1->SetValue(a, "cat");
+  names1->SetValue(b, "dog");
+  names1->SetValue(c, "human");
+
+  names1->SetName("node name");
+  graph1->GetVertexData()->AddArray(names1.GetPointer());
+
+  vtkNew<vtkDoubleArray> nodeWeights;
+  nodeWeights->SetNumberOfTuples(6);
+  nodeWeights->SetValue(root, 0.0f);
+  nodeWeights->SetValue(internalOne, 1.0f);
+  nodeWeights->SetValue(internalTwo, 3.0f);
+  nodeWeights->SetValue(a, 4.0f);
+  nodeWeights->SetValue(b, 4.0f);
+  nodeWeights->SetValue(c, 4.0f);
+  nodeWeights->SetName("node weight");
+  graph1->GetVertexData()->AddArray(nodeWeights.GetPointer());
+
+  // tree #2
+  vtkNew<vtkMutableDirectedGraph> graph2;
+  root = graph2->AddVertex();
+  internalOne = graph2->AddChild(root);
+  internalTwo = graph2->AddChild(internalOne);
+  a = graph2->AddChild(internalTwo);
+  b = graph2->AddChild(internalTwo);
+  c = graph2->AddChild(internalOne);
+
+  weights->SetName("weight");
+  graph2->GetEdgeData()->AddArray(weights.GetPointer());
+
+  vtkNew<vtkStringArray> names2;
+  names2->SetNumberOfTuples(6);
+  names2->SetValue(a, "dog food");
+  names2->SetValue(b, "cat food");
+  names2->SetValue(c, "steak");
+
+  names2->SetName("node name");
+  graph2->GetVertexData()->AddArray(names2.GetPointer());
+
+  graph2->GetVertexData()->AddArray(nodeWeights.GetPointer());
+
+  // set up correspondence table: who eats what
+  vtkNew<vtkTable> table;
+  vtkNew<vtkStringArray> eaters;
+  vtkNew<vtkDoubleArray> hungerForSteak;
+  hungerForSteak->SetName("steak");
+  vtkNew<vtkDoubleArray> hungerForDogFood;
+  hungerForDogFood->SetName("dog food");
+  vtkNew<vtkDoubleArray> hungerForCatFood;
+  hungerForCatFood->SetName("cat food");
+
+  eaters->SetNumberOfTuples(3);
+  hungerForSteak->SetNumberOfTuples(3);
+  hungerForDogFood->SetNumberOfTuples(3);
+  hungerForCatFood->SetNumberOfTuples(3);
+
+  eaters->SetValue(0, "human");
+  eaters->SetValue(1, "dog");
+  eaters->SetValue(2, "cat");
+
+  hungerForSteak->SetValue(0, 2.0);
+  hungerForSteak->SetValue(1, 1.0);
+  hungerForSteak->SetValue(2, 1.0);
+
+  hungerForDogFood->SetValue(0, 0.0);
+  hungerForDogFood->SetValue(1, 2.0);
+  hungerForDogFood->SetValue(2, 0.0);
+
+  hungerForCatFood->SetValue(0, 0.0);
+  hungerForCatFood->SetValue(1, 1.0);
+  hungerForCatFood->SetValue(2, 2.0);
+
+  table->AddColumn(eaters.GetPointer());
+  table->AddColumn(hungerForSteak.GetPointer());
+  table->AddColumn(hungerForDogFood.GetPointer());
+  table->AddColumn(hungerForCatFood.GetPointer());
+
+  vtkNew<vtkContextActor> actor;
+
+  vtkNew<vtkTree> tree1;
+  tree1->ShallowCopy(graph1.GetPointer());
+
+  vtkNew<vtkTree> tree2;
+  tree2->ShallowCopy(graph2.GetPointer());
+
+  vtkNew<vtkTanglegramItem> tanglegram;
+  tanglegram->SetTree1(tree1.GetPointer());
+  tanglegram->SetTree2(tree2.GetPointer());
+  tanglegram->SetTable(table.GetPointer());
+  tanglegram->SetTree1Label("Diners");
+  tanglegram->SetTree2Label("Meals");
+
+  vtkNew<vtkContextTransform> trans;
+  trans->SetInteractive(true);
+  trans->AddItem(tanglegram.GetPointer());
+  // center the item within the render window
+  trans->Translate(20, 75);
+  trans->Scale(1.25, 1.25);
+  actor->GetScene()->AddItem(trans.GetPointer());
+
+  vtkNew<vtkRenderer> renderer;
+  renderer->SetBackground(1.0, 1.0, 1.0);
+
+  vtkNew<vtkRenderWindow> renderWindow;
+  renderWindow->SetSize(400, 200);
+  renderWindow->AddRenderer(renderer.GetPointer());
+  renderer->AddActor(actor.GetPointer());
+  actor->GetScene()->SetRenderer(renderer.GetPointer());
+
+  vtkNew<vtkContextInteractorStyle> interactorStyle;
+  interactorStyle->SetScene(actor->GetScene());
+
+  vtkNew<vtkRenderWindowInteractor> interactor;
+  interactor->SetInteractorStyle(interactorStyle.GetPointer());
+  interactor->SetRenderWindow(renderWindow.GetPointer());
+  renderWindow->SetMultiSamples(0);
+  renderWindow->Render();
+
+  int retVal = vtkRegressionTestImageThreshold(renderWindow.GetPointer(), 100);
+  if (retVal == vtkRegressionTester::DO_INTERACTOR)
+    {
+    renderWindow->Render();
+    interactor->Start();
+    retVal = vtkRegressionTester::PASSED;
+    }
+  return !retVal;
+}
diff --git a/Views/Infovis/Testing/Cxx/TestTreeHeatmapAutoCollapse.cxx b/Views/Infovis/Testing/Cxx/TestTreeHeatmapAutoCollapse.cxx
index 892d7e0..4d12d2f 100644
--- a/Views/Infovis/Testing/Cxx/TestTreeHeatmapAutoCollapse.cxx
+++ b/Views/Infovis/Testing/Cxx/TestTreeHeatmapAutoCollapse.cxx
@@ -15,6 +15,7 @@
 
 #include "vtkTreeHeatmapItem.h"
 #include "vtkDataSetAttributes.h"
+#include "vtkDendrogramItem.h"
 #include "vtkDoubleArray.h"
 #include "vtkMutableDirectedGraph.h"
 #include "vtkNew.h"
@@ -120,9 +121,13 @@ int TestTreeHeatmapAutoCollapse(int argc, char* argv[])
   vtkNew<vtkTreeHeatmapItem> treeItem;
   treeItem->SetTree(tree.GetPointer());
   treeItem->SetTable(table.GetPointer());
+  treeItem->GetDendrogram()->DisplayNumberOfCollapsedLeafNodesOff();
 
   vtkNew<vtkContextTransform> trans;
   trans->SetInteractive(true);
+  // center the item within the render window
+  trans->Translate(20, 30);
+  trans->Scale(2.5, 2.5);
   trans->AddItem(treeItem.GetPointer());
   actor->GetScene()->AddItem(trans.GetPointer());
 
diff --git a/Views/Infovis/Testing/Cxx/TestTreeHeatmapItem.cxx b/Views/Infovis/Testing/Cxx/TestTreeHeatmapItem.cxx
index 0dbf9fa..339d962 100644
--- a/Views/Infovis/Testing/Cxx/TestTreeHeatmapItem.cxx
+++ b/Views/Infovis/Testing/Cxx/TestTreeHeatmapItem.cxx
@@ -122,10 +122,14 @@ int TestTreeHeatmapItem(int argc, char* argv[])
   treeItem->SetTree(tree.GetPointer());
   treeItem->SetTable(table.GetPointer());
   treeItem->SetTreeColorArray("node weight");
+  treeItem->SetTreeLineWidth(2.0);
 
   vtkNew<vtkContextTransform> trans;
   trans->SetInteractive(true);
   trans->AddItem(treeItem.GetPointer());
+  // center the item within the render window
+  trans->Translate(40, 30);
+  trans->Scale(2, 2);
   actor->GetScene()->AddItem(trans.GetPointer());
 
   vtkNew<vtkRenderer> renderer;
diff --git a/Views/Infovis/Testing/Data/Baseline/TestCoincidentGraphLayoutView.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestCoincidentGraphLayoutView.png.md5
new file mode 100644
index 0000000..93a241b
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestCoincidentGraphLayoutView.png.md5
@@ -0,0 +1 @@
+33daab90a05054167e71fbfa1d4e4580
diff --git a/Views/Infovis/Testing/Data/Baseline/TestColumnTree.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestColumnTree.png.md5
new file mode 100644
index 0000000..19b517f
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestColumnTree.png.md5
@@ -0,0 +1 @@
+0d5c16b63e6da0d539268329b87ddd51
diff --git a/Views/Infovis/Testing/Data/Baseline/TestConeLayoutStrategy.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestConeLayoutStrategy.png.md5
new file mode 100644
index 0000000..d6d79bd
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestConeLayoutStrategy.png.md5
@@ -0,0 +1 @@
+e3e1f31b6d238602055f7f58dac034a3
diff --git a/Views/Infovis/Testing/Data/Baseline/TestConeLayoutStrategy_1.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestConeLayoutStrategy_1.png.md5
new file mode 100644
index 0000000..e84987a
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestConeLayoutStrategy_1.png.md5
@@ -0,0 +1 @@
+f6d17a2c78fdbd24340dca2bff8914c6
diff --git a/Views/Infovis/Testing/Data/Baseline/TestDendrogramItem.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestDendrogramItem.png.md5
new file mode 100644
index 0000000..6927ea7
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestDendrogramItem.png.md5
@@ -0,0 +1 @@
+142adb34c8192d5c184cb04eca3b59da
diff --git a/Views/Infovis/Testing/Data/Baseline/TestDendrogramOnly.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestDendrogramOnly.png.md5
new file mode 100644
index 0000000..19ae5aa
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestDendrogramOnly.png.md5
@@ -0,0 +1 @@
+baa8225122a25c6f95049adbda1ec986
diff --git a/Views/Infovis/Testing/Data/Baseline/TestDendrogramOnly_1.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestDendrogramOnly_1.png.md5
new file mode 100644
index 0000000..39fa6fc
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestDendrogramOnly_1.png.md5
@@ -0,0 +1 @@
+710ea93e28442d01b26af7a63dc69652
diff --git a/Views/Infovis/Testing/Data/Baseline/TestGraphLayoutView.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestGraphLayoutView.png.md5
new file mode 100644
index 0000000..53bd2ff
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestGraphLayoutView.png.md5
@@ -0,0 +1 @@
+f6ba0d03972acf7da46f584df0bc5701
diff --git a/Views/Infovis/Testing/Data/Baseline/TestHeatmapCategoryLegend.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestHeatmapCategoryLegend.png.md5
new file mode 100644
index 0000000..6533139
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestHeatmapCategoryLegend.png.md5
@@ -0,0 +1 @@
+5e5f1fb61d8e3d907c5788bb8553253e
diff --git a/Views/Infovis/Testing/Data/Baseline/TestHeatmapItem.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestHeatmapItem.png.md5
new file mode 100644
index 0000000..19c5229
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestHeatmapItem.png.md5
@@ -0,0 +1 @@
+cadc41b117b96aa790814a3f2f565835
diff --git a/Views/Infovis/Testing/Data/Baseline/TestHeatmapOnly.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestHeatmapOnly.png.md5
new file mode 100644
index 0000000..b3f0145
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestHeatmapOnly.png.md5
@@ -0,0 +1 @@
+4d361f7186ba6ebacc94ea7c8e9b5c64
diff --git a/Views/Infovis/Testing/Data/Baseline/TestHeatmapScalarLegend.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestHeatmapScalarLegend.png.md5
new file mode 100644
index 0000000..d35ff5d
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestHeatmapScalarLegend.png.md5
@@ -0,0 +1 @@
+239d831ba3e70489d43cd823035395e7
diff --git a/Views/Infovis/Testing/Data/Baseline/TestHierarchicalGraphView.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestHierarchicalGraphView.png.md5
new file mode 100644
index 0000000..f1f3fcc
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestHierarchicalGraphView.png.md5
@@ -0,0 +1 @@
+40f1e078cef00e6445214403cc4f8c48
diff --git a/Views/Infovis/Testing/Data/Baseline/TestIcicleView.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestIcicleView.png.md5
new file mode 100644
index 0000000..0d4cd03
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestIcicleView.png.md5
@@ -0,0 +1 @@
+25e4ee407be4f741ff235c67a5a5caa4
diff --git a/Views/Infovis/Testing/Data/Baseline/TestIcicleView_1.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_1.png.md5
new file mode 100644
index 0000000..ac68d87
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_1.png.md5
@@ -0,0 +1 @@
+79e73858ae946e4103e135a9a2db7c77
diff --git a/Views/Infovis/Testing/Data/Baseline/TestIcicleView_2.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_2.png.md5
new file mode 100644
index 0000000..c9618bd
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_2.png.md5
@@ -0,0 +1 @@
+3e4ff89a6370c341563b05d48731df1b
diff --git a/Views/Infovis/Testing/Data/Baseline/TestIcicleView_3.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_3.png.md5
new file mode 100644
index 0000000..ef2782a
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_3.png.md5
@@ -0,0 +1 @@
+52ed72ac0fde5076a505b2a4151ab416
diff --git a/Views/Infovis/Testing/Data/Baseline/TestIcicleView_4.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_4.png.md5
new file mode 100644
index 0000000..6a7cf17
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_4.png.md5
@@ -0,0 +1 @@
+69461bf9b8ac7092819f20f887af9158
diff --git a/Views/Infovis/Testing/Data/Baseline/TestIcicleView_5.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_5.png.md5
new file mode 100644
index 0000000..98c301d
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestIcicleView_5.png.md5
@@ -0,0 +1 @@
+2df1df764604e62b0228901f42b5090c
diff --git a/Views/Infovis/Testing/Data/Baseline/TestIconGlyphFilter.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestIconGlyphFilter.png.md5
new file mode 100644
index 0000000..483f022
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestIconGlyphFilter.png.md5
@@ -0,0 +1 @@
+7f9e3b687abfe601da2f94c785d931b2
diff --git a/Views/Infovis/Testing/Data/Baseline/TestInteractorStyleTreeMapHover.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestInteractorStyleTreeMapHover.png.md5
new file mode 100644
index 0000000..c767d64
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestInteractorStyleTreeMapHover.png.md5
@@ -0,0 +1 @@
+c7ed5eea999e331354537d6c9a7904e4
diff --git a/Views/Infovis/Testing/Data/Baseline/TestNetworkViews.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestNetworkViews.png.md5
new file mode 100644
index 0000000..50ca8bb
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestNetworkViews.png.md5
@@ -0,0 +1 @@
+fda2c1313dbdd6948fcb4d56aa0ad518
diff --git a/Views/Infovis/Testing/Data/Baseline/TestParallelCoordinatesView.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestParallelCoordinatesView.png.md5
new file mode 100644
index 0000000..b2a9446
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestParallelCoordinatesView.png.md5
@@ -0,0 +1 @@
+3c7ec97b31fc69be990d84cd031dcb8d
diff --git a/Views/Infovis/Testing/Data/Baseline/TestRenderView.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestRenderView.png.md5
new file mode 100644
index 0000000..2ac0719
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestRenderView.png.md5
@@ -0,0 +1 @@
+6d22c984454346b50192b3d1c6ae17b7
diff --git a/Views/Infovis/Testing/Data/Baseline/TestSpanTreeLayoutStrategy.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestSpanTreeLayoutStrategy.png.md5
new file mode 100644
index 0000000..b1c66cc
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestSpanTreeLayoutStrategy.png.md5
@@ -0,0 +1 @@
+860760a45e30224e4fb16b0ced855458
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTanglegramItem.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTanglegramItem.png.md5
new file mode 100644
index 0000000..7810d61
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTanglegramItem.png.md5
@@ -0,0 +1 @@
+5548784a3f7181bc7c04c239180deac7
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTreeHeatmapAutoCollapse.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTreeHeatmapAutoCollapse.png.md5
new file mode 100644
index 0000000..471c0d5
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTreeHeatmapAutoCollapse.png.md5
@@ -0,0 +1 @@
+d1ffa41cd988a64cfccf48f4daa4cb0a
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTreeHeatmapItem.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTreeHeatmapItem.png.md5
new file mode 100644
index 0000000..4d7a976
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTreeHeatmapItem.png.md5
@@ -0,0 +1 @@
+c7dd8c6992c367f078617aa914750897
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTreeMapView.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTreeMapView.png.md5
new file mode 100644
index 0000000..eb893e1
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTreeMapView.png.md5
@@ -0,0 +1 @@
+5850bf6330d8b55c9957642671ba34fe
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_1.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_1.png.md5
new file mode 100644
index 0000000..8a24f80
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_1.png.md5
@@ -0,0 +1 @@
+59c64f1f578ef1129928e95cf17ac934
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_2.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_2.png.md5
new file mode 100644
index 0000000..fa87613
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_2.png.md5
@@ -0,0 +1 @@
+ef441137e3ef90d350aaa53a55b44ee4
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_3.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_3.png.md5
new file mode 100644
index 0000000..fa87613
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTreeMapView_3.png.md5
@@ -0,0 +1 @@
+ef441137e3ef90d350aaa53a55b44ee4
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTreeRingView.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTreeRingView.png.md5
new file mode 100644
index 0000000..4582f04
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTreeRingView.png.md5
@@ -0,0 +1 @@
+5238766c9e244d69b4aa526d78f04fce
diff --git a/Views/Infovis/Testing/Data/Baseline/TestTreeRingView_1.png.md5 b/Views/Infovis/Testing/Data/Baseline/TestTreeRingView_1.png.md5
new file mode 100644
index 0000000..421052e
--- /dev/null
+++ b/Views/Infovis/Testing/Data/Baseline/TestTreeRingView_1.png.md5
@@ -0,0 +1 @@
+26af24721d31323e258cf075b5550d8f
diff --git a/Views/Infovis/module.cmake b/Views/Infovis/module.cmake
index b45cc6c..9d5461c 100644
--- a/Views/Infovis/module.cmake
+++ b/Views/Infovis/module.cmake
@@ -9,6 +9,10 @@ vtk_module(vtkViewsInfovis
     vtkInteractionStyle
     vtkFiltersModeling
     vtkFiltersImaging
+    vtkCommonColor
+    vtkChartsCore
+  PRIVATE_DEPENDS
+    vtkFiltersGeometry
   TEST_DEPENDS
     vtkTestingRendering
     vtkIOInfovis
diff --git a/Views/Infovis/vtkDendrogramItem.cxx b/Views/Infovis/vtkDendrogramItem.cxx
new file mode 100644
index 0000000..3b24bc9
--- /dev/null
+++ b/Views/Infovis/vtkDendrogramItem.cxx
@@ -0,0 +1,1542 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkDendrogramItem.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkDendrogramItem.h"
+
+#include "vtkBrush.h"
+#include "vtkColorLegend.h"
+#include "vtkContext2D.h"
+#include "vtkContextMouseEvent.h"
+#include "vtkContextScene.h"
+#include "vtkDataSetAttributes.h"
+#include "vtkDoubleArray.h"
+#include "vtkGraphLayout.h"
+#include "vtkIdTypeArray.h"
+#include "vtkIntArray.h"
+#include "vtkLookupTable.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkPen.h"
+#include "vtkPruneTreeFilter.h"
+#include "vtkStringArray.h"
+#include "vtkTextProperty.h"
+#include "vtkTransform2D.h"
+#include "vtkTree.h"
+#include "vtkTreeLayoutStrategy.h"
+#include "vtkUnsignedIntArray.h"
+
+#include <algorithm>
+#include <queue>
+#include <sstream>
+
+vtkStandardNewMacro(vtkDendrogramItem);
+
+//-----------------------------------------------------------------------------
+vtkDendrogramItem::vtkDendrogramItem() : PositionVector(0, 0)
+{
+  this->Position = this->PositionVector.GetData();
+  this->DendrogramBuildTime = 0;
+  this->Interactive = true;
+  this->ColorTree = false;
+  this->LegendPositionSet = false;
+  this->Tree = vtkSmartPointer<vtkTree>::New();
+  this->PrunedTree = vtkSmartPointer<vtkTree>::New();
+  this->LayoutTree = vtkSmartPointer<vtkTree>::New();
+
+  /* initialize bounds with impossible values */
+  this->MinX = 1.0;
+  this->MinY = 1.0;
+  this->MaxX = 0.0;
+  this->MaxY = 0.0;
+
+  this->LabelWidth = 0.0;
+  this->LineWidth = 1.0;
+
+  this->NumberOfLeafNodes = 0;
+  this->MultiplierX = 100.0;
+  this->MultiplierY = 100.0;
+  this->LeafSpacing = 18.0;
+
+  this->PruneFilter->SetShouldPruneParentVertex(false);
+
+  this->ExtendLeafNodes = false;
+  this->DrawLabels = true;
+  this->DisplayNumberOfCollapsedLeafNodes = true;
+
+  this->DistanceArrayName = "node weight";
+  this->VertexNameArrayName = "node name";
+
+  this->ColorLegend->SetVisible(false);
+  this->ColorLegend->DrawBorderOn();
+  this->ColorLegend->CacheBoundsOff();
+  this->AddItem(this->ColorLegend.GetPointer());
+}
+
+//-----------------------------------------------------------------------------
+vtkDendrogramItem::~vtkDendrogramItem()
+{
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::SetPosition(const vtkVector2f &pos)
+{
+  this->PositionVector = pos;
+  this->DendrogramBuildTime = 0;
+}
+
+//-----------------------------------------------------------------------------
+vtkVector2f vtkDendrogramItem::GetPositionVector()
+{
+  return this->PositionVector;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::SetTree(vtkTree *tree)
+{
+  if (tree == NULL || tree->GetNumberOfVertices() == 0)
+    {
+    this->Tree = vtkSmartPointer<vtkTree>::New();
+    this->PrunedTree = vtkSmartPointer<vtkTree>::New();
+    this->LayoutTree = vtkSmartPointer<vtkTree>::New();
+    return;
+    }
+
+  this->Tree = tree;
+
+  // initialize some additional arrays for the tree's vertex data
+  vtkNew<vtkUnsignedIntArray> vertexIsPruned;
+  vertexIsPruned->SetNumberOfComponents(1);
+  vertexIsPruned->SetName("VertexIsPruned");
+  vertexIsPruned->SetNumberOfValues(
+    this->Tree->GetNumberOfVertices());
+  vertexIsPruned->FillComponent(0, 0.0);
+  this->Tree->GetVertexData()->AddArray(vertexIsPruned.GetPointer());
+
+  vtkNew<vtkIdTypeArray> originalId;
+  originalId->SetNumberOfComponents(1);
+  originalId->SetName("OriginalId");
+  vtkIdType numVertices = this->Tree->GetNumberOfVertices();
+  originalId->SetNumberOfValues(numVertices);
+  for (vtkIdType i = 0; i < numVertices; ++i)
+    {
+    originalId->SetValue(i, i);
+    }
+  this->Tree->GetVertexData()->AddArray(originalId.GetPointer());
+
+  // make a copy of the full tree for later pruning
+  this->PrunedTree->DeepCopy(this->Tree);
+
+  // set up the lookup table that's used to color the triangles representing
+  // collapsed subtrees.  First we find the maximum possible value.
+  vtkIdType root = this->Tree->GetRoot();
+  if (this->Tree->GetNumberOfChildren(root) == 1)
+    {
+    root = this->Tree->GetChild(root, 0);
+    }
+  int numLeavesInBiggestSubTree = 0;
+  for (vtkIdType child = 0; child < this->Tree->GetNumberOfChildren(root);
+       ++child)
+    {
+    vtkIdType childVertex = this->Tree->GetChild(root, child);
+    int numLeaves = this->CountLeafNodes(childVertex);
+    if (numLeaves > numLeavesInBiggestSubTree)
+      {
+      numLeavesInBiggestSubTree = numLeaves;
+      }
+    }
+
+  double rangeMinimum = 2.0;
+  if (numLeavesInBiggestSubTree < rangeMinimum)
+    {
+    rangeMinimum = numLeavesInBiggestSubTree;
+    }
+
+  this->TriangleLookupTable->SetNumberOfTableValues(256);
+  this->TriangleLookupTable->SetHueRange(0.5, 0.045);
+  this->TriangleLookupTable->SetRange(
+    rangeMinimum, static_cast<double>(numLeavesInBiggestSubTree));
+  this->TriangleLookupTable->Build();
+}
+
+//-----------------------------------------------------------------------------
+vtkTree * vtkDendrogramItem::GetTree()
+{
+  return this->Tree;
+}
+
+//-----------------------------------------------------------------------------
+vtkTree * vtkDendrogramItem::GetPrunedTree()
+{
+  return this->PrunedTree;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkDendrogramItem::Paint(vtkContext2D *painter)
+{
+  if (this->Tree->GetNumberOfVertices() == 0)
+    {
+    return true;
+    }
+
+  this->PrepareToPaint(painter);
+  this->PaintBuffers(painter);
+  this->PaintChildren(painter);
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::PrepareToPaint(vtkContext2D *painter)
+{
+  if (this->IsDirty())
+    {
+    this->RebuildBuffers();
+    }
+  this->ComputeLabelWidth(painter);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkDendrogramItem::IsDirty()
+{
+  if (this->Tree->GetNumberOfVertices() == 0)
+    {
+    return false;
+    }
+  if (this->MTime > this->DendrogramBuildTime)
+    {
+    return true;
+    }
+  if (this->PrunedTree->GetMTime() > this->DendrogramBuildTime)
+    {
+    return true;
+    }
+  if (this->Tree->GetMTime() > this->DendrogramBuildTime)
+    {
+    return true;
+    }
+  return false;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::RebuildBuffers()
+{
+  if (this->Tree->GetNumberOfVertices() == 0)
+    {
+    return;
+    }
+
+  // Special case where our input tree has been modified.  Refresh PrunedTree
+  // to be an up-to-date full copy of it.
+  if (this->Tree->GetMTime() > this->PrunedTree->GetMTime())
+    {
+    this->PrunedTree->DeepCopy(this->Tree);
+    }
+
+  int orientation = this->GetOrientation();
+
+  vtkNew<vtkTreeLayoutStrategy> strategy;
+
+  if (this->PrunedTree->GetVertexData()->GetAbstractArray(
+    this->DistanceArrayName) != NULL)
+    {
+    strategy->SetDistanceArrayName(this->DistanceArrayName);
+    }
+
+  strategy->SetLeafSpacing(1.0);
+
+  strategy->SetRotation(
+    this->GetAngleForOrientation(orientation));
+
+  this->Layout->SetLayoutStrategy(strategy.GetPointer());
+  this->Layout->SetInputData(this->PrunedTree);
+  this->Layout->Update();
+  this->LayoutTree = vtkTree::SafeDownCast(this->Layout->GetOutput());
+
+  this->CountLeafNodes();
+  this->ComputeMultipliers();
+  this->ComputeBounds();
+
+  if (this->ColorTree && !this->LegendPositionSet)
+    {
+    this->PositionColorLegend();
+    }
+
+  if (this->PrunedTree->GetMTime() > this->MTime)
+    {
+    this->DendrogramBuildTime = this->PrunedTree->GetMTime();
+    }
+  else
+    {
+    this->DendrogramBuildTime = this->MTime;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::ComputeMultipliers()
+{
+  double xMax = 1;
+  double yMax = 1;
+  double targetPoint[3];
+  if (this->Tree->GetNumberOfVertices() > 0)
+    {
+    for (vtkIdType edge = 0; edge < this->LayoutTree->GetNumberOfEdges(); ++edge)
+      {
+      vtkIdType target = this->LayoutTree->GetTargetVertex(edge);
+      this->LayoutTree->GetPoint(target, targetPoint);
+      double x = fabs(targetPoint[0]);
+      double y = fabs(targetPoint[1]);
+      if (x > xMax)
+        {
+        xMax = x;
+        }
+      if (y > yMax)
+        {
+        yMax = y;
+        }
+      }
+    }
+
+  int orientation = this->GetOrientation();
+  if (orientation == vtkDendrogramItem::LEFT_TO_RIGHT ||
+      orientation == vtkDendrogramItem::RIGHT_TO_LEFT)
+    {
+    this->MultiplierX =
+      (this->LeafSpacing * (this->NumberOfLeafNodes - 1)) / yMax;
+    this->MultiplierY = this->MultiplierX;
+    }
+  else
+    {
+    this->MultiplierY =
+      (this->LeafSpacing * (this->NumberOfLeafNodes - 1)) / xMax;
+    this->MultiplierX = this->MultiplierY;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::ComputeBounds()
+{
+  this->MinX = VTK_DOUBLE_MAX;
+  this->MinY = VTK_DOUBLE_MAX;
+  this->MaxX = VTK_DOUBLE_MIN;
+  this->MaxY = VTK_DOUBLE_MIN;
+
+  double sourcePoint[3];
+  double targetPoint[3];
+
+  for (vtkIdType edge = 0; edge < this->LayoutTree->GetNumberOfEdges(); ++edge)
+    {
+    vtkIdType source = this->LayoutTree->GetSourceVertex(edge);
+    this->LayoutTree->GetPoint(source, sourcePoint);
+    double x0 = this->Position[0] + sourcePoint[0] * this->MultiplierX;
+    double y0 = this->Position[1] + sourcePoint[1] * this->MultiplierY;
+
+    vtkIdType target = this->LayoutTree->GetTargetVertex(edge);
+    this->LayoutTree->GetPoint(target, targetPoint);
+    double x1 = this->Position[0] + targetPoint[0] * this->MultiplierX;
+    double y1 = this->Position[1] + targetPoint[1] * this->MultiplierY;
+
+    if (x0 < this->MinX)
+      {
+      this->MinX = x0;
+      }
+    if (y0 < this->MinY)
+      {
+      this->MinY = y0;
+      }
+    if (x0 > this->MaxX)
+      {
+      this->MaxX = x0;
+      }
+    if (y0 > this->MaxY)
+      {
+      this->MaxY = y0;
+      }
+    if (x1 < this->MinX)
+      {
+      this->MinX = x1;
+      }
+    if (y1 < this->MinY)
+      {
+      this->MinY = y1;
+      }
+    if (x1 > this->MaxX)
+      {
+      this->MaxX = x1;
+      }
+    if (y1 > this->MaxY)
+      {
+      this->MaxY = y1;
+      }
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::CountLeafNodes()
+{
+  // figure out how many leaf nodes we have.
+  this->NumberOfLeafNodes = 0;
+  for (vtkIdType vertex = 0; vertex < this->LayoutTree->GetNumberOfVertices();
+       ++vertex)
+    {
+    if (!this->LayoutTree->IsLeaf(vertex))
+      {
+      continue;
+      }
+    ++this->NumberOfLeafNodes;
+    }
+}
+
+//-----------------------------------------------------------------------------
+int vtkDendrogramItem::CountLeafNodes(vtkIdType vertex)
+{
+  // figure out how many leaf nodes descend from vertex.
+  int numLeaves = 0;
+  for (vtkIdType child = 0; child < this->Tree->GetNumberOfChildren(vertex);
+       ++child)
+    {
+    vtkIdType childVertex = this->Tree->GetChild(vertex, child);
+    if (this->Tree->IsLeaf(childVertex))
+      {
+      ++numLeaves;
+      }
+    else
+      {
+      numLeaves += this->CountLeafNodes(childVertex);
+      }
+    }
+  return numLeaves;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::PaintBuffers(vtkContext2D *painter)
+{
+  // Calculate the extent of the data that is visible within the window.
+  this->UpdateVisibleSceneExtent(painter);
+
+  double xStart, yStart;
+  double sourcePoint[3];
+  double targetPoint[3];
+  int numberOfCollapsedSubTrees = 0;
+
+  vtkUnsignedIntArray *vertexIsPruned = vtkUnsignedIntArray::SafeDownCast(
+    this->Tree->GetVertexData()->GetArray("VertexIsPruned"));
+
+  int orientation = this->GetOrientation();
+
+  int previousPenWidth = painter->GetPen()->GetWidth();
+  painter->GetPen()->SetWidth(this->LineWidth);
+
+  // draw the tree
+  for (vtkIdType edge = 0; edge < this->LayoutTree->GetNumberOfEdges(); ++edge)
+    {
+    vtkIdType source = this->LayoutTree->GetSourceVertex(edge);
+    vtkIdType target = this->LayoutTree->GetTargetVertex(edge);
+
+    this->LayoutTree->GetPoint(source, sourcePoint);
+    this->LayoutTree->GetPoint(target, targetPoint);
+
+    double x0 = this->Position[0] + sourcePoint[0] * this->MultiplierX;
+    double y0 = this->Position[1] + sourcePoint[1] * this->MultiplierY;
+    double x1 = this->Position[0] + targetPoint[0] * this->MultiplierX;
+    double y1 = this->Position[1] + targetPoint[1] * this->MultiplierY;
+
+    // check if the target vertex is the root of a collapsed tree
+    bool alreadyDrewCollapsedSubTree = false;
+    vtkIdType originalId = this->GetOriginalId(target);
+
+    double color[4];
+    double colorKey;
+    if (vertexIsPruned->GetValue(originalId) > 0)
+      {
+      ++numberOfCollapsedSubTrees;
+
+      float trianglePoints[6], triangleLabelX, triangleLabelY;
+      switch (orientation)
+        {
+        case vtkDendrogramItem::DOWN_TO_UP:
+          trianglePoints[0] = x1;
+          trianglePoints[1] = y0;
+          trianglePoints[2] = x1 - this->LeafSpacing / 2;
+          trianglePoints[3] = this->MaxY;
+          trianglePoints[4] = x1 + this->LeafSpacing / 2;
+          trianglePoints[5] = this->MaxY;
+          triangleLabelX = trianglePoints[0];
+          triangleLabelY = trianglePoints[3] - 1;
+          painter->GetTextProp()->SetJustificationToRight();
+          break;
+        case vtkDendrogramItem::RIGHT_TO_LEFT:
+          trianglePoints[0] = x0;
+          trianglePoints[1] = y1;
+          trianglePoints[2] = this->MinX;
+          trianglePoints[3] = y1 - this->LeafSpacing / 2;
+          trianglePoints[4] = this->MinX;
+          trianglePoints[5] = y1 + this->LeafSpacing / 2;
+          triangleLabelX = trianglePoints[2] + 1;
+          triangleLabelY = trianglePoints[1];
+          painter->GetTextProp()->SetJustificationToLeft();
+          break;
+        case vtkDendrogramItem::UP_TO_DOWN:
+          trianglePoints[0] = x1;
+          trianglePoints[1] = y0;
+          trianglePoints[2] = x1 - this->LeafSpacing / 2;
+          trianglePoints[3] = this->MinY;
+          trianglePoints[4] = x1 + this->LeafSpacing / 2;
+          trianglePoints[5] = this->MinY;
+          triangleLabelX = trianglePoints[0];
+          triangleLabelY = trianglePoints[3] + 1;
+          painter->GetTextProp()->SetJustificationToLeft();
+          break;
+        case vtkDendrogramItem::LEFT_TO_RIGHT:
+        default:
+          trianglePoints[0] = x0;
+          trianglePoints[1] = y1;
+          trianglePoints[2] = this->MaxX;
+          trianglePoints[3] = y1 - this->LeafSpacing / 2;
+          trianglePoints[4] = this->MaxX;
+          trianglePoints[5] = y1 + this->LeafSpacing / 2;
+          triangleLabelX = trianglePoints[2] - 1;
+          triangleLabelY = trianglePoints[1];
+          painter->GetTextProp()->SetJustificationToRight();
+          break;
+        }
+
+      if (this->LineIsVisible(trianglePoints[0], trianglePoints[1],
+                              trianglePoints[2], trianglePoints[3]) ||
+          this->LineIsVisible(trianglePoints[0], trianglePoints[1],
+                              trianglePoints[4], trianglePoints[5]) ||
+          this->LineIsVisible(trianglePoints[2], trianglePoints[3],
+                              trianglePoints[4], trianglePoints[5]))
+        {
+        colorKey = static_cast<double>(vertexIsPruned->GetValue(originalId));
+        this->TriangleLookupTable->GetColor(colorKey, color);
+        painter->GetBrush()->SetColorF(color[0], color[1], color[2]);
+        painter->DrawPolygon(trianglePoints, 3);
+
+        if (this->DisplayNumberOfCollapsedLeafNodes)
+          {
+          unsigned int numCollapsedLeafNodes =
+            vertexIsPruned->GetValue(originalId);
+          std::stringstream ss;
+          ss << numCollapsedLeafNodes;
+
+          painter->GetTextProp()->SetVerticalJustificationToCentered();
+          painter->GetTextProp()->SetOrientation(
+            this->GetTextAngleForOrientation(orientation));
+          painter->DrawString(triangleLabelX, triangleLabelY, ss.str());
+          }
+        }
+      alreadyDrewCollapsedSubTree = true;
+      }
+
+    // color this portion of the tree based on the target node
+    if (this->ColorTree)
+      {
+      colorKey = this->ColorArray->GetValue(target);
+      this->TreeLookupTable->GetColor(colorKey, color);
+      painter->GetPen()->SetColorF(color[0], color[1], color[2]);
+      }
+
+    // when drawing horizontal trees, we want to draw the vertical segment
+    // before the horizontal segment.  The opposite is true when we are
+    // drawing vertical trees.  We use the variables midpointX and midpointY
+    // to handle this behavior.  extendedX and extendedY are used similarly
+    // for extending leaf nodes below.
+    double midpointX, midpointY, extendedX, extendedY;
+    switch (orientation)
+      {
+      case vtkDendrogramItem::DOWN_TO_UP:
+        midpointX = x1;
+        midpointY = y0;
+        extendedX = x1;
+        extendedY = this->MaxY;
+        break;
+      case vtkDendrogramItem::RIGHT_TO_LEFT:
+        midpointX = x0;
+        midpointY = y1;
+        extendedX = this->MinX;
+        extendedY = y1;
+        break;
+      case vtkDendrogramItem::UP_TO_DOWN:
+        midpointX = x1;
+        midpointY = y0;
+        extendedX = x1;
+        extendedY = this->MinY;
+        break;
+      case vtkDendrogramItem::LEFT_TO_RIGHT:
+      default:
+        midpointX = x0;
+        midpointY = y1;
+        extendedX = this->MaxX;
+        extendedY = y1;
+        break;
+      }
+
+    if (this->LineIsVisible(x0, y0, midpointX, midpointY))
+      {
+      painter->DrawLine(x0, y0, midpointX, midpointY);
+      }
+    if (!alreadyDrewCollapsedSubTree)
+      {
+      if (this->LineIsVisible(midpointX, midpointY, x1, y1))
+        {
+        painter->DrawLine(midpointX, midpointY, x1, y1);
+        }
+
+      // extend leaf nodes so they line up
+      if (this->ExtendLeafNodes &&
+          !(x1 == extendedX && y1 == extendedY) &&
+          this->LayoutTree->IsLeaf(target) &&
+          this->LineIsVisible(x1, y1, extendedX, extendedY))
+        {
+        // we draw these extensions as grey lines to distinguish them
+        // from the actual lengths of the leaf nodes.
+        painter->GetPen()->SetColorF(0.75, 0.75, 0.75);
+
+        painter->DrawLine(x1, y1, extendedX, extendedY);
+
+        // revert to drawing black lines when we're done
+        painter->GetPen()->SetColorF(0.0, 0.0, 0.0);
+        }
+      }
+
+    if (this->ColorTree)
+      {
+      // revert to drawing thin black lines by default
+      painter->GetPen()->SetColorF(0.0, 0.0, 0.0);
+      }
+    }
+
+  painter->GetPen()->SetWidth(previousPenWidth);
+
+  // the remainder of this function involves drawing the leaf node labels,
+  // so we can return now if that feature has been disabled.
+  if (!this->DrawLabels)
+    {
+    return;
+    }
+
+  // special case: all the true leaf nodes have been collapsed.
+  // This means that there aren't any labels left to draw.
+  if (this->NumberOfLeafNodes <= numberOfCollapsedSubTrees)
+    {
+    return;
+    }
+
+  //"Igq" selected for range of height
+  int fontSize = painter->ComputeFontSizeForBoundedString("Igq", VTK_FLOAT_MAX,
+                                                           this->LeafSpacing);
+  // make sure our current zoom level allows for a legibly-sized font
+  if (fontSize < 8)
+    {
+    return;
+    }
+
+  // leave a small amount of space between the tree and the vertex labels
+  double spacing = this->LeafSpacing * 0.5;
+
+  // set up our text property to draw leaf node labels
+  painter->GetTextProp()->SetColor(0.0, 0.0, 0.0);
+  painter->GetTextProp()->SetJustificationToLeft();
+  painter->GetTextProp()->SetVerticalJustificationToCentered();
+  painter->GetTextProp()->SetOrientation(
+    this->GetTextAngleForOrientation(orientation));
+
+  // make sure some of the labels would be visible on screen
+  switch (orientation)
+    {
+    case vtkDendrogramItem::DOWN_TO_UP:
+      if (this->SceneBottomLeft[1] > this->MaxY + spacing ||
+          this->SceneTopRight[1] < this->MaxY + spacing)
+        {
+        return;
+        }
+      break;
+    case vtkDendrogramItem::RIGHT_TO_LEFT:
+      if (this->SceneBottomLeft[0] > this->MinX - spacing ||
+          this->SceneTopRight[0] < this->MinX - spacing)
+        {
+        return;
+        }
+      painter->GetTextProp()->SetJustificationToRight();
+      break;
+    case vtkDendrogramItem::UP_TO_DOWN:
+      if (this->SceneBottomLeft[1] > this->MinY - spacing ||
+          this->SceneTopRight[1] < this->MinY - spacing)
+        {
+        return;
+        }
+      painter->GetTextProp()->SetJustificationToRight();
+      break;
+    case vtkDendrogramItem::LEFT_TO_RIGHT:
+    default:
+      if (this->SceneBottomLeft[0] > this->MaxX + spacing ||
+          this->SceneTopRight[0] < this->MaxX + spacing)
+        {
+        return;
+        }
+      break;
+    }
+
+  // get array of node names from the tree
+  vtkStringArray *vertexNames = vtkStringArray::SafeDownCast(
+    this->LayoutTree->GetVertexData()->GetAbstractArray(
+    this->VertexNameArrayName));
+
+  // find our leaf nodes & draw their labels
+  for (vtkIdType vertex = 0; vertex < this->LayoutTree->GetNumberOfVertices();
+       ++vertex)
+    {
+    if (!this->LayoutTree->IsLeaf(vertex))
+      {
+      continue;
+      }
+
+    double point[3];
+    this->LayoutTree->GetPoint(vertex, point);
+    switch (orientation)
+      {
+      case vtkDendrogramItem::DOWN_TO_UP:
+        xStart = this->Position[0] + point[0] * this->MultiplierX;
+        yStart = this->MaxY + spacing;
+        break;
+      case vtkDendrogramItem::RIGHT_TO_LEFT:
+        xStart = this->MinX - spacing;
+        yStart = this->Position[1] + point[1] * this->MultiplierY;
+        break;
+      case vtkDendrogramItem::UP_TO_DOWN:
+        xStart = this->Position[0] + point[0] * this->MultiplierX;
+        yStart = this->MinY - spacing;
+        break;
+      case vtkDendrogramItem::LEFT_TO_RIGHT:
+      default:
+        xStart = this->MaxX + spacing;
+        yStart = this->Position[1] + point[1] * this->MultiplierY;
+        break;
+      }
+
+    std::string vertexName = vertexNames->GetValue(vertex);
+    if (this->SceneBottomLeft[0] < xStart &&
+        this->SceneTopRight[0] > xStart   &&
+        this->SceneBottomLeft[1] < yStart &&
+        this->SceneTopRight[1] > yStart)
+      {
+      painter->DrawString(xStart, yStart, vertexName);
+      }
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::UpdateVisibleSceneExtent(vtkContext2D *painter)
+{
+  float position[2];
+  painter->GetTransform()->GetPosition(position);
+  this->SceneBottomLeft[0] = -position[0];
+  this->SceneBottomLeft[1] = -position[1];
+  this->SceneBottomLeft[2] = 0.0;
+
+  this->SceneTopRight[0] =
+    static_cast<double>(this->GetScene()->GetSceneWidth() - position[0]);
+  this->SceneTopRight[1] =
+    static_cast<double>(this->GetScene()->GetSceneHeight() - position[1]);
+  this->SceneTopRight[2] = 0.0;
+  vtkNew<vtkMatrix3x3> inverse;
+  painter->GetTransform()->GetInverse(inverse.GetPointer());
+  inverse->MultiplyPoint(this->SceneBottomLeft, this->SceneBottomLeft);
+  inverse->MultiplyPoint(this->SceneTopRight, this->SceneTopRight);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkDendrogramItem::LineIsVisible(double x0, double y0,
+                                        double x1, double y1)
+{
+  // use local variables to improve readability
+  double xMinScene = this->SceneBottomLeft[0];
+  double yMinScene = this->SceneBottomLeft[1];
+  double xMaxScene = this->SceneTopRight[0];
+  double yMaxScene = this->SceneTopRight[1];
+
+  // if either end point of the line segment falls within the screen,
+  // then the line segment is visible.
+  if ( (xMinScene <= x0 && xMaxScene >= x0 &&
+        yMinScene <= y0 && yMaxScene >= y0) ||
+       (xMinScene <= x1 && xMaxScene >= x1 &&
+        yMinScene <= y1 && yMaxScene >= y1) )
+    {
+    return true;
+    }
+
+  // figure out which end point is "greater" than the other in both dimensions
+  double xMinLine, xMaxLine, yMinLine, yMaxLine;
+  if (x0 < x1)
+    {
+    xMinLine = x0;
+    xMaxLine = x1;
+    }
+  else
+    {
+    xMinLine = x1;
+    xMaxLine = x0;
+    }
+  if (y0 < y1)
+    {
+    yMinLine = y0;
+    yMaxLine = y1;
+    }
+  else
+    {
+    yMinLine = y1;
+    yMaxLine = y0;
+    }
+
+  // case where the Y range of the line falls within the visible scene
+  // and the X range of the line contains the entire visible scene
+  if (yMinScene <= yMinLine && yMaxScene >= yMinLine &&
+      yMinScene <= yMaxLine && yMaxScene >= yMaxLine &&
+      xMinLine <= xMinScene && xMaxLine >= xMaxScene)
+    {
+    return true;
+    }
+
+  // case where the X range of the line falls within the visible scene
+  // and the Y range of the line contains the entire visible scene
+  if (xMinScene <= xMinLine && xMaxScene >= xMinLine &&
+      xMinScene <= xMaxLine && xMaxScene >= xMaxLine &&
+      yMinLine <= yMinScene && yMaxLine >= yMaxScene)
+    {
+    return true;
+    }
+
+  return false;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkDendrogramItem::MouseDoubleClickEvent(
+  const vtkContextMouseEvent &event)
+{
+  // get the position of the double click and convert it to scene coordinates
+  double pos[3];
+  vtkNew<vtkMatrix3x3> inverse;
+  pos[0] = event.GetPos().GetX();
+  pos[1] = event.GetPos().GetY();
+  pos[2] = 0;
+  this->GetScene()->GetTransform()->GetInverse(inverse.GetPointer());
+  inverse->MultiplyPoint(pos, pos);
+
+  bool rotatedTree = false;
+  int orientation = this->GetOrientation();
+  if (orientation == vtkDendrogramItem::UP_TO_DOWN ||
+      orientation == vtkDendrogramItem::DOWN_TO_UP)
+    {
+    rotatedTree = true;
+    }
+
+  // this event is only captured within the tree (not the vertex labels)
+  if ( (!rotatedTree && pos[0] <= this->MaxX && pos[0] >= this->MinX) ||
+       (rotatedTree && pos[1] <= this->MaxY && pos[1] >= this->MinY) )
+    {
+    vtkIdType collapsedSubTree =
+      this->GetClickedCollapsedSubTree(pos[0], pos[1]);
+    if (collapsedSubTree != -1)
+      {
+      // re-expand the subtree rooted at this vertex
+      this->ExpandSubTree(collapsedSubTree);
+      }
+    else
+      {
+      // collapse the subtree rooted at this vertex
+      vtkIdType closestVertex =
+        this->GetClosestVertex((pos[0] - this->Position[0]) / this->MultiplierX,
+                               (pos[1] - this->Position[1]) / this->MultiplierY);
+      this->CollapseSubTree(closestVertex);
+      }
+
+    this->Scene->SetDirty(true);
+    return true;
+    }
+
+  return false;
+}
+
+//-----------------------------------------------------------------------------
+vtkIdType vtkDendrogramItem::GetClickedCollapsedSubTree(double x, double y)
+{
+  // iterate over all the collapsed subtrees to see if this click refers
+  // to one of them.
+  vtkUnsignedIntArray *vertexIsPruned = vtkUnsignedIntArray::SafeDownCast(
+    this->Tree->GetVertexData()->GetArray("VertexIsPruned"));
+  vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
+    this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
+  int orientation = this->GetOrientation();
+
+  for (vtkIdType originalId = 0;
+       originalId < vertexIsPruned->GetNumberOfTuples(); ++originalId)
+    {
+    if (vertexIsPruned->GetValue(originalId) > 0)
+      {
+      // Find PrunedTree's vertex that corresponds to this originalId.
+      for (vtkIdType prunedId = 0;
+           prunedId < originalIdArray->GetNumberOfTuples(); ++prunedId)
+        {
+        if (originalIdArray->GetValue(prunedId) == originalId)
+          {
+          // determine where this collapsed subtree is rooted.
+          double point[3];
+          this->LayoutTree->GetPoint(prunedId, point);
+          point[0] = point[0] * this->MultiplierX + this->Position[0];
+          point[1] = point[1] * this->MultiplierY + this->Position[1];
+
+          // we also need the location of this node's parent
+          double parentPoint[3];
+          this->LayoutTree->GetPoint(
+            this->LayoutTree->GetParent(prunedId), parentPoint);
+          parentPoint[0] = parentPoint[0] * this->MultiplierX +
+            this->Position[0];
+          parentPoint[1] = parentPoint[1] * this->MultiplierY +
+            this->Position[1];
+
+          float xMin = 0.0;
+          float xMax = 0.0;
+          float yMin = 0.0;
+          float yMax = 0.0;
+
+          switch (orientation)
+            {
+            case vtkDendrogramItem::DOWN_TO_UP:
+              // proper width (X) range: within +/- LeafSpacing of the vertex's
+              // X value.
+              xMin = point[0] - this->LeafSpacing / 2;
+              xMax = point[0] + this->LeafSpacing / 2;
+
+              // proper height (Y) range: >= parent's Y value
+              yMin = parentPoint[1];
+              yMax = this->MaxY;
+              break;
+
+            case vtkDendrogramItem::RIGHT_TO_LEFT:
+              // proper width (X) range: <= parent's X value.
+              xMin = this->MinX;
+              xMax = parentPoint[0];
+
+              // proper height (Y) range: within +/- LeafSpacing of the vertex's
+              // Y value.
+              yMin = point[1] - this->LeafSpacing / 2;
+              yMax = point[1] + this->LeafSpacing / 2;
+              break;
+
+            case vtkDendrogramItem::UP_TO_DOWN:
+              // proper width (X) range: within +/- LeafSpacing of the vertex's
+              // X value.
+              xMin = point[0] - this->LeafSpacing / 2;
+              xMax = point[0] + this->LeafSpacing / 2;
+
+              // proper height (Y) range: <= parent's Y value
+              yMin = this->MinY;
+              yMax = parentPoint[1];
+              break;
+
+            case vtkDendrogramItem::LEFT_TO_RIGHT:
+            default:
+              // proper width (X) range: >= parent's X value.
+              xMin = parentPoint[0];
+              xMax = this->MaxX;
+
+              // proper height (Y) range: within +/- LeafSpacing of the vertex's
+              // Y value.
+              yMin = point[1] - this->LeafSpacing / 2;
+              yMax = point[1] + this->LeafSpacing / 2;
+              break;
+            }
+
+          if (x >= xMin && x <= xMax && y >= yMin && y <= yMax)
+            {
+            return prunedId;
+            }
+
+          break;
+          }
+        }
+      }
+    }
+  return -1;
+}
+
+//-----------------------------------------------------------------------------
+vtkIdType vtkDendrogramItem::GetClosestVertex(double x, double y)
+{
+  double minDistance = VTK_DOUBLE_MAX;
+  vtkIdType closestVertex = -1;
+  for (vtkIdType vertex = 0; vertex < this->LayoutTree->GetNumberOfVertices();
+       ++vertex)
+    {
+    if (this->LayoutTree->IsLeaf(vertex))
+      {
+      continue;
+      }
+    double point[3];
+    this->LayoutTree->GetPoint(vertex, point);
+    double distance = sqrt( (x - point[0]) * (x - point[0]) +
+                            (y - point[1]) * (y - point[1]) );
+
+    if (distance < minDistance)
+      {
+      minDistance = distance;
+      closestVertex = vertex;
+      }
+    }
+  return closestVertex;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::CollapseSubTree(vtkIdType vertex)
+{
+  // no removing the root of the tree
+  vtkIdType root = this->PrunedTree->GetRoot();
+  if (vertex == root)
+    {
+    return;
+    }
+
+  // look up the original ID of the vertex that's being collapsed.
+  vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
+    this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
+  vtkIdType originalId = originalIdArray->GetValue(vertex);
+
+  // use this value as the index to the original (un-reindexed) tree's
+  // "VertexIsPruned" array.  Mark that vertex as pruned by recording
+  // how many collapsed leaf nodes exist beneath it.
+  int numLeavesCollapsed = this->CountLeafNodes(originalId);
+
+  // make sure we're not about to collapse away the whole tree
+  int totalLeaves = this->CountLeafNodes(root);
+  if (numLeavesCollapsed >= totalLeaves)
+    {
+    return;
+    }
+
+  // no collapsing of leaf nodes.  This should never happen, but it doesn't
+  // hurt to be safe.
+  if (numLeavesCollapsed == 0)
+    {
+    return;
+    }
+
+  vtkUnsignedIntArray *vertexIsPruned = vtkUnsignedIntArray::SafeDownCast(
+    this->Tree->GetVertexData()->GetArray("VertexIsPruned"));
+  vertexIsPruned->SetValue(originalId, numLeavesCollapsed);
+
+  vtkNew<vtkTree> prunedTreeCopy;
+  prunedTreeCopy->ShallowCopy(this->PrunedTree);
+
+  this->PruneFilter->SetInputData(prunedTreeCopy.GetPointer());
+  this->PruneFilter->SetParentVertex(vertex);
+  this->PruneFilter->Update();
+  this->PrunedTree = this->PruneFilter->GetOutput();
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::ExpandSubTree(vtkIdType vertex)
+{
+  // mark this vertex as "not pruned"
+  vtkUnsignedIntArray *vertexIsPruned = vtkUnsignedIntArray::SafeDownCast(
+    this->Tree->GetVertexData()->GetArray("VertexIsPruned"));
+  vtkIdType vertexOriginalId = this->GetOriginalId(vertex);
+  vertexIsPruned->SetValue(vertexOriginalId, 0);
+
+  // momentarily revert PrunedTree to the full (unpruned) Tree.
+  this->PrunedTree->DeepCopy(this->Tree);
+
+  // re-prune as necessary.  this->Tree has the list of originalIds that
+  // need to be re-pruned.
+  for (vtkIdType originalId = 0;
+       originalId < vertexIsPruned->GetNumberOfTuples(); ++originalId)
+    {
+    if (vertexIsPruned->GetValue(originalId) > 0)
+      {
+      // Find PrunedTree's vertex that corresponds to this originalId.
+      // Use this to re-collapse the subtrees that were not just expanded.
+      vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
+        this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
+      for (vtkIdType prunedId = 0;
+           prunedId < originalIdArray->GetNumberOfTuples(); ++prunedId)
+        {
+        if (originalIdArray->GetValue(prunedId) == originalId)
+          {
+          this->CollapseSubTree(prunedId);
+          break;
+          }
+        }
+      }
+    }
+}
+
+//-----------------------------------------------------------------------------
+vtkIdType vtkDendrogramItem::GetOriginalId(vtkIdType vertex)
+{
+  vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
+    this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
+  return originalIdArray->GetValue(vertex);
+}
+
+//-----------------------------------------------------------------------------
+vtkIdType vtkDendrogramItem::GetPrunedIdForOriginalId(vtkIdType originalId)
+{
+  vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
+    this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
+  for (vtkIdType i = 0; i < originalIdArray->GetNumberOfTuples(); ++i)
+    {
+    if (originalIdArray->GetValue(i) == originalId)
+      {
+      return i;
+      }
+    }
+  return -1;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::CollapseToNumberOfLeafNodes(unsigned int n)
+{
+  // check that the number requested is actually smaller than the number of
+  // leaf nodes in the tree.
+  unsigned int numLeaves = this->CountLeafNodes(this->Tree->GetRoot());
+  if (n >= numLeaves)
+    {
+    vtkWarningMacro( << "n >= total leaf nodes" );
+    return;
+    }
+
+  // reset pruned tree to contain the entire input tree
+  this->PrunedTree->DeepCopy(this->Tree);
+
+  // Initialize a priority queue of vertices based on their weight.
+  // Vertices with lower weight (closer to the root) have a higher priority.
+  std::priority_queue<vtkDendrogramItem::WeightedVertex,
+                      std::vector<vtkDendrogramItem::WeightedVertex>,
+                      vtkDendrogramItem::CompareWeightedVertices> queue;
+  std::vector<vtkIdType> verticesToCollapse;
+  vtkDoubleArray *nodeWeights = vtkDoubleArray::SafeDownCast(
+    this->Tree->GetVertexData()->GetAbstractArray(this->DistanceArrayName));
+
+  // initially, the priority queue contains the children of the root node.
+  vtkIdType root = this->Tree->GetRoot();
+  for (vtkIdType child = 0; child < this->Tree->GetNumberOfChildren(root);
+       ++child)
+    {
+    vtkIdType childVertex = this->Tree->GetChild(root, child);
+
+    double weight = 0.0;
+    if (nodeWeights != NULL)
+      {
+      weight = nodeWeights->GetValue(childVertex);
+      }
+    else
+      {
+      weight = static_cast<double>(this->Tree->GetLevel(childVertex));
+      }
+
+    vtkDendrogramItem::WeightedVertex v = {childVertex, weight};
+    queue.push(v);
+    }
+
+  // use the priority queue to find the vertices that we should collapse.
+  unsigned int numberOfLeafNodesFound = 0;
+  while (queue.size() + numberOfLeafNodesFound < n)
+    {
+    vtkDendrogramItem::WeightedVertex v = queue.top();
+    queue.pop();
+    if (this->Tree->GetNumberOfChildren(v.ID) == 0)
+      {
+      verticesToCollapse.push_back(v.ID);
+      ++numberOfLeafNodesFound;
+      continue;
+      }
+
+    for (vtkIdType child = 0; child < this->Tree->GetNumberOfChildren(v.ID);
+         ++child)
+      {
+      vtkIdType childVertex = this->Tree->GetChild(v.ID, child);
+
+      double weight = 0.0;
+      if (nodeWeights != NULL)
+        {
+        weight = nodeWeights->GetValue(childVertex);
+        }
+      else
+        {
+        weight = static_cast<double>(this->Tree->GetLevel(childVertex));
+        }
+
+      vtkDendrogramItem::WeightedVertex v2 = {childVertex, weight};
+      queue.push(v2);
+      }
+    }
+
+  // collapse the vertices that we found.
+  for (unsigned int i = 0; i < verticesToCollapse.size(); ++i)
+    {
+    vtkIdType prunedId = this->GetPrunedIdForOriginalId(verticesToCollapse[i]);
+    if (prunedId == -1)
+      {
+      vtkErrorMacro("prunedId is -1");
+      continue;
+      }
+    this->CollapseSubTree(prunedId);
+    }
+  while (!queue.empty())
+    {
+    vtkDendrogramItem::WeightedVertex v = queue.top();
+    queue.pop();
+    vtkIdType prunedId = this->GetPrunedIdForOriginalId(v.ID);
+    if (prunedId == -1)
+      {
+      vtkErrorMacro("prunedId is -1");
+      continue;
+      }
+    this->CollapseSubTree(prunedId);
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::SetColorArray(const char *arrayName)
+{
+  this->ColorArray = vtkDoubleArray::SafeDownCast(
+    this->Tree->GetVertexData()->GetArray(arrayName));
+  if (!this->ColorArray)
+    {
+    vtkErrorMacro("Could not downcast " << arrayName << " to a vtkDoubleArray");
+    this->ColorTree = false;
+    return;
+    }
+
+  this->ColorTree = true;
+
+  double minValue = VTK_DOUBLE_MAX;
+  double maxValue = VTK_DOUBLE_MIN;
+
+  for (vtkIdType id = 0; id < this->ColorArray->GetNumberOfTuples(); ++id)
+    {
+    double d = this->ColorArray->GetValue(id);
+    if (d > maxValue)
+      {
+      maxValue = d;
+      }
+    if (d < minValue)
+      {
+      minValue = d;
+      }
+    }
+
+  // special case: when there is no range of values to display, all edges should
+  // be drawn in grey.  Without this, all the edges would be drawn in either red
+  // or blue.
+  if (minValue == maxValue)
+    {
+    this->TreeLookupTable->SetNumberOfTableValues(1);
+    this->TreeLookupTable->SetTableValue(0, 0.60, 0.60, 0.60);
+    // this is done to prevent the legend from being drawn
+    this->LegendPositionSet = true;
+    return;
+    }
+
+  // how much we vary the colors from step to step
+  double inc = 0.06;
+
+  // set up the color lookup table.  It will contain 10 shades of red,
+  // 10 shades of blue, and a grey neutral value.
+
+  this->TreeLookupTable->SetNumberOfTableValues(21);
+  if (fabs(maxValue) > fabs(minValue))
+    {
+    this->TreeLookupTable->SetRange(-maxValue, maxValue);
+    }
+  else
+    {
+    this->TreeLookupTable->SetRange(minValue, -minValue);
+    }
+  for (vtkIdType i = 0; i < 10; ++i)
+    {
+    this->TreeLookupTable->SetTableValue(i,
+      1.0, 0.25 + inc * i, 0.25 + inc * i);
+    }
+  this->TreeLookupTable->SetTableValue(10, 0.60, 0.60, 0.60);
+  for (vtkIdType i = 11; i < 21; ++i)
+    {
+    this->TreeLookupTable->SetTableValue(i,
+      0.85 - inc * (i - 10), 0.85 - inc * (i - 10), 1.0);
+    }
+
+  // initialize color legend
+  this->ColorLegend->SetTransferFunction(
+    this->TreeLookupTable.GetPointer());
+  this->ColorLegend->SetTitle(arrayName);
+  this->PositionColorLegend();
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::PositionColorLegend()
+{
+  // bail out early if we don't have meaningful bounds yet.
+  if (this->MinX > this->MaxX || this->MinY > this->MaxY)
+    {
+    return;
+    }
+
+  int orientation = this->GetOrientation();
+  switch(orientation)
+    {
+    case vtkDendrogramItem::DOWN_TO_UP:
+    case vtkDendrogramItem::UP_TO_DOWN:
+      this->ColorLegend->SetHorizontalAlignment(vtkChartLegend::RIGHT);
+      this->ColorLegend->SetVerticalAlignment(vtkChartLegend::CENTER);
+      this->ColorLegend->SetOrientation(vtkColorLegend::VERTICAL);
+      this->ColorLegend->SetPoint(
+        this->MinX - this->LeafSpacing,
+        this->MinY + (this->MaxY - this->MinY) / 2.0);
+      this->ColorLegend->SetTextureSize(
+        this->ColorLegend->GetSymbolWidth(),
+        this->MaxY - this->MinY);
+      break;
+
+    case vtkDendrogramItem::RIGHT_TO_LEFT:
+    case vtkDendrogramItem::LEFT_TO_RIGHT:
+    default:
+      this->ColorLegend->SetHorizontalAlignment(vtkChartLegend::CENTER);
+      this->ColorLegend->SetVerticalAlignment(vtkChartLegend::TOP);
+      this->ColorLegend->SetOrientation(vtkColorLegend::HORIZONTAL);
+      this->ColorLegend->SetPoint(
+        this->MinX + (this->MaxX - this->MinX) / 2.0,
+        this->MinY - this->LeafSpacing);
+      this->ColorLegend->SetTextureSize(
+        this->MaxX - this->MinX,
+        this->ColorLegend->GetSymbolWidth());
+      break;
+    }
+  this->ColorLegend->Update();
+  this->ColorLegend->SetVisible(true);
+  this->Scene->SetDirty(true);
+  this->LegendPositionSet = true;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::SetOrientation(int orientation)
+{
+  this->SetOrientation(this->Tree, orientation);
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::SetOrientation(vtkTree *tree, int orientation)
+{
+  vtkIntArray *existingArray = vtkIntArray::SafeDownCast(
+    tree->GetFieldData()->GetArray("orientation"));
+  if (existingArray)
+    {
+    existingArray->SetValue(0, orientation);
+    }
+  else
+    {
+    vtkSmartPointer<vtkIntArray> orientationArray =
+      vtkSmartPointer<vtkIntArray>::New();
+    orientationArray->SetNumberOfComponents(1);
+    orientationArray->SetName("orientation");
+    orientationArray->InsertNextValue(orientation);
+    tree->GetFieldData()->AddArray(orientationArray);
+    }
+
+  if (tree == this->Tree)
+    {
+    this->SetOrientation(this->PrunedTree, orientation);
+    this->SetOrientation(this->LayoutTree, orientation);
+    }
+}
+
+//-----------------------------------------------------------------------------
+int vtkDendrogramItem::GetOrientation()
+{
+  vtkIntArray *orientationArray = vtkIntArray::SafeDownCast(
+    this->Tree->GetFieldData()->GetArray("orientation"));
+  if (orientationArray)
+    {
+    return orientationArray->GetValue(0);
+    }
+  return vtkDendrogramItem::LEFT_TO_RIGHT;
+}
+
+//-----------------------------------------------------------------------------
+double vtkDendrogramItem::GetAngleForOrientation(int orientation)
+{
+  switch(orientation)
+    {
+    case vtkDendrogramItem::DOWN_TO_UP:
+      return 180.0;
+      break;
+
+    case vtkDendrogramItem::RIGHT_TO_LEFT:
+      return 270.0;
+      break;
+
+    case vtkDendrogramItem::UP_TO_DOWN:
+      return 0.0;
+      break;
+
+    case vtkDendrogramItem::LEFT_TO_RIGHT:
+    default:
+      return 90.0;
+      break;
+    }
+}
+
+//-----------------------------------------------------------------------------
+double vtkDendrogramItem::GetTextAngleForOrientation(int orientation)
+{
+  switch(orientation)
+    {
+    case vtkDendrogramItem::DOWN_TO_UP:
+      return 90.0;
+      break;
+
+    case vtkDendrogramItem::RIGHT_TO_LEFT:
+      return 0.0;
+      break;
+
+    case vtkDendrogramItem::UP_TO_DOWN:
+      return 270.0;
+      break;
+
+    case vtkDendrogramItem::LEFT_TO_RIGHT:
+    default:
+      return 0.0;
+      break;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::GetBounds(double bounds[4])
+{
+  bounds[0] = this->MinX;
+  bounds[1] = this->MaxX;
+  bounds[2] = this->MinY;
+  bounds[3] = this->MaxY;
+
+  if (this->LabelWidth == 0.0)
+    {
+    return;
+    }
+
+  double spacing = this->LeafSpacing * 0.5;
+
+  switch (this->GetOrientation())
+    {
+    case vtkDendrogramItem::LEFT_TO_RIGHT:
+    default:
+      bounds[1] += spacing + this->LabelWidth;
+      break;
+
+    case vtkDendrogramItem::UP_TO_DOWN:
+      bounds[2] -= spacing + this->LabelWidth;
+      break;
+
+    case vtkDendrogramItem::RIGHT_TO_LEFT:
+      bounds[0] -= spacing + this->LabelWidth;
+      break;
+
+    case vtkDendrogramItem::DOWN_TO_UP:
+      bounds[3] += spacing + this->LabelWidth;
+      break;
+    }
+}
+
+//-----------------------------------------------------------------------------
+float vtkDendrogramItem::GetLabelWidth()
+{
+  return this->LabelWidth;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::ComputeLabelWidth(vtkContext2D *painter)
+{
+  this->LabelWidth = 0.0;
+  if (!this->DrawLabels)
+    {
+    return;
+    }
+  int fontSize = painter->ComputeFontSizeForBoundedString("Igq", VTK_FLOAT_MAX,
+                                                           this->LeafSpacing);
+  if (fontSize < 8)
+    {
+    return;
+    }
+
+  // temporarily set text to default orientation
+  int orientation = painter->GetTextProp()->GetOrientation();
+  painter->GetTextProp()->SetOrientation(0.0);
+
+  // get array of node names from the tree
+  vtkStringArray *vertexNames = vtkStringArray::SafeDownCast(
+    this->LayoutTree->GetVertexData()->GetAbstractArray(
+    this->VertexNameArrayName));
+
+  float bounds[4];
+  for (vtkIdType i = 0; i < vertexNames->GetNumberOfTuples(); ++i)
+    {
+    painter->ComputeStringBounds(vertexNames->GetValue(i), bounds);
+    if (bounds[2] > this->LabelWidth)
+      {
+      this->LabelWidth = bounds[2];
+      }
+    }
+
+  // restore orientation
+  painter->GetTextProp()->SetOrientation(orientation);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkDendrogramItem::GetPositionOfVertex(std::string vertexName,
+                                            double position[2])
+{
+  vtkStringArray *vertexNames = vtkStringArray::SafeDownCast(
+    this->LayoutTree->GetVertexData()->GetAbstractArray(
+    this->VertexNameArrayName));
+
+  vtkIdType vertex = vertexNames->LookupValue(vertexName);
+  if (vertex == -1)
+    {
+    return false;
+    }
+
+  double point[3];
+  this->LayoutTree->GetPoint(vertex, point);
+
+  position[0] = this->Position[0] + point[0] * this->MultiplierX;
+  position[1] = this->Position[1] + point[1] * this->MultiplierY;
+
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkDendrogramItem::Hit(const vtkContextMouseEvent &vtkNotUsed(mouse))
+{
+  // If we are interactive, we want to catch anything that propagates to the
+  // background, otherwise we do not want any mouse events.
+  return this->Interactive;
+}
+
+//-----------------------------------------------------------------------------
+void vtkDendrogramItem::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << "Tree: " << (this->Tree ? "" : "(null)") << std::endl;
+  if (this->Tree->GetNumberOfVertices() > 0)
+    {
+    this->Tree->PrintSelf(os, indent.GetNextIndent());
+    }
+}
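
A minimal usage sketch of the public API implemented above may help when reading this diff.  It is illustrative only (not part of the imported source) and assumes the caller already has a vtkTree whose vertex data carries the "node name" and "node weight" arrays (vtkNewickTreeReader creates both), plus an existing vtkContextScene to draw into:

    #include "vtkContextScene.h"
    #include "vtkDendrogramItem.h"
    #include "vtkNew.h"
    #include "vtkTree.h"

    // Illustrative sketch, not part of this module.
    void AddDendrogramToScene(vtkTree *tree, vtkContextScene *scene)
    {
      vtkNew<vtkDendrogramItem> dendrogram;
      dendrogram->SetTree(tree);                    // also adds the "VertexIsPruned"/"OriginalId" bookkeeping arrays
      dendrogram->SetOrientation(vtkDendrogramItem::LEFT_TO_RIGHT);
      dendrogram->ExtendLeafNodesOn();              // draw grey extensions so leaf nodes line up
      dendrogram->CollapseToNumberOfLeafNodes(10);  // collapse subtrees until only 10 leaves remain
      scene->AddItem(dendrogram.GetPointer());      // the scene calls Paint() when it renders
    }
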
diff --git a/Views/Infovis/vtkDendrogramItem.h b/Views/Infovis/vtkDendrogramItem.h
new file mode 100644
index 0000000..2cd83c5
--- /dev/null
+++ b/Views/Infovis/vtkDendrogramItem.h
@@ -0,0 +1,360 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkDendrogramItem.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkDendrogramItem - A 2D graphics item for rendering a tree as
+// a dendrogram
+//
+// .SECTION Description
+// Draw a tree as a dendrogram.
+// The input tree's vertex data must contain at least two arrays.
+// The first required array is a vtkStringArray called "node name".
+// This array is used to label the leaf nodes of the tree.
+// The second required array is a scalar array called "node weight".
+// This array is used by vtkTreeLayoutStrategy to set any particular
+// node's distance from the root of the tree.
+//
+// The vtkNewickTreeReader automatically initializes both of these
+// required arrays in its output tree.
+//
+// .SEE ALSO
+// vtkTree vtkNewickTreeReader
+
+#ifndef __vtkDendrogramItem_h
+#define __vtkDendrogramItem_h
+
+#include "vtkViewsInfovisModule.h" // For export macro
+#include "vtkContextItem.h"
+
+#include "vtkNew.h"               // For vtkNew ivars
+#include "vtkStdString.h"         // For SetGet ivars
+#include "vtkSmartPointer.h"      // For vtkSmartPointer ivars
+#include "vtkVector.h"            // For vtkVector2f ivar
+
+class vtkColorLegend;
+class vtkDoubleArray;
+class vtkGraphLayout;
+class vtkLookupTable;
+class vtkPruneTreeFilter;
+class vtkTree;
+
+class VTKVIEWSINFOVIS_EXPORT vtkDendrogramItem : public vtkContextItem
+{
+public:
+  static vtkDendrogramItem *New();
+  vtkTypeMacro(vtkDendrogramItem, vtkContextItem);
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Set the tree that this item draws.  Note that this tree's vertex data
+  // must contain a vtkStringArray called "node name".  The vtkNewickTreeReader
+  // automatically creates this required array for you.
+  virtual void SetTree(vtkTree *tree);
+
+  // Description:
+  // Get the tree that this item draws.
+  vtkTree * GetTree();
+
+  // Description:
+  // Collapse subtrees until there are only n leaf nodes left in the tree.
+  // The leaf nodes that remain are those that are closest to the root.
+  // Any subtrees that were collapsed prior to this function being called
+  // may be re-expanded.
+  void CollapseToNumberOfLeafNodes(unsigned int n);
+
+  // Description:
+  // Get the collapsed tree
+  vtkTree * GetPrunedTree();
+
+  // Description:
+  // Indicate which array within the Tree's VertexData should be used to
+  // color the tree.  The specified array must be a vtkDoubleArray.
+  // By default, the tree will be drawn in black.
+  void SetColorArray(const char *arrayName);
+
+  // Description:
+  // Get/set whether or not leaf nodes should be extended so that they all line
+  // up vertically.  The default is to NOT extend leaf nodes.  When extending
+  // leaf nodes, the extra length is drawn in grey so as to distinguish it from
+  // the actual length of the leaf node.
+  vtkSetMacro(ExtendLeafNodes, bool);
+  vtkGetMacro(ExtendLeafNodes, bool);
+  vtkBooleanMacro(ExtendLeafNodes, bool);
+
+  // Description:
+  // Set which way the tree should face within the visualization.  The default
+  // is for the tree to be drawn left to right.
+  void SetOrientation(int orientation);
+
+  // Description:
+  // Get the current tree orientation.
+  int GetOrientation();
+
+  // Description:
+  // Get the rotation angle (in degrees) that corresponds to the given
+  // tree orientation.  For the default orientation (LEFT_TO_RIGHT), this
+  // is 90 degrees.
+  double GetAngleForOrientation(int orientation);
+
+  // Description:
+  // Get the angle that vertex labels should be rotated for the corresponding
+  // tree orientation.  For the default orientation (LEFT_TO_RIGHT), this
+  // is 0 degrees.
+  double GetTextAngleForOrientation(int orientation);
+
+  // Description:
+  // Get/Set whether or not leaf nodes should be labeled by this class.
+  // Default is true.
+  vtkSetMacro(DrawLabels, bool);
+  vtkGetMacro(DrawLabels, bool);
+  vtkBooleanMacro(DrawLabels, bool);
+
+  // Description:
+  // Set the position of the dendrogram.
+  vtkSetVector2Macro(Position, float);
+  void SetPosition(const vtkVector2f &pos);
+
+  // Description:
+  // Get position of the dendrogram.
+  vtkGetVector2Macro(Position, float);
+  vtkVector2f GetPositionVector();
+
+  // Description:
+  // Get/Set the spacing between the leaf nodes in our dendrogram.
+  // Default is 18 pixels.
+  vtkGetMacro(LeafSpacing, double);
+  vtkSetMacro(LeafSpacing, double);
+
+  // Description:
+  // This function calls RebuildBuffers() if necessary.
+  // Once PrepareToPaint() has been called, GetBounds() is guaranteed
+  // to provide useful information.
+  void PrepareToPaint(vtkContext2D *painter);
+
+  // Description:
+  // Get the bounds for this item as (Xmin,Xmax,Ymin,Ymax).
+  // These bounds are only guaranteed to be accurate after Paint() or
+  // PrepareToPaint() has been called.
+  virtual void GetBounds(double bounds[4]);
+
+  // Description:
+  // Compute the width of the longest leaf node label.
+  void ComputeLabelWidth(vtkContext2D *painter);
+
+  // Description:
+  // Get the width of the longest leaf node label.
+  float GetLabelWidth();
+
+  // Description:
+  // Find the position of the vertex with the specified name.  Store
+  // this information in the passed array.  Returns true if the vertex
+  // was found, false otherwise.
+  bool GetPositionOfVertex(std::string vertexName, double position[2]);
+
+  // Description:
+  // Paints the input tree as a dendrogram.
+  virtual bool Paint(vtkContext2D *painter);
+
+  // Description:
+  // Get/Set how wide the edges of this dendrogram should be.  Default is one pixel.
+  vtkGetMacro(LineWidth, float);
+  vtkSetMacro(LineWidth, float);
+
+  // Description:
+  // Get/set whether or not the number of collapsed leaf nodes should be written
+  // inside the triangle representing a collapsed subtree.  Default is true.
+  vtkSetMacro(DisplayNumberOfCollapsedLeafNodes, bool);
+  vtkGetMacro(DisplayNumberOfCollapsedLeafNodes, bool);
+  vtkBooleanMacro(DisplayNumberOfCollapsedLeafNodes, bool);
+
+  // Description:
+  // Get/Set the name of the array that specifies the distance of each vertex
+  // from the root (NOT the vertex's parent).  This array should be a part of
+  // the input tree's VertexData.  By default, this value is "node weight",
+  // which is the name of the array created by vtkNewickTreeReader.
+  vtkGetMacro(DistanceArrayName, vtkStdString);
+  vtkSetMacro(DistanceArrayName, vtkStdString);
+
+  // Description:
+  // Get/Set the name of a vtkStringArray that specifies the names of the
+  // vertices of the input tree.  This array should be a part of the input
+  // tree's VertexData.  By default, this value is "node name", which is the
+  // name of the array created by vtkNewickTreeReader.
+  vtkGetMacro(VertexNameArrayName, vtkStdString);
+  vtkSetMacro(VertexNameArrayName, vtkStdString);
+
+  // this struct & class allow us to generate a priority queue of vertices.
+  struct WeightedVertex
+  {
+    vtkIdType ID;
+    double weight;
+  };
+  class CompareWeightedVertices
+  {
+    public:
+    // Returns true if v2 has higher priority than v1.
+    bool operator()(WeightedVertex& v1, WeightedVertex& v2)
+    {
+    if (v1.weight < v2.weight)
+      {
+      return false;
+      }
+    return true;
+    }
+  };
+
+  //BTX
+  // Description:
+  // Enum for Orientation.
+  enum
+    {
+    LEFT_TO_RIGHT,
+    UP_TO_DOWN,
+    RIGHT_TO_LEFT,
+    DOWN_TO_UP
+    };
+
+  // Description:
+  // Returns true if this item is interactive, false otherwise.
+  virtual bool Hit(const vtkContextMouseEvent &mouse);
+
+  // Description:
+  // Collapse or expand a subtree when the user double clicks on an
+  // internal node.
+  virtual bool MouseDoubleClickEvent( const vtkContextMouseEvent &event);
+  //ETX
+
+protected:
+  vtkDendrogramItem();
+  ~vtkDendrogramItem();
+
+  vtkVector2f PositionVector;
+  float* Position;
+
+  // Description:
+  // Generate some data needed for painting.  We cache this information as
+  // it only needs to be generated when the input data changes.
+  virtual void RebuildBuffers();
+
+  // Description:
+  // This function does the bulk of the actual work in rendering our dendrogram
+  virtual void PaintBuffers(vtkContext2D *painter);
+
+  // Description:
+  // This function returns a bool indicating whether or not we need to rebuild
+  // our cached data before painting.
+  virtual bool IsDirty();
+
+  // Description:
+  // Compute how to scale our data so that text labels will fit within the
+  // bounds determined by the spacing between the leaf nodes of the tree.
+  void ComputeMultipliers();
+
+  // Description:
+  // Compute the bounds of our tree in pixel coordinates.
+  void ComputeBounds();
+
+  // Description:
+  // Count the number of leaf nodes in the tree
+  void CountLeafNodes();
+
+  // Description:
+  // Count the number of leaf nodes that descend from a given vertex.
+  int CountLeafNodes(vtkIdType vertex);
+
+  // Description:
+  // Get the tree vertex closest to the specified coordinates.
+  vtkIdType GetClosestVertex(double x, double y);
+
+  // Description:
+  // Collapse the subtree rooted at vertex.
+  void CollapseSubTree(vtkIdType vertex);
+
+  // Description:
+  // Expand the previously collapsed subtree rooted at vertex.
+  void ExpandSubTree(vtkIdType vertex);
+
+  // Description:
+  // Look up the original ID of a vertex in the pruned tree.
+  vtkIdType GetOriginalId(vtkIdType vertex);
+
+  // Description:
+  // Look up the ID of a vertex in the pruned tree from a vertex ID
+  // of the input tree.
+  vtkIdType GetPrunedIdForOriginalId(vtkIdType originalId);
+
+  // Description:
+  // Check if the click at (x, y) should be considered as a click on
+  // a collapsed subtree.  Returns the vtkIdType of the pruned subtree
+  // if so, -1 otherwise.
+  vtkIdType GetClickedCollapsedSubTree(double x, double y);
+
+  // Description:
+  // Calculate the extent of the data that is visible within the window.
+  // This information is used to ensure that we only draw details that
+  // will be seen by the user.  This improves rendering speed, particularly
+  // for larger data.
+  void UpdateVisibleSceneExtent(vtkContext2D *painter);
+
+  // Description:
+  // Returns true if any part of the line segment defined by endpoints
+  // (x0, y0), (x1, y1) falls within the extent of the currently
+  // visible scene.  Returns false otherwise.
+  bool LineIsVisible(double x0, double y0, double x1, double y1);
+
+  // Description:
+  // Internal function.  Use SetOrientation(int orientation) instead.
+  void SetOrientation(vtkTree *tree, int orientation);
+
+  // Setup the position, size, and orientation of this dendrogram's color
+  // legend based on the dendrogram's current orientation.
+  void PositionColorLegend();
+
+  vtkSmartPointer<vtkTree> Tree;
+  vtkSmartPointer<vtkTree> LayoutTree;
+
+private:
+  vtkDendrogramItem(const vtkDendrogramItem&); // Not implemented
+  void operator=(const vtkDendrogramItem&); // Not implemented
+
+  vtkSmartPointer<vtkTree> PrunedTree;
+  unsigned long DendrogramBuildTime;
+  vtkNew<vtkGraphLayout> Layout;
+  vtkNew<vtkPruneTreeFilter> PruneFilter;
+  vtkNew<vtkLookupTable> TriangleLookupTable;
+  vtkNew<vtkLookupTable> TreeLookupTable;
+  vtkNew<vtkColorLegend> ColorLegend;
+  vtkDoubleArray* ColorArray;
+  double MultiplierX;
+  double MultiplierY;
+  int NumberOfLeafNodes;
+  double LeafSpacing;
+
+  double MinX;
+  double MinY;
+  double MaxX;
+  double MaxY;
+  double SceneBottomLeft[3];
+  double SceneTopRight[3];
+  float LabelWidth;
+  float LineWidth;
+  bool ColorTree;
+  bool ExtendLeafNodes;
+  bool DrawLabels;
+  bool DisplayNumberOfCollapsedLeafNodes;
+  bool LegendPositionSet;
+  vtkStdString DistanceArrayName;
+  vtkStdString VertexNameArrayName;
+};
+
+#endif
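
Illustrative sketch (not part of the upstream patch): the WeightedVertex /
CompareWeightedVertices pair declared in vtkDendrogramItem.h above is shaped
for std::priority_queue.  Assuming both members remain public, as the
surrounding declarations suggest, a queue that surfaces the smallest-weight
vertex first could be declared like this:

    #include <queue>
    #include <vector>
    #include "vtkDendrogramItem.h"

    // Illustrative only: best-first ordering over tree vertices.  With the
    // comparator above, the vertex with the smallest weight is popped first.
    typedef std::priority_queue<vtkDendrogramItem::WeightedVertex,
                                std::vector<vtkDendrogramItem::WeightedVertex>,
                                vtkDendrogramItem::CompareWeightedVertices>
      WeightedVertexQueue;
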
diff --git a/Views/Infovis/vtkHeatmapItem.cxx b/Views/Infovis/vtkHeatmapItem.cxx
new file mode 100644
index 0000000..e924d78
--- /dev/null
+++ b/Views/Infovis/vtkHeatmapItem.cxx
@@ -0,0 +1,1240 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkHeatmapItem.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkHeatmapItem.h"
+
+#include "vtkBitArray.h"
+#include "vtkBrush.h"
+#include "vtkCategoryLegend.h"
+#include "vtkColorLegend.h"
+#include "vtkColorSeries.h"
+#include "vtkContext2D.h"
+#include "vtkContextMouseEvent.h"
+#include "vtkContextScene.h"
+#include "vtkFieldData.h"
+#include "vtkIntArray.h"
+#include "vtkLookupTable.h"
+#include "vtkNew.h"
+#include "vtkObjectFactory.h"
+#include "vtkPen.h"
+#include "vtkRect.h"
+#include "vtkStringArray.h"
+#include "vtkTable.h"
+#include "vtkTextProperty.h"
+#include "vtkTooltipItem.h"
+#include "vtkTransform2D.h"
+#include "vtkVariantArray.h"
+
+#include <sstream>
+
+vtkStandardNewMacro(vtkHeatmapItem);
+
+//-----------------------------------------------------------------------------
+vtkHeatmapItem::vtkHeatmapItem() : PositionVector(0, 0)
+{
+  this->Position = this->PositionVector.GetData();
+  this->Interactive = true;
+  this->HeatmapBuildTime = 0;
+  this->Table = vtkSmartPointer<vtkTable>::New();
+
+  this->CollapsedRowsArray = NULL;
+  this->CollapsedColumnsArray = NULL;
+
+  /* initialize bounds so that the mouse cursor is never considered
+   * "inside" the heatmap */
+  this->MinX = 1.0;
+  this->MinY = 1.0;
+  this->MaxX = 0.0;
+  this->MaxY = 0.0;
+
+  this->RowLabelWidth = 0.0;
+  this->ColumnLabelWidth = 0.0;
+
+  this->CellHeight = 18.0;
+  this->CellWidth = this->CellHeight * 2.0;
+
+  this->CategoryLegend->SetVisible(false);
+  this->CategoryLegend->CacheBoundsOff();
+  this->AddItem(this->CategoryLegend.GetPointer());
+
+  this->ColorLegend->SetVisible(false);
+  this->ColorLegend->DrawBorderOn();
+  this->ColorLegend->CacheBoundsOff();
+  this->AddItem(this->ColorLegend.GetPointer());
+
+  this->LegendPositionSet = false;
+
+  this->Tooltip->SetVisible(false);
+  this->AddItem(this->Tooltip.GetPointer());
+}
+
+//-----------------------------------------------------------------------------
+vtkHeatmapItem::~vtkHeatmapItem()
+{
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::SetPosition(const vtkVector2f &pos)
+{
+  this->PositionVector = pos;
+}
+
+//-----------------------------------------------------------------------------
+vtkVector2f vtkHeatmapItem::GetPositionVector()
+{
+  return this->PositionVector;
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::SetTable(vtkTable *table)
+{
+  if (table == NULL || table->GetNumberOfRows() == 0)
+    {
+    this->Table = vtkSmartPointer<vtkTable>::New();
+    return;
+    }
+  this->Table = table;
+}
+
+//-----------------------------------------------------------------------------
+vtkTable * vtkHeatmapItem::GetTable()
+{
+  return this->Table;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkHeatmapItem::Paint(vtkContext2D *painter)
+{
+  if (this->Table->GetNumberOfRows() == 0)
+    {
+    return true;
+    }
+
+  if (this->IsDirty())
+    {
+    this->RebuildBuffers();
+    }
+
+  this->PaintBuffers(painter);
+  this->PaintChildren(painter);
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkHeatmapItem::IsDirty()
+{
+  if (this->Table->GetNumberOfRows() == 0)
+    {
+    return false;
+    }
+  if (this->Table->GetMTime() > this->HeatmapBuildTime)
+    {
+    return true;
+    }
+  return false;
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::RebuildBuffers()
+{
+  if (this->Table->GetNumberOfRows() == 0)
+    {
+    return;
+    }
+
+  this->InitializeLookupTables();
+
+  this->CollapsedRowsArray = vtkBitArray::SafeDownCast(
+    this->Table->GetFieldData()->GetArray("collapsed rows"));
+  this->CollapsedColumnsArray = vtkBitArray::SafeDownCast(
+    this->Table->GetFieldData()->GetArray("collapsed columns"));
+
+  this->HeatmapBuildTime = this->Table->GetMTime();
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::InitializeLookupTables()
+{
+  this->ColumnRanges.clear();
+  this->CategoricalDataValues->Reset();
+
+  for (vtkIdType column = 1; column < this->Table->GetNumberOfColumns();
+       ++column)
+    {
+    if (this->Table->GetValue(0, column).IsString())
+      {
+      this->AccumulateProminentCategoricalDataValues(column);
+      continue;
+      }
+    double min = VTK_DOUBLE_MAX;
+    double max = VTK_DOUBLE_MIN;
+    for (vtkIdType row = 0; row < this->Table->GetNumberOfRows(); ++row)
+      {
+      double value = this->Table->GetValue(row, column).ToDouble();
+      if (value > max)
+        {
+        max = value;
+        }
+      if (value < min)
+        {
+        min = value;
+        }
+      }
+    this->ColumnRanges[column] = std::pair<double, double>(min, max);
+    }
+
+  this->GenerateCategoricalDataLookupTable();
+  this->GenerateContinuousDataLookupTable();
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::GenerateContinuousDataLookupTable()
+{
+  this->ContinuousDataLookupTable->SetNumberOfTableValues(255);
+  this->ContinuousDataLookupTable->Build();
+  this->ContinuousDataLookupTable->SetRange(0, 255);
+  this->ContinuousDataLookupTable->SetNanColor(0.75, 0.75, 0.75, 1.0);
+
+  // black to red
+  for (int i = 0; i < 85; ++i)
+    {
+    float f = static_cast<float>(i) / 84.0;
+    this->ContinuousDataLookupTable->SetTableValue(i, f, 0, 0);
+    }
+
+  // red to yellow
+  for (int i = 0; i < 85; ++i)
+    {
+    float f = static_cast<float>(i) / 84.0;
+    this->ContinuousDataLookupTable->SetTableValue(85 + i, 1.0, f, 0);
+    }
+
+  // yellow to white
+  for (int i = 0; i < 85; ++i)
+    {
+    float f = static_cast<float>(i) / 84.0;
+    this->ContinuousDataLookupTable->SetTableValue(170 + i, 1.0, 1.0, f);
+    }
+
+  this->ColorLegendLookupTable->DeepCopy(
+    this->ContinuousDataLookupTable.GetPointer());
+  this->ColorLegend->SetTransferFunction(
+    this->ColorLegendLookupTable.GetPointer());
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::AccumulateProminentCategoricalDataValues(vtkIdType column)
+{
+  vtkStringArray *stringColumn = vtkStringArray::SafeDownCast(
+    this->Table->GetColumn(column));
+
+  // search for values that occur more than once
+  vtkNew<vtkStringArray> repeatedValues;
+  std::map<std::string, int> CountMap;
+  std::map<std::string, int>::iterator mapItr;
+  for (int i = 0; i < stringColumn->GetNumberOfTuples(); ++i)
+    {
+    CountMap[stringColumn->GetValue(i)]++;
+    }
+
+  for (mapItr = CountMap.begin(); mapItr != CountMap.end(); ++mapItr)
+    {
+    if (mapItr->second > 1)
+      {
+      repeatedValues->InsertNextValue(mapItr->first);
+      }
+    }
+
+  // add each distinct, repeated value from this column to our master list
+  for (int i = 0; i < repeatedValues->GetNumberOfTuples(); ++i)
+    {
+    vtkVariant v = repeatedValues->GetVariantValue(i);
+    if (this->CategoricalDataValues->LookupValue(v) == -1)
+      {
+      this->CategoricalDataValues->InsertNextValue(v.ToString());
+      }
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::GenerateCategoricalDataLookupTable()
+{
+  this->CategoricalDataLookupTable->ResetAnnotations();
+  this->CategoricalDataLookupTable->SetNanColor(0.75, 0.75, 0.75, 1.0);
+
+  // make each distinct categorical value an index into our lookup table
+  for (int i = 0; i < this->CategoricalDataValues->GetNumberOfTuples(); ++i)
+    {
+    this->CategoricalDataLookupTable->SetAnnotation(
+      this->CategoricalDataValues->GetValue(i),
+      this->CategoricalDataValues->GetValue(i));
+    }
+
+  vtkNew<vtkColorSeries> colorSeries;
+  colorSeries->SetColorScheme(vtkColorSeries::BREWER_QUALITATIVE_SET3);
+  colorSeries->BuildLookupTable(this->CategoricalDataLookupTable.GetPointer());
+
+  this->CategoryLegend->SetScalarsToColors(
+    this->CategoricalDataLookupTable.GetPointer());
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::PaintBuffers(vtkContext2D *painter)
+{
+  // Calculate the extent of the data that is visible within the window.
+  this->UpdateVisibleSceneExtent(painter);
+
+  // Compute the bounds of the heatmap (excluding text labels)
+  this->ComputeBounds();
+
+  // leave a small amount of space between the heatmap and the row/column
+  // labels
+  double spacing = this->CellWidth * 0.25;
+
+  // variables used to calculate the positions of elements drawn on screen.
+  double cellStartX = 0.0;
+  double cellStartY = 0.0;
+  double labelStartX = 0.0;
+  double labelStartY = 0.0;
+
+  // the name of each row.
+  vtkStringArray *rowNames = vtkStringArray::SafeDownCast(
+    this->Table->GetColumn(0));
+
+  bool currentlyCollapsingRows = false;
+
+  bool currentlyCollapsingColumns = false;
+
+  // this map helps us display information about the correct row & column
+  // in our tooltips
+  this->SceneRowToTableRowMap.clear();
+  this->SceneRowToTableRowMap.assign(this->Table->GetNumberOfRows(), -1);
+  this->SceneColumnToTableColumnMap.clear();
+  this->SceneColumnToTableColumnMap.assign(
+    this->Table->GetNumberOfColumns(), -1);
+
+  // Setup text property & calculate an appropriate font size for this zoom
+  // level.  "Igq" is used because its characters span the full glyph height range.
+  painter->GetTextProp()->SetColor(0.0, 0.0, 0.0);
+  painter->GetTextProp()->SetVerticalJustificationToCentered();
+  painter->GetTextProp()->SetJustificationToLeft();
+  painter->GetTextProp()->SetOrientation(0.0);
+  int fontSize = painter->ComputeFontSizeForBoundedString("Igq", VTK_FLOAT_MAX,
+                                                          this->CellHeight);
+
+  // canDrawText is set to false if we're too zoomed out to draw legible text.
+  bool canDrawText = true;
+  if (fontSize < 8)
+    {
+    canDrawText = false;
+    }
+  bool drawRowLabels = canDrawText;
+  bool drawColumnLabels = canDrawText;
+
+  int orientation = this->GetOrientation();
+
+  // Detect if our row or column labels would be currently visible on screen.
+  if (canDrawText)
+    {
+    switch (orientation)
+      {
+      case vtkHeatmapItem::DOWN_TO_UP:
+        if (this->SceneBottomLeft[1] > this->MaxY + spacing ||
+            this->SceneTopRight[1] < this->MaxY + spacing)
+          {
+          drawRowLabels = false;
+          }
+        if (this->SceneBottomLeft[0] > this->MaxX + spacing ||
+            this->SceneTopRight[0] < this->MaxX + spacing)
+          {
+          drawColumnLabels = false;
+          }
+        break;
+
+      case vtkHeatmapItem::RIGHT_TO_LEFT:
+        if (this->SceneBottomLeft[0] > this->MinX - spacing ||
+            this->SceneTopRight[0] < this->MinX - spacing)
+          {
+          drawRowLabels = false;
+          }
+        else
+          {
+          painter->GetTextProp()->SetJustificationToRight();
+          }
+        if (this->SceneBottomLeft[1] > this->MaxY + spacing ||
+            this->SceneTopRight[1] < this->MaxY + spacing)
+          {
+          drawColumnLabels = false;
+          }
+        break;
+
+      case vtkHeatmapItem::UP_TO_DOWN:
+        if (this->SceneBottomLeft[1] > this->MinY - spacing ||
+            this->SceneTopRight[1] < this->MinY - spacing)
+          {
+          drawRowLabels = false;
+          }
+        else
+          {
+          painter->GetTextProp()->SetJustificationToRight();
+          }
+        if (this->SceneBottomLeft[0] > this->MaxX + spacing ||
+            this->SceneTopRight[0] < this->MaxX + spacing)
+          {
+          drawColumnLabels = false;
+          }
+        break;
+
+      case vtkHeatmapItem::LEFT_TO_RIGHT:
+      default:
+        if (this->SceneBottomLeft[0] > this->MaxX + spacing ||
+            this->SceneTopRight[0] < this->MaxX + spacing)
+          {
+          drawRowLabels = false;
+          }
+        if (this->SceneBottomLeft[1] > this->MaxY + spacing ||
+            this->SceneTopRight[1] < this->MaxY + spacing)
+          {
+          drawColumnLabels = false;
+          }
+        break;
+      }
+    }
+
+  // set the orientation of our text property to draw row names
+  if (drawRowLabels)
+    {
+    painter->GetTextProp()->SetOrientation(
+      this->GetTextAngleForOrientation(orientation));
+    }
+
+  // keep track of what row & column we're drawing next
+  vtkIdType rowToDraw = 0;
+  vtkIdType columnToDraw = 0;
+  bool columnMapSet = false;
+
+  for (vtkIdType row = 0; row != this->Table->GetNumberOfRows(); ++row)
+    {
+    // check if this row has been collapsed or not
+    if (this->CollapsedRowsArray && this->CollapsedRowsArray->GetValue(row) == 1)
+      {
+      // a contiguous block of collapsed rows is represented as a single blank
+      // row by this item.
+      if (!currentlyCollapsingRows)
+        {
+        this->SceneRowToTableRowMap[rowToDraw] = -1;
+        ++rowToDraw;
+        currentlyCollapsingRows = true;
+        }
+      continue;
+      }
+    currentlyCollapsingRows = false;
+
+    // get the name of this row
+    std::string name = "";
+    if (rowNames)
+      {
+      name = rowNames->GetValue(row);
+      }
+
+    // only draw the cells of this row if it isn't explicitly marked as blank
+    if (this->BlankRows.find(name) == this->BlankRows.end())
+      {
+      columnToDraw = 0;
+      for (vtkIdType column = 1; column < this->Table->GetNumberOfColumns();
+           ++column)
+        {
+
+        // check if this column has been collapsed or not
+        if (this->CollapsedColumnsArray &&
+            this->CollapsedColumnsArray->GetValue(column) == 1)
+          {
+          // a contiguous block of collapsed columns is represented as a single blank
+          // column by this item.
+          if (!currentlyCollapsingColumns)
+            {
+            this->SceneColumnToTableColumnMap[columnToDraw] = -1;
+            ++columnToDraw;
+            currentlyCollapsingColumns = true;
+            }
+          continue;
+          }
+        currentlyCollapsingColumns = false;
+
+        // get the color for this cell from the lookup table
+        double color[4];
+        vtkVariant value = this->Table->GetValue(row, column);
+        if (value.IsString())
+          {
+          this->CategoricalDataLookupTable->GetAnnotationColor(value, color);
+          }
+        else
+          {
+          // set the range on our continuous lookup table for this column
+          this->ContinuousDataLookupTable->SetRange(
+            this->ColumnRanges[column].first,
+            this->ColumnRanges[column].second);
+
+          // get the color for this value
+          this->ContinuousDataLookupTable->GetColor(value.ToDouble(), color);
+          }
+        painter->GetBrush()->SetColorF(color[0], color[1], color[2]);
+
+        // draw this cell of the table
+        double w = 0.0;
+        double h = 0.0;
+        switch(orientation)
+          {
+          case vtkHeatmapItem::DOWN_TO_UP:
+            cellStartX = this->Position[0] + this->CellHeight * rowToDraw;
+            cellStartY = this->MinY + this->CellWidth * columnToDraw;
+            h = this->CellWidth;
+            w = this->CellHeight;
+            break;
+
+          case vtkHeatmapItem::RIGHT_TO_LEFT:
+            cellStartX = this->MinX + this->CellWidth * columnToDraw;
+            cellStartY = this->Position[1] + this->CellHeight * rowToDraw;
+            w = this->CellWidth;
+            h = this->CellHeight;
+            break;
+
+          case vtkHeatmapItem::UP_TO_DOWN:
+            cellStartX = this->Position[0] + this->CellHeight * rowToDraw;
+            cellStartY = this->MinY + this->CellWidth * columnToDraw;
+            h = this->CellWidth;
+            w = this->CellHeight;
+            break;
+
+          case vtkHeatmapItem::LEFT_TO_RIGHT:
+          default:
+            cellStartX = this->MinX + this->CellWidth * columnToDraw;
+            cellStartY = this->Position[1] + this->CellHeight * rowToDraw;
+            w = this->CellWidth;
+            h = this->CellHeight;
+            break;
+          }
+
+        if (this->LineIsVisible(cellStartX, cellStartY,
+                                cellStartX + this->CellWidth,
+                                cellStartY + this->CellHeight) ||
+            this->LineIsVisible(cellStartX, cellStartY + this->CellHeight,
+                                cellStartX + this->CellWidth, cellStartY))
+          {
+          painter->DrawRect(cellStartX, cellStartY, w, h);
+          }
+
+        if (!columnMapSet)
+          {
+          this->SceneColumnToTableColumnMap[columnToDraw] = column;
+          }
+
+        ++columnToDraw;
+        }
+      columnMapSet = true;
+      }
+
+    this->SceneRowToTableRowMap[rowToDraw] = row;
+    ++rowToDraw;
+
+    // draw this row's label if it would be visible
+    if (!drawRowLabels)
+      {
+      continue;
+      }
+
+    switch (orientation)
+      {
+      case vtkHeatmapItem::DOWN_TO_UP:
+        labelStartX = cellStartX + this->CellHeight / 2.0;
+        labelStartY = this->MaxY + spacing;
+        break;
+      case vtkHeatmapItem::RIGHT_TO_LEFT:
+        labelStartX = this->MinX - spacing;
+        labelStartY = cellStartY + this->CellHeight / 2.0;
+        break;
+      case vtkHeatmapItem::UP_TO_DOWN:
+        labelStartX = cellStartX + this->CellHeight / 2.0;
+        labelStartY = this->MinY - spacing;
+        break;
+      case vtkHeatmapItem::LEFT_TO_RIGHT:
+      default:
+        labelStartX = this->MaxX + spacing;
+        labelStartY = cellStartY + this->CellHeight / 2.0;
+        break;
+      }
+
+    if (name != "" &&
+        this->SceneBottomLeft[0] < labelStartX &&
+        this->SceneTopRight[0] > labelStartX   &&
+        this->SceneBottomLeft[1] < labelStartY &&
+        this->SceneTopRight[1] > labelStartY)
+      {
+      painter->DrawString(labelStartX, labelStartY, name);
+      }
+    }
+
+  // draw column labels
+  if (!canDrawText)
+    {
+    this->RowLabelWidth = 0.0;
+    this->ColumnLabelWidth = 0.0;
+    return;
+    }
+
+  if (!drawColumnLabels)
+    {
+    this->ComputeLabelWidth(painter);
+    this->ColumnLabelWidth = 0.0;
+    return;
+    }
+
+  // set up our text property to draw column labels appropriately for
+  // the current orientation.
+  switch (orientation)
+    {
+    case vtkHeatmapItem::DOWN_TO_UP:
+    case vtkHeatmapItem::UP_TO_DOWN:
+      painter->GetTextProp()->SetOrientation(0);
+      break;
+
+    case vtkHeatmapItem::RIGHT_TO_LEFT:
+    case vtkHeatmapItem::LEFT_TO_RIGHT:
+    default:
+      painter->GetTextProp()->SetOrientation(90);
+      break;
+    }
+
+  painter->GetTextProp()->SetJustificationToLeft();
+
+  columnToDraw = 1;
+  for (vtkIdType column = 1; column < this->Table->GetNumberOfColumns();
+       ++column)
+    {
+    // check if this column has been collapsed or not
+    if (this->CollapsedColumnsArray &&
+        this->CollapsedColumnsArray->GetValue(column) == 1)
+      {
+      // a contiguous block of collapsed columns is represented as a single blank
+      // column by this item.
+      if (!currentlyCollapsingColumns)
+        {
+        ++columnToDraw;
+        currentlyCollapsingColumns = true;
+        }
+      continue;
+      }
+    currentlyCollapsingColumns = false;
+
+    switch(orientation)
+      {
+      case vtkHeatmapItem::DOWN_TO_UP:
+      case vtkHeatmapItem::UP_TO_DOWN:
+        labelStartX = this->MaxX + spacing;
+        labelStartY =
+          this->MinY + this->CellWidth * columnToDraw - this->CellWidth / 2;
+        break;
+
+      case vtkHeatmapItem::RIGHT_TO_LEFT:
+      case vtkHeatmapItem::LEFT_TO_RIGHT:
+      default:
+        labelStartX =
+          this->MinX + this->CellWidth * columnToDraw - this->CellWidth / 2;
+        labelStartY = this->MaxY + spacing;
+        break;
+      }
+
+    std::string columnName = this->Table->GetColumn(column)->GetName();
+    if (this->SceneBottomLeft[0] < labelStartX &&
+        this->SceneTopRight[0] > labelStartX &&
+        this->SceneBottomLeft[1] < labelStartY &&
+        this->SceneTopRight[1] > labelStartY)
+      {
+      painter->DrawString(labelStartX, labelStartY, columnName);
+      }
+    ++columnToDraw;
+    }
+
+  // update the size of our labels
+  this->ComputeLabelWidth(painter);
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::UpdateVisibleSceneExtent(vtkContext2D *painter)
+{
+  float position[2];
+  painter->GetTransform()->GetPosition(position);
+  this->SceneBottomLeft[0] = -position[0];
+  this->SceneBottomLeft[1] = -position[1];
+  this->SceneBottomLeft[2] = 0.0;
+
+  this->SceneTopRight[0] =
+    static_cast<double>(this->GetScene()->GetSceneWidth() - position[0]);
+  this->SceneTopRight[1] =
+    static_cast<double>(this->GetScene()->GetSceneHeight() - position[1]);
+  this->SceneTopRight[2] = 0.0;
+  vtkNew<vtkMatrix3x3> inverse;
+  painter->GetTransform()->GetInverse(inverse.GetPointer());
+  inverse->MultiplyPoint(this->SceneBottomLeft, this->SceneBottomLeft);
+  inverse->MultiplyPoint(this->SceneTopRight, this->SceneTopRight);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkHeatmapItem::LineIsVisible(double x0, double y0,
+                                        double x1, double y1)
+{
+  // use local variables to improve readability
+  double xMinScene = this->SceneBottomLeft[0];
+  double yMinScene = this->SceneBottomLeft[1];
+  double xMaxScene = this->SceneTopRight[0];
+  double yMaxScene = this->SceneTopRight[1];
+
+  // if either end point of the line segment falls within the screen,
+  // then the line segment is visible.
+  if ( (xMinScene <= x0 && xMaxScene >= x0 &&
+        yMinScene <= y0 && yMaxScene >= y0) ||
+       (xMinScene <= x1 && xMaxScene >= x1 &&
+        yMinScene <= y1 && yMaxScene >= y1) )
+    {
+    return true;
+    }
+
+  // figure out which end point is "greater" than the other in both dimensions
+  double xMinLine, xMaxLine, yMinLine, yMaxLine;
+  if (x0 < x1)
+    {
+    xMinLine = x0;
+    xMaxLine = x1;
+    }
+  else
+    {
+    xMinLine = x1;
+    xMaxLine = x0;
+    }
+  if (y0 < y1)
+    {
+    yMinLine = y0;
+    yMaxLine = y1;
+    }
+  else
+    {
+    yMinLine = y1;
+    yMaxLine = y0;
+    }
+
+  // case where the Y range of the line falls within the visible scene
+  // and the X range of the line contains the entire visible scene
+  if (yMinScene <= yMinLine && yMaxScene >= yMinLine &&
+      yMinScene <= yMaxLine && yMaxScene >= yMaxLine &&
+      xMinLine <= xMinScene && xMaxLine >= xMaxScene)
+    {
+    return true;
+    }
+
+  // case where the X range of the line falls within the visible scene
+  // and the Y range of the line contains the entire visible scene
+  if (xMinScene <= xMinLine && xMaxScene >= xMinLine &&
+      xMinScene <= xMaxLine && xMaxScene >= xMaxLine &&
+      yMinLine <= yMinScene && yMaxLine >= yMaxScene)
+    {
+    return true;
+    }
+
+  return false;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkHeatmapItem::MouseMoveEvent(const vtkContextMouseEvent &event)
+{
+  if (event.GetButton() == vtkContextMouseEvent::NO_BUTTON)
+    {
+    float pos[3];
+    vtkNew<vtkMatrix3x3> inverse;
+    pos[0] = event.GetPos().GetX();
+    pos[1] = event.GetPos().GetY();
+    pos[2] = 0;
+    this->GetScene()->GetTransform()->GetInverse(inverse.GetPointer());
+    inverse->MultiplyPoint(pos, pos);
+    if (pos[0] <= this->MaxX && pos[0] >= this->MinX &&
+        pos[1] <= this->MaxY && pos[1] >= this->MinY)
+      {
+      this->Tooltip->SetPosition(pos[0], pos[1]);
+
+      std::string tooltipText = this->GetTooltipText(pos[0], pos[1]);
+      if (tooltipText.compare("") != 0)
+        {
+        this->Tooltip->SetText(tooltipText);
+        this->Tooltip->SetVisible(true);
+        this->Scene->SetDirty(true);
+        return true;
+        }
+      }
+    bool shouldRepaint = this->Tooltip->GetVisible();
+    this->Tooltip->SetVisible(false);
+    if (shouldRepaint)
+      {
+      this->Scene->SetDirty(true);
+      }
+    }
+  return false;
+}
+
+//-----------------------------------------------------------------------------
+std::string vtkHeatmapItem::GetTooltipText(float x, float y)
+{
+  int sceneRow = 0;
+  int sceneColumn = 0;
+  int orientation = this->GetOrientation();
+  if (orientation == vtkHeatmapItem::UP_TO_DOWN ||
+      orientation == vtkHeatmapItem::DOWN_TO_UP)
+    {
+    sceneRow = floor(fabs(x - this->Position[0]) / this->CellHeight);
+    sceneColumn = floor((y - this->MinY) / this->CellWidth);
+    }
+  else
+    {
+    sceneRow = floor(fabs(y - this->Position[1]) / this->CellHeight);
+    sceneColumn = floor((x - this->MinX) / this->CellWidth);
+    }
+
+  vtkIdType row = -1;
+  if (static_cast<unsigned int>(sceneRow) < this->SceneRowToTableRowMap.size())
+    {
+    row = this->SceneRowToTableRowMap[sceneRow];
+    }
+  vtkIdType column = -1;
+  if (static_cast<unsigned int>(sceneColumn) <
+      this->SceneColumnToTableColumnMap.size())
+    {
+    column = this->SceneColumnToTableColumnMap[sceneColumn];
+    }
+
+  if (row > -1 && column > -1)
+    {
+    vtkStringArray *rowNames = vtkStringArray::SafeDownCast(
+      this->Table->GetColumn(0));
+    std::string rowName;
+    if (rowNames)
+      {
+      rowName = rowNames->GetValue(row);
+      }
+    else
+      {
+      std::stringstream ss;
+      ss << row;
+      rowName = ss.str();
+      }
+    if (this->BlankRows.find(rowName) != this->BlankRows.end())
+      {
+      return "";
+      }
+
+    std::string columnName = this->Table->GetColumn(column)->GetName();
+
+    std::string tooltipText = "(";
+    tooltipText += rowName;
+    tooltipText += ", ";
+    tooltipText += columnName;
+    tooltipText += ")\n";
+    tooltipText += this->Table->GetValue(row, column).ToString();
+
+    return tooltipText;
+    }
+  return "";
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::SetOrientation(int orientation)
+{
+  vtkIntArray *existingArray = vtkIntArray::SafeDownCast(
+    this->Table->GetFieldData()->GetArray("orientation"));
+  if (existingArray)
+    {
+    existingArray->SetValue(0, orientation);
+    }
+  else
+    {
+    vtkSmartPointer<vtkIntArray> orientationArray =
+      vtkSmartPointer<vtkIntArray>::New();
+    orientationArray->SetNumberOfComponents(1);
+    orientationArray->SetName("orientation");
+    orientationArray->InsertNextValue(orientation);
+    this->Table->GetFieldData()->AddArray(orientationArray);
+    }
+
+  //reposition the legends
+  this->PositionLegends(orientation);
+}
+
+//-----------------------------------------------------------------------------
+int vtkHeatmapItem::GetOrientation()
+{
+  vtkIntArray *orientationArray = vtkIntArray::SafeDownCast(
+    this->Table->GetFieldData()->GetArray("orientation"));
+  if (orientationArray)
+    {
+    return orientationArray->GetValue(0);
+    }
+  return vtkHeatmapItem::LEFT_TO_RIGHT;
+}
+
+//-----------------------------------------------------------------------------
+double vtkHeatmapItem::GetTextAngleForOrientation(int orientation)
+{
+  switch(orientation)
+    {
+    case vtkHeatmapItem::DOWN_TO_UP:
+      return 90.0;
+      break;
+
+    case vtkHeatmapItem::RIGHT_TO_LEFT:
+      return 0.0;
+      break;
+
+    case vtkHeatmapItem::UP_TO_DOWN:
+      return 270.0;
+      break;
+
+    case vtkHeatmapItem::LEFT_TO_RIGHT:
+    default:
+      return 0.0;
+      break;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::ComputeLabelWidth(vtkContext2D *painter)
+{
+  this->RowLabelWidth = 0.0;
+  this->ColumnLabelWidth = 0.0;
+
+  int fontSize = painter->ComputeFontSizeForBoundedString("Igq", VTK_FLOAT_MAX,
+                                                          this->CellHeight);
+  if (fontSize < 8)
+    {
+    return;
+    }
+
+  // temporarily set text to default orientation
+  int orientation = painter->GetTextProp()->GetOrientation();
+  painter->GetTextProp()->SetOrientation(0.0);
+
+  float bounds[4];
+  // find the longest row label
+  vtkStringArray *rowNames = vtkStringArray::SafeDownCast(
+    this->Table->GetColumn(0));
+  if (rowNames)
+    {
+
+    for (vtkIdType row = 0; row != this->Table->GetNumberOfRows(); ++row)
+      {
+      if (this->CollapsedRowsArray &&
+          this->CollapsedRowsArray->GetValue(row) == 1)
+        {
+        continue;
+        }
+      std::string name = rowNames->GetValue(row);
+      painter->ComputeStringBounds(name, bounds);
+      if (bounds[2] > this->RowLabelWidth)
+        {
+        this->RowLabelWidth = bounds[2];
+        }
+      }
+    }
+
+  // find the longest column label
+  for (vtkIdType col = 1; col != this->Table->GetNumberOfColumns(); ++col)
+    {
+    if (this->CollapsedColumnsArray &&
+        this->CollapsedColumnsArray->GetValue(col) == 1)
+      {
+      continue;
+      }
+    std::string name = this->Table->GetColumn(col)->GetName();
+    painter->ComputeStringBounds(name, bounds);
+    if (bounds[2] > this->ColumnLabelWidth)
+      {
+      this->ColumnLabelWidth = bounds[2];
+      }
+    }
+
+  // restore orientation
+  painter->GetTextProp()->SetOrientation(orientation);
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::ComputeBounds()
+{
+  // figure out how many actual rows will be drawn
+  bool currentlyCollapsingRows = false;
+  int numRows = 0;
+  for (vtkIdType row = 0; row != this->Table->GetNumberOfRows(); ++row)
+    {
+    if (this->CollapsedRowsArray &&
+        this->CollapsedRowsArray->GetValue(row) == 1)
+      {
+      // a contiguous block of collapsed rows is represented as a single blank
+      // row by this item.
+      if (!currentlyCollapsingRows)
+        {
+        ++numRows;
+        currentlyCollapsingRows = true;
+        }
+      continue;
+      }
+    currentlyCollapsingRows = false;
+    ++numRows;
+    }
+
+  // figure out how many actual columns will be drawn
+  bool currentlyCollapsingColumns = false;
+  int numColumns = 0;
+  for (vtkIdType col = 1; col != this->Table->GetNumberOfColumns(); ++col)
+    {
+    if (this->CollapsedColumnsArray &&
+        this->CollapsedColumnsArray->GetValue(col) == 1)
+      {
+      // a contiguous block of collapsed columns is represented as a single blank
+      // column by this item.
+      if (!currentlyCollapsingColumns)
+        {
+        ++numColumns;
+        currentlyCollapsingColumns = true;
+        }
+      continue;
+      }
+    currentlyCollapsingColumns = false;
+    ++numColumns;
+    }
+
+  this->MinX = this->Position[0];
+  this->MinY = this->Position[1];
+  switch (this->GetOrientation())
+    {
+    case vtkHeatmapItem::RIGHT_TO_LEFT:
+    case vtkHeatmapItem::LEFT_TO_RIGHT:
+    default:
+      this->MaxX = this->MinX + this->CellWidth * numColumns;
+      this->MaxY = this->MinY + this->CellHeight * numRows;
+      break;
+
+    case vtkHeatmapItem::UP_TO_DOWN:
+    case vtkHeatmapItem::DOWN_TO_UP:
+      this->MaxX = this->MinX + this->CellHeight * numRows;
+      this->MaxY = this->MinY + this->CellWidth * numColumns;
+      break;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::GetBounds(double bounds[4])
+{
+  bounds[0] = this->MinX;
+  bounds[1] = this->MaxX;
+  bounds[2] = this->MinY;
+  bounds[3] = this->MaxY;
+
+  if (this->RowLabelWidth == 0.0 && this->ColumnLabelWidth == 0.0)
+    {
+    return;
+    }
+
+  double spacing = this->CellWidth * 0.25;
+
+  switch (this->GetOrientation())
+    {
+    case vtkHeatmapItem::LEFT_TO_RIGHT:
+    default:
+      bounds[1] += spacing + this->RowLabelWidth;
+      bounds[3] += spacing + this->ColumnLabelWidth;
+      break;
+
+    case vtkHeatmapItem::UP_TO_DOWN:
+      bounds[1] += spacing + this->ColumnLabelWidth;
+      bounds[2] -= spacing + this->RowLabelWidth;
+      break;
+
+    case vtkHeatmapItem::RIGHT_TO_LEFT:
+      bounds[0] -= spacing + this->RowLabelWidth;
+      bounds[3] += spacing + this->ColumnLabelWidth;
+      break;
+
+    case vtkHeatmapItem::DOWN_TO_UP:
+      bounds[1] += spacing + this->ColumnLabelWidth;
+      bounds[3] += spacing + this->RowLabelWidth;
+      break;
+    }
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::MarkRowAsBlank(std::string rowName)
+{
+  this->BlankRows.insert(rowName);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkHeatmapItem::MouseDoubleClickEvent(const vtkContextMouseEvent &event)
+{
+  // get the position of the double click and convert it to scene coordinates
+  double pos[3];
+  vtkNew<vtkMatrix3x3> inverse;
+  pos[0] = event.GetPos().GetX();
+  pos[1] = event.GetPos().GetY();
+  pos[2] = 0;
+  this->GetScene()->GetTransform()->GetInverse(inverse.GetPointer());
+  inverse->MultiplyPoint(pos, pos);
+  if (pos[0] <= this->MaxX && pos[0] >= this->MinX &&
+      pos[1] <= this->MaxY && pos[1] >= this->MinY)
+    {
+    vtkIdType column = 0;
+    int orientation = this->GetOrientation();
+    if (orientation == vtkHeatmapItem::UP_TO_DOWN ||
+        orientation == vtkHeatmapItem::DOWN_TO_UP)
+      {
+      column = floor((pos[1] - this->MinY) / this->CellWidth);
+      }
+    else
+      {
+      column = floor((pos[0] - this->MinX) / this->CellWidth);
+      }
+    ++column;
+
+    if (!this->LegendPositionSet)
+      {
+      this->PositionLegends(this->GetOrientation());
+      }
+
+    if (this->Table->GetValue(0, column).IsString())
+      {
+      // categorical data
+      // generate an array of distinct values from this column
+      vtkStringArray *stringColumn = vtkStringArray::SafeDownCast(
+        this->Table->GetColumn(column));
+      this->CategoryLegendValues->Reset();
+      this->CategoryLegendValues->Squeeze();
+      stringColumn->SetMaxDiscreteValues(stringColumn->GetNumberOfTuples() - 1);
+      stringColumn->GetProminentComponentValues(
+        0, this->CategoryLegendValues.GetPointer());
+      this->CategoryLegendValues->Modified();
+
+      // these distinct values become the input to our categorical legend
+      this->CategoryLegend->SetValues(this->CategoryLegendValues.GetPointer());
+      this->CategoryLegend->SetTitle(this->Table->GetColumn(column)->GetName());
+      this->CategoryLegend->SetVisible(true);
+      this->ColorLegend->SetVisible(false);
+      this->Scene->SetDirty(true);
+      return true;
+      }
+    else
+      {
+      // continuous data
+      // set up the scalar bar legend
+      this->ColorLegend->GetTransferFunction()->SetRange(
+        this->ColumnRanges[column].first,
+        this->ColumnRanges[column].second);
+
+      this->ColorLegend->SetTitle(this->Table->GetColumn(column)->GetName());
+
+      this->ColorLegend->Update();
+      this->ColorLegend->SetVisible(true);
+      this->CategoryLegend->SetVisible(false);
+      this->Scene->SetDirty(true);
+      return true;
+      }
+    }
+  bool shouldRepaint = this->ColorLegend->GetVisible() ||
+                       this->CategoryLegend->GetVisible();
+  this->CategoryLegend->SetVisible(false);
+  this->ColorLegend->SetVisible(false);
+  if (shouldRepaint)
+    {
+    this->Scene->SetDirty(true);
+    }
+
+  return false;
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::PositionLegends(int orientation)
+{
+  // bail out early if we don't have meaningful bounds yet.
+  if (this->MinX > this->MaxX || this->MinY > this->MaxY)
+    {
+    return;
+    }
+
+  switch(orientation)
+    {
+    case vtkHeatmapItem::DOWN_TO_UP:
+    case vtkHeatmapItem::UP_TO_DOWN:
+
+      this->CategoryLegend->SetHorizontalAlignment(vtkChartLegend::RIGHT);
+      this->CategoryLegend->SetVerticalAlignment(vtkChartLegend::CENTER);
+      this->CategoryLegend->SetPoint(
+        this->MinX - this->CellHeight,
+        this->MinY + (this->MaxY - this->MinY) / 2.0);
+
+      this->ColorLegend->SetHorizontalAlignment(vtkChartLegend::RIGHT);
+      this->ColorLegend->SetVerticalAlignment(vtkChartLegend::CENTER);
+      this->ColorLegend->SetOrientation(vtkColorLegend::VERTICAL);
+      this->ColorLegend->SetPoint(
+        this->MinX - this->CellHeight,
+        this->MinY + (this->MaxY - this->MinY) / 2.0);
+      this->ColorLegend->SetTextureSize(
+        this->ColorLegend->GetSymbolWidth(),
+        this->MaxY - this->MinY);
+      break;
+
+    case vtkHeatmapItem::RIGHT_TO_LEFT:
+    case vtkHeatmapItem::LEFT_TO_RIGHT:
+    default:
+
+      this->CategoryLegend->SetHorizontalAlignment(vtkChartLegend::CENTER);
+      this->CategoryLegend->SetVerticalAlignment(vtkChartLegend::TOP);
+      this->CategoryLegend->SetPoint(
+        this->MinX + (this->MaxX - this->MinX) / 2.0,
+        this->MinY - this->CellHeight);
+
+      this->ColorLegend->SetHorizontalAlignment(vtkChartLegend::CENTER);
+      this->ColorLegend->SetVerticalAlignment(vtkChartLegend::TOP);
+      this->ColorLegend->SetOrientation(vtkColorLegend::HORIZONTAL);
+      this->ColorLegend->SetPoint(
+        this->MinX + (this->MaxX - this->MinX) / 2.0,
+        this->MinY - this->CellHeight);
+      this->ColorLegend->SetTextureSize(
+        this->MaxX - this->MinX,
+        this->ColorLegend->GetSymbolWidth());
+      break;
+    }
+  this->LegendPositionSet = true;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkHeatmapItem::Hit(const vtkContextMouseEvent &vtkNotUsed(mouse))
+{
+  // If we are interactive, we want to catch anything that propagates to the
+  // background, otherwise we do not want any mouse events.
+  return this->Interactive;
+}
+
+//-----------------------------------------------------------------------------
+void vtkHeatmapItem::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+  os << "Table: " << (this->Table ? "" : "(null)") << std::endl;
+  if (this->Table->GetNumberOfRows() > 0)
+    {
+    this->Table->PrintSelf(os, indent.GetNextIndent());
+    }
+}
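
Illustrative sketch (not part of the upstream patch): SetTable() above expects
the first column to hold row names; string columns are colored through the
categorical lookup table and numeric columns through the continuous one.  A
minimal input table could be assembled as follows (all names and values are
made up):

    #include "vtkDoubleArray.h"
    #include "vtkNew.h"
    #include "vtkSmartPointer.h"
    #include "vtkStringArray.h"
    #include "vtkTable.h"

    // Build a two-row table: one continuous column, one categorical column.
    vtkSmartPointer<vtkTable> BuildTable()
    {
      vtkSmartPointer<vtkTable> table = vtkSmartPointer<vtkTable>::New();

      vtkNew<vtkStringArray> names;      // column 0: row names
      names->SetName("name");
      names->InsertNextValue("row 0");
      names->InsertNextValue("row 1");

      vtkNew<vtkDoubleArray> metric;     // numeric -> continuous lookup table
      metric->SetName("metric");
      metric->InsertNextValue(1.5);
      metric->InsertNextValue(4.2);

      vtkNew<vtkStringArray> category;   // strings -> categorical lookup table
      category->SetName("category");
      category->InsertNextValue("a");
      category->InsertNextValue("a");

      table->AddColumn(names.GetPointer());
      table->AddColumn(metric.GetPointer());
      table->AddColumn(category.GetPointer());
      return table;
    }
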
diff --git a/Views/Infovis/vtkHeatmapItem.h b/Views/Infovis/vtkHeatmapItem.h
new file mode 100644
index 0000000..bb750f8
--- /dev/null
+++ b/Views/Infovis/vtkHeatmapItem.h
@@ -0,0 +1,258 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkHeatmapItem.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkHeatmapItem - A 2D graphics item for rendering a heatmap
+//
+// .SECTION Description
+// This item draws a heatmap as a part of a vtkContextScene.
+//
+// .SEE ALSO
+// vtkTable
+
+#ifndef __vtkHeatmapItem_h
+#define __vtkHeatmapItem_h
+
+#include "vtkViewsInfovisModule.h" // For export macro
+#include "vtkContextItem.h"
+
+#include "vtkNew.h"                // For vtkNew ivars
+#include "vtkSmartPointer.h"       // For vtkSmartPointer ivars
+#include "vtkVector.h"             // For vtkVector2f ivar
+#include <map>                     // For column ranges
+#include <set>                     // For blank row support
+#include <vector>                  // For row mapping
+
+class vtkBitArray;
+class vtkCategoryLegend;
+class vtkColorLegend;
+class vtkLookupTable;
+class vtkStringArray;
+class vtkTable;
+class vtkTooltipItem;
+class vtkVariantArray;
+
+class VTKVIEWSINFOVIS_EXPORT vtkHeatmapItem : public vtkContextItem
+{
+public:
+  static vtkHeatmapItem *New();
+  vtkTypeMacro(vtkHeatmapItem, vtkContextItem);
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Set the table that this item draws.  The first column of the table
+  // must contain the names of the rows.
+  virtual void SetTable(vtkTable *table);
+
+  // Description:
+  // Get the table that this item draws.
+  vtkTable * GetTable();
+
+  // Description:
+  // Set which way the table should face within the visualization.
+  void SetOrientation(int orientation);
+
+  // Description:
+  // Get the current heatmap orientation.
+  int GetOrientation();
+
+  // Description:
+  // Get the angle that row labels should be rotated for the corresponding
+  // heatmap orientation.  For the default orientation (LEFT_TO_RIGHT), this
+  // is 0 degrees.
+  double GetTextAngleForOrientation(int orientation);
+
+  // Description:
+  // Set the position of the heatmap.
+  vtkSetVector2Macro(Position, float);
+  void SetPosition(const vtkVector2f &pos);
+
+  // Description:
+  // Get position of the heatmap.
+  vtkGetVector2Macro(Position, float);
+  vtkVector2f GetPositionVector();
+
+  // Description:
+  // Get/Set the height of the cells in our heatmap.
+  // Default is 18 pixels.
+  vtkGetMacro(CellHeight, double);
+  vtkSetMacro(CellHeight, double);
+
+  // Description:
+  // Get/Set the width of the cells in our heatmap.
+  // Default is 36 pixels.
+  vtkGetMacro(CellWidth, double);
+  vtkSetMacro(CellWidth, double);
+
+  // Description:
+  // Get the bounds for this item as (Xmin,Xmax,Ymin,Ymax).
+  virtual void GetBounds(double bounds[4]);
+
+  // Description:
+  // Mark a row as blank, meaning that no cells will be drawn for it.
+  // Used by vtkTreeHeatmapItem to represent missing data.
+  void MarkRowAsBlank(std::string rowName);
+
+  // Description:
+  // Paints the table as a heatmap.
+  virtual bool Paint(vtkContext2D *painter);
+
+  // Description:
+  // Get the width of the largest row or column label drawn by this
+  // heatmap.
+  vtkGetMacro(RowLabelWidth, float);
+  vtkGetMacro(ColumnLabelWidth, float);
+
+  //BTX
+
+  // Description:
+  // Enum for Orientation.
+  enum
+    {
+    LEFT_TO_RIGHT,
+    UP_TO_DOWN,
+    RIGHT_TO_LEFT,
+    DOWN_TO_UP
+    };
+
+  // Description:
+  // Returns true if this item is interactive, false otherwise.
+  virtual bool Hit(const vtkContextMouseEvent &mouse);
+
+  // Description:
+  // Display a tooltip when the user mouses over a cell in the heatmap.
+  virtual bool MouseMoveEvent(const vtkContextMouseEvent &event);
+
+  // Description:
+  // Display a legend for a column of data.
+  virtual bool MouseDoubleClickEvent(const vtkContextMouseEvent &event);
+
+  //ETX
+
+protected:
+  vtkHeatmapItem();
+  ~vtkHeatmapItem();
+
+  vtkVector2f PositionVector;
+  float* Position;
+
+  // Description:
+  // Generate some data needed for painting.  We cache this information as
+  // it only needs to be generated when the input data changes.
+  virtual void RebuildBuffers();
+
+  // Description:
+  // This function does the bulk of the actual work in rendering our heatmap.
+  virtual void PaintBuffers(vtkContext2D *painter);
+
+  // Description:
+  // This function returns a bool indicating whether or not we need to rebuild
+  // our cached data before painting.
+  virtual bool IsDirty();
+
+  // Description:
+  // Generate a separate vtkLookupTable for each column in the table.
+  void InitializeLookupTables();
+
+  // Description:
+  // Helper function.  Find the prominent, distinct values in the specified
+  // column of strings and add them to our "master list" of categorical values.
+  // This list is then used to generate a vtkLookupTable for all categorical
+  // data within the heatmap.
+  void AccumulateProminentCategoricalDataValues(vtkIdType column);
+
+  // Description:
+  // Setup the default lookup table to use for continuous (not categorical)
+  // data.
+  void GenerateContinuousDataLookupTable();
+
+  // Description:
+  // Setup the default lookup table to use for categorical (not continuous)
+  // data.
+  void GenerateCategoricalDataLookupTable();
+
+  // Description:
+  // Get the value for the cell of the heatmap located at scene position (x, y)
+  // This function assumes the caller has already determined that (x, y) falls
+  // within the heatmap.
+  std::string GetTooltipText(float x, float y);
+
+  // Description:
+  // Calculate the extent of the data that is visible within the window.
+  // This information is used to ensure that we only draw details that
+  // will be seen by the user.  This improves rendering speed, particularly
+  // for larger data.
+  void UpdateVisibleSceneExtent(vtkContext2D *painter);
+
+  // Description:
+  // Returns true if any part of the line segment defined by endpoints
+  // (x0, y0), (x1, y1) falls within the extent of the currently
+  // visible scene.  Returns false otherwise.
+  bool LineIsVisible(double x0, double y0, double x1, double y1);
+
+  // Description:
+  // Compute the extent of the heatmap.  This does not include
+  // the text labels.
+  void ComputeBounds();
+
+  // Description:
+  // Compute the width of our longest row label and the width of our
+  // longest column label.  These values are used by GetBounds().
+  void ComputeLabelWidth(vtkContext2D *painter);
+
+  // Setup the position, size, and orientation of this heatmap's color
+  // legend based on the heatmap's current orientation.
+  void PositionColorLegend(int orientation);
+
+  // Setup the position, size, and orientation of this heatmap's
+  // legends based on the heatmap's current orientation.
+  void PositionLegends(int orientation);
+
+  vtkSmartPointer<vtkTable> Table;
+
+private:
+  vtkHeatmapItem(const vtkHeatmapItem&); // Not implemented
+  void operator=(const vtkHeatmapItem&); // Not implemented
+
+  unsigned long HeatmapBuildTime;
+  vtkNew<vtkCategoryLegend> CategoryLegend;
+  vtkNew<vtkColorLegend> ColorLegend;
+  vtkNew<vtkTooltipItem> Tooltip;
+  vtkNew<vtkLookupTable> ContinuousDataLookupTable;
+  vtkNew<vtkLookupTable> CategoricalDataLookupTable;
+  vtkNew<vtkLookupTable> ColorLegendLookupTable;
+  vtkNew<vtkStringArray> CategoricalDataValues;
+  vtkNew<vtkVariantArray> CategoryLegendValues;
+  double CellWidth;
+  double CellHeight;
+
+  std::map< vtkIdType, std::pair< double, double > > ColumnRanges;
+  std::vector< vtkIdType > SceneRowToTableRowMap;
+  std::vector< vtkIdType > SceneColumnToTableColumnMap;
+  std::set<std::string> BlankRows;
+
+  double MinX;
+  double MinY;
+  double MaxX;
+  double MaxY;
+  double SceneBottomLeft[3];
+  double SceneTopRight[3];
+  float RowLabelWidth;
+  float ColumnLabelWidth;
+
+  vtkBitArray* CollapsedRowsArray;
+  vtkBitArray* CollapsedColumnsArray;
+  bool LegendPositionSet;
+};
+
+#endif
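
Illustrative usage sketch (not part of the upstream patch), assuming the usual
vtkContextView setup and the hypothetical BuildTable() helper sketched after
vtkHeatmapItem.cxx above:

    #include "vtkContextScene.h"
    #include "vtkContextView.h"
    #include "vtkHeatmapItem.h"
    #include "vtkNew.h"
    #include "vtkRenderWindow.h"
    #include "vtkRenderWindowInteractor.h"
    #include "vtkSmartPointer.h"
    #include "vtkTable.h"

    vtkSmartPointer<vtkTable> BuildTable();  // hypothetical helper, see above

    int main(int, char *[])
    {
      vtkNew<vtkHeatmapItem> heatmap;
      heatmap->SetTable(BuildTable());
      heatmap->SetOrientation(vtkHeatmapItem::LEFT_TO_RIGHT);  // the default

      vtkNew<vtkContextView> view;
      view->GetScene()->AddItem(heatmap.GetPointer());
      view->GetRenderWindow()->Render();
      // Mouse over a cell for a tooltip; double-click a column for its legend.
      view->GetInteractor()->Start();
      return 0;
    }
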
diff --git a/Views/Infovis/vtkParallelCoordinatesRepresentation.cxx b/Views/Infovis/vtkParallelCoordinatesRepresentation.cxx
index d84077e..fa3a507 100644
--- a/Views/Infovis/vtkParallelCoordinatesRepresentation.cxx
+++ b/Views/Infovis/vtkParallelCoordinatesRepresentation.cxx
@@ -78,6 +78,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkUnsignedIntArray.h"
 #include "vtkViewTheme.h"
 
+#include <algorithm>
 #include <vector>
 #include <vtksys/ios/sstream>
 
@@ -281,26 +282,13 @@ vtkParallelCoordinatesRepresentation::vtkParallelCoordinatesRepresentation()
 //------------------------------------------------------------------------------
 vtkParallelCoordinatesRepresentation::~vtkParallelCoordinatesRepresentation()
 {
-  if (I)
-    delete I;
-
-  if (this->Maxs)
-    delete [] this->Maxs;
-
-  if (this->Mins)
-    delete [] this->Mins;
-
-  if (this->MaxOffsets)
-    delete [] this->MaxOffsets;
-
-  if (this->MinOffsets)
-    delete [] this->MinOffsets;
-
-  if (this->Axes)
-    delete [] this->Axes;
-
-  if (this->Xs)
-    delete [] this->Xs;
+  delete I;
+  delete [] this->Maxs;
+  delete [] this->Mins;
+  delete [] this->MaxOffsets;
+  delete [] this->MinOffsets;
+  delete [] this->Axes;
+  delete [] this->Xs;
 
   this->SetInternalHoverText(0);
 }
@@ -860,12 +848,12 @@ int vtkParallelCoordinatesRepresentation::UpdatePlotProperties(vtkStringArray* i
 // Clear out all of the arrays and intialize them to defaults where appropriate.
 int vtkParallelCoordinatesRepresentation::ReallocateInternals()
 {
-  if (this->Maxs) delete [] this->Maxs;
-  if (this->Mins) delete [] this->Mins;
-  if (this->MaxOffsets) delete [] this->MaxOffsets;
-  if (this->MinOffsets) delete [] this->MinOffsets;
-  if (this->Axes)  delete [] this->Axes;
-  if (this->Xs) delete [] this->Xs;
+  delete [] this->Maxs;
+  delete [] this->Mins;
+  delete [] this->MaxOffsets;
+  delete [] this->MinOffsets;
+  delete [] this->Axes;
+  delete [] this->Xs;
 
   this->Maxs = new double[this->NumberOfAxes];
   this->Mins = new double[this->NumberOfAxes];
@@ -1239,6 +1227,7 @@ int vtkParallelCoordinatesRepresentation::PlaceCurves(vtkPolyData* polyData,
           this->Maxs[position] + this->MaxOffsets[position],
           points));
       }
+    iter->Delete();
     }
 
   // make a s-curve from (0,0) to (1,1) with the right number of segments.
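
Context for the destructor and ReallocateInternals() changes above (not part
of the patch itself): delete and delete[] are defined no-ops on a null
pointer, so the removed "if (ptr)" guards were redundant, e.g.:

    double *xs = NULL;
    delete [] xs;  // well-defined: deleting a null pointer does nothing
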
diff --git a/Views/Infovis/vtkParallelCoordinatesView.cxx b/Views/Infovis/vtkParallelCoordinatesView.cxx
index 0b476e5..2f1719c 100644
--- a/Views/Infovis/vtkParallelCoordinatesView.cxx
+++ b/Views/Infovis/vtkParallelCoordinatesView.cxx
@@ -45,7 +45,7 @@ PURPOSE.  See the above copyright notice for more information.
 #include "vtkViewTheme.h"
 
 #include <vtksys/ios/sstream>
-#include <assert.h>
+#include <cassert>
 
 vtkStandardNewMacro(vtkParallelCoordinatesView);
 
diff --git a/Views/Infovis/vtkSCurveSpline.cxx b/Views/Infovis/vtkSCurveSpline.cxx
new file mode 100644
index 0000000..9aabe70
--- /dev/null
+++ b/Views/Infovis/vtkSCurveSpline.cxx
@@ -0,0 +1,256 @@
+/*=========================================================================
+
+Program:   Visualization Toolkit
+Module:    vtkSCurveSpline.cxx
+
+Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+All rights reserved.
+See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+This software is distributed WITHOUT ANY WARRANTY; without even
+the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+/*-------------------------------------------------------------------------
+  Copyright 2009 Sandia Corporation.
+  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+  the U.S. Government retains certain rights in this software.
+  -------------------------------------------------------------------------*/
+#include "vtkSCurveSpline.h"
+
+#include "vtkObjectFactory.h"
+#include "vtkPiecewiseFunction.h"
+#include <cassert>
+#include <algorithm> // for std::min()/std::max()
+
+vtkStandardNewMacro(vtkSCurveSpline);
+
+//----------------------------------------------------------------------------
+// Construct a SCurve Spline.
+vtkSCurveSpline::vtkSCurveSpline()
+{
+  this->NodeWeight = 0.0;
+}
+
+//----------------------------------------------------------------------------
+// Evaluate a 1D Spline
+double vtkSCurveSpline::Evaluate (double t)
+{
+  int index;
+  double *intervals;
+  double *coefficients;
+
+  // check to see if we need to recompute the spline
+  if (this->ComputeTime < this->GetMTime())
+    {
+    this->Compute ();
+    }
+
+  // make sure we have at least 2 points
+  int size = this->PiecewiseFunction->GetSize();
+
+  if (size < 2)
+    {
+    return 0.0;
+    }
+
+  intervals = this->Intervals;
+  coefficients = this->Coefficients;
+
+  if ( this->Closed )
+    {
+    size = size + 1;
+    }
+
+  // clamp the function at both ends
+  if (t < intervals[0])
+    {
+    t = intervals[0];
+    }
+  if (t > intervals[size - 1])
+    {
+    t = intervals[size - 1];
+    }
+
+  // find pointer to cubic spline coefficient using bisection method
+  index = this->FindIndex(size,t);
+
+  // calculate offset within interval
+  t = (t - intervals[index]);
+
+  // normalize to unit width
+  t /= intervals[index+1] - intervals[index];
+
+  // apply weighting function
+  if (this->NodeWeight > 0.0)
+    {
+    double shift = t * (t * (t * (-4*this->NodeWeight)
+                             + (6*this->NodeWeight)))
+      - this->NodeWeight;
+    // clamp t
+    t = std::max(std::min(t+shift,1.0),0.0);
+    }
+
+  // evaluate intervals value y
+  return (t * (t * (t * *(coefficients + index * 3 + 2) // a
+                    + *(coefficients + index * 3 + 1))) // b
+          + *(coefficients + index * 3)); // d
+}
+
+//----------------------------------------------------------------------------
+// Compute SCurve Splines for each dependent variable
+void vtkSCurveSpline::Compute ()
+{
+  double *ts, *xs;
+  //  double *work;
+  double *coefficients;
+  double *dependent;
+  int size;
+  int i;
+
+  // Make sure the function is up to date.
+  //this->PiecewiseFunction->Update();
+
+  // get the size of the independent variables
+  size = this->PiecewiseFunction->GetSize ();
+
+  if(size < 2)
+    {
+    vtkErrorMacro("Cannot compute a spline with less than 2 points. # of points is: " << size);
+    return;
+    }
+
+  // copy the independent variables. Note that if the spline is closed,
+  // the first and last points are assumed to be repeated, so we add an
+  // extra point.
+  if (this->Intervals)
+    {
+    delete [] this->Intervals;
+    }
+
+  if ( !this->Closed )
+    {
+    this->Intervals = new double[size];
+    ts = this->PiecewiseFunction->GetDataPointer ();
+    for (i = 0; i < size; i++)
+      {
+      this->Intervals[i] = *(ts + 2*i);
+      }
+
+    // allocate memory for work arrays
+    //    work = new double[size];
+
+    // allocate memory for coefficients
+    if (this->Coefficients)
+      {
+      delete [] this->Coefficients;
+      }
+    this->Coefficients = new double [3*size];
+
+    // allocate memory for dependent variables
+    dependent = new double [size];
+
+    // get start of coefficients for this dependent variable
+    coefficients = this->Coefficients;
+
+    // get the dependent variable values
+    xs = this->PiecewiseFunction->GetDataPointer () + 1;
+    for (int j = 0; j < size; j++)
+      {
+      *(dependent + j) = *(xs + 2*j);
+      }
+
+    for (int k = 0; k < size-1; k++)
+      {
+      *(coefficients + 3*k) = dependent[k]; // d
+      *(coefficients + 3*k+1) = 3*(dependent[k+1]-dependent[k]); // b
+      *(coefficients + 3*k+2) = -2*(dependent[k+1]-dependent[k]); // a
+      }
+    *(coefficients + 3*(size-1)) = dependent[size-1];
+    *(coefficients + 3*(size-1)+1) = dependent[size-1];
+    *(coefficients + 3*(size-1)+2) = dependent[size-1];
+    }
+
+  else //add extra "fictitious" point to close loop
+    {
+    size = size + 1;
+    this->Intervals = new double[size];
+    ts = this->PiecewiseFunction->GetDataPointer ();
+    for (i = 0; i < size-1; i++)
+      {
+      this->Intervals[i] = *(ts + 2*i);
+      }
+    if ( this->ParametricRange[0] != this->ParametricRange[1] )
+      {
+      this->Intervals[size-1] = this->ParametricRange[1];
+      }
+    else
+      {
+      this->Intervals[size-1] = this->Intervals[size-2] + 1.0;
+      }
+
+    // allocate memory for work arrays
+    //    work = new double[size];
+
+    // allocate memory for coefficients
+    if (this->Coefficients)
+      {
+      delete [] this->Coefficients;
+      }
+    //this->Coefficients = new double [4*size];
+    this->Coefficients = new double [3*size];
+
+    // allocate memory for dependent variables
+    dependent = new double [size];
+
+    // get start of coefficients for this dependent variable
+    coefficients = this->Coefficients;
+
+    // get the dependent variable values
+    xs = this->PiecewiseFunction->GetDataPointer () + 1;
+    for (int j = 0; j < size-1; j++)
+      {
+      *(dependent + j) = *(xs + 2*j);
+      }
+    dependent[size-1] = *xs;
+
+    for (int k = 0; k < size-1; k++)
+      {
+      *(coefficients + 3*k) = dependent[k]; // d
+      *(coefficients + 3*k+1) = 3*(dependent[k+1]-dependent[k]); // b
+      *(coefficients + 3*k+2) = -2*(dependent[k+1]-dependent[k]); // a
+      }
+    *(coefficients + 3*(size-1)) = dependent[size-1];
+    *(coefficients + 3*(size-1)+1) = dependent[size-1];
+    *(coefficients + 3*(size-1)+2) = dependent[size-1];
+    }
+
+  // free the work array and dependent variable storage
+  //delete [] work;
+  delete [] dependent;
+
+  // update compute time
+  this->ComputeTime = this->GetMTime();
+}
+
+//----------------------------------------------------------------------------
+void vtkSCurveSpline::DeepCopy(vtkSpline *s)
+{
+  vtkSCurveSpline *spline = vtkSCurveSpline::SafeDownCast(s);
+
+  if ( spline != NULL )
+    {
+    //nothing to do
+    }
+
+  // Now do superclass
+  this->vtkSpline::DeepCopy(s);
+}
+
+//----------------------------------------------------------------------------
+void vtkSCurveSpline::PrintSelf(ostream& os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os,indent);
+  os << "NodeWeight: " << this->NodeWeight << endl;
+}
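
For reference, the per-interval cubic that Compute() stores above (d = y_k,
b = 3*(y_{k+1} - y_k), a = -2*(y_{k+1} - y_k), no linear term) is the familiar
smoothstep blend between neighbouring knots. A minimal standalone sketch of
that closed form (the helper name is illustrative, not part of VTK):

    // y(t) = d + b*t^2 + a*t^3 reduces to the smoothstep blend below, t in [0,1]
    double SCurveBlend(double y0, double y1, double t)
    {
      return y0 + (3.0*t*t - 2.0*t*t*t) * (y1 - y0);
    }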
diff --git a/Views/Infovis/vtkSCurveSpline.h b/Views/Infovis/vtkSCurveSpline.h
new file mode 100644
index 0000000..fb95592
--- /dev/null
+++ b/Views/Infovis/vtkSCurveSpline.h
@@ -0,0 +1,69 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkSCurveSpline.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+/*-------------------------------------------------------------------------
+  Copyright 2009 Sandia Corporation.
+  Under the terms of Contract DE-AC04-94AL85000 with Sandia Corporation,
+  the U.S. Government retains certain rights in this software.
+  -------------------------------------------------------------------------*/
+// .NAME vtkSCurveSpline - computes an interpolating spline using
+// an SCurve basis.
+
+// .SECTION Description
+// vtkSCurveSpline is a concrete implementation of vtkSpline using
+// an SCurve basis.
+
+// .SECTION See Also
+// vtkSpline vtkKochanekSpline
+
+#ifndef __vtkSCurveSpline_h
+#define __vtkSCurveSpline_h
+
+#include "vtkViewsInfovisModule.h" // For export macro
+#include "vtkSpline.h"
+
+class VTKVIEWSINFOVIS_EXPORT vtkSCurveSpline : public vtkSpline
+{
+public:
+  static vtkSCurveSpline *New();
+
+  vtkTypeMacro(vtkSCurveSpline,vtkSpline);
+  void PrintSelf(ostream& os, vtkIndent indent);
+
+  // Description:
+  // Compute SCurve Splines for each dependent variable
+  void Compute ();
+
+  // Description:
+  // Evaluate a 1D SCurve spline.
+  virtual double Evaluate (double t);
+
+  // Description:
+  // Deep copy of SCurve spline data.
+  virtual void DeepCopy(vtkSpline *s);
+
+  vtkSetMacro(NodeWeight,double);
+  vtkGetMacro(NodeWeight,double);
+protected:
+  vtkSCurveSpline();
+  ~vtkSCurveSpline() {}
+
+  double NodeWeight;
+
+private:
+  vtkSCurveSpline(const vtkSCurveSpline&);  // Not implemented.
+  void operator=(const vtkSCurveSpline&);  // Not implemented.
+};
+
+#endif
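
A minimal usage sketch for the class declared above, assuming only the knot
interface inherited from vtkSpline (the knot values below are arbitrary):

    #include "vtkNew.h"
    #include "vtkSCurveSpline.h"

    // Evaluate an S-curve spline through three (t, x) knots.
    double SampleSCurve()
    {
      vtkNew<vtkSCurveSpline> spline;
      spline->AddPoint(0.0, 1.0);   // AddPoint(t, x) comes from vtkSpline
      spline->AddPoint(1.0, 3.0);
      spline->AddPoint(2.0, 0.5);
      spline->SetNodeWeight(0.2);   // optional weighting applied in Evaluate()
      return spline->Evaluate(0.5); // Compute() runs lazily on the first call
    }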
diff --git a/Views/Infovis/vtkTanglegramItem.cxx b/Views/Infovis/vtkTanglegramItem.cxx
new file mode 100644
index 0000000..e1932ca
--- /dev/null
+++ b/Views/Infovis/vtkTanglegramItem.cxx
@@ -0,0 +1,746 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTanglegramItem.cxx
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+#include "vtkTanglegramItem.h"
+
+#include "vtkColorSeries.h"
+#include "vtkContext2D.h"
+#include "vtkDataSetAttributes.h"
+#include "vtkDendrogramItem.h"
+#include "vtkDoubleArray.h"
+#include "vtkIdTypeArray.h"
+#include "vtkLookupTable.h"
+#include "vtkObjectFactory.h"
+#include "vtkPen.h"
+#include "vtkStringArray.h"
+#include "vtkTextProperty.h"
+#include "vtkTree.h"
+#include "vtkTreeBFSIterator.h"
+#include "vtkTreeDFSIterator.h"
+#include "vtkVariantArray.h"
+
+#include <queue>
+
+vtkStandardNewMacro(vtkTanglegramItem);
+
+//-----------------------------------------------------------------------------
+vtkTanglegramItem::vtkTanglegramItem()
+{
+  this->Dendrogram1 = vtkSmartPointer<vtkDendrogramItem>::New();
+  this->Dendrogram1->ExtendLeafNodesOn();
+  this->AddItem(this->Dendrogram1);
+
+  this->Dendrogram2 = vtkSmartPointer<vtkDendrogramItem>::New();
+  this->Dendrogram2->ExtendLeafNodesOn();
+  this->AddItem(this->Dendrogram2);
+
+  this->Table = vtkSmartPointer<vtkTable>::New();
+  this->Tree1Label = NULL;
+  this->Tree2Label = NULL;
+
+  this->LookupTable = vtkSmartPointer<vtkLookupTable>::New();
+
+  this->PositionSet = false;
+  this->TreeReordered = false;
+  this->Interactive = true;
+
+  this->Orientation = vtkDendrogramItem::LEFT_TO_RIGHT;
+
+  this->MinimumVisibleFontSize = 8;
+  this->LabelSizeDifference = 4;
+
+  this->CorrespondenceLineWidth = 2.0;
+}
+
+//-----------------------------------------------------------------------------
+vtkTanglegramItem::~vtkTanglegramItem()
+{
+  delete []this->Tree1Label;
+  delete []this->Tree2Label;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::SetTree1(vtkTree *tree)
+{
+  this->Dendrogram1->SetTree(tree);
+  this->Dendrogram1->SetOrientation(this->Orientation);
+  this->PositionSet = false;
+  this->TreeReordered = false;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::SetTree2(vtkTree *tree)
+{
+  this->Dendrogram2->SetTree(tree);
+  this->Dendrogram2->SetOrientation((this->Orientation + 2) % 4);
+  this->PositionSet = false;
+  this->TreeReordered = false;
+}
+
+//-----------------------------------------------------------------------------
+vtkTable * vtkTanglegramItem::GetTable()
+{
+  return this->Table;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::SetTable(vtkTable *table)
+{
+  if (table == NULL)
+    {
+    return;
+    }
+
+  this->Table = table;
+
+  this->SourceNames = vtkStringArray::SafeDownCast(this->Table->GetColumn(0));
+  this->GenerateLookupTable();
+  this->TreeReordered = false;
+}
+
+//-----------------------------------------------------------------------------
+bool vtkTanglegramItem::Paint(vtkContext2D *painter)
+{
+  this->RefreshBuffers(painter);
+
+  if (!this->TreeReordered)
+    {
+    this->ReorderTree();
+
+    // this will force Dendrogram2's PrunedTree to re-copy itself from the
+    // newly rearranged tree.
+    this->Dendrogram2->PrepareToPaint(painter);
+    }
+
+  if (!this->PositionSet)
+    {
+    this->PositionTree2();
+    }
+
+  this->PaintChildren(painter);
+
+  if (this->Table != NULL)
+    {
+    this->PaintCorrespondenceLines(painter);
+    }
+
+  if (this->Tree1Label != NULL || this->Tree2Label != NULL)
+    {
+    this->PaintTreeLabels(painter);
+    }
+
+  return true;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::RefreshBuffers(vtkContext2D *painter)
+{
+  this->Dendrogram1->PrepareToPaint(painter);
+  this->Spacing = this->Dendrogram1->GetLeafSpacing();
+  this->Dendrogram1->GetBounds(this->Tree1Bounds);
+  this->LabelWidth1 = this->Dendrogram1->GetLabelWidth();
+
+  this->Dendrogram2->PrepareToPaint(painter);
+  this->Dendrogram2->GetBounds(this->Tree2Bounds);
+  this->LabelWidth2 = this->Dendrogram2->GetLabelWidth();
+
+  this->Tree1Names = vtkStringArray::SafeDownCast(
+    this->Dendrogram1->GetPrunedTree()->GetVertexData()->
+    GetAbstractArray("node name"));
+
+  this->Tree2Names = vtkStringArray::SafeDownCast(
+    this->Dendrogram2->GetPrunedTree()->GetVertexData()->
+    GetAbstractArray("node name"));
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::PositionTree2()
+{
+  // values used to calculate the amount of space we should leave between
+  // the two trees.
+  double averageX =
+    ((abs(this->Tree1Bounds[1] - this->Tree1Bounds[0]) +
+      abs(this->Tree2Bounds[1] - this->Tree2Bounds[0])) / 2.0);
+  double averageY =
+    ((abs(this->Tree1Bounds[3] - this->Tree1Bounds[2]) +
+      abs(this->Tree2Bounds[3] - this->Tree2Bounds[2])) / 2.0);
+
+  // the starting X position for tree #2
+  double x, x1, x2;
+
+  // the starting Y position for tree #2
+  double y, y1, y2;
+
+  switch(this->Orientation)
+    {
+    case vtkDendrogramItem::DOWN_TO_UP:
+      x1 = (this->Tree1Bounds[1] + this->Tree1Bounds[0]) / 2.0;
+      x2 = (this->Tree2Bounds[1] + this->Tree2Bounds[0]) / 2.0;
+      x = x1 - x2;
+
+      y = this->Tree1Bounds[3] +
+        abs(this->Tree2Bounds[3] - this->Tree2Bounds[2]) +
+        averageY;
+      break;
+
+    case vtkDendrogramItem::UP_TO_DOWN:
+      x1 = (this->Tree1Bounds[1] + this->Tree1Bounds[0]) / 2.0;
+      x2 = (this->Tree2Bounds[1] + this->Tree2Bounds[0]) / 2.0;
+      x = x1 - x2;
+
+      y = this->Tree1Bounds[2] -
+        abs(this->Tree2Bounds[3] - this->Tree2Bounds[2]) -
+        averageY;
+      break;
+
+    case vtkDendrogramItem::RIGHT_TO_LEFT:
+
+      x = this->Tree1Bounds[0] -
+        abs(this->Tree2Bounds[1] - this->Tree2Bounds[0]) -
+        averageX;
+
+      y1 = (this->Tree1Bounds[3] + this->Tree1Bounds[2]) / 2.0;
+      y2 = (this->Tree2Bounds[3] + this->Tree2Bounds[2]) / 2.0;
+      y = y1 - y2;
+      break;
+
+    case vtkDendrogramItem::LEFT_TO_RIGHT:
+    default:
+
+      x = this->Tree1Bounds[1] +
+        abs(this->Tree2Bounds[1] - this->Tree2Bounds[0]) +
+        averageX;
+
+      y1 = (this->Tree1Bounds[3] + this->Tree1Bounds[2]) / 2.0;
+      y2 = (this->Tree2Bounds[3] + this->Tree2Bounds[2]) / 2.0;
+      y = y1 - y2;
+      break;
+    }
+
+  this->Dendrogram2->SetPosition(x, y);
+  this->PositionSet = true;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::PaintCorrespondenceLines(vtkContext2D *painter)
+{
+  int textOrientation = painter->GetTextProp()->GetOrientation();
+  painter->GetTextProp()->SetOrientation(0.0);
+
+  float previousWidth = painter->GetPen()->GetWidth();
+  painter->GetPen()->SetWidth(this->CorrespondenceLineWidth);
+
+  for (vtkIdType row = 0; row < this->Table->GetNumberOfRows();
+       ++row)
+    {
+    std::string source = this->SourceNames->GetValue(row);
+    vtkIdType tree1Index = this->Tree1Names->LookupValue(source);
+    if (tree1Index == -1)
+      {
+      continue;
+      }
+
+    double sourcePosition[2] = {0, 0};
+    if (!this->Dendrogram1->GetPositionOfVertex(source, sourcePosition))
+      {
+      continue;
+      }
+    double sourceEdgePosition[2];
+    sourceEdgePosition[0] = sourcePosition[0];
+    sourceEdgePosition[1] = sourcePosition[1];
+
+    for (vtkIdType col = 1; col < this->Table->GetNumberOfColumns(); ++col)
+      {
+      double matrixValue = this->Table->GetValue(row, col).ToDouble();
+      if (matrixValue == 0.0)
+        {
+        continue;
+        }
+
+      std::string target = this->Table->GetColumnName(col);
+      if (target == "")
+        {
+        continue;
+        }
+
+      vtkIdType tree2Index = this->Tree2Names->LookupValue(target);
+      if (tree2Index == -1)
+        {
+        continue;
+        }
+
+      double targetPosition[2] = {0, 0};
+      if (!this->Dendrogram2->GetPositionOfVertex(target, targetPosition))
+        {
+        continue;
+        }
+      double targetEdgePosition[2];
+      targetEdgePosition[0] = targetPosition[0];
+      targetEdgePosition[1] = targetPosition[1];
+
+      int fontSize =
+        painter->ComputeFontSizeForBoundedString("Igq", VTK_FLOAT_MAX,
+                                                 this->Spacing);
+
+      switch(this->Orientation)
+        {
+        case vtkDendrogramItem::DOWN_TO_UP:
+          if (fontSize < this->MinimumVisibleFontSize)
+            {
+            sourcePosition[1] = this->Tree1Bounds[3] + this->Spacing;
+            targetPosition[1] = this->Tree2Bounds[2] - this->Spacing;
+            }
+          else
+            {
+            float stringBounds[4];
+            painter->ComputeStringBounds(source, stringBounds);
+            sourcePosition[1] =
+              this->Tree1Bounds[3] - (this->LabelWidth1 - stringBounds[2]);
+
+            sourceEdgePosition[1] = this->Tree1Bounds[3] + this->Spacing;
+
+            targetEdgePosition[1] = this->Tree2Bounds[2] - this->Spacing;
+
+            painter->ComputeStringBounds(target, stringBounds);
+            targetPosition[1] =
+              this->Tree2Bounds[2] + (this->LabelWidth2 - stringBounds[2]);
+            }
+          break;
+
+        case vtkDendrogramItem::UP_TO_DOWN:
+          if (fontSize < this->MinimumVisibleFontSize)
+            {
+            sourcePosition[1] = this->Tree1Bounds[2] - this->Spacing;
+            targetPosition[1] = this->Tree2Bounds[3] + this->Spacing;
+            }
+          else
+            {
+            float stringBounds[4];
+            painter->ComputeStringBounds(source, stringBounds);
+            sourcePosition[1] =
+              this->Tree1Bounds[2] + (this->LabelWidth1 - stringBounds[2]);
+
+            sourceEdgePosition[1] = this->Tree1Bounds[2] - this->Spacing;
+
+            targetEdgePosition[1] = this->Tree2Bounds[3] + this->Spacing;
+
+            painter->ComputeStringBounds(target, stringBounds);
+            targetPosition[1] =
+              this->Tree2Bounds[3] - (this->LabelWidth2 - stringBounds[2]);
+            }
+          break;
+
+        case vtkDendrogramItem::RIGHT_TO_LEFT:
+          if (fontSize < this->MinimumVisibleFontSize)
+            {
+            sourcePosition[0] = this->Tree1Bounds[0] - this->Spacing;
+            targetPosition[0] = this->Tree2Bounds[1] + this->Spacing;
+            }
+          else
+            {
+            float stringBounds[4];
+            painter->ComputeStringBounds(source, stringBounds);
+            sourcePosition[0] =
+              this->Tree1Bounds[0] + (this->LabelWidth1 - stringBounds[2]);
+
+            sourceEdgePosition[0] = this->Tree1Bounds[0] - this->Spacing;
+
+            targetEdgePosition[0] = this->Tree2Bounds[1] + this->Spacing;
+
+            painter->ComputeStringBounds(target, stringBounds);
+            targetPosition[0] =
+              this->Tree2Bounds[1] - (this->LabelWidth2 - stringBounds[2]);
+            }
+          break;
+
+        case vtkDendrogramItem::LEFT_TO_RIGHT:
+        default:
+          if (fontSize < this->MinimumVisibleFontSize)
+            {
+            sourcePosition[0] = this->Tree1Bounds[1] + this->Spacing;
+            targetPosition[0] = this->Tree2Bounds[0] - this->Spacing;
+            }
+          else
+            {
+            float stringBounds[4];
+            painter->ComputeStringBounds(source, stringBounds);
+            sourcePosition[0] =
+              this->Tree1Bounds[1] - (this->LabelWidth1 - stringBounds[2]);
+
+            sourceEdgePosition[0] = this->Tree1Bounds[1] + this->Spacing;
+
+            targetEdgePosition[0] = this->Tree2Bounds[0] - this->Spacing;
+
+            painter->ComputeStringBounds(target, stringBounds);
+            targetPosition[0] =
+              this->Tree2Bounds[0] + (this->LabelWidth2 - stringBounds[2]);
+            }
+          break;
+        }
+
+      double color[4];
+      this->LookupTable->GetColor(matrixValue, color);
+
+      if (fontSize < this->MinimumVisibleFontSize)
+        {
+        painter->GetPen()->SetColorF(color[0], color[1], color[2]);
+        painter->DrawLine(sourcePosition[0], sourcePosition[1],
+                          targetPosition[0], targetPosition[1]);
+        continue;
+        }
+
+      painter->GetPen()->SetColorF(0.0, 0.0, 0.0);
+      painter->GetPen()->SetLineType(vtkPen::DOT_LINE);
+
+      painter->DrawLine(sourcePosition[0], sourcePosition[1],
+                        sourceEdgePosition[0], sourceEdgePosition[1]);
+
+      painter->DrawLine(targetEdgePosition[0], targetEdgePosition[1],
+                        targetPosition[0], targetPosition[1]);
+
+      painter->GetPen()->SetColorF(color[0], color[1], color[2]);
+      painter->GetPen()->SetLineType(vtkPen::SOLID_LINE);
+      painter->DrawLine(sourceEdgePosition[0], sourceEdgePosition[1],
+                        targetEdgePosition[0], targetEdgePosition[1]);
+      }
+    }
+
+  painter->GetPen()->SetColorF(0.0, 0.0, 0.0);
+  painter->GetTextProp()->SetOrientation(textOrientation);
+  painter->GetPen()->SetWidth(previousWidth);
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::PaintTreeLabels(vtkContext2D *painter)
+{
+  int fontSize = painter->GetTextProp()->GetFontSize();
+  painter->GetTextProp()->SetFontSize(fontSize + this->LabelSizeDifference);
+
+  int justification = painter->GetTextProp()->GetJustification();
+  painter->GetTextProp()->SetJustificationToCentered();
+
+  int textOrientation = painter->GetTextProp()->GetOrientation();
+  painter->GetTextProp()->SetOrientation(0.0);
+
+  painter->GetTextProp()->BoldOn();
+
+  double x, y;
+  switch(this->Orientation)
+    {
+    case vtkDendrogramItem::DOWN_TO_UP:
+      if (this->Tree1Label != NULL)
+        {
+        x = (this->Tree1Bounds[1] + this->Tree1Bounds[0]) / 2.0;
+        y = this->Tree1Bounds[2] - this->Spacing;
+        painter->DrawString(x, y, this->Tree1Label);
+        }
+
+      if (this->Tree2Label != NULL)
+        {
+        x = (this->Tree2Bounds[1] + this->Tree2Bounds[0]) / 2.0;
+        y = this->Tree2Bounds[3] + this->Spacing;
+        painter->DrawString(x, y, this->Tree2Label);
+        }
+      break;
+
+    case vtkDendrogramItem::UP_TO_DOWN:
+      if (this->Tree1Label != NULL)
+        {
+        x = (this->Tree1Bounds[1] + this->Tree1Bounds[0]) / 2.0;
+        y = this->Tree1Bounds[3] + this->Spacing;
+        painter->DrawString(x, y, this->Tree1Label);
+        }
+
+      if (this->Tree2Label != NULL)
+        {
+        x = (this->Tree2Bounds[1] + this->Tree2Bounds[0]) / 2.0;
+        y = this->Tree2Bounds[2] - this->Spacing;
+        painter->DrawString(x, y, this->Tree2Label);
+        }
+      break;
+
+    case vtkDendrogramItem::RIGHT_TO_LEFT:
+      if (this->Tree1Label != NULL)
+        {
+        x = this->Tree1Bounds[0] + this->LabelWidth1 + this->Spacing / 2.0;
+        y = this->Tree1Bounds[3] + this->Spacing * 2.0;
+        painter->DrawString(x, y, this->Tree1Label);
+        }
+
+      if (this->Tree2Label != NULL)
+        {
+        x = this->Tree2Bounds[1] - this->LabelWidth2 - this->Spacing / 2.0;
+        y = this->Tree2Bounds[3] + this->Spacing * 2.0;
+        painter->DrawString(x, y, this->Tree2Label);
+        }
+      break;
+
+    case vtkDendrogramItem::LEFT_TO_RIGHT:
+    default:
+      if (this->Tree1Label != NULL)
+        {
+        x = this->Tree1Bounds[1] - this->LabelWidth1 - this->Spacing / 2.0;
+        y = this->Tree1Bounds[3] + this->Spacing * 2.0;
+        painter->DrawString(x, y, this->Tree1Label);
+        }
+
+      if (this->Tree2Label != NULL)
+        {
+        x = this->Tree2Bounds[0] + this->LabelWidth2 + this->Spacing / 2.0;
+        y = this->Tree2Bounds[3] + this->Spacing * 2.0;
+        painter->DrawString(x, y, this->Tree2Label);
+        }
+      break;
+    }
+
+
+  painter->GetTextProp()->SetFontSize(fontSize);
+  painter->GetTextProp()->SetJustification(justification);
+  painter->GetTextProp()->SetOrientation(textOrientation);
+  painter->GetTextProp()->BoldOff();
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::ReorderTree()
+{
+  if (this->Dendrogram1->GetTree()->GetNumberOfVertices() == 0 ||
+      this->Dendrogram2->GetTree()->GetNumberOfVertices() == 0 ||
+      this->Table == NULL)
+    {
+    return;
+    }
+
+  vtkTree *tree = this->Dendrogram2->GetTree();
+
+  this->Tree2Names = vtkStringArray::SafeDownCast(
+    tree->GetVertexData()->GetAbstractArray("node name"));
+
+  vtkNew<vtkTreeBFSIterator> bfsIterator;
+  bfsIterator->SetTree(tree);
+  bfsIterator->SetStartVertex(tree->GetRoot());
+  while(bfsIterator->HasNext())
+    {
+    vtkIdType vertex = bfsIterator->Next();
+    if (tree->GetNumberOfChildren(vertex) < 2)
+      {
+      continue;
+      }
+    this->ReorderTreeAtVertex(vertex, tree);
+    }
+
+  this->TreeReordered = true;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::ReorderTreeAtVertex(vtkIdType parent, vtkTree *tree)
+{
+  // Set up a priority queue to reorganize the vertices.  This queue sorts all
+  // the children of parent based on their "score".  This score roughly
+  // corresponds to where the children should be positioned within the
+  // dendrogram to minimize crossings.  See the comments within
+  // GetPositionScoreForVertex() for more info.
+  std::priority_queue<vtkDendrogramItem::WeightedVertex,
+                      std::vector<vtkDendrogramItem::WeightedVertex>,
+                      vtkDendrogramItem::CompareWeightedVertices> queue;
+
+  for(vtkIdType i = 0; i < tree->GetNumberOfChildren(parent); ++i)
+    {
+    vtkIdType child = tree->GetChild(parent, i);
+    double score = this->GetPositionScoreForVertex(child, tree);
+    vtkDendrogramItem::WeightedVertex wv = {child, score};
+    queue.push(wv);
+    }
+
+  vtkNew<vtkIdTypeArray> newChildOrder;
+  while (!queue.empty())
+    {
+    vtkDendrogramItem::WeightedVertex wv = queue.top();
+    queue.pop();
+    newChildOrder->InsertNextValue(wv.ID);
+    }
+
+  tree->ReorderChildren(parent, newChildOrder.GetPointer());
+}
+
+//-----------------------------------------------------------------------------
+double vtkTanglegramItem::GetPositionScoreForVertex(vtkIdType vertex,
+                                                    vtkTree *tree)
+{
+  // score will be the average "height" (y coordinate for unrotated tanglegram)
+  // of all the leaf nodes in the fixed tree that are associated with leaf nodes
+  // that descend from the vertex parameter.
+  double score = 0.0;
+  double numLeafNodesFound = 0.0;
+  double position[2] = {0, 0};
+
+  // which dimension (x or y) should be used to calculate this vertex's score.
+  // this is determined by the orientation of our tanglegram.
+  int dimension = 1;
+  if (this->Orientation == vtkDendrogramItem::DOWN_TO_UP ||
+      this->Orientation == vtkDendrogramItem::UP_TO_DOWN)
+    {
+    dimension = 0;
+    }
+
+  vtkNew<vtkTreeDFSIterator> dfsIterator;
+  dfsIterator->SetTree(tree);
+  dfsIterator->SetStartVertex(vertex);
+
+  // search for leaf nodes that descend from this vertex
+  while(dfsIterator->HasNext())
+    {
+    vtkIdType v = dfsIterator->Next();
+    if (!tree->IsLeaf(v))
+      {
+      continue;
+      }
+
+    // get this leaf node's name
+    std::string tree2Name = this->Tree2Names->GetValue(v);
+
+    // find where this name appears in the correspondence table
+    vtkDoubleArray *column = vtkDoubleArray::SafeDownCast(
+      this->Table->GetColumnByName(tree2Name.c_str()));
+
+    if (column == NULL)
+      {
+      continue;
+      }
+
+    for (vtkIdType row = 0; row < column->GetNumberOfTuples(); ++row)
+      {
+      if (column->GetValue(row) > 0.0)
+        {
+        // get the position of the associated leaf node in the fixed tree
+        // and use it to update our score.
+        std::string tree1Name = this->Table->GetValue(row, 0).ToString();
+        if (!this->Dendrogram1->GetPositionOfVertex(tree1Name, position))
+          {
+          continue;
+          }
+        score += position[dimension];
+        ++numLeafNodesFound;
+        }
+      }
+    }
+
+  if (numLeafNodesFound == 0)
+    {
+    return VTK_DOUBLE_MAX;
+    }
+
+  int sign = 1;
+  if (this->Orientation == vtkDendrogramItem::LEFT_TO_RIGHT ||
+      this->Orientation == vtkDendrogramItem::UP_TO_DOWN)
+    {
+    // multiply by -1 because we want high numbers to be near the top.
+    sign = -1;
+    }
+
+  return sign * score / numLeafNodesFound;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::SetOrientation(int orientation)
+{
+  this->Orientation = orientation;
+  this->Dendrogram1->SetOrientation(this->Orientation);
+  this->Dendrogram2->SetOrientation((this->Orientation + 2) % 4);
+}
+
+//-----------------------------------------------------------------------------
+int vtkTanglegramItem::GetOrientation()
+{
+  return this->Orientation;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::GenerateLookupTable()
+{
+  this->LookupTable->SetNumberOfTableValues(255);
+  this->LookupTable->Build();
+
+  vtkNew<vtkColorSeries> colorSeries;
+  colorSeries->SetColorScheme(vtkColorSeries::BREWER_QUALITATIVE_ACCENT);
+  colorSeries->BuildLookupTable(this->LookupTable);
+
+  this->LookupTable->IndexedLookupOff();
+
+  double min = VTK_DOUBLE_MAX;
+  double max = VTK_DOUBLE_MIN;
+
+  for (vtkIdType row = 0; row < this->Table->GetNumberOfRows();
+       ++row)
+    {
+    for (vtkIdType col = 1; col < this->Table->GetNumberOfColumns(); ++col)
+      {
+      double d = this->Table->GetValue(row, col).ToDouble();
+      if (d == 0.0)
+        {
+        continue;
+        }
+      if (d > max)
+        {
+        max = d;
+        }
+      if (d < min)
+        {
+        min = d;
+        }
+      }
+    }
+
+  this->LookupTable->SetRange(min, max);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkTanglegramItem::MouseDoubleClickEvent(
+  const vtkContextMouseEvent &event)
+{
+  bool tree1Changed = this->Dendrogram1->MouseDoubleClickEvent(event);
+  bool tree2Changed = false;
+  if (!tree1Changed)
+    {
+    tree2Changed = this->Dendrogram2->MouseDoubleClickEvent(event);
+    }
+
+  return tree1Changed || tree2Changed;
+}
+
+//-----------------------------------------------------------------------------
+float vtkTanglegramItem::GetTreeLineWidth()
+{
+  return this->Dendrogram1->GetLineWidth();
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::SetTreeLineWidth(float width)
+{
+  this->Dendrogram1->SetLineWidth(width);
+  this->Dendrogram2->SetLineWidth(width);
+}
+
+//-----------------------------------------------------------------------------
+bool vtkTanglegramItem::Hit(const vtkContextMouseEvent &vtkNotUsed(mouse))
+{
+  // If we are interactive, we want to catch anything that propagates to the
+  // background, otherwise we do not want any mouse events.
+  return this->Interactive;
+}
+
+//-----------------------------------------------------------------------------
+void vtkTanglegramItem::PrintSelf(ostream &os, vtkIndent indent)
+{
+  this->Superclass::PrintSelf(os, indent);
+}
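
A worked example of the scoring used in ReorderTreeAtVertex() and
GetPositionScoreForVertex() above: each child of tree #2 is scored by the mean
coordinate of the fixed-tree leaves its descendants map to through the table,
negated for LEFT_TO_RIGHT and UP_TO_DOWN layouts. Assuming a LEFT_TO_RIGHT
layout where subtree A's leaves match fixed-tree leaves at y = 10 and y = 30,
and sibling subtree B matches a single leaf at y = 15 (illustrative values):

    score(A) = -(10 + 30) / 2 = -20
    score(B) = -(15) / 1      = -15

Children with no table matches return VTK_DOUBLE_MAX, and siblings are then
reordered according to these scores via the weighted-vertex priority queue.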
diff --git a/Views/Infovis/vtkTanglegramItem.h b/Views/Infovis/vtkTanglegramItem.h
new file mode 100644
index 0000000..2873443
--- /dev/null
+++ b/Views/Infovis/vtkTanglegramItem.h
@@ -0,0 +1,198 @@
+/*=========================================================================
+
+  Program:   Visualization Toolkit
+  Module:    vtkTanglegramItem.h
+
+  Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
+  All rights reserved.
+  See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
+
+     This software is distributed WITHOUT ANY WARRANTY; without even
+     the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
+     PURPOSE.  See the above copyright notice for more information.
+
+=========================================================================*/
+// .NAME vtkTanglegramItem - Display two related trees
+//
+// .SECTION Description
+// This item draws two trees with connections between their leaf nodes.
+// Use SetTable() to specify what leaf nodes correspond to one another
+// between the two trees.  See the documentation for this function for
+// more details on how this table should be formatted.
+//
+// .SECTION See Also
+// vtkTree vtkTable vtkDendrogramItem vtkNewickTreeReader
+
+#ifndef __vtkTanglegramItem_h
+#define __vtkTanglegramItem_h
+
+#include "vtkViewsInfovisModule.h" // For export macro
+
+#include "vtkContextItem.h"
+#include "vtkSmartPointer.h"       // For SmartPointer ivars
+#include "vtkTable.h"  // For get/set
+
+class vtkDendrogramItem;
+class vtkLookupTable;
+class vtkStringArray;
+class vtkTree;
+
+class VTKVIEWSINFOVIS_EXPORT vtkTanglegramItem : public vtkContextItem
+{
+public:
+  static vtkTanglegramItem *New();
+  vtkTypeMacro(vtkTanglegramItem, vtkContextItem);
+  virtual void PrintSelf(ostream &os, vtkIndent indent);
+
+  // Description:
+  // Set the first tree
+  virtual void SetTree1(vtkTree *tree);
+
+  // Description:
+  // Set the second tree
+  virtual void SetTree2(vtkTree *tree);
+
+  // Description:
+  // Get/Set the table that describes the correspondences between the
+  // two trees.  The first column should contain the names of the leaf
+  // nodes from tree #1.  The remaining columns should be named
+  // after the leaf nodes of tree #2.  A non-zero cell should be used
+  // to create a connection between the two trees.  Different numbers
+  // in the table will result in connections being drawn in different
+  // colors.
+  vtkTable * GetTable();
+  void SetTable(vtkTable *table);
+
+  // Description:
+  // Get/Set the label for tree #1.
+  vtkGetStringMacro(Tree1Label);
+  vtkSetStringMacro(Tree1Label);
+
+  // Description:
+  // Get/Set the label for tree #2.
+  vtkGetStringMacro(Tree2Label);
+  vtkSetStringMacro(Tree2Label);
+
+  // Description:
+  // Set which way the tanglegram should face within the visualization.
+  // The default is for tree #1 to be drawn left to right.
+  void SetOrientation(int orientation);
+
+  // Description:
+  // Get the current orientation.
+  int GetOrientation();
+
+  // Description:
+  // Get/Set the smallest font size that is still considered legible.
+  // If the current zoom level requires our vertex labels to be smaller
+  // than this size the labels will not be drawn at all.  Default value
+  // is 8 pt.
+  vtkGetMacro(MinimumVisibleFontSize, int);
+  vtkSetMacro(MinimumVisibleFontSize, int);
+
+  // Description:
+  // Get/Set how much larger the dendrogram labels should be compared to the
+  // vertex labels.  Because the vertex labels automatically resize based
+  // on zoom levels, this is a relative (not absolute) size.  Default value
+  // is 4 pts larger than the vertex labels.
+  vtkGetMacro(LabelSizeDifference, int);
+  vtkSetMacro(LabelSizeDifference, int);
+
+  // Description:
+  // Get/Set how wide the correspondence lines should be.  Default is two pixels.
+  vtkGetMacro(CorrespondenceLineWidth, float);
+  vtkSetMacro(CorrespondenceLineWidth, float);
+
+  // Description:
+  // Get/Set how wide the edges of the trees should be.  Default is one pixel.
+  float GetTreeLineWidth();
+  void SetTreeLineWidth(float width);
+
+  // BTX
+
+  // Description:
+  // Returns true if this tanglegram item is interactive, false otherwise.
+  virtual bool Hit(const vtkContextMouseEvent &mouse);
+
+  // Description:
+  // Propagate any double click onto the dendrograms to check if any
+  // subtrees should be collapsed or expanded.
+  virtual bool MouseDoubleClickEvent(const vtkContextMouseEvent &event);
+
+  //ETX
+
+protected:
+  vtkTanglegramItem();
+  ~vtkTanglegramItem();
+
+  // Description:
+  // Update the bounds of our two dendrograms.
+  void RefreshBuffers(vtkContext2D *painter);
+
+  // Description:
+  // Calculate and set an appropriate position for our second dendrogram.
+  void PositionTree2();
+
+  // Description:
+  // Draw the lines between the corresponding vertices of our two dendrograms.
+  void PaintCorrespondenceLines(vtkContext2D *painter);
+
+  // Description:
+  // Draw the labels of our two dendrograms.
+  void PaintTreeLabels(vtkContext2D *painter);
+
+  // Description:
+  // Reorder the children of tree #2 to minimize the amount of crossings
+  // in our tanglegram.
+  void ReorderTree();
+
+  // Description:
+  // Helper function used by ReorderTree.
+  // Rearrange the children of the specified parent vertex in order to minimize
+  // tanglegram crossings.
+  void ReorderTreeAtVertex(vtkIdType parent, vtkTree *tree);
+
+  // Description:
+  // Helper function used by ReorderTreeAtVertex.  Get the average height of
+  // the vertices that correspond to the vertex parameter.  This information
+  // is used to determine what order sibling vertices should have within the
+  // tree.
+  double GetPositionScoreForVertex(vtkIdType vertex, vtkTree *tree);
+
+  // Description:
+  // Initialize the lookup table used to color the lines between the two
+  // dendrograms.
+  void GenerateLookupTable();
+
+  // Description:
+  // Paints the two trees and the lines between their corresponding leaves.
+  virtual bool Paint(vtkContext2D *painter);
+
+private:
+  vtkSmartPointer<vtkDendrogramItem> Dendrogram1;
+  vtkSmartPointer<vtkDendrogramItem> Dendrogram2;
+  vtkSmartPointer<vtkLookupTable> LookupTable;
+  vtkSmartPointer<vtkTable> Table;
+  vtkStringArray *Tree1Names;
+  vtkStringArray *Tree2Names;
+  vtkStringArray *SourceNames;
+  double Tree1Bounds[4];
+  double Tree2Bounds[4];
+  double Spacing;
+  double LabelWidth1;
+  double LabelWidth2;
+  bool PositionSet;
+  bool TreeReordered;
+  char* Tree1Label;
+  char* Tree2Label;
+  int Orientation;
+  int MinimumVisibleFontSize;
+  int LabelSizeDifference;
+  float CorrespondenceLineWidth;
+
+  vtkTanglegramItem(const vtkTanglegramItem&); // Not implemented
+  void operator=(const vtkTanglegramItem&); // Not implemented
+
+};
+
+#endif
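
A minimal wiring sketch for the item declared above. It assumes two vtkTree
inputs are already available; the one-row correspondence table follows the
SetTable() documentation, and the leaf names "a" and "x" are illustrative:

    #include "vtkContextScene.h"
    #include "vtkContextView.h"
    #include "vtkDoubleArray.h"
    #include "vtkNew.h"
    #include "vtkRenderWindow.h"
    #include "vtkRenderWindowInteractor.h"
    #include "vtkStringArray.h"
    #include "vtkTable.h"
    #include "vtkTanglegramItem.h"
    #include "vtkTree.h"

    void ShowTanglegram(vtkTree *tree1, vtkTree *tree2)
    {
      // Column 0: tree #1 leaf names; remaining columns are named
      // after tree #2 leaves, with non-zero cells drawn as lines.
      vtkNew<vtkTable> table;
      vtkNew<vtkStringArray> tree1Leaves;
      tree1Leaves->SetName("tree 1 leaves");
      tree1Leaves->InsertNextValue("a");
      table->AddColumn(tree1Leaves.GetPointer());

      vtkNew<vtkDoubleArray> match;
      match->SetName("x");            // a leaf name from tree #2
      match->InsertNextValue(1.0);    // non-zero => connect "a" to "x"
      table->AddColumn(match.GetPointer());

      vtkNew<vtkTanglegramItem> item;
      item->SetTree1(tree1);
      item->SetTree2(tree2);
      item->SetTable(table.GetPointer());
      item->SetTree1Label("Tree 1");
      item->SetTree2Label("Tree 2");

      vtkNew<vtkContextView> view;
      view->GetScene()->AddItem(item.GetPointer());
      view->GetRenderWindow()->Render();
      view->GetInteractor()->Start();
    }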
diff --git a/Views/Infovis/vtkTreeHeatmapItem.cxx b/Views/Infovis/vtkTreeHeatmapItem.cxx
index cdc0d60..b8071ff 100644
--- a/Views/Infovis/vtkTreeHeatmapItem.cxx
+++ b/Views/Infovis/vtkTreeHeatmapItem.cxx
@@ -1,7 +1,7 @@
 /*=========================================================================
 
   Program:   Visualization Toolkit
-  Module:    TestDiagram.cxx
+  Module:    vtkTreeHeatmapItem.cxx
 
   Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
   All rights reserved.
@@ -13,1450 +13,594 @@
 
 =========================================================================*/
 #include "vtkTreeHeatmapItem.h"
+#include "vtkDendrogramItem.h"
+#include "vtkHeatmapItem.h"
 
-#include "vtkBrush.h"
-#include "vtkContext2D.h"
-#include "vtkContextMouseEvent.h"
-#include "vtkContextScene.h"
 #include "vtkDataSetAttributes.h"
-#include "vtkDoubleArray.h"
-#include "vtkGraphLayout.h"
-#include "vtkIdTypeArray.h"
-#include "vtkLookupTable.h"
-#include "vtkNew.h"
+#include "vtkBitArray.h"
 #include "vtkObjectFactory.h"
-#include "vtkPen.h"
-#include "vtkPruneTreeFilter.h"
 #include "vtkStringArray.h"
 #include "vtkTable.h"
-#include "vtkTextProperty.h"
-#include "vtkTooltipItem.h"
-#include "vtkTransform2D.h"
 #include "vtkTree.h"
-#include "vtkTreeLayoutStrategy.h"
-#include "vtkUnsignedIntArray.h"
 
 #include <algorithm>
-#include <queue>
-#include <sstream>
 
 vtkStandardNewMacro(vtkTreeHeatmapItem);
+
 //-----------------------------------------------------------------------------
 vtkTreeHeatmapItem::vtkTreeHeatmapItem()
 {
   this->Interactive = true;
-  this->JustCollapsedOrExpanded = false;
-  this->ColorTree = false;
+  this->Orientation = vtkDendrogramItem::LEFT_TO_RIGHT;
   this->TreeHeatmapBuildTime = 0;
-  this->Tree = vtkSmartPointer<vtkTree>::New();
-  this->PrunedTree = vtkSmartPointer<vtkTree>::New();
-  this->LayoutTree = vtkSmartPointer<vtkTree>::New();
-  this->Table = vtkSmartPointer<vtkTable>::New();
-
-  /* initialize bounds so that the mouse cursor is never considered
-   * "inside" the heatmap or tree */
-  this->HeatmapMinX = 1.0;
-  this->HeatmapMinY = 1.0;
-  this->HeatmapMaxX = 0.0;
-  this->HeatmapMaxY = 0.0;
-  this->TreeMinX = 1.0;
-  this->TreeMinY = 1.0;
-  this->TreeMaxX = 0.0;
-  this->TreeMaxY = 0.0;
-
-  this->NumberOfLeafNodes = 0;
-  this->MultiplierX = 100.0;
-  this->MultiplierY = 100.0;
-  this->CellWidth = 100.0;
-  this->CellHeight = 50.0;
-
-  this->Tooltip->SetVisible(false);
-  this->AddItem(this->Tooltip.GetPointer());
-  this->PruneFilter->SetShouldPruneParentVertex(false);
+
+  this->Dendrogram = vtkSmartPointer<vtkDendrogramItem>::New();
+  this->Dendrogram->ExtendLeafNodesOn();
+  this->Dendrogram->SetVisible(false);
+  this->AddItem(this->Dendrogram);
+
+  this->ColumnDendrogram = vtkSmartPointer<vtkDendrogramItem>::New();
+  this->ColumnDendrogram->ExtendLeafNodesOn();
+  this->ColumnDendrogram->SetVisible(false);
+  this->ColumnDendrogram->SetDrawLabels(false);
+  this->AddItem(this->ColumnDendrogram);
+
+  this->Heatmap = vtkSmartPointer<vtkHeatmapItem>::New();
+  this->Heatmap->SetVisible(false);
+  this->AddItem(this->Heatmap);
+
+  this->ColumnDendrogram->SetLeafSpacing(this->Heatmap->GetCellWidth());
 }
 
 //-----------------------------------------------------------------------------
 vtkTreeHeatmapItem::~vtkTreeHeatmapItem()
 {
-  while (!this->LookupTables.empty())
-    {
-    this->LookupTables.back()->Delete();
-    this->LookupTables.pop_back();
-    }
 }
 
 //-----------------------------------------------------------------------------
 void vtkTreeHeatmapItem::SetTree(vtkTree *tree)
 {
-  if (tree == NULL || tree->GetNumberOfVertices() == 0)
+  this->Dendrogram->SetTree(tree);
+  if (tree == NULL)
     {
-    this->Tree = vtkSmartPointer<vtkTree>::New();
-    this->PrunedTree = vtkSmartPointer<vtkTree>::New();
-    this->LayoutTree = vtkSmartPointer<vtkTree>::New();
     return;
     }
 
-  this->Tree = tree;
-
-  // initialize some additional arrays for the tree's vertex data
-  vtkNew<vtkUnsignedIntArray> vertexIsPruned;
-  vertexIsPruned->SetNumberOfComponents(1);
-  vertexIsPruned->SetName("VertexIsPruned");
-  vertexIsPruned->SetNumberOfValues(
-    this->Tree->GetNumberOfVertices());
-  vertexIsPruned->FillComponent(0, 0.0);
-  this->Tree->GetVertexData()->AddArray(vertexIsPruned.GetPointer());
-
-  vtkNew<vtkIdTypeArray> originalId;
-  originalId->SetNumberOfComponents(1);
-  originalId->SetName("OriginalId");
-  vtkIdType numVertices = this->Tree->GetNumberOfVertices();
-  originalId->SetNumberOfValues(numVertices);
-  for (vtkIdType i = 0; i < numVertices; ++i)
+  if (this->GetTable() != NULL &&
+      this->GetTable()->GetNumberOfRows() != 0)
     {
-    originalId->SetValue(i, i);
+    this->Dendrogram->SetDrawLabels(false);
     }
-  this->Tree->GetVertexData()->AddArray(originalId.GetPointer());
-
-  // make a copy of the full tree for later pruning
-  this->PrunedTree->DeepCopy(this->Tree);
+  this->Dendrogram->SetVisible(true);
 
-  // setup the lookup table that's used to color the triangles representing
-  // collapsed subtrees.  First we find maximum possible value.
-  vtkIdType root = this->Tree->GetRoot();
-  if (this->Tree->GetNumberOfChildren(root) == 1)
-    {
-    root = this->Tree->GetChild(root, 0);
-    }
-  int numLeavesInBiggestSubTree = 0;
-  for (vtkIdType child = 0; child < this->Tree->GetNumberOfChildren(root);
-       ++child)
+  // rearrange our table to match the order of the leaf nodes in this tree.
+  if (this->GetTable() != NULL && this->GetTable()->GetNumberOfRows() != 0)
     {
-    vtkIdType childVertex = this->Tree->GetChild(root, child);
-    int numLeaves = this->CountLeafNodes(childVertex);
-    if (numLeaves > numLeavesInBiggestSubTree)
-      {
-      numLeavesInBiggestSubTree = numLeaves;
-      }
+    this->ReorderTable();
     }
-
-  double rangeMinimum = 2.0;
-  if (numLeavesInBiggestSubTree < rangeMinimum)
-    {
-    rangeMinimum = numLeavesInBiggestSubTree;
-    }
-
-  this->TriangleLookupTable->SetNumberOfTableValues(256);
-  this->TriangleLookupTable->SetHueRange(0.5, 0.045);
-  this->TriangleLookupTable->SetRange(
-    rangeMinimum, static_cast<double>(numLeavesInBiggestSubTree));
-  this->TriangleLookupTable->Build();
 }
 
 //-----------------------------------------------------------------------------
 void vtkTreeHeatmapItem::SetTable(vtkTable *table)
 {
-  if (table == NULL || table->GetNumberOfRows() == 0)
+  this->Heatmap->SetTable(table);
+  if (table == NULL)
     {
-    this->Table = vtkSmartPointer<vtkTable>::New();
     return;
     }
-  this->Table = table;
-}
 
-//-----------------------------------------------------------------------------
-vtkTree * vtkTreeHeatmapItem::GetTree()
-{
-  return this->Tree;
-}
-
-//-----------------------------------------------------------------------------
-vtkTable * vtkTreeHeatmapItem::GetTable()
-{
-  return this->Table;
-}
-
-//-----------------------------------------------------------------------------
-vtkTree * vtkTreeHeatmapItem::GetPrunedTree()
-{
-  return this->PrunedTree;
-}
-//-----------------------------------------------------------------------------
-bool vtkTreeHeatmapItem::Paint(vtkContext2D *painter)
-{
-  if (this->Tree->GetNumberOfVertices() == 0 &&
-      this->Table->GetNumberOfRows() == 0)
+  if (this->Dendrogram->GetTree() != NULL &&
+      this->Dendrogram->GetTree()->GetNumberOfVertices() != 0)
     {
-    return true;
+    this->Dendrogram->SetDrawLabels(false);
     }
+  this->Heatmap->SetVisible(true);
 
-  if (this->IsDirty())
-    {
-    this->RebuildBuffers();
-    }
-
-  this->PaintBuffers(painter);
-  this->PaintChildren(painter);
-  return true;
-}
 
-//-----------------------------------------------------------------------------
-bool vtkTreeHeatmapItem::IsDirty()
-{
-  if (this->Tree->GetNumberOfVertices() == 0 &&
-      this->Table->GetNumberOfRows() == 0)
-    {
-    return false;
-    }
-  if (this->PrunedTree->GetMTime() > this->TreeHeatmapBuildTime)
+  // rearrange our table to match the order of the leaf nodes in this tree.
+  if (this->GetTree() != NULL && this->GetTree()->GetNumberOfVertices() != 0)
     {
-    return true;
+    this->ReorderTable();
     }
-  if (this->Tree->GetNumberOfVertices() == 0)
-    {
-    if (this->Table->GetMTime() > this->TreeHeatmapBuildTime)
-      {
-      return true;
-      }
-    }
-  else if(this->Table->GetNumberOfRows() == 0)
+
+  // add an array to this table's field data to keep track of collapsed rows
+  // (unless it already has the array)
+  vtkBitArray *existingRowsArray = vtkBitArray::SafeDownCast(
+    this->GetTable()->GetFieldData()->GetArray("collapsed rows"));
+  if (existingRowsArray)
     {
-    if (this->Tree->GetMTime() > this->TreeHeatmapBuildTime)
+    for(vtkIdType row = 0; row < this->GetTable()->GetNumberOfRows(); ++row)
       {
-      return true;
+      existingRowsArray->SetValue(row, 0);
       }
     }
   else
     {
-    if (this->Tree->GetMTime() > this->TreeHeatmapBuildTime ||
-        this->Table->GetMTime() > this->GetMTime())
-      {
-      return true;
-      }
-    }
-  return false;
-}
-
-//-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::RebuildBuffers()
-{
-  if (this->Tree->GetNumberOfVertices() > 0)
-    {
-    vtkNew<vtkTreeLayoutStrategy> strategy;
-    strategy->SetDistanceArrayName("node weight");
-    strategy->SetLeafSpacing(1.0);
-    strategy->SetRotation(90.0);
-
-    this->Layout->SetLayoutStrategy(strategy.GetPointer());
-    this->Layout->SetInputData(this->PrunedTree);
-    this->Layout->Update();
-    this->LayoutTree = vtkTree::SafeDownCast(this->Layout->GetOutput());
-    this->CountLeafNodes();
-    }
-
-  this->ComputeMultipliers();
-
-  if (this->Tree->GetNumberOfVertices() > 0)
-    {
-    this->ComputeTreeBounds();
-
-    // calculate how large our table cells will be when they are drawn.
-    // These values are also used when representing collapsed subtrees.
-    double numLeaves = static_cast<double>(this->NumberOfLeafNodes);
-    if (numLeaves == 1)
+    vtkSmartPointer<vtkBitArray> collapsedRowsArray =
+      vtkSmartPointer<vtkBitArray>::New();
+    collapsedRowsArray->SetNumberOfComponents(1);
+    collapsedRowsArray->SetName("collapsed rows");
+    for(vtkIdType row = 0; row < this->GetTable()->GetNumberOfRows(); ++row)
       {
-      this->CellHeight = 50.0;
+      collapsedRowsArray->InsertNextValue(0);
       }
-    else
-      {
-      this->CellHeight = (this->TreeMaxY / (numLeaves - 1.0));
-      }
-    this->CellWidth = this->CellHeight * 2;
-    }
-
-  if (this->Table->GetNumberOfRows() > 0)
-    {
-    this->InitializeLookupTables();
+    this->GetTable()->GetFieldData()->AddArray(collapsedRowsArray);
     }
 
-  if (this->Tree->GetNumberOfVertices() == 0)
+  // add an array to this table's field data to keep track of collapsed columns
+  // (unless it already has the array)
+  vtkBitArray *existingColumnsArray = vtkBitArray::SafeDownCast(
+    this->GetTable()->GetFieldData()->GetArray("collapsed columns"));
+  if (existingColumnsArray)
     {
-    this->TreeHeatmapBuildTime = this->Table->GetMTime();
-    }
-  else if (this->Table->GetNumberOfRows() == 0)
-    {
-    if( this->PrunedTree->GetMTime() > this->Tree->GetMTime())
-      {
-      this->TreeHeatmapBuildTime = this->PrunedTree->GetMTime();
-      }
-    else if(this->Tree->GetMTime() > this->Table->GetMTime())
+    for(vtkIdType col = 0; col < this->GetTable()->GetNumberOfColumns(); ++col)
       {
-      this->TreeHeatmapBuildTime = this->Tree->GetMTime();
+      existingColumnsArray->SetValue(col, 0);
       }
     }
   else
     {
-    if( this->PrunedTree->GetMTime() > this->Tree->GetMTime())
-      {
-      this->TreeHeatmapBuildTime = this->PrunedTree->GetMTime();
-      }
-    else if(this->Tree->GetMTime() > this->Table->GetMTime())
+    vtkSmartPointer<vtkBitArray> collapsedColumnsArray =
+      vtkSmartPointer<vtkBitArray>::New();
+    collapsedColumnsArray->SetNumberOfComponents(1);
+    collapsedColumnsArray->SetName("collapsed columns");
+    for(vtkIdType col = 0; col < this->GetTable()->GetNumberOfColumns(); ++col)
       {
-      this->TreeHeatmapBuildTime = this->Tree->GetMTime();
-      }
-    else
-      {
-      this->TreeHeatmapBuildTime = this->Table->GetMTime();
+      collapsedColumnsArray->InsertNextValue(0);
       }
+    this->GetTable()->GetFieldData()->AddArray(collapsedColumnsArray);
     }
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::ComputeMultipliers()
+void vtkTreeHeatmapItem::SetColumnTree(vtkTree *tree)
 {
-  double targetFontSize = 18;
-  double yMax = 1;
-  double targetPoint[3];
-  if (this->Tree->GetNumberOfVertices() > 0)
+  this->ColumnDendrogram->SetTree(tree);
+  if (tree == NULL)
     {
-    for (vtkIdType edge = 0; edge < this->LayoutTree->GetNumberOfEdges(); ++edge)
-      {
-      vtkIdType target = this->LayoutTree->GetTargetVertex(edge);
-      this->LayoutTree->GetPoint(target, targetPoint);
-      if (targetPoint[1] > yMax)
-        {
-        yMax = targetPoint[1];
-        }
-      }
+    return;
     }
 
-  double numRows;
-  if (this->Tree->GetNumberOfVertices() == 0)
+  if (this->Orientation == vtkDendrogramItem::LEFT_TO_RIGHT ||
+      this->Orientation == vtkDendrogramItem::RIGHT_TO_LEFT)
     {
-    numRows = this->Table->GetNumberOfRows();
-    yMax = this->CellHeight * numRows;
+    this->ColumnDendrogram->SetOrientation(vtkDendrogramItem::UP_TO_DOWN);
     }
   else
     {
-    numRows = this->NumberOfLeafNodes;
+    this->ColumnDendrogram->SetOrientation(vtkDendrogramItem::RIGHT_TO_LEFT);
     }
 
-  double currentFontSize =
-    (yMax * this->MultiplierX) / numRows;
-  if (currentFontSize < targetFontSize)
-    {
-    this->MultiplierX = (numRows * targetFontSize) / yMax;
-    this->MultiplierY = this->MultiplierX;
-    }
-  if (this->JustCollapsedOrExpanded)
-    {
-    this->MultiplierY = (this->CellHeight * (numRows - 1)) / yMax;
-    this->JustCollapsedOrExpanded = false;
-    }
+  this->ColumnDendrogram->SetVisible(true);
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::ComputeTreeBounds()
+vtkTree * vtkTreeHeatmapItem::GetColumnTree()
 {
-  this->TreeMinX = VTK_DOUBLE_MAX;
-  this->TreeMinY = VTK_DOUBLE_MAX;
-  this->TreeMaxX = VTK_DOUBLE_MIN;
-  this->TreeMaxY = VTK_DOUBLE_MIN;
-
-  double sourcePoint[3];
-  double targetPoint[3];
-
-  for (vtkIdType edge = 0; edge < this->LayoutTree->GetNumberOfEdges(); ++edge)
-    {
-    vtkIdType source = this->LayoutTree->GetSourceVertex(edge);
-    this->LayoutTree->GetPoint(source, sourcePoint);
-    double x0 = sourcePoint[0] * this->MultiplierX;
-    double y0 = sourcePoint[1] * this->MultiplierY;
-
-    vtkIdType target = this->LayoutTree->GetTargetVertex(edge);
-    this->LayoutTree->GetPoint(target, targetPoint);
-    double x1 = targetPoint[0] * this->MultiplierX;
-    double y1 = targetPoint[1] * this->MultiplierY;
-
-    if (x0 < this->TreeMinX)
-      {
-      this->TreeMinX = x0;
-      }
-    if (y0 < this->TreeMinY)
-      {
-      this->TreeMinY = y0;
-      }
-    if (x0 > this->TreeMaxX)
-      {
-      this->TreeMaxX = x0;
-      }
-    if (y0 > this->TreeMaxY)
-      {
-      this->TreeMaxY = y0;
-      }
-    if (x1 < this->TreeMinX)
-      {
-      this->TreeMinX = x1;
-      }
-    if (y1 < this->TreeMinY)
-      {
-      this->TreeMinY = y1;
-      }
-    if (x1 > this->TreeMaxX)
-      {
-      this->TreeMaxX = x1;
-      }
-    if (y1 > this->TreeMaxY)
-      {
-      this->TreeMaxY = y1;
-      }
-    }
+  return this->ColumnDendrogram->GetTree();
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::CountLeafNodes()
+vtkSmartPointer<vtkDendrogramItem> vtkTreeHeatmapItem::GetDendrogram()
 {
-  // figure out how many leaf nodes we have.
-  this->NumberOfLeafNodes = 0;
-  for (vtkIdType vertex = 0; vertex < this->LayoutTree->GetNumberOfVertices();
-       ++vertex)
-    {
-    if (!this->LayoutTree->IsLeaf(vertex))
-      {
-      continue;
-      }
-    ++this->NumberOfLeafNodes;
-    }
+  return this->Dendrogram;
 }
 
 //-----------------------------------------------------------------------------
-int vtkTreeHeatmapItem::CountLeafNodes(vtkIdType vertex)
+void vtkTreeHeatmapItem::SetDendrogram(vtkSmartPointer<vtkDendrogramItem> item)
 {
-  // figure out how many leaf nodes descend from vertex.
-  int numLeaves = 0;
-  for (vtkIdType child = 0; child < this->Tree->GetNumberOfChildren(vertex);
-       ++child)
-    {
-    vtkIdType childVertex = this->Tree->GetChild(vertex, child);
-    if (this->Tree->IsLeaf(childVertex))
-      {
-      ++numLeaves;
-      }
-    else
-      {
-      numLeaves += this->CountLeafNodes(childVertex);
-      }
-    }
-  return numLeaves;
+  this->Dendrogram = item;
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::InitializeLookupTables()
+vtkSmartPointer<vtkHeatmapItem> vtkTreeHeatmapItem::GetHeatmap()
 {
-  while (!this->LookupTables.empty())
-    {
-    this->LookupTables.back()->Delete();
-    this->LookupTables.pop_back();
-    }
-  this->LookupTables.reserve( this->Table->GetNumberOfColumns() + 1 );
-
-  for (vtkIdType column = 1; column < this->Table->GetNumberOfColumns();
-       ++column)
-    {
-    if (this->Table->GetValue(0, column).IsString())
-      {
-      this->GenerateLookupTableForStringColumn(column);
-      continue;
-      }
-    double min = VTK_DOUBLE_MAX;
-    double max = VTK_DOUBLE_MIN;
-    vtkLookupTable *lookupTable = vtkLookupTable::New();
-    this->LookupTables.push_back(lookupTable);
-    for (vtkIdType row = 0; row < this->Table->GetNumberOfRows(); ++row)
-      {
-      double value = this->Table->GetValue(row, column).ToDouble();
-      if (value > max)
-        {
-        max = value;
-        }
-      else if (value < min)
-        {
-        min = value;
-        }
-      }
-    this->LookupTables[column-1]->SetNumberOfTableValues(256);
-    this->LookupTables[column-1]->SetRange(min, max);
-    this->LookupTables[column-1]->Build();
-    }
+  return this->Heatmap;
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::GenerateLookupTableForStringColumn(vtkIdType column)
+void vtkTreeHeatmapItem::SetHeatmap(vtkSmartPointer<vtkHeatmapItem> item)
 {
-  //generate a sorted vector of all the strings in this column
-  std::vector< std::string > sortedStrings;
-  for (vtkIdType row = 0; row < this->Table->GetNumberOfRows(); ++row)
-    {
-    std::string value = this->Table->GetValue(row, column).ToString();
-    // no duplicates
-    if (std::find(sortedStrings.begin(), sortedStrings.end(), value) ==
-        sortedStrings.end())
-      {
-      sortedStrings.push_back(value);
-      }
-    }
-  std::sort(sortedStrings.begin(), sortedStrings.end());
-
-  // map each string to a double value based on its position
-  // in alphabetical order
-  std::map< std::string, double> stringToDouble;
-  for (unsigned int i = 0; i < sortedStrings.size(); ++i)
-    {
-    stringToDouble[ sortedStrings[i] ] = (double)i;
-    }
-
-  // associate this mapping with the column number
-  this->StringToDoubleMaps[column] = stringToDouble;
-
-  // generate a lookup table for this column
-  this->LookupTables.push_back(vtkLookupTable::New());
-  this->LookupTables[column-1]->SetNumberOfTableValues(256);
-  this->LookupTables[column-1]->SetRange(0, sortedStrings.size() - 1);
-  this->LookupTables[column-1]->Build();
+  this->Heatmap = item;
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::PaintBuffers(vtkContext2D *painter)
+vtkTree * vtkTreeHeatmapItem::GetTree()
 {
-  if (this->Tree->GetNumberOfVertices() == 0)
-    {
-    this->PaintHeatmapWithoutTree(painter);
-    return;
-    }
-
-  double xStart, yStart;
-  double sourcePoint[3];
-  double targetPoint[3];
-  double spacing = 25;
-  int numberOfCollapsedSubTrees = 0;
-
-  vtkUnsignedIntArray *vertexIsPruned = vtkUnsignedIntArray::SafeDownCast(
-    this->Tree->GetVertexData()->GetArray("VertexIsPruned"));
-
-  // Calculate the extent of the data that is visible within the window.
-  this->UpdateVisibleSceneExtent(painter);
-
-  // draw the tree
-  for (vtkIdType edge = 0; edge < this->LayoutTree->GetNumberOfEdges(); ++edge)
-    {
-    vtkIdType source = this->LayoutTree->GetSourceVertex(edge);
-    vtkIdType target = this->LayoutTree->GetTargetVertex(edge);
-
-    this->LayoutTree->GetPoint(source, sourcePoint);
-    this->LayoutTree->GetPoint(target, targetPoint);
-
-    double x0 = sourcePoint[0] * this->MultiplierX;
-    double y0 = sourcePoint[1] * this->MultiplierY;
-    double x1 = targetPoint[0] * this->MultiplierX;
-    double y1 = targetPoint[1] * this->MultiplierY;
-
-    // check if the target vertex is the root of a collapsed tree
-    bool alreadyDrewCollapsedSubTree = false;
-    vtkIdType originalId = this->GetOriginalId(target);
-
-    double color[3];
-    double colorKey;
-    if (vertexIsPruned->GetValue(originalId) > 0)
-      {
-      ++numberOfCollapsedSubTrees;
-      float trianglePoints[6];
-      trianglePoints[0] = sourcePoint[0] * this->MultiplierX;
-      trianglePoints[1] = targetPoint[1] * this->MultiplierY;
-      trianglePoints[2] = this->TreeMaxX;
-      trianglePoints[3] = targetPoint[1]  * this->MultiplierY - this->CellHeight / 2;
-      trianglePoints[4] = this->TreeMaxX;
-      trianglePoints[5] = targetPoint[1] * this->MultiplierY + this->CellHeight / 2;
-      if (this->LineIsVisible(trianglePoints[0], trianglePoints[1],
-                              trianglePoints[2], trianglePoints[3]) ||
-          this->LineIsVisible(trianglePoints[0], trianglePoints[1],
-                              trianglePoints[4], trianglePoints[5]) ||
-          this->LineIsVisible(trianglePoints[2], trianglePoints[3],
-                              trianglePoints[4], trianglePoints[5]))
-        {
-        colorKey = static_cast<double>(vertexIsPruned->GetValue(originalId));
-        this->TriangleLookupTable->GetColor(colorKey, color);
-        painter->GetBrush()->SetColorF(color[0], color[1], color[2]);
-        painter->DrawPolygon(trianglePoints, 3);
-        }
-      alreadyDrewCollapsedSubTree = true;
-      }
-
-    // color this portion of the tree based on the target node
-    if (this->ColorTree)
-      {
-      painter->GetPen()->SetWidth(2.0);
-      colorKey = this->TreeColorArray->GetValue(target);
-      this->TreeLookupTable->GetColor(colorKey, color);
-      painter->GetPen()->SetColorF(color[0], color[1], color[2]);
-      }
-
-    if (this->LineIsVisible(x0, y0, x0, y1))
-      {
-      painter->DrawLine (x0, y0, x0, y1);
-      }
-    if (!alreadyDrewCollapsedSubTree)
-      {
-      if (this->LineIsVisible(x0, y1, x1, y1))
-        {
-        painter->DrawLine (x0, y1, x1, y1);
-        }
-      }
+  return this->Dendrogram->GetTree();
+}
 
-    if (this->ColorTree)
-      {
-      // revert to drawing thin black lines by default
-      painter->GetPen()->SetColorF(0.0, 0.0, 0.0);
-      painter->GetPen()->SetWidth(1.0);
-      }
-    }
+//-----------------------------------------------------------------------------
+vtkTable * vtkTreeHeatmapItem::GetTable()
+{
+  return this->Heatmap->GetTable();
+}
 
-  // special case: all the true leaf nodes have been collapsed
-  if (this->NumberOfLeafNodes <= numberOfCollapsedSubTrees)
+//-----------------------------------------------------------------------------
+void vtkTreeHeatmapItem::ReorderTable()
+{
+  // make a copy of our table and then empty out the original.
+  vtkNew<vtkTable> tableCopy;
+  tableCopy->DeepCopy(this->GetTable());
+  for (vtkIdType row = this->GetTable()->GetNumberOfRows() - 1; row > -1; --row)
     {
-    return;
+    this->GetTable()->RemoveRow(row);
     }
 
-  // get array of node names from the tree
-  vtkStringArray *nodeNames = vtkStringArray::SafeDownCast(
-    this->LayoutTree->GetVertexData()->GetAbstractArray("node name"));
-
-  // leave a small amount of space between the tree, the table,
-  // and the row/column labels
-  spacing = this->CellWidth * 0.25;
-
-  bool canDrawText = this->SetupTextProperty(painter);
-
-  if (this->Table->GetNumberOfRows() == 0)
-    {
-    // special case for tree with no table
-    // draw labels for the leaf nodes
-    xStart = this->TreeMaxX + spacing;
-
-    if (!canDrawText || this->SceneBottomLeft[0] > xStart ||
-        this->SceneTopRight[0] < xStart)
-      {
-      return;
-      }
-
-    for (vtkIdType vertex = 0; vertex < this->LayoutTree->GetNumberOfVertices();
-         ++vertex)
-      {
-      if (!this->LayoutTree->IsLeaf(vertex))
-        {
-        continue;
-        }
-      double point[3];
-      this->LayoutTree->GetPoint(vertex, point);
-      std::string nodeName = nodeNames->GetValue(vertex);
-      yStart = point[1] * this->MultiplierY;
-      if (this->SceneBottomLeft[1] < yStart && this->SceneTopRight[1] > yStart)
-        {
-        painter->DrawString(xStart, yStart, nodeName);
-        }
-      }
-    return;
-    }
+  // get the names of the vertices in our tree.
+  vtkStringArray *vertexNames = vtkStringArray::SafeDownCast(
+    this->GetTree()->GetVertexData()->GetAbstractArray("node name"));
 
   // get array of row names from the table.  We assume this is the first row.
-  vtkStringArray *tableNames = vtkStringArray::SafeDownCast(
-    this->Table->GetColumn(0));
-
-  this->RowMap.clear();
-  this->RowMap.assign(this->NumberOfLeafNodes, -1);
-  this->HeatmapMinX = this->TreeMaxX + spacing;
-  this->HeatmapMaxX = this->TreeMaxX + spacing +
-    this->CellWidth * (this->Table->GetNumberOfColumns() - 1);
-  this->HeatmapMinY = VTK_DOUBLE_MAX;
-  this->HeatmapMaxY = VTK_DOUBLE_MIN;
-  xStart = this->TreeMaxX + spacing * 2 +
-    this->CellWidth * (this->Table->GetNumberOfColumns() - 1);
-  bool drawRowLabels = canDrawText;
-  if (this->SceneBottomLeft[0] > xStart || this->SceneTopRight[0] < xStart)
-    {
-    drawRowLabels = false;
-    }
+  vtkStringArray *rowNames = vtkStringArray::SafeDownCast(
+    tableCopy->GetColumn(0));
 
-  for (vtkIdType vertex = 0; vertex < this->LayoutTree->GetNumberOfVertices();
+  for (vtkIdType vertex = 0; vertex < this->GetTree()->GetNumberOfVertices();
        ++vertex)
     {
-    if (!this->LayoutTree->IsLeaf(vertex))
-      {
-      continue;
-      }
-
-    // For now, we don't draw a heatmap row for a pruned branch.
-    vtkIdType originalId = this->GetOriginalId(vertex);
-    if (vertexIsPruned->GetValue(originalId) > 0)
+    if (!this->GetTree()->IsLeaf(vertex))
       {
       continue;
       }
 
-    // determine which row we're drawing
-    double point[3];
-    this->LayoutTree->GetPoint(vertex, point);
-    int currentRow = floor(point[1] * this->MultiplierY / this->CellHeight + 0.5);
-
     // find the row in the table that corresponds to this vertex
-    std::string nodeName = nodeNames->GetValue(vertex);
-    vtkIdType tableRow = tableNames->LookupValue(nodeName);
+    std::string vertexName = vertexNames->GetValue(vertex);
+    vtkIdType tableRow = rowNames->LookupValue(vertexName);
     if (tableRow < 0)
       {
+      vtkIdType newRowNum = this->GetTable()->InsertNextBlankRow();
+      this->GetTable()->SetValue(newRowNum, 0, vtkVariant(vertexName));
+      this->Heatmap->MarkRowAsBlank(vertexName);
       continue;
       }
 
-    this->RowMap[currentRow] = tableRow;
-
-    for (vtkIdType column = 1; column < this->Table->GetNumberOfColumns();
-         ++column)
-      {
-      // get the color for this cell from the lookup table
-      double color[3];
-      if (this->Table->GetValue(tableRow, column).IsString())
-        {
-        // get the string to int mapping for this column
-        std::map< std::string, double> stringToDouble =
-          this->StringToDoubleMaps[column];
-
-        // get the integer value for the current string
-        std::string cellStr =
-          this->Table->GetValue(tableRow, column).ToString();
-        double colorKey = stringToDouble[cellStr];
-
-        // now we can lookup the appropriate color for this string
-        this->LookupTables[column-1]->GetColor(colorKey, color);
-        }
-      else
-        {
-        vtkVariant value = this->Table->GetValue(tableRow, column);
-        this->LookupTables[column-1]->GetColor(value.ToDouble(), color);
-        }
-      painter->GetBrush()->SetColorF(color[0], color[1], color[2]);
-
-      // draw this cell of the table
-      xStart = this->HeatmapMinX + this->CellWidth * (column - 1);
-      yStart = point[1] * this->MultiplierY - (this->CellHeight / 2);
-      if (this->LineIsVisible(xStart, yStart, xStart + this->CellWidth,
-                              yStart + this->CellHeight) ||
-          this->LineIsVisible(xStart, yStart + this->CellHeight,
-                              xStart + this->CellWidth, yStart))
-        {
-        painter->DrawRect(xStart, yStart, this->CellWidth, this->CellHeight);
-        }
-
-      // keep track of where the top and bottom of the table is.
-      // this is used to position column labels and tool tips.
-      if (yStart + this->CellHeight > this->HeatmapMaxY)
-        {
-        this->HeatmapMaxY = yStart + this->CellHeight;
-        }
-      if (yStart < this->HeatmapMinY)
-        {
-        this->HeatmapMinY = yStart;
-        }
-      }
-
-    // draw the label for this row
-    if (drawRowLabels)
-      {
-      xStart = this->TreeMaxX + spacing * 2 +
-        this->CellWidth * (this->Table->GetNumberOfColumns() - 1);
-      yStart = point[1] * this->MultiplierY;
-      if (this->SceneBottomLeft[1] < yStart && this->SceneTopRight[1] > yStart)
-        {
-        painter->DrawString(xStart, yStart, nodeName);
-        }
-      }
+    // copy it back into our original table
+    this->GetTable()->InsertNextRow(tableCopy->GetRow(tableRow));
     }
 
-  // special case for when we've collapsed away the top row of the heatmap
-  if (this->HeatmapMaxY < this->TreeMaxY + this->CellHeight / 2)
+  if (this->Orientation == vtkDendrogramItem::DOWN_TO_UP ||
+      this->Orientation == vtkDendrogramItem::UP_TO_DOWN)
     {
-    this->HeatmapMaxY = this->TreeMaxY + this->CellHeight / 2;
+    this->ReverseTableColumns();
     }
-
-  // draw column labels
-  yStart = this->HeatmapMaxY + spacing;
-  if (canDrawText && this->SceneBottomLeft[1] < yStart &&
-      this->SceneTopRight[1] > yStart)
+  if (this->Orientation == vtkDendrogramItem::RIGHT_TO_LEFT ||
+      this->Orientation == vtkDendrogramItem::DOWN_TO_UP)
     {
-    painter->GetTextProp()->SetOrientation(90);
-    for (vtkIdType column = 1; column < this->Table->GetNumberOfColumns();
-         ++column)
-      {
-      std::string columnName = this->Table->GetColumn(column)->GetName();
-      xStart =
-        this->HeatmapMinX + this->CellWidth * column - this->CellWidth / 2;
-      if (this->SceneBottomLeft[0] < xStart && this->SceneTopRight[0] > xStart)
-        {
-        painter->DrawString(xStart, yStart, columnName);
-        }
-      }
+    this->ReverseTableRows();
     }
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::PaintHeatmapWithoutTree(vtkContext2D *painter)
+void vtkTreeHeatmapItem::ReverseTableRows()
 {
-  // leave a small amount of space between the tree, the table,
-  // and the row/column labels
-  double spacing = this->CellWidth * 0.25;
-
-  // get array of row names from the table.  We assume this is the first row.
-  vtkStringArray *tableNames = vtkStringArray::SafeDownCast(
-    this->Table->GetColumn(0));
-
-  // calculate a font size that's appropriate for this zoom level
-  this->SetupTextProperty(painter);
-
-  double xStart, yStart;
-  this->HeatmapMinX = 0;
-  this->HeatmapMaxX = this->CellWidth * (this->Table->GetNumberOfColumns() - 1);
-  this->HeatmapMinY = VTK_DOUBLE_MAX;
-  this->HeatmapMaxY = VTK_DOUBLE_MIN;
-
-  for (vtkIdType row = 0; row < this->Table->GetNumberOfRows();
-       ++row)
+  // make a copy of our table and then empty out the original.
+  vtkNew<vtkTable> tableCopy;
+  tableCopy->DeepCopy(this->GetTable());
+  for (vtkIdType row = tableCopy->GetNumberOfRows() - 1; row > -1; --row)
     {
-    for (vtkIdType column = 1; column < this->Table->GetNumberOfColumns();
-         ++column)
-      {
-      // get the color for this cell from the lookup table
-      double color[3];
-      if (this->Table->GetValue(row, column).IsString())
-        {
-        // get the string to int mapping for this column
-        std::map< std::string, double> stringToDouble =
-          this->StringToDoubleMaps[column];
-
-        // get the integer value for the current string
-        std::string cellStr = this->Table->GetValue(row, column).ToString();
-        double colorKey = stringToDouble[cellStr];
-
-        // now we can lookup the appropriate color for this string
-        this->LookupTables[column-1]->GetColor(colorKey, color);
-        }
-      else
-        {
-        vtkVariant value = this->Table->GetValue(row, column);
-        this->LookupTables[column-1]->GetColor(value.ToDouble(), color);
-        }
-      painter->GetBrush()->SetColorF(color[0], color[1], color[2]);
-
-      // draw this cell of the table
-      xStart = this->CellWidth * (column - 1);
-      yStart = this->CellHeight * row;
-      painter->DrawRect(xStart, yStart, this->CellWidth, this->CellHeight);
-
-      // keep track of where the top of the table is, so we know where to
-      // draw the column labels later.
-      if (yStart + this->CellHeight > this->HeatmapMaxY)
-        {
-        this->HeatmapMaxY = yStart + this->CellHeight;
-        }
-      if (yStart < this->HeatmapMinY)
-        {
-        this->HeatmapMinY = yStart;
-        }
-      }
-
-    // draw the label for this row
-    std::string rowLabel = tableNames->GetValue(row);
-    xStart = spacing * 2 + this->CellWidth * (this->Table->GetNumberOfColumns() - 1);
-    yStart = this->CellHeight * row + this->CellHeight / 2;
-    painter->DrawString(xStart, yStart, rowLabel);
+    this->GetTable()->RemoveRow(row);
     }
 
-  // draw column labels
-  painter->GetTextProp()->SetOrientation(90);
-  for (vtkIdType column = 1; column < this->Table->GetNumberOfColumns();
-       ++column)
+  // re-insert the rows back into our original table in reverse order
+  for (vtkIdType tableRow = tableCopy->GetNumberOfRows() - 1; tableRow >= 0;
+       --tableRow)
     {
-      std::string columnName = this->Table->GetColumn(column)->GetName();
-      xStart = this->CellWidth * column - this->CellWidth / 2;
-      yStart = this->HeatmapMaxY + spacing;
-      painter->DrawString(xStart, yStart, columnName);
+    this->GetTable()->InsertNextRow(tableCopy->GetRow(tableRow));
     }
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::UpdateVisibleSceneExtent(vtkContext2D *painter)
+void vtkTreeHeatmapItem::ReverseTableColumns()
 {
-  float position[2];
-  painter->GetTransform()->GetPosition(position);
-  this->SceneBottomLeft[0] = -position[0];
-  this->SceneBottomLeft[1] = -position[1];
-  this->SceneBottomLeft[2] = 0.0;
-
-  this->SceneTopRight[0] =
-    static_cast<double>(this->GetScene()->GetSceneWidth() - position[0]);
-  this->SceneTopRight[1] =
-    static_cast<double>(this->GetScene()->GetSceneHeight() - position[1]);
-  this->SceneTopRight[2] = 0.0;
-  vtkNew<vtkMatrix3x3> inverse;
-  painter->GetTransform()->GetInverse(inverse.GetPointer());
-  inverse->MultiplyPoint(this->SceneBottomLeft, this->SceneBottomLeft);
-  inverse->MultiplyPoint(this->SceneTopRight, this->SceneTopRight);
-}
-
-//-----------------------------------------------------------------------------
-bool vtkTreeHeatmapItem::LineIsVisible(double x0, double y0,
-                                        double x1, double y1)
-{
-  // use local variables to improve readibility
-  double xMinScene = this->SceneBottomLeft[0];
-  double yMinScene = this->SceneBottomLeft[1];
-  double xMaxScene = this->SceneTopRight[0];
-  double yMaxScene = this->SceneTopRight[1];
-
-  // if either end point of the line segment falls within the screen,
-  // then the line segment is visible.
-  if ( (xMinScene <= x0 && xMaxScene >= x0 &&
-        yMinScene <= y0 && yMaxScene >= y0) ||
-       (xMinScene <= x1 && xMaxScene >= x1 &&
-        yMinScene <= y1 && yMaxScene >= y1) )
-    {
-    return true;
-    }
-
-  // figure out which end point is "greater" than the other in both dimensions
-  double xMinLine, xMaxLine, yMinLine, yMaxLine;
-  if (x0 < x1)
-    {
-    xMinLine = x0;
-    xMaxLine = x1;
-    }
-  else
+  // make a copy of our table and then empty out the original.
+  vtkNew<vtkTable> tableCopy;
+  tableCopy->DeepCopy(this->GetTable());
+  for (vtkIdType col = tableCopy->GetNumberOfColumns() - 1; col > 0; --col)
     {
-    xMinLine = x1;
-    xMaxLine = x0;
-    }
-  if (y0 < y1)
-    {
-    yMinLine = y0;
-    yMaxLine = y1;
-    }
-  else
-    {
-    yMinLine = y1;
-    yMaxLine = y0;
+    this->GetTable()->RemoveColumn(col);
     }
 
-  // case where the Y range of the line falls within the visible scene
-  // and the X range of the line contains the entire visible scene
-  if (yMinScene <= yMinLine && yMaxScene >= yMinLine &&
-      yMinScene <= yMaxLine && yMaxScene >= yMaxLine &&
-      xMinLine <= xMinScene && xMaxLine >= xMaxScene)
+  // re-insert the columns back into our original table in reverse order
+  for (vtkIdType col = tableCopy->GetNumberOfColumns() - 1; col >= 1; --col)
     {
-    return true;
+    this->GetTable()->AddColumn(tableCopy->GetColumn(col));
     }
+}
 
-  // case where the X range of the line falls within the visible scene
-  // and the Y range of the line contains the entire visible scene
-  if (xMinScene <= xMinLine && xMaxScene >= xMinLine &&
-      xMinScene <= xMaxLine && xMaxScene >= xMaxLine &&
-      yMinLine <= yMinScene && yMaxLine >= yMaxScene)
-    {
-    return true;
+//-----------------------------------------------------------------------------
+bool vtkTreeHeatmapItem::Paint(vtkContext2D *painter)
+{
+  this->Dendrogram->Paint(painter);
+
+  double treeBounds[4];
+  this->Dendrogram->GetBounds(treeBounds);
+  double spacing = this->Dendrogram->GetLeafSpacing() / 2.0;
+
+  double heatmapStartX, heatmapStartY;
+
+  switch (this->Orientation)
+    {
+    case vtkDendrogramItem::UP_TO_DOWN:
+      heatmapStartX = treeBounds[0] - spacing;
+      heatmapStartY = treeBounds[2] - (this->GetTable()->GetNumberOfColumns() - 1) *
+                      this->Heatmap->GetCellWidth() - spacing;
+      break;
+    case vtkDendrogramItem::DOWN_TO_UP:
+      heatmapStartX = treeBounds[0] - spacing;
+      heatmapStartY = treeBounds[3] + spacing;
+      break;
+    case vtkDendrogramItem::RIGHT_TO_LEFT:
+      heatmapStartX = treeBounds[0] - (this->GetTable()->GetNumberOfColumns() - 1) *
+                      this->Heatmap->GetCellWidth() - spacing;
+      heatmapStartY = treeBounds[2] - spacing;
+      break;
+    case vtkDendrogramItem::LEFT_TO_RIGHT:
+    default:
+      heatmapStartX = treeBounds[1] + spacing;
+      heatmapStartY = treeBounds[2] - spacing;
+      break;
+    }
+  this->Heatmap->SetPosition(heatmapStartX, heatmapStartY);
+  this->Heatmap->Paint(painter);
+
+  if (this->ColumnDendrogram->GetVisible())
+    {
+    double columnTreeStartX, columnTreeStartY;
+
+    double heatmapBounds[4];
+    this->Heatmap->GetBounds(heatmapBounds);
+
+    this->ColumnDendrogram->PrepareToPaint(painter);
+    this->ColumnDendrogram->GetBounds(treeBounds);
+
+    float offset = 0.0;
+    if (this->Heatmap->GetRowLabelWidth() > 0.0)
+      {
+      offset = this->Heatmap->GetRowLabelWidth() + spacing;
+      }
+    switch (this->Orientation)
+      {
+      case vtkDendrogramItem::UP_TO_DOWN:
+        columnTreeStartX = heatmapBounds[1] + (treeBounds[1] - treeBounds[0]) +
+          spacing;
+        columnTreeStartY = heatmapBounds[3] -
+          this->ColumnDendrogram->GetLeafSpacing() / 2.0;
+        break;
+      case vtkDendrogramItem::DOWN_TO_UP:
+        columnTreeStartX = heatmapBounds[1] + (treeBounds[1] - treeBounds[0]) +
+          spacing;
+        columnTreeStartY = heatmapBounds[3] - offset -
+          this->ColumnDendrogram->GetLeafSpacing() / 2.0;
+        break;
+      case vtkDendrogramItem::RIGHT_TO_LEFT:
+        columnTreeStartX = heatmapBounds[0] + offset +
+          this->ColumnDendrogram->GetLeafSpacing() / 2.0;
+        columnTreeStartY = heatmapBounds[3] + spacing +
+          (treeBounds[3] - treeBounds[2]);
+        break;
+      case vtkDendrogramItem::LEFT_TO_RIGHT:
+      default:
+        columnTreeStartX = heatmapBounds[0] +
+          this->ColumnDendrogram->GetLeafSpacing() / 2.0;
+        columnTreeStartY = heatmapBounds[3] + spacing +
+          (treeBounds[3] - treeBounds[2]);
+        break;
+      }
+
+    this->ColumnDendrogram->SetPosition(columnTreeStartX, columnTreeStartY);
+    this->ColumnDendrogram->Paint(painter);
     }
 
-  return false;
+  return true;
 }
 
 //-----------------------------------------------------------------------------
-bool vtkTreeHeatmapItem::SetupTextProperty(vtkContext2D *painter)
+bool vtkTreeHeatmapItem::MouseDoubleClickEvent(
+  const vtkContextMouseEvent &event)
 {
-  // set up our text property to draw row names
-  painter->GetTextProp()->SetColor(0.0, 0.0, 0.0);
-  painter->GetTextProp()->SetJustificationToLeft();
-  painter->GetTextProp()->SetVerticalJustificationToCentered();
-  painter->GetTextProp()->SetOrientation(0);
-
-  // calculate an appropriate font size
-  float stringBounds[4];
-  stringBounds[3] = VTK_FLOAT_MAX;
-  std::string testString = "Igq"; //selected for range of height
-  int currentFontSize = floor(this->CellHeight);
-  if (currentFontSize > 500)
-    {
-    currentFontSize = 500;
-    }
-  painter->GetTextProp()->SetFontSize(currentFontSize);
-  painter->ComputeStringBounds(testString, stringBounds);
-  if (stringBounds[3] > this->CellWidth || stringBounds[3] > this->CellHeight)
+  bool treeChanged = this->Dendrogram->MouseDoubleClickEvent(event);
+
+  // update the heatmap if a subtree just collapsed or expanded.
+  if (treeChanged)
     {
-    while (currentFontSize > 0 &&
-            (stringBounds[3] > this->CellWidth ||
-             stringBounds[3] > this->CellHeight))
-      {
-      --currentFontSize;
-      if (currentFontSize < 8)
-      {
-        return false;
-      }
-      painter->GetTextProp()->SetFontSize(currentFontSize);
-      painter->ComputeStringBounds(testString, stringBounds);
-      }
+    this->CollapseHeatmapRows();
     }
   else
     {
-      while (stringBounds[3] < this->CellWidth &&
-             stringBounds[3] < this->CellHeight && currentFontSize < 500)
+    treeChanged = this->ColumnDendrogram->MouseDoubleClickEvent(event);
+    if (treeChanged)
       {
-      ++currentFontSize;
-      painter->GetTextProp()->SetFontSize(currentFontSize);
-      painter->ComputeStringBounds(testString, stringBounds);
+      this->CollapseHeatmapColumns();
       }
-    --currentFontSize;
-    painter->GetTextProp()->SetFontSize(currentFontSize);
     }
-  return true;
+  return treeChanged;
 }
 
 //-----------------------------------------------------------------------------
-bool vtkTreeHeatmapItem::MouseMoveEvent(const vtkContextMouseEvent &event)
+void vtkTreeHeatmapItem::CollapseHeatmapRows()
 {
-  if (event.GetButton() == vtkContextMouseEvent::NO_BUTTON)
+  vtkBitArray *collapsedRowsArray = vtkBitArray::SafeDownCast(
+    this->GetTable()->GetFieldData()->GetArray("collapsed rows"));
+
+  vtkStringArray *vertexNames = vtkStringArray::SafeDownCast(
+    this->Dendrogram->GetPrunedTree()->GetVertexData()
+    ->GetAbstractArray("node name"));
+
+  vtkStringArray *rowNames = vtkStringArray::SafeDownCast(
+    this->GetTable()->GetColumn(0));
+
+  for (vtkIdType row = 0; row < this->GetTable()->GetNumberOfRows(); ++row)
     {
-    float pos[3];
-    vtkNew<vtkMatrix3x3> inverse;
-    pos[0] = event.GetPos().GetX();
-    pos[1] = event.GetPos().GetY();
-    pos[2] = 0;
-    this->GetScene()->GetTransform()->GetInverse(inverse.GetPointer());
-    inverse->MultiplyPoint(pos, pos);
-    if (pos[0] <= this->HeatmapMaxX && pos[0] >= this->HeatmapMinX &&
-        pos[1] <= this->HeatmapMaxY && pos[1] >= this->HeatmapMinY)
+    std::string name = rowNames->GetValue(row);
+    // if we can't find this name in the layout tree, then the corresponding
+    // row in the heatmap should be marked as collapsed.
+    if (vertexNames->LookupValue(name) == -1)
       {
-      this->Tooltip->SetPosition(pos[0], pos[1]);
-
-      std::string tooltipText = this->GetTooltipText(pos[0], pos[1]);
-      if (tooltipText.compare("") != 0)
-        {
-        this->Tooltip->SetText(tooltipText);
-        this->Tooltip->SetVisible(true);
-        this->Scene->SetDirty(true);
-        }
-      return true;
+      collapsedRowsArray->SetValue(row, 1);
       }
     else
       {
-      bool shouldRepaint = this->Tooltip->GetVisible();
-      this->Tooltip->SetVisible(false);
-      if (shouldRepaint)
-        {
-        this->Scene->SetDirty(true);
-        }
+      collapsedRowsArray->SetValue(row, 0);
       }
     }
-  return false;
 }
 
 //-----------------------------------------------------------------------------
-std::string vtkTreeHeatmapItem::GetTooltipText(float x, float y)
+void vtkTreeHeatmapItem::CollapseHeatmapColumns()
 {
-  vtkIdType column = floor((x - this->HeatmapMinX) / this->CellWidth);
-  int sceneRow = floor(y / this->CellHeight + 0.5);
+  vtkBitArray *collapsedColumnsArray = vtkBitArray::SafeDownCast(
+    this->GetTable()->GetFieldData()->GetArray("collapsed columns"));
 
-  if (this->Tree->GetNumberOfVertices() > 0)
-    {
-    int dataRow = this->RowMap[sceneRow];
-    if (dataRow != -1)
-      {
-      return this->Table->GetValue(dataRow, column + 1).ToString();
-      }
-    return "";
-    }
-  return this->Table->GetValue(sceneRow, column + 1).ToString();
-}
+  vtkStringArray *vertexNames = vtkStringArray::SafeDownCast(
+    this->ColumnDendrogram->GetPrunedTree()->GetVertexData()
+    ->GetAbstractArray("node name"));
 
-//-----------------------------------------------------------------------------
-bool vtkTreeHeatmapItem::MouseDoubleClickEvent(
-  const vtkContextMouseEvent &event)
-{
-  // get the position of the double click and convert it to scene coordinates
-  double pos[3];
-  vtkNew<vtkMatrix3x3> inverse;
-  pos[0] = event.GetPos().GetX();
-  pos[1] = event.GetPos().GetY();
-  pos[2] = 0;
-  this->GetScene()->GetTransform()->GetInverse(inverse.GetPointer());
-  inverse->MultiplyPoint(pos, pos);
-
-  // this event is only captured within the tree (not the heatmap)
-  if (pos[0] <= this->TreeMaxX && pos[0] >= this->TreeMinX)
+  for (vtkIdType col = 1; col < this->GetTable()->GetNumberOfColumns(); ++col)
     {
-    vtkIdType collapsedSubTree =
-      this->GetClickedCollapsedSubTree(pos[0], pos[1]);
-    if (collapsedSubTree != -1)
+    std::string name = this->GetTable()->GetColumn(col)->GetName();
+
+    // if we can't find this name in the layout tree, then the corresponding
+    // column in the heatmap should be marked as collapsed.
+    if (vertexNames->LookupValue(name) == -1)
       {
-      // re-expand the subtree rooted at this vertex
-      this->ExpandSubTree(collapsedSubTree);
+      collapsedColumnsArray->SetValue(col, 1);
       }
     else
       {
-      // collapse the subtree rooted at this vertex
-      vtkIdType closestVertex =
-        this->GetClosestVertex(pos[0] / this->MultiplierX,
-                               pos[1] / this->MultiplierY);
-      this->CollapseSubTree(closestVertex);
+      collapsedColumnsArray->SetValue(col, 0);
       }
-
-    this->Scene->SetDirty(true);
-    return true;
     }
-
-  return false;
 }
 
 //-----------------------------------------------------------------------------
-vtkIdType vtkTreeHeatmapItem::GetClickedCollapsedSubTree(double x, double y)
+void vtkTreeHeatmapItem::SetOrientation(int orientation)
 {
-  // iterate over all the collapsed subtrees to see if this click refers
-  // to one of them.
-  vtkUnsignedIntArray *vertexIsPruned = vtkUnsignedIntArray::SafeDownCast(
-    this->Tree->GetVertexData()->GetArray("VertexIsPruned"));
-  vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
-    this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
-  for (vtkIdType originalId = 0;
-       originalId < vertexIsPruned->GetNumberOfTuples(); ++originalId)
-    {
-    if (vertexIsPruned->GetValue(originalId) > 0)
-      {
-      // Find PrunedTree's vertex that corresponds to this originalId.
-      for (vtkIdType prunedId = 0;
-           prunedId < originalIdArray->GetNumberOfTuples(); ++prunedId)
-        {
-        if (originalIdArray->GetValue(prunedId) == originalId)
-          {
-          // determined where this collapsed subtree is rooted.
-          double point[3];
-          this->LayoutTree->GetPoint(prunedId, point);
-
-          // we also need the location of this node's parent
-          double parentPoint[3];
-          this->LayoutTree->GetPoint(
-            this->LayoutTree->GetParent(prunedId), parentPoint);
-
-          // proper height (Y) range: within +/- CellHeight of the vertex's
-          // Y value.
-          float yMin = point[1] * this->MultiplierY - this->CellHeight / 2;
-          float yMax = point[1] * this->MultiplierY + this->CellHeight / 2;
-          if (y >= yMin && y <= yMax)
-            {
-            //proper width (X) range: >= parent's X value.
-            if (x >= parentPoint[0])
-              {
-              return prunedId;
-              }
-            }
-
-          break;
-          }
-        }
-      }
-    }
-  return -1;
-}
+  int previousOrientation = this->Orientation;
+  this->Orientation = orientation;
+  this->Dendrogram->SetOrientation(this->Orientation);
+  this->Heatmap->SetOrientation(this->Orientation);
 
-//-----------------------------------------------------------------------------
-vtkIdType vtkTreeHeatmapItem::GetClosestVertex(double x, double y)
-{
-  double minDistance = VTK_DOUBLE_MAX;
-  vtkIdType closestVertex = -1;
-  for (vtkIdType vertex = 0; vertex < this->LayoutTree->GetNumberOfVertices();
-       ++vertex)
+  if (this->Orientation == vtkDendrogramItem::LEFT_TO_RIGHT ||
+      this->Orientation == vtkDendrogramItem::RIGHT_TO_LEFT)
     {
-    double point[3];
-    this->LayoutTree->GetPoint(vertex, point);
-    double distance = sqrt( (x - point[0]) * (x - point[0]) +
-                            (y - point[1]) * (y - point[1]) );
-
-    if (distance < minDistance)
-      {
-      minDistance = distance;
-      closestVertex = vertex;
-      }
+    this->ColumnDendrogram->SetOrientation(vtkDendrogramItem::UP_TO_DOWN);
     }
-  return closestVertex;
-}
-
-//-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::CollapseSubTree(vtkIdType vertex)
-{
-  // no removing the root of the tree
-  if (vertex == this->PrunedTree->GetRoot())
+  else
     {
-    return;
+    this->ColumnDendrogram->SetOrientation(vtkDendrogramItem::RIGHT_TO_LEFT);
     }
 
-  // look up the original ID of the vertex that's being collapsed.
-  vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
-    this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
-  vtkIdType originalId = originalIdArray->GetValue(vertex);
-
-  // use this value as the index to the original (un-reindexed) tree's
-  // "VertexIsPruned" array.  Mark that vertex as pruned by recording
-  // how many collapsed leaf nodes exist beneath it.
-  int numLeavesCollapsed = this->CountLeafNodes(originalId);
-  // no collapsing of leaf nodes
-  if (numLeavesCollapsed == 0)
+  // reverse our table if we're changing from a "not backwards" orientation
+  // to one that is backwards.
+  if ( (this->Orientation == vtkDendrogramItem::UP_TO_DOWN ||
+        this->Orientation == vtkDendrogramItem::DOWN_TO_UP) &&
+       (previousOrientation != vtkDendrogramItem::UP_TO_DOWN &&
+        previousOrientation != vtkDendrogramItem::DOWN_TO_UP) )
     {
-    return;
+    this->ReverseTableColumns();
     }
-  vtkUnsignedIntArray *vertexIsPruned = vtkUnsignedIntArray::SafeDownCast(
-    this->Tree->GetVertexData()->GetArray("VertexIsPruned"));
-  vertexIsPruned->SetValue(originalId, numLeavesCollapsed);
-
-  vtkNew<vtkTree> prunedTreeCopy;
-  prunedTreeCopy->ShallowCopy(this->PrunedTree);
-
-  this->PruneFilter->SetInputData(prunedTreeCopy.GetPointer());
-  this->PruneFilter->SetParentVertex(vertex);
-  this->PruneFilter->Update();
-  this->PrunedTree = this->PruneFilter->GetOutput();
-  this->JustCollapsedOrExpanded = true;
-}
-
-//-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::ExpandSubTree(vtkIdType vertex)
-{
-  // mark this vertex as "not pruned"
-  vtkUnsignedIntArray *vertexIsPruned = vtkUnsignedIntArray::SafeDownCast(
-    this->Tree->GetVertexData()->GetArray("VertexIsPruned"));
-  vtkIdType vertexOriginalId = this->GetOriginalId(vertex);
-  vertexIsPruned->SetValue(vertexOriginalId, 0);
-
-  // momentarily revert PrunedTree to the full (unpruned) Tree.
-  this->PrunedTree->DeepCopy(this->Tree);
-
-  // re-prune as necessary.  this->Tree has the list of originalIds that
-  // need to be re-pruned.
-  for (vtkIdType originalId = 0;
-       originalId < vertexIsPruned->GetNumberOfTuples(); ++originalId)
+  if ( (this->Orientation == vtkDendrogramItem::RIGHT_TO_LEFT ||
+        this->Orientation == vtkDendrogramItem::DOWN_TO_UP) &&
+       (previousOrientation != vtkDendrogramItem::RIGHT_TO_LEFT &&
+        previousOrientation != vtkDendrogramItem::DOWN_TO_UP) )
     {
-    if (vertexIsPruned->GetValue(originalId) > 0)
-      {
-      // Find PrunedTree's vertex that corresponds to this originalId.
-      // Use this to re-collapse the subtrees that were not just expanded.
-      vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
-        this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
-      for (vtkIdType prunedId = 0;
-           prunedId < originalIdArray->GetNumberOfTuples(); ++prunedId)
-        {
-        if (originalIdArray->GetValue(prunedId) == originalId)
-          {
-          this->CollapseSubTree(prunedId);
-          break;
-          }
-        }
-      }
+    this->ReverseTableRows();
     }
-  this->JustCollapsedOrExpanded = true;
 }
 
 //-----------------------------------------------------------------------------
-vtkIdType vtkTreeHeatmapItem::GetOriginalId(vtkIdType vertex)
+int vtkTreeHeatmapItem::GetOrientation()
 {
-  vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
-    this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
-  return originalIdArray->GetValue(vertex);
+  return this->Orientation;
 }
 
 //-----------------------------------------------------------------------------
-vtkIdType vtkTreeHeatmapItem::GetPrunedIdForOriginalId(vtkIdType originalId)
+void vtkTreeHeatmapItem::GetBounds(double bounds[4])
 {
-  vtkIdTypeArray *originalIdArray = vtkIdTypeArray::SafeDownCast(
-    this->PrunedTree->GetVertexData()->GetArray("OriginalId"));
-  for (vtkIdType i = 0; i < originalIdArray->GetNumberOfTuples(); ++i)
+  double treeBounds[4] =
+    {VTK_DOUBLE_MAX, VTK_DOUBLE_MIN, VTK_DOUBLE_MAX, VTK_DOUBLE_MIN};
+  if (this->GetTree()->GetNumberOfVertices() > 0)
     {
-    if (originalIdArray->GetValue(i) == originalId)
-      {
-      return i;
-      }
+    this->Dendrogram->GetBounds(treeBounds);
     }
-  return -1;
-}
 
-// this struct & class allow us to generate a priority queue of vertices.
-struct WeightedVertex
-{
-  vtkIdType ID;
-  double weight;
-};
-class CompareWeightedVertices
-{
-  public:
-  // Returns true if v2 is higher priority than v1
-  bool operator()(WeightedVertex& v1, WeightedVertex& v2)
-  {
-  if (v1.weight < v2.weight)
+  double tableBounds[4] =
+    {VTK_DOUBLE_MAX, VTK_DOUBLE_MIN, VTK_DOUBLE_MAX, VTK_DOUBLE_MIN};
+  if (this->GetTable()->GetNumberOfRows() > 0)
     {
-    return false;
+    this->Heatmap->GetBounds(tableBounds);
     }
-   return true;
-  }
-};
 
-//-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::CollapseToNumberOfLeafNodes(unsigned int n)
-{
-  // check that the number requested is actually smaller than the number of
-  // leaf nodes in the tree.
-  unsigned int numLeaves = this->CountLeafNodes(this->Tree->GetRoot());
-  if (n >= numLeaves)
+  double columnTreeBounds[4] =
+    {VTK_DOUBLE_MAX, VTK_DOUBLE_MIN, VTK_DOUBLE_MAX, VTK_DOUBLE_MIN};
+  if (this->ColumnDendrogram->GetTree() != NULL)
     {
-    vtkWarningMacro( << "n >= total leaf nodes" );
-    return;
+    this->ColumnDendrogram->GetBounds(columnTreeBounds);
     }
 
-  // reset pruned tree to contain the entire input tree
-  this->PrunedTree->DeepCopy(this->Tree);
-
-  // Initialize a priority queue of vertices based on their weight.
-  // Vertices with lower weight (closer to the root) have a higher priority.
-  std::priority_queue<WeightedVertex, std::vector<WeightedVertex>,
-                      CompareWeightedVertices> queue;
-  std::vector<vtkIdType> verticesToCollapse;
-  vtkDoubleArray *nodeWeights = vtkDoubleArray::SafeDownCast(
-    this->Tree->GetVertexData()->GetAbstractArray("true node weight"));
-  if (nodeWeights == NULL)
-    {
-    nodeWeights = vtkDoubleArray::SafeDownCast(
-      this->Tree->GetVertexData()->GetAbstractArray("node weight"));
-    }
+  double xMin, xMax, yMin, yMax;
 
-  // initially, the priority queue contains the children of the root node.
-  vtkIdType root = this->Tree->GetRoot();
-  for (vtkIdType child = 0; child < this->Tree->GetNumberOfChildren(root);
-       ++child)
-    {
-    vtkIdType childVertex = this->Tree->GetChild(root, child);
-    WeightedVertex v = {childVertex, nodeWeights->GetValue(childVertex)};
-    queue.push(v);
-    }
+  xMin = std::min(std::min(treeBounds[0], tableBounds[0]), columnTreeBounds[0]);
+  xMax = std::max(std::max(treeBounds[1], tableBounds[1]), columnTreeBounds[1]);
+  yMin = std::min(std::min(treeBounds[2], tableBounds[2]), columnTreeBounds[2]);
+  yMax = std::max(std::max(treeBounds[3], tableBounds[3]), columnTreeBounds[3]);
 
-  // use the priority queue to find the vertices that we should collapse.
-  unsigned int numberOfLeafNodesFound = 0;
-  while (queue.size() + numberOfLeafNodesFound < n)
-    {
-    WeightedVertex v = queue.top();
-    queue.pop();
-    if (this->Tree->GetNumberOfChildren(v.ID) == 0)
-      {
-      verticesToCollapse.push_back(v.ID);
-      ++numberOfLeafNodesFound;
-      continue;
-      }
+  bounds[0] = xMin;
+  bounds[1] = xMax;
+  bounds[2] = yMin;
+  bounds[3] = yMax;
+}
 
-    for (vtkIdType child = 0; child < this->Tree->GetNumberOfChildren(v.ID);
-         ++child)
-      {
-      vtkIdType childVertex = this->Tree->GetChild(v.ID, child);
-      WeightedVertex v2 = {childVertex, nodeWeights->GetValue(childVertex)};
-      queue.push(v2);
-      }
-    }
+//-----------------------------------------------------------------------------
+void vtkTreeHeatmapItem::GetCenter(double center[2])
+{
+  double bounds[4];
+  this->GetBounds(bounds);
 
-  // collapse the vertices that we found.
-  for (unsigned int i = 0; i < verticesToCollapse.size(); ++i)
-    {
-    vtkIdType prunedId = this->GetPrunedIdForOriginalId(verticesToCollapse[i]);
-    if (prunedId == -1)
-      {
-      vtkErrorMacro("prunedId is -1");
-      continue;
-      }
-    this->CollapseSubTree(prunedId);
-    }
-  while (!queue.empty())
-    {
-    WeightedVertex v = queue.top();
-    queue.pop();
-    vtkIdType prunedId = this->GetPrunedIdForOriginalId(v.ID);
-    if (prunedId == -1)
-      {
-      vtkErrorMacro("prunedId is -1");
-      continue;
-      }
-    this->CollapseSubTree(prunedId);
-    }
+  center[0] = bounds[0] + (bounds[1] - bounds[0]) / 2.0;
+  center[1] = bounds[2] + (bounds[3] - bounds[2]) / 2.0;
 }
 
 //-----------------------------------------------------------------------------
-void vtkTreeHeatmapItem::SetTreeColorArray(const char *arrayName)
+void vtkTreeHeatmapItem::GetSize(double size[2])
 {
-  this->TreeColorArray = vtkDoubleArray::SafeDownCast(
-    this->Tree->GetVertexData()->GetArray(arrayName));
-  if (!this->TreeColorArray)
-    {
-    vtkErrorMacro("Could not downcast " << arrayName << " to a vtkDoubleArray");
-    this->ColorTree = false;
-    return;
-    }
+  double bounds[4];
+  this->GetBounds(bounds);
 
-  this->ColorTree = true;
-
-  double minDifference = VTK_DOUBLE_MAX;
-  double maxDifference = VTK_DOUBLE_MIN;
+  size[0] = bounds[1] - bounds[0];
+  size[1] = bounds[3] - bounds[2];
+}
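
As a rough usage note (not taken from this patch), the new geometry accessors can be queried once the item has been painted; the helper name ReportGeometry is hypothetical.

  #include <iostream>
  #include "vtkTreeHeatmapItem.h"

  // Hypothetical helper: report the item's geometry in pixel coordinates.
  void ReportGeometry(vtkTreeHeatmapItem *item)
  {
    double bounds[4], center[2], size[2];
    item->GetBounds(bounds);   // xMin, xMax, yMin, yMax
    item->GetCenter(center);
    item->GetSize(size);
    std::cout << "center: (" << center[0] << ", " << center[1] << "), size: "
              << size[0] << " x " << size[1] << std::endl;
  }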
 
-  for (vtkIdType id = 0; id < this->TreeColorArray->GetNumberOfTuples(); ++id)
-    {
-    double d = this->TreeColorArray->GetValue(id);
-    if (d > maxDifference)
-      {
-      maxDifference = d;
-      }
-    if (d < minDifference)
-      {
-      minDifference = d;
-      }
-    }
+//-----------------------------------------------------------------------------
+void vtkTreeHeatmapItem::SetTreeColorArray(const char *arrayName)
+{
+  this->Dendrogram->SetColorArray(arrayName);
+}
 
-  // special case when there is no difference.  Without this, all the
-  // edges would be drawn in either red or blue.
-  if (minDifference == maxDifference)
-    {
-    this->TreeLookupTable->SetNumberOfTableValues(1);
-    this->TreeLookupTable->SetTableValue(10, 0.60, 0.60, 0.60);
-    return;
-    }
+//-----------------------------------------------------------------------------
+void vtkTreeHeatmapItem::CollapseToNumberOfLeafNodes(unsigned int n)
+{
+  this->Dendrogram->CollapseToNumberOfLeafNodes(n);
+  this->CollapseHeatmapRows();
+}
 
-  // how much we vary the colors from step to step
-  double inc = 0.06;
+//-----------------------------------------------------------------------------
+float vtkTreeHeatmapItem::GetTreeLineWidth()
+{
+  return this->Dendrogram->GetLineWidth();
+}
 
-  // setup the color lookup table.  It will contain 10 shades of red,
-  // 10 shades of blue, and a grey neutral value.
+//-----------------------------------------------------------------------------
+void vtkTreeHeatmapItem::SetTreeLineWidth(float width)
+{
+  this->Dendrogram->SetLineWidth(width);
+  this->ColumnDendrogram->SetLineWidth(width);
+}
 
-  this->TreeLookupTable->SetNumberOfTableValues(21);
-  if (abs(maxDifference) > abs(minDifference))
-    {
-    this->TreeLookupTable->SetRange(-maxDifference, maxDifference);
-    }
-  else
-    {
-    this->TreeLookupTable->SetRange(minDifference, -minDifference);
-    }
-  for (vtkIdType i = 0; i < 10; ++i)
-    {
-    this->TreeLookupTable->SetTableValue(i,
-      1.0, 0.25 + inc * i, 0.25 + inc * i);
-    }
-  this->TreeLookupTable->SetTableValue(10, 0.60, 0.60, 0.60);
-  for (vtkIdType i = 11; i < 21; ++i)
-    {
-    this->TreeLookupTable->SetTableValue(i,
-      0.85 - inc * (i - 10), 0.85 - inc * (i - 10), 1.0);
-    }
+//-----------------------------------------------------------------------------
+vtkTree * vtkTreeHeatmapItem::GetPrunedTree()
+{
+  return this->Dendrogram->GetPrunedTree();
 }
 
 //-----------------------------------------------------------------------------
@@ -1471,14 +615,6 @@ bool vtkTreeHeatmapItem::Hit(const vtkContextMouseEvent &vtkNotUsed(mouse))
 void vtkTreeHeatmapItem::PrintSelf(ostream &os, vtkIndent indent)
 {
   this->Superclass::PrintSelf(os, indent);
-  os << "Tree: " << (this->Tree ? "" : "(null)") << std::endl;
-  if (this->Tree->GetNumberOfVertices() > 0)
-    {
-    this->Tree->PrintSelf(os, indent.GetNextIndent());
-    }
-  os << "Table: " << (this->Table ? "" : "(null)") << std::endl;
-  if (this->Table->GetNumberOfRows() > 0)
-    {
-    this->Table->PrintSelf(os, indent.GetNextIndent());
-    }
+  this->Dendrogram->PrintSelf(os, indent);
+  this->Heatmap->PrintSelf(os, indent);
 }
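
For illustration, here is a minimal usage sketch of the refactored item; it is not part of the patch, the helper name ShowTreeHeatmap is made up, and `tree` and `table` are assumed to be built elsewhere (the tree carrying a "node name" vertex array whose values match the row names in column 0 of the table).

  #include "vtkContextScene.h"
  #include "vtkContextTransform.h"
  #include "vtkContextView.h"
  #include "vtkNew.h"
  #include "vtkRenderWindow.h"
  #include "vtkRenderWindowInteractor.h"
  #include "vtkTable.h"
  #include "vtkTree.h"
  #include "vtkTreeHeatmapItem.h"

  // Sketch only: render a tree + heatmap for pre-built inputs.
  void ShowTreeHeatmap(vtkTree *tree, vtkTable *table)
  {
    vtkNew<vtkTreeHeatmapItem> item;
    item->SetTree(tree);
    item->SetTable(table);

    // wrap the item in an interactive transform so it can be panned and zoomed
    vtkNew<vtkContextTransform> transform;
    transform->SetInteractive(true);
    transform->AddItem(item.GetPointer());

    vtkNew<vtkContextView> view;
    view->GetScene()->AddItem(transform.GetPointer());
    view->GetRenderWindow()->Render();
    view->GetInteractor()->Start();
  }
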
diff --git a/Views/Infovis/vtkTreeHeatmapItem.h b/Views/Infovis/vtkTreeHeatmapItem.h
index a740704..dee6f24 100644
--- a/Views/Infovis/vtkTreeHeatmapItem.h
+++ b/Views/Infovis/vtkTreeHeatmapItem.h
@@ -1,7 +1,7 @@
 /*=========================================================================
 
   Program:   Visualization Toolkit
-  Module:    TestDiagram.cxx
+  Module:    vtkTreeHeatmapItem.h
 
   Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
   All rights reserved.
@@ -28,7 +28,7 @@
 // required arrays in its output tree.
 //
 // .SEE ALSO
-// vtkTree vtkTable vtkNewickTreeReader
+// vtkDendrogramItem vtkHeatmapItem vtkTree vtkTable vtkNewickTreeReader
 
 #ifndef __vtkTreeHeatmapItem_h
 #define __vtkTreeHeatmapItem_h
@@ -41,13 +41,10 @@
 #include <vector>   // For lookup tables
 #include <map>      // For string lookup tables
 
-class vtkDoubleArray;
-class vtkGraphLayout;
-class vtkLookupTable;
+class vtkDendrogramItem;
+class vtkHeatmapItem;
 class vtkTable;
-class vtkTooltipItem;
 class vtkTree;
-class vtkPruneTreeFilter;
 
 class VTKVIEWSINFOVIS_EXPORT vtkTreeHeatmapItem : public vtkContextItem
 {
@@ -69,6 +66,17 @@ public:
   vtkTree * GetTree();
 
   // Description:
+  // Set a tree to be drawn for the columns of the heatmap.  This tree's
+  // vertex data must contain a vtkStringArray called "node name" that
+  // corresponds to the names of the columns in the heatmap.
+  virtual void SetColumnTree(vtkTree *tree);
+
+  // Description:
+  // Get the tree that represents the columns of the heatmap (if one has
+  // been set).
+  vtkTree * GetColumnTree();
+
+  // Description:
   // Set the table that this item draws.  The first column of the table
   // must contain the names of the rows.  These names, in turn, must correspond
   // with the nodes names in the input tree.  See SetTree for more information.
@@ -79,181 +87,116 @@ public:
   vtkTable * GetTable();
 
   // Description:
-  // Collapse subtrees until there are only n leaf nodes left in the tree.
-  // The leaf nodes that remain are those that are closest to the root.
-  // Any subtrees that were collapsed prior to this function being called
-  // may be re-expanded.
-  void CollapseToNumberOfLeafNodes(unsigned int n);
-
-  // Get the collapsed tree
-  vtkTree * GetPrunedTree();
+  // Get/Set the dendrogram contained by this item.
+  vtkSmartPointer<vtkDendrogramItem> GetDendrogram();
+  void SetDendrogram(vtkSmartPointer<vtkDendrogramItem>);
 
   // Description:
-  // Indicate which array within the Tree's VertexData should be used to
-  // color the tree.  The specified array must be a vtkDoubleArray.
-  // By default, the tree will be drawn in black.
-  void SetTreeColorArray(const char *arrayName);
-
-  //BTX
-  // Description:
-  // Returns true if the transform is interactive, false otherwise.
-  virtual bool Hit(const vtkContextMouseEvent &mouse);
-
-  // Description:
-  // Display a tooltip when the user mouses over a cell in the heatmap.
-  virtual bool MouseMoveEvent(const vtkContextMouseEvent &event);
-
-  // Description:
-  // Collapse or expand a subtree when the user double clicks on an
-  // internal node.
-  virtual bool MouseDoubleClickEvent( const vtkContextMouseEvent &event);
-  //ETX
-
-protected:
-  vtkTreeHeatmapItem();
-  ~vtkTreeHeatmapItem();
+  // Get/Set the heatmap contained by this item.
+  vtkSmartPointer<vtkHeatmapItem> GetHeatmap();
+  void SetHeatmap(vtkSmartPointer<vtkHeatmapItem>);
 
   // Description:
-  // Generate some data needed for painting.  We cache this information as
-  // it only needs to be generated when the input data changes.
-  virtual void RebuildBuffers();
+  // Reorder the rows in the table so they match the order of the leaf
+  // nodes in our tree.
+  void ReorderTable();
 
   // Description:
-  // This function does the bulk of the actual work in rendering our tree &
-  // heatmap data.
-  virtual void PaintBuffers(vtkContext2D *painter);
+  // Reverse the order of the rows in our input table.  This is used
+  // to simplify the table layout for DOWN_TO_UP and RIGHT_TO_LEFT
+  // orientations.
+  void ReverseTableRows();
 
   // Description:
-  // This function returns a bool indicating whether or not we need to rebuild
-  // our cached data before painting.
-  virtual bool IsDirty();
+  // Reverse the order of the columns in our input table.  This is used
+  // to simplify the table layout for DOWN_TO_UP and UP_TO_DOWN
+  // orientations.
+  void ReverseTableColumns();
 
   // Description:
-  // Compute how to scale our data so that text labels will fit within the
-  // bounds determined by the table's cells or the spacing between the leaf
-  // nodes of the tree.
-  void ComputeMultipliers();
+  // Set which way the tree / heatmap should face within the visualization.
+  // The default is for both components to be drawn left to right.
+  void SetOrientation(int orientation);
 
   // Description:
-  // Compute the bounds of our tree in pixel coordinates.
-  void ComputeTreeBounds();
+  // Get the current orientation.
+  int GetOrientation();
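
  A small sketch of the intended call (illustrative only; `item` is an already-configured instance): the orientation constants are the ones defined on vtkDendrogramItem, as used throughout the implementation above.

  #include "vtkDendrogramItem.h"
  #include "vtkTreeHeatmapItem.h"

  // Hypothetical helper: draw the tree above the heatmap instead of the
  // default left-to-right layout.
  void MakeVertical(vtkTreeHeatmapItem *item)
  {
    item->SetOrientation(vtkDendrogramItem::UP_TO_DOWN);
  }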
 
   // Description:
-  // Generate a separate vtkLookupTable for each column in the table.
-  void InitializeLookupTables();
+  // Get the bounds of this item (xMin, xMax, yMin, yMax) in pixel coordinates.
+  void GetBounds(double bounds[4]);
 
   // Description:
-  // Paints the tree & associated table as a heatmap.
-  virtual bool Paint(vtkContext2D *painter);
+  // Get the center point of this item in pixel coordinates.
+  void GetCenter(double center[2]);
 
   // Description:
-  // Helper function.  Generates a vtkLookupTable for a Table column that
-  // contains only strings.  Each string will be assigned a separate color.
-  // This is useful for visualizing categorical data.
-  void GenerateLookupTableForStringColumn(vtkIdType column);
+  // Get the size of this item in pixel coordinates.
+  void GetSize(double size[2]);
 
   // Description:
-  // Draw the heatmap when no corresponding tree is present.
-  void PaintHeatmapWithoutTree(vtkContext2D *painter);
+  // Collapse subtrees until there are only n leaf nodes left in the tree.
+  // The leaf nodes that remain are those that are closest to the root.
+  // Any subtrees that were collapsed prior to this function being called
+  // may be re-expanded.  Use this function instead of
+  // this->GetDendrogram()->CollapseToNumberOfLeafNodes(), as this function
+  // also handles the hiding of heatmap rows that correspond to newly
+  // collapsed subtrees.
+  void CollapseToNumberOfLeafNodes(unsigned int n);
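
  For example (illustrative only, helper name made up), collapsing to the ten leaves nearest the root through this wrapper also hides the heatmap rows of the collapsed subtrees:

  #include "vtkTreeHeatmapItem.h"

  // Hypothetical helper: keep the 10 leaves closest to the root and hide
  // the heatmap rows of everything that was collapsed away.
  void CollapseToTen(vtkTreeHeatmapItem *item)
  {
    item->CollapseToNumberOfLeafNodes(10);
  }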
 
   // Description:
-  // Initialize a vtkTextProperty for drawing labels.  This involves
-  // calculating an appropriate font size so that labels will fit within
-  // the specified cell size.  Returns FALSE if the text would be too
-  // small to easily read; TRUE otherwise.
-  bool SetupTextProperty(vtkContext2D *painter);
+  // Get/Set how wide the edges of the trees should be.  Default is one pixel.
+  float GetTreeLineWidth();
+  void SetTreeLineWidth(float width);
 
   // Description:
-  // Get the value for the cell of the heatmap located at scene position (x, y)
-  // This function assumes the caller has already determined that (x, y) falls
-  // within the heatmap.
-  std::string GetTooltipText(float x, float y);
+  // Deprecated.  Use this->GetDendrogram()->GetPrunedTree() instead.
+  vtkTree * GetPrunedTree();
 
   // Description:
-  // Count the number of leaf nodes in the tree
-  void CountLeafNodes();
+  // Deprecated.  Use this->GetDendrogram()->SetColorArray(const char *arrayName)
+  // instead.
+  void SetTreeColorArray(const char *arrayName);
 
-  // Description:
-  // Count the number of leaf nodes that descend from a given vertex.
-  int CountLeafNodes(vtkIdType vertex);
+  //BTX
 
   // Description:
-  // Get the tree vertex closest to the specified coordinates.
-  vtkIdType GetClosestVertex(double x, double y);
+  // Returns true if the transform is interactive, false otherwise.
+  virtual bool Hit(const vtkContextMouseEvent &mouse);
 
   // Description:
-  // Collapse the subtree rooted at vertex.
-  void CollapseSubTree(vtkIdType vertex);
+  // Propagate any double click onto the dendrogram to check if any
+  // subtrees should be collapsed or expanded.
+  virtual bool MouseDoubleClickEvent(const vtkContextMouseEvent &event);
 
-  // Description:
-  // Expand the previously collapsed subtree rooted at vertex.
-  void ExpandSubTree(vtkIdType vertex);
+  //ETX
 
-  // Description:
-  // Look up the original ID of a vertex in the pruned tree.
-  vtkIdType GetOriginalId(vtkIdType vertex);
+protected:
+  vtkTreeHeatmapItem();
+  ~vtkTreeHeatmapItem();
 
   // Description:
-  // Look up the ID of a vertex in the pruned tree from a vertex ID
-  // of the input tree.
-  vtkIdType GetPrunedIdForOriginalId(vtkIdType originalId);
+  // Paints the tree & associated table as a heatmap.
+  virtual bool Paint(vtkContext2D *painter);
 
   // Description:
-  // Check if the click at (x, y) should be considered as a click on
-  // a collapsed subtree.  Returns the vtkIdType of the pruned subtree
-  // if so, -1 otherwise.
-  vtkIdType GetClickedCollapsedSubTree(double x, double y);
+  // Mark heatmap rows as hidden when a subtree is collapsed.
+  void CollapseHeatmapRows();
 
   // Description:
-  // Calculate the extent of the data that is visible within the window.
-  // This information is used to ensure that we only draw details that
-  // will be seen by the user.  This improves rendering speed, particularly
-  // for larger data.
-  void UpdateVisibleSceneExtent(vtkContext2D *painter);
+  // Mark heatmap columns as hidden when a subtree is collapsed.
+  void CollapseHeatmapColumns();
 
-  // Description:
-  // Returns true if any part of the line segment defined by endpoints
-  // (x0, y0), (x1, y1) falls within the extent of the currently
-  // visible scene.  Returns false otherwise.
-  bool LineIsVisible(double x0, double y0, double x1, double y1);
+  vtkSmartPointer<vtkDendrogramItem> Dendrogram;
+  vtkSmartPointer<vtkDendrogramItem> ColumnDendrogram;
+  vtkSmartPointer<vtkHeatmapItem> Heatmap;
+  int Orientation;
 
 private:
   vtkTreeHeatmapItem(const vtkTreeHeatmapItem&); // Not implemented
   void operator=(const vtkTreeHeatmapItem&); // Not implemented
 
-  vtkSmartPointer<vtkTree> Tree;
-  vtkSmartPointer<vtkTable> Table;
-  vtkSmartPointer<vtkTree> PrunedTree;
-  vtkSmartPointer<vtkTree> LayoutTree;
   unsigned long TreeHeatmapBuildTime;
-  vtkNew<vtkGraphLayout> Layout;
-  vtkNew<vtkTooltipItem> Tooltip;
-  vtkNew<vtkPruneTreeFilter> PruneFilter;
-  vtkNew<vtkLookupTable> TriangleLookupTable;
-  vtkNew<vtkLookupTable> TreeLookupTable;
-  vtkDoubleArray* TreeColorArray;
-  std::vector< vtkLookupTable * > LookupTables;
-  std::vector< vtkIdType > RowMap;
-  double MultiplierX;
-  double MultiplierY;
-  int NumberOfLeafNodes;
-  double CellWidth;
-  double CellHeight;
-
-  std::map< int, std::map< std::string, double> > StringToDoubleMaps;
-
-  double HeatmapMinX;
-  double HeatmapMinY;
-  double HeatmapMaxX;
-  double HeatmapMaxY;
-  double TreeMinX;
-  double TreeMinY;
-  double TreeMaxX;
-  double TreeMaxY;
-  double SceneBottomLeft[3];
-  double SceneTopRight[3];
-  bool JustCollapsedOrExpanded;
-  bool ColorTree;
 };
 
 #endif
diff --git a/Views/Qt/CMakeLists.txt b/Views/Qt/CMakeLists.txt
index efb5588..c313737 100644
--- a/Views/Qt/CMakeLists.txt
+++ b/Views/Qt/CMakeLists.txt
@@ -1,3 +1,5 @@
+include(vtkQt)
+
 set(LibSrcs
   vtkQtAnnotationView.cxx
   vtkQtListView.cxx
@@ -17,14 +19,24 @@ set(MocHeaders
   vtkQtView.h
   )
 
-# import Qt4 build settings
-set(QT_USE_QTNETWORK 1)
-find_package(Qt4 REQUIRED QUIET)
-include(${QT_USE_FILE})
+if(VTK_QT_VERSION VERSION_GREATER "4")
+  find_package(Qt5Widgets REQUIRED QUIET)
+  include_directories(${Qt5Widgets_INCLUDE_DIRS})
+  add_definitions(${Qt5Widgets_DEFINITIONS})
+
+  qt5_wrap_cpp(LibMocSrcs ${MocHeaders})
+
+  set(QT_LIBRARIES ${Qt5Widgets_LIBRARIES})
+else()
+  # import Qt4 build settings
+  set(QT_USE_QTNETWORK 1)
+  find_package(Qt4 REQUIRED QUIET)
+  include(${QT_USE_FILE})
 
-qt4_wrap_cpp(LibMocSrcs ${MocHeaders})
+  qt4_wrap_cpp(LibMocSrcs ${MocHeaders})
+endif()
 
 set(${vtk-module}_NO_HeaderTest 1)
 vtk_module_library(${vtk-module} ${LibSrcs} ${LibMocSrcs})
 
-target_link_libraries(${vtk-module} ${QT_LIBRARIES})
+target_link_libraries(${vtk-module} LINK_PRIVATE ${QT_LIBRARIES})
diff --git a/Views/Qt/vtkQtTableRepresentation.cxx b/Views/Qt/vtkQtTableRepresentation.cxx
index 10d59d7..fea4b61 100644
--- a/Views/Qt/vtkQtTableRepresentation.cxx
+++ b/Views/Qt/vtkQtTableRepresentation.cxx
@@ -32,7 +32,7 @@
 #include <QModelIndex>
 #include <QColor>
 
-#include <assert.h>
+#include <cassert>
 
 // ----------------------------------------------------------------------
 
diff --git a/Views/Qt/vtkQtView.cxx b/Views/Qt/vtkQtView.cxx
index d9fe0cb..122bfa3 100644
--- a/Views/Qt/vtkQtView.cxx
+++ b/Views/Qt/vtkQtView.cxx
@@ -14,6 +14,7 @@
 
 #include <QApplication>
 #include <QPixmap>
+#include <QWidget>
 #include "vtkObjectFactory.h"
 
 
@@ -50,6 +51,10 @@ void vtkQtView::ProcessQtEventsNoUserInput()
 //----------------------------------------------------------------------------
 bool vtkQtView::SaveImage(const char* filename)
 {
+#if QT_VERSION >= 0x050000
+  return this->GetWidget() != 0 ? this->GetWidget()->grab().save(filename) : false;
+#else
   // This is ok even if this->GetWidget() returns null.
   return QPixmap::grabWidget(this->GetWidget()).save(filename);
+#endif
 }
diff --git a/Wrapping/Java/Baseline/Cone.png.md5 b/Wrapping/Java/Baseline/Cone.png.md5
new file mode 100644
index 0000000..5a9e3fd
--- /dev/null
+++ b/Wrapping/Java/Baseline/Cone.png.md5
@@ -0,0 +1 @@
+6d0d58c323af11216a942607c12450c4
diff --git a/Wrapping/Java/CMakeLists.txt b/Wrapping/Java/CMakeLists.txt
index 0dec5b9..f428de4 100644
--- a/Wrapping/Java/CMakeLists.txt
+++ b/Wrapping/Java/CMakeLists.txt
@@ -3,6 +3,31 @@ find_package(JNI REQUIRED)
 
 include_directories(${JAVA_INCLUDE_PATH} ${JAVA_INCLUDE_PATH2})
 
+# Add the option to package VTK for custom Java packaging
+option(VTK_JAVA_INSTALL "Use the Java rules to build the native libraries." OFF)
+if(VTK_JAVA_INSTALL)
+  set(VTK_CUSTOM_LIBRARY_SUFFIX "" CACHE STRING "" FORCE)
+  mark_as_advanced(VTK_CUSTOM_LIBRARY_SUFFIX)
+  if(APPLE)
+     set(CMAKE_INSTALL_NAME_DIR @loader_path CACHE STRING "" FORCE)
+     mark_as_advanced(CMAKE_INSTALL_NAME_DIR)
+  endif()
+  if(NOT MAVEN_LOCAL_NATIVE_NAME)
+    set(MAVEN_LOCAL_NATIVE_NAME "${CMAKE_SYSTEM_NAME}-${CMAKE_SYSTEM_PROCESSOR}" CACHE STRING "")
+  endif()
+  if(NOT MAVEN_NATIVE_ARTIFACTS)
+    set(MAVEN_NATIVE_ARTIFACTS "${MAVEN_LOCAL_NATIVE_NAME}" CACHE STRING "")
+  endif()
+  if(NOT MAVEN_LOCAL_REPOSITORY)
+    set(MAVEN_LOCAL_REPOSITORY "\${user.home}/vtk-maven-repos" CACHE STRING "")
+  endif()
+  if(NOT MAVEN_VTK_GROUP_ID)
+    set(MAVEN_VTK_GROUP_ID "kitware.community" CACHE STRING "")
+  endif()
+  mark_as_advanced(MAVEN_LOCAL_REPOSITORY MAVEN_NATIVE_ARTIFACTS MAVEN_LOCAL_NATIVE_NAME MAVEN_VTK_GROUP_ID)
+  set(MAVEN_DEPENDENCY_XML)
+endif()
+
 # Add the option to build the SWT component for VTK.
 option(VTK_JAVA_SWT_COMPONENT "Should SWT component for Java be built (it requires Eclipse)?" OFF)
 
@@ -10,6 +35,29 @@ if(VTK_JAVA_SWT_COMPONENT)
   find_package(SWT REQUIRED)
 endif()
 
+# Add the option to build the JOGL component for VTK.
+set(JOGL_VERSION "2.0.2")
+option(VTK_JAVA_JOGL_COMPONENT "Need JOGL jar files" OFF)
+
+if(VTK_JAVA_JOGL_COMPONENT)
+  set(MAVEN_REPO "$ENV{HOME}/.m2/repository")
+  find_file(JOGL_LIB jogl-all-${JOGL_VERSION}.jar PATHS ${MAVEN_REPO}/org/jogamp/jogl/jogl-all/${JOGL_VERSION} )
+  find_file(JOGL_GLUE gluegen-rt-${JOGL_VERSION}.jar PATHS ${MAVEN_REPO}/org/jogamp/gluegen/gluegen-rt/${JOGL_VERSION})
+  mark_as_advanced(JOGL_LIB JOGL_GLUE)
+  if(NOT JOGL_LIB)
+     message(ERROR "
+     CMake can not build VTK with VTK_JAVA_JOGL_COMPONENT ON if the JOGL jar file is not provided.
+     Please set the JOGL_LIB variable.
+     ")
+  endif()
+  if(NOT JOGL_GLUE)
+     message(ERROR "
+     CMake can not build VTK with VTK_JAVA_JOGL_COMPONENT ON if the GLUE jar file is not provided.
+     Please set the JOGL_GLUE variable.
+     ")
+  endif()
+endif()
+
 set(WrappingJava_SRCS
   vtkJavaUtil.cxx
   )
@@ -18,7 +66,7 @@ vtk_module_library(vtkWrappingJava ${WrappingJava_SRCS})
 
 # javac runs out memory on Mac so increase heapsize
 if(APPLE)
-  set(JAVAC_OPTIONS -J-Xmx256m)
+  set(JAVAC_OPTIONS -J-Xmx512m)
 endif()
 
 foreach(module ${VTK_MODULES_ENABLED})
@@ -74,7 +122,7 @@ foreach(module ${_MODULES_TO_WRAP})
 
   string(REGEX REPLACE "^vtk" "" _module_no_prefix "${module}")
 
-  if(APPLE)
+  if(APPLE AND NOT VTK_JAVA_INSTALL)
     set(src ${VTK_JAR_PATH}/libvtk${_module_no_prefix}Java.dylib)
     set(tgt ${VTK_JAR_PATH}/libvtk${_module_no_prefix}Java.jnilib)
     add_custom_command(OUTPUT ${tgt}
@@ -175,6 +223,14 @@ if(vtkRenderingCore_ENABLED AND vtkIOImage_ENABLED)
       rendering/swt/vtkSwtInteractorForwarderDecorator
     )
   endif()
+  if(VTK_JAVA_JOGL_COMPONENT)
+    set(VTK_JAVA_DEPENDENCIES ${VTK_JAVA_DEPENDENCIES}
+      rendering/jogl/vtkAbstractJoglComponent
+      rendering/jogl/vtkJoglCanvasComponent
+      rendering/jogl/vtkJoglPanelComponent
+      sample/rendering/JoglConeRendering
+    )
+  endif()
   set(VTK_JAVA_SAMPLE_DEPENDENCIES ${VTK_JAVA_SAMPLE_DEPENDENCIES}
     InternalFrames
     Demo
@@ -223,7 +279,7 @@ foreach(jfile ${VTK_JAVA_DEPENDENCIES})
 endforeach()
 
 if(WIN32)
-  set(SEPARATOR "\;")
+  set(SEPARATOR "\\;")
 else()
   set(SEPARATOR ":")
 endif()
@@ -232,13 +288,29 @@ if(VTK_JAVA_SWT_COMPONENT)
   set(SWT_FILES "${VTK_BINARY_DIR}/java/vtk/rendering/swt/*.java")
 endif()
 
+if(VTK_JAVA_JOGL_COMPONENT)
+  set(JOGL_FILES "${VTK_BINARY_DIR}/java/vtk/rendering/jogl/*.java" "${VTK_BINARY_DIR}/java/vtk/sample/rendering/JoglConeRendering.java")
+endif()
+
+# Handle Java class path
+set(JAVA_COMPILATION_CLASSPATH "${VTK_JAVA_HOME}/..")
+if(ECLIPSE_SWT_LIBRARIES)
+    set(JAVA_COMPILATION_CLASSPATH "${JAVA_COMPILATION_CLASSPATH}${SEPARATOR}${ECLIPSE_SWT_LIBRARIES}")
+endif()
+if(JOGL_LIB)
+    set(JAVA_COMPILATION_CLASSPATH "${JAVA_COMPILATION_CLASSPATH}${SEPARATOR}${JOGL_LIB}")
+endif()
+if(JOGL_GLUE)
+    set(JAVA_COMPILATION_CLASSPATH "${JAVA_COMPILATION_CLASSPATH}${SEPARATOR}${JOGL_GLUE}")
+endif()
+
 # Compile the Java classes
 add_custom_command(
   OUTPUT ${VTK_BINARY_DIR}/java/javac_stamp.txt
   DEPENDS ${VTK_JAVA_SOURCE_FILES}
   COMMAND ${JAVA_COMPILE} ${JAVAC_OPTIONS}
-    -source 1.5 -classpath ${VTK_JAVA_HOME}/..${SEPARATOR}${ECLIPSE_SWT_LIBRARIES} -sourcepath ${VTK_SOURCE_DIR}/Wrapping/Java/ -d ${VTK_BINARY_DIR}/java
-    ${VTK_BINARY_DIR}/java/vtk/*.java ${VTK_BINARY_DIR}/java/vtk/rendering/*.java ${VTK_BINARY_DIR}/java/vtk/rendering/awt/*.java ${SWT_FILES}
+    -source 1.5 -classpath ${JAVA_COMPILATION_CLASSPATH} -sourcepath ${VTK_SOURCE_DIR}/Wrapping/Java/ -d ${VTK_BINARY_DIR}/java
+    ${VTK_BINARY_DIR}/java/vtk/*.java ${VTK_BINARY_DIR}/java/vtk/rendering/*.java ${VTK_BINARY_DIR}/java/vtk/rendering/awt/*.java ${VTK_BINARY_DIR}/java/vtk/sample/*.java ${SWT_FILES} ${JOGL_FILES}
   COMMAND ${CMAKE_COMMAND} -E touch ${VTK_BINARY_DIR}/java/javac_stamp.txt
   COMMENT "Compiling Java Classes"
   )
@@ -285,10 +357,11 @@ if(BUILD_TESTING)
       # If JAVA_DATAMODEL_FLAG is set, pass it as first argument. Otherwise, it's empty.
       # For example, use "-d64" to force 64-bit java jvm.
       #
-      add_test(JavaRegression ${JAVA_RUNTIME} ${VTK_TEST_JAVA_DATAMODEL_FLAG}
-         -classpath ${VTK_JAR_PATH}/vtk.jar${SEPARATOR}${VTK_BINARY_DIR}/Wrapping/Java
-         vtk.test.Regression -D ${VTK_DATA_ROOT}
-         -V Baseline/Graphics/Cone.png
+      ExternalData_add_test(VTKData
+         JavaRegression ${JAVA_RUNTIME} ${VTK_TEST_JAVA_DATAMODEL_FLAG}
+         -classpath "${VTK_JAR_PATH}/vtk.jar${SEPARATOR}${VTK_BINARY_DIR}/Wrapping/Java"
+         vtk.test.Regression -D ${VTK_TEST_DATA_DIR}
+         -V DATA{Baseline/Cone.png,:}
          -T ${VTK_TEST_OUTPUT_DIR}
          )
     endif()
@@ -325,3 +398,24 @@ install(FILES
   ${VTK_JAR_PATH}/vtk.jar
   DESTINATION ${VTK_INSTALL_LIBRARY_DIR}
   COMPONENT RuntimeLibraries)
+
+#-----------------------------------------------------------------------------
+# Post-Install script for Custom Java Packaging
+# This install rule MUST stay at the bottom of this CMakeLists file, as
+# it has to be the last install rule that gets executed
+if(VTK_JAVA_INSTALL)
+  set(MAVEN_NATIVE_ARTIFACT_XML)
+  foreach(native_name ${MAVEN_NATIVE_ARTIFACTS})
+     set(MAVEN_NATIVE_ARTIFACT_XML "${MAVEN_NATIVE_ARTIFACT_XML}\n                                <artifact><file>vtk-${VTK_MAJOR_VERSION}.${VTK_MINOR_VERSION}-natives-${native_name}.jar</file><classifier>natives-${native_name}</classifier><type>jar</type></artifact>")
+  endforeach()
+  if(VTK_JAVA_JOGL_COMPONENT)
+    set(MAVEN_DEPENDENCY_XML "${MAVEN_DEPENDENCY_XML}\n      <dependency><groupId>org.jogamp.jogl</groupId><artifactId>jogl-all-main</artifactId><version>${JOGL_VERSION}</version></dependency>\n      <dependency><groupId>org.jogamp.gluegen</groupId><artifactId>gluegen-rt-main</artifactId><version>${JOGL_VERSION}</version></dependency>")
+  endif()
+  configure_file(JavaInstall.cmake.in
+               ${VTK_BINARY_DIR}/JavaInstall.cmake @ONLY)
+  configure_file(Maven/pom.xml.in
+               ${VTK_BINARY_DIR}/pom.xml @ONLY)
+  configure_file(Maven/README.txt.in
+               ${VTK_BINARY_DIR}/MAVEN-README.txt @ONLY)
+  install(SCRIPT ${VTK_BINARY_DIR}/JavaInstall.cmake)
+endif(VTK_JAVA_INSTALL)
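
The VTK_JAVA_INSTALL and Maven options above only package vtk.jar plus the
per-platform "natives" jars; an application consuming those artifacts still has
to load the VTK native libraries before it touches any wrapped class. A minimal
Java sketch of that startup step, using the same vtkNativeLibrary pattern as the
JoglConeRendering sample further down (the class name is hypothetical, and it
assumes the platform natives have been unpacked onto java.library.path):

  import vtk.vtkNativeLibrary;

  public class LoadVtkNatives {
    public static void main(String[] args) {
      // Try to load every wrapped VTK module and report the ones that failed.
      if (!vtkNativeLibrary.LoadAllNativeLibraries()) {
        for (vtkNativeLibrary lib : vtkNativeLibrary.values()) {
          if (!lib.IsLoaded()) {
            System.err.println(lib.GetLibraryName() + " not loaded");
          }
        }
      }
      // Same call as in the JoglConeRendering sample: disable the popup output window.
      vtkNativeLibrary.DisableOutputWindow(null);
    }
  }
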
diff --git a/Wrapping/Java/JavaInstall.cmake.in b/Wrapping/Java/JavaInstall.cmake.in
new file mode 100644
index 0000000..6e2894f
--- /dev/null
+++ b/Wrapping/Java/JavaInstall.cmake.in
@@ -0,0 +1,67 @@
+# Set native Jar name based on OS and Architecture
+set(NATIVE_NAME "natives-@MAVEN_LOCAL_NATIVE_NAME@")
+
+message("Package VTK for Java - ${NATIVE_NAME}")
+
+# Create the natives directory
+
+# Only needed on Windows, but harmless to run on every platform
+file (COPY "${CMAKE_INSTALL_PREFIX}/bin/"
+      DESTINATION "${CMAKE_INSTALL_PREFIX}/${NATIVE_NAME}"
+      FILES_MATCHING
+      PATTERN *.dll
+      PATTERN *.jnilib
+      PATTERN *.cmake
+      PATTERN *.txt
+      PATTERN *.in
+)
+
+# For Apple and Linux
+file (COPY "${CMAKE_INSTALL_PREFIX}/@VTK_INSTALL_LIBRARY_DIR@/"
+      DESTINATION "${CMAKE_INSTALL_PREFIX}/${NATIVE_NAME}"
+      FILES_MATCHING
+      PATTERN *.so
+      PATTERN *.dylib
+      PATTERN *.jnilib
+      PATTERN *.cmake
+      PATTERN *.in
+      PATTERN *.txt
+)
+
+# Rename vtk.jar with version number
+execute_process(
+  COMMAND "${CMAKE_COMMAND}" -E rename
+     "${CMAKE_INSTALL_PREFIX}/@VTK_INSTALL_LIBRARY_DIR@/vtk.jar"
+     "${CMAKE_INSTALL_PREFIX}/vtk-@VTK_MAJOR_VERSION@.@VTK_MINOR_VERSION@.jar"
+)
+
+# Create the jar file
+execute_process(
+    COMMAND "@JAVA_ARCHIVE@" -cf "@CMAKE_INSTALL_PREFIX@/vtk-@VTK_MAJOR_VERSION@.@VTK_MINOR_VERSION@-${NATIVE_NAME}.jar" -C "@CMAKE_INSTALL_PREFIX@/${NATIVE_NAME}" .
+)
+
+# Remove unwanted files
+execute_process(
+  COMMAND "${CMAKE_COMMAND}" -E remove_directory "${CMAKE_INSTALL_PREFIX}/bin"
+)
+execute_process(
+  COMMAND "${CMAKE_COMMAND}" -E remove_directory "${CMAKE_INSTALL_PREFIX}/lib"
+)
+execute_process(
+  COMMAND "${CMAKE_COMMAND}" -E remove_directory "${CMAKE_INSTALL_PREFIX}/share"
+)
+execute_process(
+  COMMAND "${CMAKE_COMMAND}" -E remove_directory "${CMAKE_INSTALL_PREFIX}/include"
+)
+execute_process(
+  COMMAND "${CMAKE_COMMAND}" -E remove_directory "${CMAKE_INSTALL_PREFIX}/www"
+)
+
+execute_process(
+  COMMAND "${CMAKE_COMMAND}" -E copy pom.xml "${CMAKE_INSTALL_PREFIX}/pom.xml"
+)
+execute_process(
+  COMMAND "${CMAKE_COMMAND}" -E copy MAVEN-README.txt "${CMAKE_INSTALL_PREFIX}/README.txt"
+)
+
+message("Java repackaging done")
\ No newline at end of file
diff --git a/Wrapping/Java/Maven/README.txt.in b/Wrapping/Java/Maven/README.txt.in
new file mode 100644
index 0000000..b0ba6c5
--- /dev/null
+++ b/Wrapping/Java/Maven/README.txt.in
@@ -0,0 +1,93 @@
+# ----------------------------------------------------------------------------
+#                         Packaging VTK for Maven
+# ----------------------------------------------------------------------------
+
+This directory contains the resources necessary to deploy the VTK Java
+bindings as Maven artifacts.
+
+To build a standard VTK package for Maven packaging, use the following
+CMake properties:
+
+     CMAKE_BUILD_TYPE:STRING=Release
+
+     CMAKE_INSTALL_PREFIX:PATH=[destination]/mvn-staging-install
+
+     VTK_WRAP_JAVA:BOOL=ON
+
+To enable rendering through JOGL (i.e. vtkAbstractJoglComponent) enable
+the following property:
+
+     VTK_JAVA_JOGL_COMPONENT:BOOL=ON
+
+Note that this will introduce a dependency on JOGL in the generated pom.xml
+file.
+
+In order to generate a custom pom.xml, the user can configure the properties
+below within CMake; otherwise sensible default values will be set automatically.
+
+     MAVEN_VTK_GROUP_ID:STRING=kitware.community
+
+     MAVEN_LOCAL_NATIVE_NAME:STRING=Win32
+
+     MAVEN_NATIVE_ARTIFACTS:STRING=Win32;Win64;Linux32;Linux64;OSX
+
+     MAVEN_LOCAL_REPOSITORY:STRING=${user.home}/vtk-maven-repos
+
+# ----------------------------------------------------------------------------
+# Local Install
+# ----------------------------------------------------------------------------
+
+In order to install VTK inside your local repository execute the following
+command:
+
+  $ mvn install
+
+The default local Maven repository is in `~/.m2/repository`.
+
+# ----------------------------------------------------------------------------
+# Local Deploy - SNAPSHOT
+# ----------------------------------------------------------------------------
+
+Some extra work may be needed because VTK is a native library that must be
+built on each targeted platform. The idea is to merge the install trees from
+all the targeted platforms and only then run the deploy command.
+
+If VTK has only been built on a single computer, then the generated snapshot
+will only include the native artifacts for that platform. To deploy, run:
+
+
+  $ mvn deploy
+
+The deployment URL is defined by the property `vtk.snapshots.repo`. By default
+this is set to `file://${user.home}/vtk-maven-repos/snapshots`. To deploy
+to an internal artifact server (e.g. Nexus, Artifactory, etc.), you can set
+this property on the command line to the deployment URL, e.g.:
+
+  $ mvn deploy -Dvtk.snapshots.repo=http://maven.yoyodyne.com/repos/snapshots
+
+
+# ----------------------------------------------------------------------------
+# Local Deploy - RELEASE
+# ----------------------------------------------------------------------------
+
+The Maven release process involves several steps and requires some attention
+to detail to get right. You must ensure that all of the platform native
+artifacts to be included in the release are available as expected in the
+pom.xml file. Remember that only one instance of a specific release version
+of an artifact can ever exist. Therefore all the necessary components must be
+properly assembled before going through the release process.
+
+The typical release process involves running:
+
+  $ mvn release:prepare [-DdryRun=true]
+  ... # respond to version info queries.
+  $ mvn release:perform
+
+
+It is recommended that the following resources be referenced before
+performing the release process for the first time.
+
+* http://maven.apache.org/maven-release/maven-release-plugin/
+* https://confluence.sakaiproject.org/display/REL/Maven+release+plugin+cheat+sheet
diff --git a/Wrapping/Java/Maven/pom.xml.in b/Wrapping/Java/Maven/pom.xml.in
new file mode 100644
index 0000000..b17e336
--- /dev/null
+++ b/Wrapping/Java/Maven/pom.xml.in
@@ -0,0 +1,126 @@
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/xsd/maven-4.0.0.xsd">
+    <modelVersion>4.0.0</modelVersion>
+
+    <properties>
+        <vtk.snapshots.repo>file://@MAVEN_LOCAL_REPOSITORY@/snapshots</vtk.snapshots.repo>
+        <vtk.releases.repo>file://@MAVEN_LOCAL_REPOSITORY@/releases</vtk.releases.repo>
+    </properties>
+
+    <groupId>@MAVEN_VTK_GROUP_ID@</groupId>
+    <artifactId>vtk</artifactId>
+    <version>@VTK_MAJOR_VERSION@.@VTK_MINOR_VERSION@-SNAPSHOT</version>
+    <packaging>pom</packaging>
+
+    <name>Maven VTK Wrapper</name>
+    <description>Builds VTK and Mavenize it</description>
+
+    <organization>
+        <name>Kitware Inc.</name>
+        <url>http://www.kitware.com</url>
+    </organization>
+
+    <issueManagement>
+        <system>Mantis</system>
+        <url>http://vtk.org/Bug/</url>
+    </issueManagement>
+
+    <licenses>
+        <license>
+            <name>BSD license</name>
+            <url>http://en.wikipedia.org/wiki/BSD_licenses</url>
+            <comments>
+                VTK is an open-source toolkit licensed under the BSD license.
+
+                Copyright (c) 1993-2008 Ken Martin, Will Schroeder, Bill Lorensen
+                All rights reserved.
+
+                Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
+
+                Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
+                Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution.
+                Neither name of Ken Martin, Will Schroeder, or Bill Lorensen nor the names of any contributors may be used to endorse or promote products derived from this software without specific prior written permission.
+                THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA [...]
+            </comments>
+            <distribution>repo</distribution>
+        </license>
+    </licenses>
+
+    <dependencies>@MAVEN_DEPENDENCY_XML@
+    </dependencies>
+
+    <distributionManagement>
+        <!-- Warning: Generating release artifacts is only meant to be used after
+             a compilation of VTK across all the targeted platforms while overriding
+             the following CMake properties:
+
+              # Customize the Group ID
+                -DMAVEN_VTK_GROUP_ID:STRING=kitware.community
+
+              # This will be generated by CMake automatically but can be overridden
+                -DMAVEN_LOCAL_NATIVE_NAME:STRING=Win32
+
+              # List of all the MAVEN_LOCAL_NATIVE_NAME on each computer
+                -DMAVEN_NATIVE_ARTIFACTS:STRING=Win32;Win64;Linux32;Linux64;OSX
+
+              # Path where the local repo should be stored at least at the end
+              # when Maven will be called
+                -DMAVEN_LOCAL_REPOSITORY:STRING=${user.home}/vtk-maven-repos
+
+             Each platform will generate an OS-specific install directory, but
+             the generated pom.xml will remain the same on each computer, which
+             eases merging those directories for a one-step Maven deploy.
+        -->
+        <repository>
+            <id>vtk-releases-repo</id>
+            <name>VTK Release Artifact Repository</name>
+            <url>${vtk.releases.repo}</url>
+        </repository>
+        <snapshotRepository>
+            <id>vtk-snapshots-repo</id>
+            <name>VTK Snapshot Artifact Repository</name>
+            <url>${vtk.snapshots.repo}</url>
+        </snapshotRepository>
+    </distributionManagement>
+
+    <pluginRepositories>
+        <pluginRepository>
+            <id>sonatype-public-repository</id>
+            <url>https://oss.sonatype.org/content/groups/public</url>
+            <snapshots>
+                <enabled>true</enabled>
+            </snapshots>
+            <releases>
+                <enabled>true</enabled>
+            </releases>
+        </pluginRepository>
+    </pluginRepositories>
+
+    <build>
+        <plugins>
+            <plugin>
+                <groupId>org.codehaus.mojo</groupId>
+                <artifactId>build-helper-maven-plugin</artifactId>
+                <version>1.8</version>
+                <executions>
+                    <execution>
+                        <id>attach-vtk-artifacts</id>
+                        <phase>package</phase>
+                        <goals>
+                            <goal>attach-artifact</goal>
+                        </goals>
+                        <configuration>
+                            <artifacts>
+                                <artifact>
+                                    <file>vtk-@VTK_MAJOR_VERSION@.@VTK_MINOR_VERSION@.jar</file>
+                                    <type>jar</type>
+                                </artifact>@MAVEN_NATIVE_ARTIFACT_XML@
+                            </artifacts>
+                        </configuration>
+                    </execution>
+                </executions>
+            </plugin>
+        </plugins>
+    </build>
+
+</project>
diff --git a/Wrapping/Java/vtk/rendering/awt/vtkAwtComponent.java b/Wrapping/Java/vtk/rendering/awt/vtkAwtComponent.java
index 11cfed4..3d87522 100644
--- a/Wrapping/Java/vtk/rendering/awt/vtkAwtComponent.java
+++ b/Wrapping/Java/vtk/rendering/awt/vtkAwtComponent.java
@@ -18,6 +18,7 @@ import vtk.rendering.vtkAbstractComponent;
 public class vtkAwtComponent extends vtkAbstractComponent<Canvas> {
   protected vtkInternalAwtComponent uiComponent;
   protected boolean isWindowCreated;
+  protected Runnable onWindowCreatedCallback;
 
   public vtkAwtComponent() {
     this(new vtkRenderWindow());
@@ -56,6 +57,12 @@ public class vtkAwtComponent extends vtkAbstractComponent<Canvas> {
 
       // Trigger the real render
       renderWindow.Render();
+
+      // Execute callback if need be
+      if(this.onWindowCreatedCallback != null) {
+          this.onWindowCreatedCallback.run();
+          this.onWindowCreatedCallback = null;
+      }
     } catch (InterruptedException e) {
       // Nothing that we can do except skipping execution
     } finally {
@@ -98,6 +105,18 @@ public class vtkAwtComponent extends vtkAbstractComponent<Canvas> {
   }
 
   /**
+   * Set a callback that gets called once the window has been properly created
+   * and its settings can be customized.
+   *
+   * Once called, the callback is released.
+   *
+   * @param callback
+   */
+  public void setWindowReadyCallback(Runnable callback) {
+	  this.onWindowCreatedCallback = callback;
+  }
+
+  /**
    * Just allow class in same package to affect inRenderCall boolean
    *
    * @param value
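
The new onWindowCreatedCallback field and setWindowReadyCallback() above give
client code a hook that runs exactly once, right after the first successful
Render() on a real native window, and is then released. A hypothetical usage
sketch (class name illustrative; native library loading omitted, see the static
block of the JoglConeRendering sample below):

  import java.awt.BorderLayout;
  import javax.swing.JFrame;
  import javax.swing.SwingUtilities;

  import vtk.vtkActor;
  import vtk.vtkConeSource;
  import vtk.vtkPolyDataMapper;
  import vtk.rendering.awt.vtkAwtComponent;

  public class AwtConeExample {
    public static void main(String[] args) {
      SwingUtilities.invokeLater(new Runnable() {
        public void run() {
          // Minimal cone pipeline
          vtkConeSource cone = new vtkConeSource();
          vtkPolyDataMapper mapper = new vtkPolyDataMapper();
          mapper.SetInputConnection(cone.GetOutputPort());
          vtkActor actor = new vtkActor();
          actor.SetMapper(mapper);

          final vtkAwtComponent awtWidget = new vtkAwtComponent();
          awtWidget.getRenderer().AddActor(actor);

          // Defer camera work until the native window actually exists;
          // the callback runs once and is then dropped.
          awtWidget.setWindowReadyCallback(new Runnable() {
            public void run() {
              awtWidget.resetCamera();
            }
          });

          JFrame frame = new JFrame("AwtConeExample");
          frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
          frame.getContentPane().setLayout(new BorderLayout());
          frame.getContentPane().add(awtWidget.getComponent(), BorderLayout.CENTER);
          frame.setSize(400, 400);
          frame.setVisible(true);
        }
      });
    }
  }
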
diff --git a/Wrapping/Java/vtk/rendering/jogl/vtkAbstractJoglComponent.java b/Wrapping/Java/vtk/rendering/jogl/vtkAbstractJoglComponent.java
new file mode 100644
index 0000000..842799b
--- /dev/null
+++ b/Wrapping/Java/vtk/rendering/jogl/vtkAbstractJoglComponent.java
@@ -0,0 +1,103 @@
+package vtk.rendering.jogl;
+
+import javax.media.opengl.GLAutoDrawable;
+import javax.media.opengl.GLContext;
+import javax.media.opengl.GLEventListener;
+
+import vtk.vtkGenericOpenGLRenderWindow;
+import vtk.vtkObject;
+import vtk.vtkRenderWindow;
+import vtk.rendering.vtkAbstractComponent;
+import vtk.rendering.vtkInteractorForwarder;
+
+/**
+ * Provide JOGL based rendering component for VTK
+ *
+ * @author Sebastien Jourdain - sebastien.jourdain at kitware.com
+ */
+public class vtkAbstractJoglComponent<T extends java.awt.Component> extends vtkAbstractComponent<T> {
+
+    protected T uiComponent;
+    protected boolean isWindowCreated;
+    protected GLEventListener glEventListener;
+    protected vtkGenericOpenGLRenderWindow glRenderWindow;
+
+
+    public vtkAbstractJoglComponent(vtkRenderWindow renderWindowToUse, T glContainer) {
+        super(renderWindowToUse);
+        this.isWindowCreated = false;
+        this.uiComponent = glContainer;
+        this.glRenderWindow = (vtkGenericOpenGLRenderWindow) renderWindowToUse;
+        this.glRenderWindow.SetIsDirect(1);
+        this.glRenderWindow.SetSupportsOpenGL(1);
+        this.glRenderWindow.SetIsCurrent(true);
+
+        // Create the JOGL Event Listener
+        this.glEventListener = new GLEventListener() {
+            public void init(GLAutoDrawable drawable) {
+                vtkAbstractJoglComponent.this.isWindowCreated = true;
+
+                // Make sure the JOGL Context is current
+                GLContext ctx = drawable.getContext();
+                if (!ctx.isCurrent()) {
+                    ctx.makeCurrent();
+                }
+
+                // Init VTK OpenGL RenderWindow
+                vtkAbstractJoglComponent.this.glRenderWindow.SetMapped(1);
+                vtkAbstractJoglComponent.this.glRenderWindow.SetPosition(0, 0);
+                vtkAbstractJoglComponent.this.setSize(drawable.getWidth(), drawable.getHeight());
+                vtkAbstractJoglComponent.this.glRenderWindow.OpenGLInit();
+            }
+
+            public void reshape(GLAutoDrawable drawable, int x, int y, int width, int height) {
+                vtkAbstractJoglComponent.this.setSize(width, height);
+            }
+
+            public void display(GLAutoDrawable drawable) {
+                vtkAbstractJoglComponent.this.inRenderCall = true;
+                vtkAbstractJoglComponent.this.glRenderWindow.Render();
+                vtkAbstractJoglComponent.this.inRenderCall = false;
+            }
+
+            public void dispose(GLAutoDrawable drawable) {
+                vtkAbstractJoglComponent.this.Delete();
+                vtkObject.JAVA_OBJECT_MANAGER.gc(false);
+            }
+        };
+
+        // Bind the interactor forwarder
+        vtkInteractorForwarder forwarder = this.getInteractorForwarder();
+        this.uiComponent.addMouseListener(forwarder);
+        this.uiComponent.addMouseMotionListener(forwarder);
+        this.uiComponent.addKeyListener(forwarder);
+
+        // Make sure that when VTK internally requests a Render, the render is
+        // properly triggered
+        renderWindowToUse.AddObserver("WindowFrameEvent", this, "Render");
+        renderWindowToUse.GetInteractor().AddObserver("RenderEvent", this, "Render");
+        renderWindowToUse.GetInteractor().SetEnableRender(false);
+    }
+
+    public T getComponent() {
+        return this.uiComponent;
+    }
+
+    /**
+     * Render the internal component
+     */
+    public void Render() {
+        // Make sure we can render
+        if (!inRenderCall) {
+            this.uiComponent.repaint();
+        }
+    }
+
+    /**
+     * @return true if the graphical component has been properly set and
+     * operation can be performed on it.
+     */
+    public boolean isWindowSet() {
+        return this.isWindowCreated;
+    }
+}
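
Note that Render() above does not drive OpenGL directly: it only requests a
repaint of the wrapping AWT/Swing component, and the actual VTK render happens
inside the GLEventListener's display() callback on the JOGL thread. Application
code that changes the scene therefore just calls Render(), or relies on the
WindowFrameEvent/RenderEvent observers registered in the constructor. A small
illustrative fragment, where joglWidget and someActor are hypothetical names for
objects created elsewhere:

  // Add an actor, then request a repaint-driven render.
  joglWidget.getRenderer().AddActor(someActor);
  joglWidget.Render();   // schedules repaint(); the GL work runs later in display()
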
diff --git a/Wrapping/Java/vtk/rendering/jogl/vtkJoglCanvasComponent.java b/Wrapping/Java/vtk/rendering/jogl/vtkJoglCanvasComponent.java
new file mode 100644
index 0000000..ace1c40
--- /dev/null
+++ b/Wrapping/Java/vtk/rendering/jogl/vtkJoglCanvasComponent.java
@@ -0,0 +1,24 @@
+package vtk.rendering.jogl;
+
+import javax.media.opengl.GLCapabilities;
+import javax.media.opengl.GLProfile;
+import javax.media.opengl.awt.GLCanvas;
+
+import vtk.vtkGenericOpenGLRenderWindow;
+import vtk.vtkRenderWindow;
+
+public class vtkJoglCanvasComponent extends vtkAbstractJoglComponent<GLCanvas> {
+
+	public vtkJoglCanvasComponent() {
+		this(new vtkGenericOpenGLRenderWindow());
+	}
+
+	public vtkJoglCanvasComponent(vtkRenderWindow renderWindow) {
+		this(renderWindow, new GLCapabilities(GLProfile.getDefault()));
+	}
+
+	public vtkJoglCanvasComponent(vtkRenderWindow renderWindow, GLCapabilities capabilities) {
+		super(renderWindow, new GLCanvas(capabilities));
+		this.getComponent().addGLEventListener(this.glEventListener);
+	}
+}
diff --git a/Wrapping/Java/vtk/rendering/jogl/vtkJoglPanelComponent.java b/Wrapping/Java/vtk/rendering/jogl/vtkJoglPanelComponent.java
new file mode 100644
index 0000000..609f78d
--- /dev/null
+++ b/Wrapping/Java/vtk/rendering/jogl/vtkJoglPanelComponent.java
@@ -0,0 +1,24 @@
+package vtk.rendering.jogl;
+
+import javax.media.opengl.GLCapabilities;
+import javax.media.opengl.GLProfile;
+import javax.media.opengl.awt.GLJPanel;
+
+import vtk.vtkGenericOpenGLRenderWindow;
+import vtk.vtkRenderWindow;
+
+public class vtkJoglPanelComponent extends vtkAbstractJoglComponent<GLJPanel> {
+
+	public vtkJoglPanelComponent() {
+		this(new vtkGenericOpenGLRenderWindow());
+	}
+
+	public vtkJoglPanelComponent(vtkRenderWindow renderWindow) {
+		this(renderWindow, new GLCapabilities(GLProfile.getDefault()));
+	}
+
+	public vtkJoglPanelComponent(vtkRenderWindow renderWindow, GLCapabilities capabilities) {
+		super(renderWindow, new GLJPanel(capabilities));
+		this.getComponent().addGLEventListener(this.glEventListener);
+	}
+}
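
vtkJoglCanvasComponent and vtkJoglPanelComponent differ only in the JOGL peer
they wrap: a heavyweight GLCanvas versus a lightweight, Swing-friendly GLJPanel.
Both expose a constructor taking an explicit GLCapabilities, so callers are not
tied to GLProfile.getDefault(). A sketch of a hypothetical helper using that
constructor (the multisampling settings are plain JOGL calls chosen purely for
illustration, not something VTK requires):

  import javax.media.opengl.GLCapabilities;
  import javax.media.opengl.GLProfile;

  import vtk.vtkGenericOpenGLRenderWindow;
  import vtk.rendering.jogl.vtkJoglPanelComponent;

  public final class JoglComponentFactory {
    // Build a panel component with an explicit GL2 profile and 4x MSAA.
    public static vtkJoglPanelComponent newMultisampledPanel() {
      GLCapabilities caps = new GLCapabilities(GLProfile.get(GLProfile.GL2));
      caps.setSampleBuffers(true);
      caps.setNumSamples(4);
      return new vtkJoglPanelComponent(new vtkGenericOpenGLRenderWindow(), caps);
    }
  }
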
diff --git a/Wrapping/Java/vtk/rendering/swt/vtkInternalSwtComponent.java b/Wrapping/Java/vtk/rendering/swt/vtkInternalSwtComponent.java
index e04c087..f0f910e 100644
--- a/Wrapping/Java/vtk/rendering/swt/vtkInternalSwtComponent.java
+++ b/Wrapping/Java/vtk/rendering/swt/vtkInternalSwtComponent.java
@@ -9,6 +9,12 @@ import org.eclipse.swt.widgets.Listener;
 
 import vtk.vtkObject;
 
+/**
+ * @author    Joachim Pouderoux - joachim.pouderoux at kitware.com, Kitware SAS 2012
+ * @copyright This work was supported by CEA/CESTA
+ *            Commissariat a l'Energie Atomique et aux Energies Alternatives,
+ *            15 avenue des Sablieres, CS 60001, 33116 Le Barp, France.
+ */
 public class vtkInternalSwtComponent extends GLCanvas implements Listener {
 
   private vtkSwtComponent parent;
diff --git a/Wrapping/Java/vtk/rendering/swt/vtkSwtComponent.java b/Wrapping/Java/vtk/rendering/swt/vtkSwtComponent.java
index a84df99..4ed8d31 100644
--- a/Wrapping/Java/vtk/rendering/swt/vtkSwtComponent.java
+++ b/Wrapping/Java/vtk/rendering/swt/vtkSwtComponent.java
@@ -9,7 +9,10 @@ import vtk.rendering.vtkAbstractComponent;
 /**
  * Provide SWT based vtk rendering component
  *
- * @author Joachim Pouderoux - joachim.pouderoux at kitware.com
+ * @author    Joachim Pouderoux - joachim.pouderoux at kitware.com, Kitware SAS 2012
+ * @copyright This work was supported by CEA/CESTA
+ *            Commissariat a l'Energie Atomique et aux Energies Alternatives,
+ *            15 avenue des Sablieres, CS 60001, 33116 Le Barp, France.
  */
 public class vtkSwtComponent extends vtkAbstractComponent<GLCanvas> {
 
diff --git a/Wrapping/Java/vtk/rendering/swt/vtkSwtInteractorForwarderDecorator.java b/Wrapping/Java/vtk/rendering/swt/vtkSwtInteractorForwarderDecorator.java
index ba02def..b1d73a1 100644
--- a/Wrapping/Java/vtk/rendering/swt/vtkSwtInteractorForwarderDecorator.java
+++ b/Wrapping/Java/vtk/rendering/swt/vtkSwtInteractorForwarderDecorator.java
@@ -18,7 +18,10 @@ import org.eclipse.swt.events.MouseWheelListener;
  * Decorator class used to implement all Mouse/Key SWT listener and convert them
  * into the vtkInteractorForwarder proper AWT event.
  *
- * @author Joachim Pouderoux - joachim.pouderoux at kitware.com
+ * @author    Joachim Pouderoux - joachim.pouderoux at kitware.com, Kitware SAS 2012
+ * @copyright This work was supported by CEA/CESTA
+ *            Commissariat a l'Energie Atomique et aux Energies Alternatives,
+ *            15 avenue des Sablieres, CS 60001, 33116 Le Barp, France.
  */
 public class vtkSwtInteractorForwarderDecorator extends vtkInteractorForwarder
 implements MouseListener, MouseMoveListener, MouseTrackListener, MouseWheelListener, KeyListener {
diff --git a/Wrapping/Java/vtk/rendering/vtkAbstractComponent.java b/Wrapping/Java/vtk/rendering/vtkAbstractComponent.java
index a21b7e3..5b85608 100644
--- a/Wrapping/Java/vtk/rendering/vtkAbstractComponent.java
+++ b/Wrapping/Java/vtk/rendering/vtkAbstractComponent.java
@@ -2,10 +2,12 @@ package vtk.rendering;
 
 import java.util.concurrent.locks.ReentrantLock;
 
+import vtk.vtkAxesActor;
 import vtk.vtkCamera;
 import vtk.vtkGenericRenderWindowInteractor;
 import vtk.vtkInteractorStyle;
 import vtk.vtkInteractorStyleTrackballCamera;
+import vtk.vtkOrientationMarkerWidget;
 import vtk.vtkRenderWindow;
 import vtk.vtkRenderer;
 
@@ -17,8 +19,11 @@ import vtk.vtkRenderer;
  *            The concrete type of the graphical component that will contains
  *            the vtkRenderWindow.
  *
- * @authors Sebastien Jourdain - sebastien.jourdain at kitware.com
- *          Joachim Pouderoux - joachim.pouderoux at kitware.com
+ * @authors Sebastien Jourdain - sebastien.jourdain at kitware.com, Kitware Inc 2012
+ *          Joachim Pouderoux - joachim.pouderoux at kitware.com, Kitware SAS 2012
+ * @copyright This work was supported by CEA/CESTA
+ *            Commissariat a l'Energie Atomique et aux Energies Alternatives,
+ *            15 avenue des Sablieres, CS 60001, 33116 Le Barp, France.
  */
 public abstract class vtkAbstractComponent<T> implements vtkComponent<T> {
   protected vtkRenderWindow renderWindow;
@@ -153,4 +158,24 @@ public abstract class vtkAbstractComponent<T> implements vtkComponent<T> {
   }
 
   public abstract T getComponent();
+
+  /**
+   * Generic helper method used to attach orientation axes to a vtkComponent
+   *
+   * @param vtkComponent<?>
+   */
+  public static void attachOrientationAxes(vtkComponent<?> component) {
+      // Only build this once, because it creates its own renderer.
+      // Extra renderers cause issues with resetting.
+      vtkAxesActor axes = new vtkAxesActor();
+      vtkOrientationMarkerWidget axesWidget = new vtkOrientationMarkerWidget();
+
+      axesWidget.SetOutlineColor(0.9300, 0.5700, 0.1300);
+      axesWidget.SetOrientationMarker(axes);
+      axesWidget.SetInteractor(component.getRenderWindowInteractor());
+      axesWidget.SetDefaultRenderer(component.getRenderer());
+      axesWidget.SetViewport(0.0, 0.0, .2, .2);
+      axesWidget.EnabledOn();
+      axesWidget.InteractiveOff();
+  }
 }
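
The new attachOrientationAxes() helper wires a vtkAxesActor into a
vtkOrientationMarkerWidget bound to the component's interactor and default
renderer, so every vtkComponent implementation gets orientation axes with a
single call. A short sketch (joglWidget and awtWidget are illustrative names for
components created elsewhere):

  // Works with any vtkComponent implementation, JOGL or AWT:
  vtkAbstractJoglComponent.attachOrientationAxes(joglWidget);
  vtkAbstractComponent.attachOrientationAxes(awtWidget);
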
diff --git a/Wrapping/Java/vtk/rendering/vtkAbstractEventInterceptor.java b/Wrapping/Java/vtk/rendering/vtkAbstractEventInterceptor.java
new file mode 100644
index 0000000..0561b3b
--- /dev/null
+++ b/Wrapping/Java/vtk/rendering/vtkAbstractEventInterceptor.java
@@ -0,0 +1,68 @@
+package vtk.rendering;
+
+import java.awt.event.KeyEvent;
+import java.awt.event.KeyListener;
+import java.awt.event.MouseEvent;
+import java.awt.event.MouseListener;
+import java.awt.event.MouseMotionListener;
+
+/**
+ * This class implements vtkEventInterceptor with no event interception at all.
+ *
+ * @see {@link MouseMotionListener} {@link MouseListener} {@link KeyListener}
+ *
+ * @author    Sebastien Jourdain - sebastien.jourdain at kitware.com, Kitware Inc 2013
+ */
+
+public class vtkAbstractEventInterceptor implements vtkEventInterceptor {
+
+	@Override
+	public boolean keyPressed(KeyEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean keyReleased(KeyEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean keyTyped(KeyEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean mouseDragged(MouseEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean mouseMoved(MouseEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean mouseClicked(MouseEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean mouseEntered(MouseEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean mouseExited(MouseEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean mousePressed(MouseEvent e) {
+		return false;
+	}
+
+	@Override
+	public boolean mouseReleased(MouseEvent e) {
+		return false;
+	}
+}
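
vtkAbstractEventInterceptor is a convenience adapter: every method returns
false, meaning "not consumed", so a subclass only overrides the events it
actually wants to intercept; returning true marks the event as consumed so the
vtkInteractorForwarder does not pass it on to the VTK interactor style (which is
why the JoglConeRendering sample below returns false from its interceptor to let
the interaction style still see the click). A hypothetical fragment that
swallows double-clicks while letting everything else through (joglWidget stands
for any vtkComponent created earlier by the application):

  import java.awt.event.MouseEvent;

  joglWidget.getInteractorForwarder().setEventInterceptor(new vtkAbstractEventInterceptor() {
    @Override
    public boolean mouseClicked(MouseEvent e) {
      // true = consumed here, not forwarded to the interactor style.
      return e.getClickCount() == 2;
    }
  });
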
diff --git a/Wrapping/Java/vtk/rendering/vtkComponent.java b/Wrapping/Java/vtk/rendering/vtkComponent.java
index e43fd3e..d9d72de 100644
--- a/Wrapping/Java/vtk/rendering/vtkComponent.java
+++ b/Wrapping/Java/vtk/rendering/vtkComponent.java
@@ -15,7 +15,10 @@ import vtk.vtkRenderer;
  *            The concrete type of the graphical component that will contains
  *            the vtkRenderWindow.
  *
- * @author Sebastien Jourdain - sebastien.jourdain at kitware.com
+ * @author    Sebastien Jourdain - sebastien.jourdain at kitware.com, Kitware Inc 2012
+ * @copyright This work was supported by CEA/CESTA
+ *            Commissariat a l'Energie Atomique et aux Energies Alternatives,
+ *            15 avenue des Sablieres, CS 60001, 33116 Le Barp, France.
  */
 
 public interface vtkComponent<T> {
diff --git a/Wrapping/Java/vtk/rendering/vtkEventInterceptor.java b/Wrapping/Java/vtk/rendering/vtkEventInterceptor.java
index 26da763..d830253 100644
--- a/Wrapping/Java/vtk/rendering/vtkEventInterceptor.java
+++ b/Wrapping/Java/vtk/rendering/vtkEventInterceptor.java
@@ -12,7 +12,10 @@ import java.awt.event.MouseMotionListener;
  *
  * @see {@link MouseMotionListener} {@link MouseListener} {@link KeyListener}
  *
- * @author Sebastien Jourdain - sebastien.jourdain at kitware.com
+ * @author    Sebastien Jourdain - sebastien.jourdain at kitware.com, Kitware Inc 2012
+ * @copyright This work was supported by CEA/CESTA
+ *            Commissariat a l'Energie Atomique et aux Energies Alternatives,
+ *            15 avenue des Sablieres, CS 60001, 33116 Le Barp, France.
  */
 public interface vtkEventInterceptor {
 
diff --git a/Wrapping/Java/vtk/rendering/vtkInteractorForwarder.java b/Wrapping/Java/vtk/rendering/vtkInteractorForwarder.java
index 4c4482a..d6782a6 100644
--- a/Wrapping/Java/vtk/rendering/vtkInteractorForwarder.java
+++ b/Wrapping/Java/vtk/rendering/vtkInteractorForwarder.java
@@ -14,8 +14,11 @@ import java.util.concurrent.TimeUnit;
  * Helper class used to implement all Mouse/Key Java listener and convert them
  * into the vtkInteractor proper event.
  *
- * @authors Sebastien Jourdain - sebastien.jourdain at kitware.com
- *          Joachim Pouderoux - joachim.pouderoux at kitware.com
+ * @authors   Sebastien Jourdain - sebastien.jourdain at kitware.com, Kitware Inc 2012
+ *            Joachim Pouderoux - joachim.pouderoux at kitware.com, Kitware SAS 2012
+ * @copyright This work was supported by CEA/CESTA
+ *            Commissariat a l'Energie Atomique et aux Energies Alternatives,
+ *            15 avenue des Sablieres, CS 60001, 33116 Le Barp, France.
  */
 public class vtkInteractorForwarder implements MouseListener, MouseMotionListener, KeyListener {
   final public static int MOUSE_BUTTON_1 = 1;
diff --git a/Wrapping/Java/vtk/sample/rendering/JoglConeRendering.java b/Wrapping/Java/vtk/sample/rendering/JoglConeRendering.java
new file mode 100644
index 0000000..9667730
--- /dev/null
+++ b/Wrapping/Java/vtk/sample/rendering/JoglConeRendering.java
@@ -0,0 +1,178 @@
+package vtk.sample.rendering;
+
+import java.awt.BorderLayout;
+import java.awt.event.KeyEvent;
+import java.awt.event.KeyListener;
+import java.awt.event.MouseEvent;
+import java.util.Arrays;
+
+import javax.swing.JFrame;
+import javax.swing.SwingUtilities;
+
+import vtk.vtkActor;
+import vtk.vtkBoxRepresentation;
+import vtk.vtkBoxWidget2;
+import vtk.vtkCell;
+import vtk.vtkCellPicker;
+import vtk.vtkConeSource;
+import vtk.vtkLookupTable;
+import vtk.vtkNativeLibrary;
+import vtk.vtkPolyDataMapper;
+import vtk.vtkScalarBarRepresentation;
+import vtk.vtkScalarBarWidget;
+import vtk.vtkTransform;
+import vtk.rendering.vtkAbstractEventInterceptor;
+import vtk.rendering.vtkEventInterceptor;
+import vtk.rendering.jogl.vtkAbstractJoglComponent;
+import vtk.rendering.jogl.vtkJoglCanvasComponent;
+import vtk.rendering.jogl.vtkJoglPanelComponent;
+
+public class JoglConeRendering {
+    // -----------------------------------------------------------------
+    // Load VTK library and print which library was not properly loaded
+
+    static {
+        if (!vtkNativeLibrary.LoadAllNativeLibraries()) {
+            for (vtkNativeLibrary lib : vtkNativeLibrary.values()) {
+                if (!lib.IsLoaded()) {
+                    System.out.println(lib.GetLibraryName() + " not loaded");
+                }
+            }
+        }
+        vtkNativeLibrary.DisableOutputWindow(null);
+    }
+
+    public static void main(String[] args) {
+        final boolean usePanel = Boolean.getBoolean("usePanel");
+
+        SwingUtilities.invokeLater(new Runnable() {
+            public void run() {
+                // build VTK Pipeline
+                vtkConeSource cone = new vtkConeSource();
+                cone.SetResolution(8);
+                cone.Update();
+
+                vtkPolyDataMapper coneMapper = new vtkPolyDataMapper();
+                coneMapper.SetInputConnection(cone.GetOutputPort());
+
+                final vtkActor coneActor = new vtkActor();
+                coneActor.SetMapper(coneMapper);
+
+                // VTK rendering part
+                final vtkAbstractJoglComponent<?> joglWidget = usePanel ? new vtkJoglPanelComponent() : new vtkJoglCanvasComponent();
+                System.out.println("We are using " + joglWidget.getComponent().getClass().getName() + " for the rendering.");
+
+                joglWidget.getRenderer().AddActor(coneActor);
+
+                // Add orientation axes
+                vtkAbstractJoglComponent.attachOrientationAxes(joglWidget);
+
+                // Add Scalar bar widget
+                vtkLookupTable lut = new vtkLookupTable();
+                lut.SetHueRange(.66, 0);
+                lut.Build();
+                vtkScalarBarWidget scalarBar = new vtkScalarBarWidget();
+                scalarBar.SetInteractor(joglWidget.getRenderWindowInteractor());
+
+                scalarBar.GetScalarBarActor().SetTitle("Example");
+                scalarBar.GetScalarBarActor().SetLookupTable(lut);
+                scalarBar.GetScalarBarActor().SetOrientationToHorizontal();
+                scalarBar.GetScalarBarActor().SetTextPositionToPrecedeScalarBar();
+                vtkScalarBarRepresentation srep = (vtkScalarBarRepresentation) scalarBar.GetRepresentation();
+                srep.SetPosition(0.5, 0.053796);
+                srep.SetPosition2(0.33, 0.106455);
+                //scalarBar.ProcessEventsOff();
+                scalarBar.EnabledOn();
+                scalarBar.RepositionableOn();
+
+                // Add interactive 3D Widget
+                final vtkBoxRepresentation representation = new vtkBoxRepresentation();
+                representation.SetPlaceFactor(1.25);
+                representation.PlaceWidget(cone.GetOutput().GetBounds());
+
+                final vtkBoxWidget2 boxWidget = new vtkBoxWidget2();
+                boxWidget.SetRepresentation(representation);
+                boxWidget.SetInteractor(joglWidget.getRenderWindowInteractor());
+                boxWidget.SetPriority(1);
+
+                final Runnable callback = new Runnable() {
+                    vtkTransform transform = new vtkTransform();
+
+                    public void run() {
+                        vtkBoxRepresentation rep = (vtkBoxRepresentation) boxWidget.GetRepresentation();
+                        rep.GetTransform(transform);
+                        coneActor.SetUserTransform(transform);
+                    }
+                };
+
+                // Bind widget
+                boxWidget.AddObserver("InteractionEvent", callback, "run");
+                representation.VisibilityOn();
+                representation.HandlesOn();
+                boxWidget.SetEnabled(1);
+                boxWidget.SetMoveFacesEnabled(1);
+
+                // Add cell picker
+                final vtkCellPicker picker = new vtkCellPicker();
+                Runnable pickerCallback = new Runnable() {
+					public void run() {
+						if(picker.GetCellId() != -1) {
+							vtkCell cell = picker.GetDataSet().GetCell(picker.GetCellId());
+							System.out.println("Pick cell: " +  picker.GetCellId() + " - Bounds: " + Arrays.toString(cell.GetBounds()));
+						}
+					}
+				};
+                joglWidget.getRenderWindowInteractor().SetPicker(picker);
+                picker.AddObserver("EndPickEvent", pickerCallback, "run");
+
+                // Bind pick action to double-click
+                joglWidget.getInteractorForwarder().setEventInterceptor(new vtkAbstractEventInterceptor() {
+
+					public boolean mouseClicked(MouseEvent e) {
+						// Request picking action on double-click
+						final double[] position = {e.getX(), joglWidget.getComponent().getHeight() - e.getY(), 0};
+						if(e.getClickCount() == 2) {
+							System.out.println("Click trigger the picking (" + position[0] + ", " +position[1] + ")");
+							picker.Pick(position, joglWidget.getRenderer());
+						}
+
+						// We let the InteractionStyle process the event anyway
+						return false;
+					}
+				});
+
+                // UI part
+                JFrame frame = new JFrame("SimpleVTK");
+                frame.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE);
+                frame.getContentPane().setLayout(new BorderLayout());
+                frame.getContentPane().add(joglWidget.getComponent(),
+                        BorderLayout.CENTER);
+                frame.setSize(400, 400);
+                frame.setLocationRelativeTo(null);
+                frame.setVisible(true);
+                joglWidget.resetCamera();
+                joglWidget.getComponent().requestFocus();
+
+                // Add r:ResetCamera and q:Quit key binding
+                joglWidget.getComponent().addKeyListener(new KeyListener() {
+                    @Override
+                    public void keyTyped(KeyEvent e) {
+                        if (e.getKeyChar() == 'r') {
+                            joglWidget.resetCamera();
+                        } else if (e.getKeyChar() == 'q') {
+                            System.exit(0);
+                        }
+                    }
+
+                    @Override
+                    public void keyReleased(KeyEvent e) {
+                    }
+
+                    @Override
+                    public void keyPressed(KeyEvent e) {
+                    }
+                });
+            }
+        });
+    }
+}
diff --git a/Wrapping/Java/vtk/sample/rendering/annotation/LabeledCubeAxesActor.java b/Wrapping/Java/vtk/sample/rendering/annotation/LabeledCubeAxesActor.java
index 26fee77..5f8ef4a 100644
--- a/Wrapping/Java/vtk/sample/rendering/annotation/LabeledCubeAxesActor.java
+++ b/Wrapping/Java/vtk/sample/rendering/annotation/LabeledCubeAxesActor.java
@@ -1,3 +1,5 @@
+package vtk.sample.rendering.annotation;
+
 import javax.swing.JFrame;
 
 import vtk.vtkActor;
diff --git a/Wrapping/Python/CMakeLists.txt b/Wrapping/Python/CMakeLists.txt
index 8cb11bb..6f54ff4 100644
--- a/Wrapping/Python/CMakeLists.txt
+++ b/Wrapping/Python/CMakeLists.txt
@@ -171,9 +171,9 @@ if(PYTHON_EXECUTABLE)
   unset(VTK_PYTHON_IMPORT_ALL)
   foreach(module ${VTK_PYTHON_MODULES})
     set(VTK_PYTHON_IMPORT_ALL "${VTK_PYTHON_IMPORT_ALL}from ${module} import *\n")
-    configure_file(vtk/module.py.in vtk/${module}.py @ONLY IMMEDIATE)
+    configure_file(vtk/module.py.in vtk/${module}.py @ONLY)
   endforeach()
-  configure_file(vtk/__init__.py.in vtk/__init__.py @ONLY IMMEDIATE)
+  configure_file(vtk/__init__.py.in vtk/__init__.py @ONLY)
 
 # vtk.util package
   list(APPEND VTK_PYTHON_FILES
@@ -251,7 +251,7 @@ if(PYTHON_EXECUTABLE)
   # Byte compile the Python files.
   configure_file(${CMAKE_CURRENT_SOURCE_DIR}/compile_all_vtk.py.in
     ${CMAKE_CURRENT_BINARY_DIR}/compile_all_vtk.py
-    @ONLY IMMEDIATE)
+    @ONLY)
   add_custom_command(
     COMMAND ${PYTHON_EXECUTABLE} ${CMAKE_CURRENT_BINARY_DIR}/compile_all_vtk.py
     DEPENDS ${VTK_PYTHON_SOURCE_FILES} ${CMAKE_CURRENT_BINARY_DIR}/compile_all_vtk.py
@@ -268,106 +268,38 @@ if(PYTHON_EXECUTABLE)
     ${vtkpython_pyc_depends})
 
   # If no runtime is to be installed then do not install python modules.
-  if(VTK_INSTALL_NO_RUNTIME)
-    set(VTK_INSTALL_NO_PYTHON 1)
-  endif()
+  if(NOT VTK_INSTALL_NO_RUNTIME)
 
-  # If set (generally by projects that include VTK), we install the
-  # vtk{*}Python libraries using CMake rather than setup.py. Which implies that
-  # they will be put will all other libraries. This is done in
-  # KitCommonPythonWrapBlock.cmake
-  if(VTK_INSTALL_PYTHON_USING_CMAKE)
-    set(VTK_INSTALL_NO_PYTHON 1)
-    # Install python module directory (*.py and *.pyd)
+    # Install python modules
     install(DIRECTORY "${CMAKE_CURRENT_BINARY_DIR}/vtk"
-      DESTINATION "${VTK_INSTALL_RUNTIME_DIR}/Python" COMPONENT RuntimeLibraries
+      DESTINATION ${VTK_INSTALL_PYTHON_MODULE_DIR} COMPONENT RuntimeLibraries
       USE_SOURCE_PERMISSIONS)
-  endif()
-
-  # Add a rule to use python distutils to install the python wrappers.
-  if(NOT VTK_INSTALL_NO_PYTHON)
-    set(DOLLAR "$")
 
-    # Create default python setup arguments if they are not set.
-    if(NOT DEFINED VTK_PYTHON_SETUP_ARGS)
-      set(VTK_PYTHON_SETUP_ARGS "--prefix=\"${DOLLAR}{CMAKE_INSTALL_PREFIX}\""
-        CACHE STRING "Arguments passed to \"python setup.py install ...\" during installation.")
-      mark_as_advanced(VTK_PYTHON_SETUP_ARGS)
+    # Install python extension library that backs the modules
+    if (BUILD_SHARED_LIBS AND NOT VTK_INSTALL_NO_LIBRARIES)
+      foreach(module ${VTK_PYTHON_MODULES})
+        install(TARGETS ${module}Python
+          EXPORT ${VTK_INSTALL_EXPORT_NAME}
+          RUNTIME DESTINATION ${VTK_INSTALL_RUNTIME_DIR} COMPONENT RuntimeLibraries
+          LIBRARY DESTINATION ${VTK_INSTALL_PYTHON_MODULE_DIR}/vtk COMPONENT RuntimeLibraries
+          ARCHIVE DESTINATION ${VTK_INSTALL_ARCHIVE_DIR} COMPONENT Development
+          )
+      endforeach()
     endif()
 
-    # Change dollar sign to DOLLARSIGN
-    string(REGEX REPLACE "\\${DOLLAR}" "DOLLARSIGN"
-      PYTHON_SETUP_ARGS "${VTK_PYTHON_SETUP_ARGS}")
-
-    # Get the install path for python modules
-    execute_process(COMMAND "${PYTHON_EXECUTABLE}"
-        "setup_install_paths.py" "purelib" ${PYTHON_SETUP_ARGS}
-      WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
-      OUTPUT_VARIABLE PYTHON_MODULE_INSTALL_DIR)
-
-    # Get the install path for binary python modules
-    execute_process(COMMAND "${PYTHON_EXECUTABLE}"
-        "setup_install_paths.py" "platlib" ${PYTHON_SETUP_ARGS}
-      WORKING_DIRECTORY "${CMAKE_CURRENT_SOURCE_DIR}"
-      OUTPUT_VARIABLE PYTHON_MODULE_PLATFORM_INSTALL_DIR)
-
-    # Change DOLLARSIGN to dollar sign
-    string(REGEX REPLACE "DOLLARSIGN" "${DOLLAR}"
-      PYTHON_MODULE_INSTALL_DIR "${PYTHON_MODULE_INSTALL_DIR}")
-    string(REGEX REPLACE "DOLLARSIGN" "${DOLLAR}"
-      PYTHON_MODULE_PLATFORM_INSTALL_DIR
-      "${PYTHON_MODULE_PLATFORM_INSTALL_DIR}")
-
-    # Replace backslashes with forward slashes
-    if(WIN32)
-      string(REGEX REPLACE "\\\\" "/" PYTHON_MODULE_INSTALL_DIR
-        "${PYTHON_MODULE_INSTALL_DIR}")
-      string(REGEX REPLACE "\\\\" "/" PYTHON_MODULE_PLATFORM_INSTALL_DIR
-        "${PYTHON_MODULE_PLATFORM_INSTALL_DIR}")
+    # Install the conveniently configured python interpreters
+    if(NOT VTK_INSTALL_NO_PYTHON_EXES)
+      # Install the vtkpython executable
+      install(TARGETS vtkpython
+        DESTINATION ${VTK_INSTALL_RUNTIME_DIR})
+
+      if(PVTKPYTHON_EXECUTABLE)
+        # Install the mpi enabled vtkpython executable
+        install(TARGETS pvtkpython
+          DESTINATION ${VTK_INSTALL_RUNTIME_DIR})
+      endif()
     endif()
 
-    set(VTK_PYTHON_MODULE_INSTALL_DIR "${PYTHON_MODULE_INSTALL_DIR}"
-      CACHE INTERNAL "Install directory for Python .py and .pyc files")
-
-    set(VTK_PYTHON_MODULE_PLATFORM_INSTALL_DIR
-      "${PYTHON_MODULE_PLATFORM_INSTALL_DIR}"
-      CACHE INTERNAL "Install directory for Python binary modules")
-
-    # If there are multiple configurations then add a BUILD_TYPE=...
-    # argument to the python setup.py call.  The build type to use is set
-    # in the CMake variable BUILD_TYPE while running the install script.
-    if(CMAKE_CONFIGURATION_TYPES)
-      set(VTK_PYTHON_SETUP_BUILD_TYPE "BUILD_TYPE=${DOLLAR}{BUILD_TYPE}")
-    else()
-      set(VTK_PYTHON_SETUP_BUILD_TYPE)
-    endif()
-
-    # Configure the post-install script to run python on setup.py.
-    configure_file(${CMAKE_CURRENT_SOURCE_DIR}/PythonInstall.cmake.in
-      ${CMAKE_CURRENT_BINARY_DIR}/PythonInstall.cmake
-      @ONLY IMMEDIATE)
-
-    install(SCRIPT ${CMAKE_CURRENT_BINARY_DIR}/PythonInstall.cmake
-      COMPONENT RuntimeLibraries)
   endif()
-endif()
 
-# Create the setup.py file.
-if(CMAKE_CONFIGURATION_TYPES)
-  # The build has multiple configuration types.  If CMAKE_BUILD_TYPE
-  # is set use it as the default BUILD_TYPE for setup.py to install.
-  set(VTK_PYTHON_HAS_CONFIG_TYPES 1)
-  if(CMAKE_BUILD_TYPE)
-    set(VTK_PYTHON_BUILD_TYPE "\"${CMAKE_BUILD_TYPE}\"")
-  else()
-    set(VTK_PYTHON_BUILD_TYPE "[]")
-  endif()
-else()
-  # The build has one configuration type.  The build type does not
-  # affect installation.
-  set(VTK_PYTHON_HAS_CONFIG_TYPES 0)
-  set(VTK_PYTHON_BUILD_TYPE "[]")
 endif()
-
-configure_file(${CMAKE_CURRENT_SOURCE_DIR}/setup.py.in
-  ${CMAKE_CURRENT_BINARY_DIR}/setup.py @ONLY IMMEDIATE)
diff --git a/Wrapping/Python/PythonInstall.cmake.in b/Wrapping/Python/PythonInstall.cmake.in
deleted file mode 100644
index f4904fb..0000000
--- a/Wrapping/Python/PythonInstall.cmake.in
+++ /dev/null
@@ -1,18 +0,0 @@
-# Configured file and directory locations.
-SET(PYTHON_EXECUTABLE "@PYTHON_EXECUTABLE@")
-SET(CMAKE_INSTALL_PREFIX "${CMAKE_INSTALL_PREFIX}")
-SET(VTK_BINARY_DIR "@VTK_BINARY_DIR@")
-
-# Convert the prefix to a windows path if necessary.  The python
-# distutils implementation seems sensitive to the slash direction.
-IF(WIN32)
-  IF(NOT CYGWIN)
-    STRING(REGEX REPLACE "/" "\\\\" CMAKE_INSTALL_PREFIX
-      "${CMAKE_INSTALL_PREFIX}")
-  ENDIF(NOT CYGWIN)
-ENDIF(WIN32)
-
-# Run python on setup.py to install the python modules.
-EXEC_PROGRAM("${PYTHON_EXECUTABLE}" "${VTK_BINARY_DIR}/Wrapping/Python" ARGS
-  "setup.py" "install" @VTK_PYTHON_SETUP_BUILD_TYPE@ @VTK_PYTHON_SETUP_ARGS@
-  )
diff --git a/Wrapping/Python/README.txt b/Wrapping/Python/README.txt
index 31b0f5c..65d48df 100644
--- a/Wrapping/Python/README.txt
+++ b/Wrapping/Python/README.txt
@@ -44,7 +44,7 @@ modules.
 
   (2) Using the package from the source build without installing it
       system wide and without using `vtkpython`.  This is most
-      useful when you build VTK off a CVS checkout and do not want
+      useful when you build VTK off a git clone and do not want
       to install it system wide and still want to use the vanilla
       Python interpreter.  This is also useful if you are not the
       administrator of the machine you are using/building VTK on.
@@ -82,25 +82,17 @@ ${VTK_ROOT}/Wrapping/Python:${CMAKE_LIBRARY_OUTPUT_DIRECTORY}
       RelWithDebInfo.
 
 
-  (3) Installation via distutils to a directory different from the
+  (3) Installation to a directory different from the
       `VTK_ROOT` directory.  To install VTK built from source you
       simply need to run the "install rule".  Under Unix this
       implies running `make install` and under Windows this implies
       running the INSTALL target.
 
-      The installation rule internally executes the
-      ``VTK_BINARY_DIR/Wrapping/Python/setup.py`` script.
-      `setup.py` is a distutils script that should install
-      VTK-Python correctly.  The `setup.py` script may also be
-      executed from the `VTK_BINARY_DIR` in order to build an
-      installer (via `bdist_wininst`) or to build a Python Egg.
-
-
 VTK-Python interpreters
 ^^^^^^^^^^^^^^^^^^^^^^^
 
 In order to solve some problems with running VTK-Python on some
-platforms and compilers a special Python interpreter is distributed
+platforms and compilers a Python interpreter is distributed
 along with VTK.  This new interpreter is called `vtkpython` and is
 the recommended way of running VTK-Python scripts.  If the vanilla
 Python interpreter is good enough and works well for you, please use
@@ -128,71 +120,19 @@ users will just do::
   # or
   from vtk import *
 
-and all the available 'kits' will be loaded - just like with the
-older vtkpython.  The name of the kits is available in the kits
-variable::
-
-  import vtk
-  print vtk.kits
-  ['common', 'filtering', 'io', ...]
+and all the available 'modules' will be loaded - just like with the
+older vtkpython.
 
-If the user specifically wants just the classes the Common directory
+If the user specifically wants just the classes the Common/Core module
 imported the user does::
 
-  import vtk.common
+  import vtk.vtkCommonCore
 
-All the kit names are in lowercase.  This is similar to the way in
-which the Tcl packages are split.  Similarly, classes specifically
-in other kits can be imported by using the appropriate kit name.
-Please do note that even when you import vtk.common, the vtk
+Please do note that even when you import vtk.vtkCommonCore, the vtk
 namespace will still have all the kits loaded inside it.  Its just
-that vtk.common will have only the classes from the Common
+that vtk.vtkCommonCore will have only the classes from the Common/Core
 directory.
 
-
-Valid Kit names
-~~~~~~~~~~~~~~~
-
-Required Kits
--------------
-
-common, filtering, io, imaging and graphics.
-
-These are the required kits that you must have to use VTK.  You
-can import all of them using the required module like so:
-
-    from vtk.required import *
-
-You should have all the required kits in your namespace.  If any
-of them is not importable you *will* get an ImportError.
-
-Optional Kits
--------------
-
-genericfiltering, hybrid, parallel, rendering, volumerendering,
-and widgets.
-
-These are the optional kits.  Unlike the Tcl packages importing
-these kits *will not* import all the required kits in as well.
-For the rationale behind this please read this mail and also the
-thread here:
-
-http://public.kitware.com/pipermail/vtk-developers/2001-October/000828.html
-
-If you don't have a particular optional kit then Python will not
-raise an exception when importing vtk, but if you try loading it
-directly like so::
-
-    import vtk.parallel
-
-Then you will receive an import error if there was one.  Also, if
-the module exists but there are linking errors you will get a
-LinkError exception.
-
-
-Other VTK related modules
-^^^^^^^^^^^^^^^^^^^^^^^^^
-
 Apart from the basic VTK functionality there are other useful VTK
 related modules in the package.  There are various widgets for
 different GUI toolkits.  These are available in the various
@@ -228,16 +168,6 @@ There is also a `vtk.test` package that allows one to create unit
 tests for VTK-Python.
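
A minimal test sketch, assuming the usual `vtk.test.Testing` helpers
(`vtkTest` and `main`) are available in your build::

  import vtk
  from vtk.test import Testing

  class TestSphereSource(Testing.vtkTest):
      def testOutput(self):
          # build a simple source and check it produced geometry
          src = vtk.vtkSphereSource()
          src.Update()
          self.assertTrue(src.GetOutput().GetNumberOfPoints() > 0)

  if __name__ == "__main__":
      Testing.main([(TestSphereSource, 'test')])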
 
 
-Backwards compatibility
-^^^^^^^^^^^^^^^^^^^^^^^
-
-Since VTK-4.0, the usage of `vtkpython`, `vtkpythontk`,
-`vtkTkRenderWidget` and other modules in the Wrapping/Python
-directory were deprecated.  As of VTK-5.0, these files are no longer
-available.  Please use the `vtk` package instead which provides the
-functionality.
-
-
 Writing and running VTK-Python tests
 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
 
@@ -253,7 +183,8 @@ When you need to run a VTK-Python unittest that uses the
 argument to see all the supported options.  Typically you will need
 to do something like the following::
 
-  python TestTkRenderWidget.py -B $VTK_DATA_ROOT/Baseline/Rendering
+  python TestTkRenderWidget.py \
+    -B $VTK_ROOT/ExternalData/Rendering/Tk/Testing/Data/Baseline
 
 To see the options available use this::
 
@@ -286,22 +217,15 @@ in the form of Debian, RPM and other packages.
     files should be installed somewhere in the linker's path (for
     example in `/usr/lib`).  Under Windows these should be installed
      in a directory that is in the `PATH`.  The Python extension modules
-     should be  installed via the `setup.py` file inside the `vtk`
-     package.  Typically these should be installed to
+     are typically installed to
      `/usr/lib/pythonX.Y/site-packages/vtk` (or
      `PythonX.Y\Lib\site-packages\vtk`).
 
 The VTK install rule (`make install` under Unix) will usually do the
 right thing in installing everything.  Make sure that the
-`CMAKE_INSTALL_PREFIX` variable is set appropriately.  There are two
-ways to customize python module installation.  First, one may modify
-the VTK_PYTHON_SETUP_ARGS CMake cache variable to set the options
-passed to the setup.py script.  The default value for this variable
-provides reasonable behavior for packagers.  Second, one may
-define VTK_INSTALL_NO_PYTHON:BOOL=ON in the CMake cache which will
-disable the automatic execution of setup.py as part of the install
-process.  Then one may run ``python setup.py install`` manually
-with the desired options.
+`CMAKE_INSTALL_PREFIX` variable is set appropriately.  You may modify
+the VTK_INSTALL_PYTHON_MODULE_DIR CMake cache variable to change the
+installation location for the Python extension modules.
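
After the install rule has run, a quick way to confirm where the
package actually ended up (the path in the comment is only an
example)::

  import vtk
  print vtk.__file__   # e.g. /usr/lib/python2.7/site-packages/vtk/__init__.pyc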
 
 
 Common problems
diff --git a/Wrapping/Python/setup.py.in b/Wrapping/Python/setup.py.in
deleted file mode 100755
index e7cc979..0000000
--- a/Wrapping/Python/setup.py.in
+++ /dev/null
@@ -1,144 +0,0 @@
-#!/usr/bin/env python
-
-"""
-setup.py for installing the VTK-Python bindings using distutils.
-
-Created by Prabhu Ramachandran, June 2002.
-
-Updated for install with configuration types by Brad King, August 2005.
-
-"""
-
-import sys
-import string
-import os
-import os.path
-from types import StringType
-from distutils.core import setup
-from distutils.command.install_data import install_data
-from distutils.sysconfig import get_config_var
-
-# Support for Python Eggs:
-#  http://peak.telecommunity.com/DevCenter/PythonEggs
-#  http://peak.telecommunity.com/DevCenter/EasyInstall
-has_setup_tools = 0
-try:
-    from setuptools import setup
-except ImportError:
-    pass
-else:
-    has_setup_tools = 1
-
-# VTK build configuration settings.
-vtk_version = "@VTK_MAJOR_VERSION@.@VTK_MINOR_VERSION@"
-vtk_lib_dir = "@CMAKE_LIBRARY_OUTPUT_DIRECTORY@"
-vtk_bin_dir = "@CMAKE_RUNTIME_OUTPUT_DIRECTORY@"
-vtk_has_configuration_types = @VTK_PYTHON_HAS_CONFIG_TYPES@
-vtk_modules = """@VTK_PYTHON_MODULES@""".split(';')
-
-# The build type ('Release', 'Debug' etc.).  If vtk_has_configuration_types
-# is true this must be set.  It may be set on the command line by something
-# like 'BUILD_TYPE=Release'.  For example::
-#   python setup.py install --prefix=D:\\Python23 BUILD_TYPE=Release
-vtk_build_type = @VTK_PYTHON_BUILD_TYPE@
-
-# Construct the list of executable names to be installed.
-vtk_exe_names = ['vtkpython']
-
-def get_libs():
-    """Returns a list of libraries to be installed.  """
-    libs = []
-
-    # Select platform-specific components of the module file names.
-    if os.name == 'posix':
-        dir = vtk_lib_dir
-        suffix = get_config_var('SO')
-    else:
-        dir = vtk_bin_dir.replace('/', '\\')
-        suffix = '.pyd'
-
-    # If this build has configuration types append the selected configuration.
-    if vtk_has_configuration_types:
-        dir = os.path.join(dir, vtk_build_type)
-
-    # Enumerate the list of module files.
-    for mod in vtk_modules:
-        libs.append(os.path.abspath(os.path.join(dir, mod+'Python'+suffix)))
-
-    return libs
-
-
-def get_scripts():
-    """Returns the appropriate vtkpython executable and pvtkpython
-    that is to be installed."""
-    scripts = []
-
-    # Select platform-specific components of the executable file names.
-    if os.name == 'posix':
-        dir = vtk_lib_dir
-        suffix = ''
-    else:
-        dir = vtk_bin_dir.replace('/', '\\')
-        suffix = '.exe'
-
-    # If this build has configuration types append the selected configuration.
-    if vtk_has_configuration_types:
-        dir = os.path.join(dir, vtk_build_type)
-
-    # Enumerate the list of executable files.
-    for exe in vtk_exe_names:
-        scripts.append(os.path.abspath(os.path.join(dir, exe+suffix)))
-
-    return scripts
-
-
-class my_install_data (install_data):
-    def finalize_options (self):
-        """Needed to make this thing work properly."""
-        self.set_undefined_options ('install',
-                                    ('install_lib', 'install_dir'),
-                                    ('root', 'root'),
-                                    ('force', 'force'),
-                                    )
-
-if __name__ == '__main__':
-    # Get the optional build type argument.
-    for x in sys.argv[:]:
-        if string.find(x, 'BUILD_TYPE') > -1:
-            vtk_build_type = string.strip(string.split(x, '=')[1])
-            sys.argv.remove(x)
-            break
-
-    # Make sure a build type was specified if it is required.
-    if vtk_has_configuration_types:
-        if not vtk_build_type:
-            raise "ERROR: Must specify BUILD_TYPE=<config-name> on command line."
-
-    def mk_dict(**kw):
-        # Unnecessary in recent Pythons but handy for earlier
-        # versions.
-        return kw
-
-    # The options for setup.
-    opts = mk_dict(name              = "VTK",
-                   version           = vtk_version,
-                   description       = "The Visualization Toolkit",
-                   maintainer        = "VTK Developers",
-                   maintainer_email  = "vtk-developers@vtk.org",
-                   license           = "BSD",
-                   long_description  = "A high level visualization library",
-                   url               = "http://www.vtk.org/",
-                   platforms         = ['Any'],
-                   cmdclass          = {'install_data': my_install_data},
-                   packages          = ['vtk', 'vtk.gtk', 'vtk.qt4',
-                                        'vtk.tk', 'vtk.util', 'vtk.wx',
-                                        'vtk.test'],
-                   #scripts           = get_scripts(),
-                   data_files        = [('vtk', get_libs())]
-                   )
-    # If setup_tools is available, then add an extra option to disable
-    # creation of a ZIP file.
-    if has_setup_tools:
-        opts['zip_safe'] = 0
-
-    setup(**opts)
diff --git a/Wrapping/Python/setup_install_paths.py b/Wrapping/Python/setup_install_paths.py
deleted file mode 100755
index f5bf49f..0000000
--- a/Wrapping/Python/setup_install_paths.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/bin/env python
-
-"""
-This script will return the paths that distutils will use for installing
-a package.  To use this script, execute it the same way that you would
-execute setup.py, but instead of providing 'install' or 'build' as the
-command, specify 'purelib' or 'platlib' and the corresponding path
-will be printed.  The 'purelib' command will print the install location
-for .py files, while the 'platlib' command will print the install location
-of binary modules (.so or .dll).
-
-Written by David Gobbi, Feb 25, 2006.
-"""
-
-
-import string
-import sys
-import os
-
-def get_install_path(command, *args):
-    """Return the module install path, given the arguments that were
-    provided to setup.py.  The paths that you can request are 'purelib'
-    for the .py installation directory and 'platlib' for the binary
-    module installation directory.
-    """
-
-    # convert setup args into an option dictionary
-    options = {}
-
-    for arg in args:
-        if arg == '--':
-            break
-        if arg[0:2] == "--":
-            try:
-                option, value = string.split(arg,"=")
-                options[option] = value
-            except ValueError:
-                options[arg] = 1
-
-    # check for the prefix and exec_prefix
-    try:
-        prefix = options["--prefix"]
-    except KeyError:
-        prefix = None
-
-    try:
-        exec_prefix = options["--exec-prefix"]
-    except KeyError:
-        exec_prefix = prefix
-
-    # if prefix or exec_prefix aren't set, use default system values
-    if prefix == None:
-        prefix = sys.prefix
-
-    if exec_prefix == None:
-        exec_prefix = sys.exec_prefix
-
-    # replace backslashes with slashes
-    if os.name != 'posix':
-        prefix = string.replace(prefix, os.sep, "/")
-        exec_prefix = string.replace(exec_prefix, os.sep, "/")
-
-    # get rid of trailing separator
-    if prefix != "" and prefix[-1] == "/":
-        prefix = prefix[0:-1]
-
-    if exec_prefix != "" and exec_prefix[-1] == "/":
-        exec_prefix = exec_prefix[0:-1]
-
-    # check for "home" install scheme
-    try:
-        home = options["--home"]
-        if os.name != 'posix':
-            home = string.replace(home, os.sep, "/")
-        if home != "" and home[-1] == "/":
-            home = home[0:-1]
-    except KeyError:
-        home = None
-
-    # apply "home" install scheme, but not for Windows with python < 2.4
-    # (distutils didn't allow home scheme for windows until 2.4)
-    if home != None and not (os.name != 'posix' and sys.version < '2.4'):
-        purelib = home+'/lib/python'
-        platlib = home+'/lib/python'
-        scripts = home+'/bin'
-        data    = home
-    elif os.name == 'posix':
-        ver = sys.version[0:3]
-        purelib = prefix+'/lib/python'+ver+'/site-packages'
-        platlib = exec_prefix+'/lib/python'+ver+'/site-packages'
-        scripts = prefix+'/bin'
-        data    = prefix
-    elif sys.version < '2.2':
-        purelib = prefix
-        platlib = prefix
-        scripts = prefix+'/Scripts'
-        data    = prefix
-    else:
-        purelib = prefix+'/Lib/site-packages'
-        platlib = prefix+'/Lib/site-packages'
-        scripts = prefix+'/Scripts'
-        data    = prefix
-
-    # allow direct setting of install directories
-    try:
-        purelib = options["--install-purelib"]
-    except KeyError:
-        pass
-
-    try:
-        platlib = options["--install-platlib"]
-    except KeyError:
-        pass
-
-    try:
-        scripts = options["--install-scripts"]
-    except KeyError:
-        pass
-
-    try:
-        data = options["--install-data"]
-    except KeyError:
-        pass
-
-    # return the information that was asked for
-    if command == 'purelib':
-        return purelib
-    elif command == 'platlib':
-        return platlib
-    elif command == 'scripts':
-        return scripts
-    elif command == 'data':
-        return data
-
-
-if __name__ == "__main__":
-    print apply(get_install_path, sys.argv[1:])
diff --git a/Wrapping/Python/vtk/qt4/QVTKRenderWindowInteractor.py b/Wrapping/Python/vtk/qt4/QVTKRenderWindowInteractor.py
index 46ec257..7500e41 100644
--- a/Wrapping/Python/vtk/qt4/QVTKRenderWindowInteractor.py
+++ b/Wrapping/Python/vtk/qt4/QVTKRenderWindowInteractor.py
@@ -25,6 +25,9 @@ Changes by Phil Thompson, Oct. 2007
 
 Changes by Phil Thompson, Mar. 2008
  Added cursor support.
+
+Changes by Rodrigo Mologni, Sep. 2013 (Credit to Daniele Esposti)
+ Bug fix to PySide: Converts PyCObject to void pointer.
 """
 
 
@@ -154,7 +157,17 @@ class QVTKRenderWindowInteractor(QtGui.QWidget):
         else:
             self._RenderWindow = vtk.vtkRenderWindow()
 
-        self._RenderWindow.SetWindowInfo(str(int(self.winId())))
+        WId = self.winId()
+
+        if type(WId).__name__ == 'PyCObject':
+            from ctypes import pythonapi, c_void_p, py_object
+
+            pythonapi.PyCObject_AsVoidPtr.restype  = c_void_p
+            pythonapi.PyCObject_AsVoidPtr.argtypes = [py_object]
+
+            WId = pythonapi.PyCObject_AsVoidPtr(WId)
+
+        self._RenderWindow.SetWindowInfo(str(int(WId)))
 
         if stereo: # stereo mode
             self._RenderWindow.StereoCapableWindowOn()
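
For reference, the PyCObject handling added above amounts to the
following standalone helper (a sketch; the name `_winid_as_int` is
illustrative and not part of the patch)::

  from ctypes import pythonapi, c_void_p, py_object

  def _winid_as_int(wid):
      # Some PySide builds return the native window id as a PyCObject;
      # unwrap it to the raw pointer value before handing it to VTK.
      if type(wid).__name__ == 'PyCObject':
          pythonapi.PyCObject_AsVoidPtr.restype  = c_void_p
          pythonapi.PyCObject_AsVoidPtr.argtypes = [py_object]
          wid = pythonapi.PyCObject_AsVoidPtr(wid)
      return int(wid)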
diff --git a/Wrapping/Python/vtkPythonAppInit.cxx b/Wrapping/Python/vtkPythonAppInit.cxx
index d202154..f17f48f 100644
--- a/Wrapping/Python/vtkPythonAppInit.cxx
+++ b/Wrapping/Python/vtkPythonAppInit.cxx
@@ -36,6 +36,7 @@
 
 #include <string>
 #include <vtksys/SystemTools.hxx>
+#include <vtksys/SystemInformation.hxx>
 
 #ifdef VTK_COMPILED_USING_MPI
 class vtkMPICleanup {
@@ -73,6 +74,7 @@ extern "C" {
 
 static void vtkPythonAppInitEnableMSVCDebugHook();
 static void vtkPythonAppInitPrependPath(const char* self_dir);
+static void RemoveArgumentFromArgv(int &argc, char **&argv, int at);
 
 /* The maximum length of a file name.  */
 #if defined(PATH_MAX)
@@ -89,6 +91,7 @@ static void vtkPythonAppInitPrependPath(const char* self_dir);
 #define VTK_PYTHON_TO_STRING1(x) #x
 #define VTK_PYTHON_VERSION VTK_PYTHON_TO_STRING(PY_MAJOR_VERSION.PY_MINOR_VERSION)
 
+
 int main(int argc, char **argv)
 {
   vtkPythonAppInitEnableMSVCDebugHook();
@@ -106,7 +109,12 @@ int main(int argc, char **argv)
       if ( strcmp(argv[cc], "-V") == 0 )
         {
         displayVersion = 1;
-        break;
+        }
+      else
+      if ( strcmp(argv[cc], "--enable-bt") == 0 )
+        {
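+        // "--enable-bt": print a vtksys stack trace when an error occurs;
+        // strip the flag from argv so the Python interpreter never sees it.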
+        RemoveArgumentFromArgv(argc, argv, cc);
+        vtksys::SystemInformation::SetStackTraceOnError(1);
         }
       }
     }
@@ -302,3 +310,15 @@ static void vtkPythonAppInitPrependPath(const char* self_dir)
 #endif
     }
 }
+
+//----------------------------------------------------------------------------
+static void RemoveArgumentFromArgv(int &argc, char **&argv, int at)
+{
+  int ii=at+1;
+  while (ii<argc)
+    {
+    argv[ii-1]=argv[ii];
+    ii+=1;
+    }
+  argc-=1;
+}
diff --git a/Wrapping/PythonCore/PyVTKObject.cxx b/Wrapping/PythonCore/PyVTKObject.cxx
index b88640b..060bce1 100644
--- a/Wrapping/PythonCore/PyVTKObject.cxx
+++ b/Wrapping/PythonCore/PyVTKObject.cxx
@@ -80,7 +80,7 @@ static PyObject *PyVTKObject_Repr(PyObject *op)
 }
 
 //--------------------------------------------------------------------
-int PyVTKObject_SetAttr(PyObject *op, PyObject *attr, PyObject *value)
+static int PyVTKObject_SetAttr(PyObject *op, PyObject *attr, PyObject *value)
 {
   PyVTKObject *self = (PyVTKObject *)op;
   char *name = PyString_AsString(attr);
diff --git a/Wrapping/PythonCore/PyVTKSpecialObject.h b/Wrapping/PythonCore/PyVTKSpecialObject.h
index 39248c8..bb6a4de 100644
--- a/Wrapping/PythonCore/PyVTKSpecialObject.h
+++ b/Wrapping/PythonCore/PyVTKSpecialObject.h
@@ -40,7 +40,7 @@ class VTKWRAPPINGPYTHONCORE_EXPORT PyVTKSpecialType
 {
 public:
   PyVTKSpecialType() :
-    py_type(0), methods(0), constructors(0), docstring(0), copy_func(0) {};
+    py_type(0), methods(0), constructors(0), docstring(0), copy_func(0) {}
 
   PyVTKSpecialType(
     PyTypeObject *typeobj, PyMethodDef *cmethods, PyMethodDef *ccons,
diff --git a/Wrapping/PythonCore/vtkPythonArgs.cxx b/Wrapping/PythonCore/vtkPythonArgs.cxx
index fef34b2..b67f38f 100644
--- a/Wrapping/PythonCore/vtkPythonArgs.cxx
+++ b/Wrapping/PythonCore/vtkPythonArgs.cxx
@@ -220,7 +220,7 @@ inline bool vtkPythonGetStdStringValue(PyObject *o, std::string &a, const char *
 //--------------------------------------------------------------------
 // Overloaded methods, mostly based on the above templates
 
-bool vtkPythonGetValue(PyObject *o, const void *&a)
+static bool vtkPythonGetValue(PyObject *o, const void *&a)
 {
   PyBufferProcs *b = o->ob_type->tp_as_buffer;
   if (b && b->bf_getreadbuffer && b->bf_getsegcount)
@@ -269,7 +269,7 @@ bool vtkPythonGetValue(PyObject *o, void *&a)
 {
   // should have an alternate form for non-const "void *" that uses
   // writebuffer instead of readbuffer, but that would break existing code
-  const void *b;
+  const void *b = NULL;
   bool r = vtkPythonGetValue(o, b);
   a = const_cast<void *>(b);
   return r;
@@ -1355,3 +1355,19 @@ bool vtkPythonSequenceError(PyObject *o, Py_ssize_t n, Py_ssize_t m)
   PyErr_SetString(PyExc_TypeError, text);
   return false;
 }
+
+//--------------------------------------------------------------------
+// Checking size of array arg.
+int vtkPythonArgs::GetArgSize(int i)
+{
+  int size = 0;
+  if (this->M + i < this->N)
+    {
+    PyObject *o = PyTuple_GET_ITEM(this->Args, this->M + i);
+    if (PySequence_Check(o))
+      {
+      size = static_cast<int>(PySequence_Size(o));
+      }
+    }
+  return size;
+}
diff --git a/Wrapping/PythonCore/vtkPythonArgs.h b/Wrapping/PythonCore/vtkPythonArgs.h
index 74a66f9..5b8b0d5 100644
--- a/Wrapping/PythonCore/vtkPythonArgs.h
+++ b/Wrapping/PythonCore/vtkPythonArgs.h
@@ -98,6 +98,12 @@ public:
   bool NoArgsLeft() { return (this->I >= this->N); }
 
   // Description:
+  // Get the size of an arg, if it is a sequence.
+  // If no size is available, or if the arg is out of range,
+  // then it returns 0 but doesn't set error.
+  int GetArgSize(int i);
+
+  // Description:
   // Get the next argument as a vtkObjectBase derived type.
   // It uses a C-style cast instead of a static_cast, which
   // means that it works on incomplete types, and also means
diff --git a/Wrapping/PythonCore/vtkPythonOverload.cxx b/Wrapping/PythonCore/vtkPythonOverload.cxx
index 132e58d..42d6e67 100644
--- a/Wrapping/PythonCore/vtkPythonOverload.cxx
+++ b/Wrapping/PythonCore/vtkPythonOverload.cxx
@@ -144,9 +144,9 @@ bool vtkPythonOverloadHelper::next(const char **format, const char **classname)
 
 #if VTK_SIZEOF_LONG != VTK_SIZEOF_INT
 #ifdef PY_LONG_LONG
-int vtkPythonIntPenalty(PY_LONG_LONG tmpi, int penalty, char format)
+static int vtkPythonIntPenalty(PY_LONG_LONG tmpi, int penalty, char format)
 #else
-int vtkPythonIntPenalty(long tmpi, int penalty, char format)
+static int vtkPythonIntPenalty(long tmpi, int penalty, char format)
 #endif
 {
   if (tmpi > VTK_INT_MAX || tmpi < VTK_INT_MIN)
diff --git a/Wrapping/PythonCore/vtkPythonUtil.cxx b/Wrapping/PythonCore/vtkPythonUtil.cxx
index 1e3b8f6..c19b6b9 100644
--- a/Wrapping/PythonCore/vtkPythonUtil.cxx
+++ b/Wrapping/PythonCore/vtkPythonUtil.cxx
@@ -61,11 +61,31 @@ public:
 //--------------------------------------------------------------------
 // There are five maps associated with the Python wrappers
 
+//--------------------------------------------------------------------
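+// Pairs the smart-pointer reference that keeps a VTK object alive
+// with the Python object that wraps it.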
+class vtkPythonReferenceCountedValue
+{
+public:
+  vtkPythonReferenceCountedValue()
+  {
+    this->VTKObject = NULL;
+    this->PythonObject = NULL;
+  }
+  vtkPythonReferenceCountedValue(vtkSmartPointerBase const& cpp,
+                                 PyObject* python)
+  {
+    this->VTKObject = cpp;
+    this->PythonObject = python;
+  }
+
+  vtkSmartPointerBase VTKObject;
+  PyObject* PythonObject;
+};
+
 // Map VTK objects to python objects (this is also the cornerstone
 // of the vtk/python garbage collection system, because it contains
 // exactly one pointer reference for each VTK object known to python)
 class vtkPythonObjectMap
-  : public std::map<vtkSmartPointerBase, PyObject*>
+  : public std::map<vtkObjectBase*, vtkPythonReferenceCountedValue>
 {
 };
 
@@ -290,7 +310,7 @@ void vtkPythonUtil::AddObjectToMap(PyObject *obj, vtkObjectBase *ptr)
 #endif
 
   ((PyVTKObject *)obj)->vtk_ptr = ptr;
-  (*vtkPythonMap->ObjectMap)[ptr] = obj;
+  (*vtkPythonMap->ObjectMap)[ptr] = vtkPythonReferenceCountedValue(ptr, obj);
 
 #ifdef VTKPYTHONDEBUG
   vtkGenericWarningMacro("Added object to map obj= " << obj << " "
@@ -308,7 +328,7 @@ void vtkPythonUtil::RemoveObjectFromMap(PyObject *obj)
                          << pobj << " " << pobj->vtk_ptr);
 #endif
 
-  if (vtkPythonMap)
+  if (vtkPythonMap && vtkPythonMap->ObjectMap->count(pobj->vtk_ptr))
     {
     vtkWeakPointerBase wptr;
 
@@ -368,11 +388,11 @@ PyObject *vtkPythonUtil::GetObjectFromPointer(vtkObjectBase *ptr)
 
   if (ptr)
     {
-    std::map<vtkSmartPointerBase, PyObject*>::iterator i =
+    std::map<vtkObjectBase*, vtkPythonReferenceCountedValue>::iterator i =
       vtkPythonMap->ObjectMap->find(ptr);
     if (i != vtkPythonMap->ObjectMap->end())
       {
-      obj = i->second;
+      obj = i->second.PythonObject;
       }
     if (obj)
       {
diff --git a/Wrapping/Tcl/CMakeLists.txt b/Wrapping/Tcl/CMakeLists.txt
index 7e49cac..60b0384 100644
--- a/Wrapping/Tcl/CMakeLists.txt
+++ b/Wrapping/Tcl/CMakeLists.txt
@@ -19,7 +19,7 @@
 CONFIGURE_FILE(
   ${VTK_SOURCE_DIR}/Wrapping/Tcl/vtkTkAppInitConfigure.h.in
   ${VTK_BINARY_DIR}/Wrapping/Tcl/vtkTkAppInitConfigure.h
-  @ONLY IMMEDIATE
+  @ONLY
 )
 
 get_property(VTK_TCL_WRAPPED GLOBAL PROPERTY VTK_TCL_WRAPPED)
@@ -133,10 +133,12 @@ endif()
 
 # Create the pvtk Tcl wrapper executable with MPI support.
 if(vtkParallelMPI_ENABLED)
+  include(vtkMPI)
   vtk_module_config(PVTK vtkParallelMPI)
   include_directories(${PVTK_INCLUDE_DIRS})
 
   VTK_ADD_EXECUTABLE(pvtk vtkParaTkAppInit.cxx ${VTK_EXE_RESOURCE_FILES})
+  vtk_mpi_link(pvtk)
   IF(VTK_USE_CARBON)
     FIND_PROGRAM(VTK_APPLE_RESOURCE Rez "${OSX_DEVELOPER_ROOT}/usr/bin")
     IF(VTK_APPLE_RESOURCE)
@@ -150,19 +152,19 @@ if(vtkParallelMPI_ENABLED)
     ENDIF(VTK_APPLE_RESOURCE)
   ENDIF(VTK_USE_CARBON)
 
-  target_link_libraries(pvtk ${PVTK_LIBRARIES})
+  target_link_libraries(pvtk LINK_PRIVATE ${PVTK_LIBRARIES})
   if(VTK_USE_TK)
-    target_link_libraries(pvtk ${VTK_TK_LIBRARIES})
+    target_link_libraries(pvtk LINK_PRIVATE ${VTK_TK_LIBRARIES})
   endif(VTK_USE_TK)
 
   # Link to TCL static libraries
   if(BUILD_SHARED_LIBS)
     # vtkCommonCoreTCL is required even for shared builds
     # to link the vtkTclUtil object - FIXME ??
-    target_link_libraries(pvtk vtkCommonCoreTCL)
+    target_link_libraries(pvtk LINK_PRIVATE vtkCommonCoreTCL)
   else()
     foreach(module ${VTK_TCL_WRAPPED})
-      target_link_libraries(pvtk ${${module}_TCL_NAME}TCL)
+      target_link_libraries(pvtk LINK_PRIVATE ${${module}_TCL_NAME}TCL)
     endforeach()
   endif()
 endif()
@@ -195,19 +197,19 @@ if(VTK_TCL_CONFIGURATION_TYPES)
     set(VTK_TCL_LIBRARY_DIR "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/${config}")
     configure_file(${VTK_SOURCE_DIR}/Wrapping/Tcl/pkgIndex.tcl.in
                    ${VTK_BINARY_DIR}/Wrapping/Tcl/${config}/pkgIndex.tcl
-                   @ONLY IMMEDIATE)
+                   @ONLY)
   endforeach()
 else()
   set(VTK_TCL_LIBRARY_DIR "${CMAKE_LIBRARY_OUTPUT_DIRECTORY}")
   configure_file(${VTK_SOURCE_DIR}/Wrapping/Tcl/pkgIndex.tcl.in
                  ${VTK_BINARY_DIR}/Wrapping/Tcl/pkgIndex.tcl
-                 @ONLY IMMEDIATE)
+                 @ONLY)
 endif()
 
 # A few hand coded bits of Tcl to glue things together.
 macro(configure_tcl_files)
   foreach(file ${ARGN})
-    configure_file(${file}.tcl.in "${VTK_TCL_HOME}/${file}.tcl" @ONLY IMMEDIATE)
+    configure_file(${file}.tcl.in "${VTK_TCL_HOME}/${file}.tcl" @ONLY)
   endforeach()
 endmacro()
 
@@ -247,7 +249,7 @@ foreach(mod ${VTK_TCL_WRAPPED})
   set(module "${${mod}_TCL_NAME}")
   string(TOLOWER ${module} module_lc)
   configure_file(vtkmodule.tcl.in
-    "${VTK_TCL_HOME}/${module_lc}/${module_lc}.tcl" @ONLY IMMEDIATE)
+    "${VTK_TCL_HOME}/${module_lc}/${module_lc}.tcl" @ONLY)
   if(NOT VTK_INSTALL_NO_RUNTIME)
     install(FILES
       "${VTK_TCL_HOME}/${module_lc}/${module_lc}.tcl"
@@ -266,7 +268,7 @@ ELSE(UNIX)
 ENDIF(UNIX)
 configure_file(${VTK_SOURCE_DIR}/Wrapping/Tcl/pkgIndex.tcl.in
                ${VTK_BINARY_DIR}/Wrapping/Tcl/Install/Hide/pkgIndex.tcl
-               @ONLY IMMEDIATE)
+               @ONLY)
 IF(NOT VTK_INSTALL_NO_RUNTIME)
   INSTALL(FILES
     ${VTK_BINARY_DIR}/Wrapping/Tcl/Install/Hide/pkgIndex.tcl
diff --git a/Wrapping/Tcl/vtkTkAppInit.cxx b/Wrapping/Tcl/vtkTkAppInit.cxx
index 93b0986..f2b96f0 100644
--- a/Wrapping/Tcl/vtkTkAppInit.cxx
+++ b/Wrapping/Tcl/vtkTkAppInit.cxx
@@ -236,7 +236,7 @@ int Tcl_AppInit(Tcl_Interp *interp)
     "  }\n"
     "  return $package_res\n"
     "}\n"
-    "if {\"-A\" ni $argv} {\n"
+    "if $tcl_interactive {\n"
     "puts {Enter: \"package require vtk\" to load VTK commands}\n"
     "}\n";
   Tcl_Eval(interp, script);
diff --git a/Wrapping/Tools/README.txt b/Wrapping/Tools/README.txt
index 802b4d3..8096db1 100644
--- a/Wrapping/Tools/README.txt
+++ b/Wrapping/Tools/README.txt
@@ -25,8 +25,21 @@ a) Convert tabs to 8 spaces, e.g. :%s/\t/        /g
 b) Remove extra whitespace from the ends of lines, e.g. :%s/  *$//
 c) Remove blank lines at the beginning and end of the file
 d) Replace "int yyl;" with "yy_size_t yyl;", e.g. :%s/int yyl;/yy_size_t yyl;/
+e) Remove any instances of the "register" keyword.
 
-The final step removes a potential signed/unsigned comparison compiler
+Some known warnings with recent flex/gcc:
+
+   - Add the following code if not already present to avoid warnings about
+     isatty being used without a declaration:
+         #ifndef __cplusplus
+         extern int isatty(int);
+         #endif /* __cplusplus */
+   - Change 'int i;' to 'yy_size_t i;' in yy_scan_bytes (line ~3700).
+   - Add text after "@param line_number" (line ~3505) since doxygen
+     does not permit empty @param paragraphs (clang -Wdocumentation).
+     upstream bug: <https://sourceforge.net/p/flex/bugs/158/>
+
+Step "d" removes a potential signed/unsigned comparison compiler
 warning.  It might not be necessary in later versions of flex.
 
 
@@ -45,10 +58,7 @@ used to generate the file vtkParse.tab.c, which contains the parser.
 a) Convert tabs to 8 spaces, e.g. :%s/\t/        /g
 b) Remove extra whitespace from the ends of lines, e.g. :%s/  *$//
 c) Remove blank lines at the beginning and end of the file
-d) Remove the "goto yyerrlab1;" that appears right before yyerrlab1:
-e) Search for the second "Tokens" listing, i.e. the set of token macro
-   constants that appear after the token enumerated constants.  Remove
-   all of them.
+d) Replace all instances of "static inline" with "static".
 
 When yacc is run, it should not report any shift/reduce or reduce/reduce
 warnings.  If modifications to the rules cause these warnings to occur,
diff --git a/Wrapping/Tools/hints b/Wrapping/Tools/hints
index 7913280..539c346 100644
--- a/Wrapping/Tools/hints
+++ b/Wrapping/Tools/hints
@@ -19,8 +19,14 @@ vtkAxisActor                          GetBounds                         307  6
 vtkBoxRepresentation                  GetBounds                         307  6
 vtkCamera                             GetOrientation                    307  3
 vtkCamera                             GetOrientationWXYZ                307  4
+vtkCarbonRenderWindow                 GetPosition                       304  2
+vtkCarbonRenderWindow                 GetScreenSize                     304  2
+vtkCarbonRenderWindow                 GetSize                           304  2
 vtkCell                               GetBounds                         307  6
 vtkCharArray                          GetValueRange                     303  2
+vtkCocoaRenderWindow                  GetPosition                       304  2
+vtkCocoaRenderWindow                  GetScreenSize                     304  2
+vtkCocoaRenderWindow                  GetSize                           304  2
 vtkColorTransferFunction              GetColor                          307  3
 vtkCompositePolyDataMapper2           GetBounds                         307  6
 vtkCompositePolyDataMapper            GetBounds                         307  6
diff --git a/Wrapping/Tools/lex.yy.c b/Wrapping/Tools/lex.yy.c
index 5266275..2ff339a 100644
--- a/Wrapping/Tools/lex.yy.c
+++ b/Wrapping/Tools/lex.yy.c
@@ -9,7 +9,7 @@
 #define FLEX_SCANNER
 #define YY_FLEX_MAJOR_VERSION 2
 #define YY_FLEX_MINOR_VERSION 5
-#define YY_FLEX_SUBMINOR_VERSION 35
+#define YY_FLEX_SUBMINOR_VERSION 37
 #if YY_FLEX_SUBMINOR_VERSION > 0
 #define FLEX_BETA
 #endif
@@ -24,6 +24,10 @@
 
 /* end standard C headers. */
 
+#ifndef __cplusplus
+extern int isatty(int);
+#endif /* __cplusplus */
+
 /* flex integer type definitions */
 
 #ifndef FLEXINT_H
@@ -54,7 +58,6 @@ typedef int flex_int32_t;
 typedef unsigned char flex_uint8_t;
 typedef unsigned short int flex_uint16_t;
 typedef unsigned int flex_uint32_t;
-#endif /* ! C99 */
 
 /* Limits of integral types. */
 #ifndef INT8_MIN
@@ -85,6 +88,8 @@ typedef unsigned int flex_uint32_t;
 #define UINT32_MAX             (4294967295U)
 #endif
 
+#endif /* ! C99 */
+
 #endif /* ! FLEXINT_H */
 
 #ifdef __cplusplus
@@ -373,8 +378,8 @@ static void yy_fatal_error (yyconst char msg[]  );
         *yy_cp = '\0'; \
         (yy_c_buf_p) = yy_cp;
 
-#define YY_NUM_RULES 174
-#define YY_END_OF_BUFFER 175
+#define YY_NUM_RULES 199
+#define YY_END_OF_BUFFER 200
 /* This struct is not used in this scanner,
    but its presence is necessary. */
 struct yy_trans_info
@@ -382,134 +387,155 @@ struct yy_trans_info
         flex_int32_t yy_verify;
         flex_int32_t yy_nxt;
         };
-static yyconst flex_int16_t yy_accept[1150] =
+static yyconst flex_int16_t yy_accept[1339] =
     {   0,
-        0,    0,  175,  173,  143,  144,  144,  172,  173,  172,
-      172,  173,  172,  172,  172,  172,  172,  172,  172,  141,
-      140,  172,  172,  172,  172,  134,  134,  134,  134,  134,
-      134,  170,  173,  171,  172,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  172,  143,   11,   11,   14,  172,
-      173,  143,  165,    0,   15,    0,  158,  162,  159,    0,
-        0,    0,  116,  117,    0,    0,    0,    0,    0,  156,
-      152,  154,  153,  155,  150,  148,    0,  135,    1,   13,
-      157,  136,  139,    0,    0,  140,  140,  140,  169,  147,
-
-      166,  164,  167,  151,  134,  134,  134,  134,  134,  134,
-      134,  134,  142,    0,  161,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,   70,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  160,  163,  143,   11,
-       14,    0,    0,   14,   14,   12,    0,   15,   16,   16,
-        0,    0,    0,    0,    0,    0,  149,  168,  135,  135,
-       13,  136,  136,  139,  139,    0,  137,  138,  145,  146,
-      134,  134,  134,  134,    0,  134,  134,  134,  134,  134,
-
-      134,  134,  134,  134,   68,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,   25,  134,  134,  134,  134,   58,   72,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,   74,   12,   14,   12,   12,   12,   12,
-        0,    0,    0,    0,    0,    0,    0,    0,    0,    0,
-        0,  135,    0,  136,  137,  138,  134,  134,  134,  134,
-        0,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,   66,  134,  134,   29,   24,  134,  134,  134,
-
-      134,  134,  134,   42,  134,  134,  134,  134,  134,  134,
-       23,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,    0,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,   28,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,   12,   12,   12,   12,   12,   12,   12,   12,
-        0,    0,    0,    0,    0,    0,    0,    0,    0,    0,
-      134,  134,  134,  134,  129,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,   77,
-       37,   78,   45,  134,  134,  134,  134,  134,   20,  134,
-
-      134,  134,  134,  134,  134,  134,   71,  134,  134,  134,
-      134,  134,  134,   22,  134,  134,  134,  134,  134,    0,
-      134,  134,  134,   61,  134,  134,   43,  134,   57,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,   12,   12,   12,   12,   12,
-       12,   12,    2,    3,    0,    0,    0,    0,    0,    0,
-        0,    0,    0,    0,  134,  134,  134,  134,  129,  134,
-      134,  134,  122,  134,  134,  134,  134,  134,  134,  134,
-      134,   69,   76,  134,   59,   19,  134,  134,   52,   49,
-       50,  134,  134,  134,   73,  134,  134,  134,  134,   39,
-
-      134,  134,  134,   27,   30,  134,   51,    0,    0,    0,
-      134,   38,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,   75,   12,   12,   12,    2,    3,   12,   12,   12,
-        2,    3,    0,    0,    0,    0,    0,    0,    0,    0,
-        0,    0,  134,  134,  134,  134,  134,  134,  134,  121,
-      134,  132,  134,  134,  134,   21,  134,  134,  134,  134,
-      134,  134,   47,  134,  134,  134,   40,  134,    0,  134,
-      134,  134,   31,  134,    0,    0,    0,  125,  125,  134,
-       55,  134,  134,   44,  134,  134,  134,  134,  134,  134,
-
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,    0,   12,   12,   12,    2,    3,
-       12,   12,   12,    0,    0,    0,    0,    0,  119,  120,
-        0,    0,    0,    0,    0,  122,   17,  122,   32,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-       60,  124,  124,  134,   48,  123,  123,    0,  134,    0,
-       67,  134,    0,   36,  134,    0,    0,    0,   53,   54,
-       26,   46,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,   12,   12,   12,    5,   12,
-
-       12,    0,    0,    0,    0,    0,  118,    0,    0,    0,
-        0,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-       56,    0,   41,    0,  134,  134,    0,    0,    0,  134,
-      134,  134,  134,  134,  134,  134,   79,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,    5,   12,   12,    5,   12,   12,    0,    0,
-        0,    0,  134,  133,  134,  134,  131,   64,  134,    0,
-        0,    0,  134,  134,    0,    0,    0,  134,  134,  134,
-      134,  134,  134,  134,   18,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,    5,
-
-       12,   12,   12,   12,    0,    4,  134,  134,  134,  134,
-        0,    0,    0,  134,   62,    0,    0,    0,  134,  134,
-      134,   81,  134,  134,  134,  134,   80,  134,  134,  134,
-      134,  134,  134,  134,  134,  106,  134,  134,  134,  134,
-      134,  134,   12,   12,   12,   12,    0,  134,  134,  134,
-       63,    0,    0,    0,  134,    0,    0,  134,  128,  134,
-      134,  134,  134,  134,  134,  134,  134,  126,  134,  108,
-      110,  112,  102,  134,  134,  134,  134,  107,  134,  134,
-      134,   12,   12,   12,   12,    0,  134,  134,  130,    0,
-        0,    0,   33,  134,  134,  134,  134,  134,  134,  134,
-
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  109,  111,  113,  134,  134,  134,
-       12,   12,   12,   12,   12,   12,   12,    0,  134,  134,
-        0,   34,    0,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  114,  115,  134,  134,  134,  134,   12,   12,   12,
-       12,   12,   12,   12,   12,   12,   12,    0,  134,  104,
-        0,  134,   87,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,  134,  134,  134,  134,
-      134,  134,  134,   12,   12,   12,   12,   12,   12,   12,
-
-       12,   12,    6,  134,    0,   35,   65,  134,  134,  134,
-      134,  134,  134,  134,  134,  134,   84,  134,  134,  134,
-      134,  134,  134,  134,  134,  127,  134,  134,   12,   12,
-       12,   12,    6,   12,   12,   12,   12,    6,  134,  134,
-      134,   86,   83,  134,  134,  134,  134,   97,   85,   82,
-      134,  134,  134,  134,   96,  134,  134,  134,   12,   12,
-       12,   12,    6,   12,   12,   12,   12,  105,  134,  134,
-       92,   93,   94,   95,   88,   89,   90,   91,  134,  134,
-      134,   12,   12,   12,   12,   12,   12,   12,   12,  134,
-      134,  134,  134,  134,   12,   12,   12,   12,   10,   12,
-
-       12,   12,  100,  134,  103,  134,  134,   10,   12,   12,
-       12,   10,   12,    8,    9,  134,  134,  134,   10,   12,
-        8,    9,   12,    8,    9,  134,  134,  134,   12,    8,
-        9,   12,  134,  134,   99,   12,    7,  134,  134,    7,
-        7,  134,  134,    7,  134,   98,  134,  101,    0
+        0,    0,  200,  198,  162,  163,  163,  197,  198,  197,
+      197,  198,  197,  197,  197,  197,  197,  197,  197,  160,
+      159,  197,  197,  197,  197,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  195,  198,  196,  197,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  197,  162,
+       11,   11,   14,  197,  198,  162,  190,    0,    0,   15,
+       17,    0,  169,  183,  166,  187,  184,    0,    0,    0,
+      128,  129,    0,    0,    0,    0,    0,  181,  177,  179,
+      178,  180,  176,  174,    0,  154,    1,   13,  182,  155,
+
+      158,    0,    0,  159,  159,  159,  194,  168,  165,  167,
+      173,  191,  189,  192,  170,  153,  153,  153,  153,    0,
+        0,  153,  153,  153,    0,  153,  153,    0,  164,  161,
+        0,  186,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,   82,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  185,  188,  162,   11,   14,    0,    0,
+       14,   14,   12,    0,   15,    0,    0,   17,    0,   15,
+
+       16,   16,    0,    0,    0,    0,    0,    0,  175,  193,
+      154,  154,   13,  155,  155,  158,  158,    0,  156,  157,
+      159,  156,  171,  172,  153,  153,  153,  153,    0,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,   80,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,   27,  153,  153,  153,  153,   66,
+      153,   84,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+       70,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+
+       86,   12,   14,   12,   12,   12,   12,    0,    0,    0,
+       15,   15,    0,    0,    0,    0,    0,    0,    0,    0,
+        0,    0,    0,  154,    0,  154,  155,    0,  155,  158,
+      156,  156,  157,  157,  156,  153,  153,  153,  153,    0,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,   20,  153,  153,
+       31,  153,   26,  153,  153,  153,  153,  153,  153,  153,
+      153,   48,  153,  153,  153,  153,  153,  153,   25,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,    0,  153,  153,
+
+      153,  153,  153,  153,  153,  153,  153,  153,   30,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,   12,   12,   12,   12,   12,   12,   12,
+       12,    0,    0,   15,    0,   15,   15,   16,   16,    0,
+        0,    0,    0,    0,    0,    0,    0,  154,  154,  155,
+      155,  156,  156,  157,  157,  157,  157,  156,  153,  153,
+      153,  153,  140,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,   89,   71,  153,  153,   43,   90,   51,
+      153,  153,  153,  153,  153,  153,  153,   22,  153,  153,
+
+      153,  153,  153,  153,  153,  153,  153,   83,  153,  153,
+      153,  153,  153,  153,   24,  153,  153,  153,  153,  153,
+        0,  153,  153,  153,  153,   69,  153,  153,   49,  153,
+       65,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,   12,   12,   12,
+       12,   12,   12,   12,    2,    3,    0,    0,   15,   15,
+       15,   16,   16,    0,    0,    0,    0,    0,    0,    0,
+        0,  156,  153,  153,  153,  153,  140,  153,  153,  153,
+      134,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,   81,   88,  153,  153,  153,
+
+      153,  153,  153,   67,   21,  153,  153,   60,   55,   56,
+      153,  153,  153,  153,   85,  153,  153,  153,  153,  153,
+       45,  153,  153,  153,   29,  153,  153,   58,    0,    0,
+        0,    0,  153,   44,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,   87,   12,   12,   12,    2,    3,
+       12,   12,   12,    2,    3,    0,    0,   15,    0,    0,
+        0,    0,    0,    0,    0,    0,  153,  153,  153,  153,
+      153,  153,  153,  133,  153,  146,  153,  153,  153,  153,
+      150,  153,  153,  153,   23,  153,  153,  142,  153,  153,
+
+      153,  153,  153,   74,  153,  153,  153,   53,  153,  153,
+      152,  153,  153,   46,  153,    0,  153,  153,  153,   35,
+       35,  153,  153,    0,    0,    0,    0,    0,    0,  137,
+      137,  153,  153,   63,  153,  153,   50,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,    0,   34,   12,   12,
+       12,    2,    3,   12,   12,   12,    0,    0,    0,    0,
+        0,  131,  132,    0,    0,    0,    0,    0,  134,   18,
+      134,   38,  153,  153,  153,  143,  144,  153,  153,  153,
+      153,  153,  153,  153,  153,   32,   33,  153,  153,   73,
+
+      153,   68,  136,  136,  153,   72,  153,   54,  135,  135,
+        0,  153,    0,   79,  153,    0,   42,   36,   36,  153,
+      153,    0,    0,    0,    0,    0,    0,   61,  153,   62,
+       28,   52,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,    0,   12,   12,   12,    5,   12,
+       12,    0,    0,    0,    0,    0,  130,    0,    0,    0,
+        0,  153,  153,  153,  147,  153,  153,  153,  153,  153,
+      153,   57,  153,   64,  153,    0,   47,    0,  153,  153,
+      153,    0,    0,    0,    0,    0,    0,  153,  153,  153,
+
+      153,  153,  153,  153,   91,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+        0,    5,   12,   12,    5,   12,   12,    0,    0,    0,
+        0,  153,  151,  153,  153,  153,  149,   77,  153,   37,
+       37,    0,    0,    0,  153,  153,  153,    0,    0,    0,
+        0,    0,    0,  153,  153,  153,  153,  153,  153,  153,
+       19,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,    5,   12,   12,   12,   12,
+        0,    4,  153,  153,  153,  153,  153,    0,    0,    0,
+      153,  153,   75,    0,    0,    0,    0,    0,    0,  153,
+
+      153,  153,   93,  153,  153,  153,  153,   92,  153,  153,
+      153,  153,  153,  153,  153,  153,  118,  153,  153,  153,
+      153,  153,  153,   12,   12,   12,   12,    0,  153,  153,
+      153,  153,   76,    0,    0,    0,  153,  153,    0,    0,
+        0,    0,   59,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  138,  153,  120,  122,  124,  114,  153,  153,
+      153,  153,  119,  153,  153,  153,   12,   12,   12,   12,
+        0,  153,  153,  145,  148,    0,    0,    0,   39,  153,
+      141,    0,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+
+      153,  153,  121,  123,  125,  153,  153,  153,   12,   12,
+       12,   12,   12,   12,   12,    0,  153,  153,    0,   40,
+        0,  153,    0,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      126,  127,  153,  153,  153,  153,   12,   12,   12,   12,
+       12,   12,   12,   12,   12,   12,    0,  153,  116,    0,
+      153,   99,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,  153,  153,  153,  153,  153,  153,  153,  153,
+      153,  153,   12,   12,   12,   12,   12,   12,   12,   12,
+       12,    6,  153,    0,   41,   78,  153,  153,  153,  153,
+
+      153,  153,  153,  153,  153,   96,  153,  153,  153,  153,
+      153,  153,  153,  153,  139,  153,  153,   12,   12,   12,
+       12,    6,   12,   12,   12,   12,    6,  153,  153,  153,
+       98,   95,  153,  153,  153,  153,  109,   97,   94,  153,
+      153,  153,  153,  108,  153,  153,  153,   12,   12,   12,
+       12,    6,   12,   12,   12,   12,  117,  153,  153,  104,
+      105,  106,  107,  100,  101,  102,  103,  153,  153,  153,
+       12,   12,   12,   12,   12,   12,   12,   12,  153,  153,
+      153,  153,  153,   12,   12,   12,   12,   10,   12,   12,
+       12,  112,  153,  115,  153,  153,   10,   12,   12,   12,
+
+       10,   12,    8,    9,  153,  153,  153,   10,   12,    8,
+        9,   12,    8,    9,  153,  153,  153,   12,    8,    9,
+       12,  153,  153,  111,   12,    7,  153,  153,    7,    7,
+      153,  153,    7,  153,  110,  153,  113,    0
     } ;
 
 static yyconst flex_int32_t yy_ec[256] =
@@ -518,622 +544,826 @@ static yyconst flex_int32_t yy_ec[256] =
         4,    4,    5,    1,    1,    1,    1,    1,    1,    1,
         1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
         1,    6,    7,    8,    9,    1,   10,   11,   12,   13,
-       14,   15,   16,   14,   17,   18,   19,   20,   21,   22,
-       23,   24,   25,   26,   25,   27,   25,   28,   14,   29,
-       30,   31,    1,   32,   33,   34,   35,   36,   37,   38,
-       39,   40,   41,   42,   43,   44,   45,   46,   47,   48,
-       49,   50,   51,   52,   53,   54,   55,   56,   57,   40,
-       58,   59,   60,   61,   62,    1,   63,   64,   65,   66,
-
-       67,   68,   69,   70,   71,   72,   73,   74,   75,   76,
-       77,   78,   79,   80,   81,   82,   83,   84,   85,   86,
-       87,   88,   14,   89,   14,   14,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1
+       14,   15,   16,   17,   18,   19,   20,   21,   22,   23,
+       24,   25,   26,   27,   26,   28,   26,   29,   17,   30,
+       31,   32,    1,   33,   34,   35,   36,   37,   38,   39,
+       40,   41,   42,   43,   44,   45,   46,   47,   48,   49,
+       50,   51,   52,   53,   54,   55,   56,   57,   58,   41,
+       59,   60,   61,   62,   63,    1,   64,   65,   66,   67,
+
+       68,   69,   70,   71,   72,   73,   74,   75,   76,   77,
+       78,   79,   80,   81,   82,   83,   84,   85,   86,   87,
+       88,   89,   17,   90,   17,   17,    1,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91,   91,   91,   91,   91,   91,
+       91,   91,   91,   91,   91
     } ;
 
-static yyconst flex_int32_t yy_meta[90] =
+static yyconst flex_int32_t yy_meta[92] =
     {   0,
-        1,    2,    3,    1,    2,    2,    1,    1,    1,    1,
-        2,    4,    1,    1,    2,    1,    1,    5,    1,    6,
-        6,    6,    6,    6,    6,    6,    6,    7,    1,    1,
-        1,    1,    8,    8,    8,    8,    8,    8,    9,    9,
-        9,    9,    9,    9,    9,    9,    9,    9,    9,    9,
-        9,    9,    9,    9,    9,    9,    9,    1,    1,    1,
-        1,    9,    8,    8,    8,    8,    8,    8,    9,    9,
-        9,    9,    9,    9,    9,    9,    9,    9,    9,    9,
-        9,    9,    9,    9,    9,    9,    9,    9,    1
+        1,    2,    3,    4,    5,    2,    1,    1,    1,    1,
+        6,    7,    1,    4,    6,    1,    1,    1,    8,    1,
+        9,    9,    9,    9,    9,    9,    9,    9,   10,    1,
+        1,    1,    1,   11,   11,   11,   11,   11,   11,   12,
+       12,   12,   12,   12,   12,   12,   12,   12,   12,   12,
+       12,   12,   12,   12,   12,   12,   12,   12,    1,    1,
+        1,    1,   12,   11,   11,   11,   11,   11,   11,   12,
+       12,   12,   12,   12,   12,   12,   12,   12,   12,   12,
+       12,   12,   12,   12,   12,   12,   12,   12,   12,    1,
+       13
+
     } ;
 
-static yyconst flex_int16_t yy_base[1187] =
+static yyconst flex_int16_t yy_base[1417] =
     {   0,
-        0,   88, 2673, 2674,   90, 2674, 2674, 2642,   87, 2641,
-       87, 2611,   97, 2674, 2639,   85,   88,  113,   91,  140,
-      158, 2640,   84, 2637,   92,   63, 2604,   83,   42,   89,
-       85, 2674,  124, 2674, 2635, 2602,   92,  126,  124,  125,
-      128,  131,   93,   80,   87,  146,  137,  144,   81,  158,
-      168,  160,  171,   94,  142,  241, 2674, 2674, 2604,  237,
-     2591,  243, 2674,  192, 2674,  202, 2674, 2674, 2674,  132,
-      204,  263, 2674, 2674,  125, 2633,  204,  193,  226, 2674,
-     2674, 2674, 2674, 2674, 2645, 2674, 2641,  260, 2674,    0,
-     2674,  279,  308,  327,    0,  337,  358,  370, 2674, 2628,
-
-     2674, 2674, 2674, 2627, 2594,  217,  227,  213,  274,  231,
-      228,  230, 2674, 2652, 2674,  340,  229,  232,  255,  200,
-      245,  247,  294,  250,  257,  336,  258,  260,  337,  338,
-      345,  348,  339,  351,  354,  304,  355,  305, 2592,  356,
-      363,  208,  370,  358,  362,  374,  380,  382,  385,  261,
-      387,  390,  386,  393,  389,  391, 2674, 2674,  467, 2674,
-     2594, 2633, 2580, 2591,  423,  450,  399,  446, 2674,  447,
-     2621, 2620,  449,  435,  450,  452, 2674, 2674,  487,  502,
-        0,  515,  527,  537,  549,  557,  565,  573, 2674, 2674,
-      454,  309,  439,  459, 2619, 2584,  469,  433,  423,  306,
-
-      441,  470,  539,  430, 2583,  468,  540,  494,  542,  543,
-      550,  551,  552,  251,  553,  419,  556,  558,  564,  567,
-      571, 2582,  569,  581,  582,  584, 2581, 2580,  573,  610,
-      586,  590,  593,  594,  592,  595,  596,  597,  598,  599,
-      600,  636,  607,  605,  608,  617,  618,  626,  624,  619,
-      627,  629,  668, 2579,  690,  691,    0,  681, 2588, 2587,
-     2560, 2555,  723,  675,  687,  678,  680,  682,  685,  688,
-      739,  748,  756,  764,  772,  780,  686,  685,  774,  693,
-        0,  775,  689,  674,  677,  748,  678,  758,  749,  691,
-      761,  787, 2574,  759,  760, 2573, 2572,  771,  764,  779,
-
-      780,  789,  791, 2571,  763,  788,  793,  794,  795,  797,
-     2570,  765,  796,  823,  679,  803,  805,  811,  814,  807,
-      817,  818,  821,  824, 2569,  822,  826, 2602,  830,  831,
-      833,  832,  839,  837,  840,  842,  846, 2567,  848,  850,
-      856,  852,  854,  858,  860,  861,  863,  864,  870,  869,
-      871,  897,    0,  918, 2576, 2575,  893, 2559, 2569, 2568,
-     2549, 2555,  914,  917,  907,  122,  919,  920,  921,  924,
-      436,  909,  924,  921,    0,  904,  926,  912,  927,  905,
-      907,  908,  903,  914,  949,  916,  917,  918,  928, 2559,
-     2558, 2557, 2556,  938,  940,  920,  941,  942, 2555,  946,
-
-      947,  930,  948,  951,  953,  954, 2554,  957,  955,  959,
-      961,  963,  966, 2553,  965,  968,  994, 2552,  973,  969,
-      979,  980,  981, 2551,  985,  991, 2550,  987, 2549,  995,
-      989,  993,  997,  999, 1002, 1013, 1004, 1005, 1006, 1007,
-     1009, 1011, 1012, 1027, 1019, 1037, 2543, 2553, 2552, 2574,
-     2569, 2524,    0,    0, 2537, 2523, 1054, 1062, 1063, 1066,
-     1068, 1069, 1071, 1072, 1051, 1052, 1068, 1070,    0, 1055,
-     1072, 1075, 1061, 1048, 1050, 1053, 1058, 1056, 1098, 1063,
-     1073, 2540, 2539, 1090, 2538, 2537, 1078, 1079, 2536, 2535,
-     2534, 1067, 1082, 1083, 2533, 1085, 1086, 1092, 1094, 1161,
-
-     1103, 1095, 1096, 2532, 2531, 1131, 2530, 2510, 2509, 2507,
-     1223, 2526, 1107, 1110, 1109, 1112, 1111, 1118, 1120, 1124,
-     1119, 1150, 1121, 1137, 1224, 1140, 1136, 1144, 1126, 1143,
-      690, 2525, 2553, 2548, 2503,    0,    0, 2538, 2547, 2516,
-        0,    0, 2505, 2511, 1172, 1179, 1259, 1182, 1229, 1193,
-     1232, 1235, 1158, 1214, 1227, 1215, 1231, 1239, 1232, 2516,
-     1154, 2515, 1217, 1226, 1234, 2514, 1228, 1236, 1243, 2513,
-     1241, 1323, 2512, 1246, 1294, 1412, 1311, 1248, 1316, 1295,
-     1298, 1383, 2511, 1325, 2490, 2489, 2490, 2674, 2507, 1296,
-     2506, 1297, 1299, 2505, 1304, 1305, 1336, 1315, 1316, 1306,
-
-     1318, 1329, 1330, 1384, 1331, 1339, 1342, 1385, 1332, 1386,
-     1413, 1387, 1391, 1341, 2538, 2520, 2529, 2498,    0,    0,
-     2525, 2509, 2480, 2492, 2495, 1422, 1431, 1475, 2674, 2529,
-     1480, 1381, 1424, 1382, 1429, 1414, 2494, 1436, 2493, 2492,
-     1426, 2491, 1427, 1429, 1432, 1431, 1435, 1438, 1434, 1465,
-     2490, 2674, 2489, 1444, 2488, 2674, 2487, 1511, 1456, 2474,
-     2485, 1445, 1518, 2674, 1466, 2466, 2465, 2473, 2481, 2480,
-     2479, 2478, 1469, 1457, 1471, 1473, 1464, 1472, 1477, 1478,
-     1481, 1487, 1479, 1485, 1489, 1493, 1492, 1494, 1497, 1498,
-     1500, 1522, 1504, 1502, 1536, 2502, 2486, 2457,    0, 2495,
-
-     2464, 2458, 2468, 1572, 1580, 1588, 2674, 1537, 1548, 1552,
-     1553, 1551, 1539, 1552, 1535, 1542, 1544, 1547, 1537, 1553,
-     2470, 2457, 1627, 2453, 1543, 1550, 2462, 2461, 2451, 1561,
-     1562, 1572, 1563, 1573, 1574, 1576, 2464, 1582, 1579, 1584,
-     1585, 1588, 1587, 1589, 1591, 1593, 1600, 1594, 1595, 1599,
-     1601, 1605,    0, 2484, 2453,    0, 2476, 2444, 2439, 2453,
-     1642, 1644, 1618, 2457, 1621, 1612, 2456, 2455, 1614, 2439,
-     1682, 2433, 1624, 1628, 2451, 2450, 2443, 1646, 1630, 1632,
-     1634, 1635, 1637, 1640, 2449, 1641, 1643, 1639, 1645, 1647,
-     1650, 1652, 1702, 1660, 1665, 1668, 1258, 1669, 1670,    0,
-
-     2463, 2431, 2462, 2425, 2443, 2674, 1700, 1686, 2443, 1673,
-     2422, 2429, 2421, 2439, 2438, 2424, 2423,    0, 1679, 1681,
-     1687, 2435, 1675, 1683, 1691, 1699, 2434, 1684, 1696, 1692,
-     1698, 1694, 1733, 1747, 1746, 2433, 1711, 1712, 1754, 1716,
-     1744, 1720, 2448, 2411, 2486, 2420, 2408, 1735, 1748, 1749,
-     2427, 2407, 2410, 1799, 1750,    0,    0, 1727, 2424, 1732,
-     1745, 1751, 1798, 1755, 1769, 1778, 1806, 2423, 1810, 2422,
-     2421, 2420, 2419, 1763, 1773, 1804, 1779, 2418, 1775, 1776,
-     1780, 2473, 2407, 1812, 2400, 2405, 2413, 1794, 1802, 1856,
-     2392, 1868, 2674, 1787, 1795, 1803, 1814, 1816, 1807, 1835,
-
-     1837, 1838, 1823, 1825, 1826, 1829, 1849, 1850, 1853, 1856,
-     1840, 1845, 1880, 1843, 2411, 2410, 2409, 1783, 1844, 1846,
-     1874, 2393, 2406, 2401, 2400, 2399, 2389, 2387, 1877, 1870,
-     1921, 2674, 2382, 1857, 1860, 1866, 1869, 1878, 1867, 1873,
-     1882, 1884, 1886, 1879, 1888, 1892, 1898, 1900, 1902, 1904,
-     1906, 2400, 2399, 1894, 1896, 1907, 1912, 2397, 2392, 2391,
-     2390, 2380, 2371, 2373, 2386, 2385, 2423, 2374, 1915, 2387,
-     1973, 1911, 2386, 1935, 1919, 1920, 1921, 1927, 1929, 1933,
-     1922, 1928, 1934, 1941, 1942, 1944, 1946, 1948, 1950, 1970,
-     1722, 1954, 1955, 2363, 2365, 2378, 2377, 2415, 2375, 2376,
-
-     2434, 2433,    0, 1984, 2020, 2674, 2376, 1965, 1962, 1972,
-     1973, 1971, 1974, 1975, 1976, 1980, 2375, 1981, 1982, 1983,
-     1985, 1990, 1991, 1998, 1977, 2374, 2002, 1979, 2368, 2369,
-     2427, 2420,    0, 2354, 2328, 2366, 2327,    0, 2018, 2004,
-     2005, 2319, 2310, 2006, 2010, 2014, 2015, 2301, 2292, 2283,
-     2016, 2017, 2019, 2020, 2253, 2023, 2022, 2028, 2243, 2217,
-     2255, 2216,    0, 2188, 2194, 2181, 2176, 2184, 2024, 2037,
-     2176, 2172, 2168, 2163, 2155, 2150, 2138, 2133, 2038, 2039,
-     2058, 2109, 2115, 2111, 2110, 2102, 2097, 2080, 2078, 2044,
-     2043, 2045, 2047, 2048, 2077, 2079, 2075, 2074,    0, 2072,
-
-     2076, 2075, 2086, 2052, 2083, 2050, 2051,    0, 1992, 1954,
-     1427,    0, 1389,    0,    0, 2053, 2074, 2063,    0, 1202,
-        0,    0, 1142,    0,    0, 2064, 2065, 2069, 1051,    0,
-        0,  908, 2087, 2072,  561,  347,    0, 2076, 2071,    0,
-        0, 2079, 2073,    0, 2080,  157, 2085,   47, 2674, 2162,
-     2171, 2179, 2184, 2188, 2197, 2201, 2205, 2214, 2218, 2223,
-     2227, 2235, 2239, 2243, 2252, 2254, 2258, 2267, 2276, 2285,
-     2294, 2303, 2312, 2321, 2328, 2335, 2342, 2351, 2360, 2369,
-     2378, 2387, 2396, 2405, 2414, 2423
+        0,   90, 4032, 4033,   92, 4033, 4033, 4000,   98,   76,
+       86, 3970,  113, 4033, 3998,   98,   95,  115,  130,  156,
+      174,   93,  122, 3997,   89,   97, 3964,   99,  123,   81,
+      140,  155,  102,  124,  186,  151, 4033, 3995,  170,  144,
+      142,  163,  162,  145,  172,  108,   94,  152,  188,  176,
+      181,  105,  196,  206,  252,  198,  183,  153,  139,  280,
+     4033, 4033, 3965,  273, 3952,  269, 4033,  293,  151,  235,
+      265,  306, 4033, 4033, 4033, 4033, 4033,  242,  251,  321,
+     4033, 4033,  288, 3994,  287,  263,  255, 4033, 4033, 4033,
+     4033, 4033, 4007, 4033, 4002,  318, 4033,    0, 4033,  337,
+
+      366,  385,    0,  395,    0,  414, 4033, 4033, 4033, 4033,
+     3989, 4033, 4033, 4033, 3988, 3955,  286,  284,  285,  323,
+     3957,  361,  332,  304, 4003,  291,  305,  394, 4033, 4033,
+     4012, 4033,  368,  308,  366,  310,  407,  383,  311,  319,
+      364,  386,  365,  227,  287,  389,  393,  391,  395,  394,
+      398,  404,  397,  417,  420,  421,  422,  423,  424,  425,
+      431,  428, 3951,  432,  439,  444,  449,  438,  309,  441,
+      459,  451,  455,  362,  462,  512,  461,  465,  466,  467,
+      352,  475,  468, 4033, 4033,  548, 4033, 3953, 3992, 3939,
+     3950,  529,  529,  468, 3946,  540, 3995, 4033,  600,  574,
+
+     3944,  543, 3977, 3976,  523,  527,  532,  569, 4033, 4033,
+        0,  647,    0,    0,  660,    0,  673,  569,  618,  681,
+        0,  700, 4033, 4033,  520,   46,  521,  174, 3975, 3940,
+      536,  497,  496,  508,  523,  573,  584,  666,  585,  586,
+      603,  594,  593,  667, 3939,  673,  675,  539,  592,  629,
+      630,  677,  650,  668,  671,  689,  679,  691,  695,  697,
+      696,  698,  700,  702, 3938,  703,  706,  714,  707, 3937,
+      704, 3936,  710,  716,  742,  718,  719,  720,  723,  724,
+      725,  726,  729,  727,  730,  731,  771,  745,  746,  748,
+     3935,  750,  749,  752,  758,  757,  765,  759,  801,  767,
+
+     3934,  827,  798,    0,  823, 3943, 3942, 3915, 3910,    0,
+      829,  892,  982,    0, 1073,  834,  809,  821,  817,  815,
+      650,  823,  842,    0,  851,  905,    0,  913,  921,    0,
+      934,    0, 1144,    0,    0,  821,  818,  824,  851,    0,
+     1139,  901,  800,  822,  823,  852,  900,  833,  902, 1110,
+      903,  798, 1113,  904,  909, 1112, 1152, 3929, 1122, 1123,
+     3928, 1129, 1179,  806, 1130, 1124, 1131, 1134, 1143, 1153,
+     1154, 3927, 1159, 1156, 1158, 1161, 1162,  910, 3926,  854,
+     1164,  911, 1185, 1166,  912, 1169, 1172,  916, 1184, 1171,
+     1173, 1181, 1187, 1193, 3925, 1191, 1188, 3958, 1195, 1199,
+
+     1200, 1203,  832, 1206, 1205, 1208, 1211, 1213, 3923, 1216,
+     1214, 1217, 1221, 1223, 1222, 1225, 1224, 1228, 1231, 1232,
+     1233, 1238, 1260,    0, 1283, 3932, 3931, 1259, 3915, 3925,
+     3924, 3905, 3911,    0, 1328, 1419, 1509,    0, 1599, 1278,
+     1280, 1392, 1393, 1394, 1404, 1396, 1411, 1432,    0, 1443,
+        0,    0,    0, 1669,    0,    0,    0,    0, 1135, 1399,
+     1409, 1435,    0, 1255, 1436, 1263, 1434, 1414, 1421, 1419,
+     1422, 1258, 1425, 1261, 1379, 1426, 1424, 1467, 1429, 1468,
+     1439, 1469, 1441, 3915, 3914, 1509, 1510, 3913, 3912, 1480,
+     1471, 1472, 1483, 1489, 1481, 1492, 1497, 3911, 1498, 1499,
+
+     1500, 1491, 1512, 1513, 1514, 1515, 1557, 3910, 1516, 1558,
+     1519, 1520, 1559, 1521, 3909, 1560, 1525, 1586, 3908, 1524,
+     1561, 1564, 1562, 1532, 1530, 3907, 1583, 1573, 3906, 1585,
+     3905, 1589, 1584, 1587, 1591, 1581, 1604, 1592, 1600, 1605,
+     1603, 1608, 1610, 1618, 1621, 3904, 1622, 1627, 3898, 3908,
+     3907, 3929, 3924, 3879,    0,    0, 3892, 3878,    0, 1738,
+     1828,    0, 1918, 1647, 1680, 1669, 1657, 1658, 1648, 1693,
+     1681,    0, 1650, 1664, 1724, 1725,    0, 1663, 1722, 1730,
+     1669, 1656, 1662, 1649, 1702, 1709, 1703, 1654, 1710, 1474,
+     1714, 1753, 1718, 1726, 1720, 3895, 3894, 3893, 3892, 1741,
+
+     1721, 1727, 1728, 3891, 3890, 1746, 1731, 3889, 3888, 3887,
+     1738, 1742, 1732, 1786, 3886, 1787, 1788, 1750, 1752, 1789,
+     1816, 1799, 1790, 1791, 3885, 2008, 1817, 3884, 3864, 3861,
+     3862, 1792, 2099, 3880, 1793, 3879, 1427, 1806, 1815, 1818,
+     1821, 1822, 1814, 1853, 1807, 1840, 1906,  510, 1823, 1761,
+     1854, 1835, 1878, 1876, 3878, 3906, 3901, 3856,    0,    0,
+     3891, 3900, 3869,    0,    0, 3858, 3864, 2190, 1914, 1873,
+     2043, 1788, 1882, 1915, 1924, 1916, 1897, 1903, 1913, 1909,
+     1926, 1929, 1921, 3869, 1904, 3868, 1849, 1910, 3867, 1381,
+     3866, 1908, 1911, 1930, 3865, 1912, 1932, 3864, 2012, 2013,
+
+     1934, 1922, 1936, 3863, 3862, 1979, 2280, 3861, 1937, 1980,
+     3860, 1984, 2371, 2075, 1939, 2051, 1992, 1981, 2148, 4033,
+     3859, 2462, 2021, 3838, 3845, 3836, 3829, 3845, 3835, 4033,
+     3852, 1998, 2099, 3851, 2011, 2019, 3850, 2020, 2026, 2071,
+     1942, 1760, 2072, 2073, 2077, 2081, 1989, 2027, 1841, 2082,
+     2079, 2083, 2125, 2094, 2105, 2030, 3883, 3848, 3864, 3873,
+     3842,    0,    0, 3869, 3853, 3824, 3836, 3839, 2120, 2141,
+     2210, 4033, 3873, 2224, 2153, 2119, 2157, 2195, 2124, 3838,
+     2168, 3837, 3836, 2178, 3835, 3834, 3833, 2170, 2180, 2172,
+     2106, 2112, 2171, 2173, 2179, 3832, 3831, 2175, 2177, 3830,
+
+     2199, 3829, 4033, 3828, 2198, 3827, 2280, 3826, 4033, 3825,
+     2265, 2076, 3812, 3823, 2182, 2317, 4033, 4033, 3822, 2197,
+     2209, 3803, 3808, 3801, 3813, 3791, 3807, 3815, 2251, 3814,
+     3813, 3812, 2211, 2181, 2258, 2252, 2253, 2264, 2263, 2265,
+     2213, 2269, 2275, 2273, 2286, 2274, 2281, 2282, 2287, 2290,
+     2363, 2294, 2343, 2371, 2336, 3836, 3820, 3791,    0, 3829,
+     3798, 3792, 3802, 2407, 2496, 2414, 4033, 2336, 2340, 2421,
+     2422, 2385, 2304, 2386, 3804, 2362, 2364, 2372, 2378, 2379,
+     2376, 3803, 2380, 3802, 2553, 3789, 2524, 3785, 2389, 2437,
+     2446, 3794, 3782, 3792, 3796, 3790, 3780, 2392, 2383, 2440,
+
+     2450, 2442, 2206, 2449, 3793, 2297, 2441, 2452, 2471, 2444,
+     2455, 2472, 2473, 2457, 2480, 2525, 2526, 2474, 2527, 2528,
+     3772,    0, 3812, 3781,    0, 3804, 3772, 3767, 3781, 2518,
+     2519, 2536, 3785, 2538, 2534, 2539, 3784, 3783, 2535, 4033,
+     3782, 3766, 2616, 3760, 2533, 2541, 2542, 3778, 3758, 3776,
+     3756, 3775, 3767, 2486, 2382, 2557, 2548, 2564, 2560, 2568,
+     3773, 2566, 2569, 2570, 2565, 2571, 2575, 2576, 2626, 2578,
+     2588, 2572, 2604, 2594, 2595,    0, 3787, 3755, 3786, 3749,
+     3767, 4033, 2627, 2613, 2600, 3767, 2602, 3746, 3753, 3745,
+     3763, 2605, 3762, 3748, 3742, 3746,    0, 3738,    0, 2606,
+
+     2607, 2619, 3757, 2611, 2617, 2612, 2632, 3756, 2616, 2621,
+     2620, 2633, 2625, 2665, 2674, 2675, 3755, 2639, 2641, 2683,
+     2643, 2478, 2646, 3770, 3733, 3809, 3742, 3720, 2666, 2676,
+     2649, 2678, 3727, 3695, 3686, 2716, 2687, 2650,    0, 3688,
+        0,    0, 3675, 2668, 2667, 2686, 2689, 2733, 2679, 2698,
+     2701, 2742, 3662, 2724, 3649, 3636, 3623, 3619, 2691, 2709,
+     2717, 2734, 3618, 2705, 2699, 2707, 3663, 3596, 2739, 3578,
+     3583, 3580, 2720, 3579, 2735, 2779, 3547, 2787, 4033, 2731,
+     3566, 3534, 2736, 2739, 2737, 2740, 2760, 2761, 2763, 2764,
+     2748, 2756, 2750, 2765, 2769, 2774, 2779, 2784, 2770, 2808,
+
+     2810, 2773, 3553, 3532, 3519, 2776, 2775, 2777, 2807, 3491,
+     3492, 3464, 3433, 3421, 3399, 3392, 2817, 2794, 2858, 4033,
+     3383, 2783,    0, 2789, 2782, 2803, 2809, 2813, 2815, 2818,
+     2820, 2822, 2793, 2827, 2828, 2832, 2834, 2836, 2838, 2840,
+     3397, 3392, 2841, 2842, 2805, 2844, 3386, 3377, 3359, 3335,
+     3325, 3316, 3314, 3316, 3303, 3333, 3280, 2854, 3289, 2906,
+     2847, 3284, 2869, 2850, 2853, 2859, 2860, 2861, 2870, 2865,
+     2851, 2874, 2875, 2876, 2877, 2878, 2882, 2884, 2903, 2887,
+     2795, 2888, 3257, 3255, 3264, 3259, 3293, 3253, 3254, 3313,
+     3299,    0, 2917, 2956, 4033, 3241, 2900, 2904, 2891, 2897,
+
+     2905, 2907, 2908, 2910, 2914, 3240, 2915, 2916, 2918, 2919,
+     2920, 2921, 2927, 2932, 3225, 2934, 2924, 3214, 3211, 3266,
+     3261,    0, 3198, 3176, 3212, 3181,    0, 2953, 2945, 2940,
+     3181, 3169, 2941, 2946, 2949, 2950, 3163, 3160, 3155, 2951,
+     2952, 2954, 2955, 3149, 2957, 2958, 2963, 3136, 3113, 3144,
+     3113,    0, 3093, 3079, 3070, 3065, 3071, 2959, 2971, 3070,
+     3069, 3068, 3065, 3056, 3035, 3033, 3031, 2973, 2974, 2993,
+     3010, 3019, 3015, 3014, 3006, 3008, 3003, 3002, 2979, 2978,
+     2981, 2982, 2983, 2995, 2931, 2894, 2892,    0, 2787, 2700,
+     2642, 2443, 2986, 2305, 2985, 2989,    0, 2283, 2239, 1890,
+
+        0, 1841,    0,    0, 2988, 3004, 2998,    0, 1658,    0,
+        0, 1548,    0,    0, 2999, 3000, 3005, 1467,    0,    0,
+     1240, 3023, 3007, 1170,  634,    0, 3008, 3011,    0,    0,
+     3012, 3017,    0, 3018,  434, 3019,  275, 4033, 3099, 3112,
+     3124, 3130, 3135, 3148, 3161, 3174, 3179, 3184, 3197, 3202,
+     3208, 3213, 3216, 3229, 3242, 3247, 3252, 3257, 3262, 3267,
+     3272, 3285, 3288, 3301, 3304, 3312, 3317, 3322, 3327, 3332,
+     3337, 3342, 3347, 3352, 3355, 3368, 3381, 3386, 3399, 3412,
+     3417, 3430, 3435, 3440, 3445, 3450, 3455, 3460, 3473, 3486,
+     3491, 3504, 3517, 3522, 3535, 3540, 3553, 3566, 3579, 3592,
+
+     3605, 3618, 3631, 3644, 3657, 3670, 3683, 3696, 3709, 3722,
+     3735, 3748, 3761, 3774, 3787, 3800
     } ;
 
-static yyconst flex_int16_t yy_def[1187] =
+static yyconst flex_int16_t yy_def[1417] =
     {   0,
-     1149,    1, 1149, 1149, 1149, 1149, 1149, 1149, 1150, 1149,
-     1149, 1151, 1152, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1153, 1149, 1149, 1149, 1149, 1154, 1154, 1154, 1154, 1154,
-     1154, 1149, 1149, 1149, 1149, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1149, 1149, 1149, 1149, 1155, 1149,
-     1149, 1149, 1149, 1150, 1149, 1150, 1149, 1149, 1149, 1151,
-     1151, 1152, 1149, 1149, 1156, 1156, 1156, 1156, 1156, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1157, 1149, 1158,
-     1149, 1159, 1160, 1149, 1161, 1153, 1153, 1153, 1149, 1149,
-
-     1149, 1149, 1149, 1149, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1149, 1149, 1149, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1149, 1149, 1149, 1149,
-     1155, 1149, 1149, 1155, 1155, 1162, 1149, 1150, 1149, 1151,
-     1156, 1149, 1156, 1156, 1156, 1156, 1149, 1149, 1157, 1157,
-     1158, 1159, 1159, 1160, 1160, 1149, 1163, 1164, 1149, 1149,
-     1154, 1154, 1154, 1154, 1149, 1154, 1154, 1154, 1154, 1154,
-
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1165, 1155, 1162, 1162, 1162, 1162,
-     1149, 1149, 1152, 1156, 1156, 1156, 1156, 1156, 1156, 1156,
-     1149, 1157, 1149, 1159, 1163, 1164, 1154, 1154, 1154, 1154,
-     1166, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1149, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1165, 1165, 1165, 1165, 1162, 1162, 1162, 1162,
-     1149, 1149, 1156, 1156, 1156, 1156, 1156, 1156, 1156, 1156,
-     1154, 1154, 1154, 1154, 1167, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1149,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1165, 1165, 1165, 1165, 1162,
-     1162, 1162, 1168, 1169, 1149, 1149, 1156, 1156, 1156, 1156,
-     1156, 1156, 1156, 1156, 1154, 1154, 1154, 1154, 1167, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1149, 1149, 1149,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1165, 1165, 1165, 1170, 1171, 1162, 1162, 1162,
-     1168, 1169, 1149, 1149, 1156, 1156, 1156, 1156, 1156, 1156,
-     1156, 1156, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154,  511, 1154, 1154, 1154,  511, 1154, 1154, 1149, 1154,
-     1154, 1154, 1154, 1154, 1149, 1149, 1149, 1149, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1149, 1165, 1165, 1165, 1170, 1171,
-     1162, 1162, 1162, 1149, 1149, 1156, 1156, 1149, 1149, 1156,
-     1156, 1156, 1156, 1156, 1156, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1149, 1154, 1154, 1154, 1149, 1154, 1149, 1154, 1149,
-     1154, 1154, 1149, 1149, 1154, 1149, 1149, 1149, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1165, 1165, 1165, 1172, 1162,
-
-     1162, 1149, 1149, 1156, 1156, 1149, 1149, 1156, 1156, 1156,
-     1156, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1149, 1154, 1149, 1154, 1154, 1149, 1149, 1149, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1173, 1165, 1165, 1172, 1162, 1162, 1149, 1149,
-     1156, 1156, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1149,
-     1149, 1149, 1154, 1154, 1149, 1149, 1149, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1173,
-
-     1165, 1165, 1162, 1162, 1149, 1149, 1154, 1154, 1154, 1154,
-     1149, 1149, 1149, 1154, 1154, 1149, 1149, 1174, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1165, 1165, 1162, 1162, 1149, 1154, 1154, 1154,
-     1154, 1149, 1149, 1149, 1154, 1175, 1176, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1165, 1165, 1162, 1162, 1149, 1154, 1154, 1154, 1149,
-     1149, 1149, 1149, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1165, 1165, 1162, 1162, 1162, 1162, 1162, 1149, 1154, 1154,
-     1149, 1149, 1149, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1165, 1165, 1165,
-     1165, 1165, 1162, 1162, 1162, 1162, 1162, 1149, 1154, 1154,
-     1149, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1165, 1165, 1165, 1165, 1165, 1162, 1162,
-
-     1162, 1162, 1177, 1154, 1149, 1149, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1165, 1165,
-     1165, 1165, 1178, 1162, 1162, 1162, 1162, 1177, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1165, 1165,
-     1165, 1165, 1178, 1162, 1162, 1162, 1162, 1154, 1154, 1154,
-     1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154, 1154,
-     1154, 1165, 1165, 1165, 1165, 1162, 1162, 1162, 1162, 1154,
-     1154, 1154, 1154, 1154, 1165, 1165, 1165, 1165, 1179, 1162,
-
-     1162, 1162, 1154, 1154, 1154, 1154, 1154, 1180, 1165, 1165,
-     1165, 1179, 1162, 1181, 1182, 1154, 1154, 1154, 1180, 1165,
-     1183, 1184, 1162, 1181, 1182, 1154, 1154, 1154, 1165, 1183,
-     1184, 1162, 1154, 1154, 1154, 1165, 1185, 1154, 1154, 1186,
-     1185, 1154, 1154, 1186, 1154, 1154, 1154, 1154,    0, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149
+     1338,    1, 1338, 1338, 1338, 1338, 1338, 1338, 1339, 1338,
+     1338, 1340, 1341, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1342, 1338, 1338, 1338, 1338, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1338, 1338, 1338, 1338, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1338, 1338,
+     1338, 1338, 1344, 1338, 1338, 1338, 1338, 1339, 1345, 1346,
+     1345, 1339, 1338, 1338, 1338, 1338, 1338, 1340, 1340, 1341,
+     1338, 1338, 1347, 1347, 1347, 1347, 1347, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1348, 1338, 1349, 1338, 1350,
+
+     1351, 1338, 1352, 1342, 1353, 1353, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1343, 1343, 1343, 1343, 1345,
+     1340, 1343, 1343, 1343, 1346, 1343, 1343, 1338, 1338, 1338,
+     1338, 1338, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1338, 1338, 1338, 1338, 1344, 1338, 1338,
+     1344, 1344, 1354, 1338, 1338, 1345, 1346, 1338, 1355, 1339,
+
+     1338, 1340, 1347, 1338, 1347, 1347, 1347, 1347, 1338, 1338,
+     1356, 1356, 1349, 1357, 1357, 1358, 1358, 1338, 1359, 1360,
+     1353, 1361, 1338, 1338, 1343, 1343, 1343, 1343, 1338, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+     1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343, 1343,
+
+     1343, 1362, 1344, 1354, 1354, 1354, 1354, 1338, 1338, 1363,
+     1345, 1364, 1338, 1365, 1338, 1366, 1347, 1347, 1347, 1347,
+     1347, 1347, 1347, 1356, 1338, 1367, 1357, 1338, 1368, 1358,
+     1369, 1370, 1371, 1372, 1373, 1374, 1374, 1374, 1374, 1375,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1338, 1374, 1374,
+
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1376, 1376, 1376, 1376, 1377, 1377, 1377,
+     1377, 1338, 1338, 1378, 1338, 1379, 1380, 1381, 1382, 1347,
+     1347, 1347, 1347, 1347, 1347, 1347, 1347, 1383, 1384, 1385,
+     1386, 1387, 1370, 1371,  454, 1372, 1372, 1373, 1374, 1374,
+     1374, 1374, 1388, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1338, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1376, 1376, 1376,
+     1376, 1377, 1377, 1377, 1389, 1390, 1338, 1338, 1391, 1392,
+     1393, 1394, 1395, 1396, 1396, 1396, 1396, 1396, 1396, 1396,
+     1396, 1387, 1374, 1374, 1374, 1374, 1388, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1338, 1338,
+     1338, 1338,  626, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1376, 1376, 1376, 1397, 1398,
+     1377, 1377, 1377, 1389, 1390, 1338, 1338, 1392, 1396, 1396,
+     1396, 1396, 1396, 1396, 1396, 1396, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+
+     1374, 1374, 1374, 1374, 1374, 1374,  626, 1374, 1374, 1374,
+     1374, 1374,  626, 1374, 1374, 1338, 1374, 1374, 1374, 1338,
+     1374,  626, 1374, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1338, 1374, 1376, 1376,
+     1376, 1397, 1398, 1377, 1377, 1377, 1338, 1338, 1396, 1396,
+     1338, 1338, 1396, 1396, 1396, 1396, 1396, 1396, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+
+     1374, 1374, 1338, 1374, 1374, 1374, 1374, 1374, 1338, 1374,
+     1338, 1374, 1338, 1374, 1374, 1338, 1338, 1338, 1374, 1374,
+     1374, 1338, 1338, 1338, 1338, 1338, 1338, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1338, 1376, 1376, 1376, 1399, 1377,
+     1377, 1338, 1338, 1396, 1396, 1338, 1338, 1396, 1396, 1396,
+     1396, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374,  626, 1338, 1374, 1338, 1374, 1374,
+     1374, 1338, 1338, 1338, 1338, 1338, 1338, 1374, 1374, 1374,
+
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1338, 1400, 1376, 1376, 1399, 1377, 1377, 1338, 1338, 1396,
+     1396, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1338,
+     1374, 1338, 1338, 1338, 1374, 1374, 1374, 1338, 1338, 1338,
+     1338, 1338, 1338, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1400, 1376, 1376, 1377, 1377,
+     1338, 1338, 1374, 1374, 1374, 1374, 1374, 1338, 1338, 1338,
+     1374, 1374, 1374, 1338, 1338, 1338, 1401, 1338, 1402, 1374,
+
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1376, 1376, 1377, 1377, 1338, 1374, 1374,
+     1374, 1374, 1374, 1338, 1338, 1338, 1374, 1374, 1403, 1338,
+     1404, 1405, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1376, 1376, 1377, 1377,
+     1338, 1374, 1374, 1374, 1374, 1338, 1338, 1338, 1338, 1374,
+     1374, 1338, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1376, 1376,
+     1377, 1377, 1377, 1377, 1377, 1338, 1374, 1374, 1338, 1338,
+     1338, 1374, 1406, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1376, 1376, 1376, 1376,
+     1376, 1377, 1377, 1377, 1377, 1377, 1338, 1374, 1374, 1338,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1376, 1376, 1376, 1376, 1376, 1377, 1377, 1377,
+     1377, 1407, 1374, 1338, 1338, 1374, 1374, 1374, 1374, 1374,
+
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1376, 1376, 1376,
+     1376, 1408, 1377, 1377, 1377, 1377, 1407, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1376, 1376, 1376,
+     1376, 1408, 1377, 1377, 1377, 1377, 1374, 1374, 1374, 1374,
+     1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374, 1374,
+     1376, 1376, 1376, 1376, 1377, 1377, 1377, 1377, 1374, 1374,
+     1374, 1374, 1374, 1376, 1376, 1376, 1376, 1409, 1377, 1377,
+     1377, 1374, 1374, 1374, 1374, 1374, 1410, 1376, 1376, 1376,
+
+     1409, 1377, 1411, 1412, 1374, 1374, 1374, 1410, 1376, 1413,
+     1414, 1377, 1411, 1412, 1374, 1374, 1374, 1376, 1413, 1414,
+     1377, 1374, 1374, 1374, 1376, 1415, 1374, 1374, 1416, 1415,
+     1374, 1374, 1416, 1374, 1374, 1374, 1374,    0, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338
     } ;
 
-static yyconst flex_int16_t yy_nxt[2764] =
+static yyconst flex_int16_t yy_nxt[4125] =
     {   0,
         4,    5,    6,    7,    7,    5,    8,    9,    4,   10,
-       11,   12,   13,   14,   15,   16,   17,   18,   19,   20,
-       21,   21,   21,   21,   21,   21,   21,   22,   23,   24,
-       25,    4,   26,   27,   28,   27,   27,   27,   27,   27,
-       27,   27,   27,   27,   27,   27,   27,   27,   29,   27,
-       27,   27,   27,   30,   31,   27,   27,   32,   33,   34,
-       35,   36,   37,   38,   39,   40,   41,   42,   27,   27,
-       43,   27,   27,   44,   45,   46,   47,   48,   27,   49,
-       50,   51,   52,   53,   27,   54,   27,   27,   55,   56,
-       57,   62,   58,   56,   65,   62,   59,   68,   72,   72,
-
-       81,   72,   72,  109,   83,   89,   60,   73,  107,   90,
-      106,   74,  100,  101,   82,  108,   69,   84,   85,   61,
-       91,  103,  104,  110,  107,  112,  113,   86,  114,   75,
-       87,   77,   88,   88,   88,   88,   88,   88,   88,   88,
-      111,  107,  107,  169,  107,   66,  107,  143,  107,  172,
-      107,   78,  172,  107,  107,  107,  133,   92,   79,   93,
-       93,   93,   93,   93,   93,   93,   93,  117,  131,  134,
-      156,  157,  173,  132,  118,   92,   94,   96,   96,   96,
-       96,   96,   96,   96,   96,  107,  107,  107,  460,  107,
-       71,  124,  107,  121,   98,   95,  119,  122,  107,   65,
-
-      123,  125,  120,  127,  129,  107,   94,  107,  135,  168,
-      130,  126,  136,  128,  138,  170,  139,  140,  107,  107,
-      172,  107,  137,  141,   98,   95,  142,  144,  145,  107,
-      158,  172,  107,  175,  148,  151,  174,  149,  146,  147,
-      152,  153,  159,  160,   62,  160,  159,  154,   62,  161,
-       66,   89,  155,  172,  150,  166,  193,  191,  195,  162,
-       66,  107,   71,  192,   72,   72,   91,   72,   72,  107,
-      196,  234,  163,   73,  107,  197,  208,   74,  107,   88,
-       88,   88,   88,   88,   88,   88,   88,  176,  107,  107,
-      107,  107,  107,  107,  205,   75,  180,   77,   88,   88,
-
-       88,   88,   88,   88,   88,   88,  107,  209,  107,  210,
-      192,  107,  107,  206,  302,  183,  107,   78,  107,  107,
-      194,  107,  107,  213,   79,   92,  180,   93,   93,   93,
-       93,   93,   93,   93,   93,  107,  207,  217,  246,  214,
-      216,  218,  186,  186,  185,  183,  187,  187,  187,  187,
-      187,  187,  187,  187,   92,  107,   96,   96,   96,   96,
-       96,   96,   96,   96,  278,  107,  107,  107,  211,  212,
-      107,  229,  287,   98,  185, 1149,  192, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149,  186,  186, 1149,  227,  187,
-      187,  187,  187,  187,  187,  187,  187,  107,  107,  107,
-
-      107,  107,  198,   98,  199,  200,  107,  201,  220,  107,
-      202,  215,  107,  219,  224,  107,  107,  107,  221,  107,
-      203,  204, 1140,  107,  107,  164,  222,  256,  226,  223,
-      238,  107,  225,  232,  237,  107,  228,  231,  235,  233,
-      236,  107,  241,  107,  240,  242,  107,  107,  107,  239,
-      107,  107,  107,   65,  107,  258,  244,  247,  169,  243,
-      249,  253,  172,  251,  245,  250,  252,  248,  159,  160,
-      254,  160,  159,  261,  262,  161,  172,  172,  265,  172,
-      107,  165,  279,  259,  107,  162,  260,  465,  285,  264,
-      277,  107,  280,  304,  107,  266,  286,  107,  163,  291,
-
-      107,  283,  107,  288,   66,   71, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149,  284,  107,  267,  271,  271,  268,
-      107,  272,  272,  272,  272,  272,  272,  272,  272,  107,
-      107,  107,  269,  270, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149,  273,  273,  293,  289,  274,  274,  274,  274,
-      274,  274,  274,  274, 1149,  107, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149,  186,  186, 1149,  296,  187,  187,
-      187,  187,  187,  187,  187,  187,  187,  187,  187,  187,
-      187,  187,  187,  187,  187,  187,  187,  187,  187,  187,
-      187,  187,  188,  188,  188,  188,  188,  188,  188,  188,
-
-      107,  107,  294,  107,  107,  188,  188,  188,  188,  188,
-      188,  107,  107,  107,  107,  303,  295,  107,  301,  107,
-      290,  297,  107,  298,  306,  107,  307,  299,  107,  305,
-      107,  300,  107,  308,  107,  188,  188,  188,  188,  188,
-      188,  309,  107,  107,  312,  107,  192,  107,  310,  311,
-      313,  107,  315,  107,  107,  107,  107,  107,  107,  107,
-      107,  107,  321,  328,  325,  317,  107,  320,  107,  107,
-      322,  107,  324,  318,  319,  323,  316,  329,  107,  107,
-      107,  327,  331,  333,  332,  107,  326,  107,  107,  330,
-      107,  339,  338,  164,  334,  354,  335,  107,  357,  336,
-
-      337,  340,  172,  341,  342,  172,  343,  172,  344,  172,
-      365,  363,  172,  345,  172,  172,  358,  615,  346,  347,
-      348,  349,  350,  355, 1149, 1149,  356, 1149, 1149,  107,
-      364,  371,  372,   73,  374,  107,  379,   74,  107,  107,
-      107,  406,  383,  381,  368,  366,  107,  107,  351,  165,
-      107,  107,  107,  367,  107,  380,  386,  370,  272,  272,
-      272,  272,  272,  272,  272,  272,  369,  272,  272,  272,
-      272,  272,  272,  272,  272,  274,  274,  274,  274,  274,
-      274,  274,  274,  274,  274,  274,  274,  274,  274,  274,
-      274, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-
-     1149, 1149, 1149, 1149, 1149, 1149, 1149,  373,  376,  107,
-      107,  192, 1149, 1149, 1149, 1149, 1149, 1149,  377,  107,
-      107,  107,  107,  192,  107,  107,  107,  382,  403,  378,
-      385,  387,  107,  397,  389,  107,  107,  392,  384,  390,
-      107,  107, 1149, 1149, 1149, 1149, 1149, 1149,  107,  107,
-      107,  391,  107,  388,  107,  107,  107,  107,  107,  192,
-      393,  394,  395,  402,  107,  396,  107,  398,  107,  400,
-      401,  408,  107,  409,  399,  107,  404,  411,  107,  107,
-      410,  407,  107,  107,  107,  107,  415,  107,  418,  405,
-      416,  107,  107,  107,  107,  422,  419,  412,  107,  413,
-
-      107,  107,  414,  107,  425,  421,  423,  107,  428,  107,
-      429,  107,  427,  107,  426,  107,  424,  107,  433,  107,
-      435,  107,  107,  436,  107,  107,  432,  438,  430,  431,
-      107,  107,  107,  192,  172,  446,  437,  434,  450,  441,
-      442,  172,  439,  451,  172,  443,  172,  172,  172,  440,
-      458,  172,  444,  447,  459,  466,  467,  468,  107,  457,
-      470,  472,  471,  445,  107,  107,  107,  473,  107,  107,
-      107,  475,  476,  107,  479,  107,  477,  107,  107,  107,
-      480,  107,  107, 1137,  474,  107,  463,  107,  107,  107,
-      487,  107,  492,  483,  464,  478,  482,  481,  461,  107,
-
-      462,  107,  107,  107,  485,  488,  486,  107,  107,  107,
-      107,  490,  107,  491,  107,  107,  107,  489,  107,  497,
-      107,  493,  107,  499,  107,  500,  107,  107,  494,  107,
-      192,  495,  502,  504,  107,  496,  498,  507,  503,  508,
-      107,  107,  107,  513,  501,  509,  107,  511,  107,  510,
-      107,  514,  107,  515,  107,  107,  107,  517,  107,  518,
-      107,  512,  516,  107,  523,  107,  107,  107,  107,  519,
-      107,  526,  107,  107,  107,  505,  521,  529,  520,  528,
-      107,  172,  533,  522,  527,  524,  525,  534,  107,  172,
-      172,  530,  531,  172,  546,  172,  172,  532,  172,  172,
-
-      553,  554,  555,  547,  556,  545,  557,  559,  560,  107,
-      558,  107,  107,  107,  107,  563,  107,  107,  561,  107,
-      565,  566,  107,  562,  107,  567,  192, 1136,  107,  107,
-      548,  107,  549,  107,  107,  551,  107,  568,  564,  107,
-      107,  572,  570,  107,  107,  574,  107,  107,  573,  571,
-      550,  107,  552,  107,  569,  107,  107,  107,  577,  107,
-      576,  575,  579,  579,  107,  579,  579,  192,  107,  580,
-      107,  107,  107,  107,  581,  578,  582,  591,  593,  107,
-      107,  107,  107,  592,  594,  107,  597,  107,  590,  604,
-      610,  595,  107,  596,  599,  598,  600,  107,  107,  172,
-
-      601,  107,  611,  602,  107,  107,  172,  603,  612,  172,
-      613,  107,  583,  627,  636,  107,  614,  643, 1132,  107,
-      172,  626,  107,  588,  588,  588,  588,  588,  588,  588,
-      588,  588,  588,  588,  588,  588,  588,  588,  588,  588,
-      588,  588,  589,  589,  589,  589,  589,  589,  589,  589,
-      588,  588,  588,  588,  588,  631,  172,  633,  605,  172,
-      628,  628,  172,  628,  628,  637,  639,  640,  606,  638,
-      607,  641, 1129,  629,  608,  107,  107,  609,  107,  642,
-      588,  588,  588,  588,  107,  107,  172,  107,  107,  107,
-      644,  632,  107,  107,  634,  107,  646,  107,  648,  635,
-
-      107,  647,  107,  645,  107,  649,  630,  107,  840,  107,
-      654,  588,  658,  658,  659,  658,  658,  579,  579,  107,
-      579,  579,  651,  652,  652,  652,  652,  652,  652,  652,
-      652,  652,  652,  652,  652,  652,  652,  652,  652,  652,
-      652,  652,  653,  653,  653,  653,  653,  653,  653,  653,
-      652,  652,  652,  652,  652,  107,  107,  107,  107,  107,
-      107,  192,  669,  670,  671,  107,  107,  107,  674,  677,
-      672,  673,  107,  655,  661,  662,  107,  107,  676,  107,
-      652,  652,  652,  652,  663,  663,  107,  663,  663,  665,
-      107,  107,  107,  107,  675,  679,  660,  107,  686,  678,
-
-      107,  683,  107,  107,  682,  684,  695,  680,  172,  172,
-      664,  652,  656,  656,  656,  656,  656,  656,  656,  656,
-      656,  656,  656,  656,  656,  656,  656,  656,  656,  656,
-      656,  657,  657,  657,  657,  657,  657,  657,  657,  656,
-      656,  656,  656,  656,  107,  107,  107,  107,  107,  172,
-      688,  172,  107,  689,  708,  710,  172,  690,  172, 1123,
-      713,  560,  691,  693,  681,  692,  685,  687,  694,  656,
-      656,  656,  656,  705,  107,  107,  628,  628,  704,  628,
-      628,  706,  706,  560,  706,  706,  709,  107,  107,  629,
-      107,  711,  107,  107,  707,  107,  107,  107,  716,  107,
-
-      656,  192,  562, 1122,  717,  107,  107,  172,  562,  715,
-      721,  718,  658,  658,  719,  658,  658,  107,  107,  663,
-      663,  723,  663,  663,  725,  107,  107,  107,  726,  720,
-      107,  730,  107,  107,  107,  734,  731,  733,  107,  107,
-      107,  736,  107,  740,  737,  664,  107,  738,  107,  739,
-      107,  735,  732,  107,  107,  107,  741,  743,  107,  107,
-      747,  107,  749,  107,  172,  107,  748,  745,  742,  750,
-      752,  744,  746,  628,  628,  172,  628,  628,  751,  172,
-      172,  628,  628,  107,  628,  628,  629,  192,  765,  706,
-      706,  722,  706,  706,  629,  764,  107,  107,  107,  172,
-
-      107,  763,  707,  107,  107,  107,  767,  172,  107,  773,
-      631,  107,  107,  107,  107,  769,  766,  562,  768,  630,
-      562,  761,  107,  107,  107,  631,  762,  630,  771,  771,
-      774,  771,  771,  107,  107,  107,  778,  107,  780,  782,
-      107,  779,  781,  107,  783,  107,  107,  785,  107,  107,
-      107,  788,  107,  786,  107,  107,  107,  784,  789,  791,
-      107,  107,  107,  787,  794,  797,  107,  792,  790,  172,
-      796,  172,  807,  107,  793,  107,  808,  795,  809,  107,
-      798,  799,  107,  771,  771,  107,  771,  771,  107,  107,
-      819,  107,  820,  107,  810,  107,  107,  821,  107,  823,
-
-      107,  107,  107,  828,  107,  814,  107,  107,  107,  815,
-      822,  107,  824,  107,  832,  631,  825,  631,  826,  827,
-      829,  107,  833,  830,  834,  831,  107,  835,  836,  107,
-      107,  107,  848,  372,  107,  838,  107,  849,  860,  837,
-      107,  858,  107,  864,  107,  107,  842,  107,  107,  839,
-      841,  862,  107,  107,  851,  107,  861,  107,  870,  107,
-      107,  107,  812,  107,  866,  865,  868,  859,  871,  872,
-      863,  867,  107,  107,  875,  869,  876,  107,  880,  877,
-      878,  107,  887,  107,  888,  192,  192,  873,  107,  897,
-     1026,  895,  874,  107,  107,  898,  107,  879,  915,  881,
-
-      892,  892,  917,  892,  892,  107,  107,  107,  107,  107,
-      889,  107,  107,  905,  894,  107,  107,  904,  896,  899,
-      900,  901,  906,  902,  107,  916,  893,  907,  908,  909,
-      107,  910,  912,  914,  107,  913,  107,  107,  192,  107,
-      107,  107,  903,  930,  107,  920,  923,  924,  107,  934,
-      911,  939,  919,  955,  918,  107,  107,  931,  931,  107,
-      931,  931,  925,  107,  107,  107,  952,  107,  107,  892,
-      892,  107,  892,  892,  935,  107,  937,  107,  938,  940,
-      936,  941,  942,  932,  107,  943,  107,  107,  945,  944,
-      107,  946,  926,  947,  948,  893,  107,  949,  107,  107,
-
-      950,  107,  951,  953,  107,  107,  107,  107,  958,  959,
-      107,  107,  969,  192,  107,  970,  957,  107,  107,  954,
-      956,  107,  931,  931,  960,  931,  931,  107,  107,  977,
-      107,  107,  974,  975,  107,  978,  973,  972,  107,  107,
-      107,  107,  976,  107,  979,  107,  980,  107,  932,  107,
-      981, 1004,  983,  107,  961,  107,  984,  107,  982,  107,
-      985,  107,  986,  107,  987,  107,  988,  107,  107,  990,
-      989,  991,  107,  107, 1005, 1005,  107, 1005, 1005, 1008,
-      107,  107,  107,  107, 1009, 1012,  992,  993,  107,  107,
-      107, 1013, 1007, 1014,  107,  107,  107, 1015, 1010, 1011,
-
-     1006, 1016,  107,  107, 1017,  107, 1020,  107, 1021,  107,
-     1022,  107, 1023, 1018, 1025,  107,  107, 1028, 1039, 1027,
-     1019, 1005, 1005,  107, 1005, 1005,  107, 1040, 1041, 1024,
-     1121,  107,  107,  107,  107,  107,  107,  107,  107, 1056,
-      107,  107,  107,  107,  107,  107,  107, 1006, 1042, 1043,
-     1044,  107,  107, 1045, 1046, 1047, 1048, 1049, 1050,  107,
-     1058, 1068, 1051,  107, 1052,  107,  107,  107, 1069, 1053,
-     1054,  107, 1057, 1120, 1055,  107,  107,  107,  107,  107,
-      107,  107, 1071,  107,  107,  107, 1072, 1079, 1070,  107,
-     1073, 1074, 1075, 1076, 1081, 1077, 1078, 1080,  107,  107,
-
-      107, 1093, 1094, 1090,  107,  107,  107, 1091,  107,  107,
-     1107,  107,  107,  107,  107, 1118, 1117, 1092, 1127,  107,
-     1103, 1105, 1116, 1104,  107,  107,  107, 1134, 1106, 1126,
-      107, 1138,  107,  107,  107,  107, 1139,  107, 1142, 1133,
-      107,  107, 1128, 1145,  107, 1135,  107,  107,  107, 1146,
-     1143, 1115, 1114, 1113, 1111, 1110, 1109, 1108, 1102, 1147,
-     1101, 1148,   64,   64,   64,   64,   64,   64,   64,   64,
-       64,   70,   70,   70, 1100,   70,   70,   70,   70,   70,
-       76,   76, 1099, 1098, 1097, 1096,   76,   76,   97,   97,
-     1095,   97,   97,  105,  107,  105,  105,  164,  164,  107,
-
-      164,  164,  164,  164,  164,  164,  171,  171,  171,  171,
-      179,  107,  179,  179,  181,  181,  107,  181,  181,  181,
-      181,  181,  181,  182,  107,  182,  182,  184,  184,  107,
-      184,  184,  188,  107,  188,  257,  257,  107,  257,  257,
-      257,  257,  257,  257,  275,  107,  275,  275,  276, 1089,
-      276,  276,  353,  353, 1088,  353,  353,  353,  353,  353,
-      353,  375,  375,  469, 1087,  469,  469,  541,  541, 1086,
-      541,  541,  541,  541,  541,  541,  542,  542, 1085,  542,
-      542,  542,  542,  542,  542,  619,  619, 1084,  619,  619,
-      619,  619,  619,  619,  620,  620, 1083,  620,  620,  620,
-
-      620,  620,  620,  756,  756, 1082,  756,  756,  756,  756,
-      756,  756,  800,  800,  107,  800,  800,  800,  800,  800,
-      800,  588,  588,  588,  588,  588,  588,  588,  652,  652,
-      652,  652,  652,  652,  652,  656,  656,  656,  656,  656,
-      656,  656, 1038, 1038,  107, 1038, 1038, 1038, 1038, 1038,
-     1038, 1063, 1063,  107, 1063, 1063, 1063, 1063, 1063, 1063,
-     1112, 1112,  107, 1112, 1112, 1112, 1112, 1112, 1112, 1119,
-     1119,  107, 1119, 1119, 1119, 1119, 1119, 1119, 1124, 1124,
-      107, 1124, 1124, 1124, 1124, 1124, 1124, 1125, 1125, 1067,
-     1125, 1125, 1125, 1125, 1125, 1125, 1130, 1130, 1066, 1130,
-
-     1130, 1130, 1130, 1130, 1130, 1131, 1131, 1065, 1131, 1131,
-     1131, 1131, 1131, 1131, 1141, 1141, 1064, 1141, 1141, 1141,
-     1141, 1141, 1141, 1144, 1144, 1062, 1144, 1144, 1144, 1144,
-     1144, 1144, 1061, 1060, 1059,  107,  107,  107, 1037, 1036,
-     1035, 1034, 1033, 1032, 1031, 1030, 1029,  107,  107,  806,
-     1003, 1002, 1001, 1000,  999,  998,  997,  996,  995,  994,
-      107,  107,  971,  968,  967,  966,  965,  964,  963,  962,
-      107,  107,  107,  933,  929,  928,  927,  922,  921,  107,
-      107,  107,  107,  107,  107,  107,  891,  890,  107,  886,
-      885,  884,  883,  882,  107,  107,  107,  857,  856,  107,
-
-      855,  854,  853,  852,  850,  847,  846,  845,  844,  843,
-      107,  818,  817,  816,  813,  811,  107,  107,  107,  806,
-      805,  804,  803,  802,  801,  107,  777,  776,  775,  772,
-      770,  107,  760,  759,  758,  757,  755,  754,  753,  107,
-      107,  107,  107,  729,  728,  727,  107,  724,  107,  107,
-      107,  107,  714,  712,  107,  107,  172,  703,  702,  701,
-      700,  699,  698,  697,  696,  420,  107,  107,  107,  668,
-      667,  666,  107,  107,  650,  107,  107,  107,  625,  624,
-      623,  622,  621,  618,  617,  616,  107,  107,  587,  586,
-      585,  584,  107,  107,  107,  107,  107,  107,  107,  107,
-
-      107,  107,  544,  543,  540,  539,  538,  537,  536,  535,
-      107,  107,  107,  506,  107,  107,  107,  484,  107,  107,
-      107,  456,  455,  454,  453,  452,  449,  448,  107,  420,
-      417,  107,  107,  107,  107,  107,  362,  361,  360,  359,
-      352,  314,  107,  107,  292,  282,  281,  263,  172,  165,
-      167,  255,  165,  230,  113,  107,  190,  189,  178,  177,
-      172,  167,  165,  116,  115,  107,  102,   99,   80,   71,
-       67,   63, 1149,    3, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149
+       11,   12,   13,   14,   15,   16,   14,   17,   18,   19,
+       20,   21,   21,   21,   21,   21,   21,   21,   22,   23,
+       24,   25,    4,   26,   27,   28,   27,   27,   27,   27,
+       27,   27,   27,   27,   29,   27,   27,   27,   27,   30,
+       31,   27,   27,   32,   33,   34,   27,   27,   35,   36,
+       37,   38,   39,   40,   41,   42,   43,   44,   45,   27,
+       27,   46,   27,   27,   47,   48,   49,   50,   51,   27,
+       52,   53,   54,   55,   56,   57,   58,   27,   27,   59,
+       27,   60,   61,   66,   62,   60,   76,   66,   63,   69,
+
+       69,   69,  337,   69,   73,   70,   74,   75,  118,   64,
+       71,   69,   91,   89,   80,   80,   77,   80,   80,  114,
+      115,  107,   65,   81,  108,   92,   93,   82,   90,   94,
+      120,  109,  119,   95,  121,   96,   96,   96,   96,   96,
+       96,   96,   96,  123,   97,  117,   83,  125,   85,   98,
+      110,  111,  112,  130,  126,  131,  118,   72,  195,  118,
+       99,  118,  120,  124,  118,  127,  121,  118,   86,  184,
+      118,  156,  167,  122,  100,   87,  101,  101,  101,  101,
+      101,  101,  101,  101,  154,  118,  118,  128,  128,  155,
+      128,  128,  100,  102,  104,  104,  104,  104,  104,  104,
+
+      104,  104,  118,  133,  118,  122,  118,  118,  339,  134,
+      196,  106,  103,  141,  118,  118,  135,  118,  138,  142,
+      139,  150,  136,  102,  118,  118,  143,  140,  185,  147,
+      183,  151,  137,  144,  118,  157,  118,  145,  118,  148,
+      146,  106,  103,  118,  129,  118,  152,  198,  182,  149,
+      118,  158,  153,  201,  162,  159,  163,  164,  118,  120,
+      118,  165,  202,  121,  166,  160,  168,  169,  118,  179,
+       66,  161,  195,  172,   66,  180,  173,  170,  171,  176,
+      181,  186,  187,  204,  187,  186,  174,   97,  188,  118,
+      250,  204,  193,  175,   69,   69,   69,  199,   69,  189,
+
+       70,   79,  122,   99,  207,   71,   69,   69,   69,   69,
+       79,   69,  190,  200,  118,  204,  204,  208,   71,   69,
+      206,  226,   80,   80,  196,   80,   80,  225,  177,  227,
+      195,   81,  229,  178,  230,   82,  205,  118,   96,   96,
+       96,   96,   96,   96,   96,   96,  118,  118,  118,  118,
+      251,  231,   72,  118,   83,  212,   85,   96,   96,   96,
+       96,   96,   96,   96,   96,   72,  118,  118,  125,  226,
+      118,  118,  118,  118,  215,  234,   86,  245,  283,  228,
+      236,  118,  196,   87,  100,  212,  101,  101,  101,  101,
+      101,  101,  101,  101,  118,  128,  128,  284,  128,  128,
+
+      218,  246,  218,  217,  215,  219,  219,  219,  219,  219,
+      219,  219,  219,  100,  118,  104,  104,  104,  104,  104,
+      104,  104,  104,  118,  118,  299,  118,  118,  118,  218,
+      118,  218,  106,  217,  222,  222,  222,  222,  222,  222,
+      222,  222,  232,  235,  226,  118,  247,  249,  118,  291,
+      233,  118,  129,  118,  244,  118,  118,  118,  254,  118,
+      118,  255,  106,  248,  252,  253,  118,  256,  263,  118,
+      237,  258,  238,  239,  257,  240,  260,  259,  241,  118,
+      261,  262,  118,  118,  118,  118,  118,  118,  242,  243,
+      118,  264,  271,  118,  118,  274,  118,  267,  269,  265,
+
+      118,  118,  266,  118,  268,  273,  118,  272,  279,  270,
+      277,  118,  285,  118,  276,  282,  278,  118,  280,  120,
+      281,  118,  286,  118,  118,  287,  289,  118,  118,  118,
+      118,  191,  293,  303,  305,  290,  295,  118,  297,  288,
+      292,  298,  294,  308,  309,  300,  296,  311,  301,  186,
+      187,  204,  187,  186,  201,  204,  188,  336,  118,  118,
+      204,  752,  122,  306,  317,  338,  307,  189,  343,  342,
+      118,  318,  118,  344,  118,   69,   69,   69,  319,   69,
+      190,   70,  118,  118,  345,  118,   71,   69,  192,  219,
+      219,  219,  219,  219,  219,  219,  219,  204,  118,  196,
+
+      197,  118,   79,  346,  197,  315,  197,  197,  197,  197,
+      197,  197,  198,  361,  197,  197,  197,  197,  197,  197,
+      197,  197,  197,  197,  197,  197,  197,  197,  197,  197,
+      197,  197,  197,   72,  320,  118,  313,  321,  219,  219,
+      219,  219,  219,  219,  219,  219,  118,  118,  118,  352,
+      322,  323,  351,  347,  118,  118,  118,  362,  197,  197,
+      197,  197,  325,  355,  325,  118,  348,  326,  326,  326,
+      326,  326,  326,  326,  326,  328,  354,  328,  204,  353,
+      329,  329,  329,  329,  329,  329,  329,  329,  218,  197,
+      218,  118,  118,  331,  331,  331,  331,  331,  331,  331,
+
+      331,  220,  220,  220,  220,  220,  220,  220,  220,  363,
+     1329,  364,  118,  445,  333,  333,  333,  333,  333,  333,
+      222,  222,  222,  222,  222,  222,  222,  222,  118,  118,
+      118,  366,  349,  118,  368,  118,  356,  118,  359,  118,
+      350,  118,  367,  370,  333,  333,  333,  333,  333,  333,
+      358,  118,  360,  118,  371,  365,  369,  118,  118,  118,
+      118,  375,  118,  374,  118,  118,  118,  376,  118,  118,
+      372,  373,  118,  377,  381,  379,  118,  380,  118,  226,
+      118,  118,  118,  378,  384,  118,  118,  118,  118,  118,
+      382,  118,  118,  118,  395,  391,  385,  390,  387,  398,
+
+      191,  392,  389,  388,  118,  394,  393,  118,  118,  386,
+      118,  118,  118,  397,  118,  402,  399,  404,  396,  118,
+      118,  118,  410,  406,  401,  403,  405,  118,  400,  118,
+      422,  409,  425,  118,  407,  411,  195,  204,  412,  408,
+      413,  428,  414,  204,   81,  204,  440,  415,   82,  204,
+      442,  204,  416,  417,  418,  419,  420,  192,  461,  429,
+      118,  426,  118,  118,  427,  441,  460,  459,  118,  468,
+      204,  448,  448,  448,  448,  448,  448,  448,  448,  477,
+      118,  443,  421,  118,  118,  118,  118,  488,  196,  444,
+      470,  435,  197,  462,  118,  118,  197,  469,  197,  197,
+
+      197,  197,  197,  197,  198,  446,  197,  197,  197,  197,
+      197,  197,  447,  118,  118,  473,  118,  526,  502,  471,
+      197,  197,  197,  197,  197,  326,  326,  326,  326,  326,
+      326,  326,  326,  450,  450,  450,  450,  450,  450,  450,
+      450,  329,  329,  329,  329,  329,  329,  329,  329,  467,
+      197,  197,  197,  197,  331,  331,  331,  331,  331,  331,
+      331,  331,  118,  118,  118,  118,  118,  472,  476,  474,
+      479,  118,  118,  118,  118,  507,  504,  501,  118,  510,
+      480,  197,   68,   69,   69,   69,   68,   69,   68,   70,
+       68,   68,   68,   68,   71,   69,   68,   68,   68,   68,
+
+       68,   68,   68,   68,   68,   68,   68,   68,   68,   68,
+       68,   68,   68,   68,   68,  437,  437,  437,  437,  437,
+      437,  437,  437,  437,  437,  437,  437,  437,  437,  437,
+      437,  437,  437,  437,  437,  437,  437,  437,  437,  437,
+       68,   72,   68,   68,  437,  437,  437,  437,  437,  437,
+      437,  437,  437,  437,  437,  437,  437,  437,  437,  437,
+      437,  437,  437,  437,  437,  437,  437,  437,  437,  437,
+      437,   68,  437,   78,   78,   78,   78,   78,   78,   78,
+       78,   78,   78,   78,  201,   78,   78,   78,   78,   78,
+       78,   78,   78,   78,   78,   78,   78,   78,   78,   78,
+
+       78,   78,   78,   78,   78,   78,  439,  439,  439,  439,
+      439,  439,  439,  439,  439,  439,  439,  439,  439,  439,
+      439,  439,  439,  439,  439,  439,  439,  439,  439,  439,
+      439,   78,   79,   78,   78,  439,  439,  439,  439,  439,
+      439,  439,  439,  439,  439,  439,  439,  439,  439,  439,
+      439,  439,  439,  439,  439,  439,  439,  439,  439,  439,
+      439,  439,   78,  439,  454,  454,  454,  454,  454,  454,
+      454,  454,  118,  464,  118,  118,  226,  455,  455,  455,
+      455,  455,  455,  465,  118,  118,  118,  573,  481,  226,
+      475,  118,  118,  118,  466,  478,  118,  118,  483,  485,
+
+      486,  118,  487,  484,  489,  118,  490,  455,  455,  455,
+      455,  455,  455,  491,  118,  118,  118,  492,  118,  482,
+      118,  118,  226,  118,  118,  493,  118,  494,  118,  495,
+      496,  118,  118,  118,  118,  118,  497,  499,  500,  509,
+      498,  118,  512,  118,  506,  503,  118,  118,  508,  118,
+      118,  511,  505,  118,  513,  118,  516,  118,  519,  520,
+      517,  118,  118,  514,  523,  118,  525,  118,  118,  515,
+      118,  522,  527,  118,  524,  118,  118,  530,  118,  118,
+      531,  529,  528,  118,  118,  118,  118,  118,  536,  537,
+      118,  534,  539,  118,  118,  118,  532,  226,  533,  538,
+
+      118,  548,  543,  535,  542,  552,  204,  540,  204,  544,
+      553,  541,  578,  580,  565,  545, 1326,  118,  546,  549,
+      118,  586,  118,  118,  564,  118,  588,  547,   69,   69,
+       69,   69,   69,   69,   69,  195,   69,   69,   69,   69,
+       69,   69,   69,   69,   69,   69,   69,   69,   69,   69,
+       69,   69,   69,   69,   69,   69,   69,   69,   69,   69,
+       69,  560,  560,  560,  560,  560,  560,  560,  560,  560,
+      560,  560,  560,  560,  560,  560,  560,  560,  560,  560,
+      560,  560,  560,  560,  560,  560,   69,  196,   69,   69,
+      560,  560,  560,  560,  560,  560,  560,  560,  560,  560,
+
+      560,  560,  560,  560,  560,  560,  560,  560,  560,  560,
+      560,  560,  560,  560,  560,  560,  560,   69,  560,  197,
+      204,  204,  204,  197,  204,  197,  197,  197,  197,  197,
+      197,  198,  204,  197,  197,  197,  197,  197,  197,  204,
+      566,  118,  575,  118,  589,  790,  574,  197,  197,  197,
+      197,  197,  448,  448,  448,  448,  448,  448,  448,  448,
+      567,  118,  570,  450,  450,  450,  450,  450,  450,  450,
+      450,  118,  576,  579,  568,  581,  118,  197,  197,  197,
+      197,  118,  571,  118,  118,  569,  118,  118,  118,  118,
+      582,  118,  583,  592,  593,  734,  118,  118,  118,  584,
+
+      590,  118,  595,  118,  585,  587,  591,  597,  197,   68,
+       69,   69,   69,   68,   69,   68,   70,   68,   68,   68,
+       68,   71,   69,   68,   68,   68,   68,   68,   68,  118,
+      118,  118,  599,  118,  118,  598,  118,   68,   68,   68,
+       68,   68,  600,  118, 1325,  118,  603,  601,  596,  594,
+      604,  118,  606,  118,  118,  693,  605,  607,  602,  118,
+      118,  118,  118,  611,  609,  612,  610,   68,   72,   68,
+       68,  118,  118,  608,  118,  118,  118,  118,  118,  618,
+      614,  118,  118,  118,  620,  621,  118,  118,  623,  628,
+      613,  625,  118,  615,  118,  635,  636,  616,   68,   78,
+
+       78,   78,   78,   78,   78,   78,   78,   78,   78,   78,
+      201,   78,   78,   78,   78,   78,   78,   78,   78,  118,
+      118,  118,  118,  226,  118, 1321,  118,   78,   78,   78,
+       78,   78,  629,  633,  624,  118,  638,  630,  631,  617,
+      619,  622,  632,  118,  634,  118,  118,  118,  118,  118,
+      637,  118,  640,  118,  118,  641,  645,   78,   79,   78,
+       78,  639,  118,  644,  642,  118,  118,  118,  626,  643,
+      118,  648,  118,  656,  646,  204,  204,  651,  657,  650,
+      118,  649,  647,  118,  118,  204,  204,  653,   78,  454,
+      454,  454,  454,  454,  454,  454,  454,  204,  652,  669,
+
+      677,  655,  455,  455,  455,  455,  455,  455,  204,  204,
+      671,  118,  118,  670,  678,  681,  118,  684,  118,  685,
+      687,  204,  672,  673,  118,  118,  118,  686,  691, 1318,
+      674,  118,  455,  455,  455,  455,  455,  455,   69,   69,
+       69,   69,   69,   69,   69,  195,   69,   69,   69,   69,
+       69,   69,   69,   69,   69,   69,   69,   69,  675,  679,
+      680,  682,  676,  683,  118,  118,   69,   69,   69,   69,
+       69,  118,  118,  692,  690,  689,  118,  695,  226,  694,
+      118,  696,  118,  118,  118,  688,  118,  118,  118,  118,
+      118,  697,  118,  118,  118,  709,   69,  196,   69,   69,
+
+      118,  698,  706,  118,  118,  703,  701,  702,  118,  708,
+      704,  705,  118,  707,  118,  118,  204,  716,  716,  714,
+      716,  716,  118,  118,  836,  713,  754,   69,   68,   69,
+       69,   69,   68,   69,   68,   70,   68,   68,   68,   68,
+       71,   69,   68,   68,   68,   68,   68,   68,  118,  118,
+      118,  118,  118,  118,  226,  118,   68,   68,   68,   68,
+       68,  118,  774,  727,  710,  712,  717,  711,  118,  118,
+      718,  715,  719,  728,  729,  732,  118,  118,  118,  118,
+      118,  735,  736,  118,  118,  118,   68,   72,   68,   68,
+      753,  740,  737,  746,  745,  738,  739,  118,  741,  722,
+
+      742,  204,  118,  118,  743,  843,  757,  744,  770,  756,
+      204,  118, 1312,  226,  787,  118,  118,   68,   78,   78,
+       78,   78,   78,   78,   78,   78,   78,   78,   78,  201,
+       78,   78,   78,   78,   78,   78,   78,   78,  118,  755,
+      118,  747,  204,  204,  204,  775,   78,   78,   78,   78,
+       78,  748,  204,  749,  779,  780,  781,  750,  758,  118,
+      751,  782,  784,  783,  769,  118,  118, 1311,  118,  785,
+      118,  118,  118,  118,  118,  118,   78,   79,   78,   78,
+      776,  778,  791,  118,  118,  786,  794,  777,  118,  792,
+      788,  118,  118,  793,  118,  795,  118,  798,  118,  118,
+
+      799,  118,  805,  800,  118,  835,  812,   78,  720,  720,
+      720,  720,  720,  720,  720,  720,  720,  720,  720,  720,
+      720,  720,  720,  720,  720,  720,  720,  720,  721,  721,
+      721,  721,  721,  721,  721,  721,  720,  720,  720,  720,
+      720,  118,  118,  118,  771,  771,  118,  771,  771,  226,
+      226,  118,  716,  716,  118,  716,  716,  772,  226,  815,
+      118,  802,  806,  841,  808,  828,  720,  720,  720,  720,
+      118,  204,  814,  118,  118,  118,  811,  811,  830,  811,
+      811,  118,  118,  118,  820,  831,  821,  832,  118,  118,
+      842,  773,  118,  833,  796,  797,  854,  720,  721,  730,
+
+      730,  730,  730,  730,  730,  730,  730,  730,  730,  730,
+      730,  730,  730,  730,  730,  730,  730,  730,  730,  731,
+      731,  731,  731,  731,  731,  731,  731,  730,  730,  730,
+      730,  730,  813,  118,  118,  118,  226,  118,  118,  118,
+      838,  118,  887,  118,  118,  118,  845,  204,  204,  816,
+      816,  834,  816,  816,  837,  839,  118,  730,  730,  730,
+      730,  118,  840,  847,  844,  846,  848,  118,  118,  204,
+      849,  852,  684,  829,  118,  850,  817,  864,  851,  878,
+      691,  204,  869,  853,  865,  204,  118,  118,  730,  731,
+       69,   69,   69,   69,   69,   69,   69,  195,   69,   69,
+
+       69,   69,   69,   69,   69,   69,   69,   69,   69,   69,
+      118,  771,  771,  873,  771,  771,  684,  226,   69,   69,
+       69,   69,   69,  204,  772,  866,  866,  868,  866,  866,
+      118,  870,  118,  118,  118,  118,  226,  118,  867,  118,
+      118,  118,  118,  118,  118,  879,  875,  691,   69,  196,
+       69,   69,  204,  880,  876,  877,  881,  882,  871,  118,
+      118,  118,  889,  900,  883,  884,  811,  811,  118,  811,
+      811,  118,  891,  118,  899,  118,  907,  959,  890,   69,
+      803,  803,  803,  803,  803,  803,  803,  803,  803,  803,
+      803,  803,  803,  803,  803,  803,  803,  803,  803,  803,
+
+      804,  804,  804,  804,  804,  804,  804,  804,  803,  803,
+      803,  803,  803,  118,  118,  118, 1310,  226,  816,  816,
+      118,  816,  816,  901,  902,  118,  118,  118,  898,  904,
+      905,  118,  906,  903,  908,  118,  118,  118,  803,  803,
+      803,  803,  118,  118,  118,  817,  886,  909,  118,  118,
+      915,  911,  118,  910,  912,  913,  118,  916,  914,  118,
+      918,  933,  885,  961,  204, 1309,  118,  118,  204,  803,
+      804,  809,  809,  809,  809,  809,  809,  809,  809,  809,
+      809,  809,  809,  809,  809,  809,  809,  809,  809,  809,
+      809,  810,  810,  810,  810,  810,  810,  810,  810,  809,
+
+      809,  809,  809,  809,  917,  118,  920,  629,  771,  771,
+      774,  771,  771,  631,  930,  866,  866,  921,  866,  866,
+      919,  772,  226,  934,  118,  118,  118, 1001,  867,  809,
+      809,  809,  809,  118,  118,  204,  932,  937,  118,  935,
+      118,  118,  118,  939,  118,  118,  936,  118,  118,  204,
+      204,  118,  691,  691,  118,  773,  945,  954,  938,  955,
+      809,  810,  818,  818,  818,  818,  818,  818,  818,  818,
+      818,  818,  818,  818,  818,  818,  818,  818,  818,  818,
+      818,  818,  819,  819,  819,  819,  819,  819,  819,  819,
+      818,  818,  818,  818,  818,  774,  931,  771,  771,  118,
+
+      771,  771,  118,  118,  118,  118,  118,  956,  118,  958,
+      772,  118,  118, 1065,  118,  965,  962,  118,  946,  118,
+      818,  818,  818,  818,  204,  943,  943,  947,  943,  943,
+      957,  960,  963,  118,  118,  118,  118,  966,  964,  969,
+      118,  973,  118,  967,  773,  970,  204,  204,  118, 1000,
+      968,  818,  819,  940,  940,  940,  940,  940,  940,  940,
+      940,  940,  940,  940,  940,  940,  940,  940,  940,  940,
+      940,  940,  940,  941,  941,  941,  941,  941,  941,  941,
+      941,  940,  940,  940,  940,  940,  118,  118,  118,  118,
+      118,  983,  774,  774,  984,  118,  118,  118,  118,  985,
+
+      118,  118,  972,  118,  118,  975,  986,  974,  992,  971,
+      118,  940,  940,  940,  940,  991,  987,  943,  943,  118,
+      943,  943,  118, 1002,  993, 1003,  118,  118,  118, 1004,
+      118,  118,  118,  118,  118, 1009, 1005,  118,  118, 1013,
+      118, 1010,  940,  941, 1007, 1006, 1008, 1014, 1011, 1015,
+      118, 1012, 1016, 1017, 1020, 1021,  118,  118, 1018, 1019,
+     1029,  460,  118, 1031,  118, 1030,  118,  118,  118,  118,
+     1044, 1045, 1023,  118,  118,  118, 1022, 1049,  118,  118,
+     1043,  118,  118,  118, 1033, 1038, 1047,  118,  118,  118,
+     1051, 1055, 1048, 1046,  118,  118, 1056,  989, 1050, 1057,
+
+     1052,  118, 1053,  118, 1060,  118, 1061, 1054,  118, 1062,
+     1063,  118,  118, 1073, 1072,  226, 1058, 1078, 1078, 1304,
+     1078, 1078, 1059, 1074,  226, 1064, 1066,  118,  118,  118,
+      118, 1085, 1081, 1083, 1086, 1103,  118,  118,  118, 1104,
+     1075,  118, 1092, 1093, 1079,  118, 1094, 1100,  118,  118,
+     1101,  118, 1080,  118, 1084, 1087, 1088, 1089, 1105, 1090,
+      118,  118, 1102,  118, 1095, 1096, 1097,  118, 1098,  118,
+     1118,  118,  226, 1108, 1111, 1112, 1107, 1303, 1091,  118,
+     1119, 1119,  118, 1119, 1119, 1106,  118, 1099, 1078, 1078,
+     1113, 1078, 1078,  118, 1122,  118,  118,  118,  118,  118,
+
+     1126,  118,  118, 1127,  118, 1128, 1129, 1120, 1130, 1131,
+      118, 1132,  118, 1134, 1136, 1079, 1124, 1125,  118, 1137,
+     1114, 1133,  118,  118, 1138,  118,  118,  118, 1135, 1139,
+     1141,  118,  118, 1140, 1142,  118,  118,  118,  118,  118,
+     1159,  118, 1147, 1148,  118,  118,  118, 1144, 1146, 1163,
+     1143,  118, 1145, 1158,  226,  118,  118,  118, 1149, 1119,
+     1119, 1216, 1119, 1119, 1161,  118, 1162,  118, 1164, 1302,
+      118,  118,  118, 1171, 1165,  118, 1166,  118, 1167,  118,
+      118, 1168,  118, 1169,  118, 1181, 1120, 1170, 1150,  118,
+      118, 1193, 1172, 1173,  118, 1174,  118, 1175,  118, 1176,
+
+      118, 1177,  118,  118,  118, 1178,  118, 1194, 1194,  118,
+     1194, 1194,  118,  118, 1197,  118,  118, 1179, 1180, 1198,
+     1182,  118,  118,  118, 1201, 1202, 1203,  118, 1206, 1196,
+     1199,  118,  118, 1200, 1195, 1204,  118,  118,  118,  118,
+      118, 1209, 1210, 1211,  118, 1205,  118, 1212, 1214,  118,
+      118, 1217, 1228,  118, 1207, 1208, 1215, 1194, 1194,  118,
+     1194, 1194,  118, 1229, 1213,  118,  118,  118, 1231,  118,
+      118, 1230,  118, 1300, 1232, 1299,  118,  118,  118,  118,
+      118,  118,  118,  118, 1195, 1233,  118, 1234, 1235,  118,
+     1236, 1237, 1238, 1239,  118, 1245,  118, 1257, 1240, 1241,
+
+     1242, 1243,  118,  118, 1244, 1246, 1247,  118,  118, 1298,
+     1258,  118,  118,  118,  118,  118,  118,  118, 1260,  118,
+      118,  118, 1268, 1261, 1259,  118, 1262, 1263, 1264, 1265,
+     1270, 1266, 1267,  118, 1269,  118,  118, 1282, 1283, 1279,
+      118,  118, 1280,  118,  118,  118, 1296,  118,  118, 1316,
+      118,  118, 1306, 1281, 1307,  118, 1292, 1305, 1294, 1293,
+      118,  118,  118, 1323, 1295, 1315,  118,  118, 1327,  118,
+      118, 1331, 1328,  118,  118, 1322, 1297, 1334, 1317,  118,
+      118,  118, 1324, 1291, 1290,  118, 1289, 1288, 1287, 1286,
+     1285, 1332, 1284,  118, 1335,  118, 1337,  118, 1336,   68,
+
+       68,   68,   68,   68,   68,   68,   68,   68,   68,   68,
+       68,   68,   78,   78,   78,   78,   78,   78,  118,   78,
+       78,   78,   78,   78,   78,   84,   84,  118,   84,   84,
+      118,  118,  118,  118,   84,   84,   84,  105,  105, 1278,
+      105,  105,  105,  116, 1277,  116,  116,  116,  191,  191,
+     1276,  191,  191,  191,  191,  191,  191,  191,  191,  191,
+      191,   69,   69,   69,   69,   69,   69,   69,   69,   69,
+       69,   69,   69,   69,  197, 1275, 1274, 1273,  197,  197,
+      197,  197,  197,  197,  197,  197,  197,  203,  203,  203,
+      203,  203,  211, 1272,  211,  211,  211,  213,  213, 1271,
+
+      213,  213,  213,  213,  213,  213,  213,  213,  213,  213,
+      214,  118,  214,  214,  214,  216,  216,  118,  216,  216,
+      216,  220,  118,  220,  221,  118,  221,  221,  221,  304,
+      304,  118,  304,  304,  304,  304,  304,  304,  304,  304,
+      304,  304,  312,  118, 1256, 1255,  312,  312,  312,  312,
+      312,  312,  312,  312,  312,  324, 1254,  324,  324,  324,
+      327, 1253,  327,  327,  327,  330, 1251,  330,  330,  330,
+      332, 1250,  332,  332,  332,  334, 1249,  334,  334,  334,
+      335, 1248,  335,  335,  335,  424,  424,  118,  424,  424,
+      424,  424,  424,  424,  424,  424,  424,  424,  434,  434,
+
+      434,  436,  118,  118, 1226,  436,  436,  436,  436,  436,
+      436,  436,  436,  436,  438,  438,  438,   84, 1225, 1224,
+     1223, 1222,   84,   84,   84,  449, 1221,  449,  449,  449,
+      451, 1220,  451,  451,  451,  452, 1219,  452,  452,  452,
+      453, 1218,  453,  453,  453,  456,  118,  456,  456,  456,
+      457,  118,  457,  457,  457,  458,  982,  458,  458,  458,
+      116, 1192,  116,  116,  116,  463,  463,  463,  424,  424,
+     1191,  424,  424,  424,  424,  424,  424,  424,  424,  424,
+      424,  304,  304, 1190,  304,  304,  304,  304,  304,  304,
+      304,  304,  304,  304,  559, 1189,  559,  559,  559,  436,
+
+     1188, 1187, 1186,  436,  436,  436,  436,  436,  436,  436,
+      436,  436,  561,  561,  561,  561,  561,  561,  561,  561,
+      561,  561,  561,  561,  561,  562, 1185,  562,  562,  562,
+      563,  563,  563,  563,  563,  563,  563,  563,  563,  563,
+      563,  563,  563,  211, 1184,  211,  211,  211,  324, 1183,
+      324,  324,  324,  214,  118,  214,  214,  214,  327,  118,
+      327,  327,  327,  572, 1160,  572,  572,  572,  577, 1157,
+      577,  577,  577,  664,  664, 1156,  664,  664,  664,  664,
+      664,  664,  664,  664,  664,  664,  665,  665, 1155,  665,
+      665,  665,  665,  665,  665,  665,  665,  665,  665,  559,
+
+     1154,  559,  559,  559,  668,  668,  668,  668,  668,  668,
+      668,  668,  668,  668,  668,  668,  668,  561,  561,  561,
+      561,  561,  561,  561,  561,  561,  561,  561,  561,  561,
+      562, 1153,  562,  562,  562,  563,  563,  563,  563,  563,
+      563,  563,  563,  563,  563,  563,  563,  563,  203,  203,
+      203,  203,  203,  762,  762, 1152,  762,  762,  762,  762,
+      762,  762,  762,  762,  762,  762,  763,  763, 1151,  763,
+      763,  763,  763,  763,  763,  763,  763,  763,  763,  925,
+      925,  118,  925,  925,  925,  925,  925,  925,  925,  925,
+      925,  925,  976,  976,  118,  976,  976,  976,  976,  976,
+
+      976,  976,  976,  976,  976,  720,  720,  720,  720,  720,
+      720,  720,  720,  720,  720,  118, 1123,  720,  730,  730,
+      730,  730,  730,  730,  730,  730,  730,  730,  118, 1121,
+      730,  803,  803,  803,  803,  803,  803,  803,  803,  803,
+      803,  118, 1117,  803,  809,  809,  809,  809,  809,  809,
+      809,  809,  809,  809, 1116, 1115,  809,  818,  818,  818,
+      818,  818,  818,  818,  818,  818,  818, 1110, 1109,  818,
+      940,  940,  940,  940,  940,  940,  940,  940,  940,  940,
+      118,  118,  940, 1227, 1227,  118, 1227, 1227, 1227, 1227,
+     1227, 1227, 1227, 1227, 1227, 1227, 1252, 1252,  118, 1252,
+
+     1252, 1252, 1252, 1252, 1252, 1252, 1252, 1252, 1252, 1301,
+     1301,  118, 1301, 1301, 1301, 1301, 1301, 1301, 1301, 1301,
+     1301, 1301, 1308, 1308,  118, 1308, 1308, 1308, 1308, 1308,
+     1308, 1308, 1308, 1308, 1308, 1313, 1313,  118, 1313, 1313,
+     1313, 1313, 1313, 1313, 1313, 1313, 1313, 1313, 1314, 1314,
+     1082, 1314, 1314, 1314, 1314, 1314, 1314, 1314, 1314, 1314,
+     1314, 1319, 1319, 1077, 1319, 1319, 1319, 1319, 1319, 1319,
+     1319, 1319, 1319, 1319, 1320, 1320, 1076, 1320, 1320, 1320,
+     1320, 1320, 1320, 1320, 1320, 1320, 1320, 1330, 1330,  118,
+     1330, 1330, 1330, 1330, 1330, 1330, 1330, 1330, 1330, 1330,
+
+     1333, 1333, 1071, 1333, 1333, 1333, 1333, 1333, 1333, 1333,
+     1333, 1333, 1333, 1070, 1069, 1068, 1067,  118,  118,  118,
+     1042, 1041, 1040, 1039,  118, 1037, 1036, 1035, 1034, 1032,
+     1028, 1027, 1026, 1025, 1024,  118,  999,  998,  997,  996,
+      995,  994,  990,  988,  118,  118,  118,  118,  982,  981,
+      980,  979,  978,  977,  729,  118,  953,  952,  951,  950,
+      949,  948,  944,  942,  118,  118,  118,  929,  928,  927,
+      926,  924,  923,  922,  118,  118,  118,  118,  897,  896,
+      895,  894,  893,  892,  118,  118,  888,  118,  118,  118,
+      118,  118,  118,  118,  118,  118,  118,  874,  872,  118,
+
+      118,  204,  863,  862,  861,  860,  859,  858,  857,  856,
+      118,  855,  118,  118,  118,  827,  826,  825,  824,  823,
+      822,  118,  807,  118,  801,  118,  118,  118,  118,  789,
+      118,  118,  768,  767,  766,  765,  764,  761,  760,  759,
+      118,  733,  118,  726,  725,  724,  723,  118,  118,  118,
+      118,  118,  118,  118,  700,  699,  118,  118,  667,  666,
+      663,  662,  661,  660,  659,  658,  654,  118,  118,  118,
+      627,  118,  118,  118,  118,  118,  118,  118,  558,  557,
+      556,  555,  554,  551,  550,  118,  521,  518,  118,  118,
+      118,  118,  433,  432,  431,  430,  423,  118,  383,  118,
+
+      118,  357,  341,  340,  316,  204,  314,  198,  310,  192,
+      194,  302,  192,  275,  130,  198,   79,  118,  224,  223,
+      210,  209,  204,  194,  192,  132,  118,  113,   88,   79,
+       67, 1338,    3, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338
     } ;
 
-static yyconst flex_int16_t yy_chk[2764] =
+static yyconst flex_int16_t yy_chk[4125] =
     {   0,
         1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
         1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
@@ -1143,316 +1373,468 @@ static yyconst flex_int16_t yy_chk[2764] =
         1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
         1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
         1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
-        1,    1,    1,    1,    1,    1,    1,    1,    1,    2,
-        2,    5,    2,    2,    9,    5,    2,   11,   13,   13,
-
-       16,   13,   13,   29,   17,   19,    2,   13, 1148,   19,
-       26,   13,   23,   23,   16,   28,   11,   17,   17,    2,
-       19,   25,   25,   29,   26,   31,   33,   18,   33,   13,
-       18,   13,   18,   18,   18,   18,   18,   18,   18,   18,
-       30,   44,   49,   70,   28,    9,   31,   49,   45,  366,
-       30,   13,   75,   37,   43,   54,   44,   20,   13,   20,
-       20,   20,   20,   20,   20,   20,   20,   37,   43,   45,
-       54,   55,   75,   43,   37,   21,   20,   21,   21,   21,
-       21,   21,   21,   21,   21,   39,   40,   38,  366,   41,
-       70,   40,   42,   39,   21,   20,   38,   39,   47,   64,
-
-       39,   40,   38,   41,   42,   48,   20,   46,   46,   66,
-       42,   40,   46,   41,   47,   71,   47,   47, 1146,   50,
-       78,   52,   46,   48,   21,   20,   48,   50,   50,   51,
-       55,   77,   53,   78,   51,   52,   77,   51,   50,   50,
-       52,   53,   56,   56,   62,   56,   56,   53,   62,   56,
-       64,   60,   53,   79,   51,   60,  108,  106,  110,   56,
-       66,  120,   71,  107,   72,   72,   60,   72,   72,  142,
-      111,  142,   56,   72,  108,  112,  120,   72,  106,   88,
-       88,   88,   88,   88,   88,   88,   88,   79,  107,  111,
-      117,  112,  110,  118,  117,   72,   88,   72,   92,   92,
-
-       92,   92,   92,   92,   92,   92,  121,  121,  122,  122,
-      109,  124,  214,  118,  214,   92,  119,   72,  125,  127,
-      109,  128,  150,  124,   72,   93,   88,   93,   93,   93,
-       93,   93,   93,   93,   93,  109,  119,  128,  150,  125,
-      127,  128,   94,   94,   93,   92,   94,   94,   94,   94,
-       94,   94,   94,   94,   96,  123,   96,   96,   96,   96,
-       96,   96,   96,   96,  192,  136,  138,  200,  123,  123,
-      192,  138,  200,   96,   93,   97,  116,   97,   97,   97,
-       97,   97,   97,   97,   97,   98,   98,   98,  136,   98,
-       98,   98,   98,   98,   98,   98,   98,  126,  129,  130,
-
-      133,  116,  116,   96,  116,  116,  131,  116,  130,  132,
-      116,  126,  134,  129,  133,  135,  137,  140,  131,  144,
-      116,  116, 1136,  145,  141,  165,  131,  165,  135,  132,
-      145,  143,  134,  141,  144,  146,  137,  140,  143,  141,
-      143,  147,  147,  148,  146,  147,  149,  153,  151,  145,
-      155,  152,  156,  168,  154,  166,  148,  151,  170,  147,
-      152,  155,  174,  154,  149,  153,  154,  151,  159,  159,
-      156,  159,  159,  167,  167,  159,  173,  175,  174,  176,
-      216,  165,  193,  166,  199,  159,  166,  371,  199,  173,
-      191,  204,  194,  216,  198,  175,  199,  371,  159,  204,
-
-      193,  197,  201,  201,  168,  170,  179,  179,  179,  179,
-      179,  179,  179,  179,  198,  191,  176,  180,  180,  176,
-      194,  180,  180,  180,  180,  180,  180,  180,  180,  206,
-      197,  202,  176,  176,  182,  182,  182,  182,  182,  182,
-      182,  182,  183,  183,  206,  202,  183,  183,  183,  183,
-      183,  183,  183,  183,  184,  208,  184,  184,  184,  184,
-      184,  184,  184,  184,  185,  185,  185,  208,  185,  185,
-      185,  185,  185,  185,  185,  185,  186,  186,  186,  186,
-      186,  186,  186,  186,  187,  187,  187,  187,  187,  187,
-      187,  187,  188,  188,  188,  188,  188,  188,  188,  188,
-
-      203,  207,  207,  209,  210,  188,  188,  188,  188,  188,
-      188,  211,  212,  213,  215,  215,  207,  217,  213,  218,
-      203,  209, 1135,  210,  218,  219,  219,  211,  220,  217,
-      223,  212,  221,  220,  229,  188,  188,  188,  188,  188,
-      188,  221,  224,  225,  225,  226,  230,  231,  223,  224,
-      226,  232,  229,  235,  233,  234,  236,  237,  238,  239,
-      240,  241,  235,  242,  239,  231,  244,  234,  243,  245,
-      236,  230,  238,  232,  233,  237,  230,  243,  246,  247,
-      250,  241,  244,  246,  245,  249,  240,  248,  251,  243,
-      252,  252,  251,  256,  247,  255,  248,  242,  258,  249,
-
-      250,  253,  264,  253,  253,  266,  253,  267,  253,  268,
-      266,  264,  269,  253,  265,  270,  258,  531,  253,  253,
-      253,  253,  253,  255,  263,  263,  255,  263,  263,  253,
-      265,  277,  278,  263,  280,  284,  283,  263,  285,  287,
-      315,  315,  287,  285,  268,  267,  278,  277,  253,  256,
-      283,  531,  290,  267,  280,  284,  290,  270,  271,  271,
-      271,  271,  271,  271,  271,  271,  269,  272,  272,  272,
-      272,  272,  272,  272,  272,  273,  273,  273,  273,  273,
-      273,  273,  273,  274,  274,  274,  274,  274,  274,  274,
-      274,  275,  275,  275,  275,  275,  275,  275,  275,  276,
-
-      276,  276,  276,  276,  276,  276,  276,  279,  282,  286,
-      289,  282,  276,  276,  276,  276,  276,  276,  282,  288,
-      294,  295,  291,  292,  305,  299,  312,  286,  312,  282,
-      289,  291,  298,  305,  294,  279,  282,  299,  288,  295,
-      300,  301,  276,  276,  276,  276,  276,  276,  292,  306,
-      302,  298,  303,  292,  307,  308,  309,  313,  310,  314,
-      300,  301,  302,  310,  316,  303,  317,  306,  320,  308,
-      309,  317,  318,  318,  307,  319,  313,  320,  321,  322,
-      319,  316,  323,  326,  314,  324,  324,  327,  326,  314,
-      324,  329,  330,  332,  331,  330,  327,  321,  334,  322,
-
-      333,  335,  323,  336,  333,  329,  331,  337,  335,  339,
-      336,  340,  334,  342,  333,  343,  332,  341,  341,  344,
-      343,  345,  346,  344,  347,  348,  340,  346,  337,  339,
-      350,  349,  351,  352,  365,  354,  345,  342,  357,  348,
-      349,  363,  346,  357,  364,  350,  367,  368,  369,  347,
-      364,  370,  351,  354,  365,  372,  373,  374,  352,  363,
-      376,  378,  377,  352,  383,  376,  380,  379,  381,  382,
-      372,  381,  382,  378,  385,  384,  383,  386,  387,  388,
-      386,  396,  374, 1132,  380,  373,  369,  377,  379,  389,
-      396,  402,  402,  389,  370,  384,  388,  387,  367,  394,
-
-      368,  395,  397,  398,  394,  397,  395,  400,  401,  403,
-      385,  400,  404,  401,  405,  406,  409,  398,  408,  408,
-      410,  403,  411,  410,  412,  411,  415,  413,  404,  416,
-      417,  405,  413,  416,  419,  406,  409,  419,  415,  420,
-      421,  422,  423,  423,  412,  420,  425,  421,  428,  420,
-      431,  425,  426,  426,  432,  417,  430,  430,  433,  431,
-      434,  422,  428,  435,  436,  437,  438,  439,  440,  432,
-      441,  439,  442,  443,  436,  417,  434,  442,  433,  441,
-      445,  457,  446,  435,  440,  437,  438,  446,  444,  458,
-      459,  443,  444,  460,  458,  461,  462,  445,  463,  464,
-
-      465,  466,  467,  459,  468,  457,  470,  472,  473,  474,
-      471,  475,  465,  466,  476,  476,  470,  478,  474,  477,
-      478,  479,  473,  475,  480,  480,  484, 1129,  492,  467,
-      460,  468,  461,  471,  481,  463,  472,  481,  477,  487,
-      488,  492,  487,  493,  494,  494,  496,  497,  493,  488,
-      462,  484,  464,  498,  484,  499,  502,  503,  498,  479,
-      497,  496,  500,  500,  501,  500,  500,  506,  513,  501,
-      515,  514,  517,  516,  502,  499,  503,  514,  516,  518,
-      521,  519,  523,  515,  517,  520,  520,  529,  513,  524,
-      526,  518,  506,  519,  522,  521,  522,  527,  524,  545,
-
-      522,  526,  527,  522,  530,  528,  546,  523,  528,  548,
-      529,  522,  506,  546,  553,  561,  530,  561, 1123,  553,
-      550,  545,  500,  511,  511,  511,  511,  511,  511,  511,
-      511,  511,  511,  511,  511,  511,  511,  511,  511,  511,
-      511,  511,  511,  511,  511,  511,  511,  511,  511,  511,
-      511,  511,  511,  511,  511,  548,  549,  550,  525,  551,
-      547,  547,  552,  547,  547,  554,  556,  557,  525,  555,
-      525,  558, 1120,  547,  525,  554,  556,  525,  563,  559,
-      511,  511,  511,  511,  511,  525,  547,  564,  555,  567,
-      563,  549,  557,  559,  551,  565,  565,  568,  568,  552,
-
-      558,  567,  571,  564,  569,  569,  547,  574,  797,  578,
-      574,  511,  577,  577,  578,  577,  577,  579,  579,  797,
-      579,  579,  571,  572,  572,  572,  572,  572,  572,  572,
-      572,  572,  572,  572,  572,  572,  572,  572,  572,  572,
-      572,  572,  572,  572,  572,  572,  572,  572,  572,  572,
-      572,  572,  572,  572,  572,  575,  580,  590,  592,  581,
-      593,  584,  590,  592,  593,  595,  596,  600,  597,  600,
-      595,  596,  577,  575,  580,  581,  598,  599,  599,  601,
-      572,  572,  572,  572,  582,  582,  584,  582,  582,  584,
-      602,  603,  605,  609,  598,  602,  579,  597,  609,  601,
-
-      606,  606,  614,  607,  605,  607,  614,  603,  632,  634,
-      582,  572,  576,  576,  576,  576,  576,  576,  576,  576,
-      576,  576,  576,  576,  576,  576,  576,  576,  576,  576,
-      576,  576,  576,  576,  576,  576,  576,  576,  576,  576,
-      576,  576,  576,  576,  582,  604,  608,  610,  612,  626,
-      611,  633,  613,  611,  632,  634,  635,  611,  627, 1113,
-      641,  636,  611,  612,  604,  611,  608,  610,  613,  576,
-      576,  576,  576,  627,  611,  636,  628,  628,  626,  628,
-      628,  631,  631,  638,  631,  631,  633,  641,  643,  628,
-      644,  635,  646,  645,  631,  649,  647,  638,  645,  648,
-
-      576,  650,  644, 1111,  646,  654,  662,  631,  647,  643,
-      654,  648,  658,  658,  649,  658,  658,  659,  674,  663,
-      663,  659,  663,  663,  662,  677,  650,  665,  665,  650,
-      673,  673,  675,  678,  676,  677,  674,  676,  679,  680,
-      683,  679,  681,  683,  680,  663,  684,  681,  682,  682,
-      685,  678,  675,  687,  686,  688,  684,  686,  689,  690,
-      690,  691,  692,  694,  708,  693,  691,  688,  685,  693,
-      695,  687,  689,  704,  704,  709,  704,  704,  694,  710,
-      711,  705,  705,  692,  705,  705,  704,  712,  714,  706,
-      706,  658,  706,  706,  705,  713,  715,  695,  719,  704,
-
-      713,  712,  706,  716,  725,  717,  716,  705,  718,  725,
-      708,  726,  712,  714,  720,  720,  715,  717,  719,  704,
-      718,  709,  730,  731,  733,  710,  711,  705,  723,  723,
-      726,  723,  723,  732,  734,  735,  730,  736,  732,  734,
-      739,  731,  733,  738,  735,  740,  741,  738,  743,  742,
-      744,  741,  745,  739,  746,  748,  749,  736,  742,  744,
-      750,  747,  751,  740,  747,  750,  752,  745,  743,  761,
-      749,  762,  763,  766,  746,  769,  765,  748,  766,  763,
-      751,  752,  765,  771,  771,  773,  771,  771,  723,  774,
-      778,  779,  779,  780,  769,  781,  782,  780,  783,  782,
-
-      788,  784,  786,  788,  787,  773,  789,  778,  790,  774,
-      781,  791,  783,  792,  792,  761,  784,  762,  786,  787,
-      789,  794,  793,  790,  793,  791,  795,  793,  793,  796,
-      798,  799,  807,  808,  810,  795,  823,  808,  821,  794,
-      819,  819,  820,  826,  824,  828,  799,  808,  821,  796,
-      798,  824,  825,  830,  810,  832,  823,  829,  833,  831,
-      826,  807,  771,  793,  829,  828,  831,  820,  834,  835,
-      825,  830,  837,  838,  839,  832,  839,  840,  841,  839,
-      839,  842,  848,  991,  849,  850,  855,  837,  858,  861,
-      991,  858,  838,  860,  833,  862,  848,  840,  875,  842,
-
-      854,  854,  877,  854,  854,  841,  861,  835,  834,  849,
-      850,  855,  862,  865,  855,  839,  864,  864,  860,  863,
-      863,  863,  866,  863,  874,  876,  854,  867,  867,  867,
-      865,  867,  869,  874,  875,  869,  879,  880,  889,  866,
-      877,  881,  863,  888,  918,  881,  884,  884,  894,  894,
-      867,  899,  880,  918,  879,  888,  895,  890,  890,  863,
-      890,  890,  884,  889,  896,  876,  912,  867,  899,  892,
-      892,  869,  892,  892,  895,  897,  897,  898,  898,  900,
-      896,  901,  902,  890,  903,  903,  904,  905,  905,  904,
-      906,  906,  884,  907,  908,  892,  900,  909,  901,  902,
-
-      910,  911,  911,  913,  914,  919,  912,  920,  921,  921,
-      907,  908,  929,  929,  909,  930,  920,  910,  934,  914,
-      919,  935,  931,  931,  921,  931,  931,  936,  939,  939,
-      937,  930,  936,  937,  940,  940,  935,  934,  929,  938,
-      944,  913,  938,  941,  941,  942,  942,  943,  931,  945,
-      943,  969,  945,  946,  921,  954,  946,  955,  944,  947,
-      947,  948,  948,  949,  949,  950,  950,  951,  956,  954,
-      951,  955,  972,  957,  971,  971,  969,  971,  971,  974,
-      975,  976,  977,  981,  974,  977,  956,  957,  978,  982,
-      979,  978,  972,  979,  980,  983,  974,  980,  975,  976,
-
-      971,  981,  984,  985,  982,  986,  985,  987,  986,  988,
-      987,  989,  988,  983,  990,  992,  993,  993, 1004,  992,
-      984, 1005, 1005, 1009, 1005, 1005, 1008, 1008, 1009,  989,
-     1110,  990, 1012, 1010, 1011, 1013, 1014, 1015, 1025, 1025,
-     1028, 1016, 1018, 1019, 1020, 1004, 1021, 1005, 1010, 1011,
-     1012, 1022, 1023, 1013, 1014, 1015, 1016, 1018, 1019, 1024,
-     1028, 1039, 1020, 1027, 1021, 1040, 1041, 1044, 1040, 1022,
-     1023, 1045, 1027, 1109, 1024, 1046, 1047, 1051, 1052, 1039,
-     1053, 1054, 1044, 1057, 1056, 1069, 1045, 1056, 1041, 1058,
-     1046, 1047, 1051, 1052, 1058, 1053, 1054, 1057, 1070, 1079,
-
-     1080, 1080, 1081, 1069, 1091, 1090, 1092, 1070, 1093, 1094,
-     1094, 1106, 1107, 1104, 1116, 1107, 1106, 1079, 1117, 1081,
-     1090, 1092, 1104, 1091, 1118, 1126, 1127, 1127, 1093, 1116,
-     1128, 1133, 1139, 1134, 1143, 1117, 1134, 1138, 1138, 1126,
-     1142, 1145, 1118, 1142, 1105, 1128, 1147, 1103, 1133, 1143,
-     1139, 1102, 1101, 1100, 1098, 1097, 1096, 1095, 1089, 1145,
-     1088, 1147, 1150, 1150, 1150, 1150, 1150, 1150, 1150, 1150,
-     1150, 1151, 1151, 1151, 1087, 1151, 1151, 1151, 1151, 1151,
-     1152, 1152, 1086, 1085, 1084, 1083, 1152, 1152, 1153, 1153,
-     1082, 1153, 1153, 1154, 1078, 1154, 1154, 1155, 1155, 1077,
-
-     1155, 1155, 1155, 1155, 1155, 1155, 1156, 1156, 1156, 1156,
-     1157, 1076, 1157, 1157, 1158, 1158, 1075, 1158, 1158, 1158,
-     1158, 1158, 1158, 1159, 1074, 1159, 1159, 1160, 1160, 1073,
-     1160, 1160, 1161, 1072, 1161, 1162, 1162, 1071, 1162, 1162,
-     1162, 1162, 1162, 1162, 1163, 1068, 1163, 1163, 1164, 1067,
-     1164, 1164, 1165, 1165, 1066, 1165, 1165, 1165, 1165, 1165,
-     1165, 1166, 1166, 1167, 1065, 1167, 1167, 1168, 1168, 1064,
-     1168, 1168, 1168, 1168, 1168, 1168, 1169, 1169, 1062, 1169,
-     1169, 1169, 1169, 1169, 1169, 1170, 1170, 1061, 1170, 1170,
-     1170, 1170, 1170, 1170, 1171, 1171, 1060, 1171, 1171, 1171,
-
-     1171, 1171, 1171, 1172, 1172, 1059, 1172, 1172, 1172, 1172,
-     1172, 1172, 1173, 1173, 1055, 1173, 1173, 1173, 1173, 1173,
-     1173, 1174, 1174, 1174, 1174, 1174, 1174, 1174, 1175, 1175,
-     1175, 1175, 1175, 1175, 1175, 1176, 1176, 1176, 1176, 1176,
-     1176, 1176, 1177, 1177, 1050, 1177, 1177, 1177, 1177, 1177,
-     1177, 1178, 1178, 1049, 1178, 1178, 1178, 1178, 1178, 1178,
-     1179, 1179, 1048, 1179, 1179, 1179, 1179, 1179, 1179, 1180,
-     1180, 1043, 1180, 1180, 1180, 1180, 1180, 1180, 1181, 1181,
-     1042, 1181, 1181, 1181, 1181, 1181, 1181, 1182, 1182, 1037,
-     1182, 1182, 1182, 1182, 1182, 1182, 1183, 1183, 1036, 1183,
-
-     1183, 1183, 1183, 1183, 1183, 1184, 1184, 1035, 1184, 1184,
-     1184, 1184, 1184, 1184, 1185, 1185, 1034, 1185, 1185, 1185,
-     1185, 1185, 1185, 1186, 1186, 1032, 1186, 1186, 1186, 1186,
-     1186, 1186, 1031, 1030, 1029, 1026, 1017, 1007, 1002, 1001,
-     1000,  999,  998,  997,  996,  995,  994,  973,  970,  968,
-      967,  966,  965,  964,  963,  962,  961,  960,  959,  958,
-      953,  952,  933,  928,  927,  926,  925,  924,  923,  922,
-      917,  916,  915,  891,  887,  886,  885,  883,  882,  878,
-      873,  872,  871,  870,  868,  859,  853,  852,  851,  847,
-      846,  845,  844,  843,  836,  827,  822,  817,  816,  815,
-
-      814,  813,  812,  811,  809,  805,  804,  803,  802,  801,
-      785,  777,  776,  775,  772,  770,  768,  767,  764,  760,
-      759,  758,  757,  755,  754,  737,  729,  728,  727,  724,
-      722,  721,  703,  702,  701,  700,  698,  697,  696,  672,
-      671,  670,  669,  668,  667,  666,  661,  660,  657,  655,
-      653,  651,  642,  640,  639,  637,  630,  625,  624,  623,
-      622,  621,  618,  617,  616,  615,  594,  591,  589,  587,
-      586,  585,  583,  573,  570,  566,  562,  560,  544,  543,
-      540,  539,  538,  535,  534,  533,  532,  512,  510,  509,
-      508,  507,  505,  504,  495,  491,  490,  489,  486,  485,
-
-      483,  482,  456,  455,  452,  451,  450,  449,  448,  447,
-      429,  427,  424,  418,  414,  407,  399,  393,  392,  391,
-      390,  362,  361,  360,  359,  358,  356,  355,  338,  328,
-      325,  311,  304,  297,  296,  293,  262,  261,  260,  259,
-      254,  228,  227,  222,  205,  196,  195,  172,  171,  164,
-      163,  162,  161,  139,  114,  105,  104,  100,   87,   85,
-       76,   61,   59,   36,   35,   27,   24,   22,   15,   12,
-       10,    8,    3, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149, 1149,
-     1149, 1149, 1149
+        1,    1,    1,    1,    1,    1,    1,    1,    1,    1,
+        1,    2,    2,    5,    2,    2,   11,    5,    2,    9,
+
+        9,    9,  226,    9,   10,    9,   10,   10,  226,    2,
+        9,    9,   17,   16,   13,   13,   11,   13,   13,   25,
+       25,   22,    2,   13,   22,   17,   17,   13,   16,   18,
+       29,   23,   28,   18,   29,   18,   18,   18,   18,   18,
+       18,   18,   18,   30,   19,   26,   13,   31,   13,   19,
+       23,   23,   23,   36,   33,   36,   47,    9,   69,   26,
+       19,   28,   32,   30,   33,   34,   32,   52,   13,   59,
+       46,   47,   52,   29,   20,   13,   20,   20,   20,   20,
+       20,   20,   20,   20,   46,   29,   34,   35,   35,   46,
+       35,   35,   21,   20,   21,   21,   21,   21,   21,   21,
+
+       21,   21,   31,   39,   41,   32,   40,   44,  228,   39,
+       69,   21,   20,   41,   48,   58,   39,   32,   40,   41,
+       40,   44,   39,   20,   43,   42,   42,   40,   59,   43,
+       58,   44,   39,   42,   45,   48,  228,   42,   50,   43,
+       42,   21,   20,   51,   35,   57,   45,   70,   57,   43,
+       49,   49,   45,   78,   50,   49,   50,   50,   53,   55,
+       56,   51,   79,   55,   51,   49,   53,   53,   54,   56,
+       66,   49,   71,   54,   66,   56,   54,   53,   53,   55,
+       56,   60,   60,   87,   60,   60,   54,   64,   60,  144,
+      144,   86,   64,   54,   68,   68,   68,   70,   68,   60,
+
+       68,   78,   55,   64,   86,   68,   68,   72,   72,   72,
+       79,   72,   60,   72,   55,   85,   83,   87,   72,   72,
+       85,  118,   80,   80,   71,   80,   80,  117,   55,  119,
+      120,   80,  124,   55,  126,   80,   83, 1337,   96,   96,
+       96,   96,   96,   96,   96,   96,  118,  119,  117,  145,
+      145,  127,   68,  126,   80,   96,   80,  100,  100,  100,
+      100,  100,  100,  100,  100,   72,  124,  127,  122,  123,
+      134,  169,  136,  139,  100,  134,   80,  139,  169,  123,
+      136,  140,  120,   80,  101,   96,  101,  101,  101,  101,
+      101,  101,  101,  101,  123,  128,  128,  169,  128,  128,
+
+      102,  140,  102,  101,  100,  102,  102,  102,  102,  102,
+      102,  102,  102,  104,  181,  104,  104,  104,  104,  104,
+      104,  104,  104,  122,  174,  181,  141,  143,  135,  106,
+      133,  106,  104,  101,  106,  106,  106,  106,  106,  106,
+      106,  106,  133,  135,  137,  138,  141,  143,  142,  174,
+      133,  146,  128,  148,  138,  147,  150,  149,  147,  153,
+      151,  147,  104,  142,  146,  146,  152,  147,  153,  137,
+      137,  149,  137,  137,  148,  137,  151,  150,  137,  154,
+      151,  152,  155,  156,  157,  158,  159,  160,  137,  137,
+      162,  154,  160,  161,  164,  162, 1335,  156,  158,  154,
+
+      168,  165,  155,  170,  157,  161,  166,  160,  166,  159,
+      165,  167,  170,  172,  164,  168,  165,  173,  167,  176,
+      167,  171,  171,  177,  175,  171,  172,  178,  179,  180,
+      183,  192,  177,  192,  193,  173,  178,  182,  180,  171,
+      175,  180,  177,  194,  194,  182,  179,  196,  183,  186,
+      186,  205,  186,  186,  202,  206,  186,  225,  233,  232,
+      207,  648,  176,  193,  205,  227,  193,  186,  232,  231,
+      234,  206,  648,  233,  176,  200,  200,  200,  207,  200,
+      186,  200,  225,  227,  234,  235,  200,  200,  192,  218,
+      218,  218,  218,  218,  218,  218,  218,  208,  231,  196,
+
+      199,  248,  202,  235,  199,  202,  199,  199,  199,  199,
+      199,  199,  199,  248,  199,  199,  199,  199,  199,  199,
+      199,  199,  199,  199,  199,  199,  199,  199,  199,  199,
+      199,  199,  199,  200,  208,  236,  200,  208,  219,  219,
+      219,  219,  219,  219,  219,  219,  237,  239,  240,  240,
+      208,  208,  239,  236,  249,  243,  242,  249,  199,  199,
+      199,  199,  212,  243,  212,  241,  237,  212,  212,  212,
+      212,  212,  212,  212,  212,  215,  242,  215,  321,  241,
+      215,  215,  215,  215,  215,  215,  215,  215,  217,  199,
+      217,  250,  251,  217,  217,  217,  217,  217,  217,  217,
+
+      217,  220,  220,  220,  220,  220,  220,  220,  220,  250,
+     1325,  251,  253,  321,  220,  220,  220,  220,  220,  220,
+      222,  222,  222,  222,  222,  222,  222,  222,  238,  244,
+      254,  253,  238,  255,  255,  246,  244,  247,  247,  252,
+      238,  257,  254,  257,  220,  220,  220,  220,  220,  220,
+      246,  256,  247,  258,  258,  252,  256,  259,  261,  260,
+      262,  262,  263,  261,  264,  266,  271,  263,  267,  269,
+      259,  260,  273,  264,  269,  267,  268,  268,  274,  275,
+      276,  277,  278,  266,  273,  279,  280,  281,  282,  284,
+      271,  283,  285,  286,  284,  280,  274,  279,  276,  287,
+
+      303,  281,  278,  277,  275,  283,  282,  288,  289,  275,
+      290,  293,  292,  286,  294,  290,  288,  292,  285,  296,
+      295,  298,  298,  294,  289,  290,  293,  297,  288,  300,
+      300,  297,  302,  287,  295,  299,  311,  317,  299,  296,
+      299,  305,  299,  320,  316,  319,  317,  299,  316,  318,
+      319,  322,  299,  299,  299,  299,  299,  303,  338,  305,
+      352,  302,  343,  299,  302,  318,  337,  336,  364,  343,
+      323,  325,  325,  325,  325,  325,  325,  325,  325,  352,
+      337,  320,  299,  336,  344,  345,  338,  364,  311,  320,
+      345,  311,  312,  339,  403,  348,  312,  344,  312,  312,
+
+      312,  312,  312,  312,  312,  322,  312,  312,  312,  312,
+      312,  312,  323,  339,  346,  348,  380,  403,  380,  346,
+      312,  312,  312,  312,  312,  326,  326,  326,  326,  326,
+      326,  326,  326,  328,  328,  328,  328,  328,  328,  328,
+      328,  329,  329,  329,  329,  329,  329,  329,  329,  342,
+      312,  312,  312,  312,  331,  331,  331,  331,  331,  331,
+      331,  331,  347,  342,  349,  351,  354,  347,  351,  349,
+      354,  355,  378,  382,  385,  385,  382,  378,  388,  388,
+      355,  312,  313,  313,  313,  313,  313,  313,  313,  313,
+      313,  313,  313,  313,  313,  313,  313,  313,  313,  313,
+
+      313,  313,  313,  313,  313,  313,  313,  313,  313,  313,
+      313,  313,  313,  313,  313,  313,  313,  313,  313,  313,
+      313,  313,  313,  313,  313,  313,  313,  313,  313,  313,
+      313,  313,  313,  313,  313,  313,  313,  313,  313,  313,
+      313,  313,  313,  313,  313,  313,  313,  313,  313,  313,
+      313,  313,  313,  313,  313,  313,  313,  313,  313,  313,
+      313,  313,  313,  313,  313,  313,  313,  313,  313,  313,
+      313,  313,  313,  315,  315,  315,  315,  315,  315,  315,
+      315,  315,  315,  315,  315,  315,  315,  315,  315,  315,
+      315,  315,  315,  315,  315,  315,  315,  315,  315,  315,
+
+      315,  315,  315,  315,  315,  315,  315,  315,  315,  315,
+      315,  315,  315,  315,  315,  315,  315,  315,  315,  315,
+      315,  315,  315,  315,  315,  315,  315,  315,  315,  315,
+      315,  315,  315,  315,  315,  315,  315,  315,  315,  315,
+      315,  315,  315,  315,  315,  315,  315,  315,  315,  315,
+      315,  315,  315,  315,  315,  315,  315,  315,  315,  315,
+      315,  315,  315,  315,  333,  333,  333,  333,  333,  333,
+      333,  333,  350,  341,  356,  353,  341,  333,  333,  333,
+      333,  333,  333,  341,  359,  360,  366,  459,  356,  357,
+      350,  362,  365,  367,  341,  353,  368,  459,  359,  362,
+
+      363,  341,  363,  360,  365,  369,  366,  333,  333,  333,
+      333,  333,  333,  367,  357,  370,  371,  368,  374,  357,
+      375,  373,  383,  376,  377,  369,  381,  370,  384,  371,
+      373,  386, 1324,  390,  387,  391,  374,  376,  377,  387,
+      375,  363,  390,  392,  384,  381,  389,  383,  386,  393,
+      397,  389,  383,  396,  391,  394,  394,  399,  396,  397,
+      394,  400,  401,  392,  400,  402,  402,  405,  404,  393,
+      406,  399,  404,  407,  401,  408,  411,  406,  410,  412,
+      407,  405,  404,  413,  415,  414,  417,  416,  413,  414,
+      418,  411,  416,  419,  420,  421,  408,  423,  410,  415,
+
+      422,  425,  419,  412,  418,  428,  440,  416,  441,  420,
+      428,  417,  464,  466,  441,  421, 1321,  464,  422,  425,
+      472,  472,  423,  474,  440,  466,  474,  423,  435,  435,
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  435,
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  435,
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  435,
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  435,
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  435,
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  435,
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  435,
+
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  435,
+      435,  435,  435,  435,  435,  435,  435,  435,  435,  436,
+      442,  443,  444,  436,  446,  436,  436,  436,  436,  436,
+      436,  436,  445,  436,  436,  436,  436,  436,  436,  447,
+      442,  475,  461,  690,  475,  690,  460,  436,  436,  436,
+      436,  436,  448,  448,  448,  448,  448,  448,  448,  448,
+      443,  460,  446,  450,  450,  450,  450,  450,  450,  450,
+      450,  461,  462,  465,  444,  467,  468,  436,  436,  436,
+      436,  470,  447,  469,  471,  445,  477,  473,  476,  637,
+      468,  479,  469,  478,  479,  637,  467,  462,  465,  470,
+
+      476,  481,  481,  483,  471,  473,  477,  483,  436,  437,
+      437,  437,  437,  437,  437,  437,  437,  437,  437,  437,
+      437,  437,  437,  437,  437,  437,  437,  437,  437,  478,
+      480,  482,  487,  491,  492,  486,  590,  437,  437,  437,
+      437,  437,  490,  495, 1318,  493,  492,  490,  482,  480,
+      493,  494,  495,  502,  496,  590,  494,  496,  491,  497,
+      499,  500,  501,  501,  499,  502,  500,  437,  437,  437,
+      437,  486,  487,  497,  503,  504,  505,  506,  509,  509,
+      504,  511,  512,  514,  511,  512,  520,  517,  514,  520,
+      503,  517,  525,  505,  524,  524,  525,  506,  437,  439,
+
+      439,  439,  439,  439,  439,  439,  439,  439,  439,  439,
+      439,  439,  439,  439,  439,  439,  439,  439,  439,  507,
+      510,  513,  516,  518,  523, 1312,  522,  439,  439,  439,
+      439,  439,  521,  522,  516,  528,  528,  521,  521,  507,
+      510,  513,  521,  536,  523,  527,  533,  530,  518,  534,
+      527,  532,  532,  535,  538,  533,  537,  439,  439,  439,
+      439,  530,  539,  536,  534,  541,  537,  540,  518,  535,
+      542,  540,  543,  548,  538,  564,  569,  543,  548,  542,
+      544,  541,  539,  545,  547,  567,  568,  545,  439,  454,
+      454,  454,  454,  454,  454,  454,  454,  566,  544,  564,
+
+      573,  547,  454,  454,  454,  454,  454,  454,  565,  571,
+      566,  584,  573,  565,  574,  578,  588,  581,  582,  582,
+      584,  570,  567,  568,  583,  578,  574,  583,  588, 1309,
+      569,  581,  454,  454,  454,  454,  454,  454,  560,  560,
+      560,  560,  560,  560,  560,  560,  560,  560,  560,  560,
+      560,  560,  560,  560,  560,  560,  560,  560,  570,  575,
+      576,  579,  571,  580,  585,  587,  560,  560,  560,  560,
+      560,  586,  589,  589,  587,  586,  591,  592,  600,  591,
+      593,  593,  595,  601,  579,  585,  575,  576,  594,  602,
+      603,  594,  580,  607,  613,  613,  560,  560,  560,  560,
+
+      611,  595,  607,  600,  612,  602,  600,  601,  606,  612,
+      603,  606,  618,  611,  619,  592,  672,  621,  621,  619,
+      621,  621,  742,  650,  742,  618,  650,  560,  561,  561,
+      561,  561,  561,  561,  561,  561,  561,  561,  561,  561,
+      561,  561,  561,  561,  561,  561,  561,  561,  614,  616,
+      617,  620,  623,  624,  627,  635,  561,  561,  561,  561,
+      561,  622,  672,  632,  614,  617,  622,  616,  638,  645,
+      623,  620,  624,  632,  632,  635,  643,  639,  621,  627,
+      640,  638,  639,  641,  642,  649,  561,  561,  561,  561,
+      649,  643,  640,  646,  645,  641,  642,  652,  644,  627,
+
+      644,  670,  646,  749,  644,  749,  653,  644,  670,  652,
+      673,  687, 1302,  654,  687,  644,  651,  561,  563,  563,
+      563,  563,  563,  563,  563,  563,  563,  563,  563,  563,
+      563,  563,  563,  563,  563,  563,  563,  563,  654,  651,
+      653,  647,  669,  674,  676,  673,  563,  563,  563,  563,
+      563,  647,  675,  647,  677,  678,  679,  647,  654,  677,
+      647,  680,  682,  681,  669,  678,  685, 1300,  647,  683,
+      692,  680,  688,  693,  696,  679,  563,  563,  563,  563,
+      674,  676,  692,  683,  702,  685,  696,  675,  681,  693,
+      688,  682,  694,  694,  697,  697,  701,  701,  703,  709,
+
+      702,  715,  709,  703,  741,  741,  715,  563,  626,  626,
+      626,  626,  626,  626,  626,  626,  626,  626,  626,  626,
+      626,  626,  626,  626,  626,  626,  626,  626,  626,  626,
+      626,  626,  626,  626,  626,  626,  626,  626,  626,  626,
+      626,  706,  710,  718,  671,  671,  712,  671,  671,  699,
+      700,  747,  716,  716,  717,  716,  716,  671,  723,  718,
+      732,  706,  710,  747,  712,  732,  626,  626,  626,  626,
+      626,  671,  717,  735,  699,  700,  714,  714,  735,  714,
+      714,  736,  738,  723,  723,  736,  723,  738,  739,  748,
+      748,  671,  756,  739,  699,  700,  756,  626,  626,  633,
+
+      633,  633,  633,  633,  633,  633,  633,  633,  633,  633,
+      633,  633,  633,  633,  633,  633,  633,  633,  633,  633,
+      633,  633,  633,  633,  633,  633,  633,  633,  633,  633,
+      633,  633,  716,  740,  743,  744,  733,  714,  812,  745,
+      744,  751,  812,  746,  750,  752,  751,  776,  769,  719,
+      719,  740,  719,  719,  743,  745,  754,  633,  633,  633,
+      633,  733,  746,  753,  750,  752,  753,  755,  791,  770,
+      753,  754,  779,  733,  792,  753,  719,  769,  753,  792,
+      791,  775,  776,  755,  770,  777,  779,  753,  633,  633,
+      668,  668,  668,  668,  668,  668,  668,  668,  668,  668,
+
+      668,  668,  668,  668,  668,  668,  668,  668,  668,  668,
+      719,  771,  771,  784,  771,  771,  781,  789,  668,  668,
+      668,  668,  668,  778,  771,  774,  774,  775,  774,  774,
+      781,  777,  788,  793,  790,  794,  801,  798,  774,  799,
+      784,  795,  789,  834,  815,  793,  788,  794,  668,  668,
+      668,  668,  774,  795,  789,  790,  798,  799,  778,  820,
+      805,  801,  815,  834,  801,  805,  811,  811,  903,  811,
+      811,  821,  821,  833,  833,  841,  841,  903,  820,  668,
+      707,  707,  707,  707,  707,  707,  707,  707,  707,  707,
+      707,  707,  707,  707,  707,  707,  707,  707,  707,  707,
+
+      707,  707,  707,  707,  707,  707,  707,  707,  707,  707,
+      707,  707,  707,  829,  836,  837, 1299,  807,  816,  816,
+      835,  816,  816,  835,  836,  839,  838,  840,  829,  838,
+      839,  842,  840,  837,  842,  844,  846,  843,  707,  707,
+      707,  707,  807,  847,  848,  816,  811,  843,  845,  849,
+      849,  845,  850,  844,  846,  847,  852,  850,  848,  906,
+      852,  873,  807,  906,  868, 1298,  873, 1294,  869,  707,
+      707,  713,  713,  713,  713,  713,  713,  713,  713,  713,
+      713,  713,  713,  713,  713,  713,  713,  713,  713,  713,
+      713,  713,  713,  713,  713,  713,  713,  713,  713,  713,
+
+      713,  713,  713,  713,  851,  853,  854,  855,  864,  864,
+      868,  864,  864,  855,  869,  866,  866,  855,  866,  866,
+      853,  864,  872,  874,  876,  851,  877,  955,  866,  713,
+      713,  713,  713,  854,  878,  864,  872,  878,  881,  876,
+      879,  880,  883,  883,  955,  899,  877,  872,  874,  870,
+      871,  889,  879,  880,  898,  864,  889,  898,  881,  899,
+      713,  713,  722,  722,  722,  722,  722,  722,  722,  722,
+      722,  722,  722,  722,  722,  722,  722,  722,  722,  722,
+      722,  722,  722,  722,  722,  722,  722,  722,  722,  722,
+      722,  722,  722,  722,  722,  870,  871,  865,  865,  890,
+
+      865,  865,  900,  907,  902, 1292,  910,  900,  891,  902,
+      865,  904,  901, 1022,  908,  910,  907,  911,  890,  914,
+      722,  722,  722,  722,  865,  887,  887,  891,  887,  887,
+      901,  904,  908,  909,  912,  913,  918,  911,  909,  914,
+     1022,  918,  915,  912,  865,  915,  930,  931,  954,  954,
+      913,  722,  722,  885,  885,  885,  885,  885,  885,  885,
+      885,  885,  885,  885,  885,  885,  885,  885,  885,  885,
+      885,  885,  885,  885,  885,  885,  885,  885,  885,  885,
+      885,  885,  885,  885,  885,  885,  887,  916,  917,  919,
+      920,  932,  930,  931,  934,  945,  935,  939,  932,  935,
+
+      934,  936,  917,  946,  947,  920,  936,  919,  946,  916,
+      957,  885,  885,  885,  885,  945,  939,  943,  943,  956,
+      943,  943,  959,  956,  947,  957,  958,  965,  962,  958,
+      960,  963,  964,  966,  972,  964,  959,  967,  968,  968,
+      970,  965,  885,  885,  962,  960,  963,  969,  966,  969,
+      971,  967,  969,  969,  972,  973,  974,  975,  970,  971,
+      983,  984,  985,  985,  987,  984,  973,  992, 1000, 1001,
+     1001, 1002,  975, 1004, 1006,  984,  974, 1007, 1009, 1005,
+     1000, 1002, 1011, 1010,  987,  992, 1005, 1013,  969,  983,
+     1010, 1014, 1006, 1004, 1007, 1012, 1015,  943, 1009, 1016,
+
+     1011, 1018, 1012, 1019, 1020, 1021, 1020, 1013, 1023, 1020,
+     1020, 1031, 1038, 1030, 1029, 1032, 1018, 1036, 1036, 1291,
+     1036, 1036, 1019, 1031, 1037, 1021, 1023, 1014, 1029, 1045,
+     1044, 1046, 1038, 1044, 1047, 1060, 1015, 1016, 1030, 1061,
+     1032, 1049, 1049, 1050, 1036, 1020, 1051, 1054, 1046, 1037,
+     1054, 1047, 1037, 1059, 1045, 1048, 1048, 1048, 1062, 1048,
+     1050, 1065, 1059, 1051, 1052, 1052, 1052, 1064, 1052, 1066,
+     1073, 1060, 1075, 1066, 1069, 1069, 1065, 1290, 1048, 1061,
+     1076, 1076, 1073, 1076, 1076, 1064, 1054, 1052, 1078, 1078,
+     1069, 1078, 1078, 1080, 1080, 1048, 1062, 1075, 1083, 1085,
+
+     1085, 1084, 1086, 1086, 1052, 1087, 1088, 1076, 1089, 1090,
+     1091, 1091, 1093, 1093, 1095, 1078, 1083, 1084, 1092, 1096,
+     1069, 1092, 1087, 1088, 1097, 1089, 1090, 1094, 1094, 1098,
+     1100, 1095, 1099, 1099, 1101, 1102, 1096, 1107, 1106, 1108,
+     1118, 1097, 1109, 1109, 1125, 1122, 1098, 1106, 1108, 1125,
+     1102, 1124, 1107, 1117, 1117, 1133, 1118, 1181, 1109, 1119,
+     1119, 1181, 1119, 1119, 1122, 1126, 1124, 1145, 1126, 1289,
+     1100, 1127, 1101, 1133, 1127, 1128, 1128, 1129, 1129, 1117,
+     1130, 1130, 1131, 1131, 1132, 1145, 1119, 1132, 1109, 1134,
+     1135, 1158, 1134, 1135, 1136, 1136, 1137, 1137, 1138, 1138,
+
+     1139, 1139, 1140, 1143, 1144, 1140, 1146, 1160, 1160, 1161,
+     1160, 1160, 1164, 1171, 1163, 1165, 1158, 1143, 1144, 1163,
+     1146, 1166, 1167, 1168, 1166, 1167, 1168, 1170, 1171, 1161,
+     1164, 1163, 1169, 1165, 1160, 1169, 1172, 1173, 1174, 1175,
+     1176, 1174, 1175, 1176, 1177, 1170, 1178, 1177, 1179, 1180,
+     1182, 1182, 1193, 1199, 1172, 1173, 1180, 1194, 1194, 1200,
+     1194, 1194, 1197, 1197, 1178, 1179, 1198, 1201, 1199, 1202,
+     1203, 1198, 1204, 1287, 1200, 1286, 1205, 1207, 1208, 1193,
+     1209, 1210, 1211, 1212, 1194, 1201, 1217, 1202, 1203, 1213,
+     1204, 1205, 1207, 1208, 1214, 1214, 1216, 1228, 1209, 1210,
+
+     1211, 1212, 1230, 1233, 1213, 1216, 1217, 1229, 1234, 1285,
+     1229, 1235, 1236, 1240, 1241, 1228, 1242, 1243, 1233, 1245,
+     1246, 1258, 1245, 1234, 1230, 1247, 1235, 1236, 1240, 1241,
+     1247, 1242, 1243, 1259, 1246, 1268, 1269, 1269, 1270, 1258,
+     1280, 1279, 1259, 1281, 1282, 1283, 1283, 1295, 1293, 1306,
+     1305, 1296, 1295, 1268, 1296, 1270, 1279, 1293, 1281, 1280,
+     1307, 1315, 1316, 1316, 1282, 1305, 1306, 1317, 1322, 1323,
+     1327, 1327, 1323, 1328, 1331, 1315, 1284, 1331, 1307, 1332,
+     1334, 1336, 1317, 1278, 1277, 1322, 1276, 1275, 1274, 1273,
+     1272, 1328, 1271, 1267, 1332, 1266, 1336, 1265, 1334, 1339,
+
+     1339, 1339, 1339, 1339, 1339, 1339, 1339, 1339, 1339, 1339,
+     1339, 1339, 1340, 1340, 1340, 1340, 1340, 1340, 1264, 1340,
+     1340, 1340, 1340, 1340, 1340, 1341, 1341, 1263, 1341, 1341,
+     1262, 1261, 1260, 1257, 1341, 1341, 1341, 1342, 1342, 1256,
+     1342, 1342, 1342, 1343, 1255, 1343, 1343, 1343, 1344, 1344,
+     1254, 1344, 1344, 1344, 1344, 1344, 1344, 1344, 1344, 1344,
+     1344, 1345, 1345, 1345, 1345, 1345, 1345, 1345, 1345, 1345,
+     1345, 1345, 1345, 1345, 1346, 1253, 1251, 1250, 1346, 1346,
+     1346, 1346, 1346, 1346, 1346, 1346, 1346, 1347, 1347, 1347,
+     1347, 1347, 1348, 1249, 1348, 1348, 1348, 1349, 1349, 1248,
+
+     1349, 1349, 1349, 1349, 1349, 1349, 1349, 1349, 1349, 1349,
+     1350, 1244, 1350, 1350, 1350, 1351, 1351, 1239, 1351, 1351,
+     1351, 1352, 1238, 1352, 1353, 1237, 1353, 1353, 1353, 1354,
+     1354, 1232, 1354, 1354, 1354, 1354, 1354, 1354, 1354, 1354,
+     1354, 1354, 1355, 1231, 1226, 1225, 1355, 1355, 1355, 1355,
+     1355, 1355, 1355, 1355, 1355, 1356, 1224, 1356, 1356, 1356,
+     1357, 1223, 1357, 1357, 1357, 1358, 1221, 1358, 1358, 1358,
+     1359, 1220, 1359, 1359, 1359, 1360, 1219, 1360, 1360, 1360,
+     1361, 1218, 1361, 1361, 1361, 1362, 1362, 1215, 1362, 1362,
+     1362, 1362, 1362, 1362, 1362, 1362, 1362, 1362, 1363, 1363,
+
+     1363, 1364, 1206, 1196, 1191, 1364, 1364, 1364, 1364, 1364,
+     1364, 1364, 1364, 1364, 1365, 1365, 1365, 1366, 1190, 1189,
+     1188, 1187, 1366, 1366, 1366, 1367, 1186, 1367, 1367, 1367,
+     1368, 1185, 1368, 1368, 1368, 1369, 1184, 1369, 1369, 1369,
+     1370, 1183, 1370, 1370, 1370, 1371, 1162, 1371, 1371, 1371,
+     1372, 1159, 1372, 1372, 1372, 1373, 1157, 1373, 1373, 1373,
+     1374, 1156, 1374, 1374, 1374, 1375, 1375, 1375, 1376, 1376,
+     1155, 1376, 1376, 1376, 1376, 1376, 1376, 1376, 1376, 1376,
+     1376, 1377, 1377, 1154, 1377, 1377, 1377, 1377, 1377, 1377,
+     1377, 1377, 1377, 1377, 1378, 1153, 1378, 1378, 1378, 1379,
+
+     1152, 1151, 1150, 1379, 1379, 1379, 1379, 1379, 1379, 1379,
+     1379, 1379, 1380, 1380, 1380, 1380, 1380, 1380, 1380, 1380,
+     1380, 1380, 1380, 1380, 1380, 1381, 1149, 1381, 1381, 1381,
+     1382, 1382, 1382, 1382, 1382, 1382, 1382, 1382, 1382, 1382,
+     1382, 1382, 1382, 1383, 1148, 1383, 1383, 1383, 1384, 1147,
+     1384, 1384, 1384, 1385, 1142, 1385, 1385, 1385, 1386, 1141,
+     1386, 1386, 1386, 1387, 1121, 1387, 1387, 1387, 1388, 1116,
+     1388, 1388, 1388, 1389, 1389, 1115, 1389, 1389, 1389, 1389,
+     1389, 1389, 1389, 1389, 1389, 1389, 1390, 1390, 1114, 1390,
+     1390, 1390, 1390, 1390, 1390, 1390, 1390, 1390, 1390, 1391,
+
+     1113, 1391, 1391, 1391, 1392, 1392, 1392, 1392, 1392, 1392,
+     1392, 1392, 1392, 1392, 1392, 1392, 1392, 1393, 1393, 1393,
+     1393, 1393, 1393, 1393, 1393, 1393, 1393, 1393, 1393, 1393,
+     1394, 1112, 1394, 1394, 1394, 1395, 1395, 1395, 1395, 1395,
+     1395, 1395, 1395, 1395, 1395, 1395, 1395, 1395, 1396, 1396,
+     1396, 1396, 1396, 1397, 1397, 1111, 1397, 1397, 1397, 1397,
+     1397, 1397, 1397, 1397, 1397, 1397, 1398, 1398, 1110, 1398,
+     1398, 1398, 1398, 1398, 1398, 1398, 1398, 1398, 1398, 1399,
+     1399, 1105, 1399, 1399, 1399, 1399, 1399, 1399, 1399, 1399,
+     1399, 1399, 1400, 1400, 1104, 1400, 1400, 1400, 1400, 1400,
+
+     1400, 1400, 1400, 1400, 1400, 1401, 1401, 1401, 1401, 1401,
+     1401, 1401, 1401, 1401, 1401, 1103, 1082, 1401, 1402, 1402,
+     1402, 1402, 1402, 1402, 1402, 1402, 1402, 1402, 1081, 1077,
+     1402, 1403, 1403, 1403, 1403, 1403, 1403, 1403, 1403, 1403,
+     1403, 1074, 1072, 1403, 1404, 1404, 1404, 1404, 1404, 1404,
+     1404, 1404, 1404, 1404, 1071, 1070, 1404, 1405, 1405, 1405,
+     1405, 1405, 1405, 1405, 1405, 1405, 1405, 1068, 1067, 1405,
+     1406, 1406, 1406, 1406, 1406, 1406, 1406, 1406, 1406, 1406,
+     1063, 1058, 1406, 1407, 1407, 1057, 1407, 1407, 1407, 1407,
+     1407, 1407, 1407, 1407, 1407, 1407, 1408, 1408, 1056, 1408,
+
+     1408, 1408, 1408, 1408, 1408, 1408, 1408, 1408, 1408, 1409,
+     1409, 1055, 1409, 1409, 1409, 1409, 1409, 1409, 1409, 1409,
+     1409, 1409, 1410, 1410, 1053, 1410, 1410, 1410, 1410, 1410,
+     1410, 1410, 1410, 1410, 1410, 1411, 1411, 1043, 1411, 1411,
+     1411, 1411, 1411, 1411, 1411, 1411, 1411, 1411, 1412, 1412,
+     1040, 1412, 1412, 1412, 1412, 1412, 1412, 1412, 1412, 1412,
+     1412, 1413, 1413, 1035, 1413, 1413, 1413, 1413, 1413, 1413,
+     1413, 1413, 1413, 1413, 1414, 1414, 1034, 1414, 1414, 1414,
+     1414, 1414, 1414, 1414, 1414, 1414, 1414, 1415, 1415, 1033,
+     1415, 1415, 1415, 1415, 1415, 1415, 1415, 1415, 1415, 1415,
+
+     1416, 1416, 1028, 1416, 1416, 1416, 1416, 1416, 1416, 1416,
+     1416, 1416, 1416, 1027, 1026, 1025, 1024, 1017, 1008, 1003,
+      998,  996,  995,  994,  993,  991,  990,  989,  988,  986,
+      981,  980,  979,  978,  977,  961,  953,  952,  951,  950,
+      949,  948,  944,  942,  941,  938,  937,  933,  929,  928,
+      927,  926,  924,  923,  921,  905,  897,  896,  895,  894,
+      893,  892,  888,  886,  884,  882,  875,  863,  862,  861,
+      860,  858,  857,  856,  832,  831,  830,  828,  827,  826,
+      825,  824,  823,  822,  819,  814,  813,  810,  808,  806,
+      804,  802,  800,  797,  796,  787,  786,  785,  783,  782,
+
+      780,  773,  768,  767,  766,  765,  764,  761,  760,  759,
+      758,  757,  737,  734,  731,  729,  728,  727,  726,  725,
+      724,  721,  711,  708,  705,  704,  698,  695,  691,  689,
+      686,  684,  667,  666,  663,  662,  661,  658,  657,  656,
+      655,  636,  634,  631,  630,  629,  628,  625,  615,  610,
+      609,  608,  605,  604,  599,  598,  597,  596,  558,  557,
+      554,  553,  552,  551,  550,  549,  546,  531,  529,  526,
+      519,  515,  508,  498,  489,  488,  485,  484,  433,  432,
+      431,  430,  429,  427,  426,  409,  398,  395,  379,  372,
+      361,  358,  309,  308,  307,  306,  301,  291,  272,  270,
+
+      265,  245,  230,  229,  204,  203,  201,  197,  195,  191,
+      190,  189,  188,  163,  131,  125,  121,  116,  115,  111,
+       95,   93,   84,   65,   63,   38,   27,   24,   15,   12,
+        8,    3, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338, 1338,
+     1338, 1338, 1338, 1338
     } ;
 
 /* Table of booleans, true if rule could match eol. */
-static yyconst flex_int32_t yy_rule_can_match_eol[175] =
+static yyconst flex_int32_t yy_rule_can_match_eol[200] =
     {   0,
 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 1, 1, 1, 0, 0, 0,
-    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 0, 0, 0,
+    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 0, 1,
+    1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
     0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
-    1, 0, 0, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-    0, 0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,     };
+    0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1, 1, 0, 0, 1, 1, 1, 0, 0,
+    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+    0, 1, 0, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+        };
 
 static yy_state_type yy_last_accepting_state;
 static char *yy_last_accepting_cpos;
@@ -1524,6 +1906,8 @@ static int skip_ahead_until(const char *text);
 static int skip_to_next_directive();
 static int skip_conditional_block();
 
+static const char *raw_string(const char *begin);
+
 static void preprocessor_directive(const char *text, size_t l);
 static void print_preprocessor_error(int result, const char *cp, size_t n);
 static const char *get_macro_arguments();
@@ -1538,7 +1922,7 @@ static void push_macro(MacroInfo *macro);
 static void pop_macro();
 static int in_macro();
 
-#line 1542 "lex.yy.c"
+#line 1922 "lex.yy.c"
 
 #define INITIAL 0
 
@@ -1627,7 +2011,7 @@ static int input (void );
 /* This used to be an fputs(), but since the string might contain NUL's,
  * we now use fwrite().
  */
-#define ECHO fwrite( yytext, yyleng, 1, yyout )
+#define ECHO do { if (fwrite( yytext, yyleng, 1, yyout )) {} } while (0)
 #endif
 
 /* Gets input and stuffs it into "buf".  number of characters read, or YY_NULL,
@@ -1638,7 +2022,7 @@ static int input (void );
         if ( YY_CURRENT_BUFFER_LVALUE->yy_is_interactive ) \
                 { \
                 int c = '*'; \
-                yy_size_t n; \
+                size_t n; \
                 for ( n = 0; n < max_size && \
                              (c = getc( yyin )) != EOF && c != '\n'; ++n ) \
                         buf[n] = (char) c; \
@@ -1719,14 +2103,14 @@ extern int yylex (void);
  */
 YY_DECL
 {
-        register yy_state_type yy_current_state;
-        register char *yy_cp, *yy_bp;
-        register int yy_act;
+        yy_state_type yy_current_state;
+        char *yy_cp, *yy_bp;
+        int yy_act;
 
-#line 77 "vtkParse.l"
+#line 79 "vtkParse.l"
 
 
-#line 1730 "lex.yy.c"
+#line 2110 "lex.yy.c"
 
         if ( !(yy_init) )
                 {
@@ -1771,7 +2155,7 @@ YY_DECL
 yy_match:
                 do
                         {
-                        register YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)];
+                        YY_CHAR yy_c = yy_ec[YY_SC_TO_UI(*yy_cp)];
                         if ( yy_accept[yy_current_state] )
                                 {
                                 (yy_last_accepting_state) = yy_current_state;
@@ -1780,13 +2164,13 @@ yy_match:
                         while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
                                 {
                                 yy_current_state = (int) yy_def[yy_current_state];
-                                if ( yy_current_state >= 1150 )
+                                if ( yy_current_state >= 1339 )
                                         yy_c = yy_meta[(unsigned int) yy_c];
                                 }
                         yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
                         ++yy_cp;
                         }
-                while ( yy_base[yy_current_state] != 2674 );
+                while ( yy_base[yy_current_state] != 4033 );
 
 yy_find_action:
                 yy_act = yy_accept[yy_current_state];
@@ -1822,12 +2206,12 @@ do_action:        /* This label is used only to access EOF actions. */
 
 case 1:
 YY_RULE_SETUP
-#line 79 "vtkParse.l"
+#line 81 "vtkParse.l"
 { skip_comment(); }
         YY_BREAK
 case 2:
 YY_RULE_SETUP
-#line 81 "vtkParse.l"
+#line 83 "vtkParse.l"
 {
       if (!IgnoreBTX) {
         skip_ahead_until("//ETX");
@@ -1836,19 +2220,19 @@ YY_RULE_SETUP
         YY_BREAK
 case 3:
 YY_RULE_SETUP
-#line 87 "vtkParse.l"
+#line 89 "vtkParse.l"
 /* eat mismatched ETX */
         YY_BREAK
 case 4:
 YY_RULE_SETUP
-#line 89 "vtkParse.l"
+#line 91 "vtkParse.l"
 {
       skip_ahead_until("@end");
     }
         YY_BREAK
 case 5:
 YY_RULE_SETUP
-#line 93 "vtkParse.l"
+#line 95 "vtkParse.l"
 {
       size_t pos = 1;
       while (yytext[pos-1] != 'M' || yytext[pos] != 'E')
@@ -1860,38 +2244,38 @@ YY_RULE_SETUP
         YY_BREAK
 case 6:
 YY_RULE_SETUP
-#line 102 "vtkParse.l"
+#line 104 "vtkParse.l"
 { setCommentState(1); }
         YY_BREAK
 case 7:
 YY_RULE_SETUP
-#line 103 "vtkParse.l"
+#line 105 "vtkParse.l"
 { setCommentState(2); }
         YY_BREAK
 case 8:
 YY_RULE_SETUP
-#line 104 "vtkParse.l"
+#line 106 "vtkParse.l"
 { setCommentState(3); }
         YY_BREAK
 case 9:
 YY_RULE_SETUP
-#line 105 "vtkParse.l"
+#line 107 "vtkParse.l"
 { setCommentState(3); }
         YY_BREAK
 case 10:
 YY_RULE_SETUP
-#line 106 "vtkParse.l"
+#line 108 "vtkParse.l"
 { setCommentState(4); }
         YY_BREAK
 case 11:
 /* rule 11 can match eol */
 YY_RULE_SETUP
-#line 107 "vtkParse.l"
+#line 109 "vtkParse.l"
 { closeOrClearComment(); }
         YY_BREAK
 case 12:
 YY_RULE_SETUP
-#line 108 "vtkParse.l"
+#line 110 "vtkParse.l"
 {
       size_t pos = 2;
       while (yytext[pos-2] != '/' || yytext[pos-1] != '/') pos++;
@@ -1900,13 +2284,13 @@ YY_RULE_SETUP
         YY_BREAK
 case 13:
 YY_RULE_SETUP
-#line 114 "vtkParse.l"
+#line 116 "vtkParse.l"
 /* eat C++ comments */
         YY_BREAK
 case 14:
 /* rule 14 can match eol */
 YY_RULE_SETUP
-#line 116 "vtkParse.l"
+#line 118 "vtkParse.l"
 {
       skip_trailing_comment(yytext, yyleng);
       preprocessor_directive(yytext, yyleng);
@@ -1915,7 +2299,7 @@ YY_RULE_SETUP
 case 15:
 /* rule 15 can match eol */
 YY_RULE_SETUP
-#line 121 "vtkParse.l"
+#line 123 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(STRING_LITERAL);
@@ -1924,7 +2308,7 @@ YY_RULE_SETUP
 case 16:
 /* rule 16 can match eol */
 YY_RULE_SETUP
-#line 126 "vtkParse.l"
+#line 128 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(CHAR_LITERAL);
@@ -1932,12 +2316,20 @@ YY_RULE_SETUP
         YY_BREAK
 case 17:
 YY_RULE_SETUP
-#line 131 "vtkParse.l"
-/* ignore EXPORT macros */
+#line 133 "vtkParse.l"
+{
+      yylval.str = raw_string(yytext);
+      return(STRING_LITERAL);
+    }
         YY_BREAK
 case 18:
 YY_RULE_SETUP
-#line 133 "vtkParse.l"
+#line 138 "vtkParse.l"
+/* ignore EXPORT macros */
+        YY_BREAK
+case 19:
+YY_RULE_SETUP
+#line 140 "vtkParse.l"
 { /* let the wrappers see the parameter */
       const char *args = NULL;
       const char *cp;
@@ -1948,7 +2340,7 @@ YY_RULE_SETUP
         cp = args;
         if (*cp == '(') { cp++; }
         while (*cp == ' ' || *cp == '\t') { cp++; }
-        l = vtkidlen(cp);
+        l = vtkParse_SkipId(cp);
         if (l)
           {
           yylval.str = vtkstrndup(cp, l);
@@ -1959,93 +2351,80 @@ YY_RULE_SETUP
         }
     }
         YY_BREAK
-case 19:
-YY_RULE_SETUP
-#line 154 "vtkParse.l"
-return(DOUBLE);
-        YY_BREAK
 case 20:
 YY_RULE_SETUP
-#line 155 "vtkParse.l"
-return(FLOAT);
+#line 161 "vtkParse.l"
+return(AUTO);
         YY_BREAK
 case 21:
 YY_RULE_SETUP
-#line 156 "vtkParse.l"
-return(INT64__);
+#line 163 "vtkParse.l"
+return(DOUBLE);
         YY_BREAK
 case 22:
 YY_RULE_SETUP
-#line 157 "vtkParse.l"
-return(SHORT);
+#line 164 "vtkParse.l"
+return(FLOAT);
         YY_BREAK
 case 23:
 YY_RULE_SETUP
-#line 158 "vtkParse.l"
-return(LONG);
+#line 165 "vtkParse.l"
+return(INT64__);
         YY_BREAK
 case 24:
 YY_RULE_SETUP
-#line 159 "vtkParse.l"
-return(CHAR);
+#line 166 "vtkParse.l"
+return(SHORT);
         YY_BREAK
 case 25:
 YY_RULE_SETUP
-#line 160 "vtkParse.l"
-return(INT);
+#line 167 "vtkParse.l"
+return(LONG);
         YY_BREAK
 case 26:
 YY_RULE_SETUP
-#line 162 "vtkParse.l"
-return(UNSIGNED);
+#line 168 "vtkParse.l"
+return(CHAR);
         YY_BREAK
 case 27:
 YY_RULE_SETUP
-#line 163 "vtkParse.l"
-return(SIGNED);
+#line 169 "vtkParse.l"
+return(INT);
         YY_BREAK
 case 28:
 YY_RULE_SETUP
-#line 165 "vtkParse.l"
-return(VOID);
+#line 171 "vtkParse.l"
+return(UNSIGNED);
         YY_BREAK
 case 29:
 YY_RULE_SETUP
-#line 166 "vtkParse.l"
-return(BOOL);
+#line 172 "vtkParse.l"
+return(SIGNED);
         YY_BREAK
 case 30:
 YY_RULE_SETUP
-#line 168 "vtkParse.l"
-return(SIZE_T);
+#line 174 "vtkParse.l"
+return(VOID);
         YY_BREAK
 case 31:
 YY_RULE_SETUP
-#line 169 "vtkParse.l"
-return(SSIZE_T);
+#line 175 "vtkParse.l"
+return(BOOL);
         YY_BREAK
 case 32:
 YY_RULE_SETUP
-#line 171 "vtkParse.l"
-/* ignore the Q_OBJECT macro from Qt */
+#line 177 "vtkParse.l"
+return(CHAR16_T);
         YY_BREAK
 case 33:
-/* rule 33 can match eol */
-*yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
-(yy_c_buf_p) = yy_cp -= 1;
-YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 172 "vtkParse.l"
-return(PUBLIC);
+#line 178 "vtkParse.l"
+return(CHAR32_T);
         YY_BREAK
 case 34:
-/* rule 34 can match eol */
-*yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
-(yy_c_buf_p) = yy_cp -= 1;
-YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 173 "vtkParse.l"
-return(PRIVATE);
+#line 179 "vtkParse.l"
+return(WCHAR_T);
         YY_BREAK
 case 35:
 /* rule 35 can match eol */
@@ -2053,8 +2432,11 @@ case 35:
 (yy_c_buf_p) = yy_cp -= 1;
 YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 174 "vtkParse.l"
-return(PROTECTED);
+#line 181 "vtkParse.l"
+{
+      yylval.str = (yytext[3] == ':' ? "std::size_t" : "size_t");
+      return(SIZE_T);
+    }
         YY_BREAK
 case 36:
 /* rule 36 can match eol */
@@ -2062,408 +2444,494 @@ case 36:
 (yy_c_buf_p) = yy_cp -= 1;
 YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 175 "vtkParse.l"
-return(PROTECTED);
+#line 186 "vtkParse.l"
+{
+      yylval.str = (yytext[3] == ':' ? "std::ssize_t" : "ssize_t");
+      return(SSIZE_T);
+    }
         YY_BREAK
 case 37:
+/* rule 37 can match eol */
+*yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
+(yy_c_buf_p) = yy_cp -= 1;
+YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 177 "vtkParse.l"
-return(CLASS);
+#line 191 "vtkParse.l"
+{
+      yylval.str = (yytext[3] == ':' ? "std::nullptr_t" : "nullptr_t");
+      return(NULLPTR_T);
+    }
         YY_BREAK
 case 38:
 YY_RULE_SETUP
-#line 178 "vtkParse.l"
-return(STRUCT);
+#line 196 "vtkParse.l"
+/* ignore the Q_OBJECT macro from Qt */
         YY_BREAK
 case 39:
+/* rule 39 can match eol */
+*yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
+(yy_c_buf_p) = yy_cp -= 1;
+YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 179 "vtkParse.l"
+#line 197 "vtkParse.l"
 return(PUBLIC);
         YY_BREAK
 case 40:
+/* rule 40 can match eol */
+*yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
+(yy_c_buf_p) = yy_cp -= 1;
+YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 180 "vtkParse.l"
+#line 198 "vtkParse.l"
 return(PRIVATE);
         YY_BREAK
 case 41:
+/* rule 41 can match eol */
+*yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
+(yy_c_buf_p) = yy_cp -= 1;
+YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 181 "vtkParse.l"
+#line 199 "vtkParse.l"
 return(PROTECTED);
         YY_BREAK
 case 42:
+/* rule 42 can match eol */
+*yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
+(yy_c_buf_p) = yy_cp -= 1;
+YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 182 "vtkParse.l"
-return(ENUM);
+#line 200 "vtkParse.l"
+return(PROTECTED);
         YY_BREAK
 case 43:
 YY_RULE_SETUP
-#line 183 "vtkParse.l"
-return(UNION);
+#line 202 "vtkParse.l"
+return(CLASS);
         YY_BREAK
 case 44:
 YY_RULE_SETUP
-#line 184 "vtkParse.l"
-return(VIRTUAL);
+#line 203 "vtkParse.l"
+return(STRUCT);
         YY_BREAK
 case 45:
 YY_RULE_SETUP
-#line 185 "vtkParse.l"
-return(CONST);
+#line 204 "vtkParse.l"
+return(PUBLIC);
         YY_BREAK
 case 46:
 YY_RULE_SETUP
-#line 186 "vtkParse.l"
-return(VOLATILE);
+#line 205 "vtkParse.l"
+return(PRIVATE);
         YY_BREAK
 case 47:
 YY_RULE_SETUP
-#line 187 "vtkParse.l"
-return(MUTABLE);
+#line 206 "vtkParse.l"
+return(PROTECTED);
         YY_BREAK
 case 48:
 YY_RULE_SETUP
-#line 188 "vtkParse.l"
-return(OPERATOR);
+#line 207 "vtkParse.l"
+return(ENUM);
         YY_BREAK
 case 49:
 YY_RULE_SETUP
-#line 189 "vtkParse.l"
-return(FRIEND);
+#line 208 "vtkParse.l"
+return(UNION);
         YY_BREAK
 case 50:
 YY_RULE_SETUP
-#line 190 "vtkParse.l"
-return(INLINE);
+#line 209 "vtkParse.l"
+return(VIRTUAL);
         YY_BREAK
 case 51:
 YY_RULE_SETUP
-#line 191 "vtkParse.l"
-return(STATIC);
+#line 210 "vtkParse.l"
+return(CONST);
         YY_BREAK
 case 52:
 YY_RULE_SETUP
-#line 192 "vtkParse.l"
-return(EXTERN);
+#line 211 "vtkParse.l"
+return(VOLATILE);
         YY_BREAK
 case 53:
 YY_RULE_SETUP
-#line 193 "vtkParse.l"
-return(TEMPLATE);
+#line 212 "vtkParse.l"
+return(MUTABLE);
         YY_BREAK
 case 54:
 YY_RULE_SETUP
-#line 194 "vtkParse.l"
-return(TYPENAME);
+#line 213 "vtkParse.l"
+return(OPERATOR);
         YY_BREAK
 case 55:
 YY_RULE_SETUP
-#line 195 "vtkParse.l"
-return(TYPEDEF);
+#line 214 "vtkParse.l"
+return(FRIEND);
         YY_BREAK
 case 56:
 YY_RULE_SETUP
-#line 196 "vtkParse.l"
-return(NAMESPACE);
+#line 215 "vtkParse.l"
+return(INLINE);
         YY_BREAK
 case 57:
 YY_RULE_SETUP
-#line 197 "vtkParse.l"
-return(USING);
+#line 216 "vtkParse.l"
+return(CONSTEXPR);
         YY_BREAK
 case 58:
 YY_RULE_SETUP
-#line 198 "vtkParse.l"
-return(NEW);
+#line 217 "vtkParse.l"
+return(STATIC);
         YY_BREAK
 case 59:
 YY_RULE_SETUP
-#line 199 "vtkParse.l"
-return(DELETE);
+#line 218 "vtkParse.l"
+return(THREAD_LOCAL);
         YY_BREAK
 case 60:
 YY_RULE_SETUP
-#line 200 "vtkParse.l"
-return(EXPLICIT);
+#line 219 "vtkParse.l"
+return(EXTERN);
         YY_BREAK
 case 61:
 YY_RULE_SETUP
-#line 201 "vtkParse.l"
-return(THROW);
+#line 220 "vtkParse.l"
+return(TEMPLATE);
         YY_BREAK
 case 62:
 YY_RULE_SETUP
-#line 203 "vtkParse.l"
-return(STATIC_CAST);
+#line 221 "vtkParse.l"
+return(TYPENAME);
         YY_BREAK
 case 63:
 YY_RULE_SETUP
-#line 204 "vtkParse.l"
-return(DYNAMIC_CAST);
+#line 222 "vtkParse.l"
+return(TYPEDEF);
         YY_BREAK
 case 64:
 YY_RULE_SETUP
-#line 205 "vtkParse.l"
-return(CONST_CAST);
+#line 223 "vtkParse.l"
+return(NAMESPACE);
         YY_BREAK
 case 65:
 YY_RULE_SETUP
-#line 206 "vtkParse.l"
-return(REINTERPRET_CAST);
+#line 224 "vtkParse.l"
+return(USING);
         YY_BREAK
 case 66:
 YY_RULE_SETUP
-#line 208 "vtkParse.l"
-/* irrelevant to wrappers */
+#line 225 "vtkParse.l"
+return(NEW);
         YY_BREAK
 case 67:
 YY_RULE_SETUP
-#line 209 "vtkParse.l"
-/* irrelevant to wrappers */
+#line 226 "vtkParse.l"
+return(DELETE);
         YY_BREAK
 case 68:
 YY_RULE_SETUP
-#line 211 "vtkParse.l"
-return(OP_LOGIC_AND);
+#line 227 "vtkParse.l"
+return(EXPLICIT);
         YY_BREAK
 case 69:
 YY_RULE_SETUP
-#line 212 "vtkParse.l"
-return(OP_AND_EQ);
+#line 228 "vtkParse.l"
+return(THROW);
         YY_BREAK
 case 70:
 YY_RULE_SETUP
-#line 213 "vtkParse.l"
-return(OP_LOGIC_OR);
+#line 229 "vtkParse.l"
+return(TRY);
         YY_BREAK
 case 71:
 YY_RULE_SETUP
-#line 214 "vtkParse.l"
-return(OP_OR_EQ);
+#line 230 "vtkParse.l"
+return(CATCH);
         YY_BREAK
 case 72:
 YY_RULE_SETUP
-#line 215 "vtkParse.l"
-return('!');
+#line 231 "vtkParse.l"
+return(NOEXCEPT);
         YY_BREAK
 case 73:
 YY_RULE_SETUP
-#line 216 "vtkParse.l"
-return(OP_LOGIC_NEQ);
+#line 232 "vtkParse.l"
+return(DECLTYPE);
         YY_BREAK
 case 74:
 YY_RULE_SETUP
-#line 217 "vtkParse.l"
-return('^');
+#line 233 "vtkParse.l"
+return(DEFAULT);
         YY_BREAK
 case 75:
 YY_RULE_SETUP
-#line 218 "vtkParse.l"
-return(OP_XOR_EQ);
+#line 235 "vtkParse.l"
+return(STATIC_CAST);
         YY_BREAK
 case 76:
 YY_RULE_SETUP
-#line 219 "vtkParse.l"
-return('&');
+#line 236 "vtkParse.l"
+return(DYNAMIC_CAST);
         YY_BREAK
 case 77:
 YY_RULE_SETUP
-#line 220 "vtkParse.l"
-return('|');
+#line 237 "vtkParse.l"
+return(CONST_CAST);
         YY_BREAK
 case 78:
 YY_RULE_SETUP
-#line 221 "vtkParse.l"
-return('~');
+#line 238 "vtkParse.l"
+return(REINTERPRET_CAST);
         YY_BREAK
 case 79:
 YY_RULE_SETUP
-#line 223 "vtkParse.l"
-return(IdType);
+#line 240 "vtkParse.l"
+/* irrelevant to wrappers */
         YY_BREAK
 case 80:
 YY_RULE_SETUP
-#line 224 "vtkParse.l"
-return(SetMacro);
+#line 242 "vtkParse.l"
+return(OP_LOGIC_AND);
         YY_BREAK
 case 81:
 YY_RULE_SETUP
-#line 225 "vtkParse.l"
-return(GetMacro);
+#line 243 "vtkParse.l"
+return(OP_AND_EQ);
         YY_BREAK
 case 82:
 YY_RULE_SETUP
-#line 226 "vtkParse.l"
-return(SetStringMacro);
+#line 244 "vtkParse.l"
+return(OP_LOGIC_OR);
         YY_BREAK
 case 83:
 YY_RULE_SETUP
-#line 227 "vtkParse.l"
-return(GetStringMacro);
+#line 245 "vtkParse.l"
+return(OP_OR_EQ);
         YY_BREAK
 case 84:
 YY_RULE_SETUP
-#line 228 "vtkParse.l"
-return(SetClampMacro);
+#line 246 "vtkParse.l"
+return('!');
         YY_BREAK
 case 85:
 YY_RULE_SETUP
-#line 229 "vtkParse.l"
-return(SetObjectMacro);
+#line 247 "vtkParse.l"
+return(OP_LOGIC_NEQ);
         YY_BREAK
 case 86:
 YY_RULE_SETUP
-#line 230 "vtkParse.l"
-return(GetObjectMacro);
+#line 248 "vtkParse.l"
+return('^');
         YY_BREAK
 case 87:
 YY_RULE_SETUP
-#line 231 "vtkParse.l"
-return(BooleanMacro);
+#line 249 "vtkParse.l"
+return(OP_XOR_EQ);
         YY_BREAK
 case 88:
 YY_RULE_SETUP
-#line 232 "vtkParse.l"
-return(SetVector2Macro);
+#line 250 "vtkParse.l"
+return('&');
         YY_BREAK
 case 89:
 YY_RULE_SETUP
-#line 233 "vtkParse.l"
-return(SetVector3Macro);
+#line 251 "vtkParse.l"
+return('|');
         YY_BREAK
 case 90:
 YY_RULE_SETUP
-#line 234 "vtkParse.l"
-return(SetVector4Macro);
+#line 252 "vtkParse.l"
+return('~');
         YY_BREAK
 case 91:
 YY_RULE_SETUP
-#line 235 "vtkParse.l"
-return(SetVector6Macro);
+#line 254 "vtkParse.l"
+return(IdType);
         YY_BREAK
 case 92:
 YY_RULE_SETUP
-#line 236 "vtkParse.l"
-return(GetVector2Macro);
+#line 255 "vtkParse.l"
+return(SetMacro);
         YY_BREAK
 case 93:
 YY_RULE_SETUP
-#line 237 "vtkParse.l"
-return(GetVector3Macro);
+#line 256 "vtkParse.l"
+return(GetMacro);
         YY_BREAK
 case 94:
 YY_RULE_SETUP
-#line 238 "vtkParse.l"
-return(GetVector4Macro);
+#line 257 "vtkParse.l"
+return(SetStringMacro);
         YY_BREAK
 case 95:
 YY_RULE_SETUP
-#line 239 "vtkParse.l"
-return(GetVector6Macro);
+#line 258 "vtkParse.l"
+return(GetStringMacro);
         YY_BREAK
 case 96:
 YY_RULE_SETUP
-#line 240 "vtkParse.l"
-return(SetVectorMacro);
+#line 259 "vtkParse.l"
+return(SetClampMacro);
         YY_BREAK
 case 97:
 YY_RULE_SETUP
-#line 241 "vtkParse.l"
-return(GetVectorMacro);
+#line 260 "vtkParse.l"
+return(SetObjectMacro);
         YY_BREAK
 case 98:
 YY_RULE_SETUP
-#line 242 "vtkParse.l"
-return(ViewportCoordinateMacro);
+#line 261 "vtkParse.l"
+return(GetObjectMacro);
         YY_BREAK
 case 99:
 YY_RULE_SETUP
-#line 243 "vtkParse.l"
-return(WorldCoordinateMacro);
+#line 262 "vtkParse.l"
+return(BooleanMacro);
         YY_BREAK
 case 100:
 YY_RULE_SETUP
-#line 244 "vtkParse.l"
-return(TypeMacro);
+#line 263 "vtkParse.l"
+return(SetVector2Macro);
         YY_BREAK
 case 101:
 YY_RULE_SETUP
-#line 245 "vtkParse.l"
-return(TypeMacro);
+#line 264 "vtkParse.l"
+return(SetVector3Macro);
         YY_BREAK
 case 102:
 YY_RULE_SETUP
-#line 246 "vtkParse.l"
-return(TypeMacro);
+#line 265 "vtkParse.l"
+return(SetVector4Macro);
         YY_BREAK
 case 103:
 YY_RULE_SETUP
-#line 247 "vtkParse.l"
-return(TypeMacro);
+#line 266 "vtkParse.l"
+return(SetVector6Macro);
         YY_BREAK
 case 104:
 YY_RULE_SETUP
-#line 248 "vtkParse.l"
-;
+#line 267 "vtkParse.l"
+return(GetVector2Macro);
         YY_BREAK
 case 105:
 YY_RULE_SETUP
-#line 249 "vtkParse.l"
-return(VTK_BYTE_SWAP_DECL);
+#line 268 "vtkParse.l"
+return(GetVector3Macro);
         YY_BREAK
 case 106:
 YY_RULE_SETUP
-#line 250 "vtkParse.l"
-return(TypeInt8);
+#line 269 "vtkParse.l"
+return(GetVector4Macro);
         YY_BREAK
 case 107:
 YY_RULE_SETUP
-#line 251 "vtkParse.l"
+#line 270 "vtkParse.l"
+return(GetVector6Macro);
+        YY_BREAK
+case 108:
+YY_RULE_SETUP
+#line 271 "vtkParse.l"
+return(SetVectorMacro);
+        YY_BREAK
+case 109:
+YY_RULE_SETUP
+#line 272 "vtkParse.l"
+return(GetVectorMacro);
+        YY_BREAK
+case 110:
+YY_RULE_SETUP
+#line 273 "vtkParse.l"
+return(ViewportCoordinateMacro);
+        YY_BREAK
+case 111:
+YY_RULE_SETUP
+#line 274 "vtkParse.l"
+return(WorldCoordinateMacro);
+        YY_BREAK
+case 112:
+YY_RULE_SETUP
+#line 275 "vtkParse.l"
+return(TypeMacro);
+        YY_BREAK
+case 113:
+YY_RULE_SETUP
+#line 276 "vtkParse.l"
+return(TypeMacro);
+        YY_BREAK
+case 114:
+YY_RULE_SETUP
+#line 277 "vtkParse.l"
+return(TypeMacro);
+        YY_BREAK
+case 115:
+YY_RULE_SETUP
+#line 278 "vtkParse.l"
+return(TypeMacro);
+        YY_BREAK
+case 116:
+YY_RULE_SETUP
+#line 279 "vtkParse.l"
+;
+        YY_BREAK
+case 117:
+YY_RULE_SETUP
+#line 280 "vtkParse.l"
+return(VTK_BYTE_SWAP_DECL);
+        YY_BREAK
+case 118:
+YY_RULE_SETUP
+#line 281 "vtkParse.l"
+return(TypeInt8);
+        YY_BREAK
+case 119:
+YY_RULE_SETUP
+#line 282 "vtkParse.l"
 return(TypeUInt8);
         YY_BREAK
-case 108:
+case 120:
 YY_RULE_SETUP
-#line 252 "vtkParse.l"
+#line 283 "vtkParse.l"
 return(TypeInt16);
         YY_BREAK
-case 109:
+case 121:
 YY_RULE_SETUP
-#line 253 "vtkParse.l"
+#line 284 "vtkParse.l"
 return(TypeUInt16);
         YY_BREAK
-case 110:
+case 122:
 YY_RULE_SETUP
-#line 254 "vtkParse.l"
+#line 285 "vtkParse.l"
 return(TypeInt32);
         YY_BREAK
-case 111:
+case 123:
 YY_RULE_SETUP
-#line 255 "vtkParse.l"
+#line 286 "vtkParse.l"
 return(TypeUInt32);
         YY_BREAK
-case 112:
+case 124:
 YY_RULE_SETUP
-#line 256 "vtkParse.l"
+#line 287 "vtkParse.l"
 return(TypeInt64);
         YY_BREAK
-case 113:
+case 125:
 YY_RULE_SETUP
-#line 257 "vtkParse.l"
+#line 288 "vtkParse.l"
 return(TypeUInt64);
         YY_BREAK
-case 114:
+case 126:
 YY_RULE_SETUP
-#line 258 "vtkParse.l"
+#line 289 "vtkParse.l"
 return(TypeFloat32);
         YY_BREAK
-case 115:
+case 127:
 YY_RULE_SETUP
-#line 259 "vtkParse.l"
+#line 290 "vtkParse.l"
 return(TypeFloat64);
         YY_BREAK
-case 116:
-/* rule 116 can match eol */
+case 128:
+/* rule 128 can match eol */
 YY_RULE_SETUP
-#line 261 "vtkParse.l"
+#line 292 "vtkParse.l"
 {
       size_t i = 1;
       size_t j;
@@ -2475,10 +2943,10 @@ YY_RULE_SETUP
       return(LA);
     }
         YY_BREAK
-case 117:
-/* rule 117 can match eol */
+case 129:
+/* rule 129 can match eol */
 YY_RULE_SETUP
-#line 272 "vtkParse.l"
+#line 303 "vtkParse.l"
 {
       size_t i = 1;
       size_t j;
@@ -2490,129 +2958,156 @@ YY_RULE_SETUP
       return(LP);
     }
         YY_BREAK
-case 118:
-/* rule 118 can match eol */
+case 130:
+/* rule 130 can match eol */
 YY_RULE_SETUP
-#line 283 "vtkParse.l"
+#line 314 "vtkParse.l"
 {
       yylval.str = "";
       return(LP);
     }
         YY_BREAK
-case 119:
-/* rule 119 can match eol */
+case 131:
+/* rule 131 can match eol */
 YY_RULE_SETUP
-#line 288 "vtkParse.l"
+#line 319 "vtkParse.l"
 {
       yylval.str = "";
       return(LP);
     }
         YY_BREAK
-case 120:
-/* rule 120 can match eol */
+case 132:
+/* rule 132 can match eol */
 YY_RULE_SETUP
-#line 293 "vtkParse.l"
+#line 324 "vtkParse.l"
 {
       yylval.str = "";
       return(LP);
     }
         YY_BREAK
-case 121:
+case 133:
 YY_RULE_SETUP
-#line 298 "vtkParse.l"
+#line 329 "vtkParse.l"
 return('*');
         YY_BREAK
-case 122:
+case 134:
 YY_RULE_SETUP
-#line 300 "vtkParse.l"
+#line 331 "vtkParse.l"
 /* misc unused win32 macros */
         YY_BREAK
-case 123:
-/* rule 123 can match eol */
+case 135:
+/* rule 135 can match eol */
 *yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
 (yy_c_buf_p) = yy_cp -= 1;
 YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 302 "vtkParse.l"
+#line 333 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(OSTREAM);
     }
         YY_BREAK
-case 124:
-/* rule 124 can match eol */
+case 136:
+/* rule 136 can match eol */
 *yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
 (yy_c_buf_p) = yy_cp -= 1;
 YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 307 "vtkParse.l"
+#line 338 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(ISTREAM);
     }
         YY_BREAK
-case 125:
-/* rule 125 can match eol */
+case 137:
+/* rule 137 can match eol */
 *yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
 (yy_c_buf_p) = yy_cp -= 1;
 YY_DO_BEFORE_ACTION; /* set up yytext again */
 YY_RULE_SETUP
-#line 312 "vtkParse.l"
+#line 343 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(StdString);
     }
         YY_BREAK
-case 126:
+case 138:
 YY_RULE_SETUP
-#line 317 "vtkParse.l"
+#line 348 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(StdString);
     }
         YY_BREAK
-case 127:
+case 139:
 YY_RULE_SETUP
-#line 322 "vtkParse.l"
+#line 353 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(UnicodeString);
     }
         YY_BREAK
-case 128:
+case 140:
 YY_RULE_SETUP
-#line 327 "vtkParse.l"
+#line 358 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
-      return(VTK_ID);
+      return(QT_ID);
     }
         YY_BREAK
-case 129:
+case 141:
 YY_RULE_SETUP
-#line 332 "vtkParse.l"
-{
-      yylval.str = vtkstrndup(yytext, yyleng);
-      return(QT_ID);
-    }
+#line 363 "vtkParse.l"
+get_macro_arguments(); /* C++11 */
         YY_BREAK
-case 130:
+case 142:
+YY_RULE_SETUP
+#line 365 "vtkParse.l"
+get_macro_arguments(); /* C++11 */
+        YY_BREAK
+case 143:
+YY_RULE_SETUP
+#line 367 "vtkParse.l"
+get_macro_arguments(); /* C11 */
+        YY_BREAK
+case 144:
+YY_RULE_SETUP
+#line 369 "vtkParse.l"
+get_macro_arguments(); /* C11 */
+        YY_BREAK
+case 145:
+YY_RULE_SETUP
+#line 371 "vtkParse.l"
+return(THREAD_LOCAL); /* C11 */
+        YY_BREAK
+case 146:
+YY_RULE_SETUP
+#line 373 "vtkParse.l"
+/* C11 */
+        YY_BREAK
+case 147:
+YY_RULE_SETUP
+#line 375 "vtkParse.l"
+/* C11 */
+        YY_BREAK
+case 148:
 YY_RULE_SETUP
-#line 337 "vtkParse.l"
+#line 377 "vtkParse.l"
 get_macro_arguments(); /* gcc attributes */
         YY_BREAK
-case 131:
+case 149:
 YY_RULE_SETUP
-#line 339 "vtkParse.l"
+#line 379 "vtkParse.l"
 get_macro_arguments(); /* Windows linkage */
         YY_BREAK
-case 132:
+case 150:
 YY_RULE_SETUP
-#line 341 "vtkParse.l"
+#line 381 "vtkParse.l"
 /* Windows */
         YY_BREAK
-case 133:
+case 151:
 YY_RULE_SETUP
-#line 343 "vtkParse.l"
+#line 383 "vtkParse.l"
 {
       const char *args = get_macro_arguments();
       if (args && args[0] != '\0' && args[1] != '\0')
@@ -2628,9 +3123,17 @@ YY_RULE_SETUP
       free((char *)args);
     }
         YY_BREAK
-case 134:
+case 152:
 YY_RULE_SETUP
-#line 358 "vtkParse.l"
+#line 398 "vtkParse.l"
+{
+      yylval.str = vtkstrndup(yytext, yyleng);
+      return(NULLPTR);
+    }
+        YY_BREAK
+case 153:
+YY_RULE_SETUP
+#line 403 "vtkParse.l"
 {
       const char *name = vtkstrndup(yytext, yyleng);
       MacroInfo *macro = vtkParsePreprocess_GetMacro(preprocessor, name);
@@ -2657,6 +3160,7 @@ YY_RULE_SETUP
             if (!emacro)
               {
               print_preprocessor_error(VTK_PARSE_MACRO_NUMARGS, NULL, 0);
+              exit(1);
               }
             free((char *)args);
             }
@@ -2678,6 +3182,7 @@ YY_RULE_SETUP
             if (!emacro)
               {
               print_preprocessor_error(r, NULL, 0);
+              exit(1);
               }
             }
           }
@@ -2715,234 +3220,268 @@ YY_RULE_SETUP
         }
     }
         YY_BREAK
-case 135:
+case 154:
 YY_RULE_SETUP
-#line 442 "vtkParse.l"
+#line 489 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(FLOAT_LITERAL);
     }
         YY_BREAK
-case 136:
+case 155:
 YY_RULE_SETUP
-#line 447 "vtkParse.l"
+#line 494 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(FLOAT_LITERAL);
     }
         YY_BREAK
-case 137:
+case 156:
 YY_RULE_SETUP
-#line 452 "vtkParse.l"
+#line 499 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(FLOAT_LITERAL);
     }
         YY_BREAK
-case 138:
+case 157:
 YY_RULE_SETUP
-#line 457 "vtkParse.l"
+#line 504 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(HEX_LITERAL);
     }
         YY_BREAK
-case 139:
+case 158:
 YY_RULE_SETUP
-#line 462 "vtkParse.l"
+#line 509 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(OCT_LITERAL);
     }
         YY_BREAK
-case 140:
+case 159:
 YY_RULE_SETUP
-#line 467 "vtkParse.l"
+#line 514 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(INT_LITERAL);
     }
         YY_BREAK
-case 141:
+case 160:
 YY_RULE_SETUP
-#line 472 "vtkParse.l"
+#line 519 "vtkParse.l"
 {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(ZERO);
     }
         YY_BREAK
-case 142:
-/* rule 142 can match eol */
+case 161:
+/* rule 161 can match eol */
 YY_RULE_SETUP
-#line 477 "vtkParse.l"
+#line 524 "vtkParse.l"
 /* escaped newlines */
         YY_BREAK
-case 143:
+case 162:
 YY_RULE_SETUP
-#line 478 "vtkParse.l"
+#line 525 "vtkParse.l"
 /* whitespace */
         YY_BREAK
-case 144:
-/* rule 144 can match eol */
+case 163:
+/* rule 163 can match eol */
 YY_RULE_SETUP
-#line 479 "vtkParse.l"
+#line 526 "vtkParse.l"
 /* whitespace */
         YY_BREAK
-case 145:
+case 164:
+/* rule 164 can match eol */
 YY_RULE_SETUP
-#line 481 "vtkParse.l"
+#line 528 "vtkParse.l"
+return(BEGIN_ATTRIB);
+        YY_BREAK
+case 165:
+YY_RULE_SETUP
+#line 530 "vtkParse.l"
+return('{');
+        YY_BREAK
+case 166:
+YY_RULE_SETUP
+#line 531 "vtkParse.l"
+return('}');
+        YY_BREAK
+case 167:
+YY_RULE_SETUP
+#line 532 "vtkParse.l"
+return('[');
+        YY_BREAK
+case 168:
+YY_RULE_SETUP
+#line 533 "vtkParse.l"
+return(']');
+        YY_BREAK
+case 169:
+YY_RULE_SETUP
+#line 534 "vtkParse.l"
+return('#');
+        YY_BREAK
+case 170:
+*yy_cp = (yy_hold_char); /* undo effects of setting up yytext */
+(yy_c_buf_p) = yy_cp = yy_bp + 1;
+YY_DO_BEFORE_ACTION; /* set up yytext again */
+YY_RULE_SETUP
+#line 536 "vtkParse.l"
+return(OP_RSHIFT_A);
+        YY_BREAK
+case 171:
+YY_RULE_SETUP
+#line 538 "vtkParse.l"
 return(OP_LSHIFT_EQ);
         YY_BREAK
-case 146:
+case 172:
 YY_RULE_SETUP
-#line 482 "vtkParse.l"
+#line 539 "vtkParse.l"
 return(OP_RSHIFT_EQ);
         YY_BREAK
-case 147:
+case 173:
 YY_RULE_SETUP
-#line 483 "vtkParse.l"
+#line 540 "vtkParse.l"
 return(OP_LSHIFT);
         YY_BREAK
-case 148:
+case 174:
 YY_RULE_SETUP
-#line 484 "vtkParse.l"
+#line 541 "vtkParse.l"
 return(OP_DOT_POINTER);
         YY_BREAK
-case 149:
+case 175:
 YY_RULE_SETUP
-#line 485 "vtkParse.l"
+#line 542 "vtkParse.l"
 return(OP_ARROW_POINTER);
         YY_BREAK
-case 150:
+case 176:
 YY_RULE_SETUP
-#line 486 "vtkParse.l"
+#line 543 "vtkParse.l"
 return(OP_ARROW);
         YY_BREAK
-case 151:
-YY_RULE_SETUP
-#line 487 "vtkParse.l"
-return(OP_RSHIFT);
-        YY_BREAK
-case 152:
+case 177:
 YY_RULE_SETUP
-#line 488 "vtkParse.l"
+#line 544 "vtkParse.l"
 return(OP_INCR);
         YY_BREAK
-case 153:
+case 178:
 YY_RULE_SETUP
-#line 489 "vtkParse.l"
+#line 545 "vtkParse.l"
 return(OP_DECR);
         YY_BREAK
-case 154:
+case 179:
 YY_RULE_SETUP
-#line 490 "vtkParse.l"
+#line 546 "vtkParse.l"
 return(OP_PLUS_EQ);
         YY_BREAK
-case 155:
+case 180:
 YY_RULE_SETUP
-#line 491 "vtkParse.l"
+#line 547 "vtkParse.l"
 return(OP_MINUS_EQ);
         YY_BREAK
-case 156:
+case 181:
 YY_RULE_SETUP
-#line 492 "vtkParse.l"
+#line 548 "vtkParse.l"
 return(OP_TIMES_EQ);
         YY_BREAK
-case 157:
+case 182:
 YY_RULE_SETUP
-#line 493 "vtkParse.l"
+#line 549 "vtkParse.l"
 return(OP_DIVIDE_EQ);
         YY_BREAK
-case 158:
+case 183:
 YY_RULE_SETUP
-#line 494 "vtkParse.l"
+#line 550 "vtkParse.l"
 return(OP_REMAINDER_EQ);
         YY_BREAK
-case 159:
+case 184:
 YY_RULE_SETUP
-#line 495 "vtkParse.l"
+#line 551 "vtkParse.l"
 return(OP_AND_EQ);
         YY_BREAK
-case 160:
+case 185:
 YY_RULE_SETUP
-#line 496 "vtkParse.l"
+#line 552 "vtkParse.l"
 return(OP_OR_EQ);
         YY_BREAK
-case 161:
+case 186:
 YY_RULE_SETUP
-#line 497 "vtkParse.l"
+#line 553 "vtkParse.l"
 return(OP_XOR_EQ);
         YY_BREAK
-case 162:
+case 187:
 YY_RULE_SETUP
-#line 498 "vtkParse.l"
+#line 554 "vtkParse.l"
 return(OP_LOGIC_AND);
         YY_BREAK
-case 163:
+case 188:
 YY_RULE_SETUP
-#line 499 "vtkParse.l"
+#line 555 "vtkParse.l"
 return(OP_LOGIC_OR);
         YY_BREAK
-case 164:
+case 189:
 YY_RULE_SETUP
-#line 500 "vtkParse.l"
+#line 556 "vtkParse.l"
 return(OP_LOGIC_EQ);
         YY_BREAK
-case 165:
+case 190:
 YY_RULE_SETUP
-#line 501 "vtkParse.l"
+#line 557 "vtkParse.l"
 return(OP_LOGIC_NEQ);
         YY_BREAK
-case 166:
+case 191:
 YY_RULE_SETUP
-#line 502 "vtkParse.l"
+#line 558 "vtkParse.l"
 return(OP_LOGIC_LEQ);
         YY_BREAK
-case 167:
+case 192:
 YY_RULE_SETUP
-#line 503 "vtkParse.l"
+#line 559 "vtkParse.l"
 return(OP_LOGIC_GEQ);
         YY_BREAK
-case 168:
+case 193:
 YY_RULE_SETUP
-#line 504 "vtkParse.l"
+#line 560 "vtkParse.l"
 return(ELLIPSIS);
         YY_BREAK
-case 169:
+case 194:
 YY_RULE_SETUP
-#line 505 "vtkParse.l"
+#line 561 "vtkParse.l"
 return(DOUBLE_COLON);
         YY_BREAK
-case 170:
+case 195:
 YY_RULE_SETUP
-#line 507 "vtkParse.l"
+#line 563 "vtkParse.l"
 return('[');
         YY_BREAK
-case 171:
+case 196:
 YY_RULE_SETUP
-#line 508 "vtkParse.l"
+#line 564 "vtkParse.l"
 return(']');
         YY_BREAK
-case 172:
+case 197:
 YY_RULE_SETUP
-#line 510 "vtkParse.l"
+#line 566 "vtkParse.l"
 return(yytext[0]);
         YY_BREAK
 case YY_STATE_EOF(INITIAL):
-#line 512 "vtkParse.l"
+#line 568 "vtkParse.l"
 { if (!pop_buffer()) { yyterminate(); } }
         YY_BREAK
-case 173:
+case 198:
 YY_RULE_SETUP
-#line 514 "vtkParse.l"
+#line 570 "vtkParse.l"
 { return(OTHER); }
         YY_BREAK
-case 174:
+case 199:
 YY_RULE_SETUP
-#line 516 "vtkParse.l"
+#line 572 "vtkParse.l"
 YY_FATAL_ERROR( "flex scanner jammed" );
         YY_BREAK
-#line 2946 "lex.yy.c"
+#line 3481 "lex.yy.c"
 
         case YY_END_OF_BUFFER:
                 {
@@ -3082,9 +3621,9 @@ YY_FATAL_ERROR( "flex scanner jammed" );
  */
 static int yy_get_next_buffer (void)
 {
-            register char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf;
-        register char *source = (yytext_ptr);
-        register int number_to_move, i;
+            char *dest = YY_CURRENT_BUFFER_LVALUE->yy_ch_buf;
+        char *source = (yytext_ptr);
+        int number_to_move, i;
         int ret_val;
 
         if ( (yy_c_buf_p) > &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[(yy_n_chars) + 1] )
@@ -3133,7 +3672,7 @@ static int yy_get_next_buffer (void)
                         { /* Not enough room in the buffer - grow it. */
 
                         /* just a shorter name for the current buffer */
-                        YY_BUFFER_STATE b = YY_CURRENT_BUFFER;
+                        YY_BUFFER_STATE b = YY_CURRENT_BUFFER_LVALUE;
 
                         int yy_c_buf_p_offset =
                                 (int) ((yy_c_buf_p) - b->yy_ch_buf);
@@ -3216,15 +3755,15 @@ static int yy_get_next_buffer (void)
 
     static yy_state_type yy_get_previous_state (void)
 {
-        register yy_state_type yy_current_state;
-        register char *yy_cp;
+        yy_state_type yy_current_state;
+        char *yy_cp;
 
         yy_current_state = (yy_start);
         yy_current_state += YY_AT_BOL();
 
         for ( yy_cp = (yytext_ptr) + YY_MORE_ADJ; yy_cp < (yy_c_buf_p); ++yy_cp )
                 {
-                register YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
+                YY_CHAR yy_c = (*yy_cp ? yy_ec[YY_SC_TO_UI(*yy_cp)] : 1);
                 if ( yy_accept[yy_current_state] )
                         {
                         (yy_last_accepting_state) = yy_current_state;
@@ -3233,7 +3772,7 @@ static int yy_get_next_buffer (void)
                 while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
                         {
                         yy_current_state = (int) yy_def[yy_current_state];
-                        if ( yy_current_state >= 1150 )
+                        if ( yy_current_state >= 1339 )
                                 yy_c = yy_meta[(unsigned int) yy_c];
                         }
                 yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
@@ -3249,10 +3788,10 @@ static int yy_get_next_buffer (void)
  */
     static yy_state_type yy_try_NUL_trans  (yy_state_type yy_current_state )
 {
-        register int yy_is_jam;
-            register char *yy_cp = (yy_c_buf_p);
+        int yy_is_jam;
+            char *yy_cp = (yy_c_buf_p);
 
-        register YY_CHAR yy_c = 1;
+        YY_CHAR yy_c = 1;
         if ( yy_accept[yy_current_state] )
                 {
                 (yy_last_accepting_state) = yy_current_state;
@@ -3261,18 +3800,18 @@ static int yy_get_next_buffer (void)
         while ( yy_chk[yy_base[yy_current_state] + yy_c] != yy_current_state )
                 {
                 yy_current_state = (int) yy_def[yy_current_state];
-                if ( yy_current_state >= 1150 )
+                if ( yy_current_state >= 1339 )
                         yy_c = yy_meta[(unsigned int) yy_c];
                 }
         yy_current_state = yy_nxt[yy_base[yy_current_state] + (unsigned int) yy_c];
-        yy_is_jam = (yy_current_state == 1149);
+        yy_is_jam = (yy_current_state == 1338);
 
-        return yy_is_jam ? 0 : yy_current_state;
+                return yy_is_jam ? 0 : yy_current_state;
 }
 
-    static void yyunput (int c, register char * yy_bp )
+    static void yyunput (int c, char * yy_bp )
 {
-        register char *yy_cp;
+        char *yy_cp;
 
     yy_cp = (yy_c_buf_p);
 
@@ -3282,10 +3821,10 @@ static int yy_get_next_buffer (void)
         if ( yy_cp < YY_CURRENT_BUFFER_LVALUE->yy_ch_buf + 2 )
                 { /* need to shift things up to make room */
                 /* +2 for EOB chars. */
-                register yy_size_t number_to_move = (yy_n_chars) + 2;
-                register char *dest = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[
+                yy_size_t number_to_move = (yy_n_chars) + 2;
+                char *dest = &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[
                                         YY_CURRENT_BUFFER_LVALUE->yy_buf_size + 2];
-                register char *source =
+                char *source =
                                 &YY_CURRENT_BUFFER_LVALUE->yy_ch_buf[number_to_move];
 
                 while ( source > YY_CURRENT_BUFFER_LVALUE->yy_ch_buf )
@@ -3359,7 +3898,7 @@ static int yy_get_next_buffer (void)
                                 case EOB_ACT_END_OF_FILE:
                                         {
                                         if ( yywrap( ) )
-                                                return 0;
+                                                return EOF;
 
                                         if ( ! (yy_did_buffer_switch_on_eof) )
                                                 YY_NEW_FILE;
@@ -3501,10 +4040,6 @@ static void yy_load_buffer_state  (void)
         yyfree((void *) b  );
 }
 
-#ifndef __cplusplus
-extern int isatty (int );
-#endif /* __cplusplus */
-
 /* Initializes or reinitializes a buffer.
  * This function is sometimes called more than once on the same buffer,
  * such as during a yyrestart() or at EOF.
@@ -3718,7 +4253,8 @@ YY_BUFFER_STATE yy_scan_bytes  (yyconst char * yybytes, yy_size_t  _yybytes_len
 {
         YY_BUFFER_STATE b;
         char *buf;
-        yy_size_t n, i;
+        yy_size_t n;
+        yy_size_t i;
 
         /* Get memory for full buffer, including space for trailing EOB's. */
         n = _yybytes_len + 2;
@@ -3815,6 +4351,7 @@ char *yyget_text  (void)
 }
 
 /** Set the current line number.
+ * @param line_number
  *
  */
 void yyset_lineno (int  line_number )
@@ -3909,7 +4446,7 @@ int yylex_destroy  (void)
 #ifndef yytext_ptr
 static void yy_flex_strncpy (char* s1, yyconst char * s2, int n )
 {
-        register int i;
+        int i;
         for ( i = 0; i < n; ++i )
                 s1[i] = s2[i];
 }
@@ -3918,7 +4455,7 @@ static void yy_flex_strncpy (char* s1, yyconst char * s2, int n )
 #ifdef YY_NEED_STRLEN
 static int yy_flex_strlen (yyconst char * s )
 {
-        register int n;
+        int n;
         for ( n = 0; s[n]; ++n )
                 ;
 
@@ -3950,7 +4487,7 @@ void yyfree (void * ptr )
 
 #define YYTABLES_NAME "yytables"
 
-#line 516 "vtkParse.l"
+#line 572 "vtkParse.l"
 
 
 
@@ -4116,14 +4653,16 @@ const char *get_macro_arguments()
  */
 int skip_comment()
 {
+   int savelineno = yylineno;
    int c1 = 0, c2 = input();
    for (;;)
      {
-     if (c2 == 0)
+     if (c2 == 0 || c2 == EOF)
        {
+       yylineno = savelineno;
        print_preprocessor_error(VTK_PARSE_SYNTAX_ERROR,
          "Cannot find end of comment.", 27);
-       return 0;
+       exit(1);
        }
      if (c1 == '*' && c2 == '/') break;
      c1 = c2; c2 = input();
@@ -4210,7 +4749,7 @@ int skip_to_next_directive()
 
   c = input();
 
-  while (c != 0)
+  while (c != 0 && c != EOF)
     {
     /* whitespace */
     if (c == ' ' || c == '\t')
@@ -4247,7 +4786,7 @@ int skip_to_next_directive()
           c = input();
           }
         }
-      else if (c != 0)
+      else if (c != 0 && c != EOF)
         {
         c = input();
         }
@@ -4294,7 +4833,7 @@ int skip_conditional_block()
       {
       c = input();
       }
-    if (c == 0)
+    if (c == 0 || c == EOF)
       {
       return 0;
       }
@@ -4302,7 +4841,7 @@ int skip_conditional_block()
     /* eat the whole line */
     i = 0;
     linebuf[i++] = '#';
-    while (c != 0 && c != '\n')
+    while (c != 0 && c != EOF && c != '\n')
       {
       if (i >= linemaxlen-5)
         {
@@ -4346,7 +4885,7 @@ int skip_conditional_block()
 int skip_ahead_multi(const char *strings[])
 {
   char textbuf[SKIP_MATCH_MAXLEN+1];
-  char c = 0;
+  int c = 0;
   size_t i;
 
   for (i = 0; i < (SKIP_MATCH_MAXLEN+1); i++)
@@ -4361,7 +4900,8 @@ int skip_ahead_multi(const char *strings[])
       textbuf[i] = textbuf[i+1];
       }
 
-    if ((c = (char)input()) == '\0')
+    c = input();
+    if (c == 0 || c == EOF)
       {
       print_preprocessor_error(VTK_PARSE_SYNTAX_ERROR, NULL, 0);
       return 0;
@@ -4383,9 +4923,9 @@ int skip_ahead_multi(const char *strings[])
       }
     }
 
-  while (c != '\0' && c != '\n')
+  while (c != 0 && c != EOF && c != '\n')
     {
-    c = (char)input();
+    c = input();
     }
 
   return 1;
@@ -4404,6 +4944,123 @@ int skip_ahead_until(const char *text)
 }
 
 /*
+ * Convert a raw string into a non-raw string.
+ */
+const char *raw_string(const char *begin)
+{
+  int savelineno = yylineno;
+  char *textbuf;
+  int c = 0;
+  const char *delim;
+  const char *cp = begin;
+  char *dp;
+  char *result;
+  size_t i, j, n;
+  size_t m = 1024;
+
+  result = (char *)malloc(m);
+  dp = result;
+
+  while (*cp != '\"')
+    {
+    *dp++ = *cp++;
+    }
+  --dp;
+  *dp++ = *cp++;
+
+  delim = cp;
+
+  for (n = 0;; n++)
+    {
+    if (delim[n] == '(') { break; }
+    }
+
+  textbuf = (char *)malloc(n+1);
+
+  for (i = 0; i < n+1; i++)
+    {
+    c = input();
+    textbuf[i] = c;
+    }
+
+  while (c != EOF)
+    {
+    if (textbuf[0] == ')' && (n == 0 || strncmp(&textbuf[1], delim, n) == 0))
+      {
+      break;
+      }
+
+    j = dp - result;
+    if (j > m - 8)
+      {
+      m += 1024;
+      result = (char *)realloc(result, m);
+      dp = result + j;
+      }
+
+    if ((*textbuf >= ' ' && *textbuf <= '~') ||
+        (*textbuf & 0x80) != 0)
+      {
+      *dp++ = *textbuf;
+      }
+    else switch (*textbuf)
+      {
+      case '\a': *dp++ = '\\'; *dp++ = 'a'; break;
+      case '\b': *dp++ = '\\'; *dp++ = 'b'; break;
+      case '\f': *dp++ = '\\'; *dp++ = 'f'; break;
+      case '\n': *dp++ = '\\'; *dp++ = 'n'; break;
+      case '\r': *dp++ = '\\'; *dp++ = 'r'; break;
+      case '\t': *dp++ = '\\'; *dp++ = 't'; break;
+      case '\v': *dp++ = '\\'; *dp++ = 'v'; break;
+      case '\\': *dp++ = '\\'; *dp++ = '\\'; break;
+      case '\'': *dp++ = '\\'; *dp++ = '\''; break;
+      case '\"': *dp++ = '\\'; *dp++ = '\"'; break;
+      default:
+        sprintf(dp, "\\%3.3o", *textbuf);
+        dp += 4;
+        break;
+      }
+
+    for (i = 0; i < n; i++)
+      {
+      textbuf[i] = textbuf[i+1];
+      }
+
+    c = input();
+    textbuf[n] = c;
+    }
+
+  if (c == EOF || '\"' != input())
+    {
+    yylineno = savelineno;
+    print_preprocessor_error(VTK_PARSE_SYNTAX_ERROR,
+      "Unterminated raw string.", 24);
+    exit(1);
+    }
+
+  *dp++ = '\"';
+
+  c = input();
+  if (c == '_')
+    {
+    do
+      {
+      *dp++ = c;
+      c = input();
+      }
+    while (vtkParse_CharType(c, CPRE_IDGIT));
+    }
+  unput(c);
+
+  *dp = '\0';
+  cp = vtkstrdup(result);
+  free(result);
+  free(textbuf);
+
+  return cp;
+}
+
+/*
  * buffer stack, used for macro expansion and include files
  */
 static size_t buffer_stack_size = 0;
@@ -4651,6 +5308,11 @@ void print_parser_error(const char *text, const char *cp, size_t n)
   size_t j = 0;
   const char *fn = "(none)";
 
+  if (CommandName)
+    {
+    fprintf(yyout, "%s: ", CommandName);
+    }
+
   if (data->FileName)
     {
     fn = data->FileName;
@@ -4661,15 +5323,19 @@ void print_parser_error(const char *text, const char *cp, size_t n)
     fprintf(yyout, "%i:\nIn %s:",
             lineno_stack[j], include_stack[j]->FileName);
     }
-  fprintf(yyout, "%i:\n", yylineno);
+  fprintf(yyout, "%i:", yylineno);
 
   if (cp)
     {
-    fprintf(yyout, "%s: %*.*s\n", text, (int)n, (int)n, cp);
+    fprintf(yyout, " %s: %*.*s\n", text, (int)n, (int)n, cp);
     }
   else if (text)
     {
-    fprintf(yyout, "%s.\n", text);
+    fprintf(yyout, " %s.\n", text);
+    }
+  else
+    {
+    fprintf(yyout, "\n");
     }
 }
 
@@ -4730,6 +5396,10 @@ void preprocessor_directive(const char *text, size_t l)
   else if (result != VTK_PARSE_OK)
     {
     print_preprocessor_error(result, text, l);
+    if ((result & VTK_PARSE_FATAL_ERROR) != 0)
+      {
+      exit(1);
+      }
     }
   else if (n == 6 && strncmp(directive, "define", n) == 0 &&
            ep - cp > 4 && strncmp(cp, "VTK", 3) == 0)
diff --git a/Wrapping/Tools/vtkParse.h b/Wrapping/Tools/vtkParse.h
index d81a24e..b51ab03 100644
--- a/Wrapping/Tools/vtkParse.h
+++ b/Wrapping/Tools/vtkParse.h
@@ -55,6 +55,11 @@ const char *vtkParse_FindIncludeFile(const char *filename);
 void vtkParse_SetIgnoreBTX(int option);
 
 /**
+ * Set the command name, for error reporting and diagnostics.
+ */
+void vtkParse_SetCommandName(const char *name);
+
+/**
  * Parse a header file and return a FileInfo struct
  */
 FileInfo *vtkParse_ParseFile(
diff --git a/Wrapping/Tools/vtkParse.l b/Wrapping/Tools/vtkParse.l
index e09bb13..7ab58b3 100644
--- a/Wrapping/Tools/vtkParse.l
+++ b/Wrapping/Tools/vtkParse.l
@@ -54,6 +54,8 @@ static int skip_ahead_until(const char *text);
 static int skip_to_next_directive();
 static int skip_conditional_block();
 
+static const char *raw_string(const char *begin);
+
 static void preprocessor_directive(const char *text, size_t l);
 static void print_preprocessor_error(int result, const char *cp, size_t n);
 static const char *get_macro_arguments();
@@ -72,7 +74,7 @@ static int in_macro();
 
 WS [\t\n\r ]+
 PS [\t\n\r ]*
-ID [a-zA-Z_][0-9a-zA-Z_]*
+ID [a-zA-Z_\x80-\xff][0-9a-zA-Z_\x80-\xff]*
 
 %%
 
@@ -118,16 +120,21 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
       preprocessor_directive(yytext, yyleng);
    }
 
-\"([^\"]|\\\")*\"  {
+("u8"|"u"|"U"|"L")?\"([^\"]|\\\")*\"(_{ID})?  {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(STRING_LITERAL);
     }
 
-\'([^\']|\\\')+\'  {
+[uUL]?\'([^\']|\\\')+\'(_{ID})?  {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(CHAR_LITERAL);
     }
 
+("R"|"u8R"|"uR"|"UR"|"LR")?\"[^ ()\t\v\f\n]*"(" {
+      yylval.str = raw_string(yytext);
+      return(STRING_LITERAL);
+    }
+
 {ID}"_EXPORT"  /* ignore EXPORT macros */
 
 "vtkNotUsed" { /* let the wrappers see the parameter */
@@ -140,7 +147,7 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
         cp = args;
         if (*cp == '(') { cp++; }
         while (*cp == ' ' || *cp == '\t') { cp++; }
-        l = vtkidlen(cp);
+        l = vtkParse_SkipId(cp);
         if (l)
           {
           yylval.str = vtkstrndup(cp, l);
@@ -151,6 +158,8 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
         }
     }
 
+"auto"          return(AUTO);
+
 "double"        return(DOUBLE);
 "float"         return(FLOAT);
 "__int64"       return(INT64__);
@@ -165,8 +174,24 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
 "void"          return(VOID);
 "bool"          return(BOOL);
 
-"size_t"        return(SIZE_T);
-"ssize_t"       return(SSIZE_T);
+"char16_t"      return(CHAR16_T);
+"char32_t"      return(CHAR32_T);
+"wchar_t"       return(WCHAR_T);
+
+"std::"?"size_t"/[^a-zA-Z_] {
+      yylval.str = (yytext[3] == ':' ? "std::size_t" : "size_t");
+      return(SIZE_T);
+    }
+
+"std::"?"ssize_t"/[^a-zA-Z_] {
+      yylval.str = (yytext[3] == ':' ? "std::ssize_t" : "ssize_t");
+      return(SSIZE_T);
+    }
+
+"std::"?"nullptr_t"/[^a-zA-Z_] {
+      yylval.str = (yytext[3] == ':' ? "std::nullptr_t" : "nullptr_t");
+      return(NULLPTR_T);
+    }
 
 "Q_OBJECT"      /* ignore the Q_OBJECT macro from Qt */
 "public"{WS}"slots"{PS}/:    return(PUBLIC);
@@ -188,7 +213,9 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
 "operator"      return(OPERATOR);
 "friend"        return(FRIEND);
 "inline"        return(INLINE);
+"constexpr"     return(CONSTEXPR);
 "static"        return(STATIC);
+"thread_local"  return(THREAD_LOCAL);
 "extern"        return(EXTERN);
 "template"      return(TEMPLATE);
 "typename"      return(TYPENAME);
@@ -199,13 +226,17 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
 "delete"        return(DELETE);
 "explicit"      return(EXPLICIT);
 "throw"         return(THROW);
+"try"           return(TRY);
+"catch"         return(CATCH);
+"noexcept"      return(NOEXCEPT);
+"decltype"      return(DECLTYPE);
+"default"       return(DEFAULT);
 
 "static_cast"   return(STATIC_CAST);
 "dynamic_cast"  return(DYNAMIC_CAST);
 "const_cast"    return(CONST_CAST);
 "reinterpret_cast" return(REINTERPRET_CAST);
 
-"auto"          /* irrelevant to wrappers */
 "register"      /* irrelevant to wrappers */
 
 "and"           return(OP_LOGIC_AND);
@@ -324,16 +355,25 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
       return(UnicodeString);
     }
 
-"vtkDataArray" {
-      yylval.str = vtkstrndup(yytext, yyleng);
-      return(VTK_ID);
-    }
-
 "Qt::"{ID} {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(QT_ID);
     }
 
+"static_assert"  get_macro_arguments(); /* C++11 */
+
+"alignas"  get_macro_arguments(); /* C++11 */
+
+"_Alignas" get_macro_arguments(); /* C11 */
+
+"_Generic" get_macro_arguments(); /* C11 */
+
+"_Thread_local" return(THREAD_LOCAL); /* C11 */
+
+"_Atomic"    /* C11 */
+
+"_Noreturn"  /* C11 */
+
 "__attribute__"  get_macro_arguments(); /* gcc attributes */
 
 "__declspec"  get_macro_arguments(); /* Windows linkage */
@@ -355,6 +395,11 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
       free((char *)args);
     }
 
+"nullptr" {
+      yylval.str = vtkstrndup(yytext, yyleng);
+      return(NULLPTR);
+    }
+
 {ID}  {
       const char *name = vtkstrndup(yytext, yyleng);
       MacroInfo *macro = vtkParsePreprocess_GetMacro(preprocessor, name);
@@ -381,6 +426,7 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
             if (!emacro)
               {
               print_preprocessor_error(VTK_PARSE_MACRO_NUMARGS, NULL, 0);
+              exit(1);
               }
             free((char *)args);
             }
@@ -402,6 +448,7 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
             if (!emacro)
               {
               print_preprocessor_error(r, NULL, 0);
+              exit(1);
               }
             }
           }
@@ -439,32 +486,32 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
         }
     }
 
-[0-9]*"."[0-9]+([eE][+-]?[0-9]+)?[a-zA-Z_]*  {
+[0-9]*"."[0-9]+([eE][+-]?[0-9]+)?{ID}?  {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(FLOAT_LITERAL);
     }
 
-[0-9]+"."([eE][+-]?[0-9]+)?[a-zA-Z_]*  {
+[0-9]+"."([eE][+-]?[0-9]+)?{ID}?  {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(FLOAT_LITERAL);
     }
 
-[0-9]+[eE][+-]?[0-9]+[a-zA-Z_]*  {
+[0-9]+[eE][+-]?[0-9]+{ID}?  {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(FLOAT_LITERAL);
     }
 
-"0"[xX][0-9a-fA-F]+[g-zG-Z_]*      {
+"0"[xX][0-9a-fA-F]+{ID}?  {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(HEX_LITERAL);
     }
 
-"0"[0-9]+[a-zA-Z_]*   {
+"0"[0-9]+{ID}? {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(OCT_LITERAL);
     }
 
-[1-9][0-9]*[a-zA-Z_]* {
+[1-9][0-9]*{ID}? {
       yylval.str = vtkstrndup(yytext, yyleng);
       return(INT_LITERAL);
     }
@@ -478,13 +525,22 @@ ID [a-zA-Z_][0-9a-zA-Z_]*
 [\t ]+            /* whitespace */
 [\n\r\v\f]        /* whitespace */
 
+"["{PS}"["        return(BEGIN_ATTRIB);
+
+"<%"              return('{');
+"%>"              return('}');
+"<:"              return('[');
+":>"              return(']');
+"%:"              return('#');
+
+">"/>             return(OP_RSHIFT_A);
+
 "<<="             return(OP_LSHIFT_EQ);
 ">>="             return(OP_RSHIFT_EQ);
 "<<"              return(OP_LSHIFT);
 ".*"              return(OP_DOT_POINTER);
 "->*"             return(OP_ARROW_POINTER);
 "->"              return(OP_ARROW);
-">>"              return(OP_RSHIFT);
 "++"              return(OP_INCR);
 "--"              return(OP_DECR);
 "+="              return(OP_PLUS_EQ);
@@ -677,14 +733,16 @@ const char *get_macro_arguments()
  */
 int skip_comment()
 {
+   int savelineno = yylineno;
    int c1 = 0, c2 = input();
    for (;;)
      {
-     if (c2 == 0)
+     if (c2 == 0 || c2 == EOF)
        {
+       yylineno = savelineno;
        print_preprocessor_error(VTK_PARSE_SYNTAX_ERROR,
          "Cannot find end of comment.", 27);
-       return 0;
+       exit(1);
        }
      if (c1 == '*' && c2 == '/') break;
      c1 = c2; c2 = input();
@@ -771,7 +829,7 @@ int skip_to_next_directive()
 
   c = input();
 
-  while (c != 0)
+  while (c != 0 && c != EOF)
     {
     /* whitespace */
     if (c == ' ' || c == '\t')
@@ -808,7 +866,7 @@ int skip_to_next_directive()
           c = input();
           }
         }
-      else if (c != 0)
+      else if (c != 0 && c != EOF)
         {
         c = input();
         }
@@ -855,7 +913,7 @@ int skip_conditional_block()
       {
       c = input();
       }
-    if (c == 0)
+    if (c == 0 || c == EOF)
       {
       return 0;
       }
@@ -863,7 +921,7 @@ int skip_conditional_block()
     /* eat the whole line */
     i = 0;
     linebuf[i++] = '#';
-    while (c != 0 && c != '\n')
+    while (c != 0 && c != EOF && c != '\n')
       {
       if (i >= linemaxlen-5)
         {
@@ -907,7 +965,7 @@ int skip_conditional_block()
 int skip_ahead_multi(const char *strings[])
 {
   char textbuf[SKIP_MATCH_MAXLEN+1];
-  char c = 0;
+  int c = 0;
   size_t i;
 
   for (i = 0; i < (SKIP_MATCH_MAXLEN+1); i++)
@@ -922,7 +980,8 @@ int skip_ahead_multi(const char *strings[])
       textbuf[i] = textbuf[i+1];
       }
 
-    if ((c = (char)input()) == '\0')
+    c = input();
+    if (c == 0 || c == EOF)
       {
       print_preprocessor_error(VTK_PARSE_SYNTAX_ERROR, NULL, 0);
       return 0;
@@ -944,9 +1003,9 @@ int skip_ahead_multi(const char *strings[])
       }
     }
 
-  while (c != '\0' && c != '\n')
+  while (c != 0 && c != EOF && c != '\n')
     {
-    c = (char)input();
+    c = input();
     }
 
   return 1;
@@ -965,6 +1024,123 @@ int skip_ahead_until(const char *text)
 }
 
 /*
+ * Convert a raw string into a non-raw string.
+ */
+const char *raw_string(const char *begin)
+{
+  int savelineno = yylineno;
+  char *textbuf;
+  int c = 0;
+  const char *delim;
+  const char *cp = begin;
+  char *dp;
+  char *result;
+  size_t i, j, n;
+  size_t m = 1024;
+
+  result = (char *)malloc(m);
+  dp = result;
+
+  while (*cp != '\"')
+    {
+    *dp++ = *cp++;
+    }
+  --dp;
+  *dp++ = *cp++;
+
+  delim = cp;
+
+  for (n = 0;; n++)
+    {
+    if (delim[n] == '(') { break; }
+    }
+
+  textbuf = (char *)malloc(n+1);
+
+  for (i = 0; i < n+1; i++)
+    {
+    c = input();
+    textbuf[i] = c;
+    }
+
+  while (c != EOF)
+    {
+    if (textbuf[0] == ')' && (n == 0 || strncmp(&textbuf[1], delim, n) == 0))
+      {
+      break;
+      }
+
+    j = dp - result;
+    if (j > m - 8)
+      {
+      m += 1024;
+      result = (char *)realloc(result, m);
+      dp = result + j;
+      }
+
+    if ((*textbuf >= ' ' && *textbuf <= '~') ||
+        (*textbuf & 0x80) != 0)
+      {
+      *dp++ = *textbuf;
+      }
+    else switch (*textbuf)
+      {
+      case '\a': *dp++ = '\\'; *dp++ = 'a'; break;
+      case '\b': *dp++ = '\\'; *dp++ = 'b'; break;
+      case '\f': *dp++ = '\\'; *dp++ = 'f'; break;
+      case '\n': *dp++ = '\\'; *dp++ = 'n'; break;
+      case '\r': *dp++ = '\\'; *dp++ = 'r'; break;
+      case '\t': *dp++ = '\\'; *dp++ = 't'; break;
+      case '\v': *dp++ = '\\'; *dp++ = 'v'; break;
+      case '\\': *dp++ = '\\'; *dp++ = '\\'; break;
+      case '\'': *dp++ = '\\'; *dp++ = '\''; break;
+      case '\"': *dp++ = '\\'; *dp++ = '\"'; break;
+      default:
+        sprintf(dp, "\\%3.3o", *textbuf);
+        dp += 4;
+        break;
+      }
+
+    for (i = 0; i < n; i++)
+      {
+      textbuf[i] = textbuf[i+1];
+      }
+
+    c = input();
+    textbuf[n] = c;
+    }
+
+  if (c == EOF || '\"' != input())
+    {
+    yylineno = savelineno;
+    print_preprocessor_error(VTK_PARSE_SYNTAX_ERROR,
+      "Unterminated raw string.", 24);
+    exit(1);
+    }
+
+  *dp++ = '\"';
+
+  c = input();
+  if (c == '_')
+    {
+    do
+      {
+      *dp++ = c;
+      c = input();
+      }
+    while (vtkParse_CharType(c, CPRE_IDGIT));
+    }
+  unput(c);
+
+  *dp = '\0';
+  cp = vtkstrdup(result);
+  free(result);
+  free(textbuf);
+
+  return cp;
+}
+
+/*
  * buffer stack, used for macro expansion and include files
  */
 static size_t buffer_stack_size = 0;
@@ -1212,6 +1388,11 @@ void print_parser_error(const char *text, const char *cp, size_t n)
   size_t j = 0;
   const char *fn = "(none)";
 
+  if (CommandName)
+    {
+    fprintf(yyout, "%s: ", CommandName);
+    }
+
   if (data->FileName)
     {
     fn = data->FileName;
@@ -1222,15 +1403,19 @@ void print_parser_error(const char *text, const char *cp, size_t n)
     fprintf(yyout, "%i:\nIn %s:",
             lineno_stack[j], include_stack[j]->FileName);
     }
-  fprintf(yyout, "%i:\n", yylineno);
+  fprintf(yyout, "%i:", yylineno);
 
   if (cp)
     {
-    fprintf(yyout, "%s: %*.*s\n", text, (int)n, (int)n, cp);
+    fprintf(yyout, " %s: %*.*s\n", text, (int)n, (int)n, cp);
     }
   else if (text)
     {
-    fprintf(yyout, "%s.\n", text);
+    fprintf(yyout, " %s.\n", text);
+    }
+  else
+    {
+    fprintf(yyout, "\n");
     }
 }
 
@@ -1291,6 +1476,10 @@ void preprocessor_directive(const char *text, size_t l)
   else if (result != VTK_PARSE_OK)
     {
     print_preprocessor_error(result, text, l);
+    if ((result & VTK_PARSE_FATAL_ERROR) != 0)
+      {
+      exit(1);
+      }
     }
   else if (n == 6 && strncmp(directive, "define", n) == 0 &&
            ep - cp > 4 && strncmp(cp, "VTK", 3) == 0)
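Taken together, the lexer changes in this file add the C++11 literal forms: u8/u/U/L encoding prefixes and underscore-prefixed user-defined suffixes on string and character literals, plus raw strings, which the new raw_string() helper rewrites into ordinary escaped literals before the parser sees them. Illustrative declarations of the kind a wrapped header could now contain (names are made up):

    void literalExamples()
    {
      const char     *a = u8"utf-8 text";      // u8/u/U/L prefixes accepted
      const char16_t *b = u"sixteen-bit";
      wchar_t         c = L'x';                // prefixed character literal
      const char     *d = R"xyz(no \escapes,
    second line)xyz";                          // raw string, custom delimiter
      // raw_string() converts d's token into a conventional literal, escaping
      // the backslash and the embedded newline, so downstream code only ever
      // deals with ordinary string syntax.
    }
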
diff --git a/Wrapping/Tools/vtkParse.tab.c b/Wrapping/Tools/vtkParse.tab.c
index 75c3aaf..6c3443d 100644
--- a/Wrapping/Tools/vtkParse.tab.c
+++ b/Wrapping/Tools/vtkParse.tab.c
@@ -1,9 +1,8 @@
 /* A Bison parser, made by GNU Bison 2.4.1.  */
 
-/* Skeleton implementation for Bison's Yacc-like parsers in C
+/* Skeleton implementation for Bison GLR parsers in C
 
-      Copyright (C) 1984, 1989, 1990, 2000, 2001, 2002, 2003, 2004, 2005, 2006
-   Free Software Foundation, Inc.
+      Copyright (C) 2002, 2003, 2004, 2005, 2006 Free Software Foundation, Inc.
 
    This program is free software: you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
@@ -31,15 +30,7 @@
    This special exception was added by the Free Software Foundation in
    version 2.2 of Bison.  */
 
-/* C LALR(1) parser skeleton written by Richard Stallman, by
-   simplifying the original so-called "semantic" parser.  */
-
-/* All symbols defined below should begin with yy or YY, to avoid
-   infringing on user name space.  This should be done even for local
-   variables, as they might otherwise be expanded by user macros.
-   There are some unavoidable exceptions within include files to
-   define necessary library symbols; they are noted "INFRINGES ON
-   USER NAME SPACE" below.  */
+/* C GLR parser skeleton written by Paul Hilfinger.  */
 
 /* Identify Bison output.  */
 #define YYBISON 1
@@ -48,25 +39,20 @@
 #define YYBISON_VERSION "2.4.1"
 
 /* Skeleton name.  */
-#define YYSKELETON_NAME "yacc.c"
+#define YYSKELETON_NAME "glr.c"
 
 /* Pure parsers.  */
 #define YYPURE 0
 
-/* Push parsers.  */
-#define YYPUSH 0
-
-/* Pull parsers.  */
-#define YYPULL 1
-
 /* Using locations.  */
 #define YYLSP_NEEDED 0
 
 
 
+
 /* Copy the first part of user declarations.  */
 
-/* Line 189 of yacc.c  */
+/* Line 172 of glr.c  */
 #line 15 "vtkParse.y"
 
 
@@ -81,9 +67,7 @@ Run yacc like this:
 Modify vtkParse.tab.c:
   - convert TABs to spaces (eight per tab)
   - remove spaces from ends of lines, s/ *$//g
-  - remove the "goto yyerrlab1;" that appears right before yyerrlab1:
-  - remove the #defined constants that appear right after the anonymous_enums
-
+  - replace all instances of "static inline" with "static".
 */
 
 /*
@@ -127,10 +111,10 @@ create a yacc parser without any shift/reduce conflicts.  The rules for
 types are organized according to the way that types are usually defined
 in working code, rather than strictly according to C++ grammar.
 
-The declaration specifiers "friend" and "typedef" can only appear at the
-beginning of a declaration sequence.  There are also restrictions on
-where class and enum specifiers can be used: you can declare a new struct
-within a variable declaration, but not within a parameter declaration.
+The declaration specifier "typedef" can only appear at the beginning
+of a declaration sequence.  There are also restrictions on where class
+and enum specifiers can be used: you can declare a new struct within a
+variable declaration, but not within a parameter declaration.
 
 The lexer returns each of "(scope::*", "(*", "(a::b::*", etc. as single
 tokens.  The C++ BNF, in contrast, would consider these to be a "("
@@ -146,30 +130,12 @@ a class.  This parser always interprets this pattern as a constructor
 declaration, because function calls are ignored by the parser, and
 variable declarations of the form y(x); are exceedingly rare compared
 to the more usual form y x; without parentheses.
-
-One ambiguous structure that has been found in some working code, but
-is currently not dealt with properly by the parser, is the following:
-
-  enum { x = mytemplate<int,2>::x };
-
-This is interpreted as the following ungrammatical statement:
-
-  enum { x = mytemplate < int ,
-         2 > ::x };
-
-This has proven to be very hard to fix in the parser, but it possible
-to modify the statement so that it does not confuse the parser:
-
-  enum { x = (mytemplate<int,2>::x) };
-
-The parentheses serve to disambiguate the statement.
-
 */
 
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#define yyerror(a) fprintf(stderr,"%s\n",a)
+#define yyerror(a) print_parser_error(a, NULL, 0)
 #define yywrap() 1
 
 /* Make sure yacc-generated code knows we have included stdlib.h.  */
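The constructor-versus-variable ambiguity kept in the comment block above can be seen in a class body like the sketch below (types are made up). The token pattern "name(name);" could in principle declare a parenthesized member variable, but real headers write members as "type name;", so the parser always takes the parenthesized form as a constructor.

    class Other {};

    class Widget
    {
    public:
      Widget(Other);   // "name(name);" read as a constructor declaration
      Other x;         // the ordinary member spelling, "type name;"
    };
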
@@ -268,6 +234,7 @@ const char   **Definitions;
 /* options that can be set by the programs that use the parser */
 int            IgnoreBTX = 0;
 int            Recursive = 0;
+const char    *CommandName = NULL;
 
 /* various state variables */
 NamespaceInfo *currentNamespace = NULL;
@@ -276,6 +243,7 @@ FunctionInfo  *currentFunction = NULL;
 TemplateInfo  *currentTemplate = NULL;
 const char    *currentEnumName = NULL;
 const char    *currentEnumValue = NULL;
+unsigned int   currentEnumType = 0;
 parse_access_t access_level = VTK_ACCESS_PUBLIC;
 
 /* functions from vtkParse.l */
@@ -284,10 +252,9 @@ void print_parser_error(const char *text, const char *cp, size_t n);
 /* helper functions */
 const char *type_class(unsigned int type, const char *classname);
 void start_class(const char *classname, int is_struct_or_union);
-void reject_class(const char *classname, int is_struct_or_union);
 void end_class();
 void add_base_class(ClassInfo *cls, const char *name, int access_lev,
-                    int is_virtual);
+                    unsigned int extra);
 void output_friend_function(void);
 void output_function(void);
 void reject_function(void);
@@ -298,7 +265,8 @@ void add_parameter(FunctionInfo *func, unsigned int type,
 void add_template_parameter(unsigned int datatype,
                             unsigned int extra, const char *funcSig);
 void add_using(const char *name, int is_namespace);
-void start_enum(const char *enumname);
+void start_enum(const char *name, int is_scoped,
+                unsigned int type, const char *basename);
 void add_enum(const char *name, const char *value);
 void end_enum();
 unsigned int guess_constant_type(const char *value);
@@ -320,6 +288,7 @@ void outputSetVectorMacro(const char *var, unsigned int paramType,
 void outputGetVectorMacro(const char *var, unsigned int paramType,
                           const char *typeText, int n);
 
+
 /*----------------------------------------------------------------
  * String utility methods
  *
@@ -441,28 +410,6 @@ static const char *vtkstrcat7(const char *str1, const char *str2,
   return vtkstrncat(7, cp);
 }
 
-static size_t vtkidlen(const char *text)
-{
-  size_t i = 0;
-  char c = text[0];
-
-  if ((c >= 'a' && c <= 'z') ||
-      (c >= 'A' && c <= 'Z') ||
-       c == '_')
-    {
-    do
-      {
-      c = text[++i];
-      }
-    while ((c >= 'a' && c <= 'z') ||
-           (c >= 'A' && c <= 'Z') ||
-           (c >= '0' && c <= '9') ||
-           c == '_');
-    }
-
-  return i;
-}
-
 /*----------------------------------------------------------------
  * Comments
  */
@@ -933,8 +880,8 @@ void setTypeMod(unsigned int mod)
 /* modify the indirection (pointers, refs) in the storage type */
 void setTypePtr(unsigned int ind)
 {
-  storedType &= ~(unsigned int)(VTK_PARSE_INDIRECT);
-  ind &= VTK_PARSE_INDIRECT;
+  storedType &= ~(unsigned int)(VTK_PARSE_INDIRECT | VTK_PARSE_RVALUE);
+  ind &= (VTK_PARSE_INDIRECT | VTK_PARSE_RVALUE);
   storedType |= ind;
 }
 
@@ -1259,11 +1206,8 @@ void prepend_scope(char *cp, const char *arg)
   n = strlen(arg);
   i = m;
   while (i > 0 &&
-         ((cp[i-1] >= 'a' && cp[i-1] <= 'z') ||
-          (cp[i-1] >= 'A' && cp[i-1] <= 'Z') ||
-          (cp[i-1] >= '0' && cp[i-1] <= '9') ||
-          cp[i-1] == '_' || cp[i-1] == ':' ||
-          cp[i-1] == '>'))
+         (vtkParse_CharType(cp[i-1], CPRE_IDGIT) ||
+          cp[i-1] == ':' || cp[i-1] == '>'))
     {
     i--;
     if (cp[i] == '>')
@@ -1312,6 +1256,12 @@ unsigned int add_indirection(unsigned int type1, unsigned int type2)
   result = ((type1 & ~VTK_PARSE_POINTER_MASK) |
             (type2 & ~VTK_PARSE_POINTER_MASK));
 
+  /* if there are two ampersands, it is an rvalue reference */
+  if ((type1 & type2 & VTK_PARSE_REF) != 0)
+    {
+    result |= VTK_PARSE_RVALUE;
+    }
+
   while (ptr2)
     {
     reverse = ((reverse << 2) | (ptr2 & VTK_PARSE_POINTER_LOWMASK));
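With the two-ampersand check added above, combining two VTK_PARSE_REF indirections now also ORs VTK_PARSE_RVALUE into the stored type, so rvalue references are represented distinctly from plain references. In header terms (illustrative class and function names):

    class vtkFoo;

    void Take(vtkFoo &&obj);    // '&' + '&': add_indirection() keeps REF and
                                // sets VTK_PARSE_RVALUE as well
    void Borrow(vtkFoo &obj);   // single '&': REF only
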
@@ -1366,26 +1316,10 @@ unsigned int add_indirection_to_array(unsigned int type)
 
 
 
-/* Line 189 of yacc.c  */
-#line 1372 "vtkParse.tab.c"
-
-/* Enabling traces.  */
-#ifndef YYDEBUG
-# define YYDEBUG 0
-#endif
+/* Line 172 of glr.c  */
+#line 1322 "vtkParse.tab.c"
 
-/* Enabling verbose error messages.  */
-#ifdef YYERROR_VERBOSE
-# undef YYERROR_VERBOSE
-# define YYERROR_VERBOSE 1
-#else
-# define YYERROR_VERBOSE 0
-#endif
 
-/* Enabling the token table.  */
-#ifndef YYTOKEN_TABLE
-# define YYTOKEN_TABLE 0
-#endif
 
 
 /* Tokens.  */
@@ -1410,186 +1344,190 @@ unsigned int add_indirection_to_array(unsigned int type)
      FLOAT_LITERAL = 271,
      CHAR_LITERAL = 272,
      ZERO = 273,
-     STRUCT = 274,
-     CLASS = 275,
-     UNION = 276,
-     ENUM = 277,
-     PUBLIC = 278,
-     PRIVATE = 279,
-     PROTECTED = 280,
-     CONST = 281,
-     VOLATILE = 282,
-     MUTABLE = 283,
-     STATIC = 284,
-     VIRTUAL = 285,
-     EXPLICIT = 286,
-     INLINE = 287,
-     FRIEND = 288,
-     EXTERN = 289,
-     OPERATOR = 290,
-     TEMPLATE = 291,
-     THROW = 292,
-     TYPENAME = 293,
-     TYPEDEF = 294,
-     NAMESPACE = 295,
-     USING = 296,
-     NEW = 297,
-     DELETE = 298,
-     STATIC_CAST = 299,
-     DYNAMIC_CAST = 300,
-     CONST_CAST = 301,
-     REINTERPRET_CAST = 302,
-     OP_LSHIFT_EQ = 303,
-     OP_RSHIFT_EQ = 304,
-     OP_LSHIFT = 305,
-     OP_RSHIFT = 306,
-     OP_DOT_POINTER = 307,
-     OP_ARROW_POINTER = 308,
-     OP_ARROW = 309,
-     OP_INCR = 310,
-     OP_DECR = 311,
-     OP_PLUS_EQ = 312,
-     OP_MINUS_EQ = 313,
-     OP_TIMES_EQ = 314,
-     OP_DIVIDE_EQ = 315,
-     OP_REMAINDER_EQ = 316,
-     OP_AND_EQ = 317,
-     OP_OR_EQ = 318,
-     OP_XOR_EQ = 319,
-     OP_LOGIC_AND = 320,
-     OP_LOGIC_OR = 321,
-     OP_LOGIC_EQ = 322,
-     OP_LOGIC_NEQ = 323,
-     OP_LOGIC_LEQ = 324,
-     OP_LOGIC_GEQ = 325,
-     ELLIPSIS = 326,
-     DOUBLE_COLON = 327,
-     OTHER = 328,
-     VOID = 329,
-     BOOL = 330,
-     FLOAT = 331,
-     DOUBLE = 332,
-     INT = 333,
-     SHORT = 334,
-     LONG = 335,
-     INT64__ = 336,
-     CHAR = 337,
-     SIGNED = 338,
-     UNSIGNED = 339,
-     SSIZE_T = 340,
-     SIZE_T = 341,
-     IdType = 342,
-     TypeInt8 = 343,
-     TypeUInt8 = 344,
-     TypeInt16 = 345,
-     TypeUInt16 = 346,
-     TypeInt32 = 347,
-     TypeUInt32 = 348,
-     TypeInt64 = 349,
-     TypeUInt64 = 350,
-     TypeFloat32 = 351,
-     TypeFloat64 = 352,
-     SetMacro = 353,
-     GetMacro = 354,
-     SetStringMacro = 355,
-     GetStringMacro = 356,
-     SetClampMacro = 357,
-     SetObjectMacro = 358,
-     GetObjectMacro = 359,
-     BooleanMacro = 360,
-     SetVector2Macro = 361,
-     SetVector3Macro = 362,
-     SetVector4Macro = 363,
-     SetVector6Macro = 364,
-     GetVector2Macro = 365,
-     GetVector3Macro = 366,
-     GetVector4Macro = 367,
-     GetVector6Macro = 368,
-     SetVectorMacro = 369,
-     GetVectorMacro = 370,
-     ViewportCoordinateMacro = 371,
-     WorldCoordinateMacro = 372,
-     TypeMacro = 373,
-     VTK_BYTE_SWAP_DECL = 374
+     NULLPTR = 274,
+     SSIZE_T = 275,
+     SIZE_T = 276,
+     NULLPTR_T = 277,
+     BEGIN_ATTRIB = 278,
+     STRUCT = 279,
+     CLASS = 280,
+     UNION = 281,
+     ENUM = 282,
+     PUBLIC = 283,
+     PRIVATE = 284,
+     PROTECTED = 285,
+     CONST = 286,
+     VOLATILE = 287,
+     MUTABLE = 288,
+     STATIC = 289,
+     THREAD_LOCAL = 290,
+     VIRTUAL = 291,
+     EXPLICIT = 292,
+     INLINE = 293,
+     CONSTEXPR = 294,
+     FRIEND = 295,
+     EXTERN = 296,
+     OPERATOR = 297,
+     TEMPLATE = 298,
+     THROW = 299,
+     TRY = 300,
+     CATCH = 301,
+     NOEXCEPT = 302,
+     DECLTYPE = 303,
+     TYPENAME = 304,
+     TYPEDEF = 305,
+     NAMESPACE = 306,
+     USING = 307,
+     NEW = 308,
+     DELETE = 309,
+     DEFAULT = 310,
+     STATIC_CAST = 311,
+     DYNAMIC_CAST = 312,
+     CONST_CAST = 313,
+     REINTERPRET_CAST = 314,
+     OP_LSHIFT_EQ = 315,
+     OP_RSHIFT_EQ = 316,
+     OP_LSHIFT = 317,
+     OP_RSHIFT_A = 318,
+     OP_DOT_POINTER = 319,
+     OP_ARROW_POINTER = 320,
+     OP_ARROW = 321,
+     OP_INCR = 322,
+     OP_DECR = 323,
+     OP_PLUS_EQ = 324,
+     OP_MINUS_EQ = 325,
+     OP_TIMES_EQ = 326,
+     OP_DIVIDE_EQ = 327,
+     OP_REMAINDER_EQ = 328,
+     OP_AND_EQ = 329,
+     OP_OR_EQ = 330,
+     OP_XOR_EQ = 331,
+     OP_LOGIC_AND = 332,
+     OP_LOGIC_OR = 333,
+     OP_LOGIC_EQ = 334,
+     OP_LOGIC_NEQ = 335,
+     OP_LOGIC_LEQ = 336,
+     OP_LOGIC_GEQ = 337,
+     ELLIPSIS = 338,
+     DOUBLE_COLON = 339,
+     OTHER = 340,
+     AUTO = 341,
+     VOID = 342,
+     BOOL = 343,
+     FLOAT = 344,
+     DOUBLE = 345,
+     INT = 346,
+     SHORT = 347,
+     LONG = 348,
+     INT64__ = 349,
+     CHAR = 350,
+     CHAR16_T = 351,
+     CHAR32_T = 352,
+     WCHAR_T = 353,
+     SIGNED = 354,
+     UNSIGNED = 355,
+     IdType = 356,
+     TypeInt8 = 357,
+     TypeUInt8 = 358,
+     TypeInt16 = 359,
+     TypeUInt16 = 360,
+     TypeInt32 = 361,
+     TypeUInt32 = 362,
+     TypeInt64 = 363,
+     TypeUInt64 = 364,
+     TypeFloat32 = 365,
+     TypeFloat64 = 366,
+     SetMacro = 367,
+     GetMacro = 368,
+     SetStringMacro = 369,
+     GetStringMacro = 370,
+     SetClampMacro = 371,
+     SetObjectMacro = 372,
+     GetObjectMacro = 373,
+     BooleanMacro = 374,
+     SetVector2Macro = 375,
+     SetVector3Macro = 376,
+     SetVector4Macro = 377,
+     SetVector6Macro = 378,
+     GetVector2Macro = 379,
+     GetVector3Macro = 380,
+     GetVector4Macro = 381,
+     GetVector6Macro = 382,
+     SetVectorMacro = 383,
+     GetVectorMacro = 384,
+     ViewportCoordinateMacro = 385,
+     WorldCoordinateMacro = 386,
+     TypeMacro = 387,
+     VTK_BYTE_SWAP_DECL = 388
    };
 #endif
 
 
-
-
-#if ! defined YYSTYPE && ! defined YYSTYPE_IS_DECLARED
+#ifndef YYSTYPE
 typedef union YYSTYPE
 {
 
-/* Line 222 of yacc.c  */
-#line 1320 "vtkParse.y"
+/* Line 215 of glr.c  */
+#line 1293 "vtkParse.y"
 
   const char   *str;
   unsigned int  integer;
 
 
 
-/* Line 222 of yacc.c  */
-#line 1653 "vtkParse.tab.c"
+/* Line 215 of glr.c  */
+#line 1481 "vtkParse.tab.c"
 } YYSTYPE;
 # define YYSTYPE_IS_TRIVIAL 1
-# define yystype YYSTYPE /* obsolescent; will be withdrawn */
-# define YYSTYPE_IS_DECLARED 1
 #endif
 
+#if ! defined YYLTYPE && ! defined YYLTYPE_IS_DECLARED
+typedef struct YYLTYPE
+{
 
-/* Copy the second part of user declarations.  */
+  char yydummy;
 
+} YYLTYPE;
+# define YYLTYPE_IS_DECLARED 1
+# define YYLTYPE_IS_TRIVIAL 1
+#endif
 
-/* Line 264 of yacc.c  */
-#line 1665 "vtkParse.tab.c"
 
-#ifdef short
-# undef short
-#endif
 
-#ifdef YYTYPE_UINT8
-typedef YYTYPE_UINT8 yytype_uint8;
-#else
-typedef unsigned char yytype_uint8;
+/* Enabling traces.  */
+#ifndef YYDEBUG
+# define YYDEBUG 0
 #endif
 
-#ifdef YYTYPE_INT8
-typedef YYTYPE_INT8 yytype_int8;
-#elif (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-typedef signed char yytype_int8;
+/* Enabling verbose error messages.  */
+#ifdef YYERROR_VERBOSE
+# undef YYERROR_VERBOSE
+# define YYERROR_VERBOSE 1
 #else
-typedef short int yytype_int8;
+# define YYERROR_VERBOSE 0
 #endif
 
-#ifdef YYTYPE_UINT16
-typedef YYTYPE_UINT16 yytype_uint16;
-#else
-typedef unsigned short int yytype_uint16;
+/* Enabling the token table.  */
+#ifndef YYTOKEN_TABLE
+# define YYTOKEN_TABLE 0
 #endif
 
-#ifdef YYTYPE_INT16
-typedef YYTYPE_INT16 yytype_int16;
-#else
-typedef short int yytype_int16;
-#endif
+/* Default (constant) value used for initialization for null
+   right-hand sides.  Unlike the standard yacc.c template,
+   here we set the default value of $$ to a zeroed-out value.
+   Since the default value is undefined, this behavior is
+   technically correct.  */
+static YYSTYPE yyval_default;
 
-#ifndef YYSIZE_T
-# ifdef __SIZE_TYPE__
-#  define YYSIZE_T __SIZE_TYPE__
-# elif defined size_t
-#  define YYSIZE_T size_t
-# elif ! defined YYSIZE_T && (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-#  include <stddef.h> /* INFRINGES ON USER NAME SPACE */
-#  define YYSIZE_T size_t
-# else
-#  define YYSIZE_T unsigned int
-# endif
-#endif
+/* Copy the second part of user declarations.  */
+
+
+/* Line 243 of glr.c  */
+#line 1528 "vtkParse.tab.c"
 
-#define YYSIZE_MAXIMUM ((YYSIZE_T) -1)
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
 
 #ifndef YY_
 # if YYENABLE_NLS
@@ -1604,11 +1542,7 @@ typedef short int yytype_int16;
 #endif
 
 /* Suppress unused-variable warnings by "using" E.  */
-#if ! defined lint || defined __GNUC__
-# define YYUSE(e) ((void) (e))
-#else
-# define YYUSE(e) /* empty */
-#endif
+#define YYUSE(e) ((void) (e))
 
 /* Identity function, used to suppress warnings about constant conditions.  */
 #ifndef lint
@@ -1617,180 +1551,109 @@ typedef short int yytype_int16;
 #if (defined __STDC__ || defined __C99__FUNC__ \
      || defined __cplusplus || defined _MSC_VER)
 static int
-YYID (int yyi)
+YYID (int i)
 #else
 static int
-YYID (yyi)
-    int yyi;
+YYID (i)
+    int i;
 #endif
 {
-  return yyi;
+  return i;
 }
 #endif
 
-#if ! defined yyoverflow || YYERROR_VERBOSE
-
-/* The parser invokes alloca or malloc; define the necessary symbols.  */
-
-# ifdef YYSTACK_USE_ALLOCA
-#  if YYSTACK_USE_ALLOCA
-#   ifdef __GNUC__
-#    define YYSTACK_ALLOC __builtin_alloca
-#   elif defined __BUILTIN_VA_ARG_INCR
-#    include <alloca.h> /* INFRINGES ON USER NAME SPACE */
-#   elif defined _AIX
-#    define YYSTACK_ALLOC __alloca
-#   elif defined _MSC_VER
-#    include <malloc.h> /* INFRINGES ON USER NAME SPACE */
-#    define alloca _alloca
-#   else
-#    define YYSTACK_ALLOC alloca
-#    if ! defined _ALLOCA_H && ! defined _STDLIB_H && (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-#     include <stdlib.h> /* INFRINGES ON USER NAME SPACE */
-#     ifndef _STDLIB_H
-#      define _STDLIB_H 1
-#     endif
-#    endif
-#   endif
-#  endif
-# endif
-
-# ifdef YYSTACK_ALLOC
-   /* Pacify GCC's `empty if-body' warning.  */
-#  define YYSTACK_FREE(Ptr) do { /* empty */; } while (YYID (0))
-#  ifndef YYSTACK_ALLOC_MAXIMUM
-    /* The OS might guarantee only one guard page at the bottom of the stack,
-       and a page size can be as small as 4096 bytes.  So we cannot safely
-       invoke alloca (N) if N exceeds 4096.  Use a slightly smaller number
-       to allow for a few compiler-allocated temporary stack slots.  */
-#   define YYSTACK_ALLOC_MAXIMUM 4032 /* reasonable circa 2006 */
-#  endif
-# else
-#  define YYSTACK_ALLOC YYMALLOC
-#  define YYSTACK_FREE YYFREE
-#  ifndef YYSTACK_ALLOC_MAXIMUM
-#   define YYSTACK_ALLOC_MAXIMUM YYSIZE_MAXIMUM
-#  endif
-#  if (defined __cplusplus && ! defined _STDLIB_H \
-       && ! ((defined YYMALLOC || defined malloc) \
-             && (defined YYFREE || defined free)))
-#   include <stdlib.h> /* INFRINGES ON USER NAME SPACE */
-#   ifndef _STDLIB_H
-#    define _STDLIB_H 1
-#   endif
-#  endif
-#  ifndef YYMALLOC
-#   define YYMALLOC malloc
-#   if ! defined malloc && ! defined _STDLIB_H && (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-void *malloc (YYSIZE_T); /* INFRINGES ON USER NAME SPACE */
-#   endif
-#  endif
-#  ifndef YYFREE
-#   define YYFREE free
-#   if ! defined free && ! defined _STDLIB_H && (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-void free (void *); /* INFRINGES ON USER NAME SPACE */
-#   endif
-#  endif
-# endif
-#endif /* ! defined yyoverflow || YYERROR_VERBOSE */
+#ifndef YYFREE
+# define YYFREE free
+#endif
+#ifndef YYMALLOC
+# define YYMALLOC malloc
+#endif
+#ifndef YYREALLOC
+# define YYREALLOC realloc
+#endif
 
+#define YYSIZEMAX ((size_t) -1)
 
-#if (! defined yyoverflow \
-     && (! defined __cplusplus \
-         || (defined YYSTYPE_IS_TRIVIAL && YYSTYPE_IS_TRIVIAL)))
+#ifdef __cplusplus
+   typedef bool yybool;
+#else
+   typedef unsigned char yybool;
+#endif
+#define yytrue 1
+#define yyfalse 0
+
+#ifndef YYSETJMP
+# include <setjmp.h>
+# define YYJMP_BUF jmp_buf
+# define YYSETJMP(env) setjmp (env)
+# define YYLONGJMP(env, val) longjmp (env, val)
+#endif
 
-/* A type that is properly aligned for any stack member.  */
-union yyalloc
-{
-  yytype_int16 yyss_alloc;
-  YYSTYPE yyvs_alloc;
-};
+/*-----------------.
+| GCC extensions.  |
+`-----------------*/
 
-/* The size of the maximum gap between one aligned stack and the next.  */
-# define YYSTACK_GAP_MAXIMUM (sizeof (union yyalloc) - 1)
-
-/* The size of an array large to enough to hold all stacks, each with
-   N elements.  */
-# define YYSTACK_BYTES(N) \
-     ((N) * (sizeof (yytype_int16) + sizeof (YYSTYPE)) \
-      + YYSTACK_GAP_MAXIMUM)
-
-/* Copy COUNT objects from FROM to TO.  The source and destination do
-   not overlap.  */
-# ifndef YYCOPY
-#  if defined __GNUC__ && 1 < __GNUC__
-#   define YYCOPY(To, From, Count) \
-      __builtin_memcpy (To, From, (Count) * sizeof (*(From)))
-#  else
-#   define YYCOPY(To, From, Count)                \
-      do                                        \
-        {                                        \
-          YYSIZE_T yyi;                                \
-          for (yyi = 0; yyi < (Count); yyi++)        \
-            (To)[yyi] = (From)[yyi];                \
-        }                                        \
-      while (YYID (0))
-#  endif
+#ifndef __attribute__
+/* This feature is available in gcc versions 2.5 and later.  */
+# if (! defined __GNUC__ || __GNUC__ < 2 \
+      || (__GNUC__ == 2 && __GNUC_MINOR__ < 5) || __STRICT_ANSI__)
+#  define __attribute__(Spec) /* empty */
 # endif
+#endif
+
 
-/* Relocate STACK from its old location to the new one.  The
-   local variables YYSIZE and YYSTACKSIZE give the old and new number of
-   elements in the stack, and YYPTR gives the new location of the
-   stack.  Advance YYPTR to a properly aligned location for the next
-   stack.  */
-# define YYSTACK_RELOCATE(Stack_alloc, Stack)                                \
-    do                                                                        \
-      {                                                                        \
-        YYSIZE_T yynewbytes;                                                \
-        YYCOPY (&yyptr->Stack_alloc, Stack, yysize);                        \
-        Stack = &yyptr->Stack_alloc;                                        \
-        yynewbytes = yystacksize * sizeof (*Stack) + YYSTACK_GAP_MAXIMUM; \
-        yyptr += yynewbytes / sizeof (*yyptr);                                \
-      }                                                                        \
-    while (YYID (0))
+#ifdef __cplusplus
+# define YYOPTIONAL_LOC(Name) /* empty */
+#else
+# define YYOPTIONAL_LOC(Name) Name __attribute__ ((__unused__))
+#endif
 
+#ifndef YYASSERT
+# define YYASSERT(condition) ((void) ((condition) || (abort (), 0)))
 #endif
 
 /* YYFINAL -- State number of the termination state.  */
 #define YYFINAL  3
 /* YYLAST -- Last index in YYTABLE.  */
-#define YYLAST   5605
+#define YYLAST   9147
 
 /* YYNTOKENS -- Number of terminals.  */
-#define YYNTOKENS  143
+#define YYNTOKENS  157
 /* YYNNTS -- Number of nonterminals.  */
-#define YYNNTS  240
+#define YYNNTS  276
 /* YYNRULES -- Number of rules.  */
-#define YYNRULES  604
+#define YYNRULES  701
 /* YYNRULES -- Number of states.  */
-#define YYNSTATES  969
-
-/* YYTRANSLATE(YYLEX) -- Bison symbol number corresponding to YYLEX.  */
+#define YYNSTATES  1182
+/* YYMAXRHS -- Maximum number of symbols on right-hand side of rule.  */
+#define YYMAXRHS 10
+/* YYMAXLEFT -- Maximum number of symbols to the left of a handle
+   accessed by $0, $-1, etc., in any rule.  */
+#define YYMAXLEFT 0
+
+/* YYTRANSLATE(X) -- Bison symbol number corresponding to X.  */
 #define YYUNDEFTOK  2
-#define YYMAXUTOK   374
+#define YYMAXUTOK   388
 
 #define YYTRANSLATE(YYX)                                                \
   ((unsigned int) (YYX) <= YYMAXUTOK ? yytranslate[YYX] : YYUNDEFTOK)
 
 /* YYTRANSLATE[YYLEX] -- Bison symbol number corresponding to YYLEX.  */
-static const yytype_uint8 yytranslate[] =
+static const unsigned char yytranslate[] =
 {
        0,     2,     2,     2,     2,     2,     2,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
-       2,     2,     2,   139,     2,     2,     2,   135,   133,     2,
-     126,   127,   134,   138,   125,   137,   142,   136,     2,     2,
-       2,     2,     2,     2,     2,     2,     2,     2,   124,   120,
-     128,   123,   129,     2,     2,     2,     2,     2,     2,     2,
+       2,     2,     2,   153,     2,     2,     2,   149,   146,     2,
+     140,   141,   147,   152,   139,   151,   156,   150,     2,     2,
+       2,     2,     2,     2,     2,     2,     2,     2,   138,   134,
+     142,   137,   148,     2,     2,     2,     2,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
-       2,   130,     2,   131,   141,     2,     2,     2,     2,     2,
+       2,   143,     2,   144,   155,     2,     2,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
-       2,     2,     2,   121,   140,   122,   132,     2,     2,     2,
+       2,     2,     2,   135,   154,   136,   145,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
        2,     2,     2,     2,     2,     2,     2,     2,     2,     2,
@@ -1815,307 +1678,360 @@ static const yytype_uint8 yytranslate[] =
       85,    86,    87,    88,    89,    90,    91,    92,    93,    94,
       95,    96,    97,    98,    99,   100,   101,   102,   103,   104,
      105,   106,   107,   108,   109,   110,   111,   112,   113,   114,
-     115,   116,   117,   118,   119
+     115,   116,   117,   118,   119,   120,   121,   122,   123,   124,
+     125,   126,   127,   128,   129,   130,   131,   132,   133
 };
 
 #if YYDEBUG
 /* YYPRHS[YYN] -- Index of the first RHS symbol of rule number YYN in
    YYRHS.  */
-static const yytype_uint16 yyprhs[] =
-{
-       0,     0,     3,     5,     6,     7,    11,    13,    15,    17,
-      19,    21,    23,    25,    27,    29,    31,    33,    35,    37,
-      40,    42,    45,    48,    51,    54,    60,    65,    66,    73,
-      79,    81,    84,    88,    93,    98,   104,   105,   111,   112,
-     117,   118,   122,   124,   126,   128,   129,   130,   134,   138,
-     140,   142,   144,   146,   148,   150,   152,   154,   156,   158,
-     160,   162,   164,   167,   170,   172,   175,   178,   181,   185,
-     188,   192,   193,   195,   198,   200,   204,   206,   210,   214,
-     215,   217,   218,   220,   222,   224,   226,   231,   237,   238,
-     244,   247,   249,   250,   252,   254,   257,   261,   263,   264,
-     269,   276,   280,   285,   288,   292,   298,   302,   304,   307,
-     313,   319,   326,   332,   339,   342,   343,   347,   350,   352,
-     354,   355,   356,   364,   366,   370,   372,   375,   378,   381,
-     385,   389,   394,   398,   399,   405,   407,   408,   413,   414,
-     415,   421,   422,   423,   429,   430,   431,   432,   440,   442,
-     444,   445,   447,   448,   452,   454,   457,   460,   463,   466,
-     469,   472,   476,   479,   483,   486,   490,   494,   497,   501,
-     506,   509,   511,   513,   516,   518,   521,   524,   525,   526,
-     534,   537,   538,   542,   543,   549,   552,   554,   557,   558,
-     561,   562,   566,   568,   571,   575,   577,   578,   584,   586,
-     588,   589,   590,   596,   597,   603,   604,   607,   609,   613,
-     616,   617,   618,   621,   623,   624,   629,   633,   634,   635,
-     641,   642,   644,   645,   649,   654,   657,   658,   661,   662,
-     663,   668,   671,   672,   674,   677,   678,   684,   687,   688,
-     694,   696,   698,   700,   702,   704,   705,   706,   707,   714,
-     716,   717,   720,   723,   727,   729,   732,   734,   737,   738,
-     740,   742,   746,   748,   750,   752,   753,   755,   756,   759,
-     761,   764,   765,   770,   771,   772,   775,   777,   779,   781,
-     783,   786,   789,   792,   795,   798,   802,   806,   807,   813,
-     815,   817,   818,   824,   826,   828,   830,   832,   834,   836,
-     838,   841,   844,   847,   850,   853,   856,   859,   861,   863,
-     865,   867,   869,   871,   873,   875,   877,   879,   881,   883,
-     885,   887,   889,   891,   893,   895,   897,   899,   900,   903,
-     905,   907,   909,   911,   913,   916,   918,   920,   922,   924,
-     926,   929,   931,   933,   935,   937,   939,   941,   943,   946,
-     949,   950,   954,   955,   960,   962,   963,   967,   969,   971,
-     974,   977,   980,   981,   985,   986,   991,   993,   995,   997,
-    1000,  1003,  1006,  1008,  1010,  1012,  1014,  1016,  1018,  1020,
-    1022,  1024,  1026,  1028,  1030,  1032,  1034,  1036,  1038,  1040,
-    1042,  1044,  1046,  1048,  1050,  1052,  1054,  1056,  1058,  1060,
-    1062,  1064,  1066,  1068,  1070,  1072,  1074,  1076,  1079,  1081,
-    1083,  1084,  1088,  1090,  1093,  1094,  1102,  1103,  1104,  1105,
-    1115,  1116,  1122,  1123,  1129,  1130,  1131,  1142,  1143,  1151,
-    1152,  1153,  1154,  1164,  1171,  1172,  1180,  1181,  1189,  1190,
-    1198,  1199,  1207,  1208,  1216,  1217,  1225,  1226,  1234,  1235,
-    1243,  1244,  1254,  1255,  1265,  1270,  1275,  1283,  1284,  1286,
-    1289,  1292,  1296,  1300,  1302,  1304,  1306,  1308,  1310,  1312,
-    1314,  1316,  1318,  1320,  1322,  1324,  1326,  1328,  1330,  1332,
-    1334,  1336,  1338,  1340,  1342,  1344,  1346,  1348,  1350,  1352,
-    1354,  1356,  1358,  1360,  1362,  1364,  1366,  1368,  1370,  1372,
-    1374,  1376,  1378,  1380,  1382,  1384,  1386,  1388,  1390,  1392,
-    1394,  1396,  1398,  1400,  1402,  1404,  1406,  1408,  1410,  1412,
-    1414,  1416,  1418,  1420,  1422,  1424,  1426,  1428,  1430,  1432,
-    1434,  1436,  1438,  1440,  1442,  1445,  1447,  1449,  1451,  1453,
-    1455,  1457,  1459,  1461,  1463,  1465,  1467,  1468,  1471,  1473,
-    1475,  1477,  1479,  1481,  1483,  1485,  1487,  1488,  1491,  1492,
-    1495,  1497,  1499,  1501,  1503,  1505,  1506,  1511,  1512,  1517,
-    1518,  1523,  1524,  1529,  1530,  1535,  1536,  1541,  1542,  1545,
-    1546,  1549,  1551,  1553,  1555,  1557,  1559,  1561,  1563,  1565,
-    1567,  1569,  1571,  1573,  1575,  1577,  1579,  1581,  1583,  1585,
-    1589,  1593,  1597,  1599,  1601
+static const unsigned short int yyprhs[] =
+{
+       0,     0,     3,     5,     6,     7,    12,    14,    16,    18,
+      20,    22,    24,    26,    28,    30,    32,    34,    36,    38,
+      40,    42,    44,    47,    49,    52,    55,    58,    61,    64,
+      70,    75,    81,    86,    87,    94,   100,   102,   105,   110,
+     114,   120,   125,   131,   132,   138,   139,   147,   152,   153,
+     159,   162,   164,   166,   168,   172,   177,   180,   182,   184,
+     185,   187,   188,   189,   194,   198,   200,   202,   204,   206,
+     208,   210,   212,   214,   216,   218,   220,   222,   224,   226,
+     228,   230,   233,   236,   238,   241,   244,   247,   250,   253,
+     257,   260,   264,   266,   271,   274,   279,   284,   285,   287,
+     288,   290,   292,   294,   296,   302,   306,   313,   318,   324,
+     325,   331,   336,   340,   342,   345,   348,   349,   350,   354,
+     356,   360,   361,   363,   364,   369,   376,   379,   381,   387,
+     394,   398,   403,   409,   413,   415,   418,   424,   430,   437,
+     443,   450,   453,   454,   458,   461,   463,   465,   466,   467,
+     476,   478,   482,   484,   487,   490,   493,   497,   501,   506,
+     507,   516,   520,   521,   527,   529,   530,   535,   536,   537,
+     543,   544,   545,   551,   552,   553,   554,   562,   563,   565,
+     567,   569,   570,   572,   573,   577,   579,   582,   585,   588,
+     591,   594,   598,   603,   606,   610,   613,   617,   622,   625,
+     630,   636,   640,   642,   644,   647,   649,   652,   656,   657,
+     658,   668,   671,   672,   677,   678,   686,   689,   691,   695,
+     696,   699,   700,   704,   706,   709,   711,   714,   716,   718,
+     720,   723,   726,   727,   729,   730,   734,   738,   740,   742,
+     749,   750,   757,   758,   766,   767,   768,   775,   776,   783,
+     784,   787,   789,   793,   797,   798,   799,   802,   804,   805,
+     810,   814,   816,   817,   818,   824,   825,   827,   828,   832,
+     833,   836,   841,   844,   845,   848,   849,   850,   855,   858,
+     859,   861,   865,   866,   873,   877,   878,   884,   885,   889,
+     891,   892,   893,   894,   902,   904,   905,   908,   911,   915,
+     919,   922,   924,   927,   929,   932,   933,   935,   938,   943,
+     945,   947,   949,   950,   952,   953,   956,   958,   961,   962,
+     968,   969,   970,   973,   975,   977,   979,   981,   983,   986,
+     989,   992,   995,   998,  1001,  1004,  1007,  1011,  1015,  1019,
+    1020,  1026,  1028,  1030,  1032,  1033,  1039,  1040,  1044,  1046,
+    1048,  1050,  1052,  1054,  1056,  1058,  1060,  1062,  1064,  1066,
+    1068,  1070,  1072,  1074,  1076,  1078,  1080,  1082,  1084,  1086,
+    1088,  1090,  1092,  1094,  1096,  1098,  1100,  1101,  1105,  1107,
+    1109,  1111,  1113,  1116,  1120,  1122,  1124,  1126,  1128,  1130,
+    1132,  1135,  1137,  1139,  1141,  1143,  1145,  1147,  1149,  1151,
+    1154,  1157,  1158,  1162,  1163,  1168,  1170,  1174,  1179,  1181,
+    1183,  1184,  1189,  1192,  1195,  1198,  1199,  1203,  1204,  1209,
+    1212,  1213,  1217,  1218,  1223,  1225,  1227,  1229,  1231,  1234,
+    1237,  1240,  1243,  1246,  1248,  1250,  1252,  1254,  1256,  1258,
+    1260,  1262,  1264,  1266,  1268,  1270,  1272,  1274,  1276,  1278,
+    1280,  1282,  1284,  1286,  1288,  1290,  1292,  1294,  1296,  1298,
+    1300,  1302,  1304,  1306,  1308,  1310,  1312,  1314,  1316,  1318,
+    1320,  1322,  1324,  1327,  1330,  1333,  1334,  1339,  1340,  1342,
+    1344,  1347,  1348,  1351,  1352,  1353,  1360,  1361,  1369,  1370,
+    1371,  1372,  1382,  1383,  1389,  1390,  1396,  1397,  1398,  1409,
+    1410,  1418,  1419,  1420,  1421,  1431,  1438,  1439,  1447,  1448,
+    1456,  1457,  1465,  1466,  1474,  1475,  1483,  1484,  1492,  1493,
+    1501,  1502,  1510,  1511,  1521,  1522,  1532,  1537,  1542,  1550,
+    1551,  1553,  1556,  1559,  1563,  1567,  1569,  1571,  1573,  1575,
+    1578,  1581,  1584,  1586,  1588,  1590,  1592,  1594,  1596,  1598,
+    1600,  1602,  1604,  1606,  1608,  1610,  1612,  1614,  1616,  1618,
+    1620,  1622,  1624,  1626,  1628,  1630,  1632,  1634,  1636,  1638,
+    1640,  1642,  1644,  1646,  1648,  1650,  1652,  1654,  1656,  1658,
+    1660,  1662,  1664,  1666,  1668,  1670,  1672,  1674,  1676,  1678,
+    1680,  1682,  1684,  1686,  1688,  1690,  1692,  1694,  1696,  1698,
+    1700,  1702,  1704,  1706,  1708,  1710,  1712,  1714,  1716,  1718,
+    1720,  1722,  1724,  1726,  1728,  1730,  1733,  1735,  1737,  1739,
+    1740,  1744,  1746,  1748,  1750,  1752,  1754,  1756,  1758,  1760,
+    1762,  1764,  1766,  1768,  1770,  1771,  1774,  1776,  1778,  1780,
+    1782,  1784,  1786,  1788,  1790,  1792,  1793,  1796,  1797,  1800,
+    1802,  1804,  1806,  1808,  1810,  1811,  1816,  1818,  1820,  1821,
+    1826,  1827,  1833,  1834,  1839,  1840,  1845,  1846,  1851,  1852,
+    1857,  1858,  1861,  1862,  1865,  1867,  1869,  1871,  1873,  1875,
+    1877,  1879,  1881,  1883,  1885,  1887,  1889,  1891,  1893,  1895,
+    1897,  1899,  1901,  1903,  1905,  1907,  1911,  1915,  1920,  1924,
+    1926,  1928
 };
 
 /* YYRHS -- A `-1'-separated list of the rules' RHS.  */
-static const yytype_int16 yyrhs[] =
-{
-     144,     0,    -1,   145,    -1,    -1,    -1,   145,   146,   147,
-      -1,   198,    -1,   196,    -1,   153,    -1,   150,    -1,   152,
-      -1,   149,    -1,   186,    -1,   259,    -1,   175,    -1,   155,
-      -1,   216,    -1,   148,    -1,   327,    -1,   289,   120,    -1,
-     120,    -1,   199,   155,    -1,   199,   216,    -1,   199,   183,
-      -1,   199,   148,    -1,    34,    12,   121,   145,   122,    -1,
-      40,   121,   375,   122,    -1,    -1,    40,   299,   151,   121,
-     145,   122,    -1,    40,   299,   123,   291,   120,    -1,   154,
-      -1,   199,   154,    -1,   161,   289,   120,    -1,   302,   161,
-     289,   120,    -1,   156,   300,   261,   120,    -1,   302,   156,
-     300,   261,   120,    -1,    -1,   158,   157,   121,   162,   122,
-      -1,    -1,   161,   289,   159,   168,    -1,    -1,   161,   160,
-     168,    -1,    20,    -1,    19,    -1,    21,    -1,    -1,    -1,
-     162,   163,   165,    -1,   162,   164,   124,    -1,    23,    -1,
-      24,    -1,    25,    -1,   196,    -1,   153,    -1,   167,    -1,
-     186,    -1,   259,    -1,   175,    -1,   155,    -1,   220,    -1,
-     166,    -1,   327,    -1,   119,   381,    -1,   289,   120,    -1,
-     120,    -1,   199,   155,    -1,   199,   220,    -1,    33,   184,
-      -1,    33,   199,   184,    -1,    33,   153,    -1,    33,   221,
-     237,    -1,    -1,   169,    -1,   124,   170,    -1,   171,    -1,
-     170,   125,   171,    -1,   289,    -1,    30,   173,   289,    -1,
-     174,   172,   289,    -1,    -1,    30,    -1,    -1,   174,    -1,
-      23,    -1,    24,    -1,    25,    -1,   176,   300,   261,   120,
-      -1,   302,   176,   300,   261,   120,    -1,    -1,   178,   121,
-     177,   179,   122,    -1,    22,   289,    -1,    22,    -1,    -1,
-     180,    -1,   181,    -1,   180,   125,    -1,   180,   125,   181,
-      -1,   298,    -1,    -1,   298,   123,   182,   355,    -1,   308,
-     292,   298,   123,   376,   120,    -1,   161,   289,   185,    -1,
-     302,   161,   289,   185,    -1,   161,   185,    -1,   302,   161,
-     185,    -1,   121,   375,   122,   376,   120,    -1,   124,   376,
-     120,    -1,   187,    -1,   302,   187,    -1,    39,   308,   195,
-     189,   120,    -1,    39,   156,   300,   188,   120,    -1,    39,
-     302,   156,   300,   188,   120,    -1,    39,   176,   300,   188,
-     120,    -1,    39,   302,   176,   300,   188,   120,    -1,   190,
-     189,    -1,    -1,   189,   125,   190,    -1,   265,   195,    -1,
-     268,    -1,   192,    -1,    -1,    -1,   279,   126,   193,   249,
-     127,   194,   275,    -1,   191,    -1,    41,   197,   120,    -1,
-     289,    -1,    38,   289,    -1,   292,   231,    -1,   292,   226,
-      -1,   295,   292,   231,    -1,   295,   292,   226,    -1,    41,
-      40,   289,   120,    -1,    36,   128,   129,    -1,    -1,    36,
-     128,   200,   201,   129,    -1,   203,    -1,    -1,   201,   125,
-     202,   203,    -1,    -1,    -1,   204,   314,   266,   205,   212,
-      -1,    -1,    -1,   206,   211,   266,   207,   212,    -1,    -1,
-      -1,    -1,   208,   199,    20,   209,   266,   210,   212,    -1,
-      20,    -1,    38,    -1,    -1,   213,    -1,    -1,   123,   214,
-     215,    -1,   363,    -1,   215,   363,    -1,   217,   237,    -1,
-     222,   237,    -1,   218,   237,    -1,   219,   237,    -1,   308,
-     233,    -1,   308,   292,   233,    -1,   292,   241,    -1,   302,
-     292,   241,    -1,   292,   223,    -1,   302,   292,   223,    -1,
-     308,   292,   227,    -1,   221,   237,    -1,   292,   231,   120,
-      -1,   302,   292,   231,   120,    -1,   308,   233,    -1,   222,
-      -1,   241,    -1,   302,   241,    -1,   223,    -1,   302,   223,
-      -1,   308,   227,    -1,    -1,    -1,   226,   126,   224,   249,
-     127,   225,   234,    -1,   232,   308,    -1,    -1,   229,   228,
-     234,    -1,    -1,   231,   126,   230,   249,   127,    -1,   232,
-     351,    -1,    35,    -1,   238,   234,    -1,    -1,   234,   235,
-      -1,    -1,    37,   236,   369,    -1,    26,    -1,   123,    18,
-      -1,   121,   375,   122,    -1,   120,    -1,    -1,   240,   126,
-     239,   249,   127,    -1,   298,    -1,   296,    -1,    -1,    -1,
-     244,   242,   246,   243,   234,    -1,    -1,   240,   126,   245,
-     249,   127,    -1,    -1,   124,   247,    -1,   248,    -1,   247,
-     125,   248,    -1,   289,   381,    -1,    -1,    -1,   250,   251,
-      -1,   253,    -1,    -1,   251,   125,   252,   253,    -1,   251,
-     125,    71,    -1,    -1,    -1,   254,   308,   266,   255,   256,
-      -1,    -1,   257,    -1,    -1,   123,   258,   355,    -1,   308,
-     260,   262,   120,    -1,   268,   256,    -1,    -1,   264,   262,
-      -1,    -1,    -1,   262,   125,   263,   264,    -1,   265,   260,
-      -1,    -1,   322,    -1,   278,   281,    -1,    -1,   270,   276,
-     127,   267,   272,    -1,   279,   281,    -1,    -1,   271,   277,
-     127,   269,   272,    -1,   126,    -1,    10,    -1,    11,    -1,
-      10,    -1,    11,    -1,    -1,    -1,    -1,   126,   273,   249,
-     127,   274,   275,    -1,   282,    -1,    -1,   275,    28,    -1,
-     275,    26,    -1,   275,    37,   381,    -1,   266,    -1,   322,
-     266,    -1,   268,    -1,   322,   268,    -1,    -1,   279,    -1,
-     298,    -1,   298,   124,   280,    -1,    15,    -1,    13,    -1,
-      14,    -1,    -1,   282,    -1,    -1,   283,   284,    -1,   285,
-      -1,   284,   285,    -1,    -1,   130,   286,   287,   131,    -1,
-      -1,    -1,   288,   355,    -1,   290,    -1,   291,    -1,   298,
-      -1,   296,    -1,   292,   290,    -1,   295,   290,    -1,   295,
-     291,    -1,   294,   295,    -1,   296,   295,    -1,   292,   294,
-     295,    -1,   292,   296,   295,    -1,    -1,   292,    36,   293,
-     296,   295,    -1,   299,    -1,    72,    -1,    -1,   299,   128,
-     297,   361,   129,    -1,     4,    -1,     5,    -1,     3,    -1,
-       9,    -1,     8,    -1,     6,    -1,     7,    -1,   132,     4,
-      -1,   132,     5,    -1,   132,     3,    -1,   132,     9,    -1,
-     132,     8,    -1,   132,     6,    -1,   132,     7,    -1,    86,
-      -1,    85,    -1,    88,    -1,    89,    -1,    90,    -1,    91,
-      -1,    92,    -1,    93,    -1,    94,    -1,    95,    -1,    96,
-      -1,    97,    -1,    87,    -1,     3,    -1,     5,    -1,     4,
-      -1,     9,    -1,     8,    -1,     6,    -1,     7,    -1,    -1,
-     300,   301,    -1,   303,    -1,   321,    -1,    39,    -1,    33,
-      -1,   303,    -1,   302,   303,    -1,   304,    -1,   305,    -1,
-     306,    -1,    28,    -1,    34,    -1,    34,    12,    -1,    29,
-      -1,    32,    -1,    30,    -1,    31,    -1,    26,    -1,    27,
-      -1,   306,    -1,   307,   306,    -1,   309,   265,    -1,    -1,
-     312,   310,   300,    -1,    -1,   302,   312,   311,   300,    -1,
-     319,    -1,    -1,    38,   313,   289,    -1,   296,    -1,   291,
-      -1,   161,   289,    -1,    22,   289,    -1,   315,   265,    -1,
-      -1,   318,   316,   300,    -1,    -1,   302,   312,   317,   300,
-      -1,   319,    -1,   296,    -1,   291,    -1,    19,   289,    -1,
-      21,   289,    -1,    22,   289,    -1,   321,    -1,   320,    -1,
-       6,    -1,     7,    -1,     8,    -1,     9,    -1,     3,    -1,
-       4,    -1,     5,    -1,    85,    -1,    86,    -1,    88,    -1,
-      89,    -1,    90,    -1,    91,    -1,    92,    -1,    93,    -1,
-      94,    -1,    95,    -1,    96,    -1,    97,    -1,    87,    -1,
-      74,    -1,    75,    -1,    76,    -1,    77,    -1,    82,    -1,
-      78,    -1,    79,    -1,    80,    -1,    81,    -1,    83,    -1,
-      84,    -1,   323,    -1,   326,    -1,   326,   323,    -1,   133,
-      -1,   134,    -1,    -1,   134,   325,   307,    -1,   324,    -1,
-     326,   324,    -1,    -1,    98,   126,   298,   125,   328,   308,
-     127,    -1,    -1,    -1,    -1,    99,   126,   329,   298,   125,
-     330,   308,   331,   127,    -1,    -1,   100,   126,   332,   298,
-     127,    -1,    -1,   101,   126,   333,   298,   127,    -1,    -1,
-      -1,   102,   126,   298,   125,   334,   308,   335,   125,   376,
-     127,    -1,    -1,   103,   126,   298,   125,   336,   308,   127,
-      -1,    -1,    -1,    -1,   104,   126,   337,   298,   125,   338,
-     308,   339,   127,    -1,   105,   126,   298,   125,   308,   127,
-      -1,    -1,   106,   126,   298,   125,   340,   308,   127,    -1,
-      -1,   110,   126,   298,   125,   341,   308,   127,    -1,    -1,
-     107,   126,   298,   125,   342,   308,   127,    -1,    -1,   111,
-     126,   298,   125,   343,   308,   127,    -1,    -1,   108,   126,
-     298,   125,   344,   308,   127,    -1,    -1,   112,   126,   298,
-     125,   345,   308,   127,    -1,    -1,   109,   126,   298,   125,
-     346,   308,   127,    -1,    -1,   113,   126,   298,   125,   347,
-     308,   127,    -1,    -1,   114,   126,   298,   125,   348,   308,
-     125,    13,   127,    -1,    -1,   115,   126,   298,   125,   349,
-     308,   125,    13,   127,    -1,   116,   126,   298,   127,    -1,
-     117,   126,   298,   127,    -1,   118,   126,   298,   125,   289,
-     350,   127,    -1,    -1,   125,    -1,   126,   127,    -1,   130,
-     131,    -1,    42,   130,   131,    -1,    43,   130,   131,    -1,
-     128,    -1,   129,    -1,   125,    -1,   123,    -1,   352,    -1,
-     135,    -1,   134,    -1,   136,    -1,   137,    -1,   138,    -1,
-     139,    -1,   132,    -1,   133,    -1,   140,    -1,   141,    -1,
-      42,    -1,    43,    -1,    48,    -1,    49,    -1,    50,    -1,
-      51,    -1,    52,    -1,    53,    -1,    54,    -1,    57,    -1,
-      58,    -1,    59,    -1,    60,    -1,    61,    -1,    55,    -1,
-      56,    -1,    62,    -1,    63,    -1,    64,    -1,    65,    -1,
-      66,    -1,    67,    -1,    68,    -1,    69,    -1,    70,    -1,
-      39,    -1,    38,    -1,    20,    -1,    19,    -1,    21,    -1,
-      36,    -1,    23,    -1,    25,    -1,    24,    -1,    26,    -1,
-      29,    -1,    32,    -1,    30,    -1,    34,    -1,    41,    -1,
-      40,    -1,    35,    -1,    22,    -1,    37,    -1,    46,    -1,
-      45,    -1,    44,    -1,    47,    -1,    15,    -1,    13,    -1,
-      14,    -1,    16,    -1,    17,    -1,    12,    -1,    18,    -1,
-     358,    -1,   355,   358,    -1,   367,    -1,   369,    -1,   373,
-      -1,   352,    -1,   124,    -1,   142,    -1,    72,    -1,   353,
-      -1,   354,    -1,   321,    -1,   320,    -1,    -1,   357,   359,
-      -1,   356,    -1,   128,    -1,   129,    -1,   358,    -1,   123,
-      -1,   125,    -1,   359,    -1,   120,    -1,    -1,   361,   364,
-      -1,    -1,   362,   360,    -1,   365,    -1,   356,    -1,   363,
-      -1,   123,    -1,   125,    -1,    -1,   128,   366,   361,   129,
-      -1,    -1,   130,   368,   357,   131,    -1,    -1,   126,   370,
-     357,   127,    -1,    -1,    10,   371,   357,   127,    -1,    -1,
-      11,   372,   357,   127,    -1,    -1,   121,   374,   362,   122,
-      -1,    -1,   375,   377,    -1,    -1,   376,   378,    -1,   378,
-      -1,   120,    -1,   379,    -1,   381,    -1,   380,    -1,    72,
-      -1,    71,    -1,   352,    -1,   124,    -1,   142,    -1,   128,
-      -1,   129,    -1,   123,    -1,   125,    -1,   353,    -1,   354,
-      -1,   319,    -1,    73,    -1,   121,   375,   122,    -1,   130,
-     375,   131,    -1,   382,   375,   127,    -1,   126,    -1,    10,
-      -1,    11,    -1
+static const short int yyrhs[] =
+{
+     158,     0,    -1,   159,    -1,    -1,    -1,   159,   160,   367,
+     161,    -1,   218,    -1,   216,    -1,   219,    -1,   168,    -1,
+     191,    -1,   165,    -1,   167,    -1,   164,    -1,   206,    -1,
+     289,    -1,   192,    -1,   170,    -1,   239,    -1,   162,    -1,
+     163,    -1,   371,    -1,   319,   134,    -1,   134,    -1,   221,
+     170,    -1,   221,   239,    -1,   221,   202,    -1,   221,   162,
+      -1,   221,   219,    -1,    41,    43,   428,   425,   134,    -1,
+      43,   428,   425,   134,    -1,    41,    12,   135,   159,   136,
+      -1,    51,   135,   424,   136,    -1,    -1,    51,   332,   166,
+     135,   159,   136,    -1,    51,   332,   137,   321,   134,    -1,
+     169,    -1,   221,   169,    -1,   176,   367,   177,   134,    -1,
+     176,   367,   134,    -1,   335,   176,   367,   177,   134,    -1,
+     171,   333,   291,   134,    -1,   335,   171,   333,   291,   134,
+      -1,    -1,   173,   172,   135,   180,   136,    -1,    -1,   176,
+     367,   177,   179,   138,   174,   186,    -1,   176,   367,   177,
+     179,    -1,    -1,   176,   367,   138,   175,   186,    -1,   176,
+     367,    -1,    25,    -1,    24,    -1,    26,    -1,   322,   178,
+     367,    -1,   326,   322,   178,   367,    -1,   178,   367,    -1,
+     331,    -1,   327,    -1,    -1,     3,    -1,    -1,    -1,   180,
+     181,   367,   183,    -1,   180,   182,   138,    -1,    28,    -1,
+      29,    -1,    30,    -1,   216,    -1,   219,    -1,   168,    -1,
+     191,    -1,   185,    -1,   206,    -1,   289,    -1,   192,    -1,
+     170,    -1,   243,    -1,   184,    -1,   163,    -1,   371,    -1,
+     133,   431,    -1,   319,   134,    -1,   134,    -1,   221,   170,
+      -1,   221,   243,    -1,   221,   184,    -1,   221,   219,    -1,
+      40,   204,    -1,    40,   221,   204,    -1,    40,   168,    -1,
+      40,   244,   265,    -1,   187,    -1,   186,   139,   367,   187,
+      -1,   319,   233,    -1,    36,   189,   319,   233,    -1,   190,
+     188,   319,   233,    -1,    -1,    36,    -1,    -1,   190,    -1,
+      28,    -1,    29,    -1,    30,    -1,   196,   367,   319,   197,
+     134,    -1,   196,   367,   134,    -1,   335,   196,   367,   319,
+     197,   134,    -1,   193,   333,   291,   134,    -1,   335,   193,
+     333,   291,   134,    -1,    -1,   195,   135,   194,   199,   136,
+      -1,   196,   367,   319,   197,    -1,   196,   367,   197,    -1,
+      27,    -1,    27,    25,    -1,    27,    24,    -1,    -1,    -1,
+     138,   198,   342,    -1,   200,    -1,   199,   139,   200,    -1,
+      -1,   331,    -1,    -1,   331,   137,   201,   399,    -1,   341,
+     233,   322,   331,   203,   134,    -1,   137,   425,    -1,   429,
+      -1,   176,   367,   177,   179,   205,    -1,   335,   176,   367,
+     177,   179,   205,    -1,   176,   367,   205,    -1,   335,   176,
+     367,   205,    -1,   135,   424,   136,   425,   134,    -1,   138,
+     425,   134,    -1,   207,    -1,   335,   207,    -1,    50,   341,
+     215,   209,   134,    -1,    50,   171,   333,   208,   134,    -1,
+      50,   335,   171,   333,   208,   134,    -1,    50,   193,   333,
+     208,   134,    -1,    50,   335,   193,   333,   208,   134,    -1,
+     210,   209,    -1,    -1,   209,   139,   210,    -1,   295,   215,
+      -1,   298,    -1,   212,    -1,    -1,    -1,   233,   309,   140,
+     213,   278,   141,   214,   305,    -1,   211,    -1,    52,   217,
+     134,    -1,   319,    -1,    49,   319,    -1,   322,   254,    -1,
+     322,   249,    -1,   326,   322,   254,    -1,   326,   322,   249,
+      -1,    52,    51,   319,   134,    -1,    -1,    52,   319,   367,
+     137,   220,   341,   296,   134,    -1,    43,   142,   414,    -1,
+      -1,    43,   142,   222,   223,   414,    -1,   225,    -1,    -1,
+     223,   139,   224,   225,    -1,    -1,    -1,   226,   352,   296,
+     227,   235,    -1,    -1,    -1,   228,   234,   296,   229,   235,
+      -1,    -1,    -1,    -1,   230,   221,    25,   231,   296,   232,
+     235,    -1,    -1,    83,    -1,    25,    -1,    49,    -1,    -1,
+     236,    -1,    -1,   137,   237,   238,    -1,   410,    -1,   238,
+     410,    -1,   240,   265,    -1,   245,   265,    -1,   241,   265,
+      -1,   242,   265,    -1,   341,   233,   256,    -1,   341,   233,
+     322,   256,    -1,   322,   270,    -1,   335,   322,   270,    -1,
+     322,   246,    -1,   335,   322,   246,    -1,   341,   233,   322,
+     250,    -1,   244,   265,    -1,   322,   254,   367,   134,    -1,
+     335,   322,   254,   367,   134,    -1,   341,   233,   256,    -1,
+     245,    -1,   270,    -1,   335,   270,    -1,   246,    -1,   335,
+     246,    -1,   341,   233,   250,    -1,    -1,    -1,   249,   140,
+     247,   278,   141,   367,   248,   257,   262,    -1,   255,   341,
+      -1,    -1,   252,   251,   257,   262,    -1,    -1,   254,   367,
+     140,   253,   278,   141,   367,    -1,   255,   395,    -1,    42,
+      -1,   268,   257,   262,    -1,    -1,   257,   258,    -1,    -1,
+      44,   259,   418,    -1,    31,    -1,   137,    18,    -1,     3,
+      -1,   260,   418,    -1,   260,    -1,   261,    -1,    47,    -1,
+     137,    54,    -1,   137,    55,    -1,    -1,   263,    -1,    -1,
+      66,   264,   348,    -1,   135,   424,   136,    -1,   266,    -1,
+     134,    -1,    45,   275,   135,   424,   136,   267,    -1,    -1,
+     267,    46,   431,   135,   424,   136,    -1,    -1,   320,   367,
+     140,   269,   278,   141,   367,    -1,    -1,    -1,   273,   271,
+     275,   272,   257,   262,    -1,    -1,   320,   140,   274,   278,
+     141,   367,    -1,    -1,   138,   276,    -1,   277,    -1,   276,
+     139,   277,    -1,   319,   431,   233,    -1,    -1,    -1,   279,
+     280,    -1,   282,    -1,    -1,   280,   139,   281,   282,    -1,
+     280,   139,    83,    -1,    83,    -1,    -1,    -1,   283,   341,
+     296,   284,   285,    -1,    -1,   286,    -1,    -1,   137,   287,
+     399,    -1,    -1,   288,   422,    -1,   341,   290,   292,   134,
+      -1,   298,   285,    -1,    -1,   294,   292,    -1,    -1,    -1,
+     292,   139,   293,   294,    -1,   295,   290,    -1,    -1,   360,
+      -1,   233,   308,   302,    -1,    -1,   300,   367,   306,   141,
+     297,   302,    -1,   233,   309,   311,    -1,    -1,   300,   307,
+     141,   299,   302,    -1,    -1,    10,   301,   365,    -1,    11,
+      -1,    -1,    -1,    -1,   140,   303,   278,   141,   367,   304,
+     305,    -1,   312,    -1,    -1,   305,    33,    -1,   305,    31,
+      -1,   305,    44,   431,    -1,   305,    47,   431,    -1,   305,
+      47,    -1,   296,    -1,   360,   296,    -1,   298,    -1,   360,
+     298,    -1,    -1,   309,    -1,   320,   367,    -1,   320,   367,
+     138,   310,    -1,    15,    -1,    13,    -1,    14,    -1,    -1,
+     312,    -1,    -1,   313,   314,    -1,   315,    -1,   314,   315,
+      -1,    -1,   143,   316,   317,   144,   367,    -1,    -1,    -1,
+     318,   399,    -1,   320,    -1,   321,    -1,   331,    -1,   327,
+      -1,   329,    -1,   324,   178,    -1,   324,   329,    -1,   322,
+     320,    -1,   326,   320,    -1,   326,   321,    -1,   325,   326,
+      -1,   327,   326,    -1,   329,   326,    -1,   322,   325,   326,
+      -1,   322,   327,   326,    -1,   322,   329,   326,    -1,    -1,
+     322,    43,   323,   327,   326,    -1,   145,    -1,   332,    -1,
+      84,    -1,    -1,   332,   142,   328,   408,   414,    -1,    -1,
+      48,   330,   418,    -1,     4,    -1,     5,    -1,     3,    -1,
+       9,    -1,     8,    -1,     6,    -1,     7,    -1,    22,    -1,
+      21,    -1,    20,    -1,   102,    -1,   103,    -1,   104,    -1,
+     105,    -1,   106,    -1,   107,    -1,   108,    -1,   109,    -1,
+     110,    -1,   111,    -1,   101,    -1,     3,    -1,     5,    -1,
+       4,    -1,     9,    -1,     8,    -1,     6,    -1,     7,    -1,
+      -1,   333,   334,   367,    -1,   336,    -1,   359,    -1,    50,
+      -1,    40,    -1,   336,   367,    -1,   335,   336,   367,    -1,
+     337,    -1,   338,    -1,   339,    -1,    39,    -1,    33,    -1,
+      41,    -1,    41,    12,    -1,    34,    -1,    35,    -1,    38,
+      -1,    36,    -1,    37,    -1,    31,    -1,    32,    -1,   339,
+      -1,   340,   339,    -1,   342,   295,    -1,    -1,   345,   343,
+     333,    -1,    -1,   335,   345,   344,   333,    -1,   346,    -1,
+     176,   367,   177,    -1,   196,   367,   319,   367,    -1,   357,
+      -1,   329,    -1,    -1,    49,   347,   319,   367,    -1,   327,
+     367,    -1,   321,   367,    -1,   349,   295,    -1,    -1,   346,
+     350,   333,    -1,    -1,   335,   346,   351,   333,    -1,   353,
+     295,    -1,    -1,   356,   354,   333,    -1,    -1,   335,   345,
+     355,   333,    -1,   357,    -1,   329,    -1,   327,    -1,   321,
+      -1,    24,   319,    -1,    26,   319,    -1,   196,   319,    -1,
+     359,   367,    -1,   358,   367,    -1,     6,    -1,     7,    -1,
+       8,    -1,     9,    -1,     3,    -1,     4,    -1,     5,    -1,
+      22,    -1,    20,    -1,    21,    -1,   102,    -1,   103,    -1,
+     104,    -1,   105,    -1,   106,    -1,   107,    -1,   108,    -1,
+     109,    -1,   110,    -1,   111,    -1,   101,    -1,    86,    -1,
+      87,    -1,    88,    -1,    89,    -1,    90,    -1,    95,    -1,
+      96,    -1,    97,    -1,    98,    -1,    91,    -1,    92,    -1,
+      93,    -1,    94,    -1,    99,    -1,   100,    -1,   361,    -1,
+     362,    -1,   366,    -1,   366,   361,    -1,   146,   367,    -1,
+      77,   367,    -1,    -1,   147,   367,   364,   365,    -1,    -1,
+     340,    -1,   363,    -1,   366,   363,    -1,    -1,   367,   368,
+      -1,    -1,    -1,    23,   369,   404,   370,   144,   144,    -1,
+      -1,   112,   140,   331,   139,   372,   341,   141,    -1,    -1,
+      -1,    -1,   113,   140,   373,   331,   139,   374,   341,   375,
+     141,    -1,    -1,   114,   140,   376,   331,   141,    -1,    -1,
+     115,   140,   377,   331,   141,    -1,    -1,    -1,   116,   140,
+     331,   139,   378,   341,   379,   139,   425,   141,    -1,    -1,
+     117,   140,   331,   139,   380,   341,   141,    -1,    -1,    -1,
+      -1,   118,   140,   381,   331,   139,   382,   341,   383,   141,
+      -1,   119,   140,   331,   139,   341,   141,    -1,    -1,   120,
+     140,   331,   139,   384,   341,   141,    -1,    -1,   124,   140,
+     331,   139,   385,   341,   141,    -1,    -1,   121,   140,   331,
+     139,   386,   341,   141,    -1,    -1,   125,   140,   331,   139,
+     387,   341,   141,    -1,    -1,   122,   140,   331,   139,   388,
+     341,   141,    -1,    -1,   126,   140,   331,   139,   389,   341,
+     141,    -1,    -1,   123,   140,   331,   139,   390,   341,   141,
+      -1,    -1,   127,   140,   331,   139,   391,   341,   141,    -1,
+      -1,   128,   140,   331,   139,   392,   341,   139,    13,   141,
+      -1,    -1,   129,   140,   331,   139,   393,   341,   139,    13,
+     141,    -1,   130,   140,   331,   141,    -1,   131,   140,   331,
+     141,    -1,   132,   140,   331,   139,   319,   394,   141,    -1,
+      -1,   139,    -1,   140,   141,    -1,   143,   144,    -1,    53,
+     143,   144,    -1,    54,   143,   144,    -1,   142,    -1,   148,
+      -1,   139,    -1,   137,    -1,    63,   148,    -1,    63,    63,
+      -1,    12,     3,    -1,   396,    -1,   149,    -1,   147,    -1,
+     150,    -1,   151,    -1,   152,    -1,   153,    -1,   145,    -1,
+     146,    -1,   154,    -1,   155,    -1,    53,    -1,    54,    -1,
+      60,    -1,    61,    -1,    62,    -1,    64,    -1,    65,    -1,
+      66,    -1,    69,    -1,    70,    -1,    71,    -1,    72,    -1,
+      73,    -1,    67,    -1,    68,    -1,    74,    -1,    75,    -1,
+      76,    -1,    77,    -1,    78,    -1,    79,    -1,    80,    -1,
+      81,    -1,    82,    -1,    50,    -1,    49,    -1,    25,    -1,
+      24,    -1,    26,    -1,    43,    -1,    28,    -1,    30,    -1,
+      29,    -1,    31,    -1,    34,    -1,    35,    -1,    39,    -1,
+      38,    -1,    36,    -1,    37,    -1,    48,    -1,    55,    -1,
+      41,    -1,    52,    -1,    51,    -1,    42,    -1,    27,    -1,
+      44,    -1,    47,    -1,    58,    -1,    57,    -1,    56,    -1,
+      59,    -1,    15,    -1,    13,    -1,    14,    -1,    16,    -1,
+      17,    -1,    12,    -1,    18,    -1,    19,    -1,   400,    -1,
+     399,   400,    -1,   402,    -1,   412,    -1,   142,    -1,    -1,
+     148,   401,   403,    -1,    63,    -1,   403,    -1,    84,    -1,
+     415,    -1,   418,    -1,   422,    -1,   396,    -1,   138,    -1,
+     156,    -1,   397,    -1,   398,    -1,   359,    -1,   358,    -1,
+      -1,   404,   406,    -1,   402,    -1,   142,    -1,   148,    -1,
+      63,    -1,   405,    -1,   137,    -1,   139,    -1,   406,    -1,
+     134,    -1,    -1,   408,   411,    -1,    -1,   409,   407,    -1,
+     412,    -1,   402,    -1,   410,    -1,   137,    -1,   139,    -1,
+      -1,   142,   413,   408,   414,    -1,   148,    -1,    63,    -1,
+      -1,   143,   416,   404,   144,    -1,    -1,    23,   417,   404,
+     144,   144,    -1,    -1,   140,   419,   404,   141,    -1,    -1,
+      10,   420,   404,   141,    -1,    -1,    11,   421,   404,   141,
+      -1,    -1,   135,   423,   409,   136,    -1,    -1,   424,   426,
+      -1,    -1,   425,   427,    -1,   427,    -1,   134,    -1,   428,
+      -1,   142,    -1,   429,    -1,   431,    -1,   430,    -1,    84,
+      -1,    83,    -1,   396,    -1,    63,    -1,   138,    -1,   156,
+      -1,   148,    -1,   137,    -1,   139,    -1,   397,    -1,   398,
+      -1,   359,    -1,   358,    -1,    85,    -1,   135,   424,   136,
+      -1,   143,   424,   144,    -1,    23,   424,   144,   144,    -1,
+     432,   424,   141,    -1,   140,    -1,    10,    -1,    11,    -1
 };
 
 /* YYRLINE[YYN] -- source line where rule number YYN was defined.  */
-static const yytype_uint16 yyrline[] =
-{
-       0,  1478,  1478,  1480,  1482,  1481,  1492,  1493,  1494,  1495,
-    1496,  1497,  1498,  1499,  1500,  1501,  1502,  1503,  1504,  1505,
-    1506,  1509,  1510,  1511,  1512,  1519,  1526,  1527,  1527,  1531,
-    1538,  1539,  1542,  1543,  1546,  1547,  1550,  1550,  1564,  1564,
-    1566,  1566,  1570,  1571,  1572,  1574,  1576,  1575,  1584,  1588,
-    1589,  1590,  1593,  1594,  1595,  1596,  1597,  1598,  1599,  1600,
-    1601,  1602,  1603,  1604,  1605,  1608,  1609,  1612,  1613,  1614,
-    1615,  1617,  1618,  1621,  1624,  1625,  1628,  1630,  1632,  1636,
-    1637,  1640,  1641,  1644,  1645,  1646,  1657,  1658,  1662,  1662,
-    1675,  1676,  1678,  1679,  1682,  1683,  1684,  1687,  1688,  1688,
-    1696,  1699,  1700,  1701,  1702,  1705,  1706,  1714,  1715,  1718,
-    1719,  1721,  1723,  1725,  1729,  1731,  1732,  1735,  1738,  1739,
-    1742,  1743,  1742,  1747,  1781,  1784,  1785,  1786,  1788,  1790,
-    1792,  1796,  1803,  1806,  1805,  1823,  1825,  1824,  1829,  1831,
-    1829,  1833,  1835,  1833,  1837,  1838,  1840,  1837,  1851,  1852,
-    1854,  1855,  1858,  1858,  1868,  1869,  1877,  1878,  1879,  1880,
-    1883,  1886,  1887,  1888,  1891,  1892,  1893,  1896,  1897,  1898,
-    1901,  1902,  1903,  1904,  1907,  1908,  1909,  1913,  1917,  1912,
-    1929,  1933,  1933,  1944,  1943,  1952,  1956,  1959,  1968,  1969,
-    1972,  1972,  1973,  1974,  1982,  1983,  1987,  1986,  1994,  1995,
-    2003,  2004,  2003,  2022,  2022,  2025,  2026,  2029,  2030,  2033,
-    2039,  2040,  2040,  2043,  2044,  2044,  2046,  2050,  2052,  2050,
-    2076,  2077,  2080,  2080,  2088,  2091,  2150,  2151,  2153,  2154,
-    2154,  2157,  2160,  2161,  2165,  2166,  2166,  2185,  2186,  2186,
-    2204,  2205,  2207,  2211,  2213,  2216,  2217,  2218,  2217,  2223,
-    2225,  2226,  2227,  2228,  2231,  2232,  2236,  2237,  2241,  2242,
-    2245,  2246,  2249,  2250,  2251,  2254,  2255,  2258,  2258,  2261,
-    2262,  2265,  2265,  2268,  2269,  2269,  2276,  2277,  2280,  2281,
-    2284,  2286,  2288,  2292,  2294,  2296,  2298,  2300,  2300,  2305,
-    2308,  2311,  2311,  2326,  2327,  2328,  2329,  2330,  2331,  2332,
-    2333,  2334,  2335,  2336,  2337,  2338,  2339,  2340,  2341,  2342,
-    2343,  2344,  2345,  2346,  2347,  2348,  2349,  2350,  2351,  2352,
-    2359,  2360,  2361,  2362,  2363,  2364,  2365,  2372,  2373,  2376,
-    2377,  2379,  2380,  2383,  2384,  2387,  2388,  2389,  2392,  2393,
-    2394,  2395,  2398,  2399,  2400,  2403,  2404,  2407,  2408,  2417,
-    2420,  2420,  2422,  2422,  2426,  2427,  2427,  2429,  2431,  2433,
-    2435,  2439,  2442,  2442,  2444,  2444,  2448,  2449,  2451,  2453,
-    2455,  2457,  2461,  2462,  2465,  2466,  2467,  2468,  2469,  2470,
-    2471,  2472,  2473,  2474,  2475,  2476,  2477,  2478,  2479,  2480,
-    2481,  2482,  2483,  2484,  2487,  2488,  2489,  2490,  2491,  2492,
-    2493,  2494,  2495,  2496,  2497,  2517,  2518,  2519,  2522,  2525,
-    2526,  2526,  2541,  2542,  2559,  2559,  2569,  2570,  2570,  2569,
-    2579,  2579,  2589,  2589,  2598,  2598,  2598,  2631,  2630,  2641,
-    2642,  2642,  2641,  2651,  2669,  2669,  2674,  2674,  2679,  2679,
-    2684,  2684,  2689,  2689,  2694,  2694,  2699,  2699,  2704,  2704,
-    2709,  2709,  2726,  2726,  2740,  2777,  2815,  2852,  2853,  2860,
-    2861,  2862,  2863,  2864,  2865,  2866,  2867,  2868,  2871,  2872,
-    2873,  2874,  2875,  2876,  2877,  2878,  2879,  2880,  2881,  2882,
-    2883,  2884,  2885,  2886,  2887,  2888,  2889,  2890,  2891,  2892,
-    2893,  2894,  2895,  2896,  2897,  2898,  2899,  2900,  2901,  2902,
-    2903,  2904,  2905,  2908,  2909,  2910,  2911,  2912,  2913,  2914,
-    2915,  2916,  2917,  2918,  2919,  2920,  2921,  2922,  2923,  2924,
-    2925,  2926,  2927,  2928,  2929,  2930,  2933,  2934,  2935,  2936,
-    2937,  2938,  2939,  2946,  2947,  2950,  2951,  2952,  2953,  2984,
-    2984,  2985,  2986,  2987,  2988,  2989,  3012,  3013,  3015,  3016,
-    3017,  3019,  3020,  3021,  3023,  3024,  3026,  3027,  3029,  3030,
-    3033,  3034,  3037,  3038,  3039,  3043,  3042,  3056,  3056,  3060,
-    3060,  3062,  3062,  3064,  3064,  3068,  3068,  3073,  3074,  3076,
-    3077,  3080,  3081,  3084,  3085,  3086,  3087,  3088,  3089,  3090,
-    3090,  3090,  3090,  3090,  3090,  3091,  3091,  3092,  3093,  3096,
-    3099,  3102,  3105,  3105,  3105
+static const unsigned short int yyrline[] =
+{
+       0,  1467,  1467,  1469,  1471,  1470,  1481,  1482,  1483,  1484,
+    1485,  1486,  1487,  1488,  1489,  1490,  1491,  1492,  1493,  1494,
+    1495,  1496,  1497,  1498,  1501,  1502,  1503,  1504,  1505,  1508,
+    1509,  1516,  1523,  1524,  1524,  1528,  1535,  1536,  1539,  1540,
+    1541,  1544,  1545,  1548,  1548,  1563,  1562,  1568,  1574,  1573,
+    1578,  1584,  1585,  1586,  1589,  1591,  1593,  1596,  1597,  1600,
+    1601,  1603,  1605,  1604,  1613,  1617,  1618,  1619,  1622,  1623,
+    1624,  1625,  1626,  1627,  1628,  1629,  1630,  1631,  1632,  1633,
+    1634,  1635,  1636,  1637,  1640,  1641,  1642,  1643,  1646,  1647,
+    1648,  1649,  1652,  1653,  1656,  1658,  1661,  1666,  1667,  1670,
+    1671,  1674,  1675,  1676,  1687,  1688,  1689,  1693,  1694,  1698,
+    1698,  1711,  1717,  1725,  1726,  1727,  1730,  1731,  1731,  1735,
+    1736,  1738,  1739,  1740,  1740,  1748,  1752,  1753,  1756,  1758,
+    1760,  1761,  1764,  1765,  1773,  1774,  1777,  1778,  1780,  1782,
+    1784,  1788,  1790,  1791,  1794,  1797,  1798,  1801,  1802,  1801,
+    1806,  1840,  1843,  1844,  1845,  1847,  1849,  1851,  1855,  1858,
+    1858,  1889,  1892,  1891,  1909,  1911,  1910,  1915,  1917,  1915,
+    1919,  1921,  1919,  1923,  1924,  1926,  1923,  1937,  1938,  1941,
+    1942,  1944,  1945,  1948,  1948,  1958,  1959,  1967,  1968,  1969,
+    1970,  1973,  1976,  1977,  1978,  1981,  1982,  1983,  1986,  1987,
+    1988,  1992,  1993,  1994,  1995,  1998,  1999,  2000,  2004,  2009,
+    2003,  2021,  2025,  2025,  2037,  2036,  2045,  2049,  2052,  2061,
+    2062,  2065,  2065,  2066,  2067,  2073,  2078,  2079,  2080,  2083,
+    2086,  2087,  2089,  2090,  2093,  2093,  2101,  2102,  2103,  2106,
+    2108,  2109,  2113,  2112,  2125,  2126,  2125,  2145,  2145,  2149,
+    2150,  2153,  2154,  2157,  2163,  2164,  2164,  2167,  2168,  2168,
+    2170,  2172,  2176,  2178,  2176,  2202,  2203,  2206,  2206,  2208,
+    2208,  2216,  2219,  2278,  2279,  2281,  2282,  2282,  2285,  2288,
+    2289,  2293,  2304,  2304,  2323,  2325,  2325,  2343,  2343,  2345,
+    2349,  2350,  2351,  2350,  2356,  2358,  2359,  2360,  2361,  2362,
+    2363,  2366,  2367,  2371,  2372,  2376,  2377,  2380,  2381,  2385,
+    2386,  2387,  2390,  2391,  2394,  2394,  2397,  2398,  2401,  2401,
+    2405,  2406,  2406,  2413,  2414,  2417,  2418,  2419,  2420,  2421,
+    2424,  2426,  2428,  2432,  2434,  2436,  2438,  2440,  2442,  2444,
+    2444,  2449,  2452,  2455,  2458,  2458,  2466,  2466,  2475,  2476,
+    2477,  2478,  2479,  2480,  2481,  2482,  2483,  2484,  2485,  2486,
+    2487,  2488,  2489,  2490,  2491,  2492,  2493,  2494,  2495,  2502,
+    2503,  2504,  2505,  2506,  2507,  2508,  2514,  2515,  2518,  2519,
+    2521,  2522,  2525,  2526,  2529,  2530,  2531,  2532,  2535,  2536,
+    2537,  2538,  2539,  2543,  2544,  2545,  2548,  2549,  2552,  2553,
+    2561,  2564,  2564,  2566,  2566,  2570,  2571,  2573,  2577,  2578,
+    2580,  2580,  2582,  2584,  2588,  2591,  2591,  2593,  2593,  2597,
+    2600,  2600,  2602,  2602,  2606,  2607,  2609,  2611,  2613,  2615,
+    2617,  2621,  2622,  2625,  2626,  2627,  2628,  2629,  2630,  2631,
+    2632,  2633,  2634,  2635,  2636,  2637,  2638,  2639,  2640,  2641,
+    2642,  2643,  2644,  2645,  2648,  2649,  2650,  2651,  2652,  2653,
+    2654,  2655,  2656,  2657,  2658,  2659,  2660,  2661,  2662,  2682,
+    2683,  2684,  2685,  2688,  2692,  2696,  2696,  2700,  2701,  2716,
+    2717,  2733,  2734,  2737,  2737,  2737,  2744,  2744,  2754,  2755,
+    2755,  2754,  2764,  2764,  2774,  2774,  2783,  2783,  2783,  2816,
+    2815,  2826,  2827,  2827,  2826,  2836,  2854,  2854,  2859,  2859,
+    2864,  2864,  2869,  2869,  2874,  2874,  2879,  2879,  2884,  2884,
+    2889,  2889,  2894,  2894,  2911,  2911,  2925,  2962,  3000,  3037,
+    3038,  3045,  3046,  3047,  3048,  3049,  3050,  3051,  3052,  3053,
+    3054,  3055,  3056,  3059,  3060,  3061,  3062,  3063,  3064,  3065,
+    3066,  3067,  3068,  3069,  3070,  3071,  3072,  3073,  3074,  3075,
+    3076,  3077,  3078,  3079,  3080,  3081,  3082,  3083,  3084,  3085,
+    3086,  3087,  3088,  3089,  3090,  3091,  3092,  3095,  3096,  3097,
+    3098,  3099,  3100,  3101,  3102,  3103,  3104,  3105,  3106,  3107,
+    3108,  3109,  3110,  3111,  3112,  3113,  3114,  3115,  3116,  3117,
+    3118,  3119,  3120,  3121,  3122,  3123,  3126,  3127,  3128,  3129,
+    3130,  3131,  3132,  3133,  3140,  3141,  3144,  3145,  3146,  3147,
+    3147,  3148,  3151,  3152,  3155,  3156,  3157,  3158,  3188,  3188,
+    3189,  3190,  3191,  3192,  3215,  3216,  3219,  3220,  3221,  3222,
+    3225,  3226,  3227,  3230,  3231,  3233,  3234,  3236,  3237,  3240,
+    3241,  3244,  3245,  3246,  3250,  3249,  3263,  3264,  3267,  3267,
+    3269,  3269,  3273,  3273,  3275,  3275,  3277,  3277,  3281,  3281,
+    3286,  3287,  3289,  3290,  3293,  3294,  3297,  3298,  3301,  3302,
+    3303,  3304,  3305,  3306,  3307,  3308,  3308,  3308,  3308,  3308,
+    3309,  3310,  3311,  3312,  3313,  3316,  3319,  3320,  3323,  3326,
+    3326,  3326
 };
 #endif
 
@@ -2127,1850 +2043,3698 @@ static const char *const yytname[] =
   "$end", "error", "$undefined", "ID", "VTK_ID", "QT_ID", "StdString",
   "UnicodeString", "OSTREAM", "ISTREAM", "LP", "LA", "STRING_LITERAL",
   "INT_LITERAL", "HEX_LITERAL", "OCT_LITERAL", "FLOAT_LITERAL",
-  "CHAR_LITERAL", "ZERO", "STRUCT", "CLASS", "UNION", "ENUM", "PUBLIC",
-  "PRIVATE", "PROTECTED", "CONST", "VOLATILE", "MUTABLE", "STATIC",
-  "VIRTUAL", "EXPLICIT", "INLINE", "FRIEND", "EXTERN", "OPERATOR",
-  "TEMPLATE", "THROW", "TYPENAME", "TYPEDEF", "NAMESPACE", "USING", "NEW",
-  "DELETE", "STATIC_CAST", "DYNAMIC_CAST", "CONST_CAST",
-  "REINTERPRET_CAST", "OP_LSHIFT_EQ", "OP_RSHIFT_EQ", "OP_LSHIFT",
-  "OP_RSHIFT", "OP_DOT_POINTER", "OP_ARROW_POINTER", "OP_ARROW", "OP_INCR",
-  "OP_DECR", "OP_PLUS_EQ", "OP_MINUS_EQ", "OP_TIMES_EQ", "OP_DIVIDE_EQ",
+  "CHAR_LITERAL", "ZERO", "NULLPTR", "SSIZE_T", "SIZE_T", "NULLPTR_T",
+  "BEGIN_ATTRIB", "STRUCT", "CLASS", "UNION", "ENUM", "PUBLIC", "PRIVATE",
+  "PROTECTED", "CONST", "VOLATILE", "MUTABLE", "STATIC", "THREAD_LOCAL",
+  "VIRTUAL", "EXPLICIT", "INLINE", "CONSTEXPR", "FRIEND", "EXTERN",
+  "OPERATOR", "TEMPLATE", "THROW", "TRY", "CATCH", "NOEXCEPT", "DECLTYPE",
+  "TYPENAME", "TYPEDEF", "NAMESPACE", "USING", "NEW", "DELETE", "DEFAULT",
+  "STATIC_CAST", "DYNAMIC_CAST", "CONST_CAST", "REINTERPRET_CAST",
+  "OP_LSHIFT_EQ", "OP_RSHIFT_EQ", "OP_LSHIFT", "OP_RSHIFT_A",
+  "OP_DOT_POINTER", "OP_ARROW_POINTER", "OP_ARROW", "OP_INCR", "OP_DECR",
+  "OP_PLUS_EQ", "OP_MINUS_EQ", "OP_TIMES_EQ", "OP_DIVIDE_EQ",
   "OP_REMAINDER_EQ", "OP_AND_EQ", "OP_OR_EQ", "OP_XOR_EQ", "OP_LOGIC_AND",
   "OP_LOGIC_OR", "OP_LOGIC_EQ", "OP_LOGIC_NEQ", "OP_LOGIC_LEQ",
-  "OP_LOGIC_GEQ", "ELLIPSIS", "DOUBLE_COLON", "OTHER", "VOID", "BOOL",
-  "FLOAT", "DOUBLE", "INT", "SHORT", "LONG", "INT64__", "CHAR", "SIGNED",
-  "UNSIGNED", "SSIZE_T", "SIZE_T", "IdType", "TypeInt8", "TypeUInt8",
-  "TypeInt16", "TypeUInt16", "TypeInt32", "TypeUInt32", "TypeInt64",
-  "TypeUInt64", "TypeFloat32", "TypeFloat64", "SetMacro", "GetMacro",
-  "SetStringMacro", "GetStringMacro", "SetClampMacro", "SetObjectMacro",
-  "GetObjectMacro", "BooleanMacro", "SetVector2Macro", "SetVector3Macro",
-  "SetVector4Macro", "SetVector6Macro", "GetVector2Macro",
-  "GetVector3Macro", "GetVector4Macro", "GetVector6Macro",
-  "SetVectorMacro", "GetVectorMacro", "ViewportCoordinateMacro",
-  "WorldCoordinateMacro", "TypeMacro", "VTK_BYTE_SWAP_DECL", "';'", "'{'",
-  "'}'", "'='", "':'", "','", "'('", "')'", "'<'", "'>'", "'['", "']'",
-  "'~'", "'&'", "'*'", "'%'", "'/'", "'-'", "'+'", "'!'", "'|'", "'^'",
-  "'.'", "$accept", "translation_unit", "opt_declaration_seq", "$@1",
-  "declaration", "template_declaration", "linkage_specification",
+  "OP_LOGIC_GEQ", "ELLIPSIS", "DOUBLE_COLON", "OTHER", "AUTO", "VOID",
+  "BOOL", "FLOAT", "DOUBLE", "INT", "SHORT", "LONG", "INT64__", "CHAR",
+  "CHAR16_T", "CHAR32_T", "WCHAR_T", "SIGNED", "UNSIGNED", "IdType",
+  "TypeInt8", "TypeUInt8", "TypeInt16", "TypeUInt16", "TypeInt32",
+  "TypeUInt32", "TypeInt64", "TypeUInt64", "TypeFloat32", "TypeFloat64",
+  "SetMacro", "GetMacro", "SetStringMacro", "GetStringMacro",
+  "SetClampMacro", "SetObjectMacro", "GetObjectMacro", "BooleanMacro",
+  "SetVector2Macro", "SetVector3Macro", "SetVector4Macro",
+  "SetVector6Macro", "GetVector2Macro", "GetVector3Macro",
+  "GetVector4Macro", "GetVector6Macro", "SetVectorMacro", "GetVectorMacro",
+  "ViewportCoordinateMacro", "WorldCoordinateMacro", "TypeMacro",
+  "VTK_BYTE_SWAP_DECL", "';'", "'{'", "'}'", "'='", "':'", "','", "'('",
+  "')'", "'<'", "'['", "']'", "'~'", "'&'", "'*'", "'>'", "'%'", "'/'",
+  "'-'", "'+'", "'!'", "'|'", "'^'", "'.'", "$accept", "translation_unit",
+  "opt_declaration_seq", "$@1", "declaration", "template_declaration",
+  "explicit_instantiation", "linkage_specification",
   "namespace_definition", "$@2", "namespace_alias_definition",
   "forward_declaration", "simple_forward_declaration", "class_definition",
   "class_specifier", "$@3", "class_head", "$@4", "$@5", "class_key",
-  "member_specification", "$@6", "member_access_specifier",
-  "member_declaration", "template_member_declaration",
-  "friend_declaration", "opt_base_clause", "base_clause",
+  "class_head_name", "class_name", "opt_final", "member_specification",
+  "$@6", "member_access_specifier", "member_declaration",
+  "template_member_declaration", "friend_declaration",
   "base_specifier_list", "base_specifier", "opt_virtual",
-  "opt_access_specifier", "access_specifier", "enum_definition",
-  "enum_specifier", "$@7", "enum_head", "opt_enumerator_list",
-  "enumerator_list", "enumerator_definition", "$@8",
-  "nested_variable_initialization", "ignored_class", "ignored_class_body",
-  "typedef_declaration", "basic_typedef_declaration",
-  "typedef_declarator_list", "typedef_declarator_list_cont",
-  "typedef_declarator", "typedef_direct_declarator",
-  "function_direct_declarator", "$@9", "$@10", "typedef_declarator_id",
-  "using_declaration", "using_id", "using_directive", "template_head",
-  "$@11", "template_parameter_list", "$@12", "template_parameter", "$@13",
-  "$@14", "$@15", "$@16", "$@17", "$@18", "$@19", "class_or_typename",
-  "opt_template_parameter_initializer", "template_parameter_initializer",
-  "$@20", "template_parameter_value", "function_definition",
-  "function_declaration", "nested_method_declaration",
-  "nested_operator_declaration", "method_definition", "method_declaration",
-  "operator_declaration", "conversion_function", "$@21", "$@22",
-  "conversion_function_id", "operator_function_nr", "$@23",
-  "operator_function_sig", "$@24", "operator_function_id", "operator_sig",
-  "function_nr", "function_trailer_clause", "function_trailer", "$@25",
-  "function_body", "function_sig", "$@26", "function_name",
-  "structor_declaration", "$@27", "$@28", "structor_sig", "$@29",
-  "opt_ctor_initializer", "mem_initializer_list", "mem_initializer",
-  "parameter_declaration_clause", "$@30", "parameter_list", "$@31",
-  "parameter_declaration", "$@32", "$@33", "opt_initializer",
-  "initializer", "$@34", "variable_declaration", "init_declarator_id",
-  "opt_declarator_list", "declarator_list_cont", "$@35", "init_declarator",
-  "opt_ptr_operator_seq", "direct_abstract_declarator", "$@36",
-  "direct_declarator", "$@37", "p_or_lp_or_la", "lp_or_la",
-  "opt_array_or_parameters", "$@38", "$@39", "function_qualifiers",
-  "abstract_declarator", "declarator", "opt_declarator_id",
-  "declarator_id", "bitfield_size", "opt_array_decorator_seq",
-  "array_decorator_seq", "$@40", "array_decorator_seq_impl",
-  "array_decorator", "$@41", "array_size_specifier", "$@42",
-  "id_expression", "unqualified_id", "qualified_id",
-  "nested_name_specifier", "$@43", "identifier_sig", "scope_operator_sig",
-  "template_id", "$@44", "simple_id", "identifier",
+  "opt_access_specifier", "access_specifier", "opaque_enum_declaration",
+  "enum_definition", "enum_specifier", "$@7", "enum_head", "enum_key",
+  "opt_enum_base", "$@8", "enumerator_list", "enumerator_definition",
+  "$@9", "nested_variable_initialization", "ignored_initializer",
+  "ignored_class", "ignored_class_body", "typedef_declaration",
+  "basic_typedef_declaration", "typedef_declarator_list",
+  "typedef_declarator_list_cont", "typedef_declarator",
+  "typedef_direct_declarator", "function_direct_declarator", "$@10",
+  "$@11", "typedef_declarator_id", "using_declaration", "using_id",
+  "using_directive", "alias_declaration", "$@12", "template_head", "$@13",
+  "template_parameter_list", "$@14", "template_parameter", "$@15", "$@16",
+  "$@17", "$@18", "$@19", "$@20", "$@21", "opt_ellipsis",
+  "class_or_typename", "opt_template_parameter_initializer",
+  "template_parameter_initializer", "$@22", "template_parameter_value",
+  "function_definition", "function_declaration",
+  "nested_method_declaration", "nested_operator_declaration",
+  "method_definition", "method_declaration", "operator_declaration",
+  "conversion_function", "$@23", "$@24", "conversion_function_id",
+  "operator_function_nr", "$@25", "operator_function_sig", "$@26",
+  "operator_function_id", "operator_sig", "function_nr",
+  "function_trailer_clause", "function_trailer", "$@27", "noexcept_sig",
+  "function_body_as_trailer", "opt_trailing_return_type",
+  "trailing_return_type", "$@28", "function_body", "function_try_block",
+  "handler_seq", "function_sig", "$@29", "structor_declaration", "$@30",
+  "$@31", "structor_sig", "$@32", "opt_ctor_initializer",
+  "mem_initializer_list", "mem_initializer",
+  "parameter_declaration_clause", "$@33", "parameter_list", "$@34",
+  "parameter_declaration", "$@35", "$@36", "opt_initializer",
+  "initializer", "$@37", "$@38", "variable_declaration",
+  "init_declarator_id", "opt_declarator_list", "declarator_list_cont",
+  "$@39", "init_declarator", "opt_ptr_operator_seq",
+  "direct_abstract_declarator", "$@40", "direct_declarator", "$@41",
+  "lp_or_la", "$@42", "opt_array_or_parameters", "$@43", "$@44",
+  "function_qualifiers", "abstract_declarator", "declarator",
+  "opt_declarator_id", "declarator_id", "bitfield_size",
+  "opt_array_decorator_seq", "array_decorator_seq", "$@45",
+  "array_decorator_seq_impl", "array_decorator", "$@46",
+  "array_size_specifier", "$@47", "id_expression", "unqualified_id",
+  "qualified_id", "nested_name_specifier", "$@48", "tilde_sig",
+  "identifier_sig", "scope_operator_sig", "template_id", "$@49",
+  "decltype_specifier", "$@50", "simple_id", "identifier",
   "opt_decl_specifier_seq", "decl_specifier2", "decl_specifier_seq",
   "decl_specifier", "storage_class_specifier", "function_specifier",
   "cv_qualifier", "cv_qualifier_seq", "store_type", "store_type_specifier",
-  "$@45", "$@46", "type_specifier", "$@47", "tparam_type",
-  "tparam_type_specifier2", "$@48", "$@49", "tparam_type_specifier",
-  "simple_type_specifier", "type_name", "primitive_type",
-  "ptr_operator_seq", "reference", "pointer", "$@50", "pointer_seq",
-  "declaration_macro", "$@51", "$@52", "$@53", "$@54", "$@55", "$@56",
-  "$@57", "$@58", "$@59", "$@60", "$@61", "$@62", "$@63", "$@64", "$@65",
-  "$@66", "$@67", "$@68", "$@69", "$@70", "$@71", "$@72", "opt_comma",
+  "$@51", "$@52", "type_specifier", "trailing_type_specifier", "$@53",
+  "trailing_type_specifier_seq", "trailing_type_specifier_seq2", "$@54",
+  "$@55", "tparam_type", "tparam_type_specifier2", "$@56", "$@57",
+  "tparam_type_specifier", "simple_type_specifier", "type_name",
+  "primitive_type", "ptr_operator_seq", "reference", "rvalue_reference",
+  "pointer", "$@58", "ptr_cv_qualifier_seq", "pointer_seq",
+  "attribute_specifier_seq", "attribute_specifier", "$@59", "$@60",
+  "declaration_macro", "$@61", "$@62", "$@63", "$@64", "$@65", "$@66",
+  "$@67", "$@68", "$@69", "$@70", "$@71", "$@72", "$@73", "$@74", "$@75",
+  "$@76", "$@77", "$@78", "$@79", "$@80", "$@81", "$@82", "opt_comma",
   "operator_id", "operator_id_no_delim", "keyword", "literal",
-  "constant_expression", "common_bracket_item", "any_bracket_contents",
-  "bracket_pitem", "any_bracket_item", "braces_item",
-  "angle_bracket_contents", "braces_contents", "angle_bracket_pitem",
-  "angle_bracket_item", "angle_brackets_sig", "$@73", "brackets_sig",
-  "$@74", "parentheses_sig", "$@75", "$@76", "$@77", "braces_sig", "$@78",
+  "constant_expression", "constant_expression_item", "$@83",
+  "common_bracket_item", "common_bracket_item_no_scope_operator",
+  "any_bracket_contents", "bracket_pitem", "any_bracket_item",
+  "braces_item", "angle_bracket_contents", "braces_contents",
+  "angle_bracket_pitem", "angle_bracket_item", "angle_brackets_sig",
+  "$@84", "right_angle_bracket", "brackets_sig", "$@85", "$@86",
+  "parentheses_sig", "$@87", "$@88", "$@89", "braces_sig", "$@90",
   "ignored_items", "ignored_expression", "ignored_item",
-  "ignored_item_no_semi", "ignored_braces", "ignored_brackets",
-  "ignored_parentheses", "ignored_left_parenthesis", 0
+  "ignored_item_no_semi", "ignored_item_no_angle", "ignored_braces",
+  "ignored_brackets", "ignored_parentheses", "ignored_left_parenthesis", 0
 };
 #endif
 
-# ifdef YYPRINT
-/* YYTOKNUM[YYLEX-NUM] -- Internal token number corresponding to
-   token YYLEX-NUM.  */
-static const yytype_uint16 yytoknum[] =
-{
-       0,   256,   257,   258,   259,   260,   261,   262,   263,   264,
-     265,   266,   267,   268,   269,   270,   271,   272,   273,   274,
-     275,   276,   277,   278,   279,   280,   281,   282,   283,   284,
-     285,   286,   287,   288,   289,   290,   291,   292,   293,   294,
-     295,   296,   297,   298,   299,   300,   301,   302,   303,   304,
-     305,   306,   307,   308,   309,   310,   311,   312,   313,   314,
-     315,   316,   317,   318,   319,   320,   321,   322,   323,   324,
-     325,   326,   327,   328,   329,   330,   331,   332,   333,   334,
-     335,   336,   337,   338,   339,   340,   341,   342,   343,   344,
-     345,   346,   347,   348,   349,   350,   351,   352,   353,   354,
-     355,   356,   357,   358,   359,   360,   361,   362,   363,   364,
-     365,   366,   367,   368,   369,   370,   371,   372,   373,   374,
-      59,   123,   125,    61,    58,    44,    40,    41,    60,    62,
-      91,    93,   126,    38,    42,    37,    47,    45,    43,    33,
-     124,    94,    46
-};
-# endif
-
 /* YYR1[YYN] -- Symbol number of symbol that rule YYN derives.  */
-static const yytype_uint16 yyr1[] =
-{
-       0,   143,   144,   145,   146,   145,   147,   147,   147,   147,
-     147,   147,   147,   147,   147,   147,   147,   147,   147,   147,
-     147,   148,   148,   148,   148,   149,   150,   151,   150,   152,
-     153,   153,   154,   154,   155,   155,   157,   156,   159,   158,
-     160,   158,   161,   161,   161,   162,   163,   162,   162,   164,
-     164,   164,   165,   165,   165,   165,   165,   165,   165,   165,
-     165,   165,   165,   165,   165,   166,   166,   167,   167,   167,
-     167,   168,   168,   169,   170,   170,   171,   171,   171,   172,
-     172,   173,   173,   174,   174,   174,   175,   175,   177,   176,
-     178,   178,   179,   179,   180,   180,   180,   181,   182,   181,
-     183,   184,   184,   184,   184,   185,   185,   186,   186,   187,
-     187,   187,   187,   187,   188,   189,   189,   190,   191,   191,
-     193,   194,   192,   195,   196,   197,   197,   197,   197,   197,
-     197,   198,   199,   200,   199,   201,   202,   201,   204,   205,
-     203,   206,   207,   203,   208,   209,   210,   203,   211,   211,
-     212,   212,   214,   213,   215,   215,   216,   216,   216,   216,
-     217,   218,   218,   218,   219,   219,   219,   220,   220,   220,
-     221,   221,   221,   221,   222,   222,   222,   224,   225,   223,
-     226,   228,   227,   230,   229,   231,   232,   233,   234,   234,
-     236,   235,   235,   235,   237,   237,   239,   238,   240,   240,
-     242,   243,   241,   245,   244,   246,   246,   247,   247,   248,
-     249,   250,   249,   251,   252,   251,   251,   254,   255,   253,
-     256,   256,   258,   257,   259,   260,   261,   261,   262,   263,
-     262,   264,   265,   265,   266,   267,   266,   268,   269,   268,
-     270,   270,   270,   271,   271,   272,   273,   274,   272,   272,
-     275,   275,   275,   275,   276,   276,   277,   277,   278,   278,
-     279,   279,   280,   280,   280,   281,   281,   283,   282,   284,
-     284,   286,   285,   287,   288,   287,   289,   289,   290,   290,
-     291,   291,   291,   292,   292,   292,   292,   293,   292,   294,
-     295,   297,   296,   298,   298,   298,   298,   298,   298,   298,
-     298,   298,   298,   298,   298,   298,   298,   298,   298,   298,
-     298,   298,   298,   298,   298,   298,   298,   298,   298,   298,
-     299,   299,   299,   299,   299,   299,   299,   300,   300,   301,
-     301,   301,   301,   302,   302,   303,   303,   303,   304,   304,
-     304,   304,   305,   305,   305,   306,   306,   307,   307,   308,
-     310,   309,   311,   309,   312,   313,   312,   312,   312,   312,
-     312,   314,   316,   315,   317,   315,   318,   318,   318,   318,
-     318,   318,   319,   319,   320,   320,   320,   320,   320,   320,
-     320,   320,   320,   320,   320,   320,   320,   320,   320,   320,
-     320,   320,   320,   320,   321,   321,   321,   321,   321,   321,
-     321,   321,   321,   321,   321,   322,   322,   322,   323,   324,
-     325,   324,   326,   326,   328,   327,   329,   330,   331,   327,
-     332,   327,   333,   327,   334,   335,   327,   336,   327,   337,
-     338,   339,   327,   327,   340,   327,   341,   327,   342,   327,
-     343,   327,   344,   327,   345,   327,   346,   327,   347,   327,
-     348,   327,   349,   327,   327,   327,   327,   350,   350,   351,
-     351,   351,   351,   351,   351,   351,   351,   351,   352,   352,
-     352,   352,   352,   352,   352,   352,   352,   352,   352,   352,
-     352,   352,   352,   352,   352,   352,   352,   352,   352,   352,
-     352,   352,   352,   352,   352,   352,   352,   352,   352,   352,
-     352,   352,   352,   353,   353,   353,   353,   353,   353,   353,
-     353,   353,   353,   353,   353,   353,   353,   353,   353,   353,
-     353,   353,   353,   353,   353,   353,   354,   354,   354,   354,
-     354,   354,   354,   355,   355,   356,   356,   356,   356,   356,
-     356,   356,   356,   356,   356,   356,   357,   357,   358,   358,
-     358,   359,   359,   359,   360,   360,   361,   361,   362,   362,
-     363,   363,   364,   364,   364,   366,   365,   368,   367,   370,
-     369,   371,   369,   372,   369,   374,   373,   375,   375,   376,
-     376,   377,   377,   378,   378,   378,   378,   378,   378,   378,
-     378,   378,   378,   378,   378,   378,   378,   378,   378,   379,
-     380,   381,   382,   382,   382
+static const unsigned short int yyr1[] =
+{
+       0,   157,   158,   159,   160,   159,   161,   161,   161,   161,
+     161,   161,   161,   161,   161,   161,   161,   161,   161,   161,
+     161,   161,   161,   161,   162,   162,   162,   162,   162,   163,
+     163,   164,   165,   166,   165,   167,   168,   168,   169,   169,
+     169,   170,   170,   172,   171,   174,   173,   173,   175,   173,
+     173,   176,   176,   176,   177,   177,   177,   178,   178,   179,
+     179,   180,   181,   180,   180,   182,   182,   182,   183,   183,
+     183,   183,   183,   183,   183,   183,   183,   183,   183,   183,
+     183,   183,   183,   183,   184,   184,   184,   184,   185,   185,
+     185,   185,   186,   186,   187,   187,   187,   188,   188,   189,
+     189,   190,   190,   190,   191,   191,   191,   192,   192,   194,
+     193,   195,   195,   196,   196,   196,   197,   198,   197,   199,
+     199,   200,   200,   201,   200,   202,   203,   203,   204,   204,
+     204,   204,   205,   205,   206,   206,   207,   207,   207,   207,
+     207,   208,   209,   209,   210,   211,   211,   213,   214,   212,
+     215,   216,   217,   217,   217,   217,   217,   217,   218,   220,
+     219,   221,   222,   221,   223,   224,   223,   226,   227,   225,
+     228,   229,   225,   230,   231,   232,   225,   233,   233,   234,
+     234,   235,   235,   237,   236,   238,   238,   239,   239,   239,
+     239,   240,   241,   241,   241,   242,   242,   242,   243,   243,
+     243,   244,   244,   244,   244,   245,   245,   245,   247,   248,
+     246,   249,   251,   250,   253,   252,   254,   255,   256,   257,
+     257,   259,   258,   258,   258,   258,   258,   258,   258,   260,
+     261,   261,   262,   262,   264,   263,   265,   265,   265,   266,
+     267,   267,   269,   268,   271,   272,   270,   274,   273,   275,
+     275,   276,   276,   277,   278,   279,   278,   280,   281,   280,
+     280,   280,   283,   284,   282,   285,   285,   287,   286,   288,
+     286,   289,   290,   291,   291,   292,   293,   292,   294,   295,
+     295,   296,   297,   296,   298,   299,   298,   301,   300,   300,
+     302,   303,   304,   302,   302,   305,   305,   305,   305,   305,
+     305,   306,   306,   307,   307,   308,   308,   309,   309,   310,
+     310,   310,   311,   311,   313,   312,   314,   314,   316,   315,
+     317,   318,   317,   319,   319,   320,   320,   320,   320,   320,
+     321,   321,   321,   322,   322,   322,   322,   322,   322,   323,
+     322,   324,   325,   326,   328,   327,   330,   329,   331,   331,
+     331,   331,   331,   331,   331,   331,   331,   331,   331,   331,
+     331,   331,   331,   331,   331,   331,   331,   331,   331,   332,
+     332,   332,   332,   332,   332,   332,   333,   333,   334,   334,
+     334,   334,   335,   335,   336,   336,   336,   336,   337,   337,
+     337,   337,   337,   338,   338,   338,   339,   339,   340,   340,
+     341,   343,   342,   344,   342,   345,   345,   345,   346,   346,
+     347,   346,   346,   346,   348,   350,   349,   351,   349,   352,
+     354,   353,   355,   353,   356,   356,   356,   356,   356,   356,
+     356,   357,   357,   358,   358,   358,   358,   358,   358,   358,
+     358,   358,   358,   358,   358,   358,   358,   358,   358,   358,
+     358,   358,   358,   358,   359,   359,   359,   359,   359,   359,
+     359,   359,   359,   359,   359,   359,   359,   359,   359,   360,
+     360,   360,   360,   361,   362,   364,   363,   365,   365,   366,
+     366,   367,   367,   369,   370,   368,   372,   371,   373,   374,
+     375,   371,   376,   371,   377,   371,   378,   379,   371,   380,
+     371,   381,   382,   383,   371,   371,   384,   371,   385,   371,
+     386,   371,   387,   371,   388,   371,   389,   371,   390,   371,
+     391,   371,   392,   371,   393,   371,   371,   371,   371,   394,
+     394,   395,   395,   395,   395,   395,   395,   395,   395,   395,
+     395,   395,   395,   396,   396,   396,   396,   396,   396,   396,
+     396,   396,   396,   396,   396,   396,   396,   396,   396,   396,
+     396,   396,   396,   396,   396,   396,   396,   396,   396,   396,
+     396,   396,   396,   396,   396,   396,   396,   397,   397,   397,
+     397,   397,   397,   397,   397,   397,   397,   397,   397,   397,
+     397,   397,   397,   397,   397,   397,   397,   397,   397,   397,
+     397,   397,   397,   397,   397,   397,   398,   398,   398,   398,
+     398,   398,   398,   398,   399,   399,   400,   400,   400,   401,
+     400,   400,   402,   402,   403,   403,   403,   403,   403,   403,
+     403,   403,   403,   403,   404,   404,   405,   405,   405,   405,
+     406,   406,   406,   407,   407,   408,   408,   409,   409,   410,
+     410,   411,   411,   411,   413,   412,   414,   414,   416,   415,
+     417,   415,   419,   418,   420,   418,   421,   418,   423,   422,
+     424,   424,   425,   425,   426,   426,   427,   427,   428,   428,
+     428,   428,   428,   428,   428,   428,   428,   428,   428,   428,
+     428,   428,   428,   428,   428,   429,   430,   430,   431,   432,
+     432,   432
 };
 
 /* YYR2[YYN] -- Number of symbols composing right hand side of rule YYN.  */
-static const yytype_uint8 yyr2[] =
+static const unsigned char yyr2[] =
 {
-       0,     2,     1,     0,     0,     3,     1,     1,     1,     1,
-       1,     1,     1,     1,     1,     1,     1,     1,     1,     2,
-       1,     2,     2,     2,     2,     5,     4,     0,     6,     5,
-       1,     2,     3,     4,     4,     5,     0,     5,     0,     4,
-       0,     3,     1,     1,     1,     0,     0,     3,     3,     1,
+       0,     2,     1,     0,     0,     4,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
-       1,     1,     2,     2,     1,     2,     2,     2,     3,     2,
-       3,     0,     1,     2,     1,     3,     1,     3,     3,     0,
-       1,     0,     1,     1,     1,     1,     4,     5,     0,     5,
-       2,     1,     0,     1,     1,     2,     3,     1,     0,     4,
-       6,     3,     4,     2,     3,     5,     3,     1,     2,     5,
-       5,     6,     5,     6,     2,     0,     3,     2,     1,     1,
-       0,     0,     7,     1,     3,     1,     2,     2,     2,     3,
-       3,     4,     3,     0,     5,     1,     0,     4,     0,     0,
-       5,     0,     0,     5,     0,     0,     0,     7,     1,     1,
-       0,     1,     0,     3,     1,     2,     2,     2,     2,     2,
-       2,     3,     2,     3,     2,     3,     3,     2,     3,     4,
-       2,     1,     1,     2,     1,     2,     2,     0,     0,     7,
-       2,     0,     3,     0,     5,     2,     1,     2,     0,     2,
-       0,     3,     1,     2,     3,     1,     0,     5,     1,     1,
-       0,     0,     5,     0,     5,     0,     2,     1,     3,     2,
-       0,     0,     2,     1,     0,     4,     3,     0,     0,     5,
-       0,     1,     0,     3,     4,     2,     0,     2,     0,     0,
-       4,     2,     0,     1,     2,     0,     5,     2,     0,     5,
-       1,     1,     1,     1,     1,     0,     0,     0,     6,     1,
-       0,     2,     2,     3,     1,     2,     1,     2,     0,     1,
-       1,     3,     1,     1,     1,     0,     1,     0,     2,     1,
-       2,     0,     4,     0,     0,     2,     1,     1,     1,     1,
-       2,     2,     2,     2,     2,     3,     3,     0,     5,     1,
-       1,     0,     5,     1,     1,     1,     1,     1,     1,     1,
-       2,     2,     2,     2,     2,     2,     2,     1,     1,     1,
+       1,     1,     2,     1,     2,     2,     2,     2,     2,     5,
+       4,     5,     4,     0,     6,     5,     1,     2,     4,     3,
+       5,     4,     5,     0,     5,     0,     7,     4,     0,     5,
+       2,     1,     1,     1,     3,     4,     2,     1,     1,     0,
+       1,     0,     0,     4,     3,     1,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
-       1,     1,     1,     1,     1,     1,     1,     0,     2,     1,
-       1,     1,     1,     1,     2,     1,     1,     1,     1,     1,
-       2,     1,     1,     1,     1,     1,     1,     1,     2,     2,
-       0,     3,     0,     4,     1,     0,     3,     1,     1,     2,
-       2,     2,     0,     3,     0,     4,     1,     1,     1,     2,
-       2,     2,     1,     1,     1,     1,     1,     1,     1,     1,
+       1,     2,     2,     1,     2,     2,     2,     2,     2,     3,
+       2,     3,     1,     4,     2,     4,     4,     0,     1,     0,
+       1,     1,     1,     1,     5,     3,     6,     4,     5,     0,
+       5,     4,     3,     1,     2,     2,     0,     0,     3,     1,
+       3,     0,     1,     0,     4,     6,     2,     1,     5,     6,
+       3,     4,     5,     3,     1,     2,     5,     5,     6,     5,
+       6,     2,     0,     3,     2,     1,     1,     0,     0,     8,
+       1,     3,     1,     2,     2,     2,     3,     3,     4,     0,
+       8,     3,     0,     5,     1,     0,     4,     0,     0,     5,
+       0,     0,     5,     0,     0,     0,     7,     0,     1,     1,
+       1,     0,     1,     0,     3,     1,     2,     2,     2,     2,
+       2,     3,     4,     2,     3,     2,     3,     4,     2,     4,
+       5,     3,     1,     1,     2,     1,     2,     3,     0,     0,
+       9,     2,     0,     4,     0,     7,     2,     1,     3,     0,
+       2,     0,     3,     1,     2,     1,     2,     1,     1,     1,
+       2,     2,     0,     1,     0,     3,     3,     1,     1,     6,
+       0,     6,     0,     7,     0,     0,     6,     0,     6,     0,
+       2,     1,     3,     3,     0,     0,     2,     1,     0,     4,
+       3,     1,     0,     0,     5,     0,     1,     0,     3,     0,
+       2,     4,     2,     0,     2,     0,     0,     4,     2,     0,
+       1,     3,     0,     6,     3,     0,     5,     0,     3,     1,
+       0,     0,     0,     7,     1,     0,     2,     2,     3,     3,
+       2,     1,     2,     1,     2,     0,     1,     2,     4,     1,
+       1,     1,     0,     1,     0,     2,     1,     2,     0,     5,
+       0,     0,     2,     1,     1,     1,     1,     1,     2,     2,
+       2,     2,     2,     2,     2,     2,     3,     3,     3,     0,
+       5,     1,     1,     1,     0,     5,     0,     3,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
-       1,     1,     1,     1,     1,     1,     1,     2,     1,     1,
-       0,     3,     1,     2,     0,     7,     0,     0,     0,     9,
-       0,     5,     0,     5,     0,     0,    10,     0,     7,     0,
-       0,     0,     9,     6,     0,     7,     0,     7,     0,     7,
+       1,     1,     1,     1,     1,     1,     0,     3,     1,     1,
+       1,     1,     2,     3,     1,     1,     1,     1,     1,     1,
+       2,     1,     1,     1,     1,     1,     1,     1,     1,     2,
+       2,     0,     3,     0,     4,     1,     3,     4,     1,     1,
+       0,     4,     2,     2,     2,     0,     3,     0,     4,     2,
+       0,     3,     0,     4,     1,     1,     1,     1,     2,     2,
+       2,     2,     2,     1,     1,     1,     1,     1,     1,     1,
+       1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
+       1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
+       1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
+       1,     1,     2,     2,     2,     0,     4,     0,     1,     1,
+       2,     0,     2,     0,     0,     6,     0,     7,     0,     0,
+       0,     9,     0,     5,     0,     5,     0,     0,    10,     0,
+       7,     0,     0,     0,     9,     6,     0,     7,     0,     7,
        0,     7,     0,     7,     0,     7,     0,     7,     0,     7,
-       0,     9,     0,     9,     4,     4,     7,     0,     1,     2,
-       2,     3,     3,     1,     1,     1,     1,     1,     1,     1,
+       0,     7,     0,     9,     0,     9,     4,     4,     7,     0,
+       1,     2,     2,     3,     3,     1,     1,     1,     1,     2,
+       2,     2,     1,     1,     1,     1,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
        1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
-       1,     1,     1,     1,     2,     1,     1,     1,     1,     1,
-       1,     1,     1,     1,     1,     1,     0,     2,     1,     1,
-       1,     1,     1,     1,     1,     1,     0,     2,     0,     2,
-       1,     1,     1,     1,     1,     0,     4,     0,     4,     0,
-       4,     0,     4,     0,     4,     0,     4,     0,     2,     0,
-       2,     1,     1,     1,     1,     1,     1,     1,     1,     1,
-       1,     1,     1,     1,     1,     1,     1,     1,     1,     3,
-       3,     3,     1,     1,     1
-};
-
-/* YYDEFACT[STATE-NAME] -- Default rule to reduce with in state
-   STATE-NUM when YYTABLE doesn't specify something else to do.  Zero
-   means the default is an error.  */
-static const yytype_uint16 yydefact[] =
-{
-       3,     0,     4,     1,     0,   378,   379,   380,   374,   375,
-     376,   377,    43,    42,    44,    91,   345,   346,   338,   341,
-     343,   344,   342,   339,   186,     0,   355,     0,     0,     0,
-     290,   394,   395,   396,   397,   399,   400,   401,   402,   398,
-     403,   404,   381,   382,   393,   383,   384,   385,   386,   387,
-     388,   389,   390,   391,   392,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,    20,     0,     5,    17,
-      11,     9,    10,     8,    30,    15,   327,    36,    40,    14,
-     327,     0,    12,   107,     7,     6,     0,    16,     0,     0,
-       0,     0,   174,     0,     0,    13,     0,   276,   358,     0,
-       0,     0,   357,   278,   289,     0,   333,   335,   336,   337,
-       0,   232,   350,   354,   373,   372,    18,   295,   293,   294,
-     298,   299,   297,   296,   308,   307,   319,   309,   310,   311,
-     312,   313,   314,   315,   316,   317,   318,   360,   277,     0,
-     279,   340,   133,     0,   378,   379,   380,   374,   375,   376,
-     377,   339,   381,   382,   393,   383,   384,   385,   386,   387,
-     388,   389,   390,   391,   392,   327,    40,   327,   358,   357,
-       0,     0,   320,   322,   321,   325,   326,   324,   323,   577,
-      27,     0,     0,     0,   125,     0,     0,     0,   416,   420,
-     422,     0,     0,   429,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,   302,   300,
-     301,   305,   306,   304,   303,   232,     0,    71,   359,   232,
-      88,     0,    24,    31,    21,    23,     0,    22,     0,     0,
-     195,   577,   156,   158,   159,   157,   177,     0,     0,   180,
-      19,   287,   164,     0,   162,   200,   280,     0,   279,   278,
-     283,   281,   282,   284,   291,   327,    40,   327,   108,   175,
-       0,   334,   352,   243,   244,   176,   181,     0,     0,   160,
-     188,     0,   228,   220,     0,   265,     0,   199,   260,   408,
-     409,   349,   233,   405,   412,   406,   327,   279,     3,   132,
-     138,   356,   340,   232,   359,   232,   327,   327,   295,   293,
-     294,   298,   299,   297,   296,   123,   119,   115,   118,   265,
-     260,     0,     0,     0,   126,     0,   124,   128,   127,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,   332,   331,     0,   228,     0,   328,   329,   330,
-      45,     0,    41,    72,    32,    71,     0,    92,   360,     0,
-       0,   198,     0,   211,   359,     0,   203,   205,   285,   286,
-     556,   232,   359,   232,   165,   163,   327,   188,   183,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   492,   493,
-     487,   488,   489,   490,   491,   494,   495,   496,   497,   498,
-     499,   500,   501,   502,   466,   465,     0,   463,   464,     0,
-     474,   475,   469,   468,   470,   471,   472,   473,   476,   477,
-     185,   467,   187,   196,     0,   222,   225,   221,   256,     0,
-       0,   237,   266,     0,   166,   161,   199,     0,     0,   407,
-     413,   351,     4,     0,   135,     0,     0,     0,     0,   115,
-       0,     0,   232,   232,     0,   120,   378,   379,   380,   374,
-     375,   376,   377,   603,   604,   531,   527,   528,   526,   529,
-     530,   532,   506,   505,   507,   520,   509,   511,   510,   512,
-     513,   515,   514,   516,   519,   508,   521,   504,   503,   518,
-     517,   478,   479,   524,   523,   522,   525,   587,   586,   598,
-     582,   577,    26,   593,   589,   594,   602,   591,   592,   577,
-     590,   597,   588,   595,   596,   578,   581,   583,   585,   584,
-     577,     0,     0,     3,   131,   130,   129,   414,     0,     0,
-       0,   424,   427,     0,     0,   434,   438,   442,   446,   436,
-     440,   444,   448,   450,   452,   454,   455,     0,    34,   227,
-     231,    46,    83,    84,    85,    81,    73,    74,    79,    76,
-      39,    86,     0,    93,    94,    97,   198,   194,     0,   217,
-       0,     0,   211,     0,   201,     0,     0,    33,     0,   353,
-     182,   211,     0,     0,   459,   460,   192,   190,     0,   189,
-     211,   224,   229,     0,   238,   257,   271,   268,   269,   263,
-     264,   262,   261,   347,   411,    25,   136,   134,     0,     0,
-       0,   368,   367,     0,   258,   232,   362,   366,   148,   149,
-     258,     0,   110,   114,   117,   112,     0,     0,   109,   232,
-     211,     0,     0,     0,    29,     4,     0,   417,   421,   423,
-       0,     0,   430,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   457,    49,    50,    51,    37,     0,
-       0,     0,    82,     0,    80,     0,    89,    95,    98,   579,
-     178,   212,   213,     0,   288,     0,   206,   207,     0,   188,
-     571,   573,   541,   575,   563,   539,   564,   569,   565,   292,
-     567,   540,   545,   544,   538,   542,   543,   561,   562,   557,
-     560,   535,   536,   537,    35,    87,     0,   461,   462,     0,
-     193,     0,   232,   549,   550,   223,   548,   533,   245,   274,
-     270,   348,   138,   369,   370,   371,   364,   241,   242,   240,
-     139,   258,   265,   259,   361,   327,   142,   145,   111,   113,
-     116,     0,   599,   600,   601,    28,     0,     0,   425,     0,
-       0,   433,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,   458,     0,     0,     0,     0,    64,    53,    58,
-      47,    60,    54,    57,    55,    52,     0,    59,     0,   171,
-     172,    56,     0,     0,   357,     0,     0,    61,    48,    77,
-      75,    78,    96,     0,     0,   188,   214,   258,   204,     0,
-     209,   202,   546,   546,   558,   546,   556,   546,   184,   191,
-     197,   230,   534,   246,   239,   249,     0,     0,   137,   327,
-     150,   254,     0,   258,   234,   363,   150,   258,   121,   415,
-     418,     0,   428,   431,   435,   439,   443,   447,   437,   441,
-     445,   449,     0,     0,   456,    69,     0,    67,     0,     0,
-     357,     0,     0,    62,    65,    66,     0,   167,    63,     0,
-     173,     0,   170,   199,    99,   100,   580,   179,   216,   217,
-     218,   208,     0,     0,     0,     0,     0,     0,   211,   272,
-     275,   365,   152,   140,   151,   235,   255,   143,   146,   250,
-       0,   579,     0,     0,     0,   577,   579,   103,   359,     0,
-      68,     0,    70,     0,   168,     0,   215,   220,   552,   553,
-     572,   551,   547,   574,   555,   576,   554,   559,   570,   566,
-     568,     0,     0,   245,   150,   122,   419,     0,   432,   451,
-     453,     0,     0,   101,     0,     0,   104,   359,   169,   219,
-     247,   153,   154,   236,   147,   252,   251,     0,   426,   579,
-     106,     0,   102,   250,   155,   253,     0,   248,   105
-};
-
-/* YYDEFGOTO[NTERM-NUM].  */
-static const yytype_int16 yydefgoto[] =
-{
-      -1,     1,     2,     4,    78,   232,    80,    81,   323,    82,
-      83,    84,   234,    86,   226,    87,   365,   227,   247,   561,
-     669,   670,   780,   781,   782,   362,   363,   566,   567,   675,
-     671,   568,    89,    90,   367,    91,   572,   573,   574,   803,
-     235,   857,   907,    92,    93,   458,   464,   459,   315,   316,
-     640,   899,   317,    94,   193,    95,   236,   300,   453,   732,
-     454,   455,   830,   456,   836,   457,   837,   934,   630,   893,
-     894,   932,   951,   237,    98,    99,   100,   787,   788,   101,
-     102,   373,   805,   103,   275,   387,   276,   591,   277,   104,
-     279,   432,   599,   719,   242,   280,   600,   253,   790,   377,
-     689,   255,   582,   584,   686,   687,   578,   579,   681,   879,
-     682,   683,   917,   436,   437,   603,   105,   282,   354,   434,
-     722,   355,   356,   740,   933,   283,   728,   741,   284,   824,
-     888,   963,   935,   832,   439,   742,   743,   612,   441,   442,
-     443,   607,   608,   729,   826,   827,   194,   107,   178,   149,
-     375,   110,   111,   179,   380,   113,   114,   225,   357,   248,
-     116,   117,   118,   119,   614,   239,   121,   296,   386,   122,
-     153,   624,   625,   745,   829,   626,   123,   124,   125,   292,
-     293,   294,   448,   295,   126,   646,   332,   757,   900,   333,
-     334,   650,   841,   651,   337,   760,   902,   654,   658,   655,
-     659,   656,   660,   657,   661,   662,   663,   773,   430,   704,
-     705,   706,   725,   726,   882,   921,   922,   927,   585,   884,
-     708,   709,   710,   816,   711,   817,   712,   815,   812,   813,
-     713,   814,   321,   804,   525,   526,   527,   528,   529,   530
-};
-
-/* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing
-   STATE-NUM.  */
-#define YYPACT_NINF -877
-static const yytype_int16 yypact[] =
-{
-    -877,    50,    90,  -877,  4206,   173,   191,   212,   241,   267,
-     280,   283,  -877,  -877,  -877,  4853,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,    85,  -877,    82,  -877,  5377,   116,  1031,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,   -18,    42,   122,   133,   152,   171,   203,   207,
-     211,   227,   257,   260,   264,   -16,    37,   114,   129,   138,
-     161,   164,   169,   196,   230,   233,   242,   249,   271,   276,
-     278,   281,   288,   289,   309,   316,  -877,   602,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  4853,  -877,
-    -877,   327,  -877,  -877,  -877,  -877,  5092,  -877,   100,   100,
-     100,   100,  -877,   329,  5472,  -877,    21,  -877,   333,  4731,
-     385,  4853,   -25,  -877,   331,  5187,  -877,  -877,  -877,  -877,
-    3993,    99,  -877,  -877,  -877,  -877,  -877,     2,     9,    15,
-      46,    70,    88,   121,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,   343,  -877,  4867,
-     385,   346,   339,  4853,     2,     9,    15,    46,    70,    88,
-     121,   465,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  4853,  -877,  -877,   385,
-    5377,  4705,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-     356,  4853,  4853,   360,  -877,  4731,  4853,  4901,  -877,  -877,
-    -877,  4901,  4901,  -877,  4901,  4901,  4901,  4901,  4901,  4901,
-    4901,  4901,  4901,  4901,  4901,  4901,  4901,  4901,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  4974,   361,   357,   188,  4974,
-    -877,  4853,  -877,  -877,  -877,  -877,  5092,  -877,  5282,  4880,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  4853,  5472,  -877,
-    -877,  -877,  -877,   358,  -877,  -877,  -877,   385,   -49,   364,
-    -877,  -877,  -877,  -877,  -877,  -877,  4853,  -877,  -877,  -877,
-    4731,  -877,  -877,  -877,  -877,  -877,  -877,   365,  5017,  -877,
-    -877,   368,  -877,   363,  3979,   353,  4731,   385,   -66,  -877,
-     250,  -877,  -877,  -877,  -877,    99,  -877,   385,  -877,  -877,
-      73,  -877,  -877,  4545,     5,  4545,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,   -13,
-     371,  1224,   467,   366,  -877,   376,  -877,  -877,  -877,  3884,
-    4731,   372,  4901,  4901,  4901,   373,   377,  4901,   378,   380,
-     381,   384,   386,   387,   388,   389,   396,   397,   399,   383,
-     401,   405,  -877,  -877,   411,  -877,  4705,  -877,  -877,  -877,
-    -877,  4609,  -877,  -877,  -877,   357,   421,  4901,  -877,  5282,
-    4731,  -877,  1364,   416,  -877,   621,  -877,   422,  -877,  -877,
-    -877,  4974,   252,  4974,  -877,  -877,  -877,  -877,  -877,   415,
-     420,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,   424,  -877,  -877,   423,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,    41,  -877,    81,  -877,  -877,  -877,  -877,   429,
-    4705,  -877,  -877,   427,  -877,  -877,   385,   432,   362,  -877,
-    -877,  1113,   436,    11,  -877,  5508,    62,   452,   439,  -877,
-    4705,   442,  4545,  4545,   131,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,   443,   385,  -877,  -877,  -877,  -877,  -877,   441,   437,
-     440,  -877,  -877,   444,  5472,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  4853,  -877,   448,
-    -877,    47,  -877,  -877,  -877,   438,   449,  -877,   538,  -877,
-    -877,  -877,   454,   456,  -877,   460,   464,  -877,   450,  -877,
-     385,   331,   416,  4853,  -877,  3324,   468,  -877,   469,  1113,
-      41,   416,   459,   461,  -877,  -877,  -877,  -877,   573,  -877,
-     416,  -877,  -877,  3604,  -877,  -877,  -877,   427,  -877,  -877,
-    -877,  -877,  -877,  -877,   362,  -877,  -877,  -877,  4853,  4853,
-    4853,  -877,   385,  5472,  4657,    99,  -877,  -877,  -877,  -877,
-    4657,   574,  -877,   472,  -877,  -877,   478,   479,  -877,    99,
-     416,  1504,  1644,  1784,  -877,   480,  5472,  -877,  -877,  -877,
-    5472,  5472,  -877,   473,  5472,  5472,  5472,  5472,  5472,  5472,
-    5472,  5472,  5472,  5472,   476,  -877,  -877,  -877,  -877,  4088,
-     488,  4853,  -877,  4609,  -877,  4853,  -877,  4901,  -877,  -877,
-    -877,   492,  -877,  5472,  -877,   486,   493,  -877,    52,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,   507,  -877,  -877,    58,
-    -877,   508,    99,  -877,  -877,  3604,  -877,  -877,   204,   489,
-    -877,  -877,    73,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  1084,   353,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,   509,  -877,  -877,  -877,  -877,   510,  5472,  -877,   512,
-    5472,  -877,   513,   514,   515,   516,   517,   519,   520,   521,
-     494,   524,  -877,   523,  4324,  4805,    52,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  4514,  -877,   100,  -877,
-    -877,  -877,   531,  4731,   234,  4419,  3993,  -877,  -877,  -877,
-    -877,  -877,  -877,  3604,  2064,  -877,   581,  4657,  -877,  4853,
-    -877,    41,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,   522,  3604,  -877,  -877,
-     534,  -877,   527,  4657,  -877,  1113,   534,  4657,  -877,  -877,
-    -877,   535,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,   646,   651,  -877,  -877,  4683,  -877,   506,   100,
-     -49,  4514,  4880,  -877,  -877,  -877,  4514,  -877,  -877,   546,
-    -877,  4731,  -877,  -877,  3604,  -877,  -877,    41,  -877,  -877,
-    -877,  -877,  2764,  2904,  2624,  3044,  3464,  3184,   416,  -877,
-    3604,  1113,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-     550,  -877,   554,   556,   560,  -877,  -877,  -877,   313,  4683,
-    -877,   506,  -877,  4683,  -877,   553,  -877,   363,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,   562,  3744,   204,   534,   189,  -877,  2204,  -877,  -877,
-    -877,  1924,  2344,  -877,   313,  4683,  -877,   319,  -877,  -877,
-    -877,  3744,  -877,  -877,  -877,  -877,  -877,    52,  -877,  -877,
-    -877,   319,  -877,  -877,  -877,  -877,  2484,   189,  -877
-};
-
-/* YYPGOTO[NTERM-NUM].  */
-static const yytype_int16 yypgoto[] =
-{
-    -877,  -877,  -286,  -877,  -877,   686,  -877,  -877,  -877,  -877,
-    -628,   -94,     1,   -26,  -877,  -877,  -877,  -877,    16,  -877,
-    -877,  -877,  -877,  -877,  -877,   337,  -877,  -877,    40,  -877,
-    -877,   154,    25,   -21,  -877,  -877,  -877,  -877,    19,  -877,
-    -877,  -141,  -495,    51,  -102,  -254,   262,    83,  -877,  -877,
-    -877,  -877,   263,    55,  -877,  -877,    -1,  -877,  -877,  -877,
-      -7,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -757,
-    -877,  -877,  -877,   723,  -877,  -877,  -877,   -57,   -43,  -604,
-     -80,  -877,  -877,  -150,  -231,  -877,  -877,  -877,  -155,    35,
-    -267,  -361,  -877,  -877,   -91,  -877,  -877,  -105,   -67,  -877,
-    -877,  -877,  -877,  -877,  -877,   -76,  -552,  -877,  -877,  -877,
-    -145,  -877,  -877,  -182,  -877,  -877,    67,   390,  -170,   392,
-    -877,    17,   -64,  -576,  -877,  -157,  -877,  -877,  -877,  -193,
-    -877,  -877,  -222,  -877,  -877,  -877,   -98,  -877,     0,  -695,
-    -877,  -877,   136,  -877,  -877,  -877,    13,   -45,     3,    32,
-    -877,   -63,    23,    -4,  -877,   215,   -24,     8,  -877,    10,
-     -81,  -877,  -877,  -417,  -877,    49,  -877,  -877,  -877,   -19,
-    -877,  -877,  -877,  -877,  -877,  -877,   117,  -367,   275,  -263,
-     455,   457,  -877,  -877,    75,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,
-    -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,  -877,    20,
-     120,   172,  -754,  -541,  -469,  -511,  -139,  -877,   -65,  -877,
-    -876,  -877,  -877,  -877,  -877,  -877,    30,  -877,  -877,  -877,
-    -877,  -877,  -225,  -821,  -877,  -766,  -877,  -877,  -661,  -877
+       1,     1,     1,     1,     1,     2,     1,     1,     1,     0,
+       3,     1,     1,     1,     1,     1,     1,     1,     1,     1,
+       1,     1,     1,     1,     0,     2,     1,     1,     1,     1,
+       1,     1,     1,     1,     1,     0,     2,     0,     2,     1,
+       1,     1,     1,     1,     0,     4,     1,     1,     0,     4,
+       0,     5,     0,     4,     0,     4,     0,     4,     0,     4,
+       0,     2,     0,     2,     1,     1,     1,     1,     1,     1,
+       1,     1,     1,     1,     1,     1,     1,     1,     1,     1,
+       1,     1,     1,     1,     1,     3,     3,     4,     3,     1,
+       1,     1
 };
 
-/* YYTABLE[YYPACT[STATE-NUM]].  What to do in state STATE-NUM.  If
-   positive, shift that token.  If negative, reduce the rule which
-   number is the opposite.  If zero, do what YYDEFACT says.
-   If YYTABLE_NINF, syntax error.  */
-#define YYTABLE_NINF -411
-static const yytype_int16 yytable[] =
-{
-     112,   175,   233,    96,   190,    85,   177,   108,   243,   244,
-     245,   150,   452,   268,   115,   281,   372,   106,   148,   445,
-      88,   440,   285,    30,   318,   150,   590,   810,   147,   252,
-     685,   613,   148,   825,   271,   269,   109,   180,   876,   716,
-     328,   778,   254,   176,   707,   327,   257,    30,   721,   874,
-       3,   461,   196,   120,   746,   444,   952,   291,   447,   366,
-    -198,   195,   473,   474,   256,   789,   261,   596,   690,   691,
-     665,   666,   667,   890,  -320,   964,   181,  -199,   597,   897,
-     937,  -322,   628,   319,   150,   942,   257,  -321,   751,   265,
-      -2,   148,   727,  -141,   267,  -279,   272,   151,   229,   271,
-     629,   228,  -308,   445,   256,   258,   238,   150,  -308,  -144,
-     197,  -141,    88,   465,   262,   863,   287,  -267,  -325,   182,
-     183,   184,   185,   186,   187,   188,   -38,   438,   109,   -38,
-    -320,   266,   257,   260,   281,   263,   616,  -322,   966,   444,
-     617,   250,  -326,  -321,   358,   297,   855,   270,   358,   150,
-     256,   261,   286,   249,   306,   278,   148,   271,   269,   307,
-    -324,   272,  -307,   198,   598,   831,   301,   271,  -307,   668,
-     789,   876,   150,   263,  -325,   536,   876,   954,   516,   148,
-     535,   281,   789,   303,   697,   305,   285,   150,   150,   304,
-     384,   297,   150,  -323,   148,   148,   176,   731,  -326,   262,
-     876,   601,   263,   385,   324,   325,   602,   257,   636,   637,
-     152,   586,   265,   588,   822,   955,  -324,   956,   702,   272,
-     240,   241,   358,   257,   358,   256,   957,   150,   330,   272,
-     329,   880,   289,   290,   148,   287,   702,   189,   825,   460,
-     199,   460,  -319,   150,   368,  -320,   369,   645,  -319,  -323,
-     148,   638,   176,  -309,   266,   200,   639,   896,   285,  -309,
-     374,   898,   150,  -322,   201,   281,   258,   257,   109,   148,
-     270,   370,  -310,   381,   278,   383,  -410,  -410,  -310,   382,
-     378,   379,   446,   605,  -321,   256,   641,   202,   271,   269,
-     203,  -311,   727,  -295,   642,   204,   965,  -311,   431,  -295,
-     358,  -320,   358,   318,   451,   643,    30,   257,   364,   -38,
-     263,  -293,   -38,  -325,   462,   463,   727,  -293,   532,  -322,
-     379,   278,   205,  -312,   259,   531,   297,  -313,   811,  -312,
-     823,  -314,  -294,  -313,  -267,   288,   931,  -314,  -294,  -326,
-    -321,   522,   285,   265,   883,   707,   885,  -315,   887,   431,
-     272,   581,  -324,  -315,  -279,  -323,   206,   150,   702,   207,
-    -199,  -298,   319,   822,   148,   329,   446,  -298,   208,  -325,
-     358,   580,   587,   -38,   569,   209,   -38,  -316,   249,   822,
-    -317,   358,   358,  -316,  -318,   176,  -317,  -299,    16,    17,
-    -318,   707,   522,  -299,   589,  -326,   320,   210,   460,   460,
-    -297,   270,   211,  -296,   212,   278,  -297,   213,  -324,  -296,
-     707,  -323,   331,   943,   214,   215,   335,   336,   946,   338,
-     339,   340,   341,   342,   343,   344,   345,   346,   347,   348,
-     349,   350,   351,   364,   905,   216,   702,   906,   521,   587,
-     905,   523,   217,   906,   877,   609,   610,   611,   230,   943,
-     946,   622,   962,  -277,   371,   246,   631,    30,   621,   264,
-     702,   562,   563,   564,   -90,   623,   962,   298,   299,   379,
-     182,   183,   184,   185,   186,   187,   188,   302,   833,   322,
-     326,   361,   360,  -267,   376,   259,   435,   533,    25,   521,
-    -198,   388,   523,   524,   433,   447,   534,   537,   541,   320,
-     359,   371,   542,   544,   359,   545,   546,   702,   358,   547,
-     555,   548,   549,   550,   551,   702,   702,   702,   702,   702,
-     702,   552,   553,   702,   554,    12,    13,    14,   556,   872,
-     557,   558,    16,    17,    18,    19,    20,    21,    22,    30,
-     161,   571,   271,  -210,   524,   592,   583,   538,   539,   540,
-     593,   594,   543,   150,   595,   263,   604,   606,   615,   632,
-     148,   744,   635,   644,   648,   702,   647,   649,   674,   652,
-     664,   320,   627,   602,   673,   460,   676,   680,   359,   150,
-     359,   677,   575,   678,   702,   576,   148,   679,   714,   715,
-     717,   720,   718,   653,   747,   872,   688,   639,   748,   749,
-     761,   772,   755,   684,   736,   218,   219,   220,   221,   222,
-     223,   224,   798,   808,   150,   150,   150,   806,   809,   852,
-    -273,   148,   148,   148,   182,   183,   184,   185,   186,   187,
-     188,   733,   734,   735,   818,   820,   838,   839,   869,   842,
-     844,   845,   846,   847,   848,   263,   849,   850,   851,   853,
-     854,   868,   878,   889,   895,   320,   359,   892,   359,   903,
-     901,   522,   522,   522,   904,   794,   914,   150,   786,   150,
-     779,   150,   108,   948,   148,   320,   148,   936,   148,   795,
-     941,   938,   792,   939,   799,    88,   569,   940,   801,   950,
-      79,   281,   233,   268,   783,   756,   802,   867,   285,   758,
-     759,   793,   570,   762,   763,   764,   765,   766,   767,   768,
-     769,   770,   771,   800,   271,   269,   915,   910,   796,   672,
-     784,   633,   750,   634,   785,   828,   359,    97,   870,   865,
-     257,   859,   807,   881,   916,   949,   791,   359,   359,   821,
-     953,   967,   834,   730,   797,   926,   560,   559,   256,   819,
-     449,   886,   450,   835,   358,     0,     0,   281,   521,   521,
-     521,   523,   523,   523,   233,     0,     0,     0,   912,   265,
-     860,   150,   581,   858,   267,     0,   272,     0,   148,     0,
-     271,   269,   860,     0,   861,   271,   269,   864,     0,   297,
-     856,   860,   873,     0,   870,     0,   866,     0,   196,   870,
-       0,     0,    88,     0,     0,   150,   840,   195,   257,   843,
-     358,   266,   148,   524,   524,   524,     0,   263,   793,     0,
-       0,     0,   688,   862,   522,     0,   256,   871,   278,     0,
-     271,   278,     0,     0,     0,   862,     0,   891,   581,   320,
-     265,     0,   272,     0,     0,   320,     0,   272,     0,     0,
-       0,     0,   150,     0,     0,     0,     0,   860,   873,   148,
-     703,     0,   860,     0,   359,     0,     0,   297,   911,   908,
-       0,     0,     0,     0,   909,     0,     0,   913,   703,     0,
-       0,     0,   266,   263,   259,     0,     0,     0,     0,     0,
-       0,     0,   575,     0,     0,     0,     0,   278,   871,     0,
-       0,     0,     0,     0,     0,   150,   278,     0,     0,   150,
-       0,     0,   148,     0,     0,     0,   148,     0,     0,     0,
-       0,   521,   944,     0,   523,     0,   947,   945,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,   150,     0,     0,     0,     0,     0,     0,   148,     0,
-       0,     0,     0,     0,     0,     0,   320,   522,   961,     0,
-       0,   522,   522,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,   524,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,   522,     0,     0,   371,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-     703,   371,     0,     0,     0,     0,     0,     0,     0,     0,
-     371,   288,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,   320,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   127,   128,   129,   130,   131,   132,
-     133,     0,     0,     0,     0,     0,     0,     0,   320,     0,
-       0,     0,   320,     0,   521,     0,     0,   523,   521,   521,
-       0,   523,   523,     0,     0,     0,     0,     0,     0,   191,
-       0,   192,     0,     0,     0,     0,   371,   371,   703,     0,
-       0,   371,     0,   521,     0,     0,   523,   308,   309,   310,
-     311,   312,   313,   314,   737,   738,     0,     0,     0,     0,
-       0,     0,   703,    30,     0,     0,     0,     0,     0,   524,
-     359,     0,     0,   524,   524,     0,   134,   135,   136,   137,
-     138,   139,   140,   141,   142,   143,   144,   145,   146,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,   524,    16,
-      17,    18,    19,    20,    21,    22,   352,   161,     0,   703,
-       0,     0,   353,     0,     0,     0,     0,   703,   703,   703,
-     703,   703,   703,    77,     0,   703,   359,     0,     0,   134,
-     135,   136,   137,   138,   139,   140,   141,   142,   143,   144,
-     145,   146,     0,     0,     0,     0,     0,    31,    32,    33,
-      34,    35,    36,    37,    38,    39,    40,    41,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,   703,     0,     0,
-     739,     0,     0,     0,     0,     0,    77,   289,   290,     0,
-       0,     0,     0,     0,     0,     0,   703,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   510,   511,   512,   513,   514,   515,
-     516,     0,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   510,   511,   577,   513,   514,   515,
-     516,     0,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   510,   511,   752,   513,   514,   515,
-     516,     0,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   510,   511,     0,   513,   514,   515,
-     516,     0,   517,   518,   519,   753,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   510,   511,     0,   513,   514,   515,
-     516,   754,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   510,   511,   959,   513,   514,   515,
-     516,     0,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   875,   511,     0,   513,   514,   515,
-     516,     0,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   511,     0,   513,   514,   515,
-     516,   958,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   960,   511,     0,   513,   514,   515,
-     516,     0,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   473,   474,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,   507,   508,   509,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   968,   511,     0,   513,   514,   515,
-     516,     0,   517,   518,   519,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   520,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   924,   693,   925,   918,   695,   919,
-     697,     0,   723,   724,   700,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   693,     0,   918,   695,   919,
-     697,   920,   723,   724,   700,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   693,     0,   918,   695,   919,
-     697,   923,   723,   724,   700,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   693,     0,   918,   695,   919,
-     697,   928,   723,   724,   700,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   693,     0,   918,   695,   919,
-     697,     0,   723,   724,   700,   930,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   693,     0,   694,   695,   696,
-     697,     0,   698,   699,   700,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   693,     0,   694,   695,   696,
-     697,     0,   698,   929,   700,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   693,     0,     0,   695,     0,
-     697,     0,   723,   724,   700,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   466,   467,   468,
-     469,   470,   471,   472,   690,   691,   475,   476,   477,   478,
-     479,   480,   481,   482,   483,   484,   485,   486,   487,   488,
-     489,     0,     0,   490,   491,     0,   492,     0,   493,   494,
-     495,   496,   497,   498,   499,   500,   501,   502,   503,   504,
-     505,   506,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,   692,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,   693,     0,     0,   695,     0,
-     697,     0,   698,     0,   700,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,   701,   154,   155,   156,
-     157,   158,   159,   160,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,    12,    13,    14,   231,     0,     0,     0,
-      16,    17,    18,    19,    20,    21,    22,     0,   161,     0,
-       0,     0,    26,     0,     0,     0,   389,   390,     0,     0,
-       0,     0,   391,   392,   393,   394,   395,   396,   397,   398,
-     399,   400,   401,   402,   403,   404,   405,   406,   407,   408,
-     409,   410,   411,   412,   413,     0,    30,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   162,
-     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
-     173,   174,   308,   309,   310,   311,   312,   313,   314,   273,
-     274,     0,     0,     0,     0,     0,   127,   128,   129,   130,
-     131,   132,   133,   273,   274,     0,     0,   414,     0,   415,
-     416,     0,   417,   418,   419,     0,   420,   421,   422,   423,
-     424,   425,   426,   427,   428,   429,     0,     0,    24,     0,
+/* YYDPREC[RULE-NUM] -- Dynamic precedence of rule #RULE-NUM (0 if none).  */
+static const unsigned char yydprec[] =
+{
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   134,   135,   136,   137,   138,   139,
-     140,   141,   142,   143,   144,   145,   146,     0,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,   143,   144,   145,
-     146,     5,     6,     7,     8,     9,    10,    11,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,    12,    13,    14,
-      15,    77,   289,   290,    16,    17,    18,    19,    20,    21,
-      22,   774,   161,    24,    25,    77,    26,    27,     0,   775,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-      30,     0,    31,    32,    33,    34,    35,    36,    37,    38,
-      39,    40,    41,    42,    43,    44,    45,    46,    47,    48,
-      49,    50,    51,    52,    53,    54,    55,    56,    57,    58,
-      59,    60,    61,    62,    63,    64,    65,    66,    67,    68,
-      69,    70,    71,    72,    73,    74,    75,   776,   777,     5,
-       6,     7,     8,     9,    10,    11,     0,     0,     0,     0,
-      77,     0,     0,     0,     0,    12,    13,    14,    15,     0,
-       0,     0,    16,    17,    18,    19,    20,    21,    22,     0,
-      23,    24,    25,     0,    26,    27,    28,    29,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,    30,     0,
-      31,    32,    33,    34,    35,    36,    37,    38,    39,    40,
-      41,    42,    43,    44,    45,    46,    47,    48,    49,    50,
-      51,    52,    53,    54,    55,    56,    57,    58,    59,    60,
-      61,    62,    63,    64,    65,    66,    67,    68,    69,    70,
-      71,    72,    73,    74,    75,     0,    76,     5,     6,     7,
-       8,     9,    10,    11,     0,     0,     0,     0,    77,     0,
-       0,     0,     0,    12,    13,    14,   231,     0,     0,     0,
-      16,    17,    18,    19,    20,    21,    22,     0,   161,    24,
-      25,     0,    26,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,    30,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,    42,
-      43,    44,    45,    46,    47,    48,    49,    50,    51,    52,
-      53,    54,     5,     6,     7,     8,     9,    10,    11,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,    12,    13,
-      14,    15,     0,     0,     0,    16,    17,    18,    19,    20,
-      21,    22,     0,   161,    24,     0,    77,    26,    27,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,    30,     0,    31,    32,    33,    34,    35,    36,    37,
-      38,    39,    40,    41,    42,    43,    44,    45,    46,    47,
-      48,    49,    50,    51,    52,    53,    54,     5,     6,     7,
-       8,     9,    10,    11,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,    12,    13,    14,   231,     0,     0,     0,
-      16,    17,    18,    19,    20,    21,    22,     0,   161,    24,
-       0,    77,    26,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,    16,    17,    18,    19,    20,    21,    22,   352,   161,
-       0,     0,     0,     0,   353,     0,    30,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,    42,
-      43,    44,    45,    46,    47,    48,    49,    50,    51,    52,
-      53,    54,   127,   128,   129,   130,   131,   132,   133,    31,
-      32,    33,    34,    35,    36,    37,    38,    39,    40,    41,
-       0,     0,   562,   563,   564,     0,     0,     0,     0,   565,
-       0,     0,     0,     0,     0,     0,    77,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-     308,   309,   310,   311,   312,   313,   314,   737,   738,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,   289,   290,
-       0,    30,     0,     0,     0,     0,   127,   128,   129,   130,
-     131,   132,   133,     0,   134,   135,   136,   137,   138,   139,
-     140,   141,   142,   143,   144,   145,   146,     0,   308,   309,
-     310,   311,   312,   313,   314,   273,   274,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,   127,   128,   129,   130,   131,   132,
-     133,    77,   134,   135,   136,   137,   138,   139,   140,   141,
-     142,   143,   144,   145,   146,    30,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,    24,   251,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,   143,   144,   145,
-     146,     0,     0,   739,     0,     0,     0,     0,     0,    77,
-     134,   135,   136,   137,   138,   139,   140,   141,   142,   143,
-     144,   145,   146,     0,   905,     0,     0,   906,   127,   128,
-     129,   130,   131,   132,   133,    77,   134,   135,   136,   137,
-     138,   139,   140,   141,   142,   143,   144,   145,   146,     0,
-       0,     0,     0,     0,     0,     0,     0,    77,     0,     0,
-       0,     0,     0,   191,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,   127,   128,   129,   130,
-     131,   132,   133,    77,     0,     0,     0,     0,     0,     0,
-     127,   128,   129,   130,   131,   132,   133,    30,     0,     0,
-       0,     0,     0,   127,   128,   129,   130,   131,   132,   133,
-     134,   135,   136,   137,   138,   139,   140,   141,   142,   143,
-     144,   145,   146,   251,   308,   309,   310,   311,   312,   313,
-     314,     0,     0,     0,     0,    24,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,    30,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,    77,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,   143,   144,   145,
-     146,     0,   134,   135,   136,   137,   138,   139,   140,   141,
-     142,   143,   144,   145,   146,   134,   135,   136,   137,   138,
-     139,   140,   141,   142,   143,   144,   145,   146,     0,     0,
-       0,     0,     0,     0,     0,    77,   134,   135,   136,   137,
-     138,   139,   140,   141,   142,   143,   144,   145,   146,    77,
-      16,    17,    18,    19,    20,    21,    22,   352,   161,     0,
-       0,     0,    77,   353,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,    77,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,    31,    32,
-      33,    34,    35,    36,    37,    38,    39,    40,    41,   389,
-     390,     0,     0,     0,     0,   391,   392,   393,   394,   395,
-     396,   397,   398,   399,   400,   401,   402,   403,   404,   405,
-     406,   407,   408,   409,   410,   411,   412,   413,     0,     0,
-       0,     0,     0,     0,  -226,   154,   155,   156,   157,   158,
-     159,   160,     0,     0,     0,     0,     0,   289,   290,     0,
-       0,    12,    13,    14,   231,     0,     0,     0,    16,    17,
-      18,    19,    20,    21,    22,     0,   161,    24,    25,     0,
-      26,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-     414,     0,   415,   416,     0,   417,   418,   419,     0,   420,
-     421,   422,   423,   424,   425,   426,   427,   428,   429,     0,
-       0,     0,     0,     0,    30,     0,    31,    32,    33,    34,
-      35,    36,    37,    38,    39,    40,    41,   162,   163,   164,
-     165,   166,   167,   168,   169,   170,   171,   172,   173,   174,
-     154,   155,   156,   157,   158,   159,   160,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,    12,    13,    14,    15,
-       0,     0,     0,    16,    17,    18,    19,    20,    21,    22,
-       0,   161,    24,     0,     0,    26,    27,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,    30,
-       0,    31,    32,    33,    34,    35,    36,    37,    38,    39,
-      40,    41,   162,   163,   164,   165,   166,   167,   168,   169,
-     170,   171,   172,   173,   174,   154,   155,   156,   157,   158,
-     159,   160,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,    12,    13,    14,   231,     0,     0,     0,    16,    17,
-      18,    19,    20,    21,    22,     0,   161,    24,     0,     0,
-      26,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,    30,     0,    31,    32,    33,    34,
-      35,    36,    37,    38,    39,    40,    41,   162,   163,   164,
-     165,   166,   167,   168,   169,   170,   171,   172,   173,   174,
-     154,   155,   156,   157,   158,   159,   160,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,    12,    13,    14,    15,
-       0,     0,     0,    16,    17,    18,    19,    20,    21,    22,
-       0,   161,     0,     0,     0,    26,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,     0,     0,    30,
-       0,    31,    32,    33,    34,    35,    36,    37,    38,    39,
-      40,    41,   162,   163,   164,   165,   166,   167,   168,   169,
-     170,   171,   172,   173,   174,   154,   155,   156,   157,   158,
-     159,   160,     0,     0,     0,     0,     0,     0,     0,     0,
-       0,    12,    13,    14,   231,     0,     0,     0,    16,    17,
-      18,    19,    20,    21,    22,     0,   161,     0,     0,     0,
-      26,   154,   155,   156,   157,   158,   159,   160,     0,     0,
-       0,     0,     0,     0,     0,     0,     0,   618,     0,   619,
-     620,     0,     0,     0,    16,    17,    18,    19,    20,    21,
-      22,     0,   161,     0,    30,     0,    31,    32,    33,    34,
-      35,    36,    37,    38,    39,    40,    41,   162,   163,   164,
-     165,   166,   167,   168,   169,   170,   171,   172,   173,   174,
        0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
-      30,     0,    31,    32,    33,    34,    35,    36,    37,    38,
-      39,    40,    41,   162,   163,   164,   165,   166,   167,   168,
-     169,   170,   171,   172,   173,   174
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0
 };
 
-static const yytype_int16 yycheck[] =
-{
-       4,    27,    96,     4,    28,     4,    27,     4,    99,   100,
-     101,    15,   298,   115,     4,   120,   241,     4,    15,   286,
-       4,   284,   120,    72,   181,    29,   387,   688,    15,   109,
-     582,   448,    29,   728,   115,   115,     4,    27,   804,   591,
-     195,   669,   109,    27,   585,   195,   109,    72,   600,   803,
-       0,   305,    29,     4,   630,   286,   932,   121,   124,   229,
-     126,    29,    10,    11,   109,   669,   111,    26,    10,    11,
-      23,    24,    25,   827,    72,   951,    27,   126,    37,   836,
-     901,    72,    20,   181,    88,   906,   149,    72,   640,   115,
-       0,    88,   603,    20,   115,   120,   115,    12,    90,   180,
-      38,    88,   120,   370,   149,   109,    96,   111,   126,    36,
-     126,    38,    96,   126,   111,   776,   120,   130,    72,     3,
-       4,     5,     6,     7,     8,     9,   121,   284,    96,   124,
-     128,   115,   195,   110,   239,   112,   125,   128,   959,   370,
-     129,   120,    72,   128,   225,   149,   774,   115,   229,   153,
-     195,   196,   120,   104,   180,   120,   153,   238,   238,   180,
-      72,   180,   120,   126,   123,   741,   153,   248,   126,   122,
-     774,   937,   176,   150,   128,   330,   942,   934,   126,   176,
-     330,   286,   786,   175,   126,   177,   284,   191,   192,   176,
-     270,   195,   196,    72,   191,   192,   180,   614,   128,   196,
-     966,   120,   179,   270,   191,   192,   125,   270,   462,   463,
-     128,   381,   238,   383,   725,    26,   128,    28,   585,   238,
-     120,   121,   303,   286,   305,   270,    37,   231,   196,   248,
-     195,   807,   133,   134,   231,   239,   603,   121,   933,   303,
-     126,   305,   120,   247,   231,    72,   236,   533,   126,   128,
-     247,   120,   236,   120,   238,   126,   125,   833,   356,   126,
-     247,   837,   266,    72,   126,   370,   270,   330,   236,   266,
-     238,   239,   120,   265,   239,   267,    26,    27,   126,   266,
-     257,   258,   286,   440,    72,   330,   511,   126,   369,   369,
-     126,   120,   803,   120,   519,   126,   957,   126,   278,   126,
-     381,   128,   383,   460,   296,   530,    72,   370,   120,   121,
-     287,   120,   124,    72,   306,   307,   827,   126,   322,   128,
-     297,   286,   126,   120,   109,   322,   330,   120,   689,   126,
-     126,   120,   120,   126,   130,   120,   888,   126,   126,    72,
-     128,   321,   440,   369,   813,   886,   815,   120,   817,   329,
-     369,   375,    72,   126,   120,    72,   126,   361,   725,   126,
-     126,   120,   460,   874,   361,   330,   370,   126,   126,   128,
-     451,   375,   120,   121,   361,   126,   124,   120,   329,   890,
-     120,   462,   463,   126,   120,   369,   126,   120,    26,    27,
-     126,   932,   372,   126,   386,   128,   181,   126,   462,   463,
-     120,   369,   126,   120,   126,   370,   126,   126,   128,   126,
-     951,   128,   197,   908,   126,   126,   201,   202,   913,   204,
-     205,   206,   207,   208,   209,   210,   211,   212,   213,   214,
-     215,   216,   217,   120,   121,   126,   803,   124,   321,   120,
-     121,   321,   126,   124,   805,    13,    14,    15,   121,   944,
-     945,   455,   947,   120,   239,   126,   457,    72,   455,   128,
-     827,    23,    24,    25,   121,   455,   961,   121,   129,   446,
-       3,     4,     5,     6,     7,     8,     9,    12,   741,   123,
-     120,   124,   121,   130,   126,   270,   123,   121,    36,   372,
-     126,   126,   372,   321,   126,   124,   120,   125,   125,   284,
-     225,   286,   125,   125,   229,   125,   125,   874,   589,   125,
-     127,   125,   125,   125,   125,   882,   883,   884,   885,   886,
-     887,   125,   125,   890,   125,    19,    20,    21,   127,   796,
-     125,   120,    26,    27,    28,    29,    30,    31,    32,    72,
-      34,   120,   623,   127,   372,   130,   124,   332,   333,   334,
-     130,   127,   337,   557,   131,   532,   127,   130,   122,   120,
-     557,   625,   120,   120,   127,   932,   125,   127,    30,   125,
-     557,   356,   455,   125,   125,   639,   122,   127,   303,   583,
-     305,   125,   367,   123,   951,   370,   583,   123,   120,   120,
-     131,    18,   131,   544,    20,   862,   583,   125,   120,   120,
-     127,   125,   122,   580,   623,     3,     4,     5,     6,     7,
-       8,     9,   124,   127,   618,   619,   620,   125,   125,   125,
-     131,   618,   619,   620,     3,     4,     5,     6,     7,     8,
-       9,   618,   619,   620,   127,   127,   127,   127,   793,   127,
-     127,   127,   127,   127,   127,   622,   127,   127,   127,   125,
-     127,   120,    71,   131,   127,   440,   381,   123,   383,    13,
-     125,   641,   642,   643,    13,   669,   120,   671,   669,   673,
-     669,   675,   669,   120,   671,   460,   673,   127,   675,   669,
-     905,   127,   669,   127,   671,   669,   673,   127,   675,   127,
-       4,   796,   786,   795,   669,   646,   677,   788,   796,   650,
-     651,   669,   365,   654,   655,   656,   657,   658,   659,   660,
-     661,   662,   663,   673,   795,   795,   871,   858,   669,   565,
-     669,   459,   639,   460,   669,   732,   451,     4,   795,   786,
-     793,   774,   683,   809,   879,   917,   669,   462,   463,   722,
-     933,   963,   742,   607,   669,   884,   356,   355,   793,   719,
-     295,   816,   295,   745,   835,    -1,    -1,   862,   641,   642,
-     643,   641,   642,   643,   858,    -1,    -1,    -1,   859,   795,
-     774,   775,   796,   774,   795,    -1,   795,    -1,   775,    -1,
-     861,   861,   786,    -1,   774,   866,   866,   786,    -1,   793,
-     774,   795,   796,    -1,   861,    -1,   786,    -1,   775,   866,
-      -1,    -1,   786,    -1,    -1,   809,   757,   775,   871,   760,
-     891,   795,   809,   641,   642,   643,    -1,   794,   786,    -1,
-      -1,    -1,   809,   774,   804,    -1,   871,   795,   793,    -1,
-     911,   796,    -1,    -1,    -1,   786,    -1,   829,   862,   624,
-     866,    -1,   861,    -1,    -1,   630,    -1,   866,    -1,    -1,
-      -1,    -1,   856,    -1,    -1,    -1,    -1,   861,   862,   856,
-     585,    -1,   866,    -1,   589,    -1,    -1,   871,   858,   856,
-      -1,    -1,    -1,    -1,   858,    -1,    -1,   861,   603,    -1,
-      -1,    -1,   866,   860,   669,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,   677,    -1,    -1,    -1,    -1,   862,   866,    -1,
-      -1,    -1,    -1,    -1,    -1,   909,   871,    -1,    -1,   913,
-      -1,    -1,   909,    -1,    -1,    -1,   913,    -1,    -1,    -1,
-      -1,   804,   909,    -1,   804,    -1,   913,   911,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,   945,    -1,    -1,    -1,    -1,    -1,    -1,   945,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,   741,   937,   945,    -1,
-      -1,   941,   942,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,   804,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,   966,    -1,    -1,   774,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-     725,   786,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-     795,   796,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,   807,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,     3,     4,     5,     6,     7,     8,
-       9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   833,    -1,
-      -1,    -1,   837,    -1,   937,    -1,    -1,   937,   941,   942,
-      -1,   941,   942,    -1,    -1,    -1,    -1,    -1,    -1,    38,
-      -1,    40,    -1,    -1,    -1,    -1,   861,   862,   803,    -1,
-      -1,   866,    -1,   966,    -1,    -1,   966,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    -1,    -1,    -1,    -1,
-      -1,    -1,   827,    72,    -1,    -1,    -1,    -1,    -1,   937,
-     835,    -1,    -1,   941,   942,    -1,    85,    86,    87,    88,
-      89,    90,    91,    92,    93,    94,    95,    96,    97,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   966,    26,
-      27,    28,    29,    30,    31,    32,    33,    34,    -1,   874,
-      -1,    -1,    39,    -1,    -1,    -1,    -1,   882,   883,   884,
-     885,   886,   887,   132,    -1,   890,   891,    -1,    -1,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    74,    75,    76,
-      77,    78,    79,    80,    81,    82,    83,    84,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,   932,    -1,    -1,
-     126,    -1,    -1,    -1,    -1,    -1,   132,   133,   134,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,   951,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,   122,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,   122,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,   122,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,    -1,   123,   124,   125,
-     126,    -1,   128,   129,   130,   131,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+/* YYMERGER[RULE-NUM] -- Index of merging function for rule #RULE-NUM.  */
+static const unsigned char yymerger[] =
+{
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0
+};
+
+/* YYDEFACT[S] -- default rule to reduce with in state S when YYTABLE
+   doesn't specify something else to do.  Zero means the default is an
+   error.  */
+static const unsigned short int yydefact[] =
+{
+       3,     0,     4,     1,   481,     0,   437,   438,   439,   433,
+     434,   435,   436,   441,   442,   440,   483,    52,    51,    53,
+     113,   396,   397,   388,   391,   392,   394,   395,   393,   387,
+     389,   217,     0,   346,   410,     0,     0,     0,   343,   454,
+     455,   456,   457,   458,   463,   464,   465,   466,   459,   460,
+     461,   462,   467,   468,   453,   443,   444,   445,   446,   447,
+     448,   449,   450,   451,   452,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,    23,   341,     5,    19,
+      20,    13,    11,    12,     9,    36,    17,   376,    43,   481,
+      10,    16,   376,     0,   481,    14,   134,     7,     6,     8,
+       0,    18,     0,     0,     0,     0,   205,     0,     0,    15,
+       0,   323,   481,     0,     0,     0,     0,   481,   409,   325,
+     342,     0,   481,   384,   385,   386,   177,   279,   401,   405,
+     408,   481,   481,   482,    21,   634,   115,   114,   390,     0,
+     437,   438,   439,   433,   434,   435,   436,   700,   701,   611,
+     607,   608,   606,   609,   610,   612,   613,   441,   442,   440,
+     670,   580,   579,   581,   599,   583,   585,   584,   586,   587,
+     588,   591,   592,   590,   589,   595,   598,   582,   600,   601,
+     593,   578,   577,   597,   596,   553,   554,   594,   604,   603,
+     602,   605,   555,   556,   557,   684,   558,   559,   560,   566,
+     567,   561,   562,   563,   564,   565,   568,   569,   570,   571,
+     572,   573,   574,   575,   576,   682,   681,   694,   453,   443,
+     444,   445,   446,   447,   448,   449,   450,   451,   452,   670,
+     688,   685,   689,   699,   162,   670,   549,   550,   544,   687,
+     543,   545,   546,   547,   548,   551,   552,   686,   693,   692,
+     683,   690,   691,   672,   678,   680,   679,   670,     0,     0,
+     437,   438,   439,   433,   434,   435,   436,   389,   376,   481,
+     376,   481,   481,     0,   481,   409,     0,   177,   369,   371,
+     370,   374,   375,   373,   372,   670,    33,   350,   348,   349,
+     353,   354,   352,   351,   357,   356,   355,     0,     0,   368,
+     358,   359,   360,   361,   362,   363,   364,   365,   366,   367,
+       0,   481,   324,     0,     0,   326,   327,     0,   488,   492,
+     494,     0,     0,   501,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,   279,     0,
+      50,   279,   109,   116,     0,     0,    27,    37,    24,   481,
+      26,    28,     0,    25,     0,   177,   249,   238,   670,   187,
+     237,   189,   190,   188,   208,   481,     0,   211,    22,   413,
+     339,   195,   193,   244,   330,     0,   326,   327,   328,    58,
+     329,    57,     0,   333,   331,   332,   334,   412,   335,   344,
+     376,   481,   376,   481,   135,   206,     0,   481,   403,   382,
+     287,   289,   178,     0,   275,   265,   177,   481,   481,   481,
+     400,   280,   469,   470,   479,   471,   376,   432,   431,   484,
+       3,   672,     0,     0,   657,   656,   167,   161,     0,     0,
+       0,   664,   666,   662,   347,   481,   390,   279,    50,   279,
+     116,   330,   376,   376,   150,   146,   142,     0,   145,     0,
+       0,     0,   153,     0,   151,     0,   155,   154,     0,     0,
+     350,   348,   349,   353,   354,   352,   351,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,   381,   380,
+       0,   275,   177,   481,   378,   379,    61,    39,    48,   406,
+     481,     0,     0,    58,     0,     0,   121,   105,   117,   112,
+     481,   481,     0,     0,     0,     0,     0,     0,   255,     0,
+       0,   249,   247,   336,   337,   338,   645,   279,    50,   279,
+     116,   196,   194,   383,   376,   477,   207,   212,   481,     0,
+     191,   219,   312,   481,     0,     0,   267,   272,   266,     0,
+       0,   303,     0,   177,   474,   473,   475,   472,   480,   402,
+     660,   639,   623,   668,   641,   628,   642,   637,   658,   638,
+     629,   633,   632,     0,   627,   630,   631,   636,   622,   640,
+     635,   624,   625,   626,     4,     0,   675,   677,     0,   671,
+     674,   676,   695,     0,   164,     0,     0,     0,   696,    30,
+     673,   698,   634,   634,   634,   411,     0,   142,   177,   406,
+       0,   481,   279,   279,     0,   312,   481,   326,   327,    32,
+       0,     0,     3,   158,   159,     0,   553,   554,     0,   538,
+     537,     0,   535,     0,   536,   216,   542,   157,   156,   486,
+       0,     0,     0,   496,   499,     0,     0,   506,   510,   514,
+     518,   508,   512,   516,   520,   522,   524,   526,   527,     0,
+      41,   274,   278,   377,    62,     0,    60,    38,    47,    56,
+     481,    58,     0,     0,   107,     0,   119,   122,     0,   111,
+     407,   481,   481,     0,   250,   251,     0,   670,   236,     0,
+     262,   406,     0,   245,   255,     0,     0,   406,     0,   481,
+     404,   398,   478,   288,   219,     0,   232,   284,   313,     0,
+     307,   197,   192,   271,   276,     0,   270,   285,   304,   477,
+     634,   647,   634,     0,    31,    29,   697,   165,   163,     0,
+       0,     0,   427,   426,   425,     0,   177,   279,   420,   424,
+     179,   180,   177,     0,     0,     0,     0,   137,   141,   144,
+     139,   111,     0,     0,   136,   279,   147,   307,    35,     4,
+       0,   541,     0,     0,   540,   539,   531,   532,     0,   489,
+     493,   495,     0,     0,   502,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   529,    65,    66,    67,
+      44,   481,     0,   101,   102,   103,    99,    49,    92,    97,
+     177,    45,    54,   481,   110,   121,   123,   118,   104,     0,
+     325,     0,   177,     0,   481,   261,   256,   257,     0,   340,
+     219,     0,   652,   653,   654,   650,   651,   646,   649,   345,
+      42,    40,   108,   111,   399,   232,   214,   225,   223,   221,
+     229,   234,     0,   220,   227,   228,   218,   233,   318,   315,
+     316,     0,   242,   279,   621,   618,   619,   268,   614,   616,
+     617,   290,   476,     0,     0,     0,   485,   167,   428,   429,
+     430,   422,   305,   168,   481,   419,   376,   171,   174,   665,
+     667,   663,   138,   140,   143,   255,    34,   177,   533,   534,
+       0,     0,   497,     0,     0,   505,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   530,     0,     0,    64,
+       0,   100,   481,    98,     0,    94,     0,    55,   120,     0,
+     672,     0,   127,   252,   253,   240,   209,   258,   177,   232,
+     481,   645,   106,   213,   255,     0,     0,   224,   230,   231,
+     226,   321,   317,   310,   311,   309,   308,   255,   277,     0,
+     615,   291,   286,   294,     0,   644,   669,   643,   648,   659,
+     166,   376,   290,   306,   181,   177,   421,   181,   177,     0,
+       0,   487,   490,     0,   500,   503,   507,   511,   515,   519,
+     509,   513,   517,   521,     0,     0,   528,     0,   389,     0,
+       0,    83,    79,    70,    76,    63,    78,    72,    71,    75,
+      73,    68,    69,     0,    77,     0,   202,   203,    74,     0,
+     323,     0,     0,   177,    80,   177,     0,   177,    46,   124,
+     126,   125,   239,   219,   260,   262,   263,   246,   248,     0,
+       0,   222,     0,   415,   235,   279,     0,     0,     0,   620,
+     255,   661,   423,   281,   183,   169,   182,   301,     0,   177,
+     172,   175,   148,   160,     0,   672,     0,     0,     0,    90,
+     481,    88,     0,     0,     0,     0,   177,    81,    84,    86,
+      87,     0,    85,     0,   198,    82,   481,   204,     0,     0,
+      95,    93,    96,     0,   232,   259,   265,   655,   481,   417,
+     376,   414,   481,   322,   481,     0,     0,   282,   302,   181,
+     295,   491,     0,   504,   523,   525,     0,   481,    89,     0,
+      91,   481,     0,     0,     0,   481,   201,     0,   210,   264,
+     215,   376,   416,   319,   243,   481,   184,   185,   290,   176,
+     149,   498,   670,   672,   406,   130,     0,   481,     0,   199,
+       0,   670,   418,   292,   186,   283,   297,   296,     0,   300,
+       0,     0,     0,    59,     0,   406,   131,   200,     0,   295,
+     298,   299,   672,   133,   128,    59,     0,   241,   293,     0,
+     129,   132
+};
+
+/* YYPDEFGOTO[NTERM-NUM].  */
+static const short int yydefgoto[] =
+{
+      -1,     1,     2,     4,    88,   356,    90,    91,    92,   461,
+      93,    94,    95,   358,    97,   349,    98,   926,   675,   375,
+     509,   510,   678,   674,   801,   802,  1005,  1079,  1007,   807,
+     808,   924,   920,   809,   100,   101,   102,   516,   103,   359,
+     519,   688,   685,   686,   929,   360,   931,  1071,  1145,   105,
+     106,   616,   624,   617,   454,   455,   895,  1110,   456,   107,
+     320,   108,   361,   770,   362,   436,   603,   877,   604,   605,
+     974,   606,   977,   607,   978,  1109,   882,   752,  1055,  1056,
+    1106,  1136,   363,   112,   113,   114,  1082,  1015,  1016,   116,
+     528,  1033,   117,   546,   714,   547,   944,   548,   118,   550,
+     716,   853,   945,   854,   855,   856,   857,   946,   369,   370,
+    1032,   551,   957,  1017,   531,   830,   383,   704,   526,   694,
+     695,   699,   700,   826,  1035,   827,   828,  1096,   557,   558,
+     725,   559,   119,   414,   500,   555,   863,   501,   502,   883,
+    1138,   415,   871,   416,   545,   962,  1050,  1169,  1140,  1058,
+     562,   972,   552,   956,   717,   963,   719,   859,   860,   951,
+    1046,  1047,   810,   121,   282,   283,   530,   124,   125,   126,
+     284,   536,   285,   268,   129,   130,   348,   503,   376,   132,
+     133,   134,   135,   712,  1076,   137,   426,   544,   138,   139,
+     269,  1044,  1045,  1100,  1131,   746,   747,   886,   971,   748,
+     140,   141,   142,   421,   422,   423,   424,   729,   713,   425,
+     690,   143,   145,   583,   144,   778,   478,   901,  1064,   479,
+     480,   782,   983,   783,   483,   904,  1066,   786,   790,   787,
+     791,   788,   792,   789,   793,   794,   795,   917,   645,   584,
+     585,   586,   867,   868,   959,   587,   588,   429,   589,   590,
+     968,   705,   874,   836,   837,   870,   941,   437,   591,   732,
+     730,   592,   614,   612,   613,   593,   731,   432,   439,   599,
+     600,   601,   264,   265,   266,   267
+};
+
+/* YYPACT[STATE-NUM] -- Index in YYTABLE of the portion describing
+   STATE-NUM.  */
+#define YYPACT_NINF -1028
+static const short int yypact[] =
+{
+   -1028,   108,   115, -1028, -1028,  6791,   217,   221,   231,   247,
+     250,   288,   291,   -71,   -29,   -12, -1028, -1028, -1028, -1028,
+     285, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+     129, -1028,  4328, -1028, -1028,  8663,   173,  7489, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028,   148,   150,   192,   246,   269,   273,
+     292,   300,   304,   329,   342,   -51,   -10,    20,   128,   135,
+     143,   185,   209,   214,   218,   222,   230,   254,   284,   289,
+     299,   306,   314,   326,   354,   358, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028,   113, -1028, -1028, -1028, -1028, -1028, -1028,
+    8336, -1028,   164,   164,   164,   164, -1028,   375,  8663, -1028,
+     123, -1028,   341,  1951,  9036,   437,  7756,   166,   196, -1028,
+       7,  8445, -1028, -1028, -1028, -1028,   256,   140, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,   383,  4790,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028,    15, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,    55,  7756,
+      -7,    -3,     4,    28,   152,   162,   179,   511, -1028, -1028,
+   -1028, -1028, -1028,  7778,   437,   437,  8663,   256, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028,   395,    -7,    -3,     4,
+      28,   152,   162,   179, -1028, -1028, -1028,  7756,  7756, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+     400,   406, -1028,  1951,  7756,   437,   437,  6747, -1028, -1028,
+   -1028,  6747,  6747, -1028,  6747,  6747,  6747,  6747,  6747,  6747,
+    6747,  6747,  6747,  6747,  6747,  6747,  6747,  6747,  8191,   407,
+    8001,  8191, -1028,  7424,   403,  7756, -1028, -1028, -1028, -1028,
+   -1028, -1028,  8336, -1028,  8554,   463,   409, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028,  8663, -1028, -1028,   525,
+   -1028, -1028, -1028, -1028,   410,   437,   437,   437, -1028, -1028,
+   -1028, -1028,     7, -1028, -1028, -1028, -1028,   525, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028,  1951, -1028, -1028,   525,
+   -1028, -1028, -1028,  7826, -1028,   158,    74, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028,   366, -1028,   525,   525,  5714,
+   -1028, -1028,  2172,  2326, -1028, -1028,   228, -1028,  2480,  3558,
+    2634, -1028, -1028, -1028, -1028, -1028, -1028,  8219,  8110,  8219,
+    7598, -1028, -1028, -1028, -1028, -1028, -1028,  7890, -1028,  2788,
+     412,   416, -1028,   428, -1028,    57, -1028, -1028,  6638,  1951,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028,   425,  6747,  6747,
+    6747,   426,   427,  6747,   429,   432,   433,   436,   438,   442,
+     443,   445,   447,   448,   449,   450,   452,   451, -1028, -1028,
+     460, -1028,   256, -1028, -1028, -1028, -1028, -1028, -1028,    54,
+   -1028,  9011,   649,   437,   437,   464,  6747, -1028, -1028, -1028,
+     206, -1028,  7644,  8554,  7826,  7756,   444,  2942,   461,  8990,
+     676,   409, -1028, -1028, -1028, -1028, -1028,  8191,  8110,  8191,
+    7598, -1028, -1028,   525, -1028,   499, -1028, -1028, -1028,  1610,
+   -1028, -1028,   458, -1028,  1951,   122, -1028, -1028, -1028,   468,
+    7890, -1028,   467,   256,   525,   525,   525, -1028, -1028,  1417,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028,   466, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028,   481,  3712, -1028, -1028,   474, -1028,
+   -1028, -1028, -1028,   181, -1028,  8772,    77,   576, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028,   525,   488, -1028,   256,    64,
+     489,   189,  8219,  8219,   183,   239, -1028, -1028, -1028, -1028,
+     490,   437, -1028, -1028, -1028,   623,   484,   485,    27, -1028,
+   -1028,   495, -1028,   486, -1028, -1028, -1028, -1028, -1028, -1028,
+     492,   496,   497, -1028, -1028,   501,  8663, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,  7756,
+   -1028,   502, -1028,   525,    67,  7378, -1028, -1028,   504,   525,
+   -1028,   437,   437,  9011, -1028,   335, -1028,   506,  8663,   510,
+     525, -1028, -1028,  1951,   508, -1028,    65, -1028, -1028,   518,
+     562, -1028,   437, -1028,   461,  5868,   515,    84,   526,   206,
+    1417, -1028,   499, -1028, -1028,    70,   117, -1028, -1028,   519,
+     109, -1028, -1028, -1028, -1028,  6176, -1028, -1028, -1028,   499,
+   -1028, -1028, -1028,   521, -1028, -1028, -1028, -1028, -1028,  7756,
+    7756,  7756, -1028,   437,   437,  8663,   256,   140, -1028, -1028,
+   -1028, -1028,   256,   638,  4944,  5098,  5252, -1028,   527, -1028,
+   -1028, -1028,   533,   534, -1028,   140, -1028,    78, -1028,   535,
+    8663, -1028,   528,   529, -1028, -1028, -1028, -1028,  8663, -1028,
+   -1028, -1028,  8663,  8663, -1028,   546,  8663,  8663,  8663,  8663,
+    8663,  8663,  8663,  8663,  8663,  8663,   530, -1028, -1028, -1028,
+   -1028, -1028,   536, -1028, -1028, -1028,   382,   537, -1028,   653,
+     463, -1028,   525, -1028, -1028,  6747, -1028, -1028, -1028,    83,
+     324,  7756,   463,  3096, -1028, -1028,   551, -1028,  8663, -1028,
+   -1028,   552, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028,   560, -1028,   117, -1028, -1028, -1028, -1028,
+   -1028, -1028,    89, -1028,    55, -1028, -1028, -1028, -1028,   519,
+   -1028,   514, -1028,   140, -1028,  6022, -1028,  6176, -1028, -1028,
+   -1028,   345, -1028,  5406,  4482,  5560, -1028,   228, -1028, -1028,
+   -1028, -1028,  7890, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028,   461, -1028,   256, -1028, -1028,
+     554,  8663, -1028,   555,  8663, -1028,   557,   559,   561,   563,
+     564,   565,   566,   567,   570,   571, -1028,   572,  1787, -1028,
+    7756, -1028, -1028, -1028,  7756, -1028,  7378,   525, -1028,  6176,
+   -1028,   569, -1028, -1028, -1028, -1028,   525,   632,   256,   117,
+   -1028, -1028, -1028, -1028,   461,    55,  8881, -1028, -1028, -1028,
+   -1028,   574, -1028, -1028, -1028, -1028, -1028,   461, -1028,  6484,
+   -1028, -1028, -1028, -1028,   575, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028,   345, -1028,   584,    50,  1417,   584,   256,   581,
+     589, -1028, -1028,   585, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028,   713,   714, -1028,  7038,   226,  7709,
+      65, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028,  6923, -1028,   164, -1028, -1028, -1028,   595,
+     410,  1951,  7153,   256, -1028,   463,  1981,   463,   537,  6176,
+    4636, -1028,   684, -1028, -1028, -1028, -1028, -1028,   525,  5868,
+     590, -1028,  8881, -1028, -1028,   140,   588,  6176,   592, -1028,
+     461, -1028,  1417, -1028, -1028, -1028, -1028, -1028,   598,   256,
+   -1028, -1028, -1028, -1028,   600, -1028,   602,   604,   606, -1028,
+   -1028, -1028,   805,   164,   410,  7268,   463, -1028, -1028, -1028,
+   -1028,  6923, -1028,  7268, -1028, -1028, -1028, -1028,  1951,  7826,
+   -1028, -1028, -1028,    65,   117, -1028,   158, -1028, -1028, -1028,
+   -1028, -1028, -1028,  6176, -1028,   607,  6330, -1028, -1028,   584,
+   -1028, -1028,  3866, -1028, -1028, -1028,  7936, -1028, -1028,   805,
+   -1028, -1028,  7826,  7268,    90, -1028, -1028,   614, -1028, -1028,
+     525, -1028,  1417,   525,   525, -1028,  6330, -1028,   345, -1028,
+     434, -1028, -1028, -1028,    95, -1028,  7936, -1028,  8072, -1028,
+      93, -1028,  1417,   525, -1028, -1028, -1028, -1028,    65,    65,
+    3250,  4020,   373,    56,  8072,   107, -1028, -1028,  3404, -1028,
+   -1028, -1028, -1028, -1028, -1028,    59,   373, -1028,   434,  4174,
+   -1028, -1028
+};
+
+/* YYPGOTO[NTERM-NUM].  */
+static const short int yypgoto[] =
+{
+   -1028, -1028,  -401, -1028, -1028,   745,  -167, -1028, -1028, -1028,
+   -1028,  -847,   -99,    -2,   -31, -1028, -1028, -1028, -1028,    21,
+    -404,  -107,  -828, -1028, -1028, -1028, -1028,  -166, -1028,  -173,
+    -272, -1028, -1028,   -49,  -163,  -159,   -27, -1028, -1028,    11,
+    -483, -1028, -1028,   -54, -1028, -1028, -1028,  -309,  -771,  -156,
+    -119,  -409,   147,     0, -1028, -1028, -1028, -1028,   149,  -152,
+   -1028, -1028,    18, -1028,    25, -1028, -1028, -1028,  -109, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028,   405, -1028,  -923, -1028,
+   -1028, -1028,   766, -1028, -1028, -1028,  -145,  -218,    26,  -121,
+   -1028, -1028,  -268,  -502, -1028, -1028, -1028,  -310,  -296,  -526,
+    -690, -1028, -1028, -1028, -1028,  -802, -1028, -1028,   -80, -1028,
+   -1028, -1028, -1028,  -117, -1028, -1028, -1028, -1028,   249, -1028,
+     -40,  -684, -1028, -1028, -1028,  -253, -1028, -1028,  -313, -1028,
+   -1028, -1028,  -131,   287,  -302,   293, -1028,   -73,  -136,  -713,
+   -1028,  -251, -1028,  -699, -1028,  -930, -1028, -1028,  -374, -1028,
+   -1028, -1028,  -432, -1028, -1028,  -478, -1028, -1028,   -63, -1028,
+   -1028, -1028,  1119,  1082,  1110,    14, -1028, -1028,   -32,   700,
+      -5, -1028,    45, -1028,   930,   -21,   397, -1028,    94,   998,
+   -1028, -1028,  -497, -1028,   943,   110, -1028, -1028,  -124,  -876,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+     195,    51,   440,  -402,   376, -1028,   377, -1028,    75, -1028,
+     493, -1028, -1028, -1028,  -115, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,
+   -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028, -1028,     9,
+     121,   174,  -835,  -822, -1028,  -633,  -151,  -458, -1028,   -65,
+   -1028,  -130, -1028, -1027, -1028,  -683, -1028,  -582, -1028, -1028,
+   -1028,  -259, -1028, -1028, -1028,   251, -1028,  -181,  -426, -1028,
+    -421,    36,    -8, -1028,  -658, -1028
+};
+
+/* YYTABLE[YYPACT[STATE-NUM]].  What to do in state STATE-NUM.  If
+   positive, shift that token.  If negative, reduce the rule which
+   number is the opposite.  If zero, do what YYDEFACT says.
+   If YYTABLE_NINF, syntax error.  */
+#define YYTABLE_NINF -655
+static const short int yytable[] =
+{
+     127,   420,   381,    96,   278,   595,   382,   408,   280,   444,
+     405,   357,   404,   467,   563,   296,   104,   388,   610,   123,
+     831,   738,   838,   109,   845,   625,    99,   468,   722,   594,
+     110,   115,   325,   371,   372,   373,   458,   689,   822,   887,
+     620,   260,  1053,   943,   619,   960,   281,   884,   711,   515,
+     128,   323,   721,   884,  1060,   466,   279,   676,   433,   676,
+     410,   411,   676,  -357,   438,   441,   442,   676,   263,  -357,
+    1043,  1003,   835,    16,   718,   157,   158,  -369,   434,  1137,
+      16,  -371,   326,   258,   410,   411,   440,   676,  -370,   327,
+     774,   385,   869,    16,  1029,   797,   798,   799,   676,   131,
+     400,    16,   750,   392,   402,  -356,    16,   947,     3,  1154,
+     676,  -356,  -374,    16,   459,    -2,    16,   549,   386,   389,
+     847,   325,  -355,   839,   123,   701,   751,   417,  -355,   286,
+     328,    99,    16,   412,   707,  -369,   115,  1037,   761,  -371,
+     939,   148,   403,   948,   949,   406,  -370,   718,   848,   399,
+    1069,   417,   401,   261,   754,   755,   756,   412,   260,   648,
+     329,   849,   408,   435,   850,   561,  1099,   722,   387,   390,
+    -374,   326,   149,   468,   610,   775,   288,   289,   290,   291,
+     292,   293,   294,   851,   980,   431,  1139,   527,   677,   -59,
+     677,   721,   -59,   841,   634,   443,   418,   419,   884,   -59,
+     258,   647,   -59,   800,   364,   243,   262,   960,  1155,   366,
+     846,   979,  1103,   762,   763,   844,   861,   417,   841,   -59,
+     418,   419,   -59,   862,  1149,  1036,   843,  1167,   549,   677,
+     -59,   769,   711,   -59,   869,   706,  -375,   708,   446,   884,
+     408,   841,   -59,   405,   434,   -59,  -373,   861,   352,   862,
+      38,   385,   408,  -170,   852,   452,   723,   378,   549,   453,
+    1040,   724,  1057,  -372,   325,  1061,   410,   411,   330,   149,
+     261,  -173,   873,  1048,   875,   331,   884,  -170,   386,   884,
+      38,   960,  -368,   332,  -358,   541,   418,   419,  -368,   542,
+    -358,   385,  1128,  -269,  -375,   556,   869,   281,   367,   368,
+    -326,  -369,   325,   325,  -373,  -371,  -326,   279,   295,   146,
+     147,   618,   728,   618,   326,  -370,  1162,   764,   386,   325,
+     737,  -372,   765,   262,  -116,   333,  -359,   518,   387,   435,
+    -327,  -374,  -359,   400,  -375,  1162,  -327,  1176,   469,   412,
+    -116,  -116,  1077,  1094,   518,   513,  1108,  1176,   325,   334,
+     325,  -350,   326,   326,   335,  -348,   838,  -350,   336,  -369,
+     884,  -348,   337,  -371,   511,  -349,  1105,   458,   387,   326,
+     338,  -349,  -373,  -370,   385,  -372,   123,  1166,   406,   766,
+    -360,  -353,  -314,   279,  -354,   401,  -360,  -353,   115,  -374,
+    -354,  1174,  -375,  1166,   339,   514,   869,   549,   326,   408,
+     326,   386,   405,  -361,   680,  1180,   835,  -362,   325,  -361,
+     803,   804,   805,  -362,   869,   288,   289,   290,   291,   292,
+     293,   294,  -352,   838,   340,  -351,  -363,   554,  -352,   341,
+    -373,  -351,  -363,  -372,  -364,  1127,   392,   385,  -365,   342,
+    -364,   260,   260,   513,  -365,   325,   343,   260,   260,   260,
+     973,   387,   627,   838,   344,   631,   523,  1097,   326,   239,
+      33,   930,   511,  -366,   386,  1156,   345,  1157,   260,  -366,
+     869,   814,   259,   835,   815,  -324,  -367,   646,  1158,   385,
+     581,  1159,  -367,   258,   258,   961,   618,   618,  -314,   258,
+     258,   258,   400,   514,   346,   326,    38,     5,   347,   351,
+    1170,  1171,   628,   835,  1030,   514,   681,   631,  1142,   392,
+     258,  1143,   418,   419,   387,   374,   823,   325,   430,   325,
+     325,    38,   385,   446,   513,   702,   683,   953,   954,   955,
+      21,    22,   460,   513,   464,   325,   260,   406,   693,   392,
+    -152,   413,   506,   511,   279,   244,   412,   525,    16,   386,
+     532,   632,   511,   261,   261,   627,   682,   514,   646,   261,
+     261,   261,   633,  1126,   649,   653,   654,   326,   656,   326,
+     326,   657,   658,  1059,   514,   659,   813,   660,   258,   697,
+     261,   661,   662,   514,   663,   326,   664,   665,   666,   259,
+     669,   667,   350,   668,   670,   950,  1126,   353,   684,   387,
+     743,  -314,  -254,   573,   260,   628,   262,   262,   727,   610,
+     733,   885,   262,   262,   262,   379,   741,   734,   736,   354,
+     397,   881,   757,   760,   768,   409,   771,   772,   773,   618,
+     777,   779,   753,   262,   427,   428,   776,   780,   781,  1112,
+     784,   724,   811,   816,   818,   825,   258,   821,   261,   840,
+     744,   385,   288,   289,   290,   291,   292,   293,   294,   824,
+     842,   385,   858,   888,   325,   876,   765,   892,   893,   916,
+     325,   896,   898,   899,   919,   447,   922,   449,   681,   288,
+     289,   290,   291,   292,   293,   294,  1041,   905,   386,   923,
+     937,   610,   457,   940,   942,   981,   984,    33,   986,   745,
+     987,   262,   988,  1031,   989,   990,   991,   992,   993,   994,
+     995,  1086,  1144,   996,   326,  1034,   261,  1161,  -320,  1051,
+     326,  1054,  1062,  1063,  1065,   549,  1067,  1068,   682,  1085,
+    1093,  1098,  1102,  1104,   325,   325,   325,   324,   387,  1107,
+     610,  1111,  1163,  1113,  1165,  1114,  1179,  1115,  1135,  1151,
+      89,  1002,  1006,  1028,  1091,  1008,   581,   921,   610,  1009,
+    1175,   928,  1010,  1118,   758,   894,  1011,   759,   970,   262,
+     524,   111,   448,  1014,   450,   379,   581,   397,  1125,  1073,
+     703,   933,  1095,  1129,   326,   326,   326,  1018,   505,   672,
+     958,   505,   549,   549,   671,  1178,   952,   537,   817,   539,
+     749,   567,   568,  1024,   872,   581,   581,   581,  1049,   967,
+     726,  1039,   932,     0,   465,     0,   325,     0,     0,     0,
+       0,   560,     0,   569,     0,   393,   549,   396,   398,    17,
+      18,    19,   260,     0,     0,     0,    21,    22,    23,    24,
+      25,    26,    27,    28,    29,     0,   277,     0,     0,   622,
+     623,     0,   522,     0,     0,     0,     0,     0,     0,     0,
+       0,   392,     0,     0,     0,     0,   326,     0,   529,   582,
+       0,     0,   259,   259,   258,     0,     0,   627,   259,   259,
+     259,     0,     0,     0,     0,     0,     0,   505,     0,   505,
+       0,     0,     0,     0,   538,     0,   540,     0,   408,   259,
+     543,   405,     0,   404,     0,  1087,     0,   560,     0,  1101,
+     564,   565,   566,   127,   357,   325,  1004,     0,   581,   325,
+       0,   325,     0,     0,   581,   581,   581,   628,     0,   104,
+       0,     0,  1021,     0,     0,  1084,  1012,     0,   615,    99,
+       0,   710,     0,  1013,   261,     0,     0,     0,   136,     0,
+       0,   408,     0,     0,   405,     0,     0,     0,  1087,   408,
+       0,  1160,   405,   128,     0,   326,  1087,   259,   560,   326,
+    1168,   326,     0,   357,     0,     0,     0,   505,   287,   505,
+     581,     0,     0,     0,   396,   398,     0,     0,     0,   385,
+       0,   400,   127,  1120,   325,   402,   673,   262,     0,   408,
+       0,     0,   405,   679,     0,     0,  1087,     0,   127,   505,
+     581,  1078,  1022,   323,   465,     0,   386,   127,  1070,     0,
+       0,   325,  1072,   457,     0,   396,   398,  1021,     0,     0,
+       0,  1080,     0,   403,    99,   259,  1088,     0,  1081,   260,
+    1042,   715,   128,   401,   326,     0,   720,     0,     0,     0,
+     512,     0,   400,   365,   391,     0,   385,     0,   128,     0,
+       0,   377,   505,   505,     0,     0,   387,   128,   392,     0,
+     127,   326,     0,     0,     0,     0,   127,     0,   127,  1078,
+     581,   258,     0,   386,   627,   533,   534,   535,     0,     0,
+     581,  1075,   400,  1117,     0,  1021,  1121,  1088,   581,  1080,
+       0,   392,   279,     0,   401,     0,  1081,  1083,     0,     0,
+       0,   513,     0,     0,     0,   122,     0,   627,   127,   767,
+     128,   260,     0,     0,   120,     0,   128,     0,   128,   407,
+     511,     0,     0,   387,   628,     0,     0,  1088,     0,     0,
+    1147,   513,     0,   513,   279,   582,     0,   322,   512,     0,
+     505,   261,     0,     0,   581,     0,   321,   581,     0,   513,
+     511,   514,   511,   258,     0,   582,  1119,   628,   128,   260,
+     260,     0,     0,   812,     0,  1123,     0,   260,   511,     0,
+       0,     0,     0,     0,     0,   819,     0,   581,   260,     0,
+       0,   514,     0,   514,   582,   582,   582,     0,     0,     0,
+       0,     0,     0,     0,   262,   384,     0,     0,   394,   514,
+       0,   258,   258,   396,   398,   925,     0,     0,     0,   258,
+       0,     0,     0,     0,     0,     0,     0,   934,     0,   512,
+     258,     0,     0,   261,     0,     0,   395,     0,   512,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,   477,     0,     0,
+       0,   481,   482,   259,   484,   485,   486,   487,   488,   489,
+     490,   491,   492,   493,   494,   495,   496,   497,     0,     0,
+     391,   261,   261,   976,   407,     0,   262,     0,     0,   261,
+       0,     0,     0,     0,   918,     0,     0,     0,     0,     0,
+     261,     0,     0,     0,     0,   365,   927,   582,     0,     0,
+       0,     0,     0,   582,   582,   582,     0,   936,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,   396,     0,     0,   262,   262,     0,     0,     0,     0,
+       0,     0,   262,     0,     0,     0,   504,     0,     0,   504,
+       0,     0,     0,   262,     0,     0,     0,     0,     0,     0,
+       0,     0,   407,     0,     0,   451,     0,     0,  1052,   582,
+       0,     0,     0,     0,   407,     0,     0,   975,   391,   322,
+       0,   534,   535,     0,     0,     0,     0,     0,   445,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,   582,
+       0,     0,   829,     0,     0,   451,   394,     0,   650,   651,
+     652,   377,     0,   655,     0,  1026,   505,   322,   322,     0,
+       0,     0,     0,     0,     0,     0,   462,   463,  1089,     0,
+    1090,     0,  1092,  1038,   395,     0,     0,     0,     0,     0,
+       0,   391,     0,   396,   398,   504,   687,   504,    21,    22,
+      23,    24,    25,    26,    27,    28,    29,   498,   277,   391,
+       0,     0,     0,   322,     0,   322,     0,   499,   391,   582,
+     259,     0,   520,     0,   521,     0,     0,     0,     0,   582,
+       0,  1122,     0,     0,     0,     0,     0,   582,   384,     0,
+       0,     0,   505,     0,     0,   553,     0,  1132,     0,     0,
+       0,     0,     0,    39,    40,    41,    42,    43,    44,    45,
+      46,    47,    48,    49,    50,    51,    52,    53,     0,     0,
+       0,   407,     0,     0,     0,     0,     0,     0,  1152,     0,
+       0,     0,     0,     0,     0,   504,     0,   504,     0,   626,
+       0,     0,     0,   582,     0,     0,   582,     0,     0,     0,
+       0,   451,   259,     0,     0,     0,     0,     0,     0,     0,
+     322,     0,     0,  1116,     0,     0,     0,   504,     0,   621,
+     630,     0,   505,     0,     0,     0,   582,     0,     0,  1124,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,  1130,   505,     0,     0,  1133,     0,  1134,     0,   785,
+     259,   259,     0,     0,     0,     0,   692,     0,   259,     0,
+    1146,     0,     0,   391,  1148,     0,     0,     0,  1150,   259,
+     504,   504,   635,   820,     0,     0,     0,     0,  1153,     0,
+       0,     0,   322,     0,     0,   322,   692,     0,     0,     0,
+    1164,   691,   626,     0,   696,     0,     0,     0,     0,     0,
+     322,     0,     0,     0,     0,     0,     0,     0,     0,   709,
+       0,     0,     0,   636,   637,     0,     0,     0,     0,     0,
+     202,   203,   204,   638,   206,   207,   208,   209,   210,   211,
+     212,   213,   214,   215,   216,   217,   218,   219,   220,   221,
+     222,   223,   224,     0,     0,     0,     0,     0,     0,   324,
+       0,     0,     0,     0,     0,     0,     0,     0,   504,     0,
+       0,     0,     0,   897,     0,   742,     0,     0,     0,     0,
+       0,   900,     0,     0,     0,   902,   903,     0,     0,   906,
+     907,   908,   909,   910,   911,   912,   913,   914,   915,     0,
+       0,     0,     0,   407,     0,   687,     0,   639,     0,   640,
+     641,     0,   642,   643,     0,   246,   247,   248,   644,   250,
+     251,   252,   253,   254,   255,   256,     0,     0,     0,     0,
+       0,   938,     0,     0,     0,   692,     0,     0,     0,   322,
+       0,     0,     0,     0,     0,   322,     0,     0,   796,     0,
+       6,     7,     8,     9,    10,    11,    12,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,    13,    14,    15,
+      16,    17,    18,    19,    20,     0,   512,     0,    21,    22,
+      23,    24,    25,    26,    27,    28,    29,   997,   998,    31,
+      32,     0,     0,     0,     0,    33,    34,    35,     0,   999,
+       0,     0,     0,     0,   982,     0,   512,   985,   512,   322,
+     322,   322,     0,     0,     0,     0,     0,     0,   878,   879,
+     880,  1023,     0,     0,   512,     0,     0,     0,     0,     0,
+       0,    38,     0,    39,    40,    41,    42,    43,    44,    45,
+      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
+      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
+      66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
+      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
+    1000,  1001,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,   322,    87,     0,     0,     0,     0,     0,     0,     0,
+     696,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,   297,   298,   299,   300,   301,   302,
+     303,     0,     0,     0,   626,     0,     0,     0,     0,     0,
+       0,   304,   305,   306,   504,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,   297,   298,   299,   300,   301,   302,
+     303,     0,     0,    31,   380,     0,     0,     0,     0,    33,
+    1020,   304,   305,   306,    16,     0,     0,     0,     0,   803,
+     804,   805,     0,     0,     0,     0,     0,   806,     0,     0,
+     407,     0,     0,     0,     0,     0,     0,     0,   122,    33,
+     322,     0,     0,     0,   322,     0,   322,  1019,     0,  1025,
+     407,     0,     0,  1027,     0,     0,   391,     0,     0,     0,
+     504,     0,   309,   310,   311,   312,   313,   314,   315,   316,
+     317,   318,   319,     0,     0,    38,     0,     0,     0,     0,
+       0,     0,     0,   407,     0,     0,   391,     0,   391,  1074,
+       0,   407,   309,   310,   311,   312,   313,   314,   315,   316,
+     317,   318,   319,     0,   391,  1074,    87,     0,     0,     0,
+       0,     0,     0,   451,  1074,     0,     0,     0,     0,   322,
+       0,     0,     0,     0,     0,     0,     0,   407,   321,     0,
+       0,   407,     0,     0,     0,     0,    87,     0,     0,     0,
+     504,     0,     0,     0,     0,     0,   322,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+     504,     0,     0,     0,     0,     0,     0,  1074,     0,     0,
+       0,     0,     0,  1074,     0,  1074,     0,     0,     0,     0,
+     451,   553,     0,     0,     0,   150,   151,   152,   153,   154,
+     155,   156,   157,   158,   159,   160,   161,   162,   163,   164,
+     165,   166,   167,   168,   169,   170,   171,   172,   173,   174,
+     175,   176,   177,   178,   692,  1074,   179,   180,   181,   182,
+     183,   184,     0,   185,   186,   187,   188,     0,     0,   189,
+     190,   191,   192,   193,   194,   195,   196,   197,   198,   199,
+     200,   201,   202,   203,   204,   205,   206,   207,   208,   209,
+     210,   211,   212,   213,   214,   215,   216,   217,   218,   219,
+     220,   221,   222,   223,   224,   225,   226,   227,    39,    40,
+      41,    42,    43,    44,    45,    46,    47,    48,    49,    50,
+      51,    52,    53,   228,   229,   230,   231,   232,   233,   234,
+     235,   236,   237,   238,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   596,   239,     0,   240,
+     241,   242,   243,     0,   597,   245,   598,   246,   247,   248,
+     249,   250,   251,   252,   253,   254,   255,   256,   257,   150,
+     151,   152,   153,   154,   155,   156,   157,   158,   159,   160,
+     161,   162,   163,   164,   165,   166,   167,   168,   169,   170,
+     171,   172,   173,   174,   175,   176,   177,   178,     0,     0,
+     179,   180,   181,   182,   183,   184,     0,   185,   186,   187,
+     188,     0,     0,   189,   190,   191,   192,   193,   194,   195,
+     196,   197,   198,   199,   200,   201,   202,   203,   204,   205,
+     206,   207,   208,   209,   210,   211,   212,   213,   214,   215,
+     216,   217,   218,   219,   220,   221,   222,   223,   224,   225,
+     226,   227,    39,    40,    41,    42,    43,    44,    45,    46,
+      47,    48,    49,    50,    51,    52,    53,   228,   229,   230,
+     231,   232,   233,   234,   235,   236,   237,   238,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+     596,   239,   602,   240,   241,   242,   243,     0,   597,   245,
+       0,   246,   247,   248,   249,   250,   251,   252,   253,   254,
+     255,   256,   257,   150,   151,   152,   153,   154,   155,   156,
+     157,   158,   159,   160,   161,   162,   163,   164,   165,   166,
+     167,   168,   169,   170,   171,   172,   173,   174,   175,   176,
+     177,   178,     0,     0,   179,   180,   181,   182,   183,   184,
+       0,   185,   186,   187,   188,     0,     0,   189,   190,   191,
+     192,   193,   194,   195,   196,   197,   198,   199,   200,   201,
+     202,   203,   204,   205,   206,   207,   208,   209,   210,   211,
+     212,   213,   214,   215,   216,   217,   218,   219,   220,   221,
+     222,   223,   224,   225,   226,   227,    39,    40,    41,    42,
+      43,    44,    45,    46,    47,    48,    49,    50,    51,    52,
+      53,   228,   229,   230,   231,   232,   233,   234,   235,   236,
+     237,   238,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,   596,   239,     0,   240,   241,   242,
+     243,     0,   597,   245,   608,   246,   247,   248,   249,   250,
+     251,   252,   253,   254,   255,   256,   257,   150,   151,   152,
+     153,   154,   155,   156,   157,   158,   159,   160,   161,   162,
+     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
+     173,   174,   175,   176,   177,   178,     0,     0,   179,   180,
+     181,   182,   183,   184,     0,   185,   186,   187,   188,     0,
+       0,   189,   190,   191,   192,   193,   194,   195,   196,   197,
+     198,   199,   200,   201,   202,   203,   204,   205,   206,   207,
+     208,   209,   210,   211,   212,   213,   214,   215,   216,   217,
+     218,   219,   220,   221,   222,   223,   224,   225,   226,   227,
+      39,    40,    41,    42,    43,    44,    45,    46,    47,    48,
+      49,    50,    51,    52,    53,   228,   229,   230,   231,   232,
+     233,   234,   235,   236,   237,   238,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,   596,   239,
+       0,   240,   241,   242,   243,   611,   597,   245,     0,   246,
+     247,   248,   249,   250,   251,   252,   253,   254,   255,   256,
+     257,   150,   151,   152,   153,   154,   155,   156,   157,   158,
+     159,   160,   161,   162,   163,   164,   165,   166,   167,   168,
+     169,   170,   171,   172,   173,   174,   175,   176,   177,   178,
+       0,     0,   179,   180,   181,   182,   183,   184,     0,   185,
+     186,   187,   188,     0,     0,   189,   190,   191,   192,   193,
+     194,   195,   196,   197,   198,   199,   200,   201,   202,   203,
+     204,   205,   206,   207,   208,   209,   210,   211,   212,   213,
+     214,   215,   216,   217,   218,   219,   220,   221,   222,   223,
+     224,   225,   226,   227,    39,    40,    41,    42,    43,    44,
+      45,    46,    47,    48,    49,    50,    51,    52,    53,   228,
+     229,   230,   231,   232,   233,   234,   235,   236,   237,   238,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,   596,   239,   629,   240,   241,   242,   243,     0,
+     597,   245,     0,   246,   247,   248,   249,   250,   251,   252,
+     253,   254,   255,   256,   257,   150,   151,   152,   153,   154,
+     155,   156,   157,   158,   159,   160,   161,   162,   163,   164,
+     165,   166,   167,   168,   169,   170,   171,   172,   173,   174,
+     175,   176,   177,   178,     0,     0,   179,   180,   181,   182,
+     183,   184,     0,   185,   186,   187,   188,     0,     0,   189,
+     190,   191,   192,   193,   194,   195,   196,   197,   198,   199,
+     200,   201,   202,   203,   204,   205,   206,   207,   208,   209,
+     210,   211,   212,   213,   214,   215,   216,   217,   218,   219,
+     220,   221,   222,   223,   224,   225,   226,   227,    39,    40,
+      41,    42,    43,    44,    45,    46,    47,    48,    49,    50,
+      51,    52,    53,   228,   229,   230,   231,   232,   233,   234,
+     235,   236,   237,   238,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   596,   239,   698,   240,
+     241,   242,   243,     0,   597,   245,     0,   246,   247,   248,
+     249,   250,   251,   252,   253,   254,   255,   256,   257,   150,
+     151,   152,   153,   154,   155,   156,   157,   158,   159,   160,
+     161,   162,   163,   164,   165,   166,   167,   168,   169,   170,
+     171,   172,   173,   174,   175,   176,   177,   178,     0,     0,
+     179,   180,   181,   182,   183,   184,     0,   185,   186,   187,
+     188,     0,     0,   189,   190,   191,   192,   193,   194,   195,
+     196,   197,   198,   199,   200,   201,   202,   203,   204,   205,
+     206,   207,   208,   209,   210,   211,   212,   213,   214,   215,
+     216,   217,   218,   219,   220,   221,   222,   223,   224,   225,
+     226,   227,    39,    40,    41,    42,    43,    44,    45,    46,
+      47,    48,    49,    50,    51,    52,    53,   228,   229,   230,
+     231,   232,   233,   234,   235,   236,   237,   238,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+     596,   239,   935,   240,   241,   242,   243,     0,   597,   245,
+       0,   246,   247,   248,   249,   250,   251,   252,   253,   254,
+     255,   256,   257,   150,   151,   152,   153,   154,   155,   156,
+     157,   158,   159,   160,   161,   162,   163,   164,   165,   166,
+     167,   168,   169,   170,   171,   172,   173,   174,   175,   176,
+     177,   178,     0,     0,   179,   180,   181,   182,   183,   184,
+       0,   185,   186,   187,   188,     0,     0,   189,   190,   191,
+     192,   193,   194,   195,   196,   197,   198,   199,   200,   201,
+     202,   203,   204,   205,   206,   207,   208,   209,   210,   211,
+     212,   213,   214,   215,   216,   217,   218,   219,   220,   221,
+     222,   223,   224,   225,   226,   227,    39,    40,    41,    42,
+      43,    44,    45,    46,    47,    48,    49,    50,    51,    52,
+      53,   228,   229,   230,   231,   232,   233,   234,   235,   236,
+     237,   238,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,   596,   239,  1172,   240,   241,   242,
+     243,     0,   597,   245,     0,   246,   247,   248,   249,   250,
+     251,   252,   253,   254,   255,   256,   257,   150,   151,   152,
+     153,   154,   155,   156,   157,   158,   159,   160,   161,   162,
+     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
+     173,   174,   175,   176,   177,   178,     0,     0,   179,   180,
+     181,   182,   183,   184,     0,   185,   186,   187,   188,     0,
+       0,   189,   190,   191,   192,   193,   194,   195,   196,   197,
+     198,   199,   200,   201,   202,   203,   204,   205,   206,   207,
+     208,   209,   210,   211,   212,   213,   214,   215,   216,   217,
+     218,   219,   220,   221,   222,   223,   224,   225,   226,   227,
+      39,    40,    41,    42,    43,    44,    45,    46,    47,    48,
+      49,    50,    51,    52,    53,   228,   229,   230,   231,   232,
+     233,   234,   235,   236,   237,   238,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,   596,   239,
+    1177,   240,   241,   242,   243,     0,   597,   245,     0,   246,
+     247,   248,   249,   250,   251,   252,   253,   254,   255,   256,
+     257,   150,   151,   152,   153,   154,   155,   156,   157,   158,
+     159,   160,   161,   162,   163,   164,   165,   166,   167,   168,
+     169,   170,   171,   172,   173,   174,   175,   176,   177,   178,
+       0,     0,   179,   180,   181,   182,   183,   184,     0,   185,
+     186,   187,   188,     0,     0,   189,   190,   191,   192,   193,
+     194,   195,   196,   197,   198,   199,   200,   201,   202,   203,
+     204,   205,   206,   207,   208,   209,   210,   211,   212,   213,
+     214,   215,   216,   217,   218,   219,   220,   221,   222,   223,
+     224,   225,   226,   227,    39,    40,    41,    42,    43,    44,
+      45,    46,    47,    48,    49,    50,    51,    52,    53,   228,
+     229,   230,   231,   232,   233,   234,   235,   236,   237,   238,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,   609,   239,     0,   240,   241,   242,   243,     0,
+     597,   245,     0,   246,   247,   248,   249,   250,   251,   252,
+     253,   254,   255,   256,   257,   150,   151,   152,   153,   154,
+     155,   156,   157,   158,   159,   160,   161,   162,   163,   164,
+     165,   166,   167,   168,   169,   170,   171,   172,   173,   174,
+     175,   176,   177,   178,     0,     0,   179,   180,   181,   182,
+     183,   184,     0,   185,   186,   187,   188,     0,     0,   189,
+     190,   191,   192,   193,   194,   195,   196,   197,   198,   199,
+     200,   201,   202,   203,   204,   205,   206,   207,   208,   209,
+     210,   211,   212,   213,   214,   215,   216,   217,   218,   219,
+     220,   221,   222,   223,   224,   225,   226,   227,    39,    40,
+      41,    42,    43,    44,    45,    46,    47,    48,    49,    50,
+      51,    52,    53,   228,   229,   230,   231,   232,   233,   234,
+     235,   236,   237,   238,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   735,   239,     0,   240,
+     241,   242,   243,     0,   597,   245,     0,   246,   247,   248,
+     249,   250,   251,   252,   253,   254,   255,   256,   257,   150,
+     151,   152,   153,   154,   155,   156,   157,   158,   159,   160,
+     161,   162,   163,   164,   165,   166,   167,   168,   169,   170,
+     171,   172,   173,   174,   175,   176,   177,   178,     0,     0,
+     179,   180,   181,   182,   183,   184,     0,   185,   186,   187,
+     188,     0,     0,   189,   190,   191,   192,   193,   194,   195,
+     196,   197,   198,   199,   200,   201,   202,   203,   204,   205,
+     206,   207,   208,   209,   210,   211,   212,   213,   214,   215,
+     216,   217,   218,   219,   220,   221,   222,   223,   224,   225,
+     226,   227,    39,    40,    41,    42,    43,    44,    45,    46,
+      47,    48,    49,    50,    51,    52,    53,   228,   229,   230,
+     231,   232,   233,   234,   235,   236,   237,   238,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,   239,     0,   240,   241,   242,   243,  1141,   597,   245,
+       0,   246,   247,   248,   249,   250,   251,   252,   253,   254,
+     255,   256,   257,   150,   151,   152,   153,   154,   155,   156,
+     157,   158,   159,   160,   161,   162,   163,   164,   165,   166,
+     167,   168,   169,   170,   171,   172,   173,   174,   175,   176,
+     177,   178,     0,     0,   179,   180,   181,   182,   183,   184,
+       0,   185,   186,   187,   188,     0,     0,   189,   190,   191,
+     192,   193,   194,   195,   196,   197,   198,   199,   200,   201,
+     202,   203,   204,   205,   206,   207,   208,   209,   210,   211,
+     212,   213,   214,   215,   216,   217,   218,   219,   220,   221,
+     222,   223,   224,   225,   226,   227,    39,    40,    41,    42,
+      43,    44,    45,    46,    47,    48,    49,    50,    51,    52,
+      53,   228,   229,   230,   231,   232,   233,   234,   235,   236,
+     237,   238,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,  1173,   239,     0,   240,   241,   242,
+     243,     0,   597,   245,     0,   246,   247,   248,   249,   250,
+     251,   252,   253,   254,   255,   256,   257,   150,   151,   152,
+     153,   154,   155,   156,   157,   158,   159,   160,   161,   162,
+     163,   164,   165,   166,   167,   168,   169,   170,   171,   172,
+     173,   174,   175,   176,   177,   178,     0,     0,   179,   180,
+     181,   182,   183,   184,     0,   185,   186,   187,   188,     0,
+       0,   189,   190,   191,   192,   193,   194,   195,   196,   197,
+     198,   199,   200,   201,   202,   203,   204,   205,   206,   207,
+     208,   209,   210,   211,   212,   213,   214,   215,   216,   217,
+     218,   219,   220,   221,   222,   223,   224,   225,   226,   227,
+      39,    40,    41,    42,    43,    44,    45,    46,    47,    48,
+      49,    50,    51,    52,    53,   228,   229,   230,   231,   232,
+     233,   234,   235,   236,   237,   238,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,  1181,   239,
+       0,   240,   241,   242,   243,     0,   597,   245,     0,   246,
+     247,   248,   249,   250,   251,   252,   253,   254,   255,   256,
+     257,   150,   151,   152,   153,   154,   155,   156,   157,   158,
+     159,   160,   161,   162,   163,   164,   165,   166,   167,   168,
+     169,   170,   171,   172,   173,   174,   175,   176,   177,   178,
+       0,     0,   179,   180,   181,   182,   183,   184,     0,   185,
+     186,   187,   188,     0,     0,   189,   190,   191,   192,   193,
+     194,   195,   196,   197,   198,   199,   200,   201,   202,   203,
+     204,   205,   206,   207,   208,   209,   210,   211,   212,   213,
+     214,   215,   216,   217,   218,   219,   220,   221,   222,   223,
+     224,   225,   226,   227,    39,    40,    41,    42,    43,    44,
+      45,    46,    47,    48,    49,    50,    51,    52,    53,   228,
+     229,   230,   231,   232,   233,   234,   235,   236,   237,   238,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,   239,     0,   240,   241,   242,   243,     0,
+     244,   245,     0,   246,   247,   248,   249,   250,   251,   252,
+     253,   254,   255,   256,   257,   150,   151,   152,   153,   154,
+     155,   156,   441,   442,   159,   160,   161,   162,   163,   164,
+     165,   166,   167,   168,   169,   570,   171,   172,   173,   174,
+     175,   176,   177,   178,     0,     0,   179,   180,   181,   182,
+     183,   184,     0,   185,   186,   187,   188,     0,     0,   189,
+     190,   191,   192,   193,   194,   195,   196,   197,   198,   199,
+     200,   201,   202,   203,   204,   571,   206,   207,   208,   209,
+     210,   211,   212,   213,   214,   215,   216,   217,   218,   219,
+     220,   221,   222,   223,   224,     0,   572,     0,    39,    40,
+      41,    42,    43,    44,    45,    46,    47,    48,    49,    50,
+      51,    52,    53,   228,   229,   230,   231,   232,   233,   234,
+     235,   236,   237,   238,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   965,   573,   966,   574,
+     575,   576,   443,     0,   577,   578,     0,   246,   247,   248,
+     579,   250,   251,   252,   253,   254,   255,   256,   580,   150,
+     151,   152,   153,   154,   155,   156,   157,   158,   159,   160,
+     161,   162,   163,   164,   165,   166,   167,   168,   169,   170,
+     171,   172,   173,   174,   175,   176,   177,   178,     0,     0,
+     179,   180,   181,   182,   183,   184,     0,   185,   186,   187,
+     188,     0,     0,   189,   190,   191,   192,   193,   194,   195,
+     196,   197,   198,   199,   200,   201,   202,   203,   204,   205,
+     206,   207,   208,   209,   210,   211,   212,   213,   214,   215,
+     216,   217,   218,   219,   220,   221,   222,   223,   224,   225,
+     226,   227,    39,    40,    41,    42,    43,    44,    45,    46,
+      47,    48,    49,    50,    51,    52,    53,   228,   229,   230,
+     231,   232,   233,   234,   235,   236,   237,   238,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,   239,     0,   240,   241,   242,   243,     0,   597,   245,
+       0,   246,   247,   248,   249,   250,   251,   252,   253,   254,
+     255,   256,   257,   150,   151,   152,   153,   154,   155,   156,
+     157,   158,   159,   160,   161,   162,   163,   164,   165,   166,
+     167,   168,   169,   170,   171,   172,   173,   174,   175,   176,
+     177,   178,     0,     0,   179,   180,   181,   182,   183,   184,
+       0,   185,   186,   187,   188,     0,     0,   189,   190,   191,
+     192,   193,   194,   195,   196,   197,   198,   199,   200,   201,
+     202,   203,   204,   205,   206,   207,   208,   209,   210,   211,
+     212,   213,   214,   215,   216,   217,   218,   219,   220,   221,
+     222,   223,   224,   225,   226,   227,    39,    40,    41,    42,
+      43,    44,    45,    46,    47,    48,    49,    50,    51,    52,
+      53,   228,   229,   230,   231,   232,   233,   234,   235,   236,
+     237,   238,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,   239,     0,   240,   241,   242,
+     243,     0,     0,   245,     0,   246,   247,   248,   249,   250,
+     251,   252,   253,   254,   255,   256,   257,   150,   151,   152,
+     153,   154,   155,   156,   441,   442,   159,   160,   161,   162,
+     163,   164,   165,   166,   167,   168,   169,   570,   171,   172,
+     173,   174,   175,   176,   177,   178,     0,     0,   179,   180,
+     181,   182,   183,   184,     0,   185,   186,   187,   188,     0,
+       0,   189,   190,   191,   192,   193,   194,   195,   196,   197,
+     198,   199,   200,   201,   202,   203,   204,   571,   206,   207,
+     208,   209,   210,   211,   212,   213,   214,   215,   216,   217,
+     218,   219,   220,   221,   222,   223,   224,     0,   572,     0,
+      39,    40,    41,    42,    43,    44,    45,    46,    47,    48,
+      49,    50,    51,    52,    53,   228,   229,   230,   231,   232,
+     233,   234,   235,   236,   237,   238,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,   573,
+       0,   574,   575,   576,   443,   889,   577,   578,     0,   246,
+     247,   248,   579,   250,   251,   252,   253,   254,   255,   256,
+     580,   150,   151,   152,   153,   154,   155,   156,   441,   442,
+     159,   160,   161,   162,   163,   164,   165,   166,   167,   168,
+     169,   570,   171,   172,   173,   174,   175,   176,   177,   178,
+       0,     0,   179,   180,   181,   182,   183,   184,     0,   185,
+     186,   187,   188,     0,     0,   189,   190,   191,   192,   193,
+     194,   195,   196,   197,   198,   199,   200,   201,   202,   203,
+     204,   571,   206,   207,   208,   209,   210,   211,   212,   213,
+     214,   215,   216,   217,   218,   219,   220,   221,   222,   223,
+     224,     0,   572,     0,    39,    40,    41,    42,    43,    44,
+      45,    46,    47,    48,    49,    50,    51,    52,    53,   228,
+     229,   230,   231,   232,   233,   234,   235,   236,   237,   238,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,   573,     0,   574,   575,   576,   443,   890,
+     577,   578,     0,   246,   247,   248,   579,   250,   251,   252,
+     253,   254,   255,   256,   580,   150,   151,   152,   153,   154,
+     155,   156,   441,   442,   159,   160,   161,   162,   163,   164,
+     165,   166,   167,   168,   169,   570,   171,   172,   173,   174,
+     175,   176,   177,   178,     0,     0,   179,   180,   181,   182,
+     183,   184,     0,   185,   186,   187,   188,     0,     0,   189,
+     190,   191,   192,   193,   194,   195,   196,   197,   198,   199,
+     200,   201,   202,   203,   204,   571,   206,   207,   208,   209,
+     210,   211,   212,   213,   214,   215,   216,   217,   218,   219,
+     220,   221,   222,   223,   224,     0,   572,     0,    39,    40,
+      41,    42,    43,    44,    45,    46,    47,    48,    49,    50,
+      51,    52,    53,   228,   229,   230,   231,   232,   233,   234,
+     235,   236,   237,   238,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,   573,     0,   574,
+     575,   576,   443,   891,   577,   578,     0,   246,   247,   248,
+     579,   250,   251,   252,   253,   254,   255,   256,   580,   150,
+     151,   152,   153,   154,   155,   156,   441,   442,   159,   160,
+     161,   162,   163,   164,   165,   166,   167,   168,   169,   570,
+     171,   172,   173,   174,   175,   176,   177,   178,     0,     0,
+     179,   180,   181,   182,   183,   184,     0,   185,   186,   187,
+     188,     0,     0,   189,   190,   191,   192,   193,   194,   195,
+     196,   197,   198,   199,   200,   201,   202,   203,   204,   571,
+     206,   207,   208,   209,   210,   211,   212,   213,   214,   215,
+     216,   217,   218,   219,   220,   221,   222,   223,   224,     0,
+     572,     0,    39,    40,    41,    42,    43,    44,    45,    46,
+      47,    48,    49,    50,    51,    52,    53,   228,   229,   230,
+     231,   232,   233,   234,   235,   236,   237,   238,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,   573,     0,   574,   575,   576,   443,     0,   577,   578,
+     964,   246,   247,   248,   579,   250,   251,   252,   253,   254,
+     255,   256,   580,   150,   151,   152,   153,   154,   155,   156,
+     441,   442,   159,   160,   161,   162,   163,   164,   165,   166,
+     167,   168,   169,   570,   171,   172,   173,   174,   175,   176,
+     177,   178,     0,     0,   179,   180,   181,   182,   183,   184,
+       0,   185,   186,   187,   188,     0,     0,   189,   190,   191,
+     192,   193,   194,   195,   196,   197,   198,   199,   200,   201,
+     202,   203,   204,   571,   206,   207,   208,   209,   210,   211,
+     212,   213,   214,   215,   216,   217,   218,   219,   220,   221,
+     222,   223,   224,     0,   572,     0,    39,    40,    41,    42,
+      43,    44,    45,    46,    47,    48,    49,    50,    51,    52,
+      53,   228,   229,   230,   231,   232,   233,   234,   235,   236,
+     237,   238,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,   573,     0,   574,   575,   576,
+     443,     0,   577,   578,   969,   246,   247,   248,   579,   250,
+     251,   252,   253,   254,   255,   256,   580,   150,   151,   152,
+     153,   154,   155,   156,   441,   442,   159,   160,   161,   162,
+     163,   164,   165,   166,   167,   168,   169,   570,   171,   172,
+     173,   174,   175,   176,   177,   178,     0,     0,   179,   180,
+     181,   182,   183,   184,     0,   185,   186,   187,   188,     0,
+       0,   189,   190,   191,   192,   193,   194,   195,   196,   197,
+     198,   199,   200,   201,   202,   203,   204,   571,   206,   207,
+     208,   209,   210,   211,   212,   213,   214,   215,   216,   217,
+     218,   219,   220,   221,   222,   223,   224,     0,   572,     0,
+      39,    40,    41,    42,    43,    44,    45,    46,    47,    48,
+      49,    50,    51,    52,    53,   228,   229,   230,   231,   232,
+     233,   234,   235,   236,   237,   238,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,   573,
+       0,   574,   575,   576,   443,     0,   577,   578,     0,   246,
+     247,   248,   579,   250,   251,   252,   253,   254,   255,   256,
+     580,   150,   151,   152,   153,   154,   155,   156,   441,   442,
+     159,   160,   161,   162,   163,   164,   165,   166,   167,   168,
+     169,   570,   171,   172,   173,   174,   175,   176,   177,   178,
+       0,     0,   179,   180,   181,   182,   183,   184,     0,   185,
+     186,   187,   188,     0,     0,   189,   190,   191,   192,   193,
+     194,   195,   196,   197,   198,   199,   200,   201,   202,   203,
+     204,   434,   206,   207,   208,   209,   210,   211,   212,   213,
+     214,   215,   216,   217,   218,   219,   220,   221,   222,   223,
+     224,     0,   572,     0,    39,    40,    41,    42,    43,    44,
+      45,    46,    47,    48,    49,    50,    51,    52,    53,   228,
+     229,   230,   231,   232,   233,   234,   235,   236,   237,   238,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,   573,     0,   832,   575,   833,   443,     0,
+     834,   578,     0,   246,   247,   248,   435,   250,   251,   252,
+     253,   254,   255,   256,   580,  -618,  -618,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,     0,     0,  -618,  -618,  -618,  -618,
+    -618,  -618,     0,  -618,  -618,  -618,  -618,     0,     0,  -618,
+    -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,  -618,     0,  -618,     0,  -618,  -618,
+    -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,  -618,     0,  -654,
+    -618,  -618,  -618,     0,  -618,  -618,     0,  -618,  -618,  -618,
+    -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,  -618,   150,
+     151,   152,   153,   154,   155,   156,   441,   442,   159,   160,
+     161,   162,   163,   164,   165,   166,   167,   168,   169,   570,
+     171,   172,   173,   174,   175,   176,   177,   178,     0,     0,
+     179,   180,   181,   182,   183,   184,     0,   185,   186,   187,
+     188,     0,     0,   189,   190,   191,   192,   193,   194,   195,
+     196,   197,   198,   199,   200,   201,   202,   203,   204,   864,
+     206,   207,   208,   209,   210,   211,   212,   213,   214,   215,
+     216,   217,   218,   219,   220,   221,   222,   223,   224,     0,
+     572,     0,    39,    40,    41,    42,    43,    44,    45,    46,
+      47,    48,    49,    50,    51,    52,    53,   228,   229,   230,
+     231,   232,   233,   234,   235,   236,   237,   238,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,   573,     0,     0,   575,     0,   443,     0,   865,   578,
+       0,   246,   247,   248,   866,   250,   251,   252,   253,   254,
+     255,   256,   580,   150,   151,   152,   153,   154,   155,   156,
+     441,   442,   159,   160,   161,   162,   163,   164,   165,   166,
+     167,   168,   169,   570,   171,   172,   173,   174,   175,   176,
+     177,   178,     0,     0,   179,   180,   181,   182,   183,   184,
+       0,   185,   186,   187,   188,     0,     0,   189,   190,   191,
+     192,   193,   194,   195,   196,   197,   198,   199,   200,   201,
+     202,   203,   204,     0,   206,   207,   208,   209,   210,   211,
+     212,   213,   214,   215,   216,   217,   218,   219,   220,   221,
+     222,   223,   224,     0,   572,     0,    39,    40,    41,    42,
+      43,    44,    45,    46,    47,    48,    49,    50,    51,    52,
+      53,   228,   229,   230,   231,   232,   233,   234,   235,   236,
+     237,   238,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,   573,     0,     0,   575,     0,
+     443,     0,   834,   578,     0,   246,   247,   248,     0,   250,
+     251,   252,   253,   254,   255,   256,   580,   150,   151,   152,
+     153,   154,   155,   156,   441,   442,   159,   160,   161,   162,
+     163,   164,   165,   166,   167,   168,   169,   570,   171,   172,
+     173,   174,   175,   176,   177,   178,     0,     0,   179,   180,
+     181,   182,   183,   184,     0,   185,   186,   187,   188,     0,
+       0,   189,   190,   191,   192,   193,   194,   195,   196,   197,
+     198,   199,   200,   201,   202,   203,   204,     0,   206,   207,
+     208,   209,   210,   211,   212,   213,   214,   215,   216,   217,
+     218,   219,   220,   221,   222,   223,   224,     0,     0,     0,
+      39,    40,    41,    42,    43,    44,    45,    46,    47,    48,
+      49,    50,    51,    52,    53,   228,   229,   230,   231,   232,
+     233,   234,   235,   236,   237,   238,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,   573,
+       0,     0,   575,     0,   443,     0,     0,   578,     0,   246,
+     247,   248,     0,   250,   251,   252,   253,   254,   255,   256,
+     580,   270,   271,   272,   273,   274,   275,   276,     0,     0,
+     635,     0,     0,     0,     0,     0,     0,     0,   167,   168,
+     169,     0,    17,    18,    19,    20,     0,     0,     0,    21,
+      22,    23,    24,    25,    26,    27,    28,    29,     0,   277,
+       0,     0,     0,     0,     0,     0,    33,    34,     0,     0,
+       0,   636,   637,     0,     0,     0,     0,     0,   202,   203,
+     204,   638,   206,   207,   208,   209,   210,   211,   212,   213,
+     214,   215,   216,   217,   218,   219,   220,   221,   222,   223,
+     224,     0,    38,     0,    39,    40,    41,    42,    43,    44,
+      45,    46,    47,    48,    49,    50,    51,    52,    53,   228,
+     229,   230,   231,   232,   233,   234,   235,   236,   237,   238,
+     470,   471,   472,   473,   474,   475,   476,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,   304,   305,   306,
+       0,     0,     0,     0,     0,   639,     0,   640,   641,     0,
+     642,   643,     0,   246,   247,   248,   644,   250,   251,   252,
+     253,   254,   255,   256,     6,     7,     8,     9,    10,    11,
+      12,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,    13,    14,    15,    16,    17,    18,    19,    20,     0,
+       0,     0,    21,    22,    23,    24,    25,    26,    27,    28,
+      29,     0,    30,    31,    32,     0,     0,     0,     0,    33,
+      34,    35,    36,    37,     0,     0,     0,     0,   309,   310,
+     311,   312,   313,   314,   315,   316,   317,   318,   319,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,    38,     0,    39,    40,    41,
+      42,    43,    44,    45,    46,    47,    48,    49,    50,    51,
+      52,    53,    54,    55,    56,    57,    58,    59,    60,    61,
+      62,    63,    64,    65,    66,    67,    68,    69,    70,    71,
+      72,    73,    74,    75,    76,    77,    78,    79,    80,    81,
+      82,    83,    84,    85,     0,    86,     6,     7,     8,     9,
+      10,    11,    12,     0,     0,     0,    87,     0,     0,     0,
+       0,     0,     0,    13,    14,    15,     0,    17,    18,    19,
+      20,     0,     0,     0,    21,    22,    23,    24,    25,    26,
+      27,    28,    29,     0,   277,    31,   354,     0,     0,     0,
+       0,    33,    34,     0,     0,   355,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,    38,     0,    39,
+      40,    41,    42,    43,    44,    45,    46,    47,    48,    49,
+      50,    51,    52,    53,    54,    55,    56,    57,    58,    59,
+      60,    61,    62,    63,    64,     0,     0,     0,     0,     0,
+       0,     6,     7,     8,     9,    10,    11,    12,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,    13,    14,
+      15,     0,    17,    18,    19,    20,     0,     0,    87,    21,
+      22,    23,    24,    25,    26,    27,    28,    29,     0,   277,
+      31,   354,     0,     0,     0,     0,    33,    34,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,    38,     0,    39,    40,    41,    42,    43,    44,
+      45,    46,    47,    48,    49,    50,    51,    52,    53,    54,
+      55,    56,    57,    58,    59,    60,    61,    62,    63,    64,
+       0,     0,     0,     0,     0,     0,     6,     7,     8,     9,
+      10,    11,    12,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,    13,    14,    15,     0,    17,    18,    19,
+      20,     0,     0,    87,    21,    22,    23,    24,    25,    26,
+      27,    28,    29,     0,   277,    31,     0,     0,     0,     0,
+       0,    33,    34,    35,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,    38,     0,    39,
+      40,    41,    42,    43,    44,    45,    46,    47,    48,    49,
+      50,    51,    52,    53,    54,    55,    56,    57,    58,    59,
+      60,    61,    62,    63,    64,     0,     0,     0,     0,     0,
+       0,     6,     7,     8,     9,    10,    11,    12,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,    13,    14,
+      15,     0,    17,    18,    19,    20,     0,     0,    87,    21,
+      22,    23,    24,    25,    26,    27,    28,    29,     0,   277,
+      31,     0,     0,     0,     0,     0,    33,    34,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,    38,     0,    39,    40,    41,    42,    43,    44,
+      45,    46,    47,    48,    49,    50,    51,    52,    53,    54,
+      55,    56,    57,    58,    59,    60,    61,    62,    63,    64,
+       0,   297,   298,   299,   300,   301,   302,   303,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,   304,   305,
+     306,     0,     0,     0,     0,     0,   803,   804,   805,     0,
+       0,     0,     0,    87,   806,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,    33,   297,   298,   299,
+     300,   301,   302,   303,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,   304,   305,   306,    16,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,    38,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,    33,     0,     0,     0,     0,     0,     0,   309,
+     310,   311,   312,   313,   314,   315,   316,   317,   318,   319,
+       0,     0,   297,   298,   299,   300,   301,   302,   303,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,    38,   304,
+     305,   306,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,    87,     0,   309,   310,   311,   312,   313,
+     314,   315,   316,   317,   318,   319,     0,    33,   307,     0,
+     308,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,   517,     0,
+       0,     0,   518,     0,     0,     0,     0,     0,     0,    87,
+       0,     0,     0,    38,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+     309,   310,   311,   312,   313,   314,   315,   316,   317,   318,
+     319,   297,   298,   299,   300,   301,   302,   303,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,   304,   305,
+     306,    16,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,    87,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,    33,   297,   298,   299,
+     300,   301,   302,   303,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,   304,   305,   306,    16,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,    38,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,    33,     0,     0,     0,     0,     0,     0,   309,
+     310,   311,   312,   313,   314,   315,   316,   317,   318,   319,
+       0,     0,   297,   298,   299,   300,   301,   302,   303,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,    38,   304,
+     305,   306,     0,     0,     0,     0,   518,     0,     0,     0,
+       0,     0,     0,    87,     0,   309,   310,   311,   312,   313,
+     314,   315,   316,   317,   318,   319,     0,    33,   307,   297,
+     298,   299,   300,   301,   302,   303,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   304,   305,   306,     0,
+       0,   297,   298,   299,   300,   301,   302,   303,     0,    87,
+       0,     0,     0,    38,     0,     0,     0,     0,   304,   305,
+     306,     0,     0,     0,    33,     0,     0,     0,     0,     0,
+     309,   310,   311,   312,   313,   314,   315,   316,   317,   318,
+     319,   380,     0,     0,     0,     0,    33,     0,     0,   297,
+     298,   299,   300,   301,   302,   303,     0,     0,     0,     0,
+      38,     0,     0,     0,     0,     0,   304,   305,   306,     0,
+       0,     0,     0,     0,    87,     0,     0,   309,   310,   311,
+     312,   313,   314,   315,   316,   317,   318,   319,    31,     0,
+       0,     0,     0,     0,    33,     0,     0,     0,     0,   309,
+     310,   311,   312,   313,   314,   315,   316,   317,   318,   319,
+       0,     0,     0,   297,   298,   299,   300,   301,   302,   303,
+       0,    87,     0,     0,     0,     0,     0,     0,     0,     0,
+     304,   305,   306,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,    87,     0,     0,     0,   309,   310,   311,
+     312,   313,   314,   315,   316,   317,   318,   319,    33,   297,
+     298,   299,   300,   301,   302,   303,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   304,   305,   306,    16,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,    87,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,    33,     0,     0,     0,     0,     0,
+       0,   309,   310,   311,   312,   313,   314,   315,   316,   317,
+     318,   319,     0,     0,   297,   298,   299,   300,   301,   302,
+     303,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+      38,   304,   305,   306,    16,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,    87,     0,   309,   310,   311,
+     312,   313,   314,   315,   316,   317,   318,   319,     0,    33,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+     507,  1142,     0,     0,  1143,   297,   298,   299,   300,   301,
+     302,   303,     0,     0,     0,    38,     0,     0,     0,     0,
+       0,     0,   304,   305,   306,    16,     0,     0,     0,     0,
+       0,     0,   309,   310,   311,   312,   313,   314,   315,   316,
+     317,   318,   319,   297,   298,   299,   300,   301,   302,   303,
+      33,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+     304,   305,   306,    16,     0,   507,     0,     0,     0,   508,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,    38,     0,    33,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,   309,   310,   311,   312,   313,   314,   315,
+     316,   317,   318,   319,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,    38,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,  1142,     0,     0,
+    1143,   309,   310,   311,   312,   313,   314,   315,   316,   317,
+     318,   319,    21,    22,    23,    24,    25,    26,    27,    28,
+      29,   498,   277,     0,     0,     0,     0,     0,     0,     0,
+       0,   499,     0,     0,     0,     0,     0,     0,   508,     0,
+      21,    22,    23,    24,    25,    26,    27,    28,    29,   498,
+     277,     0,     0,     0,     0,     0,     0,     0,   417,   499,
+       0,     0,     0,     0,     0,     0,     0,    39,    40,    41,
+      42,    43,    44,    45,    46,    47,    48,    49,    50,    51,
+      52,    53,     0,     0,     0,     0,   417,     0,     0,     0,
+       0,     0,     0,     0,     0,    39,    40,    41,    42,    43,
+      44,    45,    46,    47,    48,    49,    50,    51,    52,    53,
+       0,     0,     0,     0,     0,  -273,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,   418,   419,   270,
+     271,   272,   273,   274,   275,   276,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,   167,   168,   169,     0,
+      17,    18,    19,    20,     0,   418,   419,    21,    22,    23,
+      24,    25,    26,    27,    28,    29,     0,   277,    31,   354,
+       0,     0,     0,     0,    33,    34,     0,     0,   355,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+      38,     0,    39,    40,    41,    42,    43,    44,    45,    46,
+      47,    48,    49,    50,    51,    52,    53,   228,   229,   230,
+     231,   232,   233,   234,   235,   236,   237,   238,   270,   271,
+     272,   273,   274,   275,   276,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,   167,   168,   169,     0,    17,
+      18,    19,    20,     0,     0,     0,    21,    22,    23,    24,
+      25,    26,    27,    28,    29,     0,   277,    31,     0,     0,
+       0,     0,     0,    33,    34,    35,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,    38,
+       0,    39,    40,    41,    42,    43,    44,    45,    46,    47,
+      48,    49,    50,    51,    52,    53,   228,   229,   230,   231,
+     232,   233,   234,   235,   236,   237,   238,   270,   271,   272,
+     273,   274,   275,   276,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,   167,   168,   169,     0,    17,    18,
+      19,    20,     0,     0,     0,    21,    22,    23,    24,    25,
+      26,    27,    28,    29,     0,   277,    31,     0,     0,     0,
+       0,     0,    33,    34,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,    38,     0,
+      39,    40,    41,    42,    43,    44,    45,    46,    47,    48,
+      49,    50,    51,    52,    53,   228,   229,   230,   231,   232,
+     233,   234,   235,   236,   237,   238,   270,   271,   272,   273,
+     274,   275,   276,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,   167,   168,   169,     0,    17,    18,    19,
+      20,     0,     0,     0,    21,    22,    23,    24,    25,    26,
+      27,    28,    29,     0,   277,     0,     0,     0,     0,     0,
+       0,    33,    34,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,    38,     0,    39,
+      40,    41,    42,    43,    44,    45,    46,    47,    48,    49,
+      50,    51,    52,    53,   228,   229,   230,   231,   232,   233,
+     234,   235,   236,   237,   238,   270,   271,   272,   273,   274,
+     275,   276,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,   167,   168,   169,     0,   739,     0,   740,    20,
+       0,     0,     0,    21,    22,    23,    24,    25,    26,    27,
+      28,    29,     0,   277,     0,     0,     0,     0,     0,     0,
+      33,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,    38,     0,    39,    40,
+      41,    42,    43,    44,    45,    46,    47,    48,    49,    50,
+      51,    52,    53,   228,   229,   230,   231,   232,   233,   234,
+     235,   236,   237,   238,   270,   271,   272,   273,   274,   275,
+     276,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,   167,   168,   169,     0,     0,     0,     0,     0,     0,
+       0,     0,    21,    22,    23,    24,    25,    26,    27,    28,
+      29,     0,   277,     0,     0,     0,     0,     0,     0,    33,
+      34,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,    38,     0,    39,    40,    41,
+      42,    43,    44,    45,    46,    47,    48,    49,    50,    51,
+      52,    53,   228,   229,   230,   231,   232,   233,   234,   235,
+     236,   237,   238,   297,   298,   299,   300,   301,   302,   303,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+     304,   305,   306,    16,   297,   298,   299,   300,   301,   302,
+     303,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,   304,   305,   306,     0,     0,     0,     0,    33,   297,
+     298,   299,   300,   301,   302,   303,     0,     0,     0,     0,
+       0,     0,     0,     0,   380,     0,   304,   305,   306,    33,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,    38,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,    33,     0,     0,     0,     0,     0,
+       0,   309,   310,   311,   312,   313,   314,   315,   316,   317,
+     318,   319,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,   309,   310,   311,   312,   313,   314,   315,   316,
+     317,   318,   319,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,   309,   310,   311,
+     312,   313,   314,   315,   316,   317,   318,   319
+};
+
+/* YYCONFLP[YYPACT[STATE-NUM]] -- Pointer into YYCONFL of start of
+   list of conflicting reductions corresponding to action entry for
+   state STATE-NUM in yytable.  0 means no conflicts.  The list in
+   yyconfl is terminated by a rule number of 0.  */
+static const unsigned char yyconflp[] =
+{
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     1,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     3,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     5,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,   247,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+     249,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     7,     9,    11,    13,    15,
+      17,    19,    21,    23,    25,    27,    29,    31,    33,    35,
+      37,    39,    41,    43,    45,    47,    49,    51,    53,    55,
+      57,    59,    61,    63,     0,     0,    65,    67,    69,    71,
+      73,    75,     0,    77,    79,    81,    83,     0,     0,    85,
+      87,    89,    91,    93,    95,    97,    99,   101,   103,   105,
+     107,   109,   111,   113,   115,   117,   119,   121,   123,   125,
+     127,   129,   131,   133,   135,   137,   139,   141,   143,   145,
+     147,   149,   151,   153,   155,     0,   157,     0,   159,   161,
+     163,   165,   167,   169,   171,   173,   175,   177,   179,   181,
+     183,   185,   187,   189,   191,   193,   195,   197,   199,   201,
+     203,   205,   207,   209,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,   211,     0,     0,
+     213,   215,   217,     0,   219,   221,     0,   223,   225,   227,
+     229,   231,   233,   235,   237,   239,   241,   243,   245,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0,     0,     0,
+       0,     0,     0,     0,     0,     0,     0,     0
+};
+
+/* YYCONFL[I] -- lists of conflicting rule numbers, each terminated by
+   0, pointed into by YYCONFLP.  */
+static const short int yyconfl[] =
+{
+       0,   406,     0,   406,     0,   406,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   654,     0,   654,
+       0,   654,     0,   654,     0,   654,     0,   406,     0,   406,
+       0
+};
+
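[Editorial note, not part of the upstream diff: the comment above describes YYCONFL as zero-terminated lists of conflicting rule numbers reached through YYCONFLP. The fragment below is a minimal sketch of how such a list could be walked, assuming the yyconfl table defined above; the function name and the `conflp_index` parameter are hypothetical illustrations, not the Bison GLR skeleton's actual code.]

#include <stdio.h>

/* Sketch: enumerate the conflicting rules recorded for one table cell.
   `conflp_index` stands for a value read from yyconflp; by the comment
   above, 0 means "no conflicts", otherwise it is an offset into yyconfl
   whose list of rule numbers is terminated by 0.  */
static void
print_conflicting_rules (int conflp_index)
{
  if (conflp_index == 0)
    return;                      /* no conflict recorded for this cell */
  for (int i = conflp_index; yyconfl[i] != 0; ++i)
    printf ("conflicting rule: %d\n", yyconfl[i]);
}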
+static const short int yycheck[] =
+{
+       5,   137,   123,     5,    35,   431,   123,   131,    35,   268,
+     131,   110,   131,   323,   416,    36,     5,   124,   439,     5,
+     704,   603,   705,     5,   714,   457,     5,   323,   554,   430,
+       5,     5,    37,   113,   114,   115,   287,   520,   696,   752,
+     449,    32,   972,   845,   448,   867,    35,   746,   545,   351,
+       5,    37,   554,   752,   977,   323,    35,     3,   239,     3,
+      10,    11,     3,   134,   245,    10,    11,     3,    32,   140,
+     946,   918,   705,    23,   552,    10,    11,    84,    63,  1106,
+      23,    84,    37,    32,    10,    11,   267,     3,    84,   140,
+      63,   123,   725,    23,   929,    28,    29,    30,     3,     5,
+     131,    23,    25,   124,   131,   134,    23,    18,     0,  1136,
+       3,   140,    84,    23,   295,     0,    23,   413,   123,   124,
+       3,   126,   134,   705,   110,   529,    49,    77,   140,    35,
+     140,   110,    23,    83,   538,   142,   110,   939,   621,   142,
+     830,    12,   131,    54,    55,   131,   142,   625,    31,   142,
+     997,    77,   131,    32,   612,   613,   614,    83,   149,   469,
+     140,    44,   286,   148,    47,   416,  1042,   693,   123,   124,
+     142,   126,    43,   469,   595,   148,     3,     4,     5,     6,
+       7,     8,     9,    66,   897,   149,  1109,   368,   134,   135,
+     134,   693,   138,   134,   137,   140,   146,   147,   897,   135,
+     149,   469,   138,   136,   110,   140,    32,  1029,  1138,    45,
+     140,   895,  1047,   622,   623,   712,   138,    77,   134,   135,
+     146,   147,   138,   140,   134,   938,   709,   134,   524,   134,
+     135,   632,   729,   138,   867,   537,    84,   539,    12,   938,
+     364,   134,   135,   364,    63,   138,    84,   138,   135,   140,
+      84,   283,   376,    25,   137,   286,   134,   134,   554,   286,
+     944,   139,   975,    84,   269,   978,    10,    11,   140,    43,
+     149,    43,   730,   957,   732,   140,   975,    49,   283,   978,
+      84,  1103,   134,   140,   134,   406,   146,   147,   140,   406,
+     140,   323,  1094,   135,   142,   137,   929,   286,   134,   135,
+     134,    84,   307,   308,   142,    84,   140,   286,   135,    24,
+      25,   447,   563,   449,   269,    84,  1144,   134,   323,   324,
+     139,   142,   139,   149,   135,   140,   134,   138,   283,   148,
+     134,    84,   140,   364,    84,  1163,   140,  1165,   324,    83,
+     134,   135,  1000,  1033,   138,   350,  1059,  1175,   353,   140,
+     355,   134,   307,   308,   140,   134,  1039,   140,   140,   142,
+    1059,   140,   140,   142,   350,   134,  1050,   618,   323,   324,
+     140,   140,    84,   142,   406,    84,   362,  1148,   364,   140,
+     134,   134,   143,   362,   134,   364,   140,   140,   362,   142,
+     140,  1162,   142,  1164,   140,   350,  1029,   693,   353,   523,
+     355,   406,   523,   134,   511,  1176,  1039,   134,   413,   140,
+      28,    29,    30,   140,  1047,     3,     4,     5,     6,     7,
+       8,     9,   134,  1106,   140,   134,   134,   413,   140,   140,
+     142,   140,   140,   142,   134,  1093,   457,   469,   134,   140,
+     140,   432,   433,   448,   140,   450,   140,   438,   439,   440,
+     882,   406,   457,  1136,   140,   460,   362,  1039,   413,   135,
+      48,   137,   448,   134,   469,    31,   140,    33,   459,   140,
+    1103,   136,    32,  1106,   139,   134,   134,   468,    44,   511,
+     429,    47,   140,   432,   433,   140,   622,   623,   143,   438,
+     439,   440,   523,   448,   140,   450,    84,     4,   140,   102,
+    1158,  1159,   457,  1136,   930,   460,   511,   512,   135,   530,
+     459,   138,   146,   147,   469,   140,   697,   522,   135,   524,
+     525,    84,   554,    12,   529,   530,   512,    13,    14,    15,
+      31,    32,   137,   538,   134,   540,   527,   523,   524,   560,
+     134,   136,   135,   529,   523,   142,    83,   138,    23,   554,
+     140,   135,   538,   432,   433,   560,   511,   512,   549,   438,
+     439,   440,   134,  1089,   139,   139,   139,   522,   139,   524,
+     525,   139,   139,   975,   529,   139,   683,   139,   527,   135,
+     459,   139,   139,   538,   139,   540,   139,   139,   139,   149,
+     139,   141,    99,   141,   134,   854,  1122,   104,   134,   554,
+     605,   143,   141,   135,   595,   560,   432,   433,   141,  1030,
+     144,   747,   438,   439,   440,   122,   605,   136,   144,    43,
+     127,   745,   134,   134,   134,   132,     3,   143,   143,   765,
+     144,   139,   607,   459,   141,   142,   141,   141,   141,  1065,
+     139,   139,   138,   137,   134,    83,   595,   139,   527,   134,
+     605,   683,     3,     4,     5,     6,     7,     8,     9,   141,
+     134,   693,   143,    25,   669,   144,   139,   134,   134,   139,
+     675,   136,   144,   144,   138,   278,   139,   280,   683,     3,
+       4,     5,     6,     7,     8,     9,   945,   141,   693,    36,
+     139,  1112,   287,   141,   134,   141,   141,    48,   141,   605,
+     141,   527,   141,   134,   141,   141,   141,   141,   141,   139,
+     139,  1021,  1116,   141,   669,    83,   595,  1143,   144,   144,
+     675,   137,   141,   134,   139,  1021,    13,    13,   683,   134,
+      46,   141,   144,   141,   739,   740,   741,    37,   693,   141,
+    1161,   141,  1146,   141,  1148,   141,  1172,   141,   141,   135,
+       5,   918,   918,   926,  1026,   918,   705,   806,  1179,   918,
+    1164,   815,   918,  1072,   617,   765,   918,   618,   877,   595,
+     365,     5,   279,   918,   281,   282,   725,   284,  1088,   997,
+     531,   821,  1035,  1096,   739,   740,   741,   918,   348,   502,
+     863,   351,  1088,  1089,   501,  1169,   859,   400,   688,   402,
+     605,   425,   425,   918,   729,   754,   755,   756,   959,   874,
+     559,   941,   820,    -1,   321,    -1,   821,    -1,    -1,    -1,
+      -1,   416,    -1,   426,    -1,   125,  1122,   127,   128,    24,
+      25,    26,   823,    -1,    -1,    -1,    31,    32,    33,    34,
+      35,    36,    37,    38,    39,    -1,    41,    -1,    -1,   452,
+     453,    -1,   359,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,   882,    -1,    -1,    -1,    -1,   821,    -1,   375,   429,
+      -1,    -1,   432,   433,   823,    -1,    -1,   882,   438,   439,
+     440,    -1,    -1,    -1,    -1,    -1,    -1,   447,    -1,   449,
+      -1,    -1,    -1,    -1,   401,    -1,   403,    -1,  1022,   459,
+     407,  1022,    -1,  1022,    -1,  1022,    -1,   502,    -1,  1045,
+     417,   418,   419,   918,  1013,   920,   918,    -1,   867,   924,
+      -1,   926,    -1,    -1,   873,   874,   875,   882,    -1,   918,
+      -1,    -1,   918,    -1,    -1,  1015,   918,    -1,   445,   918,
+      -1,   544,    -1,   918,   823,    -1,    -1,    -1,     5,    -1,
+      -1,  1075,    -1,    -1,  1075,    -1,    -1,    -1,  1075,  1083,
+      -1,  1142,  1083,   918,    -1,   920,  1083,   527,   563,   924,
+    1151,   926,    -1,  1072,    -1,    -1,    -1,   537,    35,   539,
+     929,    -1,    -1,    -1,   284,   285,    -1,    -1,    -1,  1021,
+      -1,  1022,   997,  1073,   999,  1022,   503,   823,    -1,  1123,
+      -1,    -1,  1123,   510,    -1,    -1,  1123,    -1,  1013,   569,
+     959,  1013,   918,   999,   521,    -1,  1021,  1022,   997,    -1,
+      -1,  1026,   997,   618,    -1,   325,   326,  1013,    -1,    -1,
+      -1,  1013,    -1,  1022,  1013,   595,  1022,    -1,  1013,  1030,
+     946,   548,   997,  1022,   999,    -1,   553,    -1,    -1,    -1,
+     350,    -1,  1083,   110,   124,    -1,  1088,    -1,  1013,    -1,
+      -1,   118,   622,   623,    -1,    -1,  1021,  1022,  1089,    -1,
+    1075,  1026,    -1,    -1,    -1,    -1,  1081,    -1,  1083,  1081,
+    1029,  1030,    -1,  1088,  1089,   385,   386,   387,    -1,    -1,
+    1039,   997,  1123,  1072,    -1,  1081,  1075,  1083,  1047,  1081,
+      -1,  1122,  1081,    -1,  1083,    -1,  1081,  1013,    -1,    -1,
+      -1,  1116,    -1,    -1,    -1,     5,    -1,  1122,  1123,   626,
+    1075,  1112,    -1,    -1,     5,    -1,  1081,    -1,  1083,   131,
+    1116,    -1,    -1,  1088,  1089,    -1,    -1,  1123,    -1,    -1,
+    1119,  1146,    -1,  1148,  1123,   705,    -1,    37,   448,    -1,
+     710,  1030,    -1,    -1,  1103,    -1,    37,  1106,    -1,  1164,
+    1146,  1116,  1148,  1112,    -1,   725,  1072,  1122,  1123,  1160,
+    1161,    -1,    -1,   680,    -1,  1081,    -1,  1168,  1164,    -1,
+      -1,    -1,    -1,    -1,    -1,   692,    -1,  1136,  1179,    -1,
+      -1,  1146,    -1,  1148,   754,   755,   756,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,  1030,   123,    -1,    -1,   126,  1164,
+      -1,  1160,  1161,   513,   514,   810,    -1,    -1,    -1,  1168,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,   822,    -1,   529,
+    1179,    -1,    -1,  1112,    -1,    -1,   126,    -1,   538,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,   327,    -1,    -1,
+      -1,   331,   332,   823,   334,   335,   336,   337,   338,   339,
+     340,   341,   342,   343,   344,   345,   346,   347,    -1,    -1,
+     350,  1160,  1161,   886,   286,    -1,  1112,    -1,    -1,  1168,
+      -1,    -1,    -1,    -1,   801,    -1,    -1,    -1,    -1,    -1,
+    1179,    -1,    -1,    -1,    -1,   362,   813,   867,    -1,    -1,
+      -1,    -1,    -1,   873,   874,   875,    -1,   824,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,   631,    -1,    -1,  1160,  1161,    -1,    -1,    -1,    -1,
+      -1,    -1,  1168,    -1,    -1,    -1,   348,    -1,    -1,   351,
+      -1,    -1,    -1,  1179,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,   364,    -1,    -1,   283,    -1,    -1,   971,   929,
+      -1,    -1,    -1,    -1,   376,    -1,    -1,   884,   448,   269,
+      -1,   681,   682,    -1,    -1,    -1,    -1,    -1,   269,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   959,
+      -1,    -1,   702,    -1,    -1,   323,   324,    -1,   478,   479,
+     480,   468,    -1,   483,    -1,   922,   976,   307,   308,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,   307,   308,  1023,    -1,
+    1025,    -1,  1027,   940,   324,    -1,    -1,    -1,    -1,    -1,
+      -1,   511,    -1,   743,   744,   447,   516,   449,    31,    32,
+      33,    34,    35,    36,    37,    38,    39,    40,    41,   529,
+      -1,    -1,    -1,   353,    -1,   355,    -1,    50,   538,  1029,
+    1030,    -1,   353,    -1,   355,    -1,    -1,    -1,    -1,  1039,
+      -1,  1076,    -1,    -1,    -1,    -1,    -1,  1047,   406,    -1,
+      -1,    -1,  1052,    -1,    -1,   413,    -1,  1100,    -1,    -1,
+      -1,    -1,    -1,    86,    87,    88,    89,    90,    91,    92,
+      93,    94,    95,    96,    97,    98,    99,   100,    -1,    -1,
+      -1,   523,    -1,    -1,    -1,    -1,    -1,    -1,  1131,    -1,
+      -1,    -1,    -1,    -1,    -1,   537,    -1,   539,    -1,   457,
+      -1,    -1,    -1,  1103,    -1,    -1,  1106,    -1,    -1,    -1,
+      -1,   469,  1112,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+     450,    -1,    -1,  1070,    -1,    -1,    -1,   569,    -1,   450,
+     460,    -1,  1132,    -1,    -1,    -1,  1136,    -1,    -1,  1086,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,  1098,  1152,    -1,    -1,  1102,    -1,  1104,    -1,   656,
+    1160,  1161,    -1,    -1,    -1,    -1,   524,    -1,  1168,    -1,
+    1117,    -1,    -1,   683,  1121,    -1,    -1,    -1,  1125,  1179,
+     622,   623,    12,   693,    -1,    -1,    -1,    -1,  1135,    -1,
+      -1,    -1,   522,    -1,    -1,   525,   554,    -1,    -1,    -1,
+    1147,   522,   560,    -1,   525,    -1,    -1,    -1,    -1,    -1,
+     540,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   540,
+      -1,    -1,    -1,    53,    54,    -1,    -1,    -1,    -1,    -1,
+      60,    61,    62,    63,    64,    65,    66,    67,    68,    69,
+      70,    71,    72,    73,    74,    75,    76,    77,    78,    79,
+      80,    81,    82,    -1,    -1,    -1,    -1,    -1,    -1,   999,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   710,    -1,
+      -1,    -1,    -1,   770,    -1,   605,    -1,    -1,    -1,    -1,
+      -1,   778,    -1,    -1,    -1,   782,   783,    -1,    -1,   786,
+     787,   788,   789,   790,   791,   792,   793,   794,   795,    -1,
+      -1,    -1,    -1,   745,    -1,   815,    -1,   137,    -1,   139,
+     140,    -1,   142,   143,    -1,   145,   146,   147,   148,   149,
+     150,   151,   152,   153,   154,   155,    -1,    -1,    -1,    -1,
+      -1,   828,    -1,    -1,    -1,   693,    -1,    -1,    -1,   669,
+      -1,    -1,    -1,    -1,    -1,   675,    -1,    -1,   669,    -1,
+       3,     4,     5,     6,     7,     8,     9,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    20,    21,    22,
+      23,    24,    25,    26,    27,    -1,  1116,    -1,    31,    32,
+      33,    34,    35,    36,    37,    38,    39,    40,    41,    42,
+      43,    -1,    -1,    -1,    -1,    48,    49,    50,    -1,    52,
+      -1,    -1,    -1,    -1,   901,    -1,  1146,   904,  1148,   739,
+     740,   741,    -1,    -1,    -1,    -1,    -1,    -1,   739,   740,
+     741,   918,    -1,    -1,  1164,    -1,    -1,    -1,    -1,    -1,
+      -1,    84,    -1,    86,    87,    88,    89,    90,    91,    92,
+      93,    94,    95,    96,    97,    98,    99,   100,   101,   102,
+     103,   104,   105,   106,   107,   108,   109,   110,   111,   112,
+     113,   114,   115,   116,   117,   118,   119,   120,   121,   122,
+     123,   124,   125,   126,   127,   128,   129,   130,   131,   132,
+     133,   134,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,   821,   145,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+     821,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,     3,     4,     5,     6,     7,     8,
+       9,    -1,    -1,    -1,   882,    -1,    -1,    -1,    -1,    -1,
+      -1,    20,    21,    22,   976,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,     3,     4,     5,     6,     7,     8,
+       9,    -1,    -1,    42,    43,    -1,    -1,    -1,    -1,    48,
+     918,    20,    21,    22,    23,    -1,    -1,    -1,    -1,    28,
+      29,    30,    -1,    -1,    -1,    -1,    -1,    36,    -1,    -1,
+    1022,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   918,    48,
+     920,    -1,    -1,    -1,   924,    -1,   926,   918,    -1,   920,
+    1042,    -1,    -1,   924,    -1,    -1,  1116,    -1,    -1,    -1,
+    1052,    -1,   101,   102,   103,   104,   105,   106,   107,   108,
+     109,   110,   111,    -1,    -1,    84,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,  1075,    -1,    -1,  1146,    -1,  1148,   997,
+      -1,  1083,   101,   102,   103,   104,   105,   106,   107,   108,
+     109,   110,   111,    -1,  1164,  1013,   145,    -1,    -1,    -1,
+      -1,    -1,    -1,  1021,  1022,    -1,    -1,    -1,    -1,   999,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,  1119,   999,    -1,
+      -1,  1123,    -1,    -1,    -1,    -1,   145,    -1,    -1,    -1,
+    1132,    -1,    -1,    -1,    -1,    -1,  1026,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+    1152,    -1,    -1,    -1,    -1,    -1,    -1,  1075,    -1,    -1,
+      -1,    -1,    -1,  1081,    -1,  1083,    -1,    -1,    -1,    -1,
+    1088,  1089,    -1,    -1,    -1,     3,     4,     5,     6,     7,
+       8,     9,    10,    11,    12,    13,    14,    15,    16,    17,
+      18,    19,    20,    21,    22,    23,    24,    25,    26,    27,
+      28,    29,    30,    31,  1122,  1123,    34,    35,    36,    37,
+      38,    39,    -1,    41,    42,    43,    44,    -1,    -1,    47,
+      48,    49,    50,    51,    52,    53,    54,    55,    56,    57,
+      58,    59,    60,    61,    62,    63,    64,    65,    66,    67,
+      68,    69,    70,    71,    72,    73,    74,    75,    76,    77,
+      78,    79,    80,    81,    82,    83,    84,    85,    86,    87,
+      88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
+      98,    99,   100,   101,   102,   103,   104,   105,   106,   107,
+     108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,   134,   135,    -1,   137,
+     138,   139,   140,    -1,   142,   143,   144,   145,   146,   147,
+     148,   149,   150,   151,   152,   153,   154,   155,   156,     3,
+       4,     5,     6,     7,     8,     9,    10,    11,    12,    13,
+      14,    15,    16,    17,    18,    19,    20,    21,    22,    23,
+      24,    25,    26,    27,    28,    29,    30,    31,    -1,    -1,
+      34,    35,    36,    37,    38,    39,    -1,    41,    42,    43,
+      44,    -1,    -1,    47,    48,    49,    50,    51,    52,    53,
+      54,    55,    56,    57,    58,    59,    60,    61,    62,    63,
+      64,    65,    66,    67,    68,    69,    70,    71,    72,    73,
+      74,    75,    76,    77,    78,    79,    80,    81,    82,    83,
+      84,    85,    86,    87,    88,    89,    90,    91,    92,    93,
+      94,    95,    96,    97,    98,    99,   100,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,    -1,   123,   124,   125,
-     126,   127,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
+     134,   135,   136,   137,   138,   139,   140,    -1,   142,   143,
+      -1,   145,   146,   147,   148,   149,   150,   151,   152,   153,
+     154,   155,   156,     3,     4,     5,     6,     7,     8,     9,
+      10,    11,    12,    13,    14,    15,    16,    17,    18,    19,
+      20,    21,    22,    23,    24,    25,    26,    27,    28,    29,
+      30,    31,    -1,    -1,    34,    35,    36,    37,    38,    39,
+      -1,    41,    42,    43,    44,    -1,    -1,    47,    48,    49,
+      50,    51,    52,    53,    54,    55,    56,    57,    58,    59,
+      60,    61,    62,    63,    64,    65,    66,    67,    68,    69,
+      70,    71,    72,    73,    74,    75,    76,    77,    78,    79,
+      80,    81,    82,    83,    84,    85,    86,    87,    88,    89,
+      90,    91,    92,    93,    94,    95,    96,    97,    98,    99,
+     100,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,   134,   135,    -1,   137,   138,   139,
+     140,    -1,   142,   143,   144,   145,   146,   147,   148,   149,
+     150,   151,   152,   153,   154,   155,   156,     3,     4,     5,
        6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
       16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
+      26,    27,    28,    29,    30,    31,    -1,    -1,    34,    35,
+      36,    37,    38,    39,    -1,    41,    42,    43,    44,    -1,
+      -1,    47,    48,    49,    50,    51,    52,    53,    54,    55,
       56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
       66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
       76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
       86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      96,    97,    98,    99,   100,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   134,   135,
+      -1,   137,   138,   139,   140,   141,   142,   143,    -1,   145,
+     146,   147,   148,   149,   150,   151,   152,   153,   154,   155,
+     156,     3,     4,     5,     6,     7,     8,     9,    10,    11,
+      12,    13,    14,    15,    16,    17,    18,    19,    20,    21,
+      22,    23,    24,    25,    26,    27,    28,    29,    30,    31,
+      -1,    -1,    34,    35,    36,    37,    38,    39,    -1,    41,
+      42,    43,    44,    -1,    -1,    47,    48,    49,    50,    51,
+      52,    53,    54,    55,    56,    57,    58,    59,    60,    61,
+      62,    63,    64,    65,    66,    67,    68,    69,    70,    71,
+      72,    73,    74,    75,    76,    77,    78,    79,    80,    81,
+      82,    83,    84,    85,    86,    87,    88,    89,    90,    91,
+      92,    93,    94,    95,    96,    97,    98,    99,   100,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,   134,   135,   136,   137,   138,   139,   140,    -1,
+     142,   143,    -1,   145,   146,   147,   148,   149,   150,   151,
+     152,   153,   154,   155,   156,     3,     4,     5,     6,     7,
+       8,     9,    10,    11,    12,    13,    14,    15,    16,    17,
+      18,    19,    20,    21,    22,    23,    24,    25,    26,    27,
+      28,    29,    30,    31,    -1,    -1,    34,    35,    36,    37,
+      38,    39,    -1,    41,    42,    43,    44,    -1,    -1,    47,
+      48,    49,    50,    51,    52,    53,    54,    55,    56,    57,
+      58,    59,    60,    61,    62,    63,    64,    65,    66,    67,
+      68,    69,    70,    71,    72,    73,    74,    75,    76,    77,
+      78,    79,    80,    81,    82,    83,    84,    85,    86,    87,
+      88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
+      98,    99,   100,   101,   102,   103,   104,   105,   106,   107,
+     108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,   134,   135,   136,   137,
+     138,   139,   140,    -1,   142,   143,    -1,   145,   146,   147,
+     148,   149,   150,   151,   152,   153,   154,   155,   156,     3,
+       4,     5,     6,     7,     8,     9,    10,    11,    12,    13,
+      14,    15,    16,    17,    18,    19,    20,    21,    22,    23,
+      24,    25,    26,    27,    28,    29,    30,    31,    -1,    -1,
+      34,    35,    36,    37,    38,    39,    -1,    41,    42,    43,
+      44,    -1,    -1,    47,    48,    49,    50,    51,    52,    53,
+      54,    55,    56,    57,    58,    59,    60,    61,    62,    63,
+      64,    65,    66,    67,    68,    69,    70,    71,    72,    73,
+      74,    75,    76,    77,    78,    79,    80,    81,    82,    83,
+      84,    85,    86,    87,    88,    89,    90,    91,    92,    93,
+      94,    95,    96,    97,    98,    99,   100,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+     134,   135,   136,   137,   138,   139,   140,    -1,   142,   143,
+      -1,   145,   146,   147,   148,   149,   150,   151,   152,   153,
+     154,   155,   156,     3,     4,     5,     6,     7,     8,     9,
+      10,    11,    12,    13,    14,    15,    16,    17,    18,    19,
+      20,    21,    22,    23,    24,    25,    26,    27,    28,    29,
+      30,    31,    -1,    -1,    34,    35,    36,    37,    38,    39,
+      -1,    41,    42,    43,    44,    -1,    -1,    47,    48,    49,
+      50,    51,    52,    53,    54,    55,    56,    57,    58,    59,
+      60,    61,    62,    63,    64,    65,    66,    67,    68,    69,
+      70,    71,    72,    73,    74,    75,    76,    77,    78,    79,
+      80,    81,    82,    83,    84,    85,    86,    87,    88,    89,
+      90,    91,    92,    93,    94,    95,    96,    97,    98,    99,
+     100,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,   122,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
+      -1,    -1,    -1,    -1,   134,   135,   136,   137,   138,   139,
+     140,    -1,   142,   143,    -1,   145,   146,   147,   148,   149,
+     150,   151,   152,   153,   154,   155,   156,     3,     4,     5,
        6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
       16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
+      26,    27,    28,    29,    30,    31,    -1,    -1,    34,    35,
+      36,    37,    38,    39,    -1,    41,    42,    43,    44,    -1,
+      -1,    47,    48,    49,    50,    51,    52,    53,    54,    55,
       56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
       66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
       76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
       86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      96,    97,    98,    99,   100,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   134,   135,
+     136,   137,   138,   139,   140,    -1,   142,   143,    -1,   145,
+     146,   147,   148,   149,   150,   151,   152,   153,   154,   155,
+     156,     3,     4,     5,     6,     7,     8,     9,    10,    11,
+      12,    13,    14,    15,    16,    17,    18,    19,    20,    21,
+      22,    23,    24,    25,    26,    27,    28,    29,    30,    31,
+      -1,    -1,    34,    35,    36,    37,    38,    39,    -1,    41,
+      42,    43,    44,    -1,    -1,    47,    48,    49,    50,    51,
+      52,    53,    54,    55,    56,    57,    58,    59,    60,    61,
+      62,    63,    64,    65,    66,    67,    68,    69,    70,    71,
+      72,    73,    74,    75,    76,    77,    78,    79,    80,    81,
+      82,    83,    84,    85,    86,    87,    88,    89,    90,    91,
+      92,    93,    94,    95,    96,    97,    98,    99,   100,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,   134,   135,    -1,   137,   138,   139,   140,    -1,
+     142,   143,    -1,   145,   146,   147,   148,   149,   150,   151,
+     152,   153,   154,   155,   156,     3,     4,     5,     6,     7,
+       8,     9,    10,    11,    12,    13,    14,    15,    16,    17,
+      18,    19,    20,    21,    22,    23,    24,    25,    26,    27,
+      28,    29,    30,    31,    -1,    -1,    34,    35,    36,    37,
+      38,    39,    -1,    41,    42,    43,    44,    -1,    -1,    47,
+      48,    49,    50,    51,    52,    53,    54,    55,    56,    57,
+      58,    59,    60,    61,    62,    63,    64,    65,    66,    67,
+      68,    69,    70,    71,    72,    73,    74,    75,    76,    77,
+      78,    79,    80,    81,    82,    83,    84,    85,    86,    87,
+      88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
+      98,    99,   100,   101,   102,   103,   104,   105,   106,   107,
+     108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,   134,   135,    -1,   137,
+     138,   139,   140,    -1,   142,   143,    -1,   145,   146,   147,
+     148,   149,   150,   151,   152,   153,   154,   155,   156,     3,
+       4,     5,     6,     7,     8,     9,    10,    11,    12,    13,
+      14,    15,    16,    17,    18,    19,    20,    21,    22,    23,
+      24,    25,    26,    27,    28,    29,    30,    31,    -1,    -1,
+      34,    35,    36,    37,    38,    39,    -1,    41,    42,    43,
+      44,    -1,    -1,    47,    48,    49,    50,    51,    52,    53,
+      54,    55,    56,    57,    58,    59,    60,    61,    62,    63,
+      64,    65,    66,    67,    68,    69,    70,    71,    72,    73,
+      74,    75,    76,    77,    78,    79,    80,    81,    82,    83,
+      84,    85,    86,    87,    88,    89,    90,    91,    92,    93,
+      94,    95,    96,    97,    98,    99,   100,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,    -1,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
+      -1,   135,    -1,   137,   138,   139,   140,   141,   142,   143,
+      -1,   145,   146,   147,   148,   149,   150,   151,   152,   153,
+     154,   155,   156,     3,     4,     5,     6,     7,     8,     9,
+      10,    11,    12,    13,    14,    15,    16,    17,    18,    19,
+      20,    21,    22,    23,    24,    25,    26,    27,    28,    29,
+      30,    31,    -1,    -1,    34,    35,    36,    37,    38,    39,
+      -1,    41,    42,    43,    44,    -1,    -1,    47,    48,    49,
+      50,    51,    52,    53,    54,    55,    56,    57,    58,    59,
+      60,    61,    62,    63,    64,    65,    66,    67,    68,    69,
+      70,    71,    72,    73,    74,    75,    76,    77,    78,    79,
+      80,    81,    82,    83,    84,    85,    86,    87,    88,    89,
+      90,    91,    92,    93,    94,    95,    96,    97,    98,    99,
+     100,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,   134,   135,    -1,   137,   138,   139,
+     140,    -1,   142,   143,    -1,   145,   146,   147,   148,   149,
+     150,   151,   152,   153,   154,   155,   156,     3,     4,     5,
        6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
       16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
+      26,    27,    28,    29,    30,    31,    -1,    -1,    34,    35,
+      36,    37,    38,    39,    -1,    41,    42,    43,    44,    -1,
+      -1,    47,    48,    49,    50,    51,    52,    53,    54,    55,
       56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
       66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
       76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
       86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      96,    97,    98,    99,   100,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   134,   135,
+      -1,   137,   138,   139,   140,    -1,   142,   143,    -1,   145,
+     146,   147,   148,   149,   150,   151,   152,   153,   154,   155,
+     156,     3,     4,     5,     6,     7,     8,     9,    10,    11,
+      12,    13,    14,    15,    16,    17,    18,    19,    20,    21,
+      22,    23,    24,    25,    26,    27,    28,    29,    30,    31,
+      -1,    -1,    34,    35,    36,    37,    38,    39,    -1,    41,
+      42,    43,    44,    -1,    -1,    47,    48,    49,    50,    51,
+      52,    53,    54,    55,    56,    57,    58,    59,    60,    61,
+      62,    63,    64,    65,    66,    67,    68,    69,    70,    71,
+      72,    73,    74,    75,    76,    77,    78,    79,    80,    81,
+      82,    83,    84,    85,    86,    87,    88,    89,    90,    91,
+      92,    93,    94,    95,    96,    97,    98,    99,   100,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,   135,    -1,   137,   138,   139,   140,    -1,
+     142,   143,    -1,   145,   146,   147,   148,   149,   150,   151,
+     152,   153,   154,   155,   156,     3,     4,     5,     6,     7,
+       8,     9,    10,    11,    12,    13,    14,    15,    16,    17,
+      18,    19,    20,    21,    22,    23,    24,    25,    26,    27,
+      28,    29,    30,    31,    -1,    -1,    34,    35,    36,    37,
+      38,    39,    -1,    41,    42,    43,    44,    -1,    -1,    47,
+      48,    49,    50,    51,    52,    53,    54,    55,    56,    57,
+      58,    59,    60,    61,    62,    63,    64,    65,    66,    67,
+      68,    69,    70,    71,    72,    73,    74,    75,    76,    77,
+      78,    79,    80,    81,    82,    -1,    84,    -1,    86,    87,
+      88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
+      98,    99,   100,   101,   102,   103,   104,   105,   106,   107,
+     108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,   134,   135,   136,   137,
+     138,   139,   140,    -1,   142,   143,    -1,   145,   146,   147,
+     148,   149,   150,   151,   152,   153,   154,   155,   156,     3,
+       4,     5,     6,     7,     8,     9,    10,    11,    12,    13,
+      14,    15,    16,    17,    18,    19,    20,    21,    22,    23,
+      24,    25,    26,    27,    28,    29,    30,    31,    -1,    -1,
+      34,    35,    36,    37,    38,    39,    -1,    41,    42,    43,
+      44,    -1,    -1,    47,    48,    49,    50,    51,    52,    53,
+      54,    55,    56,    57,    58,    59,    60,    61,    62,    63,
+      64,    65,    66,    67,    68,    69,    70,    71,    72,    73,
+      74,    75,    76,    77,    78,    79,    80,    81,    82,    83,
+      84,    85,    86,    87,    88,    89,    90,    91,    92,    93,
+      94,    95,    96,    97,    98,    99,   100,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,   123,   124,   125,
-     126,   127,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
+      -1,   135,    -1,   137,   138,   139,   140,    -1,   142,   143,
+      -1,   145,   146,   147,   148,   149,   150,   151,   152,   153,
+     154,   155,   156,     3,     4,     5,     6,     7,     8,     9,
+      10,    11,    12,    13,    14,    15,    16,    17,    18,    19,
+      20,    21,    22,    23,    24,    25,    26,    27,    28,    29,
+      30,    31,    -1,    -1,    34,    35,    36,    37,    38,    39,
+      -1,    41,    42,    43,    44,    -1,    -1,    47,    48,    49,
+      50,    51,    52,    53,    54,    55,    56,    57,    58,    59,
+      60,    61,    62,    63,    64,    65,    66,    67,    68,    69,
+      70,    71,    72,    73,    74,    75,    76,    77,    78,    79,
+      80,    81,    82,    83,    84,    85,    86,    87,    88,    89,
+      90,    91,    92,    93,    94,    95,    96,    97,    98,    99,
+     100,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,   135,    -1,   137,   138,   139,
+     140,    -1,    -1,   143,    -1,   145,   146,   147,   148,   149,
+     150,   151,   152,   153,   154,   155,   156,     3,     4,     5,
        6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
       16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
+      26,    27,    28,    29,    30,    31,    -1,    -1,    34,    35,
+      36,    37,    38,    39,    -1,    41,    42,    43,    44,    -1,
+      -1,    47,    48,    49,    50,    51,    52,    53,    54,    55,
       56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
       66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
+      76,    77,    78,    79,    80,    81,    82,    -1,    84,    -1,
       86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      96,    97,    98,    99,   100,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   135,
+      -1,   137,   138,   139,   140,   141,   142,   143,    -1,   145,
+     146,   147,   148,   149,   150,   151,   152,   153,   154,   155,
+     156,     3,     4,     5,     6,     7,     8,     9,    10,    11,
+      12,    13,    14,    15,    16,    17,    18,    19,    20,    21,
+      22,    23,    24,    25,    26,    27,    28,    29,    30,    31,
+      -1,    -1,    34,    35,    36,    37,    38,    39,    -1,    41,
+      42,    43,    44,    -1,    -1,    47,    48,    49,    50,    51,
+      52,    53,    54,    55,    56,    57,    58,    59,    60,    61,
+      62,    63,    64,    65,    66,    67,    68,    69,    70,    71,
+      72,    73,    74,    75,    76,    77,    78,    79,    80,    81,
+      82,    -1,    84,    -1,    86,    87,    88,    89,    90,    91,
+      92,    93,    94,    95,    96,    97,    98,    99,   100,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,   135,    -1,   137,   138,   139,   140,   141,
+     142,   143,    -1,   145,   146,   147,   148,   149,   150,   151,
+     152,   153,   154,   155,   156,     3,     4,     5,     6,     7,
+       8,     9,    10,    11,    12,    13,    14,    15,    16,    17,
+      18,    19,    20,    21,    22,    23,    24,    25,    26,    27,
+      28,    29,    30,    31,    -1,    -1,    34,    35,    36,    37,
+      38,    39,    -1,    41,    42,    43,    44,    -1,    -1,    47,
+      48,    49,    50,    51,    52,    53,    54,    55,    56,    57,
+      58,    59,    60,    61,    62,    63,    64,    65,    66,    67,
+      68,    69,    70,    71,    72,    73,    74,    75,    76,    77,
+      78,    79,    80,    81,    82,    -1,    84,    -1,    86,    87,
+      88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
+      98,    99,   100,   101,   102,   103,   104,   105,   106,   107,
+     108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,   135,    -1,   137,
+     138,   139,   140,   141,   142,   143,    -1,   145,   146,   147,
+     148,   149,   150,   151,   152,   153,   154,   155,   156,     3,
+       4,     5,     6,     7,     8,     9,    10,    11,    12,    13,
+      14,    15,    16,    17,    18,    19,    20,    21,    22,    23,
+      24,    25,    26,    27,    28,    29,    30,    31,    -1,    -1,
+      34,    35,    36,    37,    38,    39,    -1,    41,    42,    43,
+      44,    -1,    -1,    47,    48,    49,    50,    51,    52,    53,
+      54,    55,    56,    57,    58,    59,    60,    61,    62,    63,
+      64,    65,    66,    67,    68,    69,    70,    71,    72,    73,
+      74,    75,    76,    77,    78,    79,    80,    81,    82,    -1,
+      84,    -1,    86,    87,    88,    89,    90,    91,    92,    93,
+      94,    95,    96,    97,    98,    99,   100,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,    -1,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
+      -1,   135,    -1,   137,   138,   139,   140,    -1,   142,   143,
+     144,   145,   146,   147,   148,   149,   150,   151,   152,   153,
+     154,   155,   156,     3,     4,     5,     6,     7,     8,     9,
+      10,    11,    12,    13,    14,    15,    16,    17,    18,    19,
+      20,    21,    22,    23,    24,    25,    26,    27,    28,    29,
+      30,    31,    -1,    -1,    34,    35,    36,    37,    38,    39,
+      -1,    41,    42,    43,    44,    -1,    -1,    47,    48,    49,
+      50,    51,    52,    53,    54,    55,    56,    57,    58,    59,
+      60,    61,    62,    63,    64,    65,    66,    67,    68,    69,
+      70,    71,    72,    73,    74,    75,    76,    77,    78,    79,
+      80,    81,    82,    -1,    84,    -1,    86,    87,    88,    89,
+      90,    91,    92,    93,    94,    95,    96,    97,    98,    99,
+     100,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,   135,    -1,   137,   138,   139,
+     140,    -1,   142,   143,   144,   145,   146,   147,   148,   149,
+     150,   151,   152,   153,   154,   155,   156,     3,     4,     5,
        6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
       16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
+      26,    27,    28,    29,    30,    31,    -1,    -1,    34,    35,
+      36,    37,    38,    39,    -1,    41,    42,    43,    44,    -1,
+      -1,    47,    48,    49,    50,    51,    52,    53,    54,    55,
       56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
       66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
+      76,    77,    78,    79,    80,    81,    82,    -1,    84,    -1,
       86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      96,    97,    98,    99,   100,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,    -1,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   135,
+      -1,   137,   138,   139,   140,    -1,   142,   143,    -1,   145,
+     146,   147,   148,   149,   150,   151,   152,   153,   154,   155,
+     156,     3,     4,     5,     6,     7,     8,     9,    10,    11,
+      12,    13,    14,    15,    16,    17,    18,    19,    20,    21,
+      22,    23,    24,    25,    26,    27,    28,    29,    30,    31,
+      -1,    -1,    34,    35,    36,    37,    38,    39,    -1,    41,
+      42,    43,    44,    -1,    -1,    47,    48,    49,    50,    51,
+      52,    53,    54,    55,    56,    57,    58,    59,    60,    61,
+      62,    63,    64,    65,    66,    67,    68,    69,    70,    71,
+      72,    73,    74,    75,    76,    77,    78,    79,    80,    81,
+      82,    -1,    84,    -1,    86,    87,    88,    89,    90,    91,
+      92,    93,    94,    95,    96,    97,    98,    99,   100,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,   121,   122,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,   123,   124,   125,
-     126,   127,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,   135,    -1,   137,   138,   139,   140,    -1,
+     142,   143,    -1,   145,   146,   147,   148,   149,   150,   151,
+     152,   153,   154,   155,   156,     3,     4,     5,     6,     7,
+       8,     9,    10,    11,    12,    13,    14,    15,    16,    17,
+      18,    19,    20,    21,    22,    23,    24,    25,    26,    27,
+      28,    29,    30,    31,    -1,    -1,    34,    35,    36,    37,
+      38,    39,    -1,    41,    42,    43,    44,    -1,    -1,    47,
+      48,    49,    50,    51,    52,    53,    54,    55,    56,    57,
+      58,    59,    60,    61,    62,    63,    64,    65,    66,    67,
+      68,    69,    70,    71,    72,    73,    74,    75,    76,    77,
+      78,    79,    80,    81,    82,    -1,    84,    -1,    86,    87,
+      88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
+      98,    99,   100,   101,   102,   103,   104,   105,   106,   107,
+     108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,   123,   124,   125,
-     126,   127,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,   135,    -1,   137,
+     138,   139,   140,    -1,   142,   143,    -1,   145,   146,   147,
+     148,   149,   150,   151,   152,   153,   154,   155,   156,     3,
+       4,     5,     6,     7,     8,     9,    10,    11,    12,    13,
+      14,    15,    16,    17,    18,    19,    20,    21,    22,    23,
+      24,    25,    26,    27,    28,    29,    30,    31,    -1,    -1,
+      34,    35,    36,    37,    38,    39,    -1,    41,    42,    43,
+      44,    -1,    -1,    47,    48,    49,    50,    51,    52,    53,
+      54,    55,    56,    57,    58,    59,    60,    61,    62,    63,
+      64,    65,    66,    67,    68,    69,    70,    71,    72,    73,
+      74,    75,    76,    77,    78,    79,    80,    81,    82,    -1,
+      84,    -1,    86,    87,    88,    89,    90,    91,    92,    93,
+      94,    95,    96,    97,    98,    99,   100,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,   123,   124,   125,
-     126,   127,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,   123,   124,   125,
-     126,    -1,   128,   129,   130,   131,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,   135,    -1,    -1,   138,    -1,   140,    -1,   142,   143,
+      -1,   145,   146,   147,   148,   149,   150,   151,   152,   153,
+     154,   155,   156,     3,     4,     5,     6,     7,     8,     9,
+      10,    11,    12,    13,    14,    15,    16,    17,    18,    19,
+      20,    21,    22,    23,    24,    25,    26,    27,    28,    29,
+      30,    31,    -1,    -1,    34,    35,    36,    37,    38,    39,
+      -1,    41,    42,    43,    44,    -1,    -1,    47,    48,    49,
+      50,    51,    52,    53,    54,    55,    56,    57,    58,    59,
+      60,    61,    62,    -1,    64,    65,    66,    67,    68,    69,
+      70,    71,    72,    73,    74,    75,    76,    77,    78,    79,
+      80,    81,    82,    -1,    84,    -1,    86,    87,    88,    89,
+      90,    91,    92,    93,    94,    95,    96,    97,    98,    99,
+     100,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
+      -1,    -1,    -1,    -1,    -1,   135,    -1,    -1,   138,    -1,
+     140,    -1,   142,   143,    -1,   145,   146,   147,    -1,   149,
+     150,   151,   152,   153,   154,   155,   156,     3,     4,     5,
        6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
       16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
+      26,    27,    28,    29,    30,    31,    -1,    -1,    34,    35,
+      36,    37,    38,    39,    -1,    41,    42,    43,    44,    -1,
+      -1,    47,    48,    49,    50,    51,    52,    53,    54,    55,
+      56,    57,    58,    59,    60,    61,    62,    -1,    64,    65,
+      66,    67,    68,    69,    70,    71,    72,    73,    74,    75,
+      76,    77,    78,    79,    80,    81,    82,    -1,    -1,    -1,
       86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      96,    97,    98,    99,   100,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,   123,   124,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   135,
+      -1,    -1,   138,    -1,   140,    -1,    -1,   143,    -1,   145,
+     146,   147,    -1,   149,   150,   151,   152,   153,   154,   155,
+     156,     3,     4,     5,     6,     7,     8,     9,    -1,    -1,
+      12,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    20,    21,
+      22,    -1,    24,    25,    26,    27,    -1,    -1,    -1,    31,
+      32,    33,    34,    35,    36,    37,    38,    39,    -1,    41,
+      -1,    -1,    -1,    -1,    -1,    -1,    48,    49,    -1,    -1,
+      -1,    53,    54,    -1,    -1,    -1,    -1,    -1,    60,    61,
+      62,    63,    64,    65,    66,    67,    68,    69,    70,    71,
+      72,    73,    74,    75,    76,    77,    78,    79,    80,    81,
+      82,    -1,    84,    -1,    86,    87,    88,    89,    90,    91,
+      92,    93,    94,    95,    96,    97,    98,    99,   100,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+       3,     4,     5,     6,     7,     8,     9,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    20,    21,    22,
+      -1,    -1,    -1,    -1,    -1,   137,    -1,   139,   140,    -1,
+     142,   143,    -1,   145,   146,   147,   148,   149,   150,   151,
+     152,   153,   154,   155,     3,     4,     5,     6,     7,     8,
+       9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    20,    21,    22,    23,    24,    25,    26,    27,    -1,
+      -1,    -1,    31,    32,    33,    34,    35,    36,    37,    38,
+      39,    -1,    41,    42,    43,    -1,    -1,    -1,    -1,    48,
+      49,    50,    51,    52,    -1,    -1,    -1,    -1,   101,   102,
+     103,   104,   105,   106,   107,   108,   109,   110,   111,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,    -1,   124,    -1,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    10,    11,    12,    13,    14,    15,
-      16,    17,    18,    19,    20,    21,    22,    23,    24,    25,
-      26,    -1,    -1,    29,    30,    -1,    32,    -1,    34,    35,
-      36,    37,    38,    39,    40,    41,    42,    43,    44,    45,
-      46,    47,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    84,    -1,    86,    87,    88,
+      89,    90,    91,    92,    93,    94,    95,    96,    97,    98,
+      99,   100,   101,   102,   103,   104,   105,   106,   107,   108,
+     109,   110,   111,   112,   113,   114,   115,   116,   117,   118,
+     119,   120,   121,   122,   123,   124,   125,   126,   127,   128,
+     129,   130,   131,   132,    -1,   134,     3,     4,     5,     6,
+       7,     8,     9,    -1,    -1,    -1,   145,    -1,    -1,    -1,
+      -1,    -1,    -1,    20,    21,    22,    -1,    24,    25,    26,
+      27,    -1,    -1,    -1,    31,    32,    33,    34,    35,    36,
+      37,    38,    39,    -1,    41,    42,    43,    -1,    -1,    -1,
+      -1,    48,    49,    -1,    -1,    52,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   121,    -1,    -1,   124,    -1,
-     126,    -1,   128,    -1,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,   142,     3,     4,     5,
-       6,     7,     8,     9,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    19,    20,    21,    22,    -1,    -1,    -1,
-      26,    27,    28,    29,    30,    31,    32,    -1,    34,    -1,
-      -1,    -1,    38,    -1,    -1,    -1,    42,    43,    -1,    -1,
-      -1,    -1,    48,    49,    50,    51,    52,    53,    54,    55,
-      56,    57,    58,    59,    60,    61,    62,    63,    64,    65,
-      66,    67,    68,    69,    70,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,     3,     4,     5,     6,     7,     8,     9,    10,
-      11,    -1,    -1,    -1,    -1,    -1,     3,     4,     5,     6,
-       7,     8,     9,    10,    11,    -1,    -1,   123,    -1,   125,
-     126,    -1,   128,   129,   130,    -1,   132,   133,   134,   135,
-     136,   137,   138,   139,   140,   141,    -1,    -1,    35,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    84,    -1,    86,
+      87,    88,    89,    90,    91,    92,    93,    94,    95,    96,
+      97,    98,    99,   100,   101,   102,   103,   104,   105,   106,
+     107,   108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,
+      -1,     3,     4,     5,     6,     7,     8,     9,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    20,    21,
+      22,    -1,    24,    25,    26,    27,    -1,    -1,   145,    31,
+      32,    33,    34,    35,    36,    37,    38,    39,    -1,    41,
+      42,    43,    -1,    -1,    -1,    -1,    48,    49,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    85,    86,    87,    88,    89,    90,
-      91,    92,    93,    94,    95,    96,    97,    -1,    85,    86,
+      -1,    -1,    84,    -1,    86,    87,    88,    89,    90,    91,
+      92,    93,    94,    95,    96,    97,    98,    99,   100,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+      -1,    -1,    -1,    -1,    -1,    -1,     3,     4,     5,     6,
+       7,     8,     9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    20,    21,    22,    -1,    24,    25,    26,
+      27,    -1,    -1,   145,    31,    32,    33,    34,    35,    36,
+      37,    38,    39,    -1,    41,    42,    -1,    -1,    -1,    -1,
+      -1,    48,    49,    50,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    84,    -1,    86,
       87,    88,    89,    90,    91,    92,    93,    94,    95,    96,
-      97,     3,     4,     5,     6,     7,     8,     9,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    19,    20,    21,
-      22,   132,   133,   134,    26,    27,    28,    29,    30,    31,
-      32,    33,    34,    35,    36,   132,    38,    39,    -1,    41,
+      97,    98,    99,   100,   101,   102,   103,   104,   105,   106,
+     107,   108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,
+      -1,     3,     4,     5,     6,     7,     8,     9,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    20,    21,
+      22,    -1,    24,    25,    26,    27,    -1,    -1,   145,    31,
+      32,    33,    34,    35,    36,    37,    38,    39,    -1,    41,
+      42,    -1,    -1,    -1,    -1,    -1,    48,    49,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      72,    -1,    74,    75,    76,    77,    78,    79,    80,    81,
-      82,    83,    84,    85,    86,    87,    88,    89,    90,    91,
+      -1,    -1,    84,    -1,    86,    87,    88,    89,    90,    91,
       92,    93,    94,    95,    96,    97,    98,    99,   100,   101,
      102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
-     112,   113,   114,   115,   116,   117,   118,   119,   120,     3,
+      -1,     3,     4,     5,     6,     7,     8,     9,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    20,    21,
+      22,    -1,    -1,    -1,    -1,    -1,    28,    29,    30,    -1,
+      -1,    -1,    -1,   145,    36,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    48,     3,     4,     5,
+       6,     7,     8,     9,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    20,    21,    22,    23,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    84,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    48,    -1,    -1,    -1,    -1,    -1,    -1,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+      -1,    -1,     3,     4,     5,     6,     7,     8,     9,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    84,    20,
+      21,    22,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,   145,    -1,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,    -1,    48,    49,    -1,
+      51,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   134,    -1,
+      -1,    -1,   138,    -1,    -1,    -1,    -1,    -1,    -1,   145,
+      -1,    -1,    -1,    84,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+     101,   102,   103,   104,   105,   106,   107,   108,   109,   110,
+     111,     3,     4,     5,     6,     7,     8,     9,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    20,    21,
+      22,    23,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,   145,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    48,     3,     4,     5,
+       6,     7,     8,     9,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    20,    21,    22,    23,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    84,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    48,    -1,    -1,    -1,    -1,    -1,    -1,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+      -1,    -1,     3,     4,     5,     6,     7,     8,     9,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    84,    20,
+      21,    22,    -1,    -1,    -1,    -1,   138,    -1,    -1,    -1,
+      -1,    -1,    -1,   145,    -1,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,    -1,    48,    49,     3,
+       4,     5,     6,     7,     8,     9,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    20,    21,    22,    -1,
+      -1,     3,     4,     5,     6,     7,     8,     9,    -1,   145,
+      -1,    -1,    -1,    84,    -1,    -1,    -1,    -1,    20,    21,
+      22,    -1,    -1,    -1,    48,    -1,    -1,    -1,    -1,    -1,
+     101,   102,   103,   104,   105,   106,   107,   108,   109,   110,
+     111,    43,    -1,    -1,    -1,    -1,    48,    -1,    -1,     3,
+       4,     5,     6,     7,     8,     9,    -1,    -1,    -1,    -1,
+      84,    -1,    -1,    -1,    -1,    -1,    20,    21,    22,    -1,
+      -1,    -1,    -1,    -1,   145,    -1,    -1,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    42,    -1,
+      -1,    -1,    -1,    -1,    48,    -1,    -1,    -1,    -1,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+      -1,    -1,    -1,     3,     4,     5,     6,     7,     8,     9,
+      -1,   145,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      20,    21,    22,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,   145,    -1,    -1,    -1,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    48,     3,
        4,     5,     6,     7,     8,     9,    -1,    -1,    -1,    -1,
-     132,    -1,    -1,    -1,    -1,    19,    20,    21,    22,    -1,
-      -1,    -1,    26,    27,    28,    29,    30,    31,    32,    -1,
-      34,    35,    36,    -1,    38,    39,    40,    41,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    20,    21,    22,    23,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,   145,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    48,    -1,    -1,    -1,    -1,    -1,
+      -1,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    -1,    -1,     3,     4,     5,     6,     7,     8,
+       9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      84,    20,    21,    22,    23,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,   145,    -1,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,    -1,    48,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    72,    -1,
-      74,    75,    76,    77,    78,    79,    80,    81,    82,    83,
-      84,    85,    86,    87,    88,    89,    90,    91,    92,    93,
-      94,    95,    96,    97,    98,    99,   100,   101,   102,   103,
-     104,   105,   106,   107,   108,   109,   110,   111,   112,   113,
-     114,   115,   116,   117,   118,    -1,   120,     3,     4,     5,
-       6,     7,     8,     9,    -1,    -1,    -1,    -1,   132,    -1,
-      -1,    -1,    -1,    19,    20,    21,    22,    -1,    -1,    -1,
-      26,    27,    28,    29,    30,    31,    32,    -1,    34,    35,
-      36,    -1,    38,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+     134,   135,    -1,    -1,   138,     3,     4,     5,     6,     7,
+       8,     9,    -1,    -1,    -1,    84,    -1,    -1,    -1,    -1,
+      -1,    -1,    20,    21,    22,    23,    -1,    -1,    -1,    -1,
+      -1,    -1,   101,   102,   103,   104,   105,   106,   107,   108,
+     109,   110,   111,     3,     4,     5,     6,     7,     8,     9,
+      48,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      20,    21,    22,    23,    -1,   134,    -1,    -1,    -1,   138,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,     3,     4,     5,     6,     7,     8,     9,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    19,    20,
-      21,    22,    -1,    -1,    -1,    26,    27,    28,    29,    30,
-      31,    32,    -1,    34,    35,    -1,   132,    38,    39,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    84,    -1,    48,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,   101,   102,   103,   104,   105,   106,   107,
+     108,   109,   110,   111,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    84,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,   135,    -1,    -1,
+     138,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    31,    32,    33,    34,    35,    36,    37,    38,
+      39,    40,    41,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    50,    -1,    -1,    -1,    -1,    -1,    -1,   138,    -1,
+      31,    32,    33,    34,    35,    36,    37,    38,    39,    40,
+      41,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    77,    50,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    86,    87,    88,
+      89,    90,    91,    92,    93,    94,    95,    96,    97,    98,
+      99,   100,    -1,    -1,    -1,    -1,    77,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    86,    87,    88,    89,    90,
+      91,    92,    93,    94,    95,    96,    97,    98,    99,   100,
+      -1,    -1,    -1,    -1,    -1,   134,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,   146,   147,     3,
+       4,     5,     6,     7,     8,     9,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    20,    21,    22,    -1,
+      24,    25,    26,    27,    -1,   146,   147,    31,    32,    33,
+      34,    35,    36,    37,    38,    39,    -1,    41,    42,    43,
+      -1,    -1,    -1,    -1,    48,    49,    -1,    -1,    52,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    72,    -1,    74,    75,    76,    77,    78,    79,    80,
-      81,    82,    83,    84,    85,    86,    87,    88,    89,    90,
-      91,    92,    93,    94,    95,    96,    97,     3,     4,     5,
-       6,     7,     8,     9,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    19,    20,    21,    22,    -1,    -1,    -1,
-      26,    27,    28,    29,    30,    31,    32,    -1,    34,    35,
-      -1,   132,    38,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    26,    27,    28,    29,    30,    31,    32,    33,    34,
-      -1,    -1,    -1,    -1,    39,    -1,    72,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    85,
-      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
-      96,    97,     3,     4,     5,     6,     7,     8,     9,    74,
-      75,    76,    77,    78,    79,    80,    81,    82,    83,    84,
-      -1,    -1,    23,    24,    25,    -1,    -1,    -1,    -1,    30,
-      -1,    -1,    -1,    -1,    -1,    -1,   132,    -1,    -1,    -1,
+      84,    -1,    86,    87,    88,    89,    90,    91,    92,    93,
+      94,    95,    96,    97,    98,    99,   100,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111,     3,     4,
+       5,     6,     7,     8,     9,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    20,    21,    22,    -1,    24,
+      25,    26,    27,    -1,    -1,    -1,    31,    32,    33,    34,
+      35,    36,    37,    38,    39,    -1,    41,    42,    -1,    -1,
+      -1,    -1,    -1,    48,    49,    50,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-       3,     4,     5,     6,     7,     8,     9,    10,    11,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,   133,   134,
-      -1,    72,    -1,    -1,    -1,    -1,     3,     4,     5,     6,
-       7,     8,     9,    -1,    85,    86,    87,    88,    89,    90,
-      91,    92,    93,    94,    95,    96,    97,    -1,     3,     4,
-       5,     6,     7,     8,     9,    10,    11,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,     3,     4,     5,     6,     7,     8,
-       9,   132,    85,    86,    87,    88,    89,    90,    91,    92,
-      93,    94,    95,    96,    97,    72,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    35,    36,    85,    86,
-      87,    88,    89,    90,    91,    92,    93,    94,    95,    96,
-      97,    -1,    -1,   126,    -1,    -1,    -1,    -1,    -1,   132,
-      85,    86,    87,    88,    89,    90,    91,    92,    93,    94,
-      95,    96,    97,    -1,   121,    -1,    -1,   124,     3,     4,
-       5,     6,     7,     8,     9,   132,    85,    86,    87,    88,
-      89,    90,    91,    92,    93,    94,    95,    96,    97,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,   132,    -1,    -1,
-      -1,    -1,    -1,    38,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,     3,     4,     5,     6,
-       7,     8,     9,   132,    -1,    -1,    -1,    -1,    -1,    -1,
-       3,     4,     5,     6,     7,     8,     9,    72,    -1,    -1,
-      -1,    -1,    -1,     3,     4,     5,     6,     7,     8,     9,
-      85,    86,    87,    88,    89,    90,    91,    92,    93,    94,
-      95,    96,    97,    36,     3,     4,     5,     6,     7,     8,
-       9,    -1,    -1,    -1,    -1,    35,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    72,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,   132,    85,    86,
-      87,    88,    89,    90,    91,    92,    93,    94,    95,    96,
-      97,    -1,    85,    86,    87,    88,    89,    90,    91,    92,
-      93,    94,    95,    96,    97,    85,    86,    87,    88,    89,
-      90,    91,    92,    93,    94,    95,    96,    97,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,   132,    85,    86,    87,    88,
-      89,    90,    91,    92,    93,    94,    95,    96,    97,   132,
-      26,    27,    28,    29,    30,    31,    32,    33,    34,    -1,
-      -1,    -1,   132,    39,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    84,
+      -1,    86,    87,    88,    89,    90,    91,    92,    93,    94,
+      95,    96,    97,    98,    99,   100,   101,   102,   103,   104,
+     105,   106,   107,   108,   109,   110,   111,     3,     4,     5,
+       6,     7,     8,     9,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    20,    21,    22,    -1,    24,    25,
+      26,    27,    -1,    -1,    -1,    31,    32,    33,    34,    35,
+      36,    37,    38,    39,    -1,    41,    42,    -1,    -1,    -1,
+      -1,    -1,    48,    49,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,   132,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    74,    75,
-      76,    77,    78,    79,    80,    81,    82,    83,    84,    42,
-      43,    -1,    -1,    -1,    -1,    48,    49,    50,    51,    52,
-      53,    54,    55,    56,    57,    58,    59,    60,    61,    62,
-      63,    64,    65,    66,    67,    68,    69,    70,    -1,    -1,
-      -1,    -1,    -1,    -1,   120,     3,     4,     5,     6,     7,
-       8,     9,    -1,    -1,    -1,    -1,    -1,   133,   134,    -1,
-      -1,    19,    20,    21,    22,    -1,    -1,    -1,    26,    27,
-      28,    29,    30,    31,    32,    -1,    34,    35,    36,    -1,
-      38,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-     123,    -1,   125,   126,    -1,   128,   129,   130,    -1,   132,
-     133,   134,   135,   136,   137,   138,   139,   140,   141,    -1,
-      -1,    -1,    -1,    -1,    72,    -1,    74,    75,    76,    77,
-      78,    79,    80,    81,    82,    83,    84,    85,    86,    87,
-      88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
-       3,     4,     5,     6,     7,     8,     9,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    19,    20,    21,    22,
-      -1,    -1,    -1,    26,    27,    28,    29,    30,    31,    32,
-      -1,    34,    35,    -1,    -1,    38,    39,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    84,    -1,
+      86,    87,    88,    89,    90,    91,    92,    93,    94,    95,
+      96,    97,    98,    99,   100,   101,   102,   103,   104,   105,
+     106,   107,   108,   109,   110,   111,     3,     4,     5,     6,
+       7,     8,     9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    20,    21,    22,    -1,    24,    25,    26,
+      27,    -1,    -1,    -1,    31,    32,    33,    34,    35,    36,
+      37,    38,    39,    -1,    41,    -1,    -1,    -1,    -1,    -1,
+      -1,    48,    49,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    72,
-      -1,    74,    75,    76,    77,    78,    79,    80,    81,    82,
-      83,    84,    85,    86,    87,    88,    89,    90,    91,    92,
-      93,    94,    95,    96,    97,     3,     4,     5,     6,     7,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    84,    -1,    86,
+      87,    88,    89,    90,    91,    92,    93,    94,    95,    96,
+      97,    98,    99,   100,   101,   102,   103,   104,   105,   106,
+     107,   108,   109,   110,   111,     3,     4,     5,     6,     7,
        8,     9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    19,    20,    21,    22,    -1,    -1,    -1,    26,    27,
-      28,    29,    30,    31,    32,    -1,    34,    35,    -1,    -1,
-      38,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    20,    21,    22,    -1,    24,    -1,    26,    27,
+      -1,    -1,    -1,    31,    32,    33,    34,    35,    36,    37,
+      38,    39,    -1,    41,    -1,    -1,    -1,    -1,    -1,    -1,
+      48,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    72,    -1,    74,    75,    76,    77,
-      78,    79,    80,    81,    82,    83,    84,    85,    86,    87,
+      -1,    -1,    -1,    -1,    -1,    -1,    84,    -1,    86,    87,
       88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
-       3,     4,     5,     6,     7,     8,     9,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    19,    20,    21,    22,
-      -1,    -1,    -1,    26,    27,    28,    29,    30,    31,    32,
-      -1,    34,    -1,    -1,    -1,    38,    -1,    -1,    -1,    -1,
+      98,    99,   100,   101,   102,   103,   104,   105,   106,   107,
+     108,   109,   110,   111,     3,     4,     5,     6,     7,     8,
+       9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    20,    21,    22,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    31,    32,    33,    34,    35,    36,    37,    38,
+      39,    -1,    41,    -1,    -1,    -1,    -1,    -1,    -1,    48,
+      49,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    72,
-      -1,    74,    75,    76,    77,    78,    79,    80,    81,    82,
-      83,    84,    85,    86,    87,    88,    89,    90,    91,    92,
-      93,    94,    95,    96,    97,     3,     4,     5,     6,     7,
-       8,     9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      -1,    19,    20,    21,    22,    -1,    -1,    -1,    26,    27,
-      28,    29,    30,    31,    32,    -1,    34,    -1,    -1,    -1,
-      38,     3,     4,     5,     6,     7,     8,     9,    -1,    -1,
-      -1,    -1,    -1,    -1,    -1,    -1,    -1,    19,    -1,    21,
-      22,    -1,    -1,    -1,    26,    27,    28,    29,    30,    31,
-      32,    -1,    34,    -1,    72,    -1,    74,    75,    76,    77,
-      78,    79,    80,    81,    82,    83,    84,    85,    86,    87,
-      88,    89,    90,    91,    92,    93,    94,    95,    96,    97,
+      -1,    -1,    -1,    -1,    -1,    84,    -1,    86,    87,    88,
+      89,    90,    91,    92,    93,    94,    95,    96,    97,    98,
+      99,   100,   101,   102,   103,   104,   105,   106,   107,   108,
+     109,   110,   111,     3,     4,     5,     6,     7,     8,     9,
       -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
-      72,    -1,    74,    75,    76,    77,    78,    79,    80,    81,
-      82,    83,    84,    85,    86,    87,    88,    89,    90,    91,
-      92,    93,    94,    95,    96,    97
+      20,    21,    22,    23,     3,     4,     5,     6,     7,     8,
+       9,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    20,    21,    22,    -1,    -1,    -1,    -1,    48,     3,
+       4,     5,     6,     7,     8,     9,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    43,    -1,    20,    21,    22,    48,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    84,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    48,    -1,    -1,    -1,    -1,    -1,
+      -1,   101,   102,   103,   104,   105,   106,   107,   108,   109,
+     110,   111,    -1,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,   101,   102,   103,   104,   105,   106,   107,   108,
+     109,   110,   111,    -1,    -1,    -1,    -1,    -1,    -1,    -1,
+      -1,    -1,    -1,    -1,    -1,    -1,    -1,   101,   102,   103,
+     104,   105,   106,   107,   108,   109,   110,   111
 };
 
 /* YYSTOS[STATE-NUM] -- The (internal number of the) accessing
    symbol of state STATE-NUM.  */
-static const yytype_uint16 yystos[] =
+static const unsigned short int yystos[] =
 {
-       0,   144,   145,     0,   146,     3,     4,     5,     6,     7,
-       8,     9,    19,    20,    21,    22,    26,    27,    28,    29,
-      30,    31,    32,    34,    35,    36,    38,    39,    40,    41,
-      72,    74,    75,    76,    77,    78,    79,    80,    81,    82,
-      83,    84,    85,    86,    87,    88,    89,    90,    91,    92,
-      93,    94,    95,    96,    97,    98,    99,   100,   101,   102,
-     103,   104,   105,   106,   107,   108,   109,   110,   111,   112,
-     113,   114,   115,   116,   117,   118,   120,   132,   147,   148,
-     149,   150,   152,   153,   154,   155,   156,   158,   161,   175,
-     176,   178,   186,   187,   196,   198,   199,   216,   217,   218,
-     219,   222,   223,   226,   232,   259,   289,   290,   291,   292,
-     294,   295,   296,   298,   299,   302,   303,   304,   305,   306,
-     308,   309,   312,   319,   320,   321,   327,     3,     4,     5,
-       6,     7,     8,     9,    85,    86,    87,    88,    89,    90,
-      91,    92,    93,    94,    95,    96,    97,   289,   291,   292,
-     296,    12,   128,   313,     3,     4,     5,     6,     7,     8,
-       9,    34,    85,    86,    87,    88,    89,    90,    91,    92,
-      93,    94,    95,    96,    97,   156,   161,   176,   291,   296,
-     302,   308,     3,     4,     5,     6,     7,     8,     9,   121,
-     299,    38,    40,   197,   289,   292,   295,   126,   126,   126,
-     126,   126,   126,   126,   126,   126,   126,   126,   126,   126,
-     126,   126,   126,   126,   126,   126,   126,   126,     3,     4,
-       5,     6,     7,     8,     9,   300,   157,   160,   289,   300,
-     121,    22,   148,   154,   155,   183,   199,   216,   302,   308,
-     120,   121,   237,   237,   237,   237,   126,   161,   302,   308,
-     120,    36,   223,   240,   241,   244,   290,   294,   296,   298,
-     295,   290,   291,   295,   128,   156,   161,   176,   187,   223,
-     292,   303,   312,    10,    11,   227,   229,   231,   232,   233,
-     238,   240,   260,   268,   271,   279,   292,   296,   298,   133,
-     134,   265,   322,   323,   324,   326,   310,   296,   121,   129,
-     200,   289,    12,   300,   289,   300,   156,   176,     3,     4,
-       5,     6,     7,     8,     9,   191,   192,   195,   268,   279,
-     298,   375,   123,   151,   289,   289,   120,   226,   231,   232,
-     292,   298,   329,   332,   333,   298,   298,   337,   298,   298,
-     298,   298,   298,   298,   298,   298,   298,   298,   298,   298,
-     298,   298,    33,    39,   261,   264,   265,   301,   303,   321,
-     121,   124,   168,   169,   120,   159,   261,   177,   289,   302,
-     292,   298,   375,   224,   289,   293,   126,   242,   295,   295,
-     297,   300,   289,   300,   223,   241,   311,   228,   126,    42,
-      43,    48,    49,    50,    51,    52,    53,    54,    55,    56,
-      57,    58,    59,    60,    61,    62,    63,    64,    65,    66,
-      67,    68,    69,    70,   123,   125,   126,   128,   129,   130,
-     132,   133,   134,   135,   136,   137,   138,   139,   140,   141,
-     351,   352,   234,   126,   262,   123,   256,   257,   268,   277,
-     322,   281,   282,   283,   227,   233,   296,   124,   325,   323,
-     324,   300,   145,   201,   203,   204,   206,   208,   188,   190,
-     265,   188,   300,   300,   189,   126,     3,     4,     5,     6,
-       7,     8,     9,    10,    11,    12,    13,    14,    15,    16,
-      17,    18,    19,    20,    21,    22,    23,    24,    25,    26,
-      29,    30,    32,    34,    35,    36,    37,    38,    39,    40,
-      41,    42,    43,    44,    45,    46,    47,    71,    72,    73,
-     120,   121,   122,   123,   124,   125,   126,   128,   129,   130,
-     142,   319,   352,   353,   354,   377,   378,   379,   380,   381,
-     382,   291,   296,   121,   120,   226,   231,   125,   298,   298,
-     298,   125,   125,   298,   125,   125,   125,   125,   125,   125,
-     125,   125,   125,   125,   125,   127,   127,   125,   120,   262,
-     260,   162,    23,    24,    25,    30,   170,   171,   174,   289,
-     168,   120,   179,   180,   181,   298,   298,   122,   249,   250,
-     296,   299,   245,   124,   246,   361,   261,   120,   261,   300,
-     234,   230,   130,   130,   127,   131,    26,    37,   123,   235,
-     239,   120,   125,   258,   127,   268,   130,   284,   285,    13,
-      14,    15,   280,   306,   307,   122,   125,   129,    19,    21,
-      22,   291,   296,   302,   314,   315,   318,   319,    20,    38,
-     211,   199,   120,   189,   195,   120,   188,   188,   120,   125,
-     193,   375,   375,   375,   120,   145,   328,   125,   127,   127,
-     334,   336,   125,   308,   340,   342,   344,   346,   341,   343,
-     345,   347,   348,   349,   289,    23,    24,    25,   122,   163,
-     164,   173,   174,   125,    30,   172,   122,   125,   123,   123,
-     127,   251,   253,   254,   295,   249,   247,   248,   289,   243,
-      10,    11,    72,   121,   123,   124,   125,   126,   128,   129,
-     130,   142,   320,   321,   352,   353,   354,   356,   363,   364,
-     365,   367,   369,   373,   120,   120,   249,   131,   131,   236,
-      18,   249,   263,   128,   129,   355,   356,   358,   269,   286,
-     285,   306,   202,   289,   289,   289,   312,    10,    11,   126,
-     266,   270,   278,   279,   265,   316,   266,    20,   120,   120,
-     190,   249,   122,   131,   127,   122,   308,   330,   308,   308,
-     338,   127,   308,   308,   308,   308,   308,   308,   308,   308,
-     308,   308,   125,   350,    33,    41,   119,   120,   153,   155,
-     165,   166,   167,   175,   186,   196,   199,   220,   221,   222,
-     241,   259,   289,   292,   296,   302,   308,   327,   124,   289,
-     171,   289,   181,   182,   376,   225,   125,   308,   127,   125,
-     381,   234,   371,   372,   374,   370,   366,   368,   127,   369,
-     127,   264,   358,   126,   272,   282,   287,   288,   203,   317,
-     205,   266,   276,   322,   281,   300,   207,   209,   127,   127,
-     308,   335,   127,   308,   127,   127,   127,   127,   127,   127,
-     127,   127,   125,   125,   127,   153,   161,   184,   199,   221,
-     296,   302,   308,   381,   155,   220,   302,   237,   120,   231,
-     241,   292,   233,   296,   355,   120,   378,   234,    71,   252,
-     266,   248,   357,   357,   362,   357,   361,   357,   273,   131,
-     355,   300,   123,   212,   213,   127,   266,   212,   266,   194,
-     331,   125,   339,    13,    13,   121,   124,   185,   289,   161,
-     184,   302,   237,   161,   120,   231,   253,   255,   123,   125,
-     127,   358,   359,   127,   120,   122,   359,   360,   127,   129,
-     131,   249,   214,   267,   210,   275,   127,   376,   127,   127,
-     127,   375,   376,   185,   289,   161,   185,   289,   120,   256,
-     127,   215,   363,   272,   212,    26,    28,    37,   127,   122,
-     120,   289,   185,   274,   363,   381,   376,   275,   120
-};
-
-#define yyerrok                (yyerrstatus = 0)
-#define yyclearin        (yychar = YYEMPTY)
-#define YYEMPTY                (-2)
-#define YYEOF                0
-
-#define YYACCEPT        goto yyacceptlab
-#define YYABORT                goto yyabortlab
-#define YYERROR                goto yyerrorlab
-
-
-/* Like YYERROR except do call yyerror.  This remains here temporarily
-   to ease the transition to the new meaning of YYERROR, for GCC.
-   Once GCC version 2 has supplanted version 1, this can go.  */
-
-#define YYFAIL                goto yyerrlab
-
-#define YYRECOVERING()  (!!yyerrstatus)
-
-#define YYBACKUP(Token, Value)                                        \
-do                                                                \
-  if (yychar == YYEMPTY && yylen == 1)                                \
-    {                                                                \
-      yychar = (Token);                                                \
-      yylval = (Value);                                                \
-      yytoken = YYTRANSLATE (yychar);                                \
-      YYPOPSTACK (1);                                                \
-      goto yybackup;                                                \
-    }                                                                \
-  else                                                                \
-    {                                                                \
-      yyerror (YY_("syntax error: cannot back up")); \
-      YYERROR;                                                        \
-    }                                                                \
-while (YYID (0))
-
+       0,   158,   159,     0,   160,   367,     3,     4,     5,     6,
+       7,     8,     9,    20,    21,    22,    23,    24,    25,    26,
+      27,    31,    32,    33,    34,    35,    36,    37,    38,    39,
+      41,    42,    43,    48,    49,    50,    51,    52,    84,    86,
+      87,    88,    89,    90,    91,    92,    93,    94,    95,    96,
+      97,    98,    99,   100,   101,   102,   103,   104,   105,   106,
+     107,   108,   109,   110,   111,   112,   113,   114,   115,   116,
+     117,   118,   119,   120,   121,   122,   123,   124,   125,   126,
+     127,   128,   129,   130,   131,   132,   134,   145,   161,   162,
+     163,   164,   165,   167,   168,   169,   170,   171,   173,   176,
+     191,   192,   193,   195,   196,   206,   207,   216,   218,   219,
+     221,   239,   240,   241,   242,   245,   246,   249,   255,   289,
+     319,   320,   321,   322,   324,   325,   326,   327,   329,   331,
+     332,   335,   336,   337,   338,   339,   341,   342,   345,   346,
+     357,   358,   359,   368,   371,   369,    24,    25,    12,    43,
+       3,     4,     5,     6,     7,     8,     9,    10,    11,    12,
+      13,    14,    15,    16,    17,    18,    19,    20,    21,    22,
+      23,    24,    25,    26,    27,    28,    29,    30,    31,    34,
+      35,    36,    37,    38,    39,    41,    42,    43,    44,    47,
+      48,    49,    50,    51,    52,    53,    54,    55,    56,    57,
+      58,    59,    60,    61,    62,    63,    64,    65,    66,    67,
+      68,    69,    70,    71,    72,    73,    74,    75,    76,    77,
+      78,    79,    80,    81,    82,    83,    84,    85,   101,   102,
+     103,   104,   105,   106,   107,   108,   109,   110,   111,   135,
+     137,   138,   139,   140,   142,   143,   145,   146,   147,   148,
+     149,   150,   151,   152,   153,   154,   155,   156,   358,   359,
+     396,   397,   398,   428,   429,   430,   431,   432,   330,   347,
+       3,     4,     5,     6,     7,     8,     9,    41,   171,   176,
+     193,   196,   321,   322,   327,   329,   335,   341,     3,     4,
+       5,     6,     7,     8,     9,   135,   332,     3,     4,     5,
+       6,     7,     8,     9,    20,    21,    22,    49,    51,   101,
+     102,   103,   104,   105,   106,   107,   108,   109,   110,   111,
+     217,   319,   321,   322,   326,   327,   329,   140,   140,   140,
+     140,   140,   140,   140,   140,   140,   140,   140,   140,   140,
+     140,   140,   140,   140,   140,   140,   140,   140,   333,   172,
+     367,   333,   135,   367,    43,    52,   162,   169,   170,   196,
+     202,   219,   221,   239,   335,   341,    45,   134,   135,   265,
+     266,   265,   265,   265,   140,   176,   335,   341,   134,   367,
+      43,   246,   270,   273,   320,   325,   327,   329,   178,   327,
+     329,   331,   332,   326,   320,   321,   326,   367,   326,   142,
+     171,   176,   193,   196,   207,   246,   322,   336,   345,   367,
+      10,    11,    83,   233,   290,   298,   300,    77,   146,   147,
+     295,   360,   361,   362,   363,   366,   343,   367,   367,   404,
+     135,   428,   424,   424,    63,   148,   222,   414,   424,   425,
+     424,    10,    11,   140,   418,   319,    12,   333,   367,   333,
+     367,   320,   171,   193,   211,   212,   215,   233,   298,   424,
+     137,   166,   319,   319,   134,   367,   249,   254,   255,   322,
+       3,     4,     5,     6,     7,     8,     9,   331,   373,   376,
+     377,   331,   331,   381,   331,   331,   331,   331,   331,   331,
+     331,   331,   331,   331,   331,   331,   331,   331,    40,    50,
+     291,   294,   295,   334,   336,   359,   135,   134,   138,   177,
+     178,   322,   326,   327,   329,   291,   194,   134,   138,   197,
+     319,   319,   367,   335,   233,   138,   275,   424,   247,   367,
+     323,   271,   140,   326,   326,   326,   328,   333,   367,   333,
+     367,   246,   270,   367,   344,   301,   250,   252,   254,   255,
+     256,   268,   309,   320,   322,   292,   137,   285,   286,   288,
+     233,   298,   307,   360,   367,   367,   367,   361,   363,   333,
+      23,    63,    84,   135,   137,   138,   139,   142,   143,   148,
+     156,   358,   359,   370,   396,   397,   398,   402,   403,   405,
+     406,   415,   418,   422,   159,   425,   134,   142,   144,   426,
+     427,   428,   136,   223,   225,   226,   228,   230,   144,   134,
+     427,   141,   420,   421,   419,   367,   208,   210,   295,   177,
+     208,   319,   333,   333,   209,   309,   320,   327,   329,   136,
+     321,   327,   135,   134,   137,    12,    53,    54,    63,   137,
+     139,   140,   142,   143,   148,   395,   396,   249,   254,   139,
+     331,   331,   331,   139,   139,   331,   139,   139,   139,   139,
+     139,   139,   139,   139,   139,   139,   139,   141,   141,   139,
+     134,   292,   290,   367,   180,   175,     3,   134,   179,   367,
+     178,   327,   329,   322,   134,   199,   200,   331,   198,   197,
+     367,   319,   320,   322,   276,   277,   319,   135,   136,   278,
+     279,   177,   327,   275,   274,   408,   291,   177,   291,   319,
+     333,   339,   340,   365,   251,   367,   257,   311,   312,   313,
+     367,   250,   256,   134,   139,   287,   422,   141,   298,   364,
+     417,   423,   416,   144,   136,   134,   144,   139,   414,    24,
+      26,   196,   321,   327,   329,   335,   352,   353,   356,   357,
+      25,    49,   234,   221,   404,   404,   404,   134,   209,   215,
+     134,   197,   208,   208,   134,   139,   140,   367,   134,   159,
+     220,     3,   143,   143,    63,   148,   141,   144,   372,   139,
+     141,   141,   378,   380,   139,   341,   384,   386,   388,   390,
+     385,   387,   389,   391,   392,   393,   319,    28,    29,    30,
+     136,   181,   182,    28,    29,    30,    36,   186,   187,   190,
+     319,   138,   367,   178,   136,   139,   137,   342,   134,   367,
+     331,   139,   431,   424,   141,    83,   280,   282,   283,   326,
+     272,   278,   137,   139,   142,   402,   410,   411,   412,   414,
+     134,   134,   134,   197,   339,   257,   140,     3,    31,    44,
+      47,    66,   137,   258,   260,   261,   262,   263,   143,   314,
+     315,   138,   140,   293,    63,   142,   148,   399,   400,   402,
+     412,   299,   365,   404,   409,   404,   144,   224,   319,   319,
+     319,   345,   233,   296,   300,   295,   354,   296,    25,   141,
+     141,   141,   134,   134,   210,   213,   136,   341,   144,   144,
+     341,   374,   341,   341,   382,   141,   341,   341,   341,   341,
+     341,   341,   341,   341,   341,   341,   139,   394,   367,   138,
+     189,   190,   139,    36,   188,   233,   174,   367,   200,   201,
+     137,   203,   429,   277,   233,   136,   367,   139,   341,   257,
+     141,   413,   134,   262,   253,   259,   264,    18,    54,    55,
+     418,   316,   315,    13,    14,    15,   310,   269,   294,   401,
+     400,   140,   302,   312,   144,   134,   136,   406,   407,   144,
+     225,   355,   308,   309,   227,   367,   333,   229,   231,   278,
+     296,   141,   341,   379,   141,   341,   141,   141,   141,   141,
+     141,   141,   141,   141,   139,   139,   141,    40,    41,    52,
+     133,   134,   163,   168,   170,   183,   184,   185,   191,   192,
+     206,   216,   219,   221,   243,   244,   245,   270,   289,   319,
+     320,   322,   335,   341,   371,   319,   367,   319,   186,   399,
+     425,   134,   267,   248,    83,   281,   296,   262,   367,   408,
+     278,   418,   335,   346,   348,   349,   317,   318,   278,   403,
+     303,   144,   333,   302,   137,   235,   236,   296,   306,   360,
+     235,   296,   141,   134,   375,   139,   383,    13,    13,   168,
+     176,   204,   221,   244,   320,   335,   341,   431,   170,   184,
+     219,   221,   243,   335,   265,   134,   254,   270,   322,   233,
+     233,   187,   233,    46,   257,   282,   284,   414,   141,   346,
+     350,   295,   144,   399,   141,   278,   237,   141,   296,   232,
+     214,   141,   425,   141,   141,   141,   367,   176,   204,   335,
+     265,   176,   233,   335,   367,   254,   256,   431,   262,   285,
+     367,   351,   333,   367,   367,   141,   238,   410,   297,   235,
+     305,   141,   135,   138,   177,   205,   367,   176,   367,   134,
+     367,   135,   333,   367,   410,   302,    31,    33,    44,    47,
+     424,   425,   179,   177,   367,   177,   205,   134,   424,   304,
+     431,   431,   136,   134,   205,   177,   179,   136,   305,   425,
+     205,   134
+};
 
-#define YYTERROR        1
-#define YYERRCODE        256
+

+/* Prevent warning if -Wmissing-prototypes.  */
+int yyparse (void);
 
+/* Error token number */
+#define YYTERROR 1
 
 /* YYLLOC_DEFAULT -- Set CURRENT to span from RHS[1] to RHS[N].
    If N is 0, then set CURRENT to the empty location which ends
    the previous symbol: RHS[0] (always defined).  */
 
-#define YYRHSLOC(Rhs, K) ((Rhs)[K])
+
 #ifndef YYLLOC_DEFAULT
-# define YYLLOC_DEFAULT(Current, Rhs, N)                                \
-    do                                                                        \
-      if (YYID (N))                                                    \
-        {                                                                \
-          (Current).first_line   = YYRHSLOC (Rhs, 1).first_line;        \
-          (Current).first_column = YYRHSLOC (Rhs, 1).first_column;        \
-          (Current).last_line    = YYRHSLOC (Rhs, N).last_line;                \
-          (Current).last_column  = YYRHSLOC (Rhs, N).last_column;        \
-        }                                                                \
-      else                                                                \
-        {                                                                \
-          (Current).first_line   = (Current).last_line   =                \
-            YYRHSLOC (Rhs, 0).last_line;                                \
-          (Current).first_column = (Current).last_column =                \
-            YYRHSLOC (Rhs, 0).last_column;                                \
-        }                                                                \
-    while (YYID (0))
+# define YYLLOC_DEFAULT(Current, Rhs, N) ((void)Rhs)
 #endif
 
 
-/* YY_LOCATION_PRINT -- Print the location on the stream.
-   This macro was not mandated originally: define only if we know
-   we won't break user code: when these are the locations we know.  */
-
 #ifndef YY_LOCATION_PRINT
-# if YYLTYPE_IS_TRIVIAL
-#  define YY_LOCATION_PRINT(File, Loc)                        \
-     fprintf (File, "%d.%d-%d.%d",                        \
-              (Loc).first_line, (Loc).first_column,        \
-              (Loc).last_line,  (Loc).last_column)
-# else
-#  define YY_LOCATION_PRINT(File, Loc) ((void) 0)
-# endif
+# define YY_LOCATION_PRINT(File, Loc) ((void) 0)
 #endif
 
 
 /* YYLEX -- calling `yylex' with the right arguments.  */
+#define YYLEX yylex ()
 
-#ifdef YYLEX_PARAM
-# define YYLEX yylex (YYLEX_PARAM)
-#else
-# define YYLEX yylex ()
-#endif
+YYSTYPE yylval;
+
+YYLTYPE yylloc;
+
+int yynerrs;
+int yychar;
+
+static const int YYEOF = 0;
+static const int YYEMPTY = -2;
+
+typedef enum { yyok, yyaccept, yyabort, yyerr } YYRESULTTAG;
+
+#define YYCHK(YYE)                                                             \
+   do { YYRESULTTAG yyflag = YYE; if (yyflag != yyok) return yyflag; }             \
+   while (YYID (0))
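The YYRESULTTAG values and the YYCHK macro added here give the GLR helper routines a uniform way to bail out: any step that does not report yyok makes its caller return the same tag immediately. A minimal standalone sketch of that propagation pattern, with invented step()/run() helpers rather than the parser's own routines:

    #include <stdio.h>

    typedef enum { yyok, yyaccept, yyabort, yyerr } YYRESULTTAG;

    /* Propagate any non-yyok tag to the caller, mirroring YYCHK.  */
    #define CHK(E) do { YYRESULTTAG t_ = (E); if (t_ != yyok) return t_; } while (0)

    static YYRESULTTAG step (int fail)      /* hypothetical sub-step */
    {
      return fail ? yyerr : yyok;
    }

    static YYRESULTTAG run (int fail_second)
    {
      CHK (step (0));             /* succeeds, execution continues   */
      CHK (step (fail_second));   /* on failure, run() returns yyerr */
      return yyok;
    }

    int main (void)
    {
      printf ("%d %d\n", run (0), run (1));   /* prints "0 3" (yyok, yyerr) */
      return 0;
    }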
 
-/* Enable debugging if requested.  */
 #if YYDEBUG
 
 # ifndef YYFPRINTF
-#  include <stdio.h> /* INFRINGES ON USER NAME SPACE */
 #  define YYFPRINTF fprintf
 # endif
 
@@ -3980,34 +5744,14 @@ do {                                                \
     YYFPRINTF Args;                                \
 } while (YYID (0))
 
-# define YY_SYMBOL_PRINT(Title, Type, Value, Location)                          \
-do {                                                                          \
-  if (yydebug)                                                                  \
-    {                                                                          \
-      YYFPRINTF (stderr, "%s ", Title);                                          \
-      yy_symbol_print (stderr,                                                  \
-                  Type, Value); \
-      YYFPRINTF (stderr, "\n");                                                  \
-    }                                                                          \
-} while (YYID (0))
-
 
 /*--------------------------------.
 | Print this symbol on YYOUTPUT.  |
 `--------------------------------*/
 
 /*ARGSUSED*/
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
 static void
 yy_symbol_value_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep)
-#else
-static void
-yy_symbol_value_print (yyoutput, yytype, yyvaluep)
-    FILE *yyoutput;
-    int yytype;
-    YYSTYPE const * const yyvaluep;
-#endif
 {
   if (!yyvaluep)
     return;
@@ -4029,17 +5773,8 @@ yy_symbol_value_print (yyoutput, yytype, yyvaluep)
 | Print this symbol on YYOUTPUT.  |
 `--------------------------------*/
 
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
 static void
 yy_symbol_print (FILE *yyoutput, int yytype, YYSTYPE const * const yyvaluep)
-#else
-static void
-yy_symbol_print (yyoutput, yytype, yyvaluep)
-    FILE *yyoutput;
-    int yytype;
-    YYSTYPE const * const yyvaluep;
-#endif
 {
   if (yytype < YYNTOKENS)
     YYFPRINTF (yyoutput, "token %s (", yytname[yytype]);
@@ -4050,85 +5785,27 @@ yy_symbol_print (yyoutput, yytype, yyvaluep)
   YYFPRINTF (yyoutput, ")");
 }
 
-/*------------------------------------------------------------------.
-| yy_stack_print -- Print the state stack from its BOTTOM up to its |
-| TOP (included).                                                   |
-`------------------------------------------------------------------*/
-
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-static void
-yy_stack_print (yytype_int16 *yybottom, yytype_int16 *yytop)
-#else
-static void
-yy_stack_print (yybottom, yytop)
-    yytype_int16 *yybottom;
-    yytype_int16 *yytop;
-#endif
-{
-  YYFPRINTF (stderr, "Stack now");
-  for (; yybottom <= yytop; yybottom++)
-    {
-      int yybot = *yybottom;
-      YYFPRINTF (stderr, " %d", yybot);
-    }
-  YYFPRINTF (stderr, "\n");
-}
-
-# define YY_STACK_PRINT(Bottom, Top)                                \
-do {                                                                \
-  if (yydebug)                                                        \
-    yy_stack_print ((Bottom), (Top));                                \
-} while (YYID (0))
-
-
-/*------------------------------------------------.
-| Report that the YYRULE is going to be reduced.  |
-`------------------------------------------------*/
-
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-static void
-yy_reduce_print (YYSTYPE *yyvsp, int yyrule)
-#else
-static void
-yy_reduce_print (yyvsp, yyrule)
-    YYSTYPE *yyvsp;
-    int yyrule;
-#endif
-{
-  int yynrhs = yyr2[yyrule];
-  int yyi;
-  unsigned long int yylno = yyrline[yyrule];
-  YYFPRINTF (stderr, "Reducing stack by rule %d (line %lu):\n",
-             yyrule - 1, yylno);
-  /* The symbols being reduced.  */
-  for (yyi = 0; yyi < yynrhs; yyi++)
-    {
-      YYFPRINTF (stderr, "   $%d = ", yyi + 1);
-      yy_symbol_print (stderr, yyrhs[yyprhs[yyrule] + yyi],
-                       &(yyvsp[(yyi + 1) - (yynrhs)])
-                                              );
-      YYFPRINTF (stderr, "\n");
-    }
-}
-
-# define YY_REDUCE_PRINT(Rule)                \
-do {                                        \
-  if (yydebug)                                \
-    yy_reduce_print (yyvsp, Rule); \
+# define YY_SYMBOL_PRINT(Title, Type, Value, Location)                            \
+do {                                                                            \
+  if (yydebug)                                                                    \
+    {                                                                            \
+      YYFPRINTF (stderr, "%s ", Title);                                            \
+      yy_symbol_print (stderr, Type,                                            \
+                       Value);  \
+      YYFPRINTF (stderr, "\n");                                                    \
+    }                                                                            \
 } while (YYID (0))
 
 /* Nonzero means print parse trace.  It is left uninitialized so that
    multiple parsers can coexist.  */
 int yydebug;
+
 #else /* !YYDEBUG */
+
 # define YYDPRINTF(Args)
 # define YY_SYMBOL_PRINT(Title, Type, Value, Location)
-# define YY_STACK_PRINT(Bottom, Top)
-# define YY_REDUCE_PRINT(Rule)
-#endif /* !YYDEBUG */
 
+#endif /* !YYDEBUG */
 
 /* YYINITDEPTH -- initial size of the parser's stacks.  */
 #ifndef        YYINITDEPTH
@@ -4139,555 +5816,321 @@ int yydebug;
    if the built-in stack extension method is used).
 
    Do not make this value too large; the results are undefined if
-   YYSTACK_ALLOC_MAXIMUM < YYSTACK_BYTES (YYMAXDEPTH)
+   SIZE_MAX < YYMAXDEPTH * sizeof (GLRStackItem)
    evaluated with infinite-precision integer arithmetic.  */
 
 #ifndef YYMAXDEPTH
 # define YYMAXDEPTH 10000
 #endif
 
-

-
-#if YYERROR_VERBOSE
-
-# ifndef yystrlen
-#  if defined __GLIBC__ && defined _STRING_H
-#   define yystrlen strlen
-#  else
-/* Return the length of YYSTR.  */
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-static YYSIZE_T
-yystrlen (const char *yystr)
-#else
-static YYSIZE_T
-yystrlen (yystr)
-    const char *yystr;
-#endif
-{
-  YYSIZE_T yylen;
-  for (yylen = 0; yystr[yylen]; yylen++)
-    continue;
-  return yylen;
-}
-#  endif
-# endif
-
-# ifndef yystpcpy
-#  if defined __GLIBC__ && defined _STRING_H && defined _GNU_SOURCE
-#   define yystpcpy stpcpy
-#  else
-/* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in
-   YYDEST.  */
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-static char *
-yystpcpy (char *yydest, const char *yysrc)
-#else
-static char *
-yystpcpy (yydest, yysrc)
-    char *yydest;
-    const char *yysrc;
-#endif
-{
-  char *yyd = yydest;
-  const char *yys = yysrc;
-
-  while ((*yyd++ = *yys++) != '\0')
-    continue;
-
-  return yyd - 1;
-}
-#  endif
-# endif
-
-# ifndef yytnamerr
-/* Copy to YYRES the contents of YYSTR after stripping away unnecessary
-   quotes and backslashes, so that it's suitable for yyerror.  The
-   heuristic is that double-quoting is unnecessary unless the string
-   contains an apostrophe, a comma, or backslash (other than
-   backslash-backslash).  YYSTR is taken from yytname.  If YYRES is
-   null, do not copy; instead, return the length of what the result
-   would have been.  */
-static YYSIZE_T
-yytnamerr (char *yyres, const char *yystr)
-{
-  if (*yystr == '"')
-    {
-      YYSIZE_T yyn = 0;
-      char const *yyp = yystr;
-
-      for (;;)
-        switch (*++yyp)
-          {
-          case '\'':
-          case ',':
-            goto do_not_strip_quotes;
-
-          case '\\':
-            if (*++yyp != '\\')
-              goto do_not_strip_quotes;
-            /* Fall through.  */
-          default:
-            if (yyres)
-              yyres[yyn] = *yyp;
-            yyn++;
-            break;
-
-          case '"':
-            if (yyres)
-              yyres[yyn] = '\0';
-            return yyn;
-          }
-    do_not_strip_quotes: ;
-    }
-
-  if (! yyres)
-    return yystrlen (yystr);
-
-  return yystpcpy (yyres, yystr) - yyres;
-}
-# endif
-
-/* Copy into YYRESULT an error message about the unexpected token
-   YYCHAR while in state YYSTATE.  Return the number of bytes copied,
-   including the terminating null byte.  If YYRESULT is null, do not
-   copy anything; just return the number of bytes that would be
-   copied.  As a special case, return 0 if an ordinary "syntax error"
-   message will do.  Return YYSIZE_MAXIMUM if overflow occurs during
-   size calculation.  */
-static YYSIZE_T
-yysyntax_error (char *yyresult, int yystate, int yychar)
-{
-  int yyn = yypact[yystate];
-
-  if (! (YYPACT_NINF < yyn && yyn <= YYLAST))
-    return 0;
-  else
-    {
-      int yytype = YYTRANSLATE (yychar);
-      YYSIZE_T yysize0 = yytnamerr (0, yytname[yytype]);
-      YYSIZE_T yysize = yysize0;
-      YYSIZE_T yysize1;
-      int yysize_overflow = 0;
-      enum { YYERROR_VERBOSE_ARGS_MAXIMUM = 5 };
-      char const *yyarg[YYERROR_VERBOSE_ARGS_MAXIMUM];
-      int yyx;
-
-# if 0
-      /* This is so xgettext sees the translatable formats that are
-         constructed on the fly.  */
-      YY_("syntax error, unexpected %s");
-      YY_("syntax error, unexpected %s, expecting %s");
-      YY_("syntax error, unexpected %s, expecting %s or %s");
-      YY_("syntax error, unexpected %s, expecting %s or %s or %s");
-      YY_("syntax error, unexpected %s, expecting %s or %s or %s or %s");
-# endif
-      char *yyfmt;
-      char const *yyf;
-      static char const yyunexpected[] = "syntax error, unexpected %s";
-      static char const yyexpecting[] = ", expecting %s";
-      static char const yyor[] = " or %s";
-      char yyformat[sizeof yyunexpected
-                    + sizeof yyexpecting - 1
-                    + ((YYERROR_VERBOSE_ARGS_MAXIMUM - 2)
-                       * (sizeof yyor - 1))];
-      char const *yyprefix = yyexpecting;
-
-      /* Start YYX at -YYN if negative to avoid negative indexes in
-         YYCHECK.  */
-      int yyxbegin = yyn < 0 ? -yyn : 0;
-
-      /* Stay within bounds of both yycheck and yytname.  */
-      int yychecklim = YYLAST - yyn + 1;
-      int yyxend = yychecklim < YYNTOKENS ? yychecklim : YYNTOKENS;
-      int yycount = 1;
-
-      yyarg[0] = yytname[yytype];
-      yyfmt = yystpcpy (yyformat, yyunexpected);
-
-      for (yyx = yyxbegin; yyx < yyxend; ++yyx)
-        if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR)
-          {
-            if (yycount == YYERROR_VERBOSE_ARGS_MAXIMUM)
-              {
-                yycount = 1;
-                yysize = yysize0;
-                yyformat[sizeof yyunexpected - 1] = '\0';
-                break;
-              }
-            yyarg[yycount++] = yytname[yyx];
-            yysize1 = yysize + yytnamerr (0, yytname[yyx]);
-            yysize_overflow |= (yysize1 < yysize);
-            yysize = yysize1;
-            yyfmt = yystpcpy (yyfmt, yyprefix);
-            yyprefix = yyor;
-          }
-
-      yyf = YY_(yyformat);
-      yysize1 = yysize + yystrlen (yyf);
-      yysize_overflow |= (yysize1 < yysize);
-      yysize = yysize1;
-
-      if (yysize_overflow)
-        return YYSIZE_MAXIMUM;
-
-      if (yyresult)
-        {
-          /* Avoid sprintf, as that infringes on the user's name space.
-             Don't have undefined behavior even if the translation
-             produced a string with the wrong number of "%s"s.  */
-          char *yyp = yyresult;
-          int yyi = 0;
-          while ((*yyp = *yyf) != '\0')
-            {
-              if (*yyp == '%' && yyf[1] == 's' && yyi < yycount)
-                {
-                  yyp += yytnamerr (yyp, yyarg[yyi++]);
-                  yyf += 2;
-                }
-              else
-                {
-                  yyp++;
-                  yyf++;
-                }
-            }
-        }
-      return yysize;
-    }
-}
-#endif /* YYERROR_VERBOSE */
-

-
-/*-----------------------------------------------.
-| Release the memory associated to this symbol.  |
-`-----------------------------------------------*/
-
-/*ARGSUSED*/
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-static void
-yydestruct (const char *yymsg, int yytype, YYSTYPE *yyvaluep)
-#else
-static void
-yydestruct (yymsg, yytype, yyvaluep)
-    const char *yymsg;
-    int yytype;
-    YYSTYPE *yyvaluep;
-#endif
-{
-  YYUSE (yyvaluep);
-
-  if (!yymsg)
-    yymsg = "Deleting";
-  YY_SYMBOL_PRINT (yymsg, yytype, yyvaluep, yylocationp);
-
-  switch (yytype)
-    {
-
-      default:
-        break;
-    }
-}
-
-/* Prevent warnings from -Wmissing-prototypes.  */
-#ifdef YYPARSE_PARAM
-#if defined __STDC__ || defined __cplusplus
-int yyparse (void *YYPARSE_PARAM);
-#else
-int yyparse ();
-#endif
-#else /* ! YYPARSE_PARAM */
-#if defined __STDC__ || defined __cplusplus
-int yyparse (void);
-#else
-int yyparse ();
-#endif
-#endif /* ! YYPARSE_PARAM */
-
-
-/* The lookahead symbol.  */
-int yychar;
-
-/* The semantic value of the lookahead symbol.  */
-YYSTYPE yylval;
-
-/* Number of syntax errors so far.  */
-int yynerrs;
-
-
-
-/*-------------------------.
-| yyparse or yypush_parse.  |
-`-------------------------*/
-
-#ifdef YYPARSE_PARAM
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-int
-yyparse (void *YYPARSE_PARAM)
-#else
-int
-yyparse (YYPARSE_PARAM)
-    void *YYPARSE_PARAM;
-#endif
-#else /* ! YYPARSE_PARAM */
-#if (defined __STDC__ || defined __C99__FUNC__ \
-     || defined __cplusplus || defined _MSC_VER)
-int
-yyparse (void)
-#else
-int
-yyparse ()
-
-#endif
-#endif
-{
-
-
-    int yystate;
-    /* Number of tokens to shift before error messages enabled.  */
-    int yyerrstatus;
-
-    /* The stacks and their tools:
-       `yyss': related to states.
-       `yyvs': related to semantic values.
-
-       Refer to the stacks thru separate pointers, to allow yyoverflow
-       to reallocate them elsewhere.  */
-
-    /* The state stack.  */
-    yytype_int16 yyssa[YYINITDEPTH];
-    yytype_int16 *yyss;
-    yytype_int16 *yyssp;
-
-    /* The semantic value stack.  */
-    YYSTYPE yyvsa[YYINITDEPTH];
-    YYSTYPE *yyvs;
-    YYSTYPE *yyvsp;
-
-    YYSIZE_T yystacksize;
-
-  int yyn;
-  int yyresult;
-  /* Lookahead token as an internal (translated) token number.  */
-  int yytoken;
-  /* The variables used to return semantic value and location from the
-     action routines.  */
-  YYSTYPE yyval;
-
-#if YYERROR_VERBOSE
-  /* Buffer for error messages, and its allocated size.  */
-  char yymsgbuf[128];
-  char *yymsg = yymsgbuf;
-  YYSIZE_T yymsg_alloc = sizeof yymsgbuf;
+/* Minimum number of free items on the stack allowed after an
+   allocation.  This is to allow allocation and initialization
+   to be completed by functions that call yyexpandGLRStack before the
+   stack is expanded, thus insuring that all necessary pointers get
+   properly redirected to new data.  */
+#define YYHEADROOM 2
+
+#ifndef YYSTACKEXPANDABLE
+# if (! defined __cplusplus \
+      || (defined YYSTYPE_IS_TRIVIAL && YYSTYPE_IS_TRIVIAL))
+#  define YYSTACKEXPANDABLE 1
+# else
+#  define YYSTACKEXPANDABLE 0
+# endif
 #endif
 
-#define YYPOPSTACK(N)   (yyvsp -= (N), yyssp -= (N))
+#if YYSTACKEXPANDABLE
+# define YY_RESERVE_GLRSTACK(Yystack)                        \
+  do {                                                        \
+    if (Yystack->yyspaceLeft < YYHEADROOM)                \
+      yyexpandGLRStack (Yystack);                        \
+  } while (YYID (0))
+#else
+# define YY_RESERVE_GLRSTACK(Yystack)                        \
+  do {                                                        \
+    if (Yystack->yyspaceLeft < YYHEADROOM)                \
+      yyMemoryExhausted (Yystack);                        \
+  } while (YYID (0))
+#endif
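YYHEADROOM and YY_RESERVE_GLRSTACK guarantee a couple of free stack slots before any sequence of pushes, so the stack is only reallocated at points where no intermediate pointers into it are held. A small self-contained model of that reserve-then-push discipline (hypothetical Stack type, not the GLR stack itself):

    #include <stdio.h>
    #include <stdlib.h>

    #define HEADROOM 2   /* mirrors YYHEADROOM: items guaranteed free */

    typedef struct {
      int   *items;
      size_t used, capacity;
    } Stack;

    /* Grow before the next push sequence, like YY_RESERVE_GLRSTACK, so no
       reallocation can happen while pointers into the items are live.  */
    static void reserve (Stack *s)
    {
      if (s->capacity - s->used < HEADROOM)
        {
          s->capacity = s->capacity ? 2 * s->capacity : 4;
          s->items = (int *) realloc (s->items, s->capacity * sizeof *s->items);
          if (!s->items)
            exit (1);
        }
    }

    int main (void)
    {
      Stack s = { 0, 0, 0 };
      for (int i = 0; i < 10; i++)
        {
          reserve (&s);             /* safe point for reallocation        */
          s.items[s.used++] = i;    /* pushes cannot invalidate pointers  */
        }
      printf ("%zu items, last = %d\n", s.used, s.items[s.used - 1]);
      free (s.items);
      return 0;
    }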
 
-  /* The number of symbols on the RHS of the reduced rule.
-     Keep to zero when no symbol should be popped.  */
-  int yylen = 0;
 
-  yytoken = 0;
-  yyss = yyssa;
-  yyvs = yyvsa;
-  yystacksize = YYINITDEPTH;
+#if YYERROR_VERBOSE
 
-  YYDPRINTF ((stderr, "Starting parse\n"));
+# ifndef yystpcpy
+#  if defined __GLIBC__ && defined _STRING_H && defined _GNU_SOURCE
+#   define yystpcpy stpcpy
+#  else
+/* Copy YYSRC to YYDEST, returning the address of the terminating '\0' in
+   YYDEST.  */
+static char *
+yystpcpy (char *yydest, const char *yysrc)
+{
+  char *yyd = yydest;
+  const char *yys = yysrc;
 
-  yystate = 0;
-  yyerrstatus = 0;
-  yynerrs = 0;
-  yychar = YYEMPTY; /* Cause a token to be read.  */
+  while ((*yyd++ = *yys++) != '\0')
+    continue;
 
-  /* Initialize stack pointers.
-     Waste one element of value and location stack
-     so that they stay on the same level as the state stack.
-     The wasted elements are never initialized.  */
-  yyssp = yyss;
-  yyvsp = yyvs;
+  return yyd - 1;
+}
+#  endif
+# endif
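The yystpcpy fallback returns the address of the terminating '\0', which lets later code build error messages by appending piece after piece without rescanning the buffer. For illustration only, a standalone copy of the idiom under an assumed name:

    #include <stdio.h>

    /* Local stand-in for the stpcpy fallback: copy, return the end pointer. */
    static char *stpcpy_local (char *dest, const char *src)
    {
      while ((*dest = *src++) != '\0')
        dest++;
      return dest;
    }

    int main (void)
    {
      char buf[64];
      char *p = buf;
      p = stpcpy_local (p, "syntax error, unexpected ");
      p = stpcpy_local (p, "ID");        /* append without strlen/strcat */
      puts (buf);
      return 0;
    }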
 
-  goto yysetstate;
+# ifndef yytnamerr
+/* Copy to YYRES the contents of YYSTR after stripping away unnecessary
+   quotes and backslashes, so that it's suitable for yyerror.  The
+   heuristic is that double-quoting is unnecessary unless the string
+   contains an apostrophe, a comma, or backslash (other than
+   backslash-backslash).  YYSTR is taken from yytname.  If YYRES is
+   null, do not copy; instead, return the length of what the result
+   would have been.  */
+static size_t
+yytnamerr (char *yyres, const char *yystr)
+{
+  if (*yystr == '"')
+    {
+      size_t yyn = 0;
+      char const *yyp = yystr;
 
-/*------------------------------------------------------------.
-| yynewstate -- Push a new state, which is found in yystate.  |
-`------------------------------------------------------------*/
- yynewstate:
-  /* In all cases, when you get here, the value and location stacks
-     have just been pushed.  So pushing a state here evens the stacks.  */
-  yyssp++;
+      for (;;)
+        switch (*++yyp)
+          {
+          case '\'':
+          case ',':
+            goto do_not_strip_quotes;
 
- yysetstate:
-  *yyssp = yystate;
+          case '\\':
+            if (*++yyp != '\\')
+              goto do_not_strip_quotes;
+            /* Fall through.  */
+          default:
+            if (yyres)
+              yyres[yyn] = *yyp;
+            yyn++;
+            break;
 
-  if (yyss + yystacksize - 1 <= yyssp)
-    {
-      /* Get the current used size of the three stacks, in elements.  */
-      YYSIZE_T yysize = yyssp - yyss + 1;
+          case '"':
+            if (yyres)
+              yyres[yyn] = '\0';
+            return yyn;
+          }
+    do_not_strip_quotes: ;
+    }
 
-#ifdef yyoverflow
-      {
-        /* Give user a chance to reallocate the stack.  Use copies of
-           these so that the &'s don't force the real ones into
-           memory.  */
-        YYSTYPE *yyvs1 = yyvs;
-        yytype_int16 *yyss1 = yyss;
-
-        /* Each stack pointer address is followed by the size of the
-           data in use in that stack, in bytes.  This used to be a
-           conditional around just the two extra args, but that might
-           be undefined if yyoverflow is a macro.  */
-        yyoverflow (YY_("memory exhausted"),
-                    &yyss1, yysize * sizeof (*yyssp),
-                    &yyvs1, yysize * sizeof (*yyvsp),
-                    &yystacksize);
-
-        yyss = yyss1;
-        yyvs = yyvs1;
-      }
-#else /* no yyoverflow */
-# ifndef YYSTACK_RELOCATE
-      goto yyexhaustedlab;
-# else
-      /* Extend the stack our own way.  */
-      if (YYMAXDEPTH <= yystacksize)
-        goto yyexhaustedlab;
-      yystacksize *= 2;
-      if (YYMAXDEPTH < yystacksize)
-        yystacksize = YYMAXDEPTH;
+  if (! yyres)
+    return strlen (yystr);
 
-      {
-        yytype_int16 *yyss1 = yyss;
-        union yyalloc *yyptr =
-          (union yyalloc *) YYSTACK_ALLOC (YYSTACK_BYTES (yystacksize));
-        if (! yyptr)
-          goto yyexhaustedlab;
-        YYSTACK_RELOCATE (yyss_alloc, yyss);
-        YYSTACK_RELOCATE (yyvs_alloc, yyvs);
-#  undef YYSTACK_RELOCATE
-        if (yyss1 != yyssa)
-          YYSTACK_FREE (yyss1);
-      }
+  return yystpcpy (yyres, yystr) - yyres;
+}
 # endif
-#endif /* no yyoverflow */
 
-      yyssp = yyss + yysize - 1;
-      yyvsp = yyvs + yysize - 1;
+#endif /* !YYERROR_VERBOSE */
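yytnamerr above is written for two passes: called with a null destination it merely reports how many bytes the cleaned-up token name needs, and called again with a real buffer it performs the copy. A compact demonstration of that measure-then-fill idiom with a simplified render() stand-in (it does not reproduce yytnamerr's quote-stripping cases):

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* When out is NULL, only report the size needed; otherwise fill it.  */
    static size_t render (char *out, const char *name)
    {
      size_t n = strlen (name) + sizeof "unexpected " - 1;
      if (out)
        {
          strcpy (out, "unexpected ");
          strcat (out, name);
        }
      return n;
    }

    int main (void)
    {
      size_t need = render (NULL, "ID") + 1;   /* first pass: measure */
      char *msg = (char *) malloc (need);
      if (!msg)
        return 1;
      render (msg, "ID");                      /* second pass: fill   */
      puts (msg);                              /* "unexpected ID"     */
      free (msg);
      return 0;
    }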
+
+/** State numbers, as in LALR(1) machine */
+typedef int yyStateNum;
+
+/** Rule numbers, as in LALR(1) machine */
+typedef int yyRuleNum;
+
+/** Grammar symbol */
+typedef short int yySymbol;
+
+/** Item references, as in LALR(1) machine */
+typedef short int yyItemNum;
+
+typedef struct yyGLRState yyGLRState;
+typedef struct yyGLRStateSet yyGLRStateSet;
+typedef struct yySemanticOption yySemanticOption;
+typedef union yyGLRStackItem yyGLRStackItem;
+typedef struct yyGLRStack yyGLRStack;
+
+struct yyGLRState {
+  /** Type tag: always true.  */
+  yybool yyisState;
+  /** Type tag for yysemantics.  If true, yysval applies, otherwise
+   *  yyfirstVal applies.  */
+  yybool yyresolved;
+  /** Number of corresponding LALR(1) machine state.  */
+  yyStateNum yylrState;
+  /** Preceding state in this stack */
+  yyGLRState* yypred;
+  /** Source position of the first token produced by my symbol */
+  size_t yyposn;
+  union {
+    /** First in a chain of alternative reductions producing the
+     *  non-terminal corresponding to this state, threaded through
+     *  yynext.  */
+    yySemanticOption* yyfirstVal;
+    /** Semantic value for this state.  */
+    YYSTYPE yysval;
+  } yysemantics;
+  /** Source location for this state.  */
+  YYLTYPE yyloc;
+};
 
-      YYDPRINTF ((stderr, "Stack size increased to %lu\n",
-                  (unsigned long int) yystacksize));
+struct yyGLRStateSet {
+  yyGLRState** yystates;
+  /** During nondeterministic operation, yylookaheadNeeds tracks which
+   *  stacks have actually needed the current lookahead.  During deterministic
+   *  operation, yylookaheadNeeds[0] is not maintained since it would merely
+   *  duplicate yychar != YYEMPTY.  */
+  yybool* yylookaheadNeeds;
+  size_t yysize, yycapacity;
+};
 
-      if (yyss + yystacksize - 1 <= yyssp)
-        YYABORT;
-    }
+struct yySemanticOption {
+  /** Type tag: always false.  */
+  yybool yyisState;
+  /** Rule number for this reduction */
+  yyRuleNum yyrule;
+  /** The last RHS state in the list of states to be reduced.  */
+  yyGLRState* yystate;
+  /** The lookahead for this reduction.  */
+  int yyrawchar;
+  YYSTYPE yyval;
+  YYLTYPE yyloc;
+  /** Next sibling in chain of options.  To facilitate merging,
+   *  options are chained in decreasing order by address.  */
+  yySemanticOption* yynext;
+};
 
-  YYDPRINTF ((stderr, "Entering state %d\n", yystate));
+/** Type of the items in the GLR stack.  The yyisState field
+ *  indicates which item of the union is valid.  */
+union yyGLRStackItem {
+  yyGLRState yystate;
+  yySemanticOption yyoption;
+};
 
-  if (yystate == YYFINAL)
-    YYACCEPT;
+struct yyGLRStack {
+  int yyerrState;
 
-  goto yybackup;
 
-/*-----------.
-| yybackup.  |
-`-----------*/
-yybackup:
+  YYJMP_BUF yyexception_buffer;
+  yyGLRStackItem* yyitems;
+  yyGLRStackItem* yynextFree;
+  size_t yyspaceLeft;
+  yyGLRState* yysplitPoint;
+  yyGLRState* yylastDeleted;
+  yyGLRStateSet yytops;
+};
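The items on the GLR stack form a classic tagged union: yyGLRState and yySemanticOption both begin with the yybool yyisState field, so the tag can be read through either member of yyGLRStackItem before deciding which variant is live. A stripped-down sketch of that layout (illustrative StateItem/OptionItem types, not the real declarations):

    #include <stdio.h>

    typedef char yybool;

    /* Both variants begin with the same tag field, so the tag can be read
       through either union member (common initial sequence rule in C).  */
    typedef struct { yybool is_state; int lr_state; } StateItem;
    typedef struct { yybool is_state; int rule;     } OptionItem;

    typedef union {
      StateItem  state;    /* is_state == 1 */
      OptionItem option;   /* is_state == 0 */
    } StackItem;

    static void describe (const StackItem *it)
    {
      if (it->state.is_state)
        printf ("state %d\n", it->state.lr_state);
      else
        printf ("reduction by rule %d\n", it->option.rule);
    }

    int main (void)
    {
      StackItem a, b;
      a.state.is_state = 1;  a.state.lr_state = 42;
      b.option.is_state = 0; b.option.rule = 7;
      describe (&a);
      describe (&b);
      return 0;
    }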
 
-  /* Do appropriate processing given the current state.  Read a
-     lookahead token if we need one and don't already have one.  */
+#if YYSTACKEXPANDABLE
+static void yyexpandGLRStack (yyGLRStack* yystackp);
+#endif
 
-  /* First try to decide what to do without reference to lookahead token.  */
-  yyn = yypact[yystate];
-  if (yyn == YYPACT_NINF)
-    goto yydefault;
+static void yyFail (yyGLRStack* yystackp, const char* yymsg)
+  __attribute__ ((__noreturn__));
+static void
+yyFail (yyGLRStack* yystackp, const char* yymsg)
+{
+  if (yymsg != NULL)
+    yyerror (yymsg);
+  YYLONGJMP (yystackp->yyexception_buffer, 1);
+}
 
-  /* Not known => get a lookahead token if don't already have one.  */
+static void yyMemoryExhausted (yyGLRStack* yystackp)
+  __attribute__ ((__noreturn__));
+static void
+yyMemoryExhausted (yyGLRStack* yystackp)
+{
+  YYLONGJMP (yystackp->yyexception_buffer, 2);
+}
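yyFail and yyMemoryExhausted do not thread an error code back through every caller; they YYLONGJMP straight to the handler that did the matching setjmp on yyexception_buffer. The same escape mechanism in miniature, with invented names:

    #include <setjmp.h>
    #include <stdio.h>

    static jmp_buf exception_buffer;   /* stands in for yyexception_buffer */

    static void fail (const char *msg) /* analogous to yyFail */
    {
      fprintf (stderr, "%s\n", msg);
      longjmp (exception_buffer, 1);   /* never returns */
    }

    static void deep_work (int bad)
    {
      if (bad)
        fail ("syntax error");
      puts ("work done");
    }

    int main (void)
    {
      switch (setjmp (exception_buffer))
        {
        case 0:  deep_work (1); break;       /* normal path, may longjmp */
        case 1:  puts ("recovered"); break;  /* landed here via longjmp  */
        default: puts ("out of memory"); break;
        }
      return 0;
    }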
 
-  /* YYCHAR is either YYEMPTY or YYEOF or a valid lookahead symbol.  */
-  if (yychar == YYEMPTY)
-    {
-      YYDPRINTF ((stderr, "Reading a token: "));
-      yychar = YYLEX;
-    }
+#if YYDEBUG || YYERROR_VERBOSE
+/** A printable representation of TOKEN.  */
+static const char*
+yytokenName (yySymbol yytoken)
+{
+  if (yytoken == YYEMPTY)
+    return "";
 
-  if (yychar <= YYEOF)
-    {
-      yychar = yytoken = YYEOF;
-      YYDPRINTF ((stderr, "Now at end of input.\n"));
-    }
-  else
-    {
-      yytoken = YYTRANSLATE (yychar);
-      YY_SYMBOL_PRINT ("Next token is", yytoken, &yylval, &yylloc);
-    }
+  return yytname[yytoken];
+}
+#endif
 
-  /* If the proper action on seeing token YYTOKEN is to reduce or to
-     detect an error, take that action.  */
-  yyn += yytoken;
-  if (yyn < 0 || YYLAST < yyn || yycheck[yyn] != yytoken)
-    goto yydefault;
-  yyn = yytable[yyn];
-  if (yyn <= 0)
+/** Fill in YYVSP[YYLOW1 .. YYLOW0-1] from the chain of states starting
+ *  at YYVSP[YYLOW0].yystate.yypred.  Leaves YYVSP[YYLOW1].yystate.yypred
+ *  containing the pointer to the next state in the chain.  */
+static void yyfillin (yyGLRStackItem *, int, int);
+static void
+yyfillin (yyGLRStackItem *yyvsp, int yylow0, int yylow1)
+{
+  yyGLRState* s;
+  int i;
+  s = yyvsp[yylow0].yystate.yypred;
+  for (i = yylow0-1; i >= yylow1; i -= 1)
     {
-      if (yyn == 0 || yyn == YYTABLE_NINF)
-        goto yyerrlab;
-      yyn = -yyn;
-      goto yyreduce;
+      YYASSERT (s->yyresolved);
+      yyvsp[i].yystate.yyresolved = yytrue;
+      yyvsp[i].yystate.yysemantics.yysval = s->yysemantics.yysval;
+      yyvsp[i].yystate.yyloc = s->yyloc;
+      s = yyvsp[i].yystate.yypred = s->yypred;
     }
+}
 
-  /* Count tokens shifted since error; after three, turn off error
-     status.  */
-  if (yyerrstatus)
-    yyerrstatus--;
-
-  /* Shift the lookahead token.  */
-  YY_SYMBOL_PRINT ("Shifting", yytoken, &yylval, &yylloc);
-
-  /* Discard the shifted token.  */
-  yychar = YYEMPTY;
-
-  yystate = yyn;
-  *++yyvsp = yylval;
-
-  goto yynewstate;
-
-
-/*-----------------------------------------------------------.
-| yydefault -- do the default action for the current state.  |
-`-----------------------------------------------------------*/
-yydefault:
-  yyn = yydefact[yystate];
-  if (yyn == 0)
-    goto yyerrlab;
-  goto yyreduce;
-
-
-/*-----------------------------.
-| yyreduce -- Do a reduction.  |
-`-----------------------------*/
-yyreduce:
-  /* yyn is the number of a rule to reduce with.  */
-  yylen = yyr2[yyn];
-
-  /* If YYLEN is nonzero, implement the default value of the action:
-     `$$ = $1'.
+/* Do nothing if YYNORMAL or if *YYLOW <= YYLOW1.  Otherwise, fill in
+ * YYVSP[YYLOW1 .. *YYLOW-1] as in yyfillin and set *YYLOW = YYLOW1.
+ * For convenience, always return YYLOW1.  */
+static int yyfill (yyGLRStackItem *, int *, int, yybool);
 
-     Otherwise, the following line sets YYVAL to garbage.
-     This behavior is undocumented and Bison
-     users should not rely upon it.  Assigning to YYVAL
-     unconditionally makes the parser a bit smaller, and it avoids a
-     GCC warning that YYVAL may be used uninitialized.  */
-  yyval = yyvsp[1-yylen];
+static int
+yyfill (yyGLRStackItem *yyvsp, int *yylow, int yylow1, yybool yynormal)
+{
+  if (!yynormal && yylow1 < *yylow)
+    {
+      yyfillin (yyvsp, *yylow, yylow1);
+      *yylow = yylow1;
+    }
+  return yylow1;
+}
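yyfillin reconstructs the right-hand-side values by walking the yypred chain down from the topmost state and filling the item array backwards; yyfill only triggers that work once the stack has split. A small model of the backward fill over a predecessor-linked chain (invented Node/fillin names, same index arithmetic):

    #include <stdio.h>

    /* A predecessor-linked state carrying one resolved value, loosely
       modelled on yyGLRState.yypred / yysemantics.yysval.  */
    typedef struct Node {
      int value;
      struct Node *pred;
    } Node;

    /* Fill slots[low1 .. low0-1] from the chain hanging off slots[low0],
       mirroring the loop in yyfillin.  */
    static void fillin (Node **slots, int low0, int low1)
    {
      Node *s = slots[low0]->pred;
      for (int i = low0 - 1; i >= low1; i -= 1)
        {
          slots[i] = s;
          s = s->pred;
        }
    }

    int main (void)
    {
      Node n1 = { 10, 0 }, n2 = { 20, &n1 }, n3 = { 30, &n2 }, top = { 40, &n3 };
      Node *slots[4] = { 0, 0, 0, &top };   /* only the top is known */

      fillin (slots, 3, 1);                 /* recover slots[1] and slots[2] */
      for (int i = 1; i <= 3; i++)
        printf ("slot %d -> %d\n", i, slots[i]->value);
      return 0;
    }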
 
+/** Perform user action for rule number YYN, with RHS length YYRHSLEN,
+ *  and top stack item YYVSP.  YYLVALP points to place to put semantic
+ *  value ($$), and yylocp points to place for location information
+ *  (@$).  Returns yyok for normal return, yyaccept for YYACCEPT,
+ *  yyerr for YYERROR, yyabort for YYABORT.  */
+/*ARGSUSED*/ static YYRESULTTAG
+yyuserAction (yyRuleNum yyn, int yyrhslen, yyGLRStackItem* yyvsp,
+              YYSTYPE* yyvalp,
+              YYLTYPE* YYOPTIONAL_LOC (yylocp),
+              yyGLRStack* yystackp
+              )
+{
+  yybool yynormal =
+    (yystackp->yysplitPoint == NULL);
+  int yylow;
+# undef yyerrok
+# define yyerrok (yystackp->yyerrState = 0)
+# undef YYACCEPT
+# define YYACCEPT return yyaccept
+# undef YYABORT
+# define YYABORT return yyabort
+# undef YYERROR
+# define YYERROR return yyerrok, yyerr
+# undef YYRECOVERING
+# define YYRECOVERING() (yystackp->yyerrState != 0)
+# undef yyclearin
+# define yyclearin (yychar = YYEMPTY)
+# undef YYFILL
+# define YYFILL(N) yyfill (yyvsp, &yylow, N, yynormal)
+# undef YYBACKUP
+# define YYBACKUP(Token, Value)                                                     \
+  return yyerror (YY_("syntax error: cannot back up")),     \
+         yyerrok, yyerr
+
+  yylow = 1;
+  if (yyrhslen == 0)
+    *yyvalp = yyval_default;
+  else
+    *yyvalp = yyvsp[YYFILL (1-yyrhslen)].yystate.yysemantics.yysval;
+  YYLLOC_DEFAULT ((*yylocp), (yyvsp - yyrhslen), yyrhslen);
 
-  YY_REDUCE_PRINT (yyn);
   switch (yyn)
     {
         case 4:
 
-/* Line 1455 of yacc.c  */
-#line 1482 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1471 "vtkParse.y"
     {
       startSig();
       clearType();
@@ -4697,31 +6140,31 @@ yyreduce:
     }
     break;
 
-  case 27:
+  case 33:
 
-/* Line 1455 of yacc.c  */
-#line 1527 "vtkParse.y"
-    { pushNamespace((yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 1524 "vtkParse.y"
+    { pushNamespace((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 28:
+  case 34:
 
-/* Line 1455 of yacc.c  */
-#line 1528 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1525 "vtkParse.y"
     { popNamespace(); }
     break;
 
-  case 36:
+  case 43:
 
-/* Line 1455 of yacc.c  */
-#line 1550 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1548 "vtkParse.y"
     { pushType(); }
     break;
 
-  case 37:
+  case 44:
 
-/* Line 1455 of yacc.c  */
-#line 1551 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1549 "vtkParse.y"
     {
       const char *name = (currentClass ? currentClass->Name : NULL);
       popType();
@@ -4735,45 +6178,97 @@ yyreduce:
     }
     break;
 
-  case 38:
+  case 45:
 
-/* Line 1455 of yacc.c  */
-#line 1564 "vtkParse.y"
-    { start_class((yyvsp[(2) - (2)].str), (yyvsp[(1) - (2)].integer)); }
+/* Line 936 of glr.c  */
+#line 1563 "vtkParse.y"
+    {
+      start_class((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (5))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (5))].yystate.yysemantics.yysval.integer));
+      currentClass->IsFinal = (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (5))].yystate.yysemantics.yysval.integer);
+    }
     break;
 
-  case 40:
+  case 47:
 
-/* Line 1455 of yacc.c  */
-#line 1566 "vtkParse.y"
-    { start_class(NULL, (yyvsp[(1) - (1)].integer)); }
+/* Line 936 of glr.c  */
+#line 1569 "vtkParse.y"
+    {
+      start_class((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (4))].yystate.yysemantics.yysval.integer));
+      currentClass->IsFinal = (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (4))].yystate.yysemantics.yysval.integer);
+    }
     break;
 
-  case 42:
+  case 48:
 
-/* Line 1455 of yacc.c  */
-#line 1570 "vtkParse.y"
-    { (yyval.integer) = 0; }
+/* Line 936 of glr.c  */
+#line 1574 "vtkParse.y"
+    {
+      start_class(NULL, (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.integer));
+    }
     break;
 
-  case 43:
+  case 50:
 
-/* Line 1455 of yacc.c  */
-#line 1571 "vtkParse.y"
-    { (yyval.integer) = 1; }
+/* Line 936 of glr.c  */
+#line 1579 "vtkParse.y"
+    {
+      start_class(NULL, (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.integer));
+    }
     break;
 
-  case 44:
+  case 51:
+
+/* Line 936 of glr.c  */
+#line 1584 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
+    break;
 
-/* Line 1455 of yacc.c  */
-#line 1572 "vtkParse.y"
-    { (yyval.integer) = 2; }
+  case 52:
+
+/* Line 936 of glr.c  */
+#line 1585 "vtkParse.y"
+    { ((*yyvalp).integer) = 1; }
+    break;
+
+  case 53:
+
+/* Line 936 of glr.c  */
+#line 1586 "vtkParse.y"
+    { ((*yyvalp).integer) = 2; }
+    break;
+
+  case 54:
+
+/* Line 936 of glr.c  */
+#line 1590 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (3))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 55:
+
+/* Line 936 of glr.c  */
+#line 1592 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat3("::", (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (4))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 59:
+
+/* Line 936 of glr.c  */
+#line 1600 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
+    break;
+
+  case 60:
+
+/* Line 936 of glr.c  */
+#line 1601 "vtkParse.y"
+    { ((*yyvalp).integer) = (strcmp((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str), "final") == 0); }
     break;
 
-  case 46:
+  case 62:
 
-/* Line 1455 of yacc.c  */
-#line 1576 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1605 "vtkParse.y"
     {
       startSig();
       clearType();
@@ -4783,187 +6278,239 @@ yyreduce:
     }
     break;
 
-  case 49:
+  case 65:
 
-/* Line 1455 of yacc.c  */
-#line 1588 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1617 "vtkParse.y"
     { access_level = VTK_ACCESS_PUBLIC; }
     break;
 
-  case 50:
+  case 66:
 
-/* Line 1455 of yacc.c  */
-#line 1589 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1618 "vtkParse.y"
     { access_level = VTK_ACCESS_PRIVATE; }
     break;
 
-  case 51:
+  case 67:
 
-/* Line 1455 of yacc.c  */
-#line 1590 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1619 "vtkParse.y"
     { access_level = VTK_ACCESS_PROTECTED; }
     break;
 
-  case 70:
+  case 91:
 
-/* Line 1455 of yacc.c  */
-#line 1615 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1649 "vtkParse.y"
     { output_friend_function(); }
     break;
 
-  case 76:
+  case 94:
 
-/* Line 1455 of yacc.c  */
-#line 1629 "vtkParse.y"
-    { add_base_class(currentClass, (yyvsp[(1) - (1)].str), access_level, 0); }
+/* Line 936 of glr.c  */
+#line 1657 "vtkParse.y"
+    { add_base_class(currentClass, (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), access_level, (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 77:
+  case 95:
 
-/* Line 1455 of yacc.c  */
-#line 1631 "vtkParse.y"
-    { add_base_class(currentClass, (yyvsp[(3) - (3)].str), (yyvsp[(2) - (3)].integer), 1); }
+/* Line 936 of glr.c  */
+#line 1659 "vtkParse.y"
+    { add_base_class(currentClass, (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (4))].yystate.yysemantics.yysval.integer),
+                     (VTK_PARSE_VIRTUAL | (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (4))].yystate.yysemantics.yysval.integer))); }
     break;
 
-  case 78:
+  case 96:
 
-/* Line 1455 of yacc.c  */
-#line 1633 "vtkParse.y"
-    { add_base_class(currentClass, (yyvsp[(3) - (3)].str), (yyvsp[(1) - (3)].integer), (yyvsp[(2) - (3)].integer)); }
+/* Line 936 of glr.c  */
+#line 1662 "vtkParse.y"
+    { add_base_class(currentClass, (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (4))].yystate.yysemantics.yysval.integer),
+                     ((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (4))].yystate.yysemantics.yysval.integer) | (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (4))].yystate.yysemantics.yysval.integer))); }
     break;
 
-  case 79:
+  case 97:
 
-/* Line 1455 of yacc.c  */
-#line 1636 "vtkParse.y"
-    { (yyval.integer) = 0; }
+/* Line 936 of glr.c  */
+#line 1666 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
     break;
 
-  case 80:
+  case 98:
 
-/* Line 1455 of yacc.c  */
-#line 1637 "vtkParse.y"
-    { (yyval.integer) = 1; }
+/* Line 936 of glr.c  */
+#line 1667 "vtkParse.y"
+    { ((*yyvalp).integer) = VTK_PARSE_VIRTUAL; }
     break;
 
-  case 81:
+  case 99:
 
-/* Line 1455 of yacc.c  */
-#line 1640 "vtkParse.y"
-    { (yyval.integer) = access_level; }
+/* Line 936 of glr.c  */
+#line 1670 "vtkParse.y"
+    { ((*yyvalp).integer) = access_level; }
     break;
 
-  case 83:
+  case 101:
 
-/* Line 1455 of yacc.c  */
-#line 1644 "vtkParse.y"
-    { (yyval.integer) = VTK_ACCESS_PUBLIC; }
+/* Line 936 of glr.c  */
+#line 1674 "vtkParse.y"
+    { ((*yyvalp).integer) = VTK_ACCESS_PUBLIC; }
     break;
 
-  case 84:
+  case 102:
 
-/* Line 1455 of yacc.c  */
-#line 1645 "vtkParse.y"
-    { (yyval.integer) = VTK_ACCESS_PRIVATE; }
+/* Line 936 of glr.c  */
+#line 1675 "vtkParse.y"
+    { ((*yyvalp).integer) = VTK_ACCESS_PRIVATE; }
     break;
 
-  case 85:
+  case 103:
 
-/* Line 1455 of yacc.c  */
-#line 1646 "vtkParse.y"
-    { (yyval.integer) = VTK_ACCESS_PROTECTED; }
+/* Line 936 of glr.c  */
+#line 1676 "vtkParse.y"
+    { ((*yyvalp).integer) = VTK_ACCESS_PROTECTED; }
     break;
 
-  case 88:
+  case 109:
 
-/* Line 1455 of yacc.c  */
-#line 1662 "vtkParse.y"
-    { pushType(); start_enum((yyvsp[(1) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 1698 "vtkParse.y"
+    { pushType(); }
     break;
 
-  case 89:
+  case 110:
 
-/* Line 1455 of yacc.c  */
-#line 1663 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1699 "vtkParse.y"
     {
       popType();
       clearTypeId();
-      if ((yyvsp[(1) - (5)].str) != NULL)
+      if ((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (5))].yystate.yysemantics.yysval.str) != NULL)
         {
-        setTypeId((yyvsp[(1) - (5)].str));
-        setTypeBase(guess_id_type((yyvsp[(1) - (5)].str)));
+        setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (5))].yystate.yysemantics.yysval.str));
+        setTypeBase(guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (5))].yystate.yysemantics.yysval.str)));
         }
       end_enum();
     }
     break;
 
-  case 90:
+  case 111:
 
-/* Line 1455 of yacc.c  */
-#line 1675 "vtkParse.y"
-    { (yyval.str) = (yyvsp[(2) - (2)].str); }
+/* Line 936 of glr.c  */
+#line 1712 "vtkParse.y"
+    {
+      start_enum((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (4))].yystate.yysemantics.yysval.integer), (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (4))].yystate.yysemantics.yysval.integer), getTypeId());
+      clearTypeId();
+      ((*yyvalp).str) = (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str);
+    }
     break;
 
-  case 91:
+  case 112:
 
-/* Line 1455 of yacc.c  */
-#line 1676 "vtkParse.y"
-    { (yyval.str) = NULL; }
+/* Line 936 of glr.c  */
+#line 1718 "vtkParse.y"
+    {
+      start_enum(NULL, (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.integer), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.integer), getTypeId());
+      clearTypeId();
+      ((*yyvalp).str) = NULL;
+    }
     break;
 
-  case 97:
+  case 113:
 
-/* Line 1455 of yacc.c  */
-#line 1687 "vtkParse.y"
-    { add_enum((yyvsp[(1) - (1)].str), NULL); }
+/* Line 936 of glr.c  */
+#line 1725 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
     break;
 
-  case 98:
+  case 114:
+
+/* Line 936 of glr.c  */
+#line 1726 "vtkParse.y"
+    { ((*yyvalp).integer) = 1; }
+    break;
+
+  case 115:
+
+/* Line 936 of glr.c  */
+#line 1727 "vtkParse.y"
+    { ((*yyvalp).integer) = 1; }
+    break;
+
+  case 116:
+
+/* Line 936 of glr.c  */
+#line 1730 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
+    break;
 
-/* Line 1455 of yacc.c  */
-#line 1688 "vtkParse.y"
+  case 117:
+
+/* Line 936 of glr.c  */
+#line 1731 "vtkParse.y"
+    { pushType(); }
+    break;
+
+  case 118:
+
+/* Line 936 of glr.c  */
+#line 1732 "vtkParse.y"
+    { ((*yyvalp).integer) = getType(); popType(); }
+    break;
+
+  case 122:
+
+/* Line 936 of glr.c  */
+#line 1739 "vtkParse.y"
+    { add_enum((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str), NULL); }
+    break;
+
+  case 123:
+
+/* Line 936 of glr.c  */
+#line 1740 "vtkParse.y"
     { postSig("="); markSig(); }
     break;
 
-  case 99:
+  case 124:
 
-/* Line 1455 of yacc.c  */
-#line 1689 "vtkParse.y"
-    { chopSig(); add_enum((yyvsp[(1) - (4)].str), copySig()); }
+/* Line 936 of glr.c  */
+#line 1741 "vtkParse.y"
+    { chopSig(); add_enum((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (4))].yystate.yysemantics.yysval.str), copySig()); }
     break;
 
-  case 120:
+  case 147:
 
-/* Line 1455 of yacc.c  */
-#line 1742 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1801 "vtkParse.y"
     { pushFunction(); postSig("("); }
     break;
 
-  case 121:
+  case 148:
 
-/* Line 1455 of yacc.c  */
-#line 1743 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1802 "vtkParse.y"
     { postSig(")"); }
     break;
 
-  case 122:
+  case 149:
 
-/* Line 1455 of yacc.c  */
-#line 1744 "vtkParse.y"
-    { (yyval.integer) = VTK_PARSE_FUNCTION; popFunction(); }
+/* Line 936 of glr.c  */
+#line 1803 "vtkParse.y"
+    { ((*yyvalp).integer) = (VTK_PARSE_FUNCTION | (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (8))].yystate.yysemantics.yysval.integer)); popFunction(); }
     break;
 
-  case 123:
+  case 150:
 
-/* Line 1455 of yacc.c  */
-#line 1748 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1807 "vtkParse.y"
     {
       ValueInfo *item = (ValueInfo *)malloc(sizeof(ValueInfo));
       vtkParse_InitValue(item);
       item->ItemType = VTK_TYPEDEF_INFO;
       item->Access = access_level;
 
-      handle_complex_type(item, getType(), (yyvsp[(1) - (1)].integer), getSig());
+      handle_complex_type(item, getType(), (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer), getSig());
 
       if (getVarName())
         {
@@ -4985,66 +6532,102 @@ yyreduce:
     }
     break;
 
-  case 124:
+  case 151:
+
+/* Line 936 of glr.c  */
+#line 1840 "vtkParse.y"
+    { add_using((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (3))].yystate.yysemantics.yysval.str), 0); }
+    break;
+
+  case 153:
+
+/* Line 936 of glr.c  */
+#line 1844 "vtkParse.y"
+    { ((*yyvalp).str) = (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str); }
+    break;
+
+  case 154:
 
-/* Line 1455 of yacc.c  */
-#line 1781 "vtkParse.y"
-    { add_using((yyvsp[(2) - (3)].str), 0); }
+/* Line 936 of glr.c  */
+#line 1846 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 126:
+  case 155:
 
-/* Line 1455 of yacc.c  */
-#line 1785 "vtkParse.y"
-    { (yyval.str) = (yyvsp[(2) - (2)].str); }
+/* Line 936 of glr.c  */
+#line 1848 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 127:
+  case 156:
 
-/* Line 1455 of yacc.c  */
-#line 1787 "vtkParse.y"
-    { (yyval.str) = vtkstrcat((yyvsp[(1) - (2)].str), (yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 1850 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat3((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 128:
+  case 157:
 
-/* Line 1455 of yacc.c  */
-#line 1789 "vtkParse.y"
-    { (yyval.str) = vtkstrcat((yyvsp[(1) - (2)].str), (yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 1852 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat3((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 129:
+  case 158:
 
-/* Line 1455 of yacc.c  */
-#line 1791 "vtkParse.y"
-    { (yyval.str) = vtkstrcat3((yyvsp[(1) - (3)].str), (yyvsp[(2) - (3)].str), (yyvsp[(3) - (3)].str)); }
+/* Line 936 of glr.c  */
+#line 1855 "vtkParse.y"
+    { add_using((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str), 1); }
     break;
 
-  case 130:
+  case 159:
 
-/* Line 1455 of yacc.c  */
-#line 1793 "vtkParse.y"
-    { (yyval.str) = vtkstrcat3((yyvsp[(1) - (3)].str), (yyvsp[(2) - (3)].str), (yyvsp[(3) - (3)].str)); }
+/* Line 936 of glr.c  */
+#line 1858 "vtkParse.y"
+    { markSig(); }
     break;
 
-  case 131:
+  case 160:
+
+/* Line 936 of glr.c  */
+#line 1860 "vtkParse.y"
+    {
+      ValueInfo *item = (ValueInfo *)malloc(sizeof(ValueInfo));
+      vtkParse_InitValue(item);
+      item->ItemType = VTK_TYPEDEF_INFO;
+      item->Access = access_level;
+
+      handle_complex_type(item, getType(), (((yyGLRStackItem const *)yyvsp)[YYFILL ((6) - (8))].yystate.yysemantics.yysval.integer), copySig());
+
+      item->Name = (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (8))].yystate.yysemantics.yysval.str);
 
-/* Line 1455 of yacc.c  */
-#line 1796 "vtkParse.y"
-    { add_using((yyvsp[(3) - (4)].str), 1); }
+      if (currentTemplate)
+        {
+        vtkParse_FreeValue(item);
+        }
+      else if (currentClass)
+        {
+        vtkParse_AddTypedefToClass(currentClass, item);
+        }
+      else
+        {
+        vtkParse_AddTypedefToNamespace(currentNamespace, item);
+        }
+    }
     break;
 
-  case 132:
+  case 161:
 
-/* Line 1455 of yacc.c  */
-#line 1804 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1890 "vtkParse.y"
     { postSig("template<> "); clearTypeId(); }
     break;
 
-  case 133:
+  case 162:
 
-/* Line 1455 of yacc.c  */
-#line 1806 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1892 "vtkParse.y"
     {
       postSig("template<");
       pushType();
@@ -5054,10 +6637,10 @@ yyreduce:
     }
     break;
 
-  case 134:
+  case 163:
 
-/* Line 1455 of yacc.c  */
-#line 1814 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1900 "vtkParse.y"
     {
       chopSig();
       if (getSig()[getSigLength()-1] == '>') { postSig(" "); }
@@ -5067,94 +6650,108 @@ yyreduce:
     }
     break;
 
-  case 136:
+  case 165:
 
-/* Line 1455 of yacc.c  */
-#line 1825 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1911 "vtkParse.y"
     { chopSig(); postSig(", "); clearType(); clearTypeId(); }
     break;
 
-  case 138:
+  case 167:
 
-/* Line 1455 of yacc.c  */
-#line 1829 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1915 "vtkParse.y"
     { markSig(); }
     break;
 
-  case 139:
+  case 168:
 
-/* Line 1455 of yacc.c  */
-#line 1831 "vtkParse.y"
-    { add_template_parameter(getType(), (yyvsp[(3) - (3)].integer), copySig()); }
+/* Line 936 of glr.c  */
+#line 1917 "vtkParse.y"
+    { add_template_parameter(getType(), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.integer), copySig()); }
     break;
 
-  case 141:
+  case 170:
 
-/* Line 1455 of yacc.c  */
-#line 1833 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1919 "vtkParse.y"
     { markSig(); }
     break;
 
-  case 142:
+  case 171:
 
-/* Line 1455 of yacc.c  */
-#line 1835 "vtkParse.y"
-    { add_template_parameter(0, (yyvsp[(3) - (3)].integer), copySig()); }
+/* Line 936 of glr.c  */
+#line 1921 "vtkParse.y"
+    { add_template_parameter(0, (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.integer), copySig()); }
     break;
 
-  case 144:
+  case 173:
 
-/* Line 1455 of yacc.c  */
-#line 1837 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1923 "vtkParse.y"
     { pushTemplate(); markSig(); }
     break;
 
-  case 145:
+  case 174:
 
-/* Line 1455 of yacc.c  */
-#line 1838 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1924 "vtkParse.y"
     { postSig("class "); }
     break;
 
-  case 146:
+  case 175:
 
-/* Line 1455 of yacc.c  */
-#line 1840 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1926 "vtkParse.y"
     {
       int i;
       TemplateInfo *newTemplate = currentTemplate;
       popTemplate();
-      add_template_parameter(0, (yyvsp[(5) - (5)].integer), copySig());
+      add_template_parameter(0, (((yyGLRStackItem const *)yyvsp)[YYFILL ((5) - (5))].yystate.yysemantics.yysval.integer), copySig());
       i = currentTemplate->NumberOfParameters-1;
       currentTemplate->Parameters[i]->Template = newTemplate;
     }
     break;
 
-  case 148:
+  case 177:
+
+/* Line 936 of glr.c  */
+#line 1937 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
+    break;
 
-/* Line 1455 of yacc.c  */
-#line 1851 "vtkParse.y"
+  case 178:
+
+/* Line 936 of glr.c  */
+#line 1938 "vtkParse.y"
+    { postSig("..."); ((*yyvalp).integer) = VTK_PARSE_PACK; }
+    break;
+
+  case 179:
+
+/* Line 936 of glr.c  */
+#line 1941 "vtkParse.y"
     { postSig("class "); }
     break;
 
-  case 149:
+  case 180:
 
-/* Line 1455 of yacc.c  */
-#line 1852 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1942 "vtkParse.y"
     { postSig("typename "); }
     break;
 
-  case 152:
+  case 183:
 
-/* Line 1455 of yacc.c  */
-#line 1858 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1948 "vtkParse.y"
     { postSig("="); markSig(); }
     break;
 
-  case 153:
+  case 184:
 
-/* Line 1455 of yacc.c  */
-#line 1860 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1950 "vtkParse.y"
     {
       int i = currentTemplate->NumberOfParameters-1;
       ValueInfo *param = currentTemplate->Parameters[i];
@@ -5163,62 +6760,63 @@ yyreduce:
     }
     break;
 
-  case 156:
+  case 187:
 
-/* Line 1455 of yacc.c  */
-#line 1877 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1967 "vtkParse.y"
     { output_function(); }
     break;
 
-  case 157:
+  case 188:
 
-/* Line 1455 of yacc.c  */
-#line 1878 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1968 "vtkParse.y"
     { output_function(); }
     break;
 
-  case 158:
+  case 189:
 
-/* Line 1455 of yacc.c  */
-#line 1879 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1969 "vtkParse.y"
     { reject_function(); }
     break;
 
-  case 159:
+  case 190:
 
-/* Line 1455 of yacc.c  */
-#line 1880 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1970 "vtkParse.y"
     { reject_function(); }
     break;
 
-  case 167:
+  case 198:
 
-/* Line 1455 of yacc.c  */
-#line 1896 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 1986 "vtkParse.y"
     { output_function(); }
     break;
 
-  case 177:
+  case 208:
 
-/* Line 1455 of yacc.c  */
-#line 1913 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2004 "vtkParse.y"
     {
       postSig("(");
+      currentFunction->IsExplicit = ((getType() & VTK_PARSE_EXPLICIT) != 0);
       set_return(currentFunction, getType(), getTypeId(), 0);
     }
     break;
 
-  case 178:
+  case 209:
 
-/* Line 1455 of yacc.c  */
-#line 1917 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2009 "vtkParse.y"
     { postSig(")"); }
     break;
 
-  case 179:
+  case 210:
 
-/* Line 1455 of yacc.c  */
-#line 1919 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2011 "vtkParse.y"
     {
       postSig(";");
       closeSig();
@@ -5229,37 +6827,37 @@ yyreduce:
     }
     break;
 
-  case 180:
+  case 211:
 
-/* Line 1455 of yacc.c  */
-#line 1930 "vtkParse.y"
-    { (yyval.str) = copySig(); }
+/* Line 936 of glr.c  */
+#line 2022 "vtkParse.y"
+    { ((*yyvalp).str) = copySig(); }
     break;
 
-  case 181:
+  case 212:
 
-/* Line 1455 of yacc.c  */
-#line 1933 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2025 "vtkParse.y"
     { postSig(")"); }
     break;
 
-  case 182:
+  case 213:
 
-/* Line 1455 of yacc.c  */
-#line 1934 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2027 "vtkParse.y"
     {
       postSig(";");
       closeSig();
-      currentFunction->Name = (yyvsp[(1) - (3)].str);
+      currentFunction->Name = (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (4))].yystate.yysemantics.yysval.str);
       currentFunction->Comment = vtkstrdup(getComment());
       vtkParseDebug("Parsed operator", currentFunction->Name);
     }
     break;
 
-  case 183:
+  case 214:
 
-/* Line 1455 of yacc.c  */
-#line 1944 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2037 "vtkParse.y"
     {
       postSig("(");
       currentFunction->IsOperator = 1;
@@ -5267,58 +6865,58 @@ yyreduce:
     }
     break;
 
-  case 185:
+  case 216:
 
-/* Line 1455 of yacc.c  */
-#line 1953 "vtkParse.y"
-    { chopSig(); (yyval.str) = vtkstrcat(copySig(), (yyvsp[(2) - (2)].str)); postSig((yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2046 "vtkParse.y"
+    { chopSig(); ((*yyvalp).str) = vtkstrcat(copySig(), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 186:
+  case 217:
 
-/* Line 1455 of yacc.c  */
-#line 1956 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2049 "vtkParse.y"
     { markSig(); postSig("operator "); }
     break;
 
-  case 187:
+  case 218:
 
-/* Line 1455 of yacc.c  */
-#line 1960 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2053 "vtkParse.y"
     {
       postSig(";");
       closeSig();
-      currentFunction->Name = (yyvsp[(1) - (2)].str);
+      currentFunction->Name = (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.str);
       currentFunction->Comment = vtkstrdup(getComment());
       vtkParseDebug("Parsed func", currentFunction->Name);
     }
     break;
 
-  case 190:
+  case 221:
 
-/* Line 1455 of yacc.c  */
-#line 1972 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2065 "vtkParse.y"
     { postSig(" throw "); }
     break;
 
-  case 191:
+  case 222:
 
-/* Line 1455 of yacc.c  */
-#line 1972 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2065 "vtkParse.y"
     { chopSig(); }
     break;
 
-  case 192:
+  case 223:
 
-/* Line 1455 of yacc.c  */
-#line 1973 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2066 "vtkParse.y"
     { postSig(" const"); currentFunction->IsConst = 1; }
     break;
 
-  case 193:
+  case 224:
 
-/* Line 1455 of yacc.c  */
-#line 1975 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2068 "vtkParse.y"
     {
       postSig(" = 0");
       currentFunction->IsPureVirtual = 1;
@@ -5326,41 +6924,89 @@ yyreduce:
     }
     break;
 
-  case 196:
+  case 225:
+
+/* Line 936 of glr.c  */
+#line 2074 "vtkParse.y"
+    {
+      postSig(" "); postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str));
+      if (strcmp((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str), "final") == 0) { currentFunction->IsFinal = 1; }
+    }
+    break;
+
+  case 226:
+
+/* Line 936 of glr.c  */
+#line 2078 "vtkParse.y"
+    { chopSig(); }
+    break;
+
+  case 229:
+
+/* Line 936 of glr.c  */
+#line 2083 "vtkParse.y"
+    { postSig(" noexcept"); }
+    break;
+
+  case 230:
+
+/* Line 936 of glr.c  */
+#line 2086 "vtkParse.y"
+    { currentFunction->IsDeleted = 1; }
+    break;
+
+  case 234:
+
+/* Line 936 of glr.c  */
+#line 2093 "vtkParse.y"
+    { postSig(" -> "); clearType(); clearTypeId(); }
+    break;
+
+  case 235:
+
+/* Line 936 of glr.c  */
+#line 2095 "vtkParse.y"
+    {
+      chopSig();
+      set_return(currentFunction, getType(), getTypeId(), 0);
+    }
+    break;
+
+  case 242:
 
-/* Line 1455 of yacc.c  */
-#line 1987 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2113 "vtkParse.y"
     {
       postSig("(");
       set_return(currentFunction, getType(), getTypeId(), 0);
     }
     break;
 
-  case 197:
+  case 243:
 
-/* Line 1455 of yacc.c  */
-#line 1991 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2117 "vtkParse.y"
     { postSig(")"); }
     break;
 
-  case 200:
+  case 244:
 
-/* Line 1455 of yacc.c  */
-#line 2003 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2125 "vtkParse.y"
     { closeSig(); }
     break;
 
-  case 201:
+  case 245:
 
-/* Line 1455 of yacc.c  */
-#line 2004 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2126 "vtkParse.y"
     { openSig(); }
     break;
 
-  case 202:
+  case 246:
 
-/* Line 1455 of yacc.c  */
-#line 2005 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2128 "vtkParse.y"
     {
       postSig(";");
       closeSig();
@@ -5372,70 +7018,77 @@ yyreduce:
         {
         currentFunction->IsExplicit = 1;
         }
-      currentFunction->Name = (yyvsp[(1) - (5)].str);
+      currentFunction->Name = (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (6))].yystate.yysemantics.yysval.str);
       currentFunction->Comment = vtkstrdup(getComment());
       vtkParseDebug("Parsed func", currentFunction->Name);
     }
     break;
 
-  case 203:
+  case 247:
 
-/* Line 1455 of yacc.c  */
-#line 2022 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2145 "vtkParse.y"
     { pushType(); postSig("("); }
     break;
 
-  case 204:
+  case 248:
 
-/* Line 1455 of yacc.c  */
-#line 2023 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2147 "vtkParse.y"
     { popType(); postSig(")"); }
     break;
 
-  case 211:
+  case 255:
 
-/* Line 1455 of yacc.c  */
-#line 2040 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2164 "vtkParse.y"
     { clearType(); clearTypeId(); }
     break;
 
-  case 213:
+  case 257:
 
-/* Line 1455 of yacc.c  */
-#line 2043 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2167 "vtkParse.y"
     { clearType(); clearTypeId(); }
     break;
 
-  case 214:
+  case 258:
 
-/* Line 1455 of yacc.c  */
-#line 2044 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2168 "vtkParse.y"
     { clearType(); clearTypeId(); postSig(", "); }
     break;
 
-  case 216:
+  case 260:
 
-/* Line 1455 of yacc.c  */
-#line 2047 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2171 "vtkParse.y"
     { currentFunction->IsVariadic = 1; postSig(", ..."); }
     break;
 
-  case 217:
+  case 261:
 
-/* Line 1455 of yacc.c  */
-#line 2050 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2173 "vtkParse.y"
+    { currentFunction->IsVariadic = 1; postSig("..."); }
+    break;
+
+  case 262:
+
+/* Line 936 of glr.c  */
+#line 2176 "vtkParse.y"
     { markSig(); }
     break;
 
-  case 218:
+  case 263:
 
-/* Line 1455 of yacc.c  */
-#line 2052 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2178 "vtkParse.y"
     {
       ValueInfo *param = (ValueInfo *)malloc(sizeof(ValueInfo));
       vtkParse_InitValue(param);
 
-      handle_complex_type(param, getType(), (yyvsp[(3) - (3)].integer), copySig());
+      handle_complex_type(param, getType(), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.integer), copySig());
       add_legacy_parameter(currentFunction, param);
 
       if (getVarName())
@@ -5447,10 +7100,10 @@ yyreduce:
     }
     break;
 
-  case 219:
+  case 264:
 
-/* Line 1455 of yacc.c  */
-#line 2067 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2193 "vtkParse.y"
     {
       int i = currentFunction->NumberOfParameters-1;
       if (getVarValue())
@@ -5460,31 +7113,45 @@ yyreduce:
     }
     break;
 
-  case 220:
+  case 265:
 
-/* Line 1455 of yacc.c  */
-#line 2076 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2202 "vtkParse.y"
     { clearVarValue(); }
     break;
 
-  case 222:
+  case 267:
 
-/* Line 1455 of yacc.c  */
-#line 2080 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2206 "vtkParse.y"
     { postSig("="); clearVarValue(); markSig(); }
     break;
 
-  case 223:
+  case 268:
 
-/* Line 1455 of yacc.c  */
-#line 2081 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2207 "vtkParse.y"
     { chopSig(); setVarValue(copySig()); }
     break;
 
-  case 225:
+  case 269:
+
+/* Line 936 of glr.c  */
+#line 2208 "vtkParse.y"
+    { clearVarValue(); markSig(); }
+    break;
+
+  case 270:
+
+/* Line 936 of glr.c  */
+#line 2209 "vtkParse.y"
+    { chopSig(); setVarValue(copySig()); }
+    break;
 
-/* Line 1455 of yacc.c  */
-#line 2092 "vtkParse.y"
+  case 272:
+
+/* Line 936 of glr.c  */
+#line 2220 "vtkParse.y"
     {
       unsigned int type = getType();
       ValueInfo *var = (ValueInfo *)malloc(sizeof(ValueInfo));
@@ -5492,7 +7159,7 @@ yyreduce:
       var->ItemType = VTK_VARIABLE_INFO;
       var->Access = access_level;
 
-      handle_complex_type(var, type, (yyvsp[(1) - (2)].integer), getSig());
+      handle_complex_type(var, type, (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.integer), getSig());
 
       var->Name = getVarName();
 
@@ -5507,11 +7174,11 @@ yyreduce:
         var->ItemType = VTK_TYPEDEF_INFO;
         if (currentClass)
           {
-          vtkParse_AddVariableToClass(currentClass, var);
+          vtkParse_AddTypedefToClass(currentClass, var);
           }
         else
           {
-          vtkParse_AddVariableToNamespace(currentNamespace, var);
+          vtkParse_AddTypedefToNamespace(currentNamespace, var);
           }
         }
       /* Is this a constant? */
@@ -5544,1061 +7211,1165 @@ yyreduce:
     }
     break;
 
-  case 229:
+  case 276:
 
-/* Line 1455 of yacc.c  */
-#line 2154 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2282 "vtkParse.y"
     { postSig(", "); }
     break;
 
-  case 232:
+  case 279:
 
-/* Line 1455 of yacc.c  */
-#line 2160 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2288 "vtkParse.y"
     { setTypePtr(0); }
     break;
 
-  case 233:
+  case 280:
 
-/* Line 1455 of yacc.c  */
-#line 2161 "vtkParse.y"
-    { setTypePtr((yyvsp[(1) - (1)].integer)); }
+/* Line 936 of glr.c  */
+#line 2289 "vtkParse.y"
+    { setTypePtr((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 234:
+  case 281:
 
-/* Line 1455 of yacc.c  */
-#line 2165 "vtkParse.y"
-    { (yyval.integer) = 0; }
+/* Line 936 of glr.c  */
+#line 2294 "vtkParse.y"
+    {
+      if ((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.integer) == VTK_PARSE_FUNCTION)
+        {
+        ((*yyvalp).integer) = (VTK_PARSE_FUNCTION_PTR | (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.integer));
+        }
+      else
+        {
+        ((*yyvalp).integer) = (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.integer);
+        }
+    }
     break;
 
-  case 235:
+  case 282:
 
-/* Line 1455 of yacc.c  */
-#line 2166 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2304 "vtkParse.y"
     { postSig(")"); }
     break;
 
-  case 236:
+  case 283:
 
-/* Line 1455 of yacc.c  */
-#line 2168 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2306 "vtkParse.y"
     {
       const char *scope = getScope();
-      unsigned int parens = add_indirection((yyvsp[(1) - (5)].integer), (yyvsp[(2) - (5)].integer));
-      if ((yyvsp[(5) - (5)].integer) == VTK_PARSE_FUNCTION)
+      unsigned int parens = add_indirection((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (6))].yystate.yysemantics.yysval.integer), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (6))].yystate.yysemantics.yysval.integer));
+      if ((((yyGLRStackItem const *)yyvsp)[YYFILL ((6) - (6))].yystate.yysemantics.yysval.integer) == VTK_PARSE_FUNCTION)
         {
         if (scope) { scope = vtkstrndup(scope, strlen(scope) - 2); }
         getFunction()->Class = scope;
-        (yyval.integer) = (parens | VTK_PARSE_FUNCTION);
+        ((*yyvalp).integer) = (parens | VTK_PARSE_FUNCTION);
         }
-      else if ((yyvsp[(5) - (5)].integer) == VTK_PARSE_ARRAY)
+      else if ((((yyGLRStackItem const *)yyvsp)[YYFILL ((6) - (6))].yystate.yysemantics.yysval.integer) == VTK_PARSE_ARRAY)
         {
-        (yyval.integer) = add_indirection_to_array(parens);
+        ((*yyvalp).integer) = add_indirection_to_array(parens);
         }
     }
     break;
 
-  case 237:
+  case 284:
 
-/* Line 1455 of yacc.c  */
-#line 2185 "vtkParse.y"
-    { (yyval.integer) = 0; }
+/* Line 936 of glr.c  */
+#line 2324 "vtkParse.y"
+    { ((*yyvalp).integer) = (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.integer); }
     break;
 
-  case 238:
+  case 285:
 
-/* Line 1455 of yacc.c  */
-#line 2186 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2325 "vtkParse.y"
     { postSig(")"); }
     break;
 
-  case 239:
+  case 286:
 
-/* Line 1455 of yacc.c  */
-#line 2188 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2327 "vtkParse.y"
     {
       const char *scope = getScope();
-      unsigned int parens = add_indirection((yyvsp[(1) - (5)].integer), (yyvsp[(2) - (5)].integer));
-      if ((yyvsp[(5) - (5)].integer) == VTK_PARSE_FUNCTION)
+      unsigned int parens = add_indirection((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (5))].yystate.yysemantics.yysval.integer), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (5))].yystate.yysemantics.yysval.integer));
+      if ((((yyGLRStackItem const *)yyvsp)[YYFILL ((5) - (5))].yystate.yysemantics.yysval.integer) == VTK_PARSE_FUNCTION)
         {
         if (scope) { scope = vtkstrndup(scope, strlen(scope) - 2); }
         getFunction()->Class = scope;
-        (yyval.integer) = (parens | VTK_PARSE_FUNCTION);
+        ((*yyvalp).integer) = (parens | VTK_PARSE_FUNCTION);
         }
-      else if ((yyvsp[(5) - (5)].integer) == VTK_PARSE_ARRAY)
+      else if ((((yyGLRStackItem const *)yyvsp)[YYFILL ((5) - (5))].yystate.yysemantics.yysval.integer) == VTK_PARSE_ARRAY)
         {
-        (yyval.integer) = add_indirection_to_array(parens);
+        ((*yyvalp).integer) = add_indirection_to_array(parens);
         }
     }
     break;
 
-  case 240:
-
-/* Line 1455 of yacc.c  */
-#line 2204 "vtkParse.y"
-    { postSig("("); scopeSig(""); (yyval.integer) = 0; }
-    break;
-
-  case 241:
-
-/* Line 1455 of yacc.c  */
-#line 2205 "vtkParse.y"
-    { postSig("("); scopeSig((yyvsp[(1) - (1)].str)); postSig("*");
-         (yyval.integer) = VTK_PARSE_POINTER; }
-    break;
-
-  case 242:
+  case 287:
 
-/* Line 1455 of yacc.c  */
-#line 2207 "vtkParse.y"
-    { postSig("("); scopeSig((yyvsp[(1) - (1)].str)); postSig("&");
-         (yyval.integer) = VTK_PARSE_REF; }
+/* Line 936 of glr.c  */
+#line 2343 "vtkParse.y"
+    { postSig("("); scopeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); postSig("*"); }
     break;
 
-  case 243:
+  case 288:
 
-/* Line 1455 of yacc.c  */
-#line 2211 "vtkParse.y"
-    { postSig("("); scopeSig((yyvsp[(1) - (1)].str)); postSig("*");
-         (yyval.integer) = VTK_PARSE_POINTER; }
+/* Line 936 of glr.c  */
+#line 2344 "vtkParse.y"
+    { ((*yyvalp).integer) = (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.integer); }
     break;
 
-  case 244:
+  case 289:
 
-/* Line 1455 of yacc.c  */
-#line 2213 "vtkParse.y"
-    { postSig("("); scopeSig((yyvsp[(1) - (1)].str)); postSig("&");
-         (yyval.integer) = VTK_PARSE_REF; }
+/* Line 936 of glr.c  */
+#line 2345 "vtkParse.y"
+    { postSig("("); scopeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); postSig("&");
+         ((*yyvalp).integer) = VTK_PARSE_REF; }
     break;
 
-  case 245:
+  case 290:
 
-/* Line 1455 of yacc.c  */
-#line 2216 "vtkParse.y"
-    { (yyval.integer) = 0; }
+/* Line 936 of glr.c  */
+#line 2349 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
     break;
 
-  case 246:
+  case 291:
 
-/* Line 1455 of yacc.c  */
-#line 2217 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2350 "vtkParse.y"
     { pushFunction(); postSig("("); }
     break;
 
-  case 247:
+  case 292:
 
-/* Line 1455 of yacc.c  */
-#line 2218 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2351 "vtkParse.y"
     { postSig(")"); }
     break;
 
-  case 248:
+  case 293:
 
-/* Line 1455 of yacc.c  */
-#line 2219 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2352 "vtkParse.y"
     {
-      (yyval.integer) = VTK_PARSE_FUNCTION;
+      ((*yyvalp).integer) = VTK_PARSE_FUNCTION;
       popFunction();
     }
     break;
 
-  case 249:
+  case 294:
 
-/* Line 1455 of yacc.c  */
-#line 2223 "vtkParse.y"
-    { (yyval.integer) = VTK_PARSE_ARRAY; }
+/* Line 936 of glr.c  */
+#line 2356 "vtkParse.y"
+    { ((*yyvalp).integer) = VTK_PARSE_ARRAY; }
     break;
 
-  case 252:
+  case 297:
 
-/* Line 1455 of yacc.c  */
-#line 2227 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2360 "vtkParse.y"
     { currentFunction->IsConst = 1; }
     break;
 
-  case 255:
+  case 302:
 
-/* Line 1455 of yacc.c  */
-#line 2233 "vtkParse.y"
-    { (yyval.integer) = add_indirection((yyvsp[(1) - (2)].integer), (yyvsp[(2) - (2)].integer)); }
+/* Line 936 of glr.c  */
+#line 2368 "vtkParse.y"
+    { ((*yyvalp).integer) = add_indirection((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.integer), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 257:
+  case 304:
 
-/* Line 1455 of yacc.c  */
-#line 2238 "vtkParse.y"
-    { (yyval.integer) = add_indirection((yyvsp[(1) - (2)].integer), (yyvsp[(2) - (2)].integer)); }
+/* Line 936 of glr.c  */
+#line 2373 "vtkParse.y"
+    { ((*yyvalp).integer) = add_indirection((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.integer), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 258:
+  case 305:
 
-/* Line 1455 of yacc.c  */
-#line 2241 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2376 "vtkParse.y"
     { clearVarName(); chopSig(); }
     break;
 
-  case 260:
+  case 307:
 
-/* Line 1455 of yacc.c  */
-#line 2245 "vtkParse.y"
-    { setVarName((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2380 "vtkParse.y"
+    { setVarName((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 261:
+  case 308:
 
-/* Line 1455 of yacc.c  */
-#line 2246 "vtkParse.y"
-    { setVarName((yyvsp[(1) - (3)].str)); }
+/* Line 936 of glr.c  */
+#line 2382 "vtkParse.y"
+    { setVarName((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (4))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 265:
+  case 312:
 
-/* Line 1455 of yacc.c  */
-#line 2254 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2390 "vtkParse.y"
     { clearArray(); }
     break;
 
-  case 267:
+  case 314:
 
-/* Line 1455 of yacc.c  */
-#line 2258 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2394 "vtkParse.y"
     { clearArray(); }
     break;
 
-  case 271:
+  case 318:
 
-/* Line 1455 of yacc.c  */
-#line 2265 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2401 "vtkParse.y"
     { postSig("["); }
     break;
 
-  case 272:
+  case 319:
 
-/* Line 1455 of yacc.c  */
-#line 2265 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2402 "vtkParse.y"
     { postSig("]"); }
     break;
 
-  case 273:
+  case 320:
 
-/* Line 1455 of yacc.c  */
-#line 2268 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2405 "vtkParse.y"
     { pushArraySize(""); }
     break;
 
-  case 274:
+  case 321:
 
-/* Line 1455 of yacc.c  */
-#line 2269 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2406 "vtkParse.y"
     { markSig(); }
     break;
 
-  case 275:
+  case 322:
 
-/* Line 1455 of yacc.c  */
-#line 2269 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2406 "vtkParse.y"
     { chopSig(); pushArraySize(copySig()); }
     break;
 
-  case 280:
+  case 328:
 
-/* Line 1455 of yacc.c  */
-#line 2285 "vtkParse.y"
-    { (yyval.str) = vtkstrcat((yyvsp[(1) - (2)].str), (yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2420 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat("~", (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 281:
+  case 329:
 
-/* Line 1455 of yacc.c  */
-#line 2287 "vtkParse.y"
-    { (yyval.str) = vtkstrcat((yyvsp[(1) - (2)].str), (yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2421 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat("~", (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 282:
+  case 330:
 
-/* Line 1455 of yacc.c  */
-#line 2289 "vtkParse.y"
-    { (yyval.str) = vtkstrcat((yyvsp[(1) - (2)].str), (yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2425 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 283:
+  case 331:
 
-/* Line 1455 of yacc.c  */
-#line 2293 "vtkParse.y"
-    { (yyval.str) = vtkstrcat((yyvsp[(1) - (2)].str), (yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2427 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 284:
+  case 332:
 
-/* Line 1455 of yacc.c  */
-#line 2295 "vtkParse.y"
-    { (yyval.str) = vtkstrcat((yyvsp[(1) - (2)].str), (yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2429 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 285:
+  case 333:
 
-/* Line 1455 of yacc.c  */
-#line 2297 "vtkParse.y"
-    { (yyval.str) = vtkstrcat3((yyvsp[(1) - (3)].str), (yyvsp[(2) - (3)].str), (yyvsp[(3) - (3)].str)); }
+/* Line 936 of glr.c  */
+#line 2433 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 286:
+  case 334:
 
-/* Line 1455 of yacc.c  */
-#line 2299 "vtkParse.y"
-    { (yyval.str) = vtkstrcat3((yyvsp[(1) - (3)].str), (yyvsp[(2) - (3)].str), (yyvsp[(3) - (3)].str)); }
+/* Line 936 of glr.c  */
+#line 2435 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 287:
+  case 335:
+
+/* Line 936 of glr.c  */
+#line 2437 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 336:
+
+/* Line 936 of glr.c  */
+#line 2439 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat3((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 337:
+
+/* Line 936 of glr.c  */
+#line 2441 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat3((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 338:
+
+/* Line 936 of glr.c  */
+#line 2443 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat3((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (3))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 339:
 
-/* Line 1455 of yacc.c  */
-#line 2300 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2444 "vtkParse.y"
     { postSig("template "); }
     break;
 
-  case 288:
+  case 340:
 
-/* Line 1455 of yacc.c  */
-#line 2302 "vtkParse.y"
-    { (yyval.str) = vtkstrcat4((yyvsp[(1) - (5)].str), "template ", (yyvsp[(4) - (5)].str), (yyvsp[(5) - (5)].str)); }
+/* Line 936 of glr.c  */
+#line 2446 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat4((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (5))].yystate.yysemantics.yysval.str), "template ", (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (5))].yystate.yysemantics.yysval.str), (((yyGLRStackItem const *)yyvsp)[YYFILL ((5) - (5))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 289:
+  case 341:
 
-/* Line 1455 of yacc.c  */
-#line 2305 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2449 "vtkParse.y"
+    { postSig("~"); }
     break;
 
-  case 290:
+  case 342:
+
+/* Line 936 of glr.c  */
+#line 2452 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 343:
 
-/* Line 1455 of yacc.c  */
-#line 2308 "vtkParse.y"
-    { (yyval.str) = "::"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2455 "vtkParse.y"
+    { ((*yyvalp).str) = "::"; postSig(((*yyvalp).str)); }
     break;
 
-  case 291:
+  case 344:
 
-/* Line 1455 of yacc.c  */
-#line 2311 "vtkParse.y"
-    { markSig(); postSig((yyvsp[(1) - (2)].str)); postSig("<"); }
+/* Line 936 of glr.c  */
+#line 2458 "vtkParse.y"
+    { markSig(); postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str)); postSig("<"); }
     break;
 
-  case 292:
+  case 345:
 
-/* Line 1455 of yacc.c  */
-#line 2313 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2460 "vtkParse.y"
     {
       chopSig(); if (getSig()[getSigLength()-1] == '>') { postSig(" "); }
-      postSig(">"); (yyval.str) = copySig(); clearTypeId();
+      postSig(">"); ((*yyvalp).str) = copySig(); clearTypeId();
     }
     break;
 
-  case 293:
+  case 346:
 
-/* Line 1455 of yacc.c  */
-#line 2326 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2466 "vtkParse.y"
+    { markSig(); postSig("decltype"); }
     break;
 
-  case 294:
+  case 347:
 
-/* Line 1455 of yacc.c  */
-#line 2327 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2467 "vtkParse.y"
+    { chopSig(); ((*yyvalp).str) = copySig(); clearTypeId(); }
+    break;
+
+  case 348:
+
+/* Line 936 of glr.c  */
+#line 2475 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 349:
+
+/* Line 936 of glr.c  */
+#line 2476 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 350:
+
+/* Line 936 of glr.c  */
+#line 2477 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 351:
+
+/* Line 936 of glr.c  */
+#line 2478 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 352:
+
+/* Line 936 of glr.c  */
+#line 2479 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 353:
+
+/* Line 936 of glr.c  */
+#line 2480 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 354:
+
+/* Line 936 of glr.c  */
+#line 2481 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 355:
+
+/* Line 936 of glr.c  */
+#line 2482 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 356:
+
+/* Line 936 of glr.c  */
+#line 2483 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 295:
+  case 357:
 
-/* Line 1455 of yacc.c  */
-#line 2328 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2484 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 296:
+  case 358:
 
-/* Line 1455 of yacc.c  */
-#line 2329 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2485 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeInt8"; postSig(((*yyvalp).str)); }
     break;
 
-  case 297:
+  case 359:
 
-/* Line 1455 of yacc.c  */
-#line 2330 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2486 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeUInt8"; postSig(((*yyvalp).str)); }
     break;
 
-  case 298:
+  case 360:
 
-/* Line 1455 of yacc.c  */
-#line 2331 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2487 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeInt16"; postSig(((*yyvalp).str)); }
     break;
 
-  case 299:
+  case 361:
 
-/* Line 1455 of yacc.c  */
-#line 2332 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2488 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeUInt16"; postSig(((*yyvalp).str)); }
     break;
 
-  case 300:
+  case 362:
 
-/* Line 1455 of yacc.c  */
-#line 2333 "vtkParse.y"
-    { (yyval.str) = vtkstrcat("~",(yyvsp[(2) - (2)].str)); postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2489 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeInt32"; postSig(((*yyvalp).str)); }
     break;
 
-  case 301:
+  case 363:
 
-/* Line 1455 of yacc.c  */
-#line 2334 "vtkParse.y"
-    { (yyval.str) = vtkstrcat("~",(yyvsp[(2) - (2)].str)); postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2490 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeUInt32"; postSig(((*yyvalp).str)); }
     break;
 
-  case 302:
+  case 364:
 
-/* Line 1455 of yacc.c  */
-#line 2335 "vtkParse.y"
-    { (yyval.str) = vtkstrcat("~",(yyvsp[(2) - (2)].str)); postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2491 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeInt64"; postSig(((*yyvalp).str)); }
     break;
 
-  case 303:
+  case 365:
 
-/* Line 1455 of yacc.c  */
-#line 2336 "vtkParse.y"
-    { (yyval.str) = vtkstrcat("~",(yyvsp[(2) - (2)].str)); postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2492 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeUInt64"; postSig(((*yyvalp).str)); }
     break;
 
-  case 304:
+  case 366:
 
-/* Line 1455 of yacc.c  */
-#line 2337 "vtkParse.y"
-    { (yyval.str) = vtkstrcat("~",(yyvsp[(2) - (2)].str)); postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2493 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeFloat32"; postSig(((*yyvalp).str)); }
     break;
 
-  case 305:
+  case 367:
 
-/* Line 1455 of yacc.c  */
-#line 2338 "vtkParse.y"
-    { (yyval.str) = vtkstrcat("~",(yyvsp[(2) - (2)].str)); postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2494 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkTypeFloat64"; postSig(((*yyvalp).str)); }
     break;
 
-  case 306:
+  case 368:
 
-/* Line 1455 of yacc.c  */
-#line 2339 "vtkParse.y"
-    { (yyval.str) = vtkstrcat("~",(yyvsp[(2) - (2)].str)); postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2495 "vtkParse.y"
+    { ((*yyvalp).str) = "vtkIdType"; postSig(((*yyvalp).str)); }
     break;
 
-  case 307:
+  case 379:
 
-/* Line 1455 of yacc.c  */
-#line 2340 "vtkParse.y"
-    { (yyval.str) = "size_t"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2520 "vtkParse.y"
+    { setTypeBase(buildTypeBase(getType(), (((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer))); }
     break;
 
-  case 308:
+  case 380:
 
-/* Line 1455 of yacc.c  */
-#line 2341 "vtkParse.y"
-    { (yyval.str) = "ssize_t"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2521 "vtkParse.y"
+    { setTypeMod(VTK_PARSE_TYPEDEF); }
     break;
 
-  case 309:
+  case 381:
 
-/* Line 1455 of yacc.c  */
-#line 2342 "vtkParse.y"
-    { (yyval.str) = "vtkTypeInt8"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2522 "vtkParse.y"
+    { setTypeMod(VTK_PARSE_FRIEND); }
     break;
 
-  case 310:
+  case 384:
 
-/* Line 1455 of yacc.c  */
-#line 2343 "vtkParse.y"
-    { (yyval.str) = "vtkTypeUInt8"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2529 "vtkParse.y"
+    { setTypeMod((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 311:
+  case 385:
 
-/* Line 1455 of yacc.c  */
-#line 2344 "vtkParse.y"
-    { (yyval.str) = "vtkTypeInt16"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2530 "vtkParse.y"
+    { setTypeMod((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 312:
+  case 386:
 
-/* Line 1455 of yacc.c  */
-#line 2345 "vtkParse.y"
-    { (yyval.str) = "vtkTypeUInt16"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2531 "vtkParse.y"
+    { setTypeMod((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 313:
+  case 387:
 
-/* Line 1455 of yacc.c  */
-#line 2346 "vtkParse.y"
-    { (yyval.str) = "vtkTypeInt32"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2532 "vtkParse.y"
+    { postSig("constexpr "); ((*yyvalp).integer) = 0; }
     break;
 
-  case 314:
+  case 388:
 
-/* Line 1455 of yacc.c  */
-#line 2347 "vtkParse.y"
-    { (yyval.str) = "vtkTypeUInt32"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2535 "vtkParse.y"
+    { postSig("mutable "); ((*yyvalp).integer) = VTK_PARSE_MUTABLE; }
     break;
 
-  case 315:
+  case 389:
 
-/* Line 1455 of yacc.c  */
-#line 2348 "vtkParse.y"
-    { (yyval.str) = "vtkTypeInt64"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2536 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
     break;
 
-  case 316:
+  case 390:
 
-/* Line 1455 of yacc.c  */
-#line 2349 "vtkParse.y"
-    { (yyval.str) = "vtkTypeUInt64"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2537 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
     break;
 
-  case 317:
+  case 391:
 
-/* Line 1455 of yacc.c  */
-#line 2350 "vtkParse.y"
-    { (yyval.str) = "vtkTypeFloat32"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2538 "vtkParse.y"
+    { postSig("static "); ((*yyvalp).integer) = VTK_PARSE_STATIC; }
     break;
 
-  case 318:
+  case 392:
 
-/* Line 1455 of yacc.c  */
-#line 2351 "vtkParse.y"
-    { (yyval.str) = "vtkTypeFloat64"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2540 "vtkParse.y"
+    { postSig("thread_local "); ((*yyvalp).integer) = VTK_PARSE_THREAD_LOCAL; }
     break;
 
-  case 319:
+  case 393:
 
-/* Line 1455 of yacc.c  */
-#line 2352 "vtkParse.y"
-    { (yyval.str) = "vtkIdType"; postSig((yyval.str)); }
+/* Line 936 of glr.c  */
+#line 2543 "vtkParse.y"
+    { ((*yyvalp).integer) = 0; }
     break;
 
-  case 330:
+  case 394:
 
-/* Line 1455 of yacc.c  */
-#line 2378 "vtkParse.y"
-    { setTypeBase(buildTypeBase(getType(), (yyvsp[(1) - (1)].integer))); }
+/* Line 936 of glr.c  */
+#line 2544 "vtkParse.y"
+    { postSig("virtual "); ((*yyvalp).integer) = VTK_PARSE_VIRTUAL; }
     break;
 
-  case 331:
+  case 395:
 
-/* Line 1455 of yacc.c  */
-#line 2379 "vtkParse.y"
-    { setTypeMod(VTK_PARSE_TYPEDEF); }
+/* Line 936 of glr.c  */
+#line 2545 "vtkParse.y"
+    { postSig("explicit "); ((*yyvalp).integer) = VTK_PARSE_EXPLICIT; }
     break;
 
-  case 332:
+  case 396:
 
-/* Line 1455 of yacc.c  */
-#line 2380 "vtkParse.y"
-    { setTypeMod(VTK_PARSE_FRIEND); }
+/* Line 936 of glr.c  */
+#line 2548 "vtkParse.y"
+    { postSig("const "); ((*yyvalp).integer) = VTK_PARSE_CONST; }
     break;
 
-  case 335:
+  case 397:
 
-/* Line 1455 of yacc.c  */
-#line 2387 "vtkParse.y"
-    { setTypeMod((yyvsp[(1) - (1)].integer)); }
+/* Line 936 of glr.c  */
+#line 2549 "vtkParse.y"
+    { postSig("volatile "); ((*yyvalp).integer) = VTK_PARSE_VOLATILE; }
     break;
 
-  case 336:
+  case 399:
 
-/* Line 1455 of yacc.c  */
-#line 2388 "vtkParse.y"
-    { setTypeMod((yyvsp[(1) - (1)].integer)); }
+/* Line 936 of glr.c  */
+#line 2554 "vtkParse.y"
+    { ((*yyvalp).integer) = ((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.integer) | (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 337:
+  case 401:
 
-/* Line 1455 of yacc.c  */
-#line 2389 "vtkParse.y"
-    { setTypeMod((yyvsp[(1) - (1)].integer)); }
+/* Line 936 of glr.c  */
+#line 2564 "vtkParse.y"
+    { setTypeBase((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 338:
+  case 403:
 
-/* Line 1455 of yacc.c  */
-#line 2392 "vtkParse.y"
-    { postSig("mutable "); (yyval.integer) = VTK_PARSE_MUTABLE; }
+/* Line 936 of glr.c  */
+#line 2566 "vtkParse.y"
+    { setTypeBase((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 339:
+  case 406:
 
-/* Line 1455 of yacc.c  */
-#line 2393 "vtkParse.y"
-    { (yyval.integer) = 0; }
+/* Line 936 of glr.c  */
+#line 2572 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (3))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 340:
+  case 407:
 
-/* Line 1455 of yacc.c  */
-#line 2394 "vtkParse.y"
-    { (yyval.integer) = 0; }
+/* Line 936 of glr.c  */
+#line 2574 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 341:
+  case 409:
 
-/* Line 1455 of yacc.c  */
-#line 2395 "vtkParse.y"
-    { postSig("static "); (yyval.integer) = VTK_PARSE_STATIC; }
+/* Line 936 of glr.c  */
+#line 2579 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = 0; }
     break;
 
-  case 342:
+  case 410:
 
-/* Line 1455 of yacc.c  */
-#line 2398 "vtkParse.y"
-    { (yyval.integer) = 0; }
+/* Line 936 of glr.c  */
+#line 2580 "vtkParse.y"
+    { postSig("typename "); }
     break;
 
-  case 343:
+  case 411:
 
-/* Line 1455 of yacc.c  */
-#line 2399 "vtkParse.y"
-    { postSig("virtual "); (yyval.integer) = VTK_PARSE_VIRTUAL; }
+/* Line 936 of glr.c  */
+#line 2581 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 344:
+  case 412:
 
-/* Line 1455 of yacc.c  */
-#line 2400 "vtkParse.y"
-    { postSig("explicit "); (yyval.integer) = VTK_PARSE_EXPLICIT; }
+/* Line 936 of glr.c  */
+#line 2583 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 345:
+  case 413:
 
-/* Line 1455 of yacc.c  */
-#line 2403 "vtkParse.y"
-    { postSig("const "); (yyval.integer) = VTK_PARSE_CONST; }
+/* Line 936 of glr.c  */
+#line 2585 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 346:
+  case 415:
 
-/* Line 1455 of yacc.c  */
-#line 2404 "vtkParse.y"
-    { postSig("volatile "); (yyval.integer) = VTK_PARSE_VOLATILE; }
+/* Line 936 of glr.c  */
+#line 2591 "vtkParse.y"
+    { setTypeBase((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 348:
+  case 417:
 
-/* Line 1455 of yacc.c  */
-#line 2409 "vtkParse.y"
-    { (yyval.integer) = ((yyvsp[(1) - (2)].integer) | (yyvsp[(2) - (2)].integer)); }
+/* Line 936 of glr.c  */
+#line 2593 "vtkParse.y"
+    { setTypeBase((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 350:
+  case 420:
 
-/* Line 1455 of yacc.c  */
-#line 2420 "vtkParse.y"
-    { setTypeBase((yyvsp[(1) - (1)].integer)); }
+/* Line 936 of glr.c  */
+#line 2600 "vtkParse.y"
+    { setTypeBase((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 352:
+  case 422:
 
-/* Line 1455 of yacc.c  */
-#line 2422 "vtkParse.y"
-    { setTypeBase((yyvsp[(2) - (2)].integer)); }
+/* Line 936 of glr.c  */
+#line 2602 "vtkParse.y"
+    { setTypeBase((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 355:
+  case 425:
 
-/* Line 1455 of yacc.c  */
-#line 2427 "vtkParse.y"
-    { postSig("typename "); }
+/* Line 936 of glr.c  */
+#line 2608 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = 0; }
     break;
 
-  case 356:
+  case 426:
 
-/* Line 1455 of yacc.c  */
-#line 2428 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(3) - (3)].str)); (yyval.integer) = guess_id_type((yyvsp[(3) - (3)].str)); }
+/* Line 936 of glr.c  */
+#line 2610 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 357:
+  case 427:
 
-/* Line 1455 of yacc.c  */
-#line 2430 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(1) - (1)].str)); (yyval.integer) = guess_id_type((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2612 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 358:
+  case 428:
 
-/* Line 1455 of yacc.c  */
-#line 2432 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(1) - (1)].str)); (yyval.integer) = guess_id_type((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2614 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 359:
+  case 429:
 
-/* Line 1455 of yacc.c  */
-#line 2434 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(2) - (2)].str)); (yyval.integer) = guess_id_type((yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2616 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 360:
+  case 430:
 
-/* Line 1455 of yacc.c  */
-#line 2436 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(2) - (2)].str)); (yyval.integer) = guess_id_type((yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2618 "vtkParse.y"
+    { postSig(" "); setTypeId((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = guess_id_type((((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
     break;
 
-  case 362:
+  case 431:
 
-/* Line 1455 of yacc.c  */
-#line 2442 "vtkParse.y"
-    { setTypeBase((yyvsp[(1) - (1)].integer)); }
+/* Line 936 of glr.c  */
+#line 2621 "vtkParse.y"
+    { setTypeId(""); }
     break;
 
-  case 364:
+  case 433:
 
-/* Line 1455 of yacc.c  */
-#line 2444 "vtkParse.y"
-    { setTypeBase((yyvsp[(2) - (2)].integer)); }
+/* Line 936 of glr.c  */
+#line 2625 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_STRING; }
     break;
 
-  case 367:
+  case 434:
 
-/* Line 1455 of yacc.c  */
-#line 2450 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(1) - (1)].str)); (yyval.integer) = guess_id_type((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2626 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_UNICODE_STRING;}
     break;
 
-  case 368:
+  case 435:
 
-/* Line 1455 of yacc.c  */
-#line 2452 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(1) - (1)].str)); (yyval.integer) = guess_id_type((yyvsp[(1) - (1)].str)); }
+/* Line 936 of glr.c  */
+#line 2627 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_OSTREAM; }
     break;
 
-  case 369:
+  case 436:
 
-/* Line 1455 of yacc.c  */
-#line 2454 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(2) - (2)].str)); (yyval.integer) = guess_id_type((yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2628 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_ISTREAM; }
     break;
 
-  case 370:
+  case 437:
 
-/* Line 1455 of yacc.c  */
-#line 2456 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(2) - (2)].str)); (yyval.integer) = guess_id_type((yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2629 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_UNKNOWN; }
     break;
 
-  case 371:
+  case 438:
 
-/* Line 1455 of yacc.c  */
-#line 2458 "vtkParse.y"
-    { postSig(" "); setTypeId((yyvsp[(2) - (2)].str)); (yyval.integer) = guess_id_type((yyvsp[(2) - (2)].str)); }
+/* Line 936 of glr.c  */
+#line 2630 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_OBJECT; }
     break;
 
-  case 372:
+  case 439:
 
-/* Line 1455 of yacc.c  */
-#line 2461 "vtkParse.y"
-    { setTypeId(""); }
+/* Line 936 of glr.c  */
+#line 2631 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_QOBJECT; }
     break;
 
-  case 374:
+  case 440:
 
-/* Line 1455 of yacc.c  */
-#line 2465 "vtkParse.y"
-    { typeSig((yyvsp[(1) - (1)].str)); (yyval.integer) = VTK_PARSE_STRING; }
+/* Line 936 of glr.c  */
+#line 2632 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_NULLPTR_T; }
     break;
 
-  case 375:
+  case 441:
 
-/* Line 1455 of yacc.c  */
-#line 2466 "vtkParse.y"
-    { typeSig((yyvsp[(1) - (1)].str)); (yyval.integer) = VTK_PARSE_UNICODE_STRING;}
+/* Line 936 of glr.c  */
+#line 2633 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_SSIZE_T; }
     break;
 
-  case 376:
+  case 442:
 
-/* Line 1455 of yacc.c  */
-#line 2467 "vtkParse.y"
-    { typeSig((yyvsp[(1) - (1)].str)); (yyval.integer) = VTK_PARSE_OSTREAM; }
+/* Line 936 of glr.c  */
+#line 2634 "vtkParse.y"
+    { typeSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); ((*yyvalp).integer) = VTK_PARSE_SIZE_T; }
     break;
 
-  case 377:
+  case 443:
 
-/* Line 1455 of yacc.c  */
-#line 2468 "vtkParse.y"
-    { typeSig((yyvsp[(1) - (1)].str)); (yyval.integer) = VTK_PARSE_ISTREAM; }
+/* Line 936 of glr.c  */
+#line 2635 "vtkParse.y"
+    { typeSig("vtkTypeInt8"); ((*yyvalp).integer) = VTK_PARSE_INT8; }
     break;
 
-  case 378:
+  case 444:
 
-/* Line 1455 of yacc.c  */
-#line 2469 "vtkParse.y"
-    { typeSig((yyvsp[(1) - (1)].str)); (yyval.integer) = VTK_PARSE_UNKNOWN; }
+/* Line 936 of glr.c  */
+#line 2636 "vtkParse.y"
+    { typeSig("vtkTypeUInt8"); ((*yyvalp).integer) = VTK_PARSE_UINT8; }
     break;
 
-  case 379:
+  case 445:
 
-/* Line 1455 of yacc.c  */
-#line 2470 "vtkParse.y"
-    { typeSig((yyvsp[(1) - (1)].str)); (yyval.integer) = VTK_PARSE_OBJECT; }
+/* Line 936 of glr.c  */
+#line 2637 "vtkParse.y"
+    { typeSig("vtkTypeInt16"); ((*yyvalp).integer) = VTK_PARSE_INT16; }
     break;
 
-  case 380:
+  case 446:
 
-/* Line 1455 of yacc.c  */
-#line 2471 "vtkParse.y"
-    { typeSig((yyvsp[(1) - (1)].str)); (yyval.integer) = VTK_PARSE_QOBJECT; }
+/* Line 936 of glr.c  */
+#line 2638 "vtkParse.y"
+    { typeSig("vtkTypeUInt16"); ((*yyvalp).integer) = VTK_PARSE_UINT16; }
     break;
 
-  case 381:
+  case 447:
 
-/* Line 1455 of yacc.c  */
-#line 2472 "vtkParse.y"
-    { typeSig("ssize_t"); (yyval.integer) = VTK_PARSE_SSIZE_T; }
+/* Line 936 of glr.c  */
+#line 2639 "vtkParse.y"
+    { typeSig("vtkTypeInt32"); ((*yyvalp).integer) = VTK_PARSE_INT32; }
     break;
 
-  case 382:
+  case 448:
 
-/* Line 1455 of yacc.c  */
-#line 2473 "vtkParse.y"
-    { typeSig("size_t"); (yyval.integer) = VTK_PARSE_SIZE_T; }
+/* Line 936 of glr.c  */
+#line 2640 "vtkParse.y"
+    { typeSig("vtkTypeUInt32"); ((*yyvalp).integer) = VTK_PARSE_UINT32; }
     break;
 
-  case 383:
+  case 449:
 
-/* Line 1455 of yacc.c  */
-#line 2474 "vtkParse.y"
-    { typeSig("vtkTypeInt8"); (yyval.integer) = VTK_PARSE_INT8; }
+/* Line 936 of glr.c  */
+#line 2641 "vtkParse.y"
+    { typeSig("vtkTypeInt64"); ((*yyvalp).integer) = VTK_PARSE_INT64; }
     break;
 
-  case 384:
+  case 450:
 
-/* Line 1455 of yacc.c  */
-#line 2475 "vtkParse.y"
-    { typeSig("vtkTypeUInt8"); (yyval.integer) = VTK_PARSE_UINT8; }
+/* Line 936 of glr.c  */
+#line 2642 "vtkParse.y"
+    { typeSig("vtkTypeUInt64"); ((*yyvalp).integer) = VTK_PARSE_UINT64; }
     break;
 
-  case 385:
+  case 451:
 
-/* Line 1455 of yacc.c  */
-#line 2476 "vtkParse.y"
-    { typeSig("vtkTypeInt16"); (yyval.integer) = VTK_PARSE_INT16; }
+/* Line 936 of glr.c  */
+#line 2643 "vtkParse.y"
+    { typeSig("vtkTypeFloat32"); ((*yyvalp).integer) = VTK_PARSE_FLOAT32; }
     break;
 
-  case 386:
+  case 452:
 
-/* Line 1455 of yacc.c  */
-#line 2477 "vtkParse.y"
-    { typeSig("vtkTypeUInt16"); (yyval.integer) = VTK_PARSE_UINT16; }
+/* Line 936 of glr.c  */
+#line 2644 "vtkParse.y"
+    { typeSig("vtkTypeFloat64"); ((*yyvalp).integer) = VTK_PARSE_FLOAT64; }
     break;
 
-  case 387:
+  case 453:
 
-/* Line 1455 of yacc.c  */
-#line 2478 "vtkParse.y"
-    { typeSig("vtkTypeInt32"); (yyval.integer) = VTK_PARSE_INT32; }
+/* Line 936 of glr.c  */
+#line 2645 "vtkParse.y"
+    { typeSig("vtkIdType"); ((*yyvalp).integer) = VTK_PARSE_ID_TYPE; }
     break;
 
-  case 388:
+  case 454:
 
-/* Line 1455 of yacc.c  */
-#line 2479 "vtkParse.y"
-    { typeSig("vtkTypeUInt32"); (yyval.integer) = VTK_PARSE_UINT32; }
+/* Line 936 of glr.c  */
+#line 2648 "vtkParse.y"
+    { postSig("auto "); ((*yyvalp).integer) = 0; }
     break;
 
-  case 389:
+  case 455:
 
-/* Line 1455 of yacc.c  */
-#line 2480 "vtkParse.y"
-    { typeSig("vtkTypeInt64"); (yyval.integer) = VTK_PARSE_INT64; }
+/* Line 936 of glr.c  */
+#line 2649 "vtkParse.y"
+    { postSig("void "); ((*yyvalp).integer) = VTK_PARSE_VOID; }
     break;
 
-  case 390:
+  case 456:
 
-/* Line 1455 of yacc.c  */
-#line 2481 "vtkParse.y"
-    { typeSig("vtkTypeUInt64"); (yyval.integer) = VTK_PARSE_UINT64; }
+/* Line 936 of glr.c  */
+#line 2650 "vtkParse.y"
+    { postSig("bool "); ((*yyvalp).integer) = VTK_PARSE_BOOL; }
     break;
 
-  case 391:
+  case 457:
 
-/* Line 1455 of yacc.c  */
-#line 2482 "vtkParse.y"
-    { typeSig("vtkTypeFloat32"); (yyval.integer) = VTK_PARSE_FLOAT32; }
+/* Line 936 of glr.c  */
+#line 2651 "vtkParse.y"
+    { postSig("float "); ((*yyvalp).integer) = VTK_PARSE_FLOAT; }
     break;
 
-  case 392:
+  case 458:
 
-/* Line 1455 of yacc.c  */
-#line 2483 "vtkParse.y"
-    { typeSig("vtkTypeFloat64"); (yyval.integer) = VTK_PARSE_FLOAT64; }
+/* Line 936 of glr.c  */
+#line 2652 "vtkParse.y"
+    { postSig("double "); ((*yyvalp).integer) = VTK_PARSE_DOUBLE; }
     break;
 
-  case 393:
+  case 459:
 
-/* Line 1455 of yacc.c  */
-#line 2484 "vtkParse.y"
-    { typeSig("vtkIdType"); (yyval.integer) = VTK_PARSE_ID_TYPE; }
+/* Line 936 of glr.c  */
+#line 2653 "vtkParse.y"
+    { postSig("char "); ((*yyvalp).integer) = VTK_PARSE_CHAR; }
     break;
 
-  case 394:
+  case 460:
 
-/* Line 1455 of yacc.c  */
-#line 2487 "vtkParse.y"
-    { postSig("void "); (yyval.integer) = VTK_PARSE_VOID; }
+/* Line 936 of glr.c  */
+#line 2654 "vtkParse.y"
+    { postSig("char16_t "); ((*yyvalp).integer) = VTK_PARSE_CHAR16_T; }
     break;
 
-  case 395:
+  case 461:
 
-/* Line 1455 of yacc.c  */
-#line 2488 "vtkParse.y"
-    { postSig("bool "); (yyval.integer) = VTK_PARSE_BOOL; }
+/* Line 936 of glr.c  */
+#line 2655 "vtkParse.y"
+    { postSig("char32_t "); ((*yyvalp).integer) = VTK_PARSE_CHAR32_T; }
     break;
 
-  case 396:
+  case 462:
 
-/* Line 1455 of yacc.c  */
-#line 2489 "vtkParse.y"
-    { postSig("float "); (yyval.integer) = VTK_PARSE_FLOAT; }
+/* Line 936 of glr.c  */
+#line 2656 "vtkParse.y"
+    { postSig("wchar_t "); ((*yyvalp).integer) = VTK_PARSE_WCHAR_T; }
     break;
 
-  case 397:
+  case 463:
 
-/* Line 1455 of yacc.c  */
-#line 2490 "vtkParse.y"
-    { postSig("double "); (yyval.integer) = VTK_PARSE_DOUBLE; }
+/* Line 936 of glr.c  */
+#line 2657 "vtkParse.y"
+    { postSig("int "); ((*yyvalp).integer) = VTK_PARSE_INT; }
     break;
 
-  case 398:
+  case 464:
 
-/* Line 1455 of yacc.c  */
-#line 2491 "vtkParse.y"
-    { postSig("char "); (yyval.integer) = VTK_PARSE_CHAR; }
+/* Line 936 of glr.c  */
+#line 2658 "vtkParse.y"
+    { postSig("short "); ((*yyvalp).integer) = VTK_PARSE_SHORT; }
     break;
 
-  case 399:
+  case 465:
 
-/* Line 1455 of yacc.c  */
-#line 2492 "vtkParse.y"
-    { postSig("int "); (yyval.integer) = VTK_PARSE_INT; }
+/* Line 936 of glr.c  */
+#line 2659 "vtkParse.y"
+    { postSig("long "); ((*yyvalp).integer) = VTK_PARSE_LONG; }
     break;
 
-  case 400:
+  case 466:
 
-/* Line 1455 of yacc.c  */
-#line 2493 "vtkParse.y"
-    { postSig("short "); (yyval.integer) = VTK_PARSE_SHORT; }
+/* Line 936 of glr.c  */
+#line 2660 "vtkParse.y"
+    { postSig("__int64 "); ((*yyvalp).integer) = VTK_PARSE___INT64; }
     break;
 
-  case 401:
+  case 467:
 
-/* Line 1455 of yacc.c  */
-#line 2494 "vtkParse.y"
-    { postSig("long "); (yyval.integer) = VTK_PARSE_LONG; }
+/* Line 936 of glr.c  */
+#line 2661 "vtkParse.y"
+    { postSig("signed "); ((*yyvalp).integer) = VTK_PARSE_INT; }
     break;
 
-  case 402:
+  case 468:
 
-/* Line 1455 of yacc.c  */
-#line 2495 "vtkParse.y"
-    { postSig("__int64 "); (yyval.integer) = VTK_PARSE___INT64; }
+/* Line 936 of glr.c  */
+#line 2662 "vtkParse.y"
+    { postSig("unsigned "); ((*yyvalp).integer) = VTK_PARSE_UNSIGNED_INT; }
     break;
 
-  case 403:
+  case 472:
 
-/* Line 1455 of yacc.c  */
-#line 2496 "vtkParse.y"
-    { postSig("signed "); (yyval.integer) = VTK_PARSE_INT; }
+/* Line 936 of glr.c  */
+#line 2685 "vtkParse.y"
+    { ((*yyvalp).integer) = ((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.integer) | (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer)); }
     break;
 
-  case 404:
+  case 473:
 
-/* Line 1455 of yacc.c  */
-#line 2497 "vtkParse.y"
-    { postSig("unsigned "); (yyval.integer) = VTK_PARSE_UNSIGNED_INT; }
+/* Line 936 of glr.c  */
+#line 2689 "vtkParse.y"
+    { postSig("&"); ((*yyvalp).integer) = VTK_PARSE_REF; }
     break;
 
-  case 407:
+  case 474:
 
-/* Line 1455 of yacc.c  */
-#line 2519 "vtkParse.y"
-    { (yyval.integer) = ((yyvsp[(1) - (2)].integer) | (yyvsp[(2) - (2)].integer)); }
+/* Line 936 of glr.c  */
+#line 2693 "vtkParse.y"
+    { postSig("&&"); ((*yyvalp).integer) = (VTK_PARSE_RVALUE | VTK_PARSE_REF); }
     break;
 
-  case 408:
+  case 475:
 
-/* Line 1455 of yacc.c  */
-#line 2522 "vtkParse.y"
-    { postSig("&"); (yyval.integer) = VTK_PARSE_REF; }
+/* Line 936 of glr.c  */
+#line 2696 "vtkParse.y"
+    { postSig("*"); }
     break;
 
-  case 409:
+  case 476:
 
-/* Line 1455 of yacc.c  */
-#line 2525 "vtkParse.y"
-    { postSig("*"); (yyval.integer) = VTK_PARSE_POINTER; }
+/* Line 936 of glr.c  */
+#line 2697 "vtkParse.y"
+    { ((*yyvalp).integer) = (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (4))].yystate.yysemantics.yysval.integer); }
     break;
 
-  case 410:
+  case 477:
 
-/* Line 1455 of yacc.c  */
-#line 2526 "vtkParse.y"
-    { postSig("*"); }
+/* Line 936 of glr.c  */
+#line 2700 "vtkParse.y"
+    { ((*yyvalp).integer) = VTK_PARSE_POINTER; }
     break;
 
-  case 411:
+  case 478:
 
-/* Line 1455 of yacc.c  */
-#line 2527 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2702 "vtkParse.y"
     {
-      if (((yyvsp[(3) - (3)].integer) & VTK_PARSE_CONST) != 0)
+      if (((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer) & VTK_PARSE_CONST) != 0)
         {
-        (yyval.integer) = VTK_PARSE_CONST_POINTER;
+        ((*yyvalp).integer) = VTK_PARSE_CONST_POINTER;
         }
-      if (((yyvsp[(3) - (3)].integer) & VTK_PARSE_VOLATILE) != 0)
+      if (((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.integer) & VTK_PARSE_VOLATILE) != 0)
         {
-        (yyval.integer) = VTK_PARSE_BAD_INDIRECT;
+        ((*yyvalp).integer) = VTK_PARSE_BAD_INDIRECT;
         }
     }
     break;
 
-  case 413:
+  case 480:
 
-/* Line 1455 of yacc.c  */
-#line 2543 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2718 "vtkParse.y"
     {
       unsigned int n;
-      n = (((yyvsp[(1) - (2)].integer) << 2) | (yyvsp[(2) - (2)].integer));
+      n = (((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (2))].yystate.yysemantics.yysval.integer) << 2) | (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.integer));
       if ((n & VTK_PARSE_INDIRECT) != n)
         {
         n = VTK_PARSE_BAD_INDIRECT;
         }
-      (yyval.integer) = n;
+      ((*yyvalp).integer) = n;
     }
     break;
 
-  case 414:
+  case 483:
+
+/* Line 936 of glr.c  */
+#line 2737 "vtkParse.y"
+    { closeSig(); }
+    break;
+
+  case 484:
+
+/* Line 936 of glr.c  */
+#line 2737 "vtkParse.y"
+    { openSig(); }
+    break;
 
-/* Line 1455 of yacc.c  */
-#line 2559 "vtkParse.y"
+  case 486:
+
+/* Line 936 of glr.c  */
+#line 2744 "vtkParse.y"
     {preSig("void Set"); postSig("(");}
     break;
 
-  case 415:
+  case 487:
 
-/* Line 1455 of yacc.c  */
-#line 2560 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2745 "vtkParse.y"
     {
    postSig("a);");
    currentFunction->Macro = "vtkSetMacro";
-   currentFunction->Name = vtkstrcat("Set", (yyvsp[(3) - (7)].str));
+   currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str));
    currentFunction->Comment = vtkstrdup(getComment());
    add_parameter(currentFunction, getType(), getTypeId(), 0);
    set_return(currentFunction, VTK_PARSE_VOID, "void", 0);
@@ -6606,56 +8377,56 @@ yyreduce:
    }
     break;
 
-  case 416:
+  case 488:
 
-/* Line 1455 of yacc.c  */
-#line 2569 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2754 "vtkParse.y"
     {postSig("Get");}
     break;
 
-  case 417:
+  case 489:
 
-/* Line 1455 of yacc.c  */
-#line 2570 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2755 "vtkParse.y"
     {markSig();}
     break;
 
-  case 418:
+  case 490:
 
-/* Line 1455 of yacc.c  */
-#line 2570 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2755 "vtkParse.y"
     {swapSig();}
     break;
 
-  case 419:
+  case 491:
 
-/* Line 1455 of yacc.c  */
-#line 2571 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2756 "vtkParse.y"
     {
    postSig("();");
    currentFunction->Macro = "vtkGetMacro";
-   currentFunction->Name = vtkstrcat("Get", (yyvsp[(4) - (9)].str));
+   currentFunction->Name = vtkstrcat("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (9))].yystate.yysemantics.yysval.str));
    currentFunction->Comment = vtkstrdup(getComment());
    set_return(currentFunction, getType(), getTypeId(), 0);
    output_function();
    }
     break;
 
-  case 420:
+  case 492:
 
-/* Line 1455 of yacc.c  */
-#line 2579 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2764 "vtkParse.y"
     {preSig("void Set");}
     break;
 
-  case 421:
+  case 493:
 
-/* Line 1455 of yacc.c  */
-#line 2580 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2765 "vtkParse.y"
     {
    postSig("(char *);");
    currentFunction->Macro = "vtkSetStringMacro";
-   currentFunction->Name = vtkstrcat("Set", (yyvsp[(4) - (5)].str));
+   currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (5))].yystate.yysemantics.yysval.str));
    currentFunction->Comment = vtkstrdup(getComment());
    add_parameter(currentFunction, VTK_PARSE_CHAR_PTR, "char", 0);
    set_return(currentFunction, VTK_PARSE_VOID, "void", 0);
@@ -6663,52 +8434,52 @@ yyreduce:
    }
     break;
 
-  case 422:
+  case 494:
 
-/* Line 1455 of yacc.c  */
-#line 2589 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2774 "vtkParse.y"
     {preSig("char *Get");}
     break;
 
-  case 423:
+  case 495:
 
-/* Line 1455 of yacc.c  */
-#line 2590 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2775 "vtkParse.y"
     {
    postSig("();");
    currentFunction->Macro = "vtkGetStringMacro";
-   currentFunction->Name = vtkstrcat("Get", (yyvsp[(4) - (5)].str));
+   currentFunction->Name = vtkstrcat("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (5))].yystate.yysemantics.yysval.str));
    currentFunction->Comment = vtkstrdup(getComment());
    set_return(currentFunction, VTK_PARSE_CHAR_PTR, "char", 0);
    output_function();
    }
     break;
 
-  case 424:
+  case 496:
 
-/* Line 1455 of yacc.c  */
-#line 2598 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2783 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 425:
+  case 497:
 
-/* Line 1455 of yacc.c  */
-#line 2598 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2783 "vtkParse.y"
     {closeSig();}
     break;
 
-  case 426:
+  case 498:
 
-/* Line 1455 of yacc.c  */
-#line 2600 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2785 "vtkParse.y"
     {
    const char *typeText;
    chopSig();
    typeText = copySig();
 
    currentFunction->Macro = "vtkSetClampMacro";
-   currentFunction->Name = vtkstrcat("Set", (yyvsp[(3) - (10)].str));
+   currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (10))].yystate.yysemantics.yysval.str));
    currentFunction->Signature =
      vtkstrcat5("void ", currentFunction->Name, "(", typeText, ");");
    currentFunction->Comment = vtkstrdup(getComment());
@@ -6717,7 +8488,7 @@ yyreduce:
    output_function();
 
    currentFunction->Macro = "vtkSetClampMacro";
-   currentFunction->Name = vtkstrcat3("Get", (yyvsp[(3) - (10)].str), "MinValue");
+   currentFunction->Name = vtkstrcat3("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (10))].yystate.yysemantics.yysval.str), "MinValue");
    currentFunction->Signature =
      vtkstrcat4(typeText, " ", currentFunction->Name, "();");
    currentFunction->Comment = vtkstrdup(getComment());
@@ -6725,7 +8496,7 @@ yyreduce:
    output_function();
 
    currentFunction->Macro = "vtkSetClampMacro";
-   currentFunction->Name = vtkstrcat3("Get", (yyvsp[(3) - (10)].str), "MaxValue");
+   currentFunction->Name = vtkstrcat3("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (10))].yystate.yysemantics.yysval.str), "MaxValue");
    currentFunction->Signature =
      vtkstrcat4(typeText, " ", currentFunction->Name, "();");
    currentFunction->Comment = vtkstrdup(getComment());
@@ -6734,21 +8505,21 @@ yyreduce:
    }
     break;
 
-  case 427:
+  case 499:
 
-/* Line 1455 of yacc.c  */
-#line 2631 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2816 "vtkParse.y"
     {preSig("void Set"); postSig("("); }
     break;
 
-  case 428:
+  case 500:
 
-/* Line 1455 of yacc.c  */
-#line 2632 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2817 "vtkParse.y"
     {
    postSig("*);");
    currentFunction->Macro = "vtkSetObjectMacro";
-   currentFunction->Name = vtkstrcat("Set", (yyvsp[(3) - (7)].str));
+   currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str));
    currentFunction->Comment = vtkstrdup(getComment());
    add_parameter(currentFunction, VTK_PARSE_OBJECT_PTR, getTypeId(), 0);
    set_return(currentFunction, VTK_PARSE_VOID, "void", 0);
@@ -6756,48 +8527,48 @@ yyreduce:
    }
     break;
 
-  case 429:
+  case 501:
 
-/* Line 1455 of yacc.c  */
-#line 2641 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2826 "vtkParse.y"
     {postSig("*Get");}
     break;
 
-  case 430:
+  case 502:
 
-/* Line 1455 of yacc.c  */
-#line 2642 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2827 "vtkParse.y"
     {markSig();}
     break;
 
-  case 431:
+  case 503:
 
-/* Line 1455 of yacc.c  */
-#line 2642 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2827 "vtkParse.y"
     {swapSig();}
     break;
 
-  case 432:
+  case 504:
 
-/* Line 1455 of yacc.c  */
-#line 2643 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2828 "vtkParse.y"
     {
    postSig("();");
    currentFunction->Macro = "vtkGetObjectMacro";
-   currentFunction->Name = vtkstrcat("Get", (yyvsp[(4) - (9)].str));
+   currentFunction->Name = vtkstrcat("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((4) - (9))].yystate.yysemantics.yysval.str));
    currentFunction->Comment = vtkstrdup(getComment());
    set_return(currentFunction, VTK_PARSE_OBJECT_PTR, getTypeId(), 0);
    output_function();
    }
     break;
 
-  case 433:
+  case 505:
 
-/* Line 1455 of yacc.c  */
-#line 2652 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2837 "vtkParse.y"
     {
    currentFunction->Macro = "vtkBooleanMacro";
-   currentFunction->Name = vtkstrcat((yyvsp[(3) - (6)].str), "On");
+   currentFunction->Name = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (6))].yystate.yysemantics.yysval.str), "On");
    currentFunction->Comment = vtkstrdup(getComment());
    currentFunction->Signature =
      vtkstrcat3("void ", currentFunction->Name, "();");
@@ -6805,7 +8576,7 @@ yyreduce:
    output_function();
 
    currentFunction->Macro = "vtkBooleanMacro";
-   currentFunction->Name = vtkstrcat((yyvsp[(3) - (6)].str), "Off");
+   currentFunction->Name = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (6))].yystate.yysemantics.yysval.str), "Off");
    currentFunction->Comment = vtkstrdup(getComment());
    currentFunction->Signature =
      vtkstrcat3("void ", currentFunction->Name, "();");
@@ -6814,202 +8585,202 @@ yyreduce:
    }
     break;
 
-  case 434:
+  case 506:
 
-/* Line 1455 of yacc.c  */
-#line 2669 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2854 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 435:
+  case 507:
 
-/* Line 1455 of yacc.c  */
-#line 2670 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2855 "vtkParse.y"
     {
    chopSig();
-   outputSetVectorMacro((yyvsp[(3) - (7)].str), getType(), copySig(), 2);
+   outputSetVectorMacro((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), getType(), copySig(), 2);
    }
     break;
 
-  case 436:
+  case 508:
 
-/* Line 1455 of yacc.c  */
-#line 2674 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2859 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 437:
+  case 509:
 
-/* Line 1455 of yacc.c  */
-#line 2675 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2860 "vtkParse.y"
     {
    chopSig();
-   outputGetVectorMacro((yyvsp[(3) - (7)].str), getType(), copySig(), 2);
+   outputGetVectorMacro((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), getType(), copySig(), 2);
    }
     break;
 
-  case 438:
+  case 510:
 
-/* Line 1455 of yacc.c  */
-#line 2679 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2864 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 439:
+  case 511:
 
-/* Line 1455 of yacc.c  */
-#line 2680 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2865 "vtkParse.y"
     {
    chopSig();
-   outputSetVectorMacro((yyvsp[(3) - (7)].str), getType(), copySig(), 3);
+   outputSetVectorMacro((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), getType(), copySig(), 3);
    }
     break;
 
-  case 440:
+  case 512:
 
-/* Line 1455 of yacc.c  */
-#line 2684 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2869 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 441:
+  case 513:
 
-/* Line 1455 of yacc.c  */
-#line 2685 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2870 "vtkParse.y"
     {
    chopSig();
-   outputGetVectorMacro((yyvsp[(3) - (7)].str), getType(), copySig(), 3);
+   outputGetVectorMacro((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), getType(), copySig(), 3);
    }
     break;
 
-  case 442:
+  case 514:
 
-/* Line 1455 of yacc.c  */
-#line 2689 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2874 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 443:
+  case 515:
 
-/* Line 1455 of yacc.c  */
-#line 2690 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2875 "vtkParse.y"
     {
    chopSig();
-   outputSetVectorMacro((yyvsp[(3) - (7)].str), getType(), copySig(), 4);
+   outputSetVectorMacro((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), getType(), copySig(), 4);
    }
     break;
 
-  case 444:
+  case 516:
 
-/* Line 1455 of yacc.c  */
-#line 2694 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2879 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 445:
+  case 517:
 
-/* Line 1455 of yacc.c  */
-#line 2695 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2880 "vtkParse.y"
     {
    chopSig();
-   outputGetVectorMacro((yyvsp[(3) - (7)].str), getType(), copySig(), 4);
+   outputGetVectorMacro((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), getType(), copySig(), 4);
    }
     break;
 
-  case 446:
+  case 518:
 
-/* Line 1455 of yacc.c  */
-#line 2699 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2884 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 447:
+  case 519:
 
-/* Line 1455 of yacc.c  */
-#line 2700 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2885 "vtkParse.y"
     {
    chopSig();
-   outputSetVectorMacro((yyvsp[(3) - (7)].str), getType(), copySig(), 6);
+   outputSetVectorMacro((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), getType(), copySig(), 6);
    }
     break;
 
-  case 448:
+  case 520:
 
-/* Line 1455 of yacc.c  */
-#line 2704 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2889 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 449:
+  case 521:
 
-/* Line 1455 of yacc.c  */
-#line 2705 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2890 "vtkParse.y"
     {
    chopSig();
-   outputGetVectorMacro((yyvsp[(3) - (7)].str), getType(), copySig(), 6);
+   outputGetVectorMacro((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), getType(), copySig(), 6);
    }
     break;
 
-  case 450:
+  case 522:
 
-/* Line 1455 of yacc.c  */
-#line 2709 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2894 "vtkParse.y"
     {startSig(); markSig();}
     break;
 
-  case 451:
+  case 523:
 
-/* Line 1455 of yacc.c  */
-#line 2711 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2896 "vtkParse.y"
     {
    const char *typeText;
    chopSig();
    typeText = copySig();
    currentFunction->Macro = "vtkSetVectorMacro";
-   currentFunction->Name = vtkstrcat("Set", (yyvsp[(3) - (9)].str));
+   currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (9))].yystate.yysemantics.yysval.str));
    currentFunction->Signature =
      vtkstrcat7("void ", currentFunction->Name, "(", typeText,
-                " a[", (yyvsp[(8) - (9)].str), "]);");
+                " a[", (((yyGLRStackItem const *)yyvsp)[YYFILL ((8) - (9))].yystate.yysemantics.yysval.str), "]);");
    currentFunction->Comment = vtkstrdup(getComment());
    add_parameter(currentFunction, (VTK_PARSE_POINTER | getType()),
-                 getTypeId(), (int)strtol((yyvsp[(8) - (9)].str), NULL, 0));
+                 getTypeId(), (int)strtol((((yyGLRStackItem const *)yyvsp)[YYFILL ((8) - (9))].yystate.yysemantics.yysval.str), NULL, 0));
    set_return(currentFunction, VTK_PARSE_VOID, "void", 0);
    output_function();
    }
     break;
 
-  case 452:
+  case 524:
 
-/* Line 1455 of yacc.c  */
-#line 2726 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2911 "vtkParse.y"
     {startSig();}
     break;
 
-  case 453:
+  case 525:
 
-/* Line 1455 of yacc.c  */
-#line 2728 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2913 "vtkParse.y"
     {
    chopSig();
    currentFunction->Macro = "vtkGetVectorMacro";
-   currentFunction->Name = vtkstrcat("Get", (yyvsp[(3) - (9)].str));
+   currentFunction->Name = vtkstrcat("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (9))].yystate.yysemantics.yysval.str));
    postSig(" *");
    postSig(currentFunction->Name);
    postSig("();");
    currentFunction->Comment = vtkstrdup(getComment());
    set_return(currentFunction, (VTK_PARSE_POINTER | getType()),
-              getTypeId(), (int)strtol((yyvsp[(8) - (9)].str), NULL, 0));
+              getTypeId(), (int)strtol((((yyGLRStackItem const *)yyvsp)[YYFILL ((8) - (9))].yystate.yysemantics.yysval.str), NULL, 0));
    output_function();
    }
     break;
 
-  case 454:
+  case 526:
 
-/* Line 1455 of yacc.c  */
-#line 2741 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2926 "vtkParse.y"
     {
      currentFunction->Macro = "vtkViewportCoordinateMacro";
-     currentFunction->Name = vtkstrcat3("Get", (yyvsp[(3) - (4)].str), "Coordinate");
+     currentFunction->Name = vtkstrcat3("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str), "Coordinate");
      currentFunction->Signature =
        vtkstrcat3("vtkCoordinate *", currentFunction->Name, "();");
      currentFunction->Comment = vtkstrdup(getComment());
@@ -7017,7 +8788,7 @@ yyreduce:
      output_function();
 
      currentFunction->Macro = "vtkViewportCoordinateMacro";
-     currentFunction->Name = vtkstrcat("Set", (yyvsp[(3) - (4)].str));
+     currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str));
      currentFunction->Signature =
        vtkstrcat3("void ", currentFunction->Name, "(double, double);");
      currentFunction->Comment = vtkstrdup(getComment());
@@ -7027,7 +8798,7 @@ yyreduce:
      output_function();
 
      currentFunction->Macro = "vtkViewportCoordinateMacro";
-     currentFunction->Name = vtkstrcat("Set", (yyvsp[(3) - (4)].str));
+     currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str));
      currentFunction->Signature =
        vtkstrcat3("void ", currentFunction->Name, "(double a[2]);");
      currentFunction->Comment = vtkstrdup(getComment());
@@ -7036,7 +8807,7 @@ yyreduce:
      output_function();
 
      currentFunction->Macro = "vtkViewportCoordinateMacro";
-     currentFunction->Name = vtkstrcat("Get", (yyvsp[(3) - (4)].str));
+     currentFunction->Name = vtkstrcat("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str));
      currentFunction->Signature =
        vtkstrcat3("double *", currentFunction->Name, "();");
      currentFunction->Comment = vtkstrdup(getComment());
@@ -7045,13 +8816,13 @@ yyreduce:
    }
     break;
 
-  case 455:
+  case 527:
 
-/* Line 1455 of yacc.c  */
-#line 2778 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 2963 "vtkParse.y"
     {
      currentFunction->Macro = "vtkWorldCoordinateMacro";
-     currentFunction->Name = vtkstrcat3("Get", (yyvsp[(3) - (4)].str), "Coordinate");
+     currentFunction->Name = vtkstrcat3("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str), "Coordinate");
      currentFunction->Signature =
        vtkstrcat3("vtkCoordinate *", currentFunction->Name, "();");
      currentFunction->Comment = vtkstrdup(getComment());
@@ -7059,7 +8830,7 @@ yyreduce:
      output_function();
 
      currentFunction->Macro = "vtkWorldCoordinateMacro";
-     currentFunction->Name = vtkstrcat("Set", (yyvsp[(3) - (4)].str));
+     currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str));
      currentFunction->Signature =
        vtkstrcat3("void ", currentFunction->Name, "(double, double, double);");
      currentFunction->Comment = vtkstrdup(getComment());
@@ -7070,7 +8841,7 @@ yyreduce:
      output_function();
 
      currentFunction->Macro = "vtkWorldCoordinateMacro";
-     currentFunction->Name = vtkstrcat("Set", (yyvsp[(3) - (4)].str));
+     currentFunction->Name = vtkstrcat("Set", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str));
      currentFunction->Signature =
        vtkstrcat3("void ", currentFunction->Name, "(double a[3]);");
      currentFunction->Comment = vtkstrdup(getComment());
@@ -7079,7 +8850,7 @@ yyreduce:
      output_function();
 
      currentFunction->Macro = "vtkWorldCoordinateMacro";
-     currentFunction->Name = vtkstrcat("Get", (yyvsp[(3) - (4)].str));
+     currentFunction->Name = vtkstrcat("Get", (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (4))].yystate.yysemantics.yysval.str));
      currentFunction->Signature =
        vtkstrcat3("double *", currentFunction->Name, "();");
      currentFunction->Comment = vtkstrdup(getComment());
@@ -7088,10 +8859,10 @@ yyreduce:
    }
     break;
 
-  case 456:
+  case 528:
 
-/* Line 1455 of yacc.c  */
-#line 2816 "vtkParse.y"
+/* Line 936 of glr.c  */
+#line 3001 "vtkParse.y"
     {
    currentFunction->Macro = "vtkTypeMacro";
    currentFunction->Name = "GetClassName";
@@ -7112,939 +8883,2512 @@ yyreduce:
 
    currentFunction->Macro = "vtkTypeMacro";
    currentFunction->Name = "NewInstance";
-   currentFunction->Signature = vtkstrcat((yyvsp[(3) - (7)].str), " *NewInstance();");
+   currentFunction->Signature = vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), " *NewInstance();");
    currentFunction->Comment = vtkstrdup(getComment());
-   set_return(currentFunction, VTK_PARSE_OBJECT_PTR, (yyvsp[(3) - (7)].str), 0);
+   set_return(currentFunction, VTK_PARSE_OBJECT_PTR, (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), 0);
    output_function();
 
    currentFunction->Macro = "vtkTypeMacro";
    currentFunction->Name = "SafeDownCast";
    currentFunction->Signature =
-     vtkstrcat((yyvsp[(3) - (7)].str), " *SafeDownCast(vtkObject* o);");
+     vtkstrcat((((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), " *SafeDownCast(vtkObject* o);");
    currentFunction->Comment = vtkstrdup(getComment());
    add_parameter(currentFunction, VTK_PARSE_OBJECT_PTR, "vtkObject", 0);
    set_return(currentFunction, (VTK_PARSE_STATIC | VTK_PARSE_OBJECT_PTR),
-              (yyvsp[(3) - (7)].str), 0);
+              (((yyGLRStackItem const *)yyvsp)[YYFILL ((3) - (7))].yystate.yysemantics.yysval.str), 0);
    output_function();
    }
     break;
 
-  case 459:
+  case 531:
+
+/* Line 936 of glr.c  */
+#line 3045 "vtkParse.y"
+    { ((*yyvalp).str) = "()"; }
+    break;
+
+  case 532:
+
+/* Line 936 of glr.c  */
+#line 3046 "vtkParse.y"
+    { ((*yyvalp).str) = "[]"; }
+    break;
+
+  case 533:
+
+/* Line 936 of glr.c  */
+#line 3047 "vtkParse.y"
+    { ((*yyvalp).str) = " new[]"; }
+    break;
+
+  case 534:
+
+/* Line 936 of glr.c  */
+#line 3048 "vtkParse.y"
+    { ((*yyvalp).str) = " delete[]"; }
+    break;
+
+  case 535:
+
+/* Line 936 of glr.c  */
+#line 3049 "vtkParse.y"
+    { ((*yyvalp).str) = "<"; }
+    break;
+
+  case 536:
+
+/* Line 936 of glr.c  */
+#line 3050 "vtkParse.y"
+    { ((*yyvalp).str) = ">"; }
+    break;
+
+  case 537:
+
+/* Line 936 of glr.c  */
+#line 3051 "vtkParse.y"
+    { ((*yyvalp).str) = ","; }
+    break;
+
+  case 538:
+
+/* Line 936 of glr.c  */
+#line 3052 "vtkParse.y"
+    { ((*yyvalp).str) = "="; }
+    break;
+
+  case 539:
+
+/* Line 936 of glr.c  */
+#line 3053 "vtkParse.y"
+    { ((*yyvalp).str) = ">>"; }
+    break;
+
+  case 540:
+
+/* Line 936 of glr.c  */
+#line 3054 "vtkParse.y"
+    { ((*yyvalp).str) = ">>"; }
+    break;
+
+  case 541:
+
+/* Line 936 of glr.c  */
+#line 3055 "vtkParse.y"
+    { ((*yyvalp).str) = vtkstrcat("\"\" ", (((yyGLRStackItem const *)yyvsp)[YYFILL ((2) - (2))].yystate.yysemantics.yysval.str)); }
+    break;
+
+  case 543:
+
+/* Line 936 of glr.c  */
+#line 3059 "vtkParse.y"
+    { ((*yyvalp).str) = "%"; }
+    break;
+
+  case 544:
+
+/* Line 936 of glr.c  */
+#line 3060 "vtkParse.y"
+    { ((*yyvalp).str) = "*"; }
+    break;
+
+  case 545:
+
+/* Line 936 of glr.c  */
+#line 3061 "vtkParse.y"
+    { ((*yyvalp).str) = "/"; }
+    break;
+
+  case 546:
+
+/* Line 936 of glr.c  */
+#line 3062 "vtkParse.y"
+    { ((*yyvalp).str) = "-"; }
+    break;
+
+  case 547:
+
+/* Line 936 of glr.c  */
+#line 3063 "vtkParse.y"
+    { ((*yyvalp).str) = "+"; }
+    break;
+
+  case 548:
+
+/* Line 936 of glr.c  */
+#line 3064 "vtkParse.y"
+    { ((*yyvalp).str) = "!"; }
+    break;
+
+  case 549:
+
+/* Line 936 of glr.c  */
+#line 3065 "vtkParse.y"
+    { ((*yyvalp).str) = "~"; }
+    break;
+
+  case 550:
+
+/* Line 936 of glr.c  */
+#line 3066 "vtkParse.y"
+    { ((*yyvalp).str) = "&"; }
+    break;
+
+  case 551:
+
+/* Line 936 of glr.c  */
+#line 3067 "vtkParse.y"
+    { ((*yyvalp).str) = "|"; }
+    break;
+
+  case 552:
+
+/* Line 936 of glr.c  */
+#line 3068 "vtkParse.y"
+    { ((*yyvalp).str) = "^"; }
+    break;
+
+  case 553:
+
+/* Line 936 of glr.c  */
+#line 3069 "vtkParse.y"
+    { ((*yyvalp).str) = " new"; }
+    break;
+
+  case 554:
+
+/* Line 936 of glr.c  */
+#line 3070 "vtkParse.y"
+    { ((*yyvalp).str) = " delete"; }
+    break;
+
+  case 555:
+
+/* Line 936 of glr.c  */
+#line 3071 "vtkParse.y"
+    { ((*yyvalp).str) = "<<="; }
+    break;
+
+  case 556:
+
+/* Line 936 of glr.c  */
+#line 3072 "vtkParse.y"
+    { ((*yyvalp).str) = ">>="; }
+    break;
+
+  case 557:
+
+/* Line 936 of glr.c  */
+#line 3073 "vtkParse.y"
+    { ((*yyvalp).str) = "<<"; }
+    break;
+
+  case 558:
+
+/* Line 936 of glr.c  */
+#line 3074 "vtkParse.y"
+    { ((*yyvalp).str) = ".*"; }
+    break;
+
+  case 559:
+
+/* Line 936 of glr.c  */
+#line 3075 "vtkParse.y"
+    { ((*yyvalp).str) = "->*"; }
+    break;
+
+  case 560:
+
+/* Line 936 of glr.c  */
+#line 3076 "vtkParse.y"
+    { ((*yyvalp).str) = "->"; }
+    break;
+
+  case 561:
+
+/* Line 936 of glr.c  */
+#line 3077 "vtkParse.y"
+    { ((*yyvalp).str) = "+="; }
+    break;
+
+  case 562:
+
+/* Line 936 of glr.c  */
+#line 3078 "vtkParse.y"
+    { ((*yyvalp).str) = "-="; }
+    break;
+
+  case 563:
+
+/* Line 936 of glr.c  */
+#line 3079 "vtkParse.y"
+    { ((*yyvalp).str) = "*="; }
+    break;
+
+  case 564:
+
+/* Line 936 of glr.c  */
+#line 3080 "vtkParse.y"
+    { ((*yyvalp).str) = "/="; }
+    break;
+
+  case 565:
+
+/* Line 936 of glr.c  */
+#line 3081 "vtkParse.y"
+    { ((*yyvalp).str) = "%="; }
+    break;
+
+  case 566:
+
+/* Line 936 of glr.c  */
+#line 3082 "vtkParse.y"
+    { ((*yyvalp).str) = "++"; }
+    break;
+
+  case 567:
+
+/* Line 936 of glr.c  */
+#line 3083 "vtkParse.y"
+    { ((*yyvalp).str) = "--"; }
+    break;
+
+  case 568:
+
+/* Line 936 of glr.c  */
+#line 3084 "vtkParse.y"
+    { ((*yyvalp).str) = "&="; }
+    break;
+
+  case 569:
+
+/* Line 936 of glr.c  */
+#line 3085 "vtkParse.y"
+    { ((*yyvalp).str) = "|="; }
+    break;
+
+  case 570:
+
+/* Line 936 of glr.c  */
+#line 3086 "vtkParse.y"
+    { ((*yyvalp).str) = "^="; }
+    break;
+
+  case 571:
+
+/* Line 936 of glr.c  */
+#line 3087 "vtkParse.y"
+    { ((*yyvalp).str) = "&&"; }
+    break;
+
+  case 572:
+
+/* Line 936 of glr.c  */
+#line 3088 "vtkParse.y"
+    { ((*yyvalp).str) = "||"; }
+    break;
+
+  case 573:
+
+/* Line 936 of glr.c  */
+#line 3089 "vtkParse.y"
+    { ((*yyvalp).str) = "=="; }
+    break;
+
+  case 574:
+
+/* Line 936 of glr.c  */
+#line 3090 "vtkParse.y"
+    { ((*yyvalp).str) = "!="; }
+    break;
+
+  case 575:
+
+/* Line 936 of glr.c  */
+#line 3091 "vtkParse.y"
+    { ((*yyvalp).str) = "<="; }
+    break;
+
+  case 576:
+
+/* Line 936 of glr.c  */
+#line 3092 "vtkParse.y"
+    { ((*yyvalp).str) = ">="; }
+    break;
+
+  case 577:
+
+/* Line 936 of glr.c  */
+#line 3095 "vtkParse.y"
+    { ((*yyvalp).str) = "typedef"; }
+    break;
+
+  case 578:
+
+/* Line 936 of glr.c  */
+#line 3096 "vtkParse.y"
+    { ((*yyvalp).str) = "typename"; }
+    break;
+
+  case 579:
+
+/* Line 936 of glr.c  */
+#line 3097 "vtkParse.y"
+    { ((*yyvalp).str) = "class"; }
+    break;
+
+  case 580:
 
-/* Line 1455 of yacc.c  */
-#line 2860 "vtkParse.y"
-    { (yyval.str) = "()"; }
+/* Line 936 of glr.c  */
+#line 3098 "vtkParse.y"
+    { ((*yyvalp).str) = "struct"; }
     break;
 
-  case 460:
+  case 581:
 
-/* Line 1455 of yacc.c  */
-#line 2861 "vtkParse.y"
-    { (yyval.str) = "[]"; }
+/* Line 936 of glr.c  */
+#line 3099 "vtkParse.y"
+    { ((*yyvalp).str) = "union"; }
     break;
 
-  case 461:
+  case 582:
 
-/* Line 1455 of yacc.c  */
-#line 2862 "vtkParse.y"
-    { (yyval.str) = " new[]"; }
+/* Line 936 of glr.c  */
+#line 3100 "vtkParse.y"
+    { ((*yyvalp).str) = "template"; }
     break;
 
-  case 462:
+  case 583:
 
-/* Line 1455 of yacc.c  */
-#line 2863 "vtkParse.y"
-    { (yyval.str) = " delete[]"; }
+/* Line 936 of glr.c  */
+#line 3101 "vtkParse.y"
+    { ((*yyvalp).str) = "public"; }
     break;
 
-  case 463:
+  case 584:
 
-/* Line 1455 of yacc.c  */
-#line 2864 "vtkParse.y"
-    { (yyval.str) = "<"; }
+/* Line 936 of glr.c  */
+#line 3102 "vtkParse.y"
+    { ((*yyvalp).str) = "protected"; }
     break;
 
-  case 464:
+  case 585:
 
-/* Line 1455 of yacc.c  */
-#line 2865 "vtkParse.y"
-    { (yyval.str) = ">"; }
+/* Line 936 of glr.c  */
+#line 3103 "vtkParse.y"
+    { ((*yyvalp).str) = "private"; }
     break;
 
-  case 465:
+  case 586:
 
-/* Line 1455 of yacc.c  */
-#line 2866 "vtkParse.y"
-    { (yyval.str) = ","; }
+/* Line 936 of glr.c  */
+#line 3104 "vtkParse.y"
+    { ((*yyvalp).str) = "const"; }
     break;
 
-  case 466:
+  case 587:
 
-/* Line 1455 of yacc.c  */
-#line 2867 "vtkParse.y"
-    { (yyval.str) = "="; }
+/* Line 936 of glr.c  */
+#line 3105 "vtkParse.y"
+    { ((*yyvalp).str) = "static"; }
     break;
 
-  case 468:
+  case 588:
 
-/* Line 1455 of yacc.c  */
-#line 2871 "vtkParse.y"
-    { (yyval.str) = "%"; }
+/* Line 936 of glr.c  */
+#line 3106 "vtkParse.y"
+    { ((*yyvalp).str) = "thread_local"; }
     break;
 
-  case 469:
+  case 589:
 
-/* Line 1455 of yacc.c  */
-#line 2872 "vtkParse.y"
-    { (yyval.str) = "*"; }
+/* Line 936 of glr.c  */
+#line 3107 "vtkParse.y"
+    { ((*yyvalp).str) = "constexpr"; }
     break;
 
-  case 470:
+  case 590:
 
-/* Line 1455 of yacc.c  */
-#line 2873 "vtkParse.y"
-    { (yyval.str) = "/"; }
+/* Line 936 of glr.c  */
+#line 3108 "vtkParse.y"
+    { ((*yyvalp).str) = "inline"; }
     break;
 
-  case 471:
+  case 591:
 
-/* Line 1455 of yacc.c  */
-#line 2874 "vtkParse.y"
-    { (yyval.str) = "-"; }
+/* Line 936 of glr.c  */
+#line 3109 "vtkParse.y"
+    { ((*yyvalp).str) = "virtual"; }
     break;
 
-  case 472:
+  case 592:
 
-/* Line 1455 of yacc.c  */
-#line 2875 "vtkParse.y"
-    { (yyval.str) = "+"; }
+/* Line 936 of glr.c  */
+#line 3110 "vtkParse.y"
+    { ((*yyvalp).str) = "explicit"; }
     break;
 
-  case 473:
+  case 593:
 
-/* Line 1455 of yacc.c  */
-#line 2876 "vtkParse.y"
-    { (yyval.str) = "!"; }
+/* Line 936 of glr.c  */
+#line 3111 "vtkParse.y"
+    { ((*yyvalp).str) = "decltype"; }
     break;
 
-  case 474:
+  case 594:
 
-/* Line 1455 of yacc.c  */
-#line 2877 "vtkParse.y"
-    { (yyval.str) = "~"; }
+/* Line 936 of glr.c  */
+#line 3112 "vtkParse.y"
+    { ((*yyvalp).str) = "default"; }
     break;
 
-  case 475:
+  case 595:
 
-/* Line 1455 of yacc.c  */
-#line 2878 "vtkParse.y"
-    { (yyval.str) = "&"; }
+/* Line 936 of glr.c  */
+#line 3113 "vtkParse.y"
+    { ((*yyvalp).str) = "extern"; }
     break;
 
-  case 476:
+  case 596:
 
-/* Line 1455 of yacc.c  */
-#line 2879 "vtkParse.y"
-    { (yyval.str) = "|"; }
+/* Line 936 of glr.c  */
+#line 3114 "vtkParse.y"
+    { ((*yyvalp).str) = "using"; }
     break;
 
-  case 477:
+  case 597:
 
-/* Line 1455 of yacc.c  */
-#line 2880 "vtkParse.y"
-    { (yyval.str) = "^"; }
+/* Line 936 of glr.c  */
+#line 3115 "vtkParse.y"
+    { ((*yyvalp).str) = "namespace"; }
     break;
 
-  case 478:
+  case 598:
 
-/* Line 1455 of yacc.c  */
-#line 2881 "vtkParse.y"
-    { (yyval.str) = " new"; }
+/* Line 936 of glr.c  */
+#line 3116 "vtkParse.y"
+    { ((*yyvalp).str) = "operator"; }
     break;
 
-  case 479:
+  case 599:
 
-/* Line 1455 of yacc.c  */
-#line 2882 "vtkParse.y"
-    { (yyval.str) = " delete"; }
+/* Line 936 of glr.c  */
+#line 3117 "vtkParse.y"
+    { ((*yyvalp).str) = "enum"; }
     break;
 
-  case 480:
+  case 600:
 
-/* Line 1455 of yacc.c  */
-#line 2883 "vtkParse.y"
-    { (yyval.str) = "<<="; }
+/* Line 936 of glr.c  */
+#line 3118 "vtkParse.y"
+    { ((*yyvalp).str) = "throw"; }
     break;
 
-  case 481:
+  case 601:
 
-/* Line 1455 of yacc.c  */
-#line 2884 "vtkParse.y"
-    { (yyval.str) = ">>="; }
+/* Line 936 of glr.c  */
+#line 3119 "vtkParse.y"
+    { ((*yyvalp).str) = "noexcept"; }
     break;
 
-  case 482:
+  case 602:
 
-/* Line 1455 of yacc.c  */
-#line 2885 "vtkParse.y"
-    { (yyval.str) = "<<"; }
+/* Line 936 of glr.c  */
+#line 3120 "vtkParse.y"
+    { ((*yyvalp).str) = "const_cast"; }
     break;
 
-  case 483:
+  case 603:
 
-/* Line 1455 of yacc.c  */
-#line 2886 "vtkParse.y"
-    { (yyval.str) = ">>"; }
+/* Line 936 of glr.c  */
+#line 3121 "vtkParse.y"
+    { ((*yyvalp).str) = "dynamic_cast"; }
     break;
 
-  case 484:
+  case 604:
 
-/* Line 1455 of yacc.c  */
-#line 2887 "vtkParse.y"
-    { (yyval.str) = ".*"; }
+/* Line 936 of glr.c  */
+#line 3122 "vtkParse.y"
+    { ((*yyvalp).str) = "static_cast"; }
     break;
 
-  case 485:
+  case 605:
 
-/* Line 1455 of yacc.c  */
-#line 2888 "vtkParse.y"
-    { (yyval.str) = "->*"; }
+/* Line 936 of glr.c  */
+#line 3123 "vtkParse.y"
+    { ((*yyvalp).str) = "reinterpret_cast"; }
     break;
 
-  case 486:
+  case 618:
 
-/* Line 1455 of yacc.c  */
-#line 2889 "vtkParse.y"
-    { (yyval.str) = "->"; }
+/* Line 936 of glr.c  */
+#line 3146 "vtkParse.y"
+    { postSig("< "); }
     break;
 
-  case 487:
+  case 619:
 
-/* Line 1455 of yacc.c  */
-#line 2890 "vtkParse.y"
-    { (yyval.str) = "+="; }
+/* Line 936 of glr.c  */
+#line 3147 "vtkParse.y"
+    { postSig("> "); }
     break;
 
-  case 488:
+  case 621:
 
-/* Line 1455 of yacc.c  */
-#line 2891 "vtkParse.y"
-    { (yyval.str) = "-="; }
+/* Line 936 of glr.c  */
+#line 3148 "vtkParse.y"
+    { postSig(">"); }
     break;
 
-  case 489:
+  case 623:
 
-/* Line 1455 of yacc.c  */
-#line 2892 "vtkParse.y"
-    { (yyval.str) = "*="; }
+/* Line 936 of glr.c  */
+#line 3152 "vtkParse.y"
+    { chopSig(); postSig("::"); }
     break;
 
-  case 490:
+  case 627:
 
-/* Line 1455 of yacc.c  */
-#line 2893 "vtkParse.y"
-    { (yyval.str) = "/="; }
+/* Line 936 of glr.c  */
+#line 3159 "vtkParse.y"
+    {
+      if ((((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str))[0] == '+' || ((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str))[0] == '-' ||
+           ((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str))[0] == '*' || ((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str))[0] == '&') &&
+          ((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str))[1] == '\0')
+        {
+        int c1 = 0;
+        size_t l;
+        const char *cp;
+        chopSig();
+        cp = getSig();
+        l = getSigLength();
+        if (l != 0) { c1 = cp[l-1]; }
+        if (c1 != 0 && c1 != '(' && c1 != '[' && c1 != '=')
+          {
+          postSig(" ");
+          }
+        postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str));
+        if (vtkParse_CharType(c1, (CPRE_IDGIT|CPRE_QUOTE)) ||
+            c1 == ')' || c1 == ']')
+          {
+          postSig(" ");
+          }
+        }
+       else
+        {
+        postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str));
+        postSig(" ");
+        }
+    }
     break;
 
-  case 491:
+  case 628:
 
-/* Line 1455 of yacc.c  */
-#line 2894 "vtkParse.y"
-    { (yyval.str) = "%="; }
+/* Line 936 of glr.c  */
+#line 3188 "vtkParse.y"
+    { postSig(":"); postSig(" "); }
     break;
 
-  case 492:
+  case 629:
 
-/* Line 1455 of yacc.c  */
-#line 2895 "vtkParse.y"
-    { (yyval.str) = "++"; }
+/* Line 936 of glr.c  */
+#line 3188 "vtkParse.y"
+    { postSig("."); }
     break;
 
-  case 493:
+  case 630:
 
-/* Line 1455 of yacc.c  */
-#line 2896 "vtkParse.y"
-    { (yyval.str) = "--"; }
+/* Line 936 of glr.c  */
+#line 3189 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); postSig(" "); }
     break;
 
-  case 494:
+  case 631:
 
-/* Line 1455 of yacc.c  */
-#line 2897 "vtkParse.y"
-    { (yyval.str) = "&="; }
+/* Line 936 of glr.c  */
+#line 3190 "vtkParse.y"
+    { postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); postSig(" "); }
     break;
 
-  case 495:
+  case 633:
 
-/* Line 1455 of yacc.c  */
-#line 2898 "vtkParse.y"
-    { (yyval.str) = "|="; }
+/* Line 936 of glr.c  */
+#line 3193 "vtkParse.y"
+    {
+      int c1 = 0;
+      size_t l;
+      const char *cp;
+      chopSig();
+      cp = getSig();
+      l = getSigLength();
+      if (l != 0) { c1 = cp[l-1]; }
+      while (vtkParse_CharType(c1, CPRE_IDGIT) && l != 0)
+        {
+        --l;
+        c1 = cp[l-1];
+        }
+      if (l < 2 || cp[l-1] != ':' || cp[l-2] != ':')
+        {
+        cp = add_const_scope(&cp[l]);
+        resetSig(l);
+        postSig(cp);
+        }
+      postSig(" ");
+    }
     break;
 
-  case 496:
+  case 637:
 
-/* Line 1455 of yacc.c  */
-#line 2899 "vtkParse.y"
-    { (yyval.str) = "^="; }
+/* Line 936 of glr.c  */
+#line 3220 "vtkParse.y"
+    { postSig("< "); }
     break;
 
-  case 497:
+  case 638:
 
-/* Line 1455 of yacc.c  */
-#line 2900 "vtkParse.y"
-    { (yyval.str) = "&&"; }
+/* Line 936 of glr.c  */
+#line 3221 "vtkParse.y"
+    { postSig("> "); }
     break;
 
-  case 498:
+  case 639:
 
-/* Line 1455 of yacc.c  */
-#line 2901 "vtkParse.y"
-    { (yyval.str) = "||"; }
+/* Line 936 of glr.c  */
+#line 3222 "vtkParse.y"
+    { postSig(">"); }
     break;
 
-  case 499:
+  case 641:
 
-/* Line 1455 of yacc.c  */
-#line 2902 "vtkParse.y"
-    { (yyval.str) = "=="; }
+/* Line 936 of glr.c  */
+#line 3226 "vtkParse.y"
+    { postSig("= "); }
     break;
 
-  case 500:
+  case 642:
 
-/* Line 1455 of yacc.c  */
-#line 2903 "vtkParse.y"
-    { (yyval.str) = "!="; }
+/* Line 936 of glr.c  */
+#line 3227 "vtkParse.y"
+    { chopSig(); postSig(", "); }
     break;
 
-  case 501:
+  case 644:
 
-/* Line 1455 of yacc.c  */
-#line 2904 "vtkParse.y"
-    { (yyval.str) = "<="; }
+/* Line 936 of glr.c  */
+#line 3231 "vtkParse.y"
+    { chopSig(); postSig(";"); }
     break;
 
-  case 502:
+  case 652:
 
-/* Line 1455 of yacc.c  */
-#line 2905 "vtkParse.y"
-    { (yyval.str) = ">="; }
+/* Line 936 of glr.c  */
+#line 3245 "vtkParse.y"
+    { postSig("= "); }
     break;
 
-  case 503:
+  case 653:
 
-/* Line 1455 of yacc.c  */
-#line 2908 "vtkParse.y"
-    { (yyval.str) = "typedef"; }
+/* Line 936 of glr.c  */
+#line 3246 "vtkParse.y"
+    { chopSig(); postSig(", "); }
     break;
 
-  case 504:
+  case 654:
 
-/* Line 1455 of yacc.c  */
-#line 2909 "vtkParse.y"
-    { (yyval.str) = "typename"; }
+/* Line 936 of glr.c  */
+#line 3250 "vtkParse.y"
+    {
+      chopSig();
+      if (getSig()[getSigLength()-1] == '<') { postSig(" "); }
+      postSig("<");
+    }
     break;
 
-  case 505:
+  case 655:
 
-/* Line 1455 of yacc.c  */
-#line 2910 "vtkParse.y"
-    { (yyval.str) = "class"; }
+/* Line 936 of glr.c  */
+#line 3256 "vtkParse.y"
+    {
+      chopSig();
+      if (getSig()[getSigLength()-1] == '>') { postSig(" "); }
+      postSig("> ");
+    }
     break;
 
-  case 506:
+  case 658:
 
-/* Line 1455 of yacc.c  */
-#line 2911 "vtkParse.y"
-    { (yyval.str) = "struct"; }
+/* Line 936 of glr.c  */
+#line 3267 "vtkParse.y"
+    { postSig("["); }
     break;
 
-  case 507:
+  case 659:
 
-/* Line 1455 of yacc.c  */
-#line 2912 "vtkParse.y"
-    { (yyval.str) = "union"; }
+/* Line 936 of glr.c  */
+#line 3268 "vtkParse.y"
+    { chopSig(); postSig("] "); }
     break;
 
-  case 508:
+  case 660:
 
-/* Line 1455 of yacc.c  */
-#line 2913 "vtkParse.y"
-    { (yyval.str) = "template"; }
+/* Line 936 of glr.c  */
+#line 3269 "vtkParse.y"
+    { postSig("[["); }
     break;
 
-  case 509:
+  case 661:
 
-/* Line 1455 of yacc.c  */
-#line 2914 "vtkParse.y"
-    { (yyval.str) = "public"; }
+/* Line 936 of glr.c  */
+#line 3270 "vtkParse.y"
+    { chopSig(); postSig("]] "); }
     break;
 
-  case 510:
+  case 662:
 
-/* Line 1455 of yacc.c  */
-#line 2915 "vtkParse.y"
-    { (yyval.str) = "protected"; }
+/* Line 936 of glr.c  */
+#line 3273 "vtkParse.y"
+    { postSig("("); }
     break;
 
-  case 511:
+  case 663:
 
-/* Line 1455 of yacc.c  */
-#line 2916 "vtkParse.y"
-    { (yyval.str) = "private"; }
+/* Line 936 of glr.c  */
+#line 3274 "vtkParse.y"
+    { chopSig(); postSig(") "); }
     break;
 
-  case 512:
+  case 664:
 
-/* Line 1455 of yacc.c  */
-#line 2917 "vtkParse.y"
-    { (yyval.str) = "const"; }
+/* Line 936 of glr.c  */
+#line 3275 "vtkParse.y"
+    { postSig("("); postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); postSig("*"); }
     break;
 
-  case 513:
+  case 665:
 
-/* Line 1455 of yacc.c  */
-#line 2918 "vtkParse.y"
-    { (yyval.str) = "static"; }
+/* Line 936 of glr.c  */
+#line 3276 "vtkParse.y"
+    { chopSig(); postSig(") "); }
     break;
 
-  case 514:
+  case 666:
 
-/* Line 1455 of yacc.c  */
-#line 2919 "vtkParse.y"
-    { (yyval.str) = "inline"; }
+/* Line 936 of glr.c  */
+#line 3277 "vtkParse.y"
+    { postSig("("); postSig((((yyGLRStackItem const *)yyvsp)[YYFILL ((1) - (1))].yystate.yysemantics.yysval.str)); postSig("&"); }
     break;
 
-  case 515:
+  case 667:
 
-/* Line 1455 of yacc.c  */
-#line 2920 "vtkParse.y"
-    { (yyval.str) = "virtual"; }
+/* Line 936 of glr.c  */
+#line 3278 "vtkParse.y"
+    { chopSig(); postSig(") "); }
     break;
 
-  case 516:
+  case 668:
 
-/* Line 1455 of yacc.c  */
-#line 2921 "vtkParse.y"
-    { (yyval.str) = "extern"; }
+/* Line 936 of glr.c  */
+#line 3281 "vtkParse.y"
+    { postSig("{ "); }
     break;
 
-  case 517:
+  case 669:
 
-/* Line 1455 of yacc.c  */
-#line 2922 "vtkParse.y"
-    { (yyval.str) = "using"; }
+/* Line 936 of glr.c  */
+#line 3281 "vtkParse.y"
+    { postSig("} "); }
     break;
 
-  case 518:
 
-/* Line 1455 of yacc.c  */
-#line 2923 "vtkParse.y"
-    { (yyval.str) = "namespace"; }
-    break;
 
-  case 519:
+/* Line 936 of glr.c  */
+#line 9710 "vtkParse.tab.c"
+      default: break;
+    }
 
-/* Line 1455 of yacc.c  */
-#line 2924 "vtkParse.y"
-    { (yyval.str) = "operator"; }
-    break;
+  return yyok;
+# undef yyerrok
+# undef YYABORT
+# undef YYACCEPT
+# undef YYERROR
+# undef YYBACKUP
+# undef yyclearin
+# undef YYRECOVERING
+}
+

 
-  case 520:
+/*ARGSUSED*/ static void
+yyuserMerge (int yyn, YYSTYPE* yy0, YYSTYPE* yy1)
+{
+  YYUSE (yy0);
+  YYUSE (yy1);
 
-/* Line 1455 of yacc.c  */
-#line 2925 "vtkParse.y"
-    { (yyval.str) = "enum"; }
-    break;
+  switch (yyn)
+    {
 
-  case 521:
+      default: break;
+    }
+}
 
-/* Line 1455 of yacc.c  */
-#line 2926 "vtkParse.y"
-    { (yyval.str) = "throw"; }
-    break;
+                              /* Bison grammar-table manipulation.  */
 
-  case 522:
+/*-----------------------------------------------.
+| Release the memory associated to this symbol.  |
+`-----------------------------------------------*/
 
-/* Line 1455 of yacc.c  */
-#line 2927 "vtkParse.y"
-    { (yyval.str) = "const_cast"; }
-    break;
+/*ARGSUSED*/
+static void
+yydestruct (const char *yymsg, int yytype, YYSTYPE *yyvaluep)
+{
+  YYUSE (yyvaluep);
 
-  case 523:
+  if (!yymsg)
+    yymsg = "Deleting";
+  YY_SYMBOL_PRINT (yymsg, yytype, yyvaluep, yylocationp);
 
-/* Line 1455 of yacc.c  */
-#line 2928 "vtkParse.y"
-    { (yyval.str) = "dynamic_cast"; }
-    break;
+  switch (yytype)
+    {
 
-  case 524:
+      default:
+        break;
+    }
+}
 
-/* Line 1455 of yacc.c  */
-#line 2929 "vtkParse.y"
-    { (yyval.str) = "static_cast"; }
-    break;
+/** Number of symbols composing the right hand side of rule #RULE.  */
+static int
+yyrhsLength (yyRuleNum yyrule)
+{
+  return yyr2[yyrule];
+}
 
-  case 525:
+static void
+yydestroyGLRState (char const *yymsg, yyGLRState *yys)
+{
+  if (yys->yyresolved)
+    yydestruct (yymsg, yystos[yys->yylrState],
+                &yys->yysemantics.yysval);
+  else
+    {
+#if YYDEBUG
+      if (yydebug)
+        {
+          if (yys->yysemantics.yyfirstVal)
+            YYFPRINTF (stderr, "%s unresolved ", yymsg);
+          else
+            YYFPRINTF (stderr, "%s incomplete ", yymsg);
+          yy_symbol_print (stderr, yystos[yys->yylrState],
+                           NULL);
+          YYFPRINTF (stderr, "\n");
+        }
+#endif
+
+      if (yys->yysemantics.yyfirstVal)
+        {
+          yySemanticOption *yyoption = yys->yysemantics.yyfirstVal;
+          yyGLRState *yyrh;
+          int yyn;
+          for (yyrh = yyoption->yystate, yyn = yyrhsLength (yyoption->yyrule);
+               yyn > 0;
+               yyrh = yyrh->yypred, yyn -= 1)
+            yydestroyGLRState (yymsg, yyrh);
+        }
+    }
+}
+
+/** Left-hand-side symbol for rule #RULE.  */
+static yySymbol
+yylhsNonterm (yyRuleNum yyrule)
+{
+  return yyr1[yyrule];
+}
+
+#define yyis_pact_ninf(yystate) \
+  ((yystate) == YYPACT_NINF)
+
+/** True iff LR state STATE has only a default reduction (regardless
+ *  of token).  */
+static yybool
+yyisDefaultedState (yyStateNum yystate)
+{
+  return yyis_pact_ninf (yypact[yystate]);
+}
+
+/** The default reduction for STATE, assuming it has one.  */
+static yyRuleNum
+yydefaultAction (yyStateNum yystate)
+{
+  return yydefact[yystate];
+}
+
+#define yyis_table_ninf(yytable_value) \
+  YYID (0)
+
+/** Set *YYACTION to the action to take in YYSTATE on seeing YYTOKEN.
+ *  Result R means
+ *    R < 0:  Reduce on rule -R.
+ *    R = 0:  Error.
+ *    R > 0:  Shift to state R.
+ *  Set *CONFLICTS to a pointer into yyconfl to 0-terminated list of
+ *  conflicting reductions.
+ */
+static void
+yygetLRActions (yyStateNum yystate, int yytoken,
+                int* yyaction, const short int** yyconflicts)
+{
+  int yyindex = yypact[yystate] + yytoken;
+  if (yyindex < 0 || YYLAST < yyindex || yycheck[yyindex] != yytoken)
+    {
+      *yyaction = -yydefact[yystate];
+      *yyconflicts = yyconfl;
+    }
+  else if (! yyis_table_ninf (yytable[yyindex]))
+    {
+      *yyaction = yytable[yyindex];
+      *yyconflicts = yyconfl + yyconflp[yyindex];
+    }
+  else
+    {
+      *yyaction = 0;
+      *yyconflicts = yyconfl + yyconflp[yyindex];
+    }
+}
+
+static yyStateNum
+yyLRgotoState (yyStateNum yystate, yySymbol yylhs)
+{
+  int yyr;
+  yyr = yypgoto[yylhs - YYNTOKENS] + yystate;
+  if (0 <= yyr && yyr <= YYLAST && yycheck[yyr] == yystate)
+    return yytable[yyr];
+  else
+    return yydefgoto[yylhs - YYNTOKENS];
+}
+
+static yybool
+yyisShiftAction (int yyaction)
+{
+  return 0 < yyaction;
+}
+
+static yybool
+yyisErrorAction (int yyaction)
+{
+  return yyaction == 0;
+}
+
+                                /* GLRStates */
+
+/** Return a fresh GLRStackItem.  Callers should call
+ * YY_RESERVE_GLRSTACK afterwards to make sure there is sufficient
+ * headroom.  */
+
+static yyGLRStackItem*
+yynewGLRStackItem (yyGLRStack* yystackp, yybool yyisState)
+{
+  yyGLRStackItem* yynewItem = yystackp->yynextFree;
+  yystackp->yyspaceLeft -= 1;
+  yystackp->yynextFree += 1;
+  yynewItem->yystate.yyisState = yyisState;
+  return yynewItem;
+}
+
+/** Add a new semantic action that will execute the action for rule
+ *  RULENUM on the semantic values in RHS to the list of
+ *  alternative actions for STATE.  Assumes that RHS comes from
+ *  stack #K of *STACKP. */
+static void
+yyaddDeferredAction (yyGLRStack* yystackp, size_t yyk, yyGLRState* yystate,
+                     yyGLRState* rhs, yyRuleNum yyrule)
+{
+  yySemanticOption* yynewOption =
+    &yynewGLRStackItem (yystackp, yyfalse)->yyoption;
+  yynewOption->yystate = rhs;
+  yynewOption->yyrule = yyrule;
+  if (yystackp->yytops.yylookaheadNeeds[yyk])
+    {
+      yynewOption->yyrawchar = yychar;
+      yynewOption->yyval = yylval;
+      yynewOption->yyloc = yylloc;
+    }
+  else
+    yynewOption->yyrawchar = YYEMPTY;
+  yynewOption->yynext = yystate->yysemantics.yyfirstVal;
+  yystate->yysemantics.yyfirstVal = yynewOption;
+
+  YY_RESERVE_GLRSTACK (yystackp);
+}
+
+                                /* GLRStacks */
+
+/** Initialize SET to a singleton set containing an empty stack.  */
+static yybool
+yyinitStateSet (yyGLRStateSet* yyset)
+{
+  yyset->yysize = 1;
+  yyset->yycapacity = 16;
+  yyset->yystates = (yyGLRState**) YYMALLOC (16 * sizeof yyset->yystates[0]);
+  if (! yyset->yystates)
+    return yyfalse;
+  yyset->yystates[0] = NULL;
+  yyset->yylookaheadNeeds =
+    (yybool*) YYMALLOC (16 * sizeof yyset->yylookaheadNeeds[0]);
+  if (! yyset->yylookaheadNeeds)
+    {
+      YYFREE (yyset->yystates);
+      return yyfalse;
+    }
+  return yytrue;
+}
+
+static void yyfreeStateSet (yyGLRStateSet* yyset)
+{
+  YYFREE (yyset->yystates);
+  YYFREE (yyset->yylookaheadNeeds);
+}
+
+/** Initialize STACK to a single empty stack, with total maximum
+ *  capacity for all stacks of SIZE.  */
+static yybool
+yyinitGLRStack (yyGLRStack* yystackp, size_t yysize)
+{
+  yystackp->yyerrState = 0;
+  yynerrs = 0;
+  yystackp->yyspaceLeft = yysize;
+  yystackp->yyitems =
+    (yyGLRStackItem*) YYMALLOC (yysize * sizeof yystackp->yynextFree[0]);
+  if (!yystackp->yyitems)
+    return yyfalse;
+  yystackp->yynextFree = yystackp->yyitems;
+  yystackp->yysplitPoint = NULL;
+  yystackp->yylastDeleted = NULL;
+  return yyinitStateSet (&yystackp->yytops);
+}
 
-/* Line 1455 of yacc.c  */
-#line 2930 "vtkParse.y"
-    { (yyval.str) = "reinterpret_cast"; }
-    break;
 
-  case 538:
+#if YYSTACKEXPANDABLE
+# define YYRELOC(YYFROMITEMS,YYTOITEMS,YYX,YYTYPE) \
+  &((YYTOITEMS) - ((YYFROMITEMS) - (yyGLRStackItem*) (YYX)))->YYTYPE
 
-/* Line 1455 of yacc.c  */
-#line 2954 "vtkParse.y"
-    {
-      if ((((yyvsp[(1) - (1)].str))[0] == '+' || ((yyvsp[(1) - (1)].str))[0] == '-' ||
-           ((yyvsp[(1) - (1)].str))[0] == '*' || ((yyvsp[(1) - (1)].str))[0] == '&') &&
-          ((yyvsp[(1) - (1)].str))[1] == '\0')
+/** If STACK is expandable, extend it.  WARNING: Pointers into the
+    stack from outside should be considered invalid after this call.
+    We always expand when there are 1 or fewer items left AFTER an
+    allocation, so that we can avoid having external pointers exist
+    across an allocation.  */
+static void
+yyexpandGLRStack (yyGLRStack* yystackp)
+{
+  yyGLRStackItem* yynewItems;
+  yyGLRStackItem* yyp0, *yyp1;
+  size_t yysize, yynewSize;
+  size_t yyn;
+  yysize = yystackp->yynextFree - yystackp->yyitems;
+  if (YYMAXDEPTH - YYHEADROOM < yysize)
+    yyMemoryExhausted (yystackp);
+  yynewSize = 2*yysize;
+  if (YYMAXDEPTH < yynewSize)
+    yynewSize = YYMAXDEPTH;
+  yynewItems = (yyGLRStackItem*) YYMALLOC (yynewSize * sizeof yynewItems[0]);
+  if (! yynewItems)
+    yyMemoryExhausted (yystackp);
+  for (yyp0 = yystackp->yyitems, yyp1 = yynewItems, yyn = yysize;
+       0 < yyn;
+       yyn -= 1, yyp0 += 1, yyp1 += 1)
+    {
+      *yyp1 = *yyp0;
+      if (*(yybool *) yyp0)
         {
-        int c1 = 0;
-        size_t l;
-        const char *cp;
-        chopSig();
-        cp = getSig();
-        l = getSigLength();
-        if (l != 0) { c1 = cp[l-1]; }
-        if (c1 != 0 && c1 != '(' && c1 != '[' && c1 != '=')
-          {
-          postSig(" ");
-          }
-        postSig((yyvsp[(1) - (1)].str));
-        if ((c1 >= 'A' && c1 <= 'Z') || (c1 >= 'a' && c1 <= 'z') ||
-            (c1 >= '0' && c1 <= '9') || c1 == '_' || c1 == '\'' ||
-            c1 == '\"' || c1 == ')' || c1 == ']')
-          {
-          postSig(" ");
-          }
+          yyGLRState* yys0 = &yyp0->yystate;
+          yyGLRState* yys1 = &yyp1->yystate;
+          if (yys0->yypred != NULL)
+            yys1->yypred =
+              YYRELOC (yyp0, yyp1, yys0->yypred, yystate);
+          if (! yys0->yyresolved && yys0->yysemantics.yyfirstVal != NULL)
+            yys1->yysemantics.yyfirstVal =
+              YYRELOC(yyp0, yyp1, yys0->yysemantics.yyfirstVal, yyoption);
         }
-       else
+      else
         {
-        postSig((yyvsp[(1) - (1)].str));
-        postSig(" ");
+          yySemanticOption* yyv0 = &yyp0->yyoption;
+          yySemanticOption* yyv1 = &yyp1->yyoption;
+          if (yyv0->yystate != NULL)
+            yyv1->yystate = YYRELOC (yyp0, yyp1, yyv0->yystate, yystate);
+          if (yyv0->yynext != NULL)
+            yyv1->yynext = YYRELOC (yyp0, yyp1, yyv0->yynext, yyoption);
         }
     }
-    break;
+  if (yystackp->yysplitPoint != NULL)
+    yystackp->yysplitPoint = YYRELOC (yystackp->yyitems, yynewItems,
+                                 yystackp->yysplitPoint, yystate);
+
+  for (yyn = 0; yyn < yystackp->yytops.yysize; yyn += 1)
+    if (yystackp->yytops.yystates[yyn] != NULL)
+      yystackp->yytops.yystates[yyn] =
+        YYRELOC (yystackp->yyitems, yynewItems,
+                 yystackp->yytops.yystates[yyn], yystate);
+  YYFREE (yystackp->yyitems);
+  yystackp->yyitems = yynewItems;
+  yystackp->yynextFree = yynewItems + yysize;
+  yystackp->yyspaceLeft = yynewSize - yysize;
+}
+#endif
 
-  case 539:
+static void
+yyfreeGLRStack (yyGLRStack* yystackp)
+{
+  YYFREE (yystackp->yyitems);
+  yyfreeStateSet (&yystackp->yytops);
+}
 
-/* Line 1455 of yacc.c  */
-#line 2984 "vtkParse.y"
-    { postSig(":"); postSig(" "); }
-    break;
+/** Assuming that S is a GLRState somewhere on STACK, update the
+ *  splitpoint of STACK, if needed, so that it is at least as deep as
+ *  S.  */
+static void
+yyupdateSplit (yyGLRStack* yystackp, yyGLRState* yys)
+{
+  if (yystackp->yysplitPoint != NULL && yystackp->yysplitPoint > yys)
+    yystackp->yysplitPoint = yys;
+}
 
-  case 540:
+/** Invalidate stack #K in STACK.  */
+static void
+yymarkStackDeleted (yyGLRStack* yystackp, size_t yyk)
+{
+  if (yystackp->yytops.yystates[yyk] != NULL)
+    yystackp->yylastDeleted = yystackp->yytops.yystates[yyk];
+  yystackp->yytops.yystates[yyk] = NULL;
+}
 
-/* Line 1455 of yacc.c  */
-#line 2984 "vtkParse.y"
-    { postSig("."); }
-    break;
+/** Undelete the last stack that was marked as deleted.  Can only be
+    done once after a deletion, and only when all other stacks have
+    been deleted.  */
+static void
+yyundeleteLastStack (yyGLRStack* yystackp)
+{
+  if (yystackp->yylastDeleted == NULL || yystackp->yytops.yysize != 0)
+    return;
+  yystackp->yytops.yystates[0] = yystackp->yylastDeleted;
+  yystackp->yytops.yysize = 1;
+  YYDPRINTF ((stderr, "Restoring last deleted stack as stack #0.\n"));
+  yystackp->yylastDeleted = NULL;
+}
 
-  case 541:
+static void
+yyremoveDeletes (yyGLRStack* yystackp)
+{
+  size_t yyi, yyj;
+  yyi = yyj = 0;
+  while (yyj < yystackp->yytops.yysize)
+    {
+      if (yystackp->yytops.yystates[yyi] == NULL)
+        {
+          if (yyi == yyj)
+            {
+              YYDPRINTF ((stderr, "Removing dead stacks.\n"));
+            }
+          yystackp->yytops.yysize -= 1;
+        }
+      else
+        {
+          yystackp->yytops.yystates[yyj] = yystackp->yytops.yystates[yyi];
+          /* In the current implementation, it's unnecessary to copy
+             yystackp->yytops.yylookaheadNeeds[yyi] since, after
+             yyremoveDeletes returns, the parser immediately either enters
+             deterministic operation or shifts a token.  However, it doesn't
+             hurt, and the code might evolve to need it.  */
+          yystackp->yytops.yylookaheadNeeds[yyj] =
+            yystackp->yytops.yylookaheadNeeds[yyi];
+          if (yyj != yyi)
+            {
+              YYDPRINTF ((stderr, "Rename stack %lu -> %lu.\n",
+                          (unsigned long int) yyi, (unsigned long int) yyj));
+            }
+          yyj += 1;
+        }
+      yyi += 1;
+    }
+}
 
-/* Line 1455 of yacc.c  */
-#line 2985 "vtkParse.y"
-    { chopSig(); postSig("::"); }
-    break;
+/** Shift to a new state on stack #K of STACK, corresponding to LR state
+ * LRSTATE, at input position POSN, with (resolved) semantic value SVAL.  */
+static void
+yyglrShift (yyGLRStack* yystackp, size_t yyk, yyStateNum yylrState,
+            size_t yyposn,
+            YYSTYPE* yyvalp, YYLTYPE* yylocp)
+{
+  yyGLRState* yynewState = &yynewGLRStackItem (yystackp, yytrue)->yystate;
 
-  case 542:
+  yynewState->yylrState = yylrState;
+  yynewState->yyposn = yyposn;
+  yynewState->yyresolved = yytrue;
+  yynewState->yypred = yystackp->yytops.yystates[yyk];
+  yynewState->yysemantics.yysval = *yyvalp;
+  yynewState->yyloc = *yylocp;
+  yystackp->yytops.yystates[yyk] = yynewState;
 
-/* Line 1455 of yacc.c  */
-#line 2986 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); postSig(" "); }
-    break;
+  YY_RESERVE_GLRSTACK (yystackp);
+}
 
-  case 543:
+/** Shift stack #K of YYSTACK, to a new state corresponding to LR
+ *  state YYLRSTATE, at input position YYPOSN, with the (unresolved)
+ *  semantic value of YYRHS under the action for YYRULE.  */
+static void
+yyglrShiftDefer (yyGLRStack* yystackp, size_t yyk, yyStateNum yylrState,
+                 size_t yyposn, yyGLRState* rhs, yyRuleNum yyrule)
+{
+  yyGLRState* yynewState = &yynewGLRStackItem (yystackp, yytrue)->yystate;
 
-/* Line 1455 of yacc.c  */
-#line 2987 "vtkParse.y"
-    { postSig((yyvsp[(1) - (1)].str)); postSig(" "); }
-    break;
+  yynewState->yylrState = yylrState;
+  yynewState->yyposn = yyposn;
+  yynewState->yyresolved = yyfalse;
+  yynewState->yypred = yystackp->yytops.yystates[yyk];
+  yynewState->yysemantics.yyfirstVal = NULL;
+  yystackp->yytops.yystates[yyk] = yynewState;
 
-  case 545:
+  /* Invokes YY_RESERVE_GLRSTACK.  */
+  yyaddDeferredAction (yystackp, yyk, yynewState, rhs, yyrule);
+}
 
-/* Line 1455 of yacc.c  */
-#line 2990 "vtkParse.y"
+/** Pop the symbols consumed by reduction #RULE from the top of stack
+ *  #K of STACK, and perform the appropriate semantic action on their
+ *  semantic values.  Assumes that all ambiguities in semantic values
+ *  have been previously resolved.  Set *VALP to the resulting value,
+ *  and *LOCP to the computed location (if any).  Return value is as
+ *  for userAction.  */
+static YYRESULTTAG
+yydoAction (yyGLRStack* yystackp, size_t yyk, yyRuleNum yyrule,
+            YYSTYPE* yyvalp, YYLTYPE* yylocp)
+{
+  int yynrhs = yyrhsLength (yyrule);
+
+  if (yystackp->yysplitPoint == NULL)
+    {
+      /* Standard special case: single stack.  */
+      yyGLRStackItem* rhs = (yyGLRStackItem*) yystackp->yytops.yystates[yyk];
+      YYASSERT (yyk == 0);
+      yystackp->yynextFree -= yynrhs;
+      yystackp->yyspaceLeft += yynrhs;
+      yystackp->yytops.yystates[0] = & yystackp->yynextFree[-1].yystate;
+      return yyuserAction (yyrule, yynrhs, rhs,
+                           yyvalp, yylocp, yystackp);
+    }
+  else
     {
-      int c1 = 0;
-      size_t l;
-      const char *cp;
-      chopSig();
-      cp = getSig();
-      l = getSigLength();
-      if (l != 0) { c1 = cp[l-1]; }
-      while (((c1 >= 'A' && c1 <= 'Z') || (c1 >= 'a' && c1 <= 'z') ||
-              (c1 >= '0' && c1 <= '9') || c1 == '_') && l != 0)
-        {
-        --l;
-        c1 = cp[l-1];
-        }
-      if (l < 2 || cp[l-1] != ':' || cp[l-2] != ':')
+      /* At present, doAction is never called in nondeterministic
+       * mode, so this branch is never taken.  It is here in
+       * anticipation of a future feature that will allow immediate
+       * evaluation of selected actions in nondeterministic mode.  */
+      int yyi;
+      yyGLRState* yys;
+      yyGLRStackItem yyrhsVals[YYMAXRHS + YYMAXLEFT + 1];
+      yys = yyrhsVals[YYMAXRHS + YYMAXLEFT].yystate.yypred
+        = yystackp->yytops.yystates[yyk];
+      for (yyi = 0; yyi < yynrhs; yyi += 1)
         {
-        cp = add_const_scope(&cp[l]);
-        resetSig(l);
-        postSig(cp);
+          yys = yys->yypred;
+          YYASSERT (yys);
         }
+      yyupdateSplit (yystackp, yys);
+      yystackp->yytops.yystates[yyk] = yys;
+      return yyuserAction (yyrule, yynrhs, yyrhsVals + YYMAXRHS + YYMAXLEFT - 1,
+                           yyvalp, yylocp, yystackp);
     }
-    break;
+}
 
-  case 549:
+#if !YYDEBUG
+# define YY_REDUCE_PRINT(Args)
+#else
+# define YY_REDUCE_PRINT(Args)   \
+do {                             \
+  if (yydebug)                   \
+    yy_reduce_print Args;        \
+} while (YYID (0))
 
-/* Line 1455 of yacc.c  */
-#line 3016 "vtkParse.y"
-    { postSig("< "); }
-    break;
+/*----------------------------------------------------------.
+| Report that the RULE is going to be reduced on stack #K.  |
+`----------------------------------------------------------*/
 
-  case 550:
+/*ARGSUSED*/ static void
+yy_reduce_print (yyGLRStack* yystackp, size_t yyk, yyRuleNum yyrule,
+                 YYSTYPE* yyvalp, YYLTYPE* yylocp)
+{
+  int yynrhs = yyrhsLength (yyrule);
+  yybool yynormal __attribute__ ((__unused__)) =
+    (yystackp->yysplitPoint == NULL);
+  yyGLRStackItem* yyvsp = (yyGLRStackItem*) yystackp->yytops.yystates[yyk];
+  int yylow = 1;
+  int yyi;
+  YYUSE (yyvalp);
+  YYUSE (yylocp);
+  YYFPRINTF (stderr, "Reducing stack %lu by rule %d (line %lu):\n",
+             (unsigned long int) yyk, yyrule - 1,
+             (unsigned long int) yyrline[yyrule]);
+  /* The symbols being reduced.  */
+  for (yyi = 0; yyi < yynrhs; yyi++)
+    {
+      YYFPRINTF (stderr, "   $%d = ", yyi + 1);
+      yy_symbol_print (stderr, yyrhs[yyprhs[yyrule] + yyi],
+                       &(((yyGLRStackItem const *)yyvsp)[YYFILL ((yyi + 1) - (yynrhs))].yystate.yysemantics.yysval)
+                                              );
+      YYFPRINTF (stderr, "\n");
+    }
+}
+#endif
 
-/* Line 1455 of yacc.c  */
-#line 3017 "vtkParse.y"
-    { postSig("> "); }
-    break;
+/** Pop items off stack #K of STACK according to grammar rule RULE,
+ *  and push back on the resulting nonterminal symbol.  Perform the
+ *  semantic action associated with RULE and store its value with the
+ *  newly pushed state, if FORCEEVAL or if STACK is currently
+ *  unambiguous.  Otherwise, store the deferred semantic action with
+ *  the new state.  If the new state would have an identical input
+ *  position, LR state, and predecessor to an existing state on the stack,
+ *  it is identified with that existing state, eliminating stack #K from
+ *  the STACK.  In this case, the (necessarily deferred) semantic value is
+ *  added to the options for the existing state's semantic value.
+ */
+static YYRESULTTAG
+yyglrReduce (yyGLRStack* yystackp, size_t yyk, yyRuleNum yyrule,
+             yybool yyforceEval)
+{
+  size_t yyposn = yystackp->yytops.yystates[yyk]->yyposn;
 
-  case 552:
+  if (yyforceEval || yystackp->yysplitPoint == NULL)
+    {
+      YYSTYPE yysval;
+      YYLTYPE yyloc = {0};
 
-/* Line 1455 of yacc.c  */
-#line 3020 "vtkParse.y"
-    { postSig("= "); }
-    break;
+      YY_REDUCE_PRINT ((yystackp, yyk, yyrule, &yysval, &yyloc));
+      YYCHK (yydoAction (yystackp, yyk, yyrule, &yysval,
+                         &yyloc));
+      YY_SYMBOL_PRINT ("-> $$ =", yyr1[yyrule], &yysval, &yyloc);
+      yyglrShift (yystackp, yyk,
+                  yyLRgotoState (yystackp->yytops.yystates[yyk]->yylrState,
+                                 yylhsNonterm (yyrule)),
+                  yyposn, &yysval, &yyloc);
+    }
+  else
+    {
+      size_t yyi;
+      int yyn;
+      yyGLRState* yys, *yys0 = yystackp->yytops.yystates[yyk];
+      yyStateNum yynewLRState;
 
-  case 553:
+      for (yys = yystackp->yytops.yystates[yyk], yyn = yyrhsLength (yyrule);
+           0 < yyn; yyn -= 1)
+        {
+          yys = yys->yypred;
+          YYASSERT (yys);
+        }
+      yyupdateSplit (yystackp, yys);
+      yynewLRState = yyLRgotoState (yys->yylrState, yylhsNonterm (yyrule));
+      YYDPRINTF ((stderr,
+                  "Reduced stack %lu by rule #%d; action deferred.  Now in state %d.\n",
+                  (unsigned long int) yyk, yyrule - 1, yynewLRState));
+      for (yyi = 0; yyi < yystackp->yytops.yysize; yyi += 1)
+        if (yyi != yyk && yystackp->yytops.yystates[yyi] != NULL)
+          {
+            yyGLRState* yyp, *yysplit = yystackp->yysplitPoint;
+            yyp = yystackp->yytops.yystates[yyi];
+            while (yyp != yys && yyp != yysplit && yyp->yyposn >= yyposn)
+              {
+                if (yyp->yylrState == yynewLRState && yyp->yypred == yys)
+                  {
+                    yyaddDeferredAction (yystackp, yyk, yyp, yys0, yyrule);
+                    yymarkStackDeleted (yystackp, yyk);
+                    YYDPRINTF ((stderr, "Merging stack %lu into stack %lu.\n",
+                                (unsigned long int) yyk,
+                                (unsigned long int) yyi));
+                    return yyok;
+                  }
+                yyp = yyp->yypred;
+              }
+          }
+      yystackp->yytops.yystates[yyk] = yys;
+      yyglrShiftDefer (yystackp, yyk, yynewLRState, yyposn, yys0, yyrule);
+    }
+  return yyok;
+}
 
-/* Line 1455 of yacc.c  */
-#line 3021 "vtkParse.y"
-    { chopSig(); postSig(", "); }
-    break;
+static size_t
+yysplitStack (yyGLRStack* yystackp, size_t yyk)
+{
+  if (yystackp->yysplitPoint == NULL)
+    {
+      YYASSERT (yyk == 0);
+      yystackp->yysplitPoint = yystackp->yytops.yystates[yyk];
+    }
+  if (yystackp->yytops.yysize >= yystackp->yytops.yycapacity)
+    {
+      yyGLRState** yynewStates;
+      yybool* yynewLookaheadNeeds;
 
-  case 555:
+      yynewStates = NULL;
 
-/* Line 1455 of yacc.c  */
-#line 3024 "vtkParse.y"
-    { chopSig(); postSig(";"); }
-    break;
+      if (yystackp->yytops.yycapacity
+          > (YYSIZEMAX / (2 * sizeof yynewStates[0])))
+        yyMemoryExhausted (yystackp);
+      yystackp->yytops.yycapacity *= 2;
 
-  case 563:
+      yynewStates =
+        (yyGLRState**) YYREALLOC (yystackp->yytops.yystates,
+                                  (yystackp->yytops.yycapacity
+                                   * sizeof yynewStates[0]));
+      if (yynewStates == NULL)
+        yyMemoryExhausted (yystackp);
+      yystackp->yytops.yystates = yynewStates;
 
-/* Line 1455 of yacc.c  */
-#line 3038 "vtkParse.y"
-    { postSig("= "); }
-    break;
+      yynewLookaheadNeeds =
+        (yybool*) YYREALLOC (yystackp->yytops.yylookaheadNeeds,
+                             (yystackp->yytops.yycapacity
+                              * sizeof yynewLookaheadNeeds[0]));
+      if (yynewLookaheadNeeds == NULL)
+        yyMemoryExhausted (yystackp);
+      yystackp->yytops.yylookaheadNeeds = yynewLookaheadNeeds;
+    }
+  yystackp->yytops.yystates[yystackp->yytops.yysize]
+    = yystackp->yytops.yystates[yyk];
+  yystackp->yytops.yylookaheadNeeds[yystackp->yytops.yysize]
+    = yystackp->yytops.yylookaheadNeeds[yyk];
+  yystackp->yytops.yysize += 1;
+  return yystackp->yytops.yysize-1;
+}
 
-  case 564:
+/** True iff Y0 and Y1 represent identical options at the top level.
+ *  That is, they represent the same rule applied to RHS symbols
+ *  that produce the same terminal symbols.  */
+static yybool
+yyidenticalOptions (yySemanticOption* yyy0, yySemanticOption* yyy1)
+{
+  if (yyy0->yyrule == yyy1->yyrule)
+    {
+      yyGLRState *yys0, *yys1;
+      int yyn;
+      for (yys0 = yyy0->yystate, yys1 = yyy1->yystate,
+           yyn = yyrhsLength (yyy0->yyrule);
+           yyn > 0;
+           yys0 = yys0->yypred, yys1 = yys1->yypred, yyn -= 1)
+        if (yys0->yyposn != yys1->yyposn)
+          return yyfalse;
+      return yytrue;
+    }
+  else
+    return yyfalse;
+}
 
-/* Line 1455 of yacc.c  */
-#line 3039 "vtkParse.y"
-    { chopSig(); postSig(", "); }
-    break;
+/** Assuming identicalOptions (Y0,Y1), destructively merge the
+ *  alternative semantic values for the RHS-symbols of Y1 and Y0.  */
+static void
+yymergeOptionSets (yySemanticOption* yyy0, yySemanticOption* yyy1)
+{
+  yyGLRState *yys0, *yys1;
+  int yyn;
+  for (yys0 = yyy0->yystate, yys1 = yyy1->yystate,
+       yyn = yyrhsLength (yyy0->yyrule);
+       yyn > 0;
+       yys0 = yys0->yypred, yys1 = yys1->yypred, yyn -= 1)
+    {
+      if (yys0 == yys1)
+        break;
+      else if (yys0->yyresolved)
+        {
+          yys1->yyresolved = yytrue;
+          yys1->yysemantics.yysval = yys0->yysemantics.yysval;
+        }
+      else if (yys1->yyresolved)
+        {
+          yys0->yyresolved = yytrue;
+          yys0->yysemantics.yysval = yys1->yysemantics.yysval;
+        }
+      else
+        {
+          yySemanticOption** yyz0p;
+          yySemanticOption* yyz1;
+          yyz0p = &yys0->yysemantics.yyfirstVal;
+          yyz1 = yys1->yysemantics.yyfirstVal;
+          while (YYID (yytrue))
+            {
+              if (yyz1 == *yyz0p || yyz1 == NULL)
+                break;
+              else if (*yyz0p == NULL)
+                {
+                  *yyz0p = yyz1;
+                  break;
+                }
+              else if (*yyz0p < yyz1)
+                {
+                  yySemanticOption* yyz = *yyz0p;
+                  *yyz0p = yyz1;
+                  yyz1 = yyz1->yynext;
+                  (*yyz0p)->yynext = yyz;
+                }
+              yyz0p = &(*yyz0p)->yynext;
+            }
+          yys1->yysemantics.yyfirstVal = yys0->yysemantics.yyfirstVal;
+        }
+    }
+}
 
-  case 565:
+/** Y0 and Y1 represent two possible actions to take in a given
+ *  parsing state; return 0 if no combination is possible,
+ *  1 if user-mergeable, 2 if Y0 is preferred, 3 if Y1 is preferred.  */
+static int
+yypreference (yySemanticOption* y0, yySemanticOption* y1)
+{
+  yyRuleNum r0 = y0->yyrule, r1 = y1->yyrule;
+  int p0 = yydprec[r0], p1 = yydprec[r1];
 
-/* Line 1455 of yacc.c  */
-#line 3043 "vtkParse.y"
+  if (p0 == p1)
     {
-      chopSig();
-      if (getSig()[getSigLength()-1] == '<') { postSig(" "); }
-      postSig("<");
+      if (yymerger[r0] == 0 || yymerger[r0] != yymerger[r1])
+        return 0;
+      else
+        return 1;
     }
-    break;
+  if (p0 == 0 || p1 == 0)
+    return 0;
+  if (p0 < p1)
+    return 3;
+  if (p1 < p0)
+    return 2;
+  return 0;
+}
 
-  case 566:
+static YYRESULTTAG yyresolveValue (yyGLRState* yys,
+                                   yyGLRStack* yystackp);
 
-/* Line 1455 of yacc.c  */
-#line 3049 "vtkParse.y"
+
+/** Resolve the previous N states starting at and including state S.  If result
+ *  != yyok, some states may have been left unresolved possibly with empty
+ *  semantic option chains.  Regardless of whether result = yyok, each state
+ *  has been left with consistent data so that yydestroyGLRState can be invoked
+ *  if necessary.  */
+static YYRESULTTAG
+yyresolveStates (yyGLRState* yys, int yyn,
+                 yyGLRStack* yystackp)
+{
+  if (0 < yyn)
     {
-      chopSig();
-      if (getSig()[getSigLength()-1] == '>') { postSig(" "); }
-      postSig("> ");
+      YYASSERT (yys->yypred);
+      YYCHK (yyresolveStates (yys->yypred, yyn-1, yystackp));
+      if (! yys->yyresolved)
+        YYCHK (yyresolveValue (yys, yystackp));
     }
-    break;
+  return yyok;
+}
 
-  case 567:
+/** Resolve the states for the RHS of OPT, perform its user action, and return
+ *  the semantic value and location.  Regardless of whether result = yyok, all
+ *  RHS states have been destroyed (assuming the user action destroys all RHS
+ *  semantic values if invoked).  */
+static YYRESULTTAG
+yyresolveAction (yySemanticOption* yyopt, yyGLRStack* yystackp,
+                 YYSTYPE* yyvalp, YYLTYPE* yylocp)
+{
+  yyGLRStackItem yyrhsVals[YYMAXRHS + YYMAXLEFT + 1];
+  int yynrhs;
+  int yychar_current;
+  YYSTYPE yylval_current;
+  YYLTYPE yylloc_current;
+  YYRESULTTAG yyflag;
+
+  yynrhs = yyrhsLength (yyopt->yyrule);
+  yyflag = yyresolveStates (yyopt->yystate, yynrhs, yystackp);
+  if (yyflag != yyok)
+    {
+      yyGLRState *yys;
+      for (yys = yyopt->yystate; yynrhs > 0; yys = yys->yypred, yynrhs -= 1)
+        yydestroyGLRState ("Cleanup: popping", yys);
+      return yyflag;
+    }
+
+  yyrhsVals[YYMAXRHS + YYMAXLEFT].yystate.yypred = yyopt->yystate;
+  yychar_current = yychar;
+  yylval_current = yylval;
+  yylloc_current = yylloc;
+  yychar = yyopt->yyrawchar;
+  yylval = yyopt->yyval;
+  yylloc = yyopt->yyloc;
+  yyflag = yyuserAction (yyopt->yyrule, yynrhs,
+                           yyrhsVals + YYMAXRHS + YYMAXLEFT - 1,
+                           yyvalp, yylocp, yystackp);
+  yychar = yychar_current;
+  yylval = yylval_current;
+  yylloc = yylloc_current;
+  return yyflag;
+}
 
-/* Line 1455 of yacc.c  */
-#line 3056 "vtkParse.y"
-    { postSig("["); }
-    break;
+#if YYDEBUG
+static void
+yyreportTree (yySemanticOption* yyx, int yyindent)
+{
+  int yynrhs = yyrhsLength (yyx->yyrule);
+  int yyi;
+  yyGLRState* yys;
+  yyGLRState* yystates[1 + YYMAXRHS];
+  yyGLRState yyleftmost_state;
 
-  case 568:
+  for (yyi = yynrhs, yys = yyx->yystate; 0 < yyi; yyi -= 1, yys = yys->yypred)
+    yystates[yyi] = yys;
+  if (yys == NULL)
+    {
+      yyleftmost_state.yyposn = 0;
+      yystates[0] = &yyleftmost_state;
+    }
+  else
+    yystates[0] = yys;
 
-/* Line 1455 of yacc.c  */
-#line 3057 "vtkParse.y"
-    { chopSig(); postSig("] "); }
-    break;
+  if (yyx->yystate->yyposn < yys->yyposn + 1)
+    YYFPRINTF (stderr, "%*s%s -> <Rule %d, empty>\n",
+               yyindent, "", yytokenName (yylhsNonterm (yyx->yyrule)),
+               yyx->yyrule - 1);
+  else
+    YYFPRINTF (stderr, "%*s%s -> <Rule %d, tokens %lu .. %lu>\n",
+               yyindent, "", yytokenName (yylhsNonterm (yyx->yyrule)),
+               yyx->yyrule - 1, (unsigned long int) (yys->yyposn + 1),
+               (unsigned long int) yyx->yystate->yyposn);
+  for (yyi = 1; yyi <= yynrhs; yyi += 1)
+    {
+      if (yystates[yyi]->yyresolved)
+        {
+          if (yystates[yyi-1]->yyposn+1 > yystates[yyi]->yyposn)
+            YYFPRINTF (stderr, "%*s%s <empty>\n", yyindent+2, "",
+                       yytokenName (yyrhs[yyprhs[yyx->yyrule]+yyi-1]));
+          else
+            YYFPRINTF (stderr, "%*s%s <tokens %lu .. %lu>\n", yyindent+2, "",
+                       yytokenName (yyrhs[yyprhs[yyx->yyrule]+yyi-1]),
+                       (unsigned long int) (yystates[yyi - 1]->yyposn + 1),
+                       (unsigned long int) yystates[yyi]->yyposn);
+        }
+      else
+        yyreportTree (yystates[yyi]->yysemantics.yyfirstVal, yyindent+2);
+    }
+}
+#endif
 
-  case 569:
+/*ARGSUSED*/ static YYRESULTTAG
+yyreportAmbiguity (yySemanticOption* yyx0,
+                   yySemanticOption* yyx1)
+{
+  YYUSE (yyx0);
+  YYUSE (yyx1);
 
-/* Line 1455 of yacc.c  */
-#line 3060 "vtkParse.y"
-    { postSig("("); }
-    break;
+#if YYDEBUG
+  YYFPRINTF (stderr, "Ambiguity detected.\n");
+  YYFPRINTF (stderr, "Option 1,\n");
+  yyreportTree (yyx0, 2);
+  YYFPRINTF (stderr, "\nOption 2,\n");
+  yyreportTree (yyx1, 2);
+  YYFPRINTF (stderr, "\n");
+#endif
 
-  case 570:
+  yyerror (YY_("syntax is ambiguous"));
+  return yyabort;
+}
 
-/* Line 1455 of yacc.c  */
-#line 3061 "vtkParse.y"
-    { chopSig(); postSig(") "); }
-    break;
+/** Starting at and including state S1, resolve the location for each of the
+ *  previous N1 states that is unresolved.  The first semantic option of a state
+ *  is always chosen.  */
+static void
+yyresolveLocations (yyGLRState* yys1, int yyn1,
+                    yyGLRStack *yystackp)
+{
+  if (0 < yyn1)
+    {
+      yyresolveLocations (yys1->yypred, yyn1 - 1, yystackp);
+      if (!yys1->yyresolved)
+        {
+          yySemanticOption *yyoption;
+          yyGLRStackItem yyrhsloc[1 + YYMAXRHS];
+          int yynrhs;
+          int yychar_current;
+          YYSTYPE yylval_current;
+          YYLTYPE yylloc_current;
+          yyoption = yys1->yysemantics.yyfirstVal;
+          YYASSERT (yyoption != NULL);
+          yynrhs = yyrhsLength (yyoption->yyrule);
+          if (yynrhs > 0)
+            {
+              yyGLRState *yys;
+              int yyn;
+              yyresolveLocations (yyoption->yystate, yynrhs,
+                                  yystackp);
+              for (yys = yyoption->yystate, yyn = yynrhs;
+                   yyn > 0;
+                   yys = yys->yypred, yyn -= 1)
+                yyrhsloc[yyn].yystate.yyloc = yys->yyloc;
+            }
+          else
+            {
+              /* Both yyresolveAction and yyresolveLocations traverse the GSS
+                 in reverse rightmost order.  It is only necessary to invoke
+                 yyresolveLocations on a subforest for which yyresolveAction
+                 would have been invoked next had an ambiguity not been
+                 detected.  Thus the location of the previous state (but not
+                 necessarily the previous state itself) is guaranteed to be
+                 resolved already.  */
+              yyGLRState *yyprevious = yyoption->yystate;
+              yyrhsloc[0].yystate.yyloc = yyprevious->yyloc;
+            }
+          yychar_current = yychar;
+          yylval_current = yylval;
+          yylloc_current = yylloc;
+          yychar = yyoption->yyrawchar;
+          yylval = yyoption->yyval;
+          yylloc = yyoption->yyloc;
+          YYLLOC_DEFAULT ((yys1->yyloc), yyrhsloc, yynrhs);
+          yychar = yychar_current;
+          yylval = yylval_current;
+          yylloc = yylloc_current;
+        }
+    }
+}
 
-  case 571:
+/** Resolve the ambiguity represented in state S, perform the indicated
+ *  actions, and set the semantic value of S.  If result != yyok, the chain of
+ *  semantic options in S has been cleared instead or it has been left
+ *  unmodified except that redundant options may have been removed.  Regardless
+ *  of whether result = yyok, S has been left with consistent data so that
+ *  yydestroyGLRState can be invoked if necessary.  */
+static YYRESULTTAG
+yyresolveValue (yyGLRState* yys, yyGLRStack* yystackp)
+{
+  yySemanticOption* yyoptionList = yys->yysemantics.yyfirstVal;
+  yySemanticOption* yybest;
+  yySemanticOption** yypp;
+  yybool yymerge;
+  YYSTYPE yysval;
+  YYRESULTTAG yyflag;
+  YYLTYPE *yylocp = &yys->yyloc;
+
+  yybest = yyoptionList;
+  yymerge = yyfalse;
+  for (yypp = &yyoptionList->yynext; *yypp != NULL; )
+    {
+      yySemanticOption* yyp = *yypp;
+
+      if (yyidenticalOptions (yybest, yyp))
+        {
+          yymergeOptionSets (yybest, yyp);
+          *yypp = yyp->yynext;
+        }
+      else
+        {
+          switch (yypreference (yybest, yyp))
+            {
+            case 0:
+              yyresolveLocations (yys, 1, yystackp);
+              return yyreportAmbiguity (yybest, yyp);
+              break;
+            case 1:
+              yymerge = yytrue;
+              break;
+            case 2:
+              break;
+            case 3:
+              yybest = yyp;
+              yymerge = yyfalse;
+              break;
+            default:
+              /* This cannot happen so it is not worth a YYASSERT (yyfalse),
+                 but some compilers complain if the default case is
+                 omitted.  */
+              break;
+            }
+          yypp = &yyp->yynext;
+        }
+    }
+
+  if (yymerge)
+    {
+      yySemanticOption* yyp;
+      int yyprec = yydprec[yybest->yyrule];
+      yyflag = yyresolveAction (yybest, yystackp, &yysval,
+                                yylocp);
+      if (yyflag == yyok)
+        for (yyp = yybest->yynext; yyp != NULL; yyp = yyp->yynext)
+          {
+            if (yyprec == yydprec[yyp->yyrule])
+              {
+                YYSTYPE yysval_other;
+                YYLTYPE yydummy;
+                yyflag = yyresolveAction (yyp, yystackp, &yysval_other,
+                                          &yydummy);
+                if (yyflag != yyok)
+                  {
+                    yydestruct ("Cleanup: discarding incompletely merged value for",
+                                yystos[yys->yylrState],
+                                &yysval);
+                    break;
+                  }
+                yyuserMerge (yymerger[yyp->yyrule], &yysval, &yysval_other);
+              }
+          }
+    }
+  else
+    yyflag = yyresolveAction (yybest, yystackp, &yysval, yylocp);
 
-/* Line 1455 of yacc.c  */
-#line 3062 "vtkParse.y"
-    { postSig("("); postSig((yyvsp[(1) - (1)].str)); postSig("*"); }
-    break;
+  if (yyflag == yyok)
+    {
+      yys->yyresolved = yytrue;
+      yys->yysemantics.yysval = yysval;
+    }
+  else
+    yys->yysemantics.yyfirstVal = NULL;
+  return yyflag;
+}
 
-  case 572:
+static YYRESULTTAG
+yyresolveStack (yyGLRStack* yystackp)
+{
+  if (yystackp->yysplitPoint != NULL)
+    {
+      yyGLRState* yys;
+      int yyn;
 
-/* Line 1455 of yacc.c  */
-#line 3063 "vtkParse.y"
-    { chopSig(); postSig(") "); }
-    break;
+      for (yyn = 0, yys = yystackp->yytops.yystates[0];
+           yys != yystackp->yysplitPoint;
+           yys = yys->yypred, yyn += 1)
+        continue;
+      YYCHK (yyresolveStates (yystackp->yytops.yystates[0], yyn, yystackp));
+    }
+  return yyok;
+}
 
-  case 573:
+static void
+yycompressStack (yyGLRStack* yystackp)
+{
+  yyGLRState* yyp, *yyq, *yyr;
 
-/* Line 1455 of yacc.c  */
-#line 3064 "vtkParse.y"
-    { postSig("("); postSig((yyvsp[(1) - (1)].str)); postSig("&"); }
-    break;
+  if (yystackp->yytops.yysize != 1 || yystackp->yysplitPoint == NULL)
+    return;
 
-  case 574:
+  for (yyp = yystackp->yytops.yystates[0], yyq = yyp->yypred, yyr = NULL;
+       yyp != yystackp->yysplitPoint;
+       yyr = yyp, yyp = yyq, yyq = yyp->yypred)
+    yyp->yypred = yyr;
 
-/* Line 1455 of yacc.c  */
-#line 3065 "vtkParse.y"
-    { chopSig(); postSig(") "); }
-    break;
+  yystackp->yyspaceLeft += yystackp->yynextFree - yystackp->yyitems;
+  yystackp->yynextFree = ((yyGLRStackItem*) yystackp->yysplitPoint) + 1;
+  yystackp->yyspaceLeft -= yystackp->yynextFree - yystackp->yyitems;
+  yystackp->yysplitPoint = NULL;
+  yystackp->yylastDeleted = NULL;
 
-  case 575:
+  while (yyr != NULL)
+    {
+      yystackp->yynextFree->yystate = *yyr;
+      yyr = yyr->yypred;
+      yystackp->yynextFree->yystate.yypred = &yystackp->yynextFree[-1].yystate;
+      yystackp->yytops.yystates[0] = &yystackp->yynextFree->yystate;
+      yystackp->yynextFree += 1;
+      yystackp->yyspaceLeft -= 1;
+    }
+}
 
-/* Line 1455 of yacc.c  */
-#line 3068 "vtkParse.y"
-    { postSig("{ "); }
-    break;
+static YYRESULTTAG
+yyprocessOneStack (yyGLRStack* yystackp, size_t yyk,
+                   size_t yyposn)
+{
+  int yyaction;
+  const short int* yyconflicts;
+  yyRuleNum yyrule;
 
-  case 576:
+  while (yystackp->yytops.yystates[yyk] != NULL)
+    {
+      yyStateNum yystate = yystackp->yytops.yystates[yyk]->yylrState;
+      YYDPRINTF ((stderr, "Stack %lu Entering state %d\n",
+                  (unsigned long int) yyk, yystate));
 
-/* Line 1455 of yacc.c  */
-#line 3068 "vtkParse.y"
-    { postSig("} "); }
-    break;
+      YYASSERT (yystate != YYFINAL);
 
+      if (yyisDefaultedState (yystate))
+        {
+          yyrule = yydefaultAction (yystate);
+          if (yyrule == 0)
+            {
+              YYDPRINTF ((stderr, "Stack %lu dies.\n",
+                          (unsigned long int) yyk));
+              yymarkStackDeleted (yystackp, yyk);
+              return yyok;
+            }
+          YYCHK (yyglrReduce (yystackp, yyk, yyrule, yyfalse));
+        }
+      else
+        {
+          yySymbol yytoken;
+          yystackp->yytops.yylookaheadNeeds[yyk] = yytrue;
+          if (yychar == YYEMPTY)
+            {
+              YYDPRINTF ((stderr, "Reading a token: "));
+              yychar = YYLEX;
+            }
 
+          if (yychar <= YYEOF)
+            {
+              yychar = yytoken = YYEOF;
+              YYDPRINTF ((stderr, "Now at end of input.\n"));
+            }
+          else
+            {
+              yytoken = YYTRANSLATE (yychar);
+              YY_SYMBOL_PRINT ("Next token is", yytoken, &yylval, &yylloc);
+            }
 
-/* Line 1455 of yacc.c  */
-#line 7956 "vtkParse.tab.c"
-      default: break;
-    }
-  YY_SYMBOL_PRINT ("-> $$ =", yyr1[yyn], &yyval, &yyloc);
+          yygetLRActions (yystate, yytoken, &yyaction, &yyconflicts);
 
-  YYPOPSTACK (yylen);
-  yylen = 0;
-  YY_STACK_PRINT (yyss, yyssp);
+          while (*yyconflicts != 0)
+            {
+              size_t yynewStack = yysplitStack (yystackp, yyk);
+              YYDPRINTF ((stderr, "Splitting off stack %lu from %lu.\n",
+                          (unsigned long int) yynewStack,
+                          (unsigned long int) yyk));
+              YYCHK (yyglrReduce (yystackp, yynewStack,
+                                  *yyconflicts, yyfalse));
+              YYCHK (yyprocessOneStack (yystackp, yynewStack,
+                                        yyposn));
+              yyconflicts += 1;
+            }
 
-  *++yyvsp = yyval;
+          if (yyisShiftAction (yyaction))
+            break;
+          else if (yyisErrorAction (yyaction))
+            {
+              YYDPRINTF ((stderr, "Stack %lu dies.\n",
+                          (unsigned long int) yyk));
+              yymarkStackDeleted (yystackp, yyk);
+              break;
+            }
+          else
+            YYCHK (yyglrReduce (yystackp, yyk, -yyaction,
+                                yyfalse));
+        }
+    }
+  return yyok;
+}
 
-  /* Now `shift' the result of the reduction.  Determine what state
-     that goes to, based on the state we popped back to and the rule
-     number reduced by.  */
+/*ARGSUSED*/ static void
+yyreportSyntaxError (yyGLRStack* yystackp)
+{
+  if (yystackp->yyerrState == 0)
+    {
+#if YYERROR_VERBOSE
+      int yyn;
+      yyn = yypact[yystackp->yytops.yystates[0]->yylrState];
+      if (YYPACT_NINF < yyn && yyn <= YYLAST)
+        {
+          yySymbol yytoken = YYTRANSLATE (yychar);
+          size_t yysize0 = yytnamerr (NULL, yytokenName (yytoken));
+          size_t yysize = yysize0;
+          size_t yysize1;
+          yybool yysize_overflow = yyfalse;
+          char* yymsg = NULL;
+          enum { YYERROR_VERBOSE_ARGS_MAXIMUM = 5 };
+          char const *yyarg[YYERROR_VERBOSE_ARGS_MAXIMUM];
+          int yyx;
+          char *yyfmt;
+          char const *yyf;
+          static char const yyunexpected[] = "syntax error, unexpected %s";
+          static char const yyexpecting[] = ", expecting %s";
+          static char const yyor[] = " or %s";
+          char yyformat[sizeof yyunexpected
+                        + sizeof yyexpecting - 1
+                        + ((YYERROR_VERBOSE_ARGS_MAXIMUM - 2)
+                           * (sizeof yyor - 1))];
+          char const *yyprefix = yyexpecting;
+
+          /* Start YYX at -YYN if negative to avoid negative indexes in
+             YYCHECK.  */
+          int yyxbegin = yyn < 0 ? -yyn : 0;
+
+          /* Stay within bounds of both yycheck and yytname.  */
+          int yychecklim = YYLAST - yyn + 1;
+          int yyxend = yychecklim < YYNTOKENS ? yychecklim : YYNTOKENS;
+          int yycount = 1;
+
+          yyarg[0] = yytokenName (yytoken);
+          yyfmt = yystpcpy (yyformat, yyunexpected);
+
+          for (yyx = yyxbegin; yyx < yyxend; ++yyx)
+            if (yycheck[yyx + yyn] == yyx && yyx != YYTERROR)
+              {
+                if (yycount == YYERROR_VERBOSE_ARGS_MAXIMUM)
+                  {
+                    yycount = 1;
+                    yysize = yysize0;
+                    yyformat[sizeof yyunexpected - 1] = '\0';
+                    break;
+                  }
+                yyarg[yycount++] = yytokenName (yyx);
+                yysize1 = yysize + yytnamerr (NULL, yytokenName (yyx));
+                yysize_overflow |= yysize1 < yysize;
+                yysize = yysize1;
+                yyfmt = yystpcpy (yyfmt, yyprefix);
+                yyprefix = yyor;
+              }
 
-  yyn = yyr1[yyn];
+          yyf = YY_(yyformat);
+          yysize1 = yysize + strlen (yyf);
+          yysize_overflow |= yysize1 < yysize;
+          yysize = yysize1;
 
-  yystate = yypgoto[yyn - YYNTOKENS] + *yyssp;
-  if (0 <= yystate && yystate <= YYLAST && yycheck[yystate] == *yyssp)
-    yystate = yytable[yystate];
-  else
-    yystate = yydefgoto[yyn - YYNTOKENS];
+          if (!yysize_overflow)
+            yymsg = (char *) YYMALLOC (yysize);
 
-  goto yynewstate;
+          if (yymsg)
+            {
+              char *yyp = yymsg;
+              int yyi = 0;
+              while ((*yyp = *yyf))
+                {
+                  if (*yyp == '%' && yyf[1] == 's' && yyi < yycount)
+                    {
+                      yyp += yytnamerr (yyp, yyarg[yyi++]);
+                      yyf += 2;
+                    }
+                  else
+                    {
+                      yyp++;
+                      yyf++;
+                    }
+                }
+              yyerror (yymsg);
+              YYFREE (yymsg);
+            }
+          else
+            {
+              yyerror (YY_("syntax error"));
+              yyMemoryExhausted (yystackp);
+            }
+        }
+      else
+#endif /* YYERROR_VERBOSE */
+        yyerror (YY_("syntax error"));
+      yynerrs += 1;
+    }
+}
 
+/* Recover from a syntax error on *YYSTACKP, assuming that *YYSTACKP->YYTOKENP,
+   yylval, and yylloc are the syntactic category, semantic value, and location
+   of the lookahead.  */
+/*ARGSUSED*/ static void
+yyrecoverSyntaxError (yyGLRStack* yystackp)
+{
+  size_t yyk;
+  int yyj;
 
-/*------------------------------------.
-| yyerrlab -- here on detecting error |
-`------------------------------------*/
-yyerrlab:
-  /* If not already recovering from an error, report this error.  */
-  if (!yyerrstatus)
-    {
-      ++yynerrs;
-#if ! YYERROR_VERBOSE
-      yyerror (YY_("syntax error"));
-#else
+  if (yystackp->yyerrState == 3)
+    /* We just shifted the error token and (perhaps) took some
+       reductions.  Skip tokens until we can proceed.  */
+    while (YYID (yytrue))
       {
-        YYSIZE_T yysize = yysyntax_error (0, yystate, yychar);
-        if (yymsg_alloc < yysize && yymsg_alloc < YYSTACK_ALLOC_MAXIMUM)
+        yySymbol yytoken;
+        if (yychar == YYEOF)
+          yyFail (yystackp, NULL);
+        if (yychar != YYEMPTY)
           {
-            YYSIZE_T yyalloc = 2 * yysize;
-            if (! (yysize <= yyalloc && yyalloc <= YYSTACK_ALLOC_MAXIMUM))
-              yyalloc = YYSTACK_ALLOC_MAXIMUM;
-            if (yymsg != yymsgbuf)
-              YYSTACK_FREE (yymsg);
-            yymsg = (char *) YYSTACK_ALLOC (yyalloc);
-            if (yymsg)
-              yymsg_alloc = yyalloc;
-            else
-              {
-                yymsg = yymsgbuf;
-                yymsg_alloc = sizeof yymsgbuf;
-              }
+            yytoken = YYTRANSLATE (yychar);
+            yydestruct ("Error: discarding",
+                        yytoken, &yylval);
           }
-
-        if (0 < yysize && yysize <= yymsg_alloc)
+        YYDPRINTF ((stderr, "Reading a token: "));
+        yychar = YYLEX;
+        if (yychar <= YYEOF)
           {
-            (void) yysyntax_error (yymsg, yystate, yychar);
-            yyerror (yymsg);
+            yychar = yytoken = YYEOF;
+            YYDPRINTF ((stderr, "Now at end of input.\n"));
           }
         else
           {
-            yyerror (YY_("syntax error"));
-            if (yysize != 0)
-              goto yyexhaustedlab;
+            yytoken = YYTRANSLATE (yychar);
+            YY_SYMBOL_PRINT ("Next token is", yytoken, &yylval, &yylloc);
           }
+        yyj = yypact[yystackp->yytops.yystates[0]->yylrState];
+        if (yyis_pact_ninf (yyj))
+          return;
+        yyj += yytoken;
+        if (yyj < 0 || YYLAST < yyj || yycheck[yyj] != yytoken)
+          {
+            if (yydefact[yystackp->yytops.yystates[0]->yylrState] != 0)
+              return;
+          }
+        else if (yytable[yyj] != 0 && ! yyis_table_ninf (yytable[yyj]))
+          return;
       }
-#endif
-    }
-
-
-
-  if (yyerrstatus == 3)
-    {
-      /* If just tried and failed to reuse lookahead token after an
-         error, discard it.  */
 
-      if (yychar <= YYEOF)
-        {
-          /* Return failure if at end of input.  */
-          if (yychar == YYEOF)
-            YYABORT;
-        }
-      else
+  /* Reduce to one stack.  */
+  for (yyk = 0; yyk < yystackp->yytops.yysize; yyk += 1)
+    if (yystackp->yytops.yystates[yyk] != NULL)
+      break;
+  if (yyk >= yystackp->yytops.yysize)
+    yyFail (yystackp, NULL);
+  for (yyk += 1; yyk < yystackp->yytops.yysize; yyk += 1)
+    yymarkStackDeleted (yystackp, yyk);
+  yyremoveDeletes (yystackp);
+  yycompressStack (yystackp);
+
+  /* Now pop stack until we find a state that shifts the error token.  */
+  yystackp->yyerrState = 3;
+  while (yystackp->yytops.yystates[0] != NULL)
+    {
+      yyGLRState *yys = yystackp->yytops.yystates[0];
+      yyj = yypact[yys->yylrState];
+      if (! yyis_pact_ninf (yyj))
         {
-          yydestruct ("Error: discarding",
-                      yytoken, &yylval);
-          yychar = YYEMPTY;
+          yyj += YYTERROR;
+          if (0 <= yyj && yyj <= YYLAST && yycheck[yyj] == YYTERROR
+              && yyisShiftAction (yytable[yyj]))
+            {
+              /* Shift the error token having adjusted its location.  */
+              YYLTYPE yyerrloc = {0};
+              YY_SYMBOL_PRINT ("Shifting", yystos[yytable[yyj]],
+                               &yylval, &yyerrloc);
+              yyglrShift (yystackp, 0, yytable[yyj],
+                          yys->yyposn, &yylval, &yyerrloc);
+              yys = yystackp->yytops.yystates[0];
+              break;
+            }
         }
-    }
 
-  /* Else will try to reuse lookahead token after shifting the error
-     token.  */
-  goto yyerrlab1;
+      if (yys->yypred != NULL)
+        yydestroyGLRState ("Error: popping", yys);
+      yystackp->yytops.yystates[0] = yys->yypred;
+      yystackp->yynextFree -= 1;
+      yystackp->yyspaceLeft += 1;
+    }
+  if (yystackp->yytops.yystates[0] == NULL)
+    yyFail (yystackp, NULL);
+}
 
+#define YYCHK1(YYE)           \
+  do {                        \
+    switch (YYE) {            \
+    case yyok:                \
+      break;                  \
+    case yyabort:             \
+      goto yyabortlab;        \
+    case yyaccept:            \
+      goto yyacceptlab;       \
+    case yyerr:               \
+      goto yyuser_error;      \
+    default:                  \
+      goto yybuglab;          \
+    }                         \
+  } while (YYID (0))
+
+
+/*----------.
+| yyparse.  |
+`----------*/
 
-/*---------------------------------------------------.
-| yyerrorlab -- error raised explicitly by YYERROR.  |
-`---------------------------------------------------*/
-yyerrorlab:
+int
+yyparse (void)
+{
+  int yyresult;
+  yyGLRStack yystack;
+  yyGLRStack* const yystackp = &yystack;
+  size_t yyposn;
 
-  /* Pacify compilers like GCC when the user code never invokes
-     YYERROR and the label yyerrorlab therefore never appears in user
-     code.  */
-  if (/*CONSTCOND*/ 0)
-     goto yyerrorlab;
+  YYDPRINTF ((stderr, "Starting parse\n"));
 
-  /* Do not reclaim the symbols of the rule which action triggered
-     this YYERROR.  */
-  YYPOPSTACK (yylen);
-  yylen = 0;
-  YY_STACK_PRINT (yyss, yyssp);
-  yystate = *yyssp;
+  yychar = YYEMPTY;
+  yylval = yyval_default;
 
 
-/*-------------------------------------------------------------.
-| yyerrlab1 -- common code for both syntax error and YYERROR.  |
-`-------------------------------------------------------------*/
-yyerrlab1:
-  yyerrstatus = 3;        /* Each real token shifted decrements this.  */
+  if (! yyinitGLRStack (yystackp, YYINITDEPTH))
+    goto yyexhaustedlab;
+  switch (YYSETJMP (yystack.yyexception_buffer))
+    {
+    case 0: break;
+    case 1: goto yyabortlab;
+    case 2: goto yyexhaustedlab;
+    default: goto yybuglab;
+    }
+  yyglrShift (&yystack, 0, 0, 0, &yylval, &yylloc);
+  yyposn = 0;
 
-  for (;;)
+  while (YYID (yytrue))
     {
-      yyn = yypact[yystate];
-      if (yyn != YYPACT_NINF)
+      /* For efficiency, we have two loops, the first of which is
+         specialized to deterministic operation (single stack, no
+         potential ambiguity).  */
+      /* Standard mode */
+      while (YYID (yytrue))
         {
-          yyn += YYTERROR;
-          if (0 <= yyn && yyn <= YYLAST && yycheck[yyn] == YYTERROR)
+          yyRuleNum yyrule;
+          int yyaction;
+          const short int* yyconflicts;
+
+          yyStateNum yystate = yystack.yytops.yystates[0]->yylrState;
+          YYDPRINTF ((stderr, "Entering state %d\n", yystate));
+          if (yystate == YYFINAL)
+            goto yyacceptlab;
+          if (yyisDefaultedState (yystate))
+            {
+              yyrule = yydefaultAction (yystate);
+              if (yyrule == 0)
+                {
+
+                  yyreportSyntaxError (&yystack);
+                  goto yyuser_error;
+                }
+              YYCHK1 (yyglrReduce (&yystack, 0, yyrule, yytrue));
+            }
+          else
             {
-              yyn = yytable[yyn];
-              if (0 < yyn)
+              yySymbol yytoken;
+              if (yychar == YYEMPTY)
+                {
+                  YYDPRINTF ((stderr, "Reading a token: "));
+                  yychar = YYLEX;
+                }
+
+              if (yychar <= YYEOF)
+                {
+                  yychar = yytoken = YYEOF;
+                  YYDPRINTF ((stderr, "Now at end of input.\n"));
+                }
+              else
+                {
+                  yytoken = YYTRANSLATE (yychar);
+                  YY_SYMBOL_PRINT ("Next token is", yytoken, &yylval, &yylloc);
+                }
+
+              yygetLRActions (yystate, yytoken, &yyaction, &yyconflicts);
+              if (*yyconflicts != 0)
                 break;
+              if (yyisShiftAction (yyaction))
+                {
+                  YY_SYMBOL_PRINT ("Shifting", yytoken, &yylval, &yylloc);
+                  yychar = YYEMPTY;
+                  yyposn += 1;
+                  yyglrShift (&yystack, 0, yyaction, yyposn, &yylval, &yylloc);
+                  if (0 < yystack.yyerrState)
+                    yystack.yyerrState -= 1;
+                }
+              else if (yyisErrorAction (yyaction))
+                {
+
+                  yyreportSyntaxError (&yystack);
+                  goto yyuser_error;
+                }
+              else
+                YYCHK1 (yyglrReduce (&yystack, 0, -yyaction, yytrue));
             }
         }
 
-      /* Pop the current state because it cannot handle the error token.  */
-      if (yyssp == yyss)
-        YYABORT;
+      while (YYID (yytrue))
+        {
+          yySymbol yytoken_to_shift;
+          size_t yys;
 
+          for (yys = 0; yys < yystack.yytops.yysize; yys += 1)
+            yystackp->yytops.yylookaheadNeeds[yys] = yychar != YYEMPTY;
 
-      yydestruct ("Error: popping",
-                  yystos[yystate], yyvsp);
-      YYPOPSTACK (1);
-      yystate = *yyssp;
-      YY_STACK_PRINT (yyss, yyssp);
-    }
+          /* yyprocessOneStack returns one of three things:
 
-  *++yyvsp = yylval;
+              - An error flag.  If the caller is yyprocessOneStack, it
+                immediately returns as well.  When the caller is finally
+                yyparse, it jumps to an error label via YYCHK1.
 
+              - yyok, but yyprocessOneStack has invoked yymarkStackDeleted
+                (&yystack, yys), which sets the top state of yys to NULL.  Thus,
+                yyparse's following invocation of yyremoveDeletes will remove
+                the stack.
 
-  /* Shift the error token.  */
-  YY_SYMBOL_PRINT ("Shifting", yystos[yyn], yyvsp, yylsp);
+              - yyok, when ready to shift a token.
 
-  yystate = yyn;
-  goto yynewstate;
+             Except in the first case, yyparse will invoke yyremoveDeletes and
+             then shift the next token onto all remaining stacks.  This
+             synchronization of the shift (that is, after all preceding
+             reductions on all stacks) helps prevent double destructor calls
+             on yylval in the event of memory exhaustion.  */
 
+          for (yys = 0; yys < yystack.yytops.yysize; yys += 1)
+            YYCHK1 (yyprocessOneStack (&yystack, yys, yyposn));
+          yyremoveDeletes (&yystack);
+          if (yystack.yytops.yysize == 0)
+            {
+              yyundeleteLastStack (&yystack);
+              if (yystack.yytops.yysize == 0)
+                yyFail (&yystack, YY_("syntax error"));
+              YYCHK1 (yyresolveStack (&yystack));
+              YYDPRINTF ((stderr, "Returning to deterministic operation.\n"));
+
+              yyreportSyntaxError (&yystack);
+              goto yyuser_error;
+            }
+
+          /* If any yyglrShift call fails, it will fail after shifting.  Thus,
+             a copy of yylval will already be on stack 0 in the event of a
+             failure in the following loop.  Thus, yychar is set to YYEMPTY
+             before the loop to make sure the user destructor for yylval isn't
+             called twice.  */
+          yytoken_to_shift = YYTRANSLATE (yychar);
+          yychar = YYEMPTY;
+          yyposn += 1;
+          for (yys = 0; yys < yystack.yytops.yysize; yys += 1)
+            {
+              int yyaction;
+              const short int* yyconflicts;
+              yyStateNum yystate = yystack.yytops.yystates[yys]->yylrState;
+              yygetLRActions (yystate, yytoken_to_shift, &yyaction,
+                              &yyconflicts);
+              /* Note that yyconflicts were handled by yyprocessOneStack.  */
+              YYDPRINTF ((stderr, "On stack %lu, ", (unsigned long int) yys));
+              YY_SYMBOL_PRINT ("shifting", yytoken_to_shift, &yylval, &yylloc);
+              yyglrShift (&yystack, yys, yyaction, yyposn,
+                          &yylval, &yylloc);
+              YYDPRINTF ((stderr, "Stack %lu now in state #%d\n",
+                          (unsigned long int) yys,
+                          yystack.yytops.yystates[yys]->yylrState));
+            }
+
+          if (yystack.yytops.yysize == 1)
+            {
+              YYCHK1 (yyresolveStack (&yystack));
+              YYDPRINTF ((stderr, "Returning to deterministic operation.\n"));
+              yycompressStack (&yystack);
+              break;
+            }
+        }
+      continue;
+    yyuser_error:
+      yyrecoverSyntaxError (&yystack);
+      yyposn = yystack.yytops.yystates[0]->yyposn;
+    }
 
-/*-------------------------------------.
-| yyacceptlab -- YYACCEPT comes here.  |
-`-------------------------------------*/
-yyacceptlab:
+ yyacceptlab:
   yyresult = 0;
   goto yyreturn;
 
-/*-----------------------------------.
-| yyabortlab -- YYABORT comes here.  |
-`-----------------------------------*/
-yyabortlab:
+ yybuglab:
+  YYASSERT (yyfalse);
+  goto yyabortlab;
+
+ yyabortlab:
   yyresult = 1;
   goto yyreturn;
 
-#if !defined(yyoverflow) || YYERROR_VERBOSE
-/*-------------------------------------------------.
-| yyexhaustedlab -- memory exhaustion comes here.  |
-`-------------------------------------------------*/
-yyexhaustedlab:
+ yyexhaustedlab:
   yyerror (YY_("memory exhausted"));
   yyresult = 2;
-  /* Fall through.  */
-#endif
+  goto yyreturn;
 
-yyreturn:
+ yyreturn:
   if (yychar != YYEMPTY)
-     yydestruct ("Cleanup: discarding lookahead",
-                 yytoken, &yylval);
-  /* Do not reclaim the symbols of the rule which action triggered
-     this YYABORT or YYACCEPT.  */
-  YYPOPSTACK (yylen);
-  YY_STACK_PRINT (yyss, yyssp);
-  while (yyssp != yyss)
-    {
-      yydestruct ("Cleanup: popping",
-                  yystos[*yyssp], yyvsp);
-      YYPOPSTACK (1);
-    }
-#ifndef yyoverflow
-  if (yyss != yyssa)
-    YYSTACK_FREE (yyss);
-#endif
-#if YYERROR_VERBOSE
-  if (yymsg != yymsgbuf)
-    YYSTACK_FREE (yymsg);
-#endif
+    yydestruct ("Cleanup: discarding lookahead",
+                YYTRANSLATE (yychar),
+                &yylval);
+
+  /* If the stack is well-formed, pop the stack until it is empty,
+     destroying its entries as we go.  But free the stack regardless
+     of whether it is well-formed.  */
+  if (yystack.yyitems)
+    {
+      yyGLRState** yystates = yystack.yytops.yystates;
+      if (yystates)
+        {
+          size_t yysize = yystack.yytops.yysize;
+          size_t yyk;
+          for (yyk = 0; yyk < yysize; yyk += 1)
+            if (yystates[yyk])
+              {
+                while (yystates[yyk])
+                  {
+                    yyGLRState *yys = yystates[yyk];
+                    if (yys->yypred != NULL)
+                      yydestroyGLRState ("Cleanup: popping", yys);
+                    yystates[yyk] = yys->yypred;
+                    yystack.yynextFree -= 1;
+                    yystack.yyspaceLeft += 1;
+                  }
+                break;
+              }
+        }
+      yyfreeGLRStack (&yystack);
+    }
+
   /* Make sure YYID is used.  */
   return YYID (yyresult);
 }
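
The cleanup block above walks each surviving stack top back through its yypred chain, destroying states as it goes, before freeing the whole item buffer. A minimal self-contained sketch of that predecessor-chain walk, with illustrative types; unlike the real code, which keeps all states in one contiguous buffer released by yyfreeGLRStack, the sketch heap-allocates each node so it can free them individually:

#include <stdio.h>
#include <stdlib.h>

typedef struct Node { struct Node *pred; int id; } Node;

/* Walk a predecessor-linked chain from its top, releasing each node. */
static void destroy_chain(Node *top)
{
  while (top)
    {
    Node *pred = top->pred;
    printf("destroying node %d\n", top->id);
    free(top);
    top = pred;
    }
}

int main(void)
{
  Node *bottom = malloc(sizeof(Node));
  Node *top = malloc(sizeof(Node));
  bottom->pred = NULL; bottom->id = 0;
  top->pred = bottom;  top->id = 1;
  destroy_chain(top);               /* prints node 1, then node 0 */
  return 0;
}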
 
+/* DEBUGGING ONLY */
+#if YYDEBUG
+static void yypstack (yyGLRStack* yystackp, size_t yyk)
+  __attribute__ ((__unused__));
+static void yypdumpstack (yyGLRStack* yystackp) __attribute__ ((__unused__));
 
+static void
+yy_yypstack (yyGLRState* yys)
+{
+  if (yys->yypred)
+    {
+      yy_yypstack (yys->yypred);
+      YYFPRINTF (stderr, " -> ");
+    }
+  YYFPRINTF (stderr, "%d@%lu", yys->yylrState,
+             (unsigned long int) yys->yyposn);
+}
 
-/* Line 1675 of yacc.c  */
-#line 3107 "vtkParse.y"
+static void
+yypstates (yyGLRState* yyst)
+{
+  if (yyst == NULL)
+    YYFPRINTF (stderr, "<null>");
+  else
+    yy_yypstack (yyst);
+  YYFPRINTF (stderr, "\n");
+}
+
+static void
+yypstack (yyGLRStack* yystackp, size_t yyk)
+{
+  yypstates (yystackp->yytops.yystates[yyk]);
+}
+
+#define YYINDEX(YYX)                                                             \
+    ((YYX) == NULL ? -1 : (yyGLRStackItem*) (YYX) - yystackp->yyitems)
+
+
+static void
+yypdumpstack (yyGLRStack* yystackp)
+{
+  yyGLRStackItem* yyp;
+  size_t yyi;
+  for (yyp = yystackp->yyitems; yyp < yystackp->yynextFree; yyp += 1)
+    {
+      YYFPRINTF (stderr, "%3lu. ",
+                 (unsigned long int) (yyp - yystackp->yyitems));
+      if (*(yybool *) yyp)
+        {
+          YYFPRINTF (stderr, "Res: %d, LR State: %d, posn: %lu, pred: %ld",
+                     yyp->yystate.yyresolved, yyp->yystate.yylrState,
+                     (unsigned long int) yyp->yystate.yyposn,
+                     (long int) YYINDEX (yyp->yystate.yypred));
+          if (! yyp->yystate.yyresolved)
+            YYFPRINTF (stderr, ", firstVal: %ld",
+                       (long int) YYINDEX (yyp->yystate
+                                             .yysemantics.yyfirstVal));
+        }
+      else
+        {
+          YYFPRINTF (stderr, "Option. rule: %d, state: %ld, next: %ld",
+                     yyp->yyoption.yyrule - 1,
+                     (long int) YYINDEX (yyp->yyoption.yystate),
+                     (long int) YYINDEX (yyp->yyoption.yynext));
+        }
+      YYFPRINTF (stderr, "\n");
+    }
+  YYFPRINTF (stderr, "Tops:");
+  for (yyi = 0; yyi < yystackp->yytops.yysize; yyi += 1)
+    YYFPRINTF (stderr, "%lu: %ld; ", (unsigned long int) yyi,
+               (long int) YYINDEX (yystackp->yytops.yystates[yyi]));
+  YYFPRINTF (stderr, "\n");
+}
+#endif
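
The YYINDEX macro above converts a stack-item pointer into its offset within yystackp->yyitems, or -1 for NULL, so the debug dump can print stable indices instead of raw addresses. A small self-contained sketch of the same pointer-difference idiom, with illustrative names:

#include <stdio.h>

typedef struct { int value; } Item;

/* Index of p within the array that starts at base, or -1 for NULL. */
static long item_index(const Item *base, const Item *p)
{
  return (p == NULL) ? -1L : (long) (p - base);
}

int main(void)
{
  Item items[4];
  printf("%ld %ld\n", item_index(items, &items[2]), item_index(items, NULL));
  /* prints: 2 -1 */
  return 0;
}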
+
+
+
+/* Line 2634 of glr.c  */
+#line 3328 "vtkParse.y"
 
 #include <string.h>
 #include "lex.yy.c"
@@ -8058,6 +11402,9 @@ const char *type_class(unsigned int type, const char *classname)
       {
       switch ((type & VTK_PARSE_BASE_TYPE))
         {
+        case 0:
+          classname = "auto";
+          break;
         case VTK_PARSE_VOID:
           classname = "void";
           break;
@@ -8076,6 +11423,15 @@ const char *type_class(unsigned int type, const char *classname)
         case VTK_PARSE_CHAR:
           classname = "char";
           break;
+        case VTK_PARSE_CHAR16_T:
+          classname = "char16_t";
+          break;
+        case VTK_PARSE_CHAR32_T:
+          classname = "char32_t";
+          break;
+        case VTK_PARSE_WCHAR_T:
+          classname = "wchar_t";
+          break;
         case VTK_PARSE_UNSIGNED_CHAR:
           classname = "unsigned char";
           break;
@@ -8169,27 +11525,6 @@ void start_class(const char *classname, int is_struct_or_union)
   clearComment();
 }
 
-/* reject the class */
-void reject_class(const char *classname, int is_struct_or_union)
-{
-  static ClassInfo static_class;
-
-  pushClass();
-  currentClass = &static_class;
-  currentClass->Name = classname;
-  vtkParse_InitClass(currentClass);
-
-  access_level = VTK_ACCESS_PRIVATE;
-  if (is_struct_or_union)
-    {
-    access_level = VTK_ACCESS_PUBLIC;
-    }
-
-  vtkParse_InitFunction(currentFunction);
-  startSig();
-  clearComment();
-}
-
 /* reached the end of a class definition */
 void end_class()
 {
@@ -8200,9 +11535,13 @@ void end_class()
 }
 
 /* add a base class to the specified class */
-void add_base_class(ClassInfo *cls, const char *name, int al, int virt)
+void add_base_class(ClassInfo *cls, const char *name, int al,
+  unsigned int extra)
 {
-  if (cls && al == VTK_ACCESS_PUBLIC && virt == 0)
+  /* "extra" can contain VTK_PARSE_VIRTUAL and VTK_PARSE_PACK */
+  if (cls && al == VTK_ACCESS_PUBLIC &&
+      (extra & VTK_PARSE_VIRTUAL) == 0 &&
+      (extra & VTK_PARSE_PACK) == 0)
     {
     vtkParse_AddStringToArray(&cls->SuperClasses,
                               &cls->NumberOfSuperClasses,
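
A minimal sketch of the filter applied in add_base_class above: only plain public bases are recorded as superclasses, while virtual bases and pack expansions are skipped. The SKETCH_* flags are stand-ins for the real VTK_PARSE_VIRTUAL and VTK_PARSE_PACK bits:

#include <stdio.h>

#define SKETCH_VIRTUAL 0x01u  /* stand-in for VTK_PARSE_VIRTUAL */
#define SKETCH_PACK    0x02u  /* stand-in for VTK_PARSE_PACK */

/* Mirrors the condition above: public, not virtual, not a pack. */
static int records_superclass(int is_public, unsigned int extra)
{
  return is_public && (extra & (SKETCH_VIRTUAL | SKETCH_PACK)) == 0;
}

int main(void)
{
  printf("%d %d %d\n",
         records_superclass(1, 0),               /* public B          -> 1 */
         records_superclass(1, SKETCH_VIRTUAL),  /* virtual public B  -> 0 */
         records_superclass(1, SKETCH_PACK));    /* public B... pack  -> 0 */
  return 0;
}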
@@ -8243,12 +11582,20 @@ void add_using(const char *name, int is_namespace)
 }
 
 /* start a new enum */
-void start_enum(const char *name)
+void start_enum(const char *name, int is_scoped,
+                unsigned int type, const char *basename)
 {
   EnumInfo *item;
 
+  currentEnumType = (type ? type : VTK_PARSE_INT);
   currentEnumName = "int";
   currentEnumValue = NULL;
+
+  if (type == 0 && is_scoped)
+    {
+    type = VTK_PARSE_INT;
+    }
+
   if (name)
     {
     currentEnumName = name;
@@ -8256,6 +11603,7 @@ void start_enum(const char *name)
     vtkParse_InitEnum(item);
     item->Name = name;
     item->Access = access_level;
+
     if (currentClass)
       {
       vtkParse_AddEnumToClass(currentClass, item);
@@ -8264,12 +11612,30 @@ void start_enum(const char *name)
       {
       vtkParse_AddEnumToNamespace(currentNamespace, item);
       }
+
+    if (type)
+      {
+      vtkParse_AddStringToArray(&item->SuperClasses,
+                                &item->NumberOfSuperClasses,
+                                type_class(type, basename));
+      }
+
+    if (is_scoped)
+      {
+      pushClass();
+      currentClass = item;
+      }
     }
 }
 
 /* finish the enum */
 void end_enum()
 {
+  if (currentClass && currentClass->ItemType == VTK_ENUM_INFO)
+    {
+    popClass();
+    }
+
   currentEnumName = NULL;
   currentEnumValue = NULL;
 }
@@ -8316,7 +11682,7 @@ void add_enum(const char *name, const char *value)
     currentEnumValue = "0";
     }
 
-  add_constant(name, currentEnumValue, VTK_PARSE_INT, currentEnumName, 2);
+  add_constant(name, currentEnumValue, currentEnumType, currentEnumName, 2);
 }
 
 /* for a macro constant, guess the constant type, doesn't do any math */
@@ -8332,18 +11698,10 @@ unsigned int guess_constant_type(const char *valstring)
     return 0;
     }
 
-  if (valstring[0] < '0' || valstring[0] > '9')
+  k = vtkParse_SkipId(valstring);
+  if (valstring[k] == '\0')
     {
-    k = 0;
-    while ((valstring[k] >= '0' && valstring[k] <= '9') ||
-           (valstring[k] >= 'a' && valstring[k] <= 'z') ||
-           (valstring[k] >= 'A' && valstring[k] <= 'Z') ||
-           valstring[k] == '_') { k++; }
-
-    if (valstring[k] == '\0')
-      {
-      is_name = 1;
-      }
+    is_name = 1;
     }
 
   if (strcmp(valstring, "true") == 0 || strcmp(valstring, "false") == 0)
@@ -8351,6 +11709,11 @@ unsigned int guess_constant_type(const char *valstring)
     return VTK_PARSE_BOOL;
     }
 
+  if (strcmp(valstring, "nullptr") == 0)
+    {
+    return VTK_PARSE_NULLPTR_T;
+    }
+
   if (valstring[0] == '\'')
     {
     return VTK_PARSE_CHAR;
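
A self-contained sketch of the classification that guess_constant_type performs on macro values, including the new nullptr case. The function below is a simplified stand-in: it ignores numeric suffixes, hex, floats, and the operator scan that the real code performs afterwards:

#include <stdio.h>
#include <string.h>

/* Reduced classifier: keyword constants, then literals by first character,
   then bare identifiers as named constants of unknown type. */
static const char *sketch_guess(const char *s)
{
  if (strcmp(s, "true") == 0 || strcmp(s, "false") == 0) return "bool";
  if (strcmp(s, "nullptr") == 0) return "nullptr_t";
  if (s[0] == '\'') return "char";
  if (s[0] == '"') return "string";
  if (s[0] >= '0' && s[0] <= '9') return "numeric";
  return "named constant";
}

int main(void)
{
  const char *samples[] = { "true", "nullptr", "'x'", "\"abc\"", "42", "MY_FLAG" };
  size_t i;
  for (i = 0; i < sizeof(samples) / sizeof(samples[0]); i++)
    printf("%-8s -> %s\n", samples[i], sketch_guess(samples[i]));
  return 0;
}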
@@ -8769,6 +12132,13 @@ void handle_complex_type(
   /* remove specifiers like "friend" and "typedef" */
   datatype &= VTK_PARSE_QUALIFIED_TYPE;
 
+  /* remove the pack specifier caused by "..." */
+  if ((extra & VTK_PARSE_PACK) != 0)
+    {
+    val->IsPack = 1;
+    extra ^= VTK_PARSE_PACK;
+    }
+
   /* if "extra" was set, parentheses were involved */
   if ((extra & VTK_PARSE_BASE_TYPE) == VTK_PARSE_FUNCTION)
     {
@@ -8789,7 +12159,7 @@ void handle_complex_type(
     /* the val type is whatever was inside the parentheses */
     clearTypeId();
     setTypeId(func->Class ? "method" : "function");
-    datatype = (extra & VTK_PARSE_UNQUALIFIED_TYPE);
+    datatype = (extra & (VTK_PARSE_UNQUALIFIED_TYPE | VTK_PARSE_RVALUE));
     }
   else if ((extra & VTK_PARSE_INDIRECT) == VTK_PARSE_BAD_INDIRECT)
     {
@@ -8797,12 +12167,12 @@ void handle_complex_type(
     }
   else if ((extra & VTK_PARSE_INDIRECT) != 0)
     {
-    extra = (extra & VTK_PARSE_INDIRECT);
+    extra = (extra & (VTK_PARSE_INDIRECT | VTK_PARSE_RVALUE));
 
     if ((extra & VTK_PARSE_REF) != 0)
       {
-      datatype = (datatype | VTK_PARSE_REF);
-      extra = (extra & ~VTK_PARSE_REF);
+      datatype = (datatype | (extra & (VTK_PARSE_REF | VTK_PARSE_RVALUE)));
+      extra = (extra & ~(VTK_PARSE_REF | VTK_PARSE_RVALUE));
       }
 
     if (extra != 0 && getArrayNDims() > 0)
@@ -8916,6 +12286,25 @@ void output_function()
       }
     }
 
+  /* friend */
+  if (currentFunction->ReturnValue &&
+      currentFunction->ReturnValue->Type & VTK_PARSE_FRIEND)
+    {
+    currentFunction->ReturnValue->Type ^= VTK_PARSE_FRIEND;
+    output_friend_function();
+    return;
+    }
+
+  /* typedef */
+  if (currentFunction->ReturnValue &&
+      currentFunction->ReturnValue->Type & VTK_PARSE_TYPEDEF)
+    {
+    /* for now, reject it instead of turning a method into a typedef */
+    currentFunction->ReturnValue->Type ^= VTK_PARSE_TYPEDEF;
+    reject_function();
+    return;
+    }
+
   /* static */
   if (currentFunction->ReturnValue &&
       currentFunction->ReturnValue->Type & VTK_PARSE_STATIC)
@@ -9193,6 +12582,12 @@ void vtkParse_SetRecursive(int option)
     }
 }
 
+/* Set the global variable that stores the current executable */
+void vtkParse_SetCommandName(const char *name)
+{
+  CommandName = name;
+}
+
 /* Parse a header file and return a FileInfo struct */
 FileInfo *vtkParse_ParseFile(
   const char *filename, FILE *ifile, FILE *errfile)
@@ -9211,6 +12606,7 @@ FileInfo *vtkParse_ParseFile(
   /* "preprocessor" is a global struct used by the parser */
   preprocessor = (PreprocessInfo *)malloc(sizeof(PreprocessInfo));
   vtkParsePreprocess_Init(preprocessor, filename);
+  preprocessor->Strings = data->Strings;
   vtkParsePreprocess_AddStandardMacros(preprocessor, VTK_PARSE_NATIVE);
 
   /* add include files specified on the command line */
@@ -9280,7 +12676,6 @@ FileInfo *vtkParse_ParseFile(
 
   if (ret)
     {
-    print_parser_error("syntax error", NULL, 0);
     return NULL;
     }
 
@@ -9441,7 +12836,7 @@ void vtkParse_Free(FileInfo *file_info)
 /** Define a preprocessor macro. Function macros are not supported.  */
 void vtkParse_DefineMacro(const char *name, const char *definition)
 {
-  size_t n = vtkidlen(name);
+  size_t n = vtkParse_SkipId(name);
   size_t l;
   char *cp;
 
@@ -9468,7 +12863,7 @@ void vtkParse_DefineMacro(const char *name, const char *definition)
 /** Undefine a preprocessor macro.  */
 void vtkParse_UndefineMacro(const char *name)
 {
-  size_t n = vtkidlen(name);
+  size_t n = vtkParse_SkipId(name);
   char *cp;
 
   cp = (char *)malloc(n+2);
@@ -9505,7 +12900,8 @@ void vtkParse_IncludeDirectory(const char *dirname)
 /** Return the full path to a header file.  */
 const char *vtkParse_FindIncludeFile(const char *filename)
 {
-  static PreprocessInfo info = {0, 0, 0, 0, 0, 0, 0, 0, 0};
+  static StringCache cache = {0, 0, 0, 0};
+  static PreprocessInfo info = {0, 0, 0, 0, 0, 0, &cache, 0, 0, 0};
   int val;
   int i;
 
diff --git a/Wrapping/Tools/vtkParse.y b/Wrapping/Tools/vtkParse.y
index 4f474af..caf2535 100644
--- a/Wrapping/Tools/vtkParse.y
+++ b/Wrapping/Tools/vtkParse.y
@@ -25,9 +25,7 @@ Run yacc like this:
 Modify vtkParse.tab.c:
   - convert TABs to spaces (eight per tab)
   - remove spaces from ends of lines, s/ *$//g
-  - remove the "goto yyerrlab1;" that appears right before yyerrlab1:
-  - remove the #defined constants that appear right after the anonymous_enums
-
+  - replace all instances of "static inline" with "static".
 */
 
 /*
@@ -71,10 +69,10 @@ create a yacc parser without any shift/reduce conflicts.  The rules for
 types are organized according to the way that types are usually defined
 in working code, rather than strictly according to C++ grammar.
 
-The declaration specifiers "friend" and "typedef" can only appear at the
-beginning of a declaration sequence.  There are also restrictions on
-where class and enum specifiers can be used: you can declare a new struct
-within a variable declaration, but not within a parameter declaration.
+The declaration specifier "typedef" can only appear at the beginning
+of a declaration sequence.  There are also restrictions on where class
+and enum specifiers can be used: you can declare a new struct within a
+variable declaration, but not within a parameter declaration.
 
 The lexer returns each of "(scope::*", "(*", "(a::b::*", etc. as single
 tokens.  The C++ BNF, in contrast, would consider these to be a "("
@@ -90,30 +88,12 @@ a class.  This parser always interprets this pattern as a constructor
 declaration, because function calls are ignored by the parser, and
 variable declarations of the form y(x); are exceedingly rare compared
 to the more usual form y x; without parentheses.
-
-One ambiguous structure that has been found in some working code, but
-is currently not dealt with properly by the parser, is the following:
-
-  enum { x = mytemplate<int,2>::x };
-
-This is interpreted as the following ungrammatical statement:
-
-  enum { x = mytemplate < int ,
-         2 > ::x };
-
-This has proven to be very hard to fix in the parser, but it is possible
-to modify the statement so that it does not confuse the parser:
-
-  enum { x = (mytemplate<int,2>::x) };
-
-The parentheses serve to disambiguate the statement.
-
 */
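
A small self-contained sketch of the lookahead described above: at a '(', scan an optional scope of identifier characters and ':' and, if a '*' follows, report the whole "(scope::*" as one token. It is a simplified stand-in for the real lex rules (no whitespace or template arguments are handled):

#include <ctype.h>
#include <stdio.h>

/* Return the length of the token starting at '(' in s: either the full
   "(scope::*" form or just the single '(' character. */
static size_t scan_paren_star(const char *s)
{
  size_t i = 1;
  if (s[0] != '(') return 0;
  while (isalnum((unsigned char) s[i]) || s[i] == '_' || s[i] == ':')
    i++;
  if (s[i] == '*') return i + 1;
  return 1;
}

int main(void)
{
  printf("%lu\n", (unsigned long) scan_paren_star("(a::b::*fp)(int)"));  /* 8 */
  printf("%lu\n", (unsigned long) scan_paren_star("(x + y)"));           /* 1 */
  return 0;
}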
 
 #include <stdio.h>
 #include <stdlib.h>
 #include <string.h>
-#define yyerror(a) fprintf(stderr,"%s\n",a)
+#define yyerror(a) print_parser_error(a, NULL, 0)
 #define yywrap() 1
 
 /* Make sure yacc-generated code knows we have included stdlib.h.  */
@@ -212,6 +192,7 @@ const char   **Definitions;
 /* options that can be set by the programs that use the parser */
 int            IgnoreBTX = 0;
 int            Recursive = 0;
+const char    *CommandName = NULL;
 
 /* various state variables */
 NamespaceInfo *currentNamespace = NULL;
@@ -220,6 +201,7 @@ FunctionInfo  *currentFunction = NULL;
 TemplateInfo  *currentTemplate = NULL;
 const char    *currentEnumName = NULL;
 const char    *currentEnumValue = NULL;
+unsigned int   currentEnumType = 0;
 parse_access_t access_level = VTK_ACCESS_PUBLIC;
 
 /* functions from vtkParse.l */
@@ -228,10 +210,9 @@ void print_parser_error(const char *text, const char *cp, size_t n);
 /* helper functions */
 const char *type_class(unsigned int type, const char *classname);
 void start_class(const char *classname, int is_struct_or_union);
-void reject_class(const char *classname, int is_struct_or_union);
 void end_class();
 void add_base_class(ClassInfo *cls, const char *name, int access_lev,
-                    int is_virtual);
+                    unsigned int extra);
 void output_friend_function(void);
 void output_function(void);
 void reject_function(void);
@@ -242,7 +223,8 @@ void add_parameter(FunctionInfo *func, unsigned int type,
 void add_template_parameter(unsigned int datatype,
                             unsigned int extra, const char *funcSig);
 void add_using(const char *name, int is_namespace);
-void start_enum(const char *enumname);
+void start_enum(const char *name, int is_scoped,
+                unsigned int type, const char *basename);
 void add_enum(const char *name, const char *value);
 void end_enum();
 unsigned int guess_constant_type(const char *value);
@@ -264,6 +246,7 @@ void outputSetVectorMacro(const char *var, unsigned int paramType,
 void outputGetVectorMacro(const char *var, unsigned int paramType,
                           const char *typeText, int n);
 
+
 /*----------------------------------------------------------------
  * String utility methods
  *
@@ -385,28 +368,6 @@ static const char *vtkstrcat7(const char *str1, const char *str2,
   return vtkstrncat(7, cp);
 }
 
-static size_t vtkidlen(const char *text)
-{
-  size_t i = 0;
-  char c = text[0];
-
-  if ((c >= 'a' && c <= 'z') ||
-      (c >= 'A' && c <= 'Z') ||
-       c == '_')
-    {
-    do
-      {
-      c = text[++i];
-      }
-    while ((c >= 'a' && c <= 'z') ||
-           (c >= 'A' && c <= 'Z') ||
-           (c >= '0' && c <= '9') ||
-           c == '_');
-    }
-
-  return i;
-}
-
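
Elsewhere in this patch the removed vtkidlen helper above is replaced by calls to vtkParse_SkipId; both return the length of the leading C identifier in a string. A self-contained sketch of that computation (ASCII only, illustrative name):

#include <ctype.h>
#include <stdio.h>

/* Length of the leading identifier: letters, digits and '_', and the
   first character may not be a digit. */
static size_t sketch_skip_id(const char *text)
{
  size_t i = 0;
  if (isalpha((unsigned char) text[0]) || text[0] == '_')
    while (isalnum((unsigned char) text[i]) || text[i] == '_')
      i++;
  return i;
}

int main(void)
{
  printf("%lu\n", (unsigned long) sketch_skip_id("VTK_VERSION=6.1"));  /* 11 */
  printf("%lu\n", (unsigned long) sketch_skip_id("9lives"));           /* 0 */
  return 0;
}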
 /*----------------------------------------------------------------
  * Comments
  */
@@ -877,8 +838,8 @@ void setTypeMod(unsigned int mod)
 /* modify the indirection (pointers, refs) in the storage type */
 void setTypePtr(unsigned int ind)
 {
-  storedType &= ~(unsigned int)(VTK_PARSE_INDIRECT);
-  ind &= VTK_PARSE_INDIRECT;
+  storedType &= ~(unsigned int)(VTK_PARSE_INDIRECT | VTK_PARSE_RVALUE);
+  ind &= (VTK_PARSE_INDIRECT | VTK_PARSE_RVALUE);
   storedType |= ind;
 }
 
@@ -1203,11 +1164,8 @@ void prepend_scope(char *cp, const char *arg)
   n = strlen(arg);
   i = m;
   while (i > 0 &&
-         ((cp[i-1] >= 'a' && cp[i-1] <= 'z') ||
-          (cp[i-1] >= 'A' && cp[i-1] <= 'Z') ||
-          (cp[i-1] >= '0' && cp[i-1] <= '9') ||
-          cp[i-1] == '_' || cp[i-1] == ':' ||
-          cp[i-1] == '>'))
+         (vtkParse_CharType(cp[i-1], CPRE_IDGIT) ||
+          cp[i-1] == ':' || cp[i-1] == '>'))
     {
     i--;
     if (cp[i] == '>')
@@ -1256,6 +1214,12 @@ unsigned int add_indirection(unsigned int type1, unsigned int type2)
   result = ((type1 & ~VTK_PARSE_POINTER_MASK) |
             (type2 & ~VTK_PARSE_POINTER_MASK));
 
+  /* if there are two ampersands, it is an rvalue reference */
+  if ((type1 & type2 & VTK_PARSE_REF) != 0)
+    {
+    result |= VTK_PARSE_RVALUE;
+    }
+
   while (ptr2)
     {
     reverse = ((reverse << 2) | (ptr2 & VTK_PARSE_POINTER_LOWMASK));
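
A minimal sketch of the rule added above: when both operands of add_indirection carry a reference bit, that is '&' applied to '&', the combined type also receives an rvalue flag. SK_REF and SK_RVALUE are stand-ins for the real VTK_PARSE_REF and VTK_PARSE_RVALUE values, and the pointer-mask handling of the real function is omitted:

#include <stdio.h>

#define SK_REF    0x100u  /* stand-in for VTK_PARSE_REF */
#define SK_RVALUE 0x200u  /* stand-in for VTK_PARSE_RVALUE */

/* Combine two indirection masks; two reference bits mean '&&'. */
static unsigned int sk_combine(unsigned int t1, unsigned int t2)
{
  unsigned int result = t1 | t2;
  if ((t1 & t2 & SK_REF) != 0)
    {
    result |= SK_RVALUE;
    }
  return result;
}

int main(void)
{
  printf("0x%x\n", sk_combine(SK_REF, 0));       /* T&  : 0x100 */
  printf("0x%x\n", sk_combine(SK_REF, SK_REF));  /* T&& : 0x300 */
  return 0;
}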
@@ -1313,6 +1277,15 @@ unsigned int add_indirection_to_array(unsigned int type)
  * Start of yacc section
  */
 
+/* Use the GLR parser algorithm for tricky cases */
+%glr-parser
+
+/* Expect five shift-reduce conflicts from opt_final (final classes) */
+%expect 5
+
+/* Expect 120 reduce/reduce conflicts; these can be cleared by removing
+   either '<' or angle_brackets_sig from constant_expression_item. */
+%expect-rr 120
 
 /* The parser will shift/reduce values <str> or <integer>, where
    <str> is for IDs and <integer> is for types, modifiers, etc. */
@@ -1351,6 +1324,15 @@ unsigned int add_indirection_to_array(unsigned int type)
 %token <str> FLOAT_LITERAL
 %token <str> CHAR_LITERAL
 %token <str> ZERO
+%token <str> NULLPTR
+
+/* typedef types */
+%token <str> SSIZE_T
+%token <str> SIZE_T
+%token <str> NULLPTR_T
+
+/* This is the '[[' that marks the start of an attribute */
+%token BEGIN_ATTRIB
 
 /* keywords (many unused keywords have been omitted) */
 %token STRUCT
@@ -1364,20 +1346,27 @@ unsigned int add_indirection_to_array(unsigned int type)
 %token VOLATILE
 %token MUTABLE
 %token STATIC
+%token THREAD_LOCAL
 %token VIRTUAL
 %token EXPLICIT
 %token INLINE
+%token CONSTEXPR
 %token FRIEND
 %token EXTERN
 %token OPERATOR
 %token TEMPLATE
 %token THROW
+%token TRY
+%token CATCH
+%token NOEXCEPT
+%token DECLTYPE
 %token TYPENAME
 %token TYPEDEF
 %token NAMESPACE
 %token USING
 %token NEW
 %token DELETE
+%token DEFAULT
 %token STATIC_CAST
 %token DYNAMIC_CAST
 %token CONST_CAST
@@ -1387,7 +1376,7 @@ unsigned int add_indirection_to_array(unsigned int type)
 %token OP_LSHIFT_EQ
 %token OP_RSHIFT_EQ
 %token OP_LSHIFT
-%token OP_RSHIFT
+%token OP_RSHIFT_A
 %token OP_DOT_POINTER
 %token OP_ARROW_POINTER
 %token OP_ARROW
@@ -1414,6 +1403,7 @@ unsigned int add_indirection_to_array(unsigned int type)
 %token OTHER
 
 /* types */
+%token AUTO
 %token VOID
 %token BOOL
 %token FLOAT
@@ -1423,13 +1413,12 @@ unsigned int add_indirection_to_array(unsigned int type)
 %token LONG
 %token INT64__
 %token CHAR
+%token CHAR16_T
+%token CHAR32_T
+%token WCHAR_T
 %token SIGNED
 %token UNSIGNED
 
-/* typedef types */
-%token SSIZE_T
-%token SIZE_T
-
 /* VTK typedef types */
 %token IdType
 %token TypeInt8
@@ -1486,12 +1475,14 @@ opt_declaration_seq:
       clearTemplate();
       closeComment();
     }
-    declaration
+    attribute_specifier_seq declaration
 
 declaration:
     using_directive
   | using_declaration
+  | alias_declaration
   | forward_declaration
+  | opaque_enum_declaration
   | namespace_definition
   | namespace_alias_definition
   | linkage_specification
@@ -1501,6 +1492,7 @@ declaration:
   | class_definition
   | function_definition
   | template_declaration
+  | explicit_instantiation
   | declaration_macro
   | id_expression ';'
   | ';'
@@ -1510,6 +1502,11 @@ template_declaration:
   | template_head function_definition
   | template_head nested_variable_initialization
   | template_head template_declaration
+  | template_head alias_declaration
+
+explicit_instantiation:
+    EXTERN TEMPLATE ignored_item_no_angle ignored_expression ';'
+  | TEMPLATE ignored_item_no_angle ignored_expression ';'
 
 /*
  * extern section is parsed, but "extern" is ignored
@@ -1539,8 +1536,9 @@ forward_declaration:
   | template_head simple_forward_declaration
 
 simple_forward_declaration:
-    class_key id_expression ';'
-  | decl_specifier_seq class_key id_expression ';'
+    class_key attribute_specifier_seq class_head_name ';'
+  | class_key attribute_specifier_seq ';'
+  | decl_specifier_seq class_key attribute_specifier_seq class_head_name ';'
 
 class_definition:
     class_specifier opt_decl_specifier_seq opt_declarator_list ';'
@@ -1561,16 +1559,47 @@ class_specifier:
     }
 
 class_head:
-    class_key id_expression { start_class($<str>2, $<integer>1); }
-    opt_base_clause
-  | class_key { start_class(NULL, $<integer>1); }
-    opt_base_clause
+    class_key attribute_specifier_seq class_head_name opt_final ':'
+    {
+      start_class($<str>3, $<integer>1);
+      currentClass->IsFinal = $<integer>4;
+    }
+    base_specifier_list
+  | class_key attribute_specifier_seq class_head_name opt_final
+    {
+      start_class($<str>3, $<integer>1);
+      currentClass->IsFinal = $<integer>4;
+    }
+  | class_key attribute_specifier_seq ':'
+    {
+      start_class(NULL, $<integer>1);
+    }
+    base_specifier_list
+  | class_key attribute_specifier_seq
+    {
+      start_class(NULL, $<integer>1);
+    }
 
 class_key:
     CLASS { $<integer>$ = 0; }
   | STRUCT { $<integer>$ = 1; }
   | UNION { $<integer>$ = 2; }
 
+class_head_name:
+    nested_name_specifier class_name attribute_specifier_seq
+    { $<str>$ = vtkstrcat($<str>1, $<str>2); }
+  | scope_operator_sig nested_name_specifier class_name attribute_specifier_seq
+    { $<str>$ = vtkstrcat3("::", $<str>2, $<str>3); }
+  | class_name attribute_specifier_seq
+
+class_name:
+    simple_id
+  | template_id
+
+opt_final:
+  { $<integer>$ = 0; }
+  | ID { $<integer>$ = (strcmp($<str>1, "final") == 0); }
+
 member_specification:
   | member_specification
     {
@@ -1580,7 +1609,7 @@ member_specification:
       clearTemplate();
       closeComment();
     }
-    member_declaration
+    attribute_specifier_seq member_declaration
   | member_specification
     member_access_specifier ':'
 
@@ -1591,7 +1620,9 @@ member_access_specifier:
 
 member_declaration:
     using_declaration
+  | alias_declaration
   | forward_declaration
+  | opaque_enum_declaration
   | friend_declaration
   | typedef_declaration
   | variable_declaration
@@ -1599,6 +1630,7 @@ member_declaration:
   | class_definition
   | method_definition
   | template_member_declaration
+  | explicit_instantiation
   | declaration_macro
   | VTK_BYTE_SWAP_DECL ignored_parentheses
   | id_expression ';'
@@ -1607,6 +1639,8 @@ member_declaration:
 template_member_declaration:
     template_head class_definition
   | template_head method_definition
+  | template_head template_member_declaration
+  | template_head alias_declaration
 
 friend_declaration:
     FRIEND ignored_class
@@ -1614,27 +1648,23 @@ friend_declaration:
   | FRIEND forward_declaration
   | FRIEND method_declaration function_body { output_friend_function(); }
 
-opt_base_clause:
-  | base_clause
-
-base_clause:
-    ':' base_specifier_list
-
 base_specifier_list:
     base_specifier
-  | base_specifier_list ',' base_specifier
+  | base_specifier_list ',' attribute_specifier_seq base_specifier
 
 base_specifier:
-    id_expression
-    { add_base_class(currentClass, $<str>1, access_level, 0); }
-  | VIRTUAL opt_access_specifier id_expression
-    { add_base_class(currentClass, $<str>3, $<integer>2, 1); }
-  | access_specifier opt_virtual id_expression
-    { add_base_class(currentClass, $<str>3, $<integer>1, $<integer>2); }
+    id_expression opt_ellipsis
+    { add_base_class(currentClass, $<str>1, access_level, $<integer>2); }
+  | VIRTUAL opt_access_specifier id_expression opt_ellipsis
+    { add_base_class(currentClass, $<str>3, $<integer>2,
+                     (VTK_PARSE_VIRTUAL | $<integer>4)); }
+  | access_specifier opt_virtual id_expression opt_ellipsis
+    { add_base_class(currentClass, $<str>3, $<integer>1,
+                     ($<integer>2 | $<integer>4)); }
 
 opt_virtual:
     { $<integer>$ = 0; }
-  | VIRTUAL { $<integer>$ = 1; }
+  | VIRTUAL { $<integer>$ = VTK_PARSE_VIRTUAL; }
 
 opt_access_specifier:
     { $<integer>$ = access_level; }
@@ -1653,13 +1683,19 @@ access_specifier:
  * as long as all IDs are properly scoped.
  */
 
+opaque_enum_declaration:
+    enum_key attribute_specifier_seq id_expression opt_enum_base ';'
+  | enum_key attribute_specifier_seq ';'
+  | decl_specifier_seq enum_key attribute_specifier_seq
+    id_expression opt_enum_base ';'
+
 enum_definition:
     enum_specifier opt_decl_specifier_seq opt_declarator_list ';'
   | decl_specifier_seq enum_specifier opt_decl_specifier_seq
     opt_declarator_list ';'
 
 enum_specifier:
-    enum_head '{' { pushType(); start_enum($<str>1); } opt_enumerator_list '}'
+    enum_head '{' { pushType(); } enumerator_list '}'
     {
       popType();
       clearTypeId();
@@ -1672,19 +1708,35 @@ enum_specifier:
     }
 
 enum_head:
-    ENUM id_expression { $<str>$ = $<str>2; }
-  | ENUM { $<str>$ = NULL; }
+    enum_key attribute_specifier_seq id_expression opt_enum_base
+    {
+      start_enum($<str>3, $<integer>1, $<integer>4, getTypeId());
+      clearTypeId();
+      $<str>$ = $<str>3;
+    }
+  | enum_key attribute_specifier_seq opt_enum_base
+    {
+      start_enum(NULL, $<integer>1, $<integer>3, getTypeId());
+      clearTypeId();
+      $<str>$ = NULL;
+    }
 
-opt_enumerator_list:
-  | enumerator_list
+enum_key:
+    ENUM { $<integer>$ = 0; }
+  | ENUM CLASS { $<integer>$ = 1; }
+  | ENUM STRUCT { $<integer>$ = 1; }
+
+opt_enum_base:
+    { $<integer>$ = 0; }
+  | ':' { pushType(); } store_type_specifier
+    { $<integer>$ = getType(); popType(); }
 
 enumerator_list:
     enumerator_definition
-  | enumerator_list ','
   | enumerator_list ',' enumerator_definition
 
 enumerator_definition:
-    simple_id { add_enum($<str>1, NULL); }
+  | simple_id { add_enum($<str>1, NULL); }
   | simple_id '=' { postSig("="); markSig(); }
     constant_expression { chopSig(); add_enum($<str>1, copySig()); }
 
@@ -1693,13 +1745,20 @@ enumerator_definition:
  */
 
 nested_variable_initialization:
-    store_type nested_name_specifier simple_id '=' ignored_expression ';'
+    store_type opt_ellipsis nested_name_specifier simple_id
+    ignored_initializer ';'
+
+ignored_initializer:
+    '=' ignored_expression
+  | ignored_braces
 
 ignored_class:
-    class_key id_expression ignored_class_body
-  | decl_specifier_seq class_key id_expression ignored_class_body
-  | class_key ignored_class_body
-  | decl_specifier_seq class_key ignored_class_body
+    class_key attribute_specifier_seq
+    class_head_name opt_final ignored_class_body
+  | decl_specifier_seq class_key attribute_specifier_seq
+    class_head_name opt_final ignored_class_body
+  | class_key attribute_specifier_seq ignored_class_body
+  | decl_specifier_seq class_key attribute_specifier_seq ignored_class_body
 
 ignored_class_body:
     '{' ignored_items '}' ignored_expression ';'
@@ -1739,9 +1798,9 @@ typedef_direct_declarator:
   | function_direct_declarator
 
 function_direct_declarator:
-    declarator_id '(' { pushFunction(); postSig("("); }
-    parameter_declaration_clause ')' { postSig(")"); }
-    function_qualifiers { $<integer>$ = VTK_PARSE_FUNCTION; popFunction(); }
+    opt_ellipsis declarator_id '(' { pushFunction(); postSig("("); }
+    parameter_declaration_clause ')' { postSig(")"); } function_qualifiers
+    { $<integer>$ = (VTK_PARSE_FUNCTION | $<integer>1); popFunction(); }
 
 typedef_declarator_id:
     typedef_direct_declarator
@@ -1778,7 +1837,7 @@ typedef_declarator_id:
  */
 
 using_declaration:
-    USING  using_id ';' { add_using($<str>2, 0); }
+    USING using_id ';' { add_using($<str>2, 0); }
 
 using_id:
     id_expression
@@ -1795,12 +1854,39 @@ using_id:
 using_directive:
     USING NAMESPACE id_expression ';' { add_using($<str>3, 1); }
 
+alias_declaration:
+    USING id_expression attribute_specifier_seq '=' { markSig(); }
+    store_type direct_abstract_declarator ';'
+    {
+      ValueInfo *item = (ValueInfo *)malloc(sizeof(ValueInfo));
+      vtkParse_InitValue(item);
+      item->ItemType = VTK_TYPEDEF_INFO;
+      item->Access = access_level;
+
+      handle_complex_type(item, getType(), $<integer>6, copySig());
+
+      item->Name = $<str>2;
+
+      if (currentTemplate)
+        {
+        vtkParse_FreeValue(item);
+        }
+      else if (currentClass)
+        {
+        vtkParse_AddTypedefToClass(currentClass, item);
+        }
+      else
+        {
+        vtkParse_AddTypedefToNamespace(currentNamespace, item);
+        }
+    }
+
 /*
  * Templates
  */
 
 template_head:
-    TEMPLATE '<' '>'
+    TEMPLATE '<' right_angle_bracket
     { postSig("template<> "); clearTypeId(); }
   | TEMPLATE '<'
     {
@@ -1810,7 +1896,7 @@ template_head:
       clearTypeId();
       startTemplate();
     }
-    template_parameter_list '>'
+    template_parameter_list right_angle_bracket
     {
       chopSig();
       if (getSig()[getSigLength()-1] == '>') { postSig(" "); }
@@ -1847,6 +1933,10 @@ template_parameter:
     }
     opt_template_parameter_initializer
 
+opt_ellipsis:
+    { $<integer>$ = 0; }
+  | ELLIPSIS { postSig("..."); $<integer>$ = VTK_PARSE_PACK; }
+
 class_or_typename:
     CLASS { postSig("class "); }
   | TYPENAME { postSig("typename "); }
@@ -1880,25 +1970,26 @@ function_definition:
   | nested_operator_declaration function_body { reject_function(); }
 
 function_declaration:
-    store_type function_nr
+    store_type opt_ellipsis function_nr
 
 nested_method_declaration:
-    store_type nested_name_specifier function_nr
+    store_type opt_ellipsis nested_name_specifier function_nr
   | nested_name_specifier structor_declaration
   | decl_specifier_seq nested_name_specifier structor_declaration
 
 nested_operator_declaration:
     nested_name_specifier conversion_function
   | decl_specifier_seq nested_name_specifier conversion_function
-  | store_type nested_name_specifier operator_function_nr
+  | store_type opt_ellipsis nested_name_specifier operator_function_nr
 
 method_definition:
     method_declaration function_body { output_function(); }
-  | nested_name_specifier operator_function_id ';'
-  | decl_specifier_seq nested_name_specifier operator_function_id ';'
+  | nested_name_specifier operator_function_id attribute_specifier_seq ';'
+  | decl_specifier_seq nested_name_specifier operator_function_id
+    attribute_specifier_seq ';'
 
 method_declaration:
-    store_type function_nr
+    store_type opt_ellipsis function_nr
   | operator_declaration
   | structor_declaration
   | decl_specifier_seq structor_declaration
@@ -1906,16 +1997,17 @@ method_declaration:
 operator_declaration:
     conversion_function
   | decl_specifier_seq conversion_function
-  | store_type operator_function_nr
+  | store_type opt_ellipsis operator_function_nr
 
 conversion_function:
     conversion_function_id '('
     {
       postSig("(");
+      currentFunction->IsExplicit = ((getType() & VTK_PARSE_EXPLICIT) != 0);
       set_return(currentFunction, getType(), getTypeId(), 0);
     }
-    parameter_declaration_clause ')' { postSig(")"); }
-    function_trailer_clause
+    parameter_declaration_clause ')' attribute_specifier_seq { postSig(")"); }
+    function_trailer_clause opt_trailing_return_type
     {
       postSig(";");
       closeSig();
@@ -1930,7 +2022,8 @@ conversion_function_id:
     { $<str>$ = copySig(); }
 
 operator_function_nr:
-    operator_function_sig { postSig(")"); } function_trailer_clause
+    operator_function_sig { postSig(")"); }
+    function_trailer_clause opt_trailing_return_type
     {
       postSig(";");
       closeSig();
@@ -1940,13 +2033,13 @@ operator_function_nr:
     }
 
 operator_function_sig:
-    operator_function_id '('
+    operator_function_id attribute_specifier_seq '('
     {
       postSig("(");
       currentFunction->IsOperator = 1;
       set_return(currentFunction, getType(), getTypeId(), 0);
     }
-    parameter_declaration_clause ')'
+    parameter_declaration_clause ')' attribute_specifier_seq
 
 operator_function_id:
     operator_sig operator_id
@@ -1956,7 +2049,7 @@ operator_sig:
     OPERATOR { markSig(); postSig("operator "); }
 
 function_nr:
-    function_sig function_trailer_clause
+    function_sig function_trailer_clause opt_trailing_return_type
     {
       postSig(";");
       closeSig();
@@ -1977,22 +2070,51 @@ function_trailer:
       currentFunction->IsPureVirtual = 1;
       if (currentClass) { currentClass->IsAbstract = 1; }
     }
+  | ID
+    {
+      postSig(" "); postSig($<str>1);
+      if (strcmp($<str>1, "final") == 0) { currentFunction->IsFinal = 1; }
+    }
+  | noexcept_sig parentheses_sig { chopSig(); }
+  | noexcept_sig
+  | function_body_as_trailer
+
+noexcept_sig:
+    NOEXCEPT { postSig(" noexcept"); }
+
+function_body_as_trailer:
+    '=' DELETE { currentFunction->IsDeleted = 1; }
+  | '=' DEFAULT
+
+opt_trailing_return_type:
+  | trailing_return_type
+
+trailing_return_type:
+    OP_ARROW { postSig(" -> "); clearType(); clearTypeId(); }
+    trailing_type_specifier_seq
+    {
+      chopSig();
+      set_return(currentFunction, getType(), getTypeId(), 0);
+    }
 
 function_body:
     '{' ignored_items '}'
+  | function_try_block
   | ';'
 
+function_try_block:
+    TRY opt_ctor_initializer '{' ignored_items '}' handler_seq
+
+handler_seq:
+  | handler_seq CATCH ignored_parentheses '{' ignored_items '}'
+
 function_sig:
-    function_name '('
+    unqualified_id attribute_specifier_seq '('
     {
       postSig("(");
       set_return(currentFunction, getType(), getTypeId(), 0);
     }
-    parameter_declaration_clause ')' { postSig(")"); }
-
-function_name:
-    simple_id
-  | template_id
+    parameter_declaration_clause ')' attribute_specifier_seq { postSig(")"); }
 
 
 /*
@@ -2001,7 +2123,8 @@ function_name:
 
 structor_declaration:
     structor_sig { closeSig(); }
-    opt_ctor_initializer { openSig(); } function_trailer_clause
+    opt_ctor_initializer { openSig(); }
+    function_trailer_clause opt_trailing_return_type
     {
       postSig(";");
       closeSig();
@@ -2019,8 +2142,9 @@ structor_declaration:
     }
 
 structor_sig:
-    function_name '(' { pushType(); postSig("("); }
-    parameter_declaration_clause ')' { popType(); postSig(")"); }
+    unqualified_id '(' { pushType(); postSig("("); }
+    parameter_declaration_clause ')' attribute_specifier_seq
+    { popType(); postSig(")"); }
 
 opt_ctor_initializer:
   | ':' mem_initializer_list
@@ -2030,7 +2154,7 @@ mem_initializer_list:
   | mem_initializer_list ',' mem_initializer
 
 mem_initializer:
-    id_expression ignored_parentheses
+    id_expression ignored_parentheses opt_ellipsis
 
 /*
  * Parameters
@@ -2045,6 +2169,8 @@ parameter_list:
     parameter_declaration
   | parameter_list ',' ELLIPSIS
     { currentFunction->IsVariadic = 1; postSig(", ..."); }
+  | ELLIPSIS
+    { currentFunction->IsVariadic = 1; postSig("..."); }
 
 parameter_declaration:
     { markSig(); }
@@ -2079,6 +2205,8 @@ opt_initializer:
 initializer:
     '=' { postSig("="); clearVarValue(); markSig(); }
     constant_expression { chopSig(); setVarValue(copySig()); }
+  | { clearVarValue(); markSig(); }
+    braces_sig { chopSig(); setVarValue(copySig()); }
 
 /*
  * Variables
@@ -2111,11 +2239,11 @@ init_declarator_id:
         var->ItemType = VTK_TYPEDEF_INFO;
         if (currentClass)
           {
-          vtkParse_AddVariableToClass(currentClass, var);
+          vtkParse_AddTypedefToClass(currentClass, var);
           }
         else
           {
-          vtkParse_AddVariableToNamespace(currentNamespace, var);
+          vtkParse_AddTypedefToNamespace(currentNamespace, var);
           }
         }
       /* Is this a constant? */
@@ -2162,19 +2290,29 @@ opt_ptr_operator_seq:
 
 /* for parameters, the declarator_id is optional */
 direct_abstract_declarator:
-    opt_declarator_id opt_array_decorator_seq { $<integer>$ = 0; }
-  | p_or_lp_or_la abstract_declarator ')' { postSig(")"); }
+    opt_ellipsis opt_declarator_id opt_array_or_parameters
+    {
+      if ($<integer>3 == VTK_PARSE_FUNCTION)
+        {
+        $<integer>$ = (VTK_PARSE_FUNCTION_PTR | $<integer>1);
+        }
+      else
+        {
+        $<integer>$ = $<integer>1;
+        }
+    }
+  | lp_or_la attribute_specifier_seq abstract_declarator ')' { postSig(")"); }
     opt_array_or_parameters
     {
       const char *scope = getScope();
-      unsigned int parens = add_indirection($<integer>1, $<integer>2);
-      if ($<integer>5 == VTK_PARSE_FUNCTION)
+      unsigned int parens = add_indirection($<integer>1, $<integer>3);
+      if ($<integer>6 == VTK_PARSE_FUNCTION)
         {
         if (scope) { scope = vtkstrndup(scope, strlen(scope) - 2); }
         getFunction()->Class = scope;
         $<integer>$ = (parens | VTK_PARSE_FUNCTION);
         }
-      else if ($<integer>5 == VTK_PARSE_ARRAY)
+      else if ($<integer>6 == VTK_PARSE_ARRAY)
         {
         $<integer>$ = add_indirection_to_array(parens);
         }
@@ -2182,7 +2320,8 @@ direct_abstract_declarator:
 
 /* for variables, the declarator_id is mandatory */
 direct_declarator:
-    declarator_id opt_array_decorator_seq { $<integer>$ = 0; }
+    opt_ellipsis declarator_id opt_array_decorator_seq
+    { $<integer>$ = $<integer>1; }
   | lp_or_la declarator ')' { postSig(")"); }
     opt_array_or_parameters
     {
@@ -2200,22 +2339,16 @@ direct_declarator:
         }
     }
 
-p_or_lp_or_la:
-    '(' { postSig("("); scopeSig(""); $<integer>$ = 0; }
-  | LP { postSig("("); scopeSig($<str>1); postSig("*");
-         $<integer>$ = VTK_PARSE_POINTER; }
-  | LA { postSig("("); scopeSig($<str>1); postSig("&");
-         $<integer>$ = VTK_PARSE_REF; }
-
 lp_or_la:
-    LP { postSig("("); scopeSig($<str>1); postSig("*");
-         $<integer>$ = VTK_PARSE_POINTER; }
+    LP { postSig("("); scopeSig($<str>1); postSig("*"); }
+    ptr_cv_qualifier_seq { $<integer>$ = $<integer>3; }
   | LA { postSig("("); scopeSig($<str>1); postSig("&");
          $<integer>$ = VTK_PARSE_REF; }
 
-opt_array_or_parameters: { $<integer>$ = 0; }
+opt_array_or_parameters:
+    { $<integer>$ = 0; }
   | '(' { pushFunction(); postSig("("); } parameter_declaration_clause ')'
-    { postSig(")"); } function_qualifiers
+    attribute_specifier_seq { postSig(")"); } function_qualifiers
     {
       $<integer>$ = VTK_PARSE_FUNCTION;
       popFunction();
@@ -2226,6 +2359,8 @@ function_qualifiers:
   | function_qualifiers MUTABLE
   | function_qualifiers CONST { currentFunction->IsConst = 1; }
   | function_qualifiers THROW ignored_parentheses
+  | function_qualifiers NOEXCEPT ignored_parentheses
+  | function_qualifiers NOEXCEPT
 
 abstract_declarator:
     direct_abstract_declarator
@@ -2242,8 +2377,9 @@ opt_declarator_id:
   | declarator_id
 
 declarator_id:
-    simple_id { setVarName($<str>1); }
-  | simple_id ':' bitfield_size { setVarName($<str>1); }
+    unqualified_id attribute_specifier_seq { setVarName($<str>1); }
+  | unqualified_id attribute_specifier_seq ':' bitfield_size
+    { setVarName($<str>1); }
 
 bitfield_size:
     OCT_LITERAL
@@ -2262,7 +2398,8 @@ array_decorator_seq_impl:
   | array_decorator_seq_impl array_decorator
 
 array_decorator:
-    '[' { postSig("["); } array_size_specifier ']' { postSig("]"); }
+    '[' { postSig("["); } array_size_specifier ']' attribute_specifier_seq
+    { postSig("]"); }
 
 array_size_specifier:
     { pushArraySize(""); }
@@ -2279,6 +2416,9 @@ id_expression:
 unqualified_id:
     simple_id
   | template_id
+  | decltype_specifier
+  | tilde_sig class_name { $<str>$ = vtkstrcat("~", $<str>2); }
+  | tilde_sig decltype_specifier { $<str>$ = vtkstrcat("~", $<str>2); }
 
 qualified_id:
     nested_name_specifier unqualified_id
@@ -2293,14 +2433,21 @@ nested_name_specifier:
     { $<str>$ = vtkstrcat($<str>1, $<str>2); }
   | template_id scope_operator_sig
     { $<str>$ = vtkstrcat($<str>1, $<str>2); }
+  | decltype_specifier scope_operator_sig
+    { $<str>$ = vtkstrcat($<str>1, $<str>2); }
   | nested_name_specifier identifier_sig scope_operator_sig
     { $<str>$ = vtkstrcat3($<str>1, $<str>2, $<str>3); }
   | nested_name_specifier template_id scope_operator_sig
     { $<str>$ = vtkstrcat3($<str>1, $<str>2, $<str>3); }
+  | nested_name_specifier decltype_specifier scope_operator_sig
+    { $<str>$ = vtkstrcat3($<str>1, $<str>2, $<str>3); }
   | nested_name_specifier TEMPLATE { postSig("template "); }
     template_id scope_operator_sig
     { $<str>$ = vtkstrcat4($<str>1, "template ", $<str>4, $<str>5); }
 
+tilde_sig:
+    '~' { postSig("~"); }
+
 identifier_sig:
     identifier { postSig($<str>1); }
 
@@ -2309,17 +2456,19 @@ scope_operator_sig:
 
 template_id:
     identifier '<' { markSig(); postSig($<str>1); postSig("<"); }
-    angle_bracket_contents '>'
+    angle_bracket_contents right_angle_bracket
     {
       chopSig(); if (getSig()[getSigLength()-1] == '>') { postSig(" "); }
       postSig(">"); $<str>$ = copySig(); clearTypeId();
     }
 
+decltype_specifier:
+    DECLTYPE { markSig(); postSig("decltype"); } parentheses_sig
+    { chopSig(); $<str>$ = copySig(); clearTypeId(); }
+
+
 /*
- * simple_id evaluates to string and sigs itself, note that '~' is
- * considered part of the ID because this simplifies the handling of
- * destructor names, and since the parser doesn't do any math, there
- * is no conflict with the '~' operator.
+ * simple_id evaluates to string and sigs itself.
  */
 
 simple_id:
@@ -2330,15 +2479,9 @@ simple_id:
   | OSTREAM { postSig($<str>1); }
   | StdString { postSig($<str>1); }
   | UnicodeString { postSig($<str>1); }
-  | '~' VTK_ID { $<str>$ = vtkstrcat("~",$<str>2); postSig($<str>$); }
-  | '~' QT_ID { $<str>$ = vtkstrcat("~",$<str>2); postSig($<str>$); }
-  | '~' ID { $<str>$ = vtkstrcat("~",$<str>2); postSig($<str>$); }
-  | '~' ISTREAM { $<str>$ = vtkstrcat("~",$<str>2); postSig($<str>$); }
-  | '~' OSTREAM { $<str>$ = vtkstrcat("~",$<str>2); postSig($<str>$); }
-  | '~' StdString { $<str>$ = vtkstrcat("~",$<str>2); postSig($<str>$); }
-  | '~' UnicodeString { $<str>$ = vtkstrcat("~",$<str>2); postSig($<str>$); }
-  | SIZE_T { $<str>$ = "size_t"; postSig($<str>$); }
-  | SSIZE_T { $<str>$ = "ssize_t"; postSig($<str>$); }
+  | NULLPTR_T { postSig($<str>1); }
+  | SIZE_T { postSig($<str>1); }
+  | SSIZE_T { postSig($<str>1); }
   | TypeInt8 { $<str>$ = "vtkTypeInt8"; postSig($<str>$); }
   | TypeUInt8 { $<str>$ = "vtkTypeUInt8"; postSig($<str>$); }
   | TypeInt16 { $<str>$ = "vtkTypeInt16"; postSig($<str>$); }
@@ -2364,13 +2507,12 @@ identifier:
   | StdString
   | UnicodeString
 
-
 /*
  * Declaration specifiers
  */
 
 opt_decl_specifier_seq:
-  | opt_decl_specifier_seq decl_specifier2
+  | opt_decl_specifier_seq decl_specifier2 attribute_specifier_seq
 
 decl_specifier2:
     decl_specifier
@@ -2380,19 +2522,22 @@ decl_specifier2:
   | FRIEND { setTypeMod(VTK_PARSE_FRIEND); }
 
 decl_specifier_seq:
-    decl_specifier
-  | decl_specifier_seq decl_specifier
+    decl_specifier attribute_specifier_seq
+  | decl_specifier_seq decl_specifier attribute_specifier_seq
 
 decl_specifier:
     storage_class_specifier { setTypeMod($<integer>1); }
   | function_specifier { setTypeMod($<integer>1); }
   | cv_qualifier { setTypeMod($<integer>1); }
+  | CONSTEXPR { postSig("constexpr "); $<integer>$ = 0; }
 
 storage_class_specifier:
     MUTABLE { postSig("mutable "); $<integer>$ = VTK_PARSE_MUTABLE; }
   | EXTERN { $<integer>$ = 0; }
   | EXTERN STRING_LITERAL { $<integer>$ = 0; }
   | STATIC { postSig("static "); $<integer>$ = VTK_PARSE_STATIC; }
+  | THREAD_LOCAL
+    { postSig("thread_local "); $<integer>$ = VTK_PARSE_THREAD_LOCAL; }
 
 function_specifier:
     INLINE { $<integer>$ = 0; }
@@ -2408,7 +2553,6 @@ cv_qualifier_seq:
   | cv_qualifier_seq cv_qualifier
     { $<integer>$ = ($<integer>1 | $<integer>2); }
 
-
 /*
  * Types
  */
@@ -2423,17 +2567,31 @@ store_type_specifier:
     opt_decl_specifier_seq
 
 type_specifier:
+    trailing_type_specifier
+  | class_key attribute_specifier_seq class_head_name
+    { postSig(" "); setTypeId($<str>3); $<integer>$ = guess_id_type($<str>3); }
+  | enum_key attribute_specifier_seq id_expression attribute_specifier_seq
+    { postSig(" "); setTypeId($<str>3); $<integer>$ = guess_id_type($<str>3); }
+
+trailing_type_specifier:
     simple_type_specifier
-  | TYPENAME { postSig("typename "); } id_expression
+  | decltype_specifier
+    { postSig(" "); setTypeId($<str>1); $<integer>$ = 0; }
+  | TYPENAME { postSig("typename "); } id_expression attribute_specifier_seq
     { postSig(" "); setTypeId($<str>3); $<integer>$ = guess_id_type($<str>3); }
-  | template_id
+  | template_id attribute_specifier_seq
     { postSig(" "); setTypeId($<str>1); $<integer>$ = guess_id_type($<str>1); }
-  | qualified_id
+  | qualified_id attribute_specifier_seq
     { postSig(" "); setTypeId($<str>1); $<integer>$ = guess_id_type($<str>1); }
-  | class_key id_expression
-    { postSig(" "); setTypeId($<str>2); $<integer>$ = guess_id_type($<str>2); }
-  | ENUM id_expression
-    { postSig(" "); setTypeId($<str>2); $<integer>$ = guess_id_type($<str>2); }
+
+trailing_type_specifier_seq:
+    trailing_type_specifier_seq2 opt_ptr_operator_seq
+
+trailing_type_specifier_seq2:
+    trailing_type_specifier { setTypeBase($<integer>1); }
+    opt_decl_specifier_seq
+  | decl_specifier_seq trailing_type_specifier { setTypeBase($<integer>2); }
+    opt_decl_specifier_seq
 
 tparam_type:
     tparam_type_specifier2 opt_ptr_operator_seq
@@ -2446,6 +2604,8 @@ tparam_type_specifier2:
 
 tparam_type_specifier:
     simple_type_specifier
+  | decltype_specifier
+    { postSig(" "); setTypeId($<str>1); $<integer>$ = 0; }
   | template_id
     { postSig(" "); setTypeId($<str>1); $<integer>$ = guess_id_type($<str>1); }
   | qualified_id
@@ -2454,12 +2614,12 @@ tparam_type_specifier:
     { postSig(" "); setTypeId($<str>2); $<integer>$ = guess_id_type($<str>2); }
   | UNION id_expression
     { postSig(" "); setTypeId($<str>2); $<integer>$ = guess_id_type($<str>2); }
-  | ENUM id_expression
+  | enum_key id_expression
     { postSig(" "); setTypeId($<str>2); $<integer>$ = guess_id_type($<str>2); }
 
 simple_type_specifier:
-    primitive_type { setTypeId(""); }
-  | type_name
+    primitive_type attribute_specifier_seq { setTypeId(""); }
+  | type_name attribute_specifier_seq
 
 type_name:
     StdString { typeSig($<str>1); $<integer>$ = VTK_PARSE_STRING; }
@@ -2469,8 +2629,9 @@ type_name:
   | ID { typeSig($<str>1); $<integer>$ = VTK_PARSE_UNKNOWN; }
   | VTK_ID { typeSig($<str>1); $<integer>$ = VTK_PARSE_OBJECT; }
   | QT_ID { typeSig($<str>1); $<integer>$ = VTK_PARSE_QOBJECT; }
-  | SSIZE_T { typeSig("ssize_t"); $<integer>$ = VTK_PARSE_SSIZE_T; }
-  | SIZE_T { typeSig("size_t"); $<integer>$ = VTK_PARSE_SIZE_T; }
+  | NULLPTR_T { typeSig($<str>1); $<integer>$ = VTK_PARSE_NULLPTR_T; }
+  | SSIZE_T { typeSig($<str>1); $<integer>$ = VTK_PARSE_SSIZE_T; }
+  | SIZE_T { typeSig($<str>1); $<integer>$ = VTK_PARSE_SIZE_T; }
   | TypeInt8 { typeSig("vtkTypeInt8"); $<integer>$ = VTK_PARSE_INT8; }
   | TypeUInt8 { typeSig("vtkTypeUInt8"); $<integer>$ = VTK_PARSE_UINT8; }
   | TypeInt16 { typeSig("vtkTypeInt16"); $<integer>$ = VTK_PARSE_INT16; }
@@ -2484,11 +2645,15 @@ type_name:
   | IdType { typeSig("vtkIdType"); $<integer>$ = VTK_PARSE_ID_TYPE; }
 
 primitive_type:
-    VOID   { postSig("void "); $<integer>$ = VTK_PARSE_VOID; }
+    AUTO   { postSig("auto "); $<integer>$ = 0; }
+  | VOID   { postSig("void "); $<integer>$ = VTK_PARSE_VOID; }
   | BOOL { postSig("bool "); $<integer>$ = VTK_PARSE_BOOL; }
   | FLOAT  { postSig("float "); $<integer>$ = VTK_PARSE_FLOAT; }
   | DOUBLE { postSig("double "); $<integer>$ = VTK_PARSE_DOUBLE; }
   | CHAR   { postSig("char "); $<integer>$ = VTK_PARSE_CHAR; }
+  | CHAR16_T { postSig("char16_t "); $<integer>$ = VTK_PARSE_CHAR16_T; }
+  | CHAR32_T   { postSig("char32_t "); $<integer>$ = VTK_PARSE_CHAR32_T; }
+  | WCHAR_T { postSig("wchar_t "); $<integer>$ = VTK_PARSE_WCHAR_T; }
   | INT    { postSig("int "); $<integer>$ = VTK_PARSE_INT; }
   | SHORT  { postSig("short "); $<integer>$ = VTK_PARSE_SHORT; }
   | LONG   { postSig("long "); $<integer>$ = VTK_PARSE_LONG; }
@@ -2515,21 +2680,31 @@ primitive_type:
 
 ptr_operator_seq:
     reference
+  | rvalue_reference
   | pointer_seq
   | pointer_seq reference { $<integer>$ = ($<integer>1 | $<integer>2); }
 
 reference:
-    '&' { postSig("&"); $<integer>$ = VTK_PARSE_REF; }
+    '&' attribute_specifier_seq
+    { postSig("&"); $<integer>$ = VTK_PARSE_REF; }
+
+rvalue_reference:
+    OP_LOGIC_AND attribute_specifier_seq
+    { postSig("&&"); $<integer>$ = (VTK_PARSE_RVALUE | VTK_PARSE_REF); }
 
 pointer:
-    '*' { postSig("*"); $<integer>$ = VTK_PARSE_POINTER; }
-  | '*' { postSig("*"); } cv_qualifier_seq
+    '*' attribute_specifier_seq { postSig("*"); }
+    ptr_cv_qualifier_seq { $<integer>$ = $<integer>4; }
+
+ptr_cv_qualifier_seq:
+    { $<integer>$ = VTK_PARSE_POINTER; }
+  | cv_qualifier_seq
     {
-      if (($<integer>3 & VTK_PARSE_CONST) != 0)
+      if (($<integer>1 & VTK_PARSE_CONST) != 0)
         {
         $<integer>$ = VTK_PARSE_CONST_POINTER;
         }
-      if (($<integer>3 & VTK_PARSE_VOLATILE) != 0)
+      if (($<integer>1 & VTK_PARSE_VOLATILE) != 0)
         {
         $<integer>$ = VTK_PARSE_BAD_INDIRECT;
         }
@@ -2552,6 +2727,16 @@ pointer_seq:
 
 
 /*
+ * Attributes
+ */
+
+attribute_specifier_seq:
+  | attribute_specifier_seq attribute_specifier
+
+attribute_specifier:
+    BEGIN_ATTRIB { closeSig(); } any_bracket_contents { openSig(); } ']' ']'
+
+/*
  * VTK Macros
  */
 
@@ -2865,6 +3050,9 @@ operator_id:
   | '>' { $<str>$ = ">"; }
   | ',' { $<str>$ = ","; }
   | '=' { $<str>$ = "="; }
+  | OP_RSHIFT_A '>' { $<str>$ = ">>"; }
+  | OP_RSHIFT_A OP_RSHIFT_A { $<str>$ = ">>"; }
+  | STRING_LITERAL ID { $<str>$ = vtkstrcat("\"\" ", $<str>2); }
   | operator_id_no_delim
 
 operator_id_no_delim:
@@ -2883,7 +3071,6 @@ operator_id_no_delim:
   | OP_LSHIFT_EQ { $<str>$ = "<<="; }
   | OP_RSHIFT_EQ { $<str>$ = ">>="; }
   | OP_LSHIFT { $<str>$ = "<<"; }
-  | OP_RSHIFT { $<str>$ = ">>"; }
   | OP_DOT_POINTER { $<str>$ = ".*"; }
   | OP_ARROW_POINTER { $<str>$ = "->*"; }
   | OP_ARROW { $<str>$ = "->"; }
@@ -2916,14 +3103,20 @@ keyword:
   | PRIVATE { $<str>$ = "private"; }
   | CONST { $<str>$ = "const"; }
   | STATIC { $<str>$ = "static"; }
+  | THREAD_LOCAL { $<str>$ = "thread_local"; }
+  | CONSTEXPR { $<str>$ = "constexpr"; }
   | INLINE { $<str>$ = "inline"; }
   | VIRTUAL { $<str>$ = "virtual"; }
+  | EXPLICIT { $<str>$ = "explicit"; }
+  | DECLTYPE { $<str>$ = "decltype"; }
+  | DEFAULT { $<str>$ = "default"; }
   | EXTERN { $<str>$ = "extern"; }
   | USING { $<str>$ = "using"; }
   | NAMESPACE { $<str>$ = "namespace"; }
   | OPERATOR { $<str>$ = "operator"; }
   | ENUM { $<str>$ = "enum"; }
   | THROW { $<str>$ = "throw"; }
+  | NOEXCEPT { $<str>$ = "noexcept"; }
   | CONST_CAST { $<str>$ = "const_cast"; }
   | DYNAMIC_CAST { $<str>$ = "dynamic_cast"; }
   | STATIC_CAST { $<str>$ = "static_cast"; }
@@ -2937,16 +3130,28 @@ literal:
   | CHAR_LITERAL
   | STRING_LITERAL
   | ZERO
+  | NULLPTR
 
 /*
  * Constant expressions that evaluate to one or more values
  */
 
 constant_expression:
-    bracket_pitem
-  | constant_expression bracket_pitem;
+    constant_expression_item
+  | constant_expression constant_expression_item
+
+constant_expression_item:
+    common_bracket_item
+  | angle_brackets_sig
+  | '<' { postSig("< "); }
+  | '>' { postSig("> "); } common_bracket_item_no_scope_operator
+  | OP_RSHIFT_A { postSig(">"); }
 
 common_bracket_item:
+    common_bracket_item_no_scope_operator
+  | DOUBLE_COLON { chopSig(); postSig("::"); }
+
+common_bracket_item_no_scope_operator:
     brackets_sig
   | parentheses_sig
   | braces_sig
@@ -2968,9 +3173,8 @@ common_bracket_item:
           postSig(" ");
           }
         postSig($<str>1);
-        if ((c1 >= 'A' && c1 <= 'Z') || (c1 >= 'a' && c1 <= 'z') ||
-            (c1 >= '0' && c1 <= '9') || c1 == '_' || c1 == '\'' ||
-            c1 == '\"' || c1 == ')' || c1 == ']')
+        if (vtkParse_CharType(c1, (CPRE_IDGIT|CPRE_QUOTE)) ||
+            c1 == ')' || c1 == ']')
           {
           postSig(" ");
           }
@@ -2982,7 +3186,6 @@ common_bracket_item:
         }
     }
   | ':' { postSig(":"); postSig(" "); } | '.' { postSig("."); }
-  | DOUBLE_COLON { chopSig(); postSig("::"); }
   | keyword { postSig($<str>1); postSig(" "); }
   | literal { postSig($<str>1); postSig(" "); }
   | primitive_type
@@ -2995,8 +3198,7 @@ common_bracket_item:
       cp = getSig();
       l = getSigLength();
       if (l != 0) { c1 = cp[l-1]; }
-      while (((c1 >= 'A' && c1 <= 'Z') || (c1 >= 'a' && c1 <= 'z') ||
-              (c1 >= '0' && c1 <= '9') || c1 == '_') && l != 0)
+      while (vtkParse_CharType(c1, CPRE_IDGIT) && l != 0)
         {
         --l;
         c1 = cp[l-1];
@@ -3007,20 +3209,25 @@ common_bracket_item:
         resetSig(l);
         postSig(cp);
         }
+      postSig(" ");
     }
 
 any_bracket_contents:
   | any_bracket_contents any_bracket_item
 
-bracket_pitem: common_bracket_item
+bracket_pitem:
+    common_bracket_item
   | '<' { postSig("< "); }
   | '>' { postSig("> "); }
+  | OP_RSHIFT_A { postSig(">"); }
 
-any_bracket_item: bracket_pitem
+any_bracket_item:
+    bracket_pitem
   | '=' { postSig("= "); }
   | ',' { chopSig(); postSig(", "); }
 
-braces_item: any_bracket_item
+braces_item:
+    any_bracket_item
   | ';' { chopSig(); postSig(";"); }
 
 angle_bracket_contents:
@@ -3045,16 +3252,22 @@ angle_brackets_sig:
       if (getSig()[getSigLength()-1] == '<') { postSig(" "); }
       postSig("<");
     }
-    angle_bracket_contents '>'
+    angle_bracket_contents right_angle_bracket
     {
       chopSig();
       if (getSig()[getSigLength()-1] == '>') { postSig(" "); }
       postSig("> ");
     }
 
+right_angle_bracket:
+    '>'
+  | OP_RSHIFT_A
+
 brackets_sig:
     '[' { postSig("["); } any_bracket_contents ']'
     { chopSig(); postSig("] "); }
+  | BEGIN_ATTRIB { postSig("[["); } any_bracket_contents ']' ']'
+    { chopSig(); postSig("]] "); }
 
 parentheses_sig:
     '(' { postSig("("); } any_bracket_contents ')'
@@ -3081,28 +3294,36 @@ ignored_item:
   | ';'
 
 ignored_item_no_semi:
+    ignored_item_no_angle
+  | '<'
+
+ignored_item_no_angle:
     ignored_braces
   | ignored_parentheses
   | ignored_brackets
   | DOUBLE_COLON
   | ELLIPSIS
   | operator_id_no_delim
-  | ':' | '.' | '<' | '>' | '=' | ','
-  | keyword | literal
-  | simple_type_specifier
+  | OP_RSHIFT_A
+  | ':' | '.' | '>' | '=' | ','
+  | keyword
+  | literal
+  | primitive_type
+  | type_name
   | OTHER
 
 ignored_braces:
-  '{' ignored_items '}'
+    '{' ignored_items '}'
 
 ignored_brackets:
-  '[' ignored_items ']'
+    '[' ignored_items ']'
+  | BEGIN_ATTRIB ignored_items ']' ']'
 
 ignored_parentheses:
-  ignored_left_parenthesis ignored_items ')'
+    ignored_left_parenthesis ignored_items ')'
 
 ignored_left_parenthesis:
-  '(' | LP | LA
+    '(' | LP | LA
 
 %%
 #include <string.h>
@@ -3117,6 +3338,9 @@ const char *type_class(unsigned int type, const char *classname)
       {
       switch ((type & VTK_PARSE_BASE_TYPE))
         {
+        case 0:
+          classname = "auto";
+          break;
         case VTK_PARSE_VOID:
           classname = "void";
           break;
@@ -3135,6 +3359,15 @@ const char *type_class(unsigned int type, const char *classname)
         case VTK_PARSE_CHAR:
           classname = "char";
           break;
+        case VTK_PARSE_CHAR16_T:
+          classname = "char16_t";
+          break;
+        case VTK_PARSE_CHAR32_T:
+          classname = "char32_t";
+          break;
+        case VTK_PARSE_WCHAR_T:
+          classname = "wchar_t";
+          break;
         case VTK_PARSE_UNSIGNED_CHAR:
           classname = "unsigned char";
           break;
@@ -3228,27 +3461,6 @@ void start_class(const char *classname, int is_struct_or_union)
   clearComment();
 }
 
-/* reject the class */
-void reject_class(const char *classname, int is_struct_or_union)
-{
-  static ClassInfo static_class;
-
-  pushClass();
-  currentClass = &static_class;
-  currentClass->Name = classname;
-  vtkParse_InitClass(currentClass);
-
-  access_level = VTK_ACCESS_PRIVATE;
-  if (is_struct_or_union)
-    {
-    access_level = VTK_ACCESS_PUBLIC;
-    }
-
-  vtkParse_InitFunction(currentFunction);
-  startSig();
-  clearComment();
-}
-
 /* reached the end of a class definition */
 void end_class()
 {
@@ -3259,9 +3471,13 @@ void end_class()
 }
 
 /* add a base class to the specified class */
-void add_base_class(ClassInfo *cls, const char *name, int al, int virt)
+void add_base_class(ClassInfo *cls, const char *name, int al,
+  unsigned int extra)
 {
-  if (cls && al == VTK_ACCESS_PUBLIC && virt == 0)
+  /* "extra" can contain VTK_PARSE_VIRTUAL and VTK_PARSE_PACK */
+  if (cls && al == VTK_ACCESS_PUBLIC &&
+      (extra & VTK_PARSE_VIRTUAL) == 0 &&
+      (extra & VTK_PARSE_PACK) == 0)
     {
     vtkParse_AddStringToArray(&cls->SuperClasses,
                               &cls->NumberOfSuperClasses,
@@ -3302,12 +3518,20 @@ void add_using(const char *name, int is_namespace)
 }
 
 /* start a new enum */
-void start_enum(const char *name)
+void start_enum(const char *name, int is_scoped,
+                unsigned int type, const char *basename)
 {
   EnumInfo *item;
 
+  currentEnumType = (type ? type : VTK_PARSE_INT);
   currentEnumName = "int";
   currentEnumValue = NULL;
+
+  if (type == 0 && is_scoped)
+    {
+    type = VTK_PARSE_INT;
+    }
+
   if (name)
     {
     currentEnumName = name;
@@ -3315,6 +3539,7 @@ void start_enum(const char *name)
     vtkParse_InitEnum(item);
     item->Name = name;
     item->Access = access_level;
+
     if (currentClass)
       {
       vtkParse_AddEnumToClass(currentClass, item);
@@ -3323,12 +3548,30 @@ void start_enum(const char *name)
       {
       vtkParse_AddEnumToNamespace(currentNamespace, item);
       }
+
+    if (type)
+      {
+      vtkParse_AddStringToArray(&item->SuperClasses,
+                                &item->NumberOfSuperClasses,
+                                type_class(type, basename));
+      }
+
+    if (is_scoped)
+      {
+      pushClass();
+      currentClass = item;
+      }
     }
 }
 
 /* finish the enum */
 void end_enum()
 {
+  if (currentClass && currentClass->ItemType == VTK_ENUM_INFO)
+    {
+    popClass();
+    }
+
   currentEnumName = NULL;
   currentEnumValue = NULL;
 }
@@ -3375,7 +3618,7 @@ void add_enum(const char *name, const char *value)
     currentEnumValue = "0";
     }
 
-  add_constant(name, currentEnumValue, VTK_PARSE_INT, currentEnumName, 2);
+  add_constant(name, currentEnumValue, currentEnumType, currentEnumName, 2);
 }
 
 /* for a macro constant, guess the constant type, doesn't do any math */
@@ -3391,18 +3634,10 @@ unsigned int guess_constant_type(const char *valstring)
     return 0;
     }
 
-  if (valstring[0] < '0' || valstring[0] > '9')
+  k = vtkParse_SkipId(valstring);
+  if (valstring[k] == '\0')
     {
-    k = 0;
-    while ((valstring[k] >= '0' && valstring[k] <= '9') ||
-           (valstring[k] >= 'a' && valstring[k] <= 'z') ||
-           (valstring[k] >= 'A' && valstring[k] <= 'Z') ||
-           valstring[k] == '_') { k++; }
-
-    if (valstring[k] == '\0')
-      {
-      is_name = 1;
-      }
+    is_name = 1;
     }
 
   if (strcmp(valstring, "true") == 0 || strcmp(valstring, "false") == 0)
@@ -3410,6 +3645,11 @@ unsigned int guess_constant_type(const char *valstring)
     return VTK_PARSE_BOOL;
     }
 
+  if (strcmp(valstring, "nullptr") == 0)
+    {
+    return VTK_PARSE_NULLPTR_T;
+    }
+
   if (valstring[0] == '\'')
     {
     return VTK_PARSE_CHAR;
@@ -3828,6 +4068,13 @@ void handle_complex_type(
   /* remove specifiers like "friend" and "typedef" */
   datatype &= VTK_PARSE_QUALIFIED_TYPE;
 
+  /* remove the pack specifier caused by "..." */
+  if ((extra & VTK_PARSE_PACK) != 0)
+    {
+    val->IsPack = 1;
+    extra ^= VTK_PARSE_PACK;
+    }
+
   /* if "extra" was set, parentheses were involved */
   if ((extra & VTK_PARSE_BASE_TYPE) == VTK_PARSE_FUNCTION)
     {
@@ -3848,7 +4095,7 @@ void handle_complex_type(
     /* the val type is whatever was inside the parentheses */
     clearTypeId();
     setTypeId(func->Class ? "method" : "function");
-    datatype = (extra & VTK_PARSE_UNQUALIFIED_TYPE);
+    datatype = (extra & (VTK_PARSE_UNQUALIFIED_TYPE | VTK_PARSE_RVALUE));
     }
   else if ((extra & VTK_PARSE_INDIRECT) == VTK_PARSE_BAD_INDIRECT)
     {
@@ -3856,12 +4103,12 @@ void handle_complex_type(
     }
   else if ((extra & VTK_PARSE_INDIRECT) != 0)
     {
-    extra = (extra & VTK_PARSE_INDIRECT);
+    extra = (extra & (VTK_PARSE_INDIRECT | VTK_PARSE_RVALUE));
 
     if ((extra & VTK_PARSE_REF) != 0)
       {
-      datatype = (datatype | VTK_PARSE_REF);
-      extra = (extra & ~VTK_PARSE_REF);
+      datatype = (datatype | (extra & (VTK_PARSE_REF | VTK_PARSE_RVALUE)));
+      extra = (extra & ~(VTK_PARSE_REF | VTK_PARSE_RVALUE));
       }
 
     if (extra != 0 && getArrayNDims() > 0)
@@ -3975,6 +4222,25 @@ void output_function()
       }
     }
 
+  /* friend */
+  if (currentFunction->ReturnValue &&
+      currentFunction->ReturnValue->Type & VTK_PARSE_FRIEND)
+    {
+    currentFunction->ReturnValue->Type ^= VTK_PARSE_FRIEND;
+    output_friend_function();
+    return;
+    }
+
+  /* typedef */
+  if (currentFunction->ReturnValue &&
+      currentFunction->ReturnValue->Type & VTK_PARSE_TYPEDEF)
+    {
+    /* for now, reject it instead of turning a method into a typedef */
+    currentFunction->ReturnValue->Type ^= VTK_PARSE_TYPEDEF;
+    reject_function();
+    return;
+    }
+
   /* static */
   if (currentFunction->ReturnValue &&
       currentFunction->ReturnValue->Type & VTK_PARSE_STATIC)
@@ -4252,6 +4518,12 @@ void vtkParse_SetRecursive(int option)
     }
 }
 
+/* Set the global variable that stores the current executable */
+void vtkParse_SetCommandName(const char *name)
+{
+  CommandName = name;
+}
+
 /* Parse a header file and return a FileInfo struct */
 FileInfo *vtkParse_ParseFile(
   const char *filename, FILE *ifile, FILE *errfile)
@@ -4270,6 +4542,7 @@ FileInfo *vtkParse_ParseFile(
   /* "preprocessor" is a global struct used by the parser */
   preprocessor = (PreprocessInfo *)malloc(sizeof(PreprocessInfo));
   vtkParsePreprocess_Init(preprocessor, filename);
+  preprocessor->Strings = data->Strings;
   vtkParsePreprocess_AddStandardMacros(preprocessor, VTK_PARSE_NATIVE);
 
   /* add include files specified on the command line */
@@ -4339,7 +4612,6 @@ FileInfo *vtkParse_ParseFile(
 
   if (ret)
     {
-    print_parser_error("syntax error", NULL, 0);
     return NULL;
     }
 
@@ -4500,7 +4772,7 @@ void vtkParse_Free(FileInfo *file_info)
 /** Define a preprocessor macro. Function macros are not supported.  */
 void vtkParse_DefineMacro(const char *name, const char *definition)
 {
-  size_t n = vtkidlen(name);
+  size_t n = vtkParse_SkipId(name);
   size_t l;
   char *cp;
 
@@ -4527,7 +4799,7 @@ void vtkParse_DefineMacro(const char *name, const char *definition)
 /** Undefine a preprocessor macro.  */
 void vtkParse_UndefineMacro(const char *name)
 {
-  size_t n = vtkidlen(name);
+  size_t n = vtkParse_SkipId(name);
   char *cp;
 
   cp = (char *)malloc(n+2);
@@ -4564,7 +4836,8 @@ void vtkParse_IncludeDirectory(const char *dirname)
 /** Return the full path to a header file.  */
 const char *vtkParse_FindIncludeFile(const char *filename)
 {
-  static PreprocessInfo info = {0, 0, 0, 0, 0, 0, 0, 0, 0};
+  static StringCache cache = {0, 0, 0, 0};
+  static PreprocessInfo info = {0, 0, 0, 0, 0, 0, &cache, 0, 0, 0};
   int val;
   int i;
 
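The vtkParse.y hunks above replace hand-rolled range tests such as (c1 >= 'A' && c1 <= 'Z') || ... with vtkParse_CharType(c1, CPRE_IDGIT|CPRE_QUOTE) and vtkParse_SkipId(). As a rough, self-contained sketch of how such a table-driven classifier works (the xcharbits table and the x-prefixed helpers below are illustrative stand-ins, not the actual vtkParseString implementation):

    #include <stdio.h>
    #include <stddef.h>

    /* Illustrative bit values in the spirit of the CPRE_* flags used above. */
    #define XCPRE_ID     0x01  /* A-Z, a-z, '_' */
    #define XCPRE_DIGIT  0x02  /* 0-9 */
    #define XCPRE_IDGIT  (XCPRE_ID | XCPRE_DIGIT)
    #define XCPRE_QUOTE  0x20  /* '"' and '\'' */

    static unsigned char xcharbits[256];

    static void xbits_init(void)
    {
      int c;
      for (c = 'A'; c <= 'Z'; c++) { xcharbits[c] |= XCPRE_ID; }
      for (c = 'a'; c <= 'z'; c++) { xcharbits[c] |= XCPRE_ID; }
      xcharbits['_'] |= XCPRE_ID;
      for (c = '0'; c <= '9'; c++) { xcharbits[c] |= XCPRE_DIGIT; }
      xcharbits['\"'] |= XCPRE_QUOTE;
      xcharbits['\''] |= XCPRE_QUOTE;
    }

    /* One table lookup replaces a chain of range comparisons. */
    static int xchartype(char c, unsigned int bits)
    {
      return (xcharbits[(unsigned char)c] & bits) != 0;
    }

    /* Skip an identifier and return its length, like vtkParse_SkipId(). */
    static size_t xskipid(const char *text)
    {
      size_t i = 0;
      if (xchartype(text[0], XCPRE_ID))
        {
        do { i++; } while (xchartype(text[i], XCPRE_IDGIT));
        }
      return i;
    }

    int main(void)
    {
      xbits_init();
      printf("%d %d %u\n",
             xchartype('q', XCPRE_IDGIT),           /* 1 */
             xchartype('\"', XCPRE_QUOTE),          /* 1 */
             (unsigned)xskipid("nullptr_t rest"));  /* 9 */
      return 0;
    }

Packing the character classes into bit flags lets a single lookup answer several questions at once, which is why the grammar actions above can simply OR the flags together.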
diff --git a/Wrapping/Tools/vtkParseData.c b/Wrapping/Tools/vtkParseData.c
index 6b27e86..95a7373 100644
--- a/Wrapping/Tools/vtkParseData.c
+++ b/Wrapping/Tools/vtkParseData.c
@@ -124,6 +124,8 @@ void vtkParse_InitFunction(FunctionInfo *func)
   func->IsOperator = 0;
   func->IsVariadic = 0;
   func->IsConst = 0;
+  func->IsDeleted = 0;
+  func->IsFinal = 0;
   func->IsExplicit = 0;
   func->IsLegacy = 0;
 
@@ -194,6 +196,8 @@ void vtkParse_CopyFunction(FunctionInfo *func, const FunctionInfo *orig)
   func->IsOperator = orig->IsOperator;
   func->IsVariadic = orig->IsVariadic;
   func->IsConst = orig->IsConst;
+  func->IsDeleted = orig->IsDeleted;
+  func->IsFinal = orig->IsFinal;
   func->IsExplicit = orig->IsExplicit;
   func->IsLegacy = orig->IsLegacy;
 
@@ -259,6 +263,7 @@ void vtkParse_InitValue(ValueInfo *val)
   val->Template = NULL;
   val->IsStatic = 0;
   val->IsEnum = 0;
+  val->IsPack = 0;
 }
 
 /* Copy a Value struct */
@@ -303,6 +308,7 @@ void vtkParse_CopyValue(ValueInfo *val, const ValueInfo *orig)
 
   val->IsStatic = orig->IsStatic;
   val->IsEnum = orig->IsEnum;
+  val->IsPack = orig->IsPack;
 }
 
 /* Free a Value struct */
@@ -328,19 +334,14 @@ void vtkParse_FreeValue(ValueInfo *value_info)
 /* Initialize an Enum struct */
 void vtkParse_InitEnum(EnumInfo *item)
 {
+  vtkParse_InitClass(item);
   item->ItemType = VTK_ENUM_INFO;
-  item->Access = VTK_ACCESS_PUBLIC;
-  item->Name = NULL;
-  item->Comment = NULL;
 }
 
 /* Copy an Enum struct */
 void vtkParse_CopyEnum(EnumInfo *item, const EnumInfo *orig)
 {
-  item->ItemType = orig->ItemType;
-  item->Access = orig->Access;
-  item->Name = orig->Name;
-  item->Comment = orig->Comment;
+  vtkParse_CopyClass(item, orig);
 }
 
 /* Free an Enum struct */
@@ -406,6 +407,7 @@ void vtkParse_InitClass(ClassInfo *cls)
   cls->NumberOfNamespaces = 0;
   cls->Namespaces = NULL;
   cls->IsAbstract = 0;
+  cls->IsFinal = 0;
   cls->HasDelete = 0;
 }
 
@@ -546,6 +548,7 @@ void vtkParse_CopyClass(ClassInfo *cls, const ClassInfo *orig)
     }
 
   cls->IsAbstract = orig->IsAbstract;
+  cls->IsFinal = orig->IsFinal;
   cls->HasDelete = orig->HasDelete;
 }
 
diff --git a/Wrapping/Tools/vtkParseData.h b/Wrapping/Tools/vtkParseData.h
index cd03e67..7c81c45 100644
--- a/Wrapping/Tools/vtkParseData.h
+++ b/Wrapping/Tools/vtkParseData.h
@@ -112,6 +112,7 @@ struct _ValueInfo
   TemplateInfo  *Template;   /* template parameters, or NULL */
   int            IsStatic;   /* for class variables only */
   int            IsEnum;     /* for constants only */
+  int            IsPack;     /* for pack expansions */
 };
 
 /**
@@ -138,6 +139,8 @@ struct _FunctionInfo
   int            IsVirtual;   /* methods only */
   int            IsPureVirtual; /* methods only */
   int            IsConst;     /* methods only */
+  int            IsDeleted;   /* methods only */
+  int            IsFinal;     /* methods only */
   int            IsExplicit;  /* constructors only */
 #ifndef VTK_PARSE_LEGACY_REMOVE
   int            NumberOfArguments;   /* legacy */
@@ -155,18 +158,6 @@ struct _FunctionInfo
 };
 
 /**
- * EnumInfo is for enums
- * Constants are at the same level as the Enum, not inside it.
- */
-typedef struct _EnumInfo
-{
-  parse_item_t   ItemType;
-  parse_access_t Access;
-  const char    *Name;
-  const char    *Comment;
-} EnumInfo;
-
-/**
  * UsingInfo is for using directives
  */
 typedef struct _UsingInfo
@@ -201,7 +192,7 @@ typedef struct _ClassInfo
   int            NumberOfVariables;
   ValueInfo    **Variables;
   int            NumberOfEnums;
-  EnumInfo     **Enums;
+  struct _ClassInfo **Enums;
   int            NumberOfTypedefs;
   ValueInfo    **Typedefs;
   int            NumberOfUsings;
@@ -209,10 +200,18 @@ typedef struct _ClassInfo
   int            NumberOfNamespaces;
   struct _ClassInfo **Namespaces;
   int            IsAbstract;
+  int            IsFinal;
   int            HasDelete;
 } ClassInfo;
 
 /**
+ * EnumInfo is for enums
+ * For scoped enums, the constants are in the enum itself, but for
+ * standard enums, the constants are at the same level as the enum.
+ */
+typedef struct _ClassInfo EnumInfo;
+
+/**
  * Namespace is for namespaces
  */
 typedef struct _ClassInfo NamespaceInfo;
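The header change above collapses the old four-field EnumInfo into an alias for ClassInfo, so a C++11 scoped enum can carry its own constants (and an underlying type recorded as a pseudo superclass) while an unscoped enum still leaves its constants in the enclosing scope, which is what start_enum()/end_enum() do earlier in this patch. A reduced sketch of the aliasing idea, with placeholder fields rather than the real ClassInfo layout:

    #include <stdio.h>

    /* Simplified stand-in for ClassInfo: just enough fields to show why one
     * struct can describe classes, namespaces, and (now) scoped enums. */
    typedef struct _XClassInfo
    {
      const char *Name;
      int NumberOfConstants;
      const char *Constants[8];   /* the real code uses dynamic ValueInfo arrays */
    } XClassInfo;

    /* Same layout, different role, like "typedef struct _ClassInfo EnumInfo". */
    typedef XClassInfo XEnumInfo;

    int main(void)
    {
      /* enum class Color : unsigned char { Red, Green };  -> constants nest inside */
      XEnumInfo scoped = { "Color", 2, { "Red", "Green" } };
      /* enum Legacy { OLD_A };  -> constant stays with the enclosing scope */
      XClassInfo enclosing = { "MyNamespace", 1, { "OLD_A" } };

      printf("%s owns %d constant(s)\n", scoped.Name, scoped.NumberOfConstants);
      printf("%s holds %d constant(s) from unscoped enums\n",
             enclosing.Name, enclosing.NumberOfConstants);
      return 0;
    }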
diff --git a/Wrapping/Tools/vtkParseExtras.c b/Wrapping/Tools/vtkParseExtras.c
index f44763b..fd04c01 100644
--- a/Wrapping/Tools/vtkParseExtras.c
+++ b/Wrapping/Tools/vtkParseExtras.c
@@ -28,88 +28,6 @@
 #include <ctype.h>
 #include <assert.h>
 
-/* skip over an identifier */
-static size_t vtkparse_id_len(const char *text)
-{
-  size_t i = 0;
-  char c = text[0];
-
-  if ((c >= 'a' && c <= 'z') ||
-      (c >= 'A' && c <= 'Z') ||
-       c == '_')
-    {
-    do
-      {
-      c = text[++i];
-      }
-    while ((c >= 'a' && c <= 'z') ||
-           (c >= 'A' && c <= 'Z') ||
-           (c >= '0' && c <= '9') ||
-           c == '_');
-    }
-
-  return i;
-}
-
-/* skip over numbers, int or float, including suffixes */
-static size_t vtkparse_number_len(const char *text)
-{
-  size_t i = 0;
-  char c = text[0];
-
-  if (c == '.')
-    {
-    c = text[1];
-    }
-
-  if (c >= '0' && c <= '9')
-    {
-    do
-      {
-      do
-        {
-        c = text[++i];
-        }
-      while ((c >= '0' && c <= '9') ||
-             (c >= 'a' && c <= 'z') ||
-             (c >= 'A' && c <= 'Z') ||
-             c == '_' || c == '.');
-      }
-    while ((c == '-' || c == '+') &&
-           (text[i-1] == 'e' || text[i-1] == 'E'));
-    }
-
-  return i;
-}
-
-/* skip over string and char literals. */
-static size_t vtkparse_quote_len(const char *text)
-{
-  size_t i = 0;
-  const char qc = text[0];
-  char c = text[0];
-
-  if (c == '\'' || c == '\"')
-    {
-    do
-      {
-      do
-        {
-        c = text[++i];
-        }
-      while (c != qc && c != '\n' && c != '\0');
-      }
-    while (c == qc && text[i-1] == '\\');
-
-    if (c == qc)
-      {
-      ++i;
-      }
-    }
-
-  return i;
-}
-
 /* skip over an expression in brackets */
 static size_t vtkparse_bracket_len(const char *text)
 {
@@ -131,9 +49,9 @@ static size_t vtkparse_bracket_len(const char *text)
     i += j;
     j = 1;
     c = text[i];
-    if (c == '\'' || c == '\"')
+    if (vtkParse_CharType(c, CPRE_QUOTE))
       {
-      j = vtkparse_quote_len(&text[i]);
+      j = vtkParse_SkipQuotes(&text[i]);
       }
     else if (c == bc || c == '(' || c == '[' || c == '{')
       {
@@ -155,7 +73,7 @@ static size_t vtkparse_bracket_len(const char *text)
  * total number of characters in the name */
 size_t vtkParse_IdentifierLength(const char *text)
 {
-  return vtkparse_id_len(text);
+  return vtkParse_SkipId(text);
 }
 
 /* skip over a name that might be templated, return the
@@ -164,7 +82,7 @@ size_t vtkParse_UnscopedNameLength(const char *text)
 {
   size_t i = 0;
 
-  i += vtkparse_id_len(text);
+  i += vtkParse_SkipId(text);
   if (text[i] == '<')
     {
     i += vtkparse_bracket_len(&text[i]);
@@ -225,17 +143,15 @@ static const char *vtkparse_string_replace(
     lastPos = i;
 
     /* skip all chars that aren't part of a name */
-    while ((cp[i] < 'a' || cp[i] > 'z') &&
-           (cp[i] < 'A' || cp[i] > 'Z') &&
-           cp[i] != '_' && cp[i] != '\0')
+    while (!vtkParse_CharType(cp[i], CPRE_ID) && cp[i] != '\0')
       {
-      if (cp[i] == '\'' || cp[i] == '\"')
+      if (vtkParse_CharType(cp[i], CPRE_QUOTE))
         {
-        i += vtkparse_quote_len(&cp[i]);
+        i += vtkParse_SkipQuotes(&cp[i]);
         }
-      else if (cp[i] >= '0' && cp[i] <= '9')
+      else if (vtkParse_CharType(cp[i], CPRE_DIGIT))
         {
-        i += vtkparse_number_len(&cp[i]);
+        i += vtkParse_SkipNumber(&cp[i]);
         }
       else
         {
@@ -245,7 +161,7 @@ static const char *vtkparse_string_replace(
     nameBegin = i;
 
     /* skip all chars that are part of a name */
-    i += vtkparse_id_len(&cp[i]);
+    i += vtkParse_SkipId(&cp[i]);
     nameEnd = i;
 
     /* search through the list of names to replace */
@@ -589,11 +505,10 @@ size_t vtkParse_BasicTypeFromString(
   const char *classname = NULL;
   size_t len = 0;
 
-  while (*cp == ' ' || *cp == '\t') { cp++; }
+  while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; }
 
-  while ((*cp >= 'a' && *cp <= 'z') ||
-         (*cp >= 'A' && *cp <= 'Z') ||
-         (*cp == '_') || (cp[0] == ':' && cp[1] == ':'))
+  while (vtkParse_CharType(*cp, CPRE_ID) ||
+         (cp[0] == ':' && cp[1] == ':'))
     {
     /* skip all chars that are part of a name */
     n = vtkParse_NameLength(cp);
@@ -792,7 +707,7 @@ size_t vtkParse_BasicTypeFromString(
       }
 
     cp += n;
-    while (*cp == ' ' || *cp == '\t') { cp++; }
+    while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; }
     }
 
   if ((unsigned_bits & VTK_PARSE_UNSIGNED) != 0)
@@ -862,15 +777,12 @@ size_t vtkParse_ValueInfoFromString(
     {
     cp++;
     pointer_bits = (pointer_bits << 2);
-    while (*cp == ' ' || *cp == '\t') { cp++; }
+    while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; }
     if (strncmp(cp, "const", 5) == 0 &&
-        (cp[5] < 'a' || cp[5] > 'z') &&
-        (cp[5] < 'A' || cp[5] > 'Z') &&
-        (cp[5] < '0' || cp[5] > '9') &&
-        cp[5] != '_')
+        !vtkParse_CharType(cp[5], CPRE_IDGIT))
       {
       cp += 5;
-      while (*cp == ' ' || *cp == '\t') { cp++; }
+      while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; }
       pointer_bits = (pointer_bits | VTK_PARSE_CONST_POINTER);
       }
     else
@@ -884,20 +796,18 @@ size_t vtkParse_ValueInfoFromString(
   if (*cp == '&')
     {
     cp++;
-    while (*cp == ' ' || *cp == '\t') { cp++; }
+    while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; }
     ref_bits = VTK_PARSE_REF;
     }
 
   /* look for the variable name */
-  if ((*cp >= 'a' && *cp <= 'z') ||
-      (*cp >= 'A' && *cp <= 'Z') ||
-      (*cp == '_'))
+  if (vtkParse_CharType(*cp, CPRE_ID))
     {
     /* skip all chars that are part of a name */
-    n = vtkparse_id_len(cp);
+    n = vtkParse_SkipId(cp);
     data->Name = vtkParse_CacheString(cache, cp, n);
     cp += n;
-    while (*cp == ' ' || *cp == '\t') { cp++; }
+    while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; }
     }
 
   /* look for array brackets */
@@ -913,23 +823,24 @@ size_t vtkParse_ValueInfoFromString(
         cp++;
         n -= 2;
         }
-      while (*cp == ' ' || *cp == '\t') { cp++; n--; }
-      while (n > 0 && (cp[n-1] == ' ' || cp[n-1] == '\t')) { n--; }
+      while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; n--; }
+      while (n > 0 && vtkParse_CharType(cp[n-1], CPRE_HSPACE)) { n--; }
       vtkParse_AddStringToArray(
         &data->Dimensions,
         &data->NumberOfDimensions,
         vtkParse_CacheString(cache, cp, n));
       m = 0;
-      if (*cp >= '0' && *cp <= '9' && vtkparse_number_len(cp) == n)
+      if (vtkParse_CharType(*cp, CPRE_DIGIT) &&
+          vtkParse_SkipNumber(cp) == n)
         {
         m = (int)strtol(cp, NULL, 0);
         }
       count *= m;
 
       cp += n;
-      while (*cp == ' ' || *cp == '\t') { cp++; }
+      while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; }
       if (*cp == ']') { cp++; }
-      while (*cp == ' ' || *cp == '\t') { cp++; }
+      while (vtkParse_CharType(*cp, CPRE_HSPACE)) { cp++; }
       }
     }
 
@@ -1237,7 +1148,7 @@ size_t vtkParse_DecomposeTemplatedType(
     /* extract the template arguments */
     for (;;)
       {
-      while (text[i] == ' ' || text[i] == '\t') { i++; }
+      while (vtkParse_CharType(text[i], CPRE_HSPACE)) { i++; }
       j = i;
       while (text[j] != ',' && text[j] != '>' &&
              text[j] != '\n' && text[j] != '\0')
@@ -1247,9 +1158,9 @@ size_t vtkParse_DecomposeTemplatedType(
           {
           j += vtkparse_bracket_len(&text[j]);
           }
-        else if (text[j] == '\'' || text[j] == '\"')
+        else if (vtkParse_CharType(text[j], CPRE_QUOTE))
           {
-          j += vtkparse_quote_len(&text[j]);
+          j += vtkParse_SkipQuotes(&text[j]);
           }
         else
           {
@@ -1258,7 +1169,7 @@ size_t vtkParse_DecomposeTemplatedType(
         }
 
       k = j;
-      while (text[k-1] == ' ' || text[k-1] == '\t') { --k; }
+      while (vtkParse_CharType(text[k-1], CPRE_HSPACE)) { --k; }
 
       new_text = (char *)malloc(k-i + 1);
       strncpy(new_text, &text[i], k-i);
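Several of the hunks above and below drop per-string malloc/strndup ownership in favour of vtkParse_CacheString(cache, cp, n), which copies every name into a StringCache that is released in one place. The arena-style cache below imitates that usage pattern; the XStringCache type and the xcache_* helpers are illustrative only and are not the vtkParseString API.

    #include <stdio.h>
    #include <stdlib.h>
    #include <string.h>

    /* A toy string cache: every cached string is copied into storage owned by
     * the cache, so callers never free individual names; the whole cache is
     * freed once at the end, which is the point of the change above. */
    typedef struct
    {
      char **Strings;
      size_t Count;
    } XStringCache;

    static const char *xcache_string(XStringCache *cache, const char *cp, size_t n)
    {
      char *copy = (char *)malloc(n + 1);
      memcpy(copy, cp, n);
      copy[n] = '\0';
      cache->Strings = (char **)realloc(cache->Strings,
                                        (cache->Count + 1) * sizeof(char *));
      cache->Strings[cache->Count++] = copy;
      return copy;
    }

    static void xcache_free(XStringCache *cache)
    {
      size_t i;
      for (i = 0; i < cache->Count; i++) { free(cache->Strings[i]); }
      free(cache->Strings);
      cache->Strings = NULL;
      cache->Count = 0;
    }

    int main(void)
    {
      XStringCache cache = { NULL, 0 };
      const char *name = xcache_string(&cache, "vtkObject::PrintSelf", 9);
      printf("%s\n", name);   /* prints "vtkObject" */
      xcache_free(&cache);
      return 0;
    }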
diff --git a/Wrapping/Tools/vtkParseJava.c b/Wrapping/Tools/vtkParseJava.c
index b38bcee..d99bdb3 100644
--- a/Wrapping/Tools/vtkParseJava.c
+++ b/Wrapping/Tools/vtkParseJava.c
@@ -64,7 +64,7 @@ void output_temp(FILE *fp,int i)
       case VTK_PARSE_BOOL:        fprintf(fp,"boolean "); break;
       case VTK_PARSE_VOID:        fprintf(fp,"void "); break;
       case VTK_PARSE_CHAR:        fprintf(fp,"char "); break;
-      case VTK_PARSE_VTK_OBJECT:  fprintf(fp,"%s ",currentFunction->ArgClasses[i]); break;
+      case VTK_PARSE_OBJECT:  fprintf(fp,"%s ",currentFunction->ArgClasses[i]); break;
       case VTK_PARSE_UNKNOWN: return;
       }
     }
@@ -72,7 +72,7 @@ void output_temp(FILE *fp,int i)
   fprintf(fp,"id%i",i);
   if (((aType & VTK_PARSE_INDIRECT) == VTK_PARSE_POINTER) &&
       (aType != VTK_PARSE_CHAR_PTR) &&
-      (aType != VTK_PARSE_VTK_OBJECT_PTR))
+      (aType != VTK_PARSE_OBJECT_PTR))
     {
     fprintf(fp,"[]");
     }
@@ -121,7 +121,7 @@ void return_result(FILE *fp)
     case VTK_PARSE_STRING_REF:
       fprintf(fp,"String ");
       break;
-    case VTK_PARSE_VTK_OBJECT_PTR:
+    case VTK_PARSE_OBJECT_PTR:
       fprintf(fp,"%s ",currentFunction->ReturnClass);
       break;
 
@@ -199,7 +199,7 @@ void return_result_native(FILE *fp)
     case VTK_PARSE_STRING_REF:
       fprintf(fp,"String ");
       break;
-    case VTK_PARSE_VTK_OBJECT_PTR:
+    case VTK_PARSE_OBJECT_PTR:
       fprintf(fp,"long ");
       break;
 
@@ -262,7 +262,7 @@ static int CheckMatch(
   if ((type1 & VTK_PARSE_UNQUALIFIED_TYPE) ==
       (type2 & VTK_PARSE_UNQUALIFIED_TYPE))
     {
-    if ((type1 & VTK_PARSE_BASE_TYPE) == VTK_PARSE_VTK_OBJECT)
+    if ((type1 & VTK_PARSE_BASE_TYPE) == VTK_PARSE_OBJECT)
       {
       if (strcmp(c1, c2) == 0)
         {
@@ -467,7 +467,7 @@ int checkFunctionSignature(ClassInfo *data)
     VTK_PARSE_ID_TYPE, VTK_PARSE_UNSIGNED_ID_TYPE,
     VTK_PARSE_LONG_LONG, VTK_PARSE_UNSIGNED_LONG_LONG,
     VTK_PARSE___INT64, VTK_PARSE_UNSIGNED___INT64,
-    VTK_PARSE_VTK_OBJECT, VTK_PARSE_STRING,
+    VTK_PARSE_OBJECT, VTK_PARSE_STRING,
     0
   };
 
@@ -545,7 +545,7 @@ int checkFunctionSignature(ClassInfo *data)
         }
       }
 
-    if (aType == VTK_PARSE_VTK_OBJECT) args_ok = 0;
+    if (aType == VTK_PARSE_OBJECT) args_ok = 0;
     if (((aType & VTK_PARSE_INDIRECT) != VTK_PARSE_POINTER) &&
         ((aType & VTK_PARSE_INDIRECT) != 0) &&
         (aType != VTK_PARSE_STRING_REF)) args_ok = 0;
@@ -602,7 +602,7 @@ int checkFunctionSignature(ClassInfo *data)
 
     if (((aType & VTK_PARSE_INDIRECT) == VTK_PARSE_POINTER)&&
         (currentFunction->ArgCounts[i] <= 0)&&
-        (aType != VTK_PARSE_VTK_OBJECT_PTR)&&
+        (aType != VTK_PARSE_OBJECT_PTR)&&
         (aType != VTK_PARSE_CHAR_PTR)) args_ok = 0;
     }
 
@@ -735,7 +735,7 @@ void outputFunction(FILE *fp, ClassInfo *data)
         }
 
       /* if returning object, lookup in global hash */
-      if (rType == VTK_PARSE_VTK_OBJECT_PTR)
+      if (rType == VTK_PARSE_OBJECT_PTR)
         {
         fprintf(fp,") {");
         fprintf(fp,"\n    long temp = %s_%i(",currentFunction->Name, numberOfWrappedFunctions);
diff --git a/Wrapping/Tools/vtkParseJavaBeans.c b/Wrapping/Tools/vtkParseJavaBeans.c
index 868e930..38995a6 100644
--- a/Wrapping/Tools/vtkParseJavaBeans.c
+++ b/Wrapping/Tools/vtkParseJavaBeans.c
@@ -304,7 +304,7 @@ int checkFunctionSignature(ClassInfo *data)
     VTK_PARSE_ID_TYPE, VTK_PARSE_UNSIGNED_ID_TYPE,
     VTK_PARSE_LONG_LONG, VTK_PARSE_UNSIGNED_LONG_LONG,
     VTK_PARSE___INT64, VTK_PARSE_UNSIGNED___INT64,
-    VTK_PARSE_VTK_OBJECT, VTK_PARSE_STRING,
+    VTK_PARSE_OBJECT, VTK_PARSE_STRING,
     0
   };
 
@@ -382,7 +382,7 @@ int checkFunctionSignature(ClassInfo *data)
         }
       }
 
-    if (aType == VTK_PARSE_VTK_OBJECT) args_ok = 0;
+    if (aType == VTK_PARSE_OBJECT) args_ok = 0;
     if (((aType & VTK_PARSE_INDIRECT) != VTK_PARSE_POINTER) &&
         ((aType & VTK_PARSE_INDIRECT) != 0) &&
         (aType != VTK_PARSE_STRING_REF)) args_ok = 0;
@@ -439,7 +439,7 @@ int checkFunctionSignature(ClassInfo *data)
 
     if (((aType & VTK_PARSE_INDIRECT) == VTK_PARSE_POINTER)&&
         (currentFunction->ArgCounts[i] <= 0)&&
-        (aType != VTK_PARSE_VTK_OBJECT_PTR)&&
+        (aType != VTK_PARSE_OBJECT_PTR)&&
         (aType != VTK_PARSE_CHAR_PTR)) args_ok = 0;
     }
 
diff --git a/Wrapping/Tools/vtkParseMain.c b/Wrapping/Tools/vtkParseMain.c
index 75b2528..96fa88c 100644
--- a/Wrapping/Tools/vtkParseMain.c
+++ b/Wrapping/Tools/vtkParseMain.c
@@ -370,6 +370,9 @@ FileInfo *vtkParse_Main(int argc, char *argv[])
   int argn;
   char **args;
 
+  /* set the command name for diagnostics */
+  vtkParse_SetCommandName(parse_exename(argv[0]));
+
   /* expand any "@file" args */
   vtkParse_InitStringCache(&strings);
   parse_expand_args(&strings, argc, argv, &argn, &args);
@@ -485,6 +488,9 @@ void vtkParse_MainMulti(int argc, char *argv[])
   char **args;
   StringCache strings;
 
+  /* set the command name for diagnostics */
+  vtkParse_SetCommandName(parse_exename(argv[0]));
+
   /* expand any "@file" args */
   vtkParse_InitStringCache(&strings);
   parse_expand_args(&strings, argc, argv, &argn, &args);
diff --git a/Wrapping/Tools/vtkParsePreprocess.c b/Wrapping/Tools/vtkParsePreprocess.c
index 9daaab8..deb3efc 100644
--- a/Wrapping/Tools/vtkParsePreprocess.c
+++ b/Wrapping/Tools/vtkParsePreprocess.c
@@ -54,76 +54,6 @@
 #define HASH_LINE       0x7c9a15adu
 #define HASH_PRAGMA     0x1566a9fdu
 
-/** Various possible char types */
-#define CPRE_ID         0x01  /* A-Z a-z and _ */
-#define CPRE_DIGIT      0x02  /* 0-9 */
-#define CPRE_IDGIT      0x03  /* 0-9 A-Z a-z and _ */
-#define CPRE_HEX        0x04  /* 0-9A-Fa-f */
-#define CPRE_EXP        0x08  /* EPep (exponents for floats) */
-#define CPRE_SIGN       0x10  /* +- (sign for floats) */
-#define CPRE_QUOTE      0x20  /* " and ' */
-#define CPRE_HSPACE     0x40  /* space, tab, carriage return */
-#define CPRE_VSPACE     0x80  /* newline, vertical tab, form feed */
-#define CPRE_WHITE      0xC0  /* all whitespace characters */
-
-/** Whitespace types.
- * WS_NO_EOL treats newline as end-of-line, instead of whitespace.
- * WS_ALL treats newlines as regular whitespace.
- * WS_COMMENT does not treat comments as whitespace, allowing
- * comments blocks to be returned as tokens. */
-typedef enum _preproc_space_t
-{
-  WS_NO_EOL = CPRE_HSPACE, /* skip horizontal whitespace only */
-  WS_ALL    = CPRE_WHITE,  /* skip all whitespace */
-  WS_COMMENT = (CPRE_WHITE | 0x100), /* comments as tokens */
-} preproc_space_t;
-
-/** Preprocessor tokens. */
-typedef enum _preproc_token_t
-{
-  TOK_OTHER = 257,
-  TOK_ID,        /* any id */
-  TOK_CHAR,      /* char literal */
-  TOK_STRING,    /* string literal */
-  TOK_NUMBER,    /* any numeric literal */
-  TOK_COMMENT,   /* C or C++ comment */
-  TOK_DBLHASH,   /* ## */
-  TOK_SCOPE,     /* :: */
-  TOK_INCR,      /* ++ */
-  TOK_DECR,      /* -- */
-  TOK_RSHIFT,    /* >> */
-  TOK_LSHIFT,    /* << */
-  TOK_AND,       /* && */
-  TOK_OR,        /* || */
-  TOK_EQ,        /* == */
-  TOK_NE,        /* != */
-  TOK_GE,        /* >= */
-  TOK_LE,        /* <= */
-  TOK_ADD_EQ,    /* += */
-  TOK_SUB_EQ,    /* -= */
-  TOK_MUL_EQ,    /* *= */
-  TOK_DIV_EQ,    /* /= */
-  TOK_MOD_EQ,    /* %= */
-  TOK_AND_EQ,    /* &= */
-  TOK_OR_EQ,     /* |= */
-  TOK_XOR_EQ,    /* ^= */
-  TOK_ARROW,     /* -> */
-  TOK_DOT_STAR,  /* .* */
-  TOK_ARROW_STAR,/* ->* */
-  TOK_RSHIFT_EQ, /* >>= */
-  TOK_LSHIFT_EQ, /* <<= */
-  TOK_ELLIPSIS,  /* ... */
-} preproc_token_t;
-
-/** A struct for going through the input one token at a time. */
-typedef struct _preproc_tokenizer
-{
-  int tok;
-  unsigned int hash;
-  const char *text;
-  size_t len;
-} preproc_tokenizer;
-
 /** Extend dynamic arrays in a progression of powers of two.
  * Whenever "n" reaches a power of two, then the array size is
  * doubled so that "n" can be safely incremented. */
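The private preproc_tokenizer removed above gives way to the shared StringTokenizer driven by vtkParse_InitTokenizer() and vtkParse_NextToken(), as the later hunks in this file show. The toy tokenizer below mirrors the same tok/text/len interface for identifiers, numbers and single characters only, just to make the loop structure concrete; it is a sketch, not the VTK implementation.

    #include <ctype.h>
    #include <stdio.h>
    #include <stddef.h>

    enum { XTOK_END = 0, XTOK_ID = 258, XTOK_NUMBER = 259 };

    typedef struct
    {
      int tok;           /* token type, or the character itself */
      const char *text;  /* start of the current token */
      size_t len;        /* length of the current token */
    } XTokenizer;

    static int xtok_next(XTokenizer *t)
    {
      const char *cp = t->text + t->len;
      while (*cp == ' ' || *cp == '\t') { cp++; }
      t->text = cp;
      if (*cp == '\0')              { t->tok = XTOK_END; t->len = 0; }
      else if (isalpha((unsigned char)*cp) || *cp == '_')
        {
        t->tok = XTOK_ID; t->len = 0;
        while (isalnum((unsigned char)cp[t->len]) || cp[t->len] == '_') { t->len++; }
        }
      else if (isdigit((unsigned char)*cp))
        {
        t->tok = XTOK_NUMBER; t->len = 0;
        while (isdigit((unsigned char)cp[t->len])) { t->len++; }
        }
      else                          { t->tok = *cp; t->len = 1; }
      return t->tok;
    }

    static void xtok_init(XTokenizer *t, const char *text)
    {
      t->tok = 0; t->text = text; t->len = 0;
      xtok_next(t);
    }

    int main(void)
    {
      /* same shape as the preproc_identical() loop in the hunks below */
      XTokenizer t;
      xtok_init(&t, "VTK_VERSION >= 6");
      do
        {
        printf("tok=%d \"%.*s\"\n", t.tok, (int)t.len, t.text);
        }
      while (xtok_next(&t));
      return 0;
    }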
@@ -165,354 +95,6 @@ static preproc_uint_t string_to_preproc_uint(const char *cp, int base)
 #endif
 }
 
-/** Array for quick lookup of char types */
-static unsigned char preproc_charbits[] = {
-  0, 0, 0, 0, 0, 0, 0, 0, 0,
-  CPRE_HSPACE, /* tab */
-  CPRE_VSPACE, CPRE_VSPACE, CPRE_VSPACE, /* newline, vtab, form feed */
-  CPRE_HSPACE, /* carriage return */
-  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-  CPRE_HSPACE, /* ' ' */
-  0, CPRE_QUOTE, 0, 0, 0, 0, CPRE_QUOTE, 0, 0, /* !"#$%&'() */
-  0, CPRE_SIGN, 0, CPRE_SIGN, 0, 0, /* *+,-./ */
-  CPRE_DIGIT|CPRE_HEX, /* 0 */
-  CPRE_DIGIT|CPRE_HEX, CPRE_DIGIT|CPRE_HEX,
-  CPRE_DIGIT|CPRE_HEX, CPRE_DIGIT|CPRE_HEX,
-  CPRE_DIGIT|CPRE_HEX, CPRE_DIGIT|CPRE_HEX,
-  CPRE_DIGIT|CPRE_HEX, CPRE_DIGIT|CPRE_HEX,
-  CPRE_DIGIT|CPRE_HEX, /* 9 */
-  0, 0, 0, 0, 0, 0, 0, /* :;<=>?@ */
-  CPRE_ID|CPRE_HEX, /* A */
-  CPRE_ID|CPRE_HEX, CPRE_ID|CPRE_HEX, CPRE_ID|CPRE_HEX, /* BCD */
-  CPRE_ID|CPRE_HEX|CPRE_EXP, /* E */
-  CPRE_ID|CPRE_HEX, CPRE_ID, CPRE_ID, CPRE_ID, /* FGHI */
-  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* JKLM */
-  CPRE_ID, CPRE_ID, CPRE_ID|CPRE_EXP, CPRE_ID, /* NOPQ */
-  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* RSTU */
-  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* VWXY */
-  CPRE_ID, /* Z */
-  0, 0, 0, 0, /* [\\]^ */
-  CPRE_ID, /* _ */
-  0, /* ` */
-  CPRE_ID|CPRE_HEX, /* a */
-  CPRE_ID|CPRE_HEX, CPRE_ID|CPRE_HEX, CPRE_ID|CPRE_HEX, /* bcd */
-  CPRE_ID|CPRE_HEX|CPRE_EXP, /* e */
-  CPRE_ID|CPRE_HEX, CPRE_ID, CPRE_ID, CPRE_ID, /* fghi */
-  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* jklm */
-  CPRE_ID, CPRE_ID, CPRE_ID|CPRE_EXP, CPRE_ID, /* nopq */
-  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* rstu */
-  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* vwxy */
-  CPRE_ID, /* z */
-  0, 0, 0, 0, /* {|}~ */
-  0, /* '\x7f' */
-  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-};
-
-/** Macro to get char type */
-#define preproc_chartype(c, bits) \
-  ((preproc_charbits[(unsigned char)(c)] & bits) != 0)
-
-/** Skip over a comment. */
-static void preproc_skip_comment(const char **cpp)
-{
-  const char *cp = *cpp;
-
-  if (cp[0] == '/')
-    {
-    if (cp[1] == '/')
-      {
-      cp += 2;
-      while (*cp != '\n' && *cp != '\0')
-        {
-        if (cp[0] == '\\')
-          {
-          if (cp[1] == '\n') { cp++; }
-          else if (cp[1] == '\r' && cp[2] == '\n') { cp += 2; }
-          }
-        cp++;
-        }
-      }
-    else if (cp[1] == '*')
-      {
-      cp += 2;
-      while (*cp != '\0')
-        {
-        if (cp[0] == '*' && cp[1] == '/') { cp += 2; break; }
-        cp++;
-        }
-      }
-    }
-
-  *cpp = cp;
-}
-
-/** Skip over whitespace, but not newlines unless preceded by backlash. */
-static void preproc_skip_whitespace(
-  const char **cpp, preproc_space_t spacetype)
-{
-  const char *cp = *cpp;
-
-  for (;;)
-    {
-    if (preproc_chartype(*cp, spacetype))
-      {
-      do
-        {
-        cp++;
-        }
-      while (preproc_chartype(*cp, spacetype));
-      }
-    if (cp[0] == '\\')
-      {
-      if (cp[1] == '\n')
-        {
-        cp += 2;
-        }
-      else if (cp[1] == '\r' && cp[2] == '\n')
-        {
-        cp += 3;
-        }
-      else
-        {
-        break;
-        }
-      }
-    else if (cp[0] == '/' && (spacetype & WS_COMMENT) != WS_COMMENT)
-      {
-      if (cp[1] == '/' || cp[1] == '*')
-        {
-        preproc_skip_comment(&cp);
-        }
-      else
-        {
-        break;
-        }
-      }
-    else
-      {
-      break;
-      }
-    }
-
-  *cpp = cp;
-}
-
-/** Skip over string and char literals. */
-static void preproc_skip_quotes(const char **cpp)
-{
-  const char *cp = *cpp;
-  const char qc = *cp;
-
-  if (preproc_chartype(*cp, CPRE_QUOTE))
-    {
-    cp++;
-    while (*cp != qc && *cp != '\n' && *cp != '\0')
-      {
-      if (*cp++ == '\\')
-        {
-        if (cp[0] == '\r' && cp[1] == '\n') { cp += 2; }
-        else if (*cp != '\0') { cp++; }
-        }
-      }
-    }
-  if (*cp == qc)
-    {
-    cp++;
-    }
-
-  *cpp = cp;
-}
-
-/** Skip over a name. */
-static void preproc_skip_name(const char **cpp)
-{
-  const char *cp = *cpp;
-
-  if (preproc_chartype(*cp, CPRE_ID))
-    {
-    do
-      {
-      cp++;
-      }
-    while (preproc_chartype(*cp, CPRE_IDGIT));
-    }
-
-  *cpp = cp;
-}
-
-/** A simple 32-bit hash function based on "djb2". */
-static unsigned int preproc_hash_name(const char **cpp)
-{
-  const char *cp = (*cpp);
-  unsigned int h = 5381;
-
-  if (preproc_chartype(*cp, CPRE_ID))
-    {
-    do { h = (h << 5) + h + (unsigned char)*cp++; }
-    while (preproc_chartype(*cp, CPRE_IDGIT));
-    }
-
-  *cpp = cp;
-  return h;
-}
-
-/** Skip over a number. */
-static void preproc_skip_number(const char **cpp)
-{
-  const char *cp = *cpp;
-
-  if (preproc_chartype(cp[0], CPRE_DIGIT) ||
-      (cp[0] == '.' && preproc_chartype(cp[1], CPRE_DIGIT)))
-    {
-    do
-      {
-      char c = *cp++;
-      if (preproc_chartype(c, CPRE_EXP) &&
-          preproc_chartype(*cp, CPRE_SIGN))
-        {
-        cp++;
-        }
-      }
-    while (preproc_chartype(*cp, CPRE_IDGIT) || *cp == '.');
-    }
-
-  *cpp = cp;
-}
-
-/** Return the next preprocessor token, or '0' if none left. */
-static int preproc_next(preproc_tokenizer *tokens)
-{
-  const char *cp = tokens->text + tokens->len;
-  preproc_skip_whitespace(&cp, WS_NO_EOL);
-
-  if (preproc_chartype(*cp, CPRE_ID))
-    {
-    const char *ep = cp;
-    unsigned int h = preproc_hash_name(&ep);
-    tokens->tok = TOK_ID;
-    tokens->hash = h;
-    tokens->text = cp;
-    tokens->len = ep - cp;
-    }
-  else if (preproc_chartype(*cp, CPRE_QUOTE))
-    {
-    const char *ep = cp;
-    preproc_skip_quotes(&ep);
-    tokens->tok = (*cp == '\"' ? TOK_STRING : TOK_CHAR);
-    tokens->hash = 0;
-    tokens->text = cp;
-    tokens->len = ep - cp;
-    }
-  else if (preproc_chartype(*cp, CPRE_DIGIT) ||
-           (cp[0] == '.' && preproc_chartype(cp[1], CPRE_DIGIT)))
-    {
-    const char *ep = cp;
-    preproc_skip_number(&ep);
-    tokens->tok = TOK_NUMBER;
-    tokens->hash = 0;
-    tokens->text = cp;
-    tokens->len = ep - cp;
-    }
-  else if (cp[0] == '/' && (cp[1] == '/' || cp[1] == '*'))
-    {
-    const char *ep = cp;
-    preproc_skip_comment(&ep);
-    tokens->tok = TOK_COMMENT;
-    tokens->hash = 0;
-    tokens->text = cp;
-    tokens->len = ep - cp;
-    }
-  else
-    {
-    int t = cp[0];
-    size_t l = 1;
-
-    switch (cp[0])
-      {
-      case ':':
-        if (cp[1] == ':') { l = 2; t = TOK_SCOPE; }
-        break;
-      case '.':
-        if (cp[1] == '.' && cp[2] == '.') { l = 3; t = TOK_ELLIPSIS; }
-        else if (cp[1] == '*') { l = 2; t = TOK_DOT_STAR; }
-        break;
-      case '=':
-        if (cp[1] == '=') { l = 2; t = TOK_EQ; }
-        break;
-      case '!':
-        if (cp[1] == '=') { l = 2; t = TOK_NE; }
-        break;
-      case '<':
-        if (cp[1] == '<' && cp[2] == '=') { l = 3; t = TOK_LSHIFT_EQ; }
-        else if (cp[1] == '<') { l = 2; t = TOK_LSHIFT; }
-        else if (cp[1] == '=') { l = 2; t = TOK_LE; }
-        break;
-      case '>':
-        if (cp[1] == '>' && cp[2] == '=') { l = 3; t = TOK_RSHIFT_EQ; }
-        else if (cp[1] == '>') { l = 2; t = TOK_RSHIFT; }
-        else if (cp[1] == '=') { l = 2; t = TOK_GE; }
-        break;
-      case '&':
-        if (cp[1] == '=') { l = 2; t = TOK_AND_EQ; }
-        else if (cp[1] == '&') { l = 2; t = TOK_AND; }
-        break;
-      case '|':
-        if (cp[1] == '=') { l = 2; t = TOK_OR_EQ; }
-        else if (cp[1] == '|') { l = 2; t = TOK_OR; }
-        break;
-      case '^':
-        if (cp[1] == '=') { l = 2; t = TOK_XOR_EQ; }
-        break;
-      case '*':
-        if (cp[1] == '=') { l = 2; t = TOK_MUL_EQ; }
-        break;
-      case '/':
-        if (cp[1] == '=') { l = 2; t = TOK_DIV_EQ; }
-        break;
-      case '%':
-        if (cp[1] == '=') { l = 2; t = TOK_MOD_EQ; }
-        break;
-      case '+':
-        if (cp[1] == '+') { l = 2; t = TOK_INCR; }
-        else if (cp[1] == '=') { l = 2; t = TOK_ADD_EQ; }
-        break;
-      case '-':
-        if (cp[1] == '>' && cp[2] == '*') { l = 3; t = TOK_ARROW_STAR; }
-        else if (cp[1] == '>') { l = 2; t = TOK_ARROW; }
-        else if (cp[1] == '-') { l = 2; t = TOK_DECR; }
-        else if (cp[1] == '=') { l = 2; t = TOK_SUB_EQ; }
-        break;
-      case '#':
-        if (cp[1] == '#') { l = 2; t = TOK_DBLHASH; }
-        break;
-      case '\n':
-      case '\0':
-        { l = 0; t = 0; }
-        break;
-      }
-
-    tokens->tok = t;
-    tokens->hash = 0;
-    tokens->text = cp;
-    tokens->len = l;
-    }
-
-  return tokens->tok;
-}
-
-/** Initialize the tokenizer. */
-static void preproc_init(preproc_tokenizer *tokens, const char *text)
-{
-  tokens->tok = 0;
-  tokens->hash = 0;
-  tokens->text = text;
-  tokens->len = 0;
-  preproc_next(tokens);
-}
-
 /** Tokenize and compare two strings */
 static int preproc_identical(const char *text1, const char *text2)
 {
@@ -524,11 +106,11 @@ static int preproc_identical(const char *text1, const char *text2)
 
     if (text1 && text2)
       {
-      preproc_tokenizer t1;
-      preproc_tokenizer t2;
+      StringTokenizer t1;
+      StringTokenizer t2;
 
-      preproc_init(&t1, text1);
-      preproc_init(&t2, text2);
+      vtkParse_InitTokenizer(&t1, text1, WS_PREPROC);
+      vtkParse_InitTokenizer(&t2, text2, WS_PREPROC);
 
       do
         {
@@ -539,8 +121,8 @@ static int preproc_identical(const char *text1, const char *text2)
           {
           break;
           }
-        preproc_next(&t1);
-        preproc_next(&t2);
+        vtkParse_NextToken(&t1);
+        vtkParse_NextToken(&t2);
         }
       while (t1.tok && t2.tok);
 
@@ -551,18 +133,6 @@ static int preproc_identical(const char *text1, const char *text2)
   return result;
 }
 
-/** Duplicate the first n bytes of a string. */
-static const char *preproc_strndup(const char *in, size_t n)
-{
-  char *res = NULL;
-
-  res = (char *)malloc(n+1);
-  strncpy(res, in, n);
-  res[n] = '\0';
-
-  return res;
-}
-
 /** Create a new preprocessor macro. */
 static MacroInfo *preproc_new_macro(
   PreprocessInfo *info, const char *name, const char *definition)
@@ -572,28 +142,25 @@ static MacroInfo *preproc_new_macro(
 
   if (name)
     {
-    size_t n;
-    const char *cp = name;
-    preproc_skip_name(&cp);
-    n = cp - name;
-    macro->Name = preproc_strndup(name, n);
+    size_t n = vtkParse_SkipId(name);
+    macro->Name = vtkParse_CacheString(info->Strings, name, n);
     }
 
   if (definition)
     {
     size_t n;
     const char *cp = definition;
-    preproc_tokenizer tokens;
-    preproc_init(&tokens, cp);
+    StringTokenizer tokens;
+    vtkParse_InitTokenizer(&tokens, cp, WS_PREPROC);
 
     do
       {
       cp = tokens.text + tokens.len;
       }
-    while (preproc_next(&tokens));
+    while (vtkParse_NextToken(&tokens));
 
     n = cp - definition;
-    macro->Definition = preproc_strndup(definition, n);
+    macro->Definition = vtkParse_CacheString(info->Strings, definition, n);
     }
 
   macro->IsExternal = info->IsExternal;
@@ -609,7 +176,7 @@ static void preproc_free_macro(MacroInfo *info)
 
 /** Find a preprocessor macro, return 0 if not found. */
 static MacroInfo *preproc_find_macro(
-  PreprocessInfo *info, preproc_tokenizer *token)
+  PreprocessInfo *info, StringTokenizer *token)
 {
   unsigned int m = PREPROC_HASH_TABLE_SIZE - 1;
   unsigned int i = (token->hash & m);
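The lookup above masks the token's precomputed hash with PREPROC_HASH_TABLE_SIZE - 1, which only works because the table size is a power of two; the hash itself is the djb2 function that the removed preproc_hash_name() computed (h = h*33 + c, seeded with 5381) and that now comes from the shared tokenizer's hash field. A compact sketch, with an illustrative table size:

    #include <stdio.h>

    #define XTABLE_SIZE 64u   /* must be a power of two for the mask trick */

    /* djb2: h = h*33 + c, seeded with 5381, as in the removed preproc_hash_name() */
    static unsigned int xhash(const char *name)
    {
      unsigned int h = 5381;
      while (*name) { h = (h << 5) + h + (unsigned char)*name++; }
      return h;
    }

    int main(void)
    {
      const char *names[] = { "VTK_MAJOR_VERSION", "__cplusplus", "NULL" };
      int i;
      for (i = 0; i < 3; i++)
        {
        unsigned int h = xhash(names[i]);
        /* bucket index: cheap masking instead of the modulo operator */
        printf("%-18s hash=%10u bucket=%u\n", names[i], h, h & (XTABLE_SIZE - 1u));
        }
      return 0;
    }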
@@ -641,7 +208,7 @@ static MacroInfo *preproc_find_macro(
 /** Return the address of the macro within the hash table.
   * If "insert" is nonzero, add a new location if macro not found. */
 static MacroInfo **preproc_macro_location(
-  PreprocessInfo *info, preproc_tokenizer *token, int insert)
+  PreprocessInfo *info, StringTokenizer *token, int insert)
 {
   MacroInfo ***htable = info->MacroHashTable;
   unsigned int m = PREPROC_HASH_TABLE_SIZE - 1;
@@ -724,7 +291,7 @@ static MacroInfo **preproc_macro_location(
 
 /** Remove a preprocessor macro.  Returns 0 if macro not found. */
 static int preproc_remove_macro(
-  PreprocessInfo *info, preproc_tokenizer *token)
+  PreprocessInfo *info, StringTokenizer *token)
 {
   MacroInfo **hptr;
 
@@ -751,11 +318,11 @@ static int preproc_remove_macro(
 static MacroInfo *preproc_add_macro_definition(
   PreprocessInfo *info, const char *name, const char *definition)
 {
-  preproc_tokenizer token;
+  StringTokenizer token;
   MacroInfo *macro;
   MacroInfo **macro_p;
 
-  preproc_init(&token, name);
+  vtkParse_InitTokenizer(&token, name, WS_PREPROC);
 
   macro = preproc_new_macro(info, name, definition);
   macro_p = preproc_macro_location(info, &token, 1);
@@ -771,7 +338,7 @@ static MacroInfo *preproc_add_macro_definition(
 }
 
 /** Skip over parentheses, return nonzero if not closed. */
-static int preproc_skip_parentheses(preproc_tokenizer *tokens)
+static int preproc_skip_parentheses(StringTokenizer *tokens)
 {
   int depth = 0;
 
@@ -779,7 +346,7 @@ static int preproc_skip_parentheses(preproc_tokenizer *tokens)
     {
     depth = 1;
 
-    while (depth > 0 && preproc_next(tokens))
+    while (depth > 0 && vtkParse_NextToken(tokens))
       {
       if (tokens->tok == '(')
         {
@@ -794,7 +361,7 @@ static int preproc_skip_parentheses(preproc_tokenizer *tokens)
 
   if (tokens->tok == ')')
     {
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
     return VTK_PARSE_OK;
     }
 
@@ -839,7 +406,7 @@ static int preproc_evaluate_char(
       else if (*cp == 'x')
         {
         *val = string_to_preproc_int(cp+1, 16);
-        do { cp++; } while (preproc_chartype(*cp, CPRE_HEX));
+        do { cp++; } while (vtkParse_CharType(*cp, CPRE_HEX));
         }
       }
     if (*cp != '\'')
@@ -874,12 +441,12 @@ static int preproc_evaluate_integer(
     base = 16;
     *is_unsigned = 1;
     ep = cp;
-    while (preproc_chartype(*ep, CPRE_HEX))
+    while (vtkParse_CharType(*ep, CPRE_HEX))
       {
       ep++;
       }
     }
-  else if (cp[0] == '0' && preproc_chartype(cp[1], CPRE_DIGIT))
+  else if (cp[0] == '0' && vtkParse_CharType(cp[1], CPRE_DIGIT))
     {
     cp += 1;
     base = 8;
@@ -894,7 +461,7 @@ static int preproc_evaluate_integer(
     {
     base = 10;
     *is_unsigned = 0;
-    while (preproc_chartype(*ep, CPRE_DIGIT))
+    while (vtkParse_CharType(*ep, CPRE_DIGIT))
       {
       ep++;
       }
@@ -927,12 +494,12 @@ static int preproc_evaluate_integer(
 
 /* forward declaration */
 static int preproc_evaluate_expression(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned);
 
 /** Evaluate a single item in an expression. */
 static int preproc_evaluate_single(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   int result = VTK_PARSE_OK;
@@ -944,12 +511,12 @@ static int preproc_evaluate_single(
         strncmp("defined", tokens->text, tokens->len) == 0)
       {
       int paren = 0;
-      preproc_next(tokens);
+      vtkParse_NextToken(tokens);
 
       if (tokens->tok == '(')
         {
         paren = 1;
-        preproc_next(tokens);
+        vtkParse_NextToken(tokens);
         }
       if (tokens->tok != TOK_ID)
         {
@@ -965,7 +532,7 @@ static int preproc_evaluate_single(
       *is_unsigned = 0;
       *val = (preproc_find_macro(info, tokens) != 0);
 
-      preproc_next(tokens);
+      vtkParse_NextToken(tokens);
       if (paren)
         {
         if (tokens->tok != ')')
@@ -975,7 +542,7 @@ static int preproc_evaluate_single(
 #endif
           return VTK_PARSE_SYNTAX_ERROR;
           }
-        preproc_next(tokens);
+        vtkParse_NextToken(tokens);
         }
 
       return result;
@@ -987,7 +554,7 @@ static int preproc_evaluate_single(
       const char *args = NULL;
       const char *expansion = NULL;
       const char *cp;
-      preproc_next(tokens);
+      vtkParse_NextToken(tokens);
       *val = 0;
       *is_unsigned = 0;
 
@@ -1018,7 +585,7 @@ static int preproc_evaluate_single(
         return (args ? VTK_PARSE_MACRO_NUMARGS : VTK_PARSE_SYNTAX_ERROR);
         }
       cp = expansion;
-      preproc_skip_whitespace(&cp, WS_NO_EOL);
+      cp += vtkParse_SkipWhitespace(cp, WS_PREPROC);
       if (*cp != '\0')
         {
         macro->IsExcluded = 1;
@@ -1036,13 +603,13 @@ static int preproc_evaluate_single(
 
   if (tokens->tok == '(')
     {
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
     result = preproc_evaluate_expression(info, tokens, val, is_unsigned);
     if ((result & VTK_PARSE_FATAL_ERROR) == 0)
       {
       if (tokens->tok == ')')
         {
-        preproc_next(tokens);
+        vtkParse_NextToken(tokens);
         return result;
         }
 #if PREPROC_DEBUG
@@ -1060,23 +627,23 @@ static int preproc_evaluate_single(
       {
       result = VTK_PARSE_PREPROC_FLOAT;
       }
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
     return result;
     }
   else if (tokens->tok == TOK_CHAR)
     {
     result = preproc_evaluate_char(tokens->text, val, is_unsigned);
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
     return result;
     }
   else if (tokens->tok == TOK_STRING)
     {
     *val = 0;
     *is_unsigned = 0;
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
     while (tokens->tok == TOK_STRING)
       {
-      preproc_next(tokens);
+      vtkParse_NextToken(tokens);
       }
     return VTK_PARSE_PREPROC_STRING;
     }
@@ -1091,7 +658,7 @@ static int preproc_evaluate_single(
 }
 
 static int preproc_evaluate_unary(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   int op = tokens->tok;
@@ -1102,7 +669,7 @@ static int preproc_evaluate_unary(
     return preproc_evaluate_single(info, tokens, val, is_unsigned);
     }
 
-  preproc_next(tokens);
+  vtkParse_NextToken(tokens);
 
   result = preproc_evaluate_unary(info, tokens, val, is_unsigned);
   if ((result & VTK_PARSE_FATAL_ERROR) == 0)
@@ -1117,7 +684,7 @@ static int preproc_evaluate_unary(
 }
 
 static int preproc_evaluate_multiply(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   int op;
@@ -1134,7 +701,7 @@ static int preproc_evaluate_multiply(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_unary(info, tokens, &rval, &rtype);
 
@@ -1215,7 +782,7 @@ static int preproc_evaluate_multiply(
 }
 
 static int preproc_evaluate_add(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   int op;
@@ -1232,7 +799,7 @@ static int preproc_evaluate_add(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_multiply(info, tokens, &rval, &rtype);
 
@@ -1252,7 +819,7 @@ static int preproc_evaluate_add(
 }
 
 static int preproc_evaluate_bitshift(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   int op;
@@ -1270,7 +837,7 @@ static int preproc_evaluate_bitshift(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_add(info, tokens, &rval, &rtype);
 
@@ -1302,7 +869,7 @@ static int preproc_evaluate_bitshift(
 }
 
 static int preproc_evaluate_compare(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   int op;
@@ -1319,7 +886,7 @@ static int preproc_evaluate_compare(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_bitshift(info, tokens, &rval, &rtype);
 
@@ -1370,7 +937,7 @@ static int preproc_evaluate_compare(
 }
 
 static int preproc_evaluate_equal(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   int op;
@@ -1387,7 +954,7 @@ static int preproc_evaluate_equal(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_compare(info, tokens, &rval, &rtype);
 
@@ -1406,7 +973,7 @@ static int preproc_evaluate_equal(
 }
 
 static int preproc_evaluate_and(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   preproc_int_t rval;
@@ -1421,7 +988,7 @@ static int preproc_evaluate_and(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_equal(info, tokens, &rval, &rtype);
 
@@ -1433,7 +1000,7 @@ static int preproc_evaluate_and(
 }
 
 static int preproc_evaluate_xor(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   preproc_int_t rval;
@@ -1448,7 +1015,7 @@ static int preproc_evaluate_xor(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_and(info, tokens, &rval, &rtype);
 
@@ -1460,7 +1027,7 @@ static int preproc_evaluate_xor(
 }
 
 static int preproc_evaluate_or(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   preproc_int_t rval;
@@ -1475,7 +1042,7 @@ static int preproc_evaluate_or(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_xor(info, tokens, &rval, &rtype);
 
@@ -1487,7 +1054,7 @@ static int preproc_evaluate_or(
 }
 
 static int preproc_evaluate_logic_and(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   preproc_int_t rval;
@@ -1502,7 +1069,7 @@ static int preproc_evaluate_logic_and(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     if (*val == 0)
       {
@@ -1523,7 +1090,7 @@ static int preproc_evaluate_logic_and(
           }
         else
           {
-          preproc_next(tokens);
+          vtkParse_NextToken(tokens);
           }
         }
 
@@ -1542,7 +1109,7 @@ static int preproc_evaluate_logic_and(
 }
 
 static int preproc_evaluate_logic_or(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   preproc_int_t rval;
@@ -1557,7 +1124,7 @@ static int preproc_evaluate_logic_or(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     if (*val != 0)
       {
@@ -1578,7 +1145,7 @@ static int preproc_evaluate_logic_or(
           }
         else
           {
-          preproc_next(tokens);
+          vtkParse_NextToken(tokens);
           }
         }
 
@@ -1598,7 +1165,7 @@ static int preproc_evaluate_logic_or(
 
 /** Evaluate an arithmetic expression. */
 int preproc_evaluate_expression(
-  PreprocessInfo *info, preproc_tokenizer *tokens,
+  PreprocessInfo *info, StringTokenizer *tokens,
   preproc_int_t *val, int *is_unsigned)
 {
   preproc_int_t rval, sval;
@@ -1613,7 +1180,7 @@ int preproc_evaluate_expression(
       return result;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_expression(info, tokens, &rval, &rtype);
     if ((result & VTK_PARSE_FATAL_ERROR) != 0)
@@ -1629,7 +1196,7 @@ int preproc_evaluate_expression(
       return VTK_PARSE_SYNTAX_ERROR;
       }
 
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     result = preproc_evaluate_expression(info, tokens, &sval, &stype);
     if ((result & VTK_PARSE_FATAL_ERROR) != 0)
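preproc_evaluate_expression() and the helpers above it (logic_or, logic_and, or, xor, and, equal, compare, bitshift, add, multiply, unary, single) form a recursive-descent evaluator in which each function owns one precedence level and delegates to the next tighter one. The miniature below shows the same layering for just two levels over single decimal digits; it sketches the structure only and ignores the signedness and error bits that the real code threads through every level.

    #include <stdio.h>

    /* Each level parses its own operators and delegates to the tighter level,
     * the same layering used by the preproc_evaluate_* chain. */
    static const char *xp;

    static long xprimary(void)           /* tightest level: a single digit */
    {
      return (long)(*xp++ - '0');
    }

    static long xmultiply(void)          /* '*' binds tighter than '+' */
    {
      long v = xprimary();
      while (*xp == '*') { xp++; v *= xprimary(); }
      return v;
    }

    static long xadd(void)               /* loosest level shown here */
    {
      long v = xmultiply();
      while (*xp == '+') { xp++; v += xmultiply(); }
      return v;
    }

    int main(void)
    {
      xp = "1+2*3+4";
      printf("%ld\n", xadd());   /* prints 11 */
      return 0;
    }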
@@ -1656,7 +1223,7 @@ int preproc_evaluate_expression(
  * Returns VTK_PARSE_OK if the expression is true,
  * or VTK_PARSE_SKIP if the expression is false. */
 int preproc_evaluate_conditional(
-  PreprocessInfo *info, preproc_tokenizer *tokens)
+  PreprocessInfo *info, StringTokenizer *tokens)
 {
   preproc_int_t rval;
   int rtype;
@@ -1685,7 +1252,7 @@ int preproc_evaluate_conditional(
  * the following code block should be skipped.
  */
 static int preproc_evaluate_if(
-  PreprocessInfo *info, preproc_tokenizer *tokens)
+  PreprocessInfo *info, StringTokenizer *tokens)
 {
   MacroInfo *macro;
   int v1, v2;
@@ -1699,13 +1266,13 @@ static int preproc_evaluate_if(
       {
       if (tokens->hash == HASH_IF)
         {
-        preproc_next(tokens);
+        vtkParse_NextToken(tokens);
         result = preproc_evaluate_conditional(info, tokens);
         }
       else
         {
         v1 = (tokens->hash != HASH_IFNDEF);
-        preproc_next(tokens);
+        vtkParse_NextToken(tokens);
         if (tokens->tok != TOK_ID)
           {
 #if PREPROC_DEBUG
@@ -1715,7 +1282,7 @@ static int preproc_evaluate_if(
           }
         macro = preproc_find_macro(info, tokens);
         v2 = (macro && !macro->IsExcluded);
-        preproc_next(tokens);
+        vtkParse_NextToken(tokens);
         result = ( (v1 ^ v2) ? VTK_PARSE_SKIP : VTK_PARSE_OK);
         }
 
@@ -1751,12 +1318,12 @@ static int preproc_evaluate_if(
       {
       if (tokens->hash == HASH_ELIF)
         {
-        preproc_next(tokens);
+        vtkParse_NextToken(tokens);
         result = preproc_evaluate_conditional(info, tokens);
         }
       else
         {
-        preproc_next(tokens);
+        vtkParse_NextToken(tokens);
         }
       if (result != VTK_PARSE_SKIP)
         {
@@ -1769,7 +1336,7 @@ static int preproc_evaluate_if(
     }
   else if (tokens->hash == HASH_ENDIF)
     {
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
     if (info->ConditionalDepth > 0)
       {
       /* decrease the skip depth */
@@ -1789,20 +1356,23 @@ static int preproc_evaluate_if(
  * Handle the #define and #undef directives.
  */
 static int preproc_evaluate_define(
-  PreprocessInfo *info, preproc_tokenizer *tokens)
+  PreprocessInfo *info, StringTokenizer *tokens)
 {
   MacroInfo **macro_p;
   MacroInfo *macro;
   int is_function;
+  int is_variadic;
   const char *name;
   size_t namelen;
   const char *definition = 0;
   int n = 0;
   const char **params = NULL;
+  const char *param;
+  size_t l;
 
   if (tokens->hash == HASH_DEFINE)
     {
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
     if (tokens->tok != TOK_ID)
       {
 #if PREPROC_DEBUG
@@ -1814,13 +1384,14 @@ static int preproc_evaluate_define(
     macro_p = preproc_macro_location(info, tokens, 1);
     name = tokens->text;
     namelen = tokens->len;
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     is_function = 0;
+    is_variadic = 0;
     if (name[namelen] == '(')
       {
       is_function = 1;
-      preproc_next(tokens);
+      vtkParse_NextToken(tokens);
       while (tokens->tok != 0 && tokens->tok != ')')
         {
         if (tokens->tok != TOK_ID && tokens->tok != TOK_ELLIPSIS)
@@ -1832,15 +1403,33 @@ static int preproc_evaluate_define(
           return VTK_PARSE_SYNTAX_ERROR;
           }
 
+        param = tokens->text;
+        l = tokens->len;
+
+        if (tokens->tok == TOK_ELLIPSIS)
+          {
+          is_variadic = 1;
+          param = "__VA_ARGS__";
+          l = 11;
+          }
+
         /* add to the arg list */
         params = (const char **)preproc_array_check(
           (char **)params, sizeof(char *), n);
-        params[n++] = preproc_strndup(tokens->text, tokens->len);
+        params[n++] = vtkParse_CacheString(info->Strings, param, l);
+
+        vtkParse_NextToken(tokens);
+
+        /* check for gnu cpp "arg..." parameter */
+        if (tokens->tok == TOK_ELLIPSIS)
+          {
+          is_variadic = 1;
+          vtkParse_NextToken(tokens);
+          }
 
-        preproc_next(tokens);
         if (tokens->tok == ',')
           {
-          preproc_next(tokens);
+          vtkParse_NextToken(tokens);
           }
         else if (tokens->tok != ')')
           {
@@ -1851,7 +1440,7 @@ static int preproc_evaluate_define(
           return VTK_PARSE_SYNTAX_ERROR;
           }
         }
-      preproc_next(tokens);
+      vtkParse_NextToken(tokens);
       }
 
     if (tokens->tok)
@@ -1875,6 +1464,7 @@ static int preproc_evaluate_define(
 
     macro = preproc_new_macro(info, name, definition);
     macro->IsFunction = is_function;
+    macro->IsVariadic = is_variadic;
     macro->NumberOfParameters = n;
     macro->Parameters = params;
     *macro_p = macro;
@@ -1883,7 +1473,7 @@ static int preproc_evaluate_define(
     }
   else if (tokens->hash == HASH_UNDEF)
     {
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
     if (tokens->tok != TOK_ID)
       {
 #if PREPROC_DEBUG
@@ -1904,7 +1494,6 @@ static int preproc_evaluate_define(
 static int preproc_add_include_file(PreprocessInfo *info, const char *name)
 {
   int i, n;
-  char *dp;
 
   n = info->NumberOfIncludeFiles;
   for (i = 0; i < n; i++)
@@ -1915,12 +1504,10 @@ static int preproc_add_include_file(PreprocessInfo *info, const char *name)
       }
     }
 
-  dp = (char *)malloc(strlen(name)+1);
-  strcpy(dp, name);
-
   info->IncludeFiles = (const char **)preproc_array_check(
     (char **)info->IncludeFiles, sizeof(char *), info->NumberOfIncludeFiles);
-  info->IncludeFiles[info->NumberOfIncludeFiles++] = dp;
+  info->IncludeFiles[info->NumberOfIncludeFiles++] =
+    vtkParse_CacheString(info->Strings, name, strlen(name));
 
   return 1;
 }
@@ -1961,7 +1548,7 @@ const char *preproc_find_include_file(
 
   /* check for absolute path of form DRIVE: or /path/to/file */
   j = 0;
-  while (preproc_chartype(filename[j], CPRE_IDGIT)) { j++; }
+  while (vtkParse_CharType(filename[j], CPRE_IDGIT)) { j++; }
 
   if (filename[j] == ':' || filename[0] == '/' || filename[0] == '\\')
     {
@@ -2074,12 +1661,13 @@ const char *preproc_find_include_file(
         }
       else if (stat(output, &fs) == 0)
         {
+        nn = info->NumberOfIncludeFiles;
         info->IncludeFiles = (const char **)preproc_array_check(
-          (char **)info->IncludeFiles, sizeof(char *),
-          info->NumberOfIncludeFiles);
-        info->IncludeFiles[info->NumberOfIncludeFiles++] = output;
-
-        return output;
+          (char **)info->IncludeFiles, sizeof(char *), nn);
+        info->IncludeFiles[info->NumberOfIncludeFiles++] =
+          vtkParse_CacheString(info->Strings, output, strlen(output));
+        free(output);
+        return info->IncludeFiles[nn];
         }
       }
     }
@@ -2089,19 +1677,88 @@ const char *preproc_find_include_file(
 }
 
 /**
+ * Convert a raw string into a normal string.  This is a helper
+ * function for preproc_include_file() to allow raw strings to
+ * be used in preprocessor directives.
+ */
+void preproc_escape_string(
+  char **linep, size_t *linelenp, size_t *jp, size_t d, size_t dl)
+{
+  char *line = *linep;
+  char *r = 0;
+  size_t linelen = *linelenp;
+  size_t l = *jp - d - 2*dl - 2;
+  size_t i;
+  size_t j = d;
+
+  if (l != 0)
+    {
+    r = (char *)malloc(l);
+    memcpy(r, &line[j+dl+1], l);
+    }
+
+  /* remove the "R" prefix */
+  if (j >= 2 && line[j-1] == '\"' && line[j-2] == 'R')
+    {
+    line[j - 2] = '\"';
+    j--;
+    }
+
+  for (i = 0; i < l; i++)
+    {
+    /* expand line buffer as necessary */
+    while (j+4 > linelen)
+      {
+      linelen *= 2;
+      line = (char *)realloc(line, linelen);
+      }
+
+    if ((r[i] >= ' ' && r[i] <= '~') || (r[i] & 0x80) != 0)
+      {
+      line[j++] = r[i];
+      }
+    else switch (r[i])
+      {
+      case '\a': line[j++] = '\\'; line[j++] = 'a'; break;
+      case '\b': line[j++] = '\\'; line[j++] = 'b'; break;
+      case '\f': line[j++] = '\\'; line[j++] = 'f'; break;
+      case '\n': line[j++] = '\\'; line[j++] = 'n'; break;
+      case '\r': line[j++] = '\\'; line[j++] = 'r'; break;
+      case '\t': line[j++] = '\\'; line[j++] = 't'; break;
+      case '\v': line[j++] = '\\'; line[j++] = 'v'; break;
+      case '\\': line[j++] = '\\'; line[j++] = '\\'; break;
+      case '\'': line[j++] = '\\'; line[j++] = '\''; break;
+      case '\"': line[j++] = '\\'; line[j++] = '\"'; break;
+      default:
+        sprintf(&line[j], "\\%3.3o", r[i]);
+        j += 4;
+        break;
+      }
+    }
+
+  free(r);
+  *linep = line;
+  *linelenp = linelen;
+  *jp = j;
+}
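
    [Editor's note: a minimal standalone sketch, not part of the patch, that
    mirrors the escaping step above -- printable characters from the raw-string
    body are copied through unchanged, while control characters become escape
    sequences or octal escapes.  The real helper works in place on the shared
    line buffer; this only demonstrates the rule.]

    #include <stdio.h>

    /* Print `body` as a conventional C string literal, following the same
       per-character rules as the escape loop in preproc_escape_string(). */
    static void emit_escaped(const char *body)
    {
      putchar('\"');
      for (; *body != '\0'; body++)
        {
        char c = *body;
        if ((c >= ' ' && c <= '~') || (c & 0x80) != 0)
          {
          putchar(c);                      /* printable: copy verbatim */
          }
        else switch (c)
          {
          case '\n': fputs("\\n", stdout); break;
          case '\t': fputs("\\t", stdout); break;
          case '\r': fputs("\\r", stdout); break;
          default:   printf("\\%3.3o", (unsigned char)c); break;
          }
        }
      putchar('\"');
      putchar('\n');
    }

    int main(void)
    {
      emit_escaped("a\tb\nc");   /* prints "a\tb\nc" as a quoted literal */
      return 0;
    }
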
+
+/**
  * Include a file.  All macros defined in the included file
  * will have their IsExternal flag set.
  */
 static int preproc_include_file(
   PreprocessInfo *info, const char *filename, int system_first)
 {
+  const char *switchchars = "\n\r\"\'\?\\/*()";
+  char switchchar[256];
   char *tbuf;
   size_t tbuflen = FILE_BUFFER_SIZE;
   char *line;
   size_t linelen = 80;
   size_t i, j, n, r;
-  int in_comment = 0;
-  int in_quote = 0;
+  size_t d = 0;
+  size_t dn = 0;
+  int state = 0;
   int result = VTK_PARSE_OK;
   FILE *fp = NULL;
   const char *path = NULL;
@@ -2158,6 +1815,14 @@ static int preproc_include_file(
   info->IsExternal = 1;
   info->FileName = path;
 
+  /* make a table of interesting characters */
+  memset(switchchar, '\0', 256);
+  n = strlen(switchchars) + 1;
+  for (i = 0; i < n; i++)
+    {
+    switchchar[(unsigned char)(switchchars[i])] = 1;
+    }
+
   tbuf = (char *)malloc(tbuflen+4);
   line = (char *)malloc(linelen);
 
@@ -2241,25 +1906,89 @@ static int preproc_include_file(
         line = (char *)realloc(line, linelen);
         }
 
-      if (in_comment)
+      /* check for uninteresting characters first */
+      if (!switchchar[(unsigned char)(tbuf[i])])
+        {
+        line[j++] = tbuf[i++];
+        }
+      else if (state == '(')
+        {
+        /* look for end of raw string delimiter */
+        if (tbuf[i] == '(')
+          {
+          dn = j - d;
+          state = ')';
+          }
+        line[j++] = tbuf[i++];
+        }
+      else if (state == ')')
+        {
+        /* look for end of raw string */
+        if (tbuf[i] == '\"')
+          {
+          if ((j - d) > 2*dn+1 && line[j-dn-1] == ')' &&
+              strncmp(&line[d], &line[j-dn], dn) == 0)
+            {
+            preproc_escape_string(&line, &linelen, &j, d, dn);
+            state = 0;
+            }
+          }
+        line[j++] = tbuf[i++];
+        }
+#ifdef PREPROC_TRIGRAPHS
+      else if (tbuf[i] == '?' && tbuf[i+1] == '?')
+        {
+        i += 2;
+        switch (tbuf[i])
+          {
+          case '=': tbuf[i] = '#'; break;
+          case '/': tbuf[i] = '\\'; break;
+          case '\'': tbuf[i] = '^'; break;
+          case '(': tbuf[i] = '['; break;
+          case ')': tbuf[i] = ']'; break;
+          case '!': tbuf[i] = '|'; break;
+          case '<': tbuf[i] = '{'; break;
+          case '>': tbuf[i] = '}'; break;
+          case '-': tbuf[i] = '~'; break;
+          default: line[j++] = tbuf[--i];
+          }
+        }
+#endif
+      else if (tbuf[i] == '\\' && tbuf[i+1] == '\n')
+        {
+        i += 2;
+        }
+      else if (tbuf[i] == '\\' && tbuf[i+1] == '\r' && tbuf[i+2] == '\n')
+        {
+        i += 3;
+        }
+      else if (tbuf[i] == '\r' && tbuf[i+1] == '\n')
+        {
+        i++;
+        }
+      else if (state == '*')
         {
         if (tbuf[i] == '*' && tbuf[i+1] == '/')
           {
           line[j++] = tbuf[i++];
           line[j++] = tbuf[i++];
-          in_comment = 0;
+          state = 0;
           }
         else
           {
           line[j++] = tbuf[i++];
           }
         }
-      else if (in_quote)
+      else if (state == '/' && tbuf[i] != '\n')
         {
-        if (tbuf[i] == '\"')
+        line[j++] = tbuf[i++];
+        }
+      else if (state == '\'' || state == '\"')
+        {
+        if (tbuf[i] == state)
           {
           line[j++] = tbuf[i++];
-          in_quote = 0;
+          state = 0;
           }
         else if (tbuf[i] == '\\' && tbuf[i+1] != '\0')
           {
@@ -2271,26 +2000,34 @@ static int preproc_include_file(
           line[j++] = tbuf[i++];
           }
         }
-      else if (tbuf[i] == '/' && tbuf[i+1] == '*')
+      else if (tbuf[i] == '/')
         {
-        line[j++] = tbuf[i++];
-        line[j++] = tbuf[i++];
-        in_comment = 1;
-        }
-      else if (tbuf[i] == '\"')
-        {
-        line[j++] = tbuf[i++];
-        in_quote = 1;
-        }
-      else if (tbuf[i] == '\\' && tbuf[i+1] == '\n')
-        {
-        line[j++] = tbuf[i++];
+        if (tbuf[i+1] == '*' || tbuf[i+1] == '/')
+          {
+          state = tbuf[i+1];
+          line[j++] = tbuf[i++];
+          }
         line[j++] = tbuf[i++];
         }
-      else if (tbuf[i] == '\\' && tbuf[i+1] == '\r' && tbuf[i+2] == '\n')
-        {
-        line[j++] = tbuf[i++];
-        line[j++] = tbuf[i++];
+      else if (tbuf[i] == '\"' || tbuf[i] == '\'')
+        {
+        state = tbuf[i];
+        /* check for raw string prefixes */
+        if (state == '\"' && j > 0 && line[j-1] == 'R' &&
+            ((j > 2 &&
+              (line[j-3] == 'u' || line[j-2] == '8') &&
+              (j == 3 ||
+               !vtkParse_CharType(line[j-4], CPRE_IDGIT|CPRE_QUOTE))) ||
+             (j > 1 &&
+              (line[j-2] == 'u' || line[j-2] == 'U' || line[j-2] == 'L') &&
+              (j == 2 ||
+               !vtkParse_CharType(line[j-3], CPRE_IDGIT|CPRE_QUOTE))) ||
+             (j == 1 ||
+              !vtkParse_CharType(line[j-2], CPRE_IDGIT|CPRE_QUOTE))))
+          {
+          state = '(';
+          d = j + 1;
+          }
         line[j++] = tbuf[i++];
         }
       else if (tbuf[i] != '\n' && tbuf[i] != '\0')
@@ -2309,7 +2046,7 @@ static int preproc_include_file(
       const char *cp = line;
       line[j] = '\0';
       j = 0;
-      preproc_skip_whitespace(&cp, WS_NO_EOL);
+      cp += vtkParse_SkipWhitespace(cp, WS_PREPROC);
       if (*cp == '#')
         {
         vtkParsePreprocess_HandleDirective(info, line);
@@ -2333,14 +2070,14 @@ static int preproc_include_file(
  * only go through the preprocessor.
  */
 static int preproc_evaluate_include(
-  PreprocessInfo *info, preproc_tokenizer *tokens)
+  PreprocessInfo *info, StringTokenizer *tokens)
 {
   const char *cp;
   const char *filename;
 
   if (tokens->hash == HASH_INCLUDE)
     {
-    preproc_next(tokens);
+    vtkParse_NextToken(tokens);
 
     cp = tokens->text;
 
@@ -2364,7 +2101,7 @@ static int preproc_evaluate_include(
     if (*cp == '\"')
       {
       filename = cp + 1;
-      preproc_skip_quotes(&cp);
+      cp += vtkParse_SkipQuotes(cp);
       if (cp <= filename + 1 || *(cp-1) != '\"')
         {
         return VTK_PARSE_SYNTAX_ERROR;
@@ -2397,16 +2134,16 @@ int vtkParsePreprocess_HandleDirective(
   PreprocessInfo *info, const char *directive)
 {
   int result = VTK_PARSE_OK;
-  preproc_tokenizer tokens;
+  StringTokenizer tokens;
 
-  preproc_init(&tokens, directive);
+  vtkParse_InitTokenizer(&tokens, directive, WS_PREPROC);
 
   if (tokens.tok != '#')
     {
     return VTK_PARSE_SYNTAX_ERROR;
     }
 
-  preproc_next(&tokens);
+  vtkParse_NextToken(&tokens);
 
   if (tokens.tok == TOK_ID)
     {
@@ -2424,7 +2161,7 @@ int vtkParsePreprocess_HandleDirective(
          strncmp("endif", tokens.text, tokens.len) == 0))
       {
       result = preproc_evaluate_if(info, &tokens);
-      while (tokens.tok) { preproc_next(&tokens); }
+      while (tokens.tok) { vtkParse_NextToken(&tokens); }
 #if PREPROC_DEBUG
         {
         size_t n = tokens.text - directive;
@@ -2478,8 +2215,8 @@ int vtkParsePreprocess_EvaluateExpression(
   PreprocessInfo *info, const char *text,
   preproc_int_t *val, int *is_unsigned)
 {
-  preproc_tokenizer tokens;
-  preproc_init(&tokens, text);
+  StringTokenizer tokens;
+  vtkParse_InitTokenizer(&tokens, text, WS_PREPROC);
 
   return preproc_evaluate_expression(info, &tokens, val, is_unsigned);
 }
@@ -2582,11 +2319,11 @@ void vtkParsePreprocess_AddStandardMacros(
 int vtkParsePreprocess_AddMacro(
   PreprocessInfo *info, const char *name, const char *definition)
 {
-  preproc_tokenizer token;
+  StringTokenizer token;
   MacroInfo **macro_p;
   MacroInfo *macro;
 
-  preproc_init(&token, name);
+  vtkParse_InitTokenizer(&token, name, WS_PREPROC);
   macro_p = preproc_macro_location(info, &token, 1);
   if (*macro_p)
     {
@@ -2614,10 +2351,10 @@ int vtkParsePreprocess_AddMacro(
 MacroInfo *vtkParsePreprocess_GetMacro(
   PreprocessInfo *info, const char *name)
 {
-  preproc_tokenizer token;
+  StringTokenizer token;
   MacroInfo *macro;
 
-  preproc_init(&token, name);
+  vtkParse_InitTokenizer(&token, name, WS_PREPROC);
   macro = preproc_find_macro(info, &token);
 
   if (macro && !macro->IsExcluded)
@@ -2634,9 +2371,9 @@ MacroInfo *vtkParsePreprocess_GetMacro(
 int vtkParsePreprocess_RemoveMacro(
   PreprocessInfo *info, const char *name)
 {
-  preproc_tokenizer token;
+  StringTokenizer token;
 
-  preproc_init(&token, name);
+  vtkParse_InitTokenizer(&token, name, WS_PREPROC);
 
   if (preproc_remove_macro(info, &token))
     {
@@ -2668,6 +2405,7 @@ const char *vtkParsePreprocess_ExpandMacro(
   size_t k = 0;
   int stringify = 0;
   int noexpand = 0;
+  int empty_variadic = 0;
   int depth = 1;
   int c;
 
@@ -2689,11 +2427,11 @@ const char *vtkParsePreprocess_ExpandMacro(
         {
         if (*cp == '\"' || *cp == '\'')
           {
-          preproc_skip_quotes(&cp);
+          cp += vtkParse_SkipQuotes(cp);
           }
         else if (cp[0] == '/' && (cp[1] == '*' || cp[1] == '/'))
           {
-          preproc_skip_comment(&cp);
+          cp += vtkParse_SkipComment(cp);
           }
         else if (*cp == '(')
           {
@@ -2749,17 +2487,26 @@ const char *vtkParsePreprocess_ExpandMacro(
       }
 #endif
 
+    /* a single arg that is nothing but whitespace also counts as no args */
     if (macro->NumberOfParameters == 0 && n == 1)
       {
       const char *tp = values[0];
-      preproc_skip_whitespace(&tp, WS_NO_EOL);
+      tp += vtkParse_SkipWhitespace(tp, WS_PREPROC);
       if (tp + 1 >= values[1])
         {
         n = 0;
         }
       }
 
-    if (n != macro->NumberOfParameters)
+    /* allow the variadic arg to be empty */
+    if (macro->IsVariadic && n == macro->NumberOfParameters-1)
+      {
+      empty_variadic = 1;
+      }
+
+    /* check for correct number of arguments */
+    if (n < (macro->NumberOfParameters - empty_variadic) ||
+        (n > macro->NumberOfParameters && !macro->IsVariadic))
       {
       if (values != stack_values) { free((char **)values); }
 #if PREPROC_DEBUG
@@ -2784,24 +2531,24 @@ const char *vtkParsePreprocess_ExpandMacro(
     stringify = 0;
     noexpand = 0;
     /* skip all chars that aren't part of a name */
-    while (!preproc_chartype(*cp, CPRE_ID) && *cp != '\0')
+    while (!vtkParse_CharType(*cp, CPRE_ID) && *cp != '\0')
       {
       dp = cp;
-      preproc_skip_whitespace(&cp, WS_NO_EOL);
+      cp += vtkParse_SkipWhitespace(cp, WS_PREPROC);
       if (cp > dp)
         {
         dp = cp;
         }
-      else if (preproc_chartype(*cp, CPRE_QUOTE))
+      else if (vtkParse_CharType(*cp, CPRE_QUOTE))
         {
-        preproc_skip_quotes(&cp);
+        cp += vtkParse_SkipQuotes(cp);
         dp = cp;
         wp = cp;
         noexpand = 0;
         }
-      else if (preproc_chartype(*cp, CPRE_DIGIT))
+      else if (vtkParse_CharType(*cp, CPRE_DIGIT))
         {
-        preproc_skip_number(&cp);
+        cp += vtkParse_SkipNumber(cp);
         dp = cp;
         wp = cp;
         noexpand = 0;
@@ -2812,7 +2559,7 @@ const char *vtkParsePreprocess_ExpandMacro(
         dp = wp;
         cp += 2;
         wp = cp;
-        preproc_skip_whitespace(&cp, WS_NO_EOL);
+        cp += vtkParse_SkipWhitespace(cp, WS_PREPROC);
         break;
         }
       else if (*cp == '#')
@@ -2821,7 +2568,7 @@ const char *vtkParsePreprocess_ExpandMacro(
         dp = cp;
         wp = cp;
         cp++;
-        preproc_skip_whitespace(&cp, WS_NO_EOL);
+        cp += vtkParse_SkipWhitespace(cp, WS_PREPROC);
         break;
         }
       else
@@ -2854,22 +2601,31 @@ const char *vtkParsePreprocess_ExpandMacro(
 
     /* get the name */
     pp = cp;
-    preproc_skip_name(&cp);
-    l = cp - pp;
+    l = vtkParse_SkipId(cp);
+    cp += l;
     if (l > 0)
       {
-      for (j = 0; j < n; j++)
+      for (j = 0; j < macro->NumberOfParameters; j++)
         {
         /* check whether the name matches a parameter */
         if (strncmp(pp, macro->Parameters[j], l) == 0 &&
             macro->Parameters[j][l] == '\0')
           {
-          /* substitute the argument value */
-          l = values[j+1] - values[j] - 1;
-          pp = values[j];
+          if (macro->IsVariadic && j == macro->NumberOfParameters-1)
+            {
+            /* if variadic arg, use all remaining args */
+            pp = values[j] - empty_variadic;
+            l = values[n] - pp - 1;
+            }
+          else
+            {
+            /* else just get one arg */
+            pp = values[j];
+            l = values[j+1] - pp - 1;
+            }
           /* remove leading whitespace from argument */
           c = *pp;
-          while (preproc_chartype(c, CPRE_WHITE))
+          while (vtkParse_CharType(c, CPRE_WHITE))
             {
             c = *(++pp);
             l--;
@@ -2878,7 +2634,7 @@ const char *vtkParsePreprocess_ExpandMacro(
           if (l > 0)
             {
             c = pp[l - 1];
-            while (preproc_chartype(c, CPRE_WHITE))
+            while (vtkParse_CharType(c, CPRE_WHITE))
               {
               if (--l == 0)
                 {
@@ -2889,7 +2645,7 @@ const char *vtkParsePreprocess_ExpandMacro(
             }
           /* check if followed by "##" */
           wp = cp;
-          preproc_skip_whitespace(&wp, WS_NO_EOL);
+          wp += vtkParse_SkipWhitespace(wp, WS_PREPROC);
           if (wp[0] == '#' && wp[1] == '#')
             {
             noexpand = 1;
@@ -2925,6 +2681,7 @@ const char *vtkParsePreprocess_ExpandMacro(
         }
       if (stringify)
         {
+        /* convert argument into a string, due to "#" */
         rp[i++] = '\"';
         for (k = 0; k < l; k++)
           {
@@ -2937,8 +2694,26 @@ const char *vtkParsePreprocess_ExpandMacro(
           }
         rp[i++] = '\"';
         }
+      else if (empty_variadic && j == macro->NumberOfParameters-1)
+        {
+        /* remove trailing comma before empty variadic (non-standard) */
+        k = i;
+        if (k > 0)
+          {
+          do
+            {
+            c = rp[--k];
+            }
+          while (k > 0 && vtkParse_CharType(c, CPRE_WHITE));
+          if (rp[k] == ',')
+            {
+            i = k;
+            }
+          }
+        }
       else if (noexpand)
         {
+        /* do not expand args that will be concatenated with "##" */
         strncpy(&rp[i], pp, l);
         i += l;
         }
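
    [Editor's illustration of the non-standard comma removal handled above
    (the macro name is hypothetical):]

    /* Given:
     *   #define LOG(fmt, ...) fprintf(stderr, fmt, __VA_ARGS__)
     * the call LOG("hi") leaves the variadic part empty, so the expansion
     * becomes fprintf(stderr, "hi"): the comma preceding the empty
     * __VA_ARGS__ is stripped, matching the common (non-standard) GNU
     * cpp behaviour that this branch reproduces. */
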
@@ -3009,8 +2784,8 @@ const char *vtkParsePreprocess_ProcessString(
   size_t i = 0;
   size_t rs = 128;
   int last_tok = 0;
-  preproc_tokenizer tokens;
-  preproc_init(&tokens, text);
+  StringTokenizer tokens;
+  vtkParse_InitTokenizer(&tokens, text, WS_PREPROC);
 
   rp = stack_rp;
   rp[0] = '\0';
@@ -3028,7 +2803,8 @@ const char *vtkParsePreprocess_ProcessString(
         {
         do { --i; } while (i > 0 && rp[i] != '\"');
         }
-      cp++;
+      while (*cp != '\"' && l > 1) { cp++; l--; }
+      if (*cp == '\"' && l > 1) { cp++; l--; }
       }
 
     if (i + l + 2 >= rs)
@@ -3074,12 +2850,12 @@ const char *vtkParsePreprocess_ProcessString(
         if (macro->IsFunction)
           {
           /* expand function macros using the arguments */
-          preproc_next(&tokens);
+          vtkParse_NextToken(&tokens);
           if (tokens.tok == '(')
             {
             int depth = 1;
             args = tokens.text;
-            while (depth > 0 && preproc_next(&tokens))
+            while (depth > 0 && vtkParse_NextToken(&tokens))
               {
               if (tokens.tok == '(')
                 {
@@ -3155,7 +2931,7 @@ const char *vtkParsePreprocess_ProcessString(
     last_tok = tokens.tok;
     l = tokens.len;
     cp = tokens.text;
-    if (preproc_next(&tokens) && tokens.text > cp + l)
+    if (vtkParse_NextToken(&tokens) && tokens.text > cp + l)
       {
       rp[i++] = ' ';
       }
@@ -3233,7 +3009,7 @@ void vtkParsePreprocess_IncludeDirectory(
     (char **)info->IncludeDirectories, sizeof(char *),
     info->NumberOfIncludeDirectories);
   info->IncludeDirectories[info->NumberOfIncludeDirectories++] =
-    preproc_strndup(name, strlen(name));
+    vtkParse_CacheString(info->Strings, name, strlen(name));
 }
 
 /**
@@ -3267,6 +3043,7 @@ void vtkParsePreprocess_InitMacro(MacroInfo *macro)
   macro->NumberOfParameters = 0;
   macro->Parameters = NULL;
   macro->IsFunction = 0;
+  macro->IsVariadic = 0;
   macro->IsExternal = 0;
   macro->IsExcluded = 0;
 }
@@ -3276,17 +3053,6 @@ void vtkParsePreprocess_InitMacro(MacroInfo *macro)
  */
 void vtkParsePreprocess_FreeMacro(MacroInfo *macro)
 {
-  int i, n;
-
-  free((char *)macro->Name);
-  free((char *)macro->Definition);
-  free((char *)macro->Comment);
-
-  n = macro->NumberOfParameters;
-  for (i = 0; i < n; i++)
-    {
-    free((char *)macro->Parameters[i]);
-    }
   free((char **)macro->Parameters);
 
   free(macro);
@@ -3304,13 +3070,16 @@ void vtkParsePreprocess_Init(
   info->IncludeDirectories = NULL;
   info->NumberOfIncludeFiles = 0;
   info->IncludeFiles = NULL;
+  info->Strings = NULL;
   info->IsExternal = 0;
   info->ConditionalDepth = 0;
   info->ConditionalDone = 0;
 
   if (filename)
     {
-    info->FileName = preproc_strndup(filename, strlen(filename));
+    char *cp = (char *)malloc(strlen(filename) + 1);
+    strcpy(cp, filename);
+    info->FileName = cp;
     }
 }
 
@@ -3342,18 +3111,7 @@ void vtkParsePreprocess_Free(PreprocessInfo *info)
     free(info->MacroHashTable);
     }
 
-  n = info->NumberOfIncludeDirectories;
-  for (i = 0; i < n; i++)
-    {
-    free((char *)info->IncludeDirectories[i]);
-    }
   free((char **)info->IncludeDirectories);
-
-  n = info->NumberOfIncludeFiles;
-  for (i = 0; i < n; i++)
-    {
-    free((char *)info->IncludeFiles[i]);
-    }
   free((char **)info->IncludeFiles);
 
   free(info);
diff --git a/Wrapping/Tools/vtkParsePreprocess.h b/Wrapping/Tools/vtkParsePreprocess.h
index 9a4010b..528294e 100644
--- a/Wrapping/Tools/vtkParsePreprocess.h
+++ b/Wrapping/Tools/vtkParsePreprocess.h
@@ -43,6 +43,8 @@
 #ifndef VTK_PARSE_PREPROCESS_H
 #define VTK_PARSE_PREPROCESS_H
 
+#include "vtkParseString.h"
+
 /**
  * The preprocessor int type.  Use the compiler's longest int type.
  */
@@ -65,6 +67,7 @@ typedef struct _MacroInfo
   int            NumberOfParameters; /* only if IsFunction == 1 */
   const char   **Parameters; /* symbols for parameters */
   int            IsFunction; /* this macro requires arguments */
+  int            IsVariadic; /* this macro can take unlimited arguments */
   int            IsExternal; /* this macro is from an included file */
   int            IsExcluded; /* do not expand this macro */
 } MacroInfo;
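
    [A hedged usage sketch of the new IsVariadic flag; the LOG macro is
    hypothetical, and `info` is assumed to have been prepared with
    vtkParsePreprocess_Init() plus a valid Strings cache.]

    /* Define a variadic function-like macro through the preprocessor. */
    vtkParsePreprocess_HandleDirective(info,
      "#define LOG(fmt, ...) fprintf(stderr, fmt, __VA_ARGS__)");

    MacroInfo *m = vtkParsePreprocess_GetMacro(info, "LOG");
    if (m && m->IsFunction && m->IsVariadic)
      {
      /* NumberOfParameters is 2: "fmt" and the synthesized "__VA_ARGS__" */
      }
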
@@ -81,6 +84,7 @@ typedef struct _PreprocessInfo
   const char   **IncludeDirectories;
   int            NumberOfIncludeFiles; /* all included files */
   const char   **IncludeFiles;
+  StringCache   *Strings;          /* to aid string allocation */
   int            IsExternal;       /* label all macros as "external" */
   int            ConditionalDepth; /* internal state variable */
   int            ConditionalDone;  /* internal state variable */
@@ -90,7 +94,7 @@ typedef struct _PreprocessInfo
  * Platforms.  Always choose native unless crosscompiling.
  */
 enum _preproc_platform_t {
-  VTK_PARSE_NATIVE = 0,
+  VTK_PARSE_NATIVE = 0
 };
 
 /**
diff --git a/Wrapping/Tools/vtkParseString.c b/Wrapping/Tools/vtkParseString.c
index f1863c0..0ebbc0c 100644
--- a/Wrapping/Tools/vtkParseString.c
+++ b/Wrapping/Tools/vtkParseString.c
@@ -23,8 +23,433 @@
 #include <stdlib.h>
 #include <string.h>
 
+
+/*----------------------------------------------------------------
+ * String tokenization methods
+ *
+ * Strings must be broken into C++ tokens.
+ * A hash is computed for ids, but not for other tokens.
+ * Comments are generally considered to be whitespace, but
+ * WS_COMMENT can be used to consider comments as tokens.
+ */
+
+/** Array for quick lookup of char types */
+unsigned char parse_charbits[256] = {
+  0, 0, 0, 0, 0, 0, 0, 0, 0,
+  CPRE_HSPACE, /* tab */
+  CPRE_VSPACE, CPRE_VSPACE, CPRE_VSPACE, /* newline, vtab, form feed */
+  CPRE_HSPACE, /* carriage return */
+  0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+  CPRE_HSPACE, /* ' ' */
+  0, CPRE_QUOTE, 0, 0, 0, 0, CPRE_QUOTE, 0, 0, /* !"#$%&'() */
+  0, CPRE_SIGN, 0, CPRE_SIGN, 0, 0, /* *+,-./ */
+  CPRE_DIGIT|CPRE_HEX, /* 0 */
+  CPRE_DIGIT|CPRE_HEX, CPRE_DIGIT|CPRE_HEX,
+  CPRE_DIGIT|CPRE_HEX, CPRE_DIGIT|CPRE_HEX,
+  CPRE_DIGIT|CPRE_HEX, CPRE_DIGIT|CPRE_HEX,
+  CPRE_DIGIT|CPRE_HEX, CPRE_DIGIT|CPRE_HEX,
+  CPRE_DIGIT|CPRE_HEX, /* 9 */
+  0, 0, 0, 0, 0, 0, 0, /* :;<=>?@ */
+  CPRE_ID|CPRE_HEX, /* A */
+  CPRE_ID|CPRE_HEX, CPRE_ID|CPRE_HEX, CPRE_ID|CPRE_HEX, /* BCD */
+  CPRE_ID|CPRE_HEX|CPRE_EXP, /* E */
+  CPRE_ID|CPRE_HEX, CPRE_ID, CPRE_ID, CPRE_ID, /* FGHI */
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* JKLM */
+  CPRE_ID, CPRE_ID, CPRE_ID|CPRE_EXP, CPRE_ID, /* NOPQ */
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* RSTU */
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* VWXY */
+  CPRE_ID, /* Z */
+  0, 0, 0, 0, /* [\\]^ */
+  CPRE_ID, /* _ */
+  0, /* ` */
+  CPRE_ID|CPRE_HEX, /* a */
+  CPRE_ID|CPRE_HEX, CPRE_ID|CPRE_HEX, CPRE_ID|CPRE_HEX, /* bcd */
+  CPRE_ID|CPRE_HEX|CPRE_EXP, /* e */
+  CPRE_ID|CPRE_HEX, CPRE_ID, CPRE_ID, CPRE_ID, /* fghi */
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* jklm */
+  CPRE_ID, CPRE_ID, CPRE_ID|CPRE_EXP, CPRE_ID, /* nopq */
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* rstu */
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, /* vwxy */
+  CPRE_ID, /* z */
+  0, 0, 0, 0, /* {|}~ */
+  0, /* '\x7f' */
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID,
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID,
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID,
+  CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID, CPRE_ID,
+};
+
+#define parse_chartype(c, bits) \
+  ((parse_charbits[(unsigned char)(c)] & (bits)) != 0)
+
+/** Skip over a comment. */
+size_t vtkParse_SkipComment(const char *text)
+{
+  const char *cp = text;
+
+  if (cp[0] == '/')
+    {
+    if (cp[1] == '/')
+      {
+      cp += 2;
+      while (*cp != '\n' && *cp != '\0')
+        {
+        if (cp[0] == '\\')
+          {
+          if (cp[1] == '\n') { cp++; }
+          else if (cp[1] == '\r' && cp[2] == '\n') { cp += 2; }
+          }
+        cp++;
+        }
+      }
+    else if (cp[1] == '*')
+      {
+      cp += 2;
+      while (*cp != '\0')
+        {
+        if (cp[0] == '*' && cp[1] == '/') { cp += 2; break; }
+        cp++;
+        }
+      }
+    }
+
+  return cp - text;
+}
+
+/** Skip over whitespace. */
+size_t vtkParse_SkipWhitespace(const char *text, parse_space_t spacetype)
+{
+  const char *cp = text;
+
+  for (;;)
+    {
+    if (parse_chartype(*cp, spacetype))
+      {
+      do
+        {
+        cp++;
+        }
+      while (parse_chartype(*cp, spacetype));
+      }
+    if (cp[0] == '\\')
+      {
+      if (cp[1] == '\n')
+        {
+        cp += 2;
+        }
+      else if (cp[1] == '\r' && cp[2] == '\n')
+        {
+        cp += 3;
+        }
+      else
+        {
+        break;
+        }
+      }
+    else if (cp[0] == '/' && (spacetype & WS_COMMENT) != WS_COMMENT)
+      {
+      if (cp[1] == '/' || cp[1] == '*')
+        {
+        cp += vtkParse_SkipComment(cp);
+        }
+      else
+        {
+        break;
+        }
+      }
+    else
+      {
+      break;
+      }
+    }
+
+  return cp - text;
+}
+
+/** Skip over string and char literals. */
+size_t vtkParse_SkipQuotes(const char *text)
+{
+  const char *cp = text;
+  const char qc = *cp;
+
+  if (parse_chartype(*cp, CPRE_QUOTE))
+    {
+    cp++;
+    while (*cp != qc && *cp != '\n' && *cp != '\0')
+      {
+      if (*cp++ == '\\')
+        {
+        if (cp[0] == '\r' && cp[1] == '\n') { cp += 2; }
+        else if (*cp != '\0') { cp++; }
+        }
+      }
+    }
+  if (*cp == qc)
+    {
+    cp++;
+    }
+
+  return cp - text;
+}
+
+/** Skip over a number. */
+size_t vtkParse_SkipNumber(const char *text)
+{
+  const char *cp = text;
+
+  if (parse_chartype(cp[0], CPRE_DIGIT) ||
+      (cp[0] == '.' && parse_chartype(cp[1], CPRE_DIGIT)))
+    {
+    do
+      {
+      char c = *cp++;
+      if (parse_chartype(c, CPRE_EXP) &&
+          parse_chartype(*cp, CPRE_SIGN))
+        {
+        cp++;
+        }
+      }
+    while (parse_chartype(*cp, CPRE_IDGIT) || *cp == '.');
+    }
+
+  return cp - text;
+}
+
+/** Skip over a name. */
+size_t vtkParse_SkipId(const char *text)
+{
+  const char *cp = text;
+
+  if (parse_chartype(*cp, CPRE_ID))
+    {
+    do
+      {
+      cp++;
+      }
+    while (parse_chartype(*cp, CPRE_IDGIT));
+    }
+
+  return cp - text;
+}
+
+/** A simple 32-bit hash function based on "djb2". */
+#define parse_hash_name(cp, h) \
+  h = 5381; \
+  do { h = (h << 5) + h + (unsigned char)*cp++; } \
+  while (parse_chartype(*cp, CPRE_IDGIT));
+
+unsigned int vtkParse_HashId(const char *cp)
+{
+  unsigned int h = 0;
+
+  if (parse_chartype(*cp, CPRE_ID))
+    {
+    parse_hash_name(cp, h);
+    }
+
+  return h;
+}
+
+/** Skip a string or char literal, plus any ud-suffix (e.g. _km) that follows. */
+size_t parse_skip_quotes_with_suffix(const char *cp)
+{
+  size_t l = vtkParse_SkipQuotes(cp);
+  if (l && cp[l] == '_')
+    {
+    l += vtkParse_SkipId(cp + l);
+    }
+  return l;
+}
+
+/** Return the next token, or 0 if none left. */
+int vtkParse_NextToken(StringTokenizer *tokens)
+{
+  const char *cp = tokens->text + tokens->len;
+
+  /* avoid extra function call for simple whitespace */
+  if (parse_chartype(*cp, tokens->ws))
+    {
+    do { cp++; } while (parse_chartype(*cp, tokens->ws));
+    }
+  /* function call is necessary if slash encountered */
+  if (*cp == '/' || *cp == '\\')
+    {
+    cp += vtkParse_SkipWhitespace(cp, tokens->ws);
+    }
+
+  if (parse_chartype(*cp, CPRE_ID))
+    {
+    const char *ep = cp;
+    unsigned int h;
+
+    /* use a macro to compute the hash */
+    parse_hash_name(ep, h);
+
+    tokens->tok = TOK_ID;
+    tokens->hash = h;
+    tokens->text = cp;
+    tokens->len = ep - cp;
+
+    /* check if this is a prefixed string */
+    if (parse_chartype(*ep, CPRE_QUOTE) &&
+        ((*ep == '\'' && tokens->len == 1 &&
+          (*cp == 'u' || *cp == 'U' || *cp == 'L')) ||
+         (*ep == '\"' && tokens->len == 1 &&
+          (*cp == 'U' || *cp == 'u' || *cp == 'L')) ||
+         (*ep == '\"' && tokens->len == 2 && cp[0] == 'u' && cp[1] == '8')))
+      {
+      tokens->tok = (*ep == '\"' ? TOK_STRING : TOK_CHAR);
+      tokens->hash = 0;
+      tokens->len += parse_skip_quotes_with_suffix(ep);
+      }
+    else
+      {
+      /* check if this ID is a named operator */
+      static const char *op_str_array[32] = {
+        "compl", 0, 0, 0, 0, "bitor", "or", 0, 0, 0, 0, "not_eq",
+        0, "and_eq", 0, 0, 0, 0, 0, "xor_eq", 0, 0, "not", "bitand",
+        "and", 0, 0, "or_eq", 0, 0, "xor", 0 };
+      static unsigned char op_len_array[32] = {
+        5, 0, 0, 0, 0, 5, 2, 0, 0, 0, 0, 6,
+        0, 6, 0, 0, 0, 0, 0, 6, 0, 0, 3, 6,
+        3, 0, 0, 5, 0, 0, 3, 0 };
+      static int op_tok_array[32] = {
+        '~', 0, 0, 0, 0, '|', TOK_OR, 0, 0, 0, 0, TOK_NE,
+        0, TOK_AND_EQ, 0, 0, 0, 0, 0, TOK_XOR_EQ, 0, 0, '!', '&',
+        TOK_AND, 0, 0, TOK_OR_EQ, 0, 0, '^', 0 };
+
+      h &= 0x1f;
+      ep = op_str_array[h];
+      if (ep && tokens->len == op_len_array[h] &&
+          strncmp(cp, ep, tokens->len) == 0)
+        {
+        tokens->tok = op_tok_array[h];
+        tokens->hash = 0;
+        }
+      }
+    }
+  else if (parse_chartype(*cp, CPRE_QUOTE))
+    {
+    tokens->tok = (*cp == '\"' ? TOK_STRING : TOK_CHAR);
+    tokens->hash = 0;
+    tokens->text = cp;
+    tokens->len = parse_skip_quotes_with_suffix(cp);
+    }
+  else if (parse_chartype(*cp, CPRE_DIGIT) ||
+           (cp[0] == '.' && parse_chartype(cp[1], CPRE_DIGIT)))
+    {
+    tokens->tok = TOK_NUMBER;
+    tokens->hash = 0;
+    tokens->text = cp;
+    tokens->len = vtkParse_SkipNumber(cp);
+    }
+  else if (cp[0] == '/' && (cp[1] == '/' || cp[1] == '*'))
+    {
+    tokens->tok = TOK_COMMENT;
+    tokens->hash = 0;
+    tokens->text = cp;
+    tokens->len = vtkParse_SkipComment(cp);
+    }
+  else
+    {
+    int t = cp[0];
+    size_t l = 1;
+
+    switch (cp[0])
+      {
+      case ':':
+        if (cp[1] == ':') { l = 2; t = TOK_SCOPE; }
+        else if (cp[1] == '>') { l = 2; t = ']'; }
+        break;
+      case '.':
+        if (cp[1] == '.' && cp[2] == '.') { l = 3; t = TOK_ELLIPSIS; }
+        else if (cp[1] == '*') { l = 2; t = TOK_DOT_STAR; }
+        break;
+      case '=':
+        if (cp[1] == '=') { l = 2; t = TOK_EQ; }
+        break;
+      case '!':
+        if (cp[1] == '=') { l = 2; t = TOK_NE; }
+        break;
+      case '<':
+        if (cp[1] == '<' && cp[2] == '=') { l = 3; t = TOK_LSHIFT_EQ; }
+        else if (cp[1] == '<') { l = 2; t = TOK_LSHIFT; }
+        else if (cp[1] == '=') { l = 2; t = TOK_LE; }
+        else if (cp[1] == '%') { l = 2; t = '{'; }
+        else if (cp[1] == ':') { l = 2; t = '['; }
+        break;
+      case '>':
+        if (cp[1] == '>' && cp[2] == '=') { l = 3; t = TOK_RSHIFT_EQ; }
+        else if (cp[1] == '>') { l = 2; t = TOK_RSHIFT; }
+        else if (cp[1] == '=') { l = 2; t = TOK_GE; }
+        break;
+      case '&':
+        if (cp[1] == '=') { l = 2; t = TOK_AND_EQ; }
+        else if (cp[1] == '&') { l = 2; t = TOK_AND; }
+        break;
+      case '|':
+        if (cp[1] == '=') { l = 2; t = TOK_OR_EQ; }
+        else if (cp[1] == '|') { l = 2; t = TOK_OR; }
+        break;
+      case '^':
+        if (cp[1] == '=') { l = 2; t = TOK_XOR_EQ; }
+        break;
+      case '*':
+        if (cp[1] == '=') { l = 2; t = TOK_MUL_EQ; }
+        break;
+      case '/':
+        if (cp[1] == '=') { l = 2; t = TOK_DIV_EQ; }
+        break;
+      case '%':
+        if (cp[1] == '=') { l = 2; t = TOK_MOD_EQ; }
+        else if (cp[1] == '>') { l = 2; t = '}'; }
+        else if (cp[1] == ':') {
+          if (cp[2] == '%' && cp[3] == ':') { l = 4; t = TOK_DBLHASH; }
+          else { l = 2; t = '#'; } }
+        break;
+      case '+':
+        if (cp[1] == '+') { l = 2; t = TOK_INCR; }
+        else if (cp[1] == '=') { l = 2; t = TOK_ADD_EQ; }
+        break;
+      case '-':
+        if (cp[1] == '>' && cp[2] == '*') { l = 3; t = TOK_ARROW_STAR; }
+        else if (cp[1] == '>') { l = 2; t = TOK_ARROW; }
+        else if (cp[1] == '-') { l = 2; t = TOK_DECR; }
+        else if (cp[1] == '=') { l = 2; t = TOK_SUB_EQ; }
+        break;
+      case '#':
+        if (cp[1] == '#') { l = 2; t = TOK_DBLHASH; }
+        break;
+      case '\n':
+      case '\0':
+        { l = 0; t = 0; }
+        break;
+      }
+
+    tokens->tok = t;
+    tokens->hash = 0;
+    tokens->text = cp;
+    tokens->len = l;
+    }
+
+  return tokens->tok;
+}
+
+/** Initialize the tokenizer. */
+void vtkParse_InitTokenizer(
+  StringTokenizer *tokens, const char *text, parse_space_t wstype)
+{
+  tokens->tok = 0;
+  tokens->hash = 0;
+  tokens->text = text;
+  tokens->len = 0;
+  tokens->ws = wstype;
+  vtkParse_NextToken(tokens);
+}
+
+
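
    [Editor's usage sketch of the new tokenizer API; the declaration string
    is arbitrary.]

    StringTokenizer t;
    vtkParse_InitTokenizer(&t, "const char *name = \"x\";", WS_DEFAULT);
    while (t.tok != 0)
      {
      if (t.tok == TOK_ID)
        {
        /* t.text/t.len bound the identifier, t.hash is its djb2 hash */
        }
      vtkParse_NextToken(&t);
      }
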
 /*----------------------------------------------------------------
- * String utility methods
+ * String allocation methods
  *
  * Strings are centrally allocated and are const.  They should not
  * be freed until the parse is complete and all the data structures
diff --git a/Wrapping/Tools/vtkParseString.h b/Wrapping/Tools/vtkParseString.h
index 1143082..47621e9 100644
--- a/Wrapping/Tools/vtkParseString.h
+++ b/Wrapping/Tools/vtkParseString.h
@@ -12,16 +12,32 @@
      PURPOSE.  See the above copyright notice for more information.
 
 =========================================================================*/
+/*-------------------------------------------------------------------------
+  Copyright (c) 2012 David Gobbi.
 
-/*
-  String allocation routines used by vtkParse.
+  Contributed to the VisualizationToolkit by the author in April 2012
+  under the terms of the Visualization Toolkit 2008 copyright.
+-------------------------------------------------------------------------*/
 
-  The parser uses "const char *" as its string object type, and expects
-  all string objects to persist and be constant for the entire lifetime
-  of the data generated by the parse (usually this means until the parser
-  executable has exited).  All strings that are stored in the parser's
-  data objects should either be statically allocated, or allocated with
-  the vtkParse_NewString() or vtkParse_CacheString() methods declared here.
+/**
+  This file provides string handling routines.
+
+  The two important jobs done by these routines are string tokenization
+  and string caching.
+
+  Tokenization is done as per the rules of a C++ preprocessor, and
+  breaks the strings into ids, literals, and operators.  Any string
+  is a valid input for the tokenizer, and it is up to the parser to
+  decide if the resulting tokens are valid within the grammar.  The
+  two primary tokenization functions are vtkParse_InitTokenizer()
+  and vtkParse_NextToken().
+
+  Caching refers to how string memory management is done.  The
+  parser uses "const char *" for all strings, and expects all strings
+  to be persistent and constant.  These conditions are automatically
+  met by static strings, but dynamically-generated strings must be
+  cached until the parse is complete.  The primary caching functions
+  are vtkParse_CacheString() and vtkParse_FreeStringCache().
 */
 
 #ifndef VTK_PARSE_STRING_H
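
    [A hedged sketch of the caching side described in the comment above; the
    StringCache is assumed to be initialized elsewhere by the caller, and the
    generated name is hypothetical.]

    #include <stdio.h>
    #include <string.h>
    #include "vtkParseString.h"

    const char *make_persistent_name(StringCache *cache, int index)
    {
      char scratch[64];
      sprintf(scratch, "vtkTempClass%d", index);
      /* the returned pointer stays valid until vtkParse_FreeStringCache() */
      return vtkParse_CacheString(cache, scratch, strlen(scratch));
    }
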
@@ -34,8 +50,159 @@ extern "C" {
 #endif
 
 /**
+ * Various important char types for tokenization
+ */
+typedef enum _parse_char_type
+{
+  CPRE_ID       = 0x01,  /* A-Z a-z and _ */
+  CPRE_DIGIT    = 0x02,  /* 0-9 */
+  CPRE_IDGIT    = 0x03,  /* 0-9 A-Z a-z and _ */
+  CPRE_HEX      = 0x04,  /* 0-9A-Fa-f */
+  CPRE_EXP      = 0x08,  /* E, P, e, p (exponents for floats) */
+  CPRE_SIGN     = 0x10,  /* +- (sign for floats) */
+  CPRE_QUOTE    = 0x20,  /* " and ' */
+  CPRE_HSPACE   = 0x40,  /* space, tab, carriage return */
+  CPRE_VSPACE   = 0x80,  /* newline, vertical tab, form feed */
+  CPRE_WHITE    = 0xC0,  /* all whitespace characters */
+} parse_char_type;
+
+/**
+ * Character type lookup table
+ */
+extern unsigned char parse_charbits[256];
+
+/**
+ * Macro to check if a char is of a certain type
+ */
+#define vtkParse_CharType(c, bits) \
+  ((parse_charbits[(unsigned char)(c)] & (bits)) != 0)
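
    [Editor's sketch of the classification macro that replaces the old
    preproc_chartype() calls.]

    char c = 'q';
    if (vtkParse_CharType(c, CPRE_IDGIT))
      {
      /* c can appear inside an identifier: A-Z, a-z, 0-9, '_',
         or a byte with the high bit set (see parse_charbits) */
      }
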
+
+/**
+ * Whitespace types that can be used with the tokenizer.
+ * - WS_DEFAULT treats newlines and formfeeds as regular whitespace.
+ * - WS_PREPROC treats newline as end-of-line, not as whitespace.
+ * - WS_COMMENT treats comments as tokens, not as whitespace.
+ */
+typedef enum _parse_space_t
+{
+  WS_DEFAULT = CPRE_WHITE,  /* skip all whitespace */
+  WS_PREPROC = CPRE_HSPACE, /* skip horizontal whitespace only */
+  WS_COMMENT = (CPRE_WHITE | 0x100), /* comments as tokens */
+} parse_space_t;
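
    [A hedged example of the WS_PREPROC behaviour: with horizontal-only
    whitespace the tokenizer stops at the newline, which lets the
    preprocessor handle one directive per line.]

    StringTokenizer t;
    vtkParse_InitTokenizer(&t, "#define A 1\n#define B 2", WS_PREPROC);
    /* yields '#', "define", "A", "1", then t.tok == 0 at the newline */
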
+
+/**
+ * Preprocessor tokens for C++.
+ */
+typedef enum _preproc_token_t
+{
+  TOK_OTHER = 257,
+  TOK_ID,        /* any id */
+  TOK_CHAR,      /* char literal */
+  TOK_STRING,    /* string literal */
+  TOK_NUMBER,    /* any numeric literal */
+  TOK_COMMENT,   /* C or C++ comment */
+  TOK_DBLHASH,   /* ## */
+  TOK_SCOPE,     /* :: */
+  TOK_INCR,      /* ++ */
+  TOK_DECR,      /* -- */
+  TOK_RSHIFT,    /* >> */
+  TOK_LSHIFT,    /* << */
+  TOK_AND,       /* && */
+  TOK_OR,        /* || */
+  TOK_EQ,        /* == */
+  TOK_NE,        /* != */
+  TOK_GE,        /* >= */
+  TOK_LE,        /* <= */
+  TOK_ADD_EQ,    /* += */
+  TOK_SUB_EQ,    /* -= */
+  TOK_MUL_EQ,    /* *= */
+  TOK_DIV_EQ,    /* /= */
+  TOK_MOD_EQ,    /* %= */
+  TOK_AND_EQ,    /* &= */
+  TOK_OR_EQ,     /* |= */
+  TOK_XOR_EQ,    /* ^= */
+  TOK_ARROW,     /* -> */
+  TOK_DOT_STAR,  /* .* */
+  TOK_ARROW_STAR,/* ->* */
+  TOK_RSHIFT_EQ, /* >>= */
+  TOK_LSHIFT_EQ, /* <<= */
+  TOK_ELLIPSIS,  /* ... */
+} preproc_token_t;
+
+/**
+ * A struct for going through a string one token at a time.
+ * If ws is set to WS_PREPROC, then tokenization stops when a
+ * newline or null is encountered.  If ws is set to WS_DEFAULT,
+ * then tokenization only stops when a null is encountered.  If
+ * ws is set to WS_COMMENT, then tokenization stops only when
+ * a null is encountered, and comments are returned as tokens
+ * instead of being skipped as whitespace.
+ */
+typedef struct _StringTokenizer
+{
+  int tok;           /* the current token */
+  unsigned int hash; /* the hash of the current token, if it is an id */
+  const char *text;  /* the text for the current token, not null-terminated */
+  size_t len;        /* the length of the current token */
+  parse_space_t ws;  /* controls what to consider as whitespace */
+} StringTokenizer;
+
+/**
+ * Initialize the tokenizer and get the first token.
+ */
+void vtkParse_InitTokenizer(
+  StringTokenizer *tokens, const char *text, parse_space_t wstype);
+
+/**
+ * Return the next preprocessor token, or 0 if none are left.
+ */
+int vtkParse_NextToken(StringTokenizer *tokens);
+
+/**
+ * Skip over whitespace.
+ * Return the number of chars until the first non-whitespace token.
+ * Set spacetype to WS_DEFAULT, WS_PREPROC, or WS_COMMENT.
+ */
+size_t vtkParse_SkipWhitespace(
+  const char *cp, parse_space_t spacetype);
+
+/**
+ * Skip over a comment, C style or C++ style.
+ * Return the number of chars until the end of the comment.
+ */
+size_t vtkParse_SkipComment(const char *cp);
+
+/**
+ * Skip over a string in double or single quotes.
+ * Return the number of chars until the end of the quotes.
+ */
+size_t vtkParse_SkipQuotes(const char *cp);
+
+/**
+ * Skip over a number.  Uses preprocessor semantics.
+ * Return the number of chars until the end of the number.
+ */
+size_t vtkParse_SkipNumber(const char *cp);
+
+/**
+ * Skip over an identifier.
+ * Return the number of chars until the end of the identifier.
+ */
+size_t vtkParse_SkipId(const char *cp);
+
+/**
+ * Compute the hash for a id, for use in hash table lookups.
+ * This stops at the first non-Id character, so it is safe to use
+ * on a string that is not null-terminated as long as there is either
+ * whitespace or an operator character before the end of the string.
+ * It can be used on null-terminated strings as well, of course.
+ */
+unsigned int vtkParse_HashId(const char *cp);
+
+
+/**
  * StringCache provides a simple way of allocating strings centrally.
- * It eliminates the need to allocate and free each individual strings,
+ * It eliminates the need to allocate and free each individual string,
  * which makes the code simpler and more efficient.
  */
 typedef struct _StringCache
@@ -63,7 +230,8 @@ char *vtkParse_NewString(StringCache *cache, size_t n);
  * At most 'n' chars will be copied, and the string will be terminated.
  * If a null pointer is provided, then a null pointer will be returned.
  */
-const char *vtkParse_CacheString(StringCache *cache, const char *cp, size_t n);
+const char *vtkParse_CacheString(
+  StringCache *cache, const char *cp, size_t n);
 
 /**
  * Free all strings that were created with vtkParse_NewString() or
diff --git a/Wrapping/Tools/vtkParseType.h b/Wrapping/Tools/vtkParseType.h
index 14b923b..19a54d6 100644
--- a/Wrapping/Tools/vtkParseType.h
+++ b/Wrapping/Tools/vtkParseType.h
@@ -70,18 +70,21 @@
 #define VTK_PARSE_EXPLICIT    0x00080000
 #define VTK_PARSE_MUTABLE     0x00100000
 #define VTK_PARSE_VOLATILE    0x00200000
-#define VTK_PARSE_NEWINSTANCE 0x00400000
+#define VTK_PARSE_RVALUE      0x00400000
+#define VTK_PARSE_THREAD_LOCAL 0x00800000
+#define VTK_PARSE_NEWINSTANCE 0x01000000
 
 /**
  * Special
  */
-#define VTK_PARSE_TYPEDEF     0x010000000
-#define VTK_PARSE_FRIEND      0x020000000
+#define VTK_PARSE_TYPEDEF     0x10000000
+#define VTK_PARSE_FRIEND      0x20000000
+#define VTK_PARSE_PACK        0x40000000
 
 /**
  * Mask for removing qualifiers
  */
-#define VTK_PARSE_QUALIFIED_TYPE   0x00FFFFFF
+#define VTK_PARSE_QUALIFIED_TYPE   0x01FFFFFF
 #define VTK_PARSE_UNQUALIFIED_TYPE 0x0000FFFF
 
 /**
@@ -180,6 +183,10 @@
 #define VTK_PARSE_FUNCTION            0x25
 #define VTK_PARSE_QOBJECT             0x26
 #define VTK_PARSE_LONG_DOUBLE         0x27
+#define VTK_PARSE_WCHAR_T             0x28
+#define VTK_PARSE_CHAR16_T            0x29
+#define VTK_PARSE_CHAR32_T            0x2A
+#define VTK_PARSE_NULLPTR_T           0x2B
 
 /**
  * Basic pointer types
@@ -214,6 +221,11 @@
 #define VTK_PARSE_FUNCTION_PTR            0x225
 #define VTK_PARSE_QOBJECT_PTR             0x226
 #define VTK_PARSE_LONG_DOUBLE_PTR         0x227
+#define VTK_PARSE_WCHAR_T_PTR             0x228
+#define VTK_PARSE_CHAR16_T_PTR            0x229
+#define VTK_PARSE_CHAR32_T_PTR            0x22A
+#define VTK_PARSE_NULLPTR_T_PTR           0x22B
+
 
 /**
  * Basic reference types
@@ -247,12 +259,18 @@
 #define VTK_PARSE_ISTREAM_REF             0x124
 #define VTK_PARSE_QOBJECT_REF             0x126
 #define VTK_PARSE_LONG_DOUBLE_REF         0x127
+#define VTK_PARSE_WCHAR_T_REF             0x128
+#define VTK_PARSE_CHAR16_T_REF            0x129
+#define VTK_PARSE_CHAR32_T_REF            0x12A
+#define VTK_PARSE_NULLPTR_T_REF           0x12B
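
    [A hedged sketch of how the new base-type constants compose with the
    indirection bits; VTK_PARSE_BASE_TYPE, VTK_PARSE_INDIRECT and
    VTK_PARSE_POINTER are defined elsewhere in vtkParseType.h, outside
    this hunk.]

    unsigned int t = VTK_PARSE_CHAR16_T_PTR;          /* 0x229 */
    unsigned int base = (t & VTK_PARSE_BASE_TYPE);    /* VTK_PARSE_CHAR16_T */
    int is_ptr = ((t & VTK_PARSE_INDIRECT) == VTK_PARSE_POINTER);
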
 
 /**
  * For backwards compatibility
  */
+#ifndef VTK_PARSE_LEGACY_REMOVE
 #define VTK_PARSE_VTK_OBJECT        VTK_PARSE_OBJECT
 #define VTK_PARSE_VTK_OBJECT_PTR    VTK_PARSE_OBJECT_PTR
 #define VTK_PARSE_VTK_OBJECT_REF    VTK_PARSE_OBJECT_REF
+#endif
 
 #endif
diff --git a/Wrapping/Tools/vtkWrap.c b/Wrapping/Tools/vtkWrap.c
index 25ede98..d9846b9 100644
--- a/Wrapping/Tools/vtkWrap.c
+++ b/Wrapping/Tools/vtkWrap.c
@@ -65,6 +65,13 @@ int vtkWrap_IsCharPointer(ValueInfo *val)
   return (t == VTK_PARSE_CHAR && vtkWrap_IsPointer(val));
 }
 
+int vtkWrap_IsPODPointer(ValueInfo *val)
+{
+  unsigned int t = (val->Type & VTK_PARSE_BASE_TYPE);
+  return (t != VTK_PARSE_CHAR && vtkWrap_IsNumeric(val) &&
+          vtkWrap_IsPointer(val));
+}
+
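
    [A hedged sketch of how the wrappers are expected to use the new
    predicate: an unsized numeric pointer parameter such as `double *values`
    (no Count, no CountHint) now gets array treatment, while `char *` and
    `void *` keep their existing paths.]

    if (vtkWrap_IsPODPointer(val) && val->Count == 0 && val->CountHint == NULL)
      {
      /* the generated Python wrapper sizes the array from the argument
         sequence (ap.GetArgSize) and fetches it with GetArray */
      }
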
 int vtkWrap_IsVTKObject(ValueInfo *val)
 {
   unsigned int t = (val->Type & VTK_PARSE_UNQUALIFIED_TYPE);
@@ -921,7 +928,7 @@ void vtkWrap_DeclareVariable(
       fprintf(fp, "*");
       }
     /* arrays of unknown size are handled via pointers */
-    else if (val->CountHint)
+    else if (val->CountHint || vtkWrap_IsPODPointer(val))
       {
       fprintf(fp, "*");
       }
@@ -945,7 +952,8 @@ void vtkWrap_DeclareVariable(
         aType != VTK_PARSE_VOID_PTR &&
         aType != VTK_PARSE_OBJECT_PTR &&
         !vtkWrap_IsQtObject(val) &&
-        val->CountHint == NULL)
+        val->CountHint == NULL &&
+        !vtkWrap_IsPODPointer(val))
       {
       if (val->NumberOfDimensions == 1 && val->Count > 0)
         {
@@ -974,7 +982,7 @@ void vtkWrap_DeclareVariable(
       {
       fprintf(fp, " = NULL");
       }
-    else if (val->CountHint)
+    else if (val->CountHint || vtkWrap_IsPODPointer(val))
       {
       fprintf(fp, " = NULL");
       }
@@ -1016,12 +1024,12 @@ void vtkWrap_DeclareVariableSize(
 
     fprintf(fp, " };\n");
     }
-  else if (val->Count != 0 || val->CountHint)
+  else if (val->Count != 0 || val->CountHint || vtkWrap_IsPODPointer(val))
     {
     fprintf(fp,
             "  %sint %s%s = %d;\n",
-            (val->CountHint ? "" : "const "), name, idx,
-            (val->CountHint ? 0 : val->Count));
+            (val->Count == 0 ? "" : "const "), name, idx,
+            (val->Count == 0 ? 0 : val->Count));
     }
   else if (val->NumberOfDimensions == 1)
     {
diff --git a/Wrapping/Tools/vtkWrap.h b/Wrapping/Tools/vtkWrap.h
index 8b385eb..04d216d 100644
--- a/Wrapping/Tools/vtkWrap.h
+++ b/Wrapping/Tools/vtkWrap.h
@@ -38,12 +38,14 @@ extern "C" {
 
 /**
  * Check for common types.
+ * IsPODPointer is for unsized arrays of POD types.
  */
 /*@{*/
 int vtkWrap_IsVoid(ValueInfo *val);
 int vtkWrap_IsVoidFunction(ValueInfo *val);
 int vtkWrap_IsVoidPointer(ValueInfo *val);
 int vtkWrap_IsCharPointer(ValueInfo *val);
+int vtkWrap_IsPODPointer(ValueInfo *val);
 int vtkWrap_IsVTKObject(ValueInfo *val);
 int vtkWrap_IsSpecialObject(ValueInfo *val);
 int vtkWrap_IsQtObject(ValueInfo *val);
diff --git a/Wrapping/Tools/vtkWrapJava.c b/Wrapping/Tools/vtkWrapJava.c
index d4e18a5..32ab62c 100644
--- a/Wrapping/Tools/vtkWrapJava.c
+++ b/Wrapping/Tools/vtkWrapJava.c
@@ -801,7 +801,7 @@ int checkFunctionSignature(ClassInfo *data)
     VTK_PARSE_ID_TYPE, VTK_PARSE_UNSIGNED_ID_TYPE,
     VTK_PARSE_LONG_LONG, VTK_PARSE_UNSIGNED_LONG_LONG,
     VTK_PARSE___INT64, VTK_PARSE_UNSIGNED___INT64,
-    VTK_PARSE_VTK_OBJECT, VTK_PARSE_STRING,
+    VTK_PARSE_OBJECT, VTK_PARSE_STRING,
     0
   };
 
@@ -879,7 +879,7 @@ int checkFunctionSignature(ClassInfo *data)
         }
       }
 
-    if (aType == VTK_PARSE_VTK_OBJECT) args_ok = 0;
+    if (aType == VTK_PARSE_OBJECT) args_ok = 0;
     if (((aType & VTK_PARSE_INDIRECT) != VTK_PARSE_POINTER) &&
         ((aType & VTK_PARSE_INDIRECT) != 0) &&
         (aType != VTK_PARSE_STRING_REF)) args_ok = 0;
@@ -936,7 +936,7 @@ int checkFunctionSignature(ClassInfo *data)
 
     if (((aType & VTK_PARSE_INDIRECT) == VTK_PARSE_POINTER)&&
         (currentFunction->ArgCounts[i] <= 0)&&
-        (aType != VTK_PARSE_VTK_OBJECT_PTR)&&
+        (aType != VTK_PARSE_OBJECT_PTR)&&
         (aType != VTK_PARSE_CHAR_PTR)) args_ok = 0;
     }
 
diff --git a/Wrapping/Tools/vtkWrapPython.c b/Wrapping/Tools/vtkWrapPython.c
index df6d8a7..dcfe28d 100644
--- a/Wrapping/Tools/vtkWrapPython.c
+++ b/Wrapping/Tools/vtkWrapPython.c
@@ -134,7 +134,7 @@ static void vtkWrapPython_OverloadMethodDef(
 static void vtkWrapPython_OverloadMasterMethod(
   FILE *fp, const char *classname, int *overloadMap, int maxArgs,
   FunctionInfo **wrappedFunctions, int numberOfWrappedFunctions, int fnum,
-  int numberOfOccurrences, int is_vtkobject, int all_legacy);
+  int is_vtkobject, int all_legacy);
 
 /* output the MethodDef table for this class */
 static void vtkWrapPython_ClassMethodDef(
@@ -183,7 +183,7 @@ static char *vtkWrapPython_FormatString(
   FunctionInfo *currentFunction);
 
 /* weed out methods that will never be called */
-static void vtkWrapPython_RemovePreceededMethods(
+static void vtkWrapPython_RemovePrecededMethods(
   FunctionInfo *wrappedFunctions[],
   int numberWrapped, int fnum);
 
@@ -657,7 +657,8 @@ static void vtkWrapPython_DeclareVariables(
       }
 
     /* temps for arrays */
-    if (vtkWrap_IsArray(arg) || vtkWrap_IsNArray(arg))
+    if (vtkWrap_IsArray(arg) || vtkWrap_IsNArray(arg) ||
+        vtkWrap_IsPODPointer(arg))
       {
       storageSize = 4;
       if (!vtkWrap_IsConst(arg) &&
@@ -667,7 +668,7 @@ static void vtkWrapPython_DeclareVariables(
         vtkWrap_DeclareVariable(fp, arg, "save", i, VTK_WRAP_ARG);
         storageSize *= 2;
         }
-      if (arg->CountHint)
+      if (arg->CountHint || vtkWrap_IsPODPointer(arg))
         {
         fprintf(fp,
                 "  %s small%d[%d];\n",
@@ -702,54 +703,64 @@ static void vtkWrapPython_GetSizesForArrays(
   FILE *fp, FunctionInfo *theFunc, int is_vtkobject)
 {
   int i, j, n;
-  const char *ndnt;
+  const char *indentation = "";
   const char *mtwo;
-
-  /* the indentation amount */
-  ndnt = (is_vtkobject ? "  " : "");
+  ValueInfo *arg;
 
   n = vtkWrap_CountWrappedParameters(theFunc);
 
-  j = (is_vtkobject ? 1 : 0);
+  j = ((is_vtkobject && !theFunc->IsStatic) ? 1 : 0);
   for (i = 0; i < n; i++)
     {
-    if (theFunc->Parameters[i]->CountHint)
+    arg = theFunc->Parameters[i];
+
+    if (arg->CountHint || vtkWrap_IsPODPointer(arg))
       {
       if (j == 1)
         {
         fprintf(fp,
                 "  if (op)\n"
                 "    {\n");
+        indentation = "  ";
         }
       j += 2;
-      fprintf(fp,
-              "  %ssize%d = op->%s;\n",
-              ((j & 1) != 0 ? "  " : ""), i,
-              theFunc->Parameters[i]->CountHint);
+      if (arg->CountHint)
+        {
+        fprintf(fp,
+              "%s  size%d = op->%s;\n",
+              indentation, i, arg->CountHint);
+        }
+      else
+        {
+        fprintf(fp,
+              "%s  size%d = ap.GetArgSize(%d);\n",
+              indentation, i, i);
+        }
 
       /* for non-const arrays, alloc twice as much space */
       mtwo = "";
-      if (!vtkWrap_IsConst(theFunc->Parameters[i]) &&
-          !vtkWrap_IsSetVectorMethod(theFunc))
+      if (!vtkWrap_IsConst(arg) && !vtkWrap_IsSetVectorMethod(theFunc))
         {
         mtwo = "2*";
         }
 
       fprintf(fp,
-              "  %stemp%d = small%d;\n"
-              "  %sif (size%d > 4)\n"
-              "    %s{\n"
-              "    %stemp%d = new %s[%ssize%d];\n"
-              "    %s}\n",
-              ndnt, i, i, ndnt, i, ndnt, ndnt,
-              i, vtkWrap_GetTypeName(theFunc->Parameters[i]), mtwo, i,
-              ndnt);
+              "%s  temp%d = small%d;\n"
+              "%s  if (size%d > 4)\n"
+              "%s    {\n"
+              "%s    temp%d = new %s[%ssize%d];\n"
+              "%s    }\n",
+              indentation, i, i,
+              indentation, i,
+              indentation,
+              indentation, i, vtkWrap_GetTypeName(arg), mtwo, i,
+              indentation);
 
       if (*mtwo)
         {
         fprintf(fp,
-              "  %ssave%d = &temp%d[size%d];\n",
-              ndnt, i, i, i);
+              "%s  save%d = &temp%d[size%d];\n",
+              indentation, i, i, i);
         }
       }
     }
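
    [For reference, a hedged reconstruction of what these fprintf calls emit
    for a hypothetical non-const POD-pointer argument at index 0 of type
    double, in the static/non-vtkObject case where the extra indentation
    string is empty:]

      size0 = ap.GetArgSize(0);
      temp0 = small0;
      if (size0 > 4)
        {
        temp0 = new double[2*size0];
        }
      save0 = &temp0[size0];
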
@@ -853,6 +864,11 @@ static void vtkWrapPython_GetSingleArgument(
     fprintf(fp, "%sGetArray(%stemp%d, size%d)",
             prefix, argname, i, i);
     }
+  else if (vtkWrap_IsPODPointer(arg))
+    {
+    fprintf(fp, "%sGetArray(%stemp%d, size%d)",
+            prefix, argname, i, i);
+    }
 }
 
 /* -------------------------------------------------------------------- */
@@ -1307,7 +1323,8 @@ static char *vtkWrapPython_ArgCheckString(
       currPos += strlen(pythonname);
       }
 
-    else if (vtkWrap_IsArray(arg) || vtkWrap_IsNArray(arg))
+    else if (vtkWrap_IsArray(arg) || vtkWrap_IsNArray(arg) ||
+             vtkWrap_IsPODPointer(arg))
       {
       result[currPos++] = ' ';
       result[currPos++] = '*';
@@ -1340,7 +1357,7 @@ static char *vtkWrapPython_ArgCheckString(
  * The type closest to the native Python type wins.
  */
 
-void vtkWrapPython_RemovePreceededMethods(
+void vtkWrapPython_RemovePrecededMethods(
   FunctionInfo *wrappedFunctions[],
   int numberOfWrappedFunctions, int fnum)
 {
@@ -1350,6 +1367,7 @@ void vtkWrapPython_RemovePreceededMethods(
   FunctionInfo *sig2;
   ValueInfo *val1;
   ValueInfo *val2;
+  int dim1, dim2;
   int vote1 = 0;
   int vote2 = 0;
   int occ1, occ2;
@@ -1387,7 +1405,11 @@ void vtkWrapPython_RemovePreceededMethods(
             argmatch = 0;
             val1 = sig1->Parameters[i];
             val2 = sig2->Parameters[i];
-            if (val1->NumberOfDimensions != val2->NumberOfDimensions)
+            dim1 = (val1->NumberOfDimensions > 0 ? val1->NumberOfDimensions :
+                    (vtkWrap_IsPODPointer(val1) || vtkWrap_IsArray(val1)));
+            dim2 = (val2->NumberOfDimensions > 0 ? val2->NumberOfDimensions :
+                    (vtkWrap_IsPODPointer(val2) || vtkWrap_IsArray(val2)));
+            if (dim1 != dim2)
               {
               vote1 = 0;
               vote2 = 0;
@@ -1699,12 +1721,13 @@ void vtkWrapPython_SaveArrayArgs(FILE *fp, FunctionInfo *currentFunction)
     {
     arg = currentFunction->Parameters[i];
     n = arg->NumberOfDimensions;
-    if (n < 1 && vtkWrap_IsArray(arg))
+    if (n < 1 && (vtkWrap_IsArray(arg) || vtkWrap_IsPODPointer(arg)))
       {
       n = 1;
       }
 
-    if ((vtkWrap_IsArray(arg) || vtkWrap_IsNArray(arg)) &&
+    if ((vtkWrap_IsArray(arg) || vtkWrap_IsNArray(arg) ||
+         vtkWrap_IsPODPointer(arg)) &&
         (arg->Type & VTK_PARSE_CONST) == 0)
       {
       noneDone = 0;
@@ -1952,7 +1975,7 @@ static void vtkWrapPython_WriteBackToArgs(
     {
     arg = currentFunction->Parameters[i];
     n = arg->NumberOfDimensions;
-    if (n < 1 && vtkWrap_IsArray(arg))
+    if (n < 1 && (vtkWrap_IsArray(arg) || vtkWrap_IsPODPointer(arg)))
       {
       n = 1;
       }
@@ -1968,7 +1991,8 @@ static void vtkWrapPython_WriteBackToArgs(
               i, i);
       }
 
-    else if ((vtkWrap_IsArray(arg) || vtkWrap_IsNArray(arg)) &&
+    else if ((vtkWrap_IsArray(arg) || vtkWrap_IsNArray(arg) ||
+              vtkWrap_IsPODPointer(arg)) &&
              !vtkWrap_IsConst(arg) &&
              !vtkWrap_IsSetVectorMethod(currentFunction))
       {
@@ -2028,7 +2052,7 @@ static void vtkWrapPython_FreeAllocatedArrays(
     {
     arg = currentFunction->Parameters[i];
 
-    if (arg->CountHint)
+    if (arg->CountHint || vtkWrap_IsPODPointer(arg))
       {
       fprintf(fp,
               "  if (temp%d && temp%d != small%d)\n"
@@ -2164,10 +2188,11 @@ static void vtkWrapPython_OverloadMethodDef(
       }
 
     fprintf(fp,
-            "  {NULL, Py%s_%s%s, 1,\n"
+            "  {NULL, Py%s_%s%s, METH_VARARGS%s,\n"
             "   (char*)\"%s\"},\n",
             classname, wrappedFunctions[occ]->Name,
             occSuffix,
+            theOccurrence->IsStatic ? " | METH_STATIC" : "",
             vtkWrapPython_ArgCheckString(
               (is_vtkobject && !theOccurrence->IsStatic),
               wrappedFunctions[occ]));
@@ -2199,11 +2224,12 @@ static void vtkWrapPython_OverloadMethodDef(
 static void vtkWrapPython_OverloadMasterMethod(
   FILE *fp, const char *classname, int *overloadMap, int maxArgs,
   FunctionInfo **wrappedFunctions, int numberOfWrappedFunctions, int fnum,
-  int numberOfOccurrences, int is_vtkobject, int all_legacy)
+  int is_vtkobject, int all_legacy)
 {
   FunctionInfo *currentFunction;
+  FunctionInfo *theOccurrence;
   int overlap = 0;
-  int occ;
+  int occ, occCounter;
   int i;
   int foundOne;
   int any_static = 0;
@@ -2258,25 +2284,46 @@ static void vtkWrapPython_OverloadMasterMethod(
           "  switch(nargs)\n"
           "    {\n");
 
-  for (occ = 1; occ <= numberOfOccurrences; occ++)
+  /* find all occurrences of this method */
+  occCounter = 0;
+  for (occ = fnum; occ < numberOfWrappedFunctions; occ++)
     {
-    foundOne = 0;
-    for (i = 0; i <= maxArgs; i++)
+    theOccurrence = wrappedFunctions[occ];
+
+    /* is it the same name */
+    if (theOccurrence->Name &&
+        strcmp(currentFunction->Name, theOccurrence->Name) == 0)
       {
-      if (overloadMap[i] == occ)
+      occCounter++;
+
+      foundOne = 0;
+      for (i = 0; i <= maxArgs; i++)
+        {
+        if (overloadMap[i] == occCounter)
+          {
+          if (!foundOne && theOccurrence->IsLegacy && !all_legacy)
+            {
+            fprintf(fp,
+                 "#if !defined(VTK_LEGACY_REMOVE)\n");
+            }
+          fprintf(fp,
+                  "    case %d:\n",
+                  i);
+          foundOne = 1;
+          }
+        }
+      if (foundOne)
         {
         fprintf(fp,
-                "    case %d:\n",
-                i);
-        foundOne = 1;
+                "      return Py%s_%s_s%d(self, args);\n",
+                classname, currentFunction->Name, occCounter);
+        if (theOccurrence->IsLegacy && !all_legacy)
+          {
+          fprintf(fp,
+                "#endif\n");
+          }
         }
       }
-    if (foundOne)
-      {
-      fprintf(fp,
-              "      return Py%s_%s_s%d(self, args);\n",
-              classname, currentFunction->Name, occ);
-      }
     }
 
   if (overlap)
@@ -2507,7 +2554,7 @@ void vtkWrapPython_GenerateOneMethod(
     vtkWrapPython_OverloadMasterMethod(
       fp, classname, overloadMap, maxArgs,
       wrappedFunctions, numberOfWrappedFunctions,
-      fnum, numberOfOccurrences, is_vtkobject, all_legacy);
+      fnum, is_vtkobject, all_legacy);
     }
 
   /* set the legacy flag */
@@ -2594,7 +2641,7 @@ static void vtkWrapPython_GenerateMethods(
 
     /* check for type precedence, don't need a "float" method if a
        "double" method exists */
-    vtkWrapPython_RemovePreceededMethods(
+    vtkWrapPython_RemovePrecededMethods(
       wrappedFunctions, numberOfWrappedFunctions, fnum);
 
     /* if theFunc wasn't removed, process all its signatures */
@@ -2654,9 +2701,10 @@ static void vtkWrapPython_ClassMethodDef(
         comment, maxlen - strlen(signatures));
 
       fprintf(fp,
-              "  {(char*)\"%s\", Py%s_%s, 1,\n",
+              "  {(char*)\"%s\", Py%s_%s, METH_VARARGS%s,\n",
               wrappedFunctions[fnum]->Name, classname,
-              wrappedFunctions[fnum]->Name);
+              wrappedFunctions[fnum]->Name,
+              wrappedFunctions[fnum]->IsStatic ? " | METH_STATIC" : "");
 
       fprintf(fp,
               "   (char*)\"%s\\n\\n%s\"},\n",
@@ -2796,7 +2844,8 @@ static int vtkWrapPython_IsValueWrappable(
   else if (vtkWrap_IsPointer(val))
     {
     if (vtkWrap_IsCharPointer(val) ||
-        vtkWrap_IsVoidPointer(val))
+        vtkWrap_IsVoidPointer(val) ||
+        vtkWrap_IsPODPointer(val))
       {
       return 1;
       }
@@ -4172,7 +4221,11 @@ static void vtkWrapPython_GenerateSpecialType(
       "#define DECLARED_Py%s_Type\n"
       "#endif\n"
       "\n",
+#if defined(VTK_BUILD_SHARED_LIBS)
       supername, (is_external ? "VTK_ABI_IMPORT" : "VTK_ABI_EXPORT"),
+#else
+      supername, "VTK_ABI_EXPORT",
+#endif
       supername, supername);
     }
 
diff --git a/Wrapping/Tools/vtkWrapPythonInit.c b/Wrapping/Tools/vtkWrapPythonInit.c
index d516af6..dd86a43 100644
--- a/Wrapping/Tools/vtkWrapPythonInit.c
+++ b/Wrapping/Tools/vtkWrapPythonInit.c
@@ -2,35 +2,19 @@
 #include <stdlib.h>
 #include <string.h>
 
-/* warning this code is also in getclasses.cxx under pcmaker */
-/* this roputine creates the init file */
-static void CreateInitFile(const char *libName,
-  int numFiles, char **files,
-  FILE *fout)
+static void CreateInitFile(const char *libName, FILE *fout)
 {
-  int i;
-
   const char *prefix = "";
   const char* dllexp = "VTK_ABI_EXPORT ";
 
   fprintf(fout,"// Generated by vtkWrapPythonInit in VTK/Wrapping\n");
-  fprintf(fout,"#include \"vtkPython.h\"\n\n");
   fprintf(fout,"#include \"vtkSystemIncludes.h\"\n");
-  fprintf(fout,"#include <string.h>\n");
   fprintf(fout,"// Handle compiler warning messages, etc.\n"
           "#if defined( _MSC_VER ) && !defined(VTK_DISPLAY_WIN32_WARNINGS)\n"
           "#pragma warning ( disable : 4706 )\n"
           "#endif // Windows Warnings \n\n");
 
-  for (i = 0; i < numFiles; i++)
-    {
-    fprintf(fout,"extern  \"C\" {%sPyObject *PyVTKAddFile_%s(PyObject *, const char *); }\n", dllexp, files[i]);
-    }
-
-  fprintf(fout,"\nstatic PyMethodDef Py%s_ClassMethods[] = {\n", libName);
-  fprintf(fout,"{NULL, NULL, 0, NULL}};\n\n");
-
-  fprintf(fout,"static void real_init%s(const char *modulename);\n\n", libName);
+  fprintf(fout,"extern \"C\" void real_init%s(const char *modulename);\n\n", libName);
 
   for (;;)
     {
@@ -50,6 +34,37 @@ static void CreateInitFile(const char *libName,
 #endif
     break;
     }
+}
+
+
+/* warning this code is also in getclasses.cxx under pcmaker */
+/* this routine creates the init file */
+static void CreateImplFile(const char *libName,
+  int numFiles, char **files,
+  FILE *fout)
+{
+  int i;
+
+  const char* dllexp = "VTK_ABI_EXPORT ";
+
+  fprintf(fout,"// Generated by vtkWrapPythonInit in VTK/Wrapping\n");
+  fprintf(fout,"#include \"vtkPython.h\"\n\n");
+  fprintf(fout,"#include \"vtkSystemIncludes.h\"\n");
+  fprintf(fout,"#include <string.h>\n");
+  fprintf(fout,"// Handle compiler warning messages, etc.\n"
+          "#if defined( _MSC_VER ) && !defined(VTK_DISPLAY_WIN32_WARNINGS)\n"
+          "#pragma warning ( disable : 4706 )\n"
+          "#endif // Windows Warnings \n\n");
+
+  for (i = 0; i < numFiles; i++)
+    {
+    fprintf(fout,"extern  \"C\" {%sPyObject *PyVTKAddFile_%s(PyObject *, const char *); }\n", dllexp, files[i]);
+    }
+
+  fprintf(fout,"\nstatic PyMethodDef Py%s_ClassMethods[] = {\n", libName);
+  fprintf(fout,"{NULL, NULL, 0, NULL}};\n\n");
+
+  fprintf(fout,"extern  \"C\" {%svoid real_init%s(const char *modulename); }\n\n", dllexp, libName);
 
   fprintf(fout,"void real_init%s(const char *modulename)\n{\n", libName);
 
@@ -77,15 +92,16 @@ static void CreateInitFile(const char *libName,
 int main(int argc,char *argv[])
 {
   FILE *file;
-  FILE *fout;
+  FILE *fout_init;
+  FILE *fout_impl;
   int numFiles = 0;
   char libName[250];
   char tmpVal[250];
   char *files[4000];
 
-  if (argc < 3)
+  if (argc < 4)
     {
-    fprintf(stderr,"Usage: %s input_file output_file\n",argv[0]);
+    fprintf(stderr,"Usage: %s input_file init_file impl_file\n",argv[0]);
     return 1;
     }
 
@@ -96,19 +112,11 @@ int main(int argc,char *argv[])
     return 1;
     }
 
-  fout = fopen(argv[2],"w");
-  if (!fout)
-    {
-    fclose(file);
-    return 1;
-    }
-
   /* read the info from the file */
   if (fscanf(file,"%s",libName) != 1)
     {
     fprintf(stderr,"Error getting libName\n");
     fclose(file);
-    fclose(fout);
     return 1;
     }
   /* read in the classes */
@@ -119,6 +127,20 @@ int main(int argc,char *argv[])
     }
   /* close the file */
   fclose(file);
+  file = NULL;
+
+  fout_init = fopen(argv[2],"w");
+  if (!fout_init)
+    {
+    return 1;
+    }
+
+  fout_impl = fopen(argv[3],"w");
+  if (!fout_impl)
+    {
+    fclose(fout_init);
+    return 1;
+    }
 
   /* extra object for the Common module */
   if (strcmp(libName, "vtkCommonCorePython") == 0)
@@ -128,8 +150,10 @@ int main(int argc,char *argv[])
     numFiles++;
     }
 
-  CreateInitFile(libName, numFiles, files, fout);
-  fclose(fout);
+  CreateInitFile(libName, fout_init);
+  CreateImplFile(libName, numFiles, files, fout_impl);
+  fclose(fout_init);
+  fclose(fout_impl);
 
   return 0;
 }
diff --git a/Wrapping/Tools/vtkWrapText.c b/Wrapping/Tools/vtkWrapText.c
index 14b71ae..d95f28a 100644
--- a/Wrapping/Tools/vtkWrapText.c
+++ b/Wrapping/Tools/vtkWrapText.c
@@ -931,22 +931,20 @@ static void vtkWrapText_PythonTypeSignature(
     classname = "int";
     }
 
-  if (vtkWrap_IsArray(arg))
+  if ((vtkWrap_IsArray(arg) && arg->CountHint) ||
+      vtkWrap_IsPODPointer(arg))
     {
-    if (arg->CountHint)
-      {
-      vtkWPString_Append(result, braces[0]);
-      vtkWPString_Append(result, classname);
-      vtkWPString_Append(result, ", ...");
-      vtkWPString_Append(result, braces[1]);
-      }
-    else
-      {
-      sprintf(text, "%d", arg->Count);
-      dimension = text;
-      vtkWrapText_PythonArraySignature(result, classname, braces,
-        1, &dimension);
-      }
+    vtkWPString_Append(result, braces[0]);
+    vtkWPString_Append(result, classname);
+    vtkWPString_Append(result, ", ...");
+    vtkWPString_Append(result, braces[1]);
+    }
+  else if (vtkWrap_IsArray(arg))
+    {
+    sprintf(text, "%d", arg->Count);
+    dimension = text;
+    vtkWrapText_PythonArraySignature(result, classname, braces,
+      1, &dimension);
     }
   else if (vtkWrap_IsNArray(arg))
     {

-- 
Alioth's /usr/local/bin/git-commit-notice on /srv/git.debian.org/git/debian-science/packages/vtk6.git


